From 81537f8a0c210dfa455b31e0d300cdee1f11a6d0 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 18 Feb 2019 11:31:03 +0900
Subject: [PATCH 001/163] Change the range for tau to [0, 10]

---
 R/ra_prospect.R                  |  2 +-
 inst/stan_files/ra_prospect.stan | 16 ++++++++--------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/R/ra_prospect.R b/R/ra_prospect.R
index 30b85f89..a9fe24a0 100644
--- a/R/ra_prospect.R
+++ b/R/ra_prospect.R
@@ -37,7 +37,7 @@ ra_prospect <- hBayesDM_model(
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
   parameters = list("rho" = c(0, 1, 2),
                     "lambda" = c(0, 1, 5),
-                    "tau" = c(0, 1, 5)),
+                    "tau" = c(0, 1, 10)),

   preprocess_func = function(raw_data, general_info) {
     # Currently class(raw_data) == "data.table"
diff --git a/inst/stan_files/ra_prospect.stan b/inst/stan_files/ra_prospect.stan
index 8ec58a18..05f026d5 100644
--- a/inst/stan_files/ra_prospect.stan
+++ b/inst/stan_files/ra_prospect.stan
@@ -19,14 +19,14 @@ parameters {
   vector[N] tau_pr;
 }
 transformed parameters {
-  vector[N] rho;
-  vector[N] lambda;
-  vector[N] tau;
+  vector[N] rho;
+  vector[N] lambda;
+  vector[N] tau;

   for (i in 1:N) {
     rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
     lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5;
-    tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 5;
+    tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10;
   }
 }
 model {
@@ -55,9 +55,9 @@ model {
   }
 }
 generated quantities {
-  real mu_rho;
-  real mu_lambda;
-  real mu_tau;
+  real mu_rho;
+  real mu_lambda;
+  real mu_tau;

   real log_lik[N];

@@ -73,7 +73,7 @@ generated quantities {

   mu_rho = Phi_approx(mu_pr[1]) * 2;
   mu_lambda = Phi_approx(mu_pr[2]) * 5;
-  mu_tau = Phi_approx(mu_pr[3]) * 5;
+  mu_tau = Phi_approx(mu_pr[3]) * 10;

   { // local section, this saves time and space
     for (i in 1:N) {
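A note on the transform being retuned above: Stan's Phi_approx() is a fast logistic approximation to the standard normal CDF, so Phi_approx(mu_pr + sigma * tau_pr) always lies in (0, 1) and the trailing multiplier is what sets tau's upper bound; this commit widens that bound from 5 to 10. A minimal R sketch of the same mapping (the polynomial form follows Stan's documented approximation, pnorm() is the exact CDF, and the mu_pr/sigma values and sampled deviates below are made up for illustration):

# Stan's Phi_approx(x) = inv_logit(0.07056 * x^3 + 1.5976 * x)
phi_approx <- function(x) plogis(0.07056 * x^3 + 1.5976 * x)

tau_pr <- rnorm(5)                            # unconstrained subject-level deviates
tau    <- phi_approx(0.3 + 1.2 * tau_pr) * 10 # hypothetical mu_pr[3] and sigma[3]
stopifnot(all(tau > 0 & tau < 10))            # tau can never leave (0, 10)
max(abs(phi_approx(tau_pr) - pnorm(tau_pr)))  # tiny error vs. the exact CDF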
From 3fd0e54b9050cba485706bd9a53b6baceef83e54 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 18 Feb 2019 11:36:34 +0900
Subject: [PATCH 002/163] Modify the range of tau for all the ra models

---
 R/ra_noLA.R                      | 2 +-
 R/ra_noRA.R                      | 2 +-
 R/ra_prospect.R                  | 2 +-
 inst/stan_files/ra_noLA.stan     | 8 ++++----
 inst/stan_files/ra_noRA.stan     | 8 ++++----
 inst/stan_files/ra_prospect.stan | 8 ++++----
 6 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/R/ra_noLA.R b/R/ra_noLA.R
index 73cbb967..8fa5de1d 100644
--- a/R/ra_noLA.R
+++ b/R/ra_noLA.R
@@ -35,7 +35,7 @@ ra_noLA <- hBayesDM_model(
   model_name = "noLA",
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
   parameters = list("rho" = c(0, 1, 2),
-                    "tau" = c(0, 1, 5)),
+                    "tau" = c(0, 1, 30)),

   preprocess_func = function(raw_data, general_info) {
     # Currently class(raw_data) == "data.table"
diff --git a/R/ra_noRA.R b/R/ra_noRA.R
index 1fabcf63..0b4b7995 100644
--- a/R/ra_noRA.R
+++ b/R/ra_noRA.R
@@ -35,7 +35,7 @@ ra_noRA <- hBayesDM_model(
   model_name = "noRA",
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
   parameters = list("lambda" = c(0, 1, 5),
-                    "tau" = c(0, 1, 5)),
+                    "tau" = c(0, 1, 30)),

   preprocess_func = function(raw_data, general_info) {
     # Currently class(raw_data) == "data.table"
diff --git a/R/ra_prospect.R b/R/ra_prospect.R
index a9fe24a0..50175d98 100644
--- a/R/ra_prospect.R
+++ b/R/ra_prospect.R
@@ -37,7 +37,7 @@ ra_prospect <- hBayesDM_model(
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
   parameters = list("rho" = c(0, 1, 2),
                     "lambda" = c(0, 1, 5),
-                    "tau" = c(0, 1, 10)),
+                    "tau" = c(0, 1, 30)),

   preprocess_func = function(raw_data, general_info) {
     # Currently class(raw_data) == "data.table"
diff --git a/inst/stan_files/ra_noLA.stan b/inst/stan_files/ra_noLA.stan
index 1b1fc52d..c5c599c4 100644
--- a/inst/stan_files/ra_noLA.stan
+++ b/inst/stan_files/ra_noLA.stan
@@ -22,11 +22,11 @@ parameters {

 transformed parameters {
   vector[N] rho;
-  vector[N] tau;
+  vector[N] tau;

   for (i in 1:N) {
     rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
-    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5;
+    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
   }
 }
@@ -55,7 +55,7 @@ model {
 generated quantities {
   real mu_rho;
-  real mu_tau;
+  real mu_tau;

   real log_lik[N];

@@ -70,7 +70,7 @@ generated quantities {
   }

   mu_rho = Phi_approx(mu_pr[1]) * 2;
-  mu_tau = Phi_approx(mu_pr[2]) * 5;
+  mu_tau = Phi_approx(mu_pr[2]) * 30;

   { // local section, this saves time and space
     for (i in 1:N) {
diff --git a/inst/stan_files/ra_noRA.stan b/inst/stan_files/ra_noRA.stan
index a489b1fe..0f36c3be 100644
--- a/inst/stan_files/ra_noRA.stan
+++ b/inst/stan_files/ra_noRA.stan
@@ -22,11 +22,11 @@ parameters {

 transformed parameters {
   vector[N] lambda;
-  vector[N] tau;
+  vector[N] tau;

   for (i in 1:N) {
     lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]) * 5;
-    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5;
+    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
   }
 }
@@ -56,7 +56,7 @@ model {
 generated quantities {
   real mu_lambda;
-  real mu_tau;
+  real mu_tau;

   real log_lik[N];

@@ -71,7 +71,7 @@ generated quantities {
   }

   mu_lambda = Phi_approx(mu_pr[1]) * 5;
-  mu_tau = Phi_approx(mu_pr[2]) * 5;
+  mu_tau = Phi_approx(mu_pr[2]) * 30;

   { // local section, this saves time and space
     for (i in 1:N) {
diff --git a/inst/stan_files/ra_prospect.stan b/inst/stan_files/ra_prospect.stan
index 05f026d5..542ea460 100644
--- a/inst/stan_files/ra_prospect.stan
+++ b/inst/stan_files/ra_prospect.stan
@@ -21,12 +21,12 @@ parameters {
 transformed parameters {
   vector[N] rho;
   vector[N] lambda;
-  vector[N] tau;
+  vector[N] tau;

   for (i in 1:N) {
     rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
     lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5;
-    tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10;
+    tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 30;
   }
 }
 model {
@@ -57,7 +57,7 @@ generated quantities {
   real mu_rho;
   real mu_lambda;
-  real mu_tau;
+  real mu_tau;

   real log_lik[N];

@@ -73,7 +73,7 @@ generated quantities {

   mu_rho = Phi_approx(mu_pr[1]) * 2;
   mu_lambda = Phi_approx(mu_pr[2]) * 5;
-  mu_tau = Phi_approx(mu_pr[3]) * 10;
+  mu_tau = Phi_approx(mu_pr[3]) * 30;

   { // local section, this saves time and space
     for (i in 1:N) {
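Why the cap matters for fitting: in these risk-aversion models tau acts as an inverse-temperature (choice-sensitivity) parameter, so its upper bound limits how deterministic the fitted choice rule can be. A small R sketch, assuming a logistic choice rule of the form P(gamble) = inv_logit(tau * (EV_gamble - EV_certain)) — a paraphrase of the likelihood, not a verbatim line from the Stan files — with a made-up utility difference:

dEV <- 0.15                     # hypothetical utility difference favouring the gamble
p_gamble <- function(tau) plogis(tau * dEV)
sapply(c(5, 10, 30), p_gamble)  # ~0.68, 0.82, 0.99: only the wider cap
                                # can express near-deterministic choosers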
From 9d610a4511d1046c325a9f63e0e74b3535210ae5 Mon Sep 17 00:00:00 2001
From: Jethro Lee
Date: Tue, 19 Feb 2019 12:19:54 +0900
Subject: [PATCH 003/163] Also modify range of tau in all ra model JSON files

---
 man-roxygen/data/ra_noLA.json     | 2 +-
 man-roxygen/data/ra_noRA.json     | 2 +-
 man-roxygen/data/ra_prospect.json | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/man-roxygen/data/ra_noLA.json b/man-roxygen/data/ra_noLA.json
index f44ce55b..6a5cd001 100644
--- a/man-roxygen/data/ra_noLA.json
+++ b/man-roxygen/data/ra_noLA.json
@@ -3,5 +3,5 @@
   "data_columns": ["subjID", "gain", "loss", "cert", "gamble"],
   "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"],
   "parameters": ["rho", "tau"],
-  "gen_init": [[0, 1, 2], [0, 1, 5]]
+  "gen_init": [[0, 1, 2], [0, 1, 30]]
 }
diff --git a/man-roxygen/data/ra_noRA.json b/man-roxygen/data/ra_noRA.json
index 4d12a9cf..ff53fa09 100644
--- a/man-roxygen/data/ra_noRA.json
+++ b/man-roxygen/data/ra_noRA.json
@@ -3,5 +3,5 @@
   "data_columns": ["subjID", "gain", "loss", "cert", "gamble"],
   "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"],
   "parameters": ["lambda", "tau"],
-  "gen_init": [[0, 1, 5], [0, 1, 5]]
+  "gen_init": [[0, 1, 5], [0, 1, 30]]
 }
diff --git a/man-roxygen/data/ra_prospect.json b/man-roxygen/data/ra_prospect.json
index a5036746..36cbc86f 100644
--- a/man-roxygen/data/ra_prospect.json
+++ b/man-roxygen/data/ra_prospect.json
@@ -3,5 +3,5 @@
   "data_columns": ["subjID", "gain", "loss", "cert", "gamble"],
   "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"],
   "parameters": ["rho", "lambda", "tau"],
-  "gen_init": [[0, 1, 2], [0, 1, 5], [0, 1, 5]]
+  "gen_init": [[0, 1, 2], [0, 1, 5], [0, 1, 30]]
 }
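These JSON files mirror the parameters lists edited in PATCH 002; hBayesDM documents each triple as (lower bound, plausible value, upper bound), and the triples feed initial-value generation, so the R, Stan, and JSON copies of the upper bound have to move together. A sketch of the kind of inverse mapping such a triple supports (an illustration, not hBayesDM's actual init routine):

# Invert tau = Phi(tau_raw) * upper to find the raw-scale start value
# that corresponds to a plausible tau; qnorm() undoes the normal CDF.
raw_init <- function(plausible, upper) qnorm(plausible / upper)
raw_init(1, 5)    # ~ -0.84: raw start for tau = 1 under the old cap
raw_init(1, 30)   # ~ -1.83: raw start for the same tau = 1 under the new cap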
From 3096212d885c9c6751094beb36e9847774c19335 Mon Sep 17 00:00:00 2001
From: Jethro Lee
Date: Tue, 16 Apr 2019 15:19:33 +0900
Subject: [PATCH 004/163] Add submodule as inst/common/

---
 .gitmodules | 3 +++
 inst/common | 1 +
 2 files changed, 4 insertions(+)
 create mode 100644 .gitmodules
 create mode 160000 inst/common

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 00000000..f7a75667
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "hBayesDM-models"]
+	path = inst/common
+	url = https://github.com/CCS-Lab/hBayesDM-models.git
diff --git a/inst/common b/inst/common
new file mode 160000
index 00000000..bf544254
--- /dev/null
+++ b/inst/common
@@ -0,0 +1 @@
+Subproject commit bf544254404e303f43355180018e6534fe133bb0
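Because inst/common is a gitlink (the 160000 mode above) rather than ordinary files, a checkout only records the pinned commit; the submodule's contents still have to be fetched separately. A minimal sketch from R — the parent-repository URL is an assumption here (the diff itself only names the submodule's URL), and both git invocations are standard:

# Clone with submodules in one step (assumed parent repo URL):
system("git clone --recursive https://github.com/CCS-Lab/hBayesDM.git")
# Or materialise the submodule inside an existing clone:
system("git submodule update --init inst/common")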
From 7c207488211fe688c6b21a0f129154a5b229eaa3 Mon Sep 17 00:00:00 2001
From: Jethro Lee
Date: Tue, 16 Apr 2019 15:42:59 +0900
Subject: [PATCH 005/163] Remove existing (overlapping) files

---
 inst/extdata/bandit2arm_exampleData.txt        |  2001 ---
 inst/extdata/bandit4arm2_exampleData.txt       |  3001 ----
 inst/extdata/bandit4arm_exampleData.txt        |  2001 ---
 inst/extdata/bart_exampleData.txt              |    91 -
 inst/extdata/choiceRT_exampleData.txt          |  5001 ------
 inst/extdata/choiceRT_single_exampleData.txt   |  1001 --
 inst/extdata/cra_exampleData.txt               |   541 -
 inst/extdata/dbdm_exampleData.txt              | 15001 ----------------
 inst/extdata/dd_exampleData.txt                |  2161 ---
 inst/extdata/dd_single_exampleData.txt         |   109 -
 inst/extdata/gng_exampleData.txt               |  2401 ---
 inst/extdata/igt_exampleData.txt               |   401 -
 inst/extdata/peer_exampleData.txt              |   361 -
 inst/extdata/prl_exampleData.txt               |  2001 ---
 inst/extdata/prl_multipleB_exampleData.txt     |  1801 --
 inst/extdata/pst_exampleData.txt               |  1021 --
 inst/extdata/ra_data_attend.txt                |  4192 -----
 inst/extdata/ra_data_reappraisal.txt           |  4190 -----
 inst/extdata/ra_exampleData.txt                |   701 -
 inst/extdata/rdt_exampleData.txt               |   901 -
 inst/extdata/ts_exampleData.txt                |  2191 ---
 inst/extdata/ug_exampleData.txt                |  1801 --
 inst/extdata/wcs_answersheet.txt               |     4 -
 inst/extdata/wcs_exampleData.txt               |  1158 --
 inst/stan_files/bandit2arm_delta.stan          |   109 -
 .../stan_files/bandit4arm2_kalman_filter.stan  |   163 -
 inst/stan_files/bandit4arm_2par_lapse.stan     |   173 -
 inst/stan_files/bandit4arm_4par.stan           |   176 -
 inst/stan_files/bandit4arm_lapse.stan          |   182 -
 inst/stan_files/bandit4arm_lapse_decay.stan    |   201 -
 inst/stan_files/bandit4arm_singleA_lapse.stan  |   177 -
 inst/stan_files/bart_par4.stan                 |   129 -
 inst/stan_files/choiceRT_ddm.stan              |    98 -
 inst/stan_files/choiceRT_ddm_single.stan       |    58 -
 inst/stan_files/choiceRT_lba.stan              |   278 -
 inst/stan_files/choiceRT_lba_single.stan       |   239 -
 inst/stan_files/cra_exp.stan                   |   134 -
 inst/stan_files/cra_linear.stan                |   130 -
 inst/stan_files/dbdm_prob_weight.stan          |   154 -
 inst/stan_files/dd_cs.stan                     |   107 -
 inst/stan_files/dd_cs_single.stan              |    63 -
 inst/stan_files/dd_exp.stan                    |   101 -
 inst/stan_files/dd_hyperbolic.stan             |   101 -
 inst/stan_files/dd_hyperbolic_single.stan      |    57 -
 inst/stan_files/gng_m1.stan                    |   149 -
 inst/stan_files/gng_m2.stan                    |   160 -
 inst/stan_files/gng_m3.stan                    |   179 -
 inst/stan_files/gng_m4.stan                    |   210 -
 inst/stan_files/igt_orl.stan                   |   207 -
 inst/stan_files/igt_pvl_decay.stan             |   134 -
 inst/stan_files/igt_pvl_delta.stan             |   132 -
 inst/stan_files/igt_vpp.stan                   |   188 -
 inst/stan_files/peer_ocu.stan                  |   115 -
 inst/stan_files/pre/license.stan               |    14 -
 inst/stan_files/prl_ewa.stan                   |   179 -
 inst/stan_files/prl_fictitious.stan            |   173 -
 inst/stan_files/prl_fictitious_multipleB.stan  |   185 -
 inst/stan_files/prl_fictitious_rp.stan         |   188 -
 inst/stan_files/prl_fictitious_rp_woa.stan     |   180 -
 inst/stan_files/prl_fictitious_woa.stan        |   165 -
 inst/stan_files/prl_rp.stan                    |   149 -
 inst/stan_files/prl_rp_multipleB.stan          |   161 -
 inst/stan_files/pst_gainloss_Q.stan            |   114 -
 inst/stan_files/ra_noLA.stan                   |    95 -
 inst/stan_files/ra_noRA.stan                   |    95 -
 inst/stan_files/ra_prospect.stan               |    97 -
 inst/stan_files/rdt_happiness.stan             |   146 -
 inst/stan_files/ts_par4.stan                   |   204 -
 inst/stan_files/ts_par6.stan                   |   213 -
 inst/stan_files/ts_par7.stan                   |   217 -
 inst/stan_files/ug_bayes.stan                  |   167 -
 inst/stan_files/ug_delta.stan                  |   129 -
 inst/stan_files/wcs_sql.stan                   |   176 -
 man-roxygen/ModelFunctionInfo.schema.json      |    42 -
 man-roxygen/ModelFunctionInfo.schema.md        |    11 -
 man-roxygen/data/bandit2arm_delta.json         |     7 -
 .../data/bandit4arm2_kalman_filter.json        |     7 -
 man-roxygen/data/bandit4arm_4par.json          |     7 -
 man-roxygen/data/bandit4arm_lapse.json         |     7 -
 man-roxygen/data/bart_par4.json                |     7 -
 man-roxygen/data/choiceRT_ddm.json             |     7 -
 man-roxygen/data/choiceRT_ddm_single.json      |     7 -
 man-roxygen/data/choiceRT_lba_single.json      |     7 -
 man-roxygen/data/cra_exp.json                  |     8 -
 man-roxygen/data/cra_linear.json               |     8 -
 man-roxygen/data/dbdm_prob_weight.json         |     7 -
 man-roxygen/data/dd_cs.json                    |     7 -
 man-roxygen/data/dd_cs_single.json             |     7 -
 man-roxygen/data/dd_exp.json                   |     7 -
 man-roxygen/data/dd_hyperbolic.json            |     7 -
 man-roxygen/data/dd_hyperbolic_single.json     |     7 -
 man-roxygen/data/gng_m1.json                   |     8 -
 man-roxygen/data/gng_m2.json                   |     8 -
 man-roxygen/data/gng_m3.json                   |     8 -
 man-roxygen/data/gng_m4.json                   |     8 -
 man-roxygen/data/igt_orl.json                  |     7 -
 man-roxygen/data/igt_pvl_decay.json            |     7 -
 man-roxygen/data/igt_pvl_delta.json            |     7 -
 man-roxygen/data/igt_vpp.json                  |     7 -
 man-roxygen/data/peer_ocu.json                 |     7 -
 man-roxygen/data/prl_ewa.json                  |     8 -
 man-roxygen/data/prl_fictitious.json           |     8 -
 .../data/prl_fictitious_multipleB.json         |     8 -
 man-roxygen/data/prl_fictitious_rp.json        |     8 -
 man-roxygen/data/prl_fictitious_rp_woa.json    |     8 -
 man-roxygen/data/prl_fictitious_woa.json       |     8 -
 man-roxygen/data/prl_rp.json                   |     8 -
 man-roxygen/data/prl_rp_multipleB.json         |     8 -
 man-roxygen/data/pst_gainloss_Q.json           |     7 -
 man-roxygen/data/ra_noLA.json                  |     7 -
 man-roxygen/data/ra_noRA.json                  |     7 -
 man-roxygen/data/ra_prospect.json              |     7 -
 man-roxygen/data/rdt_happiness.json            |     7 -
 man-roxygen/data/ts_par4.json                  |     7 -
 man-roxygen/data/ts_par6.json                  |     7 -
 man-roxygen/data/ts_par7.json                  |     7 -
 man-roxygen/data/ug_bayes.json                 |     7 -
 man-roxygen/data/ug_delta.json                 |     7 -
 man-roxygen/data/wcs_sql.json                  |     7 -
 119 files changed, 61758 deletions(-)
 delete mode 100644 inst/extdata/bandit2arm_exampleData.txt
 delete mode 100644 inst/extdata/bandit4arm2_exampleData.txt
 delete mode 100644 inst/extdata/bandit4arm_exampleData.txt
 delete mode 100644 inst/extdata/bart_exampleData.txt
 delete mode 100644 inst/extdata/choiceRT_exampleData.txt
 delete mode 100644 inst/extdata/choiceRT_single_exampleData.txt
 delete mode 100644 inst/extdata/cra_exampleData.txt
 delete mode 100644 inst/extdata/dbdm_exampleData.txt
 delete mode 100644 inst/extdata/dd_exampleData.txt
 delete mode 100644 inst/extdata/dd_single_exampleData.txt
 delete mode 100644 inst/extdata/gng_exampleData.txt
 delete mode 100644 inst/extdata/igt_exampleData.txt
 delete mode 100644 inst/extdata/peer_exampleData.txt
 delete mode 100644 inst/extdata/prl_exampleData.txt
 delete mode 100644 inst/extdata/prl_multipleB_exampleData.txt
 delete mode 100644 inst/extdata/pst_exampleData.txt
 delete mode 100644 inst/extdata/ra_data_attend.txt
 delete mode 100644 inst/extdata/ra_data_reappraisal.txt
 delete mode 100644 inst/extdata/ra_exampleData.txt
 delete mode 100644 inst/extdata/rdt_exampleData.txt
 delete mode 100644 inst/extdata/ts_exampleData.txt
 delete mode 100644 inst/extdata/ug_exampleData.txt
 delete mode 100644 inst/extdata/wcs_answersheet.txt
 delete mode 100644 inst/extdata/wcs_exampleData.txt
 delete mode 100644 inst/stan_files/bandit2arm_delta.stan
 delete mode 100644 inst/stan_files/bandit4arm2_kalman_filter.stan
 delete mode 100644 inst/stan_files/bandit4arm_2par_lapse.stan
 delete mode 100644 inst/stan_files/bandit4arm_4par.stan
 delete mode 100644 inst/stan_files/bandit4arm_lapse.stan
 delete mode 100644 inst/stan_files/bandit4arm_lapse_decay.stan
 delete mode 100644 inst/stan_files/bandit4arm_singleA_lapse.stan
 delete mode 100644 inst/stan_files/bart_par4.stan
 delete mode 100644 inst/stan_files/choiceRT_ddm.stan
 delete mode 100644 inst/stan_files/choiceRT_ddm_single.stan
 delete mode 100644 inst/stan_files/choiceRT_lba.stan
 delete mode 100644 inst/stan_files/choiceRT_lba_single.stan
 delete mode 100644 inst/stan_files/cra_exp.stan
 delete mode 100644 inst/stan_files/cra_linear.stan
 delete mode 100644 inst/stan_files/dbdm_prob_weight.stan
 delete mode 100644 inst/stan_files/dd_cs.stan
 delete mode 100644 inst/stan_files/dd_cs_single.stan
 delete mode 100644 inst/stan_files/dd_exp.stan
 delete mode 100644 inst/stan_files/dd_hyperbolic.stan
 delete mode 100644 inst/stan_files/dd_hyperbolic_single.stan
 delete mode 100644 inst/stan_files/gng_m1.stan
 delete mode 100644 inst/stan_files/gng_m2.stan
 delete mode 100644 inst/stan_files/gng_m3.stan
 delete mode 100644 inst/stan_files/gng_m4.stan
 delete mode 100644 inst/stan_files/igt_orl.stan
 delete mode 100644 inst/stan_files/igt_pvl_decay.stan
 delete mode 100644 inst/stan_files/igt_pvl_delta.stan
 delete mode 100644 inst/stan_files/igt_vpp.stan
 delete mode 100644 inst/stan_files/peer_ocu.stan
 delete mode 100644 inst/stan_files/pre/license.stan
 delete mode 100644 inst/stan_files/prl_ewa.stan
 delete mode 100644 inst/stan_files/prl_fictitious.stan
 delete mode 100644 inst/stan_files/prl_fictitious_multipleB.stan
 delete mode 100644 inst/stan_files/prl_fictitious_rp.stan
 delete mode 100644 inst/stan_files/prl_fictitious_rp_woa.stan
 delete mode 100644 inst/stan_files/prl_fictitious_woa.stan
 delete mode 100644 inst/stan_files/prl_rp.stan
 delete mode 100644 inst/stan_files/prl_rp_multipleB.stan
 delete mode 100644 inst/stan_files/pst_gainloss_Q.stan
 delete mode 100644 inst/stan_files/ra_noLA.stan
 delete mode 100644 inst/stan_files/ra_noRA.stan
 delete mode 100644 inst/stan_files/ra_prospect.stan
 delete mode 100644 inst/stan_files/rdt_happiness.stan
 delete mode 100644 inst/stan_files/ts_par4.stan
 delete mode 100644 inst/stan_files/ts_par6.stan
 delete mode 100644 inst/stan_files/ts_par7.stan
 delete mode 100644 inst/stan_files/ug_bayes.stan
 delete mode 100644 inst/stan_files/ug_delta.stan
 delete mode 100644 inst/stan_files/wcs_sql.stan
 delete mode 100644 man-roxygen/ModelFunctionInfo.schema.json
 delete mode 100644 man-roxygen/ModelFunctionInfo.schema.md
 delete mode 100644 man-roxygen/data/bandit2arm_delta.json
 delete mode 100644 man-roxygen/data/bandit4arm2_kalman_filter.json
 delete mode 100644 man-roxygen/data/bandit4arm_4par.json
 delete mode 100644 man-roxygen/data/bandit4arm_lapse.json
 delete mode 100644 man-roxygen/data/bart_par4.json
 delete mode 100644 man-roxygen/data/choiceRT_ddm.json
 delete mode 100644 man-roxygen/data/choiceRT_ddm_single.json
 delete mode 100644 man-roxygen/data/choiceRT_lba_single.json
 delete mode 100644 man-roxygen/data/cra_exp.json
 delete mode 100644 man-roxygen/data/cra_linear.json
 delete mode 100644 man-roxygen/data/dbdm_prob_weight.json
 delete mode 100644 man-roxygen/data/dd_cs.json
 delete mode 100644 man-roxygen/data/dd_cs_single.json
 delete mode 100644 man-roxygen/data/dd_exp.json
 delete mode 100644 man-roxygen/data/dd_hyperbolic.json
 delete mode 100644 man-roxygen/data/dd_hyperbolic_single.json
 delete mode 100644 man-roxygen/data/gng_m1.json
 delete mode 100644 man-roxygen/data/gng_m2.json
 delete mode 100644 man-roxygen/data/gng_m3.json
 delete mode 100644 man-roxygen/data/gng_m4.json
 delete mode 100644 man-roxygen/data/igt_orl.json
 delete mode 100644 man-roxygen/data/igt_pvl_decay.json
 delete mode 100644 man-roxygen/data/igt_pvl_delta.json
 delete mode 100644 man-roxygen/data/igt_vpp.json
 delete mode 100644 man-roxygen/data/peer_ocu.json
 delete mode 100644 man-roxygen/data/prl_ewa.json
 delete mode 100644 man-roxygen/data/prl_fictitious.json
 delete mode 100644 man-roxygen/data/prl_fictitious_multipleB.json
 delete mode 100644 man-roxygen/data/prl_fictitious_rp.json
 delete mode 100644 man-roxygen/data/prl_fictitious_rp_woa.json
 delete mode 100644 man-roxygen/data/prl_fictitious_woa.json
 delete mode 100644 man-roxygen/data/prl_rp.json
 delete mode 100644 man-roxygen/data/prl_rp_multipleB.json
 delete mode 100644 man-roxygen/data/pst_gainloss_Q.json
 delete mode 100644 man-roxygen/data/ra_noLA.json
 delete mode 100644 man-roxygen/data/ra_noRA.json
 delete mode 100644 man-roxygen/data/ra_prospect.json
 delete mode 100644 man-roxygen/data/rdt_happiness.json
 delete mode 100644 man-roxygen/data/ts_par4.json
 delete mode 100644 man-roxygen/data/ts_par6.json
 delete mode 100644 man-roxygen/data/ts_par7.json
 delete mode 100644 man-roxygen/data/ug_bayes.json
 delete mode 100644 man-roxygen/data/ug_delta.json
 delete mode 100644 man-roxygen/data/wcs_sql.json

diff --git a/inst/extdata/bandit2arm_exampleData.txt b/inst/extdata/bandit2arm_exampleData.txt
deleted file mode 100644
index d28e2ca2..00000000
--- a/inst/extdata/bandit2arm_exampleData.txt
+++ /dev/null
@@ -1,2001 +0,0 @@
-subjID trial choice outcome
[2,000 deleted data rows (subjects 1-20, 100 trials each) omitted]
diff --git a/inst/extdata/bandit4arm2_exampleData.txt b/inst/extdata/bandit4arm2_exampleData.txt
deleted file mode 100644
index 68ac99e3..00000000
--- a/inst/extdata/bandit4arm2_exampleData.txt
+++ /dev/null
@@ -1,3001 +0,0 @@
-subjID choice outcome
[3,000 deleted data rows omitted; the patch text is cut off partway through this file]
4 47 -9 1 61 -9 1 59 -9 1 60 -9 3 52 -9 1 64 -9 1 64 -9 4 46 -9 4 46 -9 1 63 -9 1 63 -9 4 42 -9 2 27 -9 1 63 -9 1 61 -9 3 68 -9 3 69 -9 3 69 -9 3 70 -9 3 67 -9 1 71 -9 1 73 -9 3 62 -9 3 69 -9 1 73 -9 3 65 -9 4 38 -9 3 66 -9 3 68 -9 1 72 -9 1 67 -9 3 64 -9 3 63 -9 4 36 -9 1 66 -9 3 57 -9 1 64 -9 1 66 -9 3 68 -9 1 62 -9 3 64 -10 1 62 -10 4 34 -10 3 88 -10 2 36 -10 3 91 -10 3 97 -10 3 94 -10 3 94 -10 3 93 -10 3 93 -10 3 88 -10 3 87 -10 3 85 -10 3 85 -10 3 81 -10 3 76 -10 3 73 -10 3 65 -10 3 64 -10 3 61 -10 3 60 -10 3 61 -10 3 68 -10 2 31 -10 3 69 -10 3 68 -10 3 62 -10 3 61 -10 4 32 -10 3 68 -10 3 68 -10 1 62 -10 3 66 -10 3 64 -10 3 68 -10 3 71 -10 3 69 -10 2 44 -10 1 72 -10 3 59 -10 1 70 -10 3 56 -10 1 69 -10 1 69 -10 1 71 -10 1 68 -10 1 70 -10 1 70 -10 1 72 -10 3 64 -10 1 70 -10 1 67 -10 1 67 -10 1 65 -10 1 66 -10 2 38 -10 2 36 -10 1 53 -10 1 51 -10 3 58 -10 1 58 -10 3 60 -10 2 32 -10 4 53 -10 3 44 -10 4 55 -10 1 59 -10 3 47 -10 3 48 -10 4 48 -10 2 23 -10 4 48 -10 1 58 -10 1 59 -10 1 61 -10 4 55 -10 1 53 -10 1 52 -10 4 57 -10 4 55 -10 2 31 -10 4 53 -10 4 57 -10 3 58 -10 3 58 -10 3 58 -10 3 59 -10 4 60 -10 3 58 -10 4 61 -10 2 37 -10 3 53 -10 1 49 -10 4 60 -10 1 49 -10 4 58 -10 4 60 -10 4 55 -10 3 58 -10 3 55 -10 3 53 -10 3 51 -10 4 60 -10 3 47 -10 4 62 -10 4 62 -10 4 63 -10 1 48 -10 4 66 -10 1 38 -10 4 71 -10 4 67 -10 3 48 -10 4 69 -10 4 72 -10 4 68 -10 4 70 -10 4 71 -10 4 70 -10 4 69 -10 1 51 -10 4 77 -10 4 72 -10 4 70 -10 2 55 -10 3 49 -10 1 50 -10 4 75 -10 4 79 -10 4 80 -10 4 81 -10 4 84 -10 4 85 -10 4 85 -10 4 88 -10 4 82 -10 4 80 -10 4 75 -10 4 73 -10 4 75 -10 4 76 -10 4 77 -10 4 77 -10 4 77 -10 4 74 -10 4 79 -10 4 79 -10 4 79 -10 4 76 -10 4 76 -10 4 78 -10 4 77 -10 2 69 -10 4 76 -10 4 80 -10 4 74 -10 4 72 -10 4 69 -10 4 73 -10 4 74 -10 4 69 -10 4 68 -10 1 71 -10 1 72 -10 1 68 -10 4 62 -10 2 84 -10 4 59 -10 1 65 -10 1 62 -10 2 84 -10 2 85 -10 2 82 -10 2 84 -10 2 84 -10 2 85 -10 2 88 -10 2 90 -10 2 87 -10 2 86 -10 2 86 -10 2 89 -10 2 86 -10 2 81 -10 2 80 -10 2 77 -10 2 81 -10 2 86 -10 2 85 -10 2 91 -10 2 91 -10 2 87 -10 2 87 -10 2 83 -10 2 85 -10 2 91 -10 2 92 -10 2 90 -10 2 90 -10 2 91 -10 2 89 -10 2 88 -10 2 90 -10 2 88 -10 2 84 -10 2 86 -10 2 81 -10 2 81 -10 2 82 -10 2 81 -10 2 78 -10 2 78 -10 2 80 -10 2 80 -10 2 80 -10 2 81 -10 2 78 -10 2 80 -10 2 78 -10 2 76 -10 2 78 -10 2 74 -10 2 73 -10 2 73 -10 3 70 -10 3 72 -10 4 26 -10 3 77 -10 3 76 -10 3 79 -10 2 74 -10 2 75 -10 3 75 -10 2 73 -10 2 69 -10 4 32 -10 2 67 -10 3 65 -10 2 60 -10 3 63 -10 2 62 -10 3 62 -10 3 59 -10 2 55 -10 2 52 -10 3 58 -10 1 63 -10 2 49 -10 1 63 -10 3 51 -10 3 51 -10 1 56 -10 1 58 -10 3 50 -10 2 53 -10 1 66 -10 1 67 -10 1 65 -10 1 63 -10 1 60 -10 1 61 -10 1 59 -10 1 60 -10 1 62 -10 1 64 -10 1 64 -10 1 63 -10 3 60 -10 1 63 -10 1 63 -10 1 63 -10 1 64 -10 4 42 -10 1 61 -10 1 62 -10 3 69 -10 3 69 -10 4 42 -10 3 67 -10 4 45 -10 3 60 -10 3 62 -10 3 69 -10 3 65 -10 3 65 -10 3 63 -10 3 66 -10 3 68 -10 3 67 -10 3 61 -10 3 64 -10 2 32 -10 3 63 -10 3 58 -10 3 57 -10 3 57 -10 3 67 -10 3 68 -10 3 66 -10 3 64 diff --git a/inst/extdata/bandit4arm_exampleData.txt b/inst/extdata/bandit4arm_exampleData.txt deleted file mode 100644 index 92ffad0d..00000000 --- a/inst/extdata/bandit4arm_exampleData.txt +++ /dev/null @@ -1,2001 +0,0 @@ -subjID gain loss choice -102 0 0 2 -102 1 0 1 -102 0 -1 1 -102 1 0 3 -102 0 -1 3 -102 0 -1 4 -102 0 -1 4 -102 0 0 2 -102 1 0 2 -102 0 0 1 -102 0 0 1 -102 0 0 2 -102 0 -1 3 -102 0 0 1 -102 1 0 1 -102 0 0 2 -102 0 0 2 -102 1 0 1 -102 0 0 1 -102 0 0 1 -102 0 0 1 -102 0 -1 2 -102 1 0 3 -102 0 0 1 -102 1 0 3 
[… remaining deleted rows of bandit4arm_exampleData.txt elided (2,000 data rows in total, subjects 102–209) …]
\ No newline at end of file
diff --git a/inst/extdata/bart_exampleData.txt b/inst/extdata/bart_exampleData.txt
deleted file mode 100644
index 4890955f..00000000
--- a/inst/extdata/bart_exampleData.txt
+++ /dev/null
@@ -1,91 +0,0 @@
-subjID group trial reward pumps explosion
-101 0 1 1 14 1
-101 0 2 1 39 0
[… remaining deleted rows of bart_exampleData.txt elided (90 data rows in total, subjects 101, 103, 104) …]
diff --git a/inst/extdata/choiceRT_exampleData.txt b/inst/extdata/choiceRT_exampleData.txt
deleted file mode 100644
index bbe2b474..00000000
--- a/inst/extdata/choiceRT_exampleData.txt
+++ /dev/null
@@ -1,5001 +0,0 @@
-RT choice subjID condition
-0.238126253704183 1 1 1
-0.788334139249308 2 1 1
[… deleted rows of choiceRT_exampleData.txt elided (5,000 data rows in total) …]
-0.303814059799115 2 2 1 -0.38868794499924 2 2 1 -0.468185403754697 1 2 1 -0.64039931341012 2 2 1 -0.4922813732329 2 2 1 -0.685240592163985 2 2 1 -0.460729493739463 2 2 1 -0.454264406687532 2 2 1 -0.292098065982487 2 2 1 -0.259711243117317 2 2 1 -0.476819420143709 2 2 1 -0.54147202807433 2 2 1 -0.294960782219479 2 2 1 -0.265064471822996 2 2 1 -0.268586719914729 2 2 1 -0.663075377720097 2 2 1 -0.443778226905662 2 2 1 -0.33084725716745 2 2 1 -0.45665127223439 2 2 1 -0.937763503430533 2 2 1 -1.25335333650901 2 2 1 -0.882712190513773 2 2 1 -0.292970601755812 2 2 1 -0.34861595618408 1 2 1 -0.278204853725307 2 2 1 -0.677235764875177 1 2 1 -1.08316216966813 2 2 1 -0.365942431227363 2 2 1 -0.305484095875439 2 2 1 -0.235624184994017 2 2 1 -0.398658337232737 2 2 1 -0.434318631638857 2 2 1 -0.355187848050399 1 2 1 -0.454420611999672 2 2 1 -0.198639116074042 2 2 1 -0.359080427717729 2 2 1 -1.74384487531936 2 2 1 -0.191245680862617 2 2 1 -0.783931590262253 2 2 1 -0.567473626830682 2 2 1 -0.45296456401947 2 2 1 -0.248317703257658 2 2 1 -0.699649264703012 2 2 1 -0.611361282985861 2 2 1 -0.941610741093518 1 2 1 -0.58654507029159 2 2 1 -0.371419809546963 2 2 2 -0.476259551913618 2 2 2 -0.31638619890226 2 2 2 -0.623249848667632 2 2 2 -0.241497913775783 2 2 2 -0.270061097781809 1 2 2 -0.293966968248291 1 2 2 -0.299480877137146 1 2 2 -0.576031229139046 2 2 2 -0.574375556532669 2 2 2 -0.484563476656291 2 2 2 -0.397331777673979 2 2 2 -0.501973158919077 2 2 2 -0.354716577854211 2 2 2 -0.608633700459432 2 2 2 -0.263227756156975 2 2 2 -0.812103027527017 2 2 2 -0.433996647919137 2 2 2 -0.516347639257426 2 2 2 -0.272600890706883 2 2 2 -0.286047555800837 2 2 2 -0.342521016193953 2 2 2 -0.443573882489982 1 2 2 -0.526526295854292 2 2 2 -0.372529192399058 1 2 2 -0.294310906384799 2 2 2 -0.964693955497906 1 2 2 -0.388120088040495 2 2 2 -0.292344532077584 2 2 2 -1.15960223884759 2 2 2 -0.405153223966 1 2 2 -0.354989409321607 2 2 2 -0.218636295771409 2 2 2 -0.517752319489056 2 2 2 -0.288570371961949 2 2 2 -0.557762064455683 2 2 2 -0.216932595356505 1 2 2 -0.586061017160239 2 2 2 -0.269412734371731 2 2 2 -0.622722702637302 1 2 2 -0.244923602459739 2 2 2 -0.685296064602417 2 2 2 -0.292439180652386 1 2 2 -0.26310281088841 1 2 2 -0.234853617821344 2 2 2 -0.505912235218254 2 2 2 -0.221940510664284 2 2 2 -0.204399980929288 2 2 2 -0.848247148946075 2 2 2 -0.652791317257383 2 2 2 -0.258633103875262 2 2 2 -0.66307312794457 2 2 2 -0.4115021817099 2 2 2 -0.61208665562783 2 2 2 -0.384453937267756 2 2 2 -0.661908613432912 1 2 2 -0.606234829188098 2 2 2 -1.29671884936154 2 2 2 -0.286389778612672 2 2 2 -0.44358020190157 2 2 2 -0.331248138375298 2 2 2 -0.699098366691914 2 2 2 -0.286835588654431 2 2 2 -0.27329751741887 2 2 2 -0.578226988740838 1 2 2 -0.411952465608936 2 2 2 -0.373654621521716 2 2 2 -0.51548587469447 1 2 2 -1.11290090946189 2 2 2 -0.61606619557625 1 2 2 -0.698620145254327 1 2 2 -0.213186526028619 1 2 2 -0.256304787769457 2 2 2 -0.203323922453708 2 2 2 -0.451141466550498 2 2 2 -0.432233770300206 1 2 2 -0.619352781461983 2 2 2 -0.858258004341018 2 2 2 -0.325535345759254 2 2 2 -0.284928261958504 2 2 2 -0.292632701103941 2 2 2 -0.448776353232441 2 2 2 -0.629255153926353 2 2 2 -0.263200495144972 2 2 2 -0.352588083053461 1 2 2 -0.236540541826709 2 2 2 -0.707959296574283 2 2 2 -1.28313325031642 2 2 2 -0.301751574718914 1 2 2 -0.561892223203863 1 2 2 -0.282412604594248 2 2 2 -0.304262276952806 2 2 2 -0.407357953713203 2 2 2 -0.210707057789059 2 2 2 -0.560396081439257 2 2 2 -0.339009581511832 2 2 2 -0.996419746213449 2 2 2 
-1.00183750288417 2 2 2 -1.32504284872589 2 2 2 -1.0721106707744 2 2 2 -0.377861404944634 2 2 2 -0.71900023167808 1 2 2 -0.319706843290023 2 2 2 -0.416610305545232 2 2 2 -0.206654488398495 2 2 2 -0.921766469149586 1 2 2 -0.658742909074791 2 2 2 -0.263893467268196 2 2 2 -0.190201252069023 1 2 2 -0.517337895143614 2 2 2 -0.224739734085673 2 2 2 -0.218898805354731 2 2 2 -0.717954990040875 2 2 2 -0.209228192652069 1 2 2 -0.256025079388851 1 2 2 -0.326258537383908 2 2 2 -0.689759693215715 2 2 2 -0.610908694182847 1 2 2 -0.337065226697079 2 2 2 -0.1870490728342 2 2 2 -0.299662174395397 2 2 2 -0.277717334862863 2 2 2 -1.23731761519909 2 2 2 -0.43474766698581 2 2 2 -0.557318058556568 2 2 2 -0.680232429047272 1 2 2 -0.322869377985879 2 2 2 -0.438605410585611 2 2 2 -0.87241634651293 2 2 2 -0.539311881419031 2 2 2 -0.475182882058131 2 2 2 -0.271154490775633 2 2 2 -0.385232918900933 2 2 2 -0.505906394481136 1 2 2 -0.442070078279938 1 2 2 -0.571547043533657 2 2 2 -0.655792477355547 2 2 2 -0.298499878396393 2 2 2 -0.193724242862765 2 2 2 -0.26006530791065 1 2 2 -0.356708786110689 1 2 2 -0.491543605775341 2 2 2 -0.393111774151399 2 2 2 -0.64026773631928 2 2 2 -1.21142909598262 2 2 2 -0.474157252918212 2 2 2 -0.262364047166446 1 2 2 -0.258812965103118 1 2 2 -0.348162908277828 2 2 2 -0.272495641205976 2 2 2 -0.312010686501704 2 2 2 -0.38306021754942 2 2 2 -0.661593514913509 2 2 2 -0.433229374187291 1 2 2 -0.361015067322576 2 2 2 -0.240000417220632 2 2 2 -0.354358867878031 1 2 2 -0.30397159906092 2 2 2 -0.678050772142903 2 2 2 -0.716435686835505 1 2 2 -0.688441301707592 2 2 2 -0.394328672411684 2 2 2 -0.46447905310017 2 2 2 -0.884150949192416 2 2 2 -0.248771015164462 2 2 2 -0.379182107844877 2 2 2 -1.29071049141673 2 2 2 -0.277475559903592 2 2 2 -0.428681740063866 2 2 2 -0.206857299277309 2 2 2 -0.669454595982171 2 2 2 -0.324919636356833 2 2 2 -0.395123689833804 1 2 2 -0.351936531306306 2 2 2 -0.55429721844539 2 2 2 -0.250263471266211 2 2 2 -0.372186767472496 1 2 2 -1.00061699085178 2 2 2 -1.09921193229266 2 2 2 -0.650060223521224 2 2 2 -0.232120071117234 2 2 2 -0.410458958763894 1 2 2 -0.26086224185435 2 2 2 -0.602658853022438 2 2 2 -0.282185336344145 2 2 2 -1.26553444840965 2 2 2 -0.382143185875273 2 2 2 -0.248988201311841 2 2 2 -0.982723892409823 2 2 2 -0.444303934998749 2 2 2 -0.64934036686621 1 2 2 -0.476803291197273 2 2 2 -0.385320489981782 2 2 2 -0.345492479856484 2 2 2 -0.422553263338974 2 2 2 -0.450135392979508 2 2 2 -0.18351739521214 2 2 2 -0.72669927106078 1 2 2 -0.375683539988626 1 2 2 -0.217452533729198 2 2 2 -0.664573219425088 2 2 2 -0.40675170248381 2 2 2 -0.687937270624779 2 2 2 -0.746504000572466 2 2 2 -0.315055664212289 2 2 2 -0.567678006192237 2 2 2 -0.35926586984242 2 2 2 -0.260726328355797 2 2 2 -0.420592363854161 1 2 2 -0.396878488001735 2 2 2 -0.25808424693846 1 2 2 -0.690268836967113 2 2 2 -0.829884430478792 2 2 2 -0.53272810901146 1 2 2 -1.01904300744411 2 2 2 -0.404198494338956 2 2 2 -0.406416489634305 2 2 2 -0.740461569878665 2 2 2 -0.397707721968874 2 2 2 -0.316960678261711 2 2 2 -0.393796380182408 2 2 2 -0.612468746065398 2 2 2 -0.45411966684652 1 2 2 -0.785721727701694 2 2 2 -0.9899071317292 2 2 2 -0.301479726835548 2 2 2 -0.202054252934703 1 2 2 -0.404304395375019 2 2 2 -0.405109216362114 2 2 2 -0.437782938427307 2 2 2 -0.323522608388588 2 2 2 -0.498830694826068 2 2 2 -0.37932833915468 2 2 2 -0.566103976465953 2 2 2 -0.285143334962036 2 2 2 -0.331554043990072 1 2 2 -1.1056014894372 1 2 2 -0.526579897732621 1 2 2 -0.498630096551189 1 2 2 -0.302146695343523 2 2 2 
-0.593188221646493 2 2 2 -0.875545940047122 1 2 2 -0.21829415545408 2 2 2 -1.27038332570518 2 2 2 -0.282958152993897 1 2 2 -0.316439713175455 2 2 2 -0.579173988441469 1 2 2 -0.444463360833938 2 2 2 -0.269250618007444 1 2 2 -0.470819079103018 2 2 2 -0.209668973816132 2 2 2 -0.458009773429269 2 2 2 -0.408503850676956 2 2 2 -0.27215890031715 2 2 2 -0.326582192001007 2 2 2 -1.06297741258528 1 2 2 -0.322713389608647 1 2 2 -0.457320344283686 2 2 2 -0.358703584766666 1 2 2 -0.266227903632889 2 2 2 -0.559988919421634 2 2 2 -0.286133456649917 2 2 2 -0.505411943083196 1 2 2 -0.525780093131127 1 2 2 -0.469126014876269 2 2 2 -1.04689087989818 2 2 2 -0.319875950338349 2 2 2 -0.250597639059042 1 2 2 -0.457613518448636 2 2 2 -0.387669523459911 1 2 2 -0.434913766029881 2 2 2 -0.48328427011083 2 2 2 -0.243610412662936 2 2 2 -0.342488023626944 2 2 2 -1.02638570164986 2 2 2 -1.63528669167027 2 2 2 -0.547318790274417 2 2 2 -0.440550940111696 1 2 2 -0.373207977309306 1 2 2 -0.4309907178462 2 2 2 -0.687697858349405 2 2 2 -0.285905993586428 1 2 2 -0.295218110682198 2 2 2 -0.650238504586291 2 2 2 -0.353180609354725 2 2 2 -0.336105599731412 2 2 2 -0.308137951395616 2 2 2 -0.347726332414955 1 2 2 -0.36943111917592 2 2 2 -0.281602615433194 2 2 2 -0.341345778831345 2 2 2 -0.467241317856716 2 2 2 -0.805429950125371 2 2 2 -0.235507874506382 1 2 2 -0.60030833998794 2 2 2 -0.676534495912984 2 2 2 -0.217928389514833 2 2 2 -1.05751503498892 2 2 2 -0.279644280745951 2 2 2 -0.658652885367294 2 2 2 -0.958304606178503 2 2 2 -0.346568405752533 1 2 2 -0.56248781541788 2 2 2 -0.209725804674779 2 2 2 -0.846930432147221 2 2 2 -0.330138876610716 1 2 2 -0.420408554089049 2 2 2 -0.620358503800179 2 2 2 -0.429248922416652 2 2 2 -0.382833067577587 2 2 2 -0.340311828954195 2 2 2 -0.389953896192246 2 2 2 -0.53663347892141 2 2 2 -0.49895548899123 2 2 2 -0.941153754879819 1 2 2 -0.507411654917284 2 2 2 -1.20160702354942 2 2 2 -0.478889238903742 1 2 2 -0.901438524126996 2 2 2 -0.918390742809495 2 2 2 -0.493350491230538 1 2 2 -0.52166085506493 2 2 2 -0.389420617429571 2 2 2 -0.270742745239298 1 2 2 -1.37088764103588 2 2 2 -0.727863288960697 2 2 2 -1.12836933231538 2 2 2 -0.386561751596785 2 2 2 -0.4309399047834 2 2 2 -0.614786852311502 2 2 2 -0.411950362176773 2 2 2 -1.78194623155386 2 2 2 -0.539225103492103 2 2 2 -1.02211318479885 2 2 2 -0.544572050183936 2 2 2 -0.584989507154119 2 2 2 -0.248709232375088 2 2 2 -0.641762262858976 2 2 2 -0.535282924615562 2 2 2 -0.329604040226998 2 2 2 -0.279268417344702 2 2 2 -0.233349948825794 2 2 2 -0.412932663530443 2 2 2 -0.415689266706035 1 2 2 -0.482166933803416 2 2 2 -0.893443627004475 2 2 2 -0.446076551783159 2 2 2 -0.843967533571949 2 2 2 -0.292399559523647 2 2 2 -0.638203851975096 2 2 2 -0.993243483944454 2 2 2 -0.268455934511667 2 2 2 -0.248177339713737 2 2 2 -0.317553308658084 2 2 2 -0.312268015328109 2 2 2 -0.340662839522388 2 2 2 -0.277346119406243 2 2 2 -0.583825795661779 1 2 2 -0.315409861504152 1 2 2 -1.07687410829711 2 2 2 -0.444325240814203 2 2 2 -0.387565615939017 2 2 2 -0.267788731901758 2 2 2 -0.311270050983203 1 2 2 -1.22802442335164 2 2 2 -0.853133012546484 2 2 2 -0.258818891608348 2 2 2 -0.545664728569855 1 2 2 -0.243040509115306 1 2 2 -0.616018319395203 2 2 2 -0.424267110499089 2 2 2 -0.663051122325687 2 2 2 -0.226946615302446 2 2 2 -0.523585890324027 2 2 2 -0.35581713292406 2 2 2 -0.266158931754381 1 2 2 -0.720311139462917 1 2 2 -0.218118535654997 2 2 2 -0.821362912627226 1 2 2 -0.264989552139514 1 2 2 -0.511682799792117 2 2 2 -0.56125463965235 1 2 2 -0.973606014834926 2 2 2 
-0.518075507295568 1 2 2 -0.272191894573665 2 2 2 -0.310819538858286 2 2 2 -0.368211947094363 2 2 2 -0.465474933911655 2 2 2 -0.561177518235567 1 2 2 -0.603470353749625 1 2 2 -0.334194497275073 2 2 2 -0.641942706323965 2 2 2 -0.356630874252134 2 2 2 -0.232871443126139 1 2 2 -0.333649448973833 2 2 2 -0.609159624944822 1 2 2 -1.45610468230462 2 2 2 -0.466354725792031 2 2 2 -0.304184098280551 1 2 2 -0.477141349721271 2 2 2 -0.303009331142076 2 2 2 -0.524109669978762 2 2 2 -0.43671698415524 2 2 2 -0.42787128143151 2 2 2 -1.08981029598678 1 2 2 -0.226054298232117 2 2 2 -0.383400885338281 2 2 2 -0.583586502122542 1 2 2 -0.778194753021581 2 2 2 -0.268775804780166 1 2 2 -0.502664308069978 2 2 2 -1.36226361256616 2 2 2 -0.335324976828731 2 2 2 -0.43949839083169 2 2 2 -0.432777757912515 2 2 2 -0.267162022023955 1 2 2 -0.879352118632758 2 2 2 -0.413066977072891 2 2 2 -0.421261106481918 2 2 2 -0.513682794838585 2 2 2 -0.232814503782821 2 2 2 -0.977943934285527 1 2 2 -0.278625342042981 2 2 2 -0.978405820837462 2 2 2 -0.43674850370889 2 2 2 -0.237684082723394 2 2 2 -0.244342880645148 2 2 2 -0.313124526412448 1 2 2 -0.244341553331277 2 2 2 -0.850477515504803 2 2 2 -0.346690313973946 2 2 2 -0.427706912439349 2 2 2 -0.383097524593988 1 2 2 -0.672506676903199 2 2 2 -0.325668111506743 2 2 2 -0.501131325233736 2 2 2 -0.240168060476825 2 2 2 -0.235178051076048 2 2 2 -0.47616856065887 2 2 2 -0.262979002698665 1 2 2 -0.526351217536873 2 2 2 -0.337727201047472 2 2 2 -0.255335801167391 2 2 2 -0.382811211430241 2 2 2 -0.526518217287997 2 2 2 -0.212982195364599 2 2 2 -0.197639872379599 2 2 2 -0.401129269762392 2 2 2 -2.19377506417666 2 2 2 -0.254835580976153 2 2 2 -0.635043789020716 2 2 2 -0.603192459522677 1 2 2 -0.780119188280459 1 2 2 -0.387445357368451 2 2 2 -0.352999715984171 2 2 2 -0.215260154150075 2 2 2 -0.438343141309741 1 2 2 -0.383351790215377 2 2 2 -0.290180848854179 2 2 2 -0.30327695172642 2 2 2 -1.16042778280559 2 2 2 -0.223187293483549 2 2 2 -0.699562632635369 2 2 2 -0.469536665963427 2 2 2 -0.614760127368693 1 2 2 -0.250342025167407 2 2 2 -0.192874356525872 1 2 2 -0.246314460692576 2 2 2 -0.37206273201885 2 2 2 -0.206582351239156 2 2 2 -0.534304707606674 2 2 2 -0.343229958652054 2 2 2 -0.777141268031828 2 2 2 -0.209204354796177 2 2 2 -0.897870484588665 1 2 2 -0.484064308449244 2 2 2 -0.309753836749031 2 2 2 -0.420217764048858 2 2 2 -0.564902124329689 2 2 2 -0.393733362161397 1 2 2 -0.266506772096653 2 2 2 -0.28962567887685 2 2 2 -0.528206137761708 2 2 2 -0.301389722137156 1 2 2 -0.223917441106893 2 2 2 -0.779205007871088 1 2 2 -0.218027803393641 2 2 2 -0.288022434145067 2 2 2 -0.303093997774882 1 2 2 -0.390286981959269 2 2 2 -0.464770428369033 2 2 2 -1.73960719684067 2 2 2 -0.248026714745345 2 2 2 -0.36728769019827 2 2 2 -0.596680154881044 1 2 2 -0.383715207146668 2 2 2 -0.376823540619621 2 2 2 -0.685403427627866 1 2 2 -0.441814166283547 2 2 2 -0.493957818252071 2 2 2 -1.09158112036438 2 3 1 -0.303214556399873 2 3 1 -0.546502981537546 1 3 1 -0.217431965663716 2 3 1 -0.268170730263647 2 3 1 -0.346976002816777 2 3 1 -0.229324148845003 1 3 1 -0.816902188437 1 3 1 -0.294576387592954 2 3 1 -0.633680684608576 1 3 1 -1.72847500258562 2 3 1 -0.296884162972746 2 3 1 -0.382321634766408 1 3 1 -0.763284418821156 1 3 1 -0.434224234531732 1 3 1 -1.19271589875813 1 3 1 -0.377781463528736 2 3 1 -0.79392960381382 2 3 1 -0.282338627630539 2 3 1 -0.327667168963983 2 3 1 -0.364638540293463 2 3 1 -0.690522801097352 1 3 1 -0.278680481396497 2 3 1 -1.52418840340842 2 3 1 -0.311969951458862 1 3 1 -0.317776650235954 2 3 1 
-0.423691550838739 1 3 1 -0.671006456874602 1 3 1 -0.877883789761534 2 3 1 -0.302971325325345 1 3 1 -0.671517638524883 2 3 1 -1.11390681916392 2 3 1 -0.46388915584611 2 3 1 -0.439407447713224 2 3 1 -0.304368717367806 1 3 1 -0.598571664636264 2 3 1 -0.293087082176115 1 3 1 -0.88569612171942 1 3 1 -0.250899042539296 1 3 1 -0.357059055876667 2 3 1 -1.18166158962524 2 3 1 -0.470682360024002 2 3 1 -0.894894450156942 1 3 1 -0.435735118013038 2 3 1 -0.30295075552671 2 3 1 -0.470570639524463 2 3 1 -0.23748433775057 2 3 1 -0.360451685172226 1 3 1 -0.441474734419253 2 3 1 -0.586503558927763 2 3 1 -0.489843937397201 1 3 1 -0.466272618907063 2 3 1 -0.614130590008736 1 3 1 -0.328854179555165 1 3 1 -1.00309638651768 1 3 1 -0.616986168975414 2 3 1 -0.708134443160147 1 3 1 -0.187898870895148 1 3 1 -0.54082217240692 2 3 1 -1.57411391072384 1 3 1 -0.4493227844752 1 3 1 -0.713892489238243 2 3 1 -0.342186658762456 1 3 1 -1.02117655005718 1 3 1 -0.683440987874987 2 3 1 -0.423935184637998 1 3 1 -0.973133914601076 1 3 1 -0.38306019074276 2 3 1 -0.616703039430407 2 3 1 -0.424716691275681 2 3 1 -0.571910388646059 2 3 1 -0.321910647628946 2 3 1 -0.279467364732086 1 3 1 -0.511770823160077 1 3 1 -0.448839994649654 1 3 1 -0.408993038286618 2 3 1 -0.384671509573393 1 3 1 -0.59475727306752 2 3 1 -0.248904553159929 2 3 1 -0.730389140239337 2 3 1 -0.919037835604557 1 3 1 -0.264109362057892 1 3 1 -0.989121286812907 1 3 1 -0.828041122491036 1 3 1 -0.266898502330599 2 3 1 -0.254034221622117 2 3 1 -1.11127936853007 2 3 1 -0.877022707380551 2 3 1 -0.497101983703828 2 3 1 -0.55789237566765 2 3 1 -0.759066137604798 2 3 1 -0.346006778408851 2 3 1 -0.258561043266019 2 3 1 -0.934282414397615 2 3 1 -0.328642052298081 2 3 1 -0.550227936556864 1 3 1 -0.376633857594849 1 3 1 -0.304037777411272 2 3 1 -0.431708301902904 1 3 1 -0.349019174949225 1 3 1 -0.920298316488849 2 3 1 -0.39826548226189 1 3 1 -0.725829444504715 2 3 1 -0.588644591374367 1 3 1 -0.246128207487776 1 3 1 -1.11590498746582 2 3 1 -0.294876994112035 2 3 1 -0.641111356601665 1 3 1 -0.508053986081123 1 3 1 -0.771118458382302 1 3 1 -0.285699501579415 1 3 1 -1.02462070103652 1 3 1 -0.272825662912259 2 3 1 -0.484400350353985 2 3 1 -0.484569939314766 2 3 1 -0.531386883569837 1 3 1 -0.410632364466612 1 3 1 -0.529817458600413 2 3 1 -1.13935537418794 2 3 1 -0.2756098273084 1 3 1 -0.578175605563475 1 3 1 -1.05572643035039 2 3 1 -0.404530205629778 2 3 1 -0.68459004427736 2 3 1 -0.512697405878432 2 3 1 -0.429332115307925 2 3 1 -0.295864219583054 2 3 1 -0.619738889875145 2 3 1 -0.574787509981818 2 3 1 -0.25342580412108 2 3 1 -0.510997220464868 1 3 1 -0.314205597309042 2 3 1 -0.445509182794708 2 3 1 -1.21240465641764 2 3 1 -0.394946660382341 2 3 1 -0.452801112877752 2 3 1 -0.403350637136158 2 3 1 -0.576985676386101 2 3 1 -0.389264776452976 1 3 1 -0.823295103130808 1 3 1 -0.463468613723993 1 3 1 -0.245377944795518 2 3 1 -0.996044085392399 2 3 1 -0.690423137827953 2 3 1 -0.663845653189127 2 3 1 -0.459849111784745 2 3 1 -1.17832890976462 2 3 1 -1.71465565607573 2 3 1 -0.559470318252231 2 3 1 -1.62127201031263 2 3 1 -1.75932882254012 1 3 1 -0.810068975707212 2 3 1 -0.32567798881547 2 3 1 -0.405265927230293 2 3 1 -0.312319795786779 2 3 1 -0.664164798713009 2 3 1 -0.972174600565453 1 3 1 -0.350736426389176 1 3 1 -0.464183487885217 1 3 1 -0.979942810598283 2 3 1 -0.312621099364353 1 3 1 -0.321946657262611 2 3 1 -0.662512744175165 1 3 1 -0.265782966766695 2 3 1 -0.554547549403016 1 3 1 -0.670230788357581 2 3 1 -0.545148391569713 2 3 1 -0.3944126912798 2 3 1 -0.350245544303979 2 3 1 
-0.447316724864116 2 3 1 -0.494291506086329 2 3 1 -0.294165307093089 2 3 1 -0.600045380632821 2 3 1 -0.653173135952646 2 3 1 -0.802716451437717 1 3 1 -0.425486199464103 1 3 1 -0.567162388988331 2 3 1 -0.216466918462054 2 3 1 -0.274646226936591 1 3 1 -0.27952911656832 2 3 1 -0.310673451915856 2 3 1 -1.85814147833547 2 3 1 -0.410157918175516 2 3 1 -0.422501872163458 1 3 1 -0.47177034302856 1 3 1 -0.371140269829411 2 3 1 -0.595311459484279 2 3 1 -1.50368663175704 1 3 1 -1.01238877285822 1 3 1 -0.330032112990162 2 3 1 -0.803689764137746 1 3 1 -1.11335733385196 2 3 1 -1.77758737390525 2 3 1 -0.960114443062577 1 3 1 -0.459716792002552 2 3 1 -1.48363248910813 2 3 1 -0.52360780431933 2 3 1 -0.377016971697135 2 3 1 -1.48796279286523 2 3 1 -1.5584173576682 2 3 1 -0.477222600875173 2 3 1 -0.806192776105325 2 3 1 -1.22714760697165 2 3 1 -0.285202509776337 2 3 1 -0.353998885887131 1 3 1 -0.614217926473409 2 3 1 -0.677981366002188 1 3 1 -1.39319663705638 2 3 1 -1.34011509916811 2 3 1 -0.736100370936723 2 3 1 -0.600109865280918 2 3 1 -1.28500130472893 1 3 1 -1.36640296134039 2 3 1 -0.801718417343498 2 3 1 -0.529860706811969 2 3 1 -0.389318546329582 2 3 1 -0.535588867826002 2 3 1 -0.920404797918709 2 3 1 -0.560133933106455 2 3 1 -0.239473881876335 1 3 1 -0.473860966075698 1 3 1 -0.230702658461256 1 3 1 -1.73611605008423 2 3 1 -0.497233574120993 2 3 1 -1.52778704234278 2 3 1 -1.02496145425289 2 3 1 -0.715732898893069 2 3 1 -0.499095116872872 2 3 1 -0.638470641059599 2 3 1 -0.352348027353716 1 3 1 -0.309076440163353 2 3 1 -0.351142175060277 2 3 1 -0.342163837984379 2 3 1 -0.796243815020877 1 3 1 -0.235901518677146 2 3 1 -0.848976396909524 2 3 1 -0.986793834419597 2 3 1 -1.52074724886378 2 3 1 -0.364767724102028 1 3 1 -1.12083572206547 2 3 1 -0.230356527206167 1 3 1 -0.472472696293136 2 3 1 -0.83628255017543 1 3 1 -2.45314242351987 2 3 1 -0.272291207710981 2 3 1 -0.944208776627134 2 3 1 -0.331972697221215 2 3 1 -0.240983354619397 2 3 1 -0.26997177771289 2 3 1 -0.920385413917779 1 3 1 -0.510927528682238 2 3 1 -0.287553521793071 2 3 1 -2.17875010817382 2 3 1 -0.292020199007728 2 3 1 -0.358620202445595 2 3 1 -1.36196670709987 2 3 1 -0.532103665995839 2 3 1 -1.78987248486592 1 3 1 -0.723800730925627 2 3 1 -0.389487868366274 2 3 1 -0.523651645920756 2 3 1 -0.564094358706312 2 3 1 -0.438071614397111 1 3 1 -0.396815552735571 1 3 1 -0.835877625163203 2 3 1 -0.960913558586309 2 3 1 -0.38021035653061 2 3 1 -0.637286675900738 1 3 1 -0.290784805544286 2 3 1 -0.568957565244384 2 3 1 -0.236438662927156 2 3 1 -1.10987563109661 2 3 1 -0.394257199940267 2 3 1 -0.941426455590548 1 3 1 -1.16694900264559 2 3 1 -0.449530936018223 1 3 1 -1.30291521810678 2 3 1 -0.663336993076141 2 3 1 -0.412620028111287 1 3 1 -0.213769081676035 1 3 1 -1.14008562289037 2 3 1 -0.642154856872125 2 3 1 -1.12186732245763 2 3 1 -0.530059942824884 2 3 1 -0.743562690339846 2 3 1 -0.467733874019439 1 3 1 -0.347897157855929 1 3 1 -0.271346908743046 2 3 1 -1.64808023049025 1 3 1 -0.873213094661973 1 3 1 -0.36045322327288 2 3 1 -0.415893829939983 2 3 1 -0.263744233102411 2 3 1 -0.540852200357253 2 3 1 -1.27190438964105 2 3 1 -0.692221005703411 2 3 1 -0.885861728476599 2 3 1 -0.465274050871376 1 3 1 -0.446986860614239 2 3 1 -1.14339397523192 1 3 1 -0.698229667938408 1 3 1 -0.545774956181041 2 3 1 -0.252737225149388 1 3 1 -0.956242959384857 1 3 1 -0.559955447458839 1 3 1 -0.321668964016761 1 3 1 -0.495953400106333 1 3 1 -0.435695907294935 2 3 1 -1.29125035927742 1 3 1 -1.14445342406167 2 3 1 -0.303314792589389 1 3 1 -0.550870572581426 1 3 1 -0.589452519460692 
1 3 1 -0.793764837082831 2 3 1 -1.15478617130203 2 3 1 -0.423154299941937 2 3 1 -0.441625445567769 1 3 1 -1.04879230934071 1 3 1 -0.328428851869649 2 3 1 -0.679231844674899 2 3 1 -1.15492451938846 2 3 1 -1.08528509664462 1 3 1 -0.483072408512607 2 3 1 -1.64223021381801 2 3 1 -0.51690071016677 1 3 1 -0.912126868764157 2 3 1 -0.628163734423868 1 3 1 -0.698176751617721 1 3 1 -0.333876511447483 2 3 1 -0.479648257326482 2 3 1 -0.387197092688304 2 3 1 -0.692552401206789 2 3 1 -0.842058155042385 2 3 1 -0.71750243288607 1 3 1 -0.305258214294853 1 3 1 -0.443541182282758 2 3 1 -1.57795342301233 2 3 1 -0.849830671160219 2 3 1 -2.64336117374313 2 3 1 -0.602149680031763 1 3 1 -0.494502380433554 1 3 1 -0.301592730343628 2 3 1 -0.838419470710967 1 3 1 -0.667334464560839 2 3 1 -1.50007402393947 2 3 1 -0.389803326920504 2 3 1 -0.882528306696846 2 3 1 -0.372982160615418 2 3 1 -0.349280124360808 1 3 1 -0.512012574758371 2 3 1 -1.17220110413599 2 3 1 -0.414969949251035 2 3 1 -1.63347309320552 1 3 1 -0.303593516632502 2 3 1 -0.4186484600358 2 3 1 -0.339028778633365 2 3 1 -0.717224138012012 2 3 1 -0.779291021701807 2 3 1 -0.561060495047965 2 3 1 -1.80155372469456 2 3 1 -0.350666104484292 2 3 1 -1.75384028882697 2 3 1 -0.34614655108637 2 3 1 -0.857888702654049 2 3 1 -0.538538273376274 1 3 1 -0.808165431176924 2 3 1 -1.12566956065676 2 3 1 -0.401747844392863 1 3 1 -0.420638560385403 1 3 1 -0.689567547922525 1 3 1 -0.687101320313498 2 3 1 -1.11450021231709 1 3 1 -1.05673194424108 2 3 1 -0.378493955443519 1 3 1 -0.374806303246874 2 3 1 -0.605170645685489 1 3 1 -0.568600361954804 1 3 1 -0.279564048875058 2 3 1 -0.737344841204778 1 3 1 -0.383626489427317 1 3 1 -0.236592626403799 2 3 1 -0.606395313320835 2 3 1 -0.258446694712414 1 3 1 -0.667654851854366 1 3 1 -0.709849063285861 2 3 1 -1.06652304155128 2 3 1 -0.375309922256882 1 3 1 -0.305631875482354 2 3 1 -1.00431113766878 2 3 1 -0.731812037602777 1 3 1 -0.414232775562026 2 3 1 -0.826500384443797 2 3 1 -0.719116830057354 1 3 1 -0.61855729451436 2 3 1 -0.399444901441129 1 3 1 -0.2295675381027 2 3 1 -0.361734141102366 2 3 1 -0.928229131678883 1 3 1 -0.767105903673052 2 3 1 -0.312954135574329 2 3 1 -0.316125600053679 2 3 1 -0.83356862967393 2 3 1 -0.807805288376209 2 3 1 -0.529747082048339 2 3 1 -0.579438577427002 2 3 1 -0.748413890954428 2 3 1 -0.282714299825272 2 3 1 -1.32349690412234 2 3 1 -0.265264643396819 2 3 1 -1.05199545660803 1 3 1 -0.354844761549886 2 3 1 -0.673382389127187 2 3 1 -0.353927127140679 2 3 1 -0.742708866815784 2 3 1 -0.32440131369228 1 3 1 -1.15424939546308 2 3 1 -0.341988033892061 2 3 1 -0.385901020315423 2 3 1 -0.286688838450355 2 3 1 -0.862986046754551 1 3 1 -0.362564960443358 2 3 1 -0.618539574023911 1 3 1 -0.618839114124994 1 3 1 -0.351742202769926 2 3 1 -1.04713435904685 2 3 1 -1.13219276272773 1 3 1 -0.431897141646835 2 3 1 -0.903368625643694 2 3 1 -0.845773237970464 1 3 1 -0.448267685056844 1 3 1 -0.227965869389189 2 3 1 -0.727189656817706 1 3 1 -0.920965873390772 2 3 1 -2.86369573335364 2 3 1 -1.79113426784109 2 3 1 -0.434371557096156 1 3 1 -0.297049698054049 2 3 1 -0.908700456343662 2 3 1 -0.840014957530216 1 3 1 -0.391451573590056 2 3 1 -0.964762751519269 2 3 1 -1.97499804009749 1 3 1 -1.58759823353077 2 3 1 -2.10229181480408 1 3 1 -0.573075746749156 2 3 1 -0.87229795094926 2 3 1 -0.764360676764928 1 3 1 -0.236433389533537 2 3 1 -0.500319361165157 2 3 1 -0.436148207342909 1 3 1 -1.17237545076077 2 3 1 -0.305589800257361 2 3 1 -0.269400199640921 2 3 1 -1.35212659081556 1 3 1 -0.377929769476066 2 3 1 -0.899332425662463 1 3 1 
-1.00394626493931 2 3 1 -1.10094408622287 2 3 1 -1.18830151405781 2 3 1 -0.284872387153195 2 3 1 -0.375024301973256 1 3 1 -0.404469325185188 1 3 1 -0.727775813795801 2 3 1 -0.252419279794447 2 3 1 -0.375032470671666 2 3 1 -0.309265085564879 2 3 1 -0.462609251209814 2 3 1 -0.320190368690629 1 3 1 -0.477686210715907 1 3 1 -0.275346713714152 2 3 1 -0.438717701647585 2 3 1 -0.810110401646601 2 3 1 -1.07883036639961 2 3 1 -0.809608091311169 2 3 1 -0.418153700722572 1 3 1 -0.764475113911811 1 3 1 -0.412993008145338 2 3 1 -1.335962806722 1 3 1 -0.875235294298827 1 3 1 -0.326172123137794 2 3 1 -0.231731510095046 2 3 1 -0.914448514006612 1 3 1 -0.550916483189837 2 3 1 -0.880466208259979 2 3 1 -0.601176005143088 2 3 1 -1.43879123063957 2 3 1 -0.621745991817644 2 3 1 -0.3528215159095 2 3 1 -0.30994437648555 1 3 1 -0.90362627319135 2 3 1 -0.903886767560117 2 3 1 -0.835640862308006 2 3 1 -1.20726153384552 2 3 1 -0.653989199174602 2 3 1 -0.915035948130758 2 3 1 -0.726052417728461 2 3 1 -1.01911267691402 2 3 1 -0.352826644011026 2 3 1 -0.440580677663477 1 3 1 -0.999490854549375 1 3 1 -0.514934236463869 1 3 2 -0.270420118537311 2 3 2 -0.646471943779092 2 3 2 -0.338830766035059 2 3 2 -0.491671962582901 2 3 2 -0.629886758393846 2 3 2 -0.51589107754444 2 3 2 -0.353946635128139 2 3 2 -0.331203176196343 1 3 2 -1.99524093298412 2 3 2 -1.02440243540096 2 3 2 -0.282898155808958 2 3 2 -0.599827056733371 2 3 2 -0.238817716879006 2 3 2 -0.206191544000187 1 3 2 -0.855183187835193 2 3 2 -0.722927152841454 2 3 2 -0.780157089830913 2 3 2 -0.509420971748398 2 3 2 -0.96103805001364 2 3 2 -0.493670434412268 1 3 2 -0.214424723176226 2 3 2 -0.392688836409781 1 3 2 -0.658620383209045 2 3 2 -0.216107419356536 2 3 2 -0.57723401544534 2 3 2 -0.527348367007325 1 3 2 -0.473776142345069 2 3 2 -0.424350872006699 2 3 2 -2.16246776879602 1 3 2 -0.579491048291868 2 3 2 -0.317300006903978 1 3 2 -1.25002685289334 1 3 2 -0.322077006208459 2 3 2 -0.65598919200563 1 3 2 -1.4631363964763 2 3 2 -1.68342497778485 1 3 2 -0.950063435678861 1 3 2 -0.205802941406673 2 3 2 -0.442509433023036 1 3 2 -0.818174077396608 2 3 2 -0.286175835912647 2 3 2 -0.33842480954584 2 3 2 -1.38048451044948 2 3 2 -0.39988067006981 1 3 2 -0.253643662104766 2 3 2 -0.560976394476593 2 3 2 -0.517941831953703 1 3 2 -0.487317274258782 1 3 2 -0.722089923618615 2 3 2 -0.468435888325438 2 3 2 -0.673049975899187 2 3 2 -0.404615085266369 2 3 2 -0.337034159076431 2 3 2 -0.435942135312604 2 3 2 -0.369014730704062 2 3 2 -0.567411102529725 1 3 2 -0.683010310238025 2 3 2 -0.290243271494673 2 3 2 -0.316097101922202 1 3 2 -1.26062843312539 2 3 2 -0.511300941444236 2 3 2 -2.2216549447733 2 3 2 -0.6322594359434 2 3 2 -1.05478582553533 2 3 2 -0.435637851164182 1 3 2 -0.468027792640505 2 3 2 -0.420219455300886 1 3 2 -0.273929740512875 1 3 2 -0.793839005062366 2 3 2 -0.700039266476368 2 3 2 -0.698393675491842 2 3 2 -0.320657243989843 1 3 2 -1.16100839379935 1 3 2 -0.64693140923479 1 3 2 -0.512516768633988 2 3 2 -0.742023676531162 2 3 2 -0.972058194099057 2 3 2 -0.398653703474479 1 3 2 -0.410331563672334 2 3 2 -0.222283100080092 1 3 2 -0.469355586816016 1 3 2 -1.19333537174541 2 3 2 -0.307250192296768 2 3 2 -0.818587555385279 1 3 2 -0.236745366451503 2 3 2 -0.881972502593121 2 3 2 -0.277399097478032 2 3 2 -0.522816700136251 1 3 2 -1.03885756789198 2 3 2 -0.423490173116203 2 3 2 -0.603714429771403 1 3 2 -0.343602016522983 2 3 2 -1.06315814327229 2 3 2 -1.64394374580309 2 3 2 -0.258950271624856 2 3 2 -0.839767784808717 1 3 2 -0.422102182085427 2 3 2 -1.19755245402792 2 3 2 -0.36311410858205 1 
3 2 -1.32548841200299 2 3 2 -1.25935619927138 2 3 2 -1.94209928030842 2 3 2 -0.373060834025449 2 3 2 -0.3101485804146 2 3 2 -0.533892648348211 2 3 2 -0.744784639587278 2 3 2 -1.18381744027858 2 3 2 -0.240744324843731 2 3 2 -0.946050579944945 1 3 2 -0.627753953881302 1 3 2 -0.370445161933509 1 3 2 -0.300650302064735 1 3 2 -0.373459020239413 2 3 2 -0.430948400866446 1 3 2 -1.49574317491573 2 3 2 -1.3101827626935 2 3 2 -1.67262325791643 1 3 2 -0.241993699720061 2 3 2 -0.328265869000834 2 3 2 -0.384015136641366 1 3 2 -0.217469612226687 2 3 2 -0.353910918208547 1 3 2 -1.26306667028009 2 3 2 -0.283701100770668 2 3 2 -0.442696536960043 1 3 2 -1.23508427666644 2 3 2 -0.211641858949563 2 3 2 -1.31762978216566 2 3 2 -0.427189324868075 1 3 2 -0.427280548495511 2 3 2 -1.48538078369227 1 3 2 -0.628708315466251 2 3 2 -1.78361132590903 2 3 2 -0.239589904781277 1 3 2 -0.562274806702486 2 3 2 -0.721667014209347 1 3 2 -0.659149048175133 2 3 2 -0.829239328543952 2 3 2 -1.02272830279552 2 3 2 -0.671890699523076 2 3 2 -1.72636732645653 1 3 2 -0.615737367953486 2 3 2 -1.07678479171103 1 3 2 -0.538623051607739 2 3 2 -1.39867639023568 1 3 2 -0.218925904964068 2 3 2 -0.240724605293016 2 3 2 -0.452439100915242 2 3 2 -0.694089462161358 2 3 2 -1.84709801261543 2 3 2 -0.564582100435542 2 3 2 -0.725448687770764 2 3 2 -0.30708178683197 1 3 2 -0.47535866323473 2 3 2 -0.829935945880499 2 3 2 -0.90098748577001 2 3 2 -0.321123642659435 2 3 2 -0.295296832786178 2 3 2 -0.312203918236697 2 3 2 -0.967729509776647 1 3 2 -1.5438301792962 2 3 2 -0.493390293618284 2 3 2 -0.799464309749491 2 3 2 -0.492248525373071 1 3 2 -0.876464392051748 1 3 2 -0.914622363311664 2 3 2 -0.939704807763569 1 3 2 -0.901114296634733 1 3 2 -2.83967944372257 1 3 2 -0.633623902249606 1 3 2 -0.590830718948575 2 3 2 -0.644050309060636 2 3 2 -0.618329617213195 1 3 2 -0.406935681035188 2 3 2 -0.680935599818192 2 3 2 -0.689990604040213 2 3 2 -0.234441659901205 1 3 2 -0.628614985132006 2 3 2 -0.603104314434233 1 3 2 -0.458166460126185 2 3 2 -0.5410221389939 2 3 2 -0.806727630952272 1 3 2 -0.74995864564573 2 3 2 -0.925796642818387 2 3 2 -1.48881282804597 2 3 2 -1.32567553513673 2 3 2 -0.720300933060513 1 3 2 -0.845676771367771 2 3 2 -1.03943195815275 2 3 2 -0.269814661026592 2 3 2 -0.303313872334609 1 3 2 -0.668337346160446 1 3 2 -0.802759823174443 1 3 2 -1.0891459476003 1 3 2 -0.524162399076158 2 3 2 -0.843176928462498 2 3 2 -0.378654464972701 2 3 2 -0.37200187900001 2 3 2 -1.24108370520966 2 3 2 -0.407967861247184 1 3 2 -0.385604160482279 2 3 2 -1.37807312575617 2 3 2 -1.6236739129127 2 3 2 -0.299498250287133 2 3 2 -1.50283781972101 2 3 2 -0.425002443191797 2 3 2 -0.793010798627147 1 3 2 -0.443607069543917 2 3 2 -0.553450577754003 2 3 2 -0.856593591339924 2 3 2 -0.525593955394394 2 3 2 -0.896348840912902 2 3 2 -2.32094781645657 2 3 2 -0.343409204205168 2 3 2 -2.55188788674935 1 3 2 -0.59347479355615 2 3 2 -1.14133837346965 1 3 2 -0.769316859974066 1 3 2 -0.553345822537459 1 3 2 -0.280403390069466 1 3 2 -0.351752315403098 2 3 2 -0.464937188483399 1 3 2 -0.498643366432877 2 3 2 -1.20768606813256 2 3 2 -0.810552918965932 2 3 2 -0.814572293196432 2 3 2 -1.12466835158388 1 3 2 -0.605892652281983 2 3 2 -0.718062408112775 1 3 2 -1.43756814834945 2 3 2 -0.434523586879683 2 3 2 -1.1331064813251 2 3 2 -0.347724946096093 2 3 2 -0.493607135881693 1 3 2 -0.67651193182361 1 3 2 -0.552022281319177 1 3 2 -0.321142527517636 2 3 2 -0.597883853359763 1 3 2 -0.895001662908153 1 3 2 -0.48578933838309 1 3 2 -1.68093599717435 1 3 2 -0.70886900345093 2 3 2 -0.366715507465807 2 3 2 
-0.229193753495571 1 3 2 -0.456535243655345 2 3 2 -0.96352225520781 1 3 2 -0.744795713668557 2 3 2 -0.469577483093902 2 3 2 -0.32463756963168 1 3 2 -0.718346029022071 2 3 2 -1.29491659083823 2 3 2 -0.461155144420931 2 3 2 -0.5798492161351 2 3 2 -1.05582449803871 2 3 2 -0.28481585650096 2 3 2 -0.410730259856026 2 3 2 -1.60060704532405 2 3 2 -0.534513028770362 2 3 2 -0.290600969494565 1 3 2 -0.508134824209082 2 3 2 -0.733677137815329 1 3 2 -0.524546051284814 2 3 2 -0.626615353700544 2 3 2 -0.207338061930642 2 3 2 -2.49191138211878 1 3 2 -0.446516299324413 2 3 2 -0.568970465239259 2 3 2 -1.30104078909728 2 3 2 -0.573721490299906 2 3 2 -0.395039645429215 2 3 2 -0.868269050301906 2 3 2 -1.39110795542721 2 3 2 -0.732569267335244 2 3 2 -0.346330122949029 2 3 2 -0.28247517183272 2 3 2 -0.339777576105551 2 3 2 -0.709070836414342 2 3 2 -0.741775977804924 2 3 2 -1.3105501266272 2 3 2 -0.650809742033691 1 3 2 -1.21685252483908 2 3 2 -1.31226952331015 2 3 2 -0.671641212759192 2 3 2 -2.60476050728261 1 3 2 -0.423290539217436 1 3 2 -0.747391695995648 2 3 2 -0.415214528560093 2 3 2 -0.732547499620541 2 3 2 -1.11203310275713 1 3 2 -0.310793178630083 1 3 2 -0.532873467984992 2 3 2 -0.24543893048753 2 3 2 -0.262637774460857 2 3 2 -0.638852263528672 2 3 2 -0.268251093022516 2 3 2 -0.754730587787048 1 3 2 -0.304771345055942 2 3 2 -0.600949799200535 2 3 2 -0.454921964270315 2 3 2 -0.291060243483869 2 3 2 -0.850896084981839 2 3 2 -1.02404745430124 2 3 2 -0.740373725034996 2 3 2 -0.697249433946795 2 3 2 -3.22861057623448 2 3 2 -0.614917615221698 2 3 2 -0.417940802999645 2 3 2 -0.428130364139945 1 3 2 -0.330671065628431 2 3 2 -0.62670607061658 2 3 2 -1.26038076404455 2 3 2 -0.322284823454811 2 3 2 -0.429191549101784 2 3 2 -0.455247274613782 2 3 2 -0.663368332878807 2 3 2 -0.305769717251401 2 3 2 -1.50140899013577 1 3 2 -1.45389192339163 2 3 2 -2.98105925544205 2 3 2 -1.04255718339312 2 3 2 -0.942508435934038 2 3 2 -2.787892838843 2 3 2 -0.840734403641314 2 3 2 -0.489925811963693 2 3 2 -1.8552326561657 2 3 2 -0.38632322022465 1 3 2 -0.256936541763573 2 3 2 -0.766126754945232 1 3 2 -0.451147102611256 2 3 2 -0.268349419782926 2 3 2 -0.284282929689453 2 3 2 -0.71369157188727 1 3 2 -0.984328464038398 1 3 2 -0.391254858951931 2 3 2 -0.473609040280498 1 3 2 -0.392371474332231 1 3 2 -0.512961553349468 2 3 2 -0.49196894137911 2 3 2 -1.98481178504207 2 3 2 -0.335597786997023 1 3 2 -0.951871051474144 2 3 2 -0.43773254271624 2 3 2 -0.759326775665626 1 3 2 -0.964163992380983 2 3 2 -0.243214557688043 2 3 2 -1.06020924390952 2 3 2 -0.624129170951697 2 3 2 -0.631213229469376 2 3 2 -1.22749512645753 1 3 2 -1.4544220968578 2 3 2 -0.704504240730269 2 3 2 -0.358484343002385 2 3 2 -0.550361017867011 2 3 2 -0.300375078256161 2 3 2 -0.739678180371973 1 3 2 -0.646734220557972 1 3 2 -0.777682254344784 2 3 2 -0.87982617759661 2 3 2 -0.418244912065538 2 3 2 -0.443192766363974 2 3 2 -0.61563753996371 1 3 2 -1.56614219537768 2 3 2 -2.65065478085341 2 3 2 -0.305863823661165 2 3 2 -0.954934661408583 2 3 2 -0.449191627811582 1 3 2 -0.793807602132907 2 3 2 -0.519871565006984 1 3 2 -0.293207709999379 1 3 2 -0.634867211065706 1 3 2 -0.469994277604704 2 3 2 -0.343616111556125 2 3 2 -0.23004178016569 1 3 2 -0.335931900338173 2 3 2 -0.743164799438406 2 3 2 -0.582587466556771 2 3 2 -0.412610130745763 2 3 2 -0.303143772129072 2 3 2 -1.28143560920008 2 3 2 -1.17554195841916 1 3 2 -0.656531128634536 2 3 2 -0.29995775158261 1 3 2 -0.89568315126836 2 3 2 -1.30618389742279 2 3 2 -3.0496460415615 2 3 2 -0.743633550842162 2 3 2 -0.976245680087169 2 3 2 
-0.376124867059714 2 3 2 -0.537516104244315 2 3 2 -0.212538006220838 2 3 2 -0.571699948650591 2 3 2 -0.658537447407892 2 3 2 -0.409180121003016 2 3 2 -0.38453187413556 2 3 2 -1.39971354422791 2 3 2 -1.48526501316284 2 3 2 -0.765450963277084 2 3 2 -0.45683796523625 2 3 2 -0.49455927446116 2 3 2 -0.565813339958499 2 3 2 -0.51579075751888 2 3 2 -0.579482217533706 1 3 2 -0.464431057746426 2 3 2 -0.33385297359306 1 3 2 -0.237785587972524 2 3 2 -0.788856968485086 2 3 2 -0.247738584125967 2 3 2 -1.29521163578326 1 3 2 -0.51405310891982 2 3 2 -0.248969113363235 1 3 2 -0.8258802772869 2 3 2 -0.858915328254726 2 3 2 -1.04412031745921 1 3 2 -1.67795472999734 1 3 2 -0.560822550684719 2 3 2 -0.639828932713558 2 3 2 -0.606061526335406 1 3 2 -0.446197468121209 2 3 2 -0.889197611107733 2 3 2 -0.229591061070164 1 3 2 -0.598751053548388 1 3 2 -0.42084079282726 2 3 2 -0.280912227540918 2 3 2 -0.353535095083615 1 3 2 -0.533533031895995 2 3 2 -0.40991726170081 2 3 2 -0.386705907004533 2 3 2 -0.663325409649471 2 3 2 -1.05367422973975 1 3 2 -1.41642911541684 2 3 2 -0.728074140704459 2 3 2 -0.448237114304907 2 3 2 -0.471483947133633 2 3 2 -1.26271559797945 1 3 2 -0.241118847003316 2 3 2 -0.551833217379812 1 3 2 -0.508606043806118 2 3 2 -0.364460896466132 2 3 2 -0.412399264984449 2 3 2 -0.305920977565598 2 3 2 -0.705398182563824 2 3 2 -0.204036911418345 1 3 2 -1.04820599938717 1 3 2 -0.643323321586422 2 3 2 -3.66898798188367 2 3 2 -1.08829013517781 1 3 2 -0.361592831884118 1 3 2 -0.979363639648445 2 3 2 -0.521111784853412 1 3 2 -0.343395604193243 1 3 2 -0.587048881881688 2 3 2 -1.41201467474607 2 3 2 -0.443024780470065 2 3 2 -2.95728532098558 2 3 2 -0.319216793259789 2 3 2 -0.221315652964487 2 3 2 -0.77093692467471 1 3 2 -0.73558455041612 1 3 2 -0.259119262605434 2 3 2 -0.48994285788748 1 3 2 -0.571960539121533 2 3 2 -0.30717971899547 2 3 2 -1.97586628351188 2 3 2 -0.375432444639877 2 3 2 -0.811045564934994 2 3 2 -0.911400482590164 2 3 2 -0.421553307064521 1 3 2 -0.836499690800059 1 3 2 -0.709656783694355 1 3 2 -0.738884945936119 1 3 2 -0.369565008846999 1 3 2 -0.379597876167422 1 3 2 -0.673815169801798 2 3 2 -1.61852146139474 1 3 2 -0.341581465482509 1 3 2 -0.263351833487348 2 3 2 -0.34610526127482 2 3 2 -0.840263767605542 2 3 2 -0.860111461225023 2 3 2 -0.500246193912611 2 3 2 -0.611622543641809 2 3 2 -0.304839820514316 1 3 2 -0.478872857619653 2 3 2 -0.966509448052867 2 3 2 -0.369970546426949 2 3 2 -0.424912789436735 2 3 2 -0.639361694609756 2 3 2 -0.638867619514155 2 3 2 -0.439145854141595 2 3 2 -1.2433130957394 2 3 2 -0.546013305487959 2 3 2 -0.621366845453756 1 3 2 -0.371921153976491 1 3 2 -1.72030292611725 2 3 2 -0.265728845949588 2 3 2 -0.250485215272467 2 3 2 -0.260055352791922 2 3 2 -1.04055348391978 2 3 2 -0.639817829535305 2 3 2 -0.436687399202203 2 4 1 -0.390002899730434 2 4 1 -0.689914798071677 2 4 1 -0.89709020993931 2 4 1 -0.427183990353492 1 4 1 -0.340777120972685 2 4 1 -0.251597448645233 1 4 1 -0.270029795224852 1 4 1 -0.36506923015438 2 4 1 -0.925483297260795 1 4 1 -0.491542011496093 1 4 1 -0.584011448243567 2 4 1 -1.27419822711881 2 4 1 -0.33102420792392 2 4 1 -0.351900683919713 1 4 1 -0.393343181453058 2 4 1 -1.11700088666809 2 4 1 -0.383684827552196 2 4 1 -0.319389244865323 2 4 1 -0.207961019321362 1 4 1 -0.247906583019937 2 4 1 -0.664874815584718 2 4 1 -0.633174401608791 2 4 1 -0.228811949915 2 4 1 -0.313807509549483 1 4 1 -0.258061143771553 1 4 1 -1.40526930242479 2 4 1 -0.617601017184864 2 4 1 -0.448539769566249 2 4 1 -0.301286656201828 2 4 1 -0.35035514703207 2 4 1 -1.08961020047286 1 4 1 
-0.29793786190371 1 4 1 -0.384511098165857 2 4 1 -0.761604738984846 1 4 1 -0.341173784779225 2 4 1 -0.376431442957684 2 4 1 -0.484742402575381 2 4 1 -0.257151781895977 1 4 1 -0.597747950821735 2 4 1 -0.510086318540574 2 4 1 -0.414694239051273 2 4 1 -0.583680054953304 1 4 1 -0.260760440689632 2 4 1 -0.798233246796322 1 4 1 -0.299861199950565 1 4 1 -0.276645559734816 1 4 1 -0.754832912251529 2 4 1 -0.409850111348969 2 4 1 -0.357937922566155 1 4 1 -1.74732047917189 2 4 1 -0.234683752237039 1 4 1 -0.379935287004687 2 4 1 -0.400355036755306 2 4 1 -0.199836116985197 1 4 1 -0.29654658741011 1 4 1 -0.40070736644743 2 4 1 -0.810244937262253 1 4 1 -0.232476182488422 1 4 1 -0.441531921393063 1 4 1 -0.266981994270395 1 4 1 -0.594986042709096 2 4 1 -0.226949086978422 2 4 1 -0.521975525478104 2 4 1 -0.342357539413783 1 4 1 -0.317929036077879 1 4 1 -0.252848528033154 1 4 1 -0.323519822370531 1 4 1 -0.543317920252121 2 4 1 -0.24136946576349 2 4 1 -0.924045894157614 1 4 1 -0.206395302547672 1 4 1 -0.32962698245246 1 4 1 -0.351093830002051 2 4 1 -0.393905213694999 1 4 1 -0.215647621306677 1 4 1 -1.07707197772823 2 4 1 -0.389509392394056 2 4 1 -0.223818285290267 1 4 1 -0.506337167510338 2 4 1 -0.471183820790944 2 4 1 -0.845694161269827 2 4 1 -0.709573151871471 1 4 1 -0.17746060467044 1 4 1 -0.31073859900678 2 4 1 -0.441048237227571 1 4 1 -0.46110944700935 1 4 1 -0.54945827722732 2 4 1 -0.784046498525351 2 4 1 -0.67011316512292 2 4 1 -0.835724249453141 2 4 1 -0.446765564545247 2 4 1 -0.672425348568567 2 4 1 -0.327795527459012 1 4 1 -0.638194736699407 1 4 1 -0.706923294313998 2 4 1 -0.599365457528674 1 4 1 -0.323701086823648 2 4 1 -0.219227233336576 1 4 1 -0.457730880006645 1 4 1 -0.315697871971176 2 4 1 -0.589925939568761 1 4 1 -0.247439085357404 1 4 1 -0.32425097397633 1 4 1 -0.597517113461805 2 4 1 -0.718798195346532 2 4 1 -0.638921440877047 1 4 1 -0.255295790134737 2 4 1 -0.327574345419546 2 4 1 -0.290528460395922 2 4 1 -0.812154747100783 2 4 1 -0.62206059702272 2 4 1 -0.692613337029294 2 4 1 -0.625883974901828 2 4 1 -0.441430346433252 2 4 1 -0.490081824112803 2 4 1 -0.244708505986005 2 4 1 -0.670449909527048 2 4 1 -0.548567982146401 2 4 1 -0.670609785865862 2 4 1 -0.232196621565708 1 4 1 -1.00556527970583 1 4 1 -0.362798089622382 2 4 1 -0.299887052564318 1 4 1 -0.29571247705273 2 4 1 -0.379234401357032 1 4 1 -0.370440077361359 1 4 1 -0.431164494502316 1 4 1 -0.420428289154959 2 4 1 -0.730550233135442 2 4 1 -0.196484263350659 1 4 1 -0.322244565661879 1 4 1 -0.419903128817093 2 4 1 -0.887401080614827 2 4 1 -0.347054627607897 1 4 1 -0.478136351931499 2 4 1 -0.22065845309499 1 4 1 -0.230551027213474 1 4 1 -0.774783709604525 2 4 1 -0.4017025367099 2 4 1 -0.473979554090044 2 4 1 -0.647672920929279 2 4 1 -0.498264780188815 2 4 1 -0.242543415916194 1 4 1 -1.50936872822537 2 4 1 -0.566867242699491 1 4 1 -1.47001919200243 2 4 1 -0.443050568657419 1 4 1 -0.617820295621617 2 4 1 -0.359479796449494 2 4 1 -0.604637805760835 2 4 1 -0.586057575592382 2 4 1 -0.440372792115214 1 4 1 -0.33669989715765 2 4 1 -0.46750885766708 2 4 1 -0.256884377023786 2 4 1 -0.404464415727674 1 4 1 -1.31921997047773 1 4 1 -0.430411845210472 2 4 1 -0.191395478638844 1 4 1 -2.65424233758138 2 4 1 -0.446895568163648 2 4 1 -1.16838122758826 1 4 1 -0.404164043001054 1 4 1 -0.453588390001177 1 4 1 -0.489843569086762 2 4 1 -0.511073192179454 1 4 1 -0.366748489459315 2 4 1 -1.08784444950982 1 4 1 -0.3127679937141 1 4 1 -0.374479796833337 2 4 1 -0.536583188537731 2 4 1 -0.468590946410184 1 4 1 -0.844010712248646 2 4 1 -0.557108724831039 1 4 1 
-0.221613723664657 1 4 1 -0.485469547021943 2 4 1 -0.263615872608948 1 4 1 -0.441259334915482 2 4 1 -0.198072070553861 1 4 1 -1.50553220647041 2 4 1 -0.489507279251992 1 4 1 -0.864537266238903 2 4 1 -0.45282939606678 1 4 1 -0.746554343478926 2 4 1 -0.867773306761754 1 4 1 -0.767200054356521 1 4 1 -0.23660645439005 1 4 1 -1.39880762611901 2 4 1 -0.350271079317704 1 4 1 -1.40586843906866 1 4 1 -0.478210911942578 2 4 1 -0.338115182852339 2 4 1 -0.730320711764892 1 4 1 -0.956280538468675 1 4 1 -0.420525233125089 1 4 1 -0.347936329704388 1 4 1 -0.474630289993715 1 4 1 -0.819946152352902 2 4 1 -0.934441627702885 1 4 1 -0.730580755467428 1 4 1 -0.271300024998423 2 4 1 -0.304557666577329 1 4 1 -0.693593360198042 2 4 1 -0.275013203541771 2 4 1 -1.04949192092882 1 4 1 -0.912930864419114 2 4 1 -0.245497508402249 1 4 1 -0.340496536712849 1 4 1 -0.206302342316397 1 4 1 -0.349666013963138 1 4 1 -0.391997601731428 2 4 1 -0.31394682736151 2 4 1 -0.19581160832742 1 4 1 -1.06233406823564 1 4 1 -0.415830063959857 2 4 1 -0.593378048695084 2 4 1 -0.684233440252769 2 4 1 -0.688883920330433 2 4 1 -1.6950993730873 2 4 1 -0.432896756723563 2 4 1 -0.431906140578051 2 4 1 -1.52282637445437 1 4 1 -0.405643762755849 2 4 1 -0.3906375449839 2 4 1 -0.493637490245746 2 4 1 -0.253401712848908 1 4 1 -0.651891849887022 1 4 1 -0.191738721475655 1 4 1 -0.274200778844831 1 4 1 -0.687221734267395 1 4 1 -0.401284964672383 2 4 1 -1.30759449667971 2 4 1 -0.45371033421329 1 4 1 -0.414999090718536 1 4 1 -0.339864441346799 2 4 1 -0.224062601508878 1 4 1 -0.267986100808596 2 4 1 -0.205728192609412 1 4 1 -0.294602964122564 1 4 1 -0.57048745621061 2 4 1 -0.312999536718822 2 4 1 -0.468539738808252 2 4 1 -0.433964076498741 2 4 1 -0.263357126730855 1 4 1 -0.403225684575735 2 4 1 -0.505097149684945 1 4 1 -0.352103525497929 1 4 1 -0.353799144457459 2 4 1 -1.93501413202878 2 4 1 -0.234778101685937 1 4 1 -0.915861513813591 1 4 1 -0.222561853971709 2 4 1 -0.704078227413159 1 4 1 -0.324489257545476 1 4 1 -0.360961619402915 1 4 1 -1.08221841300972 2 4 1 -0.255068442126581 1 4 1 -0.427031071589034 2 4 1 -0.268383547265101 2 4 1 -0.527836858914301 2 4 1 -0.436588005092254 2 4 1 -0.63511533446365 1 4 1 -0.234879561500897 2 4 1 -0.401628516319902 1 4 1 -0.244232194192342 2 4 1 -0.228576135937129 1 4 1 -0.479791504967864 1 4 1 -0.623390007344234 2 4 1 -0.464270391434225 1 4 1 -0.366609113909401 2 4 1 -0.96274637101792 1 4 1 -0.576483736943277 2 4 1 -0.403581781434509 2 4 1 -0.639484168435868 2 4 1 -0.62685113380266 2 4 1 -0.504458338039312 1 4 1 -0.402748748564798 2 4 1 -0.214342237683536 2 4 1 -0.349689666731501 1 4 1 -0.920646509992372 2 4 1 -0.27944693184416 1 4 1 -0.44815474516242 2 4 1 -0.205465436502175 1 4 1 -0.58051540184786 2 4 1 -1.39132555887266 2 4 1 -0.364514642632317 2 4 1 -0.70643155804624 1 4 1 -0.323310314942546 2 4 1 -0.175430213692877 1 4 1 -0.324665401127095 2 4 1 -0.339971125940635 1 4 1 -0.357027781829466 2 4 1 -0.197390554919544 1 4 1 -1.19812104028543 1 4 1 -0.362575817654 2 4 1 -0.38070286088775 2 4 1 -0.468393488473505 2 4 1 -0.69309042773624 2 4 1 -0.489049481806581 2 4 1 -0.550976348834216 2 4 1 -0.359974012139019 2 4 1 -0.430662557513296 2 4 1 -0.230762012931962 1 4 1 -0.464324012490502 2 4 1 -0.312270150962999 2 4 1 -0.407424579565668 1 4 1 -1.69173371199779 2 4 1 -0.279156190669336 1 4 1 -1.09358568468554 2 4 1 -0.799141912889128 2 4 1 -0.471229301469417 2 4 1 -0.86570523590372 2 4 1 -0.83707942075239 2 4 1 -0.301114204037757 2 4 1 -0.26254546100438 1 4 1 -0.480187644205008 2 4 1 -1.26489425453197 1 4 1 -0.348490685840493 
[... remaining deleted rows of the preceding example-data file omitted; each "-"-prefixed row holds four whitespace-delimited values (RT, choice, subjID, condition) ...]
diff --git a/inst/extdata/choiceRT_single_exampleData.txt b/inst/extdata/choiceRT_single_exampleData.txt
deleted file mode 100644
index c925a82a..00000000
--- a/inst/extdata/choiceRT_single_exampleData.txt
+++ /dev/null
@@ -1,1001 +0,0 @@
-RT choice subjID condition
-0.238126253704183 1 1 1
-0.788334139249308 2 1 1
-0.524351202388138 2 1 1
[... remaining 997 deleted rows omitted ...]
diff --git a/inst/extdata/cra_exampleData.txt b/inst/extdata/cra_exampleData.txt
deleted file mode 100644
index a658ea69..00000000
--- a/inst/extdata/cra_exampleData.txt
+++ /dev/null
@@ -1,541 +0,0 @@
-subjID trial_number RT prob reward_var reward_fix outcome types ambig choice
-1 1 2579 0.5 342 50 0 ambiguous 0.75 0
-1 2 1736 0.375 91 50 0 low 0 1
-1 3 1006 0.5 342 50 342 ambiguous 0.5 1
[... remaining 537 deleted rows omitted ...]
low 0 1 -2 19 62 0.375 96 50 50 low 0 0 -2 20 456 0.25 649 50 50 low 0 0 -2 21 653 0.5 91 50 0 ambiguous 0.75 1 -2 22 282 0.25 184 50 50 low 0 0 -2 23 42 0.125 49 50 0 low 0 0 -2 24 52 0.125 181 50 0 low 0 0 -2 25 443 0.125 180 50 0 low 0 0 -2 26 353 0.25 181 50 181 low 0 1 -2 27 265 0.375 48 50 0 low 0 0 -2 28 245 0.5 647 50 0 ambiguous 0.5 1 -2 29 286 0.125 647 50 0 low 0 0 -2 30 198 0.25 336 50 0 low 0 0 -2 31 76 0.5 49 50 0 ambiguous 0.5 0 -2 32 261 0.5 340 50 0 ambiguous 0.25 1 -2 33 166 0.5 99 50 50 ambiguous 0.5 0 -2 34 333 0.125 336 50 0 low 0 0 -2 35 99 0.125 340 50 0 low 0 0 -2 36 255 0.5 647 50 0 ambiguous 0.25 0 -2 37 257 0.5 647 50 0 ambiguous 0.75 1 -2 38 199 0.375 184 50 0 low 0 1 -2 39 118 0.375 49 50 0 low 0 0 -2 40 233 0.5 180 50 50 ambiguous 0.5 0 -2 41 49 0.5 49 50 49 ambiguous 0.25 1 -2 42 102 0.25 48 50 0 low 0 0 -2 43 512 0.5 181 50 181 ambiguous 0.75 1 -2 44 20 0.125 336 50 50 low 0 0 -2 45 198 0.5 48 50 50 ambiguous 0.5 0 -2 46 201 0.25 340 50 50 low 0 0 -2 47 17 0.5 96 50 96 ambiguous 0.5 1 -2 48 74 0.25 47 50 50 low 0 0 -2 49 211 0.25 99 50 50 low 0 0 -2 50 109 0.5 49 50 0 ambiguous 0.75 0 -2 51 410 0.125 649 50 50 low 0 0 -2 52 304 0.5 649 50 50 ambiguous 0.25 0 -2 53 220 0.25 91 50 0 low 0 0 -2 54 21 0.5 336 50 50 ambiguous 0.75 0 -2 55 271 0.5 48 50 0 ambiguous 0.75 0 -2 56 1458 0.125 99 50 50 low 0 0 -2 57 254 0.25 49 50 50 low 0 0 -2 58 216 0.5 340 50 0 ambiguous 0.75 1 -2 59 241 0.375 647 50 0 low 0 1 -2 60 21 0.5 647 50 0 ambiguous 0.75 1 -2 61 8 0.5 340 50 340 ambiguous 0.5 1 -2 62 168 0.5 336 50 0 ambiguous 0.5 1 -2 63 387 0.5 184 50 50 ambiguous 0.5 0 -2 64 266 0.375 99 50 0 low 0 0 -2 65 277 0.5 91 50 0 ambiguous 0.25 0 -2 66 350 0.5 647 50 0 ambiguous 0.5 0 -2 67 358 0.5 47 50 0 ambiguous 0.5 0 -2 68 407 0.5 184 50 0 ambiguous 0.25 0 -2 69 5 0.125 647 50 50 low 0 0 -2 70 369 0.125 48 50 50 low 0 0 -2 71 175 0.375 649 50 649 low 0 1 -2 72 650 0.25 647 50 50 low 0 0 -2 73 459 0.5 336 50 0 ambiguous 0.5 1 -2 74 129 0.5 96 50 0 ambiguous 0.75 0 -2 75 443 0.125 96 50 50 low 0 0 -2 76 398 0.375 340 50 50 low 0 0 -2 77 105 0.5 99 50 0 ambiguous 0.25 1 -2 78 239 0.125 47 50 0 low 0 0 -2 79 76 0.5 48 50 50 ambiguous 0.25 0 -2 80 198 0.25 336 50 336 low 0 1 -2 81 186 0.5 649 50 649 ambiguous 0.75 1 -2 82 130 0.5 181 50 0 ambiguous 0.25 1 -2 83 211 0.5 336 50 336 ambiguous 0.75 1 -2 84 231 0.5 180 50 50 ambiguous 0.75 0 -2 85 75 0.5 181 50 0 ambiguous 0.5 1 -2 86 41 0.375 47 50 50 low 0 0 -2 87 406 0.125 184 50 0 low 0 1 -2 88 367 0.25 96 50 0 low 0 1 -2 89 100 0.5 336 50 336 ambiguous 0.25 1 -2 90 967 0.375 336 50 0 low 0 0 -3 1 2755 0.5 341 50 0 ambiguous 0.25 1 -3 2 1695 0.125 183 50 0 low 0 1 -3 3 1291 0.5 92 50 0 ambiguous 0.75 0 -3 4 940 0.25 341 50 0 low 0 1 -3 5 1716 0.25 342 50 0 low 0 1 -3 6 1165 0.375 653 50 0 low 0 1 -3 7 1306 0.5 343 50 343 ambiguous 0.75 1 -3 8 1815 0.5 182 50 0 ambiguous 0.25 1 -3 9 1467 0.125 653 50 0 low 0 1 -3 10 1420 0.5 343 50 0 ambiguous 0.25 1 -3 11 1625 0.25 653 50 0 low 0 1 -3 12 1157 0.5 646 50 646 ambiguous 0.5 1 -3 13 1225 0.5 183 50 0 ambiguous 0.25 1 -3 14 1438 0.25 183 50 183 low 0 1 -3 15 1683 0.5 653 50 0 ambiguous 0.75 1 -3 16 1838 0.5 50 50 0 ambiguous 0.5 0 -3 17 1618 0.25 50 50 0 low 0 0 -3 18 1708 0.5 183 50 183 ambiguous 0.75 1 -3 19 970 0.5 94 50 0 ambiguous 0.75 1 -3 20 1151 0.5 653 50 0 ambiguous 0.75 1 -3 21 1928 0.5 646 50 0 ambiguous 0.25 1 -3 22 1758 0.5 653 50 0 ambiguous 0.5 1 -3 23 2629 0.125 653 50 0 low 0 1 -3 24 1439 0.5 183 50 0 ambiguous 0.25 1 -3 25 1328 0.5 50 50 0 ambiguous 0.75 0 -3 26 
1193 0.5 342 50 0 ambiguous 0.75 1 -3 27 1290 0.5 94 50 94 ambiguous 0.5 1 -3 28 1487 0.5 183 50 0 ambiguous 0.5 1 -3 29 1154 0.5 94 50 50 ambiguous 0.25 0 -3 30 1205 0.375 94 50 50 low 0 0 -3 31 1449 0.25 182 50 0 low 0 1 -3 32 1497 0.5 342 50 0 ambiguous 0.25 1 -3 33 1430 0.25 183 50 0 low 0 1 -3 34 1514 0.375 92 50 0 low 0 0 -3 35 992 0.5 653 50 0 ambiguous 0.25 1 -3 36 1920 0.5 343 50 0 ambiguous 0.5 1 -3 37 1612 0.5 653 50 653 ambiguous 0.5 1 -3 38 1224 0.5 341 50 0 ambiguous 0.5 1 -3 39 549 0.375 342 50 342 low 0 1 -3 40 617 0.5 94 50 0 ambiguous 0.5 0 -3 41 1139 0.125 341 50 341 low 0 1 -3 42 1991 0.375 50 50 50 low 0 1 -3 43 1678 0.125 94 50 0 low 0 0 -3 44 1776 0.375 94 50 50 low 0 0 -3 45 1733 0.125 183 50 0 low 0 1 -3 46 1216 0.25 343 50 0 low 0 1 -3 47 1125 0.375 182 50 0 low 0 1 -3 48 1618 0.125 342 50 0 low 0 1 -3 49 1828 0.375 51 50 50 low 0 0 -3 50 1781 0.25 646 50 0 low 0 1 -3 51 553 0.375 183 50 0 low 0 1 -3 52 899 0.5 183 50 183 ambiguous 0.75 1 -3 53 388 0.125 47 50 50 low 0 0 -3 54 615 0.5 47 50 0 ambiguous 0.75 0 -3 55 594 0.375 343 50 0 low 0 1 -3 56 346 0.25 47 50 0 low 0 0 -3 57 1069 0.125 343 50 0 low 0 1 -3 58 894 0.5 51 50 50 ambiguous 0.25 0 -3 59 576 0.5 646 50 0 ambiguous 0.75 1 -3 60 592 0.125 182 50 0 low 0 1 -3 61 1508 0.5 92 50 0 ambiguous 0.5 0 -3 62 383 0.375 646 50 646 low 0 1 -3 63 428 0.5 51 50 50 ambiguous 0.5 0 -3 64 432 0.125 51 50 50 low 0 0 -3 65 454 0.375 47 50 0 low 0 0 -3 66 926 0.5 92 50 0 ambiguous 0.25 0 -3 67 346 0.375 341 50 341 low 0 1 -3 68 355 0.5 51 50 0 ambiguous 0.75 0 -3 69 879 0.25 94 50 50 low 0 0 -3 70 827 0.125 92 50 0 low 0 0 -3 71 437 0.5 182 50 182 ambiguous 0.5 1 -3 72 432 0.5 47 50 0 ambiguous 0.25 0 -3 73 411 0.5 341 50 0 ambiguous 0.75 0 -3 74 1125 0.375 183 50 183 low 0 1 -3 75 422 0.125 646 50 0 low 0 1 -3 76 290 0.5 47 50 50 ambiguous 0.5 0 -3 77 366 0.25 94 50 0 low 0 0 -3 78 360 0.25 653 50 0 low 0 1 -3 79 396 0.375 653 50 653 low 0 1 -3 80 408 0.125 94 50 0 low 0 0 -3 81 442 0.5 183 50 0 ambiguous 0.5 1 -3 82 419 0.25 92 50 50 low 0 0 -3 83 1415 0.5 50 50 0 ambiguous 0.25 0 -3 84 1163 0.5 182 50 50 ambiguous 0.75 0 -3 85 717 0.5 94 50 50 ambiguous 0.25 0 -3 86 537 0.5 342 50 0 ambiguous 0.5 1 -3 87 1530 0.5 94 50 50 ambiguous 0.75 0 -3 88 1024 0.25 51 50 0 low 0 0 -3 89 375 0.5 653 50 653 ambiguous 0.25 1 -3 90 777 0.125 50 50 0 low 0 0 -4 1 940 0.5 339 50 339 ambiguous 0.75 1 -4 2 3222 0.5 337 50 337 ambiguous 0.75 1 -4 3 1295 0.25 184 50 0 low 0 1 -4 4 1943 0.5 182 50 0 ambiguous 0.25 1 -4 5 1176 0.375 652 50 652 low 0 1 -4 6 918 0.5 337 50 0 ambiguous 0.25 1 -4 7 1404 0.25 99 50 0 low 0 1 -4 8 1259 0.125 52 50 0 low 0 1 -4 9 1847 0.125 337 50 0 low 0 1 -4 10 952 0.5 182 50 182 ambiguous 0.75 1 -4 11 1341 0.5 52 50 0 ambiguous 0.25 1 -4 12 2206 0.5 93 50 0 ambiguous 0.75 1 -4 13 4242 0.375 182 50 0 low 0 1 -4 14 13020 0.125 339 50 0 low 0 0 -4 15 1142 0.375 179 50 0 low 0 1 -4 16 1633 0.5 339 50 0 ambiguous 0.5 1 -4 17 1077 0.25 94 50 50 low 0 0 -4 18 2892 0.5 48 50 0 ambiguous 0.75 0 -4 19 524 0.5 652 50 652 ambiguous 0.5 1 -4 20 797 0.5 337 50 337 ambiguous 0.5 1 -4 21 1576 0.5 650 50 650 ambiguous 0.5 1 -4 22 1018 0.25 339 50 0 low 0 1 -4 23 1626 0.25 339 50 0 low 0 1 -4 24 766 0.5 94 50 0 ambiguous 0.25 1 -4 25 1089 0.5 94 50 94 ambiguous 0.75 1 -4 26 546 0.5 650 50 650 ambiguous 0.75 1 -4 27 982 0.125 93 50 0 low 0 0 -4 28 1950 0.125 650 50 650 low 0 1 -4 29 663 0.125 179 50 0 low 0 1 -4 30 482 0.375 650 50 650 low 0 1 -4 31 634 0.25 337 50 337 low 0 1 -4 32 466 0.5 94 50 94 ambiguous 0.5 1 -4 33 1844 0.25 
182 50 0 low 0 1 -4 34 576 0.375 339 50 0 low 0 1 -4 35 618 0.125 182 50 50 low 0 0 -4 36 659 0.5 48 50 0 ambiguous 0.75 0 -4 37 389 0.125 652 50 0 low 0 1 -4 38 1116 0.375 99 50 0 low 0 1 -4 39 2504 0.25 93 50 0 low 0 1 -4 40 374 0.5 650 50 650 ambiguous 0.25 1 -4 41 342 0.5 179 50 179 ambiguous 0.5 1 -4 42 409 0.375 48 50 48 low 0 1 -4 43 2010 0.125 48 50 50 low 0 0 -4 44 445 0.5 179 50 0 ambiguous 0.25 1 -4 45 412 0.5 184 50 184 ambiguous 0.25 1 -4 46 404 0.375 650 50 0 low 0 1 -4 47 392 0.125 184 50 50 low 0 0 -4 48 1678 0.125 339 50 339 low 0 1 -4 49 428 0.5 339 50 0 ambiguous 0.75 1 -4 50 385 0.5 99 50 99 ambiguous 0.25 1 -4 51 370 0.5 93 50 93 ambiguous 0.25 1 -4 52 537 0.25 48 50 50 low 0 0 -4 53 1625 0.5 52 50 0 ambiguous 0.5 1 -4 54 355 0.25 650 50 0 low 0 1 -4 55 400 0.25 650 50 650 low 0 1 -4 56 381 0.5 48 50 50 ambiguous 0.5 0 -4 57 339 0.5 339 50 339 ambiguous 0.25 1 -4 58 320 0.375 339 50 0 low 0 1 -4 59 375 0.375 48 50 50 low 0 0 -4 60 525 0.5 184 50 184 ambiguous 0.75 1 -4 61 1071 0.125 99 50 50 low 0 0 -4 62 1389 0.5 652 50 652 ambiguous 0.75 1 -4 63 359 0.5 652 50 0 ambiguous 0.25 1 -4 64 412 0.375 337 50 0 low 0 1 -4 65 434 0.5 650 50 0 ambiguous 0.75 1 -4 66 566 0.5 339 50 0 ambiguous 0.5 1 -4 67 875 0.375 94 50 0 low 0 1 -4 68 361 0.375 93 50 93 low 0 1 -4 69 381 0.125 48 50 0 low 0 0 -4 70 671 0.5 650 50 650 ambiguous 0.5 1 -4 71 900 0.25 48 50 0 low 0 0 -4 72 394 0.5 48 50 0 ambiguous 0.25 0 -4 73 294 0.5 179 50 0 ambiguous 0.75 1 -4 74 249 0.5 93 50 93 ambiguous 0.5 1 -4 75 341 0.375 184 50 0 low 0 1 -4 76 1096 0.5 182 50 0 ambiguous 0.5 1 -4 77 1049 0.25 52 50 0 low 0 0 -4 78 339 0.5 48 50 0 ambiguous 0.25 0 -4 79 418 0.5 650 50 0 ambiguous 0.25 1 -4 80 415 0.375 52 50 50 low 0 0 -4 81 354 0.5 339 50 0 ambiguous 0.25 1 -4 82 1097 0.25 652 50 0 low 0 1 -4 83 580 0.125 94 50 50 low 0 0 -4 84 360 0.5 99 50 0 ambiguous 0.5 1 -4 85 1281 0.25 179 50 0 low 0 1 -4 86 642 0.125 650 50 0 low 0 1 -4 87 279 0.5 99 50 0 ambiguous 0.75 1 -4 88 926 0.5 52 50 0 ambiguous 0.75 0 -4 89 906 0.5 48 50 0 ambiguous 0.5 0 -4 90 326 0.5 184 50 0 ambiguous 0.5 1 -5 1 459 0.5 340 50 340 ambiguous 0.5 1 -5 2 762 0.5 52 50 0 ambiguous 0.5 0 -5 3 623 0.5 97 50 97 ambiguous 0.75 1 -5 4 722 0.5 337 50 337 ambiguous 0.5 1 -5 5 1220 0.5 183 50 0 ambiguous 0.25 1 -5 6 983 0.25 52 50 0 low 0 0 -5 7 919 0.375 650 50 0 low 0 1 -5 8 802 0.375 183 50 183 low 0 1 -5 9 834 0.5 339 50 339 ambiguous 0.75 1 -5 10 810 0.5 52 50 0 ambiguous 0.75 0 -5 11 657 0.5 649 50 649 ambiguous 0.75 1 -5 12 801 0.25 650 50 0 low 0 1 -5 13 803 0.5 50 50 50 ambiguous 0.75 0 -5 14 839 0.125 50 50 0 low 0 0 -5 15 824 0.125 50 50 0 low 0 0 -5 16 950 0.5 50 50 50 ambiguous 0.25 0 -5 17 870 0.5 183 50 183 ambiguous 0.5 1 -5 18 776 0.375 92 50 92 low 0 1 -5 19 854 0.125 97 50 50 low 0 0 -5 20 760 0.5 92 50 92 ambiguous 0.75 1 -5 21 713 0.125 649 50 0 low 0 1 -5 22 821 0.5 337 50 337 ambiguous 0.25 1 -5 23 810 0.5 650 50 0 ambiguous 0.75 1 -5 24 1050 0.375 340 50 0 low 0 1 -5 25 928 0.375 654 50 0 low 0 1 -5 26 725 0.5 50 50 50 ambiguous 0.75 0 -5 27 728 0.25 337 50 0 low 0 1 -5 28 657 0.5 654 50 654 ambiguous 0.25 1 -5 29 703 0.5 92 50 92 ambiguous 0.5 1 -5 30 823 0.375 183 50 183 low 0 1 -5 31 852 0.5 99 50 0 ambiguous 0.5 1 -5 32 638 0.5 649 50 649 ambiguous 0.5 1 -5 33 861 0.5 339 50 0 ambiguous 0.5 1 -5 34 768 0.375 184 50 184 low 0 1 -5 35 641 0.25 340 50 0 low 0 0 -5 36 741 0.375 339 50 339 low 0 1 -5 37 829 0.5 183 50 183 ambiguous 0.5 1 -5 38 782 0.5 340 50 0 ambiguous 0.75 1 -5 39 909 0.5 97 50 97 ambiguous 0.5 1 -5 40 
736 0.125 654 50 0 low 0 0 -5 41 883 0.5 649 50 649 ambiguous 0.25 1 -5 42 681 0.25 97 50 50 low 0 0 -5 43 893 0.25 92 50 0 low 0 1 -5 44 810 0.375 50 50 50 low 0 0 -5 45 1219 0.5 52 50 0 ambiguous 0.25 0 -5 46 911 0.25 649 50 50 low 0 0 -5 47 781 0.5 340 50 0 ambiguous 0.25 1 -5 48 763 0.375 337 50 0 low 0 1 -5 49 810 0.5 184 50 184 ambiguous 0.5 1 -5 50 756 0.25 654 50 50 low 0 0 -5 51 735 0.375 97 50 0 low 0 1 -5 52 728 0.375 649 50 0 low 0 1 -5 53 1035 0.5 337 50 0 ambiguous 0.75 1 -5 54 743 0.5 183 50 183 ambiguous 0.25 1 -5 55 857 0.5 99 50 50 ambiguous 0.75 0 -5 56 742 0.5 339 50 339 ambiguous 0.25 1 -5 57 652 0.5 650 50 0 ambiguous 0.5 1 -5 58 777 0.5 92 50 0 ambiguous 0.25 1 -5 59 837 0.5 50 50 50 ambiguous 0.25 0 -5 60 775 0.5 50 50 50 ambiguous 0.5 0 -5 61 872 0.25 183 50 50 low 0 0 -5 62 789 0.5 654 50 654 ambiguous 0.75 1 -5 63 793 0.375 99 50 0 low 0 1 -5 64 888 0.125 650 50 50 low 0 0 -5 65 851 0.5 99 50 50 ambiguous 0.25 0 -5 66 878 0.25 50 50 0 low 0 0 -5 67 920 0.375 52 50 50 low 0 0 -5 68 772 0.25 183 50 0 low 0 1 -5 69 784 0.25 184 50 0 low 0 1 -5 70 957 0.5 650 50 650 ambiguous 0.25 1 -5 71 746 0.5 183 50 183 ambiguous 0.75 1 -5 72 784 0.5 184 50 0 ambiguous 0.25 1 -5 73 750 0.125 340 50 50 low 0 0 -5 74 746 0.5 50 50 0 ambiguous 0.5 0 -5 75 937 0.125 184 50 50 low 0 0 -5 76 836 0.125 339 50 0 low 0 1 -5 77 720 0.25 50 50 50 low 0 0 -5 78 729 0.25 99 50 0 low 0 1 -5 79 639 0.5 183 50 183 ambiguous 0.75 1 -5 80 784 0.125 99 50 0 low 0 0 -5 81 599 0.25 339 50 0 low 0 1 -5 82 705 0.375 50 50 0 low 0 0 -5 83 817 0.125 183 50 0 low 0 0 -5 84 785 0.5 97 50 0 ambiguous 0.25 1 -5 85 726 0.125 183 50 50 low 0 0 -5 86 1112 0.125 92 50 0 low 0 0 -5 87 799 0.125 52 50 0 low 0 0 -5 88 818 0.5 654 50 0 ambiguous 0.5 1 -5 89 847 0.5 184 50 0 ambiguous 0.75 1 -5 90 778 0.125 337 50 0 low 0 0 -6 1 7265 0.25 648 50 0 low 0 1 -6 2 8033 0.375 651 50 0 low 0 1 -6 3 5415 0.375 338 50 338 low 0 1 -6 4 5183 0.5 337 50 0 ambiguous 0.5 1 -6 5 1609 0.375 54 50 0 low 0 0 -6 6 3036 0.5 646 50 0 ambiguous 0.75 1 -6 7 10138 0.5 49 50 50 ambiguous 0.75 0 -6 8 3121 0.375 648 50 0 low 0 1 -6 9 2224 0.25 176 50 50 low 0 0 -6 10 3415 0.125 49 50 0 low 0 0 -6 11 3309 0.5 646 50 0 ambiguous 0.25 1 -6 12 5624 0.25 184 50 184 low 0 1 -6 13 5032 0.5 54 50 50 ambiguous 0.75 0 -6 14 5991 0.5 53 50 0 ambiguous 0.5 0 -6 15 2220 0.25 176 50 0 low 0 1 -6 16 665 0.25 49 50 0 low 0 0 -6 17 6233 0.125 651 50 651 low 0 1 -6 18 6381 0.125 91 50 0 low 0 0 -6 19 15254 0.5 338 50 0 ambiguous 0.25 1 -6 20 8786 0.375 337 50 0 low 0 1 -6 21 11423 0.5 91 50 91 ambiguous 0.25 1 -6 22 5114 0.125 99 50 50 low 0 0 -6 23 2545 0.125 53 50 50 low 0 0 -6 24 13957 0.5 341 50 50 ambiguous 0.5 0 -6 25 1837 0.5 648 50 0 ambiguous 0.25 1 -6 26 4679 0.375 91 50 0 low 0 1 -6 27 2697 0.125 91 50 50 low 0 0 -6 28 12661 0.5 651 50 0 ambiguous 0.75 1 -6 29 1942 0.5 99 50 99 ambiguous 0.5 1 -6 30 3170 0.5 99 50 99 ambiguous 0.25 1 -6 31 6455 0.375 99 50 99 low 0 1 -6 32 3171 0.25 651 50 0 low 0 1 -6 33 5667 0.375 176 50 0 low 0 1 -6 34 4606 0.5 91 50 0 ambiguous 0.75 1 -6 35 9317 0.125 646 50 0 low 0 0 -6 36 1734 0.5 651 50 651 ambiguous 0.5 1 -6 37 6134 0.5 91 50 0 ambiguous 0.5 1 -6 38 1547 0.375 91 50 0 low 0 1 -6 39 729 0.5 176 50 176 ambiguous 0.25 1 -6 40 4438 0.5 49 50 0 ambiguous 0.25 0 -6 41 4940 0.25 54 50 50 low 0 0 -6 42 1126 0.5 49 50 50 ambiguous 0.5 0 -6 43 1726 0.5 176 50 50 ambiguous 0.75 0 -6 44 611 0.5 341 50 341 ambiguous 0.25 1 -6 45 982 0.5 91 50 91 ambiguous 0.25 1 -6 46 3389 0.5 184 50 184 ambiguous 0.5 1 -6 47 372 0.375 
184 50 0 low 0 1 -6 48 54 0.125 341 50 0 low 0 1 -6 49 5306 0.25 91 50 50 low 0 0 -6 50 806 0.25 91 50 50 low 0 0 -6 51 2225 0.25 341 50 0 low 0 0 -6 52 1382 0.5 651 50 0 ambiguous 0.25 1 -6 53 4960 0.5 176 50 0 ambiguous 0.25 1 -6 54 641 0.375 646 50 0 low 0 1 -6 55 1525 0.5 646 50 646 ambiguous 0.5 1 -6 56 1188 0.25 646 50 0 low 0 0 -6 57 2095 0.375 53 50 50 low 0 0 -6 58 346 0.125 54 50 50 low 0 0 -6 59 4855 0.25 338 50 50 low 0 0 -6 60 4182 0.25 337 50 50 low 0 0 -6 61 788 0.125 338 50 50 low 0 0 -6 62 2593 0.5 91 50 0 ambiguous 0.75 0 -6 63 163 0.125 184 50 50 low 0 0 -6 64 965 0.5 184 50 0 ambiguous 0.25 1 -6 65 927 0.5 176 50 0 ambiguous 0.5 1 -6 66 2085 0.125 176 50 50 low 0 0 -6 67 826 0.375 341 50 341 low 0 1 -6 68 5905 0.5 54 50 50 ambiguous 0.5 0 -6 69 27 0.375 49 50 0 low 0 0 -6 70 2324 0.25 53 50 0 low 0 0 -6 71 606 0.5 648 50 648 ambiguous 0.5 1 -6 72 8977 0.5 53 50 50 ambiguous 0.25 0 -6 73 3788 0.125 337 50 0 low 0 0 -6 74 3013 0.375 176 50 176 low 0 1 -6 75 732 0.5 53 50 50 ambiguous 0.75 0 -6 76 2932 0.5 648 50 0 ambiguous 0.75 1 -6 77 520 0.5 337 50 0 ambiguous 0.25 1 -6 78 4407 0.25 99 50 0 low 0 0 -6 79 5193 0.5 54 50 50 ambiguous 0.25 0 -6 80 3191 0.5 91 50 0 ambiguous 0.5 1 -6 81 390 0.5 176 50 0 ambiguous 0.5 1 -6 82 7450 0.5 341 50 341 ambiguous 0.75 1 -6 83 2018 0.5 337 50 50 ambiguous 0.75 0 -6 84 1206 0.5 184 50 0 ambiguous 0.75 0 -6 85 1363 0.125 648 50 0 low 0 0 -6 86 3957 0.5 338 50 0 ambiguous 0.5 1 -6 87 6344 0.125 176 50 0 low 0 0 -6 88 5897 0.5 99 50 0 ambiguous 0.75 0 -6 89 1421 0.5 338 50 50 ambiguous 0.75 0 -6 90 885 0.5 176 50 0 ambiguous 0.75 0 \ No newline at end of file diff --git a/inst/extdata/dbdm_exampleData.txt b/inst/extdata/dbdm_exampleData.txt deleted file mode 100644 index 0bb2520d..00000000 --- a/inst/extdata/dbdm_exampleData.txt +++ /dev/null @@ -1,15001 +0,0 @@ -subjID opt1hprob opt2hprob opt1hval opt1lval opt2hval opt2lval choice -1 0.9 0.5 -14 -30 30 -43 2 -1 0.3 0.3 18 -15 46 36 2 -1 0.5 0.5 -26 -44 10 -5 2 -1 0.2 0.4 -8 -43 26 17 2 -1 0.3 0.3 30 -37 44 24 2 -1 0.6 0.2 46 -26 10 -14 1 -1 0.8 0.9 48 -49 -12 -30 1 -1 0.8 0.1 -8 -16 48 0 2 -1 0.2 0.5 27 -30 28 27 2 -1 0.1 0.3 -3 -48 2 -34 2 -1 0.6 0.4 -30 -39 49 -31 2 -1 0.1 0.9 29 -4 8 7 2 -1 0.9 0.9 12 -21 27 -13 1 -1 0.9 0.1 -1 -39 43 11 2 -1 0.5 0.2 22 -18 22 -12 2 -1 0.5 0.2 -9 -50 -4 -12 2 -1 0.4 0.2 -22 -45 -12 -49 2 -1 0.2 0.7 39 -4 19 -36 2 -1 0.8 0.3 32 -24 3 -25 1 -1 0.7 0.6 41 0 38 31 2 -1 0.7 0.4 28 5 43 -4 2 -1 0.5 0.3 28 -24 33 -22 1 -1 0.6 0.8 23 -15 -7 -35 1 -1 0.1 0.2 25 -42 -31 -35 1 -1 0.1 0.5 49 -34 3 -9 2 -1 0.6 0.5 38 -16 -42 -49 1 -1 0.6 0.5 6 -46 21 -3 2 -1 0.5 0.9 -18 -50 32 -42 2 -1 0.8 0.3 9 4 42 13 2 -1 0.3 0.8 41 34 -23 -25 1 -1 0.1 0.6 38 30 21 -7 1 -1 0.4 0.3 21 -32 -3 -40 1 -1 0.8 0.6 43 4 33 -40 1 -1 0.2 0.4 2 -4 5 -30 1 -1 0.5 0.9 -11 -37 6 -28 2 -1 0.8 0.8 31 -4 31 9 2 -1 0.2 0.6 33 -26 -4 -44 2 -1 0.8 0.3 43 14 49 9 2 -1 0.8 0.4 33 20 30 18 2 -1 0.8 0.2 19 -35 -5 -41 1 -1 0.4 0.8 7 -9 16 -11 2 -1 0.1 0.3 -8 -41 34 -35 2 -1 0.8 0.5 47 15 8 7 1 -1 0.4 0.9 -1 -8 22 -6 2 -1 0.8 0.8 16 6 34 -43 1 -1 0.5 0.4 22 -44 1 -29 1 -1 0.5 0.4 17 -22 -15 -20 1 -1 0.6 0.7 14 -33 -3 -14 1 -1 0.3 0.6 19 10 -34 -45 1 -1 0.4 0.6 30 18 35 28 2 -1 0.3 0.4 -4 -5 43 -13 2 -1 0.7 0.4 -7 -43 11 0 2 -1 0.4 0.7 24 5 14 -47 1 -1 0.3 0.7 42 14 22 13 1 -1 0.7 0.9 44 8 32 -11 1 -1 0.9 0.4 47 36 -36 -41 1 -1 0.3 0.9 7 -47 15 -26 2 -1 0.9 0.8 4 -39 47 0 2 -1 0.6 0.8 48 -8 28 -46 1 -1 0.6 0.5 47 35 38 12 2 -1 0.9 0.9 35 9 11 -37 1 -1 0.4 0.1 30 -16 -29 -40 1 -1 0.6 0.7 17 -31 -32 -39 
1 -1 0.6 0.1 40 32 31 11 1 -1 0.1 0.1 -20 -38 49 -17 2 -1 0.7 0.6 36 -2 -42 -48 1 -1 0.5 0.5 9 -39 -1 -18 2 -1 0.6 0.5 46 -48 49 33 2 -1 0.6 0.7 -34 -46 49 38 2 -1 0.8 0.5 47 39 -5 -44 1 -1 0.5 0.9 41 -32 44 -35 2 -1 0.8 0.4 50 -41 38 6 1 -1 0.8 0.5 14 -24 -30 -43 1 -1 0.7 0.7 27 -32 17 -3 2 -1 0.6 0.1 48 -4 8 4 1 -1 0.6 0.3 10 -10 -22 -30 1 -1 0.3 0.7 3 -45 0 -39 2 -1 0.5 0.4 41 33 45 12 1 -1 0.5 0.1 39 -32 -34 -41 1 -1 0.9 0.5 40 33 10 8 1 -1 0.2 0.3 -2 -17 -4 -35 1 -1 0.6 0.2 25 -13 45 5 1 -1 0.2 0.1 10 -7 19 -23 1 -1 0.9 0.1 49 -21 29 25 2 -1 0.8 0.1 45 19 39 -44 1 -1 0.7 0.3 48 40 48 1 1 -1 0.8 0.7 37 -37 41 28 2 -1 0.3 0.8 26 -20 35 30 2 -1 0.2 0.2 0 -17 14 -36 2 -1 0.8 0.2 20 -19 -4 -29 1 -1 0.5 0.7 -7 -11 -16 -29 2 -1 0.8 0.4 48 -27 -1 -39 1 -1 0.3 0.9 15 -33 18 -14 2 -1 0.6 0.2 -12 -21 -34 -44 1 -1 0.5 0.7 26 1 10 -6 1 -1 0.9 0.1 35 -48 35 -9 1 -1 0.5 0.6 32 1 -4 -5 1 -1 0.7 0.7 28 2 42 -19 2 -1 0.6 0.6 20 3 42 7 2 -1 0.2 0.4 36 -25 16 -28 2 -1 0.1 0.4 12 -7 -10 -48 1 -1 0.7 0.2 -1 -24 47 -4 2 -1 0.3 0.7 -24 -35 33 27 2 -1 0.9 0.8 19 -47 23 -43 1 -1 0.1 0.7 38 -24 15 2 2 -1 0.1 0.4 48 -9 34 -40 1 -1 0.7 0.1 32 -35 23 -14 1 -1 0.1 0.4 23 4 -15 -34 1 -1 0.2 0.8 -9 -13 38 -42 2 -1 0.9 0.3 -35 -44 15 -44 1 -1 0.4 0.2 4 -8 18 -39 1 -1 0.7 0.4 22 17 42 -14 1 -1 0.5 0.5 25 -16 -4 -19 2 -1 0.8 0.2 41 -24 0 -22 1 -1 0.4 0.3 27 22 23 7 1 -1 0.1 0.3 17 5 15 -16 1 -1 0.8 0.5 -7 -41 49 16 2 -1 0.2 0.7 35 3 -6 -36 1 -1 0.3 0.6 19 -15 16 14 2 -1 0.7 0.3 34 14 22 -27 1 -1 0.7 0.3 39 -42 45 43 2 -1 0.2 0.5 47 -32 41 -39 2 -1 0.8 0.1 20 -4 43 29 2 -1 0.4 0.3 42 -3 8 -26 1 -1 0.6 0.9 24 2 -27 -44 1 -1 0.7 0.4 39 -44 47 16 2 -1 0.2 0.9 -2 -14 0 -24 2 -1 0.9 0.7 -24 -50 45 27 2 -1 0.9 0.4 -22 -35 26 -21 2 -1 0.2 0.1 16 11 26 -49 1 -1 0.2 0.6 1 -21 32 16 2 -1 0.1 0.7 -25 -42 50 -8 2 -1 0.7 0.1 -2 -37 -7 -10 1 -1 0.9 0.6 -24 -26 -7 -25 1 -1 0.8 0.1 33 -35 43 -47 1 -1 0.7 0.8 24 -23 49 15 2 -1 0.8 0.8 -15 -20 26 16 2 -1 0.3 0.4 40 -18 14 -47 2 -1 0.1 0.3 23 -17 49 -36 2 -1 0.1 0.2 14 -41 17 -36 2 -1 0.4 0.7 34 3 -20 -44 1 -1 0.5 0.7 -48 -50 38 12 2 -1 0.1 0.2 -20 -47 -20 -45 2 -1 0.4 0.3 41 -16 13 -27 1 -1 0.8 0.4 40 20 29 -12 1 -1 0.6 0.1 -9 -15 -6 -46 1 -1 0.1 0.1 16 -41 48 -24 2 -1 0.9 0.7 7 -50 11 -33 1 -1 0.3 0.1 39 -34 7 -19 1 -1 0.1 0.5 17 -35 -33 -35 1 -1 0.1 0.1 46 38 10 -13 1 -1 0.2 0.2 35 -30 7 3 2 -1 0.7 0.3 -44 -48 6 5 2 -1 0.6 0.8 -33 -36 5 -7 2 -1 0.2 0.4 -42 -45 0 -22 2 -1 0.9 0.1 -15 -49 -6 -33 1 -1 0.4 0.2 46 3 -26 -33 1 -1 0.5 0.9 7 -21 7 -9 2 -1 0.6 0.9 45 3 37 25 2 -1 0.3 0.1 9 1 25 -41 1 -1 0.4 0.6 -3 -10 22 15 2 -1 0.5 0.1 39 -22 4 -28 1 -1 0.2 0.7 4 -35 -12 -14 2 -1 0.4 0.4 17 0 24 -49 1 -1 0.3 0.7 28 6 19 -18 1 -1 0.9 0.7 26 -28 28 -47 1 -1 0.9 0.5 37 -34 16 10 1 -1 0.8 0.2 0 -47 45 43 2 -1 0.1 0.3 44 39 -6 -47 1 -1 0.7 0.6 -24 -33 35 1 2 -1 0.5 0.1 24 5 23 16 2 -1 0.2 0.4 -8 -41 22 -46 2 -1 0.3 0.7 16 4 36 35 2 -1 0.6 0.1 -11 -26 45 44 2 -1 0.7 0.7 -8 -49 48 -48 2 -1 0.2 0.4 36 1 3 -29 1 -1 0.6 0.4 13 -16 18 -17 1 -1 0.9 0.1 43 26 -15 -41 1 -1 0.6 0.7 12 -35 43 29 2 -1 0.9 0.7 36 1 23 -23 1 -1 0.3 0.4 4 -10 28 -26 1 -1 0.8 0.1 -19 -46 33 -30 2 -1 0.2 0.3 43 -31 50 3 2 -1 0.2 0.5 42 -6 1 -40 2 -1 0.8 0.5 24 -31 43 33 2 -1 0.2 0.1 -20 -47 26 -25 2 -1 0.5 0.3 4 -19 50 -48 1 -1 0.7 0.7 31 30 -16 -26 1 -1 0.2 0.2 42 -7 13 -13 2 -1 0.2 0.8 47 -37 25 -23 2 -1 0.6 0.6 46 -4 19 -1 1 -1 0.2 0.5 22 16 31 8 1 -1 0.5 0.8 11 2 -26 -47 1 -1 0.4 0.6 -3 -27 4 -47 1 -1 0.7 0.3 3 -30 46 4 2 -1 0.5 0.2 40 14 50 15 2 -1 0.6 0.6 26 -3 23 -42 1 -1 0.2 0.4 17 -5 48 38 2 -1 0.7 0.1 -41 -50 33 5 2 -1 0.3 
0.3 36 -3 38 -16 1 -1 0.8 0.4 17 -37 7 -11 1 -1 0.8 0.4 44 -15 -8 -47 1 -1 0.1 0.9 37 10 34 21 2 -1 0.8 0.5 -18 -29 5 -16 2 -1 0.8 0.7 48 -25 -5 -8 1 -1 0.6 0.1 36 -25 36 -37 1 -1 0.6 0.6 29 19 8 -19 1 -1 0.8 0.9 16 -26 38 -33 2 -1 0.4 0.4 9 1 42 8 2 -1 0.6 0.3 36 -19 49 43 2 -1 0.3 0.9 23 12 -9 -24 1 -1 0.5 0.2 -2 -34 -9 -32 1 -1 0.9 0.2 -3 -44 42 -3 2 -1 0.6 0.9 41 -47 15 -34 1 -1 0.1 0.5 38 33 -23 -48 1 -1 0.9 0.7 15 -5 23 -19 2 -1 0.5 0.7 34 -29 23 19 2 -1 0.4 0.1 44 -25 3 -27 1 -1 0.4 0.9 26 25 -27 -37 1 -1 0.6 0.4 32 -9 31 -18 1 -1 0.1 0.5 -22 -29 32 -10 2 -1 0.1 0.3 26 10 31 -47 1 -1 0.6 0.5 42 -40 42 -41 1 -1 0.7 0.9 47 -34 40 -28 1 -1 0.8 0.6 -12 -36 20 -16 2 -1 0.9 0.6 25 -31 27 10 1 -1 0.5 0.6 21 -29 -4 -8 1 -1 0.5 0.8 -2 -19 47 41 2 -1 0.4 0.7 37 -14 -5 -8 1 -1 0.1 0.4 4 -17 -4 -27 2 -1 0.5 0.1 0 -49 40 12 2 -1 0.2 0.1 -9 -18 17 -49 1 -1 0.3 0.3 43 -47 30 -16 2 -1 0.8 0.7 39 -3 43 -21 1 -1 0.8 0.2 -28 -33 9 -25 2 -1 0.5 0.2 24 -50 50 5 2 -1 0.2 0.5 33 0 44 -18 2 -1 0.9 0.8 34 1 38 36 2 -1 0.2 0.7 -22 -36 15 -6 2 -1 0.6 0.3 42 16 31 -29 1 -1 0.7 0.9 9 -11 49 30 2 -1 0.6 0.6 43 -22 32 -22 2 -1 0.3 0.4 38 37 41 -39 1 -1 0.9 0.2 32 25 42 -33 1 -1 0.8 0.3 32 30 48 -30 1 -1 0.3 0.7 -4 -30 10 8 2 -1 0.7 0.7 -12 -14 -34 -50 1 -1 0.9 0.8 42 38 31 -40 1 -1 0.4 0.2 4 -43 -8 -11 2 -1 0.1 0.5 13 -16 27 10 2 -1 0.5 0.4 7 -22 5 -46 1 -1 0.3 0.4 45 -31 32 4 2 -1 0.8 0.7 38 -26 45 -27 1 -1 0.9 0.5 -4 -10 48 -7 2 -1 0.6 0.9 20 -43 38 18 2 -1 0.9 0.2 -1 -6 34 -44 1 -1 0.7 0.2 37 2 49 -2 1 -1 0.2 0.6 14 -43 21 -40 2 -1 0.5 0.3 22 -16 42 39 2 -1 0.7 0.1 -15 -45 16 -4 2 -1 0.9 0.6 2 -14 50 -25 2 -1 0.1 0.1 -33 -38 9 0 2 -1 0.2 0.2 -13 -28 26 -28 2 -1 0.9 0.2 35 -38 37 6 1 -1 0.7 0.5 -2 -50 39 -27 2 -1 0.8 0.3 42 -47 40 -20 1 -1 0.4 0.1 9 -9 -10 -46 1 -1 0.4 0.9 -27 -28 45 12 2 -1 0.2 0.8 23 21 40 -18 2 -1 0.8 0.8 9 -49 46 6 2 -1 0.7 0.8 -12 -13 -35 -50 1 -1 0.4 0.2 37 -8 27 -24 1 -1 0.3 0.6 -19 -28 45 -31 2 -1 0.4 0.4 -26 -50 -14 -16 2 -1 0.6 0.9 18 -9 24 19 2 -1 0.2 0.6 17 7 -10 -27 1 -1 0.5 0.8 47 -40 15 -33 2 -1 0.5 0.1 19 6 46 2 1 -1 0.7 0.2 12 -30 27 -8 2 -1 0.5 0.9 31 -32 43 -41 2 -1 0.4 0.7 -35 -45 -27 -45 2 -1 0.2 0.6 45 -13 47 -13 2 -1 0.7 0.2 19 -27 -12 -48 1 -1 0.9 0.9 26 -31 20 -8 2 -1 0.8 0.6 27 24 35 12 1 -1 0.4 0.1 22 -20 30 3 2 -1 0.5 0.5 16 -31 38 -19 1 -1 0.4 0.2 47 44 38 18 1 -1 0.7 0.4 8 -39 50 -18 2 -1 0.7 0.4 19 -25 33 -41 1 -1 0.7 0.9 39 15 23 -42 1 -1 0.8 0.4 8 -39 21 -40 1 -1 0.2 0.7 5 4 47 13 2 -1 0.2 0.5 4 -4 20 -43 1 -1 0.5 0.6 -3 -34 48 34 2 -1 0.5 0.7 16 -11 34 14 2 -1 0.5 0.2 35 -2 27 -44 1 -1 0.4 0.6 -9 -35 24 -36 2 -1 0.8 0.2 28 -21 30 8 1 -1 0.4 0.6 43 -31 13 -33 1 -2 0.2 0.2 8 -22 43 35 2 -2 0.2 0.6 18 -12 -19 -32 1 -2 0.3 0.2 29 -37 28 19 2 -2 0.3 0.1 -39 -45 -7 -16 2 -2 0.3 0.9 34 12 49 25 2 -2 0.9 0.6 43 -25 50 -29 1 -2 0.5 0.1 -13 -35 21 -19 2 -2 0.2 0.5 22 -12 25 -28 2 -2 0.8 0.5 -12 -50 15 8 2 -2 0.2 0.5 50 -5 6 -22 1 -2 0.7 0.2 33 22 4 -37 1 -2 0.1 0.1 3 -47 -15 -25 1 -2 0.5 0.9 19 -34 39 -49 2 -2 0.8 0.3 29 19 4 -41 1 -2 0.9 0.9 26 2 17 5 2 -2 0.1 0.1 -12 -16 45 37 2 -2 0.2 0.9 7 -44 9 -42 2 -2 0.9 0.1 23 -24 39 14 2 -2 0.4 0.8 32 21 29 10 1 -2 0.8 0.3 -37 -49 18 -49 2 -2 0.5 0.4 26 -31 18 -30 1 -2 0.6 0.4 15 -27 19 10 2 -2 0.9 0.9 8 -21 38 33 2 -2 0.8 0.7 30 4 -14 -31 1 -2 0.7 0.4 20 17 21 -38 1 -2 0.4 0.9 21 -40 -11 -29 2 -2 0.2 0.4 25 -8 30 -3 2 -2 0.9 0.6 24 11 7 -12 1 -2 0.3 0.7 44 -17 -14 -48 1 -2 0.8 0.4 25 1 19 -14 1 -2 0.3 0.1 35 -2 4 -20 1 -2 0.2 0.7 2 -42 8 -14 2 -2 0.9 0.3 35 1 39 -50 1 -2 0.3 0.8 13 -4 33 -49 1 -2 0.9 0.2 24 -12 15 -5 1 -2 0.7 
0.1 23 -19 -20 -27 1 -2 0.4 0.5 24 -39 49 -6 2 -2 0.9 0.9 50 28 41 -19 1 -2 0.9 0.6 15 -28 -13 -22 1 -2 0.2 0.2 50 -6 47 42 2 -2 0.4 0.6 10 -38 0 -12 2 -2 0.1 0.7 39 -17 26 4 2 -2 0.2 0.1 38 -49 28 -22 1 -2 0.9 0.2 -11 -25 50 37 2 -2 0.4 0.7 30 -32 34 -39 2 -2 0.7 0.8 6 -22 -22 -50 1 -2 0.9 0.9 9 -14 40 -23 2 -2 0.8 0.6 -43 -46 20 13 2 -2 0.6 0.1 31 -39 15 -16 1 -2 0.8 0.9 -12 -23 15 0 2 -2 0.1 0.3 -4 -19 38 10 2 -2 0.9 0.6 37 -46 24 -27 1 -2 0.4 0.7 16 -32 34 -31 2 -2 0.9 0.9 36 27 14 -32 1 -2 0.2 0.7 1 -49 33 -48 2 -2 0.6 0.8 -7 -29 33 7 2 -2 0.4 0.2 9 -8 29 -24 1 -2 0.3 0.6 -16 -35 45 10 2 -2 0.5 0.6 49 -14 17 -13 1 -2 0.3 0.6 -4 -11 18 -10 2 -2 0.6 0.4 -20 -43 -8 -18 1 -2 0.5 0.7 15 -16 16 11 2 -2 0.1 0.7 32 8 -10 -12 1 -2 0.8 0.3 10 -35 2 -35 1 -2 0.9 0.4 -30 -41 9 -25 2 -2 0.1 0.2 -25 -37 -9 -17 2 -2 0.9 0.7 22 -5 34 -35 1 -2 0.9 0.3 -28 -37 -4 -42 2 -2 0.1 0.7 27 -34 9 -3 2 -2 0.6 0.9 16 6 -45 -49 1 -2 0.6 0.3 33 11 -1 -7 1 -2 0.7 0.5 42 -1 15 -42 1 -2 0.6 0.8 49 25 20 -29 1 -2 0.4 0.3 27 -38 -13 -19 1 -2 0.8 0.5 11 -11 -5 -41 1 -2 0.2 0.8 22 -3 1 -35 1 -2 0.1 0.9 15 -18 -14 -48 1 -2 0.1 0.6 47 33 48 -23 1 -2 0.7 0.1 39 -8 -24 -27 1 -2 0.9 0.7 16 -22 48 -27 2 -2 0.3 0.7 28 14 37 -33 1 -2 0.2 0.8 40 22 28 25 2 -2 0.9 0.2 9 -35 34 -50 1 -2 0.9 0.9 37 -45 40 -45 2 -2 0.9 0.5 -13 -33 30 -26 2 -2 0.5 0.6 32 -23 -3 -45 1 -2 0.1 0.5 34 9 47 -41 1 -2 0.5 0.4 -26 -35 35 24 2 -2 0.5 0.4 6 -15 -4 -47 1 -2 0.1 0.5 45 -28 16 -17 2 -2 0.2 0.4 47 -5 38 -39 2 -2 0.4 0.5 15 -41 40 -12 2 -2 0.5 0.5 49 -21 49 -38 1 -2 0.5 0.5 10 -5 45 -37 2 -2 0.5 0.7 24 19 3 -39 1 -2 0.5 0.9 19 3 -1 -37 1 -2 0.8 0.7 -9 -43 44 -32 2 -2 0.9 0.1 47 27 50 -22 1 -2 0.3 0.8 35 30 27 -32 1 -2 0.4 0.2 25 -25 29 28 2 -2 0.5 0.9 -38 -42 24 11 2 -2 0.5 0.1 -1 -38 46 -47 2 -2 0.2 0.6 26 2 12 -14 1 -2 0.3 0.2 -35 -42 28 7 2 -2 0.9 0.2 7 -37 21 6 1 -2 0.3 0.1 2 -29 40 -38 1 -2 0.1 0.5 7 -47 3 -18 2 -2 0.6 0.4 -14 -50 6 -49 2 -2 0.5 0.3 32 17 45 -31 1 -2 0.5 0.8 -10 -26 5 -48 2 -2 0.7 0.1 -8 -33 26 -10 2 -2 0.7 0.4 43 -26 32 -41 1 -2 0.1 0.1 30 -24 38 -28 1 -2 0.7 0.4 -21 -26 10 -22 2 -2 0.6 0.4 23 17 7 -32 1 -2 0.5 0.2 41 30 33 -37 1 -2 0.2 0.8 -28 -48 34 3 2 -2 0.4 0.2 -13 -30 47 33 2 -2 0.9 0.2 47 25 20 -43 1 -2 0.6 0.4 -3 -32 -7 -34 1 -2 0.2 0.5 -20 -49 2 -22 2 -2 0.7 0.9 9 -19 -12 -44 1 -2 0.7 0.8 1 -20 17 -1 2 -2 0.4 0.9 19 -38 -9 -48 1 -2 0.7 0.3 8 -5 47 38 2 -2 0.7 0.2 35 -4 16 6 1 -2 0.8 0.4 46 9 -27 -43 1 -2 0.2 0.7 -6 -28 40 31 2 -2 0.2 0.2 31 -17 44 21 2 -2 0.7 0.5 9 5 9 -2 1 -2 0.9 0.3 26 -23 14 9 1 -2 0.8 0.3 27 -12 -31 -35 1 -2 0.1 0.7 -23 -25 16 -27 2 -2 0.9 0.9 8 -48 2 -49 1 -2 0.3 0.6 19 -34 -20 -31 1 -2 0.4 0.3 26 -5 8 -31 1 -2 0.4 0.2 40 25 39 -17 1 -2 0.2 0.2 33 -33 40 31 2 -2 0.1 0.1 28 -43 10 -26 2 -2 0.6 0.9 14 6 3 -14 1 -2 0.9 0.1 -2 -37 12 -33 1 -2 0.9 0.5 35 26 44 39 2 -2 0.5 0.9 50 36 35 -13 1 -2 0.3 0.1 6 -44 -10 -36 1 -2 0.8 0.9 47 10 3 -12 1 -2 0.2 0.2 48 -43 -23 -31 2 -2 0.1 0.1 -27 -50 12 4 2 -2 0.3 0.7 33 2 48 7 2 -2 0.4 0.8 24 23 34 -36 1 -2 0.9 0.6 22 -30 25 21 2 -2 0.3 0.5 48 43 -11 -49 1 -2 0.6 0.4 10 -16 1 -9 2 -2 0.4 0.7 41 -32 28 -21 2 -2 0.6 0.9 45 25 14 -5 1 -2 0.4 0.3 43 -49 44 6 2 -2 0.1 0.9 36 -9 12 -30 2 -2 0.9 0.5 -37 -48 42 -29 2 -2 0.8 0.6 42 -42 2 -13 1 -2 0.8 0.3 30 -10 4 -40 1 -2 0.2 0.9 33 9 -21 -46 1 -2 0.3 0.7 -24 -33 -2 -20 2 -2 0.8 0.3 30 18 17 8 1 -2 0.1 0.6 23 -37 26 -39 2 -2 0.5 0.4 31 -50 49 -3 2 -2 0.4 0.1 -4 -33 41 -8 2 -2 0.7 0.1 16 -46 14 -35 1 -2 0.6 0.9 -4 -34 46 2 2 -2 0.6 0.1 -13 -21 10 -8 1 -2 0.9 0.9 3 2 48 -35 2 -2 0.4 0.6 13 -4 43 -43 2 -2 0.6 0.6 -19 -43 24 
-18 2 -2 0.2 0.2 20 -14 50 -1 2 -2 0.6 0.5 -42 -48 -24 -27 2 -2 0.8 0.2 31 -20 4 -44 1 -2 0.8 0.1 8 2 27 -28 1 -2 0.4 0.1 29 1 3 -22 1 -2 0.1 0.8 27 -34 9 -7 2 -2 0.4 0.4 14 -7 45 -43 2 -2 0.9 0.2 7 5 38 -33 1 -2 0.5 0.9 37 -2 42 -26 2 -2 0.4 0.1 45 3 13 -34 1 -2 0.9 0.6 47 -28 39 -15 1 -2 0.4 0.2 1 -3 26 -8 2 -2 0.9 0.7 34 22 23 13 1 -2 0.5 0.9 -2 -34 6 1 2 -2 0.1 0.2 35 -36 44 -29 2 -2 0.9 0.1 -5 -20 42 36 2 -2 0.1 0.3 2 1 35 -20 1 -2 0.6 0.6 -24 -39 39 -9 2 -2 0.9 0.8 6 -35 13 -49 1 -2 0.5 0.7 17 -30 1 -46 2 -2 0.5 0.2 30 -43 34 9 2 -2 0.3 0.1 49 -42 13 -1 2 -2 0.6 0.2 50 19 -16 -29 1 -2 0.1 0.2 36 6 34 3 1 -2 0.9 0.7 -8 -26 12 1 2 -2 0.7 0.3 -15 -35 8 -35 2 -2 0.1 0.3 -27 -47 40 28 2 -2 0.7 0.8 12 -15 -29 -38 1 -2 0.7 0.9 26 -25 -5 -22 1 -2 0.1 0.3 50 35 49 17 1 -2 0.6 0.7 0 -28 26 -12 2 -2 0.6 0.9 8 -8 8 1 2 -2 0.4 0.4 15 13 35 -16 1 -2 0.6 0.8 32 8 47 -8 2 -2 0.4 0.4 48 -7 13 -1 1 -2 0.9 0.9 7 -12 28 -24 2 -2 0.2 0.3 36 11 34 -24 1 -2 0.2 0.1 -36 -46 -34 -46 2 -2 0.3 0.4 41 -28 11 -5 2 -2 0.3 0.9 35 31 22 12 1 -2 0.2 0.7 34 -47 7 4 2 -2 0.3 0.5 -4 -7 23 -36 1 -2 0.6 0.7 30 12 -6 -22 1 -2 0.4 0.5 28 27 12 -35 1 -2 0.5 0.2 -7 -28 27 17 2 -2 0.1 0.8 50 -17 4 -16 2 -2 0.1 0.6 -15 -30 34 19 2 -2 0.8 0.7 19 -19 -17 -32 1 -2 0.3 0.7 24 14 -8 -31 1 -2 0.4 0.3 34 -16 12 -17 1 -2 0.3 0.7 25 -35 43 41 2 -2 0.7 0.8 37 11 39 26 2 -2 0.1 0.7 21 -29 39 -41 2 -2 0.4 0.5 25 14 50 -19 2 -2 0.7 0.9 24 -14 18 -10 1 -2 0.7 0.7 37 -31 13 -1 1 -2 0.8 0.4 3 -26 -20 -48 1 -2 0.9 0.8 35 -14 24 -40 1 -2 0.3 0.5 7 -47 31 -21 2 -2 0.1 0.6 28 -27 -34 -46 2 -2 0.9 0.6 -18 -41 37 -44 2 -2 0.8 0.2 -22 -28 -5 -46 1 -2 0.2 0.4 12 -39 12 -3 2 -2 0.1 0.7 29 -13 -17 -42 1 -2 0.9 0.1 31 26 6 -23 1 -2 0.4 0.2 42 -11 47 -29 1 -2 0.9 0.8 -9 -29 -4 -47 2 -2 0.1 0.9 31 -50 42 2 2 -2 0.6 0.1 6 -14 26 -46 1 -2 0.8 0.9 35 -46 -10 -35 1 -2 0.1 0.8 11 -6 26 -13 2 -2 0.8 0.8 31 -2 22 -15 1 -2 0.7 0.8 22 6 41 -22 2 -2 0.8 0.6 -42 -47 33 -9 2 -2 0.5 0.9 15 -25 1 -22 1 -2 0.8 0.8 -40 -50 11 4 2 -2 0.2 0.7 50 34 5 -24 1 -2 0.1 0.6 7 -17 49 -1 2 -2 0.7 0.8 27 -50 17 -10 2 -2 0.2 0.7 47 -33 23 -46 2 -2 0.6 0.5 27 -37 -18 -31 1 -2 0.5 0.9 -1 -49 -2 -17 2 -2 0.5 0.2 -19 -28 46 -47 2 -2 0.5 0.4 -7 -20 38 4 2 -2 0.3 0.4 20 -4 27 13 2 -2 0.8 0.1 -15 -16 45 -10 2 -2 0.6 0.5 19 1 -21 -42 1 -2 0.1 0.3 5 -4 20 -8 2 -2 0.1 0.7 -36 -37 29 22 2 -2 0.4 0.5 24 0 11 -20 1 -2 0.5 0.3 -42 -48 24 -7 2 -2 0.1 0.7 29 6 15 -35 2 -2 0.4 0.6 -13 -47 48 -2 2 -2 0.5 0.1 36 4 21 -24 1 -2 0.8 0.4 -13 -38 38 -8 2 -2 0.5 0.1 41 -22 -1 -12 1 -2 0.7 0.4 40 30 16 -18 1 -2 0.4 0.3 11 -9 47 38 2 -2 0.4 0.7 -10 -17 2 -11 2 -2 0.9 0.1 -2 -20 28 13 2 -2 0.9 0.6 -8 -38 45 -14 2 -2 0.9 0.1 5 -8 0 -37 2 -2 0.3 0.7 -10 -24 46 19 2 -2 0.9 0.7 -22 -25 -14 -16 2 -2 0.8 0.4 -21 -28 44 -4 2 -2 0.2 0.7 18 -49 46 -17 2 -2 0.2 0.6 50 -49 26 -18 2 -2 0.5 0.6 30 17 50 38 2 -2 0.4 0.7 43 -31 0 -27 1 -2 0.3 0.3 21 -22 35 -49 1 -2 0.2 0.3 25 -43 43 -50 2 -2 0.8 0.5 8 -22 4 -39 1 -2 0.8 0.6 -13 -20 21 -18 2 -2 0.4 0.6 -8 -31 1 -9 2 -2 0.2 0.1 20 -25 -3 -23 1 -2 0.7 0.3 41 -47 46 -35 1 -2 0.2 0.8 23 -48 31 -9 2 -2 0.9 0.8 20 -38 -19 -38 1 -2 0.8 0.5 27 12 47 35 2 -2 0.5 0.8 43 8 19 -45 1 -2 0.7 0.4 -7 -14 7 5 2 -2 0.9 0.5 41 -12 48 29 1 -2 0.5 0.8 29 -34 -21 -49 1 -2 0.1 0.4 1 -6 49 -18 2 -2 0.2 0.2 49 -40 -15 -20 1 -2 0.8 0.6 40 -15 37 -8 1 -2 0.5 0.9 7 -48 -17 -50 1 -2 0.4 0.3 40 15 -6 -49 1 -2 0.4 0.9 36 14 13 -29 1 -3 0.5 0.5 46 32 36 10 1 -3 0.3 0.1 0 -26 -34 -42 1 -3 0.7 0.1 49 25 -29 -39 1 -3 0.5 0.5 3 -18 8 -11 2 -3 0.4 0.8 -12 -40 -17 -50 2 -3 0.2 0.3 49 -14 -33 -39 1 -3 0.3 0.1 -18 
-19 30 8 2 -3 0.2 0.4 48 -4 49 30 2 -3 0.1 0.3 42 29 14 -7 1 -3 0.1 0.3 -20 -40 39 1 2 -3 0.9 0.2 37 -25 4 -9 1 -3 0.8 0.7 -12 -46 25 -11 2 -3 0.6 0.6 15 -41 17 -40 2 -3 0.7 0.8 0 -4 32 9 2 -3 0.2 0.6 47 -4 13 -41 1 -3 0.4 0.7 11 -7 -11 -30 1 -3 0.8 0.3 38 -42 -26 -39 1 -3 0.1 0.7 47 -32 -11 -34 2 -3 0.3 0.3 33 -7 32 -27 2 -3 0.7 0.6 -9 -10 33 -47 2 -3 0.1 0.9 17 -23 11 -2 2 -3 0.9 0.8 -2 -49 36 4 2 -3 0.3 0.3 11 -19 2 -38 1 -3 0.6 0.8 -16 -20 25 -16 2 -3 0.2 0.1 49 -43 18 -39 1 -3 0.3 0.4 31 -49 -12 -50 1 -3 0.5 0.3 42 1 49 -27 1 -3 0.5 0.4 23 -21 29 -47 1 -3 0.3 0.8 0 -35 6 -17 2 -3 0.3 0.4 29 -22 35 6 2 -3 0.7 0.7 -28 -35 -20 -34 1 -3 0.8 0.1 2 -25 39 -5 1 -3 0.4 0.5 18 -37 39 -40 2 -3 0.8 0.7 19 -35 22 1 2 -3 0.5 0.6 -32 -50 -2 -19 2 -3 0.7 0.5 25 -47 35 0 1 -3 0.8 0.5 -12 -27 36 10 2 -3 0.9 0.3 28 9 12 -18 1 -3 0.7 0.2 38 20 42 25 2 -3 0.4 0.7 36 -20 21 -12 2 -3 0.7 0.4 25 -36 -9 -24 1 -3 0.9 0.6 34 -24 29 27 2 -3 0.7 0.5 42 -14 49 30 2 -3 0.7 0.6 -12 -18 -10 -49 1 -3 0.1 0.3 -8 -30 29 -11 2 -3 0.6 0.2 -13 -42 42 -1 2 -3 0.4 0.1 -14 -31 -23 -46 1 -3 0.5 0.9 37 -32 33 15 2 -3 0.9 0.8 -6 -9 43 -27 2 -3 0.3 0.3 11 -49 39 -27 1 -3 0.7 0.8 19 -28 8 -23 1 -3 0.5 0.4 -32 -46 22 -48 2 -3 0.7 0.2 22 -30 36 30 2 -3 0.3 0.3 13 -4 10 -36 1 -3 0.5 0.6 19 -42 35 -34 2 -3 0.1 0.2 49 -19 -16 -47 1 -3 0.1 0.5 50 37 32 -17 1 -3 0.1 0.3 -43 -45 28 -25 2 -3 0.3 0.3 24 -7 34 -45 1 -3 0.1 0.1 39 21 -22 -36 1 -3 0.4 0.7 28 24 -26 -33 1 -3 0.6 0.8 -13 -32 13 -34 2 -3 0.4 0.2 -42 -50 20 3 2 -3 0.4 0.9 41 32 35 9 1 -3 0.5 0.2 18 -38 -48 -50 1 -3 0.4 0.6 49 26 32 14 1 -3 0.3 0.1 -3 -49 18 -36 2 -3 0.9 0.7 42 33 -20 -30 1 -3 0.4 0.9 -5 -27 -15 -42 2 -3 0.8 0.9 -15 -43 3 -10 2 -3 0.7 0.3 -7 -43 -23 -27 1 -3 0.1 0.6 40 -50 10 -43 2 -3 0.2 0.3 11 6 37 29 2 -3 0.7 0.1 50 -3 28 -5 1 -3 0.5 0.6 47 43 16 0 1 -3 0.9 0.1 21 -2 48 -50 1 -3 0.5 0.4 9 -5 -1 -2 2 -3 0.5 0.1 -4 -36 28 16 2 -3 0.8 0.5 -1 -3 44 5 2 -3 0.5 0.3 5 -7 33 11 2 -3 0.8 0.8 50 -21 47 -8 1 -3 0.6 0.5 -11 -12 -5 -14 2 -3 0.8 0.1 49 -23 8 -17 1 -3 0.4 0.8 48 -15 13 -8 1 -3 0.3 0.6 46 -47 -9 -44 2 -3 0.5 0.1 -34 -44 37 -25 2 -3 0.7 0.3 49 -43 26 -3 1 -3 0.8 0.1 15 -30 37 -9 1 -3 0.6 0.3 49 -26 50 -41 1 -3 0.1 0.2 8 -6 11 -31 1 -3 0.9 0.3 34 29 -7 -50 1 -3 0.3 0.4 46 12 44 7 1 -3 0.7 0.7 31 5 37 -5 1 -3 0.5 0.4 15 -24 50 -33 2 -3 0.6 0.6 -12 -17 13 -20 2 -3 0.3 0.3 40 -50 28 13 2 -3 0.1 0.5 21 -6 38 -32 2 -3 0.2 0.8 -30 -40 -3 -5 2 -3 0.4 0.6 12 -50 21 -43 2 -3 0.3 0.5 -5 -32 30 -4 2 -3 0.3 0.5 33 -6 11 -3 2 -3 0.6 0.4 20 -6 -11 -40 1 -3 0.5 0.9 19 -19 38 30 2 -3 0.2 0.5 41 24 14 -44 1 -3 0.8 0.4 -26 -49 20 -20 2 -3 0.8 0.4 11 -5 -22 -26 1 -3 0.8 0.6 44 -34 5 4 1 -3 0.7 0.2 25 10 32 -7 1 -3 0.5 0.2 45 31 24 11 1 -3 0.9 0.6 47 -23 27 -21 1 -3 0.7 0.9 42 27 32 16 2 -3 0.9 0.2 37 26 14 -19 1 -3 0.3 0.2 28 -44 47 -3 2 -3 0.2 0.6 42 10 30 23 1 -3 0.7 0.2 10 -48 -8 -18 1 -3 0.1 0.7 39 21 45 42 1 -3 0.1 0.8 28 -36 32 -46 2 -3 0.9 0.1 -35 -48 12 -15 2 -3 0.3 0.6 -1 -14 35 28 2 -3 0.2 0.5 34 1 48 -44 2 -3 0.3 0.2 34 -27 49 21 2 -3 0.7 0.6 -18 -49 -18 -46 1 -3 0.5 0.1 8 -47 29 -7 2 -3 0.9 0.7 47 -45 7 -7 1 -3 0.8 0.7 48 -30 5 -39 1 -3 0.3 0.7 9 -26 32 -35 2 -3 0.1 0.6 0 -16 33 -9 2 -3 0.1 0.6 35 19 21 -46 1 -3 0.3 0.6 -10 -44 -5 -9 2 -3 0.8 0.5 -24 -44 20 -43 2 -3 0.7 0.2 21 -37 19 -35 1 -3 0.6 0.6 39 31 -23 -26 1 -3 0.2 0.9 40 -47 45 36 2 -3 0.6 0.8 27 -6 24 22 1 -3 0.1 0.5 38 -45 39 -7 2 -3 0.3 0.5 -1 -2 11 -50 1 -3 0.5 0.7 -38 -42 30 -31 2 -3 0.2 0.1 29 8 28 -12 1 -3 0.1 0.5 10 -3 44 1 2 -3 0.2 0.2 29 27 41 -21 1 -3 0.6 0.1 24 -35 -6 -37 1 -3 0.1 0.7 14 -39 21 -45 
2 -3 0.3 0.6 -26 -39 26 -43 2 -3 0.1 0.4 7 -17 -35 -45 1 -3 0.2 0.8 16 -50 46 -30 2 -3 0.6 0.9 29 -47 -15 -17 1 -3 0.8 0.9 46 -27 45 -5 2 -3 0.4 0.4 12 11 -5 -47 1 -3 0.4 0.1 48 42 18 -36 1 -3 0.5 0.3 -11 -23 50 -35 2 -3 0.4 0.5 29 -35 47 -21 2 -3 0.3 0.5 19 -21 -40 -49 1 -3 0.4 0.1 42 -1 -37 -50 1 -3 0.4 0.9 13 -17 -2 -11 1 -3 0.2 0.7 37 -4 -19 -22 1 -3 0.9 0.5 33 -28 33 -6 1 -3 0.1 0.7 15 -28 -41 -49 1 -3 0.4 0.7 41 -40 48 39 2 -3 0.4 0.1 28 0 32 26 2 -3 0.8 0.1 -13 -27 -19 -28 1 -3 0.6 0.6 4 -13 31 1 2 -3 0.2 0.5 -18 -42 47 26 2 -3 0.4 0.1 43 40 -20 -28 1 -3 0.6 0.6 14 -50 -34 -35 1 -3 0.3 0.3 -2 -37 50 -41 2 -3 0.9 0.8 24 18 46 -48 1 -3 0.5 0.5 7 2 41 -23 2 -3 0.8 0.6 26 20 29 24 1 -3 0.4 0.7 31 -26 31 -8 2 -3 0.9 0.1 14 -1 -33 -50 1 -3 0.4 0.2 -28 -30 37 -41 2 -3 0.3 0.6 41 -42 37 29 2 -3 0.5 0.6 29 -50 42 6 2 -3 0.1 0.7 31 -42 47 -12 2 -3 0.2 0.9 22 -36 -2 -5 1 -3 0.2 0.7 49 21 24 -25 1 -3 0.8 0.5 48 31 7 -21 1 -3 0.2 0.3 -12 -38 14 -22 2 -3 0.7 0.5 39 27 12 -28 1 -3 0.9 0.7 4 -26 9 -16 1 -3 0.4 0.6 -8 -37 34 16 2 -3 0.7 0.4 3 -50 2 -27 1 -3 0.2 0.1 4 -14 27 26 2 -3 0.9 0.7 -33 -36 43 -36 2 -3 0.1 0.2 -27 -40 22 5 2 -3 0.4 0.7 31 -13 6 -45 1 -3 0.3 0.5 24 -3 -38 -44 1 -3 0.7 0.7 48 -46 3 -41 1 -3 0.5 0.7 2 -9 30 2 2 -3 0.1 0.7 46 14 26 -49 1 -3 0.8 0.5 -34 -42 48 24 2 -3 0.8 0.1 37 -25 37 -14 1 -3 0.1 0.2 38 30 12 -2 1 -3 0.2 0.3 -3 -50 35 -27 2 -3 0.2 0.9 -28 -39 48 5 2 -3 0.4 0.9 27 -9 -12 -30 1 -3 0.6 0.1 -7 -27 25 7 2 -3 0.7 0.7 -36 -42 9 -27 2 -3 0.2 0.6 22 -38 40 11 2 -3 0.4 0.4 42 -45 50 -34 2 -3 0.3 0.5 50 -6 38 -40 1 -3 0.9 0.4 40 -11 40 -18 1 -3 0.3 0.6 -7 -13 34 -25 2 -3 0.5 0.6 36 -14 -7 -15 1 -3 0.5 0.6 36 5 -11 -28 1 -3 0.2 0.2 43 31 9 -5 1 -3 0.9 0.2 0 -42 34 -49 1 -3 0.4 0.4 31 13 4 -4 1 -3 0.8 0.1 -1 -30 -21 -38 1 -3 0.6 0.1 28 -9 24 -43 1 -3 0.9 0.9 -4 -21 13 -24 2 -3 0.2 0.7 7 -50 48 33 2 -3 0.7 0.7 -17 -46 33 -40 2 -3 0.2 0.5 22 -43 31 -14 2 -3 0.5 0.9 -4 -14 18 -18 2 -3 0.6 0.4 -10 -13 -45 -48 1 -3 0.3 0.4 9 -32 -15 -44 2 -3 0.3 0.7 41 7 0 -1 1 -3 0.3 0.9 5 -19 -20 -28 1 -3 0.5 0.1 41 -42 -30 -31 1 -3 0.8 0.7 -5 -45 30 -48 2 -3 0.8 0.4 -8 -44 23 -5 2 -3 0.5 0.1 -22 -28 -36 -39 1 -3 0.7 0.6 -2 -48 33 -28 2 -3 0.3 0.7 15 -11 7 -36 2 -3 0.6 0.4 25 -25 50 16 2 -3 0.6 0.2 19 -37 34 -8 1 -3 0.2 0.9 -8 -10 30 14 2 -3 0.4 0.1 31 -23 44 -45 1 -3 0.6 0.3 2 -28 44 -47 1 -3 0.7 0.6 -21 -47 -4 -9 2 -3 0.1 0.3 47 33 -45 -47 1 -3 0.2 0.1 43 12 34 2 1 -3 0.3 0.3 26 6 -21 -27 1 -3 0.2 0.2 -30 -41 48 9 2 -3 0.6 0.5 32 29 27 25 1 -3 0.3 0.5 18 -16 4 -34 1 -3 0.8 0.3 39 33 3 -13 1 -3 0.7 0.2 5 -45 1 -3 1 -3 0.1 0.1 -33 -34 32 7 2 -3 0.5 0.7 40 23 4 -15 1 -3 0.1 0.7 26 -21 16 -27 2 -3 0.7 0.4 -18 -27 42 -40 2 -3 0.6 0.4 -8 -49 48 -36 2 -3 0.9 0.8 29 -3 -43 -49 1 -3 0.5 0.9 34 -5 41 -15 2 -3 0.2 0.1 -10 -16 34 -22 2 -3 0.2 0.8 27 22 31 -37 2 -3 0.9 0.9 -17 -29 37 -22 2 -3 0.5 0.9 -9 -32 45 38 2 -3 0.8 0.6 36 10 12 -22 1 -3 0.3 0.2 -6 -32 19 -36 2 -3 0.2 0.7 7 -6 16 -35 2 -3 0.2 0.6 45 -43 37 36 2 -3 0.3 0.6 41 -33 23 -14 2 -3 0.5 0.6 -7 -28 -2 -24 2 -3 0.8 0.7 44 12 14 -14 1 -3 0.9 0.6 32 -49 46 -39 1 -3 0.8 0.2 -34 -37 -6 -41 2 -3 0.3 0.1 47 4 -6 -42 1 -3 0.9 0.9 37 -17 29 -26 1 -3 0.6 0.3 -16 -18 44 -46 1 -3 0.4 0.5 4 -45 7 -12 2 -3 0.5 0.8 -27 -48 38 -50 2 -3 0.6 0.6 10 -27 28 -30 2 -3 0.3 0.8 -18 -42 -22 -31 1 -3 0.8 0.7 39 -12 -34 -49 1 -3 0.6 0.7 46 -37 43 4 2 -3 0.2 0.6 30 -6 -30 -32 1 -3 0.5 0.2 38 22 36 -26 1 -3 0.3 0.6 -27 -39 50 46 2 -3 0.8 0.2 -20 -27 50 -25 2 -3 0.9 0.5 27 -33 38 -23 1 -3 0.4 0.9 24 -43 -14 -15 1 -3 0.4 0.4 33 17 33 -1 1 -3 0.8 0.7 28 25 -30 -48 1 -3 0.7 
0.4 -12 -31 5 -46 2 -3 0.8 0.2 31 9 47 3 1 -3 0.4 0.8 -9 -18 20 5 2 -3 0.9 0.9 -2 -49 8 -25 2 -3 0.8 0.9 33 -19 -3 -35 1 -3 0.6 0.3 36 32 25 17 1 -3 0.4 0.5 12 4 28 12 2 -3 0.3 0.6 22 6 5 -16 1 -3 0.1 0.4 44 0 15 -35 1 -3 0.6 0.9 17 14 -8 -10 1 -3 0.6 0.4 40 15 16 -32 1 -3 0.2 0.4 -1 -50 -4 -17 2 -3 0.4 0.4 -15 -39 44 5 2 -3 0.6 0.7 41 -48 14 -9 2 -3 0.2 0.6 11 -42 16 -2 2 -3 0.6 0.3 42 -5 33 -8 1 -3 0.2 0.5 45 36 20 -31 1 -3 0.8 0.8 24 18 35 32 2 -3 0.2 0.3 47 22 33 -19 1 -3 0.8 0.8 9 0 -13 -20 1 -3 0.8 0.6 -27 -42 42 -45 2 -3 0.3 0.4 32 8 31 18 2 -3 0.6 0.6 49 -21 39 12 2 -3 0.5 0.8 48 24 -5 -49 1 -4 0.9 0.3 -20 -48 17 6 2 -4 0.8 0.7 13 -33 -2 -13 1 -4 0.8 0.6 -24 -26 28 -48 2 -4 0.6 0.2 1 -19 -12 -46 1 -4 0.3 0.2 10 -44 -26 -38 1 -4 0.9 0.8 -12 -20 15 -50 2 -4 0.1 0.2 12 -27 34 13 2 -4 0.2 0.3 10 -30 23 -15 2 -4 0.7 0.5 29 -13 38 9 2 -4 0.1 0.9 23 16 45 -27 2 -4 0.6 0.3 11 -46 9 -46 1 -4 0.1 0.5 -36 -43 32 15 2 -4 0.8 0.5 -7 -35 22 -30 2 -4 0.6 0.7 50 24 -3 -43 1 -4 0.6 0.5 49 4 -41 -46 1 -4 0.7 0.5 39 38 45 -15 1 -4 0.1 0.8 50 -2 7 -26 2 -4 0.8 0.3 23 -14 34 6 2 -4 0.2 0.8 22 3 -8 -23 2 -4 0.5 0.5 33 -9 -19 -25 1 -4 0.8 0.7 48 6 32 8 1 -4 0.7 0.6 48 -26 48 5 2 -4 0.7 0.6 9 -25 -22 -39 1 -4 0.5 0.6 41 -21 -28 -41 1 -4 0.8 0.7 19 11 47 -32 1 -4 0.6 0.7 22 0 -21 -44 1 -4 0.9 0.1 -9 -44 50 -32 2 -4 0.3 0.3 1 -22 32 12 2 -4 0.4 0.7 22 -16 27 -12 2 -4 0.7 0.6 14 -10 27 -8 2 -4 0.4 0.3 50 31 2 -2 1 -4 0.1 0.8 47 -36 -22 -32 2 -4 0.7 0.9 1 -3 37 -30 2 -4 0.3 0.9 36 8 23 10 1 -4 0.1 0.3 28 27 -36 -45 1 -4 0.5 0.3 42 -27 46 1 2 -4 0.9 0.5 28 10 29 20 2 -4 0.1 0.9 10 -17 25 18 2 -4 0.2 0.3 37 23 -24 -41 1 -4 0.2 0.2 18 -40 42 -48 2 -4 0.3 0.6 18 -27 41 39 2 -4 0.4 0.7 21 6 -1 -50 1 -4 0.4 0.4 15 14 20 -35 1 -4 0.1 0.8 13 -49 44 33 2 -4 0.3 0.2 -31 -42 30 -45 2 -4 0.6 0.4 -12 -23 22 -42 2 -4 0.9 0.1 -26 -40 -25 -30 2 -4 0.7 0.2 38 -27 34 -13 1 -4 0.9 0.1 42 -31 45 -1 1 -4 0.5 0.8 41 -3 37 -12 1 -4 0.2 0.5 31 2 17 -44 1 -4 0.8 0.8 -4 -31 16 -15 2 -4 0.9 0.3 24 1 15 -4 1 -4 0.8 0.4 9 -22 28 -5 2 -4 0.9 0.3 47 -24 17 -16 1 -4 0.8 0.4 -12 -14 -13 -21 2 -4 0.1 0.9 1 -50 4 -36 2 -4 0.6 0.7 10 -36 -16 -22 1 -4 0.5 0.2 26 13 24 -31 1 -4 0.4 0.9 1 -15 18 -41 2 -4 0.1 0.5 41 32 -33 -47 1 -4 0.6 0.4 4 -25 -12 -50 1 -4 0.6 0.2 -2 -22 25 5 2 -4 0.5 0.3 -2 -8 34 31 2 -4 0.9 0.9 6 -26 -6 -45 1 -4 0.1 0.2 23 0 38 -6 2 -4 0.8 0.7 -25 -40 9 -37 1 -4 0.5 0.2 -5 -14 48 -18 2 -4 0.2 0.1 6 -34 0 -50 1 -4 0.9 0.1 22 -2 -7 -47 1 -4 0.5 0.5 41 29 41 23 1 -4 0.3 0.5 2 -4 20 -24 2 -4 0.5 0.9 -14 -42 2 -13 2 -4 0.9 0.9 46 16 49 2 1 -4 0.9 0.3 49 39 -16 -29 1 -4 0.1 0.2 36 -28 40 -15 2 -4 0.5 0.9 3 -7 35 15 2 -4 0.1 0.4 3 -18 32 -12 2 -4 0.7 0.6 22 -34 18 4 2 -4 0.8 0.2 5 -6 16 -14 1 -4 0.3 0.9 24 9 -9 -32 1 -4 0.4 0.9 23 13 11 -46 1 -4 0.9 0.5 38 -6 -5 -32 1 -4 0.7 0.5 -23 -48 -26 -32 2 -4 0.2 0.8 19 -14 48 31 2 -4 0.3 0.2 50 39 33 30 1 -4 0.5 0.8 -33 -50 35 -45 2 -4 0.8 0.3 30 -5 32 -33 1 -4 0.1 0.8 -31 -40 35 4 2 -4 0.9 0.1 41 10 4 -49 1 -4 0.7 0.3 -9 -30 41 -26 2 -4 0.9 0.5 -20 -31 -25 -41 1 -4 0.3 0.8 45 -15 28 -17 2 -4 0.5 0.4 -24 -37 36 32 2 -4 0.8 0.1 18 -45 19 -10 1 -4 0.8 0.7 37 -19 -10 -40 1 -4 0.1 0.6 46 13 -30 -39 1 -4 0.3 0.4 15 -4 46 17 2 -4 0.4 0.4 -12 -32 45 38 2 -4 0.5 0.4 36 -19 -11 -13 1 -4 0.3 0.5 47 32 -21 -47 1 -4 0.3 0.3 -2 -25 -33 -34 1 -4 0.4 0.2 39 1 25 -35 1 -4 0.3 0.9 32 -32 35 21 2 -4 0.2 0.3 -9 -26 36 -4 2 -4 0.9 0.1 -10 -21 9 -3 2 -4 0.2 0.7 47 28 11 -22 1 -4 0.1 0.8 38 -4 39 38 2 -4 0.8 0.2 -28 -36 36 17 2 -4 0.6 0.6 10 -50 -2 -42 1 -4 0.7 0.4 37 -5 5 -44 1 -4 0.9 0.6 22 -36 26 -12 1 -4 0.8 
0.5 11 -2 49 -41 1 -4 0.2 0.9 8 -25 29 -49 2 -4 0.6 0.3 -39 -49 6 -42 2 -4 0.7 0.9 44 -3 44 -33 2 -4 0.2 0.4 48 43 46 -34 1 -4 0.2 0.4 33 -33 -16 -36 1 -4 0.5 0.4 47 -48 19 12 2 -4 0.3 0.8 50 49 18 -3 1 -4 0.2 0.6 -11 -34 24 -21 2 -4 0.2 0.3 -33 -39 -20 -21 2 -4 0.5 0.8 -23 -46 -30 -43 2 -4 0.3 0.5 32 23 34 7 2 -4 0.3 0.5 -15 -50 -32 -37 2 -4 0.1 0.9 7 -25 46 7 2 -4 0.2 0.9 -40 -48 19 -1 2 -4 0.4 0.6 41 -21 -23 -25 1 -4 0.9 0.5 -29 -36 13 -27 2 -4 0.4 0.7 37 34 32 21 1 -4 0.2 0.3 21 2 -39 -49 1 -4 0.5 0.9 39 3 6 -24 1 -4 0.3 0.9 37 -19 -2 -17 1 -4 0.4 0.2 50 -8 -18 -41 1 -4 0.5 0.7 -7 -29 16 -41 2 -4 0.2 0.2 27 -32 20 -31 1 -4 0.3 0.5 32 -7 24 -11 2 -4 0.5 0.6 -25 -40 -28 -33 2 -4 0.4 0.3 42 30 45 -19 1 -4 0.8 0.4 25 -26 -4 -20 1 -4 0.4 0.3 9 -50 36 -14 2 -4 0.2 0.1 6 -30 -8 -33 1 -4 0.7 0.1 4 -14 3 -12 1 -4 0.4 0.7 32 10 47 -36 2 -4 0.5 0.4 36 15 24 -3 1 -4 0.8 0.2 23 7 10 -16 1 -4 0.6 0.7 14 -44 19 14 2 -4 0.7 0.4 -27 -39 -41 -43 2 -4 0.4 0.6 50 -19 -25 -42 1 -4 0.2 0.6 1 -34 29 -29 2 -4 0.2 0.3 16 -49 7 -32 1 -4 0.6 0.6 5 -39 12 -43 1 -4 0.4 0.2 28 -36 32 -24 1 -4 0.9 0.9 17 -9 5 0 1 -4 0.4 0.4 23 -47 -9 -37 1 -4 0.4 0.9 45 -44 28 -37 2 -4 0.8 0.6 21 12 -14 -17 1 -4 0.1 0.2 33 16 37 -24 1 -4 0.7 0.9 -31 -35 50 13 2 -4 0.5 0.7 -4 -17 -24 -35 1 -4 0.2 0.3 45 40 14 -15 1 -4 0.1 0.5 39 -10 -13 -21 1 -4 0.4 0.8 -3 -24 27 15 2 -4 0.2 0.5 16 0 37 -46 2 -4 0.6 0.4 3 -41 49 27 2 -4 0.7 0.6 21 5 11 -8 1 -4 0.6 0.4 48 -4 -12 -37 1 -4 0.9 0.7 18 -49 30 -22 2 -4 0.2 0.6 -28 -32 23 -35 2 -4 0.3 0.9 48 -41 40 -27 2 -4 0.6 0.9 1 -1 4 -30 1 -4 0.1 0.5 1 0 33 3 2 -4 0.5 0.9 -8 -33 33 -46 2 -4 0.1 0.4 -29 -37 22 4 2 -4 0.4 0.3 20 -8 48 -12 2 -4 0.9 0.2 15 -38 36 2 1 -4 0.1 0.3 3 -37 34 -22 2 -4 0.1 0.4 25 -48 26 -20 2 -4 0.7 0.5 -1 -50 36 -11 2 -4 0.8 0.3 50 -25 49 48 2 -4 0.3 0.9 -4 -36 29 -5 2 -4 0.8 0.1 47 21 12 -33 1 -4 0.1 0.6 45 -31 33 26 2 -4 0.5 0.7 37 -8 7 3 1 -4 0.1 0.5 12 -29 19 -36 1 -4 0.1 0.4 34 27 -14 -26 1 -4 0.1 0.1 32 30 23 -46 1 -4 0.7 0.7 45 -14 33 -40 1 -4 0.3 0.7 -1 -22 19 -46 2 -4 0.7 0.9 -30 -45 -14 -33 2 -4 0.3 0.9 -39 -44 15 -36 2 -4 0.7 0.1 23 -35 13 -35 1 -4 0.2 0.3 16 -42 48 -1 2 -4 0.2 0.1 20 -38 38 -7 2 -4 0.1 0.9 -33 -38 47 -2 2 -4 0.3 0.9 47 -44 19 -36 2 -4 0.7 0.4 40 27 49 46 2 -4 0.5 0.3 -3 -42 24 9 2 -4 0.5 0.9 9 -44 15 -19 2 -4 0.6 0.3 -19 -33 -24 -26 2 -4 0.4 0.5 -32 -48 27 -1 2 -4 0.1 0.4 38 -15 -26 -35 2 -4 0.1 0.2 39 -6 -14 -22 1 -4 0.8 0.5 40 -35 17 -21 1 -4 0.6 0.1 24 17 7 -28 1 -4 0.4 0.5 36 12 45 -10 1 -4 0.2 0.2 49 -23 -6 -18 2 -4 0.7 0.1 47 -22 45 -43 1 -4 0.1 0.3 29 17 21 16 1 -4 0.2 0.7 16 -33 16 -37 2 -4 0.5 0.8 7 -9 15 -44 2 -4 0.8 0.2 40 -24 -4 -45 1 -4 0.2 0.6 -11 -34 21 -17 2 -4 0.8 0.6 7 -32 34 27 2 -4 0.2 0.3 -17 -40 46 -45 2 -4 0.6 0.8 16 -6 6 -44 1 -4 0.2 0.7 17 6 12 8 1 -4 0.5 0.4 -2 -23 34 -38 2 -4 0.9 0.1 -30 -34 19 -45 2 -4 0.9 0.6 42 -23 44 -46 1 -4 0.5 0.8 40 -24 -16 -35 1 -4 0.4 0.1 11 -1 -11 -38 1 -4 0.4 0.2 -6 -40 27 -34 2 -4 0.7 0.7 21 -12 23 -38 1 -4 0.6 0.1 47 -14 9 -24 1 -4 0.3 0.9 36 -23 16 -26 1 -4 0.8 0.3 29 -20 35 -14 1 -4 0.4 0.1 29 -33 21 -42 1 -4 0.9 0.2 -27 -43 11 -48 2 -4 0.2 0.1 22 -25 29 -13 2 -4 0.9 0.6 14 -8 -12 -23 1 -4 0.6 0.1 -13 -18 1 -23 1 -4 0.3 0.5 22 -20 -2 -48 1 -4 0.4 0.1 24 13 38 -43 1 -4 0.9 0.7 40 -30 44 38 2 -4 0.3 0.7 1 -26 -4 -41 1 -4 0.2 0.6 20 -6 -22 -29 1 -4 0.6 0.9 -9 -16 37 32 2 -4 0.7 0.7 6 -8 40 2 2 -4 0.6 0.6 10 -21 39 18 2 -4 0.1 0.5 -3 -33 2 -47 2 -4 0.9 0.3 -20 -43 37 11 2 -4 0.9 0.1 27 -20 5 -14 1 -4 0.9 0.2 3 -19 -41 -49 1 -4 0.2 0.1 18 -5 45 -5 2 -4 0.8 0.1 -13 -36 -10 -38 2 -4 0.4 0.6 45 34 
40 -17 1 -4 0.5 0.9 36 17 -2 -36 1 -4 0.1 0.1 14 1 -17 -38 1 -4 0.4 0.1 6 -10 -1 -49 1 -4 0.4 0.2 -10 -45 16 -13 2 -4 0.8 0.9 -11 -18 4 -38 2 -4 0.2 0.5 4 -49 8 1 2 -4 0.1 0.4 -32 -39 41 27 2 -4 0.1 0.5 14 1 -8 -21 1 -4 0.3 0.8 7 0 -18 -24 1 -4 0.9 0.1 24 -15 13 -40 1 -4 0.8 0.9 33 -36 -2 -48 1 -4 0.8 0.7 -42 -45 -17 -36 2 -4 0.8 0.8 31 20 -29 -31 1 -4 0.8 0.1 31 -25 -14 -45 1 -4 0.5 0.8 29 -28 28 -37 2 -4 0.6 0.3 -4 -23 -38 -43 1 -4 0.8 0.2 36 -6 47 -35 1 -4 0.4 0.5 -16 -41 48 22 2 -4 0.5 0.3 -10 -20 -21 -23 1 -4 0.9 0.3 17 -17 35 -17 1 -4 0.4 0.2 12 5 47 11 2 -4 0.8 0.8 10 -36 -23 -47 1 -4 0.6 0.6 24 -20 11 2 2 -4 0.9 0.5 35 -38 17 -26 1 -4 0.1 0.9 9 -5 -32 -40 1 -4 0.9 0.8 -37 -45 11 -28 2 -4 0.1 0.2 13 -35 29 -46 1 -4 0.2 0.2 36 23 33 -4 1 -4 0.9 0.2 38 5 -4 -39 1 -4 0.6 0.7 8 -35 43 40 2 -4 0.6 0.2 16 -14 23 19 2 -4 0.8 0.2 43 15 -21 -23 1 -4 0.5 0.3 50 -45 11 -42 1 -4 0.3 0.9 8 -18 6 -8 2 -4 0.8 0.7 -34 -49 11 -7 2 -4 0.5 0.6 22 -12 44 19 2 -4 0.8 0.1 2 -15 22 -29 2 -4 0.4 0.9 14 -20 27 -36 2 -4 0.8 0.7 6 -18 42 -23 2 -4 0.7 0.6 3 -17 33 19 2 -4 0.3 0.3 7 -23 3 -17 1 -4 0.4 0.6 29 -21 -26 -32 1 -4 0.6 0.3 15 -4 14 -45 1 -4 0.4 0.9 -10 -25 24 17 2 -4 0.7 0.5 5 -41 15 -29 2 -4 0.3 0.9 40 -23 10 -3 2 -4 0.4 0.5 -1 -20 -3 -11 2 -4 0.7 0.5 45 9 32 -22 1 -4 0.2 0.7 18 -37 42 -15 2 -4 0.6 0.9 16 -36 29 -32 2 -4 0.3 0.2 5 -41 44 -43 2 -4 0.2 0.6 23 -31 43 -33 2 -4 0.3 0.4 43 9 -4 -31 1 -5 0.5 0.9 6 -44 -11 -14 1 -5 0.1 0.6 43 -4 34 -9 2 -5 0.7 0.7 24 -25 -19 -21 1 -5 0.9 0.3 -9 -14 22 -31 1 -5 0.7 0.3 2 -16 30 -18 1 -5 0.4 0.3 28 -27 35 26 2 -5 0.8 0.3 27 -15 -2 -39 1 -5 0.5 0.2 16 -40 18 -43 1 -5 0.5 0.4 26 -33 20 15 2 -5 0.7 0.1 8 -33 -18 -34 1 -5 0.7 0.9 15 -24 32 9 1 -5 0.7 0.4 -1 -50 30 -2 2 -5 0.9 0.6 43 2 42 -10 2 -5 0.6 0.7 12 -48 46 40 2 -5 0.9 0.7 49 -40 49 26 1 -5 0.2 0.3 47 -20 27 -12 2 -5 0.6 0.3 41 20 -22 -44 1 -5 0.5 0.4 32 -5 7 4 2 -5 0.3 0.4 1 -10 15 -9 2 -5 0.6 0.6 45 43 37 30 2 -5 0.6 0.1 39 33 -31 -45 1 -5 0.5 0.6 48 -47 39 -32 2 -5 0.1 0.9 46 40 27 -9 1 -5 0.6 0.3 -15 -28 16 -38 1 -5 0.9 0.3 50 -24 -9 -21 1 -5 0.3 0.2 44 -26 23 -5 1 -5 0.1 0.6 46 16 32 4 2 -5 0.8 0.4 30 8 -6 -33 1 -5 0.5 0.4 50 -32 -15 -19 1 -5 0.9 0.4 1 -34 9 -22 2 -5 0.6 0.3 43 5 -5 -30 1 -5 0.4 0.3 44 0 45 21 2 -5 0.8 0.4 9 -5 47 -1 2 -5 0.6 0.1 17 7 47 -5 1 -5 0.5 0.7 2 -2 29 -20 2 -5 0.7 0.7 35 32 38 7 1 -5 0.6 0.3 15 -18 25 -31 1 -5 0.3 0.3 32 -45 28 26 2 -5 0.6 0.2 14 -42 15 -6 1 -5 0.3 0.3 29 -18 35 17 2 -5 0.9 0.6 44 -14 24 14 1 -5 0.3 0.2 38 3 -20 -21 1 -5 0.7 0.1 3 -7 2 -46 1 -5 0.5 0.7 32 -45 0 -45 1 -5 0.5 0.1 -2 -29 28 -37 2 -5 0.4 0.4 31 -24 -7 -12 1 -5 0.5 0.6 -21 -28 -27 -49 1 -5 0.7 0.9 21 12 34 21 2 -5 0.9 0.2 -3 -29 -43 -46 1 -5 0.8 0.6 19 7 50 24 2 -5 0.7 0.5 38 15 30 -40 1 -5 0.3 0.4 0 -26 49 -19 2 -5 0.9 0.6 23 -10 48 -1 1 -5 0.9 0.8 16 -17 -2 -48 1 -5 0.7 0.4 46 10 42 10 2 -5 0.9 0.8 23 22 35 -2 1 -5 0.1 0.6 47 14 22 -43 2 -5 0.9 0.4 12 -41 27 18 2 -5 0.3 0.2 27 25 36 16 1 -5 0.1 0.4 7 -47 17 -15 2 -5 0.4 0.7 33 32 36 -1 2 -5 0.1 0.3 48 -8 45 -35 1 -5 0.6 0.9 -23 -33 35 -18 2 -5 0.8 0.5 12 -47 26 -46 1 -5 0.3 0.8 19 -17 21 3 2 -5 0.2 0.9 30 14 30 -9 1 -5 0.5 0.8 44 -38 30 -11 2 -5 0.1 0.1 -23 -40 47 44 2 -5 0.2 0.9 40 7 24 6 1 -5 0.6 0.4 26 7 21 12 2 -5 0.7 0.6 41 -15 -16 -46 1 -5 0.3 0.4 12 1 2 -40 1 -5 0.6 0.2 -6 -13 4 -9 2 -5 0.5 0.9 49 47 -16 -41 1 -5 0.2 0.6 28 -22 10 -14 2 -5 0.5 0.3 -9 -28 -13 -46 2 -5 0.7 0.6 -1 -44 24 -10 2 -5 0.5 0.4 50 -41 49 -8 2 -5 0.4 0.7 -14 -33 -2 -9 2 -5 0.4 0.7 15 -50 -2 -26 1 -5 0.5 0.1 -38 -41 34 -46 1 -5 0.3 0.5 19 -45 4 -33 2 -5 0.5 0.1 48 32 
36 -14 1 -5 0.5 0.9 -18 -37 45 26 2 -5 0.6 0.5 24 17 -21 -36 1 -5 0.9 0.3 -5 -35 8 -33 2 -5 0.5 0.8 40 26 35 29 2 -5 0.2 0.3 14 -34 -3 -23 2 -5 0.9 0.4 12 -22 5 -29 1 -5 0.2 0.6 48 43 25 -40 1 -5 0.4 0.8 -26 -41 23 5 2 -5 0.6 0.8 37 2 -13 -40 2 -5 0.4 0.2 12 10 47 -11 1 -5 0.8 0.6 3 -19 16 -21 1 -5 0.8 0.5 -2 -46 -16 -37 2 -5 0.3 0.2 41 36 9 -28 1 -5 0.6 0.4 0 -50 -15 -22 1 -5 0.1 0.8 23 -27 -9 -22 1 -5 0.8 0.7 13 -48 44 -50 1 -5 0.9 0.8 2 1 32 29 2 -5 0.9 0.1 40 -39 -10 -30 1 -5 0.8 0.7 -15 -46 -25 -27 1 -5 0.6 0.7 3 -5 -34 -49 1 -5 0.2 0.8 47 45 -12 -30 1 -5 0.4 0.7 19 -7 -20 -36 2 -5 0.1 0.3 15 -32 3 -9 2 -5 0.7 0.6 -15 -35 14 -21 2 -5 0.2 0.4 -42 -43 -7 -40 1 -5 0.9 0.2 12 -6 33 -37 1 -5 0.8 0.4 49 -39 25 12 2 -5 0.7 0.1 43 -3 -29 -45 1 -5 0.9 0.5 25 -3 -6 -14 1 -5 0.7 0.3 39 -36 42 -33 1 -5 0.9 0.8 -6 -30 46 -21 2 -5 0.5 0.7 41 24 11 -18 1 -5 0.7 0.9 23 -19 30 19 2 -5 0.4 0.1 -3 -36 37 14 2 -5 0.5 0.1 9 -24 16 -19 1 -5 0.8 0.3 43 -44 28 -27 1 -5 0.9 0.6 16 -43 47 36 2 -5 0.9 0.9 43 -14 -2 -12 1 -5 0.6 0.5 -11 -14 39 -40 1 -5 0.1 0.8 5 -13 45 -48 2 -5 0.5 0.7 37 36 21 -44 1 -5 0.4 0.5 49 8 -13 -25 1 -5 0.1 0.1 17 7 29 -25 1 -5 0.7 0.1 39 -46 15 -8 1 -5 0.8 0.4 42 -23 8 -45 1 -5 0.1 0.8 -43 -44 18 -19 2 -5 0.1 0.5 42 25 13 -8 1 -5 0.7 0.5 42 -40 41 -32 1 -5 0.7 0.9 46 -43 40 19 1 -5 0.9 0.9 2 -21 44 -42 2 -5 0.3 0.1 25 -7 40 4 1 -5 0.1 0.4 28 -27 -27 -31 1 -5 0.1 0.1 18 -17 22 -15 1 -5 0.7 0.5 17 7 28 -7 2 -5 0.2 0.2 44 5 34 -3 2 -5 0.2 0.9 16 -6 7 -7 2 -5 0.3 0.8 46 44 31 18 1 -5 0.6 0.4 9 -43 -3 -6 2 -5 0.9 0.8 43 -41 18 -38 1 -5 0.1 0.2 47 -49 45 37 2 -5 0.8 0.3 9 -22 8 4 1 -5 0.9 0.2 12 -17 -13 -47 1 -5 0.5 0.2 -20 -24 7 -9 1 -5 0.5 0.4 31 -46 -1 -37 1 -5 0.1 0.8 49 14 43 -13 1 -5 0.7 0.4 38 -24 37 -31 1 -5 0.2 0.2 17 -50 -5 -23 1 -5 0.6 0.8 -12 -36 14 -22 2 -5 0.3 0.9 48 16 21 -33 2 -5 0.1 0.1 -6 -9 45 -50 2 -5 0.2 0.5 41 12 -22 -31 1 -5 0.6 0.2 43 -30 -14 -20 1 -5 0.3 0.7 27 -49 -3 -49 1 -5 0.3 0.9 34 -47 49 19 2 -5 0.6 0.2 21 -46 28 -32 2 -5 0.8 0.5 -12 -49 17 -21 2 -5 0.2 0.8 8 -44 20 -47 2 -5 0.1 0.4 30 -24 33 -28 2 -5 0.9 0.2 5 -30 10 0 1 -5 0.5 0.3 -17 -25 38 31 2 -5 0.4 0.6 -8 -37 29 14 2 -5 0.8 0.7 40 3 6 -32 1 -5 0.5 0.7 -5 -38 40 1 2 -5 0.4 0.4 -3 -5 3 -36 1 -5 0.7 0.8 13 -36 46 -24 2 -5 0.6 0.6 47 -10 8 -30 1 -5 0.6 0.3 49 -30 -1 -3 1 -5 0.4 0.8 8 -18 27 12 2 -5 0.5 0.4 -11 -23 -3 -32 2 -5 0.4 0.8 43 -44 19 -7 1 -5 0.1 0.5 43 -2 16 -15 1 -5 0.9 0.6 -3 -28 9 -44 2 -5 0.4 0.7 -24 -28 7 -27 2 -5 0.1 0.6 16 -9 -31 -46 2 -5 0.8 0.7 43 27 46 -27 1 -5 0.3 0.4 -10 -25 41 -9 2 -5 0.8 0.1 36 -1 23 -6 1 -5 0.8 0.1 25 24 8 -13 1 -5 0.2 0.7 -14 -36 -25 -50 1 -5 0.5 0.3 -9 -44 40 -6 2 -5 0.5 0.2 35 26 -8 -36 1 -5 0.4 0.7 -18 -34 4 -28 2 -5 0.4 0.3 34 -49 -9 -16 1 -5 0.4 0.6 23 -7 -10 -19 1 -5 0.9 0.7 31 12 47 -37 1 -5 0.9 0.8 4 -25 9 -7 1 -5 0.2 0.1 19 -44 47 4 2 -5 0.8 0.8 -4 -12 20 -22 2 -5 0.2 0.1 15 -47 7 -40 2 -5 0.8 0.7 17 9 31 -32 2 -5 0.5 0.2 25 0 -35 -37 1 -5 0.1 0.6 -3 -40 -9 -35 1 -5 0.3 0.6 42 25 36 -2 2 -5 0.2 0.7 -12 -13 44 -10 2 -5 0.3 0.4 34 -2 4 -5 1 -5 0.6 0.9 9 -43 -3 -25 1 -5 0.1 0.7 25 -14 38 -18 2 -5 0.7 0.9 -18 -32 25 -20 2 -5 0.5 0.9 32 -39 41 -19 2 -5 0.3 0.2 -26 -28 21 -5 2 -5 0.2 0.2 48 6 35 -6 1 -5 0.9 0.4 -13 -34 19 2 2 -5 0.6 0.7 26 -26 11 -23 1 -5 0.2 0.8 34 -19 4 -20 2 -5 0.6 0.6 -10 -42 6 -30 2 -5 0.3 0.9 -17 -34 -7 -10 2 -5 0.2 0.2 8 -30 27 23 2 -5 0.5 0.3 33 4 1 -36 1 -5 0.1 0.7 -14 -34 -4 -26 2 -5 0.1 0.5 -26 -27 35 -37 2 -5 0.1 0.7 46 -50 42 0 2 -5 0.3 0.4 23 -50 13 -3 2 -5 0.1 0.6 28 -14 39 37 2 -5 0.2 0.8 9 -11 15 5 2 -5 0.2 0.5 -29 -44 12 -10 2 
[Flattened example-data records elided: the diff at this point carried a whitespace-delimited data file whose line breaks were lost in extraction. Each record has 8 fields: a subject index (-5 through -14 in this span), two probabilities in (0, 1), four integer payoffs in [-50, 50], and a binary choice (1 or 2). The diff header naming the file did not survive, so the rows are not reproduced here.]
1 -14 0.5 0.2 -35 -42 13 -5 2 -14 0.8 0.8 18 5 21 -13 2 -14 0.1 0.5 22 6 41 -8 1 -14 0.4 0.8 45 -13 -16 -49 1 -14 0.5 0.1 9 2 -37 -49 1 -14 0.6 0.8 -4 -24 -14 -43 1 -14 0.4 0.9 31 -31 -7 -28 1 -14 0.3 0.6 38 -16 20 -27 1 -14 0.4 0.3 -11 -48 -1 -18 2 -14 0.5 0.5 -17 -38 38 -43 2 -14 0.7 0.8 20 -31 32 -21 2 -14 0.3 0.3 20 -8 35 -15 2 -14 0.8 0.3 -11 -34 17 -17 2 -14 0.6 0.4 -16 -20 26 -49 1 -14 0.8 0.1 30 -29 46 42 2 -14 0.3 0.8 40 14 24 14 2 -14 0.1 0.1 40 -37 -3 -44 1 -14 0.7 0.5 50 33 42 37 1 -14 0.7 0.5 16 6 20 -6 1 -14 0.5 0.4 13 -2 19 -37 2 -14 0.5 0.9 41 16 41 -1 2 -14 0.2 0.1 16 2 38 22 2 -14 0.5 0.7 29 -33 31 -4 2 -14 0.6 0.5 42 -30 12 -33 1 -14 0.5 0.4 31 -14 -3 -10 1 -14 0.3 0.4 5 -36 35 -24 1 -14 0.5 0.2 31 -18 50 -24 1 -14 0.5 0.7 2 -38 44 40 2 -14 0.3 0.7 27 2 -2 -17 1 -14 0.1 0.4 26 3 26 -19 1 -14 0.2 0.8 34 31 -7 -48 1 -14 0.2 0.4 35 -25 -21 -38 1 -14 0.6 0.8 -7 -49 -26 -35 1 -14 0.3 0.8 33 -45 21 9 2 -14 0.3 0.5 -27 -43 36 -35 2 -14 0.1 0.3 27 6 -22 -44 1 -14 0.7 0.2 4 -35 21 -7 2 -14 0.2 0.3 40 -32 4 -14 1 -14 0.3 0.6 45 8 9 -7 1 -14 0.1 0.8 39 -13 37 30 2 -14 0.9 0.3 -44 -49 39 -43 2 -14 0.5 0.4 0 -9 34 1 2 -14 0.2 0.1 45 -26 48 -23 1 -14 0.2 0.2 20 -22 -18 -50 1 -14 0.6 0.3 19 -41 22 -47 1 -14 0.6 0.7 26 -10 27 1 2 -14 0.9 0.1 25 -17 32 21 2 -14 0.3 0.8 26 -41 4 -16 2 -14 0.7 0.2 27 -42 -16 -45 1 -14 0.4 0.5 -2 -3 19 -26 2 -14 0.7 0.6 33 21 49 31 1 -14 0.3 0.6 18 -25 -12 -13 1 -14 0.6 0.4 13 -10 28 8 2 -14 0.8 0.9 32 -27 1 -7 1 -14 0.8 0.2 32 -6 22 -37 1 -14 0.1 0.5 -15 -31 42 -35 2 -14 0.7 0.7 -20 -33 24 -14 2 -14 0.2 0.7 46 -18 30 -14 2 -14 0.1 0.5 46 -33 39 -23 2 -14 0.8 0.6 18 -28 -17 -35 1 -14 0.5 0.6 35 -26 48 8 2 -14 0.1 0.2 -22 -24 35 7 2 -14 0.6 0.6 -1 -21 27 -19 1 -14 0.3 0.6 33 -6 39 10 2 -14 0.3 0.9 -27 -32 20 -47 2 -14 0.4 0.7 -16 -43 -15 -45 1 -14 0.2 0.9 16 -40 12 10 2 -14 0.8 0.1 -13 -43 1 -47 1 -14 0.3 0.8 -23 -39 47 13 2 -14 0.2 0.2 37 -48 43 12 2 -14 0.7 0.9 -6 -48 -33 -45 1 -14 0.7 0.8 43 -19 -1 -38 1 -14 0.3 0.8 -5 -7 -5 -26 2 -14 0.7 0.7 31 -18 11 -49 1 -14 0.1 0.1 6 -13 21 -44 1 -14 0.5 0.4 40 9 31 5 1 -14 0.9 0.9 5 -46 -25 -44 1 -14 0.7 0.8 44 39 3 -31 1 -14 0.9 0.9 3 -36 24 -1 2 -14 0.7 0.3 -3 -45 23 -36 2 -14 0.4 0.7 21 -16 2 -10 1 -14 0.6 0.6 6 -3 42 19 2 -14 0.8 0.4 34 -28 35 33 1 -14 0.5 0.7 11 2 22 -42 2 -14 0.8 0.6 22 -23 43 -45 1 -14 0.4 0.9 3 -21 47 -1 2 -14 0.4 0.7 12 4 23 -39 1 -14 0.7 0.5 40 -5 24 -30 1 -14 0.1 0.9 20 9 -2 -42 1 -14 0.1 0.1 40 21 44 -45 1 -14 0.4 0.1 21 -31 5 -25 1 -14 0.2 0.2 -24 -34 25 23 2 -14 0.9 0.3 23 -21 1 -25 1 -14 0.1 0.3 -1 -47 8 7 2 -14 0.9 0.1 10 -30 43 -39 2 -14 0.1 0.8 13 0 21 -3 2 -14 0.5 0.3 37 17 17 -36 1 -14 0.4 0.1 10 -28 34 0 2 -14 0.2 0.1 -4 -31 37 -36 2 -14 0.4 0.4 38 33 22 -13 1 -14 0.6 0.5 36 -10 -27 -35 1 -14 0.1 0.7 -2 -42 22 -10 2 -14 0.7 0.9 40 25 32 -35 1 -14 0.8 0.9 6 -41 39 23 2 -14 0.8 0.7 49 14 -30 -32 1 -14 0.7 0.1 -15 -36 17 16 2 -14 0.8 0.1 -3 -42 34 -19 1 -14 0.2 0.8 48 43 14 -12 1 -14 0.4 0.5 24 23 30 27 2 -14 0.3 0.1 28 -38 34 -36 1 -14 0.8 0.5 23 -46 -20 -27 1 -14 0.4 0.5 50 -6 38 1 1 -14 0.9 0.4 11 -11 42 -16 1 -14 0.4 0.6 41 38 36 -13 1 -14 0.6 0.6 -8 -14 47 -9 2 -14 0.4 0.5 -5 -47 7 -26 2 -14 0.4 0.2 -2 -44 3 -28 1 -14 0.2 0.5 -4 -33 10 -14 1 -14 0.2 0.3 28 -7 35 -6 2 -14 0.3 0.3 34 31 40 -43 1 -14 0.3 0.5 -5 -45 20 -7 2 -14 0.9 0.2 -12 -40 45 -17 2 -14 0.3 0.3 40 15 24 -8 1 -14 0.7 0.4 4 1 40 -44 2 -14 0.9 0.3 47 -25 23 4 1 -14 0.6 0.1 31 -19 -3 -6 1 -14 0.4 0.1 -10 -24 48 -11 2 -14 0.1 0.7 41 -46 -24 -34 2 -14 0.3 0.2 5 1 26 -49 1 -14 0.5 0.2 40 9 15 8 1 -14 0.3 0.5 46 8 19 2 
1 -14 0.3 0.2 38 -23 35 -19 2 -14 0.8 0.6 44 -24 49 -31 1 -14 0.1 0.1 48 -37 4 -11 1 -14 0.2 0.5 46 8 13 -31 1 -14 0.9 0.4 23 16 15 -3 1 -14 0.7 0.5 31 -44 44 -30 1 -14 0.9 0.8 37 -46 -27 -31 1 -14 0.1 0.1 -25 -27 46 20 2 -14 0.6 0.9 -2 -36 46 25 2 -14 0.2 0.3 -1 -8 -36 -47 1 -14 0.7 0.6 29 9 13 -26 2 -14 0.5 0.8 23 -16 32 -3 2 -14 0.9 0.9 10 -41 33 -12 2 -14 0.9 0.7 3 -27 46 36 2 -14 0.6 0.6 39 -24 21 11 2 -14 0.3 0.3 14 -27 10 -21 1 -14 0.6 0.7 -17 -44 19 -18 2 -14 0.9 0.4 39 -47 47 -12 1 -14 0.9 0.6 -38 -45 16 -35 2 -14 0.7 0.2 35 30 33 23 2 -14 0.8 0.6 -9 -42 19 -35 2 -14 0.2 0.6 -14 -25 18 -36 2 -14 0.5 0.9 -22 -35 35 32 2 -14 0.3 0.8 18 17 44 4 1 -14 0.6 0.5 -2 -33 29 14 2 -14 0.2 0.8 -3 -38 17 -47 2 -14 0.1 0.7 7 -31 -18 -26 1 -14 0.3 0.7 16 -33 -17 -29 1 -14 0.8 0.2 15 -16 -44 -49 1 -14 0.1 0.4 43 -5 21 -41 1 -14 0.3 0.6 10 -28 22 -4 2 -14 0.1 0.4 29 -8 8 6 2 -14 0.3 0.9 6 -12 20 5 2 -14 0.1 0.5 -25 -49 24 -16 2 -14 0.8 0.1 40 -29 -24 -31 1 -14 0.4 0.1 22 -18 -22 -28 1 -14 0.6 0.4 5 -8 17 -41 1 -14 0.7 0.5 20 -1 24 -11 1 -14 0.2 0.2 40 -30 23 11 2 -14 0.8 0.2 29 -30 -12 -13 1 -14 0.1 0.5 30 -1 31 -18 1 -14 0.8 0.8 21 17 20 -9 1 -14 0.6 0.5 16 -10 -3 -14 1 -14 0.4 0.5 28 -49 36 27 2 -14 0.4 0.9 17 6 14 6 2 -14 0.4 0.1 1 -16 28 -8 1 -14 0.2 0.8 5 -14 -35 -48 1 -14 0.1 0.8 42 7 23 -14 2 -14 0.2 0.7 29 11 32 -11 1 -14 0.3 0.9 32 27 48 -4 2 -14 0.8 0.2 34 -17 12 -42 1 -14 0.1 0.7 15 -13 -9 -50 1 -14 0.2 0.4 40 33 5 -44 1 -14 0.5 0.3 46 -50 -16 -22 1 -14 0.5 0.7 21 -36 41 15 2 -14 0.8 0.3 -4 -5 35 16 2 -14 0.6 0.3 25 -31 35 21 2 -14 0.5 0.6 49 15 -40 -46 1 -14 0.7 0.9 -3 -21 41 -20 2 -14 0.4 0.8 37 27 -32 -50 1 -14 0.7 0.9 30 -50 28 -47 1 -14 0.6 0.3 46 -16 -17 -30 2 -14 0.9 0.8 6 -30 -6 -44 1 -14 0.2 0.4 23 12 9 -25 1 -14 0.3 0.6 18 -44 11 -41 2 -14 0.6 0.1 46 -46 -12 -47 1 -14 0.2 0.4 44 40 46 -23 1 -14 0.6 0.6 39 26 40 -47 1 -14 0.7 0.1 36 -14 -35 -49 1 -14 0.6 0.2 39 -21 -23 -28 1 -14 0.6 0.9 -1 -40 -3 -25 2 -14 0.5 0.9 18 -18 20 -6 2 -14 0.6 0.1 27 -5 46 18 2 -14 0.4 0.7 43 26 6 -45 1 -14 0.7 0.5 39 6 50 32 2 -14 0.9 0.5 29 -23 -7 -26 1 -14 0.8 0.7 -40 -45 -1 -30 2 -14 0.7 0.6 35 1 45 -7 2 -14 0.9 0.3 -13 -30 2 -20 2 -14 0.3 0.4 30 -18 -17 -50 1 -14 0.9 0.9 -25 -41 38 28 2 -14 0.8 0.1 -31 -36 39 -16 1 -14 0.3 0.1 -25 -34 9 -24 2 -14 0.7 0.5 14 3 3 -30 1 -14 0.3 0.8 -29 -40 -4 -31 2 -14 0.5 0.5 -18 -20 22 -25 2 -14 0.2 0.7 43 12 39 37 2 -14 0.9 0.4 15 -21 10 5 2 -14 0.5 0.4 45 -18 5 -16 1 -14 0.5 0.3 15 -1 -26 -31 1 -14 0.2 0.2 41 -13 47 -50 1 -14 0.8 0.2 31 -47 43 24 2 -14 0.6 0.2 44 -14 49 20 2 -14 0.8 0.6 22 -4 25 -6 1 -14 0.4 0.4 15 2 -4 -35 2 -14 0.2 0.6 -30 -36 17 -40 2 -14 0.4 0.9 -19 -22 32 24 2 -14 0.1 0.7 29 -38 30 -38 2 -14 0.1 0.3 19 -45 27 19 2 -14 0.8 0.7 16 -29 14 -11 2 -14 0.8 0.2 50 22 45 -33 1 -14 0.6 0.1 -22 -37 49 -33 2 -14 0.6 0.4 3 -43 26 9 2 -14 0.7 0.6 50 12 -13 -44 1 -14 0.4 0.4 5 -45 41 28 2 -14 0.4 0.2 1 -4 7 -37 2 -14 0.8 0.5 38 32 9 -21 1 -14 0.2 0.7 22 -37 46 -25 2 -14 0.8 0.8 -42 -44 22 13 2 -14 0.1 0.3 49 44 43 0 1 -14 0.2 0.1 16 -35 47 -39 2 -14 0.6 0.5 44 -35 -20 -37 1 -14 0.4 0.8 6 -18 24 -28 2 -14 0.9 0.4 8 -11 30 -42 2 -14 0.5 0.3 14 -25 44 -10 2 -14 0.9 0.2 49 22 46 -9 1 -14 0.6 0.6 29 2 21 -4 2 -14 0.4 0.2 0 -46 41 -29 2 -14 0.7 0.4 35 10 44 19 2 -14 0.8 0.8 50 35 15 -21 1 -14 0.1 0.1 45 2 40 -30 1 -14 0.7 0.6 -28 -30 5 -13 2 -14 0.7 0.8 -17 -29 48 29 2 -15 0.9 0.1 -19 -33 31 15 2 -15 0.4 0.5 36 5 -2 -8 1 -15 0.5 0.2 13 -23 39 -33 1 -15 0.8 0.3 36 -9 24 -35 1 -15 0.4 0.1 10 8 37 17 1 -15 0.3 0.8 5 -26 -9 -10 1 -15 0.4 0.3 14 -34 21 -45 1 -15 0.2 
0.4 -31 -49 2 -24 2 -15 0.6 0.4 50 48 -41 -50 1 -15 0.2 0.8 -8 -34 45 15 2 -15 0.1 0.6 32 -27 14 0 2 -15 0.9 0.6 10 -49 -5 -43 1 -15 0.2 0.1 2 -6 14 -15 2 -15 0.1 0.7 -6 -34 18 -34 2 -15 0.2 0.6 13 -32 -36 -45 1 -15 0.1 0.7 0 -38 23 -22 2 -15 0.2 0.1 4 -27 -13 -38 1 -15 0.2 0.8 48 -35 -1 -46 2 -15 0.9 0.2 16 -40 10 -25 1 -15 0.7 0.8 13 -6 44 6 2 -15 0.8 0.7 35 31 -14 -47 1 -15 0.1 0.3 50 17 3 -12 1 -15 0.4 0.1 37 -42 18 -29 2 -15 0.8 0.6 -8 -22 49 6 2 -15 0.9 0.7 9 -17 27 -50 2 -15 0.2 0.7 0 -44 21 -16 2 -15 0.5 0.2 18 -8 35 -21 1 -15 0.1 0.1 37 27 46 18 1 -15 0.6 0.1 -32 -47 -6 -19 2 -15 0.4 0.6 6 -31 31 11 2 -15 0.5 0.4 34 26 50 49 2 -15 0.2 0.6 2 -10 36 21 2 -15 0.1 0.4 -42 -44 30 -43 2 -15 0.5 0.4 25 -23 29 -11 2 -15 0.9 0.5 46 0 39 -37 1 -15 0.6 0.5 41 -20 16 8 1 -15 0.1 0.9 46 23 -45 -50 1 -15 0.9 0.5 -3 -35 29 -50 2 -15 0.4 0.2 28 -49 3 -23 1 -15 0.7 0.1 30 -26 2 -35 1 -15 0.7 0.6 49 2 12 -28 1 -15 0.5 0.3 45 -2 17 10 1 -15 0.7 0.1 42 -12 7 -35 1 -15 0.1 0.7 48 -8 45 -19 2 -15 0.3 0.5 -10 -13 49 -46 2 -15 0.2 0.5 24 4 11 5 1 -15 0.1 0.2 27 18 -7 -34 1 -15 0.2 0.9 28 18 42 33 2 -15 0.9 0.1 18 9 31 -33 1 -15 0.6 0.9 11 6 7 -30 1 -15 0.1 0.4 32 -42 35 -47 2 -15 0.9 0.3 15 -23 -28 -37 1 -15 0.6 0.8 -18 -39 28 18 2 -15 0.8 0.6 28 -30 45 11 2 -15 0.1 0.8 27 23 -3 -18 1 -15 0.5 0.2 -5 -27 6 -38 2 -15 0.8 0.1 39 23 50 -19 1 -15 0.4 0.6 30 13 49 -2 2 -15 0.9 0.1 46 14 0 -19 1 -15 0.6 0.7 37 5 -29 -30 1 -15 0.3 0.8 42 -45 22 -12 2 -15 0.2 0.7 18 -14 47 24 2 -15 0.5 0.5 30 -46 22 -18 2 -15 0.5 0.4 49 -7 -15 -41 1 -15 0.9 0.5 -35 -38 42 33 2 -15 0.9 0.1 39 -19 -29 -34 1 -15 0.9 0.2 -42 -43 -36 -41 2 -15 0.9 0.9 13 -31 24 3 2 -15 0.3 0.8 44 23 35 -13 1 -15 0.1 0.6 41 30 42 28 2 -15 0.2 0.4 17 -25 13 1 2 -15 0.5 0.2 -6 -9 22 -17 2 -15 0.3 0.3 17 -35 -44 -45 1 -15 0.1 0.4 32 -2 41 17 2 -15 0.9 0.8 25 -49 49 18 2 -15 0.1 0.8 38 34 31 9 1 -15 0.5 0.7 20 -36 41 12 2 -15 0.5 0.2 5 -11 -28 -36 1 -15 0.8 0.5 45 -6 14 5 1 -15 0.1 0.6 4 -13 10 -18 2 -15 0.8 0.8 -32 -39 14 -36 2 -15 0.5 0.3 9 -38 45 15 2 -15 0.5 0.3 -5 -20 35 -17 2 -15 0.6 0.5 16 -5 50 10 2 -15 0.5 0.3 42 -16 4 3 2 -15 0.6 0.7 -11 -36 -6 -49 2 -15 0.9 0.2 14 -12 15 12 2 -15 0.8 0.6 -25 -26 25 10 2 -15 0.2 0.4 -27 -42 49 -18 2 -15 0.7 0.5 1 -33 17 8 2 -15 0.2 0.6 32 -47 50 -30 2 -15 0.3 0.4 -11 -34 46 -44 2 -15 0.3 0.8 21 -5 39 -1 2 -15 0.2 0.6 19 10 13 -28 1 -15 0.1 0.8 10 -12 13 -35 2 -15 0.6 0.7 -18 -46 -25 -46 1 -15 0.1 0.5 15 2 21 0 2 -15 0.3 0.4 48 -30 33 -30 1 -15 0.3 0.6 46 32 -12 -29 1 -15 0.5 0.2 5 4 28 0 1 -15 0.8 0.8 46 9 28 12 2 -15 0.3 0.2 13 4 35 12 2 -15 0.8 0.9 21 -35 20 -37 2 -15 0.7 0.3 46 -18 10 -39 1 -15 0.1 0.6 28 -31 22 -12 1 -15 0.8 0.1 -18 -22 44 19 2 -15 0.4 0.3 49 -47 -9 -29 2 -15 0.3 0.8 42 40 23 -31 1 -15 0.4 0.3 44 -40 35 21 2 -15 0.7 0.3 8 -49 -3 -30 1 -15 0.3 0.3 31 -3 26 -31 1 -15 0.8 0.6 -2 -50 -3 -22 2 -15 0.5 0.7 12 -41 37 9 2 -15 0.7 0.7 41 -27 15 -33 1 -15 0.7 0.6 22 14 17 8 1 -15 0.5 0.6 47 -14 31 1 1 -15 0.9 0.9 50 -4 -6 -45 1 -15 0.6 0.9 -13 -30 -36 -40 1 -15 0.8 0.3 39 -38 14 1 1 -15 0.1 0.9 40 10 10 -23 1 -15 0.4 0.9 -45 -46 8 -8 2 -15 0.2 0.8 10 -12 -2 -41 2 -15 0.8 0.6 38 2 32 -17 1 -15 0.5 0.5 31 -23 49 -28 2 -15 0.8 0.5 25 -14 -20 -22 1 -15 0.7 0.6 -30 -41 45 33 2 -15 0.1 0.2 2 -17 41 -47 1 -15 0.3 0.3 10 -42 30 -4 2 -15 0.3 0.5 49 -35 33 26 2 -15 0.5 0.3 45 -18 -3 -30 1 -15 0.4 0.8 45 -22 21 -42 2 -15 0.7 0.7 44 -45 21 -21 1 -15 0.1 0.4 17 10 43 35 2 -15 0.4 0.3 42 -36 6 -9 1 -15 0.6 0.7 12 3 35 30 2 -15 0.2 0.7 -42 -46 5 -32 2 -15 0.7 0.1 49 15 -29 -38 1 -15 0.5 0.4 0 -43 28 -3 2 -15 0.1 0.4 
19 -34 -30 -44 1 -15 0.5 0.5 28 1 -34 -39 1 -15 0.9 0.5 0 -1 16 5 2 -15 0.1 0.8 18 -31 40 -9 2 -15 0.7 0.1 -4 -28 29 -26 1 -15 0.4 0.4 23 19 32 -11 2 -15 0.7 0.8 14 -5 -14 -47 1 -15 0.6 0.7 -20 -25 13 -21 2 -15 0.4 0.3 27 -3 -12 -18 1 -15 0.4 0.8 19 6 4 -35 1 -15 0.1 0.4 32 -43 23 14 2 -15 0.7 0.4 -38 -44 12 -21 2 -15 0.2 0.3 40 4 28 -9 1 -15 0.8 0.9 41 -8 -11 -22 1 -15 0.1 0.8 4 -34 -7 -16 2 -15 0.3 0.9 49 8 44 22 2 -15 0.6 0.1 5 -20 28 -50 1 -15 0.1 0.1 -45 -48 42 10 2 -15 0.9 0.8 11 -12 47 -6 2 -15 0.5 0.1 35 -38 -25 -41 1 -15 0.5 0.1 -18 -41 29 -24 2 -15 0.9 0.1 7 -38 18 -45 1 -15 0.6 0.9 41 -18 48 -16 2 -15 0.7 0.8 44 -44 19 -37 1 -15 0.5 0.6 32 -13 1 -48 1 -15 0.1 0.6 38 -43 -7 -21 2 -15 0.6 0.3 -35 -41 -13 -22 2 -15 0.1 0.6 -33 -42 8 -17 2 -15 0.1 0.3 20 -43 -23 -33 1 -15 0.5 0.5 21 -35 11 5 2 -15 0.3 0.8 15 4 -6 -16 1 -15 0.2 0.2 -11 -32 7 -6 2 -15 0.4 0.9 39 3 12 -8 2 -15 0.4 0.5 23 20 49 25 2 -15 0.1 0.1 22 -23 -5 -39 1 -15 0.7 0.7 6 -14 2 -49 1 -15 0.2 0.8 -41 -48 27 -12 2 -15 0.7 0.7 5 -22 25 -27 2 -15 0.3 0.3 -7 -43 26 13 2 -15 0.9 0.3 37 -42 22 -20 1 -15 0.9 0.6 21 -44 34 15 2 -15 0.1 0.8 -1 -14 14 -10 2 -15 0.6 0.6 50 37 42 28 1 -15 0.9 0.3 34 28 -24 -27 1 -15 0.2 0.5 21 16 -15 -40 1 -15 0.7 0.9 -38 -50 19 3 2 -15 0.5 0.8 -35 -50 26 -28 2 -15 0.3 0.2 37 -7 32 -17 1 -15 0.7 0.6 48 40 39 6 1 -15 0.1 0.3 49 29 40 22 1 -15 0.6 0.1 -13 -39 -24 -30 1 -15 0.5 0.6 8 -7 1 -3 1 -15 0.9 0.3 38 -31 36 4 1 -15 0.5 0.3 25 -5 3 -31 1 -15 0.2 0.8 4 -34 22 1 2 -15 0.9 0.2 10 8 0 -6 1 -15 0.8 0.9 -14 -43 28 -47 2 -15 0.1 0.8 43 6 -1 -37 1 -15 0.9 0.1 16 -45 40 -2 1 -15 0.8 0.8 -20 -26 29 -29 2 -15 0.9 0.5 41 34 19 -7 1 -15 0.9 0.4 9 -21 24 9 2 -15 0.5 0.7 13 -14 7 -26 2 -15 0.5 0.2 -26 -45 41 16 2 -15 0.9 0.6 -25 -37 34 -32 2 -15 0.2 0.4 17 -26 46 -31 2 -15 0.2 0.4 27 -9 34 -4 2 -15 0.1 0.9 16 -34 13 -14 2 -15 0.1 0.9 20 -40 34 -18 2 -15 0.6 0.9 30 -35 -25 -50 1 -15 0.4 0.5 14 -21 48 -24 2 -15 0.6 0.9 14 -12 29 -7 2 -15 0.9 0.2 23 -17 -6 -15 1 -15 0.3 0.7 11 3 1 -47 1 -15 0.7 0.2 21 4 44 19 2 -15 0.3 0.1 19 -23 25 2 2 -15 0.9 0.9 -14 -33 13 -27 2 -15 0.2 0.9 6 -33 12 -2 2 -15 0.8 0.2 -5 -25 29 -7 2 -15 0.7 0.8 22 -24 29 0 2 -15 0.4 0.7 44 -40 20 -27 2 -15 0.1 0.3 35 -17 29 23 2 -15 0.8 0.5 20 5 3 -25 1 -15 0.5 0.1 -17 -24 34 13 2 -15 0.9 0.1 40 16 42 -30 1 -15 0.7 0.9 -16 -27 40 -7 2 -15 0.2 0.3 33 31 12 -27 1 -15 0.3 0.4 5 -19 -35 -42 1 -15 0.6 0.7 2 -5 37 -1 2 -15 0.2 0.5 37 35 -6 -9 1 -15 0.4 0.9 27 15 38 -45 2 -15 0.2 0.3 14 -20 19 -43 2 -15 0.6 0.3 20 -33 25 -24 2 -15 0.8 0.8 19 5 20 -42 2 -15 0.2 0.8 5 -10 25 -16 2 -15 0.8 0.1 40 16 44 15 2 -15 0.5 0.3 48 -44 41 21 2 -15 0.6 0.5 36 30 35 28 1 -15 0.3 0.2 17 -18 45 29 2 -15 0.6 0.5 44 17 26 -28 1 -15 0.1 0.5 13 -42 50 -24 2 -15 0.2 0.2 39 5 48 5 2 -15 0.2 0.9 -7 -20 -1 -47 2 -15 0.7 0.5 38 27 50 -18 1 -15 0.9 0.1 18 -47 -10 -15 1 -15 0.3 0.5 31 -45 -14 -35 2 -15 0.7 0.2 -37 -38 0 -46 1 -15 0.5 0.7 28 -22 25 7 2 -15 0.3 0.1 3 -48 -13 -15 2 -15 0.5 0.3 -14 -15 49 17 2 -15 0.2 0.4 -17 -49 -34 -47 1 -15 0.8 0.7 -5 -48 13 -22 2 -15 0.1 0.8 12 -5 11 10 2 -15 0.9 0.2 -25 -40 -16 -42 1 -15 0.7 0.6 48 -14 33 -4 1 -15 0.5 0.9 12 -27 11 3 2 -15 0.5 0.1 39 -1 31 -21 1 -15 0.3 0.5 48 -29 21 -20 2 -15 0.6 0.9 40 -30 43 12 2 -15 0.6 0.5 28 -32 37 -19 1 -15 0.5 0.9 -9 -49 34 20 2 -15 0.5 0.2 46 -30 25 5 1 -15 0.1 0.4 -32 -34 -7 -35 1 -15 0.4 0.3 -7 -35 6 -41 1 -15 0.2 0.5 40 -30 -35 -49 1 -15 0.1 0.3 22 -3 38 -4 2 -15 0.3 0.6 -19 -43 47 4 2 -15 0.5 0.5 44 -32 -37 -45 1 -15 0.6 0.2 21 -18 -16 -27 1 -15 0.7 0.5 48 34 27 12 1 -15 0.9 0.2 40 -43 40 -6 1 -15 
0.9 0.2 29 -4 8 7 1 -15 0.4 0.2 8 -50 44 13 2 -15 0.9 0.3 44 31 38 4 1 -15 0.3 0.2 20 -40 39 -14 2 -15 0.2 0.4 18 -36 44 40 2 -15 0.3 0.1 -6 -22 30 -22 1 -15 0.3 0.5 34 -21 48 -31 2 -15 0.4 0.1 5 -33 29 10 2 -15 0.4 0.2 48 -26 38 -26 1 -15 0.3 0.6 16 -33 21 -16 2 -15 0.1 0.2 -21 -45 36 13 2 -15 0.6 0.8 35 -14 5 -39 1 -15 0.7 0.7 39 -28 21 6 2 -15 0.8 0.6 -18 -25 35 -21 2 -15 0.2 0.9 30 -34 33 -36 2 -15 0.3 0.3 47 22 37 -47 1 -15 0.1 0.7 18 -47 -15 -28 1 -15 0.3 0.5 7 4 27 -40 1 -15 0.1 0.7 42 -35 -9 -50 1 -15 0.7 0.6 50 -12 23 14 1 -15 0.1 0.5 21 -31 16 -17 2 -15 0.8 0.4 -1 -10 24 11 2 -15 0.2 0.5 45 -37 -14 -28 1 -15 0.5 0.2 -24 -48 3 -21 1 -15 0.7 0.6 29 -1 40 10 2 -15 0.7 0.5 41 -20 38 -26 2 -15 0.1 0.2 33 1 41 -16 1 -15 0.2 0.2 32 21 42 -8 1 -15 0.9 0.3 40 29 7 -29 1 -15 0.3 0.3 10 -47 39 37 2 -15 0.7 0.8 46 22 17 -22 1 -16 0.1 0.3 -23 -45 -1 -32 2 -16 0.7 0.1 41 8 4 -2 1 -16 0.6 0.7 44 -23 -11 -17 1 -16 0.3 0.8 45 17 27 25 1 -16 0.4 0.9 10 -7 29 16 2 -16 0.4 0.4 16 3 31 -30 1 -16 0.6 0.5 49 -12 40 29 2 -16 0.7 0.5 10 -9 -36 -43 1 -16 0.7 0.6 37 -47 29 -23 1 -16 0.8 0.6 32 -18 48 -40 1 -16 0.9 0.3 17 -26 45 31 2 -16 0.2 0.8 24 -5 -1 -19 1 -16 0.1 0.4 17 -18 -5 -19 2 -16 0.2 0.5 42 -31 23 -38 2 -16 0.5 0.7 27 -18 39 -8 2 -16 0.4 0.1 24 3 30 -30 1 -16 0.1 0.4 21 -10 29 5 2 -16 0.8 0.4 6 -42 50 22 2 -16 0.7 0.3 29 -32 14 -8 1 -16 0.8 0.3 38 36 26 -7 1 -16 0.2 0.2 12 -18 21 -8 2 -16 0.2 0.3 -33 -42 33 -4 2 -16 0.3 0.9 14 -33 36 11 2 -16 0.7 0.5 19 -15 -36 -44 1 -16 0.5 0.2 28 -47 28 -48 1 -16 0.5 0.8 24 -45 13 -8 2 -16 0.6 0.8 43 -24 35 -32 2 -16 0.9 0.7 12 -41 17 -14 1 -16 0.4 0.9 20 -8 4 -38 1 -16 0.9 0.4 44 10 28 -44 1 -16 0.2 0.4 2 -27 42 5 2 -16 0.9 0.5 2 -1 38 -30 1 -16 0.1 0.4 19 -3 -5 -23 1 -16 0.1 0.6 29 26 43 -7 1 -16 0.6 0.8 -25 -29 9 6 2 -16 0.2 0.6 26 -31 24 -22 2 -16 0.7 0.8 19 -37 32 -36 2 -16 0.7 0.7 19 -30 50 -34 2 -16 0.9 0.4 50 15 -26 -44 1 -16 0.9 0.7 1 -8 -24 -29 1 -16 0.8 0.7 4 -14 4 -12 1 -16 0.6 0.7 34 32 40 0 2 -16 0.5 0.2 -26 -50 -20 -46 1 -16 0.9 0.7 44 -47 3 -14 1 -16 0.9 0.9 33 12 32 -33 1 -16 0.4 0.3 50 -2 -17 -28 1 -16 0.9 0.1 22 -32 49 5 1 -16 0.9 0.4 29 18 -38 -39 1 -16 0.6 0.1 31 0 47 41 2 -16 0.5 0.9 23 -3 -4 -16 1 -16 0.4 0.2 34 17 35 -47 1 -16 0.3 0.3 35 -3 -7 -40 1 -16 0.5 0.6 4 -19 -28 -48 1 -16 0.6 0.7 11 10 6 -41 1 -16 0.8 0.3 14 -45 -19 -50 1 -16 0.1 0.9 27 -35 2 -43 2 -16 0.1 0.8 10 9 -13 -45 1 -16 0.5 0.4 -33 -45 45 -25 2 -16 0.1 0.5 -5 -31 -26 -35 2 -16 0.5 0.7 -1 -19 27 -7 2 -16 0.7 0.9 12 1 -37 -49 1 -16 0.7 0.2 38 34 4 -6 1 -16 0.6 0.1 22 -32 32 8 2 -16 0.4 0.4 31 -7 43 19 2 -16 0.3 0.5 25 -38 22 -26 2 -16 0.4 0.8 -3 -19 44 -2 2 -16 0.5 0.3 37 -23 18 4 2 -16 0.9 0.1 30 -12 9 -48 1 -16 0.6 0.9 4 -7 30 -25 2 -16 0.2 0.9 34 -46 9 -34 2 -16 0.5 0.2 20 -26 40 -12 2 -16 0.9 0.2 -29 -50 34 -33 2 -16 0.3 0.6 44 23 20 -30 1 -16 0.1 0.9 3 -15 20 -2 2 -16 0.4 0.4 -22 -42 -27 -38 2 -16 0.7 0.1 -39 -48 45 -2 2 -16 0.1 0.8 43 8 45 22 2 -16 0.5 0.4 -11 -43 -33 -48 1 -16 0.9 0.7 8 -8 24 -18 2 -16 0.1 0.4 0 -25 40 -21 2 -16 0.7 0.5 42 34 22 -45 1 -16 0.5 0.3 28 2 31 -16 1 -16 0.4 0.8 2 -4 36 24 2 -16 0.1 0.2 48 -38 47 27 2 -16 0.8 0.1 -1 -4 -32 -44 1 -16 0.4 0.8 -4 -44 39 -8 2 -16 0.2 0.9 28 -34 7 -43 2 -16 0.1 0.3 -13 -24 16 -34 2 -16 0.9 0.1 41 26 15 -31 1 -16 0.6 0.6 48 -33 -32 -33 1 -16 0.8 0.1 42 -40 22 -48 1 -16 0.4 0.9 -1 -19 49 46 2 -16 0.2 0.5 29 -49 3 -21 2 -16 0.8 0.8 43 11 -34 -35 1 -16 0.1 0.5 43 -6 44 42 2 -16 0.2 0.6 -28 -35 -38 -39 2 -16 0.4 0.1 -26 -38 21 -38 2 -16 0.6 0.3 14 2 32 -30 1 -16 0.1 0.1 25 7 -12 -32 1 -16 0.7 0.4 43 10 49 41 2 -16 
0.1 0.8 42 -38 41 2 2 -16 0.2 0.8 -11 -23 -3 -8 2 -16 0.5 0.1 -15 -38 38 -45 2 -16 0.3 0.7 -27 -49 -8 -18 2 -16 0.2 0.4 18 -34 40 -37 2 -16 0.5 0.7 0 -18 41 16 2 -16 0.1 0.1 28 -50 40 24 2 -16 0.5 0.4 18 -4 -10 -16 1 -16 0.1 0.7 -1 -38 23 -17 2 -16 0.5 0.5 -10 -15 12 -31 2 -16 0.5 0.1 1 -11 50 -27 1 -16 0.2 0.6 48 25 41 8 1 -16 0.3 0.8 -35 -42 -2 -3 2 -16 0.4 0.4 42 18 42 37 2 -16 0.7 0.6 9 -19 -2 -20 1 -16 0.6 0.3 -29 -33 14 -8 2 -16 0.2 0.8 5 -2 44 17 2 -16 0.2 0.5 38 -41 -4 -19 2 -16 0.8 0.6 48 -14 -31 -41 1 -16 0.9 0.5 2 -7 46 19 2 -16 0.4 0.7 11 -14 37 -19 2 -16 0.3 0.8 45 -33 41 -28 2 -16 0.3 0.3 11 -39 6 -24 2 -16 0.9 0.8 47 -27 -5 -19 1 -16 0.7 0.8 30 -23 48 26 2 -16 0.4 0.3 46 -17 28 -19 1 -16 0.3 0.9 -22 -23 -14 -37 1 -16 0.7 0.9 50 -24 -1 -15 1 -16 0.4 0.9 -5 -50 50 -16 2 -16 0.6 0.7 26 -23 50 -48 2 -16 0.4 0.1 49 3 15 -39 1 -16 0.9 0.4 29 23 -13 -33 1 -16 0.2 0.6 33 -25 13 -44 2 -16 0.9 0.6 24 -25 27 -5 1 -16 0.6 0.1 28 -42 21 -35 1 -16 0.4 0.3 1 -6 9 -12 1 -16 0.3 0.2 -9 -33 42 33 2 -16 0.7 0.4 -4 -46 17 -5 2 -16 0.7 0.9 21 -12 32 26 2 -16 0.2 0.4 -31 -46 49 12 2 -16 0.2 0.6 20 -44 46 -28 2 -16 0.1 0.1 3 -49 -6 -35 2 -16 0.7 0.5 37 11 -21 -38 1 -16 0.3 0.8 36 -42 -9 -25 1 -16 0.7 0.2 16 -6 18 17 2 -16 0.7 0.8 15 -37 35 -12 2 -16 0.6 0.4 19 -11 48 46 2 -16 0.3 0.6 46 -39 19 -29 2 -16 0.9 0.3 -16 -21 -26 -31 1 -16 0.4 0.3 11 -21 -6 -41 1 -16 0.7 0.2 -2 -22 38 -37 1 -16 0.7 0.4 44 -20 21 -22 1 -16 0.3 0.1 28 27 41 -26 1 -16 0.9 0.8 31 -46 -23 -26 1 -16 0.1 0.7 -1 -34 14 -45 2 -16 0.6 0.6 6 -46 23 -8 2 -16 0.9 0.8 15 -20 23 -40 1 -16 0.2 0.5 -1 -10 34 29 2 -16 0.9 0.8 -10 -14 30 -40 2 -16 0.1 0.6 0 -5 10 2 2 -16 0.8 0.2 3 -28 -5 -45 1 -16 0.6 0.9 20 -17 36 -32 2 -16 0.5 0.6 20 -47 47 -41 2 -16 0.6 0.6 18 -23 40 -10 2 -16 0.3 0.6 7 -29 -6 -24 1 -16 0.5 0.7 42 -2 41 -20 1 -16 0.7 0.8 26 -30 18 -27 1 -16 0.8 0.5 -11 -30 -31 -42 1 -16 0.1 0.9 19 12 35 7 2 -16 0.2 0.8 25 -8 37 -2 2 -16 0.5 0.9 16 -29 32 -42 2 -16 0.1 0.2 26 -29 20 -27 2 -16 0.9 0.9 39 34 42 16 2 -16 0.8 0.6 -2 -16 38 -22 2 -16 0.1 0.1 -12 -49 39 17 2 -16 0.5 0.2 44 -22 34 33 2 -16 0.5 0.8 37 -9 4 -31 1 -16 0.8 0.1 13 -21 44 10 2 -16 0.6 0.5 42 -37 40 33 2 -16 0.7 0.5 10 -8 26 -7 2 -16 0.3 0.6 30 0 38 1 2 -16 0.5 0.5 9 8 19 17 2 -16 0.9 0.5 8 -36 49 -15 2 -16 0.5 0.6 -15 -16 46 24 2 -16 0.2 0.6 21 15 31 -48 1 -16 0.9 0.2 -44 -47 32 -43 2 -16 0.2 0.2 14 1 40 2 2 -16 0.8 0.5 35 28 32 -11 1 -16 0.5 0.5 -38 -46 -31 -44 2 -16 0.4 0.1 -16 -41 18 -1 2 -16 0.4 0.6 28 -31 -14 -48 1 -16 0.7 0.1 -3 -41 -4 -41 1 -16 0.8 0.9 14 -21 31 -15 2 -16 0.5 0.6 40 -29 48 -15 2 -16 0.8 0.2 41 24 34 -28 1 -16 0.1 0.2 26 3 -9 -15 1 -16 0.1 0.8 18 -24 -3 -27 2 -16 0.9 0.8 33 -21 44 41 2 -16 0.8 0.2 45 -17 29 -16 1 -16 0.2 0.2 35 32 50 -18 1 -16 0.8 0.1 48 -28 36 -32 1 -16 0.8 0.2 35 -9 5 -25 1 -16 0.4 0.6 21 -5 15 -50 1 -16 0.5 0.7 50 16 37 -49 1 -16 0.8 0.9 -7 -24 18 -42 2 -16 0.8 0.8 45 -23 32 24 1 -16 0.3 0.7 -5 -44 45 42 2 -16 0.3 0.9 -35 -49 37 -2 2 -16 0.5 0.4 25 -21 26 -46 1 -16 0.7 0.3 20 -46 38 26 2 -16 0.5 0.8 16 -3 46 -13 2 -16 0.6 0.6 17 -22 48 18 2 -16 0.4 0.6 26 13 -36 -50 1 -16 0.4 0.6 40 23 41 -20 1 -16 0.2 0.6 -24 -50 -17 -23 2 -16 0.1 0.6 25 12 23 -37 1 -16 0.4 0.2 22 -22 49 -47 1 -16 0.6 0.2 -6 -42 -11 -32 1 -16 0.3 0.5 40 0 -5 -23 1 -16 0.3 0.7 16 5 24 -6 2 -16 0.7 0.7 36 21 46 -33 1 -16 0.9 0.3 12 -45 43 13 2 -16 0.7 0.2 -3 -27 32 0 2 -16 0.2 0.5 4 -23 8 0 2 -16 0.9 0.3 47 -18 48 7 1 -16 0.8 0.1 22 -23 30 -1 1 -16 0.4 0.2 20 -34 30 -40 1 -16 0.4 0.3 49 -27 -38 -46 1 -16 0.7 0.7 44 14 -7 -26 1 -16 0.3 0.6 50 -3 21 20 2 -16 
0.4 0.9 8 -5 -35 -46 1 -16 0.4 0.6 24 -45 -18 -29 1 -16 0.9 0.1 21 -14 20 3 1 -16 0.9 0.6 9 -15 -16 -27 1 -16 0.6 0.6 29 -44 40 -17 2 -16 0.2 0.8 4 -16 9 -8 2 -16 0.5 0.5 40 -19 37 -28 1 -16 0.2 0.3 41 -23 12 -21 1 -16 0.5 0.3 27 -8 9 -11 1 -16 0.9 0.8 27 -10 22 -37 1 -16 0.4 0.6 29 -43 17 5 2 -16 0.7 0.8 4 -35 42 22 2 -16 0.4 0.3 45 14 -11 -16 1 -16 0.2 0.7 11 -37 7 5 2 -16 0.7 0.3 17 -26 34 -9 2 -16 0.9 0.5 42 26 38 6 1 -16 0.2 0.1 8 -30 17 -24 1 -16 0.5 0.5 36 0 28 -41 1 -16 0.6 0.7 15 -23 32 2 2 -16 0.5 0.7 0 -42 -23 -44 1 -16 0.5 0.1 -11 -38 -24 -36 1 -16 0.4 0.9 42 33 -2 -28 1 -16 0.3 0.7 49 40 34 31 1 -16 0.8 0.1 45 -23 -37 -48 1 -16 0.5 0.6 7 5 11 -26 1 -16 0.6 0.8 -42 -47 21 9 2 -16 0.2 0.9 49 -23 -27 -31 1 -16 0.5 0.8 32 4 21 -15 1 -16 0.7 0.4 38 -30 3 2 1 -16 0.9 0.5 44 9 37 13 1 -16 0.4 0.4 50 -11 40 5 2 -16 0.3 0.9 25 -22 30 24 2 -16 0.3 0.1 -1 -9 -30 -38 1 -16 0.9 0.7 18 -21 5 -18 1 -16 0.7 0.8 20 12 5 -25 1 -16 0.5 0.2 15 -37 8 -34 1 -16 0.6 0.3 41 9 47 -10 1 -16 0.1 0.2 22 -5 42 -25 1 -16 0.9 0.7 8 -7 10 -37 1 -16 0.3 0.3 -32 -43 43 -7 2 -16 0.7 0.4 30 -7 24 -40 1 -16 0.4 0.6 44 -43 -37 -40 1 -16 0.9 0.7 48 18 29 -7 1 -16 0.3 0.5 38 2 10 4 1 -16 0.5 0.8 13 -45 4 -9 2 -16 0.1 0.6 43 -33 23 16 2 -16 0.9 0.3 15 -23 22 15 2 -16 0.1 0.7 34 -48 -16 -20 2 -16 0.5 0.6 -6 -14 48 -35 2 -16 0.1 0.2 -36 -49 -2 -29 2 -16 0.2 0.9 9 -2 -8 -19 1 -16 0.7 0.1 8 -40 23 -30 1 -16 0.9 0.9 49 -44 32 -48 1 -16 0.9 0.8 -24 -32 12 -24 2 -16 0.9 0.4 29 17 28 -44 1 -16 0.5 0.6 1 -50 48 40 2 -16 0.2 0.6 27 -36 43 14 2 -16 0.3 0.7 -28 -46 42 12 2 -16 0.4 0.2 -15 -37 39 22 2 -16 0.1 0.8 17 -24 -39 -50 1 -16 0.1 0.6 38 -16 42 -49 2 -16 0.3 0.8 39 24 -1 -43 1 -16 0.9 0.6 29 -13 32 -19 1 -16 0.4 0.4 37 -9 8 -23 1 -16 0.7 0.5 38 -17 12 8 1 -16 0.8 0.4 8 2 -21 -36 1 -16 0.8 0.5 9 8 -7 -42 1 -16 0.1 0.1 -1 -49 -42 -46 1 -16 0.7 0.5 47 39 2 -1 1 -16 0.6 0.6 48 17 28 -48 1 -17 0.6 0.3 -6 -13 46 -19 2 -17 0.4 0.7 32 -2 -15 -40 1 -17 0.3 0.9 33 -24 44 41 2 -17 0.4 0.1 13 -42 -11 -24 1 -17 0.5 0.6 25 -18 38 3 2 -17 0.7 0.5 45 -1 31 -35 1 -17 0.3 0.2 24 -19 -14 -21 1 -17 0.1 0.4 -27 -44 -6 -50 2 -17 0.4 0.6 30 -13 17 -48 1 -17 0.5 0.1 35 28 1 -5 1 -17 0.2 0.2 18 -16 16 -15 1 -17 0.8 0.2 12 -24 -11 -41 1 -17 0.5 0.9 38 -41 27 11 2 -17 0.3 0.3 23 -3 -26 -43 1 -17 0.3 0.8 18 -46 41 22 2 -17 0.4 0.5 -26 -33 18 14 2 -17 0.7 0.7 -9 -23 38 -14 2 -17 0.7 0.7 34 32 16 -7 1 -17 0.4 0.2 17 -44 7 -26 1 -17 0.4 0.6 -5 -25 -33 -46 1 -17 0.1 0.8 47 32 39 -28 1 -17 0.9 0.5 30 -36 20 -9 1 -17 0.3 0.6 2 -4 33 15 2 -17 0.8 0.2 -27 -44 38 18 2 -17 0.2 0.4 3 -47 50 -31 2 -17 0.2 0.2 -20 -41 11 -2 2 -17 0.3 0.3 24 -18 39 -18 2 -17 0.4 0.1 -29 -47 47 31 2 -17 0.7 0.6 -12 -13 18 -44 2 -17 0.9 0.2 33 -38 49 41 2 -17 0.7 0.5 -3 -8 -33 -42 1 -17 0.6 0.3 47 -34 15 -1 1 -17 0.5 0.9 -22 -36 38 -23 2 -17 0.6 0.2 13 7 -16 -17 1 -17 0.4 0.7 -27 -29 16 -4 2 -17 0.2 0.8 49 -7 -3 -21 1 -17 0.5 0.6 3 -31 10 -33 2 -17 0.3 0.4 -23 -38 49 37 2 -17 0.2 0.6 45 41 24 -28 1 -17 0.8 0.3 45 36 25 -25 1 -17 0.8 0.1 35 11 -7 -13 1 -17 0.3 0.7 -12 -39 11 -7 2 -17 0.9 0.3 45 14 -17 -47 1 -17 0.4 0.4 3 -2 -5 -14 1 -17 0.2 0.3 19 -28 5 -28 2 -17 0.2 0.2 42 -16 47 33 2 -17 0.7 0.7 -34 -42 37 -24 2 -17 0.4 0.1 28 -5 6 -20 1 -17 0.4 0.8 -21 -23 19 -16 2 -17 0.1 0.8 11 -27 27 -14 2 -17 0.8 0.5 49 1 5 2 1 -17 0.9 0.9 -20 -36 44 25 2 -17 0.8 0.2 -4 -41 39 -48 2 -17 0.2 0.1 42 33 29 -29 1 -17 0.9 0.8 46 13 -5 -35 1 -17 0.8 0.8 18 -17 47 -11 2 -17 0.4 0.4 42 16 45 30 1 -17 0.3 0.6 42 30 15 -46 1 -17 0.3 0.2 47 41 13 -11 1 -17 0.2 0.4 43 25 46 16 2 -17 0.1 0.5 0 -45 0 -46 2 
-17 0.8 0.2 49 -31 48 -4 1 -17 0.3 0.3 -9 -20 41 26 2 -17 0.2 0.1 30 -28 -9 -42 1 -17 0.2 0.2 -14 -17 46 -37 2 -17 0.6 0.2 37 22 48 16 1 -17 0.9 0.7 17 -12 -10 -22 1 -17 0.4 0.8 31 10 30 15 2 -17 0.8 0.7 -6 -32 44 3 2 -17 0.1 0.3 34 33 49 -2 1 -17 0.6 0.3 -42 -43 40 4 2 -17 0.2 0.6 27 -1 32 16 2 -17 0.7 0.1 -2 -17 -23 -28 1 -17 0.9 0.7 -45 -50 32 -35 2 -17 0.1 0.8 31 -36 42 -15 2 -17 0.4 0.9 34 -48 45 -22 2 -17 0.1 0.6 27 -6 15 -27 2 -17 0.2 0.3 12 0 27 0 2 -17 0.4 0.3 33 28 45 -43 1 -17 0.7 0.9 11 -50 30 26 2 -17 0.8 0.6 -9 -38 23 -30 2 -17 0.3 0.3 22 -38 19 -45 1 -17 0.9 0.5 43 11 29 -2 1 -17 0.9 0.2 -46 -49 -37 -49 1 -17 0.9 0.5 -1 -38 23 -11 2 -17 0.3 0.8 14 2 44 22 2 -17 0.8 0.4 24 -1 40 -36 1 -17 0.1 0.6 4 -13 18 -22 2 -17 0.1 0.5 22 15 49 44 2 -17 0.4 0.4 17 12 13 -28 1 -17 0.8 0.8 36 -26 19 -5 1 -17 0.6 0.8 12 -7 13 -14 2 -17 0.3 0.8 8 -26 -2 -32 2 -17 0.4 0.6 9 -45 10 -25 2 -17 0.2 0.2 32 -14 -44 -47 1 -17 0.4 0.1 -22 -38 -15 -39 1 -17 0.3 0.7 -40 -47 43 33 2 -17 0.5 0.5 6 -38 -9 -37 1 -17 0.4 0.2 -45 -48 -4 -26 2 -17 0.8 0.4 8 -50 40 -36 1 -17 0.5 0.2 15 -36 41 -3 2 -17 0.5 0.8 38 -33 35 -41 2 -17 0.3 0.1 -17 -35 -16 -46 1 -17 0.8 0.2 22 -17 -1 -31 1 -17 0.9 0.1 -14 -42 37 -22 2 -17 0.6 0.8 -25 -27 41 -9 2 -17 0.5 0.1 -6 -7 35 1 2 -17 0.2 0.4 19 -30 50 -32 2 -17 0.3 0.6 -7 -34 -5 -13 2 -17 0.9 0.5 35 14 33 0 1 -17 0.7 0.3 48 -35 32 24 2 -17 0.1 0.5 21 -4 8 -35 1 -17 0.7 0.6 39 -40 21 -30 1 -17 0.7 0.4 -40 -49 12 -8 2 -17 0.3 0.9 48 16 39 -39 2 -17 0.9 0.5 36 -42 42 6 1 -17 0.8 0.1 14 -10 42 37 2 -17 0.4 0.7 17 -31 42 -9 2 -17 0.8 0.1 43 8 49 -37 1 -17 0.1 0.2 30 -44 33 -5 2 -17 0.8 0.1 21 -49 9 -23 1 -17 0.3 0.6 38 29 50 36 2 -17 0.2 0.4 10 -15 43 13 2 -17 0.1 0.8 15 -9 33 17 2 -17 0.2 0.1 39 -32 42 37 2 -17 0.6 0.1 45 -47 26 -30 1 -17 0.1 0.9 36 10 38 11 2 -17 0.8 0.2 7 -35 38 -20 2 -17 0.1 0.8 27 -19 -16 -20 2 -17 0.2 0.9 0 -9 33 -32 2 -17 0.1 0.9 27 -50 9 -21 2 -17 0.1 0.7 -31 -47 41 -31 2 -17 0.5 0.1 -1 -30 49 -27 2 -17 0.9 0.5 -9 -43 -24 -47 1 -17 0.5 0.2 -8 -35 36 8 2 -17 0.1 0.4 20 -40 -12 -27 1 -17 0.6 0.9 34 15 49 -30 2 -17 0.6 0.9 -4 -6 48 14 2 -17 0.9 0.4 26 -20 -33 -39 1 -17 0.2 0.3 40 -43 -20 -47 1 -17 0.5 0.6 -15 -22 43 30 2 -17 0.8 0.2 19 2 -21 -26 1 -17 0.7 0.9 33 29 24 -35 1 -17 0.5 0.1 48 37 19 -36 1 -17 0.2 0.2 35 -24 -2 -27 1 -17 0.1 0.1 -19 -32 33 8 2 -17 0.4 0.1 -12 -36 21 18 2 -17 0.9 0.7 -13 -25 33 -27 2 -17 0.2 0.1 40 -19 -20 -26 1 -17 0.2 0.9 23 -8 -8 -13 1 -17 0.8 0.7 24 7 -31 -45 1 -17 0.8 0.1 15 -28 -23 -49 1 -17 0.1 0.5 28 -22 3 -22 2 -17 0.8 0.4 22 -40 43 -4 2 -17 0.7 0.2 28 22 43 4 1 -17 0.3 0.9 36 -12 19 -22 2 -17 0.7 0.7 28 -47 -9 -11 1 -17 0.2 0.1 45 15 5 -36 1 -17 0.2 0.1 -32 -41 38 -39 2 -17 0.8 0.9 34 -13 -29 -47 1 -17 0.7 0.7 31 -28 45 -2 2 -17 0.8 0.2 -3 -13 49 -21 2 -17 0.3 0.3 38 9 45 -30 1 -17 0.5 0.3 -24 -44 38 -12 2 -17 0.8 0.8 23 -29 48 -9 2 -17 0.7 0.4 -4 -34 3 -8 1 -17 0.1 0.1 5 -42 30 -44 2 -17 0.5 0.8 -34 -43 5 -32 2 -17 0.9 0.9 31 -37 -36 -38 1 -17 0.9 0.7 -11 -38 33 -41 2 -17 0.2 0.7 31 -20 3 -24 1 -17 0.2 0.5 -26 -41 14 9 2 -17 0.8 0.8 -43 -49 21 -10 2 -17 0.6 0.7 15 11 24 5 2 -17 0.8 0.4 39 -5 27 -19 1 -17 0.8 0.3 22 -31 49 -8 1 -17 0.8 0.8 18 7 33 6 2 -17 0.4 0.8 44 38 30 -26 1 -17 0.6 0.2 41 36 50 0 1 -17 0.9 0.2 37 -27 47 -7 1 -17 0.9 0.9 35 -41 -13 -44 1 -17 0.8 0.8 3 -17 21 -22 1 -17 0.3 0.4 -4 -47 -9 -21 2 -17 0.9 0.3 23 -15 0 -19 1 -17 0.1 0.7 27 -4 -23 -48 1 -17 0.3 0.8 46 -44 45 -16 2 -17 0.2 0.5 -22 -33 37 -45 2 -17 0.7 0.6 -14 -15 44 -49 2 -17 0.2 0.4 23 -10 25 -41 2 -17 0.9 0.1 -4 -20 12 6 2 -17 0.7 0.4 28 -6 36 18 
2 -17 0.2 0.1 20 -22 48 -48 1 -17 0.8 0.8 17 -26 30 15 2 -17 0.8 0.9 37 -16 -10 -19 1 -17 0.4 0.5 -9 -40 21 -6 2 -17 0.1 0.9 41 6 48 35 2 -17 0.9 0.1 -12 -40 17 4 2 -17 0.7 0.8 34 -15 30 22 2 -17 0.8 0.4 25 10 4 -35 1 -17 0.8 0.1 9 -49 -16 -32 1 -17 0.7 0.7 10 -48 45 -13 2 -17 0.8 0.8 26 -20 8 -34 1 -17 0.8 0.1 -8 -20 2 -17 1 -17 0.3 0.5 -12 -50 45 -27 2 -17 0.7 0.8 -36 -37 -40 -46 1 -17 0.1 0.5 -14 -50 37 33 2 -17 0.2 0.7 5 -46 18 -4 2 -17 0.6 0.5 32 -45 26 24 2 -17 0.2 0.8 -13 -25 36 -36 2 -17 0.6 0.8 -20 -39 32 -19 2 -17 0.2 0.2 41 -34 -35 -46 1 -17 0.6 0.5 7 -35 16 13 2 -17 0.2 0.3 47 32 22 -35 1 -17 0.1 0.3 9 -43 36 -14 2 -17 0.9 0.4 25 -18 48 -5 1 -17 0.6 0.3 3 1 -8 -11 1 -17 0.9 0.1 28 3 -17 -45 1 -17 0.4 0.7 22 -4 -35 -38 1 -17 0.7 0.6 45 -2 -4 -49 1 -17 0.3 0.1 17 -20 49 31 2 -17 0.1 0.6 16 12 16 -15 1 -17 0.4 0.2 -2 -36 18 -30 2 -17 0.7 0.5 11 -29 19 -14 2 -17 0.1 0.4 -26 -33 -11 -50 2 -17 0.1 0.8 -35 -47 33 -36 2 -17 0.7 0.3 49 -45 27 -35 1 -17 0.9 0.6 40 -38 36 -15 1 -17 0.1 0.4 32 -16 -18 -22 1 -17 0.1 0.3 27 -23 47 30 2 -17 0.4 0.8 42 12 34 8 2 -17 0.4 0.1 10 -28 -9 -23 1 -17 0.5 0.4 -9 -48 -20 -39 2 -17 0.4 0.4 -20 -50 37 -14 2 -17 0.7 0.5 18 4 15 -15 1 -17 0.4 0.7 -25 -42 34 12 2 -17 0.9 0.7 48 18 22 9 1 -17 0.7 0.7 44 36 1 -34 1 -17 0.1 0.5 5 -17 45 -40 2 -17 0.5 0.3 22 -38 12 4 2 -17 0.1 0.8 39 -19 3 -48 2 -17 0.3 0.5 47 -49 -17 -50 1 -17 0.9 0.1 -20 -38 -2 -16 2 -17 0.2 0.1 42 4 26 -26 1 -17 0.9 0.7 50 -24 19 -38 1 -17 0.9 0.2 49 13 -8 -46 1 -17 0.3 0.1 45 -13 -35 -38 1 -17 0.3 0.4 -5 -30 13 -35 2 -17 0.9 0.4 27 -31 43 36 2 -17 0.4 0.5 33 -22 -29 -33 1 -17 0.6 0.2 40 -40 8 -12 1 -17 0.1 0.3 43 -22 0 -39 1 -17 0.1 0.5 13 -40 18 -34 2 -17 0.5 0.8 -1 -26 41 -46 2 -17 0.5 0.9 -2 -49 25 5 2 -17 0.4 0.8 -32 -36 40 -3 2 -17 0.2 0.8 32 -17 37 3 2 -17 0.7 0.4 18 16 44 -50 1 -17 0.5 0.2 -5 -36 -20 -27 1 -17 0.8 0.6 -12 -17 20 -30 2 -17 0.5 0.1 -36 -41 8 7 2 -17 0.3 0.1 43 -50 13 10 2 -17 0.4 0.5 -16 -39 34 13 2 -17 0.9 0.8 37 8 28 8 2 -17 0.3 0.9 1 -8 8 -13 2 -17 0.7 0.8 4 -31 5 -16 2 -17 0.8 0.9 -17 -36 35 -22 2 -17 0.7 0.8 -23 -25 6 -28 2 -17 0.5 0.2 43 -34 26 -2 2 -17 0.7 0.8 14 -32 17 2 2 -17 0.7 0.9 8 -33 4 -32 2 -17 0.9 0.5 23 -43 -1 -6 1 -17 0.9 0.7 -10 -50 32 3 2 -17 0.7 0.3 6 -30 -12 -19 1 -17 0.2 0.6 48 -4 30 -15 2 -17 0.6 0.1 5 -8 37 14 2 -17 0.7 0.8 17 -4 25 -49 2 -17 0.9 0.4 -10 -47 -5 -46 1 -17 0.6 0.5 45 -4 -26 -38 1 -17 0.3 0.8 -13 -44 31 6 2 -17 0.4 0.9 4 2 -10 -35 1 -17 0.5 0.9 42 16 44 27 2 -17 0.4 0.1 50 0 -27 -38 1 -17 0.7 0.1 37 26 9 -33 1 -17 0.2 0.9 47 -14 36 35 2 -17 0.7 0.3 37 15 47 -32 1 -17 0.1 0.7 -6 -19 36 27 2 -17 0.9 0.1 46 17 2 -47 1 -17 0.4 0.6 8 -12 -28 -45 1 -17 0.4 0.6 8 -44 38 -24 2 -17 0.3 0.7 19 -5 -18 -29 1 -17 0.2 0.8 22 -9 23 -20 2 -17 0.1 0.7 21 -13 23 0 2 -17 0.2 0.4 -6 -8 34 -41 2 -17 0.6 0.6 10 -32 45 28 2 -17 0.1 0.7 39 23 49 16 2 -17 0.1 0.7 29 7 30 -27 2 -17 0.8 0.3 6 -46 -8 -29 1 -17 0.2 0.7 46 -44 28 -26 2 -17 0.7 0.6 19 -20 13 -49 1 -17 0.3 0.2 28 21 -22 -48 1 -18 0.5 0.5 21 -3 35 -17 2 -18 0.4 0.7 9 -38 4 -42 2 -18 0.3 0.4 -13 -42 35 13 2 -18 0.8 0.8 30 -41 32 -47 2 -18 0.1 0.8 34 -4 -20 -35 1 -18 0.4 0.8 14 -25 8 -43 2 -18 0.5 0.6 42 27 21 -38 1 -18 0.5 0.2 -8 -41 49 -17 2 -18 0.7 0.4 45 4 -11 -16 1 -18 0.5 0.7 1 -2 -7 -41 1 -18 0.6 0.3 -1 -35 10 1 2 -18 0.4 0.2 0 -12 -21 -31 2 -18 0.2 0.7 0 -21 -7 -12 2 -18 0.7 0.3 27 -16 -15 -19 1 -18 0.2 0.3 9 -10 25 -22 2 -18 0.5 0.9 -21 -29 48 11 2 -18 0.2 0.1 38 -13 -23 -46 1 -18 0.3 0.5 23 9 15 -47 1 -18 0.9 0.8 9 -1 39 4 2 -18 0.1 0.4 17 9 31 -28 1 -18 0.4 0.6 3 -15 27 -23 2 -18 0.3 
0.6 21 -6 48 -10 2 -18 0.1 0.8 46 14 42 19 2 -18 0.6 0.4 47 -19 46 -34 1 -18 0.1 0.3 -32 -50 -9 -11 2 -18 0.2 0.2 9 -38 -32 -33 1 -18 0.9 0.4 -10 -23 25 -40 2 -18 0.4 0.1 -24 -31 -33 -39 2 -18 0.6 0.8 9 -41 -8 -50 1 -18 0.2 0.5 50 38 22 -24 1 -18 0.1 0.3 -10 -44 22 19 2 -18 0.9 0.4 35 17 28 13 1 -18 0.4 0.8 22 5 30 11 2 -18 0.1 0.7 36 9 8 4 1 -18 0.3 0.5 -7 -14 11 0 2 -18 0.9 0.7 33 -46 21 -32 1 -18 0.8 0.2 -16 -24 30 0 2 -18 0.8 0.4 50 8 19 -8 1 -18 0.5 0.7 21 -10 49 10 2 -18 0.6 0.7 48 14 49 9 1 -18 0.3 0.9 -30 -47 -4 -39 2 -18 0.3 0.4 -22 -28 35 -28 2 -18 0.1 0.4 27 11 -25 -29 1 -18 0.9 0.1 20 17 35 6 1 -18 0.5 0.4 44 -16 -32 -46 1 -18 0.7 0.6 35 -17 -30 -33 1 -18 0.3 0.6 36 24 25 -34 1 -18 0.4 0.9 50 -26 8 -17 2 -18 0.9 0.2 20 11 30 -41 1 -18 0.5 0.6 -14 -49 22 -42 2 -18 0.9 0.9 36 -34 21 -23 1 -18 0.8 0.7 29 -3 23 17 2 -18 0.5 0.8 35 14 -37 -49 1 -18 0.2 0.6 35 -1 44 -40 2 -18 0.9 0.8 47 37 -28 -42 1 -18 0.6 0.5 -40 -43 39 -27 2 -18 0.9 0.1 26 8 44 12 1 -18 0.3 0.7 39 -17 -2 -27 1 -18 0.7 0.6 -30 -40 39 -28 2 -18 0.2 0.9 32 -17 47 -43 2 -18 0.7 0.7 17 -47 21 -14 2 -18 0.6 0.7 -34 -40 47 -15 2 -18 0.9 0.1 20 16 -17 -32 1 -18 0.2 0.4 47 39 47 25 1 -18 0.5 0.4 9 -50 10 -31 1 -18 0.2 0.8 17 1 45 10 2 -18 0.4 0.5 -28 -33 33 -19 2 -18 0.9 0.8 19 5 17 -7 1 -18 0.6 0.5 28 25 49 -2 1 -18 0.4 0.1 33 29 -10 -46 1 -18 0.4 0.9 -15 -31 44 26 2 -18 0.1 0.1 -26 -28 11 -40 1 -18 0.8 0.3 41 -33 41 8 2 -18 0.4 0.7 1 -35 13 -46 2 -18 0.8 0.2 31 -36 19 -23 1 -18 0.4 0.9 24 -30 21 -45 2 -18 0.2 0.1 41 -26 7 -46 1 -18 0.9 0.9 -34 -39 29 -46 2 -18 0.9 0.9 12 -39 13 -32 1 -18 0.4 0.5 39 -18 5 -34 1 -18 0.2 0.2 8 -27 -38 -42 1 -18 0.6 0.3 36 -16 12 -38 1 -18 0.2 0.5 10 -48 24 20 2 -18 0.7 0.2 34 28 49 39 2 -18 0.3 0.5 44 -47 -20 -47 1 -18 0.3 0.5 21 3 16 -6 1 -18 0.6 0.1 21 19 -18 -47 1 -18 0.7 0.7 47 -23 29 -20 2 -18 0.4 0.6 25 18 16 -40 1 -18 0.6 0.4 -39 -50 -11 -13 2 -18 0.4 0.9 28 -35 12 -49 2 -18 0.1 0.4 14 -40 32 -28 2 -18 0.5 0.6 35 -17 36 -40 1 -18 0.6 0.6 31 15 28 24 1 -18 0.5 0.2 43 -21 -19 -36 1 -18 0.7 0.9 30 14 29 -41 1 -18 0.8 0.3 21 -42 48 35 2 -18 0.9 0.5 38 32 32 -31 1 -18 0.9 0.6 47 -34 35 20 1 -18 0.4 0.2 -25 -48 -40 -47 1 -18 0.2 0.5 -24 -28 46 44 2 -18 0.4 0.6 46 -35 18 -26 2 -18 0.7 0.4 11 -20 30 27 2 -18 0.4 0.3 37 -33 -3 -36 1 -18 0.7 0.2 -18 -31 40 -1 2 -18 0.9 0.4 17 -40 -26 -43 1 -18 0.1 0.6 12 4 -5 -35 1 -18 0.1 0.8 35 24 42 -3 2 -18 0.5 0.9 19 -2 5 -29 2 -18 0.8 0.9 42 -19 23 9 1 -18 0.3 0.9 37 -15 -6 -11 1 -18 0.4 0.5 46 36 -17 -40 1 -18 0.1 0.9 39 -23 38 -50 2 -18 0.3 0.3 24 18 -1 -19 1 -18 0.5 0.3 22 10 6 -50 1 -18 0.7 0.8 14 -41 25 -30 2 -18 0.8 0.2 47 42 41 29 1 -18 0.3 0.1 17 -2 47 -5 1 -18 0.6 0.9 50 37 39 -29 1 -18 0.9 0.1 14 11 6 -38 1 -18 0.8 0.3 29 -20 15 5 1 -18 0.6 0.1 -7 -33 47 -27 2 -18 0.4 0.6 33 -24 40 -6 2 -18 0.7 0.3 -35 -40 48 -36 2 -18 0.1 0.5 32 16 24 -20 1 -18 0.7 0.7 47 -47 32 30 2 -18 0.8 0.9 49 22 38 -48 1 -18 0.2 0.3 38 -27 6 -24 2 -18 0.5 0.3 21 -25 41 -16 1 -18 0.6 0.9 26 -10 32 -48 2 -18 0.7 0.5 -16 -21 45 39 2 -18 0.2 0.3 2 -30 29 -8 2 -18 0.5 0.3 16 5 40 6 2 -18 0.6 0.3 19 -18 21 -29 1 -18 0.8 0.9 48 -1 13 8 1 -18 0.9 0.3 19 18 -5 -46 1 -18 0.1 0.7 30 -15 19 -8 2 -18 0.3 0.5 -28 -50 -16 -47 1 -18 0.1 0.1 50 -44 35 12 2 -18 0.1 0.8 20 -43 35 26 2 -18 0.3 0.4 33 -37 23 -25 2 -18 0.7 0.5 8 2 -8 -37 1 -18 0.3 0.2 3 -15 -10 -39 1 -18 0.4 0.6 -15 -50 19 -13 2 -18 0.3 0.3 18 12 43 21 2 -18 0.7 0.5 40 38 11 -7 1 -18 0.9 0.4 16 -34 -16 -36 1 -18 0.2 0.8 44 -32 -38 -48 1 -18 0.7 0.5 50 26 42 39 2 -18 0.4 0.5 37 -18 45 -4 2 -18 0.3 0.8 22 -40 17 -12 2 -18 0.5 
0.3 5 -11 36 -21 2 -18 0.5 0.3 2 -42 38 23 2 -18 0.7 0.8 -11 -39 33 4 2 -18 0.4 0.4 39 11 -28 -37 1 -18 0.5 0.1 -12 -36 -41 -49 1 -18 0.8 0.3 -5 -13 -29 -32 1 -18 0.2 0.8 12 4 42 0 2 -18 0.8 0.1 3 -14 6 -19 1 -18 0.1 0.4 -8 -40 -22 -48 2 -18 0.3 0.4 36 -27 20 -46 1 -18 0.3 0.2 41 6 42 39 2 -18 0.2 0.4 19 -7 9 -27 1 -18 0.9 0.6 31 5 16 -46 1 -18 0.3 0.5 -9 -18 10 -38 2 -18 0.8 0.9 34 -4 34 11 2 -18 0.3 0.3 38 14 14 -48 1 -18 0.3 0.7 28 3 25 7 2 -18 0.2 0.8 11 -17 12 -27 2 -18 0.1 0.2 1 -32 -22 -24 2 -18 0.4 0.2 -5 -30 19 -37 1 -18 0.9 0.8 -20 -22 -23 -25 1 -18 0.5 0.1 8 -42 19 -25 1 -18 0.1 0.7 41 31 13 -42 1 -18 0.5 0.5 -6 -32 -17 -39 1 -18 0.4 0.7 36 -45 44 33 2 -18 0.6 0.6 19 -41 -11 -12 1 -18 0.4 0.5 8 -50 24 -8 2 -18 0.5 0.8 5 -12 5 -9 2 -18 0.5 0.4 48 -5 26 -23 1 -18 0.3 0.9 19 12 10 -30 1 -18 0.6 0.8 12 -27 45 42 2 -18 0.4 0.3 46 5 39 -20 1 -18 0.2 0.7 33 25 31 27 1 -18 0.5 0.5 17 -7 3 -5 2 -18 0.9 0.8 13 -36 43 14 2 -18 0.6 0.7 20 -8 47 -34 2 -18 0.6 0.3 -5 -16 -23 -38 1 -18 0.2 0.7 25 -29 29 18 2 -18 0.2 0.7 -20 -45 44 -40 2 -18 0.9 0.2 50 -38 21 -46 1 -18 0.2 0.2 3 -27 35 7 2 -18 0.8 0.1 43 -34 46 13 2 -18 0.1 0.4 27 -18 -33 -49 1 -18 0.7 0.5 21 -4 41 -2 2 -18 0.2 0.1 46 -6 42 -16 1 -18 0.7 0.6 27 12 0 -21 1 -18 0.3 0.9 38 -47 41 -48 2 -18 0.3 0.4 50 21 45 18 2 -18 0.4 0.6 -17 -49 -9 -44 2 -18 0.5 0.4 37 -12 30 -28 1 -18 0.1 0.3 35 -10 -9 -28 1 -18 0.7 0.5 5 -27 -25 -38 1 -18 0.7 0.8 48 -11 27 -11 1 -18 0.7 0.7 20 -7 41 -22 2 -18 0.3 0.6 9 -38 -14 -16 2 -18 0.6 0.8 12 -25 -11 -46 1 -18 0.1 0.4 38 -4 25 -20 2 -18 0.5 0.7 34 18 -12 -34 1 -18 0.1 0.1 -8 -21 40 -36 2 -18 0.5 0.8 21 7 18 4 2 -18 0.9 0.7 35 22 21 -21 1 -18 0.6 0.2 49 33 35 -8 1 -18 0.5 0.4 18 -7 35 -2 2 -18 0.8 0.3 36 -41 41 -40 1 -18 0.2 0.2 5 -47 -8 -9 2 -18 0.3 0.7 -20 -46 31 19 2 -18 0.1 0.7 -14 -50 49 37 2 -18 0.6 0.6 9 -41 50 16 2 -18 0.7 0.6 7 -31 49 -19 2 -18 0.8 0.2 -18 -29 20 -27 2 -18 0.1 0.3 16 -4 -8 -41 1 -18 0.2 0.2 45 -29 43 19 2 -18 0.4 0.3 6 -22 50 7 2 -18 0.8 0.1 -33 -44 -7 -23 2 -18 0.3 0.2 49 14 -33 -47 1 -18 0.1 0.1 0 -30 32 27 2 -18 0.4 0.6 36 -45 38 -32 1 -18 0.4 0.9 42 25 50 -34 2 -18 0.3 0.2 43 5 20 -45 1 -18 0.1 0.2 -8 -30 45 27 2 -18 0.5 0.4 -10 -27 25 -22 2 -18 0.2 0.6 46 41 34 -29 1 -18 0.6 0.9 13 4 -12 -27 1 -18 0.8 0.4 6 -8 -29 -48 1 -18 0.3 0.9 26 -45 -34 -35 1 -18 0.8 0.4 48 -10 -26 -48 1 -18 0.6 0.1 1 -8 5 -12 2 -18 0.8 0.1 -47 -48 43 37 2 -18 0.9 0.7 27 -17 38 3 1 -18 0.5 0.2 44 8 40 -42 1 -18 0.1 0.7 30 28 33 26 2 -18 0.7 0.9 -5 -12 -4 -28 1 -18 0.3 0.3 43 10 11 -5 1 -18 0.4 0.9 41 -33 25 -48 2 -18 0.2 0.9 32 13 21 -31 1 -18 0.6 0.4 -28 -39 27 16 2 -18 0.5 0.3 -4 -38 40 12 2 -18 0.2 0.5 -3 -31 5 -39 2 -18 0.6 0.2 -18 -32 17 -11 2 -18 0.5 0.1 6 -43 18 -47 1 -18 0.6 0.5 6 -41 35 16 2 -18 0.3 0.4 24 -24 10 -6 2 -18 0.8 0.8 -14 -27 7 -14 2 -18 0.2 0.7 30 -47 5 -13 2 -18 0.8 0.5 -25 -49 23 -9 2 -18 0.2 0.1 39 18 22 -38 1 -18 0.2 0.7 -14 -31 37 -39 2 -18 0.3 0.6 46 -6 8 1 2 -18 0.6 0.1 23 -26 34 5 1 -18 0.5 0.3 49 36 40 -43 1 -18 0.5 0.2 45 18 42 40 2 -18 0.7 0.4 0 -33 20 -12 2 -18 0.4 0.3 46 39 28 -44 1 -18 0.5 0.5 45 -10 30 10 2 -18 0.2 0.3 29 -25 19 -16 2 -18 0.1 0.5 50 -5 43 -37 2 -18 0.9 0.5 30 25 -10 -34 1 -18 0.2 0.8 39 -26 -26 -34 1 -18 0.1 0.1 27 -8 19 0 1 -18 0.5 0.8 50 40 38 34 2 -18 0.7 0.8 9 -6 35 -31 2 -18 0.5 0.9 -47 -48 43 -48 2 -18 0.1 0.6 23 5 -11 -42 1 -18 0.8 0.1 11 -25 34 18 2 -18 0.7 0.2 45 -43 21 -39 1 -18 0.5 0.8 47 26 37 21 2 -18 0.3 0.7 50 43 23 -30 1 -18 0.9 0.7 30 -34 -30 -37 1 -18 0.2 0.6 46 -30 -12 -39 1 -18 0.3 0.4 -23 -47 -14 -36 1 -18 0.4 0.5 -20 -50 11 -45 
2 -18 0.4 0.4 33 -31 -1 -3 1 -18 0.5 0.5 25 -29 17 11 2 -18 0.3 0.6 21 -42 46 27 2 -18 0.9 0.5 -16 -25 -23 -26 2 -18 0.9 0.8 8 -49 -5 -24 1 -18 0.2 0.3 9 -50 0 -14 2 -18 0.6 0.2 24 -13 39 -7 1 -18 0.8 0.2 -3 -39 35 2 2 -18 0.9 0.7 44 38 41 -21 1 -18 0.8 0.6 26 -21 14 -28 1 -18 0.9 0.7 38 -9 45 -28 1 -18 0.3 0.9 49 20 26 -1 1 -18 0.2 0.9 36 26 -20 -29 1 -18 0.3 0.5 -31 -32 17 4 2 -18 0.6 0.3 34 -26 50 44 2 -18 0.8 0.5 35 18 41 7 1 -18 0.9 0.8 -21 -24 10 -33 2 -18 0.8 0.5 38 -41 49 28 2 -19 0.5 0.5 49 -4 -7 -50 1 -19 0.6 0.5 3 -17 45 -39 2 -19 0.2 0.9 19 18 27 8 1 -19 0.9 0.2 50 20 -10 -19 1 -19 0.3 0.6 42 9 13 -40 1 -19 0.6 0.4 28 -14 18 6 2 -19 0.3 0.3 9 -31 -11 -33 1 -19 0.6 0.8 45 21 -18 -50 1 -19 0.7 0.2 50 0 -21 -38 1 -19 0.7 0.2 12 -41 -14 -37 1 -19 0.4 0.6 31 15 -6 -18 1 -19 0.2 0.4 32 -44 47 20 2 -19 0.8 0.5 21 -15 3 -35 1 -19 0.2 0.5 21 4 20 -48 1 -19 0.1 0.5 28 -47 -16 -18 2 -19 0.9 0.5 40 29 33 -26 1 -19 0.7 0.1 44 36 8 -3 1 -19 0.7 0.6 4 -34 18 -29 2 -19 0.3 0.7 16 12 -32 -49 1 -19 0.8 0.3 -23 -35 24 -20 2 -19 0.5 0.1 -20 -30 37 -3 2 -19 0.8 0.9 21 -48 28 6 2 -19 0.7 0.7 21 12 38 -34 2 -19 0.9 0.6 44 -19 44 -49 1 -19 0.2 0.9 6 -8 21 10 2 -19 0.7 0.7 23 -21 50 -43 1 -19 0.1 0.2 -17 -35 16 -7 2 -19 0.5 0.2 14 -45 -30 -48 1 -19 0.4 0.3 21 -24 -21 -22 1 -19 0.3 0.1 13 -46 9 -30 1 -19 0.3 0.4 -22 -38 -25 -41 2 -19 0.7 0.9 48 3 25 12 2 -19 0.1 0.3 50 43 29 -37 1 -19 0.8 0.4 16 8 -33 -40 1 -19 0.2 0.3 49 -5 19 -41 1 -19 0.4 0.7 49 46 50 -9 1 -19 0.2 0.5 -18 -29 9 -14 2 -19 0.4 0.7 23 -45 21 -18 2 -19 0.6 0.4 30 -13 33 -7 1 -19 0.5 0.3 15 -14 42 11 2 -19 0.9 0.6 -15 -21 45 -40 2 -19 0.5 0.7 23 -35 26 24 2 -19 0.1 0.3 27 -40 -5 -21 2 -19 0.1 0.2 -16 -20 43 -45 2 -19 0.8 0.1 33 14 27 -9 1 -19 0.9 0.5 -22 -44 -15 -18 1 -19 0.4 0.1 21 -20 42 -46 1 -19 0.6 0.5 17 -28 45 -50 2 -19 0.2 0.1 8 -2 45 30 2 -19 0.5 0.6 -42 -50 31 -18 2 -19 0.2 0.8 29 3 28 -6 1 -19 0.4 0.2 25 -29 1 -34 1 -19 0.9 0.3 -34 -42 31 -37 2 -19 0.4 0.3 49 -23 -13 -19 1 -19 0.3 0.2 49 34 -6 -41 1 -19 0.4 0.4 0 -45 13 5 2 -19 0.8 0.5 47 -36 41 -5 1 -19 0.5 0.7 -29 -46 -7 -38 2 -19 0.8 0.6 30 24 17 -32 1 -19 0.9 0.1 -18 -49 31 1 2 -19 0.7 0.4 40 -43 50 -36 1 -19 0.2 0.4 38 30 -14 -45 1 -19 0.2 0.7 3 -41 -27 -38 2 -19 0.7 0.9 36 -36 -10 -46 1 -19 0.5 0.5 -9 -13 32 -41 2 -19 0.6 0.7 37 36 -20 -44 1 -19 0.1 0.2 30 1 43 39 2 -19 0.6 0.8 35 -31 46 -25 2 -19 0.2 0.5 24 -12 -29 -48 1 -19 0.5 0.6 -32 -44 -32 -35 1 -19 0.7 0.2 50 -50 39 -34 1 -19 0.5 0.3 4 -27 32 -6 2 -19 0.8 0.9 46 -10 15 -4 1 -19 0.7 0.5 12 -43 43 13 2 -19 0.3 0.8 18 -21 24 12 2 -19 0.4 0.1 25 24 44 -40 1 -19 0.6 0.6 -23 -46 44 -42 2 -19 0.7 0.2 -13 -18 20 -15 2 -19 0.1 0.5 -19 -28 -25 -41 1 -19 0.9 0.7 -14 -19 29 -35 2 -19 0.1 0.1 43 41 20 0 1 -19 0.5 0.7 -7 -35 23 -23 2 -19 0.2 0.7 24 22 43 -26 2 -19 0.2 0.8 32 -19 -7 -41 2 -19 0.3 0.9 15 -30 22 -49 2 -19 0.5 0.5 37 28 -34 -48 1 -19 0.5 0.4 26 -32 -24 -43 1 -19 0.4 0.8 34 33 23 -50 1 -19 0.7 0.2 34 -41 31 -6 1 -19 0.8 0.1 28 -15 32 -16 1 -19 0.2 0.9 41 4 32 -50 2 -19 0.4 0.8 43 -35 49 -22 2 -19 0.6 0.9 43 27 32 22 1 -19 0.1 0.1 36 -50 8 -50 1 -19 0.6 0.1 35 -35 -19 -44 1 -19 0.8 0.4 33 -2 -6 -7 1 -19 0.4 0.4 46 25 6 2 1 -19 0.3 0.2 18 -11 -33 -39 1 -19 0.7 0.5 36 29 -9 -44 1 -19 0.3 0.8 13 -7 1 -25 2 -19 0.6 0.7 -24 -32 21 -35 2 -19 0.7 0.2 9 -20 32 26 2 -19 0.5 0.3 47 4 -31 -33 1 -19 0.8 0.7 -10 -45 6 -35 2 -19 0.4 0.2 -10 -30 39 -33 2 -19 0.6 0.1 -25 -34 28 -45 2 -19 0.7 0.7 23 7 -17 -27 1 -19 0.2 0.2 15 -32 7 -36 2 -19 0.9 0.3 34 28 -1 -10 1 -19 0.3 0.2 49 22 16 -7 1 -19 0.3 0.1 27 2 -7 -24 1 -19 0.5 0.1 25 -48 
22 -35 1 -19 0.5 0.5 8 3 49 -48 1 -19 0.1 0.3 -2 -45 12 -45 2 -19 0.1 0.1 -23 -31 29 -48 1 -19 0.7 0.5 18 0 49 11 2 -19 0.8 0.2 -2 -23 42 9 2 -19 0.2 0.8 9 -10 -8 -50 1 -19 0.4 0.1 -23 -39 8 -12 2 -19 0.7 0.9 21 15 45 28 2 -19 0.5 0.8 -25 -46 16 -42 2 -19 0.5 0.4 -1 -16 22 -39 1 -19 0.5 0.5 5 -37 -4 -24 1 -19 0.2 0.8 -16 -19 -32 -35 1 -19 0.7 0.9 11 -4 -12 -34 1 -19 0.6 0.3 39 -37 25 15 2 -19 0.1 0.8 13 -37 7 -16 2 -19 0.5 0.8 -36 -50 49 -18 2 -19 0.2 0.9 47 1 15 -3 2 -19 0.2 0.2 49 31 6 -35 1 -19 0.9 0.9 45 2 -31 -33 1 -19 0.9 0.9 5 -31 35 -33 2 -19 0.1 0.5 19 16 19 -3 1 -19 0.2 0.5 43 -6 38 -16 2 -19 0.8 0.3 -33 -47 0 -49 2 -19 0.9 0.8 -31 -49 48 -49 2 -19 0.4 0.8 25 -23 24 -11 2 -19 0.5 0.6 42 37 -12 -20 1 -19 0.6 0.4 47 -33 50 38 2 -19 0.7 0.2 -19 -21 17 0 2 -19 0.7 0.2 13 1 -1 -36 1 -19 0.1 0.5 43 -36 -2 -12 2 -19 0.1 0.4 47 42 13 -14 1 -19 0.7 0.1 45 9 32 -6 1 -19 0.8 0.9 33 -34 3 -48 1 -19 0.8 0.2 -14 -25 -1 -34 2 -19 0.4 0.3 16 4 -34 -36 1 -19 0.4 0.5 -2 -47 33 16 2 -19 0.2 0.9 20 -39 -16 -30 1 -19 0.1 0.3 22 17 3 -44 1 -19 0.8 0.3 -8 -42 -23 -31 1 -19 0.7 0.1 48 6 43 5 1 -19 0.6 0.3 9 -35 -47 -48 1 -19 0.9 0.3 36 -43 26 -28 1 -19 0.4 0.1 -40 -48 25 -26 2 -19 0.5 0.2 45 -39 25 -7 2 -19 0.2 0.3 45 -46 11 -15 2 -19 0.3 0.2 43 4 -37 -39 1 -19 0.6 0.5 10 9 45 -36 1 -19 0.6 0.7 30 -12 26 -39 2 -19 0.1 0.3 49 1 19 -4 2 -19 0.8 0.4 24 -46 47 32 2 -19 0.8 0.9 -7 -41 6 -26 2 -19 0.1 0.9 -31 -47 20 -1 2 -19 0.9 0.6 -28 -41 9 0 2 -19 0.7 0.9 -32 -48 44 -11 2 -19 0.3 0.4 47 41 49 -12 1 -19 0.9 0.3 15 -12 18 6 1 -19 0.1 0.3 37 -8 31 11 2 -19 0.7 0.6 4 -41 19 -12 2 -19 0.7 0.7 43 -30 26 -3 1 -19 0.5 0.7 -6 -8 -11 -35 1 -19 0.4 0.8 -20 -21 34 -3 2 -19 0.9 0.1 32 10 48 -3 1 -19 0.9 0.8 16 15 41 21 2 -19 0.7 0.2 46 -15 -10 -45 1 -19 0.9 0.3 30 16 3 -22 1 -19 0.3 0.8 -1 -3 23 -17 2 -19 0.1 0.1 15 -49 44 30 2 -19 0.7 0.5 27 -35 40 -44 1 -19 0.4 0.8 14 -25 48 -41 2 -19 0.5 0.5 31 -24 35 5 2 -19 0.9 0.7 48 1 -32 -37 1 -19 0.2 0.6 50 2 8 -16 1 -19 0.7 0.8 -27 -46 41 -16 2 -19 0.3 0.5 13 -30 25 -5 2 -19 0.1 0.3 23 0 12 -19 1 -19 0.7 0.4 45 16 50 -20 1 -19 0.8 0.2 7 -39 -14 -32 1 -19 0.2 0.7 39 -49 13 -2 2 -19 0.6 0.9 21 -3 33 -1 2 -19 0.9 0.2 48 -3 0 -47 1 -19 0.5 0.1 -16 -28 35 -13 2 -19 0.3 0.3 44 -1 49 -34 1 -19 0.7 0.2 -11 -21 -19 -20 2 -19 0.7 0.4 3 -33 47 7 2 -19 0.7 0.8 -28 -46 37 -36 2 -19 0.7 0.5 -1 -25 15 11 2 -19 0.7 0.2 48 21 -27 -50 1 -19 0.2 0.8 -5 -25 16 -8 2 -19 0.4 0.9 50 22 48 44 1 -19 0.8 0.2 36 -49 14 13 1 -19 0.8 0.9 40 -21 27 -6 1 -19 0.3 0.7 49 3 12 9 1 -19 0.4 0.7 46 -23 17 -29 1 -19 0.9 0.2 -30 -49 41 -9 2 -19 0.1 0.6 23 0 -21 -39 1 -19 0.5 0.1 14 -30 -16 -19 1 -19 0.6 0.1 46 -46 -4 -26 1 -19 0.2 0.4 40 12 39 19 2 -19 0.5 0.9 4 -10 40 4 2 -19 0.9 0.7 -23 -26 18 -45 2 -19 0.7 0.3 14 -22 -30 -38 1 -19 0.6 0.2 32 -35 16 9 2 -19 0.9 0.2 -27 -47 -23 -50 1 -19 0.2 0.2 -23 -25 49 -44 2 -19 0.5 0.7 42 -48 19 -45 2 -19 0.7 0.4 -33 -43 43 -44 2 -19 0.2 0.5 10 -20 8 -19 2 -19 0.2 0.3 1 -17 -19 -47 1 -19 0.1 0.6 48 -27 -12 -48 1 -19 0.7 0.3 30 -40 36 -2 1 -19 0.1 0.5 38 -32 14 -24 2 -19 0.2 0.6 10 -35 0 -27 2 -19 0.6 0.8 25 -6 -19 -27 1 -19 0.4 0.1 -41 -47 -7 -47 1 -19 0.9 0.8 27 -50 49 37 2 -19 0.2 0.7 -43 -50 37 -21 2 -19 0.5 0.2 8 -27 44 42 2 -19 0.6 0.9 49 -32 35 -28 2 -19 0.7 0.9 27 25 2 -39 1 -19 0.7 0.2 45 -2 -5 -30 1 -19 0.2 0.4 46 26 11 -44 1 -19 0.2 0.2 32 -24 25 -31 1 -19 0.3 0.7 40 33 23 -8 1 -19 0.3 0.7 9 -16 5 -23 2 -19 0.1 0.4 38 -45 9 -18 2 -19 0.2 0.2 41 -9 1 -39 1 -19 0.6 0.7 15 -12 -9 -48 1 -19 0.6 0.8 -14 -45 5 -44 2 -19 0.9 0.4 -19 -47 7 -15 2 -19 0.9 0.6 44 -4 27 -29 1 
-19 0.6 0.5 0 -15 8 -43 1 -19 0.1 0.3 19 -1 30 25 2 -19 0.8 0.5 19 7 -25 -41 1 -19 0.8 0.2 -5 -33 44 -24 2 -19 0.8 0.5 20 -34 -22 -40 1 -19 0.9 0.2 -17 -48 -11 -39 1 -19 0.7 0.4 7 -37 43 -21 2 -19 0.2 0.7 35 -20 38 29 2 -19 0.1 0.9 48 40 48 14 2 -19 0.4 0.5 -12 -33 -2 -16 1 -19 0.1 0.9 14 -45 19 17 2 -19 0.5 0.1 17 -33 24 -47 1 -19 0.7 0.6 -7 -23 25 5 2 -19 0.8 0.3 41 34 -12 -20 1 -19 0.6 0.3 13 3 8 -18 1 -19 0.3 0.5 43 12 -10 -22 1 -19 0.7 0.3 48 -5 35 -33 1 -19 0.3 0.7 -1 -5 -22 -41 1 -19 0.6 0.3 50 22 14 -8 1 -19 0.2 0.5 30 -26 39 -6 2 -19 0.5 0.5 12 -7 21 11 2 -19 0.4 0.4 41 -50 -13 -50 1 -19 0.2 0.9 35 -49 -1 -31 2 -19 0.2 0.2 11 -44 16 -16 2 -19 0.1 0.2 40 -9 10 -50 1 -19 0.1 0.4 -25 -42 -15 -20 2 -19 0.8 0.8 30 -43 29 -8 1 -19 0.3 0.5 -9 -25 50 45 2 -19 0.2 0.4 30 -33 20 -29 2 -19 0.2 0.8 45 25 -5 -8 1 -19 0.6 0.5 22 -21 0 -21 1 -19 0.8 0.2 -12 -36 28 -34 2 -19 0.8 0.9 27 -26 -31 -40 1 -19 0.2 0.8 -21 -44 28 -23 2 -19 0.9 0.6 -10 -16 28 27 2 -19 0.8 0.5 45 -45 -15 -39 1 -19 0.4 0.8 5 2 -10 -13 2 -19 0.4 0.1 44 -7 44 -25 1 -19 0.8 0.4 18 5 19 3 1 -19 0.6 0.4 48 12 -48 -50 1 -19 0.6 0.5 49 -47 37 -38 1 -19 0.1 0.4 -36 -49 15 -49 2 -19 0.2 0.1 41 -5 -27 -35 1 -19 0.6 0.7 12 -5 5 -32 1 -19 0.3 0.3 -9 -32 18 -22 2 -19 0.8 0.5 25 11 -31 -32 1 -19 0.5 0.8 8 -27 41 -20 2 -19 0.5 0.6 8 -31 47 22 2 -19 0.9 0.1 37 13 38 -24 1 -19 0.6 0.2 -4 -50 18 -42 2 -19 0.7 0.7 10 -1 3 -2 2 -19 0.3 0.3 7 -35 12 -15 2 -19 0.3 0.6 42 -5 28 -50 1 -19 0.5 0.6 12 -27 26 20 2 -19 0.7 0.2 -7 -50 31 -50 1 -19 0.9 0.9 2 -35 47 37 2 -19 0.5 0.4 23 18 25 0 1 -19 0.4 0.8 18 -6 7 -35 1 -20 0.9 0.3 27 0 40 20 2 -20 0.4 0.8 4 -12 39 -9 2 -20 0.8 0.6 -18 -48 26 -37 2 -20 0.6 0.4 43 -21 20 -26 1 -20 0.1 0.9 26 13 5 1 1 -20 0.8 0.3 45 39 41 8 1 -20 0.8 0.1 39 -41 27 -5 1 -20 0.5 0.2 49 -10 -7 -15 1 -20 0.1 0.6 49 -35 48 -19 2 -20 0.2 0.8 26 -8 10 -20 1 -20 0.6 0.1 34 5 35 8 1 -20 0.3 0.5 -17 -47 31 -4 2 -20 0.5 0.2 48 -29 14 -3 1 -20 0.2 0.5 25 2 38 26 2 -20 0.6 0.1 31 -6 30 -8 1 -20 0.6 0.2 10 -27 -6 -14 1 -20 0.6 0.4 28 17 37 20 2 -20 0.1 0.7 36 13 20 0 1 -20 0.1 0.9 12 -37 -11 -25 2 -20 0.8 0.9 -9 -15 47 11 2 -20 0.9 0.1 43 1 -7 -31 1 -20 0.6 0.4 -13 -41 9 -25 2 -20 0.4 0.6 36 -26 14 -9 1 -20 0.2 0.8 -1 -36 2 -42 2 -20 0.4 0.4 32 5 45 30 2 -20 0.5 0.3 -22 -43 -6 -8 2 -20 0.6 0.2 21 13 -23 -43 1 -20 0.7 0.6 28 20 16 -30 1 -20 0.3 0.9 16 -32 24 14 2 -20 0.6 0.6 -26 -29 -6 -31 2 -20 0.1 0.5 43 33 22 -43 1 -20 0.3 0.2 7 -39 3 -19 2 -20 0.4 0.7 38 -3 22 3 2 -20 0.7 0.5 45 12 27 -16 1 -20 0.3 0.8 -18 -43 43 30 2 -20 0.6 0.2 37 17 25 -16 1 -20 0.4 0.7 31 -29 40 -44 2 -20 0.2 0.3 -1 -4 -3 -20 1 -20 0.1 0.3 33 19 33 15 1 -20 0.4 0.6 45 41 -3 -25 1 -20 0.3 0.3 34 -16 34 -11 1 -20 0.6 0.5 19 17 17 -40 1 -20 0.2 0.6 24 8 26 -31 2 -20 0.9 0.5 31 29 42 4 1 -20 0.7 0.1 -11 -23 14 -13 2 -20 0.4 0.6 -23 -43 13 -7 2 -20 0.8 0.2 36 -18 34 -30 1 -20 0.6 0.8 -33 -50 26 -8 2 -20 0.9 0.6 3 -31 34 -14 2 -20 0.5 0.6 27 6 21 -10 1 -20 0.7 0.2 -11 -29 13 -47 1 -20 0.5 0.6 -9 -48 19 -50 2 -20 0.7 0.9 3 -14 -34 -38 1 -20 0.1 0.4 34 -14 39 -2 2 -20 0.3 0.2 8 -36 37 -3 2 -20 0.7 0.2 2 -43 -30 -46 1 -20 0.5 0.7 37 -11 36 -9 2 -20 0.8 0.7 39 -4 25 -38 1 -20 0.6 0.7 25 -34 -16 -18 1 -20 0.6 0.2 21 5 23 21 2 -20 0.9 0.8 -16 -22 25 -25 2 -20 0.5 0.2 -11 -27 19 0 2 -20 0.1 0.9 20 6 31 -9 2 -20 0.6 0.1 19 -45 6 -37 1 -20 0.8 0.4 -4 -46 20 -48 1 -20 0.5 0.7 20 4 44 5 2 -20 0.9 0.1 1 -49 24 5 2 -20 0.4 0.7 35 9 33 -8 1 -20 0.5 0.6 26 -44 -12 -21 1 -20 0.6 0.2 12 -40 44 -29 1 -20 0.8 0.8 31 -24 25 15 2 -20 0.9 0.4 27 9 31 -47 1 -20 0.8 0.5 -45 -46 8 -1 2 -20 
0.4 0.8 50 -49 20 -35 2 -20 0.9 0.5 -8 -17 43 -44 2 -20 0.9 0.6 43 -10 32 -13 1 -20 0.9 0.3 31 9 38 -12 1 -20 0.4 0.3 46 -38 -30 -37 1 -20 0.1 0.4 47 -44 13 4 2 -20 0.5 0.7 14 -18 5 -10 2 -20 0.6 0.6 6 -8 10 7 2 -20 0.6 0.4 -20 -28 -25 -41 1 -20 0.6 0.5 37 5 -31 -48 1 -20 0.2 0.9 3 -21 -31 -38 1 -20 0.7 0.6 -9 -22 3 -27 2 -20 0.2 0.7 -18 -48 0 -25 2 -20 0.3 0.7 47 46 24 -31 1 -20 0.5 0.4 -40 -43 -16 -39 1 -20 0.8 0.3 37 -42 45 16 2 -20 0.9 0.8 21 -3 34 3 2 -20 0.5 0.2 21 -6 -11 -21 1 -20 0.7 0.7 48 -9 -35 -37 1 -20 0.3 0.8 22 -34 -14 -50 1 -20 0.5 0.7 4 3 19 -2 2 -20 0.3 0.2 -7 -42 20 -21 2 -20 0.5 0.3 -9 -20 -20 -30 2 -20 0.8 0.1 36 -39 -1 -45 1 -20 0.8 0.8 18 -36 -34 -40 1 -20 0.4 0.3 33 1 -14 -20 1 -20 0.8 0.7 37 -21 46 -40 1 -20 0.8 0.7 -37 -42 33 27 2 -20 0.2 0.6 46 -22 45 14 2 -20 0.5 0.6 -4 -39 11 -46 2 -20 0.4 0.2 25 -22 48 47 2 -20 0.8 0.4 38 -13 27 -47 1 -20 0.2 0.9 -26 -48 -20 -22 2 -20 0.6 0.8 -13 -25 30 -4 2 -20 0.4 0.6 -6 -28 29 27 2 -20 0.5 0.6 38 -46 17 -24 2 -20 0.8 0.3 27 -43 27 -1 1 -20 0.4 0.1 29 -2 -2 -7 1 -20 0.5 0.9 -26 -32 14 -11 2 -20 0.8 0.2 40 -47 9 2 1 -20 0.5 0.2 50 17 -21 -25 1 -20 0.8 0.4 38 -44 19 -13 1 -20 0.9 0.3 41 17 28 -25 1 -20 0.7 0.5 34 -3 48 -33 1 -20 0.4 0.8 42 41 44 41 2 -20 0.2 0.7 15 7 3 -31 1 -20 0.7 0.6 -13 -27 40 21 2 -20 0.8 0.5 -5 -12 -14 -33 1 -20 0.6 0.3 3 -23 45 18 2 -20 0.7 0.1 48 45 44 -10 1 -20 0.8 0.8 29 -10 29 -8 1 -20 0.3 0.9 -10 -23 25 -8 2 -20 0.3 0.8 18 -40 48 -34 2 -20 0.3 0.7 30 -33 7 -44 1 -20 0.7 0.4 40 -3 -5 -16 1 -20 0.7 0.1 -25 -38 -24 -26 2 -20 0.7 0.2 49 10 -14 -37 1 -20 0.1 0.5 6 -44 36 -33 2 -20 0.9 0.1 45 -40 38 -29 1 -20 0.4 0.7 34 11 49 20 1 -20 0.5 0.4 12 -48 -15 -35 1 -20 0.2 0.9 -7 -9 -26 -30 1 -20 0.4 0.4 -21 -26 40 -7 2 -20 0.8 0.4 18 -10 33 -1 2 -20 0.2 0.7 25 -14 15 -16 2 -20 0.9 0.4 46 16 42 -25 1 -20 0.9 0.3 44 25 -21 -32 1 -20 0.1 0.1 -4 -41 5 -37 1 -20 0.1 0.2 39 -28 22 -2 2 -20 0.8 0.5 35 -13 2 -7 1 -20 0.5 0.2 32 -47 42 -29 1 -20 0.8 0.6 -3 -25 6 -33 2 -20 0.4 0.4 1 -15 36 -44 1 -20 0.2 0.5 46 2 -1 -43 1 -20 0.9 0.7 29 -3 22 10 1 -20 0.4 0.3 13 -37 19 15 2 -20 0.3 0.2 28 -4 35 19 2 -20 0.1 0.5 -20 -36 19 -32 2 -20 0.9 0.5 18 10 -1 -29 1 -20 0.8 0.7 39 8 8 -10 1 -20 0.4 0.1 3 -24 31 -34 1 -20 0.4 0.3 29 -29 25 14 2 -20 0.7 0.1 -25 -30 2 -19 2 -20 0.1 0.6 22 -17 21 -11 2 -20 0.4 0.9 15 -31 -1 -36 1 -20 0.9 0.2 -3 -47 32 5 2 -20 0.1 0.9 43 -9 43 -44 2 -20 0.6 0.1 40 -40 27 2 1 -20 0.8 0.3 26 -1 49 -6 1 -20 0.6 0.5 48 3 -35 -44 1 -20 0.1 0.1 13 5 40 39 2 -20 0.1 0.5 20 -50 -13 -29 2 -20 0.1 0.6 -4 -19 -28 -34 1 -20 0.5 0.1 47 43 35 4 1 -20 0.4 0.8 -8 -39 48 28 2 -20 0.8 0.5 7 3 49 23 2 -20 0.4 0.9 2 -36 38 12 2 -20 0.6 0.9 47 23 28 -25 1 -20 0.3 0.9 -1 -5 34 -10 2 -20 0.4 0.7 34 -15 27 13 2 -20 0.8 0.3 50 13 32 -45 1 -20 0.8 0.8 13 8 14 -26 1 -20 0.8 0.2 5 -8 48 41 2 -20 0.7 0.6 34 -10 49 37 2 -20 0.7 0.3 40 33 21 -22 1 -20 0.5 0.6 12 -17 45 27 2 -20 0.4 0.1 -30 -41 48 27 2 -20 0.9 0.1 -24 -39 3 -41 2 -20 0.2 0.6 43 -3 34 27 2 -20 0.7 0.9 41 -16 38 12 2 -20 0.4 0.6 18 -30 38 -24 2 -20 0.7 0.5 36 21 28 -43 1 -20 0.5 0.2 43 -15 -18 -20 1 -20 0.6 0.5 2 -39 -15 -32 1 -20 0.1 0.5 -42 -49 -17 -36 2 -20 0.1 0.9 36 -40 -2 -24 2 -20 0.1 0.5 17 -34 10 -38 2 -20 0.7 0.4 11 -18 35 30 2 -20 0.7 0.4 20 -36 -35 -44 1 -20 0.8 0.8 -21 -30 -6 -39 2 -20 0.2 0.8 48 27 -24 -48 1 -20 0.4 0.7 14 -24 -30 -49 1 -20 0.2 0.4 -9 -23 45 -49 2 -20 0.2 0.8 36 -48 25 19 2 -20 0.6 0.9 1 -48 33 16 2 -20 0.7 0.6 27 -31 37 -47 1 -20 0.4 0.5 -5 -49 31 20 2 -20 0.3 0.2 49 -2 46 23 2 -20 0.5 0.9 45 -16 44 -47 2 -20 0.8 0.8 1 -26 49 -1 2 -20 0.9 
[Example-data file contents omitted: trial-level rows for subjects -20 through -28, eight whitespace-separated columns per row (subject ID, two outcome probabilities, four outcome values, and a 1/2 choice), e.g. "-20 0.1 0.1 29 -47 43 40 2"; the original one-record-per-line layout of this block was lost in flattening.]
-14 -50 -21 -28 1 -28 0.9 0.7 -9 -35 45 43 2 -28 0.1 0.6 -14 -16 44 2 2 -28 0.2 0.4 41 20 49 -25 1 -28 0.7 0.3 49 14 -37 -50 1 -28 0.3 0.4 -3 -4 45 -7 2 -28 0.5 0.6 24 20 -20 -21 1 -28 0.7 0.4 -14 -41 9 8 2 -28 0.7 0.3 16 -36 27 11 2 -28 0.1 0.5 -10 -36 40 -30 2 -28 0.9 0.6 32 15 -10 -13 1 -28 0.9 0.6 50 -40 -29 -32 1 -28 0.9 0.4 1 -50 42 34 2 -28 0.7 0.9 50 -16 -10 -50 1 -28 0.8 0.2 44 -27 33 7 1 -28 0.9 0.3 10 -19 34 24 2 -28 0.9 0.3 6 -15 -2 -30 1 -28 0.1 0.2 32 -20 28 19 2 -28 0.5 0.4 -14 -46 49 4 2 -28 0.1 0.9 24 13 -19 -35 1 -28 0.9 0.9 21 -41 39 -16 2 -28 0.6 0.5 42 21 40 -23 1 -29 0.1 0.8 20 -39 12 8 2 -29 0.8 0.9 35 28 -13 -31 1 -29 0.2 0.4 39 -19 18 14 2 -29 0.8 0.9 28 -22 -5 -40 1 -29 0.1 0.9 2 -5 36 27 2 -29 0.3 0.7 9 -26 45 4 2 -29 0.7 0.1 1 -44 49 -10 1 -29 0.5 0.6 16 -19 38 21 2 -29 0.2 0.8 0 -32 24 -45 2 -29 0.5 0.4 18 -1 43 15 2 -29 0.4 0.5 46 -27 38 29 2 -29 0.4 0.4 24 11 3 -48 1 -29 0.3 0.3 42 -16 48 -6 2 -29 0.7 0.3 -9 -28 5 -19 1 -29 0.1 0.8 -14 -31 -21 -45 2 -29 0.9 0.4 -7 -42 50 48 2 -29 0.8 0.9 2 -44 -18 -25 1 -29 0.5 0.2 26 -45 40 11 2 -29 0.7 0.7 16 -23 -3 -12 1 -29 0.2 0.7 43 -17 23 -19 2 -29 0.3 0.2 -12 -35 33 -7 2 -29 0.2 0.6 13 -15 15 -34 1 -29 0.8 0.1 -37 -44 31 16 2 -29 0.5 0.6 16 -9 15 -22 2 -29 0.7 0.3 4 -18 -32 -44 1 -29 0.3 0.4 2 -23 18 -6 2 -29 0.8 0.1 46 -36 29 3 1 -29 0.6 0.5 43 21 39 -46 1 -29 0.6 0.2 -5 -45 -5 -25 1 -29 0.4 0.1 11 -19 23 -41 1 -29 0.9 0.4 47 -5 47 1 1 -29 0.2 0.9 29 -31 20 9 2 -29 0.1 0.9 48 22 18 -16 1 -29 0.2 0.3 30 -14 -3 -18 1 -29 0.2 0.9 20 -45 15 -25 2 -29 0.1 0.2 48 -37 10 -7 1 -29 0.3 0.5 25 23 18 -45 1 -29 0.1 0.3 25 -34 19 -27 2 -29 0.5 0.5 13 -19 48 -40 2 -29 0.5 0.9 -16 -38 8 -40 2 -29 0.6 0.8 20 -35 47 -41 2 -29 0.5 0.5 -7 -14 48 30 2 -29 0.3 0.2 23 -30 19 9 2 -29 0.9 0.8 38 18 36 24 2 -29 0.4 0.3 34 -40 39 -32 2 -29 0.4 0.7 24 23 24 3 1 -29 0.1 0.4 11 -31 44 4 2 -29 0.2 0.3 14 -48 -4 -8 2 -29 0.3 0.2 42 -36 -42 -49 1 -29 0.2 0.8 30 18 20 -49 1 -29 0.6 0.5 25 -32 -28 -47 1 -29 0.9 0.1 45 32 -5 -30 1 -29 0.8 0.6 26 -20 32 5 1 -29 0.4 0.7 22 -22 -16 -49 1 -29 0.4 0.1 20 8 43 33 2 -29 0.9 0.3 26 8 44 28 2 -29 0.7 0.2 22 -40 44 -44 2 -29 0.3 0.4 31 -29 -15 -26 2 -29 0.5 0.5 45 36 48 -24 1 -29 0.5 0.6 34 3 50 -22 1 -29 0.6 0.6 48 36 28 -40 1 -29 0.7 0.7 33 -11 11 -26 1 -29 0.1 0.9 30 0 3 -32 2 -29 0.8 0.4 16 -46 19 -7 1 -29 0.8 0.7 -24 -36 41 -12 2 -29 0.8 0.5 25 -9 -5 -39 1 -29 0.9 0.2 21 -34 9 -28 1 -29 0.3 0.4 49 -35 27 12 2 -29 0.3 0.6 -14 -24 20 -45 2 -29 0.9 0.5 -20 -25 7 -8 2 -29 0.8 0.1 38 -23 24 13 1 -29 0.4 0.4 30 0 44 39 2 -29 0.7 0.8 24 -2 -31 -32 1 -29 0.3 0.3 -34 -35 22 -35 2 -29 0.1 0.4 21 5 16 12 2 -29 0.2 0.1 -21 -26 -10 -13 2 -29 0.2 0.2 -18 -45 24 -37 2 -29 0.5 0.6 32 23 14 -20 1 -29 0.2 0.9 -9 -29 -13 -36 1 -29 0.4 0.8 -15 -44 41 38 2 -29 0.2 0.9 33 -36 -34 -38 1 -29 0.7 0.5 38 6 -31 -45 1 -29 0.8 0.7 30 -40 47 -46 1 -29 0.9 0.8 -33 -36 46 -3 2 -29 0.9 0.6 10 -29 45 39 2 -29 0.1 0.3 50 10 42 34 2 -29 0.4 0.3 -16 -29 9 5 2 -29 0.2 0.5 -5 -26 -35 -45 1 -29 0.8 0.7 -32 -35 30 -44 2 -29 0.8 0.5 15 -37 38 3 2 -29 0.3 0.3 -11 -45 24 -33 2 -29 0.3 0.3 -13 -16 -6 -13 2 -29 0.4 0.1 47 -7 29 25 2 -29 0.8 0.3 21 -45 -13 -50 1 -29 0.6 0.1 -10 -47 32 12 2 -29 0.3 0.1 -5 -42 47 26 2 -29 0.4 0.2 13 -11 15 -38 1 -29 0.3 0.7 -10 -13 31 4 2 -29 0.4 0.3 6 -38 40 25 2 -29 0.7 0.9 -27 -36 -13 -46 1 -29 0.4 0.9 33 -1 -11 -41 1 -29 0.5 0.6 12 2 8 -9 2 -29 0.4 0.8 30 0 29 -18 2 -29 0.3 0.2 33 -41 -19 -48 1 -29 0.1 0.5 30 -2 4 -2 1 -29 0.3 0.9 32 26 45 13 2 -29 0.9 0.8 2 -37 41 -32 2 -29 0.7 0.3 28 4 47 -22 1 -29 0.2 0.2 -1 -8 30 
2 2 -29 0.3 0.7 21 -46 -4 -16 1 -29 0.5 0.4 2 -21 32 16 2 -29 0.7 0.8 47 -42 41 -19 1 -29 0.4 0.3 -1 -24 45 12 2 -29 0.6 0.1 47 43 48 24 2 -29 0.7 0.4 37 18 -1 -50 1 -29 0.5 0.2 48 12 34 -19 1 -29 0.3 0.3 24 6 35 29 2 -29 0.6 0.7 44 32 -2 -32 1 -29 0.3 0.2 21 17 -4 -22 1 -29 0.7 0.2 42 16 -24 -33 1 -29 0.1 0.9 18 7 44 -26 2 -29 0.8 0.8 39 -40 42 -19 2 -29 0.4 0.6 -1 -31 37 -2 2 -29 0.8 0.9 27 7 7 -35 1 -29 0.5 0.6 47 -14 -1 -29 1 -29 0.9 0.8 20 -45 46 29 2 -29 0.6 0.2 -17 -35 45 -15 2 -29 0.6 0.1 -17 -30 27 -2 2 -29 0.2 0.7 47 42 -26 -34 1 -29 0.8 0.2 -12 -49 22 13 2 -29 0.6 0.6 8 -40 -12 -46 1 -29 0.9 0.1 -28 -50 -12 -13 1 -29 0.6 0.2 15 -9 29 -35 1 -29 0.6 0.7 -6 -36 43 0 2 -29 0.4 0.3 41 -7 -33 -38 1 -29 0.7 0.1 25 -10 -25 -30 1 -29 0.9 0.1 34 -26 38 -35 1 -29 0.9 0.2 16 -15 3 -37 1 -29 0.7 0.9 -5 -9 37 -26 2 -29 0.8 0.2 35 -28 36 -40 1 -29 0.5 0.1 28 -34 -21 -46 1 -29 0.5 0.1 8 -24 28 -15 1 -29 0.9 0.6 14 -15 31 12 2 -29 0.3 0.8 12 6 47 -20 2 -29 0.1 0.1 2 -19 32 -47 1 -29 0.5 0.5 30 -29 -33 -47 1 -29 0.1 0.9 48 -9 -17 -45 2 -29 0.4 0.5 21 -29 -31 -44 1 -29 0.2 0.5 1 -1 -38 -40 1 -29 0.8 0.4 11 -5 47 -12 1 -29 0.2 0.1 -14 -49 -3 -26 2 -29 0.3 0.8 29 -21 9 2 2 -29 0.9 0.8 34 -21 33 4 2 -29 0.3 0.1 28 16 17 -18 1 -29 0.5 0.7 37 -49 35 34 2 -29 0.7 0.7 23 -26 50 9 2 -29 0.5 0.2 34 32 10 -10 1 -29 0.2 0.3 37 -3 44 -43 2 -29 0.6 0.8 41 10 3 -2 1 -29 0.8 0.7 33 10 15 -19 1 -29 0.7 0.9 14 10 -18 -48 1 -29 0.4 0.3 49 18 -5 -24 1 -29 0.3 0.3 -47 -49 31 23 2 -29 0.3 0.3 40 -30 26 -7 2 -29 0.3 0.5 17 -36 38 36 2 -29 0.1 0.5 34 30 -14 -36 1 -29 0.4 0.2 35 -7 -14 -36 1 -29 0.1 0.4 -12 -23 4 -37 1 -29 0.7 0.3 0 -14 -11 -31 2 -29 0.8 0.8 23 -18 17 11 2 -29 0.1 0.5 27 -42 31 -11 2 -29 0.9 0.5 47 -8 -23 -32 1 -29 0.5 0.4 16 -31 25 -37 2 -29 0.3 0.4 29 -39 42 -25 2 -29 0.6 0.6 47 -1 32 -28 1 -29 0.3 0.8 48 40 24 -38 2 -29 0.7 0.7 -3 -27 48 28 2 -29 0.1 0.7 6 -43 -34 -44 2 -29 0.2 0.1 39 -50 48 -1 1 -29 0.9 0.5 48 -25 10 -49 1 -29 0.5 0.6 -6 -45 41 -20 2 -29 0.1 0.7 -19 -29 22 6 2 -29 0.4 0.6 17 -8 34 12 2 -29 0.1 0.3 -8 -15 -26 -34 1 -29 0.1 0.7 31 -4 -9 -16 1 -29 0.1 0.5 -3 -34 14 -35 2 -29 0.4 0.7 36 -21 -4 -30 1 -29 0.3 0.8 40 16 2 -5 2 -29 0.8 0.2 18 -27 12 4 1 -29 0.9 0.8 46 5 44 43 2 -29 0.6 0.5 39 -24 42 -15 2 -29 0.8 0.5 21 -47 -22 -29 1 -29 0.5 0.7 15 -31 -3 -31 1 -29 0.6 0.8 -20 -23 39 -35 2 -29 0.3 0.2 32 -11 26 24 2 -29 0.5 0.9 7 -17 17 2 2 -29 0.5 0.1 23 -30 -19 -41 1 -29 0.3 0.7 39 25 50 39 2 -29 0.2 0.5 12 -10 36 28 2 -29 0.6 0.9 30 -42 42 -14 2 -29 0.4 0.5 22 10 35 -27 1 -29 0.7 0.5 45 -2 -22 -43 1 -29 0.5 0.3 2 -48 26 10 2 -29 0.2 0.6 8 -1 44 -24 1 -29 0.7 0.5 -7 -36 32 -45 2 -29 0.3 0.2 -10 -44 42 -25 2 -29 0.2 0.3 46 -47 50 44 2 -29 0.4 0.3 31 -22 3 2 2 -29 0.7 0.5 -2 -50 5 -43 2 -29 0.6 0.4 29 22 41 38 1 -29 0.3 0.7 -11 -36 -21 -37 1 -29 0.3 0.2 42 0 23 -47 1 -29 0.5 0.6 6 -4 -21 -48 1 -29 0.9 0.7 29 -24 27 -1 1 -29 0.4 0.5 0 -26 45 -39 2 -29 0.7 0.2 29 -5 9 8 1 -29 0.9 0.4 -39 -44 12 -36 2 -29 0.2 0.5 -34 -39 20 -48 2 -29 0.8 0.3 29 -7 -7 -35 1 -29 0.9 0.6 18 1 39 27 2 -29 0.4 0.9 36 -16 -36 -43 1 -29 0.3 0.8 42 -34 16 -48 2 -29 0.4 0.8 -26 -44 -9 -31 2 -29 0.1 0.7 9 -43 -17 -18 2 -29 0.5 0.5 -25 -32 23 15 2 -29 0.1 0.6 46 -13 42 -46 2 -29 0.2 0.6 44 -6 26 8 2 -29 0.6 0.9 29 -39 15 -28 1 -29 0.8 0.6 4 -45 1 -36 2 -29 0.6 0.8 39 31 38 -32 1 -29 0.7 0.4 32 11 -20 -28 1 -29 0.8 0.8 -20 -37 9 -20 2 -29 0.4 0.7 -25 -26 40 10 2 -29 0.2 0.4 45 -15 1 -21 1 -29 0.4 0.8 39 -44 47 14 2 -29 0.6 0.6 49 -21 34 -34 1 -29 0.3 0.6 32 23 1 -5 1 -29 0.8 0.5 13 -1 5 -29 1 -29 0.5 0.7 24 -42 -9 -27 2 
-29 0.4 0.9 16 -43 22 9 2 -29 0.9 0.9 49 4 50 -16 1 -29 0.5 0.9 -33 -39 -30 -47 2 -29 0.1 0.6 45 23 -4 -28 1 -29 0.6 0.7 27 -42 36 -39 2 -29 0.9 0.7 -4 -41 -13 -37 1 -29 0.8 0.9 19 -39 40 -3 2 -29 0.5 0.8 9 -29 50 9 2 -29 0.5 0.8 43 19 24 -40 1 -29 0.8 0.3 27 1 49 -17 1 -29 0.5 0.2 38 -2 41 -19 1 -29 0.7 0.1 23 5 48 40 2 -29 0.7 0.8 -15 -44 30 -24 2 -29 0.5 0.7 50 22 8 7 1 -29 0.5 0.5 43 25 -24 -50 1 -29 0.4 0.5 42 -15 20 -17 1 -29 0.9 0.1 41 30 -12 -28 1 -29 0.7 0.2 30 -6 30 -47 1 -29 0.3 0.6 -3 -14 -22 -34 2 -29 0.6 0.3 34 -47 19 -10 2 -29 0.5 0.5 44 -7 29 -17 1 -29 0.1 0.2 31 13 32 -6 1 -29 0.1 0.1 39 20 2 -34 1 -29 0.4 0.5 49 2 24 15 2 -29 0.2 0.8 -8 -39 37 5 2 -29 0.8 0.4 24 2 -28 -43 1 -29 0.8 0.5 41 6 -7 -50 1 -29 0.7 0.8 7 -44 40 -33 2 -29 0.9 0.1 28 3 9 5 1 -29 0.3 0.7 33 -7 27 -14 2 -29 0.5 0.8 2 -40 -4 -34 2 -29 0.4 0.9 41 29 -14 -23 1 -29 0.7 0.8 12 -38 -37 -43 1 -29 0.2 0.7 24 11 -27 -33 1 -29 0.8 0.6 40 19 27 -50 1 -29 0.8 0.3 -43 -50 46 -30 2 -29 0.7 0.4 42 4 -27 -46 1 -29 0.4 0.9 23 -38 47 29 2 -29 0.9 0.9 34 18 48 -35 2 -29 0.6 0.4 0 -10 28 5 2 -29 0.6 0.9 20 -39 50 32 2 -29 0.5 0.4 43 10 4 -12 2 -29 0.4 0.3 0 -32 25 -24 2 -29 0.1 0.4 28 23 -9 -50 1 -29 0.7 0.2 31 -37 27 -8 1 -29 0.7 0.6 -4 -17 0 -16 2 -29 0.5 0.7 11 -43 -6 -38 1 -29 0.6 0.7 34 -12 -8 -38 1 -29 0.3 0.4 -7 -20 -3 -37 1 -29 0.5 0.5 5 -7 9 -46 1 -29 0.4 0.4 48 -26 39 -31 1 -29 0.6 0.4 31 -1 42 30 2 -29 0.4 0.4 -40 -47 33 -41 1 -29 0.9 0.1 40 36 25 -3 1 -29 0.2 0.9 -13 -14 35 10 2 -29 0.3 0.6 23 -26 -27 -47 1 -29 0.4 0.9 37 -32 36 -10 2 -29 0.2 0.8 -2 -25 33 32 2 -29 0.6 0.6 49 30 -10 -20 1 -29 0.1 0.4 -18 -43 -26 -31 1 -29 0.4 0.6 -22 -49 35 -17 2 -30 0.2 0.6 43 13 47 33 2 -30 0.1 0.2 10 -5 14 -25 1 -30 0.1 0.5 26 12 33 -30 1 -30 0.8 0.9 30 -40 43 -8 2 -30 0.2 0.7 -7 -39 -1 -38 2 -30 0.4 0.8 -1 -14 0 -29 2 -30 0.5 0.9 47 23 10 -20 1 -30 0.5 0.1 45 28 14 -19 1 -30 0.9 0.4 45 -25 9 -3 1 -30 0.1 0.7 34 -45 15 -19 2 -30 0.3 0.9 32 22 26 8 1 -30 0.2 0.5 50 -6 22 21 2 -30 0.5 0.7 46 -47 24 -47 1 -30 0.1 0.8 -38 -42 13 -2 2 -30 0.5 0.8 41 -38 50 12 2 -30 0.4 0.9 -25 -30 39 24 2 -30 0.6 0.5 42 -15 -8 -36 1 -30 0.4 0.9 2 -13 8 1 2 -30 0.6 0.4 47 -13 -19 -37 1 -30 0.3 0.7 50 -46 43 29 2 -30 0.9 0.4 24 -28 -17 -26 1 -30 0.9 0.1 41 30 27 -27 1 -30 0.8 0.9 25 -18 3 -14 1 -30 0.9 0.4 45 -15 49 -9 1 -30 0.3 0.4 12 -40 36 -22 2 -30 0.6 0.3 49 2 -7 -11 1 -30 0.6 0.9 -1 -47 28 11 2 -30 0.6 0.4 -12 -27 7 0 2 -30 0.7 0.3 49 -24 45 34 2 -30 0.4 0.8 33 19 -30 -43 1 -30 0.2 0.1 26 -36 11 -33 1 -30 0.1 0.2 24 -22 50 -2 2 -30 0.7 0.3 42 24 -38 -46 1 -30 0.2 0.2 -8 -32 26 -37 2 -30 0.5 0.3 12 -28 29 -2 2 -30 0.8 0.7 40 -10 26 -46 1 -30 0.1 0.4 41 16 -7 -32 1 -30 0.5 0.4 36 27 44 31 2 -30 0.8 0.2 1 -32 32 -7 2 -30 0.9 0.9 -1 -24 44 33 2 -30 0.6 0.5 19 7 32 -16 1 -30 0.5 0.2 42 18 4 -33 1 -30 0.2 0.2 31 -48 50 -17 2 -30 0.7 0.3 47 -29 -23 -25 1 -30 0.7 0.4 50 12 44 -43 1 -30 0.1 0.2 17 -50 32 31 2 -30 0.4 0.3 34 30 33 28 1 -30 0.1 0.4 41 36 27 12 1 -30 0.7 0.9 39 -4 -31 -47 1 -30 0.7 0.7 28 -31 40 -46 2 -30 0.9 0.5 3 -38 10 -19 1 -30 0.1 0.3 0 -48 -15 -47 2 -30 0.6 0.9 50 5 32 7 1 -30 0.7 0.9 -12 -43 49 -17 2 -30 0.5 0.6 43 -16 -46 -49 1 -30 0.5 0.1 39 -17 49 37 2 -30 0.7 0.3 -7 -37 -1 -42 1 -30 0.3 0.4 10 9 -27 -46 1 -30 0.8 0.3 -6 -14 3 -3 2 -30 0.1 0.4 36 -47 34 -25 2 -30 0.8 0.1 -33 -43 18 12 2 -30 0.1 0.8 36 -49 47 10 2 -30 0.8 0.2 -22 -50 21 -9 2 -30 0.7 0.7 41 37 30 -31 1 -30 0.2 0.1 3 -30 16 -47 1 -30 0.7 0.7 26 -5 50 -32 2 -30 0.5 0.1 8 -20 18 7 2 -30 0.3 0.8 24 -36 -25 -38 1 -30 0.1 0.4 37 -40 30 -19 2 -30 0.7 0.9 -8 -40 44 34 
2 -30 0.5 0.2 -11 -16 8 3 2 -30 0.6 0.3 -34 -36 -20 -22 2 -30 0.9 0.7 -2 -38 17 -40 2 -30 0.6 0.3 43 21 -42 -50 1 -30 0.8 0.1 45 -7 -13 -22 1 -30 0.8 0.9 21 -9 34 -11 2 -30 0.3 0.2 -7 -32 1 -34 1 -30 0.8 0.2 -4 -48 49 -11 2 -30 0.4 0.1 6 -30 32 -9 2 -30 0.6 0.7 21 19 -11 -14 1 -30 0.1 0.2 -6 -39 -2 -16 2 -30 0.1 0.8 -5 -26 9 -5 2 -30 0.2 0.1 -10 -24 24 -38 1 -30 0.8 0.3 18 3 48 -50 1 -30 0.1 0.5 -6 -38 29 17 2 -30 0.7 0.6 22 17 47 17 2 -30 0.2 0.6 15 -42 34 25 2 -30 0.4 0.1 -11 -40 25 24 2 -30 0.4 0.5 -16 -47 -4 -9 2 -30 0.6 0.2 12 8 18 -50 1 -30 0.5 0.9 -26 -30 50 -42 2 -30 0.8 0.3 40 -6 -1 -42 1 -30 0.1 0.7 36 -34 -7 -46 2 -30 0.1 0.3 24 11 -6 -25 1 -30 0.9 0.2 38 16 -33 -49 1 -30 0.4 0.1 -19 -33 48 45 2 -30 0.8 0.8 3 0 43 13 2 -30 0.5 0.4 39 25 45 1 1 -30 0.1 0.2 4 -7 44 5 2 -30 0.9 0.8 36 23 44 -8 2 -30 0.5 0.6 43 30 -20 -44 1 -30 0.4 0.1 26 -2 38 12 2 -30 0.2 0.4 -12 -35 41 -47 2 -30 0.3 0.9 37 19 36 -36 2 -30 0.7 0.4 41 -2 18 -20 1 -30 0.6 0.9 10 -28 -12 -50 1 -30 0.7 0.4 25 17 -16 -46 1 -30 0.9 0.6 -28 -37 2 -49 2 -30 0.3 0.9 22 7 38 -23 2 -30 0.8 0.9 39 28 48 -47 2 -30 0.7 0.9 26 0 -31 -50 1 -30 0.9 0.4 43 42 22 -36 1 -30 0.1 0.2 7 1 22 -25 1 -30 0.9 0.7 40 19 46 -28 1 -30 0.3 0.2 0 -4 -4 -34 1 -30 0.8 0.2 48 46 6 -12 1 -30 0.8 0.9 5 2 41 -27 2 -30 0.5 0.3 13 -5 25 -34 1 -30 0.4 0.8 26 7 47 -4 2 -30 0.9 0.4 -25 -33 39 -42 2 -30 0.7 0.3 15 -39 31 -28 1 -30 0.2 0.2 -33 -41 47 31 2 -30 0.7 0.4 46 -6 5 -1 1 -30 0.6 0.6 33 -44 -4 -31 1 -30 0.4 0.4 -35 -45 20 15 2 -30 0.1 0.4 40 2 -19 -25 1 -30 0.3 0.8 -2 -29 0 -40 2 -30 0.6 0.7 19 -17 -2 -20 1 -30 0.7 0.3 13 -31 -7 -42 1 -30 0.6 0.6 15 -24 18 -18 2 -30 0.9 0.4 23 -5 45 -7 1 -30 0.3 0.2 -40 -44 15 -34 2 -30 0.3 0.3 38 -41 26 -20 2 -30 0.4 0.3 26 -44 37 -2 2 -30 0.2 0.2 13 -41 37 -27 2 -30 0.1 0.1 -35 -40 50 -28 2 -30 0.4 0.5 -6 -25 26 -46 2 -30 0.1 0.6 20 -35 18 -10 2 -30 0.6 0.8 13 2 -7 -38 1 -30 0.1 0.4 26 24 5 -15 1 -30 0.5 0.2 -4 -7 30 -24 1 -30 0.6 0.2 18 -20 44 -7 2 -30 0.1 0.7 43 0 16 -17 1 -30 0.5 0.1 1 -38 26 -16 2 -30 0.8 0.7 -26 -45 43 19 2 -30 0.2 0.3 38 -42 25 8 2 -30 0.1 0.1 28 -46 44 -4 2 -30 0.2 0.3 -5 -48 18 -47 2 -30 0.4 0.8 4 -37 27 -22 2 -30 0.5 0.6 33 11 10 -18 1 -30 0.3 0.3 43 -44 0 -21 2 -30 0.9 0.9 36 26 30 -5 1 -30 0.4 0.2 48 -15 -10 -48 1 -30 0.3 0.7 18 14 33 -10 2 -30 0.5 0.4 34 20 1 -19 1 -30 0.1 0.4 48 46 41 14 1 -30 0.2 0.3 39 -22 31 -32 1 -30 0.6 0.5 33 -17 33 -14 1 -30 0.3 0.1 -4 -20 29 -16 2 -30 0.9 0.2 35 28 33 -6 1 -30 0.5 0.7 20 -43 31 -27 2 -30 0.4 0.1 -29 -47 -25 -30 2 -30 0.8 0.5 -9 -25 29 -33 2 -30 0.4 0.9 28 -42 43 6 2 -30 0.8 0.4 42 -6 8 -25 1 -30 0.3 0.4 28 -8 46 21 2 -30 0.5 0.7 40 -17 -28 -41 1 -30 0.9 0.9 39 -34 -6 -45 1 -30 0.5 0.6 19 10 41 35 2 -30 0.1 0.5 19 -23 48 19 2 -30 0.7 0.3 10 -38 -1 -9 1 -30 0.5 0.2 39 -40 2 -45 1 -30 0.4 0.3 42 41 46 34 1 -30 0.6 0.6 -9 -37 -32 -44 1 -30 0.5 0.8 10 -8 25 -44 2 -30 0.2 0.1 40 -24 31 15 2 -30 0.2 0.5 -17 -36 -3 -24 2 -30 0.4 0.3 -39 -50 30 16 2 -30 0.8 0.8 46 5 -3 -21 1 -30 0.5 0.3 -2 -4 29 -4 2 -30 0.6 0.6 23 -37 26 12 2 -30 0.3 0.8 -48 -49 25 -48 2 -30 0.5 0.7 4 -14 31 23 2 -30 0.7 0.3 9 -17 29 28 2 -30 0.4 0.5 44 -21 37 -20 2 -30 0.2 0.3 48 2 -33 -39 1 -30 0.4 0.9 34 24 5 -49 1 -30 0.5 0.7 22 12 8 -10 1 -30 0.8 0.5 21 -49 37 1 2 -30 0.4 0.5 11 -37 6 -31 1 -30 0.8 0.1 45 36 43 -36 1 -30 0.4 0.1 2 -28 30 29 2 -30 0.6 0.6 2 -33 32 -2 2 -30 0.8 0.1 -27 -41 8 -13 2 -30 0.5 0.6 36 -11 32 -41 1 -30 0.2 0.7 37 -48 41 38 2 -30 0.3 0.4 37 -31 -21 -50 1 -30 0.7 0.9 26 -2 12 -5 1 -30 0.7 0.6 25 -49 -17 -42 1 -30 0.8 0.4 13 -1 10 -35 1 -30 0.1 0.6 44 -36 -6 
-31 2 -30 0.7 0.4 48 7 22 -40 1 -30 0.6 0.7 -7 -36 8 -29 2 -30 0.1 0.9 13 1 47 -41 2 -30 0.6 0.2 -40 -47 2 -9 2 -30 0.7 0.2 -27 -46 47 -31 2 -30 0.7 0.4 31 -38 45 39 2 -30 0.5 0.2 42 -38 28 19 2 -30 0.7 0.8 17 -2 50 13 2 -30 0.1 0.3 1 -36 46 44 2 -30 0.9 0.2 33 6 16 -22 1 -30 0.2 0.5 14 -3 6 -17 1 -30 0.4 0.3 -11 -43 40 16 2 -30 0.9 0.2 46 -1 -10 -45 1 -30 0.4 0.9 38 -40 7 -44 2 -30 0.8 0.4 35 -18 47 39 2 -30 0.6 0.4 -17 -26 26 10 2 -30 0.8 0.1 43 -24 -26 -34 1 -30 0.6 0.1 47 -50 -7 -16 1 -30 0.1 0.3 -13 -27 50 -37 2 -30 0.6 0.8 28 24 31 11 2 -30 0.2 0.5 30 28 -26 -43 1 -30 0.6 0.6 24 -20 -2 -42 1 -30 0.5 0.4 31 -38 6 -20 1 -30 0.4 0.1 22 -44 45 7 2 -30 0.9 0.9 43 -35 9 -35 1 -30 0.7 0.2 18 -9 19 8 2 -30 0.8 0.6 18 -6 -28 -36 1 -30 0.9 0.1 49 -4 41 15 1 -30 0.5 0.2 -16 -38 -17 -42 1 -30 0.9 0.7 46 -18 31 -46 1 -30 0.6 0.8 46 17 6 -15 1 -30 0.6 0.8 46 -21 -22 -50 1 -30 0.1 0.9 46 31 49 22 2 -30 0.7 0.5 4 -25 17 -30 1 -30 0.3 0.1 -18 -24 -15 -27 1 -30 0.9 0.7 11 -10 -6 -47 1 -30 0.6 0.6 42 -8 4 -8 1 -30 0.8 0.3 13 -3 45 -6 1 -30 0.8 0.7 18 -1 49 -46 2 -30 0.2 0.8 16 -40 44 -12 2 -30 0.1 0.3 -31 -34 28 -22 2 -30 0.3 0.8 24 -23 41 -39 2 -30 0.3 0.5 24 16 46 -9 1 -30 0.1 0.9 6 -48 39 22 2 -30 0.9 0.5 47 -12 -7 -35 1 -30 0.9 0.9 -41 -46 50 17 2 -30 0.3 0.1 -22 -30 26 18 2 -30 0.2 0.9 12 -24 40 21 2 -30 0.9 0.7 29 -22 35 3 1 -30 0.8 0.4 27 -25 13 -23 1 -30 0.8 0.7 -3 -22 35 1 2 -30 0.7 0.7 23 9 21 -1 1 -30 0.3 0.7 -7 -11 43 4 2 -30 0.4 0.3 -18 -48 38 32 2 -30 0.2 0.3 19 2 38 27 2 -30 0.4 0.6 25 -27 45 2 2 -30 0.6 0.2 17 -32 -21 -42 1 -30 0.8 0.6 2 -13 9 -45 1 -30 0.5 0.9 -6 -26 40 -16 2 -30 0.2 0.3 23 -47 29 12 2 -30 0.2 0.2 33 20 14 11 1 -30 0.9 0.3 -8 -28 5 -17 2 -30 0.2 0.6 48 27 41 17 1 -30 0.6 0.2 15 -49 -14 -36 1 -30 0.1 0.2 32 18 16 -37 1 -30 0.4 0.5 22 -28 41 32 2 -30 0.4 0.4 -12 -33 -33 -38 1 -30 0.8 0.5 27 -28 -38 -40 1 -30 0.7 0.7 16 -17 45 -31 2 -30 0.2 0.8 42 25 41 1 1 -30 0.1 0.3 35 -22 46 -40 2 -30 0.7 0.5 7 -3 41 -35 1 -30 0.6 0.8 -19 -37 42 -50 2 -30 0.5 0.9 13 -20 31 -42 2 -30 0.4 0.9 42 -28 33 4 2 -30 0.8 0.7 2 -32 42 -4 2 -30 0.1 0.6 -1 -44 7 0 2 -30 0.4 0.4 31 -23 7 -10 1 -30 0.1 0.7 24 10 -11 -48 1 -30 0.7 0.6 11 -14 10 -42 1 -30 0.1 0.3 -29 -39 5 -44 2 -30 0.4 0.4 37 2 27 26 2 -30 0.4 0.7 -18 -36 -17 -46 2 -30 0.6 0.4 32 -24 -25 -36 1 -30 0.4 0.6 12 -19 -31 -46 1 -30 0.8 0.8 28 8 46 -50 2 -30 0.3 0.5 37 1 14 7 1 -30 0.4 0.2 45 13 24 17 1 -30 0.2 0.2 18 -49 48 -35 2 -30 0.9 0.1 23 10 13 -11 1 -30 0.2 0.3 43 37 36 22 1 -30 0.5 0.5 9 -28 17 -24 2 -30 0.8 0.7 32 15 12 -49 1 -30 0.4 0.6 -2 -33 44 -15 2 -30 0.1 0.2 42 -46 29 17 2 -30 0.4 0.3 -38 -40 39 6 2 -30 0.3 0.7 29 12 36 34 2 -30 0.8 0.9 -30 -43 43 -8 2 -30 0.5 0.1 -4 -39 -10 -17 2 -31 0.8 0.1 35 -25 14 -1 1 -31 0.7 0.9 -2 -7 47 -42 2 -31 0.8 0.6 -20 -24 48 -38 2 -31 0.7 0.4 49 36 35 -14 1 -31 0.9 0.3 48 47 32 13 1 -31 0.1 0.3 38 -25 -23 -47 1 -31 0.8 0.9 24 -21 13 -16 1 -31 0.8 0.1 37 21 -31 -41 1 -31 0.6 0.1 10 -43 -10 -36 1 -31 0.9 0.4 19 1 26 -15 1 -31 0.1 0.9 43 17 13 -8 1 -31 0.3 0.8 22 -22 2 -3 2 -31 0.8 0.2 -5 -37 25 -23 1 -31 0.5 0.9 22 7 49 -13 2 -31 0.8 0.5 -12 -17 17 -6 2 -31 0.7 0.7 7 -18 48 37 2 -31 0.8 0.5 -33 -34 31 -37 2 -31 0.3 0.3 -17 -21 49 -19 2 -31 0.7 0.4 12 8 -7 -21 1 -31 0.3 0.3 24 -30 21 -49 1 -31 0.4 0.6 4 -5 23 -35 1 -31 0.2 0.6 33 18 29 21 1 -31 0.3 0.9 -8 -39 -23 -45 2 -31 0.6 0.5 9 -2 42 33 2 -31 0.6 0.1 7 -10 21 -35 1 -31 0.1 0.1 50 12 32 -28 1 -31 0.2 0.5 50 -24 5 -19 2 -31 0.4 0.5 19 -7 17 -42 2 -31 0.3 0.4 9 -38 2 -46 1 -31 0.6 0.9 45 -18 42 25 2 -31 0.5 0.1 1 -41 27 -48 1 -31 0.7 0.5 42 
-15 19 -13 1 -31 0.4 0.1 5 -19 1 0 1 -31 0.9 0.6 -17 -23 41 -50 2 -31 0.5 0.1 -9 -38 18 5 2 -31 0.2 0.2 39 37 12 2 1 -31 0.6 0.2 -26 -34 10 -40 2 -31 0.5 0.2 -17 -46 15 14 2 -31 0.8 0.5 22 -29 31 14 2 -31 0.8 0.4 16 6 29 -5 1 -31 0.8 0.3 -11 -26 22 -19 2 -31 0.6 0.2 20 -41 26 -45 1 -31 0.3 0.9 13 3 34 10 2 -31 0.6 0.9 20 -34 7 -39 1 -31 0.9 0.8 49 21 42 -10 1 -31 0.2 0.7 29 -11 21 0 2 -31 0.3 0.2 40 -9 34 -26 2 -31 0.2 0.8 25 -24 16 -26 2 -31 0.7 0.8 -26 -40 26 -22 2 -31 0.8 0.9 -21 -38 12 -36 2 -31 0.4 0.5 39 28 46 -10 1 -31 0.1 0.8 38 27 10 1 1 -31 0.6 0.5 41 -7 10 3 1 -31 0.4 0.3 -1 -44 -25 -35 1 -31 0.2 0.1 32 -7 50 -14 1 -31 0.4 0.5 47 -38 22 -13 2 -31 0.3 0.5 30 12 -3 -33 1 -31 0.6 0.5 29 11 -7 -50 1 -31 0.4 0.6 19 -19 6 -46 1 -31 0.2 0.4 39 29 20 -33 1 -31 0.7 0.5 10 -36 -29 -34 1 -31 0.5 0.7 48 -19 18 -12 2 -31 0.2 0.4 22 -3 19 -44 1 -31 0.3 0.3 -8 -13 27 15 2 -31 0.7 0.1 6 -4 29 -1 1 -31 0.6 0.3 1 -8 25 -20 1 -31 0.2 0.2 -24 -27 15 -16 2 -31 0.5 0.8 27 10 34 16 2 -31 0.5 0.6 32 7 24 -13 2 -31 0.8 0.7 37 16 15 12 1 -31 0.4 0.4 33 -27 -11 -49 1 -31 0.3 0.9 43 3 -43 -50 1 -31 0.2 0.4 20 -34 46 -10 2 -31 0.5 0.3 22 -12 38 -39 1 -31 0.8 0.6 -18 -46 -22 -45 1 -31 0.1 0.8 46 -23 24 21 2 -31 0.3 0.4 9 -41 40 16 2 -31 0.2 0.6 16 -13 -13 -31 1 -31 0.9 0.8 12 -26 -13 -23 1 -31 0.6 0.9 31 -36 47 5 2 -31 0.4 0.5 14 -19 40 31 2 -31 0.4 0.9 7 -40 -1 -40 2 -31 0.8 0.7 38 17 -29 -34 1 -31 0.5 0.8 24 9 14 3 2 -31 0.2 0.3 43 2 41 -12 1 -31 0.5 0.2 43 12 50 -50 1 -31 0.4 0.2 46 -4 18 -47 1 -31 0.4 0.7 4 -38 -8 -38 2 -31 0.2 0.6 47 -24 38 7 2 -31 0.3 0.4 48 5 27 15 1 -31 0.3 0.8 41 -35 20 -41 2 -31 0.4 0.2 0 -44 5 -9 2 -31 0.9 0.1 37 -20 -1 -29 1 -31 0.3 0.9 48 -17 31 11 2 -31 0.8 0.8 47 42 31 -9 1 -31 0.1 0.6 50 49 -4 -33 1 -31 0.8 0.8 -36 -39 36 -2 2 -31 0.5 0.1 30 -16 -8 -19 1 -31 0.2 0.6 12 2 6 -38 1 -31 0.6 0.3 -13 -49 6 -47 1 -31 0.7 0.1 -20 -43 45 -28 2 -31 0.8 0.9 31 5 7 -31 1 -31 0.1 0.7 -1 -6 39 -26 2 -31 0.5 0.4 27 -5 35 -40 1 -31 0.7 0.1 36 28 49 -13 1 -31 0.9 0.1 24 -3 39 -23 1 -31 0.3 0.1 -2 -25 40 32 2 -31 0.9 0.7 47 25 34 -35 1 -31 0.7 0.2 6 1 21 -4 1 -31 0.2 0.6 -34 -41 12 4 2 -31 0.8 0.6 15 -38 19 -5 2 -31 0.5 0.9 17 -34 34 -8 2 -31 0.6 0.8 44 34 20 -30 1 -31 0.1 0.6 46 24 15 -13 1 -31 0.2 0.6 29 10 49 5 2 -31 0.1 0.7 18 -1 -5 -41 1 -31 0.1 0.9 48 42 -30 -36 1 -31 0.4 0.2 -1 -19 -37 -49 1 -31 0.9 0.4 48 -48 49 45 2 -31 0.6 0.6 -3 -47 0 -24 2 -31 0.7 0.2 -21 -47 23 9 2 -31 0.3 0.2 24 -7 44 -25 1 -31 0.3 0.6 -37 -39 33 -48 2 -31 0.8 0.8 -9 -36 -24 -46 1 -31 0.8 0.4 1 -47 -15 -49 1 -31 0.3 0.1 44 29 19 -23 1 -31 0.7 0.9 28 -21 22 0 2 -31 0.9 0.8 -31 -38 48 3 2 -31 0.6 0.3 47 -7 31 -42 1 -31 0.2 0.7 -4 -23 6 -46 2 -31 0.4 0.8 -4 -44 10 -4 2 -31 0.5 0.1 41 -41 42 -2 1 -31 0.8 0.3 -9 -48 23 -38 2 -31 0.5 0.1 44 -30 38 8 1 -31 0.2 0.8 23 -12 18 -12 2 -31 0.8 0.3 39 -37 16 -43 1 -31 0.7 0.9 41 -43 11 -23 1 -31 0.6 0.3 4 -11 -6 -50 1 -31 0.6 0.8 11 -10 9 -26 2 -31 0.9 0.8 9 0 14 -14 2 -31 0.3 0.6 2 -25 14 -36 2 -31 0.9 0.1 -38 -39 38 12 2 -31 0.7 0.1 35 -24 49 30 2 -31 0.4 0.2 18 -14 11 -27 1 -31 0.7 0.3 -24 -37 38 3 2 -31 0.9 0.1 3 -34 2 -18 1 -31 0.6 0.8 26 -26 23 -26 2 -31 0.6 0.1 8 -41 -5 -26 1 -31 0.1 0.9 39 -31 -20 -42 1 -31 0.9 0.3 40 10 -6 -40 1 -31 0.8 0.4 28 -21 49 -36 1 -31 0.2 0.2 24 -38 -6 -45 1 -31 0.5 0.9 43 -23 -7 -18 1 -31 0.8 0.8 -20 -25 38 -26 2 -31 0.8 0.1 10 -13 18 -50 1 -31 0.9 0.9 -42 -50 41 3 2 -31 0.6 0.8 -8 -44 16 -36 2 -31 0.5 0.2 43 40 16 11 1 -31 0.3 0.6 28 2 33 -47 1 -31 0.6 0.7 12 -34 50 -19 2 -31 0.8 0.5 23 -45 -1 -28 1 -31 0.5 0.2 17 3 0 -3 1 -31 0.2 0.4 15 
-26 11 -13 2 -31 0.8 0.5 44 -24 28 -40 1 -31 0.8 0.4 16 -17 11 -26 1 -31 0.1 0.6 8 -21 35 2 2 -31 0.2 0.9 10 -38 49 -8 2 -31 0.2 0.8 14 -30 -17 -35 1 -31 0.6 0.8 41 -27 45 9 1 -31 0.7 0.8 0 -8 24 3 2 -31 0.1 0.8 11 9 -22 -49 1 -31 0.3 0.6 34 28 38 9 1 -31 0.5 0.9 -5 -50 8 -3 2 -31 0.3 0.7 -31 -37 42 -16 2 -31 0.8 0.7 8 -5 44 -24 2 -31 0.4 0.1 10 -31 23 2 2 -31 0.4 0.9 48 43 42 27 2 -31 0.6 0.9 38 -38 46 -23 2 -31 0.5 0.7 13 3 -32 -42 1 -31 0.5 0.4 45 -4 34 -37 1 -31 0.6 0.3 25 -11 -38 -45 1 -31 0.1 0.9 -16 -30 19 -9 2 -31 0.1 0.2 40 -1 36 0 2 -31 0.9 0.2 32 -8 47 27 2 -31 0.6 0.9 43 -22 35 -39 1 -31 0.6 0.5 25 17 -3 -41 1 -31 0.5 0.5 28 -24 50 -42 1 -31 0.1 0.7 12 -18 45 9 2 -31 0.4 0.8 -43 -48 44 -14 2 -31 0.6 0.6 29 -44 5 2 2 -31 0.8 0.9 -24 -48 36 -7 2 -31 0.8 0.3 3 -1 17 6 2 -31 0.5 0.2 -19 -49 42 -12 2 -31 0.5 0.3 48 -3 -33 -44 1 -31 0.3 0.4 20 -43 50 19 2 -31 0.4 0.5 13 5 35 -22 1 -31 0.9 0.4 33 6 -28 -46 1 -31 0.2 0.8 -15 -25 43 -31 2 -31 0.6 0.1 27 14 23 -46 1 -31 0.9 0.8 18 -40 20 -46 1 -31 0.9 0.7 28 -47 -10 -19 1 -31 0.6 0.3 4 -17 -38 -44 1 -31 0.2 0.2 -11 -38 -19 -32 1 -31 0.7 0.1 11 -31 11 -42 1 -31 0.6 0.6 16 -6 0 -14 1 -31 0.9 0.6 30 12 19 -4 1 -31 0.9 0.3 29 13 36 -9 1 -31 0.4 0.6 43 32 31 -12 1 -31 0.3 0.7 24 -6 19 -46 1 -31 0.6 0.7 -1 -18 33 12 2 -31 0.2 0.7 33 -32 -11 -17 2 -31 0.5 0.1 19 15 12 -37 1 -31 0.8 0.3 -1 -49 10 -20 1 -31 0.5 0.5 -2 -47 15 10 2 -31 0.9 0.7 43 18 49 12 2 -31 0.8 0.4 -5 -46 19 -8 2 -31 0.4 0.6 15 12 20 -2 1 -31 0.5 0.5 -18 -33 25 -14 2 -31 0.1 0.2 23 -17 -4 -35 1 -31 0.1 0.3 42 -23 2 -6 1 -31 0.8 0.9 46 18 30 6 2 -31 0.8 0.8 40 8 27 -3 1 -31 0.6 0.7 31 24 35 28 2 -31 0.3 0.7 31 -27 -6 -35 1 -31 0.8 0.2 -33 -44 16 -41 2 -31 0.1 0.9 16 -13 33 2 2 -31 0.8 0.7 23 19 30 -47 1 -31 0.6 0.6 18 -38 -5 -8 1 -31 0.2 0.5 4 -44 39 -15 2 -31 0.3 0.1 41 -42 -5 -35 1 -31 0.7 0.5 47 -36 28 4 1 -31 0.6 0.5 14 3 -4 -37 1 -31 0.1 0.1 39 7 42 15 2 -31 0.6 0.3 46 17 14 2 1 -31 0.2 0.3 47 -11 38 1 2 -31 0.1 0.8 45 -37 34 -13 2 -31 0.3 0.2 -18 -21 -7 -15 1 -31 0.8 0.4 1 -45 -13 -19 2 -31 0.4 0.2 5 -7 32 12 2 -31 0.3 0.9 21 12 14 -34 1 -31 0.3 0.2 4 -11 25 -34 2 -31 0.8 0.2 50 -35 -16 -38 1 -31 0.4 0.8 44 -9 46 -1 2 -31 0.4 0.5 24 -10 9 -27 2 -31 0.6 0.7 -26 -49 34 -14 2 -31 0.6 0.8 49 7 49 41 1 -31 0.4 0.1 13 -43 3 -17 2 -31 0.1 0.3 29 17 4 -3 1 -31 0.2 0.3 40 -30 36 16 2 -31 0.3 0.8 26 -7 4 -20 2 -31 0.3 0.1 25 -21 -14 -39 1 -31 0.5 0.1 -19 -45 43 -26 2 -31 0.8 0.4 -19 -34 -7 -44 1 -31 0.8 0.1 -43 -48 -34 -39 1 -31 0.2 0.1 12 7 -9 -32 1 -31 0.7 0.5 42 33 27 -33 1 -31 0.8 0.9 -30 -38 -3 -19 2 -31 0.5 0.8 -6 -40 20 18 2 -31 0.8 0.9 28 10 -14 -28 1 -31 0.8 0.6 37 -31 34 -12 1 -31 0.4 0.4 43 -47 16 3 2 -31 0.4 0.1 27 -47 8 -43 1 -31 0.2 0.6 13 -27 -16 -48 2 -31 0.5 0.1 16 -15 32 -35 1 -31 0.8 0.5 -24 -41 40 35 2 -31 0.2 0.5 32 12 38 -1 2 -31 0.8 0.5 -16 -45 46 -20 2 -31 0.3 0.4 -22 -23 42 15 2 -31 0.5 0.6 -7 -29 41 15 2 -31 0.3 0.9 -29 -49 4 -36 2 -31 0.5 0.7 48 12 45 44 1 -31 0.1 0.7 10 -44 31 -37 2 -31 0.3 0.9 36 -31 38 -40 2 -31 0.8 0.3 34 -15 11 -19 1 -31 0.8 0.2 47 -33 2 -23 1 -31 0.3 0.2 -32 -35 23 -31 2 -31 0.5 0.9 10 7 28 -13 2 -31 0.3 0.8 -9 -32 2 -43 2 -31 0.2 0.6 25 -40 -2 -38 1 -31 0.5 0.8 31 -13 27 -28 2 -31 0.7 0.9 -1 -26 49 16 2 -31 0.1 0.5 -44 -45 16 11 2 -31 0.2 0.3 19 -33 43 41 2 -31 0.5 0.5 21 6 38 -10 2 -31 0.4 0.4 7 -5 -13 -45 1 -31 0.8 0.8 36 15 25 16 1 -31 0.1 0.4 -32 -45 -13 -42 2 -31 0.2 0.1 21 17 37 36 2 -31 0.2 0.5 6 -37 47 34 2 -31 0.8 0.6 12 -16 36 -7 2 -31 0.4 0.5 -18 -27 -36 -46 1 -31 0.9 0.9 35 32 48 33 2 -31 0.1 0.7 27 -50 44 25 2 -31 0.9 
0.2 -41 -49 29 5 2 -31 0.8 0.5 41 19 17 -18 1 -31 0.8 0.6 19 -40 -9 -18 1 -31 0.7 0.2 46 -49 32 -43 1 -31 0.1 0.2 11 -36 47 43 2 -31 0.2 0.6 -32 -47 13 -32 2 -31 0.4 0.2 43 -42 24 2 1 -32 0.4 0.6 -30 -39 32 -41 2 -32 0.3 0.8 12 8 45 -35 2 -32 0.8 0.1 13 -12 9 -48 1 -32 0.3 0.4 18 -18 8 -33 1 -32 0.2 0.7 46 8 -29 -35 1 -32 0.4 0.9 24 -18 7 -5 2 -32 0.8 0.2 -37 -39 4 -25 2 -32 0.7 0.6 -19 -48 29 22 2 -32 0.1 0.6 15 -27 46 -45 2 -32 0.8 0.1 26 -4 9 -22 1 -32 0.1 0.1 32 11 17 -20 1 -32 0.4 0.2 48 -47 12 -48 1 -32 0.1 0.6 43 -16 20 -30 2 -32 0.1 0.5 23 -5 7 3 2 -32 0.5 0.4 -12 -20 42 -15 2 -32 0.1 0.4 48 -30 35 15 2 -32 0.4 0.9 -2 -5 43 -25 2 -32 0.5 0.1 -15 -21 49 24 2 -32 0.7 0.5 21 -17 14 -4 1 -32 0.1 0.9 48 14 30 -31 1 -32 0.7 0.2 26 -31 4 -11 1 -32 0.5 0.6 -1 -20 31 14 2 -32 0.7 0.3 10 -9 23 22 2 -32 0.7 0.7 19 -45 -31 -48 1 -32 0.9 0.8 48 -27 49 30 2 -32 0.2 0.4 19 -2 17 -6 2 -32 0.1 0.1 25 18 28 17 1 -32 0.5 0.6 44 -12 47 -28 2 -32 0.7 0.2 50 -27 9 -49 1 -32 0.3 0.3 26 -15 -7 -16 1 -32 0.1 0.6 23 -45 -38 -47 1 -32 0.7 0.4 23 -35 44 -43 1 -32 0.1 0.2 -19 -40 34 17 2 -32 0.4 0.9 45 -30 42 -32 2 -32 0.9 0.8 30 -15 36 -2 2 -32 0.1 0.5 35 31 47 -50 1 -32 0.8 0.8 -13 -21 -17 -45 1 -32 0.3 0.3 23 -23 33 19 2 -32 0.9 0.7 47 45 15 -14 1 -32 0.6 0.6 -13 -25 -5 -16 2 -32 0.1 0.9 12 -41 23 -43 2 -32 0.4 0.7 25 -30 14 5 2 -32 0.2 0.7 35 16 8 -43 1 -32 0.6 0.8 38 -27 -5 -46 1 -32 0.8 0.6 37 -47 23 2 2 -32 0.5 0.2 18 -27 43 22 2 -32 0.7 0.5 29 18 30 18 2 -32 0.9 0.7 50 26 -27 -44 1 -32 0.3 0.6 40 29 44 2 1 -32 0.1 0.1 44 -39 26 8 2 -32 0.2 0.1 48 9 48 19 2 -32 0.1 0.5 -5 -37 39 36 2 -32 0.2 0.9 41 30 16 4 1 -32 0.1 0.9 -11 -17 32 -37 2 -32 0.9 0.8 -28 -29 49 14 2 -32 0.5 0.4 25 14 0 -16 1 -32 0.2 0.7 4 -30 -22 -24 2 -32 0.8 0.7 -14 -23 49 -33 2 -32 0.2 0.4 39 22 49 -49 1 -32 0.9 0.6 25 14 -19 -46 1 -32 0.4 0.1 -19 -33 47 42 2 -32 0.3 0.4 19 -17 33 32 2 -32 0.2 0.5 31 13 1 -33 1 -32 0.2 0.4 -31 -46 3 -34 2 -32 0.2 0.4 -4 -20 -20 -48 1 -32 0.8 0.4 30 8 14 -39 1 -32 0.9 0.2 37 22 7 4 1 -32 0.2 0.6 -26 -29 45 -40 2 -32 0.7 0.8 23 -23 46 -21 2 -32 0.2 0.1 0 -27 -24 -50 1 -32 0.9 0.8 41 -18 4 -50 1 -32 0.9 0.9 29 14 46 -42 2 -32 0.4 0.6 35 15 25 -39 1 -32 0.9 0.7 40 -4 -9 -46 1 -32 0.1 0.5 -30 -41 42 -29 2 -32 0.3 0.6 46 15 45 19 2 -32 0.6 0.1 -13 -34 13 -13 2 -32 0.3 0.4 34 -25 33 -31 2 -32 0.9 0.8 9 -16 36 30 2 -32 0.1 0.9 27 -23 7 -29 2 -32 0.4 0.3 50 47 34 23 2 -32 0.1 0.6 -18 -22 1 -47 2 -32 0.5 0.8 13 -35 -1 -15 2 -32 0.9 0.6 39 -33 -6 -44 1 -32 0.3 0.3 39 -36 42 5 2 -32 0.5 0.6 39 -48 45 -2 2 -32 0.3 0.1 -33 -48 45 -40 1 -32 0.6 0.7 23 -14 33 -36 2 -32 0.1 0.2 48 -41 31 14 2 -32 0.8 0.2 31 -21 50 -12 1 -32 0.3 0.4 23 -42 12 -14 2 -32 0.1 0.1 24 -47 13 -35 2 -32 0.4 0.6 48 -5 26 -39 1 -32 0.4 0.7 4 -6 -40 -42 1 -32 0.5 0.4 37 18 -1 -18 1 -32 0.6 0.1 28 -38 42 6 2 -32 0.8 0.2 33 -27 40 1 1 -32 0.9 0.8 37 -1 9 -42 1 -32 0.7 0.3 27 -34 31 -31 1 -32 0.1 0.7 20 -9 6 -48 2 -32 0.4 0.6 24 -27 -26 -42 1 -32 0.6 0.1 21 -19 45 36 2 -32 0.3 0.7 48 26 -27 -43 1 -32 0.4 0.7 16 13 4 -37 1 -32 0.5 0.1 50 40 27 -45 1 -32 0.8 0.7 42 -38 14 -20 1 -32 0.6 0.8 1 -44 -1 -42 2 -32 0.8 0.4 -16 -26 27 -35 1 -32 0.3 0.1 46 45 10 -11 1 -32 0.6 0.5 33 -50 39 -10 1 -32 0.7 0.8 29 -43 46 -3 2 -32 0.2 0.9 40 -18 -12 -48 1 -32 0.7 0.9 13 -46 49 20 2 -32 0.6 0.8 -25 -47 38 -24 2 -32 0.7 0.5 35 -10 22 -3 1 -32 0.7 0.1 33 6 -19 -23 1 -32 0.9 0.9 -12 -20 19 -17 2 -32 0.8 0.2 30 -32 21 -37 1 -32 0.7 0.6 12 -36 33 -39 2 -32 0.1 0.2 22 -26 -24 -32 1 -32 0.9 0.9 34 -28 -17 -45 1 -32 0.6 0.3 21 -20 23 -39 1 -32 0.6 0.8 6 -9 5 -37 2 -32 0.5 
0.4 0 -2 -4 -6 1 -32 0.2 0.7 26 -31 28 10 2 -32 0.1 0.3 27 6 24 -32 1 -32 0.1 0.1 48 -42 -11 -46 1 -32 0.8 0.8 48 -37 -11 -13 1 -32 0.8 0.7 -19 -20 1 -21 1 -32 0.9 0.4 14 -11 36 25 2 -32 0.2 0.5 34 4 8 -4 1 -32 0.5 0.8 32 -1 6 -38 1 -32 0.3 0.9 31 25 41 -20 2 -32 0.1 0.1 4 -34 46 -50 2 -32 0.3 0.5 14 -11 8 -24 2 -32 0.5 0.7 10 4 48 -38 2 -32 0.8 0.7 35 -13 21 8 1 -32 0.1 0.2 11 -22 37 -27 2 -32 0.6 0.1 8 -40 6 -32 1 -32 0.6 0.9 21 -24 39 -19 2 -32 0.3 0.5 22 -33 41 -19 2 -32 0.8 0.7 21 -40 50 -9 2 -32 0.5 0.3 -7 -41 14 -4 2 -32 0.4 0.7 20 -42 -32 -39 1 -32 0.2 0.3 41 29 -12 -26 1 -32 0.6 0.6 30 -25 47 -17 2 -32 0.3 0.6 41 39 15 -15 1 -32 0.9 0.8 -1 -23 30 10 2 -32 0.2 0.7 29 -40 34 -12 2 -32 0.1 0.1 7 -33 34 -16 2 -32 0.1 0.3 15 -22 7 -14 2 -32 0.9 0.2 38 -16 -17 -31 1 -32 0.8 0.7 29 -7 30 -6 1 -32 0.8 0.4 19 6 18 4 1 -32 0.7 0.3 -4 -38 22 -28 2 -32 0.1 0.3 49 7 23 -39 1 -32 0.2 0.7 31 1 -21 -44 1 -32 0.4 0.9 48 10 38 19 2 -32 0.3 0.7 33 30 -4 -50 1 -32 0.3 0.5 42 -5 -22 -31 1 -32 0.6 0.7 50 -17 -38 -45 1 -32 0.7 0.3 27 -47 40 36 2 -32 0.4 0.4 28 -15 30 -41 1 -32 0.7 0.9 3 2 12 -15 2 -32 0.3 0.8 17 -35 -2 -40 2 -32 0.8 0.3 44 -23 45 3 1 -32 0.6 0.9 40 11 44 43 2 -32 0.9 0.9 31 28 45 3 2 -32 0.5 0.5 -14 -48 12 -16 2 -32 0.6 0.7 18 4 13 5 1 -32 0.8 0.7 41 18 28 -32 1 -32 0.3 0.6 -8 -28 0 -17 2 -32 0.9 0.6 48 -26 20 -26 1 -32 0.6 0.9 21 -16 16 -27 1 -32 0.5 0.9 26 -29 40 39 2 -32 0.3 0.9 36 -44 12 -12 2 -32 0.2 0.4 40 -1 19 10 2 -32 0.5 0.7 45 -38 44 -21 2 -32 0.5 0.4 39 -10 -3 -38 1 -32 0.5 0.6 -16 -29 29 -27 2 -32 0.4 0.3 47 -11 19 -8 1 -32 0.6 0.2 18 -29 7 -26 1 -32 0.5 0.3 36 -19 7 -17 1 -32 0.3 0.5 34 26 -28 -29 1 -32 0.6 0.8 20 -36 40 25 2 -32 0.8 0.8 -27 -37 24 17 2 -32 0.2 0.5 40 3 50 22 2 -32 0.1 0.2 24 -39 -39 -41 1 -32 0.8 0.6 -6 -40 14 3 2 -32 0.1 0.8 32 11 40 8 2 -32 0.6 0.5 9 -20 47 -4 2 -32 0.5 0.6 44 -47 -30 -42 1 -32 0.8 0.5 -21 -35 1 -23 2 -32 0.4 0.9 1 -44 3 -15 2 -32 0.5 0.5 -7 -30 10 -42 2 -32 0.6 0.6 -2 -30 32 -29 2 -32 0.3 0.1 -8 -40 17 -9 2 -32 0.4 0.2 13 -21 22 -10 1 -32 0.7 0.7 -1 -48 -39 -42 2 -32 0.3 0.5 8 -14 35 -4 2 -32 0.2 0.2 25 17 12 -17 1 -32 0.8 0.2 41 31 -4 -49 1 -32 0.7 0.1 3 -3 31 -2 2 -32 0.1 0.1 49 -19 45 39 2 -32 0.2 0.4 -1 -45 -15 -38 2 -32 0.9 0.6 -27 -45 -27 -38 2 -32 0.7 0.4 20 -45 -17 -29 1 -32 0.7 0.5 49 -36 39 18 2 -32 0.9 0.5 45 16 33 -36 1 -32 0.3 0.7 47 41 25 -30 1 -32 0.5 0.8 -13 -43 4 -29 2 -32 0.1 0.3 -8 -11 -25 -42 1 -32 0.4 0.2 36 -32 -16 -20 1 -32 0.7 0.5 -31 -40 40 1 2 -32 0.8 0.8 26 -9 -12 -22 1 -32 0.2 0.6 28 -46 -25 -29 1 -32 0.8 0.2 36 1 -6 -34 1 -32 0.6 0.9 42 -39 48 -19 2 -32 0.8 0.3 -10 -49 2 -43 2 -32 0.8 0.2 16 -30 12 -24 1 -32 0.9 0.8 0 -2 -16 -43 1 -32 0.1 0.5 3 -21 -43 -49 1 -32 0.7 0.5 36 26 47 -50 1 -32 0.1 0.1 26 -29 -20 -24 1 -32 0.7 0.3 27 -44 12 1 1 -32 0.7 0.9 -8 -45 36 -43 2 -32 0.1 0.6 25 2 -7 -26 1 -32 0.8 0.1 46 -9 27 -35 1 -32 0.9 0.4 -1 -4 -15 -37 2 -32 0.2 0.6 22 -31 -13 -49 1 -32 0.5 0.1 42 -38 -26 -30 1 -32 0.7 0.3 48 9 1 -25 1 -32 0.6 0.4 22 4 28 -4 1 -32 0.6 0.2 23 15 2 -28 1 -32 0.5 0.1 19 9 46 16 1 -32 0.1 0.1 7 1 37 -37 1 -32 0.3 0.4 39 -13 25 4 2 -32 0.2 0.3 35 26 6 -21 1 -32 0.8 0.8 38 -32 42 -30 2 -32 0.9 0.3 26 -16 -28 -43 1 -32 0.1 0.3 22 -10 -39 -43 1 -32 0.9 0.4 -4 -26 39 34 2 -32 0.4 0.8 18 -41 25 -46 2 -32 0.7 0.2 -4 -15 50 -46 1 -32 0.6 0.1 -34 -36 30 -23 2 -32 0.5 0.1 32 -1 7 -35 1 -32 0.7 0.7 8 -37 42 -2 2 -32 0.9 0.7 47 5 19 12 1 -32 0.7 0.8 42 7 10 -34 1 -32 0.9 0.1 34 23 -6 -26 1 -32 0.2 0.5 28 -48 8 -42 2 -32 0.9 0.3 7 -6 33 -33 1 -32 0.3 0.3 -16 -48 42 -17 2 -32 0.3 0.1 37 25 5 -42 1 
-32 0.8 0.1 46 39 39 13 1 -32 0.4 0.5 -7 -48 36 -7 2 -32 0.8 0.1 -8 -39 37 13 2 -32 0.1 0.7 -17 -40 45 -23 2 -32 0.1 0.4 37 -8 21 -35 2 -32 0.9 0.3 13 -10 34 -14 2 -32 0.1 0.9 25 10 34 11 1 -32 0.3 0.8 -21 -33 31 -11 2 -32 0.5 0.7 23 4 49 37 2 -32 0.8 0.2 1 -9 25 12 2 -32 0.1 0.1 -1 -50 -26 -37 2 -32 0.2 0.7 5 -21 -4 -34 2 -32 0.6 0.8 -19 -35 23 -6 2 -32 0.1 0.4 45 40 18 -30 1 -32 0.3 0.2 18 -39 4 -1 2 -32 0.9 0.9 23 -11 44 1 2 -32 0.5 0.9 21 -17 10 -41 2 -32 0.3 0.6 34 -47 44 -26 2 -32 0.4 0.1 13 -24 38 29 2 -32 0.1 0.2 42 -1 -37 -49 1 -32 0.9 0.4 27 -8 39 -23 1 -32 0.4 0.5 22 18 13 -12 1 -32 0.5 0.9 37 5 2 -32 1 -32 0.2 0.7 40 19 4 -42 1 -32 0.4 0.9 34 -21 -22 -33 1 -32 0.4 0.3 11 -12 46 38 2 -32 0.3 0.7 39 -11 23 -49 2 -32 0.1 0.2 29 1 14 5 1 -32 0.5 0.9 18 8 27 -48 2 -32 0.9 0.8 25 8 27 4 1 -32 0.2 0.5 28 -4 37 8 2 -32 0.7 0.8 36 10 16 -28 1 -32 0.6 0.1 13 -44 46 38 2 -32 0.8 0.6 -11 -39 12 -24 2 -32 0.7 0.5 10 7 26 18 2 -32 0.7 0.9 -8 -13 23 -23 2 -32 0.1 0.8 23 -2 2 -33 1 -32 0.6 0.4 38 -13 -4 -11 1 -32 0.3 0.4 43 -34 25 -49 2 -32 0.8 0.2 9 -17 -1 -46 1 -32 0.6 0.4 34 -47 12 -15 2 -32 0.1 0.8 -33 -46 -1 -13 2 -32 0.6 0.8 -9 -29 45 -7 2 -32 0.9 0.5 37 -49 42 -18 1 -32 0.9 0.2 40 -32 33 3 1 -32 0.2 0.9 13 -43 5 -35 2 -33 0.8 0.3 3 -33 0 -34 1 -33 0.6 0.4 -12 -42 -8 -18 2 -33 0.6 0.5 13 -40 13 -26 1 -33 0.2 0.4 -20 -35 -7 -14 2 -33 0.7 0.9 32 -11 42 38 2 -33 0.2 0.1 -5 -33 13 -45 1 -33 0.6 0.3 28 -48 -46 -50 1 -33 0.1 0.5 26 -2 48 41 2 -33 0.2 0.9 33 -43 32 -34 2 -33 0.3 0.9 50 -4 41 -7 2 -33 0.7 0.4 -12 -29 0 -22 2 -33 0.7 0.7 38 34 1 -47 1 -33 0.4 0.5 27 -15 21 -11 2 -33 0.5 0.6 12 -39 -3 -11 2 -33 0.3 0.9 36 -34 41 24 2 -33 0.1 0.6 31 -2 21 9 2 -33 0.9 0.3 39 16 -12 -28 1 -33 0.6 0.4 40 -34 8 -16 1 -33 0.5 0.8 16 -37 3 0 2 -33 0.6 0.9 -16 -24 11 -20 2 -33 0.7 0.2 20 -14 21 -31 1 -33 0.2 0.6 -2 -49 -22 -49 2 -33 0.5 0.8 34 31 -2 -22 1 -33 0.4 0.9 32 25 15 -34 1 -33 0.9 0.7 14 -10 37 -23 2 -33 0.1 0.7 14 -24 -31 -43 1 -33 0.9 0.1 -5 -40 39 18 2 -33 0.8 0.4 31 2 -20 -45 1 -33 0.2 0.1 36 -6 -5 -26 1 -33 0.4 0.9 16 -48 -14 -36 1 -33 0.5 0.7 47 -17 -5 -10 1 -33 0.7 0.7 49 -40 -31 -44 1 -33 0.1 0.9 20 -28 22 -15 2 -33 0.3 0.2 -15 -42 33 -17 2 -33 0.3 0.1 22 -26 -8 -24 1 -33 0.4 0.3 42 -19 45 44 2 -33 0.9 0.7 42 -21 40 2 1 -33 0.8 0.1 9 -3 -6 -8 1 -33 0.9 0.2 38 10 -9 -18 1 -33 0.8 0.3 42 27 -13 -49 1 -33 0.6 0.2 43 -28 23 3 1 -33 0.9 0.2 -24 -31 35 -35 2 -33 0.9 0.6 -12 -49 4 -48 2 -33 0.6 0.1 28 1 19 -48 1 -33 0.8 0.2 6 -24 50 -11 2 -33 0.1 0.9 36 28 49 23 2 -33 0.4 0.2 49 -2 2 -44 1 -33 0.4 0.1 40 -14 45 28 2 -33 0.7 0.9 32 -2 45 2 2 -33 0.6 0.1 34 -39 49 32 2 -33 0.5 0.5 -29 -42 -23 -46 1 -33 0.5 0.9 7 -8 3 -3 2 -33 0.4 0.9 29 -33 43 -27 2 -33 0.5 0.7 17 -46 27 -1 2 -33 0.6 0.6 47 -17 -25 -34 1 -33 0.4 0.6 41 1 46 -42 1 -33 0.4 0.3 17 -23 27 -49 1 -33 0.3 0.8 11 -21 29 -10 2 -33 0.9 0.9 43 -48 1 -4 1 -33 0.5 0.6 -27 -41 48 43 2 -33 0.6 0.4 26 -37 -23 -31 1 -33 0.7 0.6 38 0 -16 -31 1 -33 0.9 0.8 32 -48 20 -46 1 -33 0.3 0.2 40 -48 6 -6 2 -33 0.3 0.7 -5 -34 42 31 2 -33 0.7 0.4 25 -21 19 11 2 -33 0.9 0.9 38 32 21 -3 1 -33 0.8 0.6 40 -27 29 13 2 -33 0.8 0.2 43 -19 44 -32 1 -33 0.6 0.8 5 -23 18 7 2 -33 0.4 0.5 -25 -32 33 -38 2 -33 0.7 0.4 25 -31 20 -36 1 -33 0.4 0.9 29 -25 41 3 2 -33 0.9 0.7 -20 -34 46 29 2 -33 0.4 0.4 44 -30 34 22 2 -33 0.8 0.2 32 -49 19 14 2 -33 0.9 0.5 -2 -32 -4 -44 1 -33 0.6 0.8 19 -40 34 0 2 -33 0.4 0.3 -4 -10 -5 -36 1 -33 0.2 0.2 43 5 18 12 1 -33 0.8 0.3 17 -8 13 -1 1 -33 0.9 0.4 12 -22 34 -29 1 -33 0.5 0.4 5 -13 37 -36 2 -33 0.3 0.3 44 34 32 -49 1 -33 0.7 0.9 9 7 30 -42 
2 -33 0.7 0.6 37 -42 37 -38 1 -33 0.3 0.4 35 -15 41 -41 1 -33 0.7 0.8 50 -24 39 -23 1 -33 0.6 0.6 38 -23 -1 -3 1 -33 0.8 0.2 -27 -30 -6 -25 2 -33 0.7 0.2 18 11 0 -11 1 -33 0.7 0.1 20 -11 50 24 2 -33 0.1 0.1 38 -47 26 -41 2 -33 0.5 0.5 -32 -44 22 20 2 -33 0.1 0.4 -49 -50 -39 -47 2 -33 0.3 0.4 10 -47 48 -18 2 -33 0.1 0.5 -2 -16 41 2 2 -33 0.9 0.2 39 36 32 -22 1 -33 0.7 0.9 40 -6 46 -33 2 -33 0.2 0.2 46 -20 43 35 2 -33 0.6 0.4 48 6 47 14 1 -33 0.4 0.2 50 -29 6 -27 1 -33 0.1 0.1 40 -32 31 -20 2 -33 0.5 0.9 21 13 -28 -43 1 -33 0.7 0.1 34 -19 46 -11 1 -33 0.8 0.5 47 38 -14 -32 1 -33 0.7 0.3 34 32 29 -46 1 -33 0.3 0.6 22 -7 -1 -41 1 -33 0.8 0.6 45 40 -23 -42 1 -33 0.8 0.9 45 34 48 1 1 -33 0.9 0.4 13 -35 44 7 2 -33 0.7 0.5 49 10 10 5 1 -33 0.4 0.3 41 -45 21 -38 1 -33 0.5 0.9 38 22 -17 -35 1 -33 0.6 0.1 -22 -33 -7 -22 2 -33 0.4 0.8 -27 -37 40 -8 2 -33 0.2 0.3 -4 -16 11 -25 1 -33 0.7 0.2 29 8 19 -31 1 -33 0.7 0.9 50 -6 13 -48 1 -33 0.2 0.6 37 -4 27 -21 2 -33 0.8 0.2 -26 -28 36 18 2 -33 0.7 0.8 -4 -17 13 -24 2 -33 0.2 0.6 24 -2 36 5 2 -33 0.1 0.1 37 -11 47 -22 1 -33 0.7 0.9 -23 -48 47 31 2 -33 0.4 0.7 -20 -21 27 -42 2 -33 0.8 0.4 45 -26 37 -38 1 -33 0.8 0.2 -1 -25 34 31 2 -33 0.1 0.1 24 -24 -27 -44 1 -33 0.7 0.8 -11 -27 44 41 2 -33 0.4 0.3 39 -1 43 23 2 -33 0.1 0.1 24 -8 -18 -29 1 -33 0.6 0.6 5 -41 -28 -49 1 -33 0.1 0.6 6 -42 46 22 2 -33 0.5 0.5 -32 -43 5 -1 2 -33 0.4 0.3 1 -33 -14 -31 1 -33 0.1 0.4 -21 -45 35 -13 2 -33 0.3 0.3 -30 -39 48 41 2 -33 0.7 0.2 34 -16 48 2 1 -33 0.4 0.1 37 -11 42 -37 1 -33 0.3 0.7 29 -9 -30 -31 1 -33 0.7 0.1 17 6 31 12 1 -33 0.7 0.2 44 -14 -43 -50 1 -33 0.9 0.7 -15 -25 36 -4 2 -33 0.3 0.6 33 22 18 16 1 -33 0.1 0.9 -12 -35 32 9 2 -33 0.6 0.1 13 -25 43 -2 2 -33 0.6 0.8 48 -40 8 -17 1 -33 0.1 0.7 14 1 35 18 2 -33 0.2 0.3 -30 -45 9 5 2 -33 0.7 0.5 8 -24 48 15 2 -33 0.4 0.1 -10 -13 17 -49 1 -33 0.7 0.4 11 -7 21 -34 1 -33 0.2 0.9 16 -33 39 8 2 -33 0.1 0.6 20 -27 43 -48 2 -33 0.5 0.5 34 11 -18 -30 1 -33 0.9 0.6 9 -24 49 -15 2 -33 0.1 0.7 28 -9 45 -11 2 -33 0.8 0.5 43 -41 -26 -40 1 -33 0.1 0.7 25 -25 42 14 2 -33 0.8 0.9 10 5 36 31 2 -33 0.5 0.9 24 -29 -44 -50 1 -33 0.5 0.2 30 -19 16 -19 1 -33 0.6 0.6 32 10 -2 -25 1 -33 0.6 0.5 6 -28 32 -20 2 -33 0.6 0.9 -5 -15 34 23 2 -33 0.6 0.9 21 -40 2 -30 1 -33 0.2 0.2 12 -31 -1 -5 2 -33 0.4 0.3 42 -4 -5 -30 1 -33 0.5 0.2 20 -23 -6 -13 1 -33 0.7 0.2 46 40 -4 -5 1 -33 0.7 0.8 23 15 18 11 1 -33 0.8 0.7 11 -50 7 -14 2 -33 0.6 0.5 -20 -39 32 1 2 -33 0.2 0.7 43 -35 14 -6 2 -33 0.4 0.2 28 1 20 -50 1 -33 0.8 0.9 38 -20 42 4 2 -33 0.9 0.3 41 -30 27 14 1 -33 0.7 0.5 -22 -34 -45 -46 1 -33 0.4 0.8 44 -24 11 -41 1 -33 0.9 0.9 37 1 9 -16 1 -33 0.8 0.9 -5 -16 1 -44 2 -33 0.2 0.4 30 -3 37 -31 1 -33 0.2 0.3 14 -43 6 -28 2 -33 0.8 0.8 7 -43 27 0 2 -33 0.5 0.9 5 -6 45 30 2 -33 0.8 0.5 -10 -45 15 3 2 -33 0.5 0.6 30 -2 34 3 2 -33 0.5 0.9 37 -44 21 19 2 -33 0.2 0.9 30 -45 34 -6 2 -33 0.7 0.6 32 -38 -10 -37 1 -33 0.5 0.4 -11 -26 -19 -49 1 -33 0.1 0.1 20 -40 34 -30 2 -33 0.7 0.1 35 11 6 -35 1 -33 0.9 0.7 18 -36 -7 -23 1 -33 0.6 0.2 30 18 25 -28 1 -33 0.8 0.3 -28 -32 45 -44 2 -33 0.6 0.9 -32 -39 40 -39 2 -33 0.6 0.1 43 5 -38 -43 1 -33 0.5 0.4 42 33 -38 -40 1 -33 0.7 0.9 -5 -10 38 -39 2 -33 0.5 0.6 22 -7 32 16 2 -33 0.1 0.4 41 34 9 -15 1 -33 0.1 0.7 29 -28 31 -48 2 -33 0.8 0.7 27 -26 31 -19 1 -33 0.5 0.4 19 15 50 -31 1 -33 0.5 0.4 -14 -35 9 -31 2 -33 0.3 0.7 41 -47 -26 -34 1 -33 0.5 0.3 48 29 39 -24 1 -33 0.9 0.1 34 26 19 -30 1 -33 0.8 0.5 49 -31 43 25 2 -33 0.8 0.1 34 33 -17 -21 1 -33 0.9 0.9 22 -48 48 -22 2 -33 0.3 0.1 21 14 15 -20 1 -33 0.6 0.5 -16 -46 17 -12 2 -33 
0.3 0.2 -41 -49 -7 -40 2 -33 0.9 0.8 24 -10 17 -50 1 -33 0.9 0.5 50 -37 -27 -31 1 -33 0.3 0.8 -26 -29 -22 -44 2 -33 0.3 0.2 41 -19 33 -16 1 -33 0.6 0.1 42 -5 -23 -38 1 -33 0.2 0.9 -28 -50 28 2 2 -33 0.2 0.6 25 -8 -9 -32 1 -33 0.9 0.6 16 -43 14 -29 1 -33 0.1 0.7 28 2 46 39 2 -33 0.3 0.9 22 10 -9 -43 1 -33 0.5 0.1 35 15 23 12 1 -33 0.6 0.5 48 -44 -15 -43 1 -33 0.9 0.6 44 10 -30 -38 1 -33 0.1 0.6 39 9 48 -8 2 -33 0.9 0.1 -27 -30 32 11 2 -33 0.9 0.5 -6 -11 40 -29 2 -33 0.6 0.6 -23 -42 41 16 2 -33 0.3 0.7 -19 -38 -9 -25 2 -33 0.8 0.7 -4 -24 14 0 2 -33 0.1 0.1 12 1 11 -4 1 -33 0.8 0.4 -18 -30 29 -50 1 -33 0.7 0.5 -4 -50 32 -8 2 -33 0.8 0.5 21 5 27 2 1 -33 0.2 0.7 33 2 28 -39 2 -33 0.7 0.8 10 -25 12 -49 2 -33 0.3 0.6 36 -38 22 3 2 -33 0.8 0.2 48 28 39 9 1 -33 0.9 0.4 19 -11 34 -45 1 -33 0.1 0.6 -19 -48 9 -22 2 -33 0.3 0.2 13 -13 44 -30 1 -33 0.9 0.3 20 4 -15 -50 1 -33 0.2 0.1 10 9 24 8 1 -33 0.7 0.5 -17 -34 33 20 2 -33 0.4 0.2 -29 -32 -16 -45 1 -33 0.6 0.9 -11 -32 25 -43 2 -33 0.9 0.8 5 -28 33 -22 2 -33 0.7 0.9 50 -37 45 -24 2 -33 0.3 0.1 -10 -38 6 -11 2 -33 0.8 0.6 10 -34 50 -18 2 -33 0.6 0.7 24 -47 -16 -17 1 -33 0.8 0.4 36 24 5 -38 1 -33 0.3 0.3 -23 -31 -29 -34 1 -33 0.6 0.9 10 -10 22 9 2 -33 0.3 0.6 32 16 48 23 2 -33 0.5 0.6 42 -50 40 -18 2 -33 0.5 0.3 47 30 46 -9 1 -33 0.3 0.3 -40 -45 40 -21 2 -33 0.8 0.2 29 -43 39 37 2 -33 0.1 0.9 1 -33 46 -35 2 -33 0.2 0.9 -9 -28 -20 -35 1 -33 0.1 0.5 -2 -3 28 -41 2 -33 0.5 0.9 -3 -14 28 18 2 -33 0.5 0.3 -4 -37 26 -37 2 -33 0.3 0.5 -26 -29 -3 -31 2 -33 0.7 0.1 1 -31 19 -33 1 -33 0.9 0.2 0 -24 10 -27 1 -33 0.4 0.2 2 -26 10 -43 1 -33 0.1 0.5 37 -33 -6 -19 2 -33 0.2 0.7 37 -50 7 -26 2 -33 0.7 0.6 36 33 13 -7 1 -33 0.7 0.3 6 -10 -6 -29 1 -33 0.6 0.2 36 -39 0 -20 1 -33 0.6 0.4 -27 -37 -20 -49 1 -33 0.4 0.9 48 -5 -45 -46 1 -33 0.5 0.4 48 21 35 -33 1 -33 0.8 0.3 -4 -18 13 -16 2 -33 0.9 0.8 42 19 40 -37 1 -33 0.1 0.3 11 -2 5 -37 1 -33 0.2 0.8 -21 -38 45 39 2 -33 0.8 0.6 -19 -36 21 -10 2 -33 0.5 0.4 41 -32 -23 -40 1 -33 0.2 0.2 25 -46 28 -12 2 -33 0.4 0.2 -12 -48 6 -40 2 -33 0.3 0.1 -26 -38 13 -34 2 -33 0.6 0.4 47 2 -23 -45 1 -33 0.8 0.2 30 2 -7 -12 1 -33 0.1 0.1 2 -49 -35 -45 1 -33 0.7 0.2 40 9 -29 -32 1 -33 0.4 0.8 24 -47 13 -26 2 -33 0.3 0.4 48 1 17 -22 1 -33 0.1 0.9 -11 -24 29 24 2 -33 0.8 0.8 21 -35 -26 -46 1 -33 0.9 0.7 38 28 -9 -28 1 -33 0.7 0.3 -1 -13 -3 -41 1 -34 0.5 0.4 30 -27 14 -1 2 -34 0.8 0.3 -3 -41 21 10 2 -34 0.6 0.7 35 -36 19 -11 1 -34 0.3 0.6 -14 -50 34 -21 2 -34 0.3 0.8 -12 -38 47 -10 2 -34 0.7 0.2 40 -9 34 -44 1 -34 0.4 0.1 -17 -38 28 26 2 -34 0.1 0.9 -16 -39 11 4 2 -34 0.5 0.3 -45 -49 -26 -29 2 -34 0.1 0.3 1 -35 21 -6 2 -34 0.5 0.3 35 18 30 -34 1 -34 0.7 0.5 -5 -30 29 25 2 -34 0.7 0.6 23 -2 16 -28 1 -34 0.3 0.1 -8 -25 24 20 2 -34 0.7 0.6 36 -33 -5 -14 1 -34 0.6 0.6 21 -48 -1 -41 1 -34 0.6 0.2 -7 -44 11 -29 1 -34 0.4 0.6 49 46 23 -5 1 -34 0.2 0.2 34 26 26 21 1 -34 0.3 0.6 41 39 16 10 1 -34 0.5 0.5 18 -30 -16 -35 1 -34 0.8 0.8 49 -48 19 -1 1 -34 0.8 0.7 19 -9 46 -43 2 -34 0.5 0.4 -14 -20 -28 -36 1 -34 0.5 0.8 -10 -49 26 -39 2 -34 0.2 0.6 18 -36 -19 -46 1 -34 0.6 0.2 -7 -38 10 -21 2 -34 0.9 0.4 24 -13 42 40 2 -34 0.8 0.2 24 22 28 -31 1 -34 0.9 0.3 -8 -19 22 -43 1 -34 0.4 0.9 -10 -45 47 23 2 -34 0.9 0.1 37 -4 1 -29 1 -34 0.7 0.8 36 16 44 -23 1 -34 0.4 0.8 19 18 -32 -50 1 -34 0.7 0.1 14 9 29 -1 1 -34 0.8 0.4 -10 -22 14 -22 2 -34 0.3 0.5 -2 -5 -15 -48 1 -34 0.1 0.7 45 15 18 1 1 -34 0.1 0.1 10 -24 45 -43 1 -34 0.9 0.8 19 -44 17 14 2 -34 0.8 0.1 43 -45 48 25 2 -34 0.3 0.9 31 13 45 40 2 -34 0.8 0.4 24 -29 -24 -37 1 -34 0.4 0.8 5 -42 34 10 2 -34 0.3 0.1 
31 26 22 -32 1 -34 0.5 0.3 22 -47 -9 -13 2 -34 0.5 0.9 35 -41 3 -16 1 -34 0.7 0.6 20 -4 46 41 2 -34 0.4 0.4 41 -34 27 13 2 -34 0.8 0.9 20 17 28 15 2 -34 0.3 0.3 33 -50 39 -25 2 -34 0.3 0.5 28 -31 48 42 2 -34 0.4 0.5 44 6 -4 -50 1 -34 0.2 0.4 44 -2 16 11 2 -34 0.8 0.1 18 -12 -5 -43 1 -34 0.9 0.1 41 -40 25 -13 1 -34 0.1 0.3 7 -29 32 15 2 -34 0.4 0.6 -1 -33 17 16 2 -34 0.7 0.1 30 -19 27 18 2 -34 0.5 0.4 44 18 26 14 1 -34 0.3 0.5 29 -42 30 -47 2 -34 0.4 0.3 27 24 4 -40 1 -34 0.9 0.3 26 20 38 28 2 -34 0.3 0.7 31 6 38 35 2 -34 0.4 0.8 35 -37 29 6 2 -34 0.4 0.1 20 5 5 -18 1 -34 0.2 0.7 45 31 -30 -45 1 -34 0.4 0.3 47 -10 -15 -50 1 -34 0.4 0.3 -38 -48 5 -20 2 -34 0.3 0.9 34 -37 31 -46 2 -34 0.2 0.1 -34 -42 6 -8 2 -34 0.9 0.2 37 -43 -21 -48 1 -34 0.6 0.7 6 0 -6 -19 1 -34 0.1 0.9 7 5 19 -18 2 -34 0.2 0.8 -22 -23 37 36 2 -34 0.7 0.5 10 -12 14 -49 1 -34 0.4 0.8 28 12 9 4 1 -34 0.2 0.6 13 4 18 -43 1 -34 0.1 0.5 -8 -38 30 -39 2 -34 0.8 0.4 -1 -16 23 10 2 -34 0.6 0.7 46 -33 15 -2 1 -34 0.6 0.5 50 33 -12 -14 1 -34 0.1 0.5 34 -25 2 -50 1 -34 0.7 0.6 33 14 8 7 1 -34 0.3 0.7 39 -21 29 -36 2 -34 0.3 0.6 41 18 20 -29 1 -34 0.2 0.1 28 -2 5 -40 1 -34 0.6 0.9 43 10 12 -32 1 -34 0.2 0.7 43 -46 -7 -45 2 -34 0.6 0.5 26 -38 23 -42 1 -34 0.8 0.7 -13 -31 40 15 2 -34 0.4 0.3 -32 -35 50 -6 2 -34 0.4 0.2 40 -2 -40 -42 1 -34 0.4 0.7 27 -2 12 10 2 -34 0.7 0.1 -11 -25 37 -40 1 -34 0.5 0.9 49 -47 -43 -46 1 -34 0.7 0.3 11 -44 44 2 2 -34 0.7 0.9 -2 -23 42 -21 2 -34 0.8 0.6 36 -43 -14 -22 1 -34 0.4 0.2 28 27 25 -8 1 -34 0.7 0.9 -14 -34 31 -22 2 -34 0.3 0.6 5 -15 -20 -50 1 -34 0.7 0.4 26 0 50 7 1 -34 0.2 0.7 2 -27 46 -20 2 -34 0.6 0.7 1 -28 14 12 2 -34 0.4 0.3 -8 -9 34 -20 1 -34 0.3 0.4 49 48 27 -21 1 -34 0.4 0.5 48 -35 36 24 2 -34 0.8 0.7 9 -24 26 22 2 -34 0.2 0.5 38 -20 -11 -41 1 -34 0.5 0.1 16 -15 10 -8 1 -34 0.7 0.1 10 -5 50 -32 1 -34 0.8 0.4 -26 -44 29 -47 2 -34 0.7 0.8 42 0 48 9 2 -34 0.2 0.3 -21 -33 46 -38 2 -34 0.8 0.7 39 18 -4 -48 1 -34 0.9 0.4 -14 -41 -32 -48 1 -34 0.1 0.5 36 22 -34 -39 1 -34 0.9 0.9 50 -3 -16 -39 1 -34 0.1 0.1 -39 -49 -15 -32 2 -34 0.1 0.2 -17 -28 -28 -30 1 -34 0.8 0.6 22 -48 13 5 2 -34 0.6 0.9 8 -6 47 11 2 -34 0.4 0.9 -7 -10 -5 -17 1 -34 0.7 0.8 -10 -19 5 -4 2 -34 0.9 0.4 26 -17 -34 -42 1 -34 0.4 0.7 36 14 39 -49 1 -34 0.2 0.3 19 11 32 22 2 -34 0.7 0.8 34 -20 48 -3 2 -34 0.5 0.2 22 -17 -8 -49 1 -34 0.3 0.5 45 -25 26 -2 2 -34 0.4 0.4 -28 -46 35 -50 2 -34 0.7 0.1 -11 -29 22 -44 1 -34 0.4 0.9 10 -15 32 -22 2 -34 0.3 0.6 -3 -20 -23 -31 1 -34 0.5 0.6 36 15 22 6 1 -34 0.5 0.7 -12 -23 -9 -25 2 -34 0.2 0.4 42 25 -14 -37 1 -34 0.2 0.6 22 3 36 -29 2 -34 0.3 0.8 2 -15 21 19 2 -34 0.6 0.5 13 -13 35 -28 2 -34 0.7 0.9 32 28 -3 -10 1 -34 0.5 0.4 -36 -42 44 32 2 -34 0.6 0.4 -20 -31 39 -34 2 -34 0.4 0.2 15 -45 32 16 2 -34 0.4 0.2 46 43 40 20 1 -34 0.5 0.8 34 -9 -32 -37 1 -34 0.4 0.1 -19 -33 -12 -36 1 -34 0.7 0.1 12 -44 18 12 2 -34 0.6 0.3 -20 -45 -11 -19 2 -34 0.5 0.4 43 -35 31 -10 1 -34 0.5 0.1 -6 -35 -12 -24 1 -34 0.6 0.1 44 -18 31 5 2 -34 0.3 0.2 -4 -44 -8 -31 2 -34 0.5 0.3 27 -37 10 8 2 -34 0.5 0.8 21 -19 -1 -12 1 -34 0.7 0.3 8 5 48 12 2 -34 0.6 0.9 46 44 32 15 1 -34 0.1 0.2 -41 -44 41 37 2 -34 0.7 0.4 13 -18 17 -41 1 -34 0.3 0.9 -9 -44 23 7 2 -34 0.6 0.9 -23 -34 26 12 2 -34 0.9 0.1 5 -50 4 -17 1 -34 0.1 0.2 48 37 3 -5 1 -34 0.9 0.8 37 5 -1 -50 1 -34 0.4 0.3 48 -48 -26 -38 1 -34 0.5 0.8 43 -21 -20 -25 1 -34 0.3 0.5 45 40 35 4 1 -34 0.1 0.1 20 -15 -18 -32 1 -34 0.3 0.3 32 -43 12 -14 2 -34 0.6 0.8 26 -9 36 -34 2 -34 0.8 0.6 3 -14 9 -5 2 -34 0.8 0.3 42 -21 39 -5 1 -34 0.6 0.8 30 29 -2 -13 1 -34 0.3 0.3 38 30 14 -21 1 
-34 0.5 0.4 6 -31 23 0 2 -34 0.7 0.6 18 -32 36 17 2 -34 0.3 0.7 19 14 47 6 2 -34 0.5 0.6 1 -15 39 -44 2 -34 0.8 0.1 -28 -40 48 38 2 -34 0.8 0.7 5 -24 -17 -35 1 -34 0.7 0.5 15 -33 3 -22 1 -34 0.8 0.5 -6 -48 -20 -36 1 -34 0.2 0.4 44 -7 48 -47 1 -34 0.2 0.6 -13 -44 40 25 2 -34 0.5 0.7 -16 -39 40 -27 2 -34 0.4 0.2 -13 -30 -24 -44 1 -34 0.8 0.9 15 -19 39 -20 2 -34 0.2 0.5 0 -9 33 -7 2 -34 0.4 0.7 21 -40 37 -23 2 -34 0.9 0.4 31 -1 17 -7 1 -34 0.6 0.6 -15 -20 -26 -32 1 -34 0.1 0.1 -18 -49 30 0 2 -34 0.1 0.6 -35 -48 35 -42 2 -34 0.3 0.2 38 -46 18 -3 2 -34 0.5 0.2 -34 -46 37 -2 2 -34 0.8 0.8 25 -3 13 -8 1 -34 0.2 0.1 13 -16 23 -8 2 -34 0.8 0.9 38 -32 -6 -31 1 -34 0.5 0.8 22 -23 32 -2 2 -34 0.5 0.1 41 35 -18 -34 1 -34 0.1 0.4 37 -33 24 7 2 -34 0.4 0.1 33 -36 18 -22 1 -34 0.3 0.1 -21 -29 -35 -49 1 -34 0.1 0.5 13 -40 8 -27 2 -34 0.1 0.7 -14 -41 -10 -16 2 -34 0.3 0.9 31 -7 30 10 2 -34 0.3 0.6 -15 -31 48 -12 2 -34 0.6 0.2 30 17 43 -13 1 -34 0.7 0.4 50 -31 -3 -25 1 -34 0.3 0.4 28 12 27 -12 1 -34 0.2 0.4 26 -24 6 -40 1 -34 0.6 0.4 47 -23 49 20 2 -34 0.2 0.5 2 -37 45 -29 2 -34 0.8 0.3 17 -15 -8 -46 1 -34 0.4 0.3 41 -38 43 -43 1 -34 0.7 0.9 36 6 -15 -17 1 -34 0.9 0.2 8 -29 18 -22 1 -34 0.9 0.7 46 8 6 -49 1 -34 0.2 0.2 4 3 45 31 2 -34 0.9 0.4 -36 -46 48 -48 2 -34 0.2 0.2 -1 -3 18 -41 1 -34 0.8 0.5 39 -33 0 -38 1 -34 0.8 0.8 33 20 27 26 1 -34 0.3 0.6 24 14 8 1 1 -34 0.5 0.3 -7 -48 42 -3 2 -34 0.9 0.4 37 34 36 30 1 -34 0.5 0.7 -19 -25 3 -32 2 -34 0.6 0.3 26 17 32 -38 1 -34 0.1 0.4 48 15 47 -6 1 -34 0.9 0.2 32 -28 8 -46 1 -34 0.6 0.6 -19 -28 38 -15 2 -34 0.4 0.5 1 -31 -4 -19 2 -34 0.4 0.3 18 -49 50 -44 2 -34 0.5 0.5 1 -39 -21 -32 1 -34 0.2 0.5 48 32 9 -1 1 -34 0.3 0.9 20 -47 -25 -29 1 -34 0.1 0.5 43 -26 -38 -41 1 -34 0.9 0.3 31 -17 24 -11 1 -34 0.1 0.3 -23 -27 44 -18 2 -34 0.1 0.5 46 -5 25 5 2 -34 0.5 0.3 49 28 -10 -38 1 -34 0.8 0.3 -5 -32 32 -50 2 -34 0.9 0.4 22 19 37 11 1 -34 0.2 0.6 6 -9 42 40 2 -34 0.4 0.5 8 -16 -8 -11 1 -34 0.3 0.4 47 -32 3 -26 1 -34 0.4 0.3 46 -2 -4 -24 1 -34 0.4 0.6 43 35 7 -5 1 -34 0.5 0.3 8 -6 39 -5 2 -34 0.2 0.3 33 19 46 -48 1 -34 0.7 0.1 36 28 12 8 1 -34 0.8 0.6 45 -32 -31 -50 1 -34 0.5 0.1 6 -43 -17 -41 1 -34 0.5 0.8 24 -7 47 -22 2 -34 0.9 0.1 -16 -19 -16 -50 1 -34 0.2 0.8 -11 -34 15 -14 2 -34 0.7 0.2 28 13 42 6 1 -34 0.3 0.5 17 -26 24 6 2 -34 0.5 0.5 -26 -37 -8 -37 2 -34 0.7 0.7 -3 -9 1 -43 2 -34 0.5 0.1 31 -21 -17 -39 1 -34 0.6 0.2 42 -44 23 -28 1 -34 0.5 0.2 3 -25 9 -11 2 -34 0.7 0.8 28 -5 19 -6 2 -34 0.4 0.6 22 -47 26 6 2 -34 0.7 0.2 42 -7 24 23 1 -34 0.9 0.6 42 36 -25 -27 1 -34 0.9 0.4 28 -19 41 -14 1 -34 0.1 0.5 31 -33 2 -23 2 -34 0.3 0.6 18 -45 44 8 2 -34 0.4 0.7 37 22 33 5 1 -34 0.1 0.8 7 -47 -11 -42 2 -34 0.9 0.1 17 -45 33 18 2 -34 0.2 0.1 19 -16 33 -47 1 -34 0.7 0.8 1 -42 17 10 2 -34 0.4 0.4 -15 -26 8 -14 2 -34 0.7 0.3 36 -29 13 -43 1 -34 0.2 0.5 38 19 34 -5 2 -34 0.9 0.8 48 -4 11 -17 1 -34 0.8 0.3 -1 -36 43 31 2 -34 0.8 0.3 9 -2 8 -50 1 -34 0.5 0.5 50 -29 39 5 2 -34 0.8 0.9 25 15 5 -35 1 -34 0.7 0.9 35 -20 -2 -24 1 -34 0.7 0.4 30 -40 48 -15 1 -34 0.6 0.4 8 -44 36 -10 2 -34 0.4 0.3 17 -9 26 1 2 -34 0.5 0.3 -22 -25 48 30 2 -34 0.6 0.7 1 -39 28 -43 2 -34 0.6 0.2 36 -11 31 -15 1 -34 0.4 0.5 38 0 -29 -31 1 -34 0.6 0.3 9 -14 11 4 2 -34 0.2 0.5 -34 -50 41 -27 2 -34 0.9 0.7 28 -4 33 11 2 -34 0.8 0.5 -13 -31 49 -26 2 -34 0.1 0.5 45 16 8 -40 1 -34 0.3 0.3 15 -46 9 0 2 -34 0.4 0.7 35 -48 34 2 2 -34 0.8 0.3 33 -42 10 4 1 -35 0.9 0.1 49 -42 26 -3 1 -35 0.3 0.3 9 0 25 -42 1 -35 0.4 0.9 23 -48 37 13 2 -35 0.3 0.4 8 -7 -12 -17 1 -35 0.9 0.7 6 -14 6 -35 2 -35 0.9 0.6 18 -7 11 -14 1 -35 
0.8 0.9 -19 -37 28 -34 2 -35 0.1 0.1 5 -11 41 33 2 -35 0.6 0.2 45 26 -17 -28 1 -35 0.8 0.1 4 -10 43 8 2 -35 0.6 0.8 18 -45 -15 -34 1 -35 0.3 0.2 33 -32 15 -19 1 -35 0.4 0.1 47 28 5 -36 1 -35 0.7 0.1 31 -35 41 31 2 -35 0.2 0.2 6 -4 -28 -49 1 -35 0.1 0.1 -11 -40 31 -9 2 -35 0.3 0.9 26 -49 9 -25 2 -35 0.2 0.3 6 -30 46 13 2 -35 0.9 0.9 13 -2 48 -16 2 -35 0.2 0.8 50 49 36 -13 1 -35 0.1 0.8 11 -45 43 -33 2 -35 0.2 0.7 39 -23 -36 -49 1 -35 0.9 0.6 -20 -21 16 -23 2 -35 0.7 0.2 -8 -18 40 -30 2 -35 0.2 0.5 33 24 23 -31 1 -35 0.7 0.6 13 -3 42 -50 1 -35 0.7 0.6 38 -12 -6 -15 1 -35 0.3 0.5 25 -38 32 5 2 -35 0.2 0.8 35 -44 15 -31 2 -35 0.4 0.1 37 -45 5 -28 1 -35 0.5 0.4 38 -37 -19 -49 1 -35 0.5 0.1 27 18 27 -16 1 -35 0.4 0.1 -27 -33 49 26 2 -35 0.5 0.7 9 -10 50 9 2 -35 0.8 0.8 49 -16 38 19 2 -35 0.4 0.3 -17 -42 2 -43 1 -35 0.4 0.9 -33 -35 18 -40 2 -35 0.5 0.8 -6 -11 39 15 2 -35 0.1 0.5 45 -13 -14 -19 1 -35 0.4 0.7 31 -15 45 -24 2 -35 0.2 0.5 4 -21 50 9 2 -35 0.1 0.9 45 -8 -14 -33 1 -35 0.6 0.9 21 -25 26 -45 2 -35 0.2 0.6 -25 -26 -6 -9 2 -35 0.6 0.6 46 27 12 -8 1 -35 0.9 0.8 40 36 7 5 1 -35 0.9 0.7 23 -10 36 -16 1 -35 0.5 0.4 18 -25 -17 -29 1 -35 0.8 0.5 6 -24 26 -21 2 -35 0.4 0.9 48 -39 9 -48 2 -35 0.1 0.9 -27 -46 40 -34 2 -35 0.6 0.1 32 -39 -39 -42 1 -35 0.2 0.1 36 -46 -10 -26 1 -35 0.6 0.7 -35 -42 23 -34 2 -35 0.1 0.3 33 11 2 -30 1 -35 0.2 0.6 46 -23 21 14 2 -35 0.9 0.1 35 -25 -29 -50 1 -35 0.2 0.4 -6 -11 -16 -44 1 -35 0.4 0.8 -19 -24 36 5 2 -35 0.6 0.4 32 23 19 0 1 -35 0.3 0.3 -3 -24 1 -50 1 -35 0.6 0.1 24 16 2 -43 1 -35 0.3 0.8 25 -16 20 -44 2 -35 0.3 0.9 46 -8 45 -24 2 -35 0.1 0.7 38 -26 -16 -21 1 -35 0.7 0.6 9 -9 19 1 2 -35 0.1 0.1 43 -44 1 -23 2 -35 0.3 0.7 15 -26 30 -2 2 -35 0.2 0.9 -19 -50 19 -16 2 -35 0.2 0.2 28 -4 -22 -23 1 -35 0.4 0.8 48 0 38 30 2 -35 0.5 0.7 17 9 23 -11 1 -35 0.9 0.2 36 -6 45 7 1 -35 0.5 0.7 -12 -45 -21 -39 2 -35 0.2 0.7 29 2 43 -26 2 -35 0.8 0.6 17 14 36 4 1 -35 0.7 0.3 9 -3 23 15 2 -35 0.7 0.7 29 -31 11 -28 1 -35 0.8 0.6 10 -38 28 17 2 -35 0.4 0.2 -6 -13 24 -25 1 -35 0.2 0.3 23 -24 21 19 2 -35 0.5 0.1 40 10 28 -18 1 -35 0.5 0.6 -27 -32 13 -40 2 -35 0.3 0.2 39 16 24 17 1 -35 0.6 0.5 49 -35 25 -5 1 -35 0.5 0.5 50 17 33 -48 1 -35 0.6 0.8 49 -11 15 -31 1 -35 0.7 0.2 -4 -16 -3 -48 1 -35 0.4 0.7 38 -23 15 11 2 -35 0.7 0.7 14 -19 20 -45 1 -35 0.4 0.5 43 13 28 -25 1 -35 0.7 0.8 17 -45 42 7 2 -35 0.6 0.3 -11 -37 -3 -40 1 -35 0.2 0.6 38 -29 33 4 2 -35 0.7 0.3 48 -21 49 6 1 -35 0.3 0.6 46 -45 35 -25 2 -35 0.7 0.6 -20 -42 3 -16 2 -35 0.7 0.2 40 -39 36 32 2 -35 0.5 0.2 13 -47 22 9 2 -35 0.2 0.9 25 19 -27 -49 1 -35 0.6 0.6 -9 -49 43 -29 2 -35 0.4 0.3 27 16 21 -7 1 -35 0.2 0.1 20 1 22 -27 1 -35 0.9 0.1 -40 -41 37 26 2 -35 0.3 0.6 -24 -48 9 -19 2 -35 0.9 0.5 35 20 28 -13 1 -35 0.5 0.1 -21 -43 32 10 2 -35 0.9 0.1 46 -30 37 7 1 -35 0.2 0.4 30 -15 -4 -29 1 -35 0.2 0.9 -15 -35 6 -12 2 -35 0.6 0.2 32 -48 -4 -9 1 -35 0.3 0.1 -18 -30 49 31 2 -35 0.2 0.6 9 5 -3 -36 1 -35 0.9 0.1 46 -7 28 -39 1 -35 0.1 0.7 40 -44 11 -29 2 -35 0.8 0.8 15 0 43 -22 2 -35 0.4 0.5 46 -38 45 -31 1 -35 0.3 0.1 24 -17 28 -27 1 -35 0.8 0.2 -11 -35 50 37 2 -35 0.7 0.1 -5 -48 -13 -37 1 -35 0.9 0.8 30 -11 -13 -17 1 -35 0.9 0.6 22 2 37 -8 2 -35 0.2 0.3 20 -7 34 -32 2 -35 0.3 0.5 36 -12 -27 -43 1 -35 0.3 0.3 48 30 -2 -28 1 -35 0.1 0.9 -14 -45 8 -14 2 -35 0.9 0.5 6 -41 13 -40 1 -35 0.7 0.1 36 -43 -27 -38 1 -35 0.5 0.6 9 8 12 -37 1 -35 0.3 0.3 47 -7 -4 -25 1 -35 0.7 0.7 4 -31 46 -49 2 -35 0.8 0.5 43 40 44 -47 1 -35 0.4 0.3 -13 -37 37 -37 2 -35 0.3 0.2 34 -10 -1 -32 1 -35 0.6 0.9 30 -13 49 -15 2 -35 0.5 0.6 -31 -46 -6 -28 2 -35 
[Body of a raw data file carried by a later patch in this series; the surrounding diff headers were stripped in extraction, so the file path and patch number are not recoverable. The file is headerless, whitespace-delimited trial data with eight fields per record, wrapped arbitrarily across lines in the original. The first records visible in this span read:

0.4 0.3 -32 -43 -43 -48 2 -35
0.8 0.8 14 -42 17 2 2 -35
0.1 0.8 -18 -38 -17 -48 2 -35

i.e. two probabilities in (0, 1) with one decimal place, four signed integer amounts in roughly [-50, 50], a choice code (1 or 2), and an index that holds one value for a long run of records and steps from -35 through -43 over this span. The span opens and closes mid-record, so the index may equally be the leading field of each record, e.g. a subject identifier.]
-43 0.8 0.2 40 25 11 -29 1 -43 0.3 0.5 14 -40 16 -27 2 -43 0.2 0.5 -23 -34 5 0 2 -43 0.1 0.1 4 -28 -10 -18 2 -43 0.9 0.4 27 -29 -24 -28 1 -43 0.7 0.6 46 38 44 -47 1 -43 0.3 0.4 -36 -40 7 -12 2 -43 0.5 0.2 11 -41 -1 -29 1 -43 0.2 0.6 -1 -18 31 -50 2 -43 0.9 0.9 39 -27 15 -5 1 -43 0.6 0.9 40 4 32 29 2 -43 0.7 0.7 8 -8 44 9 2 -43 0.9 0.9 28 -27 26 -32 2 -43 0.3 0.6 14 -35 38 -42 2 -43 0.2 0.1 42 23 -4 -48 1 -43 0.6 0.6 0 -1 5 -16 2 -43 0.6 0.1 -4 -38 23 12 2 -43 0.9 0.9 -9 -32 28 -28 2 -43 0.4 0.1 -4 -13 -4 -39 1 -43 0.7 0.3 42 -33 20 -42 1 -43 0.6 0.5 20 -30 -1 -38 1 -43 0.1 0.7 -19 -42 -27 -29 2 -43 0.5 0.7 -7 -27 19 4 1 -43 0.3 0.7 26 21 10 -11 1 -43 0.3 0.4 40 -4 15 -10 1 -43 0.5 0.8 18 -17 -12 -33 1 -43 0.5 0.7 -9 -30 28 7 2 -43 0.3 0.8 40 -29 50 48 2 -43 0.8 0.1 -11 -14 31 22 2 -43 0.8 0.2 38 -30 2 -25 1 -43 0.5 0.1 18 -3 -12 -34 1 -43 0.7 0.5 0 -35 -8 -24 1 -43 0.7 0.7 46 35 14 10 1 -43 0.9 0.7 -3 -21 47 6 2 -43 0.7 0.1 21 11 35 -2 1 -43 0.4 0.9 -24 -48 31 20 2 -43 0.2 0.4 43 -12 25 -39 1 -43 0.5 0.6 -24 -34 42 8 2 -43 0.6 0.5 -39 -46 19 -3 2 -43 0.7 0.6 -20 -37 17 9 2 -43 0.2 0.8 23 -17 -3 -27 1 -43 0.8 0.6 16 -40 14 5 2 -43 0.2 0.1 -13 -36 40 -13 2 -43 0.4 0.7 7 -5 25 -32 2 -43 0.8 0.7 41 28 43 28 2 -43 0.9 0.4 -13 -22 11 -34 2 -43 0.4 0.9 18 -31 36 10 2 -43 0.3 0.1 -33 -47 48 -37 2 -43 0.7 0.2 -20 -44 -34 -44 2 -43 0.5 0.3 21 -46 34 -30 1 -43 0.7 0.2 -7 -50 -13 -27 1 -43 0.3 0.4 4 -21 17 -44 2 -43 0.7 0.7 -23 -26 9 -33 2 -43 0.1 0.8 14 -13 23 -9 2 -43 0.4 0.1 10 -33 2 -33 2 -43 0.6 0.6 35 16 42 -4 1 -43 0.1 0.3 46 27 41 -42 1 -43 0.7 0.6 6 -40 27 -50 2 -43 0.1 0.4 45 -38 25 -18 1 -43 0.1 0.4 -25 -50 -22 -36 2 -43 0.5 0.7 15 -28 -31 -39 1 -43 0.5 0.7 20 -25 32 -12 2 -43 0.3 0.7 50 1 20 -35 1 -43 0.8 0.4 14 -14 45 -31 2 -43 0.1 0.2 42 -49 -33 -34 2 -43 0.4 0.9 7 4 24 -33 2 -43 0.5 0.9 8 -12 39 12 2 -43 0.5 0.7 -15 -35 24 -43 2 -43 0.4 0.4 -10 -23 -31 -32 1 -43 0.7 0.8 50 20 34 16 2 -43 0.2 0.4 -9 -46 -18 -22 2 -43 0.3 0.1 26 -28 39 -25 2 -43 0.3 0.1 48 -10 4 -50 1 -43 0.6 0.2 -14 -24 -9 -24 1 -43 0.4 0.3 38 4 4 -42 1 -43 0.3 0.9 -13 -27 30 5 2 -43 0.8 0.9 -16 -31 -40 -47 2 -43 0.7 0.7 17 5 -6 -48 1 -43 0.6 0.7 22 -34 8 -49 2 -43 0.5 0.8 30 7 -23 -43 1 -43 0.4 0.6 -21 -34 14 -27 2 -43 0.8 0.4 -13 -50 15 -7 2 -43 0.6 0.7 10 -10 -39 -50 1 -43 0.7 0.1 24 -20 3 -33 1 -43 0.7 0.1 14 -35 17 -3 1 -43 0.9 0.3 -12 -50 -8 -34 1 -43 0.1 0.9 31 27 37 1 2 -44 0.7 0.1 48 -40 32 -37 1 -44 0.8 0.5 -21 -40 39 3 2 -44 0.6 0.8 -29 -40 14 6 2 -44 0.9 0.3 37 -23 47 -34 1 -44 0.4 0.5 49 -18 -37 -50 1 -44 0.2 0.5 -24 -46 32 18 2 -44 0.8 0.4 41 27 27 -2 1 -44 0.6 0.6 29 -45 7 -28 1 -44 0.7 0.5 -33 -48 27 19 2 -44 0.1 0.8 -39 -42 -34 -49 2 -44 0.8 0.6 -36 -39 31 -44 2 -44 0.9 0.2 26 -1 -20 -21 1 -44 0.8 0.2 -5 -15 24 -32 1 -44 0.8 0.9 -21 -35 -6 -17 2 -44 0.7 0.3 27 -31 25 -41 1 -44 0.6 0.5 -8 -17 9 -28 2 -44 0.9 0.1 -15 -34 -29 -40 1 -44 0.7 0.6 28 -5 0 -4 1 -44 0.2 0.8 8 -33 8 -39 2 -44 0.3 0.4 28 10 3 -35 1 -44 0.6 0.3 -4 -12 32 28 2 -44 0.9 0.2 -40 -49 40 -44 1 -44 0.8 0.3 28 11 41 -48 1 -44 0.7 0.2 -5 -26 46 2 2 -44 0.6 0.9 12 8 -26 -45 1 -44 0.4 0.4 35 -28 5 -36 1 -44 0.4 0.2 -28 -46 39 -44 2 -44 0.8 0.7 14 -34 29 -13 2 -44 0.6 0.8 21 -7 43 37 2 -44 0.9 0.3 -4 -30 8 -38 1 -44 0.6 0.1 -1 -30 29 -38 1 -44 0.2 0.5 -4 -22 14 -32 2 -44 0.5 0.5 -38 -43 39 14 2 -44 0.2 0.5 2 -23 8 -6 2 -44 0.8 0.6 -6 -19 35 -2 2 -44 0.4 0.5 -31 -33 45 -4 2 -44 0.2 0.9 -21 -37 37 -8 2 -44 0.9 0.4 41 -34 29 -49 1 -44 0.2 0.8 -24 -28 43 -15 2 -44 0.5 0.9 -40 -50 11 -17 2 -44 0.2 0.5 -10 -38 0 -19 2 -44 0.9 0.1 40 11 34 -31 1 -44 
0.3 0.7 40 14 19 -4 1 -44 0.7 0.6 -8 -36 22 11 2 -44 0.3 0.1 10 -9 8 -31 1 -44 0.3 0.8 3 -27 47 -2 2 -44 0.3 0.4 50 49 -19 -39 1 -44 0.4 0.6 38 -6 48 17 2 -44 0.6 0.4 4 -24 22 -19 2 -44 0.8 0.9 -8 -37 -11 -36 1 -44 0.9 0.1 32 -33 40 -35 1 -44 0.4 0.5 34 -41 -21 -44 1 -44 0.7 0.2 -2 -5 46 -20 2 -44 0.9 0.1 42 11 4 -9 1 -44 0.3 0.1 -12 -37 29 -39 2 -44 0.7 0.6 39 10 5 -35 1 -44 0.8 0.8 -34 -42 -14 -39 2 -44 0.3 0.4 50 25 44 18 1 -44 0.5 0.2 -25 -30 22 0 2 -44 0.8 0.2 34 -39 41 -17 1 -44 0.3 0.3 -27 -32 -33 -37 1 -44 0.5 0.7 15 -26 50 -23 2 -44 0.4 0.1 -13 -21 24 2 2 -44 0.9 0.7 4 -37 34 -16 2 -44 0.4 0.9 -14 -23 27 12 2 -44 0.9 0.9 -8 -46 5 -21 2 -44 0.4 0.3 38 14 29 -28 1 -44 0.1 0.8 -28 -39 5 -29 2 -44 0.2 0.1 25 5 -4 -26 1 -44 0.1 0.6 47 18 41 -49 1 -44 0.8 0.7 4 -29 46 40 2 -44 0.4 0.7 45 42 -27 -35 1 -44 0.9 0.5 37 -16 32 28 1 -44 0.8 0.1 17 -8 32 7 1 -44 0.7 0.8 46 14 9 7 1 -44 0.9 0.3 -3 -34 2 -8 2 -44 0.9 0.5 44 -1 45 -11 1 -44 0.5 0.1 44 26 -9 -20 1 -44 0.6 0.4 33 -40 29 7 2 -44 0.5 0.3 2 -48 31 -42 2 -44 0.7 0.3 -23 -43 1 -47 2 -44 0.5 0.9 31 -40 12 -46 2 -44 0.3 0.1 49 -4 16 -14 1 -44 0.2 0.7 20 -8 31 -3 2 -44 0.2 0.8 -26 -31 -45 -47 1 -44 0.5 0.1 35 14 40 28 2 -44 0.9 0.8 -12 -37 24 -43 2 -44 0.5 0.5 43 -46 -33 -49 1 -44 0.1 0.3 -34 -43 46 -34 2 -44 0.1 0.8 5 3 12 -5 2 -44 0.5 0.6 14 -15 -17 -43 1 -44 0.9 0.2 -27 -38 -30 -41 2 -44 0.2 0.8 6 -36 -9 -44 2 -44 0.7 0.5 48 10 32 -14 1 -44 0.1 0.2 17 -37 41 39 2 -44 0.8 0.2 15 4 24 -45 1 -44 0.4 0.9 10 7 23 -43 2 -44 0.1 0.1 34 -38 24 17 2 -44 0.3 0.4 -20 -43 26 -33 2 -44 0.1 0.4 8 -8 2 -31 2 -44 0.4 0.1 -12 -13 23 -14 2 -44 0.5 0.4 -15 -27 10 -40 2 -44 0.3 0.5 42 -31 38 30 2 -44 0.7 0.5 32 -11 -22 -31 1 -44 0.8 0.8 12 -19 -13 -21 1 -44 0.7 0.8 11 -43 44 -7 2 -44 0.7 0.3 45 7 -33 -40 1 -44 0.8 0.1 -9 -32 36 -2 2 -44 0.7 0.8 44 -11 40 5 2 -44 0.7 0.1 47 45 15 -46 1 -44 0.7 0.3 3 -9 28 -32 1 -44 0.9 0.2 32 -8 29 9 1 -44 0.9 0.7 9 -50 7 -34 1 -44 0.9 0.4 12 -28 42 -1 2 -44 0.9 0.3 6 -29 7 -16 1 -44 0.7 0.1 5 -15 -4 -14 1 -44 0.2 0.1 45 -37 -10 -33 1 -44 0.2 0.4 24 -27 -9 -44 1 -44 0.9 0.1 20 -10 40 25 2 -44 0.6 0.5 -27 -28 19 15 2 -44 0.7 0.8 0 -1 11 -33 2 -44 0.1 0.9 34 6 28 2 2 -44 0.5 0.8 8 -36 -20 -40 1 -44 0.2 0.5 38 34 -6 -9 1 -44 0.6 0.1 46 -25 50 12 1 -44 0.3 0.8 20 -3 47 7 2 -44 0.3 0.8 -11 -28 -20 -50 2 -44 0.4 0.5 43 -15 7 -20 1 -44 0.6 0.3 -11 -30 4 -45 1 -44 0.4 0.6 2 0 40 36 2 -44 0.8 0.1 -3 -41 29 18 2 -44 0.5 0.3 36 23 -10 -50 1 -44 0.4 0.3 6 -14 27 -41 1 -44 0.4 0.5 1 -20 39 -13 2 -44 0.6 0.4 28 -33 45 -16 2 -44 0.1 0.5 25 -44 11 4 2 -44 0.7 0.2 46 2 39 34 2 -44 0.9 0.8 24 -1 3 -14 1 -44 0.4 0.2 36 -11 44 -17 1 -44 0.5 0.9 -45 -50 21 -46 2 -44 0.2 0.8 32 30 26 9 1 -44 0.6 0.2 23 19 -29 -45 1 -44 0.9 0.8 20 -22 43 26 2 -44 0.2 0.9 44 37 26 -24 1 -44 0.7 0.8 27 2 22 -28 1 -44 0.1 0.9 14 -50 2 -35 2 -44 0.3 0.7 46 4 37 15 2 -44 0.9 0.2 50 49 12 -26 1 -44 0.4 0.4 36 19 29 21 2 -44 0.2 0.5 29 8 41 37 2 -44 0.2 0.4 23 11 -3 -32 1 -44 0.7 0.9 47 -45 26 -26 2 -44 0.4 0.6 25 21 -14 -43 1 -44 0.5 0.8 50 18 37 -13 1 -44 0.5 0.7 30 25 10 -27 1 -44 0.3 0.4 22 6 0 -29 1 -44 0.4 0.3 20 -35 17 5 2 -44 0.1 0.5 17 -20 11 -49 2 -44 0.3 0.5 48 -21 8 4 2 -44 0.1 0.9 37 -38 29 19 2 -44 0.6 0.9 -21 -23 -10 -19 2 -44 0.9 0.4 -19 -50 4 -14 2 -44 0.7 0.2 21 -40 -44 -49 1 -44 0.1 0.5 -10 -29 45 -46 2 -44 0.4 0.6 33 -22 39 -41 2 -44 0.7 0.8 12 -21 21 4 2 -44 0.9 0.9 45 35 8 -38 1 -44 0.8 0.1 18 -24 -27 -30 1 -44 0.9 0.9 50 23 29 -40 1 -44 0.5 0.7 24 -30 5 3 2 -44 0.2 0.1 5 -30 -3 -31 1 -44 0.7 0.1 0 -46 50 6 2 -44 0.5 0.6 43 42 -33 -39 1 -44 0.1 0.8 
37 13 21 -11 1 -44 0.1 0.5 39 -17 47 32 2 -44 0.2 0.9 34 -14 35 -19 2 -44 0.7 0.8 -28 -50 20 17 2 -44 0.3 0.6 23 -7 1 -9 2 -44 0.2 0.2 46 33 36 -49 1 -44 0.9 0.2 -13 -26 12 -32 2 -44 0.1 0.3 -10 -24 26 -13 2 -44 0.7 0.9 36 -45 10 -15 1 -44 0.6 0.6 50 -25 21 -41 1 -44 0.2 0.9 25 11 -39 -42 1 -44 0.1 0.2 9 7 34 -29 1 -44 0.8 0.1 20 -21 -42 -45 1 -44 0.4 0.2 40 2 16 -22 1 -44 0.7 0.6 17 -14 46 37 2 -44 0.2 0.5 -16 -25 43 -2 2 -44 0.2 0.3 45 -19 -10 -22 1 -44 0.4 0.7 -11 -16 10 -22 2 -44 0.8 0.3 40 -49 -14 -37 1 -44 0.5 0.3 -35 -43 46 -38 2 -44 0.6 0.8 3 -22 -14 -15 1 -44 0.9 0.2 -29 -40 21 2 2 -44 0.4 0.9 38 21 42 -47 1 -44 0.3 0.2 35 27 6 -9 1 -44 0.6 0.1 47 -21 49 7 1 -44 0.1 0.2 19 2 -9 -19 1 -44 0.1 0.9 21 -10 29 -12 2 -44 0.8 0.7 22 -49 4 -6 1 -44 0.9 0.7 24 -18 13 6 1 -44 0.2 0.5 39 -10 43 -7 2 -44 0.8 0.1 24 -25 -5 -13 1 -44 0.9 0.2 26 -12 6 -22 1 -44 0.4 0.7 19 -1 3 -18 1 -44 0.8 0.5 35 -31 3 -10 1 -44 0.6 0.2 32 7 -8 -21 1 -44 0.9 0.2 8 -11 42 23 2 -44 0.8 0.5 14 -26 11 -4 1 -44 0.6 0.7 13 -27 48 -10 2 -44 0.7 0.6 39 -7 5 -13 1 -44 0.2 0.8 20 8 -3 -17 1 -44 0.4 0.3 10 5 46 21 2 -44 0.2 0.8 36 -7 22 15 2 -44 0.5 0.8 36 -11 19 -20 2 -44 0.9 0.4 35 -14 -41 -50 1 -44 0.8 0.4 42 -11 -34 -44 1 -44 0.9 0.6 -9 -28 29 26 2 -44 0.9 0.1 16 -3 -24 -49 1 -44 0.2 0.7 25 -26 3 -48 2 -44 0.7 0.6 18 -49 49 -9 2 -44 0.9 0.7 50 -30 6 -12 1 -44 0.3 0.9 45 -26 27 -29 2 -44 0.5 0.1 -1 -20 25 2 2 -44 0.2 0.1 28 -28 41 29 2 -44 0.3 0.1 45 -25 -2 -24 1 -44 0.9 0.5 -21 -30 37 -45 2 -44 0.5 0.5 41 30 -18 -40 1 -44 0.1 0.4 18 -18 47 8 2 -44 0.7 0.7 42 -41 33 -12 2 -44 0.7 0.4 45 -1 24 -38 1 -44 0.3 0.3 -42 -49 50 -1 2 -44 0.3 0.4 35 27 0 -18 1 -44 0.5 0.9 47 32 47 27 2 -44 0.4 0.2 12 -46 22 -49 1 -44 0.5 0.8 10 -17 -31 -40 1 -44 0.8 0.1 18 16 -32 -50 1 -44 0.1 0.4 -18 -41 -30 -32 2 -44 0.7 0.8 -17 -50 48 -30 2 -44 0.9 0.9 22 -48 -11 -34 1 -44 0.5 0.1 12 -18 3 -15 1 -44 0.4 0.7 25 22 4 -41 1 -44 0.8 0.3 33 -6 34 19 2 -44 0.6 0.7 -1 -12 39 -28 2 -44 0.9 0.9 33 -31 23 19 2 -44 0.2 0.4 19 -25 43 -19 2 -44 0.2 0.2 4 -29 -23 -27 1 -44 0.4 0.1 4 -16 -15 -42 1 -44 0.6 0.7 22 -33 -6 -13 1 -44 0.8 0.2 4 -2 -11 -27 1 -44 0.6 0.7 -2 -14 32 -42 2 -44 0.3 0.7 28 -19 18 7 2 -44 0.3 0.4 21 14 39 26 2 -44 0.9 0.9 -9 -38 11 -47 2 -44 0.6 0.7 32 -33 47 -42 2 -44 0.8 0.2 2 -4 50 -5 1 -44 0.2 0.5 22 -3 28 2 2 -44 0.2 0.1 38 30 -4 -12 1 -44 0.2 0.2 32 -43 16 15 2 -44 0.3 0.3 22 -3 27 -27 1 -44 0.1 0.5 -3 -19 -32 -49 1 -44 0.5 0.7 -21 -29 21 -31 2 -44 0.6 0.4 -6 -35 -21 -23 1 -44 0.4 0.7 -6 -32 10 -48 2 -44 0.1 0.9 30 5 -30 -35 1 -44 0.7 0.7 9 -1 -6 -32 1 -44 0.3 0.2 -9 -24 8 -7 2 -44 0.5 0.4 21 -11 47 -19 2 -44 0.9 0.4 47 19 25 10 1 -44 0.6 0.3 -6 -31 11 -15 2 -44 0.4 0.1 47 -31 -7 -24 1 -44 0.1 0.9 19 6 50 9 2 -44 0.4 0.5 49 22 30 -16 1 -44 0.1 0.5 7 -41 45 -21 2 -44 0.1 0.3 36 -14 21 4 2 -44 0.2 0.9 34 16 31 19 2 -44 0.4 0.5 46 -40 4 -25 1 -44 0.2 0.5 28 -35 16 -13 2 -44 0.9 0.5 32 -24 2 -16 1 -44 0.7 0.1 45 12 35 -38 1 -44 0.8 0.2 29 4 7 -17 1 -44 0.8 0.8 -27 -50 9 -23 2 -44 0.2 0.9 23 -41 2 -1 2 -44 0.9 0.2 35 -38 36 -25 1 -44 0.2 0.2 47 45 26 -11 1 -44 0.4 0.5 41 34 -12 -19 1 -44 0.4 0.6 1 -43 28 -48 2 -44 0.9 0.3 50 -24 47 -39 1 -44 0.5 0.3 38 12 11 -50 1 -44 0.1 0.5 40 15 42 34 2 -44 0.7 0.9 -13 -44 47 39 2 -44 0.2 0.8 50 -43 -4 -29 2 -44 0.2 0.4 47 -37 33 -2 2 -44 0.2 0.1 48 -23 33 -8 2 -44 0.4 0.1 22 -18 14 -48 1 -44 0.6 0.1 49 -22 35 3 1 -44 0.6 0.6 40 39 44 -31 1 -44 0.6 0.5 11 -1 30 20 1 -44 0.7 0.2 30 -44 3 -29 1 -45 0.4 0.1 25 3 22 -1 1 -45 0.9 0.9 35 2 -22 -29 1 -45 0.8 0.7 3 -7 11 -49 1 -45 0.7 0.3 -18 -36 -43 -49 1 
-45 0.5 0.5 11 -2 -1 -4 1 -45 0.5 0.2 36 -6 0 -17 1 -45 0.4 0.7 -29 -38 15 -11 2 -45 0.8 0.1 -6 -27 48 -12 2 -45 0.3 0.6 31 -15 21 -12 2 -45 0.9 0.3 14 3 45 -45 1 -45 0.4 0.5 35 8 10 -2 1 -45 0.3 0.2 3 -48 36 -45 2 -45 0.4 0.4 39 0 -13 -38 1 -45 0.8 0.3 17 16 -45 -48 1 -45 0.9 0.6 10 -20 25 -16 1 -45 0.9 0.7 -20 -38 34 -18 2 -45 0.7 0.1 -13 -20 34 17 2 -45 0.2 0.9 40 14 31 25 2 -45 0.2 0.9 -13 -39 8 -4 2 -45 0.4 0.2 23 22 39 -38 1 -45 0.8 0.8 -14 -33 39 -6 2 -45 0.4 0.8 -21 -50 -2 -50 2 -45 0.4 0.8 18 -46 24 2 2 -45 0.3 0.8 32 -42 -27 -50 1 -45 0.7 0.7 28 25 46 -48 1 -45 0.2 0.1 -16 -39 9 -46 1 -45 0.1 0.4 -6 -15 5 -49 1 -45 0.3 0.1 1 -26 -19 -43 1 -45 0.4 0.3 35 30 17 -3 1 -45 0.4 0.4 30 -44 5 3 2 -45 0.4 0.8 -39 -43 46 28 2 -45 0.2 0.1 -1 -7 34 28 2 -45 0.3 0.9 8 -41 50 10 2 -45 0.8 0.7 23 -24 18 8 2 -45 0.3 0.1 -17 -43 45 19 2 -45 0.1 0.6 -37 -45 -14 -21 2 -45 0.4 0.2 -23 -41 14 -20 2 -45 0.4 0.3 -35 -44 -14 -25 2 -45 0.6 0.9 34 -47 14 -31 2 -45 0.9 0.9 5 -31 37 -29 2 -45 0.7 0.3 -3 -47 -20 -49 1 -45 0.4 0.3 49 37 43 -9 1 -45 0.6 0.3 1 -40 27 -30 1 -45 0.9 0.4 9 -31 30 -43 1 -45 0.2 0.9 -42 -49 13 8 2 -45 0.9 0.3 43 2 21 16 1 -45 0.9 0.2 -5 -21 49 15 2 -45 0.6 0.9 41 -8 17 -9 1 -45 0.6 0.3 -46 -48 -39 -46 2 -45 0.4 0.5 -1 -40 13 -19 2 -45 0.9 0.9 50 1 -4 -30 1 -45 0.4 0.8 -2 -4 35 -13 2 -45 0.8 0.8 13 12 28 -14 2 -45 0.5 0.1 34 -16 -13 -17 1 -45 0.1 0.4 8 -23 35 -38 2 -45 0.4 0.1 8 -20 19 -23 1 -45 0.6 0.8 40 -9 -4 -44 1 -45 0.6 0.6 -9 -20 47 -18 2 -45 0.2 0.5 -6 -46 2 -18 2 -45 0.1 0.7 5 -5 -3 -7 1 -45 0.3 0.9 39 -35 -4 -46 2 -45 0.7 0.4 31 26 -29 -50 1 -45 0.9 0.7 27 -16 46 27 2 -45 0.6 0.1 -10 -36 39 31 2 -45 0.3 0.4 28 -32 -27 -48 1 -45 0.4 0.8 -7 -13 -35 -48 1 -45 0.6 0.4 49 32 41 27 1 -45 0.2 0.7 14 -22 34 -47 2 -45 0.5 0.4 17 -14 27 -15 2 -45 0.8 0.5 11 10 -38 -49 1 -45 0.7 0.1 20 -18 37 7 2 -45 0.6 0.7 -45 -49 -23 -39 2 -45 0.8 0.2 -6 -39 32 -10 2 -45 0.9 0.5 47 -5 47 40 2 -45 0.4 0.8 22 -39 20 -8 2 -45 0.3 0.7 -20 -21 -8 -32 2 -45 0.2 0.2 29 -17 40 16 2 -45 0.3 0.7 0 -23 37 4 2 -45 0.7 0.9 41 -10 18 -35 1 -45 0.8 0.8 -32 -47 47 -20 2 -45 0.1 0.5 16 -18 22 -17 2 -45 0.9 0.1 28 -49 28 -1 1 -45 0.9 0.3 -40 -41 13 -5 2 -45 0.1 0.3 2 -5 43 31 2 -45 0.1 0.2 49 44 47 4 1 -45 0.1 0.6 48 34 49 11 1 -45 0.6 0.1 -28 -30 39 -1 2 -45 0.2 0.3 2 -36 14 10 2 -45 0.3 0.1 15 4 1 -20 1 -45 0.7 0.8 25 -24 17 -27 1 -45 0.1 0.7 28 -48 24 -7 2 -45 0.4 0.1 -11 -24 38 -2 2 -45 0.1 0.7 46 18 41 -1 2 -45 0.6 0.1 12 -36 34 -7 1 -45 0.4 0.1 -29 -34 2 -40 2 -45 0.7 0.8 47 9 15 -8 1 -45 0.4 0.8 10 -3 -5 -10 1 -45 0.9 0.9 13 -44 6 -43 1 -45 0.6 0.4 41 -7 -41 -43 1 -45 0.3 0.6 42 -22 21 -30 2 -45 0.7 0.2 -26 -44 45 16 2 -45 0.8 0.2 18 -10 -7 -49 1 -45 0.4 0.6 -6 -21 50 -41 2 -45 0.4 0.2 39 -9 9 -43 1 -45 0.7 0.5 -27 -40 42 -44 2 -45 0.5 0.1 36 30 0 -17 1 -45 0.7 0.4 35 25 30 -15 1 -45 0.4 0.5 -3 -26 39 24 2 -45 0.7 0.7 6 -24 -29 -49 1 -45 0.1 0.1 29 23 21 -2 1 -45 0.9 0.3 45 -8 24 -1 1 -45 0.9 0.2 29 -50 7 -16 1 -45 0.6 0.6 6 -15 19 -16 2 -45 0.1 0.1 48 -18 -25 -32 1 -45 0.7 0.4 -8 -32 29 -22 2 -45 0.7 0.7 48 -50 50 -44 2 -45 0.9 0.1 40 -30 20 -45 1 -45 0.5 0.4 42 -24 15 8 2 -45 0.1 0.7 12 3 -43 -45 1 -45 0.6 0.5 -4 -20 21 -28 2 -45 0.6 0.8 10 -39 25 -39 2 -45 0.2 0.2 36 -16 -24 -26 1 -45 0.6 0.6 37 5 -11 -22 1 -45 0.1 0.1 -22 -45 36 -6 2 -45 0.6 0.8 -5 -42 -14 -47 1 -45 0.6 0.7 11 9 39 32 2 -45 0.4 0.8 50 -13 40 -23 2 -45 0.3 0.3 10 -27 21 8 2 -45 0.3 0.1 35 -45 50 -15 2 -45 0.1 0.8 11 2 27 -15 2 -45 0.5 0.1 25 -21 -38 -44 1 -45 0.2 0.6 -5 -36 12 1 2 -45 0.6 0.8 1 -45 -15 -28 2 -45 0.2 0.3 49 -21 36 18 2 -45 
0.1 0.2 24 -47 -19 -32 2 -45 0.7 0.5 39 0 -11 -47 1 -45 0.1 0.4 28 11 19 -47 1 -45 0.1 0.8 4 -5 -8 -42 1 -45 0.6 0.1 -1 -26 24 -34 1 -45 0.7 0.7 7 -24 -19 -30 1 -45 0.1 0.7 11 4 6 -17 1 -45 0.4 0.7 27 -36 26 19 2 -45 0.9 0.4 -16 -27 45 -50 2 -45 0.1 0.6 16 14 40 19 2 -45 0.8 0.6 42 19 17 -50 1 -45 0.2 0.2 36 -32 34 -33 2 -45 0.3 0.3 -21 -48 -11 -17 2 -45 0.3 0.6 -12 -44 8 2 2 -45 0.4 0.1 17 -27 -38 -47 1 -45 0.4 0.5 41 39 -2 -24 1 -45 0.4 0.9 35 0 -32 -38 1 -45 0.5 0.5 33 -9 40 -15 1 -45 0.4 0.8 33 -29 -4 -34 1 -45 0.6 0.9 -10 -42 23 -38 2 -45 0.4 0.2 34 -29 28 -8 2 -45 0.4 0.2 50 8 -30 -37 1 -45 0.4 0.6 -27 -49 49 -19 2 -45 0.2 0.9 12 2 19 10 2 -45 0.9 0.5 6 -36 -8 -27 1 -45 0.5 0.4 21 -3 40 -41 1 -45 0.7 0.7 37 -32 -9 -31 1 -45 0.3 0.5 43 -49 41 20 2 -45 0.4 0.2 26 3 39 19 2 -45 0.4 0.5 -22 -23 -37 -49 1 -45 0.6 0.8 6 -7 49 -2 2 -45 0.7 0.6 46 -33 33 -27 1 -45 0.2 0.4 -28 -32 26 6 2 -45 0.7 0.3 50 46 48 -24 1 -45 0.4 0.7 9 -37 33 1 2 -45 0.5 0.3 1 -30 -34 -36 1 -45 0.6 0.6 7 -48 26 -32 2 -45 0.6 0.1 -8 -39 37 -19 2 -45 0.5 0.5 -9 -35 -15 -48 1 -45 0.2 0.1 3 -3 29 8 2 -45 0.6 0.5 26 11 48 -7 1 -45 0.8 0.4 26 -37 2 -36 1 -45 0.8 0.9 -18 -38 1 -5 2 -45 0.8 0.1 -38 -45 34 4 2 -45 0.4 0.6 16 2 -29 -36 1 -45 0.7 0.5 -6 -36 37 3 2 -45 0.5 0.4 -1 -50 23 12 2 -45 0.7 0.9 42 -49 46 -4 2 -45 0.6 0.2 20 -48 4 -44 1 -45 0.4 0.9 30 -28 29 -4 2 -45 0.9 0.8 47 26 43 37 1 -45 0.6 0.8 19 -7 43 -49 2 -45 0.7 0.6 -43 -48 30 -20 2 -45 0.7 0.6 -4 -5 -4 -36 1 -45 0.5 0.8 13 -31 -11 -39 1 -45 0.5 0.5 8 -47 40 -32 2 -45 0.8 0.2 -45 -49 5 -24 2 -45 0.8 0.7 27 22 14 -29 1 -45 0.5 0.3 22 -33 34 -25 1 -45 0.9 0.3 -13 -16 39 -26 2 -45 0.1 0.3 43 19 17 9 1 -45 0.2 0.1 -5 -34 40 -10 2 -45 0.3 0.4 8 -47 -5 -11 2 -45 0.5 0.4 45 -37 -34 -35 1 -45 0.6 0.3 48 -8 28 -44 1 -45 0.5 0.8 49 -39 -7 -18 1 -45 0.5 0.8 35 -3 21 -11 1 -45 0.6 0.2 -11 -38 49 30 2 -45 0.8 0.7 -20 -22 8 -29 2 -45 0.9 0.4 22 -5 -6 -26 1 -45 0.5 0.2 -11 -16 3 -9 2 -45 0.5 0.2 23 19 50 12 2 -45 0.2 0.1 29 -44 48 12 2 -45 0.8 0.5 3 -23 4 -33 1 -45 0.9 0.9 44 -39 -31 -40 1 -45 0.4 0.4 39 -27 -9 -44 1 -45 0.4 0.9 -29 -30 5 -7 2 -45 0.1 0.2 50 -18 23 21 2 -45 0.6 0.6 45 -28 -5 -43 1 -45 0.7 0.2 50 -9 13 -30 1 -45 0.8 0.1 -30 -36 -15 -44 1 -45 0.3 0.4 -34 -36 18 8 2 -45 0.2 0.2 -1 -2 7 1 2 -45 0.4 0.8 6 -41 21 20 2 -45 0.9 0.9 30 1 22 3 1 -45 0.4 0.6 -33 -35 34 1 2 -45 0.4 0.2 16 -34 48 -13 2 -45 0.9 0.5 47 -40 43 -11 1 -45 0.8 0.4 16 -1 41 -4 1 -45 0.1 0.1 -17 -25 -14 -31 1 -45 0.4 0.3 38 -19 23 -18 1 -45 0.5 0.1 0 -40 21 -37 1 -45 0.4 0.4 0 -37 29 20 2 -45 0.4 0.7 47 44 -10 -38 1 -45 0.5 0.6 -2 -34 12 -12 2 -45 0.3 0.8 13 -14 30 13 2 -45 0.5 0.9 40 35 39 -14 1 -45 0.2 0.5 44 -22 43 -1 2 -45 0.6 0.3 -5 -37 -40 -41 1 -45 0.6 0.2 -10 -36 19 4 2 -45 0.4 0.9 20 -27 5 4 2 -45 0.7 0.3 31 -18 33 -22 1 -45 0.9 0.2 49 3 33 -35 1 -45 0.2 0.6 -15 -25 -3 -35 2 -45 0.9 0.5 16 -20 12 -11 1 -45 0.8 0.4 48 -8 -8 -35 1 -45 0.6 0.5 23 -5 33 14 2 -45 0.5 0.6 40 28 18 -11 1 -45 0.3 0.7 26 -25 5 -3 2 -45 0.7 0.7 -18 -28 -31 -36 1 -45 0.4 0.6 38 -13 38 34 2 -45 0.7 0.7 38 -27 14 1 1 -45 0.3 0.6 22 -4 28 18 2 -45 0.8 0.6 -15 -16 -19 -34 1 -45 0.1 0.1 41 -25 -17 -40 1 -45 0.7 0.3 12 -42 -23 -25 1 -45 0.1 0.6 -2 -5 23 9 2 -45 0.2 0.7 29 -47 -2 -43 2 -45 0.1 0.1 36 -43 31 -12 2 -45 0.7 0.8 17 -44 -18 -27 1 -45 0.5 0.6 49 19 17 -8 1 -45 0.3 0.1 18 -29 48 -48 1 -45 0.6 0.3 49 -23 32 -18 1 -45 0.2 0.4 31 -5 14 -15 2 -45 0.4 0.6 4 -10 0 -7 2 -45 0.5 0.8 28 -21 27 12 2 -45 0.2 0.8 5 -7 -32 -42 1 -45 0.4 0.4 44 42 40 33 1 -45 0.9 0.5 4 -36 19 -48 1 -45 0.7 0.4 -18 -19 22 -35 2 -45 0.7 0.8 25 
-38 30 4 2 -45 0.6 0.8 18 -40 -14 -44 1 -45 0.5 0.2 -39 -48 38 -24 2 -45 0.1 0.9 18 11 -22 -38 1 -45 0.6 0.2 8 -50 50 11 2 -45 0.8 0.5 23 -4 -20 -26 1 -45 0.1 0.1 -6 -22 26 -48 1 -45 0.6 0.2 17 -46 35 -20 1 -45 0.6 0.7 15 -18 45 -49 2 -45 0.5 0.2 -34 -39 8 -23 2 -45 0.2 0.3 4 -23 -7 -45 1 -45 0.4 0.4 17 -3 39 -11 2 -45 0.5 0.2 44 11 40 -15 1 -45 0.4 0.1 6 -45 39 -27 2 -45 0.3 0.8 20 -7 18 -50 2 -45 0.2 0.5 25 -2 31 -46 1 -45 0.3 0.4 42 -9 46 -4 2 -45 0.6 0.6 -16 -44 -17 -43 1 -45 0.3 0.4 36 -49 25 -45 2 -45 0.7 0.6 1 -8 6 -31 1 -45 0.8 0.1 11 -39 22 11 2 -45 0.2 0.6 -27 -45 19 18 2 -45 0.1 0.1 31 -41 -16 -32 1 -45 0.6 0.1 33 -29 12 8 1 -45 0.5 0.8 44 2 22 -7 1 -45 0.7 0.6 50 -30 40 -31 1 -45 0.1 0.9 14 -30 44 36 2 -45 0.9 0.5 -38 -45 35 -40 2 -45 0.3 0.6 47 8 46 -3 2 -45 0.7 0.3 24 -21 10 -49 1 -45 0.7 0.1 19 -1 -23 -37 1 -45 0.9 0.2 -14 -35 28 -48 1 -45 0.7 0.2 -16 -21 45 -13 2 -45 0.8 0.6 1 -49 31 -23 2 -45 0.9 0.9 27 -32 21 11 2 -45 0.8 0.2 34 20 28 -28 1 -46 0.9 0.7 3 -50 22 1 2 -46 0.8 0.7 34 15 -6 -37 1 -46 0.3 0.7 30 -32 23 -49 2 -46 0.4 0.7 -13 -47 13 -29 2 -46 0.3 0.5 20 -30 49 -12 2 -46 0.5 0.9 45 42 20 -17 1 -46 0.3 0.8 45 -2 -22 -50 1 -46 0.3 0.2 19 0 19 5 2 -46 0.1 0.8 5 -35 -18 -43 2 -46 0.1 0.7 45 -40 -24 -31 1 -46 0.4 0.3 37 16 41 27 1 -46 0.6 0.1 14 -15 -34 -47 1 -46 0.2 0.8 32 31 30 -30 1 -46 0.4 0.3 48 -9 -17 -47 1 -46 0.6 0.6 49 7 23 -24 2 -46 0.2 0.6 19 -39 25 -34 2 -46 0.3 0.3 15 -9 13 -35 1 -46 0.6 0.7 -3 -48 -5 -43 1 -46 0.8 0.8 35 -25 30 -49 1 -46 0.2 0.3 40 13 18 -21 1 -46 0.3 0.6 40 20 11 -27 1 -46 0.6 0.1 8 5 27 -29 1 -46 0.9 0.5 15 -12 40 10 2 -46 0.4 0.5 -1 -25 -13 -31 2 -46 0.9 0.1 -45 -47 19 -41 1 -46 0.9 0.2 6 -49 9 1 1 -46 0.2 0.7 44 -40 48 19 2 -46 0.5 0.5 50 -29 19 -13 1 -46 0.8 0.8 50 -41 -27 -39 1 -46 0.2 0.1 43 -29 -20 -26 2 -46 0.4 0.3 34 20 -47 -49 1 -46 0.7 0.2 -2 -47 20 -9 2 -46 0.1 0.5 44 6 44 33 2 -46 0.8 0.5 34 -43 15 -21 1 -46 0.8 0.1 50 -22 13 -24 1 -46 0.1 0.8 10 2 48 37 2 -46 0.2 0.8 27 -1 50 -45 2 -46 0.2 0.8 -28 -50 -29 -47 2 -46 0.1 0.3 12 -29 -16 -28 1 -46 0.5 0.7 39 -3 32 -14 2 -46 0.9 0.3 19 -7 48 39 2 -46 0.5 0.5 43 -18 40 -11 2 -46 0.2 0.1 -7 -17 -22 -37 1 -46 0.4 0.5 -32 -38 38 -18 2 -46 0.3 0.5 -2 -3 -18 -28 2 -46 0.2 0.8 24 23 21 15 2 -46 0.2 0.8 25 -41 -24 -27 1 -46 0.3 0.6 44 -21 17 11 2 -46 0.4 0.4 44 5 -11 -39 2 -46 0.9 0.4 25 17 28 -15 1 -46 0.8 0.5 31 -38 49 -37 1 -46 0.7 0.4 6 -7 21 20 2 -46 0.4 0.3 -7 -43 18 4 2 -46 0.4 0.2 49 -2 30 -21 1 -46 0.6 0.1 7 -3 43 -26 2 -46 0.2 0.7 48 8 25 -47 1 -46 0.9 0.9 34 -19 -31 -44 1 -46 0.8 0.1 -1 -47 41 31 2 -46 0.7 0.8 44 19 38 -44 1 -46 0.3 0.1 11 -34 33 -35 1 -46 0.7 0.2 43 -31 7 -18 1 -46 0.6 0.1 -8 -36 28 22 2 -46 0.8 0.2 48 -12 -20 -39 1 -46 0.3 0.9 11 -30 39 -14 2 -46 0.1 0.3 41 -46 1 -38 2 -46 0.9 0.4 46 -7 1 -42 1 -46 0.1 0.3 1 -17 28 -9 2 -46 0.1 0.3 4 -35 39 -48 2 -46 0.3 0.5 7 -35 -17 -33 1 -46 0.5 0.7 15 -48 42 -9 2 -46 0.2 0.5 28 13 -14 -27 1 -46 0.2 0.1 24 -13 50 46 2 -46 0.5 0.2 -9 -20 18 -26 1 -46 0.5 0.4 46 18 45 20 1 -46 0.5 0.1 27 25 -27 -29 1 -46 0.8 0.2 14 -48 37 -50 1 -46 0.5 0.8 42 -31 26 14 2 -46 0.6 0.8 46 -9 45 -14 2 -46 0.8 0.3 -10 -25 48 -28 1 -46 0.1 0.8 17 -1 43 18 2 -46 0.9 0.4 44 -47 4 -47 1 -46 0.8 0.3 29 -35 35 -44 1 -46 0.6 0.4 6 -8 39 -31 1 -46 0.7 0.2 -4 -23 30 -17 2 -46 0.5 0.6 36 -31 23 -37 1 -46 0.7 0.5 46 18 29 20 2 -46 0.8 0.5 11 -37 -18 -28 1 -46 0.1 0.5 30 -20 25 -34 2 -46 0.2 0.6 -37 -43 5 -23 2 -46 0.6 0.9 50 30 49 47 1 -46 0.8 0.7 27 -49 24 -36 1 -46 0.5 0.5 25 11 27 19 2 -46 0.6 0.1 30 20 -10 -25 1 -46 0.3 0.7 33 3 33 -22 1 -46 0.1 0.6 8 
-21 37 -37 2 -46 0.5 0.3 -4 -24 -25 -27 2 -46 0.5 0.3 34 -23 31 -13 1 -46 0.8 0.8 -21 -36 49 -14 2 -46 0.9 0.6 17 -4 -18 -37 1 -46 0.7 0.4 -4 -15 27 -11 2 -46 0.4 0.7 33 -49 27 0 2 -46 0.4 0.3 27 -22 37 -24 1 -46 0.9 0.6 -6 -41 28 -42 2 -46 0.9 0.6 20 -15 41 8 1 -46 0.4 0.4 -1 -18 15 10 2 -46 0.8 0.3 47 -28 -7 -39 1 -46 0.6 0.3 11 8 -18 -40 1 -46 0.6 0.4 27 -37 29 3 2 -46 0.4 0.5 41 17 12 -45 1 -46 0.4 0.1 40 13 -7 -45 1 -46 0.7 0.5 47 16 19 -13 1 -46 0.2 0.3 39 -30 36 12 2 -46 0.2 0.9 49 -36 22 1 2 -46 0.9 0.1 10 -35 -8 -24 1 -46 0.7 0.1 16 -49 36 -6 1 -46 0.1 0.4 43 -41 0 -44 2 -46 0.2 0.2 44 13 20 -34 1 -46 0.2 0.3 -4 -24 37 -5 1 -46 0.6 0.2 -24 -39 33 11 2 -46 0.7 0.4 50 10 -12 -39 1 -46 0.1 0.9 26 -39 -41 -43 2 -46 0.8 0.2 17 -13 -11 -26 1 -46 0.5 0.3 41 -50 41 -17 2 -46 0.3 0.6 -4 -41 11 -36 2 -46 0.8 0.6 20 -20 25 15 2 -46 0.2 0.2 18 -33 45 -41 1 -46 0.9 0.3 29 6 43 23 2 -46 0.6 0.7 -3 -21 -10 -50 1 -46 0.5 0.3 9 -28 0 -15 2 -46 0.6 0.1 -4 -17 18 -10 1 -46 0.8 0.9 12 -43 -17 -36 1 -46 0.4 0.4 49 -41 28 21 2 -46 0.1 0.9 2 -20 32 -39 2 -46 0.6 0.4 21 10 46 30 2 -46 0.5 0.7 37 -21 40 -4 2 -46 0.5 0.6 44 -9 -37 -41 1 -46 0.2 0.9 29 -14 47 -6 2 -46 0.5 0.4 26 -16 -15 -39 1 -46 0.3 0.5 46 21 -3 -29 1 -46 0.7 0.1 7 -43 23 -19 1 -46 0.5 0.9 46 -24 39 -29 2 -46 0.4 0.8 32 21 49 13 2 -46 0.3 0.1 32 8 7 -48 1 -46 0.4 0.1 0 -24 -7 -23 2 -46 0.8 0.8 50 3 -5 -20 1 -46 0.7 0.7 -10 -39 45 -29 2 -46 0.9 0.3 24 19 -37 -49 1 -46 0.9 0.6 43 36 38 -43 1 -46 0.9 0.9 34 -23 42 6 2 -46 0.4 0.2 -17 -18 47 3 2 -46 0.6 0.2 50 22 22 -5 1 -46 0.6 0.7 -6 -20 30 -46 2 -46 0.3 0.2 48 19 49 -45 1 -46 0.5 0.8 -15 -26 10 -20 2 -46 0.8 0.6 49 -24 43 9 1 -46 0.1 0.6 32 -46 33 -2 2 -46 0.9 0.8 12 -11 37 -1 2 -46 0.1 0.1 37 10 24 22 2 -46 0.6 0.1 10 -48 39 32 2 -46 0.3 0.6 30 -41 5 1 2 -46 0.7 0.4 41 18 2 -31 1 -46 0.5 0.8 44 5 23 7 1 -46 0.4 0.6 -2 -10 -3 -6 2 -46 0.5 0.8 2 -27 -16 -20 1 -46 0.4 0.9 -7 -45 7 -42 2 -46 0.9 0.4 -2 -23 45 33 2 -46 0.8 0.4 34 25 1 -11 1 -46 0.7 0.7 22 -30 18 9 2 -46 0.6 0.5 38 0 44 -48 1 -46 0.6 0.7 -12 -31 24 -5 2 -46 0.6 0.8 30 -44 47 -19 2 -46 0.4 0.6 7 -28 -24 -48 1 -46 0.2 0.7 8 -4 29 -21 2 -46 0.4 0.1 16 -8 39 -19 1 -46 0.3 0.3 40 2 4 -11 1 -46 0.5 0.7 37 3 -45 -50 1 -46 0.3 0.9 -26 -28 35 -36 2 -46 0.2 0.4 26 -30 31 -35 2 -46 0.7 0.2 3 -23 25 -35 2 -46 0.6 0.6 0 -28 0 -10 1 -46 0.1 0.5 41 16 41 23 2 -46 0.3 0.2 -8 -23 -29 -38 1 -46 0.6 0.8 -33 -45 28 -30 2 -46 0.5 0.1 14 -22 39 -32 2 -46 0.5 0.7 45 -32 -27 -46 2 -46 0.6 0.1 22 -15 19 7 1 -46 0.9 0.5 45 24 45 14 1 -46 0.8 0.2 47 -2 20 -6 1 -46 0.7 0.4 18 -45 29 -6 2 -46 0.7 0.7 -9 -39 -18 -32 2 -46 0.7 0.6 20 -14 2 -46 1 -46 0.7 0.3 25 -18 37 6 2 -46 0.1 0.1 -9 -15 -30 -47 2 -46 0.7 0.7 -22 -30 -21 -47 2 -46 0.3 0.2 5 -38 12 -2 2 -46 0.7 0.8 34 -17 41 27 2 -46 0.3 0.7 45 -45 42 11 2 -46 0.4 0.2 29 24 43 25 1 -46 0.3 0.6 24 -34 21 -16 2 -46 0.4 0.2 41 26 44 15 2 -46 0.9 0.5 21 6 23 20 1 -46 0.9 0.7 -6 -29 21 -24 2 -46 0.2 0.2 19 13 47 22 2 -46 0.7 0.3 45 22 25 -7 1 -46 0.9 0.7 -13 -44 48 10 2 -46 0.4 0.6 42 0 33 -15 1 -46 0.9 0.2 25 -21 9 -4 1 -46 0.6 0.3 42 32 29 25 1 -46 0.1 0.7 33 -46 46 37 2 -46 0.2 0.9 16 0 -24 -48 1 -46 0.6 0.5 21 -1 21 -37 1 -46 0.2 0.1 36 19 3 -49 1 -46 0.1 0.9 -39 -40 29 2 2 -46 0.5 0.8 6 -24 20 9 2 -46 0.3 0.4 2 -4 34 11 2 -46 0.6 0.4 49 -18 -15 -18 1 -46 0.6 0.2 19 -35 42 -18 1 -46 0.6 0.8 4 -38 23 18 2 -46 0.5 0.9 5 -5 -25 -33 2 -46 0.2 0.7 14 13 33 -32 2 -46 0.5 0.7 5 -33 50 25 2 -46 0.8 0.7 45 -31 43 26 2 -46 0.3 0.5 28 -31 48 -7 2 -46 0.8 0.9 20 -38 22 -23 1 -46 0.6 0.8 6 -45 -7 -15 2 -46 0.7 0.5 22 -24 
-5 -41 1 -46 0.8 0.7 43 35 -26 -36 1 -46 0.6 0.6 -32 -48 -20 -31 1 -46 0.6 0.7 48 1 -9 -31 1 -46 0.4 0.2 47 -31 37 -8 2 -46 0.2 0.4 19 7 25 19 2 -46 0.5 0.7 35 -31 0 -18 2 -46 0.1 0.9 9 -17 26 0 2 -46 0.8 0.5 40 12 48 -46 1 -46 0.9 0.9 38 26 12 -31 1 -46 0.9 0.8 -24 -36 26 5 2 -46 0.4 0.2 -47 -49 -36 -37 1 -46 0.7 0.6 2 -26 5 -49 1 -46 0.8 0.2 43 -47 -28 -40 1 -46 0.4 0.9 6 -31 7 -35 2 -46 0.6 0.4 47 -18 48 -26 2 -46 0.2 0.4 49 33 35 -28 1 -46 0.3 0.9 28 19 16 -8 2 -46 0.3 0.2 35 -31 1 -22 1 -46 0.7 0.8 16 -1 33 -36 2 -46 0.7 0.2 25 -15 37 21 2 -46 0.9 0.1 42 -27 49 -48 1 -46 0.8 0.3 44 -45 -18 -19 2 -46 0.3 0.9 -45 -47 -26 -44 1 -46 0.1 0.5 30 4 31 12 2 -46 0.3 0.5 7 -19 32 -17 1 -46 0.9 0.4 41 3 32 -12 1 -46 0.1 0.6 38 19 -16 -19 1 -46 0.1 0.1 11 -39 50 -37 2 -46 0.7 0.7 36 -19 18 -34 1 -46 0.1 0.9 33 -38 12 -23 2 -46 0.1 0.3 28 -6 -1 -10 2 -46 0.8 0.7 40 24 -10 -33 1 -46 0.2 0.4 4 -1 11 3 2 -46 0.6 0.8 48 37 29 -4 2 -46 0.3 0.1 -9 -23 43 -25 2 -46 0.6 0.4 8 -18 20 -36 1 -46 0.1 0.8 22 0 50 13 2 -46 0.4 0.9 23 -45 -19 -43 1 -46 0.8 0.4 38 32 -41 -47 1 -46 0.8 0.6 -29 -38 40 -12 2 -46 0.9 0.4 10 -27 36 9 2 -46 0.5 0.4 -28 -29 16 15 2 -46 0.5 0.2 46 2 37 -27 1 -46 0.8 0.8 30 16 37 -45 1 -46 0.3 0.6 33 -28 38 36 2 -46 0.4 0.6 24 -26 23 -5 1 -46 0.2 0.7 -40 -49 -6 -33 2 -46 0.4 0.9 -29 -30 1 -20 2 -46 0.6 0.5 40 -19 41 7 1 -46 0.9 0.2 48 31 -22 -44 1 -46 0.1 0.7 -6 -34 22 -41 2 -46 0.9 0.7 10 -30 9 -24 1 -46 0.4 0.1 -2 -3 9 -30 1 -46 0.6 0.8 -40 -42 18 0 2 -46 0.1 0.8 -6 -37 30 -5 2 -46 0.9 0.9 24 13 23 2 1 -46 0.2 0.5 -18 -38 35 27 2 -46 0.5 0.1 28 -50 -40 -49 1 -46 0.5 0.8 -36 -46 -26 -27 2 -46 0.7 0.3 -44 -47 10 -29 2 -46 0.4 0.9 35 -50 -10 -17 1 -46 0.2 0.6 44 20 45 -50 1 -46 0.7 0.9 50 -45 46 39 2 -46 0.1 0.3 48 -34 48 -33 1 -46 0.5 0.6 34 -37 -6 -40 1 -46 0.4 0.5 50 2 7 -36 1 -46 0.2 0.7 -3 -24 36 9 2 -46 0.8 0.6 27 -37 -10 -33 1 -46 0.4 0.8 36 -12 30 -27 1 -46 0.9 0.7 41 -5 46 43 1 -46 0.3 0.6 -3 -6 -6 -20 2 -46 0.9 0.7 40 8 7 -35 1 -46 0.9 0.5 13 -20 -18 -38 1 -46 0.9 0.5 38 20 34 -26 1 -47 0.8 0.7 -25 -26 44 6 2 -47 0.9 0.2 25 11 -31 -39 1 -47 0.6 0.7 7 -23 47 -19 2 -47 0.4 0.6 49 40 18 -45 2 -47 0.1 0.1 -45 -50 45 1 2 -47 0.6 0.4 34 7 17 -46 1 -47 0.7 0.3 -15 -35 30 -22 2 -47 0.1 0.2 4 3 -3 -8 1 -47 0.2 0.5 48 -40 -7 -37 1 -47 0.6 0.3 16 -44 -15 -18 1 -47 0.4 0.6 34 -25 36 -9 2 -47 0.5 0.1 49 27 37 36 1 -47 0.2 0.7 35 -37 11 5 2 -47 0.3 0.5 -19 -35 -18 -36 2 -47 0.8 0.1 43 35 12 -5 1 -47 0.9 0.5 36 -32 12 -35 1 -47 0.1 0.5 -11 -43 25 -23 2 -47 0.8 0.7 27 -49 8 -46 2 -47 0.8 0.4 -10 -17 -1 -45 1 -47 0.6 0.6 -1 -10 7 -47 2 -47 0.8 0.9 -17 -23 -7 -39 2 -47 0.7 0.7 -3 -14 48 -49 1 -47 0.5 0.8 -27 -34 26 14 2 -47 0.1 0.4 31 30 28 -43 1 -47 0.9 0.6 42 11 19 -6 1 -47 0.1 0.5 45 -27 -12 -24 1 -47 0.2 0.7 25 -18 -39 -47 1 -47 0.4 0.9 14 -21 -6 -41 1 -47 0.1 0.4 44 11 48 22 1 -47 0.6 0.8 36 -32 -3 -39 1 -47 0.2 0.3 44 -30 39 23 2 -47 0.6 0.2 -29 -32 12 -31 1 -47 0.9 0.2 43 -25 26 16 1 -47 0.8 0.9 14 13 -2 -44 1 -47 0.5 0.9 36 -13 21 -38 1 -47 0.4 0.1 41 35 22 7 1 -47 0.1 0.1 14 -1 -3 -11 2 -47 0.9 0.9 27 -14 36 -30 1 -47 0.1 0.7 34 -19 15 1 1 -47 0.7 0.5 37 -26 39 -2 2 -47 0.4 0.5 43 12 45 28 1 -47 0.1 0.5 -18 -26 17 -4 2 -47 0.8 0.2 8 4 17 -16 1 -47 0.2 0.9 4 -12 -7 -21 1 -47 0.8 0.3 -5 -17 38 -3 1 -47 0.9 0.2 50 5 9 -30 1 -47 0.9 0.1 26 -35 18 -28 1 -47 0.5 0.4 42 38 -10 -22 1 -47 0.9 0.3 14 -21 29 -50 1 -47 0.3 0.4 -30 -45 25 -14 1 -47 0.7 0.6 45 4 34 7 1 -47 0.3 0.7 26 5 -25 -47 1 -47 0.9 0.2 6 -20 49 15 2 -47 0.2 0.6 35 33 32 -28 1 -47 0.8 0.8 38 27 4 -14 1 -47 0.6 0.2 -2 -27 -4 -10 1 
-47 0.7 0.1 -28 -29 -14 -35 2 -47 0.9 0.6 49 8 11 5 1 -47 0.9 0.6 35 -22 28 17 2 -47 0.3 0.7 34 26 49 48 2 -47 0.6 0.3 -28 -35 -1 -21 1 -47 0.9 0.5 11 -21 41 30 2 -47 0.4 0.6 48 4 0 -48 1 -47 0.6 0.7 -13 -18 -34 -39 1 -47 0.1 0.8 50 13 34 -34 1 -47 0.7 0.6 -31 -33 47 -47 2 -47 0.9 0.3 -10 -45 17 -38 2 -47 0.9 0.1 20 -8 -9 -17 1 -47 0.8 0.3 -34 -41 19 -48 2 -47 0.8 0.7 30 -2 38 -25 1 -47 0.7 0.7 24 22 -29 -32 1 -47 0.5 0.6 39 20 36 12 2 -47 0.8 0.4 14 7 45 -6 1 -47 0.4 0.8 32 12 49 -24 2 -47 0.5 0.6 40 -17 32 -17 1 -47 0.8 0.4 38 -28 1 -32 1 -47 0.9 0.3 32 -9 -20 -31 1 -47 0.4 0.8 46 33 15 10 2 -47 0.9 0.1 20 -31 48 -50 1 -47 0.7 0.5 35 -4 46 37 2 -47 0.4 0.4 11 -28 -9 -21 2 -47 0.5 0.9 35 24 8 -11 2 -47 0.1 0.6 24 2 32 -15 2 -47 0.5 0.2 8 -22 37 15 2 -47 0.6 0.9 20 -44 42 -11 2 -47 0.5 0.6 5 -19 34 -17 2 -47 0.8 0.1 43 -8 26 -40 2 -47 0.4 0.9 6 -45 30 2 2 -47 0.1 0.4 40 -17 29 4 2 -47 0.3 0.2 -14 -18 5 -31 2 -47 0.9 0.5 -13 -15 -14 -35 1 -47 0.6 0.1 -22 -29 12 1 2 -47 0.8 0.5 6 -44 25 -49 1 -47 0.3 0.1 5 -33 41 -16 2 -47 0.2 0.2 35 25 -30 -37 1 -47 0.4 0.1 -4 -16 38 -8 2 -47 0.5 0.8 50 22 28 -25 1 -47 0.9 0.4 -14 -17 -23 -35 1 -47 0.7 0.7 45 5 45 -48 2 -47 0.7 0.5 42 -42 24 -15 2 -47 0.8 0.1 12 -19 12 -30 1 -47 0.3 0.4 27 -44 -9 -36 2 -47 0.8 0.5 21 -7 -5 -11 1 -47 0.1 0.4 49 -18 15 6 2 -47 0.9 0.9 33 0 -8 -16 1 -47 0.7 0.3 5 -41 36 -26 1 -47 0.7 0.2 37 -8 12 6 1 -47 0.4 0.1 37 -18 13 -1 1 -47 0.9 0.5 39 -29 20 12 1 -47 0.2 0.2 -37 -45 49 -17 2 -47 0.1 0.5 12 -49 -36 -48 2 -47 0.9 0.2 9 -9 10 -34 2 -47 0.7 0.8 37 6 -15 -28 1 -47 0.4 0.4 11 -16 18 -35 1 -47 0.2 0.8 37 -2 28 -49 1 -47 0.4 0.4 23 -47 22 -24 1 -47 0.4 0.5 29 3 45 -8 1 -47 0.8 0.6 31 -4 31 -29 1 -47 0.2 0.9 29 -49 -10 -48 2 -47 0.7 0.4 2 -15 32 -25 2 -47 0.7 0.7 48 41 19 -39 1 -47 0.1 0.1 -8 -37 21 -17 2 -47 0.8 0.7 28 16 46 28 2 -47 0.9 0.1 50 7 45 -24 1 -47 0.9 0.5 -7 -24 6 -1 2 -47 0.4 0.8 35 6 -30 -33 1 -47 0.4 0.8 -27 -44 -14 -37 2 -47 0.4 0.2 -10 -28 8 -8 2 -47 0.5 0.3 42 -10 6 -42 1 -47 0.9 0.9 15 -49 40 -22 2 -47 0.3 0.7 21 -36 41 11 2 -47 0.6 0.7 44 -45 46 44 2 -47 0.5 0.5 21 18 41 40 2 -47 0.4 0.9 33 16 -4 -41 2 -47 0.3 0.7 49 47 33 -30 1 -47 0.9 0.9 36 -1 -4 -40 1 -47 0.2 0.9 1 -45 19 -40 2 -47 0.8 0.3 22 -37 50 0 1 -47 0.1 0.3 32 -9 49 -38 1 -47 0.5 0.3 36 -33 44 6 2 -47 0.1 0.6 -20 -26 -33 -39 1 -47 0.1 0.6 19 -19 5 -32 2 -47 0.6 0.1 0 -32 -23 -49 2 -47 0.7 0.5 8 -38 31 27 2 -47 0.9 0.6 11 -46 30 -41 1 -47 0.5 0.5 27 -31 20 -41 1 -47 0.6 0.9 4 -20 39 3 2 -47 0.1 0.9 43 40 30 -21 1 -47 0.8 0.8 24 -11 -24 -32 1 -47 0.3 0.4 22 -36 47 -30 2 -47 0.2 0.9 44 -24 42 -31 2 -47 0.9 0.1 18 -4 -30 -32 1 -47 0.6 0.4 11 -5 47 -15 2 -47 0.5 0.2 -1 -28 9 -21 1 -47 0.4 0.7 9 3 34 19 2 -47 0.3 0.4 45 -9 45 0 2 -47 0.9 0.8 25 9 36 -1 2 -47 0.8 0.5 41 19 16 0 1 -47 0.7 0.3 2 -14 4 -44 2 -47 0.4 0.6 21 -10 21 -12 2 -47 0.3 0.5 36 25 4 -32 1 -47 0.5 0.1 35 -7 15 -34 1 -47 0.4 0.7 22 9 -5 -41 1 -47 0.8 0.8 20 -35 6 -12 2 -47 0.6 0.4 -29 -48 12 -17 2 -47 0.6 0.8 -15 -32 -30 -48 1 -47 0.9 0.8 49 41 47 8 1 -47 0.6 0.4 11 -27 25 -34 2 -47 0.9 0.3 14 7 -15 -22 1 -47 0.3 0.4 41 35 24 -10 1 -47 0.5 0.1 -5 -17 -1 -25 1 -47 0.8 0.2 -41 -44 18 -20 2 -47 0.7 0.6 2 -9 15 -27 1 -47 0.7 0.3 -21 -30 34 -31 1 -47 0.9 0.5 -2 -17 10 -23 2 -47 0.8 0.7 49 -36 17 -4 2 -47 0.7 0.2 32 4 1 -9 1 -47 0.1 0.4 -30 -44 -2 -15 2 -47 0.4 0.2 46 -30 28 22 2 -47 0.8 0.3 38 8 39 32 2 -47 0.1 0.7 3 -18 -16 -49 2 -47 0.7 0.1 -29 -46 4 -4 2 -47 0.1 0.9 30 19 -39 -40 1 -47 0.8 0.4 26 -43 29 -8 1 -47 0.9 0.9 29 18 50 7 2 -47 0.3 0.2 9 -38 -30 -44 1 -47 0.8 0.2 43 -25 -28 -43 
1 -47 0.1 0.3 25 -13 45 -24 2 -47 0.3 0.6 31 -18 -11 -18 1 -47 0.3 0.9 -36 -40 13 -30 2 -47 0.1 0.6 1 -49 17 12 2 -47 0.3 0.2 -22 -48 41 -9 2 -47 0.1 0.9 24 -43 24 -42 2 -47 0.3 0.1 34 -28 12 -45 1 -47 0.9 0.7 32 -22 36 -50 1 -47 0.3 0.4 -15 -34 39 34 2 -47 0.4 0.9 49 -18 27 -43 2 -47 0.6 0.6 48 12 46 -15 1 -47 0.6 0.8 47 7 -19 -25 1 -47 0.8 0.1 45 41 9 -36 1 -47 0.5 0.1 33 -6 4 -25 1 -47 0.1 0.1 47 -45 43 20 2 -47 0.7 0.5 21 -17 19 -16 1 -47 0.6 0.8 41 -10 -1 -21 1 -47 0.4 0.2 45 -22 12 -34 1 -47 0.5 0.4 47 -43 15 -23 2 -47 0.5 0.8 18 -40 38 -47 2 -47 0.6 0.5 38 -11 -14 -37 1 -47 0.4 0.4 -11 -33 45 -5 2 -47 0.5 0.3 -17 -42 22 -6 2 -47 0.2 0.4 24 -46 35 6 2 -47 0.3 0.5 28 7 45 -30 2 -47 0.8 0.2 12 -13 -23 -44 1 -47 0.6 0.4 26 19 38 -9 2 -47 0.7 0.8 -10 -45 21 -7 2 -47 0.3 0.9 39 18 50 -36 2 -47 0.2 0.5 20 -18 35 -5 2 -47 0.2 0.5 50 5 3 -1 1 -47 0.5 0.6 50 -29 25 -27 1 -47 0.7 0.5 38 11 47 -40 1 -47 0.6 0.4 50 -48 41 16 2 -47 0.7 0.1 20 7 11 -48 1 -47 0.4 0.5 24 -23 10 -16 2 -47 0.1 0.7 37 -17 37 -16 2 -47 0.8 0.3 7 -10 -2 -10 1 -47 0.2 0.8 -15 -18 -25 -29 1 -47 0.6 0.6 44 2 9 -34 1 -47 0.2 0.3 41 -13 30 -43 1 -47 0.1 0.4 25 -14 -12 -46 1 -47 0.6 0.7 40 -11 -13 -16 1 -47 0.8 0.2 41 6 0 -12 1 -47 0.9 0.8 -14 -43 34 -11 2 -47 0.4 0.1 -38 -48 27 -4 2 -47 0.5 0.1 20 -46 32 -40 1 -47 0.1 0.9 27 -37 30 6 2 -47 0.2 0.5 -2 -33 -18 -29 2 -47 0.7 0.4 36 35 36 -48 1 -47 0.7 0.8 35 28 -13 -36 1 -47 0.8 0.3 34 10 -9 -16 1 -47 0.7 0.6 44 -25 9 -4 1 -47 0.5 0.4 22 -11 45 38 1 -47 0.9 0.7 -14 -36 24 10 2 -47 0.5 0.5 43 -29 50 46 2 -47 0.1 0.1 26 -44 26 15 2 -47 0.4 0.7 2 -50 6 -42 1 -47 0.7 0.4 35 -49 37 -11 1 -47 0.1 0.7 -27 -49 25 -3 2 -47 0.4 0.5 -14 -16 43 2 2 -47 0.9 0.8 38 -38 43 -14 1 -47 0.1 0.4 -3 -7 -1 -23 2 -47 0.5 0.2 18 -11 14 13 2 -47 0.3 0.1 8 -39 38 -34 1 -47 0.2 0.7 46 -10 24 -17 1 -47 0.6 0.1 24 4 32 -10 1 -47 0.6 0.6 41 -2 10 -25 1 -47 0.3 0.2 7 -46 49 12 2 -47 0.4 0.1 37 30 -21 -32 1 -47 0.3 0.6 50 21 -31 -35 1 -47 0.9 0.8 -28 -42 15 -15 2 -47 0.1 0.5 -6 -45 5 -23 2 -47 0.5 0.3 -12 -23 -2 -24 1 -47 0.9 0.6 38 5 34 25 1 -47 0.9 0.1 14 -12 6 -34 1 -47 0.4 0.9 45 10 8 -43 1 -47 0.2 0.6 8 -36 40 -49 2 -47 0.4 0.8 33 28 37 -5 2 -47 0.5 0.3 -10 -44 50 -9 1 -47 0.2 0.3 -4 -49 45 -4 2 -47 0.8 0.6 39 14 32 2 1 -47 0.4 0.7 -2 -16 9 6 2 -47 0.4 0.3 -16 -30 7 -8 2 -47 0.7 0.9 19 -50 17 9 2 -47 0.2 0.7 28 -38 4 -49 1 -47 0.4 0.1 48 33 1 -47 1 -47 0.3 0.9 -3 -16 35 -30 2 -47 0.6 0.2 11 1 44 -13 1 -47 0.2 0.9 7 -29 -1 -44 1 -47 0.5 0.9 43 8 -26 -42 1 -47 0.1 0.1 50 -25 2 -39 1 -47 0.8 0.1 47 44 46 39 2 -47 0.1 0.7 25 -46 32 -48 2 -47 0.1 0.4 50 -40 7 -33 2 -47 0.9 0.4 28 -22 8 6 2 -47 0.7 0.2 32 -14 41 24 2 -47 0.5 0.1 43 0 -25 -31 1 -47 0.9 0.4 20 -19 42 23 2 -47 0.9 0.7 40 7 20 -15 1 -47 0.9 0.4 -17 -26 34 -31 2 -47 0.7 0.6 -10 -50 -3 -35 2 -47 0.4 0.8 33 25 12 -11 2 -47 0.3 0.8 22 -36 43 -40 2 -47 0.5 0.9 28 14 -1 -20 1 -47 0.5 0.5 -30 -46 20 -20 1 -47 0.7 0.7 -21 -35 9 -24 1 -47 0.1 0.5 18 11 27 -41 1 -47 0.6 0.4 10 -4 32 -30 2 -47 0.4 0.2 8 -39 40 7 2 -47 0.9 0.4 49 1 11 -31 1 -47 0.1 0.1 -41 -45 -13 -25 1 -47 0.5 0.4 29 -33 -19 -38 1 -48 0.5 0.9 -1 -43 -35 -41 1 -48 0.9 0.7 -19 -31 43 27 2 -48 0.6 0.7 -41 -48 6 -40 2 -48 0.9 0.6 27 -38 -5 -24 1 -48 0.4 0.5 28 5 -47 -50 1 -48 0.8 0.6 -8 -33 33 2 2 -48 0.8 0.1 38 10 48 17 1 -48 0.4 0.4 50 15 -34 -48 1 -48 0.5 0.8 -47 -48 13 -38 2 -48 0.3 0.6 -33 -47 38 15 2 -48 0.9 0.6 3 -14 43 12 2 -48 0.5 0.5 -26 -28 41 -41 2 -48 0.5 0.5 37 -39 41 -6 2 -48 0.7 0.1 18 -3 43 -24 1 -48 0.5 0.9 -15 -21 19 0 2 -48 0.3 0.6 -17 -42 -11 -13 2 -48 0.7 0.8 29 23 23 16 1 
-48 0.3 0.1 23 -30 42 -4 2 -48 0.2 0.9 33 -44 36 23 2 -48 0.8 0.1 -37 -41 27 -35 2 -48 0.6 0.4 -28 -48 50 -18 2 -48 0.3 0.5 -7 -18 35 28 2 -48 0.5 0.9 19 -28 33 -11 2 -48 0.8 0.7 39 -13 39 3 1 -48 0.6 0.2 36 8 -15 -17 1 -48 0.4 0.4 41 3 9 -22 1 -48 0.4 0.6 -16 -21 -24 -35 1 -48 0.2 0.9 35 11 26 -8 2 -48 0.2 0.5 -34 -42 6 -2 2 -48 0.5 0.8 9 -12 -5 -21 1 -48 0.8 0.8 10 -2 42 3 2 -48 0.2 0.1 -15 -43 46 -32 2 -48 0.3 0.1 -18 -37 35 23 2 -48 0.8 0.6 38 -48 50 -14 2 -48 0.3 0.5 43 -49 40 8 2 -48 0.2 0.3 -12 -40 -37 -48 1 -48 0.9 0.7 50 -14 45 25 1 -48 0.5 0.4 -40 -44 17 -28 2 -48 0.3 0.4 4 -27 14 13 2 -48 0.5 0.6 18 -7 26 4 2 -48 0.2 0.9 47 -28 16 -41 2 -48 0.3 0.7 8 -21 26 15 2 -48 0.2 0.6 -20 -27 -17 -26 1 -48 0.4 0.6 47 -13 8 -45 1 -48 0.3 0.9 19 1 -1 -39 1 -48 0.6 0.6 6 -21 41 -8 2 -48 0.9 0.9 26 -2 36 -33 1 -48 0.4 0.5 -24 -32 8 -43 2 -48 0.7 0.4 45 21 49 29 1 -48 0.9 0.8 40 -44 18 -11 1 -48 0.3 0.5 45 35 11 -30 1 -48 0.8 0.5 8 -37 -16 -31 1 -48 0.1 0.1 46 -37 35 -38 2 -48 0.3 0.7 30 29 18 -30 1 -48 0.6 0.3 0 -40 26 -33 1 -48 0.3 0.6 49 10 30 24 2 -48 0.1 0.1 27 26 41 -37 1 -48 0.1 0.8 38 33 17 -9 1 -48 0.7 0.8 48 -12 12 -36 1 -48 0.2 0.7 -7 -22 30 8 2 -48 0.5 0.4 5 -44 -19 -48 1 -48 0.8 0.5 19 11 26 17 2 -48 0.4 0.4 1 0 25 -22 1 -48 0.5 0.4 39 -33 -15 -35 1 -48 0.4 0.5 -6 -49 -26 -34 2 -48 0.7 0.8 7 -36 -14 -37 1 -48 0.3 0.7 37 7 36 35 2 -48 0.8 0.5 26 -11 20 -34 1 -48 0.9 0.2 -1 -20 12 -50 1 -48 0.1 0.5 -11 -41 24 -29 2 -48 0.1 0.8 38 33 39 -41 1 -48 0.5 0.6 34 -18 -7 -9 1 -48 0.7 0.1 40 -39 5 -26 1 -48 0.8 0.9 12 -34 -32 -50 1 -48 0.8 0.2 37 21 18 3 1 -48 0.4 0.3 17 -22 -21 -37 1 -48 0.7 0.4 13 -1 38 -50 1 -48 0.6 0.1 -44 -48 38 30 2 -48 0.7 0.9 33 7 -25 -30 1 -48 0.2 0.4 44 -14 49 24 2 -48 0.1 0.8 -29 -50 10 3 2 -48 0.2 0.4 -15 -20 29 -42 2 -48 0.5 0.3 -23 -29 -3 -31 2 -48 0.1 0.5 26 -38 13 -25 2 -48 0.8 0.9 6 -44 41 -6 2 -48 0.6 0.1 46 42 -15 -33 1 -48 0.4 0.2 -9 -21 32 -13 2 -48 0.2 0.2 31 1 31 18 2 -48 0.4 0.8 12 -19 11 -42 2 -48 0.2 0.4 -9 -21 42 12 2 -48 0.2 0.5 8 -18 27 -48 2 -48 0.2 0.6 -16 -29 45 11 2 -48 0.8 0.9 24 -25 -24 -38 1 -48 0.2 0.5 22 -3 -35 -36 1 -48 0.5 0.2 -29 -38 44 -16 2 -48 0.1 0.7 38 13 16 -21 1 -48 0.1 0.9 -9 -45 -13 -18 2 -48 0.9 0.4 22 -41 38 10 2 -48 0.3 0.7 21 -41 23 -37 2 -48 0.5 0.4 26 4 45 39 2 -48 0.1 0.4 34 -6 19 -11 2 -48 0.1 0.4 -2 -44 -11 -40 2 -48 0.7 0.3 11 -6 43 23 2 -48 0.5 0.3 16 15 -33 -42 1 -48 0.3 0.2 -32 -44 -33 -50 1 -48 0.6 0.3 26 -32 10 -46 1 -48 0.4 0.2 35 22 11 -19 1 -48 0.2 0.3 18 -28 24 -39 2 -48 0.8 0.3 17 -8 -14 -41 1 -48 0.1 0.4 44 -29 34 11 2 -48 0.5 0.7 39 11 43 -17 2 -48 0.4 0.4 -27 -38 46 -19 2 -48 0.3 0.8 0 -4 40 -22 2 -48 0.9 0.2 34 -21 6 -50 1 -48 0.4 0.6 -21 -22 -7 -32 2 -48 0.1 0.4 26 18 -26 -44 1 -48 0.3 0.9 49 15 22 -16 1 -48 0.1 0.3 41 20 1 -34 1 -48 0.7 0.8 25 18 -4 -29 1 -48 0.3 0.9 -4 -35 24 -10 2 -48 0.5 0.2 11 -17 10 -32 1 -48 0.2 0.2 -27 -32 36 -23 2 -48 0.5 0.3 -28 -50 5 -37 2 -48 0.6 0.1 8 -24 -29 -47 1 -48 0.8 0.8 14 -1 -23 -32 1 -48 0.1 0.5 -11 -12 40 27 2 -48 0.6 0.5 49 -34 2 -24 1 -48 0.6 0.3 -7 -22 50 -36 2 -48 0.8 0.5 37 1 42 10 1 -48 0.5 0.9 43 -26 14 4 1 -48 0.4 0.2 50 -31 19 11 2 -48 0.9 0.5 29 -31 7 -4 1 -48 0.7 0.2 29 8 -39 -44 1 -48 0.3 0.7 49 42 -32 -50 1 -48 0.1 0.6 6 -37 -15 -28 2 -48 0.5 0.1 37 -1 15 14 2 -48 0.8 0.8 -5 -25 9 -43 2 -48 0.3 0.1 17 5 10 -13 1 -48 0.8 0.2 -4 -41 4 -33 1 -48 0.1 0.4 -3 -49 -4 -33 2 -48 0.6 0.2 35 -6 49 -39 1 -48 0.7 0.9 -2 -6 -22 -44 1 -48 0.4 0.5 -17 -48 47 -11 2 -48 0.9 0.9 47 -33 -10 -31 1 -48 0.9 0.7 10 -16 -21 -42 1 -48 0.7 0.5 48 -39 42 -38 1 -48 0.1 0.8 -12 
-47 27 16 2 -48 0.6 0.5 1 -30 3 -42 1 -48 0.1 0.5 45 39 29 16 1 -48 0.7 0.7 45 39 12 -6 1 -48 0.8 0.7 37 -31 -37 -40 1 -48 0.9 0.5 46 -8 39 -35 1 -48 0.7 0.2 20 2 22 2 1 -48 0.2 0.8 22 -49 28 -41 2 -48 0.3 0.4 -12 -25 27 -6 2 -48 0.6 0.3 50 -12 -21 -30 1 -48 0.3 0.5 15 -26 4 -1 2 -48 0.5 0.7 16 13 14 -21 1 -48 0.8 0.5 42 -41 48 34 2 -48 0.1 0.5 20 -35 42 -26 2 -48 0.4 0.2 18 -30 20 -41 1 -48 0.5 0.5 33 -35 18 -48 1 -48 0.4 0.2 -17 -39 43 11 2 -48 0.3 0.7 27 -30 3 -21 2 -48 0.4 0.1 39 20 34 18 1 -48 0.7 0.6 48 24 6 0 1 -48 0.4 0.1 39 -23 -6 -28 1 -48 0.6 0.8 -4 -29 3 2 2 -48 0.1 0.4 3 -35 14 -19 2 -48 0.5 0.5 -23 -32 11 -44 2 -48 0.6 0.5 -5 -10 26 -1 2 -48 0.7 0.4 23 -21 46 -34 1 -48 0.3 0.1 -2 -43 -15 -21 2 -48 0.9 0.4 49 39 49 26 1 -48 0.7 0.9 30 10 8 -20 1 -48 0.2 0.9 47 -13 17 -37 2 -48 0.5 0.9 4 2 -25 -32 1 -48 0.4 0.5 24 -8 46 0 2 -48 0.9 0.1 33 29 42 -47 1 -48 0.2 0.1 4 -18 -12 -17 1 -48 0.6 0.8 47 -40 10 -9 1 -48 0.9 0.8 -18 -41 23 -14 2 -48 0.5 0.4 21 15 11 -13 1 -48 0.8 0.9 39 17 46 -27 1 -48 0.7 0.6 40 -18 45 -34 1 -48 0.3 0.7 50 18 12 8 1 -48 0.9 0.5 45 41 -2 -23 1 -48 0.7 0.2 49 -47 25 -14 1 -48 0.4 0.3 35 16 28 20 1 -48 0.5 0.8 16 6 17 -10 1 -48 0.3 0.7 33 -8 5 -9 1 -48 0.6 0.8 35 -8 42 21 2 -48 0.4 0.3 37 -17 39 24 2 -48 0.3 0.9 -6 -45 31 7 2 -48 0.7 0.1 33 13 -3 -6 1 -48 0.2 0.1 32 1 3 -18 1 -48 0.6 0.2 -4 -39 45 23 2 -48 0.4 0.1 25 -13 -9 -19 1 -48 0.4 0.3 37 -25 39 -14 1 -48 0.8 0.7 35 -25 12 5 1 -48 0.3 0.1 29 22 -11 -27 1 -48 0.5 0.9 36 22 42 -15 1 -48 0.6 0.1 39 -37 -2 -5 1 -48 0.4 0.2 23 -18 -26 -45 1 -48 0.4 0.8 26 -28 -24 -28 1 -48 0.4 0.4 23 -1 1 -17 1 -48 0.9 0.7 6 -27 -5 -6 1 -48 0.7 0.9 -21 -29 29 -3 2 -48 0.7 0.4 9 -2 11 -46 1 -48 0.9 0.3 31 -13 43 21 1 -48 0.5 0.9 27 -35 2 -11 1 -48 0.1 0.8 10 -4 16 -41 1 -48 0.7 0.6 -19 -45 48 22 2 -48 0.2 0.9 16 -50 45 -3 2 -48 0.6 0.5 28 -2 50 -22 1 -48 0.1 0.2 40 -40 -36 -48 1 -48 0.8 0.4 48 -32 15 -32 1 -48 0.8 0.9 47 -45 -43 -45 1 -48 0.8 0.8 -16 -49 11 5 2 -48 0.7 0.4 13 -22 16 12 2 -48 0.9 0.8 -25 -45 -4 -50 2 -48 0.4 0.5 39 32 -17 -43 1 -48 0.7 0.2 -29 -30 15 14 2 -48 0.7 0.3 -2 -26 1 -5 2 -48 0.5 0.9 -11 -43 44 37 2 -48 0.4 0.1 35 -19 47 -41 1 -48 0.3 0.1 8 -44 10 6 2 -48 0.6 0.7 21 -46 27 18 2 -48 0.5 0.6 39 -39 15 -7 2 -48 0.3 0.5 2 -18 50 23 2 -48 0.7 0.5 19 -9 -18 -45 1 -48 0.5 0.9 38 -6 13 -5 1 -48 0.3 0.5 19 -23 -42 -49 1 -48 0.2 0.4 13 8 45 -1 2 -48 0.6 0.7 -6 -47 -2 -21 2 -48 0.2 0.4 -24 -32 25 19 2 -48 0.5 0.9 7 -41 -14 -30 1 -48 0.2 0.9 17 2 21 6 2 -48 0.1 0.3 -27 -43 24 -33 2 -48 0.6 0.8 -28 -34 18 -28 2 -48 0.4 0.3 -3 -46 10 -43 2 -48 0.2 0.7 16 -5 -27 -37 1 -48 0.9 0.3 -13 -24 -5 -41 1 -48 0.2 0.3 -13 -41 23 10 2 -48 0.5 0.7 37 -17 -19 -49 1 -48 0.9 0.9 -11 -18 -19 -47 1 -48 0.7 0.4 -3 -20 48 37 2 -48 0.6 0.7 17 -47 38 -9 2 -48 0.8 0.2 38 -21 19 6 1 -48 0.6 0.6 -3 -50 35 -3 2 -48 0.4 0.4 16 8 30 29 2 -48 0.5 0.2 38 -47 38 36 2 -48 0.8 0.4 13 -10 -24 -50 1 -48 0.9 0.6 -10 -40 4 -36 2 -48 0.2 0.1 38 -32 7 -33 1 -48 0.6 0.6 24 5 11 -23 1 -48 0.9 0.8 -5 -22 42 39 2 -48 0.5 0.1 12 -48 38 21 2 -48 0.7 0.7 45 13 32 -4 1 -48 0.5 0.6 41 3 20 9 1 -48 0.9 0.7 -5 -10 22 19 2 -48 0.1 0.8 -6 -24 0 -34 2 -48 0.5 0.7 -27 -33 44 -11 2 -48 0.8 0.8 8 -6 37 -11 2 -48 0.3 0.1 36 -23 -35 -50 1 -48 0.6 0.1 21 -1 11 -17 1 -48 0.6 0.6 -7 -26 41 10 2 -48 0.8 0.3 18 -7 42 -32 1 -48 0.3 0.9 -17 -45 18 -7 2 -48 0.1 0.9 17 -14 -39 -41 1 -48 0.4 0.8 -9 -15 14 -39 2 -48 0.8 0.4 -28 -32 -39 -41 1 -48 0.9 0.6 40 24 29 28 1 -48 0.8 0.7 48 18 15 -40 1 -48 0.4 0.7 -3 -23 -12 -15 2 -48 0.4 0.2 46 -9 15 -25 1 -48 0.8 0.9 4 -12 20 -16 2 -48 0.7 
0.1 47 42 11 -6 1 -48 0.1 0.4 6 2 44 -19 2 -48 0.8 0.6 -15 -33 36 -35 2 -48 0.1 0.6 40 -24 30 -34 2 -48 0.5 0.2 48 29 48 12 1 -48 0.5 0.8 34 -14 -1 -45 1 -48 0.8 0.3 49 -18 8 -35 1 -48 0.5 0.2 -18 -48 -1 -26 1 -48 0.9 0.2 13 -35 47 -38 1 -48 0.5 0.3 33 -28 35 -12 1 -48 0.1 0.8 35 -31 9 -11 2 -48 0.6 0.4 7 -50 37 -31 2 -48 0.1 0.4 0 -39 -9 -44 1 -48 0.7 0.8 -4 -11 -1 -13 1 -48 0.4 0.6 -25 -31 -3 -14 2 -48 0.7 0.1 39 8 37 16 1 -48 0.4 0.1 6 -31 9 -31 1 -48 0.1 0.9 8 -49 -1 -39 2 -48 0.5 0.2 -2 -36 -12 -33 1 -48 0.3 0.2 12 -45 48 -18 2 -48 0.5 0.5 49 -36 -10 -14 1 -48 0.1 0.7 49 -39 46 8 2 -48 0.1 0.5 -12 -46 32 -22 2 -49 0.9 0.7 22 -29 20 -32 1 -49 0.1 0.2 37 17 25 4 1 -49 0.7 0.1 22 7 19 13 2 -49 0.4 0.6 -16 -40 -2 -8 2 -49 0.8 0.1 6 -20 -22 -36 1 -49 0.5 0.9 42 12 46 -14 1 -49 0.5 0.5 9 -11 26 3 2 -49 0.2 0.6 -25 -37 48 -12 2 -49 0.9 0.4 2 -22 8 -37 1 -49 0.4 0.2 -8 -21 9 -49 1 -49 0.1 0.7 36 -15 29 -14 2 -49 0.3 0.6 36 -27 30 -33 2 -49 0.7 0.8 37 -8 32 -35 2 -49 0.3 0.2 21 -42 31 17 2 -49 0.4 0.2 -22 -38 29 -44 1 -49 0.1 0.1 15 -18 44 33 2 -49 0.6 0.5 0 -4 19 -17 2 -49 0.3 0.3 40 35 -13 -44 1 -49 0.5 0.4 23 -32 -5 -48 1 -49 0.1 0.9 1 -29 30 -32 2 -49 0.7 0.9 50 -42 39 -18 1 -49 0.9 0.6 -7 -14 23 -12 2 -49 0.1 0.6 50 -2 -13 -33 1 -49 0.5 0.8 -20 -36 50 10 2 -49 0.9 0.9 43 28 -12 -35 1 -49 0.9 0.4 -29 -32 5 -1 2 -49 0.5 0.6 30 -35 15 -15 2 -49 0.4 0.6 7 6 39 -27 1 -49 0.5 0.9 -23 -34 27 -48 2 -49 0.6 0.7 41 -49 -11 -48 1 -49 0.6 0.6 30 26 34 16 1 -49 0.5 0.5 29 13 -24 -45 1 -49 0.6 0.8 46 22 47 12 1 -49 0.6 0.6 26 3 15 -37 1 -49 0.8 0.7 34 -27 -13 -34 1 -49 0.7 0.8 -35 -41 49 47 2 -49 0.3 0.1 26 10 33 30 2 -49 0.8 0.9 37 24 17 -30 1 -49 0.6 0.5 -25 -41 -34 -49 2 -49 0.4 0.6 22 -28 16 -37 1 -49 0.9 0.9 20 -32 31 25 2 -49 0.6 0.9 6 -13 -44 -45 1 -49 0.5 0.6 -7 -34 -33 -39 1 -49 0.7 0.5 35 -4 -14 -40 1 -49 0.3 0.4 43 -22 8 -36 1 -49 0.8 0.8 41 -9 26 20 2 -49 0.4 0.3 -8 -25 -32 -46 1 -49 0.6 0.9 45 -37 19 -45 1 -49 0.3 0.9 39 11 45 36 2 -49 0.3 0.9 25 -45 20 9 2 -49 0.9 0.5 29 -37 14 -47 1 -49 0.9 0.4 -4 -50 26 22 2 -49 0.8 0.5 -39 -43 35 -19 2 -49 0.8 0.2 2 -34 32 24 2 -49 0.5 0.3 -1 -32 17 -18 2 -49 0.8 0.2 -45 -50 10 -29 2 -49 0.9 0.6 41 12 7 -17 1 -49 0.8 0.2 -10 -39 34 -24 1 -49 0.2 0.1 49 1 -12 -29 1 -49 0.8 0.1 19 -25 25 -44 1 -49 0.4 0.5 21 -4 41 -26 2 -49 0.1 0.5 -4 -36 44 37 2 -49 0.7 0.2 30 23 -39 -50 1 -49 0.1 0.9 28 -42 -14 -46 1 -49 0.6 0.2 0 -17 48 21 2 -49 0.4 0.7 45 -13 38 -36 2 -49 0.3 0.6 0 -44 -8 -30 1 -49 0.1 0.3 -23 -47 24 -27 2 -49 0.8 0.3 0 -6 -5 -30 1 -49 0.8 0.9 46 -3 32 -35 2 -49 0.7 0.9 45 41 10 -12 1 -49 0.8 0.9 30 -22 35 34 2 -49 0.6 0.6 35 -6 18 -32 1 -49 0.2 0.2 47 35 9 -45 1 -49 0.8 0.3 32 -34 15 -24 1 -49 0.6 0.3 39 23 46 -31 1 -49 0.7 0.8 18 -4 39 35 2 -49 0.7 0.9 48 -36 17 -7 1 -49 0.7 0.7 21 2 50 17 2 -49 0.7 0.3 45 -33 17 -28 1 -49 0.3 0.2 -37 -49 39 6 2 -49 0.1 0.2 38 26 37 -21 1 -49 0.7 0.3 34 -46 44 -29 1 -49 0.2 0.9 46 -16 -6 -34 1 -49 0.4 0.8 2 -5 40 -13 2 -49 0.5 0.7 -4 -42 18 16 2 -49 0.5 0.7 21 -7 -29 -47 1 -49 0.6 0.4 48 23 18 -5 1 -49 0.5 0.5 16 -19 -30 -40 1 -49 0.6 0.8 27 26 30 -30 1 -49 0.8 0.8 17 16 30 -8 2 -49 0.3 0.6 37 4 31 23 2 -49 0.3 0.8 17 -18 31 2 2 -49 0.7 0.2 -32 -50 48 -29 2 -49 0.1 0.5 22 -16 -4 -21 2 -49 0.2 0.4 -14 -36 -18 -23 2 -49 0.9 0.1 20 -47 37 -32 1 -49 0.8 0.5 12 -6 33 22 2 -49 0.3 0.1 37 -47 -9 -38 1 -49 0.4 0.6 32 -14 -15 -32 1 -49 0.4 0.3 12 -25 38 -34 1 -49 0.5 0.3 -5 -27 10 -4 2 -49 0.8 0.8 9 -7 43 -27 2 -49 0.7 0.6 45 23 27 -41 1 -49 0.9 0.5 8 4 29 -41 1 -49 0.7 0.6 7 -21 -1 -41 1 -49 0.1 0.2 42 9 40 -27 1 -49 0.6 
0.7 40 34 47 -33 2 -49 0.9 0.4 34 -35 -1 -24 1 -49 0.5 0.4 38 28 33 -50 1 -49 0.9 0.3 -2 -11 45 -28 2 -49 0.6 0.3 40 -28 5 -36 1 -49 0.3 0.6 37 36 30 11 1 -49 0.7 0.7 17 12 1 -15 1 -49 0.1 0.2 15 -14 17 -26 1 -49 0.6 0.2 43 -21 -21 -27 1 -49 0.4 0.6 10 0 48 9 2 -49 0.7 0.3 34 -43 36 35 2 -49 0.1 0.3 29 4 32 7 1 -49 0.7 0.8 -31 -45 10 -44 2 -49 0.4 0.5 36 31 2 -1 1 -49 0.6 0.8 39 28 -28 -48 1 -49 0.7 0.9 -21 -25 23 2 2 -49 0.4 0.4 24 -12 30 -24 2 -49 0.7 0.6 30 -40 -3 -21 1 -49 0.6 0.1 -28 -30 24 -42 1 -49 0.8 0.8 49 31 6 -7 1 -49 0.7 0.5 47 20 20 12 1 -49 0.3 0.8 42 -36 23 -43 2 -49 0.7 0.6 49 -8 -26 -39 1 -49 0.9 0.3 44 -34 5 -47 1 -49 0.9 0.1 39 5 44 28 2 -49 0.6 0.1 24 -38 18 2 2 -49 0.2 0.7 40 37 1 -29 1 -49 0.5 0.8 -2 -44 -9 -34 1 -49 0.9 0.3 49 -21 -24 -39 1 -49 0.1 0.2 30 -50 24 -27 2 -49 0.4 0.2 -4 -10 -5 -45 1 -49 0.6 0.5 8 -48 7 -25 2 -49 0.5 0.5 40 15 8 7 1 -49 0.2 0.7 40 4 10 -26 1 -49 0.1 0.5 -44 -46 46 25 2 -49 0.8 0.2 33 5 11 -35 1 -49 0.8 0.3 -2 -26 -13 -20 1 -49 0.9 0.5 29 -34 14 -12 1 -49 0.1 0.7 37 -16 20 -32 2 -49 0.6 0.9 21 3 14 -25 1 -49 0.2 0.9 39 -1 7 3 2 -49 0.9 0.9 7 -23 36 14 2 -49 0.7 0.5 30 26 41 -39 1 -49 0.8 0.1 5 -26 -5 -42 1 -49 0.2 0.5 1 -17 38 30 2 -49 0.3 0.4 -38 -46 30 -22 2 -49 0.6 0.4 36 -13 -7 -15 1 -49 0.8 0.7 17 -27 42 -48 2 -49 0.2 0.3 38 -34 34 9 2 -49 0.8 0.3 34 -5 -18 -44 1 -49 0.9 0.1 42 -34 41 10 1 -49 0.5 0.6 -6 -29 4 -5 2 -49 0.2 0.4 16 -3 5 -32 1 -49 0.9 0.7 45 4 26 -27 1 -49 0.8 0.6 40 3 15 -14 1 -49 0.6 0.2 7 -3 4 -13 1 -49 0.1 0.6 40 -48 -28 -30 1 -49 0.6 0.4 8 -49 35 -12 2 -49 0.2 0.4 47 -11 38 -10 2 -49 0.2 0.5 14 -47 21 -23 2 -49 0.9 0.5 -2 -50 5 -41 1 -49 0.7 0.5 5 -6 30 -47 2 -49 0.6 0.3 46 -6 14 -35 1 -49 0.8 0.5 41 -10 -9 -39 1 -49 0.8 0.5 27 2 27 -32 1 -49 0.4 0.9 -11 -47 50 -37 2 -49 0.2 0.8 24 21 -33 -43 1 -49 0.4 0.3 -41 -42 -15 -47 2 -49 0.2 0.5 -2 -18 -25 -29 1 -49 0.6 0.3 -2 -32 30 11 2 -49 0.7 0.3 15 -14 -18 -42 1 -49 0.2 0.6 33 -10 26 2 2 -49 0.2 0.4 26 -29 15 -19 2 -49 0.6 0.6 23 -14 32 -41 2 -49 0.2 0.2 37 -28 36 18 2 -49 0.5 0.3 -3 -11 -9 -37 2 -49 0.8 0.2 25 -38 37 22 2 -49 0.1 0.2 15 -13 -1 -30 1 -49 0.8 0.2 7 6 -25 -49 1 -49 0.3 0.9 23 6 -5 -9 1 -49 0.3 0.3 49 -19 42 31 2 -49 0.4 0.3 8 -46 -15 -16 1 -49 0.3 0.6 16 10 -14 -36 1 -49 0.1 0.8 40 -46 30 -47 2 -49 0.5 0.2 9 -50 -8 -14 2 -49 0.3 0.6 -23 -46 -22 -29 2 -49 0.2 0.5 43 34 -21 -24 1 -49 0.8 0.7 -20 -33 13 -30 2 -49 0.2 0.2 23 9 34 -47 1 -49 0.5 0.9 -22 -49 42 -1 2 -49 0.4 0.5 -24 -42 8 -1 2 -49 0.1 0.7 30 29 14 -41 1 -49 0.4 0.5 4 -8 -4 -27 1 -49 0.4 0.2 11 9 -5 -37 1 -49 0.2 0.8 4 -37 -7 -32 2 -49 0.8 0.7 27 -26 15 -25 1 -49 0.1 0.1 -40 -45 34 -42 2 -49 0.8 0.3 -8 -28 -38 -45 1 -49 0.8 0.1 28 -6 50 20 2 -49 0.9 0.8 -24 -44 -19 -27 1 -49 0.8 0.8 -27 -37 -3 -33 2 -49 0.1 0.3 -1 -31 -12 -21 2 -49 0.1 0.5 46 -35 23 8 2 -49 0.3 0.4 -3 -44 31 16 2 -49 0.4 0.9 8 -2 -15 -21 1 -49 0.1 0.7 47 -3 -25 -30 1 -49 0.9 0.6 24 -9 27 16 2 -49 0.3 0.2 0 -28 41 -32 2 -49 0.4 0.1 -6 -11 3 -48 1 -49 0.4 0.9 43 -47 48 -46 2 -49 0.5 0.8 9 -50 13 -7 2 -49 0.2 0.4 -11 -15 29 -31 2 -49 0.2 0.5 10 -5 37 12 2 -49 0.1 0.8 33 -44 -3 -13 2 -49 0.6 0.8 35 -20 2 -49 1 -49 0.5 0.1 -8 -46 47 -44 1 -49 0.7 0.6 -11 -44 29 -22 2 -49 0.6 0.2 -31 -47 37 -11 2 -49 0.1 0.9 -26 -28 35 21 2 -49 0.3 0.3 26 -44 39 -40 1 -49 0.6 0.4 38 -23 -17 -30 1 -49 0.8 0.8 38 14 39 31 2 -49 0.8 0.7 1 -40 48 -34 2 -49 0.1 0.7 -12 -34 45 -45 2 -49 0.1 0.4 9 -29 7 6 2 -49 0.2 0.4 21 14 0 -11 1 -49 0.8 0.6 -13 -37 36 -42 2 -49 0.4 0.1 38 5 17 -45 1 -49 0.6 0.7 47 37 -34 -44 1 -49 0.7 0.7 -13 -41 48 39 2 -49 0.6 0.2 32 -20 
-9 -18 1 -49 0.8 0.6 43 -24 5 -5 1 -49 0.2 0.2 8 -20 12 -27 1 -49 0.3 0.8 13 -42 20 15 2 -49 0.7 0.9 -4 -13 24 -50 2 -49 0.5 0.9 40 31 50 34 2 -49 0.5 0.3 -39 -42 -35 -39 1 -49 0.6 0.7 -12 -31 25 11 2 -49 0.2 0.5 -6 -29 9 -36 2 -49 0.6 0.8 7 -43 -7 -49 2 -49 0.4 0.3 50 -28 26 16 2 -49 0.5 0.6 37 -2 43 8 2 -49 0.3 0.7 35 9 -26 -38 1 -49 0.5 0.5 47 38 -8 -24 1 -49 0.8 0.1 30 23 30 -47 1 -49 0.1 0.2 24 12 38 -14 1 -49 0.6 0.6 -10 -20 -35 -47 1 -49 0.7 0.4 1 -35 -6 -44 1 -49 0.7 0.3 36 -19 -38 -44 1 -49 0.6 0.5 8 -22 4 -46 1 -49 0.9 0.3 35 27 1 -3 1 -49 0.9 0.2 1 -10 47 35 2 -49 0.9 0.1 13 5 43 11 2 -49 0.3 0.6 10 -18 -22 -40 1 -49 0.3 0.2 -14 -40 29 9 2 -49 0.4 0.3 30 21 48 16 1 -49 0.9 0.2 42 -50 13 3 1 -49 0.3 0.9 38 19 20 1 1 -49 0.6 0.9 -10 -27 48 -45 2 -49 0.2 0.1 22 1 -7 -24 1 -49 0.9 0.8 50 -42 38 12 1 -49 0.2 0.9 -27 -41 0 -50 2 -49 0.2 0.7 -19 -21 -29 -43 2 -49 0.6 0.7 -15 -43 -9 -48 1 -49 0.3 0.2 36 11 -38 -40 1 -49 0.8 0.5 49 20 -18 -22 1 -49 0.1 0.5 22 -5 -5 -9 1 -49 0.2 0.4 10 -25 7 -42 1 -49 0.3 0.2 27 -20 48 -36 1 -49 0.8 0.5 24 -1 42 -31 2 -49 0.7 0.9 12 -4 20 14 2 -49 0.4 0.3 22 -14 -3 -28 1 -49 0.8 0.5 30 -23 1 -12 1 -49 0.4 0.4 -11 -44 23 -15 2 -49 0.5 0.8 13 -42 21 -12 2 -49 0.5 0.5 27 -36 -5 -22 1 -49 0.6 0.4 48 19 5 -39 1 -49 0.6 0.7 25 -14 -35 -48 1 -49 0.1 0.3 48 -22 -3 -6 2 -49 0.4 0.2 -11 -39 -34 -43 1 -49 0.2 0.3 -1 -24 -1 -49 1 -49 0.3 0.8 -29 -43 7 -9 2 -49 0.7 0.5 33 18 -15 -34 1 -49 0.2 0.5 30 -33 26 20 2 -49 0.2 0.7 29 -41 44 3 2 -49 0.5 0.5 43 37 46 20 1 -49 0.9 0.6 40 -40 -32 -46 1 -49 0.7 0.7 26 0 12 -40 1 -49 0.7 0.9 30 13 10 -24 1 -49 0.4 0.5 -19 -29 9 -9 2 -49 0.2 0.5 -26 -29 10 3 2 -49 0.2 0.4 -12 -39 16 -4 2 -49 0.9 0.8 -4 -38 -23 -41 1 -50 0.7 0.4 12 4 39 -2 1 -50 0.4 0.4 18 -27 17 -10 2 -50 0.3 0.4 -6 -21 -15 -38 1 -50 0.6 0.4 32 1 -37 -50 1 -50 0.1 0.6 45 31 22 -39 1 -50 0.3 0.7 -45 -49 8 3 2 -50 0.2 0.1 12 -4 -37 -49 1 -50 0.3 0.3 -9 -46 -6 -44 1 -50 0.1 0.6 33 -43 42 5 2 -50 0.9 0.1 45 -24 49 -12 1 -50 0.9 0.8 23 -16 -18 -25 1 -50 0.9 0.5 42 -35 26 -13 1 -50 0.4 0.4 19 -41 37 -39 2 -50 0.1 0.8 -29 -37 -37 -40 1 -50 0.2 0.4 44 22 33 10 1 -50 0.6 0.6 -36 -50 37 22 2 -50 0.2 0.1 9 -42 41 -22 2 -50 0.7 0.4 43 -31 -33 -46 1 -50 0.5 0.4 -20 -32 36 -39 2 -50 0.5 0.2 32 -17 37 13 2 -50 0.9 0.2 7 -8 48 7 2 -50 0.1 0.5 35 -6 32 -7 2 -50 0.8 0.5 36 -27 -24 -32 1 -50 0.4 0.9 17 -47 -34 -39 1 -50 0.3 0.4 11 -45 -38 -49 1 -50 0.6 0.3 -7 -8 49 10 2 -50 0.8 0.5 5 -18 35 6 2 -50 0.6 0.8 17 -11 25 -30 2 -50 0.1 0.8 48 -29 47 40 2 -50 0.6 0.4 44 -2 48 -15 1 -50 0.1 0.7 30 -1 -3 -25 1 -50 0.7 0.8 44 -10 -4 -26 1 -50 0.8 0.2 17 4 -13 -21 1 -50 0.9 0.3 11 -33 22 -15 1 -50 0.7 0.2 -33 -43 39 0 2 -50 0.7 0.6 25 -25 38 -32 1 -50 0.1 0.4 -2 -29 12 -40 2 -50 0.5 0.2 -28 -39 31 -28 2 -50 0.5 0.5 46 -1 13 11 1 -50 0.8 0.9 18 2 26 -41 2 -50 0.3 0.4 21 12 49 -13 1 -50 0.5 0.3 42 -33 31 -24 1 -50 0.1 0.9 -37 -48 43 22 2 -50 0.7 0.1 47 30 21 -3 1 -50 0.8 0.8 19 -50 39 -42 2 -50 0.4 0.7 16 1 -7 -35 1 -50 0.6 0.7 34 -13 22 12 2 -50 0.2 0.8 -11 -32 20 15 2 -50 0.9 0.4 -17 -25 35 -47 2 -50 0.4 0.5 14 -27 -22 -36 1 -50 0.6 0.8 6 -39 -29 -45 1 -50 0.6 0.1 -12 -20 21 10 2 -50 0.8 0.5 35 -35 45 28 2 -50 0.1 0.7 22 -47 26 -12 2 -50 0.3 0.8 1 -15 -10 -36 2 -50 0.1 0.4 15 -38 40 18 2 -50 0.6 0.4 -37 -44 36 -39 2 -50 0.2 0.5 46 39 29 -9 1 -50 0.5 0.2 37 -33 20 -9 1 -50 0.9 0.2 -9 -35 26 -35 1 -50 0.6 0.4 19 -22 -23 -32 1 -50 0.1 0.8 28 -5 47 46 2 -50 0.5 0.2 45 9 -5 -14 1 -50 0.3 0.3 37 -50 32 -50 1 -50 0.3 0.1 41 9 -22 -47 1 -50 0.9 0.3 2 -23 48 -8 1 -50 0.9 0.6 34 7 23 -49 1 -50 0.7 0.7 38 
[... remaining deleted rows of this example-data file elided ...]
diff --git a/inst/extdata/dd_exampleData.txt b/inst/extdata/dd_exampleData.txt
deleted file mode 100644
index d90c64c1..00000000
--- a/inst/extdata/dd_exampleData.txt
+++ /dev/null
@@ -1,2161 +0,0 @@
-subjID trial delay_later amount_later delay_sooner amount_sooner choice
[... 2,160 deleted delay-discounting rows (subjects 1-20, 108 trials each) elided ...]
\ No newline at end of file
diff --git a/inst/extdata/dd_single_exampleData.txt b/inst/extdata/dd_single_exampleData.txt
deleted file mode 100644
index a729477e..00000000
--- a/inst/extdata/dd_single_exampleData.txt
+++ /dev/null
@@ -1,109 +0,0 @@
-subjID trial delay_later amount_later delay_sooner amount_sooner choice
[... 108 deleted delay-discounting rows (single subject, 108 trials) elided ...]
\ No newline at end of file
diff --git a/inst/extdata/gng_exampleData.txt b/inst/extdata/gng_exampleData.txt
deleted file mode 100644
index 40e0982a..00000000
--- a/inst/extdata/gng_exampleData.txt
+++ /dev/null
@@ -1,2401 +0,0 @@
-trialNum cue keyPressed success congruentOutcome outcome subjID
[... deleted go/no-go example-data rows elided ...]
0 1 0 4 -49 1 1 1 1 1 4 -50 1 1 1 1 1 4 -51 1 1 1 1 1 4 -52 2 0 1 2 0 4 -53 3 1 1 1 0 4 -54 2 0 1 1 1 4 -55 1 1 1 1 1 4 -56 1 1 1 1 1 4 -57 3 0 0 1 -1 4 -58 4 0 1 2 -1 4 -59 2 0 1 1 1 4 -60 1 1 1 2 0 4 -61 1 1 1 1 1 4 -62 2 0 1 1 1 4 -63 1 1 1 2 0 4 -64 4 0 1 1 0 4 -65 2 0 1 1 1 4 -66 1 1 1 1 1 4 -67 2 0 1 1 1 4 -68 3 1 1 2 -1 4 -69 2 0 1 1 1 4 -70 4 0 1 1 0 4 -71 4 0 1 2 -1 4 -72 1 1 1 2 0 4 -73 2 0 1 1 1 4 -74 2 0 1 1 1 4 -75 4 0 1 1 0 4 -76 4 0 1 1 0 4 -77 1 1 1 1 1 4 -78 1 1 1 1 1 4 -79 2 0 1 1 1 4 -80 4 1 0 1 -1 4 -81 4 0 1 1 0 4 -82 1 1 1 1 1 4 -83 2 0 1 1 1 4 -84 1 1 1 1 1 4 -85 1 1 1 2 0 4 -86 4 0 1 1 0 4 -87 4 0 1 1 0 4 -88 1 1 1 1 1 4 -89 2 0 1 1 1 4 -90 3 0 0 1 -1 4 -91 3 1 1 1 0 4 -92 1 1 1 1 1 4 -93 3 1 1 2 -1 4 -94 4 0 1 1 0 4 -95 2 0 1 1 1 4 -96 3 1 1 1 0 4 -97 2 0 1 1 1 4 -98 1 1 1 2 0 4 -99 3 1 1 1 0 4 -100 3 1 1 1 0 4 -101 2 0 1 1 1 4 -102 4 0 1 1 0 4 -103 2 0 1 1 1 4 -104 3 1 1 1 0 4 -105 4 0 1 1 0 4 -106 3 1 1 1 0 4 -107 1 1 1 1 1 4 -108 3 1 1 1 0 4 -109 2 0 1 1 1 4 -110 2 0 1 2 0 4 -111 3 1 1 2 -1 4 -112 1 1 1 1 1 4 -113 4 0 1 1 0 4 -114 2 0 1 1 1 4 -115 3 0 0 1 -1 4 -116 1 1 1 1 1 4 -117 3 1 1 1 0 4 -118 1 1 1 1 1 4 -119 2 0 1 1 1 4 -120 3 1 1 1 0 4 -121 3 0 0 1 -1 4 -122 4 0 1 2 -1 4 -123 2 0 1 1 1 4 -124 2 0 1 1 1 4 -125 4 0 1 1 0 4 -126 4 1 0 1 -1 4 -127 1 1 1 1 1 4 -128 1 1 1 1 1 4 -129 2 0 1 1 1 4 -130 1 1 1 1 1 4 -131 4 0 1 1 0 4 -132 3 1 1 1 0 4 -133 4 0 1 1 0 4 -134 1 1 1 1 1 4 -135 4 0 1 1 0 4 -136 2 0 1 2 0 4 -137 4 0 1 1 0 4 -138 1 1 1 1 1 4 -139 2 0 1 1 1 4 -140 1 1 1 1 1 4 -141 2 0 1 1 1 4 -142 3 1 1 1 0 4 -143 2 0 1 1 1 4 -144 4 0 1 1 0 4 -145 2 0 1 1 1 4 -146 1 1 1 2 0 4 -147 3 1 1 1 0 4 -148 2 0 1 1 1 4 -149 2 0 1 1 1 4 -150 1 1 1 1 1 4 -151 3 1 1 2 -1 4 -152 3 1 1 2 -1 4 -153 1 1 1 1 1 4 -154 1 1 1 1 1 4 -155 3 1 1 1 0 4 -156 3 1 1 1 0 4 -157 2 0 1 1 1 4 -158 1 1 1 1 1 4 -159 4 0 1 1 0 4 -160 4 0 1 1 0 4 -161 3 1 1 1 0 4 -162 3 1 1 1 0 4 -163 4 0 1 1 0 4 -164 2 0 1 1 1 4 -165 4 0 1 1 0 4 -166 4 0 1 1 0 4 -167 3 1 1 1 0 4 -168 1 1 1 1 1 4 -169 4 0 1 1 0 4 -170 2 0 1 1 1 4 -171 1 1 1 2 0 4 -172 4 0 1 1 0 4 -173 1 1 1 1 1 4 -174 4 0 1 2 -1 4 -175 3 1 1 2 -1 4 -176 4 0 1 1 0 4 -177 4 0 1 1 0 4 -178 4 0 1 1 0 4 -179 2 0 1 1 1 4 -180 3 1 1 2 -1 4 -181 2 0 1 1 1 4 -182 1 1 1 1 1 4 -183 1 1 1 1 1 4 -184 2 0 1 1 1 4 -185 3 1 1 2 -1 4 -186 4 0 1 2 -1 4 -187 2 0 1 2 0 4 -188 1 1 1 1 1 4 -189 2 0 1 1 1 4 -190 2 0 1 1 1 4 -191 4 0 1 1 0 4 -192 1 1 1 1 1 4 -193 2 0 1 1 1 4 -194 2 0 1 1 1 4 -195 2 0 1 1 1 4 -196 4 0 1 2 -1 4 -197 3 0 0 2 0 4 -198 1 1 1 1 1 4 -199 3 1 1 1 0 4 -200 3 1 1 1 0 4 -201 2 0 1 1 1 4 -202 3 1 1 1 0 4 -203 3 1 1 1 0 4 -204 2 0 1 1 1 4 -205 1 1 1 1 1 4 -206 1 1 1 2 0 4 -207 4 0 1 1 0 4 -208 4 0 1 2 -1 4 -209 4 1 0 1 -1 4 -210 1 1 1 1 1 4 -211 3 1 1 2 -1 4 -212 4 1 0 1 -1 4 -213 3 0 0 1 -1 4 -214 1 1 1 2 0 4 -215 3 1 1 1 0 4 -216 1 1 1 1 1 4 -217 2 0 1 1 1 4 -218 1 1 1 2 0 4 -219 4 1 0 1 -1 4 -220 3 1 1 1 0 4 -221 3 1 1 1 0 4 -222 3 1 1 1 0 4 -223 4 0 1 1 0 4 -224 3 1 1 1 0 4 -225 4 1 0 1 -1 4 -226 3 1 1 2 -1 4 -227 4 0 1 2 -1 4 -228 4 1 0 1 -1 4 -229 2 0 1 1 1 4 -230 1 1 1 1 1 4 -231 4 0 1 1 0 4 -232 1 1 1 1 1 4 -233 4 0 1 1 0 4 -234 1 1 1 1 1 4 -235 2 0 1 1 1 4 -236 3 1 1 2 -1 4 -237 4 0 1 1 0 4 -238 1 1 1 1 1 4 -239 1 1 1 1 1 4 -240 1 1 1 1 1 4 -1 2 0 1 1 1 5 -2 1 1 1 1 1 5 -3 4 1 0 1 -1 5 -4 1 1 1 1 1 5 -5 4 1 0 2 0 5 -6 1 1 1 1 1 5 -7 4 0 1 2 -1 5 -8 3 0 0 1 -1 5 -9 3 1 1 1 0 5 -10 4 1 0 2 0 5 -11 1 1 1 2 0 5 -12 1 1 1 1 1 5 -13 4 1 0 1 -1 5 -14 2 0 1 1 1 5 -15 2 0 1 1 1 5 -16 3 1 1 1 0 5 -17 2 0 1 1 1 5 -18 4 1 0 1 -1 5 -19 2 0 1 1 1 5 
-20 1 1 1 1 1 5 -21 1 1 1 1 1 5 -22 2 0 1 1 1 5 -23 1 1 1 1 1 5 -24 3 1 1 1 0 5 -25 4 0 1 1 0 5 -26 3 1 1 1 0 5 -27 4 0 1 2 -1 5 -28 4 0 1 1 0 5 -29 1 1 1 1 1 5 -30 4 0 1 1 0 5 -31 2 0 1 1 1 5 -32 3 1 1 2 -1 5 -33 3 1 1 1 0 5 -34 4 0 1 1 0 5 -35 2 0 1 1 1 5 -36 4 0 1 1 0 5 -37 3 0 0 1 -1 5 -38 1 1 1 1 1 5 -39 3 0 0 1 -1 5 -40 3 1 1 1 0 5 -41 1 1 1 2 0 5 -42 4 0 1 1 0 5 -43 4 0 1 1 0 5 -44 1 1 1 1 1 5 -45 3 1 1 1 0 5 -46 2 0 1 2 0 5 -47 4 0 1 1 0 5 -48 4 0 1 1 0 5 -49 4 0 1 2 -1 5 -50 3 1 1 1 0 5 -51 2 0 1 1 1 5 -52 1 1 1 2 0 5 -53 4 0 1 1 0 5 -54 4 0 1 1 0 5 -55 1 1 1 1 1 5 -56 3 1 1 1 0 5 -57 2 0 1 1 1 5 -58 1 1 1 1 1 5 -59 2 0 1 1 1 5 -60 3 1 1 2 -1 5 -61 1 1 1 1 1 5 -62 1 1 1 2 0 5 -63 3 0 0 1 -1 5 -64 2 0 1 1 1 5 -65 4 1 0 2 0 5 -66 3 0 0 1 -1 5 -67 4 1 0 1 -1 5 -68 2 0 1 1 1 5 -69 1 1 1 2 0 5 -70 1 1 1 1 1 5 -71 4 0 1 1 0 5 -72 3 0 0 1 -1 5 -73 2 0 1 2 0 5 -74 3 1 1 1 0 5 -75 4 0 1 1 0 5 -76 4 0 1 2 -1 5 -77 1 1 1 2 0 5 -78 3 1 1 1 0 5 -79 2 0 1 1 1 5 -80 1 1 1 1 1 5 -81 4 0 1 2 -1 5 -82 1 1 1 1 1 5 -83 4 1 0 1 -1 5 -84 2 0 1 1 1 5 -85 1 1 1 1 1 5 -86 1 1 1 1 1 5 -87 2 0 1 2 0 5 -88 3 1 1 2 -1 5 -89 3 0 0 1 -1 5 -90 4 0 1 1 0 5 -91 2 0 1 1 1 5 -92 3 1 1 1 0 5 -93 2 0 1 1 1 5 -94 1 1 1 1 1 5 -95 2 0 1 1 1 5 -96 1 1 1 2 0 5 -97 3 1 1 1 0 5 -98 3 0 0 1 -1 5 -99 4 0 1 1 0 5 -100 1 1 1 2 0 5 -101 4 0 1 1 0 5 -102 1 1 1 1 1 5 -103 4 0 1 1 0 5 -104 1 1 1 1 1 5 -105 1 1 1 1 1 5 -106 4 1 0 1 -1 5 -107 2 0 1 1 1 5 -108 1 1 1 1 1 5 -109 1 1 1 1 1 5 -110 3 1 1 2 -1 5 -111 2 0 1 1 1 5 -112 3 1 1 2 -1 5 -113 1 1 1 1 1 5 -114 1 1 1 1 1 5 -115 2 0 1 1 1 5 -116 1 1 1 1 1 5 -117 4 0 1 1 0 5 -118 4 0 1 1 0 5 -119 4 0 1 1 0 5 -120 1 1 1 1 1 5 -121 4 0 1 2 -1 5 -122 2 0 1 1 1 5 -123 1 1 1 1 1 5 -124 2 0 1 1 1 5 -125 3 1 1 2 -1 5 -126 4 0 1 1 0 5 -127 2 0 1 1 1 5 -128 3 1 1 1 0 5 -129 3 1 1 1 0 5 -130 3 1 1 1 0 5 -131 3 1 1 1 0 5 -132 3 1 1 2 -1 5 -133 1 1 1 1 1 5 -134 2 0 1 1 1 5 -135 4 0 1 1 0 5 -136 1 1 1 2 0 5 -137 2 0 1 1 1 5 -138 2 0 1 1 1 5 -139 2 0 1 1 1 5 -140 2 0 1 2 0 5 -141 2 0 1 1 1 5 -142 2 0 1 1 1 5 -143 3 1 1 1 0 5 -144 1 1 1 2 0 5 -145 1 1 1 1 1 5 -146 1 1 1 2 0 5 -147 3 1 1 1 0 5 -148 2 0 1 1 1 5 -149 1 1 1 2 0 5 -150 2 0 1 1 1 5 -151 4 0 1 2 -1 5 -152 4 0 1 2 -1 5 -153 1 1 1 1 1 5 -154 2 0 1 2 0 5 -155 2 0 1 1 1 5 -156 4 0 1 1 0 5 -157 1 1 1 1 1 5 -158 4 0 1 1 0 5 -159 1 1 1 1 1 5 -160 2 0 1 1 1 5 -161 3 1 1 1 0 5 -162 2 0 1 1 1 5 -163 4 0 1 1 0 5 -164 1 1 1 1 1 5 -165 2 0 1 1 1 5 -166 3 1 1 2 -1 5 -167 3 1 1 2 -1 5 -168 3 1 1 1 0 5 -169 4 0 1 1 0 5 -170 2 0 1 1 1 5 -171 2 0 1 2 0 5 -172 4 1 0 2 0 5 -173 3 1 1 1 0 5 -174 4 1 0 2 0 5 -175 2 0 1 1 1 5 -176 1 1 1 1 1 5 -177 2 0 1 1 1 5 -178 3 1 1 1 0 5 -179 2 0 1 1 1 5 -180 1 0 0 2 1 5 -181 1 1 1 1 1 5 -182 4 0 1 1 0 5 -183 1 1 1 1 1 5 -184 1 1 1 1 1 5 -185 1 1 1 1 1 5 -186 3 1 1 1 0 5 -187 3 1 1 1 0 5 -188 3 1 1 2 -1 5 -189 4 0 1 1 0 5 -190 4 0 1 1 0 5 -191 4 0 1 2 -1 5 -192 2 0 1 2 0 5 -193 2 0 1 1 1 5 -194 1 1 1 1 1 5 -195 2 0 1 1 1 5 -196 3 1 1 1 0 5 -197 3 1 1 1 0 5 -198 2 0 1 1 1 5 -199 2 0 1 1 1 5 -200 3 1 1 1 0 5 -201 4 0 1 1 0 5 -202 3 1 1 2 -1 5 -203 2 0 1 1 1 5 -204 2 0 1 1 1 5 -205 3 1 1 1 0 5 -206 2 0 1 1 1 5 -207 2 0 1 2 0 5 -208 3 1 1 1 0 5 -209 2 0 1 1 1 5 -210 1 1 1 1 1 5 -211 3 1 1 1 0 5 -212 4 1 0 1 -1 5 -213 4 1 0 2 0 5 -214 4 0 1 1 0 5 -215 1 1 1 1 1 5 -216 3 0 0 2 0 5 -217 1 1 1 2 0 5 -218 2 0 1 1 1 5 -219 4 0 1 1 0 5 -220 3 0 0 2 0 5 -221 3 0 0 1 -1 5 -222 4 0 1 1 0 5 -223 3 1 1 1 0 5 -224 4 0 1 1 0 5 -225 3 1 1 1 0 5 -226 3 1 1 1 0 5 -227 1 1 1 1 1 5 -228 4 0 1 1 0 5 -229 1 1 1 2 0 5 -230 2 0 1 2 0 5 -231 3 1 1 1 0 5 
-232 2 0 1 1 1 5 -233 3 1 1 1 0 5 -234 4 0 1 1 0 5 -235 1 1 1 2 0 5 -236 2 0 1 1 1 5 -237 3 1 1 1 0 5 -238 3 1 1 1 0 5 -239 4 1 0 2 0 5 -240 4 1 0 1 -1 5 -1 3 1 1 2 -1 6 -2 1 0 0 2 1 6 -3 2 1 0 2 1 6 -4 1 0 0 1 0 6 -5 4 1 0 1 -1 6 -6 4 0 1 2 -1 6 -7 2 1 0 2 1 6 -8 4 1 0 1 -1 6 -9 1 0 0 1 0 6 -10 2 1 0 2 1 6 -11 2 1 0 2 1 6 -12 2 1 0 1 0 6 -13 2 1 0 1 0 6 -14 4 0 1 2 -1 6 -15 3 0 0 1 -1 6 -16 3 0 0 1 -1 6 -17 4 1 0 1 -1 6 -18 1 1 1 1 1 6 -19 1 1 1 1 1 6 -20 3 1 1 1 0 6 -21 4 0 1 1 0 6 -22 3 1 1 1 0 6 -23 4 0 1 1 0 6 -24 1 1 1 1 1 6 -25 3 1 1 1 0 6 -26 1 1 1 2 0 6 -27 1 1 1 1 1 6 -28 1 1 1 2 0 6 -29 1 1 1 1 1 6 -30 1 1 1 1 1 6 -31 4 0 1 1 0 6 -32 1 1 1 1 1 6 -33 2 1 0 1 0 6 -34 3 1 1 1 0 6 -35 4 0 1 2 -1 6 -36 3 1 1 2 -1 6 -37 4 1 0 2 0 6 -38 4 1 0 2 0 6 -39 3 1 1 2 -1 6 -40 4 1 0 2 0 6 -41 2 1 0 1 0 6 -42 1 1 1 1 1 6 -43 3 1 1 2 -1 6 -44 1 1 1 1 1 6 -45 4 1 0 1 -1 6 -46 2 1 0 2 1 6 -47 3 0 0 1 -1 6 -48 2 1 0 2 1 6 -49 1 1 1 2 0 6 -50 1 1 1 2 0 6 -51 4 0 1 1 0 6 -52 3 1 1 1 0 6 -53 1 1 1 1 1 6 -54 2 1 0 1 0 6 -55 2 1 0 2 1 6 -56 2 1 0 1 0 6 -57 1 1 1 1 1 6 -58 1 1 1 1 1 6 -59 3 1 1 1 0 6 -60 2 1 0 1 0 6 -61 4 1 0 2 0 6 -62 2 1 0 1 0 6 -63 3 1 1 2 -1 6 -64 3 0 0 2 0 6 -65 2 1 0 1 0 6 -66 3 1 1 1 0 6 -67 4 1 0 1 -1 6 -68 4 0 1 1 0 6 -69 4 0 1 1 0 6 -70 1 1 1 2 0 6 -71 2 1 0 1 0 6 -72 4 0 1 1 0 6 -73 3 1 1 1 0 6 -74 1 1 1 2 0 6 -75 4 1 0 1 -1 6 -76 1 1 1 2 0 6 -77 3 1 1 1 0 6 -78 2 1 0 1 0 6 -79 4 0 1 1 0 6 -80 4 1 0 2 0 6 -81 2 1 0 1 0 6 -82 1 1 1 1 1 6 -83 4 0 1 2 -1 6 -84 2 1 0 2 1 6 -85 2 1 0 1 0 6 -86 4 1 0 1 -1 6 -87 3 1 1 1 0 6 -88 4 0 1 1 0 6 -89 2 1 0 2 1 6 -90 1 1 1 1 1 6 -91 1 1 1 1 1 6 -92 3 1 1 1 0 6 -93 1 1 1 1 1 6 -94 1 1 1 1 1 6 -95 4 0 1 1 0 6 -96 3 1 1 1 0 6 -97 4 0 1 1 0 6 -98 4 0 1 2 -1 6 -99 2 1 0 1 0 6 -100 1 1 1 1 1 6 -101 4 0 1 1 0 6 -102 4 0 1 1 0 6 -103 3 1 1 1 0 6 -104 4 0 1 1 0 6 -105 2 1 0 1 0 6 -106 3 1 1 1 0 6 -107 2 1 0 1 0 6 -108 3 1 1 1 0 6 -109 3 1 1 1 0 6 -110 4 0 1 1 0 6 -111 1 1 1 2 0 6 -112 2 1 0 1 0 6 -113 1 1 1 1 1 6 -114 4 1 0 1 -1 6 -115 1 1 1 2 0 6 -116 4 1 0 1 -1 6 -117 4 0 1 1 0 6 -118 3 1 1 1 0 6 -119 3 0 0 1 -1 6 -120 2 1 0 1 0 6 -121 4 0 1 2 -1 6 -122 3 1 1 1 0 6 -123 4 1 0 1 -1 6 -124 3 1 1 2 -1 6 -125 2 0 1 1 1 6 -126 2 1 0 1 0 6 -127 2 1 0 1 0 6 -128 1 1 1 1 1 6 -129 4 1 0 1 -1 6 -130 3 1 1 1 0 6 -131 4 0 1 1 0 6 -132 2 1 0 1 0 6 -133 2 0 1 1 1 6 -134 2 0 1 1 1 6 -135 3 1 1 1 0 6 -136 3 1 1 1 0 6 -137 2 0 1 1 1 6 -138 4 0 1 1 0 6 -139 1 1 1 2 0 6 -140 2 0 1 1 1 6 -141 2 0 1 2 0 6 -142 4 0 1 1 0 6 -143 1 1 1 1 1 6 -144 4 0 1 1 0 6 -145 4 0 1 2 -1 6 -146 1 1 1 1 1 6 -147 3 0 0 1 -1 6 -148 4 0 1 1 0 6 -149 1 0 0 2 1 6 -150 1 1 1 2 0 6 -151 4 0 1 1 0 6 -152 1 1 1 2 0 6 -153 3 1 1 1 0 6 -154 3 1 1 1 0 6 -155 2 0 1 2 0 6 -156 2 0 1 1 1 6 -157 1 1 1 2 0 6 -158 3 1 1 1 0 6 -159 3 0 0 1 -1 6 -160 3 1 1 1 0 6 -161 3 1 1 1 0 6 -162 1 0 0 1 0 6 -163 4 0 1 1 0 6 -164 3 0 0 1 -1 6 -165 3 1 1 1 0 6 -166 3 1 1 1 0 6 -167 2 0 1 2 0 6 -168 3 1 1 2 -1 6 -169 2 0 1 1 1 6 -170 2 0 1 2 0 6 -171 1 1 1 1 1 6 -172 2 0 1 1 1 6 -173 1 1 1 1 1 6 -174 1 1 1 1 1 6 -175 2 0 1 1 1 6 -176 2 0 1 1 1 6 -177 1 1 1 1 1 6 -178 2 0 1 1 1 6 -179 4 0 1 1 0 6 -180 1 1 1 1 1 6 -181 3 1 1 1 0 6 -182 3 1 1 2 -1 6 -183 3 1 1 1 0 6 -184 4 1 0 1 -1 6 -185 3 1 1 1 0 6 -186 4 0 1 1 0 6 -187 3 1 1 2 -1 6 -188 4 0 1 1 0 6 -189 1 1 1 1 1 6 -190 4 0 1 2 -1 6 -191 1 1 1 1 1 6 -192 3 1 1 1 0 6 -193 3 1 1 2 -1 6 -194 2 0 1 1 1 6 -195 1 1 1 1 1 6 -196 1 1 1 1 1 6 -197 2 0 1 2 0 6 -198 1 1 1 2 0 6 -199 2 1 0 1 0 6 -200 3 1 1 1 0 6 -201 2 0 1 1 1 6 -202 3 1 1 1 0 6 -203 1 1 1 1 1 6 -204 3 1 1 1 0 6 
-205 1 1 1 2 0 6 -206 3 1 1 1 0 6 -207 2 0 1 1 1 6 -208 3 1 1 1 0 6 -209 2 0 1 1 1 6 -210 4 1 0 1 -1 6 -211 2 0 1 1 1 6 -212 2 0 1 1 1 6 -213 1 1 1 1 1 6 -214 3 1 1 1 0 6 -215 1 1 1 1 1 6 -216 3 1 1 1 0 6 -217 1 1 1 1 1 6 -218 2 0 1 1 1 6 -219 2 0 1 1 1 6 -220 1 1 1 1 1 6 -221 1 1 1 1 1 6 -222 4 0 1 2 -1 6 -223 1 1 1 1 1 6 -224 4 0 1 1 0 6 -225 4 0 1 1 0 6 -226 4 0 1 1 0 6 -227 3 1 1 1 0 6 -228 2 0 1 1 1 6 -229 2 0 1 2 0 6 -230 3 1 1 1 0 6 -231 2 0 1 1 1 6 -232 2 0 1 1 1 6 -233 4 0 1 1 0 6 -234 2 0 1 1 1 6 -235 1 1 1 2 0 6 -236 4 0 1 2 -1 6 -237 4 0 1 1 0 6 -238 4 0 1 1 0 6 -239 3 0 0 1 -1 6 -240 1 1 1 1 1 6 -1 2 0 1 1 1 7 -2 4 1 0 1 -1 7 -3 4 0 1 1 0 7 -4 3 1 1 1 0 7 -5 3 1 1 1 0 7 -6 3 0 0 1 -1 7 -7 4 0 1 1 0 7 -8 2 0 1 1 1 7 -9 3 1 1 1 0 7 -10 4 0 1 2 -1 7 -11 2 0 1 1 1 7 -12 4 0 1 1 0 7 -13 3 1 1 2 -1 7 -14 1 1 1 1 1 7 -15 1 1 1 1 1 7 -16 1 1 1 1 1 7 -17 1 1 1 1 1 7 -18 2 0 1 1 1 7 -19 1 1 1 1 1 7 -20 3 1 1 1 0 7 -21 2 0 1 1 1 7 -22 3 0 0 1 -1 7 -23 2 1 0 1 0 7 -24 4 0 1 1 0 7 -25 4 1 0 1 -1 7 -26 3 1 1 1 0 7 -27 4 1 0 1 -1 7 -28 1 1 1 1 1 7 -29 1 1 1 1 1 7 -30 3 1 1 2 -1 7 -31 4 0 1 1 0 7 -32 2 0 1 1 1 7 -33 4 0 1 1 0 7 -34 3 1 1 1 0 7 -35 3 0 0 1 -1 7 -36 3 1 1 1 0 7 -37 1 1 1 1 1 7 -38 3 1 1 1 0 7 -39 3 0 0 1 -1 7 -40 4 1 0 1 -1 7 -41 4 0 1 1 0 7 -42 1 1 1 1 1 7 -43 4 0 1 1 0 7 -44 2 0 1 1 1 7 -45 1 1 1 1 1 7 -46 2 0 1 2 0 7 -47 1 1 1 1 1 7 -48 3 1 1 1 0 7 -49 2 0 1 2 0 7 -50 3 1 1 1 0 7 -51 2 0 1 1 1 7 -52 2 0 1 2 0 7 -53 2 0 1 1 1 7 -54 2 1 0 1 0 7 -55 1 1 1 1 1 7 -56 1 1 1 1 1 7 -57 4 0 1 1 0 7 -58 2 0 1 1 1 7 -59 4 0 1 1 0 7 -60 1 1 1 1 1 7 -61 3 1 1 2 -1 7 -62 2 0 1 1 1 7 -63 3 0 0 1 -1 7 -64 4 0 1 1 0 7 -65 3 1 1 1 0 7 -66 4 0 1 1 0 7 -67 2 0 1 2 0 7 -68 4 0 1 1 0 7 -69 2 0 1 2 0 7 -70 1 1 1 1 1 7 -71 4 1 0 2 0 7 -72 2 0 1 2 0 7 -73 3 1 1 1 0 7 -74 4 0 1 1 0 7 -75 3 1 1 1 0 7 -76 1 1 1 1 1 7 -77 2 0 1 1 1 7 -78 4 0 1 1 0 7 -79 2 0 1 1 1 7 -80 4 1 0 2 0 7 -81 3 1 1 2 -1 7 -82 3 1 1 2 -1 7 -83 2 0 1 1 1 7 -84 3 1 1 2 -1 7 -85 2 0 1 2 0 7 -86 3 1 1 2 -1 7 -87 2 0 1 1 1 7 -88 2 0 1 2 0 7 -89 1 1 1 1 1 7 -90 4 0 1 1 0 7 -91 2 0 1 1 1 7 -92 1 1 1 1 1 7 -93 4 1 0 1 -1 7 -94 1 0 0 1 0 7 -95 3 1 1 2 -1 7 -96 1 1 1 1 1 7 -97 3 0 0 1 -1 7 -98 1 1 1 1 1 7 -99 4 0 1 1 0 7 -100 1 1 1 1 1 7 -101 3 1 1 1 0 7 -102 2 0 1 1 1 7 -103 1 1 1 1 1 7 -104 3 1 1 1 0 7 -105 1 1 1 1 1 7 -106 2 1 0 1 0 7 -107 3 1 1 1 0 7 -108 3 1 1 1 0 7 -109 4 0 1 1 0 7 -110 4 1 0 1 -1 7 -111 2 0 1 1 1 7 -112 4 0 1 1 0 7 -113 2 0 1 1 1 7 -114 1 1 1 1 1 7 -115 4 0 1 1 0 7 -116 1 1 1 2 0 7 -117 2 0 1 1 1 7 -118 2 0 1 1 1 7 -119 4 0 1 1 0 7 -120 3 1 1 1 0 7 -121 1 1 1 1 1 7 -122 1 1 1 1 1 7 -123 2 0 1 1 1 7 -124 1 1 1 2 0 7 -125 4 0 1 1 0 7 -126 1 1 1 1 1 7 -127 3 0 0 1 -1 7 -128 4 0 1 1 0 7 -129 3 1 1 2 -1 7 -130 2 0 1 2 0 7 -131 1 1 1 2 0 7 -132 2 1 0 1 0 7 -133 4 0 1 1 0 7 -134 1 1 1 1 1 7 -135 1 1 1 2 0 7 -136 3 1 1 1 0 7 -137 2 0 1 1 1 7 -138 3 1 1 1 0 7 -139 4 0 1 1 0 7 -140 2 0 1 2 0 7 -141 3 1 1 1 0 7 -142 4 0 1 1 0 7 -143 1 1 1 1 1 7 -144 3 1 1 2 -1 7 -145 1 1 1 2 0 7 -146 1 1 1 1 1 7 -147 2 0 1 1 1 7 -148 2 0 1 1 1 7 -149 3 1 1 1 0 7 -150 4 0 1 1 0 7 -151 4 0 1 1 0 7 -152 4 0 1 1 0 7 -153 2 0 1 1 1 7 -154 4 0 1 2 -1 7 -155 4 0 1 2 -1 7 -156 4 0 1 2 -1 7 -157 1 1 1 2 0 7 -158 3 1 1 1 0 7 -159 2 0 1 2 0 7 -160 2 0 1 1 1 7 -161 3 1 1 1 0 7 -162 1 1 1 2 0 7 -163 1 1 1 1 1 7 -164 4 1 0 1 -1 7 -165 4 1 0 1 -1 7 -166 1 1 1 1 1 7 -167 4 1 0 1 -1 7 -168 1 1 1 2 0 7 -169 4 0 1 1 0 7 -170 4 0 1 1 0 7 -171 2 0 1 1 1 7 -172 4 0 1 1 0 7 -173 2 0 1 1 1 7 -174 1 1 1 1 1 7 -175 4 0 1 1 0 7 -176 4 0 1 1 0 7 -177 2 0 1 2 0 7 -178 4 
0 1 1 0 7 -179 2 0 1 1 1 7 -180 3 1 1 1 0 7 -181 1 1 1 2 0 7 -182 3 1 1 2 -1 7 -183 3 1 1 1 0 7 -184 1 1 1 1 1 7 -185 3 1 1 2 -1 7 -186 4 0 1 1 0 7 -187 1 1 1 1 1 7 -188 1 1 1 1 1 7 -189 3 0 0 1 -1 7 -190 2 0 1 1 1 7 -191 1 1 1 1 1 7 -192 1 1 1 2 0 7 -193 4 0 1 1 0 7 -194 4 0 1 2 -1 7 -195 1 1 1 2 0 7 -196 4 0 1 1 0 7 -197 2 0 1 1 1 7 -198 2 0 1 1 1 7 -199 2 0 1 1 1 7 -200 1 1 1 1 1 7 -201 4 0 1 2 -1 7 -202 2 0 1 1 1 7 -203 2 0 1 1 1 7 -204 3 0 0 1 -1 7 -205 3 1 1 1 0 7 -206 1 1 1 2 0 7 -207 2 0 1 1 1 7 -208 3 1 1 1 0 7 -209 2 0 1 1 1 7 -210 3 1 1 1 0 7 -211 3 1 1 2 -1 7 -212 4 0 1 2 -1 7 -213 1 1 1 1 1 7 -214 3 1 1 2 -1 7 -215 1 1 1 2 0 7 -216 2 0 1 1 1 7 -217 3 1 1 1 0 7 -218 1 1 1 2 0 7 -219 1 1 1 1 1 7 -220 2 0 1 1 1 7 -221 3 1 1 1 0 7 -222 2 0 1 1 1 7 -223 2 0 1 1 1 7 -224 2 0 1 2 0 7 -225 1 0 0 1 0 7 -226 3 1 1 1 0 7 -227 1 1 1 1 1 7 -228 3 1 1 1 0 7 -229 1 1 1 2 0 7 -230 1 0 0 1 0 7 -231 4 0 1 2 -1 7 -232 2 0 1 1 1 7 -233 3 1 1 2 -1 7 -234 3 0 0 1 -1 7 -235 3 0 0 1 -1 7 -236 4 0 1 1 0 7 -237 3 1 1 1 0 7 -238 4 0 1 1 0 7 -239 1 1 1 1 1 7 -240 4 0 1 1 0 7 -1 3 1 1 1 0 8 -2 2 0 1 1 1 8 -3 3 0 0 1 -1 8 -4 3 1 1 1 0 8 -5 1 0 0 2 1 8 -6 3 1 1 2 -1 8 -7 2 1 0 1 0 8 -8 1 0 0 1 0 8 -9 2 0 1 2 0 8 -10 2 1 0 2 1 8 -11 1 1 1 2 0 8 -12 3 0 0 1 -1 8 -13 4 1 0 2 0 8 -14 3 0 0 1 -1 8 -15 4 1 0 2 0 8 -16 3 1 1 2 -1 8 -17 1 0 0 1 0 8 -18 2 1 0 2 1 8 -19 2 1 0 1 0 8 -20 3 1 1 1 0 8 -21 2 1 0 1 0 8 -22 4 1 0 1 -1 8 -23 2 0 1 2 0 8 -24 3 1 1 1 0 8 -25 2 1 0 1 0 8 -26 3 1 1 1 0 8 -27 3 1 1 1 0 8 -28 4 0 1 2 -1 8 -29 1 1 1 1 1 8 -30 3 1 1 2 -1 8 -31 1 1 1 1 1 8 -32 1 1 1 1 1 8 -33 3 1 1 1 0 8 -34 4 1 0 1 -1 8 -35 4 0 1 1 0 8 -36 2 1 0 2 1 8 -37 3 1 1 2 -1 8 -38 1 1 1 1 1 8 -39 4 1 0 1 -1 8 -40 2 1 0 1 0 8 -41 2 1 0 1 0 8 -42 4 0 1 1 0 8 -43 3 1 1 1 0 8 -44 1 1 1 1 1 8 -45 1 1 1 1 1 8 -46 4 0 1 2 -1 8 -47 3 0 0 1 -1 8 -48 2 1 0 2 1 8 -49 2 1 0 1 0 8 -50 3 1 1 1 0 8 -51 3 1 1 1 0 8 -52 1 1 1 1 1 8 -53 4 0 1 1 0 8 -54 4 1 0 2 0 8 -55 3 1 1 2 -1 8 -56 2 1 0 2 1 8 -57 4 0 1 1 0 8 -58 2 1 0 2 1 8 -59 1 1 1 2 0 8 -60 1 1 1 1 1 8 -61 1 1 1 2 0 8 -62 2 1 0 1 0 8 -63 3 1 1 1 0 8 -64 3 1 1 2 -1 8 -65 4 0 1 1 0 8 -66 3 1 1 1 0 8 -67 3 0 0 2 0 8 -68 1 1 1 1 1 8 -69 4 0 1 1 0 8 -70 1 1 1 2 0 8 -71 4 1 0 1 -1 8 -72 4 0 1 2 -1 8 -73 3 1 1 1 0 8 -74 3 1 1 2 -1 8 -75 4 1 0 1 -1 8 -76 1 1 1 1 1 8 -77 4 0 1 1 0 8 -78 2 1 0 1 0 8 -79 1 1 1 1 1 8 -80 1 1 1 1 1 8 -81 1 1 1 2 0 8 -82 3 0 0 1 -1 8 -83 4 0 1 1 0 8 -84 2 1 0 2 1 8 -85 3 0 0 1 -1 8 -86 4 0 1 1 0 8 -87 2 1 0 1 0 8 -88 1 1 1 1 1 8 -89 2 1 0 1 0 8 -90 4 0 1 1 0 8 -91 4 0 1 1 0 8 -92 4 0 1 1 0 8 -93 1 1 1 2 0 8 -94 2 1 0 1 0 8 -95 4 1 0 2 0 8 -96 2 1 0 1 0 8 -97 2 1 0 2 1 8 -98 4 0 1 1 0 8 -99 2 1 0 1 0 8 -100 4 0 1 1 0 8 -101 1 1 1 1 1 8 -102 2 1 0 2 1 8 -103 1 1 1 1 1 8 -104 4 0 1 1 0 8 -105 4 0 1 1 0 8 -106 4 0 1 1 0 8 -107 1 1 1 1 1 8 -108 2 1 0 1 0 8 -109 2 1 0 1 0 8 -110 3 1 1 1 0 8 -111 3 1 1 1 0 8 -112 1 1 1 1 1 8 -113 3 1 1 1 0 8 -114 4 0 1 1 0 8 -115 2 0 1 1 1 8 -116 2 1 0 1 0 8 -117 4 1 0 1 -1 8 -118 4 0 1 2 -1 8 -119 1 1 1 1 1 8 -120 1 1 1 1 1 8 -121 1 1 1 2 0 8 -122 3 1 1 1 0 8 -123 3 1 1 1 0 8 -124 3 1 1 2 -1 8 -125 2 0 1 1 1 8 -126 2 0 1 1 1 8 -127 1 1 1 1 1 8 -128 1 1 1 1 1 8 -129 2 0 1 1 1 8 -130 1 1 1 1 1 8 -131 2 0 1 1 1 8 -132 3 1 1 1 0 8 -133 4 1 0 1 -1 8 -134 1 1 1 1 1 8 -135 3 1 1 1 0 8 -136 4 0 1 1 0 8 -137 1 1 1 1 1 8 -138 2 0 1 1 1 8 -139 4 0 1 1 0 8 -140 4 0 1 2 -1 8 -141 2 0 1 1 1 8 -142 1 1 1 1 1 8 -143 3 1 1 1 0 8 -144 3 1 1 1 0 8 -145 3 1 1 2 -1 8 -146 3 1 1 1 0 8 -147 4 0 1 1 0 8 -148 1 1 1 2 0 8 -149 4 1 0 2 0 8 -150 2 0 1 1 1 8 -151 4 0 
1 1 0 8 -152 1 1 1 1 1 8 -153 2 0 1 1 1 8 -154 4 1 0 1 -1 8 -155 1 1 1 1 1 8 -156 4 0 1 1 0 8 -157 2 0 1 1 1 8 -158 2 0 1 2 0 8 -159 2 0 1 1 1 8 -160 3 1 1 1 0 8 -161 1 1 1 1 1 8 -162 4 0 1 1 0 8 -163 3 1 1 1 0 8 -164 1 1 1 1 1 8 -165 2 0 1 2 0 8 -166 4 0 1 1 0 8 -167 2 0 1 1 1 8 -168 2 0 1 2 0 8 -169 2 0 1 1 1 8 -170 3 1 1 1 0 8 -171 3 1 1 1 0 8 -172 4 0 1 1 0 8 -173 1 1 1 1 1 8 -174 3 1 1 2 -1 8 -175 1 1 1 1 1 8 -176 3 1 1 1 0 8 -177 3 1 1 1 0 8 -178 3 1 1 1 0 8 -179 2 0 1 1 1 8 -180 1 1 1 1 1 8 -181 1 1 1 1 1 8 -182 3 1 1 1 0 8 -183 2 0 1 1 1 8 -184 4 1 0 1 -1 8 -185 4 0 1 2 -1 8 -186 4 0 1 1 0 8 -187 3 1 1 1 0 8 -188 2 0 1 1 1 8 -189 1 1 1 1 1 8 -190 2 0 1 1 1 8 -191 1 1 1 1 1 8 -192 2 0 1 1 1 8 -193 2 0 1 2 0 8 -194 2 0 1 1 1 8 -195 1 1 1 2 0 8 -196 3 1 1 1 0 8 -197 2 0 1 1 1 8 -198 4 0 1 2 -1 8 -199 4 0 1 2 -1 8 -200 2 0 1 1 1 8 -201 3 1 1 2 -1 8 -202 4 0 1 1 0 8 -203 3 1 1 1 0 8 -204 4 0 1 1 0 8 -205 4 1 0 1 -1 8 -206 3 1 1 1 0 8 -207 1 1 1 2 0 8 -208 3 1 1 1 0 8 -209 1 1 1 2 0 8 -210 3 1 1 1 0 8 -211 4 0 1 1 0 8 -212 2 0 1 2 0 8 -213 3 1 1 1 0 8 -214 1 1 1 1 1 8 -215 2 0 1 1 1 8 -216 1 1 1 1 1 8 -217 2 1 0 1 0 8 -218 1 1 1 1 1 8 -219 1 1 1 2 0 8 -220 1 1 1 2 0 8 -221 4 0 1 2 -1 8 -222 3 1 1 1 0 8 -223 1 1 1 1 1 8 -224 3 1 1 1 0 8 -225 4 0 1 2 -1 8 -226 1 1 1 2 0 8 -227 4 1 0 1 -1 8 -228 2 0 1 1 1 8 -229 1 1 1 1 1 8 -230 2 0 1 1 1 8 -231 4 0 1 1 0 8 -232 4 0 1 2 -1 8 -233 2 0 1 1 1 8 -234 1 1 1 1 1 8 -235 4 0 1 1 0 8 -236 1 1 1 1 1 8 -237 3 1 1 2 -1 8 -238 4 0 1 2 -1 8 -239 1 1 1 2 0 8 -240 3 0 0 1 -1 8 -1 2 1 0 1 0 9 -2 3 0 0 2 0 9 -3 3 1 1 1 0 9 -4 1 1 1 1 1 9 -5 4 0 1 1 0 9 -6 1 1 1 1 1 9 -7 3 1 1 1 0 9 -8 4 1 0 1 -1 9 -9 3 1 1 1 0 9 -10 4 0 1 1 0 9 -11 4 1 0 1 -1 9 -12 2 1 0 1 0 9 -13 1 0 0 1 0 9 -14 3 0 0 1 -1 9 -15 3 1 1 1 0 9 -16 2 1 0 1 0 9 -17 2 1 0 1 0 9 -18 1 1 1 1 1 9 -19 2 0 1 2 0 9 -20 4 0 1 2 -1 9 -21 2 1 0 2 1 9 -22 2 1 0 1 0 9 -23 4 1 0 1 -1 9 -24 2 1 0 1 0 9 -25 1 1 1 1 1 9 -26 3 1 1 2 -1 9 -27 2 1 0 1 0 9 -28 3 0 0 1 -1 9 -29 4 1 0 1 -1 9 -30 1 1 1 1 1 9 -31 2 0 1 2 0 9 -32 3 0 0 1 -1 9 -33 3 1 1 1 0 9 -34 3 1 1 1 0 9 -35 2 1 0 1 0 9 -36 2 1 0 1 0 9 -37 2 1 0 1 0 9 -38 4 0 1 1 0 9 -39 1 1 1 1 1 9 -40 2 1 0 1 0 9 -41 4 0 1 1 0 9 -42 3 1 1 1 0 9 -43 1 1 1 1 1 9 -44 4 0 1 1 0 9 -45 4 1 0 1 -1 9 -46 3 1 1 1 0 9 -47 2 0 1 1 1 9 -48 3 1 1 2 -1 9 -49 3 1 1 1 0 9 -50 4 0 1 2 -1 9 -51 2 0 1 1 1 9 -52 4 0 1 1 0 9 -53 4 0 1 1 0 9 -54 1 1 1 1 1 9 -55 1 1 1 1 1 9 -56 1 1 1 2 0 9 -57 4 0 1 1 0 9 -58 1 1 1 2 0 9 -59 4 0 1 1 0 9 -60 1 1 1 1 1 9 -61 4 0 1 1 0 9 -62 2 0 1 1 1 9 -63 3 0 0 1 -1 9 -64 3 1 1 2 -1 9 -65 3 1 1 2 -1 9 -66 4 1 0 1 -1 9 -67 2 0 1 2 0 9 -68 3 0 0 2 0 9 -69 4 0 1 2 -1 9 -70 4 0 1 1 0 9 -71 3 1 1 1 0 9 -72 1 1 1 1 1 9 -73 2 0 1 1 1 9 -74 3 1 1 1 0 9 -75 3 1 1 1 0 9 -76 1 1 1 2 0 9 -77 2 0 1 1 1 9 -78 1 1 1 1 1 9 -79 4 0 1 1 0 9 -80 2 0 1 1 1 9 -81 3 1 1 2 -1 9 -82 2 0 1 1 1 9 -83 2 0 1 1 1 9 -84 1 1 1 1 1 9 -85 2 0 1 2 0 9 -86 3 0 0 1 -1 9 -87 4 0 1 1 0 9 -88 3 0 0 1 -1 9 -89 3 1 1 1 0 9 -90 2 0 1 2 0 9 -91 2 0 1 2 0 9 -92 2 0 1 2 0 9 -93 4 0 1 1 0 9 -94 2 0 1 1 1 9 -95 3 1 1 1 0 9 -96 4 0 1 1 0 9 -97 2 0 1 1 1 9 -98 3 0 0 1 -1 9 -99 4 0 1 1 0 9 -100 3 1 1 1 0 9 -101 2 0 1 1 1 9 -102 4 0 1 1 0 9 -103 2 0 1 1 1 9 -104 4 0 1 1 0 9 -105 2 0 1 2 0 9 -106 4 1 0 2 0 9 -107 1 1 1 1 1 9 -108 4 0 1 1 0 9 -109 4 0 1 1 0 9 -110 2 0 1 2 0 9 -111 2 0 1 1 1 9 -112 3 1 1 1 0 9 -113 2 0 1 2 0 9 -114 3 1 1 1 0 9 -115 3 1 1 2 -1 9 -116 4 0 1 1 0 9 -117 3 1 1 1 0 9 -118 1 0 0 1 0 9 -119 2 0 1 1 1 9 -120 3 1 1 1 0 9 -121 1 1 1 2 0 9 -122 1 1 1 1 1 9 -123 2 0 1 1 1 9 -124 2 0 1 1 
1 9 -125 2 0 1 1 1 9 -126 1 1 1 1 1 9 -127 1 1 1 2 0 9 -128 4 0 1 1 0 9 -129 4 0 1 1 0 9 -130 4 0 1 2 -1 9 -131 2 0 1 2 0 9 -132 1 1 1 1 1 9 -133 1 1 1 1 1 9 -134 2 0 1 1 1 9 -135 1 1 1 2 0 9 -136 3 1 1 1 0 9 -137 2 0 1 1 1 9 -138 3 1 1 1 0 9 -139 1 1 1 1 1 9 -140 1 1 1 1 1 9 -141 4 0 1 1 0 9 -142 1 1 1 1 1 9 -143 1 1 1 1 1 9 -144 4 0 1 1 0 9 -145 3 1 1 1 0 9 -146 4 1 0 1 -1 9 -147 3 1 1 2 -1 9 -148 4 0 1 1 0 9 -149 1 1 1 1 1 9 -150 3 1 1 1 0 9 -151 1 1 1 2 0 9 -152 2 0 1 1 1 9 -153 1 0 0 2 1 9 -154 2 0 1 1 1 9 -155 1 0 0 1 0 9 -156 4 0 1 2 -1 9 -157 2 0 1 1 1 9 -158 4 0 1 1 0 9 -159 1 1 1 1 1 9 -160 3 1 1 2 -1 9 -161 2 0 1 1 1 9 -162 3 1 1 1 0 9 -163 2 0 1 1 1 9 -164 2 0 1 1 1 9 -165 4 0 1 1 0 9 -166 2 0 1 1 1 9 -167 4 0 1 1 0 9 -168 1 1 1 1 1 9 -169 3 1 1 1 0 9 -170 1 1 1 2 0 9 -171 2 0 1 2 0 9 -172 4 0 1 1 0 9 -173 4 0 1 1 0 9 -174 4 0 1 1 0 9 -175 3 1 1 1 0 9 -176 2 0 1 1 1 9 -177 4 0 1 1 0 9 -178 1 1 1 2 0 9 -179 4 0 1 1 0 9 -180 1 1 1 1 1 9 -181 3 1 1 1 0 9 -182 4 0 1 1 0 9 -183 4 0 1 1 0 9 -184 1 1 1 1 1 9 -185 3 0 0 1 -1 9 -186 4 0 1 1 0 9 -187 4 0 1 1 0 9 -188 3 1 1 1 0 9 -189 1 1 1 1 1 9 -190 4 1 0 1 -1 9 -191 3 1 1 1 0 9 -192 4 0 1 2 -1 9 -193 3 1 1 1 0 9 -194 4 0 1 1 0 9 -195 2 0 1 1 1 9 -196 1 1 1 1 1 9 -197 3 1 1 1 0 9 -198 1 1 1 1 1 9 -199 2 0 1 1 1 9 -200 2 0 1 2 0 9 -201 2 0 1 1 1 9 -202 3 1 1 1 0 9 -203 4 1 0 1 -1 9 -204 3 1 1 1 0 9 -205 3 1 1 1 0 9 -206 2 0 1 2 0 9 -207 2 0 1 1 1 9 -208 1 1 1 1 1 9 -209 2 0 1 1 1 9 -210 3 1 1 1 0 9 -211 1 1 1 1 1 9 -212 3 1 1 1 0 9 -213 1 1 1 2 0 9 -214 3 1 1 1 0 9 -215 3 1 1 1 0 9 -216 4 0 1 1 0 9 -217 3 1 1 1 0 9 -218 1 1 1 1 1 9 -219 1 1 1 1 1 9 -220 4 0 1 1 0 9 -221 1 1 1 1 1 9 -222 3 1 1 1 0 9 -223 4 1 0 1 -1 9 -224 3 1 1 1 0 9 -225 1 1 1 1 1 9 -226 4 0 1 1 0 9 -227 1 1 1 1 1 9 -228 1 1 1 2 0 9 -229 1 1 1 1 1 9 -230 4 0 1 1 0 9 -231 2 0 1 1 1 9 -232 1 1 1 1 1 9 -233 1 1 1 1 1 9 -234 1 1 1 1 1 9 -235 1 1 1 1 1 9 -236 1 1 1 2 0 9 -237 3 1 1 1 0 9 -238 1 1 1 1 1 9 -239 2 0 1 1 1 9 -240 1 1 1 1 1 9 -1 1 0 0 1 0 10 -2 1 1 1 1 1 10 -3 1 1 1 1 1 10 -4 4 1 0 1 -1 10 -5 4 1 0 1 -1 10 -6 1 1 1 1 1 10 -7 4 0 1 1 0 10 -8 4 0 1 1 0 10 -9 2 0 1 1 1 10 -10 4 0 1 1 0 10 -11 1 1 1 1 1 10 -12 4 0 1 1 0 10 -13 1 1 1 1 1 10 -14 1 1 1 1 1 10 -15 4 1 0 2 0 10 -16 4 1 0 1 -1 10 -17 1 1 1 2 0 10 -18 1 1 1 1 1 10 -19 4 0 1 1 0 10 -20 4 0 1 1 0 10 -21 1 1 1 1 1 10 -22 3 0 0 1 -1 10 -23 3 1 1 2 -1 10 -24 4 0 1 2 -1 10 -25 2 0 1 1 1 10 -26 4 1 0 1 -1 10 -27 1 1 1 1 1 10 -28 3 1 1 1 0 10 -29 3 0 0 1 -1 10 -30 2 1 0 1 0 10 -31 1 1 1 1 1 10 -32 3 1 1 1 0 10 -33 2 1 0 1 0 10 -34 3 1 1 1 0 10 -35 2 0 1 1 1 10 -36 2 0 1 1 1 10 -37 2 0 1 1 1 10 -38 4 1 0 1 -1 10 -39 3 1 1 2 -1 10 -40 1 1 1 1 1 10 -41 3 0 0 1 -1 10 -42 3 0 0 2 0 10 -43 3 0 0 2 0 10 -44 1 1 1 1 1 10 -45 2 0 1 1 1 10 -46 3 0 0 1 -1 10 -47 3 1 1 1 0 10 -48 2 0 1 1 1 10 -49 4 1 0 1 -1 10 -50 3 0 0 1 -1 10 -51 2 1 0 1 0 10 -52 3 1 1 1 0 10 -53 4 0 1 1 0 10 -54 3 1 1 1 0 10 -55 2 0 1 1 1 10 -56 1 1 1 1 1 10 -57 4 0 1 1 0 10 -58 3 1 1 2 -1 10 -59 1 1 1 1 1 10 -60 3 1 1 1 0 10 -61 4 0 1 1 0 10 -62 3 1 1 1 0 10 -63 2 0 1 1 1 10 -64 2 0 1 1 1 10 -65 2 0 1 2 0 10 -66 1 1 1 1 1 10 -67 3 0 0 2 0 10 -68 1 1 1 1 1 10 -69 2 0 1 2 0 10 -70 4 0 1 1 0 10 -71 2 0 1 1 1 10 -72 1 1 1 2 0 10 -73 1 1 1 1 1 10 -74 2 0 1 1 1 10 -75 1 1 1 1 1 10 -76 4 0 1 1 0 10 -77 4 0 1 1 0 10 -78 4 0 1 1 0 10 -79 1 1 1 1 1 10 -80 2 0 1 1 1 10 -81 2 0 1 2 0 10 -82 3 1 1 2 -1 10 -83 2 1 0 1 0 10 -84 3 0 0 2 0 10 -85 3 0 0 2 0 10 -86 3 0 0 1 -1 10 -87 2 0 1 2 0 10 -88 4 0 1 2 -1 10 -89 2 0 1 1 1 10 -90 4 0 1 1 0 10 -91 4 1 0 1 -1 10 -92 3 1 1 1 
0 10 -93 4 1 0 2 0 10 -94 3 1 1 1 0 10 -95 1 1 1 1 1 10 -96 3 1 1 1 0 10 -97 2 0 1 1 1 10 -98 2 0 1 1 1 10 -99 2 0 1 1 1 10 -100 1 1 1 1 1 10 -101 4 0 1 1 0 10 -102 2 0 1 1 1 10 -103 1 1 1 1 1 10 -104 2 0 1 1 1 10 -105 1 1 1 1 1 10 -106 3 1 1 1 0 10 -107 4 1 0 1 -1 10 -108 2 0 1 1 1 10 -109 1 1 1 2 0 10 -110 3 1 1 1 0 10 -111 4 0 1 1 0 10 -112 4 0 1 1 0 10 -113 2 0 1 2 0 10 -114 3 0 0 1 -1 10 -115 1 1 1 2 0 10 -116 1 1 1 1 1 10 -117 2 0 1 1 1 10 -118 4 0 1 1 0 10 -119 4 0 1 1 0 10 -120 4 0 1 1 0 10 -121 4 0 1 2 -1 10 -122 2 0 1 2 0 10 -123 4 0 1 1 0 10 -124 3 1 1 1 0 10 -125 2 0 1 2 0 10 -126 3 1 1 1 0 10 -127 1 1 1 1 1 10 -128 4 0 1 1 0 10 -129 2 0 1 1 1 10 -130 4 0 1 2 -1 10 -131 4 0 1 1 0 10 -132 1 1 1 1 1 10 -133 3 0 0 1 -1 10 -134 4 0 1 1 0 10 -135 1 1 1 1 1 10 -136 2 0 1 1 1 10 -137 1 1 1 1 1 10 -138 1 1 1 1 1 10 -139 4 0 1 1 0 10 -140 3 1 1 1 0 10 -141 2 0 1 1 1 10 -142 4 0 1 1 0 10 -143 1 1 1 2 0 10 -144 2 0 1 1 1 10 -145 3 1 1 1 0 10 -146 4 0 1 1 0 10 -147 1 1 1 1 1 10 -148 2 0 1 1 1 10 -149 1 1 1 1 1 10 -150 3 0 0 1 -1 10 -151 2 0 1 1 1 10 -152 2 0 1 1 1 10 -153 3 1 1 1 0 10 -154 3 0 0 2 0 10 -155 2 0 1 2 0 10 -156 2 0 1 1 1 10 -157 4 0 1 1 0 10 -158 3 1 1 1 0 10 -159 4 1 0 2 0 10 -160 3 1 1 1 0 10 -161 1 1 1 1 1 10 -162 2 0 1 1 1 10 -163 1 1 1 1 1 10 -164 2 0 1 1 1 10 -165 1 1 1 1 1 10 -166 4 0 1 1 0 10 -167 3 1 1 1 0 10 -168 3 1 1 1 0 10 -169 1 0 0 1 0 10 -170 3 1 1 1 0 10 -171 3 1 1 1 0 10 -172 2 1 0 1 0 10 -173 4 0 1 1 0 10 -174 1 1 1 1 1 10 -175 2 0 1 1 1 10 -176 4 0 1 2 -1 10 -177 3 1 1 1 0 10 -178 1 1 1 1 1 10 -179 1 1 1 1 1 10 -180 1 1 1 2 0 10 -181 1 1 1 1 1 10 -182 1 1 1 1 1 10 -183 4 0 1 2 -1 10 -184 4 1 0 1 -1 10 -185 2 0 1 1 1 10 -186 1 0 0 1 0 10 -187 2 0 1 2 0 10 -188 2 0 1 1 1 10 -189 3 1 1 1 0 10 -190 4 1 0 1 -1 10 -191 2 0 1 1 1 10 -192 4 0 1 1 0 10 -193 1 1 1 2 0 10 -194 2 0 1 1 1 10 -195 3 1 1 1 0 10 -196 2 0 1 1 1 10 -197 3 1 1 1 0 10 -198 4 0 1 1 0 10 -199 3 1 1 1 0 10 -200 2 0 1 1 1 10 -201 1 1 1 1 1 10 -202 3 1 1 1 0 10 -203 1 1 1 2 0 10 -204 3 1 1 1 0 10 -205 4 0 1 1 0 10 -206 3 1 1 2 -1 10 -207 1 1 1 2 0 10 -208 1 1 1 1 1 10 -209 1 1 1 1 1 10 -210 4 0 1 1 0 10 -211 1 1 1 1 1 10 -212 3 1 1 1 0 10 -213 2 0 1 2 0 10 -214 2 0 1 2 0 10 -215 4 0 1 1 0 10 -216 3 1 1 1 0 10 -217 4 0 1 1 0 10 -218 1 1 1 1 1 10 -219 4 0 1 1 0 10 -220 1 1 1 2 0 10 -221 3 0 0 1 -1 10 -222 2 0 1 2 0 10 -223 2 0 1 1 1 10 -224 1 1 1 1 1 10 -225 4 0 1 1 0 10 -226 1 1 1 2 0 10 -227 2 0 1 1 1 10 -228 4 0 1 1 0 10 -229 2 0 1 1 1 10 -230 3 1 1 1 0 10 -231 3 1 1 1 0 10 -232 3 1 1 1 0 10 -233 3 1 1 1 0 10 -234 2 1 0 1 0 10 -235 3 1 1 2 -1 10 -236 1 1 1 1 1 10 -237 3 0 0 1 -1 10 -238 4 0 1 1 0 10 -239 4 0 1 1 0 10 -240 2 0 1 1 1 10 diff --git a/inst/extdata/igt_exampleData.txt b/inst/extdata/igt_exampleData.txt deleted file mode 100644 index 3a6252af..00000000 --- a/inst/extdata/igt_exampleData.txt +++ /dev/null @@ -1,401 +0,0 @@ -trial choice gain loss subjID -1 3 50 0 1001 -2 2 100 0 1001 -3 3 50 0 1001 -4 4 50 0 1001 -5 4 50 0 1001 -6 4 50 0 1001 -7 4 50 0 1001 -8 3 50 -50 1001 -9 4 50 0 1001 -10 4 50 0 1001 -11 3 50 0 1001 -12 4 50 0 1001 -13 4 50 0 1001 -14 4 50 0 1001 -15 4 50 -250 1001 -16 4 50 0 1001 -17 2 100 0 1001 -18 4 50 0 1001 -19 1 100 0 1001 -20 2 100 0 1001 -21 2 100 0 1001 -22 2 100 0 1001 -23 3 50 -50 1001 -24 2 100 0 1001 -25 4 50 0 1001 -26 1 100 0 1001 -27 1 100 -150 1001 -28 2 100 0 1001 -29 2 100 0 1001 -30 2 100 -1250 1001 -31 1 100 0 1001 -32 4 50 0 1001 -33 1 100 -300 1001 -34 4 50 0 1001 -35 1 100 0 1001 -36 4 50 0 1001 -37 1 100 -200 1001 -38 2 100 0 1001 -39 1 100 
0 1001 -40 4 50 0 1001 -41 4 50 0 1001 -42 2 100 0 1001 -43 4 50 0 1001 -44 4 50 -250 1001 -45 4 50 0 1001 -46 2 100 0 1001 -47 4 50 0 1001 -48 1 100 -250 1001 -49 4 50 0 1001 -50 4 50 0 1001 -51 4 50 0 1001 -52 3 50 0 1001 -53 3 50 -50 1001 -54 3 50 0 1001 -55 3 50 -50 1001 -56 3 50 -50 1001 -57 2 100 0 1001 -58 2 100 -1250 1001 -59 4 50 0 1001 -60 4 50 0 1001 -61 4 50 0 1001 -62 4 50 -250 1001 -63 4 50 0 1001 -64 4 50 0 1001 -65 3 50 0 1001 -66 3 50 -25 1001 -67 3 50 -75 1001 -68 4 50 0 1001 -69 4 50 0 1001 -70 4 50 0 1001 -71 4 50 -250 1001 -72 4 50 0 1001 -73 4 50 0 1001 -74 4 50 0 1001 -75 4 50 0 1001 -76 4 50 0 1001 -77 4 50 0 1001 -78 3 50 0 1001 -79 4 50 0 1001 -80 4 50 0 1001 -81 4 50 0 1001 -82 4 50 0 1001 -83 4 50 0 1001 -84 4 50 0 1001 -85 4 50 0 1001 -86 4 50 0 1001 -87 4 50 -250 1001 -88 4 50 0 1001 -89 4 50 0 1001 -90 4 50 0 1001 -91 4 50 0 1001 -92 4 50 0 1001 -93 4 50 0 1001 -94 4 50 0 1001 -95 4 50 0 1001 -96 4 50 0 1001 -97 4 50 -250 1001 -98 4 50 0 1001 -99 4 50 0 1001 -100 4 50 0 1001 -1 3 50 0 1002 -2 3 50 0 1002 -3 3 50 -50 1002 -4 3 50 0 1002 -5 3 50 -50 1002 -6 1 100 0 1002 -7 3 50 0 1002 -8 2 100 0 1002 -9 3 50 -50 1002 -10 3 50 0 1002 -11 4 50 0 1002 -12 3 50 -50 1002 -13 3 50 -50 1002 -14 1 100 0 1002 -15 1 100 -150 1002 -16 3 50 0 1002 -17 4 50 0 1002 -18 4 50 0 1002 -19 4 50 0 1002 -20 4 50 0 1002 -21 4 50 0 1002 -22 3 50 -25 1002 -23 4 50 0 1002 -24 4 50 0 1002 -25 3 50 -75 1002 -26 3 50 0 1002 -27 4 50 0 1002 -28 4 50 -250 1002 -29 4 50 0 1002 -30 4 50 0 1002 -31 4 50 0 1002 -32 4 50 0 1002 -33 4 50 0 1002 -34 4 50 0 1002 -35 4 50 0 1002 -36 4 50 0 1002 -37 4 50 0 1002 -38 4 50 -250 1002 -39 1 100 0 1002 -40 3 50 0 1002 -41 3 50 0 1002 -42 3 50 -25 1002 -43 3 50 -75 1002 -44 1 100 -300 1002 -45 1 100 0 1002 -46 3 50 0 1002 -47 4 50 0 1002 -48 4 50 0 1002 -49 4 50 0 1002 -50 4 50 0 1002 -51 4 50 0 1002 -52 4 50 0 1002 -53 4 50 0 1002 -54 4 50 0 1002 -55 4 50 -250 1002 -56 4 50 0 1002 -57 4 50 0 1002 -58 4 50 0 1002 -59 4 50 0 1002 -60 4 50 0 1002 -61 4 50 -250 1002 -62 4 50 0 1002 -63 4 50 0 1002 -64 4 50 0 1002 -65 4 50 0 1002 -66 4 50 0 1002 -67 4 50 0 1002 -68 4 50 0 1002 -69 4 50 0 1002 -70 4 50 0 1002 -71 4 50 0 1002 -72 4 50 0 1002 -73 4 50 0 1002 -74 4 50 0 1002 -75 4 50 0 1002 -76 1 100 -200 1002 -77 4 50 -250 1002 -78 4 50 0 1002 -79 4 50 0 1002 -80 4 50 0 1002 -81 4 50 0 1002 -82 4 50 0 1002 -83 4 50 0 1002 -84 4 50 0 1002 -85 4 50 0 1002 -86 4 50 0 1002 -87 4 50 -250 1002 -88 4 50 0 1002 -89 4 50 0 1002 -90 4 50 0 1002 -91 4 50 0 1002 -92 4 50 0 1002 -93 4 50 0 1002 -94 4 50 0 1002 -95 4 50 0 1002 -96 4 50 -250 1002 -97 4 50 0 1002 -98 4 50 0 1002 -99 4 50 0 1002 -100 4 50 0 1002 -1 4 50 0 1003 -2 4 50 0 1003 -3 4 50 0 1003 -4 4 50 0 1003 -5 4 50 0 1003 -6 4 50 0 1003 -7 2 100 0 1003 -8 4 50 0 1003 -9 2 100 0 1003 -10 4 50 0 1003 -11 4 50 0 1003 -12 4 50 -250 1003 -13 4 50 0 1003 -14 2 100 0 1003 -15 1 100 0 1003 -16 3 50 0 1003 -17 2 100 0 1003 -18 1 100 0 1003 -19 2 100 0 1003 -20 2 100 0 1003 -21 2 100 0 1003 -22 2 100 0 1003 -23 2 100 -1250 1003 -24 2 100 0 1003 -25 1 100 -150 1003 -26 4 50 0 1003 -27 2 100 0 1003 -28 2 100 0 1003 -29 4 50 0 1003 -30 2 100 0 1003 -31 4 50 0 1003 -32 1 100 0 1003 -33 1 100 -300 1003 -34 4 50 0 1003 -35 4 50 0 1003 -36 3 50 0 1003 -37 4 50 0 1003 -38 4 50 0 1003 -39 4 50 0 1003 -40 4 50 -250 1003 -41 2 100 -1250 1003 -42 3 50 -50 1003 -43 1 100 0 1003 -44 3 50 0 1003 -45 3 50 -50 1003 -46 4 50 0 1003 -47 4 50 0 1003 -48 4 50 0 1003 -49 3 50 0 1003 -50 4 50 0 1003 -51 2 100 0 1003 -52 4 50 0 1003 -53 1 100 -200 
1003 -54 4 50 0 1003 -55 4 50 0 1003 -56 2 100 0 1003 -57 4 50 0 1003 -58 4 50 -250 1003 -59 4 50 0 1003 -60 1 100 0 1003 -61 1 100 -250 1003 -62 2 100 0 1003 -63 4 50 0 1003 -64 3 50 -50 1003 -65 1 100 -350 1003 -66 4 50 0 1003 -67 4 50 0 1003 -68 3 50 0 1003 -69 3 50 -50 1003 -70 4 50 0 1003 -71 3 50 -50 1003 -72 3 50 0 1003 -73 4 50 -250 1003 -74 3 50 -25 1003 -75 1 100 0 1003 -76 1 100 -350 1003 -77 2 100 0 1003 -78 3 50 -75 1003 -79 2 100 0 1003 -80 2 100 0 1003 -81 3 50 0 1003 -82 2 100 -1250 1003 -83 3 50 0 1003 -84 3 50 0 1003 -85 4 50 0 1003 -86 3 50 -25 1003 -87 4 50 0 1003 -88 1 100 0 1003 -89 3 50 -75 1003 -90 3 50 0 1003 -91 3 50 -50 1003 -92 3 50 0 1003 -93 3 50 0 1003 -94 3 50 0 1003 -95 3 50 -50 1003 -96 1 100 -250 1003 -97 3 50 -25 1003 -98 3 50 -50 1003 -99 3 50 0 1003 -100 4 50 0 1003 -1 3 50 0 1004 -2 4 50 0 1004 -3 1 100 0 1004 -4 4 50 0 1004 -5 4 50 0 1004 -6 4 50 0 1004 -7 4 50 0 1004 -8 1 100 0 1004 -9 3 50 0 1004 -10 3 50 -50 1004 -11 1 100 -150 1004 -12 1 100 0 1004 -13 1 100 -300 1004 -14 4 50 0 1004 -15 1 100 0 1004 -16 4 50 0 1004 -17 4 50 0 1004 -18 2 100 0 1004 -19 4 50 0 1004 -20 4 50 -250 1004 -21 1 100 -200 1004 -22 2 100 0 1004 -23 3 50 0 1004 -24 4 50 0 1004 -25 2 100 0 1004 -26 2 100 0 1004 -27 2 100 0 1004 -28 2 100 0 1004 -29 2 100 0 1004 -30 2 100 0 1004 -31 2 100 -1250 1004 -32 3 50 -50 1004 -33 4 50 0 1004 -34 1 100 0 1004 -35 2 100 0 1004 -36 3 50 0 1004 -37 1 100 -250 1004 -38 3 50 -50 1004 -39 3 50 0 1004 -40 3 50 -50 1004 -41 4 50 0 1004 -42 4 50 0 1004 -43 4 50 0 1004 -44 4 50 0 1004 -45 4 50 0 1004 -46 4 50 0 1004 -47 4 50 0 1004 -48 4 50 -250 1004 -49 1 100 -350 1004 -50 4 50 0 1004 -51 4 50 0 1004 -52 4 50 0 1004 -53 3 50 -50 1004 -54 4 50 0 1004 -55 3 50 0 1004 -56 3 50 -25 1004 -57 4 50 0 1004 -58 4 50 0 1004 -59 4 50 0 1004 -60 4 50 0 1004 -61 4 50 -250 1004 -62 3 50 -75 1004 -63 3 50 0 1004 -64 3 50 0 1004 -65 4 50 0 1004 -66 3 50 0 1004 -67 2 100 0 1004 -68 4 50 0 1004 -69 1 100 0 1004 -70 4 50 0 1004 -71 3 50 -25 1004 -72 3 50 -75 1004 -73 4 50 0 1004 -74 4 50 0 1004 -75 3 50 0 1004 -76 4 50 -250 1004 -77 3 50 -50 1004 -78 3 50 0 1004 -79 3 50 0 1004 -80 2 100 0 1004 -81 4 50 0 1004 -82 4 50 0 1004 -83 3 50 0 1004 -84 3 50 -50 1004 -85 2 100 0 1004 -86 2 100 -1250 1004 -87 3 50 -25 1004 -88 2 100 0 1004 -89 3 50 -50 1004 -90 3 50 0 1004 -91 3 50 0 1004 -92 4 50 0 1004 -93 4 50 0 1004 -94 4 50 0 1004 -95 3 50 -75 1004 -96 4 50 0 1004 -97 4 50 0 1004 -98 3 50 -50 1004 -99 4 50 0 1004 -100 1 100 -350 1004 \ No newline at end of file diff --git a/inst/extdata/peer_exampleData.txt b/inst/extdata/peer_exampleData.txt deleted file mode 100644 index d4e222eb..00000000 --- a/inst/extdata/peer_exampleData.txt +++ /dev/null @@ -1,361 +0,0 @@ -trial condition p_gamble risky_Lpayoff risky_Hpayoff safe_Lpayoff safe_Hpayoff risky_color total_presses choice bonus subjID -1 1 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 1 -2 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -3 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -4 3 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 1 -5 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -6 3 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 1 -7 3 0.7 0.9 38.6 18 19.6 orange 1 1 0.25 1 -8 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -9 1 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -10 1 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 -11 0 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -12 0 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -13 1 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 -14 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -15 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -16 2 0.4 
1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -17 2 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 -18 1 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -19 0 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -20 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -21 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -22 1 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 -23 0 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -24 3 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 -25 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -26 1 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -27 2 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 1 -28 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -29 1 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 -30 2 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -31 2 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 1 -32 3 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -33 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -34 0 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 -35 0 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 -36 2 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -37 3 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -38 0 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 -39 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -40 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -41 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -42 0 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -43 1 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 1 -44 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -45 3 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -46 0 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 1 -47 2 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 -48 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -49 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -50 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -51 3 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -52 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -53 3 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 -54 0 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 1 -55 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -56 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -57 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -58 2 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 -59 2 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -60 2 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -61 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -62 0 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -63 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -64 3 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -65 0 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 -66 0 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 -67 3 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 -68 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 -69 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 -70 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -71 2 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 -72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 -1 1 0.7 1.2 51.1 23 24.4 orange 1 0 2.68 2 -2 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -3 3 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -4 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -5 1 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 -6 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -7 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -8 3 0.5 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -9 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -10 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -11 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -12 0 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 -13 0 0.5 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -14 3 0.8 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -15 0 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 -16 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -17 0 0.4 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -18 2 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -19 0 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 -20 3 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 
-21 1 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -22 2 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 -23 1 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 -24 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -25 0 0.7 1.2 51.1 23 24.4 orange 1 1 2.68 2 -26 0 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -27 2 0.6 1.2 51.1 23 24.4 orange 1 0 2.68 2 -28 1 0.4 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -29 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -30 3 0.4 1.2 51.1 23 24.4 orange 1 1 2.68 2 -31 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -32 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -33 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -34 1 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 -35 2 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -36 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -37 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -38 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -39 0 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 -40 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -41 0 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -42 1 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 -43 0 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -44 2 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -45 3 0.7 1.2 51.1 23 24.4 orange 1 1 2.68 2 -46 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -47 3 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 -48 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -49 2 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 -50 1 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -51 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -52 2 0.6 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -53 1 0.8 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -54 2 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 -55 2 0.7 1.2 51.1 23 24.4 orange 1 0 2.68 2 -56 3 0.6 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -57 2 0.9 1.2 51.1 23 24.4 orange 1 1 2.68 2 -58 2 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -59 1 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 -60 2 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -61 3 0.8 1.2 51.1 23 24.4 orange 1 1 2.68 2 -62 0 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -63 0 0.7 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -64 3 0.8 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -65 2 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -66 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -67 0 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 -68 2 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 -69 0 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 -70 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 -71 3 0.5 1.2 51.1 23 24.4 orange 1 1 2.68 2 -72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 -1 0 0.8 1.2 51.1 23 24.4 orange 2 0 0.25 3 -2 1 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -3 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -4 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -5 3 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -6 0 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 -7 3 0.6 1.2 51.1 23 24.4 orange 1 1 0.25 3 -8 0 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 -9 2 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -10 0 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -11 2 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 -12 2 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -13 1 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 -14 0 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -15 2 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -16 1 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -17 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -18 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -19 0 0.4 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -20 3 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 -21 3 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -22 2 0.7 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -23 3 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -24 0 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -25 2 0.6 1.2 51.1 23 24.4 orange 1 1 
0.25 3 -26 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -27 1 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -28 2 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -29 0 0.6 1.2 51.1 23 24.4 orange 2 1 0.25 3 -30 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -31 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -32 2 0.9 1.2 51.1 23 24.4 orange 1 0 0.25 3 -33 0 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 -34 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -35 1 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -36 3 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 -37 2 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 -38 2 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -39 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -40 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -41 3 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 -42 0 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -43 0 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -44 3 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -45 3 0.7 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -46 2 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -47 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -48 3 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 -49 1 0.6 1.2 51.1 23 24.4 orange 1 1 0.25 3 -50 1 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 -51 1 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 -52 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -53 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -54 1 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -55 1 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 -56 3 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 -57 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -58 2 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 -59 0 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -60 2 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 -61 1 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 -62 1 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -63 0 0.7 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -64 2 0.4 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -65 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -66 0 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -67 0 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -68 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 -69 3 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 -70 0 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 -71 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 -72 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 -1 1 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 4 -2 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -3 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -4 3 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 4 -5 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -6 3 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 4 -7 3 0.7 0.9 38.6 18 19.6 orange 1 1 0.25 4 -8 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -9 1 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -10 1 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 -11 0 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -12 0 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -13 1 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 -14 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -15 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -16 2 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -17 2 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 -18 1 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -19 0 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -20 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -21 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -22 1 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 -23 0 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -24 3 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 -25 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -26 1 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -27 2 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 4 -28 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -29 1 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 -30 2 0.8 1.7 56.8 23.1 33.2 orange 1 
0 0.25 4 -31 2 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 4 -32 3 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -33 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -34 0 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 -35 0 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 -36 2 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -37 3 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -38 0 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 -39 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -40 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -41 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -42 0 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -43 1 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 4 -44 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -45 3 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -46 0 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 4 -47 2 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 -48 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -49 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -50 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -51 3 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -52 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -53 3 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 -54 0 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 4 -55 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -56 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -57 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -58 2 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 -59 2 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -60 2 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -61 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -62 0 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -63 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -64 3 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -65 0 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 -66 0 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 -67 3 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 -68 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 -69 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 -70 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -71 2 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 -72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 -1 1 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -2 1 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -3 1 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -4 1 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -5 2 0.7 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -6 1 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -7 3 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -8 0 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -9 2 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -10 3 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -11 2 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -12 0 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -13 1 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -14 3 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -15 2 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -16 2 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -17 2 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -18 0 0.7 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -19 0 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -20 2 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -21 2 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -22 1 0.7 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -23 2 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -24 0 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -25 1 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -26 3 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -27 0 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -28 1 0.6 1.6 55.5 26.6 28.3 darkcyan 2 0 0.25 5 -29 3 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -30 0 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -31 0 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -32 1 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -33 1 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -34 0 0.9 
1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -35 2 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -36 3 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -37 3 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -38 3 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -39 3 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -40 3 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -41 1 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -42 2 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -43 1 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -44 3 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -45 3 0.7 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -46 0 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -47 1 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -48 2 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -49 2 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -50 0 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -51 2 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -52 3 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -53 0 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -54 1 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -55 0 0.7 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -56 2 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -57 3 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -58 3 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -59 2 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -60 0 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -61 2 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -62 0 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 -63 0 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -64 0 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 -65 3 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -66 3 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -67 1 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 -68 2 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 -69 0 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 -70 3 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -71 1 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 -72 1 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 diff --git a/inst/extdata/prl_exampleData.txt b/inst/extdata/prl_exampleData.txt deleted file mode 100644 index 21abeae7..00000000 --- a/inst/extdata/prl_exampleData.txt +++ /dev/null @@ -1,2001 +0,0 @@ -subjID trial choice outcome -1 1 1 1 -1 2 2 1 -1 3 1 1 -1 4 1 1 -1 5 1 1 -1 6 1 1 -1 7 1 1 -1 8 1 1 -1 9 1 -1 -1 10 2 1 -1 11 2 1 -1 12 2 1 -1 13 2 1 -1 14 2 -1 -1 15 1 1 -1 16 2 1 -1 17 1 -1 -1 18 2 1 -1 19 2 -1 -1 20 2 -1 -1 21 1 1 -1 22 1 -1 -1 23 1 -1 -1 24 2 -1 -1 25 2 1 -1 26 2 1 -1 27 2 -1 -1 28 1 -1 -1 29 2 1 -1 30 2 1 -1 31 2 1 -1 32 2 1 -1 33 2 -1 -1 34 2 1 -1 35 2 -1 -1 36 2 1 -1 37 2 1 -1 38 1 -1 -1 39 1 -1 -1 40 2 -1 -1 41 2 -1 -1 42 2 -1 -1 43 1 1 -1 44 1 1 -1 45 1 -1 -1 46 1 1 -1 47 1 1 -1 48 1 1 -1 49 1 -1 -1 50 1 1 -1 51 1 1 -1 52 1 1 -1 53 1 1 -1 54 1 1 -1 55 1 1 -1 56 1 -1 -1 57 1 -1 -1 58 2 1 -1 59 2 1 -1 60 2 -1 -1 61 1 1 -1 62 2 -1 -1 63 1 1 -1 64 1 1 -1 65 1 1 -1 66 1 1 -1 67 1 1 -1 68 1 1 -1 69 1 -1 -1 70 2 1 -1 71 1 -1 -1 72 1 1 -1 73 1 -1 -1 74 2 1 -1 75 2 1 -1 76 2 -1 -1 77 1 1 -1 78 2 1 -1 79 1 1 -1 80 1 -1 -1 81 1 1 -1 82 1 1 -1 83 2 -1 -1 84 1 -1 -1 85 2 -1 -1 86 2 1 -1 87 2 -1 -1 88 1 1 -1 89 1 1 -1 90 1 -1 -1 91 1 -1 -1 92 2 1 -1 93 2 1 -1 94 1 1 -1 95 2 1 -1 96 2 1 -1 97 2 1 -1 98 2 -1 -1 99 1 -1 -1 100 2 1 -2 1 1 1 -2 2 1 -1 -2 3 1 -1 -2 4 2 -1 -2 5 2 -1 -2 6 1 1 -2 7 2 -1 -2 8 2 1 -2 9 1 -1 -2 10 1 1 -2 11 1 -1 -2 12 2 -1 -2 13 2 1 -2 14 2 -1 -2 15 1 1 -2 16 2 1 -2 17 1 1 -2 18 1 1 -2 19 2 1 -2 20 1 1 -2 21 1 1 -2 22 2 1 -2 23 1 1 -2 24 1 -1 -2 25 2 -1 -2 26 1 1 -2 27 1 -1 -2 28 1 1 -2 29 1 -1 -2 30 2 1 -2 31 1 -1 -2 32 1 1 -2 33 2 -1 -2 34 1 -1 -2 35 2 1 -2 36 2 1 -2 37 2 1 -2 38 1 -1 -2 39 2 1 -2 40 1 -1 -2 41 2 -1 
[... remaining deleted rows of the preceding example-data hunk, flattened in extraction: subjects 2 through 20, trials 1-100, four whitespace-separated columns per row (subject, trial, choice in {1, 2}, outcome in {-1, 1}) ...]
diff --git a/inst/extdata/prl_multipleB_exampleData.txt b/inst/extdata/prl_multipleB_exampleData.txt
deleted file mode 100644
index 16725497..00000000
--- a/inst/extdata/prl_multipleB_exampleData.txt
+++ /dev/null
@@ -1,1801 +0,0 @@
-ExperimentName subjID block trial choice outcome choiceSwitch choice.ACC choice.RT Subject_Block
-PRL_Young_Final 5038 1 1 2 25 1 1 1430 5038_1
-PRL_Young_Final 5038 1 2 2 25 0 1 439 5038_1
-PRL_Young_Final 5038 1 3 2 -25 0 1 374 5038_1
[... the hunk deletes 1,800 data rows in all, one row per trial in the same ten-column format (200 trials per block, 3 blocks per subject; choice in {1, 2}, outcome in {-25, 25}); the visible portion covers subjects 5038 and 5036, is flattened in extraction, and is truncated mid-row at subject 5036, block 3, trial 51 ...]
2 25 0 1 457 5036_3 -PRL_Young_Final 5036 3 52 2 25 0 1 667 5036_3 -PRL_Young_Final 5036 3 53 2 25 0 1 576 5036_3 -PRL_Young_Final 5036 3 54 2 -25 0 0 196 5036_3 -PRL_Young_Final 5036 3 55 2 -25 0 0 213 5036_3 -PRL_Young_Final 5036 3 56 1 25 1 1 867 5036_3 -PRL_Young_Final 5036 3 57 1 25 0 1 183 5036_3 -PRL_Young_Final 5036 3 58 1 25 0 1 403 5036_3 -PRL_Young_Final 5036 3 59 1 -25 0 1 460 5036_3 -PRL_Young_Final 5036 3 60 1 25 0 1 747 5036_3 -PRL_Young_Final 5036 3 61 1 25 0 1 758 5036_3 -PRL_Young_Final 5036 3 62 1 -25 0 0 62 5036_3 -PRL_Young_Final 5036 3 63 1 -25 0 0 715 5036_3 -PRL_Young_Final 5036 3 64 1 25 0 0 231 5036_3 -PRL_Young_Final 5036 3 65 1 25 0 0 458 5036_3 -PRL_Young_Final 5036 3 66 1 -25 0 0 487 5036_3 -PRL_Young_Final 5036 3 67 1 -25 0 0 59 5036_3 -PRL_Young_Final 5036 3 68 2 -25 1 1 52 5036_3 -PRL_Young_Final 5036 3 69 2 25 0 1 789 5036_3 -PRL_Young_Final 5036 3 70 2 25 0 1 432 5036_3 -PRL_Young_Final 5036 3 71 2 25 0 1 759 5036_3 -PRL_Young_Final 5036 3 72 2 25 0 1 3790 5036_3 -PRL_Young_Final 5036 3 73 2 -25 0 0 638 5036_3 -PRL_Young_Final 5036 3 74 2 -25 0 0 1516 5036_3 -PRL_Young_Final 5036 3 75 1 25 1 1 759 5036_3 -PRL_Young_Final 5036 3 76 1 -25 0 1 455 5036_3 -PRL_Young_Final 5036 3 77 1 -25 0 1 582 5036_3 -PRL_Young_Final 5036 3 78 1 25 0 1 133 5036_3 -PRL_Young_Final 5036 3 79 1 25 0 1 456 5036_3 -PRL_Young_Final 5036 3 80 1 25 0 1 619 5036_3 -PRL_Young_Final 5036 3 81 1 25 0 1 513 5036_3 -PRL_Young_Final 5036 3 82 1 25 0 1 985 5036_3 -PRL_Young_Final 5036 3 83 1 -25 0 0 134 5036_3 -PRL_Young_Final 5036 3 84 1 -25 0 0 569 5036_3 -PRL_Young_Final 5036 3 85 2 25 1 1 473 5036_3 -PRL_Young_Final 5036 3 86 2 -25 0 1 891 5036_3 -PRL_Young_Final 5036 3 87 2 25 0 1 487 5036_3 -PRL_Young_Final 5036 3 88 2 25 0 1 333 5036_3 -PRL_Young_Final 5036 3 89 2 25 0 1 424 5036_3 -PRL_Young_Final 5036 3 90 2 25 0 1 474 5036_3 -PRL_Young_Final 5036 3 91 2 25 0 1 407 5036_3 -PRL_Young_Final 5036 3 92 2 25 0 1 236 5036_3 -PRL_Young_Final 5036 3 93 2 25 0 0 57 5036_3 -PRL_Young_Final 5036 3 94 2 -25 0 0 785 5036_3 -PRL_Young_Final 5036 3 95 1 25 1 1 933 5036_3 -PRL_Young_Final 5036 3 96 1 -25 0 1 522 5036_3 -PRL_Young_Final 5036 3 97 1 25 0 1 243 5036_3 -PRL_Young_Final 5036 3 98 1 25 0 1 58 5036_3 -PRL_Young_Final 5036 3 99 1 -25 0 1 498 5036_3 -PRL_Young_Final 5036 3 100 2 -25 1 0 587 5036_3 -PRL_Young_Final 5036 3 101 2 -25 0 0 89 5036_3 -PRL_Young_Final 5036 3 102 2 -25 0 0 201 5036_3 -PRL_Young_Final 5036 3 103 1 25 1 1 446 5036_3 -PRL_Young_Final 5036 3 104 1 25 0 1 540 5036_3 -PRL_Young_Final 5036 3 105 1 25 0 1 507 5036_3 -PRL_Young_Final 5036 3 106 1 25 0 1 432 5036_3 -PRL_Young_Final 5036 3 107 1 25 0 1 525 5036_3 -PRL_Young_Final 5036 3 108 1 -25 0 1 47 5036_3 -PRL_Young_Final 5036 3 109 1 -25 0 1 210 5036_3 -PRL_Young_Final 5036 3 110 2 25 1 1 661 5036_3 -PRL_Young_Final 5036 3 111 2 25 0 1 225 5036_3 -PRL_Young_Final 5036 3 112 2 25 0 1 443 5036_3 -PRL_Young_Final 5036 3 113 2 25 0 1 289 5036_3 -PRL_Young_Final 5036 3 114 2 25 0 1 596 5036_3 -PRL_Young_Final 5036 3 115 2 -25 0 0 505 5036_3 -PRL_Young_Final 5036 3 116 2 -25 0 0 649 5036_3 -PRL_Young_Final 5036 3 117 1 25 1 1 1022 5036_3 -PRL_Young_Final 5036 3 118 1 -25 0 1 1342 5036_3 -PRL_Young_Final 5036 3 119 1 25 0 1 354 5036_3 -PRL_Young_Final 5036 3 120 1 25 0 1 1119 5036_3 -PRL_Young_Final 5036 3 121 1 25 0 1 911 5036_3 -PRL_Young_Final 5036 3 122 1 25 0 1 2367 5036_3 -PRL_Young_Final 5036 3 123 1 25 0 1 834 5036_3 -PRL_Young_Final 5036 3 124 1 25 0 1 1194 5036_3 -PRL_Young_Final 5036 3 125 1 -25 0 0 2371 5036_3 
-PRL_Young_Final 5036 3 126 1 25 0 0 2397 5036_3 -PRL_Young_Final 5036 3 127 1 -25 0 0 313 5036_3 -PRL_Young_Final 5036 3 128 1 25 0 0 547 5036_3 -PRL_Young_Final 5036 3 129 1 -25 0 0 1113 5036_3 -PRL_Young_Final 5036 3 130 1 -25 0 0 283 5036_3 -PRL_Young_Final 5036 3 131 2 25 1 1 73 5036_3 -PRL_Young_Final 5036 3 132 2 -25 0 1 2660 5036_3 -PRL_Young_Final 5036 3 133 2 25 0 1 426 5036_3 -PRL_Young_Final 5036 3 134 2 25 0 1 950 5036_3 -PRL_Young_Final 5036 3 135 2 -25 0 1 744 5036_3 -PRL_Young_Final 5036 3 136 2 25 0 1 637 5036_3 -PRL_Young_Final 5036 3 137 2 -25 0 0 1133 5036_3 -PRL_Young_Final 5036 3 138 1 25 1 1 1031 5036_3 -PRL_Young_Final 5036 3 139 1 25 0 1 540 5036_3 -PRL_Young_Final 5036 3 140 1 25 0 1 447 5036_3 -PRL_Young_Final 5036 3 141 1 25 0 1 594 5036_3 -PRL_Young_Final 5036 3 142 1 -25 0 1 400 5036_3 -PRL_Young_Final 5036 3 143 1 -25 0 1 509 5036_3 -PRL_Young_Final 5036 3 144 2 -25 1 0 1377 5036_3 -PRL_Young_Final 5036 3 145 2 -25 0 0 503 5036_3 -PRL_Young_Final 5036 3 146 1 25 1 1 861 5036_3 -PRL_Young_Final 5036 3 147 1 25 0 1 457 5036_3 -PRL_Young_Final 5036 3 148 1 25 0 1 486 5036_3 -PRL_Young_Final 5036 3 149 1 25 0 1 470 5036_3 -PRL_Young_Final 5036 3 150 1 25 0 1 438 5036_3 -PRL_Young_Final 5036 3 151 1 25 0 1 977 5036_3 -PRL_Young_Final 5036 3 152 1 -25 0 1 762 5036_3 -PRL_Young_Final 5036 3 153 1 25 0 1 1138 5036_3 -PRL_Young_Final 5036 3 154 1 -25 0 0 578 5036_3 -PRL_Young_Final 5036 3 155 2 25 1 1 754 5036_3 -PRL_Young_Final 5036 3 156 2 25 0 1 482 5036_3 -PRL_Young_Final 5036 3 157 2 25 0 1 447 5036_3 -PRL_Young_Final 5036 3 158 2 25 0 1 952 5036_3 -PRL_Young_Final 5036 3 159 2 25 0 1 1078 5036_3 -PRL_Young_Final 5036 3 160 2 25 0 1 934 5036_3 -PRL_Young_Final 5036 3 161 2 -25 0 1 481 5036_3 -PRL_Young_Final 5036 3 162 2 25 0 0 563 5036_3 -PRL_Young_Final 5036 3 163 2 25 0 0 699 5036_3 -PRL_Young_Final 5036 3 164 2 -25 0 0 509 5036_3 -PRL_Young_Final 5036 3 165 1 25 1 1 1000 5036_3 -PRL_Young_Final 5036 3 166 1 25 0 1 488 5036_3 -PRL_Young_Final 5036 3 167 1 -25 0 1 1019 5036_3 -PRL_Young_Final 5036 3 168 1 25 0 1 613 5036_3 -PRL_Young_Final 5036 3 169 1 25 0 1 470 5036_3 -PRL_Young_Final 5036 3 170 1 25 0 1 436 5036_3 -PRL_Young_Final 5036 3 171 1 -25 0 0 269 5036_3 -PRL_Young_Final 5036 3 172 2 25 1 1 1473 5036_3 -PRL_Young_Final 5036 3 173 2 25 0 1 458 5036_3 -PRL_Young_Final 5036 3 174 2 -25 0 1 433 5036_3 -PRL_Young_Final 5036 3 175 2 -25 0 1 556 5036_3 -PRL_Young_Final 5036 3 176 1 -25 1 0 532 5036_3 -PRL_Young_Final 5036 3 177 1 -25 0 0 395 5036_3 -PRL_Young_Final 5036 3 178 1 -25 0 0 393 5036_3 -PRL_Young_Final 5036 3 179 2 25 1 1 421 5036_3 -PRL_Young_Final 5036 3 180 2 25 0 1 342 5036_3 -PRL_Young_Final 5036 3 181 2 25 0 1 436 5036_3 -PRL_Young_Final 5036 3 182 2 25 0 1 126 5036_3 -PRL_Young_Final 5036 3 183 2 25 0 1 533 5036_3 -PRL_Young_Final 5036 3 184 2 -25 0 0 474 5036_3 -PRL_Young_Final 5036 3 185 2 25 0 0 524 5036_3 -PRL_Young_Final 5036 3 186 2 -25 0 0 805 5036_3 -PRL_Young_Final 5036 3 187 1 25 1 1 1009 5036_3 -PRL_Young_Final 5036 3 188 1 -25 0 1 490 5036_3 -PRL_Young_Final 5036 3 189 1 25 0 1 795 5036_3 -PRL_Young_Final 5036 3 190 1 25 0 1 487 5036_3 -PRL_Young_Final 5036 3 191 1 25 0 1 946 5036_3 -PRL_Young_Final 5036 3 192 1 25 0 1 1127 5036_3 -PRL_Young_Final 5036 3 193 1 -25 0 0 677 5036_3 -PRL_Young_Final 5036 3 194 1 -25 0 0 782 5036_3 -PRL_Young_Final 5036 3 195 2 25 1 1 521 5036_3 -PRL_Young_Final 5036 3 196 2 25 0 1 480 5036_3 -PRL_Young_Final 5036 3 197 2 25 0 1 450 5036_3 -PRL_Young_Final 5036 3 198 2 -25 0 1 429 5036_3 
-PRL_Young_Final 5036 3 199 2 25 0 1 585 5036_3 -PRL_Young_Final 5036 3 200 2 -25 0 0 102 5036_3 -PRL_Young_Final 5035 1 1 1 25 0 0 753 5035_1 -PRL_Young_Final 5035 1 2 1 -25 0 0 321 5035_1 -PRL_Young_Final 5035 1 3 1 -25 0 0 283 5035_1 -PRL_Young_Final 5035 1 4 2 25 1 1 300 5035_1 -PRL_Young_Final 5035 1 5 2 25 0 1 337 5035_1 -PRL_Young_Final 5035 1 6 2 -25 0 1 285 5035_1 -PRL_Young_Final 5035 1 7 2 25 0 1 363 5035_1 -PRL_Young_Final 5035 1 8 2 25 0 1 281 5035_1 -PRL_Young_Final 5035 1 9 2 -25 0 0 287 5035_1 -PRL_Young_Final 5035 1 10 1 -25 1 1 310 5035_1 -PRL_Young_Final 5035 1 11 2 -25 1 0 906 5035_1 -PRL_Young_Final 5035 1 12 2 -25 0 0 584 5035_1 -PRL_Young_Final 5035 1 13 1 25 1 1 239 5035_1 -PRL_Young_Final 5035 1 14 1 25 0 1 273 5035_1 -PRL_Young_Final 5035 1 15 1 25 0 1 698 5035_1 -PRL_Young_Final 5035 1 16 1 25 0 1 365 5035_1 -PRL_Young_Final 5035 1 17 1 25 0 1 295 5035_1 -PRL_Young_Final 5035 1 18 1 -25 0 0 305 5035_1 -PRL_Young_Final 5035 1 19 1 25 0 0 284 5035_1 -PRL_Young_Final 5035 1 20 1 25 0 0 278 5035_1 -PRL_Young_Final 5035 1 21 1 -25 0 0 276 5035_1 -PRL_Young_Final 5035 1 22 1 -25 0 0 239 5035_1 -PRL_Young_Final 5035 1 23 2 -25 1 1 342 5035_1 -PRL_Young_Final 5035 1 24 2 -25 0 1 536 5035_1 -PRL_Young_Final 5035 1 25 1 -25 1 0 464 5035_1 -PRL_Young_Final 5035 1 26 1 -25 0 0 277 5035_1 -PRL_Young_Final 5035 1 27 1 -25 0 0 412 5035_1 -PRL_Young_Final 5035 1 28 2 25 1 1 371 5035_1 -PRL_Young_Final 5035 1 29 2 25 0 1 311 5035_1 -PRL_Young_Final 5035 1 30 2 25 0 1 303 5035_1 -PRL_Young_Final 5035 1 31 2 25 0 1 410 5035_1 -PRL_Young_Final 5035 1 32 2 25 0 1 293 5035_1 -PRL_Young_Final 5035 1 33 2 25 0 1 706 5035_1 -PRL_Young_Final 5035 1 34 2 -25 0 1 484 5035_1 -PRL_Young_Final 5035 1 35 2 25 0 1 349 5035_1 -PRL_Young_Final 5035 1 36 2 -25 0 0 482 5035_1 -PRL_Young_Final 5035 1 37 2 25 0 0 649 5035_1 -PRL_Young_Final 5035 1 38 2 -25 0 0 543 5035_1 -PRL_Young_Final 5035 1 39 2 -25 0 0 419 5035_1 -PRL_Young_Final 5035 1 40 1 25 1 1 337 5035_1 -PRL_Young_Final 5035 1 41 1 25 0 1 331 5035_1 -PRL_Young_Final 5035 1 42 1 25 0 1 654 5035_1 -PRL_Young_Final 5035 1 43 1 25 0 1 301 5035_1 -PRL_Young_Final 5035 1 44 1 25 0 1 278 5035_1 -PRL_Young_Final 5035 1 45 1 -25 0 0 610 5035_1 -PRL_Young_Final 5035 1 46 1 -25 0 0 427 5035_1 -PRL_Young_Final 5035 1 47 2 25 1 1 336 5035_1 -PRL_Young_Final 5035 1 48 2 -25 0 1 271 5035_1 -PRL_Young_Final 5035 1 49 2 25 0 1 244 5035_1 -PRL_Young_Final 5035 1 50 2 25 0 1 577 5035_1 -PRL_Young_Final 5035 1 51 2 -25 0 1 291 5035_1 -PRL_Young_Final 5035 1 52 2 25 0 1 653 5035_1 -PRL_Young_Final 5035 1 53 2 25 0 1 327 5035_1 -PRL_Young_Final 5035 1 54 2 -25 0 0 306 5035_1 -PRL_Young_Final 5035 1 55 2 -25 0 0 295 5035_1 -PRL_Young_Final 5035 1 56 1 25 1 1 294 5035_1 -PRL_Young_Final 5035 1 57 1 25 0 1 350 5035_1 -PRL_Young_Final 5035 1 58 1 25 0 1 737 5035_1 -PRL_Young_Final 5035 1 59 1 -25 0 1 577 5035_1 -PRL_Young_Final 5035 1 60 1 -25 0 1 331 5035_1 -PRL_Young_Final 5035 1 61 2 -25 1 0 271 5035_1 -PRL_Young_Final 5035 1 62 2 25 0 0 381 5035_1 -PRL_Young_Final 5035 1 63 2 -25 0 0 332 5035_1 -PRL_Young_Final 5035 1 64 2 25 0 0 425 5035_1 -PRL_Young_Final 5035 1 65 2 -25 0 0 290 5035_1 -PRL_Young_Final 5035 1 66 2 -25 0 0 300 5035_1 -PRL_Young_Final 5035 1 67 1 25 1 1 625 5035_1 -PRL_Young_Final 5035 1 68 1 25 0 1 834 5035_1 -PRL_Young_Final 5035 1 69 1 25 0 1 448 5035_1 -PRL_Young_Final 5035 1 70 1 25 0 1 348 5035_1 -PRL_Young_Final 5035 1 71 1 25 0 1 367 5035_1 -PRL_Young_Final 5035 1 72 1 -25 0 0 296 5035_1 -PRL_Young_Final 5035 1 73 1 -25 0 0 624 5035_1 
-PRL_Young_Final 5035 1 74 2 25 1 1 371 5035_1 -PRL_Young_Final 5035 1 75 2 -25 0 1 282 5035_1 -PRL_Young_Final 5035 1 76 2 25 0 1 302 5035_1 -PRL_Young_Final 5035 1 77 2 25 0 1 647 5035_1 -PRL_Young_Final 5035 1 78 2 25 0 1 374 5035_1 -PRL_Young_Final 5035 1 79 2 25 0 1 461 5035_1 -PRL_Young_Final 5035 1 80 2 25 0 1 260 5035_1 -PRL_Young_Final 5035 1 81 2 -25 0 0 342 5035_1 -PRL_Young_Final 5035 1 82 2 -25 0 0 438 5035_1 -PRL_Young_Final 5035 1 83 1 25 1 1 290 5035_1 -PRL_Young_Final 5035 1 84 1 25 0 1 350 5035_1 -PRL_Young_Final 5035 1 85 1 -25 0 1 202 5035_1 -PRL_Young_Final 5035 1 86 1 25 0 1 333 5035_1 -PRL_Young_Final 5035 1 87 1 25 0 1 687 5035_1 -PRL_Young_Final 5035 1 88 1 25 0 0 280 5035_1 -PRL_Young_Final 5035 1 89 1 25 0 0 358 5035_1 -PRL_Young_Final 5035 1 90 1 -25 0 0 570 5035_1 -PRL_Young_Final 5035 1 91 1 -25 0 0 632 5035_1 -PRL_Young_Final 5035 1 92 2 -25 1 1 334 5035_1 -PRL_Young_Final 5035 1 93 2 25 0 1 546 5035_1 -PRL_Young_Final 5035 1 94 2 25 0 1 649 5035_1 -PRL_Young_Final 5035 1 95 2 25 0 1 570 5035_1 -PRL_Young_Final 5035 1 96 2 25 0 1 651 5035_1 -PRL_Young_Final 5035 1 97 2 25 0 1 598 5035_1 -PRL_Young_Final 5035 1 98 2 -25 0 0 548 5035_1 -PRL_Young_Final 5035 1 99 2 -25 0 0 505 5035_1 -PRL_Young_Final 5035 1 100 1 -25 1 1 305 5035_1 -PRL_Young_Final 5035 1 101 1 -25 0 1 489 5035_1 -PRL_Young_Final 5035 1 102 2 -25 1 0 250 5035_1 -PRL_Young_Final 5035 1 103 2 -25 0 0 311 5035_1 -PRL_Young_Final 5035 1 104 1 25 1 1 342 5035_1 -PRL_Young_Final 5035 1 105 1 25 0 1 305 5035_1 -PRL_Young_Final 5035 1 106 1 25 0 1 310 5035_1 -PRL_Young_Final 5035 1 107 1 25 0 1 251 5035_1 -PRL_Young_Final 5035 1 108 1 25 0 1 254 5035_1 -PRL_Young_Final 5035 1 109 1 25 0 1 561 5035_1 -PRL_Young_Final 5035 1 110 1 25 0 0 287 5035_1 -PRL_Young_Final 5035 1 111 1 -25 0 0 261 5035_1 -PRL_Young_Final 5035 1 112 1 -25 0 0 276 5035_1 -PRL_Young_Final 5035 1 113 1 -25 0 0 74 5035_1 -PRL_Young_Final 5035 1 114 2 -25 1 1 257 5035_1 -PRL_Young_Final 5035 1 115 2 25 0 1 593 5035_1 -PRL_Young_Final 5035 1 116 2 25 0 1 565 5035_1 -PRL_Young_Final 5035 1 117 2 25 0 1 276 5035_1 -PRL_Young_Final 5035 1 118 2 25 0 1 614 5035_1 -PRL_Young_Final 5035 1 119 2 25 0 1 290 5035_1 -PRL_Young_Final 5035 1 120 2 25 0 1 385 5035_1 -PRL_Young_Final 5035 1 121 2 25 0 1 280 5035_1 -PRL_Young_Final 5035 1 122 2 -25 0 0 426 5035_1 -PRL_Young_Final 5035 1 123 2 -25 0 0 278 5035_1 -PRL_Young_Final 5035 1 124 2 -25 0 0 298 5035_1 -PRL_Young_Final 5035 1 125 1 -25 1 1 283 5035_1 -PRL_Young_Final 5035 1 126 1 25 0 1 338 5035_1 -PRL_Young_Final 5035 1 127 1 25 0 1 284 5035_1 -PRL_Young_Final 5035 1 128 1 -25 0 1 316 5035_1 -PRL_Young_Final 5035 1 129 1 25 0 1 287 5035_1 -PRL_Young_Final 5035 1 130 1 25 0 1 259 5035_1 -PRL_Young_Final 5035 1 131 1 -25 0 0 293 5035_1 -PRL_Young_Final 5035 1 132 1 25 0 0 301 5035_1 -PRL_Young_Final 5035 1 133 1 -25 0 0 360 5035_1 -PRL_Young_Final 5035 1 134 1 25 0 0 285 5035_1 -PRL_Young_Final 5035 1 135 1 -25 0 0 308 5035_1 -PRL_Young_Final 5035 1 136 1 -25 0 0 579 5035_1 -PRL_Young_Final 5035 1 137 1 -25 0 0 447 5035_1 -PRL_Young_Final 5035 1 138 2 25 1 1 257 5035_1 -PRL_Young_Final 5035 1 139 2 25 0 1 263 5035_1 -PRL_Young_Final 5035 1 140 2 25 0 1 268 5035_1 -PRL_Young_Final 5035 1 141 2 -25 0 1 268 5035_1 -PRL_Young_Final 5035 1 142 2 -25 0 1 268 5035_1 -PRL_Young_Final 5035 1 143 2 25 0 1 528 5035_1 -PRL_Young_Final 5035 1 144 2 25 0 1 267 5035_1 -PRL_Young_Final 5035 1 145 2 -25 0 0 314 5035_1 -PRL_Young_Final 5035 1 146 2 -25 0 0 567 5035_1 -PRL_Young_Final 5035 1 147 2 -25 0 0 797 
5035_1 -PRL_Young_Final 5035 1 148 1 25 1 1 338 5035_1 -PRL_Young_Final 5035 1 149 1 25 0 1 296 5035_1 -PRL_Young_Final 5035 1 150 1 25 0 1 567 5035_1 -PRL_Young_Final 5035 1 151 1 25 0 1 579 5035_1 -PRL_Young_Final 5035 1 152 1 -25 0 1 303 5035_1 -PRL_Young_Final 5035 1 153 1 25 0 0 286 5035_1 -PRL_Young_Final 5035 1 154 1 25 0 0 278 5035_1 -PRL_Young_Final 5035 1 155 1 -25 0 0 359 5035_1 -PRL_Young_Final 5035 1 156 1 -25 0 0 571 5035_1 -PRL_Young_Final 5035 1 157 2 25 1 1 517 5035_1 -PRL_Young_Final 5035 1 158 2 25 0 1 354 5035_1 -PRL_Young_Final 5035 1 159 2 25 0 1 342 5035_1 -PRL_Young_Final 5035 1 160 2 25 0 1 289 5035_1 -PRL_Young_Final 5035 1 161 2 25 0 1 273 5035_1 -PRL_Young_Final 5035 1 162 2 25 0 1 286 5035_1 -PRL_Young_Final 5035 1 163 2 25 0 1 579 5035_1 -PRL_Young_Final 5035 1 164 2 -25 0 1 571 5035_1 -PRL_Young_Final 5035 1 165 2 -25 0 0 349 5035_1 -PRL_Young_Final 5035 1 166 1 25 1 1 289 5035_1 -PRL_Young_Final 5035 1 167 1 25 0 1 510 5035_1 -PRL_Young_Final 5035 1 168 1 -25 0 1 287 5035_1 -PRL_Young_Final 5035 1 169 1 25 0 1 289 5035_1 -PRL_Young_Final 5035 1 170 1 25 0 1 281 5035_1 -PRL_Young_Final 5035 1 171 1 25 0 1 258 5035_1 -PRL_Young_Final 5035 1 172 1 25 0 1 302 5035_1 -PRL_Young_Final 5035 1 173 1 25 0 1 590 5035_1 -PRL_Young_Final 5035 1 174 1 -25 0 0 298 5035_1 -PRL_Young_Final 5035 1 175 1 -25 0 0 261 5035_1 -PRL_Young_Final 5035 1 176 2 -25 1 1 387 5035_1 -PRL_Young_Final 5035 1 177 2 -25 0 1 362 5035_1 -PRL_Young_Final 5035 1 178 1 -25 1 0 258 5035_1 -PRL_Young_Final 5035 1 179 1 25 0 0 346 5035_1 -PRL_Young_Final 5035 1 180 1 -25 0 0 299 5035_1 -PRL_Young_Final 5035 1 181 1 -25 0 0 311 5035_1 -PRL_Young_Final 5035 1 182 2 25 1 1 274 5035_1 -PRL_Young_Final 5035 1 183 2 25 0 1 295 5035_1 -PRL_Young_Final 5035 1 184 2 25 0 1 325 5035_1 -PRL_Young_Final 5035 1 185 2 25 0 1 330 5035_1 -PRL_Young_Final 5035 1 186 2 25 0 1 278 5035_1 -PRL_Young_Final 5035 1 187 2 -25 0 0 602 5035_1 -PRL_Young_Final 5035 1 188 2 -25 0 0 594 5035_1 -PRL_Young_Final 5035 1 189 1 25 1 1 269 5035_1 -PRL_Young_Final 5035 1 190 1 -25 0 1 274 5035_1 -PRL_Young_Final 5035 1 191 1 25 0 1 271 5035_1 -PRL_Young_Final 5035 1 192 1 25 0 1 301 5035_1 -PRL_Young_Final 5035 1 193 1 25 0 1 322 5035_1 -PRL_Young_Final 5035 1 194 1 25 0 1 332 5035_1 -PRL_Young_Final 5035 1 195 1 25 0 1 337 5035_1 -PRL_Young_Final 5035 1 196 1 25 0 1 274 5035_1 -PRL_Young_Final 5035 1 197 1 -25 0 0 279 5035_1 -PRL_Young_Final 5035 1 198 1 -25 0 0 577 5035_1 -PRL_Young_Final 5035 1 199 2 25 1 1 350 5035_1 -PRL_Young_Final 5035 1 200 2 -25 0 1 262 5035_1 -PRL_Young_Final 5035 2 1 1 -25 1 0 838 5035_2 -PRL_Young_Final 5035 2 2 1 25 0 0 413 5035_2 -PRL_Young_Final 5035 2 3 1 -25 0 0 491 5035_2 -PRL_Young_Final 5035 2 4 1 25 0 0 276 5035_2 -PRL_Young_Final 5035 2 5 1 -25 0 0 381 5035_2 -PRL_Young_Final 5035 2 6 1 -25 0 0 279 5035_2 -PRL_Young_Final 5035 2 7 2 25 1 1 556 5035_2 -PRL_Young_Final 5035 2 8 2 25 0 1 297 5035_2 -PRL_Young_Final 5035 2 9 2 -25 0 1 344 5035_2 -PRL_Young_Final 5035 2 10 2 25 0 1 285 5035_2 -PRL_Young_Final 5035 2 11 2 25 0 1 306 5035_2 -PRL_Young_Final 5035 2 12 2 -25 0 0 567 5035_2 -PRL_Young_Final 5035 2 13 2 -25 0 0 597 5035_2 -PRL_Young_Final 5035 2 14 1 25 1 1 266 5035_2 -PRL_Young_Final 5035 2 15 1 25 0 1 482 5035_2 -PRL_Young_Final 5035 2 16 1 25 0 1 346 5035_2 -PRL_Young_Final 5035 2 17 1 -25 0 1 1135 5035_2 -PRL_Young_Final 5035 2 18 1 -25 0 1 294 5035_2 -PRL_Young_Final 5035 2 19 2 -25 1 0 283 5035_2 -PRL_Young_Final 5035 2 20 2 -25 0 0 356 5035_2 -PRL_Young_Final 5035 2 21 1 25 1 1 312 
5035_2 -PRL_Young_Final 5035 2 22 1 25 0 1 322 5035_2 -PRL_Young_Final 5035 2 23 1 25 0 1 664 5035_2 -PRL_Young_Final 5035 2 24 1 25 0 1 586 5035_2 -PRL_Young_Final 5035 2 25 1 25 0 1 429 5035_2 -PRL_Young_Final 5035 2 26 1 25 0 0 479 5035_2 -PRL_Young_Final 5035 2 27 1 25 0 0 348 5035_2 -PRL_Young_Final 5035 2 28 1 -25 0 0 619 5035_2 -PRL_Young_Final 5035 2 29 1 -25 0 0 293 5035_2 -PRL_Young_Final 5035 2 30 2 25 1 1 272 5035_2 -PRL_Young_Final 5035 2 31 2 -25 0 1 306 5035_2 -PRL_Young_Final 5035 2 32 2 25 0 1 452 5035_2 -PRL_Young_Final 5035 2 33 2 25 0 1 262 5035_2 -PRL_Young_Final 5035 2 34 2 25 0 1 269 5035_2 -PRL_Young_Final 5035 2 35 2 25 0 1 272 5035_2 -PRL_Young_Final 5035 2 36 2 -25 0 0 294 5035_2 -PRL_Young_Final 5035 2 37 2 -25 0 0 416 5035_2 -PRL_Young_Final 5035 2 38 2 -25 0 0 368 5035_2 -PRL_Young_Final 5035 2 39 1 25 1 1 252 5035_2 -PRL_Young_Final 5035 2 40 1 25 0 1 241 5035_2 -PRL_Young_Final 5035 2 41 1 25 0 1 309 5035_2 -PRL_Young_Final 5035 2 42 1 -25 0 1 342 5035_2 -PRL_Young_Final 5035 2 43 1 25 0 1 318 5035_2 -PRL_Young_Final 5035 2 44 1 25 0 1 94 5035_2 -PRL_Young_Final 5035 2 45 1 -25 0 0 285 5035_2 -PRL_Young_Final 5035 2 46 1 25 0 0 258 5035_2 -PRL_Young_Final 5035 2 47 1 -25 0 0 363 5035_2 -PRL_Young_Final 5035 2 48 1 -25 0 0 316 5035_2 -PRL_Young_Final 5035 2 49 2 -25 1 1 310 5035_2 -PRL_Young_Final 5035 2 50 2 25 0 1 373 5035_2 -PRL_Young_Final 5035 2 51 2 25 0 1 571 5035_2 -PRL_Young_Final 5035 2 52 2 25 0 1 294 5035_2 -PRL_Young_Final 5035 2 53 2 25 0 1 314 5035_2 -PRL_Young_Final 5035 2 54 2 -25 0 0 628 5035_2 -PRL_Young_Final 5035 2 55 2 -25 0 0 545 5035_2 -PRL_Young_Final 5035 2 56 1 25 1 1 304 5035_2 -PRL_Young_Final 5035 2 57 1 -25 0 1 323 5035_2 -PRL_Young_Final 5035 2 58 1 -25 0 1 295 5035_2 -PRL_Young_Final 5035 2 59 2 -25 1 0 366 5035_2 -PRL_Young_Final 5035 2 60 2 -25 0 0 276 5035_2 -PRL_Young_Final 5035 2 61 1 25 1 1 282 5035_2 -PRL_Young_Final 5035 2 62 1 25 0 1 399 5035_2 -PRL_Young_Final 5035 2 63 1 25 0 1 334 5035_2 -PRL_Young_Final 5035 2 64 1 25 0 1 310 5035_2 -PRL_Young_Final 5035 2 65 1 25 0 1 416 5035_2 -PRL_Young_Final 5035 2 66 1 25 0 1 651 5035_2 -PRL_Young_Final 5035 2 67 1 -25 0 0 739 5035_2 -PRL_Young_Final 5035 2 68 1 25 0 0 368 5035_2 -PRL_Young_Final 5035 2 69 1 -25 0 0 274 5035_2 -PRL_Young_Final 5035 2 70 1 25 0 0 583 5035_2 -PRL_Young_Final 5035 2 71 1 -25 0 0 618 5035_2 -PRL_Young_Final 5035 2 72 1 -25 0 0 350 5035_2 -PRL_Young_Final 5035 2 73 2 -25 1 1 256 5035_2 -PRL_Young_Final 5035 2 74 2 25 0 1 289 5035_2 -PRL_Young_Final 5035 2 75 2 25 0 1 356 5035_2 -PRL_Young_Final 5035 2 76 2 25 0 1 399 5035_2 -PRL_Young_Final 5035 2 77 2 25 0 1 314 5035_2 -PRL_Young_Final 5035 2 78 2 25 0 1 325 5035_2 -PRL_Young_Final 5035 2 79 2 25 0 1 272 5035_2 -PRL_Young_Final 5035 2 80 2 25 0 1 536 5035_2 -PRL_Young_Final 5035 2 81 2 -25 0 0 372 5035_2 -PRL_Young_Final 5035 2 82 2 -25 0 0 308 5035_2 -PRL_Young_Final 5035 2 83 1 -25 1 1 460 5035_2 -PRL_Young_Final 5035 2 84 1 25 0 1 408 5035_2 -PRL_Young_Final 5035 2 85 1 25 0 1 307 5035_2 -PRL_Young_Final 5035 2 86 1 -25 0 1 635 5035_2 -PRL_Young_Final 5035 2 87 1 25 0 1 643 5035_2 -PRL_Young_Final 5035 2 88 1 25 0 1 331 5035_2 -PRL_Young_Final 5035 2 89 1 25 0 1 599 5035_2 -PRL_Young_Final 5035 2 90 1 25 0 1 288 5035_2 -PRL_Young_Final 5035 2 91 1 -25 0 0 271 5035_2 -PRL_Young_Final 5035 2 92 1 -25 0 0 324 5035_2 -PRL_Young_Final 5035 2 93 2 25 1 1 356 5035_2 -PRL_Young_Final 5035 2 94 2 -25 0 1 812 5035_2 -PRL_Young_Final 5035 2 95 2 -25 0 1 767 5035_2 -PRL_Young_Final 5035 2 96 1 25 1 0 309 
5035_2 -PRL_Young_Final 5035 2 97 1 25 0 0 278 5035_2 -PRL_Young_Final 5035 2 98 1 -25 0 0 367 5035_2 -PRL_Young_Final 5035 2 99 1 -25 0 0 279 5035_2 -PRL_Young_Final 5035 2 100 1 -25 0 0 489 5035_2 -PRL_Young_Final 5035 2 101 2 25 1 1 336 5035_2 -PRL_Young_Final 5035 2 102 2 25 0 1 285 5035_2 -PRL_Young_Final 5035 2 103 2 25 0 1 299 5035_2 -PRL_Young_Final 5035 2 104 2 25 0 1 455 5035_2 -PRL_Young_Final 5035 2 105 2 25 0 1 381 5035_2 -PRL_Young_Final 5035 2 106 2 25 0 1 327 5035_2 -PRL_Young_Final 5035 2 107 2 -25 0 1 644 5035_2 -PRL_Young_Final 5035 2 108 2 -25 0 0 608 5035_2 -PRL_Young_Final 5035 2 109 2 -25 0 0 280 5035_2 -PRL_Young_Final 5035 2 110 1 25 1 1 315 5035_2 -PRL_Young_Final 5035 2 111 1 25 0 1 677 5035_2 -PRL_Young_Final 5035 2 112 1 25 0 1 260 5035_2 -PRL_Young_Final 5035 2 113 1 25 0 1 652 5035_2 -PRL_Young_Final 5035 2 114 1 25 0 1 565 5035_2 -PRL_Young_Final 5035 2 115 1 -25 0 0 283 5035_2 -PRL_Young_Final 5035 2 116 1 25 0 0 321 5035_2 -PRL_Young_Final 5035 2 117 1 -25 0 0 617 5035_2 -PRL_Young_Final 5035 2 118 1 -25 0 0 477 5035_2 -PRL_Young_Final 5035 2 119 2 25 1 1 336 5035_2 -PRL_Young_Final 5035 2 120 2 25 0 1 379 5035_2 -PRL_Young_Final 5035 2 121 2 -25 0 1 341 5035_2 -PRL_Young_Final 5035 2 122 2 25 0 1 494 5035_2 -PRL_Young_Final 5035 2 123 2 25 0 1 412 5035_2 -PRL_Young_Final 5035 2 124 2 -25 0 0 344 5035_2 -PRL_Young_Final 5035 2 125 2 -25 0 0 654 5035_2 -PRL_Young_Final 5035 2 126 1 -25 1 1 348 5035_2 -PRL_Young_Final 5035 2 127 1 25 0 1 314 5035_2 -PRL_Young_Final 5035 2 128 1 25 0 1 622 5035_2 -PRL_Young_Final 5035 2 129 1 25 0 1 298 5035_2 -PRL_Young_Final 5035 2 130 1 25 0 1 406 5035_2 -PRL_Young_Final 5035 2 131 1 25 0 1 413 5035_2 -PRL_Young_Final 5035 2 132 1 -25 0 1 479 5035_2 -PRL_Young_Final 5035 2 133 1 -25 0 0 390 5035_2 -PRL_Young_Final 5035 2 134 2 -25 1 1 1168 5035_2 -PRL_Young_Final 5035 2 135 2 25 0 1 1025 5035_2 -PRL_Young_Final 5035 2 136 2 25 0 1 383 5035_2 -PRL_Young_Final 5035 2 137 2 25 0 1 415 5035_2 -PRL_Young_Final 5035 2 138 2 25 0 1 334 5035_2 -PRL_Young_Final 5035 2 139 2 25 0 1 369 5035_2 -PRL_Young_Final 5035 2 140 2 25 0 1 428 5035_2 -PRL_Young_Final 5035 2 141 2 -25 0 1 345 5035_2 -PRL_Young_Final 5035 2 142 2 -25 0 0 326 5035_2 -PRL_Young_Final 5035 2 143 1 25 1 1 548 5035_2 -PRL_Young_Final 5035 2 144 1 25 0 1 690 5035_2 -PRL_Young_Final 5035 2 145 1 25 0 1 635 5035_2 -PRL_Young_Final 5035 2 146 1 25 0 1 1661 5035_2 -PRL_Young_Final 5035 2 147 1 25 0 1 358 5035_2 -PRL_Young_Final 5035 2 148 1 25 0 1 443 5035_2 -PRL_Young_Final 5035 2 149 1 25 0 1 353 5035_2 -PRL_Young_Final 5035 2 150 1 -25 0 0 310 5035_2 -PRL_Young_Final 5035 2 151 1 25 0 0 664 5035_2 -PRL_Young_Final 5035 2 152 1 -25 0 0 320 5035_2 -PRL_Young_Final 5035 2 153 1 25 0 0 318 5035_2 -PRL_Young_Final 5035 2 154 1 -25 0 0 630 5035_2 -PRL_Young_Final 5035 2 155 1 -25 0 0 373 5035_2 -PRL_Young_Final 5035 2 156 2 -25 1 1 385 5035_2 -PRL_Young_Final 5035 2 157 2 25 0 1 477 5035_2 -PRL_Young_Final 5035 2 158 2 25 0 1 360 5035_2 -PRL_Young_Final 5035 2 159 2 -25 0 1 524 5035_2 -PRL_Young_Final 5035 2 160 2 25 0 1 495 5035_2 -PRL_Young_Final 5035 2 161 2 -25 0 0 447 5035_2 -PRL_Young_Final 5035 2 162 2 -25 0 0 596 5035_2 -PRL_Young_Final 5035 2 163 1 25 1 1 598 5035_2 -PRL_Young_Final 5035 2 164 1 25 0 1 246 5035_2 -PRL_Young_Final 5035 2 165 1 25 0 1 283 5035_2 -PRL_Young_Final 5035 2 166 1 25 0 1 604 5035_2 -PRL_Young_Final 5035 2 167 1 -25 0 1 261 5035_2 -PRL_Young_Final 5035 2 168 1 -25 0 1 343 5035_2 -PRL_Young_Final 5035 2 169 2 -25 1 0 182 5035_2 
-PRL_Young_Final 5035 2 170 2 -25 0 0 346 5035_2 -PRL_Young_Final 5035 2 171 1 25 1 1 432 5035_2 -PRL_Young_Final 5035 2 172 1 25 0 1 264 5035_2 -PRL_Young_Final 5035 2 173 1 25 0 1 347 5035_2 -PRL_Young_Final 5035 2 174 1 25 0 1 724 5035_2 -PRL_Young_Final 5035 2 175 1 25 0 1 607 5035_2 -PRL_Young_Final 5035 2 176 1 25 0 1 298 5035_2 -PRL_Young_Final 5035 2 177 1 25 0 0 292 5035_2 -PRL_Young_Final 5035 2 178 1 25 0 0 377 5035_2 -PRL_Young_Final 5035 2 179 1 -25 0 0 368 5035_2 -PRL_Young_Final 5035 2 180 1 -25 0 0 579 5035_2 -PRL_Young_Final 5035 2 181 2 -25 1 1 580 5035_2 -PRL_Young_Final 5035 2 182 2 25 0 1 371 5035_2 -PRL_Young_Final 5035 2 183 2 25 0 1 840 5035_2 -PRL_Young_Final 5035 2 184 2 25 0 1 642 5035_2 -PRL_Young_Final 5035 2 185 2 25 0 1 14 5035_2 -PRL_Young_Final 5035 2 186 2 25 0 1 262 5035_2 -PRL_Young_Final 5035 2 187 2 25 0 1 532 5035_2 -PRL_Young_Final 5035 2 188 2 25 0 1 379 5035_2 -PRL_Young_Final 5035 2 189 2 -25 0 0 327 5035_2 -PRL_Young_Final 5035 2 190 2 -25 0 0 616 5035_2 -PRL_Young_Final 5035 2 191 1 -25 1 1 319 5035_2 -PRL_Young_Final 5035 2 192 1 25 0 1 292 5035_2 -PRL_Young_Final 5035 2 193 1 25 0 1 620 5035_2 -PRL_Young_Final 5035 2 194 1 -25 0 1 318 5035_2 -PRL_Young_Final 5035 2 195 1 25 0 1 349 5035_2 -PRL_Young_Final 5035 2 196 1 -25 0 0 320 5035_2 -PRL_Young_Final 5035 2 197 1 -25 0 0 289 5035_2 -PRL_Young_Final 5035 2 198 2 25 1 1 641 5035_2 -PRL_Young_Final 5035 2 199 2 25 0 1 600 5035_2 -PRL_Young_Final 5035 2 200 2 25 0 1 597 5035_2 -PRL_Young_Final 5035 3 1 1 25 1 0 553 5035_3 -PRL_Young_Final 5035 3 2 1 -25 0 0 296 5035_3 -PRL_Young_Final 5035 3 3 1 -25 0 0 572 5035_3 -PRL_Young_Final 5035 3 4 2 25 1 1 278 5035_3 -PRL_Young_Final 5035 3 5 2 -25 0 1 527 5035_3 -PRL_Young_Final 5035 3 6 2 -25 0 1 313 5035_3 -PRL_Young_Final 5035 3 7 1 -25 1 0 293 5035_3 -PRL_Young_Final 5035 3 8 1 -25 0 0 267 5035_3 -PRL_Young_Final 5035 3 9 2 25 1 1 345 5035_3 -PRL_Young_Final 5035 3 10 2 25 0 1 314 5035_3 -PRL_Young_Final 5035 3 11 2 25 0 1 611 5035_3 -PRL_Young_Final 5035 3 12 2 25 0 1 280 5035_3 -PRL_Young_Final 5035 3 13 2 25 0 1 250 5035_3 -PRL_Young_Final 5035 3 14 2 25 0 1 266 5035_3 -PRL_Young_Final 5035 3 15 2 -25 0 1 267 5035_3 -PRL_Young_Final 5035 3 16 2 25 0 1 333 5035_3 -PRL_Young_Final 5035 3 17 2 -25 0 0 297 5035_3 -PRL_Young_Final 5035 3 18 2 -25 0 0 701 5035_3 -PRL_Young_Final 5035 3 19 1 25 1 1 311 5035_3 -PRL_Young_Final 5035 3 20 1 25 0 1 285 5035_3 -PRL_Young_Final 5035 3 21 1 25 0 1 470 5035_3 -PRL_Young_Final 5035 3 22 1 25 0 1 1365 5035_3 -PRL_Young_Final 5035 3 23 1 25 0 1 261 5035_3 -PRL_Young_Final 5035 3 24 1 25 0 1 266 5035_3 -PRL_Young_Final 5035 3 25 1 -25 0 1 298 5035_3 -PRL_Young_Final 5035 3 26 1 -25 0 0 412 5035_3 -PRL_Young_Final 5035 3 27 2 25 1 1 283 5035_3 -PRL_Young_Final 5035 3 28 2 25 0 1 499 5035_3 -PRL_Young_Final 5035 3 29 2 -25 0 1 51 5035_3 -PRL_Young_Final 5035 3 30 2 25 0 1 425 5035_3 -PRL_Young_Final 5035 3 31 2 25 0 1 597 5035_3 -PRL_Young_Final 5035 3 32 2 25 0 1 354 5035_3 -PRL_Young_Final 5035 3 33 2 25 0 1 318 5035_3 -PRL_Young_Final 5035 3 34 2 25 0 1 270 5035_3 -PRL_Young_Final 5035 3 35 2 25 0 0 322 5035_3 -PRL_Young_Final 5035 3 36 2 -25 0 0 319 5035_3 -PRL_Young_Final 5035 3 37 2 25 0 0 332 5035_3 -PRL_Young_Final 5035 3 38 2 -25 0 0 340 5035_3 -PRL_Young_Final 5035 3 39 2 -25 0 0 306 5035_3 -PRL_Young_Final 5035 3 40 1 -25 1 1 357 5035_3 -PRL_Young_Final 5035 3 41 1 -25 0 1 311 5035_3 -PRL_Young_Final 5035 3 42 2 -25 1 0 289 5035_3 -PRL_Young_Final 5035 3 43 2 -25 0 0 348 5035_3 -PRL_Young_Final 5035 3 44 
1 25 1 1 397 5035_3 -PRL_Young_Final 5035 3 45 1 25 0 1 664 5035_3 -PRL_Young_Final 5035 3 46 1 25 0 1 965 5035_3 -PRL_Young_Final 5035 3 47 1 25 0 1 301 5035_3 -PRL_Young_Final 5035 3 48 1 25 0 1 277 5035_3 -PRL_Young_Final 5035 3 49 1 25 0 1 430 5035_3 -PRL_Young_Final 5035 3 50 1 -25 0 1 399 5035_3 -PRL_Young_Final 5035 3 51 1 25 0 1 398 5035_3 -PRL_Young_Final 5035 3 52 1 -25 0 0 718 5035_3 -PRL_Young_Final 5035 3 53 1 -25 0 0 388 5035_3 -PRL_Young_Final 5035 3 54 1 25 0 0 395 5035_3 -PRL_Young_Final 5035 3 55 1 25 0 0 506 5035_3 -PRL_Young_Final 5035 3 56 1 -25 0 0 343 5035_3 -PRL_Young_Final 5035 3 57 1 -25 0 0 923 5035_3 -PRL_Young_Final 5035 3 58 2 25 1 1 522 5035_3 -PRL_Young_Final 5035 3 59 2 25 0 1 294 5035_3 -PRL_Young_Final 5035 3 60 2 25 0 1 322 5035_3 -PRL_Young_Final 5035 3 61 2 25 0 1 607 5035_3 -PRL_Young_Final 5035 3 62 2 25 0 1 284 5035_3 -PRL_Young_Final 5035 3 63 2 25 0 1 461 5035_3 -PRL_Young_Final 5035 3 64 2 -25 0 1 318 5035_3 -PRL_Young_Final 5035 3 65 2 25 0 1 531 5035_3 -PRL_Young_Final 5035 3 66 2 -25 0 0 274 5035_3 -PRL_Young_Final 5035 3 67 2 -25 0 0 341 5035_3 -PRL_Young_Final 5035 3 68 1 25 1 1 390 5035_3 -PRL_Young_Final 5035 3 69 1 -25 0 1 356 5035_3 -PRL_Young_Final 5035 3 70 1 25 0 1 521 5035_3 -PRL_Young_Final 5035 3 71 1 25 0 1 302 5035_3 -PRL_Young_Final 5035 3 72 1 25 0 1 308 5035_3 -PRL_Young_Final 5035 3 73 1 25 0 1 338 5035_3 -PRL_Young_Final 5035 3 74 1 25 0 1 268 5035_3 -PRL_Young_Final 5035 3 75 1 -25 0 1 128 5035_3 -PRL_Young_Final 5035 3 76 1 -25 0 0 445 5035_3 -PRL_Young_Final 5035 3 77 2 -25 1 1 277 5035_3 -PRL_Young_Final 5035 3 78 2 25 0 1 584 5035_3 -PRL_Young_Final 5035 3 79 2 25 0 1 487 5035_3 -PRL_Young_Final 5035 3 80 2 25 0 1 368 5035_3 -PRL_Young_Final 5035 3 81 2 25 0 1 584 5035_3 -PRL_Young_Final 5035 3 82 2 25 0 1 300 5035_3 -PRL_Young_Final 5035 3 83 2 25 0 1 553 5035_3 -PRL_Young_Final 5035 3 84 2 -25 0 1 296 5035_3 -PRL_Young_Final 5035 3 85 2 -25 0 0 320 5035_3 -PRL_Young_Final 5035 3 86 2 25 0 0 299 5035_3 -PRL_Young_Final 5035 3 87 2 -25 0 0 661 5035_3 -PRL_Young_Final 5035 3 88 2 -25 0 0 304 5035_3 -PRL_Young_Final 5035 3 89 1 25 1 1 312 5035_3 -PRL_Young_Final 5035 3 90 1 25 0 1 631 5035_3 -PRL_Young_Final 5035 3 91 1 25 0 1 658 5035_3 -PRL_Young_Final 5035 3 92 1 25 0 1 248 5035_3 -PRL_Young_Final 5035 3 93 1 25 0 1 301 5035_3 -PRL_Young_Final 5035 3 94 1 25 0 1 551 5035_3 -PRL_Young_Final 5035 3 95 1 -25 0 0 597 5035_3 -PRL_Young_Final 5035 3 96 1 -25 0 0 605 5035_3 -PRL_Young_Final 5035 3 97 2 25 1 1 294 5035_3 -PRL_Young_Final 5035 3 98 2 -25 0 1 461 5035_3 -PRL_Young_Final 5035 3 99 2 25 0 1 313 5035_3 -PRL_Young_Final 5035 3 100 2 25 0 1 370 5035_3 -PRL_Young_Final 5035 3 101 2 -25 0 1 144 5035_3 -PRL_Young_Final 5035 3 102 2 -25 0 0 343 5035_3 -PRL_Young_Final 5035 3 103 1 25 1 1 334 5035_3 -PRL_Young_Final 5035 3 104 1 25 0 1 333 5035_3 -PRL_Young_Final 5035 3 105 1 25 0 1 645 5035_3 -PRL_Young_Final 5035 3 106 1 25 0 1 308 5035_3 -PRL_Young_Final 5035 3 107 1 25 0 1 334 5035_3 -PRL_Young_Final 5035 3 108 1 -25 0 0 305 5035_3 -PRL_Young_Final 5035 3 109 1 -25 0 0 313 5035_3 -PRL_Young_Final 5035 3 110 2 -25 1 1 614 5035_3 -PRL_Young_Final 5035 3 111 2 -25 0 1 585 5035_3 -PRL_Young_Final 5035 3 112 2 25 0 1 273 5035_3 -PRL_Young_Final 5035 3 113 2 25 0 1 626 5035_3 -PRL_Young_Final 5035 3 114 2 25 0 1 790 5035_3 -PRL_Young_Final 5035 3 115 2 25 0 1 402 5035_3 -PRL_Young_Final 5035 3 116 2 25 0 0 591 5035_3 -PRL_Young_Final 5035 3 117 2 -25 0 0 289 5035_3 -PRL_Young_Final 5035 3 118 2 25 0 0 404 5035_3 
-PRL_Young_Final 5035 3 119 2 -25 0 0 343 5035_3 -PRL_Young_Final 5035 3 120 2 -25 0 0 635 5035_3 -PRL_Young_Final 5035 3 121 1 25 1 1 298 5035_3 -PRL_Young_Final 5035 3 122 1 25 0 1 804 5035_3 -PRL_Young_Final 5035 3 123 1 -25 0 1 304 5035_3 -PRL_Young_Final 5035 3 124 1 25 0 1 336 5035_3 -PRL_Young_Final 5035 3 125 1 25 0 1 683 5035_3 -PRL_Young_Final 5035 3 126 1 -25 0 0 290 5035_3 -PRL_Young_Final 5035 3 127 1 -25 0 0 403 5035_3 -PRL_Young_Final 5035 3 128 2 25 1 1 291 5035_3 -PRL_Young_Final 5035 3 129 2 25 0 1 311 5035_3 -PRL_Young_Final 5035 3 130 2 25 0 1 327 5035_3 -PRL_Young_Final 5035 3 131 2 25 0 1 303 5035_3 -PRL_Young_Final 5035 3 132 2 25 0 1 267 5035_3 -PRL_Young_Final 5035 3 133 2 -25 0 1 360 5035_3 -PRL_Young_Final 5035 3 134 2 25 0 1 351 5035_3 -PRL_Young_Final 5035 3 135 2 -25 0 0 358 5035_3 -PRL_Young_Final 5035 3 136 2 -25 0 0 354 5035_3 -PRL_Young_Final 5035 3 137 1 25 1 1 615 5035_3 -PRL_Young_Final 5035 3 138 1 -25 0 1 329 5035_3 -PRL_Young_Final 5035 3 139 1 25 0 1 314 5035_3 -PRL_Young_Final 5035 3 140 1 25 0 1 351 5035_3 -PRL_Young_Final 5035 3 141 1 25 0 1 356 5035_3 -PRL_Young_Final 5035 3 142 1 25 0 0 304 5035_3 -PRL_Young_Final 5035 3 143 1 25 0 0 278 5035_3 -PRL_Young_Final 5035 3 144 1 -25 0 0 346 5035_3 -PRL_Young_Final 5035 3 145 1 -25 0 0 378 5035_3 -PRL_Young_Final 5035 3 146 2 25 1 1 253 5035_3 -PRL_Young_Final 5035 3 147 2 25 0 1 336 5035_3 -PRL_Young_Final 5035 3 148 2 -25 0 1 796 5035_3 -PRL_Young_Final 5035 3 149 2 -25 0 1 621 5035_3 -PRL_Young_Final 5035 3 150 1 -25 1 0 329 5035_3 -PRL_Young_Final 5035 3 151 2 25 1 1 249 5035_3 -PRL_Young_Final 5035 3 152 2 25 0 1 302 5035_3 -PRL_Young_Final 5035 3 153 2 25 0 1 390 5035_3 -PRL_Young_Final 5035 3 154 2 25 0 1 341 5035_3 -PRL_Young_Final 5035 3 155 2 25 0 1 260 5035_3 -PRL_Young_Final 5035 3 156 2 25 0 1 278 5035_3 -PRL_Young_Final 5035 3 157 2 -25 0 1 432 5035_3 -PRL_Young_Final 5035 3 158 2 -25 0 0 276 5035_3 -PRL_Young_Final 5035 3 159 1 25 1 1 558 5035_3 -PRL_Young_Final 5035 3 160 1 25 0 1 313 5035_3 -PRL_Young_Final 5035 3 161 1 25 0 1 360 5035_3 -PRL_Young_Final 5035 3 162 1 25 0 1 557 5035_3 -PRL_Young_Final 5035 3 163 1 25 0 1 612 5035_3 -PRL_Young_Final 5035 3 164 1 25 0 1 388 5035_3 -PRL_Young_Final 5035 3 165 1 25 0 1 613 5035_3 -PRL_Young_Final 5035 3 166 1 -25 0 1 260 5035_3 -PRL_Young_Final 5035 3 167 1 -25 0 0 856 5035_3 -PRL_Young_Final 5035 3 168 2 25 1 1 586 5035_3 -PRL_Young_Final 5035 3 169 2 25 0 1 705 5035_3 -PRL_Young_Final 5035 3 170 2 -25 0 1 446 5035_3 -PRL_Young_Final 5035 3 171 2 25 0 1 266 5035_3 -PRL_Young_Final 5035 3 172 2 25 0 1 365 5035_3 -PRL_Young_Final 5035 3 173 2 25 0 1 285 5035_3 -PRL_Young_Final 5035 3 174 2 -25 0 0 268 5035_3 -PRL_Young_Final 5035 3 175 2 25 0 0 255 5035_3 -PRL_Young_Final 5035 3 176 2 -25 0 0 533 5035_3 -PRL_Young_Final 5035 3 177 1 25 1 1 320 5035_3 -PRL_Young_Final 5035 3 178 1 25 0 1 285 5035_3 -PRL_Young_Final 5035 3 179 1 -25 0 1 271 5035_3 -PRL_Young_Final 5035 3 180 1 -25 0 1 553 5035_3 -PRL_Young_Final 5035 3 181 2 -25 1 0 275 5035_3 -PRL_Young_Final 5035 3 182 1 25 1 1 293 5035_3 -PRL_Young_Final 5035 3 183 1 25 0 1 554 5035_3 -PRL_Young_Final 5035 3 184 1 25 0 1 300 5035_3 -PRL_Young_Final 5035 3 185 1 25 0 1 274 5035_3 -PRL_Young_Final 5035 3 186 1 25 0 1 289 5035_3 -PRL_Young_Final 5035 3 187 1 25 0 1 320 5035_3 -PRL_Young_Final 5035 3 188 1 -25 0 1 303 5035_3 -PRL_Young_Final 5035 3 189 1 -25 0 0 390 5035_3 -PRL_Young_Final 5035 3 190 2 25 1 1 272 5035_3 -PRL_Young_Final 5035 3 191 2 25 0 1 673 5035_3 -PRL_Young_Final 5035 3 
diff --git a/inst/extdata/pst_exampleData.txt b/inst/extdata/pst_exampleData.txt
deleted file mode 100644
index 76f91700..00000000
--- a/inst/extdata/pst_exampleData.txt
+++ /dev/null
@@ -1,1021 +0,0 @@
-subjID type choice reward
[... 1020 deleted data rows elided: probabilistic-selection-task trials (stimulus pairs 12/34/56 with binary choice and reward) for subjects 1-5 ...]
diff --git a/inst/extdata/ra_data_attend.txt b/inst/extdata/ra_data_attend.txt
deleted file mode 100644
index 131612dd..00000000
--- a/inst/extdata/ra_data_attend.txt
+++ /dev/null
@@ -1,4192 +0,0 @@
-gain loss cert gamble outcome cond subjID
[... 4191 deleted data rows elided: per-trial gamble records for the risk-aversion (attend) dataset, beginning with subject 1; this section ends partway through the deleted file ...]
5 -10 -10 0 1 10 0 5 -6 -2.25 0 0 0 0 5 -6 -6.75 0 1 -6.75 0 5 -9 -4.5 0 0 0 0 5 -10 -13.75 0 0 0 0 5 -6 -8.25 0 0 0 0 5 -5 -10 0 0 0 0 5 -10 -6.25 0 1 10 0 5 -12 -3 0 1 -3 0 5 -12 -9 0 0 0 0 5 -8 -7 0 1 -7 0 5 -6 -12 0 0 0 0 5 -8 -2 0 1 -2 0 5 -12 -6 0 1 -6 0 5 -3 0 1 1 0 0 5 -10 -20 0 1 10 0 5 -5 -3.75 0 1 5 0 5 -2 -1.75 0 0 0 0 5 -6 -3.75 0 0 0 0 5 -9 -12.38 0 0 0 0 5 -5 -6.25 0 0 0 0 5 -12 0 4 0 4 0 5 -2 -1.5 0 1 2 0 5 -6 -5.25 0 0 0 0 5 -10 -18.75 0 0 0 0 5 -6 -6 0 1 -6 0 5 -12 0 5 0 5 0 5 -4 -2 0 1 4 0 5 -2 -4 0 0 0 0 5 -5 -2.5 0 1 5 0 5 -2 -3.75 0 0 0 0 5 -9 -15.75 0 0 0 0 5 -8 -4 0 1 8 0 5 -26 0 12 0 12 0 5 -6 -1.5 0 1 6 0 5 -4 -6 0 0 0 0 5 -10 -2.5 0 1 -2.5 0 5 -8 -12 0 0 0 0 5 -2 -3.5 0 0 0 0 5 -5 -5.63 0 1 -5.63 0 5 -12 -24 0 0 0 0 5 -25 0 10 0 10 0 5 -4 -6.5 0 0 0 0 5 -5 -9.38 0 0 0 0 5 -5 -7.5 0 1 -7.5 0 5 -4 -4 0 1 -4 0 5 -6 -10.5 0 1 -10.5 0 5 -13 0 6 1 13 0 5 -12 -22.5 0 0 0 0 5 -4 -7.5 0 0 0 0 5 -5 0 2 1 5 0 5 -10 -15 0 0 0 0 5 -9 -16.88 0 0 0 0 5 -2 -2.5 0 1 2 0 5 -10 -16.25 0 0 0 0 5 -6 -11.25 0 0 0 0 5 -4 -1.5 0 1 4 0 5 -5 -3.13 0 1 5 0 5 -6 -9 0 0 0 0 5 -12 -19.5 0 0 0 0 5 -10 -12.5 0 0 0 0 5 -2 -3 0 0 0 0 5 -8 -16 0 0 0 0 5 -4 0 2 1 4 0 5 -12 -7.5 0 0 0 0 5 -12 -13.5 0 0 0 0 5 -22 0 10 0 10 0 5 -12 -21 0 0 0 0 5 -7 0 3 0 3 0 5 -10 -8.75 0 1 -8.75 0 5 -2 -1.25 0 1 2 0 5 -9 -6.75 0 1 -6.75 0 5 -12 0 6 0 6 0 5 -28 0 13 0 13 0 5 -9 -10.13 0 0 0 0 5 -2 -0.5 0 1 2 0 5 -25 0 9 0 9 0 5 -6 -7.5 0 1 -7.5 0 5 -4 -3 0 1 4 0 5 -10 -3.75 0 1 10 0 5 -12 -4.5 0 1 -4.5 0 5 -12 -15 0 0 0 0 5 -6 -3 0 1 6 0 5 -9 -14.63 0 0 0 0 5 -5 -1.25 0 1 -1.25 0 5 -8 -11 0 0 0 0 5 -10 -17.5 0 0 0 0 5 -8 -10 0 0 0 0 5 -9 -9 0 0 0 0 5 -10 -11.25 0 0 0 0 5 -12 -12 0 0 0 0 5 -8 -14 0 0 0 0 5 -12 -16.5 0 0 0 0 5 -4 -7 0 0 0 0 5 -4 -1 0 1 -1 0 5 -5 -1.88 0 1 5 0 5 -8 0 3 0 3 0 5 -2 -3.25 0 1 -3.25 0 5 -5 -5 0 1 -5 0 5 -26 0 10 0 10 0 5 -12 -10.5 0 0 0 0 5 -2 0 1 0 1 0 5 -6 -9.75 0 0 0 0 5 -8 -3 0 1 8 0 5 -13 0 5 1 13 0 5 -10 -7.5 0 0 0 0 5 -8 -13 0 0 0 0 5 -9 -3.38 0 1 -3.38 0 5 -8 -15 0 0 0 0 5 -30 0 12 0 12 0 5 -8 -8 0 0 0 0 5 -8 -5 0 1 8 0 5 -12 -18 0 0 0 0 5 -10 -5 0 1 -5 0 5 -9 -11.25 0 1 9 0 5 -9 -7.88 0 0 0 0 5 -8 -6 0 1 -6 0 5 -6 -4.5 0 1 6 0 5 -8 -9 0 0 0 0 5 -4 -5.5 0 0 0 0 5 -4 -5 0 1 4 0 5 -9 -2.25 0 1 -2.25 0 5 -9 -5.63 0 1 -5.63 0 5 -4 -4.5 0 0 0 0 5 -4 -8 0 0 0 0 5 -19 0 8 0 8 0 5 -2 -2 0 1 2 0 5 -5 -8.13 0 1 5 0 5 -5 -4.38 0 1 -4.38 0 5 -2 -2.25 0 0 0 0 5 -2 -0.75 0 1 -0.75 0 5 -2 -2.75 0 0 0 0 5 -5 -8.75 0 0 0 0 5 -9 -18 0 0 0 0 5 -4 -3.5 0 1 4 0 5 -4 -2.5 0 1 -2.5 0 5 -9 -6.75 0 1 -6.75 0 6 -6 -6.75 0 1 -6.75 0 6 -6 -3 0 1 6 0 6 -2 -1.5 0 1 2 0 6 -4 -3 0 1 4 0 6 -5 -6.88 0 0 0 0 6 -12 -9 0 0 0 0 6 -4 -5 0 0 0 0 6 -5 -7.5 0 0 0 0 6 -4 -4 0 1 -4 0 6 -9 -5.63 0 1 -5.63 0 6 -9 -14.63 0 0 0 0 6 -5 -9.38 0 0 0 0 6 -6 -4.5 0 1 6 0 6 -8 -7 0 1 -7 0 6 -10 -16.25 0 0 0 0 6 -10 -17.5 0 0 0 0 6 -9 -16.88 0 0 0 0 6 -8 -5 0 1 8 0 6 -6 -1.5 0 1 6 0 6 -12 -18 0 0 0 0 6 -5 -6.25 0 0 0 0 6 -8 -4 0 1 8 0 6 -9 -15.75 0 0 0 0 6 -9 -13.5 0 0 0 0 6 -5 -8.13 0 0 0 0 6 -2 0 1 1 0 0 6 -2 -3.75 0 1 -3.75 0 6 -4 -6.5 0 0 0 0 6 -10 -5 0 1 -5 0 6 -12 -22.5 0 0 0 0 6 -2 -1 0 1 2 0 6 -13 0 6 0 6 0 6 -5 -2.5 0 1 5 0 6 -2 -0.5 0 1 2 0 6 -2 -3.25 0 0 0 0 6 -30 0 12 1 0 0 6 -8 -8 0 0 0 0 6 -4 -5.5 0 0 0 0 6 -23 0 10 1 0 0 6 -4 -3.5 0 1 4 0 6 -5 0 2 1 5 0 6 -8 0 3 1 0 0 6 -9 -10.13 0 0 0 0 6 -8 -16 0 0 0 0 6 -12 -24 0 0 0 0 6 -9 -3.38 0 1 -3.38 0 6 -6 -5.25 0 0 0 0 6 -2 -4 0 0 0 0 6 -4 -1 0 1 -1 0 6 -6 -11.25 0 0 0 0 6 -5 -4.38 0 1 -4.38 0 6 -6 -2.25 0 1 6 0 6 -12 -10.5 0 0 0 0 6 -9 -18 0 0 0 0 6 -10 -20 0 0 0 0 6 -4 -4.5 0 
1 -4.5 0 6 -9 -2.25 0 1 -2.25 0 6 -4 -6 0 0 0 0 6 -8 -10 0 0 0 0 6 -5 -5 0 1 -5 0 6 -5 -8.75 0 0 0 0 6 -8 -6 0 1 -6 0 6 -10 -13.75 0 0 0 0 6 -2 -2.5 0 1 2 0 6 -8 -11 0 0 0 0 6 -4 -2 0 1 4 0 6 -10 -7.5 0 1 -7.5 0 6 -22 0 10 0 10 0 6 -25 0 10 1 0 0 6 -6 -9.75 0 0 0 0 6 -12 0 5 0 5 0 6 -4 -2.5 0 1 -2.5 0 6 -8 -3 0 1 8 0 6 -10 -11.25 0 0 0 0 6 -5 -10 0 0 0 0 6 -10 -15 0 0 0 0 6 -2 -3.5 0 1 -3.5 0 6 -12 0 4 1 12 0 6 -13 0 5 0 5 0 6 -5 -3.75 0 1 5 0 6 -26 0 12 1 26 0 6 -5 -5.63 0 1 -5.63 0 6 -8 -2 0 1 -2 0 6 -2 -3 0 1 -3 0 6 -6 -9 0 0 0 0 6 -9 -7.88 0 1 -7.88 0 6 -8 -14 0 0 0 0 6 -28 0 13 0 13 0 6 -9 -12.38 0 0 0 0 6 -8 -15 0 0 0 0 6 -10 -2.5 0 1 -2.5 0 6 -4 0 2 1 4 0 6 -12 -6 0 1 -6 0 6 -12 -16.5 0 0 0 0 6 -4 -7.5 0 0 0 0 6 -10 -8.75 0 1 -8.75 0 6 -10 -18.75 0 0 0 0 6 -26 0 10 1 0 0 6 -12 -21 0 0 0 0 6 -2 -0.75 0 1 -0.75 0 6 -9 -9 0 1 -9 0 6 -10 -6.25 0 1 10 0 6 -8 -12 0 0 0 0 6 -3 0 1 1 0 0 6 -5 -1.88 0 1 5 0 6 -6 -7.5 0 0 0 0 6 -12 -13.5 0 1 12 0 6 -4 -7 0 0 0 0 6 -6 -8.25 0 0 0 0 6 -6 -12 0 0 0 0 6 -6 -10.5 0 0 0 0 6 -4 -8 0 0 0 0 6 -6 -6 0 1 -6 0 6 -12 0 6 0 6 0 6 -12 -19.5 0 0 0 0 6 -19 0 8 1 19 0 6 -12 -15 0 0 0 0 6 -2 -1.75 0 1 2 0 6 -6 -3.75 0 1 -3.75 0 6 -2 -1.25 0 1 2 0 6 -5 -1.25 0 1 -1.25 0 6 -4 -1.5 0 1 4 0 6 -8 -13 0 0 0 0 6 -12 -7.5 0 1 -7.5 0 6 -12 -3 0 1 -3 0 6 -2 -2.75 0 1 2 0 6 -7 0 3 1 7 0 6 -25 0 9 1 25 0 6 -2 -2 0 1 2 0 6 -12 -4.5 0 1 -4.5 0 6 -12 -12 0 0 0 0 6 -5 -3.13 0 1 5 0 6 -9 -11.25 0 0 0 0 6 -8 -9 0 0 0 0 6 -2 -2.25 0 1 2 0 6 -9 -4.5 0 1 -4.5 0 6 -10 -3.75 0 1 10 0 6 -10 -10 0 0 0 0 6 -10 -12.5 0 0 0 0 6 -2 -2.5 0 1 2 0 7 -5 -5.63 0 0 0 0 7 -6 -7.5 0 0 0 0 7 -26 0 10 1 0 0 7 -9 -4.5 0 1 -4.5 0 7 -2 -1.25 0 1 2 0 7 -8 -3 0 1 8 0 7 -25 0 9 1 25 0 7 -4 -4.5 0 1 -4.5 0 7 -5 -10 0 0 0 0 7 -6 -9 0 0 0 0 7 -10 -6.25 0 0 0 0 7 -4 -4 0 1 -4 0 7 -12 -3 0 1 -3 0 7 -5 -5 0 0 0 0 7 -12 0 5 1 12 0 7 -6 -9.75 0 0 0 0 7 -19 0 8 1 19 0 7 -4 -7.5 0 0 0 0 7 -12 -9 0 0 0 0 7 -4 -6.5 0 0 0 0 7 -9 -5.63 0 1 -5.63 0 7 -9 -18 0 0 0 0 7 -10 -11.25 0 0 0 0 7 -10 -13.75 0 0 0 0 7 -6 -12 0 0 0 0 7 -10 -12.5 0 0 0 0 7 -4 -7 0 0 0 0 7 -10 -7.5 0 0 0 0 7 -4 -8 0 0 0 0 7 -8 -11 0 0 0 0 7 -12 0 4 1 12 0 7 -9 -3.38 0 1 -3.38 0 7 -10 -18.75 0 0 0 0 7 -2 -3.5 0 0 0 0 7 -2 -1 0 1 2 0 7 -2 -3.25 0 0 0 0 7 -2 0 1 1 0 0 7 -7 0 3 1 7 0 7 -8 0 3 1 0 0 7 -12 -6 0 1 -6 0 7 -2 -0.5 0 1 2 0 7 -9 -7.88 0 0 0 0 7 -8 -15 0 0 0 0 7 -2 -1.5 0 1 2 0 7 -12 -22.5 0 0 0 0 7 -8 -7 0 1 -7 0 7 -4 -5.5 0 0 0 0 7 -10 -8.75 0 0 0 0 7 -8 -9 0 0 0 0 7 -2 -4 0 0 0 0 7 -4 0 2 1 4 0 7 -8 -8 0 0 0 0 7 -9 -13.5 0 0 0 0 7 -9 -9 0 0 0 0 7 -6 -3.75 0 1 -3.75 0 7 -13 0 6 0 6 0 7 -5 -1.88 0 1 5 0 7 -6 -6 0 0 0 0 7 -5 -6.88 0 0 0 0 7 -8 -16 0 0 0 0 7 -12 -7.5 0 1 -7.5 0 7 -5 -1.25 0 1 -1.25 0 7 -9 -14.63 0 0 0 0 7 -8 -4 0 1 8 0 7 -10 -17.5 0 0 0 0 7 -5 -3.75 0 1 5 0 7 -6 -10.5 0 0 0 0 7 -13 0 5 1 13 0 7 -10 -16.25 0 0 0 0 7 -5 -7.5 0 0 0 0 7 -2 -1.75 0 1 2 0 7 -5 -9.38 0 0 0 0 7 -2 -2.75 0 0 0 0 7 -2 -0.75 0 1 -0.75 0 7 -5 -8.13 0 0 0 0 7 -9 -11.25 0 0 0 0 7 -8 -13 0 0 0 0 7 -9 -16.88 0 0 0 0 7 -2 -2 0 0 0 0 7 -12 -18 0 0 0 0 7 -8 -2 0 1 -2 0 7 -2 -3 0 0 0 0 7 -6 -4.5 0 1 6 0 7 -5 0 2 1 5 0 7 -12 -19.5 0 0 0 0 7 -9 -15.75 0 0 0 0 7 -8 -6 0 0 0 0 7 -10 -2.5 0 1 -2.5 0 7 -9 -6.75 0 0 0 0 7 -6 -6.75 0 0 0 0 7 -2 -3.75 0 0 0 0 7 -10 -5 0 1 -5 0 7 -2 -2.25 0 0 0 0 7 -26 0 12 1 26 0 7 -12 -13.5 0 0 0 0 7 -8 -5 0 0 0 0 7 -6 -3 0 1 6 0 7 -10 -3.75 0 1 10 0 7 -12 -10.5 0 0 0 0 7 -4 -5 0 0 0 0 7 -9 -2.25 0 1 -2.25 0 7 -4 -3 0 0 0 0 7 -9 -10.13 0 0 0 0 7 -28 0 13 0 13 0 7 -22 0 10 1 22 0 7 -10 -10 0 0 0 0 7 -4 -1 0 1 -1 0 7 -4 -2.5 0 0 0 0 7 -12 
-24 0 0 0 0 7 -8 -12 0 0 0 0 7 -3 0 1 1 0 0 7 -9 -12.38 0 0 0 0 7 -23 0 10 1 0 0 7 -4 -3.5 0 0 0 0 7 -4 -1.5 0 1 4 0 7 -8 -10 0 0 0 0 7 -8 -14 0 0 0 0 7 -4 -6 0 0 0 0 7 -25 0 10 1 0 0 7 -12 -16.5 0 0 0 0 7 -12 -12 0 0 0 0 7 -5 -2.5 0 1 5 0 7 -5 -8.75 0 0 0 0 7 -12 -4.5 0 1 -4.5 0 7 -12 -15 0 0 0 0 7 -5 -3.13 0 0 0 0 7 -12 -21 0 1 12 0 7 -5 -4.38 0 0 0 0 7 -6 -11.25 0 0 0 0 7 -30 0 12 1 0 0 7 -6 -1.5 0 1 6 0 7 -12 0 6 1 12 0 7 -4 -2 0 1 4 0 7 -10 -15 0 0 0 0 7 -6 -2.25 0 1 6 0 7 -10 -20 0 0 0 0 7 -6 -5.25 0 0 0 0 7 -5 -6.25 0 0 0 0 7 -6 -8.25 0 0 0 0 7 -4 -4.5 0 1 -4.5 0 8 -10 -12.5 0 0 0 0 8 -26 0 12 0 12 0 8 -6 -7.5 0 0 0 0 8 -4 -6.5 0 0 0 0 8 -12 -4.5 0 1 -4.5 0 8 -5 -2.5 0 1 5 0 8 -6 -12 0 0 0 0 8 -9 -14.63 0 0 0 0 8 -6 -6 0 1 -6 0 8 -22 0 10 0 10 0 8 -2 -1 0 1 2 0 8 -8 -3 0 1 8 0 8 -12 -9 0 1 12 0 8 -5 -3.75 0 1 5 0 8 -6 -3 0 1 6 0 8 -4 0 2 0 2 0 8 -28 0 13 1 28 0 8 -12 -15 0 0 0 0 8 -9 -11.25 0 0 0 0 8 -12 -10.5 0 1 12 0 8 -5 -1.88 0 1 5 0 8 -2 -2.75 0 0 0 0 8 -4 -7 0 0 0 0 8 -8 -4 0 1 8 0 8 -2 0 1 0 1 0 8 -2 -3.5 0 0 0 0 8 -2 -1.75 0 1 2 0 8 -5 -5 0 1 -5 0 8 -12 -12 0 0 0 0 8 -12 0 6 0 6 0 8 -6 -4.5 0 1 6 0 8 -30 0 12 1 0 0 8 -12 -16.5 0 0 0 0 8 -6 -9.75 0 0 0 0 8 -12 -22.5 0 0 0 0 8 -6 -9 0 0 0 0 8 -5 -3.13 0 1 5 0 8 -5 -9.38 0 0 0 0 8 -12 -7.5 0 1 -7.5 0 8 -5 0 2 1 5 0 8 -10 -15 0 0 0 0 8 -12 -3 0 1 -3 0 8 -13 0 6 0 6 0 8 -9 -16.88 0 0 0 0 8 -6 -11.25 0 0 0 0 8 -8 -5 0 1 8 0 8 -8 -14 0 0 0 0 8 -12 -24 0 0 0 0 8 -12 0 5 0 5 0 8 -9 -13.5 0 0 0 0 8 -6 -1.5 0 1 6 0 8 -2 -3 0 0 0 0 8 -10 -2.5 0 1 -2.5 0 8 -2 -0.75 0 1 -0.75 0 8 -6 -10.5 0 0 0 0 8 -2 -0.5 0 1 2 0 8 -10 -10 0 0 0 0 8 -8 -10 0 0 0 0 8 -9 -12.38 0 0 0 0 8 -4 -6 0 0 0 0 8 -6 -2.25 0 1 6 0 8 -9 -15.75 0 0 0 0 8 -12 -13.5 0 0 0 0 8 -8 -6 0 1 -6 0 8 -10 -18.75 0 0 0 0 8 -4 -2 0 1 4 0 8 -5 -1.25 0 1 -1.25 0 8 -6 -5.25 0 1 6 0 8 -4 -8 0 0 0 0 8 -25 0 9 1 25 0 8 -2 -3.25 0 0 0 0 8 -10 -11.25 0 0 0 0 8 -4 -7.5 0 0 0 0 8 -9 -5.63 0 1 -5.63 0 8 -6 -6.75 0 1 -6.75 0 8 -8 -2 0 1 -2 0 8 -5 -6.25 0 0 0 0 8 -23 0 10 0 10 0 8 -8 -13 0 0 0 0 8 -10 -13.75 0 0 0 0 8 -5 -10 0 0 0 0 8 -12 0 4 1 12 0 8 -2 -2.5 0 1 2 0 8 -19 0 8 1 19 0 8 -4 -4 0 0 0 0 8 -4 -1 0 1 -1 0 8 -4 -2.5 0 1 -2.5 0 8 -5 -8.13 0 0 0 0 8 -10 -3.75 0 1 10 0 8 -5 -8.75 0 0 0 0 8 -10 -7.5 0 1 -7.5 0 8 -10 -5 0 1 -5 0 8 -10 -20 0 0 0 0 8 -13 0 5 0 5 0 8 -8 -9 0 0 0 0 8 -8 -12 0 0 0 0 8 -10 -16.25 0 0 0 0 8 -5 -6.88 0 0 0 0 8 -4 -5.5 0 0 0 0 8 -5 -7.5 0 0 0 0 8 -9 -10.13 0 0 0 0 8 -6 -8.25 0 0 0 0 8 -26 0 10 0 10 0 8 -4 -5 0 1 4 0 8 -2 -2.25 0 1 2 0 8 -6 -3.75 0 1 -3.75 0 8 -8 -8 0 1 8 0 8 -9 -6.75 0 1 -6.75 0 8 -8 -15 0 0 0 0 8 -12 -6 0 1 -6 0 8 -25 0 10 1 0 0 8 -12 -19.5 0 0 0 0 8 -9 -7.88 0 1 -7.88 0 8 -4 -1.5 0 1 4 0 8 -8 -7 0 1 -7 0 8 -12 -18 0 0 0 0 8 -2 -2 0 1 2 0 8 -9 -18 0 0 0 0 8 -2 -1.25 0 1 2 0 8 -8 -16 0 0 0 0 8 -5 -4.38 0 1 -4.38 0 8 -2 -4 0 0 0 0 8 -5 -5.63 0 1 -5.63 0 8 -8 0 3 0 3 0 8 -10 -17.5 0 0 0 0 8 -8 -11 0 0 0 0 8 -2 -1.5 0 1 2 0 8 -4 -3.5 0 1 4 0 8 -2 -3.75 0 0 0 0 8 -3 0 1 1 0 0 8 -12 -21 0 0 0 0 8 -10 -8.75 0 1 -8.75 0 8 -9 -9 0 1 -9 0 8 -4 -3 0 1 4 0 8 -7 0 3 0 3 0 8 -9 -3.38 0 1 -3.38 0 8 -9 -2.25 0 1 -2.25 0 8 -10 -6.25 0 1 10 0 8 -9 -4.5 0 1 -4.5 0 8 -2 -1 0 1 2 0 9 -9 -13.5 0 0 0 0 9 -5 -6.88 0 0 0 0 9 -10 -10 0 0 0 0 9 -6 -2.25 0 1 6 0 9 -6 -6.75 0 1 -6.75 0 9 -9 -4.5 0 1 -4.5 0 9 -10 -13.75 0 0 0 0 9 -6 -8.25 0 0 0 0 9 -5 -10 0 0 0 0 9 -10 -6.25 0 1 10 0 9 -12 -3 0 1 -3 0 9 -12 -9 0 1 12 0 9 -8 -7 0 1 -7 0 9 -6 -12 0 0 0 0 9 -8 -2 0 1 -2 0 9 -12 -6 0 1 -6 0 9 -3 0 1 1 0 0 9 -10 -20 0 0 0 0 9 -5 -3.75 0 1 5 0 9 -2 -1.75 0 1 2 0 9 -6 -3.75 0 1 -3.75 
0 9 -9 -12.38 0 0 0 0 9 -5 -6.25 0 0 0 0 9 -12 0 4 1 12 0 9 -2 -1.5 0 1 2 0 9 -6 -5.25 0 1 6 0 9 -10 -18.75 0 0 0 0 9 -6 -6 0 1 -6 0 9 -12 0 5 1 12 0 9 -4 -2 0 1 4 0 9 -2 -4 0 0 0 0 9 -5 -2.5 0 1 5 0 9 -9 -15.75 0 0 0 0 9 -8 -4 0 1 8 0 9 -26 0 12 1 26 0 9 -6 -1.5 0 1 6 0 9 -4 -6 0 1 4 0 9 -10 -2.5 0 1 -2.5 0 9 -8 -12 0 0 0 0 9 -2 -3.5 0 0 0 0 9 -5 -5.63 0 0 0 0 9 -12 -24 0 0 0 0 9 -25 0 10 1 0 0 9 -4 -6.5 0 0 0 0 9 -5 -9.38 0 0 0 0 9 -5 -7.5 0 0 0 0 9 -4 -4 0 1 -4 0 9 -6 -10.5 0 0 0 0 9 -13 0 6 1 13 0 9 -12 -22.5 0 0 0 0 9 -4 -7.5 0 0 0 0 9 -5 0 2 1 5 0 9 -10 -15 0 0 0 0 9 -9 -16.88 0 0 0 0 9 -2 -2.5 0 0 0 0 9 -10 -16.25 0 0 0 0 9 -6 -11.25 0 0 0 0 9 -4 -1.5 0 1 4 0 9 -5 -3.13 0 1 5 0 9 -6 -9 0 0 0 0 9 -12 -19.5 0 0 0 0 9 -10 -12.5 0 0 0 0 9 -2 -3 0 1 -3 0 9 -8 -16 0 0 0 0 9 -4 0 2 1 4 0 9 -12 -7.5 0 1 -7.5 0 9 -12 -13.5 0 0 0 0 9 -22 0 10 1 22 0 9 -12 -21 0 0 0 0 9 -7 0 3 1 7 0 9 -10 -8.75 0 0 0 0 9 -2 -1.25 0 1 2 0 9 -9 -6.75 0 1 -6.75 0 9 -12 0 6 1 12 0 9 -28 0 13 1 28 0 9 -9 -10.13 0 0 0 0 9 -2 -0.5 0 1 2 0 9 -25 0 9 1 25 0 9 -6 -7.5 0 1 -7.5 0 9 -4 -3 0 1 4 0 9 -10 -3.75 0 1 10 0 9 -12 -4.5 0 1 -4.5 0 9 -12 -15 0 0 0 0 9 -6 -3 0 1 6 0 9 -9 -14.63 0 0 0 0 9 -5 -1.25 0 1 -1.25 0 9 -8 -11 0 0 0 0 9 -10 -17.5 0 0 0 0 9 -8 -10 0 0 0 0 9 -9 -9 0 0 0 0 9 -10 -11.25 0 0 0 0 9 -12 -12 0 0 0 0 9 -8 -14 0 0 0 0 9 -12 -16.5 0 0 0 0 9 -4 -7 0 0 0 0 9 -4 -1 0 1 -1 0 9 -5 -1.88 0 1 5 0 9 -8 0 3 1 0 0 9 -2 -3.25 0 1 -3.25 0 9 -5 -5 0 1 -5 0 9 -26 0 10 1 0 0 9 -12 -10.5 0 0 0 0 9 -2 0 1 1 0 0 9 -6 -9.75 0 0 0 0 9 -8 -3 0 1 8 0 9 -13 0 5 1 13 0 9 -10 -7.5 0 1 -7.5 0 9 -8 -13 0 0 0 0 9 -9 -3.38 0 1 -3.38 0 9 -8 -15 0 0 0 0 9 -30 0 12 1 0 0 9 -8 -8 0 1 8 0 9 -8 -5 0 1 8 0 9 -12 -18 0 0 0 0 9 -10 -5 0 1 -5 0 9 -9 -11.25 0 0 0 0 9 -9 -7.88 0 1 -7.88 0 9 -8 -6 0 1 -6 0 9 -6 -4.5 0 1 6 0 9 -8 -9 0 0 0 0 9 -4 -5.5 0 1 -5.5 0 9 -4 -5 0 1 4 0 9 -9 -2.25 0 1 -2.25 0 9 -23 0 10 1 0 0 9 -9 -5.63 0 1 -5.63 0 9 -4 -4.5 0 1 -4.5 0 9 -4 -8 0 0 0 0 9 -19 0 8 1 19 0 9 -2 -2 0 1 2 0 9 -5 -8.13 0 0 0 0 9 -5 -4.38 0 1 -4.38 0 9 -2 -2.25 0 1 2 0 9 -2 -0.75 0 1 -0.75 0 9 -2 -2.75 0 0 0 0 9 -5 -8.75 0 0 0 0 9 -9 -18 0 0 0 0 9 -4 -3.5 0 1 4 0 9 -4 -2.5 0 1 -2.5 0 9 -4 -4.5 0 0 0 0 10 -10 -12.5 0 1 -12.5 0 10 -26 0 12 1 26 0 10 -6 -7.5 0 1 -7.5 0 10 -4 -6.5 0 1 4 0 10 -12 -4.5 0 1 -4.5 0 10 -5 -2.5 0 1 5 0 10 -6 -12 0 0 0 0 10 -9 -14.63 0 1 9 0 10 -6 -6 0 0 0 0 10 -22 0 10 0 10 0 10 -2 -1 0 1 2 0 10 -8 -3 0 1 8 0 10 -12 -9 0 1 12 0 10 -5 -3.75 0 1 5 0 10 -6 -3 0 1 6 0 10 -4 0 2 0 2 0 10 -28 0 13 0 13 0 10 -12 -15 0 1 -15 0 10 -9 -11.25 0 1 9 0 10 -12 -10.5 0 1 12 0 10 -5 -1.88 0 1 5 0 10 -2 -2.75 0 1 2 0 10 -4 -7 0 1 -7 0 10 -8 -4 0 1 8 0 10 -2 0 1 0 1 0 10 -2 -3.5 0 1 -3.5 0 10 -2 -1.75 0 1 2 0 10 -5 -5 0 1 -5 0 10 -12 -12 0 1 12 0 10 -12 0 6 0 6 0 10 -6 -4.5 0 1 6 0 10 -30 0 12 0 12 0 10 -12 -16.5 0 1 -16.5 0 10 -6 -9.75 0 1 6 0 10 -12 -22.5 0 0 0 0 10 -6 -9 0 1 -9 0 10 -5 -3.13 0 1 5 0 10 -5 -9.38 0 0 0 0 10 -12 -7.5 0 1 -7.5 0 10 -5 0 2 0 2 0 10 -10 -15 0 1 -15 0 10 -12 -3 0 1 -3 0 10 -13 0 6 1 13 0 10 -9 -16.88 0 1 9 0 10 -6 -11.25 0 1 6 0 10 -8 -5 0 1 8 0 10 -8 -14 0 1 8 0 10 -12 -24 0 1 -24 0 10 -12 0 5 1 12 0 10 -9 -13.5 0 1 9 0 10 -6 -1.5 0 1 6 0 10 -2 -3 0 1 -3 0 10 -10 -2.5 0 1 -2.5 0 10 -2 -0.75 0 1 -0.75 0 10 -6 -10.5 0 1 -10.5 0 10 -2 -0.5 0 1 2 0 10 -10 -10 0 1 10 0 10 -8 -10 0 1 -10 0 10 -9 -12.38 0 1 -12.38 0 10 -4 -6 0 1 4 0 10 -6 -2.25 0 1 6 0 10 -9 -15.75 0 1 -15.75 0 10 -12 -13.5 0 1 12 0 10 -8 -6 0 1 -6 0 10 -10 -18.75 0 1 10 0 10 -4 -2 0 1 4 0 10 -5 -1.25 0 1 -1.25 0 10 -6 -5.25 0 1 6 0 10 -4 -8 0 1 4 0 
10 -25 0 9 0 9 0 10 -2 -3.25 0 0 0 0 10 -10 -11.25 0 1 -11.25 0 10 -4 -7.5 0 1 -7.5 0 10 -9 -5.63 0 1 -5.63 0 10 -6 -6.75 0 1 -6.75 0 10 -8 -2 0 1 -2 0 10 -5 -6.25 0 1 5 0 10 -23 0 10 1 0 0 10 -8 -13 0 1 -13 0 10 -10 -13.75 0 1 -13.75 0 10 -5 -10 0 1 5 0 10 -12 0 4 1 12 0 10 -2 -2.5 0 1 2 0 10 -19 0 8 1 19 0 10 -4 -4 0 1 -4 0 10 -4 -1 0 1 -1 0 10 -4 -2.5 0 1 -2.5 0 10 -5 -8.13 0 1 5 0 10 -10 -3.75 0 1 10 0 10 -5 -8.75 0 1 5 0 10 -10 -7.5 0 1 -7.5 0 10 -10 -5 0 1 -5 0 10 -10 -20 0 1 10 0 10 -13 0 5 1 13 0 10 -8 -9 0 1 -9 0 10 -8 -12 0 0 0 0 10 -10 -16.25 0 0 0 0 10 -5 -6.88 0 1 5 0 10 -4 -5.5 0 1 -5.5 0 10 -5 -7.5 0 0 0 0 10 -9 -10.13 0 0 0 0 10 -6 -8.25 0 1 -8.25 0 10 -26 0 10 1 0 0 10 -4 -5 0 1 4 0 10 -2 -2.25 0 1 2 0 10 -6 -3.75 0 1 -3.75 0 10 -9 -6.75 0 1 -6.75 0 10 -8 -15 0 0 0 0 10 -12 -6 0 1 -6 0 10 -25 0 10 1 0 0 10 -12 -19.5 0 0 0 0 10 -9 -7.88 0 1 -7.88 0 10 -4 -1.5 0 1 4 0 10 -8 -7 0 1 -7 0 10 -12 -18 0 1 -18 0 10 -2 -2 0 1 2 0 10 -9 -18 0 1 -18 0 10 -2 -1.25 0 1 2 0 10 -8 -16 0 1 -16 0 10 -5 -4.38 0 1 -4.38 0 10 -2 -4 0 1 2 0 10 -5 -5.63 0 1 -5.63 0 10 -8 0 3 1 0 0 10 -10 -17.5 0 1 -17.5 0 10 -8 -11 0 1 -11 0 10 -2 -1.5 0 1 2 0 10 -4 -3.5 0 1 4 0 10 -2 -3.75 0 1 -3.75 0 10 -3 0 1 1 0 0 10 -12 -21 0 1 12 0 10 -10 -8.75 0 1 -8.75 0 10 -9 -9 0 1 -9 0 10 -4 -3 0 1 4 0 10 -7 0 3 1 7 0 10 -9 -3.38 0 1 -3.38 0 10 -9 -2.25 0 1 -2.25 0 10 -10 -6.25 0 1 10 0 10 -9 -4.5 0 1 -4.5 0 10 -2 -1 0 1 2 0 11 -9 -13.5 0 0 0 0 11 -5 -6.88 0 0 0 0 11 -10 -10 0 1 10 0 11 -6 -2.25 0 1 6 0 11 -6 -6.75 0 0 0 0 11 -9 -4.5 0 1 -4.5 0 11 -10 -13.75 0 0 0 0 11 -6 -8.25 0 0 0 0 11 -5 -10 0 0 0 0 11 -10 -6.25 0 1 10 0 11 -12 -3 0 1 -3 0 11 -12 -9 0 1 12 0 11 -8 -7 0 0 0 0 11 -6 -12 0 0 0 0 11 -8 -2 0 1 -2 0 11 -12 -6 0 1 -6 0 11 -3 0 1 1 0 0 11 -10 -20 0 0 0 0 11 -5 -3.75 0 1 5 0 11 -2 -1.75 0 1 2 0 11 -6 -3.75 0 1 -3.75 0 11 -9 -12.38 0 0 0 0 11 -5 -6.25 0 1 5 0 11 -12 0 4 0 4 0 11 -2 -1.5 0 1 2 0 11 -6 -5.25 0 0 0 0 11 -10 -18.75 0 0 0 0 11 -6 -6 0 1 -6 0 11 -12 0 5 0 5 0 11 -4 -2 0 1 4 0 11 -2 -4 0 0 0 0 11 -5 -2.5 0 1 5 0 11 -2 -3.75 0 0 0 0 11 -9 -15.75 0 0 0 0 11 -8 -4 0 1 8 0 11 -26 0 12 0 12 0 11 -6 -1.5 0 1 6 0 11 -4 -6 0 0 0 0 11 -10 -2.5 0 1 -2.5 0 11 -8 -12 0 0 0 0 11 -2 -3.5 0 0 0 0 11 -5 -5.63 0 0 0 0 11 -12 -24 0 0 0 0 11 -25 0 10 1 0 0 11 -4 -6.5 0 0 0 0 11 -5 -9.38 0 0 0 0 11 -5 -7.5 0 0 0 0 11 -4 -4 0 1 -4 0 11 -6 -10.5 0 0 0 0 11 -13 0 6 0 6 0 11 -12 -22.5 0 0 0 0 11 -4 -7.5 0 0 0 0 11 -10 -15 0 0 0 0 11 -9 -16.88 0 0 0 0 11 -2 -2.5 0 0 0 0 11 -10 -16.25 0 0 0 0 11 -6 -11.25 0 0 0 0 11 -4 -1.5 0 1 4 0 11 -5 -3.13 0 1 5 0 11 -6 -9 0 0 0 0 11 -12 -19.5 0 0 0 0 11 -10 -12.5 0 0 0 0 11 -2 -3 0 1 -3 0 11 -8 -16 0 0 0 0 11 -4 0 2 0 2 0 11 -12 -7.5 0 1 -7.5 0 11 -12 -13.5 0 0 0 0 11 -22 0 10 0 10 0 11 -12 -21 0 0 0 0 11 -7 0 3 0 3 0 11 -10 -8.75 0 1 -8.75 0 11 -2 -1.25 0 1 2 0 11 -9 -6.75 0 1 -6.75 0 11 -12 0 6 1 12 0 11 -28 0 13 0 13 0 11 -9 -10.13 0 0 0 0 11 -2 -0.5 0 1 2 0 11 -25 0 9 1 25 0 11 -6 -7.5 0 0 0 0 11 -4 -3 0 1 4 0 11 -10 -3.75 0 1 10 0 11 -12 -4.5 0 1 -4.5 0 11 -12 -15 0 0 0 0 11 -6 -3 0 1 6 0 11 -9 -14.63 0 0 0 0 11 -5 -1.25 0 1 -1.25 0 11 -8 -11 0 1 -11 0 11 -10 -17.5 0 0 0 0 11 -8 -10 0 0 0 0 11 -9 -9 0 1 -9 0 11 -10 -11.25 0 0 0 0 11 -12 -12 0 0 0 0 11 -8 -14 0 0 0 0 11 -12 -16.5 0 0 0 0 11 -4 -7 0 0 0 0 11 -4 -1 0 1 -1 0 11 -5 -1.88 0 1 5 0 11 -8 0 3 1 0 0 11 -2 -3.25 0 0 0 0 11 -5 -5 0 1 -5 0 11 -26 0 10 0 10 0 11 -12 -10.5 0 1 12 0 11 -2 0 1 0 1 0 11 -6 -9.75 0 0 0 0 11 -8 -3 0 1 8 0 11 -13 0 5 1 13 0 11 -10 -7.5 0 1 -7.5 0 11 -8 -13 0 0 0 0 11 -9 -3.38 0 1 -3.38 0 11 -8 -15 0 
0 0 0 11 -30 0 12 1 0 0 11 -8 -8 0 1 8 0 11 -8 -5 0 1 8 0 11 -12 -18 0 0 0 0 11 -10 -5 0 1 -5 0 11 -9 -11.25 0 0 0 0 11 -9 -7.88 0 1 -7.88 0 11 -8 -6 0 1 -6 0 11 -6 -4.5 0 1 6 0 11 -8 -9 0 0 0 0 11 -4 -5.5 0 0 0 0 11 -4 -5 0 0 0 0 11 -9 -2.25 0 1 -2.25 0 11 -23 0 10 0 10 0 11 -9 -5.63 0 1 -5.63 0 11 -4 -4.5 0 0 0 0 11 -4 -8 0 0 0 0 11 -19 0 8 1 19 0 11 -2 -2 0 1 2 0 11 -5 -8.13 0 0 0 0 11 -5 -4.38 0 1 -4.38 0 11 -2 -2.25 0 0 0 0 11 -2 -0.75 0 1 -0.75 0 11 -2 -2.75 0 0 0 0 11 -5 -8.75 0 0 0 0 11 -9 -18 0 0 0 0 11 -4 -3.5 0 1 4 0 11 -4 -2.5 0 1 -2.5 0 11 -9 -6.75 0 0 0 0 12 -6 -6.75 0 0 0 0 12 -6 -3 0 1 6 0 12 -2 -1.5 0 1 2 0 12 -4 -3 0 0 0 0 12 -5 -6.88 0 0 0 0 12 -12 -9 0 0 0 0 12 -4 -5 0 0 0 0 12 -5 -7.5 0 0 0 0 12 -4 -4 0 0 0 0 12 -9 -5.63 0 1 -5.63 0 12 -9 -14.63 0 1 9 0 12 -5 -9.38 0 0 0 0 12 -6 -4.5 0 0 0 0 12 -8 -7 0 0 0 0 12 -10 -16.25 0 0 0 0 12 -10 -17.5 0 0 0 0 12 -9 -16.88 0 0 0 0 12 -8 -5 0 1 8 0 12 -6 -1.5 0 1 6 0 12 -12 -18 0 0 0 0 12 -5 -6.25 0 0 0 0 12 -8 -4 0 1 8 0 12 -9 -15.75 0 0 0 0 12 -9 -13.5 0 0 0 0 12 -5 -8.13 0 0 0 0 12 -2 0 1 0 1 0 12 -2 -3.75 0 0 0 0 12 -4 -6.5 0 0 0 0 12 -10 -5 0 1 -5 0 12 -12 -22.5 0 0 0 0 12 -2 -1 0 1 2 0 12 -13 0 6 0 6 0 12 -5 -2.5 0 1 5 0 12 -2 -0.5 0 1 2 0 12 -2 -3.25 0 0 0 0 12 -30 0 12 0 12 0 12 -8 -8 0 0 0 0 12 -4 -5.5 0 0 0 0 12 -23 0 10 0 10 0 12 -4 -3.5 0 0 0 0 12 -5 0 2 0 2 0 12 -8 0 3 0 3 0 12 -9 -10.13 0 0 0 0 12 -8 -16 0 0 0 0 12 -12 -24 0 0 0 0 12 -9 -3.38 0 1 -3.38 0 12 -6 -5.25 0 0 0 0 12 -2 -4 0 0 0 0 12 -4 -1 0 1 -1 0 12 -6 -11.25 0 0 0 0 12 -5 -4.38 0 0 0 0 12 -6 -2.25 0 1 6 0 12 -12 -10.5 0 0 0 0 12 -9 -18 0 1 -18 0 12 -10 -20 0 1 10 0 12 -4 -4.5 0 0 0 0 12 -9 -2.25 0 1 -2.25 0 12 -4 -6 0 0 0 0 12 -8 -10 0 0 0 0 12 -5 -5 0 0 0 0 12 -5 -8.75 0 0 0 0 12 -8 -6 0 0 0 0 12 -10 -13.75 0 0 0 0 12 -2 -2.5 0 0 0 0 12 -8 -11 0 0 0 0 12 -4 -2 0 1 4 0 12 -10 -7.5 0 1 -7.5 0 12 -22 0 10 1 22 0 12 -25 0 10 0 10 0 12 -6 -9.75 0 0 0 0 12 -12 0 5 0 5 0 12 -4 -2.5 0 1 -2.5 0 12 -8 -3 0 1 8 0 12 -10 -11.25 0 0 0 0 12 -5 -10 0 1 5 0 12 -10 -15 0 1 -15 0 12 -2 -3.5 0 0 0 0 12 -12 0 4 1 12 0 12 -13 0 5 0 5 0 12 -5 -3.75 0 1 5 0 12 -26 0 12 0 12 0 12 -5 -5.63 0 0 0 0 12 -8 -2 0 1 -2 0 12 -2 -3 0 0 0 0 12 -6 -9 0 0 0 0 12 -9 -7.88 0 0 0 0 12 -8 -14 0 0 0 0 12 -28 0 13 1 28 0 12 -9 -12.38 0 0 0 0 12 -8 -15 0 0 0 0 12 -10 -2.5 0 1 -2.5 0 12 -4 0 2 0 2 0 12 -12 -6 0 1 -6 0 12 -12 -16.5 0 1 -16.5 0 12 -4 -7.5 0 0 0 0 12 -10 -8.75 0 1 -8.75 0 12 -10 -18.75 0 1 10 0 12 -26 0 10 0 10 0 12 -12 -21 0 0 0 0 12 -2 -0.75 0 1 -0.75 0 12 -9 -9 0 0 0 0 12 -10 -6.25 0 0 0 0 12 -8 -12 0 0 0 0 12 -3 0 1 1 0 0 12 -5 -1.88 0 1 5 0 12 -6 -7.5 0 0 0 0 12 -12 -13.5 0 0 0 0 12 -4 -7 0 0 0 0 12 -6 -8.25 0 0 0 0 12 -6 -12 0 0 0 0 12 -6 -10.5 0 0 0 0 12 -4 -8 0 0 0 0 12 -6 -6 0 0 0 0 12 -12 0 6 0 6 0 12 -12 -19.5 0 0 0 0 12 -19 0 8 0 8 0 12 -12 -15 0 0 0 0 12 -2 -1.75 0 0 0 0 12 -6 -3.75 0 1 -3.75 0 12 -2 -1.25 0 0 0 0 12 -5 -1.25 0 1 -1.25 0 12 -4 -1.5 0 1 4 0 12 -8 -13 0 0 0 0 12 -12 -7.5 0 0 0 0 12 -12 -3 0 0 0 0 12 -2 -2.75 0 0 0 0 12 -7 0 3 1 7 0 12 -25 0 9 0 9 0 12 -2 -2 0 0 0 0 12 -12 -4.5 0 1 -4.5 0 12 -12 -12 0 1 12 0 12 -5 -3.13 0 0 0 0 12 -9 -11.25 0 0 0 0 12 -8 -9 0 0 0 0 12 -2 -2.25 0 0 0 0 12 -9 -4.5 0 0 0 0 12 -10 -3.75 0 1 10 0 12 -10 -10 0 0 0 0 12 -10 -12.5 0 0 0 0 12 -2 -2.5 0 0 0 0 13 -5 -5.63 0 0 0 0 13 -6 -7.5 0 0 0 0 13 -26 0 10 1 0 0 13 -9 -4.5 0 1 -4.5 0 13 -2 -1.25 0 1 2 0 13 -8 -3 0 1 8 0 13 -25 0 9 0 9 0 13 -4 -4.5 0 0 0 0 13 -5 -10 0 0 0 0 13 -6 -9 0 0 0 0 13 -10 -6.25 0 1 10 0 13 -4 -4 0 1 -4 0 13 -12 -3 0 1 -3 0 13 -5 -5 0 1 -5 0 13 -12 0 5 0 
5 0 13 -6 -9.75 0 0 0 0 13 -19 0 8 0 8 0 13 -4 -7.5 0 0 0 0 13 -12 -9 0 1 12 0 13 -4 -6.5 0 0 0 0 13 -9 -5.63 0 1 -5.63 0 13 -9 -18 0 0 0 0 13 -10 -11.25 0 0 0 0 13 -10 -13.75 0 0 0 0 13 -6 -12 0 0 0 0 13 -10 -12.5 0 0 0 0 13 -4 -7 0 0 0 0 13 -10 -7.5 0 0 0 0 13 -4 -8 0 0 0 0 13 -8 -11 0 0 0 0 13 -12 0 4 0 4 0 13 -9 -3.38 0 1 -3.38 0 13 -10 -18.75 0 0 0 0 13 -2 -3.5 0 0 0 0 13 -2 -1 0 0 0 0 13 -2 -3.25 0 0 0 0 13 -2 0 1 0 1 0 13 -7 0 3 0 3 0 13 -8 0 3 0 3 0 13 -12 -6 0 1 -6 0 13 -2 -0.5 0 1 2 0 13 -9 -7.88 0 0 0 0 13 -8 -15 0 0 0 0 13 -2 -1.5 0 0 0 0 13 -12 -22.5 0 0 0 0 13 -8 -7 0 1 -7 0 13 -4 -5.5 0 0 0 0 13 -10 -8.75 0 0 0 0 13 -8 -9 0 0 0 0 13 -2 -4 0 0 0 0 13 -4 0 2 0 2 0 13 -8 -8 0 0 0 0 13 -9 -13.5 0 0 0 0 13 -9 -9 0 0 0 0 13 -6 -3.75 0 0 0 0 13 -13 0 6 0 6 0 13 -5 -1.88 0 1 5 0 13 -6 -6 0 0 0 0 13 -5 -6.88 0 0 0 0 13 -8 -16 0 0 0 0 13 -12 -7.5 0 0 0 0 13 -5 -1.25 0 0 0 0 13 -9 -14.63 0 0 0 0 13 -8 -4 0 1 8 0 13 -10 -17.5 0 0 0 0 13 -5 -3.75 0 0 0 0 13 -6 -10.5 0 0 0 0 13 -13 0 5 0 5 0 13 -10 -16.25 0 1 10 0 13 -5 -7.5 0 0 0 0 13 -2 -1.75 0 0 0 0 13 -5 -9.38 0 0 0 0 13 -2 -2.75 0 0 0 0 13 -2 -0.75 0 1 -0.75 0 13 -5 -8.13 0 0 0 0 13 -9 -11.25 0 0 0 0 13 -8 -13 0 0 0 0 13 -9 -16.88 0 0 0 0 13 -2 -2 0 0 0 0 13 -12 -18 0 0 0 0 13 -8 -2 0 0 0 0 13 -2 -3 0 0 0 0 13 -6 -4.5 0 1 6 0 13 -5 0 2 0 2 0 13 -12 -19.5 0 0 0 0 13 -9 -15.75 0 0 0 0 13 -8 -6 0 0 0 0 13 -10 -2.5 0 1 -2.5 0 13 -9 -6.75 0 1 -6.75 0 13 -6 -6.75 0 0 0 0 13 -2 -3.75 0 0 0 0 13 -10 -5 0 0 0 0 13 -2 -2.25 0 0 0 0 13 -26 0 12 0 12 0 13 -12 -13.5 0 0 0 0 13 -8 -5 0 0 0 0 13 -6 -3 0 1 6 0 13 -10 -3.75 0 0 0 0 13 -12 -10.5 0 0 0 0 13 -4 -5 0 0 0 0 13 -9 -2.25 0 1 -2.25 0 13 -4 -3 0 0 0 0 13 -9 -10.13 0 0 0 0 13 -28 0 13 0 13 0 13 -22 0 10 0 10 0 13 -10 -10 0 0 0 0 13 -4 -1 0 1 -1 0 13 -4 -2.5 0 0 0 0 13 -12 -24 0 0 0 0 13 -8 -12 0 0 0 0 13 -3 0 1 0 1 0 13 -9 -12.38 0 0 0 0 13 -23 0 10 0 10 0 13 -4 -3.5 0 0 0 0 13 -4 -1.5 0 0 0 0 13 -8 -10 0 0 0 0 13 -8 -14 0 0 0 0 13 -4 -6 0 0 0 0 13 -25 0 10 0 10 0 13 -12 -16.5 0 0 0 0 13 -12 -12 0 0 0 0 13 -5 -2.5 0 0 0 0 13 -5 -8.75 0 0 0 0 13 -12 -4.5 0 0 0 0 13 -12 -15 0 0 0 0 13 -5 -3.13 0 0 0 0 13 -12 -21 0 0 0 0 13 -5 -4.38 0 0 0 0 13 -6 -11.25 0 0 0 0 13 -30 0 12 0 12 0 13 -6 -1.5 0 0 0 0 13 -12 0 6 0 6 0 13 -4 -2 0 0 0 0 13 -10 -15 0 0 0 0 13 -6 -2.25 0 1 6 0 13 -10 -20 0 0 0 0 13 -6 -5.25 0 0 0 0 13 -5 -6.25 0 0 0 0 13 -6 -8.25 0 0 0 0 13 -2 -1 0 1 2 0 14 -9 -13.5 0 0 0 0 14 -5 -6.88 0 1 5 0 14 -10 -10 0 1 10 0 14 -6 -2.25 0 1 6 0 14 -6 -6.75 0 0 0 0 14 -9 -4.5 0 1 -4.5 0 14 -10 -13.75 0 1 -13.75 0 14 -6 -8.25 0 0 0 0 14 -5 -10 0 0 0 0 14 -10 -6.25 0 1 10 0 14 -12 -3 0 1 -3 0 14 -12 -9 0 0 0 0 14 -8 -7 0 1 -7 0 14 -6 -12 0 1 6 0 14 -8 -2 0 1 -2 0 14 -12 -6 0 1 -6 0 14 -3 0 1 0 1 0 14 -10 -20 0 1 10 0 14 -5 -3.75 0 1 5 0 14 -2 -1.75 0 1 2 0 14 -6 -3.75 0 1 -3.75 0 14 -9 -12.38 0 0 0 0 14 -5 -6.25 0 0 0 0 14 -12 0 4 1 12 0 14 -2 -1.5 0 1 2 0 14 -6 -5.25 0 0 0 0 14 -10 -18.75 0 0 0 0 14 -6 -6 0 1 -6 0 14 -12 0 5 0 5 0 14 -4 -2 0 1 4 0 14 -2 -4 0 0 0 0 14 -5 -2.5 0 1 5 0 14 -2 -3.75 0 0 0 0 14 -9 -15.75 0 1 -15.75 0 14 -8 -4 0 1 8 0 14 -26 0 12 0 12 0 14 -6 -1.5 0 1 6 0 14 -4 -6 0 0 0 0 14 -10 -2.5 0 1 -2.5 0 14 -8 -12 0 0 0 0 14 -2 -3.5 0 0 0 0 14 -5 -5.63 0 1 -5.63 0 14 -12 -24 0 0 0 0 14 -25 0 10 1 0 0 14 -4 -6.5 0 0 0 0 14 -5 -9.38 0 0 0 0 14 -5 -7.5 0 0 0 0 14 -4 -4 0 0 0 0 14 -6 -10.5 0 1 -10.5 0 14 -13 0 6 0 6 0 14 -12 -22.5 0 0 0 0 14 -4 -7.5 0 0 0 0 14 -5 0 2 1 5 0 14 -10 -15 0 0 0 0 14 -9 -16.88 0 0 0 0 14 -2 -2.5 0 0 0 0 14 -10 -16.25 0 1 10 0 14 -6 -11.25 0 0 0 0 14 -4 -1.5 0 
1 4 0 14 -5 -3.13 0 1 5 0 14 -6 -9 0 0 0 0 14 -12 -19.5 0 0 0 0 14 -10 -12.5 0 1 -12.5 0 14 -2 -3 0 0 0 0 14 -8 -16 0 1 -16 0 14 -4 0 2 1 4 0 14 -12 -7.5 0 1 -7.5 0 14 -12 -13.5 0 1 12 0 14 -22 0 10 0 10 0 14 -12 -21 0 0 0 0 14 -7 0 3 1 7 0 14 -10 -8.75 0 1 -8.75 0 14 -2 -1.25 0 0 0 0 14 -9 -6.75 0 1 -6.75 0 14 -12 0 6 1 12 0 14 -28 0 13 0 13 0 14 -9 -10.13 0 1 -10.13 0 14 -2 -0.5 0 1 2 0 14 -25 0 9 1 25 0 14 -6 -7.5 0 0 0 0 14 -4 -3 0 1 4 0 14 -10 -3.75 0 1 10 0 14 -12 -4.5 0 1 -4.5 0 14 -12 -15 0 1 -15 0 14 -6 -3 0 1 6 0 14 -9 -14.63 0 0 0 0 14 -5 -1.25 0 1 -1.25 0 14 -8 -11 0 0 0 0 14 -10 -17.5 0 0 0 0 14 -8 -10 0 0 0 0 14 -9 -9 0 1 -9 0 14 -10 -11.25 0 0 0 0 14 -12 -12 0 1 12 0 14 -8 -14 0 0 0 0 14 -12 -16.5 0 0 0 0 14 -4 -7 0 1 -7 0 14 -4 -1 0 1 -1 0 14 -5 -1.88 0 1 5 0 14 -8 0 3 1 0 0 14 -2 -3.25 0 0 0 0 14 -5 -5 0 1 -5 0 14 -26 0 10 1 0 0 14 -12 -10.5 0 1 12 0 14 -2 0 1 0 1 0 14 -6 -9.75 0 0 0 0 14 -8 -3 0 1 8 0 14 -13 0 5 0 5 0 14 -10 -7.5 0 1 -7.5 0 14 -8 -13 0 0 0 0 14 -9 -3.38 0 1 -3.38 0 14 -8 -15 0 0 0 0 14 -30 0 12 1 0 0 14 -8 -8 0 0 0 0 14 -8 -5 0 1 8 0 14 -12 -18 0 0 0 0 14 -10 -5 0 1 -5 0 14 -9 -11.25 0 0 0 0 14 -9 -7.88 0 1 -7.88 0 14 -8 -6 0 1 -6 0 14 -6 -4.5 0 1 6 0 14 -8 -9 0 1 -9 0 14 -4 -5.5 0 0 0 0 14 -4 -5 0 0 0 0 14 -9 -2.25 0 1 -2.25 0 14 -23 0 10 1 0 0 14 -9 -5.63 0 1 -5.63 0 14 -4 -4.5 0 1 -4.5 0 14 -4 -8 0 1 4 0 14 -19 0 8 0 8 0 14 -2 -2 0 1 2 0 14 -5 -8.13 0 1 5 0 14 -5 -4.38 0 1 -4.38 0 14 -2 -2.25 0 0 0 0 14 -2 -0.75 0 1 -0.75 0 14 -2 -2.75 0 0 0 0 14 -5 -8.75 0 0 0 0 14 -9 -18 0 0 0 0 14 -4 -3.5 0 1 4 0 14 -4 -2.5 0 1 -2.5 0 14 -9 -6.75 0 1 -6.75 0 15 -6 -6.75 0 1 -6.75 0 15 -6 -3 0 1 6 0 15 -2 -1.5 0 1 2 0 15 -4 -3 0 1 4 0 15 -5 -6.88 0 1 5 0 15 -12 -9 0 1 12 0 15 -4 -5 0 1 4 0 15 -5 -7.5 0 1 -7.5 0 15 -4 -4 0 1 -4 0 15 -9 -5.63 0 1 -5.63 0 15 -9 -14.63 0 0 0 0 15 -5 -9.38 0 1 5 0 15 -6 -4.5 0 1 6 0 15 -8 -7 0 1 -7 0 15 -10 -16.25 0 0 0 0 15 -10 -17.5 0 0 0 0 15 -9 -16.88 0 0 0 0 15 -8 -5 0 1 8 0 15 -6 -1.5 0 1 6 0 15 -12 -18 0 1 -18 0 15 -5 -6.25 0 1 5 0 15 -8 -4 0 1 8 0 15 -9 -15.75 0 0 0 0 15 -9 -13.5 0 1 9 0 15 -5 -8.13 0 1 5 0 15 -2 0 1 0 1 0 15 -2 -3.75 0 1 -3.75 0 15 -4 -6.5 0 1 4 0 15 -10 -5 0 1 -5 0 15 -12 -22.5 0 0 0 0 15 -2 -1 0 1 2 0 15 -13 0 6 0 6 0 15 -5 -2.5 0 1 5 0 15 -2 -0.5 0 1 2 0 15 -2 -3.25 0 1 -3.25 0 15 -30 0 12 1 0 0 15 -8 -8 0 1 8 0 15 -4 -5.5 0 1 -5.5 0 15 -23 0 10 1 0 0 15 -4 -3.5 0 1 4 0 15 -5 0 2 1 5 0 15 -8 0 3 1 0 0 15 -9 -10.13 0 0 0 0 15 -8 -16 0 0 0 0 15 -12 -24 0 0 0 0 15 -9 -3.38 0 1 -3.38 0 15 -6 -5.25 0 1 6 0 15 -2 -4 0 0 0 0 15 -4 -1 0 1 -1 0 15 -6 -11.25 0 0 0 0 15 -5 -4.38 0 1 -4.38 0 15 -6 -2.25 0 1 6 0 15 -12 -10.5 0 0 0 0 15 -9 -18 0 0 0 0 15 -10 -20 0 0 0 0 15 -4 -4.5 0 1 -4.5 0 15 -9 -2.25 0 1 -2.25 0 15 -4 -6 0 1 4 0 15 -8 -10 0 1 -10 0 15 -5 -5 0 1 -5 0 15 -5 -8.75 0 1 5 0 15 -8 -6 0 1 -6 0 15 -10 -13.75 0 0 0 0 15 -2 -2.5 0 1 2 0 15 -8 -11 0 0 0 0 15 -4 -2 0 1 4 0 15 -10 -7.5 0 1 -7.5 0 15 -22 0 10 1 22 0 15 -25 0 10 0 10 0 15 -6 -9.75 0 0 0 0 15 -12 0 5 1 12 0 15 -4 -2.5 0 1 -2.5 0 15 -8 -3 0 1 8 0 15 -10 -11.25 0 0 0 0 15 -5 -10 0 0 0 0 15 -10 -15 0 0 0 0 15 -2 -3.5 0 1 -3.5 0 15 -12 0 4 1 12 0 15 -13 0 5 1 13 0 15 -5 -3.75 0 1 5 0 15 -26 0 12 0 12 0 15 -5 -5.63 0 1 -5.63 0 15 -8 -2 0 1 -2 0 15 -2 -3 0 1 -3 0 15 -6 -9 0 0 0 0 15 -9 -7.88 0 1 -7.88 0 15 -8 -14 0 1 8 0 15 -28 0 13 1 28 0 15 -9 -12.38 0 0 0 0 15 -8 -15 0 0 0 0 15 -10 -2.5 0 1 -2.5 0 15 -4 0 2 0 2 0 15 -12 -6 0 1 -6 0 15 -12 -16.5 0 0 0 0 15 -4 -7.5 0 1 -7.5 0 15 -10 -8.75 0 1 -8.75 0 15 -10 -18.75 0 1 10 0 15 -26 0 10 1 0 0 15 -12 -21 0 0 0 
0 15 -2 -0.75 0 1 -0.75 0 15 -9 -9 0 1 -9 0 15 -10 -6.25 0 1 10 0 15 -8 -12 0 0 0 0 15 -3 0 1 1 0 0 15 -5 -1.88 0 1 5 0 15 -6 -7.5 0 1 -7.5 0 15 -12 -13.5 0 1 12 0 15 -4 -7 0 0 0 0 15 -6 -8.25 0 0 0 0 15 -6 -12 0 0 0 0 15 -6 -10.5 0 1 -10.5 0 15 -4 -8 0 1 4 0 15 -6 -6 0 1 -6 0 15 -12 0 6 0 6 0 15 -12 -19.5 0 0 0 0 15 -19 0 8 1 19 0 15 -12 -15 0 0 0 0 15 -2 -1.75 0 1 2 0 15 -6 -3.75 0 1 -3.75 0 15 -2 -1.25 0 1 2 0 15 -5 -1.25 0 1 -1.25 0 15 -4 -1.5 0 1 4 0 15 -8 -13 0 0 0 0 15 -12 -7.5 0 1 -7.5 0 15 -12 -3 0 1 -3 0 15 -2 -2.75 0 1 2 0 15 -7 0 3 1 7 0 15 -25 0 9 1 25 0 15 -2 -2 0 1 2 0 15 -12 -4.5 0 1 -4.5 0 15 -12 -12 0 1 12 0 15 -5 -3.13 0 1 5 0 15 -9 -11.25 0 0 0 0 15 -8 -9 0 1 -9 0 15 -2 -2.25 0 1 2 0 15 -9 -4.5 0 1 -4.5 0 15 -10 -3.75 0 1 10 0 15 -10 -10 0 0 0 0 15 -10 -12.5 0 0 0 0 15 -2 -2.5 0 1 2 0 16 -5 -5.63 0 0 0 0 16 -6 -7.5 0 0 0 0 16 -26 0 10 1 0 0 16 -9 -4.5 0 1 -4.5 0 16 -2 -1.25 0 1 2 0 16 -8 -3 0 1 8 0 16 -25 0 9 1 25 0 16 -4 -4.5 0 0 0 0 16 -5 -10 0 0 0 0 16 -6 -9 0 0 0 0 16 -10 -6.25 0 1 10 0 16 -4 -4 0 0 0 0 16 -12 -3 0 1 -3 0 16 -5 -5 0 0 0 0 16 -12 0 5 1 12 0 16 -6 -9.75 0 0 0 0 16 -19 0 8 1 19 0 16 -4 -7.5 0 0 0 0 16 -12 -9 0 0 0 0 16 -4 -6.5 0 0 0 0 16 -9 -5.63 0 1 -5.63 0 16 -9 -18 0 1 -18 0 16 -10 -11.25 0 1 -11.25 0 16 -10 -13.75 0 1 -13.75 0 16 -6 -12 0 0 0 0 16 -10 -12.5 0 0 0 0 16 -4 -7 0 0 0 0 16 -10 -7.5 0 1 -7.5 0 16 -4 -8 0 0 0 0 16 -8 -11 0 1 -11 0 16 -12 0 4 1 12 0 16 -9 -3.38 0 1 -3.38 0 16 -10 -18.75 0 0 0 0 16 -2 -3.5 0 0 0 0 16 -2 -1 0 1 2 0 16 -2 -3.25 0 1 -3.25 0 16 -2 0 1 1 0 0 16 -7 0 3 1 7 0 16 -8 0 3 0 3 0 16 -12 -6 0 1 -6 0 16 -2 -0.5 0 1 2 0 16 -9 -7.88 0 0 0 0 16 -8 -15 0 0 0 0 16 -2 -1.5 0 1 2 0 16 -12 -22.5 0 0 0 0 16 -8 -7 0 1 -7 0 16 -4 -5.5 0 1 -5.5 0 16 -10 -8.75 0 1 -8.75 0 16 -8 -9 0 0 0 0 16 -2 -4 0 0 0 0 16 -4 0 2 1 4 0 16 -8 -8 0 0 0 0 16 -9 -13.5 0 0 0 0 16 -9 -9 0 0 0 0 16 -6 -3.75 0 1 -3.75 0 16 -13 0 6 1 13 0 16 -5 -1.88 0 1 5 0 16 -6 -6 0 0 0 0 16 -5 -6.88 0 0 0 0 16 -8 -16 0 0 0 0 16 -12 -7.5 0 1 -7.5 0 16 -5 -1.25 0 1 -1.25 0 16 -9 -14.63 0 0 0 0 16 -8 -4 0 1 8 0 16 -10 -17.5 0 0 0 0 16 -5 -3.75 0 1 5 0 16 -6 -10.5 0 0 0 0 16 -13 0 5 1 13 0 16 -10 -16.25 0 0 0 0 16 -5 -7.5 0 1 -7.5 0 16 -2 -1.75 0 1 2 0 16 -5 -9.38 0 0 0 0 16 -2 -2.75 0 1 2 0 16 -2 -0.75 0 1 -0.75 0 16 -5 -8.13 0 1 5 0 16 -9 -11.25 0 1 9 0 16 -8 -13 0 0 0 0 16 -9 -16.88 0 1 9 0 16 -2 -2 0 1 2 0 16 -12 -18 0 1 -18 0 16 -8 -2 0 1 -2 0 16 -2 -3 0 0 0 0 16 -6 -4.5 0 1 6 0 16 -5 0 2 1 5 0 16 -12 -19.5 0 1 12 0 16 -9 -15.75 0 1 -15.75 0 16 -8 -6 0 1 -6 0 16 -10 -2.5 0 1 -2.5 0 16 -9 -6.75 0 1 -6.75 0 16 -6 -6.75 0 0 0 0 16 -2 -3.75 0 1 -3.75 0 16 -10 -5 0 1 -5 0 16 -2 -2.25 0 1 2 0 16 -26 0 12 1 26 0 16 -12 -13.5 0 1 12 0 16 -8 -5 0 1 8 0 16 -6 -3 0 1 6 0 16 -10 -3.75 0 1 10 0 16 -12 -10.5 0 1 12 0 16 -4 -5 0 1 4 0 16 -9 -2.25 0 1 -2.25 0 16 -4 -3 0 1 4 0 16 -9 -10.13 0 1 -10.13 0 16 -28 0 13 1 28 0 16 -22 0 10 0 10 0 16 -10 -10 0 0 0 0 16 -4 -1 0 1 -1 0 16 -4 -2.5 0 1 -2.5 0 16 -12 -24 0 0 0 0 16 -8 -12 0 0 0 0 16 -3 0 1 1 0 0 16 -9 -12.38 0 0 0 0 16 -23 0 10 1 0 0 16 -4 -3.5 0 1 4 0 16 -4 -1.5 0 1 4 0 16 -8 -10 0 1 -10 0 16 -8 -14 0 0 0 0 16 -4 -6 0 1 4 0 16 -25 0 10 1 0 0 16 -12 -16.5 0 0 0 0 16 -12 -12 0 1 12 0 16 -5 -2.5 0 1 5 0 16 -5 -8.75 0 1 5 0 16 -12 -4.5 0 1 -4.5 0 16 -12 -15 0 1 -15 0 16 -5 -3.13 0 1 5 0 16 -12 -21 0 0 0 0 16 -5 -4.38 0 1 -4.38 0 16 -6 -11.25 0 0 0 0 16 -30 0 12 1 0 0 16 -6 -1.5 0 1 6 0 16 -12 0 6 1 12 0 16 -4 -2 0 1 4 0 16 -10 -15 0 0 0 0 16 -6 -2.25 0 1 6 0 16 -10 -20 0 0 0 0 16 -6 -5.25 0 1 6 0 16 -5 -6.25 0 1 5 0 16 -6 -8.25 0 1 
-8.25 0 16 -4 -4.5 0 0 0 0 17 -10 -12.5 0 0 0 0 17 -26 0 12 0 12 0 17 -6 -7.5 0 0 0 0 17 -4 -6.5 0 0 0 0 17 -12 -4.5 0 0 0 0 17 -5 -2.5 0 0 0 0 17 -6 -12 0 0 0 0 17 -9 -14.63 0 0 0 0 17 -6 -6 0 0 0 0 17 -22 0 10 1 22 0 17 -2 -1 0 0 0 0 17 -8 -3 0 0 0 0 17 -12 -9 0 0 0 0 17 -5 -3.75 0 0 0 0 17 -6 -3 0 0 0 0 17 -4 0 2 1 4 0 17 -28 0 13 1 28 0 17 -12 -15 0 0 0 0 17 -9 -11.25 0 0 0 0 17 -12 -10.5 0 0 0 0 17 -5 -1.88 0 0 0 0 17 -2 -2.75 0 0 0 0 17 -4 -7 0 0 0 0 17 -8 -4 0 0 0 0 17 -2 0 1 1 0 0 17 -2 -3.5 0 0 0 0 17 -2 -1.75 0 0 0 0 17 -5 -5 0 0 0 0 17 -12 -12 0 0 0 0 17 -12 0 6 1 12 0 17 -6 -4.5 0 0 0 0 17 -30 0 12 1 0 0 17 -12 -16.5 0 0 0 0 17 -6 -9.75 0 0 0 0 17 -12 -22.5 0 0 0 0 17 -6 -9 0 0 0 0 17 -5 -3.13 0 0 0 0 17 -5 -9.38 0 0 0 0 17 -12 -7.5 0 1 -7.5 0 17 -5 0 2 1 5 0 17 -10 -15 0 0 0 0 17 -12 -3 0 0 0 0 17 -13 0 6 1 13 0 17 -9 -16.88 0 0 0 0 17 -6 -11.25 0 0 0 0 17 -8 -5 0 0 0 0 17 -8 -14 0 0 0 0 17 -12 -24 0 0 0 0 17 -12 0 5 1 12 0 17 -9 -13.5 0 0 0 0 17 -6 -1.5 0 1 6 0 17 -2 -3 0 0 0 0 17 -10 -2.5 0 1 -2.5 0 17 -2 -0.75 0 0 0 0 17 -6 -10.5 0 0 0 0 17 -2 -0.5 0 0 0 0 17 -10 -10 0 0 0 0 17 -8 -10 0 0 0 0 17 -9 -12.38 0 0 0 0 17 -4 -6 0 0 0 0 17 -6 -2.25 0 0 0 0 17 -9 -15.75 0 0 0 0 17 -12 -13.5 0 0 0 0 17 -8 -6 0 0 0 0 17 -10 -18.75 0 0 0 0 17 -4 -2 0 0 0 0 17 -5 -1.25 0 1 -1.25 0 17 -6 -5.25 0 0 0 0 17 -4 -8 0 0 0 0 17 -25 0 9 1 25 0 17 -2 -3.25 0 0 0 0 17 -10 -11.25 0 0 0 0 17 -4 -7.5 0 0 0 0 17 -9 -5.63 0 0 0 0 17 -6 -6.75 0 0 0 0 17 -8 -2 0 0 0 0 17 -5 -6.25 0 0 0 0 17 -23 0 10 1 0 0 17 -8 -13 0 0 0 0 17 -10 -13.75 0 0 0 0 17 -5 -10 0 0 0 0 17 -12 0 4 1 12 0 17 -2 -2.5 0 0 0 0 17 -19 0 8 1 19 0 17 -4 -4 0 0 0 0 17 -4 -1 0 1 -1 0 17 -4 -2.5 0 0 0 0 17 -5 -8.13 0 0 0 0 17 -10 -3.75 0 0 0 0 17 -5 -8.75 0 0 0 0 17 -10 -7.5 0 0 0 0 17 -10 -5 0 0 0 0 17 -10 -20 0 0 0 0 17 -13 0 5 1 13 0 17 -8 -9 0 0 0 0 17 -8 -12 0 0 0 0 17 -10 -16.25 0 0 0 0 17 -5 -6.88 0 0 0 0 17 -4 -5.5 0 0 0 0 17 -5 -7.5 0 0 0 0 17 -9 -10.13 0 0 0 0 17 -6 -8.25 0 0 0 0 17 -26 0 10 1 0 0 17 -4 -5 0 0 0 0 17 -2 -2.25 0 0 0 0 17 -6 -3.75 0 0 0 0 17 -8 -8 0 0 0 0 17 -9 -6.75 0 0 0 0 17 -8 -15 0 0 0 0 17 -12 -6 0 0 0 0 17 -25 0 10 1 0 0 17 -12 -19.5 0 0 0 0 17 -9 -7.88 0 0 0 0 17 -4 -1.5 0 0 0 0 17 -8 -7 0 0 0 0 17 -12 -18 0 0 0 0 17 -2 -2 0 0 0 0 17 -9 -18 0 0 0 0 17 -2 -1.25 0 0 0 0 17 -8 -16 0 0 0 0 17 -5 -4.38 0 0 0 0 17 -2 -4 0 0 0 0 17 -5 -5.63 0 0 0 0 17 -8 0 3 1 0 0 17 -10 -17.5 0 0 0 0 17 -8 -11 0 0 0 0 17 -2 -1.5 0 0 0 0 17 -4 -3.5 0 0 0 0 17 -2 -3.75 0 0 0 0 17 -3 0 1 1 0 0 17 -12 -21 0 0 0 0 17 -10 -8.75 0 0 0 0 17 -9 -9 0 0 0 0 17 -4 -3 0 0 0 0 17 -7 0 3 1 7 0 17 -9 -3.38 0 0 0 0 17 -9 -2.25 0 0 0 0 17 -10 -6.25 0 0 0 0 17 -9 -4.5 0 0 0 0 17 -2 -1 0 1 2 0 18 -9 -13.5 0 0 0 0 18 -5 -6.88 0 1 5 0 18 -10 -10 0 0 0 0 18 -6 -2.25 0 1 6 0 18 -6 -6.75 0 0 0 0 18 -9 -4.5 0 1 -4.5 0 18 -10 -13.75 0 1 -13.75 0 18 -6 -8.25 0 1 -8.25 0 18 -5 -10 0 0 0 0 18 -10 -6.25 0 1 10 0 18 -12 -3 0 1 -3 0 18 -12 -9 0 1 12 0 18 -8 -7 0 0 0 0 18 -6 -12 0 0 0 0 18 -8 -2 0 1 -2 0 18 -12 -6 0 1 -6 0 18 -3 0 1 1 0 0 18 -10 -20 0 0 0 0 18 -5 -3.75 0 1 5 0 18 -2 -1.75 0 1 2 0 18 -6 -3.75 0 1 -3.75 0 18 -9 -12.38 0 0 0 0 18 -5 -6.25 0 0 0 0 18 -12 0 4 0 4 0 18 -2 -1.5 0 1 2 0 18 -6 -5.25 0 1 6 0 18 -10 -18.75 0 0 0 0 18 -6 -6 0 0 0 0 18 -12 0 5 0 5 0 18 -4 -2 0 1 4 0 18 -2 -4 0 0 0 0 18 -5 -2.5 0 1 5 0 18 -2 -3.75 0 0 0 0 18 -9 -15.75 0 0 0 0 18 -8 -4 0 1 8 0 18 -26 0 12 0 12 0 18 -6 -1.5 0 1 6 0 18 -4 -6 0 0 0 0 18 -10 -2.5 0 1 -2.5 0 18 -8 -12 0 0 0 0 18 -2 -3.5 0 1 -3.5 0 18 -5 -5.63 0 0 0 0 18 -12 -24 0 0 0 0 18 -25 0 10 0 10 0 18 -4 
-6.5 0 0 0 0 18 -5 -9.38 0 0 0 0 18 -5 -7.5 0 0 0 0 18 -4 -4 0 0 0 0 18 -6 -10.5 0 0 0 0 18 -13 0 6 0 6 0 18 -12 -22.5 0 0 0 0 18 -4 -7.5 0 0 0 0 18 -5 0 2 1 5 0 18 -10 -15 0 0 0 0 18 -9 -16.88 0 0 0 0 18 -2 -2.5 0 1 2 0 18 -10 -16.25 0 0 0 0 18 -6 -11.25 0 0 0 0 18 -4 -1.5 0 1 4 0 18 -5 -3.13 0 1 5 0 18 -6 -9 0 0 0 0 18 -12 -19.5 0 0 0 0 18 -10 -12.5 0 0 0 0 18 -8 -16 0 0 0 0 18 -4 0 2 1 4 0 18 -12 -7.5 0 1 -7.5 0 18 -12 -13.5 0 0 0 0 18 -22 0 10 0 10 0 18 -12 -21 0 0 0 0 18 -7 0 3 1 7 0 18 -10 -8.75 0 1 -8.75 0 18 -2 -1.25 0 1 2 0 18 -9 -6.75 0 1 -6.75 0 18 -12 0 6 0 6 0 18 -28 0 13 0 13 0 18 -9 -10.13 0 0 0 0 18 -2 -0.5 0 1 2 0 18 -25 0 9 1 25 0 18 -6 -7.5 0 0 0 0 18 -4 -3 0 1 4 0 18 -10 -3.75 0 1 10 0 18 -12 -4.5 0 1 -4.5 0 18 -12 -15 0 0 0 0 18 -6 -3 0 1 6 0 18 -9 -14.63 0 0 0 0 18 -5 -1.25 0 1 -1.25 0 18 -8 -11 0 0 0 0 18 -10 -17.5 0 0 0 0 18 -8 -10 0 0 0 0 18 -9 -9 0 1 -9 0 18 -10 -11.25 0 0 0 0 18 -12 -12 0 1 12 0 18 -8 -14 0 0 0 0 18 -12 -16.5 0 0 0 0 18 -4 -7 0 0 0 0 18 -4 -1 0 1 -1 0 18 -5 -1.88 0 1 5 0 18 -8 0 3 1 0 0 18 -2 -3.25 0 0 0 0 18 -5 -5 0 0 0 0 18 -26 0 10 0 10 0 18 -12 -10.5 0 1 12 0 18 -2 0 1 1 0 0 18 -6 -9.75 0 0 0 0 18 -8 -3 0 1 8 0 18 -13 0 5 0 5 0 18 -10 -7.5 0 0 0 0 18 -8 -13 0 0 0 0 18 -9 -3.38 0 1 -3.38 0 18 -8 -15 0 0 0 0 18 -30 0 12 1 0 0 18 -8 -8 0 0 0 0 18 -8 -5 0 1 8 0 18 -12 -18 0 0 0 0 18 -10 -5 0 1 -5 0 18 -9 -11.25 0 0 0 0 18 -9 -7.88 0 1 -7.88 0 18 -8 -6 0 1 -6 0 18 -6 -4.5 0 1 6 0 18 -8 -9 0 0 0 0 18 -4 -5.5 0 0 0 0 18 -4 -5 0 0 0 0 18 -9 -2.25 0 1 -2.25 0 18 -23 0 10 0 10 0 18 -9 -5.63 0 1 -5.63 0 18 -4 -4.5 0 0 0 0 18 -4 -8 0 0 0 0 18 -19 0 8 0 8 0 18 -2 -2 0 0 0 0 18 -5 -8.13 0 0 0 0 18 -5 -4.38 0 1 -4.38 0 18 -2 -2.25 0 1 2 0 18 -2 -0.75 0 1 -0.75 0 18 -2 -2.75 0 0 0 0 18 -5 -8.75 0 0 0 0 18 -9 -18 0 0 0 0 18 -4 -3.5 0 0 0 0 18 -4 -2.5 0 1 -2.5 0 18 -9 -6.75 0 1 -6.75 0 19 -6 -6.75 0 0 0 0 19 -6 -3 0 1 6 0 19 -2 -1.5 0 0 0 0 19 -4 -3 0 0 0 0 19 -5 -6.88 0 0 0 0 19 -12 -9 0 1 12 0 19 -4 -5 0 0 0 0 19 -5 -7.5 0 0 0 0 19 -4 -4 0 1 -4 0 19 -9 -5.63 0 1 -5.63 0 19 -9 -14.63 0 0 0 0 19 -5 -9.38 0 0 0 0 19 -6 -4.5 0 1 6 0 19 -8 -7 0 1 -7 0 19 -10 -16.25 0 0 0 0 19 -10 -17.5 0 0 0 0 19 -9 -16.88 0 0 0 0 19 -8 -5 0 1 8 0 19 -6 -1.5 0 1 6 0 19 -12 -18 0 0 0 0 19 -5 -6.25 0 0 0 0 19 -8 -4 0 1 8 0 19 -9 -15.75 0 0 0 0 19 -9 -13.5 0 0 0 0 19 -5 -8.13 0 0 0 0 19 -2 0 1 0 1 0 19 -2 -3.75 0 0 0 0 19 -4 -6.5 0 0 0 0 19 -10 -5 0 1 -5 0 19 -12 -22.5 0 0 0 0 19 -2 -1 0 1 2 0 19 -13 0 6 1 13 0 19 -5 -2.5 0 1 5 0 19 -2 -0.5 0 1 2 0 19 -2 -3.25 0 0 0 0 19 -30 0 12 1 0 0 19 -8 -8 0 0 0 0 19 -4 -5.5 0 0 0 0 19 -23 0 10 1 0 0 19 -4 -3.5 0 0 0 0 19 -5 0 2 1 5 0 19 -8 0 3 1 0 0 19 -9 -10.13 0 0 0 0 19 -8 -16 0 0 0 0 19 -12 -24 0 0 0 0 19 -9 -3.38 0 1 -3.38 0 19 -6 -5.25 0 0 0 0 19 -2 -4 0 0 0 0 19 -4 -1 0 1 -1 0 19 -6 -11.25 0 0 0 0 19 -5 -4.38 0 0 0 0 19 -6 -2.25 0 1 6 0 19 -12 -10.5 0 1 12 0 19 -9 -18 0 0 0 0 19 -10 -20 0 0 0 0 19 -4 -4.5 0 0 0 0 19 -9 -2.25 0 1 -2.25 0 19 -4 -6 0 0 0 0 19 -8 -10 0 1 -10 0 19 -5 -5 0 0 0 0 19 -5 -8.75 0 0 0 0 19 -8 -6 0 1 -6 0 19 -10 -13.75 0 0 0 0 19 -2 -2.5 0 0 0 0 19 -8 -11 0 0 0 0 19 -4 -2 0 1 4 0 19 -10 -7.5 0 1 -7.5 0 19 -22 0 10 1 22 0 19 -25 0 10 1 0 0 19 -6 -9.75 0 0 0 0 19 -12 0 5 1 12 0 19 -4 -2.5 0 0 0 0 19 -8 -3 0 1 8 0 19 -10 -11.25 0 0 0 0 19 -5 -10 0 0 0 0 19 -10 -15 0 0 0 0 19 -2 -3.5 0 0 0 0 19 -12 0 4 1 12 0 19 -13 0 5 1 13 0 19 -5 -3.75 0 0 0 0 19 -26 0 12 1 26 0 19 -5 -5.63 0 0 0 0 19 -8 -2 0 1 -2 0 19 -2 -3 0 0 0 0 19 -6 -9 0 0 0 0 19 -9 -7.88 0 0 0 0 19 -8 -14 0 0 0 0 19 -28 0 13 1 28 0 19 -9 -12.38 0 0 0 0 19 -8 
-15 0 0 0 0 19 -10 -2.5 0 1 -2.5 0 19 -4 0 2 1 4 0 19 -12 -6 0 1 -6 0 19 -12 -16.5 0 0 0 0 19 -4 -7.5 0 0 0 0 19 -10 -8.75 0 0 0 0 19 -10 -18.75 0 0 0 0 19 -26 0 10 1 0 0 19 -12 -21 0 0 0 0 19 -2 -0.75 0 1 -0.75 0 19 -9 -9 0 0 0 0 19 -10 -6.25 0 1 10 0 19 -8 -12 0 0 0 0 19 -3 0 1 1 0 0 19 -5 -1.88 0 1 5 0 19 -6 -7.5 0 0 0 0 19 -12 -13.5 0 0 0 0 19 -4 -7 0 0 0 0 19 -6 -8.25 0 0 0 0 19 -6 -12 0 0 0 0 19 -6 -10.5 0 0 0 0 19 -4 -8 0 0 0 0 19 -6 -6 0 0 0 0 19 -12 0 6 0 6 0 19 -12 -19.5 0 0 0 0 19 -19 0 8 1 19 0 19 -12 -15 0 0 0 0 19 -2 -1.75 0 1 2 0 19 -6 -3.75 0 0 0 0 19 -2 -1.25 0 0 0 0 19 -5 -1.25 0 1 -1.25 0 19 -4 -1.5 0 1 4 0 19 -8 -13 0 0 0 0 19 -12 -7.5 0 1 -7.5 0 19 -12 -3 0 1 -3 0 19 -2 -2.75 0 0 0 0 19 -7 0 3 1 7 0 19 -25 0 9 1 25 0 19 -2 -2 0 0 0 0 19 -12 -4.5 0 1 -4.5 0 19 -12 -12 0 0 0 0 19 -5 -3.13 0 1 5 0 19 -9 -11.25 0 0 0 0 19 -8 -9 0 0 0 0 19 -2 -2.25 0 0 0 0 19 -9 -4.5 0 1 -4.5 0 19 -10 -3.75 0 1 10 0 19 -10 -10 0 0 0 0 19 -10 -12.5 0 0 0 0 19 -2 -2.5 0 1 2 0 20 -5 -5.63 0 1 -5.63 0 20 -6 -7.5 0 0 0 0 20 -26 0 10 0 10 0 20 -9 -4.5 0 1 -4.5 0 20 -2 -1.25 0 1 2 0 20 -8 -3 0 1 8 0 20 -25 0 9 0 9 0 20 -4 -4.5 0 1 -4.5 0 20 -5 -10 0 0 0 0 20 -6 -9 0 1 -9 0 20 -10 -6.25 0 1 10 0 20 -4 -4 0 1 -4 0 20 -12 -3 0 1 -3 0 20 -5 -5 0 0 0 0 20 -12 0 5 1 12 0 20 -6 -9.75 0 0 0 0 20 -19 0 8 0 8 0 20 -4 -7.5 0 0 0 0 20 -12 -9 0 1 12 0 20 -4 -6.5 0 0 0 0 20 -9 -5.63 0 1 -5.63 0 20 -9 -18 0 1 -18 0 20 -10 -11.25 0 1 -11.25 0 20 -10 -13.75 0 0 0 0 20 -6 -12 0 0 0 0 20 -10 -12.5 0 1 -12.5 0 20 -4 -7 0 0 0 0 20 -10 -7.5 0 1 -7.5 0 20 -4 -8 0 0 0 0 20 -8 -11 0 1 -11 0 20 -12 0 4 0 4 0 20 -9 -3.38 0 1 -3.38 0 20 -10 -18.75 0 0 0 0 20 -2 -3.5 0 1 -3.5 0 20 -2 -1 0 1 2 0 20 -2 -3.25 0 0 0 0 20 -2 0 1 0 1 0 20 -7 0 3 1 7 0 20 -8 0 3 0 3 0 20 -12 -6 0 1 -6 0 20 -2 -0.5 0 1 2 0 20 -9 -7.88 0 0 0 0 20 -8 -15 0 0 0 0 20 -2 -1.5 0 0 0 0 20 -12 -22.5 0 1 -22.5 0 20 -8 -7 0 1 -7 0 20 -4 -5.5 0 0 0 0 20 -10 -8.75 0 0 0 0 20 -8 -9 0 1 -9 0 20 -2 -4 0 0 0 0 20 -4 0 2 0 2 0 20 -8 -8 0 1 8 0 20 -9 -13.5 0 0 0 0 20 -9 -9 0 0 0 0 20 -6 -3.75 0 1 -3.75 0 20 -13 0 6 0 6 0 20 -5 -1.88 0 1 5 0 20 -6 -6 0 1 -6 0 20 -5 -6.88 0 0 0 0 20 -8 -16 0 0 0 0 20 -12 -7.5 0 1 -7.5 0 20 -5 -1.25 0 1 -1.25 0 20 -9 -14.63 0 0 0 0 20 -8 -4 0 1 8 0 20 -10 -17.5 0 0 0 0 20 -5 -3.75 0 0 0 0 20 -6 -10.5 0 0 0 0 20 -13 0 5 0 5 0 20 -10 -16.25 0 0 0 0 20 -5 -7.5 0 0 0 0 20 -2 -1.75 0 0 0 0 20 -5 -9.38 0 0 0 0 20 -2 -2.75 0 1 2 0 20 -2 -0.75 0 1 -0.75 0 20 -5 -8.13 0 0 0 0 20 -9 -11.25 0 0 0 0 20 -8 -13 0 0 0 0 20 -9 -16.88 0 0 0 0 20 -2 -2 0 1 2 0 20 -12 -18 0 0 0 0 20 -8 -2 0 1 -2 0 20 -2 -3 0 1 -3 0 20 -6 -4.5 0 1 6 0 20 -5 0 2 0 2 0 20 -12 -19.5 0 0 0 0 20 -9 -15.75 0 0 0 0 20 -8 -6 0 1 -6 0 20 -10 -2.5 0 1 -2.5 0 20 -9 -6.75 0 0 0 0 20 -6 -6.75 0 0 0 0 20 -2 -3.75 0 0 0 0 20 -10 -5 0 1 -5 0 20 -2 -2.25 0 1 2 0 20 -26 0 12 0 12 0 20 -12 -13.5 0 0 0 0 20 -8 -5 0 0 0 0 20 -6 -3 0 1 6 0 20 -10 -3.75 0 1 10 0 20 -12 -10.5 0 0 0 0 20 -4 -5 0 0 0 0 20 -9 -2.25 0 1 -2.25 0 20 -4 -3 0 0 0 0 20 -9 -10.13 0 0 0 0 20 -28 0 13 0 13 0 20 -22 0 10 1 22 0 20 -10 -10 0 0 0 0 20 -4 -1 0 1 -1 0 20 -4 -2.5 0 1 -2.5 0 20 -12 -24 0 1 -24 0 20 -8 -12 0 1 -12 0 20 -3 0 1 0 1 0 20 -9 -12.38 0 0 0 0 20 -23 0 10 0 10 0 20 -4 -3.5 0 0 0 0 20 -4 -1.5 0 1 4 0 20 -8 -10 0 0 0 0 20 -8 -14 0 0 0 0 20 -4 -6 0 0 0 0 20 -25 0 10 0 10 0 20 -12 -16.5 0 0 0 0 20 -12 -12 0 1 12 0 20 -5 -2.5 0 0 0 0 20 -5 -8.75 0 0 0 0 20 -12 -4.5 0 1 -4.5 0 20 -12 -15 0 0 0 0 20 -5 -3.13 0 0 0 0 20 -12 -21 0 0 0 0 20 -5 -4.38 0 0 0 0 20 -6 -11.25 0 0 0 0 20 -30 0 12 0 12 0 20 -6 -1.5 0 1 6 0 20 
-12 0 6 0 6 0 20 -4 -2 0 1 4 0 20 -10 -15 0 0 0 0 20 -6 -2.25 0 1 6 0 20 -10 -20 0 0 0 0 20 -6 -5.25 0 0 0 0 20 -5 -6.25 0 0 0 0 20 -6 -8.25 0 0 0 0 20 -4 -4.5 0 0 0 0 21 -10 -12.5 0 0 0 0 21 -26 0 12 1 26 0 21 -6 -7.5 0 0 0 0 21 -4 -6.5 0 0 0 0 21 -12 -4.5 0 1 -4.5 0 21 -5 -2.5 0 1 5 0 21 -6 -12 0 0 0 0 21 -9 -14.63 0 0 0 0 21 -6 -6 0 0 0 0 21 -22 0 10 1 22 0 21 -2 -1 0 1 2 0 21 -8 -3 0 1 8 0 21 -12 -9 0 0 0 0 21 -5 -3.75 0 0 0 0 21 -6 -3 0 1 6 0 21 -4 0 2 1 4 0 21 -28 0 13 1 28 0 21 -12 -15 0 0 0 0 21 -9 -11.25 0 0 0 0 21 -12 -10.5 0 0 0 0 21 -5 -1.88 0 1 5 0 21 -2 -2.75 0 0 0 0 21 -4 -7 0 0 0 0 21 -8 -4 0 1 8 0 21 -2 0 1 0 1 0 21 -2 -3.5 0 0 0 0 21 -2 -1.75 0 0 0 0 21 -5 -5 0 0 0 0 21 -12 -12 0 0 0 0 21 -12 0 6 1 12 0 21 -6 -4.5 0 0 0 0 21 -30 0 12 1 0 0 21 -12 -16.5 0 0 0 0 21 -6 -9.75 0 0 0 0 21 -12 -22.5 0 0 0 0 21 -6 -9 0 0 0 0 21 -5 -3.13 0 0 0 0 21 -5 -9.38 0 0 0 0 21 -12 -7.5 0 0 0 0 21 -5 0 2 1 5 0 21 -10 -15 0 0 0 0 21 -12 -3 0 1 -3 0 21 -13 0 6 1 13 0 21 -9 -16.88 0 0 0 0 21 -6 -11.25 0 0 0 0 21 -8 -5 0 0 0 0 21 -8 -14 0 0 0 0 21 -12 -24 0 0 0 0 21 -12 0 5 1 12 0 21 -9 -13.5 0 0 0 0 21 -6 -1.5 0 1 6 0 21 -2 -3 0 0 0 0 21 -10 -2.5 0 1 -2.5 0 21 -2 -0.75 0 1 -0.75 0 21 -6 -10.5 0 0 0 0 21 -2 -0.5 0 1 2 0 21 -10 -10 0 0 0 0 21 -8 -10 0 0 0 0 21 -9 -12.38 0 0 0 0 21 -4 -6 0 0 0 0 21 -6 -2.25 0 1 6 0 21 -9 -15.75 0 0 0 0 21 -12 -13.5 0 0 0 0 21 -8 -6 0 0 0 0 21 -10 -18.75 0 0 0 0 21 -4 -2 0 1 4 0 21 -5 -1.25 0 1 -1.25 0 21 -6 -5.25 0 0 0 0 21 -4 -8 0 0 0 0 21 -25 0 9 1 25 0 21 -2 -3.25 0 0 0 0 21 -10 -11.25 0 0 0 0 21 -4 -7.5 0 0 0 0 21 -9 -5.63 0 0 0 0 21 -6 -6.75 0 0 0 0 21 -8 -2 0 1 -2 0 21 -5 -6.25 0 0 0 0 21 -23 0 10 1 0 0 21 -8 -13 0 0 0 0 21 -10 -13.75 0 0 0 0 21 -5 -10 0 0 0 0 21 -12 0 4 1 12 0 21 -2 -2.5 0 0 0 0 21 -19 0 8 1 19 0 21 -4 -4 0 0 0 0 21 -4 -1 0 1 -1 0 21 -4 -2.5 0 1 -2.5 0 21 -5 -8.13 0 0 0 0 21 -10 -3.75 0 1 10 0 21 -5 -8.75 0 0 0 0 21 -10 -7.5 0 0 0 0 21 -10 -5 0 0 0 0 21 -10 -20 0 0 0 0 21 -13 0 5 0 5 0 21 -8 -9 0 0 0 0 21 -8 -12 0 0 0 0 21 -10 -16.25 0 0 0 0 21 -5 -6.88 0 0 0 0 21 -4 -5.5 0 0 0 0 21 -5 -7.5 0 0 0 0 21 -9 -10.13 0 0 0 0 21 -6 -8.25 0 0 0 0 21 -26 0 10 0 10 0 21 -4 -5 0 0 0 0 21 -2 -2.25 0 0 0 0 21 -6 -3.75 0 1 -3.75 0 21 -8 -8 0 0 0 0 21 -9 -6.75 0 0 0 0 21 -8 -15 0 0 0 0 21 -12 -6 0 1 -6 0 21 -25 0 10 0 10 0 21 -12 -19.5 0 0 0 0 21 -9 -7.88 0 0 0 0 21 -4 -1.5 0 1 4 0 21 -8 -7 0 0 0 0 21 -12 -18 0 0 0 0 21 -2 -2 0 0 0 0 21 -9 -18 0 0 0 0 21 -2 -1.25 0 0 0 0 21 -8 -16 0 0 0 0 21 -5 -4.38 0 0 0 0 21 -2 -4 0 0 0 0 21 -5 -5.63 0 0 0 0 21 -8 0 3 1 0 0 21 -10 -17.5 0 0 0 0 21 -8 -11 0 0 0 0 21 -2 -1.5 0 0 0 0 21 -4 -3.5 0 0 0 0 21 -2 -3.75 0 0 0 0 21 -3 0 1 1 0 0 21 -12 -21 0 0 0 0 21 -10 -8.75 0 0 0 0 21 -9 -9 0 0 0 0 21 -4 -3 0 0 0 0 21 -7 0 3 1 7 0 21 -9 -3.38 0 1 -3.38 0 21 -9 -2.25 0 1 -2.25 0 21 -10 -6.25 0 0 0 0 21 -9 -4.5 0 0 0 0 21 -2 -1 0 0 0 0 22 -9 -13.5 0 0 0 0 22 -5 -6.88 0 0 0 0 22 -10 -10 0 0 0 0 22 -6 -2.25 0 1 6 0 22 -6 -6.75 0 0 0 0 22 -9 -4.5 0 1 -4.5 0 22 -10 -13.75 0 0 0 0 22 -6 -8.25 0 0 0 0 22 -5 -10 0 0 0 0 22 -10 -6.25 0 1 10 0 22 -12 -3 0 1 -3 0 22 -12 -9 0 0 0 0 22 -8 -7 0 0 0 0 22 -6 -12 0 0 0 0 22 -8 -2 0 1 -2 0 22 -12 -6 0 1 -6 0 22 -3 0 1 1 0 0 22 -10 -20 0 0 0 0 22 -5 -3.75 0 0 0 0 22 -2 -1.75 0 0 0 0 22 -6 -3.75 0 0 0 0 22 -9 -12.38 0 0 0 0 22 -5 -6.25 0 0 0 0 22 -12 0 4 1 12 0 22 -2 -1.5 0 0 0 0 22 -6 -5.25 0 0 0 0 22 -10 -18.75 0 0 0 0 22 -6 -6 0 0 0 0 22 -12 0 5 1 12 0 22 -4 -2 0 0 0 0 22 -2 -4 0 0 0 0 22 -5 -2.5 0 0 0 0 22 -2 -3.75 0 0 0 0 22 -9 -15.75 0 0 0 0 22 -8 -4 0 0 0 0 22 -26 0 12 1 26 0 22 -6 -1.5 0 
0 0 0 22 -4 -6 0 0 0 0 22 -10 -2.5 0 1 -2.5 0 22 -8 -12 0 0 0 0 22 -2 -3.5 0 0 0 0 22 -5 -5.63 0 1 -5.63 0 22 -12 -24 0 0 0 0 22 -25 0 10 1 0 0 22 -4 -6.5 0 0 0 0 22 -5 -9.38 0 0 0 0 22 -5 -7.5 0 0 0 0 22 -4 -4 0 1 -4 0 22 -6 -10.5 0 0 0 0 22 -13 0 6 1 13 0 22 -12 -22.5 0 0 0 0 22 -4 -7.5 0 0 0 0 22 -5 0 2 1 5 0 22 -10 -15 0 0 0 0 22 -9 -16.88 0 0 0 0 22 -2 -2.5 0 0 0 0 22 -10 -16.25 0 0 0 0 22 -6 -11.25 0 0 0 0 22 -4 -1.5 0 0 0 0 22 -5 -3.13 0 0 0 0 22 -6 -9 0 0 0 0 22 -12 -19.5 0 0 0 0 22 -10 -12.5 0 0 0 0 22 -2 -3 0 0 0 0 22 -8 -16 0 0 0 0 22 -4 0 2 1 4 0 22 -12 -7.5 0 1 -7.5 0 22 -12 -13.5 0 0 0 0 22 -22 0 10 1 22 0 22 -12 -21 0 0 0 0 22 -7 0 3 1 7 0 22 -10 -8.75 0 0 0 0 22 -2 -1.25 0 0 0 0 22 -9 -6.75 0 0 0 0 22 -12 0 6 1 12 0 22 -28 0 13 0 13 0 22 -9 -10.13 0 0 0 0 22 -2 -0.5 0 1 2 0 22 -25 0 9 1 25 0 22 -6 -7.5 0 0 0 0 22 -4 -3 0 0 0 0 22 -10 -3.75 0 1 10 0 22 -12 -4.5 0 0 0 0 22 -12 -15 0 1 -15 0 22 -6 -3 0 0 0 0 22 -9 -14.63 0 0 0 0 22 -5 -1.25 0 1 -1.25 0 22 -8 -11 0 0 0 0 22 -10 -17.5 0 0 0 0 22 -8 -10 0 0 0 0 22 -9 -9 0 0 0 0 22 -10 -11.25 0 0 0 0 22 -12 -12 0 1 12 0 22 -8 -14 0 0 0 0 22 -12 -16.5 0 0 0 0 22 -4 -7 0 0 0 0 22 -4 -1 0 1 -1 0 22 -5 -1.88 0 1 5 0 22 -8 0 3 1 0 0 22 -2 -3.25 0 0 0 0 22 -5 -5 0 0 0 0 22 -26 0 10 1 0 0 22 -12 -10.5 0 1 12 0 22 -2 0 1 0 1 0 22 -6 -9.75 0 0 0 0 22 -8 -3 0 1 8 0 22 -13 0 5 1 13 0 22 -10 -7.5 0 0 0 0 22 -8 -13 0 0 0 0 22 -9 -3.38 0 1 -3.38 0 22 -8 -15 0 0 0 0 22 -30 0 12 1 0 0 22 -8 -8 0 0 0 0 22 -8 -5 0 0 0 0 22 -12 -18 0 0 0 0 22 -10 -5 0 1 -5 0 22 -9 -11.25 0 0 0 0 22 -9 -7.88 0 0 0 0 22 -8 -6 0 0 0 0 22 -6 -4.5 0 0 0 0 22 -8 -9 0 0 0 0 22 -4 -5.5 0 0 0 0 22 -4 -5 0 0 0 0 22 -9 -2.25 0 1 -2.25 0 22 -23 0 10 1 0 0 22 -9 -5.63 0 0 0 0 22 -4 -4.5 0 0 0 0 22 -4 -8 0 0 0 0 22 -19 0 8 1 19 0 22 -2 -2 0 0 0 0 22 -5 -8.13 0 0 0 0 22 -5 -4.38 0 0 0 0 22 -2 -2.25 0 0 0 0 22 -2 -0.75 0 0 0 0 22 -2 -2.75 0 0 0 0 22 -5 -8.75 0 0 0 0 22 -9 -18 0 0 0 0 22 -4 -3.5 0 1 4 0 22 -4 -2.5 0 0 0 0 22 -9 -6.75 0 1 -6.75 0 23 -6 -6.75 0 0 0 0 23 -6 -3 0 1 6 0 23 -2 -1.5 0 1 2 0 23 -4 -3 0 1 4 0 23 -5 -6.88 0 0 0 0 23 -12 -9 0 1 12 0 23 -4 -5 0 0 0 0 23 -5 -7.5 0 0 0 0 23 -4 -4 0 1 -4 0 23 -9 -5.63 0 1 -5.63 0 23 -9 -14.63 0 0 0 0 23 -5 -9.38 0 0 0 0 23 -6 -4.5 0 1 6 0 23 -8 -7 0 0 0 0 23 -10 -16.25 0 1 10 0 23 -10 -17.5 0 1 -17.5 0 23 -9 -16.88 0 0 0 0 23 -8 -5 0 1 8 0 23 -6 -1.5 0 1 6 0 23 -12 -18 0 0 0 0 23 -5 -6.25 0 0 0 0 23 -8 -4 0 1 8 0 23 -9 -15.75 0 0 0 0 23 -9 -13.5 0 0 0 0 23 -5 -8.13 0 0 0 0 23 -2 0 1 1 0 0 23 -2 -3.75 0 1 -3.75 0 23 -4 -6.5 0 0 0 0 23 -10 -5 0 1 -5 0 23 -12 -22.5 0 0 0 0 23 -2 -1 0 1 2 0 23 -13 0 6 1 13 0 23 -5 -2.5 0 1 5 0 23 -2 -0.5 0 1 2 0 23 -2 -3.25 0 1 -3.25 0 23 -30 0 12 1 0 0 23 -8 -8 0 0 0 0 23 -4 -5.5 0 0 0 0 23 -23 0 10 1 0 0 23 -4 -3.5 0 1 4 0 23 -5 0 2 1 5 0 23 -8 0 3 1 0 0 23 -9 -10.13 0 0 0 0 23 -8 -16 0 0 0 0 23 -12 -24 0 0 0 0 23 -9 -3.38 0 1 -3.38 0 23 -6 -5.25 0 0 0 0 23 -2 -4 0 0 0 0 23 -4 -1 0 1 -1 0 23 -6 -11.25 0 0 0 0 23 -5 -4.38 0 1 -4.38 0 23 -6 -2.25 0 1 6 0 23 -12 -10.5 0 1 12 0 23 -9 -18 0 0 0 0 23 -10 -20 0 0 0 0 23 -4 -4.5 0 1 -4.5 0 23 -9 -2.25 0 1 -2.25 0 23 -4 -6 0 0 0 0 23 -8 -10 0 0 0 0 23 -5 -5 0 1 -5 0 23 -5 -8.75 0 0 0 0 23 -8 -6 0 0 0 0 23 -10 -13.75 0 0 0 0 23 -2 -2.5 0 1 2 0 23 -8 -11 0 0 0 0 23 -4 -2 0 1 4 0 23 -10 -7.5 0 0 0 0 23 -22 0 10 1 22 0 23 -25 0 10 1 0 0 23 -6 -9.75 0 0 0 0 23 -12 0 5 1 12 0 23 -4 -2.5 0 1 -2.5 0 23 -8 -3 0 1 8 0 23 -10 -11.25 0 0 0 0 23 -5 -10 0 0 0 0 23 -10 -15 0 0 0 0 23 -2 -3.5 0 1 -3.5 0 23 -12 0 4 1 12 0 23 -13 0 5 0 5 0 23 -5 -3.75 0 1 5 0 23 -26 0 12 0 12 
0 23 -5 -5.63 0 0 0 0 23 -8 -2 0 1 -2 0 23 -2 -3 0 1 -3 0 23 -6 -9 0 0 0 0 23 -9 -7.88 0 0 0 0 23 -8 -14 0 0 0 0 23 -28 0 13 0 13 0 23 -9 -12.38 0 0 0 0 23 -8 -15 0 0 0 0 23 -10 -2.5 0 1 -2.5 0 23 -4 0 2 0 2 0 23 -12 -6 0 1 -6 0 23 -12 -16.5 0 0 0 0 23 -4 -7.5 0 0 0 0 23 -10 -8.75 0 0 0 0 23 -10 -18.75 0 0 0 0 23 -26 0 10 0 10 0 23 -12 -21 0 0 0 0 23 -2 -0.75 0 1 -0.75 0 23 -9 -9 0 0 0 0 23 -10 -6.25 0 0 0 0 23 -8 -12 0 0 0 0 23 -3 0 1 1 0 0 23 -5 -1.88 0 1 5 0 23 -6 -7.5 0 0 0 0 23 -12 -13.5 0 0 0 0 23 -4 -7 0 0 0 0 23 -6 -8.25 0 0 0 0 23 -6 -12 0 0 0 0 23 -6 -10.5 0 0 0 0 23 -4 -8 0 0 0 0 23 -6 -6 0 0 0 0 23 -12 0 6 0 6 0 23 -12 -19.5 0 0 0 0 23 -19 0 8 1 19 0 23 -12 -15 0 0 0 0 23 -2 -1.75 0 1 2 0 23 -6 -3.75 0 1 -3.75 0 23 -2 -1.25 0 1 2 0 23 -5 -1.25 0 1 -1.25 0 23 -4 -1.5 0 1 4 0 23 -8 -13 0 0 0 0 23 -12 -7.5 0 0 0 0 23 -12 -3 0 1 -3 0 23 -2 -2.75 0 1 2 0 23 -7 0 3 0 3 0 23 -25 0 9 0 9 0 23 -2 -2 0 1 2 0 23 -12 -4.5 0 1 -4.5 0 23 -12 -12 0 0 0 0 23 -5 -3.13 0 1 5 0 23 -9 -11.25 0 0 0 0 23 -8 -9 0 0 0 0 23 -2 -2.25 0 1 2 0 23 -9 -4.5 0 1 -4.5 0 23 -10 -3.75 0 1 10 0 23 -10 -10 0 0 0 0 23 -10 -12.5 0 0 0 0 23 -2 -2.5 0 0 0 0 24 -5 -5.63 0 1 -5.63 0 24 -6 -7.5 0 1 -7.5 0 24 -26 0 10 1 0 0 24 -9 -4.5 0 1 -4.5 0 24 -2 -1.25 0 1 2 0 24 -8 -3 0 1 8 0 24 -25 0 9 1 25 0 24 -4 -4.5 0 1 -4.5 0 24 -5 -10 0 0 0 0 24 -6 -9 0 0 0 0 24 -10 -6.25 0 1 10 0 24 -4 -4 0 0 0 0 24 -12 -3 0 1 -3 0 24 -5 -5 0 1 -5 0 24 -12 0 5 1 12 0 24 -6 -9.75 0 0 0 0 24 -19 0 8 1 19 0 24 -4 -7.5 0 0 0 0 24 -12 -9 0 1 12 0 24 -4 -6.5 0 0 0 0 24 -9 -5.63 0 1 -5.63 0 24 -9 -18 0 0 0 0 24 -10 -11.25 0 1 -11.25 0 24 -10 -13.75 0 0 0 0 24 -6 -12 0 0 0 0 24 -10 -12.5 0 1 -12.5 0 24 -4 -7 0 0 0 0 24 -10 -7.5 0 1 -7.5 0 24 -4 -8 0 0 0 0 24 -8 -11 0 0 0 0 24 -12 0 4 1 12 0 24 -9 -3.38 0 1 -3.38 0 24 -10 -18.75 0 0 0 0 24 -2 -3.5 0 0 0 0 24 -2 -1 0 1 2 0 24 -2 -3.25 0 0 0 0 24 -2 0 1 1 0 0 24 -7 0 3 1 7 0 24 -8 0 3 1 0 0 24 -12 -6 0 1 -6 0 24 -2 -0.5 0 1 2 0 24 -9 -7.88 0 1 -7.88 0 24 -8 -15 0 0 0 0 24 -2 -1.5 0 1 2 0 24 -12 -22.5 0 0 0 0 24 -8 -7 0 1 -7 0 24 -4 -5.5 0 0 0 0 24 -10 -8.75 0 1 -8.75 0 24 -8 -9 0 0 0 0 24 -2 -4 0 0 0 0 24 -4 0 2 1 4 0 24 -8 -8 0 1 8 0 24 -9 -13.5 0 0 0 0 24 -9 -9 0 1 -9 0 24 -6 -3.75 0 1 -3.75 0 24 -13 0 6 1 13 0 24 -5 -1.88 0 1 5 0 24 -6 -6 0 1 -6 0 24 -5 -6.88 0 0 0 0 24 -8 -16 0 0 0 0 24 -12 -7.5 0 1 -7.5 0 24 -5 -1.25 0 1 -1.25 0 24 -9 -14.63 0 0 0 0 24 -8 -4 0 1 8 0 24 -10 -17.5 0 0 0 0 24 -5 -3.75 0 1 5 0 24 -6 -10.5 0 0 0 0 24 -13 0 5 1 13 0 24 -10 -16.25 0 0 0 0 24 -5 -7.5 0 0 0 0 24 -2 -1.75 0 1 2 0 24 -5 -9.38 0 0 0 0 24 -2 -2.75 0 0 0 0 24 -2 -0.75 0 1 -0.75 0 24 -5 -8.13 0 0 0 0 24 -9 -11.25 0 0 0 0 24 -8 -13 0 0 0 0 24 -9 -16.88 0 0 0 0 24 -2 -2 0 1 2 0 24 -12 -18 0 0 0 0 24 -8 -2 0 1 -2 0 24 -2 -3 0 0 0 0 24 -6 -4.5 0 1 6 0 24 -5 0 2 1 5 0 24 -12 -19.5 0 0 0 0 24 -9 -15.75 0 0 0 0 24 -8 -6 0 1 -6 0 24 -10 -2.5 0 1 -2.5 0 24 -9 -6.75 0 1 -6.75 0 24 -6 -6.75 0 0 0 0 24 -2 -3.75 0 0 0 0 24 -10 -5 0 1 -5 0 24 -2 -2.25 0 0 0 0 24 -26 0 12 1 26 0 24 -12 -13.5 0 0 0 0 24 -8 -5 0 1 8 0 24 -6 -3 0 1 6 0 24 -10 -3.75 0 1 10 0 24 -12 -10.5 0 1 12 0 24 -4 -5 0 1 4 0 24 -9 -2.25 0 1 -2.25 0 24 -4 -3 0 1 4 0 24 -9 -10.13 0 0 0 0 24 -28 0 13 1 28 0 24 -22 0 10 1 22 0 24 -10 -10 0 1 10 0 24 -4 -1 0 1 -1 0 24 -4 -2.5 0 1 -2.5 0 24 -12 -24 0 0 0 0 24 -8 -12 0 0 0 0 24 -3 0 1 1 0 0 24 -9 -12.38 0 0 0 0 24 -23 0 10 1 0 0 24 -4 -3.5 0 1 4 0 24 -4 -1.5 0 1 4 0 24 -8 -10 0 0 0 0 24 -8 -14 0 0 0 0 24 -4 -6 0 0 0 0 24 -25 0 10 1 0 0 24 -12 -16.5 0 0 0 0 24 -12 -12 0 1 12 0 24 -5 -2.5 0 1 5 0 24 -5 -8.75 0 0 0 0 
[... deleted data rows for subjIDs 24-30 of the preceding data file elided ...]
-4 -2.5 0 1 -2.5 0 30
\ No newline at end of file
diff --git a/inst/extdata/ra_data_reappraisal.txt b/inst/extdata/ra_data_reappraisal.txt
deleted file mode 100644
index b67f642b..00000000
--- a/inst/extdata/ra_data_reappraisal.txt
+++ /dev/null
@@ -1,4190 +0,0 @@
-gain loss cert gamble outcome cond subjID
-9 -11.25 0 1 9 1 1
-8 -16 0 0 0 1 1
-9 -5.63 0 1 -5.63 1 1
-9 -4.5 0 1 9 1 1
-2 -2 0 1 2 1 1
[... deleted data rows for subjIDs 1-21 continue ...]
21 -5 -6.25 0 0 0 1 21 -4 0 2 1 4 1 21 -10 -11.25 0 0 0 1 21 -5 -6.88 0 0 0 1 21 -5 -7.5 0 0 0 1 21 -26 0 12 1 0 1 21 -8 -13 0 0 0 1 21 -4 -4.5 0 0 0 1 21 -8 -10 0 0 0 1 21 -6 -3.75 0 1 6 1 21 -5 -5.63 0 0 0 1 21 -9 -18 0 0 0 1 21 -12 -13.5 0 0 0 1 21 -7 0 3 1 0 1 21 -8 -14 0 0 0 1 21 -2 -1.5 0 1 -1.5 1 21 -10 -2.5 0 1 -2.5 1 21 -13 0 5 1 13 1 21 -9 -15.75 0 0 0 1 21 -8 -12 0 0 0 1 21 -28 0 13 1 0 1 21 -6 -7.5 0 0 0 1 21 -10 -16.25 0 0 0 1 21 -12 -7.5 0 0 0 1 21 -5 -5 0 0 0 1 21 -2 -2 0 0 0 1 21 -22 0 10 1 22 1 21 -2 -1 0 1 2 1 21 -3 0 1 1 3 1 21 -4 -5.5 0 0 0 1 21 -2 -2.25 0 0 0 1 21 -6 -2.25 0 1 6 1 21 -4 -6.5 0 0 0 1 21 -9 -12.38 0 0 0 1 21 -10 -13.75 0 0 0 1 21 -10 -17.5 0 0 0 1 21 -4 -5 0 0 0 1 21 -9 -11.25 0 0 0 1 21 -10 -10 0 0 0 1 21 -2 -3.25 0 0 0 1 21 -5 -8.75 0 0 0 1 21 -5 -10 0 0 0 1 21 -9 -2.25 0 1 -2.25 1 21 -6 -6.75 0 0 0 1 21 -12 -16.5 0 0 0 1 21 -9 -14.63 0 0 0 1 21 -4 -8 0 0 0 1 21 -6 -5.25 0 0 0 1 21 -12 -12 0 0 0 1 21 -4 -1 0 1 4 1 21 -12 -15 0 0 0 1 21 -4 -3.5 0 0 0 1 21 -2 -1.25 0 1 -1.25 1 21 -30 0 12 1 0 1 21 -12 -19.5 0 0 0 1 21 -12 -3 0 1 12 1 21 -5 -1.25 0 1 5 1 21 -5 -1.88 0 1 -1.88 1 21 -2 -3.5 0 0 0 1 21 -12 -9 0 0 0 1 21 -10 -20 0 0 0 1 21 -8 -4 0 1 -4 1 21 -12 0 5 1 0 1 21 -2 0 1 1 2 1 21 -4 -1.5 0 1 4 1 21 -2 -3.75 0 0 0 1 21 -6 -10.5 0 0 0 1 21 -4 -2 0 1 -2 1 21 -23 0 10 1 0 1 21 -12 -18 0 0 0 1 21 -6 -8.25 0 0 0 1 21 -26 0 10 1 26 1 21 -10 -8.75 0 0 0 1 21 -2 -0.75 0 1 2 1 21 -5 -9.38 0 0 0 1 21 -25 0 9 1 0 1 21 -9 -4.5 0 1 9 1 21 -10 -5 0 1 10 1 21 -2 -4 0 0 0 1 21 -2 -2.75 0 0 0 1 21 -4 -6 0 0 0 1 21 -10 -12.5 0 0 0 1 21 -12 -22.5 0 0 0 1 21 -4 -4 0 0 0 1 21 -2 -0.5 0 1 -0.5 1 21 -8 -2 0 1 -2 1 21 -4 -3 0 0 0 1 21 -6 -11.25 0 0 0 1 21 -8 -15 0 0 0 1 21 -8 -11 0 0 0 1 21 -12 -4.5 0 1 -4.5 1 21 -19 0 8 1 19 1 21 -6 -6 0 0 0 1 21 -5 -4.38 0 0 0 1 21 -9 -9 0 0 0 1 21 -6 -1.5 0 1 -1.5 1 21 -9 -13.5 0 0 0 1 21 -9 -5.63 0 0 0 1 21 -8 -8 0 0 0 1 21 -8 0 3 1 8 1 21 -8 -3 0 1 8 1 21 -9 -11.25 0 0 0 1 22 -8 -16 0 0 0 1 22 -9 -5.63 0 0 0 1 22 -9 -4.5 0 1 9 1 22 -2 -2 0 0 0 1 22 -12 -19.5 0 0 0 1 22 -4 -4.5 0 0 0 1 22 -2 -3.75 0 0 0 1 22 -2 -2.25 0 0 0 1 22 -12 -4.5 0 1 -4.5 1 22 -9 -10.13 0 0 0 1 22 -12 -3 0 1 12 1 22 -10 -17.5 0 0 0 1 22 -5 -4.38 0 0 0 1 22 -5 -7.5 0 0 0 1 22 -6 -11.25 0 0 0 1 22 -2 -1.5 0 0 0 1 22 -9 -6.75 0 0 0 1 22 -4 -7 0 0 0 1 22 -8 -7 0 0 0 1 22 -2 -1.75 0 0 0 1 22 -2 -1 0 0 0 1 22 -10 -6.25 0 1 -6.25 1 22 -6 -6.75 0 0 0 1 22 -9 -2.25 0 1 -2.25 1 22 -2 -0.75 0 1 2 1 22 -12 0 4 1 12 1 22 -6 -3 0 0 0 1 22 -3 0 1 1 3 1 22 -2 -3 0 0 0 1 22 -10 -13.75 0 1 -13.75 1 22 -6 -2.25 0 1 6 1 22 -5 -1.88 0 1 -1.88 1 22 -12 -13.5 0 0 0 1 22 -22 0 10 1 22 1 22 -9 -12.38 0 0 0 1 22 -26 0 10 0 10 1 22 -12 -10.5 0 0 0 1 22 -10 -2.5 0 1 -2.5 1 22 -25 0 10 1 25 1 22 -9 -15.75 0 0 0 1 22 -7 0 3 1 0 1 22 -10 -10 0 1 10 1 22 -12 -15 0 0 0 1 22 -12 0 6 0 6 1 22 -6 -4.5 0 0 0 1 22 -8 -13 0 0 0 1 22 -10 -16.25 0 0 0 1 22 -5 -1.25 0 0 0 1 22 -4 -4 0 0 0 1 22 -5 -3.75 0 0 0 1 22 -6 -8.25 0 0 0 1 22 -8 -15 0 0 0 1 22 -8 -8 0 0 0 1 22 -2 -2.75 0 0 0 1 22 -6 -12 0 0 0 1 22 -2 0 1 0 1 1 22 -2 -1.25 0 1 -1.25 1 22 -9 -18 0 0 0 1 22 -6 -9 0 0 0 1 22 -10 -8.75 0 0 0 1 22 -4 -7.5 0 0 0 1 22 -13 0 6 1 0 1 22 -10 -11.25 0 0 0 1 22 -4 -3 0 1 4 1 22 -10 -5 0 0 0 1 22 -8 -2 0 1 -2 1 22 -4 -2.5 0 1 4 1 22 -2 -3.5 0 0 0 1 22 -2 -2.5 0 0 0 1 22 -6 -3.75 0 0 0 1 22 -8 -3 0 1 8 1 22 -2 -3.25 0 1 2 1 22 -8 -9 0 0 0 1 22 -6 -6 0 0 0 1 22 -8 -11 0 0 0 1 22 -5 -8.75 0 0 0 1 22 -6 -9.75 0 0 0 1 22 -12 -24 0 0 0 1 22 -4 -6.5 0 0 0 1 22 -5 -10 0 0 0 1 22 -30 0 12 1 0 1 22 -12 -18 0 0 0 1 22 -9 
-9 0 1 9 1 22 -5 -5 0 0 0 1 22 -5 -9.38 0 0 0 1 22 -10 -12.5 0 0 0 1 22 -10 -18.75 0 0 0 1 22 -5 -2.5 0 1 -2.5 1 22 -9 -14.63 0 0 0 1 22 -28 0 13 0 13 1 22 -5 -6.88 0 0 0 1 22 -4 -3.5 0 1 -3.5 1 22 -12 -16.5 0 1 12 1 22 -5 -8.13 0 0 0 1 22 -9 -16.88 0 0 0 1 22 -9 -3.38 0 1 -3.38 1 22 -12 0 5 1 0 1 22 -4 -8 0 0 0 1 22 -8 -12 0 0 0 1 22 -8 -4 0 0 0 1 22 -2 -4 0 0 0 1 22 -12 -9 0 0 0 1 22 -4 -1.5 0 1 4 1 22 -6 -10.5 0 0 0 1 22 -5 -3.13 0 0 0 1 22 -10 -15 0 1 10 1 22 -23 0 10 0 10 1 22 -12 -7.5 0 0 0 1 22 -2 -0.5 0 1 -0.5 1 22 -4 0 2 1 4 1 22 -6 -1.5 0 0 0 1 22 -4 -1 0 1 4 1 22 -10 -20 0 0 0 1 22 -12 -22.5 0 0 0 1 22 -25 0 9 1 0 1 22 -13 0 5 1 13 1 22 -6 -5.25 0 0 0 1 22 -9 -13.5 0 0 0 1 22 -5 0 2 0 2 1 22 -12 -6 0 1 -6 1 22 -5 -6.25 0 0 0 1 22 -10 -3.75 0 1 10 1 22 -9 -7.88 0 0 0 1 22 -8 -6 0 0 0 1 22 -4 -5.5 0 0 0 1 22 -19 0 8 1 19 1 22 -10 -7.5 0 0 0 1 22 -4 -6 0 1 4 1 22 -8 -14 0 0 0 1 22 -8 0 3 1 8 1 22 -12 -21 0 0 0 1 22 -4 -2 0 0 0 1 22 -4 -5 0 0 0 1 22 -6 -7.5 0 0 0 1 22 -12 -12 0 0 0 1 22 -8 -5 0 1 -5 1 22 -26 0 12 1 0 1 22 -8 -10 0 1 -10 1 22 -5 -5.63 0 1 -5.63 1 22 -2 -1 0 1 2 1 23 -9 -6.75 0 1 -6.75 1 23 -2 -4 0 0 0 1 23 -2 -3.25 0 0 0 1 23 -4 -6.5 0 1 -6.5 1 23 -5 -5.63 0 0 0 1 23 -8 -8 0 1 -8 1 23 -12 -18 0 1 12 1 23 -2 -2.5 0 1 2 1 23 -3 0 1 1 3 1 23 -12 -16.5 0 1 12 1 23 -10 -12.5 0 0 0 1 23 -5 -1.25 0 1 5 1 23 -19 0 8 1 19 1 23 -8 -9 0 0 0 1 23 -5 -10 0 0 0 1 23 -25 0 10 1 25 1 23 -7 0 3 1 0 1 23 -6 -11.25 0 0 0 1 23 -6 -1.5 0 1 -1.5 1 23 -4 -1.5 0 1 4 1 23 -10 -5 0 1 10 1 23 -10 -3.75 0 1 10 1 23 -6 -4.5 0 1 -4.5 1 23 -12 -19.5 0 0 0 1 23 -5 -4.38 0 1 -4.38 1 23 -8 -11 0 0 0 1 23 -2 -0.75 0 1 2 1 23 -2 -1.5 0 1 -1.5 1 23 -6 -6.75 0 0 0 1 23 -4 -6 0 1 4 1 23 -10 -16.25 0 0 0 1 23 -12 -15 0 0 0 1 23 -6 -5.25 0 1 6 1 23 -12 -21 0 0 0 1 23 -4 -3 0 1 4 1 23 -12 -22.5 0 0 0 1 23 -2 -3.75 0 1 -3.75 1 23 -6 -12 0 0 0 1 23 -5 -8.13 0 0 0 1 23 -10 -8.75 0 1 -8.75 1 23 -12 -6 0 0 0 1 23 -5 -5 0 1 -5 1 23 -22 0 10 0 10 1 23 -12 -13.5 0 0 0 1 23 -8 -7 0 0 0 1 23 -4 -3.5 0 1 -3.5 1 23 -9 -12.38 0 0 0 1 23 -10 -7.5 0 0 0 1 23 -26 0 10 1 26 1 23 -12 -4.5 0 1 -4.5 1 23 -8 -15 0 0 0 1 23 -2 -1.75 0 1 2 1 23 -12 0 6 0 6 1 23 -9 -3.38 0 1 -3.38 1 23 -2 -3 0 1 -3 1 23 -9 -5.63 0 0 0 1 23 -2 -3.5 0 1 -3.5 1 23 -8 -12 0 1 8 1 23 -10 -18.75 0 0 0 1 23 -4 0 2 1 4 1 23 -2 -2.25 0 1 2 1 23 -9 -2.25 0 1 -2.25 1 23 -10 -13.75 0 0 0 1 23 -28 0 13 0 13 1 23 -4 -2.5 0 1 4 1 23 -9 -15.75 0 0 0 1 23 -10 -15 0 0 0 1 23 -10 -10 0 1 10 1 23 -9 -18 0 0 0 1 23 -12 -24 0 0 0 1 23 -13 0 5 1 13 1 23 -5 -1.88 0 1 -1.88 1 23 -4 -4.5 0 1 4 1 23 -9 -7.88 0 0 0 1 23 -9 -9 0 0 0 1 23 -25 0 9 1 0 1 23 -12 -12 0 0 0 1 23 -6 -2.25 0 1 6 1 23 -8 -5 0 1 -5 1 23 -4 -5.5 0 0 0 1 23 -2 -1.25 0 1 -1.25 1 23 -9 -13.5 0 0 0 1 23 -9 -4.5 0 1 9 1 23 -10 -11.25 0 0 0 1 23 -6 -3 0 1 -3 1 23 -10 -2.5 0 1 -2.5 1 23 -12 0 4 0 4 1 23 -10 -20 0 0 0 1 23 -5 -3.75 0 1 5 1 23 -9 -10.13 0 0 0 1 23 -4 -7 0 0 0 1 23 -12 -10.5 0 0 0 1 23 -8 -16 0 0 0 1 23 -4 -7.5 0 0 0 1 23 -8 0 3 1 8 1 23 -6 -10.5 0 0 0 1 23 -6 -9.75 0 0 0 1 23 -5 -8.75 0 0 0 1 23 -5 -2.5 0 1 -2.5 1 23 -13 0 6 0 6 1 23 -23 0 10 0 10 1 23 -8 -4 0 1 -4 1 23 -9 -11.25 0 0 0 1 23 -5 -6.88 0 0 0 1 23 -4 -4 0 1 4 1 23 -10 -17.5 0 0 0 1 23 -8 -13 0 0 0 1 23 -26 0 12 0 12 1 23 -6 -8.25 0 0 0 1 23 -9 -14.63 0 0 0 1 23 -8 -2 0 1 -2 1 23 -10 -6.25 0 0 0 1 23 -8 -14 0 0 0 1 23 -12 0 5 1 0 1 23 -8 -10 0 0 0 1 23 -30 0 12 0 12 1 23 -5 -7.5 0 0 0 1 23 -5 0 2 1 0 1 23 -6 -3.75 0 0 0 1 23 -6 -6 0 0 0 1 23 -4 -2 0 1 -2 1 23 -12 -7.5 0 0 0 1 23 -5 -6.25 0 0 0 1 23 -4 -5 0 1 4 1 23 -2 -2.75 0 0 0 1 23 
-2 -2 0 1 2 1 23 -6 -9 0 0 0 1 23 -5 -3.13 0 1 5 1 23 -12 -9 0 0 0 1 23 -4 -8 0 0 0 1 23 -4 -1 0 1 4 1 23 -2 0 1 1 2 1 23 -9 -16.88 0 0 0 1 23 -8 -6 0 0 0 1 23 -2 -0.5 0 1 -0.5 1 23 -6 -7.5 0 0 0 1 23 -8 -3 0 1 8 1 23 -12 -3 0 1 12 1 23 -5 -9.38 0 0 0 1 23 -6 -9.75 0 0 0 1 24 -12 -13.5 0 1 12 1 24 -8 -7 0 1 8 1 24 -10 -7.5 0 1 -7.5 1 24 -2 -2.25 0 0 0 1 24 -6 -8.25 0 0 0 1 24 -10 -16.25 0 0 0 1 24 -3 0 1 1 3 1 24 -4 -3 0 1 4 1 24 -8 -2 0 1 -2 1 24 -4 -2.5 0 1 4 1 24 -5 -5.63 0 1 -5.63 1 24 -5 0 2 1 0 1 24 -30 0 12 1 0 1 24 -9 -4.5 0 1 9 1 24 -4 -7.5 0 0 0 1 24 -26 0 10 1 26 1 24 -10 -6.25 0 1 -6.25 1 24 -2 -4 0 0 0 1 24 -4 -5 0 0 0 1 24 -5 -1.88 0 1 -1.88 1 24 -23 0 10 0 10 1 24 -8 -3 0 1 8 1 24 -8 -12 0 0 0 1 24 -10 -2.5 0 1 -2.5 1 24 -5 -8.13 0 0 0 1 24 -8 -9 0 1 -9 1 24 -2 -3 0 1 -3 1 24 -9 -11.25 0 0 0 1 24 -9 -12.38 0 0 0 1 24 -12 -15 0 1 -15 1 24 -8 -10 0 0 0 1 24 -4 -1 0 1 4 1 24 -8 0 3 1 8 1 24 -4 -3.5 0 1 -3.5 1 24 -8 -8 0 1 -8 1 24 -10 -11.25 0 0 0 1 24 -10 -5 0 1 10 1 24 -9 -13.5 0 0 0 1 24 -2 -0.75 0 1 2 1 24 -5 -4.38 0 1 -4.38 1 24 -2 -1.5 0 1 -1.5 1 24 -2 -3.75 0 0 0 1 24 -5 -3.75 0 1 5 1 24 -9 -16.88 0 0 0 1 24 -9 -3.38 0 1 -3.38 1 24 -5 -10 0 0 0 1 24 -26 0 12 1 0 1 24 -5 -9.38 0 0 0 1 24 -6 -1.5 0 1 -1.5 1 24 -10 -10 0 1 10 1 24 -2 -1.25 0 1 -1.25 1 24 -9 -14.63 0 0 0 1 24 -6 -4.5 0 1 -4.5 1 24 -5 -5 0 1 -5 1 24 -5 -7.5 0 0 0 1 24 -8 -13 0 0 0 1 24 -5 -3.13 0 1 5 1 24 -8 -5 0 1 -5 1 24 -8 -11 0 0 0 1 24 -6 -6.75 0 0 0 1 24 -5 -8.75 0 0 0 1 24 -2 0 1 1 2 1 24 -9 -5.63 0 1 -5.63 1 24 -6 -6 0 1 -6 1 24 -4 -5.5 0 0 0 1 24 -6 -3 0 1 -3 1 24 -12 -19.5 0 0 0 1 24 -10 -13.75 0 0 0 1 24 -10 -8.75 0 1 -8.75 1 24 -5 -6.88 0 0 0 1 24 -6 -7.5 0 0 0 1 24 -10 -12.5 0 0 0 1 24 -9 -6.75 0 1 -6.75 1 24 -4 -6 0 0 0 1 24 -8 -4 0 1 -4 1 24 -2 -1 0 1 2 1 24 -12 -24 0 0 0 1 24 -12 -6 0 1 -6 1 24 -2 -2 0 1 2 1 24 -4 -7 0 0 0 1 24 -12 -9 0 1 -9 1 24 -6 -11.25 0 0 0 1 24 -25 0 10 1 25 1 24 -28 0 13 1 0 1 24 -2 -2.75 0 0 0 1 24 -12 -10.5 0 1 -10.5 1 24 -8 -14 0 0 0 1 24 -4 -6.5 0 0 0 1 24 -4 0 2 1 4 1 24 -10 -15 0 0 0 1 24 -12 0 5 1 0 1 24 -10 -18.75 0 0 0 1 24 -12 -3 0 1 12 1 24 -4 -4 0 1 4 1 24 -9 -7.88 0 1 9 1 24 -9 -2.25 0 1 -2.25 1 24 -2 -1.75 0 1 2 1 24 -12 0 6 1 0 1 24 -5 -2.5 0 1 -2.5 1 24 -4 -4.5 0 0 0 1 24 -8 -6 0 1 -6 1 24 -12 -18 0 0 0 1 24 -12 -16.5 0 0 0 1 24 -22 0 10 1 22 1 24 -12 -21 0 0 0 1 24 -12 -4.5 0 1 -4.5 1 24 -12 -12 0 1 -12 1 24 -19 0 8 1 19 1 24 -2 -2.5 0 0 0 1 24 -12 0 4 1 12 1 24 -4 -2 0 1 -2 1 24 -9 -9 0 1 9 1 24 -9 -10.13 0 0 0 1 24 -6 -2.25 0 1 6 1 24 -2 -0.5 0 1 -0.5 1 24 -10 -3.75 0 1 10 1 24 -13 0 5 1 13 1 24 -4 -1.5 0 1 4 1 24 -5 -1.25 0 1 5 1 24 -6 -9 0 0 0 1 24 -10 -17.5 0 0 0 1 24 -6 -12 0 0 0 1 24 -6 -5.25 0 1 6 1 24 -12 -22.5 0 0 0 1 24 -8 -16 0 0 0 1 24 -9 -15.75 0 0 0 1 24 -10 -20 0 0 0 1 24 -13 0 6 1 0 1 24 -4 -8 0 0 0 1 24 -12 -7.5 0 1 -7.5 1 24 -9 -18 0 0 0 1 24 -2 -3.25 0 0 0 1 24 -7 0 3 1 0 1 24 -6 -3.75 0 1 6 1 24 -5 -6.25 0 0 0 1 24 -8 -15 0 0 0 1 24 -25 0 9 1 0 1 24 -2 -3.5 0 0 0 1 24 -6 -10.5 0 0 0 1 24 -9 -10.13 0 1 -10.13 1 25 -12 -10.5 0 1 -10.5 1 25 -25 0 10 1 25 1 25 -4 -7 0 0 0 1 25 -9 -7.88 0 1 9 1 25 -5 -3.13 0 1 5 1 25 -5 -8.13 0 0 0 1 25 -8 -7 0 1 8 1 25 -12 -6 0 1 -6 1 25 -12 -24 0 0 0 1 25 -12 -21 0 0 0 1 25 -4 -2.5 0 1 4 1 25 -6 -9 0 1 -9 1 25 -10 -15 0 1 10 1 25 -8 -6 0 1 -6 1 25 -13 0 6 1 0 1 25 -6 -12 0 0 0 1 25 -6 -4.5 0 0 0 1 25 -9 -16.88 0 1 9 1 25 -10 -18.75 0 0 0 1 25 -9 -3.38 0 1 -3.38 1 25 -6 -9.75 0 1 -9.75 1 25 -2 -1.75 0 1 2 1 25 -5 0 2 1 0 1 25 -8 -5 0 1 -5 1 25 -8 -9 0 1 -9 1 25 -12 0 6 1 0 1 25 -12 0 4 1 12 1 25 -2 -2.5 
0 1 2 1 25 -6 -3 0 0 0 1 25 -10 -7.5 0 1 -7.5 1 25 -5 -2.5 0 1 -2.5 1 25 -5 -3.75 0 1 5 1 25 -10 -3.75 0 1 10 1 25 -2 -3 0 0 0 1 25 -10 -6.25 0 1 -6.25 1 25 -4 -7.5 0 0 0 1 25 -8 -16 0 0 0 1 25 -5 -6.25 0 0 0 1 25 -4 0 2 1 4 1 25 -10 -11.25 0 1 10 1 25 -5 -6.88 0 1 -6.88 1 25 -5 -7.5 0 1 5 1 25 -26 0 12 1 0 1 25 -8 -13 0 0 0 1 25 -4 -4.5 0 1 4 1 25 -8 -10 0 1 -10 1 25 -6 -3.75 0 1 6 1 25 -5 -5.63 0 1 -5.63 1 25 -9 -18 0 0 0 1 25 -12 -13.5 0 0 0 1 25 -7 0 3 1 0 1 25 -8 -14 0 1 8 1 25 -2 -1.5 0 1 -1.5 1 25 -10 -2.5 0 1 -2.5 1 25 -13 0 5 1 13 1 25 -9 -15.75 0 0 0 1 25 -8 -12 0 1 8 1 25 -28 0 13 1 0 1 25 -6 -7.5 0 1 -7.5 1 25 -10 -16.25 0 0 0 1 25 -12 -7.5 0 1 -7.5 1 25 -5 -5 0 1 -5 1 25 -2 -2 0 1 2 1 25 -22 0 10 0 10 1 25 -2 -1 0 1 2 1 25 -3 0 1 1 3 1 25 -4 -5.5 0 1 -5.5 1 25 -2 -2.25 0 1 2 1 25 -6 -2.25 0 1 6 1 25 -4 -6.5 0 1 -6.5 1 25 -9 -12.38 0 0 0 1 25 -10 -13.75 0 0 0 1 25 -10 -17.5 0 1 10 1 25 -4 -5 0 1 4 1 25 -9 -11.25 0 1 9 1 25 -10 -10 0 0 0 1 25 -2 -3.25 0 1 2 1 25 -5 -8.75 0 1 5 1 25 -5 -10 0 0 0 1 25 -9 -2.25 0 1 -2.25 1 25 -6 -6.75 0 1 6 1 25 -12 -16.5 0 0 0 1 25 -9 -14.63 0 1 9 1 25 -4 -8 0 0 0 1 25 -6 -5.25 0 1 6 1 25 -9 -6.75 0 1 -6.75 1 25 -12 -12 0 1 -12 1 25 -4 -1 0 1 4 1 25 -12 -15 0 1 -15 1 25 -4 -3.5 0 1 -3.5 1 25 -2 -1.25 0 1 -1.25 1 25 -30 0 12 1 0 1 25 -12 -19.5 0 0 0 1 25 -12 -3 0 1 12 1 25 -5 -1.25 0 1 5 1 25 -5 -1.88 0 1 -1.88 1 25 -2 -3.5 0 1 -3.5 1 25 -12 -9 0 0 0 1 25 -10 -20 0 0 0 1 25 -8 -4 0 1 -4 1 25 -12 0 5 1 0 1 25 -2 0 1 0 1 1 25 -4 -1.5 0 1 4 1 25 -2 -3.75 0 0 0 1 25 -6 -10.5 0 0 0 1 25 -4 -2 0 1 -2 1 25 -23 0 10 0 10 1 25 -12 -18 0 1 12 1 25 -6 -8.25 0 0 0 1 25 -26 0 10 1 26 1 25 -10 -8.75 0 1 -8.75 1 25 -2 -0.75 0 1 2 1 25 -5 -9.38 0 1 -9.38 1 25 -25 0 9 1 0 1 25 -9 -4.5 0 1 9 1 25 -10 -5 0 1 10 1 25 -2 -4 0 1 -4 1 25 -2 -2.75 0 1 -2.75 1 25 -4 -6 0 0 0 1 25 -10 -12.5 0 1 -12.5 1 25 -12 -22.5 0 0 0 1 25 -4 -4 0 1 4 1 25 -2 -0.5 0 1 -0.5 1 25 -8 -2 0 1 -2 1 25 -4 -3 0 1 4 1 25 -6 -11.25 0 0 0 1 25 -8 -15 0 0 0 1 25 -8 -11 0 1 8 1 25 -12 -4.5 0 1 -4.5 1 25 -19 0 8 1 19 1 25 -6 -6 0 1 -6 1 25 -5 -4.38 0 1 -4.38 1 25 -9 -9 0 1 9 1 25 -6 -1.5 0 1 -1.5 1 25 -9 -13.5 0 1 -13.5 1 25 -9 -5.63 0 1 -5.63 1 25 -8 -8 0 1 -8 1 25 -8 0 3 1 8 1 25 -8 -3 0 1 8 1 25 -9 -11.25 0 1 9 1 26 -8 -16 0 0 0 1 26 -9 -5.63 0 1 -5.63 1 26 -9 -4.5 0 1 9 1 26 -2 -2 0 0 0 1 26 -12 -19.5 0 0 0 1 26 -4 -4.5 0 1 4 1 26 -2 -3.75 0 0 0 1 26 -2 -2.25 0 0 0 1 26 -12 -4.5 0 1 -4.5 1 26 -9 -10.13 0 0 0 1 26 -12 -3 0 1 12 1 26 -10 -17.5 0 0 0 1 26 -5 -4.38 0 1 -4.38 1 26 -5 -7.5 0 0 0 1 26 -6 -11.25 0 0 0 1 26 -2 -1.5 0 1 -1.5 1 26 -9 -6.75 0 1 -6.75 1 26 -4 -7 0 0 0 1 26 -8 -7 0 0 0 1 26 -2 -1.75 0 1 2 1 26 -2 -1 0 1 2 1 26 -10 -6.25 0 0 0 1 26 -6 -6.75 0 0 0 1 26 -9 -2.25 0 1 -2.25 1 26 -2 -0.75 0 1 2 1 26 -12 0 4 1 12 1 26 -6 -3 0 1 -3 1 26 -3 0 1 1 3 1 26 -2 -3 0 1 -3 1 26 -10 -13.75 0 1 -13.75 1 26 -6 -2.25 0 1 6 1 26 -5 -1.88 0 1 -1.88 1 26 -12 -13.5 0 0 0 1 26 -22 0 10 0 10 1 26 -9 -12.38 0 0 0 1 26 -26 0 10 0 10 1 26 -12 -10.5 0 0 0 1 26 -10 -2.5 0 1 -2.5 1 26 -25 0 10 0 10 1 26 -9 -15.75 0 0 0 1 26 -7 0 3 1 0 1 26 -10 -10 0 0 0 1 26 -12 -15 0 0 0 1 26 -12 0 6 0 6 1 26 -6 -4.5 0 1 -4.5 1 26 -8 -13 0 0 0 1 26 -10 -16.25 0 0 0 1 26 -5 -1.25 0 1 5 1 26 -4 -4 0 0 0 1 26 -5 -3.75 0 1 5 1 26 -6 -8.25 0 0 0 1 26 -8 -15 0 0 0 1 26 -8 -8 0 0 0 1 26 -2 -2.75 0 1 -2.75 1 26 -6 -12 0 0 0 1 26 -2 0 1 1 2 1 26 -2 -1.25 0 1 -1.25 1 26 -9 -18 0 0 0 1 26 -6 -9 0 0 0 1 26 -10 -8.75 0 1 -8.75 1 26 -4 -7.5 0 0 0 1 26 -13 0 6 1 0 1 26 -10 -11.25 0 0 0 1 26 -4 -3 0 0 0 1 26 -10 -5 0 1 10 1 26 -8 -2 0 1 -2 
1 26 -4 -2.5 0 1 4 1 26 -2 -3.5 0 0 0 1 26 -2 -2.5 0 0 0 1 26 -6 -3.75 0 1 6 1 26 -8 -3 0 1 8 1 26 -2 -3.25 0 0 0 1 26 -8 -9 0 0 0 1 26 -6 -6 0 1 -6 1 26 -8 -11 0 0 0 1 26 -5 -8.75 0 0 0 1 26 -6 -9.75 0 0 0 1 26 -12 -24 0 0 0 1 26 -4 -6.5 0 0 0 1 26 -5 -10 0 0 0 1 26 -30 0 12 1 0 1 26 -12 -18 0 0 0 1 26 -9 -9 0 0 0 1 26 -5 -5 0 0 0 1 26 -5 -9.38 0 0 0 1 26 -10 -12.5 0 1 -12.5 1 26 -10 -18.75 0 0 0 1 26 -5 -2.5 0 1 -2.5 1 26 -9 -14.63 0 0 0 1 26 -28 0 13 1 0 1 26 -5 -6.88 0 0 0 1 26 -4 -3.5 0 0 0 1 26 -12 -16.5 0 1 12 1 26 -5 -8.13 0 0 0 1 26 -9 -16.88 0 0 0 1 26 -9 -3.38 0 1 -3.38 1 26 -12 0 5 1 0 1 26 -4 -8 0 0 0 1 26 -8 -12 0 0 0 1 26 -8 -4 0 1 -4 1 26 -2 -4 0 0 0 1 26 -12 -9 0 1 -9 1 26 -4 -1.5 0 1 4 1 26 -6 -10.5 0 0 0 1 26 -5 -3.13 0 1 5 1 26 -10 -15 0 0 0 1 26 -23 0 10 0 10 1 26 -12 -7.5 0 1 -7.5 1 26 -2 -0.5 0 1 -0.5 1 26 -4 0 2 1 4 1 26 -6 -1.5 0 1 -1.5 1 26 -4 -1 0 1 4 1 26 -10 -20 0 0 0 1 26 -12 -22.5 0 0 0 1 26 -25 0 9 1 0 1 26 -13 0 5 1 13 1 26 -6 -5.25 0 0 0 1 26 -9 -13.5 0 0 0 1 26 -5 0 2 1 0 1 26 -12 -6 0 1 -6 1 26 -5 -6.25 0 0 0 1 26 -10 -3.75 0 1 10 1 26 -9 -7.88 0 0 0 1 26 -8 -6 0 0 0 1 26 -4 -5.5 0 0 0 1 26 -19 0 8 1 19 1 26 -10 -7.5 0 0 0 1 26 -4 -6 0 0 0 1 26 -8 -14 0 0 0 1 26 -8 0 3 0 3 1 26 -12 -21 0 0 0 1 26 -4 -2 0 1 -2 1 26 -4 -5 0 0 0 1 26 -6 -7.5 0 0 0 1 26 -12 -12 0 0 0 1 26 -8 -5 0 1 -5 1 26 -26 0 12 0 12 1 26 -8 -10 0 0 0 1 26 -5 -5.63 0 0 0 1 26 -2 -1 0 1 2 1 27 -9 -6.75 0 1 -6.75 1 27 -2 -4 0 1 -4 1 27 -2 -3.25 0 1 2 1 27 -4 -6.5 0 1 -6.5 1 27 -5 -5.63 0 1 -5.63 1 27 -8 -8 0 1 -8 1 27 -12 -18 0 1 12 1 27 -2 -2.5 0 0 0 1 27 -3 0 1 1 3 1 27 -12 -16.5 0 0 0 1 27 -10 -12.5 0 1 -12.5 1 27 -5 -1.25 0 1 5 1 27 -19 0 8 1 19 1 27 -8 -9 0 1 -9 1 27 -5 -10 0 1 5 1 27 -25 0 10 0 10 1 27 -7 0 3 1 0 1 27 -6 -11.25 0 1 6 1 27 -6 -1.5 0 1 -1.5 1 27 -4 -1.5 0 1 4 1 27 -10 -5 0 1 10 1 27 -10 -3.75 0 1 10 1 27 -6 -4.5 0 1 -4.5 1 27 -12 -19.5 0 1 12 1 27 -5 -4.38 0 0 0 1 27 -8 -11 0 1 8 1 27 -2 -0.75 0 1 2 1 27 -2 -1.5 0 0 0 1 27 -6 -6.75 0 1 6 1 27 -4 -6 0 1 4 1 27 -10 -16.25 0 1 -16.25 1 27 -12 -15 0 1 -15 1 27 -6 -5.25 0 1 6 1 27 -12 -21 0 1 12 1 27 -4 -3 0 1 4 1 27 -12 -22.5 0 1 12 1 27 -2 -3.75 0 1 -3.75 1 27 -6 -12 0 1 -12 1 27 -5 -8.13 0 0 0 1 27 -10 -8.75 0 1 -8.75 1 27 -12 -6 0 1 -6 1 27 -5 -5 0 1 -5 1 27 -22 0 10 1 22 1 27 -12 -13.5 0 1 12 1 27 -8 -7 0 1 8 1 27 -4 -3.5 0 1 -3.5 1 27 -9 -12.38 0 1 9 1 27 -10 -7.5 0 1 -7.5 1 27 -26 0 10 1 26 1 27 -12 -4.5 0 1 -4.5 1 27 -8 -15 0 1 -15 1 27 -2 -1.75 0 1 2 1 27 -12 0 6 1 0 1 27 -9 -3.38 0 1 -3.38 1 27 -2 -3 0 1 -3 1 27 -9 -5.63 0 1 -5.63 1 27 -2 -3.5 0 0 0 1 27 -8 -12 0 1 8 1 27 -10 -18.75 0 0 0 1 27 -4 0 2 1 4 1 27 -2 -2.25 0 1 2 1 27 -9 -2.25 0 1 -2.25 1 27 -10 -13.75 0 1 -13.75 1 27 -28 0 13 1 0 1 27 -4 -2.5 0 1 4 1 27 -9 -15.75 0 1 9 1 27 -10 -15 0 1 10 1 27 -10 -10 0 1 10 1 27 -9 -18 0 0 0 1 27 -12 -24 0 0 0 1 27 -13 0 5 0 5 1 27 -5 -1.88 0 1 -1.88 1 27 -4 -4.5 0 1 4 1 27 -9 -7.88 0 1 9 1 27 -9 -9 0 1 9 1 27 -25 0 9 1 0 1 27 -12 -12 0 1 -12 1 27 -6 -2.25 0 1 6 1 27 -8 -5 0 1 -5 1 27 -4 -5.5 0 1 -5.5 1 27 -2 -1.25 0 1 -1.25 1 27 -9 -13.5 0 1 -13.5 1 27 -9 -4.5 0 1 9 1 27 -10 -11.25 0 1 10 1 27 -6 -3 0 1 -3 1 27 -10 -2.5 0 1 -2.5 1 27 -12 0 4 1 12 1 27 -10 -20 0 0 0 1 27 -5 -3.75 0 1 5 1 27 -9 -10.13 0 1 -10.13 1 27 -4 -7 0 1 -7 1 27 -12 -10.5 0 1 -10.5 1 27 -8 -16 0 0 0 1 27 -4 -7.5 0 1 4 1 27 -8 0 3 1 8 1 27 -6 -10.5 0 1 -10.5 1 27 -6 -9.75 0 1 -9.75 1 27 -5 -8.75 0 1 5 1 27 -5 -2.5 0 1 -2.5 1 27 -13 0 6 0 6 1 27 -23 0 10 1 0 1 27 -8 -4 0 1 -4 1 27 -9 -11.25 0 1 9 1 27 -5 -6.88 0 1 -6.88 1 27 -4 -4 0 1 4 1 27 -10 
-17.5 0 1 10 1 27 -8 -13 0 0 0 1 27 -26 0 12 1 0 1 27 -6 -8.25 0 1 -8.25 1 27 -9 -14.63 0 1 9 1 27 -8 -2 0 1 -2 1 27 -10 -6.25 0 1 -6.25 1 27 -8 -14 0 0 0 1 27 -12 0 5 1 0 1 27 -8 -10 0 1 -10 1 27 -30 0 12 1 0 1 27 -5 -7.5 0 0 0 1 27 -5 0 2 1 0 1 27 -6 -3.75 0 1 6 1 27 -6 -6 0 1 -6 1 27 -4 -2 0 1 -2 1 27 -12 -7.5 0 1 -7.5 1 27 -5 -6.25 0 1 5 1 27 -4 -5 0 1 4 1 27 -2 -2.75 0 1 -2.75 1 27 -2 -2 0 1 2 1 27 -6 -9 0 1 -9 1 27 -5 -3.13 0 1 5 1 27 -12 -9 0 1 -9 1 27 -4 -8 0 1 -8 1 27 -4 -1 0 1 4 1 27 -2 0 1 0 1 1 27 -9 -16.88 0 0 0 1 27 -8 -6 0 1 -6 1 27 -2 -0.5 0 1 -0.5 1 27 -6 -7.5 0 1 -7.5 1 27 -8 -3 0 1 8 1 27 -12 -3 0 1 12 1 27 -5 -9.38 0 1 -9.38 1 27 -6 -9.75 0 0 0 1 28 -12 -13.5 0 1 12 1 28 -8 -7 0 1 8 1 28 -10 -7.5 0 1 -7.5 1 28 -2 -2.25 0 0 0 1 28 -6 -8.25 0 1 -8.25 1 28 -10 -16.25 0 0 0 1 28 -3 0 1 0 1 1 28 -4 -3 0 1 4 1 28 -8 -2 0 1 -2 1 28 -4 -2.5 0 1 4 1 28 -5 -5.63 0 0 0 1 28 -5 0 2 1 0 1 28 -30 0 12 1 0 1 28 -9 -4.5 0 0 0 1 28 -4 -7.5 0 0 0 1 28 -26 0 10 1 26 1 28 -10 -6.25 0 1 -6.25 1 28 -2 -4 0 0 0 1 28 -4 -5 0 0 0 1 28 -5 -1.88 0 1 -1.88 1 28 -23 0 10 0 10 1 28 -8 -3 0 1 8 1 28 -8 -12 0 0 0 1 28 -10 -2.5 0 0 0 1 28 -5 -8.13 0 0 0 1 28 -8 -9 0 0 0 1 28 -2 -3 0 0 0 1 28 -9 -11.25 0 0 0 1 28 -9 -12.38 0 0 0 1 28 -12 -15 0 0 0 1 28 -8 -10 0 0 0 1 28 -4 -1 0 1 4 1 28 -8 0 3 1 8 1 28 -4 -3.5 0 0 0 1 28 -8 -8 0 0 0 1 28 -10 -11.25 0 0 0 1 28 -10 -5 0 1 10 1 28 -9 -13.5 0 0 0 1 28 -2 -0.75 0 1 2 1 28 -5 -4.38 0 0 0 1 28 -2 -1.5 0 0 0 1 28 -2 -3.75 0 0 0 1 28 -5 -3.75 0 1 5 1 28 -9 -16.88 0 0 0 1 28 -9 -3.38 0 1 -3.38 1 28 -5 -10 0 0 0 1 28 -26 0 12 0 12 1 28 -5 -9.38 0 0 0 1 28 -6 -1.5 0 1 -1.5 1 28 -10 -10 0 0 0 1 28 -2 -1.25 0 0 0 1 28 -9 -14.63 0 0 0 1 28 -6 -4.5 0 0 0 1 28 -5 -5 0 0 0 1 28 -5 -7.5 0 0 0 1 28 -8 -13 0 0 0 1 28 -5 -3.13 0 0 0 1 28 -8 -5 0 1 -5 1 28 -8 -11 0 0 0 1 28 -6 -6.75 0 0 0 1 28 -5 -8.75 0 0 0 1 28 -2 0 1 1 2 1 28 -9 -5.63 0 0 0 1 28 -6 -6 0 0 0 1 28 -4 -5.5 0 0 0 1 28 -6 -3 0 1 -3 1 28 -12 -19.5 0 0 0 1 28 -10 -13.75 0 0 0 1 28 -10 -8.75 0 0 0 1 28 -5 -6.88 0 0 0 1 28 -6 -7.5 0 0 0 1 28 -10 -12.5 0 0 0 1 28 -9 -6.75 0 0 0 1 28 -4 -6 0 0 0 1 28 -8 -4 0 0 0 1 28 -2 -1 0 1 2 1 28 -12 -24 0 0 0 1 28 -12 -6 0 0 0 1 28 -2 -2 0 0 0 1 28 -4 -7 0 0 0 1 28 -12 -9 0 0 0 1 28 -6 -11.25 0 0 0 1 28 -25 0 10 1 25 1 28 -28 0 13 1 0 1 28 -2 -2.75 0 0 0 1 28 -12 -10.5 0 0 0 1 28 -8 -14 0 0 0 1 28 -4 -6.5 0 0 0 1 28 -4 0 2 1 4 1 28 -10 -15 0 0 0 1 28 -12 0 5 1 0 1 28 -10 -18.75 0 0 0 1 28 -12 -3 0 1 12 1 28 -4 -4 0 0 0 1 28 -9 -7.88 0 0 0 1 28 -9 -2.25 0 1 -2.25 1 28 -2 -1.75 0 0 0 1 28 -12 0 6 1 0 1 28 -5 -2.5 0 0 0 1 28 -4 -4.5 0 0 0 1 28 -8 -6 0 0 0 1 28 -12 -18 0 0 0 1 28 -12 -16.5 0 0 0 1 28 -22 0 10 1 22 1 28 -12 -21 0 0 0 1 28 -12 -4.5 0 0 0 1 28 -12 -12 0 0 0 1 28 -19 0 8 1 19 1 28 -2 -2.5 0 0 0 1 28 -12 0 4 1 12 1 28 -4 -2 0 0 0 1 28 -9 -9 0 0 0 1 28 -9 -10.13 0 0 0 1 28 -6 -2.25 0 1 6 1 28 -2 -0.5 0 1 -0.5 1 28 -10 -3.75 0 0 0 1 28 -13 0 5 1 13 1 28 -4 -1.5 0 1 4 1 28 -5 -1.25 0 1 5 1 28 -6 -9 0 0 0 1 28 -10 -17.5 0 0 0 1 28 -6 -12 0 0 0 1 28 -6 -5.25 0 0 0 1 28 -12 -22.5 0 0 0 1 28 -8 -16 0 0 0 1 28 -9 -15.75 0 0 0 1 28 -10 -20 0 0 0 1 28 -13 0 6 0 6 1 28 -4 -8 0 0 0 1 28 -12 -7.5 0 0 0 1 28 -9 -18 0 0 0 1 28 -2 -3.25 0 0 0 1 28 -7 0 3 1 0 1 28 -6 -3.75 0 0 0 1 28 -5 -6.25 0 0 0 1 28 -8 -15 0 0 0 1 28 -25 0 9 1 0 1 28 -2 -3.5 0 0 0 1 28 -6 -10.5 0 0 0 1 28 -9 -10.13 0 0 0 1 29 -12 -10.5 0 1 -10.5 1 29 -25 0 10 1 25 1 29 -4 -7 0 0 0 1 29 -9 -7.88 0 0 0 1 29 -5 -3.13 0 1 5 1 29 -5 -8.13 0 0 0 1 29 -8 -7 0 1 8 1 29 -12 -6 0 1 -6 1 29 -12 -24 0 0 0 1 29 -12 -21 0 0 0 1 29 
-4 -2.5 0 1 4 1 29 -6 -9 0 0 0 1 29 -10 -15 0 0 0 1 29 -8 -6 0 1 -6 1 29 -13 0 6 1 0 1 29 -6 -12 0 0 0 1 29 -6 -4.5 0 1 -4.5 1 29 -9 -16.88 0 0 0 1 29 -10 -18.75 0 0 0 1 29 -9 -3.38 0 1 -3.38 1 29 -6 -9.75 0 1 -9.75 1 29 -2 -1.75 0 1 2 1 29 -5 0 2 1 0 1 29 -8 -5 0 0 0 1 29 -8 -9 0 1 -9 1 29 -12 0 6 1 0 1 29 -12 0 4 1 12 1 29 -2 -2.5 0 0 0 1 29 -6 -3 0 1 -3 1 29 -10 -7.5 0 1 -7.5 1 29 -5 -2.5 0 1 -2.5 1 29 -5 -3.75 0 1 5 1 29 -10 -3.75 0 1 10 1 29 -2 -3 0 0 0 1 29 -10 -6.25 0 0 0 1 29 -4 -7.5 0 0 0 1 29 -8 -16 0 0 0 1 29 -5 -6.25 0 0 0 1 29 -4 0 2 1 4 1 29 -10 -11.25 0 0 0 1 29 -5 -6.88 0 0 0 1 29 -5 -7.5 0 1 5 1 29 -26 0 12 0 12 1 29 -8 -13 0 0 0 1 29 -4 -4.5 0 1 4 1 29 -8 -10 0 0 0 1 29 -6 -3.75 0 1 6 1 29 -5 -5.63 0 0 0 1 29 -9 -18 0 0 0 1 29 -12 -13.5 0 1 12 1 29 -7 0 3 0 3 1 29 -8 -14 0 0 0 1 29 -2 -1.5 0 1 -1.5 1 29 -10 -2.5 0 1 -2.5 1 29 -13 0 5 0 5 1 29 -9 -15.75 0 0 0 1 29 -8 -12 0 0 0 1 29 -28 0 13 1 0 1 29 -6 -7.5 0 0 0 1 29 -10 -16.25 0 0 0 1 29 -12 -7.5 0 0 0 1 29 -5 -5 0 1 -5 1 29 -2 -2 0 1 2 1 29 -22 0 10 0 10 1 29 -2 -1 0 1 2 1 29 -3 0 1 0 1 1 29 -4 -5.5 0 0 0 1 29 -2 -2.25 0 0 0 1 29 -6 -2.25 0 1 6 1 29 -4 -6.5 0 0 0 1 29 -9 -12.38 0 0 0 1 29 -10 -13.75 0 0 0 1 29 -10 -17.5 0 0 0 1 29 -4 -5 0 1 4 1 29 -9 -11.25 0 0 0 1 29 -10 -10 0 0 0 1 29 -2 -3.25 0 1 2 1 29 -5 -8.75 0 0 0 1 29 -5 -10 0 0 0 1 29 -9 -2.25 0 1 -2.25 1 29 -6 -6.75 0 0 0 1 29 -12 -16.5 0 0 0 1 29 -9 -14.63 0 0 0 1 29 -4 -8 0 0 0 1 29 -6 -5.25 0 0 0 1 29 -9 -6.75 0 1 -6.75 1 29 -12 -12 0 1 -12 1 29 -4 -1 0 1 4 1 29 -12 -15 0 0 0 1 29 -4 -3.5 0 0 0 1 29 -2 -1.25 0 0 0 1 29 -30 0 12 0 12 1 29 -12 -19.5 0 0 0 1 29 -12 -3 0 0 0 1 29 -5 -1.25 0 0 0 1 29 -5 -1.88 0 1 -1.88 1 29 -2 -3.5 0 1 -3.5 1 29 -12 -9 0 0 0 1 29 -10 -20 0 0 0 1 29 -8 -4 0 1 -4 1 29 -12 0 5 0 5 1 29 -2 0 1 0 1 1 29 -4 -1.5 0 1 4 1 29 -2 -3.75 0 0 0 1 29 -6 -10.5 0 0 0 1 29 -4 -2 0 0 0 1 29 -23 0 10 0 10 1 29 -12 -18 0 0 0 1 29 -6 -8.25 0 0 0 1 29 -26 0 10 0 10 1 29 -10 -8.75 0 0 0 1 29 -2 -0.75 0 1 2 1 29 -5 -9.38 0 0 0 1 29 -25 0 9 0 9 1 29 -9 -4.5 0 0 0 1 29 -10 -5 0 1 10 1 29 -2 -4 0 0 0 1 29 -2 -2.75 0 1 -2.75 1 29 -4 -6 0 0 0 1 29 -10 -12.5 0 0 0 1 29 -12 -22.5 0 0 0 1 29 -4 -4 0 1 4 1 29 -2 -0.5 0 1 -0.5 1 29 -8 -2 0 1 -2 1 29 -4 -3 0 1 4 1 29 -6 -11.25 0 0 0 1 29 -8 -15 0 0 0 1 29 -8 -11 0 0 0 1 29 -12 -4.5 0 1 -4.5 1 29 -19 0 8 0 8 1 29 -6 -6 0 0 0 1 29 -5 -4.38 0 0 0 1 29 -9 -9 0 0 0 1 29 -6 -1.5 0 1 -1.5 1 29 -9 -13.5 0 1 -13.5 1 29 -9 -5.63 0 0 0 1 29 -8 -8 0 0 0 1 29 -8 0 3 0 3 1 29 -8 -3 0 1 8 1 29 -9 -11.25 0 0 0 1 30 -8 -16 0 0 0 1 30 -9 -5.63 0 1 -5.63 1 30 -9 -4.5 0 1 9 1 30 -2 -2 0 0 0 1 30 -12 -19.5 0 0 0 1 30 -4 -4.5 0 0 0 1 30 -2 -3.75 0 1 -3.75 1 30 -2 -2.25 0 0 0 1 30 -12 -4.5 0 1 -4.5 1 30 -9 -10.13 0 0 0 1 30 -12 -3 0 1 12 1 30 -10 -17.5 0 0 0 1 30 -5 -4.38 0 0 0 1 30 -5 -7.5 0 0 0 1 30 -6 -11.25 0 0 0 1 30 -2 -1.5 0 0 0 1 30 -9 -6.75 0 1 -6.75 1 30 -4 -7 0 0 0 1 30 -8 -7 0 0 0 1 30 -2 -1.75 0 0 0 1 30 -2 -1 0 0 0 1 30 -10 -6.25 0 1 -6.25 1 30 -6 -6.75 0 0 0 1 30 -9 -2.25 0 1 -2.25 1 30 -2 -0.75 0 0 0 1 30 -12 0 4 0 4 1 30 -6 -3 0 1 -3 1 30 -3 0 1 0 1 1 30 -2 -3 0 0 0 1 30 -10 -13.75 0 0 0 1 30 -6 -2.25 0 1 6 1 30 -5 -1.88 0 1 -1.88 1 30 -12 -13.5 0 0 0 1 30 -22 0 10 0 10 1 30 -9 -12.38 0 0 0 1 30 -26 0 10 0 10 1 30 -12 -10.5 0 0 0 1 30 -10 -2.5 0 1 -2.5 1 30 -25 0 10 0 10 1 30 -9 -15.75 0 0 0 1 30 -7 0 3 0 3 1 30 -10 -10 0 0 0 1 30 -12 -15 0 0 0 1 30 -12 0 6 0 6 1 30 -6 -4.5 0 0 0 1 30 -8 -13 0 0 0 1 30 -10 -16.25 0 0 0 1 30 -5 -1.25 0 1 5 1 30 -4 -4 0 0 0 1 30 -5 -3.75 0 0 0 1 30 -6 -8.25 0 0 0 1 30 -8 -15 0 0 0 1 30 
-8 -8 0 0 0 1 30 -2 -2.75 0 0 0 1 30 -6 -12 0 0 0 1 30 -2 0 1 0 1 1 30 -2 -1.25 0 0 0 1 30 -9 -18 0 0 0 1 30 -6 -9 0 0 0 1 30 -10 -8.75 0 0 0 1 30 -4 -7.5 0 0 0 1 30 -13 0 6 0 6 1 30 -10 -11.25 0 0 0 1 30 -4 -3 0 0 0 1 30 -10 -5 0 0 0 1 30 -8 -2 0 0 0 1 30 -4 -2.5 0 0 0 1 30 -2 -3.5 0 0 0 1 30 -2 -2.5 0 0 0 1 30 -6 -3.75 0 0 0 1 30 -8 -3 0 0 0 1 30 -2 -3.25 0 0 0 1 30 -8 -9 0 0 0 1 30 -6 -6 0 0 0 1 30 -8 -11 0 0 0 1 30 -5 -8.75 0 0 0 1 30 -6 -9.75 0 0 0 1 30 -12 -24 0 0 0 1 30 -4 -6.5 0 0 0 1 30 -5 -10 0 0 0 1 30 -30 0 12 1 0 1 30 -12 -18 0 0 0 1 30 -9 -9 0 0 0 1 30 -5 -5 0 0 0 1 30 -5 -9.38 0 0 0 1 30 -10 -12.5 0 0 0 1 30 -10 -18.75 0 0 0 1 30 -5 -2.5 0 1 -2.5 1 30 -9 -14.63 0 0 0 1 30 -28 0 13 1 0 1 30 -5 -6.88 0 1 -6.88 1 30 -4 -3.5 0 0 0 1 30 -12 -16.5 0 0 0 1 30 -5 -8.13 0 0 0 1 30 -9 -16.88 0 0 0 1 30 -9 -3.38 0 0 0 1 30 -12 0 5 1 0 1 30 -4 -8 0 0 0 1 30 -8 -12 0 0 0 1 30 -8 -4 0 0 0 1 30 -2 -4 0 0 0 1 30 -12 -9 0 1 -9 1 30 -4 -1.5 0 0 0 1 30 -6 -10.5 0 0 0 1 30 -5 -3.13 0 0 0 1 30 -10 -15 0 0 0 1 30 -23 0 10 0 10 1 30 -12 -7.5 0 0 0 1 30 -2 -0.5 0 0 0 1 30 -4 0 2 0 2 1 30 -6 -1.5 0 0 0 1 30 -4 -1 0 0 0 1 30 -10 -20 0 0 0 1 30 -12 -22.5 0 0 0 1 30 -25 0 9 0 9 1 30 -13 0 5 0 5 1 30 -6 -5.25 0 0 0 1 30 -9 -13.5 0 0 0 1 30 -5 0 2 0 2 1 30 -12 -6 0 0 0 1 30 -5 -6.25 0 0 0 1 30 -10 -3.75 0 0 0 1 30 -9 -7.88 0 0 0 1 30 -8 -6 0 0 0 1 30 -4 -5.5 0 0 0 1 30 -19 0 8 0 8 1 30 -10 -7.5 0 0 0 1 30 -4 -6 0 0 0 1 30 -8 -14 0 0 0 1 30 -8 0 3 0 3 1 30 -12 -21 0 0 0 1 30 -4 -2 0 0 0 1 30 -4 -5 0 0 0 1 30 -6 -7.5 0 0 0 1 30 -12 -12 0 0 0 1 30 -8 -5 0 0 0 1 30 -26 0 12 1 0 1 30 -8 -10 0 0 0 1 30 -5 -5.63 0 0 0 1 30 \ No newline at end of file diff --git a/inst/extdata/ra_exampleData.txt b/inst/extdata/ra_exampleData.txt deleted file mode 100644 index dd6e3536..00000000 --- a/inst/extdata/ra_exampleData.txt +++ /dev/null @@ -1,701 +0,0 @@ -gain loss cert gamble outcome cond subjID -9 -6.75 0 1 -6.75 0 2 -6 -6.75 0 0 0 0 2 -6 -3 0 1 6 0 2 -2 -1.5 0 0 0 0 2 -4 -3 0 0 0 0 2 -5 -6.88 0 0 0 0 2 -12 -9 0 1 12 0 2 -4 -5 0 0 0 0 2 -5 -7.5 0 0 0 0 2 -4 -4 0 1 -4 0 2 -9 -5.63 0 1 -5.63 0 2 -9 -14.63 0 0 0 0 2 -5 -9.38 0 0 0 0 2 -6 -4.5 0 1 6 0 2 -8 -7 0 0 0 0 2 -10 -16.25 0 0 0 0 2 -10 -17.5 0 0 0 0 2 -9 -16.88 0 0 0 0 2 -8 -5 0 1 8 0 2 -6 -1.5 0 1 6 0 2 -12 -18 0 0 0 0 2 -5 -6.25 0 0 0 0 2 -8 -4 0 1 8 0 2 -9 -15.75 0 0 0 0 2 -9 -13.5 0 0 0 0 2 -5 -8.13 0 0 0 0 2 -2 0 1 1 0 0 2 -2 -3.75 0 0 0 0 2 -4 -6.5 0 0 0 0 2 -10 -5 0 1 -5 0 2 -12 -22.5 0 0 0 0 2 -2 -1 0 1 2 0 2 -13 0 6 1 13 0 2 -5 -2.5 0 0 0 0 2 -2 -0.5 0 1 2 0 2 -2 -3.25 0 1 -3.25 0 2 -30 0 12 1 0 0 2 -8 -8 0 1 8 0 2 -4 -5.5 0 0 0 0 2 -23 0 10 1 0 0 2 -4 -3.5 0 0 0 0 2 -5 0 2 1 5 0 2 -8 0 3 1 0 0 2 -9 -10.13 0 0 0 0 2 -8 -16 0 0 0 0 2 -12 -24 0 0 0 0 2 -9 -3.38 0 1 -3.38 0 2 -6 -5.25 0 1 6 0 2 -2 -4 0 0 0 0 2 -4 -1 0 1 -1 0 2 -6 -11.25 0 0 0 0 2 -5 -4.38 0 1 -4.38 0 2 -6 -2.25 0 1 6 0 2 -12 -10.5 0 1 12 0 2 -9 -18 0 0 0 0 2 -10 -20 0 0 0 0 2 -4 -4.5 0 0 0 0 2 -9 -2.25 0 1 -2.25 0 2 -4 -6 0 0 0 0 2 -8 -10 0 1 -10 0 2 -5 -5 0 1 -5 0 2 -5 -8.75 0 0 0 0 2 -8 -6 0 1 -6 0 2 -10 -13.75 0 0 0 0 2 -2 -2.5 0 0 0 0 2 -8 -11 0 1 -11 0 2 -4 -2 0 1 4 0 2 -10 -7.5 0 1 -7.5 0 2 -22 0 10 1 22 0 2 -25 0 10 1 0 0 2 -6 -9.75 0 0 0 0 2 -12 0 5 1 12 0 2 -4 -2.5 0 1 -2.5 0 2 -8 -3 0 1 8 0 2 -10 -11.25 0 1 -11.25 0 2 -5 -10 0 1 5 0 2 -10 -15 0 0 0 0 2 -2 -3.5 0 0 0 0 2 -12 0 4 1 12 0 2 -13 0 5 0 5 0 2 -5 -3.75 0 1 5 0 2 -26 0 12 0 12 0 2 -5 -5.63 0 0 0 0 2 -8 -2 0 1 -2 0 2 -2 -3 0 0 0 0 2 -6 -9 0 0 0 0 2 -9 -7.88 0 0 0 0 2 -8 -14 0 0 0 0 2 -28 0 13 1 28 0 2 -9 -12.38 0 0 0 0 2 -8 
[... 700 deleted data rows of ra_exampleData.txt omitted (risk-aversion task example data, subjIDs 2-7) ...]
\ No newline at end of file
diff --git a/inst/extdata/rdt_exampleData.txt b/inst/extdata/rdt_exampleData.txt
deleted file mode 100644
index 79d99830..00000000
--- a/inst/extdata/rdt_exampleData.txt
+++ /dev/null
@@ -1,901 +0,0 @@
-subjID trial_number gamble_cha RT cert gain loss type_cha trial_payoff outcome happy RT_happy gamble type
[... 900 deleted data rows of rdt_exampleData.txt omitted (risky decision-task example data); the patch text breaks off mid-row at subjID 3, trial 61 ...]
63 249 0 gain 0 63 -1 2409 0 1 -3 62 safe 1211 0 220 178 mixed 0 0 -1 2409 0 0 -3 63 safe 1053 0 304 297 mixed 0 0 1 852 0 0 -3 64 risky 1740 116 0 201 loss 0 0 1 852 1 -1 -3 65 safe 1039 83 338 0 gain 0 83 2 1081 0 1 -3 66 safe 522 82 0 197 loss 0 -82 2 1081 0 -1 -3 67 safe 1987 38 123 0 gain 0 38 2 1081 0 1 -3 68 safe 475 61 0 108 loss 0 -61 2 1125 0 -1 -3 69 safe 198 43 0 128 loss 0 -43 2 1125 0 -1 -3 70 safe 4435 0 104 97 mixed 0 0 1 876 0 0 -3 71 safe 477 122 0 429 loss 0 -122 1 876 0 -1 -3 72 safe 2442 0 157 188 mixed 0 0 1 876 0 0 -3 73 risky 2927 38 91 0 gain 0 0 1 8932 1 1 -3 74 safe 641 41 0 68 loss 0 -41 1 8932 0 -1 -3 75 safe 7035 0 101 150 mixed 0 0 2 1656 0 0 -3 76 risky 4390 104 0 165 loss 0 0 2 1656 1 -1 -3 77 risky 3113 0 57 9 mixed 0 -9 3 1419 1 0 -3 78 risky 839 0 223 45 mixed 0 -45 3 1419 1 0 -3 79 safe 576 62 134 0 gain 0 62 3 1419 0 1 -3 80 safe 774 100 0 422 loss 0 -100 3 1130 0 -1 -3 81 safe 1030 57 0 190 loss 0 -57 3 1130 0 -1 -3 82 safe 713 0 58 76 mixed 0 0 2 2386 0 0 -3 83 safe 1322 0 62 63 mixed 0 0 2 2386 0 0 -3 84 risky 2147 98 501 0 gain 0 501 3 1623 1 1 -3 85 safe 1100 120 0 376 loss 0 -120 3 1623 0 -1 -3 86 safe 417 79 0 336 loss 0 -79 3 1445 0 -1 -3 87 safe 851 0 58 23 mixed 0 0 3 1445 0 0 -3 88 safe 168 0 61 124 mixed 0 0 3 1445 0 0 -3 89 safe 93 119 336 0 gain 0 119 3 1825 0 1 -3 90 safe 993 0 156 158 mixed 0 0 3 1825 0 0 -3 91 safe 299 101 178 0 gain 0 101 3 1825 0 1 -3 92 safe 1636 0 304 123 mixed 0 0 4 5059 0 0 -3 93 risky 3777 0 62 21 mixed 0 62 4 5059 1 0 -3 94 safe 525 0 299 447 mixed 0 0 3 1872 0 0 -3 95 safe 588 102 222 0 gain 0 102 3 1872 0 1 -3 96 safe 551 0 217 87 mixed 0 0 2 1089 0 0 -3 97 safe 630 42 68 0 gain 0 42 2 1089 0 1 -3 98 risky 1675 0 96 120 mixed 0 -120 3 1358 1 0 -3 99 safe 133 83 0 137 loss 0 -83 3 1358 0 -1 -3 100 safe 210 118 598 0 gain 0 118 3 1358 0 1 -3 101 safe 355 100 0 219 loss 0 -100 3 1032 0 -1 -3 102 safe 656 63 100 0 gain 0 63 3 1032 0 1 -3 103 safe 210 0 164 324 mixed 0 0 3 1032 0 0 -3 104 safe 4184 83 0 174 loss 0 -83 2 4160 0 -1 -3 105 risky 1178 56 297 0 gain 0 297 2 4160 1 1 -3 106 risky 2517 36 143 0 gain 0 143 3 2632 1 1 -3 107 safe 1485 101 283 0 gain 0 101 3 2632 0 1 -3 108 safe 388 41 0 66 loss 0 -41 3 2632 0 -1 -3 109 safe 1077 0 303 197 mixed 0 0 3 3534 0 0 -3 110 safe 330 41 0 98 loss 0 -41 3 3534 0 -1 -3 111 risky 975 59 191 0 gain 0 0 2 956 1 1 -3 112 risky 568 56 120 0 gain 0 0 2 956 1 1 -3 113 safe 397 58 0 248 loss 0 -58 2 956 0 -1 -3 114 risky 645 0 304 59 mixed 0 304 1 804 1 0 -3 115 safe 940 0 99 38 mixed 0 0 1 804 0 0 -3 116 risky 2733 102 0 281 loss 0 -281 2 1208 1 -1 -3 117 safe 1662 104 0 357 loss 0 -104 2 1208 0 -1 -3 118 risky 2275 83 197 0 gain 0 197 2 1208 1 1 -3 119 safe 846 0 58 38 mixed 0 0 2 1291 0 0 -3 120 risky 1609 81 179 0 gain 0 179 2 1291 1 1 -3 121 safe 163 103 0 316 loss 0 -103 2 1291 0 -1 -3 122 risky 787 98 416 0 gain 0 416 3 1814 1 1 -3 123 safe 237 102 0 202 loss 0 -102 3 1814 0 -1 -3 124 safe 1772 0 161 131 mixed 0 0 3 1104 0 0 -3 125 safe 303 102 248 0 gain 0 102 3 1104 0 1 -3 126 safe 654 124 0 600 loss 0 -124 3 1195 0 -1 -3 127 safe 842 81 0 156 loss 0 -81 3 1195 0 -1 -3 128 risky 1931 0 217 70 mixed 0 217 3 2163 1 0 -3 129 safe 450 0 57 52 mixed 0 0 3 2163 0 0 -3 130 risky 1135 0 159 50 mixed 0 -50 3 2163 1 0 -3 131 risky 522 79 396 0 gain 0 396 2 1997 1 1 -3 132 safe 678 42 78 0 gain 0 42 2 1997 0 1 -3 133 safe 296 0 158 236 mixed 0 0 2 1997 0 0 -3 134 risky 560 98 364 0 gain 0 364 2 1955 1 1 -3 135 safe 617 118 236 0 gain 0 118 2 1955 0 1 -3 136 safe 618 81 0 227 
loss 0 -81 2 1955 0 -1 -3 137 safe 1294 0 219 327 mixed 0 0 2 1935 0 0 -3 138 safe 407 61 0 304 loss 0 -61 2 1935 0 -1 -3 139 safe 2348 120 378 0 gain 0 120 2 1630 0 1 -3 140 safe 527 76 284 0 gain 0 76 2 1630 0 1 -3 141 safe 1267 76 137 0 gain 0 76 2 1588 0 1 -3 142 safe 19 36 0 83 loss 0 -36 2 1588 0 -1 -3 143 safe 168 82 0 256 loss 0 -82 2 1588 0 -1 -3 144 safe 540 0 99 79 mixed 0 0 3 1509 0 0 -3 145 safe 601 59 149 0 gain 0 59 3 1509 0 1 -3 146 safe 27 0 216 266 mixed 0 0 3 1509 0 0 -3 147 safe 863 61 0 217 loss 0 -61 2 1697 0 -1 -3 148 safe 326 0 218 146 mixed 0 0 2 1697 0 0 -3 149 risky 2092 101 196 0 gain 0 0 2 1563 1 1 -3 150 safe 451 124 0 265 loss 1 -124 2 1563 0 -1 -4 1 risky 1858 103 0 200 loss 0 0 0 750 1 -1 -4 2 risky 579 101 498 0 gain 0 498 0 750 1 1 -4 3 safe 898 0 100 103 mixed 0 0 0 750 0 0 -4 4 safe 903 99 0 363 loss 0 -99 2 3404 0 -1 -4 5 risky 444 57 300 0 gain 0 300 2 3404 1 1 -4 6 risky 207 79 337 0 gain 0 0 2 2347 1 1 -4 7 safe 584 36 0 82 loss 0 -36 2 2347 0 -1 -4 8 risky 261 42 99 0 gain 0 0 0 1647 1 1 -4 9 risky 77 0 220 326 mixed 0 -326 0 1647 1 0 -4 10 safe 1259 122 0 435 loss 0 -122 0 1647 0 -1 -4 11 safe 848 77 0 143 loss 0 -77 -2 1605 0 -1 -4 12 risky 93 0 303 599 mixed 0 -599 -2 1605 1 0 -4 13 safe 460 38 0 128 loss 0 -38 -2 911 0 -1 -4 14 risky 21 0 299 244 mixed 0 -244 -2 911 1 0 -4 15 risky 865 116 0 500 loss 0 -500 -2 911 1 -1 -4 16 risky 253 120 265 0 gain 0 0 0 1210 1 1 -4 17 risky 837 39 87 0 gain 0 87 0 1210 1 1 -4 18 risky 1292 56 0 215 loss 0 -215 0 1210 1 -1 -4 19 risky 904 124 0 198 loss 0 0 -3 1807 1 -1 -4 20 risky 769 124 0 294 loss 0 0 -3 1807 1 -1 -4 21 risky 1280 116 0 376 loss 0 0 0 606 1 -1 -4 22 risky 1474 64 0 102 loss 0 -102 0 606 1 -1 -4 23 risky 91 0 161 49 mixed 0 161 0 3006 1 0 -4 24 risky 558 119 430 0 gain 0 0 0 3006 1 1 -4 25 risky 393 122 377 0 gain 0 377 0 554 1 1 -4 26 risky 426 96 169 0 gain 0 169 0 554 1 1 -4 27 risky 171 98 247 0 gain 0 0 0 554 1 1 -4 28 risky 890 62 99 0 gain 0 99 1 1038 1 1 -4 29 risky 611 82 227 0 gain 0 0 1 1038 1 1 -4 30 risky 464 0 222 178 mixed 0 222 1 1038 1 0 -4 31 risky 1210 0 103 16 mixed 0 103 1 1159 1 0 -4 32 risky 41 0 300 124 mixed 0 300 1 1159 1 0 -4 33 risky 474 0 300 300 mixed 0 -300 1 1159 1 0 -4 34 safe 438 0 63 58 mixed 0 0 -1 617 0 0 -4 35 risky 437 64 0 252 loss 0 0 -1 617 1 -1 -4 36 risky 507 0 222 260 mixed 0 -260 0 315 1 0 -4 37 risky 1028 63 0 121 loss 0 0 0 315 1 -1 -4 38 risky 420 77 195 0 gain 0 0 0 315 1 1 -4 39 risky 944 0 61 71 mixed 0 61 0 1350 1 0 -4 40 risky 623 39 0 90 loss 0 0 0 1350 1 -1 -4 41 risky 320 79 400 0 gain 0 400 0 469 1 1 -4 42 risky 477 63 187 0 gain 0 187 0 469 1 1 -4 43 risky 605 96 182 0 gain 0 182 0 469 1 1 -4 44 risky 1141 96 0 277 loss 0 -277 2 1110 1 -1 -4 45 risky 452 120 240 0 gain 0 240 2 1110 1 1 -4 46 risky 694 0 60 17 mixed 0 -17 2 1110 1 0 -4 47 risky 861 44 0 68 loss 0 0 0 815 1 -1 -4 48 risky 52 0 156 30 mixed 0 156 0 815 1 0 -4 49 risky 429 0 157 133 mixed 0 -133 0 815 1 0 -4 50 risky 579 0 61 15 mixed 0 61 0 1373 1 0 -4 51 risky 533 120 508 0 gain 0 508 0 1373 1 1 -4 52 risky 112 101 277 0 gain 0 277 0 1373 1 1 -4 53 risky 642 123 596 0 gain 0 0 2 907 1 1 -4 54 safe 707 0 164 237 mixed 0 0 2 907 0 0 -4 55 risky 1610 0 98 80 mixed 0 98 0 497 1 0 -4 56 safe 1042 63 0 152 loss 0 -63 0 497 0 -1 -4 57 safe 1029 79 0 201 loss 0 -79 0 497 0 -1 -4 58 risky 385 0 162 82 mixed 0 -82 0 424 1 0 -4 59 risky 399 38 201 0 gain 0 201 0 424 1 1 -4 60 risky 549 0 301 358 mixed 0 301 0 976 1 0 -4 61 risky 453 79 251 0 gain 0 251 0 976 1 1 -4 62 risky 662 
56 111 0 gain 0 111 2 894 1 1 -4 63 risky 613 103 360 0 gain 0 0 2 894 1 1 -4 64 safe 891 36 0 172 loss 0 -36 2 894 0 -1 -4 65 risky 1229 76 0 179 loss 0 -179 0 1002 1 -1 -4 66 safe 1915 0 98 149 mixed 0 0 0 1002 0 0 -4 67 risky 928 123 0 243 loss 0 0 0 1002 1 -1 -4 68 risky 883 0 159 158 mixed 0 159 1 571 1 0 -4 69 risky 411 37 127 0 gain 0 0 1 571 1 1 -4 70 risky 1488 83 0 249 loss 0 0 0 1771 1 -1 -4 71 safe 436 37 0 203 loss 0 -37 0 1771 0 -1 -4 72 risky 630 0 156 106 mixed 0 -106 0 1771 1 0 -4 73 risky 497 0 223 144 mixed 0 -144 0 593 1 0 -4 74 risky 477 0 221 117 mixed 0 221 0 593 1 0 -4 75 risky 447 81 173 0 gain 0 0 0 593 1 1 -4 76 risky 491 124 299 0 gain 0 0 0 448 1 1 -4 77 risky 3490 0 101 67 mixed 0 -67 0 448 1 0 -4 78 risky 603 0 57 34 mixed 0 -34 0 808 1 0 -4 79 risky 1740 96 0 245 loss 0 -245 0 808 1 -1 -4 80 risky 60 0 62 117 mixed 0 62 -1 2861 1 0 -4 81 risky 503 60 213 0 gain 1 0 -1 2861 1 1 -4 82 risky 184 41 108 0 gain 0 0 -1 1771 1 1 -4 83 safe 1134 57 0 105 loss 0 -57 -1 1771 0 -1 -4 84 risky 1143 39 0 74 loss 0 -74 -1 1771 1 -1 -4 85 safe 776 84 0 333 loss 0 -84 -2 650 0 -1 -4 86 risky 441 57 256 0 gain 0 0 -2 650 1 1 -4 87 risky 2638 77 131 0 gain 0 0 -2 2262 1 1 -4 88 risky 376 0 104 56 mixed 0 -56 -2 2262 1 0 -4 89 risky 1022 97 0 315 loss 0 -315 -3 827 1 -1 -4 90 safe 261 99 0 219 loss 0 -99 -3 827 0 -1 -4 91 risky 847 98 0 184 loss 0 0 -2 938 1 -1 -4 92 risky 1344 57 133 0 gain 0 133 -2 938 1 1 -4 93 risky 409 0 101 42 mixed 0 -42 -2 938 1 0 -4 94 risky 425 82 156 0 gain 0 0 0 1091 1 1 -4 95 risky 499 0 222 441 mixed 0 222 0 1091 1 0 -4 96 safe 951 76 147 0 gain 0 76 0 1091 0 1 -4 97 risky 82 0 300 92 mixed 0 300 1 1894 1 0 -4 98 risky 460 58 149 0 gain 0 0 1 1894 1 1 -4 99 safe 91 0 104 197 mixed 0 0 1 1894 0 0 -4 100 risky 876 56 165 0 gain 0 0 -1 1129 1 1 -4 101 risky 1097 43 168 0 gain 0 168 -1 1129 1 1 -4 102 safe 860 58 0 186 loss 0 -58 1 1686 0 -1 -4 103 safe 1226 81 0 286 loss 0 -81 1 1686 0 -1 -4 104 risky 1043 0 63 41 mixed 0 -41 0 1775 1 0 -4 105 risky 543 0 218 45 mixed 0 218 0 1775 1 0 -4 106 risky 414 0 217 65 mixed 0 -65 0 1205 1 0 -4 107 risky 408 42 67 0 gain 0 0 0 1205 1 1 -4 108 safe 412 63 0 171 loss 0 -63 0 1205 0 -1 -4 109 risky 428 0 163 191 mixed 0 163 -1 742 1 0 -4 110 risky 528 0 302 160 mixed 0 302 -1 742 1 0 -4 111 risky 368 103 197 0 gain 0 197 -1 742 1 1 -4 112 safe 565 117 0 601 loss 0 -117 3 912 0 -1 -4 113 risky 435 0 163 66 mixed 0 -66 3 912 1 0 -4 114 risky 1003 0 299 59 mixed 0 -59 -1 695 1 0 -4 115 risky 595 99 423 0 gain 0 423 -1 695 1 1 -4 116 risky 324 0 221 88 mixed 0 221 1 1538 1 0 -4 117 risky 1551 123 0 264 loss 0 0 1 1538 1 -1 -4 118 safe 1445 43 0 98 loss 0 -43 2 661 0 -1 -4 119 risky 364 102 226 0 gain 0 226 2 661 1 1 -4 120 risky 1307 80 0 134 loss 0 -134 1 1858 1 -1 -4 121 risky 394 0 302 447 mixed 0 -447 1 1858 1 0 -4 122 risky 519 0 299 196 mixed 0 299 -2 1036 1 0 -4 123 risky 1050 0 57 26 mixed 0 57 -2 1036 1 0 -4 124 safe 624 80 0 221 loss 0 -80 -2 1036 0 -1 -4 125 risky 720 118 220 0 gain 0 0 0 725 1 1 -4 126 risky 449 118 340 0 gain 0 340 0 725 1 1 -4 127 safe 669 0 96 121 mixed 0 0 0 725 0 0 -4 128 safe 337 37 0 116 loss 0 -37 0 490 0 -1 -4 129 risky 1343 0 60 86 mixed 0 60 0 490 1 0 -4 130 safe 484 80 0 397 loss 0 -80 0 533 0 -1 -4 131 risky 335 0 217 224 mixed 0 -224 0 533 1 0 -4 132 safe 368 56 0 302 loss 0 -56 0 327 0 -1 -4 133 safe 331 77 0 160 loss 0 -77 0 327 0 -1 -4 134 risky 24 43 84 0 gain 0 84 0 327 1 1 -4 135 safe 389 97 0 422 loss 0 -97 0 327 0 -1 -4 136 safe 781 60 0 131 loss 0 -60 0 327 0 -1 
-4 137 safe 250 121 0 220 loss 0 -121 0 327 0 -1 -4 138 risky 353 58 122 0 gain 0 0 0 480 1 1 -4 139 risky 447 99 0 170 loss 0 -170 0 480 1 -1 -4 140 risky 266 123 198 0 gain 0 0 0 480 1 1 -4 141 safe 643 38 0 145 loss 0 -38 0 527 0 -1 -4 142 risky 321 79 289 0 gain 0 0 0 527 1 1 -4 143 risky 370 40 75 0 gain 0 0 0 527 1 1 -4 144 risky 347 0 62 53 mixed 0 62 -2 2527 1 0 -4 145 risky 468 103 313 0 gain 0 0 -2 2527 1 1 -4 146 risky 743 0 162 323 mixed 0 -323 0 1309 1 0 -4 147 risky 2941 40 141 0 gain 0 141 0 1309 1 1 -4 148 risky 1030 0 103 27 mixed 0 103 0 446 1 0 -4 149 safe 611 99 0 503 loss 0 -99 0 446 0 -1 -4 150 safe 1960 122 0 334 loss 0 -122 0 446 0 -1 -5 1 risky 1413 103 0 501 loss 0 0 0 2372 1 -1 -5 2 safe 288 41 0 85 loss 0 -41 0 2372 0 -1 -5 3 safe 915 79 0 291 loss 0 -79 0 2372 0 -1 -5 4 risky 990 80 145 0 gain 0 145 0 1917 1 1 -5 5 risky 704 0 221 224 mixed 0 -224 0 1917 1 0 -5 6 risky 141 0 303 596 mixed 0 -596 -1 1218 1 0 -5 7 risky 434 0 157 44 mixed 0 -44 -1 1218 1 0 -5 8 safe 734 36 86 0 gain 0 36 -1 1218 0 1 -5 9 risky 750 84 0 141 loss 0 0 -1 1715 1 -1 -5 10 safe 844 63 169 0 gain 0 63 -1 1715 0 1 -5 11 risky 435 0 220 149 mixed 0 220 -1 1715 1 0 -5 12 risky 640 0 303 200 mixed 0 -200 1 1348 1 0 -5 13 risky 1055 0 161 31 mixed 0 161 1 1348 1 0 -5 14 risky 539 84 226 0 gain 0 0 1 1814 1 1 -5 15 risky 149 0 100 48 mixed 0 -48 1 1814 1 0 -5 16 safe 772 0 158 159 mixed 0 0 1 1814 0 0 -5 17 risky 890 123 0 238 loss 0 0 0 1531 1 -1 -5 18 risky 636 0 301 244 mixed 0 301 0 1531 1 0 -5 19 risky 1118 0 164 190 mixed 0 164 0 1531 1 0 -5 20 risky 220 0 104 41 mixed 0 104 2 1240 1 0 -5 21 safe 849 0 63 70 mixed 0 0 2 1240 0 0 -5 22 risky 885 60 0 171 loss 0 0 1 1120 1 -1 -5 23 risky 222 38 200 0 gain 0 0 1 1120 1 1 -5 24 risky 590 0 220 41 mixed 0 -41 1 1120 1 0 -5 25 risky 816 76 177 0 gain 0 0 0 2236 1 1 -5 26 safe 579 0 223 182 mixed 0 0 0 2236 0 0 -5 27 risky 656 84 287 0 gain 0 287 0 1454 1 1 -5 28 safe 864 97 0 361 loss 0 -97 0 1454 0 -1 -5 29 risky 970 37 0 76 loss 0 0 -1 1625 1 -1 -5 30 risky 1232 123 0 221 loss 0 0 -1 1625 1 -1 -5 31 safe 821 38 0 164 loss 0 -38 3 1293 0 -1 -5 32 safe 938 39 0 203 loss 0 -39 3 1293 0 -1 -5 33 safe 789 123 0 503 loss 0 -123 3 1293 0 -1 -5 34 risky 931 0 60 88 mixed 0 60 -1 1131 1 0 -5 35 risky 430 60 118 0 gain 0 0 -1 1131 1 1 -5 36 safe 1011 116 0 340 loss 0 -116 -1 1131 0 -1 -5 37 safe 895 79 0 173 loss 0 -79 1 1222 0 -1 -5 38 risky 1029 101 0 314 loss 0 -314 1 1222 1 -1 -5 39 risky 790 0 224 110 mixed 0 224 1 1222 1 0 -5 40 risky 1217 60 0 120 loss 0 -120 -1 1166 1 -1 -5 41 risky 676 0 218 261 mixed 0 218 -1 1166 1 0 -5 42 risky 725 43 71 0 gain 0 71 -1 1166 1 1 -5 43 safe 839 98 0 284 loss 0 -98 2 1245 0 -1 -5 44 risky 1430 58 0 187 loss 0 0 2 1245 1 -1 -5 45 risky 786 98 423 0 gain 0 0 2 1245 1 1 -5 46 safe 997 0 156 130 mixed 0 0 1 2042 0 0 -5 47 risky 601 43 165 0 gain 0 0 1 2042 1 1 -5 48 risky 18 59 215 0 gain 0 215 1 2042 1 1 -5 49 risky 1843 120 0 301 loss 0 0 3 2216 1 -1 -5 50 safe 976 0 63 42 mixed 0 0 3 2216 0 0 -5 51 risky 742 42 116 0 gain 0 0 0 1216 1 1 -5 52 safe 1086 39 0 112 loss 0 -39 0 1216 0 -1 -5 53 risky 942 43 0 64 loss 0 -64 0 1216 1 -1 -5 54 risky 1535 77 160 0 gain 0 160 -1 573 1 1 -5 55 risky 1100 37 128 0 gain 0 128 -1 573 1 1 -5 56 risky 1199 121 265 0 gain 0 265 3 1015 1 1 -5 57 risky 800 123 205 0 gain 0 0 3 1015 1 1 -5 58 safe 1077 0 162 322 mixed 0 0 1 1827 0 0 -5 59 risky 705 116 335 0 gain 0 335 1 1827 1 1 -5 60 risky 817 58 252 0 gain 0 252 1 1827 1 1 -5 61 safe 1068 0 98 62 mixed 0 0 3 1060 0 0 -5 62 
safe 1034 117 0 428 loss 0 -117 3 1060 0 -1 -5 63 risky 1105 0 103 151 mixed 0 -151 3 1060 1 0 -5 64 risky 1023 124 293 0 gain 0 293 -2 707 1 1 -5 65 safe 857 0 60 61 mixed 0 0 -2 707 0 0 -5 66 safe 1198 0 300 298 mixed 0 0 0 1657 0 0 -5 67 risky 917 100 497 0 gain 0 497 0 1657 1 1 -5 68 safe 839 63 0 301 loss 0 -63 0 1657 0 -1 -5 69 safe 1020 119 0 379 loss 0 -119 0 662 0 -1 -5 70 risky 830 78 335 0 gain 0 335 0 662 1 1 -5 71 risky 1145 0 164 83 mixed 0 164 1 1074 1 0 -5 72 risky 946 0 102 23 mixed 0 -23 1 1074 1 0 -5 73 risky 1198 60 0 129 loss 0 0 0 947 1 -1 -5 74 risky 189 0 301 156 mixed 0 -156 0 947 1 0 -5 75 risky 889 0 63 28 mixed 0 63 0 947 1 0 -5 76 risky 607 83 138 0 gain 0 138 1 1973 1 1 -5 77 safe 1003 99 0 222 loss 0 -99 1 1973 0 -1 -5 78 risky 875 80 400 0 gain 0 0 1 1973 1 1 -5 79 risky 1292 100 0 179 loss 0 0 1 2038 1 -1 -5 80 safe 1613 80 0 164 loss 0 -80 1 2038 0 -1 -5 81 risky 623 0 218 66 mixed 0 218 1 2038 1 0 -5 82 safe 1006 57 0 110 loss 0 -57 1 543 0 -1 -5 83 risky 849 116 428 0 gain 0 0 1 543 1 1 -5 84 safe 1007 43 0 130 loss 0 -43 1 2127 0 -1 -5 85 risky 1154 39 0 96 loss 0 0 1 2127 1 -1 -5 86 risky 954 61 190 0 gain 0 0 1 2127 1 1 -5 87 safe 832 96 0 244 loss 0 -96 1 720 0 -1 -5 88 risky 688 59 148 0 gain 0 0 1 720 1 1 -5 89 risky 835 44 143 0 gain 0 0 1 720 1 1 -5 90 safe 686 0 57 118 mixed 0 0 -1 839 0 0 -5 91 risky 600 96 170 0 gain 0 0 -1 839 1 1 -5 92 risky 717 0 62 19 mixed 0 -19 0 1046 1 0 -5 93 safe 885 0 158 240 mixed 0 0 0 1046 0 0 -5 94 risky 840 40 68 0 gain 0 68 0 1046 1 1 -5 95 risky 905 120 0 205 loss 0 0 1 1086 1 -1 -5 96 risky 766 104 222 0 gain 0 0 1 1086 1 1 -5 97 safe 763 60 0 249 loss 0 -60 1 1344 0 -1 -5 98 safe 939 0 217 443 mixed 0 0 1 1344 0 0 -5 99 risky 815 0 56 9 mixed 0 56 1 1344 1 0 -5 100 safe 631 76 0 403 loss 0 -76 1 1411 0 -1 -5 101 risky 756 0 102 199 mixed 0 102 1 1411 1 0 -5 102 risky 1026 0 222 331 mixed 0 -331 1 1411 1 0 -5 103 safe 1070 104 0 419 loss 0 -104 -2 1054 0 -1 -5 104 risky 811 62 132 0 gain 0 132 -2 1054 1 1 -5 105 risky 1091 64 303 0 gain 0 303 2 1088 1 1 -5 106 risky 667 0 298 116 mixed 0 298 2 1088 1 0 -5 107 safe 880 80 0 194 loss 0 -80 1 587 0 -1 -5 108 safe 1294 0 97 81 mixed 0 0 1 587 0 0 -5 109 risky 689 0 100 31 mixed 0 -31 -1 808 1 0 -5 110 risky 820 57 104 0 gain 0 0 -1 808 1 1 -5 111 safe 639 0 61 46 mixed 0 0 -2 961 0 0 -5 112 risky 783 118 377 0 gain 0 0 -2 961 1 1 -5 113 risky 816 101 358 0 gain 0 358 -1 862 1 1 -5 114 risky 747 84 0 136 loss 0 -136 -1 862 1 -1 -5 115 safe 954 76 0 253 loss 0 -76 -1 862 0 -1 -5 116 risky 1464 122 242 0 gain 0 0 -3 1262 1 1 -5 117 risky 845 82 194 0 gain 0 0 -3 1262 1 1 -5 118 safe 907 58 0 214 loss 0 -58 -3 1262 0 -1 -5 119 safe 973 96 0 164 loss 0 -96 -4 1169 0 -1 -5 120 risky 801 122 503 0 gain 0 0 -4 1169 1 1 -5 121 risky 839 0 157 64 mixed 0 157 -4 1169 1 0 -5 122 safe 1103 36 0 141 loss 0 -36 0 1215 0 -1 -5 123 risky 875 0 100 119 mixed 0 -119 0 1215 1 0 -5 124 risky 873 41 98 0 gain 0 0 -1 796 1 1 -5 125 risky 803 96 283 0 gain 0 0 -1 796 1 1 -5 126 risky 734 103 250 0 gain 0 0 -1 796 1 1 -5 127 safe 994 83 0 221 loss 0 -83 -1 576 0 -1 -5 128 risky 574 80 251 0 gain 0 0 -1 576 1 1 -5 129 risky 1024 40 0 79 loss 0 0 -2 929 1 -1 -5 130 risky 582 97 314 0 gain 0 314 -2 929 1 1 -5 131 risky 594 61 107 0 gain 0 107 -2 929 1 1 -5 132 risky 688 100 198 0 gain 0 0 2 1097 1 1 -5 133 safe 971 0 99 99 mixed 0 0 2 1097 0 0 -5 134 risky 726 0 301 90 mixed 0 301 0 734 1 0 -5 135 risky 1385 0 163 109 mixed 0 -109 0 734 1 0 -5 136 risky 753 122 0 269 loss 1 0 -1 970 1 -1 -5 
137 risky 844 60 0 98 loss 0 -98 -1 970 1 -1 -5 138 risky 832 103 184 0 gain 0 0 -1 731 1 1 -5 139 safe 737 97 0 201 loss 0 -97 -1 731 0 -1 -5 140 risky 770 0 304 60 mixed 0 -60 -2 890 1 0 -5 141 safe 832 64 0 151 loss 0 -64 -2 890 0 -1 -5 142 safe 740 77 0 334 loss 0 -77 -3 1066 0 -1 -5 143 risky 696 0 61 24 mixed 0 61 -3 1066 1 0 -5 144 risky 818 0 219 84 mixed 0 219 -1 680 1 0 -5 145 safe 707 120 0 600 loss 0 -120 -1 680 0 -1 -5 146 risky 910 0 303 359 mixed 0 303 -1 489 1 0 -5 147 risky 651 122 598 0 gain 0 598 -1 489 1 1 -5 148 safe 967 0 301 454 mixed 0 0 -1 489 0 0 -5 149 risky 826 119 216 0 gain 0 216 2 808 1 1 -5 150 risky 876 41 80 0 gain 0 80 2 808 1 1 -6 1 risky 8683 0 63 10 mixed 0 -10 0 38019 1 0 -6 2 risky 6548 119 236 0 gain 0 0 0 38019 1 1 -6 3 risky 11388 99 312 0 gain 0 312 -1 2893 1 1 -6 4 risky 4920 0 302 56 mixed 0 -56 -1 2893 1 0 -6 5 risky 5889 0 221 48 mixed 0 -48 -1 1767 1 0 -6 6 safe 1711 0 299 356 mixed 0 0 -1 1767 0 0 -6 7 safe 2687 63 120 0 gain 0 63 0 3312 0 1 -6 8 risky 2482 77 340 0 gain 0 0 0 3312 1 1 -6 9 risky 1313 79 222 0 gain 0 222 -1 4237 1 1 -6 10 risky 3219 43 0 70 loss 0 -70 -1 4237 1 -1 -6 11 risky 840 101 416 0 gain 0 0 -1 2893 1 1 -6 12 risky 2690 0 304 158 mixed 0 304 -1 2893 1 0 -6 13 risky 5480 57 170 0 gain 0 170 1 3609 1 1 -6 14 risky 6195 117 506 0 gain 0 0 1 3609 1 1 -6 15 safe 1799 44 0 204 loss 0 -44 1 3609 0 -1 -6 16 risky 3339 101 0 182 loss 0 -182 0 1761 1 -1 -6 17 safe 1679 37 0 108 loss 0 -37 0 1761 0 -1 -6 18 safe 2923 121 0 378 loss 0 -121 0 1761 0 -1 -6 19 safe 3155 0 64 76 mixed 0 0 -2 2754 0 0 -6 20 safe 4114 58 0 192 loss 0 -58 -2 2754 0 -1 -6 21 risky 3842 59 191 0 gain 0 191 -2 2754 1 1 -6 22 risky 1839 83 396 0 gain 0 396 0 2197 1 1 -6 23 risky 3984 0 101 84 mixed 0 101 0 2197 1 0 -6 24 safe 1262 57 0 254 loss 0 -57 0 2197 0 -1 -6 25 safe 2746 42 0 147 loss 0 -42 0 2171 0 -1 -6 26 risky 2941 0 220 111 mixed 0 -111 0 2171 1 0 -6 27 risky 3376 37 130 0 gain 0 130 0 2171 1 1 -6 28 risky 1224 116 602 0 gain 0 0 1 1356 1 1 -6 29 risky 2118 56 212 0 gain 0 212 1 1356 1 1 -6 30 risky 3519 100 0 165 loss 0 0 1 1356 1 -1 -6 31 safe 1538 97 0 501 loss 0 -97 2 959 0 -1 -6 32 risky 1818 119 378 0 gain 0 0 2 959 1 1 -6 33 safe 2836 98 0 359 loss 0 -98 -1 1881 0 -1 -6 34 risky 6887 83 0 179 loss 0 -179 -1 1881 1 -1 -6 35 risky 972 36 114 0 gain 0 0 -1 1176 1 1 -6 36 risky 4252 0 97 67 mixed 0 97 -1 1176 1 0 -6 37 safe 2055 0 156 164 mixed 0 0 0 1240 0 0 -6 38 risky 1016 0 158 35 mixed 0 158 0 1240 1 0 -6 39 safe 1023 58 0 217 loss 0 -58 0 1240 0 -1 -6 40 risky 3274 44 0 71 loss 0 -71 0 3700 1 -1 -6 41 risky 1661 102 276 0 gain 1 276 0 3700 1 1 -6 42 risky 2404 0 304 247 mixed 0 304 1 884 1 0 -6 43 safe 5414 38 66 0 gain 0 38 1 884 0 1 -6 44 risky 4757 119 263 0 gain 0 263 1 884 1 1 -6 45 risky 2906 100 0 223 loss 0 0 2 5593 1 -1 -6 46 risky 2634 0 104 44 mixed 0 104 2 5593 1 0 -6 47 risky 900 43 142 0 gain 0 0 2 5593 1 1 -6 48 risky 804 0 296 94 mixed 0 296 1 1667 1 0 -6 49 risky 7162 0 224 264 mixed 0 224 1 1667 1 0 -6 50 safe 1344 80 0 252 loss 0 -80 2 1556 0 -1 -6 51 risky 2165 103 356 0 gain 0 0 2 1556 1 1 -6 52 risky 2000 44 197 0 gain 0 0 -1 836 1 1 -6 53 safe 2276 101 198 0 gain 0 101 -1 836 0 1 -6 54 risky 4122 38 103 0 gain 0 0 -1 836 1 1 -6 55 risky 3216 117 0 215 loss 0 0 0 2517 1 -1 -6 56 risky 2745 42 88 0 gain 0 0 0 2517 1 1 -6 57 risky 4050 80 0 156 loss 0 0 0 2517 1 -1 -6 58 safe 4801 98 165 0 gain 0 98 1 1145 0 1 -6 59 risky 4523 104 502 0 gain 0 502 1 1145 1 1 -6 60 risky 2626 0 100 23 mixed 0 100 1 1145 1 0 -6 61 
safe 2020 0 59 89 mixed 0 0 2 927 0 0 -6 62 safe 1931 0 96 117 mixed 0 0 2 927 0 0 -6 63 safe 1416 77 0 334 loss 0 -77 1 1456 0 -1 -6 64 safe 1401 97 0 282 loss 0 -97 1 1456 0 -1 -6 65 risky 3129 102 0 198 loss 0 -198 1 1456 1 -1 -6 66 safe 1867 122 0 505 loss 0 -122 0 3611 0 -1 -6 67 risky 3273 0 104 98 mixed 0 104 0 3611 1 0 -6 68 risky 2041 99 218 0 gain 0 218 2 2511 1 1 -6 69 risky 1201 0 58 29 mixed 0 -29 2 2511 1 0 -6 70 safe 5182 63 0 134 loss 0 -63 1 1679 0 -1 -6 71 safe 3177 124 218 0 gain 0 124 1 1679 0 1 -6 72 safe 584 77 0 287 loss 0 -77 1 1261 0 -1 -6 73 risky 2418 82 291 0 gain 0 0 1 1261 1 1 -6 74 risky 3964 44 0 102 loss 0 0 0 1136 1 -1 -6 75 risky 3794 82 0 148 loss 0 0 0 1136 1 -1 -6 76 safe 2515 36 0 89 loss 0 -36 2 1383 0 -1 -6 77 risky 265 117 435 0 gain 0 0 2 1383 1 1 -6 78 risky 6888 57 0 164 loss 0 -164 2 1383 1 -1 -6 79 safe 2396 0 57 63 mixed 0 0 -1 1026 0 0 -6 80 risky 1278 0 297 121 mixed 0 -121 -1 1026 1 0 -6 81 risky 5505 0 296 297 mixed 0 -297 -1 1026 1 0 -6 82 risky 3784 0 61 36 mixed 0 61 0 1676 1 0 -6 83 risky 1794 77 0 136 loss 0 0 0 1676 1 -1 -6 84 risky 2456 119 0 264 loss 0 -264 1 3992 1 -1 -6 85 safe 4586 101 0 313 loss 0 -101 1 3992 0 -1 -6 86 risky 1296 96 244 0 gain 0 0 1 3992 1 1 -6 87 safe 9041 0 98 197 mixed 0 0 1 5878 0 0 -6 88 safe 1707 103 0 252 loss 0 -103 1 5878 0 -1 -6 89 safe 1719 0 99 149 mixed 0 0 1 5878 0 0 -6 90 safe 5513 36 69 0 gain 0 36 -1 1086 0 1 -6 91 safe 2391 61 105 0 gain 0 61 -1 1086 0 1 -6 92 safe 857 64 0 304 loss 0 -64 -1 1086 0 -1 -6 93 safe 5282 118 0 299 loss 0 -118 0 4927 0 -1 -6 94 risky 6335 79 181 0 gain 0 0 0 4927 1 1 -6 95 risky 2827 123 0 198 loss 0 0 -2 1551 1 -1 -6 96 risky 1180 0 159 86 mixed 0 -86 -2 1551 1 0 -6 97 risky 1852 0 56 46 mixed 0 -46 0 7386 1 0 -6 98 risky 2467 0 220 224 mixed 0 220 0 7386 1 0 -6 99 safe 3255 99 178 0 gain 0 99 2 941 0 1 -6 100 safe 1427 59 0 102 loss 0 -59 2 941 0 -1 -6 101 safe 3837 83 0 197 loss 0 -83 2 941 0 -1 -6 102 safe 1837 0 217 436 mixed 0 0 -1 843 0 0 -6 103 risky 3683 0 220 70 mixed 0 220 -1 843 1 0 -6 104 safe 747 61 104 0 gain 0 61 1 1050 0 1 -6 105 safe 2730 0 160 190 mixed 0 0 1 1050 0 0 -6 106 safe 1027 83 144 0 gain 0 83 -1 1258 0 1 -6 107 safe 1506 0 300 603 mixed 0 0 -1 1258 0 0 -6 108 safe 2397 84 161 0 gain 0 84 0 704 0 1 -6 109 safe 852 120 0 432 loss 0 -120 0 704 0 -1 -6 110 risky 1251 64 301 0 gain 0 301 -1 1932 1 1 -6 111 risky 1520 43 166 0 gain 0 166 -1 1932 1 1 -6 112 risky 5107 39 0 77 loss 0 -77 -1 1932 1 -1 -6 113 safe 1632 123 0 597 loss 0 -123 -1 1352 0 -1 -6 114 risky 3292 81 196 0 gain 0 0 -1 1352 1 1 -6 115 safe 789 0 60 119 mixed 0 0 -1 1352 0 0 -6 116 risky 164 0 297 198 mixed 0 297 -2 1108 1 0 -6 117 risky 1374 0 64 19 mixed 0 64 -2 1108 1 0 -6 118 safe 4612 119 0 334 loss 0 -119 -2 1108 0 -1 -6 119 risky 441 0 158 134 mixed 0 158 -1 5816 1 0 -6 120 risky 224 0 56 23 mixed 0 56 -1 5816 1 0 -6 121 safe 6709 62 0 151 loss 0 -62 1 992 0 -1 -6 122 risky 1360 0 161 45 mixed 0 -45 1 992 1 0 -6 123 risky 223 0 156 68 mixed 0 -68 1 992 1 0 -6 124 safe 2146 40 0 165 loss 0 -40 -1 928 0 -1 -6 125 risky 4720 0 219 146 mixed 0 -146 -1 928 1 0 -6 126 risky 1186 0 161 102 mixed 0 161 -1 820 1 0 -6 127 safe 1117 123 201 0 gain 0 123 -1 820 0 1 -6 128 safe 3741 101 0 422 loss 0 -101 -1 820 0 -1 -6 129 risky 3362 59 0 116 loss 0 -116 -1 1713 1 -1 -6 130 risky 3963 0 216 327 mixed 0 -327 -1 1713 1 0 -6 131 risky 2562 57 146 0 gain 0 0 -1 1713 1 1 -6 132 risky 54 58 250 0 gain 0 250 -1 3406 1 1 -6 133 risky 2451 40 83 0 gain 0 0 -1 3406 1 1 -6 134 risky 56 
116 340 0 gain 0 0 -1 3406 1 1 -6 135 risky 3118 124 0 244 loss 0 -244 -1 994 1 -1 -6 136 safe 1001 0 300 453 mixed 0 0 -1 994 0 0 -6 137 safe 558 77 0 225 loss 0 -77 0 548 0 -1 -6 138 risky 2346 0 100 51 mixed 0 -51 0 548 1 0 -6 139 safe 5850 0 158 242 mixed 0 0 0 548 0 0 -6 140 risky 1415 0 222 85 mixed 0 222 -1 1771 1 0 -6 141 safe 2947 36 0 129 loss 0 -36 -1 1771 0 -1 -6 142 risky 3290 119 299 0 gain 0 299 -1 1771 1 1 -6 143 safe 6930 78 130 0 gain 0 78 1 1182 0 1 -6 144 risky 5721 59 0 110 loss 0 -110 1 1182 1 -1 -6 145 risky 641 0 221 177 mixed 0 221 1 1182 1 0 -6 146 safe 1530 0 161 323 mixed 0 0 1 1031 0 0 -6 147 risky 336 0 99 30 mixed 0 -30 1 1031 1 0 -6 148 risky 190 61 133 0 gain 0 0 1 1031 1 1 -6 149 risky 26 76 255 0 gain 0 255 -2 823 1 1 -6 150 safe 1139 80 0 396 loss 0 -80 -2 823 0 -1 diff --git a/inst/extdata/ts_exampleData.txt b/inst/extdata/ts_exampleData.txt deleted file mode 100644 index 648f94b9..00000000 --- a/inst/extdata/ts_exampleData.txt +++ /dev/null @@ -1,2191 +0,0 @@ -subjID trial level1_choice level2_choice reward A1prob A2prob B1prob B2prob -1 2 1 4 1 0.73174 0.44094 0.28525 0.42124 -1 3 1 1 1 0.72582 0.3864 0.30663 0.39319 -1 4 2 1 1 0.7296 0.41459 0.30549 0.34948 -1 5 1 3 0 0.77339 0.40618 0.31232 0.3926 -1 6 1 1 1 0.75457 0.45989 0.30146 0.39908 -1 7 1 1 1 0.799 0.47671 0.30695 0.4193 -1 8 1 3 1 0.8 0.4705 0.28921 0.43012 -1 9 1 4 1 0.8 0.4414 0.32746 0.40748 -1 10 2 4 0 0.79121 0.44951 0.34192 0.4238 -1 11 2 1 0 0.8 0.45063 0.30527 0.41502 -1 12 1 3 0 0.8 0.46023 0.30255 0.43582 -1 13 1 2 0 0.7713 0.45539 0.3145 0.41748 -1 14 2 1 1 0.77967 0.46743 0.33255 0.41147 -1 15 2 4 1 0.8 0.44997 0.33142 0.43247 -1 16 1 1 1 0.8 0.46545 0.38953 0.40187 -1 17 2 4 0 0.78989 0.43383 0.44462 0.39286 -1 18 1 1 1 0.8 0.45304 0.45707 0.41177 -1 19 1 1 1 0.8 0.46451 0.4644 0.35639 -1 20 1 1 1 0.8 0.46125 0.49334 0.33543 -1 21 1 1 0 0.8 0.49285 0.47484 0.36058 -1 22 1 4 0 0.8 0.49623 0.48841 0.34768 -1 23 2 3 1 0.77469 0.54065 0.50539 0.32396 -1 24 2 2 1 0.77481 0.58668 0.50524 0.32207 -1 25 2 3 1 0.78178 0.62035 0.46226 0.32988 -1 26 2 3 1 0.7996 0.59698 0.5076 0.37398 -1 27 2 3 0 0.8 0.61101 0.51855 0.37097 -1 28 2 4 0 0.8 0.57941 0.49362 0.33811 -1 29 1 1 1 0.75907 0.58061 0.49262 0.34061 -1 30 1 1 1 0.78157 0.60034 0.47932 0.32465 -1 31 1 4 1 0.73941 0.57595 0.41336 0.31351 -1 32 1 1 1 0.78407 0.57293 0.40238 0.31508 -1 33 1 1 1 0.7673 0.55497 0.44794 0.32404 -1 34 1 1 1 0.74815 0.57301 0.45619 0.30755 -1 35 1 1 1 0.76077 0.55076 0.45351 0.23356 -1 36 1 1 0 0.78983 0.53785 0.45 0.2218 -1 37 1 4 0 0.79931 0.53644 0.43941 0.25251 -1 38 1 2 0 0.78409 0.52744 0.44277 0.25328 -1 39 2 3 0 0.79235 0.54545 0.42458 0.28172 -1 40 2 4 1 0.7884 0.53537 0.40774 0.30555 -1 41 2 4 0 0.8 0.5217 0.44137 0.30486 -1 42 1 3 0 0.7987 0.53313 0.44258 0.29581 -1 43 1 1 0 0.75319 0.5575 0.46962 0.29889 -1 44 1 2 0 0.75826 0.57211 0.49623 0.34481 -1 45 2 4 0 0.8 0.59358 0.50784 0.33974 -1 46 1 2 1 0.8 0.58261 0.49178 0.31495 -1 47 1 2 0 0.76387 0.51143 0.50769 0.34591 -1 48 1 3 1 0.7373 0.55849 0.4958 0.34391 -1 49 2 3 1 0.71163 0.55437 0.50188 0.37737 -1 50 2 3 1 0.7274 0.55684 0.49608 0.42051 -1 51 2 3 1 0.74133 0.51026 0.50806 0.39224 -1 52 2 3 0 0.78899 0.52159 0.53676 0.39005 -1 53 2 4 0 0.8 0.5142 0.57107 0.33701 -1 54 1 3 1 0.8 0.55215 0.56694 0.31545 -1 55 1 2 1 0.8 0.53609 0.53305 0.30683 -1 56 1 1 1 0.8 0.51736 0.51624 0.29661 -1 57 1 1 1 0.8 0.55649 0.57046 0.30073 -1 58 1 4 0 0.77863 0.54926 0.57542 0.31415 -1 59 1 3 0 0.78765 0.57095 0.5805 0.28316 -1 60 1 1 0 0.7736 0.54228 
0.58221 0.23798 -1 61 1 2 0 0.8 0.55273 0.52453 0.2241 -1 62 2 2 1 0.77377 0.54429 0.52093 0.24853 -1 63 1 2 1 0.8 0.53118 0.48452 0.22815 -1 64 1 3 0 0.8 0.5621 0.5142 0.24439 -1 65 1 3 0 0.8 0.58121 0.52545 0.24843 -1 66 1 2 1 0.8 0.59505 0.53803 0.23704 -1 67 2 4 0 0.8 0.61952 0.54213 0.20897 -1 68 1 2 1 0.8 0.5983 0.5531 0.24432 -1 69 1 2 1 0.78218 0.65305 0.57632 0.26855 -1 70 1 2 1 0.74435 0.68187 0.58155 0.30696 -1 71 1 2 1 0.75476 0.68078 0.57166 0.31697 -1 72 1 2 0 0.7518 0.67198 0.59557 0.30499 -1 73 1 1 1 0.77418 0.6968 0.58319 0.32965 -1 74 1 1 1 0.74976 0.71575 0.64715 0.2999 -1 75 1 4 1 0.76123 0.70332 0.63275 0.30766 -1 76 2 4 0 0.75946 0.70432 0.61657 0.30659 -1 77 2 1 1 0.8 0.69223 0.64135 0.32633 -1 78 1 1 1 0.8 0.67848 0.62949 0.29921 -1 79 1 1 0 0.76968 0.66689 0.64594 0.31559 -1 80 1 4 0 0.767 0.66963 0.62129 0.32788 -1 81 2 3 1 0.75012 0.63656 0.60248 0.34237 -1 82 2 3 1 0.7351 0.68337 0.63189 0.30771 -1 83 2 3 1 0.74526 0.67142 0.6594 0.30594 -1 84 2 1 1 0.76226 0.68819 0.6318 0.27628 -1 85 1 1 1 0.7758 0.73023 0.58491 0.29002 -1 86 1 1 1 0.77074 0.74821 0.58291 0.28925 -1 87 1 1 1 0.77089 0.79434 0.57504 0.32894 -1 88 1 1 1 0.74567 0.8 0.55285 0.30923 -1 89 1 1 1 0.7727 0.8 0.59163 0.31176 -1 90 1 1 1 0.79157 0.8 0.5741 0.33049 -1 91 1 1 1 0.8 0.8 0.56745 0.33548 -1 92 1 3 0 0.8 0.77512 0.59173 0.36604 -1 93 1 1 0 0.77964 0.77689 0.65552 0.29529 -1 94 1 4 0 0.72323 0.77346 0.68053 0.28964 -1 95 1 4 0 0.7587 0.79182 0.68303 0.28661 -1 96 1 2 1 0.76904 0.78153 0.69918 0.25219 -1 97 1 2 1 0.77612 0.8 0.7122 0.27558 -1 98 1 4 1 0.79077 0.79734 0.71788 0.28339 -1 99 2 4 1 0.76885 0.778 0.73227 0.29194 -1 100 2 2 0 0.72235 0.76099 0.72207 0.28469 -1 101 2 1 1 0.75343 0.75863 0.68128 0.29834 -1 102 1 1 1 0.77836 0.75896 0.6992 0.29074 -1 103 1 1 0 0.76782 0.74809 0.67502 0.27929 -1 104 2 4 0 0.76299 0.79317 0.66158 0.31297 -1 105 2 3 1 0.76924 0.8 0.64813 0.30434 -1 106 2 1 0 0.79236 0.76987 0.63234 0.29248 -1 107 2 3 0 0.76225 0.74234 0.62737 0.34844 -1 108 1 2 1 0.75963 0.71965 0.63631 0.31392 -1 109 1 2 0 0.78157 0.65906 0.63594 0.29344 -1 110 1 4 0 0.8 0.6691 0.63189 0.33999 -1 111 2 3 1 0.76426 0.64471 0.60207 0.27577 -1 112 2 2 1 0.74667 0.66462 0.62046 0.26335 -1 113 1 3 1 0.78458 0.63884 0.64195 0.27218 -1 114 2 3 0 0.79243 0.63824 0.63688 0.27592 -1 115 1 2 1 0.79322 0.65028 0.62034 0.25584 -1 116 1 4 1 0.79914 0.66745 0.60886 0.25548 -1 117 2 3 1 0.79739 0.61932 0.61802 0.28086 -1 118 2 4 0 0.79022 0.61075 0.61969 0.26407 -1 119 2 3 1 0.8 0.62074 0.62673 0.27659 -1 120 2 1 1 0.8 0.62032 0.57944 0.28841 -1 121 1 4 1 0.79253 0.61165 0.555 0.26186 -1 122 2 4 0 0.8 0.62946 0.54182 0.25526 -1 123 1 3 1 0.79597 0.60834 0.5357 0.2 -1 124 2 3 0 0.78078 0.60309 0.55323 0.22367 -1 125 1 1 1 0.78059 0.59006 0.5389 0.20545 -1 126 1 1 1 0.7415 0.5477 0.53843 0.2 -1 127 1 1 1 0.72498 0.55081 0.54774 0.2 -1 128 1 3 0 0.7273 0.53482 0.54397 0.23411 -1 129 1 1 1 0.6983 0.53396 0.57112 0.26527 -1 130 1 3 0 0.67184 0.55217 0.54923 0.26093 -1 131 1 1 1 0.64299 0.4833 0.56131 0.27607 -1 132 1 4 0 0.64678 0.48409 0.55659 0.26744 -1 133 1 1 1 0.66958 0.48672 0.55672 0.28704 -1 134 1 4 0 0.71353 0.43812 0.54296 0.26765 -1 135 1 1 1 0.72913 0.45831 0.55595 0.26157 -1 136 1 3 1 0.71214 0.40894 0.57912 0.27759 -1 137 2 3 1 0.72246 0.3716 0.5666 0.25731 -1 138 2 3 0 0.70016 0.33562 0.53811 0.26686 -1 139 1 1 0 0.68348 0.29021 0.5032 0.2907 -1 141 1 2 1 0.70413 0.24533 0.53268 0.31855 -1 142 1 2 0 0.74585 0.23758 0.54789 0.32516 -1 143 2 3 0 0.75878 0.20683 0.54172 0.32643 -1 144 1 1 1 
0.75508 0.2 0.54123 0.33066 -1 145 1 1 1 0.75405 0.2 0.50283 0.33762 -1 146 1 4 0 0.72616 0.21818 0.51489 0.34734 -1 147 1 1 1 0.72165 0.2146 0.52902 0.33863 -1 148 1 1 1 0.76338 0.22901 0.53995 0.32508 -1 149 1 3 0 0.8 0.24977 0.55147 0.34688 -1 150 1 4 1 0.8 0.22491 0.55515 0.38301 -1 151 2 4 0 0.76821 0.26234 0.54065 0.37305 -1 152 2 3 0 0.77307 0.22488 0.58349 0.37869 -1 153 1 1 1 0.77173 0.21431 0.53551 0.42413 -1 154 1 1 0 0.75927 0.20014 0.50704 0.42257 -1 155 1 3 0 0.75921 0.21264 0.50199 0.38167 -1 156 1 2 0 0.74445 0.22054 0.51196 0.33042 -1 157 2 1 0 0.72395 0.21222 0.48676 0.33988 -1 158 2 1 0 0.71999 0.2298 0.51039 0.3507 -1 159 2 4 1 0.72939 0.2308 0.54111 0.32357 -1 160 2 2 0 0.69386 0.21052 0.54663 0.27117 -1 161 2 4 1 0.69174 0.2 0.53472 0.28176 -1 162 2 4 0 0.71402 0.2 0.59491 0.26687 -1 163 2 3 0 0.71077 0.2 0.5787 0.29751 -1 164 1 4 0 0.70963 0.2 0.60455 0.28655 -1 165 1 1 1 0.73785 0.2 0.60482 0.27845 -1 166 1 1 1 0.75026 0.2 0.60278 0.29223 -1 167 1 1 1 0.78057 0.2 0.59516 0.29242 -1 168 1 1 1 0.7938 0.20923 0.53569 0.27625 -1 169 1 4 0 0.77124 0.25164 0.47943 0.29059 -1 170 1 3 0 0.77023 0.2788 0.50377 0.25799 -1 171 1 1 1 0.76646 0.27905 0.51914 0.26122 -1 172 1 1 1 0.74042 0.24415 0.5069 0.27107 -1 173 1 1 1 0.73021 0.27041 0.4785 0.26917 -1 174 1 4 0 0.71286 0.28303 0.4701 0.29255 -1 175 1 1 0 0.67608 0.30914 0.48553 0.27482 -1 176 1 2 0 0.72568 0.28528 0.46698 0.28983 -1 177 2 3 0 0.75068 0.32288 0.51553 0.32661 -1 178 1 3 1 0.68976 0.33437 0.57487 0.30929 -1 179 2 3 1 0.63552 0.32788 0.56683 0.28999 -1 180 2 3 0 0.65651 0.29706 0.64643 0.32216 -1 181 1 1 0 0.63992 0.28636 0.65593 0.30065 -1 182 1 1 1 0.63118 0.29203 0.61181 0.24868 -1 183 1 1 0 0.61433 0.30691 0.58943 0.26967 -1 184 1 1 1 0.64362 0.28234 0.59775 0.25273 -1 185 2 4 0 0.65589 0.2 0.63046 0.22552 -1 186 2 4 0 0.64753 0.21033 0.62343 0.23167 -1 187 2 3 1 0.6708 0.23303 0.58866 0.24963 -1 188 2 3 1 0.68793 0.2 0.59113 0.30878 -1 189 2 3 1 0.70132 0.2 0.57037 0.30299 -1 191 1 4 1 0.70615 0.23807 0.57935 0.30751 -1 192 2 4 0 0.69038 0.24958 0.56007 0.27807 -1 193 2 4 0 0.72402 0.24868 0.58419 0.29444 -1 194 1 1 1 0.74722 0.22597 0.57091 0.27845 -1 195 1 1 1 0.77007 0.25026 0.59727 0.26951 -1 196 1 3 0 0.75861 0.24017 0.58072 0.24954 -1 197 2 4 0 0.74568 0.2 0.58408 0.24979 -1 198 1 1 1 0.78681 0.21341 0.56264 0.20372 -1 199 1 1 1 0.7694 0.24506 0.54298 0.2 -1 200 1 1 1 0.8 0.22759 0.49432 0.2 -1 201 1 1 0 0.8 0.22705 0.48005 0.2179 -2 1 2 1 1 0.24366 0.21338 0.7897 0.36247 -2 3 2 1 0 0.24195 0.22465 0.7635 0.37649 -2 4 2 2 1 0.24137 0.22427 0.79877 0.3744 -2 5 2 2 0 0.24103 0.2 0.8 0.38687 -2 6 2 3 1 0.2 0.2 0.79295 0.35462 -2 7 2 3 1 0.21009 0.22935 0.79064 0.34995 -2 8 2 2 0 0.2 0.25825 0.79677 0.32497 -2 9 2 1 0 0.2 0.27439 0.77263 0.31415 -2 10 2 2 0 0.25693 0.28699 0.8 0.35165 -2 11 2 3 1 0.23686 0.27897 0.8 0.33176 -2 12 2 3 0 0.2 0.29644 0.78883 0.34925 -2 13 2 2 0 0.21085 0.29313 0.78698 0.38282 -2 14 2 4 0 0.20371 0.30914 0.78273 0.39991 -2 15 2 1 0 0.2 0.27436 0.79031 0.37668 -2 16 2 1 0 0.2 0.31162 0.783 0.38107 -2 17 2 4 1 0.2 0.33142 0.78508 0.39967 -2 18 2 3 1 0.20132 0.3441 0.79349 0.41119 -2 19 2 3 1 0.2 0.2921 0.7947 0.39435 -2 20 2 3 0 0.2 0.28001 0.8 0.38265 -2 21 2 3 0 0.23446 0.29161 0.7848 0.40374 -2 22 2 1 1 0.24324 0.30684 0.78655 0.36654 -2 23 1 3 1 0.25357 0.28896 0.8 0.36812 -2 24 1 2 0 0.247 0.31968 0.778 0.39979 -2 25 2 1 0 0.26191 0.29039 0.78188 0.42514 -2 26 2 4 0 0.24009 0.26705 0.77572 0.43339 -2 27 1 2 0 0.23637 0.27463 0.8 0.44448 -2 28 2 3 1 0.2 0.26527 0.79768 
0.43536 -2 29 2 3 0 0.2 0.2249 0.8 0.45377 -2 30 2 1 0 0.27119 0.24548 0.77507 0.47467 -2 31 1 2 0 0.25741 0.25583 0.8 0.43019 -2 32 1 1 0 0.25833 0.25345 0.7833 0.45546 -2 33 1 2 1 0.29274 0.2548 0.75592 0.48444 -2 34 1 2 0 0.24411 0.2674 0.69707 0.50089 -2 35 2 1 0 0.25087 0.29031 0.69606 0.51711 -2 36 1 1 1 0.29422 0.24655 0.7281 0.55837 -2 37 2 2 0 0.28983 0.24619 0.73075 0.64885 -2 38 1 1 0 0.28961 0.22933 0.76907 0.64365 -2 39 2 3 1 0.32305 0.2115 0.72785 0.66863 -2 40 2 3 1 0.32795 0.21391 0.75703 0.68245 -2 41 2 3 1 0.33668 0.2 0.8 0.69042 -2 42 1 4 1 0.32341 0.2 0.7744 0.76419 -2 43 2 3 1 0.2924 0.2 0.77229 0.77877 -2 44 2 2 0 0.29488 0.21148 0.8 0.77328 -2 45 2 3 1 0.32204 0.25048 0.77766 0.8 -2 46 2 2 0 0.29959 0.27915 0.78361 0.8 -2 47 2 2 1 0.30354 0.35484 0.77031 0.77172 -2 48 2 4 1 0.32089 0.33943 0.76879 0.8 -2 49 2 4 1 0.31639 0.30386 0.71735 0.8 -2 50 2 4 0 0.32926 0.34595 0.68555 0.7724 -2 51 1 1 0 0.29947 0.30318 0.6959 0.78212 -2 52 2 1 0 0.29323 0.29421 0.69798 0.8 -2 53 2 4 1 0.31145 0.28711 0.67731 0.8 -2 54 2 4 1 0.35715 0.26453 0.66623 0.8 -2 55 2 4 1 0.34242 0.25018 0.65922 0.76883 -2 56 2 4 1 0.34459 0.25371 0.68819 0.76716 -2 57 2 2 0 0.39018 0.26396 0.63748 0.78614 -2 58 1 1 1 0.3358 0.23748 0.60919 0.8 -2 59 1 1 0 0.31958 0.21064 0.63817 0.8 -2 60 1 2 0 0.29338 0.2 0.68027 0.79001 -2 61 2 1 1 0.27116 0.2 0.709 0.8 -2 62 2 3 0 0.25717 0.2 0.69624 0.77628 -2 63 2 4 1 0.27483 0.2 0.66719 0.75931 -2 64 2 4 1 0.23855 0.2 0.61004 0.74309 -2 65 2 2 1 0.21736 0.2 0.65247 0.77225 -2 66 1 1 0 0.25099 0.2 0.70211 0.74655 -2 67 2 4 1 0.2702 0.2 0.71121 0.7433 -2 68 2 2 0 0.27338 0.2358 0.65203 0.71806 -2 69 2 4 1 0.2925 0.2 0.65285 0.72883 -2 70 2 2 0 0.31246 0.22217 0.65929 0.75781 -2 71 2 1 0 0.32305 0.2 0.66168 0.75266 -2 72 2 1 0 0.28378 0.2 0.65774 0.78056 -2 73 2 2 0 0.26524 0.20141 0.59448 0.77223 -2 74 1 4 0 0.27387 0.2 0.57972 0.76982 -2 75 1 1 1 0.33482 0.2 0.5624 0.8 -2 76 1 3 1 0.30843 0.22087 0.52495 0.77129 -2 77 2 4 1 0.29104 0.24487 0.53711 0.7695 -2 78 1 2 0 0.26102 0.24152 0.50456 0.77789 -2 79 2 4 1 0.2445 0.24204 0.50356 0.75557 -2 80 2 1 1 0.26642 0.23341 0.50453 0.72099 -2 81 2 4 1 0.27563 0.23117 0.51365 0.73239 -2 82 2 2 0 0.24556 0.23887 0.49212 0.76062 -2 83 1 2 0 0.21118 0.22106 0.54552 0.79201 -2 84 1 2 1 0.2 0.26054 0.52037 0.79404 -2 85 2 4 1 0.23536 0.24661 0.57319 0.8 -2 86 2 2 0 0.23971 0.21726 0.60673 0.7575 -2 87 2 4 1 0.27447 0.21378 0.58475 0.7807 -2 88 2 1 0 0.23447 0.22887 0.53945 0.8 -2 89 2 4 1 0.23122 0.2 0.56969 0.8 -2 90 2 4 1 0.21434 0.2 0.58063 0.8 -2 91 2 4 1 0.20412 0.2 0.5776 0.77905 -2 92 2 1 0 0.23715 0.20107 0.59502 0.78801 -2 93 1 2 0 0.2 0.20172 0.56694 0.8 -2 94 2 4 1 0.2 0.23888 0.56918 0.8 -2 95 2 1 0 0.2 0.22836 0.54608 0.79578 -2 96 2 4 1 0.21792 0.22493 0.55862 0.8 -2 97 2 4 0 0.25765 0.26661 0.57298 0.76303 -2 98 2 1 1 0.25462 0.26054 0.58158 0.76424 -2 99 2 4 1 0.25058 0.2355 0.56115 0.77487 -2 100 2 4 1 0.2352 0.2 0.57613 0.77472 -2 101 2 4 0 0.24936 0.20905 0.55364 0.75352 -2 102 2 1 0 0.2433 0.2 0.55993 0.78065 -2 103 2 2 0 0.25461 0.23537 0.58316 0.7884 -2 104 2 2 0 0.25684 0.24005 0.54965 0.7952 -2 105 1 1 0 0.29907 0.2506 0.55251 0.8 -2 106 1 2 1 0.26851 0.2435 0.54227 0.8 -2 107 1 4 1 0.24851 0.22888 0.55616 0.79765 -2 108 2 4 1 0.26537 0.25165 0.56028 0.77126 -2 109 2 1 0 0.26116 0.25402 0.55846 0.73255 -2 110 2 4 0 0.2603 0.24673 0.58361 0.7276 -2 111 2 2 0 0.28591 0.22322 0.64084 0.7201 -2 112 1 4 0 0.26526 0.20484 0.6863 0.712 -2 113 1 4 1 0.26692 0.2 0.70522 0.72084 -2 114 2 4 1 0.27249 0.21392 
0.68892 0.72746 -2 115 2 1 0 0.22902 0.20045 0.74818 0.71253 -2 116 2 4 1 0.2353 0.2 0.77855 0.69805 -2 117 2 4 1 0.20838 0.2 0.78606 0.68928 -2 118 2 1 0 0.20182 0.20659 0.79165 0.67785 -2 119 2 4 1 0.21032 0.247 0.77601 0.74302 -2 120 2 4 1 0.20034 0.25251 0.8 0.70396 -2 121 2 4 1 0.2 0.24629 0.79537 0.68448 -2 122 2 4 1 0.21398 0.29466 0.75251 0.66879 -2 123 2 1 0 0.2 0.31706 0.76204 0.6732 -2 124 2 4 1 0.2 0.30489 0.7534 0.71219 -2 125 2 4 1 0.2 0.32492 0.76137 0.71172 -2 126 2 1 0 0.2 0.35076 0.7997 0.71048 -2 127 2 4 0 0.20503 0.31678 0.79524 0.70346 -2 128 2 4 1 0.20516 0.29861 0.76553 0.69496 -2 129 2 4 1 0.22588 0.30163 0.7683 0.72198 -2 130 2 2 0 0.21011 0.32075 0.77334 0.72815 -2 131 2 4 1 0.21068 0.30684 0.76088 0.73397 -2 132 2 4 1 0.2087 0.30048 0.79883 0.74999 -2 133 2 4 1 0.22202 0.30679 0.8 0.7297 -2 134 2 4 0 0.20441 0.28039 0.77104 0.6871 -2 135 2 4 0 0.2029 0.26801 0.75639 0.66139 -2 136 2 4 0 0.20636 0.2252 0.741 0.63109 -2 137 2 1 0 0.24226 0.2 0.78649 0.65203 -2 138 2 4 1 0.25766 0.2 0.7582 0.643 -2 139 1 4 0 0.29617 0.2 0.7412 0.59132 -2 140 2 4 1 0.30146 0.2 0.76005 0.61217 -2 141 2 4 0 0.27104 0.2159 0.75701 0.60006 -2 142 2 4 0 0.26798 0.24948 0.7371 0.61118 -2 143 2 4 0 0.25651 0.23851 0.73358 0.60815 -2 144 2 1 0 0.26757 0.27016 0.72062 0.64522 -2 145 2 4 1 0.28294 0.2391 0.75141 0.62282 -2 146 2 1 0 0.28259 0.23563 0.69756 0.61478 -2 147 2 4 1 0.2582 0.24803 0.70625 0.58711 -2 148 2 2 1 0.28571 0.26536 0.70991 0.60658 -2 149 2 2 0 0.29377 0.23557 0.72483 0.59885 -2 150 2 4 0 0.3194 0.25725 0.74524 0.59905 -2 151 2 4 0 0.30979 0.2444 0.74963 0.58005 -2 152 2 4 1 0.35056 0.22948 0.73684 0.58931 -2 153 2 4 0 0.34977 0.22911 0.72578 0.58484 -2 154 2 4 1 0.34519 0.21168 0.71921 0.60472 -2 155 2 4 1 0.36661 0.23326 0.72028 0.57828 -2 156 2 3 0 0.40117 0.25436 0.71302 0.56412 -2 157 2 4 1 0.40102 0.27823 0.66922 0.56995 -2 158 2 3 1 0.35642 0.26836 0.67426 0.55094 -2 159 1 1 1 0.37148 0.29016 0.67501 0.51965 -2 160 2 1 1 0.3358 0.24635 0.66468 0.50215 -2 161 1 4 0 0.35501 0.24552 0.69507 0.50197 -2 162 2 3 0 0.31346 0.23161 0.66735 0.51181 -2 163 1 2 1 0.30964 0.232 0.6475 0.53865 -2 164 2 3 0 0.30373 0.22914 0.62935 0.55306 -2 165 2 3 1 0.31736 0.22369 0.62071 0.54398 -2 166 2 4 0 0.30014 0.25322 0.61517 0.55492 -2 167 2 4 1 0.34385 0.2456 0.58311 0.5534 -2 168 2 4 0 0.3473 0.2477 0.58684 0.57142 -2 169 2 4 0 0.34401 0.27733 0.59587 0.55711 -2 170 1 1 1 0.33799 0.29646 0.62267 0.58141 -2 171 2 2 0 0.36342 0.31122 0.63888 0.60783 -2 172 2 2 0 0.34621 0.32128 0.63943 0.54333 -2 173 2 4 0 0.32895 0.34686 0.68134 0.49852 -2 174 1 1 1 0.37522 0.31644 0.61196 0.4386 -2 175 2 4 0 0.39076 0.33159 0.65 0.44614 -2 176 2 2 0 0.4096 0.34605 0.68745 0.44148 -2 177 2 2 0 0.46425 0.33531 0.66985 0.44431 -2 178 2 4 0 0.48127 0.34427 0.65921 0.43196 -2 179 2 4 0 0.46951 0.32875 0.66862 0.42214 -2 180 2 4 0 0.45978 0.3009 0.65382 0.42035 -2 181 2 1 1 0.46639 0.31441 0.66291 0.41407 -2 182 2 4 0 0.49453 0.3332 0.6395 0.40546 -2 183 2 2 0 0.48048 0.32783 0.637 0.39346 -2 184 1 1 0 0.50093 0.33951 0.60778 0.42871 -2 185 1 4 1 0.47675 0.33238 0.61487 0.43485 -2 186 2 4 0 0.46652 0.35543 0.62031 0.40333 -2 187 1 4 1 0.50299 0.34544 0.60978 0.38389 -2 188 1 1 1 0.51908 0.35843 0.61294 0.38385 -2 189 2 4 1 0.56691 0.37283 0.60469 0.39722 -2 190 2 4 0 0.57641 0.40698 0.65272 0.40517 -2 191 1 1 0 0.61806 0.40434 0.62457 0.38315 -2 192 2 4 1 0.6387 0.43436 0.59972 0.37162 -2 193 2 4 0 0.6537 0.47132 0.56371 0.36873 -2 194 1 4 1 0.64354 0.44272 0.53871 0.37205 -2 195 1 1 0 0.68281 0.4423 
0.53232 0.37961 -2 196 1 1 0 0.68423 0.48885 0.52515 0.38681 -2 197 2 4 0 0.69172 0.49761 0.51816 0.37109 -2 198 1 4 0 0.68823 0.49309 0.51419 0.36965 -2 199 1 2 1 0.68377 0.4935 0.50005 0.35935 -2 200 2 4 0 0.67325 0.48124 0.48284 0.34656 -2 201 2 4 1 0.68844 0.47268 0.52266 0.36539 -3 1 1 4 1 0.66883 0.37325 0.76919 0.69293 -3 3 1 2 0 0.67015 0.3856 0.76941 0.72175 -3 4 2 4 1 0.65867 0.38996 0.73512 0.76353 -3 5 2 4 1 0.61271 0.4136 0.70859 0.77052 -3 6 2 4 0 0.61433 0.42465 0.70933 0.8 -3 7 2 4 1 0.5804 0.39622 0.69341 0.8 -3 8 2 4 0 0.51841 0.38227 0.73289 0.8 -3 9 1 2 0 0.53659 0.3558 0.74592 0.8 -3 10 1 4 1 0.52065 0.38466 0.78221 0.8 -3 11 1 1 0 0.5127 0.37854 0.7661 0.78401 -3 12 2 4 1 0.49501 0.43971 0.7905 0.7796 -3 13 2 4 1 0.49142 0.46183 0.74579 0.78366 -3 14 2 4 1 0.49081 0.46637 0.74794 0.77315 -3 15 2 2 1 0.50132 0.47586 0.74207 0.8 -3 16 2 4 0 0.56473 0.46072 0.79825 0.79796 -3 17 1 4 0 0.54207 0.46664 0.8 0.74878 -3 18 1 2 0 0.58164 0.44106 0.79297 0.72317 -3 19 2 4 1 0.59149 0.45774 0.79293 0.76953 -3 20 2 4 1 0.61672 0.45676 0.77379 0.79815 -3 21 2 2 1 0.62121 0.44059 0.76258 0.8 -3 22 2 2 1 0.63551 0.4599 0.75005 0.76542 -3 23 2 4 1 0.63114 0.46266 0.75579 0.8 -3 24 2 4 0 0.61963 0.49526 0.7527 0.79561 -3 25 2 2 0 0.57841 0.49419 0.72627 0.8 -3 26 2 2 1 0.56152 0.52013 0.78467 0.8 -3 27 2 3 1 0.57869 0.51671 0.78265 0.8 -3 28 2 3 1 0.56639 0.50541 0.75377 0.76181 -3 29 2 2 1 0.53607 0.53711 0.73607 0.77493 -3 30 2 3 1 0.50742 0.57529 0.74619 0.74033 -3 31 2 3 0 0.49056 0.52378 0.73618 0.71541 -3 32 1 3 1 0.4976 0.52854 0.72432 0.70405 -3 33 1 2 1 0.53769 0.5559 0.68652 0.70031 -3 34 1 2 0 0.53378 0.54603 0.67969 0.70818 -3 35 2 3 1 0.52224 0.53683 0.70012 0.73016 -3 36 2 3 0 0.54336 0.51652 0.69302 0.7253 -3 37 1 1 0 0.50921 0.56155 0.67768 0.72735 -3 38 1 2 0 0.52346 0.5659 0.67873 0.73461 -3 39 2 2 1 0.56296 0.54234 0.64272 0.72261 -3 40 2 4 1 0.57085 0.5206 0.67906 0.72352 -3 41 2 4 1 0.58499 0.53196 0.69191 0.72011 -3 42 2 2 0 0.57616 0.51196 0.674 0.74266 -3 43 2 2 0 0.576 0.53392 0.65332 0.75823 -3 44 1 1 1 0.57044 0.52995 0.61126 0.7968 -3 45 1 1 1 0.60101 0.54231 0.60942 0.78605 -3 46 1 1 0 0.57728 0.55258 0.59843 0.8 -3 47 2 1 0 0.55056 0.54806 0.56974 0.8 -3 48 2 4 1 0.55445 0.59867 0.58828 0.7958 -3 49 2 4 0 0.56397 0.57727 0.55507 0.7543 -3 50 2 3 1 0.57406 0.59639 0.54868 0.76199 -3 51 2 3 1 0.5561 0.59867 0.58165 0.75913 -3 52 2 1 1 0.48821 0.63845 0.58467 0.79374 -3 53 1 2 0 0.47204 0.62393 0.60018 0.75774 -3 54 2 4 0 0.48959 0.6457 0.62181 0.73965 -3 55 1 3 1 0.52759 0.60195 0.61241 0.70988 -3 56 1 3 1 0.52772 0.62054 0.57173 0.7234 -3 57 1 1 0 0.50986 0.59709 0.54509 0.73144 -3 58 1 3 0 0.46038 0.60037 0.52496 0.75924 -3 59 2 3 0 0.4769 0.6381 0.50502 0.73557 -3 60 1 1 1 0.4638 0.63734 0.53088 0.73204 -3 61 1 1 0 0.44397 0.62479 0.55098 0.7317 -3 62 1 1 1 0.45771 0.64205 0.56085 0.7122 -3 63 1 3 1 0.41829 0.61723 0.53791 0.65224 -3 64 1 1 0 0.44906 0.58146 0.55191 0.66344 -3 65 2 3 1 0.47217 0.57877 0.525 0.6597 -3 66 2 1 1 0.48396 0.57911 0.48678 0.66715 -3 67 2 3 0 0.48087 0.55254 0.46851 0.68836 -3 68 2 1 0 0.40167 0.54104 0.40646 0.67455 -3 69 1 2 1 0.41253 0.55343 0.41672 0.65517 -3 70 1 2 1 0.42959 0.59563 0.41995 0.68402 -3 71 1 2 1 0.43857 0.59709 0.42729 0.70901 -3 72 1 2 1 0.44418 0.62363 0.4246 0.71959 -3 73 2 4 1 0.4767 0.64183 0.38548 0.78097 -3 74 1 2 1 0.5276 0.62719 0.3852 0.8 -3 75 1 2 1 0.49319 0.624 0.35591 0.8 -3 76 2 4 1 0.54732 0.5904 0.29778 0.8 -3 77 2 4 1 0.54944 0.58123 0.32742 0.77967 -3 78 1 2 1 0.56733 0.53663 0.30483 0.8 
-3 79 1 2 0 0.57654 0.53186 0.30929 0.76943 -3 80 2 2 1 0.59232 0.54615 0.32875 0.77195 -3 81 2 4 1 0.6407 0.52331 0.29697 0.8 -3 82 2 4 1 0.63453 0.50234 0.2913 0.76079 -3 83 1 2 1 0.63164 0.53699 0.30748 0.77895 -3 84 1 2 0 0.6282 0.5123 0.30934 0.77445 -3 85 2 4 1 0.60935 0.49884 0.33065 0.74279 -3 86 2 2 0 0.61729 0.54562 0.34929 0.74988 -3 87 1 4 1 0.63495 0.52927 0.31141 0.73159 -3 88 1 1 1 0.6246 0.52432 0.34703 0.73015 -3 89 1 1 1 0.64368 0.48815 0.27377 0.73239 -3 90 2 1 0 0.59542 0.45566 0.26969 0.72239 -3 91 2 1 0 0.59224 0.4519 0.27504 0.69281 -3 92 2 4 1 0.59509 0.5055 0.24022 0.66945 -3 93 2 4 1 0.64672 0.53689 0.22287 0.66914 -3 94 2 4 1 0.63177 0.54698 0.21258 0.68408 -3 95 1 2 0 0.67391 0.57384 0.2072 0.68711 -3 96 2 4 1 0.66292 0.52497 0.2 0.70323 -3 97 2 2 1 0.6416 0.53087 0.20378 0.6965 -3 98 2 4 1 0.60641 0.50909 0.26903 0.6806 -3 99 1 2 1 0.6134 0.48996 0.27622 0.70435 -3 100 2 2 1 0.62637 0.48483 0.31202 0.73029 -3 101 2 4 1 0.58895 0.44496 0.3198 0.72504 -3 102 1 4 1 0.59891 0.50268 0.29841 0.72913 -3 103 1 2 0 0.63238 0.5181 0.2929 0.73254 -3 104 2 2 1 0.64532 0.51598 0.29077 0.79193 -3 105 2 2 0 0.64278 0.47902 0.28531 0.79905 -3 106 2 4 1 0.62308 0.49617 0.30022 0.8 -3 107 2 1 0 0.66055 0.47591 0.30855 0.78427 -3 108 2 4 1 0.66069 0.49633 0.31414 0.8 -3 109 2 4 1 0.66532 0.51261 0.33326 0.8 -3 110 2 1 1 0.66496 0.51259 0.30694 0.79976 -3 111 2 4 1 0.63477 0.50855 0.35965 0.8 -3 112 2 4 0 0.64212 0.47413 0.32055 0.7694 -3 113 2 4 1 0.60057 0.42494 0.35101 0.77125 -3 114 1 1 1 0.56903 0.38249 0.35041 0.76236 -3 115 1 1 1 0.6015 0.39316 0.36371 0.77496 -3 116 1 1 1 0.60273 0.42415 0.42261 0.77538 -3 117 1 4 1 0.64753 0.36608 0.46082 0.74709 -3 118 2 4 1 0.64442 0.40509 0.48388 0.71915 -3 119 1 4 1 0.65391 0.42951 0.48458 0.7488 -3 120 1 1 1 0.68116 0.4308 0.49861 0.71676 -3 121 1 1 0 0.65563 0.46113 0.47371 0.72506 -3 122 2 1 1 0.69349 0.49043 0.47868 0.73556 -3 123 2 4 1 0.66198 0.48623 0.51209 0.74302 -3 124 2 1 0 0.62501 0.50053 0.52244 0.73455 -3 125 2 4 1 0.65673 0.44638 0.51138 0.75814 -3 126 2 2 1 0.64113 0.45613 0.4999 0.7822 -3 127 2 2 1 0.61183 0.47796 0.47914 0.78129 -3 128 2 4 0 0.62885 0.48371 0.46325 0.76828 -3 129 1 2 0 0.65825 0.46961 0.48531 0.7496 -3 130 1 3 0 0.6435 0.48994 0.53024 0.72654 -3 131 1 2 1 0.66244 0.51286 0.52535 0.7488 -3 132 1 2 0 0.68476 0.54099 0.51799 0.7379 -3 133 2 4 1 0.68301 0.55496 0.51328 0.74206 -3 134 2 2 0 0.67316 0.55361 0.48301 0.75786 -3 135 2 2 1 0.67376 0.53684 0.49156 0.76391 -3 136 2 4 0 0.70431 0.5375 0.49248 0.72144 -3 137 2 2 0 0.73911 0.51031 0.50981 0.69143 -3 138 2 3 0 0.73501 0.54236 0.48455 0.65323 -3 139 1 2 1 0.70711 0.53633 0.51912 0.68392 -3 140 1 2 0 0.68128 0.55276 0.48967 0.66202 -3 141 1 2 1 0.66796 0.51312 0.48063 0.67974 -3 142 1 2 0 0.68706 0.52262 0.45528 0.69269 -3 143 2 4 1 0.67081 0.50414 0.40634 0.68221 -3 144 2 2 1 0.60688 0.4994 0.38689 0.68965 -3 145 2 4 1 0.64122 0.47853 0.39266 0.71406 -3 146 2 4 1 0.66933 0.47368 0.37491 0.69829 -3 147 2 2 0 0.6751 0.52406 0.38091 0.70497 -3 148 2 4 1 0.66144 0.49961 0.37475 0.69052 -3 149 2 4 0 0.63735 0.5103 0.36973 0.69204 -3 150 1 2 1 0.63358 0.48455 0.37815 0.68046 -3 151 1 4 1 0.65883 0.47061 0.3947 0.65703 -3 152 1 2 1 0.6302 0.50495 0.39799 0.65565 -3 153 2 4 1 0.62789 0.48344 0.39312 0.63916 -3 154 1 2 1 0.6335 0.46165 0.41299 0.64529 -3 155 1 2 1 0.64593 0.46122 0.38794 0.66622 -3 156 1 2 0 0.646 0.44097 0.3853 0.6999 -3 157 1 2 0 0.63902 0.45708 0.35352 0.70509 -3 158 2 4 1 0.66877 0.4357 0.31695 0.71684 -3 159 2 4 1 0.66383 0.44026 0.28375 
0.73352
[... remaining deleted rows for subjects 3-11 elided: one "-"-prefixed row per line, each giving subjID, trial, three integer codes, and four decimal values, down to the final row ...]
-11 201 1 2 1 0.30384 0.79552 0.23835 0.52741
\ No newline at end of file
diff --git a/inst/extdata/ug_exampleData.txt b/inst/extdata/ug_exampleData.txt
deleted file mode 100644
index 257795cc..00000000
--- a/inst/extdata/ug_exampleData.txt
diff --git a/inst/extdata/ug_exampleData.txt b/inst/extdata/ug_exampleData.txt
deleted file mode 100644
index 257795cc..00000000
--- a/inst/extdata/ug_exampleData.txt
+++ /dev/null
@@ -1,1801 +0,0 @@
-trial offer accept subjID group
-[... 1,800 data rows elided: ultimatum-game offers (1-12) and accept/reject responses (0/1) for subjects 1-30, 60 trials each, all in group LM ...]
diff --git a/inst/extdata/wcs_answersheet.txt b/inst/extdata/wcs_answersheet.txt
deleted file mode 100644
index 207ac3a1..00000000
--- a/inst/extdata/wcs_answersheet.txt
+++ /dev/null
@@ -1,4 +0,0 @@
- 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128
-Color 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2
-Form 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3
-Number 1 4 2 1 4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1 1 4 2 1 4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1
diff --git a/inst/extdata/wcs_exampleData.txt b/inst/extdata/wcs_exampleData.txt
deleted file mode 100644
index 80cf86db..00000000
--- a/inst/extdata/wcs_exampleData.txt
+++ /dev/null
@@ -1,1158 +0,0 @@
-choice outcome subjID trial
-[... 1,157 data rows elided: WCST pile choices (1-4) and outcomes (0/1) for subjects 1-10, up to 128 trials each ...]
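The two WCST files above fit together: each column of wcs_answersheet.txt gives the correct pile (1-4) on that trial under each of the three sorting rules, and wcs_exampleData.txt records which pile a subject actually chose and whether the choice was rewarded. A minimal R sketch of scoring one choice against the answer sheet; it assumes the currently active rule is known, which in the real task the subject has to infer:

    # Sketch: score one WCST choice against the answer sheet.
    # `answers` is a 3 x 128 matrix with rows Color / Form / Number.
    score_wcs_choice <- function(answers, choice, trial, rule) {
      as.integer(choice == answers[rule, trial])  # 1 = correct pile, 0 = incorrect
    }

    answers <- matrix(NA_integer_, 3, 128,
                      dimnames = list(c("Color", "Form", "Number"), NULL))
    answers["Color", 1:4] <- c(2L, 1L, 4L, 1L)   # first trials of the Color row above
    score_wcs_choice(answers, choice = 2L, trial = 1L, rule = "Color")  # -> 1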
diff --git a/inst/stan_files/bandit2arm_delta.stan b/inst/stan_files/bandit2arm_delta.stan
deleted file mode 100644
index 3c44ddde..00000000
--- a/inst/stan_files/bandit2arm_delta.stan
+++ /dev/null
@@ -1,109 +0,0 @@
-#include /pre/license.stan
-
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  int choice[N, T];
-  real outcome[N, T];  // no lower and upper bounds
-}
-transformed data {
-  vector[2] initV;  // initial values for EV
-  initV = rep_vector(0.0, 2);
-}
-parameters {
-  // Declare all parameters as vectors for vectorizing
-  // Hyper(group)-parameters
-  vector[2] mu_pr;
-  vector[2] sigma;
-
-  // Subject-level raw parameters (for Matt trick)
-  vector[N] A_pr;    // learning rate
-  vector[N] tau_pr;  // inverse temperature
-}
-transformed parameters {
-  // subject-level parameters
-  vector[N] A;
-  vector[N] tau;
-
-  for (i in 1:N) {
-    A[i]   = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]);
-    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5;
-  }
-}
-model {
-  // Hyperparameters
-  mu_pr ~ normal(0, 1);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters
-  A_pr   ~ normal(0, 1);
-  tau_pr ~ normal(0, 1);
-
-  // subject loop and trial loop
-  for (i in 1:N) {
-    vector[2] ev;  // expected value
-    real PE;       // prediction error
-
-    ev = initV;
-
-    for (t in 1:(Tsubj[i])) {
-      // compute action probabilities
-      choice[i, t] ~ categorical_logit(tau[i] * ev);
-
-      // prediction error
-      PE = outcome[i, t] - ev[choice[i, t]];
-
-      // value updating (learning)
-      ev[choice[i, t]] += A[i] * PE;
-    }
-  }
-}
-generated quantities {
-  // For group level parameters
-  real mu_A;
-  real mu_tau;
-
-  // For log likelihood calculation
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to 0 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_A   = Phi_approx(mu_pr[1]);
-  mu_tau = Phi_approx(mu_pr[2]) * 5;
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      vector[2] ev;  // expected value
-      real PE;       // prediction error
-
-      // Initialize values
-      ev = initV;
-
-      log_lik[i] = 0;
-
-      for (t in 1:(Tsubj[i])) {
-        // compute log likelihood of current trial
-        log_lik[i] += categorical_logit_lpmf(choice[i, t] | tau[i] * ev);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = categorical_rng(softmax(tau[i] * ev));
-
-        // prediction error
-        PE = outcome[i, t] - ev[choice[i, t]];
-
-        // value updating (learning)
-        ev[choice[i, t]] += A[i] * PE;
-      }
-    }
-  }
-}
-
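bandit2arm_delta.stan, deleted above, is a plain Rescorla-Wagner model: a delta-rule value update driven by the prediction error, with a softmax (categorical_logit) choice rule scaled by the inverse temperature tau. A minimal R sketch of the same trial loop, useful for simulating test data; the function name and all parameter values are illustrative only:

    # Sketch: simulate one subject from the two-armed bandit delta-rule model.
    # A = learning rate in [0, 1]; tau = inverse temperature in [0, 5].
    simulate_bandit2arm <- function(n_trials, A = 0.3, tau = 2, p_reward = c(0.7, 0.3)) {
      ev <- c(0, 0)                              # expected value per arm (initV)
      choice <- outcome <- integer(n_trials)
      for (t in seq_len(n_trials)) {
        p <- exp(tau * ev) / sum(exp(tau * ev))  # softmax action probabilities
        choice[t]  <- sample(1:2, 1, prob = p)
        outcome[t] <- rbinom(1, 1, p_reward[choice[t]])
        pe <- outcome[t] - ev[choice[t]]         # prediction error
        ev[choice[t]] <- ev[choice[t]] + A * pe  # delta-rule update
      }
      data.frame(trial = seq_len(n_trials), choice, outcome)
    }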
diff --git a/inst/stan_files/bandit4arm2_kalman_filter.stan b/inst/stan_files/bandit4arm2_kalman_filter.stan
deleted file mode 100644
index 15d36c63..00000000
--- a/inst/stan_files/bandit4arm2_kalman_filter.stan
+++ /dev/null
@@ -1,163 +0,0 @@
-#include /pre/license.stan
-
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  int choice[N,T];
-  real outcome[N,T];
-}
-
-transformed data {
-  real sigmaO;  // sigma_O = 4
-  sigmaO = 4;
-}
-
-parameters {
-  // group-level parameters
-  vector[6] mu_pr;
-  vector[6] sigma;
-
-  // subject-level raw parameters, follows norm(0,1), for later Matt Trick
-  vector[N] lambda_pr;  // decay factor
-  vector[N] theta_pr;   // decay center
-  vector[N] beta_pr;    // inverse softmax temperature
-  vector[N] mu0_pr;     // anticipated initial mean of all 4 options
-  vector[N] sigma0_pr;  // anticipated initial sd^2 (uncertainty factor) of all 4 options
-  vector[N] sigmaD_pr;  // sd^2 of diffusion noise
-}
-
-transformed parameters {
-  // subject-level parameters
-  vector[N] lambda;
-  vector[N] theta;
-  vector[N] beta;
-  vector[N] mu0;
-  vector[N] sigma0;
-  vector[N] sigmaD;
-
-  // Matt Trick
-  for (i in 1:N) {
-    lambda[i] = Phi_approx( mu_pr[1] + sigma[1] * lambda_pr[i] );
-    theta[i]  = Phi_approx( mu_pr[2] + sigma[2] * theta_pr[i] ) * 100;
-    beta[i]   = Phi_approx( mu_pr[3] + sigma[3] * beta_pr[i] );
-    mu0[i]    = Phi_approx( mu_pr[4] + sigma[4] * mu0_pr[i] ) * 100;
-    sigma0[i] = Phi_approx( mu_pr[5] + sigma[5] * sigma0_pr[i] ) * 15;
-    sigmaD[i] = Phi_approx( mu_pr[6] + sigma[6] * sigmaD_pr[i] ) * 15;
-  }
-}
-
-model {
-  // prior: hyperparameters
-  mu_pr ~ normal(0,1);
-  sigma ~ cauchy(0,5);
-
-  // prior: individual parameters
-  lambda_pr ~ normal(0,1);
-  theta_pr  ~ normal(0,1);
-  beta_pr   ~ normal(0,1);
-  mu0_pr    ~ normal(0,1);
-  sigma0_pr ~ normal(0,1);
-  sigmaD_pr ~ normal(0,1);
-
-  // subject loop and trial loop
-  for (i in 1:N) {
-    vector[4] mu_ev;     // estimated mean for each option
-    vector[4] sd_ev_sq;  // estimated sd^2 for each option
-    real pe;             // prediction error
-    real k;              // learning rate
-
-    mu_ev    = rep_vector(mu0[i], 4);
-    sd_ev_sq = rep_vector(sigma0[i]^2, 4);
-
-    for (t in 1:(Tsubj[i])) {
-      // compute action probabilities
-      choice[i,t] ~ categorical_logit( beta[i] * mu_ev );
-
-      // learning rate
-      k = sd_ev_sq[choice[i,t]] / ( sd_ev_sq[choice[i,t]] + sigmaO^2 );
-
-      // prediction error
-      pe = outcome[i,t] - mu_ev[choice[i,t]];
-
-      // value updating (learning)
-      mu_ev[choice[i,t]]    += k * pe;
-      sd_ev_sq[choice[i,t]] *= (1-k);
-
-      // diffusion process
-      {
-        mu_ev *= lambda[i];
-        mu_ev += (1 - lambda[i]) * theta[i];
-      }
-      {
-        sd_ev_sq *= lambda[i]^2;
-        sd_ev_sq += sigmaD[i]^2;
-      }
-    }
-  }
-}
-
-generated quantities {
-  real mu_lambda;
-  real mu_theta;
-  real mu_beta;
-  real mu_mu0;
-  real mu_sigma0;
-  real mu_sigmaD;
-  real log_lik[N];
-  real y_pred[N,T];
-
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_lambda = Phi_approx(mu_pr[1]);
-  mu_theta  = Phi_approx(mu_pr[2]) * 100;
-  mu_beta   = Phi_approx(mu_pr[3]);
-  mu_mu0    = Phi_approx(mu_pr[4]) * 100;
-  mu_sigma0 = Phi_approx(mu_pr[5]) * 15;
-  mu_sigmaD = Phi_approx(mu_pr[6]) * 15;
-
-  { // local block
-    for (i in 1:N) {
-      vector[4] mu_ev;     // estimated mean for each option
-      vector[4] sd_ev_sq;  // estimated sd^2 for each option
-      real pe;             // prediction error
-      real k;              // learning rate
-
-      log_lik[i] = 0;
-      mu_ev    = rep_vector(mu0[i], 4);
-      sd_ev_sq = rep_vector(sigma0[i]^2, 4);
-
-      for (t in 1:(Tsubj[i])) {
-        // compute action probabilities
-        log_lik[i] += categorical_logit_lpmf( choice[i,t] | beta[i] * mu_ev );
-        y_pred[i, t] = categorical_rng(softmax(beta[i] * mu_ev));
-
-        // learning rate
-        k = sd_ev_sq[choice[i,t]] / ( sd_ev_sq[choice[i,t]] + sigmaO^2 );
-
-        // prediction error
-        pe = outcome[i,t] - mu_ev[choice[i,t]];
-
-        // value updating (learning)
-        mu_ev[choice[i,t]]    += k * pe;
-        sd_ev_sq[choice[i,t]] *= (1-k);
-
-        // diffusion process
-        {
-          mu_ev *= lambda[i];
-          mu_ev += (1 - lambda[i]) * theta[i];
-        }
-        {
-          sd_ev_sq *= lambda[i]^2;
-          sd_ev_sq += sigmaD[i]^2;
-        }
-      }
-    }
-  } // local block END
-}
-
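The Kalman-filter model above tracks a posterior mean and variance per arm: the learning rate k is the Kalman gain, and between trials every arm's mean decays toward the center theta while its variance grows by diffusion noise. A minimal R sketch of a single trial's update under the same equations, with sigma_O fixed at 4 as in the Stan code and all other values arbitrary:

    # Sketch: one Kalman-filter bandit trial -- update the chosen arm, then
    # apply the decay/diffusion step to every arm.
    kalman_bandit_step <- function(mu_ev, sd_ev_sq, chosen, outcome,
                                   lambda, theta, sigmaD, sigmaO = 4) {
      k  <- sd_ev_sq[chosen] / (sd_ev_sq[chosen] + sigmaO^2)  # Kalman gain
      pe <- outcome - mu_ev[chosen]                           # prediction error
      mu_ev[chosen]    <- mu_ev[chosen] + k * pe
      sd_ev_sq[chosen] <- sd_ev_sq[chosen] * (1 - k)

      # decay toward the center theta, plus diffusion noise on the variance
      mu_ev    <- lambda * mu_ev + (1 - lambda) * theta
      sd_ev_sq <- lambda^2 * sd_ev_sq + sigmaD^2
      list(mu_ev = mu_ev, sd_ev_sq = sd_ev_sq)
    }

    kalman_bandit_step(mu_ev = rep(50, 4), sd_ev_sq = rep(15^2, 4),
                       chosen = 2, outcome = 80, lambda = 0.9, theta = 50, sigmaD = 3)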
diff --git a/inst/stan_files/bandit4arm_2par_lapse.stan b/inst/stan_files/bandit4arm_2par_lapse.stan
deleted file mode 100644
index b95da5ce..00000000
--- a/inst/stan_files/bandit4arm_2par_lapse.stan
+++ /dev/null
@@ -1,173 +0,0 @@
-#include /pre/license.stan
-
-// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate)
-// w/o reward sensitivity and punishment sensitivity
-// in sum, there are three parameters - Arew, Apun, xi
-// Aylward et al., 2018, PsyArXiv
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real rew[N, T];
-  real los[N, T];
-  int choice[N, T];
-}
-
-transformed data {
-  vector[4] initV;
-  initV = rep_vector(0.0, 4);
-}
-
-parameters {
-  // Declare all parameters as vectors for vectorizing
-  // Hyper(group)-parameters
-  vector[3] mu_pr;
-  vector[3] sigma;
-
-  // Subject-level raw parameters (for Matt trick)
-  vector[N] Arew_pr;
-  vector[N] Apun_pr;
-  vector[N] xi_pr;
-}
-
-transformed parameters {
-  // Transform subject-level raw parameters
-  vector[N] Arew;
-  vector[N] Apun;
-  vector[N] xi;
-
-  for (i in 1:N) {
-    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
-    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
-    xi[i]   = Phi_approx(mu_pr[3] + sigma[3] * xi_pr[i]);
-  }
-}
-
-model {
-  // Hyperparameters
-  mu_pr ~ normal(0, 1);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters
-  Arew_pr ~ normal(0, 1.0);
-  Apun_pr ~ normal(0, 1.0);
-  xi_pr   ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    // Define values
-    vector[4] Qr;
-    vector[4] Qp;
-    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-    real Qr_chosen;
-    real Qp_chosen;
-    real PEr;  // prediction error - for reward of the chosen option
-    real PEp;  // prediction error - for punishment of the chosen option
-
-    // Initialize values
-    Qr   = initV;
-    Qp   = initV;
-    Qsum = initV;
-
-    for (t in 1:Tsubj[i]) {
-      // softmax choice + xi (noise)
-      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-      // Prediction error signals
-      PEr     = rew[i, t] - Qr[choice[i, t]];
-      PEp     = los[i, t] - Qp[choice[i, t]];
-      PEr_fic = -Qr;
-      PEp_fic = -Qp;
-
-      // store chosen deck Q values (rew and pun)
-      Qr_chosen = Qr[choice[i, t]];
-      Qp_chosen = Qp[choice[i, t]];
-
-      // First, update Qr & Qp for all decks w/ fictive updating
-      Qr += Arew[i] * PEr_fic;
-      Qp += Apun[i] * PEp_fic;
-      // Replace Q values of chosen deck with correct values using stored values
-      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-      // Q(sum)
-      Qsum = Qr + Qp;
-    }
-  }
-}
-generated quantities {
-  // For group level parameters
-  real mu_Arew;
-  real mu_Apun;
-  real mu_xi;
-
-  // For log likelihood calculation
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to 0 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_Arew = Phi_approx(mu_pr[1]);
-  mu_Apun = Phi_approx(mu_pr[2]);
-  mu_xi   = Phi_approx(mu_pr[3]);
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      // Define values
-      vector[4] Qr;
-      vector[4] Qp;
-      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-      real Qr_chosen;
-      real Qp_chosen;
-      real PEr;  // prediction error - for reward of the chosen option
-      real PEp;  // prediction error - for punishment of the chosen option
-
-      // Initialize values
-      Qr   = initV;
-      Qp   = initV;
-      Qsum = initV;
-      log_lik[i] = 0.0;
-
-      for (t in 1:Tsubj[i]) {
-        // compute log likelihood of current trial
-        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-        // Prediction error signals
-        PEr     = rew[i, t] - Qr[choice[i, t]];
-        PEp     = los[i, t] - Qp[choice[i, t]];
-        PEr_fic = -Qr;
-        PEp_fic = -Qp;
-
-        // store chosen deck Q values (rew and pun)
-        Qr_chosen = Qr[choice[i, t]];
-        Qp_chosen = Qp[choice[i, t]];
-
-        // First, update Qr & Qp for all decks w/ fictive updating
-        Qr += Arew[i] * PEr_fic;
-        Qp += Apun[i] * PEp_fic;
-        // Replace Q values of chosen deck with correct values using stored values
-        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-        // Q(sum)
-        Qsum = Qr + Qp;
-      }
-    }
-  }
-}
-
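The lapse models above mix the softmax policy with uniform random responding: with probability xi the subject lapses and picks one of the four decks at random, so each deck's probability becomes softmax(Q)*(1-xi) + xi/4. A small R helper showing that mixture; the Q values and lapse rate are arbitrary:

    # Sketch: choice probabilities under a softmax policy with lapse rate xi.
    lapse_choice_prob <- function(Q, xi) {
      p <- exp(Q - max(Q))            # numerically stable softmax
      p <- p / sum(p)
      p * (1 - xi) + xi / length(Q)   # mix with uniform lapse responding
    }

    lapse_choice_prob(Q = c(1.2, 0.4, -0.3, 0), xi = 0.05)  # still sums to 1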
diff --git a/inst/stan_files/bandit4arm_4par.stan b/inst/stan_files/bandit4arm_4par.stan
deleted file mode 100644
index 18d6acf9..00000000
--- a/inst/stan_files/bandit4arm_4par.stan
+++ /dev/null
@@ -1,176 +0,0 @@
-#include /pre/license.stan
-
-// Seymour et al 2012 J neuro model, w/o C (choice perseveration)
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real rew[N, T];
-  real los[N, T];
-  int choice[N, T];
-}
-
-transformed data {
-  vector[4] initV;
-  initV = rep_vector(0.0, 4);
-}
-
-parameters {
-  // Declare all parameters as vectors for vectorizing
-  // Hyper(group)-parameters
-  vector[4] mu_pr;
-  vector[4] sigma;
-
-  // Subject-level raw parameters (for Matt trick)
-  vector[N] Arew_pr;
-  vector[N] Apun_pr;
-  vector[N] R_pr;
-  vector[N] P_pr;
-}
-
-transformed parameters {
-  // Transform subject-level raw parameters
-  vector[N] Arew;
-  vector[N] Apun;
-  vector[N] R;
-  vector[N] P;
-
-  for (i in 1:N) {
-    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
-    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
-    R[i]    = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30;
-    P[i]    = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30;
-  }
-}
-
-model {
-  // Hyperparameters
-  mu_pr ~ normal(0, 1);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters
-  Arew_pr ~ normal(0, 1.0);
-  Apun_pr ~ normal(0, 1.0);
-  R_pr    ~ normal(0, 1.0);
-  P_pr    ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    // Define values
-    vector[4] Qr;
-    vector[4] Qp;
-    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-    real Qr_chosen;
-    real Qp_chosen;
-    real PEr;  // prediction error - for reward of the chosen option
-    real PEp;  // prediction error - for punishment of the chosen option
-
-    // Initialize values
-    Qr   = initV;
-    Qp   = initV;
-    Qsum = initV;
-
-    for (t in 1:Tsubj[i]) {
-      // softmax choice
-      choice[i, t] ~ categorical_logit(Qsum);
-
-      // Prediction error signals
-      PEr     = R[i] * rew[i, t] - Qr[choice[i, t]];
-      PEp     = P[i] * los[i, t] - Qp[choice[i, t]];
-      PEr_fic = -Qr;
-      PEp_fic = -Qp;
-
-      // store chosen deck Q values (rew and pun)
-      Qr_chosen = Qr[choice[i, t]];
-      Qp_chosen = Qp[choice[i, t]];
-
-      // First, update Qr & Qp for all decks w/ fictive updating
-      Qr += Arew[i] * PEr_fic;
-      Qp += Apun[i] * PEp_fic;
-      // Replace Q values of chosen deck with correct values using stored values
-      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-      // Q(sum)
-      Qsum = Qr + Qp;
-    }
-  }
-}
-generated quantities {
-  // For group level parameters
-  real mu_Arew;
-  real mu_Apun;
-  real mu_R;
-  real mu_P;
-
-  // For log likelihood calculation
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to 0 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_Arew = Phi_approx(mu_pr[1]);
-  mu_Apun = Phi_approx(mu_pr[2]);
-  mu_R    = Phi_approx(mu_pr[3]) * 30;
-  mu_P    = Phi_approx(mu_pr[4]) * 30;
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      // Define values
-      vector[4] Qr;
-      vector[4] Qp;
-      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-      real Qr_chosen;
-      real Qp_chosen;
-      real PEr;  // prediction error - for reward of the chosen option
-      real PEp;  // prediction error - for punishment of the chosen option
-
-      // Initialize values
-      Qr   = initV;
-      Qp   = initV;
-      Qsum = initV;
-      log_lik[i] = 0.0;
-
-      for (t in 1:Tsubj[i]) {
-        // compute log likelihood of current trial
-        log_lik[i] += categorical_logit_lpmf(choice[i, t] | Qsum);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = categorical_rng(softmax(Qsum));
-
-        // Prediction error signals
-        PEr     = R[i] * rew[i, t] - Qr[choice[i, t]];
-        PEp     = P[i] * los[i, t] - Qp[choice[i, t]];
-        PEr_fic = -Qr;
-        PEp_fic = -Qp;
-
-        // store chosen deck Q values (rew and pun)
-        Qr_chosen = Qr[choice[i, t]];
-        Qp_chosen = Qp[choice[i, t]];
-
-        // First, update Qr & Qp for all decks w/ fictive updating
-        Qr += Arew[i] * PEr_fic;
-        Qp += Apun[i] * PEp_fic;
-        // Replace Q values of chosen deck with correct values using stored values
-        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-        // Q(sum)
-        Qsum = Qr + Qp;
-      }
-    }
-  }
-}
-
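Like the other models in this series, bandit4arm_4par.stan draws each subject-level parameter non-centered (the "Matt trick" in the comments): a raw N(0,1) parameter is shifted by the group mean, scaled by the group SD, squashed through Phi_approx, and multiplied by the parameter's upper bound (30 for R and P here). The same mapping can be written in R with pnorm standing in for Stan's Phi_approx; the values below are arbitrary:

    # Sketch: non-centered mapping from a raw N(0,1) draw to a bounded
    # subject-level parameter, as in the transformed parameters blocks.
    to_bounded <- function(raw, mu, sigma, upper = 1) {
      # Stan's Phi_approx is a logistic approximation to the normal CDF;
      # pnorm is the exact CDF and close enough for illustration.
      pnorm(mu + sigma * raw) * upper
    }

    to_bounded(raw = 0.5, mu = -0.2, sigma = 0.4, upper = 30)  # e.g. reward sensitivity R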
diff --git a/inst/stan_files/bandit4arm_lapse.stan b/inst/stan_files/bandit4arm_lapse.stan
deleted file mode 100644
index 161ce311..00000000
--- a/inst/stan_files/bandit4arm_lapse.stan
+++ /dev/null
@@ -1,182 +0,0 @@
-#include /pre/license.stan
-
-// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate)
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real rew[N, T];
-  real los[N, T];
-  int choice[N, T];
-}
-
-transformed data {
-  vector[4] initV;
-  initV = rep_vector(0.0, 4);
-}
-
-parameters {
-  // Declare all parameters as vectors for vectorizing
-  // Hyper(group)-parameters
-  vector[5] mu_pr;
-  vector[5] sigma;
-
-  // Subject-level raw parameters (for Matt trick)
-  vector[N] Arew_pr;
-  vector[N] Apun_pr;
-  vector[N] R_pr;
-  vector[N] P_pr;
-  vector[N] xi_pr;
-}
-
-transformed parameters {
-  // Transform subject-level raw parameters
-  vector[N] Arew;
-  vector[N] Apun;
-  vector[N] R;
-  vector[N] P;
-  vector[N] xi;
-
-  for (i in 1:N) {
-    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
-    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
-    R[i]    = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30;
-    P[i]    = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30;
-    xi[i]   = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]);
-  }
-}
-
-model {
-  // Hyperparameters
-  mu_pr ~ normal(0, 1);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters
-  Arew_pr ~ normal(0, 1.0);
-  Apun_pr ~ normal(0, 1.0);
-  R_pr    ~ normal(0, 1.0);
-  P_pr    ~ normal(0, 1.0);
-  xi_pr   ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    // Define values
-    vector[4] Qr;
-    vector[4] Qp;
-    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-    real Qr_chosen;
-    real Qp_chosen;
-    real PEr;  // prediction error - for reward of the chosen option
-    real PEp;  // prediction error - for punishment of the chosen option
-
-    // Initialize values
-    Qr   = initV;
-    Qp   = initV;
-    Qsum = initV;
-
-    for (t in 1:Tsubj[i]) {
-      // softmax choice + xi (noise)
-      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-      // Prediction error signals
-      PEr     = R[i] * rew[i, t] - Qr[choice[i, t]];
-      PEp     = P[i] * los[i, t] - Qp[choice[i, t]];
-      PEr_fic = -Qr;
-      PEp_fic = -Qp;
-
-      // store chosen deck Q values (rew and pun)
-      Qr_chosen = Qr[choice[i, t]];
-      Qp_chosen = Qp[choice[i, t]];
-
-      // First, update Qr & Qp for all decks w/ fictive updating
-      Qr += Arew[i] * PEr_fic;
-      Qp += Apun[i] * PEp_fic;
-      // Replace Q values of chosen deck with correct values using stored values
-      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-      // Q(sum)
-      Qsum = Qr + Qp;
-    }
-  }
-}
-generated quantities {
-  // For group level parameters
-  real mu_Arew;
-  real mu_Apun;
-  real mu_R;
-  real mu_P;
-  real mu_xi;
-
-  // For log likelihood calculation
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to 0 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_Arew = Phi_approx(mu_pr[1]);
-  mu_Apun = Phi_approx(mu_pr[2]);
-  mu_R    = Phi_approx(mu_pr[3]) * 30;
-  mu_P    = Phi_approx(mu_pr[4]) * 30;
-  mu_xi   = Phi_approx(mu_pr[5]);
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      // Define values
-      vector[4] Qr;
-      vector[4] Qp;
-      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
-      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
-      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
-
-      real Qr_chosen;
-      real Qp_chosen;
-      real PEr;  // prediction error - for reward of the chosen option
-      real PEp;  // prediction error - for punishment of the chosen option
-
-      // Initialize values
-      Qr   = initV;
-      Qp   = initV;
-      Qsum = initV;
-      log_lik[i] = 0.0;
-
-      for (t in 1:Tsubj[i]) {
-        // compute log likelihood of current trial
-        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
-
-        // Prediction error signals
-        PEr     = R[i] * rew[i, t] - Qr[choice[i, t]];
-        PEp     = P[i] * los[i, t] - Qp[choice[i, t]];
-        PEr_fic = -Qr;
-        PEp_fic = -Qp;
-
-        // store chosen deck Q values (rew and pun)
-        Qr_chosen = Qr[choice[i, t]];
-        Qp_chosen = Qp[choice[i, t]];
-
-        // First, update Qr & Qp for all decks w/ fictive updating
-        Qr += Arew[i] * PEr_fic;
-        Qp += Apun[i] * PEp_fic;
-        // Replace Q values of chosen deck with correct values using stored values
-        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
-        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
-
-        // Q(sum)
-        Qsum = Qr + Qp;
-      }
-    }
-  }
-}
-
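A recurring detail in these four-armed models is fictive updating: every deck's Q value first shrinks via the fictive prediction error -Q, and the chosen deck is then overwritten with a proper delta-rule update computed from its stored pre-update value. A compact R sketch of one such update for the reward stack, leaving out the R/P sensitivity scaling used by the 4par and lapse variants; values are arbitrary:

    # Sketch: fictive updating of reward Q values, mirroring the Stan trial loop.
    fictive_update <- function(Qr, chosen, rew, Arew) {
      Qr_chosen <- Qr[chosen]                             # store the pre-update value
      Qr <- Qr + Arew * (-Qr)                             # fictive PE shrinks every deck
      Qr[chosen] <- Qr_chosen + Arew * (rew - Qr_chosen)  # real PE for the chosen deck
      Qr
    }

    fictive_update(Qr = c(0.5, -0.2, 0.1, 0), chosen = 1, rew = 1, Arew = 0.2)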
mu_P; - real mu_xi; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_Arew = Phi_approx(mu_pr[1]); - mu_Apun = Phi_approx(mu_pr[2]); - mu_R = Phi_approx(mu_pr[3]) * 30; - mu_P = Phi_approx(mu_pr[4]) * 30; - mu_xi = Phi_approx(mu_pr[5]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] Qr; - vector[4] Qp; - vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) - vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) - vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance - - real Qr_chosen; - real Qp_chosen; - real PEr; // prediction error - for reward of the chosen option - real PEp; // prediction error - for punishment of the chosen option - - // Initialize values - Qr = initV; - Qp = initV; - Qsum = initV; - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - // compute log likelihood of current trial - log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // Prediction error signals - PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; - PEp = P[i] * los[i, t] - Qp[choice[i, t]]; - PEr_fic = -Qr; - PEp_fic = -Qp; - - // store chosen deck Q values (rew and pun) - Qr_chosen = Qr[choice[i, t]]; - Qp_chosen = Qp[choice[i, t]]; - - // First, update Qr & Qp for all decks w/ fictive updating - Qr += Arew[i] * PEr_fic; - Qp += Apun[i] * PEp_fic; - // Replace Q values of chosen deck with correct values using stored values - Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; - Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; - - // Q(sum) - Qsum = Qr + Qp; - } - } - } -} - diff --git a/inst/stan_files/bandit4arm_lapse_decay.stan b/inst/stan_files/bandit4arm_lapse_decay.stan deleted file mode 100644 index b089ee21..00000000 --- a/inst/stan_files/bandit4arm_lapse_decay.stan +++ /dev/null @@ -1,201 +0,0 @@ -#include /pre/license.stan - -// Seymour et al 2012 J neuro model, w/o C (chioce perseveration) but with xi (lapse rate). Added decay rate (Niv et al., 2015, J. 
Neuro) -// Aylward et al., 2018, PsyArXiv -data { - int N; - int T; - int Tsubj[N]; - real rew[N, T]; - real los[N, T]; - int choice[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[6] mu_pr; - vector[6] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Arew_pr; - vector[N] Apun_pr; - vector[N] R_pr; - vector[N] P_pr; - vector[N] xi_pr; - vector[N] d_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] Arew; - vector[N] Apun; - vector[N] R; - vector[N] P; - vector[N] xi; - vector[N] d; - - for (i in 1:N) { - Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); - Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); - R[i] = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30; - P[i] = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30; - xi[i] = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]); - d[i] = Phi_approx(mu_pr[6] + sigma[6] * d_pr[i]); - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - Arew_pr ~ normal(0, 1.0); - Apun_pr ~ normal(0, 1.0); - R_pr ~ normal(0, 1.0); - P_pr ~ normal(0, 1.0); - xi_pr ~ normal(0, 1.0); - d_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] Qr; - vector[4] Qp; - //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) - //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) - vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance - vector[4] tmp; // temporary vector for Qr and Qp - - real Qr_chosen; - real Qp_chosen; - real PEr; // prediction error - for reward of the chosen option - real PEp; // prediction error - for punishment of the chosen option - - // Initialize values - Qr = initV; - Qp = initV; - Qsum = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice + xi (noise) - choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // Prediction error signals - PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; - PEp = P[i] * los[i, t] - Qp[choice[i, t]]; - //PEr_fic = -Qr; - //PEp_fic = -Qp; - - // store chosen deck Q values (rew and pun) - Qr_chosen = Qr[choice[i, t]]; - Qp_chosen = Qp[choice[i, t]]; - - // First, update Qr & Qp for all decks w/ decay rate - //Qr += Arew[i] * PEr_fic; - //Qp += Apun[i] * PEp_fic; - tmp = (1-d[i]) * Qr; - Qr = tmp; - tmp = (1-d[i]) * Qp; - Qp = tmp; - - // Replace Q values of chosen deck with correct values using stored values - Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; - Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; - - // Q(sum) - Qsum = Qr + Qp; - } - } -} -generated quantities { - // For group level parameters - real mu_Arew; - real mu_Apun; - real mu_R; - real mu_P; - real mu_xi; - real mu_d; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_Arew = Phi_approx(mu_pr[1]); - mu_Apun = Phi_approx(mu_pr[2]); - mu_R = Phi_approx(mu_pr[3]) * 30; - mu_P = Phi_approx(mu_pr[4]) * 30; - mu_xi = Phi_approx(mu_pr[5]); - mu_d = Phi_approx(mu_pr[6]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] Qr; - vector[4] Qp; - //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) 
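For reference, the decay variant drops the fictive updating of unchosen decks (hence the commented-out prediction-error declarations here) and instead shrinks every deck's Q values by a factor of (1 - d) each trial, before the chosen deck receives its own prediction-error update. A minimal R sketch of one such trial update, using invented values for the learning rates, decay rate, and outcomes (assumed already scaled by the sensitivity parameters R and P):

  Arew <- 0.3; Apun <- 0.2; d <- 0.1      # learning and decay rates, all in [0, 1]
  Qr <- rep(0, 4); Qp <- rep(0, 4)        # reward / punishment values per deck
  ch <- 2; rew <- 1; los <- -1            # one trial's choice and scaled outcomes

  PEr <- rew - Qr[ch]                     # reward PE for the chosen deck
  PEp <- los - Qp[ch]                     # punishment PE for the chosen deck
  Qr_chosen <- Qr[ch]; Qp_chosen <- Qp[ch]
  Qr <- (1 - d) * Qr                      # decay all decks toward zero
  Qp <- (1 - d) * Qp
  Qr[ch] <- Qr_chosen + Arew * PEr        # chosen deck is PE-updated, not decayed
  Qp[ch] <- Qp_chosen + Apun * PEp
  Qsum <- Qr + Qp                         # feeds the next trial's choice rule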
- //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) - vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance - vector[4] tmp; // temporary vector for Qr and Qp - - - real Qr_chosen; - real Qp_chosen; - real PEr; // prediction error - for reward of the chosen option - real PEp; // prediction error - for punishment of the chosen option - - // Initialize values - Qr = initV; - Qp = initV; - Qsum = initV; - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - // compute log likelihood of current trial - log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // Prediction error signals - PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; - PEp = P[i] * los[i, t] - Qp[choice[i, t]]; - //PEr_fic = -Qr; - //PEp_fic = -Qp; - - // store chosen deck Q values (rew and pun) - Qr_chosen = Qr[choice[i, t]]; - Qp_chosen = Qp[choice[i, t]]; - - // First, update Qr & Qp for all decks w/ decay rate - //Qr += Arew[i] * PEr_fic; - //Qp += Apun[i] * PEp_fic; - tmp = (1-d[i]) * Qr; - Qr = tmp; - tmp = (1-d[i]) * Qp; - Qp = tmp; - - // Replace Q values of chosen deck with correct values using stored values - Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; - Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; - - // Q(sum) - Qsum = Qr + Qp; - } - } - } -} diff --git a/inst/stan_files/bandit4arm_singleA_lapse.stan b/inst/stan_files/bandit4arm_singleA_lapse.stan deleted file mode 100644 index b383f389..00000000 --- a/inst/stan_files/bandit4arm_singleA_lapse.stan +++ /dev/null @@ -1,177 +0,0 @@ -#include /pre/license.stan - -// Seymour et al 2012 J neuro model, w/o C (chioce perseveration) but with xi (lapse rate). Single learning rate both for R and P. 
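All of the lapse models share one choice rule: the softmax policy is mixed with uniform random responding, so each deck's probability is softmax(Qsum) * (1 - xi) + xi/4. A short R sketch with illustrative values (softmax is written out, since base R has no built-in):

  softmax <- function(x) exp(x - max(x)) / sum(exp(x - max(x)))
  Qsum <- c(0.5, 0.1, -0.2, 0.0)               # summed reward + punishment values
  xi   <- 0.05                                 # lapse rate in [0, 1]
  p    <- softmax(Qsum) * (1 - xi) + xi / 4    # per-deck choice probabilities
  sum(p)                                       # still sums to 1
  sample(1:4, size = 1, prob = p)              # simulate one choice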
-// Aylward et al., 2018, PsyArXiv -data { - int N; - int T; - int Tsubj[N]; - real rew[N, T]; - real los[N, T]; - int choice[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] R_pr; - vector[N] P_pr; - vector[N] xi_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] R; - vector[N] P; - vector[N] xi; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - R[i] = Phi_approx(mu_pr[2] + sigma[2] * R_pr[i]) * 30; - P[i] = Phi_approx(mu_pr[3] + sigma[3] * P_pr[i]) * 30; - xi[i] = Phi_approx(mu_pr[4] + sigma[4] * xi_pr[i]); - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1.0); - R_pr ~ normal(0, 1.0); - P_pr ~ normal(0, 1.0); - xi_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] Qr; - vector[4] Qp; - vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) - vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) - vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance - - real Qr_chosen; - real Qp_chosen; - real PEr; // prediction error - for reward of the chosen option - real PEp; // prediction error - for punishment of the chosen option - - // Initialize values - Qr = initV; - Qp = initV; - Qsum = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice + xi (noise) - choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // Prediction error signals - PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; - PEp = P[i] * los[i, t] - Qp[choice[i, t]]; - PEr_fic = -Qr; - PEp_fic = -Qp; - - // store chosen deck Q values (rew and pun) - Qr_chosen = Qr[choice[i, t]]; - Qp_chosen = Qp[choice[i, t]]; - - // First, update Qr & Qp for all decks w/ fictive updating - Qr += A[i] * PEr_fic; - Qp += A[i] * PEp_fic; - // Replace Q values of chosen deck with correct values using stored values - Qr[choice[i, t]] = Qr_chosen + A[i] * PEr; - Qp[choice[i, t]] = Qp_chosen + A[i] * PEp; - - // Q(sum) - Qsum = Qr + Qp; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_R; - real mu_P; - real mu_xi; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_R = Phi_approx(mu_pr[2]) * 30; - mu_P = Phi_approx(mu_pr[3]) * 30; - mu_xi = Phi_approx(mu_pr[4]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] Qr; - vector[4] Qp; - vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) - vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) - vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance - - real Qr_chosen; - real Qp_chosen; - real PEr; // prediction error - for reward of the chosen option - real PEp; // prediction error - for punishment of the chosen option - - // Initialize values - Qr = initV; - Qp = initV; - Qsum = initV; - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - // compute log likelihood of current trial - 
log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); - - // Prediction error signals - PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; - PEp = P[i] * los[i, t] - Qp[choice[i, t]]; - PEr_fic = -Qr; - PEp_fic = -Qp; - - // store chosen deck Q values (rew and pun) - Qr_chosen = Qr[choice[i, t]]; - Qp_chosen = Qp[choice[i, t]]; - - // First, update Qr & Qp for all decks w/ fictive updating - Qr += A[i] * PEr_fic; - Qp += A[i] * PEp_fic; - // Replace Q values of chosen deck with correct values using stored values - Qr[choice[i, t]] = Qr_chosen + A[i] * PEr; - Qp[choice[i, t]] = Qp_chosen + A[i] * PEp; - - // Q(sum) - Qsum = Qr + Qp; - } - } - } -} - diff --git a/inst/stan_files/bart_par4.stan b/inst/stan_files/bart_par4.stan deleted file mode 100644 index f66ca8f0..00000000 --- a/inst/stan_files/bart_par4.stan +++ /dev/null @@ -1,129 +0,0 @@ -#include /pre/license.stan - -data { - int N; // Number of subjects - int T; // Maximum number of trials - int Tsubj[N]; // Number of trials for each subject - int P; // Number of max pump + 1 ** CAUTION ** - int pumps[N, T]; // Number of pump - int explosion[N, T]; // Whether the balloon exploded (0 or 1) -} - -transformed data{ - // Whether a subject pump the button or not (0 or 1) - int d[N, T, P]; - - for (j in 1:N) { - for (k in 1:Tsubj[j]) { - for (l in 1:P) { - if (l <= pumps[j, k]) - d[j, k, l] = 1; - else - d[j, k, l] = 0; - } - } - } -} - -parameters { - // Group-level parameters - vector[4] mu_pr; - vector[4] sigma; - - // Normally distributed error for Matt trick - vector[N] phi_pr; - vector[N] eta_pr; - vector[N] gam_pr; - vector[N] tau_pr; -} - -transformed parameters { - // Subject-level parameters with Matt trick - vector[N] phi; - vector[N] eta; - vector[N] gam; - vector[N] tau; - - phi = Phi_approx(mu_pr[1] + sigma[1] * phi_pr); - eta = exp(mu_pr[2] + sigma[2] * eta_pr); - gam = exp(mu_pr[3] + sigma[3] * gam_pr); - tau = exp(mu_pr[4] + sigma[4] * tau_pr); -} - -model { - // Prior - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - phi_pr ~ normal(0, 1); - eta_pr ~ normal(0, 1); - gam_pr ~ normal(0, 1); - tau_pr ~ normal(0, 1); - - // Likelihood - for (j in 1:N) { - // Initialize n_succ and n_pump for a subject - int n_succ = 0; // Number of successful pumps - int n_pump = 0; // Number of total pumps - - for (k in 1:Tsubj[j]) { - real p_burst; // Belief on a balloon to be burst - real omega; // Optimal number of pumps - - p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump)); - omega = -gam[j] / log1m(p_burst); - - // Calculate likelihood with bernoulli distribution - for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) - d[j, k, l] ~ bernoulli_logit(tau[j] * (omega - l)); - - // Update n_succ and n_pump after each trial ends - n_succ += pumps[j, k] - explosion[j, k]; - n_pump += pumps[j, k]; - } - } -} - -generated quantities { - // Actual group-level mean - real mu_phi = Phi_approx(mu_pr[1]); - real mu_eta = exp(mu_pr[2]); - real mu_gam = exp(mu_pr[3]); - real mu_tau = exp(mu_pr[4]); - - // Log-likelihood for model fit - real log_lik = 0; - - // For posterior predictive check - real y_pred[N, T, P]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (j in 1:N) - for (k in 1:T) - for(l in 1:P) - y_pred[j, k, l] = -1; - - { // Local section to save time and space - for (j in 1:N) { - int n_succ = 0; - int n_pump = 0; - - for (k in 1:Tsubj[j]) { - real p_burst; 
// Belief on a balloon to be burst - real omega; // Optimal number of pumps - - p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump)); - omega = -gam[j] / log1m(p_burst); - - for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) { - log_lik += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l)); - y_pred[j, k, l] = bernoulli_logit_rng(tau[j] * (omega - l)); - } - - n_succ += pumps[j, k] - explosion[j, k]; - n_pump += pumps[j, k]; - } - } - } -} - diff --git a/inst/stan_files/choiceRT_ddm.stan b/inst/stan_files/choiceRT_ddm.stan deleted file mode 100644 index 58baaec6..00000000 --- a/inst/stan_files/choiceRT_ddm.stan +++ /dev/null @@ -1,98 +0,0 @@ -#include /pre/license.stan - -// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists -data { - int N; // Number of subjects - int Nu_max; // Max (across subjects) number of upper boundary responses - int Nl_max; // Max (across subjects) number of lower boundary responses - int Nu[N]; // Number of upper boundary responses for each subj - int Nl[N]; // Number of lower boundary responses for each subj - real RTu[N, Nu_max]; // upper boundary response times - real RTl[N, Nl_max]; // lower boundary response times - real minRT[N]; // minimum RT for each subject of the observed data - real RTbound; // lower bound or RT across all subjects (e.g., 0.1 second) -} - -parameters { - // parameters of the DDM (parameter names in Ratcliffs DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R - // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ - // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0 - // beta (b): Initial bias Bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 - // delta (v): Drift rate Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta - // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 0 < ter (in seconds) - ///* upper boundary of tau must be smaller than minimum RT - //to avoid zero likelihood for fast responses. 
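The models below enforce this by passing the raw parameter through Phi_approx and rescaling the result into the (RTbound, minRT) interval. The same transform sketched in R, using the exact normal CDF pnorm in place of Stan's Phi_approx, with illustrative values:

  RTbound <- 0.1                          # lower RT bound (seconds)
  minRT   <- 0.45                         # subject's fastest observed RT
  tau_raw <- 0.3                          # unconstrained, standard-normal scale
  tau <- pnorm(tau_raw) * (minRT - RTbound) + RTbound
  tau                                     # guaranteed to lie in (0.1, 0.45)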
- //tau can for physiological reasone not be faster than 0.1 s.*/ - - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; - vector[N] beta_pr; - vector[N] delta_pr; - vector[N] tau_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] alpha; // boundary separation - vector[N] beta; // initial bias - vector[N] delta; // drift rate - vector[N] tau; // nondecision time - - for (i in 1:N) { - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]); - tau[i] = Phi_approx(mu_pr[4] + sigma[4] * tau_pr[i]) * (minRT[i] - RTbound) + RTbound; - } - alpha = exp(mu_pr[1] + sigma[1] * alpha_pr); - delta = exp(mu_pr[3] + sigma[3] * delta_pr); -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Individual parameters for non-centered parameterization - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - delta_pr ~ normal(0, 1); - tau_pr ~ normal(0, 1); - - // Begin subject loop - for (i in 1:N) { - // Response time distributed along wiener first passage time distribution - RTu[i, :Nu[i]] ~ wiener(alpha[i], tau[i], beta[i], delta[i]); - RTl[i, :Nl[i]] ~ wiener(alpha[i], tau[i], 1-beta[i], -delta[i]); - - } // end of subject loop -} - -generated quantities { - // For group level parameters - real mu_alpha; // boundary separation - real mu_beta; // initial bias - real mu_delta; // drift rate - real mu_tau; // nondecision time - - // For log likelihood calculation - real log_lik[N]; - - // Assign group level parameter values - mu_alpha = exp(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]); - mu_delta = exp(mu_pr[3]); - mu_tau = Phi_approx(mu_pr[4]) * (mean(minRT)-RTbound) + RTbound; - - { // local section, this saves time and space - // Begin subject loop - for (i in 1:N) { - log_lik[i] = wiener_lpdf(RTu[i, :Nu[i]] | alpha[i], tau[i], beta[i], delta[i]); - log_lik[i] += wiener_lpdf(RTl[i, :Nl[i]] | alpha[i], tau[i], 1-beta[i], -delta[i]); - } - } -} - diff --git a/inst/stan_files/choiceRT_ddm_single.stan b/inst/stan_files/choiceRT_ddm_single.stan deleted file mode 100644 index 6bacd18a..00000000 --- a/inst/stan_files/choiceRT_ddm_single.stan +++ /dev/null @@ -1,58 +0,0 @@ -#include /pre/license.stan - -// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists -data { - int Nu; // of upper boundary responses - int Nl; // of lower boundary responses - real RTu[Nu]; // upper boundary response times - real RTl[Nl]; // lower boundary response times - real minRT; // minimum RT of the observed data - real RTbound; // lower bound or RT (e.g., 0.1 second) -} - -parameters { - // parameters of the DDM (parameter names in Ratcliffs DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R - // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ - // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0 - // beta (b): Initial bias Bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 - // delta (v): Drift rate Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta - // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 
0 < ter (in seconds) - ///* upper boundary of tau must be smaller than minimum RT - //to avoid zero likelihood for fast responses. - //tau can for physiological reasone not be faster than 0.1 s.*/ - - real alpha; // boundary separation - real beta; // initial bias - real delta; // drift rate - real tau; // nondecision time -} - -model { - alpha ~ uniform(0, 5); - beta ~ uniform(0, 1); - delta ~ normal(0, 2); - tau ~ uniform(RTbound, minRT); - - RTu ~ wiener(alpha, tau, beta, delta); - RTl ~ wiener(alpha, tau, 1-beta, -delta); -} - -generated quantities { - - // For log likelihood calculation - real log_lik; - - // For posterior predictive check (Not implementeed yet) - // vector[Nu] y_pred_upper; - // vector[Nl] y_pred_lower; - - { // local section, this saves time and space - log_lik = wiener_lpdf(RTu | alpha, tau, beta, delta); - log_lik += wiener_lpdf(RTl | alpha, tau, 1-beta, -delta); - - // generate posterior predictions (Not implemented yet) - // y_pred_upper = wiener_rng(alpha, tau, beta, delta); - // y_pred_lower = wiener_rng(alpha, tau, 1-beta, -delta); - } -} - diff --git a/inst/stan_files/choiceRT_lba.stan b/inst/stan_files/choiceRT_lba.stan deleted file mode 100644 index 222e5a27..00000000 --- a/inst/stan_files/choiceRT_lba.stan +++ /dev/null @@ -1,278 +0,0 @@ -#include /pre/license.stan - -// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). -// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. -functions { - real lba_pdf(real t, real b, real A, real v_pdf, real s) { - //PDF of the LBA model - real b_A_tv_ts; - real b_tv_ts; - real term_1b; - real term_2b; - real term_3b; - real term_4b; - real pdf; - - b_A_tv_ts = (b - A - t * v_pdf)/(t * s); - b_tv_ts = (b - t * v_pdf)/(t * s); - - term_1b = v_pdf * Phi(b_A_tv_ts); - term_2b = s * exp(normal_lpdf(fabs(b_A_tv_ts) | 0, 1)); - term_3b = v_pdf * Phi(b_tv_ts); - term_4b = s * exp(normal_lpdf(fabs(b_tv_ts) | 0, 1)); - - pdf = (1/A) * (-term_1b + term_2b + term_3b - term_4b); - - return pdf; - } - - real lba_cdf(real t, real b, real A, real v_cdf, real s) { - //CDF of the LBA model - real b_A_tv; - real b_tv; - real ts; - real term_1a; - real term_2a; - real term_3a; - real term_4a; - real cdf; - - b_A_tv = b - A - t * v_cdf; - b_tv = b - t * v_cdf; - ts = t * s; - - term_1a = b_A_tv/A * Phi(b_A_tv/ts); - term_2a = b_tv/A * Phi(b_tv/ts); - term_3a = ts/A * exp(normal_lpdf(fabs(b_A_tv/ts) | 0, 1)); - term_4a = ts/A * exp(normal_lpdf(fabs(b_tv/ts) | 0, 1)); - - cdf = 1 + term_1a - term_2a + term_3a - term_4a; - - return cdf; - } - - real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { - - real t; - real b; - real cdf; - real pdf; - vector[cols(RT)] prob; - real out; - real prob_neg; - - b = A + d; - for (i in 1:cols(RT)) { - t = RT[1, i] - tau; - if (t > 0) { - cdf = 1; - for (j in 1:num_elements(v)) { - if (RT[2, i] == j) { - pdf = lba_pdf(t, b, A, v[j], s); - } else { - cdf *= lba_cdf(t, b, A, v[j], s); - } - } - prob_neg = 1; - for (j in 1:num_elements(v)) { - prob_neg *= Phi(-v[j]/s); - } - prob[i] = pdf * (1-cdf); - prob[i] /= (1-prob_neg); - if (prob[i] < 1e-10) { - prob[i] = 1e-10; - } - - } else { - prob[i] = 1e-10; - } - } - out = sum(log(prob)); - return out; - } - - vector lba_rng(real d, real A, vector v, real s, real tau) { - - int get_pos_drift; - int no_pos_drift; - int get_first_pos; - vector[num_elements(v)] drift; - int max_iter; - int iter; - real start[num_elements(v)]; - real ttf[num_elements(v)]; - int 
resp[num_elements(v)]; - real rt; - vector[2] pred; - real b; - - //try to get a positive drift rate - get_pos_drift = 1; - no_pos_drift = 0; - max_iter = 1000; - iter = 0; - while(get_pos_drift) { - for (j in 1:num_elements(v)) { - drift[j] = normal_rng(v[j], s); - if (drift[j] > 0) { - get_pos_drift = 0; - } - } - iter += 1; - if (iter > max_iter) { - get_pos_drift = 0; - no_pos_drift = 1; - } - } - //if both drift rates are <= 0 - //return an infinite response time - if (no_pos_drift) { - pred[1] = -1; - pred[2] = -1; - } else { - b = A + d; - for (i in 1:num_elements(v)) { - //start time of each accumulator - start[i] = uniform_rng(0, A); - //finish times - ttf[i] = (b-start[i])/drift[i]; - } - //rt is the fastest accumulator finish time - //if one is negative get the positive drift - resp = sort_indices_asc(ttf); - { - real temp_ttf[num_elements(v)]; - temp_ttf = sort_asc(ttf); - ttf = temp_ttf; - } - get_first_pos = 1; - iter = 1; - while(get_first_pos) { - if (ttf[iter] > 0) { - pred[1] = ttf[iter]; - pred[2] = resp[iter]; - get_first_pos = 0; - } - iter += 1; - } - } - return pred; - } -} -data { - int N; - int Max_tr; - int N_choices; - int N_cond; - int N_tr_cond[N, N_cond]; - matrix[2, Max_tr] RT[N, N_cond]; - -} - -parameters { - // Hyperparameter means - real mu_d; - real mu_A; - real mu_tau; - vector[N_choices] mu_v[N_cond]; - - // Hyperparameter sigmas - real sigma_d; - real sigma_A; - real sigma_tau; - vector[N_choices] sigma_v[N_cond]; - - // Individual parameters - real d[N]; - real A[N]; - real tau[N]; - vector[N_choices] v[N, N_cond]; -} -transformed parameters { - // s is set to 1 to make model identifiable - real s; - s = 1; -} -model { - // Hyperparameter means - mu_d ~ normal(.5, 1)T[0,]; - mu_A ~ normal(.5, 1)T[0,]; - mu_tau ~ normal(.5, .5)T[0,]; - - // Hyperparameter sigmas - sigma_d ~ gamma(1, 1); - sigma_A ~ gamma(1, 1); - sigma_tau ~ gamma(1, 1); - - // Hyperparameter means and sigmas for multiple drift rates - for (j in 1:N_cond) { - for (n in 1:N_choices) { - mu_v[j, n] ~ normal(2, 1)T[0,]; - sigma_v[j, n] ~ gamma(1, 1); - } - } - - for (i in 1:N) { - // Declare variables - int n_trials; - - // Individual parameters - d[i] ~ normal(mu_d, sigma_d)T[0,]; - A[i] ~ normal(mu_A, sigma_A)T[0,]; - tau[i] ~ normal(mu_tau, sigma_tau)T[0,]; - - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = N_tr_cond[i, j]; - - for (n in 1:N_choices) { - // Drift rate is normally distributed - v[i, j, n] ~ normal(mu_v[j, n], sigma_v[j, n])T[0,]; - } - // Likelihood of RT x Choice - RT[i, j, , 1:n_trials] ~ lba(d[i], A[i], v[i, j,], s, tau[i]); - } - } -} - -generated quantities { - // Declare variables - int n_trials; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - matrix[2, Max_tr] y_pred[N, N_cond]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (j in 1:N_cond) { - for (t in 1:Max_tr) { - y_pred[i, j, , t] = rep_vector(-1, 2); - } - } - } - - { // local section, this saves time and space - for (i in 1:N) { - // Initialize variables - log_lik[i] = 0; - - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = N_tr_cond[i, j]; - - // Sum likelihood over conditions within subjects - log_lik[i] += lba_lpdf(RT[i, j, , 1:n_trials] | d[i], A[i], v[i, j,], s, tau[i]); - - for (t in 1:n_trials) { - // generate posterior predictions - y_pred[i, j, , t] = lba_rng(d[i], A[i], v[i, j,], s, tau[i]); - } - } - } - // end of subject 
loop - } -} - diff --git a/inst/stan_files/choiceRT_lba_single.stan b/inst/stan_files/choiceRT_lba_single.stan deleted file mode 100644 index 1d5fd992..00000000 --- a/inst/stan_files/choiceRT_lba_single.stan +++ /dev/null @@ -1,239 +0,0 @@ -#include /pre/license.stan - -// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). -// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. -functions { - real lba_pdf(real t, real b, real A, real v, real s) { - //PDF of the LBA model - real b_A_tv_ts; - real b_tv_ts; - real term_1; - real term_2; - real term_3; - real term_4; - real pdf; - - b_A_tv_ts = (b - A - t * v)/(t * s); - b_tv_ts = (b - t * v)/(t * s); - - term_1 = v * Phi(b_A_tv_ts); - term_2 = s * exp(normal_lpdf(b_A_tv_ts | 0, 1)); - term_3 = v * Phi(b_tv_ts); - term_4 = s * exp(normal_lpdf(b_tv_ts | 0, 1)); - - pdf = (1/A) * (-term_1 + term_2 + term_3 - term_4); - - return pdf; - } - - real lba_cdf(real t, real b, real A, real v, real s) { - //CDF of the LBA model - real b_A_tv; - real b_tv; - real ts; - real term_1; - real term_2; - real term_3; - real term_4; - real cdf; - - b_A_tv = b - A - t * v; - b_tv = b - t * v; - ts = t * s; - - term_1 = b_A_tv/A * Phi(b_A_tv/ts); - term_2 = b_tv/A * Phi(b_tv/ts); - term_3 = ts/A * exp(normal_lpdf(b_A_tv/ts | 0, 1)); - term_4 = ts/A * exp(normal_lpdf(b_tv/ts | 0, 1)); - - cdf = 1 + term_1 - term_2 + term_3 - term_4; - - return cdf; - - } - - real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { - - real t; - real b; - real cdf; - real pdf; - vector[rows(RT)] prob; - real out; - real prob_neg; - - b = A + d; - for (i in 1:rows(RT)) { - t = RT[1, i] - tau; - if (t > 0) { - cdf = 1; - - for (j in 1:num_elements(v)) { - if (RT[2, i] == j) { - pdf = lba_pdf(t, b, A, v[j], s); - } else { - cdf *= (1-lba_cdf(t, b, A, v[j], s)); - } - } - prob_neg = 1; - for (j in 1:num_elements(v)) { - prob_neg *= Phi(-v[j]/s); - } - prob[i] = pdf * cdf; - prob[i] /= (1-prob_neg); - if (prob[i] < 1e-10) { - prob[i] = 1e-10; - } - - } else { - prob[i] = 1e-10; - } - } - out = sum(log(prob)); - return out; - } - - vector lba_rng(real d, real A, vector v, real s, real tau) { - - int get_pos_drift; - int no_pos_drift; - int get_first_pos; - vector[num_elements(v)] drift; - int max_iter; - int iter; - real start[num_elements(v)]; - real ttf[num_elements(v)]; - int resp[num_elements(v)]; - real rt; - vector[2] pred; - real b; - - //try to get a positive drift rate - get_pos_drift = 1; - no_pos_drift = 0; - max_iter = 1000; - iter = 0; - while(get_pos_drift) { - for (j in 1:num_elements(v)) { - drift[j] = normal_rng(v[j], s); - if (drift[j] > 0) { - get_pos_drift = 0; - } - } - iter += 1; - if (iter > max_iter) { - get_pos_drift = 0; - no_pos_drift = 1; - } - } - //if both drift rates are <= 0 - //return an infinite response time - if (no_pos_drift) { - pred[1] = -1; - pred[2] = -1; - } else { - b = A + d; - for (i in 1:num_elements(v)) { - //start time of each accumulator - start[i] = uniform_rng(0, A); - //finish times - ttf[i] = (b-start[i])/drift[i]; - } - //rt is the fastest accumulator finish time - //if one is negative get the positive drift - resp = sort_indices_asc(ttf); - { - real temp_ttf[num_elements(v)]; - temp_ttf = sort_asc(ttf); - ttf = temp_ttf; - } - get_first_pos = 1; - iter = 1; - while(get_first_pos) { - if (ttf[iter] > 0) { - pred[1] = ttf[iter] + tau; - pred[2] = resp[iter]; - get_first_pos = 0; - } - iter += 1; - } - } - return pred; - } -} -data { - int 
N_choice; - int N_cond; - int tr_cond[N_cond]; - int max_tr; - matrix[2, max_tr] RT[N_cond]; -} - -parameters { - real d; - real A; - real tau; - vector[N_choice] v[N_cond]; -} -transformed parameters { - real s; - s = 1; -} -model { - // Declare variables - int n_trials; - - // Individual parameters - d ~ normal(.5, 1)T[0,]; - A ~ normal(.5, 1)T[0,]; - tau ~ normal(.5, .5)T[0,]; - - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = tr_cond[j]; - - for (n in 1:N_choice) { - // Drift rate is normally distributed - v[j, n] ~ normal(2, 1)T[0,]; - } - // Likelihood of RT x Choice - RT[j, , 1:n_trials] ~ lba(d, A, v[j,], s, tau); - } -} - -generated quantities { - // Declare variables - int n_trials; - - // For log likelihood calculation - real log_lik; - - // For posterior predictive check - matrix[2, max_tr] y_pred[N_cond]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (j in 1:N_cond) { - for (t in 1:max_tr) { - y_pred[j, , t] = rep_vector(-1, 2); - } - } - - // initialize log_lik - log_lik = 0; - - { // local section, this saves time and space - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = tr_cond[j]; - - // Sum likelihood over conditions within subjects - log_lik += lba_lpdf(RT[j, , 1:n_trials] | d, A, v[j,], s, tau); - - for (t in 1:n_trials) { - // generate posterior predictions - y_pred[j, , t] = lba_rng(d, A, v[j,], s, tau); - } - } - } -} - diff --git a/inst/stan_files/cra_exp.stan b/inst/stan_files/cra_exp.stan deleted file mode 100644 index 86a44a0e..00000000 --- a/inst/stan_files/cra_exp.stan +++ /dev/null @@ -1,134 +0,0 @@ -#include /pre/license.stan - -/** - * Choice under Risk and Ambiguity Task - * - * Exponential model in Hsu et al. 
(2005) Science - */ - -functions { - /** - * Subjective value function with the exponential equation form - */ - real subjective_value(real alpha, real beta, real p, real a, real v) { - return pow(p, 1 + beta * a) * pow(v, alpha); - } -} - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/block for each subject - - int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) - real prob[N, T]; // The objective probability of the variable lottery - real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) - real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) - real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // risk attitude parameter - vector[N] beta_pr; // ambiguity attitude parameter - vector[N] gamma_pr; // inverse temperature parameter -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] alpha; - vector[N] beta; - vector[N] gamma; - - alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; - beta = mu_pr[2] + sigma[2] * beta_pr; - gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); -} - -model { - // hyper parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 5); - - // individual parameters w/ Matt trick - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - gamma_pr ~ normal(0, 1); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - real p_var; // probability of choosing the variable option - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var = inv_logit(gamma[i] * (u_var - u_fix)); - - target += bernoulli_lpmf(choice[i, t] | p_var); - } - } -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_gamma; - - // For log likelihood calculation for each subject - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Model regressors - real sv[N, T]; - real sv_fix[N, T]; - real sv_var[N, T]; - real p_var[N, T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - sv[i, t] = 0; - sv_fix[i, t] = 0; - sv_var[i, t] = 0; - p_var[i, t] = 0; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 2; - mu_beta = mu_pr[2]; - mu_gamma = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Initialize the log likelihood variable to 0. - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); - - sv_fix[i, t] = u_fix; - sv_var[i, t] = u_var; - sv[i, t] = (choice[i, t] == 1) ? 
u_var : u_fix; - - log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); - y_pred[i, t] = bernoulli_rng(p_var[i, t]); - } - } - } -} - diff --git a/inst/stan_files/cra_linear.stan b/inst/stan_files/cra_linear.stan deleted file mode 100644 index b8653c85..00000000 --- a/inst/stan_files/cra_linear.stan +++ /dev/null @@ -1,130 +0,0 @@ -#include /pre/license.stan - -/** - * Choice under Risk and Ambiguity Task - * - * Linear model in Levy et al. (2010) J Neurophysiol - */ - -functions { - /** - * Subjective value function with the linear equation form - */ - real subjective_value(real alpha, real beta, real p, real a, real v) { - return (p - beta * a / 2) * pow(v, alpha); - } -} - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/block for each subject - - int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) - real prob[N, T]; // The objective probability of the variable lottery - real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) - real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) - real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // risk attitude parameter - vector[N] beta_pr; // ambiguity attitude parameter - vector[N] gamma_pr; // inverse temperature parameter -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] alpha; - vector[N] beta; - vector[N] gamma; - - alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; - beta = mu_pr[2] + sigma[2] * beta_pr; - gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); -} - -model { - // hyper parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 5); - - // individual parameters w/ Matt trick - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - gamma_pr ~ normal(0, 1); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - real p_var; // probability of choosing the variable option - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var = inv_logit(gamma[i] * (u_var - u_fix)); - - target += bernoulli_lpmf(choice[i, t] | p_var); - } - } -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_gamma; - - // For log likelihood calculation for each subject - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Model regressors - real sv[N, T]; - real sv_fix[N, T]; - real sv_var[N, T]; - real p_var[N, T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 2; - mu_beta = mu_pr[2]; - mu_gamma = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Initialize the log likelihood variable to 0. 
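The linear form above values a lottery with winning probability p, ambiguity level a, and payoff v as (p - beta * a / 2) * v^alpha, and choices follow a logistic rule on the value difference. A compact R sketch with invented parameter values (plogis is R's inv_logit):

  subjective_value <- function(alpha, beta, p, a, v) (p - beta * a / 2) * v^alpha
  alpha <- 0.9; beta <- 0.4; gamma <- 2.0                            # illustrative
  u_fix <- subjective_value(alpha, beta, p = 0.50, a = 0.0, v = 5)   # reference lottery
  u_var <- subjective_value(alpha, beta, p = 0.25, a = 0.5, v = 20)  # ambiguous lottery
  p_var <- plogis(gamma * (u_var - u_fix))         # P(choose the variable option)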
- log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); - - sv_fix[i, t] = u_fix; - sv_var[i, t] = u_var; - sv[i, t] = (choice[i, t] == 1) ? u_var : u_fix; - - log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); - y_pred[i, t] = bernoulli_rng(p_var[i, t]); - } - } - } -} - diff --git a/inst/stan_files/dbdm_prob_weight.stan b/inst/stan_files/dbdm_prob_weight.stan deleted file mode 100644 index ee248835..00000000 --- a/inst/stan_files/dbdm_prob_weight.stan +++ /dev/null @@ -1,154 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real opt1hprob[N, T]; - real opt2hprob[N, T]; - real opt1hval[N, T]; - real opt1lval[N, T]; - real opt2hval[N, T]; - real opt2lval[N, T]; -} -transformed data { -} -parameters{ - //group-level parameters - vector[4] mu_pr; - vector[4] sigma; - - //subject-level raw parameters, follows norm(0,1), for later Matt Trick - vector[N] tau_pr; //probability weight parameter - vector[N] rho_pr; //subject utility parameter - vector[N] lambda_pr; //loss aversion parameter - vector[N] beta_pr; //inverse softmax temperature -} - -transformed parameters { - //subject-level parameters - vector[N] tau; - vector[N] rho; - vector[N] lambda; - vector[N] beta; - - //Matt Trick - for (i in 1:N) { - tau[i] = Phi_approx( mu_pr[1] + sigma[1] * tau_pr[i] ); - rho[i] = Phi_approx( mu_pr[2] + sigma[2] * rho_pr[i] )*2; - lambda[i] = Phi_approx( mu_pr[3] + sigma[3] * lambda_pr[i] )*5; - beta[i] = Phi_approx( mu_pr[4] + sigma[4] * beta_pr[i] ); - } -} - -model { - //prior : hyperparameters - mu_pr ~ normal(0,1); - sigma ~ cauchy(0,5); - - //prior : individual parameters - tau_pr ~ normal(0,1); - rho_pr ~ normal(0,1); - lambda_pr ~ normal(0,1); - beta_pr ~ normal(0,1); - - //subject loop and trial loop - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - vector[4] w_prob; - vector[2] U_opt; - - //probability weight function - w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); - w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); - w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); - w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); - - if (opt1hval[i,t]>0) { - if (opt1lval[i,t]>= 0) { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); - } else { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - - if (opt2hval[i,t] > 0) { - if (opt2lval[i,t] >= 0) { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); - } else { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - // compute action probabilities - choice[i, t] ~ categorical_logit(U_opt*beta[i]); - } - } -} - -generated quantities { - real mu_tau; - real mu_rho; - real mu_lambda; - real mu_beta; - real log_lik[N]; - // For posterior predictive check - real y_pred[N,T]; - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - 
y_pred[i, t] = -1; - } - } - - mu_tau = Phi_approx(mu_pr[1]); - mu_rho = Phi_approx(mu_pr[2])*2; - mu_lambda = Phi_approx(mu_pr[3])*5; - mu_beta = Phi_approx(mu_pr[4]); - - { // local section, this saves time and space - for (i in 1:N) { - log_lik[i] = 0; - for (t in 1:Tsubj[i]) { - vector[4] w_prob; - vector[2] U_opt; - - //probability weight function - w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); - w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); - w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); - w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); - - if (opt1hval[i,t]>0) { - if (opt1lval[i,t]>= 0) { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); - } else { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - - if (opt2hval[i,t] > 0) { - if (opt2lval[i,t] >= 0) { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); - } else { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - - // compute action probabilities - log_lik[i] += categorical_logit_lpmf(choice[i,t] | U_opt*beta[i]); - y_pred[i, t] = categorical_rng(softmax(U_opt*beta[i])); - - } - } - } -} - diff --git a/inst/stan_files/dd_cs.stan b/inst/stan_files/dd_cs.stan deleted file mode 100644 index d221d34a..00000000 --- a/inst/stan_files/dd_cs.stan +++ /dev/null @@ -1,107 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] r_pr; // (exponential) discounting rate (Impatience) - vector[N] s_pr; // time-sensitivity - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] r; - vector[N] s; - vector[N] beta; - - for (i in 1:N) { - r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); - s[i] = Phi_approx(mu_pr[2] + sigma[2] * s_pr[i]) * 10; - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 5; - } -} - -model { -// Constant-sensitivity model (Ebert & Prelec, 2007) - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - s_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); - ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_r; - real mu_s; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } 
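The constant-sensitivity model above discounts an amount A at delay D as A * exp(-(r * D)^s), with the time-sensitivity s bending the discount curve away from pure exponential decay. A minimal R sketch of one trial's valuation and choice probability, with invented parameter values:

  r <- 0.05; s <- 0.8; beta <- 1.5                    # illustrative parameters
  ev_later  <- 80 * exp(-(r * 30)^s)                  # 80 units at delay 30
  ev_sooner <- 40 * exp(-(r * 0)^s)                   # 40 units now (undiscounted)
  p_later <- plogis(beta * (ev_later - ev_sooner))    # P(choose the delayed reward)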
- - mu_r = Phi_approx(mu_pr[1]); - mu_s = Phi_approx(mu_pr[2]) * 10; - mu_beta = Phi_approx(mu_pr[3]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); - ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git a/inst/stan_files/dd_cs_single.stan b/inst/stan_files/dd_cs_single.stan deleted file mode 100644 index 2436b8b1..00000000 --- a/inst/stan_files/dd_cs_single.stan +++ /dev/null @@ -1,63 +0,0 @@ -#include /pre/license.stan - -data { - int Tsubj; - real delay_later[Tsubj]; - real amount_later[Tsubj]; - real delay_sooner[Tsubj]; - real amount_sooner[Tsubj]; - int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { - real r; // (exponential) discounting rate - real s; // impatience - real beta; // inverse temperature -} - -transformed parameters { - real ev_later[Tsubj]; - real ev_sooner[Tsubj]; - - for (t in 1:Tsubj) { - ev_later[t] = amount_later[t] * exp(-1* (pow(r * delay_later[t], s))); - ev_sooner[t] = amount_sooner[t] * exp(-1* (pow(r * delay_sooner[t], s))); - } -} - -model { - // constant-sensitivity model (Ebert & Prelec, 2007) - // hyperparameters - r ~ uniform(0, 1); - s ~ uniform(0, 10); - beta ~ uniform(0, 5); - - for (t in 1:Tsubj) { - choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); - } -} - -generated quantities { - real logR; - real log_lik; - - // For posterior predictive check - real y_pred[Tsubj]; - - logR = log(r); - - { // local section, this saves time and space - log_lik = 0; - - for (t in 1:Tsubj) { - log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); - - // generate posterior prediction for current trial - y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); - } - } -} - diff --git a/inst/stan_files/dd_exp.stan b/inst/stan_files/dd_exp.stan deleted file mode 100644 index 3d772a5a..00000000 --- a/inst/stan_files/dd_exp.stan +++ /dev/null @@ -1,101 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] r_pr; - vector[N] beta_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] r; - vector[N] beta; - - for (i in 1:N) { - r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; - } -} - -model { -// Exponential function - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); - ev_sooner = 
amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_r; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_r = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git a/inst/stan_files/dd_hyperbolic.stan b/inst/stan_files/dd_hyperbolic.stan deleted file mode 100644 index 1551304a..00000000 --- a/inst/stan_files/dd_hyperbolic.stan +++ /dev/null @@ -1,101 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] k_pr; - vector[N] beta_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] k; - vector[N] beta; - - for (i in 1:N) { - k[i] = Phi_approx(mu_pr[1] + sigma[1] * k_pr[i]); - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; - } -} - -model { -// Hyperbolic function - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - k_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_k; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_k = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git 
a/inst/stan_files/dd_hyperbolic_single.stan b/inst/stan_files/dd_hyperbolic_single.stan deleted file mode 100644 index be3011f0..00000000 --- a/inst/stan_files/dd_hyperbolic_single.stan +++ /dev/null @@ -1,57 +0,0 @@ -#include /pre/license.stan - -data { - int Tsubj; - real delay_later[Tsubj]; - real amount_later[Tsubj]; - real delay_sooner[Tsubj]; - real amount_sooner[Tsubj]; - int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { - real k; // discounting rate - real beta; // inverse temperature -} - -transformed parameters { - real ev_later[Tsubj]; - real ev_sooner[Tsubj]; - - for (t in 1:Tsubj) { - ev_later[t] = amount_later[t] / (1 + k * delay_later[t]); - ev_sooner[t] = amount_sooner[t] / (1 + k * delay_sooner[t]); - } -} - -model { - k ~ uniform(0, 1); - beta ~ uniform(0, 5); - - for (t in 1:Tsubj) { - choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); - } -} -generated quantities { - real logK; - real log_lik; - - // For posterior predictive check - real y_pred[Tsubj]; - - logK = log(k); - - { // local section, this saves time and space - log_lik = 0; - for (t in 1:Tsubj) { - log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); - - // generate posterior prediction for current trial - y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); - } - } -} - diff --git a/inst/stan_files/gng_m1.stan b/inst/stan_files/gng_m1.stan deleted file mode 100644 index 5ac8abd0..00000000 --- a/inst/stan_files/gng_m1.stan +++ /dev/null @@ -1,149 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[3] mu_pr; - vector[3] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - rho = exp(mu_pr[3] + sigma[3] * rho_pr); -} - -model { -// gng_m1: RW + noise model in Guitart-Masip et al 2012 - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_rho; - real log_lik[N]; - real Qgo[N, 
T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_rho = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/inst/stan_files/gng_m2.stan b/inst/stan_files/gng_m2.stan deleted file mode 100644 index c9a8ced8..00000000 --- a/inst/stan_files/gng_m2.stan +++ /dev/null @@ -1,160 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[4] mu_pr; - vector[4] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - rho = exp(mu_pr[4] + sigma[4] * rho_pr); -} - -model { -// gng_m2: RW + noise + bias model in Guitart-Masip et al 2012 - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ normal(0, 1.0); - mu_pr[3] ~ normal(0, 10.0); - mu_pr[4] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - sigma[4] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - 
pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_rho; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_b = mu_pr[3]; - mu_rho = exp(mu_pr[4]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/inst/stan_files/gng_m3.stan b/inst/stan_files/gng_m3.stan deleted file mode 100644 index 2368ea1a..00000000 --- a/inst/stan_files/gng_m3.stan +++ /dev/null @@ -1,179 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[5] mu_pr; - vector[5] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] pi_pr; // pavlovian bias - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] pi; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - pi = mu_pr[4] + sigma[4] * pi_pr; - rho = exp(mu_pr[5] + sigma[5] * rho_pr); -} - -model { -// gng_m3: RW + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ normal(0, 1.0); - mu_pr[3] ~ 
normal(0, 10.0); - mu_pr[4] ~ normal(0, 10.0); - mu_pr[5] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3:4] ~ cauchy(0, 1.0); - sigma[5] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - pi_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // after receiving feedback, update sv[t + 1] - sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_pi; - real mu_rho; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - real SV[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_b = mu_pr[3]; - mu_pi = mu_pr[4]; - mu_rho = exp(mu_pr[5]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - SV[i, t] = sv[cue[i, t]]; - - // after receiving feedback, update sv[t + 1] - sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop - 
} // end of local section -} - diff --git a/inst/stan_files/gng_m4.stan b/inst/stan_files/gng_m4.stan deleted file mode 100644 index 73e30cb1..00000000 --- a/inst/stan_files/gng_m4.stan +++ /dev/null @@ -1,210 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[6] mu_pr; - vector[6] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] pi_pr; // pavlovian bias - vector[N] rhoRew_pr; // rho reward, inv temp - vector[N] rhoPun_pr; // rho punishment, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] pi; - vector[N] rhoRew; - vector[N] rhoPun; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - pi = mu_pr[4] + sigma[4] * pi_pr; - rhoRew = exp(mu_pr[5] + sigma[5] * rhoRew_pr); - rhoPun = exp(mu_pr[6] + sigma[6] * rhoPun_pr); -} - -model { -// gng_m4: RW(rew/pun) + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ normal(0, 1.0); - mu_pr[3] ~ normal(0, 10.0); - mu_pr[4] ~ normal(0, 10.0); - mu_pr[5] ~ normal(0, 1.0); - mu_pr[6] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3:4] ~ cauchy(0, 1.0); - sigma[5:6] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - pi_pr ~ normal(0, 1.0); - rhoRew_pr ~ normal(0, 1.0); - rhoPun_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // after receiving feedback, update sv[t + 1] - if (outcome[i, t] >= 0) { - sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); - } else { - sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); - } - - // update action values - if (pressed[i, t]) { // update go value - if (outcome[i, t] >=0) { - qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { - qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); - } - } else { // update no-go value - if (outcome[i, t] >=0) { - qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } else { - qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_pi; - real mu_rhoRew; - real mu_rhoPun; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - real SV[N, T]; - - // For posterior 
predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_b = mu_pr[3]; - mu_pi = mu_pr[4]; - mu_rhoRew = exp(mu_pr[5]); - mu_rhoPun = exp(mu_pr[6]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - SV[i, t] = sv[cue[i, t]]; - - // after receiving feedback, update sv[t + 1] - if (outcome[i, t] >= 0) { - sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); - } else { - sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); - } - - // update action values - if (pressed[i, t]) { // update go value - if (outcome[i, t] >=0) { - qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { - qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); - } - } else { // update no-go value - if (outcome[i, t] >=0) { - qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } else { - qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/inst/stan_files/igt_orl.stan b/inst/stan_files/igt_orl.stan deleted file mode 100644 index a560de27..00000000 --- a/inst/stan_files/igt_orl.stan +++ /dev/null @@ -1,207 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; - real sign_out[N, T]; -} -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[5] mu_pr; - vector[5] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Arew_pr; - vector[N] Apun_pr; - vector[N] K_pr; - vector[N] betaF_pr; - vector[N] betaP_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] Arew; - vector[N] Apun; - vector[N] K; - vector[N] betaF; - vector[N] betaP; - - for (i in 1:N) { - Arew[i] = Phi_approx( mu_pr[1] + sigma[1] * Arew_pr[i] ); - Apun[i] = Phi_approx( mu_pr[2] + sigma[2] * Apun_pr[i] ); - K[i] = Phi_approx(mu_pr[3] + sigma[3] * K_pr[i]) * 5; - } - betaF = mu_pr[4] + sigma[4] * betaF_pr; - betaP = mu_pr[5] + sigma[5] * betaP_pr; -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1:3] ~ normal(0, 0.2); - sigma[4:5] ~ cauchy(0, 1.0); - - // individual 
parameters - Arew_pr ~ normal(0, 1.0); - Apun_pr ~ normal(0, 1.0); - K_pr ~ normal(0, 1.0); - betaF_pr ~ normal(0, 1.0); - betaP_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] ef; - vector[4] ev; - vector[4] PEfreq_fic; - vector[4] PEval_fic; - vector[4] pers; // perseverance - vector[4] util; - - real PEval; - real PEfreq; - real efChosen; - real evChosen; - real K_tr; - - // Initialize values - ef = initV; - ev = initV; - pers = initV; // initial pers values - util = initV; - K_tr = pow(3, K[i]) - 1; - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit( util ); - - // Prediction error - PEval = outcome[i,t] - ev[ choice[i,t]]; - PEfreq = sign_out[i,t] - ef[ choice[i,t]]; - PEfreq_fic = -sign_out[i,t]/3 - ef; - - // store chosen deck ev - efChosen = ef[ choice[i,t]]; - evChosen = ev[ choice[i,t]]; - - if (outcome[i,t] >= 0) { - // Update ev for all decks - ef += Apun[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Arew[i] * PEval; - } else { - // Update ev for all decks - ef += Arew[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Apun[i] * PEval; - } - - // Perseverance updating - pers[ choice[i,t] ] = 1; // perseverance term - pers /= (1 + K_tr); // decay - - // Utility of expected value and perseverance - util = ev + ef * betaF[i] + pers * betaP[i]; - } - } -} - -generated quantities { - // For group level parameters - real mu_Arew; - real mu_Apun; - real mu_K; - real mu_betaF; - real mu_betaP; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N,T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i,t] = -1; - } - } - - mu_Arew = Phi_approx(mu_pr[1]); - mu_Apun = Phi_approx(mu_pr[2]); - mu_K = Phi_approx(mu_pr[3]) * 5; - mu_betaF = mu_pr[4]; - mu_betaP = mu_pr[5]; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ef; - vector[4] ev; - vector[4] PEfreq_fic; - vector[4] PEval_fic; - vector[4] pers; // perseverance - vector[4] util; - - real PEval; - real PEfreq; - real efChosen; - real evChosen; - real K_tr; - - // Initialize values - log_lik[i] = 0; - ef = initV; - ev = initV; - pers = initV; // initial pers values - util = initV; - K_tr = pow(3, K[i]) - 1; - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf( choice[i, t] | util ); - - // generate posterior prediction for current trial - y_pred[i,t] = categorical_rng(softmax(util)); - - // Prediction error - PEval = outcome[i,t] - ev[ choice[i,t]]; - PEfreq = sign_out[i,t] - ef[ choice[i,t]]; - PEfreq_fic = -sign_out[i,t]/3 - ef; - - // store chosen deck ev - efChosen = ef[ choice[i,t]]; - evChosen = ev[ choice[i,t]]; - - if (outcome[i,t] >= 0) { - // Update ev for all decks - ef += Apun[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Arew[i] * PEval; - } else { - // Update ev for all decks - ef += Arew[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Apun[i] * PEval; - } - - // Perseverance updating - pers[ choice[i,t] ] = 1; // perseverance term - pers /= (1 + K_tr); // decay - - // Utility of expected value and 
perseverance - util = ev + ef * betaF[i] + pers * betaP[i]; - } - } - } -} - diff --git a/inst/stan_files/igt_pvl_decay.stan b/inst/stan_files/igt_pvl_decay.stan deleted file mode 100644 index 2d908a19..00000000 --- a/inst/stan_files/igt_pvl_decay.stan +++ /dev/null @@ -1,134 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; -} -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - cons_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * ev); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // decay-RI - ev *= A[i]; - ev[choice[i, t]] += curUtil; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * ev)); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // decay-RI - ev *= A[i]; - ev[choice[i, t]] += curUtil; - } - } - } -} - diff --git a/inst/stan_files/igt_pvl_delta.stan b/inst/stan_files/igt_pvl_delta.stan deleted file mode 100644 index 05c6e870..00000000 --- a/inst/stan_files/igt_pvl_delta.stan +++ /dev/null @@ -1,132 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; -} -transformed 
data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - } -} -model { -// Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - cons_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * ev); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // delta - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * ev)); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // delta - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - } - } - } -} - diff --git a/inst/stan_files/igt_vpp.stan b/inst/stan_files/igt_vpp.stan deleted file mode 100644 index 61c2b831..00000000 --- a/inst/stan_files/igt_vpp.stan +++ /dev/null @@ -1,188 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[8] mu_pr; - vector[8] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; - vector[N] epP_pr; - vector[N] epN_pr; - vector[N] K_pr; - vector[N] w_pr; -} - 
-transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - vector[N] epP; - vector[N] epN; - vector[N] K; - vector[N] w; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - K[i] = Phi_approx(mu_pr[7] + sigma[7] * K_pr[i]); - w[i] = Phi_approx(mu_pr[8] + sigma[8] * w_pr[i]); - } - epP = mu_pr[5] + sigma[5] * epP_pr; - epN = mu_pr[6] + sigma[6] * epN_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1.0); - sigma[1:4] ~ normal(0, 0.2); - sigma[5:6] ~ cauchy(0, 1.0); - sigma[7:8] ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1.0); - alpha_pr ~ normal(0, 1.0); - cons_pr ~ normal(0, 1.0); - lambda_pr ~ normal(0, 1.0); - epP_pr ~ normal(0, 1.0); - epN_pr ~ normal(0, 1.0); - K_pr ~ normal(0, 1.0); - w_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] ev; - vector[4] p_next; - vector[4] str; - vector[4] pers; // perseverance - vector[4] V; // weighted sum of ev and pers - - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - pers = initV; // initial pers values - V = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * V); - - // perseverance decay - pers *= K[i]; // decay - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - pers[choice[i, t]] += epP[i]; // perseverance term - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - pers[choice[i, t]] += epN[i]; // perseverance term - } - - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - // calculate V - V = w[i] * ev + (1-w[i]) * pers; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - real mu_epP; - real mu_epN; - real mu_K; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - mu_epP = mu_pr[5]; - mu_epN = mu_pr[6]; - mu_K = Phi_approx(mu_pr[7]); - mu_w = Phi_approx(mu_pr[8]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - vector[4] p_next; - vector[4] str; - vector[4] pers; // perseverance - vector[4] V; // weighted sum of ev and pers - - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - pers = initV; // initial pers values - V = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * V); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * V)); - - // perseverance decay - pers *= K[i]; // decay - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - pers[choice[i, t]] += epP[i]; // perseverance term - } else { // x(t) < 
0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - pers[choice[i, t]] += epN[i]; // perseverance term - } - - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - // calculate V - V = w[i] * ev + (1-w[i]) * pers; - } - } - } -} - diff --git a/inst/stan_files/peer_ocu.stan b/inst/stan_files/peer_ocu.stan deleted file mode 100644 index cd0c52d5..00000000 --- a/inst/stan_files/peer_ocu.stan +++ /dev/null @@ -1,115 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int condition[N, T]; // 0: solo, 1: ss, 2: mix, 3: rr - real p_gamble[N, T]; - real safe_Hpayoff[N, T]; - real safe_Lpayoff[N, T]; - real risky_Hpayoff[N, T]; - real risky_Lpayoff[N, T]; - int choice[N, T]; -} - -transformed data { -} - -parameters { - vector[3] mu_pr; - vector[3] sigma; - vector[N] rho_pr; - vector[N] tau_pr; - vector[N] ocu_pr; -} - -transformed parameters { - vector[N] rho; - vector[N] tau; - vector[N] ocu; - - for (i in 1:N) { - rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; - } - tau = exp(mu_pr[2] + sigma[2] * tau_pr); - ocu = mu_pr[3] + sigma[3] * ocu_pr; -} - -model { - // peer_ocu - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - - // individual parameters w/ Matt trick - rho_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - ocu_pr ~ normal(0, 1.0); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real U_safe; - real U_risky; - - U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); - U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); - if (condition[i, t] == 1) { // safe-safe - U_safe += ocu[i]; - } - if (condition[i, t] == 3) { // risky-risky - U_risky += ocu[i]; - } - choice[i, t] ~ bernoulli_logit(tau[i] * (U_risky - U_safe)); - } - } -} -generated quantities { - real mu_rho; - real mu_tau; - real mu_ocu; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_rho = Phi_approx(mu_pr[1]) * 2; - mu_tau = exp(mu_pr[2]); - mu_ocu = mu_pr[3]; - - { // local section, this saves time and space - for (i in 1:N) { - - // Initialize values - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - real U_safe; - real U_risky; - - U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); - U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); - if (condition[i, t] == 1) { // safe-safe - U_safe += ocu[i]; - } - if (condition[i, t] == 3) { // risky-risky - U_risky += ocu[i]; - } - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | tau[i] * (U_risky - U_safe)); - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(tau[i] * (U_risky - U_safe))); - } - } - } -} - diff --git a/inst/stan_files/pre/license.stan b/inst/stan_files/pre/license.stan deleted file mode 100644 index dec428a6..00000000 --- a/inst/stan_files/pre/license.stan +++ /dev/null @@ -1,14 +0,0 @@ -/* - hBayesDM is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. 
- - hBayesDM is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with hBayesDM. If not, see <http://www.gnu.org/licenses/>. -*/ diff --git a/inst/stan_files/prl_ewa.stan b/inst/stan_files/prl_ewa.stan deleted file mode 100644 index 234cf467..00000000 --- a/inst/stan_files/prl_ewa.stan +++ /dev/null @@ -1,179 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Experience-Weighted Attraction model by Ouden et al. (2013) Neuron - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] phi_pr; // 1-learning rate - vector[N] rho_pr; // experience decay factor - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] phi; - vector[N] rho; - vector[N] beta; - - for (i in 1:N) { - phi[i] = Phi_approx(mu_pr[1] + sigma[1] * phi_pr[i]); - rho[i] = Phi_approx(mu_pr[2] + sigma[2] * rho_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Individual parameters - phi_pr ~ normal(0, 1); - rho_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - vector[2] ew; // Experience weight - - real ewt1; // Experience weight of trial (t - 1) - - // Initialize values - ev = initV; // initial ev values - ew = initV; // initial ew values - - for (t in 1:Tsubj[i]) { - // Softmax choice - choice[i, t] ~ categorical_logit(ev * beta[i]); - - // Store previous experience weight value - ewt1 = ew[choice[i, t]]; - - // Update experience weight for chosen stimulus - { - ew[choice[i, t]] *= rho[i]; - ew[choice[i, t]] += 1; - } - - // Update expected value of chosen stimulus - { - ev[choice[i, t]] *= phi[i] * ewt1; - ev[choice[i, t]] += outcome[i, t]; - ev[choice[i, t]] /= ew[choice[i, t]]; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_phi; - real mu_rho; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - //real mr_ev[N, T, 2]; // Expected value - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - //real mr_ew[N, T, 2]; // Experience weight - real ew_c[N, T]; // Experience weight of the chosen option - real ew_nc[N, T]; // Experience weight of the non-chosen option - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - ew_c[i, t] = 0; - ew_nc[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_phi = Phi_approx(mu_pr[1]); - mu_rho = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) 
* 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - vector[2] ew; // Experience weight - - real ewt1; // Experience weight of trial (t-1) - - // Initialize values - ev = initV; // initial ev values - ew = initV; // initial ew values - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); - - // Store values for model regressors - //mr_ev[i, t] = ev; - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - //mr_ew[i, t] = ew; - ew_c[i, t] = ew[choice[i, t]]; - ew_nc[i, t] = ew[3 - choice[i, t]]; - - // Store previous experience weight value - ewt1 = ew[choice[i, t]]; - - // Update experience weight for chosen stimulus - { - ew[choice[i, t]] *= rho[i]; - ew[choice[i, t]] += 1; - } - - // Update expected value of chosen stimulus - { - ev[choice[i, t]] *= phi[i] * ewt1; - ev[choice[i, t]] += outcome[i, t]; - ev[choice[i, t]] /= ew[choice[i, t]]; - } - } - } - } -} - diff --git a/inst/stan_files/prl_fictitious.stan b/inst/stan_files/prl_fictitious.stan deleted file mode 100644 index 0fb8d486..00000000 --- a/inst/stan_files/prl_fictitious.stan +++ /dev/null @@ -1,173 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } - alpha = mu_pr[2] + sigma[2] * alpha_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1] ~ normal(0, 0.2); - sigma[2] ~ cauchy(0, 1.0); - sigma[3] ~ normal(0, 0.2); - - // Individual parameters - eta_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // Compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // Prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } -} - -generated quantities { - // For group 
level parameters - real mu_eta; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; //Prediction error of the chosen option - real pe_nc[N, T]; //Prediction error of the non-chosen option - real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_alpha = mu_pr[2]; - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } - } -} - diff --git a/inst/stan_files/prl_fictitious_multipleB.stan b/inst/stan_files/prl_fictitious_multipleB.stan deleted file mode 100644 index 264d6c8f..00000000 --- a/inst/stan_files/prl_fictitious_multipleB.stan +++ /dev/null @@ -1,185 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - - int B; // Max number of blocks across subjects - int Bsubj[N]; // Number of blocks for each subject - - int T; // Max number of trials across subjects - int Tsubj[N, B]; // Number of trials/block for each subject - - int choice[N, B, T]; // Choice for each subject-block-trial - real outcome[N, B, T]; // Outcome (reward/loss) for each subject-block-trial -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } - alpha = mu_pr[2] + sigma[2] * alpha_pr; -} -model { - // Hyperparameters - mu_pr ~ 
normal(0, 1); - sigma[1] ~ normal(0, 0.2); - sigma[2] ~ cauchy(0, 1.0); - sigma[3] ~ normal(0, 0.2); - - // individual parameters - eta_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - for (bIdx in 1:Bsubj[i]) { // new - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i, bIdx])) { // new - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, bIdx, t] ~ categorical(prob); - //choice[i, t] ~ bernoulli(prob); - - // prediction error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new - PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new - - // value updating (learning) - ev[choice[i, bIdx, t]] += eta[i] * PE; //new - ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new - } // end of t loop - } // end of bIdx loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_eta; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, B, T]; // Expected value of the chosen option - real ev_nc[N, B, T]; // Expected value of the non-chosen option - - real pe_c[N, B, T]; //Prediction error of the chosen option - real pe_nc[N, B, T]; //Prediction error of the non-chosen option - real dv[N, B, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, B, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (b in 1:B) { - for (t in 1:T) { - ev_c[i, b, t] = 0; - ev_nc[i, b, t] = 0; - - pe_c[i, b, t] = 0; - pe_nc[i, b, t] = 0; - dv[i, b, t] = 0; - - y_pred[i, b, t] = -1; - } - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_alpha = mu_pr[2]; - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - - log_lik[i] = 0; - - for (bIdx in 1:Bsubj[i]) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i, bIdx])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, bIdx, t] | prob); //new - - // generate posterior prediction for current trial - y_pred[i, bIdx, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new - PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new - - // Store values for model regressors - ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; - ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; - - pe_c[i, bIdx, t] = PE; - pe_nc[i, bIdx, t] = PEnc; - dv[i, bIdx, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, bIdx, t]] += eta[i] * PE; //new - ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new - } // end of t loop - } // end of bIdx loop - } - } -} - diff --git a/inst/stan_files/prl_fictitious_rp.stan b/inst/stan_files/prl_fictitious_rp.stan deleted file mode 100644 index daa0779c..00000000 --- 
a/inst/stan_files/prl_fictitious_rp.stan +++ /dev/null @@ -1,188 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pos_pr; // learning rate, positive PE - vector[N] eta_neg_pr; // learning rate, negative PE - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta_pos; - vector[N] eta_neg; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); - eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); - beta[i] = Phi_approx(mu_pr[4] + sigma[4] * beta_pr[i]) * 10; - } - alpha = mu_pr[3] + sigma[3] * alpha_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - sigma[4] ~ normal(0, 0.2); - - // individual parameters - eta_pos_pr ~ normal(0, 1); - eta_neg_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_eta_pos; - real mu_eta_neg; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; // Prediction error of the chosen option - real pe_nc[N, T]; // Prediction error of the non-chosen option - - real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta_pos = Phi_approx(mu_pr[1]); - mu_eta_neg = Phi_approx(mu_pr[2]); - mu_alpha = mu_pr[3]; - mu_beta = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // 
Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // Value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } - } -} - diff --git a/inst/stan_files/prl_fictitious_rp_woa.stan b/inst/stan_files/prl_fictitious_rp_woa.stan deleted file mode 100644 index 48f78a42..00000000 --- a/inst/stan_files/prl_fictitious_rp_woa.stan +++ /dev/null @@ -1,180 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) with separate learning rates for +PE and -PE & without alpha (indecision point) - */ - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pos_pr; // learning rate, positive PE - vector[N] eta_neg_pr; // learning rate, negative PE - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta_pos; - vector[N] eta_neg; - vector[N] beta; - - for (i in 1:N) { - eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); - eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - eta_pos_pr ~ normal(0, 1); - eta_neg_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - 
ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_eta_pos; - real mu_eta_neg; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; // Prediction error of the chosen option - real pe_nc[N, T]; // Prediction error of the non-chosen option - - real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta_pos = Phi_approx(mu_pr[1]); - mu_eta_neg = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // Value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } - } -} - diff --git a/inst/stan_files/prl_fictitious_woa.stan b/inst/stan_files/prl_fictitious_woa.stan deleted file mode 100644 index 58a4053f..00000000 --- a/inst/stan_files/prl_fictitious_woa.stan +++ /dev/null @@ -1,165 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) without alpha (indecision point) - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * 
beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Individual parameters - eta_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // Compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // Prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } -} - -generated quantities { - // For group level parameters - real mu_eta; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; //Prediction error of the chosen option - real pe_nc[N, T]; //Prediction error of the non-chosen option - real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] =0; - - y_pred[i, t] = -1; - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } - } -} - diff --git a/inst/stan_files/prl_rp.stan b/inst/stan_files/prl_rp.stan deleted file mode 100644 index a7303744..00000000 --- a/inst/stan_files/prl_rp.stan +++ /dev/null @@ -1,149 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Reward-Punishment Model by Ouden et al. 
(2013) Neuron - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Apun_pr; // learning rate (punishment) - vector[N] Arew_pr; // learning rate (reward) - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] Apun; - vector[N] Arew; - vector[N] beta; - - for (i in 1:N) { - Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); - Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - Apun_pr ~ normal(0, 1); - Arew_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define Values - vector[2] ev; // Expected value - real PE; // prediction error - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // Softmax choice - choice[i, t] ~ categorical_logit(ev * beta[i]); - - // Prediction Error - PE = outcome[i, t] - ev[choice[i, t]]; - - // Update expected value of chosen stimulus - if (outcome[i, t] > 0) - ev[choice[i, t]] += Arew[i] * PE; - else - ev[choice[i, t]] += Apun[i] * PE; - } - } -} - -generated quantities { - // For group level parameters - real mu_Apun; - real mu_Arew; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - real pe[N, T]; // Prediction error - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_Apun = Phi_approx(mu_pr[1]); - mu_Arew = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - real PE; // Prediction error - - // Initialize values - ev = initV; // initial ev values - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); - - // Prediction Error - PE = outcome[i, t] - ev[choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe[i, t] = PE; - - // Update expected value of chosen stimulus - if (outcome[i, t] > 0) - ev[choice[i, t]] += Arew[i] * PE; - else - ev[choice[i, t]] += Apun[i] * PE; - } - } - } -} - diff --git a/inst/stan_files/prl_rp_multipleB.stan b/inst/stan_files/prl_rp_multipleB.stan deleted file mode 100644 index 8cd77c43..00000000 --- a/inst/stan_files/prl_rp_multipleB.stan +++ /dev/null @@ -1,161 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning 
(PRL) Task - * - * Reward-Punishment Model with multiple blocks per subject by Ouden et al. (2013) Neuron - */ - -data { - int N; // Number of subjects - - int B; // Maximum number of blocks across subjects - int Bsubj[N]; // Number of blocks for each subject - - int T; // Maximum number of trials across subjects - int Tsubj[N, B]; // Number of trials/blocks for each subject - - int choice[N, B, T]; // The choices subjects made - real outcome[N, B, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Apun_pr; // learning rate (punishment) - vector[N] Arew_pr; // learning rate (reward) - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] Apun; - vector[N] Arew; - vector[N] beta; - - for (i in 1:N) { - Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); - Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - Apun_pr ~ normal(0, 1); - Arew_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - for (bIdx in 1:Bsubj[i]) { // new - // Define Values - vector[2] ev; // Expected value - real PE; // Prediction error - - // Initialize values - ev = initV; // Initial ev values - - for (t in 1:Tsubj[i, bIdx]) { - // Softmax choice - choice[i, bIdx, t] ~ categorical_logit(ev * beta[i]); - - // Prediction Error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; - - // Update expected value of chosen stimulus - if (outcome[i, bIdx, t] > 0) - ev[choice[i, bIdx, t]] += Arew[i] * PE; - else - ev[choice[i, bIdx, t]] += Apun[i] * PE; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_Apun; - real mu_Arew; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, B, T]; // Expected value of the chosen option - real ev_nc[N, B, T]; // Expected value of the non-chosen option - real pe[N, B, T]; // Prediction error - - // For posterior predictive check - real y_pred[N, B, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (b in 1:B) { - for (t in 1:T) { - ev_c[i, b, t] = 0; - ev_nc[i, b, t] = 0; - pe[i, b, t] = 0; - - y_pred[i, b, t] = -1; - } - } - } - - mu_Apun = Phi_approx(mu_pr[1]); - mu_Arew = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - - log_lik[i] = 0; - - for (bIdx in 1:Bsubj[i]) { // new - // Define values - vector[2] ev; // Expected value - real PE; // prediction error - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:Tsubj[i, bIdx]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, bIdx, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, bIdx, t] = categorical_rng(softmax(ev * beta[i])); - - // Prediction Error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; - - // Store values for model regressors - ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; - ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; - pe[i, bIdx, t] = PE; - - // Update 
expected value of chosen stimulus - if (outcome[i, bIdx, t] > 0) - ev[choice[i, bIdx, t]] += Arew[i] * PE; - else - ev[choice[i, bIdx, t]] += Apun[i] * PE; - } - } - } - } -} - diff --git a/inst/stan_files/pst_gainloss_Q.stan b/inst/stan_files/pst_gainloss_Q.stan deleted file mode 100644 index 788b9a4e..00000000 --- a/inst/stan_files/pst_gainloss_Q.stan +++ /dev/null @@ -1,114 +0,0 @@ -#include /pre/license.stan - -data { - int N; // Number of subjects - int T; // Maximum # of trials - int Tsubj[N]; // # of trials for acquisition phase - - int option1[N, T]; - int option2[N, T]; - int choice[N, T]; - real reward[N, T]; -} - -transformed data { - // Default values to initialize the vector of expected values - vector[6] initial_values; - initial_values = rep_vector(0, 6); -} - -parameters { - // Group-level parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level parameters for Matt trick - vector[N] alpha_pos_pr; - vector[N] alpha_neg_pr; - vector[N] beta_pr; -} - -transformed parameters { - vector[N] alpha_pos; - vector[N] alpha_neg; - vector[N] beta; - - alpha_pos = Phi_approx(mu_pr[1] + sigma[1] * alpha_pos_pr); - alpha_neg = Phi_approx(mu_pr[2] + sigma[2] * alpha_neg_pr); - beta = Phi_approx(mu_pr[3] + sigma[3] * beta_pr) * 10; -} - -model { - // Priors for group-level parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Priors for subject-level parameters - alpha_pos_pr ~ normal(0, 1); - alpha_neg_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - int co; // Chosen option - real delta; // Difference between two options - real pe; // Prediction error - real alpha; - vector[6] ev; // Expected values - - ev = initial_values; - - // Acquisition Phase - for (t in 1:Tsubj[i]) { - co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; - - // Luce choice rule - delta = ev[option1[i, t]] - ev[option2[i, t]]; - target += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); - - pe = reward[i, t] - ev[co]; - alpha = (pe >= 0) ? alpha_pos[i] : alpha_neg[i]; - ev[co] += alpha * pe; - } - } -} - -generated quantities { - // For group-level parameters - real mu_alpha_pos; - real mu_alpha_neg; - real mu_beta; - - // For log-likelihood calculation - real log_lik[N]; - - mu_alpha_pos = Phi_approx(mu_pr[1]); - mu_alpha_neg = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { - for (i in 1:N) { - int co; // Chosen option - real delta; // Difference between two options - real pe; // Prediction error - real alpha; - vector[6] ev; // Expected values - - ev = initial_values; - log_lik[i] = 0; - - // Acquisition Phase - for (t in 1:Tsubj[i]) { - co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; - - // Luce choice rule - delta = ev[option1[i, t]] - ev[option2[i, t]]; - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); - - pe = reward[i, t] - ev[co]; - alpha = (pe >= 0) ? 
alpha_pos[i] : alpha_neg[i];
-        ev[co] += alpha * pe;
-      }
-    }
-  }
-}
-
diff --git a/inst/stan_files/ra_noLA.stan b/inst/stan_files/ra_noLA.stan
deleted file mode 100644
index c5c599c4..00000000
--- a/inst/stan_files/ra_noLA.stan
+++ /dev/null
@@ -1,95 +0,0 @@
-#include /pre/license.stan
-
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real gain[N, T];
-  real loss[N, T];  // absolute loss amount
-  real cert[N, T];
-  int gamble[N, T];
-}
-
-transformed data {
-}
-
-parameters {
-  vector[2] mu_pr;
-  vector[2] sigma;
-  vector[N] rho_pr;
-  vector[N] tau_pr;
-}
-
-transformed parameters {
-  vector[N] rho;
-  vector[N] tau;
-
-  for (i in 1:N) {
-    rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
-    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
-  }
-}
-
-model {
-  // ra_noLA: the ra_prospect model (Sokol-Hessner et al., 2009, PNAS) without loss aversion
-  // hyperparameters
-  mu_pr ~ normal(0, 1.0);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters w/ Matt trick
-  rho_pr ~ normal(0, 1.0);
-  tau_pr ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    for (t in 1:Tsubj[i]) {
-      real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-      real evGamble;  // to save memory and increase speed.
-      real pGamble;
-
-      evSafe   = pow(cert[i, t], rho[i]);
-      evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i]));
-      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
-      gamble[i, t] ~ bernoulli(pGamble);
-    }
-  }
-}
-generated quantities {
-  real mu_rho;
-  real mu_tau;
-
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to -1 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_rho = Phi_approx(mu_pr[1]) * 2;
-  mu_tau = Phi_approx(mu_pr[2]) * 30;
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      log_lik[i] = 0;
-      for (t in 1:Tsubj[i]) {
-        real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-        real evGamble;  // to save memory and increase speed.
-        real pGamble;
-
-        // loss[i, t] = absolute amount of loss (pre-converted in R)
-        evSafe   = pow(cert[i, t], rho[i]);
-        evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i]));
-        pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
-        log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = bernoulli_rng(pGamble);
-      }
-    }
-  }
-}
-
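For reference, a minimal R sketch of the per-trial quantities the deleted ra_noLA file computes; parameter and data values below are illustrative stand-ins, not estimates:

inv_logit <- function(x) 1 / (1 + exp(-x))

rho <- 0.9   # risk-aversion exponent, rescaled onto (0, 2) as above
tau <- 2.5   # inverse temperature, rescaled onto (0, 30) as above
gain <- 20; loss <- 15; cert <- 5; gamble <- 1   # one observed trial (accept = 1)

ev_safe   <- cert^rho                      # subjective value of the sure amount
ev_gamble <- 0.5 * (gain^rho - loss^rho)   # 50/50 gamble, no loss-aversion weight
p_gamble  <- inv_logit(tau * (ev_gamble - ev_safe))

log_lik <- dbinom(gamble, 1, p_gamble, log = TRUE)  # Stan: bernoulli_lpmf(gamble | pGamble)
y_pred  <- rbinom(1, 1, p_gamble)                   # Stan: bernoulli_rng(pGamble)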
diff --git a/inst/stan_files/ra_noRA.stan b/inst/stan_files/ra_noRA.stan
deleted file mode 100644
index 0f36c3be..00000000
--- a/inst/stan_files/ra_noRA.stan
+++ /dev/null
@@ -1,95 +0,0 @@
-#include /pre/license.stan
-
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real gain[N, T];
-  real loss[N, T];  // absolute loss amount
-  real cert[N, T];
-  int gamble[N, T];
-}
-
-transformed data {
-}
-
-parameters {
-  vector[2] mu_pr;
-  vector[2] sigma;
-  vector[N] lambda_pr;
-  vector[N] tau_pr;
-}
-
-transformed parameters {
-  vector[N] lambda;
-  vector[N] tau;
-
-  for (i in 1:N) {
-    lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]) * 5;
-    tau[i]    = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
-  }
-}
-
-model {
-  // ra_noRA: the ra_prospect model (Sokol-Hessner et al., 2009, PNAS) without risk aversion
-  // hyperparameters
-  mu_pr ~ normal(0, 1.0);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters w/ Matt trick
-  lambda_pr ~ normal(0, 1.0);
-  tau_pr ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    for (t in 1:Tsubj[i]) {
-      real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-      real evGamble;  // to save memory and increase speed.
-      real pGamble;
-
-      // loss[i, t] = absolute amount of loss (pre-converted in R)
-      evSafe   = cert[i, t];
-      evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]);
-      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
-      gamble[i, t] ~ bernoulli(pGamble);
-    }
-  }
-}
-generated quantities {
-  real mu_lambda;
-  real mu_tau;
-
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to -1 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_lambda = Phi_approx(mu_pr[1]) * 5;
-  mu_tau    = Phi_approx(mu_pr[2]) * 30;
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      log_lik[i] = 0;
-      for (t in 1:Tsubj[i]) {
-        real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-        real evGamble;  // to save memory and increase speed.
-        real pGamble;
-
-        evSafe   = cert[i, t];
-        evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]);
-        pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
-        log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble);
-
-        // generate posterior prediction for current trial
-        y_pred[i, t] = bernoulli_rng(pGamble);
-      }
-    }
-  }
-}
-
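The three ra_* models deleted in this hunk share one Bernoulli choice rule, p(gamble) = inv_logit(tau * (evGamble - evSafe)), and differ only in the gamble's utility. A compact R comparison (illustrative values; note that evSafe is pow(cert, rho) in ra_noLA and in ra_prospect below, but plain cert in ra_noRA):

u_noLA     <- function(gain, loss, rho)         0.5 * (gain^rho - loss^rho)
u_noRA     <- function(gain, loss, lambda)      0.5 * (gain - lambda * loss)
u_prospect <- function(gain, loss, rho, lambda) 0.5 * (gain^rho - lambda * loss^rho)

inv_logit <- function(x) 1 / (1 + exp(-x))
tau <- 1.5; lambda <- 2.0            # illustrative subject-level values
gain <- 20; loss <- 15; cert <- 5    # loss enters as an absolute amount
p_gamble <- inv_logit(tau * (u_noRA(gain, loss, lambda) - cert))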
diff --git a/inst/stan_files/ra_prospect.stan b/inst/stan_files/ra_prospect.stan
deleted file mode 100644
index 542ea460..00000000
--- a/inst/stan_files/ra_prospect.stan
+++ /dev/null
@@ -1,97 +0,0 @@
-#include /pre/license.stan
-
-data {
-  int N;
-  int T;
-  int Tsubj[N];
-  real gain[N, T];
-  real loss[N, T];  // absolute loss amount
-  real cert[N, T];
-  int gamble[N, T];
-}
-transformed data {
-}
-parameters {
-  vector[3] mu_pr;
-  vector[3] sigma;
-  vector[N] rho_pr;
-  vector[N] lambda_pr;
-  vector[N] tau_pr;
-}
-transformed parameters {
-  vector[N] rho;
-  vector[N] lambda;
-  vector[N] tau;
-
-  for (i in 1:N) {
-    rho[i]    = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
-    lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5;
-    tau[i]    = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 30;
-  }
-}
-model {
-  // ra_prospect: Original model in Sokol-Hessner et al., 2009, PNAS
-  // hyperparameters
-  mu_pr ~ normal(0, 1.0);
-  sigma ~ normal(0, 0.2);
-
-  // individual parameters w/ Matt trick
-  rho_pr ~ normal(0, 1.0);
-  lambda_pr ~ normal(0, 1.0);
-  tau_pr ~ normal(0, 1.0);
-
-  for (i in 1:N) {
-    for (t in 1:Tsubj[i]) {
-      real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-      real evGamble;  // to save memory and increase speed.
-      real pGamble;
-
-      // loss[i, t] = absolute amount of loss (pre-converted in R)
-      evSafe   = pow(cert[i, t], rho[i]);
-      evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(loss[i, t], rho[i]));
-      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
-      gamble[i, t] ~ bernoulli(pGamble);
-    }
-  }
-}
-generated quantities {
-  real mu_rho;
-  real mu_lambda;
-  real mu_tau;
-
-  real log_lik[N];
-
-  // For posterior predictive check
-  real y_pred[N, T];
-
-  // Set all posterior predictions to -1 (avoids NULL values)
-  for (i in 1:N) {
-    for (t in 1:T) {
-      y_pred[i, t] = -1;
-    }
-  }
-
-  mu_rho    = Phi_approx(mu_pr[1]) * 2;
-  mu_lambda = Phi_approx(mu_pr[2]) * 5;
-  mu_tau    = Phi_approx(mu_pr[3]) * 30;
-
-  { // local section, this saves time and space
-    for (i in 1:N) {
-      log_lik[i] = 0;
-      for (t in 1:Tsubj[i]) {
-        real evSafe;    // evSafe, evGamble and pGamble are declared per trial as scalars
-        real evGamble;  // to save memory and increase speed.
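# An R aside on the local section being declared here: it recomputes each
# trial's choice probability to obtain log_lik and a posterior predictive
# draw, and the same quantities can be produced vectorised over one
# subject's trials. abs() below mirrors the fabs() applied to loss a few
# lines further down, which looks redundant since loss is pre-converted to
# an absolute amount in R (see the data-block comment). Values illustrative.
inv_logit <- function(x) 1 / (1 + exp(-x))
rho <- 0.9; lambda <- 2.0; tau <- 1.5
gain <- c(12, 20, 8); loss <- c(10, 15, 6); cert <- c(4, 5, 2)
gamble <- c(1, 0, 1)   # observed accept/reject decisions

ev_safe   <- cert^rho
ev_gamble <- 0.5 * (gain^rho - lambda * abs(loss)^rho)
p_gamble  <- inv_logit(tau * (ev_gamble - ev_safe))

log_lik <- sum(dbinom(gamble, 1, p_gamble, log = TRUE))  # sum of bernoulli_lpmf terms
y_pred  <- rbinom(length(p_gamble), 1, p_gamble)         # bernoulli_rng per trial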
- real pGamble; - - evSafe = pow(cert[i, t], rho[i]); - evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(fabs(loss[i, t]), rho[i])); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGamble); - } - } - } -} - diff --git a/inst/stan_files/rdt_happiness.stan b/inst/stan_files/rdt_happiness.stan deleted file mode 100644 index 3abb9e18..00000000 --- a/inst/stan_files/rdt_happiness.stan +++ /dev/null @@ -1,146 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real gain[N, T]; - real loss[N, T]; // absolute loss amount - real cert[N, T]; - int type[N, T]; - int gamble[N, T]; - real outcome[N, T]; - real happy[N, T]; - real RT_happy[N, T]; -} -transformed data { -} -parameters { - vector[6] mu_pr; - vector[6] sigma; - vector[N] w0_pr; - vector[N] w1_pr; - vector[N] w2_pr; - vector[N] w3_pr; - vector[N] gam_pr; - vector[N] sig_pr; -} -transformed parameters { - vector[N] w0; - vector[N] w1; - vector[N] w2; - vector[N] w3; - vector[N] gam; - vector[N] sig; - - w0 = mu_pr[1] + sigma[1] * w0_pr; - w1 = mu_pr[2] + sigma[2] * w1_pr; - w2 = mu_pr[3] + sigma[3] * w2_pr; - w3 = mu_pr[4] + sigma[4] * w3_pr; - - for (i in 1:N) { - gam[i] = Phi_approx(mu_pr[5] + sigma[5] * gam_pr[i]); - } - sig = exp(mu_pr[6] + sigma[6] * sig_pr); -} -model { - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - w0_pr ~ normal(0, 1.0); - w1_pr ~ normal(0, 1.0); - w2_pr ~ normal(0, 1.0); - w3_pr ~ normal(0, 1.0); - gam_pr ~ normal(0, 1.0); - sig_pr ~ normal(0, 1.0); - - for (i in 1:N) { - real cert_sum; - real ev_sum; - real rpe_sum; - - - cert_sum = 0; - ev_sum = 0; - rpe_sum = 0; - - for (t in 1:Tsubj[i]) { - if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ - happy[i,t] ~ normal(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - } - - if(gamble[i,t] == 0){ - cert_sum += type[i,t] * cert[i,t]; - } else { - ev_sum += 0.5 * (gain[i,t] - loss[i,t]); - rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); - } - - cert_sum *= gam[i]; - ev_sum *= gam[i]; - rpe_sum *= gam[i]; - } - } -} -generated quantities { - real mu_w0; - real mu_w1; - real mu_w2; - real mu_w3; - real mu_gam; - real mu_sig; - - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_w0 = mu_pr[1]; - mu_w1 = mu_pr[2]; - mu_w2 = mu_pr[3]; - mu_w3 = mu_pr[4]; - mu_gam = Phi_approx(mu_pr[5]); - mu_sig = exp(mu_pr[6]); - - - { // local section, this saves time and space - for (i in 1:N) { - real cert_sum; - real ev_sum; - real rpe_sum; - - log_lik[i] = 0; - - cert_sum = 0; - ev_sum = 0; - rpe_sum = 0; - - for (t in 1:Tsubj[i]) { - if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ - log_lik[i] += normal_lpdf(happy[i, t] | w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - y_pred[i, t] = normal_rng(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - } - - if(gamble[i,t] == 0){ - cert_sum += type[i,t] * cert[i,t]; - } else { - ev_sum += 0.5 * (gain[i,t] - loss[i,t]); - rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); - } - - cert_sum *= gam[i]; - ev_sum *= gam[i]; - rpe_sum *= gam[i]; - } - } - } -} - diff --git a/inst/stan_files/ts_par4.stan b/inst/stan_files/ts_par4.stan deleted file mode 100644 
index c615f6d0..00000000 --- a/inst/stan_files/ts_par4.stan +++ /dev/null @@ -1,204 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a_pr; - vector[N] beta_pr; - vector[N] pi_pr; - vector[N] w_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a; - vector[N] beta; - vector[N] pi; - vector[N] w; - - for (i in 1:N) { - a[i] = Phi_approx( mu_pr[1] + sigma[1] * a_pr[i] ); - beta[i] = exp( mu_pr[2] + sigma[2] * beta_pr[i] ); - pi[i] = Phi_approx( mu_pr[3] + sigma[3] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[4] + sigma[4] * w_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... 
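# A hedged R sketch of the update sequence the comment above introduces, as
# ts_par4 applies it with a single learning rate a; all values illustrative.
a <- 0.3             # learning rate
v_mf <- rep(0, 6)    # slots 1-2: stage-1 values; slots 3-6: stage-2 values
choice1 <- 1         # level-1 choice (1 or 2)
choice2 <- 2         # level-2 choice (1-4)
reward  <- 1

# earlier in the trial, stage 1 moved toward the visited stage-2 value:
v_mf[choice1] <- v_mf[choice1] + a * (v_mf[2 + choice2] - v_mf[choice1])
# after the reward, the chosen stage-2 value is updated first ...
v_mf[2 + choice2] <- v_mf[2 + choice2] + a * (reward - v_mf[2 + choice2])
# ... and its (already updated) value feeds the final stage-1 update:
v_mf[choice1] <- v_mf[choice1] + a * (reward - v_mf[2 + choice2])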
- // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a; - real mu_beta; - real mu_pi; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a = Phi_approx( mu_pr[1] ); - mu_beta = exp( mu_pr[2] ); - mu_pi = Phi_approx( mu_pr[3] ) * 5; - mu_w = Phi_approx( mu_pr[4] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ 
level1_choice[i,t]]); - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop - } -} - diff --git a/inst/stan_files/ts_par6.stan b/inst/stan_files/ts_par6.stan deleted file mode 100644 index b472afa0..00000000 --- a/inst/stan_files/ts_par6.stan +++ /dev/null @@ -1,213 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[6] mu_pr; - vector[6] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a1_pr; - vector[N] beta1_pr; - vector[N] a2_pr; - vector[N] beta2_pr; - vector[N] pi_pr; - vector[N] w_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a1; - vector[N] beta1; - vector[N] a2; - vector[N] beta2; - vector[N] pi; - vector[N] w; - - for (i in 1:N) { - a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); - beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); - a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); - beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); - pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a1_pr ~ normal(0, 1); - beta1_pr ~ normal(0, 1); - a2_pr ~ normal(0, 1); - beta2_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a1; - real mu_beta1; - real mu_a2; - real mu_beta2; - real mu_pi; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a1 = Phi_approx( mu_pr[1] ); - mu_beta1 = exp( mu_pr[2] ); - mu_a2 = Phi_approx( mu_pr[3] ); - mu_beta2 = exp( mu_pr[4] ); - mu_pi = Phi_approx( mu_pr[5] ) * 5; - mu_w = Phi_approx( mu_pr[6] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // After observing the reward at Level 2... 
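# For reference, a hedged R sketch of the hybrid stage-1 valuation and choice
# rule that produced level1_prob_choice2 and y_pred_step1 above; persev
# stands in for the perseveration weight pi, and all numbers are illustrative.
inv_logit <- function(x) 1 / (1 + exp(-x))

trans_prob <- 0.7                        # common transition probability
w <- 0.5; beta1 <- 2.0; persev <- 0.2

v_mf <- c(0.1, 0.2, 0.3, 0.5, 0.4, 0.6)  # slots 1-2: stage 1; slots 3-6: stage 2
v_mb <- c(trans_prob       * max(v_mf[3:4]) + (1 - trans_prob) * max(v_mf[5:6]),
          (1 - trans_prob) * max(v_mf[3:4]) + trans_prob       * max(v_mf[5:6]))
v_hybrid <- w * v_mb + (1 - w) * v_mf[1:2]

prev_choice1 <- 2                        # stage-1 choice on the previous trial
p_choice2 <- inv_logit(beta1 * (v_hybrid[2] - v_hybrid[1]) +
                         persev * (2 * prev_choice1 - 3))  # maps 1/2 to -1/+1
# ts_par7 below additionally scales the final stage-1 reward update by an
# eligibility-trace weight lambda.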
- // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop - } -} - diff --git a/inst/stan_files/ts_par7.stan b/inst/stan_files/ts_par7.stan deleted file mode 100644 index 089042c2..00000000 --- a/inst/stan_files/ts_par7.stan +++ /dev/null @@ -1,217 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[7] mu_pr; - vector[7] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a1_pr; - vector[N] beta1_pr; - vector[N] a2_pr; - vector[N] beta2_pr; - vector[N] pi_pr; - vector[N] w_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a1; - vector[N] beta1; - vector[N] a2; - vector[N] beta2; - vector[N] pi; - vector[N] w; - vector[N] lambda; - - for (i in 1:N) { - a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); - beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); - a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); - beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); - pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); - lambda[i] = Phi_approx( mu_pr[7] + sigma[7] * lambda_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a1_pr ~ normal(0, 1); - beta1_pr ~ normal(0, 1); - a2_pr ~ normal(0, 1); - beta2_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a1; - real mu_beta1; - real mu_a2; - real mu_beta2; - real mu_pi; - real mu_w; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a1 = Phi_approx( mu_pr[1] ); - mu_beta1 = exp( mu_pr[2] ); - mu_a2 = Phi_approx( mu_pr[3] ); - mu_beta2 = exp( mu_pr[4] ); - mu_pi = Phi_approx( mu_pr[5] ) * 5; - mu_w = Phi_approx( mu_pr[6] ); - mu_lambda = Phi_approx( mu_pr[7] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // After 
observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - } // end of t loop - } // end of i loop - } -} - diff --git a/inst/stan_files/ug_bayes.stan b/inst/stan_files/ug_bayes.stan deleted file mode 100644 index 6136e708..00000000 --- a/inst/stan_files/ug_bayes.stan +++ /dev/null @@ -1,167 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real offer[N, T]; - int accept[N, T]; -} - -transformed data { - real initV; - real mu0; - real k0; - real sig20; - real nu0; - - initV = 0.0; - mu0 = 10.0; // initial expectation - k0 = 4.0; - sig20 = 4.0; - nu0 = 10.0; -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // alpha: envy - vector[N] beta_pr; // beta: guilt - vector[N] tau_pr; // tau: inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - real alpha[N]; - real beta[N]; - real tau[N]; - - for (i in 1:N) { - alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; - tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - alpha_pr ~ normal(0, 1.0); - beta_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - real util; - real mu_old; - real mu_new; - real k_old; - real k_new; - real sig2_old; - real sig2_new; - real nu_old; - real nu_new; - real PE; // not required for computation - - // Initialize values - mu_old = mu0; - k_old = k0; - sig2_old = sig20; - nu_old = nu0; - - for (t in 1:Tsubj[i]) { - k_new = k_old + 1; - nu_new = nu_old + 1; - mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; - sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); - - PE = offer[i, t] - mu_old; - util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); - - accept[i, t] ~ bernoulli_logit(util * tau[i]); - - // replace old ones with new ones - mu_old = mu_new; - sig2_old = sig2_new; - k_old = k_new; - nu_old = nu_new; - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_tau; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 20; - mu_beta = Phi_approx(mu_pr[2]) * 10; - mu_tau = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real util; - real mu_old; - real mu_new; - real k_old; - real k_new; - real sig2_old; - real sig2_new; - real nu_old; - real nu_new; - real PE; // not required for computation - - // Initialize values - mu_old = mu0; - k_old = k0; - sig2_old = sig20; - nu_old = nu0; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - k_new = k_old + 1; - nu_new = nu_old + 
1; - mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; - sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); - - PE = offer[i, t] - mu_old; - util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); - - log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); - - // replace old ones with new ones - mu_old = mu_new; - sig2_old = sig2_new; - k_old = k_new; - nu_old = nu_new; - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/inst/stan_files/ug_delta.stan b/inst/stan_files/ug_delta.stan deleted file mode 100644 index 9bb70e0a..00000000 --- a/inst/stan_files/ug_delta.stan +++ /dev/null @@ -1,129 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real offer[N, T]; - int accept[N, T]; -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // alpha: Envy (sensitivity to norm prediction error) - vector[N] tau_pr; // tau: Inverse temperature - vector[N] ep_pr; // ep: Norm adaptation rate -} - -transformed parameters { - // Transform subject-level raw parameters - real alpha[N]; - real tau[N]; - real ep[N]; - - for (i in 1:N) { - alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; - tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 10; - ep[i] = Phi_approx(mu_pr[3] + sigma[3] * ep_pr[i]); - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - alpha_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - real f; // Internal norm - real PE; // Prediction error - real util; // Utility of offer - - // Initialize values - f = 10.0; - - for (t in 1:Tsubj[i]) { - // calculate prediction error - PE = offer[i, t] - f; - - // Update utility - util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); - - // Sampling statement - accept[i, t] ~ bernoulli_logit(util * tau[i]); - - // Update internal norm - f += ep[i] * PE; - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_tau; - real mu_ep; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 20; - mu_tau = Phi_approx(mu_pr[2]) * 10; - mu_ep = Phi_approx(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real f; // Internal norm - real PE; // prediction error - real util; // Utility of offer - - // Initialize values - f = 10.0; - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - // calculate prediction error - PE = offer[i, t] - f; - - // Update utility - util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); - - // Calculate log likelihood - log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); - - // Update internal norm - f += ep[i] * PE; - - } // end of t loop - 
} // end of i loop - } // end of local section -} - diff --git a/inst/stan_files/wcs_sql.stan b/inst/stan_files/wcs_sql.stan deleted file mode 100644 index 81b8ce17..00000000 --- a/inst/stan_files/wcs_sql.stan +++ /dev/null @@ -1,176 +0,0 @@ -#include /pre/license.stan - -data { - int N; // number of subjects - int T; // max trial - int Tsubj[N]; // number of max trials per subject - - int choice[N, 4, T]; // subject's deck choice within a trial (1, 2, 3 and 4) - int outcome[N, T]; // whether subject's choice is correct or not within a trial (1 and 0) - matrix[1, 3] choice_match_att[N, T]; // indicates which dimension the chosen card matches to within a trial - matrix[3, 4] deck_match_rule[T]; // indicates which dimension(color, form, number) each of the 4 decks matches to within a trial -} - -transformed data { - matrix[1, 3] initAtt; // each subject start with an even attention to each dimension - matrix[1, 3] unit; // used to flip attention after punishing feedback inside the model - - initAtt = rep_matrix(1.0/3.0, 1, 3); - unit = rep_matrix(1.0, 1, 3); -} - -parameters { - // hyper parameters - vector[3] mu_pr; - vector[3] sigma; - - // subject-level raw parameters (for Matt trick) - vector[N] r_pr; // sensitivity to rewarding feedback (reward learning rate) - vector[N] p_pr; // sensitivity to punishing feedback (punishment learning rate) - vector[N] d_pr; // decision consistency (inverse temperature) -} - -transformed parameters { - // transform subject-level raw parameters - vector[N] r; - vector[N] p; - vector[N] d; - - for (i in 1:N) { - r[i] = Phi_approx( mu_pr[1] + sigma[1] * r_pr[i] ); - p[i] = Phi_approx( mu_pr[2] + sigma[2] * p_pr[i] ); - d[i] = Phi_approx( mu_pr[3] + sigma[3] * d_pr[i] ) * 5; - } -} - -model { - // hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - p_pr ~ normal(0, 1); - d_pr ~ normal(0, 1); - - for (i in 1:N) { - // define values - vector[4] pred_prob_mat; // predicted probability of choosing a deck in each trial based on attention - matrix[1, 3] subj_att; // subject's attention to each dimension - matrix[1, 3] att_signal; // signal where a subject has to pay attention after reward/punishment - real sum_att_signal; // temporary variable to calculate sum(att_signal) - matrix[1, 3] tmpatt; // temporary variable to calculate subj_att - vector[4] tmpp; // temporary variable to calculate pred_prob_mat - - // initiate values - subj_att = initAtt; - pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); - - for (t in 1:Tsubj[i]) { - // multinomial choice - choice[i,,t] ~ multinomial(pred_prob_mat); - - // re-distribute attention after getting a feedback - if (outcome[i,t] == 1) { - att_signal = subj_att .* choice_match_att[i,t]; - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; - } else { - att_signal = subj_att .* (unit - choice_match_att[i,t]); - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; - } - - // scaling to avoid log(0) - subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; - - tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); - tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); - tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); - - // repeat until the final trial - if (t < Tsubj[i]) { - tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; - pred_prob_mat = tmpp/sum(tmpp); - } - - } // end of trial loop - } // end of subject loop -} -generated quantities { - // for group 
level parameters - real mu_r; - real mu_p; - real mu_d; - - // for log-likelihood calculation - real log_lik[N]; - - // for posterior predictive check - int y_pred[N, 4, T]; - - // initiate the variable to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - for (deck in 1:4) { - y_pred[i,deck,t] = -1; - } - } - } - - mu_r = Phi_approx(mu_pr[1]); - mu_p = Phi_approx(mu_pr[2]); - mu_d = Phi_approx(mu_pr[3]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - matrix[1, 3] subj_att; - matrix[1, 3] att_signal; - vector[4] pred_prob_mat; - - matrix[1, 3] tmpatt; - vector[4] tmpp; - - real sum_att_signal; - - subj_att = initAtt; - pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - - log_lik[i] += multinomial_lpmf(choice[i,,t] | pred_prob_mat); - - y_pred[i,,t] = multinomial_rng(pred_prob_mat, 1); - - if(outcome[i,t] == 1) { - att_signal = subj_att .* choice_match_att[i,t]; - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; - } else { - att_signal = subj_att .* (unit - choice_match_att[i,t]); - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; - } - - subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; - - tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); - tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); - tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); - - if(t < Tsubj[i]) { - tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; - pred_prob_mat = tmpp/sum(tmpp); - } - - } // end of trial loop - } // end of subject loop - } // end of local section -} - diff --git a/man-roxygen/ModelFunctionInfo.schema.json b/man-roxygen/ModelFunctionInfo.schema.json deleted file mode 100644 index e95dd7c2..00000000 --- a/man-roxygen/ModelFunctionInfo.schema.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "title": "Model Function Info", - "type": "object", - "required": ["model_function", "data_columns", "data_list", "parameters", "gen_init"], - "properties": { - "model_function": { - "type": "string" - }, - "data_columns": { - "type": "array", - "items": { - "type": "string" - } - }, - "data_list": { - "type": "array", - "items": { - "type": "string" - } - }, - "parameters": { - "type": "array", - "items": { - "type": "string" - } - }, - "gen_init": { - "type": "array", - "items": { - "type": "array", - "minItems": 3, - "maxItems": 3 - } - }, - "regressors": { - "type": "array", - "items": { - "type": "string" - } - } - } -} diff --git a/man-roxygen/ModelFunctionInfo.schema.md b/man-roxygen/ModelFunctionInfo.schema.md deleted file mode 100644 index 40a6de3f..00000000 --- a/man-roxygen/ModelFunctionInfo.schema.md +++ /dev/null @@ -1,11 +0,0 @@ -### Information currently kept track of by JSON Schema: -Property | Required | Explanation --|-|- -"model_function" | o | Name of the model function. -"data_columns" | o | Necessary data columns for the user data. -"data_list" | o | List of preprocessed user data that gets passed to Stan. -"parameters" | o | Parameters of this model. -"gen_init" | o | Initial value & bounds of the parameters **used in the R file**.
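The schema deleted above pins down five required keys plus an optional regressors array, and fixes every gen_init row to exactly three items. A minimal conforming instance can be built and serialized in R (a sketch assuming the jsonlite package; the values match the ra_prospect entry further below):

library(jsonlite)
info <- list(
  model_function = "ra_prospect",
  data_columns   = c("subjID", "gain", "loss", "cert", "gamble"),
  data_list      = c("N", "T", "Tsubj", "gain", "loss", "cert", "gamble"),
  parameters     = c("rho", "lambda", "tau"),
  gen_init       = list(c(0, 1, 2), c(0, 1, 5), c(0, 1, 30))  # (lower, init, upper) per parameter
)
cat(toJSON(info, auto_unbox = TRUE, pretty = TRUE))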
*\* Note that these bounds are just for setting the initial values; these bounds may differ from the boundary constraints given to the parameters in the Stan file.* -"regressors" | x | Regressors of this model. - -#### Written by Jethro Lee. diff --git a/man-roxygen/data/bandit2arm_delta.json b/man-roxygen/data/bandit2arm_delta.json deleted file mode 100644 index 1136c434..00000000 --- a/man-roxygen/data/bandit2arm_delta.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "bandit2arm_delta", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["A", "tau"], - "gen_init": [[0, 0.5, 1], [0, 1, 5]] -} diff --git a/man-roxygen/data/bandit4arm2_kalman_filter.json b/man-roxygen/data/bandit4arm2_kalman_filter.json deleted file mode 100644 index 91203c3b..00000000 --- a/man-roxygen/data/bandit4arm2_kalman_filter.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "bandit4arm2_kalman_filter", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["lambda", "theta", "beta", "mu0", "sigma0", "sigmaD"], - "gen_init": [[0, 0.9, 1], [0, 50, 100], [0, 0.1, 1], [0, 85, 100], [0, 6, 15], [0, 3, 15]] -} diff --git a/man-roxygen/data/bandit4arm_4par.json b/man-roxygen/data/bandit4arm_4par.json deleted file mode 100644 index 75d2fde9..00000000 --- a/man-roxygen/data/bandit4arm_4par.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "bandit4arm_4par", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "rew", "los", "choice"], - "parameters": ["Arew", "Apun", "R", "P"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 1, 30], [0, 1, 30]] -} diff --git a/man-roxygen/data/bandit4arm_lapse.json b/man-roxygen/data/bandit4arm_lapse.json deleted file mode 100644 index 684a506a..00000000 --- a/man-roxygen/data/bandit4arm_lapse.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "bandit4arm_lapse", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "rew", "los", "choice"], - "parameters": ["Arew", "Apun", "R", "P", "xi"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 1, 30], [0, 1, 30], [0, 0.1, 1]] -} diff --git a/man-roxygen/data/bart_par4.json b/man-roxygen/data/bart_par4.json deleted file mode 100644 index f302d90f..00000000 --- a/man-roxygen/data/bart_par4.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "bart_par4", - "data_columns": ["subjID", "pumps", "explosion"], - "data_list": ["N", "T", "Tsubj", "P", "pumps", "explosion"], - "parameters": ["phi", "eta", "gam", "tau"], - "gen_init": [[0, 0.5, 1], [0, 1, "inf"], [0, 1, "inf"], [0, 1, "inf"]] -} diff --git a/man-roxygen/data/choiceRT_ddm.json b/man-roxygen/data/choiceRT_ddm.json deleted file mode 100644 index d68d1560..00000000 --- a/man-roxygen/data/choiceRT_ddm.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "choiceRT_ddm", - "data_columns": ["subjID", "choice", "RT"], - "data_list": ["N", "Nu_max", "Nl_max", "Nu", "Nl", "RTu", "RTl", "minRT", "RTbound"], - "parameters": ["alpha", "beta", "delta", "tau"], - "gen_init": [[0, 0.5, "inf"], [0, 0.5, 1], [0, 0.5, "inf"], [0, 0.15, 1]] -} diff --git a/man-roxygen/data/choiceRT_ddm_single.json b/man-roxygen/data/choiceRT_ddm_single.json deleted file mode 100644 index f3a3f651..00000000 --- a/man-roxygen/data/choiceRT_ddm_single.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "choiceRT_ddm_single", - "data_columns": ["subjID", "choice", "RT"], - 
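As the note above says, each gen_init row is a (lower, init, upper) triple used only to seed the sampler, and the data files encode non-finite or absent bounds as strings ("inf", "-inf", "None") or R expressions ("exp(2)"). A hypothetical reader for those values (parse_bound is not a package function, just a sketch):

parse_bound <- function(x) {
  if (is.numeric(x)) return(x)
  switch(x,
         "inf"  = Inf,
         "-inf" = -Inf,
         "None" = NA_real_,        # single-subject models leave bounds unset
         eval(parse(text = x)))    # e.g. "exp(2)" in the gng entries
}
sapply(list(0, "inf", "None", "exp(2)"), parse_bound)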
"data_list": ["Nu", "Nl", "RTu", "RTl", "minRT", "RTbound"], - "parameters": ["alpha", "beta", "delta", "tau"], - "gen_init": [["None", 0.5, "None"], ["None", 0.5, "None"], ["None", 0.5, "None"], ["None", 0.15, "None"]] -} diff --git a/man-roxygen/data/choiceRT_lba_single.json b/man-roxygen/data/choiceRT_lba_single.json deleted file mode 100644 index 8874e8bb..00000000 --- a/man-roxygen/data/choiceRT_lba_single.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "choiceRT_lba_single", - "data_columns": ["subjID", "choice", "RT", "condition"], - "data_list": ["N_choice", "N_cond", "tr_cond", "max_tr", "RT"], - "parameters": ["d", "A", "v", "tau"], - "gen_init": [["None", 0.25, "None"], ["None", 0.75, "None"], ["None", 2, "None"], ["None", 0.2, "None"]] -} diff --git a/man-roxygen/data/cra_exp.json b/man-roxygen/data/cra_exp.json deleted file mode 100644 index 0290526c..00000000 --- a/man-roxygen/data/cra_exp.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "cra_exp", - "data_columns": ["subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"], - "data_list": ["N", "T", "Tsubj", "choice", "prob", "ambig", "reward_var", "reward_fix"], - "parameters": ["alpha", "beta", "gamma"], - "gen_init": [[0, 1, 2], ["-inf", 0, "inf"], [0, 1, "inf"]], - "regressors": ["sv", "sv_fix", "sv_var", "p_var"] -} diff --git a/man-roxygen/data/cra_linear.json b/man-roxygen/data/cra_linear.json deleted file mode 100644 index 99d53c4f..00000000 --- a/man-roxygen/data/cra_linear.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "cra_linear", - "data_columns": ["subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"], - "data_list": ["N", "T", "Tsubj", "choice", "prob", "ambig", "reward_var", "reward_fix"], - "parameters": ["alpha", "beta", "gamma"], - "gen_init": [[0, 1, 2], ["-inf", 0, "inf"], [0, 1, "inf"]], - "regressors": ["sv", "sv_fix", "sv_var", "p_var"] -} diff --git a/man-roxygen/data/dbdm_prob_weight.json b/man-roxygen/data/dbdm_prob_weight.json deleted file mode 100644 index f46dbce5..00000000 --- a/man-roxygen/data/dbdm_prob_weight.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dbdm_prob_weight", - "data_columns": ["subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"], - "data_list": ["N", "T", "Tsubj", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"], - "parameters": ["tau", "rho", "lambda", "beta"], - "gen_init": [[0, 0.8, 1], [0, 0.7, 2], [0, 2.5, 5], [0, 0.2, 1]] -} diff --git a/man-roxygen/data/dd_cs.json b/man-roxygen/data/dd_cs.json deleted file mode 100644 index 197077dd..00000000 --- a/man-roxygen/data/dd_cs.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dd_cs", - "data_columns": ["subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "data_list": ["N", "T", "Tsubj", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "parameters": ["r", "s", "beta"], - "gen_init": [[0, 0.1, 1], [0, 1, 10], [0, 1, 5]] -} diff --git a/man-roxygen/data/dd_cs_single.json b/man-roxygen/data/dd_cs_single.json deleted file mode 100644 index 34cdf06f..00000000 --- a/man-roxygen/data/dd_cs_single.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dd_cs_single", - "data_columns": ["subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "data_list": ["Tsubj", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "parameters": ["r", "s", "beta"], - "gen_init": [["None", 
0.1, "None"], ["None", 1, "None"], ["None", 1, "None"]] -} diff --git a/man-roxygen/data/dd_exp.json b/man-roxygen/data/dd_exp.json deleted file mode 100644 index 31edb8b4..00000000 --- a/man-roxygen/data/dd_exp.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dd_exp", - "data_columns": ["subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "data_list": ["N", "T", "Tsubj", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "parameters": ["r", "beta"], - "gen_init": [[0, 0.1, 1], [0, 1, 5]] -} diff --git a/man-roxygen/data/dd_hyperbolic.json b/man-roxygen/data/dd_hyperbolic.json deleted file mode 100644 index 2f4cff4a..00000000 --- a/man-roxygen/data/dd_hyperbolic.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dd_hyperbolic", - "data_columns": ["subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "data_list": ["N", "T", "Tsubj", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "parameters": ["k", "beta"], - "gen_init": [[0, 0.1, 1], [0, 1, 5]] -} diff --git a/man-roxygen/data/dd_hyperbolic_single.json b/man-roxygen/data/dd_hyperbolic_single.json deleted file mode 100644 index 05f2fc61..00000000 --- a/man-roxygen/data/dd_hyperbolic_single.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "dd_hyperbolic_single", - "data_columns": ["subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "data_list": ["Tsubj", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"], - "parameters": ["k", "beta"], - "gen_init": [["None", 0.1, "None"], ["None", 1, "None"]] -} diff --git a/man-roxygen/data/gng_m1.json b/man-roxygen/data/gng_m1.json deleted file mode 100644 index e0c699c9..00000000 --- a/man-roxygen/data/gng_m1.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "gng_m1", - "data_columns": ["subjID", "cue", "keyPressed", "outcome"], - "data_list": ["N", "T", "Tsubj", "cue", "pressed", "outcome"], - "parameters": ["xi", "ep", "rho"], - "gen_init": [[0, 0.1, 1], [0, 0.2, 1], [0, "exp(2)", "inf"]], - "regressors": ["Qgo", "Qnogo", "Wgo", "Wnogo"] -} diff --git a/man-roxygen/data/gng_m2.json b/man-roxygen/data/gng_m2.json deleted file mode 100644 index 1d2369ae..00000000 --- a/man-roxygen/data/gng_m2.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "gng_m2", - "data_columns": ["subjID", "cue", "keyPressed", "outcome"], - "data_list": ["N", "T", "Tsubj", "cue", "pressed", "outcome"], - "parameters": ["xi", "ep", "b", "rho"], - "gen_init": [[0, 0.1, 1], [0, 0.2, 1], ["-inf", 0, "inf"], [0, "exp(2)", "inf"]], - "regressors": ["Qgo", "Qnogo", "Wgo", "Wnogo"] -} diff --git a/man-roxygen/data/gng_m3.json b/man-roxygen/data/gng_m3.json deleted file mode 100644 index a24ac8e8..00000000 --- a/man-roxygen/data/gng_m3.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "gng_m3", - "data_columns": ["subjID", "cue", "keyPressed", "outcome"], - "data_list": ["N", "T", "Tsubj", "cue", "pressed", "outcome"], - "parameters": ["xi", "ep", "b", "pi", "rho"], - "gen_init": [[0, 0.1, 1], [0, 0.2, 1], ["-inf", 0, "inf"], ["-inf", 0, "inf"], [0, "exp(2)", "inf"]], - "regressors": ["Qgo", "Qnogo", "Wgo", "Wnogo", "SV"] -} diff --git a/man-roxygen/data/gng_m4.json b/man-roxygen/data/gng_m4.json deleted file mode 100644 index 5432dc59..00000000 --- a/man-roxygen/data/gng_m4.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "gng_m4", - "data_columns": ["subjID", "cue", "keyPressed", "outcome"], - 
"data_list": ["N", "T", "Tsubj", "cue", "pressed", "outcome"], - "parameters": ["xi", "ep", "b", "pi", "rhoRew", "rhoPun"], - "gen_init": [[0, 0.1, 1], [0, 0.2, 1], ["-inf", 0, "inf"], ["-inf", 0, "inf"], [0, "exp(2)", "inf"], [0, "exp(2)", "inf"]], - "regressors": ["Qgo", "Qnogo", "Wgo", "Wnogo", "SV"] -} diff --git a/man-roxygen/data/igt_orl.json b/man-roxygen/data/igt_orl.json deleted file mode 100644 index 68e66ce2..00000000 --- a/man-roxygen/data/igt_orl.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "igt_orl", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome", "sign_out"], - "parameters": ["Arew", "Apun", "K", "betaF", "betaP"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 0.1, 5], ["-inf", 0.1, "inf"], ["-inf", 1, "inf"]] -} diff --git a/man-roxygen/data/igt_pvl_decay.json b/man-roxygen/data/igt_pvl_decay.json deleted file mode 100644 index 56327ee1..00000000 --- a/man-roxygen/data/igt_pvl_decay.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "igt_pvl_decay", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["A", "alpha", "cons", "lambda"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 2], [0, 1, 5], [0, 1, 10]] -} diff --git a/man-roxygen/data/igt_pvl_delta.json b/man-roxygen/data/igt_pvl_delta.json deleted file mode 100644 index cfe67409..00000000 --- a/man-roxygen/data/igt_pvl_delta.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "igt_pvl_delta", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["A", "alpha", "cons", "lambda"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 2], [0, 1, 5], [0, 1, 10]] -} diff --git a/man-roxygen/data/igt_vpp.json b/man-roxygen/data/igt_vpp.json deleted file mode 100644 index b8ae48be..00000000 --- a/man-roxygen/data/igt_vpp.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "igt_vpp", - "data_columns": ["subjID", "choice", "gain", "loss"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["A", "alpha", "cons", "lambda", "epP", "epN", "K", "w"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 2], [0, 1, 5], [0, 1, 10], ["-inf", 0, "inf"], ["-inf", 0, "inf"], [0, 0.5, 1], [0, 0.5, 1]] -} diff --git a/man-roxygen/data/peer_ocu.json b/man-roxygen/data/peer_ocu.json deleted file mode 100644 index ef0fb04f..00000000 --- a/man-roxygen/data/peer_ocu.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "peer_ocu", - "data_columns": ["subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice"], - "data_list": ["N", "T", "Tsubj", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice"], - "parameters": ["rho", "tau", "ocu"], - "gen_init": [[0, 1, 2], [0, 1, "inf"], ["-inf", 0, "inf"]] -} diff --git a/man-roxygen/data/prl_ewa.json b/man-roxygen/data/prl_ewa.json deleted file mode 100644 index 321479e6..00000000 --- a/man-roxygen/data/prl_ewa.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_ewa", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["phi", "rho", "beta"], - "gen_init": [[0, 0.5, 1], [0, 0.1, 1], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "ew_c", "ew_nc"] -} diff --git a/man-roxygen/data/prl_fictitious.json b/man-roxygen/data/prl_fictitious.json deleted file mode 100644 index ce3307d6..00000000 --- 
a/man-roxygen/data/prl_fictitious.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_fictitious", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["eta", "alpha", "beta"], - "gen_init": [[0, 0.5, 1], ["-inf", 0, "inf"], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe_c", "pe_nc", "dv"] -} diff --git a/man-roxygen/data/prl_fictitious_multipleB.json b/man-roxygen/data/prl_fictitious_multipleB.json deleted file mode 100644 index fef7baa0..00000000 --- a/man-roxygen/data/prl_fictitious_multipleB.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_fictitious_multipleB", - "data_columns": ["subjID", "block", "choice", "outcome"], - "data_list": ["N", "B", "Bsubj", "T", "Tsubj", "choice", "outcome"], - "parameters": ["eta", "alpha", "beta"], - "gen_init": [[0, 0.5, 1], ["-inf", 0, "inf"], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe_c", "pe_nc", "dv"] -} diff --git a/man-roxygen/data/prl_fictitious_rp.json b/man-roxygen/data/prl_fictitious_rp.json deleted file mode 100644 index 39050f7f..00000000 --- a/man-roxygen/data/prl_fictitious_rp.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_fictitious_rp", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["eta_pos", "eta_neg", "alpha", "beta"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 1], ["-inf", 0, "inf"], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe_c", "pe_nc", "dv"] -} diff --git a/man-roxygen/data/prl_fictitious_rp_woa.json b/man-roxygen/data/prl_fictitious_rp_woa.json deleted file mode 100644 index 3781565d..00000000 --- a/man-roxygen/data/prl_fictitious_rp_woa.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_fictitious_rp_woa", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["eta_pos", "eta_neg", "beta"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 1], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe_c", "pe_nc", "dv"] -} diff --git a/man-roxygen/data/prl_fictitious_woa.json b/man-roxygen/data/prl_fictitious_woa.json deleted file mode 100644 index dac178bd..00000000 --- a/man-roxygen/data/prl_fictitious_woa.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_fictitious_woa", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["eta", "beta"], - "gen_init": [[0, 0.5, 1], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe_c", "pe_nc", "dv"] -} diff --git a/man-roxygen/data/prl_rp.json b/man-roxygen/data/prl_rp.json deleted file mode 100644 index e570964f..00000000 --- a/man-roxygen/data/prl_rp.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_rp", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome"], - "parameters": ["Apun", "Arew", "beta"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 1, 10]], - "regressors": ["ev_c", "ev_nc", "pe"] -} diff --git a/man-roxygen/data/prl_rp_multipleB.json b/man-roxygen/data/prl_rp_multipleB.json deleted file mode 100644 index 53106df4..00000000 --- a/man-roxygen/data/prl_rp_multipleB.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "model_function": "prl_rp_multipleB", - "data_columns": ["subjID", "block", "choice", "outcome"], - "data_list": ["N", "B", "Bsubj", "T", "Tsubj", "choice", "outcome"], - "parameters": ["Apun", "Arew", "beta"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 1, 10]], - 
"regressors": ["ev_c", "ev_nc", "pe"] -} diff --git a/man-roxygen/data/pst_gainloss_Q.json b/man-roxygen/data/pst_gainloss_Q.json deleted file mode 100644 index b7c25374..00000000 --- a/man-roxygen/data/pst_gainloss_Q.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "pst_gainloss_Q", - "data_columns": ["subjID", "type", "choice", "reward"], - "data_list": ["N", "T", "Tsubj", "option1", "option2", "choice", "reward"], - "parameters": ["alpha_pos", "alpha_neg", "beta"], - "gen_init": [[0, 0.5, 1], [0, 0.5, 1], [0, 1, 10]] -} diff --git a/man-roxygen/data/ra_noLA.json b/man-roxygen/data/ra_noLA.json deleted file mode 100644 index 6a5cd001..00000000 --- a/man-roxygen/data/ra_noLA.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ra_noLA", - "data_columns": ["subjID", "gain", "loss", "cert", "gamble"], - "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"], - "parameters": ["rho", "tau"], - "gen_init": [[0, 1, 2], [0, 1, 30]] -} diff --git a/man-roxygen/data/ra_noRA.json b/man-roxygen/data/ra_noRA.json deleted file mode 100644 index ff53fa09..00000000 --- a/man-roxygen/data/ra_noRA.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ra_noRA", - "data_columns": ["subjID", "gain", "loss", "cert", "gamble"], - "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"], - "parameters": ["lambda", "tau"], - "gen_init": [[0, 1, 5], [0, 1, 30]] -} diff --git a/man-roxygen/data/ra_prospect.json b/man-roxygen/data/ra_prospect.json deleted file mode 100644 index 36cbc86f..00000000 --- a/man-roxygen/data/ra_prospect.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ra_prospect", - "data_columns": ["subjID", "gain", "loss", "cert", "gamble"], - "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "gamble"], - "parameters": ["rho", "lambda", "tau"], - "gen_init": [[0, 1, 2], [0, 1, 5], [0, 1, 30]] -} diff --git a/man-roxygen/data/rdt_happiness.json b/man-roxygen/data/rdt_happiness.json deleted file mode 100644 index bd0c5bbb..00000000 --- a/man-roxygen/data/rdt_happiness.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "rdt_happiness", - "data_columns": ["subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy"], - "data_list": ["N", "T", "Tsubj", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy"], - "parameters": ["w0", "w1", "w2", "w3", "gam", "sig"], - "gen_init": [["-inf", 1, "inf"], ["-inf", 1, "inf"], ["-inf", 1, "inf"], ["-inf", 1, "inf"], [0, 0.5, 1], [0, 1, "inf"]] -} diff --git a/man-roxygen/data/ts_par4.json b/man-roxygen/data/ts_par4.json deleted file mode 100644 index ea0fee8c..00000000 --- a/man-roxygen/data/ts_par4.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ts_par4", - "data_columns": ["subjID", "level1_choice", "level2_choice", "reward"], - "data_list": ["N", "T", "Tsubj", "level1_choice", "level2_choice", "reward", "trans_prob"], - "parameters": ["a", "beta", "pi", "w"], - "gen_init": [[0, 0.5, 1], [0, 1, "inf"], [0, 1, 5], [0, 0.5, 1]] -} diff --git a/man-roxygen/data/ts_par6.json b/man-roxygen/data/ts_par6.json deleted file mode 100644 index c209eb73..00000000 --- a/man-roxygen/data/ts_par6.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ts_par6", - "data_columns": ["subjID", "level1_choice", "level2_choice", "reward"], - "data_list": ["N", "T", "Tsubj", "level1_choice", "level2_choice", "reward", "trans_prob"], - "parameters": ["a1", "beta1", "a2", "beta2", "pi", "w"], - "gen_init": [[0, 0.5, 1], [0, 1, "inf"], [0, 0.5, 1], [0, 1, "inf"], [0, 1, 
5], [0, 0.5, 1]] -} diff --git a/man-roxygen/data/ts_par7.json b/man-roxygen/data/ts_par7.json deleted file mode 100644 index 13e51eb5..00000000 --- a/man-roxygen/data/ts_par7.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ts_par7", - "data_columns": ["subjID", "level1_choice", "level2_choice", "reward"], - "data_list": ["N", "T", "Tsubj", "level1_choice", "level2_choice", "reward", "trans_prob"], - "parameters": ["a1", "beta1", "a2", "beta2", "pi", "w", "lambda"], - "gen_init": [[0, 0.5, 1], [0, 1, "inf"], [0, 0.5, 1], [0, 1, "inf"], [0, 1, 5], [0, 0.5, 1], [0, 0.5, 1]] -} diff --git a/man-roxygen/data/ug_bayes.json b/man-roxygen/data/ug_bayes.json deleted file mode 100644 index 4c2d8a97..00000000 --- a/man-roxygen/data/ug_bayes.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ug_bayes", - "data_columns": ["subjID", "offer", "accept"], - "data_list": ["N", "T", "Tsubj", "offer", "accept"], - "parameters": ["alpha", "beta", "tau"], - "gen_init": [[0, 1, 20], [0, 0.5, 10], [0, 1, 10]] -} diff --git a/man-roxygen/data/ug_delta.json b/man-roxygen/data/ug_delta.json deleted file mode 100644 index a25d14b4..00000000 --- a/man-roxygen/data/ug_delta.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "ug_delta", - "data_columns": ["subjID", "offer", "accept"], - "data_list": ["N", "T", "Tsubj", "offer", "accept"], - "parameters": ["alpha", "tau", "ep"], - "gen_init": [[0, 1, 20], [0, 1, 10], [0, 0.5, 1]] -} diff --git a/man-roxygen/data/wcs_sql.json b/man-roxygen/data/wcs_sql.json deleted file mode 100644 index ed839ebd..00000000 --- a/man-roxygen/data/wcs_sql.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "model_function": "wcs_sql", - "data_columns": ["subjID", "choice", "outcome"], - "data_list": ["N", "T", "Tsubj", "choice", "outcome", "choice_match_att", "deck_match_rule"], - "parameters": ["r", "p", "d"], - "gen_init": [[0, 0.1, 1], [0, 0.1, 1], [0, 1, 5]] -} From 386a1b1ffa63e2740d07d11fc87cbf615eae29c8 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 16 Apr 2019 17:54:34 +0900 Subject: [PATCH 006/163] Edit to use correct path for submodule --- R/choiceRT_lba.R | 4 ++-- R/choiceRT_lba_single.R | 4 ++-- R/hBayesDM_model.R | 4 ++-- R/ra_noLA.R | 4 ++-- R/ra_noRA.R | 4 ++-- R/ra_prospect.R | 4 ++-- R/stanmodels.R | 2 +- R/wcs_sql.R | 2 +- src/Makevars | 2 +- src/Makevars.win | 2 +- tools/make_cc.R | 4 ++-- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/R/choiceRT_lba.R b/R/choiceRT_lba.R index 8e69d54e..4e7be149 100644 --- a/R/choiceRT_lba.R +++ b/R/choiceRT_lba.R @@ -143,7 +143,7 @@ choiceRT_lba <- function(data = "choose", # For using example data if (data == "example") { - data <- system.file("extdata", "choiceRT_exampleData.txt", package = "hBayesDM") + data <- system.file("common", "extdata", "choiceRT_exampleData.txt", package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -306,7 +306,7 @@ choiceRT_lba <- function(data = "choose", if (FLAG_BUILD_ALL) { m = stanmodels$choiceRT_lba } else { - model_path <- system.file("stan_files", paste0(modelName, ".stan"), + model_path <- system.file("common", "stan_files", paste0(modelName, ".stan"), package="hBayesDM") m <- rstan::stan_model(model_path) } diff --git a/R/choiceRT_lba_single.R b/R/choiceRT_lba_single.R index 1800fd8f..c3573832 100644 --- a/R/choiceRT_lba_single.R +++ b/R/choiceRT_lba_single.R @@ -143,7 +143,7 @@ choiceRT_lba_single <- function(data = "choose", # For using example data if (data == "example") { - data <- system.file("extdata", 
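The hunks in this patch only change where system.file() looks. system.file() takes path components, so the submodule layout is a one-argument change, and it returns "" rather than erroring when the file is absent, which is why a wrong prefix fails quietly:

system.file("extdata", "choiceRT_exampleData.txt", package = "hBayesDM")           # flat layout
system.file("common", "extdata", "choiceRT_exampleData.txt", package = "hBayesDM") # submodule layout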
"choiceRT_single_exampleData.txt", package = "hBayesDM") + data <- system.file("common", "extdata", "choiceRT_single_exampleData.txt", package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -283,7 +283,7 @@ choiceRT_lba_single <- function(data = "choose", if (FLAG_BUILD_ALL) { m = stanmodels$choiceRT_lba_single } else { - model_path <- system.file("stan_files", paste0(modelName, ".stan"), + model_path <- system.file("common", "stan_files", paste0(modelName, ".stan"), package="hBayesDM") m <- rstan::stan_model(model_path) } diff --git a/R/hBayesDM_model.R b/R/hBayesDM_model.R index 8b05952a..74fd78aa 100644 --- a/R/hBayesDM_model.R +++ b/R/hBayesDM_model.R @@ -149,7 +149,7 @@ hBayesDM_model <- function(task_name, } else { exampleData <- paste0(task_name, "_", model_type, "_", "exampleData.txt") } - data <- system.file("extdata", exampleData, package = "hBayesDM") + data <- system.file("common", "extdata", exampleData, package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -387,7 +387,7 @@ hBayesDM_model <- function(task_name, if (FLAG_BUILD_ALL) { stanmodel_arg <- stanmodels[[model]] } else { - model_path <- system.file("stan_files", paste0(model, ".stan"), + model_path <- system.file("common", "stan_files", paste0(model, ".stan"), package="hBayesDM") stanmodel_arg <- rstan::stan_model(model_path) } diff --git a/R/ra_noLA.R b/R/ra_noLA.R index 8fa5de1d..86ecee19 100644 --- a/R/ra_noLA.R +++ b/R/ra_noLA.R @@ -26,8 +26,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. (2009) -#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_noLA <- hBayesDM_model( diff --git a/R/ra_noRA.R b/R/ra_noRA.R index 0b4b7995..db5cce13 100644 --- a/R/ra_noRA.R +++ b/R/ra_noRA.R @@ -26,8 +26,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. (2009) -#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_noRA <- hBayesDM_model( diff --git a/R/ra_prospect.R b/R/ra_prospect.R index 50175d98..036b80a0 100644 --- a/R/ra_prospect.R +++ b/R/ra_prospect.R @@ -27,8 +27,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. 
(2009) -#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_prospect <- hBayesDM_model( diff --git a/R/stanmodels.R b/R/stanmodels.R index 3201c11f..ffe24c93 100644 --- a/R/stanmodels.R +++ b/R/stanmodels.R @@ -20,7 +20,7 @@ MODELS_HOME <- "inst" if (!file.exists(MODELS_HOME)) MODELS_HOME <- sub("R$", "src", getwd()) -stan_files <- dir(file.path(MODELS_HOME, "stan_files"), +stan_files <- dir(file.path(MODELS_HOME, "common", "stan_files"), pattern = "stan$", full.names = TRUE) stanmodels <- lapply(stan_files, function(f) { model_cppname <- sub("\\.stan$", "", basename(f)) diff --git a/R/wcs_sql.R b/R/wcs_sql.R index 04ad768e..8b8cbcfc 100644 --- a/R/wcs_sql.R +++ b/R/wcs_sql.R @@ -40,7 +40,7 @@ wcs_sql <- hBayesDM_model( t_max <- 128 # Read predefined answer sheet - answersheet <- system.file("extdata", "wcs_answersheet.txt", package = "hBayesDM") + answersheet <- system.file("common", "extdata", "wcs_answersheet.txt", package = "hBayesDM") answer <- read.table(answersheet, header = TRUE) # Initialize data arrays diff --git a/src/Makevars b/src/Makevars index 720131ef..009c7083 100644 --- a/src/Makevars +++ b/src/Makevars @@ -2,7 +2,7 @@ STANHEADERS_SRC = `"$(R_HOME)/bin$(R_ARCH_BIN)/Rscript" -e "cat(system.file('inc PKG_CPPFLAGS = -I"../inst/include" -I"$(STANHEADERS_SRC)" -DBOOST_DISABLE_ASSERTS -DEIGEN_NO_DEBUG -DBOOST_MATH_OVERFLOW_ERROR_POLICY=errno_on_error -DBOOST_NO_AUTO_PTR CXX_STD = CXX14 -SOURCE_PATH = ../inst/stan_files +SOURCE_PATH = ../inst/common/stan_files ifeq ($(BUILD_ALL), true) SOURCES = $(wildcard $(SOURCE_PATH)/*.stan) endif diff --git a/src/Makevars.win b/src/Makevars.win index c9a5fc0b..fe8f7953 100644 --- a/src/Makevars.win +++ b/src/Makevars.win @@ -2,7 +2,7 @@ STANHEADERS_SRC = `"$(R_HOME)/bin$(R_ARCH_BIN)/Rscript" -e "cat(system.file('inc PKG_CPPFLAGS = -I"../inst/include" -I"$(STANHEADERS_SRC)" -DBOOST_DISABLE_ASSERTS -DEIGEN_NO_DEBUG -DBOOST_MATH_OVERFLOW_ERROR_POLICY=errno_on_error -DBOOST_NO_AUTO_PTR CXX_STD = CXX14 -SOURCE_PATH = ../inst/stan_files +SOURCE_PATH = ../inst/common/stan_files ifeq ($(BUILD_ALL), true) SOURCES = $(wildcard $(SOURCE_PATH)/*.stan) endif diff --git a/tools/make_cc.R b/tools/make_cc.R index d4817e7a..a73882b4 100644 --- a/tools/make_cc.R +++ b/tools/make_cc.R @@ -24,7 +24,7 @@ make_cc <- function(file) { cppcode <- sub("(class[[:space:]]+[A-Za-z_][A-Za-z0-9_]*[[:space:]]*: public prob_grad \\{)", paste("#include \n", "\\1"), cppcode) - cat(readLines(file.path("..", "inst", "stan_files", "pre", "license.stan")), + cat(readLines(file.path("..", "inst", "common", "stan_files", "pre", "license.stan")), "#ifndef MODELS_HPP", "#define MODELS_HPP", "#define STAN__SERVICES__COMMAND_HPP", "#include ", cppcode, "#endif", file = sub("\\.stan$", ".hpp", file), @@ -39,7 +39,7 @@ make_cc <- function(file) { "grad_log_prob", "log_prob", "unconstrain_pars", "constrain_pars", "num_pars_unconstrained", "unconstrained_param_names", "constrained_param_names"), - file = file.path("..", "inst", "stan_files", paste0(f, ".cc")), + file = file.path("..", "inst", "common", "stan_files", paste0(f, ".cc")), header = paste0('#include "', f, '.hpp"'), module = paste0("stan_fit4", f, "_mod"), CppClass = 
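The stanmodels.R hunk above is the one place model names are derived from the file tree, so the same lookup can be run interactively (from the repository root) to check which .stan files a given SOURCE_PATH would pick up:

stan_files <- dir(file.path("inst", "common", "stan_files"),
                  pattern = "stan$", full.names = TRUE)
sub("\\.stan$", "", basename(stan_files))   # e.g. "ra_prospect", "wcs_sql"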
"rstan::stan_fit ", From 151660d76747e1d2faac5b6847d768210fb28c25 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 30 Apr 2019 17:34:55 +0900 Subject: [PATCH 007/163] Revert "Edit to use correct path for submodule" This reverts commit 386a1b1ffa63e2740d07d11fc87cbf615eae29c8. --- R/choiceRT_lba.R | 4 ++-- R/choiceRT_lba_single.R | 4 ++-- R/hBayesDM_model.R | 4 ++-- R/ra_noLA.R | 4 ++-- R/ra_noRA.R | 4 ++-- R/ra_prospect.R | 4 ++-- R/stanmodels.R | 2 +- R/wcs_sql.R | 2 +- src/Makevars | 2 +- src/Makevars.win | 2 +- tools/make_cc.R | 4 ++-- 11 files changed, 18 insertions(+), 18 deletions(-) diff --git a/R/choiceRT_lba.R b/R/choiceRT_lba.R index 4e7be149..8e69d54e 100644 --- a/R/choiceRT_lba.R +++ b/R/choiceRT_lba.R @@ -143,7 +143,7 @@ choiceRT_lba <- function(data = "choose", # For using example data if (data == "example") { - data <- system.file("common", "extdata", "choiceRT_exampleData.txt", package = "hBayesDM") + data <- system.file("extdata", "choiceRT_exampleData.txt", package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -306,7 +306,7 @@ choiceRT_lba <- function(data = "choose", if (FLAG_BUILD_ALL) { m = stanmodels$choiceRT_lba } else { - model_path <- system.file("common", "stan_files", paste0(modelName, ".stan"), + model_path <- system.file("stan_files", paste0(modelName, ".stan"), package="hBayesDM") m <- rstan::stan_model(model_path) } diff --git a/R/choiceRT_lba_single.R b/R/choiceRT_lba_single.R index c3573832..1800fd8f 100644 --- a/R/choiceRT_lba_single.R +++ b/R/choiceRT_lba_single.R @@ -143,7 +143,7 @@ choiceRT_lba_single <- function(data = "choose", # For using example data if (data == "example") { - data <- system.file("common", "extdata", "choiceRT_single_exampleData.txt", package = "hBayesDM") + data <- system.file("extdata", "choiceRT_single_exampleData.txt", package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -283,7 +283,7 @@ choiceRT_lba_single <- function(data = "choose", if (FLAG_BUILD_ALL) { m = stanmodels$choiceRT_lba_single } else { - model_path <- system.file("common", "stan_files", paste0(modelName, ".stan"), + model_path <- system.file("stan_files", paste0(modelName, ".stan"), package="hBayesDM") m <- rstan::stan_model(model_path) } diff --git a/R/hBayesDM_model.R b/R/hBayesDM_model.R index 74fd78aa..8b05952a 100644 --- a/R/hBayesDM_model.R +++ b/R/hBayesDM_model.R @@ -149,7 +149,7 @@ hBayesDM_model <- function(task_name, } else { exampleData <- paste0(task_name, "_", model_type, "_", "exampleData.txt") } - data <- system.file("common", "extdata", exampleData, package = "hBayesDM") + data <- system.file("extdata", exampleData, package = "hBayesDM") } else if (data == "choose") { data <- file.choose() } @@ -387,7 +387,7 @@ hBayesDM_model <- function(task_name, if (FLAG_BUILD_ALL) { stanmodel_arg <- stanmodels[[model]] } else { - model_path <- system.file("common", "stan_files", paste0(model, ".stan"), + model_path <- system.file("stan_files", paste0(model, ".stan"), package="hBayesDM") stanmodel_arg <- rstan::stan_model(model_path) } diff --git a/R/ra_noLA.R b/R/ra_noLA.R index 86ecee19..8fa5de1d 100644 --- a/R/ra_noLA.R +++ b/R/ra_noLA.R @@ -26,8 +26,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. 
(2009) -#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_noLA <- hBayesDM_model( diff --git a/R/ra_noRA.R b/R/ra_noRA.R index db5cce13..0b4b7995 100644 --- a/R/ra_noRA.R +++ b/R/ra_noRA.R @@ -26,8 +26,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. (2009) -#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_noRA <- hBayesDM_model( diff --git a/R/ra_prospect.R b/R/ra_prospect.R index 036b80a0..50175d98 100644 --- a/R/ra_prospect.R +++ b/R/ra_prospect.R @@ -27,8 +27,8 @@ #' #' \dontrun{ #' # Paths to data published in Sokol-Hessner et al. (2009) -#' path_to_attend_data <- system.file("common", "extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("common", "extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") #' } ra_prospect <- hBayesDM_model( diff --git a/R/stanmodels.R b/R/stanmodels.R index ffe24c93..3201c11f 100644 --- a/R/stanmodels.R +++ b/R/stanmodels.R @@ -20,7 +20,7 @@ MODELS_HOME <- "inst" if (!file.exists(MODELS_HOME)) MODELS_HOME <- sub("R$", "src", getwd()) -stan_files <- dir(file.path(MODELS_HOME, "common", "stan_files"), +stan_files <- dir(file.path(MODELS_HOME, "stan_files"), pattern = "stan$", full.names = TRUE) stanmodels <- lapply(stan_files, function(f) { model_cppname <- sub("\\.stan$", "", basename(f)) diff --git a/R/wcs_sql.R b/R/wcs_sql.R index 8b8cbcfc..04ad768e 100644 --- a/R/wcs_sql.R +++ b/R/wcs_sql.R @@ -40,7 +40,7 @@ wcs_sql <- hBayesDM_model( t_max <- 128 # Read predefined answer sheet - answersheet <- system.file("common", "extdata", "wcs_answersheet.txt", package = "hBayesDM") + answersheet <- system.file("extdata", "wcs_answersheet.txt", package = "hBayesDM") answer <- read.table(answersheet, header = TRUE) # Initialize data arrays diff --git a/src/Makevars b/src/Makevars index 009c7083..720131ef 100644 --- a/src/Makevars +++ b/src/Makevars @@ -2,7 +2,7 @@ STANHEADERS_SRC = `"$(R_HOME)/bin$(R_ARCH_BIN)/Rscript" -e "cat(system.file('inc PKG_CPPFLAGS = -I"../inst/include" -I"$(STANHEADERS_SRC)" -DBOOST_DISABLE_ASSERTS -DEIGEN_NO_DEBUG -DBOOST_MATH_OVERFLOW_ERROR_POLICY=errno_on_error -DBOOST_NO_AUTO_PTR CXX_STD = CXX14 -SOURCE_PATH = ../inst/common/stan_files +SOURCE_PATH = ../inst/stan_files ifeq ($(BUILD_ALL), true) SOURCES = $(wildcard $(SOURCE_PATH)/*.stan) endif diff --git a/src/Makevars.win b/src/Makevars.win index fe8f7953..c9a5fc0b 100644 --- a/src/Makevars.win +++ b/src/Makevars.win @@ -2,7 +2,7 @@ STANHEADERS_SRC = `"$(R_HOME)/bin$(R_ARCH_BIN)/Rscript" -e "cat(system.file('inc PKG_CPPFLAGS = -I"../inst/include" -I"$(STANHEADERS_SRC)" -DBOOST_DISABLE_ASSERTS 
-DEIGEN_NO_DEBUG -DBOOST_MATH_OVERFLOW_ERROR_POLICY=errno_on_error -DBOOST_NO_AUTO_PTR CXX_STD = CXX14 -SOURCE_PATH = ../inst/common/stan_files +SOURCE_PATH = ../inst/stan_files ifeq ($(BUILD_ALL), true) SOURCES = $(wildcard $(SOURCE_PATH)/*.stan) endif diff --git a/tools/make_cc.R b/tools/make_cc.R index a73882b4..d4817e7a 100644 --- a/tools/make_cc.R +++ b/tools/make_cc.R @@ -24,7 +24,7 @@ make_cc <- function(file) { cppcode <- sub("(class[[:space:]]+[A-Za-z_][A-Za-z0-9_]*[[:space:]]*: public prob_grad \\{)", paste("#include \n", "\\1"), cppcode) - cat(readLines(file.path("..", "inst", "common", "stan_files", "pre", "license.stan")), + cat(readLines(file.path("..", "inst", "stan_files", "pre", "license.stan")), "#ifndef MODELS_HPP", "#define MODELS_HPP", "#define STAN__SERVICES__COMMAND_HPP", "#include ", cppcode, "#endif", file = sub("\\.stan$", ".hpp", file), @@ -39,7 +39,7 @@ make_cc <- function(file) { "grad_log_prob", "log_prob", "unconstrain_pars", "constrain_pars", "num_pars_unconstrained", "unconstrained_param_names", "constrained_param_names"), - file = file.path("..", "inst", "common", "stan_files", paste0(f, ".cc")), + file = file.path("..", "inst", "stan_files", paste0(f, ".cc")), header = paste0('#include "', f, '.hpp"'), module = paste0("stan_fit4", f, "_mod"), CppClass = "rstan::stan_fit ", From 480bae7c0329aeba4d54da78583dd694892bc551 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 30 Apr 2019 18:14:58 +0900 Subject: [PATCH 008/163] Move R-related into R/ --- .gitmodules | 3 - .Rbuildignore => R/.Rbuildignore | 0 DESCRIPTION => R/DESCRIPTION | 0 NAMESPACE => R/NAMESPACE | 0 R/{ => R}/HDIofMCMC.R | 0 R/{ => R}/bandit2arm_delta.R | 0 R/{ => R}/bandit4arm2_kalman_filter.R | 0 R/{ => R}/bandit4arm_2par_lapse.R | 0 R/{ => R}/bandit4arm_4par.R | 0 R/{ => R}/bandit4arm_lapse.R | 0 R/{ => R}/bandit4arm_lapse_decay.R | 0 R/{ => R}/bandit4arm_singleA_lapse.R | 0 R/{ => R}/bart_par4.R | 0 R/{ => R}/choiceRT_ddm.R | 0 R/{ => R}/choiceRT_ddm_single.R | 0 R/{ => R}/choiceRT_lba.R | 0 R/{ => R}/choiceRT_lba_single.R | 0 R/{ => R}/cra_exp.R | 0 R/{ => R}/cra_linear.R | 0 R/{ => R}/dbdm_prob_weight.R | 0 R/{ => R}/dd_cs.R | 0 R/{ => R}/dd_cs_single.R | 0 R/{ => R}/dd_exp.R | 0 R/{ => R}/dd_hyperbolic.R | 0 R/{ => R}/dd_hyperbolic_single.R | 0 R/{ => R}/estimate_mode.R | 0 R/{ => R}/extract_ic.R | 0 R/{ => R}/gng_m1.R | 0 R/{ => R}/gng_m2.R | 0 R/{ => R}/gng_m3.R | 0 R/{ => R}/gng_m4.R | 0 R/{ => R}/hBayesDM.R | 0 R/{ => R}/hBayesDM_model.R | 0 R/{ => R}/igt_orl.R | 0 R/{ => R}/igt_pvl_decay.R | 0 R/{ => R}/igt_pvl_delta.R | 0 R/{ => R}/igt_vpp.R | 0 R/{ => R}/multiplot.R | 0 R/{ => R}/peer_ocu.R | 0 R/{ => R}/plot.hBayesDM.R | 0 R/{ => R}/plotDist.R | 0 R/{ => R}/plotHDI.R | 0 R/{ => R}/plotInd.R | 0 R/{ => R}/printFit.R | 0 R/{ => R}/prl_ewa.R | 0 R/{ => R}/prl_fictitious.R | 0 R/{ => R}/prl_fictitious_multipleB.R | 0 R/{ => R}/prl_fictitious_rp.R | 0 R/{ => R}/prl_fictitious_rp_woa.R | 0 R/{ => R}/prl_fictitious_woa.R | 0 R/{ => R}/prl_rp.R | 0 R/{ => R}/prl_rp_multipleB.R | 0 R/{ => R}/pst_gainloss_Q.R | 0 R/{ => R}/ra_noLA.R | 0 R/{ => R}/ra_noRA.R | 0 R/{ => R}/ra_prospect.R | 0 R/{ => R}/rdt_happiness.R | 0 R/{ => R}/rhat.R | 0 R/{ => R}/settings.R | 0 R/{ => R}/stanmodels.R | 0 R/{ => R}/ts_par4.R | 0 R/{ => R}/ts_par6.R | 0 R/{ => R}/ts_par7.R | 0 R/{ => R}/ug_bayes.R | 0 R/{ => R}/ug_delta.R | 0 R/{ => R}/wcs_sql.R | 0 R/{ => R}/zzz.R | 0 _pkgdown.yml => R/_pkgdown.yml | 0 cran-comments.md => R/cran-comments.md | 0 {docs => R/docs}/LICENSE-text.html | 0 {docs => 
R/docs}/authors.html | 0 {docs => R/docs}/docsearch.css | 0 {docs => R/docs}/docsearch.js | 0 {docs => R/docs}/index.html | 0 {docs => R/docs}/link.svg | 0 {docs => R/docs}/news/index.html | 0 {docs => R/docs}/pkgdown.css | 0 {docs => R/docs}/pkgdown.js | 0 {docs => R/docs}/pkgdown.yml | 0 {docs => R/docs}/reference/HDIofMCMC.html | 0 .../docs}/reference/bandit2arm_delta.html | 0 .../reference/bandit4arm2_kalman_filter.html | 0 .../reference/bandit4arm_2par_lapse.html | 0 .../docs}/reference/bandit4arm_4par.html | 0 .../docs}/reference/bandit4arm_lapse.html | 0 .../reference/bandit4arm_lapse_decay.html | 0 .../reference/bandit4arm_singleA_lapse.html | 0 {docs => R/docs}/reference/bart_par4.html | 0 {docs => R/docs}/reference/choiceRT_ddm.html | 0 .../docs}/reference/choiceRT_ddm_single.html | 0 {docs => R/docs}/reference/choiceRT_lba.html | 0 .../docs}/reference/choiceRT_lba_single.html | 0 {docs => R/docs}/reference/cra_exp.html | 0 {docs => R/docs}/reference/cra_linear.html | 0 .../docs}/reference/dbdm_prob_weight.html | 0 {docs => R/docs}/reference/dd_cs.html | 0 {docs => R/docs}/reference/dd_cs_single.html | 0 {docs => R/docs}/reference/dd_exp.html | 0 {docs => R/docs}/reference/dd_hyperbolic.html | 0 .../docs}/reference/dd_hyperbolic_single.html | 0 {docs => R/docs}/reference/estimate_mode.html | 0 {docs => R/docs}/reference/extract_ic.html | 0 {docs => R/docs}/reference/gng_m1.html | 0 {docs => R/docs}/reference/gng_m2.html | 0 {docs => R/docs}/reference/gng_m3.html | 0 {docs => R/docs}/reference/gng_m4.html | 0 .../docs}/reference/hBayesDM-package.html | 0 .../docs}/reference/hBayesDM_model.html | 0 {docs => R/docs}/reference/igt_orl.html | 0 {docs => R/docs}/reference/igt_pvl_decay.html | 0 {docs => R/docs}/reference/igt_pvl_delta.html | 0 {docs => R/docs}/reference/igt_vpp.html | 0 {docs => R/docs}/reference/index.html | 0 {docs => R/docs}/reference/multiplot.html | 0 {docs => R/docs}/reference/peer_ocu.html | 0 {docs => R/docs}/reference/plot.hBayesDM.html | 0 {docs => R/docs}/reference/plotDist.html | 0 {docs => R/docs}/reference/plotHDI.html | 0 {docs => R/docs}/reference/plotInd.html | 0 {docs => R/docs}/reference/printFit.html | 0 {docs => R/docs}/reference/prl_ewa.html | 0 .../docs}/reference/prl_fictitious.html | 0 .../reference/prl_fictitious_multipleB.html | 0 .../docs}/reference/prl_fictitious_rp.html | 0 .../reference/prl_fictitious_rp_woa.html | 0 .../docs}/reference/prl_fictitious_woa.html | 0 {docs => R/docs}/reference/prl_rp.html | 0 .../docs}/reference/prl_rp_multipleB.html | 0 .../docs}/reference/pst_gainloss_Q.html | 0 {docs => R/docs}/reference/ra_noLA.html | 0 {docs => R/docs}/reference/ra_noRA.html | 0 {docs => R/docs}/reference/ra_prospect.html | 0 {docs => R/docs}/reference/rdt_happiness.html | 0 {docs => R/docs}/reference/rhat.html | 0 {docs => R/docs}/reference/ts_par4.html | 0 {docs => R/docs}/reference/ts_par6.html | 0 {docs => R/docs}/reference/ts_par7.html | 0 {docs => R/docs}/reference/ug_bayes.html | 0 {docs => R/docs}/reference/ug_delta.html | 0 {docs => R/docs}/reference/wcs_sql.html | 0 hBayesDM.Rproj => R/hBayesDM.Rproj | 0 {inst => R/inst}/CITATION | 0 R/inst/extdata/bandit2arm_exampleData.txt | 2001 +++ R/inst/extdata/bandit4arm2_exampleData.txt | 3001 ++++ R/inst/extdata/bandit4arm_exampleData.txt | 2001 +++ R/inst/extdata/bart_exampleData.txt | 91 + R/inst/extdata/choiceRT_exampleData.txt | 5001 ++++++ .../extdata/choiceRT_single_exampleData.txt | 1001 ++ R/inst/extdata/cra_exampleData.txt | 541 + R/inst/extdata/dbdm_exampleData.txt | 15001 
++++++++++++++++ R/inst/extdata/dd_exampleData.txt | 2161 +++ R/inst/extdata/dd_single_exampleData.txt | 109 + R/inst/extdata/gng_exampleData.txt | 2401 +++ R/inst/extdata/igt_exampleData.txt | 401 + R/inst/extdata/peer_exampleData.txt | 361 + R/inst/extdata/prl_exampleData.txt | 2001 +++ R/inst/extdata/prl_multipleB_exampleData.txt | 1801 ++ R/inst/extdata/pst_exampleData.txt | 1021 ++ R/inst/extdata/ra_data_attend.txt | 4192 +++++ R/inst/extdata/ra_data_reappraisal.txt | 4190 +++++ R/inst/extdata/ra_exampleData.txt | 701 + R/inst/extdata/rdt_exampleData.txt | 901 + R/inst/extdata/ts_exampleData.txt | 2191 +++ R/inst/extdata/ug_exampleData.txt | 1801 ++ R/inst/extdata/wcs_answersheet.txt | 4 + R/inst/extdata/wcs_exampleData.txt | 1158 ++ {inst => R/inst}/include/meta_header.hpp | 0 {inst => R/inst}/plotting/plot_functions.R | 0 R/inst/stan_files/bandit2arm_delta.stan | 109 + .../stan_files/bandit4arm2_kalman_filter.stan | 163 + R/inst/stan_files/bandit4arm_2par_lapse.stan | 173 + R/inst/stan_files/bandit4arm_4par.stan | 176 + R/inst/stan_files/bandit4arm_lapse.stan | 182 + R/inst/stan_files/bandit4arm_lapse_decay.stan | 201 + .../stan_files/bandit4arm_singleA_lapse.stan | 177 + R/inst/stan_files/bart_par4.stan | 129 + R/inst/stan_files/choiceRT_ddm.stan | 98 + R/inst/stan_files/choiceRT_ddm_single.stan | 58 + R/inst/stan_files/choiceRT_lba.stan | 278 + R/inst/stan_files/choiceRT_lba_single.stan | 239 + R/inst/stan_files/cra_exp.stan | 134 + R/inst/stan_files/cra_linear.stan | 130 + R/inst/stan_files/dbdm_prob_weight.stan | 154 + R/inst/stan_files/dd_cs.stan | 107 + R/inst/stan_files/dd_cs_single.stan | 63 + R/inst/stan_files/dd_exp.stan | 101 + R/inst/stan_files/dd_hyperbolic.stan | 101 + R/inst/stan_files/dd_hyperbolic_single.stan | 57 + R/inst/stan_files/gng_m1.stan | 149 + R/inst/stan_files/gng_m2.stan | 160 + R/inst/stan_files/gng_m3.stan | 179 + R/inst/stan_files/gng_m4.stan | 210 + R/inst/stan_files/igt_orl.stan | 207 + R/inst/stan_files/igt_pvl_decay.stan | 134 + R/inst/stan_files/igt_pvl_delta.stan | 132 + R/inst/stan_files/igt_vpp.stan | 188 + R/inst/stan_files/peer_ocu.stan | 115 + R/inst/stan_files/pre/license.stan | 14 + R/inst/stan_files/prl_ewa.stan | 179 + R/inst/stan_files/prl_fictitious.stan | 173 + .../stan_files/prl_fictitious_multipleB.stan | 185 + R/inst/stan_files/prl_fictitious_rp.stan | 188 + R/inst/stan_files/prl_fictitious_rp_woa.stan | 180 + R/inst/stan_files/prl_fictitious_woa.stan | 165 + R/inst/stan_files/prl_rp.stan | 149 + R/inst/stan_files/prl_rp_multipleB.stan | 161 + R/inst/stan_files/pst_gainloss_Q.stan | 114 + R/inst/stan_files/ra_noLA.stan | 95 + R/inst/stan_files/ra_noRA.stan | 95 + R/inst/stan_files/ra_prospect.stan | 97 + R/inst/stan_files/rdt_happiness.stan | 146 + R/inst/stan_files/ts_par4.stan | 204 + R/inst/stan_files/ts_par6.stan | 213 + R/inst/stan_files/ts_par7.stan | 217 + R/inst/stan_files/ug_bayes.stan | 167 + R/inst/stan_files/ug_delta.stan | 129 + R/inst/stan_files/wcs_sql.stan | 176 + .../man-roxygen/README.md | 0 .../man-roxygen}/model-documentation.R | 0 {man => R/man}/HDIofMCMC.Rd | 0 {man => R/man}/bandit2arm_delta.Rd | 0 {man => R/man}/bandit4arm2_kalman_filter.Rd | 0 {man => R/man}/bandit4arm_2par_lapse.Rd | 0 {man => R/man}/bandit4arm_4par.Rd | 0 {man => R/man}/bandit4arm_lapse.Rd | 0 {man => R/man}/bandit4arm_lapse_decay.Rd | 0 {man => R/man}/bandit4arm_singleA_lapse.Rd | 0 {man => R/man}/bart_par4.Rd | 0 {man => R/man}/choiceRT_ddm.Rd | 0 {man => R/man}/choiceRT_ddm_single.Rd | 0 {man => R/man}/choiceRT_lba.Rd | 0 {man => 
R/man}/choiceRT_lba_single.Rd | 0 {man => R/man}/cra_exp.Rd | 0 {man => R/man}/cra_linear.Rd | 0 {man => R/man}/dbdm_prob_weight.Rd | 0 {man => R/man}/dd_cs.Rd | 0 {man => R/man}/dd_cs_single.Rd | 0 {man => R/man}/dd_exp.Rd | 0 {man => R/man}/dd_hyperbolic.Rd | 0 {man => R/man}/dd_hyperbolic_single.Rd | 0 {man => R/man}/estimate_mode.Rd | 0 {man => R/man}/extract_ic.Rd | 0 {man => R/man}/gng_m1.Rd | 0 {man => R/man}/gng_m2.Rd | 0 {man => R/man}/gng_m3.Rd | 0 {man => R/man}/gng_m4.Rd | 0 {man => R/man}/hBayesDM-package.Rd | 0 {man => R/man}/hBayesDM_model.Rd | 0 {man => R/man}/igt_orl.Rd | 0 {man => R/man}/igt_pvl_decay.Rd | 0 {man => R/man}/igt_pvl_delta.Rd | 0 {man => R/man}/igt_vpp.Rd | 0 {man => R/man}/multiplot.Rd | 0 {man => R/man}/peer_ocu.Rd | 0 {man => R/man}/plot.hBayesDM.Rd | 0 {man => R/man}/plotDist.Rd | 0 {man => R/man}/plotHDI.Rd | 0 {man => R/man}/plotInd.Rd | 0 {man => R/man}/printFit.Rd | 0 {man => R/man}/prl_ewa.Rd | 0 {man => R/man}/prl_fictitious.Rd | 0 {man => R/man}/prl_fictitious_multipleB.Rd | 0 {man => R/man}/prl_fictitious_rp.Rd | 0 {man => R/man}/prl_fictitious_rp_woa.Rd | 0 {man => R/man}/prl_fictitious_woa.Rd | 0 {man => R/man}/prl_rp.Rd | 0 {man => R/man}/prl_rp_multipleB.Rd | 0 {man => R/man}/pst_gainloss_Q.Rd | 0 {man => R/man}/ra_noLA.Rd | 0 {man => R/man}/ra_noRA.Rd | 0 {man => R/man}/ra_prospect.Rd | 0 {man => R/man}/rdt_happiness.Rd | 0 {man => R/man}/rhat.Rd | 0 {man => R/man}/ts_par4.Rd | 0 {man => R/man}/ts_par6.Rd | 0 {man => R/man}/ts_par7.Rd | 0 {man => R/man}/ug_bayes.Rd | 0 {man => R/man}/ug_delta.Rd | 0 {man => R/man}/wcs_sql.Rd | 0 {src => R/src}/Makevars | 0 {src => R/src}/Makevars.win | 0 {src => R/src}/init.cpp | 0 {tools => R/tools}/make_cc.R | 0 inst/common | 1 - 284 files changed, 61383 insertions(+), 4 deletions(-) delete mode 100644 .gitmodules rename .Rbuildignore => R/.Rbuildignore (100%) rename DESCRIPTION => R/DESCRIPTION (100%) rename NAMESPACE => R/NAMESPACE (100%) rename R/{ => R}/HDIofMCMC.R (100%) rename R/{ => R}/bandit2arm_delta.R (100%) rename R/{ => R}/bandit4arm2_kalman_filter.R (100%) rename R/{ => R}/bandit4arm_2par_lapse.R (100%) rename R/{ => R}/bandit4arm_4par.R (100%) rename R/{ => R}/bandit4arm_lapse.R (100%) rename R/{ => R}/bandit4arm_lapse_decay.R (100%) rename R/{ => R}/bandit4arm_singleA_lapse.R (100%) rename R/{ => R}/bart_par4.R (100%) rename R/{ => R}/choiceRT_ddm.R (100%) rename R/{ => R}/choiceRT_ddm_single.R (100%) rename R/{ => R}/choiceRT_lba.R (100%) rename R/{ => R}/choiceRT_lba_single.R (100%) rename R/{ => R}/cra_exp.R (100%) rename R/{ => R}/cra_linear.R (100%) rename R/{ => R}/dbdm_prob_weight.R (100%) rename R/{ => R}/dd_cs.R (100%) rename R/{ => R}/dd_cs_single.R (100%) rename R/{ => R}/dd_exp.R (100%) rename R/{ => R}/dd_hyperbolic.R (100%) rename R/{ => R}/dd_hyperbolic_single.R (100%) rename R/{ => R}/estimate_mode.R (100%) rename R/{ => R}/extract_ic.R (100%) rename R/{ => R}/gng_m1.R (100%) rename R/{ => R}/gng_m2.R (100%) rename R/{ => R}/gng_m3.R (100%) rename R/{ => R}/gng_m4.R (100%) rename R/{ => R}/hBayesDM.R (100%) rename R/{ => R}/hBayesDM_model.R (100%) rename R/{ => R}/igt_orl.R (100%) rename R/{ => R}/igt_pvl_decay.R (100%) rename R/{ => R}/igt_pvl_delta.R (100%) rename R/{ => R}/igt_vpp.R (100%) rename R/{ => R}/multiplot.R (100%) rename R/{ => R}/peer_ocu.R (100%) rename R/{ => R}/plot.hBayesDM.R (100%) rename R/{ => R}/plotDist.R (100%) rename R/{ => R}/plotHDI.R (100%) rename R/{ => R}/plotInd.R (100%) rename R/{ => R}/printFit.R (100%) rename R/{ => R}/prl_ewa.R (100%) 
rename R/{ => R}/prl_fictitious.R (100%) rename R/{ => R}/prl_fictitious_multipleB.R (100%) rename R/{ => R}/prl_fictitious_rp.R (100%) rename R/{ => R}/prl_fictitious_rp_woa.R (100%) rename R/{ => R}/prl_fictitious_woa.R (100%) rename R/{ => R}/prl_rp.R (100%) rename R/{ => R}/prl_rp_multipleB.R (100%) rename R/{ => R}/pst_gainloss_Q.R (100%) rename R/{ => R}/ra_noLA.R (100%) rename R/{ => R}/ra_noRA.R (100%) rename R/{ => R}/ra_prospect.R (100%) rename R/{ => R}/rdt_happiness.R (100%) rename R/{ => R}/rhat.R (100%) rename R/{ => R}/settings.R (100%) rename R/{ => R}/stanmodels.R (100%) rename R/{ => R}/ts_par4.R (100%) rename R/{ => R}/ts_par6.R (100%) rename R/{ => R}/ts_par7.R (100%) rename R/{ => R}/ug_bayes.R (100%) rename R/{ => R}/ug_delta.R (100%) rename R/{ => R}/wcs_sql.R (100%) rename R/{ => R}/zzz.R (100%) rename _pkgdown.yml => R/_pkgdown.yml (100%) rename cran-comments.md => R/cran-comments.md (100%) rename {docs => R/docs}/LICENSE-text.html (100%) rename {docs => R/docs}/authors.html (100%) rename {docs => R/docs}/docsearch.css (100%) rename {docs => R/docs}/docsearch.js (100%) rename {docs => R/docs}/index.html (100%) rename {docs => R/docs}/link.svg (100%) rename {docs => R/docs}/news/index.html (100%) rename {docs => R/docs}/pkgdown.css (100%) rename {docs => R/docs}/pkgdown.js (100%) rename {docs => R/docs}/pkgdown.yml (100%) rename {docs => R/docs}/reference/HDIofMCMC.html (100%) rename {docs => R/docs}/reference/bandit2arm_delta.html (100%) rename {docs => R/docs}/reference/bandit4arm2_kalman_filter.html (100%) rename {docs => R/docs}/reference/bandit4arm_2par_lapse.html (100%) rename {docs => R/docs}/reference/bandit4arm_4par.html (100%) rename {docs => R/docs}/reference/bandit4arm_lapse.html (100%) rename {docs => R/docs}/reference/bandit4arm_lapse_decay.html (100%) rename {docs => R/docs}/reference/bandit4arm_singleA_lapse.html (100%) rename {docs => R/docs}/reference/bart_par4.html (100%) rename {docs => R/docs}/reference/choiceRT_ddm.html (100%) rename {docs => R/docs}/reference/choiceRT_ddm_single.html (100%) rename {docs => R/docs}/reference/choiceRT_lba.html (100%) rename {docs => R/docs}/reference/choiceRT_lba_single.html (100%) rename {docs => R/docs}/reference/cra_exp.html (100%) rename {docs => R/docs}/reference/cra_linear.html (100%) rename {docs => R/docs}/reference/dbdm_prob_weight.html (100%) rename {docs => R/docs}/reference/dd_cs.html (100%) rename {docs => R/docs}/reference/dd_cs_single.html (100%) rename {docs => R/docs}/reference/dd_exp.html (100%) rename {docs => R/docs}/reference/dd_hyperbolic.html (100%) rename {docs => R/docs}/reference/dd_hyperbolic_single.html (100%) rename {docs => R/docs}/reference/estimate_mode.html (100%) rename {docs => R/docs}/reference/extract_ic.html (100%) rename {docs => R/docs}/reference/gng_m1.html (100%) rename {docs => R/docs}/reference/gng_m2.html (100%) rename {docs => R/docs}/reference/gng_m3.html (100%) rename {docs => R/docs}/reference/gng_m4.html (100%) rename {docs => R/docs}/reference/hBayesDM-package.html (100%) rename {docs => R/docs}/reference/hBayesDM_model.html (100%) rename {docs => R/docs}/reference/igt_orl.html (100%) rename {docs => R/docs}/reference/igt_pvl_decay.html (100%) rename {docs => R/docs}/reference/igt_pvl_delta.html (100%) rename {docs => R/docs}/reference/igt_vpp.html (100%) rename {docs => R/docs}/reference/index.html (100%) rename {docs => R/docs}/reference/multiplot.html (100%) rename {docs => R/docs}/reference/peer_ocu.html (100%) rename {docs => 
R/docs}/reference/plot.hBayesDM.html (100%) rename {docs => R/docs}/reference/plotDist.html (100%) rename {docs => R/docs}/reference/plotHDI.html (100%) rename {docs => R/docs}/reference/plotInd.html (100%) rename {docs => R/docs}/reference/printFit.html (100%) rename {docs => R/docs}/reference/prl_ewa.html (100%) rename {docs => R/docs}/reference/prl_fictitious.html (100%) rename {docs => R/docs}/reference/prl_fictitious_multipleB.html (100%) rename {docs => R/docs}/reference/prl_fictitious_rp.html (100%) rename {docs => R/docs}/reference/prl_fictitious_rp_woa.html (100%) rename {docs => R/docs}/reference/prl_fictitious_woa.html (100%) rename {docs => R/docs}/reference/prl_rp.html (100%) rename {docs => R/docs}/reference/prl_rp_multipleB.html (100%) rename {docs => R/docs}/reference/pst_gainloss_Q.html (100%) rename {docs => R/docs}/reference/ra_noLA.html (100%) rename {docs => R/docs}/reference/ra_noRA.html (100%) rename {docs => R/docs}/reference/ra_prospect.html (100%) rename {docs => R/docs}/reference/rdt_happiness.html (100%) rename {docs => R/docs}/reference/rhat.html (100%) rename {docs => R/docs}/reference/ts_par4.html (100%) rename {docs => R/docs}/reference/ts_par6.html (100%) rename {docs => R/docs}/reference/ts_par7.html (100%) rename {docs => R/docs}/reference/ug_bayes.html (100%) rename {docs => R/docs}/reference/ug_delta.html (100%) rename {docs => R/docs}/reference/wcs_sql.html (100%) rename hBayesDM.Rproj => R/hBayesDM.Rproj (100%) rename {inst => R/inst}/CITATION (100%) create mode 100644 R/inst/extdata/bandit2arm_exampleData.txt create mode 100644 R/inst/extdata/bandit4arm2_exampleData.txt create mode 100644 R/inst/extdata/bandit4arm_exampleData.txt create mode 100644 R/inst/extdata/bart_exampleData.txt create mode 100644 R/inst/extdata/choiceRT_exampleData.txt create mode 100644 R/inst/extdata/choiceRT_single_exampleData.txt create mode 100644 R/inst/extdata/cra_exampleData.txt create mode 100644 R/inst/extdata/dbdm_exampleData.txt create mode 100644 R/inst/extdata/dd_exampleData.txt create mode 100644 R/inst/extdata/dd_single_exampleData.txt create mode 100644 R/inst/extdata/gng_exampleData.txt create mode 100644 R/inst/extdata/igt_exampleData.txt create mode 100644 R/inst/extdata/peer_exampleData.txt create mode 100644 R/inst/extdata/prl_exampleData.txt create mode 100644 R/inst/extdata/prl_multipleB_exampleData.txt create mode 100644 R/inst/extdata/pst_exampleData.txt create mode 100644 R/inst/extdata/ra_data_attend.txt create mode 100644 R/inst/extdata/ra_data_reappraisal.txt create mode 100644 R/inst/extdata/ra_exampleData.txt create mode 100644 R/inst/extdata/rdt_exampleData.txt create mode 100644 R/inst/extdata/ts_exampleData.txt create mode 100644 R/inst/extdata/ug_exampleData.txt create mode 100644 R/inst/extdata/wcs_answersheet.txt create mode 100644 R/inst/extdata/wcs_exampleData.txt rename {inst => R/inst}/include/meta_header.hpp (100%) rename {inst => R/inst}/plotting/plot_functions.R (100%) create mode 100644 R/inst/stan_files/bandit2arm_delta.stan create mode 100644 R/inst/stan_files/bandit4arm2_kalman_filter.stan create mode 100644 R/inst/stan_files/bandit4arm_2par_lapse.stan create mode 100644 R/inst/stan_files/bandit4arm_4par.stan create mode 100644 R/inst/stan_files/bandit4arm_lapse.stan create mode 100644 R/inst/stan_files/bandit4arm_lapse_decay.stan create mode 100644 R/inst/stan_files/bandit4arm_singleA_lapse.stan create mode 100644 R/inst/stan_files/bart_par4.stan create mode 100644 R/inst/stan_files/choiceRT_ddm.stan create mode 100644 
R/inst/stan_files/choiceRT_ddm_single.stan create mode 100644 R/inst/stan_files/choiceRT_lba.stan create mode 100644 R/inst/stan_files/choiceRT_lba_single.stan create mode 100644 R/inst/stan_files/cra_exp.stan create mode 100644 R/inst/stan_files/cra_linear.stan create mode 100644 R/inst/stan_files/dbdm_prob_weight.stan create mode 100644 R/inst/stan_files/dd_cs.stan create mode 100644 R/inst/stan_files/dd_cs_single.stan create mode 100644 R/inst/stan_files/dd_exp.stan create mode 100644 R/inst/stan_files/dd_hyperbolic.stan create mode 100644 R/inst/stan_files/dd_hyperbolic_single.stan create mode 100644 R/inst/stan_files/gng_m1.stan create mode 100644 R/inst/stan_files/gng_m2.stan create mode 100644 R/inst/stan_files/gng_m3.stan create mode 100644 R/inst/stan_files/gng_m4.stan create mode 100644 R/inst/stan_files/igt_orl.stan create mode 100644 R/inst/stan_files/igt_pvl_decay.stan create mode 100644 R/inst/stan_files/igt_pvl_delta.stan create mode 100644 R/inst/stan_files/igt_vpp.stan create mode 100644 R/inst/stan_files/peer_ocu.stan create mode 100644 R/inst/stan_files/pre/license.stan create mode 100644 R/inst/stan_files/prl_ewa.stan create mode 100644 R/inst/stan_files/prl_fictitious.stan create mode 100644 R/inst/stan_files/prl_fictitious_multipleB.stan create mode 100644 R/inst/stan_files/prl_fictitious_rp.stan create mode 100644 R/inst/stan_files/prl_fictitious_rp_woa.stan create mode 100644 R/inst/stan_files/prl_fictitious_woa.stan create mode 100644 R/inst/stan_files/prl_rp.stan create mode 100644 R/inst/stan_files/prl_rp_multipleB.stan create mode 100644 R/inst/stan_files/pst_gainloss_Q.stan create mode 100644 R/inst/stan_files/ra_noLA.stan create mode 100644 R/inst/stan_files/ra_noRA.stan create mode 100644 R/inst/stan_files/ra_prospect.stan create mode 100644 R/inst/stan_files/rdt_happiness.stan create mode 100644 R/inst/stan_files/ts_par4.stan create mode 100644 R/inst/stan_files/ts_par6.stan create mode 100644 R/inst/stan_files/ts_par7.stan create mode 100644 R/inst/stan_files/ug_bayes.stan create mode 100644 R/inst/stan_files/ug_delta.stan create mode 100644 R/inst/stan_files/wcs_sql.stan rename man-roxygen/model-documentation.md => R/man-roxygen/README.md (100%) rename {man-roxygen => R/man-roxygen}/model-documentation.R (100%) rename {man => R/man}/HDIofMCMC.Rd (100%) rename {man => R/man}/bandit2arm_delta.Rd (100%) rename {man => R/man}/bandit4arm2_kalman_filter.Rd (100%) rename {man => R/man}/bandit4arm_2par_lapse.Rd (100%) rename {man => R/man}/bandit4arm_4par.Rd (100%) rename {man => R/man}/bandit4arm_lapse.Rd (100%) rename {man => R/man}/bandit4arm_lapse_decay.Rd (100%) rename {man => R/man}/bandit4arm_singleA_lapse.Rd (100%) rename {man => R/man}/bart_par4.Rd (100%) rename {man => R/man}/choiceRT_ddm.Rd (100%) rename {man => R/man}/choiceRT_ddm_single.Rd (100%) rename {man => R/man}/choiceRT_lba.Rd (100%) rename {man => R/man}/choiceRT_lba_single.Rd (100%) rename {man => R/man}/cra_exp.Rd (100%) rename {man => R/man}/cra_linear.Rd (100%) rename {man => R/man}/dbdm_prob_weight.Rd (100%) rename {man => R/man}/dd_cs.Rd (100%) rename {man => R/man}/dd_cs_single.Rd (100%) rename {man => R/man}/dd_exp.Rd (100%) rename {man => R/man}/dd_hyperbolic.Rd (100%) rename {man => R/man}/dd_hyperbolic_single.Rd (100%) rename {man => R/man}/estimate_mode.Rd (100%) rename {man => R/man}/extract_ic.Rd (100%) rename {man => R/man}/gng_m1.Rd (100%) rename {man => R/man}/gng_m2.Rd (100%) rename {man => R/man}/gng_m3.Rd (100%) rename {man => R/man}/gng_m4.Rd (100%) rename {man => 
R/man}/hBayesDM-package.Rd (100%) rename {man => R/man}/hBayesDM_model.Rd (100%) rename {man => R/man}/igt_orl.Rd (100%) rename {man => R/man}/igt_pvl_decay.Rd (100%) rename {man => R/man}/igt_pvl_delta.Rd (100%) rename {man => R/man}/igt_vpp.Rd (100%) rename {man => R/man}/multiplot.Rd (100%) rename {man => R/man}/peer_ocu.Rd (100%) rename {man => R/man}/plot.hBayesDM.Rd (100%) rename {man => R/man}/plotDist.Rd (100%) rename {man => R/man}/plotHDI.Rd (100%) rename {man => R/man}/plotInd.Rd (100%) rename {man => R/man}/printFit.Rd (100%) rename {man => R/man}/prl_ewa.Rd (100%) rename {man => R/man}/prl_fictitious.Rd (100%) rename {man => R/man}/prl_fictitious_multipleB.Rd (100%) rename {man => R/man}/prl_fictitious_rp.Rd (100%) rename {man => R/man}/prl_fictitious_rp_woa.Rd (100%) rename {man => R/man}/prl_fictitious_woa.Rd (100%) rename {man => R/man}/prl_rp.Rd (100%) rename {man => R/man}/prl_rp_multipleB.Rd (100%) rename {man => R/man}/pst_gainloss_Q.Rd (100%) rename {man => R/man}/ra_noLA.Rd (100%) rename {man => R/man}/ra_noRA.Rd (100%) rename {man => R/man}/ra_prospect.Rd (100%) rename {man => R/man}/rdt_happiness.Rd (100%) rename {man => R/man}/rhat.Rd (100%) rename {man => R/man}/ts_par4.Rd (100%) rename {man => R/man}/ts_par6.Rd (100%) rename {man => R/man}/ts_par7.Rd (100%) rename {man => R/man}/ug_bayes.Rd (100%) rename {man => R/man}/ug_delta.Rd (100%) rename {man => R/man}/wcs_sql.Rd (100%) rename {src => R/src}/Makevars (100%) rename {src => R/src}/Makevars.win (100%) rename {src => R/src}/init.cpp (100%) rename {tools => R/tools}/make_cc.R (100%) delete mode 160000 inst/common diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index f7a75667..00000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "hBayesDM-models"] - path = inst/common - url = https://github.com/CCS-Lab/hBayesDM-models.git diff --git a/.Rbuildignore b/R/.Rbuildignore similarity index 100% rename from .Rbuildignore rename to R/.Rbuildignore diff --git a/DESCRIPTION b/R/DESCRIPTION similarity index 100% rename from DESCRIPTION rename to R/DESCRIPTION diff --git a/NAMESPACE b/R/NAMESPACE similarity index 100% rename from NAMESPACE rename to R/NAMESPACE diff --git a/R/HDIofMCMC.R b/R/R/HDIofMCMC.R similarity index 100% rename from R/HDIofMCMC.R rename to R/R/HDIofMCMC.R diff --git a/R/bandit2arm_delta.R b/R/R/bandit2arm_delta.R similarity index 100% rename from R/bandit2arm_delta.R rename to R/R/bandit2arm_delta.R diff --git a/R/bandit4arm2_kalman_filter.R b/R/R/bandit4arm2_kalman_filter.R similarity index 100% rename from R/bandit4arm2_kalman_filter.R rename to R/R/bandit4arm2_kalman_filter.R diff --git a/R/bandit4arm_2par_lapse.R b/R/R/bandit4arm_2par_lapse.R similarity index 100% rename from R/bandit4arm_2par_lapse.R rename to R/R/bandit4arm_2par_lapse.R diff --git a/R/bandit4arm_4par.R b/R/R/bandit4arm_4par.R similarity index 100% rename from R/bandit4arm_4par.R rename to R/R/bandit4arm_4par.R diff --git a/R/bandit4arm_lapse.R b/R/R/bandit4arm_lapse.R similarity index 100% rename from R/bandit4arm_lapse.R rename to R/R/bandit4arm_lapse.R diff --git a/R/bandit4arm_lapse_decay.R b/R/R/bandit4arm_lapse_decay.R similarity index 100% rename from R/bandit4arm_lapse_decay.R rename to R/R/bandit4arm_lapse_decay.R diff --git a/R/bandit4arm_singleA_lapse.R b/R/R/bandit4arm_singleA_lapse.R similarity index 100% rename from R/bandit4arm_singleA_lapse.R rename to R/R/bandit4arm_singleA_lapse.R diff --git a/R/bart_par4.R b/R/R/bart_par4.R similarity index 100% rename from R/bart_par4.R rename to 
R/R/bart_par4.R diff --git a/R/choiceRT_ddm.R b/R/R/choiceRT_ddm.R similarity index 100% rename from R/choiceRT_ddm.R rename to R/R/choiceRT_ddm.R diff --git a/R/choiceRT_ddm_single.R b/R/R/choiceRT_ddm_single.R similarity index 100% rename from R/choiceRT_ddm_single.R rename to R/R/choiceRT_ddm_single.R diff --git a/R/choiceRT_lba.R b/R/R/choiceRT_lba.R similarity index 100% rename from R/choiceRT_lba.R rename to R/R/choiceRT_lba.R diff --git a/R/choiceRT_lba_single.R b/R/R/choiceRT_lba_single.R similarity index 100% rename from R/choiceRT_lba_single.R rename to R/R/choiceRT_lba_single.R diff --git a/R/cra_exp.R b/R/R/cra_exp.R similarity index 100% rename from R/cra_exp.R rename to R/R/cra_exp.R diff --git a/R/cra_linear.R b/R/R/cra_linear.R similarity index 100% rename from R/cra_linear.R rename to R/R/cra_linear.R diff --git a/R/dbdm_prob_weight.R b/R/R/dbdm_prob_weight.R similarity index 100% rename from R/dbdm_prob_weight.R rename to R/R/dbdm_prob_weight.R diff --git a/R/dd_cs.R b/R/R/dd_cs.R similarity index 100% rename from R/dd_cs.R rename to R/R/dd_cs.R diff --git a/R/dd_cs_single.R b/R/R/dd_cs_single.R similarity index 100% rename from R/dd_cs_single.R rename to R/R/dd_cs_single.R diff --git a/R/dd_exp.R b/R/R/dd_exp.R similarity index 100% rename from R/dd_exp.R rename to R/R/dd_exp.R diff --git a/R/dd_hyperbolic.R b/R/R/dd_hyperbolic.R similarity index 100% rename from R/dd_hyperbolic.R rename to R/R/dd_hyperbolic.R diff --git a/R/dd_hyperbolic_single.R b/R/R/dd_hyperbolic_single.R similarity index 100% rename from R/dd_hyperbolic_single.R rename to R/R/dd_hyperbolic_single.R diff --git a/R/estimate_mode.R b/R/R/estimate_mode.R similarity index 100% rename from R/estimate_mode.R rename to R/R/estimate_mode.R diff --git a/R/extract_ic.R b/R/R/extract_ic.R similarity index 100% rename from R/extract_ic.R rename to R/R/extract_ic.R diff --git a/R/gng_m1.R b/R/R/gng_m1.R similarity index 100% rename from R/gng_m1.R rename to R/R/gng_m1.R diff --git a/R/gng_m2.R b/R/R/gng_m2.R similarity index 100% rename from R/gng_m2.R rename to R/R/gng_m2.R diff --git a/R/gng_m3.R b/R/R/gng_m3.R similarity index 100% rename from R/gng_m3.R rename to R/R/gng_m3.R diff --git a/R/gng_m4.R b/R/R/gng_m4.R similarity index 100% rename from R/gng_m4.R rename to R/R/gng_m4.R diff --git a/R/hBayesDM.R b/R/R/hBayesDM.R similarity index 100% rename from R/hBayesDM.R rename to R/R/hBayesDM.R diff --git a/R/hBayesDM_model.R b/R/R/hBayesDM_model.R similarity index 100% rename from R/hBayesDM_model.R rename to R/R/hBayesDM_model.R diff --git a/R/igt_orl.R b/R/R/igt_orl.R similarity index 100% rename from R/igt_orl.R rename to R/R/igt_orl.R diff --git a/R/igt_pvl_decay.R b/R/R/igt_pvl_decay.R similarity index 100% rename from R/igt_pvl_decay.R rename to R/R/igt_pvl_decay.R diff --git a/R/igt_pvl_delta.R b/R/R/igt_pvl_delta.R similarity index 100% rename from R/igt_pvl_delta.R rename to R/R/igt_pvl_delta.R diff --git a/R/igt_vpp.R b/R/R/igt_vpp.R similarity index 100% rename from R/igt_vpp.R rename to R/R/igt_vpp.R diff --git a/R/multiplot.R b/R/R/multiplot.R similarity index 100% rename from R/multiplot.R rename to R/R/multiplot.R diff --git a/R/peer_ocu.R b/R/R/peer_ocu.R similarity index 100% rename from R/peer_ocu.R rename to R/R/peer_ocu.R diff --git a/R/plot.hBayesDM.R b/R/R/plot.hBayesDM.R similarity index 100% rename from R/plot.hBayesDM.R rename to R/R/plot.hBayesDM.R diff --git a/R/plotDist.R b/R/R/plotDist.R similarity index 100% rename from R/plotDist.R rename to R/R/plotDist.R diff --git 
a/R/plotHDI.R b/R/R/plotHDI.R similarity index 100% rename from R/plotHDI.R rename to R/R/plotHDI.R diff --git a/R/plotInd.R b/R/R/plotInd.R similarity index 100% rename from R/plotInd.R rename to R/R/plotInd.R diff --git a/R/printFit.R b/R/R/printFit.R similarity index 100% rename from R/printFit.R rename to R/R/printFit.R diff --git a/R/prl_ewa.R b/R/R/prl_ewa.R similarity index 100% rename from R/prl_ewa.R rename to R/R/prl_ewa.R diff --git a/R/prl_fictitious.R b/R/R/prl_fictitious.R similarity index 100% rename from R/prl_fictitious.R rename to R/R/prl_fictitious.R diff --git a/R/prl_fictitious_multipleB.R b/R/R/prl_fictitious_multipleB.R similarity index 100% rename from R/prl_fictitious_multipleB.R rename to R/R/prl_fictitious_multipleB.R diff --git a/R/prl_fictitious_rp.R b/R/R/prl_fictitious_rp.R similarity index 100% rename from R/prl_fictitious_rp.R rename to R/R/prl_fictitious_rp.R diff --git a/R/prl_fictitious_rp_woa.R b/R/R/prl_fictitious_rp_woa.R similarity index 100% rename from R/prl_fictitious_rp_woa.R rename to R/R/prl_fictitious_rp_woa.R diff --git a/R/prl_fictitious_woa.R b/R/R/prl_fictitious_woa.R similarity index 100% rename from R/prl_fictitious_woa.R rename to R/R/prl_fictitious_woa.R diff --git a/R/prl_rp.R b/R/R/prl_rp.R similarity index 100% rename from R/prl_rp.R rename to R/R/prl_rp.R diff --git a/R/prl_rp_multipleB.R b/R/R/prl_rp_multipleB.R similarity index 100% rename from R/prl_rp_multipleB.R rename to R/R/prl_rp_multipleB.R diff --git a/R/pst_gainloss_Q.R b/R/R/pst_gainloss_Q.R similarity index 100% rename from R/pst_gainloss_Q.R rename to R/R/pst_gainloss_Q.R diff --git a/R/ra_noLA.R b/R/R/ra_noLA.R similarity index 100% rename from R/ra_noLA.R rename to R/R/ra_noLA.R diff --git a/R/ra_noRA.R b/R/R/ra_noRA.R similarity index 100% rename from R/ra_noRA.R rename to R/R/ra_noRA.R diff --git a/R/ra_prospect.R b/R/R/ra_prospect.R similarity index 100% rename from R/ra_prospect.R rename to R/R/ra_prospect.R diff --git a/R/rdt_happiness.R b/R/R/rdt_happiness.R similarity index 100% rename from R/rdt_happiness.R rename to R/R/rdt_happiness.R diff --git a/R/rhat.R b/R/R/rhat.R similarity index 100% rename from R/rhat.R rename to R/R/rhat.R diff --git a/R/settings.R b/R/R/settings.R similarity index 100% rename from R/settings.R rename to R/R/settings.R diff --git a/R/stanmodels.R b/R/R/stanmodels.R similarity index 100% rename from R/stanmodels.R rename to R/R/stanmodels.R diff --git a/R/ts_par4.R b/R/R/ts_par4.R similarity index 100% rename from R/ts_par4.R rename to R/R/ts_par4.R diff --git a/R/ts_par6.R b/R/R/ts_par6.R similarity index 100% rename from R/ts_par6.R rename to R/R/ts_par6.R diff --git a/R/ts_par7.R b/R/R/ts_par7.R similarity index 100% rename from R/ts_par7.R rename to R/R/ts_par7.R diff --git a/R/ug_bayes.R b/R/R/ug_bayes.R similarity index 100% rename from R/ug_bayes.R rename to R/R/ug_bayes.R diff --git a/R/ug_delta.R b/R/R/ug_delta.R similarity index 100% rename from R/ug_delta.R rename to R/R/ug_delta.R diff --git a/R/wcs_sql.R b/R/R/wcs_sql.R similarity index 100% rename from R/wcs_sql.R rename to R/R/wcs_sql.R diff --git a/R/zzz.R b/R/R/zzz.R similarity index 100% rename from R/zzz.R rename to R/R/zzz.R diff --git a/_pkgdown.yml b/R/_pkgdown.yml similarity index 100% rename from _pkgdown.yml rename to R/_pkgdown.yml diff --git a/cran-comments.md b/R/cran-comments.md similarity index 100% rename from cran-comments.md rename to R/cran-comments.md diff --git a/docs/LICENSE-text.html b/R/docs/LICENSE-text.html similarity index 100% rename from 
docs/LICENSE-text.html rename to R/docs/LICENSE-text.html diff --git a/docs/authors.html b/R/docs/authors.html similarity index 100% rename from docs/authors.html rename to R/docs/authors.html diff --git a/docs/docsearch.css b/R/docs/docsearch.css similarity index 100% rename from docs/docsearch.css rename to R/docs/docsearch.css diff --git a/docs/docsearch.js b/R/docs/docsearch.js similarity index 100% rename from docs/docsearch.js rename to R/docs/docsearch.js diff --git a/docs/index.html b/R/docs/index.html similarity index 100% rename from docs/index.html rename to R/docs/index.html diff --git a/docs/link.svg b/R/docs/link.svg similarity index 100% rename from docs/link.svg rename to R/docs/link.svg diff --git a/docs/news/index.html b/R/docs/news/index.html similarity index 100% rename from docs/news/index.html rename to R/docs/news/index.html diff --git a/docs/pkgdown.css b/R/docs/pkgdown.css similarity index 100% rename from docs/pkgdown.css rename to R/docs/pkgdown.css diff --git a/docs/pkgdown.js b/R/docs/pkgdown.js similarity index 100% rename from docs/pkgdown.js rename to R/docs/pkgdown.js diff --git a/docs/pkgdown.yml b/R/docs/pkgdown.yml similarity index 100% rename from docs/pkgdown.yml rename to R/docs/pkgdown.yml diff --git a/docs/reference/HDIofMCMC.html b/R/docs/reference/HDIofMCMC.html similarity index 100% rename from docs/reference/HDIofMCMC.html rename to R/docs/reference/HDIofMCMC.html diff --git a/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html similarity index 100% rename from docs/reference/bandit2arm_delta.html rename to R/docs/reference/bandit2arm_delta.html diff --git a/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html similarity index 100% rename from docs/reference/bandit4arm2_kalman_filter.html rename to R/docs/reference/bandit4arm2_kalman_filter.html diff --git a/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html similarity index 100% rename from docs/reference/bandit4arm_2par_lapse.html rename to R/docs/reference/bandit4arm_2par_lapse.html diff --git a/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html similarity index 100% rename from docs/reference/bandit4arm_4par.html rename to R/docs/reference/bandit4arm_4par.html diff --git a/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html similarity index 100% rename from docs/reference/bandit4arm_lapse.html rename to R/docs/reference/bandit4arm_lapse.html diff --git a/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html similarity index 100% rename from docs/reference/bandit4arm_lapse_decay.html rename to R/docs/reference/bandit4arm_lapse_decay.html diff --git a/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html similarity index 100% rename from docs/reference/bandit4arm_singleA_lapse.html rename to R/docs/reference/bandit4arm_singleA_lapse.html diff --git a/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html similarity index 100% rename from docs/reference/bart_par4.html rename to R/docs/reference/bart_par4.html diff --git a/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html similarity index 100% rename from docs/reference/choiceRT_ddm.html rename to R/docs/reference/choiceRT_ddm.html diff --git a/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html similarity index 100% rename from 
docs/reference/choiceRT_ddm_single.html rename to R/docs/reference/choiceRT_ddm_single.html diff --git a/docs/reference/choiceRT_lba.html b/R/docs/reference/choiceRT_lba.html similarity index 100% rename from docs/reference/choiceRT_lba.html rename to R/docs/reference/choiceRT_lba.html diff --git a/docs/reference/choiceRT_lba_single.html b/R/docs/reference/choiceRT_lba_single.html similarity index 100% rename from docs/reference/choiceRT_lba_single.html rename to R/docs/reference/choiceRT_lba_single.html diff --git a/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html similarity index 100% rename from docs/reference/cra_exp.html rename to R/docs/reference/cra_exp.html diff --git a/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html similarity index 100% rename from docs/reference/cra_linear.html rename to R/docs/reference/cra_linear.html diff --git a/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html similarity index 100% rename from docs/reference/dbdm_prob_weight.html rename to R/docs/reference/dbdm_prob_weight.html diff --git a/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html similarity index 100% rename from docs/reference/dd_cs.html rename to R/docs/reference/dd_cs.html diff --git a/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html similarity index 100% rename from docs/reference/dd_cs_single.html rename to R/docs/reference/dd_cs_single.html diff --git a/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html similarity index 100% rename from docs/reference/dd_exp.html rename to R/docs/reference/dd_exp.html diff --git a/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html similarity index 100% rename from docs/reference/dd_hyperbolic.html rename to R/docs/reference/dd_hyperbolic.html diff --git a/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html similarity index 100% rename from docs/reference/dd_hyperbolic_single.html rename to R/docs/reference/dd_hyperbolic_single.html diff --git a/docs/reference/estimate_mode.html b/R/docs/reference/estimate_mode.html similarity index 100% rename from docs/reference/estimate_mode.html rename to R/docs/reference/estimate_mode.html diff --git a/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html similarity index 100% rename from docs/reference/extract_ic.html rename to R/docs/reference/extract_ic.html diff --git a/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html similarity index 100% rename from docs/reference/gng_m1.html rename to R/docs/reference/gng_m1.html diff --git a/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html similarity index 100% rename from docs/reference/gng_m2.html rename to R/docs/reference/gng_m2.html diff --git a/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html similarity index 100% rename from docs/reference/gng_m3.html rename to R/docs/reference/gng_m3.html diff --git a/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html similarity index 100% rename from docs/reference/gng_m4.html rename to R/docs/reference/gng_m4.html diff --git a/docs/reference/hBayesDM-package.html b/R/docs/reference/hBayesDM-package.html similarity index 100% rename from docs/reference/hBayesDM-package.html rename to R/docs/reference/hBayesDM-package.html diff --git a/docs/reference/hBayesDM_model.html b/R/docs/reference/hBayesDM_model.html similarity index 100% rename from docs/reference/hBayesDM_model.html rename to R/docs/reference/hBayesDM_model.html diff --git 
a/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html similarity index 100% rename from docs/reference/igt_orl.html rename to R/docs/reference/igt_orl.html diff --git a/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html similarity index 100% rename from docs/reference/igt_pvl_decay.html rename to R/docs/reference/igt_pvl_decay.html diff --git a/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html similarity index 100% rename from docs/reference/igt_pvl_delta.html rename to R/docs/reference/igt_pvl_delta.html diff --git a/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html similarity index 100% rename from docs/reference/igt_vpp.html rename to R/docs/reference/igt_vpp.html diff --git a/docs/reference/index.html b/R/docs/reference/index.html similarity index 100% rename from docs/reference/index.html rename to R/docs/reference/index.html diff --git a/docs/reference/multiplot.html b/R/docs/reference/multiplot.html similarity index 100% rename from docs/reference/multiplot.html rename to R/docs/reference/multiplot.html diff --git a/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html similarity index 100% rename from docs/reference/peer_ocu.html rename to R/docs/reference/peer_ocu.html diff --git a/docs/reference/plot.hBayesDM.html b/R/docs/reference/plot.hBayesDM.html similarity index 100% rename from docs/reference/plot.hBayesDM.html rename to R/docs/reference/plot.hBayesDM.html diff --git a/docs/reference/plotDist.html b/R/docs/reference/plotDist.html similarity index 100% rename from docs/reference/plotDist.html rename to R/docs/reference/plotDist.html diff --git a/docs/reference/plotHDI.html b/R/docs/reference/plotHDI.html similarity index 100% rename from docs/reference/plotHDI.html rename to R/docs/reference/plotHDI.html diff --git a/docs/reference/plotInd.html b/R/docs/reference/plotInd.html similarity index 100% rename from docs/reference/plotInd.html rename to R/docs/reference/plotInd.html diff --git a/docs/reference/printFit.html b/R/docs/reference/printFit.html similarity index 100% rename from docs/reference/printFit.html rename to R/docs/reference/printFit.html diff --git a/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html similarity index 100% rename from docs/reference/prl_ewa.html rename to R/docs/reference/prl_ewa.html diff --git a/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html similarity index 100% rename from docs/reference/prl_fictitious.html rename to R/docs/reference/prl_fictitious.html diff --git a/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html similarity index 100% rename from docs/reference/prl_fictitious_multipleB.html rename to R/docs/reference/prl_fictitious_multipleB.html diff --git a/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html similarity index 100% rename from docs/reference/prl_fictitious_rp.html rename to R/docs/reference/prl_fictitious_rp.html diff --git a/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html similarity index 100% rename from docs/reference/prl_fictitious_rp_woa.html rename to R/docs/reference/prl_fictitious_rp_woa.html diff --git a/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html similarity index 100% rename from docs/reference/prl_fictitious_woa.html rename to R/docs/reference/prl_fictitious_woa.html diff --git a/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html similarity index 100% 
rename from docs/reference/prl_rp.html rename to R/docs/reference/prl_rp.html diff --git a/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html similarity index 100% rename from docs/reference/prl_rp_multipleB.html rename to R/docs/reference/prl_rp_multipleB.html diff --git a/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html similarity index 100% rename from docs/reference/pst_gainloss_Q.html rename to R/docs/reference/pst_gainloss_Q.html diff --git a/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html similarity index 100% rename from docs/reference/ra_noLA.html rename to R/docs/reference/ra_noLA.html diff --git a/docs/reference/ra_noRA.html b/R/docs/reference/ra_noRA.html similarity index 100% rename from docs/reference/ra_noRA.html rename to R/docs/reference/ra_noRA.html diff --git a/docs/reference/ra_prospect.html b/R/docs/reference/ra_prospect.html similarity index 100% rename from docs/reference/ra_prospect.html rename to R/docs/reference/ra_prospect.html diff --git a/docs/reference/rdt_happiness.html b/R/docs/reference/rdt_happiness.html similarity index 100% rename from docs/reference/rdt_happiness.html rename to R/docs/reference/rdt_happiness.html diff --git a/docs/reference/rhat.html b/R/docs/reference/rhat.html similarity index 100% rename from docs/reference/rhat.html rename to R/docs/reference/rhat.html diff --git a/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html similarity index 100% rename from docs/reference/ts_par4.html rename to R/docs/reference/ts_par4.html diff --git a/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html similarity index 100% rename from docs/reference/ts_par6.html rename to R/docs/reference/ts_par6.html diff --git a/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html similarity index 100% rename from docs/reference/ts_par7.html rename to R/docs/reference/ts_par7.html diff --git a/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html similarity index 100% rename from docs/reference/ug_bayes.html rename to R/docs/reference/ug_bayes.html diff --git a/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html similarity index 100% rename from docs/reference/ug_delta.html rename to R/docs/reference/ug_delta.html diff --git a/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html similarity index 100% rename from docs/reference/wcs_sql.html rename to R/docs/reference/wcs_sql.html diff --git a/hBayesDM.Rproj b/R/hBayesDM.Rproj similarity index 100% rename from hBayesDM.Rproj rename to R/hBayesDM.Rproj diff --git a/inst/CITATION b/R/inst/CITATION similarity index 100% rename from inst/CITATION rename to R/inst/CITATION diff --git a/R/inst/extdata/bandit2arm_exampleData.txt b/R/inst/extdata/bandit2arm_exampleData.txt new file mode 100644 index 00000000..d28e2ca2 --- /dev/null +++ b/R/inst/extdata/bandit2arm_exampleData.txt @@ -0,0 +1,2001 @@ +subjID trial choice outcome +1 1 1 1 +1 2 2 -1 +1 3 2 -1 +1 4 2 -1 +1 5 1 -1 +1 6 2 -1 +1 7 2 1 +1 8 1 1 +1 9 1 1 +1 10 1 -1 +1 11 2 1 +1 12 1 1 +1 13 2 -1 +1 14 1 -1 +1 15 1 1 +1 16 1 1 +1 17 2 -1 +1 18 2 -1 +1 19 2 -1 +1 20 2 1 +1 21 1 -1 +1 22 1 -1 +1 23 2 -1 +1 24 2 -1 +1 25 2 -1 +1 26 1 1 +1 27 1 -1 +1 28 2 -1 +1 29 1 1 +1 30 1 1 +1 31 1 1 +1 32 1 1 +1 33 2 -1 +1 34 1 1 +1 35 1 1 +1 36 1 -1 +1 37 2 -1 +1 38 1 1 +1 39 2 -1 +1 40 1 1 +1 41 1 1 +1 42 1 1 +1 43 2 -1 +1 44 1 1 +1 45 1 1 +1 46 1 1 +1 47 1 1 +1 48 2 1 +1 49 1 1 +1 50 1 1 +1 51 1 1 +1 52 2 -1 +1 53 1 1 +1 54 1 -1 +1 55 1 -1 +1 56 2 -1 +1 57 1 -1 +1 58 2 1 +1 59 
2 1 +1 60 2 -1 +1 61 1 1 +1 62 1 -1 +1 63 2 1 +1 64 1 1 +1 65 1 1 +1 66 2 -1 +1 67 1 -1 +1 68 2 1 +1 69 2 -1 +1 70 2 -1 +1 71 2 -1 +1 72 1 1 +1 73 2 1 +1 74 1 1 +1 75 2 -1 +1 76 1 -1 +1 77 1 -1 +1 78 2 -1 +1 79 1 -1 +1 80 2 -1 +1 81 1 1 +1 82 2 -1 +1 83 2 -1 +1 84 1 -1 +1 85 1 1 +1 86 1 1 +1 87 1 1 +1 88 1 1 +1 89 1 1 +1 90 1 -1 +1 91 2 1 +1 92 2 1 +1 93 1 1 +1 94 2 1 +1 95 2 -1 +1 96 1 -1 +1 97 1 1 +1 98 1 -1 +1 99 1 -1 +1 100 1 -1 +2 1 2 1 +2 2 2 -1 +2 3 2 -1 +2 4 1 1 +2 5 2 1 +2 6 1 1 +2 7 1 1 +2 8 1 -1 +2 9 1 -1 +2 10 2 1 +2 11 1 1 +2 12 2 -1 +2 13 2 -1 +2 14 1 1 +2 15 2 1 +2 16 1 1 +2 17 1 1 +2 18 1 1 +2 19 2 1 +2 20 1 1 +2 21 2 1 +2 22 1 1 +2 23 2 1 +2 24 2 -1 +2 25 1 1 +2 26 2 1 +2 27 1 1 +2 28 2 1 +2 29 2 -1 +2 30 1 1 +2 31 2 1 +2 32 1 1 +2 33 2 1 +2 34 2 1 +2 35 2 1 +2 36 2 -1 +2 37 1 1 +2 38 1 -1 +2 39 2 1 +2 40 1 1 +2 41 2 1 +2 42 2 -1 +2 43 1 1 +2 44 2 -1 +2 45 2 -1 +2 46 1 1 +2 47 1 -1 +2 48 1 1 +2 49 2 -1 +2 50 1 1 +2 51 1 1 +2 52 1 1 +2 53 2 -1 +2 54 1 1 +2 55 1 -1 +2 56 1 -1 +2 57 2 1 +2 58 1 1 +2 59 2 -1 +2 60 1 1 +2 61 1 1 +2 62 1 1 +2 63 1 1 +2 64 1 1 +2 65 1 -1 +2 66 1 1 +2 67 2 -1 +2 68 1 -1 +2 69 2 1 +2 70 1 1 +2 71 2 -1 +2 72 2 1 +2 73 1 1 +2 74 2 -1 +2 75 1 -1 +2 76 2 1 +2 77 1 1 +2 78 1 1 +2 79 1 1 +2 80 1 -1 +2 81 2 -1 +2 82 2 -1 +2 83 1 1 +2 84 2 1 +2 85 1 -1 +2 86 2 1 +2 87 1 1 +2 88 1 1 +2 89 1 -1 +2 90 1 -1 +2 91 1 1 +2 92 1 1 +2 93 2 1 +2 94 2 -1 +2 95 1 -1 +2 96 1 1 +2 97 2 1 +2 98 1 1 +2 99 1 -1 +2 100 2 -1 +3 1 1 1 +3 2 2 1 +3 3 1 1 +3 4 2 -1 +3 5 1 1 +3 6 1 1 +3 7 1 1 +3 8 2 -1 +3 9 1 1 +3 10 1 1 +3 11 1 1 +3 12 1 1 +3 13 2 1 +3 14 2 1 +3 15 1 1 +3 16 2 -1 +3 17 2 -1 +3 18 1 1 +3 19 2 1 +3 20 2 -1 +3 21 2 1 +3 22 2 -1 +3 23 1 1 +3 24 2 -1 +3 25 1 1 +3 26 2 -1 +3 27 1 -1 +3 28 1 1 +3 29 2 1 +3 30 1 -1 +3 31 2 -1 +3 32 1 1 +3 33 1 -1 +3 34 2 1 +3 35 2 1 +3 36 1 1 +3 37 2 1 +3 38 1 1 +3 39 2 1 +3 40 1 -1 +3 41 2 -1 +3 42 2 -1 +3 43 2 -1 +3 44 1 1 +3 45 1 -1 +3 46 1 1 +3 47 1 1 +3 48 1 1 +3 49 1 1 +3 50 1 1 +3 51 2 -1 +3 52 1 1 +3 53 2 -1 +3 54 1 -1 +3 55 1 -1 +3 56 1 1 +3 57 1 -1 +3 58 1 1 +3 59 1 1 +3 60 1 -1 +3 61 1 1 +3 62 2 -1 +3 63 1 1 +3 64 1 1 +3 65 1 1 +3 66 2 -1 +3 67 1 -1 +3 68 1 -1 +3 69 2 -1 +3 70 2 -1 +3 71 2 1 +3 72 2 -1 +3 73 1 1 +3 74 2 1 +3 75 2 -1 +3 76 1 -1 +3 77 1 1 +3 78 1 1 +3 79 2 -1 +3 80 1 1 +3 81 1 -1 +3 82 1 -1 +3 83 1 1 +3 84 1 1 +3 85 2 1 +3 86 1 1 +3 87 1 1 +3 88 1 1 +3 89 1 1 +3 90 2 -1 +3 91 1 -1 +3 92 2 -1 +3 93 2 -1 +3 94 2 -1 +3 95 2 -1 +3 96 1 1 +3 97 1 -1 +3 98 1 -1 +3 99 2 1 +3 100 1 1 +4 1 2 -1 +4 2 2 1 +4 3 2 1 +4 4 2 1 +4 5 1 1 +4 6 2 1 +4 7 1 1 +4 8 1 1 +4 9 1 1 +4 10 2 -1 +4 11 2 -1 +4 12 1 1 +4 13 1 -1 +4 14 2 -1 +4 15 1 1 +4 16 1 1 +4 17 1 -1 +4 18 2 1 +4 19 1 1 +4 20 2 -1 +4 21 2 1 +4 22 1 1 +4 23 1 -1 +4 24 2 -1 +4 25 1 1 +4 26 1 -1 +4 27 1 -1 +4 28 2 -1 +4 29 2 1 +4 30 2 -1 +4 31 2 1 +4 32 2 -1 +4 33 2 -1 +4 34 1 1 +4 35 1 -1 +4 36 2 -1 +4 37 1 -1 +4 38 2 1 +4 39 2 -1 +4 40 2 -1 +4 41 1 1 +4 42 2 1 +4 43 1 -1 +4 44 1 -1 +4 45 2 1 +4 46 1 -1 +4 47 2 1 +4 48 2 1 +4 49 2 -1 +4 50 2 -1 +4 51 1 1 +4 52 1 1 +4 53 1 1 +4 54 2 1 +4 55 1 1 +4 56 1 1 +4 57 1 1 +4 58 1 1 +4 59 2 1 +4 60 1 1 +4 61 2 1 +4 62 1 -1 +4 63 2 -1 +4 64 2 -1 +4 65 2 -1 +4 66 1 1 +4 67 2 -1 +4 68 1 -1 +4 69 1 -1 +4 70 1 1 +4 71 2 1 +4 72 2 -1 +4 73 2 1 +4 74 1 -1 +4 75 2 -1 +4 76 1 1 +4 77 1 1 +4 78 1 -1 +4 79 2 -1 +4 80 1 1 +4 81 2 -1 +4 82 1 1 +4 83 1 -1 +4 84 1 -1 +4 85 2 1 +4 86 1 1 +4 87 1 1 +4 88 2 1 +4 89 2 -1 +4 90 2 -1 +4 91 1 1 +4 92 1 1 +4 93 2 1 +4 94 1 1 +4 95 2 1 +4 96 2 -1 +4 97 2 1 +4 98 1 1 +4 99 2 -1 +4 100 2 1 +5 1 2 -1 +5 2 2 1 
+5 3 1 -1 +5 4 2 1 +5 5 2 -1 +5 6 1 1 +5 7 1 -1 +5 8 1 -1 +5 9 2 1 +5 10 1 -1 +5 11 1 -1 +5 12 2 -1 +5 13 1 1 +5 14 1 -1 +5 15 1 1 +5 16 2 -1 +5 17 1 -1 +5 18 1 -1 +5 19 1 1 +5 20 1 1 +5 21 1 -1 +5 22 1 1 +5 23 2 -1 +5 24 2 1 +5 25 1 1 +5 26 1 1 +5 27 2 -1 +5 28 1 1 +5 29 1 1 +5 30 2 -1 +5 31 1 -1 +5 32 2 1 +5 33 1 -1 +5 34 2 -1 +5 35 2 -1 +5 36 1 1 +5 37 1 -1 +5 38 2 1 +5 39 1 1 +5 40 2 -1 +5 41 1 1 +5 42 1 1 +5 43 1 1 +5 44 1 -1 +5 45 1 1 +5 46 2 -1 +5 47 1 1 +5 48 2 1 +5 49 1 1 +5 50 1 1 +5 51 1 1 +5 52 2 -1 +5 53 1 1 +5 54 2 -1 +5 55 1 1 +5 56 1 -1 +5 57 1 1 +5 58 1 -1 +5 59 2 1 +5 60 2 1 +5 61 2 -1 +5 62 1 -1 +5 63 2 1 +5 64 1 1 +5 65 2 1 +5 66 2 1 +5 67 1 1 +5 68 1 -1 +5 69 2 -1 +5 70 1 -1 +5 71 2 1 +5 72 1 1 +5 73 2 -1 +5 74 2 -1 +5 75 2 -1 +5 76 2 -1 +5 77 1 -1 +5 78 1 1 +5 79 1 1 +5 80 2 -1 +5 81 1 1 +5 82 2 -1 +5 83 2 1 +5 84 2 1 +5 85 1 1 +5 86 1 1 +5 87 2 1 +5 88 1 1 +5 89 1 1 +5 90 2 -1 +5 91 1 1 +5 92 2 -1 +5 93 1 -1 +5 94 1 1 +5 95 1 1 +5 96 1 1 +5 97 1 -1 +5 98 1 1 +5 99 1 1 +5 100 1 -1 +6 1 1 -1 +6 2 2 -1 +6 3 2 -1 +6 4 1 -1 +6 5 1 1 +6 6 1 1 +6 7 2 1 +6 8 1 -1 +6 9 2 -1 +6 10 2 -1 +6 11 2 1 +6 12 1 1 +6 13 2 -1 +6 14 2 -1 +6 15 2 1 +6 16 1 1 +6 17 1 -1 +6 18 2 -1 +6 19 2 1 +6 20 1 -1 +6 21 2 -1 +6 22 1 -1 +6 23 2 -1 +6 24 1 -1 +6 25 1 1 +6 26 1 1 +6 27 1 1 +6 28 1 1 +6 29 2 -1 +6 30 1 -1 +6 31 1 1 +6 32 1 1 +6 33 1 -1 +6 34 1 -1 +6 35 1 -1 +6 36 1 1 +6 37 1 -1 +6 38 2 1 +6 39 2 1 +6 40 1 1 +6 41 2 1 +6 42 1 -1 +6 43 2 -1 +6 44 1 1 +6 45 1 1 +6 46 2 -1 +6 47 1 1 +6 48 1 1 +6 49 1 1 +6 50 2 -1 +6 51 1 1 +6 52 1 1 +6 53 1 1 +6 54 1 -1 +6 55 1 1 +6 56 1 -1 +6 57 2 1 +6 58 2 -1 +6 59 1 1 +6 60 1 1 +6 61 1 1 +6 62 2 -1 +6 63 1 1 +6 64 1 -1 +6 65 1 1 +6 66 1 -1 +6 67 1 -1 +6 68 2 1 +6 69 2 -1 +6 70 1 -1 +6 71 1 1 +6 72 1 1 +6 73 2 -1 +6 74 1 -1 +6 75 1 -1 +6 76 2 -1 +6 77 2 -1 +6 78 1 1 +6 79 1 1 +6 80 1 1 +6 81 1 -1 +6 82 2 1 +6 83 1 1 +6 84 1 -1 +6 85 1 -1 +6 86 2 -1 +6 87 2 -1 +6 88 1 1 +6 89 1 1 +6 90 2 -1 +6 91 2 1 +6 92 1 1 +6 93 2 -1 +6 94 1 1 +6 95 2 1 +6 96 1 1 +6 97 1 -1 +6 98 1 -1 +6 99 1 -1 +6 100 1 1 +7 1 2 -1 +7 2 1 -1 +7 3 1 1 +7 4 2 -1 +7 5 2 -1 +7 6 1 1 +7 7 1 1 +7 8 1 -1 +7 9 1 1 +7 10 1 1 +7 11 1 -1 +7 12 1 -1 +7 13 1 1 +7 14 1 -1 +7 15 1 -1 +7 16 1 1 +7 17 1 1 +7 18 2 -1 +7 19 1 -1 +7 20 1 -1 +7 21 1 1 +7 22 2 1 +7 23 2 -1 +7 24 1 1 +7 25 1 1 +7 26 1 1 +7 27 1 -1 +7 28 2 -1 +7 29 1 1 +7 30 1 1 +7 31 2 -1 +7 32 1 1 +7 33 1 -1 +7 34 1 1 +7 35 1 1 +7 36 1 -1 +7 37 2 -1 +7 38 1 1 +7 39 1 -1 +7 40 2 -1 +7 41 1 1 +7 42 1 1 +7 43 1 1 +7 44 1 1 +7 45 1 1 +7 46 1 1 +7 47 1 -1 +7 48 1 -1 +7 49 2 1 +7 50 1 1 +7 51 2 1 +7 52 2 1 +7 53 2 -1 +7 54 2 1 +7 55 2 1 +7 56 1 1 +7 57 1 1 +7 58 1 -1 +7 59 2 -1 +7 60 1 -1 +7 61 2 -1 +7 62 1 1 +7 63 1 1 +7 64 1 1 +7 65 1 1 +7 66 1 -1 +7 67 1 1 +7 68 1 1 +7 69 1 1 +7 70 1 1 +7 71 1 1 +7 72 2 1 +7 73 1 1 +7 74 1 1 +7 75 1 1 +7 76 2 -1 +7 77 1 1 +7 78 1 -1 +7 79 2 -1 +7 80 1 1 +7 81 1 1 +7 82 2 -1 +7 83 1 -1 +7 84 1 1 +7 85 2 -1 +7 86 1 1 +7 87 1 1 +7 88 1 -1 +7 89 1 -1 +7 90 2 -1 +7 91 1 1 +7 92 1 1 +7 93 1 -1 +7 94 1 1 +7 95 1 -1 +7 96 1 1 +7 97 1 1 +7 98 2 1 +7 99 2 -1 +7 100 1 1 +8 1 2 1 +8 2 2 -1 +8 3 2 -1 +8 4 1 1 +8 5 2 1 +8 6 1 -1 +8 7 2 -1 +8 8 2 1 +8 9 1 1 +8 10 2 1 +8 11 1 1 +8 12 1 1 +8 13 2 -1 +8 14 1 1 +8 15 1 -1 +8 16 2 1 +8 17 2 -1 +8 18 2 -1 +8 19 2 1 +8 20 1 1 +8 21 2 1 +8 22 2 -1 +8 23 1 -1 +8 24 1 -1 +8 25 2 1 +8 26 2 -1 +8 27 2 1 +8 28 2 1 +8 29 1 1 +8 30 2 -1 +8 31 1 1 +8 32 1 -1 +8 33 1 1 +8 34 1 1 +8 35 2 -1 +8 36 2 -1 +8 37 1 -1 +8 38 2 -1 +8 39 1 1 +8 40 1 1 +8 41 1 1 +8 42 1 -1 +8 43 1 1 +8 44 1 1 +8 45 1 1 
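For orientation among the raw rows: the hunk header @@ -0,0 +1,2001 @@ for bandit2arm_exampleData.txt marks a brand-new file of 2,001 lines, one header row plus 20 subjects x 100 trials of two-armed bandit data. Columns are subjID, trial, choice (1 or 2), and outcome (+1 reward, -1 loss), whitespace-delimited. A minimal sketch of reading the bundled copy once the package is installed and handing it to the matching model; the data = "example" shortcut follows hBayesDM's usual interface, but treat the exact call as an assumption rather than part of this patch:

    # Locate and read the example data shipped inside the installed package.
    path <- system.file("extdata", "bandit2arm_exampleData.txt",
                        package = "hBayesDM")
    dat  <- read.table(path, header = TRUE)
    head(dat)   # subjID trial choice outcome
    # library(hBayesDM)
    # fit <- bandit2arm_delta(data = "example", niter = 2000, nwarmup = 1000,
    #                         nchain = 4, ncore = 4)
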
+8 46 1 -1 +8 47 2 -1 +8 48 2 -1 +8 49 1 1 +8 50 2 -1 +8 51 1 -1 +8 52 2 -1 +8 53 2 -1 +8 54 2 -1 +8 55 1 1 +8 56 2 1 +8 57 1 1 +8 58 1 -1 +8 59 1 -1 +8 60 2 1 +8 61 2 -1 +8 62 2 1 +8 63 2 -1 +8 64 1 -1 +8 65 2 -1 +8 66 1 1 +8 67 1 -1 +8 68 1 -1 +8 69 1 1 +8 70 2 -1 +8 71 2 -1 +8 72 2 1 +8 73 1 1 +8 74 1 -1 +8 75 1 -1 +8 76 1 1 +8 77 1 1 +8 78 1 -1 +8 79 2 -1 +8 80 2 1 +8 81 2 -1 +8 82 1 1 +8 83 1 1 +8 84 1 1 +8 85 1 1 +8 86 1 1 +8 87 1 1 +8 88 1 1 +8 89 2 -1 +8 90 1 -1 +8 91 2 1 +8 92 2 -1 +8 93 1 1 +8 94 1 1 +8 95 2 -1 +8 96 1 1 +8 97 1 1 +8 98 1 1 +8 99 1 1 +8 100 1 1 +9 1 1 1 +9 2 1 1 +9 3 1 1 +9 4 1 1 +9 5 1 -1 +9 6 1 1 +9 7 1 1 +9 8 1 1 +9 9 1 1 +9 10 2 -1 +9 11 1 1 +9 12 2 -1 +9 13 2 -1 +9 14 1 -1 +9 15 1 1 +9 16 1 -1 +9 17 1 1 +9 18 1 1 +9 19 1 1 +9 20 1 1 +9 21 1 -1 +9 22 1 1 +9 23 2 -1 +9 24 2 1 +9 25 1 1 +9 26 1 -1 +9 27 2 -1 +9 28 1 -1 +9 29 1 -1 +9 30 2 -1 +9 31 1 -1 +9 32 1 1 +9 33 1 1 +9 34 1 -1 +9 35 1 -1 +9 36 2 -1 +9 37 2 1 +9 38 1 1 +9 39 1 1 +9 40 2 -1 +9 41 1 -1 +9 42 1 1 +9 43 1 1 +9 44 2 1 +9 45 1 1 +9 46 2 -1 +9 47 1 1 +9 48 1 1 +9 49 1 1 +9 50 2 -1 +9 51 1 -1 +9 52 1 -1 +9 53 1 1 +9 54 2 -1 +9 55 1 -1 +9 56 2 -1 +9 57 1 1 +9 58 1 -1 +9 59 1 1 +9 60 2 -1 +9 61 1 -1 +9 62 1 -1 +9 63 1 1 +9 64 1 -1 +9 65 1 1 +9 66 1 -1 +9 67 1 -1 +9 68 2 -1 +9 69 2 -1 +9 70 2 -1 +9 71 2 1 +9 72 2 1 +9 73 1 1 +9 74 1 1 +9 75 1 1 +9 76 2 1 +9 77 2 -1 +9 78 1 -1 +9 79 1 1 +9 80 1 1 +9 81 1 1 +9 82 2 1 +9 83 2 1 +9 84 1 1 +9 85 2 -1 +9 86 2 1 +9 87 2 -1 +9 88 2 -1 +9 89 1 -1 +9 90 1 -1 +9 91 2 1 +9 92 2 -1 +9 93 2 -1 +9 94 2 -1 +9 95 1 1 +9 96 1 1 +9 97 1 1 +9 98 2 -1 +9 99 1 -1 +9 100 2 1 +10 1 1 -1 +10 2 2 -1 +10 3 2 -1 +10 4 2 -1 +10 5 1 1 +10 6 1 1 +10 7 1 1 +10 8 2 -1 +10 9 1 1 +10 10 1 1 +10 11 2 -1 +10 12 1 -1 +10 13 2 -1 +10 14 2 1 +10 15 2 -1 +10 16 1 -1 +10 17 1 -1 +10 18 2 -1 +10 19 1 1 +10 20 1 1 +10 21 2 1 +10 22 2 -1 +10 23 1 -1 +10 24 1 -1 +10 25 2 -1 +10 26 1 1 +10 27 2 -1 +10 28 1 1 +10 29 2 1 +10 30 1 1 +10 31 1 1 +10 32 2 -1 +10 33 1 1 +10 34 1 1 +10 35 1 -1 +10 36 1 1 +10 37 1 1 +10 38 2 -1 +10 39 1 -1 +10 40 1 1 +10 41 1 -1 +10 42 1 -1 +10 43 1 1 +10 44 1 1 +10 45 1 1 +10 46 1 -1 +10 47 1 1 +10 48 1 -1 +10 49 2 -1 +10 50 1 1 +10 51 1 1 +10 52 1 -1 +10 53 1 1 +10 54 2 -1 +10 55 1 -1 +10 56 1 1 +10 57 1 -1 +10 58 2 -1 +10 59 2 -1 +10 60 1 1 +10 61 1 -1 +10 62 1 1 +10 63 1 1 +10 64 1 1 +10 65 1 -1 +10 66 2 -1 +10 67 2 -1 +10 68 1 1 +10 69 1 1 +10 70 1 1 +10 71 1 1 +10 72 1 1 +10 73 1 -1 +10 74 1 1 +10 75 1 -1 +10 76 1 -1 +10 77 1 1 +10 78 1 1 +10 79 2 -1 +10 80 1 1 +10 81 1 -1 +10 82 2 -1 +10 83 1 1 +10 84 1 1 +10 85 1 1 +10 86 1 -1 +10 87 1 1 +10 88 1 -1 +10 89 1 1 +10 90 2 1 +10 91 1 1 +10 92 2 -1 +10 93 1 -1 +10 94 1 1 +10 95 1 1 +10 96 2 -1 +10 97 1 -1 +10 98 2 -1 +10 99 2 -1 +10 100 2 1 +11 1 2 -1 +11 2 1 1 +11 3 2 1 +11 4 1 -1 +11 5 2 -1 +11 6 2 1 +11 7 2 -1 +11 8 1 1 +11 9 1 -1 +11 10 2 -1 +11 11 2 -1 +11 12 1 1 +11 13 1 1 +11 14 2 -1 +11 15 2 -1 +11 16 2 -1 +11 17 1 -1 +11 18 1 1 +11 19 1 1 +11 20 1 -1 +11 21 2 -1 +11 22 1 1 +11 23 1 -1 +11 24 2 -1 +11 25 1 -1 +11 26 1 1 +11 27 2 -1 +11 28 1 1 +11 29 1 1 +11 30 1 1 +11 31 1 -1 +11 32 1 1 +11 33 2 -1 +11 34 2 -1 +11 35 1 1 +11 36 1 -1 +11 37 1 1 +11 38 1 -1 +11 39 2 -1 +11 40 2 -1 +11 41 1 1 +11 42 2 -1 +11 43 1 -1 +11 44 1 -1 +11 45 1 1 +11 46 1 1 +11 47 1 -1 +11 48 1 1 +11 49 1 1 +11 50 1 1 +11 51 1 -1 +11 52 1 -1 +11 53 1 1 +11 54 1 -1 +11 55 1 1 +11 56 1 -1 +11 57 1 -1 +11 58 1 1 +11 59 2 1 +11 60 1 -1 +11 61 1 1 +11 62 1 -1 +11 63 1 1 +11 64 1 1 +11 65 1 -1 +11 66 1 1 +11 67 1 -1 +11 68 1 1 +11 69 1 1 +11 70 
2 -1 +11 71 1 -1 +11 72 2 -1 +11 73 1 1 +11 74 1 1 +11 75 1 1 +11 76 2 -1 +11 77 1 -1 +11 78 2 -1 +11 79 1 -1 +11 80 1 -1 +11 81 1 -1 +11 82 2 -1 +11 83 2 -1 +11 84 2 -1 +11 85 1 -1 +11 86 1 -1 +11 87 2 -1 +11 88 1 -1 +11 89 1 -1 +11 90 2 -1 +11 91 1 1 +11 92 1 1 +11 93 1 1 +11 94 1 -1 +11 95 1 1 +11 96 1 1 +11 97 1 1 +11 98 1 1 +11 99 1 1 +11 100 1 1 +12 1 2 1 +12 2 2 -1 +12 3 2 -1 +12 4 2 -1 +12 5 1 1 +12 6 2 1 +12 7 2 1 +12 8 2 -1 +12 9 1 1 +12 10 2 -1 +12 11 2 -1 +12 12 2 1 +12 13 2 1 +12 14 1 1 +12 15 2 -1 +12 16 2 1 +12 17 2 -1 +12 18 1 -1 +12 19 1 -1 +12 20 2 1 +12 21 2 1 +12 22 1 1 +12 23 1 1 +12 24 1 1 +12 25 2 -1 +12 26 1 -1 +12 27 2 -1 +12 28 2 -1 +12 29 2 -1 +12 30 1 1 +12 31 1 1 +12 32 1 1 +12 33 1 -1 +12 34 1 1 +12 35 2 -1 +12 36 1 1 +12 37 2 -1 +12 38 1 -1 +12 39 2 -1 +12 40 1 -1 +12 41 1 1 +12 42 1 1 +12 43 1 1 +12 44 1 -1 +12 45 1 1 +12 46 1 -1 +12 47 1 1 +12 48 1 -1 +12 49 1 1 +12 50 1 -1 +12 51 2 -1 +12 52 1 1 +12 53 1 1 +12 54 1 1 +12 55 1 1 +12 56 1 1 +12 57 2 -1 +12 58 1 -1 +12 59 2 -1 +12 60 1 1 +12 61 1 1 +12 62 1 1 +12 63 1 -1 +12 64 1 1 +12 65 1 1 +12 66 1 1 +12 67 1 1 +12 68 1 1 +12 69 1 1 +12 70 2 -1 +12 71 1 1 +12 72 1 1 +12 73 1 1 +12 74 1 -1 +12 75 1 1 +12 76 1 1 +12 77 1 1 +12 78 1 1 +12 79 1 1 +12 80 1 1 +12 81 1 1 +12 82 1 1 +12 83 1 -1 +12 84 2 -1 +12 85 2 -1 +12 86 2 -1 +12 87 2 -1 +12 88 1 1 +12 89 1 -1 +12 90 2 -1 +12 91 2 1 +12 92 2 -1 +12 93 2 1 +12 94 1 -1 +12 95 2 -1 +12 96 1 -1 +12 97 2 -1 +12 98 2 -1 +12 99 1 1 +12 100 2 -1 +13 1 2 -1 +13 2 1 1 +13 3 1 1 +13 4 1 -1 +13 5 2 -1 +13 6 1 1 +13 7 1 -1 +13 8 1 -1 +13 9 1 -1 +13 10 1 1 +13 11 2 -1 +13 12 2 -1 +13 13 1 1 +13 14 2 1 +13 15 2 -1 +13 16 2 -1 +13 17 1 1 +13 18 1 -1 +13 19 2 -1 +13 20 1 1 +13 21 1 1 +13 22 1 -1 +13 23 1 -1 +13 24 2 1 +13 25 1 1 +13 26 1 1 +13 27 1 -1 +13 28 1 1 +13 29 1 -1 +13 30 2 1 +13 31 1 -1 +13 32 2 -1 +13 33 2 -1 +13 34 2 -1 +13 35 2 1 +13 36 1 1 +13 37 1 -1 +13 38 2 -1 +13 39 2 -1 +13 40 1 -1 +13 41 1 1 +13 42 2 -1 +13 43 2 1 +13 44 1 1 +13 45 2 -1 +13 46 2 1 +13 47 1 1 +13 48 1 1 +13 49 2 -1 +13 50 2 -1 +13 51 2 -1 +13 52 1 1 +13 53 2 1 +13 54 1 1 +13 55 1 1 +13 56 1 1 +13 57 1 1 +13 58 1 1 +13 59 1 -1 +13 60 1 -1 +13 61 2 1 +13 62 2 1 +13 63 2 1 +13 64 2 -1 +13 65 2 -1 +13 66 1 1 +13 67 2 -1 +13 68 2 1 +13 69 1 1 +13 70 2 1 +13 71 2 1 +13 72 2 -1 +13 73 2 1 +13 74 1 -1 +13 75 1 -1 +13 76 1 1 +13 77 1 -1 +13 78 1 1 +13 79 1 1 +13 80 2 1 +13 81 2 -1 +13 82 2 1 +13 83 1 1 +13 84 2 -1 +13 85 1 1 +13 86 2 -1 +13 87 1 1 +13 88 1 1 +13 89 1 -1 +13 90 1 1 +13 91 1 -1 +13 92 1 1 +13 93 1 1 +13 94 1 1 +13 95 1 1 +13 96 1 1 +13 97 1 1 +13 98 1 1 +13 99 1 1 +13 100 1 1 +14 1 2 -1 +14 2 1 1 +14 3 1 1 +14 4 1 1 +14 5 1 -1 +14 6 1 1 +14 7 2 -1 +14 8 1 1 +14 9 1 1 +14 10 2 -1 +14 11 1 -1 +14 12 2 -1 +14 13 2 1 +14 14 2 -1 +14 15 2 -1 +14 16 1 1 +14 17 1 1 +14 18 1 -1 +14 19 1 1 +14 20 1 1 +14 21 1 1 +14 22 2 -1 +14 23 2 -1 +14 24 1 1 +14 25 1 1 +14 26 2 -1 +14 27 2 1 +14 28 2 1 +14 29 2 -1 +14 30 2 -1 +14 31 1 1 +14 32 2 -1 +14 33 1 -1 +14 34 1 -1 +14 35 2 1 +14 36 2 1 +14 37 1 -1 +14 38 2 1 +14 39 2 -1 +14 40 2 1 +14 41 1 -1 +14 42 2 -1 +14 43 2 -1 +14 44 2 -1 +14 45 2 -1 +14 46 2 -1 +14 47 2 -1 +14 48 1 -1 +14 49 2 1 +14 50 2 1 +14 51 2 1 +14 52 2 -1 +14 53 1 1 +14 54 2 -1 +14 55 1 1 +14 56 2 -1 +14 57 1 1 +14 58 1 1 +14 59 2 -1 +14 60 2 -1 +14 61 2 1 +14 62 1 -1 +14 63 1 1 +14 64 1 -1 +14 65 1 -1 +14 66 1 1 +14 67 1 1 +14 68 1 1 +14 69 1 1 +14 70 2 -1 +14 71 2 -1 +14 72 2 1 +14 73 2 -1 +14 74 1 1 +14 75 2 -1 +14 76 1 1 +14 77 1 1 +14 78 1 -1 +14 79 2 -1 +14 80 2 -1 +14 81 1 1 +14 
82 1 1 +14 83 1 1 +14 84 1 -1 +14 85 1 1 +14 86 2 -1 +14 87 2 1 +14 88 1 1 +14 89 1 1 +14 90 2 -1 +14 91 1 1 +14 92 1 -1 +14 93 1 1 +14 94 1 1 +14 95 1 1 +14 96 2 1 +14 97 1 -1 +14 98 1 1 +14 99 1 1 +14 100 1 1 +15 1 1 -1 +15 2 2 -1 +15 3 1 1 +15 4 1 1 +15 5 1 -1 +15 6 2 1 +15 7 2 1 +15 8 2 -1 +15 9 2 -1 +15 10 1 1 +15 11 1 -1 +15 12 1 1 +15 13 1 1 +15 14 1 -1 +15 15 2 1 +15 16 1 -1 +15 17 2 1 +15 18 1 -1 +15 19 2 1 +15 20 1 1 +15 21 2 1 +15 22 1 1 +15 23 1 1 +15 24 2 -1 +15 25 2 -1 +15 26 2 -1 +15 27 2 1 +15 28 2 1 +15 29 2 1 +15 30 1 1 +15 31 1 1 +15 32 1 1 +15 33 2 -1 +15 34 1 1 +15 35 1 1 +15 36 1 1 +15 37 2 1 +15 38 2 -1 +15 39 1 -1 +15 40 2 -1 +15 41 1 -1 +15 42 1 -1 +15 43 1 1 +15 44 1 1 +15 45 1 -1 +15 46 1 1 +15 47 1 1 +15 48 2 -1 +15 49 1 -1 +15 50 1 1 +15 51 2 1 +15 52 1 -1 +15 53 1 -1 +15 54 1 1 +15 55 1 -1 +15 56 1 1 +15 57 1 -1 +15 58 1 1 +15 59 2 1 +15 60 1 -1 +15 61 2 1 +15 62 2 1 +15 63 1 1 +15 64 2 -1 +15 65 2 -1 +15 66 1 1 +15 67 1 -1 +15 68 1 1 +15 69 1 1 +15 70 1 1 +15 71 1 -1 +15 72 1 1 +15 73 1 -1 +15 74 1 1 +15 75 1 1 +15 76 2 -1 +15 77 1 -1 +15 78 2 -1 +15 79 2 1 +15 80 1 -1 +15 81 2 -1 +15 82 2 1 +15 83 1 -1 +15 84 2 -1 +15 85 1 1 +15 86 1 1 +15 87 1 -1 +15 88 2 -1 +15 89 2 -1 +15 90 1 1 +15 91 1 1 +15 92 1 -1 +15 93 1 1 +15 94 1 1 +15 95 1 -1 +15 96 2 1 +15 97 1 1 +15 98 2 1 +15 99 1 1 +15 100 2 1 +16 1 2 -1 +16 2 2 1 +16 3 2 1 +16 4 1 1 +16 5 2 -1 +16 6 2 -1 +16 7 2 -1 +16 8 1 1 +16 9 2 -1 +16 10 1 1 +16 11 2 1 +16 12 1 -1 +16 13 1 1 +16 14 2 1 +16 15 1 1 +16 16 1 -1 +16 17 2 -1 +16 18 1 -1 +16 19 2 -1 +16 20 1 1 +16 21 1 1 +16 22 1 1 +16 23 2 1 +16 24 1 1 +16 25 1 1 +16 26 2 -1 +16 27 1 1 +16 28 2 -1 +16 29 1 1 +16 30 1 1 +16 31 1 -1 +16 32 1 -1 +16 33 1 -1 +16 34 2 -1 +16 35 1 1 +16 36 1 -1 +16 37 1 -1 +16 38 1 -1 +16 39 1 -1 +16 40 1 -1 +16 41 2 -1 +16 42 1 1 +16 43 2 -1 +16 44 1 1 +16 45 2 -1 +16 46 2 -1 +16 47 1 1 +16 48 1 1 +16 49 2 -1 +16 50 1 1 +16 51 1 1 +16 52 1 1 +16 53 1 1 +16 54 1 -1 +16 55 1 1 +16 56 1 1 +16 57 1 -1 +16 58 2 -1 +16 59 2 1 +16 60 2 -1 +16 61 1 1 +16 62 1 -1 +16 63 1 -1 +16 64 1 1 +16 65 1 1 +16 66 1 1 +16 67 1 1 +16 68 1 1 +16 69 1 -1 +16 70 1 1 +16 71 1 1 +16 72 1 -1 +16 73 1 1 +16 74 2 -1 +16 75 1 1 +16 76 1 -1 +16 77 2 1 +16 78 2 1 +16 79 1 -1 +16 80 2 -1 +16 81 1 1 +16 82 2 -1 +16 83 2 -1 +16 84 2 -1 +16 85 1 1 +16 86 1 1 +16 87 1 1 +16 88 2 1 +16 89 1 -1 +16 90 2 1 +16 91 2 -1 +16 92 2 1 +16 93 1 -1 +16 94 1 -1 +16 95 1 1 +16 96 2 -1 +16 97 1 -1 +16 98 2 -1 +16 99 2 -1 +16 100 1 1 +17 1 1 1 +17 2 1 1 +17 3 1 1 +17 4 1 1 +17 5 1 1 +17 6 1 1 +17 7 1 1 +17 8 1 1 +17 9 2 -1 +17 10 1 1 +17 11 2 1 +17 12 1 1 +17 13 1 1 +17 14 1 -1 +17 15 1 -1 +17 16 2 -1 +17 17 2 -1 +17 18 2 -1 +17 19 2 -1 +17 20 1 1 +17 21 1 1 +17 22 1 -1 +17 23 2 1 +17 24 1 -1 +17 25 1 1 +17 26 1 1 +17 27 1 1 +17 28 1 -1 +17 29 2 -1 +17 30 2 -1 +17 31 1 -1 +17 32 1 1 +17 33 1 -1 +17 34 1 1 +17 35 2 -1 +17 36 1 -1 +17 37 2 1 +17 38 2 -1 +17 39 2 -1 +17 40 1 1 +17 41 1 1 +17 42 1 1 +17 43 1 1 +17 44 1 -1 +17 45 1 1 +17 46 1 -1 +17 47 1 -1 +17 48 1 -1 +17 49 1 1 +17 50 2 1 +17 51 1 1 +17 52 1 1 +17 53 2 -1 +17 54 1 -1 +17 55 2 1 +17 56 2 1 +17 57 2 -1 +17 58 2 -1 +17 59 1 -1 +17 60 2 -1 +17 61 2 -1 +17 62 1 1 +17 63 2 1 +17 64 1 -1 +17 65 2 -1 +17 66 2 -1 +17 67 2 -1 +17 68 2 -1 +17 69 1 1 +17 70 1 1 +17 71 1 1 +17 72 1 1 +17 73 1 -1 +17 74 2 -1 +17 75 1 1 +17 76 1 1 +17 77 2 -1 +17 78 1 1 +17 79 1 -1 +17 80 1 1 +17 81 1 1 +17 82 1 -1 +17 83 1 1 +17 84 1 -1 +17 85 1 1 +17 86 1 1 +17 87 1 1 +17 88 1 1 +17 89 1 -1 +17 90 1 -1 +17 91 1 1 +17 92 1 1 +17 93 1 1 +17 94 2 -1 
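The model these rows feed, bandit2arm_delta, is hBayesDM's Rescorla-Wagner (delta) learning model with a softmax choice rule: the chosen arm's expected value moves toward the received outcome by a learning rate A, and an inverse temperature tau scales value differences into choice probabilities. A toy sketch of that loop over one subject, for intuition only; the A and tau values are illustrative, not estimates, and the file path assumes the data sits in the working directory:

    # Rescorla-Wagner walk-through over subject 1's trials (illustrative only).
    dat <- read.table("bandit2arm_exampleData.txt", header = TRUE)
    s1  <- subset(dat, subjID == 1)
    A   <- 0.3        # learning rate, illustrative value
    tau <- 1.5        # softmax inverse temperature, illustrative value
    ev  <- c(0, 0)    # expected value of arm 1 and arm 2
    ll  <- 0          # running choice log-likelihood
    for (t in seq_len(nrow(s1))) {
      p      <- exp(tau * ev) / sum(exp(tau * ev))     # softmax probabilities
      ch     <- s1$choice[t]
      ll     <- ll + log(p[ch])                        # credit observed choice
      ev[ch] <- ev[ch] + A * (s1$outcome[t] - ev[ch])  # delta update
    }
    c(ev = ev, loglik = ll)
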
+17 95 1 -1 +17 96 1 -1 +17 97 1 1 +17 98 1 1 +17 99 1 1 +17 100 2 -1 +18 1 2 -1 +18 2 1 -1 +18 3 2 -1 +18 4 2 1 +18 5 2 -1 +18 6 2 -1 +18 7 1 -1 +18 8 1 1 +18 9 1 1 +18 10 2 -1 +18 11 1 1 +18 12 2 1 +18 13 1 1 +18 14 1 1 +18 15 1 1 +18 16 1 -1 +18 17 2 -1 +18 18 2 1 +18 19 1 -1 +18 20 1 1 +18 21 1 1 +18 22 1 -1 +18 23 2 -1 +18 24 2 -1 +18 25 1 1 +18 26 1 1 +18 27 2 -1 +18 28 1 1 +18 29 1 -1 +18 30 2 1 +18 31 1 1 +18 32 2 -1 +18 33 1 1 +18 34 2 1 +18 35 2 -1 +18 36 1 -1 +18 37 1 1 +18 38 1 1 +18 39 2 1 +18 40 2 -1 +18 41 2 1 +18 42 2 1 +18 43 1 -1 +18 44 2 1 +18 45 2 1 +18 46 2 -1 +18 47 1 1 +18 48 2 -1 +18 49 2 -1 +18 50 2 1 +18 51 1 1 +18 52 1 1 +18 53 2 -1 +18 54 2 1 +18 55 2 -1 +18 56 1 1 +18 57 1 1 +18 58 2 1 +18 59 1 1 +18 60 1 -1 +18 61 2 1 +18 62 2 -1 +18 63 1 -1 +18 64 2 -1 +18 65 1 1 +18 66 2 1 +18 67 1 -1 +18 68 1 -1 +18 69 1 1 +18 70 1 -1 +18 71 2 -1 +18 72 2 -1 +18 73 1 1 +18 74 1 -1 +18 75 2 1 +18 76 1 1 +18 77 2 -1 +18 78 1 1 +18 79 2 -1 +18 80 2 -1 +18 81 1 1 +18 82 1 1 +18 83 1 1 +18 84 2 1 +18 85 2 -1 +18 86 2 1 +18 87 1 1 +18 88 1 1 +18 89 2 -1 +18 90 1 1 +18 91 1 1 +18 92 1 -1 +18 93 1 -1 +18 94 1 -1 +18 95 1 1 +18 96 1 1 +18 97 1 1 +18 98 1 -1 +18 99 2 -1 +18 100 1 -1 +19 1 1 1 +19 2 1 -1 +19 3 2 -1 +19 4 1 1 +19 5 1 1 +19 6 1 -1 +19 7 1 1 +19 8 2 -1 +19 9 1 1 +19 10 1 1 +19 11 1 -1 +19 12 2 1 +19 13 2 1 +19 14 2 -1 +19 15 1 1 +19 16 2 1 +19 17 2 -1 +19 18 2 -1 +19 19 1 1 +19 20 2 -1 +19 21 1 1 +19 22 2 -1 +19 23 1 -1 +19 24 1 1 +19 25 1 1 +19 26 1 -1 +19 27 1 1 +19 28 2 1 +19 29 1 1 +19 30 1 1 +19 31 2 1 +19 32 2 1 +19 33 2 -1 +19 34 1 -1 +19 35 1 1 +19 36 1 1 +19 37 1 1 +19 38 1 -1 +19 39 1 1 +19 40 2 1 +19 41 2 1 +19 42 2 1 +19 43 2 1 +19 44 2 -1 +19 45 1 -1 +19 46 1 1 +19 47 2 -1 +19 48 2 -1 +19 49 1 1 +19 50 1 1 +19 51 1 1 +19 52 2 -1 +19 53 1 1 +19 54 1 1 +19 55 2 -1 +19 56 1 -1 +19 57 1 -1 +19 58 1 1 +19 59 1 1 +19 60 1 1 +19 61 1 1 +19 62 1 -1 +19 63 1 -1 +19 64 1 1 +19 65 1 -1 +19 66 1 1 +19 67 1 1 +19 68 1 1 +19 69 1 1 +19 70 1 -1 +19 71 2 -1 +19 72 1 -1 +19 73 2 1 +19 74 2 -1 +19 75 2 1 +19 76 1 -1 +19 77 1 1 +19 78 2 -1 +19 79 2 -1 +19 80 1 1 +19 81 1 -1 +19 82 1 -1 +19 83 2 1 +19 84 2 -1 +19 85 1 -1 +19 86 1 1 +19 87 2 -1 +19 88 2 1 +19 89 2 1 +19 90 1 1 +19 91 2 -1 +19 92 1 -1 +19 93 2 -1 +19 94 1 1 +19 95 1 -1 +19 96 2 1 +19 97 1 1 +19 98 2 -1 +19 99 1 1 +19 100 2 -1 +20 1 1 1 +20 2 1 -1 +20 3 1 -1 +20 4 2 -1 +20 5 2 -1 +20 6 1 -1 +20 7 1 -1 +20 8 2 -1 +20 9 1 1 +20 10 1 -1 +20 11 1 -1 +20 12 1 -1 +20 13 2 -1 +20 14 2 1 +20 15 2 1 +20 16 2 -1 +20 17 2 1 +20 18 2 -1 +20 19 1 -1 +20 20 2 -1 +20 21 1 1 +20 22 1 1 +20 23 1 1 +20 24 1 1 +20 25 1 1 +20 26 1 1 +20 27 1 1 +20 28 1 1 +20 29 1 1 +20 30 1 -1 +20 31 1 -1 +20 32 2 -1 +20 33 1 -1 +20 34 1 1 +20 35 2 -1 +20 36 2 1 +20 37 1 1 +20 38 1 -1 +20 39 1 -1 +20 40 1 1 +20 41 2 1 +20 42 1 1 +20 43 2 1 +20 44 1 -1 +20 45 2 -1 +20 46 2 -1 +20 47 2 1 +20 48 1 1 +20 49 1 1 +20 50 1 1 +20 51 1 -1 +20 52 1 1 +20 53 1 -1 +20 54 1 1 +20 55 1 1 +20 56 2 1 +20 57 1 1 +20 58 1 -1 +20 59 2 -1 +20 60 2 -1 +20 61 1 1 +20 62 1 1 +20 63 1 -1 +20 64 2 1 +20 65 2 -1 +20 66 2 -1 +20 67 1 -1 +20 68 1 1 +20 69 1 -1 +20 70 2 -1 +20 71 1 1 +20 72 1 -1 +20 73 2 -1 +20 74 1 -1 +20 75 1 1 +20 76 1 -1 +20 77 1 1 +20 78 1 -1 +20 79 1 1 +20 80 1 -1 +20 81 2 -1 +20 82 1 -1 +20 83 2 -1 +20 84 1 1 +20 85 1 1 +20 86 1 -1 +20 87 2 -1 +20 88 2 1 +20 89 1 1 +20 90 1 -1 +20 91 2 -1 +20 92 1 -1 +20 93 2 -1 +20 94 2 1 +20 95 1 -1 +20 96 2 1 +20 97 2 -1 +20 98 1 1 +20 99 1 -1 +20 100 2 1 diff --git a/R/inst/extdata/bandit4arm2_exampleData.txt 
b/R/inst/extdata/bandit4arm2_exampleData.txt new file mode 100644 index 00000000..68ac99e3 --- /dev/null +++ b/R/inst/extdata/bandit4arm2_exampleData.txt @@ -0,0 +1,3001 @@ +subjID choice outcome +1 4 33 +1 3 84 +1 3 88 +1 2 36 +1 1 67 +1 2 28 +1 1 74 +1 1 76 +1 1 79 +1 1 84 +1 1 82 +1 3 87 +1 3 85 +1 4 48 +1 1 79 +1 3 76 +1 3 73 +1 1 75 +1 1 71 +1 3 61 +1 3 60 +1 1 72 +1 1 66 +1 1 61 +1 4 42 +1 2 41 +1 1 57 +1 4 32 +1 3 67 +1 1 57 +1 1 58 +1 3 71 +1 3 66 +1 2 41 +1 1 70 +1 1 73 +1 1 74 +1 4 45 +1 1 72 +1 1 73 +1 1 70 +1 1 66 +1 1 69 +1 4 42 +1 3 58 +1 1 68 +1 2 53 +1 3 58 +1 4 50 +1 1 73 +1 3 65 +1 2 44 +1 3 64 +1 4 44 +1 2 41 +1 1 61 +1 1 56 +1 4 44 +1 1 51 +1 1 55 +1 4 47 +1 4 48 +1 1 59 +1 3 55 +1 3 44 +1 1 59 +1 2 25 +1 1 59 +1 2 31 +1 3 45 +1 1 58 +1 1 56 +1 1 58 +1 3 51 +1 4 52 +1 1 55 +1 4 56 +1 4 55 +1 1 54 +1 1 50 +1 3 58 +1 1 53 +1 1 51 +1 2 31 +1 3 58 +1 4 55 +1 4 55 +1 3 59 +1 1 50 +1 1 54 +1 1 53 +1 4 52 +1 3 57 +1 4 60 +1 4 60 +1 4 58 +1 4 60 +1 4 55 +1 4 56 +1 4 59 +1 2 36 +1 4 59 +1 3 48 +1 2 41 +1 4 62 +1 4 62 +1 1 44 +1 2 48 +1 4 66 +1 2 53 +1 3 56 +1 2 56 +1 2 61 +1 3 49 +1 2 58 +1 4 68 +1 2 54 +1 3 49 +1 4 70 +1 4 69 +1 4 73 +1 4 77 +1 2 54 +1 4 70 +1 1 49 +1 4 69 +1 1 50 +1 2 63 +1 1 55 +1 2 57 +1 3 53 +1 2 57 +1 4 85 +1 4 85 +1 4 88 +1 4 82 +1 2 62 +1 4 75 +1 3 61 +1 3 61 +1 3 62 +1 4 77 +1 2 66 +1 3 59 +1 4 74 +1 4 79 +1 4 79 +1 1 73 +1 4 76 +1 4 76 +1 4 78 +1 4 77 +1 4 78 +1 2 66 +1 4 80 +1 4 74 +1 1 69 +1 4 69 +1 4 73 +1 3 70 +1 3 67 +1 1 69 +1 1 71 +1 2 79 +1 1 68 +1 2 82 +1 2 84 +1 2 86 +1 2 84 +1 2 82 +1 2 84 +1 2 85 +1 2 82 +1 3 76 +1 1 62 +1 2 85 +1 3 74 +1 3 70 +1 3 69 +1 2 86 +1 3 60 +1 2 89 +1 2 86 +1 2 81 +1 2 80 +1 3 64 +1 3 67 +1 2 86 +1 1 56 +1 2 91 +1 2 91 +1 4 55 +1 2 87 +1 4 52 +1 2 85 +1 3 63 +1 2 92 +1 2 90 +1 2 90 +1 3 56 +1 2 89 +1 3 60 +1 3 60 +1 2 88 +1 2 84 +1 2 86 +1 3 53 +1 2 81 +1 2 82 +1 3 63 +1 2 78 +1 1 57 +1 2 80 +1 4 48 +1 4 44 +1 3 61 +1 3 64 +1 4 40 +1 3 67 +1 4 39 +1 1 60 +1 1 59 +1 3 68 +1 2 73 +1 3 70 +1 2 70 +1 2 70 +1 3 77 +1 2 74 +1 1 62 +1 3 82 +1 4 29 +1 4 29 +1 4 34 +1 3 73 +1 2 71 +1 2 67 +1 2 61 +1 2 60 +1 1 76 +1 1 70 +1 2 60 +1 2 57 +1 1 71 +1 1 69 +1 1 67 +1 3 58 +1 3 55 +1 2 50 +1 1 62 +1 2 52 +1 4 47 +1 2 54 +1 3 50 +1 2 53 +1 1 66 +1 4 51 +1 1 65 +1 3 50 +1 2 50 +1 3 51 +1 2 47 +1 1 60 +1 3 52 +1 4 47 +1 2 32 +1 3 56 +1 4 46 +1 1 63 +1 4 43 +1 2 24 +1 2 27 +1 1 63 +1 1 61 +1 4 40 +1 1 65 +1 1 69 +1 3 70 +1 2 25 +1 1 71 +1 1 73 +1 3 62 +1 3 69 +1 3 65 +1 1 72 +1 3 63 +1 1 75 +1 3 68 +1 2 28 +1 3 61 +1 3 64 +1 3 63 +1 4 36 +1 3 58 +1 2 35 +1 1 64 +1 3 67 +1 3 68 +1 3 66 +1 3 64 +2 1 62 +2 4 34 +2 2 36 +2 3 88 +2 3 91 +2 3 97 +2 3 94 +2 3 94 +2 3 93 +2 3 93 +2 3 88 +2 3 87 +2 3 85 +2 3 85 +2 3 81 +2 3 76 +2 3 73 +2 3 65 +2 3 64 +2 3 61 +2 3 60 +2 1 72 +2 1 66 +2 1 61 +2 1 62 +2 1 61 +2 3 62 +2 2 43 +2 1 56 +2 3 68 +2 3 68 +2 1 62 +2 4 41 +2 3 64 +2 1 70 +2 1 73 +2 1 74 +2 1 74 +2 1 72 +2 3 59 +2 1 70 +2 1 66 +2 1 69 +2 1 69 +2 2 46 +2 1 68 +2 1 70 +2 1 70 +2 1 72 +2 1 73 +2 1 70 +2 3 61 +2 3 64 +2 3 64 +2 1 66 +2 1 61 +2 1 56 +2 1 53 +2 1 51 +2 3 58 +2 4 47 +2 3 60 +2 3 57 +2 3 55 +2 3 44 +2 1 59 +2 3 44 +2 1 59 +2 4 50 +2 1 59 +2 1 58 +2 1 56 +2 2 20 +2 1 59 +2 4 52 +2 4 55 +2 1 53 +2 3 61 +2 3 58 +2 3 57 +2 3 58 +2 3 56 +2 4 57 +2 3 58 +2 4 57 +2 3 58 +2 3 59 +2 1 51 +2 4 61 +2 3 54 +2 3 50 +2 1 47 +2 4 57 +2 3 62 +2 1 49 +2 3 61 +2 1 46 +2 4 55 +2 1 42 +2 3 55 +2 3 53 +2 2 39 +2 1 43 +2 4 57 +2 3 50 +2 4 62 +2 1 44 +2 4 63 +2 4 66 +2 4 71 +2 4 71 +2 4 67 +2 4 66 +2 4 69 +2 4 72 +2 4 68 +2 4 70 +2 4 71 +2 4 70 +2 4 
69 +2 4 73 +2 4 77 +2 4 72 +2 4 70 +2 4 68 +2 4 69 +2 4 72 +2 4 75 +2 2 58 +2 4 80 +2 4 81 +2 4 84 +2 4 85 +2 4 85 +2 4 88 +2 4 82 +2 4 80 +2 4 75 +2 4 73 +2 4 75 +2 4 76 +2 4 77 +2 4 77 +2 4 77 +2 4 74 +2 4 79 +2 4 79 +2 4 79 +2 4 76 +2 4 76 +2 4 78 +2 4 77 +2 4 78 +2 4 76 +2 4 80 +2 4 74 +2 4 72 +2 4 69 +2 4 73 +2 2 76 +2 2 76 +2 4 68 +2 2 78 +2 2 79 +2 2 82 +2 2 82 +2 2 84 +2 2 86 +2 2 84 +2 2 82 +2 2 84 +2 2 85 +2 2 82 +2 2 84 +2 2 84 +2 2 85 +2 2 88 +2 2 90 +2 2 87 +2 2 86 +2 2 86 +2 2 89 +2 2 86 +2 2 81 +2 2 80 +2 2 77 +2 2 81 +2 2 86 +2 2 85 +2 2 91 +2 2 91 +2 2 87 +2 2 87 +2 2 83 +2 4 51 +2 2 91 +2 2 92 +2 2 90 +2 2 90 +2 2 91 +2 2 89 +2 2 88 +2 2 90 +2 2 88 +2 2 84 +2 2 86 +2 2 81 +2 2 81 +2 2 82 +2 2 81 +2 2 78 +2 2 78 +2 2 80 +2 2 80 +2 2 80 +2 2 81 +2 2 78 +2 2 80 +2 2 78 +2 2 76 +2 2 78 +2 2 74 +2 2 73 +2 2 73 +2 2 73 +2 2 70 +2 2 70 +2 2 74 +2 2 74 +2 2 76 +2 2 74 +2 2 75 +2 2 73 +2 2 73 +2 2 69 +2 2 71 +2 4 32 +2 2 61 +2 2 60 +2 1 76 +2 1 70 +2 1 72 +2 1 69 +2 1 71 +2 2 52 +2 1 67 +2 1 63 +2 2 49 +2 1 63 +2 1 62 +2 2 52 +2 1 56 +2 3 52 +2 1 60 +2 1 62 +2 1 66 +2 1 67 +2 1 65 +2 1 63 +2 1 60 +2 1 61 +2 1 59 +2 2 45 +2 1 62 +2 1 64 +2 1 64 +2 1 63 +2 1 63 +2 1 63 +2 1 63 +2 1 63 +2 1 64 +2 1 63 +2 3 68 +2 3 68 +2 3 69 +2 3 69 +2 3 70 +2 3 67 +2 3 64 +2 3 60 +2 3 62 +2 1 73 +2 1 73 +2 1 72 +2 1 73 +2 1 75 +2 1 72 +2 1 72 +2 1 67 +2 1 64 +2 1 65 +2 1 65 +2 3 58 +2 1 63 +2 3 57 +2 4 27 +2 1 65 +2 1 62 +2 3 64 +3 3 85 +3 1 60 +3 3 88 +3 2 36 +3 3 91 +3 3 97 +3 3 94 +3 3 94 +3 3 93 +3 3 93 +3 4 37 +3 3 87 +3 4 46 +3 1 82 +3 3 81 +3 3 76 +3 3 73 +3 1 75 +3 3 64 +3 1 71 +3 1 70 +3 1 72 +3 1 66 +3 3 67 +3 1 62 +3 3 68 +3 1 57 +3 3 61 +3 3 67 +3 3 68 +3 4 37 +3 3 71 +3 2 37 +3 1 65 +3 3 68 +3 4 41 +3 1 74 +3 1 74 +3 3 64 +3 4 48 +3 4 43 +3 3 56 +3 1 69 +3 4 42 +3 4 44 +3 3 59 +3 1 70 +3 1 70 +3 1 72 +3 3 64 +3 3 65 +3 1 67 +3 2 45 +3 1 65 +3 1 66 +3 2 38 +3 3 67 +3 3 65 +3 2 29 +3 4 48 +3 3 62 +3 1 57 +3 1 59 +3 1 57 +3 3 44 +3 4 55 +3 4 51 +3 1 59 +3 4 50 +3 1 59 +3 1 58 +3 2 22 +3 4 50 +3 4 53 +3 3 51 +3 4 55 +3 4 56 +3 3 61 +3 3 58 +3 1 50 +3 4 52 +3 4 53 +3 3 55 +3 3 58 +3 3 58 +3 4 55 +3 3 59 +3 1 51 +3 1 50 +3 4 61 +3 4 56 +3 4 52 +3 4 57 +3 2 31 +3 1 49 +3 3 61 +3 3 57 +3 4 55 +3 3 58 +3 2 32 +3 3 53 +3 3 51 +3 4 60 +3 4 57 +3 4 62 +3 1 47 +3 3 47 +3 2 48 +3 3 53 +3 1 38 +3 1 41 +3 4 67 +3 4 66 +3 2 58 +3 2 58 +3 2 55 +3 3 49 +3 2 53 +3 2 54 +3 3 51 +3 2 52 +3 3 48 +3 3 45 +3 4 70 +3 2 55 +3 2 59 +3 4 72 +3 4 75 +3 1 55 +3 4 80 +3 2 61 +3 1 64 +3 4 85 +3 4 85 +3 2 60 +3 4 82 +3 4 80 +3 4 75 +3 4 73 +3 4 75 +3 4 76 +3 4 77 +3 4 77 +3 4 77 +3 4 74 +3 4 79 +3 4 79 +3 1 73 +3 1 72 +3 4 76 +3 4 78 +3 1 66 +3 3 68 +3 4 76 +3 3 68 +3 1 65 +3 3 68 +3 4 69 +3 2 78 +3 1 72 +3 4 69 +3 1 69 +3 1 71 +3 2 79 +3 2 82 +3 1 68 +3 1 69 +3 2 86 +3 2 84 +3 1 62 +3 3 64 +3 2 85 +3 2 82 +3 2 84 +3 2 84 +3 2 85 +3 2 88 +3 2 90 +3 1 60 +3 2 86 +3 1 53 +3 2 89 +3 2 86 +3 1 62 +3 2 80 +3 2 77 +3 2 81 +3 3 67 +3 2 85 +3 2 91 +3 2 91 +3 1 52 +3 2 87 +3 2 83 +3 2 85 +3 2 91 +3 2 92 +3 2 90 +3 2 90 +3 1 47 +3 2 89 +3 2 88 +3 2 90 +3 2 88 +3 2 84 +3 2 86 +3 2 81 +3 2 81 +3 3 59 +3 4 49 +3 2 78 +3 2 78 +3 2 80 +3 2 80 +3 2 80 +3 2 81 +3 2 78 +3 2 80 +3 2 78 +3 1 60 +3 4 35 +3 3 68 +3 2 73 +3 1 64 +3 3 70 +3 3 72 +3 3 75 +3 2 74 +3 2 74 +3 2 76 +3 2 74 +3 3 79 +3 2 73 +3 1 69 +3 3 73 +3 3 69 +3 3 67 +3 2 61 +3 3 66 +3 3 63 +3 3 62 +3 2 60 +3 2 57 +3 3 66 +3 3 63 +3 2 52 +3 2 51 +3 2 49 +3 4 45 +3 2 46 +3 1 61 +3 2 49 +3 3 52 +3 4 52 +3 1 62 +3 3 47 +3 3 49 +3 3 51 +3 4 48 +3 1 60 +3 4 50 +3 4 53 +3 4 52 +3 1 62 
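The second data file in this hunk, bandit4arm2_exampleData.txt (@@ -0,0 +1,3001 @@, so one header plus 3,000 trial rows), has a different shape: columns are subjID, choice (arm 1 to 4), and outcome as a point payoff rather than the +1/-1 coding above, matching the four-armed restless-bandit task fit by bandit4arm2_kalman_filter. A small sketch of a structural sanity check on it; nothing here is prescribed by the patch:

    # Summarise the four-armed example data: mean payoff per arm and subject.
    path4 <- system.file("extdata", "bandit4arm2_exampleData.txt",
                         package = "hBayesDM")
    dat4  <- read.table(path4, header = TRUE)
    str(dat4)                                          # subjID, choice, outcome
    aggregate(outcome ~ choice + subjID, data = dat4, FUN = mean)
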
+3 1 64 +3 3 52 +3 2 26 +3 4 46 +3 2 31 +3 4 43 +3 3 67 +3 1 64 +3 3 65 +3 3 68 +3 1 62 +3 1 65 +3 3 69 +3 3 70 +3 4 46 +3 3 64 +3 3 60 +3 4 45 +3 4 45 +3 1 73 +3 3 65 +3 2 25 +3 1 75 +3 3 68 +3 4 40 +3 4 37 +3 1 64 +3 1 65 +3 3 63 +3 1 66 +3 3 57 +3 1 64 +3 3 67 +3 1 65 +3 1 62 +3 1 56 +4 2 38 +4 3 84 +4 3 88 +4 3 88 +4 3 91 +4 3 97 +4 3 94 +4 3 94 +4 3 93 +4 3 93 +4 3 88 +4 3 87 +4 3 85 +4 3 85 +4 3 81 +4 3 76 +4 3 73 +4 3 65 +4 3 64 +4 1 71 +4 1 70 +4 3 61 +4 3 68 +4 2 31 +4 3 69 +4 3 68 +4 1 57 +4 3 61 +4 3 67 +4 4 33 +4 3 68 +4 1 62 +4 1 63 +4 1 65 +4 1 70 +4 1 73 +4 1 74 +4 1 74 +4 1 72 +4 4 48 +4 4 43 +4 1 66 +4 2 48 +4 1 69 +4 3 58 +4 3 59 +4 3 59 +4 3 58 +4 3 62 +4 1 73 +4 1 70 +4 1 67 +4 1 67 +4 2 43 +4 3 65 +4 1 61 +4 1 56 +4 1 53 +4 1 51 +4 4 48 +4 3 62 +4 4 48 +4 1 59 +4 2 31 +4 3 44 +4 3 44 +4 1 59 +4 3 47 +4 2 31 +4 1 59 +4 4 46 +4 1 56 +4 1 58 +4 3 51 +4 2 28 +4 4 55 +4 3 58 +4 1 52 +4 3 58 +4 2 35 +4 4 52 +4 3 56 +4 1 51 +4 3 58 +4 3 58 +4 4 55 +4 3 59 +4 3 59 +4 4 61 +4 3 54 +4 4 56 +4 4 52 +4 2 33 +4 4 60 +4 1 49 +4 3 61 +4 3 57 +4 3 57 +4 3 58 +4 4 59 +4 3 53 +4 3 51 +4 4 60 +4 4 57 +4 4 62 +4 1 47 +4 3 47 +4 4 63 +4 4 66 +4 1 38 +4 2 56 +4 4 67 +4 4 66 +4 4 69 +4 4 72 +4 4 68 +4 4 70 +4 4 71 +4 4 70 +4 1 51 +4 4 73 +4 4 77 +4 4 72 +4 4 70 +4 4 68 +4 4 69 +4 4 72 +4 3 48 +4 4 79 +4 1 57 +4 4 81 +4 4 84 +4 4 85 +4 2 58 +4 3 53 +4 4 82 +4 4 80 +4 4 75 +4 2 64 +4 4 75 +4 4 76 +4 1 73 +4 3 59 +4 4 77 +4 3 61 +4 3 65 +4 4 79 +4 2 66 +4 1 72 +4 2 68 +4 1 69 +4 4 77 +4 4 78 +4 3 68 +4 4 80 +4 2 68 +4 3 68 +4 2 75 +4 3 71 +4 2 76 +4 3 67 +4 2 72 +4 4 69 +4 2 79 +4 4 66 +4 2 82 +4 2 84 +4 2 86 +4 4 70 +4 2 82 +4 1 66 +4 4 78 +4 2 82 +4 3 76 +4 2 84 +4 4 72 +4 2 88 +4 2 90 +4 3 69 +4 4 76 +4 1 53 +4 4 73 +4 4 69 +4 3 64 +4 2 80 +4 3 64 +4 2 81 +4 2 86 +4 2 85 +4 2 91 +4 2 91 +4 2 87 +4 2 87 +4 2 83 +4 2 85 +4 2 91 +4 2 92 +4 4 49 +4 2 90 +4 1 47 +4 3 59 +4 2 88 +4 3 60 +4 2 88 +4 3 57 +4 2 86 +4 2 81 +4 2 81 +4 2 82 +4 2 81 +4 2 78 +4 2 78 +4 2 80 +4 2 80 +4 2 80 +4 2 81 +4 2 78 +4 2 80 +4 1 57 +4 2 76 +4 2 78 +4 2 74 +4 2 73 +4 2 73 +4 2 73 +4 2 70 +4 3 75 +4 3 77 +4 3 76 +4 3 79 +4 2 74 +4 2 75 +4 3 75 +4 3 76 +4 2 69 +4 3 69 +4 2 67 +4 4 32 +4 2 60 +4 3 63 +4 1 70 +4 1 72 +4 2 57 +4 4 36 +4 3 63 +4 1 67 +4 2 51 +4 2 49 +4 1 63 +4 4 46 +4 3 51 +4 4 47 +4 3 52 +4 3 50 +4 1 62 +4 2 55 +4 2 55 +4 3 51 +4 1 63 +4 1 60 +4 1 61 +4 4 53 +4 1 60 +4 2 41 +4 4 47 +4 1 64 +4 1 63 +4 1 63 +4 3 58 +4 1 63 +4 3 67 +4 1 64 +4 1 63 +4 3 68 +4 3 68 +4 4 44 +4 3 69 +4 3 70 +4 3 67 +4 4 45 +4 3 60 +4 1 73 +4 1 73 +4 1 73 +4 4 39 +4 3 63 +4 4 37 +4 2 23 +4 3 67 +4 3 61 +4 4 36 +4 1 65 +4 3 63 +4 3 58 +4 1 63 +4 3 57 +4 3 67 +4 1 65 +4 3 66 +4 1 56 +5 3 85 +5 2 40 +5 3 88 +5 4 40 +5 1 67 +5 3 97 +5 3 94 +5 3 94 +5 3 93 +5 1 84 +5 3 88 +5 1 81 +5 3 85 +5 3 85 +5 3 81 +5 3 76 +5 3 73 +5 1 75 +5 3 64 +5 3 61 +5 3 60 +5 1 72 +5 1 66 +5 1 61 +5 1 62 +5 4 42 +5 3 62 +5 3 61 +5 1 56 +5 2 40 +5 4 37 +5 3 71 +5 1 63 +5 3 64 +5 1 70 +5 1 73 +5 1 74 +5 1 74 +5 3 64 +5 1 73 +5 1 70 +5 1 66 +5 1 69 +5 3 57 +5 1 71 +5 3 59 +5 2 53 +5 1 70 +5 1 72 +5 1 73 +5 4 46 +5 1 67 +5 3 64 +5 2 43 +5 1 66 +5 2 38 +5 3 67 +5 3 65 +5 3 59 +5 1 55 +5 2 29 +5 3 60 +5 4 51 +5 4 53 +5 3 44 +5 4 55 +5 1 59 +5 1 59 +5 4 50 +5 4 48 +5 1 58 +5 1 56 +5 3 46 +5 1 59 +5 1 61 +5 1 55 +5 3 58 +5 2 30 +5 4 57 +5 1 50 +5 1 48 +5 4 53 +5 3 55 +5 1 57 +5 1 51 +5 3 58 +5 3 59 +5 1 51 +5 4 61 +5 4 61 +5 1 53 +5 3 53 +5 4 57 +5 2 31 +5 4 60 +5 4 58 +5 3 57 +5 1 44 +5 1 42 +5 2 32 +5 4 59 +5 3 51 +5 4 60 +5 3 47 +5 4 62 +5 4 62 +5 3 47 +5 2 48 +5 
4 66 +5 4 71 +5 4 71 +5 4 67 +5 3 48 +5 4 69 +5 4 72 +5 4 68 +5 2 54 +5 2 53 +5 4 70 +5 2 53 +5 1 51 +5 3 48 +5 2 54 +5 4 70 +5 4 68 +5 4 69 +5 2 60 +5 4 75 +5 4 79 +5 4 80 +5 2 61 +5 2 57 +5 4 85 +5 4 85 +5 2 60 +5 2 62 +5 4 80 +5 3 60 +5 3 61 +5 2 60 +5 4 76 +5 2 64 +5 4 77 +5 4 77 +5 4 74 +5 2 66 +5 4 79 +5 4 79 +5 4 76 +5 2 68 +5 4 78 +5 4 77 +5 2 69 +5 4 76 +5 4 80 +5 2 68 +5 2 72 +5 2 75 +5 4 73 +5 4 74 +5 4 69 +5 2 72 +5 4 69 +5 1 72 +5 1 68 +5 4 62 +5 3 71 +5 1 67 +5 4 70 +5 2 82 +5 2 84 +5 2 85 +5 2 82 +5 4 76 +5 3 73 +5 2 85 +5 2 88 +5 2 90 +5 1 60 +5 2 86 +5 1 53 +5 2 89 +5 2 86 +5 2 81 +5 3 66 +5 2 77 +5 2 81 +5 2 86 +5 2 85 +5 2 91 +5 1 54 +5 2 87 +5 4 56 +5 2 83 +5 2 85 +5 2 91 +5 2 92 +5 2 90 +5 2 90 +5 2 91 +5 2 89 +5 2 88 +5 2 90 +5 2 88 +5 2 84 +5 2 86 +5 2 81 +5 2 81 +5 2 82 +5 2 81 +5 1 55 +5 4 50 +5 1 54 +5 2 80 +5 2 80 +5 2 81 +5 2 78 +5 2 80 +5 2 78 +5 2 76 +5 2 78 +5 3 68 +5 2 73 +5 1 64 +5 1 61 +5 4 26 +5 2 70 +5 2 74 +5 2 74 +5 3 79 +5 3 82 +5 3 79 +5 3 75 +5 3 76 +5 2 69 +5 3 69 +5 3 67 +5 2 61 +5 4 36 +5 3 63 +5 3 62 +5 4 34 +5 3 59 +5 2 55 +5 4 39 +5 2 52 +5 2 51 +5 2 49 +5 2 50 +5 4 46 +5 3 51 +5 2 49 +5 2 54 +5 3 50 +5 3 52 +5 1 66 +5 1 67 +5 1 65 +5 3 50 +5 2 50 +5 1 61 +5 1 59 +5 1 60 +5 4 50 +5 4 47 +5 2 32 +5 3 56 +5 1 63 +5 3 58 +5 2 26 +5 1 63 +5 4 43 +5 1 63 +5 4 38 +5 1 62 +5 2 20 +5 1 69 +5 1 70 +5 4 46 +5 1 71 +5 1 73 +5 3 62 +5 4 45 +5 1 73 +5 4 39 +5 4 38 +5 3 66 +5 3 68 +5 1 72 +5 1 67 +5 3 64 +5 3 63 +5 1 65 +5 3 58 +5 3 57 +5 1 64 +5 3 67 +5 2 42 +5 3 66 +5 2 46 +6 2 38 +6 1 60 +6 3 88 +6 3 88 +6 4 40 +6 3 97 +6 3 94 +6 1 76 +6 3 93 +6 3 93 +6 2 29 +6 3 87 +6 3 85 +6 2 30 +6 4 49 +6 1 77 +6 1 76 +6 3 65 +6 3 64 +6 1 71 +6 1 70 +6 3 61 +6 3 68 +6 1 61 +6 1 62 +6 4 42 +6 2 38 +6 3 61 +6 1 56 +6 4 33 +6 3 68 +6 3 71 +6 2 37 +6 3 64 +6 3 68 +6 1 73 +6 1 74 +6 1 74 +6 1 72 +6 3 59 +6 1 70 +6 1 66 +6 4 41 +6 1 69 +6 1 71 +6 1 68 +6 4 53 +6 3 58 +6 1 72 +6 1 73 +6 4 46 +6 1 67 +6 1 67 +6 1 65 +6 3 65 +6 1 61 +6 1 56 +6 1 53 +6 1 51 +6 2 28 +6 4 47 +6 4 48 +6 1 59 +6 1 57 +6 4 56 +6 4 55 +6 4 51 +6 2 31 +6 1 59 +6 1 59 +6 1 58 +6 3 48 +6 3 46 +6 1 59 +6 1 61 +6 1 55 +6 1 53 +6 2 30 +6 4 57 +6 1 50 +6 3 58 +6 3 56 +6 4 57 +6 4 59 +6 2 34 +6 4 55 +6 3 59 +6 3 59 +6 1 50 +6 1 54 +6 4 56 +6 4 52 +6 3 57 +6 3 62 +6 4 60 +6 4 58 +6 3 57 +6 3 57 +6 3 58 +6 4 59 +6 4 59 +6 2 39 +6 4 60 +6 2 41 +6 1 42 +6 3 52 +6 2 47 +6 4 63 +6 3 53 +6 4 71 +6 1 41 +6 3 50 +6 4 66 +6 1 51 +6 3 54 +6 2 55 +6 2 54 +6 1 52 +6 3 47 +6 1 51 +6 2 52 +6 1 49 +6 2 54 +6 1 47 +6 4 68 +6 2 59 +6 3 47 +6 3 48 +6 1 55 +6 2 57 +6 2 61 +6 2 57 +6 2 56 +6 1 63 +6 4 88 +6 4 82 +6 2 62 +6 3 60 +6 4 73 +6 4 75 +6 4 76 +6 2 64 +6 2 66 +6 3 59 +6 2 67 +6 1 70 +6 3 61 +6 2 66 +6 3 65 +6 1 71 +6 1 69 +6 2 66 +6 1 67 +6 2 66 +6 4 80 +6 4 74 +6 4 72 +6 2 75 +6 4 73 +6 1 72 +6 1 70 +6 1 69 +6 1 71 +6 1 72 +6 4 66 +6 3 71 +6 4 61 +6 3 66 +6 3 65 +6 3 61 +6 2 84 +6 3 64 +6 3 70 +6 1 68 +6 1 62 +6 1 60 +6 1 64 +6 1 62 +6 4 70 +6 2 86 +6 2 86 +6 1 55 +6 4 69 +6 2 81 +6 2 80 +6 3 64 +6 1 63 +6 2 86 +6 2 85 +6 4 55 +6 1 54 +6 1 52 +6 3 60 +6 3 63 +6 4 51 +6 1 47 +6 4 49 +6 3 62 +6 2 90 +6 2 91 +6 2 89 +6 2 88 +6 2 90 +6 1 51 +6 2 84 +6 4 52 +6 2 81 +6 2 81 +6 2 82 +6 3 63 +6 3 60 +6 1 57 +6 2 80 +6 4 48 +6 2 80 +6 2 81 +6 1 53 +6 2 80 +6 2 78 +6 3 65 +6 1 60 +6 4 33 +6 3 68 +6 2 73 +6 2 73 +6 2 70 +6 4 26 +6 2 74 +6 2 74 +6 2 76 +6 2 74 +6 2 75 +6 3 75 +6 3 76 +6 3 73 +6 2 71 +6 1 70 +6 3 65 +6 3 66 +6 4 32 +6 2 62 +6 1 72 +6 4 35 +6 3 66 +6 1 69 +6 1 67 +6 2 51 +6 3 55 +6 2 50 +6 1 62 +6 2 52 +6 1 56 +6 4 
48 +6 2 59 +6 3 52 +6 2 55 +6 2 55 +6 1 65 +6 3 50 +6 4 47 +6 2 51 +6 1 59 +6 2 45 +6 1 62 +6 3 54 +6 3 52 +6 1 63 +6 3 60 +6 2 31 +6 1 63 +6 4 42 +6 4 43 +6 3 65 +6 3 68 +6 2 23 +6 3 69 +6 1 69 +6 1 70 +6 3 67 +6 1 71 +6 3 60 +6 1 73 +6 3 69 +6 3 65 +6 1 72 +6 3 63 +6 1 75 +6 1 72 +6 2 28 +6 1 67 +6 2 37 +6 4 37 +6 4 36 +6 2 33 +6 3 57 +6 1 64 +6 3 67 +6 4 27 +6 1 62 +6 3 64 +7 4 33 +7 1 60 +7 2 36 +7 3 88 +7 3 91 +7 3 97 +7 3 94 +7 3 94 +7 3 93 +7 3 93 +7 3 88 +7 3 87 +7 3 85 +7 1 82 +7 3 81 +7 1 77 +7 3 73 +7 3 65 +7 1 71 +7 3 61 +7 2 23 +7 1 72 +7 4 45 +7 1 61 +7 3 69 +7 2 41 +7 1 57 +7 3 61 +7 1 56 +7 2 40 +7 3 68 +7 3 71 +7 3 66 +7 1 65 +7 1 70 +7 1 73 +7 1 74 +7 1 74 +7 1 72 +7 1 73 +7 2 45 +7 3 56 +7 1 69 +7 4 42 +7 1 71 +7 1 68 +7 1 70 +7 1 70 +7 4 50 +7 3 64 +7 3 65 +7 1 67 +7 3 64 +7 1 65 +7 3 65 +7 1 61 +7 1 56 +7 3 65 +7 3 59 +7 2 28 +7 4 47 +7 1 57 +7 3 57 +7 3 55 +7 1 57 +7 1 59 +7 3 44 +7 4 51 +7 4 50 +7 4 48 +7 1 58 +7 4 48 +7 1 58 +7 1 59 +7 4 52 +7 3 55 +7 1 53 +7 2 30 +7 1 54 +7 4 55 +7 3 58 +7 3 56 +7 1 51 +7 4 59 +7 4 57 +7 1 49 +7 1 51 +7 4 60 +7 1 50 +7 4 61 +7 1 53 +7 4 52 +7 3 57 +7 3 62 +7 3 63 +7 3 61 +7 3 57 +7 2 28 +7 4 56 +7 1 43 +7 4 59 +7 3 51 +7 4 60 +7 4 57 +7 1 42 +7 3 52 +7 3 47 +7 4 63 +7 4 66 +7 4 71 +7 2 56 +7 1 44 +7 3 48 +7 4 69 +7 4 72 +7 2 55 +7 1 54 +7 2 53 +7 3 47 +7 4 69 +7 2 52 +7 4 77 +7 4 72 +7 3 46 +7 4 68 +7 4 69 +7 4 72 +7 4 75 +7 3 49 +7 4 80 +7 4 81 +7 4 84 +7 4 85 +7 4 85 +7 4 88 +7 4 82 +7 4 80 +7 4 75 +7 1 67 +7 4 75 +7 4 76 +7 4 77 +7 4 77 +7 4 77 +7 4 74 +7 1 70 +7 4 79 +7 4 79 +7 1 72 +7 4 76 +7 4 78 +7 4 77 +7 4 78 +7 4 76 +7 4 80 +7 4 74 +7 1 69 +7 4 69 +7 4 73 +7 4 74 +7 4 69 +7 1 69 +7 1 71 +7 4 68 +7 1 68 +7 4 62 +7 1 69 +7 1 67 +7 4 70 +7 3 61 +7 3 64 +7 1 60 +7 3 70 +7 1 68 +7 1 62 +7 3 72 +7 3 74 +7 1 62 +7 4 70 +7 1 61 +7 3 60 +7 4 73 +7 1 56 +7 4 67 +7 3 66 +7 4 65 +7 4 59 +7 3 67 +7 3 68 +7 3 63 +7 4 56 +7 2 87 +7 2 87 +7 1 46 +7 2 85 +7 3 63 +7 2 92 +7 2 90 +7 2 90 +7 2 91 +7 2 89 +7 2 88 +7 2 90 +7 2 88 +7 2 84 +7 2 86 +7 2 81 +7 2 81 +7 3 59 +7 2 81 +7 2 78 +7 2 78 +7 2 80 +7 2 80 +7 2 80 +7 3 61 +7 2 78 +7 2 80 +7 2 78 +7 3 65 +7 2 78 +7 2 74 +7 3 68 +7 1 64 +7 2 73 +7 2 70 +7 2 70 +7 2 74 +7 3 76 +7 1 62 +7 3 82 +7 1 64 +7 3 75 +7 2 73 +7 2 69 +7 2 71 +7 3 67 +7 3 65 +7 4 36 +7 3 63 +7 3 62 +7 2 60 +7 1 69 +7 2 55 +7 1 69 +7 2 52 +7 1 63 +7 3 55 +7 1 63 +7 3 51 +7 1 61 +7 1 56 +7 1 58 +7 1 60 +7 2 53 +7 1 66 +7 1 67 +7 2 52 +7 1 63 +7 3 54 +7 2 51 +7 1 59 +7 4 52 +7 4 50 +7 2 32 +7 2 32 +7 1 63 +7 1 63 +7 1 63 +7 4 43 +7 1 63 +7 3 62 +7 3 65 +7 3 68 +7 3 68 +7 3 69 +7 1 69 +7 1 70 +7 1 72 +7 1 71 +7 3 60 +7 2 27 +7 1 73 +7 1 73 +7 1 72 +7 1 73 +7 1 75 +7 1 72 +7 4 40 +7 1 67 +7 4 36 +7 1 65 +7 3 63 +7 1 66 +7 2 35 +7 2 41 +7 1 66 +7 3 68 +7 1 62 +7 3 64 +8 3 85 +8 3 84 +8 3 88 +8 4 40 +8 1 67 +8 3 97 +8 2 26 +8 3 94 +8 3 93 +8 3 93 +8 3 88 +8 3 87 +8 1 85 +8 3 85 +8 3 81 +8 1 77 +8 3 73 +8 1 75 +8 1 71 +8 3 61 +8 1 70 +8 1 72 +8 1 66 +8 3 67 +8 3 69 +8 1 61 +8 4 36 +8 3 61 +8 3 67 +8 1 57 +8 2 39 +8 1 62 +8 3 66 +8 3 64 +8 3 68 +8 3 71 +8 3 69 +8 3 68 +8 1 72 +8 3 59 +8 4 43 +8 1 66 +8 1 69 +8 3 57 +8 3 58 +8 1 68 +8 1 70 +8 3 58 +8 1 72 +8 1 73 +8 4 46 +8 4 44 +8 1 67 +8 4 44 +8 1 66 +8 3 67 +8 3 67 +8 1 53 +8 3 59 +8 1 55 +8 4 47 +8 4 48 +8 3 57 +8 1 57 +8 3 44 +8 4 55 +8 4 51 +8 4 51 +8 1 59 +8 2 27 +8 3 45 +8 1 56 +8 4 50 +8 3 51 +8 4 52 +8 4 55 +8 3 58 +8 4 55 +8 3 58 +8 1 50 +8 3 58 +8 3 56 +8 4 57 +8 1 57 +8 3 58 +8 3 58 +8 3 59 +8 1 51 +8 4 61 +8 4 61 +8 4 56 +8 3 53 +8 3 57 +8 4 60 +8 3 63 +8 3 61 +8 3 57 
+8 4 55 +8 3 58 +8 1 43 +8 2 36 +8 4 59 +8 1 43 +8 2 41 +8 4 62 +8 4 62 +8 4 63 +8 4 63 +8 4 66 +8 4 71 +8 4 71 +8 3 50 +8 4 66 +8 1 51 +8 4 72 +8 1 52 +8 4 70 +8 4 71 +8 2 54 +8 1 51 +8 4 73 +8 4 77 +8 4 72 +8 4 70 +8 4 68 +8 4 69 +8 1 50 +8 4 75 +8 4 79 +8 3 51 +8 3 53 +8 4 84 +8 4 85 +8 4 85 +8 4 88 +8 4 82 +8 4 80 +8 4 75 +8 4 73 +8 4 75 +8 2 60 +8 4 77 +8 3 59 +8 4 77 +8 4 74 +8 4 79 +8 4 79 +8 4 79 +8 4 76 +8 4 76 +8 4 78 +8 4 77 +8 4 78 +8 4 76 +8 4 80 +8 4 74 +8 4 72 +8 4 69 +8 4 73 +8 4 74 +8 4 69 +8 4 68 +8 3 72 +8 3 70 +8 4 66 +8 2 82 +8 2 84 +8 3 66 +8 2 84 +8 3 61 +8 2 84 +8 2 85 +8 2 82 +8 2 84 +8 2 84 +8 2 85 +8 2 88 +8 2 90 +8 2 87 +8 2 86 +8 2 86 +8 2 89 +8 2 86 +8 2 81 +8 2 80 +8 2 77 +8 4 59 +8 2 86 +8 2 85 +8 2 91 +8 2 91 +8 2 87 +8 2 87 +8 2 83 +8 2 85 +8 2 91 +8 4 49 +8 2 90 +8 2 90 +8 2 91 +8 3 59 +8 2 88 +8 2 90 +8 2 88 +8 2 84 +8 2 86 +8 2 81 +8 1 51 +8 2 82 +8 2 81 +8 2 78 +8 2 78 +8 2 80 +8 2 80 +8 2 80 +8 2 81 +8 2 78 +8 2 80 +8 2 78 +8 2 76 +8 2 78 +8 2 74 +8 2 73 +8 2 73 +8 4 28 +8 2 70 +8 1 61 +8 2 74 +8 2 74 +8 2 76 +8 2 74 +8 2 75 +8 2 73 +8 2 73 +8 2 69 +8 2 71 +8 2 67 +8 1 70 +8 2 60 +8 1 76 +8 3 62 +8 3 62 +8 2 57 +8 3 66 +8 1 69 +8 3 58 +8 1 63 +8 1 63 +8 4 45 +8 1 62 +8 2 52 +8 1 56 +8 1 58 +8 4 52 +8 4 52 +8 1 66 +8 3 49 +8 1 65 +8 3 50 +8 3 54 +8 1 61 +8 1 59 +8 1 60 +8 4 50 +8 1 64 +8 1 64 +8 1 63 +8 1 63 +8 3 58 +8 2 26 +8 1 63 +8 1 64 +8 1 63 +8 3 68 +8 4 40 +8 3 69 +8 3 69 +8 3 70 +8 3 67 +8 1 71 +8 1 73 +8 1 73 +8 3 69 +8 1 73 +8 1 72 +8 1 73 +8 1 75 +8 1 72 +8 1 72 +8 3 61 +8 2 37 +8 3 63 +8 3 63 +8 1 66 +8 4 30 +8 3 57 +8 1 66 +8 1 65 +8 3 66 +8 1 56 +9 4 33 +9 1 60 +9 1 61 +9 3 88 +9 3 91 +9 3 97 +9 3 94 +9 3 94 +9 3 93 +9 3 93 +9 3 88 +9 4 43 +9 3 85 +9 3 85 +9 4 49 +9 1 77 +9 1 76 +9 2 20 +9 3 64 +9 1 71 +9 1 70 +9 3 61 +9 1 66 +9 3 67 +9 1 62 +9 3 68 +9 4 36 +9 3 61 +9 1 56 +9 3 68 +9 3 68 +9 2 38 +9 4 41 +9 1 65 +9 1 70 +9 1 73 +9 1 74 +9 1 74 +9 1 72 +9 1 73 +9 3 55 +9 1 66 +9 1 69 +9 1 69 +9 4 44 +9 3 59 +9 2 53 +9 1 70 +9 1 72 +9 4 50 +9 1 70 +9 4 44 +9 4 41 +9 2 43 +9 1 66 +9 1 61 +9 4 44 +9 2 34 +9 1 51 +9 1 55 +9 1 58 +9 3 60 +9 1 59 +9 4 53 +9 3 44 +9 1 59 +9 4 51 +9 4 51 +9 1 59 +9 4 48 +9 2 23 +9 2 22 +9 4 50 +9 1 59 +9 3 51 +9 3 55 +9 1 53 +9 3 61 +9 4 57 +9 1 50 +9 2 31 +9 3 56 +9 4 57 +9 4 59 +9 2 34 +9 3 58 +9 1 51 +9 3 59 +9 3 58 +9 1 54 +9 4 56 +9 3 53 +9 1 49 +9 1 48 +9 1 49 +9 4 58 +9 4 60 +9 4 55 +9 4 56 +9 4 59 +9 4 59 +9 4 59 +9 4 60 +9 4 57 +9 2 45 +9 3 52 +9 4 63 +9 4 63 +9 2 51 +9 2 53 +9 4 71 +9 2 56 +9 4 66 +9 1 51 +9 4 72 +9 3 48 +9 4 70 +9 2 53 +9 2 54 +9 2 53 +9 2 52 +9 1 49 +9 3 45 +9 4 70 +9 4 68 +9 4 69 +9 4 72 +9 3 48 +9 4 79 +9 4 80 +9 1 59 +9 4 84 +9 4 85 +9 4 85 +9 4 88 +9 4 82 +9 4 80 +9 1 65 +9 4 73 +9 1 68 +9 4 76 +9 2 64 +9 4 77 +9 1 71 +9 1 70 +9 4 79 +9 4 79 +9 4 79 +9 4 76 +9 4 76 +9 4 78 +9 4 77 +9 4 78 +9 4 76 +9 2 67 +9 1 65 +9 1 69 +9 1 73 +9 4 73 +9 4 74 +9 4 69 +9 4 68 +9 2 78 +9 2 79 +9 2 82 +9 4 62 +9 2 84 +9 4 59 +9 2 84 +9 4 72 +9 2 84 +9 4 78 +9 1 64 +9 4 76 +9 4 73 +9 2 85 +9 4 70 +9 4 73 +9 2 87 +9 2 86 +9 2 86 +9 2 89 +9 2 86 +9 2 81 +9 2 80 +9 2 77 +9 3 67 +9 4 57 +9 3 68 +9 2 91 +9 2 91 +9 2 87 +9 2 87 +9 2 83 +9 1 45 +9 3 63 +9 2 92 +9 2 90 +9 2 90 +9 2 91 +9 4 49 +9 2 88 +9 2 90 +9 2 88 +9 2 84 +9 2 86 +9 2 81 +9 2 81 +9 2 82 +9 2 81 +9 1 55 +9 2 78 +9 2 80 +9 2 80 +9 2 80 +9 2 81 +9 2 78 +9 2 80 +9 2 78 +9 3 65 +9 2 78 +9 3 68 +9 2 73 +9 3 68 +9 3 70 +9 2 70 +9 3 75 +9 3 77 +9 2 74 +9 3 79 +9 3 82 +9 3 79 +9 3 75 +9 2 73 +9 1 67 +9 3 69 +9 4 32 +9 3 65 +9 1 74 +9 3 63 +9 1 70 +9 
3 62 +9 2 57 +9 3 66 +9 2 52 +9 1 67 +9 1 63 +9 3 55 +9 4 45 +9 3 51 +9 3 51 +9 3 54 +9 1 58 +9 1 60 +9 1 62 +9 1 66 +9 1 67 +9 1 65 +9 2 52 +9 4 47 +9 1 61 +9 1 59 +9 1 60 +9 3 52 +9 1 64 +9 1 64 +9 4 46 +9 4 46 +9 1 63 +9 1 63 +9 4 42 +9 2 27 +9 1 63 +9 1 61 +9 3 68 +9 3 69 +9 3 69 +9 3 70 +9 3 67 +9 1 71 +9 1 73 +9 3 62 +9 3 69 +9 1 73 +9 3 65 +9 4 38 +9 3 66 +9 3 68 +9 1 72 +9 1 67 +9 3 64 +9 3 63 +9 4 36 +9 1 66 +9 3 57 +9 1 64 +9 1 66 +9 3 68 +9 1 62 +9 3 64 +10 1 62 +10 4 34 +10 3 88 +10 2 36 +10 3 91 +10 3 97 +10 3 94 +10 3 94 +10 3 93 +10 3 93 +10 3 88 +10 3 87 +10 3 85 +10 3 85 +10 3 81 +10 3 76 +10 3 73 +10 3 65 +10 3 64 +10 3 61 +10 3 60 +10 3 61 +10 3 68 +10 2 31 +10 3 69 +10 3 68 +10 3 62 +10 3 61 +10 4 32 +10 3 68 +10 3 68 +10 1 62 +10 3 66 +10 3 64 +10 3 68 +10 3 71 +10 3 69 +10 2 44 +10 1 72 +10 3 59 +10 1 70 +10 3 56 +10 1 69 +10 1 69 +10 1 71 +10 1 68 +10 1 70 +10 1 70 +10 1 72 +10 3 64 +10 1 70 +10 1 67 +10 1 67 +10 1 65 +10 1 66 +10 2 38 +10 2 36 +10 1 53 +10 1 51 +10 3 58 +10 1 58 +10 3 60 +10 2 32 +10 4 53 +10 3 44 +10 4 55 +10 1 59 +10 3 47 +10 3 48 +10 4 48 +10 2 23 +10 4 48 +10 1 58 +10 1 59 +10 1 61 +10 4 55 +10 1 53 +10 1 52 +10 4 57 +10 4 55 +10 2 31 +10 4 53 +10 4 57 +10 3 58 +10 3 58 +10 3 58 +10 3 59 +10 4 60 +10 3 58 +10 4 61 +10 2 37 +10 3 53 +10 1 49 +10 4 60 +10 1 49 +10 4 58 +10 4 60 +10 4 55 +10 3 58 +10 3 55 +10 3 53 +10 3 51 +10 4 60 +10 3 47 +10 4 62 +10 4 62 +10 4 63 +10 1 48 +10 4 66 +10 1 38 +10 4 71 +10 4 67 +10 3 48 +10 4 69 +10 4 72 +10 4 68 +10 4 70 +10 4 71 +10 4 70 +10 4 69 +10 1 51 +10 4 77 +10 4 72 +10 4 70 +10 2 55 +10 3 49 +10 1 50 +10 4 75 +10 4 79 +10 4 80 +10 4 81 +10 4 84 +10 4 85 +10 4 85 +10 4 88 +10 4 82 +10 4 80 +10 4 75 +10 4 73 +10 4 75 +10 4 76 +10 4 77 +10 4 77 +10 4 77 +10 4 74 +10 4 79 +10 4 79 +10 4 79 +10 4 76 +10 4 76 +10 4 78 +10 4 77 +10 2 69 +10 4 76 +10 4 80 +10 4 74 +10 4 72 +10 4 69 +10 4 73 +10 4 74 +10 4 69 +10 4 68 +10 1 71 +10 1 72 +10 1 68 +10 4 62 +10 2 84 +10 4 59 +10 1 65 +10 1 62 +10 2 84 +10 2 85 +10 2 82 +10 2 84 +10 2 84 +10 2 85 +10 2 88 +10 2 90 +10 2 87 +10 2 86 +10 2 86 +10 2 89 +10 2 86 +10 2 81 +10 2 80 +10 2 77 +10 2 81 +10 2 86 +10 2 85 +10 2 91 +10 2 91 +10 2 87 +10 2 87 +10 2 83 +10 2 85 +10 2 91 +10 2 92 +10 2 90 +10 2 90 +10 2 91 +10 2 89 +10 2 88 +10 2 90 +10 2 88 +10 2 84 +10 2 86 +10 2 81 +10 2 81 +10 2 82 +10 2 81 +10 2 78 +10 2 78 +10 2 80 +10 2 80 +10 2 80 +10 2 81 +10 2 78 +10 2 80 +10 2 78 +10 2 76 +10 2 78 +10 2 74 +10 2 73 +10 2 73 +10 3 70 +10 3 72 +10 4 26 +10 3 77 +10 3 76 +10 3 79 +10 2 74 +10 2 75 +10 3 75 +10 2 73 +10 2 69 +10 4 32 +10 2 67 +10 3 65 +10 2 60 +10 3 63 +10 2 62 +10 3 62 +10 3 59 +10 2 55 +10 2 52 +10 3 58 +10 1 63 +10 2 49 +10 1 63 +10 3 51 +10 3 51 +10 1 56 +10 1 58 +10 3 50 +10 2 53 +10 1 66 +10 1 67 +10 1 65 +10 1 63 +10 1 60 +10 1 61 +10 1 59 +10 1 60 +10 1 62 +10 1 64 +10 1 64 +10 1 63 +10 3 60 +10 1 63 +10 1 63 +10 1 63 +10 1 64 +10 4 42 +10 1 61 +10 1 62 +10 3 69 +10 3 69 +10 4 42 +10 3 67 +10 4 45 +10 3 60 +10 3 62 +10 3 69 +10 3 65 +10 3 65 +10 3 63 +10 3 66 +10 3 68 +10 3 67 +10 3 61 +10 3 64 +10 2 32 +10 3 63 +10 3 58 +10 3 57 +10 3 57 +10 3 67 +10 3 68 +10 3 66 +10 3 64 diff --git a/R/inst/extdata/bandit4arm_exampleData.txt b/R/inst/extdata/bandit4arm_exampleData.txt new file mode 100644 index 00000000..92ffad0d --- /dev/null +++ b/R/inst/extdata/bandit4arm_exampleData.txt @@ -0,0 +1,2001 @@ +subjID gain loss choice +102 0 0 2 +102 1 0 1 +102 0 -1 1 +102 1 0 3 +102 0 -1 3 +102 0 -1 4 +102 0 -1 4 +102 0 0 2 +102 1 0 2 +102 0 0 1 +102 0 0 1 +102 0 0 
[... 1,996 further rows (subjects 102, 201-209) elided ...]
+209 0 0 1
\ No newline at end of file
diff --git a/R/inst/extdata/bart_exampleData.txt b/R/inst/extdata/bart_exampleData.txt
new file mode 100644
index 00000000..4890955f
--- /dev/null
+++ b/R/inst/extdata/bart_exampleData.txt
@@ -0,0 +1,91 @@
+subjID group trial reward pumps explosion
+101 0 1 1 14 1
+101 0 2 1 39 0
[... 88 further rows (subjects 101, 103, 104; 30 trials each) elided ...]
diff --git a/R/inst/extdata/choiceRT_exampleData.txt b/R/inst/extdata/choiceRT_exampleData.txt
new file mode 100644
index 00000000..bbe2b474
--- /dev/null
+++ b/R/inst/extdata/choiceRT_exampleData.txt
@@ -0,0 +1,5001 @@
+RT choice subjID condition
+0.238126253704183 1 1 1
+0.788334139249308 2 1 1
[... further rows elided; per the hunk header, the file contains 5,000 data rows in total (RT in seconds, choice, subjID, condition) ...]
+0.524142717204131 1 2 1 +0.668726417947444 2 2 1 +0.683251780945197 1 2 1 +0.40863429831843 2 2 1 +0.276342613901865 2 2 1 +0.213193944799305 1 2 1 +0.40877289939876 2 2 1 +0.331817008251402 2 2 1 +0.258591934173063 1 2 1 +0.630903225088589 2 2 1 +0.987067038242542 2 2 1 +0.388841679090983 2 2 1 +0.50346695279445 2 2 1 +0.806091271285255 2 2 1 +0.492528933755195 2 2 1 +0.333653132977917 2 2 1 +0.385279766168457 2 2 1 +0.595417467221174 2 2 1 +0.365549592893083 2 2 1 +0.433959310098169 2 2 1 +0.573461315434403 2 2 1 +0.344529930746843 2 2 1 +0.291958498883562 1 2 1 +0.70457842689903 1 2 1 +0.806575731336316 2 2 1 +0.398225114239535 1 2 1 +0.377149378210516 2 2 1 +0.4258972597472 2 2 1 +0.345195995455676 1 2 1 +0.351061263845004 1 2 1 +0.608134033587742 2 2 1 +0.59067213970621 2 2 1 +0.422680728979774 1 2 1 +0.365154436343711 2 2 1 +0.329038190670398 2 2 1 +0.50673589153492 1 2 1 +0.370741158040407 2 2 1 +0.326087804776541 2 2 1 +0.255452470867296 2 2 1 +0.390552106391914 2 2 1 +0.26482744433547 2 2 1 +0.33730434871047 2 2 1 +0.394549621635902 2 2 1 +0.309813859295583 2 2 1 +0.63449833976955 2 2 1 +1.06805758473031 2 2 1 +0.421454806981705 2 2 1 +0.281636679807289 2 2 1 +0.554306471235953 2 2 1 +0.604870136671619 2 2 1 +1.08217269575099 1 2 1 +0.379299151823082 2 2 1 +0.689740592470931 2 2 1 +0.254395650138749 2 2 1 +0.414172534052434 2 2 1 +0.520113577322035 2 2 1 +0.431996596446885 2 2 1 +1.08053726808782 2 2 1 +0.3988833679393 1 2 1 +0.353686935845557 2 2 1 +0.260219916678923 2 2 1 +0.86593681879694 2 2 1 +0.225590599418998 2 2 1 +0.349164601087427 2 2 1 +0.458075994072888 2 2 1 +0.294956639081246 2 2 1 +0.313858826908574 2 2 1 +0.31162986062042 1 2 1 +0.507204360257467 2 2 1 +0.352032600138167 1 2 1 +0.663557530182887 2 2 1 +0.199857632822527 1 2 1 +0.514502853877809 2 2 1 +0.469535266171427 2 2 1 +0.355020801164096 2 2 1 +0.683808271711007 2 2 1 +0.301603299502107 2 2 1 +0.269580060746496 2 2 1 +0.299073546316696 2 2 1 +1.10983985933577 2 2 1 +0.360307123921532 2 2 1 +0.442261294563025 2 2 1 +0.435168028548888 1 2 1 +0.270793007645254 2 2 1 +0.607563481133469 2 2 1 +0.242718448543483 2 2 1 +0.760580795652265 2 2 1 +0.431996589129307 2 2 1 +0.374282624407255 2 2 1 +0.416206432567024 2 2 1 +0.519571791432021 2 2 1 +0.196032953071603 2 2 1 +0.315596729977301 1 2 1 +0.679912355835528 2 2 1 +0.264862579134914 1 2 1 +0.218987554044978 2 2 1 +0.355620540652982 1 2 1 +0.543244665580406 1 2 1 +0.457512624736921 2 2 1 +1.3226460471116 1 2 1 +0.525011653461871 1 2 1 +0.370965283148772 2 2 1 +0.347902925695899 1 2 1 +0.398586878749805 2 2 1 +0.548961196694153 1 2 1 +0.410882145807631 2 2 1 +0.198715843872579 1 2 1 +0.225346382503031 2 2 1 +0.42578338272523 2 2 1 +0.270583704112055 2 2 1 +0.219387522590806 1 2 1 +0.342735985144739 2 2 1 +0.553884896165182 2 2 1 +0.34639458884139 2 2 1 +0.283955583130347 2 2 1 +0.223220560804016 2 2 1 +0.243212165170184 1 2 1 +0.417166480278331 2 2 1 +0.339544388395638 2 2 1 +0.546503987947626 1 2 1 +0.318410466893085 1 2 1 +0.339234853728487 1 2 1 +0.340516936127161 2 2 1 +0.518403351170541 2 2 1 +0.195409190162283 1 2 1 +0.358173332839706 2 2 1 +0.699954775815217 1 2 1 +0.348838538015419 2 2 1 +0.270023193413005 2 2 1 +0.266646456805835 2 2 1 +1.16775233367232 1 2 1 +0.432285328985634 2 2 1 +0.392918105200082 2 2 1 +0.280124804921595 2 2 1 +0.339253528717098 1 2 1 +0.612654152551717 2 2 1 +1.09354543233683 1 2 1 +0.253131913451641 1 2 1 +0.407884093764528 2 2 1 +0.729923816383264 2 2 1 +0.412724482985278 1 2 1 +0.422136530830045 1 2 1 +0.195907626308766 1 2 1 
+0.250168829070227 2 2 1 +0.361614330778561 2 2 1 +0.477174944954289 1 2 1 +0.630016603117949 1 2 1 +0.465295384344847 2 2 1 +0.294143656815915 2 2 1 +0.264628027587206 2 2 1 +0.207214488692379 2 2 1 +0.275361465213498 2 2 1 +0.390244603648003 2 2 1 +0.392019933911482 2 2 1 +0.419421788233775 2 2 1 +0.398851313639373 2 2 1 +0.222684342528921 1 2 1 +0.309535304324601 2 2 1 +0.518814359944856 1 2 1 +0.852128247986192 2 2 1 +0.281410288223871 2 2 1 +1.04526845496218 1 2 1 +0.299671718323509 2 2 1 +1.2206902247079 2 2 1 +0.266245221651821 2 2 1 +0.585856917539606 2 2 1 +0.246713344194944 2 2 1 +0.243221392767397 1 2 1 +0.466101604938217 2 2 1 +0.681358629889813 2 2 1 +0.316032029193665 2 2 1 +0.37711415731265 2 2 1 +0.321756308895114 2 2 1 +0.339426132611737 1 2 1 +0.462776164378388 2 2 1 +0.242651330034962 1 2 1 +0.246338808493067 2 2 1 +0.351443718315621 2 2 1 +0.879179474160666 2 2 1 +0.275892228592311 2 2 1 +1.19843207513575 2 2 1 +0.492212434904309 2 2 1 +0.235334077460408 1 2 1 +0.658781985580715 2 2 1 +0.496722023486868 2 2 1 +0.389333111607481 1 2 1 +0.290682514514568 1 2 1 +0.296078339261109 2 2 1 +1.13573146386925 2 2 1 +0.321243222665541 1 2 1 +0.47852089377703 2 2 1 +0.706173198859061 2 2 1 +0.342262066554139 1 2 1 +0.229700613420935 2 2 1 +0.262779311846245 1 2 1 +0.292772839155619 2 2 1 +0.412064699877953 2 2 1 +0.459836048826499 2 2 1 +0.490523892738996 2 2 1 +0.279524933441799 2 2 1 +0.369615897239809 2 2 1 +0.230758234694844 2 2 1 +0.252861805612104 2 2 1 +0.491841566756603 2 2 1 +0.381802651857499 2 2 1 +0.347186500235918 2 2 1 +0.608385233814657 2 2 1 +1.06849187905581 2 2 1 +0.312240566682192 1 2 1 +0.846639420443936 1 2 1 +0.712399792744085 2 2 1 +0.25012681536537 2 2 1 +0.474130073908507 2 2 1 +0.669096572588102 2 2 1 +0.245614214665135 1 2 1 +1.09046461098125 2 2 1 +0.865104839101706 2 2 1 +0.725212442091508 2 2 1 +0.328860852235035 2 2 1 +0.526209106175903 2 2 1 +0.421498807641989 2 2 1 +0.293969113717582 2 2 1 +0.390793667483304 2 2 1 +0.74262292487233 2 2 1 +0.308167280867968 2 2 1 +0.359026772195073 1 2 1 +0.328927185365953 2 2 1 +1.21905584255683 1 2 1 +0.500288047433814 2 2 1 +0.224842633452238 2 2 1 +0.382005686607667 2 2 1 +0.300634446023351 1 2 1 +0.417876867416724 2 2 1 +0.371249215012469 2 2 1 +0.788689811346923 1 2 1 +0.662689531590809 1 2 1 +0.471005868314423 2 2 1 +0.594444358601939 2 2 1 +1.55077240941125 1 2 1 +0.927706317276666 1 2 1 +0.649826050593124 1 2 1 +0.28075741006474 2 2 1 +0.505810290842985 2 2 1 +0.49711754981939 2 2 1 +0.317978096635881 2 2 1 +0.684248959928731 2 2 1 +0.24282378340995 2 2 1 +0.481707664140375 2 2 1 +0.373537373349082 1 2 1 +0.405447957366669 2 2 1 +0.748014256841301 2 2 1 +0.711834286991734 2 2 1 +0.907962085626992 1 2 1 +0.369967811242 2 2 1 +0.295993682640687 2 2 1 +0.373284266243751 2 2 1 +0.34166217722553 2 2 1 +0.42937750854584 1 2 1 +0.470915823976768 2 2 1 +0.740950067010803 2 2 1 +0.674240772478605 2 2 1 +0.424450122438996 2 2 1 +0.26745277302258 2 2 1 +1.16123242588962 1 2 1 +0.579767054462027 2 2 1 +1.13724912004989 2 2 1 +0.310882482715339 2 2 1 +0.510927903962018 2 2 1 +0.344111958329695 2 2 1 +0.618226135802301 2 2 1 +0.351479460921543 2 2 1 +0.286917418430935 2 2 1 +0.297983520129636 2 2 1 +0.56257712830786 2 2 1 +0.716682250022604 2 2 1 +0.414592630645323 1 2 1 +0.238976677322081 2 2 1 +0.272443854818692 2 2 1 +1.16955807004935 2 2 1 +1.09939549809574 2 2 1 +0.270028442968248 2 2 1 +0.788721847905805 2 2 1 +0.4191755150052 2 2 1 +0.316400373681771 2 2 1 +0.609802807606279 2 2 1 +0.242772508104779 2 2 1 
+0.793135194837104 1 2 1 +0.225673630294491 2 2 1 +0.368031893686271 2 2 1 +0.276628839207783 2 2 1 +0.431489483371041 2 2 1 +0.389776699040007 1 2 1 +0.561033032142085 2 2 1 +0.330526167790471 2 2 1 +0.420110538629517 2 2 1 +0.270319448711143 2 2 1 +0.531423698665226 1 2 1 +0.476628212169931 2 2 1 +0.232314221820144 1 2 1 +0.941428986722565 2 2 1 +0.212025112102429 2 2 1 +0.368427723696994 2 2 1 +0.70992072587502 2 2 1 +0.925525840482286 2 2 1 +0.377883521547475 1 2 1 +0.78063938574767 2 2 1 +1.74503683482489 1 2 1 +0.251612907306528 2 2 1 +0.301650511821631 2 2 1 +0.813066289415148 2 2 1 +0.578407966843961 1 2 1 +0.348273146613647 2 2 1 +0.626193739500669 2 2 1 +0.253140397879093 2 2 1 +0.259307456267337 1 2 1 +0.415832848798801 1 2 1 +0.556832384556447 1 2 1 +0.673572632335394 1 2 1 +0.799853330760023 2 2 1 +0.930742156958785 1 2 1 +0.535762437608495 1 2 1 +0.473189488245964 2 2 1 +0.524542993511125 2 2 1 +0.739545131805635 1 2 1 +0.235355785766015 2 2 1 +0.260584654694577 1 2 1 +0.629506660259397 2 2 1 +0.231557754200238 2 2 1 +0.441319321469825 2 2 1 +0.583215313492174 2 2 1 +0.716830295625359 2 2 1 +0.199491993130699 2 2 1 +0.431091799266252 2 2 1 +0.206797413339198 2 2 1 +0.891303968036612 2 2 1 +0.418088670691812 2 2 1 +0.695686622676713 2 2 1 +0.509442640220052 2 2 1 +0.534434162394219 1 2 1 +0.22957675981285 2 2 1 +0.237510411071828 2 2 1 +0.384742470864086 2 2 1 +1.13440323753284 2 2 1 +0.371282462508375 2 2 1 +0.586952463908924 2 2 1 +0.751807274502031 2 2 1 +0.411626801231686 2 2 1 +0.788795034331271 2 2 1 +0.20733393183141 1 2 1 +0.638857588359423 1 2 1 +0.261472367531119 2 2 1 +0.373277752845772 2 2 1 +1.02436843366298 2 2 1 +0.302502960194587 1 2 1 +0.661168427682398 2 2 1 +0.233395542415348 2 2 1 +0.294733463977297 1 2 1 +0.26544588339993 2 2 1 +0.279478601813994 1 2 1 +0.926988733721204 2 2 1 +0.47577107073081 2 2 1 +0.265434794900874 2 2 1 +0.285106533088262 2 2 1 +0.888150923648132 1 2 1 +0.533632864862185 2 2 1 +0.816980040369266 2 2 1 +0.453517844009076 2 2 1 +0.32540514132032 1 2 1 +0.755359450830742 2 2 1 +0.388781842189814 2 2 1 +0.411602949797336 2 2 1 +0.269196234885745 2 2 1 +0.403147780188977 2 2 1 +0.815435476047168 2 2 1 +0.384261600014836 2 2 1 +0.267710822428141 2 2 1 +0.376570816086018 2 2 1 +0.364120979475635 2 2 1 +0.217074883970687 2 2 1 +0.354010708705527 2 2 1 +2.19563497894271 2 2 1 +0.269514173923494 2 2 1 +0.589071950609085 2 2 1 +0.343631456123552 2 2 1 +0.567816994849473 2 2 1 +0.510957888944779 2 2 1 +0.608732197392097 2 2 1 +0.312065520452347 2 2 1 +0.719826312987153 2 2 1 +0.991538495850398 2 2 1 +0.590296881002275 2 2 1 +0.585207716920772 2 2 1 +0.513932742073003 1 2 1 +0.29300310455318 2 2 1 +0.395229070645386 2 2 1 +0.251238693438004 1 2 1 +0.358100976516223 2 2 1 +0.604658428518133 2 2 1 +0.28898691264998 1 2 1 +0.616268731936217 1 2 1 +0.241534354644201 2 2 1 +0.586222445209675 2 2 1 +0.530578635018236 1 2 1 +0.400144208555685 2 2 1 +0.385729124722071 2 2 1 +0.397295110458581 2 2 1 +1.1980503220687 2 2 1 +0.284244205357729 2 2 1 +0.241347374440344 2 2 1 +0.379496078440646 1 2 1 +0.313029336995714 1 2 1 +0.233874351279794 2 2 1 +0.592645188650851 2 2 1 +0.380520487588823 2 2 1 +0.273459017317749 2 2 1 +0.381468845247399 2 2 1 +0.50996411763119 2 2 1 +0.26417116674038 2 2 1 +0.445725904718431 1 2 1 +0.284100667163705 2 2 1 +0.372087396465745 2 2 1 +0.259337317980368 1 2 1 +1.90238201143875 2 2 1 +0.420439527811802 2 2 1 +0.676790701130044 2 2 1 +0.491676544128052 2 2 1 +0.41814454475348 1 2 1 +0.325129499365181 2 2 1 +0.250560441839221 2 2 
1 +0.42121953335634 2 2 1 +0.720448454867087 2 2 1 +0.32286005578195 2 2 1 +0.401634215363402 2 2 1 +0.823101550994882 1 2 1 +0.22652814141488 2 2 1 +0.574012915325073 2 2 1 +0.303814059799115 2 2 1 +0.38868794499924 2 2 1 +0.468185403754697 1 2 1 +0.64039931341012 2 2 1 +0.4922813732329 2 2 1 +0.685240592163985 2 2 1 +0.460729493739463 2 2 1 +0.454264406687532 2 2 1 +0.292098065982487 2 2 1 +0.259711243117317 2 2 1 +0.476819420143709 2 2 1 +0.54147202807433 2 2 1 +0.294960782219479 2 2 1 +0.265064471822996 2 2 1 +0.268586719914729 2 2 1 +0.663075377720097 2 2 1 +0.443778226905662 2 2 1 +0.33084725716745 2 2 1 +0.45665127223439 2 2 1 +0.937763503430533 2 2 1 +1.25335333650901 2 2 1 +0.882712190513773 2 2 1 +0.292970601755812 2 2 1 +0.34861595618408 1 2 1 +0.278204853725307 2 2 1 +0.677235764875177 1 2 1 +1.08316216966813 2 2 1 +0.365942431227363 2 2 1 +0.305484095875439 2 2 1 +0.235624184994017 2 2 1 +0.398658337232737 2 2 1 +0.434318631638857 2 2 1 +0.355187848050399 1 2 1 +0.454420611999672 2 2 1 +0.198639116074042 2 2 1 +0.359080427717729 2 2 1 +1.74384487531936 2 2 1 +0.191245680862617 2 2 1 +0.783931590262253 2 2 1 +0.567473626830682 2 2 1 +0.45296456401947 2 2 1 +0.248317703257658 2 2 1 +0.699649264703012 2 2 1 +0.611361282985861 2 2 1 +0.941610741093518 1 2 1 +0.58654507029159 2 2 1 +0.371419809546963 2 2 2 +0.476259551913618 2 2 2 +0.31638619890226 2 2 2 +0.623249848667632 2 2 2 +0.241497913775783 2 2 2 +0.270061097781809 1 2 2 +0.293966968248291 1 2 2 +0.299480877137146 1 2 2 +0.576031229139046 2 2 2 +0.574375556532669 2 2 2 +0.484563476656291 2 2 2 +0.397331777673979 2 2 2 +0.501973158919077 2 2 2 +0.354716577854211 2 2 2 +0.608633700459432 2 2 2 +0.263227756156975 2 2 2 +0.812103027527017 2 2 2 +0.433996647919137 2 2 2 +0.516347639257426 2 2 2 +0.272600890706883 2 2 2 +0.286047555800837 2 2 2 +0.342521016193953 2 2 2 +0.443573882489982 1 2 2 +0.526526295854292 2 2 2 +0.372529192399058 1 2 2 +0.294310906384799 2 2 2 +0.964693955497906 1 2 2 +0.388120088040495 2 2 2 +0.292344532077584 2 2 2 +1.15960223884759 2 2 2 +0.405153223966 1 2 2 +0.354989409321607 2 2 2 +0.218636295771409 2 2 2 +0.517752319489056 2 2 2 +0.288570371961949 2 2 2 +0.557762064455683 2 2 2 +0.216932595356505 1 2 2 +0.586061017160239 2 2 2 +0.269412734371731 2 2 2 +0.622722702637302 1 2 2 +0.244923602459739 2 2 2 +0.685296064602417 2 2 2 +0.292439180652386 1 2 2 +0.26310281088841 1 2 2 +0.234853617821344 2 2 2 +0.505912235218254 2 2 2 +0.221940510664284 2 2 2 +0.204399980929288 2 2 2 +0.848247148946075 2 2 2 +0.652791317257383 2 2 2 +0.258633103875262 2 2 2 +0.66307312794457 2 2 2 +0.4115021817099 2 2 2 +0.61208665562783 2 2 2 +0.384453937267756 2 2 2 +0.661908613432912 1 2 2 +0.606234829188098 2 2 2 +1.29671884936154 2 2 2 +0.286389778612672 2 2 2 +0.44358020190157 2 2 2 +0.331248138375298 2 2 2 +0.699098366691914 2 2 2 +0.286835588654431 2 2 2 +0.27329751741887 2 2 2 +0.578226988740838 1 2 2 +0.411952465608936 2 2 2 +0.373654621521716 2 2 2 +0.51548587469447 1 2 2 +1.11290090946189 2 2 2 +0.61606619557625 1 2 2 +0.698620145254327 1 2 2 +0.213186526028619 1 2 2 +0.256304787769457 2 2 2 +0.203323922453708 2 2 2 +0.451141466550498 2 2 2 +0.432233770300206 1 2 2 +0.619352781461983 2 2 2 +0.858258004341018 2 2 2 +0.325535345759254 2 2 2 +0.284928261958504 2 2 2 +0.292632701103941 2 2 2 +0.448776353232441 2 2 2 +0.629255153926353 2 2 2 +0.263200495144972 2 2 2 +0.352588083053461 1 2 2 +0.236540541826709 2 2 2 +0.707959296574283 2 2 2 +1.28313325031642 2 2 2 +0.301751574718914 1 2 2 +0.561892223203863 1 2 2 
+0.282412604594248 2 2 2 +0.304262276952806 2 2 2 +0.407357953713203 2 2 2 +0.210707057789059 2 2 2 +0.560396081439257 2 2 2 +0.339009581511832 2 2 2 +0.996419746213449 2 2 2 +1.00183750288417 2 2 2 +1.32504284872589 2 2 2 +1.0721106707744 2 2 2 +0.377861404944634 2 2 2 +0.71900023167808 1 2 2 +0.319706843290023 2 2 2 +0.416610305545232 2 2 2 +0.206654488398495 2 2 2 +0.921766469149586 1 2 2 +0.658742909074791 2 2 2 +0.263893467268196 2 2 2 +0.190201252069023 1 2 2 +0.517337895143614 2 2 2 +0.224739734085673 2 2 2 +0.218898805354731 2 2 2 +0.717954990040875 2 2 2 +0.209228192652069 1 2 2 +0.256025079388851 1 2 2 +0.326258537383908 2 2 2 +0.689759693215715 2 2 2 +0.610908694182847 1 2 2 +0.337065226697079 2 2 2 +0.1870490728342 2 2 2 +0.299662174395397 2 2 2 +0.277717334862863 2 2 2 +1.23731761519909 2 2 2 +0.43474766698581 2 2 2 +0.557318058556568 2 2 2 +0.680232429047272 1 2 2 +0.322869377985879 2 2 2 +0.438605410585611 2 2 2 +0.87241634651293 2 2 2 +0.539311881419031 2 2 2 +0.475182882058131 2 2 2 +0.271154490775633 2 2 2 +0.385232918900933 2 2 2 +0.505906394481136 1 2 2 +0.442070078279938 1 2 2 +0.571547043533657 2 2 2 +0.655792477355547 2 2 2 +0.298499878396393 2 2 2 +0.193724242862765 2 2 2 +0.26006530791065 1 2 2 +0.356708786110689 1 2 2 +0.491543605775341 2 2 2 +0.393111774151399 2 2 2 +0.64026773631928 2 2 2 +1.21142909598262 2 2 2 +0.474157252918212 2 2 2 +0.262364047166446 1 2 2 +0.258812965103118 1 2 2 +0.348162908277828 2 2 2 +0.272495641205976 2 2 2 +0.312010686501704 2 2 2 +0.38306021754942 2 2 2 +0.661593514913509 2 2 2 +0.433229374187291 1 2 2 +0.361015067322576 2 2 2 +0.240000417220632 2 2 2 +0.354358867878031 1 2 2 +0.30397159906092 2 2 2 +0.678050772142903 2 2 2 +0.716435686835505 1 2 2 +0.688441301707592 2 2 2 +0.394328672411684 2 2 2 +0.46447905310017 2 2 2 +0.884150949192416 2 2 2 +0.248771015164462 2 2 2 +0.379182107844877 2 2 2 +1.29071049141673 2 2 2 +0.277475559903592 2 2 2 +0.428681740063866 2 2 2 +0.206857299277309 2 2 2 +0.669454595982171 2 2 2 +0.324919636356833 2 2 2 +0.395123689833804 1 2 2 +0.351936531306306 2 2 2 +0.55429721844539 2 2 2 +0.250263471266211 2 2 2 +0.372186767472496 1 2 2 +1.00061699085178 2 2 2 +1.09921193229266 2 2 2 +0.650060223521224 2 2 2 +0.232120071117234 2 2 2 +0.410458958763894 1 2 2 +0.26086224185435 2 2 2 +0.602658853022438 2 2 2 +0.282185336344145 2 2 2 +1.26553444840965 2 2 2 +0.382143185875273 2 2 2 +0.248988201311841 2 2 2 +0.982723892409823 2 2 2 +0.444303934998749 2 2 2 +0.64934036686621 1 2 2 +0.476803291197273 2 2 2 +0.385320489981782 2 2 2 +0.345492479856484 2 2 2 +0.422553263338974 2 2 2 +0.450135392979508 2 2 2 +0.18351739521214 2 2 2 +0.72669927106078 1 2 2 +0.375683539988626 1 2 2 +0.217452533729198 2 2 2 +0.664573219425088 2 2 2 +0.40675170248381 2 2 2 +0.687937270624779 2 2 2 +0.746504000572466 2 2 2 +0.315055664212289 2 2 2 +0.567678006192237 2 2 2 +0.35926586984242 2 2 2 +0.260726328355797 2 2 2 +0.420592363854161 1 2 2 +0.396878488001735 2 2 2 +0.25808424693846 1 2 2 +0.690268836967113 2 2 2 +0.829884430478792 2 2 2 +0.53272810901146 1 2 2 +1.01904300744411 2 2 2 +0.404198494338956 2 2 2 +0.406416489634305 2 2 2 +0.740461569878665 2 2 2 +0.397707721968874 2 2 2 +0.316960678261711 2 2 2 +0.393796380182408 2 2 2 +0.612468746065398 2 2 2 +0.45411966684652 1 2 2 +0.785721727701694 2 2 2 +0.9899071317292 2 2 2 +0.301479726835548 2 2 2 +0.202054252934703 1 2 2 +0.404304395375019 2 2 2 +0.405109216362114 2 2 2 +0.437782938427307 2 2 2 +0.323522608388588 2 2 2 +0.498830694826068 2 2 2 +0.37932833915468 2 2 2 
+0.566103976465953 2 2 2 +0.285143334962036 2 2 2 +0.331554043990072 1 2 2 +1.1056014894372 1 2 2 +0.526579897732621 1 2 2 +0.498630096551189 1 2 2 +0.302146695343523 2 2 2 +0.593188221646493 2 2 2 +0.875545940047122 1 2 2 +0.21829415545408 2 2 2 +1.27038332570518 2 2 2 +0.282958152993897 1 2 2 +0.316439713175455 2 2 2 +0.579173988441469 1 2 2 +0.444463360833938 2 2 2 +0.269250618007444 1 2 2 +0.470819079103018 2 2 2 +0.209668973816132 2 2 2 +0.458009773429269 2 2 2 +0.408503850676956 2 2 2 +0.27215890031715 2 2 2 +0.326582192001007 2 2 2 +1.06297741258528 1 2 2 +0.322713389608647 1 2 2 +0.457320344283686 2 2 2 +0.358703584766666 1 2 2 +0.266227903632889 2 2 2 +0.559988919421634 2 2 2 +0.286133456649917 2 2 2 +0.505411943083196 1 2 2 +0.525780093131127 1 2 2 +0.469126014876269 2 2 2 +1.04689087989818 2 2 2 +0.319875950338349 2 2 2 +0.250597639059042 1 2 2 +0.457613518448636 2 2 2 +0.387669523459911 1 2 2 +0.434913766029881 2 2 2 +0.48328427011083 2 2 2 +0.243610412662936 2 2 2 +0.342488023626944 2 2 2 +1.02638570164986 2 2 2 +1.63528669167027 2 2 2 +0.547318790274417 2 2 2 +0.440550940111696 1 2 2 +0.373207977309306 1 2 2 +0.4309907178462 2 2 2 +0.687697858349405 2 2 2 +0.285905993586428 1 2 2 +0.295218110682198 2 2 2 +0.650238504586291 2 2 2 +0.353180609354725 2 2 2 +0.336105599731412 2 2 2 +0.308137951395616 2 2 2 +0.347726332414955 1 2 2 +0.36943111917592 2 2 2 +0.281602615433194 2 2 2 +0.341345778831345 2 2 2 +0.467241317856716 2 2 2 +0.805429950125371 2 2 2 +0.235507874506382 1 2 2 +0.60030833998794 2 2 2 +0.676534495912984 2 2 2 +0.217928389514833 2 2 2 +1.05751503498892 2 2 2 +0.279644280745951 2 2 2 +0.658652885367294 2 2 2 +0.958304606178503 2 2 2 +0.346568405752533 1 2 2 +0.56248781541788 2 2 2 +0.209725804674779 2 2 2 +0.846930432147221 2 2 2 +0.330138876610716 1 2 2 +0.420408554089049 2 2 2 +0.620358503800179 2 2 2 +0.429248922416652 2 2 2 +0.382833067577587 2 2 2 +0.340311828954195 2 2 2 +0.389953896192246 2 2 2 +0.53663347892141 2 2 2 +0.49895548899123 2 2 2 +0.941153754879819 1 2 2 +0.507411654917284 2 2 2 +1.20160702354942 2 2 2 +0.478889238903742 1 2 2 +0.901438524126996 2 2 2 +0.918390742809495 2 2 2 +0.493350491230538 1 2 2 +0.52166085506493 2 2 2 +0.389420617429571 2 2 2 +0.270742745239298 1 2 2 +1.37088764103588 2 2 2 +0.727863288960697 2 2 2 +1.12836933231538 2 2 2 +0.386561751596785 2 2 2 +0.4309399047834 2 2 2 +0.614786852311502 2 2 2 +0.411950362176773 2 2 2 +1.78194623155386 2 2 2 +0.539225103492103 2 2 2 +1.02211318479885 2 2 2 +0.544572050183936 2 2 2 +0.584989507154119 2 2 2 +0.248709232375088 2 2 2 +0.641762262858976 2 2 2 +0.535282924615562 2 2 2 +0.329604040226998 2 2 2 +0.279268417344702 2 2 2 +0.233349948825794 2 2 2 +0.412932663530443 2 2 2 +0.415689266706035 1 2 2 +0.482166933803416 2 2 2 +0.893443627004475 2 2 2 +0.446076551783159 2 2 2 +0.843967533571949 2 2 2 +0.292399559523647 2 2 2 +0.638203851975096 2 2 2 +0.993243483944454 2 2 2 +0.268455934511667 2 2 2 +0.248177339713737 2 2 2 +0.317553308658084 2 2 2 +0.312268015328109 2 2 2 +0.340662839522388 2 2 2 +0.277346119406243 2 2 2 +0.583825795661779 1 2 2 +0.315409861504152 1 2 2 +1.07687410829711 2 2 2 +0.444325240814203 2 2 2 +0.387565615939017 2 2 2 +0.267788731901758 2 2 2 +0.311270050983203 1 2 2 +1.22802442335164 2 2 2 +0.853133012546484 2 2 2 +0.258818891608348 2 2 2 +0.545664728569855 1 2 2 +0.243040509115306 1 2 2 +0.616018319395203 2 2 2 +0.424267110499089 2 2 2 +0.663051122325687 2 2 2 +0.226946615302446 2 2 2 +0.523585890324027 2 2 2 +0.35581713292406 2 2 2 +0.266158931754381 1 2 2 
+0.720311139462917 1 2 2 +0.218118535654997 2 2 2 +0.821362912627226 1 2 2 +0.264989552139514 1 2 2 +0.511682799792117 2 2 2 +0.56125463965235 1 2 2 +0.973606014834926 2 2 2 +0.518075507295568 1 2 2 +0.272191894573665 2 2 2 +0.310819538858286 2 2 2 +0.368211947094363 2 2 2 +0.465474933911655 2 2 2 +0.561177518235567 1 2 2 +0.603470353749625 1 2 2 +0.334194497275073 2 2 2 +0.641942706323965 2 2 2 +0.356630874252134 2 2 2 +0.232871443126139 1 2 2 +0.333649448973833 2 2 2 +0.609159624944822 1 2 2 +1.45610468230462 2 2 2 +0.466354725792031 2 2 2 +0.304184098280551 1 2 2 +0.477141349721271 2 2 2 +0.303009331142076 2 2 2 +0.524109669978762 2 2 2 +0.43671698415524 2 2 2 +0.42787128143151 2 2 2 +1.08981029598678 1 2 2 +0.226054298232117 2 2 2 +0.383400885338281 2 2 2 +0.583586502122542 1 2 2 +0.778194753021581 2 2 2 +0.268775804780166 1 2 2 +0.502664308069978 2 2 2 +1.36226361256616 2 2 2 +0.335324976828731 2 2 2 +0.43949839083169 2 2 2 +0.432777757912515 2 2 2 +0.267162022023955 1 2 2 +0.879352118632758 2 2 2 +0.413066977072891 2 2 2 +0.421261106481918 2 2 2 +0.513682794838585 2 2 2 +0.232814503782821 2 2 2 +0.977943934285527 1 2 2 +0.278625342042981 2 2 2 +0.978405820837462 2 2 2 +0.43674850370889 2 2 2 +0.237684082723394 2 2 2 +0.244342880645148 2 2 2 +0.313124526412448 1 2 2 +0.244341553331277 2 2 2 +0.850477515504803 2 2 2 +0.346690313973946 2 2 2 +0.427706912439349 2 2 2 +0.383097524593988 1 2 2 +0.672506676903199 2 2 2 +0.325668111506743 2 2 2 +0.501131325233736 2 2 2 +0.240168060476825 2 2 2 +0.235178051076048 2 2 2 +0.47616856065887 2 2 2 +0.262979002698665 1 2 2 +0.526351217536873 2 2 2 +0.337727201047472 2 2 2 +0.255335801167391 2 2 2 +0.382811211430241 2 2 2 +0.526518217287997 2 2 2 +0.212982195364599 2 2 2 +0.197639872379599 2 2 2 +0.401129269762392 2 2 2 +2.19377506417666 2 2 2 +0.254835580976153 2 2 2 +0.635043789020716 2 2 2 +0.603192459522677 1 2 2 +0.780119188280459 1 2 2 +0.387445357368451 2 2 2 +0.352999715984171 2 2 2 +0.215260154150075 2 2 2 +0.438343141309741 1 2 2 +0.383351790215377 2 2 2 +0.290180848854179 2 2 2 +0.30327695172642 2 2 2 +1.16042778280559 2 2 2 +0.223187293483549 2 2 2 +0.699562632635369 2 2 2 +0.469536665963427 2 2 2 +0.614760127368693 1 2 2 +0.250342025167407 2 2 2 +0.192874356525872 1 2 2 +0.246314460692576 2 2 2 +0.37206273201885 2 2 2 +0.206582351239156 2 2 2 +0.534304707606674 2 2 2 +0.343229958652054 2 2 2 +0.777141268031828 2 2 2 +0.209204354796177 2 2 2 +0.897870484588665 1 2 2 +0.484064308449244 2 2 2 +0.309753836749031 2 2 2 +0.420217764048858 2 2 2 +0.564902124329689 2 2 2 +0.393733362161397 1 2 2 +0.266506772096653 2 2 2 +0.28962567887685 2 2 2 +0.528206137761708 2 2 2 +0.301389722137156 1 2 2 +0.223917441106893 2 2 2 +0.779205007871088 1 2 2 +0.218027803393641 2 2 2 +0.288022434145067 2 2 2 +0.303093997774882 1 2 2 +0.390286981959269 2 2 2 +0.464770428369033 2 2 2 +1.73960719684067 2 2 2 +0.248026714745345 2 2 2 +0.36728769019827 2 2 2 +0.596680154881044 1 2 2 +0.383715207146668 2 2 2 +0.376823540619621 2 2 2 +0.685403427627866 1 2 2 +0.441814166283547 2 2 2 +0.493957818252071 2 2 2 +1.09158112036438 2 3 1 +0.303214556399873 2 3 1 +0.546502981537546 1 3 1 +0.217431965663716 2 3 1 +0.268170730263647 2 3 1 +0.346976002816777 2 3 1 +0.229324148845003 1 3 1 +0.816902188437 1 3 1 +0.294576387592954 2 3 1 +0.633680684608576 1 3 1 +1.72847500258562 2 3 1 +0.296884162972746 2 3 1 +0.382321634766408 1 3 1 +0.763284418821156 1 3 1 +0.434224234531732 1 3 1 +1.19271589875813 1 3 1 +0.377781463528736 2 3 1 +0.79392960381382 2 3 1 +0.282338627630539 2 3 1 
+0.327667168963983 2 3 1 +0.364638540293463 2 3 1 +0.690522801097352 1 3 1 +0.278680481396497 2 3 1 +1.52418840340842 2 3 1 +0.311969951458862 1 3 1 +0.317776650235954 2 3 1 +0.423691550838739 1 3 1 +0.671006456874602 1 3 1 +0.877883789761534 2 3 1 +0.302971325325345 1 3 1 +0.671517638524883 2 3 1 +1.11390681916392 2 3 1 +0.46388915584611 2 3 1 +0.439407447713224 2 3 1 +0.304368717367806 1 3 1 +0.598571664636264 2 3 1 +0.293087082176115 1 3 1 +0.88569612171942 1 3 1 +0.250899042539296 1 3 1 +0.357059055876667 2 3 1 +1.18166158962524 2 3 1 +0.470682360024002 2 3 1 +0.894894450156942 1 3 1 +0.435735118013038 2 3 1 +0.30295075552671 2 3 1 +0.470570639524463 2 3 1 +0.23748433775057 2 3 1 +0.360451685172226 1 3 1 +0.441474734419253 2 3 1 +0.586503558927763 2 3 1 +0.489843937397201 1 3 1 +0.466272618907063 2 3 1 +0.614130590008736 1 3 1 +0.328854179555165 1 3 1 +1.00309638651768 1 3 1 +0.616986168975414 2 3 1 +0.708134443160147 1 3 1 +0.187898870895148 1 3 1 +0.54082217240692 2 3 1 +1.57411391072384 1 3 1 +0.4493227844752 1 3 1 +0.713892489238243 2 3 1 +0.342186658762456 1 3 1 +1.02117655005718 1 3 1 +0.683440987874987 2 3 1 +0.423935184637998 1 3 1 +0.973133914601076 1 3 1 +0.38306019074276 2 3 1 +0.616703039430407 2 3 1 +0.424716691275681 2 3 1 +0.571910388646059 2 3 1 +0.321910647628946 2 3 1 +0.279467364732086 1 3 1 +0.511770823160077 1 3 1 +0.448839994649654 1 3 1 +0.408993038286618 2 3 1 +0.384671509573393 1 3 1 +0.59475727306752 2 3 1 +0.248904553159929 2 3 1 +0.730389140239337 2 3 1 +0.919037835604557 1 3 1 +0.264109362057892 1 3 1 +0.989121286812907 1 3 1 +0.828041122491036 1 3 1 +0.266898502330599 2 3 1 +0.254034221622117 2 3 1 +1.11127936853007 2 3 1 +0.877022707380551 2 3 1 +0.497101983703828 2 3 1 +0.55789237566765 2 3 1 +0.759066137604798 2 3 1 +0.346006778408851 2 3 1 +0.258561043266019 2 3 1 +0.934282414397615 2 3 1 +0.328642052298081 2 3 1 +0.550227936556864 1 3 1 +0.376633857594849 1 3 1 +0.304037777411272 2 3 1 +0.431708301902904 1 3 1 +0.349019174949225 1 3 1 +0.920298316488849 2 3 1 +0.39826548226189 1 3 1 +0.725829444504715 2 3 1 +0.588644591374367 1 3 1 +0.246128207487776 1 3 1 +1.11590498746582 2 3 1 +0.294876994112035 2 3 1 +0.641111356601665 1 3 1 +0.508053986081123 1 3 1 +0.771118458382302 1 3 1 +0.285699501579415 1 3 1 +1.02462070103652 1 3 1 +0.272825662912259 2 3 1 +0.484400350353985 2 3 1 +0.484569939314766 2 3 1 +0.531386883569837 1 3 1 +0.410632364466612 1 3 1 +0.529817458600413 2 3 1 +1.13935537418794 2 3 1 +0.2756098273084 1 3 1 +0.578175605563475 1 3 1 +1.05572643035039 2 3 1 +0.404530205629778 2 3 1 +0.68459004427736 2 3 1 +0.512697405878432 2 3 1 +0.429332115307925 2 3 1 +0.295864219583054 2 3 1 +0.619738889875145 2 3 1 +0.574787509981818 2 3 1 +0.25342580412108 2 3 1 +0.510997220464868 1 3 1 +0.314205597309042 2 3 1 +0.445509182794708 2 3 1 +1.21240465641764 2 3 1 +0.394946660382341 2 3 1 +0.452801112877752 2 3 1 +0.403350637136158 2 3 1 +0.576985676386101 2 3 1 +0.389264776452976 1 3 1 +0.823295103130808 1 3 1 +0.463468613723993 1 3 1 +0.245377944795518 2 3 1 +0.996044085392399 2 3 1 +0.690423137827953 2 3 1 +0.663845653189127 2 3 1 +0.459849111784745 2 3 1 +1.17832890976462 2 3 1 +1.71465565607573 2 3 1 +0.559470318252231 2 3 1 +1.62127201031263 2 3 1 +1.75932882254012 1 3 1 +0.810068975707212 2 3 1 +0.32567798881547 2 3 1 +0.405265927230293 2 3 1 +0.312319795786779 2 3 1 +0.664164798713009 2 3 1 +0.972174600565453 1 3 1 +0.350736426389176 1 3 1 +0.464183487885217 1 3 1 +0.979942810598283 2 3 1 +0.312621099364353 1 3 1 +0.321946657262611 2 3 1 
+0.662512744175165 1 3 1 +0.265782966766695 2 3 1 +0.554547549403016 1 3 1 +0.670230788357581 2 3 1 +0.545148391569713 2 3 1 +0.3944126912798 2 3 1 +0.350245544303979 2 3 1 +0.447316724864116 2 3 1 +0.494291506086329 2 3 1 +0.294165307093089 2 3 1 +0.600045380632821 2 3 1 +0.653173135952646 2 3 1 +0.802716451437717 1 3 1 +0.425486199464103 1 3 1 +0.567162388988331 2 3 1 +0.216466918462054 2 3 1 +0.274646226936591 1 3 1 +0.27952911656832 2 3 1 +0.310673451915856 2 3 1 +1.85814147833547 2 3 1 +0.410157918175516 2 3 1 +0.422501872163458 1 3 1 +0.47177034302856 1 3 1 +0.371140269829411 2 3 1 +0.595311459484279 2 3 1 +1.50368663175704 1 3 1 +1.01238877285822 1 3 1 +0.330032112990162 2 3 1 +0.803689764137746 1 3 1 +1.11335733385196 2 3 1 +1.77758737390525 2 3 1 +0.960114443062577 1 3 1 +0.459716792002552 2 3 1 +1.48363248910813 2 3 1 +0.52360780431933 2 3 1 +0.377016971697135 2 3 1 +1.48796279286523 2 3 1 +1.5584173576682 2 3 1 +0.477222600875173 2 3 1 +0.806192776105325 2 3 1 +1.22714760697165 2 3 1 +0.285202509776337 2 3 1 +0.353998885887131 1 3 1 +0.614217926473409 2 3 1 +0.677981366002188 1 3 1 +1.39319663705638 2 3 1 +1.34011509916811 2 3 1 +0.736100370936723 2 3 1 +0.600109865280918 2 3 1 +1.28500130472893 1 3 1 +1.36640296134039 2 3 1 +0.801718417343498 2 3 1 +0.529860706811969 2 3 1 +0.389318546329582 2 3 1 +0.535588867826002 2 3 1 +0.920404797918709 2 3 1 +0.560133933106455 2 3 1 +0.239473881876335 1 3 1 +0.473860966075698 1 3 1 +0.230702658461256 1 3 1 +1.73611605008423 2 3 1 +0.497233574120993 2 3 1 +1.52778704234278 2 3 1 +1.02496145425289 2 3 1 +0.715732898893069 2 3 1 +0.499095116872872 2 3 1 +0.638470641059599 2 3 1 +0.352348027353716 1 3 1 +0.309076440163353 2 3 1 +0.351142175060277 2 3 1 +0.342163837984379 2 3 1 +0.796243815020877 1 3 1 +0.235901518677146 2 3 1 +0.848976396909524 2 3 1 +0.986793834419597 2 3 1 +1.52074724886378 2 3 1 +0.364767724102028 1 3 1 +1.12083572206547 2 3 1 +0.230356527206167 1 3 1 +0.472472696293136 2 3 1 +0.83628255017543 1 3 1 +2.45314242351987 2 3 1 +0.272291207710981 2 3 1 +0.944208776627134 2 3 1 +0.331972697221215 2 3 1 +0.240983354619397 2 3 1 +0.26997177771289 2 3 1 +0.920385413917779 1 3 1 +0.510927528682238 2 3 1 +0.287553521793071 2 3 1 +2.17875010817382 2 3 1 +0.292020199007728 2 3 1 +0.358620202445595 2 3 1 +1.36196670709987 2 3 1 +0.532103665995839 2 3 1 +1.78987248486592 1 3 1 +0.723800730925627 2 3 1 +0.389487868366274 2 3 1 +0.523651645920756 2 3 1 +0.564094358706312 2 3 1 +0.438071614397111 1 3 1 +0.396815552735571 1 3 1 +0.835877625163203 2 3 1 +0.960913558586309 2 3 1 +0.38021035653061 2 3 1 +0.637286675900738 1 3 1 +0.290784805544286 2 3 1 +0.568957565244384 2 3 1 +0.236438662927156 2 3 1 +1.10987563109661 2 3 1 +0.394257199940267 2 3 1 +0.941426455590548 1 3 1 +1.16694900264559 2 3 1 +0.449530936018223 1 3 1 +1.30291521810678 2 3 1 +0.663336993076141 2 3 1 +0.412620028111287 1 3 1 +0.213769081676035 1 3 1 +1.14008562289037 2 3 1 +0.642154856872125 2 3 1 +1.12186732245763 2 3 1 +0.530059942824884 2 3 1 +0.743562690339846 2 3 1 +0.467733874019439 1 3 1 +0.347897157855929 1 3 1 +0.271346908743046 2 3 1 +1.64808023049025 1 3 1 +0.873213094661973 1 3 1 +0.36045322327288 2 3 1 +0.415893829939983 2 3 1 +0.263744233102411 2 3 1 +0.540852200357253 2 3 1 +1.27190438964105 2 3 1 +0.692221005703411 2 3 1 +0.885861728476599 2 3 1 +0.465274050871376 1 3 1 +0.446986860614239 2 3 1 +1.14339397523192 1 3 1 +0.698229667938408 1 3 1 +0.545774956181041 2 3 1 +0.252737225149388 1 3 1 +0.956242959384857 1 3 1 +0.559955447458839 1 3 1 +0.321668964016761 
1 3 1 +0.495953400106333 1 3 1 +0.435695907294935 2 3 1 +1.29125035927742 1 3 1 +1.14445342406167 2 3 1 +0.303314792589389 1 3 1 +0.550870572581426 1 3 1 +0.589452519460692 1 3 1 +0.793764837082831 2 3 1 +1.15478617130203 2 3 1 +0.423154299941937 2 3 1 +0.441625445567769 1 3 1 +1.04879230934071 1 3 1 +0.328428851869649 2 3 1 +0.679231844674899 2 3 1 +1.15492451938846 2 3 1 +1.08528509664462 1 3 1 +0.483072408512607 2 3 1 +1.64223021381801 2 3 1 +0.51690071016677 1 3 1 +0.912126868764157 2 3 1 +0.628163734423868 1 3 1 +0.698176751617721 1 3 1 +0.333876511447483 2 3 1 +0.479648257326482 2 3 1 +0.387197092688304 2 3 1 +0.692552401206789 2 3 1 +0.842058155042385 2 3 1 +0.71750243288607 1 3 1 +0.305258214294853 1 3 1 +0.443541182282758 2 3 1 +1.57795342301233 2 3 1 +0.849830671160219 2 3 1 +2.64336117374313 2 3 1 +0.602149680031763 1 3 1 +0.494502380433554 1 3 1 +0.301592730343628 2 3 1 +0.838419470710967 1 3 1 +0.667334464560839 2 3 1 +1.50007402393947 2 3 1 +0.389803326920504 2 3 1 +0.882528306696846 2 3 1 +0.372982160615418 2 3 1 +0.349280124360808 1 3 1 +0.512012574758371 2 3 1 +1.17220110413599 2 3 1 +0.414969949251035 2 3 1 +1.63347309320552 1 3 1 +0.303593516632502 2 3 1 +0.4186484600358 2 3 1 +0.339028778633365 2 3 1 +0.717224138012012 2 3 1 +0.779291021701807 2 3 1 +0.561060495047965 2 3 1 +1.80155372469456 2 3 1 +0.350666104484292 2 3 1 +1.75384028882697 2 3 1 +0.34614655108637 2 3 1 +0.857888702654049 2 3 1 +0.538538273376274 1 3 1 +0.808165431176924 2 3 1 +1.12566956065676 2 3 1 +0.401747844392863 1 3 1 +0.420638560385403 1 3 1 +0.689567547922525 1 3 1 +0.687101320313498 2 3 1 +1.11450021231709 1 3 1 +1.05673194424108 2 3 1 +0.378493955443519 1 3 1 +0.374806303246874 2 3 1 +0.605170645685489 1 3 1 +0.568600361954804 1 3 1 +0.279564048875058 2 3 1 +0.737344841204778 1 3 1 +0.383626489427317 1 3 1 +0.236592626403799 2 3 1 +0.606395313320835 2 3 1 +0.258446694712414 1 3 1 +0.667654851854366 1 3 1 +0.709849063285861 2 3 1 +1.06652304155128 2 3 1 +0.375309922256882 1 3 1 +0.305631875482354 2 3 1 +1.00431113766878 2 3 1 +0.731812037602777 1 3 1 +0.414232775562026 2 3 1 +0.826500384443797 2 3 1 +0.719116830057354 1 3 1 +0.61855729451436 2 3 1 +0.399444901441129 1 3 1 +0.2295675381027 2 3 1 +0.361734141102366 2 3 1 +0.928229131678883 1 3 1 +0.767105903673052 2 3 1 +0.312954135574329 2 3 1 +0.316125600053679 2 3 1 +0.83356862967393 2 3 1 +0.807805288376209 2 3 1 +0.529747082048339 2 3 1 +0.579438577427002 2 3 1 +0.748413890954428 2 3 1 +0.282714299825272 2 3 1 +1.32349690412234 2 3 1 +0.265264643396819 2 3 1 +1.05199545660803 1 3 1 +0.354844761549886 2 3 1 +0.673382389127187 2 3 1 +0.353927127140679 2 3 1 +0.742708866815784 2 3 1 +0.32440131369228 1 3 1 +1.15424939546308 2 3 1 +0.341988033892061 2 3 1 +0.385901020315423 2 3 1 +0.286688838450355 2 3 1 +0.862986046754551 1 3 1 +0.362564960443358 2 3 1 +0.618539574023911 1 3 1 +0.618839114124994 1 3 1 +0.351742202769926 2 3 1 +1.04713435904685 2 3 1 +1.13219276272773 1 3 1 +0.431897141646835 2 3 1 +0.903368625643694 2 3 1 +0.845773237970464 1 3 1 +0.448267685056844 1 3 1 +0.227965869389189 2 3 1 +0.727189656817706 1 3 1 +0.920965873390772 2 3 1 +2.86369573335364 2 3 1 +1.79113426784109 2 3 1 +0.434371557096156 1 3 1 +0.297049698054049 2 3 1 +0.908700456343662 2 3 1 +0.840014957530216 1 3 1 +0.391451573590056 2 3 1 +0.964762751519269 2 3 1 +1.97499804009749 1 3 1 +1.58759823353077 2 3 1 +2.10229181480408 1 3 1 +0.573075746749156 2 3 1 +0.87229795094926 2 3 1 +0.764360676764928 1 3 1 +0.236433389533537 2 3 1 +0.500319361165157 2 3 1 
+0.436148207342909 1 3 1 +1.17237545076077 2 3 1 +0.305589800257361 2 3 1 +0.269400199640921 2 3 1 +1.35212659081556 1 3 1 +0.377929769476066 2 3 1 +0.899332425662463 1 3 1 +1.00394626493931 2 3 1 +1.10094408622287 2 3 1 +1.18830151405781 2 3 1 +0.284872387153195 2 3 1 +0.375024301973256 1 3 1 +0.404469325185188 1 3 1 +0.727775813795801 2 3 1 +0.252419279794447 2 3 1 +0.375032470671666 2 3 1 +0.309265085564879 2 3 1 +0.462609251209814 2 3 1 +0.320190368690629 1 3 1 +0.477686210715907 1 3 1 +0.275346713714152 2 3 1 +0.438717701647585 2 3 1 +0.810110401646601 2 3 1 +1.07883036639961 2 3 1 +0.809608091311169 2 3 1 +0.418153700722572 1 3 1 +0.764475113911811 1 3 1 +0.412993008145338 2 3 1 +1.335962806722 1 3 1 +0.875235294298827 1 3 1 +0.326172123137794 2 3 1 +0.231731510095046 2 3 1 +0.914448514006612 1 3 1 +0.550916483189837 2 3 1 +0.880466208259979 2 3 1 +0.601176005143088 2 3 1 +1.43879123063957 2 3 1 +0.621745991817644 2 3 1 +0.3528215159095 2 3 1 +0.30994437648555 1 3 1 +0.90362627319135 2 3 1 +0.903886767560117 2 3 1 +0.835640862308006 2 3 1 +1.20726153384552 2 3 1 +0.653989199174602 2 3 1 +0.915035948130758 2 3 1 +0.726052417728461 2 3 1 +1.01911267691402 2 3 1 +0.352826644011026 2 3 1 +0.440580677663477 1 3 1 +0.999490854549375 1 3 1 +0.514934236463869 1 3 2 +0.270420118537311 2 3 2 +0.646471943779092 2 3 2 +0.338830766035059 2 3 2 +0.491671962582901 2 3 2 +0.629886758393846 2 3 2 +0.51589107754444 2 3 2 +0.353946635128139 2 3 2 +0.331203176196343 1 3 2 +1.99524093298412 2 3 2 +1.02440243540096 2 3 2 +0.282898155808958 2 3 2 +0.599827056733371 2 3 2 +0.238817716879006 2 3 2 +0.206191544000187 1 3 2 +0.855183187835193 2 3 2 +0.722927152841454 2 3 2 +0.780157089830913 2 3 2 +0.509420971748398 2 3 2 +0.96103805001364 2 3 2 +0.493670434412268 1 3 2 +0.214424723176226 2 3 2 +0.392688836409781 1 3 2 +0.658620383209045 2 3 2 +0.216107419356536 2 3 2 +0.57723401544534 2 3 2 +0.527348367007325 1 3 2 +0.473776142345069 2 3 2 +0.424350872006699 2 3 2 +2.16246776879602 1 3 2 +0.579491048291868 2 3 2 +0.317300006903978 1 3 2 +1.25002685289334 1 3 2 +0.322077006208459 2 3 2 +0.65598919200563 1 3 2 +1.4631363964763 2 3 2 +1.68342497778485 1 3 2 +0.950063435678861 1 3 2 +0.205802941406673 2 3 2 +0.442509433023036 1 3 2 +0.818174077396608 2 3 2 +0.286175835912647 2 3 2 +0.33842480954584 2 3 2 +1.38048451044948 2 3 2 +0.39988067006981 1 3 2 +0.253643662104766 2 3 2 +0.560976394476593 2 3 2 +0.517941831953703 1 3 2 +0.487317274258782 1 3 2 +0.722089923618615 2 3 2 +0.468435888325438 2 3 2 +0.673049975899187 2 3 2 +0.404615085266369 2 3 2 +0.337034159076431 2 3 2 +0.435942135312604 2 3 2 +0.369014730704062 2 3 2 +0.567411102529725 1 3 2 +0.683010310238025 2 3 2 +0.290243271494673 2 3 2 +0.316097101922202 1 3 2 +1.26062843312539 2 3 2 +0.511300941444236 2 3 2 +2.2216549447733 2 3 2 +0.6322594359434 2 3 2 +1.05478582553533 2 3 2 +0.435637851164182 1 3 2 +0.468027792640505 2 3 2 +0.420219455300886 1 3 2 +0.273929740512875 1 3 2 +0.793839005062366 2 3 2 +0.700039266476368 2 3 2 +0.698393675491842 2 3 2 +0.320657243989843 1 3 2 +1.16100839379935 1 3 2 +0.64693140923479 1 3 2 +0.512516768633988 2 3 2 +0.742023676531162 2 3 2 +0.972058194099057 2 3 2 +0.398653703474479 1 3 2 +0.410331563672334 2 3 2 +0.222283100080092 1 3 2 +0.469355586816016 1 3 2 +1.19333537174541 2 3 2 +0.307250192296768 2 3 2 +0.818587555385279 1 3 2 +0.236745366451503 2 3 2 +0.881972502593121 2 3 2 +0.277399097478032 2 3 2 +0.522816700136251 1 3 2 +1.03885756789198 2 3 2 +0.423490173116203 2 3 2 +0.603714429771403 1 3 2 +0.343602016522983 
2 3 2 +1.06315814327229 2 3 2 +1.64394374580309 2 3 2 +0.258950271624856 2 3 2 +0.839767784808717 1 3 2 +0.422102182085427 2 3 2 +1.19755245402792 2 3 2 +0.36311410858205 1 3 2 +1.32548841200299 2 3 2 +1.25935619927138 2 3 2 +1.94209928030842 2 3 2 +0.373060834025449 2 3 2 +0.3101485804146 2 3 2 +0.533892648348211 2 3 2 +0.744784639587278 2 3 2 +1.18381744027858 2 3 2 +0.240744324843731 2 3 2 +0.946050579944945 1 3 2 +0.627753953881302 1 3 2 +0.370445161933509 1 3 2 +0.300650302064735 1 3 2 +0.373459020239413 2 3 2 +0.430948400866446 1 3 2 +1.49574317491573 2 3 2 +1.3101827626935 2 3 2 +1.67262325791643 1 3 2 +0.241993699720061 2 3 2 +0.328265869000834 2 3 2 +0.384015136641366 1 3 2 +0.217469612226687 2 3 2 +0.353910918208547 1 3 2 +1.26306667028009 2 3 2 +0.283701100770668 2 3 2 +0.442696536960043 1 3 2 +1.23508427666644 2 3 2 +0.211641858949563 2 3 2 +1.31762978216566 2 3 2 +0.427189324868075 1 3 2 +0.427280548495511 2 3 2 +1.48538078369227 1 3 2 +0.628708315466251 2 3 2 +1.78361132590903 2 3 2 +0.239589904781277 1 3 2 +0.562274806702486 2 3 2 +0.721667014209347 1 3 2 +0.659149048175133 2 3 2 +0.829239328543952 2 3 2 +1.02272830279552 2 3 2 +0.671890699523076 2 3 2 +1.72636732645653 1 3 2 +0.615737367953486 2 3 2 +1.07678479171103 1 3 2 +0.538623051607739 2 3 2 +1.39867639023568 1 3 2 +0.218925904964068 2 3 2 +0.240724605293016 2 3 2 +0.452439100915242 2 3 2 +0.694089462161358 2 3 2 +1.84709801261543 2 3 2 +0.564582100435542 2 3 2 +0.725448687770764 2 3 2 +0.30708178683197 1 3 2 +0.47535866323473 2 3 2 +0.829935945880499 2 3 2 +0.90098748577001 2 3 2 +0.321123642659435 2 3 2 +0.295296832786178 2 3 2 +0.312203918236697 2 3 2 +0.967729509776647 1 3 2 +1.5438301792962 2 3 2 +0.493390293618284 2 3 2 +0.799464309749491 2 3 2 +0.492248525373071 1 3 2 +0.876464392051748 1 3 2 +0.914622363311664 2 3 2 +0.939704807763569 1 3 2 +0.901114296634733 1 3 2 +2.83967944372257 1 3 2 +0.633623902249606 1 3 2 +0.590830718948575 2 3 2 +0.644050309060636 2 3 2 +0.618329617213195 1 3 2 +0.406935681035188 2 3 2 +0.680935599818192 2 3 2 +0.689990604040213 2 3 2 +0.234441659901205 1 3 2 +0.628614985132006 2 3 2 +0.603104314434233 1 3 2 +0.458166460126185 2 3 2 +0.5410221389939 2 3 2 +0.806727630952272 1 3 2 +0.74995864564573 2 3 2 +0.925796642818387 2 3 2 +1.48881282804597 2 3 2 +1.32567553513673 2 3 2 +0.720300933060513 1 3 2 +0.845676771367771 2 3 2 +1.03943195815275 2 3 2 +0.269814661026592 2 3 2 +0.303313872334609 1 3 2 +0.668337346160446 1 3 2 +0.802759823174443 1 3 2 +1.0891459476003 1 3 2 +0.524162399076158 2 3 2 +0.843176928462498 2 3 2 +0.378654464972701 2 3 2 +0.37200187900001 2 3 2 +1.24108370520966 2 3 2 +0.407967861247184 1 3 2 +0.385604160482279 2 3 2 +1.37807312575617 2 3 2 +1.6236739129127 2 3 2 +0.299498250287133 2 3 2 +1.50283781972101 2 3 2 +0.425002443191797 2 3 2 +0.793010798627147 1 3 2 +0.443607069543917 2 3 2 +0.553450577754003 2 3 2 +0.856593591339924 2 3 2 +0.525593955394394 2 3 2 +0.896348840912902 2 3 2 +2.32094781645657 2 3 2 +0.343409204205168 2 3 2 +2.55188788674935 1 3 2 +0.59347479355615 2 3 2 +1.14133837346965 1 3 2 +0.769316859974066 1 3 2 +0.553345822537459 1 3 2 +0.280403390069466 1 3 2 +0.351752315403098 2 3 2 +0.464937188483399 1 3 2 +0.498643366432877 2 3 2 +1.20768606813256 2 3 2 +0.810552918965932 2 3 2 +0.814572293196432 2 3 2 +1.12466835158388 1 3 2 +0.605892652281983 2 3 2 +0.718062408112775 1 3 2 +1.43756814834945 2 3 2 +0.434523586879683 2 3 2 +1.1331064813251 2 3 2 +0.347724946096093 2 3 2 +0.493607135881693 1 3 2 +0.67651193182361 1 3 2 +0.552022281319177 1 3 2 
+0.321142527517636 2 3 2 +0.597883853359763 1 3 2 +0.895001662908153 1 3 2 +0.48578933838309 1 3 2 +1.68093599717435 1 3 2 +0.70886900345093 2 3 2 +0.366715507465807 2 3 2 +0.229193753495571 1 3 2 +0.456535243655345 2 3 2 +0.96352225520781 1 3 2 +0.744795713668557 2 3 2 +0.469577483093902 2 3 2 +0.32463756963168 1 3 2 +0.718346029022071 2 3 2 +1.29491659083823 2 3 2 +0.461155144420931 2 3 2 +0.5798492161351 2 3 2 +1.05582449803871 2 3 2 +0.28481585650096 2 3 2 +0.410730259856026 2 3 2 +1.60060704532405 2 3 2 +0.534513028770362 2 3 2 +0.290600969494565 1 3 2 +0.508134824209082 2 3 2 +0.733677137815329 1 3 2 +0.524546051284814 2 3 2 +0.626615353700544 2 3 2 +0.207338061930642 2 3 2 +2.49191138211878 1 3 2 +0.446516299324413 2 3 2 +0.568970465239259 2 3 2 +1.30104078909728 2 3 2 +0.573721490299906 2 3 2 +0.395039645429215 2 3 2 +0.868269050301906 2 3 2 +1.39110795542721 2 3 2 +0.732569267335244 2 3 2 +0.346330122949029 2 3 2 +0.28247517183272 2 3 2 +0.339777576105551 2 3 2 +0.709070836414342 2 3 2 +0.741775977804924 2 3 2 +1.3105501266272 2 3 2 +0.650809742033691 1 3 2 +1.21685252483908 2 3 2 +1.31226952331015 2 3 2 +0.671641212759192 2 3 2 +2.60476050728261 1 3 2 +0.423290539217436 1 3 2 +0.747391695995648 2 3 2 +0.415214528560093 2 3 2 +0.732547499620541 2 3 2 +1.11203310275713 1 3 2 +0.310793178630083 1 3 2 +0.532873467984992 2 3 2 +0.24543893048753 2 3 2 +0.262637774460857 2 3 2 +0.638852263528672 2 3 2 +0.268251093022516 2 3 2 +0.754730587787048 1 3 2 +0.304771345055942 2 3 2 +0.600949799200535 2 3 2 +0.454921964270315 2 3 2 +0.291060243483869 2 3 2 +0.850896084981839 2 3 2 +1.02404745430124 2 3 2 +0.740373725034996 2 3 2 +0.697249433946795 2 3 2 +3.22861057623448 2 3 2 +0.614917615221698 2 3 2 +0.417940802999645 2 3 2 +0.428130364139945 1 3 2 +0.330671065628431 2 3 2 +0.62670607061658 2 3 2 +1.26038076404455 2 3 2 +0.322284823454811 2 3 2 +0.429191549101784 2 3 2 +0.455247274613782 2 3 2 +0.663368332878807 2 3 2 +0.305769717251401 2 3 2 +1.50140899013577 1 3 2 +1.45389192339163 2 3 2 +2.98105925544205 2 3 2 +1.04255718339312 2 3 2 +0.942508435934038 2 3 2 +2.787892838843 2 3 2 +0.840734403641314 2 3 2 +0.489925811963693 2 3 2 +1.8552326561657 2 3 2 +0.38632322022465 1 3 2 +0.256936541763573 2 3 2 +0.766126754945232 1 3 2 +0.451147102611256 2 3 2 +0.268349419782926 2 3 2 +0.284282929689453 2 3 2 +0.71369157188727 1 3 2 +0.984328464038398 1 3 2 +0.391254858951931 2 3 2 +0.473609040280498 1 3 2 +0.392371474332231 1 3 2 +0.512961553349468 2 3 2 +0.49196894137911 2 3 2 +1.98481178504207 2 3 2 +0.335597786997023 1 3 2 +0.951871051474144 2 3 2 +0.43773254271624 2 3 2 +0.759326775665626 1 3 2 +0.964163992380983 2 3 2 +0.243214557688043 2 3 2 +1.06020924390952 2 3 2 +0.624129170951697 2 3 2 +0.631213229469376 2 3 2 +1.22749512645753 1 3 2 +1.4544220968578 2 3 2 +0.704504240730269 2 3 2 +0.358484343002385 2 3 2 +0.550361017867011 2 3 2 +0.300375078256161 2 3 2 +0.739678180371973 1 3 2 +0.646734220557972 1 3 2 +0.777682254344784 2 3 2 +0.87982617759661 2 3 2 +0.418244912065538 2 3 2 +0.443192766363974 2 3 2 +0.61563753996371 1 3 2 +1.56614219537768 2 3 2 +2.65065478085341 2 3 2 +0.305863823661165 2 3 2 +0.954934661408583 2 3 2 +0.449191627811582 1 3 2 +0.793807602132907 2 3 2 +0.519871565006984 1 3 2 +0.293207709999379 1 3 2 +0.634867211065706 1 3 2 +0.469994277604704 2 3 2 +0.343616111556125 2 3 2 +0.23004178016569 1 3 2 +0.335931900338173 2 3 2 +0.743164799438406 2 3 2 +0.582587466556771 2 3 2 +0.412610130745763 2 3 2 +0.303143772129072 2 3 2 +1.28143560920008 2 3 2 +1.17554195841916 1 3 2 
+0.656531128634536 2 3 2 +0.29995775158261 1 3 2 +0.89568315126836 2 3 2 +1.30618389742279 2 3 2 +3.0496460415615 2 3 2 +0.743633550842162 2 3 2 +0.976245680087169 2 3 2 +0.376124867059714 2 3 2 +0.537516104244315 2 3 2 +0.212538006220838 2 3 2 +0.571699948650591 2 3 2 +0.658537447407892 2 3 2 +0.409180121003016 2 3 2 +0.38453187413556 2 3 2 +1.39971354422791 2 3 2 +1.48526501316284 2 3 2 +0.765450963277084 2 3 2 +0.45683796523625 2 3 2 +0.49455927446116 2 3 2 +0.565813339958499 2 3 2 +0.51579075751888 2 3 2 +0.579482217533706 1 3 2 +0.464431057746426 2 3 2 +0.33385297359306 1 3 2 +0.237785587972524 2 3 2 +0.788856968485086 2 3 2 +0.247738584125967 2 3 2 +1.29521163578326 1 3 2 +0.51405310891982 2 3 2 +0.248969113363235 1 3 2 +0.8258802772869 2 3 2 +0.858915328254726 2 3 2 +1.04412031745921 1 3 2 +1.67795472999734 1 3 2 +0.560822550684719 2 3 2 +0.639828932713558 2 3 2 +0.606061526335406 1 3 2 +0.446197468121209 2 3 2 +0.889197611107733 2 3 2 +0.229591061070164 1 3 2 +0.598751053548388 1 3 2 +0.42084079282726 2 3 2 +0.280912227540918 2 3 2 +0.353535095083615 1 3 2 +0.533533031895995 2 3 2 +0.40991726170081 2 3 2 +0.386705907004533 2 3 2 +0.663325409649471 2 3 2 +1.05367422973975 1 3 2 +1.41642911541684 2 3 2 +0.728074140704459 2 3 2 +0.448237114304907 2 3 2 +0.471483947133633 2 3 2 +1.26271559797945 1 3 2 +0.241118847003316 2 3 2 +0.551833217379812 1 3 2 +0.508606043806118 2 3 2 +0.364460896466132 2 3 2 +0.412399264984449 2 3 2 +0.305920977565598 2 3 2 +0.705398182563824 2 3 2 +0.204036911418345 1 3 2 +1.04820599938717 1 3 2 +0.643323321586422 2 3 2 +3.66898798188367 2 3 2 +1.08829013517781 1 3 2 +0.361592831884118 1 3 2 +0.979363639648445 2 3 2 +0.521111784853412 1 3 2 +0.343395604193243 1 3 2 +0.587048881881688 2 3 2 +1.41201467474607 2 3 2 +0.443024780470065 2 3 2 +2.95728532098558 2 3 2 +0.319216793259789 2 3 2 +0.221315652964487 2 3 2 +0.77093692467471 1 3 2 +0.73558455041612 1 3 2 +0.259119262605434 2 3 2 +0.48994285788748 1 3 2 +0.571960539121533 2 3 2 +0.30717971899547 2 3 2 +1.97586628351188 2 3 2 +0.375432444639877 2 3 2 +0.811045564934994 2 3 2 +0.911400482590164 2 3 2 +0.421553307064521 1 3 2 +0.836499690800059 1 3 2 +0.709656783694355 1 3 2 +0.738884945936119 1 3 2 +0.369565008846999 1 3 2 +0.379597876167422 1 3 2 +0.673815169801798 2 3 2 +1.61852146139474 1 3 2 +0.341581465482509 1 3 2 +0.263351833487348 2 3 2 +0.34610526127482 2 3 2 +0.840263767605542 2 3 2 +0.860111461225023 2 3 2 +0.500246193912611 2 3 2 +0.611622543641809 2 3 2 +0.304839820514316 1 3 2 +0.478872857619653 2 3 2 +0.966509448052867 2 3 2 +0.369970546426949 2 3 2 +0.424912789436735 2 3 2 +0.639361694609756 2 3 2 +0.638867619514155 2 3 2 +0.439145854141595 2 3 2 +1.2433130957394 2 3 2 +0.546013305487959 2 3 2 +0.621366845453756 1 3 2 +0.371921153976491 1 3 2 +1.72030292611725 2 3 2 +0.265728845949588 2 3 2 +0.250485215272467 2 3 2 +0.260055352791922 2 3 2 +1.04055348391978 2 3 2 +0.639817829535305 2 3 2 +0.436687399202203 2 4 1 +0.390002899730434 2 4 1 +0.689914798071677 2 4 1 +0.89709020993931 2 4 1 +0.427183990353492 1 4 1 +0.340777120972685 2 4 1 +0.251597448645233 1 4 1 +0.270029795224852 1 4 1 +0.36506923015438 2 4 1 +0.925483297260795 1 4 1 +0.491542011496093 1 4 1 +0.584011448243567 2 4 1 +1.27419822711881 2 4 1 +0.33102420792392 2 4 1 +0.351900683919713 1 4 1 +0.393343181453058 2 4 1 +1.11700088666809 2 4 1 +0.383684827552196 2 4 1 +0.319389244865323 2 4 1 +0.207961019321362 1 4 1 +0.247906583019937 2 4 1 +0.664874815584718 2 4 1 +0.633174401608791 2 4 1 +0.228811949915 2 4 1 +0.313807509549483 1 4 1 
+0.258061143771553 1 4 1 +1.40526930242479 2 4 1 +0.617601017184864 2 4 1 +0.448539769566249 2 4 1 +0.301286656201828 2 4 1 +0.35035514703207 2 4 1 +1.08961020047286 1 4 1 +0.29793786190371 1 4 1 +0.384511098165857 2 4 1 +0.761604738984846 1 4 1 +0.341173784779225 2 4 1 +0.376431442957684 2 4 1 +0.484742402575381 2 4 1 +0.257151781895977 1 4 1 +0.597747950821735 2 4 1 +0.510086318540574 2 4 1 +0.414694239051273 2 4 1 +0.583680054953304 1 4 1 +0.260760440689632 2 4 1 +0.798233246796322 1 4 1 +0.299861199950565 1 4 1 +0.276645559734816 1 4 1 +0.754832912251529 2 4 1 +0.409850111348969 2 4 1 +0.357937922566155 1 4 1 +1.74732047917189 2 4 1 +0.234683752237039 1 4 1 +0.379935287004687 2 4 1 +0.400355036755306 2 4 1 +0.199836116985197 1 4 1 +0.29654658741011 1 4 1 +0.40070736644743 2 4 1 +0.810244937262253 1 4 1 +0.232476182488422 1 4 1 +0.441531921393063 1 4 1 +0.266981994270395 1 4 1 +0.594986042709096 2 4 1 +0.226949086978422 2 4 1 +0.521975525478104 2 4 1 +0.342357539413783 1 4 1 +0.317929036077879 1 4 1 +0.252848528033154 1 4 1 +0.323519822370531 1 4 1 +0.543317920252121 2 4 1 +0.24136946576349 2 4 1 +0.924045894157614 1 4 1 +0.206395302547672 1 4 1 +0.32962698245246 1 4 1 +0.351093830002051 2 4 1 +0.393905213694999 1 4 1 +0.215647621306677 1 4 1 +1.07707197772823 2 4 1 +0.389509392394056 2 4 1 +0.223818285290267 1 4 1 +0.506337167510338 2 4 1 +0.471183820790944 2 4 1 +0.845694161269827 2 4 1 +0.709573151871471 1 4 1 +0.17746060467044 1 4 1 +0.31073859900678 2 4 1 +0.441048237227571 1 4 1 +0.46110944700935 1 4 1 +0.54945827722732 2 4 1 +0.784046498525351 2 4 1 +0.67011316512292 2 4 1 +0.835724249453141 2 4 1 +0.446765564545247 2 4 1 +0.672425348568567 2 4 1 +0.327795527459012 1 4 1 +0.638194736699407 1 4 1 +0.706923294313998 2 4 1 +0.599365457528674 1 4 1 +0.323701086823648 2 4 1 +0.219227233336576 1 4 1 +0.457730880006645 1 4 1 +0.315697871971176 2 4 1 +0.589925939568761 1 4 1 +0.247439085357404 1 4 1 +0.32425097397633 1 4 1 +0.597517113461805 2 4 1 +0.718798195346532 2 4 1 +0.638921440877047 1 4 1 +0.255295790134737 2 4 1 +0.327574345419546 2 4 1 +0.290528460395922 2 4 1 +0.812154747100783 2 4 1 +0.62206059702272 2 4 1 +0.692613337029294 2 4 1 +0.625883974901828 2 4 1 +0.441430346433252 2 4 1 +0.490081824112803 2 4 1 +0.244708505986005 2 4 1 +0.670449909527048 2 4 1 +0.548567982146401 2 4 1 +0.670609785865862 2 4 1 +0.232196621565708 1 4 1 +1.00556527970583 1 4 1 +0.362798089622382 2 4 1 +0.299887052564318 1 4 1 +0.29571247705273 2 4 1 +0.379234401357032 1 4 1 +0.370440077361359 1 4 1 +0.431164494502316 1 4 1 +0.420428289154959 2 4 1 +0.730550233135442 2 4 1 +0.196484263350659 1 4 1 +0.322244565661879 1 4 1 +0.419903128817093 2 4 1 +0.887401080614827 2 4 1 +0.347054627607897 1 4 1 +0.478136351931499 2 4 1 +0.22065845309499 1 4 1 +0.230551027213474 1 4 1 +0.774783709604525 2 4 1 +0.4017025367099 2 4 1 +0.473979554090044 2 4 1 +0.647672920929279 2 4 1 +0.498264780188815 2 4 1 +0.242543415916194 1 4 1 +1.50936872822537 2 4 1 +0.566867242699491 1 4 1 +1.47001919200243 2 4 1 +0.443050568657419 1 4 1 +0.617820295621617 2 4 1 +0.359479796449494 2 4 1 +0.604637805760835 2 4 1 +0.586057575592382 2 4 1 +0.440372792115214 1 4 1 +0.33669989715765 2 4 1 +0.46750885766708 2 4 1 +0.256884377023786 2 4 1 +0.404464415727674 1 4 1 +1.31921997047773 1 4 1 +0.430411845210472 2 4 1 +0.191395478638844 1 4 1 +2.65424233758138 2 4 1 +0.446895568163648 2 4 1 +1.16838122758826 1 4 1 +0.404164043001054 1 4 1 +0.453588390001177 1 4 1 +0.489843569086762 2 4 1 +0.511073192179454 1 4 1 +0.366748489459315 2 4 1 
+1.08784444950982 1 4 1 +0.3127679937141 1 4 1 +0.374479796833337 2 4 1 +0.536583188537731 2 4 1 +0.468590946410184 1 4 1 +0.844010712248646 2 4 1 +0.557108724831039 1 4 1 +0.221613723664657 1 4 1 +0.485469547021943 2 4 1 +0.263615872608948 1 4 1 +0.441259334915482 2 4 1 +0.198072070553861 1 4 1 +1.50553220647041 2 4 1 +0.489507279251992 1 4 1 +0.864537266238903 2 4 1 +0.45282939606678 1 4 1 +0.746554343478926 2 4 1 +0.867773306761754 1 4 1 +0.767200054356521 1 4 1 +0.23660645439005 1 4 1 +1.39880762611901 2 4 1 +0.350271079317704 1 4 1 +1.40586843906866 1 4 1 +0.478210911942578 2 4 1 +0.338115182852339 2 4 1 +0.730320711764892 1 4 1 +0.956280538468675 1 4 1 +0.420525233125089 1 4 1 +0.347936329704388 1 4 1 +0.474630289993715 1 4 1 +0.819946152352902 2 4 1 +0.934441627702885 1 4 1 +0.730580755467428 1 4 1 +0.271300024998423 2 4 1 +0.304557666577329 1 4 1 +0.693593360198042 2 4 1 +0.275013203541771 2 4 1 +1.04949192092882 1 4 1 +0.912930864419114 2 4 1 +0.245497508402249 1 4 1 +0.340496536712849 1 4 1 +0.206302342316397 1 4 1 +0.349666013963138 1 4 1 +0.391997601731428 2 4 1 +0.31394682736151 2 4 1 +0.19581160832742 1 4 1 +1.06233406823564 1 4 1 +0.415830063959857 2 4 1 +0.593378048695084 2 4 1 +0.684233440252769 2 4 1 +0.688883920330433 2 4 1 +1.6950993730873 2 4 1 +0.432896756723563 2 4 1 +0.431906140578051 2 4 1 +1.52282637445437 1 4 1 +0.405643762755849 2 4 1 +0.3906375449839 2 4 1 +0.493637490245746 2 4 1 +0.253401712848908 1 4 1 +0.651891849887022 1 4 1 +0.191738721475655 1 4 1 +0.274200778844831 1 4 1 +0.687221734267395 1 4 1 +0.401284964672383 2 4 1 +1.30759449667971 2 4 1 +0.45371033421329 1 4 1 +0.414999090718536 1 4 1 +0.339864441346799 2 4 1 +0.224062601508878 1 4 1 +0.267986100808596 2 4 1 +0.205728192609412 1 4 1 +0.294602964122564 1 4 1 +0.57048745621061 2 4 1 +0.312999536718822 2 4 1 +0.468539738808252 2 4 1 +0.433964076498741 2 4 1 +0.263357126730855 1 4 1 +0.403225684575735 2 4 1 +0.505097149684945 1 4 1 +0.352103525497929 1 4 1 +0.353799144457459 2 4 1 +1.93501413202878 2 4 1 +0.234778101685937 1 4 1 +0.915861513813591 1 4 1 +0.222561853971709 2 4 1 +0.704078227413159 1 4 1 +0.324489257545476 1 4 1 +0.360961619402915 1 4 1 +1.08221841300972 2 4 1 +0.255068442126581 1 4 1 +0.427031071589034 2 4 1 +0.268383547265101 2 4 1 +0.527836858914301 2 4 1 +0.436588005092254 2 4 1 +0.63511533446365 1 4 1 +0.234879561500897 2 4 1 +0.401628516319902 1 4 1 +0.244232194192342 2 4 1 +0.228576135937129 1 4 1 +0.479791504967864 1 4 1 +0.623390007344234 2 4 1 +0.464270391434225 1 4 1 +0.366609113909401 2 4 1 +0.96274637101792 1 4 1 +0.576483736943277 2 4 1 +0.403581781434509 2 4 1 +0.639484168435868 2 4 1 +0.62685113380266 2 4 1 +0.504458338039312 1 4 1 +0.402748748564798 2 4 1 +0.214342237683536 2 4 1 +0.349689666731501 1 4 1 +0.920646509992372 2 4 1 +0.27944693184416 1 4 1 +0.44815474516242 2 4 1 +0.205465436502175 1 4 1 +0.58051540184786 2 4 1 +1.39132555887266 2 4 1 +0.364514642632317 2 4 1 +0.70643155804624 1 4 1 +0.323310314942546 2 4 1 +0.175430213692877 1 4 1 +0.324665401127095 2 4 1 +0.339971125940635 1 4 1 +0.357027781829466 2 4 1 +0.197390554919544 1 4 1 +1.19812104028543 1 4 1 +0.362575817654 2 4 1 +0.38070286088775 2 4 1 +0.468393488473505 2 4 1 +0.69309042773624 2 4 1 +0.489049481806581 2 4 1 +0.550976348834216 2 4 1 +0.359974012139019 2 4 1 +0.430662557513296 2 4 1 +0.230762012931962 1 4 1 +0.464324012490502 2 4 1 +0.312270150962999 2 4 1 +0.407424579565668 1 4 1 +1.69173371199779 2 4 1 +0.279156190669336 1 4 1 +1.09358568468554 2 4 1 +0.799141912889128 2 4 1 
+0.471229301469417 2 4 1 +0.86570523590372 2 4 1 +0.83707942075239 2 4 1 +0.301114204037757 2 4 1 +0.26254546100438 1 4 1 +0.480187644205008 2 4 1 +1.26489425453197 1 4 1 +0.348490685840493 1 4 1 +1.80827808496735 2 4 1 +0.688328204172357 1 4 1 +0.327390856146583 1 4 1 +0.296891297267103 2 4 1 +0.537842103983798 2 4 1 +0.61750762959656 1 4 1 +0.514317599884349 2 4 1 +0.484707489461247 2 4 1 +1.02339278515862 2 4 1 +0.261167936671032 1 4 1 +0.304205039689018 2 4 1 +0.663106162019803 2 4 1 +0.717654991060284 2 4 1 +0.303534711764191 1 4 1 +0.330517625218401 2 4 1 +0.421085131279027 1 4 1 +0.205818877232142 2 4 1 +0.316549542909507 2 4 1 +0.204577406444421 1 4 1 +0.23632265848357 1 4 1 +0.548244833679468 2 4 1 +0.388542899040365 1 4 1 +0.664718352371726 2 4 1 +0.675589276314572 2 4 1 +0.797202947732321 2 4 1 +0.210476690239724 1 4 1 +0.368876471933727 1 4 1 +3.36252865887517 2 4 1 +1.08817433313251 2 4 1 +0.302267582741235 2 4 1 +0.528662894227581 2 4 1 +0.405868957673161 2 4 1 +0.78835957170157 2 4 1 +0.348515180995261 1 4 1 +0.400786810234322 2 4 1 +0.723106946883847 1 4 1 +0.302360607513298 1 4 1 +0.398732629036892 2 4 1 +0.404922292377484 1 4 1 +0.317519731671834 2 4 1 +0.34850072573164 1 4 1 +0.236746589712662 1 4 1 +0.343500348276907 2 4 1 +0.913186037917405 1 4 1 +0.399298167621525 1 4 1 +0.384145099266467 2 4 1 +0.202414771592286 1 4 1 +1.18165252887664 2 4 1 +0.330694054268851 1 4 1 +0.32152973903766 2 4 1 +0.491640817467057 2 4 1 +0.355523637611526 2 4 1 +0.652568975873668 1 4 1 +0.83445083655571 2 4 1 +1.2259345279659 2 4 1 +0.359422092118994 2 4 1 +0.706531400652435 2 4 1 +0.393761838705891 2 4 1 +0.637784009231585 2 4 1 +0.72390141241909 1 4 1 +0.250576856916352 1 4 1 +0.294901560775219 2 4 1 +0.360607616504287 2 4 1 +0.94020952091997 2 4 1 +0.512044648878922 2 4 1 +0.795154031544809 2 4 1 +0.483945751646636 2 4 1 +1.00668402779973 1 4 1 +0.354165221763607 2 4 1 +0.589695938125084 1 4 1 +0.317505434986294 1 4 1 +1.43373221832127 1 4 1 +0.288390917154746 2 4 1 +0.405502079908248 1 4 1 +0.446525389342848 2 4 1 +0.532552597088654 1 4 1 +0.313955963239798 2 4 1 +0.353727419903597 2 4 1 +0.370222838745741 1 4 1 +0.53179693872268 2 4 1 +0.367449479194125 1 4 1 +0.552379937342038 2 4 1 +0.990656154189996 2 4 1 +0.339737828162076 2 4 1 +0.637641033103 2 4 1 +0.42876311661298 2 4 1 +0.23749457142906 2 4 1 +0.319603538159136 2 4 1 +0.374605787276471 1 4 1 +0.849512129786118 2 4 1 +1.00104076228899 2 4 1 +0.198287811121547 1 4 1 +0.298821665244969 2 4 1 +0.291809945237506 1 4 1 +0.629558789118005 2 4 1 +0.387853650221803 2 4 1 +0.403253897590665 1 4 1 +0.467497496264389 2 4 1 +0.76163717207652 2 4 1 +0.509727112968688 2 4 1 +0.298777861255781 1 4 1 +0.511840234402182 2 4 1 +0.285361728745086 2 4 1 +0.597484562760948 2 4 1 +0.289800259644921 2 4 1 +0.583545618964623 2 4 1 +0.567804592187857 1 4 1 +0.507416826265034 1 4 1 +0.802494675316203 2 4 1 +0.977640762711063 1 4 1 +0.310065306648296 1 4 1 +0.345153633596149 2 4 1 +0.347175805412226 1 4 1 +0.412977137398828 2 4 1 +0.285153755999717 1 4 1 +0.239659186319816 2 4 1 +0.339455800385343 2 4 1 +0.268577501046517 2 4 1 +0.658394714841171 1 4 1 +0.778666141135073 2 4 1 +0.316640157058985 1 4 1 +0.223585419102432 1 4 1 +0.865653039717611 2 4 1 +0.366142103215405 1 4 1 +1.06763238053885 1 4 1 +0.649850247862938 1 4 1 +0.512047384065615 2 4 1 +0.416754656003573 2 4 1 +0.251040108432236 1 4 1 +0.360104447735964 2 4 1 +0.69217517705459 2 4 1 +1.20890660646077 2 4 1 +0.395473147659342 1 4 1 +2.01509671350874 2 4 1 +0.257146924592025 1 4 1 
+0.56334930950831 2 4 1 +0.283190539517862 1 4 1 +0.926385510466009 2 4 1 +0.670839555017221 1 4 1 +0.209765778999125 1 4 1 +0.736401422311992 2 4 1 +0.652193628618288 1 4 1 +0.21004611504302 1 4 1 +0.427729746817706 2 4 1 +0.279727994007439 1 4 1 +0.973427619297358 1 4 1 +0.484447288964673 2 4 1 +0.773172203339819 1 4 1 +0.451517138859729 2 4 1 +0.366340544422608 1 4 1 +0.537698395391627 1 4 1 +0.502490066721104 2 4 1 +0.320507642052774 1 4 1 +0.521457049830957 2 4 1 +0.516601763002701 2 4 1 +0.335922415995894 1 4 1 +0.433364895325246 2 4 1 +0.680419249073426 2 4 1 +0.297293517357851 1 4 1 +0.442221587569742 1 4 1 +0.27166805138535 1 4 1 +0.61736599846165 2 4 1 +0.486875798898661 1 4 1 +0.221131066783682 1 4 1 +0.631893098333021 1 4 1 +0.680334804706098 2 4 1 +0.439239414100042 2 4 1 +0.844299183787246 1 4 1 +0.879748183908203 2 4 1 +0.881004157734928 2 4 1 +0.503475477012917 2 4 1 +0.345571841551121 2 4 1 +0.60106814148883 2 4 1 +0.380097253620339 2 4 1 +0.414803174135158 2 4 1 +0.205017263198972 1 4 1 +0.284670026282373 1 4 1 +0.362803142116261 1 4 1 +0.244497025354602 1 4 1 +0.227388249258125 2 4 1 +0.427473866189717 2 4 1 +0.753126345774075 2 4 2 +0.390711548236765 1 4 2 +0.270643332729808 1 4 2 +0.226363776905272 2 4 2 +0.465280739944602 2 4 2 +0.436604485335886 2 4 2 +0.682522814767707 2 4 2 +0.346349842215826 1 4 2 +0.661058914850539 1 4 2 +0.283908385401013 1 4 2 +0.439045953436687 2 4 2 +0.357784731597969 1 4 2 +0.201869884529536 1 4 2 +0.360806495344085 1 4 2 +0.925367484247164 2 4 2 +0.415837898905798 1 4 2 +0.227962933710507 1 4 2 +0.312737890779197 2 4 2 +0.432575973780529 1 4 2 +0.253471376207378 2 4 2 +0.288980617785268 2 4 2 +0.205568280701361 1 4 2 +0.355465859040812 1 4 2 +0.533249899426881 1 4 2 +0.428341046561918 1 4 2 +0.468946411082872 2 4 2 +0.64190965481191 2 4 2 +0.208274022160394 1 4 2 +0.217167590394544 1 4 2 +0.348200391766719 1 4 2 +0.617665880709998 2 4 2 +1.2550193745344 2 4 2 +0.566722271395131 2 4 2 +0.9275505182546 1 4 2 +0.360441788744519 2 4 2 +0.334083907534406 1 4 2 +0.621024094129476 1 4 2 +0.21468225890971 1 4 2 +0.764606539690629 2 4 2 +0.513552230720492 2 4 2 +0.274575330019494 1 4 2 +0.442639118882473 2 4 2 +0.333565588104835 1 4 2 +0.32578303931817 1 4 2 +0.937568367512386 1 4 2 +0.267480287372774 2 4 2 +0.531291803062624 2 4 2 +0.356804813639121 2 4 2 +0.229458300439067 1 4 2 +0.491510334559396 1 4 2 +0.294506874060557 2 4 2 +0.450689439535521 2 4 2 +0.330518336785702 2 4 2 +0.629929903785771 2 4 2 +0.556005918181015 2 4 2 +0.298001295445146 2 4 2 +0.373025553749853 1 4 2 +0.470482102722226 1 4 2 +0.341137316160965 2 4 2 +0.395674428886868 1 4 2 +0.612038098963857 2 4 2 +0.357119967218812 1 4 2 +0.248258993043406 2 4 2 +0.43888644731273 2 4 2 +0.236904461349091 1 4 2 +0.175318379144127 1 4 2 +0.295979215918683 2 4 2 +0.830651376675517 2 4 2 +0.282437606937073 2 4 2 +0.398046893313355 1 4 2 +0.438749921630364 2 4 2 +0.910170394806412 1 4 2 +0.612914933569449 2 4 2 +0.321192322725182 1 4 2 +0.568814752481222 2 4 2 +0.897945007327145 2 4 2 +0.498239908323843 2 4 2 +1.13610274041636 1 4 2 +0.38015282393527 2 4 2 +1.88042120748211 1 4 2 +0.475694408506186 2 4 2 +0.783173222507789 1 4 2 +1.15066675545721 2 4 2 +0.557329951105813 2 4 2 +0.571462117657645 1 4 2 +0.67890197694106 1 4 2 +0.602173033246056 1 4 2 +0.394020910343337 2 4 2 +0.313420994224523 1 4 2 +0.286589909536176 1 4 2 +0.705950642195192 2 4 2 +0.229774735082982 1 4 2 +1.23278117367534 1 4 2 +0.22937452069431 1 4 2 +0.280442062594111 2 4 2 +0.327747771853932 2 4 2 +0.235540251439368 1 4 2 
+0.482389789621482 2 4 2 +0.221470341978539 1 4 2 +1.543390472393 1 4 2 +0.407965459047609 2 4 2 +0.861415260414181 2 4 2 +0.257801138767577 1 4 2 +1.08565280721424 2 4 2 +0.581197798001077 1 4 2 +0.324088998372876 2 4 2 +0.357086553411739 1 4 2 +1.40348937798243 1 4 2 +0.193914649716736 1 4 2 +1.21046346658098 1 4 2 +0.67741215417897 1 4 2 +0.329541261493576 1 4 2 +0.708070291827501 2 4 2 +0.799545815866931 1 4 2 +0.284679156256938 2 4 2 +0.394752939066119 2 4 2 +1.28735898040161 2 4 2 +0.28801399136096 2 4 2 +0.648970026347328 1 4 2 +0.441656916239289 1 4 2 +0.255895819732594 1 4 2 +0.273287514635962 1 4 2 +0.355331159272485 2 4 2 +0.660329500255681 2 4 2 +1.60223591193477 2 4 2 +0.312706096050944 2 4 2 +0.370271886124509 1 4 2 +0.4718314797338 2 4 2 +0.27614855720277 1 4 2 +0.483448608439343 1 4 2 +0.234968080618399 1 4 2 +0.391112229320968 2 4 2 +0.462383731150063 1 4 2 +1.84554462107946 2 4 2 +0.368383992484067 2 4 2 +0.427732918131404 2 4 2 +0.261183473976277 1 4 2 +0.443004159664207 1 4 2 +0.319045852909917 1 4 2 +0.459946072664613 1 4 2 +0.308634108085083 2 4 2 +0.698726479780432 2 4 2 +0.236639844152539 1 4 2 +0.331687357050262 1 4 2 +0.478759287978135 2 4 2 +0.368101563033333 2 4 2 +0.305576932610112 1 4 2 +0.429846006190982 2 4 2 +0.760633202811727 2 4 2 +0.413548649793985 2 4 2 +0.854550457981442 1 4 2 +0.793486300366621 2 4 2 +0.865787783075263 2 4 2 +0.29645719210445 2 4 2 +0.286957661251985 2 4 2 +0.799348039285717 2 4 2 +0.655555119608612 2 4 2 +0.742570590536339 2 4 2 +0.618028617950327 2 4 2 +0.287032292482981 1 4 2 +0.810817641683865 2 4 2 +0.542866362535735 2 4 2 +0.461735825500108 1 4 2 +0.360081585122882 2 4 2 +0.686393593652603 2 4 2 +0.406506979162325 1 4 2 +0.417722137978479 2 4 2 +0.199502298944795 1 4 2 +0.283998907427584 1 4 2 +0.641575484713698 1 4 2 +0.536268680798931 1 4 2 +1.30578408194073 2 4 2 +0.184223471103508 1 4 2 +0.69467941754697 2 4 2 +0.22448622806897 2 4 2 +0.286771373284366 1 4 2 +0.688354585035901 1 4 2 +0.967997645085735 1 4 2 +0.379271417939524 1 4 2 +0.657624551473419 2 4 2 +0.288156373424384 1 4 2 +0.798729464202731 2 4 2 +0.299408333666402 2 4 2 +0.569629360200182 2 4 2 +0.368672408571234 2 4 2 +0.689419089326237 2 4 2 +1.39439675244552 2 4 2 +0.349220352929323 1 4 2 +0.83628328787567 1 4 2 +0.954621512616944 1 4 2 +0.974085341355247 2 4 2 +0.232985801064047 1 4 2 +0.392841148562383 1 4 2 +0.215131830305328 1 4 2 +0.293782797485867 2 4 2 +0.271250958358236 1 4 2 +0.624040890183475 1 4 2 +0.805265827410823 2 4 2 +0.294091703458089 2 4 2 +0.710826711143953 2 4 2 +1.22718633292131 1 4 2 +0.634166705031742 2 4 2 +0.34847720815033 2 4 2 +0.401314348829917 1 4 2 +0.232628383153352 1 4 2 +0.732405673120817 1 4 2 +0.721193585825821 2 4 2 +0.409057462385994 2 4 2 +0.584778932939187 1 4 2 +0.232717575480756 2 4 2 +0.815862413646243 2 4 2 +0.384196628908383 1 4 2 +0.578948587380573 1 4 2 +1.09711900247466 1 4 2 +0.848385257280831 2 4 2 +0.569159709314235 2 4 2 +0.549417965516691 1 4 2 +0.732526266656702 2 4 2 +0.308790829645616 2 4 2 +0.99654060065045 1 4 2 +0.589889727186112 1 4 2 +0.5484884279984 2 4 2 +0.479934907640303 2 4 2 +0.270818907737512 1 4 2 +0.21367589349819 2 4 2 +0.400772720552252 1 4 2 +1.19500378366517 2 4 2 +0.247627583441975 2 4 2 +0.344233591133932 2 4 2 +0.226694192794682 2 4 2 +0.838668645891454 1 4 2 +0.706466288045509 1 4 2 +0.294054475401509 2 4 2 +0.686122317625166 1 4 2 +0.403497095541725 2 4 2 +0.243178882660292 2 4 2 +0.699616329368069 1 4 2 +0.307343994601662 1 4 2 +0.42437788244234 1 4 2 +0.502486986226067 2 4 2 
+1.28866364433504 2 4 2 +0.305567946577639 2 4 2 +0.378703212426002 2 4 2 +0.792580021830969 1 4 2 +0.304089937760933 1 4 2 +0.311408526252319 1 4 2 +0.430396597223223 2 4 2 +0.297243961067233 2 4 2 +0.44197992729068 2 4 2 +0.316197941983761 2 4 2 +0.352238116060612 1 4 2 +0.537215363451521 1 4 2 +0.313054348616914 2 4 2 +2.41486023609302 2 4 2 +0.278189933299107 1 4 2 +0.608375967955867 2 4 2 +0.322433656896367 2 4 2 +0.510101175645582 2 4 2 +0.536008515884717 2 4 2 +0.411222727405007 1 4 2 +0.322077650775546 1 4 2 +0.376593462100595 1 4 2 +0.373346452642805 1 4 2 +0.204108290848997 1 4 2 +1.13673544543737 1 4 2 +0.608043497918468 2 4 2 +0.492504866468463 1 4 2 +2.64459046440055 2 4 2 +0.451189826082847 2 4 2 +0.576354637504905 2 4 2 +1.03341312876181 2 4 2 +1.06203790709498 2 4 2 +0.467283293752499 1 4 2 +0.59305758857692 1 4 2 +1.02820034315313 2 4 2 +0.243096585776871 2 4 2 +0.241979269170888 1 4 2 +2.12555571038889 2 4 2 +0.251788236043855 1 4 2 +0.723821381450342 1 4 2 +0.433526132962029 2 4 2 +0.21544327239061 1 4 2 +0.393688512078822 1 4 2 +0.284221169948672 2 4 2 +0.576093893566291 2 4 2 +1.15305709835803 1 4 2 +0.577913219145157 1 4 2 +0.715910910811142 1 4 2 +0.75590490398109 1 4 2 +0.360319468198415 2 4 2 +0.632185262786175 1 4 2 +0.18238157652647 1 4 2 +0.562586077551566 2 4 2 +0.215854179362236 1 4 2 +0.312933166228936 2 4 2 +0.555832005681486 2 4 2 +0.318457487234359 1 4 2 +0.582899849996915 2 4 2 +0.245962703292602 1 4 2 +0.465283341657699 2 4 2 +0.414444633194467 2 4 2 +1.24551335125207 2 4 2 +0.545757922901803 1 4 2 +0.231000979626702 1 4 2 +0.99685036055635 2 4 2 +0.384519234744453 2 4 2 +0.313068825633183 2 4 2 +0.272366676123266 2 4 2 +0.369113998245052 1 4 2 +0.852502642553233 2 4 2 +0.500222326108646 2 4 2 +1.33492159400307 2 4 2 +0.334531945802007 2 4 2 +0.345532493468953 2 4 2 +0.411524900150396 1 4 2 +0.412179135130737 1 4 2 +0.181615360189367 1 4 2 +0.362837785162399 2 4 2 +0.357363228746261 1 4 2 +0.341279195040987 1 4 2 +0.509319762686041 2 4 2 +0.832335744829212 1 4 2 +0.626841628293068 2 4 2 +0.410567315100069 2 4 2 +0.323450520248485 1 4 2 +0.460402879304217 2 4 2 +0.978638366687773 2 4 2 +0.391658527387104 2 4 2 +0.451809465395574 2 4 2 +0.593742477301499 2 4 2 +0.662146826253225 2 4 2 +0.437989819033819 1 4 2 +0.269628201689827 2 4 2 +0.435351210611835 1 4 2 +0.873512993112175 2 4 2 +0.303862086205385 2 4 2 +0.273370196428826 2 4 2 +0.346348296895532 1 4 2 +0.998766995801073 1 4 2 +0.500807556509458 2 4 2 +0.377700114272957 1 4 2 +0.557960766756848 2 4 2 +0.445694136085316 1 4 2 +0.45906855277066 2 4 2 +0.754461289543913 1 4 2 +0.400053087442967 2 4 2 +0.551849440102895 2 4 2 +0.442947303118676 2 4 2 +0.333326868270498 2 4 2 +0.299790315442093 2 4 2 +0.553945654472965 2 4 2 +0.451050492701825 2 4 2 +0.324222556361926 1 4 2 +0.535724245715277 2 4 2 +0.238621696921591 2 4 2 +1.05972477089422 2 4 2 +0.381000045748109 2 4 2 +0.622451138992948 2 4 2 +0.373517923040452 1 4 2 +0.379879533262278 2 4 2 +0.61017258538097 1 4 2 +0.373931156084275 2 4 2 +0.271265445543895 2 4 2 +1.13340860093329 2 4 2 +0.48332741629014 2 4 2 +0.278034332012135 2 4 2 +0.617732524410426 2 4 2 +0.295315882068786 1 4 2 +0.399809513238085 2 4 2 +0.593796551838838 1 4 2 +0.448034890388041 1 4 2 +0.507568706345751 1 4 2 +0.361775441496837 2 4 2 +0.680971086222875 2 4 2 +0.760052527880231 1 4 2 +0.523382086841163 2 4 2 +0.782073278018563 2 4 2 +0.366504296431615 1 4 2 +0.546586584253047 1 4 2 +0.214235924471223 2 4 2 +0.366212595435754 1 4 2 +0.291167146710121 1 4 2 +0.409610194588736 2 4 2 
+0.356114057741968 2 4 2 +0.219739077268034 1 4 2 +0.672870970697509 2 4 2 +0.459606203597495 1 4 2 +0.526098601526635 2 4 2 +0.372672585132755 2 4 2 +0.597289086386129 2 4 2 +0.626632354698616 1 4 2 +0.332982149438951 1 4 2 +0.278836235260797 1 4 2 +0.322540461337514 2 4 2 +0.78120963284111 1 4 2 +0.23866016678153 1 4 2 +0.50573581272823 1 4 2 +0.307268642300638 1 4 2 +0.780733176074318 2 4 2 +0.524376179559523 2 4 2 +0.189183803390314 1 4 2 +0.845904136307582 2 4 2 +0.278587013457745 1 4 2 +0.225849178715147 1 4 2 +0.505248402948615 2 4 2 +0.366510147437898 2 4 2 +0.786656646299225 1 4 2 +0.28859561129487 2 4 2 +1.0544558000918 2 4 2 +0.393129941472734 2 4 2 +0.209426913346118 1 4 2 +0.557848900331885 1 4 2 +0.428353664374262 1 4 2 +0.530074401536137 2 4 2 +0.364796976039387 1 4 2 +0.31697830785037 2 4 2 +0.257110187742915 2 4 2 +0.860619019042301 2 4 2 +0.275753333898939 2 4 2 +0.397675327020122 2 4 2 +0.400511098922939 2 4 2 +0.948090448726289 2 4 2 +0.792802580857493 2 4 2 +0.492364374799986 2 4 2 +0.535712706435388 2 4 2 +0.303750231704068 2 4 2 +0.418198300886687 1 4 2 +0.242831317224143 1 4 2 +0.366807263977893 1 4 2 +0.950283301933651 2 4 2 +0.482878099739341 1 4 2 +0.97631347487202 1 4 2 +0.244665797059032 1 4 2 +0.383566210558107 2 4 2 +0.301865356343443 1 4 2 +0.649745063591824 2 4 2 +0.697050710541562 2 4 2 +0.809378147682444 2 4 2 +0.462123476270074 2 4 2 +0.245248883261683 1 4 2 +0.243212808416138 2 4 2 +0.314440748173076 2 4 2 +1.04698406308231 2 4 2 +0.544087182534972 2 4 2 +0.406124565426359 2 4 2 +0.718327290138134 2 4 2 +0.595497257669703 1 4 2 +0.666308342077789 1 4 2 +0.22438634675569 1 4 2 +0.365080530436751 1 4 2 +0.461875979293363 1 4 2 +0.815985780732118 2 4 2 +0.332674339664941 2 4 2 +0.567202795788581 2 4 2 +0.673655794950335 2 4 2 +0.477370669971454 2 4 2 +0.479764717684493 1 4 2 +0.68004172954729 2 4 2 +0.257963031440288 1 4 2 +0.321677163018359 2 4 2 +0.31627004676119 1 4 2 +0.529702779903144 2 4 2 +0.248585059238814 1 4 2 +0.282689321559121 1 4 2 +0.470660250683795 1 4 2 +0.250018593647379 1 4 2 +0.374469379392143 2 4 2 +0.275948837992336 1 4 2 +0.404124761645375 2 4 2 +0.461681809050967 2 4 2 +0.564107772448883 2 4 2 +0.384697573024493 1 4 2 +0.554591581848138 2 4 2 +0.584464813875503 2 4 2 +0.342621685779372 1 4 2 +1.16358906465507 1 4 2 +0.988193013514123 2 4 2 +0.332789439955037 2 4 2 +1.25271325447985 2 4 2 +0.177397789305329 1 4 2 +0.362176598671433 1 4 2 +0.419965871836781 1 4 2 +0.60515910819425 2 4 2 +0.31624308445277 2 4 2 +0.844077751163308 1 4 2 +0.3735652345134 1 4 2 +0.291452020039806 2 4 2 +0.232044027590617 2 4 2 +0.401670392843103 1 4 2 +0.453346479512868 1 4 2 +0.538601982527949 2 4 2 +0.830402429105011 2 4 2 +0.335367042770124 2 4 2 +1.69818743759647 2 4 2 +0.685148294374147 2 4 2 +0.563455535517706 1 4 2 +0.346418976965531 2 4 2 +0.662726370377329 2 4 2 +0.325948295612478 1 4 2 +0.790574826320759 2 4 2 +0.350496877596168 2 5 1 +0.874105967045303 2 5 1 +0.618625438899854 2 5 1 +0.418505998819245 2 5 1 +0.314600840328866 2 5 1 +0.381663329894006 2 5 1 +0.45975664723401 2 5 1 +0.629213989765841 2 5 1 +0.54170350431826 2 5 1 +0.765292807564366 2 5 1 +0.385154972025414 2 5 1 +1.0123064604624 1 5 1 +0.594468240472198 1 5 1 +0.432884690638713 1 5 1 +0.489852620287608 1 5 1 +0.439171145526351 2 5 1 +1.22541928040664 2 5 1 +0.489630344610841 1 5 1 +0.274646407464894 2 5 1 +0.321919494972732 2 5 1 +0.77321886884371 2 5 1 +0.618168337784024 1 5 1 +0.534313291237878 1 5 1 +0.248497494092332 2 5 1 +0.902277840078671 2 5 1 +0.362848845387321 1 5 1 
+0.51771202639715 1 5 1 +0.354988230953876 1 5 1 +0.408263930712262 1 5 1 +0.515818454942625 2 5 1 +1.13442796786063 1 5 1 +0.922450951783412 2 5 1 +0.918236744271086 1 5 1 +0.615340787166512 2 5 1 +1.33086229422069 2 5 1 +0.773498580150888 1 5 1 +0.355111810070351 2 5 1 +0.463437947168048 1 5 1 +0.448309657003003 2 5 1 +0.6988174528086 1 5 1 +0.681543871813962 2 5 1 +0.388077823480071 1 5 1 +0.769550702497453 2 5 1 +0.276308219836844 2 5 1 +0.284812859728204 2 5 1 +0.362311034619243 2 5 1 +1.17071306250983 2 5 1 +0.866609929152046 2 5 1 +0.835449276907315 1 5 1 +0.398346916321057 2 5 1 +0.358189328108751 2 5 1 +0.554585348153169 1 5 1 +0.39760626253423 1 5 1 +0.392260790271942 1 5 1 +0.374565507395057 1 5 1 +0.490572674372733 1 5 1 +0.262430183982469 2 5 1 +0.406476847741613 2 5 1 +0.734398707641991 2 5 1 +0.808457812365959 2 5 1 +0.48642275256533 2 5 1 +0.652503153805679 2 5 1 +0.549320171737622 1 5 1 +0.363404443230036 1 5 1 +0.840787255553956 2 5 1 +0.659531467134479 2 5 1 +0.52542297118843 2 5 1 +1.641319692926 1 5 1 +0.349747975299305 2 5 1 +0.696664492661082 1 5 1 +0.999158454741339 2 5 1 +0.494799923420879 1 5 1 +0.696955053780108 2 5 1 +0.320987556616071 1 5 1 +0.49483529420845 2 5 1 +0.327928353910945 2 5 1 +0.243634957449529 2 5 1 +0.382623755383196 2 5 1 +0.340391545486682 1 5 1 +0.370852704813291 1 5 1 +0.513953236176426 1 5 1 +0.98488957304638 1 5 1 +0.72920423163152 1 5 1 +0.417878765259703 2 5 1 +0.969267542459561 2 5 1 +0.588656109235448 1 5 1 +0.309310108339579 2 5 1 +0.359079679822589 2 5 1 +0.432293522236456 2 5 1 +0.365699146412261 2 5 1 +0.280030960748012 1 5 1 +0.267439129890066 1 5 1 +1.64146572476405 1 5 1 +0.383985418553408 2 5 1 +0.820780704522388 2 5 1 +0.287252707592816 2 5 1 +0.471656559488085 1 5 1 +0.688654531662119 1 5 1 +1.51116458250868 1 5 1 +0.256595632268106 1 5 1 +1.44192189522027 1 5 1 +0.423719184953423 2 5 1 +0.474641530750688 1 5 1 +0.95167387507805 1 5 1 +0.249940083181891 2 5 1 +0.948956034762691 1 5 1 +0.497334710175687 2 5 1 +0.401258517347727 1 5 1 +0.593937951684305 1 5 1 +0.399453837192165 1 5 1 +0.339706036542822 2 5 1 +0.358808063230196 2 5 1 +0.399870518236517 2 5 1 +0.402281253533687 1 5 1 +1.21105144216928 2 5 1 +0.276484649301743 2 5 1 +0.614037537089416 1 5 1 +0.468119101565478 2 5 1 +0.363072435808267 2 5 1 +0.424349274964132 2 5 1 +0.817780271882891 2 5 1 +0.348979602184705 2 5 1 +0.428954419119663 1 5 1 +0.377745649623147 2 5 1 +1.52858746330548 1 5 1 +0.38829844572601 2 5 1 +0.272639358091468 1 5 1 +0.410898025055377 2 5 1 +0.345789355606247 1 5 1 +0.29532986306681 1 5 1 +1.41353962836894 1 5 1 +0.405012581079275 2 5 1 +0.308523509241669 1 5 1 +0.277244934012402 1 5 1 +0.317403964863673 2 5 1 +0.590597620939944 2 5 1 +2.60905439597824 2 5 1 +0.312334336841847 1 5 1 +0.437294940463007 2 5 1 +0.749488878001604 1 5 1 +1.55915796602619 2 5 1 +0.672396814187568 1 5 1 +0.32268652713371 1 5 1 +2.33549213748734 2 5 1 +0.533821174405078 1 5 1 +0.29694295172823 2 5 1 +0.428694990290054 1 5 1 +0.744566758964104 2 5 1 +0.902120586395754 2 5 1 +0.373865886414247 2 5 1 +0.34709384467313 2 5 1 +0.579724512470357 2 5 1 +0.480526240272104 2 5 1 +0.280388545170291 2 5 1 +0.327331492746605 2 5 1 +0.307049013223802 1 5 1 +0.67645551514592 2 5 1 +1.45539495712329 1 5 1 +0.414781606222836 2 5 1 +0.464869946932275 2 5 1 +0.502525874440138 1 5 1 +0.395815499456874 2 5 1 +0.354740455281115 2 5 1 +0.387644457183764 1 5 1 +0.441119885285695 2 5 1 +0.541299937122488 1 5 1 +0.462769272386231 2 5 1 +0.568510300909836 1 5 1 +0.739186686655553 2 5 1 
+0.590037876611731 1 5 1 +0.876929440339061 2 5 1 +0.38745072608018 1 5 1 +0.874268369237665 1 5 1 +0.595292592424216 1 5 1 +0.704226262163879 2 5 1 +0.297080465628835 1 5 1 +0.873924224577836 1 5 1 +0.466850429992947 2 5 1 +2.19102033049238 2 5 1 +0.707932653803704 2 5 1 +0.52289483412265 1 5 1 +0.291400893995193 2 5 1 +0.334262174169645 1 5 1 +0.413969229169715 2 5 1 +0.368758425061548 2 5 1 +0.480077442145665 1 5 1 +1.00325797217147 1 5 1 +0.34303135632964 1 5 1 +0.449371536501468 2 5 1 +0.626712575183433 2 5 1 +0.300330886841042 1 5 1 +0.601032456393769 1 5 1 +0.398517086169262 2 5 1 +0.492233676994753 1 5 1 +0.559665551547645 2 5 1 +0.322079290494793 1 5 1 +1.00119635579758 2 5 1 +0.273572712925721 2 5 1 +0.997674531871953 2 5 1 +0.660614796170524 2 5 1 +0.682509343887865 2 5 1 +0.709029245629886 1 5 1 +0.450254655958821 2 5 1 +0.489362486290041 2 5 1 +0.256831501902699 1 5 1 +0.277749492989249 2 5 1 +0.570615557380466 1 5 1 +0.660249945470972 1 5 1 +0.913042539886274 2 5 1 +0.276157449004863 2 5 1 +0.42554491044995 1 5 1 +0.504500759944897 1 5 1 +0.464430914488706 2 5 1 +0.619432554673588 2 5 1 +0.344307477421206 2 5 1 +0.353936127201951 2 5 1 +0.542570048569743 1 5 1 +0.670056529092721 2 5 1 +1.69346839376907 1 5 1 +0.329122766203157 2 5 1 +0.580777607436693 2 5 1 +0.425192302825809 2 5 1 +1.01918716149399 1 5 1 +0.451237334938158 2 5 1 +0.365780961202646 1 5 1 +0.435969164806333 2 5 1 +0.446219693449255 2 5 1 +0.86148405175356 1 5 1 +0.378754506829674 2 5 1 +0.636129328599341 2 5 1 +0.484019299704474 1 5 1 +0.35361046366484 2 5 1 +0.778577922802192 2 5 1 +0.553712077213866 2 5 1 +0.576470926490595 1 5 1 +0.956990527979208 2 5 1 +0.676276969153139 2 5 1 +0.600336478541173 2 5 1 +1.1497148815834 2 5 1 +0.311559946333853 1 5 1 +0.32583211853549 2 5 1 +0.5129988563233 2 5 1 +0.706544486405484 2 5 1 +0.437094165223538 1 5 1 +0.824751569569288 1 5 1 +0.809546777157912 2 5 1 +0.537141407451724 2 5 1 +0.662502129558617 1 5 1 +0.501691224984125 2 5 1 +0.30901205010725 2 5 1 +0.47867098754842 1 5 1 +0.456986416516351 2 5 1 +0.275926112108479 1 5 1 +0.410807847551661 2 5 1 +0.828356765357645 1 5 1 +0.40635702823069 2 5 1 +0.986564014728032 2 5 1 +0.567057974036205 2 5 1 +0.322394251750056 2 5 1 +0.722132927785589 2 5 1 +0.409410905169481 2 5 1 +0.709898951889328 1 5 1 +0.541547402623561 1 5 1 +0.256670165225662 2 5 1 +0.49411541489472 1 5 1 +0.919104100863578 1 5 1 +0.549914334516905 2 5 1 +0.505235686112949 2 5 1 +0.282338529860808 2 5 1 +0.383976247257099 2 5 1 +0.680828005837556 1 5 1 +0.540327329724261 1 5 1 +1.05569191297992 2 5 1 +0.349514052734471 1 5 1 +0.496154085894348 2 5 1 +0.456989724057387 2 5 1 +0.730859840316074 1 5 1 +1.05327361608711 2 5 1 +0.614480004129318 2 5 1 +1.09479710995942 2 5 1 +0.456403655924994 2 5 1 +0.248929266015883 2 5 1 +0.345437497681469 2 5 1 +0.390161598039643 2 5 1 +1.59636606625796 1 5 1 +0.297653976651779 1 5 1 +0.53027456008198 2 5 1 +0.325786425396216 1 5 1 +0.412885828047688 1 5 1 +0.430942952085056 2 5 1 +0.628988308893905 2 5 1 +0.303432064418254 2 5 1 +0.502479650486677 2 5 1 +0.374956563195693 2 5 1 +0.317036830382213 1 5 1 +0.360257190747668 2 5 1 +0.392605112045462 2 5 1 +0.263946843562968 2 5 1 +0.543352649787032 2 5 1 +0.451523452374327 2 5 1 +0.588811763316336 2 5 1 +0.496429527557548 1 5 1 +0.363606147439157 1 5 1 +0.590754640562368 2 5 1 +0.399066967540575 2 5 1 +0.481461654111483 1 5 1 +0.44511198129432 2 5 1 +0.733957111974783 2 5 1 +0.233363499390124 2 5 1 +0.290775652443224 2 5 1 +1.0231168093639 2 5 1 +0.531611729352149 1 5 1 
+0.825587045098161 2 5 1 +0.902074251648083 2 5 1 +0.323175839216028 2 5 1 +0.505991444493804 2 5 1 +0.261543288336422 1 5 1 +0.554486494360695 1 5 1 +0.351246068594391 2 5 1 +0.515328779203654 2 5 1 +0.639677471438791 2 5 1 +0.480386256226869 2 5 1 +0.712967189057983 2 5 1 +1.34226678264378 2 5 1 +0.522093483130097 2 5 1 +0.335177947867927 1 5 1 +0.427739523462974 2 5 1 +0.724445283326656 2 5 1 +0.520065928077709 2 5 1 +0.690085575348652 2 5 1 +0.395666881799587 2 5 1 +0.253416617473968 2 5 1 +0.292975240297012 2 5 1 +0.880042969703422 2 5 1 +1.18765090929155 2 5 1 +0.471824574051573 1 5 1 +0.3819371403618 1 5 1 +0.489731712873816 2 5 1 +0.517213617852645 2 5 1 +1.02045998930259 1 5 1 +0.353507003760541 2 5 1 +0.28738904500089 2 5 1 +0.761600567581336 1 5 1 +0.744138933405505 1 5 1 +0.628607646940531 1 5 1 +0.892684033273368 2 5 1 +0.415671512759732 2 5 1 +0.862657966498822 2 5 1 +0.44210809069166 2 5 1 +0.524200017230228 1 5 1 +0.324994378309518 2 5 1 +0.426992098360617 2 5 1 +0.421266101618318 1 5 1 +0.592739096667285 2 5 1 +0.555131716241223 2 5 1 +0.881544992259764 2 5 1 +0.712182588708822 2 5 1 +0.744752154767 2 5 1 +0.606487498310839 2 5 1 +0.461699520660315 2 5 1 +0.745717699517249 1 5 1 +1.24267939276122 2 5 1 +1.06725166167186 2 5 1 +0.688125268149 2 5 1 +0.322706592623082 2 5 1 +0.625340088917108 1 5 1 +0.481462311469004 2 5 1 +1.11856169670045 2 5 1 +0.496653373252084 2 5 1 +0.306702987850203 1 5 1 +0.299778976884002 2 5 1 +0.255084547535916 2 5 1 +0.324409887328244 2 5 1 +0.28656822676276 2 5 1 +0.685862574486554 1 5 1 +0.25080565529039 2 5 1 +0.423005534054917 2 5 1 +0.567258936488528 2 5 1 +0.453882515712717 2 5 1 +0.889040190992262 1 5 1 +0.527722880894638 1 5 1 +0.25678851823785 1 5 1 +0.569171422366059 1 5 1 +0.438002657123947 1 5 1 +1.07105043333329 2 5 1 +0.371311478728212 2 5 1 +0.427478236470728 2 5 1 +0.690413385769963 2 5 1 +1.23990009741994 1 5 1 +0.35055316921775 2 5 1 +0.320095074553569 2 5 1 +0.360451278466255 2 5 1 +1.3783410124737 1 5 1 +0.342094531805991 2 5 1 +0.939591296738548 1 5 1 +0.377754647410711 2 5 1 +0.397257993518603 2 5 1 +0.556583275671363 2 5 1 +0.814148052150712 1 5 1 +0.445052953408459 2 5 1 +0.635785394694251 1 5 1 +1.45734899209363 2 5 1 +0.553038339349569 2 5 1 +0.487114354718737 2 5 1 +0.326029703773354 2 5 1 +0.378499631880882 1 5 1 +0.249998652473012 2 5 1 +0.777922576637263 1 5 1 +0.344758853651022 2 5 1 +0.469680901216615 1 5 1 +0.572885048100009 2 5 1 +0.813047280725817 2 5 1 +0.31824550606856 1 5 1 +1.52076632666272 2 5 1 +0.525665900915789 2 5 1 +0.835443800969408 2 5 1 +0.411960036228098 2 5 1 +0.366444785673607 2 5 1 +0.583008070195596 2 5 1 +0.306789626562622 2 5 1 +0.32265104149373 2 5 1 +0.60706446511574 1 5 1 +0.660415820740065 1 5 1 +0.41961055869239 2 5 1 +1.18355949324619 2 5 1 +1.7831326042876 1 5 1 +1.32079987606431 1 5 1 +0.31469679737234 2 5 1 +0.742130214925183 2 5 1 +0.278159578075745 2 5 1 +0.944262180658039 1 5 1 +1.00185185390192 2 5 1 +0.723912539579018 2 5 1 +0.730046670620961 2 5 1 +0.594612038608092 2 5 1 +0.609003026408566 1 5 1 +0.703005542893214 2 5 1 +0.371613566829059 2 5 1 +0.839961901535526 2 5 1 +0.444097755957375 2 5 1 +0.358077624513172 2 5 1 +0.284271913540958 2 5 1 +0.479974130859609 2 5 1 +0.545791838695864 1 5 1 +0.332965450514954 2 5 1 +0.591764667761429 2 5 1 +0.298607348317416 2 5 1 +0.247276738340079 2 5 1 +0.798461231259409 2 5 1 +0.441564925769082 2 5 1 +1.48128308355737 2 5 1 +0.31434796484399 1 5 1 +1.20347173161341 2 5 1 +0.739554951126451 1 5 1 +0.497097781531935 2 5 1 +0.484159304177581 
1 5 1 +0.244849956861225 1 5 1 +0.504629073199244 1 5 1 +0.569133223143607 2 5 1 +1.12622703221084 2 5 1 +0.764250286320223 2 5 1 +0.484641004950062 2 5 1 +0.514942303502618 2 5 1 +0.605883128391681 2 5 1 +0.49588937623075 1 5 1 +0.396953900942646 2 5 1 +0.520906400655699 1 5 1 +0.745428404112575 2 5 1 +0.351085063768204 1 5 1 +0.996492367823443 2 5 1 +0.831728540518663 2 5 1 +0.277512426545143 2 5 1 +0.25988569649848 2 5 1 +0.309494901893108 2 5 1 +0.506418492133261 1 5 1 +1.0882032936743 2 5 1 +0.561957049307115 1 5 1 +0.818825222243945 2 5 1 +0.53741814818001 2 5 1 +0.398894432067162 1 5 1 +0.370547579851629 2 5 1 +0.441101002850091 1 5 1 +0.335051494643735 1 5 1 +0.359857472939855 1 5 1 +1.23898294637209 2 5 1 +0.303629801372895 2 5 1 +1.27191285267288 2 5 1 +0.255294810871718 2 5 1 +0.387744030748659 2 5 1 +0.950171765183971 2 5 1 +0.338933464143833 2 5 1 +0.374472149784474 1 5 1 +0.850121988549967 1 5 1 +0.368357738596241 2 5 1 +0.422292677162516 1 5 1 +0.670555601613663 2 5 1 +0.443253833059252 2 5 1 +0.445156135335308 1 5 1 +0.570124671736916 1 5 1 +0.715586041727328 1 5 1 +0.59944925951305 1 5 2 +0.571270118524135 1 5 2 +0.478721803809417 2 5 2 +0.677464125838552 2 5 2 +0.663628439043173 2 5 2 +0.269338514686921 1 5 2 +0.602552243929772 1 5 2 +0.291299463050662 1 5 2 +0.44101267885359 1 5 2 +0.311931727273563 2 5 2 +0.352091009578926 1 5 2 +0.313916230690944 1 5 2 +0.374014526157238 1 5 2 +0.300236548555279 1 5 2 +0.361852622471219 1 5 2 +0.290120165572002 1 5 2 +1.3190023778617 1 5 2 +0.813585821304588 2 5 2 +0.563167577163749 2 5 2 +0.424847079638711 1 5 2 +0.431682396745519 1 5 2 +0.742666504831229 2 5 2 +1.3016483866513 1 5 2 +0.355061924288677 1 5 2 +0.645449822853174 2 5 2 +0.288378961868379 1 5 2 +0.374463137914422 2 5 2 +0.405984822829934 2 5 2 +0.460634380609883 2 5 2 +0.750352906162385 1 5 2 +0.644664165310704 1 5 2 +0.300369902496596 1 5 2 +0.443395281476769 1 5 2 +0.242967135467797 2 5 2 +0.286922136386556 2 5 2 +0.287290418411787 2 5 2 +0.521549068525531 2 5 2 +0.292427955172831 1 5 2 +0.689267068868006 2 5 2 +0.518413816193551 2 5 2 +0.318270239089209 1 5 2 +0.382457012636577 2 5 2 +1.01440803729853 1 5 2 +1.00158066089162 1 5 2 +0.568667032400329 2 5 2 +0.414610843415938 1 5 2 +0.400258366212628 2 5 2 +1.00809741464603 2 5 2 +1.27133812224764 2 5 2 +0.743864210325877 2 5 2 +0.775841632110899 2 5 2 +0.300321917136189 2 5 2 +0.3583802317039 2 5 2 +0.460038260487213 1 5 2 +0.364671001753739 2 5 2 +0.452730720520749 2 5 2 +0.650435164535667 1 5 2 +1.30257818709608 2 5 2 +0.467408775207611 2 5 2 +0.413873488779555 2 5 2 +0.362883875858316 1 5 2 +0.706627565831075 2 5 2 +0.965894478924112 1 5 2 +0.364415938903456 1 5 2 +0.302357207565824 2 5 2 +0.523675741606119 1 5 2 +0.246098564525208 1 5 2 +0.534354328367928 2 5 2 +0.305317088730255 2 5 2 +0.360877156880599 2 5 2 +0.269313918771234 1 5 2 +0.522107720783827 2 5 2 +0.798496955992481 1 5 2 +0.774270654545548 1 5 2 +0.689189662698451 2 5 2 +0.541317395948974 1 5 2 +0.758336347994286 2 5 2 +1.04260254410127 1 5 2 +0.298448066365288 2 5 2 +0.687244199835044 1 5 2 +0.321017344151699 2 5 2 +1.30053982037248 1 5 2 +0.871190362112565 2 5 2 +0.854567944471819 1 5 2 +0.406633311451941 2 5 2 +0.595529004717776 2 5 2 +0.932485076424955 2 5 2 +0.670527589251614 2 5 2 +0.349539633417549 2 5 2 +0.527533696703081 2 5 2 +0.604204077013109 1 5 2 +0.396372501238897 2 5 2 +0.54363245627552 2 5 2 +1.01374770072993 2 5 2 +0.442516401166061 1 5 2 +0.337787899261388 2 5 2 +0.611607046647056 2 5 2 +0.582231962637285 2 5 2 +0.531731222343829 2 5 
2 +0.70961760628681 2 5 2 +0.983672547433239 2 5 2 +0.853752013607103 2 5 2 +0.299100847268621 2 5 2 +0.458144991493894 2 5 2 +0.646448464231831 2 5 2 +0.349113081280827 2 5 2 +0.371396695556209 2 5 2 +0.380762038115444 2 5 2 +0.626193352278793 2 5 2 +0.319957343473038 2 5 2 +0.894827474594344 1 5 2 +0.371085339061208 2 5 2 +0.274700685448708 2 5 2 +0.472512525590188 2 5 2 +0.428272329032601 1 5 2 +0.81108343332377 2 5 2 +0.29226124190246 1 5 2 +0.709302851449251 1 5 2 +0.441502263285431 2 5 2 +0.377865986040148 1 5 2 +0.299141714875424 2 5 2 +0.852037775518583 1 5 2 +0.367443619829809 2 5 2 +0.833467723342496 1 5 2 +0.518949570328891 2 5 2 +1.08071399168627 2 5 2 +1.45803253458175 1 5 2 +0.403737991042524 2 5 2 +0.430597937237068 1 5 2 +0.693327507928039 2 5 2 +0.409427255768774 2 5 2 +0.415266426974346 2 5 2 +0.399348890241505 1 5 2 +1.00807008741471 1 5 2 +0.600334249477256 2 5 2 +0.723090984967466 1 5 2 +0.30124593772841 2 5 2 +0.526226731553072 1 5 2 +0.909950421520731 2 5 2 +0.327267873435672 1 5 2 +1.07209919243311 2 5 2 +0.29726599356851 1 5 2 +0.552066919100173 2 5 2 +0.494894143356916 2 5 2 +0.32978714763287 2 5 2 +0.590111357521564 2 5 2 +0.453342223889263 2 5 2 +0.402928162250864 2 5 2 +1.28649448740156 2 5 2 +0.307709380863996 1 5 2 +0.28828133716514 2 5 2 +0.293039690509469 2 5 2 +0.416060987991466 2 5 2 +0.475558200663385 2 5 2 +1.05487716699584 2 5 2 +0.338172024306927 1 5 2 +0.595539455240045 1 5 2 +0.579146103187199 2 5 2 +0.786799876060352 1 5 2 +0.241065859944711 2 5 2 +0.582321691985273 2 5 2 +0.460354760620268 1 5 2 +0.841722330305237 1 5 2 +1.01167714961156 2 5 2 +0.424713060969898 1 5 2 +0.394167127869212 2 5 2 +1.03416630772583 1 5 2 +1.32959777810628 1 5 2 +1.75972860107492 1 5 2 +0.615179768965791 2 5 2 +0.319854717980425 2 5 2 +0.489657096763377 1 5 2 +0.670661576197628 2 5 2 +0.472203770212263 2 5 2 +0.765998706988045 1 5 2 +0.333182293252276 1 5 2 +0.570043234180048 1 5 2 +0.30363661516842 1 5 2 +0.332466771451815 2 5 2 +0.997841282553196 2 5 2 +0.45787243741055 1 5 2 +0.334190036053108 2 5 2 +0.419647452096217 2 5 2 +0.477289639435407 2 5 2 +0.319493821935502 2 5 2 +0.602105491492108 2 5 2 +0.901453621588105 2 5 2 +0.302006597660369 2 5 2 +0.370541701419962 1 5 2 +0.632484359068564 2 5 2 +0.675493181471213 1 5 2 +0.702604527810739 2 5 2 +0.457494041206726 1 5 2 +1.02133348990189 1 5 2 +1.02366570258029 2 5 2 +0.6241274220945 2 5 2 +1.13319965338725 1 5 2 +0.608397741451449 2 5 2 +0.275232263227218 2 5 2 +0.646078828525116 1 5 2 +0.392384020972726 2 5 2 +0.611608179642105 1 5 2 +0.403732771272542 2 5 2 +0.475709943826484 1 5 2 +0.644107463291593 1 5 2 +0.529061673835631 2 5 2 +0.76205402727557 1 5 2 +0.478372023349786 1 5 2 +0.721916054782157 2 5 2 +0.298281970063148 2 5 2 +0.308419274846538 2 5 2 +1.13676249928442 2 5 2 +0.489633518133111 1 5 2 +0.81718265267318 2 5 2 +0.296510774521216 1 5 2 +0.41545087025183 2 5 2 +0.858883400769863 1 5 2 +0.588983529667847 2 5 2 +1.05271712018426 2 5 2 +0.305721646872819 1 5 2 +0.639265026387045 2 5 2 +0.640474642443045 1 5 2 +0.896096517036621 2 5 2 +0.42307255863364 2 5 2 +0.415674104307401 1 5 2 +0.27547910375578 1 5 2 +0.348853636625538 2 5 2 +0.283695041149401 2 5 2 +0.305964485294963 2 5 2 +0.993384639595699 2 5 2 +0.747661562638797 1 5 2 +0.374983766761583 2 5 2 +0.709902782274922 1 5 2 +0.39029996774982 2 5 2 +0.443342047659481 1 5 2 +0.25232121385486 2 5 2 +0.606258771370627 2 5 2 +0.56630014842487 2 5 2 +0.407573540497359 2 5 2 +0.488426329742728 2 5 2 +0.530548943298116 1 5 2 +0.836491434275815 2 5 2 
+0.368718555457284 2 5 2 +0.627508832177755 1 5 2 +0.658507784089307 2 5 2 +0.494633132243137 2 5 2 +0.767210656356636 1 5 2 +0.279455460728518 2 5 2 +0.680268781163734 1 5 2 +0.434657646694503 1 5 2 +0.975121716315162 2 5 2 +0.454458052785016 2 5 2 +0.870719661036972 2 5 2 +0.601447265395704 2 5 2 +0.883151345159082 2 5 2 +0.79883814953365 2 5 2 +0.520533085849907 2 5 2 +0.740289828056042 1 5 2 +0.808434675051423 2 5 2 +0.323990809323793 1 5 2 +0.402569321518717 1 5 2 +0.520691303484606 1 5 2 +0.401064022355165 2 5 2 +0.402344022560083 1 5 2 +0.532289549565749 2 5 2 +0.706963165521545 2 5 2 +0.784955813139153 2 5 2 +0.360194468075243 2 5 2 +0.409815687475514 1 5 2 +0.82043050263301 2 5 2 +0.460274040204098 2 5 2 +0.419051670972866 2 5 2 +0.599443515950589 2 5 2 +0.966096764539077 2 5 2 +0.366186511338898 1 5 2 +1.4222044721659 2 5 2 +0.777184212128937 2 5 2 +0.591852836588032 2 5 2 +0.770749892926039 2 5 2 +0.70434735829414 1 5 2 +0.765666276417329 2 5 2 +0.40346241426283 1 5 2 +0.391165632121021 2 5 2 +0.370020173988749 2 5 2 +1.58683703850196 1 5 2 +1.57793133770567 2 5 2 +0.80075289464325 2 5 2 +0.256904564618549 1 5 2 +0.53622262912349 1 5 2 +0.453580971173257 2 5 2 +0.410069535718748 1 5 2 +0.515797332567113 2 5 2 +0.952842898198181 1 5 2 +0.363748661621775 2 5 2 +0.428437274072119 2 5 2 +0.370569493908707 1 5 2 +1.55504308977282 1 5 2 +0.2910968027665 2 5 2 +0.889633822477091 2 5 2 +1.06768254922828 1 5 2 +0.737740843880572 2 5 2 +0.999020093280879 2 5 2 +0.332193354553405 2 5 2 +0.489468556048885 1 5 2 +0.276614446525954 2 5 2 +0.411812431184961 1 5 2 +0.645266731187802 2 5 2 +0.355603707761157 2 5 2 +0.390302407266954 1 5 2 +0.77292099479717 2 5 2 +0.252389739406451 2 5 2 +0.876313308318194 2 5 2 +0.58287646271364 1 5 2 +0.288751011208501 1 5 2 +0.431566553814174 2 5 2 +0.589008102697159 2 5 2 +0.829161903382978 2 5 2 +0.628538983915815 2 5 2 +0.621394296269468 2 5 2 +0.579181298723461 1 5 2 +0.727901955182036 2 5 2 +0.631355875603683 2 5 2 +0.860874327171326 2 5 2 +0.343433754720578 2 5 2 +0.5773516679935 2 5 2 +0.279980088015754 1 5 2 +0.297786857526651 2 5 2 +0.352028385521676 2 5 2 +0.718284977347952 1 5 2 +0.386491843574301 1 5 2 +0.733817922945248 2 5 2 +0.330039961374457 1 5 2 +0.562137897292054 2 5 2 +0.493718153186244 2 5 2 +0.384521948665274 2 5 2 +0.541882345607494 2 5 2 +0.899433484810609 1 5 2 +0.239671549366562 1 5 2 +0.903508962409293 2 5 2 +0.437062486670204 1 5 2 +0.47204968825503 2 5 2 +0.463054778904269 1 5 2 +0.337988558333662 1 5 2 +0.945352936382255 2 5 2 +0.319331081252348 1 5 2 +1.10841845342301 1 5 2 +0.727028251710372 1 5 2 +0.418174877683897 2 5 2 +0.83459503151359 2 5 2 +0.341783851166967 2 5 2 +0.409549042090065 2 5 2 +1.39638569008014 2 5 2 +0.282660568650718 2 5 2 +0.377102854745555 2 5 2 +0.559130208630371 1 5 2 +0.406889638299026 2 5 2 +0.874392505642916 2 5 2 +0.549370228408864 1 5 2 +0.702882081610178 1 5 2 +0.387196237366316 2 5 2 +0.254396302589893 2 5 2 +0.661141881945967 2 5 2 +0.353850139949898 2 5 2 +0.940367165724872 2 5 2 +0.257306998632217 2 5 2 +0.25948459184901 1 5 2 +0.282447060372156 2 5 2 +0.434471601810715 2 5 2 +0.531847599879585 2 5 2 +0.831639598690597 1 5 2 +0.476057177723281 1 5 2 +0.689960964595721 2 5 2 +0.762885904963058 2 5 2 +0.622547245815982 2 5 2 +0.467121910624321 2 5 2 +0.444998878563145 2 5 2 +0.610618477959826 2 5 2 +0.406946642496399 1 5 2 +1.0606594018364 2 5 2 +0.554673891728355 2 5 2 +0.249462989686065 2 5 2 +0.391072779178118 1 5 2 +0.502851072240924 2 5 2 +0.375337554826657 2 5 2 +0.459877784492948 1 5 2 
+0.37669560180294 2 5 2 +0.35853976242436 2 5 2 +0.299654614882035 2 5 2 +0.722167944915479 2 5 2 +0.470173817048549 2 5 2 +0.434030303678653 1 5 2 +0.38807847057575 2 5 2 +0.461429537822728 1 5 2 +0.810700838760469 2 5 2 +0.249051828431355 1 5 2 +0.317554462771952 2 5 2 +0.262256455504448 1 5 2 +0.2953873757043 2 5 2 +0.696729236528574 2 5 2 +0.528266303919385 2 5 2 +0.55142571005823 2 5 2 +0.586822599864067 2 5 2 +0.418740386790856 2 5 2 +0.335528681201811 1 5 2 +0.842500045429954 2 5 2 +0.285946789650486 2 5 2 +0.408435800240321 2 5 2 +0.413125087979462 2 5 2 +0.393886958711384 1 5 2 +0.253356738206904 2 5 2 +0.312045370960966 1 5 2 +0.798452098494563 2 5 2 +0.492707665345048 2 5 2 +0.716083098282908 2 5 2 +0.234541570552336 2 5 2 +1.35239146034105 2 5 2 +0.718129009054262 1 5 2 +0.718707390761021 2 5 2 +0.491926442341928 2 5 2 +0.654723295742436 2 5 2 +2.2535330863484 2 5 2 +0.427000474398908 2 5 2 +0.709004302987488 1 5 2 +0.516478985375353 2 5 2 +0.399159476675353 2 5 2 +0.442334074498277 2 5 2 +0.305764408172937 1 5 2 +0.374730267131031 2 5 2 +0.258402933869162 2 5 2 +0.356729146842492 2 5 2 +0.249659208975827 2 5 2 +0.513939361328391 2 5 2 +0.273198932158475 2 5 2 +0.299847483659362 1 5 2 +0.541237531522651 2 5 2 +0.392932766582102 1 5 2 +0.525638020825498 2 5 2 +0.333414128837149 1 5 2 +0.576494101054249 2 5 2 +0.511222521868291 1 5 2 +0.412380296323655 2 5 2 +0.399606860754613 2 5 2 +0.997015772263903 2 5 2 +0.443550015156711 2 5 2 +0.837716892291427 2 5 2 +0.390478993062678 1 5 2 +0.636766756207244 2 5 2 +0.737585807116948 2 5 2 +0.295250014308323 2 5 2 +0.608066528187843 2 5 2 +1.24729266077018 2 5 2 +0.41288102715652 1 5 2 +1.24953422410599 1 5 2 +0.351993007234192 2 5 2 +0.335694580769538 1 5 2 +0.423647638118759 1 5 2 +0.734734535205897 1 5 2 +0.260015569529333 1 5 2 +0.750757643884208 2 5 2 +0.595132916679284 2 5 2 +0.522805168311647 2 5 2 +0.843659849398215 2 5 2 +1.10699652185756 2 5 2 +0.391076744361603 2 5 2 +0.312026720740498 2 5 2 +0.607969730004942 2 5 2 +1.00812364162894 2 5 2 +0.43084197949303 2 5 2 +0.453046882496868 1 5 2 +0.370369606134876 1 5 2 +0.775483364298362 1 5 2 +0.396231522637068 2 5 2 +0.413528320853371 2 5 2 +1.46772948611848 2 5 2 +0.341708188807674 2 5 2 +0.419394806122751 1 5 2 +0.257895217630086 2 5 2 +0.415728977418159 2 5 2 +0.316442984226336 2 5 2 +0.968259065135459 2 5 2 +0.387870086772944 1 5 2 +0.716049265883702 2 5 2 +0.633645416807576 1 5 2 +0.953173953972706 2 5 2 +0.690728024005709 2 5 2 +1.10833203046202 2 5 2 +2.21338412745891 2 5 2 +0.324098654160468 2 5 2 +0.673932874285758 2 5 2 +0.800813516749607 2 5 2 +0.696084502169422 1 5 2 +0.355840028465312 1 5 2 +0.683207616367023 1 5 2 +0.320920277031855 2 5 2 +1.03878518793101 1 5 2 +0.609374353605396 2 5 2 +0.578037696967778 2 5 2 +0.322774773347465 2 5 2 +0.444321937393125 2 5 2 +0.706403834907649 2 5 2
diff --git a/R/inst/extdata/choiceRT_single_exampleData.txt b/R/inst/extdata/choiceRT_single_exampleData.txt
new file mode 100644
index 00000000..c925a82a
--- /dev/null
+++ b/R/inst/extdata/choiceRT_single_exampleData.txt
@@ -0,0 +1,1001 @@
+RT choice subjID condition
+0.238126253704183 1 1 1 +0.788334139249308 2 1 1 +0.524351202388138 2 1 1 +1.30852451859186 1 1 1 +0.244177006142252 1 1 1 +0.512534281943979 1 1 1 +0.570872020376975 2 1 1 +0.552056452179357 1 1 1 +0.298121361381527 2 1 1 +0.323864684737407 1 1 1 +0.542476237007045 1 1 1 +0.457829931981559 2 1 1 +0.214443816443766 1 1 1 +0.282641758197282 2 1 1 +0.577296397953241 1 1 1 +0.80363268095685 2 1 1 +0.630866151842371 2 1 1
+0.561537877283935 2 1 1 +0.447864619700588 1 1 1 +0.271079966516117 2 1 1 +0.286558308483825 2 1 1 +0.402853789793329 2 1 1 +0.261247265870358 2 1 1 +0.954323974954787 1 1 1 +0.233982750292549 2 1 1 +0.534509968347321 2 1 1 +1.38489463892966 1 1 1 +0.51382752398596 2 1 1 +0.877226598584423 2 1 1 +0.59661096895894 2 1 1 +0.653486235884601 2 1 1 +0.499754559401486 2 1 1 +0.234607668817517 2 1 1 +0.531596228343812 2 1 1 +0.517067421390557 2 1 1 +0.286714432990514 2 1 1 +0.280389415416944 1 1 1 +0.770848791728697 2 1 1 +0.242534242474749 2 1 1 +1.21402951161598 1 1 1 +0.254230773115822 1 1 1 +0.235607609409862 1 1 1 +0.893859490775577 2 1 1 +0.4248828895841 2 1 1 +0.806633683066691 1 1 1 +0.52846751057204 2 1 1 +0.283404274358359 2 1 1 +0.38261147359119 1 1 1 +0.366467333270928 2 1 1 +0.89906087165271 2 1 1 +0.473523175525898 2 1 1 +0.61052334774835 2 1 1 +0.348877038822898 2 1 1 +0.509848343105319 2 1 1 +0.714362767211544 2 1 1 +0.366653361634071 1 1 1 +0.504639516528354 2 1 1 +0.789291266027802 1 1 1 +0.220496731951155 2 1 1 +0.225368494671686 2 1 1 +0.935425512110651 1 1 1 +0.596093103065834 2 1 1 +0.751187828634478 2 1 1 +0.398369973292919 2 1 1 +0.803192132747886 1 1 1 +0.653642313281921 2 1 1 +0.759465190620081 2 1 1 +0.301158475484036 2 1 1 +0.468546635484975 2 1 1 +0.45136376067397 2 1 1 +0.225730206953994 1 1 1 +0.871541732294617 2 1 1 +1.02231746439083 2 1 1 +0.453015412970327 1 1 1 +0.198424664401742 2 1 1 +0.300531454438104 2 1 1 +0.473723469079576 1 1 1 +0.397993417619097 2 1 1 +0.990744721453659 2 1 1 +0.576175729949669 2 1 1 +0.200104343196362 2 1 1 +0.397950225292451 2 1 1 +0.595871677587168 1 1 1 +0.631283245367399 2 1 1 +0.225640535433198 2 1 1 +0.671278939344137 2 1 1 +0.562888330598081 1 1 1 +0.713201556333214 1 1 1 +0.429372024083033 2 1 1 +0.559437949496943 1 1 1 +0.747758954300599 2 1 1 +0.668556572370471 2 1 1 +0.179933868089705 1 1 1 +0.557946405103375 2 1 1 +0.781925159045207 2 1 1 +0.629998909619026 2 1 1 +0.73419031432803 1 1 1 +0.917048954570217 2 1 1 +1.27326330493077 1 1 1 +0.715099066135782 1 1 1 +0.561629162179203 2 1 1 +0.439342876745989 1 1 1 +0.212480989248291 2 1 1 +0.431997523692581 2 1 1 +0.504823085985375 1 1 1 +0.209443682735351 2 1 1 +0.535478168252645 1 1 1 +0.523309589143815 2 1 1 +0.292526841667345 2 1 1 +0.292598915819633 1 1 1 +0.383304045988112 2 1 1 +0.220801631101784 2 1 1 +0.240025256471961 2 1 1 +0.374617088048471 2 1 1 +0.225139772246513 2 1 1 +0.504765209525881 1 1 1 +0.536719069622199 1 1 1 +0.265730079523484 2 1 1 +0.788176797412021 2 1 1 +0.317054055572024 1 1 1 +0.984288372815029 1 1 1 +0.242704368769227 1 1 1 +1.16310843477133 1 1 1 +0.671512143534472 2 1 1 +0.235624281398265 2 1 1 +0.585278561981407 1 1 1 +0.313710683818167 1 1 1 +0.512453071354528 2 1 1 +0.318816084203735 2 1 1 +0.492290766723273 2 1 1 +0.869267244819061 2 1 1 +0.416347372277426 2 1 1 +1.25171209855063 1 1 1 +0.421124063985099 2 1 1 +0.330265759909128 2 1 1 +0.32442759213596 2 1 1 +0.499405834143408 2 1 1 +0.605809814064198 2 1 1 +0.441813584555195 1 1 1 +0.355018580197292 2 1 1 +0.284917824978601 2 1 1 +0.712509291577718 2 1 1 +0.360087543146394 2 1 1 +0.593758109292972 2 1 1 +0.21793928877364 2 1 1 +0.511916501085172 1 1 1 +1.65665966055448 2 1 1 +0.462252095429733 1 1 1 +0.375694324193756 2 1 1 +0.573330510111478 1 1 1 +0.624806212238662 1 1 1 +0.3221554867038 1 1 1 +0.394184550616579 1 1 1 +0.233898257977356 2 1 1 +0.616451835954318 2 1 1 +0.435745652986984 2 1 1 +0.309831870195393 1 1 1 +0.303567774481905 1 1 1 +0.268141575894932 2 1 1 +0.685546680374616 2 1 1 
+0.315857448984633 2 1 1 +0.358148021225324 1 1 1 +0.561597978729496 2 1 1 +0.575763837785002 2 1 1 +0.408409797786314 2 1 1 +0.456470478096314 2 1 1 +0.211008154807298 1 1 1 +0.537560426488747 1 1 1 +0.474119050536192 1 1 1 +0.349680702914349 2 1 1 +0.43874642118394 2 1 1 +0.741099937281951 2 1 1 +0.397490501092685 1 1 1 +0.455993632903328 2 1 1 +0.531917883353318 2 1 1 +0.544592749033783 2 1 1 +0.74575081631549 1 1 1 +0.482830763020483 2 1 1 +0.280104823458282 2 1 1 +0.674827163589054 2 1 1 +0.27232449929437 1 1 1 +0.33609945965603 2 1 1 +0.642687813456977 2 1 1 +0.45152584390343 2 1 1 +0.393612819207325 1 1 1 +0.403513480920972 2 1 1 +0.55270209232572 2 1 1 +0.282474350101989 1 1 1 +0.225686494015142 1 1 1 +0.3032960404285 1 1 1 +0.741695387202929 1 1 1 +0.23627922113503 1 1 1 +0.317661404771517 2 1 1 +0.365881950379812 1 1 1 +0.671407911504626 2 1 1 +0.6327672361385 2 1 1 +0.408730216599132 2 1 1 +1.05295329016947 2 1 1 +0.647929253014634 2 1 1 +0.272505386795946 1 1 1 +0.477000937785718 2 1 1 +0.593679670773664 2 1 1 +0.485804513765726 1 1 1 +0.685108031619407 2 1 1 +0.463863491717212 2 1 1 +0.280869562583906 2 1 1 +0.484442256816249 1 1 1 +0.374203282894535 1 1 1 +0.205270568757322 2 1 1 +0.285730023779721 1 1 1 +0.420031671350127 2 1 1 +0.304140334800815 2 1 1 +0.455400240565684 1 1 1 +0.319295225911816 2 1 1 +0.853456173431349 2 1 1 +1.28296521539738 2 1 1 +0.402276812108308 2 1 1 +1.60365089898574 2 1 1 +0.513436951554669 2 1 1 +0.635287982445216 2 1 1 +0.756725913746622 1 1 1 +0.538411817875012 2 1 1 +0.252807751300543 1 1 1 +0.306493263230248 2 1 1 +0.328940637779731 1 1 1 +0.295149174376265 2 1 1 +0.428772773247104 2 1 1 +0.710257617392816 2 1 1 +2.1398843380733 2 1 1 +0.390964230021283 1 1 1 +0.30264732818644 1 1 1 +0.24604561365542 2 1 1 +0.326118394989355 2 1 1 +0.444302762917929 2 1 1 +0.994994120515054 2 1 1 +0.329747734506691 2 1 1 +0.244820417609073 2 1 1 +0.434344901812039 2 1 1 +0.245526039713125 2 1 1 +0.371387027622059 2 1 1 +0.396016682526436 2 1 1 +0.868293655068221 2 1 1 +0.339580118779972 2 1 1 +0.377321305638716 1 1 1 +0.352058350011174 2 1 1 +0.523222420484193 1 1 1 +1.63006360968846 2 1 1 +0.403780279358626 1 1 1 +0.33450821318739 2 1 1 +0.246049648436144 1 1 1 +0.73900563703035 2 1 1 +0.70659002598455 1 1 1 +0.251224036209508 1 1 1 +0.279682884105716 2 1 1 +0.446835970242547 2 1 1 +0.344773155307199 2 1 1 +1.18156313011751 2 1 1 +0.40159469187599 1 1 1 +0.662618250249293 2 1 1 +0.484088636200293 2 1 1 +0.249177412018199 1 1 1 +0.635565342005854 2 1 1 +0.237344366033974 2 1 1 +0.262837667936303 2 1 1 +0.223824529758815 2 1 1 +0.544851049052962 1 1 1 +1.22941904309934 1 1 1 +1.18790150456476 2 1 1 +0.397012831119724 2 1 1 +0.542500816372649 1 1 1 +0.215934076714995 2 1 1 +1.30455859046761 2 1 1 +0.536099297245265 2 1 1 +0.414264536316934 2 1 1 +0.777679859044325 2 1 1 +0.697575719549679 1 1 1 +0.704761484394448 2 1 1 +0.286893353427223 1 1 1 +0.681973124438239 2 1 1 +0.397462829482937 2 1 1 +0.257670640245336 2 1 1 +0.236649584180499 2 1 1 +0.436790435094707 1 1 1 +0.574656753851278 1 1 1 +0.253082319735779 1 1 1 +0.61806692862892 1 1 1 +0.46661603680114 2 1 1 +0.195332992231242 1 1 1 +0.229629897436442 1 1 1 +0.992646398039104 2 1 1 +0.94136783174252 2 1 1 +0.837333099387364 1 1 1 +0.430204780391451 2 1 1 +1.37515921760222 2 1 1 +0.89875556054097 2 1 1 +0.696864042518777 2 1 1 +0.332933586834615 2 1 1 +0.334070550417085 2 1 1 +0.38676132253602 2 1 1 +0.306404665389991 2 1 1 +0.478254432945422 2 1 1 +0.601997570889218 2 1 1 +0.373642558748753 2 1 1 
+0.29388256861859 1 1 1 +0.403146732540824 2 1 1 +0.754379822737839 2 1 1 +0.20827688411218 2 1 1 +0.211975975201092 2 1 1 +0.591340246795799 1 1 1 +0.263322621163444 2 1 1 +0.525170614901281 1 1 1 +0.206823345071543 1 1 1 +0.935520204615524 1 1 1 +0.550910831841 2 1 1 +0.232504114652867 2 1 1 +0.391975720570035 1 1 1 +0.52537232580037 2 1 1 +0.604827669281913 2 1 1 +0.440173374557048 2 1 1 +0.294878838994327 2 1 1 +0.323868811622971 1 1 1 +0.240824506056104 2 1 1 +0.423271049333481 2 1 1 +0.849356591210965 2 1 1 +0.335818515496422 2 1 1 +0.538745656799135 2 1 1 +0.4208751745964 2 1 1 +0.55146359110108 2 1 1 +0.467882029849217 2 1 1 +0.567777388073783 2 1 1 +0.311394332684366 1 1 1 +1.33531192845093 1 1 1 +0.368867535882799 2 1 1 +0.340092989922591 2 1 1 +0.299811445088077 2 1 1 +0.198131285653104 2 1 1 +1.57688580580023 2 1 1 +0.671467937043381 2 1 1 +0.422481878776226 2 1 1 +0.48313672040092 1 1 1 +0.473697344635179 1 1 1 +0.68951966048344 2 1 1 +0.633967567703449 2 1 1 +0.382761102393661 1 1 1 +0.68736056335558 1 1 1 +0.677810537621417 2 1 1 +0.249614606331914 2 1 1 +0.755321813543998 2 1 1 +0.370198385669219 1 1 1 +0.697638915493631 1 1 1 +0.684828719775979 2 1 1 +0.502349799392655 1 1 1 +0.583605969114717 1 1 1 +0.517041977935336 1 1 1 +0.411670106317747 1 1 1 +0.207610898625408 1 1 1 +0.414006631133478 2 1 1 +0.921999310392829 1 1 1 +0.409507167245215 2 1 1 +0.584716070617761 2 1 1 +0.666513112126972 2 1 1 +0.233005827550518 2 1 1 +0.60770657746225 1 1 1 +0.221784346267773 2 1 1 +0.771391695716424 2 1 1 +1.21988159355549 1 1 1 +0.408933678664394 2 1 1 +1.03374983542661 2 1 1 +0.939198644733114 2 1 1 +0.962067734082042 2 1 1 +0.473406448845882 2 1 1 +0.223644602219167 2 1 1 +1.12139515597077 1 1 1 +0.299025722625131 2 1 1 +0.55009896091157 1 1 1 +0.578799507502895 1 1 1 +0.962097234341087 1 1 1 +0.348861796367042 2 1 1 +0.35773121339554 2 1 1 +0.246740510307971 1 1 1 +0.407111627051893 1 1 1 +0.550930871978825 1 1 1 +0.422754497543909 2 1 1 +0.758063342099552 2 1 1 +0.380847347114823 2 1 1 +0.377055603284598 2 1 1 +0.65608839650545 1 1 1 +1.42357385911498 2 1 1 +0.232798506755752 2 1 1 +0.539867634108279 2 1 1 +0.392489725525737 2 1 1 +0.367110223983889 2 1 1 +0.839639947757427 2 1 1 +0.592327430792799 2 1 1 +0.239126262427817 2 1 1 +0.328625329636268 2 1 1 +0.284257957756146 1 1 1 +0.331590178883346 1 1 1 +0.620620411662111 2 1 1 +0.755967038010479 2 1 1 +0.331900743408574 2 1 1 +0.421146686045199 1 1 1 +1.07476503410067 2 1 1 +0.507112598176372 1 1 1 +0.311566718621004 2 1 1 +0.301319320070233 2 1 1 +0.393257034342845 2 1 1 +0.673521481008061 2 1 1 +0.291567562966672 2 1 1 +1.05249328382332 2 1 1 +0.694698152076518 2 1 1 +0.733055920143737 2 1 1 +0.334350894107303 2 1 1 +0.542807697456418 1 1 1 +0.579281169009386 1 1 1 +0.320837583848137 1 1 1 +0.488074071042795 1 1 1 +0.213060081069537 2 1 1 +0.237230647833275 2 1 1 +0.237572229668373 1 1 1 +0.241805498724672 1 1 1 +0.21505246069559 2 1 1 +0.625069689033177 2 1 1 +0.391789762960315 1 1 1 +0.360924641936915 2 1 1 +0.434831888026175 1 1 1 +1.53947356804897 2 1 1 +0.390459073072731 2 1 1 +0.327186719063663 1 1 1 +0.451681415339723 1 1 1 +0.551841771615269 2 1 1 +0.41039773179749 1 1 1 +0.926634118987433 2 1 1 +0.813362027443744 2 1 1 +0.632371052186083 2 1 1 +1.07271976627787 1 1 1 +0.347281073927582 1 1 1 +0.44423560152159 1 1 1 +0.576366534316911 2 1 1 +0.279713029952993 2 1 1 +0.881466843024701 2 1 1 +0.374654223890455 1 1 1 +0.246340230252564 1 1 1 +0.46051090791758 2 1 1 +0.610478508455545 1 1 1 +0.290070606427311 2 1 1 
+0.544420557842503 1 1 1 +0.776693279362721 1 1 1 +0.235406028367375 2 1 1 +0.239531675743827 1 1 1 +0.44775078332261 2 1 1 +0.272084709816774 1 1 1 +0.490027056594032 2 1 1 +1.11466956380519 2 1 1 +0.270448404879725 2 1 1 +0.442949902437612 1 1 1 +0.570651632322539 1 1 1 +0.32265845661882 2 1 1 +0.407435441210764 2 1 1 +0.200085052390358 2 1 1 +0.358511835895485 2 1 1 +1.2431214333383 2 1 1 +0.696171754957839 1 1 1 +0.2777627469669 1 1 1 +0.429359856138122 2 1 1 +0.340524177360971 2 1 1 +0.199944337376957 2 1 1 +0.398334292684942 2 1 1 +0.388541579168816 2 1 1 +0.398547679838622 1 1 1 +0.839309822360769 2 1 1 +0.280253849702043 2 1 1 +0.547345720269382 2 1 1 +0.376647832731017 2 1 1 +0.455530332435412 2 1 1 +0.334196466045242 2 1 1 +0.759777271734527 2 1 1 +1.10869967729068 2 1 1 +0.222920909328599 2 1 1 +0.243727194101031 2 1 1 +0.331283374352904 2 1 1 +0.489803545251022 1 1 1 +0.2736011848833 2 1 1 +0.432409628386385 1 1 1 +0.447747022319498 2 1 1 +0.736283852147818 2 1 1 +0.461500847594122 1 1 1 +0.359367876631285 1 1 1 +0.418098062593873 2 1 1 +0.502693165924066 1 1 1 +0.260188072876792 1 1 1 +0.348437996297828 1 1 1 +1.57562306974174 2 1 1 +0.316108820930013 2 1 1 +0.421685918698271 2 1 1 +0.578695918727619 2 1 1 +1.12879309366769 2 1 1 +1.03916993441652 2 1 1 +0.492207222672778 1 1 1 +0.33283217994747 2 1 1 +0.39422420306568 2 1 2 +0.362300838201913 1 1 2 +0.469662901313467 2 1 2 +0.820030023322582 1 1 2 +0.234551440695508 2 1 2 +0.331679248955791 1 1 2 +0.527229640837085 2 1 2 +0.91734807805308 2 1 2 +0.319175515877037 2 1 2 +0.651053459158852 1 1 2 +0.661459624685597 2 1 2 +0.281279784597852 2 1 2 +0.342078529279457 1 1 2 +0.3636800828231 2 1 2 +0.484151346003298 1 1 2 +0.658827635325395 1 1 2 +0.622208937699232 1 1 2 +0.580811030835409 2 1 2 +0.441808620117506 1 1 2 +0.36060243933493 2 1 2 +0.831194064165385 2 1 2 +0.361776006347027 2 1 2 +0.777351339265196 1 1 2 +0.278293909155803 2 1 2 +0.278507100800553 2 1 2 +0.884402648451047 1 1 2 +0.342560342613834 2 1 2 +0.809676649841315 2 1 2 +0.516858099569803 1 1 2 +0.634645370682583 2 1 2 +0.249686099229778 1 1 2 +1.33141985698474 1 1 2 +0.453726915386914 2 1 2 +0.290504549136735 2 1 2 +0.487095756746479 2 1 2 +0.346501172556082 2 1 2 +0.393430828426059 1 1 2 +0.504449494787339 2 1 2 +0.367999687491587 2 1 2 +0.352469038071531 1 1 2 +0.234560015153837 2 1 2 +0.940841504372444 1 1 2 +0.2046902513565 2 1 2 +0.461341997193658 1 1 2 +0.610339950737745 2 1 2 +0.446921029186028 1 1 2 +0.515591108864551 2 1 2 +1.58260395843454 2 1 2 +0.344764743329778 2 1 2 +0.427254054893139 2 1 2 +0.516158776880019 1 1 2 +1.2612303673015 2 1 2 +0.613528615965816 2 1 2 +0.267963577139406 2 1 2 +0.307594651280269 2 1 2 +0.24101706884499 1 1 2 +0.455753268732021 2 1 2 +0.405040912881131 2 1 2 +0.288094483330521 1 1 2 +0.545610622237084 2 1 2 +0.452142838999807 1 1 2 +0.594527943497764 1 1 2 +0.88116621589308 2 1 2 +0.277767297820233 2 1 2 +0.279551393619652 2 1 2 +0.365460511604365 2 1 2 +0.556212898406868 2 1 2 +0.328560209842821 1 1 2 +0.531013993625691 1 1 2 +0.231888430468412 1 1 2 +0.677110774143983 2 1 2 +0.453921989085917 2 1 2 +0.459571696136957 2 1 2 +0.393356837769246 1 1 2 +0.511202810478497 1 1 2 +0.693614307574487 1 1 2 +0.240863923388269 2 1 2 +0.321852817508144 1 1 2 +0.270908403919833 2 1 2 +0.820724000663825 1 1 2 +0.235189573689813 2 1 2 +0.326155088030317 2 1 2 +0.631590224724998 1 1 2 +0.441990726662034 1 1 2 +0.84336570752273 2 1 2 +0.359995826600722 2 1 2 +0.251400135935091 1 1 2 +0.412798716611553 1 1 2 +0.257997459005081 1 1 2 
+0.324911808695266 2 1 2 +0.869954063020224 2 1 2 +0.316774804913553 1 1 2 +0.802438949561354 2 1 2 +0.753010120858102 2 1 2 +0.50447570028204 1 1 2 +0.472994968867572 2 1 2 +0.365558799398694 2 1 2 +0.355836646801112 1 1 2 +0.571157381310202 2 1 2 +0.634686215618027 2 1 2 +0.270208965991148 2 1 2 +0.328585338874615 1 1 2 +0.384434393299423 2 1 2 +0.316023575731398 1 1 2 +0.494817395995112 2 1 2 +0.300504460120145 2 1 2 +0.347783059904907 2 1 2 +1.02851702876777 2 1 2 +0.364863367923789 2 1 2 +0.460777943415657 2 1 2 +0.382793622325279 2 1 2 +0.273403607994913 2 1 2 +0.609426470046583 2 1 2 +0.297792901344866 1 1 2 +0.370479141756967 2 1 2 +0.882238434259769 1 1 2 +0.496857265474561 2 1 2 +0.277702369672893 2 1 2 +0.446926962878622 1 1 2 +0.36757607051588 1 1 2 +0.557136267106436 2 1 2 +1.00333007744122 2 1 2 +0.760219976689289 2 1 2 +0.332068843559009 2 1 2 +0.548961093445682 2 1 2 +0.313465233961872 2 1 2 +0.550216771807154 1 1 2 +0.29794278574353 1 1 2 +0.234198048951483 2 1 2 +0.273445183254746 2 1 2 +0.574886295740124 2 1 2 +0.258382409058055 1 1 2 +0.409845586460725 2 1 2 +0.326206723132256 2 1 2 +0.642595268751117 2 1 2 +0.232356531769144 2 1 2 +1.70736951927255 1 1 2 +0.274687338325608 2 1 2 +0.40877430223826 2 1 2 +0.365729356985064 2 1 2 +0.6050000403314 2 1 2 +0.592011487134505 2 1 2 +0.557179211825432 1 1 2 +0.873296855773591 1 1 2 +0.216826762785491 1 1 2 +0.517886780128018 2 1 2 +0.398323720600925 1 1 2 +1.12139464302831 2 1 2 +0.249538486660475 2 1 2 +0.360304338880141 1 1 2 +0.627773044075362 2 1 2 +0.996274959906684 2 1 2 +0.202797819180771 1 1 2 +0.383153769101205 1 1 2 +0.324797856324902 1 1 2 +0.239421301531662 2 1 2 +0.24289898785908 1 1 2 +0.547746136913622 1 1 2 +0.386255965400912 2 1 2 +0.60223673049116 2 1 2 +0.549261776998216 2 1 2 +0.395992071688511 2 1 2 +0.217402932038072 2 1 2 +0.295305459515413 2 1 2 +0.447909826549637 2 1 2 +0.71950962867128 2 1 2 +0.794816583397332 1 1 2 +0.241318968932987 2 1 2 +0.556293493098233 2 1 2 +0.238208378562322 2 1 2 +0.499247181746743 1 1 2 +0.317050968536836 2 1 2 +0.322686857249444 2 1 2 +0.71276761076242 2 1 2 +0.301030966624334 2 1 2 +0.336641004565653 2 1 2 +0.812046026214206 2 1 2 +0.270220261704131 2 1 2 +0.701954145112022 2 1 2 +0.43964095073941 2 1 2 +0.384704421988213 2 1 2 +0.501487364681699 2 1 2 +0.455023781459671 2 1 2 +0.332474164305816 2 1 2 +0.567142874907982 2 1 2 +0.253324335182053 2 1 2 +0.444329558298367 2 1 2 +0.750457236950695 2 1 2 +0.292500297080332 2 1 2 +0.319745451630673 2 1 2 +0.286210384865368 2 1 2 +0.283637752128579 1 1 2 +0.236044970372654 2 1 2 +0.606532173767213 1 1 2 +1.32620595835061 2 1 2 +0.49881945892801 2 1 2 +1.00559201100603 2 1 2 +0.498129494834216 2 1 2 +0.682007132416635 2 1 2 +0.521249610973914 2 1 2 +0.229929750671033 2 1 2 +1.12814610238938 1 1 2 +0.74135939367203 2 1 2 +1.14362542630031 2 1 2 +0.261969169934014 1 1 2 +0.240668217312327 1 1 2 +1.59220860546119 2 1 2 +0.459005868330534 2 1 2 +0.290018768199601 2 1 2 +0.204589440835719 1 1 2 +0.619039312673667 2 1 2 +0.667083334382893 1 1 2 +0.359845320132008 1 1 2 +0.912937103767445 2 1 2 +0.522430834145349 2 1 2 +0.297762304149053 1 1 2 +0.276240304783596 2 1 2 +0.399051717562123 2 1 2 +0.404254481667734 2 1 2 +1.23765251352633 1 1 2 +1.2213528437925 2 1 2 +0.554106620313858 2 1 2 +0.513543854359058 2 1 2 +0.718560875752879 2 1 2 +0.299045404005468 1 1 2 +0.197161504481574 2 1 2 +0.355424533393654 2 1 2 +0.601322385280793 2 1 2 +0.31408110064814 1 1 2 +0.681928297252204 2 1 2 +0.257899160580357 1 1 2 +0.331853308281021 2 1 2 
+0.932271244383807 2 1 2 +0.762290747363875 1 1 2 +0.610315223598599 2 1 2 +0.508310743979851 2 1 2 +0.293542339726516 1 1 2 +0.249532498898509 2 1 2 +0.240661946068682 1 1 2 +0.480573774515142 2 1 2 +0.26503112695042 1 1 2 +0.745033574361612 2 1 2 +0.313418912457887 2 1 2 +0.428468490020874 2 1 2 +0.619836697801129 2 1 2 +0.404856983338945 2 1 2 +0.225135719018744 2 1 2 +0.247203725168153 2 1 2 +0.473126435201081 2 1 2 +0.758881984366834 2 1 2 +0.530103620429835 2 1 2 +0.609787747426196 2 1 2 +0.42023331047044 1 1 2 +0.294545387085857 1 1 2 +0.311952071319945 2 1 2 +0.793299410776987 2 1 2 +0.376179978035794 2 1 2 +0.230418084856786 2 1 2 +0.25879024565358 2 1 2 +0.264796453159985 2 1 2 +0.745485785923675 2 1 2 +0.224026456721164 2 1 2 +0.6030135494348 1 1 2 +0.489733962171922 2 1 2 +0.39466687509252 1 1 2 +0.552205654391275 2 1 2 +0.575332864606377 2 1 2 +0.673079198373531 1 1 2 +0.346849143283538 1 1 2 +0.384205850032696 1 1 2 +0.382157410278578 2 1 2 +0.294710963958947 2 1 2 +0.487164402385991 1 1 2 +0.571768796864126 2 1 2 +0.243155946253846 1 1 2 +0.366816988109117 1 1 2 +0.556560232965345 2 1 2 +0.842238270178048 2 1 2 +0.630587019970835 1 1 2 +0.849346128585895 2 1 2 +0.301910596058078 1 1 2 +0.494388435823995 1 1 2 +0.635279696032103 2 1 2 +0.244333041807845 1 1 2 +0.462722638825509 2 1 2 +0.355580520263025 1 1 2 +0.419159925222802 2 1 2 +0.229441499742296 2 1 2 +0.272011206196529 2 1 2 +0.457900548880182 1 1 2 +0.42581270796691 2 1 2 +0.192946477357373 2 1 2 +0.920226167527353 2 1 2 +0.870356567626495 2 1 2 +0.506429857300226 1 1 2 +1.35129991323996 2 1 2 +0.81782131154567 1 1 2 +0.312753351203148 2 1 2 +0.240147726849663 2 1 2 +0.331594506915926 2 1 2 +0.303605405427918 2 1 2 +1.3692312598303 2 1 2 +0.464969591870211 2 1 2 +0.365081121121442 2 1 2 +0.53678523283272 1 1 2 +0.362543809949933 2 1 2 +0.300077415892361 2 1 2 +0.565255726546058 2 1 2 +0.508969800017276 1 1 2 +0.197931347436034 2 1 2 +0.425448002968464 2 1 2 +0.917689004198981 2 1 2 +0.265209389680314 1 1 2 +0.399534037154238 1 1 2 +0.292118455947818 2 1 2 +0.352289208022807 2 1 2 +0.800608594982045 2 1 2 +0.251768687680971 1 1 2 +0.280448242529617 2 1 2 +0.243473452337431 1 1 2 +0.286916333216323 2 1 2 +0.838049565027792 1 1 2 +0.244529227888586 1 1 2 +0.71863102671265 2 1 2 +1.98916958946463 2 1 2 +0.238806320688673 2 1 2 +0.693785935221629 2 1 2 +0.495890282805749 1 1 2 +0.191174545766406 2 1 2 +0.836607694563896 2 1 2 +0.391165841939288 2 1 2 +0.566993167116615 2 1 2 +0.240715729525045 2 1 2 +0.354039896192607 1 1 2 +1.21434836670206 2 1 2 +0.48154154974369 2 1 2 +0.798332748413893 2 1 2 +0.650917466844914 2 1 2 +0.384224495536896 1 1 2 +0.945026137940947 2 1 2 +0.418508744931679 2 1 2 +0.659365265496408 1 1 2 +0.271823834279208 2 1 2 +0.43920360190222 2 1 2 +0.449532948575899 2 1 2 +1.02510373135742 2 1 2 +0.49889822568904 1 1 2 +1.23542122006285 1 1 2 +0.274287087904293 2 1 2 +0.673318487358746 2 1 2 +0.397619138281994 2 1 2 +1.0555886269523 2 1 2 +0.229089202292979 1 1 2 +0.697871360095817 2 1 2 +0.614287525537126 1 1 2 +0.322578991679628 1 1 2 +0.605688688250448 2 1 2 +0.534481750546624 2 1 2 +0.565101205666666 1 1 2 +0.378499737439249 1 1 2 +0.52995012536057 2 1 2 +2.45554336829165 1 1 2 +0.744067895318506 2 1 2 +0.673980171567151 2 1 2 +0.33496535179204 2 1 2 +0.703852317870538 2 1 2 +0.623851718541645 2 1 2 +0.275936871629696 2 1 2 +0.245843960416957 2 1 2 +0.220780887604494 2 1 2 +0.585098991357547 2 1 2 +0.343992796279959 1 1 2 +0.580010432096859 2 1 2 +0.377174286172397 2 1 2 +0.614794867960386 1 1 2 
+0.235740390671863 1 1 2 +0.498093604359181 2 1 2 +0.422668225465882 2 1 2 +0.85458472173833 2 1 2 +0.318077105190021 2 1 2 +0.660599386236034 2 1 2 +0.44253879597235 2 1 2 +0.482452162905769 2 1 2 +0.569360166827625 2 1 2 +1.5195957937337 1 1 2 +0.335177741698269 2 1 2 +0.241392133198455 2 1 2 +0.503619286339201 2 1 2 +0.578489345701315 2 1 2 +0.327768204464024 2 1 2 +0.436095089114902 2 1 2 +0.424989568541 1 1 2 +0.214045608149353 2 1 2 +0.306116924393253 1 1 2 +0.476975246716608 2 1 2 +0.756750965776553 2 1 2 +0.312822136071239 2 1 2 +0.470827169455528 1 1 2 +0.37315029361616 2 1 2 +0.685155315108113 2 1 2 +0.959530818891534 2 1 2 +0.913595420136271 2 1 2 +0.595438752846243 2 1 2 +0.36802176344941 2 1 2 +0.418544504566566 2 1 2 +0.363048213885529 2 1 2 +0.254338756573215 1 1 2 +0.730957519992839 1 1 2 +0.263046554830887 1 1 2 +0.46094182659418 2 1 2 +0.531059000798822 2 1 2 +0.353280265477637 2 1 2 +0.464342980616116 2 1 2 +0.229724823533327 2 1 2 +0.504945673660676 2 1 2 +0.532493395334424 2 1 2 +0.423801763698387 1 1 2 +0.787113721614964 2 1 2 +0.223160559034952 2 1 2 +0.419595856308554 2 1 2 +0.396648316145306 2 1 2 +0.308908241587595 2 1 2 +0.627802576140553 2 1 2 +0.64888860721256 2 1 2 +0.738730808101364 2 1 2 +0.658745774579089 1 1 2 +0.679191956616965 1 1 2 +0.278164538209912 1 1 2 +0.205911141408479 2 1 2 +0.241638399787725 2 1 2 +0.24858355547484 1 1 2 +0.73740496979995 2 1 2 +0.247935082999496 2 1 2 +0.826311099617232 2 1 2 +0.590607775557781 1 1 2 +0.351249908681046 1 1 2 +0.370792468725378 2 1 2 +0.389722068994738 1 1 2 +0.251157837165118 2 1 2 +0.663087218040623 2 1 2 +0.454359737429872 2 1 2 +0.435474095638232 2 1 2 +0.284410206592962 2 1 2 +0.344506290138683 2 1 2 +1.01768620078799 1 1 2 +0.331330031800195 1 1 2 +0.277021859762052 1 1 2 +0.347332671037543 1 1 2 +0.286836805838407 1 1 2 +0.340934631295205 2 1 2 +1.22270556676254 1 1 2 +0.360534849486478 1 1 2 +0.359892263518994 2 1 2 +0.552595743599511 2 1 2 +0.301744081404754 2 1 2 +0.416037514267758 2 1 2 +0.541344562283886 2 1 2 +0.579986637345764 2 1 2 +0.221912718773351 2 1 2 +0.465245817277264 2 1 2 +0.474738754014913 2 1 2 +1.70409538281312 2 1 2 +1.02235518855245 2 1 2 +0.301214497598036 1 1 2 +0.991176433131545 2 1 2 +0.401432084705109 2 1 2 +1.11715380433533 2 1 2 +0.81719064511715 2 1 2 +0.549517654685354 1 1 2 +0.251345033237621 2 1 2 +0.357859075575934 2 1 2 +0.90132423193762 2 1 2 +0.272936669704676 1 1 2 +0.455508577827349 2 1 2 +0.861185664428614 2 1 2 +0.266987292082781 2 1 2 +0.578879341650739 2 1 2 +0.649256823455797 2 1 2 +0.418711362089519 2 1 2 +0.433426379919396 2 1 2 +0.642462173639701 1 1 2 +0.406446379518523 2 1 2 +0.290863063788828 1 1 2 +0.395803052313048 2 1 2 +0.311087619708231 2 1 2 +0.279185686505835 1 1 2 +0.412823984876793 1 1 2 +0.314508721309633 2 1 2 +0.417280760034167 2 1 2 +0.357813047077128 2 1 2 +0.256161295149574 2 1 2 +0.240326641914136 2 1 2 +0.469105961018824 2 1 2 +0.23311026462364 2 1 2 +0.219699590325278 2 1 2 +0.267828103451759 2 1 2 +0.324090708482963 1 1 2 +0.882370084866449 2 1 2 +0.296556033418114 2 1 2 +0.535028311840886 1 1 2 diff --git a/R/inst/extdata/cra_exampleData.txt b/R/inst/extdata/cra_exampleData.txt new file mode 100644 index 00000000..a658ea69 --- /dev/null +++ b/R/inst/extdata/cra_exampleData.txt @@ -0,0 +1,541 @@ +subjID trial_number RT prob reward_var reward_fix outcome types ambig choice +1 1 2579 0.5 342 50 0 ambiguous 0.75 0 +1 2 1736 0.375 91 50 0 low 0 1 +1 3 1006 0.5 342 50 342 ambiguous 0.5 1 +1 4 1374 0.375 183 50 0 low 0 1 +1 5 1119 0.25 648 50 
648 low 0 1 +1 6 1147 0.375 648 50 0 low 0 1 +1 7 1034 0.375 99 50 99 low 0 1 +1 8 953 0.375 98 50 98 low 0 1 +1 9 1114 0.5 54 50 50 ambiguous 0.5 0 +1 10 3243 0.5 99 50 50 ambiguous 0.5 0 +1 11 2955 0.5 340 50 50 ambiguous 0.75 0 +1 12 1105 0.5 91 50 50 ambiguous 0.5 0 +1 13 920 0.375 342 50 0 low 0 1 +1 14 242 0.125 98 50 0 low 0 0 +1 15 1665 0.25 181 50 0 low 0 0 +1 16 801 0.5 183 50 183 ambiguous 0.25 1 +1 17 793 0.5 183 50 0 ambiguous 0.75 0 +1 18 816 0.5 46 50 0 ambiguous 0.5 0 +1 19 1009 0.375 340 50 0 low 0 1 +1 20 191 0.25 46 50 50 low 0 0 +1 21 64 0.25 342 50 0 low 0 0 +1 22 807 0.125 648 50 0 low 0 1 +1 23 1047 0.25 98 50 50 low 0 0 +1 24 401 0.125 91 50 0 low 0 0 +1 25 1009 0.125 342 50 50 low 0 0 +1 26 707 0.5 99 50 0 ambiguous 0.25 1 +1 27 516 0.5 181 50 181 ambiguous 0.25 1 +1 28 66 0.375 48 50 0 low 0 0 +1 29 2206 0.5 340 50 0 ambiguous 0.5 0 +1 30 826 0.5 343 50 343 ambiguous 0.5 1 +1 31 391 0.125 343 50 50 low 0 0 +1 32 293 0.25 54 50 0 low 0 0 +1 33 310 0.5 648 50 648 ambiguous 0.5 1 +1 34 923 0.375 648 50 0 low 0 1 +1 35 744 0.5 48 50 0 ambiguous 0.25 0 +1 36 278 0.5 48 50 0 ambiguous 0.75 0 +1 37 450 0.375 46 50 0 low 0 0 +1 38 267 0.5 654 50 0 ambiguous 0.5 1 +1 39 169 0.5 54 50 50 ambiguous 0.25 0 +1 40 179 0.5 46 50 0 ambiguous 0.75 0 +1 41 142 0.5 648 50 0 ambiguous 0.25 1 +1 42 863 0.5 648 50 0 ambiguous 0.75 0 +1 43 75 0.25 183 50 0 low 0 0 +1 44 183 0.25 91 50 0 low 0 0 +1 45 84 0.125 181 50 50 low 0 0 +1 46 2191 0.375 343 50 0 low 0 1 +1 47 269 0.125 648 50 0 low 0 0 +1 48 396 0.125 99 50 50 low 0 0 +1 49 137 0.5 654 50 0 ambiguous 0.25 1 +1 50 124 0.5 342 50 0 ambiguous 0.25 1 +1 51 1926 0.5 91 50 50 ambiguous 0.75 0 +1 52 96 0.125 183 50 50 low 0 0 +1 53 59 0.5 98 50 0 ambiguous 0.75 0 +1 54 342 0.125 340 50 0 low 0 0 +1 55 157 0.375 54 50 50 low 0 0 +1 56 122 0.5 183 50 0 ambiguous 0.75 0 +1 57 29 0.125 48 50 50 low 0 0 +1 58 985 0.375 183 50 0 low 0 1 +1 59 142 0.5 183 50 183 ambiguous 0.25 1 +1 60 246 0.125 54 50 50 low 0 0 +1 61 254 0.5 99 50 50 ambiguous 0.75 0 +1 62 107 0.5 648 50 648 ambiguous 0.25 1 +1 63 86 0.5 343 50 0 ambiguous 0.25 1 +1 64 50 0.25 48 50 50 low 0 0 +1 65 1507 0.125 183 50 50 low 0 0 +1 66 247 0.25 99 50 50 low 0 0 +1 67 21 0.5 98 50 98 ambiguous 0.25 1 +1 68 276 0.5 183 50 0 ambiguous 0.5 0 +1 69 1697 0.25 343 50 0 low 0 0 +1 70 208 0.5 648 50 648 ambiguous 0.5 1 +1 71 874 0.5 183 50 50 ambiguous 0.5 0 +1 72 4451 0.25 654 50 50 low 0 0 +1 73 255 0.5 181 50 0 ambiguous 0.75 0 +1 74 220 0.5 654 50 50 ambiguous 0.75 0 +1 75 2058 0.5 46 50 0 ambiguous 0.25 0 +1 76 198 0.125 46 50 50 low 0 0 +1 77 293 0.5 91 50 91 ambiguous 0.25 1 +1 78 133 0.5 54 50 50 ambiguous 0.75 0 +1 79 281 0.375 181 50 181 low 0 1 +1 80 63 0.5 48 50 0 ambiguous 0.5 0 +1 81 1945 0.5 181 50 181 ambiguous 0.5 1 +1 82 238 0.25 183 50 50 low 0 0 +1 83 210 0.25 340 50 0 low 0 0 +1 84 3110 0.5 648 50 648 ambiguous 0.75 1 +1 85 660 0.5 343 50 0 ambiguous 0.75 0 +1 86 13 0.5 98 50 0 ambiguous 0.5 0 +1 87 744 0.375 654 50 0 low 0 1 +1 88 3835 0.125 654 50 0 low 0 0 +1 89 72 0.25 648 50 0 low 0 1 +1 90 90 0.5 340 50 340 ambiguous 0.25 1 +2 1 857 0.375 647 50 0 low 0 1 +2 2 437 0.5 99 50 0 ambiguous 0.75 0 +2 3 289 0.5 96 50 0 ambiguous 0.25 1 +2 4 514 0.5 184 50 184 ambiguous 0.75 1 +2 5 233 0.5 336 50 336 ambiguous 0.25 1 +2 6 321 0.375 180 50 0 low 0 1 +2 7 266 0.5 47 50 50 ambiguous 0.75 0 +2 8 288 0.375 181 50 0 low 0 0 +2 9 480 0.25 647 50 50 low 0 0 +2 10 330 0.5 180 50 180 ambiguous 0.25 1 +2 11 421 0.25 180 50 0 low 0 1 +2 12 290 0.5 47 50 0 ambiguous 0.25 0 +2 13 
540 0.125 91 50 0 low 0 0 +2 14 71 0.5 91 50 0 ambiguous 0.5 0 +2 15 184 0.5 647 50 0 ambiguous 0.25 0 +2 16 236 0.5 649 50 649 ambiguous 0.5 1 +2 17 364 0.375 336 50 0 low 0 1 +2 18 241 0.375 91 50 0 low 0 1 +2 19 62 0.375 96 50 50 low 0 0 +2 20 456 0.25 649 50 50 low 0 0 +2 21 653 0.5 91 50 0 ambiguous 0.75 1 +2 22 282 0.25 184 50 50 low 0 0 +2 23 42 0.125 49 50 0 low 0 0 +2 24 52 0.125 181 50 0 low 0 0 +2 25 443 0.125 180 50 0 low 0 0 +2 26 353 0.25 181 50 181 low 0 1 +2 27 265 0.375 48 50 0 low 0 0 +2 28 245 0.5 647 50 0 ambiguous 0.5 1 +2 29 286 0.125 647 50 0 low 0 0 +2 30 198 0.25 336 50 0 low 0 0 +2 31 76 0.5 49 50 0 ambiguous 0.5 0 +2 32 261 0.5 340 50 0 ambiguous 0.25 1 +2 33 166 0.5 99 50 50 ambiguous 0.5 0 +2 34 333 0.125 336 50 0 low 0 0 +2 35 99 0.125 340 50 0 low 0 0 +2 36 255 0.5 647 50 0 ambiguous 0.25 0 +2 37 257 0.5 647 50 0 ambiguous 0.75 1 +2 38 199 0.375 184 50 0 low 0 1 +2 39 118 0.375 49 50 0 low 0 0 +2 40 233 0.5 180 50 50 ambiguous 0.5 0 +2 41 49 0.5 49 50 49 ambiguous 0.25 1 +2 42 102 0.25 48 50 0 low 0 0 +2 43 512 0.5 181 50 181 ambiguous 0.75 1 +2 44 20 0.125 336 50 50 low 0 0 +2 45 198 0.5 48 50 50 ambiguous 0.5 0 +2 46 201 0.25 340 50 50 low 0 0 +2 47 17 0.5 96 50 96 ambiguous 0.5 1 +2 48 74 0.25 47 50 50 low 0 0 +2 49 211 0.25 99 50 50 low 0 0 +2 50 109 0.5 49 50 0 ambiguous 0.75 0 +2 51 410 0.125 649 50 50 low 0 0 +2 52 304 0.5 649 50 50 ambiguous 0.25 0 +2 53 220 0.25 91 50 0 low 0 0 +2 54 21 0.5 336 50 50 ambiguous 0.75 0 +2 55 271 0.5 48 50 0 ambiguous 0.75 0 +2 56 1458 0.125 99 50 50 low 0 0 +2 57 254 0.25 49 50 50 low 0 0 +2 58 216 0.5 340 50 0 ambiguous 0.75 1 +2 59 241 0.375 647 50 0 low 0 1 +2 60 21 0.5 647 50 0 ambiguous 0.75 1 +2 61 8 0.5 340 50 340 ambiguous 0.5 1 +2 62 168 0.5 336 50 0 ambiguous 0.5 1 +2 63 387 0.5 184 50 50 ambiguous 0.5 0 +2 64 266 0.375 99 50 0 low 0 0 +2 65 277 0.5 91 50 0 ambiguous 0.25 0 +2 66 350 0.5 647 50 0 ambiguous 0.5 0 +2 67 358 0.5 47 50 0 ambiguous 0.5 0 +2 68 407 0.5 184 50 0 ambiguous 0.25 0 +2 69 5 0.125 647 50 50 low 0 0 +2 70 369 0.125 48 50 50 low 0 0 +2 71 175 0.375 649 50 649 low 0 1 +2 72 650 0.25 647 50 50 low 0 0 +2 73 459 0.5 336 50 0 ambiguous 0.5 1 +2 74 129 0.5 96 50 0 ambiguous 0.75 0 +2 75 443 0.125 96 50 50 low 0 0 +2 76 398 0.375 340 50 50 low 0 0 +2 77 105 0.5 99 50 0 ambiguous 0.25 1 +2 78 239 0.125 47 50 0 low 0 0 +2 79 76 0.5 48 50 50 ambiguous 0.25 0 +2 80 198 0.25 336 50 336 low 0 1 +2 81 186 0.5 649 50 649 ambiguous 0.75 1 +2 82 130 0.5 181 50 0 ambiguous 0.25 1 +2 83 211 0.5 336 50 336 ambiguous 0.75 1 +2 84 231 0.5 180 50 50 ambiguous 0.75 0 +2 85 75 0.5 181 50 0 ambiguous 0.5 1 +2 86 41 0.375 47 50 50 low 0 0 +2 87 406 0.125 184 50 0 low 0 1 +2 88 367 0.25 96 50 0 low 0 1 +2 89 100 0.5 336 50 336 ambiguous 0.25 1 +2 90 967 0.375 336 50 0 low 0 0 +3 1 2755 0.5 341 50 0 ambiguous 0.25 1 +3 2 1695 0.125 183 50 0 low 0 1 +3 3 1291 0.5 92 50 0 ambiguous 0.75 0 +3 4 940 0.25 341 50 0 low 0 1 +3 5 1716 0.25 342 50 0 low 0 1 +3 6 1165 0.375 653 50 0 low 0 1 +3 7 1306 0.5 343 50 343 ambiguous 0.75 1 +3 8 1815 0.5 182 50 0 ambiguous 0.25 1 +3 9 1467 0.125 653 50 0 low 0 1 +3 10 1420 0.5 343 50 0 ambiguous 0.25 1 +3 11 1625 0.25 653 50 0 low 0 1 +3 12 1157 0.5 646 50 646 ambiguous 0.5 1 +3 13 1225 0.5 183 50 0 ambiguous 0.25 1 +3 14 1438 0.25 183 50 183 low 0 1 +3 15 1683 0.5 653 50 0 ambiguous 0.75 1 +3 16 1838 0.5 50 50 0 ambiguous 0.5 0 +3 17 1618 0.25 50 50 0 low 0 0 +3 18 1708 0.5 183 50 183 ambiguous 0.75 1 +3 19 970 0.5 94 50 0 ambiguous 0.75 1 +3 20 1151 0.5 653 50 0 ambiguous 0.75 1 +3 
21 1928 0.5 646 50 0 ambiguous 0.25 1 +3 22 1758 0.5 653 50 0 ambiguous 0.5 1 +3 23 2629 0.125 653 50 0 low 0 1 +3 24 1439 0.5 183 50 0 ambiguous 0.25 1 +3 25 1328 0.5 50 50 0 ambiguous 0.75 0 +3 26 1193 0.5 342 50 0 ambiguous 0.75 1 +3 27 1290 0.5 94 50 94 ambiguous 0.5 1 +3 28 1487 0.5 183 50 0 ambiguous 0.5 1 +3 29 1154 0.5 94 50 50 ambiguous 0.25 0 +3 30 1205 0.375 94 50 50 low 0 0 +3 31 1449 0.25 182 50 0 low 0 1 +3 32 1497 0.5 342 50 0 ambiguous 0.25 1 +3 33 1430 0.25 183 50 0 low 0 1 +3 34 1514 0.375 92 50 0 low 0 0 +3 35 992 0.5 653 50 0 ambiguous 0.25 1 +3 36 1920 0.5 343 50 0 ambiguous 0.5 1 +3 37 1612 0.5 653 50 653 ambiguous 0.5 1 +3 38 1224 0.5 341 50 0 ambiguous 0.5 1 +3 39 549 0.375 342 50 342 low 0 1 +3 40 617 0.5 94 50 0 ambiguous 0.5 0 +3 41 1139 0.125 341 50 341 low 0 1 +3 42 1991 0.375 50 50 50 low 0 1 +3 43 1678 0.125 94 50 0 low 0 0 +3 44 1776 0.375 94 50 50 low 0 0 +3 45 1733 0.125 183 50 0 low 0 1 +3 46 1216 0.25 343 50 0 low 0 1 +3 47 1125 0.375 182 50 0 low 0 1 +3 48 1618 0.125 342 50 0 low 0 1 +3 49 1828 0.375 51 50 50 low 0 0 +3 50 1781 0.25 646 50 0 low 0 1 +3 51 553 0.375 183 50 0 low 0 1 +3 52 899 0.5 183 50 183 ambiguous 0.75 1 +3 53 388 0.125 47 50 50 low 0 0 +3 54 615 0.5 47 50 0 ambiguous 0.75 0 +3 55 594 0.375 343 50 0 low 0 1 +3 56 346 0.25 47 50 0 low 0 0 +3 57 1069 0.125 343 50 0 low 0 1 +3 58 894 0.5 51 50 50 ambiguous 0.25 0 +3 59 576 0.5 646 50 0 ambiguous 0.75 1 +3 60 592 0.125 182 50 0 low 0 1 +3 61 1508 0.5 92 50 0 ambiguous 0.5 0 +3 62 383 0.375 646 50 646 low 0 1 +3 63 428 0.5 51 50 50 ambiguous 0.5 0 +3 64 432 0.125 51 50 50 low 0 0 +3 65 454 0.375 47 50 0 low 0 0 +3 66 926 0.5 92 50 0 ambiguous 0.25 0 +3 67 346 0.375 341 50 341 low 0 1 +3 68 355 0.5 51 50 0 ambiguous 0.75 0 +3 69 879 0.25 94 50 50 low 0 0 +3 70 827 0.125 92 50 0 low 0 0 +3 71 437 0.5 182 50 182 ambiguous 0.5 1 +3 72 432 0.5 47 50 0 ambiguous 0.25 0 +3 73 411 0.5 341 50 0 ambiguous 0.75 0 +3 74 1125 0.375 183 50 183 low 0 1 +3 75 422 0.125 646 50 0 low 0 1 +3 76 290 0.5 47 50 50 ambiguous 0.5 0 +3 77 366 0.25 94 50 0 low 0 0 +3 78 360 0.25 653 50 0 low 0 1 +3 79 396 0.375 653 50 653 low 0 1 +3 80 408 0.125 94 50 0 low 0 0 +3 81 442 0.5 183 50 0 ambiguous 0.5 1 +3 82 419 0.25 92 50 50 low 0 0 +3 83 1415 0.5 50 50 0 ambiguous 0.25 0 +3 84 1163 0.5 182 50 50 ambiguous 0.75 0 +3 85 717 0.5 94 50 50 ambiguous 0.25 0 +3 86 537 0.5 342 50 0 ambiguous 0.5 1 +3 87 1530 0.5 94 50 50 ambiguous 0.75 0 +3 88 1024 0.25 51 50 0 low 0 0 +3 89 375 0.5 653 50 653 ambiguous 0.25 1 +3 90 777 0.125 50 50 0 low 0 0 +4 1 940 0.5 339 50 339 ambiguous 0.75 1 +4 2 3222 0.5 337 50 337 ambiguous 0.75 1 +4 3 1295 0.25 184 50 0 low 0 1 +4 4 1943 0.5 182 50 0 ambiguous 0.25 1 +4 5 1176 0.375 652 50 652 low 0 1 +4 6 918 0.5 337 50 0 ambiguous 0.25 1 +4 7 1404 0.25 99 50 0 low 0 1 +4 8 1259 0.125 52 50 0 low 0 1 +4 9 1847 0.125 337 50 0 low 0 1 +4 10 952 0.5 182 50 182 ambiguous 0.75 1 +4 11 1341 0.5 52 50 0 ambiguous 0.25 1 +4 12 2206 0.5 93 50 0 ambiguous 0.75 1 +4 13 4242 0.375 182 50 0 low 0 1 +4 14 13020 0.125 339 50 0 low 0 0 +4 15 1142 0.375 179 50 0 low 0 1 +4 16 1633 0.5 339 50 0 ambiguous 0.5 1 +4 17 1077 0.25 94 50 50 low 0 0 +4 18 2892 0.5 48 50 0 ambiguous 0.75 0 +4 19 524 0.5 652 50 652 ambiguous 0.5 1 +4 20 797 0.5 337 50 337 ambiguous 0.5 1 +4 21 1576 0.5 650 50 650 ambiguous 0.5 1 +4 22 1018 0.25 339 50 0 low 0 1 +4 23 1626 0.25 339 50 0 low 0 1 +4 24 766 0.5 94 50 0 ambiguous 0.25 1 +4 25 1089 0.5 94 50 94 ambiguous 0.75 1 +4 26 546 0.5 650 50 650 ambiguous 0.75 1 +4 27 982 0.125 93 50 0 
low 0 0 +4 28 1950 0.125 650 50 650 low 0 1 +4 29 663 0.125 179 50 0 low 0 1 +4 30 482 0.375 650 50 650 low 0 1 +4 31 634 0.25 337 50 337 low 0 1 +4 32 466 0.5 94 50 94 ambiguous 0.5 1 +4 33 1844 0.25 182 50 0 low 0 1 +4 34 576 0.375 339 50 0 low 0 1 +4 35 618 0.125 182 50 50 low 0 0 +4 36 659 0.5 48 50 0 ambiguous 0.75 0 +4 37 389 0.125 652 50 0 low 0 1 +4 38 1116 0.375 99 50 0 low 0 1 +4 39 2504 0.25 93 50 0 low 0 1 +4 40 374 0.5 650 50 650 ambiguous 0.25 1 +4 41 342 0.5 179 50 179 ambiguous 0.5 1 +4 42 409 0.375 48 50 48 low 0 1 +4 43 2010 0.125 48 50 50 low 0 0 +4 44 445 0.5 179 50 0 ambiguous 0.25 1 +4 45 412 0.5 184 50 184 ambiguous 0.25 1 +4 46 404 0.375 650 50 0 low 0 1 +4 47 392 0.125 184 50 50 low 0 0 +4 48 1678 0.125 339 50 339 low 0 1 +4 49 428 0.5 339 50 0 ambiguous 0.75 1 +4 50 385 0.5 99 50 99 ambiguous 0.25 1 +4 51 370 0.5 93 50 93 ambiguous 0.25 1 +4 52 537 0.25 48 50 50 low 0 0 +4 53 1625 0.5 52 50 0 ambiguous 0.5 1 +4 54 355 0.25 650 50 0 low 0 1 +4 55 400 0.25 650 50 650 low 0 1 +4 56 381 0.5 48 50 50 ambiguous 0.5 0 +4 57 339 0.5 339 50 339 ambiguous 0.25 1 +4 58 320 0.375 339 50 0 low 0 1 +4 59 375 0.375 48 50 50 low 0 0 +4 60 525 0.5 184 50 184 ambiguous 0.75 1 +4 61 1071 0.125 99 50 50 low 0 0 +4 62 1389 0.5 652 50 652 ambiguous 0.75 1 +4 63 359 0.5 652 50 0 ambiguous 0.25 1 +4 64 412 0.375 337 50 0 low 0 1 +4 65 434 0.5 650 50 0 ambiguous 0.75 1 +4 66 566 0.5 339 50 0 ambiguous 0.5 1 +4 67 875 0.375 94 50 0 low 0 1 +4 68 361 0.375 93 50 93 low 0 1 +4 69 381 0.125 48 50 0 low 0 0 +4 70 671 0.5 650 50 650 ambiguous 0.5 1 +4 71 900 0.25 48 50 0 low 0 0 +4 72 394 0.5 48 50 0 ambiguous 0.25 0 +4 73 294 0.5 179 50 0 ambiguous 0.75 1 +4 74 249 0.5 93 50 93 ambiguous 0.5 1 +4 75 341 0.375 184 50 0 low 0 1 +4 76 1096 0.5 182 50 0 ambiguous 0.5 1 +4 77 1049 0.25 52 50 0 low 0 0 +4 78 339 0.5 48 50 0 ambiguous 0.25 0 +4 79 418 0.5 650 50 0 ambiguous 0.25 1 +4 80 415 0.375 52 50 50 low 0 0 +4 81 354 0.5 339 50 0 ambiguous 0.25 1 +4 82 1097 0.25 652 50 0 low 0 1 +4 83 580 0.125 94 50 50 low 0 0 +4 84 360 0.5 99 50 0 ambiguous 0.5 1 +4 85 1281 0.25 179 50 0 low 0 1 +4 86 642 0.125 650 50 0 low 0 1 +4 87 279 0.5 99 50 0 ambiguous 0.75 1 +4 88 926 0.5 52 50 0 ambiguous 0.75 0 +4 89 906 0.5 48 50 0 ambiguous 0.5 0 +4 90 326 0.5 184 50 0 ambiguous 0.5 1 +5 1 459 0.5 340 50 340 ambiguous 0.5 1 +5 2 762 0.5 52 50 0 ambiguous 0.5 0 +5 3 623 0.5 97 50 97 ambiguous 0.75 1 +5 4 722 0.5 337 50 337 ambiguous 0.5 1 +5 5 1220 0.5 183 50 0 ambiguous 0.25 1 +5 6 983 0.25 52 50 0 low 0 0 +5 7 919 0.375 650 50 0 low 0 1 +5 8 802 0.375 183 50 183 low 0 1 +5 9 834 0.5 339 50 339 ambiguous 0.75 1 +5 10 810 0.5 52 50 0 ambiguous 0.75 0 +5 11 657 0.5 649 50 649 ambiguous 0.75 1 +5 12 801 0.25 650 50 0 low 0 1 +5 13 803 0.5 50 50 50 ambiguous 0.75 0 +5 14 839 0.125 50 50 0 low 0 0 +5 15 824 0.125 50 50 0 low 0 0 +5 16 950 0.5 50 50 50 ambiguous 0.25 0 +5 17 870 0.5 183 50 183 ambiguous 0.5 1 +5 18 776 0.375 92 50 92 low 0 1 +5 19 854 0.125 97 50 50 low 0 0 +5 20 760 0.5 92 50 92 ambiguous 0.75 1 +5 21 713 0.125 649 50 0 low 0 1 +5 22 821 0.5 337 50 337 ambiguous 0.25 1 +5 23 810 0.5 650 50 0 ambiguous 0.75 1 +5 24 1050 0.375 340 50 0 low 0 1 +5 25 928 0.375 654 50 0 low 0 1 +5 26 725 0.5 50 50 50 ambiguous 0.75 0 +5 27 728 0.25 337 50 0 low 0 1 +5 28 657 0.5 654 50 654 ambiguous 0.25 1 +5 29 703 0.5 92 50 92 ambiguous 0.5 1 +5 30 823 0.375 183 50 183 low 0 1 +5 31 852 0.5 99 50 0 ambiguous 0.5 1 +5 32 638 0.5 649 50 649 ambiguous 0.5 1 +5 33 861 0.5 339 50 0 ambiguous 0.5 1 +5 34 768 0.375 184 50 184 
low 0 1 +5 35 641 0.25 340 50 0 low 0 0 +5 36 741 0.375 339 50 339 low 0 1 +5 37 829 0.5 183 50 183 ambiguous 0.5 1 +5 38 782 0.5 340 50 0 ambiguous 0.75 1 +5 39 909 0.5 97 50 97 ambiguous 0.5 1 +5 40 736 0.125 654 50 0 low 0 0 +5 41 883 0.5 649 50 649 ambiguous 0.25 1 +5 42 681 0.25 97 50 50 low 0 0 +5 43 893 0.25 92 50 0 low 0 1 +5 44 810 0.375 50 50 50 low 0 0 +5 45 1219 0.5 52 50 0 ambiguous 0.25 0 +5 46 911 0.25 649 50 50 low 0 0 +5 47 781 0.5 340 50 0 ambiguous 0.25 1 +5 48 763 0.375 337 50 0 low 0 1 +5 49 810 0.5 184 50 184 ambiguous 0.5 1 +5 50 756 0.25 654 50 50 low 0 0 +5 51 735 0.375 97 50 0 low 0 1 +5 52 728 0.375 649 50 0 low 0 1 +5 53 1035 0.5 337 50 0 ambiguous 0.75 1 +5 54 743 0.5 183 50 183 ambiguous 0.25 1 +5 55 857 0.5 99 50 50 ambiguous 0.75 0 +5 56 742 0.5 339 50 339 ambiguous 0.25 1 +5 57 652 0.5 650 50 0 ambiguous 0.5 1 +5 58 777 0.5 92 50 0 ambiguous 0.25 1 +5 59 837 0.5 50 50 50 ambiguous 0.25 0 +5 60 775 0.5 50 50 50 ambiguous 0.5 0 +5 61 872 0.25 183 50 50 low 0 0 +5 62 789 0.5 654 50 654 ambiguous 0.75 1 +5 63 793 0.375 99 50 0 low 0 1 +5 64 888 0.125 650 50 50 low 0 0 +5 65 851 0.5 99 50 50 ambiguous 0.25 0 +5 66 878 0.25 50 50 0 low 0 0 +5 67 920 0.375 52 50 50 low 0 0 +5 68 772 0.25 183 50 0 low 0 1 +5 69 784 0.25 184 50 0 low 0 1 +5 70 957 0.5 650 50 650 ambiguous 0.25 1 +5 71 746 0.5 183 50 183 ambiguous 0.75 1 +5 72 784 0.5 184 50 0 ambiguous 0.25 1 +5 73 750 0.125 340 50 50 low 0 0 +5 74 746 0.5 50 50 0 ambiguous 0.5 0 +5 75 937 0.125 184 50 50 low 0 0 +5 76 836 0.125 339 50 0 low 0 1 +5 77 720 0.25 50 50 50 low 0 0 +5 78 729 0.25 99 50 0 low 0 1 +5 79 639 0.5 183 50 183 ambiguous 0.75 1 +5 80 784 0.125 99 50 0 low 0 0 +5 81 599 0.25 339 50 0 low 0 1 +5 82 705 0.375 50 50 0 low 0 0 +5 83 817 0.125 183 50 0 low 0 0 +5 84 785 0.5 97 50 0 ambiguous 0.25 1 +5 85 726 0.125 183 50 50 low 0 0 +5 86 1112 0.125 92 50 0 low 0 0 +5 87 799 0.125 52 50 0 low 0 0 +5 88 818 0.5 654 50 0 ambiguous 0.5 1 +5 89 847 0.5 184 50 0 ambiguous 0.75 1 +5 90 778 0.125 337 50 0 low 0 0 +6 1 7265 0.25 648 50 0 low 0 1 +6 2 8033 0.375 651 50 0 low 0 1 +6 3 5415 0.375 338 50 338 low 0 1 +6 4 5183 0.5 337 50 0 ambiguous 0.5 1 +6 5 1609 0.375 54 50 0 low 0 0 +6 6 3036 0.5 646 50 0 ambiguous 0.75 1 +6 7 10138 0.5 49 50 50 ambiguous 0.75 0 +6 8 3121 0.375 648 50 0 low 0 1 +6 9 2224 0.25 176 50 50 low 0 0 +6 10 3415 0.125 49 50 0 low 0 0 +6 11 3309 0.5 646 50 0 ambiguous 0.25 1 +6 12 5624 0.25 184 50 184 low 0 1 +6 13 5032 0.5 54 50 50 ambiguous 0.75 0 +6 14 5991 0.5 53 50 0 ambiguous 0.5 0 +6 15 2220 0.25 176 50 0 low 0 1 +6 16 665 0.25 49 50 0 low 0 0 +6 17 6233 0.125 651 50 651 low 0 1 +6 18 6381 0.125 91 50 0 low 0 0 +6 19 15254 0.5 338 50 0 ambiguous 0.25 1 +6 20 8786 0.375 337 50 0 low 0 1 +6 21 11423 0.5 91 50 91 ambiguous 0.25 1 +6 22 5114 0.125 99 50 50 low 0 0 +6 23 2545 0.125 53 50 50 low 0 0 +6 24 13957 0.5 341 50 50 ambiguous 0.5 0 +6 25 1837 0.5 648 50 0 ambiguous 0.25 1 +6 26 4679 0.375 91 50 0 low 0 1 +6 27 2697 0.125 91 50 50 low 0 0 +6 28 12661 0.5 651 50 0 ambiguous 0.75 1 +6 29 1942 0.5 99 50 99 ambiguous 0.5 1 +6 30 3170 0.5 99 50 99 ambiguous 0.25 1 +6 31 6455 0.375 99 50 99 low 0 1 +6 32 3171 0.25 651 50 0 low 0 1 +6 33 5667 0.375 176 50 0 low 0 1 +6 34 4606 0.5 91 50 0 ambiguous 0.75 1 +6 35 9317 0.125 646 50 0 low 0 0 +6 36 1734 0.5 651 50 651 ambiguous 0.5 1 +6 37 6134 0.5 91 50 0 ambiguous 0.5 1 +6 38 1547 0.375 91 50 0 low 0 1 +6 39 729 0.5 176 50 176 ambiguous 0.25 1 +6 40 4438 0.5 49 50 0 ambiguous 0.25 0 +6 41 4940 0.25 54 50 50 low 0 0 +6 42 1126 0.5 49 50 
50 ambiguous 0.5 0 +6 43 1726 0.5 176 50 50 ambiguous 0.75 0 +6 44 611 0.5 341 50 341 ambiguous 0.25 1 +6 45 982 0.5 91 50 91 ambiguous 0.25 1 +6 46 3389 0.5 184 50 184 ambiguous 0.5 1 +6 47 372 0.375 184 50 0 low 0 1 +6 48 54 0.125 341 50 0 low 0 1 +6 49 5306 0.25 91 50 50 low 0 0 +6 50 806 0.25 91 50 50 low 0 0 +6 51 2225 0.25 341 50 0 low 0 0 +6 52 1382 0.5 651 50 0 ambiguous 0.25 1 +6 53 4960 0.5 176 50 0 ambiguous 0.25 1 +6 54 641 0.375 646 50 0 low 0 1 +6 55 1525 0.5 646 50 646 ambiguous 0.5 1 +6 56 1188 0.25 646 50 0 low 0 0 +6 57 2095 0.375 53 50 50 low 0 0 +6 58 346 0.125 54 50 50 low 0 0 +6 59 4855 0.25 338 50 50 low 0 0 +6 60 4182 0.25 337 50 50 low 0 0 +6 61 788 0.125 338 50 50 low 0 0 +6 62 2593 0.5 91 50 0 ambiguous 0.75 0 +6 63 163 0.125 184 50 50 low 0 0 +6 64 965 0.5 184 50 0 ambiguous 0.25 1 +6 65 927 0.5 176 50 0 ambiguous 0.5 1 +6 66 2085 0.125 176 50 50 low 0 0 +6 67 826 0.375 341 50 341 low 0 1 +6 68 5905 0.5 54 50 50 ambiguous 0.5 0 +6 69 27 0.375 49 50 0 low 0 0 +6 70 2324 0.25 53 50 0 low 0 0 +6 71 606 0.5 648 50 648 ambiguous 0.5 1 +6 72 8977 0.5 53 50 50 ambiguous 0.25 0 +6 73 3788 0.125 337 50 0 low 0 0 +6 74 3013 0.375 176 50 176 low 0 1 +6 75 732 0.5 53 50 50 ambiguous 0.75 0 +6 76 2932 0.5 648 50 0 ambiguous 0.75 1 +6 77 520 0.5 337 50 0 ambiguous 0.25 1 +6 78 4407 0.25 99 50 0 low 0 0 +6 79 5193 0.5 54 50 50 ambiguous 0.25 0 +6 80 3191 0.5 91 50 0 ambiguous 0.5 1 +6 81 390 0.5 176 50 0 ambiguous 0.5 1 +6 82 7450 0.5 341 50 341 ambiguous 0.75 1 +6 83 2018 0.5 337 50 50 ambiguous 0.75 0 +6 84 1206 0.5 184 50 0 ambiguous 0.75 0 +6 85 1363 0.125 648 50 0 low 0 0 +6 86 3957 0.5 338 50 0 ambiguous 0.5 1 +6 87 6344 0.125 176 50 0 low 0 0 +6 88 5897 0.5 99 50 0 ambiguous 0.75 0 +6 89 1421 0.5 338 50 50 ambiguous 0.75 0 +6 90 885 0.5 176 50 0 ambiguous 0.75 0 \ No newline at end of file diff --git a/R/inst/extdata/dbdm_exampleData.txt b/R/inst/extdata/dbdm_exampleData.txt new file mode 100644 index 00000000..0bb2520d --- /dev/null +++ b/R/inst/extdata/dbdm_exampleData.txt @@ -0,0 +1,15001 @@ +subjID opt1hprob opt2hprob opt1hval opt1lval opt2hval opt2lval choice +1 0.9 0.5 -14 -30 30 -43 2 +1 0.3 0.3 18 -15 46 36 2 +1 0.5 0.5 -26 -44 10 -5 2 +1 0.2 0.4 -8 -43 26 17 2 +1 0.3 0.3 30 -37 44 24 2 +1 0.6 0.2 46 -26 10 -14 1 +1 0.8 0.9 48 -49 -12 -30 1 +1 0.8 0.1 -8 -16 48 0 2 +1 0.2 0.5 27 -30 28 27 2 +1 0.1 0.3 -3 -48 2 -34 2 +1 0.6 0.4 -30 -39 49 -31 2 +1 0.1 0.9 29 -4 8 7 2 +1 0.9 0.9 12 -21 27 -13 1 +1 0.9 0.1 -1 -39 43 11 2 +1 0.5 0.2 22 -18 22 -12 2 +1 0.5 0.2 -9 -50 -4 -12 2 +1 0.4 0.2 -22 -45 -12 -49 2 +1 0.2 0.7 39 -4 19 -36 2 +1 0.8 0.3 32 -24 3 -25 1 +1 0.7 0.6 41 0 38 31 2 +1 0.7 0.4 28 5 43 -4 2 +1 0.5 0.3 28 -24 33 -22 1 +1 0.6 0.8 23 -15 -7 -35 1 +1 0.1 0.2 25 -42 -31 -35 1 +1 0.1 0.5 49 -34 3 -9 2 +1 0.6 0.5 38 -16 -42 -49 1 +1 0.6 0.5 6 -46 21 -3 2 +1 0.5 0.9 -18 -50 32 -42 2 +1 0.8 0.3 9 4 42 13 2 +1 0.3 0.8 41 34 -23 -25 1 +1 0.1 0.6 38 30 21 -7 1 +1 0.4 0.3 21 -32 -3 -40 1 +1 0.8 0.6 43 4 33 -40 1 +1 0.2 0.4 2 -4 5 -30 1 +1 0.5 0.9 -11 -37 6 -28 2 +1 0.8 0.8 31 -4 31 9 2 +1 0.2 0.6 33 -26 -4 -44 2 +1 0.8 0.3 43 14 49 9 2 +1 0.8 0.4 33 20 30 18 2 +1 0.8 0.2 19 -35 -5 -41 1 +1 0.4 0.8 7 -9 16 -11 2 +1 0.1 0.3 -8 -41 34 -35 2 +1 0.8 0.5 47 15 8 7 1 +1 0.4 0.9 -1 -8 22 -6 2 +1 0.8 0.8 16 6 34 -43 1 +1 0.5 0.4 22 -44 1 -29 1 +1 0.5 0.4 17 -22 -15 -20 1 +1 0.6 0.7 14 -33 -3 -14 1 +1 0.3 0.6 19 10 -34 -45 1 +1 0.4 0.6 30 18 35 28 2 +1 0.3 0.4 -4 -5 43 -13 2 +1 0.7 0.4 -7 -43 11 0 2 +1 0.4 0.7 24 5 14 -47 1 +1 0.3 0.7 42 14 22 13 1 +1 0.7 0.9 44 8 32 -11 1 +1 
0.9 0.4 47 36 -36 -41 1 +1 0.3 0.9 7 -47 15 -26 2 +1 0.9 0.8 4 -39 47 0 2 +1 0.6 0.8 48 -8 28 -46 1 +1 0.6 0.5 47 35 38 12 2 +1 0.9 0.9 35 9 11 -37 1 +1 0.4 0.1 30 -16 -29 -40 1 +1 0.6 0.7 17 -31 -32 -39 1 +1 0.6 0.1 40 32 31 11 1 +1 0.1 0.1 -20 -38 49 -17 2 +1 0.7 0.6 36 -2 -42 -48 1 +1 0.5 0.5 9 -39 -1 -18 2 +1 0.6 0.5 46 -48 49 33 2 +1 0.6 0.7 -34 -46 49 38 2 +1 0.8 0.5 47 39 -5 -44 1 +1 0.5 0.9 41 -32 44 -35 2 +1 0.8 0.4 50 -41 38 6 1 +1 0.8 0.5 14 -24 -30 -43 1 +1 0.7 0.7 27 -32 17 -3 2 +1 0.6 0.1 48 -4 8 4 1 +1 0.6 0.3 10 -10 -22 -30 1 +1 0.3 0.7 3 -45 0 -39 2 +1 0.5 0.4 41 33 45 12 1 +1 0.5 0.1 39 -32 -34 -41 1 +1 0.9 0.5 40 33 10 8 1 +1 0.2 0.3 -2 -17 -4 -35 1 +1 0.6 0.2 25 -13 45 5 1 +1 0.2 0.1 10 -7 19 -23 1 +1 0.9 0.1 49 -21 29 25 2 +1 0.8 0.1 45 19 39 -44 1 +1 0.7 0.3 48 40 48 1 1 +1 0.8 0.7 37 -37 41 28 2 +1 0.3 0.8 26 -20 35 30 2 +1 0.2 0.2 0 -17 14 -36 2 +1 0.8 0.2 20 -19 -4 -29 1 +1 0.5 0.7 -7 -11 -16 -29 2 +1 0.8 0.4 48 -27 -1 -39 1 +1 0.3 0.9 15 -33 18 -14 2 +1 0.6 0.2 -12 -21 -34 -44 1 +1 0.5 0.7 26 1 10 -6 1 +1 0.9 0.1 35 -48 35 -9 1 +1 0.5 0.6 32 1 -4 -5 1 +1 0.7 0.7 28 2 42 -19 2 +1 0.6 0.6 20 3 42 7 2 +1 0.2 0.4 36 -25 16 -28 2 +1 0.1 0.4 12 -7 -10 -48 1 +1 0.7 0.2 -1 -24 47 -4 2 +1 0.3 0.7 -24 -35 33 27 2 +1 0.9 0.8 19 -47 23 -43 1 +1 0.1 0.7 38 -24 15 2 2 +1 0.1 0.4 48 -9 34 -40 1 +1 0.7 0.1 32 -35 23 -14 1 +1 0.1 0.4 23 4 -15 -34 1 +1 0.2 0.8 -9 -13 38 -42 2 +1 0.9 0.3 -35 -44 15 -44 1 +1 0.4 0.2 4 -8 18 -39 1 +1 0.7 0.4 22 17 42 -14 1 +1 0.5 0.5 25 -16 -4 -19 2 +1 0.8 0.2 41 -24 0 -22 1 +1 0.4 0.3 27 22 23 7 1 +1 0.1 0.3 17 5 15 -16 1 +1 0.8 0.5 -7 -41 49 16 2 +1 0.2 0.7 35 3 -6 -36 1 +1 0.3 0.6 19 -15 16 14 2 +1 0.7 0.3 34 14 22 -27 1 +1 0.7 0.3 39 -42 45 43 2 +1 0.2 0.5 47 -32 41 -39 2 +1 0.8 0.1 20 -4 43 29 2 +1 0.4 0.3 42 -3 8 -26 1 +1 0.6 0.9 24 2 -27 -44 1 +1 0.7 0.4 39 -44 47 16 2 +1 0.2 0.9 -2 -14 0 -24 2 +1 0.9 0.7 -24 -50 45 27 2 +1 0.9 0.4 -22 -35 26 -21 2 +1 0.2 0.1 16 11 26 -49 1 +1 0.2 0.6 1 -21 32 16 2 +1 0.1 0.7 -25 -42 50 -8 2 +1 0.7 0.1 -2 -37 -7 -10 1 +1 0.9 0.6 -24 -26 -7 -25 1 +1 0.8 0.1 33 -35 43 -47 1 +1 0.7 0.8 24 -23 49 15 2 +1 0.8 0.8 -15 -20 26 16 2 +1 0.3 0.4 40 -18 14 -47 2 +1 0.1 0.3 23 -17 49 -36 2 +1 0.1 0.2 14 -41 17 -36 2 +1 0.4 0.7 34 3 -20 -44 1 +1 0.5 0.7 -48 -50 38 12 2 +1 0.1 0.2 -20 -47 -20 -45 2 +1 0.4 0.3 41 -16 13 -27 1 +1 0.8 0.4 40 20 29 -12 1 +1 0.6 0.1 -9 -15 -6 -46 1 +1 0.1 0.1 16 -41 48 -24 2 +1 0.9 0.7 7 -50 11 -33 1 +1 0.3 0.1 39 -34 7 -19 1 +1 0.1 0.5 17 -35 -33 -35 1 +1 0.1 0.1 46 38 10 -13 1 +1 0.2 0.2 35 -30 7 3 2 +1 0.7 0.3 -44 -48 6 5 2 +1 0.6 0.8 -33 -36 5 -7 2 +1 0.2 0.4 -42 -45 0 -22 2 +1 0.9 0.1 -15 -49 -6 -33 1 +1 0.4 0.2 46 3 -26 -33 1 +1 0.5 0.9 7 -21 7 -9 2 +1 0.6 0.9 45 3 37 25 2 +1 0.3 0.1 9 1 25 -41 1 +1 0.4 0.6 -3 -10 22 15 2 +1 0.5 0.1 39 -22 4 -28 1 +1 0.2 0.7 4 -35 -12 -14 2 +1 0.4 0.4 17 0 24 -49 1 +1 0.3 0.7 28 6 19 -18 1 +1 0.9 0.7 26 -28 28 -47 1 +1 0.9 0.5 37 -34 16 10 1 +1 0.8 0.2 0 -47 45 43 2 +1 0.1 0.3 44 39 -6 -47 1 +1 0.7 0.6 -24 -33 35 1 2 +1 0.5 0.1 24 5 23 16 2 +1 0.2 0.4 -8 -41 22 -46 2 +1 0.3 0.7 16 4 36 35 2 +1 0.6 0.1 -11 -26 45 44 2 +1 0.7 0.7 -8 -49 48 -48 2 +1 0.2 0.4 36 1 3 -29 1 +1 0.6 0.4 13 -16 18 -17 1 +1 0.9 0.1 43 26 -15 -41 1 +1 0.6 0.7 12 -35 43 29 2 +1 0.9 0.7 36 1 23 -23 1 +1 0.3 0.4 4 -10 28 -26 1 +1 0.8 0.1 -19 -46 33 -30 2 +1 0.2 0.3 43 -31 50 3 2 +1 0.2 0.5 42 -6 1 -40 2 +1 0.8 0.5 24 -31 43 33 2 +1 0.2 0.1 -20 -47 26 -25 2 +1 0.5 0.3 4 -19 50 -48 1 +1 0.7 0.7 31 30 -16 -26 1 +1 0.2 0.2 42 -7 13 -13 2 +1 0.2 0.8 47 -37 25 -23 2 +1 0.6 0.6 46 -4 19 -1 1 +1 
0.2 0.5 22 16 31 8 1 +1 0.5 0.8 11 2 -26 -47 1 +1 0.4 0.6 -3 -27 4 -47 1 +1 0.7 0.3 3 -30 46 4 2 +1 0.5 0.2 40 14 50 15 2 +1 0.6 0.6 26 -3 23 -42 1 +1 0.2 0.4 17 -5 48 38 2 +1 0.7 0.1 -41 -50 33 5 2 +1 0.3 0.3 36 -3 38 -16 1 +1 0.8 0.4 17 -37 7 -11 1 +1 0.8 0.4 44 -15 -8 -47 1 +1 0.1 0.9 37 10 34 21 2 +1 0.8 0.5 -18 -29 5 -16 2 +1 0.8 0.7 48 -25 -5 -8 1 +1 0.6 0.1 36 -25 36 -37 1 +1 0.6 0.6 29 19 8 -19 1 +1 0.8 0.9 16 -26 38 -33 2 +1 0.4 0.4 9 1 42 8 2 +1 0.6 0.3 36 -19 49 43 2 +1 0.3 0.9 23 12 -9 -24 1 +1 0.5 0.2 -2 -34 -9 -32 1 +1 0.9 0.2 -3 -44 42 -3 2 +1 0.6 0.9 41 -47 15 -34 1 +1 0.1 0.5 38 33 -23 -48 1 +1 0.9 0.7 15 -5 23 -19 2 +1 0.5 0.7 34 -29 23 19 2 +1 0.4 0.1 44 -25 3 -27 1 +1 0.4 0.9 26 25 -27 -37 1 +1 0.6 0.4 32 -9 31 -18 1 +1 0.1 0.5 -22 -29 32 -10 2 +1 0.1 0.3 26 10 31 -47 1 +1 0.6 0.5 42 -40 42 -41 1 +1 0.7 0.9 47 -34 40 -28 1 +1 0.8 0.6 -12 -36 20 -16 2 +1 0.9 0.6 25 -31 27 10 1 +1 0.5 0.6 21 -29 -4 -8 1 +1 0.5 0.8 -2 -19 47 41 2 +1 0.4 0.7 37 -14 -5 -8 1 +1 0.1 0.4 4 -17 -4 -27 2 +1 0.5 0.1 0 -49 40 12 2 +1 0.2 0.1 -9 -18 17 -49 1 +1 0.3 0.3 43 -47 30 -16 2 +1 0.8 0.7 39 -3 43 -21 1 +1 0.8 0.2 -28 -33 9 -25 2 +1 0.5 0.2 24 -50 50 5 2 +1 0.2 0.5 33 0 44 -18 2 +1 0.9 0.8 34 1 38 36 2 +1 0.2 0.7 -22 -36 15 -6 2 +1 0.6 0.3 42 16 31 -29 1 +1 0.7 0.9 9 -11 49 30 2 +1 0.6 0.6 43 -22 32 -22 2 +1 0.3 0.4 38 37 41 -39 1 +1 0.9 0.2 32 25 42 -33 1 +1 0.8 0.3 32 30 48 -30 1 +1 0.3 0.7 -4 -30 10 8 2 +1 0.7 0.7 -12 -14 -34 -50 1 +1 0.9 0.8 42 38 31 -40 1 +1 0.4 0.2 4 -43 -8 -11 2 +1 0.1 0.5 13 -16 27 10 2 +1 0.5 0.4 7 -22 5 -46 1 +1 0.3 0.4 45 -31 32 4 2 +1 0.8 0.7 38 -26 45 -27 1 +1 0.9 0.5 -4 -10 48 -7 2 +1 0.6 0.9 20 -43 38 18 2 +1 0.9 0.2 -1 -6 34 -44 1 +1 0.7 0.2 37 2 49 -2 1 +1 0.2 0.6 14 -43 21 -40 2 +1 0.5 0.3 22 -16 42 39 2 +1 0.7 0.1 -15 -45 16 -4 2 +1 0.9 0.6 2 -14 50 -25 2 +1 0.1 0.1 -33 -38 9 0 2 +1 0.2 0.2 -13 -28 26 -28 2 +1 0.9 0.2 35 -38 37 6 1 +1 0.7 0.5 -2 -50 39 -27 2 +1 0.8 0.3 42 -47 40 -20 1 +1 0.4 0.1 9 -9 -10 -46 1 +1 0.4 0.9 -27 -28 45 12 2 +1 0.2 0.8 23 21 40 -18 2 +1 0.8 0.8 9 -49 46 6 2 +1 0.7 0.8 -12 -13 -35 -50 1 +1 0.4 0.2 37 -8 27 -24 1 +1 0.3 0.6 -19 -28 45 -31 2 +1 0.4 0.4 -26 -50 -14 -16 2 +1 0.6 0.9 18 -9 24 19 2 +1 0.2 0.6 17 7 -10 -27 1 +1 0.5 0.8 47 -40 15 -33 2 +1 0.5 0.1 19 6 46 2 1 +1 0.7 0.2 12 -30 27 -8 2 +1 0.5 0.9 31 -32 43 -41 2 +1 0.4 0.7 -35 -45 -27 -45 2 +1 0.2 0.6 45 -13 47 -13 2 +1 0.7 0.2 19 -27 -12 -48 1 +1 0.9 0.9 26 -31 20 -8 2 +1 0.8 0.6 27 24 35 12 1 +1 0.4 0.1 22 -20 30 3 2 +1 0.5 0.5 16 -31 38 -19 1 +1 0.4 0.2 47 44 38 18 1 +1 0.7 0.4 8 -39 50 -18 2 +1 0.7 0.4 19 -25 33 -41 1 +1 0.7 0.9 39 15 23 -42 1 +1 0.8 0.4 8 -39 21 -40 1 +1 0.2 0.7 5 4 47 13 2 +1 0.2 0.5 4 -4 20 -43 1 +1 0.5 0.6 -3 -34 48 34 2 +1 0.5 0.7 16 -11 34 14 2 +1 0.5 0.2 35 -2 27 -44 1 +1 0.4 0.6 -9 -35 24 -36 2 +1 0.8 0.2 28 -21 30 8 1 +1 0.4 0.6 43 -31 13 -33 1 +2 0.2 0.2 8 -22 43 35 2 +2 0.2 0.6 18 -12 -19 -32 1 +2 0.3 0.2 29 -37 28 19 2 +2 0.3 0.1 -39 -45 -7 -16 2 +2 0.3 0.9 34 12 49 25 2 +2 0.9 0.6 43 -25 50 -29 1 +2 0.5 0.1 -13 -35 21 -19 2 +2 0.2 0.5 22 -12 25 -28 2 +2 0.8 0.5 -12 -50 15 8 2 +2 0.2 0.5 50 -5 6 -22 1 +2 0.7 0.2 33 22 4 -37 1 +2 0.1 0.1 3 -47 -15 -25 1 +2 0.5 0.9 19 -34 39 -49 2 +2 0.8 0.3 29 19 4 -41 1 +2 0.9 0.9 26 2 17 5 2 +2 0.1 0.1 -12 -16 45 37 2 +2 0.2 0.9 7 -44 9 -42 2 +2 0.9 0.1 23 -24 39 14 2 +2 0.4 0.8 32 21 29 10 1 +2 0.8 0.3 -37 -49 18 -49 2 +2 0.5 0.4 26 -31 18 -30 1 +2 0.6 0.4 15 -27 19 10 2 +2 0.9 0.9 8 -21 38 33 2 +2 0.8 0.7 30 4 -14 -31 1 +2 0.7 0.4 20 17 21 -38 1 +2 0.4 0.9 21 -40 -11 -29 2 +2 0.2 0.4 25 -8 30 -3 2 +2 0.9 
0.6 24 11 7 -12 1 +2 0.3 0.7 44 -17 -14 -48 1 +2 0.8 0.4 25 1 19 -14 1 +2 0.3 0.1 35 -2 4 -20 1 +2 0.2 0.7 2 -42 8 -14 2 +2 0.9 0.3 35 1 39 -50 1 +2 0.3 0.8 13 -4 33 -49 1 +2 0.9 0.2 24 -12 15 -5 1 +2 0.7 0.1 23 -19 -20 -27 1 +2 0.4 0.5 24 -39 49 -6 2 +2 0.9 0.9 50 28 41 -19 1 +2 0.9 0.6 15 -28 -13 -22 1 +2 0.2 0.2 50 -6 47 42 2 +2 0.4 0.6 10 -38 0 -12 2 +2 0.1 0.7 39 -17 26 4 2 +2 0.2 0.1 38 -49 28 -22 1 +2 0.9 0.2 -11 -25 50 37 2 +2 0.4 0.7 30 -32 34 -39 2 +2 0.7 0.8 6 -22 -22 -50 1 +2 0.9 0.9 9 -14 40 -23 2 +2 0.8 0.6 -43 -46 20 13 2 +2 0.6 0.1 31 -39 15 -16 1 +2 0.8 0.9 -12 -23 15 0 2 +2 0.1 0.3 -4 -19 38 10 2 +2 0.9 0.6 37 -46 24 -27 1 +2 0.4 0.7 16 -32 34 -31 2 +2 0.9 0.9 36 27 14 -32 1 +2 0.2 0.7 1 -49 33 -48 2 +2 0.6 0.8 -7 -29 33 7 2 +2 0.4 0.2 9 -8 29 -24 1 +2 0.3 0.6 -16 -35 45 10 2 +2 0.5 0.6 49 -14 17 -13 1 +2 0.3 0.6 -4 -11 18 -10 2 +2 0.6 0.4 -20 -43 -8 -18 1 +2 0.5 0.7 15 -16 16 11 2 +2 0.1 0.7 32 8 -10 -12 1 +2 0.8 0.3 10 -35 2 -35 1 +2 0.9 0.4 -30 -41 9 -25 2 +2 0.1 0.2 -25 -37 -9 -17 2 +2 0.9 0.7 22 -5 34 -35 1 +2 0.9 0.3 -28 -37 -4 -42 2 +2 0.1 0.7 27 -34 9 -3 2 +2 0.6 0.9 16 6 -45 -49 1 +2 0.6 0.3 33 11 -1 -7 1 +2 0.7 0.5 42 -1 15 -42 1 +2 0.6 0.8 49 25 20 -29 1 +2 0.4 0.3 27 -38 -13 -19 1 +2 0.8 0.5 11 -11 -5 -41 1 +2 0.2 0.8 22 -3 1 -35 1 +2 0.1 0.9 15 -18 -14 -48 1 +2 0.1 0.6 47 33 48 -23 1 +2 0.7 0.1 39 -8 -24 -27 1 +2 0.9 0.7 16 -22 48 -27 2 +2 0.3 0.7 28 14 37 -33 1 +2 0.2 0.8 40 22 28 25 2 +2 0.9 0.2 9 -35 34 -50 1 +2 0.9 0.9 37 -45 40 -45 2 +2 0.9 0.5 -13 -33 30 -26 2 +2 0.5 0.6 32 -23 -3 -45 1 +2 0.1 0.5 34 9 47 -41 1 +2 0.5 0.4 -26 -35 35 24 2 +2 0.5 0.4 6 -15 -4 -47 1 +2 0.1 0.5 45 -28 16 -17 2 +2 0.2 0.4 47 -5 38 -39 2 +2 0.4 0.5 15 -41 40 -12 2 +2 0.5 0.5 49 -21 49 -38 1 +2 0.5 0.5 10 -5 45 -37 2 +2 0.5 0.7 24 19 3 -39 1 +2 0.5 0.9 19 3 -1 -37 1 +2 0.8 0.7 -9 -43 44 -32 2 +2 0.9 0.1 47 27 50 -22 1 +2 0.3 0.8 35 30 27 -32 1 +2 0.4 0.2 25 -25 29 28 2 +2 0.5 0.9 -38 -42 24 11 2 +2 0.5 0.1 -1 -38 46 -47 2 +2 0.2 0.6 26 2 12 -14 1 +2 0.3 0.2 -35 -42 28 7 2 +2 0.9 0.2 7 -37 21 6 1 +2 0.3 0.1 2 -29 40 -38 1 +2 0.1 0.5 7 -47 3 -18 2 +2 0.6 0.4 -14 -50 6 -49 2 +2 0.5 0.3 32 17 45 -31 1 +2 0.5 0.8 -10 -26 5 -48 2 +2 0.7 0.1 -8 -33 26 -10 2 +2 0.7 0.4 43 -26 32 -41 1 +2 0.1 0.1 30 -24 38 -28 1 +2 0.7 0.4 -21 -26 10 -22 2 +2 0.6 0.4 23 17 7 -32 1 +2 0.5 0.2 41 30 33 -37 1 +2 0.2 0.8 -28 -48 34 3 2 +2 0.4 0.2 -13 -30 47 33 2 +2 0.9 0.2 47 25 20 -43 1 +2 0.6 0.4 -3 -32 -7 -34 1 +2 0.2 0.5 -20 -49 2 -22 2 +2 0.7 0.9 9 -19 -12 -44 1 +2 0.7 0.8 1 -20 17 -1 2 +2 0.4 0.9 19 -38 -9 -48 1 +2 0.7 0.3 8 -5 47 38 2 +2 0.7 0.2 35 -4 16 6 1 +2 0.8 0.4 46 9 -27 -43 1 +2 0.2 0.7 -6 -28 40 31 2 +2 0.2 0.2 31 -17 44 21 2 +2 0.7 0.5 9 5 9 -2 1 +2 0.9 0.3 26 -23 14 9 1 +2 0.8 0.3 27 -12 -31 -35 1 +2 0.1 0.7 -23 -25 16 -27 2 +2 0.9 0.9 8 -48 2 -49 1 +2 0.3 0.6 19 -34 -20 -31 1 +2 0.4 0.3 26 -5 8 -31 1 +2 0.4 0.2 40 25 39 -17 1 +2 0.2 0.2 33 -33 40 31 2 +2 0.1 0.1 28 -43 10 -26 2 +2 0.6 0.9 14 6 3 -14 1 +2 0.9 0.1 -2 -37 12 -33 1 +2 0.9 0.5 35 26 44 39 2 +2 0.5 0.9 50 36 35 -13 1 +2 0.3 0.1 6 -44 -10 -36 1 +2 0.8 0.9 47 10 3 -12 1 +2 0.2 0.2 48 -43 -23 -31 2 +2 0.1 0.1 -27 -50 12 4 2 +2 0.3 0.7 33 2 48 7 2 +2 0.4 0.8 24 23 34 -36 1 +2 0.9 0.6 22 -30 25 21 2 +2 0.3 0.5 48 43 -11 -49 1 +2 0.6 0.4 10 -16 1 -9 2 +2 0.4 0.7 41 -32 28 -21 2 +2 0.6 0.9 45 25 14 -5 1 +2 0.4 0.3 43 -49 44 6 2 +2 0.1 0.9 36 -9 12 -30 2 +2 0.9 0.5 -37 -48 42 -29 2 +2 0.8 0.6 42 -42 2 -13 1 +2 0.8 0.3 30 -10 4 -40 1 +2 0.2 0.9 33 9 -21 -46 1 +2 0.3 0.7 -24 -33 -2 -20 2 +2 0.8 0.3 30 18 17 8 1 +2 0.1 0.6 23 -37 26 -39 
2 +2 0.5 0.4 31 -50 49 -3 2 +2 0.4 0.1 -4 -33 41 -8 2 +2 0.7 0.1 16 -46 14 -35 1 +2 0.6 0.9 -4 -34 46 2 2 +2 0.6 0.1 -13 -21 10 -8 1 +2 0.9 0.9 3 2 48 -35 2 +2 0.4 0.6 13 -4 43 -43 2 +2 0.6 0.6 -19 -43 24 -18 2 +2 0.2 0.2 20 -14 50 -1 2 +2 0.6 0.5 -42 -48 -24 -27 2 +2 0.8 0.2 31 -20 4 -44 1 +2 0.8 0.1 8 2 27 -28 1 +2 0.4 0.1 29 1 3 -22 1 +2 0.1 0.8 27 -34 9 -7 2 +2 0.4 0.4 14 -7 45 -43 2 +2 0.9 0.2 7 5 38 -33 1 +2 0.5 0.9 37 -2 42 -26 2 +2 0.4 0.1 45 3 13 -34 1 +2 0.9 0.6 47 -28 39 -15 1 +2 0.4 0.2 1 -3 26 -8 2 +2 0.9 0.7 34 22 23 13 1 +2 0.5 0.9 -2 -34 6 1 2 +2 0.1 0.2 35 -36 44 -29 2 +2 0.9 0.1 -5 -20 42 36 2 +2 0.1 0.3 2 1 35 -20 1 +2 0.6 0.6 -24 -39 39 -9 2 +2 0.9 0.8 6 -35 13 -49 1 +2 0.5 0.7 17 -30 1 -46 2 +2 0.5 0.2 30 -43 34 9 2 +2 0.3 0.1 49 -42 13 -1 2 +2 0.6 0.2 50 19 -16 -29 1 +2 0.1 0.2 36 6 34 3 1 +2 0.9 0.7 -8 -26 12 1 2 +2 0.7 0.3 -15 -35 8 -35 2 +2 0.1 0.3 -27 -47 40 28 2 +2 0.7 0.8 12 -15 -29 -38 1 +2 0.7 0.9 26 -25 -5 -22 1 +2 0.1 0.3 50 35 49 17 1 +2 0.6 0.7 0 -28 26 -12 2 +2 0.6 0.9 8 -8 8 1 2 +2 0.4 0.4 15 13 35 -16 1 +2 0.6 0.8 32 8 47 -8 2 +2 0.4 0.4 48 -7 13 -1 1 +2 0.9 0.9 7 -12 28 -24 2 +2 0.2 0.3 36 11 34 -24 1 +2 0.2 0.1 -36 -46 -34 -46 2 +2 0.3 0.4 41 -28 11 -5 2 +2 0.3 0.9 35 31 22 12 1 +2 0.2 0.7 34 -47 7 4 2 +2 0.3 0.5 -4 -7 23 -36 1 +2 0.6 0.7 30 12 -6 -22 1 +2 0.4 0.5 28 27 12 -35 1 +2 0.5 0.2 -7 -28 27 17 2 +2 0.1 0.8 50 -17 4 -16 2 +2 0.1 0.6 -15 -30 34 19 2 +2 0.8 0.7 19 -19 -17 -32 1 +2 0.3 0.7 24 14 -8 -31 1 +2 0.4 0.3 34 -16 12 -17 1 +2 0.3 0.7 25 -35 43 41 2 +2 0.7 0.8 37 11 39 26 2 +2 0.1 0.7 21 -29 39 -41 2 +2 0.4 0.5 25 14 50 -19 2 +2 0.7 0.9 24 -14 18 -10 1 +2 0.7 0.7 37 -31 13 -1 1 +2 0.8 0.4 3 -26 -20 -48 1 +2 0.9 0.8 35 -14 24 -40 1 +2 0.3 0.5 7 -47 31 -21 2 +2 0.1 0.6 28 -27 -34 -46 2 +2 0.9 0.6 -18 -41 37 -44 2 +2 0.8 0.2 -22 -28 -5 -46 1 +2 0.2 0.4 12 -39 12 -3 2 +2 0.1 0.7 29 -13 -17 -42 1 +2 0.9 0.1 31 26 6 -23 1 +2 0.4 0.2 42 -11 47 -29 1 +2 0.9 0.8 -9 -29 -4 -47 2 +2 0.1 0.9 31 -50 42 2 2 +2 0.6 0.1 6 -14 26 -46 1 +2 0.8 0.9 35 -46 -10 -35 1 +2 0.1 0.8 11 -6 26 -13 2 +2 0.8 0.8 31 -2 22 -15 1 +2 0.7 0.8 22 6 41 -22 2 +2 0.8 0.6 -42 -47 33 -9 2 +2 0.5 0.9 15 -25 1 -22 1 +2 0.8 0.8 -40 -50 11 4 2 +2 0.2 0.7 50 34 5 -24 1 +2 0.1 0.6 7 -17 49 -1 2 +2 0.7 0.8 27 -50 17 -10 2 +2 0.2 0.7 47 -33 23 -46 2 +2 0.6 0.5 27 -37 -18 -31 1 +2 0.5 0.9 -1 -49 -2 -17 2 +2 0.5 0.2 -19 -28 46 -47 2 +2 0.5 0.4 -7 -20 38 4 2 +2 0.3 0.4 20 -4 27 13 2 +2 0.8 0.1 -15 -16 45 -10 2 +2 0.6 0.5 19 1 -21 -42 1 +2 0.1 0.3 5 -4 20 -8 2 +2 0.1 0.7 -36 -37 29 22 2 +2 0.4 0.5 24 0 11 -20 1 +2 0.5 0.3 -42 -48 24 -7 2 +2 0.1 0.7 29 6 15 -35 2 +2 0.4 0.6 -13 -47 48 -2 2 +2 0.5 0.1 36 4 21 -24 1 +2 0.8 0.4 -13 -38 38 -8 2 +2 0.5 0.1 41 -22 -1 -12 1 +2 0.7 0.4 40 30 16 -18 1 +2 0.4 0.3 11 -9 47 38 2 +2 0.4 0.7 -10 -17 2 -11 2 +2 0.9 0.1 -2 -20 28 13 2 +2 0.9 0.6 -8 -38 45 -14 2 +2 0.9 0.1 5 -8 0 -37 2 +2 0.3 0.7 -10 -24 46 19 2 +2 0.9 0.7 -22 -25 -14 -16 2 +2 0.8 0.4 -21 -28 44 -4 2 +2 0.2 0.7 18 -49 46 -17 2 +2 0.2 0.6 50 -49 26 -18 2 +2 0.5 0.6 30 17 50 38 2 +2 0.4 0.7 43 -31 0 -27 1 +2 0.3 0.3 21 -22 35 -49 1 +2 0.2 0.3 25 -43 43 -50 2 +2 0.8 0.5 8 -22 4 -39 1 +2 0.8 0.6 -13 -20 21 -18 2 +2 0.4 0.6 -8 -31 1 -9 2 +2 0.2 0.1 20 -25 -3 -23 1 +2 0.7 0.3 41 -47 46 -35 1 +2 0.2 0.8 23 -48 31 -9 2 +2 0.9 0.8 20 -38 -19 -38 1 +2 0.8 0.5 27 12 47 35 2 +2 0.5 0.8 43 8 19 -45 1 +2 0.7 0.4 -7 -14 7 5 2 +2 0.9 0.5 41 -12 48 29 1 +2 0.5 0.8 29 -34 -21 -49 1 +2 0.1 0.4 1 -6 49 -18 2 +2 0.2 0.2 49 -40 -15 -20 1 +2 0.8 0.6 40 -15 37 -8 1 +2 0.5 0.9 7 -48 -17 -50 1 +2 0.4 0.3 40 15 -6 -49 
1 +2 0.4 0.9 36 14 13 -29 1 +3 0.5 0.5 46 32 36 10 1 +3 0.3 0.1 0 -26 -34 -42 1 +3 0.7 0.1 49 25 -29 -39 1 +3 0.5 0.5 3 -18 8 -11 2 +3 0.4 0.8 -12 -40 -17 -50 2 +3 0.2 0.3 49 -14 -33 -39 1 +3 0.3 0.1 -18 -19 30 8 2 +3 0.2 0.4 48 -4 49 30 2 +3 0.1 0.3 42 29 14 -7 1 +3 0.1 0.3 -20 -40 39 1 2 +3 0.9 0.2 37 -25 4 -9 1 +3 0.8 0.7 -12 -46 25 -11 2 +3 0.6 0.6 15 -41 17 -40 2 +3 0.7 0.8 0 -4 32 9 2 +3 0.2 0.6 47 -4 13 -41 1 +3 0.4 0.7 11 -7 -11 -30 1 +3 0.8 0.3 38 -42 -26 -39 1 +3 0.1 0.7 47 -32 -11 -34 2 +3 0.3 0.3 33 -7 32 -27 2 +3 0.7 0.6 -9 -10 33 -47 2 +3 0.1 0.9 17 -23 11 -2 2 +3 0.9 0.8 -2 -49 36 4 2 +3 0.3 0.3 11 -19 2 -38 1 +3 0.6 0.8 -16 -20 25 -16 2 +3 0.2 0.1 49 -43 18 -39 1 +3 0.3 0.4 31 -49 -12 -50 1 +3 0.5 0.3 42 1 49 -27 1 +3 0.5 0.4 23 -21 29 -47 1 +3 0.3 0.8 0 -35 6 -17 2 +3 0.3 0.4 29 -22 35 6 2 +3 0.7 0.7 -28 -35 -20 -34 1 +3 0.8 0.1 2 -25 39 -5 1 +3 0.4 0.5 18 -37 39 -40 2 +3 0.8 0.7 19 -35 22 1 2 +3 0.5 0.6 -32 -50 -2 -19 2 +3 0.7 0.5 25 -47 35 0 1 +3 0.8 0.5 -12 -27 36 10 2 +3 0.9 0.3 28 9 12 -18 1 +3 0.7 0.2 38 20 42 25 2 +3 0.4 0.7 36 -20 21 -12 2 +3 0.7 0.4 25 -36 -9 -24 1 +3 0.9 0.6 34 -24 29 27 2 +3 0.7 0.5 42 -14 49 30 2 +3 0.7 0.6 -12 -18 -10 -49 1 +3 0.1 0.3 -8 -30 29 -11 2 +3 0.6 0.2 -13 -42 42 -1 2 +3 0.4 0.1 -14 -31 -23 -46 1 +3 0.5 0.9 37 -32 33 15 2 +3 0.9 0.8 -6 -9 43 -27 2 +3 0.3 0.3 11 -49 39 -27 1 +3 0.7 0.8 19 -28 8 -23 1 +3 0.5 0.4 -32 -46 22 -48 2 +3 0.7 0.2 22 -30 36 30 2 +3 0.3 0.3 13 -4 10 -36 1 +3 0.5 0.6 19 -42 35 -34 2 +3 0.1 0.2 49 -19 -16 -47 1 +3 0.1 0.5 50 37 32 -17 1 +3 0.1 0.3 -43 -45 28 -25 2 +3 0.3 0.3 24 -7 34 -45 1 +3 0.1 0.1 39 21 -22 -36 1 +3 0.4 0.7 28 24 -26 -33 1 +3 0.6 0.8 -13 -32 13 -34 2 +3 0.4 0.2 -42 -50 20 3 2 +3 0.4 0.9 41 32 35 9 1 +3 0.5 0.2 18 -38 -48 -50 1 +3 0.4 0.6 49 26 32 14 1 +3 0.3 0.1 -3 -49 18 -36 2 +3 0.9 0.7 42 33 -20 -30 1 +3 0.4 0.9 -5 -27 -15 -42 2 +3 0.8 0.9 -15 -43 3 -10 2 +3 0.7 0.3 -7 -43 -23 -27 1 +3 0.1 0.6 40 -50 10 -43 2 +3 0.2 0.3 11 6 37 29 2 +3 0.7 0.1 50 -3 28 -5 1 +3 0.5 0.6 47 43 16 0 1 +3 0.9 0.1 21 -2 48 -50 1 +3 0.5 0.4 9 -5 -1 -2 2 +3 0.5 0.1 -4 -36 28 16 2 +3 0.8 0.5 -1 -3 44 5 2 +3 0.5 0.3 5 -7 33 11 2 +3 0.8 0.8 50 -21 47 -8 1 +3 0.6 0.5 -11 -12 -5 -14 2 +3 0.8 0.1 49 -23 8 -17 1 +3 0.4 0.8 48 -15 13 -8 1 +3 0.3 0.6 46 -47 -9 -44 2 +3 0.5 0.1 -34 -44 37 -25 2 +3 0.7 0.3 49 -43 26 -3 1 +3 0.8 0.1 15 -30 37 -9 1 +3 0.6 0.3 49 -26 50 -41 1 +3 0.1 0.2 8 -6 11 -31 1 +3 0.9 0.3 34 29 -7 -50 1 +3 0.3 0.4 46 12 44 7 1 +3 0.7 0.7 31 5 37 -5 1 +3 0.5 0.4 15 -24 50 -33 2 +3 0.6 0.6 -12 -17 13 -20 2 +3 0.3 0.3 40 -50 28 13 2 +3 0.1 0.5 21 -6 38 -32 2 +3 0.2 0.8 -30 -40 -3 -5 2 +3 0.4 0.6 12 -50 21 -43 2 +3 0.3 0.5 -5 -32 30 -4 2 +3 0.3 0.5 33 -6 11 -3 2 +3 0.6 0.4 20 -6 -11 -40 1 +3 0.5 0.9 19 -19 38 30 2 +3 0.2 0.5 41 24 14 -44 1 +3 0.8 0.4 -26 -49 20 -20 2 +3 0.8 0.4 11 -5 -22 -26 1 +3 0.8 0.6 44 -34 5 4 1 +3 0.7 0.2 25 10 32 -7 1 +3 0.5 0.2 45 31 24 11 1 +3 0.9 0.6 47 -23 27 -21 1 +3 0.7 0.9 42 27 32 16 2 +3 0.9 0.2 37 26 14 -19 1 +3 0.3 0.2 28 -44 47 -3 2 +3 0.2 0.6 42 10 30 23 1 +3 0.7 0.2 10 -48 -8 -18 1 +3 0.1 0.7 39 21 45 42 1 +3 0.1 0.8 28 -36 32 -46 2 +3 0.9 0.1 -35 -48 12 -15 2 +3 0.3 0.6 -1 -14 35 28 2 +3 0.2 0.5 34 1 48 -44 2 +3 0.3 0.2 34 -27 49 21 2 +3 0.7 0.6 -18 -49 -18 -46 1 +3 0.5 0.1 8 -47 29 -7 2 +3 0.9 0.7 47 -45 7 -7 1 +3 0.8 0.7 48 -30 5 -39 1 +3 0.3 0.7 9 -26 32 -35 2 +3 0.1 0.6 0 -16 33 -9 2 +3 0.1 0.6 35 19 21 -46 1 +3 0.3 0.6 -10 -44 -5 -9 2 +3 0.8 0.5 -24 -44 20 -43 2 +3 0.7 0.2 21 -37 19 -35 1 +3 0.6 0.6 39 31 -23 -26 1 +3 0.2 0.9 40 -47 45 36 2 +3 0.6 0.8 27 -6 24 22 1 +3 
0.1 0.5 38 -45 39 -7 2 +3 0.3 0.5 -1 -2 11 -50 1 +3 0.5 0.7 -38 -42 30 -31 2 +3 0.2 0.1 29 8 28 -12 1 +3 0.1 0.5 10 -3 44 1 2 +3 0.2 0.2 29 27 41 -21 1 +3 0.6 0.1 24 -35 -6 -37 1 +3 0.1 0.7 14 -39 21 -45 2 +3 0.3 0.6 -26 -39 26 -43 2 +3 0.1 0.4 7 -17 -35 -45 1 +3 0.2 0.8 16 -50 46 -30 2 +3 0.6 0.9 29 -47 -15 -17 1 +3 0.8 0.9 46 -27 45 -5 2 +3 0.4 0.4 12 11 -5 -47 1 +3 0.4 0.1 48 42 18 -36 1 +3 0.5 0.3 -11 -23 50 -35 2 +3 0.4 0.5 29 -35 47 -21 2 +3 0.3 0.5 19 -21 -40 -49 1 +3 0.4 0.1 42 -1 -37 -50 1 +3 0.4 0.9 13 -17 -2 -11 1 +3 0.2 0.7 37 -4 -19 -22 1 +3 0.9 0.5 33 -28 33 -6 1 +3 0.1 0.7 15 -28 -41 -49 1 +3 0.4 0.7 41 -40 48 39 2 +3 0.4 0.1 28 0 32 26 2 +3 0.8 0.1 -13 -27 -19 -28 1 +3 0.6 0.6 4 -13 31 1 2 +3 0.2 0.5 -18 -42 47 26 2 +3 0.4 0.1 43 40 -20 -28 1 +3 0.6 0.6 14 -50 -34 -35 1 +3 0.3 0.3 -2 -37 50 -41 2 +3 0.9 0.8 24 18 46 -48 1 +3 0.5 0.5 7 2 41 -23 2 +3 0.8 0.6 26 20 29 24 1 +3 0.4 0.7 31 -26 31 -8 2 +3 0.9 0.1 14 -1 -33 -50 1 +3 0.4 0.2 -28 -30 37 -41 2 +3 0.3 0.6 41 -42 37 29 2 +3 0.5 0.6 29 -50 42 6 2 +3 0.1 0.7 31 -42 47 -12 2 +3 0.2 0.9 22 -36 -2 -5 1 +3 0.2 0.7 49 21 24 -25 1 +3 0.8 0.5 48 31 7 -21 1 +3 0.2 0.3 -12 -38 14 -22 2 +3 0.7 0.5 39 27 12 -28 1 +3 0.9 0.7 4 -26 9 -16 1 +3 0.4 0.6 -8 -37 34 16 2 +3 0.7 0.4 3 -50 2 -27 1 +3 0.2 0.1 4 -14 27 26 2 +3 0.9 0.7 -33 -36 43 -36 2 +3 0.1 0.2 -27 -40 22 5 2 +3 0.4 0.7 31 -13 6 -45 1 +3 0.3 0.5 24 -3 -38 -44 1 +3 0.7 0.7 48 -46 3 -41 1 +3 0.5 0.7 2 -9 30 2 2 +3 0.1 0.7 46 14 26 -49 1 +3 0.8 0.5 -34 -42 48 24 2 +3 0.8 0.1 37 -25 37 -14 1 +3 0.1 0.2 38 30 12 -2 1 +3 0.2 0.3 -3 -50 35 -27 2 +3 0.2 0.9 -28 -39 48 5 2 +3 0.4 0.9 27 -9 -12 -30 1 +3 0.6 0.1 -7 -27 25 7 2 +3 0.7 0.7 -36 -42 9 -27 2 +3 0.2 0.6 22 -38 40 11 2 +3 0.4 0.4 42 -45 50 -34 2 +3 0.3 0.5 50 -6 38 -40 1 +3 0.9 0.4 40 -11 40 -18 1 +3 0.3 0.6 -7 -13 34 -25 2 +3 0.5 0.6 36 -14 -7 -15 1 +3 0.5 0.6 36 5 -11 -28 1 +3 0.2 0.2 43 31 9 -5 1 +3 0.9 0.2 0 -42 34 -49 1 +3 0.4 0.4 31 13 4 -4 1 +3 0.8 0.1 -1 -30 -21 -38 1 +3 0.6 0.1 28 -9 24 -43 1 +3 0.9 0.9 -4 -21 13 -24 2 +3 0.2 0.7 7 -50 48 33 2 +3 0.7 0.7 -17 -46 33 -40 2 +3 0.2 0.5 22 -43 31 -14 2 +3 0.5 0.9 -4 -14 18 -18 2 +3 0.6 0.4 -10 -13 -45 -48 1 +3 0.3 0.4 9 -32 -15 -44 2 +3 0.3 0.7 41 7 0 -1 1 +3 0.3 0.9 5 -19 -20 -28 1 +3 0.5 0.1 41 -42 -30 -31 1 +3 0.8 0.7 -5 -45 30 -48 2 +3 0.8 0.4 -8 -44 23 -5 2 +3 0.5 0.1 -22 -28 -36 -39 1 +3 0.7 0.6 -2 -48 33 -28 2 +3 0.3 0.7 15 -11 7 -36 2 +3 0.6 0.4 25 -25 50 16 2 +3 0.6 0.2 19 -37 34 -8 1 +3 0.2 0.9 -8 -10 30 14 2 +3 0.4 0.1 31 -23 44 -45 1 +3 0.6 0.3 2 -28 44 -47 1 +3 0.7 0.6 -21 -47 -4 -9 2 +3 0.1 0.3 47 33 -45 -47 1 +3 0.2 0.1 43 12 34 2 1 +3 0.3 0.3 26 6 -21 -27 1 +3 0.2 0.2 -30 -41 48 9 2 +3 0.6 0.5 32 29 27 25 1 +3 0.3 0.5 18 -16 4 -34 1 +3 0.8 0.3 39 33 3 -13 1 +3 0.7 0.2 5 -45 1 -3 1 +3 0.1 0.1 -33 -34 32 7 2 +3 0.5 0.7 40 23 4 -15 1 +3 0.1 0.7 26 -21 16 -27 2 +3 0.7 0.4 -18 -27 42 -40 2 +3 0.6 0.4 -8 -49 48 -36 2 +3 0.9 0.8 29 -3 -43 -49 1 +3 0.5 0.9 34 -5 41 -15 2 +3 0.2 0.1 -10 -16 34 -22 2 +3 0.2 0.8 27 22 31 -37 2 +3 0.9 0.9 -17 -29 37 -22 2 +3 0.5 0.9 -9 -32 45 38 2 +3 0.8 0.6 36 10 12 -22 1 +3 0.3 0.2 -6 -32 19 -36 2 +3 0.2 0.7 7 -6 16 -35 2 +3 0.2 0.6 45 -43 37 36 2 +3 0.3 0.6 41 -33 23 -14 2 +3 0.5 0.6 -7 -28 -2 -24 2 +3 0.8 0.7 44 12 14 -14 1 +3 0.9 0.6 32 -49 46 -39 1 +3 0.8 0.2 -34 -37 -6 -41 2 +3 0.3 0.1 47 4 -6 -42 1 +3 0.9 0.9 37 -17 29 -26 1 +3 0.6 0.3 -16 -18 44 -46 1 +3 0.4 0.5 4 -45 7 -12 2 +3 0.5 0.8 -27 -48 38 -50 2 +3 0.6 0.6 10 -27 28 -30 2 +3 0.3 0.8 -18 -42 -22 -31 1 +3 0.8 0.7 39 -12 -34 -49 1 +3 0.6 0.7 46 -37 43 4 2 +3 0.2 0.6 30 -6 
-30 -32 1 +3 0.5 0.2 38 22 36 -26 1 +3 0.3 0.6 -27 -39 50 46 2 +3 0.8 0.2 -20 -27 50 -25 2 +3 0.9 0.5 27 -33 38 -23 1 +3 0.4 0.9 24 -43 -14 -15 1 +3 0.4 0.4 33 17 33 -1 1 +3 0.8 0.7 28 25 -30 -48 1 +3 0.7 0.4 -12 -31 5 -46 2 +3 0.8 0.2 31 9 47 3 1 +3 0.4 0.8 -9 -18 20 5 2 +3 0.9 0.9 -2 -49 8 -25 2 +3 0.8 0.9 33 -19 -3 -35 1 +3 0.6 0.3 36 32 25 17 1 +3 0.4 0.5 12 4 28 12 2 +3 0.3 0.6 22 6 5 -16 1 +3 0.1 0.4 44 0 15 -35 1 +3 0.6 0.9 17 14 -8 -10 1 +3 0.6 0.4 40 15 16 -32 1 +3 0.2 0.4 -1 -50 -4 -17 2 +3 0.4 0.4 -15 -39 44 5 2 +3 0.6 0.7 41 -48 14 -9 2 +3 0.2 0.6 11 -42 16 -2 2 +3 0.6 0.3 42 -5 33 -8 1 +3 0.2 0.5 45 36 20 -31 1 +3 0.8 0.8 24 18 35 32 2 +3 0.2 0.3 47 22 33 -19 1 +3 0.8 0.8 9 0 -13 -20 1 +3 0.8 0.6 -27 -42 42 -45 2 +3 0.3 0.4 32 8 31 18 2 +3 0.6 0.6 49 -21 39 12 2 +3 0.5 0.8 48 24 -5 -49 1 +4 0.9 0.3 -20 -48 17 6 2 +4 0.8 0.7 13 -33 -2 -13 1 +4 0.8 0.6 -24 -26 28 -48 2 +4 0.6 0.2 1 -19 -12 -46 1 +4 0.3 0.2 10 -44 -26 -38 1 +4 0.9 0.8 -12 -20 15 -50 2 +4 0.1 0.2 12 -27 34 13 2 +4 0.2 0.3 10 -30 23 -15 2 +4 0.7 0.5 29 -13 38 9 2 +4 0.1 0.9 23 16 45 -27 2 +4 0.6 0.3 11 -46 9 -46 1 +4 0.1 0.5 -36 -43 32 15 2 +4 0.8 0.5 -7 -35 22 -30 2 +4 0.6 0.7 50 24 -3 -43 1 +4 0.6 0.5 49 4 -41 -46 1 +4 0.7 0.5 39 38 45 -15 1 +4 0.1 0.8 50 -2 7 -26 2 +4 0.8 0.3 23 -14 34 6 2 +4 0.2 0.8 22 3 -8 -23 2 +4 0.5 0.5 33 -9 -19 -25 1 +4 0.8 0.7 48 6 32 8 1 +4 0.7 0.6 48 -26 48 5 2 +4 0.7 0.6 9 -25 -22 -39 1 +4 0.5 0.6 41 -21 -28 -41 1 +4 0.8 0.7 19 11 47 -32 1 +4 0.6 0.7 22 0 -21 -44 1 +4 0.9 0.1 -9 -44 50 -32 2 +4 0.3 0.3 1 -22 32 12 2 +4 0.4 0.7 22 -16 27 -12 2 +4 0.7 0.6 14 -10 27 -8 2 +4 0.4 0.3 50 31 2 -2 1 +4 0.1 0.8 47 -36 -22 -32 2 +4 0.7 0.9 1 -3 37 -30 2 +4 0.3 0.9 36 8 23 10 1 +4 0.1 0.3 28 27 -36 -45 1 +4 0.5 0.3 42 -27 46 1 2 +4 0.9 0.5 28 10 29 20 2 +4 0.1 0.9 10 -17 25 18 2 +4 0.2 0.3 37 23 -24 -41 1 +4 0.2 0.2 18 -40 42 -48 2 +4 0.3 0.6 18 -27 41 39 2 +4 0.4 0.7 21 6 -1 -50 1 +4 0.4 0.4 15 14 20 -35 1 +4 0.1 0.8 13 -49 44 33 2 +4 0.3 0.2 -31 -42 30 -45 2 +4 0.6 0.4 -12 -23 22 -42 2 +4 0.9 0.1 -26 -40 -25 -30 2 +4 0.7 0.2 38 -27 34 -13 1 +4 0.9 0.1 42 -31 45 -1 1 +4 0.5 0.8 41 -3 37 -12 1 +4 0.2 0.5 31 2 17 -44 1 +4 0.8 0.8 -4 -31 16 -15 2 +4 0.9 0.3 24 1 15 -4 1 +4 0.8 0.4 9 -22 28 -5 2 +4 0.9 0.3 47 -24 17 -16 1 +4 0.8 0.4 -12 -14 -13 -21 2 +4 0.1 0.9 1 -50 4 -36 2 +4 0.6 0.7 10 -36 -16 -22 1 +4 0.5 0.2 26 13 24 -31 1 +4 0.4 0.9 1 -15 18 -41 2 +4 0.1 0.5 41 32 -33 -47 1 +4 0.6 0.4 4 -25 -12 -50 1 +4 0.6 0.2 -2 -22 25 5 2 +4 0.5 0.3 -2 -8 34 31 2 +4 0.9 0.9 6 -26 -6 -45 1 +4 0.1 0.2 23 0 38 -6 2 +4 0.8 0.7 -25 -40 9 -37 1 +4 0.5 0.2 -5 -14 48 -18 2 +4 0.2 0.1 6 -34 0 -50 1 +4 0.9 0.1 22 -2 -7 -47 1 +4 0.5 0.5 41 29 41 23 1 +4 0.3 0.5 2 -4 20 -24 2 +4 0.5 0.9 -14 -42 2 -13 2 +4 0.9 0.9 46 16 49 2 1 +4 0.9 0.3 49 39 -16 -29 1 +4 0.1 0.2 36 -28 40 -15 2 +4 0.5 0.9 3 -7 35 15 2 +4 0.1 0.4 3 -18 32 -12 2 +4 0.7 0.6 22 -34 18 4 2 +4 0.8 0.2 5 -6 16 -14 1 +4 0.3 0.9 24 9 -9 -32 1 +4 0.4 0.9 23 13 11 -46 1 +4 0.9 0.5 38 -6 -5 -32 1 +4 0.7 0.5 -23 -48 -26 -32 2 +4 0.2 0.8 19 -14 48 31 2 +4 0.3 0.2 50 39 33 30 1 +4 0.5 0.8 -33 -50 35 -45 2 +4 0.8 0.3 30 -5 32 -33 1 +4 0.1 0.8 -31 -40 35 4 2 +4 0.9 0.1 41 10 4 -49 1 +4 0.7 0.3 -9 -30 41 -26 2 +4 0.9 0.5 -20 -31 -25 -41 1 +4 0.3 0.8 45 -15 28 -17 2 +4 0.5 0.4 -24 -37 36 32 2 +4 0.8 0.1 18 -45 19 -10 1 +4 0.8 0.7 37 -19 -10 -40 1 +4 0.1 0.6 46 13 -30 -39 1 +4 0.3 0.4 15 -4 46 17 2 +4 0.4 0.4 -12 -32 45 38 2 +4 0.5 0.4 36 -19 -11 -13 1 +4 0.3 0.5 47 32 -21 -47 1 +4 0.3 0.3 -2 -25 -33 -34 1 +4 0.4 0.2 39 1 25 -35 1 +4 0.3 0.9 32 -32 35 21 2 +4 0.2 0.3 -9 
[truncated diff hunk: flattened `+` addition lines from a raw trial-data file, beginning and ending mid-record. Each recoverable row has the form `subjID p1 p2 amount1 amount2 amount3 amount4 choice` (choice coded 1 or 2), covering subjects 4 through 12.]
2 +12 0.5 0.2 41 40 -2 -18 1 +12 0.2 0.9 50 39 -8 -12 1 +12 0.3 0.8 3 -46 -23 -47 2 +12 0.1 0.8 1 -20 28 2 2 +12 0.7 0.8 -11 -18 43 41 2 +12 0.7 0.3 31 -38 -29 -37 1 +12 0.8 0.9 28 -34 38 29 2 +12 0.2 0.8 39 -38 29 -27 2 +12 0.1 0.7 4 2 26 -11 2 +12 0.3 0.1 48 -36 -18 -34 1 +12 0.5 0.7 -4 -13 -19 -37 1 +12 0.7 0.1 49 -39 39 1 1 +12 0.4 0.8 29 20 -24 -28 1 +12 0.7 0.8 16 -31 14 -12 2 +12 0.3 0.2 49 -8 29 -45 1 +12 0.1 0.3 40 -3 18 -9 1 +12 0.8 0.6 -7 -9 3 -27 2 +12 0.2 0.2 14 -20 24 -28 1 +12 0.8 0.5 9 -27 0 -39 1 +12 0.4 0.8 46 -4 47 -49 2 +12 0.8 0.1 -13 -24 32 -26 1 +12 0.8 0.8 -28 -36 22 12 2 +12 0.4 0.9 23 -47 7 -50 2 +12 0.1 0.7 0 -11 6 -2 2 +12 0.3 0.4 29 -9 20 -27 1 +12 0.8 0.3 -9 -33 12 8 2 +12 0.1 0.8 20 -30 29 -38 2 +12 0.3 0.6 16 -4 18 6 2 +12 0.1 0.6 -42 -44 8 -14 2 +12 0.2 0.8 38 -41 28 -39 2 +12 0.8 0.2 49 21 44 25 1 +12 0.1 0.1 -8 -23 45 -8 2 +12 0.5 0.4 12 -31 36 3 2 +12 0.4 0.9 -20 -27 18 -1 2 +12 0.5 0.2 -2 -22 38 -23 2 +12 0.9 0.4 49 -34 43 24 1 +12 0.8 0.6 41 -38 -20 -33 1 +12 0.1 0.3 35 -27 34 3 2 +12 0.2 0.3 -10 -50 44 -28 2 +12 0.1 0.2 33 27 -1 -25 1 +12 0.9 0.2 -14 -37 31 21 2 +12 0.2 0.3 -23 -32 7 -43 2 +12 0.2 0.5 38 -7 0 -9 1 +12 0.7 0.8 10 -44 41 36 2 +12 0.9 0.3 19 -5 15 -31 1 +12 0.4 0.2 41 33 -32 -44 1 +12 0.6 0.7 18 -44 13 -44 2 +12 0.8 0.3 48 -44 -9 -33 1 +12 0.4 0.9 -11 -20 25 -36 2 +12 0.3 0.4 3 -47 36 29 2 +12 0.3 0.2 23 -49 -6 -49 1 +12 0.4 0.4 5 -37 -14 -20 1 +12 0.3 0.8 -11 -24 19 -36 2 +12 0.5 0.6 2 -34 41 -30 2 +13 0.9 0.1 35 -15 49 -44 1 +13 0.5 0.8 33 26 23 -21 1 +13 0.5 0.3 27 -26 44 3 2 +13 0.5 0.8 26 -15 29 -11 2 +13 0.7 0.1 -13 -24 42 15 2 +13 0.8 0.4 45 -22 44 -5 1 +13 0.2 0.6 50 -23 12 -35 2 +13 0.1 0.4 33 12 6 -32 1 +13 0.2 0.9 14 2 0 -6 1 +13 0.2 0.7 21 -8 15 -31 1 +13 0.8 0.8 43 23 6 -24 1 +13 0.2 0.9 -5 -38 26 5 2 +13 0.4 0.1 47 -41 15 10 2 +13 0.9 0.7 24 13 35 28 2 +13 0.4 0.3 22 11 43 -27 1 +13 0.5 0.5 45 33 14 -44 1 +13 0.8 0.9 -42 -47 27 -33 2 +13 0.8 0.7 0 -32 21 -1 2 +13 0.5 0.6 37 26 37 -4 1 +13 0.8 0.2 -12 -47 -37 -39 1 +13 0.9 0.1 49 -49 13 -21 1 +13 0.9 0.6 -3 -28 34 -16 2 +13 0.8 0.1 23 -9 29 8 1 +13 0.4 0.3 -29 -32 -3 -44 2 +13 0.8 0.4 -18 -22 38 -39 2 +13 0.5 0.6 35 -6 13 -15 1 +13 0.8 0.3 25 -8 46 36 2 +13 0.4 0.7 43 -49 20 17 2 +13 0.5 0.6 18 -13 45 -17 2 +13 0.8 0.1 40 -25 -3 -15 1 +13 0.3 0.1 49 23 46 11 1 +13 0.3 0.5 -23 -33 -37 -38 1 +13 0.1 0.5 45 17 4 -31 1 +13 0.5 0.7 16 15 18 1 1 +13 0.2 0.6 28 -10 25 18 2 +13 0.1 0.7 41 -12 -33 -37 1 +13 0.2 0.1 17 -7 42 -5 2 +13 0.6 0.8 41 -21 18 11 2 +13 0.3 0.1 17 -1 -28 -35 1 +13 0.7 0.3 6 -1 29 -26 1 +13 0.3 0.1 26 17 49 4 1 +13 0.1 0.5 28 27 -18 -42 1 +13 0.4 0.1 -20 -22 -8 -28 1 +13 0.3 0.8 46 27 20 -50 1 +13 0.8 0.1 -30 -34 36 -36 1 +13 0.8 0.7 18 17 47 39 2 +13 0.7 0.3 -39 -42 -15 -37 2 +13 0.6 0.3 42 8 20 6 1 +13 0.1 0.3 24 9 31 12 2 +13 0.5 0.7 -10 -48 22 3 2 +13 0.5 0.1 47 -11 34 -23 1 +13 0.2 0.1 6 4 1 -2 1 +13 0.8 0.3 31 -18 11 2 1 +13 0.5 0.2 35 -3 24 -12 1 +13 0.3 0.9 -17 -18 10 -24 2 +13 0.3 0.2 -20 -37 36 7 2 +13 0.4 0.3 19 16 41 9 2 +13 0.6 0.5 18 -10 14 -1 1 +13 0.3 0.1 -3 -11 36 -7 2 +13 0.1 0.6 48 -5 49 11 2 +13 0.2 0.1 30 -18 21 -12 1 +13 0.4 0.4 16 -12 19 -35 1 +13 0.8 0.8 46 21 38 -23 1 +13 0.4 0.4 -17 -47 19 6 2 +13 0.2 0.5 17 -4 -9 -15 1 +13 0.6 0.8 3 -36 39 -19 2 +13 0.1 0.3 45 -27 -42 -43 1 +13 0.1 0.3 42 -27 -6 -29 1 +13 0.7 0.8 10 -29 14 -34 2 +13 0.9 0.8 29 5 -28 -34 1 +13 0.8 0.7 -22 -45 39 -20 2 +13 0.1 0.3 2 1 4 -20 1 +13 0.4 0.6 12 -32 -14 -23 1 +13 0.9 0.8 43 -2 44 6 1 +13 0.1 0.8 -41 -49 39 -2 2 +13 0.4 0.6 37 6 47 24 2 +13 0.7 0.3 -1 -43 -26 
-42 1 +13 0.8 0.8 48 34 -41 -49 1 +13 0.4 0.3 43 -21 -28 -35 1 +13 0.1 0.4 -31 -44 41 23 2 +13 0.2 0.4 37 -47 39 3 2 +13 0.4 0.4 -10 -17 -1 -27 2 +13 0.7 0.7 21 -25 29 0 2 +13 0.6 0.3 30 17 29 -37 1 +13 0.1 0.8 7 -31 2 -11 2 +13 0.3 0.8 -21 -36 -9 -47 2 +13 0.8 0.2 12 -38 13 -23 1 +13 0.4 0.1 42 -17 39 -9 1 +13 0.9 0.7 43 -49 13 -31 1 +13 0.9 0.9 12 -30 -36 -48 1 +13 0.9 0.7 24 -47 -30 -45 1 +13 0.5 0.2 -1 -4 44 -8 2 +13 0.8 0.3 -16 -39 39 -36 2 +13 0.7 0.8 35 2 26 -20 1 +13 0.1 0.5 48 32 27 -14 1 +13 0.5 0.6 -3 -17 49 1 2 +13 0.9 0.6 22 -18 28 -38 1 +13 0.4 0.6 32 -13 7 -7 1 +13 0.8 0.3 32 5 26 24 2 +13 0.2 0.8 4 -50 20 -8 2 +13 0.8 0.2 43 -34 2 0 1 +13 0.9 0.6 48 -26 22 -32 1 +13 0.4 0.4 0 -22 -5 -42 1 +13 0.8 0.9 17 -32 12 10 2 +13 0.1 0.3 42 34 43 24 1 +13 0.7 0.4 39 -43 26 -22 1 +13 0.6 0.2 47 3 -35 -37 1 +13 0.9 0.1 30 -16 49 22 2 +13 0.2 0.3 -7 -20 8 -9 2 +13 0.6 0.4 46 -50 29 25 2 +13 0.3 0.4 40 -10 44 -48 1 +13 0.1 0.1 -26 -39 7 -29 2 +13 0.5 0.3 48 -21 -14 -41 1 +13 0.5 0.4 -26 -40 13 -46 2 +13 0.7 0.5 4 -47 -9 -34 1 +13 0.4 0.7 41 -12 -9 -43 1 +13 0.6 0.4 38 3 24 20 2 +13 0.7 0.6 37 -40 30 -27 1 +13 0.4 0.9 30 -18 13 -41 1 +13 0.2 0.1 43 27 20 -12 1 +13 0.8 0.5 11 -26 33 12 2 +13 0.7 0.1 42 -32 30 19 1 +13 0.8 0.5 49 -36 46 11 2 +13 0.8 0.7 4 -17 9 -11 2 +13 0.1 0.3 36 -16 34 -50 2 +13 0.1 0.5 40 -41 -48 -49 1 +13 0.2 0.9 2 1 40 -42 2 +13 0.8 0.7 15 -14 -25 -39 1 +13 0.7 0.5 28 -37 17 -48 1 +13 0.3 0.8 29 28 -22 -43 1 +13 0.7 0.5 36 -29 23 -13 1 +13 0.4 0.6 38 -22 20 -50 1 +13 0.5 0.4 -22 -47 14 -8 2 +13 0.6 0.4 5 -44 16 -24 2 +13 0.9 0.3 17 -23 24 -28 1 +13 0.5 0.5 36 27 -41 -49 1 +13 0.6 0.3 -37 -45 15 -16 2 +13 0.6 0.6 0 -46 33 -7 2 +13 0.8 0.9 14 -49 44 43 2 +13 0.5 0.2 45 -22 0 -34 1 +13 0.9 0.5 30 -45 -17 -42 1 +13 0.3 0.5 34 13 21 -8 1 +13 0.7 0.9 31 14 -4 -22 1 +13 0.6 0.7 9 -8 -27 -49 1 +13 0.7 0.1 50 39 -17 -48 1 +13 0.7 0.2 16 -24 20 -46 1 +13 0.4 0.7 50 -20 3 -8 1 +13 0.8 0.2 47 41 -10 -49 1 +13 0.6 0.6 42 -34 19 -6 1 +13 0.4 0.8 33 -23 28 19 2 +13 0.4 0.5 1 -48 32 17 2 +13 0.4 0.7 31 2 43 14 2 +13 0.5 0.8 -35 -46 -17 -48 2 +13 0.1 0.7 13 0 36 -7 2 +13 0.6 0.6 -22 -49 35 -22 2 +13 0.3 0.5 32 -22 46 -8 2 +13 0.1 0.8 24 -16 23 -7 2 +13 0.4 0.1 -25 -29 47 5 2 +13 0.7 0.8 38 -45 36 28 2 +13 0.5 0.7 37 -26 -8 -33 1 +13 0.2 0.6 34 -35 -6 -17 2 +13 0.8 0.9 21 -21 -19 -24 1 +13 0.7 0.1 37 -42 -32 -38 1 +13 0.3 0.7 14 -15 36 -12 2 +13 0.9 0.8 44 -19 4 -16 1 +13 0.2 0.2 34 -30 19 -36 1 +13 0.1 0.9 44 19 22 4 2 +13 0.3 0.1 -27 -28 -10 -20 2 +13 0.8 0.9 -2 -27 27 -47 2 +13 0.8 0.1 -9 -31 -4 -22 1 +13 0.3 0.3 16 -31 -6 -43 1 +13 0.5 0.4 46 12 -11 -43 1 +13 0.2 0.3 -12 -50 38 3 2 +13 0.6 0.5 17 -25 4 -27 1 +13 0.4 0.4 -8 -44 -2 -49 1 +13 0.4 0.9 29 4 8 -23 1 +13 0.6 0.3 10 -35 13 -26 1 +13 0.9 0.9 9 -35 -5 -16 1 +13 0.1 0.2 33 3 35 -32 1 +13 0.4 0.1 26 24 0 -7 1 +13 0.7 0.2 9 4 20 15 2 +13 0.2 0.2 35 -33 -6 -18 1 +13 0.2 0.5 41 -41 8 -10 2 +13 0.4 0.8 20 -40 23 -16 2 +13 0.3 0.3 48 9 16 -35 1 +13 0.7 0.9 50 -6 47 -17 2 +13 0.5 0.9 46 -11 -5 -9 1 +13 0.7 0.6 41 -12 0 -32 1 +13 0.9 0.1 -12 -32 -7 -15 1 +13 0.8 0.2 37 29 -5 -16 1 +13 0.4 0.8 36 7 -25 -44 1 +13 0.2 0.9 -4 -15 -22 -37 1 +13 0.8 0.1 -31 -34 33 -5 2 +13 0.1 0.3 9 -17 -33 -41 1 +13 0.4 0.1 8 -2 48 -8 1 +13 0.3 0.4 47 -26 48 -40 2 +13 0.5 0.8 23 -26 34 -19 2 +13 0.6 0.5 11 -47 47 43 2 +13 0.6 0.5 47 37 25 -2 1 +13 0.5 0.1 28 24 33 -47 1 +13 0.8 0.4 17 3 22 6 2 +13 0.1 0.8 -46 -50 -13 -33 2 +13 0.4 0.5 34 24 22 16 1 +13 0.6 0.9 18 -45 -4 -40 1 +13 0.7 0.8 16 11 4 -25 1 +13 0.3 0.2 -20 -37 4 -43 2 +13 0.5 0.9 -1 -10 44 32 2 
+13 0.4 0.8 43 -36 27 -38 2 +13 0.3 0.3 4 -42 41 -1 2 +13 0.7 0.8 47 -24 -40 -41 1 +13 0.6 0.4 4 -2 48 25 2 +13 0.2 0.3 33 -11 35 26 2 +13 0.7 0.1 8 0 13 3 1 +13 0.4 0.5 -5 -26 43 34 2 +13 0.8 0.7 -18 -50 -3 -32 2 +13 0.2 0.9 34 -2 29 14 2 +13 0.3 0.6 1 -27 18 6 2 +13 0.8 0.4 40 2 12 -14 1 +13 0.5 0.9 -14 -37 -18 -34 1 +13 0.9 0.2 6 -11 42 -50 1 +13 0.5 0.4 47 38 34 3 1 +13 0.9 0.7 21 -31 5 3 1 +13 0.9 0.3 19 8 23 -1 1 +13 0.9 0.1 -1 -14 0 -23 1 +13 0.2 0.3 -6 -15 47 -40 2 +13 0.7 0.8 40 -26 44 -33 2 +13 0.6 0.8 36 -26 -4 -10 1 +13 0.7 0.3 25 -17 -8 -42 1 +13 0.3 0.6 31 -25 14 7 2 +13 0.6 0.7 17 -25 28 4 2 +13 0.3 0.9 34 -8 26 8 2 +13 0.7 0.5 16 -45 35 29 2 +13 0.2 0.7 28 -20 27 -29 2 +13 0.6 0.4 45 -31 -11 -33 1 +13 0.9 0.6 31 -18 46 24 2 +13 0.3 0.6 12 -42 3 -5 2 +13 0.5 0.6 6 -12 23 -26 2 +13 0.8 0.5 12 -21 50 -2 2 +13 0.9 0.8 45 23 -22 -29 1 +13 0.3 0.6 18 16 16 -37 1 +13 0.2 0.5 43 -21 29 -29 1 +13 0.5 0.2 38 27 -35 -41 1 +13 0.8 0.7 46 5 49 -42 1 +13 0.9 0.3 6 -46 7 -50 1 +13 0.2 0.8 -11 -12 -19 -40 1 +13 0.6 0.2 8 -44 -10 -43 1 +13 0.3 0.7 41 -6 40 23 2 +13 0.7 0.8 10 -25 -32 -50 1 +13 0.5 0.3 49 13 37 -31 1 +13 0.8 0.5 39 19 50 -14 1 +13 0.4 0.6 29 24 44 13 2 +13 0.7 0.4 -1 -41 46 -23 2 +13 0.6 0.4 44 28 -3 -17 1 +13 0.3 0.8 49 -40 50 -41 2 +13 0.8 0.8 48 -37 26 21 1 +13 0.3 0.7 -17 -19 4 -30 2 +13 0.5 0.7 36 2 26 -2 2 +13 0.4 0.1 12 -42 -15 -25 1 +13 0.1 0.4 32 -29 45 -17 2 +13 0.4 0.2 40 4 33 -40 1 +13 0.5 0.1 33 -8 -32 -45 1 +13 0.7 0.6 8 -42 -3 -5 1 +13 0.4 0.6 -29 -45 46 15 2 +13 0.7 0.3 18 -14 40 -14 1 +13 0.1 0.6 -31 -44 -34 -36 2 +13 0.7 0.3 20 -14 7 1 1 +13 0.2 0.3 16 -38 50 2 2 +13 0.5 0.4 8 -39 -4 -49 1 +13 0.7 0.2 18 3 -6 -30 1 +13 0.3 0.8 43 -4 -2 -40 1 +13 0.6 0.6 16 -17 34 33 2 +13 0.7 0.6 -17 -32 17 10 2 +13 0.2 0.8 40 -40 48 18 2 +13 0.9 0.5 41 35 50 -31 1 +13 0.9 0.3 20 13 23 -50 1 +13 0.8 0.7 33 -2 47 -14 2 +13 0.2 0.7 32 -49 34 11 2 +13 0.6 0.9 40 8 44 5 2 +13 0.8 0.5 50 48 21 3 1 +13 0.3 0.3 -46 -49 28 -39 2 +13 0.7 0.6 -12 -21 33 3 2 +13 0.5 0.8 -29 -35 23 -34 2 +13 0.8 0.1 9 -34 36 34 2 +13 0.7 0.8 6 -21 40 36 2 +13 0.5 0.7 11 -35 19 10 2 +13 0.4 0.6 14 6 32 -3 1 +13 0.4 0.8 45 34 -30 -31 1 +13 0.5 0.6 20 5 43 -6 2 +13 0.4 0.7 -12 -38 22 15 2 +13 0.3 0.3 10 -17 16 -47 1 +13 0.6 0.5 44 26 35 29 1 +13 0.6 0.6 6 0 -2 -47 1 +13 0.5 0.8 38 33 15 -44 1 +13 0.5 0.1 7 -31 27 -6 2 +13 0.6 0.8 10 2 24 -18 2 +13 0.2 0.5 36 20 40 -35 1 +13 0.7 0.9 18 -14 -20 -24 1 +13 0.2 0.9 -36 -37 7 -50 2 +13 0.5 0.7 20 -25 15 -5 2 +13 0.9 0.7 47 40 -26 -29 1 +14 0.6 0.5 49 -49 -13 -37 1 +14 0.1 0.7 22 7 50 3 2 +14 0.3 0.9 -32 -39 21 13 2 +14 0.2 0.1 6 -44 -7 -33 1 +14 0.6 0.1 6 -34 2 -15 2 +14 0.6 0.9 43 -25 -35 -46 1 +14 0.2 0.6 33 -16 31 -28 2 +14 0.5 0.9 9 -35 31 -20 2 +14 0.3 0.5 34 -40 -20 -21 1 +14 0.4 0.1 49 -27 19 -41 1 +14 0.6 0.4 26 22 34 28 2 +14 0.6 0.2 18 -32 1 -27 1 +14 0.7 0.6 32 3 44 24 2 +14 0.2 0.9 44 37 42 13 2 +14 0.1 0.4 -35 -38 40 -29 2 +14 0.2 0.8 39 -44 33 11 2 +14 0.7 0.9 19 -2 21 -7 1 +14 0.1 0.3 -30 -32 23 -42 2 +14 0.8 0.7 3 -23 -5 -37 1 +14 0.2 0.5 24 -21 -15 -44 1 +14 0.4 0.2 4 -11 48 -29 2 +14 0.8 0.3 25 -30 17 -48 1 +14 0.4 0.6 22 18 14 -10 1 +14 0.4 0.2 39 8 48 -24 2 +14 0.3 0.6 8 -42 36 -35 2 +14 0.1 0.6 -3 -11 -6 -15 2 +14 0.5 0.1 5 -4 -2 -23 1 +14 0.3 0.7 27 -4 25 4 1 +14 0.8 0.8 -17 -29 -4 -5 1 +14 0.9 0.6 -2 -43 -22 -40 1 +14 0.9 0.5 23 -19 -5 -13 1 +14 0.5 0.7 -7 -8 30 -8 2 +14 0.8 0.7 37 35 27 -15 1 +14 0.1 0.1 50 22 29 9 2 +14 0.3 0.2 19 -11 0 -9 1 +14 0.1 0.5 36 -3 48 -16 2 +14 0.3 0.8 13 -39 43 12 2 +14 0.6 0.6 30 -14 11 -1 2 +14 0.3 0.8 28 
-48 -4 -11 1 +14 0.2 0.7 33 -42 -18 -38 1 +14 0.2 0.9 25 -36 -15 -25 2 +14 0.5 0.6 -1 -30 41 -17 2 +14 0.7 0.5 -6 -40 13 -49 2 +14 0.8 0.3 21 4 -11 -47 1 +14 0.1 0.1 -10 -34 30 -28 2 +14 0.7 0.3 12 -27 15 -40 1 +14 0.5 0.2 -35 -42 13 -5 2 +14 0.8 0.8 18 5 21 -13 2 +14 0.1 0.5 22 6 41 -8 1 +14 0.4 0.8 45 -13 -16 -49 1 +14 0.5 0.1 9 2 -37 -49 1 +14 0.6 0.8 -4 -24 -14 -43 1 +14 0.4 0.9 31 -31 -7 -28 1 +14 0.3 0.6 38 -16 20 -27 1 +14 0.4 0.3 -11 -48 -1 -18 2 +14 0.5 0.5 -17 -38 38 -43 2 +14 0.7 0.8 20 -31 32 -21 2 +14 0.3 0.3 20 -8 35 -15 2 +14 0.8 0.3 -11 -34 17 -17 2 +14 0.6 0.4 -16 -20 26 -49 1 +14 0.8 0.1 30 -29 46 42 2 +14 0.3 0.8 40 14 24 14 2 +14 0.1 0.1 40 -37 -3 -44 1 +14 0.7 0.5 50 33 42 37 1 +14 0.7 0.5 16 6 20 -6 1 +14 0.5 0.4 13 -2 19 -37 2 +14 0.5 0.9 41 16 41 -1 2 +14 0.2 0.1 16 2 38 22 2 +14 0.5 0.7 29 -33 31 -4 2 +14 0.6 0.5 42 -30 12 -33 1 +14 0.5 0.4 31 -14 -3 -10 1 +14 0.3 0.4 5 -36 35 -24 1 +14 0.5 0.2 31 -18 50 -24 1 +14 0.5 0.7 2 -38 44 40 2 +14 0.3 0.7 27 2 -2 -17 1 +14 0.1 0.4 26 3 26 -19 1 +14 0.2 0.8 34 31 -7 -48 1 +14 0.2 0.4 35 -25 -21 -38 1 +14 0.6 0.8 -7 -49 -26 -35 1 +14 0.3 0.8 33 -45 21 9 2 +14 0.3 0.5 -27 -43 36 -35 2 +14 0.1 0.3 27 6 -22 -44 1 +14 0.7 0.2 4 -35 21 -7 2 +14 0.2 0.3 40 -32 4 -14 1 +14 0.3 0.6 45 8 9 -7 1 +14 0.1 0.8 39 -13 37 30 2 +14 0.9 0.3 -44 -49 39 -43 2 +14 0.5 0.4 0 -9 34 1 2 +14 0.2 0.1 45 -26 48 -23 1 +14 0.2 0.2 20 -22 -18 -50 1 +14 0.6 0.3 19 -41 22 -47 1 +14 0.6 0.7 26 -10 27 1 2 +14 0.9 0.1 25 -17 32 21 2 +14 0.3 0.8 26 -41 4 -16 2 +14 0.7 0.2 27 -42 -16 -45 1 +14 0.4 0.5 -2 -3 19 -26 2 +14 0.7 0.6 33 21 49 31 1 +14 0.3 0.6 18 -25 -12 -13 1 +14 0.6 0.4 13 -10 28 8 2 +14 0.8 0.9 32 -27 1 -7 1 +14 0.8 0.2 32 -6 22 -37 1 +14 0.1 0.5 -15 -31 42 -35 2 +14 0.7 0.7 -20 -33 24 -14 2 +14 0.2 0.7 46 -18 30 -14 2 +14 0.1 0.5 46 -33 39 -23 2 +14 0.8 0.6 18 -28 -17 -35 1 +14 0.5 0.6 35 -26 48 8 2 +14 0.1 0.2 -22 -24 35 7 2 +14 0.6 0.6 -1 -21 27 -19 1 +14 0.3 0.6 33 -6 39 10 2 +14 0.3 0.9 -27 -32 20 -47 2 +14 0.4 0.7 -16 -43 -15 -45 1 +14 0.2 0.9 16 -40 12 10 2 +14 0.8 0.1 -13 -43 1 -47 1 +14 0.3 0.8 -23 -39 47 13 2 +14 0.2 0.2 37 -48 43 12 2 +14 0.7 0.9 -6 -48 -33 -45 1 +14 0.7 0.8 43 -19 -1 -38 1 +14 0.3 0.8 -5 -7 -5 -26 2 +14 0.7 0.7 31 -18 11 -49 1 +14 0.1 0.1 6 -13 21 -44 1 +14 0.5 0.4 40 9 31 5 1 +14 0.9 0.9 5 -46 -25 -44 1 +14 0.7 0.8 44 39 3 -31 1 +14 0.9 0.9 3 -36 24 -1 2 +14 0.7 0.3 -3 -45 23 -36 2 +14 0.4 0.7 21 -16 2 -10 1 +14 0.6 0.6 6 -3 42 19 2 +14 0.8 0.4 34 -28 35 33 1 +14 0.5 0.7 11 2 22 -42 2 +14 0.8 0.6 22 -23 43 -45 1 +14 0.4 0.9 3 -21 47 -1 2 +14 0.4 0.7 12 4 23 -39 1 +14 0.7 0.5 40 -5 24 -30 1 +14 0.1 0.9 20 9 -2 -42 1 +14 0.1 0.1 40 21 44 -45 1 +14 0.4 0.1 21 -31 5 -25 1 +14 0.2 0.2 -24 -34 25 23 2 +14 0.9 0.3 23 -21 1 -25 1 +14 0.1 0.3 -1 -47 8 7 2 +14 0.9 0.1 10 -30 43 -39 2 +14 0.1 0.8 13 0 21 -3 2 +14 0.5 0.3 37 17 17 -36 1 +14 0.4 0.1 10 -28 34 0 2 +14 0.2 0.1 -4 -31 37 -36 2 +14 0.4 0.4 38 33 22 -13 1 +14 0.6 0.5 36 -10 -27 -35 1 +14 0.1 0.7 -2 -42 22 -10 2 +14 0.7 0.9 40 25 32 -35 1 +14 0.8 0.9 6 -41 39 23 2 +14 0.8 0.7 49 14 -30 -32 1 +14 0.7 0.1 -15 -36 17 16 2 +14 0.8 0.1 -3 -42 34 -19 1 +14 0.2 0.8 48 43 14 -12 1 +14 0.4 0.5 24 23 30 27 2 +14 0.3 0.1 28 -38 34 -36 1 +14 0.8 0.5 23 -46 -20 -27 1 +14 0.4 0.5 50 -6 38 1 1 +14 0.9 0.4 11 -11 42 -16 1 +14 0.4 0.6 41 38 36 -13 1 +14 0.6 0.6 -8 -14 47 -9 2 +14 0.4 0.5 -5 -47 7 -26 2 +14 0.4 0.2 -2 -44 3 -28 1 +14 0.2 0.5 -4 -33 10 -14 1 +14 0.2 0.3 28 -7 35 -6 2 +14 0.3 0.3 34 31 40 -43 1 +14 0.3 0.5 -5 -45 20 -7 2 +14 0.9 0.2 -12 -40 45 -17 2 +14 0.3 0.3 40 15 24 -8 
1 +14 0.7 0.4 4 1 40 -44 2 +14 0.9 0.3 47 -25 23 4 1 +14 0.6 0.1 31 -19 -3 -6 1 +14 0.4 0.1 -10 -24 48 -11 2 +14 0.1 0.7 41 -46 -24 -34 2 +14 0.3 0.2 5 1 26 -49 1 +14 0.5 0.2 40 9 15 8 1 +14 0.3 0.5 46 8 19 2 1 +14 0.3 0.2 38 -23 35 -19 2 +14 0.8 0.6 44 -24 49 -31 1 +14 0.1 0.1 48 -37 4 -11 1 +14 0.2 0.5 46 8 13 -31 1 +14 0.9 0.4 23 16 15 -3 1 +14 0.7 0.5 31 -44 44 -30 1 +14 0.9 0.8 37 -46 -27 -31 1 +14 0.1 0.1 -25 -27 46 20 2 +14 0.6 0.9 -2 -36 46 25 2 +14 0.2 0.3 -1 -8 -36 -47 1 +14 0.7 0.6 29 9 13 -26 2 +14 0.5 0.8 23 -16 32 -3 2 +14 0.9 0.9 10 -41 33 -12 2 +14 0.9 0.7 3 -27 46 36 2 +14 0.6 0.6 39 -24 21 11 2 +14 0.3 0.3 14 -27 10 -21 1 +14 0.6 0.7 -17 -44 19 -18 2 +14 0.9 0.4 39 -47 47 -12 1 +14 0.9 0.6 -38 -45 16 -35 2 +14 0.7 0.2 35 30 33 23 2 +14 0.8 0.6 -9 -42 19 -35 2 +14 0.2 0.6 -14 -25 18 -36 2 +14 0.5 0.9 -22 -35 35 32 2 +14 0.3 0.8 18 17 44 4 1 +14 0.6 0.5 -2 -33 29 14 2 +14 0.2 0.8 -3 -38 17 -47 2 +14 0.1 0.7 7 -31 -18 -26 1 +14 0.3 0.7 16 -33 -17 -29 1 +14 0.8 0.2 15 -16 -44 -49 1 +14 0.1 0.4 43 -5 21 -41 1 +14 0.3 0.6 10 -28 22 -4 2 +14 0.1 0.4 29 -8 8 6 2 +14 0.3 0.9 6 -12 20 5 2 +14 0.1 0.5 -25 -49 24 -16 2 +14 0.8 0.1 40 -29 -24 -31 1 +14 0.4 0.1 22 -18 -22 -28 1 +14 0.6 0.4 5 -8 17 -41 1 +14 0.7 0.5 20 -1 24 -11 1 +14 0.2 0.2 40 -30 23 11 2 +14 0.8 0.2 29 -30 -12 -13 1 +14 0.1 0.5 30 -1 31 -18 1 +14 0.8 0.8 21 17 20 -9 1 +14 0.6 0.5 16 -10 -3 -14 1 +14 0.4 0.5 28 -49 36 27 2 +14 0.4 0.9 17 6 14 6 2 +14 0.4 0.1 1 -16 28 -8 1 +14 0.2 0.8 5 -14 -35 -48 1 +14 0.1 0.8 42 7 23 -14 2 +14 0.2 0.7 29 11 32 -11 1 +14 0.3 0.9 32 27 48 -4 2 +14 0.8 0.2 34 -17 12 -42 1 +14 0.1 0.7 15 -13 -9 -50 1 +14 0.2 0.4 40 33 5 -44 1 +14 0.5 0.3 46 -50 -16 -22 1 +14 0.5 0.7 21 -36 41 15 2 +14 0.8 0.3 -4 -5 35 16 2 +14 0.6 0.3 25 -31 35 21 2 +14 0.5 0.6 49 15 -40 -46 1 +14 0.7 0.9 -3 -21 41 -20 2 +14 0.4 0.8 37 27 -32 -50 1 +14 0.7 0.9 30 -50 28 -47 1 +14 0.6 0.3 46 -16 -17 -30 2 +14 0.9 0.8 6 -30 -6 -44 1 +14 0.2 0.4 23 12 9 -25 1 +14 0.3 0.6 18 -44 11 -41 2 +14 0.6 0.1 46 -46 -12 -47 1 +14 0.2 0.4 44 40 46 -23 1 +14 0.6 0.6 39 26 40 -47 1 +14 0.7 0.1 36 -14 -35 -49 1 +14 0.6 0.2 39 -21 -23 -28 1 +14 0.6 0.9 -1 -40 -3 -25 2 +14 0.5 0.9 18 -18 20 -6 2 +14 0.6 0.1 27 -5 46 18 2 +14 0.4 0.7 43 26 6 -45 1 +14 0.7 0.5 39 6 50 32 2 +14 0.9 0.5 29 -23 -7 -26 1 +14 0.8 0.7 -40 -45 -1 -30 2 +14 0.7 0.6 35 1 45 -7 2 +14 0.9 0.3 -13 -30 2 -20 2 +14 0.3 0.4 30 -18 -17 -50 1 +14 0.9 0.9 -25 -41 38 28 2 +14 0.8 0.1 -31 -36 39 -16 1 +14 0.3 0.1 -25 -34 9 -24 2 +14 0.7 0.5 14 3 3 -30 1 +14 0.3 0.8 -29 -40 -4 -31 2 +14 0.5 0.5 -18 -20 22 -25 2 +14 0.2 0.7 43 12 39 37 2 +14 0.9 0.4 15 -21 10 5 2 +14 0.5 0.4 45 -18 5 -16 1 +14 0.5 0.3 15 -1 -26 -31 1 +14 0.2 0.2 41 -13 47 -50 1 +14 0.8 0.2 31 -47 43 24 2 +14 0.6 0.2 44 -14 49 20 2 +14 0.8 0.6 22 -4 25 -6 1 +14 0.4 0.4 15 2 -4 -35 2 +14 0.2 0.6 -30 -36 17 -40 2 +14 0.4 0.9 -19 -22 32 24 2 +14 0.1 0.7 29 -38 30 -38 2 +14 0.1 0.3 19 -45 27 19 2 +14 0.8 0.7 16 -29 14 -11 2 +14 0.8 0.2 50 22 45 -33 1 +14 0.6 0.1 -22 -37 49 -33 2 +14 0.6 0.4 3 -43 26 9 2 +14 0.7 0.6 50 12 -13 -44 1 +14 0.4 0.4 5 -45 41 28 2 +14 0.4 0.2 1 -4 7 -37 2 +14 0.8 0.5 38 32 9 -21 1 +14 0.2 0.7 22 -37 46 -25 2 +14 0.8 0.8 -42 -44 22 13 2 +14 0.1 0.3 49 44 43 0 1 +14 0.2 0.1 16 -35 47 -39 2 +14 0.6 0.5 44 -35 -20 -37 1 +14 0.4 0.8 6 -18 24 -28 2 +14 0.9 0.4 8 -11 30 -42 2 +14 0.5 0.3 14 -25 44 -10 2 +14 0.9 0.2 49 22 46 -9 1 +14 0.6 0.6 29 2 21 -4 2 +14 0.4 0.2 0 -46 41 -29 2 +14 0.7 0.4 35 10 44 19 2 +14 0.8 0.8 50 35 15 -21 1 +14 0.1 0.1 45 2 40 -30 1 +14 0.7 0.6 -28 -30 5 -13 2 +14 0.7 0.8 
-17 -29 48 29 2 +15 0.9 0.1 -19 -33 31 15 2 +15 0.4 0.5 36 5 -2 -8 1 +15 0.5 0.2 13 -23 39 -33 1 +15 0.8 0.3 36 -9 24 -35 1 +15 0.4 0.1 10 8 37 17 1 +15 0.3 0.8 5 -26 -9 -10 1 +15 0.4 0.3 14 -34 21 -45 1 +15 0.2 0.4 -31 -49 2 -24 2 +15 0.6 0.4 50 48 -41 -50 1 +15 0.2 0.8 -8 -34 45 15 2 +15 0.1 0.6 32 -27 14 0 2 +15 0.9 0.6 10 -49 -5 -43 1 +15 0.2 0.1 2 -6 14 -15 2 +15 0.1 0.7 -6 -34 18 -34 2 +15 0.2 0.6 13 -32 -36 -45 1 +15 0.1 0.7 0 -38 23 -22 2 +15 0.2 0.1 4 -27 -13 -38 1 +15 0.2 0.8 48 -35 -1 -46 2 +15 0.9 0.2 16 -40 10 -25 1 +15 0.7 0.8 13 -6 44 6 2 +15 0.8 0.7 35 31 -14 -47 1 +15 0.1 0.3 50 17 3 -12 1 +15 0.4 0.1 37 -42 18 -29 2 +15 0.8 0.6 -8 -22 49 6 2 +15 0.9 0.7 9 -17 27 -50 2 +15 0.2 0.7 0 -44 21 -16 2 +15 0.5 0.2 18 -8 35 -21 1 +15 0.1 0.1 37 27 46 18 1 +15 0.6 0.1 -32 -47 -6 -19 2 +15 0.4 0.6 6 -31 31 11 2 +15 0.5 0.4 34 26 50 49 2 +15 0.2 0.6 2 -10 36 21 2 +15 0.1 0.4 -42 -44 30 -43 2 +15 0.5 0.4 25 -23 29 -11 2 +15 0.9 0.5 46 0 39 -37 1 +15 0.6 0.5 41 -20 16 8 1 +15 0.1 0.9 46 23 -45 -50 1 +15 0.9 0.5 -3 -35 29 -50 2 +15 0.4 0.2 28 -49 3 -23 1 +15 0.7 0.1 30 -26 2 -35 1 +15 0.7 0.6 49 2 12 -28 1 +15 0.5 0.3 45 -2 17 10 1 +15 0.7 0.1 42 -12 7 -35 1 +15 0.1 0.7 48 -8 45 -19 2 +15 0.3 0.5 -10 -13 49 -46 2 +15 0.2 0.5 24 4 11 5 1 +15 0.1 0.2 27 18 -7 -34 1 +15 0.2 0.9 28 18 42 33 2 +15 0.9 0.1 18 9 31 -33 1 +15 0.6 0.9 11 6 7 -30 1 +15 0.1 0.4 32 -42 35 -47 2 +15 0.9 0.3 15 -23 -28 -37 1 +15 0.6 0.8 -18 -39 28 18 2 +15 0.8 0.6 28 -30 45 11 2 +15 0.1 0.8 27 23 -3 -18 1 +15 0.5 0.2 -5 -27 6 -38 2 +15 0.8 0.1 39 23 50 -19 1 +15 0.4 0.6 30 13 49 -2 2 +15 0.9 0.1 46 14 0 -19 1 +15 0.6 0.7 37 5 -29 -30 1 +15 0.3 0.8 42 -45 22 -12 2 +15 0.2 0.7 18 -14 47 24 2 +15 0.5 0.5 30 -46 22 -18 2 +15 0.5 0.4 49 -7 -15 -41 1 +15 0.9 0.5 -35 -38 42 33 2 +15 0.9 0.1 39 -19 -29 -34 1 +15 0.9 0.2 -42 -43 -36 -41 2 +15 0.9 0.9 13 -31 24 3 2 +15 0.3 0.8 44 23 35 -13 1 +15 0.1 0.6 41 30 42 28 2 +15 0.2 0.4 17 -25 13 1 2 +15 0.5 0.2 -6 -9 22 -17 2 +15 0.3 0.3 17 -35 -44 -45 1 +15 0.1 0.4 32 -2 41 17 2 +15 0.9 0.8 25 -49 49 18 2 +15 0.1 0.8 38 34 31 9 1 +15 0.5 0.7 20 -36 41 12 2 +15 0.5 0.2 5 -11 -28 -36 1 +15 0.8 0.5 45 -6 14 5 1 +15 0.1 0.6 4 -13 10 -18 2 +15 0.8 0.8 -32 -39 14 -36 2 +15 0.5 0.3 9 -38 45 15 2 +15 0.5 0.3 -5 -20 35 -17 2 +15 0.6 0.5 16 -5 50 10 2 +15 0.5 0.3 42 -16 4 3 2 +15 0.6 0.7 -11 -36 -6 -49 2 +15 0.9 0.2 14 -12 15 12 2 +15 0.8 0.6 -25 -26 25 10 2 +15 0.2 0.4 -27 -42 49 -18 2 +15 0.7 0.5 1 -33 17 8 2 +15 0.2 0.6 32 -47 50 -30 2 +15 0.3 0.4 -11 -34 46 -44 2 +15 0.3 0.8 21 -5 39 -1 2 +15 0.2 0.6 19 10 13 -28 1 +15 0.1 0.8 10 -12 13 -35 2 +15 0.6 0.7 -18 -46 -25 -46 1 +15 0.1 0.5 15 2 21 0 2 +15 0.3 0.4 48 -30 33 -30 1 +15 0.3 0.6 46 32 -12 -29 1 +15 0.5 0.2 5 4 28 0 1 +15 0.8 0.8 46 9 28 12 2 +15 0.3 0.2 13 4 35 12 2 +15 0.8 0.9 21 -35 20 -37 2 +15 0.7 0.3 46 -18 10 -39 1 +15 0.1 0.6 28 -31 22 -12 1 +15 0.8 0.1 -18 -22 44 19 2 +15 0.4 0.3 49 -47 -9 -29 2 +15 0.3 0.8 42 40 23 -31 1 +15 0.4 0.3 44 -40 35 21 2 +15 0.7 0.3 8 -49 -3 -30 1 +15 0.3 0.3 31 -3 26 -31 1 +15 0.8 0.6 -2 -50 -3 -22 2 +15 0.5 0.7 12 -41 37 9 2 +15 0.7 0.7 41 -27 15 -33 1 +15 0.7 0.6 22 14 17 8 1 +15 0.5 0.6 47 -14 31 1 1 +15 0.9 0.9 50 -4 -6 -45 1 +15 0.6 0.9 -13 -30 -36 -40 1 +15 0.8 0.3 39 -38 14 1 1 +15 0.1 0.9 40 10 10 -23 1 +15 0.4 0.9 -45 -46 8 -8 2 +15 0.2 0.8 10 -12 -2 -41 2 +15 0.8 0.6 38 2 32 -17 1 +15 0.5 0.5 31 -23 49 -28 2 +15 0.8 0.5 25 -14 -20 -22 1 +15 0.7 0.6 -30 -41 45 33 2 +15 0.1 0.2 2 -17 41 -47 1 +15 0.3 0.3 10 -42 30 -4 2 +15 0.3 0.5 49 -35 33 26 2 +15 0.5 0.3 45 -18 -3 -30 1 +15 0.4 0.8 45 
-22 21 -42 2 +15 0.7 0.7 44 -45 21 -21 1 +15 0.1 0.4 17 10 43 35 2 +15 0.4 0.3 42 -36 6 -9 1 +15 0.6 0.7 12 3 35 30 2 +15 0.2 0.7 -42 -46 5 -32 2 +15 0.7 0.1 49 15 -29 -38 1 +15 0.5 0.4 0 -43 28 -3 2 +15 0.1 0.4 19 -34 -30 -44 1 +15 0.5 0.5 28 1 -34 -39 1 +15 0.9 0.5 0 -1 16 5 2 +15 0.1 0.8 18 -31 40 -9 2 +15 0.7 0.1 -4 -28 29 -26 1 +15 0.4 0.4 23 19 32 -11 2 +15 0.7 0.8 14 -5 -14 -47 1 +15 0.6 0.7 -20 -25 13 -21 2 +15 0.4 0.3 27 -3 -12 -18 1 +15 0.4 0.8 19 6 4 -35 1 +15 0.1 0.4 32 -43 23 14 2 +15 0.7 0.4 -38 -44 12 -21 2 +15 0.2 0.3 40 4 28 -9 1 +15 0.8 0.9 41 -8 -11 -22 1 +15 0.1 0.8 4 -34 -7 -16 2 +15 0.3 0.9 49 8 44 22 2 +15 0.6 0.1 5 -20 28 -50 1 +15 0.1 0.1 -45 -48 42 10 2 +15 0.9 0.8 11 -12 47 -6 2 +15 0.5 0.1 35 -38 -25 -41 1 +15 0.5 0.1 -18 -41 29 -24 2 +15 0.9 0.1 7 -38 18 -45 1 +15 0.6 0.9 41 -18 48 -16 2 +15 0.7 0.8 44 -44 19 -37 1 +15 0.5 0.6 32 -13 1 -48 1 +15 0.1 0.6 38 -43 -7 -21 2 +15 0.6 0.3 -35 -41 -13 -22 2 +15 0.1 0.6 -33 -42 8 -17 2 +15 0.1 0.3 20 -43 -23 -33 1 +15 0.5 0.5 21 -35 11 5 2 +15 0.3 0.8 15 4 -6 -16 1 +15 0.2 0.2 -11 -32 7 -6 2 +15 0.4 0.9 39 3 12 -8 2 +15 0.4 0.5 23 20 49 25 2 +15 0.1 0.1 22 -23 -5 -39 1 +15 0.7 0.7 6 -14 2 -49 1 +15 0.2 0.8 -41 -48 27 -12 2 +15 0.7 0.7 5 -22 25 -27 2 +15 0.3 0.3 -7 -43 26 13 2 +15 0.9 0.3 37 -42 22 -20 1 +15 0.9 0.6 21 -44 34 15 2 +15 0.1 0.8 -1 -14 14 -10 2 +15 0.6 0.6 50 37 42 28 1 +15 0.9 0.3 34 28 -24 -27 1 +15 0.2 0.5 21 16 -15 -40 1 +15 0.7 0.9 -38 -50 19 3 2 +15 0.5 0.8 -35 -50 26 -28 2 +15 0.3 0.2 37 -7 32 -17 1 +15 0.7 0.6 48 40 39 6 1 +15 0.1 0.3 49 29 40 22 1 +15 0.6 0.1 -13 -39 -24 -30 1 +15 0.5 0.6 8 -7 1 -3 1 +15 0.9 0.3 38 -31 36 4 1 +15 0.5 0.3 25 -5 3 -31 1 +15 0.2 0.8 4 -34 22 1 2 +15 0.9 0.2 10 8 0 -6 1 +15 0.8 0.9 -14 -43 28 -47 2 +15 0.1 0.8 43 6 -1 -37 1 +15 0.9 0.1 16 -45 40 -2 1 +15 0.8 0.8 -20 -26 29 -29 2 +15 0.9 0.5 41 34 19 -7 1 +15 0.9 0.4 9 -21 24 9 2 +15 0.5 0.7 13 -14 7 -26 2 +15 0.5 0.2 -26 -45 41 16 2 +15 0.9 0.6 -25 -37 34 -32 2 +15 0.2 0.4 17 -26 46 -31 2 +15 0.2 0.4 27 -9 34 -4 2 +15 0.1 0.9 16 -34 13 -14 2 +15 0.1 0.9 20 -40 34 -18 2 +15 0.6 0.9 30 -35 -25 -50 1 +15 0.4 0.5 14 -21 48 -24 2 +15 0.6 0.9 14 -12 29 -7 2 +15 0.9 0.2 23 -17 -6 -15 1 +15 0.3 0.7 11 3 1 -47 1 +15 0.7 0.2 21 4 44 19 2 +15 0.3 0.1 19 -23 25 2 2 +15 0.9 0.9 -14 -33 13 -27 2 +15 0.2 0.9 6 -33 12 -2 2 +15 0.8 0.2 -5 -25 29 -7 2 +15 0.7 0.8 22 -24 29 0 2 +15 0.4 0.7 44 -40 20 -27 2 +15 0.1 0.3 35 -17 29 23 2 +15 0.8 0.5 20 5 3 -25 1 +15 0.5 0.1 -17 -24 34 13 2 +15 0.9 0.1 40 16 42 -30 1 +15 0.7 0.9 -16 -27 40 -7 2 +15 0.2 0.3 33 31 12 -27 1 +15 0.3 0.4 5 -19 -35 -42 1 +15 0.6 0.7 2 -5 37 -1 2 +15 0.2 0.5 37 35 -6 -9 1 +15 0.4 0.9 27 15 38 -45 2 +15 0.2 0.3 14 -20 19 -43 2 +15 0.6 0.3 20 -33 25 -24 2 +15 0.8 0.8 19 5 20 -42 2 +15 0.2 0.8 5 -10 25 -16 2 +15 0.8 0.1 40 16 44 15 2 +15 0.5 0.3 48 -44 41 21 2 +15 0.6 0.5 36 30 35 28 1 +15 0.3 0.2 17 -18 45 29 2 +15 0.6 0.5 44 17 26 -28 1 +15 0.1 0.5 13 -42 50 -24 2 +15 0.2 0.2 39 5 48 5 2 +15 0.2 0.9 -7 -20 -1 -47 2 +15 0.7 0.5 38 27 50 -18 1 +15 0.9 0.1 18 -47 -10 -15 1 +15 0.3 0.5 31 -45 -14 -35 2 +15 0.7 0.2 -37 -38 0 -46 1 +15 0.5 0.7 28 -22 25 7 2 +15 0.3 0.1 3 -48 -13 -15 2 +15 0.5 0.3 -14 -15 49 17 2 +15 0.2 0.4 -17 -49 -34 -47 1 +15 0.8 0.7 -5 -48 13 -22 2 +15 0.1 0.8 12 -5 11 10 2 +15 0.9 0.2 -25 -40 -16 -42 1 +15 0.7 0.6 48 -14 33 -4 1 +15 0.5 0.9 12 -27 11 3 2 +15 0.5 0.1 39 -1 31 -21 1 +15 0.3 0.5 48 -29 21 -20 2 +15 0.6 0.9 40 -30 43 12 2 +15 0.6 0.5 28 -32 37 -19 1 +15 0.5 0.9 -9 -49 34 20 2 +15 0.5 0.2 46 -30 25 5 1 +15 0.1 0.4 -32 -34 -7 -35 1 +15 0.4 0.3 
-7 -35 6 -41 1 +15 0.2 0.5 40 -30 -35 -49 1 +15 0.1 0.3 22 -3 38 -4 2 +15 0.3 0.6 -19 -43 47 4 2 +15 0.5 0.5 44 -32 -37 -45 1 +15 0.6 0.2 21 -18 -16 -27 1 +15 0.7 0.5 48 34 27 12 1 +15 0.9 0.2 40 -43 40 -6 1 +15 0.9 0.2 29 -4 8 7 1 +15 0.4 0.2 8 -50 44 13 2 +15 0.9 0.3 44 31 38 4 1 +15 0.3 0.2 20 -40 39 -14 2 +15 0.2 0.4 18 -36 44 40 2 +15 0.3 0.1 -6 -22 30 -22 1 +15 0.3 0.5 34 -21 48 -31 2 +15 0.4 0.1 5 -33 29 10 2 +15 0.4 0.2 48 -26 38 -26 1 +15 0.3 0.6 16 -33 21 -16 2 +15 0.1 0.2 -21 -45 36 13 2 +15 0.6 0.8 35 -14 5 -39 1 +15 0.7 0.7 39 -28 21 6 2 +15 0.8 0.6 -18 -25 35 -21 2 +15 0.2 0.9 30 -34 33 -36 2 +15 0.3 0.3 47 22 37 -47 1 +15 0.1 0.7 18 -47 -15 -28 1 +15 0.3 0.5 7 4 27 -40 1 +15 0.1 0.7 42 -35 -9 -50 1 +15 0.7 0.6 50 -12 23 14 1 +15 0.1 0.5 21 -31 16 -17 2 +15 0.8 0.4 -1 -10 24 11 2 +15 0.2 0.5 45 -37 -14 -28 1 +15 0.5 0.2 -24 -48 3 -21 1 +15 0.7 0.6 29 -1 40 10 2 +15 0.7 0.5 41 -20 38 -26 2 +15 0.1 0.2 33 1 41 -16 1 +15 0.2 0.2 32 21 42 -8 1 +15 0.9 0.3 40 29 7 -29 1 +15 0.3 0.3 10 -47 39 37 2 +15 0.7 0.8 46 22 17 -22 1 +16 0.1 0.3 -23 -45 -1 -32 2 +16 0.7 0.1 41 8 4 -2 1 +16 0.6 0.7 44 -23 -11 -17 1 +16 0.3 0.8 45 17 27 25 1 +16 0.4 0.9 10 -7 29 16 2 +16 0.4 0.4 16 3 31 -30 1 +16 0.6 0.5 49 -12 40 29 2 +16 0.7 0.5 10 -9 -36 -43 1 +16 0.7 0.6 37 -47 29 -23 1 +16 0.8 0.6 32 -18 48 -40 1 +16 0.9 0.3 17 -26 45 31 2 +16 0.2 0.8 24 -5 -1 -19 1 +16 0.1 0.4 17 -18 -5 -19 2 +16 0.2 0.5 42 -31 23 -38 2 +16 0.5 0.7 27 -18 39 -8 2 +16 0.4 0.1 24 3 30 -30 1 +16 0.1 0.4 21 -10 29 5 2 +16 0.8 0.4 6 -42 50 22 2 +16 0.7 0.3 29 -32 14 -8 1 +16 0.8 0.3 38 36 26 -7 1 +16 0.2 0.2 12 -18 21 -8 2 +16 0.2 0.3 -33 -42 33 -4 2 +16 0.3 0.9 14 -33 36 11 2 +16 0.7 0.5 19 -15 -36 -44 1 +16 0.5 0.2 28 -47 28 -48 1 +16 0.5 0.8 24 -45 13 -8 2 +16 0.6 0.8 43 -24 35 -32 2 +16 0.9 0.7 12 -41 17 -14 1 +16 0.4 0.9 20 -8 4 -38 1 +16 0.9 0.4 44 10 28 -44 1 +16 0.2 0.4 2 -27 42 5 2 +16 0.9 0.5 2 -1 38 -30 1 +16 0.1 0.4 19 -3 -5 -23 1 +16 0.1 0.6 29 26 43 -7 1 +16 0.6 0.8 -25 -29 9 6 2 +16 0.2 0.6 26 -31 24 -22 2 +16 0.7 0.8 19 -37 32 -36 2 +16 0.7 0.7 19 -30 50 -34 2 +16 0.9 0.4 50 15 -26 -44 1 +16 0.9 0.7 1 -8 -24 -29 1 +16 0.8 0.7 4 -14 4 -12 1 +16 0.6 0.7 34 32 40 0 2 +16 0.5 0.2 -26 -50 -20 -46 1 +16 0.9 0.7 44 -47 3 -14 1 +16 0.9 0.9 33 12 32 -33 1 +16 0.4 0.3 50 -2 -17 -28 1 +16 0.9 0.1 22 -32 49 5 1 +16 0.9 0.4 29 18 -38 -39 1 +16 0.6 0.1 31 0 47 41 2 +16 0.5 0.9 23 -3 -4 -16 1 +16 0.4 0.2 34 17 35 -47 1 +16 0.3 0.3 35 -3 -7 -40 1 +16 0.5 0.6 4 -19 -28 -48 1 +16 0.6 0.7 11 10 6 -41 1 +16 0.8 0.3 14 -45 -19 -50 1 +16 0.1 0.9 27 -35 2 -43 2 +16 0.1 0.8 10 9 -13 -45 1 +16 0.5 0.4 -33 -45 45 -25 2 +16 0.1 0.5 -5 -31 -26 -35 2 +16 0.5 0.7 -1 -19 27 -7 2 +16 0.7 0.9 12 1 -37 -49 1 +16 0.7 0.2 38 34 4 -6 1 +16 0.6 0.1 22 -32 32 8 2 +16 0.4 0.4 31 -7 43 19 2 +16 0.3 0.5 25 -38 22 -26 2 +16 0.4 0.8 -3 -19 44 -2 2 +16 0.5 0.3 37 -23 18 4 2 +16 0.9 0.1 30 -12 9 -48 1 +16 0.6 0.9 4 -7 30 -25 2 +16 0.2 0.9 34 -46 9 -34 2 +16 0.5 0.2 20 -26 40 -12 2 +16 0.9 0.2 -29 -50 34 -33 2 +16 0.3 0.6 44 23 20 -30 1 +16 0.1 0.9 3 -15 20 -2 2 +16 0.4 0.4 -22 -42 -27 -38 2 +16 0.7 0.1 -39 -48 45 -2 2 +16 0.1 0.8 43 8 45 22 2 +16 0.5 0.4 -11 -43 -33 -48 1 +16 0.9 0.7 8 -8 24 -18 2 +16 0.1 0.4 0 -25 40 -21 2 +16 0.7 0.5 42 34 22 -45 1 +16 0.5 0.3 28 2 31 -16 1 +16 0.4 0.8 2 -4 36 24 2 +16 0.1 0.2 48 -38 47 27 2 +16 0.8 0.1 -1 -4 -32 -44 1 +16 0.4 0.8 -4 -44 39 -8 2 +16 0.2 0.9 28 -34 7 -43 2 +16 0.1 0.3 -13 -24 16 -34 2 +16 0.9 0.1 41 26 15 -31 1 +16 0.6 0.6 48 -33 -32 -33 1 +16 0.8 0.1 42 -40 22 -48 1 +16 0.4 0.9 -1 -19 49 46 2 +16 0.2 
0.5 29 -49 3 -21 2 +16 0.8 0.8 43 11 -34 -35 1 +16 0.1 0.5 43 -6 44 42 2 +16 0.2 0.6 -28 -35 -38 -39 2 +16 0.4 0.1 -26 -38 21 -38 2 +16 0.6 0.3 14 2 32 -30 1 +16 0.1 0.1 25 7 -12 -32 1 +16 0.7 0.4 43 10 49 41 2 +16 0.1 0.8 42 -38 41 2 2 +16 0.2 0.8 -11 -23 -3 -8 2 +16 0.5 0.1 -15 -38 38 -45 2 +16 0.3 0.7 -27 -49 -8 -18 2 +16 0.2 0.4 18 -34 40 -37 2 +16 0.5 0.7 0 -18 41 16 2 +16 0.1 0.1 28 -50 40 24 2 +16 0.5 0.4 18 -4 -10 -16 1 +16 0.1 0.7 -1 -38 23 -17 2 +16 0.5 0.5 -10 -15 12 -31 2 +16 0.5 0.1 1 -11 50 -27 1 +16 0.2 0.6 48 25 41 8 1 +16 0.3 0.8 -35 -42 -2 -3 2 +16 0.4 0.4 42 18 42 37 2 +16 0.7 0.6 9 -19 -2 -20 1 +16 0.6 0.3 -29 -33 14 -8 2 +16 0.2 0.8 5 -2 44 17 2 +16 0.2 0.5 38 -41 -4 -19 2 +16 0.8 0.6 48 -14 -31 -41 1 +16 0.9 0.5 2 -7 46 19 2 +16 0.4 0.7 11 -14 37 -19 2 +16 0.3 0.8 45 -33 41 -28 2 +16 0.3 0.3 11 -39 6 -24 2 +16 0.9 0.8 47 -27 -5 -19 1 +16 0.7 0.8 30 -23 48 26 2 +16 0.4 0.3 46 -17 28 -19 1 +16 0.3 0.9 -22 -23 -14 -37 1 +16 0.7 0.9 50 -24 -1 -15 1 +16 0.4 0.9 -5 -50 50 -16 2 +16 0.6 0.7 26 -23 50 -48 2 +16 0.4 0.1 49 3 15 -39 1 +16 0.9 0.4 29 23 -13 -33 1 +16 0.2 0.6 33 -25 13 -44 2 +16 0.9 0.6 24 -25 27 -5 1 +16 0.6 0.1 28 -42 21 -35 1 +16 0.4 0.3 1 -6 9 -12 1 +16 0.3 0.2 -9 -33 42 33 2 +16 0.7 0.4 -4 -46 17 -5 2 +16 0.7 0.9 21 -12 32 26 2 +16 0.2 0.4 -31 -46 49 12 2 +16 0.2 0.6 20 -44 46 -28 2 +16 0.1 0.1 3 -49 -6 -35 2 +16 0.7 0.5 37 11 -21 -38 1 +16 0.3 0.8 36 -42 -9 -25 1 +16 0.7 0.2 16 -6 18 17 2 +16 0.7 0.8 15 -37 35 -12 2 +16 0.6 0.4 19 -11 48 46 2 +16 0.3 0.6 46 -39 19 -29 2 +16 0.9 0.3 -16 -21 -26 -31 1 +16 0.4 0.3 11 -21 -6 -41 1 +16 0.7 0.2 -2 -22 38 -37 1 +16 0.7 0.4 44 -20 21 -22 1 +16 0.3 0.1 28 27 41 -26 1 +16 0.9 0.8 31 -46 -23 -26 1 +16 0.1 0.7 -1 -34 14 -45 2 +16 0.6 0.6 6 -46 23 -8 2 +16 0.9 0.8 15 -20 23 -40 1 +16 0.2 0.5 -1 -10 34 29 2 +16 0.9 0.8 -10 -14 30 -40 2 +16 0.1 0.6 0 -5 10 2 2 +16 0.8 0.2 3 -28 -5 -45 1 +16 0.6 0.9 20 -17 36 -32 2 +16 0.5 0.6 20 -47 47 -41 2 +16 0.6 0.6 18 -23 40 -10 2 +16 0.3 0.6 7 -29 -6 -24 1 +16 0.5 0.7 42 -2 41 -20 1 +16 0.7 0.8 26 -30 18 -27 1 +16 0.8 0.5 -11 -30 -31 -42 1 +16 0.1 0.9 19 12 35 7 2 +16 0.2 0.8 25 -8 37 -2 2 +16 0.5 0.9 16 -29 32 -42 2 +16 0.1 0.2 26 -29 20 -27 2 +16 0.9 0.9 39 34 42 16 2 +16 0.8 0.6 -2 -16 38 -22 2 +16 0.1 0.1 -12 -49 39 17 2 +16 0.5 0.2 44 -22 34 33 2 +16 0.5 0.8 37 -9 4 -31 1 +16 0.8 0.1 13 -21 44 10 2 +16 0.6 0.5 42 -37 40 33 2 +16 0.7 0.5 10 -8 26 -7 2 +16 0.3 0.6 30 0 38 1 2 +16 0.5 0.5 9 8 19 17 2 +16 0.9 0.5 8 -36 49 -15 2 +16 0.5 0.6 -15 -16 46 24 2 +16 0.2 0.6 21 15 31 -48 1 +16 0.9 0.2 -44 -47 32 -43 2 +16 0.2 0.2 14 1 40 2 2 +16 0.8 0.5 35 28 32 -11 1 +16 0.5 0.5 -38 -46 -31 -44 2 +16 0.4 0.1 -16 -41 18 -1 2 +16 0.4 0.6 28 -31 -14 -48 1 +16 0.7 0.1 -3 -41 -4 -41 1 +16 0.8 0.9 14 -21 31 -15 2 +16 0.5 0.6 40 -29 48 -15 2 +16 0.8 0.2 41 24 34 -28 1 +16 0.1 0.2 26 3 -9 -15 1 +16 0.1 0.8 18 -24 -3 -27 2 +16 0.9 0.8 33 -21 44 41 2 +16 0.8 0.2 45 -17 29 -16 1 +16 0.2 0.2 35 32 50 -18 1 +16 0.8 0.1 48 -28 36 -32 1 +16 0.8 0.2 35 -9 5 -25 1 +16 0.4 0.6 21 -5 15 -50 1 +16 0.5 0.7 50 16 37 -49 1 +16 0.8 0.9 -7 -24 18 -42 2 +16 0.8 0.8 45 -23 32 24 1 +16 0.3 0.7 -5 -44 45 42 2 +16 0.3 0.9 -35 -49 37 -2 2 +16 0.5 0.4 25 -21 26 -46 1 +16 0.7 0.3 20 -46 38 26 2 +16 0.5 0.8 16 -3 46 -13 2 +16 0.6 0.6 17 -22 48 18 2 +16 0.4 0.6 26 13 -36 -50 1 +16 0.4 0.6 40 23 41 -20 1 +16 0.2 0.6 -24 -50 -17 -23 2 +16 0.1 0.6 25 12 23 -37 1 +16 0.4 0.2 22 -22 49 -47 1 +16 0.6 0.2 -6 -42 -11 -32 1 +16 0.3 0.5 40 0 -5 -23 1 +16 0.3 0.7 16 5 24 -6 2 +16 0.7 0.7 36 21 46 -33 1 +16 0.9 0.3 12 -45 43 13 2 +16 
0.7 0.2 -3 -27 32 0 2 +16 0.2 0.5 4 -23 8 0 2 +16 0.9 0.3 47 -18 48 7 1 +16 0.8 0.1 22 -23 30 -1 1 +16 0.4 0.2 20 -34 30 -40 1 +16 0.4 0.3 49 -27 -38 -46 1 +16 0.7 0.7 44 14 -7 -26 1 +16 0.3 0.6 50 -3 21 20 2 +16 0.4 0.9 8 -5 -35 -46 1 +16 0.4 0.6 24 -45 -18 -29 1 +16 0.9 0.1 21 -14 20 3 1 +16 0.9 0.6 9 -15 -16 -27 1 +16 0.6 0.6 29 -44 40 -17 2 +16 0.2 0.8 4 -16 9 -8 2 +16 0.5 0.5 40 -19 37 -28 1 +16 0.2 0.3 41 -23 12 -21 1 +16 0.5 0.3 27 -8 9 -11 1 +16 0.9 0.8 27 -10 22 -37 1 +16 0.4 0.6 29 -43 17 5 2 +16 0.7 0.8 4 -35 42 22 2 +16 0.4 0.3 45 14 -11 -16 1 +16 0.2 0.7 11 -37 7 5 2 +16 0.7 0.3 17 -26 34 -9 2 +16 0.9 0.5 42 26 38 6 1 +16 0.2 0.1 8 -30 17 -24 1 +16 0.5 0.5 36 0 28 -41 1 +16 0.6 0.7 15 -23 32 2 2 +16 0.5 0.7 0 -42 -23 -44 1 +16 0.5 0.1 -11 -38 -24 -36 1 +16 0.4 0.9 42 33 -2 -28 1 +16 0.3 0.7 49 40 34 31 1 +16 0.8 0.1 45 -23 -37 -48 1 +16 0.5 0.6 7 5 11 -26 1 +16 0.6 0.8 -42 -47 21 9 2 +16 0.2 0.9 49 -23 -27 -31 1 +16 0.5 0.8 32 4 21 -15 1 +16 0.7 0.4 38 -30 3 2 1 +16 0.9 0.5 44 9 37 13 1 +16 0.4 0.4 50 -11 40 5 2 +16 0.3 0.9 25 -22 30 24 2 +16 0.3 0.1 -1 -9 -30 -38 1 +16 0.9 0.7 18 -21 5 -18 1 +16 0.7 0.8 20 12 5 -25 1 +16 0.5 0.2 15 -37 8 -34 1 +16 0.6 0.3 41 9 47 -10 1 +16 0.1 0.2 22 -5 42 -25 1 +16 0.9 0.7 8 -7 10 -37 1 +16 0.3 0.3 -32 -43 43 -7 2 +16 0.7 0.4 30 -7 24 -40 1 +16 0.4 0.6 44 -43 -37 -40 1 +16 0.9 0.7 48 18 29 -7 1 +16 0.3 0.5 38 2 10 4 1 +16 0.5 0.8 13 -45 4 -9 2 +16 0.1 0.6 43 -33 23 16 2 +16 0.9 0.3 15 -23 22 15 2 +16 0.1 0.7 34 -48 -16 -20 2 +16 0.5 0.6 -6 -14 48 -35 2 +16 0.1 0.2 -36 -49 -2 -29 2 +16 0.2 0.9 9 -2 -8 -19 1 +16 0.7 0.1 8 -40 23 -30 1 +16 0.9 0.9 49 -44 32 -48 1 +16 0.9 0.8 -24 -32 12 -24 2 +16 0.9 0.4 29 17 28 -44 1 +16 0.5 0.6 1 -50 48 40 2 +16 0.2 0.6 27 -36 43 14 2 +16 0.3 0.7 -28 -46 42 12 2 +16 0.4 0.2 -15 -37 39 22 2 +16 0.1 0.8 17 -24 -39 -50 1 +16 0.1 0.6 38 -16 42 -49 2 +16 0.3 0.8 39 24 -1 -43 1 +16 0.9 0.6 29 -13 32 -19 1 +16 0.4 0.4 37 -9 8 -23 1 +16 0.7 0.5 38 -17 12 8 1 +16 0.8 0.4 8 2 -21 -36 1 +16 0.8 0.5 9 8 -7 -42 1 +16 0.1 0.1 -1 -49 -42 -46 1 +16 0.7 0.5 47 39 2 -1 1 +16 0.6 0.6 48 17 28 -48 1 +17 0.6 0.3 -6 -13 46 -19 2 +17 0.4 0.7 32 -2 -15 -40 1 +17 0.3 0.9 33 -24 44 41 2 +17 0.4 0.1 13 -42 -11 -24 1 +17 0.5 0.6 25 -18 38 3 2 +17 0.7 0.5 45 -1 31 -35 1 +17 0.3 0.2 24 -19 -14 -21 1 +17 0.1 0.4 -27 -44 -6 -50 2 +17 0.4 0.6 30 -13 17 -48 1 +17 0.5 0.1 35 28 1 -5 1 +17 0.2 0.2 18 -16 16 -15 1 +17 0.8 0.2 12 -24 -11 -41 1 +17 0.5 0.9 38 -41 27 11 2 +17 0.3 0.3 23 -3 -26 -43 1 +17 0.3 0.8 18 -46 41 22 2 +17 0.4 0.5 -26 -33 18 14 2 +17 0.7 0.7 -9 -23 38 -14 2 +17 0.7 0.7 34 32 16 -7 1 +17 0.4 0.2 17 -44 7 -26 1 +17 0.4 0.6 -5 -25 -33 -46 1 +17 0.1 0.8 47 32 39 -28 1 +17 0.9 0.5 30 -36 20 -9 1 +17 0.3 0.6 2 -4 33 15 2 +17 0.8 0.2 -27 -44 38 18 2 +17 0.2 0.4 3 -47 50 -31 2 +17 0.2 0.2 -20 -41 11 -2 2 +17 0.3 0.3 24 -18 39 -18 2 +17 0.4 0.1 -29 -47 47 31 2 +17 0.7 0.6 -12 -13 18 -44 2 +17 0.9 0.2 33 -38 49 41 2 +17 0.7 0.5 -3 -8 -33 -42 1 +17 0.6 0.3 47 -34 15 -1 1 +17 0.5 0.9 -22 -36 38 -23 2 +17 0.6 0.2 13 7 -16 -17 1 +17 0.4 0.7 -27 -29 16 -4 2 +17 0.2 0.8 49 -7 -3 -21 1 +17 0.5 0.6 3 -31 10 -33 2 +17 0.3 0.4 -23 -38 49 37 2 +17 0.2 0.6 45 41 24 -28 1 +17 0.8 0.3 45 36 25 -25 1 +17 0.8 0.1 35 11 -7 -13 1 +17 0.3 0.7 -12 -39 11 -7 2 +17 0.9 0.3 45 14 -17 -47 1 +17 0.4 0.4 3 -2 -5 -14 1 +17 0.2 0.3 19 -28 5 -28 2 +17 0.2 0.2 42 -16 47 33 2 +17 0.7 0.7 -34 -42 37 -24 2 +17 0.4 0.1 28 -5 6 -20 1 +17 0.4 0.8 -21 -23 19 -16 2 +17 0.1 0.8 11 -27 27 -14 2 +17 0.8 0.5 49 1 5 2 1 +17 0.9 0.9 -20 -36 44 25 2 +17 0.8 0.2 -4 -41 39 -48 2 
+17 0.2 0.1 42 33 29 -29 1 +17 0.9 0.8 46 13 -5 -35 1 +17 0.8 0.8 18 -17 47 -11 2 +17 0.4 0.4 42 16 45 30 1 +17 0.3 0.6 42 30 15 -46 1 +17 0.3 0.2 47 41 13 -11 1 +17 0.2 0.4 43 25 46 16 2 +17 0.1 0.5 0 -45 0 -46 2 +17 0.8 0.2 49 -31 48 -4 1 +17 0.3 0.3 -9 -20 41 26 2 +17 0.2 0.1 30 -28 -9 -42 1 +17 0.2 0.2 -14 -17 46 -37 2 +17 0.6 0.2 37 22 48 16 1 +17 0.9 0.7 17 -12 -10 -22 1 +17 0.4 0.8 31 10 30 15 2 +17 0.8 0.7 -6 -32 44 3 2 +17 0.1 0.3 34 33 49 -2 1 +17 0.6 0.3 -42 -43 40 4 2 +17 0.2 0.6 27 -1 32 16 2 +17 0.7 0.1 -2 -17 -23 -28 1 +17 0.9 0.7 -45 -50 32 -35 2 +17 0.1 0.8 31 -36 42 -15 2 +17 0.4 0.9 34 -48 45 -22 2 +17 0.1 0.6 27 -6 15 -27 2 +17 0.2 0.3 12 0 27 0 2 +17 0.4 0.3 33 28 45 -43 1 +17 0.7 0.9 11 -50 30 26 2 +17 0.8 0.6 -9 -38 23 -30 2 +17 0.3 0.3 22 -38 19 -45 1 +17 0.9 0.5 43 11 29 -2 1 +17 0.9 0.2 -46 -49 -37 -49 1 +17 0.9 0.5 -1 -38 23 -11 2 +17 0.3 0.8 14 2 44 22 2 +17 0.8 0.4 24 -1 40 -36 1 +17 0.1 0.6 4 -13 18 -22 2 +17 0.1 0.5 22 15 49 44 2 +17 0.4 0.4 17 12 13 -28 1 +17 0.8 0.8 36 -26 19 -5 1 +17 0.6 0.8 12 -7 13 -14 2 +17 0.3 0.8 8 -26 -2 -32 2 +17 0.4 0.6 9 -45 10 -25 2 +17 0.2 0.2 32 -14 -44 -47 1 +17 0.4 0.1 -22 -38 -15 -39 1 +17 0.3 0.7 -40 -47 43 33 2 +17 0.5 0.5 6 -38 -9 -37 1 +17 0.4 0.2 -45 -48 -4 -26 2 +17 0.8 0.4 8 -50 40 -36 1 +17 0.5 0.2 15 -36 41 -3 2 +17 0.5 0.8 38 -33 35 -41 2 +17 0.3 0.1 -17 -35 -16 -46 1 +17 0.8 0.2 22 -17 -1 -31 1 +17 0.9 0.1 -14 -42 37 -22 2 +17 0.6 0.8 -25 -27 41 -9 2 +17 0.5 0.1 -6 -7 35 1 2 +17 0.2 0.4 19 -30 50 -32 2 +17 0.3 0.6 -7 -34 -5 -13 2 +17 0.9 0.5 35 14 33 0 1 +17 0.7 0.3 48 -35 32 24 2 +17 0.1 0.5 21 -4 8 -35 1 +17 0.7 0.6 39 -40 21 -30 1 +17 0.7 0.4 -40 -49 12 -8 2 +17 0.3 0.9 48 16 39 -39 2 +17 0.9 0.5 36 -42 42 6 1 +17 0.8 0.1 14 -10 42 37 2 +17 0.4 0.7 17 -31 42 -9 2 +17 0.8 0.1 43 8 49 -37 1 +17 0.1 0.2 30 -44 33 -5 2 +17 0.8 0.1 21 -49 9 -23 1 +17 0.3 0.6 38 29 50 36 2 +17 0.2 0.4 10 -15 43 13 2 +17 0.1 0.8 15 -9 33 17 2 +17 0.2 0.1 39 -32 42 37 2 +17 0.6 0.1 45 -47 26 -30 1 +17 0.1 0.9 36 10 38 11 2 +17 0.8 0.2 7 -35 38 -20 2 +17 0.1 0.8 27 -19 -16 -20 2 +17 0.2 0.9 0 -9 33 -32 2 +17 0.1 0.9 27 -50 9 -21 2 +17 0.1 0.7 -31 -47 41 -31 2 +17 0.5 0.1 -1 -30 49 -27 2 +17 0.9 0.5 -9 -43 -24 -47 1 +17 0.5 0.2 -8 -35 36 8 2 +17 0.1 0.4 20 -40 -12 -27 1 +17 0.6 0.9 34 15 49 -30 2 +17 0.6 0.9 -4 -6 48 14 2 +17 0.9 0.4 26 -20 -33 -39 1 +17 0.2 0.3 40 -43 -20 -47 1 +17 0.5 0.6 -15 -22 43 30 2 +17 0.8 0.2 19 2 -21 -26 1 +17 0.7 0.9 33 29 24 -35 1 +17 0.5 0.1 48 37 19 -36 1 +17 0.2 0.2 35 -24 -2 -27 1 +17 0.1 0.1 -19 -32 33 8 2 +17 0.4 0.1 -12 -36 21 18 2 +17 0.9 0.7 -13 -25 33 -27 2 +17 0.2 0.1 40 -19 -20 -26 1 +17 0.2 0.9 23 -8 -8 -13 1 +17 0.8 0.7 24 7 -31 -45 1 +17 0.8 0.1 15 -28 -23 -49 1 +17 0.1 0.5 28 -22 3 -22 2 +17 0.8 0.4 22 -40 43 -4 2 +17 0.7 0.2 28 22 43 4 1 +17 0.3 0.9 36 -12 19 -22 2 +17 0.7 0.7 28 -47 -9 -11 1 +17 0.2 0.1 45 15 5 -36 1 +17 0.2 0.1 -32 -41 38 -39 2 +17 0.8 0.9 34 -13 -29 -47 1 +17 0.7 0.7 31 -28 45 -2 2 +17 0.8 0.2 -3 -13 49 -21 2 +17 0.3 0.3 38 9 45 -30 1 +17 0.5 0.3 -24 -44 38 -12 2 +17 0.8 0.8 23 -29 48 -9 2 +17 0.7 0.4 -4 -34 3 -8 1 +17 0.1 0.1 5 -42 30 -44 2 +17 0.5 0.8 -34 -43 5 -32 2 +17 0.9 0.9 31 -37 -36 -38 1 +17 0.9 0.7 -11 -38 33 -41 2 +17 0.2 0.7 31 -20 3 -24 1 +17 0.2 0.5 -26 -41 14 9 2 +17 0.8 0.8 -43 -49 21 -10 2 +17 0.6 0.7 15 11 24 5 2 +17 0.8 0.4 39 -5 27 -19 1 +17 0.8 0.3 22 -31 49 -8 1 +17 0.8 0.8 18 7 33 6 2 +17 0.4 0.8 44 38 30 -26 1 +17 0.6 0.2 41 36 50 0 1 +17 0.9 0.2 37 -27 47 -7 1 +17 0.9 0.9 35 -41 -13 -44 1 +17 0.8 0.8 3 -17 21 -22 1 +17 0.3 0.4 -4 -47 -9 -21 2 +17 
0.9 0.3 23 -15 0 -19 1 +17 0.1 0.7 27 -4 -23 -48 1 +17 0.3 0.8 46 -44 45 -16 2 +17 0.2 0.5 -22 -33 37 -45 2 +17 0.7 0.6 -14 -15 44 -49 2 +17 0.2 0.4 23 -10 25 -41 2 +17 0.9 0.1 -4 -20 12 6 2 +17 0.7 0.4 28 -6 36 18 2 +17 0.2 0.1 20 -22 48 -48 1 +17 0.8 0.8 17 -26 30 15 2 +17 0.8 0.9 37 -16 -10 -19 1 +17 0.4 0.5 -9 -40 21 -6 2 +17 0.1 0.9 41 6 48 35 2 +17 0.9 0.1 -12 -40 17 4 2 +17 0.7 0.8 34 -15 30 22 2 +17 0.8 0.4 25 10 4 -35 1 +17 0.8 0.1 9 -49 -16 -32 1 +17 0.7 0.7 10 -48 45 -13 2 +17 0.8 0.8 26 -20 8 -34 1 +17 0.8 0.1 -8 -20 2 -17 1 +17 0.3 0.5 -12 -50 45 -27 2 +17 0.7 0.8 -36 -37 -40 -46 1 +17 0.1 0.5 -14 -50 37 33 2 +17 0.2 0.7 5 -46 18 -4 2 +17 0.6 0.5 32 -45 26 24 2 +17 0.2 0.8 -13 -25 36 -36 2 +17 0.6 0.8 -20 -39 32 -19 2 +17 0.2 0.2 41 -34 -35 -46 1 +17 0.6 0.5 7 -35 16 13 2 +17 0.2 0.3 47 32 22 -35 1 +17 0.1 0.3 9 -43 36 -14 2 +17 0.9 0.4 25 -18 48 -5 1 +17 0.6 0.3 3 1 -8 -11 1 +17 0.9 0.1 28 3 -17 -45 1 +17 0.4 0.7 22 -4 -35 -38 1 +17 0.7 0.6 45 -2 -4 -49 1 +17 0.3 0.1 17 -20 49 31 2 +17 0.1 0.6 16 12 16 -15 1 +17 0.4 0.2 -2 -36 18 -30 2 +17 0.7 0.5 11 -29 19 -14 2 +17 0.1 0.4 -26 -33 -11 -50 2 +17 0.1 0.8 -35 -47 33 -36 2 +17 0.7 0.3 49 -45 27 -35 1 +17 0.9 0.6 40 -38 36 -15 1 +17 0.1 0.4 32 -16 -18 -22 1 +17 0.1 0.3 27 -23 47 30 2 +17 0.4 0.8 42 12 34 8 2 +17 0.4 0.1 10 -28 -9 -23 1 +17 0.5 0.4 -9 -48 -20 -39 2 +17 0.4 0.4 -20 -50 37 -14 2 +17 0.7 0.5 18 4 15 -15 1 +17 0.4 0.7 -25 -42 34 12 2 +17 0.9 0.7 48 18 22 9 1 +17 0.7 0.7 44 36 1 -34 1 +17 0.1 0.5 5 -17 45 -40 2 +17 0.5 0.3 22 -38 12 4 2 +17 0.1 0.8 39 -19 3 -48 2 +17 0.3 0.5 47 -49 -17 -50 1 +17 0.9 0.1 -20 -38 -2 -16 2 +17 0.2 0.1 42 4 26 -26 1 +17 0.9 0.7 50 -24 19 -38 1 +17 0.9 0.2 49 13 -8 -46 1 +17 0.3 0.1 45 -13 -35 -38 1 +17 0.3 0.4 -5 -30 13 -35 2 +17 0.9 0.4 27 -31 43 36 2 +17 0.4 0.5 33 -22 -29 -33 1 +17 0.6 0.2 40 -40 8 -12 1 +17 0.1 0.3 43 -22 0 -39 1 +17 0.1 0.5 13 -40 18 -34 2 +17 0.5 0.8 -1 -26 41 -46 2 +17 0.5 0.9 -2 -49 25 5 2 +17 0.4 0.8 -32 -36 40 -3 2 +17 0.2 0.8 32 -17 37 3 2 +17 0.7 0.4 18 16 44 -50 1 +17 0.5 0.2 -5 -36 -20 -27 1 +17 0.8 0.6 -12 -17 20 -30 2 +17 0.5 0.1 -36 -41 8 7 2 +17 0.3 0.1 43 -50 13 10 2 +17 0.4 0.5 -16 -39 34 13 2 +17 0.9 0.8 37 8 28 8 2 +17 0.3 0.9 1 -8 8 -13 2 +17 0.7 0.8 4 -31 5 -16 2 +17 0.8 0.9 -17 -36 35 -22 2 +17 0.7 0.8 -23 -25 6 -28 2 +17 0.5 0.2 43 -34 26 -2 2 +17 0.7 0.8 14 -32 17 2 2 +17 0.7 0.9 8 -33 4 -32 2 +17 0.9 0.5 23 -43 -1 -6 1 +17 0.9 0.7 -10 -50 32 3 2 +17 0.7 0.3 6 -30 -12 -19 1 +17 0.2 0.6 48 -4 30 -15 2 +17 0.6 0.1 5 -8 37 14 2 +17 0.7 0.8 17 -4 25 -49 2 +17 0.9 0.4 -10 -47 -5 -46 1 +17 0.6 0.5 45 -4 -26 -38 1 +17 0.3 0.8 -13 -44 31 6 2 +17 0.4 0.9 4 2 -10 -35 1 +17 0.5 0.9 42 16 44 27 2 +17 0.4 0.1 50 0 -27 -38 1 +17 0.7 0.1 37 26 9 -33 1 +17 0.2 0.9 47 -14 36 35 2 +17 0.7 0.3 37 15 47 -32 1 +17 0.1 0.7 -6 -19 36 27 2 +17 0.9 0.1 46 17 2 -47 1 +17 0.4 0.6 8 -12 -28 -45 1 +17 0.4 0.6 8 -44 38 -24 2 +17 0.3 0.7 19 -5 -18 -29 1 +17 0.2 0.8 22 -9 23 -20 2 +17 0.1 0.7 21 -13 23 0 2 +17 0.2 0.4 -6 -8 34 -41 2 +17 0.6 0.6 10 -32 45 28 2 +17 0.1 0.7 39 23 49 16 2 +17 0.1 0.7 29 7 30 -27 2 +17 0.8 0.3 6 -46 -8 -29 1 +17 0.2 0.7 46 -44 28 -26 2 +17 0.7 0.6 19 -20 13 -49 1 +17 0.3 0.2 28 21 -22 -48 1 +18 0.5 0.5 21 -3 35 -17 2 +18 0.4 0.7 9 -38 4 -42 2 +18 0.3 0.4 -13 -42 35 13 2 +18 0.8 0.8 30 -41 32 -47 2 +18 0.1 0.8 34 -4 -20 -35 1 +18 0.4 0.8 14 -25 8 -43 2 +18 0.5 0.6 42 27 21 -38 1 +18 0.5 0.2 -8 -41 49 -17 2 +18 0.7 0.4 45 4 -11 -16 1 +18 0.5 0.7 1 -2 -7 -41 1 +18 0.6 0.3 -1 -35 10 1 2 +18 0.4 0.2 0 -12 -21 -31 2 +18 0.2 0.7 0 -21 -7 -12 2 +18 0.7 0.3 
27 -16 -15 -19 1 +18 0.2 0.3 9 -10 25 -22 2 +18 0.5 0.9 -21 -29 48 11 2 +18 0.2 0.1 38 -13 -23 -46 1 +18 0.3 0.5 23 9 15 -47 1 +18 0.9 0.8 9 -1 39 4 2 +18 0.1 0.4 17 9 31 -28 1 +18 0.4 0.6 3 -15 27 -23 2 +18 0.3 0.6 21 -6 48 -10 2 +18 0.1 0.8 46 14 42 19 2 +18 0.6 0.4 47 -19 46 -34 1 +18 0.1 0.3 -32 -50 -9 -11 2 +18 0.2 0.2 9 -38 -32 -33 1 +18 0.9 0.4 -10 -23 25 -40 2 +18 0.4 0.1 -24 -31 -33 -39 2 +18 0.6 0.8 9 -41 -8 -50 1 +18 0.2 0.5 50 38 22 -24 1 +18 0.1 0.3 -10 -44 22 19 2 +18 0.9 0.4 35 17 28 13 1 +18 0.4 0.8 22 5 30 11 2 +18 0.1 0.7 36 9 8 4 1 +18 0.3 0.5 -7 -14 11 0 2 +18 0.9 0.7 33 -46 21 -32 1 +18 0.8 0.2 -16 -24 30 0 2 +18 0.8 0.4 50 8 19 -8 1 +18 0.5 0.7 21 -10 49 10 2 +18 0.6 0.7 48 14 49 9 1 +18 0.3 0.9 -30 -47 -4 -39 2 +18 0.3 0.4 -22 -28 35 -28 2 +18 0.1 0.4 27 11 -25 -29 1 +18 0.9 0.1 20 17 35 6 1 +18 0.5 0.4 44 -16 -32 -46 1 +18 0.7 0.6 35 -17 -30 -33 1 +18 0.3 0.6 36 24 25 -34 1 +18 0.4 0.9 50 -26 8 -17 2 +18 0.9 0.2 20 11 30 -41 1 +18 0.5 0.6 -14 -49 22 -42 2 +18 0.9 0.9 36 -34 21 -23 1 +18 0.8 0.7 29 -3 23 17 2 +18 0.5 0.8 35 14 -37 -49 1 +18 0.2 0.6 35 -1 44 -40 2 +18 0.9 0.8 47 37 -28 -42 1 +18 0.6 0.5 -40 -43 39 -27 2 +18 0.9 0.1 26 8 44 12 1 +18 0.3 0.7 39 -17 -2 -27 1 +18 0.7 0.6 -30 -40 39 -28 2 +18 0.2 0.9 32 -17 47 -43 2 +18 0.7 0.7 17 -47 21 -14 2 +18 0.6 0.7 -34 -40 47 -15 2 +18 0.9 0.1 20 16 -17 -32 1 +18 0.2 0.4 47 39 47 25 1 +18 0.5 0.4 9 -50 10 -31 1 +18 0.2 0.8 17 1 45 10 2 +18 0.4 0.5 -28 -33 33 -19 2 +18 0.9 0.8 19 5 17 -7 1 +18 0.6 0.5 28 25 49 -2 1 +18 0.4 0.1 33 29 -10 -46 1 +18 0.4 0.9 -15 -31 44 26 2 +18 0.1 0.1 -26 -28 11 -40 1 +18 0.8 0.3 41 -33 41 8 2 +18 0.4 0.7 1 -35 13 -46 2 +18 0.8 0.2 31 -36 19 -23 1 +18 0.4 0.9 24 -30 21 -45 2 +18 0.2 0.1 41 -26 7 -46 1 +18 0.9 0.9 -34 -39 29 -46 2 +18 0.9 0.9 12 -39 13 -32 1 +18 0.4 0.5 39 -18 5 -34 1 +18 0.2 0.2 8 -27 -38 -42 1 +18 0.6 0.3 36 -16 12 -38 1 +18 0.2 0.5 10 -48 24 20 2 +18 0.7 0.2 34 28 49 39 2 +18 0.3 0.5 44 -47 -20 -47 1 +18 0.3 0.5 21 3 16 -6 1 +18 0.6 0.1 21 19 -18 -47 1 +18 0.7 0.7 47 -23 29 -20 2 +18 0.4 0.6 25 18 16 -40 1 +18 0.6 0.4 -39 -50 -11 -13 2 +18 0.4 0.9 28 -35 12 -49 2 +18 0.1 0.4 14 -40 32 -28 2 +18 0.5 0.6 35 -17 36 -40 1 +18 0.6 0.6 31 15 28 24 1 +18 0.5 0.2 43 -21 -19 -36 1 +18 0.7 0.9 30 14 29 -41 1 +18 0.8 0.3 21 -42 48 35 2 +18 0.9 0.5 38 32 32 -31 1 +18 0.9 0.6 47 -34 35 20 1 +18 0.4 0.2 -25 -48 -40 -47 1 +18 0.2 0.5 -24 -28 46 44 2 +18 0.4 0.6 46 -35 18 -26 2 +18 0.7 0.4 11 -20 30 27 2 +18 0.4 0.3 37 -33 -3 -36 1 +18 0.7 0.2 -18 -31 40 -1 2 +18 0.9 0.4 17 -40 -26 -43 1 +18 0.1 0.6 12 4 -5 -35 1 +18 0.1 0.8 35 24 42 -3 2 +18 0.5 0.9 19 -2 5 -29 2 +18 0.8 0.9 42 -19 23 9 1 +18 0.3 0.9 37 -15 -6 -11 1 +18 0.4 0.5 46 36 -17 -40 1 +18 0.1 0.9 39 -23 38 -50 2 +18 0.3 0.3 24 18 -1 -19 1 +18 0.5 0.3 22 10 6 -50 1 +18 0.7 0.8 14 -41 25 -30 2 +18 0.8 0.2 47 42 41 29 1 +18 0.3 0.1 17 -2 47 -5 1 +18 0.6 0.9 50 37 39 -29 1 +18 0.9 0.1 14 11 6 -38 1 +18 0.8 0.3 29 -20 15 5 1 +18 0.6 0.1 -7 -33 47 -27 2 +18 0.4 0.6 33 -24 40 -6 2 +18 0.7 0.3 -35 -40 48 -36 2 +18 0.1 0.5 32 16 24 -20 1 +18 0.7 0.7 47 -47 32 30 2 +18 0.8 0.9 49 22 38 -48 1 +18 0.2 0.3 38 -27 6 -24 2 +18 0.5 0.3 21 -25 41 -16 1 +18 0.6 0.9 26 -10 32 -48 2 +18 0.7 0.5 -16 -21 45 39 2 +18 0.2 0.3 2 -30 29 -8 2 +18 0.5 0.3 16 5 40 6 2 +18 0.6 0.3 19 -18 21 -29 1 +18 0.8 0.9 48 -1 13 8 1 +18 0.9 0.3 19 18 -5 -46 1 +18 0.1 0.7 30 -15 19 -8 2 +18 0.3 0.5 -28 -50 -16 -47 1 +18 0.1 0.1 50 -44 35 12 2 +18 0.1 0.8 20 -43 35 26 2 +18 0.3 0.4 33 -37 23 -25 2 +18 0.7 0.5 8 2 -8 -37 1 +18 0.3 0.2 3 -15 -10 -39 1 +18 0.4 0.6 -15 
-50 19 -13 2 +18 0.3 0.3 18 12 43 21 2 +18 0.7 0.5 40 38 11 -7 1 +18 0.9 0.4 16 -34 -16 -36 1 +18 0.2 0.8 44 -32 -38 -48 1 +18 0.7 0.5 50 26 42 39 2 +18 0.4 0.5 37 -18 45 -4 2 +18 0.3 0.8 22 -40 17 -12 2 +18 0.5 0.3 5 -11 36 -21 2 +18 0.5 0.3 2 -42 38 23 2 +18 0.7 0.8 -11 -39 33 4 2 +18 0.4 0.4 39 11 -28 -37 1 +18 0.5 0.1 -12 -36 -41 -49 1 +18 0.8 0.3 -5 -13 -29 -32 1 +18 0.2 0.8 12 4 42 0 2 +18 0.8 0.1 3 -14 6 -19 1 +18 0.1 0.4 -8 -40 -22 -48 2 +18 0.3 0.4 36 -27 20 -46 1 +18 0.3 0.2 41 6 42 39 2 +18 0.2 0.4 19 -7 9 -27 1 +18 0.9 0.6 31 5 16 -46 1 +18 0.3 0.5 -9 -18 10 -38 2 +18 0.8 0.9 34 -4 34 11 2 +18 0.3 0.3 38 14 14 -48 1 +18 0.3 0.7 28 3 25 7 2 +18 0.2 0.8 11 -17 12 -27 2 +18 0.1 0.2 1 -32 -22 -24 2 +18 0.4 0.2 -5 -30 19 -37 1 +18 0.9 0.8 -20 -22 -23 -25 1 +18 0.5 0.1 8 -42 19 -25 1 +18 0.1 0.7 41 31 13 -42 1 +18 0.5 0.5 -6 -32 -17 -39 1 +18 0.4 0.7 36 -45 44 33 2 +18 0.6 0.6 19 -41 -11 -12 1 +18 0.4 0.5 8 -50 24 -8 2 +18 0.5 0.8 5 -12 5 -9 2 +18 0.5 0.4 48 -5 26 -23 1 +18 0.3 0.9 19 12 10 -30 1 +18 0.6 0.8 12 -27 45 42 2 +18 0.4 0.3 46 5 39 -20 1 +18 0.2 0.7 33 25 31 27 1 +18 0.5 0.5 17 -7 3 -5 2 +18 0.9 0.8 13 -36 43 14 2 +18 0.6 0.7 20 -8 47 -34 2 +18 0.6 0.3 -5 -16 -23 -38 1 +18 0.2 0.7 25 -29 29 18 2 +18 0.2 0.7 -20 -45 44 -40 2 +18 0.9 0.2 50 -38 21 -46 1 +18 0.2 0.2 3 -27 35 7 2 +18 0.8 0.1 43 -34 46 13 2 +18 0.1 0.4 27 -18 -33 -49 1 +18 0.7 0.5 21 -4 41 -2 2 +18 0.2 0.1 46 -6 42 -16 1 +18 0.7 0.6 27 12 0 -21 1 +18 0.3 0.9 38 -47 41 -48 2 +18 0.3 0.4 50 21 45 18 2 +18 0.4 0.6 -17 -49 -9 -44 2 +18 0.5 0.4 37 -12 30 -28 1 +18 0.1 0.3 35 -10 -9 -28 1 +18 0.7 0.5 5 -27 -25 -38 1 +18 0.7 0.8 48 -11 27 -11 1 +18 0.7 0.7 20 -7 41 -22 2 +18 0.3 0.6 9 -38 -14 -16 2 +18 0.6 0.8 12 -25 -11 -46 1 +18 0.1 0.4 38 -4 25 -20 2 +18 0.5 0.7 34 18 -12 -34 1 +18 0.1 0.1 -8 -21 40 -36 2 +18 0.5 0.8 21 7 18 4 2 +18 0.9 0.7 35 22 21 -21 1 +18 0.6 0.2 49 33 35 -8 1 +18 0.5 0.4 18 -7 35 -2 2 +18 0.8 0.3 36 -41 41 -40 1 +18 0.2 0.2 5 -47 -8 -9 2 +18 0.3 0.7 -20 -46 31 19 2 +18 0.1 0.7 -14 -50 49 37 2 +18 0.6 0.6 9 -41 50 16 2 +18 0.7 0.6 7 -31 49 -19 2 +18 0.8 0.2 -18 -29 20 -27 2 +18 0.1 0.3 16 -4 -8 -41 1 +18 0.2 0.2 45 -29 43 19 2 +18 0.4 0.3 6 -22 50 7 2 +18 0.8 0.1 -33 -44 -7 -23 2 +18 0.3 0.2 49 14 -33 -47 1 +18 0.1 0.1 0 -30 32 27 2 +18 0.4 0.6 36 -45 38 -32 1 +18 0.4 0.9 42 25 50 -34 2 +18 0.3 0.2 43 5 20 -45 1 +18 0.1 0.2 -8 -30 45 27 2 +18 0.5 0.4 -10 -27 25 -22 2 +18 0.2 0.6 46 41 34 -29 1 +18 0.6 0.9 13 4 -12 -27 1 +18 0.8 0.4 6 -8 -29 -48 1 +18 0.3 0.9 26 -45 -34 -35 1 +18 0.8 0.4 48 -10 -26 -48 1 +18 0.6 0.1 1 -8 5 -12 2 +18 0.8 0.1 -47 -48 43 37 2 +18 0.9 0.7 27 -17 38 3 1 +18 0.5 0.2 44 8 40 -42 1 +18 0.1 0.7 30 28 33 26 2 +18 0.7 0.9 -5 -12 -4 -28 1 +18 0.3 0.3 43 10 11 -5 1 +18 0.4 0.9 41 -33 25 -48 2 +18 0.2 0.9 32 13 21 -31 1 +18 0.6 0.4 -28 -39 27 16 2 +18 0.5 0.3 -4 -38 40 12 2 +18 0.2 0.5 -3 -31 5 -39 2 +18 0.6 0.2 -18 -32 17 -11 2 +18 0.5 0.1 6 -43 18 -47 1 +18 0.6 0.5 6 -41 35 16 2 +18 0.3 0.4 24 -24 10 -6 2 +18 0.8 0.8 -14 -27 7 -14 2 +18 0.2 0.7 30 -47 5 -13 2 +18 0.8 0.5 -25 -49 23 -9 2 +18 0.2 0.1 39 18 22 -38 1 +18 0.2 0.7 -14 -31 37 -39 2 +18 0.3 0.6 46 -6 8 1 2 +18 0.6 0.1 23 -26 34 5 1 +18 0.5 0.3 49 36 40 -43 1 +18 0.5 0.2 45 18 42 40 2 +18 0.7 0.4 0 -33 20 -12 2 +18 0.4 0.3 46 39 28 -44 1 +18 0.5 0.5 45 -10 30 10 2 +18 0.2 0.3 29 -25 19 -16 2 +18 0.1 0.5 50 -5 43 -37 2 +18 0.9 0.5 30 25 -10 -34 1 +18 0.2 0.8 39 -26 -26 -34 1 +18 0.1 0.1 27 -8 19 0 1 +18 0.5 0.8 50 40 38 34 2 +18 0.7 0.8 9 -6 35 -31 2 +18 0.5 0.9 -47 -48 43 -48 2 +18 0.1 0.6 23 5 -11 -42 1 +18 0.8 0.1 
11 -25 34 18 2 +18 0.7 0.2 45 -43 21 -39 1 +18 0.5 0.8 47 26 37 21 2 +18 0.3 0.7 50 43 23 -30 1 +18 0.9 0.7 30 -34 -30 -37 1 +18 0.2 0.6 46 -30 -12 -39 1 +18 0.3 0.4 -23 -47 -14 -36 1 +18 0.4 0.5 -20 -50 11 -45 2 +18 0.4 0.4 33 -31 -1 -3 1 +18 0.5 0.5 25 -29 17 11 2 +18 0.3 0.6 21 -42 46 27 2 +18 0.9 0.5 -16 -25 -23 -26 2 +18 0.9 0.8 8 -49 -5 -24 1 +18 0.2 0.3 9 -50 0 -14 2 +18 0.6 0.2 24 -13 39 -7 1 +18 0.8 0.2 -3 -39 35 2 2 +18 0.9 0.7 44 38 41 -21 1 +18 0.8 0.6 26 -21 14 -28 1 +18 0.9 0.7 38 -9 45 -28 1 +18 0.3 0.9 49 20 26 -1 1 +18 0.2 0.9 36 26 -20 -29 1 +18 0.3 0.5 -31 -32 17 4 2 +18 0.6 0.3 34 -26 50 44 2 +18 0.8 0.5 35 18 41 7 1 +18 0.9 0.8 -21 -24 10 -33 2 +18 0.8 0.5 38 -41 49 28 2 +19 0.5 0.5 49 -4 -7 -50 1 +19 0.6 0.5 3 -17 45 -39 2 +19 0.2 0.9 19 18 27 8 1 +19 0.9 0.2 50 20 -10 -19 1 +19 0.3 0.6 42 9 13 -40 1 +19 0.6 0.4 28 -14 18 6 2 +19 0.3 0.3 9 -31 -11 -33 1 +19 0.6 0.8 45 21 -18 -50 1 +19 0.7 0.2 50 0 -21 -38 1 +19 0.7 0.2 12 -41 -14 -37 1 +19 0.4 0.6 31 15 -6 -18 1 +19 0.2 0.4 32 -44 47 20 2 +19 0.8 0.5 21 -15 3 -35 1 +19 0.2 0.5 21 4 20 -48 1 +19 0.1 0.5 28 -47 -16 -18 2 +19 0.9 0.5 40 29 33 -26 1 +19 0.7 0.1 44 36 8 -3 1 +19 0.7 0.6 4 -34 18 -29 2 +19 0.3 0.7 16 12 -32 -49 1 +19 0.8 0.3 -23 -35 24 -20 2 +19 0.5 0.1 -20 -30 37 -3 2 +19 0.8 0.9 21 -48 28 6 2 +19 0.7 0.7 21 12 38 -34 2 +19 0.9 0.6 44 -19 44 -49 1 +19 0.2 0.9 6 -8 21 10 2 +19 0.7 0.7 23 -21 50 -43 1 +19 0.1 0.2 -17 -35 16 -7 2 +19 0.5 0.2 14 -45 -30 -48 1 +19 0.4 0.3 21 -24 -21 -22 1 +19 0.3 0.1 13 -46 9 -30 1 +19 0.3 0.4 -22 -38 -25 -41 2 +19 0.7 0.9 48 3 25 12 2 +19 0.1 0.3 50 43 29 -37 1 +19 0.8 0.4 16 8 -33 -40 1 +19 0.2 0.3 49 -5 19 -41 1 +19 0.4 0.7 49 46 50 -9 1 +19 0.2 0.5 -18 -29 9 -14 2 +19 0.4 0.7 23 -45 21 -18 2 +19 0.6 0.4 30 -13 33 -7 1 +19 0.5 0.3 15 -14 42 11 2 +19 0.9 0.6 -15 -21 45 -40 2 +19 0.5 0.7 23 -35 26 24 2 +19 0.1 0.3 27 -40 -5 -21 2 +19 0.1 0.2 -16 -20 43 -45 2 +19 0.8 0.1 33 14 27 -9 1 +19 0.9 0.5 -22 -44 -15 -18 1 +19 0.4 0.1 21 -20 42 -46 1 +19 0.6 0.5 17 -28 45 -50 2 +19 0.2 0.1 8 -2 45 30 2 +19 0.5 0.6 -42 -50 31 -18 2 +19 0.2 0.8 29 3 28 -6 1 +19 0.4 0.2 25 -29 1 -34 1 +19 0.9 0.3 -34 -42 31 -37 2 +19 0.4 0.3 49 -23 -13 -19 1 +19 0.3 0.2 49 34 -6 -41 1 +19 0.4 0.4 0 -45 13 5 2 +19 0.8 0.5 47 -36 41 -5 1 +19 0.5 0.7 -29 -46 -7 -38 2 +19 0.8 0.6 30 24 17 -32 1 +19 0.9 0.1 -18 -49 31 1 2 +19 0.7 0.4 40 -43 50 -36 1 +19 0.2 0.4 38 30 -14 -45 1 +19 0.2 0.7 3 -41 -27 -38 2 +19 0.7 0.9 36 -36 -10 -46 1 +19 0.5 0.5 -9 -13 32 -41 2 +19 0.6 0.7 37 36 -20 -44 1 +19 0.1 0.2 30 1 43 39 2 +19 0.6 0.8 35 -31 46 -25 2 +19 0.2 0.5 24 -12 -29 -48 1 +19 0.5 0.6 -32 -44 -32 -35 1 +19 0.7 0.2 50 -50 39 -34 1 +19 0.5 0.3 4 -27 32 -6 2 +19 0.8 0.9 46 -10 15 -4 1 +19 0.7 0.5 12 -43 43 13 2 +19 0.3 0.8 18 -21 24 12 2 +19 0.4 0.1 25 24 44 -40 1 +19 0.6 0.6 -23 -46 44 -42 2 +19 0.7 0.2 -13 -18 20 -15 2 +19 0.1 0.5 -19 -28 -25 -41 1 +19 0.9 0.7 -14 -19 29 -35 2 +19 0.1 0.1 43 41 20 0 1 +19 0.5 0.7 -7 -35 23 -23 2 +19 0.2 0.7 24 22 43 -26 2 +19 0.2 0.8 32 -19 -7 -41 2 +19 0.3 0.9 15 -30 22 -49 2 +19 0.5 0.5 37 28 -34 -48 1 +19 0.5 0.4 26 -32 -24 -43 1 +19 0.4 0.8 34 33 23 -50 1 +19 0.7 0.2 34 -41 31 -6 1 +19 0.8 0.1 28 -15 32 -16 1 +19 0.2 0.9 41 4 32 -50 2 +19 0.4 0.8 43 -35 49 -22 2 +19 0.6 0.9 43 27 32 22 1 +19 0.1 0.1 36 -50 8 -50 1 +19 0.6 0.1 35 -35 -19 -44 1 +19 0.8 0.4 33 -2 -6 -7 1 +19 0.4 0.4 46 25 6 2 1 +19 0.3 0.2 18 -11 -33 -39 1 +19 0.7 0.5 36 29 -9 -44 1 +19 0.3 0.8 13 -7 1 -25 2 +19 0.6 0.7 -24 -32 21 -35 2 +19 0.7 0.2 9 -20 32 26 2 +19 0.5 0.3 47 4 -31 -33 1 +19 0.8 0.7 -10 -45 6 -35 2 
+19 0.4 0.2 -10 -30 39 -33 2 +19 0.6 0.1 -25 -34 28 -45 2 +19 0.7 0.7 23 7 -17 -27 1 +19 0.2 0.2 15 -32 7 -36 2 +19 0.9 0.3 34 28 -1 -10 1 +19 0.3 0.2 49 22 16 -7 1 +19 0.3 0.1 27 2 -7 -24 1 +19 0.5 0.1 25 -48 22 -35 1 +19 0.5 0.5 8 3 49 -48 1 +19 0.1 0.3 -2 -45 12 -45 2 +19 0.1 0.1 -23 -31 29 -48 1 +19 0.7 0.5 18 0 49 11 2 +19 0.8 0.2 -2 -23 42 9 2 +19 0.2 0.8 9 -10 -8 -50 1 +19 0.4 0.1 -23 -39 8 -12 2 +19 0.7 0.9 21 15 45 28 2 +19 0.5 0.8 -25 -46 16 -42 2 +19 0.5 0.4 -1 -16 22 -39 1 +19 0.5 0.5 5 -37 -4 -24 1 +19 0.2 0.8 -16 -19 -32 -35 1 +19 0.7 0.9 11 -4 -12 -34 1 +19 0.6 0.3 39 -37 25 15 2 +19 0.1 0.8 13 -37 7 -16 2 +19 0.5 0.8 -36 -50 49 -18 2 +19 0.2 0.9 47 1 15 -3 2 +19 0.2 0.2 49 31 6 -35 1 +19 0.9 0.9 45 2 -31 -33 1 +19 0.9 0.9 5 -31 35 -33 2 +19 0.1 0.5 19 16 19 -3 1 +19 0.2 0.5 43 -6 38 -16 2 +19 0.8 0.3 -33 -47 0 -49 2 +19 0.9 0.8 -31 -49 48 -49 2 +19 0.4 0.8 25 -23 24 -11 2 +19 0.5 0.6 42 37 -12 -20 1 +19 0.6 0.4 47 -33 50 38 2 +19 0.7 0.2 -19 -21 17 0 2 +19 0.7 0.2 13 1 -1 -36 1 +19 0.1 0.5 43 -36 -2 -12 2 +19 0.1 0.4 47 42 13 -14 1 +19 0.7 0.1 45 9 32 -6 1 +19 0.8 0.9 33 -34 3 -48 1 +19 0.8 0.2 -14 -25 -1 -34 2 +19 0.4 0.3 16 4 -34 -36 1 +19 0.4 0.5 -2 -47 33 16 2 +19 0.2 0.9 20 -39 -16 -30 1 +19 0.1 0.3 22 17 3 -44 1 +19 0.8 0.3 -8 -42 -23 -31 1 +19 0.7 0.1 48 6 43 5 1 +19 0.6 0.3 9 -35 -47 -48 1 +19 0.9 0.3 36 -43 26 -28 1 +19 0.4 0.1 -40 -48 25 -26 2 +19 0.5 0.2 45 -39 25 -7 2 +19 0.2 0.3 45 -46 11 -15 2 +19 0.3 0.2 43 4 -37 -39 1 +19 0.6 0.5 10 9 45 -36 1 +19 0.6 0.7 30 -12 26 -39 2 +19 0.1 0.3 49 1 19 -4 2 +19 0.8 0.4 24 -46 47 32 2 +19 0.8 0.9 -7 -41 6 -26 2 +19 0.1 0.9 -31 -47 20 -1 2 +19 0.9 0.6 -28 -41 9 0 2 +19 0.7 0.9 -32 -48 44 -11 2 +19 0.3 0.4 47 41 49 -12 1 +19 0.9 0.3 15 -12 18 6 1 +19 0.1 0.3 37 -8 31 11 2 +19 0.7 0.6 4 -41 19 -12 2 +19 0.7 0.7 43 -30 26 -3 1 +19 0.5 0.7 -6 -8 -11 -35 1 +19 0.4 0.8 -20 -21 34 -3 2 +19 0.9 0.1 32 10 48 -3 1 +19 0.9 0.8 16 15 41 21 2 +19 0.7 0.2 46 -15 -10 -45 1 +19 0.9 0.3 30 16 3 -22 1 +19 0.3 0.8 -1 -3 23 -17 2 +19 0.1 0.1 15 -49 44 30 2 +19 0.7 0.5 27 -35 40 -44 1 +19 0.4 0.8 14 -25 48 -41 2 +19 0.5 0.5 31 -24 35 5 2 +19 0.9 0.7 48 1 -32 -37 1 +19 0.2 0.6 50 2 8 -16 1 +19 0.7 0.8 -27 -46 41 -16 2 +19 0.3 0.5 13 -30 25 -5 2 +19 0.1 0.3 23 0 12 -19 1 +19 0.7 0.4 45 16 50 -20 1 +19 0.8 0.2 7 -39 -14 -32 1 +19 0.2 0.7 39 -49 13 -2 2 +19 0.6 0.9 21 -3 33 -1 2 +19 0.9 0.2 48 -3 0 -47 1 +19 0.5 0.1 -16 -28 35 -13 2 +19 0.3 0.3 44 -1 49 -34 1 +19 0.7 0.2 -11 -21 -19 -20 2 +19 0.7 0.4 3 -33 47 7 2 +19 0.7 0.8 -28 -46 37 -36 2 +19 0.7 0.5 -1 -25 15 11 2 +19 0.7 0.2 48 21 -27 -50 1 +19 0.2 0.8 -5 -25 16 -8 2 +19 0.4 0.9 50 22 48 44 1 +19 0.8 0.2 36 -49 14 13 1 +19 0.8 0.9 40 -21 27 -6 1 +19 0.3 0.7 49 3 12 9 1 +19 0.4 0.7 46 -23 17 -29 1 +19 0.9 0.2 -30 -49 41 -9 2 +19 0.1 0.6 23 0 -21 -39 1 +19 0.5 0.1 14 -30 -16 -19 1 +19 0.6 0.1 46 -46 -4 -26 1 +19 0.2 0.4 40 12 39 19 2 +19 0.5 0.9 4 -10 40 4 2 +19 0.9 0.7 -23 -26 18 -45 2 +19 0.7 0.3 14 -22 -30 -38 1 +19 0.6 0.2 32 -35 16 9 2 +19 0.9 0.2 -27 -47 -23 -50 1 +19 0.2 0.2 -23 -25 49 -44 2 +19 0.5 0.7 42 -48 19 -45 2 +19 0.7 0.4 -33 -43 43 -44 2 +19 0.2 0.5 10 -20 8 -19 2 +19 0.2 0.3 1 -17 -19 -47 1 +19 0.1 0.6 48 -27 -12 -48 1 +19 0.7 0.3 30 -40 36 -2 1 +19 0.1 0.5 38 -32 14 -24 2 +19 0.2 0.6 10 -35 0 -27 2 +19 0.6 0.8 25 -6 -19 -27 1 +19 0.4 0.1 -41 -47 -7 -47 1 +19 0.9 0.8 27 -50 49 37 2 +19 0.2 0.7 -43 -50 37 -21 2 +19 0.5 0.2 8 -27 44 42 2 +19 0.6 0.9 49 -32 35 -28 2 +19 0.7 0.9 27 25 2 -39 1 +19 0.7 0.2 45 -2 -5 -30 1 +19 0.2 0.4 46 26 11 -44 1 +19 0.2 0.2 32 -24 25 -31 1 +19 
0.3 0.7 40 33 23 -8 1 +19 0.3 0.7 9 -16 5 -23 2 +19 0.1 0.4 38 -45 9 -18 2 +19 0.2 0.2 41 -9 1 -39 1 +19 0.6 0.7 15 -12 -9 -48 1 +19 0.6 0.8 -14 -45 5 -44 2 +19 0.9 0.4 -19 -47 7 -15 2 +19 0.9 0.6 44 -4 27 -29 1 +19 0.6 0.5 0 -15 8 -43 1 +19 0.1 0.3 19 -1 30 25 2 +19 0.8 0.5 19 7 -25 -41 1 +19 0.8 0.2 -5 -33 44 -24 2 +19 0.8 0.5 20 -34 -22 -40 1 +19 0.9 0.2 -17 -48 -11 -39 1 +19 0.7 0.4 7 -37 43 -21 2 +19 0.2 0.7 35 -20 38 29 2 +19 0.1 0.9 48 40 48 14 2 +19 0.4 0.5 -12 -33 -2 -16 1 +19 0.1 0.9 14 -45 19 17 2 +19 0.5 0.1 17 -33 24 -47 1 +19 0.7 0.6 -7 -23 25 5 2 +19 0.8 0.3 41 34 -12 -20 1 +19 0.6 0.3 13 3 8 -18 1 +19 0.3 0.5 43 12 -10 -22 1 +19 0.7 0.3 48 -5 35 -33 1 +19 0.3 0.7 -1 -5 -22 -41 1 +19 0.6 0.3 50 22 14 -8 1 +19 0.2 0.5 30 -26 39 -6 2 +19 0.5 0.5 12 -7 21 11 2 +19 0.4 0.4 41 -50 -13 -50 1 +19 0.2 0.9 35 -49 -1 -31 2 +19 0.2 0.2 11 -44 16 -16 2 +19 0.1 0.2 40 -9 10 -50 1 +19 0.1 0.4 -25 -42 -15 -20 2 +19 0.8 0.8 30 -43 29 -8 1 +19 0.3 0.5 -9 -25 50 45 2 +19 0.2 0.4 30 -33 20 -29 2 +19 0.2 0.8 45 25 -5 -8 1 +19 0.6 0.5 22 -21 0 -21 1 +19 0.8 0.2 -12 -36 28 -34 2 +19 0.8 0.9 27 -26 -31 -40 1 +19 0.2 0.8 -21 -44 28 -23 2 +19 0.9 0.6 -10 -16 28 27 2 +19 0.8 0.5 45 -45 -15 -39 1 +19 0.4 0.8 5 2 -10 -13 2 +19 0.4 0.1 44 -7 44 -25 1 +19 0.8 0.4 18 5 19 3 1 +19 0.6 0.4 48 12 -48 -50 1 +19 0.6 0.5 49 -47 37 -38 1 +19 0.1 0.4 -36 -49 15 -49 2 +19 0.2 0.1 41 -5 -27 -35 1 +19 0.6 0.7 12 -5 5 -32 1 +19 0.3 0.3 -9 -32 18 -22 2 +19 0.8 0.5 25 11 -31 -32 1 +19 0.5 0.8 8 -27 41 -20 2 +19 0.5 0.6 8 -31 47 22 2 +19 0.9 0.1 37 13 38 -24 1 +19 0.6 0.2 -4 -50 18 -42 2 +19 0.7 0.7 10 -1 3 -2 2 +19 0.3 0.3 7 -35 12 -15 2 +19 0.3 0.6 42 -5 28 -50 1 +19 0.5 0.6 12 -27 26 20 2 +19 0.7 0.2 -7 -50 31 -50 1 +19 0.9 0.9 2 -35 47 37 2 +19 0.5 0.4 23 18 25 0 1 +19 0.4 0.8 18 -6 7 -35 1 +20 0.9 0.3 27 0 40 20 2 +20 0.4 0.8 4 -12 39 -9 2 +20 0.8 0.6 -18 -48 26 -37 2 +20 0.6 0.4 43 -21 20 -26 1 +20 0.1 0.9 26 13 5 1 1 +20 0.8 0.3 45 39 41 8 1 +20 0.8 0.1 39 -41 27 -5 1 +20 0.5 0.2 49 -10 -7 -15 1 +20 0.1 0.6 49 -35 48 -19 2 +20 0.2 0.8 26 -8 10 -20 1 +20 0.6 0.1 34 5 35 8 1 +20 0.3 0.5 -17 -47 31 -4 2 +20 0.5 0.2 48 -29 14 -3 1 +20 0.2 0.5 25 2 38 26 2 +20 0.6 0.1 31 -6 30 -8 1 +20 0.6 0.2 10 -27 -6 -14 1 +20 0.6 0.4 28 17 37 20 2 +20 0.1 0.7 36 13 20 0 1 +20 0.1 0.9 12 -37 -11 -25 2 +20 0.8 0.9 -9 -15 47 11 2 +20 0.9 0.1 43 1 -7 -31 1 +20 0.6 0.4 -13 -41 9 -25 2 +20 0.4 0.6 36 -26 14 -9 1 +20 0.2 0.8 -1 -36 2 -42 2 +20 0.4 0.4 32 5 45 30 2 +20 0.5 0.3 -22 -43 -6 -8 2 +20 0.6 0.2 21 13 -23 -43 1 +20 0.7 0.6 28 20 16 -30 1 +20 0.3 0.9 16 -32 24 14 2 +20 0.6 0.6 -26 -29 -6 -31 2 +20 0.1 0.5 43 33 22 -43 1 +20 0.3 0.2 7 -39 3 -19 2 +20 0.4 0.7 38 -3 22 3 2 +20 0.7 0.5 45 12 27 -16 1 +20 0.3 0.8 -18 -43 43 30 2 +20 0.6 0.2 37 17 25 -16 1 +20 0.4 0.7 31 -29 40 -44 2 +20 0.2 0.3 -1 -4 -3 -20 1 +20 0.1 0.3 33 19 33 15 1 +20 0.4 0.6 45 41 -3 -25 1 +20 0.3 0.3 34 -16 34 -11 1 +20 0.6 0.5 19 17 17 -40 1 +20 0.2 0.6 24 8 26 -31 2 +20 0.9 0.5 31 29 42 4 1 +20 0.7 0.1 -11 -23 14 -13 2 +20 0.4 0.6 -23 -43 13 -7 2 +20 0.8 0.2 36 -18 34 -30 1 +20 0.6 0.8 -33 -50 26 -8 2 +20 0.9 0.6 3 -31 34 -14 2 +20 0.5 0.6 27 6 21 -10 1 +20 0.7 0.2 -11 -29 13 -47 1 +20 0.5 0.6 -9 -48 19 -50 2 +20 0.7 0.9 3 -14 -34 -38 1 +20 0.1 0.4 34 -14 39 -2 2 +20 0.3 0.2 8 -36 37 -3 2 +20 0.7 0.2 2 -43 -30 -46 1 +20 0.5 0.7 37 -11 36 -9 2 +20 0.8 0.7 39 -4 25 -38 1 +20 0.6 0.7 25 -34 -16 -18 1 +20 0.6 0.2 21 5 23 21 2 +20 0.9 0.8 -16 -22 25 -25 2 +20 0.5 0.2 -11 -27 19 0 2 +20 0.1 0.9 20 6 31 -9 2 +20 0.6 0.1 19 -45 6 -37 1 +20 0.8 0.4 -4 -46 20 -48 1 +20 
0.5 0.7 20 4 44 5 2 +20 0.9 0.1 1 -49 24 5 2 +20 0.4 0.7 35 9 33 -8 1 +20 0.5 0.6 26 -44 -12 -21 1 +20 0.6 0.2 12 -40 44 -29 1 +20 0.8 0.8 31 -24 25 15 2 +20 0.9 0.4 27 9 31 -47 1 +20 0.8 0.5 -45 -46 8 -1 2 +20 0.4 0.8 50 -49 20 -35 2 +20 0.9 0.5 -8 -17 43 -44 2 +20 0.9 0.6 43 -10 32 -13 1 +20 0.9 0.3 31 9 38 -12 1 +20 0.4 0.3 46 -38 -30 -37 1 +20 0.1 0.4 47 -44 13 4 2 +20 0.5 0.7 14 -18 5 -10 2 +20 0.6 0.6 6 -8 10 7 2 +20 0.6 0.4 -20 -28 -25 -41 1 +20 0.6 0.5 37 5 -31 -48 1 +20 0.2 0.9 3 -21 -31 -38 1 +20 0.7 0.6 -9 -22 3 -27 2 +20 0.2 0.7 -18 -48 0 -25 2 +20 0.3 0.7 47 46 24 -31 1 +20 0.5 0.4 -40 -43 -16 -39 1 +20 0.8 0.3 37 -42 45 16 2 +20 0.9 0.8 21 -3 34 3 2 +20 0.5 0.2 21 -6 -11 -21 1 +20 0.7 0.7 48 -9 -35 -37 1 +20 0.3 0.8 22 -34 -14 -50 1 +20 0.5 0.7 4 3 19 -2 2 +20 0.3 0.2 -7 -42 20 -21 2 +20 0.5 0.3 -9 -20 -20 -30 2 +20 0.8 0.1 36 -39 -1 -45 1 +20 0.8 0.8 18 -36 -34 -40 1 +20 0.4 0.3 33 1 -14 -20 1 +20 0.8 0.7 37 -21 46 -40 1 +20 0.8 0.7 -37 -42 33 27 2 +20 0.2 0.6 46 -22 45 14 2 +20 0.5 0.6 -4 -39 11 -46 2 +20 0.4 0.2 25 -22 48 47 2 +20 0.8 0.4 38 -13 27 -47 1 +20 0.2 0.9 -26 -48 -20 -22 2 +20 0.6 0.8 -13 -25 30 -4 2 +20 0.4 0.6 -6 -28 29 27 2 +20 0.5 0.6 38 -46 17 -24 2 +20 0.8 0.3 27 -43 27 -1 1 +20 0.4 0.1 29 -2 -2 -7 1 +20 0.5 0.9 -26 -32 14 -11 2 +20 0.8 0.2 40 -47 9 2 1 +20 0.5 0.2 50 17 -21 -25 1 +20 0.8 0.4 38 -44 19 -13 1 +20 0.9 0.3 41 17 28 -25 1 +20 0.7 0.5 34 -3 48 -33 1 +20 0.4 0.8 42 41 44 41 2 +20 0.2 0.7 15 7 3 -31 1 +20 0.7 0.6 -13 -27 40 21 2 +20 0.8 0.5 -5 -12 -14 -33 1 +20 0.6 0.3 3 -23 45 18 2 +20 0.7 0.1 48 45 44 -10 1 +20 0.8 0.8 29 -10 29 -8 1 +20 0.3 0.9 -10 -23 25 -8 2 +20 0.3 0.8 18 -40 48 -34 2 +20 0.3 0.7 30 -33 7 -44 1 +20 0.7 0.4 40 -3 -5 -16 1 +20 0.7 0.1 -25 -38 -24 -26 2 +20 0.7 0.2 49 10 -14 -37 1 +20 0.1 0.5 6 -44 36 -33 2 +20 0.9 0.1 45 -40 38 -29 1 +20 0.4 0.7 34 11 49 20 1 +20 0.5 0.4 12 -48 -15 -35 1 +20 0.2 0.9 -7 -9 -26 -30 1 +20 0.4 0.4 -21 -26 40 -7 2 +20 0.8 0.4 18 -10 33 -1 2 +20 0.2 0.7 25 -14 15 -16 2 +20 0.9 0.4 46 16 42 -25 1 +20 0.9 0.3 44 25 -21 -32 1 +20 0.1 0.1 -4 -41 5 -37 1 +20 0.1 0.2 39 -28 22 -2 2 +20 0.8 0.5 35 -13 2 -7 1 +20 0.5 0.2 32 -47 42 -29 1 +20 0.8 0.6 -3 -25 6 -33 2 +20 0.4 0.4 1 -15 36 -44 1 +20 0.2 0.5 46 2 -1 -43 1 +20 0.9 0.7 29 -3 22 10 1 +20 0.4 0.3 13 -37 19 15 2 +20 0.3 0.2 28 -4 35 19 2 +20 0.1 0.5 -20 -36 19 -32 2 +20 0.9 0.5 18 10 -1 -29 1 +20 0.8 0.7 39 8 8 -10 1 +20 0.4 0.1 3 -24 31 -34 1 +20 0.4 0.3 29 -29 25 14 2 +20 0.7 0.1 -25 -30 2 -19 2 +20 0.1 0.6 22 -17 21 -11 2 +20 0.4 0.9 15 -31 -1 -36 1 +20 0.9 0.2 -3 -47 32 5 2 +20 0.1 0.9 43 -9 43 -44 2 +20 0.6 0.1 40 -40 27 2 1 +20 0.8 0.3 26 -1 49 -6 1 +20 0.6 0.5 48 3 -35 -44 1 +20 0.1 0.1 13 5 40 39 2 +20 0.1 0.5 20 -50 -13 -29 2 +20 0.1 0.6 -4 -19 -28 -34 1 +20 0.5 0.1 47 43 35 4 1 +20 0.4 0.8 -8 -39 48 28 2 +20 0.8 0.5 7 3 49 23 2 +20 0.4 0.9 2 -36 38 12 2 +20 0.6 0.9 47 23 28 -25 1 +20 0.3 0.9 -1 -5 34 -10 2 +20 0.4 0.7 34 -15 27 13 2 +20 0.8 0.3 50 13 32 -45 1 +20 0.8 0.8 13 8 14 -26 1 +20 0.8 0.2 5 -8 48 41 2 +20 0.7 0.6 34 -10 49 37 2 +20 0.7 0.3 40 33 21 -22 1 +20 0.5 0.6 12 -17 45 27 2 +20 0.4 0.1 -30 -41 48 27 2 +20 0.9 0.1 -24 -39 3 -41 2 +20 0.2 0.6 43 -3 34 27 2 +20 0.7 0.9 41 -16 38 12 2 +20 0.4 0.6 18 -30 38 -24 2 +20 0.7 0.5 36 21 28 -43 1 +20 0.5 0.2 43 -15 -18 -20 1 +20 0.6 0.5 2 -39 -15 -32 1 +20 0.1 0.5 -42 -49 -17 -36 2 +20 0.1 0.9 36 -40 -2 -24 2 +20 0.1 0.5 17 -34 10 -38 2 +20 0.7 0.4 11 -18 35 30 2 +20 0.7 0.4 20 -36 -35 -44 1 +20 0.8 0.8 -21 -30 -6 -39 2 +20 0.2 0.8 48 27 -24 -48 1 +20 0.4 0.7 14 -24 -30 -49 1 +20 0.2 0.4 
-9 -23 45 -49 2 +20 0.2 0.8 36 -48 25 19 2 +20 0.6 0.9 1 -48 33 16 2 +20 0.7 0.6 27 -31 37 -47 1 +20 0.4 0.5 -5 -49 31 20 2 +20 0.3 0.2 49 -2 46 23 2 +20 0.5 0.9 45 -16 44 -47 2 +20 0.8 0.8 1 -26 49 -1 2 +20 0.9 0.8 -20 -30 3 -9 2 +20 0.1 0.1 29 -47 43 40 2 +20 0.2 0.7 8 -8 -8 -34 1 +20 0.4 0.5 26 13 28 1 1 +20 0.9 0.4 48 18 25 -2 1 +20 0.5 0.4 47 28 -34 -45 1 +20 0.1 0.9 -40 -43 15 -3 2 +20 0.6 0.5 10 4 32 -25 1 +20 0.3 0.3 11 -14 30 -10 2 +20 0.4 0.7 24 -36 29 -50 2 +20 0.9 0.8 40 -1 39 -35 1 +20 0.7 0.2 -14 -17 -46 -49 1 +20 0.5 0.9 21 17 34 22 2 +20 0.7 0.1 30 -43 36 -35 1 +20 0.5 0.6 8 -28 31 17 2 +20 0.7 0.8 38 21 44 18 1 +20 0.3 0.3 0 -37 29 -26 2 +20 0.7 0.8 23 6 46 -26 2 +20 0.8 0.4 41 8 39 -42 1 +20 0.7 0.5 -12 -27 10 -24 2 +20 0.9 0.1 -29 -49 37 11 2 +20 0.6 0.1 -16 -18 6 -45 1 +20 0.8 0.2 -4 -30 19 3 2 +20 0.7 0.3 9 -43 33 27 2 +20 0.1 0.8 50 30 12 10 1 +20 0.3 0.1 -40 -42 33 -25 2 +20 0.9 0.7 17 -29 36 10 2 +20 0.3 0.8 33 -37 18 16 2 +20 0.5 0.1 -21 -25 13 -27 2 +20 0.9 0.7 -21 -36 -17 -28 1 +20 0.6 0.1 50 37 43 -42 1 +20 0.5 0.2 46 7 29 -17 1 +20 0.3 0.4 19 -40 36 26 2 +20 0.7 0.3 34 -40 43 2 2 +20 0.8 0.3 20 1 26 -6 1 +20 0.2 0.9 6 -26 39 -36 2 +20 0.6 0.7 48 9 29 24 2 +20 0.8 0.5 9 -16 -4 -15 1 +20 0.9 0.3 -9 -48 24 -22 2 +20 0.5 0.5 14 -3 -8 -15 1 +20 0.2 0.3 20 -14 23 -12 2 +20 0.9 0.9 48 -46 24 -29 1 +20 0.7 0.3 41 12 34 -20 1 +20 0.9 0.4 13 -32 31 -37 1 +20 0.2 0.6 30 -22 26 -49 2 +20 0.5 0.9 46 -16 39 -50 2 +20 0.5 0.7 42 34 -37 -39 1 +20 0.4 0.4 24 7 5 -42 1 +20 0.4 0.2 18 -42 11 -21 2 +20 0.8 0.8 -28 -45 13 -1 2 +20 0.3 0.9 20 -38 19 -6 2 +20 0.3 0.5 49 8 -19 -22 1 +20 0.5 0.2 40 -30 -10 -13 1 +20 0.3 0.8 40 29 28 26 1 +20 0.7 0.1 -6 -21 2 -22 1 +20 0.8 0.4 40 -35 -1 -22 1 +20 0.4 0.2 38 -31 1 -7 1 +20 0.7 0.9 13 -6 41 5 2 +20 0.5 0.1 46 31 36 -42 1 +20 0.7 0.7 -27 -49 47 -46 2 +20 0.8 0.3 -14 -33 -28 -40 1 +20 0.8 0.1 7 -31 24 -34 1 +20 0.3 0.4 28 3 26 -37 1 +20 0.8 0.6 22 -4 -14 -18 1 +20 0.7 0.8 -3 -7 14 -45 2 +20 0.3 0.4 46 -32 37 8 2 +20 0.1 0.6 38 -8 47 -16 2 +20 0.6 0.6 25 -33 28 -17 2 +20 0.9 0.8 42 25 18 -44 1 +20 0.8 0.7 44 -50 49 38 2 +20 0.3 0.4 28 -28 22 6 2 +20 0.4 0.6 -21 -47 43 -14 2 +20 0.5 0.2 23 -39 -15 -16 1 +20 0.1 0.3 7 -3 45 20 2 +20 0.7 0.1 21 11 38 -11 1 +20 0.6 0.2 6 -4 43 -17 1 +20 0.9 0.7 48 25 25 16 1 +20 0.4 0.7 6 -29 12 -30 2 +20 0.2 0.7 -47 -49 -20 -49 2 +20 0.7 0.8 -13 -26 -21 -39 1 +20 0.6 0.4 41 33 -20 -30 1 +20 0.8 0.7 -14 -43 19 -47 2 +20 0.1 0.3 5 -48 24 -8 2 +20 0.7 0.5 -8 -48 -3 -19 2 +20 0.5 0.5 40 20 -6 -17 1 +20 0.3 0.9 4 3 41 31 2 +20 0.6 0.6 -10 -18 40 -10 2 +20 0.4 0.4 -13 -36 -23 -48 1 +20 0.7 0.4 -18 -29 28 -27 2 +20 0.1 0.1 40 29 -28 -36 1 +20 0.6 0.6 33 -38 0 -48 1 +20 0.1 0.7 27 -34 5 -15 2 +20 0.7 0.7 -10 -11 3 -12 2 +20 0.6 0.7 24 -36 31 -17 2 +20 0.8 0.2 -29 -50 20 -46 2 +20 0.1 0.3 -9 -29 -26 -46 1 +20 0.6 0.9 15 -41 21 4 2 +21 0.3 0.3 -1 -35 6 -34 2 +21 0.2 0.9 45 25 45 -3 1 +21 0.2 0.1 -18 -29 -11 -46 1 +21 0.7 0.3 -7 -37 18 6 2 +21 0.4 0.4 34 -27 1 -17 1 +21 0.1 0.8 32 20 -2 -23 1 +21 0.8 0.7 42 13 50 31 2 +21 0.8 0.9 23 -15 15 -40 2 +21 0.7 0.6 -27 -42 -16 -39 2 +21 0.7 0.5 40 30 43 -5 1 +21 0.4 0.6 30 13 26 -36 1 +21 0.4 0.6 -4 -46 -9 -37 1 +21 0.9 0.1 42 -48 47 22 1 +21 0.6 0.9 24 2 11 -48 1 +21 0.8 0.5 46 20 40 16 1 +21 0.7 0.3 34 -29 45 6 2 +21 0.1 0.1 22 -22 38 -43 1 +21 0.8 0.7 -21 -30 50 -35 2 +21 0.2 0.9 45 -38 50 -42 2 +21 0.1 0.7 -7 -29 0 -7 2 +21 0.8 0.4 9 -50 -20 -26 1 +21 0.7 0.6 33 31 28 -21 1 +21 0.6 0.7 -17 -49 20 -43 2 +21 0.7 0.9 36 23 -8 -14 1 +21 0.2 0.9 49 -42 10 -13 2 +21 0.3 0.3 -11 -30 
-20 -36 2 +21 0.8 0.5 31 7 14 -43 1 +21 0.7 0.7 24 4 12 -14 1 +21 0.7 0.2 -14 -23 -11 -38 1 +21 0.8 0.3 47 -18 15 -24 1 +21 0.5 0.1 -10 -35 44 -14 2 +21 0.3 0.5 41 10 37 -30 1 +21 0.8 0.5 -16 -45 45 39 2 +21 0.3 0.4 -44 -49 17 -40 2 +21 0.1 0.1 32 24 0 -10 1 +21 0.5 0.2 -6 -47 9 -28 2 +21 0.2 0.3 49 -16 43 -44 2 +21 0.5 0.4 47 -21 -22 -45 1 +21 0.5 0.3 48 41 7 -25 1 +21 0.9 0.2 -20 -41 31 -2 2 +21 0.8 0.9 48 4 36 -37 2 +21 0.1 0.9 -43 -48 -13 -30 2 +21 0.7 0.7 -3 -44 7 -36 2 +21 0.3 0.2 31 0 -21 -27 1 +21 0.4 0.8 6 -41 44 12 2 +21 0.6 0.7 33 -22 25 0 1 +21 0.8 0.8 -24 -43 12 -35 2 +21 0.1 0.4 30 4 -11 -44 1 +21 0.3 0.4 33 -24 24 -18 1 +21 0.1 0.7 31 -23 27 -16 2 +21 0.9 0.8 27 1 27 -21 1 +21 0.4 0.4 -21 -31 -17 -35 2 +21 0.4 0.5 0 -39 20 -49 2 +21 0.9 0.5 25 22 3 -46 1 +21 0.8 0.7 -38 -42 5 -50 2 +21 0.3 0.6 40 23 1 -20 1 +21 0.3 0.9 33 -18 -8 -19 1 +21 0.7 0.9 36 18 -40 -41 1 +21 0.3 0.7 10 -20 50 37 2 +21 0.5 0.4 39 -41 45 -12 1 +21 0.8 0.4 12 -8 -29 -33 1 +21 0.9 0.6 18 2 49 -10 1 +21 0.7 0.1 25 -8 -6 -11 1 +21 0.6 0.7 44 -14 1 -14 1 +21 0.4 0.6 -30 -39 8 -27 2 +21 0.2 0.3 -1 -49 48 -29 2 +21 0.1 0.3 50 -49 10 6 2 +21 0.7 0.7 34 31 1 -27 1 +21 0.1 0.4 28 -19 45 20 2 +21 0.1 0.3 -35 -48 1 -7 2 +21 0.2 0.6 -29 -32 18 -16 2 +21 0.3 0.2 5 -19 34 -8 2 +21 0.6 0.2 -16 -36 -22 -23 2 +21 0.1 0.1 14 -39 23 14 2 +21 0.1 0.9 47 39 -15 -29 1 +21 0.8 0.8 15 -46 34 7 2 +21 0.2 0.7 38 -34 -11 -19 1 +21 0.1 0.5 -12 -47 16 -33 2 +21 0.6 0.1 -14 -24 -7 -40 1 +21 0.8 0.8 9 -13 35 -44 2 +21 0.5 0.8 -11 -14 13 -47 2 +21 0.6 0.1 1 -10 36 18 2 +21 0.6 0.9 7 -25 -35 -41 1 +21 0.7 0.9 3 -12 24 5 2 +21 0.4 0.9 19 -49 45 16 2 +21 0.9 0.6 4 -4 21 -32 2 +21 0.6 0.7 46 -22 30 -21 1 +21 0.9 0.4 -4 -5 30 -41 2 +21 0.1 0.5 40 -35 45 33 2 +21 0.7 0.1 49 -49 45 5 1 +21 0.1 0.3 -34 -48 0 -44 2 +21 0.2 0.2 -4 -45 16 -2 2 +21 0.1 0.1 14 1 -37 -46 2 +21 0.5 0.2 -9 -31 -27 -29 1 +21 0.9 0.4 -2 -20 -22 -29 1 +21 0.2 0.9 13 -10 -7 -43 1 +21 0.5 0.1 -12 -26 -28 -38 2 +21 0.9 0.9 16 -40 47 -4 2 +21 0.5 0.4 -3 -18 49 27 2 +21 0.6 0.1 44 35 2 -9 1 +21 0.1 0.5 -10 -45 16 -19 2 +21 0.9 0.3 30 2 23 -19 1 +21 0.4 0.7 -2 -25 -16 -48 2 +21 0.3 0.9 0 -18 8 -7 2 +21 0.6 0.7 -5 -42 37 17 2 +21 0.1 0.3 48 -39 50 -20 2 +21 0.1 0.8 40 24 15 -34 1 +21 0.5 0.4 36 -48 41 -37 1 +21 0.8 0.4 9 -28 41 11 2 +21 0.2 0.9 -8 -47 -29 -33 2 +21 0.5 0.8 24 9 -22 -48 1 +21 0.7 0.2 28 -42 18 -8 1 +21 0.7 0.2 42 -33 -6 -12 1 +21 0.3 0.3 35 21 35 -6 2 +21 0.3 0.5 13 -40 30 26 2 +21 0.9 0.4 50 35 34 0 1 +21 0.8 0.3 -3 -7 43 -5 1 +21 0.2 0.9 -20 -25 7 1 2 +21 0.1 0.1 -9 -45 40 -34 2 +21 0.5 0.9 -13 -39 50 9 2 +21 0.5 0.7 39 24 23 -44 1 +21 0.1 0.9 48 35 17 1 1 +21 0.2 0.9 -10 -38 18 -30 2 +21 0.5 0.2 -2 -42 3 -4 2 +21 0.7 0.1 15 10 8 -27 1 +21 0.1 0.7 31 -39 -32 -40 2 +21 0.3 0.8 33 0 -39 -46 1 +21 0.3 0.1 50 0 29 -25 1 +21 0.8 0.3 30 -21 -34 -48 1 +21 0.5 0.1 32 -41 35 34 2 +21 0.2 0.6 2 -13 -8 -26 2 +21 0.1 0.8 37 25 1 -3 1 +21 0.1 0.2 18 -6 24 -45 1 +21 0.1 0.3 42 37 -19 -23 1 +21 0.9 0.6 35 31 4 -37 1 +21 0.6 0.3 7 -19 26 16 2 +21 0.8 0.4 11 -18 23 -44 1 +21 0.9 0.3 -6 -31 20 -42 2 +21 0.3 0.7 42 35 19 -47 1 +21 0.7 0.1 22 -38 26 20 2 +21 0.4 0.4 -5 -40 0 -7 2 +21 0.6 0.2 30 -6 33 -49 1 +21 0.1 0.6 43 32 -14 -44 1 +21 0.6 0.2 48 9 2 -4 1 +21 0.1 0.5 -13 -26 21 -30 2 +21 0.9 0.5 19 -2 42 -19 1 +21 0.5 0.3 -26 -42 49 9 2 +21 0.2 0.8 16 -9 32 1 2 +21 0.7 0.2 38 21 42 1 1 +21 0.2 0.5 -37 -50 -37 -40 2 +21 0.5 0.1 -27 -47 45 -28 2 +21 0.1 0.2 5 -13 5 -23 2 +21 0.3 0.4 19 -36 32 12 2 +21 0.8 0.7 -2 -42 40 -20 1 +21 0.2 0.1 -14 -17 11 -38 1 +21 0.7 0.5 -2 -23 49 48 
2 +21 0.3 0.4 31 -47 27 8 2 +21 0.1 0.9 32 -31 36 17 2 +21 0.7 0.6 50 0 47 -24 1 +21 0.6 0.2 -42 -43 33 10 2 +21 0.8 0.7 38 -6 -38 -48 1 +21 0.6 0.4 10 -40 32 5 2 +21 0.5 0.1 22 12 -2 -17 1 +21 0.3 0.4 29 -38 -20 -37 1 +21 0.3 0.6 50 40 -5 -41 1 +21 0.9 0.3 36 -50 43 -34 1 +21 0.9 0.8 36 -24 -12 -47 1 +21 0.9 0.3 20 -20 44 17 2 +21 0.9 0.4 39 -6 37 -43 1 +21 0.6 0.9 -13 -28 13 -47 2 +21 0.3 0.4 -9 -23 29 -34 2 +21 0.3 0.3 26 -40 23 -42 2 +21 0.5 0.7 38 35 36 20 1 +21 0.9 0.1 25 -22 37 23 1 +21 0.5 0.1 30 -38 33 -33 1 +21 0.4 0.9 18 -7 -14 -47 1 +21 0.2 0.2 -20 -36 -9 -50 2 +21 0.6 0.9 19 16 50 -28 2 +21 0.7 0.8 36 -6 -6 -20 1 +21 0.6 0.7 50 -34 46 15 2 +21 0.5 0.6 33 -36 -18 -25 1 +21 0.3 0.7 50 -46 25 -14 2 +21 0.4 0.5 -9 -46 50 -3 2 +21 0.2 0.1 11 -38 26 -12 2 +21 0.7 0.8 38 19 33 11 1 +21 0.5 0.2 39 -23 7 -4 2 +21 0.6 0.9 9 -11 -22 -39 1 +21 0.3 0.7 -21 -44 24 0 2 +21 0.6 0.4 29 -37 7 -34 1 +21 0.9 0.1 16 -26 37 -33 1 +21 0.3 0.4 24 6 20 19 1 +21 0.1 0.1 -17 -50 47 1 2 +21 0.6 0.3 45 -44 -37 -40 1 +21 0.4 0.8 16 -5 49 25 2 +21 0.7 0.7 38 -12 22 14 2 +21 0.9 0.3 -16 -27 -28 -31 1 +21 0.4 0.6 30 -8 -27 -42 1 +21 0.2 0.9 13 5 7 -13 2 +21 0.7 0.2 -22 -41 50 -15 2 +21 0.3 0.5 -35 -43 10 -1 2 +21 0.1 0.7 17 -49 7 -35 1 +21 0.9 0.6 21 7 -31 -45 1 +21 0.6 0.2 19 -8 -4 -22 2 +21 0.4 0.7 41 18 15 -38 1 +21 0.9 0.3 35 -14 50 -21 1 +21 0.5 0.6 41 18 27 -28 1 +21 0.8 0.1 -3 -17 29 -16 1 +21 0.7 0.6 -29 -32 -16 -21 2 +21 0.1 0.3 -17 -18 -4 -33 1 +21 0.3 0.4 34 -17 30 -15 2 +21 0.7 0.4 48 41 38 20 1 +21 0.8 0.1 -15 -27 37 -15 1 +21 0.5 0.2 -20 -43 25 0 2 +21 0.4 0.9 28 15 3 -36 1 +21 0.4 0.9 43 -17 -16 -21 1 +21 0.3 0.4 34 -17 23 -36 1 +21 0.5 0.9 5 -30 40 24 2 +21 0.9 0.2 49 10 0 -17 1 +21 0.3 0.3 46 -27 24 19 2 +21 0.7 0.9 44 2 -19 -25 1 +21 0.9 0.5 -9 -49 37 -5 2 +21 0.1 0.6 40 -19 -30 -42 1 +21 0.6 0.7 15 -14 -3 -45 1 +21 0.3 0.9 48 40 50 28 1 +21 0.1 0.5 -13 -16 8 -35 2 +21 0.8 0.4 50 -4 45 -31 1 +21 0.9 0.3 38 22 50 30 1 +21 0.8 0.9 -2 -46 -17 -37 2 +21 0.3 0.9 23 -17 11 -23 2 +21 0.7 0.8 -9 -50 37 -32 2 +21 0.4 0.9 43 22 1 -37 1 +21 0.2 0.3 19 -25 -35 -48 2 +21 0.3 0.2 26 9 -15 -50 1 +21 0.9 0.5 29 -14 -11 -17 1 +21 0.6 0.9 45 2 -29 -38 1 +21 0.2 0.9 -23 -27 41 -9 2 +21 0.6 0.7 36 8 4 1 1 +21 0.1 0.4 4 -5 11 -21 1 +21 0.5 0.1 49 -18 -35 -43 1 +21 0.1 0.7 16 -36 33 -39 2 +21 0.9 0.6 48 -44 38 -9 1 +21 0.8 0.2 22 -29 -42 -44 1 +21 0.5 0.3 14 -32 -17 -37 1 +21 0.5 0.7 18 -30 20 -32 2 +21 0.3 0.4 40 38 41 28 1 +21 0.3 0.4 47 -43 20 -20 2 +21 0.2 0.9 21 6 26 -19 2 +21 0.8 0.3 24 -18 44 28 2 +21 0.4 0.4 -8 -23 1 -45 2 +21 0.1 0.6 16 -46 49 -3 2 +21 0.9 0.6 19 -42 -20 -42 1 +21 0.8 0.5 -13 -23 39 30 2 +21 0.1 0.6 49 8 46 -18 1 +21 0.1 0.9 32 5 3 -14 2 +21 0.2 0.4 36 35 22 -17 1 +21 0.7 0.8 -9 -43 26 -38 2 +21 0.8 0.7 13 -26 1 -29 1 +21 0.3 0.3 34 27 26 -21 1 +21 0.3 0.7 48 41 -11 -40 1 +21 0.3 0.1 11 -32 -30 -38 1 +21 0.3 0.3 42 -2 -5 -7 2 +21 0.1 0.8 43 4 20 -47 1 +21 0.1 0.5 -9 -31 11 -5 2 +21 0.5 0.9 1 -4 17 5 2 +21 0.7 0.2 26 9 50 49 2 +21 0.9 0.7 0 -47 12 8 2 +21 0.7 0.6 1 -25 48 -36 2 +21 0.6 0.5 19 -4 32 1 1 +21 0.3 0.7 23 5 46 31 2 +21 0.3 0.4 30 -33 7 -10 2 +21 0.8 0.9 7 -47 42 -20 2 +21 0.9 0.6 46 -28 16 -19 1 +21 0.3 0.6 49 41 50 -14 1 +21 0.7 0.5 25 -48 -13 -34 1 +21 0.3 0.9 27 -37 9 -12 2 +21 0.2 0.1 11 -5 -35 -36 1 +21 0.1 0.6 30 -9 0 -23 1 +21 0.1 0.2 -33 -44 34 21 2 +21 0.8 0.2 42 39 6 -26 1 +21 0.4 0.4 -4 -40 -48 -49 1 +21 0.5 0.3 37 19 41 -22 1 +21 0.1 0.6 16 -31 31 -24 2 +21 0.4 0.5 9 -1 49 44 2 +21 0.8 0.7 35 -47 50 48 2 +21 0.6 0.2 5 -18 -11 -15 1 +21 0.2 0.1 27 -13 9 -41 1 +21 
0.2 0.4 -5 -31 28 -7 2 +21 0.5 0.8 29 -49 50 -16 2 +21 0.6 0.1 -23 -43 8 -17 2 +21 0.5 0.5 -20 -35 18 9 2 +21 0.4 0.8 23 -33 32 -6 2 +21 0.7 0.4 48 -16 12 11 1 +21 0.2 0.8 41 -2 25 24 2 +21 0.8 0.2 -19 -40 -15 -33 1 +21 0.1 0.9 50 31 20 -19 1 +21 0.8 0.2 7 -38 47 7 2 +21 0.9 0.7 27 6 -17 -23 1 +21 0.2 0.9 11 -37 12 -7 2 +21 0.4 0.6 36 -13 -15 -36 1 +21 0.3 0.2 -24 -48 4 -42 2 +22 0.9 0.2 23 12 32 -49 1 +22 0.1 0.6 -4 -17 47 -17 2 +22 0.7 0.8 -17 -25 32 -16 2 +22 0.3 0.8 31 -29 13 -7 2 +22 0.9 0.6 47 -28 46 -44 1 +22 0.8 0.4 29 -29 35 34 2 +22 0.4 0.4 -20 -47 13 -3 2 +22 0.4 0.3 24 9 1 -30 1 +22 0.3 0.2 22 -25 27 -34 1 +22 0.1 0.8 -16 -25 0 -27 2 +22 0.5 0.5 12 -19 -2 -14 1 +22 0.3 0.8 32 -19 5 -1 2 +22 0.8 0.3 30 13 3 -23 1 +22 0.8 0.2 39 35 34 30 1 +22 0.3 0.2 3 -4 15 -5 2 +22 0.6 0.5 33 7 26 21 1 +22 0.1 0.3 48 45 18 -44 1 +22 0.7 0.1 16 -7 45 33 2 +22 0.3 0.4 27 -46 20 -26 2 +22 0.7 0.6 39 -13 -14 -22 1 +22 0.2 0.7 22 15 42 -31 1 +22 0.1 0.5 48 5 32 -37 1 +22 0.8 0.8 -19 -35 42 34 2 +22 0.6 0.5 45 -17 23 -35 1 +22 0.2 0.7 4 -34 30 0 2 +22 0.8 0.4 13 8 43 7 2 +22 0.3 0.9 33 19 48 -46 1 +22 0.4 0.3 43 -20 29 10 2 +22 0.3 0.6 45 -17 16 -29 2 +22 0.3 0.2 13 11 41 21 1 +22 0.9 0.9 37 23 -1 -46 1 +22 0.4 0.9 6 -25 -15 -22 1 +22 0.1 0.8 -21 -32 23 5 2 +22 0.6 0.5 -12 -48 -16 -28 2 +22 0.8 0.6 32 6 -9 -31 1 +22 0.9 0.6 -12 -29 25 16 2 +22 0.1 0.7 43 13 13 -25 1 +22 0.9 0.7 41 13 42 -6 2 +22 0.9 0.1 38 -43 12 -19 1 +22 0.3 0.6 50 22 46 -6 1 +22 0.1 0.8 16 -19 -18 -49 2 +22 0.3 0.4 1 -9 46 31 2 +22 0.1 0.2 42 36 8 -2 1 +22 0.9 0.7 5 -19 -18 -49 1 +22 0.2 0.3 13 -3 5 -48 2 +22 0.6 0.8 4 -25 -10 -26 2 +22 0.8 0.6 31 -23 7 -21 1 +22 0.9 0.1 41 2 4 -24 1 +22 0.7 0.8 1 -32 44 8 2 +22 0.7 0.3 40 -49 17 -26 1 +22 0.6 0.3 -11 -33 36 -39 2 +22 0.5 0.7 -27 -34 25 15 2 +22 0.6 0.4 9 -3 26 -26 1 +22 0.1 0.4 -6 -35 28 26 2 +22 0.1 0.6 32 9 49 -44 2 +22 0.9 0.5 29 -12 39 -47 1 +22 0.8 0.8 47 2 -34 -35 1 +22 0.6 0.5 46 -39 41 -17 2 +22 0.3 0.7 38 -13 9 -22 1 +22 0.8 0.3 -17 -39 25 21 2 +22 0.8 0.2 -9 -38 -11 -31 1 +22 0.2 0.8 33 -6 45 40 2 +22 0.1 0.3 -16 -21 17 -46 1 +22 0.1 0.9 23 5 46 7 2 +22 0.8 0.8 24 -26 -2 -27 1 +22 0.4 0.8 23 -19 43 -45 2 +22 0.6 0.5 20 -18 1 -48 1 +22 0.2 0.2 -8 -30 32 -11 2 +22 0.7 0.6 17 16 -15 -21 1 +22 0.7 0.7 46 -5 17 -35 1 +22 0.4 0.8 12 -43 -16 -48 1 +22 0.6 0.2 35 14 -22 -36 2 +22 0.4 0.9 -26 -48 -21 -49 1 +22 0.1 0.9 14 -50 4 -2 2 +22 0.4 0.2 47 -30 41 -38 1 +22 0.8 0.6 13 -7 -9 -19 1 +22 0.4 0.5 50 20 39 -34 1 +22 0.6 0.3 22 5 2 -36 1 +22 0.8 0.8 31 -24 10 -46 1 +22 0.9 0.1 -26 -43 15 -2 2 +22 0.1 0.1 5 2 5 -43 1 +22 0.8 0.2 6 -1 37 3 2 +22 0.2 0.9 50 -40 22 7 2 +22 0.3 0.6 -6 -40 -39 -42 1 +22 0.2 0.6 -4 -44 41 13 2 +22 0.6 0.2 -16 -26 29 -9 2 +22 0.4 0.2 21 16 -27 -43 1 +22 0.3 0.8 49 26 36 -4 1 +22 0.2 0.5 -8 -45 -13 -37 1 +22 0.1 0.1 14 -27 8 -40 2 +22 0.4 0.9 4 -49 14 -42 2 +22 0.7 0.2 35 -47 -14 -40 1 +22 0.5 0.4 37 -44 17 14 2 +22 0.2 0.8 24 8 5 -3 2 +22 0.3 0.6 -20 -31 34 13 2 +22 0.8 0.7 48 24 -12 -13 1 +22 0.8 0.7 41 -36 36 26 2 +22 0.4 0.1 -18 -41 2 -24 2 +22 0.1 0.9 -25 -36 -3 -49 1 +22 0.1 0.3 40 32 -12 -21 1 +22 0.7 0.3 5 -42 15 -9 1 +22 0.3 0.5 21 -13 9 -45 2 +22 0.4 0.1 -5 -10 -22 -35 2 +22 0.4 0.4 35 8 -31 -46 1 +22 0.4 0.2 -11 -28 36 -45 1 +22 0.7 0.1 16 -11 49 19 2 +22 0.7 0.9 16 -21 47 7 2 +22 0.8 0.1 25 -49 9 -49 1 +22 0.2 0.6 3 0 46 -31 2 +22 0.8 0.3 -9 -20 3 -5 2 +22 0.2 0.3 36 -34 12 1 2 +22 0.7 0.2 -18 -39 43 -48 2 +22 0.7 0.6 23 -38 -15 -35 1 +22 0.5 0.1 37 -10 34 -22 1 +22 0.7 0.2 43 -1 7 -30 1 +22 0.4 0.7 42 -22 32 -2 2 +22 0.8 0.9 6 -36 34 9 2 +22 
0.1 0.8 -12 -50 29 -2 2 +22 0.5 0.5 -19 -46 -36 -48 1 +22 0.4 0.4 9 -7 -34 -49 1 +22 0.5 0.4 41 22 50 -35 1 +22 0.7 0.4 -39 -45 49 -36 2 +22 0.5 0.1 49 40 28 2 1 +22 0.7 0.1 -13 -27 41 30 2 +22 0.9 0.9 43 33 50 -15 1 +22 0.6 0.5 27 -19 36 -25 1 +22 0.3 0.6 -17 -50 -24 -27 1 +22 0.4 0.5 20 16 32 16 2 +22 0.8 0.8 24 -20 7 -17 1 +22 0.1 0.2 19 10 22 -18 1 +22 0.1 0.4 8 -12 11 -21 2 +22 0.1 0.2 -8 -33 -29 -36 1 +22 0.2 0.8 23 -38 3 -9 1 +22 0.1 0.1 22 -42 -21 -35 2 +22 0.6 0.7 28 20 38 13 2 +22 0.2 0.7 -11 -35 15 -30 2 +22 0.2 0.8 43 -6 -10 -48 1 +22 0.6 0.5 20 -32 16 -48 1 +22 0.6 0.2 24 12 24 -41 1 +22 0.7 0.3 34 -25 42 28 1 +22 0.4 0.8 -8 -13 -34 -45 1 +22 0.7 0.8 37 16 -19 -41 1 +22 0.8 0.8 -23 -44 28 -30 2 +22 0.5 0.1 40 27 50 -41 1 +22 0.3 0.7 -2 -19 8 -19 2 +22 0.7 0.7 13 -41 20 -47 2 +22 0.7 0.9 35 33 -18 -39 1 +22 0.7 0.4 -5 -34 33 -2 2 +22 0.3 0.5 -17 -42 -14 -33 1 +22 0.6 0.9 -13 -14 21 -37 2 +22 0.9 0.6 12 -17 -23 -36 2 +22 0.2 0.6 -21 -45 48 -31 2 +22 0.3 0.7 32 -43 34 -50 2 +22 0.6 0.8 40 -42 49 -36 2 +22 0.5 0.4 11 10 -24 -46 1 +22 0.2 0.9 23 -14 21 -31 2 +22 0.3 0.5 50 0 40 -48 1 +22 0.6 0.4 -26 -49 50 26 2 +22 0.8 0.6 12 -45 32 28 2 +22 0.5 0.5 35 -15 -18 -33 1 +22 0.1 0.7 1 -27 26 -39 2 +22 0.2 0.2 32 12 -18 -43 2 +22 0.2 0.5 35 -20 12 8 2 +22 0.2 0.9 44 -6 36 -43 2 +22 0.2 0.8 -3 -19 -5 -21 2 +22 0.4 0.1 48 20 15 -38 1 +22 0.2 0.1 -23 -43 38 0 2 +22 0.2 0.2 -18 -32 16 -21 1 +22 0.7 0.3 41 9 21 20 1 +22 0.9 0.6 49 -12 48 19 1 +22 0.1 0.5 32 -36 5 -27 2 +22 0.3 0.3 30 -9 13 -6 2 +22 0.1 0.8 32 4 40 -8 2 +22 0.2 0.8 27 -30 -4 -26 2 +22 0.6 0.3 22 -25 35 -24 2 +22 0.8 0.4 -34 -43 24 -27 1 +22 0.5 0.8 -39 -43 -18 -41 2 +22 0.7 0.7 23 -1 44 -44 1 +22 0.6 0.8 -32 -43 -8 -31 1 +22 0.1 0.9 40 31 47 -22 2 +22 0.3 0.6 2 0 -20 -47 1 +22 0.5 0.2 -36 -37 -21 -33 2 +22 0.6 0.4 34 20 8 -2 1 +22 0.9 0.8 -15 -20 16 -32 2 +22 0.9 0.4 -21 -27 35 -20 2 +22 0.9 0.9 27 -36 35 -15 2 +22 0.8 0.2 -13 -45 5 -9 2 +22 0.4 0.9 31 -7 50 16 2 +22 0.9 0.8 -9 -50 22 -45 2 +22 0.6 0.2 16 6 -22 -35 1 +22 0.2 0.6 48 20 29 -2 2 +22 0.5 0.8 36 -3 41 5 2 +22 0.5 0.4 15 -5 26 -16 2 +22 0.8 0.3 6 -24 -18 -38 1 +22 0.6 0.8 46 44 49 -44 1 +22 0.9 0.7 34 33 25 -5 1 +22 0.3 0.7 3 -19 25 13 2 +22 0.9 0.2 -37 -50 -16 -23 1 +22 0.2 0.4 4 -3 -45 -46 1 +22 0.7 0.2 30 -6 26 12 2 +22 0.6 0.3 8 -45 5 -17 2 +22 0.4 0.9 -2 -27 35 -50 2 +22 0.9 0.8 39 13 -30 -33 1 +22 0.3 0.9 45 7 26 -46 2 +22 0.7 0.8 35 -9 2 -37 1 +22 0.6 0.9 -16 -27 -18 -20 2 +22 0.8 0.7 14 -30 46 -32 1 +22 0.1 0.8 50 26 43 -6 2 +22 0.6 0.7 3 -16 -41 -42 1 +22 0.9 0.3 24 -15 10 -22 1 +22 0.1 0.3 10 2 18 -45 1 +22 0.8 0.5 -21 -45 -1 -25 1 +22 0.1 0.7 2 -39 -14 -22 1 +22 0.6 0.1 7 -34 18 -18 2 +22 0.4 0.4 5 -16 48 -12 1 +22 0.5 0.8 24 -42 15 11 2 +22 0.1 0.8 48 -5 16 -45 2 +22 0.3 0.6 41 14 38 -7 1 +22 0.2 0.5 34 -5 -10 -23 1 +22 0.8 0.7 -10 -16 39 -45 2 +22 0.1 0.6 27 -39 -9 -37 2 +22 0.3 0.5 36 27 40 -44 1 +22 0.2 0.3 -9 -29 8 -19 2 +22 0.8 0.9 38 17 23 13 2 +22 0.7 0.4 48 15 -22 -24 1 +22 0.5 0.6 9 4 8 7 1 +22 0.7 0.8 35 -32 -5 -34 1 +22 0.3 0.9 26 -5 -20 -46 1 +22 0.3 0.3 -17 -28 -2 -33 2 +22 0.1 0.8 -13 -17 4 -12 1 +22 0.4 0.3 -1 -39 28 -28 2 +22 0.2 0.9 8 -37 38 16 2 +22 0.2 0.7 50 -16 -9 -19 1 +22 0.9 0.1 -21 -32 41 -7 2 +22 0.4 0.5 32 -32 14 -35 2 +22 0.8 0.7 44 35 -9 -10 1 +22 0.5 0.8 13 -27 49 4 1 +22 0.6 0.3 34 20 -22 -47 1 +22 0.8 0.5 36 -3 48 30 1 +22 0.4 0.9 50 3 2 -32 1 +22 0.4 0.5 35 -25 39 -13 1 +22 0.7 0.6 8 -35 -12 -16 1 +22 0.8 0.8 21 -50 -31 -46 1 +22 0.1 0.8 41 -23 46 6 2 +22 0.5 0.9 10 -3 14 -30 2 +22 0.2 0.1 43 -21 -44 -48 1 +22 0.5 0.2 30 
8 43 40 2 +22 0.8 0.9 38 -47 42 -41 2 +22 0.2 0.1 13 -7 24 11 2 +22 0.7 0.7 8 -12 31 13 2 +22 0.4 0.1 39 -8 -21 -31 1 +22 0.7 0.3 7 -39 -17 -34 2 +22 0.1 0.5 49 -37 42 -9 2 +22 0.2 0.8 45 5 38 6 2 +22 0.9 0.4 33 -27 42 40 2 +22 0.3 0.7 14 -29 -5 -9 1 +22 0.8 0.3 45 -28 31 -47 1 +22 0.6 0.5 11 5 26 -2 2 +22 0.7 0.5 42 -5 11 -14 2 +22 0.4 0.7 47 28 29 26 2 +22 0.5 0.2 31 -20 -12 -47 1 +22 0.4 0.2 46 -6 49 -23 1 +22 0.7 0.8 18 -10 46 -17 2 +22 0.5 0.2 -19 -32 37 11 2 +22 0.1 0.9 42 32 48 -42 2 +22 0.3 0.4 21 -47 0 -18 1 +22 0.1 0.1 30 18 39 -13 1 +22 0.3 0.9 39 -12 3 1 2 +22 0.5 0.8 40 -36 7 -5 2 +22 0.2 0.6 -4 -39 -18 -30 1 +22 0.7 0.2 14 -1 -26 -41 1 +22 0.2 0.3 15 -31 22 -7 2 +22 0.4 0.5 50 -38 15 -8 2 +22 0.8 0.7 16 -37 -28 -34 1 +22 0.8 0.9 42 31 27 0 1 +22 0.4 0.6 -13 -29 11 -31 2 +22 0.6 0.8 14 -13 37 -8 2 +22 0.1 0.8 29 -29 8 -47 2 +22 0.9 0.1 3 -1 -13 -41 2 +22 0.1 0.5 -24 -42 24 20 2 +22 0.8 0.5 44 -5 16 -41 1 +22 0.7 0.4 34 -36 48 7 1 +22 0.1 0.9 35 8 10 -31 2 +22 0.3 0.9 48 -43 23 -17 1 +22 0.7 0.3 44 34 34 19 1 +22 0.3 0.2 27 -2 28 7 1 +22 0.3 0.1 -5 -17 44 -23 2 +22 0.8 0.2 24 2 23 -24 1 +22 0.7 0.3 38 -40 29 -6 1 +22 0.3 0.5 23 2 26 -49 1 +22 0.3 0.4 32 -6 14 -30 1 +22 0.6 0.2 42 20 39 -48 1 +22 0.1 0.9 35 29 20 12 1 +22 0.1 0.1 -18 -27 33 -45 1 +22 0.3 0.4 -6 -20 -7 -13 2 +22 0.7 0.7 30 -35 31 -36 1 +22 0.5 0.3 0 -31 -26 -30 2 +22 0.5 0.9 28 -19 -2 -7 1 +22 0.4 0.2 -13 -27 43 1 2 +22 0.1 0.7 39 12 -26 -35 1 +23 0.5 0.1 30 -35 18 -48 1 +23 0.7 0.9 -17 -43 -42 -46 1 +23 0.7 0.9 34 16 23 -7 1 +23 0.8 0.9 40 21 48 29 2 +23 0.6 0.2 -16 -20 23 -37 1 +23 0.3 0.2 21 -9 38 29 2 +23 0.4 0.8 50 -30 19 -32 2 +23 0.6 0.8 -14 -29 37 -35 2 +23 0.2 0.4 36 29 50 -2 1 +23 0.1 0.7 5 -33 -20 -38 2 +23 0.2 0.1 10 -41 24 -22 2 +23 0.5 0.4 5 -15 -17 -29 1 +23 0.6 0.3 32 3 -19 -33 1 +23 0.1 0.6 30 -44 2 -33 2 +23 0.8 0.1 38 -12 -22 -31 1 +23 0.3 0.3 -6 -37 33 17 2 +23 0.8 0.5 9 0 33 -13 2 +23 0.9 0.7 15 -30 23 -24 2 +23 0.7 0.4 39 -40 21 7 2 +23 0.7 0.5 8 -6 48 -33 2 +23 0.5 0.6 -11 -12 46 -26 2 +23 0.5 0.3 41 38 22 -47 1 +23 0.4 0.7 27 -25 -2 -50 1 +23 0.1 0.3 -19 -46 40 -45 2 +23 0.9 0.1 18 -2 28 -29 1 +23 0.7 0.4 26 -29 17 16 2 +23 0.1 0.1 -11 -24 35 -28 2 +23 0.2 0.2 18 -39 50 10 2 +23 0.7 0.7 14 -21 -46 -49 1 +23 0.1 0.8 31 -2 -8 -24 1 +23 0.2 0.8 30 -38 -11 -37 1 +23 0.6 0.1 19 -35 -6 -45 1 +23 0.6 0.9 -1 -10 -1 -50 1 +23 0.1 0.7 -24 -42 -33 -39 2 +23 0.7 0.1 -40 -50 27 -7 2 +23 0.9 0.1 34 -12 -40 -48 1 +23 0.3 0.5 46 -12 -9 -44 1 +23 0.4 0.8 11 -33 -1 -20 2 +23 0.6 0.4 36 27 3 -25 1 +23 0.4 0.2 -16 -22 35 25 2 +23 0.1 0.4 18 -37 50 36 2 +23 0.9 0.1 20 -27 -28 -49 1 +23 0.2 0.8 -17 -50 5 -23 2 +23 0.5 0.5 28 9 6 -12 1 +23 0.3 0.3 40 -41 4 2 2 +23 0.6 0.8 -10 -28 30 4 2 +23 0.2 0.1 -8 -13 -11 -43 1 +23 0.6 0.3 29 -22 38 24 2 +23 0.2 0.8 25 11 34 -32 2 +23 0.7 0.5 44 -37 -40 -50 1 +23 0.8 0.8 7 -46 18 -30 2 +23 0.6 0.7 49 3 41 9 2 +23 0.2 0.9 16 -25 39 2 2 +23 0.9 0.9 44 -48 -4 -21 1 +23 0.7 0.8 41 -32 18 -2 1 +23 0.3 0.1 41 -28 -15 -48 1 +23 0.5 0.7 48 47 -30 -39 1 +23 0.5 0.3 14 -47 19 6 2 +23 0.4 0.3 48 11 36 -48 1 +23 0.9 0.1 19 12 6 -31 1 +23 0.3 0.3 13 -23 -29 -37 1 +23 0.5 0.4 0 -44 50 -25 2 +23 0.7 0.4 -13 -25 10 -45 1 +23 0.7 0.4 -29 -37 47 31 2 +23 0.5 0.2 45 -2 12 -1 1 +23 0.5 0.6 14 -33 -28 -36 1 +23 0.3 0.5 40 -21 31 16 2 +23 0.1 0.6 -15 -18 10 -22 2 +23 0.9 0.8 22 16 35 -37 2 +23 0.7 0.5 28 -34 29 -27 1 +23 0.9 0.4 48 -49 10 -45 1 +23 0.2 0.7 -27 -37 21 -42 2 +23 0.8 0.9 6 -45 5 -35 1 +23 0.8 0.5 43 -27 10 -50 1 +23 0.3 0.5 47 -20 16 3 2 +23 0.2 0.7 26 19 15 -48 1 +23 0.4 0.9 26 -17 
-13 -32 1 +23 0.2 0.6 12 -45 3 -31 2 +23 0.9 0.9 -21 -27 -3 -33 2 +23 0.9 0.6 -4 -22 19 -49 2 +23 0.2 0.4 -22 -39 20 -13 2 +23 0.4 0.6 34 22 -6 -33 1 +23 0.3 0.2 29 0 -26 -41 1 +23 0.7 0.2 48 24 36 16 1 +23 0.3 0.6 -31 -33 -31 -42 1 +23 0.2 0.5 -14 -49 39 24 2 +23 0.1 0.3 1 -17 43 -13 2 +23 0.9 0.8 -13 -33 -7 -8 2 +23 0.3 0.2 -19 -44 26 -49 2 +23 0.3 0.6 39 -42 40 -41 2 +23 0.3 0.6 30 6 25 -20 1 +23 0.1 0.1 33 20 -14 -49 1 +23 0.8 0.2 -4 -13 0 -43 1 +23 0.2 0.2 44 12 39 -42 1 +23 0.4 0.7 -24 -27 -35 -44 1 +23 0.9 0.4 15 -4 -13 -45 1 +23 0.9 0.1 -19 -39 38 -11 2 +23 0.4 0.7 41 2 -35 -41 1 +23 0.5 0.9 20 -31 13 -22 2 +23 0.1 0.3 20 -2 34 -49 1 +23 0.8 0.4 -18 -31 40 -24 2 +23 0.8 0.5 23 -29 40 -43 1 +23 0.7 0.5 15 -15 43 40 2 +23 0.8 0.3 43 20 11 5 1 +23 0.1 0.2 39 -12 8 -7 2 +23 0.9 0.8 -23 -33 42 8 2 +23 0.1 0.4 16 15 42 -21 1 +23 0.3 0.6 -18 -48 -14 -17 2 +23 0.3 0.7 32 21 49 39 2 +23 0.8 0.1 47 -14 49 15 1 +23 0.1 0.2 30 -37 -7 -38 2 +23 0.1 0.1 27 -37 34 -21 2 +23 0.5 0.9 26 -41 15 -17 2 +23 0.7 0.3 37 32 -12 -48 1 +23 0.1 0.8 45 15 -36 -48 1 +23 0.2 0.2 -31 -39 16 -33 2 +23 0.3 0.4 34 11 6 -10 1 +23 0.9 0.7 38 36 44 -24 1 +23 0.3 0.9 47 -2 34 17 2 +23 0.2 0.9 35 -4 28 -14 2 +23 0.9 0.8 15 -16 -21 -45 1 +23 0.5 0.7 46 45 -10 -44 1 +23 0.4 0.8 49 -23 -26 -32 1 +23 0.3 0.2 -19 -34 47 29 2 +23 0.1 0.3 24 -14 27 -48 2 +23 0.6 0.9 16 11 10 -6 1 +23 0.8 0.8 5 -9 -3 -46 1 +23 0.8 0.9 29 -34 37 -16 2 +23 0.6 0.7 16 -44 -5 -33 1 +23 0.1 0.2 4 -10 8 -5 2 +23 0.9 0.1 44 8 42 -42 1 +23 0.8 0.2 50 30 26 6 1 +23 0.6 0.9 50 -42 33 1 2 +23 0.5 0.1 13 -50 -29 -47 1 +23 0.3 0.2 32 9 40 6 1 +23 0.4 0.7 43 -24 -19 -40 1 +23 0.1 0.8 -16 -38 43 -2 2 +23 0.3 0.9 -15 -18 39 37 2 +23 0.9 0.8 34 8 6 2 1 +23 0.6 0.5 3 -23 -13 -20 1 +23 0.1 0.4 41 26 43 -44 1 +23 0.3 0.5 -9 -46 34 -27 2 +23 0.1 0.6 -14 -21 -25 -46 1 +23 0.3 0.3 23 -4 41 25 2 +23 0.2 0.4 -27 -34 29 3 2 +23 0.4 0.5 -10 -11 18 1 2 +23 0.3 0.9 32 -24 42 23 2 +23 0.9 0.3 5 -37 39 -23 1 +23 0.2 0.7 40 -6 17 -46 2 +23 0.6 0.6 43 -10 45 8 2 +23 0.2 0.4 18 11 34 -38 1 +23 0.2 0.1 50 33 41 1 1 +23 0.4 0.1 43 -40 2 -39 1 +23 0.9 0.2 -16 -50 -4 -34 1 +23 0.8 0.2 36 -26 45 -26 1 +23 0.9 0.9 28 -10 46 -25 2 +23 0.9 0.5 28 0 37 30 2 +23 0.3 0.6 -23 -28 -16 -42 2 +23 0.3 0.4 7 -8 37 -37 2 +23 0.2 0.3 39 8 33 -28 1 +23 0.4 0.2 28 -35 29 -43 1 +23 0.7 0.9 38 20 29 -20 2 +23 0.6 0.9 -5 -27 -15 -42 1 +23 0.9 0.2 -4 -14 46 -23 1 +23 0.2 0.7 41 22 13 -40 1 +23 0.6 0.3 33 -31 -5 -24 1 +23 0.3 0.6 32 16 42 29 2 +23 0.1 0.9 42 -10 -26 -45 1 +23 0.1 0.3 26 10 45 -22 1 +23 0.6 0.5 -33 -42 1 -16 2 +23 0.2 0.8 16 1 31 0 2 +23 0.4 0.5 -20 -50 49 -30 2 +23 0.2 0.7 37 25 24 23 1 +23 0.3 0.6 34 25 15 -12 1 +23 0.5 0.3 13 -38 47 -49 1 +23 0.4 0.8 6 -4 18 -14 2 +23 0.3 0.5 48 -10 16 -19 2 +23 0.2 0.3 -28 -38 -24 -35 1 +23 0.4 0.9 41 -38 -26 -48 1 +23 0.7 0.2 46 -18 31 -31 1 +23 0.8 0.8 42 8 18 -28 1 +23 0.6 0.4 9 -45 31 -30 2 +23 0.7 0.4 30 9 36 24 1 +23 0.8 0.4 48 40 -6 -32 1 +23 0.8 0.1 30 -22 3 -6 1 +23 0.1 0.2 -37 -44 49 42 2 +23 0.7 0.4 8 -43 49 38 2 +23 0.8 0.2 49 -37 -6 -23 1 +23 0.9 0.2 18 -12 12 0 1 +23 0.2 0.3 47 -16 47 23 2 +23 0.7 0.4 4 -2 42 12 2 +23 0.3 0.6 35 -44 37 7 2 +23 0.2 0.5 19 2 22 -46 1 +23 0.2 0.4 5 -43 42 0 2 +23 0.1 0.4 -21 -30 46 -45 2 +23 0.1 0.9 -9 -22 3 -31 2 +23 0.5 0.3 47 -50 -13 -15 1 +23 0.7 0.6 13 -20 -36 -41 1 +23 0.9 0.6 -2 -32 35 11 2 +23 0.3 0.4 27 -35 41 -10 2 +23 0.9 0.9 -45 -47 -23 -34 2 +23 0.6 0.7 -15 -22 23 5 2 +23 0.2 0.1 26 22 -39 -50 1 +23 0.3 0.3 26 12 5 -35 1 +23 0.6 0.1 -25 -26 20 -47 1 +23 0.5 0.8 -15 -45 32 -32 2 +23 0.1 0.9 38 
29 -11 -15 1 +23 0.5 0.1 37 -49 39 38 2 +23 0.3 0.8 40 7 41 6 2 +23 0.5 0.1 1 -35 21 -13 2 +23 0.2 0.4 25 20 35 8 1 +23 0.8 0.7 50 -11 25 12 1 +23 0.9 0.1 33 22 29 -38 1 +23 0.7 0.2 13 -48 21 -6 1 +23 0.8 0.4 22 -9 18 -50 1 +23 0.1 0.2 9 -48 48 -30 2 +23 0.1 0.8 7 -5 18 -21 2 +23 0.9 0.9 18 -49 48 16 2 +23 0.7 0.3 33 8 0 -25 1 +23 0.8 0.1 46 -4 -9 -32 1 +23 0.2 0.5 -16 -46 44 13 2 +23 0.3 0.2 44 -44 21 10 2 +23 0.2 0.6 49 1 7 -3 1 +23 0.1 0.2 21 -13 44 -3 2 +23 0.2 0.6 35 -14 -5 -7 1 +23 0.2 0.2 -9 -39 30 18 2 +23 0.8 0.7 -23 -33 19 -43 2 +23 0.1 0.1 43 -24 0 -5 2 +23 0.2 0.5 48 4 -23 -46 1 +23 0.4 0.5 -16 -23 25 -36 2 +23 0.4 0.8 33 31 -19 -41 1 +23 0.5 0.2 29 -2 31 -10 1 +23 0.8 0.6 29 -45 32 -37 1 +23 0.2 0.3 26 -30 39 -49 1 +23 0.9 0.3 -26 -36 -42 -45 1 +23 0.6 0.5 43 -24 -21 -38 1 +23 0.9 0.9 8 -4 6 -49 1 +23 0.2 0.4 -23 -45 20 -3 2 +23 0.5 0.8 -7 -49 38 -38 2 +23 0.6 0.1 -5 -38 21 -17 2 +23 0.2 0.5 26 15 -18 -37 1 +23 0.1 0.4 5 -16 41 17 2 +23 0.6 0.2 8 5 34 32 2 +23 0.7 0.1 -13 -14 31 26 2 +23 0.5 0.4 38 21 33 -45 1 +23 0.4 0.7 42 -13 42 -12 2 +23 0.1 0.8 -28 -31 50 23 2 +23 0.6 0.4 19 -26 21 -36 1 +23 0.3 0.9 39 -19 41 38 2 +23 0.5 0.5 -17 -31 10 -23 2 +23 0.6 0.4 47 41 -22 -43 1 +23 0.6 0.3 1 -32 2 -17 1 +23 0.6 0.4 -5 -28 19 -4 2 +23 0.6 0.2 27 -26 33 -47 1 +23 0.8 0.7 36 -29 25 -14 1 +23 0.3 0.6 -23 -29 23 -27 2 +23 0.1 0.1 15 12 33 24 2 +23 0.8 0.9 6 -50 -27 -35 1 +23 0.2 0.5 46 19 -20 -40 1 +23 0.2 0.8 14 4 17 -10 2 +23 0.3 0.7 8 -25 -2 -41 2 +23 0.3 0.2 27 -33 50 -49 1 +23 0.8 0.5 13 -30 0 -10 1 +23 0.3 0.8 22 5 -2 -19 1 +23 0.6 0.2 10 -15 41 10 2 +23 0.9 0.9 -10 -40 36 -38 2 +23 0.6 0.7 24 11 -18 -48 1 +23 0.4 0.3 1 -44 30 -21 2 +23 0.7 0.6 39 10 41 -9 1 +23 0.6 0.8 22 -36 43 27 2 +23 0.1 0.5 35 25 11 -2 1 +23 0.3 0.6 2 -37 -39 -45 1 +23 0.4 0.6 48 24 4 -24 1 +23 0.1 0.5 27 -4 6 2 2 +23 0.9 0.8 42 28 38 -44 1 +23 0.1 0.2 48 -24 31 27 2 +23 0.3 0.9 45 -20 3 -6 2 +23 0.8 0.7 37 -31 30 26 2 +23 0.3 0.7 -6 -19 30 10 2 +23 0.9 0.4 39 16 50 -46 1 +23 0.2 0.1 47 -3 41 11 2 +23 0.1 0.8 -34 -46 7 -9 2 +23 0.3 0.3 38 32 37 -18 1 +23 0.2 0.6 31 -46 30 -38 2 +23 0.7 0.2 -6 -32 -1 -50 1 +23 0.8 0.8 35 34 20 -29 1 +23 0.4 0.9 -22 -45 17 -36 2 +23 0.1 0.4 47 -48 39 -8 2 +23 0.9 0.1 -41 -45 16 -45 1 +23 0.1 0.3 21 -18 35 -16 2 +23 0.2 0.9 43 1 31 13 2 +23 0.6 0.1 8 -35 31 -35 1 +23 0.9 0.5 31 17 17 -22 1 +23 0.7 0.6 38 0 -11 -30 1 +23 0.5 0.9 45 22 45 6 2 +23 0.3 0.1 -9 -35 49 6 2 +23 0.2 0.4 -18 -21 -16 -45 1 +23 0.7 0.6 14 -14 -22 -38 1 +23 0.4 0.6 34 -4 -28 -29 1 +23 0.2 0.6 5 -38 -30 -50 1 +24 0.9 0.3 0 -26 50 12 2 +24 0.7 0.4 29 26 -36 -50 1 +24 0.7 0.1 26 -30 37 -14 1 +24 0.9 0.4 -5 -30 36 1 2 +24 0.3 0.1 37 5 -3 -25 1 +24 0.8 0.5 5 -16 -4 -29 1 +24 0.5 0.6 27 -20 -24 -43 1 +24 0.3 0.6 -5 -12 2 -4 2 +24 0.5 0.2 -12 -27 -10 -38 1 +24 0.5 0.3 46 -40 17 -11 1 +24 0.5 0.6 47 -6 35 -27 1 +24 0.5 0.7 36 32 1 -41 1 +24 0.6 0.9 -34 -50 48 43 2 +24 0.7 0.1 41 -47 18 -19 1 +24 0.7 0.4 37 25 -8 -30 1 +24 0.2 0.3 31 -26 -13 -48 1 +24 0.4 0.6 28 -43 29 7 2 +24 0.8 0.4 36 -30 42 32 1 +24 0.4 0.4 36 -25 -27 -38 1 +24 0.3 0.5 22 -41 35 -44 2 +24 0.6 0.7 32 -25 27 -13 2 +24 0.5 0.2 36 20 25 7 1 +24 0.9 0.2 7 -33 -9 -36 1 +24 0.6 0.5 47 29 15 -41 1 +24 0.6 0.6 4 -50 39 -2 2 +24 0.6 0.2 49 15 -2 -42 1 +24 0.8 0.9 -31 -40 10 -30 2 +24 0.7 0.2 -4 -49 -18 -47 1 +24 0.6 0.9 35 15 22 -30 2 +24 0.2 0.6 9 -14 38 -39 2 +24 0.1 0.8 21 -34 -20 -22 2 +24 0.2 0.7 13 11 45 -5 2 +24 0.5 0.5 -29 -49 -23 -50 1 +24 0.4 0.3 15 -21 50 -21 1 +24 0.9 0.9 22 7 -37 -45 1 +24 0.3 0.6 44 26 3 -38 1 +24 0.1 0.7 44 -25 -27 -36 1 
+24 0.4 0.9 -26 -38 11 -38 2 +24 0.3 0.2 42 -15 2 -39 1 +24 0.2 0.8 6 -14 27 -48 2 +24 0.4 0.4 40 7 47 5 2 +24 0.9 0.6 45 -11 -17 -49 1 +24 0.4 0.2 14 -45 -11 -32 1 +24 0.1 0.7 14 -19 18 -11 2 +24 0.4 0.5 25 -17 50 -28 2 +24 0.5 0.6 18 -38 30 25 2 +24 0.5 0.4 -12 -18 3 -10 1 +24 0.7 0.3 2 -44 -11 -29 1 +24 0.6 0.7 47 3 46 -2 2 +24 0.5 0.4 -23 -37 20 -45 2 +24 0.7 0.2 2 -28 -27 -45 1 +24 0.1 0.4 44 14 6 -19 1 +24 0.3 0.4 6 -44 46 -1 2 +24 0.1 0.7 15 -21 34 -7 2 +24 0.3 0.4 40 -43 -8 -30 1 +24 0.3 0.9 40 -33 41 -41 2 +24 0.5 0.5 29 -22 0 -5 2 +24 0.6 0.2 16 -45 22 -45 1 +24 0.8 0.1 -3 -42 17 15 2 +24 0.6 0.6 18 -6 50 31 2 +24 0.6 0.9 -15 -19 36 -37 2 +24 0.8 0.8 11 -18 19 -24 2 +24 0.1 0.9 -39 -42 41 -21 2 +24 0.2 0.4 -39 -49 2 -23 2 +24 0.4 0.5 36 6 -18 -23 1 +24 0.7 0.9 -15 -30 39 8 2 +24 0.1 0.9 44 -40 10 -32 2 +24 0.1 0.2 23 -13 -37 -46 1 +24 0.4 0.1 32 22 29 -31 1 +24 0.4 0.4 15 -3 32 19 1 +24 0.3 0.8 47 36 35 16 1 +24 0.3 0.5 37 32 7 -16 1 +24 0.6 0.6 41 -36 46 -10 2 +24 0.8 0.7 50 17 -14 -44 1 +24 0.3 0.9 7 -44 27 -4 2 +24 0.9 0.8 39 -37 21 -30 1 +24 0.7 0.4 0 -34 -22 -23 1 +24 0.1 0.9 12 -13 32 18 2 +24 0.4 0.9 -8 -49 29 24 2 +24 0.8 0.4 -5 -36 45 -8 2 +24 0.4 0.2 8 -13 -33 -48 1 +24 0.6 0.4 30 14 49 -31 1 +24 0.1 0.1 33 28 -32 -33 1 +24 0.4 0.1 44 4 18 -17 1 +24 0.7 0.6 -16 -40 3 -46 1 +24 0.5 0.6 45 -8 31 -49 2 +24 0.1 0.1 26 -50 23 -6 2 +24 0.7 0.1 -21 -42 49 -23 2 +24 0.1 0.3 -3 -30 41 -40 2 +24 0.3 0.6 17 -26 48 -4 2 +24 0.7 0.7 17 7 5 -43 1 +24 0.4 0.1 42 6 -14 -24 1 +24 0.7 0.9 43 -12 50 -13 2 +24 0.4 0.8 21 -40 9 -50 1 +24 0.1 0.5 27 -19 -30 -44 1 +24 0.4 0.8 43 25 -1 -42 1 +24 0.1 0.2 14 -22 -22 -30 1 +24 0.6 0.9 35 29 -20 -49 1 +24 0.9 0.9 25 -50 0 -31 1 +24 0.5 0.7 9 -45 48 -33 2 +24 0.9 0.5 43 22 11 -9 1 +24 0.4 0.3 -12 -46 -4 -29 2 +24 0.2 0.3 -29 -40 12 -11 1 +24 0.6 0.3 41 7 26 21 2 +24 0.3 0.7 29 9 30 -23 1 +24 0.1 0.5 46 29 49 25 1 +24 0.4 0.8 37 8 44 -49 1 +24 0.5 0.8 3 -5 46 -4 2 +24 0.6 0.3 -10 -48 16 -9 1 +24 0.7 0.6 14 -39 38 -11 2 +24 0.2 0.5 32 13 -27 -48 1 +24 0.7 0.2 -1 -21 14 -15 2 +24 0.1 0.3 29 3 14 -38 1 +24 0.9 0.7 29 -31 -21 -41 1 +24 0.4 0.9 30 -33 10 -18 2 +24 0.9 0.9 50 31 44 -12 1 +24 0.5 0.1 22 -15 -6 -9 1 +24 0.3 0.1 38 23 31 16 2 +24 0.8 0.6 50 8 32 -34 1 +24 0.1 0.7 -7 -15 48 -22 2 +24 0.4 0.9 -13 -50 45 34 2 +24 0.2 0.4 15 -23 49 31 2 +24 0.1 0.1 33 7 37 -21 1 +24 0.9 0.4 18 -45 28 12 2 +24 0.8 0.4 14 -43 -7 -25 1 +24 0.2 0.7 15 -31 -25 -43 1 +24 0.9 0.6 31 20 -16 -38 1 +24 0.7 0.4 -1 -6 19 -10 2 +24 0.6 0.5 40 -17 -5 -38 2 +24 0.3 0.2 36 -25 43 28 2 +24 0.7 0.5 50 -40 25 -21 1 +24 0.2 0.9 47 44 31 -35 1 +24 0.2 0.3 48 -27 46 21 2 +24 0.6 0.7 27 -19 33 -44 2 +24 0.8 0.1 39 -1 36 -13 1 +24 0.3 0.6 -10 -47 50 32 2 +24 0.6 0.4 27 7 17 -49 1 +24 0.1 0.8 28 -11 16 -2 2 +24 0.6 0.6 -7 -31 29 15 2 +24 0.9 0.2 49 47 -6 -29 1 +24 0.1 0.9 28 14 -12 -28 1 +24 0.1 0.4 15 3 -6 -10 1 +24 0.6 0.4 40 25 -14 -36 1 +24 0.2 0.3 6 -47 38 0 2 +24 0.4 0.3 -21 -26 41 16 2 +24 0.4 0.4 36 -20 5 -22 1 +24 0.8 0.5 33 -38 23 9 1 +24 0.2 0.4 5 2 16 0 2 +24 0.7 0.4 -19 -37 21 -21 2 +24 0.3 0.5 -4 -35 -7 -9 2 +24 0.1 0.1 -29 -33 29 23 2 +24 0.1 0.1 22 -13 28 -32 2 +24 0.3 0.4 37 16 48 -29 1 +24 0.4 0.5 6 -14 16 -36 1 +24 0.9 0.7 -1 -6 -8 -40 1 +24 0.4 0.3 -7 -40 47 6 2 +24 0.5 0.2 22 -32 20 -44 2 +24 0.7 0.4 14 -38 18 15 2 +24 0.3 0.6 36 -44 18 -20 2 +24 0.4 0.6 19 -49 -24 -43 1 +24 0.2 0.4 16 13 37 16 2 +24 0.8 0.6 41 30 29 15 2 +24 0.2 0.7 18 -23 40 24 2 +24 0.7 0.4 8 -2 -13 -46 2 +24 0.6 0.5 -10 -23 40 2 2 +24 0.8 0.3 18 -39 -9 -27 1 +24 0.3 0.7 -19 -31 23 8 2 +24 0.5 
0.3 -9 -15 48 34 2 +24 0.1 0.1 36 -24 -7 -43 1 +24 0.5 0.7 13 0 34 -39 2 +24 0.7 0.4 30 28 34 -27 1 +24 0.5 0.9 -10 -47 -24 -36 1 +24 0.1 0.7 2 -5 41 14 2 +24 0.3 0.7 8 -46 -19 -44 1 +24 0.5 0.2 17 -12 38 -49 1 +24 0.3 0.5 -33 -39 17 8 2 +24 0.2 0.4 14 -11 47 -44 1 +24 0.5 0.9 43 -41 -14 -36 1 +24 0.8 0.2 -18 -21 49 23 2 +24 0.6 0.3 49 -25 -45 -48 1 +24 0.4 0.6 16 -46 36 -2 2 +24 0.4 0.9 -8 -11 42 5 2 +24 0.5 0.7 50 7 30 -11 2 +24 0.9 0.6 46 6 3 -26 1 +24 0.6 0.5 -18 -49 36 -5 2 +24 0.5 0.3 38 6 -22 -44 1 +24 0.6 0.8 9 5 45 -23 1 +24 0.4 0.6 33 -14 14 -16 2 +24 0.4 0.6 39 -47 -22 -35 1 +24 0.9 0.2 11 -28 29 -12 1 +24 0.3 0.9 -26 -27 38 36 2 +24 0.5 0.6 32 -38 15 -30 1 +24 0.1 0.3 24 22 43 30 1 +24 0.6 0.3 27 -28 -41 -49 1 +24 0.8 0.8 22 12 42 31 2 +24 0.8 0.9 38 28 29 -41 2 +24 0.4 0.7 41 -38 -1 -33 2 +24 0.2 0.4 46 16 28 -48 1 +24 0.5 0.9 13 -30 36 16 2 +24 0.1 0.8 41 25 46 -18 1 +24 0.8 0.3 -34 -35 15 -2 2 +24 0.2 0.3 -16 -40 31 8 2 +24 0.9 0.3 44 6 -11 -19 1 +24 0.7 0.9 43 23 13 -16 1 +24 0.3 0.4 22 -50 13 -33 1 +24 0.4 0.1 33 -1 48 -22 1 +24 0.6 0.5 -24 -45 38 35 2 +24 0.6 0.3 42 -47 20 -26 1 +24 0.9 0.4 19 14 -1 -42 1 +24 0.7 0.2 -18 -43 2 -30 1 +24 0.1 0.4 19 -14 12 8 2 +24 0.2 0.8 18 6 13 -33 2 +24 0.6 0.6 49 -50 40 32 2 +24 0.9 0.8 -9 -11 15 -30 2 +24 0.4 0.9 -24 -41 -31 -46 1 +24 0.1 0.3 -5 -42 -25 -43 1 +24 0.8 0.9 -18 -36 -3 -32 2 +24 0.8 0.1 14 -14 26 -38 1 +24 0.5 0.9 -4 -20 -6 -22 1 +24 0.1 0.4 17 -14 -17 -22 1 +24 0.2 0.4 6 -30 19 -37 2 +24 0.7 0.9 34 -33 19 9 2 +24 0.8 0.5 31 -36 5 -34 1 +24 0.8 0.7 7 -14 -10 -43 2 +24 0.4 0.3 34 10 29 -1 1 +24 0.4 0.5 1 -33 -20 -22 1 +24 0.1 0.3 18 -12 -3 -8 2 +24 0.7 0.6 4 -36 15 -37 2 +24 0.6 0.8 48 -23 2 -6 1 +24 0.1 0.4 -4 -9 43 -1 1 +24 0.5 0.1 16 5 35 31 2 +24 0.1 0.5 -3 -40 -13 -29 2 +24 0.8 0.7 40 -17 4 -44 1 +24 0.3 0.3 45 -15 26 -17 1 +24 0.8 0.7 38 -31 -16 -44 1 +24 0.7 0.3 -34 -41 4 -35 2 +24 0.9 0.2 -17 -47 36 14 2 +24 0.9 0.6 23 -3 49 4 2 +24 0.4 0.2 42 6 36 16 1 +24 0.5 0.7 -10 -32 19 -22 2 +24 0.8 0.9 31 -17 7 5 2 +24 0.9 0.8 49 -30 5 -19 1 +24 0.1 0.9 37 -24 48 44 2 +24 0.8 0.7 8 -36 -26 -31 1 +24 0.7 0.9 45 37 46 -39 1 +24 0.6 0.1 38 19 17 -4 1 +24 0.9 0.6 37 -7 1 0 1 +24 0.1 0.7 44 -22 47 -49 2 +24 0.6 0.4 28 13 37 -1 1 +24 0.8 0.3 -17 -44 28 -22 2 +24 0.7 0.3 44 10 25 6 1 +24 0.8 0.2 -29 -33 10 8 2 +24 0.1 0.7 44 -36 5 -1 2 +24 0.8 0.7 21 10 13 -22 2 +24 0.1 0.6 30 22 45 11 1 +24 0.1 0.1 39 -18 46 40 2 +24 0.4 0.2 10 -3 31 26 2 +24 0.9 0.1 41 -22 -39 -44 1 +24 0.5 0.3 -10 -37 -12 -34 2 +24 0.9 0.5 0 -14 -5 -33 2 +24 0.7 0.7 22 -49 3 -24 1 +24 0.8 0.3 29 1 9 -43 1 +24 0.1 0.9 29 6 -38 -41 1 +24 0.5 0.7 -10 -27 39 -3 2 +24 0.3 0.8 -9 -10 34 -50 2 +24 0.4 0.1 43 -46 43 1 2 +24 0.7 0.7 15 -22 -2 -15 1 +24 0.6 0.5 39 -21 -24 -30 1 +24 0.3 0.4 39 -27 48 1 2 +24 0.3 0.7 20 -27 38 9 2 +24 0.6 0.9 43 36 24 -34 2 +24 0.3 0.8 -16 -33 29 -22 2 +24 0.3 0.9 1 -34 -11 -16 2 +24 0.4 0.4 15 -25 12 10 2 +24 0.5 0.2 23 -3 49 44 2 +24 0.7 0.6 -42 -44 23 -47 2 +24 0.5 0.1 46 35 27 19 1 +24 0.8 0.5 31 21 41 6 1 +24 0.3 0.8 2 -21 34 9 2 +24 0.7 0.7 22 -12 25 -15 2 +24 0.1 0.6 50 45 32 -13 1 +24 0.4 0.5 5 -47 41 -31 2 +24 0.2 0.8 17 -4 43 -48 2 +24 0.6 0.6 28 23 10 -7 1 +24 0.7 0.7 36 -35 23 -16 1 +24 0.7 0.8 33 -15 36 23 2 +24 0.5 0.3 -4 -16 -36 -39 1 +24 0.5 0.4 46 37 39 14 1 +24 0.1 0.4 50 3 -15 -29 1 +24 0.5 0.6 24 -36 2 -5 2 +24 0.3 0.8 25 -17 -15 -40 1 +24 0.1 0.4 33 -50 9 6 2 +24 0.2 0.9 -32 -48 38 -40 2 +24 0.3 0.4 39 13 31 3 1 +24 0.5 0.3 -7 -33 -11 -43 1 +24 0.5 0.6 12 6 -25 -39 1 +24 0.1 0.3 27 -32 49 -31 2 +24 0.1 0.6 -2 -34 5 -23 
1 +24 0.8 0.3 22 -45 16 10 1 +24 0.5 0.5 -16 -25 -6 -13 1 +25 0.4 0.5 41 1 38 24 2 +25 0.8 0.3 49 -23 -7 -43 1 +25 0.5 0.2 10 5 20 -16 2 +25 0.2 0.5 3 -43 34 14 2 +25 0.2 0.9 46 -2 -10 -17 1 +25 0.3 0.9 19 18 19 -16 1 +25 0.7 0.7 -26 -45 -34 -48 1 +25 0.9 0.3 28 -6 17 -25 1 +25 0.2 0.4 45 1 44 -3 1 +25 0.1 0.9 37 29 10 -46 1 +25 0.1 0.7 33 -38 -2 -48 1 +25 0.3 0.8 3 -36 -10 -29 2 +25 0.4 0.3 41 -48 36 -35 1 +25 0.3 0.5 -19 -42 25 14 2 +25 0.7 0.4 41 -43 29 9 1 +25 0.1 0.7 17 -28 9 -18 2 +25 0.1 0.1 23 -25 46 25 2 +25 0.9 0.9 35 18 47 -4 2 +25 0.4 0.3 28 -26 -6 -44 2 +25 0.3 0.3 17 9 50 8 2 +25 0.4 0.1 -25 -37 36 -10 2 +25 0.1 0.9 7 -10 -5 -13 1 +25 0.2 0.5 -18 -49 28 -43 2 +25 0.4 0.2 12 -20 -30 -36 1 +25 0.8 0.5 47 5 3 -26 1 +25 0.9 0.9 35 -13 29 11 2 +25 0.6 0.6 44 -7 15 -23 1 +25 0.1 0.6 50 17 32 -31 1 +25 0.7 0.6 3 -3 -23 -24 1 +25 0.8 0.1 10 -24 50 -29 1 +25 0.1 0.8 47 9 44 18 1 +25 0.6 0.8 21 -1 40 -11 2 +25 0.5 0.1 35 14 9 -20 1 +25 0.7 0.7 19 16 38 -13 1 +25 0.3 0.6 -30 -36 -17 -50 2 +25 0.6 0.8 22 -49 24 12 2 +25 0.2 0.5 -2 -12 32 -13 2 +25 0.3 0.2 34 19 -8 -10 1 +25 0.1 0.8 21 -15 45 -18 2 +25 0.2 0.5 -10 -21 20 3 2 +25 0.1 0.1 37 -28 -6 -27 1 +25 0.9 0.4 11 -33 37 2 2 +25 0.5 0.3 -15 -32 -19 -44 1 +25 0.2 0.7 -3 -48 -21 -48 2 +25 0.5 0.2 10 -38 -35 -42 1 +25 0.6 0.7 17 -28 2 -32 1 +25 0.3 0.5 -16 -21 3 -33 1 +25 0.2 0.2 46 23 8 -37 1 +25 0.6 0.8 44 -46 30 -34 2 +25 0.8 0.8 -36 -39 38 31 2 +25 0.7 0.7 31 3 -23 -30 1 +25 0.7 0.9 27 -12 30 -31 1 +25 0.5 0.3 38 -30 3 -16 1 +25 0.2 0.1 24 -26 27 4 2 +25 0.7 0.9 21 11 2 -4 1 +25 0.4 0.8 -49 -50 16 -29 2 +25 0.8 0.2 14 -43 46 11 2 +25 0.3 0.6 29 -36 18 -47 1 +25 0.7 0.1 30 -31 28 -7 1 +25 0.8 0.2 31 30 32 -21 1 +25 0.9 0.8 34 15 14 2 1 +25 0.6 0.7 46 -16 26 -1 2 +25 0.3 0.2 36 25 11 -49 1 +25 0.2 0.4 -5 -42 -9 -25 1 +25 0.8 0.4 0 -48 16 -21 2 +25 0.9 0.4 21 -17 16 11 2 +25 0.5 0.8 -27 -46 48 25 2 +25 0.1 0.9 20 -8 45 3 2 +25 0.9 0.4 -25 -26 6 -15 2 +25 0.8 0.8 -25 -33 10 -17 2 +25 0.9 0.3 38 -10 10 -26 1 +25 0.7 0.7 -30 -32 43 -14 2 +25 0.6 0.3 -11 -43 -39 -46 2 +25 0.4 0.2 7 -45 -36 -42 1 +25 0.9 0.9 -25 -43 37 -30 2 +25 0.8 0.4 -15 -35 17 -39 2 +25 0.7 0.6 -38 -39 43 -3 2 +25 0.4 0.2 -37 -44 -11 -44 2 +25 0.6 0.2 26 -44 11 -35 1 +25 0.9 0.5 22 -41 -11 -35 1 +25 0.6 0.5 24 15 -1 -19 1 +25 0.7 0.4 22 -10 13 -5 1 +25 0.7 0.5 38 26 -20 -35 1 +25 0.3 0.6 40 8 29 12 2 +25 0.8 0.4 12 -21 38 22 2 +25 0.7 0.7 25 -5 30 -43 1 +25 0.1 0.3 -20 -29 -17 -33 2 +25 0.6 0.7 -10 -44 0 -1 2 +25 0.5 0.7 50 8 12 -6 1 +25 0.7 0.5 30 -8 47 -4 2 +25 0.2 0.9 50 -36 30 -5 2 +25 0.3 0.9 12 -17 33 -17 2 +25 0.7 0.8 37 -24 24 18 2 +25 0.3 0.1 36 -26 -21 -36 1 +25 0.1 0.9 20 6 6 -7 2 +25 0.1 0.9 -40 -46 9 8 2 +25 0.2 0.6 48 29 0 -12 1 +25 0.4 0.8 9 -6 11 8 2 +25 0.7 0.6 12 -2 44 -22 2 +25 0.6 0.1 -14 -23 8 -11 2 +25 0.6 0.6 26 -19 -33 -44 1 +25 0.8 0.3 41 5 48 -47 1 +25 0.9 0.8 15 -33 42 35 2 +25 0.4 0.1 45 19 -4 -19 1 +25 0.3 0.4 -20 -28 -2 -11 1 +25 0.7 0.4 43 15 11 10 1 +25 0.9 0.3 -3 -41 46 32 2 +25 0.1 0.7 13 -11 -38 -45 1 +25 0.5 0.1 -15 -28 1 -32 2 +25 0.8 0.9 -22 -50 -24 -30 2 +25 0.5 0.5 -33 -50 -22 -44 2 +25 0.5 0.2 25 -32 25 0 2 +25 0.1 0.8 41 12 38 -2 1 +25 0.1 0.4 -29 -49 28 -47 2 +25 0.9 0.6 -37 -44 -10 -27 2 +25 0.7 0.4 37 -18 8 3 1 +25 0.7 0.3 45 43 24 18 1 +25 0.3 0.6 29 -3 -7 -14 2 +25 0.3 0.5 6 -35 24 -30 2 +25 0.3 0.7 -45 -47 18 -46 2 +25 0.5 0.4 7 -3 32 1 2 +25 0.9 0.5 26 14 -6 -8 1 +25 0.6 0.9 10 -47 48 46 2 +25 0.7 0.4 19 7 -13 -34 1 +25 0.9 0.8 8 -42 22 -14 2 +25 0.4 0.6 27 -11 -12 -42 1 +25 0.8 0.8 49 23 46 -35 1 +25 0.8 0.2 -23 -45 -4 -39 
2 +25 0.4 0.9 25 -1 38 22 2 +25 0.3 0.9 21 -45 -2 -36 1 +25 0.1 0.4 24 -43 28 9 2 +25 0.1 0.7 12 -10 49 -33 2 +25 0.1 0.5 28 -2 35 -9 2 +25 0.7 0.3 8 -31 -8 -16 1 +25 0.3 0.5 50 11 44 -24 1 +25 0.1 0.6 49 17 -5 -26 1 +25 0.9 0.6 -5 -44 41 -47 2 +25 0.4 0.2 -20 -33 6 -34 2 +25 0.5 0.7 15 -47 24 2 2 +25 0.4 0.7 47 -7 28 -39 1 +25 0.5 0.9 -22 -29 28 -46 2 +25 0.9 0.7 24 13 15 -30 1 +25 0.2 0.2 12 -45 29 -37 2 +25 0.4 0.9 36 -11 34 -32 2 +25 0.7 0.7 -39 -42 -45 -46 2 +25 0.6 0.7 -20 -34 32 25 2 +25 0.5 0.9 30 -14 -10 -29 1 +25 0.7 0.8 -15 -17 45 -27 2 +25 0.8 0.6 15 -44 41 12 2 +25 0.4 0.7 36 15 20 -9 1 +25 0.4 0.6 30 -32 -11 -12 1 +25 0.7 0.5 29 -26 24 -33 2 +25 0.9 0.7 36 -6 38 -36 1 +25 0.5 0.4 14 -13 48 45 2 +25 0.1 0.3 -34 -39 41 22 2 +25 0.8 0.4 24 -16 7 -45 1 +25 0.2 0.5 48 37 32 -8 1 +25 0.8 0.9 36 33 19 -14 1 +25 0.4 0.2 -25 -31 20 11 2 +25 0.9 0.1 -16 -17 -15 -23 2 +25 0.3 0.6 -40 -47 40 -14 2 +25 0.5 0.7 13 2 37 -27 1 +25 0.4 0.3 11 -30 42 -47 1 +25 0.2 0.4 41 -5 29 7 2 +25 0.1 0.4 3 -1 -5 -48 1 +25 0.5 0.6 -33 -46 26 -38 2 +25 0.8 0.3 23 -38 10 -42 1 +25 0.3 0.3 37 4 41 -30 1 +25 0.3 0.1 13 2 37 4 2 +25 0.9 0.3 12 -15 4 -17 1 +25 0.3 0.9 45 -31 36 -18 2 +25 0.4 0.3 25 -29 -12 -21 1 +25 0.6 0.9 35 -20 -23 -33 1 +25 0.8 0.2 4 -23 18 -22 1 +25 0.4 0.4 23 -12 32 -4 1 +25 0.1 0.1 50 -47 21 8 2 +25 0.1 0.4 18 -4 29 -22 2 +25 0.4 0.3 39 -34 -3 -27 1 +25 0.8 0.1 -21 -48 23 -12 2 +25 0.2 0.4 44 38 8 -23 1 +25 0.4 0.3 -7 -8 33 -22 2 +25 0.4 0.3 24 -30 33 -46 2 +25 0.1 0.6 16 -32 33 -5 2 +25 0.4 0.7 -30 -43 11 -47 1 +25 0.5 0.8 29 -38 9 -19 2 +25 0.3 0.6 -25 -45 41 -13 2 +25 0.6 0.2 9 -18 18 -6 2 +25 0.5 0.1 37 -38 27 -22 1 +25 0.7 0.1 -10 -17 27 -15 1 +25 0.8 0.2 41 -45 29 19 1 +25 0.5 0.1 46 -42 19 -29 1 +25 0.4 0.5 29 5 -18 -39 1 +25 0.6 0.3 30 -24 1 -18 1 +25 0.8 0.2 -10 -22 6 -42 1 +25 0.2 0.4 -1 -46 -11 -19 2 +25 0.4 0.4 18 -30 9 -22 2 +25 0.5 0.6 26 11 44 -8 2 +25 0.9 0.6 -31 -43 12 -18 2 +25 0.1 0.2 44 -45 36 29 2 +25 0.1 0.7 22 7 11 4 2 +25 0.2 0.3 38 -33 39 -39 1 +25 0.8 0.8 43 -38 42 -19 2 +25 0.9 0.9 -1 -45 -26 -27 1 +25 0.5 0.4 30 19 31 -34 1 +25 0.6 0.1 48 -29 43 14 1 +25 0.2 0.5 46 3 25 5 2 +25 0.7 0.5 -1 -4 34 -26 2 +25 0.1 0.8 -12 -33 26 -18 2 +25 0.7 0.6 50 6 -6 -48 1 +25 0.2 0.9 9 -50 -10 -29 2 +25 0.2 0.9 34 -50 15 -3 2 +25 0.2 0.7 10 -46 19 -29 2 +25 0.4 0.8 12 -9 -6 -17 1 +25 0.5 0.4 -19 -38 -6 -50 1 +25 0.3 0.7 33 -21 -15 -17 1 +25 0.1 0.4 7 -2 36 -35 2 +25 0.3 0.8 -13 -33 25 5 2 +25 0.8 0.4 40 20 49 1 1 +25 0.7 0.6 -4 -26 34 -5 2 +25 0.9 0.2 47 43 14 2 1 +25 0.6 0.4 30 14 17 -22 1 +25 0.3 0.4 7 -25 24 -32 2 +25 0.1 0.8 29 24 40 -29 1 +25 0.6 0.8 -18 -38 -36 -46 2 +25 0.6 0.8 -27 -48 49 6 2 +25 0.1 0.7 8 -48 20 -15 2 +25 0.9 0.8 28 4 32 28 1 +25 0.2 0.7 18 2 37 -10 2 +25 0.9 0.2 36 26 38 -19 1 +25 0.4 0.9 -23 -24 15 -14 2 +25 0.2 0.2 -22 -33 -7 -22 2 +25 0.8 0.1 -3 -42 43 -20 2 +25 0.9 0.1 -19 -39 2 0 2 +25 0.7 0.5 8 -24 21 -34 1 +25 0.1 0.4 -12 -13 40 -35 2 +25 0.3 0.9 41 29 13 -14 1 +25 0.4 0.1 -43 -44 -22 -50 1 +25 0.8 0.6 46 44 0 -14 1 +25 0.2 0.3 41 -35 37 15 2 +25 0.5 0.8 36 -9 12 -39 1 +25 0.7 0.3 38 11 22 -11 1 +25 0.8 0.1 44 -49 32 4 1 +25 0.1 0.3 -26 -38 34 8 2 +25 0.3 0.7 7 -15 29 9 2 +25 0.8 0.3 26 -18 9 -43 1 +25 0.5 0.9 10 -46 18 -49 2 +25 0.9 0.7 -14 -36 26 -16 2 +25 0.9 0.1 44 -15 5 -33 2 +25 0.7 0.6 40 -15 40 30 2 +25 0.2 0.6 22 2 33 -12 1 +25 0.7 0.8 33 -28 30 15 2 +25 0.7 0.8 17 -12 -14 -44 1 +25 0.2 0.2 11 9 40 -1 1 +25 0.5 0.1 23 -7 49 -7 1 +25 0.6 0.3 49 0 37 -14 1 +25 0.6 0.8 43 -44 27 -16 2 +25 0.5 0.8 -28 -48 45 32 2 +25 0.8 0.4 50 -22 17 7 1 
+25 0.9 0.5 24 -40 13 -10 1 +25 0.7 0.4 34 25 42 -24 1 +25 0.3 0.9 41 -10 -5 -37 1 +25 0.7 0.1 20 -22 -26 -49 1 +25 0.7 0.1 42 32 40 0 1 +25 0.8 0.8 -9 -10 21 15 2 +25 0.3 0.4 29 -49 32 11 2 +25 0.6 0.6 16 -11 45 11 2 +25 0.3 0.9 32 -37 -9 -31 1 +25 0.4 0.7 -9 -28 47 -15 2 +25 0.5 0.4 12 -38 5 -17 1 +25 0.2 0.6 -11 -39 17 -22 2 +25 0.8 0.6 -1 -7 48 26 2 +25 0.1 0.3 37 -15 44 -3 2 +25 0.1 0.5 -12 -47 39 1 2 +25 0.1 0.1 30 7 40 -29 1 +25 0.7 0.1 -12 -41 7 -19 2 +25 0.6 0.8 4 -5 7 -14 1 +25 0.5 0.4 32 -12 26 -48 2 +25 0.2 0.9 22 -9 -13 -36 1 +25 0.4 0.2 4 -10 47 -26 1 +25 0.4 0.4 34 -21 2 -24 1 +25 0.7 0.3 38 -33 0 -50 1 +25 0.3 0.3 34 -19 27 18 2 +25 0.6 0.6 44 -10 2 -50 1 +25 0.6 0.4 -34 -38 39 20 2 +25 0.7 0.6 28 -44 33 -23 1 +25 0.8 0.7 -3 -8 29 -38 2 +25 0.3 0.8 31 -29 33 6 2 +25 0.6 0.9 45 -34 -2 -4 1 +25 0.6 0.2 19 -23 21 7 2 +25 0.8 0.6 -14 -33 1 -39 2 +25 0.3 0.5 22 3 -3 -39 1 +25 0.9 0.9 -26 -45 34 29 2 +25 0.7 0.3 25 17 33 -47 1 +25 0.4 0.2 -13 -34 6 -45 1 +25 0.7 0.5 29 -16 -4 -35 1 +25 0.8 0.9 26 -39 36 -38 2 +25 0.9 0.2 12 -4 -28 -46 1 +25 0.4 0.1 16 -17 22 -44 1 +25 0.2 0.7 49 10 -1 -16 1 +25 0.6 0.5 -13 -43 12 -47 1 +26 0.7 0.7 28 -31 44 31 2 +26 0.8 0.4 40 -2 49 -21 1 +26 0.7 0.9 -40 -46 32 -50 2 +26 0.2 0.2 1 -47 30 -5 2 +26 0.7 0.2 25 -30 47 17 2 +26 0.4 0.2 50 39 -10 -18 1 +26 0.4 0.2 -31 -48 13 -50 2 +26 0.7 0.1 23 0 36 -24 1 +26 0.1 0.3 26 10 24 -32 1 +26 0.8 0.2 47 -8 44 -1 1 +26 0.2 0.4 43 3 23 12 2 +26 0.3 0.6 20 -18 22 -21 2 +26 0.9 0.5 -5 -38 33 -13 2 +26 0.9 0.8 -20 -40 48 12 2 +26 0.8 0.3 -2 -20 48 -27 2 +26 0.3 0.5 38 -29 -30 -43 1 +26 0.7 0.1 -15 -18 -8 -45 1 +26 0.8 0.6 10 -43 -30 -38 1 +26 0.4 0.5 22 -38 3 -26 1 +26 0.7 0.9 28 -9 -3 -21 1 +26 0.2 0.9 12 -31 21 -35 2 +26 0.5 0.4 38 -33 -2 -40 1 +26 0.9 0.1 16 -32 4 -50 1 +26 0.5 0.8 46 -12 41 -36 2 +26 0.1 0.2 21 -41 -10 -30 1 +26 0.9 0.9 29 -1 14 -36 1 +26 0.5 0.7 12 -3 -37 -39 1 +26 0.2 0.2 33 -29 50 27 2 +26 0.2 0.2 -12 -45 -26 -35 1 +26 0.7 0.1 9 -18 33 31 2 +26 0.7 0.8 24 -21 10 -27 1 +26 0.5 0.6 48 -41 26 -23 1 +26 0.2 0.8 35 -17 28 10 2 +26 0.7 0.7 -17 -28 -43 -49 1 +26 0.3 0.5 38 21 -20 -31 1 +26 0.9 0.1 -2 -23 -18 -35 1 +26 0.5 0.4 45 -23 24 -1 1 +26 0.5 0.1 -40 -43 8 2 2 +26 0.1 0.7 44 41 -7 -26 1 +26 0.4 0.7 30 -12 50 7 2 +26 0.7 0.6 18 -48 21 -19 1 +26 0.7 0.4 2 -40 -4 -32 1 +26 0.7 0.5 -27 -50 -38 -47 1 +26 0.5 0.1 45 -14 -1 -42 1 +26 0.4 0.5 7 -31 43 41 2 +26 0.5 0.1 -19 -26 32 13 2 +26 0.7 0.4 21 20 48 -42 1 +26 0.7 0.4 -20 -46 -14 -33 1 +26 0.3 0.7 -7 -15 2 -8 2 +26 0.1 0.7 49 -23 41 -16 2 +26 0.7 0.9 49 47 47 41 2 +26 0.7 0.9 48 -49 35 33 2 +26 0.8 0.9 30 -2 -20 -35 1 +26 0.7 0.1 38 29 -32 -47 1 +26 0.8 0.1 22 7 8 -15 1 +26 0.1 0.1 24 13 -15 -26 1 +26 0.1 0.2 -2 -44 37 34 2 +26 0.7 0.3 25 -17 14 -38 1 +26 0.7 0.3 41 28 12 11 1 +26 0.1 0.1 -36 -47 36 -17 2 +26 0.7 0.9 14 5 46 35 2 +26 0.6 0.2 36 32 29 1 1 +26 0.1 0.4 27 -44 22 -38 2 +26 0.8 0.7 47 6 39 -2 1 +26 0.2 0.4 49 -21 49 29 2 +26 0.7 0.7 4 -34 45 -47 2 +26 0.8 0.6 40 -12 -13 -19 1 +26 0.8 0.5 47 14 10 -43 1 +26 0.6 0.1 19 -39 32 -24 1 +26 0.1 0.5 -2 -18 -41 -46 1 +26 0.6 0.1 -14 -31 29 -20 2 +26 0.1 0.1 7 -10 31 -24 1 +26 0.1 0.8 40 39 12 -32 1 +26 0.7 0.4 43 -35 14 -33 1 +26 0.2 0.5 19 -31 42 9 2 +26 0.6 0.2 39 -4 41 -17 1 +26 0.2 0.2 1 -12 0 -42 1 +26 0.2 0.5 38 -15 -2 -21 1 +26 0.2 0.7 35 -27 32 2 2 +26 0.2 0.6 47 6 12 -23 1 +26 0.6 0.9 -12 -32 38 -6 2 +26 0.7 0.7 24 -29 5 4 2 +26 0.2 0.8 50 -44 25 -9 2 +26 0.5 0.7 -13 -22 23 -29 2 +26 0.6 0.3 3 -38 30 -5 2 +26 0.6 0.4 44 -44 -10 -18 1 +26 0.7 0.5 -36 -38 3 -23 2 +26 0.6 0.7 -9 -15 2 
-32 2 +26 0.9 0.4 40 16 44 -32 1 +26 0.3 0.3 12 2 39 -29 1 +26 0.7 0.3 39 26 47 34 2 +26 0.4 0.4 5 -19 44 -6 2 +26 0.5 0.9 23 -35 -3 -22 1 +26 0.3 0.3 48 -15 10 -39 1 +26 0.6 0.2 9 -45 49 41 2 +26 0.9 0.6 40 -39 7 -8 1 +26 0.9 0.7 -8 -13 -41 -49 1 +26 0.6 0.6 29 -5 21 -21 1 +26 0.5 0.8 1 -45 48 37 2 +26 0.6 0.8 6 2 9 -2 2 +26 0.9 0.8 36 35 41 -13 1 +26 0.7 0.9 17 -5 -15 -43 1 +26 0.3 0.9 8 -27 10 5 2 +26 0.3 0.8 -22 -25 16 13 2 +26 0.8 0.3 -20 -44 -6 -11 2 +26 0.6 0.2 9 -9 46 -13 1 +26 0.7 0.2 -13 -42 30 10 2 +26 0.7 0.2 -11 -39 25 15 2 +26 0.5 0.7 -7 -9 41 25 2 +26 0.7 0.2 -5 -37 30 -31 2 +26 0.4 0.7 24 -12 -4 -41 1 +26 0.5 0.4 0 -31 -24 -26 1 +26 0.7 0.3 18 -20 7 -34 1 +26 0.9 0.1 -26 -40 22 12 2 +26 0.2 0.7 -22 -30 -16 -20 2 +26 0.7 0.3 39 -20 35 24 1 +26 0.2 0.1 36 -47 35 28 2 +26 0.4 0.6 18 -32 41 39 2 +26 0.7 0.7 50 42 24 -13 1 +26 0.4 0.6 -14 -34 18 -17 2 +26 0.3 0.1 47 -24 48 -18 1 +26 0.7 0.5 42 30 13 -11 1 +26 0.6 0.5 33 28 -40 -44 1 +26 0.4 0.4 -33 -40 16 -18 2 +26 0.8 0.8 49 22 -15 -49 1 +26 0.5 0.4 36 -19 -42 -45 1 +26 0.2 0.9 37 7 -10 -30 1 +26 0.6 0.9 -21 -46 26 -9 2 +26 0.1 0.9 -25 -41 25 -20 2 +26 0.5 0.3 -27 -44 27 -23 2 +26 0.3 0.7 45 25 0 -9 1 +26 0.6 0.7 10 -2 -15 -17 1 +26 0.1 0.8 -27 -50 -6 -18 2 +26 0.4 0.3 8 -30 -16 -28 1 +26 0.9 0.2 -33 -46 -16 -22 2 +26 0.8 0.9 -29 -35 24 -30 2 +26 0.5 0.3 -5 -47 -15 -27 1 +26 0.1 0.6 5 -31 -1 -7 2 +26 0.7 0.1 47 -24 22 -39 1 +26 0.6 0.4 21 19 39 -48 1 +26 0.1 0.6 26 11 0 -40 1 +26 0.4 0.5 20 -40 20 18 2 +26 0.9 0.7 -25 -46 23 -31 2 +26 0.9 0.9 47 -15 -23 -26 1 +26 0.3 0.6 -13 -45 50 -13 2 +26 0.2 0.1 -32 -33 -31 -32 1 +26 0.7 0.5 9 -26 50 24 2 +26 0.5 0.6 -20 -49 39 6 2 +26 0.6 0.1 22 -15 28 -21 1 +26 0.6 0.1 30 -43 30 -30 1 +26 0.2 0.9 -28 -44 3 -32 2 +26 0.7 0.6 12 -30 -11 -17 1 +26 0.4 0.9 21 -44 38 8 2 +26 0.2 0.7 4 -14 -19 -34 1 +26 0.2 0.3 9 -25 -41 -46 1 +26 0.7 0.5 41 -6 12 -25 1 +26 0.4 0.7 35 31 37 10 1 +26 0.4 0.6 19 12 -16 -43 1 +26 0.5 0.9 36 -14 45 24 2 +26 0.7 0.3 -38 -48 23 -48 2 +26 0.5 0.8 25 -37 22 -28 2 +26 0.4 0.1 -15 -24 8 -42 2 +26 0.1 0.1 35 1 16 -34 1 +26 0.4 0.2 42 13 -3 -5 1 +26 0.1 0.8 0 -10 32 14 2 +26 0.7 0.1 35 -7 -5 -23 1 +26 0.7 0.8 -21 -30 32 0 2 +26 0.7 0.8 11 2 17 -10 1 +26 0.8 0.7 -18 -20 9 2 2 +26 0.7 0.7 46 3 50 -17 1 +26 0.2 0.8 5 -27 50 -26 2 +26 0.3 0.6 41 -4 -7 -50 1 +26 0.8 0.6 30 -3 11 -50 1 +26 0.9 0.8 35 -21 6 -33 1 +26 0.2 0.1 -26 -49 45 -10 2 +26 0.4 0.3 8 -20 -4 -42 1 +26 0.8 0.7 24 0 41 -25 2 +26 0.1 0.6 26 -49 11 -25 2 +26 0.6 0.4 42 -37 -7 -35 1 +26 0.1 0.2 48 30 -26 -40 1 +26 0.5 0.3 36 -46 27 -34 1 +26 0.2 0.1 47 26 18 -10 1 +26 0.7 0.4 41 -50 25 -48 1 +26 0.8 0.5 -8 -25 23 -46 2 +26 0.8 0.7 17 -49 17 -32 2 +26 0.6 0.2 -18 -49 -33 -50 1 +26 0.8 0.9 33 -44 8 -9 1 +26 0.4 0.4 30 -39 -31 -35 1 +26 0.2 0.2 -1 -48 14 -45 2 +26 0.4 0.8 -22 -34 49 -10 2 +26 0.8 0.6 14 -17 5 3 1 +26 0.2 0.5 13 -21 45 -6 2 +26 0.3 0.7 41 -37 50 -16 2 +26 0.8 0.2 6 4 28 -25 1 +26 0.4 0.7 -34 -35 -8 -20 2 +26 0.6 0.8 49 40 40 -8 1 +26 0.8 0.7 19 -33 41 1 2 +26 0.5 0.3 45 27 25 -1 1 +26 0.8 0.3 34 21 29 14 2 +26 0.5 0.9 42 -12 11 -43 2 +26 0.9 0.6 4 -45 1 -3 2 +26 0.2 0.1 43 32 29 -14 1 +26 0.9 0.6 -7 -24 15 -39 2 +26 0.3 0.7 45 -22 -28 -31 1 +26 0.3 0.7 37 -20 17 3 2 +26 0.6 0.3 4 -45 -9 -24 1 +26 0.6 0.7 -4 -30 47 34 2 +26 0.6 0.3 -38 -49 44 27 2 +26 0.7 0.9 4 -19 25 -9 2 +26 0.5 0.2 -16 -37 25 -2 2 +26 0.8 0.3 33 -34 8 -26 1 +26 0.9 0.1 20 -9 21 -30 1 +26 0.6 0.6 30 -8 21 1 1 +26 0.1 0.6 45 -7 36 -50 2 +26 0.4 0.9 37 -32 11 -20 1 +26 0.4 0.6 -17 -18 -18 -45 1 +26 0.1 0.3 16 -26 41 -15 2 +26 0.8 0.8 
[data rows elided: several thousand added records for subjects 26-35, one per line in the source patch, each of the form "+<subjID> <prob> <prob> <val> <val> <val> <val> <choice>" -- two outcome probabilities in 0.1-0.9, four integer payoffs in -50..50, and a binary choice coded 1 or 2; the span begins mid-record, continuing a row started on the preceding line]
+35 0.5 0.6 9 8 12 -37 1 +35 0.3 0.3 47 -7 -4 -25 1 +35 0.7 0.7 4 -31 46 -49 2 +35 0.8 0.5 43 40 44 -47 1 +35 0.4 0.3 -13 -37 37 -37 2 +35 0.3 0.2 34 -10 -1 -32 1 +35 0.6 0.9 30 -13 49 -15 2 +35 0.5 0.6 -31 -46 -6 -28 2 +35 0.4 0.3 -32 -43 -43 -48 2 +35 0.8 0.8 14 -42 17 2 2 +35 0.1 0.8 -18 -38 -17 -48 2 +35 0.9 0.1 24 -26 0 -22 1 +35 0.5 0.5 41 -28 -1 -31 1 +35 0.1 0.5 33 -22 50 -28 2 +35 0.2 0.3 23 20 7 -46 1 +35 0.4 0.6 -17 -33 48 -5 2 +35 0.3 0.1 -17 -44 47 46 2 +35 0.4 0.8 2 -15 47 15 2 +35 0.4 0.1 41 0 35 -40 1 +35 0.9 0.1 44 23 47 -17 1 +35 0.5 0.9 -6 -36 15 -16 2 +35 0.9 0.8 43 -11 -5 -20 1 +35 0.2 0.6 7 3 -7 -18 1 +35 0.9 0.6 31 -31 34 1 1 +35 0.1 0.7 40 17 -16 -33 1 +35 0.1 0.1 8 -30 44 -36 2 +35 0.9 0.9 12 -48 13 8 2 +35 0.8 0.5 -16 -38 46 -5 2 +35 0.9 0.7 17 5 29 -26 1 +35 0.5 0.7 50 39 33 -44 1 +35 0.7 0.7 8 -23 9 -50 1 +35 0.9 0.9 48 2 33 12 1 +35 0.5 0.9 38 -32 14 -46 2 +35 0.7 0.9 20 -40 47 8 2 +35 0.1 0.2 37 -9 46 -16 2 +35 0.4 0.8 -3 -16 47 35 2 +35 0.2 0.3 21 -50 23 1 2 +35 0.7 0.8 0 -5 30 20 2 +35 0.7 0.5 39 27 -7 -21 1 +35 0.4 0.8 -8 -49 39 3 2 +35 0.6 0.3 44 8 -1 -12 1 +35 0.2 0.7 12 -4 -14 -38 1 +35 0.7 0.4 0 -26 4 -17 2 +35 0.9 0.8 33 -15 6 -10 1 +35 0.9 0.3 38 -1 31 -24 1 +35 0.3 0.4 -2 -6 21 -1 2 +35 0.6 0.8 -11 -14 19 -10 2 +35 0.5 0.4 -19 -32 17 13 2 +35 0.7 0.8 21 -37 9 -44 1 +35 0.1 0.3 -26 -50 14 11 2 +35 0.4 0.8 24 -6 1 -18 1 +35 0.5 0.2 -26 -29 12 -36 2 +35 0.4 0.4 41 -47 39 -5 2 +35 0.9 0.1 -45 -48 27 -3 2 +35 0.4 0.8 30 -7 50 20 2 +35 0.4 0.3 12 -50 27 -44 2 +35 0.6 0.6 24 -7 32 -7 2 +35 0.9 0.5 46 34 -15 -24 1 +35 0.2 0.7 20 5 7 -23 1 +35 0.4 0.5 29 18 -42 -46 1 +35 0.3 0.6 28 -6 -19 -21 1 +35 0.8 0.6 6 2 -13 -16 1 +35 0.2 0.3 38 -47 -28 -33 1 +35 0.5 0.1 -7 -12 -8 -34 1 +35 0.4 0.2 -30 -45 15 -50 2 +35 0.8 0.4 19 -49 -3 -35 1 +35 0.5 0.4 9 -42 32 -12 2 +35 0.1 0.8 38 -40 46 -49 2 +35 0.7 0.5 4 -17 2 -40 1 +35 0.1 0.2 26 12 1 -14 1 +35 0.6 0.3 -19 -34 -20 -25 1 +35 0.7 0.3 23 -10 -5 -50 1 +35 0.1 0.5 -9 -25 -6 -48 1 +35 0.2 0.6 12 -29 28 21 2 +35 0.6 0.6 -1 -9 -15 -36 1 +35 0.7 0.9 48 29 34 -8 1 +35 0.3 0.6 22 -43 -3 -20 2 +35 0.9 0.2 35 24 43 2 1 +35 0.1 0.3 35 26 45 -43 1 +35 0.5 0.1 46 -40 3 2 1 +35 0.3 0.8 -1 -13 34 -14 2 +35 0.3 0.1 35 -9 45 22 2 +35 0.1 0.5 -35 -41 32 -33 2 +35 0.6 0.4 2 -44 -2 -23 1 +35 0.9 0.6 -44 -50 12 -28 2 +35 0.8 0.6 29 -5 5 -13 1 +35 0.9 0.7 35 25 44 7 2 +35 0.5 0.1 20 -34 33 24 2 +35 0.6 0.5 41 -7 37 26 2 +35 0.2 0.2 34 4 13 -32 1 +35 0.4 0.6 -22 -32 32 11 2 +35 0.9 0.8 8 2 48 24 2 +35 0.8 0.7 -25 -29 -2 -25 2 +35 0.6 0.2 39 -15 48 47 2 +35 0.7 0.5 10 -24 40 34 2 +35 0.3 0.6 -2 -37 42 -38 2 +35 0.7 0.1 37 -38 50 -46 1 +35 0.5 0.2 -7 -44 9 -30 2 +35 0.1 0.8 26 -35 37 11 2 +35 0.5 0.3 38 -11 49 9 2 +35 0.2 0.5 8 -27 31 -27 2 +35 0.7 0.2 36 -29 47 15 2 +35 0.5 0.9 35 26 38 -10 1 +35 0.5 0.8 7 -13 47 -44 2 +35 0.5 0.5 7 -24 -2 -7 2 +35 0.9 0.1 0 -50 26 14 2 +35 0.5 0.7 31 12 18 6 1 +35 0.7 0.7 12 2 17 -21 2 +35 0.3 0.8 -4 -47 14 5 2 +35 0.5 0.6 4 -2 8 -41 1 +35 0.5 0.4 -15 -38 39 -19 2 +35 0.5 0.4 37 0 28 -37 1 +35 0.1 0.9 47 8 -7 -46 1 +35 0.6 0.7 -11 -35 20 -33 2 +35 0.6 0.6 44 -31 34 8 2 +35 0.4 0.9 -30 -32 11 -49 2 +35 0.8 0.9 12 -25 31 -3 2 +35 0.4 0.3 -17 -32 30 -23 2 +35 0.8 0.5 15 6 24 20 2 +35 0.1 0.9 26 -48 33 3 2 +35 0.6 0.9 1 -32 30 -20 2 +35 0.8 0.8 -2 -21 15 -21 2 +35 0.7 0.9 31 -35 -6 -24 1 +35 0.3 0.7 6 -11 39 -1 2 +35 0.7 0.1 0 -47 47 -8 2 +35 0.3 0.7 41 -49 42 -48 2 +35 0.6 0.4 12 -12 -48 -49 1 +35 0.5 0.9 22 -29 38 -1 2 +35 0.9 0.2 -20 -38 23 -44 2 +35 0.8 0.8 10 -15 -20 -31 1 +35 0.6 0.5 18 -37 15 -10 2 +35 
0.9 0.2 41 5 16 -19 1 +35 0.3 0.8 25 -14 10 -18 2 +35 0.9 0.7 8 -41 27 4 2 +35 0.8 0.1 46 -49 30 23 1 +35 0.4 0.3 17 -44 44 37 2 +35 0.2 0.8 29 -19 -2 -27 1 +35 0.1 0.9 11 -45 8 -46 2 +35 0.3 0.1 20 -13 5 -44 1 +35 0.1 0.1 44 -6 5 -23 1 +35 0.5 0.9 20 -43 43 -19 2 +35 0.1 0.8 45 -33 15 -43 2 +35 0.7 0.2 30 18 38 -9 1 +35 0.4 0.2 28 24 5 -10 1 +35 0.8 0.2 -2 -23 42 32 2 +35 0.9 0.9 42 -15 -27 -47 1 +35 0.9 0.9 25 -16 27 -17 2 +35 0.2 0.4 42 23 -2 -40 1 +35 0.8 0.5 43 -48 -34 -40 1 +35 0.4 0.5 6 -17 6 -27 1 +35 0.4 0.7 14 -36 -19 -40 1 +35 0.8 0.9 34 -36 30 12 2 +35 0.7 0.5 32 -37 34 -42 1 +35 0.7 0.1 37 0 -18 -45 1 +35 0.3 0.7 -15 -38 6 -6 2 +35 0.2 0.4 31 -5 -5 -8 1 +35 0.9 0.2 13 12 -30 -47 1 +35 0.6 0.8 44 41 17 -47 1 +35 0.4 0.6 11 -45 -26 -45 1 +35 0.5 0.1 10 6 16 -22 1 +35 0.8 0.5 -9 -45 37 4 2 +35 0.1 0.4 36 29 7 -9 1 +35 0.2 0.2 33 -4 28 -45 1 +35 0.7 0.6 48 7 22 -24 1 +35 0.4 0.2 37 -39 27 -13 2 +35 0.4 0.5 17 -16 16 1 2 +35 0.2 0.4 24 -8 42 -49 1 +35 0.5 0.8 40 21 1 -5 1 +35 0.8 0.3 36 19 -21 -38 1 +35 0.8 0.5 -6 -22 -30 -44 1 +35 0.3 0.2 31 15 -12 -44 1 +35 0.5 0.6 -5 -13 49 -34 2 +36 0.4 0.6 21 -7 -19 -46 1 +36 0.2 0.2 -10 -46 37 -31 2 +36 0.5 0.6 -21 -25 10 -33 2 +36 0.6 0.4 -1 -47 20 -36 2 +36 0.7 0.4 45 -44 1 -20 1 +36 0.6 0.5 32 -14 -21 -45 1 +36 0.9 0.6 20 -45 -6 -15 1 +36 0.5 0.7 -9 -34 11 -9 2 +36 0.3 0.6 32 16 -13 -23 1 +36 0.6 0.2 47 -14 13 -10 1 +36 0.4 0.2 47 1 2 -28 1 +36 0.5 0.8 34 -23 5 -10 1 +36 0.4 0.8 36 -24 1 -42 1 +36 0.1 0.5 14 -31 25 21 2 +36 0.4 0.5 48 18 10 -5 1 +36 0.2 0.7 34 -12 -6 -34 1 +36 0.8 0.5 19 -42 -38 -39 1 +36 0.2 0.5 4 -32 22 -42 2 +36 0.8 0.3 26 -30 -4 -6 1 +36 0.3 0.3 4 -41 39 2 2 +36 0.9 0.3 13 -35 7 -31 1 +36 0.7 0.8 50 -20 -25 -48 1 +36 0.2 0.2 -14 -18 -29 -37 1 +36 0.6 0.3 46 10 12 -37 1 +36 0.6 0.2 49 4 -17 -47 1 +36 0.1 0.6 5 2 23 -36 2 +36 0.3 0.3 26 21 -22 -32 1 +36 0.4 0.4 14 -20 40 -35 2 +36 0.3 0.2 -35 -38 47 31 2 +36 0.2 0.8 6 -24 -3 -35 2 +36 0.9 0.7 40 34 28 -44 1 +36 0.2 0.6 -15 -16 11 -14 2 +36 0.2 0.9 -11 -30 -5 -13 2 +36 0.4 0.8 43 20 42 -48 1 +36 0.7 0.5 28 19 30 -17 1 +36 0.9 0.6 27 -42 42 7 2 +36 0.8 0.7 -9 -36 23 -26 2 +36 0.5 0.9 35 9 30 21 2 +36 0.6 0.6 11 8 2 -12 1 +36 0.3 0.5 33 -30 48 -16 2 +36 0.4 0.6 -29 -49 43 -40 2 +36 0.9 0.1 49 5 -38 -41 1 +36 0.8 0.5 31 -13 49 -14 1 +36 0.8 0.7 10 -42 31 29 2 +36 0.7 0.1 35 -4 29 4 1 +36 0.9 0.8 48 -37 -16 -24 1 +36 0.8 0.6 23 -19 44 31 2 +36 0.8 0.3 7 -9 8 -31 1 +36 0.1 0.4 -20 -49 30 -35 2 +36 0.2 0.9 -25 -27 30 -17 2 +36 0.1 0.3 42 -14 44 43 2 +36 0.5 0.6 46 -26 -11 -31 1 +36 0.8 0.1 -24 -39 16 -24 2 +36 0.3 0.8 23 -14 -4 -27 1 +36 0.7 0.1 15 9 50 -9 1 +36 0.9 0.5 40 11 21 -29 1 +36 0.4 0.7 24 -50 41 -18 2 +36 0.5 0.5 14 -14 -18 -48 1 +36 0.8 0.1 21 -25 -1 -22 1 +36 0.1 0.7 3 -17 46 -22 2 +36 0.4 0.5 30 9 45 -1 1 +36 0.5 0.5 5 -32 26 -39 2 +36 0.6 0.3 43 40 38 -8 1 +36 0.7 0.2 -24 -27 23 -4 2 +36 0.1 0.5 12 -32 20 -38 2 +36 0.8 0.8 4 -50 36 -12 2 +36 0.2 0.2 40 26 47 -43 1 +36 0.4 0.8 -25 -44 32 -39 2 +36 0.4 0.9 24 8 23 -15 2 +36 0.4 0.7 23 0 -14 -44 1 +36 0.7 0.5 2 -43 -43 -47 1 +36 0.8 0.2 36 -14 5 -17 1 +36 0.7 0.9 29 15 -27 -40 1 +36 0.6 0.7 13 -49 0 -17 2 +36 0.9 0.3 3 1 50 41 2 +36 0.2 0.2 -18 -29 40 -17 2 +36 0.7 0.9 24 -29 17 -1 2 +36 0.7 0.9 -33 -41 -27 -34 2 +36 0.9 0.9 9 8 7 2 1 +36 0.9 0.3 39 26 -32 -46 1 +36 0.1 0.9 -11 -33 32 23 2 +36 0.4 0.1 28 -38 -22 -31 1 +36 0.1 0.3 36 -6 32 -23 2 +36 0.4 0.3 20 14 -12 -50 1 +36 0.8 0.3 41 7 -6 -20 1 +36 0.9 0.9 -6 -11 -2 -6 2 +36 0.3 0.7 50 -18 45 -20 2 +36 0.9 0.4 48 38 -1 -24 1 +36 0.4 0.1 48 43 26 11 1 +36 0.2 0.3 
50 -9 -41 -43 1 +36 0.5 0.7 16 -13 3 -46 1 +36 0.9 0.1 21 -22 25 23 2 +36 0.5 0.8 -23 -49 40 -18 2 +36 0.5 0.9 4 -12 2 -7 2 +36 0.7 0.8 -25 -44 38 -23 2 +36 0.5 0.3 -3 -22 45 -17 2 +36 0.3 0.7 49 -46 9 -6 2 +36 0.8 0.5 32 -17 40 -7 1 +36 0.6 0.8 8 -30 -33 -47 1 +36 0.5 0.5 45 -30 36 -19 2 +36 0.3 0.3 26 -3 11 -1 2 +36 0.7 0.5 -19 -24 3 -47 2 +36 0.9 0.6 26 -22 41 -18 1 +36 0.1 0.7 22 -33 48 23 2 +36 0.3 0.8 28 -18 37 -28 2 +36 0.8 0.3 27 16 -13 -32 1 +36 0.3 0.9 6 3 26 -26 2 +36 0.9 0.7 41 -22 -4 -21 1 +36 0.9 0.7 39 -3 30 21 1 +36 0.7 0.8 48 44 23 -7 1 +36 0.1 0.8 -2 -9 -23 -27 1 +36 0.3 0.6 45 1 -21 -41 1 +36 0.8 0.5 23 -10 -6 -22 1 +36 0.5 0.9 40 6 0 -27 1 +36 0.1 0.9 33 19 33 22 2 +36 0.9 0.2 28 -7 31 -43 1 +36 0.3 0.9 32 -46 -41 -50 1 +36 0.7 0.8 -3 -50 -2 -38 2 +36 0.3 0.6 46 7 30 -36 1 +36 0.8 0.9 -8 -50 49 -12 2 +36 0.1 0.8 20 9 46 5 2 +36 0.7 0.7 -12 -19 25 3 2 +36 0.5 0.6 35 2 -3 -32 1 +36 0.2 0.8 -12 -44 -9 -34 2 +36 0.4 0.2 -5 -7 48 -45 1 +36 0.7 0.2 42 -43 32 25 2 +36 0.6 0.1 36 20 50 -50 1 +36 0.2 0.3 38 3 3 -27 1 +36 0.7 0.3 15 -13 37 -37 1 +36 0.2 0.2 40 30 16 2 1 +36 0.1 0.8 46 40 30 -20 1 +36 0.2 0.5 22 1 34 7 2 +36 0.1 0.2 33 3 13 -45 1 +36 0.7 0.8 34 18 -39 -40 1 +36 0.7 0.9 40 -15 49 -29 2 +36 0.4 0.9 -2 -11 31 27 2 +36 0.7 0.6 -6 -33 36 2 2 +36 0.5 0.7 20 -7 46 -15 2 +36 0.9 0.4 25 -9 41 14 2 +36 0.9 0.9 37 -14 0 -6 1 +36 0.9 0.6 10 -22 21 -42 1 +36 0.2 0.4 31 7 -10 -15 1 +36 0.5 0.3 11 5 50 -18 1 +36 0.1 0.1 34 -20 22 -43 1 +36 0.8 0.4 22 -26 -36 -48 1 +36 0.4 0.4 13 9 42 -41 1 +36 0.5 0.2 -24 -38 49 -27 2 +36 0.1 0.5 -21 -41 34 -33 2 +36 0.4 0.6 29 -20 10 3 2 +36 0.7 0.2 3 -30 29 -14 1 +36 0.7 0.3 -28 -37 26 -33 2 +36 0.4 0.4 -10 -20 24 7 2 +36 0.6 0.1 1 -29 48 4 2 +36 0.5 0.2 -27 -40 19 -18 2 +36 0.9 0.2 -2 -11 39 3 2 +36 0.9 0.1 36 -47 -12 -39 1 +36 0.9 0.4 19 11 41 1 1 +36 0.6 0.1 36 -43 38 25 2 +36 0.1 0.5 -13 -25 -21 -23 2 +36 0.1 0.3 -4 -30 49 28 2 +36 0.2 0.1 -26 -50 -8 -24 2 +36 0.2 0.1 13 -36 49 22 2 +36 0.1 0.5 21 -17 36 19 2 +36 0.2 0.3 3 -26 40 13 2 +36 0.7 0.9 19 -21 29 16 2 +36 0.5 0.8 22 -46 29 -41 2 +36 0.1 0.7 16 14 -34 -43 1 +36 0.4 0.5 -36 -50 17 -3 2 +36 0.3 0.2 36 5 24 -23 1 +36 0.6 0.6 -8 -44 50 -24 2 +36 0.5 0.2 31 -40 -5 -33 1 +36 0.5 0.4 39 5 -3 -36 1 +36 0.8 0.5 4 -47 31 18 2 +36 0.4 0.5 21 -37 43 32 2 +36 0.5 0.2 -16 -36 40 -28 2 +36 0.4 0.4 38 37 26 -45 1 +36 0.2 0.6 16 -30 -3 -7 2 +36 0.3 0.5 -12 -41 48 8 2 +36 0.6 0.6 17 7 42 -6 2 +36 0.1 0.1 38 21 47 15 2 +36 0.3 0.5 20 -10 28 -15 2 +36 0.1 0.9 7 -11 18 -35 2 +36 0.4 0.8 45 -25 30 -41 2 +36 0.7 0.6 -32 -40 34 -4 2 +36 0.9 0.3 33 -2 26 -29 1 +36 0.3 0.5 40 -32 38 -24 2 +36 0.1 0.8 16 -10 -44 -50 1 +36 0.3 0.8 50 -40 17 14 2 +36 0.9 0.8 19 11 13 -28 1 +36 0.7 0.5 6 -14 40 -28 2 +36 0.8 0.5 -47 -48 0 -37 2 +36 0.6 0.3 25 18 -36 -43 1 +36 0.8 0.7 48 -30 46 17 2 +36 0.8 0.9 19 -19 -37 -46 1 +36 0.5 0.9 -21 -29 41 -16 2 +36 0.3 0.9 48 4 35 0 2 +36 0.6 0.3 39 -42 28 -30 1 +36 0.9 0.2 36 -15 9 -24 1 +36 0.7 0.2 8 -46 -24 -41 1 +36 0.8 0.6 16 -25 25 -8 2 +36 0.8 0.4 38 -7 25 -4 1 +36 0.5 0.3 46 -23 -1 -8 1 +36 0.5 0.5 43 -32 0 -50 1 +36 0.2 0.1 10 -46 16 -25 2 +36 0.1 0.1 -38 -48 -38 -44 2 +36 0.7 0.2 8 -2 32 -34 1 +36 0.2 0.2 31 -41 33 12 2 +36 0.8 0.1 19 -21 45 34 2 +36 0.4 0.2 -36 -43 49 -13 2 +36 0.7 0.9 12 -40 15 3 2 +36 0.9 0.5 15 12 20 19 2 +36 0.6 0.1 38 27 33 -44 1 +36 0.5 0.9 38 26 44 40 2 +36 0.5 0.5 -6 -34 34 -21 2 +36 0.8 0.5 22 -19 38 0 2 +36 0.7 0.1 -28 -42 46 17 2 +36 0.2 0.3 40 20 13 -38 1 +36 0.8 0.5 48 -27 -35 -44 1 +36 0.9 0.8 -27 -50 -19 -28 2 +36 0.6 0.9 43 15 35 7 2 +36 
0.9 0.4 41 -19 -26 -39 1 +36 0.1 0.5 4 -33 44 -28 2 +36 0.2 0.5 8 -26 45 -39 2 +36 0.9 0.7 42 34 14 -14 1 +36 0.3 0.8 32 -32 -32 -38 1 +36 0.5 0.1 8 -36 38 -3 2 +36 0.7 0.8 13 -19 28 6 2 +36 0.3 0.8 -20 -40 -16 -41 2 +36 0.1 0.5 14 -8 34 -37 2 +36 0.3 0.8 7 -11 -24 -41 1 +36 0.3 0.4 19 -11 -25 -26 1 +36 0.4 0.3 -1 -16 47 46 2 +36 0.8 0.8 38 7 -13 -19 1 +36 0.3 0.2 20 4 27 -45 1 +36 0.8 0.3 0 -2 35 10 2 +36 0.4 0.5 -13 -15 27 -45 2 +36 0.4 0.6 33 -2 11 -42 1 +36 0.8 0.7 46 24 28 -45 1 +36 0.4 0.1 46 36 12 7 1 +36 0.6 0.4 -11 -29 14 -29 2 +36 0.5 0.7 29 2 34 -14 2 +36 0.3 0.7 17 -43 25 -41 2 +36 0.4 0.2 45 9 49 1 1 +36 0.2 0.3 27 23 -10 -26 1 +36 0.6 0.4 26 -26 3 -17 1 +36 0.2 0.2 33 28 20 -25 1 +36 0.2 0.4 47 -2 -7 -18 1 +36 0.2 0.6 5 -39 -11 -16 2 +36 0.2 0.7 2 -45 35 -36 2 +36 0.9 0.4 16 5 -5 -19 1 +36 0.7 0.5 7 4 16 -40 1 +36 0.9 0.7 -12 -30 23 -34 2 +36 0.2 0.3 46 37 29 -20 1 +36 0.2 0.5 40 -12 26 -24 2 +36 0.1 0.3 20 -4 30 -42 1 +36 0.7 0.1 16 -28 -5 -49 1 +36 0.8 0.6 46 9 22 -34 1 +36 0.9 0.7 47 20 -27 -28 1 +36 0.8 0.2 47 -41 27 21 1 +36 0.7 0.5 26 4 -25 -39 1 +36 0.4 0.8 -22 -39 49 30 2 +36 0.7 0.4 2 -5 27 -6 2 +36 0.6 0.1 46 -18 25 13 2 +36 0.6 0.3 8 -31 -4 -9 2 +36 0.3 0.4 48 -40 28 13 2 +36 0.3 0.4 19 -17 0 -8 2 +36 0.1 0.1 -44 -48 19 -16 2 +36 0.4 0.8 49 -37 -30 -38 1 +36 0.2 0.5 -24 -49 14 -14 2 +36 0.5 0.2 48 35 29 15 1 +36 0.3 0.9 -19 -38 13 -33 2 +36 0.7 0.2 7 -34 44 10 2 +36 0.7 0.7 44 -47 -31 -39 1 +36 0.5 0.4 40 -44 -25 -50 1 +36 0.2 0.3 50 -3 -10 -31 1 +36 0.8 0.9 -40 -49 35 -25 2 +36 0.2 0.5 39 9 30 -20 1 +36 0.5 0.7 24 3 21 -26 1 +36 0.6 0.1 5 -20 -6 -7 2 +36 0.8 0.5 23 -10 15 -16 1 +36 0.6 0.5 28 -8 -31 -40 1 +36 0.5 0.1 12 -34 4 -24 1 +36 0.7 0.1 -15 -37 -7 -36 1 +36 0.7 0.8 24 -33 29 -40 2 +36 0.1 0.3 43 25 28 7 1 +36 0.8 0.9 41 -17 23 -26 1 +36 0.5 0.8 -11 -46 15 -39 2 +36 0.9 0.5 -21 -42 34 -21 2 +36 0.3 0.1 -2 -34 2 -36 1 +36 0.2 0.2 29 -40 -10 -33 1 +36 0.5 0.2 36 23 5 -29 1 +36 0.8 0.1 18 -4 -3 -16 1 +36 0.9 0.2 43 -16 47 38 1 +36 0.1 0.3 28 -16 28 -6 2 +36 0.7 0.3 25 -35 49 4 2 +36 0.9 0.3 18 -20 30 6 2 +36 0.2 0.7 42 -13 21 17 2 +36 0.8 0.2 37 -37 29 27 2 +36 0.9 0.7 48 14 45 -41 1 +36 0.5 0.9 35 -48 20 -46 2 +37 0.5 0.5 -12 -16 45 43 2 +37 0.3 0.5 -13 -17 3 -45 2 +37 0.7 0.6 29 -7 16 -25 1 +37 0.2 0.7 43 33 26 -14 1 +37 0.1 0.7 24 -30 46 -21 2 +37 0.7 0.4 25 -28 33 -45 1 +37 0.3 0.5 36 5 43 22 2 +37 0.5 0.9 10 -14 7 -38 1 +37 0.5 0.6 16 -24 46 -4 2 +37 0.4 0.3 16 -9 22 -2 1 +37 0.5 0.2 20 -7 22 16 2 +37 0.4 0.7 -26 -32 8 -10 2 +37 0.4 0.6 -40 -47 9 -4 2 +37 0.9 0.9 -1 -4 -20 -24 1 +37 0.6 0.5 -10 -45 -20 -49 1 +37 0.4 0.3 39 28 49 -4 1 +37 0.5 0.8 47 -37 44 -23 2 +37 0.1 0.2 19 -16 25 -15 1 +37 0.1 0.2 35 8 35 9 1 +37 0.3 0.1 25 0 35 -27 1 +37 0.3 0.3 -12 -21 5 -44 2 +37 0.1 0.2 -27 -34 15 -13 2 +37 0.5 0.7 42 31 4 -17 1 +37 0.7 0.7 -10 -16 45 41 2 +37 0.5 0.7 29 20 -3 -36 1 +37 0.3 0.8 1 -18 -16 -39 1 +37 0.5 0.2 17 -40 -40 -43 1 +37 0.2 0.6 29 -22 -1 -35 1 +37 0.7 0.3 10 -28 20 -4 1 +37 0.4 0.7 23 -48 -8 -26 1 +37 0.2 0.5 13 -32 1 -46 1 +37 0.9 0.9 -26 -42 48 26 2 +37 0.6 0.3 15 -38 25 -4 2 +37 0.6 0.3 -22 -30 32 -28 2 +37 0.1 0.6 37 -16 39 -10 2 +37 0.7 0.9 22 -19 48 -11 2 +37 0.4 0.2 43 -35 29 -35 1 +37 0.4 0.4 11 -37 4 -2 2 +37 0.4 0.7 -3 -39 31 -5 2 +37 0.6 0.3 22 10 49 31 1 +37 0.5 0.6 19 -40 48 -3 2 +37 0.5 0.1 -4 -19 38 -6 2 +37 0.6 0.9 12 -7 7 -31 1 +37 0.7 0.2 -39 -48 44 -30 2 +37 0.8 0.3 49 -11 19 -30 1 +37 0.6 0.7 -11 -36 25 18 2 +37 0.8 0.1 22 -17 -8 -24 1 +37 0.3 0.5 -15 -26 22 -44 2 +37 0.7 0.2 20 -23 36 11 2 +37 0.4 0.4 14 -10 -16 -42 1 +37 
0.9 0.4 28 -1 -25 -46 1 +37 0.3 0.8 9 8 27 7 2 +37 0.3 0.5 25 -38 45 -8 2 +37 0.8 0.9 10 1 4 -15 1 +37 0.8 0.6 -43 -47 -3 -42 2 +37 0.6 0.1 49 29 43 15 1 +37 0.5 0.4 42 6 42 5 1 +37 0.3 0.8 44 -40 15 13 2 +37 0.1 0.5 21 -33 36 -22 2 +37 0.4 0.6 30 6 -4 -13 1 +37 0.2 0.3 -33 -37 41 10 2 +37 0.5 0.7 41 24 16 -15 1 +37 0.5 0.3 44 2 49 -47 1 +37 0.6 0.5 34 -13 15 -48 1 +37 0.1 0.6 13 3 27 -40 1 +37 0.4 0.1 1 -45 -26 -37 1 +37 0.8 0.4 25 -27 -39 -50 1 +37 0.2 0.1 24 13 36 -39 1 +37 0.4 0.5 33 -47 9 -14 2 +37 0.9 0.2 -39 -46 42 14 2 +37 0.9 0.8 31 -5 28 20 2 +37 0.3 0.6 42 31 -37 -44 1 +37 0.7 0.8 26 -12 0 -8 1 +37 0.2 0.7 34 -12 26 -28 2 +37 0.2 0.7 -20 -25 31 14 2 +37 0.2 0.3 27 -9 19 -1 2 +37 0.4 0.2 -28 -45 -2 -48 1 +37 0.1 0.4 13 5 41 30 2 +37 0.6 0.9 46 0 19 -36 1 +37 0.8 0.1 -28 -38 21 -50 1 +37 0.2 0.6 -1 -35 -7 -35 2 +37 0.5 0.8 28 -21 21 -24 2 +37 0.5 0.7 15 -35 -45 -49 1 +37 0.5 0.2 43 -21 47 31 2 +37 0.7 0.1 10 4 -11 -39 1 +37 0.7 0.6 40 8 0 -13 1 +37 0.7 0.5 -10 -33 25 0 2 +37 0.5 0.5 -16 -45 14 -22 2 +37 0.1 0.4 25 -46 15 -37 2 +37 0.6 0.1 24 -36 3 -26 1 +37 0.1 0.8 29 15 50 -17 2 +37 0.3 0.1 3 -31 5 3 2 +37 0.3 0.6 37 -11 18 -32 1 +37 0.5 0.1 1 -11 40 -31 1 +37 0.2 0.3 33 10 19 -38 1 +37 0.6 0.2 10 -3 49 39 2 +37 0.7 0.3 -11 -34 35 -26 2 +37 0.3 0.7 34 -16 13 -23 1 +37 0.5 0.9 6 -41 29 27 2 +37 0.7 0.1 16 6 27 -30 1 +37 0.4 0.2 37 -30 34 6 2 +37 0.4 0.3 30 -12 49 -9 1 +37 0.1 0.5 41 -38 27 17 2 +37 0.2 0.2 38 -10 18 4 2 +37 0.8 0.4 37 7 -30 -44 1 +37 0.5 0.5 -6 -44 39 10 2 +37 0.8 0.4 1 -41 26 -10 2 +37 0.5 0.5 47 44 -18 -22 1 +37 0.4 0.2 0 -11 36 -46 2 +37 0.2 0.7 -3 -27 -2 -17 2 +37 0.7 0.7 -12 -13 36 -20 2 +37 0.3 0.3 49 -31 17 -21 2 +37 0.4 0.9 4 -8 -16 -20 1 +37 0.5 0.5 -6 -39 37 -4 2 +37 0.5 0.6 35 -28 -25 -40 1 +37 0.2 0.7 23 -49 16 -6 2 +37 0.8 0.5 44 -12 12 -19 1 +37 0.9 0.5 26 -9 23 15 2 +37 0.2 0.8 27 -38 50 -1 2 +37 0.6 0.2 13 -17 43 16 2 +37 0.2 0.8 29 12 -27 -39 1 +37 0.4 0.4 1 -16 45 -38 2 +37 0.6 0.6 31 -47 26 -12 1 +37 0.9 0.2 41 -12 25 6 1 +37 0.3 0.2 15 10 47 -11 1 +37 0.8 0.3 19 3 47 -1 1 +37 0.4 0.1 29 -46 33 -22 1 +37 0.2 0.4 45 -17 47 -47 1 +37 0.4 0.2 49 15 39 -7 1 +37 0.2 0.9 -2 -42 26 9 2 +37 0.8 0.6 38 16 32 26 1 +37 0.1 0.5 15 -20 47 -42 2 +37 0.1 0.7 19 -40 18 -30 2 +37 0.7 0.2 32 -20 -15 -24 1 +37 0.5 0.5 -11 -28 39 -26 2 +37 0.4 0.2 28 11 34 -26 1 +37 0.8 0.4 39 -34 9 6 2 +37 0.7 0.5 -17 -28 -10 -24 1 +37 0.4 0.5 29 -24 45 -38 2 +37 0.9 0.6 27 20 -10 -28 1 +37 0.9 0.7 -11 -44 7 -4 2 +37 0.2 0.6 41 1 22 -17 2 +37 0.2 0.2 -20 -31 -43 -48 1 +37 0.2 0.1 1 -50 5 -41 1 +37 0.2 0.7 23 -29 18 9 2 +37 0.7 0.6 43 -15 44 -14 2 +37 0.1 0.3 22 12 19 14 2 +37 0.9 0.2 50 -19 9 8 1 +37 0.7 0.1 49 5 21 3 1 +37 0.7 0.9 18 -20 2 -48 1 +37 0.9 0.2 37 -24 20 -33 1 +37 0.4 0.3 14 -25 -31 -43 1 +37 0.5 0.9 16 2 33 9 2 +37 0.9 0.1 50 -38 6 -26 1 +37 0.7 0.4 21 -19 -38 -41 1 +37 0.7 0.7 13 -27 -7 -37 1 +37 0.1 0.6 7 4 27 -28 1 +37 0.4 0.2 30 3 -10 -32 1 +37 0.3 0.6 49 -50 40 -31 2 +37 0.6 0.4 47 33 33 -47 1 +37 0.3 0.6 -10 -25 14 5 2 +37 0.6 0.2 36 -47 31 -34 1 +37 0.6 0.7 38 31 -14 -22 1 +37 0.9 0.6 7 -15 19 -9 1 +37 0.4 0.5 48 -7 -4 -44 1 +37 0.6 0.7 -2 -44 45 11 2 +37 0.9 0.4 29 -38 31 -49 1 +37 0.3 0.7 42 18 -9 -23 1 +37 0.5 0.3 42 -1 41 -19 1 +37 0.8 0.1 26 -45 30 25 2 +37 0.8 0.1 -11 -21 37 -20 2 +37 0.8 0.3 29 -48 50 18 2 +37 0.6 0.6 4 -6 17 -24 1 +37 0.6 0.9 -23 -32 -7 -12 1 +37 0.9 0.1 -4 -36 -11 -40 1 +37 0.9 0.6 32 -40 23 -11 1 +37 0.5 0.7 39 -7 -3 -36 1 +37 0.3 0.7 48 17 43 -7 2 +37 0.9 0.2 33 -36 16 -24 1 +37 0.9 0.1 8 -17 44 4 2 +37 0.2 0.8 29 -4 8 -3 2 +37 0.7 
0.1 37 28 23 -27 1 +37 0.8 0.8 24 20 -6 -23 1 +37 0.9 0.5 3 -45 46 -29 2 +37 0.3 0.1 9 -29 -1 -26 2 +37 0.7 0.2 49 47 29 10 1 +37 0.4 0.7 49 -41 8 -45 1 +37 0.1 0.8 -22 -34 38 29 2 +37 0.7 0.9 -16 -37 38 -4 2 +37 0.8 0.3 18 1 40 -37 1 +37 0.8 0.8 21 16 39 -27 1 +37 0.1 0.1 30 9 48 47 2 +37 0.2 0.3 26 1 28 19 2 +37 0.4 0.2 -15 -38 -30 -45 1 +37 0.4 0.4 35 -40 -39 -42 1 +37 0.1 0.2 36 8 -10 -46 1 +37 0.8 0.8 10 3 32 -25 1 +37 0.8 0.2 25 12 38 -18 1 +37 0.6 0.4 42 7 37 -15 1 +37 0.2 0.5 50 31 44 -23 1 +37 0.9 0.3 42 36 38 -20 1 +37 0.5 0.4 -20 -46 -18 -23 2 +37 0.9 0.9 8 2 29 -28 2 +37 0.5 0.5 29 -25 7 -9 1 +37 0.8 0.1 48 4 50 5 1 +37 0.7 0.1 15 -43 -35 -36 1 +37 0.7 0.5 28 7 39 17 1 +37 0.3 0.2 32 -47 46 -7 2 +37 0.8 0.7 47 -45 14 -47 1 +37 0.5 0.8 -40 -46 -33 -46 1 +37 0.3 0.5 -12 -21 2 -50 2 +37 0.7 0.1 30 -47 17 -48 1 +37 0.3 0.1 -46 -47 25 20 2 +37 0.9 0.8 47 -15 -10 -49 1 +37 0.1 0.8 48 -46 50 46 2 +37 0.4 0.6 -11 -21 -9 -30 2 +37 0.3 0.9 -35 -43 -6 -17 2 +37 0.9 0.5 -45 -47 -4 -11 2 +37 0.9 0.7 -11 -44 7 -3 1 +37 0.4 0.7 20 4 26 9 2 +37 0.3 0.8 -4 -45 17 -49 2 +37 0.4 0.8 27 -21 -20 -30 1 +37 0.9 0.2 37 -32 7 -3 1 +37 0.3 0.2 45 32 38 -23 1 +37 0.2 0.2 -2 -31 -10 -17 1 +37 0.9 0.6 16 -30 36 3 2 +37 0.4 0.7 -12 -42 25 -36 2 +37 0.4 0.1 -27 -32 26 -37 2 +37 0.6 0.2 17 12 41 -45 1 +37 0.4 0.9 48 -25 8 -40 2 +37 0.7 0.3 12 -11 15 -2 2 +37 0.3 0.9 21 -23 24 -43 2 +37 0.2 0.2 50 24 34 -38 1 +37 0.2 0.9 8 -6 5 -21 2 +37 0.2 0.4 1 -26 46 -47 2 +37 0.9 0.5 -25 -47 37 -6 2 +37 0.8 0.9 13 -8 -1 -9 1 +37 0.4 0.2 42 10 50 21 1 +37 0.8 0.3 -7 -15 -1 -21 2 +37 0.8 0.5 32 7 48 39 2 +37 0.8 0.9 23 20 27 10 2 +37 0.1 0.3 28 -1 48 -2 2 +37 0.5 0.1 49 19 11 -33 1 +37 0.9 0.6 32 -2 28 -33 1 +37 0.5 0.5 30 -7 -30 -41 1 +37 0.1 0.2 36 21 44 15 1 +37 0.7 0.4 23 2 5 -13 1 +37 0.7 0.4 5 -5 35 32 2 +37 0.4 0.6 -26 -31 -8 -50 2 +37 0.4 0.2 34 28 -17 -36 1 +37 0.8 0.9 44 -17 50 -30 2 +37 0.6 0.8 31 -33 -3 -14 1 +37 0.9 0.1 -10 -48 12 -27 1 +37 0.8 0.3 8 -4 32 -14 1 +37 0.7 0.5 -2 -33 6 -22 2 +37 0.4 0.5 46 45 7 -50 1 +37 0.5 0.8 9 -37 25 -25 2 +37 0.5 0.3 32 8 32 19 2 +37 0.5 0.8 43 41 48 21 2 +37 0.3 0.9 30 -34 40 -23 2 +37 0.4 0.5 24 -32 -40 -45 1 +37 0.6 0.4 -17 -33 -22 -49 1 +37 0.6 0.8 46 26 8 -28 1 +37 0.9 0.3 -10 -30 12 9 2 +37 0.2 0.4 -1 -21 26 -29 1 +37 0.9 0.7 27 4 5 -12 1 +37 0.3 0.4 -21 -35 9 -47 2 +37 0.2 0.3 -5 -9 22 -1 2 +37 0.6 0.9 50 -47 -25 -37 1 +37 0.2 0.9 -2 -4 28 24 2 +37 0.9 0.9 7 -34 5 -15 1 +37 0.1 0.7 35 15 50 12 2 +37 0.5 0.1 49 -35 -15 -27 1 +37 0.2 0.2 27 -24 0 -40 1 +37 0.7 0.4 26 -33 -8 -49 1 +37 0.3 0.3 9 7 7 -27 1 +37 0.6 0.9 17 -28 32 22 2 +37 0.6 0.7 16 -26 -23 -35 1 +37 0.7 0.1 13 -18 45 -38 2 +37 0.5 0.8 26 8 18 -33 1 +37 0.3 0.7 -35 -40 -1 -11 2 +37 0.6 0.3 -35 -39 22 -20 2 +37 0.7 0.6 -26 -28 24 -43 2 +37 0.7 0.2 23 -50 21 -14 1 +37 0.6 0.3 5 -37 50 -15 2 +37 0.3 0.5 -10 -48 44 26 2 +37 0.9 0.5 7 -34 3 -43 1 +37 0.5 0.5 26 -13 27 9 2 +37 0.4 0.4 -40 -46 31 0 2 +37 0.7 0.2 24 15 11 -30 1 +37 0.3 0.9 23 -46 19 -17 2 +37 0.5 0.4 24 3 21 -43 1 +37 0.5 0.2 33 30 40 29 2 +37 0.9 0.4 -2 -9 24 -1 2 +37 0.9 0.2 41 30 41 -13 1 +37 0.2 0.8 20 -23 22 -25 2 +37 0.3 0.7 27 3 28 9 2 +37 0.4 0.1 50 3 39 5 1 +37 0.3 0.1 22 -29 20 6 2 +37 0.9 0.3 46 26 -31 -39 1 +38 0.2 0.1 -9 -30 -14 -40 1 +38 0.5 0.7 38 -12 11 -31 1 +38 0.5 0.2 42 -20 14 -6 1 +38 0.5 0.9 46 16 23 -10 1 +38 0.1 0.7 35 -25 48 -49 2 +38 0.6 0.2 29 -29 30 -20 1 +38 0.3 0.2 18 16 -10 -41 1 +38 0.1 0.3 26 22 -17 -46 1 +38 0.2 0.7 43 -12 49 38 2 +38 0.8 0.6 49 -27 12 -14 1 +38 0.2 0.2 41 -41 45 -13 2 +38 0.4 0.1 29 8 -41 -49 1 +38 
0.1 0.5 8 -6 -2 -4 2 +38 0.5 0.4 49 42 -10 -36 1 +38 0.6 0.5 9 -10 -11 -37 1 +38 0.2 0.8 26 24 16 -46 1 +38 0.6 0.4 49 -20 27 -41 1 +38 0.6 0.3 -1 -31 -20 -33 1 +38 0.8 0.2 -24 -34 39 -23 2 +38 0.5 0.3 4 -1 -11 -47 1 +38 0.7 0.9 5 -49 50 29 2 +38 0.3 0.6 47 -4 27 5 2 +38 0.8 0.3 33 -50 9 -38 1 +38 0.4 0.5 45 -23 -18 -41 1 +38 0.3 0.2 -17 -18 23 -8 1 +38 0.6 0.9 46 -28 26 -41 2 +38 0.5 0.6 31 -36 25 -9 2 +38 0.8 0.2 26 3 31 -32 1 +38 0.2 0.9 1 -48 38 10 2 +38 0.5 0.5 -6 -11 16 8 2 +38 0.3 0.1 -22 -34 30 -27 2 +38 0.6 0.1 35 1 41 -12 1 +38 0.4 0.9 9 -14 -13 -49 1 +38 0.8 0.1 30 0 46 -4 1 +38 0.3 0.3 -4 -26 35 13 2 +38 0.6 0.7 26 -16 44 36 2 +38 0.5 0.3 12 -50 -4 -27 1 +38 0.3 0.2 5 -6 48 -12 2 +38 0.7 0.5 -10 -40 36 -32 2 +38 0.3 0.7 48 27 43 -5 1 +38 0.1 0.1 9 -45 24 -24 2 +38 0.6 0.8 -20 -27 -26 -39 1 +38 0.1 0.1 -11 -23 2 -13 1 +38 0.7 0.8 -13 -39 45 36 2 +38 0.1 0.2 50 -15 -19 -33 1 +38 0.8 0.1 41 -47 -13 -39 1 +38 0.9 0.6 22 -23 13 -12 1 +38 0.9 0.1 -24 -42 40 -33 2 +38 0.6 0.9 19 -43 25 13 2 +38 0.7 0.8 41 -46 10 4 1 +38 0.9 0.1 18 -1 0 -26 1 +38 0.2 0.7 49 35 -9 -22 1 +38 0.8 0.6 15 3 42 -28 2 +38 0.5 0.1 -25 -36 18 -35 2 +38 0.9 0.7 17 -33 -17 -43 1 +38 0.5 0.4 -2 -17 39 -24 2 +38 0.3 0.4 -7 -26 -17 -24 1 +38 0.5 0.5 50 19 50 49 2 +38 0.2 0.5 -16 -23 34 -23 2 +38 0.3 0.1 23 -32 26 2 2 +38 0.3 0.5 35 17 37 -16 1 +38 0.7 0.8 26 21 3 -11 1 +38 0.4 0.7 -20 -39 42 31 2 +38 0.5 0.4 -5 -45 34 -44 2 +38 0.1 0.3 23 -16 47 17 2 +38 0.6 0.6 16 -34 17 -3 2 +38 0.2 0.2 23 -42 49 36 2 +38 0.6 0.8 -14 -19 21 -48 2 +38 0.7 0.3 39 31 -19 -37 1 +38 0.9 0.8 30 19 8 -26 1 +38 0.3 0.5 -24 -42 41 -33 2 +38 0.9 0.4 -37 -39 46 -15 2 +38 0.9 0.3 -13 -30 -7 -21 1 +38 0.3 0.3 50 1 23 -43 1 +38 0.1 0.8 11 -17 41 -2 2 +38 0.1 0.5 -10 -18 23 -43 2 +38 0.5 0.1 -45 -46 42 -20 1 +38 0.5 0.6 30 -33 11 -3 2 +38 0.1 0.2 35 26 -27 -35 1 +38 0.3 0.2 41 -8 -5 -50 1 +38 0.3 0.9 -34 -35 22 -12 2 +38 0.3 0.6 -12 -29 -26 -36 2 +38 0.8 0.3 -34 -37 8 -48 1 +38 0.4 0.8 -1 -38 45 43 2 +38 0.7 0.7 36 19 -25 -45 1 +38 0.5 0.8 33 -39 35 20 2 +38 0.1 0.5 -18 -39 29 10 2 +38 0.6 0.6 -20 -50 42 -39 2 +38 0.7 0.7 -4 -37 43 -45 2 +38 0.5 0.7 8 -35 -22 -43 1 +38 0.1 0.6 -8 -48 0 -8 2 +38 0.9 0.6 12 -21 50 -47 2 +38 0.9 0.7 38 0 40 18 1 +38 0.7 0.8 30 17 19 -1 1 +38 0.3 0.6 -32 -50 -24 -32 2 +38 0.1 0.9 22 -35 14 -9 2 +38 0.7 0.3 2 -23 29 -8 2 +38 0.3 0.3 -13 -41 43 30 2 +38 0.5 0.2 31 -44 3 -4 1 +38 0.9 0.8 36 -5 -12 -35 1 +38 0.4 0.7 19 -14 -25 -38 1 +38 0.5 0.2 40 -38 50 28 2 +38 0.5 0.8 16 -19 21 -9 2 +38 0.4 0.6 18 -45 -8 -37 1 +38 0.4 0.5 10 -15 33 -25 2 +38 0.2 0.2 21 14 6 -36 1 +38 0.5 0.1 35 12 26 -1 1 +38 0.3 0.1 34 -6 30 3 2 +38 0.8 0.5 39 3 17 -21 1 +38 0.1 0.8 31 -25 48 35 2 +38 0.4 0.4 -9 -15 -8 -49 1 +38 0.9 0.2 40 -4 -1 -23 1 +38 0.7 0.7 27 -21 34 -5 2 +38 0.1 0.3 40 35 4 -35 1 +38 0.7 0.3 24 -22 48 30 2 +38 0.8 0.8 -24 -34 -4 -38 1 +38 0.8 0.7 16 -18 -21 -45 1 +38 0.7 0.3 40 -48 45 -19 1 +38 0.6 0.4 -12 -13 -35 -36 1 +38 0.3 0.4 39 -42 4 0 1 +38 0.9 0.5 -1 -49 -18 -26 1 +38 0.7 0.7 48 -47 36 27 2 +38 0.6 0.9 6 -47 34 -35 2 +38 0.3 0.9 41 18 1 -43 1 +38 0.6 0.1 16 -38 -34 -42 1 +38 0.5 0.9 47 -6 18 -32 2 +38 0.4 0.7 9 1 -12 -38 1 +38 0.9 0.1 40 26 -1 -4 1 +38 0.5 0.1 44 -23 16 -7 1 +38 0.9 0.7 32 -2 -39 -41 1 +38 0.3 0.4 32 22 41 -29 1 +38 0.1 0.4 42 -33 21 -8 2 +38 0.4 0.8 46 -16 27 -22 1 +38 0.8 0.7 -2 -29 21 16 2 +38 0.7 0.2 0 -46 13 7 2 +38 0.9 0.2 50 20 11 -7 1 +38 0.6 0.6 43 -21 41 30 2 +38 0.8 0.1 5 -13 6 -41 1 +38 0.5 0.1 42 -17 -5 -10 1 +38 0.2 0.8 50 13 34 -50 1 +38 0.7 0.1 -7 -47 46 22 2 +38 0.8 0.6 -21 -47 19 5 
2 +38 0.4 0.8 13 -14 6 0 2 +38 0.2 0.6 44 18 11 -27 1 +38 0.2 0.7 -28 -41 24 13 2 +38 0.6 0.9 12 -4 -8 -25 1 +38 0.9 0.6 46 -30 5 -4 1 +38 0.2 0.7 6 -2 10 -27 2 +38 0.9 0.5 -8 -35 50 44 2 +38 0.2 0.4 1 -34 22 11 2 +38 0.4 0.7 27 -49 -13 -26 1 +38 0.5 0.5 29 -4 31 -32 1 +38 0.6 0.1 36 25 2 -23 1 +38 0.6 0.7 48 42 36 5 1 +38 0.9 0.8 12 7 -26 -35 1 +38 0.3 0.7 38 10 -5 -43 1 +38 0.3 0.7 -11 -41 41 26 2 +38 0.4 0.6 49 47 30 -40 1 +38 0.6 0.2 -31 -47 42 -48 2 +38 0.2 0.8 42 24 41 4 2 +38 0.2 0.7 43 -4 -10 -40 1 +38 0.6 0.4 -2 -29 5 -40 1 +38 0.6 0.9 11 7 47 -18 2 +38 0.9 0.9 50 -10 24 -34 1 +38 0.7 0.3 49 -7 35 -14 1 +38 0.4 0.3 50 38 7 -16 1 +38 0.2 0.2 -3 -37 39 17 2 +38 0.7 0.2 -24 -41 33 -11 2 +38 0.7 0.6 -12 -23 -18 -46 2 +38 0.9 0.4 45 10 35 -41 1 +38 0.8 0.9 -14 -37 44 -50 2 +38 0.6 0.9 46 -42 16 -1 1 +38 0.4 0.1 6 -28 2 -13 1 +38 0.6 0.3 7 -48 19 -25 2 +38 0.3 0.3 41 18 -14 -25 1 +38 0.5 0.7 -16 -35 41 -30 2 +38 0.8 0.7 27 11 9 7 1 +38 0.7 0.9 24 -19 29 -46 2 +38 0.1 0.1 18 8 0 -28 1 +38 0.7 0.9 -19 -42 -19 -38 2 +38 0.8 0.3 22 -2 -5 -48 1 +38 0.9 0.1 -30 -45 47 -6 2 +38 0.7 0.4 28 24 -3 -12 1 +38 0.9 0.8 26 -2 -28 -29 1 +38 0.3 0.6 41 24 17 -2 1 +38 0.5 0.9 36 -46 17 -46 2 +38 0.3 0.2 12 -47 23 19 2 +38 0.4 0.2 37 -50 49 19 2 +38 0.6 0.7 -6 -9 24 6 2 +38 0.9 0.1 32 -15 -17 -20 1 +38 0.6 0.7 49 16 -9 -37 1 +38 0.7 0.5 -4 -31 50 -25 2 +38 0.4 0.6 43 10 42 40 2 +38 0.9 0.2 -1 -25 44 -36 2 +38 0.1 0.5 -12 -37 0 -8 2 +38 0.8 0.1 41 -48 23 7 1 +38 0.5 0.2 37 -7 -32 -34 1 +38 0.9 0.8 -12 -41 25 12 2 +38 0.4 0.2 21 0 33 -24 1 +38 0.1 0.4 -33 -46 24 -41 2 +38 0.1 0.5 27 26 13 -36 1 +38 0.6 0.5 32 -15 46 -22 1 +38 0.3 0.9 36 -40 26 6 2 +38 0.6 0.5 0 -20 27 -39 2 +38 0.4 0.6 -8 -37 -5 -21 1 +38 0.9 0.8 -3 -47 47 -41 2 +38 0.8 0.9 43 10 29 -19 1 +38 0.7 0.1 40 -8 7 -47 1 +38 0.4 0.3 18 7 44 27 2 +38 0.6 0.2 50 -40 50 34 2 +38 0.4 0.9 22 -8 40 39 2 +38 0.4 0.7 24 4 21 -43 1 +38 0.9 0.3 28 -3 -12 -30 1 +38 0.6 0.1 45 -7 43 -5 1 +38 0.3 0.7 36 -21 -29 -41 1 +38 0.6 0.2 -15 -44 7 -5 2 +38 0.6 0.8 -11 -13 20 -17 2 +38 0.9 0.8 28 -16 -12 -27 1 +38 0.3 0.5 31 26 15 -48 1 +38 0.1 0.1 35 -47 46 -48 1 +38 0.7 0.9 7 -26 2 -43 2 +38 0.2 0.1 4 -45 -20 -37 1 +38 0.3 0.6 -35 -46 32 5 2 +38 0.5 0.1 -37 -41 -20 -40 2 +38 0.2 0.1 26 -36 3 -6 2 +38 0.8 0.6 -13 -19 -15 -34 2 +38 0.7 0.2 -7 -36 15 -38 1 +38 0.2 0.8 -4 -29 41 -13 2 +38 0.8 0.1 16 -15 44 29 2 +38 0.5 0.7 46 7 -15 -19 1 +38 0.5 0.1 30 29 29 9 1 +38 0.2 0.5 44 28 -24 -36 1 +38 0.1 0.5 38 10 18 -49 1 +38 0.9 0.2 -44 -50 41 12 2 +38 0.9 0.7 0 -34 -9 -49 1 +38 0.5 0.7 3 -31 31 -5 2 +38 0.2 0.5 0 -35 37 -12 2 +38 0.4 0.4 49 -20 -20 -46 1 +38 0.1 0.6 -9 -27 -43 -50 2 +38 0.7 0.8 27 -35 35 2 2 +38 0.6 0.4 -48 -49 -11 -14 2 +38 0.2 0.2 33 -9 19 6 2 +38 0.6 0.3 48 33 29 8 1 +38 0.9 0.5 -34 -39 39 3 2 +38 0.6 0.1 45 -7 0 -43 1 +38 0.3 0.2 24 16 -10 -48 1 +38 0.1 0.3 -21 -38 19 -35 2 +38 0.4 0.1 37 -23 23 -29 1 +38 0.8 0.2 -16 -39 49 8 2 +38 0.9 0.2 47 -31 47 40 2 +38 0.1 0.5 -8 -46 41 -45 2 +38 0.1 0.8 46 -4 47 10 2 +38 0.3 0.3 -13 -30 -37 -44 1 +38 0.1 0.8 49 35 -25 -39 1 +38 0.5 0.9 32 31 11 -12 1 +38 0.6 0.4 23 -3 -5 -17 1 +38 0.7 0.6 35 18 1 -29 1 +38 0.9 0.8 6 -22 12 -40 2 +38 0.8 0.3 18 -38 33 -23 1 +38 0.6 0.4 48 -43 0 -2 1 +38 0.6 0.3 -10 -22 40 25 2 +38 0.2 0.5 -3 -48 50 -3 2 +38 0.4 0.3 27 2 26 6 2 +38 0.6 0.4 10 -26 45 39 2 +38 0.1 0.3 46 -29 34 8 2 +38 0.8 0.2 42 19 13 -50 1 +38 0.5 0.3 25 -17 -22 -40 1 +38 0.6 0.1 24 -17 15 -18 1 +38 0.9 0.6 19 -11 26 -27 1 +38 0.5 0.9 -2 -25 22 8 2 +38 0.6 0.9 35 16 20 -42 1 +38 0.9 0.1 20 -12 -9 -28 1 +38 0.8 0.1 
-25 -31 29 -31 1 +38 0.4 0.2 15 -42 2 -19 1 +38 0.3 0.5 -31 -48 -2 -30 2 +38 0.2 0.7 -30 -44 19 18 2 +38 0.9 0.2 3 -17 33 -42 1 +38 0.7 0.8 37 -43 -26 -28 1 +38 0.6 0.3 23 -44 8 -6 1 +38 0.4 0.4 -11 -36 42 -42 2 +38 0.6 0.5 36 24 20 -38 1 +38 0.2 0.4 4 -5 17 -45 1 +38 0.2 0.6 29 -33 48 -12 2 +38 0.3 0.3 -21 -34 -10 -13 2 +38 0.1 0.4 48 -27 31 -30 2 +38 0.9 0.7 21 10 -8 -16 1 +38 0.4 0.8 23 11 1 -11 1 +38 0.6 0.7 -10 -23 28 -1 2 +38 0.8 0.2 -8 -9 -6 -25 1 +38 0.8 0.1 6 -21 -10 -21 1 +38 0.6 0.1 9 -35 7 -36 1 +38 0.4 0.7 12 -19 32 18 2 +38 0.4 0.6 -9 -36 0 -32 2 +38 0.6 0.6 12 -45 7 -31 2 +38 0.8 0.5 -17 -20 -5 -7 1 +38 0.3 0.3 11 -32 29 -49 1 +38 0.1 0.2 -44 -47 11 -39 2 +38 0.4 0.5 45 39 44 -16 1 +38 0.3 0.8 32 -50 48 13 2 +38 0.6 0.8 7 -24 48 -3 2 +39 0.2 0.1 48 24 7 -47 1 +39 0.7 0.8 44 -29 17 -50 1 +39 0.4 0.6 14 6 13 -42 1 +39 0.3 0.7 38 -27 -7 -39 2 +39 0.5 0.8 48 20 46 33 1 +39 0.4 0.3 32 -36 3 -39 1 +39 0.7 0.7 -22 -44 50 -26 2 +39 0.6 0.5 42 3 43 15 2 +39 0.4 0.1 13 -33 41 -47 2 +39 0.6 0.1 27 -23 28 11 2 +39 0.6 0.1 -13 -48 13 -8 1 +39 0.2 0.2 36 -33 3 -33 2 +39 0.4 0.4 21 -13 16 8 2 +39 0.3 0.2 1 -23 -27 -37 1 +39 0.4 0.7 -18 -28 -5 -23 1 +39 0.5 0.5 47 -20 -9 -22 1 +39 0.6 0.2 12 0 30 -47 1 +39 0.5 0.1 21 -5 40 -2 1 +39 0.6 0.8 13 -2 -23 -42 1 +39 0.3 0.6 42 31 12 -38 1 +39 0.6 0.8 -11 -29 19 -41 2 +39 0.3 0.7 6 -21 13 -31 2 +39 0.4 0.6 13 -30 29 17 2 +39 0.1 0.4 -44 -45 44 20 2 +39 0.6 0.7 24 -14 36 31 2 +39 0.5 0.7 -10 -18 25 -42 2 +39 0.2 0.9 23 -15 12 -7 2 +39 0.5 0.4 -5 -43 -16 -26 2 +39 0.9 0.4 19 9 24 -19 1 +39 0.5 0.7 -3 -46 48 16 2 +39 0.9 0.1 31 22 25 -1 1 +39 0.9 0.7 -39 -49 32 4 2 +39 0.3 0.2 25 13 -3 -14 1 +39 0.1 0.5 2 0 41 -27 2 +39 0.3 0.9 38 32 -6 -39 1 +39 0.1 0.3 -17 -19 30 -7 1 +39 0.4 0.1 -41 -46 -1 -31 2 +39 0.5 0.8 17 -30 3 -5 2 +39 0.7 0.2 -9 -25 49 15 2 +39 0.7 0.3 6 -9 47 -36 1 +39 0.7 0.2 45 -45 6 -21 1 +39 0.9 0.7 30 8 47 -43 1 +39 0.7 0.5 45 9 13 12 1 +39 0.8 0.5 29 4 50 -39 1 +39 0.4 0.8 18 -14 17 5 1 +39 0.1 0.3 -27 -45 18 -34 1 +39 0.7 0.1 6 2 17 -22 1 +39 0.2 0.5 31 28 48 9 1 +39 0.7 0.6 32 -22 15 -25 2 +39 0.3 0.9 -15 -37 27 19 2 +39 0.2 0.9 -17 -29 48 -11 2 +39 0.7 0.1 27 -26 28 -48 2 +39 0.8 0.1 -22 -23 16 -45 2 +39 0.5 0.8 50 38 49 -34 1 +39 0.1 0.8 -12 -24 33 -21 2 +39 0.6 0.8 15 -12 15 -45 1 +39 0.7 0.4 -4 -42 -23 -25 2 +39 0.5 0.4 23 -37 39 -4 2 +39 0.1 0.6 17 14 38 -7 2 +39 0.8 0.1 0 -4 29 11 2 +39 0.3 0.8 43 -38 -31 -42 1 +39 0.3 0.7 3 -22 46 10 2 +39 0.8 0.7 15 -8 28 -43 1 +39 0.7 0.1 48 -48 50 -11 1 +39 0.2 0.6 20 -15 12 -40 2 +39 0.8 0.1 42 41 -13 -41 1 +39 0.9 0.2 16 -15 -17 -24 2 +39 0.7 0.5 41 -19 44 -20 1 +39 0.7 0.1 -5 -49 40 24 2 +39 0.1 0.9 28 27 0 -2 1 +39 0.3 0.4 2 -47 35 -39 1 +39 0.3 0.7 -27 -50 -20 -22 1 +39 0.3 0.3 20 14 49 12 2 +39 0.1 0.8 24 -19 25 -17 2 +39 0.7 0.9 38 -30 28 -14 2 +39 0.9 0.5 14 -11 11 -1 1 +39 0.7 0.1 -25 -43 -23 -37 2 +39 0.8 0.3 13 10 -14 -18 1 +39 0.7 0.8 26 15 -33 -42 1 +39 0.4 0.9 46 41 45 -22 1 +39 0.8 0.1 25 -49 36 -42 1 +39 0.9 0.3 -4 -44 -3 -48 2 +39 0.9 0.1 40 -29 10 3 1 +39 0.4 0.1 20 -49 4 -16 1 +39 0.9 0.9 1 -36 29 -6 2 +39 0.1 0.6 22 -14 44 -7 2 +39 0.6 0.4 4 -21 11 -37 2 +39 0.9 0.2 36 -12 -16 -21 1 +39 0.6 0.4 -36 -50 11 -17 2 +39 0.5 0.7 24 -7 25 -24 2 +39 0.7 0.3 -12 -37 39 -17 2 +39 0.2 0.9 2 -7 45 -13 2 +39 0.8 0.8 38 -18 33 -32 2 +39 0.7 0.4 -8 -48 -2 -11 2 +39 0.8 0.5 17 3 25 5 1 +39 0.2 0.8 33 5 37 -41 1 +39 0.7 0.6 19 5 14 -45 1 +39 0.4 0.3 50 -42 -19 -26 1 +39 0.9 0.4 -25 -28 30 -16 2 +39 0.3 0.1 4 -8 46 42 2 +39 0.8 0.9 46 44 38 36 2 +39 0.4 0.1 28 -19 25 -35 1 +39 0.5 0.6 50 
30 -42 -48 1 +39 0.1 0.6 15 -3 2 -37 1 +39 0.5 0.9 43 -6 -24 -42 2 +39 0.2 0.5 29 -44 19 -6 2 +39 0.4 0.5 43 -35 -18 -34 1 +39 0.4 0.6 12 -18 -23 -42 1 +39 0.9 0.7 38 -11 -11 -47 1 +39 0.8 0.2 47 36 42 -34 1 +39 0.4 0.7 31 -50 9 6 2 +39 0.5 0.4 14 1 19 16 2 +39 0.2 0.8 -15 -37 39 -24 2 +39 0.9 0.4 1 -3 42 -3 1 +39 0.8 0.2 15 -20 -6 -19 1 +39 0.2 0.5 22 4 24 -17 2 +39 0.5 0.1 35 -45 -24 -33 1 +39 0.2 0.4 31 -27 41 -5 2 +39 0.2 0.8 50 -18 0 -11 2 +39 0.8 0.3 37 26 27 -15 1 +39 0.8 0.7 35 -15 50 42 2 +39 0.6 0.9 47 -29 42 -16 2 +39 0.1 0.8 -30 -38 -8 -32 1 +39 0.6 0.4 -1 -45 -1 -22 1 +39 0.9 0.5 31 -13 45 -8 1 +39 0.3 0.3 -3 -27 -12 -40 2 +39 0.9 0.4 -18 -31 35 -11 2 +39 0.8 0.3 36 6 -36 -49 1 +39 0.2 0.1 5 -40 -16 -29 1 +39 0.5 0.3 26 -9 37 -10 2 +39 0.9 0.2 9 -29 22 18 2 +39 0.5 0.8 -27 -49 6 -22 1 +39 0.6 0.2 -8 -50 -16 -50 1 +39 0.9 0.8 -19 -33 -1 -4 2 +39 0.2 0.1 3 -4 48 -38 2 +39 0.5 0.1 42 36 40 -43 1 +39 0.4 0.7 -13 -48 16 11 2 +39 0.9 0.9 11 -21 42 -4 2 +39 0.7 0.2 9 -39 33 -33 1 +39 0.7 0.9 21 4 8 0 2 +39 0.4 0.8 -32 -42 45 -17 2 +39 0.4 0.6 34 -20 30 -44 2 +39 0.5 0.8 29 -24 48 23 2 +39 0.3 0.8 27 6 -8 -12 1 +39 0.7 0.7 -16 -29 -34 -49 1 +39 0.3 0.8 38 -34 -17 -24 1 +39 0.5 0.1 -39 -45 48 -23 2 +39 0.5 0.7 -13 -24 -21 -34 2 +39 0.6 0.3 3 1 21 -34 2 +39 0.8 0.2 4 -8 -3 -16 1 +39 0.8 0.1 48 -12 21 -50 2 +39 0.5 0.2 -9 -25 50 -6 1 +39 0.9 0.8 -24 -36 21 -28 2 +39 0.5 0.9 3 -50 47 -1 2 +39 0.8 0.9 50 -31 14 -33 1 +39 0.6 0.4 -17 -27 49 -21 2 +39 0.7 0.9 -33 -48 -6 -43 2 +39 0.8 0.9 46 -37 -28 -50 1 +39 0.2 0.1 9 0 30 3 2 +39 0.2 0.7 3 -50 50 37 2 +39 0.6 0.9 43 10 1 -21 1 +39 0.2 0.7 -11 -14 35 -45 2 +39 0.3 0.7 -7 -34 36 23 2 +39 0.3 0.8 13 -14 42 -39 2 +39 0.4 0.1 -24 -37 29 -28 1 +39 0.8 0.8 -29 -30 22 11 2 +39 0.8 0.4 31 -36 49 25 2 +39 0.8 0.6 35 22 -1 -12 1 +39 0.3 0.9 33 -23 -25 -37 2 +39 0.9 0.8 37 -14 6 -15 1 +39 0.9 0.2 -12 -47 33 -17 2 +39 0.4 0.2 35 -25 47 2 1 +39 0.4 0.4 -19 -36 17 -18 2 +39 0.1 0.2 30 -45 30 17 2 +39 0.1 0.5 45 29 18 -48 2 +39 0.6 0.1 31 -23 39 -40 1 +39 0.2 0.1 30 27 11 -22 1 +39 0.8 0.1 -1 -33 32 20 2 +39 0.6 0.7 2 -39 38 -5 2 +39 0.3 0.9 -1 -32 10 8 2 +39 0.5 0.2 30 -47 43 -40 1 +39 0.8 0.9 -26 -44 -3 -41 2 +39 0.1 0.3 49 1 41 -27 1 +39 0.1 0.8 1 -41 40 22 2 +39 0.5 0.1 16 -8 19 -11 1 +39 0.1 0.2 4 -12 -21 -24 2 +39 0.4 0.4 50 -21 -3 -42 1 +39 0.6 0.7 6 -16 32 -38 2 +39 0.3 0.3 29 -3 44 1 2 +39 0.5 0.6 32 -30 37 -16 2 +39 0.5 0.9 28 12 14 -20 2 +39 0.3 0.6 27 13 50 -40 1 +39 0.6 0.1 22 6 40 -15 1 +39 0.2 0.8 0 -49 45 -18 2 +39 0.9 0.2 50 -24 48 20 2 +39 0.9 0.5 35 -6 48 -44 1 +39 0.6 0.3 48 22 40 -4 1 +39 0.9 0.7 -40 -42 -22 -31 2 +39 0.3 0.7 45 35 7 -27 2 +39 0.8 0.5 14 -6 12 -20 1 +39 0.5 0.3 5 -26 50 25 2 +39 0.2 0.8 -19 -40 29 9 2 +39 0.4 0.3 40 8 -16 -42 1 +39 0.8 0.9 -25 -40 -19 -38 2 +39 0.3 0.7 50 19 29 -17 1 +39 0.1 0.7 22 8 6 -13 1 +39 0.9 0.4 -21 -33 42 3 2 +39 0.1 0.3 26 -33 13 -31 2 +39 0.4 0.4 42 -47 14 -9 1 +39 0.3 0.2 0 -12 2 -7 2 +39 0.3 0.9 -3 -26 41 11 2 +39 0.1 0.6 23 1 31 22 2 +39 0.9 0.1 -11 -33 44 20 2 +39 0.4 0.9 47 32 -2 -8 1 +39 0.2 0.8 44 37 26 -30 2 +39 0.7 0.3 37 -13 30 11 2 +39 0.8 0.8 25 -22 27 -9 1 +39 0.5 0.4 48 -10 24 17 1 +39 0.8 0.8 31 -23 -8 -26 1 +39 0.7 0.4 24 -47 36 35 2 +39 0.3 0.6 39 5 1 -27 1 +39 0.8 0.9 27 4 45 29 2 +39 0.5 0.8 31 6 33 -18 1 +39 0.6 0.4 40 -16 23 -25 1 +39 0.1 0.2 22 -40 27 -34 2 +39 0.4 0.8 49 9 -6 -20 1 +39 0.2 0.7 -10 -20 -7 -38 2 +39 0.8 0.3 -9 -13 50 -4 2 +39 0.6 0.7 -25 -28 -6 -45 1 +39 0.5 0.3 -20 -26 -5 -31 2 +39 0.8 0.4 25 8 -30 -33 1 +39 0.7 0.7 30 -2 26 -18 2 +39 0.7 0.3 27 21 -15 
-37 1 +39 0.6 0.1 40 -2 -10 -15 1 +39 0.6 0.4 7 -34 43 -10 2 +39 0.5 0.1 47 -21 44 18 1 +39 0.2 0.9 20 0 18 11 2 +39 0.2 0.9 9 -48 48 30 2 +39 0.7 0.9 -13 -15 40 27 2 +39 0.5 0.7 -18 -45 -11 -26 2 +39 0.6 0.4 -14 -49 -8 -11 2 +39 0.5 0.2 49 47 48 -10 1 +39 0.9 0.8 20 -46 3 -43 1 +39 0.1 0.7 -17 -48 50 17 2 +39 0.4 0.8 -35 -41 39 -43 2 +39 0.6 0.5 43 17 -42 -50 1 +39 0.1 0.1 14 -39 48 -22 1 +39 0.5 0.1 40 -4 -7 -12 1 +39 0.6 0.7 48 -37 15 -24 1 +39 0.6 0.6 23 -42 14 -32 1 +39 0.4 0.4 8 -40 37 25 2 +39 0.6 0.9 31 -1 33 -9 2 +39 0.7 0.2 -29 -49 -14 -38 1 +39 0.9 0.6 38 -36 12 -11 1 +39 0.4 0.4 -4 -49 30 -32 2 +39 0.7 0.3 34 -8 43 0 2 +39 0.7 0.8 25 15 -28 -48 1 +39 0.7 0.4 -33 -50 12 -47 1 +39 0.8 0.2 41 1 -9 -10 1 +39 0.8 0.8 48 35 49 -46 1 +39 0.5 0.6 24 23 24 22 2 +39 0.7 0.6 1 -6 5 -14 1 +39 0.1 0.9 30 26 -19 -50 1 +39 0.1 0.1 6 5 2 -29 1 +39 0.6 0.8 30 -17 38 12 2 +39 0.8 0.7 18 -30 4 -24 1 +39 0.4 0.8 48 30 20 -43 2 +39 0.5 0.3 39 -41 2 -32 1 +39 0.7 0.5 6 -42 7 -20 2 +39 0.6 0.5 1 -28 37 28 2 +39 0.1 0.9 9 -13 44 38 2 +39 0.6 0.7 41 11 13 -27 1 +39 0.7 0.2 -24 -47 11 1 2 +39 0.8 0.6 0 -12 15 -6 2 +39 0.3 0.7 18 -44 35 33 2 +39 0.6 0.7 0 -35 -24 -42 1 +39 0.7 0.9 13 -4 13 -9 2 +39 0.6 0.9 42 -34 -18 -21 1 +39 0.8 0.2 43 -25 -22 -27 1 +39 0.3 0.3 -3 -15 -25 -49 1 +39 0.5 0.2 39 -40 -24 -47 1 +39 0.2 0.2 -13 -19 10 -36 1 +39 0.3 0.5 23 2 -25 -26 1 +39 0.4 0.6 46 44 38 -14 1 +39 0.7 0.8 19 6 39 37 2 +39 0.3 0.6 -13 -41 24 -35 2 +39 0.6 0.9 2 -43 5 4 1 +39 0.1 0.6 -9 -28 -33 -50 2 +39 0.8 0.1 -34 -49 13 -46 1 +39 0.9 0.8 -6 -14 41 3 2 +39 0.7 0.5 -7 -33 20 -7 2 +39 0.4 0.8 24 -31 24 -4 2 +39 0.1 0.3 42 -28 25 12 2 +39 0.9 0.4 44 2 -44 -46 1 +39 0.9 0.4 23 2 18 -8 2 +39 0.1 0.8 -14 -23 -12 -50 1 +39 0.8 0.9 25 17 0 -4 1 +39 0.6 0.7 42 -35 30 -24 2 +39 0.7 0.1 39 9 48 20 2 +39 0.3 0.9 13 -32 23 -23 2 +40 0.3 0.1 -1 -9 -1 -8 2 +40 0.7 0.5 40 1 30 -15 1 +40 0.7 0.7 27 16 47 8 2 +40 0.1 0.1 28 13 40 -37 1 +40 0.8 0.2 31 -5 10 -13 1 +40 0.8 0.7 -34 -42 50 -23 2 +40 0.1 0.6 -12 -48 20 11 2 +40 0.8 0.2 36 -14 -18 -37 1 +40 0.4 0.7 2 -9 -32 -47 1 +40 0.7 0.3 42 -5 18 -3 1 +40 0.8 0.4 17 6 3 -50 1 +40 0.7 0.4 38 20 42 1 1 +40 0.7 0.5 20 -11 46 25 2 +40 0.8 0.3 -2 -5 49 -39 2 +40 0.1 0.5 19 -13 -20 -32 1 +40 0.9 0.8 11 -12 4 -5 1 +40 0.9 0.3 6 -1 50 16 2 +40 0.5 0.1 9 -24 26 -19 1 +40 0.7 0.1 40 -29 -9 -15 1 +40 0.8 0.9 -34 -35 29 -43 2 +40 0.1 0.9 3 -10 37 -47 2 +40 0.6 0.8 5 -49 17 -27 2 +40 0.1 0.5 25 -42 -3 -28 1 +40 0.2 0.4 4 -49 16 -50 2 +40 0.5 0.7 -5 -19 37 -48 2 +40 0.7 0.4 5 -18 37 8 2 +40 0.3 0.5 16 -48 -40 -42 1 +40 0.1 0.1 49 -50 2 -45 1 +40 0.8 0.7 11 -23 4 -17 1 +40 0.4 0.5 -33 -44 -9 -17 2 +40 0.1 0.7 -3 -22 -26 -44 1 +40 0.6 0.1 50 41 -6 -36 1 +40 0.2 0.5 29 -25 34 -31 2 +40 0.1 0.6 22 -41 35 -46 2 +40 0.1 0.8 -3 -4 -22 -49 1 +40 0.3 0.5 7 -12 -33 -37 1 +40 0.3 0.1 24 17 46 40 2 +40 0.3 0.9 33 -7 6 -39 1 +40 0.5 0.2 37 -14 44 15 2 +40 0.6 0.4 38 -6 49 -21 1 +40 0.2 0.4 -18 -22 -13 -29 2 +40 0.3 0.7 42 -24 -6 -24 1 +40 0.9 0.1 -9 -44 44 13 2 +40 0.9 0.3 21 -47 21 -10 1 +40 0.6 0.3 46 -6 14 6 2 +40 0.1 0.5 12 6 16 -1 2 +40 0.5 0.2 2 -34 36 13 2 +40 0.5 0.7 21 -31 -14 -22 1 +40 0.5 0.9 10 3 32 -37 2 +40 0.5 0.6 44 -22 -10 -38 1 +40 0.5 0.6 37 -26 29 -20 2 +40 0.7 0.6 27 -28 -17 -29 1 +40 0.6 0.5 4 -20 30 0 2 +40 0.6 0.2 39 1 38 12 2 +40 0.6 0.2 20 -27 -29 -37 1 +40 0.6 0.2 24 -5 -21 -39 1 +40 0.7 0.6 0 -25 10 3 2 +40 0.3 0.3 -18 -46 42 -10 2 +40 0.6 0.2 31 19 -34 -42 1 +40 0.3 0.2 2 -38 45 12 2 +40 0.1 0.3 44 -6 -20 -23 1 +40 0.1 0.2 23 -23 -30 -33 1 +40 0.1 0.1 32 -42 32 10 2 +40 0.9 
0.7 46 22 22 18 1 +40 0.8 0.4 44 -6 1 -31 1 +40 0.5 0.2 -19 -21 -15 -29 1 +40 0.8 0.6 11 -38 -23 -36 1 +40 0.3 0.7 13 -31 28 -14 2 +40 0.2 0.7 49 17 30 -25 1 +40 0.3 0.9 30 -38 -3 -37 2 +40 0.9 0.5 -24 -30 32 -18 2 +40 0.6 0.1 -16 -21 -13 -31 1 +40 0.2 0.6 21 -38 14 -19 2 +40 0.9 0.2 -10 -21 45 -2 2 +40 0.5 0.2 47 6 -9 -34 1 +40 0.4 0.7 38 18 19 -3 1 +40 0.2 0.9 2 -47 12 -18 2 +40 0.5 0.2 50 16 -33 -38 1 +40 0.5 0.1 10 -8 6 -10 1 +40 0.5 0.5 27 11 6 -49 1 +40 0.3 0.3 38 -20 49 3 2 +40 0.1 0.4 13 -1 20 -33 1 +40 0.8 0.1 47 -13 43 39 2 +40 0.7 0.1 -12 -41 -19 -42 1 +40 0.4 0.3 -6 -48 42 -36 2 +40 0.9 0.5 19 -36 43 -9 1 +40 0.1 0.6 23 -2 -22 -33 1 +40 0.1 0.8 30 -14 5 -1 2 +40 0.1 0.5 7 -10 24 3 2 +40 0.6 0.6 48 -22 43 -23 2 +40 0.7 0.9 16 -39 33 5 2 +40 0.1 0.2 5 -5 -14 -24 1 +40 0.7 0.7 -31 -39 2 -47 2 +40 0.1 0.7 -33 -44 38 4 2 +40 0.3 0.8 -1 -15 24 -7 2 +40 0.8 0.9 6 -44 38 -36 2 +40 0.5 0.4 -6 -26 -34 -38 2 +40 0.3 0.7 18 9 40 2 2 +40 0.4 0.9 -46 -47 47 -24 2 +40 0.5 0.4 47 -18 17 -1 1 +40 0.3 0.3 -7 -32 -8 -39 1 +40 0.4 0.5 42 -3 -12 -37 1 +40 0.5 0.3 34 23 -18 -35 1 +40 0.2 0.9 -17 -41 17 0 2 +40 0.3 0.5 34 -27 32 -36 2 +40 0.9 0.4 48 12 41 37 1 +40 0.8 0.8 30 19 27 -41 1 +40 0.5 0.6 -24 -38 38 34 2 +40 0.8 0.6 48 -38 5 -48 1 +40 0.9 0.9 -15 -23 32 23 2 +40 0.3 0.3 -9 -43 39 -27 2 +40 0.6 0.4 15 -23 24 -50 1 +40 0.9 0.1 14 -28 21 -22 1 +40 0.5 0.7 5 -43 -9 -26 1 +40 0.8 0.7 36 -5 -17 -36 1 +40 0.2 0.1 37 -20 -21 -49 1 +40 0.5 0.9 6 -24 14 -40 2 +40 0.6 0.3 9 5 36 -26 1 +40 0.7 0.3 45 -34 -25 -37 1 +40 0.9 0.6 -33 -34 49 17 2 +40 0.1 0.4 -21 -42 47 -36 2 +40 0.9 0.2 4 -42 -25 -35 1 +40 0.6 0.5 32 -4 47 3 2 +40 0.3 0.7 10 -46 14 -22 2 +40 0.2 0.9 49 11 30 -37 1 +40 0.8 0.8 27 12 42 -2 1 +40 0.1 0.2 27 -12 15 -39 1 +40 0.3 0.7 30 6 50 14 2 +40 0.3 0.3 -4 -46 17 10 2 +40 0.9 0.3 31 -30 35 -21 1 +40 0.8 0.4 45 -48 -14 -37 1 +40 0.8 0.2 5 -13 -28 -29 1 +40 0.3 0.8 22 -19 -4 -26 1 +40 0.9 0.7 40 -43 -2 -5 1 +40 0.5 0.6 49 -34 -24 -26 1 +40 0.8 0.6 -35 -39 29 -36 2 +40 0.1 0.1 50 -50 46 -9 2 +40 0.3 0.8 13 -10 -10 -33 1 +40 0.5 0.6 33 -34 31 19 2 +40 0.1 0.6 35 -13 -41 -49 1 +40 0.8 0.6 33 1 41 18 2 +40 0.2 0.2 49 31 13 -16 1 +40 0.5 0.8 0 -43 10 3 2 +40 0.6 0.5 35 -19 7 -22 1 +40 0.4 0.2 3 -20 41 31 2 +40 0.1 0.3 -3 -10 34 -35 2 +40 0.7 0.5 15 -35 47 -48 2 +40 0.6 0.6 19 -21 21 0 2 +40 0.5 0.4 15 -20 13 -42 1 +40 0.3 0.6 26 17 -11 -37 1 +40 0.8 0.3 -1 -46 9 -26 1 +40 0.1 0.1 46 -2 16 -30 1 +40 0.4 0.7 10 -28 46 3 2 +40 0.3 0.4 -38 -39 10 3 2 +40 0.6 0.9 10 -33 -42 -44 1 +40 0.3 0.9 3 -46 -40 -42 1 +40 0.8 0.2 2 -33 -7 -44 1 +40 0.2 0.8 15 -39 4 -22 2 +40 0.3 0.1 23 3 -10 -34 1 +40 0.1 0.6 49 -14 24 5 2 +40 0.9 0.5 46 29 -7 -30 1 +40 0.8 0.6 -30 -39 -5 -19 2 +40 0.8 0.5 41 -5 -9 -17 1 +40 0.1 0.9 17 -16 24 16 2 +40 0.5 0.7 -38 -39 11 -24 2 +40 0.6 0.9 33 -43 42 -42 2 +40 0.1 0.4 -28 -42 44 -6 2 +40 0.2 0.4 -5 -22 -27 -43 1 +40 0.7 0.8 39 -4 -9 -20 1 +40 0.4 0.7 33 -21 25 21 2 +40 0.8 0.9 30 15 44 -15 2 +40 0.5 0.1 13 7 16 -15 1 +40 0.8 0.8 8 -40 32 5 2 +40 0.2 0.3 -40 -47 37 22 2 +40 0.4 0.2 -11 -34 1 -12 2 +40 0.3 0.6 42 15 44 7 2 +40 0.3 0.5 -15 -34 32 -43 2 +40 0.6 0.7 -16 -29 23 18 2 +40 0.6 0.9 -6 -31 -16 -29 2 +40 0.6 0.5 10 7 19 -13 1 +40 0.1 0.4 31 -30 -22 -28 1 +40 0.2 0.1 28 10 -9 -41 1 +40 0.6 0.4 -24 -33 50 10 2 +40 0.2 0.1 37 15 38 -50 1 +40 0.3 0.1 -8 -29 50 -41 1 +40 0.1 0.9 41 5 -35 -49 1 +40 0.6 0.9 -2 -27 42 -19 2 +40 0.4 0.4 38 -38 -26 -43 1 +40 0.5 0.2 9 -12 47 -46 1 +40 0.7 0.9 46 -45 -6 -16 1 +40 0.8 0.8 33 16 20 -5 1 +40 0.1 0.8 46 1 -9 -50 1 +40 0.3 0.1 49 -4 -19 -38 1 
+40 0.4 0.3 7 -20 38 -45 1 +40 0.2 0.4 34 24 17 -47 1 +40 0.9 0.3 16 -8 -24 -49 1 +40 0.3 0.9 40 32 -2 -9 1 +40 0.4 0.1 -24 -37 43 -48 2 +40 0.3 0.5 5 -1 -32 -44 1 +40 0.3 0.8 35 33 19 12 1 +40 0.4 0.4 46 -20 25 -44 1 +40 0.2 0.2 -31 -37 15 6 2 +40 0.4 0.6 4 -15 -30 -42 1 +40 0.9 0.6 -34 -44 -34 -50 2 +40 0.2 0.2 49 31 -21 -27 1 +40 0.4 0.4 2 -24 48 -30 2 +40 0.9 0.5 33 32 49 11 1 +40 0.5 0.9 19 -19 46 31 2 +40 0.4 0.2 33 30 -26 -27 1 +40 0.2 0.2 10 -39 47 5 2 +40 0.6 0.2 50 -9 49 2 1 +40 0.8 0.9 26 0 11 -17 1 +40 0.4 0.2 25 -46 34 33 2 +40 0.6 0.7 -21 -23 43 30 2 +40 0.3 0.5 -22 -36 27 7 2 +40 0.2 0.9 -9 -19 1 -41 2 +40 0.9 0.4 21 10 -35 -44 1 +40 0.3 0.3 19 0 25 -37 1 +40 0.7 0.6 21 1 10 -21 1 +40 0.7 0.2 11 1 -10 -41 1 +40 0.7 0.4 22 -1 36 -39 1 +40 0.6 0.5 -2 -19 37 -40 2 +40 0.9 0.6 11 -20 42 -3 2 +40 0.7 0.5 42 -45 28 -35 1 +40 0.3 0.6 8 -12 37 -47 2 +40 0.5 0.7 43 23 -2 -18 1 +40 0.5 0.3 -34 -50 -42 -44 2 +40 0.2 0.6 -3 -26 -15 -22 1 +40 0.3 0.8 31 -28 35 -46 2 +40 0.5 0.5 32 -11 -8 -25 1 +40 0.5 0.4 44 -3 23 -21 1 +40 0.3 0.9 -23 -40 28 7 2 +40 0.9 0.6 23 -48 16 -9 1 +40 0.2 0.8 36 0 28 3 2 +40 0.6 0.1 -6 -39 49 30 2 +40 0.7 0.9 32 9 44 -3 2 +40 0.6 0.6 49 -13 -17 -21 1 +40 0.7 0.5 16 -32 11 0 1 +40 0.6 0.6 21 -26 44 24 2 +40 0.2 0.2 34 -47 19 11 2 +40 0.4 0.5 48 -25 -17 -42 1 +40 0.2 0.8 29 -17 44 -20 2 +40 0.7 0.8 11 -5 19 14 2 +40 0.2 0.7 -20 -44 6 -8 2 +40 0.4 0.2 24 22 -25 -41 1 +40 0.6 0.6 -15 -37 1 -1 2 +40 0.2 0.2 14 -1 22 -29 1 +40 0.1 0.7 32 5 28 -19 2 +40 0.8 0.9 -36 -37 -7 -31 2 +40 0.9 0.5 12 -26 19 -12 2 +40 0.2 0.9 4 -49 49 -15 2 +40 0.8 0.8 24 -34 22 18 2 +40 0.6 0.7 31 7 -20 -29 1 +40 0.5 0.8 -10 -40 -17 -45 2 +40 0.8 0.4 34 -39 -18 -21 1 +40 0.5 0.3 30 19 6 -13 1 +40 0.4 0.4 18 8 -7 -48 1 +40 0.2 0.9 -30 -49 21 16 2 +40 0.8 0.6 -12 -21 12 -42 2 +40 0.5 0.3 -22 -41 -8 -35 2 +40 0.2 0.6 -7 -35 3 -11 2 +40 0.7 0.4 46 -25 0 -43 1 +40 0.9 0.3 1 -1 48 -49 1 +40 0.4 0.9 43 -10 -23 -49 1 +40 0.3 0.2 2 -26 47 -11 2 +40 0.4 0.9 -19 -38 30 -21 2 +40 0.3 0.1 37 6 47 18 2 +40 0.9 0.2 3 -12 -23 -24 1 +40 0.8 0.3 50 45 46 16 1 +40 0.1 0.2 47 35 10 -19 1 +40 0.9 0.5 48 -28 49 27 2 +40 0.3 0.4 28 -28 48 28 2 +40 0.9 0.3 32 20 37 -37 1 +40 0.4 0.1 -16 -18 31 -6 2 +40 0.8 0.9 -8 -50 14 -20 2 +40 0.2 0.3 49 -2 38 -45 1 +40 0.4 0.7 36 33 -30 -31 1 +40 0.7 0.1 20 -7 -15 -41 1 +40 0.1 0.4 9 -43 10 -24 2 +40 0.3 0.2 31 -6 36 -22 1 +40 0.9 0.8 -18 -25 37 -37 2 +40 0.9 0.3 47 3 -4 -6 1 +40 0.4 0.6 38 9 29 10 1 +40 0.5 0.4 43 -24 39 -35 1 +40 0.6 0.2 29 -43 26 -44 1 +40 0.7 0.4 1 -34 37 22 2 +40 0.5 0.7 38 26 32 9 1 +40 0.2 0.9 22 -48 36 -48 2 +40 0.2 0.1 12 6 50 42 2 +40 0.2 0.9 29 -24 45 16 2 +40 0.5 0.9 -26 -39 -10 -23 2 +40 0.6 0.7 8 -37 46 13 2 +40 0.8 0.5 36 -25 32 -36 2 +40 0.2 0.7 29 12 -3 -41 1 +40 0.4 0.6 50 -47 34 25 2 +40 0.4 0.3 0 -39 24 15 2 +40 0.8 0.3 15 -46 27 -4 1 +40 0.1 0.9 46 25 39 -25 1 +40 0.6 0.2 20 4 30 23 2 +40 0.9 0.8 39 33 49 -1 1 +41 0.8 0.3 49 18 49 -9 1 +41 0.2 0.1 -49 -50 -2 -24 2 +41 0.8 0.7 3 -20 18 4 2 +41 0.8 0.5 31 -7 -18 -43 1 +41 0.7 0.6 35 12 32 1 1 +41 0.1 0.9 44 -41 33 -11 2 +41 0.2 0.2 -6 -24 26 -45 2 +41 0.9 0.3 11 -40 16 -15 1 +41 0.9 0.8 41 -46 9 2 1 +41 0.1 0.7 44 -41 -23 -49 1 +41 0.6 0.1 50 11 44 40 2 +41 0.9 0.6 1 -38 -24 -36 1 +41 0.8 0.8 -20 -45 50 19 2 +41 0.5 0.4 21 17 33 18 2 +41 0.7 0.5 -15 -24 48 0 2 +41 0.9 0.5 -6 -34 8 -20 2 +41 0.3 0.5 38 -24 26 6 2 +41 0.9 0.2 1 -29 -21 -22 1 +41 0.2 0.4 42 16 14 1 1 +41 0.8 0.8 -9 -26 43 24 2 +41 0.9 0.1 49 -8 29 6 1 +41 0.5 0.7 27 -6 7 -41 2 +41 0.9 0.6 -40 -43 22 -2 2 +41 0.1 0.6 44 6 26 -36 1 
+41 0.6 0.8 -4 -14 34 -26 2
+41 0.7 0.9 17 14 -3 -10 1
+41 0.5 0.9 46 -25 35 -9 2
+41 0.1 0.8 -27 -43 48 -47 2
+41 0.7 0.7 23 -28 0 -18 1
+41 0.2 0.7 44 -5 16 -22 2
+41 0.1 0.4 -12 -27 37 -23 2
+41 0.5 0.5 16 -49 28 -48 1
[remaining example-data rows for subjIDs 41-49 continue in the same format: one `+`-prefixed row of 8 whitespace-separated values per line]
1 -40 48 -34 2 +49 0.1 0.7 -12 -34 45 -45 2 +49 0.1 0.4 9 -29 7 6 2 +49 0.2 0.4 21 14 0 -11 1 +49 0.8 0.6 -13 -37 36 -42 2 +49 0.4 0.1 38 5 17 -45 1 +49 0.6 0.7 47 37 -34 -44 1 +49 0.7 0.7 -13 -41 48 39 2 +49 0.6 0.2 32 -20 -9 -18 1 +49 0.8 0.6 43 -24 5 -5 1 +49 0.2 0.2 8 -20 12 -27 1 +49 0.3 0.8 13 -42 20 15 2 +49 0.7 0.9 -4 -13 24 -50 2 +49 0.5 0.9 40 31 50 34 2 +49 0.5 0.3 -39 -42 -35 -39 1 +49 0.6 0.7 -12 -31 25 11 2 +49 0.2 0.5 -6 -29 9 -36 2 +49 0.6 0.8 7 -43 -7 -49 2 +49 0.4 0.3 50 -28 26 16 2 +49 0.5 0.6 37 -2 43 8 2 +49 0.3 0.7 35 9 -26 -38 1 +49 0.5 0.5 47 38 -8 -24 1 +49 0.8 0.1 30 23 30 -47 1 +49 0.1 0.2 24 12 38 -14 1 +49 0.6 0.6 -10 -20 -35 -47 1 +49 0.7 0.4 1 -35 -6 -44 1 +49 0.7 0.3 36 -19 -38 -44 1 +49 0.6 0.5 8 -22 4 -46 1 +49 0.9 0.3 35 27 1 -3 1 +49 0.9 0.2 1 -10 47 35 2 +49 0.9 0.1 13 5 43 11 2 +49 0.3 0.6 10 -18 -22 -40 1 +49 0.3 0.2 -14 -40 29 9 2 +49 0.4 0.3 30 21 48 16 1 +49 0.9 0.2 42 -50 13 3 1 +49 0.3 0.9 38 19 20 1 1 +49 0.6 0.9 -10 -27 48 -45 2 +49 0.2 0.1 22 1 -7 -24 1 +49 0.9 0.8 50 -42 38 12 1 +49 0.2 0.9 -27 -41 0 -50 2 +49 0.2 0.7 -19 -21 -29 -43 2 +49 0.6 0.7 -15 -43 -9 -48 1 +49 0.3 0.2 36 11 -38 -40 1 +49 0.8 0.5 49 20 -18 -22 1 +49 0.1 0.5 22 -5 -5 -9 1 +49 0.2 0.4 10 -25 7 -42 1 +49 0.3 0.2 27 -20 48 -36 1 +49 0.8 0.5 24 -1 42 -31 2 +49 0.7 0.9 12 -4 20 14 2 +49 0.4 0.3 22 -14 -3 -28 1 +49 0.8 0.5 30 -23 1 -12 1 +49 0.4 0.4 -11 -44 23 -15 2 +49 0.5 0.8 13 -42 21 -12 2 +49 0.5 0.5 27 -36 -5 -22 1 +49 0.6 0.4 48 19 5 -39 1 +49 0.6 0.7 25 -14 -35 -48 1 +49 0.1 0.3 48 -22 -3 -6 2 +49 0.4 0.2 -11 -39 -34 -43 1 +49 0.2 0.3 -1 -24 -1 -49 1 +49 0.3 0.8 -29 -43 7 -9 2 +49 0.7 0.5 33 18 -15 -34 1 +49 0.2 0.5 30 -33 26 20 2 +49 0.2 0.7 29 -41 44 3 2 +49 0.5 0.5 43 37 46 20 1 +49 0.9 0.6 40 -40 -32 -46 1 +49 0.7 0.7 26 0 12 -40 1 +49 0.7 0.9 30 13 10 -24 1 +49 0.4 0.5 -19 -29 9 -9 2 +49 0.2 0.5 -26 -29 10 3 2 +49 0.2 0.4 -12 -39 16 -4 2 +49 0.9 0.8 -4 -38 -23 -41 1 +50 0.7 0.4 12 4 39 -2 1 +50 0.4 0.4 18 -27 17 -10 2 +50 0.3 0.4 -6 -21 -15 -38 1 +50 0.6 0.4 32 1 -37 -50 1 +50 0.1 0.6 45 31 22 -39 1 +50 0.3 0.7 -45 -49 8 3 2 +50 0.2 0.1 12 -4 -37 -49 1 +50 0.3 0.3 -9 -46 -6 -44 1 +50 0.1 0.6 33 -43 42 5 2 +50 0.9 0.1 45 -24 49 -12 1 +50 0.9 0.8 23 -16 -18 -25 1 +50 0.9 0.5 42 -35 26 -13 1 +50 0.4 0.4 19 -41 37 -39 2 +50 0.1 0.8 -29 -37 -37 -40 1 +50 0.2 0.4 44 22 33 10 1 +50 0.6 0.6 -36 -50 37 22 2 +50 0.2 0.1 9 -42 41 -22 2 +50 0.7 0.4 43 -31 -33 -46 1 +50 0.5 0.4 -20 -32 36 -39 2 +50 0.5 0.2 32 -17 37 13 2 +50 0.9 0.2 7 -8 48 7 2 +50 0.1 0.5 35 -6 32 -7 2 +50 0.8 0.5 36 -27 -24 -32 1 +50 0.4 0.9 17 -47 -34 -39 1 +50 0.3 0.4 11 -45 -38 -49 1 +50 0.6 0.3 -7 -8 49 10 2 +50 0.8 0.5 5 -18 35 6 2 +50 0.6 0.8 17 -11 25 -30 2 +50 0.1 0.8 48 -29 47 40 2 +50 0.6 0.4 44 -2 48 -15 1 +50 0.1 0.7 30 -1 -3 -25 1 +50 0.7 0.8 44 -10 -4 -26 1 +50 0.8 0.2 17 4 -13 -21 1 +50 0.9 0.3 11 -33 22 -15 1 +50 0.7 0.2 -33 -43 39 0 2 +50 0.7 0.6 25 -25 38 -32 1 +50 0.1 0.4 -2 -29 12 -40 2 +50 0.5 0.2 -28 -39 31 -28 2 +50 0.5 0.5 46 -1 13 11 1 +50 0.8 0.9 18 2 26 -41 2 +50 0.3 0.4 21 12 49 -13 1 +50 0.5 0.3 42 -33 31 -24 1 +50 0.1 0.9 -37 -48 43 22 2 +50 0.7 0.1 47 30 21 -3 1 +50 0.8 0.8 19 -50 39 -42 2 +50 0.4 0.7 16 1 -7 -35 1 +50 0.6 0.7 34 -13 22 12 2 +50 0.2 0.8 -11 -32 20 15 2 +50 0.9 0.4 -17 -25 35 -47 2 +50 0.4 0.5 14 -27 -22 -36 1 +50 0.6 0.8 6 -39 -29 -45 1 +50 0.6 0.1 -12 -20 21 10 2 +50 0.8 0.5 35 -35 45 28 2 +50 0.1 0.7 22 -47 26 -12 2 +50 0.3 0.8 1 -15 -10 -36 2 +50 0.1 0.4 15 -38 40 18 2 +50 0.6 0.4 -37 -44 36 -39 2 +50 0.2 0.5 46 39 29 -9 1 +50 0.5 0.2 37 -33 20 -9 1 +50 0.9 
0.2 -9 -35 26 -35 1 +50 0.6 0.4 19 -22 -23 -32 1 +50 0.1 0.8 28 -5 47 46 2 +50 0.5 0.2 45 9 -5 -14 1 +50 0.3 0.3 37 -50 32 -50 1 +50 0.3 0.1 41 9 -22 -47 1 +50 0.9 0.3 2 -23 48 -8 1 +50 0.9 0.6 34 7 23 -49 1 +50 0.7 0.7 38 -34 15 -28 1 +50 0.7 0.6 10 -14 30 -45 2 +50 0.9 0.7 14 -4 -15 -33 1 +50 0.6 0.3 -23 -35 46 36 2 +50 0.6 0.2 35 -38 8 -11 1 +50 0.4 0.4 31 -33 27 25 2 +50 0.3 0.8 0 -28 31 4 2 +50 0.3 0.3 22 -6 23 -35 1 +50 0.1 0.3 32 6 34 33 2 +50 0.6 0.9 27 -40 -1 -33 1 +50 0.2 0.9 28 -28 27 6 2 +50 0.7 0.7 45 27 -10 -47 1 +50 0.8 0.6 -1 -5 -21 -26 1 +50 0.6 0.9 44 18 33 -43 1 +50 0.8 0.5 27 -24 29 -8 1 +50 0.8 0.3 -22 -39 47 29 2 +50 0.1 0.9 -3 -14 27 -19 2 +50 0.5 0.2 -37 -44 -6 -11 2 +50 0.1 0.2 8 -41 11 -40 1 +50 0.8 0.4 46 42 21 -12 1 +50 0.1 0.6 25 -38 48 15 2 +50 0.5 0.8 36 -44 37 -41 2 +50 0.9 0.2 34 1 15 -5 1 +50 0.2 0.9 -12 -19 4 -17 2 +50 0.5 0.8 20 -26 30 -1 2 +50 0.8 0.7 19 -33 -2 -3 1 +50 0.3 0.1 -19 -24 13 -4 2 +50 0.6 0.4 48 -33 -35 -42 1 +50 0.9 0.7 31 5 45 22 2 +50 0.1 0.5 7 6 12 -41 1 +50 0.5 0.4 3 -46 -5 -6 2 +50 0.2 0.1 18 6 10 -42 1 +50 0.9 0.2 -3 -50 -15 -23 1 +50 0.2 0.6 41 30 -1 -7 1 +50 0.3 0.6 41 22 28 -26 1 +50 0.6 0.8 37 -29 38 -5 2 +50 0.8 0.3 14 1 31 30 2 +50 0.9 0.4 -4 -15 15 -8 2 +50 0.9 0.8 41 -17 10 3 1 +50 0.2 0.9 36 32 20 -11 1 +50 0.7 0.3 43 -37 26 24 1 +50 0.1 0.7 7 -25 35 -49 2 +50 0.9 0.5 23 1 2 -7 1 +50 0.1 0.2 -22 -38 48 20 2 +50 0.3 0.9 -12 -50 20 13 2 +50 0.1 0.1 39 -35 -43 -44 1 +50 0.1 0.5 17 -40 16 -2 2 +50 0.3 0.3 32 -10 26 -14 2 +50 0.4 0.3 24 19 25 19 1 +50 0.8 0.8 40 -5 10 8 1 +50 0.6 0.7 14 -23 21 10 2 +50 0.3 0.4 39 -20 44 -3 2 +50 0.1 0.4 27 -9 42 17 2 +50 0.4 0.9 22 -36 20 -2 2 +50 0.6 0.3 22 -12 32 -39 1 +50 0.1 0.3 32 -17 26 -15 2 +50 0.5 0.2 49 36 28 -50 1 +50 0.8 0.9 26 -20 43 40 2 +50 0.5 0.5 43 29 24 1 1 +50 0.5 0.8 -22 -27 50 29 2 +50 0.2 0.2 20 -50 28 -11 2 +50 0.5 0.3 21 -30 37 24 2 +50 0.9 0.3 15 13 6 -39 1 +50 0.9 0.7 -25 -26 26 -8 2 +50 0.7 0.7 25 7 -11 -19 1 +50 0.6 0.7 -5 -38 29 25 2 +50 0.3 0.3 40 13 25 -1 1 +50 0.9 0.8 8 -46 -18 -32 1 +50 0.8 0.7 -15 -38 27 5 2 +50 0.2 0.2 49 -33 -1 -6 2 +50 0.6 0.2 39 -3 -14 -25 1 +50 0.3 0.8 3 -39 4 -37 2 +50 0.3 0.9 39 37 48 -8 1 +50 0.2 0.6 43 -47 50 4 2 +50 0.6 0.7 -25 -30 -8 -47 2 +50 0.3 0.1 -8 -29 43 -7 2 +50 0.7 0.1 29 -18 -6 -26 1 +50 0.7 0.2 15 -19 24 -5 1 +50 0.1 0.6 -3 -17 9 -36 2 +50 0.4 0.4 -12 -30 21 -35 2 +50 0.6 0.7 0 -50 9 -45 2 +50 0.1 0.8 44 6 23 -25 2 +50 0.1 0.1 25 -11 40 -13 1 +50 0.8 0.4 15 -13 0 -43 1 +50 0.3 0.1 -28 -40 18 -36 1 +50 0.6 0.6 38 -5 -6 -16 1 +50 0.9 0.4 17 12 -7 -28 1 +50 0.9 0.9 48 -23 49 44 2 +50 0.6 0.7 -20 -29 32 -3 2 +50 0.7 0.6 -3 -33 1 -21 2 +50 0.8 0.8 6 -36 37 32 2 +50 0.3 0.9 18 -42 47 31 2 +50 0.7 0.4 28 -49 34 -18 1 +50 0.1 0.6 -7 -43 41 15 2 +50 0.7 0.1 14 -28 -1 -16 1 +50 0.7 0.3 44 0 12 -21 1 +50 0.9 0.8 6 -41 20 -37 2 +50 0.6 0.2 31 -31 42 27 2 +50 0.2 0.2 35 -2 27 8 2 +50 0.2 0.5 -31 -32 44 5 2 +50 0.1 0.6 49 -24 40 -6 2 +50 0.3 0.8 7 -45 40 -31 2 +50 0.4 0.3 43 13 35 13 1 +50 0.9 0.8 23 -9 -5 -39 1 +50 0.8 0.4 42 -37 -8 -28 1 +50 0.4 0.9 -16 -19 30 24 2 +50 0.3 0.6 35 33 39 -12 1 +50 0.2 0.6 28 -36 5 -25 2 +50 0.3 0.3 50 20 9 -38 1 +50 0.4 0.1 2 -14 -2 -42 1 +50 0.2 0.3 -32 -40 9 -11 2 +50 0.7 0.7 39 33 31 2 1 +50 0.3 0.8 23 -50 -21 -49 1 +50 0.6 0.3 22 -33 0 -8 1 +50 0.9 0.1 -5 -48 -17 -26 1 +50 0.7 0.7 46 -30 -30 -40 1 +50 0.8 0.9 12 -9 12 -9 1 +50 0.9 0.4 37 -27 -1 -22 1 +50 0.9 0.2 -16 -38 36 -37 2 +50 0.3 0.2 -14 -25 8 -31 2 +50 0.5 0.9 2 -14 43 20 2 +50 0.8 0.6 0 -3 28 -19 2 +50 0.6 0.5 16 11 44 -2 1 +50 0.8 0.2 6 -39 
43 40 2 +50 0.5 0.7 50 3 21 -14 1 +50 0.9 0.3 42 -14 38 -45 1 +50 0.8 0.7 19 -11 18 -25 1 +50 0.7 0.6 22 -13 39 31 2 +50 0.5 0.7 -30 -42 40 -22 2 +50 0.3 0.1 49 9 34 29 2 +50 0.6 0.3 -17 -36 35 0 2 +50 0.7 0.6 11 -43 46 -2 2 +50 0.2 0.1 -30 -49 40 3 2 +50 0.9 0.3 25 24 45 14 1 +50 0.2 0.4 47 29 -2 -7 1 +50 0.2 0.4 34 33 39 -14 1 +50 0.4 0.5 1 -33 18 -34 2 +50 0.3 0.6 49 -31 49 35 2 +50 0.5 0.2 -6 -37 36 30 2 +50 0.3 0.6 9 3 4 3 2 +50 0.3 0.6 -11 -19 3 -33 2 +50 0.6 0.2 43 17 17 -31 1 +50 0.3 0.8 -32 -36 -10 -14 2 +50 0.1 0.8 15 -2 0 -30 1 +50 0.7 0.4 13 -26 32 15 2 +50 0.2 0.8 -9 -18 43 -3 2 +50 0.3 0.4 17 -48 46 13 2 +50 0.9 0.5 46 -7 44 -26 1 +50 0.1 0.7 47 17 26 -27 1 +50 0.9 0.4 -13 -50 41 -17 2 +50 0.5 0.5 28 14 1 -6 1 +50 0.4 0.6 26 -16 37 25 2 +50 0.1 0.5 41 14 -6 -32 1 +50 0.8 0.4 28 -31 45 -18 2 +50 0.8 0.1 33 -6 20 19 1 +50 0.4 0.4 -2 -47 20 3 2 +50 0.8 0.9 32 -9 6 -47 1 +50 0.3 0.2 -11 -30 42 16 2 +50 0.2 0.4 15 2 9 4 1 +50 0.5 0.9 7 -15 38 -45 2 +50 0.4 0.3 31 26 49 -41 1 +50 0.9 0.2 -17 -23 33 -37 2 +50 0.9 0.3 -21 -45 -15 -38 1 +50 0.3 0.1 35 -13 46 24 2 +50 0.8 0.9 38 -46 -12 -17 1 +50 0.5 0.5 22 -22 -3 -33 1 +50 0.6 0.6 -6 -27 20 12 2 +50 0.1 0.7 2 -33 29 -31 2 +50 0.5 0.3 36 -47 2 -29 1 +50 0.2 0.2 -2 -24 -32 -49 1 +50 0.7 0.6 28 26 2 -17 1 +50 0.6 0.9 49 42 31 -50 1 +50 0.9 0.2 -7 -48 42 7 2 +50 0.9 0.6 -1 -12 13 -1 2 +50 0.9 0.1 22 -39 17 -34 1 +50 0.7 0.9 1 -25 25 20 2 +50 0.1 0.1 -20 -48 39 -32 2 +50 0.3 0.8 18 -2 8 -4 2 +50 0.5 0.7 49 -27 -12 -33 1 +50 0.9 0.7 -11 -32 8 -1 2 +50 0.1 0.5 -4 -27 -1 -44 1 +50 0.3 0.5 37 35 17 -4 1 +50 0.8 0.5 23 4 -9 -50 1 +50 0.6 0.2 25 -41 50 41 2 +50 0.8 0.4 40 1 -1 -36 1 +50 0.7 0.5 -16 -48 27 -46 2 +50 0.6 0.8 -29 -42 4 3 2 +50 0.3 0.8 -8 -13 -19 -22 1 +50 0.1 0.8 18 -5 29 -1 2 +50 0.5 0.2 18 14 42 -39 1 +50 0.1 0.1 -1 -37 13 0 2 +50 0.2 0.7 -28 -30 5 -35 2 +50 0.6 0.1 -29 -33 47 -31 2 +50 0.8 0.3 0 -29 50 32 2 +50 0.8 0.4 -18 -26 27 -26 2 +50 0.8 0.2 10 -17 6 -20 1 +50 0.7 0.8 24 -39 27 17 2 +50 0.6 0.3 -3 -43 -26 -43 1 +50 0.2 0.8 38 -11 -1 -38 1 +50 0.5 0.7 5 -1 16 15 2 +50 0.7 0.4 36 23 26 -44 1 +50 0.7 0.7 37 -48 -7 -35 1 +50 0.7 0.5 -25 -44 38 0 2 +50 0.8 0.7 -26 -35 -38 -41 1 +50 0.9 0.4 -9 -43 46 -19 2 +50 0.2 0.8 31 -19 33 -23 2 +50 0.6 0.3 30 23 46 -24 1 +50 0.2 0.9 48 -10 32 -31 2 +50 0.1 0.8 25 7 27 -47 2 +50 0.3 0.4 2 -4 9 -38 1 +50 0.4 0.9 -4 -48 27 -27 2 +50 0.1 0.1 1 -47 11 3 2 +50 0.3 0.7 21 13 21 -18 2 +50 0.9 0.6 28 5 26 10 1 +50 0.2 0.2 35 29 49 -14 1 +50 0.3 0.6 39 12 50 17 2 +50 0.4 0.2 -18 -33 0 -26 2 +50 0.7 0.1 34 32 -18 -32 1 +50 0.9 0.7 21 -28 17 -7 1 +50 0.9 0.5 -7 -25 10 -48 1 +50 0.4 0.8 -11 -28 6 -14 2 +50 0.9 0.4 33 -16 38 -44 1 +50 0.1 0.9 13 11 31 -9 2 +50 0.1 0.1 -3 -44 39 -23 2 +50 0.9 0.2 15 -23 34 -38 1 +50 0.1 0.3 43 -3 21 -19 1 +50 0.2 0.5 -13 -34 33 -23 2 +50 0.5 0.3 28 25 43 21 2 +50 0.2 0.6 32 20 25 -2 1 +50 0.1 0.1 22 7 40 -32 1 +50 0.6 0.7 29 -21 -34 -46 1 +50 0.9 0.3 -23 -46 -4 -49 1 +50 0.9 0.8 42 -26 13 -38 1
diff --git a/R/inst/extdata/dd_exampleData.txt b/R/inst/extdata/dd_exampleData.txt
new file mode 100644
index 00000000..d90c64c1
--- /dev/null
+++ b/R/inst/extdata/dd_exampleData.txt
@@ -0,0 +1,2161 @@
+subjID trial delay_later amount_later delay_sooner amount_sooner choice
+1 1 6 10.5 0 10 1 +1 2 170 38.3 0 10 1 +1 3 28 13.4 0 10 1 +1 4 28 31.4 0 10 1 +1 5 85 30.9 0 10 1 +1 6 28 21.1 0 10 1 +1 7 28 13 0 10 1 +1 8 1 21.3 0 10 1 +1 9 28 21.1 0 10 1 +1 10 15 30.1 0 10 1 +1 11 1 10.7 0 10 1 +1 12 85 36.1 0 10 1 +1 13 15 10.5 0 10 1 +1 14 6 16.7 0 10 1 +1 15 1 11 0 10 1 +1 16 15 14.2
0 10 1 +1 17 15 12.5 0 10 1 +1 18 15 20.7 0 10 1 +1 19 6 11 0 10 0 +1 20 28 16.9 0 10 1 +1 21 15 30.1 0 10 1 +1 22 85 24.4 0 10 1 +1 23 170 41.3 0 10 1 +1 24 15 14.2 0 10 1 +1 25 6 10.5 0 10 1 +1 26 170 24.4 0 10 1 +1 27 15 49 0 10 1 +1 28 170 29.7 0 10 1 +1 29 1 11.8 0 10 0 +1 30 6 13.2 0 10 0 +1 31 85 30.9 0 10 1 +1 32 6 44 0 10 1 +1 33 6 35.1 0 10 1 +1 34 28 15.5 0 10 1 +1 35 170 43.3 0 10 1 +1 36 170 33.9 0 10 1 +1 37 1 11 0 10 1 +1 38 1 21.3 0 10 1 +1 39 85 45 0 10 1 +1 40 15 39.6 0 10 1 +1 41 85 10.5 0 10 0 +1 42 170 15 0 10 1 +1 43 170 49.8 0 10 1 +1 44 170 24.4 0 10 1 +1 45 28 13.4 0 10 1 +1 46 1 31.6 0 10 1 +1 47 170 35.6 0 10 1 +1 48 1 41.9 0 10 1 +1 49 6 17.4 0 10 1 +1 50 85 18.4 0 10 1 +1 51 85 27.3 0 10 1 +1 52 85 26 0 10 1 +1 53 170 38.3 0 10 1 +1 54 28 21.7 0 10 1 +1 55 1 10.7 0 10 1 +1 56 170 49.8 0 10 1 +1 57 1 11.2 0 10 1 +1 58 15 20.7 0 10 1 +1 59 6 44 0 10 1 +1 60 28 41.1 0 10 1 +1 61 28 16.9 0 10 1 +1 62 6 14 0 10 1 +1 63 1 31.6 0 10 1 +1 64 15 18.6 0 10 1 +1 65 28 12 0 10 1 +1 66 6 13.2 0 10 1 +1 67 170 43.3 0 10 1 +1 68 28 31.4 0 10 1 +1 69 85 19.5 0 10 1 +1 70 170 35.6 0 10 1 +1 71 85 18.4 0 10 1 +1 72 1 12.5 0 10 1 +1 73 170 41.3 0 10 1 +1 74 170 15 0 10 0 +1 75 28 12 0 10 0 +1 76 85 36.1 0 10 1 +1 77 1 18 0 10 1 +1 78 85 10.5 0 10 0 +1 79 170 33.9 0 10 1 +1 80 6 26.3 0 10 1 +1 81 85 45 0 10 1 +1 82 28 21.7 0 10 1 +1 83 28 13 0 10 0 +1 84 85 27.3 0 10 1 +1 85 15 18.6 0 10 1 +1 86 15 12.5 0 10 1 +1 87 6 26.3 0 10 1 +1 88 6 11 0 10 1 +1 89 15 10.7 0 10 0 +1 90 6 16.7 0 10 1 +1 91 28 41.1 0 10 1 +1 92 85 26 0 10 1 +1 93 85 24.4 0 10 1 +1 94 1 12.5 0 10 1 +1 95 6 17.4 0 10 1 +1 96 6 35.1 0 10 1 +1 97 6 14 0 10 1 +1 98 15 10.5 0 10 0 +1 99 1 11.8 0 10 1 +1 100 15 10.7 0 10 1 +1 101 15 39.6 0 10 1 +1 102 85 19.5 0 10 1 +1 103 1 11.2 0 10 1 +1 104 170 29.7 0 10 1 +1 105 15 49 0 10 1 +1 106 1 41.9 0 10 1 +1 107 1 18 0 10 1 +1 108 28 15.5 0 10 1 +2 1 1 11.8 0 10 0 +2 2 170 35.6 0 10 0 +2 3 85 10.5 0 10 0 +2 4 28 21.1 0 10 1 +2 5 28 13 0 10 0 +2 6 6 10.5 0 10 0 +2 7 15 10.5 0 10 0 +2 8 6 17.4 0 10 1 +2 9 85 26 0 10 1 +2 10 6 35.1 0 10 1 +2 11 28 21.7 0 10 1 +2 12 6 14 0 10 1 +2 13 15 14.2 0 10 0 +2 14 1 12.5 0 10 1 +2 15 170 38.3 0 10 0 +2 16 1 18 0 10 1 +2 17 15 39.6 0 10 1 +2 18 85 18.4 0 10 0 +2 19 28 21.1 0 10 0 +2 20 85 19.5 0 10 0 +2 21 6 11 0 10 1 +2 22 85 30.9 0 10 1 +2 23 1 10.7 0 10 1 +2 24 28 16.9 0 10 0 +2 25 170 29.7 0 10 1 +2 26 170 43.3 0 10 0 +2 27 6 14 0 10 1 +2 28 6 11 0 10 1 +2 29 28 41.1 0 10 1 +2 30 1 31.6 0 10 1 +2 31 15 18.6 0 10 1 +2 32 15 14.2 0 10 1 +2 33 28 12 0 10 0 +2 34 1 21.3 0 10 1 +2 35 85 36.1 0 10 1 +2 36 85 26 0 10 1 +2 37 15 49 0 10 1 +2 38 1 41.9 0 10 1 +2 39 1 21.3 0 10 1 +2 40 170 41.3 0 10 0 +2 41 170 43.3 0 10 0 +2 42 15 18.6 0 10 1 +2 43 15 49 0 10 1 +2 44 170 15 0 10 0 +2 45 85 24.4 0 10 1 +2 46 15 30.1 0 10 1 +2 47 85 18.4 0 10 0 +2 48 170 41.3 0 10 0 +2 49 28 41.1 0 10 1 +2 50 28 31.4 0 10 1 +2 51 6 35.1 0 10 1 +2 52 1 11.2 0 10 0 +2 53 170 33.9 0 10 0 +2 54 28 15.5 0 10 0 +2 55 1 18 0 10 1 +2 56 15 10.7 0 10 0 +2 57 85 45 0 10 1 +2 58 85 19.5 0 10 0 +2 59 6 44 0 10 1 +2 60 85 30.9 0 10 0 +2 61 1 11 0 10 0 +2 62 170 35.6 0 10 1 +2 63 170 29.7 0 10 0 +2 64 6 16.7 0 10 1 +2 65 28 15.5 0 10 0 +2 66 6 44 0 10 1 +2 67 85 10.5 0 10 0 +2 68 85 45 0 10 1 +2 69 1 11 0 10 0 +2 70 15 10.5 0 10 0 +2 71 170 49.8 0 10 0 +2 72 15 20.7 0 10 0 +2 73 6 13.2 0 10 1 +2 74 15 12.5 0 10 1 +2 75 28 13 0 10 1 +2 76 1 10.7 0 10 1 +2 77 28 13.4 0 10 1 +2 78 15 39.6 0 10 1 +2 79 15 20.7 0 10 1 +2 80 1 11.2 0 10 1 +2 81 85 24.4 0 10 1 +2 82 1 12.5 0 10 0 +2 
83 170 49.8 0 10 1 +2 84 170 33.9 0 10 1 +2 85 85 27.3 0 10 0 +2 86 170 24.4 0 10 0 +2 87 15 10.7 0 10 0 +2 88 6 16.7 0 10 1 +2 89 1 11.8 0 10 0 +2 90 6 10.5 0 10 0 +2 91 28 12 0 10 0 +2 92 6 17.4 0 10 1 +2 93 28 16.9 0 10 0 +2 94 28 13.4 0 10 1 +2 95 1 31.6 0 10 1 +2 96 85 36.1 0 10 1 +2 97 15 30.1 0 10 0 +2 98 170 15 0 10 0 +2 99 85 27.3 0 10 0 +2 100 170 38.3 0 10 0 +2 101 15 12.5 0 10 0 +2 102 6 26.3 0 10 1 +2 103 1 41.9 0 10 1 +2 104 6 13.2 0 10 1 +2 105 28 21.7 0 10 0 +2 106 170 24.4 0 10 0 +2 107 28 31.4 0 10 0 +2 108 6 26.3 0 10 1 +3 1 28 16.9 0 10 0 +3 2 1 21.3 0 10 1 +3 3 6 44 0 10 1 +3 4 170 49.8 0 10 1 +3 5 28 13.4 0 10 1 +3 6 28 21.1 0 10 1 +3 7 15 14.2 0 10 1 +3 8 6 26.3 0 10 1 +3 9 85 24.4 0 10 0 +3 10 170 41.3 0 10 1 +3 11 28 12 0 10 0 +3 12 15 39.6 0 10 1 +3 13 85 30.9 0 10 1 +3 14 28 31.4 0 10 1 +3 15 85 10.5 0 10 1 +3 16 1 10.7 0 10 1 +3 17 28 31.4 0 10 1 +3 18 6 26.3 0 10 1 +3 19 1 41.9 0 10 1 +3 20 6 13.2 0 10 0 +3 21 28 41.1 0 10 1 +3 22 15 12.5 0 10 1 +3 23 15 39.6 0 10 1 +3 24 85 10.5 0 10 0 +3 25 28 12 0 10 1 +3 26 170 38.3 0 10 1 +3 27 85 36.1 0 10 1 +3 28 1 41.9 0 10 1 +3 29 15 10.5 0 10 0 +3 30 85 19.5 0 10 0 +3 31 85 26 0 10 1 +3 32 85 45 0 10 1 +3 33 1 12.5 0 10 1 +3 34 6 13.2 0 10 1 +3 35 15 10.7 0 10 1 +3 36 1 11 0 10 1 +3 37 15 30.1 0 10 0 +3 38 15 20.7 0 10 1 +3 39 6 17.4 0 10 1 +3 40 6 10.5 0 10 0 +3 41 170 15 0 10 1 +3 42 15 12.5 0 10 1 +3 43 1 31.6 0 10 1 +3 44 15 10.5 0 10 0 +3 45 170 41.3 0 10 1 +3 46 170 15 0 10 0 +3 47 15 18.6 0 10 1 +3 48 6 17.4 0 10 1 +3 49 85 18.4 0 10 0 +3 50 170 43.3 0 10 1 +3 51 28 21.7 0 10 0 +3 52 6 16.7 0 10 0 +3 53 170 33.9 0 10 0 +3 54 1 18 0 10 1 +3 55 1 18 0 10 1 +3 56 15 30.1 0 10 1 +3 57 1 10.7 0 10 0 +3 58 85 27.3 0 10 1 +3 59 6 35.1 0 10 1 +3 60 85 30.9 0 10 1 +3 61 85 24.4 0 10 1 +3 62 85 19.5 0 10 0 +3 63 170 33.9 0 10 1 +3 64 6 10.5 0 10 0 +3 65 85 27.3 0 10 1 +3 66 28 16.9 0 10 0 +3 67 6 35.1 0 10 1 +3 68 15 49 0 10 1 +3 69 85 26 0 10 1 +3 70 85 45 0 10 1 +3 71 1 11.8 0 10 1 +3 72 170 35.6 0 10 1 +3 73 1 31.6 0 10 1 +3 74 28 13 0 10 0 +3 75 28 21.1 0 10 1 +3 76 15 20.7 0 10 1 +3 77 15 10.7 0 10 0 +3 78 28 15.5 0 10 0 +3 79 1 21.3 0 10 1 +3 80 6 14 0 10 1 +3 81 170 49.8 0 10 1 +3 82 85 36.1 0 10 1 +3 83 1 11.2 0 10 0 +3 84 28 15.5 0 10 0 +3 85 170 29.7 0 10 1 +3 86 170 24.4 0 10 1 +3 87 170 24.4 0 10 1 +3 88 28 13.4 0 10 0 +3 89 15 18.6 0 10 1 +3 90 28 21.7 0 10 1 +3 91 85 18.4 0 10 1 +3 92 6 16.7 0 10 1 +3 93 6 11 0 10 1 +3 94 28 41.1 0 10 1 +3 95 170 43.3 0 10 1 +3 96 6 44 0 10 1 +3 97 1 11.2 0 10 1 +3 98 6 11 0 10 1 +3 99 170 35.6 0 10 1 +3 100 15 49 0 10 1 +3 101 170 38.3 0 10 1 +3 102 28 13 0 10 0 +3 103 170 29.7 0 10 1 +3 104 1 12.5 0 10 1 +3 105 1 11 0 10 1 +3 106 1 11.8 0 10 1 +3 107 6 14 0 10 0 +3 108 15 14.2 0 10 1 +4 1 170 41.3 0 10 1 +4 2 170 38.3 0 10 1 +4 3 28 21.1 0 10 1 +4 4 15 20.7 0 10 1 +4 5 85 45 0 10 1 +4 6 85 45 0 10 1 +4 7 28 21.7 0 10 1 +4 8 1 11.2 0 10 1 +4 9 170 49.8 0 10 1 +4 10 6 14 0 10 0 +4 11 28 21.7 0 10 1 +4 12 1 11.2 0 10 1 +4 13 1 31.6 0 10 1 +4 14 6 10.5 0 10 1 +4 15 1 21.3 0 10 1 +4 16 170 43.3 0 10 1 +4 17 1 18 0 10 1 +4 18 15 10.5 0 10 0 +4 19 15 20.7 0 10 1 +4 20 15 39.6 0 10 1 +4 21 170 33.9 0 10 1 +4 22 1 21.3 0 10 1 +4 23 85 30.9 0 10 1 +4 24 15 18.6 0 10 1 +4 25 28 13.4 0 10 1 +4 26 170 15 0 10 1 +4 27 170 41.3 0 10 1 +4 28 85 27.3 0 10 0 +4 29 1 11.8 0 10 0 +4 30 85 24.4 0 10 1 +4 31 15 49 0 10 1 +4 32 6 17.4 0 10 1 +4 33 6 35.1 0 10 1 +4 34 170 15 0 10 0 +4 35 6 26.3 0 10 1 +4 36 170 35.6 0 10 0 +4 37 6 13.2 0 10 1 +4 38 28 15.5 0 10 1 +4 39 1 11 0 10 1 +4 40 15 12.5 
0 10 1 +4 41 6 13.2 0 10 0 +4 42 1 10.7 0 10 1 +4 43 6 17.4 0 10 1 +4 44 85 10.5 0 10 0 +4 45 28 13.4 0 10 1 +4 46 1 41.9 0 10 1 +4 47 28 13 0 10 1 +4 48 28 16.9 0 10 0 +4 49 85 36.1 0 10 1 +4 50 15 18.6 0 10 1 +4 51 85 27.3 0 10 1 +4 52 15 49 0 10 1 +4 53 15 30.1 0 10 1 +4 54 170 29.7 0 10 1 +4 55 6 14 0 10 1 +4 56 28 41.1 0 10 1 +4 57 15 30.1 0 10 1 +4 58 15 12.5 0 10 1 +4 59 85 30.9 0 10 1 +4 60 28 21.1 0 10 1 +4 61 6 44 0 10 1 +4 62 28 16.9 0 10 1 +4 63 6 11 0 10 0 +4 64 170 38.3 0 10 1 +4 65 85 18.4 0 10 1 +4 66 85 19.5 0 10 1 +4 67 170 33.9 0 10 0 +4 68 170 35.6 0 10 1 +4 69 15 14.2 0 10 1 +4 70 28 13 0 10 0 +4 71 28 31.4 0 10 1 +4 72 1 11.8 0 10 0 +4 73 1 12.5 0 10 0 +4 74 28 31.4 0 10 1 +4 75 1 12.5 0 10 1 +4 76 28 41.1 0 10 1 +4 77 1 10.7 0 10 1 +4 78 170 24.4 0 10 1 +4 79 6 16.7 0 10 1 +4 80 170 24.4 0 10 1 +4 81 6 35.1 0 10 1 +4 82 1 11 0 10 0 +4 83 28 12 0 10 0 +4 84 15 10.5 0 10 0 +4 85 15 10.7 0 10 0 +4 86 28 12 0 10 1 +4 87 85 19.5 0 10 1 +4 88 6 16.7 0 10 1 +4 89 6 11 0 10 0 +4 90 15 39.6 0 10 1 +4 91 85 24.4 0 10 0 +4 92 6 26.3 0 10 1 +4 93 85 18.4 0 10 1 +4 94 15 14.2 0 10 0 +4 95 6 10.5 0 10 0 +4 96 1 41.9 0 10 1 +4 97 85 36.1 0 10 1 +4 98 85 26 0 10 1 +4 99 28 15.5 0 10 0 +4 100 1 31.6 0 10 1 +4 101 6 44 0 10 1 +4 102 85 26 0 10 1 +4 103 170 29.7 0 10 1 +4 104 170 43.3 0 10 1 +4 105 170 49.8 0 10 1 +4 106 85 10.5 0 10 0 +4 107 1 18 0 10 1 +4 108 15 10.7 0 10 1 +5 1 170 41.3 0 10 1 +5 2 85 18.4 0 10 1 +5 3 28 21.7 0 10 1 +5 4 85 10.5 0 10 0 +5 5 15 14.2 0 10 1 +5 6 28 21.7 0 10 1 +5 7 85 30.9 0 10 1 +5 8 85 26 0 10 1 +5 9 1 10.7 0 10 1 +5 10 28 13 0 10 0 +5 11 170 33.9 0 10 1 +5 12 85 36.1 0 10 0 +5 13 15 30.1 0 10 1 +5 14 1 31.6 0 10 1 +5 15 6 13.2 0 10 1 +5 16 1 11 0 10 1 +5 17 85 24.4 0 10 1 +5 18 1 41.9 0 10 1 +5 19 15 14.2 0 10 0 +5 20 15 20.7 0 10 1 +5 21 15 10.5 0 10 0 +5 22 6 10.5 0 10 1 +5 23 85 45 0 10 1 +5 24 28 16.9 0 10 1 +5 25 1 21.3 0 10 1 +5 26 6 14 0 10 1 +5 27 28 13.4 0 10 0 +5 28 6 17.4 0 10 1 +5 29 170 33.9 0 10 0 +5 30 15 18.6 0 10 1 +5 31 85 45 0 10 1 +5 32 28 13.4 0 10 0 +5 33 15 10.5 0 10 0 +5 34 15 49 0 10 1 +5 35 170 43.3 0 10 1 +5 36 15 39.6 0 10 1 +5 37 85 18.4 0 10 0 +5 38 170 49.8 0 10 1 +5 39 15 10.7 0 10 0 +5 40 170 24.4 0 10 0 +5 41 15 39.6 0 10 1 +5 42 28 41.1 0 10 1 +5 43 85 27.3 0 10 1 +5 44 1 18 0 10 1 +5 45 1 12.5 0 10 1 +5 46 1 11.8 0 10 0 +5 47 28 15.5 0 10 0 +5 48 170 15 0 10 0 +5 49 28 21.1 0 10 1 +5 50 6 11 0 10 0 +5 51 28 31.4 0 10 1 +5 52 1 31.6 0 10 1 +5 53 15 20.7 0 10 1 +5 54 28 31.4 0 10 1 +5 55 1 11.2 0 10 1 +5 56 6 11 0 10 1 +5 57 6 10.5 0 10 1 +5 58 15 10.7 0 10 1 +5 59 28 13 0 10 0 +5 60 85 26 0 10 1 +5 61 6 35.1 0 10 1 +5 62 170 35.6 0 10 1 +5 63 85 27.3 0 10 1 +5 64 85 30.9 0 10 1 +5 65 1 41.9 0 10 1 +5 66 170 35.6 0 10 1 +5 67 28 15.5 0 10 1 +5 68 1 11.2 0 10 1 +5 69 170 49.8 0 10 1 +5 70 15 12.5 0 10 0 +5 71 85 19.5 0 10 1 +5 72 6 16.7 0 10 1 +5 73 1 10.7 0 10 1 +5 74 6 44 0 10 1 +5 75 170 29.7 0 10 1 +5 76 6 17.4 0 10 1 +5 77 1 21.3 0 10 1 +5 78 170 38.3 0 10 0 +5 79 170 24.4 0 10 0 +5 80 6 35.1 0 10 1 +5 81 1 12.5 0 10 1 +5 82 1 11.8 0 10 1 +5 83 28 12 0 10 1 +5 84 28 12 0 10 1 +5 85 85 36.1 0 10 1 +5 86 170 29.7 0 10 1 +5 87 170 43.3 0 10 1 +5 88 1 11 0 10 0 +5 89 85 24.4 0 10 0 +5 90 15 30.1 0 10 1 +5 91 6 14 0 10 0 +5 92 170 38.3 0 10 1 +5 93 6 44 0 10 1 +5 94 6 16.7 0 10 1 +5 95 6 26.3 0 10 1 +5 96 28 16.9 0 10 0 +5 97 85 10.5 0 10 0 +5 98 15 18.6 0 10 1 +5 99 28 21.1 0 10 1 +5 100 170 15 0 10 0 +5 101 15 49 0 10 1 +5 102 170 41.3 0 10 1 +5 103 6 13.2 0 10 1 +5 104 85 19.5 0 10 1 +5 105 6 26.3 0 10 1 +5 106 
28 41.1 0 10 1 +5 107 1 18 0 10 1 +5 108 15 12.5 0 10 0 +6 1 15 18.6 0 10 1 +6 2 1 10.7 0 10 1 +6 3 1 11.2 0 10 1 +6 4 15 18.6 0 10 1 +6 5 28 16.9 0 10 1 +6 6 85 27.3 0 10 1 +6 7 28 13 0 10 1 +6 8 15 10.7 0 10 0 +6 9 170 33.9 0 10 1 +6 10 15 14.2 0 10 1 +6 11 15 10.5 0 10 1 +6 12 170 33.9 0 10 1 +6 13 15 39.6 0 10 1 +6 14 1 11.8 0 10 1 +6 15 15 10.7 0 10 0 +6 16 28 21.1 0 10 1 +6 17 85 18.4 0 10 0 +6 18 1 18 0 10 1 +6 19 1 11 0 10 1 +6 20 15 12.5 0 10 1 +6 21 170 38.3 0 10 1 +6 22 1 11 0 10 0 +6 23 6 16.7 0 10 0 +6 24 28 16.9 0 10 1 +6 25 6 17.4 0 10 1 +6 26 1 12.5 0 10 1 +6 27 85 18.4 0 10 0 +6 28 28 31.4 0 10 1 +6 29 6 26.3 0 10 1 +6 30 85 45 0 10 1 +6 31 85 24.4 0 10 1 +6 32 6 16.7 0 10 1 +6 33 85 10.5 0 10 0 +6 34 6 44 0 10 1 +6 35 1 12.5 0 10 1 +6 36 170 15 0 10 0 +6 37 170 15 0 10 0 +6 38 15 39.6 0 10 1 +6 39 85 19.5 0 10 1 +6 40 15 10.5 0 10 1 +6 41 85 27.3 0 10 1 +6 42 170 29.7 0 10 1 +6 43 170 24.4 0 10 1 +6 44 15 14.2 0 10 0 +6 45 6 11 0 10 1 +6 46 1 41.9 0 10 1 +6 47 1 31.6 0 10 1 +6 48 28 13.4 0 10 1 +6 49 15 30.1 0 10 1 +6 50 28 41.1 0 10 1 +6 51 28 13 0 10 1 +6 52 85 19.5 0 10 1 +6 53 170 43.3 0 10 1 +6 54 28 41.1 0 10 1 +6 55 6 17.4 0 10 1 +6 56 15 20.7 0 10 1 +6 57 15 30.1 0 10 1 +6 58 170 49.8 0 10 1 +6 59 85 36.1 0 10 1 +6 60 85 30.9 0 10 1 +6 61 170 35.6 0 10 1 +6 62 15 20.7 0 10 1 +6 63 1 11.2 0 10 0 +6 64 170 24.4 0 10 1 +6 65 28 21.7 0 10 1 +6 66 1 10.7 0 10 1 +6 67 85 45 0 10 1 +6 68 6 10.5 0 10 1 +6 69 15 12.5 0 10 1 +6 70 28 31.4 0 10 1 +6 71 170 38.3 0 10 1 +6 72 1 18 0 10 1 +6 73 1 21.3 0 10 1 +6 74 6 35.1 0 10 1 +6 75 28 13.4 0 10 0 +6 76 85 10.5 0 10 0 +6 77 28 12 0 10 1 +6 78 6 10.5 0 10 1 +6 79 1 11.8 0 10 1 +6 80 6 13.2 0 10 1 +6 81 1 41.9 0 10 1 +6 82 85 36.1 0 10 1 +6 83 28 15.5 0 10 1 +6 84 85 30.9 0 10 1 +6 85 170 43.3 0 10 1 +6 86 85 26 0 10 1 +6 87 28 21.1 0 10 1 +6 88 28 15.5 0 10 0 +6 89 6 11 0 10 1 +6 90 1 31.6 0 10 1 +6 91 170 49.8 0 10 1 +6 92 1 21.3 0 10 1 +6 93 28 21.7 0 10 1 +6 94 170 41.3 0 10 1 +6 95 15 49 0 10 1 +6 96 6 35.1 0 10 1 +6 97 15 49 0 10 1 +6 98 6 26.3 0 10 1 +6 99 28 12 0 10 1 +6 100 6 14 0 10 1 +6 101 6 44 0 10 1 +6 102 170 29.7 0 10 1 +6 103 6 14 0 10 1 +6 104 170 35.6 0 10 1 +6 105 85 26 0 10 1 +6 106 6 13.2 0 10 1 +6 107 170 41.3 0 10 1 +6 108 85 24.4 0 10 1 +7 1 28 13 0 10 1 +7 2 28 41.1 0 10 1 +7 3 170 29.7 0 10 0 +7 4 1 10.7 0 10 1 +7 5 6 17.4 0 10 1 +7 6 15 12.5 0 10 1 +7 7 15 18.6 0 10 1 +7 8 170 24.4 0 10 0 +7 9 1 11 0 10 1 +7 10 28 16.9 0 10 1 +7 11 170 41.3 0 10 1 +7 12 15 10.5 0 10 0 +7 13 6 10.5 0 10 1 +7 14 28 12 0 10 1 +7 15 170 24.4 0 10 1 +7 16 1 10.7 0 10 0 +7 17 6 35.1 0 10 1 +7 18 85 19.5 0 10 0 +7 19 6 26.3 0 10 1 +7 20 85 26 0 10 1 +7 21 1 11.2 0 10 1 +7 22 6 16.7 0 10 1 +7 23 28 31.4 0 10 1 +7 24 170 35.6 0 10 0 +7 25 1 21.3 0 10 1 +7 26 15 20.7 0 10 1 +7 27 15 14.2 0 10 1 +7 28 85 24.4 0 10 1 +7 29 1 11 0 10 1 +7 30 85 27.3 0 10 1 +7 31 15 18.6 0 10 1 +7 32 6 16.7 0 10 1 +7 33 28 21.1 0 10 1 +7 34 15 39.6 0 10 1 +7 35 28 31.4 0 10 1 +7 36 1 11.8 0 10 1 +7 37 170 38.3 0 10 1 +7 38 1 12.5 0 10 1 +7 39 1 11.8 0 10 1 +7 40 28 21.7 0 10 1 +7 41 28 21.1 0 10 1 +7 42 170 33.9 0 10 0 +7 43 6 14 0 10 1 +7 44 15 12.5 0 10 1 +7 45 15 10.7 0 10 1 +7 46 1 41.9 0 10 1 +7 47 1 18 0 10 1 +7 48 15 14.2 0 10 1 +7 49 6 11 0 10 0 +7 50 85 30.9 0 10 1 +7 51 170 49.8 0 10 1 +7 52 6 44 0 10 1 +7 53 85 45 0 10 1 +7 54 170 49.8 0 10 1 +7 55 85 10.5 0 10 0 +7 56 15 49 0 10 1 +7 57 170 15 0 10 0 +7 58 6 13.2 0 10 1 +7 59 170 35.6 0 10 1 +7 60 170 29.7 0 10 0 +7 61 170 15 0 10 0 +7 62 28 15.5 0 10 1 +7 63 28 21.7 0 10 1 +7 64 85 45 
0 10 1 +7 65 28 13.4 0 10 0 +7 66 6 44 0 10 1 +7 67 6 10.5 0 10 1 +7 68 85 36.1 0 10 1 +7 69 6 14 0 10 1 +7 70 170 43.3 0 10 1 +7 71 28 12 0 10 0 +7 72 85 24.4 0 10 1 +7 73 85 18.4 0 10 0 +7 74 15 10.7 0 10 0 +7 75 6 35.1 0 10 1 +7 76 15 49 0 10 1 +7 77 85 19.5 0 10 0 +7 78 1 12.5 0 10 1 +7 79 1 18 0 10 1 +7 80 28 13 0 10 0 +7 81 6 17.4 0 10 1 +7 82 1 21.3 0 10 1 +7 83 15 30.1 0 10 1 +7 84 85 26 0 10 0 +7 85 85 30.9 0 10 1 +7 86 170 33.9 0 10 0 +7 87 15 39.6 0 10 1 +7 88 1 41.9 0 10 1 +7 89 170 43.3 0 10 1 +7 90 28 16.9 0 10 0 +7 91 85 10.5 0 10 0 +7 92 1 31.6 0 10 1 +7 93 6 26.3 0 10 1 +7 94 15 30.1 0 10 1 +7 95 1 31.6 0 10 1 +7 96 6 13.2 0 10 1 +7 97 170 38.3 0 10 1 +7 98 85 36.1 0 10 1 +7 99 170 41.3 0 10 1 +7 100 28 13.4 0 10 1 +7 101 28 15.5 0 10 0 +7 102 15 10.5 0 10 0 +7 103 6 11 0 10 0 +7 104 15 20.7 0 10 1 +7 105 85 27.3 0 10 0 +7 106 28 41.1 0 10 1 +7 107 85 18.4 0 10 1 +7 108 1 11.2 0 10 0 +8 1 85 19.5 0 10 0 +8 2 85 19.5 0 10 0 +8 3 28 21.1 0 10 0 +8 4 1 11.2 0 10 0 +8 5 170 33.9 0 10 0 +8 6 85 18.4 0 10 1 +8 7 15 20.7 0 10 1 +8 8 1 21.3 0 10 1 +8 9 15 14.2 0 10 0 +8 10 85 30.9 0 10 0 +8 11 1 11 0 10 1 +8 12 170 49.8 0 10 1 +8 13 1 41.9 0 10 1 +8 14 6 44 0 10 1 +8 15 170 38.3 0 10 1 +8 16 28 12 0 10 0 +8 17 6 10.5 0 10 0 +8 18 28 13 0 10 0 +8 19 6 14 0 10 1 +8 20 170 43.3 0 10 0 +8 21 6 17.4 0 10 1 +8 22 1 18 0 10 1 +8 23 85 36.1 0 10 0 +8 24 15 10.5 0 10 0 +8 25 85 24.4 0 10 1 +8 26 170 29.7 0 10 0 +8 27 6 14 0 10 1 +8 28 15 12.5 0 10 0 +8 29 28 15.5 0 10 0 +8 30 85 45 0 10 1 +8 31 28 13.4 0 10 0 +8 32 6 16.7 0 10 1 +8 33 170 49.8 0 10 0 +8 34 6 17.4 0 10 1 +8 35 85 26 0 10 1 +8 36 1 10.7 0 10 0 +8 37 6 11 0 10 1 +8 38 1 11.8 0 10 1 +8 39 1 12.5 0 10 0 +8 40 85 26 0 10 0 +8 41 15 10.7 0 10 0 +8 42 170 35.6 0 10 1 +8 43 85 27.3 0 10 1 +8 44 170 43.3 0 10 0 +8 45 28 13.4 0 10 0 +8 46 28 12 0 10 0 +8 47 1 31.6 0 10 1 +8 48 6 13.2 0 10 1 +8 49 85 36.1 0 10 1 +8 50 28 21.7 0 10 1 +8 51 15 18.6 0 10 0 +8 52 85 27.3 0 10 0 +8 53 6 26.3 0 10 1 +8 54 1 41.9 0 10 1 +8 55 15 30.1 0 10 1 +8 56 1 10.7 0 10 0 +8 57 170 15 0 10 0 +8 58 6 10.5 0 10 0 +8 59 28 31.4 0 10 1 +8 60 28 41.1 0 10 1 +8 61 170 29.7 0 10 0 +8 62 1 11.8 0 10 0 +8 63 15 18.6 0 10 0 +8 64 1 11 0 10 0 +8 65 170 41.3 0 10 1 +8 66 15 39.6 0 10 1 +8 67 28 31.4 0 10 0 +8 68 6 16.7 0 10 1 +8 69 15 49 0 10 1 +8 70 85 45 0 10 1 +8 71 170 24.4 0 10 1 +8 72 85 24.4 0 10 1 +8 73 1 18 0 10 1 +8 74 85 10.5 0 10 0 +8 75 28 21.7 0 10 1 +8 76 28 16.9 0 10 0 +8 77 6 44 0 10 1 +8 78 170 33.9 0 10 1 +8 79 6 11 0 10 1 +8 80 28 13 0 10 1 +8 81 28 41.1 0 10 1 +8 82 6 13.2 0 10 1 +8 83 28 15.5 0 10 0 +8 84 15 49 0 10 1 +8 85 15 14.2 0 10 1 +8 86 170 41.3 0 10 1 +8 87 15 12.5 0 10 0 +8 88 85 18.4 0 10 1 +8 89 1 12.5 0 10 1 +8 90 15 20.7 0 10 0 +8 91 6 26.3 0 10 1 +8 92 170 24.4 0 10 0 +8 93 28 21.1 0 10 1 +8 94 15 10.5 0 10 0 +8 95 6 35.1 0 10 1 +8 96 85 30.9 0 10 1 +8 97 1 21.3 0 10 1 +8 98 15 39.6 0 10 1 +8 99 170 35.6 0 10 1 +8 100 15 10.7 0 10 1 +8 101 85 10.5 0 10 0 +8 102 28 16.9 0 10 0 +8 103 170 15 0 10 0 +8 104 170 38.3 0 10 0 +8 105 6 35.1 0 10 1 +8 106 1 31.6 0 10 1 +8 107 15 30.1 0 10 1 +8 108 1 11.2 0 10 1 +9 1 1 11.2 0 10 1 +9 2 6 10.5 0 10 0 +9 3 28 31.4 0 10 1 +9 4 15 49 0 10 1 +9 5 15 12.5 0 10 1 +9 6 170 33.9 0 10 1 +9 7 170 35.6 0 10 0 +9 8 6 17.4 0 10 1 +9 9 1 21.3 0 10 1 +9 10 1 10.7 0 10 0 +9 11 1 11.8 0 10 1 +9 12 1 31.6 0 10 1 +9 13 6 16.7 0 10 0 +9 14 1 10.7 0 10 1 +9 15 170 15 0 10 0 +9 16 170 43.3 0 10 1 +9 17 85 27.3 0 10 0 +9 18 28 21.7 0 10 1 +9 19 1 11 0 10 0 +9 20 1 11.8 0 10 1 +9 21 1 12.5 0 10 1 +9 22 6 16.7 0 
10 1 +9 23 170 35.6 0 10 1 +9 24 6 11 0 10 1 +9 25 85 30.9 0 10 0 +9 26 28 13 0 10 0 +9 27 28 41.1 0 10 1 +9 28 85 10.5 0 10 0 +9 29 1 11.2 0 10 1 +9 30 85 36.1 0 10 1 +9 31 1 12.5 0 10 1 +9 32 6 26.3 0 10 1 +9 33 170 33.9 0 10 1 +9 34 170 43.3 0 10 0 +9 35 85 10.5 0 10 0 +9 36 170 49.8 0 10 0 +9 37 15 18.6 0 10 1 +9 38 6 14 0 10 1 +9 39 6 11 0 10 0 +9 40 15 39.6 0 10 1 +9 41 85 19.5 0 10 0 +9 42 15 10.7 0 10 0 +9 43 85 36.1 0 10 1 +9 44 1 18 0 10 0 +9 45 170 49.8 0 10 1 +9 46 15 20.7 0 10 1 +9 47 1 11 0 10 1 +9 48 28 13.4 0 10 1 +9 49 15 20.7 0 10 1 +9 50 1 18 0 10 1 +9 51 85 18.4 0 10 1 +9 52 85 18.4 0 10 0 +9 53 85 26 0 10 1 +9 54 28 31.4 0 10 1 +9 55 6 44 0 10 1 +9 56 6 13.2 0 10 0 +9 57 6 10.5 0 10 0 +9 58 28 12 0 10 0 +9 59 15 10.5 0 10 0 +9 60 6 17.4 0 10 1 +9 61 170 24.4 0 10 0 +9 62 15 30.1 0 10 1 +9 63 6 35.1 0 10 1 +9 64 15 10.7 0 10 1 +9 65 15 14.2 0 10 1 +9 66 170 41.3 0 10 1 +9 67 28 21.1 0 10 1 +9 68 6 26.3 0 10 1 +9 69 15 14.2 0 10 1 +9 70 85 24.4 0 10 0 +9 71 85 27.3 0 10 0 +9 72 28 13.4 0 10 1 +9 73 170 29.7 0 10 0 +9 74 28 15.5 0 10 0 +9 75 85 45 0 10 1 +9 76 170 38.3 0 10 0 +9 77 28 16.9 0 10 1 +9 78 6 35.1 0 10 1 +9 79 85 19.5 0 10 0 +9 80 15 18.6 0 10 1 +9 81 15 12.5 0 10 1 +9 82 85 30.9 0 10 0 +9 83 28 12 0 10 1 +9 84 28 21.7 0 10 1 +9 85 28 13 0 10 0 +9 86 1 41.9 0 10 1 +9 87 15 39.6 0 10 1 +9 88 6 13.2 0 10 0 +9 89 1 21.3 0 10 1 +9 90 170 15 0 10 0 +9 91 15 30.1 0 10 1 +9 92 85 26 0 10 0 +9 93 15 49 0 10 1 +9 94 85 45 0 10 1 +9 95 6 14 0 10 0 +9 96 170 38.3 0 10 1 +9 97 170 29.7 0 10 0 +9 98 28 16.9 0 10 0 +9 99 6 44 0 10 1 +9 100 1 31.6 0 10 1 +9 101 15 10.5 0 10 0 +9 102 28 41.1 0 10 1 +9 103 85 24.4 0 10 0 +9 104 28 15.5 0 10 0 +9 105 28 21.1 0 10 1 +9 106 1 41.9 0 10 1 +9 107 170 41.3 0 10 1 +9 108 170 24.4 0 10 0 +10 1 170 41.3 0 10 0 +10 2 6 10.5 0 10 0 +10 3 170 15 0 10 0 +10 4 85 27.3 0 10 0 +10 5 170 15 0 10 0 +10 6 28 13 0 10 0 +10 7 6 35.1 0 10 1 +10 8 15 14.2 0 10 1 +10 9 85 19.5 0 10 0 +10 10 170 43.3 0 10 1 +10 11 85 45 0 10 1 +10 12 1 41.9 0 10 1 +10 13 15 30.1 0 10 1 +10 14 85 26 0 10 1 +10 15 28 12 0 10 1 +10 16 1 11.8 0 10 1 +10 17 15 10.7 0 10 0 +10 18 6 44 0 10 1 +10 19 1 18 0 10 1 +10 20 28 12 0 10 0 +10 21 15 20.7 0 10 1 +10 22 28 41.1 0 10 1 +10 23 15 39.6 0 10 1 +10 24 85 26 0 10 0 +10 25 6 26.3 0 10 1 +10 26 6 35.1 0 10 1 +10 27 6 26.3 0 10 1 +10 28 15 10.5 0 10 0 +10 29 1 31.6 0 10 1 +10 30 170 41.3 0 10 0 +10 31 6 10.5 0 10 1 +10 32 1 11.2 0 10 0 +10 33 170 29.7 0 10 0 +10 34 85 45 0 10 0 +10 35 15 12.5 0 10 0 +10 36 170 38.3 0 10 0 +10 37 85 19.5 0 10 0 +10 38 28 13.4 0 10 0 +10 39 28 13.4 0 10 0 +10 40 15 30.1 0 10 1 +10 41 28 41.1 0 10 1 +10 42 15 10.5 0 10 0 +10 43 170 33.9 0 10 0 +10 44 6 14 0 10 1 +10 45 170 35.6 0 10 1 +10 46 85 10.5 0 10 0 +10 47 85 30.9 0 10 1 +10 48 28 15.5 0 10 0 +10 49 15 39.6 0 10 1 +10 50 6 13.2 0 10 1 +10 51 1 10.7 0 10 1 +10 52 15 14.2 0 10 1 +10 53 6 11 0 10 0 +10 54 6 17.4 0 10 1 +10 55 170 24.4 0 10 1 +10 56 85 18.4 0 10 1 +10 57 28 31.4 0 10 1 +10 58 28 21.7 0 10 1 +10 59 15 18.6 0 10 1 +10 60 85 10.5 0 10 0 +10 61 6 16.7 0 10 1 +10 62 85 18.4 0 10 0 +10 63 6 44 0 10 1 +10 64 1 18 0 10 1 +10 65 28 16.9 0 10 0 +10 66 15 10.7 0 10 0 +10 67 1 10.7 0 10 1 +10 68 15 49 0 10 1 +10 69 170 38.3 0 10 1 +10 70 28 15.5 0 10 0 +10 71 28 31.4 0 10 1 +10 72 6 14 0 10 1 +10 73 170 35.6 0 10 0 +10 74 1 12.5 0 10 1 +10 75 15 18.6 0 10 0 +10 76 1 31.6 0 10 1 +10 77 28 16.9 0 10 1 +10 78 1 21.3 0 10 1 +10 79 15 12.5 0 10 0 +10 80 170 49.8 0 10 0 +10 81 85 27.3 0 10 0 +10 82 6 16.7 0 10 1 +10 83 85 36.1 0 10 0 +10 84 85 
36.1 0 10 1 +10 85 6 17.4 0 10 1 +10 86 1 11 0 10 0 +10 87 6 13.2 0 10 0 +10 88 170 29.7 0 10 0 +10 89 1 11.2 0 10 0 +10 90 1 41.9 0 10 1 +10 91 170 33.9 0 10 0 +10 92 1 11.8 0 10 0 +10 93 15 49 0 10 1 +10 94 1 21.3 0 10 0 +10 95 85 30.9 0 10 0 +10 96 15 20.7 0 10 1 +10 97 28 21.1 0 10 0 +10 98 170 24.4 0 10 0 +10 99 85 24.4 0 10 0 +10 100 85 24.4 0 10 0 +10 101 28 21.1 0 10 0 +10 102 28 21.7 0 10 1 +10 103 170 49.8 0 10 1 +10 104 6 11 0 10 1 +10 105 1 12.5 0 10 1 +10 106 28 13 0 10 0 +10 107 170 43.3 0 10 0 +10 108 1 11 0 10 0 +11 1 6 10.5 0 10 0 +11 2 85 36.1 0 10 1 +11 3 85 27.3 0 10 0 +11 4 6 16.7 0 10 1 +11 5 1 31.6 0 10 1 +11 6 170 33.9 0 10 0 +11 7 15 10.5 0 10 0 +11 8 170 35.6 0 10 0 +11 9 15 10.7 0 10 0 +11 10 15 10.7 0 10 1 +11 11 170 15 0 10 0 +11 12 85 26 0 10 0 +11 13 28 21.1 0 10 1 +11 14 170 24.4 0 10 0 +11 15 28 13 0 10 0 +11 16 15 12.5 0 10 1 +11 17 85 19.5 0 10 0 +11 18 85 26 0 10 0 +11 19 6 11 0 10 0 +11 20 6 13.2 0 10 0 +11 21 28 15.5 0 10 0 +11 22 170 41.3 0 10 0 +11 23 6 14 0 10 1 +11 24 1 21.3 0 10 1 +11 25 85 18.4 0 10 1 +11 26 28 12 0 10 1 +11 27 15 49 0 10 1 +11 28 85 45 0 10 1 +11 29 170 41.3 0 10 0 +11 30 170 33.9 0 10 0 +11 31 28 21.7 0 10 1 +11 32 15 18.6 0 10 1 +11 33 1 12.5 0 10 0 +11 34 1 10.7 0 10 1 +11 35 28 21.1 0 10 0 +11 36 170 35.6 0 10 0 +11 37 1 11.2 0 10 1 +11 38 85 19.5 0 10 1 +11 39 1 41.9 0 10 1 +11 40 28 16.9 0 10 0 +11 41 15 30.1 0 10 1 +11 42 15 20.7 0 10 0 +11 43 15 14.2 0 10 1 +11 44 28 13 0 10 1 +11 45 15 12.5 0 10 1 +11 46 170 43.3 0 10 1 +11 47 170 49.8 0 10 1 +11 48 6 10.5 0 10 1 +11 49 15 30.1 0 10 1 +11 50 28 41.1 0 10 1 +11 51 28 41.1 0 10 1 +11 52 6 26.3 0 10 1 +11 53 85 10.5 0 10 0 +11 54 6 26.3 0 10 1 +11 55 6 44 0 10 1 +11 56 85 30.9 0 10 1 +11 57 85 24.4 0 10 0 +11 58 15 39.6 0 10 1 +11 59 1 41.9 0 10 1 +11 60 170 49.8 0 10 0 +11 61 28 31.4 0 10 1 +11 62 28 15.5 0 10 1 +11 63 28 12 0 10 0 +11 64 6 35.1 0 10 1 +11 65 85 24.4 0 10 0 +11 66 15 49 0 10 1 +11 67 15 39.6 0 10 1 +11 68 1 31.6 0 10 1 +11 69 85 36.1 0 10 0 +11 70 15 14.2 0 10 1 +11 71 28 16.9 0 10 0 +11 72 6 35.1 0 10 1 +11 73 170 15 0 10 0 +11 74 1 12.5 0 10 1 +11 75 15 20.7 0 10 0 +11 76 170 24.4 0 10 0 +11 77 85 18.4 0 10 0 +11 78 6 17.4 0 10 1 +11 79 28 31.4 0 10 1 +11 80 1 10.7 0 10 1 +11 81 6 11 0 10 1 +11 82 1 11.8 0 10 0 +11 83 170 43.3 0 10 1 +11 84 1 18 0 10 1 +11 85 1 11.8 0 10 1 +11 86 6 14 0 10 1 +11 87 85 10.5 0 10 0 +11 88 85 30.9 0 10 0 +11 89 85 27.3 0 10 0 +11 90 28 13.4 0 10 0 +11 91 6 17.4 0 10 1 +11 92 170 38.3 0 10 0 +11 93 6 16.7 0 10 1 +11 94 170 38.3 0 10 0 +11 95 1 18 0 10 1 +11 96 1 11 0 10 1 +11 97 170 29.7 0 10 0 +11 98 170 29.7 0 10 0 +11 99 15 18.6 0 10 1 +11 100 15 10.5 0 10 0 +11 101 1 21.3 0 10 1 +11 102 1 11.2 0 10 0 +11 103 28 13.4 0 10 0 +11 104 85 45 0 10 1 +11 105 28 21.7 0 10 1 +11 106 1 11 0 10 0 +11 107 6 13.2 0 10 1 +11 108 6 44 0 10 1 +12 1 1 11.2 0 10 0 +12 2 15 20.7 0 10 1 +12 3 6 10.5 0 10 0 +12 4 6 35.1 0 10 1 +12 5 28 13 0 10 0 +12 6 1 21.3 0 10 1 +12 7 170 35.6 0 10 0 +12 8 1 11 0 10 0 +12 9 1 31.6 0 10 1 +12 10 85 10.5 0 10 0 +12 11 28 13 0 10 0 +12 12 170 43.3 0 10 0 +12 13 170 29.7 0 10 0 +12 14 85 24.4 0 10 0 +12 15 85 27.3 0 10 1 +12 16 85 27.3 0 10 1 +12 17 28 16.9 0 10 1 +12 18 170 41.3 0 10 0 +12 19 28 13.4 0 10 0 +12 20 170 38.3 0 10 0 +12 21 170 43.3 0 10 1 +12 22 15 12.5 0 10 0 +12 23 15 10.7 0 10 0 +12 24 85 45 0 10 1 +12 25 170 15 0 10 0 +12 26 28 12 0 10 1 +12 27 1 41.9 0 10 1 +12 28 15 39.6 0 10 1 +12 29 6 11 0 10 1 +12 30 170 29.7 0 10 0 +12 31 170 49.8 0 10 1 +12 32 15 10.7 0 10 1 +12 33 85 10.5 0 10 1 
+12 34 170 15 0 10 0 +12 35 170 41.3 0 10 1 +12 36 6 16.7 0 10 1 +12 37 15 18.6 0 10 1 +12 38 15 14.2 0 10 1 +12 39 6 35.1 0 10 1 +12 40 6 13.2 0 10 1 +12 41 1 12.5 0 10 1 +12 42 6 17.4 0 10 1 +12 43 1 18 0 10 1 +12 44 1 21.3 0 10 1 +12 45 1 11.2 0 10 0 +12 46 1 12.5 0 10 1 +12 47 1 41.9 0 10 1 +12 48 15 30.1 0 10 1 +12 49 6 17.4 0 10 1 +12 50 15 10.5 0 10 0 +12 51 15 14.2 0 10 0 +12 52 28 41.1 0 10 1 +12 53 85 45 0 10 1 +12 54 15 39.6 0 10 1 +12 55 28 15.5 0 10 0 +12 56 85 30.9 0 10 0 +12 57 85 36.1 0 10 1 +12 58 170 35.6 0 10 0 +12 59 6 16.7 0 10 1 +12 60 6 13.2 0 10 0 +12 61 85 30.9 0 10 1 +12 62 15 10.5 0 10 0 +12 63 28 12 0 10 0 +12 64 1 11 0 10 1 +12 65 15 18.6 0 10 1 +12 66 6 10.5 0 10 1 +12 67 6 11 0 10 0 +12 68 15 20.7 0 10 1 +12 69 28 13.4 0 10 1 +12 70 1 10.7 0 10 1 +12 71 6 44 0 10 1 +12 72 170 38.3 0 10 0 +12 73 28 31.4 0 10 1 +12 74 15 12.5 0 10 1 +12 75 170 33.9 0 10 0 +12 76 15 49 0 10 1 +12 77 85 26 0 10 0 +12 78 85 18.4 0 10 0 +12 79 1 11.8 0 10 0 +12 80 85 18.4 0 10 0 +12 81 85 24.4 0 10 1 +12 82 170 49.8 0 10 0 +12 83 28 21.7 0 10 1 +12 84 28 16.9 0 10 1 +12 85 1 18 0 10 0 +12 86 6 26.3 0 10 0 +12 87 28 21.7 0 10 1 +12 88 6 26.3 0 10 1 +12 89 6 44 0 10 1 +12 90 28 21.1 0 10 1 +12 91 85 36.1 0 10 1 +12 92 85 26 0 10 0 +12 93 28 41.1 0 10 1 +12 94 28 21.1 0 10 1 +12 95 28 31.4 0 10 1 +12 96 1 10.7 0 10 0 +12 97 15 30.1 0 10 1 +12 98 1 31.6 0 10 1 +12 99 85 19.5 0 10 0 +12 100 170 24.4 0 10 0 +12 101 15 49 0 10 1 +12 102 6 14 0 10 1 +12 103 85 19.5 0 10 1 +12 104 28 15.5 0 10 0 +12 105 170 24.4 0 10 0 +12 106 1 11.8 0 10 0 +12 107 6 14 0 10 1 +12 108 170 33.9 0 10 0 +13 1 170 41.3 0 10 0 +13 2 15 10.5 0 10 0 +13 3 170 15 0 10 0 +13 4 15 12.5 0 10 1 +13 5 85 45 0 10 1 +13 6 6 44 0 10 1 +13 7 1 11.2 0 10 1 +13 8 170 29.7 0 10 0 +13 9 85 27.3 0 10 1 +13 10 1 12.5 0 10 1 +13 11 15 20.7 0 10 1 +13 12 1 18 0 10 1 +13 13 6 16.7 0 10 1 +13 14 28 12 0 10 0 +13 15 6 35.1 0 10 1 +13 16 15 39.6 0 10 1 +13 17 28 41.1 0 10 1 +13 18 15 18.6 0 10 1 +13 19 1 11.2 0 10 1 +13 20 85 36.1 0 10 0 +13 21 15 10.5 0 10 0 +13 22 170 41.3 0 10 1 +13 23 28 16.9 0 10 1 +13 24 85 26 0 10 0 +13 25 28 16.9 0 10 1 +13 26 6 35.1 0 10 1 +13 27 85 24.4 0 10 1 +13 28 85 45 0 10 1 +13 29 1 11.8 0 10 1 +13 30 170 49.8 0 10 1 +13 31 170 33.9 0 10 0 +13 32 28 13.4 0 10 1 +13 33 1 41.9 0 10 1 +13 34 6 26.3 0 10 1 +13 35 170 35.6 0 10 1 +13 36 6 13.2 0 10 1 +13 37 170 29.7 0 10 0 +13 38 1 11.8 0 10 0 +13 39 85 27.3 0 10 1 +13 40 28 21.7 0 10 1 +13 41 6 14 0 10 0 +13 42 1 11 0 10 1 +13 43 6 14 0 10 1 +13 44 170 43.3 0 10 1 +13 45 15 10.7 0 10 1 +13 46 170 24.4 0 10 0 +13 47 28 21.1 0 10 1 +13 48 6 11 0 10 1 +13 49 15 39.6 0 10 1 +13 50 6 13.2 0 10 1 +13 51 15 10.7 0 10 0 +13 52 85 10.5 0 10 0 +13 53 85 18.4 0 10 1 +13 54 1 12.5 0 10 1 +13 55 15 30.1 0 10 1 +13 56 85 24.4 0 10 0 +13 57 28 12 0 10 0 +13 58 15 49 0 10 1 +13 59 28 41.1 0 10 1 +13 60 170 15 0 10 0 +13 61 85 26 0 10 1 +13 62 15 18.6 0 10 1 +13 63 28 13 0 10 0 +13 64 28 15.5 0 10 0 +13 65 28 31.4 0 10 1 +13 66 85 30.9 0 10 1 +13 67 28 13.4 0 10 0 +13 68 85 10.5 0 10 0 +13 69 1 18 0 10 1 +13 70 28 31.4 0 10 1 +13 71 170 33.9 0 10 0 +13 72 1 31.6 0 10 1 +13 73 28 21.1 0 10 1 +13 74 6 17.4 0 10 1 +13 75 1 21.3 0 10 1 +13 76 6 44 0 10 1 +13 77 85 36.1 0 10 1 +13 78 170 38.3 0 10 0 +13 79 85 30.9 0 10 1 +13 80 170 24.4 0 10 0 +13 81 15 14.2 0 10 0 +13 82 85 19.5 0 10 0 +13 83 85 19.5 0 10 1 +13 84 1 11 0 10 0 +13 85 170 49.8 0 10 1 +13 86 1 41.9 0 10 1 +13 87 6 11 0 10 0 +13 88 28 13 0 10 1 +13 89 15 14.2 0 10 1 +13 90 15 20.7 0 10 1 +13 91 170 35.6 0 10 0 +13 
92 28 21.7 0 10 1 +13 93 15 49 0 10 1 +13 94 1 10.7 0 10 1 +13 95 15 12.5 0 10 1 +13 96 28 15.5 0 10 1 +13 97 170 43.3 0 10 1 +13 98 1 21.3 0 10 1 +13 99 6 10.5 0 10 0 +13 100 15 30.1 0 10 1 +13 101 6 17.4 0 10 1 +13 102 6 10.5 0 10 1 +13 103 1 10.7 0 10 1 +13 104 1 31.6 0 10 1 +13 105 6 16.7 0 10 1 +13 106 6 26.3 0 10 1 +13 107 170 38.3 0 10 0 +13 108 85 18.4 0 10 0 +14 1 28 21.7 0 10 1 +14 2 15 14.2 0 10 0 +14 3 6 11 0 10 0 +14 4 15 14.2 0 10 0 +14 5 15 10.7 0 10 1 +14 6 85 30.9 0 10 1 +14 7 6 16.7 0 10 0 +14 8 1 11.8 0 10 1 +14 9 28 13.4 0 10 1 +14 10 1 18 0 10 1 +14 11 15 39.6 0 10 1 +14 12 15 30.1 0 10 1 +14 13 1 11 0 10 0 +14 14 170 41.3 0 10 0 +14 15 6 16.7 0 10 1 +14 16 170 43.3 0 10 0 +14 17 6 35.1 0 10 1 +14 18 15 20.7 0 10 1 +14 19 85 26 0 10 1 +14 20 28 16.9 0 10 1 +14 21 85 19.5 0 10 0 +14 22 28 21.1 0 10 1 +14 23 1 31.6 0 10 1 +14 24 6 26.3 0 10 1 +14 25 28 21.7 0 10 1 +14 26 6 10.5 0 10 0 +14 27 85 24.4 0 10 0 +14 28 85 10.5 0 10 0 +14 29 15 49 0 10 1 +14 30 85 45 0 10 1 +14 31 170 29.7 0 10 1 +14 32 85 27.3 0 10 1 +14 33 170 35.6 0 10 0 +14 34 1 11.8 0 10 1 +14 35 1 18 0 10 1 +14 36 85 27.3 0 10 1 +14 37 6 14 0 10 0 +14 38 28 15.5 0 10 0 +14 39 28 12 0 10 0 +14 40 170 38.3 0 10 1 +14 41 6 13.2 0 10 1 +14 42 85 45 0 10 1 +14 43 6 17.4 0 10 1 +14 44 85 10.5 0 10 0 +14 45 15 10.5 0 10 0 +14 46 15 30.1 0 10 1 +14 47 170 24.4 0 10 0 +14 48 1 12.5 0 10 1 +14 49 15 10.5 0 10 0 +14 50 170 38.3 0 10 1 +14 51 85 18.4 0 10 1 +14 52 1 11 0 10 1 +14 53 170 24.4 0 10 1 +14 54 1 11.2 0 10 1 +14 55 6 10.5 0 10 0 +14 56 1 10.7 0 10 1 +14 57 6 35.1 0 10 1 +14 58 28 13 0 10 1 +14 59 170 29.7 0 10 0 +14 60 28 12 0 10 0 +14 61 85 36.1 0 10 1 +14 62 15 10.7 0 10 1 +14 63 28 21.1 0 10 1 +14 64 15 18.6 0 10 1 +14 65 170 43.3 0 10 1 +14 66 15 18.6 0 10 0 +14 67 85 26 0 10 1 +14 68 28 13.4 0 10 1 +14 69 1 21.3 0 10 1 +14 70 6 11 0 10 1 +14 71 170 35.6 0 10 1 +14 72 170 49.8 0 10 1 +14 73 1 41.9 0 10 1 +14 74 15 12.5 0 10 0 +14 75 1 10.7 0 10 1 +14 76 170 49.8 0 10 1 +14 77 1 31.6 0 10 1 +14 78 85 36.1 0 10 1 +14 79 28 15.5 0 10 1 +14 80 6 44 0 10 1 +14 81 28 13 0 10 0 +14 82 6 14 0 10 1 +14 83 85 18.4 0 10 0 +14 84 15 12.5 0 10 0 +14 85 1 11.2 0 10 0 +14 86 15 49 0 10 1 +14 87 170 33.9 0 10 1 +14 88 85 19.5 0 10 0 +14 89 6 17.4 0 10 1 +14 90 28 41.1 0 10 1 +14 91 6 44 0 10 1 +14 92 170 15 0 10 0 +14 93 28 31.4 0 10 1 +14 94 1 12.5 0 10 1 +14 95 28 16.9 0 10 1 +14 96 85 24.4 0 10 1 +14 97 15 39.6 0 10 1 +14 98 170 41.3 0 10 1 +14 99 1 21.3 0 10 1 +14 100 170 15 0 10 0 +14 101 170 33.9 0 10 1 +14 102 85 30.9 0 10 1 +14 103 28 41.1 0 10 1 +14 104 6 26.3 0 10 1 +14 105 28 31.4 0 10 1 +14 106 6 13.2 0 10 1 +14 107 15 20.7 0 10 1 +14 108 1 41.9 0 10 1 +15 1 15 10.7 0 10 0 +15 2 28 13.4 0 10 0 +15 3 170 33.9 0 10 0 +15 4 15 49 0 10 1 +15 5 28 21.7 0 10 0 +15 6 170 15 0 10 0 +15 7 28 41.1 0 10 1 +15 8 85 45 0 10 1 +15 9 28 13 0 10 1 +15 10 170 33.9 0 10 1 +15 11 6 11 0 10 0 +15 12 85 27.3 0 10 1 +15 13 1 11.8 0 10 1 +15 14 1 10.7 0 10 1 +15 15 28 12 0 10 0 +15 16 6 14 0 10 0 +15 17 1 11.2 0 10 0 +15 18 15 39.6 0 10 1 +15 19 15 30.1 0 10 0 +15 20 15 20.7 0 10 1 +15 21 28 13 0 10 0 +15 22 6 44 0 10 1 +15 23 170 38.3 0 10 0 +15 24 15 18.6 0 10 1 +15 25 15 14.2 0 10 1 +15 26 15 18.6 0 10 1 +15 27 170 41.3 0 10 0 +15 28 28 21.1 0 10 1 +15 29 6 14 0 10 1 +15 30 28 15.5 0 10 0 +15 31 170 24.4 0 10 0 +15 32 1 31.6 0 10 1 +15 33 6 35.1 0 10 1 +15 34 15 30.1 0 10 1 +15 35 170 49.8 0 10 1 +15 36 85 18.4 0 10 0 +15 37 15 10.5 0 10 1 +15 38 170 38.3 0 10 0 +15 39 6 26.3 0 10 1 +15 40 170 41.3 0 10 1 +15 41 85 10.5 0 10 
0 +15 42 1 18 0 10 1 +15 43 6 10.5 0 10 1 +15 44 85 19.5 0 10 0 +15 45 1 21.3 0 10 1 +15 46 28 13.4 0 10 1 +15 47 15 39.6 0 10 1 +15 48 170 15 0 10 0 +15 49 85 24.4 0 10 0 +15 50 15 12.5 0 10 0 +15 51 85 30.9 0 10 0 +15 52 28 12 0 10 0 +15 53 85 18.4 0 10 0 +15 54 28 31.4 0 10 1 +15 55 170 35.6 0 10 0 +15 56 1 41.9 0 10 1 +15 57 15 10.7 0 10 0 +15 58 6 44 0 10 1 +15 59 85 26 0 10 0 +15 60 6 26.3 0 10 1 +15 61 170 29.7 0 10 0 +15 62 6 17.4 0 10 1 +15 63 85 36.1 0 10 0 +15 64 1 11 0 10 1 +15 65 1 11.2 0 10 1 +15 66 15 20.7 0 10 1 +15 67 6 10.5 0 10 0 +15 68 28 16.9 0 10 0 +15 69 170 43.3 0 10 0 +15 70 1 21.3 0 10 0 +15 71 1 31.6 0 10 1 +15 72 170 24.4 0 10 0 +15 73 170 35.6 0 10 0 +15 74 1 10.7 0 10 1 +15 75 170 29.7 0 10 0 +15 76 85 36.1 0 10 0 +15 77 6 11 0 10 1 +15 78 1 12.5 0 10 1 +15 79 15 49 0 10 1 +15 80 85 45 0 10 1 +15 81 28 41.1 0 10 1 +15 82 85 10.5 0 10 0 +15 83 1 12.5 0 10 1 +15 84 85 30.9 0 10 1 +15 85 28 16.9 0 10 0 +15 86 85 24.4 0 10 1 +15 87 1 41.9 0 10 1 +15 88 6 16.7 0 10 1 +15 89 170 43.3 0 10 1 +15 90 1 11 0 10 1 +15 91 170 49.8 0 10 1 +15 92 15 12.5 0 10 0 +15 93 1 11.8 0 10 0 +15 94 6 17.4 0 10 0 +15 95 28 21.1 0 10 1 +15 96 28 21.7 0 10 1 +15 97 85 27.3 0 10 0 +15 98 28 31.4 0 10 1 +15 99 6 13.2 0 10 0 +15 100 28 15.5 0 10 1 +15 101 1 18 0 10 0 +15 102 85 19.5 0 10 0 +15 103 6 16.7 0 10 1 +15 104 15 14.2 0 10 0 +15 105 6 13.2 0 10 1 +15 106 6 35.1 0 10 1 +15 107 15 10.5 0 10 1 +15 108 85 26 0 10 0 +16 1 85 10.5 0 10 0 +16 2 85 36.1 0 10 0 +16 3 28 41.1 0 10 1 +16 4 15 12.5 0 10 0 +16 5 6 17.4 0 10 1 +16 6 6 44 0 10 1 +16 7 6 14 0 10 0 +16 8 28 12 0 10 0 +16 9 28 41.1 0 10 1 +16 10 15 18.6 0 10 0 +16 11 85 27.3 0 10 0 +16 12 1 31.6 0 10 1 +16 13 85 45 0 10 1 +16 14 170 38.3 0 10 0 +16 15 28 16.9 0 10 0 +16 16 170 29.7 0 10 0 +16 17 170 15 0 10 0 +16 18 6 14 0 10 1 +16 19 85 18.4 0 10 0 +16 20 170 43.3 0 10 0 +16 21 170 33.9 0 10 0 +16 22 85 26 0 10 0 +16 23 15 10.7 0 10 0 +16 24 15 10.5 0 10 1 +16 25 6 13.2 0 10 0 +16 26 1 10.7 0 10 1 +16 27 28 15.5 0 10 0 +16 28 28 13.4 0 10 0 +16 29 170 35.6 0 10 0 +16 30 170 41.3 0 10 1 +16 31 1 31.6 0 10 1 +16 32 28 15.5 0 10 0 +16 33 85 10.5 0 10 0 +16 34 28 21.7 0 10 0 +16 35 1 21.3 0 10 1 +16 36 170 43.3 0 10 0 +16 37 15 49 0 10 1 +16 38 85 30.9 0 10 0 +16 39 1 11 0 10 0 +16 40 170 41.3 0 10 1 +16 41 6 13.2 0 10 0 +16 42 85 24.4 0 10 0 +16 43 170 15 0 10 1 +16 44 1 11.8 0 10 0 +16 45 85 26 0 10 0 +16 46 15 39.6 0 10 1 +16 47 15 39.6 0 10 1 +16 48 6 26.3 0 10 1 +16 49 1 10.7 0 10 0 +16 50 85 24.4 0 10 1 +16 51 15 20.7 0 10 0 +16 52 1 11 0 10 1 +16 53 1 12.5 0 10 1 +16 54 1 11.2 0 10 1 +16 55 28 21.1 0 10 0 +16 56 170 49.8 0 10 1 +16 57 1 21.3 0 10 1 +16 58 28 13.4 0 10 0 +16 59 15 10.5 0 10 0 +16 60 6 17.4 0 10 1 +16 61 28 31.4 0 10 1 +16 62 85 19.5 0 10 0 +16 63 85 36.1 0 10 1 +16 64 15 14.2 0 10 0 +16 65 6 35.1 0 10 1 +16 66 6 10.5 0 10 1 +16 67 15 18.6 0 10 0 +16 68 1 41.9 0 10 1 +16 69 1 18 0 10 1 +16 70 28 21.1 0 10 1 +16 71 170 24.4 0 10 1 +16 72 15 10.7 0 10 1 +16 73 6 16.7 0 10 0 +16 74 170 49.8 0 10 0 +16 75 15 30.1 0 10 1 +16 76 15 14.2 0 10 0 +16 77 15 20.7 0 10 1 +16 78 28 21.7 0 10 1 +16 79 85 27.3 0 10 0 +16 80 170 35.6 0 10 0 +16 81 28 16.9 0 10 0 +16 82 85 18.4 0 10 0 +16 83 28 13 0 10 0 +16 84 6 11 0 10 0 +16 85 6 35.1 0 10 1 +16 86 1 41.9 0 10 1 +16 87 1 12.5 0 10 1 +16 88 6 11 0 10 1 +16 89 6 26.3 0 10 1 +16 90 170 24.4 0 10 0 +16 91 15 30.1 0 10 1 +16 92 6 44 0 10 1 +16 93 15 12.5 0 10 0 +16 94 85 45 0 10 1 +16 95 15 49 0 10 1 +16 96 170 29.7 0 10 0 +16 97 1 11.2 0 10 1 +16 98 6 10.5 0 10 1 +16 99 170 33.9 0 
10 0 +16 100 28 13 0 10 0 +16 101 85 19.5 0 10 0 +16 102 170 38.3 0 10 0 +16 103 28 31.4 0 10 1 +16 104 1 18 0 10 1 +16 105 28 12 0 10 0 +16 106 6 16.7 0 10 1 +16 107 1 11.8 0 10 0 +16 108 85 30.9 0 10 0 +17 1 28 21.7 0 10 0 +17 2 170 43.3 0 10 0 +17 3 28 21.7 0 10 0 +17 4 170 15 0 10 0 +17 5 170 43.3 0 10 0 +17 6 15 18.6 0 10 1 +17 7 85 27.3 0 10 0 +17 8 6 11 0 10 0 +17 9 28 16.9 0 10 0 +17 10 15 30.1 0 10 1 +17 11 15 20.7 0 10 1 +17 12 6 26.3 0 10 1 +17 13 28 12 0 10 0 +17 14 6 10.5 0 10 1 +17 15 1 21.3 0 10 1 +17 16 85 36.1 0 10 1 +17 17 15 18.6 0 10 1 +17 18 28 12 0 10 0 +17 19 170 15 0 10 0 +17 20 28 41.1 0 10 1 +17 21 28 31.4 0 10 1 +17 22 85 45 0 10 1 +17 23 15 12.5 0 10 0 +17 24 6 16.7 0 10 1 +17 25 15 20.7 0 10 0 +17 26 1 11.2 0 10 1 +17 27 15 39.6 0 10 1 +17 28 6 35.1 0 10 1 +17 29 1 10.7 0 10 1 +17 30 15 30.1 0 10 1 +17 31 28 13.4 0 10 0 +17 32 6 16.7 0 10 1 +17 33 170 41.3 0 10 1 +17 34 6 10.5 0 10 0 +17 35 85 19.5 0 10 0 +17 36 6 13.2 0 10 0 +17 37 6 26.3 0 10 1 +17 38 170 49.8 0 10 0 +17 39 1 31.6 0 10 1 +17 40 15 10.7 0 10 1 +17 41 170 24.4 0 10 0 +17 42 6 11 0 10 0 +17 43 15 10.5 0 10 1 +17 44 170 29.7 0 10 0 +17 45 28 15.5 0 10 0 +17 46 85 18.4 0 10 0 +17 47 85 18.4 0 10 0 +17 48 6 14 0 10 1 +17 49 170 38.3 0 10 0 +17 50 15 39.6 0 10 1 +17 51 1 18 0 10 1 +17 52 1 18 0 10 1 +17 53 1 11.8 0 10 1 +17 54 85 45 0 10 1 +17 55 170 33.9 0 10 0 +17 56 170 35.6 0 10 0 +17 57 1 12.5 0 10 0 +17 58 6 44 0 10 1 +17 59 1 11 0 10 0 +17 60 28 15.5 0 10 0 +17 61 15 49 0 10 1 +17 62 170 33.9 0 10 0 +17 63 85 26 0 10 0 +17 64 1 10.7 0 10 1 +17 65 28 16.9 0 10 0 +17 66 6 14 0 10 1 +17 67 15 10.5 0 10 1 +17 68 15 49 0 10 1 +17 69 85 36.1 0 10 0 +17 70 1 31.6 0 10 1 +17 71 1 11 0 10 1 +17 72 28 21.1 0 10 0 +17 73 85 30.9 0 10 0 +17 74 6 44 0 10 1 +17 75 15 12.5 0 10 1 +17 76 170 49.8 0 10 0 +17 77 28 13 0 10 1 +17 78 85 10.5 0 10 0 +17 79 28 13.4 0 10 0 +17 80 1 12.5 0 10 1 +17 81 28 41.1 0 10 1 +17 82 170 38.3 0 10 0 +17 83 170 35.6 0 10 0 +17 84 28 21.1 0 10 1 +17 85 15 10.7 0 10 1 +17 86 1 41.9 0 10 1 +17 87 28 31.4 0 10 1 +17 88 85 10.5 0 10 0 +17 89 1 11.8 0 10 1 +17 90 15 14.2 0 10 1 +17 91 85 24.4 0 10 0 +17 92 6 13.2 0 10 1 +17 93 85 19.5 0 10 0 +17 94 6 17.4 0 10 1 +17 95 85 30.9 0 10 1 +17 96 170 24.4 0 10 0 +17 97 28 13 0 10 0 +17 98 6 17.4 0 10 1 +17 99 170 41.3 0 10 0 +17 100 85 26 0 10 1 +17 101 85 24.4 0 10 0 +17 102 1 11.2 0 10 1 +17 103 85 27.3 0 10 1 +17 104 6 35.1 0 10 1 +17 105 170 29.7 0 10 0 +17 106 1 41.9 0 10 1 +17 107 1 21.3 0 10 1 +17 108 15 14.2 0 10 1 +18 1 170 43.3 0 10 1 +18 2 85 30.9 0 10 1 +18 3 6 14 0 10 1 +18 4 28 31.4 0 10 1 +18 5 170 38.3 0 10 1 +18 6 15 14.2 0 10 1 +18 7 6 44 0 10 1 +18 8 6 11 0 10 1 +18 9 85 19.5 0 10 1 +18 10 15 20.7 0 10 1 +18 11 6 13.2 0 10 1 +18 12 170 15 0 10 0 +18 13 85 26 0 10 1 +18 14 1 18 0 10 1 +18 15 15 14.2 0 10 1 +18 16 85 36.1 0 10 1 +18 17 1 18 0 10 0 +18 18 15 49 0 10 1 +18 19 170 49.8 0 10 1 +18 20 6 35.1 0 10 1 +18 21 85 10.5 0 10 0 +18 22 28 13.4 0 10 0 +18 23 15 20.7 0 10 1 +18 24 85 45 0 10 1 +18 25 15 39.6 0 10 1 +18 26 15 12.5 0 10 1 +18 27 1 11.8 0 10 1 +18 28 1 21.3 0 10 1 +18 29 6 26.3 0 10 1 +18 30 15 12.5 0 10 1 +18 31 6 17.4 0 10 1 +18 32 28 16.9 0 10 1 +18 33 170 41.3 0 10 0 +18 34 170 24.4 0 10 0 +18 35 15 10.7 0 10 0 +18 36 1 10.7 0 10 0 +18 37 6 35.1 0 10 1 +18 38 170 38.3 0 10 1 +18 39 6 44 0 10 1 +18 40 15 30.1 0 10 1 +18 41 28 13 0 10 0 +18 42 15 49 0 10 1 +18 43 6 11 0 10 0 +18 44 15 39.6 0 10 1 +18 45 15 10.7 0 10 0 +18 46 1 11 0 10 1 +18 47 28 21.1 0 10 1 +18 48 28 13 0 10 0 +18 49 1 11.2 0 10 1 +18 50 
28 12 0 10 1 +18 51 6 16.7 0 10 1 +18 52 85 27.3 0 10 1 +18 53 170 49.8 0 10 1 +18 54 28 21.7 0 10 1 +18 55 15 10.5 0 10 0 +18 56 170 29.7 0 10 0 +18 57 85 10.5 0 10 0 +18 58 1 11 0 10 1 +18 59 6 14 0 10 1 +18 60 170 33.9 0 10 0 +18 61 170 35.6 0 10 1 +18 62 15 18.6 0 10 1 +18 63 6 26.3 0 10 1 +18 64 85 18.4 0 10 0 +18 65 1 41.9 0 10 1 +18 66 28 12 0 10 1 +18 67 6 16.7 0 10 1 +18 68 170 24.4 0 10 1 +18 69 15 18.6 0 10 1 +18 70 6 17.4 0 10 1 +18 71 85 18.4 0 10 0 +18 72 1 21.3 0 10 1 +18 73 28 41.1 0 10 1 +18 74 85 27.3 0 10 0 +18 75 85 36.1 0 10 1 +18 76 170 35.6 0 10 0 +18 77 28 21.1 0 10 1 +18 78 170 43.3 0 10 1 +18 79 28 21.7 0 10 1 +18 80 85 24.4 0 10 1 +18 81 28 31.4 0 10 1 +18 82 85 45 0 10 1 +18 83 15 10.5 0 10 0 +18 84 6 13.2 0 10 1 +18 85 1 31.6 0 10 1 +18 86 1 31.6 0 10 1 +18 87 85 30.9 0 10 1 +18 88 85 19.5 0 10 1 +18 89 85 24.4 0 10 1 +18 90 28 13.4 0 10 0 +18 91 170 29.7 0 10 1 +18 92 170 33.9 0 10 1 +18 93 28 41.1 0 10 1 +18 94 170 15 0 10 0 +18 95 85 26 0 10 0 +18 96 170 41.3 0 10 1 +18 97 1 12.5 0 10 1 +18 98 1 12.5 0 10 1 +18 99 28 15.5 0 10 1 +18 100 1 11.2 0 10 0 +18 101 6 10.5 0 10 0 +18 102 1 10.7 0 10 1 +18 103 1 11.8 0 10 1 +18 104 28 16.9 0 10 1 +18 105 6 10.5 0 10 1 +18 106 1 41.9 0 10 1 +18 107 28 15.5 0 10 0 +18 108 15 30.1 0 10 1 +19 1 28 41.1 0 10 1 +19 2 170 41.3 0 10 0 +19 3 6 10.5 0 10 0 +19 4 6 44 0 10 1 +19 5 15 12.5 0 10 1 +19 6 28 16.9 0 10 1 +19 7 6 14 0 10 0 +19 8 6 16.7 0 10 1 +19 9 1 31.6 0 10 1 +19 10 1 10.7 0 10 1 +19 11 85 19.5 0 10 1 +19 12 28 16.9 0 10 0 +19 13 170 24.4 0 10 0 +19 14 15 14.2 0 10 1 +19 15 85 26 0 10 0 +19 16 85 36.1 0 10 1 +19 17 15 30.1 0 10 1 +19 18 6 10.5 0 10 0 +19 19 170 24.4 0 10 0 +19 20 15 12.5 0 10 0 +19 21 28 21.7 0 10 0 +19 22 170 15 0 10 0 +19 23 85 10.5 0 10 0 +19 24 1 11.8 0 10 0 +19 25 1 12.5 0 10 1 +19 26 28 15.5 0 10 0 +19 27 6 26.3 0 10 1 +19 28 6 35.1 0 10 1 +19 29 15 18.6 0 10 0 +19 30 170 29.7 0 10 0 +19 31 85 19.5 0 10 0 +19 32 170 43.3 0 10 1 +19 33 28 31.4 0 10 1 +19 34 28 13.4 0 10 1 +19 35 85 26 0 10 0 +19 36 85 10.5 0 10 0 +19 37 85 45 0 10 1 +19 38 28 13 0 10 0 +19 39 170 38.3 0 10 0 +19 40 1 11 0 10 0 +19 41 1 10.7 0 10 0 +19 42 170 29.7 0 10 1 +19 43 6 26.3 0 10 1 +19 44 1 11.2 0 10 0 +19 45 28 41.1 0 10 1 +19 46 1 12.5 0 10 0 +19 47 85 30.9 0 10 1 +19 48 170 33.9 0 10 0 +19 49 28 13 0 10 0 +19 50 170 33.9 0 10 1 +19 51 170 49.8 0 10 1 +19 52 170 35.6 0 10 0 +19 53 15 49 0 10 1 +19 54 1 11.2 0 10 1 +19 55 6 11 0 10 0 +19 56 6 17.4 0 10 1 +19 57 15 49 0 10 1 +19 58 1 11 0 10 0 +19 59 85 18.4 0 10 0 +19 60 15 20.7 0 10 1 +19 61 170 38.3 0 10 0 +19 62 15 39.6 0 10 1 +19 63 6 35.1 0 10 1 +19 64 28 21.1 0 10 1 +19 65 15 39.6 0 10 1 +19 66 15 10.7 0 10 0 +19 67 1 31.6 0 10 1 +19 68 1 41.9 0 10 1 +19 69 170 49.8 0 10 0 +19 70 170 35.6 0 10 0 +19 71 85 36.1 0 10 0 +19 72 28 13.4 0 10 1 +19 73 1 18 0 10 1 +19 74 85 18.4 0 10 1 +19 75 85 24.4 0 10 0 +19 76 170 43.3 0 10 1 +19 77 15 18.6 0 10 1 +19 78 6 13.2 0 10 0 +19 79 6 44 0 10 1 +19 80 15 10.5 0 10 0 +19 81 6 14 0 10 1 +19 82 85 27.3 0 10 1 +19 83 15 30.1 0 10 1 +19 84 6 16.7 0 10 1 +19 85 28 31.4 0 10 1 +19 86 28 21.1 0 10 1 +19 87 15 10.7 0 10 0 +19 88 6 13.2 0 10 1 +19 89 170 41.3 0 10 0 +19 90 28 21.7 0 10 1 +19 91 85 24.4 0 10 1 +19 92 28 12 0 10 0 +19 93 1 11.8 0 10 0 +19 94 28 12 0 10 0 +19 95 1 18 0 10 1 +19 96 28 15.5 0 10 0 +19 97 1 21.3 0 10 1 +19 98 1 21.3 0 10 1 +19 99 1 41.9 0 10 1 +19 100 85 45 0 10 1 +19 101 15 10.5 0 10 1 +19 102 6 11 0 10 1 +19 103 15 14.2 0 10 0 +19 104 15 20.7 0 10 1 +19 105 85 30.9 0 10 0 +19 106 85 27.3 0 10 1 +19 
107 6 17.4 0 10 1 +19 108 170 15 0 10 0 +20 1 6 14 0 10 1 +20 2 1 12.5 0 10 0 +20 3 6 16.7 0 10 1 +20 4 15 14.2 0 10 0 +20 5 170 24.4 0 10 0 +20 6 85 18.4 0 10 0 +20 7 28 41.1 0 10 1 +20 8 170 43.3 0 10 1 +20 9 1 21.3 0 10 1 +20 10 85 26 0 10 0 +20 11 1 11 0 10 0 +20 12 6 10.5 0 10 0 +20 13 15 20.7 0 10 1 +20 14 28 13.4 0 10 1 +20 15 170 35.6 0 10 1 +20 16 1 11 0 10 1 +20 17 6 44 0 10 1 +20 18 6 26.3 0 10 1 +20 19 15 39.6 0 10 1 +20 20 28 41.1 0 10 1 +20 21 85 10.5 0 10 0 +20 22 6 16.7 0 10 0 +20 23 1 11.8 0 10 1 +20 24 28 12 0 10 1 +20 25 1 18 0 10 1 +20 26 170 29.7 0 10 0 +20 27 28 21.7 0 10 1 +20 28 15 10.7 0 10 1 +20 29 170 41.3 0 10 1 +20 30 85 19.5 0 10 0 +20 31 85 45 0 10 1 +20 32 170 33.9 0 10 1 +20 33 28 13.4 0 10 0 +20 34 85 27.3 0 10 1 +20 35 28 13 0 10 0 +20 36 15 18.6 0 10 0 +20 37 15 12.5 0 10 1 +20 38 170 24.4 0 10 0 +20 39 6 44 0 10 1 +20 40 85 30.9 0 10 1 +20 41 6 35.1 0 10 1 +20 42 6 26.3 0 10 1 +20 43 6 13.2 0 10 1 +20 44 15 10.7 0 10 1 +20 45 28 21.7 0 10 1 +20 46 170 33.9 0 10 1 +20 47 15 20.7 0 10 1 +20 48 1 10.7 0 10 1 +20 49 28 16.9 0 10 1 +20 50 1 11.2 0 10 0 +20 51 1 12.5 0 10 1 +20 52 15 18.6 0 10 0 +20 53 28 21.1 0 10 1 +20 54 15 14.2 0 10 1 +20 55 85 18.4 0 10 0 +20 56 170 29.7 0 10 0 +20 57 85 45 0 10 1 +20 58 28 31.4 0 10 1 +20 59 15 30.1 0 10 1 +20 60 1 11.8 0 10 1 +20 61 28 31.4 0 10 1 +20 62 85 19.5 0 10 0 +20 63 6 14 0 10 1 +20 64 1 31.6 0 10 1 +20 65 1 10.7 0 10 1 +20 66 15 49 0 10 1 +20 67 1 21.3 0 10 1 +20 68 6 35.1 0 10 1 +20 69 15 10.5 0 10 1 +20 70 85 10.5 0 10 0 +20 71 6 13.2 0 10 1 +20 72 170 49.8 0 10 1 +20 73 170 35.6 0 10 1 +20 74 85 24.4 0 10 1 +20 75 6 11 0 10 1 +20 76 170 49.8 0 10 1 +20 77 15 30.1 0 10 1 +20 78 85 36.1 0 10 1 +20 79 85 26 0 10 1 +20 80 6 17.4 0 10 1 +20 81 170 15 0 10 0 +20 82 15 12.5 0 10 1 +20 83 85 30.9 0 10 0 +20 84 6 10.5 0 10 1 +20 85 1 41.9 0 10 1 +20 86 15 39.6 0 10 1 +20 87 170 43.3 0 10 1 +20 88 28 13 0 10 0 +20 89 28 15.5 0 10 0 +20 90 85 27.3 0 10 1 +20 91 28 15.5 0 10 1 +20 92 170 38.3 0 10 1 +20 93 15 10.5 0 10 1 +20 94 170 15 0 10 1 +20 95 1 41.9 0 10 1 +20 96 1 31.6 0 10 1 +20 97 6 11 0 10 0 +20 98 1 11.2 0 10 0 +20 99 170 41.3 0 10 1 +20 100 1 18 0 10 1 +20 101 28 12 0 10 1 +20 102 28 21.1 0 10 1 +20 103 28 16.9 0 10 1 +20 104 85 36.1 0 10 1 +20 105 85 24.4 0 10 1 +20 106 6 17.4 0 10 1 +20 107 170 38.3 0 10 1 +20 108 15 49 0 10 1
\ No newline at end of file
diff --git a/R/inst/extdata/dd_single_exampleData.txt b/R/inst/extdata/dd_single_exampleData.txt
new file mode 100644
index 00000000..a729477e
--- /dev/null
+++ b/R/inst/extdata/dd_single_exampleData.txt
@@ -0,0 +1,109 @@
+subjID trial delay_later amount_later delay_sooner amount_sooner choice
+1 1 6 10.5 0 10 1 +1 2 170 38.3 0 10 1 +1 3 28 13.4 0 10 1 +1 4 28 31.4 0 10 1 +1 5 85 30.9 0 10 1 +1 6 28 21.1 0 10 1 +1 7 28 13 0 10 1 +1 8 1 21.3 0 10 1 +1 9 28 21.1 0 10 1 +1 10 15 30.1 0 10 1 +1 11 1 10.7 0 10 1 +1 12 85 36.1 0 10 1 +1 13 15 10.5 0 10 1 +1 14 6 16.7 0 10 1 +1 15 1 11 0 10 1 +1 16 15 14.2 0 10 1 +1 17 15 12.5 0 10 1 +1 18 15 20.7 0 10 1 +1 19 6 11 0 10 0 +1 20 28 16.9 0 10 1 +1 21 15 30.1 0 10 1 +1 22 85 24.4 0 10 1 +1 23 170 41.3 0 10 1 +1 24 15 14.2 0 10 1 +1 25 6 10.5 0 10 1 +1 26 170 24.4 0 10 1 +1 27 15 49 0 10 1 +1 28 170 29.7 0 10 1 +1 29 1 11.8 0 10 0 +1 30 6 13.2 0 10 0 +1 31 85 30.9 0 10 1 +1 32 6 44 0 10 1 +1 33 6 35.1 0 10 1 +1 34 28 15.5 0 10 1 +1 35 170 43.3 0 10 1 +1 36 170 33.9 0 10 1 +1 37 1 11 0 10 1 +1 38 1 21.3 0 10 1 +1 39 85 45 0 10 1 +1 40 15 39.6 0 10 1 +1 41 85 10.5 0 10 0 +1 42 170 15 0 10 1 +1 43 170 49.8 0 10 1 +1
diff --git a/R/inst/extdata/gng_exampleData.txt b/R/inst/extdata/gng_exampleData.txt
new file mode 100644
index 00000000..40e0982a
--- /dev/null
+++ b/R/inst/extdata/gng_exampleData.txt
@@ -0,0 +1,2401 @@
+trialNum cue keyPressed success congruentOutcome outcome subjID
[… 2,400 data rows omitted: 240 go/no-go trials for each of subjects 1–10 …]
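The go/no-go file carries three bookkeeping columns (trialNum, success, congruentOutcome) on top of the four that the gng_* models read (subjID, cue, keyPressed, outcome). A quick sanity check, as a sketch under the same installation assumption as above:

    # Sketch (not part of the patch): verify the columns the gng models
    # require and the per-subject trial counts.
    gng_path <- system.file("extdata", "gng_exampleData.txt",
                            package = "hBayesDM")
    gng_dat  <- read.table(gng_path, header = TRUE)

    stopifnot(all(c("subjID", "cue", "keyPressed", "outcome")
                  %in% names(gng_dat)))
    table(gng_dat$subjID)   # 240 trials per subject
    table(gng_dat$outcome)  # outcomes coded -1 / 0 / 1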
diff --git a/R/inst/extdata/igt_exampleData.txt b/R/inst/extdata/igt_exampleData.txt
new file mode 100644
index 00000000..3a6252af
--- /dev/null
+++ b/R/inst/extdata/igt_exampleData.txt
@@ -0,0 +1,401 @@
+trial choice gain loss subjID
[… 400 data rows omitted: 100 Iowa Gambling Task deck selections for each of subjects 1001–1004 …]
\ No newline at end of file
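In the IGT file, loss is stored as a non-positive number, so gain + loss gives a trial's net payoff. A short sketch (not part of the patch, same installation assumption as above) tallying net payoff by deck, where choice indexes decks 1–4:

    # Sketch (not part of the patch): net payoff per deck in the
    # bundled IGT data.
    igt_path <- system.file("extdata", "igt_exampleData.txt",
                            package = "hBayesDM")
    igt_dat  <- read.table(igt_path, header = TRUE)

    igt_dat$net <- igt_dat$gain + igt_dat$loss
    aggregate(net ~ choice, data = igt_dat, FUN = sum)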
diff --git a/R/inst/extdata/peer_exampleData.txt b/R/inst/extdata/peer_exampleData.txt
new file mode 100644
index 00000000..d4e222eb
--- /dev/null
+++ b/R/inst/extdata/peer_exampleData.txt
@@ -0,0 +1,361 @@
+trial condition p_gamble risky_Lpayoff risky_Hpayoff safe_Lpayoff safe_Hpayoff risky_color total_presses choice bonus subjID
[… data rows omitted: 72 peer-influence gambling trials each for subjects 1–4 and the first 28 trials of subject 5; the file continues below …]
+29 3 0.9
1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +30 0 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +31 0 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +32 1 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +33 1 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +34 0 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +35 2 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +36 3 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +37 3 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +38 3 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +39 3 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +40 3 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +41 1 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +42 2 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +43 1 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +44 3 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +45 3 0.7 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +46 0 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +47 1 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +48 2 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +49 2 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +50 0 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +51 2 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +52 3 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +53 0 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +54 1 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +55 0 0.7 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +56 2 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +57 3 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +58 3 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +59 2 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +60 0 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +61 2 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +62 0 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +63 0 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +64 0 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +65 3 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +66 3 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +67 1 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +68 2 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +69 0 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +70 3 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +71 1 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +72 1 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 diff --git a/R/inst/extdata/prl_exampleData.txt b/R/inst/extdata/prl_exampleData.txt new file mode 100644 index 00000000..21abeae7 --- /dev/null +++ b/R/inst/extdata/prl_exampleData.txt @@ -0,0 +1,2001 @@ +subjID trial choice outcome +1 1 1 1 +1 2 2 1 +1 3 1 1 +1 4 1 1 +1 5 1 1 +1 6 1 1 +1 7 1 1 +1 8 1 1 +1 9 1 -1 +1 10 2 1 +1 11 2 1 +1 12 2 1 +1 13 2 1 +1 14 2 -1 +1 15 1 1 +1 16 2 1 +1 17 1 -1 +1 18 2 1 +1 19 2 -1 +1 20 2 -1 +1 21 1 1 +1 22 1 -1 +1 23 1 -1 +1 24 2 -1 +1 25 2 1 +1 26 2 1 +1 27 2 -1 +1 28 1 -1 +1 29 2 1 +1 30 2 1 +1 31 2 1 +1 32 2 1 +1 33 2 -1 +1 34 2 1 +1 35 2 -1 +1 36 2 1 +1 37 2 1 +1 38 1 -1 +1 39 1 -1 +1 40 2 -1 +1 41 2 -1 +1 42 2 -1 +1 43 1 1 +1 44 1 1 +1 45 1 -1 +1 46 1 1 +1 47 1 1 +1 48 1 1 +1 49 1 -1 +1 50 1 1 +1 51 1 1 +1 52 1 1 +1 53 1 1 +1 54 1 1 +1 55 1 1 +1 56 1 -1 +1 57 1 -1 +1 58 2 1 +1 59 2 1 +1 60 2 -1 +1 61 1 1 +1 62 2 -1 +1 63 1 1 +1 64 1 1 +1 65 1 1 +1 66 1 1 +1 67 1 1 +1 68 1 1 +1 69 1 -1 +1 70 2 1 +1 71 1 -1 +1 72 1 1 +1 73 1 -1 +1 74 2 1 +1 75 2 1 +1 76 2 -1 +1 77 1 1 +1 78 2 1 +1 79 1 1 +1 80 1 -1 +1 81 1 1 +1 82 1 1 +1 83 2 -1 +1 84 1 -1 +1 85 2 -1 +1 86 2 1 +1 87 2 -1 +1 88 1 1 +1 89 1 1 +1 90 1 -1 +1 91 1 -1 +1 92 2 1 +1 93 2 1 +1 94 1 1 +1 95 2 1 +1 96 2 1 +1 97 2 1 +1 98 2 -1 +1 99 1 -1 +1 100 2 1 +2 1 1 1 +2 2 1 -1 +2 3 1 -1 +2 4 2 -1 +2 5 2 -1 +2 6 1 1 +2 7 2 -1 +2 8 2 1 +2 9 1 -1 +2 10 1 1 +2 11 1 -1 +2 12 2 -1 +2 13 2 1 +2 14 2 -1 +2 15 1 1 +2 16 2 1 +2 17 1 1 +2 
[... remaining 1,988 data rows (subjects 1-20, 100 trials each; choice in {1, 2}, outcome in {1, -1}) omitted ...]
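The new prl_exampleData.txt follows the long format that the PRL models read: one row per trial, with subjID, trial, choice (1 or 2), and outcome (1 or -1). As a quick sanity check, here is a minimal base-R sketch (not part of the patch; it only assumes the installed package exposes the file under extdata/, where inst/extdata/ contents land after installation):

    # Minimal sketch (not part of the patch): load the bundled PRL example
    # data and check its shape.
    prl_path <- system.file("extdata", "prl_exampleData.txt", package = "hBayesDM")
    prl_data <- read.table(prl_path, header = TRUE)

    str(prl_data)           # 2000 obs. of 4 variables: subjID, trial, choice, outcome
    table(prl_data$subjID)  # 20 subjects, 100 trials each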
diff --git a/R/inst/extdata/prl_multipleB_exampleData.txt b/R/inst/extdata/prl_multipleB_exampleData.txt
new file mode 100644
index 00000000..16725497
--- /dev/null
+++ b/R/inst/extdata/prl_multipleB_exampleData.txt
@@ -0,0 +1,1801 @@
+ExperimentName subjID block trial choice outcome choiceSwitch choice.ACC choice.RT Subject_Block
+PRL_Young_Final 5038 1 1 2 25 1 1 1430 5038_1
+PRL_Young_Final 5038 1 2 2 25 0 1 439 5038_1
+PRL_Young_Final 5038 1 3 2 -25 0 1 374 5038_1
+PRL_Young_Final 5038 1 4 2 25 0 1 267 5038_1
+PRL_Young_Final 5038 1 5 2 25 0 1 331 5038_1
+PRL_Young_Final 5038 1 6 2 -25 0 1 316 5038_1
+PRL_Young_Final 5038 1 7 2 25 0 1 325 5038_1
[... remaining 1,793 data rows (subjects 5038 and 5036 across blocks of 200 trials each; outcome in {25, -25}) omitted ...]
5036_2 +PRL_Young_Final 5036 2 24 1 25 0 1 490 5036_2 +PRL_Young_Final 5036 2 25 1 25 0 1 272 5036_2 +PRL_Young_Final 5036 2 26 1 -25 0 1 321 5036_2 +PRL_Young_Final 5036 2 27 2 -25 1 1 1991 5036_2 +PRL_Young_Final 5036 2 28 1 -25 1 0 1638 5036_2 +PRL_Young_Final 5036 2 29 2 25 1 1 968 5036_2 +PRL_Young_Final 5036 2 30 2 25 0 1 514 5036_2 +PRL_Young_Final 5036 2 31 2 25 0 1 508 5036_2 +PRL_Young_Final 5036 2 32 2 25 0 1 308 5036_2 +PRL_Young_Final 5036 2 33 2 25 0 1 311 5036_2 +PRL_Young_Final 5036 2 34 2 25 0 1 240 5036_2 +PRL_Young_Final 5036 2 35 2 -25 0 0 499 5036_2 +PRL_Young_Final 5036 2 36 2 25 0 0 880 5036_2 +PRL_Young_Final 5036 2 37 2 -25 0 0 42 5036_2 +PRL_Young_Final 5036 2 38 1 -25 1 1 1118 5036_2 +PRL_Young_Final 5036 2 39 1 25 0 1 2073 5036_2 +PRL_Young_Final 5036 2 40 1 25 0 1 1534 5036_2 +PRL_Young_Final 5036 2 41 1 25 0 1 1537 5036_2 +PRL_Young_Final 5036 2 42 1 25 0 1 1447 5036_2 +PRL_Young_Final 5036 2 43 1 25 0 1 392 5036_2 +PRL_Young_Final 5036 2 44 1 25 0 1 447 5036_2 +PRL_Young_Final 5036 2 45 1 25 0 1 1778 5036_2 +PRL_Young_Final 5036 2 46 1 -25 0 0 1085 5036_2 +PRL_Young_Final 5036 2 47 1 -25 0 0 1708 5036_2 +PRL_Young_Final 5036 2 48 2 -25 1 1 469 5036_2 +PRL_Young_Final 5036 2 49 2 25 0 1 1336 5036_2 +PRL_Young_Final 5036 2 50 2 25 0 1 723 5036_2 +PRL_Young_Final 5036 2 51 2 -25 0 1 507 5036_2 +PRL_Young_Final 5036 2 52 2 25 0 1 261 5036_2 +PRL_Young_Final 5036 2 53 2 25 0 1 506 5036_2 +PRL_Young_Final 5036 2 54 2 25 0 1 437 5036_2 +PRL_Young_Final 5036 2 55 2 -25 0 0 178 5036_2 +PRL_Young_Final 5036 2 56 2 -25 0 0 1540 5036_2 +PRL_Young_Final 5036 2 57 1 25 1 1 458 5036_2 +PRL_Young_Final 5036 2 58 1 25 0 1 445 5036_2 +PRL_Young_Final 5036 2 59 1 -25 0 1 506 5036_2 +PRL_Young_Final 5036 2 60 1 -25 0 1 300 5036_2 +PRL_Young_Final 5036 2 61 2 -25 1 0 1634 5036_2 +PRL_Young_Final 5036 2 62 2 -25 0 0 268 5036_2 +PRL_Young_Final 5036 2 63 2 25 0 0 408 5036_2 +PRL_Young_Final 5036 2 64 2 -25 0 0 525 5036_2 +PRL_Young_Final 5036 2 65 2 25 0 0 88 5036_2 +PRL_Young_Final 5036 2 66 2 -25 0 0 1491 5036_2 +PRL_Young_Final 5036 2 67 2 -25 0 0 815 5036_2 +PRL_Young_Final 5036 2 68 1 25 1 1 829 5036_2 +PRL_Young_Final 5036 2 69 1 25 0 1 459 5036_2 +PRL_Young_Final 5036 2 70 1 25 0 1 808 5036_2 +PRL_Young_Final 5036 2 71 1 25 0 1 798 5036_2 +PRL_Young_Final 5036 2 72 1 25 0 1 541 5036_2 +PRL_Young_Final 5036 2 73 1 25 0 1 710 5036_2 +PRL_Young_Final 5036 2 74 1 -25 0 1 629 5036_2 +PRL_Young_Final 5036 2 75 1 -25 0 0 547 5036_2 +PRL_Young_Final 5036 2 76 2 25 1 1 2264 5036_2 +PRL_Young_Final 5036 2 77 2 25 0 1 443 5036_2 +PRL_Young_Final 5036 2 78 2 25 0 1 569 5036_2 +PRL_Young_Final 5036 2 79 2 25 0 1 371 5036_2 +PRL_Young_Final 5036 2 80 2 25 0 1 495 5036_2 +PRL_Young_Final 5036 2 81 2 25 0 1 464 5036_2 +PRL_Young_Final 5036 2 82 2 25 0 1 24 5036_2 +PRL_Young_Final 5036 2 83 2 -25 0 1 517 5036_2 +PRL_Young_Final 5036 2 84 2 -25 0 0 562 5036_2 +PRL_Young_Final 5036 2 85 1 25 1 1 1933 5036_2 +PRL_Young_Final 5036 2 86 1 25 0 1 485 5036_2 +PRL_Young_Final 5036 2 87 1 -25 0 1 79 5036_2 +PRL_Young_Final 5036 2 88 1 25 0 1 874 5036_2 +PRL_Young_Final 5036 2 89 1 25 0 1 125 5036_2 +PRL_Young_Final 5036 2 90 1 25 0 1 602 5036_2 +PRL_Young_Final 5036 2 91 1 25 0 1 622 5036_2 +PRL_Young_Final 5036 2 92 1 25 0 1 425 5036_2 +PRL_Young_Final 5036 2 93 1 -25 0 0 512 5036_2 +PRL_Young_Final 5036 2 94 1 -25 0 0 318 5036_2 +PRL_Young_Final 5036 2 95 2 -25 1 1 654 5036_2 +PRL_Young_Final 5036 2 96 2 -25 0 1 83 5036_2 +PRL_Young_Final 5036 2 97 2 25 0 1 195 5036_2 +PRL_Young_Final 5036 2 98 2 
25 0 1 301 5036_2 +PRL_Young_Final 5036 2 99 2 25 0 1 201 5036_2 +PRL_Young_Final 5036 2 100 2 25 0 0 498 5036_2 +PRL_Young_Final 5036 2 101 2 25 0 0 467 5036_2 +PRL_Young_Final 5036 2 102 2 -25 0 0 521 5036_2 +PRL_Young_Final 5036 2 103 2 -25 0 0 529 5036_2 +PRL_Young_Final 5036 2 104 2 -25 0 0 252 5036_2 +PRL_Young_Final 5036 2 105 1 25 1 1 424 5036_2 +PRL_Young_Final 5036 2 106 1 25 0 1 448 5036_2 +PRL_Young_Final 5036 2 107 1 25 0 1 403 5036_2 +PRL_Young_Final 5036 2 108 1 -25 0 1 130 5036_2 +PRL_Young_Final 5036 2 109 1 25 0 1 200 5036_2 +PRL_Young_Final 5036 2 110 1 -25 0 0 308 5036_2 +PRL_Young_Final 5036 2 111 1 -25 0 0 566 5036_2 +PRL_Young_Final 5036 2 112 2 25 1 1 196 5036_2 +PRL_Young_Final 5036 2 113 2 25 0 1 387 5036_2 +PRL_Young_Final 5036 2 114 2 25 0 1 1008 5036_2 +PRL_Young_Final 5036 2 115 2 25 0 1 1355 5036_2 +PRL_Young_Final 5036 2 116 2 25 0 1 153 5036_2 +PRL_Young_Final 5036 2 117 2 25 0 1 319 5036_2 +PRL_Young_Final 5036 2 118 2 -25 0 1 453 5036_2 +PRL_Young_Final 5036 2 119 2 25 0 1 228 5036_2 +PRL_Young_Final 5036 2 120 2 -25 0 0 1982 5036_2 +PRL_Young_Final 5036 2 121 2 25 0 0 247 5036_2 +PRL_Young_Final 5036 2 122 2 -25 0 0 1437 5036_2 +PRL_Young_Final 5036 2 123 2 -25 0 0 287 5036_2 +PRL_Young_Final 5036 2 124 2 -25 0 0 898 5036_2 +PRL_Young_Final 5036 2 125 2 -25 0 0 451 5036_2 +PRL_Young_Final 5036 2 126 1 25 1 1 416 5036_2 +PRL_Young_Final 5036 2 127 1 -25 0 1 1363 5036_2 +PRL_Young_Final 5036 2 128 1 25 0 1 383 5036_2 +PRL_Young_Final 5036 2 129 1 25 0 1 508 5036_2 +PRL_Young_Final 5036 2 130 1 25 0 1 562 5036_2 +PRL_Young_Final 5036 2 131 1 25 0 1 546 5036_2 +PRL_Young_Final 5036 2 132 1 -25 0 0 162 5036_2 +PRL_Young_Final 5036 2 133 1 -25 0 0 88 5036_2 +PRL_Young_Final 5036 2 134 2 25 1 1 560 5036_2 +PRL_Young_Final 5036 2 135 2 -25 0 1 730 5036_2 +PRL_Young_Final 5036 2 136 2 -25 0 1 163 5036_2 +PRL_Young_Final 5036 2 137 1 -25 1 0 1807 5036_2 +PRL_Young_Final 5036 2 138 1 25 0 0 506 5036_2 +PRL_Young_Final 5036 2 139 1 -25 0 0 521 5036_2 +PRL_Young_Final 5036 2 140 1 25 0 0 350 5036_2 +PRL_Young_Final 5036 2 141 1 -25 0 0 73 5036_2 +PRL_Young_Final 5036 2 142 2 25 1 1 600 5036_2 +PRL_Young_Final 5036 2 143 2 25 0 1 441 5036_2 +PRL_Young_Final 5036 2 144 2 25 0 1 131 5036_2 +PRL_Young_Final 5036 2 145 2 25 0 1 360 5036_2 +PRL_Young_Final 5036 2 146 2 25 0 1 553 5036_2 +PRL_Young_Final 5036 2 147 2 25 0 1 36 5036_2 +PRL_Young_Final 5036 2 148 2 -25 0 1 460 5036_2 +PRL_Young_Final 5036 2 149 1 -25 1 0 1844 5036_2 +PRL_Young_Final 5036 2 150 2 25 1 1 761 5036_2 +PRL_Young_Final 5036 2 151 2 25 0 1 334 5036_2 +PRL_Young_Final 5036 2 152 2 25 0 1 407 5036_2 +PRL_Young_Final 5036 2 153 2 25 0 1 255 5036_2 +PRL_Young_Final 5036 2 154 2 25 0 1 1566 5036_2 +PRL_Young_Final 5036 2 155 2 25 0 1 656 5036_2 +PRL_Young_Final 5036 2 156 2 25 0 1 514 5036_2 +PRL_Young_Final 5036 2 157 2 -25 0 1 111 5036_2 +PRL_Young_Final 5036 2 158 1 25 1 1 1447 5036_2 +PRL_Young_Final 5036 2 159 1 25 0 1 494 5036_2 +PRL_Young_Final 5036 2 160 1 -25 0 1 629 5036_2 +PRL_Young_Final 5036 2 161 1 25 0 1 973 5036_2 +PRL_Young_Final 5036 2 162 1 25 0 1 183 5036_2 +PRL_Young_Final 5036 2 163 1 -25 0 0 75 5036_2 +PRL_Young_Final 5036 2 164 2 25 1 1 1017 5036_2 +PRL_Young_Final 5036 2 165 2 25 0 1 513 5036_2 +PRL_Young_Final 5036 2 166 2 25 0 1 1553 5036_2 +PRL_Young_Final 5036 2 167 2 -25 0 1 920 5036_2 +PRL_Young_Final 5036 2 168 2 -25 0 1 509 5036_2 +PRL_Young_Final 5036 2 169 1 -25 1 0 1115 5036_2 +PRL_Young_Final 5036 2 170 1 -25 0 0 409 5036_2 +PRL_Young_Final 5036 2 171 2 25 1 1 737 
5036_2 +PRL_Young_Final 5036 2 172 2 25 0 1 19 5036_2 +PRL_Young_Final 5036 2 173 2 25 0 1 556 5036_2 +PRL_Young_Final 5036 2 174 2 25 0 1 461 5036_2 +PRL_Young_Final 5036 2 175 2 25 0 1 740 5036_2 +PRL_Young_Final 5036 2 176 2 25 0 1 483 5036_2 +PRL_Young_Final 5036 2 177 2 -25 0 1 488 5036_2 +PRL_Young_Final 5036 2 178 2 25 0 1 143 5036_2 +PRL_Young_Final 5036 2 179 2 -25 0 0 701 5036_2 +PRL_Young_Final 5036 2 180 1 25 1 1 1436 5036_2 +PRL_Young_Final 5036 2 181 1 25 0 1 471 5036_2 +PRL_Young_Final 5036 2 182 1 25 0 1 213 5036_2 +PRL_Young_Final 5036 2 183 1 25 0 1 377 5036_2 +PRL_Young_Final 5036 2 184 1 25 0 1 490 5036_2 +PRL_Young_Final 5036 2 185 1 25 0 1 631 5036_2 +PRL_Young_Final 5036 2 186 1 25 0 0 245 5036_2 +PRL_Young_Final 5036 2 187 1 25 0 0 995 5036_2 +PRL_Young_Final 5036 2 188 1 -25 0 0 974 5036_2 +PRL_Young_Final 5036 2 189 2 -25 1 1 1840 5036_2 +PRL_Young_Final 5036 2 190 1 -25 1 0 1510 5036_2 +PRL_Young_Final 5036 2 191 1 -25 0 0 412 5036_2 +PRL_Young_Final 5036 2 192 2 25 1 1 377 5036_2 +PRL_Young_Final 5036 2 193 2 25 0 1 160 5036_2 +PRL_Young_Final 5036 2 194 2 -25 0 1 410 5036_2 +PRL_Young_Final 5036 2 195 2 25 0 1 82 5036_2 +PRL_Young_Final 5036 2 196 2 25 0 1 509 5036_2 +PRL_Young_Final 5036 2 197 2 25 0 1 430 5036_2 +PRL_Young_Final 5036 2 198 2 -25 0 0 414 5036_2 +PRL_Young_Final 5036 2 199 1 25 1 1 282 5036_2 +PRL_Young_Final 5036 2 200 1 25 0 1 400 5036_2 +PRL_Young_Final 5036 3 1 1 -25 0 1 2267 5036_3 +PRL_Young_Final 5036 3 2 1 -25 0 1 628 5036_3 +PRL_Young_Final 5036 3 3 2 -25 1 0 1419 5036_3 +PRL_Young_Final 5036 3 4 2 -25 0 0 940 5036_3 +PRL_Young_Final 5036 3 5 2 25 0 0 556 5036_3 +PRL_Young_Final 5036 3 6 2 -25 0 0 378 5036_3 +PRL_Young_Final 5036 3 7 2 -25 0 0 304 5036_3 +PRL_Young_Final 5036 3 8 2 -25 0 0 819 5036_3 +PRL_Young_Final 5036 3 9 1 25 1 1 770 5036_3 +PRL_Young_Final 5036 3 10 1 25 0 1 1243 5036_3 +PRL_Young_Final 5036 3 11 1 25 0 1 587 5036_3 +PRL_Young_Final 5036 3 12 1 25 0 1 109 5036_3 +PRL_Young_Final 5036 3 13 1 25 0 1 710 5036_3 +PRL_Young_Final 5036 3 14 1 -25 0 0 446 5036_3 +PRL_Young_Final 5036 3 15 1 -25 0 0 174 5036_3 +PRL_Young_Final 5036 3 16 1 -25 0 0 946 5036_3 +PRL_Young_Final 5036 3 17 2 25 1 1 453 5036_3 +PRL_Young_Final 5036 3 18 2 -25 0 1 496 5036_3 +PRL_Young_Final 5036 3 19 2 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 20 2 25 0 1 464 5036_3 +PRL_Young_Final 5036 3 21 2 25 0 1 263 5036_3 +PRL_Young_Final 5036 3 22 2 25 0 1 321 5036_3 +PRL_Young_Final 5036 3 23 2 25 0 1 326 5036_3 +PRL_Young_Final 5036 3 24 2 -25 0 0 421 5036_3 +PRL_Young_Final 5036 3 25 1 25 1 1 801 5036_3 +PRL_Young_Final 5036 3 26 1 25 0 1 452 5036_3 +PRL_Young_Final 5036 3 27 1 -25 0 1 510 5036_3 +PRL_Young_Final 5036 3 28 1 25 0 1 490 5036_3 +PRL_Young_Final 5036 3 29 1 25 0 1 464 5036_3 +PRL_Young_Final 5036 3 30 1 25 0 0 476 5036_3 +PRL_Young_Final 5036 3 31 1 -25 0 0 610 5036_3 +PRL_Young_Final 5036 3 32 2 -25 1 1 877 5036_3 +PRL_Young_Final 5036 3 33 2 25 0 1 769 5036_3 +PRL_Young_Final 5036 3 34 2 25 0 1 1131 5036_3 +PRL_Young_Final 5036 3 35 2 25 0 1 512 5036_3 +PRL_Young_Final 5036 3 36 2 25 0 1 465 5036_3 +PRL_Young_Final 5036 3 37 2 25 0 1 486 5036_3 +PRL_Young_Final 5036 3 38 2 -25 0 1 669 5036_3 +PRL_Young_Final 5036 3 39 2 -25 0 1 949 5036_3 +PRL_Young_Final 5036 3 40 1 25 1 1 830 5036_3 +PRL_Young_Final 5036 3 41 1 25 0 1 490 5036_3 +PRL_Young_Final 5036 3 42 1 25 0 1 229 5036_3 +PRL_Young_Final 5036 3 43 1 25 0 1 331 5036_3 +PRL_Young_Final 5036 3 44 1 25 0 1 462 5036_3 +PRL_Young_Final 5036 3 45 1 25 0 1 272 5036_3 +PRL_Young_Final 
5036 3 46 1 25 0 0 1480 5036_3 +PRL_Young_Final 5036 3 47 1 -25 0 0 562 5036_3 +PRL_Young_Final 5036 3 48 1 -25 0 0 908 5036_3 +PRL_Young_Final 5036 3 49 2 -25 1 1 467 5036_3 +PRL_Young_Final 5036 3 50 2 25 0 1 392 5036_3 +PRL_Young_Final 5036 3 51 2 25 0 1 457 5036_3 +PRL_Young_Final 5036 3 52 2 25 0 1 667 5036_3 +PRL_Young_Final 5036 3 53 2 25 0 1 576 5036_3 +PRL_Young_Final 5036 3 54 2 -25 0 0 196 5036_3 +PRL_Young_Final 5036 3 55 2 -25 0 0 213 5036_3 +PRL_Young_Final 5036 3 56 1 25 1 1 867 5036_3 +PRL_Young_Final 5036 3 57 1 25 0 1 183 5036_3 +PRL_Young_Final 5036 3 58 1 25 0 1 403 5036_3 +PRL_Young_Final 5036 3 59 1 -25 0 1 460 5036_3 +PRL_Young_Final 5036 3 60 1 25 0 1 747 5036_3 +PRL_Young_Final 5036 3 61 1 25 0 1 758 5036_3 +PRL_Young_Final 5036 3 62 1 -25 0 0 62 5036_3 +PRL_Young_Final 5036 3 63 1 -25 0 0 715 5036_3 +PRL_Young_Final 5036 3 64 1 25 0 0 231 5036_3 +PRL_Young_Final 5036 3 65 1 25 0 0 458 5036_3 +PRL_Young_Final 5036 3 66 1 -25 0 0 487 5036_3 +PRL_Young_Final 5036 3 67 1 -25 0 0 59 5036_3 +PRL_Young_Final 5036 3 68 2 -25 1 1 52 5036_3 +PRL_Young_Final 5036 3 69 2 25 0 1 789 5036_3 +PRL_Young_Final 5036 3 70 2 25 0 1 432 5036_3 +PRL_Young_Final 5036 3 71 2 25 0 1 759 5036_3 +PRL_Young_Final 5036 3 72 2 25 0 1 3790 5036_3 +PRL_Young_Final 5036 3 73 2 -25 0 0 638 5036_3 +PRL_Young_Final 5036 3 74 2 -25 0 0 1516 5036_3 +PRL_Young_Final 5036 3 75 1 25 1 1 759 5036_3 +PRL_Young_Final 5036 3 76 1 -25 0 1 455 5036_3 +PRL_Young_Final 5036 3 77 1 -25 0 1 582 5036_3 +PRL_Young_Final 5036 3 78 1 25 0 1 133 5036_3 +PRL_Young_Final 5036 3 79 1 25 0 1 456 5036_3 +PRL_Young_Final 5036 3 80 1 25 0 1 619 5036_3 +PRL_Young_Final 5036 3 81 1 25 0 1 513 5036_3 +PRL_Young_Final 5036 3 82 1 25 0 1 985 5036_3 +PRL_Young_Final 5036 3 83 1 -25 0 0 134 5036_3 +PRL_Young_Final 5036 3 84 1 -25 0 0 569 5036_3 +PRL_Young_Final 5036 3 85 2 25 1 1 473 5036_3 +PRL_Young_Final 5036 3 86 2 -25 0 1 891 5036_3 +PRL_Young_Final 5036 3 87 2 25 0 1 487 5036_3 +PRL_Young_Final 5036 3 88 2 25 0 1 333 5036_3 +PRL_Young_Final 5036 3 89 2 25 0 1 424 5036_3 +PRL_Young_Final 5036 3 90 2 25 0 1 474 5036_3 +PRL_Young_Final 5036 3 91 2 25 0 1 407 5036_3 +PRL_Young_Final 5036 3 92 2 25 0 1 236 5036_3 +PRL_Young_Final 5036 3 93 2 25 0 0 57 5036_3 +PRL_Young_Final 5036 3 94 2 -25 0 0 785 5036_3 +PRL_Young_Final 5036 3 95 1 25 1 1 933 5036_3 +PRL_Young_Final 5036 3 96 1 -25 0 1 522 5036_3 +PRL_Young_Final 5036 3 97 1 25 0 1 243 5036_3 +PRL_Young_Final 5036 3 98 1 25 0 1 58 5036_3 +PRL_Young_Final 5036 3 99 1 -25 0 1 498 5036_3 +PRL_Young_Final 5036 3 100 2 -25 1 0 587 5036_3 +PRL_Young_Final 5036 3 101 2 -25 0 0 89 5036_3 +PRL_Young_Final 5036 3 102 2 -25 0 0 201 5036_3 +PRL_Young_Final 5036 3 103 1 25 1 1 446 5036_3 +PRL_Young_Final 5036 3 104 1 25 0 1 540 5036_3 +PRL_Young_Final 5036 3 105 1 25 0 1 507 5036_3 +PRL_Young_Final 5036 3 106 1 25 0 1 432 5036_3 +PRL_Young_Final 5036 3 107 1 25 0 1 525 5036_3 +PRL_Young_Final 5036 3 108 1 -25 0 1 47 5036_3 +PRL_Young_Final 5036 3 109 1 -25 0 1 210 5036_3 +PRL_Young_Final 5036 3 110 2 25 1 1 661 5036_3 +PRL_Young_Final 5036 3 111 2 25 0 1 225 5036_3 +PRL_Young_Final 5036 3 112 2 25 0 1 443 5036_3 +PRL_Young_Final 5036 3 113 2 25 0 1 289 5036_3 +PRL_Young_Final 5036 3 114 2 25 0 1 596 5036_3 +PRL_Young_Final 5036 3 115 2 -25 0 0 505 5036_3 +PRL_Young_Final 5036 3 116 2 -25 0 0 649 5036_3 +PRL_Young_Final 5036 3 117 1 25 1 1 1022 5036_3 +PRL_Young_Final 5036 3 118 1 -25 0 1 1342 5036_3 +PRL_Young_Final 5036 3 119 1 25 0 1 354 5036_3 +PRL_Young_Final 5036 3 120 1 25 0 1 1119 
5036_3 +PRL_Young_Final 5036 3 121 1 25 0 1 911 5036_3 +PRL_Young_Final 5036 3 122 1 25 0 1 2367 5036_3 +PRL_Young_Final 5036 3 123 1 25 0 1 834 5036_3 +PRL_Young_Final 5036 3 124 1 25 0 1 1194 5036_3 +PRL_Young_Final 5036 3 125 1 -25 0 0 2371 5036_3 +PRL_Young_Final 5036 3 126 1 25 0 0 2397 5036_3 +PRL_Young_Final 5036 3 127 1 -25 0 0 313 5036_3 +PRL_Young_Final 5036 3 128 1 25 0 0 547 5036_3 +PRL_Young_Final 5036 3 129 1 -25 0 0 1113 5036_3 +PRL_Young_Final 5036 3 130 1 -25 0 0 283 5036_3 +PRL_Young_Final 5036 3 131 2 25 1 1 73 5036_3 +PRL_Young_Final 5036 3 132 2 -25 0 1 2660 5036_3 +PRL_Young_Final 5036 3 133 2 25 0 1 426 5036_3 +PRL_Young_Final 5036 3 134 2 25 0 1 950 5036_3 +PRL_Young_Final 5036 3 135 2 -25 0 1 744 5036_3 +PRL_Young_Final 5036 3 136 2 25 0 1 637 5036_3 +PRL_Young_Final 5036 3 137 2 -25 0 0 1133 5036_3 +PRL_Young_Final 5036 3 138 1 25 1 1 1031 5036_3 +PRL_Young_Final 5036 3 139 1 25 0 1 540 5036_3 +PRL_Young_Final 5036 3 140 1 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 141 1 25 0 1 594 5036_3 +PRL_Young_Final 5036 3 142 1 -25 0 1 400 5036_3 +PRL_Young_Final 5036 3 143 1 -25 0 1 509 5036_3 +PRL_Young_Final 5036 3 144 2 -25 1 0 1377 5036_3 +PRL_Young_Final 5036 3 145 2 -25 0 0 503 5036_3 +PRL_Young_Final 5036 3 146 1 25 1 1 861 5036_3 +PRL_Young_Final 5036 3 147 1 25 0 1 457 5036_3 +PRL_Young_Final 5036 3 148 1 25 0 1 486 5036_3 +PRL_Young_Final 5036 3 149 1 25 0 1 470 5036_3 +PRL_Young_Final 5036 3 150 1 25 0 1 438 5036_3 +PRL_Young_Final 5036 3 151 1 25 0 1 977 5036_3 +PRL_Young_Final 5036 3 152 1 -25 0 1 762 5036_3 +PRL_Young_Final 5036 3 153 1 25 0 1 1138 5036_3 +PRL_Young_Final 5036 3 154 1 -25 0 0 578 5036_3 +PRL_Young_Final 5036 3 155 2 25 1 1 754 5036_3 +PRL_Young_Final 5036 3 156 2 25 0 1 482 5036_3 +PRL_Young_Final 5036 3 157 2 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 158 2 25 0 1 952 5036_3 +PRL_Young_Final 5036 3 159 2 25 0 1 1078 5036_3 +PRL_Young_Final 5036 3 160 2 25 0 1 934 5036_3 +PRL_Young_Final 5036 3 161 2 -25 0 1 481 5036_3 +PRL_Young_Final 5036 3 162 2 25 0 0 563 5036_3 +PRL_Young_Final 5036 3 163 2 25 0 0 699 5036_3 +PRL_Young_Final 5036 3 164 2 -25 0 0 509 5036_3 +PRL_Young_Final 5036 3 165 1 25 1 1 1000 5036_3 +PRL_Young_Final 5036 3 166 1 25 0 1 488 5036_3 +PRL_Young_Final 5036 3 167 1 -25 0 1 1019 5036_3 +PRL_Young_Final 5036 3 168 1 25 0 1 613 5036_3 +PRL_Young_Final 5036 3 169 1 25 0 1 470 5036_3 +PRL_Young_Final 5036 3 170 1 25 0 1 436 5036_3 +PRL_Young_Final 5036 3 171 1 -25 0 0 269 5036_3 +PRL_Young_Final 5036 3 172 2 25 1 1 1473 5036_3 +PRL_Young_Final 5036 3 173 2 25 0 1 458 5036_3 +PRL_Young_Final 5036 3 174 2 -25 0 1 433 5036_3 +PRL_Young_Final 5036 3 175 2 -25 0 1 556 5036_3 +PRL_Young_Final 5036 3 176 1 -25 1 0 532 5036_3 +PRL_Young_Final 5036 3 177 1 -25 0 0 395 5036_3 +PRL_Young_Final 5036 3 178 1 -25 0 0 393 5036_3 +PRL_Young_Final 5036 3 179 2 25 1 1 421 5036_3 +PRL_Young_Final 5036 3 180 2 25 0 1 342 5036_3 +PRL_Young_Final 5036 3 181 2 25 0 1 436 5036_3 +PRL_Young_Final 5036 3 182 2 25 0 1 126 5036_3 +PRL_Young_Final 5036 3 183 2 25 0 1 533 5036_3 +PRL_Young_Final 5036 3 184 2 -25 0 0 474 5036_3 +PRL_Young_Final 5036 3 185 2 25 0 0 524 5036_3 +PRL_Young_Final 5036 3 186 2 -25 0 0 805 5036_3 +PRL_Young_Final 5036 3 187 1 25 1 1 1009 5036_3 +PRL_Young_Final 5036 3 188 1 -25 0 1 490 5036_3 +PRL_Young_Final 5036 3 189 1 25 0 1 795 5036_3 +PRL_Young_Final 5036 3 190 1 25 0 1 487 5036_3 +PRL_Young_Final 5036 3 191 1 25 0 1 946 5036_3 +PRL_Young_Final 5036 3 192 1 25 0 1 1127 5036_3 +PRL_Young_Final 5036 3 193 1 -25 0 0 677 5036_3 
+PRL_Young_Final 5036 3 194 1 -25 0 0 782 5036_3 +PRL_Young_Final 5036 3 195 2 25 1 1 521 5036_3 +PRL_Young_Final 5036 3 196 2 25 0 1 480 5036_3 +PRL_Young_Final 5036 3 197 2 25 0 1 450 5036_3 +PRL_Young_Final 5036 3 198 2 -25 0 1 429 5036_3 +PRL_Young_Final 5036 3 199 2 25 0 1 585 5036_3 +PRL_Young_Final 5036 3 200 2 -25 0 0 102 5036_3 +PRL_Young_Final 5035 1 1 1 25 0 0 753 5035_1 +PRL_Young_Final 5035 1 2 1 -25 0 0 321 5035_1 +PRL_Young_Final 5035 1 3 1 -25 0 0 283 5035_1 +PRL_Young_Final 5035 1 4 2 25 1 1 300 5035_1 +PRL_Young_Final 5035 1 5 2 25 0 1 337 5035_1 +PRL_Young_Final 5035 1 6 2 -25 0 1 285 5035_1 +PRL_Young_Final 5035 1 7 2 25 0 1 363 5035_1 +PRL_Young_Final 5035 1 8 2 25 0 1 281 5035_1 +PRL_Young_Final 5035 1 9 2 -25 0 0 287 5035_1 +PRL_Young_Final 5035 1 10 1 -25 1 1 310 5035_1 +PRL_Young_Final 5035 1 11 2 -25 1 0 906 5035_1 +PRL_Young_Final 5035 1 12 2 -25 0 0 584 5035_1 +PRL_Young_Final 5035 1 13 1 25 1 1 239 5035_1 +PRL_Young_Final 5035 1 14 1 25 0 1 273 5035_1 +PRL_Young_Final 5035 1 15 1 25 0 1 698 5035_1 +PRL_Young_Final 5035 1 16 1 25 0 1 365 5035_1 +PRL_Young_Final 5035 1 17 1 25 0 1 295 5035_1 +PRL_Young_Final 5035 1 18 1 -25 0 0 305 5035_1 +PRL_Young_Final 5035 1 19 1 25 0 0 284 5035_1 +PRL_Young_Final 5035 1 20 1 25 0 0 278 5035_1 +PRL_Young_Final 5035 1 21 1 -25 0 0 276 5035_1 +PRL_Young_Final 5035 1 22 1 -25 0 0 239 5035_1 +PRL_Young_Final 5035 1 23 2 -25 1 1 342 5035_1 +PRL_Young_Final 5035 1 24 2 -25 0 1 536 5035_1 +PRL_Young_Final 5035 1 25 1 -25 1 0 464 5035_1 +PRL_Young_Final 5035 1 26 1 -25 0 0 277 5035_1 +PRL_Young_Final 5035 1 27 1 -25 0 0 412 5035_1 +PRL_Young_Final 5035 1 28 2 25 1 1 371 5035_1 +PRL_Young_Final 5035 1 29 2 25 0 1 311 5035_1 +PRL_Young_Final 5035 1 30 2 25 0 1 303 5035_1 +PRL_Young_Final 5035 1 31 2 25 0 1 410 5035_1 +PRL_Young_Final 5035 1 32 2 25 0 1 293 5035_1 +PRL_Young_Final 5035 1 33 2 25 0 1 706 5035_1 +PRL_Young_Final 5035 1 34 2 -25 0 1 484 5035_1 +PRL_Young_Final 5035 1 35 2 25 0 1 349 5035_1 +PRL_Young_Final 5035 1 36 2 -25 0 0 482 5035_1 +PRL_Young_Final 5035 1 37 2 25 0 0 649 5035_1 +PRL_Young_Final 5035 1 38 2 -25 0 0 543 5035_1 +PRL_Young_Final 5035 1 39 2 -25 0 0 419 5035_1 +PRL_Young_Final 5035 1 40 1 25 1 1 337 5035_1 +PRL_Young_Final 5035 1 41 1 25 0 1 331 5035_1 +PRL_Young_Final 5035 1 42 1 25 0 1 654 5035_1 +PRL_Young_Final 5035 1 43 1 25 0 1 301 5035_1 +PRL_Young_Final 5035 1 44 1 25 0 1 278 5035_1 +PRL_Young_Final 5035 1 45 1 -25 0 0 610 5035_1 +PRL_Young_Final 5035 1 46 1 -25 0 0 427 5035_1 +PRL_Young_Final 5035 1 47 2 25 1 1 336 5035_1 +PRL_Young_Final 5035 1 48 2 -25 0 1 271 5035_1 +PRL_Young_Final 5035 1 49 2 25 0 1 244 5035_1 +PRL_Young_Final 5035 1 50 2 25 0 1 577 5035_1 +PRL_Young_Final 5035 1 51 2 -25 0 1 291 5035_1 +PRL_Young_Final 5035 1 52 2 25 0 1 653 5035_1 +PRL_Young_Final 5035 1 53 2 25 0 1 327 5035_1 +PRL_Young_Final 5035 1 54 2 -25 0 0 306 5035_1 +PRL_Young_Final 5035 1 55 2 -25 0 0 295 5035_1 +PRL_Young_Final 5035 1 56 1 25 1 1 294 5035_1 +PRL_Young_Final 5035 1 57 1 25 0 1 350 5035_1 +PRL_Young_Final 5035 1 58 1 25 0 1 737 5035_1 +PRL_Young_Final 5035 1 59 1 -25 0 1 577 5035_1 +PRL_Young_Final 5035 1 60 1 -25 0 1 331 5035_1 +PRL_Young_Final 5035 1 61 2 -25 1 0 271 5035_1 +PRL_Young_Final 5035 1 62 2 25 0 0 381 5035_1 +PRL_Young_Final 5035 1 63 2 -25 0 0 332 5035_1 +PRL_Young_Final 5035 1 64 2 25 0 0 425 5035_1 +PRL_Young_Final 5035 1 65 2 -25 0 0 290 5035_1 +PRL_Young_Final 5035 1 66 2 -25 0 0 300 5035_1 +PRL_Young_Final 5035 1 67 1 25 1 1 625 5035_1 +PRL_Young_Final 5035 1 68 1 25 0 1 834 
5035_1 +PRL_Young_Final 5035 1 69 1 25 0 1 448 5035_1 +PRL_Young_Final 5035 1 70 1 25 0 1 348 5035_1 +PRL_Young_Final 5035 1 71 1 25 0 1 367 5035_1 +PRL_Young_Final 5035 1 72 1 -25 0 0 296 5035_1 +PRL_Young_Final 5035 1 73 1 -25 0 0 624 5035_1 +PRL_Young_Final 5035 1 74 2 25 1 1 371 5035_1 +PRL_Young_Final 5035 1 75 2 -25 0 1 282 5035_1 +PRL_Young_Final 5035 1 76 2 25 0 1 302 5035_1 +PRL_Young_Final 5035 1 77 2 25 0 1 647 5035_1 +PRL_Young_Final 5035 1 78 2 25 0 1 374 5035_1 +PRL_Young_Final 5035 1 79 2 25 0 1 461 5035_1 +PRL_Young_Final 5035 1 80 2 25 0 1 260 5035_1 +PRL_Young_Final 5035 1 81 2 -25 0 0 342 5035_1 +PRL_Young_Final 5035 1 82 2 -25 0 0 438 5035_1 +PRL_Young_Final 5035 1 83 1 25 1 1 290 5035_1 +PRL_Young_Final 5035 1 84 1 25 0 1 350 5035_1 +PRL_Young_Final 5035 1 85 1 -25 0 1 202 5035_1 +PRL_Young_Final 5035 1 86 1 25 0 1 333 5035_1 +PRL_Young_Final 5035 1 87 1 25 0 1 687 5035_1 +PRL_Young_Final 5035 1 88 1 25 0 0 280 5035_1 +PRL_Young_Final 5035 1 89 1 25 0 0 358 5035_1 +PRL_Young_Final 5035 1 90 1 -25 0 0 570 5035_1 +PRL_Young_Final 5035 1 91 1 -25 0 0 632 5035_1 +PRL_Young_Final 5035 1 92 2 -25 1 1 334 5035_1 +PRL_Young_Final 5035 1 93 2 25 0 1 546 5035_1 +PRL_Young_Final 5035 1 94 2 25 0 1 649 5035_1 +PRL_Young_Final 5035 1 95 2 25 0 1 570 5035_1 +PRL_Young_Final 5035 1 96 2 25 0 1 651 5035_1 +PRL_Young_Final 5035 1 97 2 25 0 1 598 5035_1 +PRL_Young_Final 5035 1 98 2 -25 0 0 548 5035_1 +PRL_Young_Final 5035 1 99 2 -25 0 0 505 5035_1 +PRL_Young_Final 5035 1 100 1 -25 1 1 305 5035_1 +PRL_Young_Final 5035 1 101 1 -25 0 1 489 5035_1 +PRL_Young_Final 5035 1 102 2 -25 1 0 250 5035_1 +PRL_Young_Final 5035 1 103 2 -25 0 0 311 5035_1 +PRL_Young_Final 5035 1 104 1 25 1 1 342 5035_1 +PRL_Young_Final 5035 1 105 1 25 0 1 305 5035_1 +PRL_Young_Final 5035 1 106 1 25 0 1 310 5035_1 +PRL_Young_Final 5035 1 107 1 25 0 1 251 5035_1 +PRL_Young_Final 5035 1 108 1 25 0 1 254 5035_1 +PRL_Young_Final 5035 1 109 1 25 0 1 561 5035_1 +PRL_Young_Final 5035 1 110 1 25 0 0 287 5035_1 +PRL_Young_Final 5035 1 111 1 -25 0 0 261 5035_1 +PRL_Young_Final 5035 1 112 1 -25 0 0 276 5035_1 +PRL_Young_Final 5035 1 113 1 -25 0 0 74 5035_1 +PRL_Young_Final 5035 1 114 2 -25 1 1 257 5035_1 +PRL_Young_Final 5035 1 115 2 25 0 1 593 5035_1 +PRL_Young_Final 5035 1 116 2 25 0 1 565 5035_1 +PRL_Young_Final 5035 1 117 2 25 0 1 276 5035_1 +PRL_Young_Final 5035 1 118 2 25 0 1 614 5035_1 +PRL_Young_Final 5035 1 119 2 25 0 1 290 5035_1 +PRL_Young_Final 5035 1 120 2 25 0 1 385 5035_1 +PRL_Young_Final 5035 1 121 2 25 0 1 280 5035_1 +PRL_Young_Final 5035 1 122 2 -25 0 0 426 5035_1 +PRL_Young_Final 5035 1 123 2 -25 0 0 278 5035_1 +PRL_Young_Final 5035 1 124 2 -25 0 0 298 5035_1 +PRL_Young_Final 5035 1 125 1 -25 1 1 283 5035_1 +PRL_Young_Final 5035 1 126 1 25 0 1 338 5035_1 +PRL_Young_Final 5035 1 127 1 25 0 1 284 5035_1 +PRL_Young_Final 5035 1 128 1 -25 0 1 316 5035_1 +PRL_Young_Final 5035 1 129 1 25 0 1 287 5035_1 +PRL_Young_Final 5035 1 130 1 25 0 1 259 5035_1 +PRL_Young_Final 5035 1 131 1 -25 0 0 293 5035_1 +PRL_Young_Final 5035 1 132 1 25 0 0 301 5035_1 +PRL_Young_Final 5035 1 133 1 -25 0 0 360 5035_1 +PRL_Young_Final 5035 1 134 1 25 0 0 285 5035_1 +PRL_Young_Final 5035 1 135 1 -25 0 0 308 5035_1 +PRL_Young_Final 5035 1 136 1 -25 0 0 579 5035_1 +PRL_Young_Final 5035 1 137 1 -25 0 0 447 5035_1 +PRL_Young_Final 5035 1 138 2 25 1 1 257 5035_1 +PRL_Young_Final 5035 1 139 2 25 0 1 263 5035_1 +PRL_Young_Final 5035 1 140 2 25 0 1 268 5035_1 +PRL_Young_Final 5035 1 141 2 -25 0 1 268 5035_1 +PRL_Young_Final 5035 1 142 2 -25 0 1 268 
5035_1 +PRL_Young_Final 5035 1 143 2 25 0 1 528 5035_1 +PRL_Young_Final 5035 1 144 2 25 0 1 267 5035_1 +PRL_Young_Final 5035 1 145 2 -25 0 0 314 5035_1 +PRL_Young_Final 5035 1 146 2 -25 0 0 567 5035_1 +PRL_Young_Final 5035 1 147 2 -25 0 0 797 5035_1 +PRL_Young_Final 5035 1 148 1 25 1 1 338 5035_1 +PRL_Young_Final 5035 1 149 1 25 0 1 296 5035_1 +PRL_Young_Final 5035 1 150 1 25 0 1 567 5035_1 +PRL_Young_Final 5035 1 151 1 25 0 1 579 5035_1 +PRL_Young_Final 5035 1 152 1 -25 0 1 303 5035_1 +PRL_Young_Final 5035 1 153 1 25 0 0 286 5035_1 +PRL_Young_Final 5035 1 154 1 25 0 0 278 5035_1 +PRL_Young_Final 5035 1 155 1 -25 0 0 359 5035_1 +PRL_Young_Final 5035 1 156 1 -25 0 0 571 5035_1 +PRL_Young_Final 5035 1 157 2 25 1 1 517 5035_1 +PRL_Young_Final 5035 1 158 2 25 0 1 354 5035_1 +PRL_Young_Final 5035 1 159 2 25 0 1 342 5035_1 +PRL_Young_Final 5035 1 160 2 25 0 1 289 5035_1 +PRL_Young_Final 5035 1 161 2 25 0 1 273 5035_1 +PRL_Young_Final 5035 1 162 2 25 0 1 286 5035_1 +PRL_Young_Final 5035 1 163 2 25 0 1 579 5035_1 +PRL_Young_Final 5035 1 164 2 -25 0 1 571 5035_1 +PRL_Young_Final 5035 1 165 2 -25 0 0 349 5035_1 +PRL_Young_Final 5035 1 166 1 25 1 1 289 5035_1 +PRL_Young_Final 5035 1 167 1 25 0 1 510 5035_1 +PRL_Young_Final 5035 1 168 1 -25 0 1 287 5035_1 +PRL_Young_Final 5035 1 169 1 25 0 1 289 5035_1 +PRL_Young_Final 5035 1 170 1 25 0 1 281 5035_1 +PRL_Young_Final 5035 1 171 1 25 0 1 258 5035_1 +PRL_Young_Final 5035 1 172 1 25 0 1 302 5035_1 +PRL_Young_Final 5035 1 173 1 25 0 1 590 5035_1 +PRL_Young_Final 5035 1 174 1 -25 0 0 298 5035_1 +PRL_Young_Final 5035 1 175 1 -25 0 0 261 5035_1 +PRL_Young_Final 5035 1 176 2 -25 1 1 387 5035_1 +PRL_Young_Final 5035 1 177 2 -25 0 1 362 5035_1 +PRL_Young_Final 5035 1 178 1 -25 1 0 258 5035_1 +PRL_Young_Final 5035 1 179 1 25 0 0 346 5035_1 +PRL_Young_Final 5035 1 180 1 -25 0 0 299 5035_1 +PRL_Young_Final 5035 1 181 1 -25 0 0 311 5035_1 +PRL_Young_Final 5035 1 182 2 25 1 1 274 5035_1 +PRL_Young_Final 5035 1 183 2 25 0 1 295 5035_1 +PRL_Young_Final 5035 1 184 2 25 0 1 325 5035_1 +PRL_Young_Final 5035 1 185 2 25 0 1 330 5035_1 +PRL_Young_Final 5035 1 186 2 25 0 1 278 5035_1 +PRL_Young_Final 5035 1 187 2 -25 0 0 602 5035_1 +PRL_Young_Final 5035 1 188 2 -25 0 0 594 5035_1 +PRL_Young_Final 5035 1 189 1 25 1 1 269 5035_1 +PRL_Young_Final 5035 1 190 1 -25 0 1 274 5035_1 +PRL_Young_Final 5035 1 191 1 25 0 1 271 5035_1 +PRL_Young_Final 5035 1 192 1 25 0 1 301 5035_1 +PRL_Young_Final 5035 1 193 1 25 0 1 322 5035_1 +PRL_Young_Final 5035 1 194 1 25 0 1 332 5035_1 +PRL_Young_Final 5035 1 195 1 25 0 1 337 5035_1 +PRL_Young_Final 5035 1 196 1 25 0 1 274 5035_1 +PRL_Young_Final 5035 1 197 1 -25 0 0 279 5035_1 +PRL_Young_Final 5035 1 198 1 -25 0 0 577 5035_1 +PRL_Young_Final 5035 1 199 2 25 1 1 350 5035_1 +PRL_Young_Final 5035 1 200 2 -25 0 1 262 5035_1 +PRL_Young_Final 5035 2 1 1 -25 1 0 838 5035_2 +PRL_Young_Final 5035 2 2 1 25 0 0 413 5035_2 +PRL_Young_Final 5035 2 3 1 -25 0 0 491 5035_2 +PRL_Young_Final 5035 2 4 1 25 0 0 276 5035_2 +PRL_Young_Final 5035 2 5 1 -25 0 0 381 5035_2 +PRL_Young_Final 5035 2 6 1 -25 0 0 279 5035_2 +PRL_Young_Final 5035 2 7 2 25 1 1 556 5035_2 +PRL_Young_Final 5035 2 8 2 25 0 1 297 5035_2 +PRL_Young_Final 5035 2 9 2 -25 0 1 344 5035_2 +PRL_Young_Final 5035 2 10 2 25 0 1 285 5035_2 +PRL_Young_Final 5035 2 11 2 25 0 1 306 5035_2 +PRL_Young_Final 5035 2 12 2 -25 0 0 567 5035_2 +PRL_Young_Final 5035 2 13 2 -25 0 0 597 5035_2 +PRL_Young_Final 5035 2 14 1 25 1 1 266 5035_2 +PRL_Young_Final 5035 2 15 1 25 0 1 482 5035_2 +PRL_Young_Final 5035 2 16 1 25 0 1 
346 5035_2 +PRL_Young_Final 5035 2 17 1 -25 0 1 1135 5035_2 +PRL_Young_Final 5035 2 18 1 -25 0 1 294 5035_2 +PRL_Young_Final 5035 2 19 2 -25 1 0 283 5035_2 +PRL_Young_Final 5035 2 20 2 -25 0 0 356 5035_2 +PRL_Young_Final 5035 2 21 1 25 1 1 312 5035_2 +PRL_Young_Final 5035 2 22 1 25 0 1 322 5035_2 +PRL_Young_Final 5035 2 23 1 25 0 1 664 5035_2 +PRL_Young_Final 5035 2 24 1 25 0 1 586 5035_2 +PRL_Young_Final 5035 2 25 1 25 0 1 429 5035_2 +PRL_Young_Final 5035 2 26 1 25 0 0 479 5035_2 +PRL_Young_Final 5035 2 27 1 25 0 0 348 5035_2 +PRL_Young_Final 5035 2 28 1 -25 0 0 619 5035_2 +PRL_Young_Final 5035 2 29 1 -25 0 0 293 5035_2 +PRL_Young_Final 5035 2 30 2 25 1 1 272 5035_2 +PRL_Young_Final 5035 2 31 2 -25 0 1 306 5035_2 +PRL_Young_Final 5035 2 32 2 25 0 1 452 5035_2 +PRL_Young_Final 5035 2 33 2 25 0 1 262 5035_2 +PRL_Young_Final 5035 2 34 2 25 0 1 269 5035_2 +PRL_Young_Final 5035 2 35 2 25 0 1 272 5035_2 +PRL_Young_Final 5035 2 36 2 -25 0 0 294 5035_2 +PRL_Young_Final 5035 2 37 2 -25 0 0 416 5035_2 +PRL_Young_Final 5035 2 38 2 -25 0 0 368 5035_2 +PRL_Young_Final 5035 2 39 1 25 1 1 252 5035_2 +PRL_Young_Final 5035 2 40 1 25 0 1 241 5035_2 +PRL_Young_Final 5035 2 41 1 25 0 1 309 5035_2 +PRL_Young_Final 5035 2 42 1 -25 0 1 342 5035_2 +PRL_Young_Final 5035 2 43 1 25 0 1 318 5035_2 +PRL_Young_Final 5035 2 44 1 25 0 1 94 5035_2 +PRL_Young_Final 5035 2 45 1 -25 0 0 285 5035_2 +PRL_Young_Final 5035 2 46 1 25 0 0 258 5035_2 +PRL_Young_Final 5035 2 47 1 -25 0 0 363 5035_2 +PRL_Young_Final 5035 2 48 1 -25 0 0 316 5035_2 +PRL_Young_Final 5035 2 49 2 -25 1 1 310 5035_2 +PRL_Young_Final 5035 2 50 2 25 0 1 373 5035_2 +PRL_Young_Final 5035 2 51 2 25 0 1 571 5035_2 +PRL_Young_Final 5035 2 52 2 25 0 1 294 5035_2 +PRL_Young_Final 5035 2 53 2 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 54 2 -25 0 0 628 5035_2 +PRL_Young_Final 5035 2 55 2 -25 0 0 545 5035_2 +PRL_Young_Final 5035 2 56 1 25 1 1 304 5035_2 +PRL_Young_Final 5035 2 57 1 -25 0 1 323 5035_2 +PRL_Young_Final 5035 2 58 1 -25 0 1 295 5035_2 +PRL_Young_Final 5035 2 59 2 -25 1 0 366 5035_2 +PRL_Young_Final 5035 2 60 2 -25 0 0 276 5035_2 +PRL_Young_Final 5035 2 61 1 25 1 1 282 5035_2 +PRL_Young_Final 5035 2 62 1 25 0 1 399 5035_2 +PRL_Young_Final 5035 2 63 1 25 0 1 334 5035_2 +PRL_Young_Final 5035 2 64 1 25 0 1 310 5035_2 +PRL_Young_Final 5035 2 65 1 25 0 1 416 5035_2 +PRL_Young_Final 5035 2 66 1 25 0 1 651 5035_2 +PRL_Young_Final 5035 2 67 1 -25 0 0 739 5035_2 +PRL_Young_Final 5035 2 68 1 25 0 0 368 5035_2 +PRL_Young_Final 5035 2 69 1 -25 0 0 274 5035_2 +PRL_Young_Final 5035 2 70 1 25 0 0 583 5035_2 +PRL_Young_Final 5035 2 71 1 -25 0 0 618 5035_2 +PRL_Young_Final 5035 2 72 1 -25 0 0 350 5035_2 +PRL_Young_Final 5035 2 73 2 -25 1 1 256 5035_2 +PRL_Young_Final 5035 2 74 2 25 0 1 289 5035_2 +PRL_Young_Final 5035 2 75 2 25 0 1 356 5035_2 +PRL_Young_Final 5035 2 76 2 25 0 1 399 5035_2 +PRL_Young_Final 5035 2 77 2 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 78 2 25 0 1 325 5035_2 +PRL_Young_Final 5035 2 79 2 25 0 1 272 5035_2 +PRL_Young_Final 5035 2 80 2 25 0 1 536 5035_2 +PRL_Young_Final 5035 2 81 2 -25 0 0 372 5035_2 +PRL_Young_Final 5035 2 82 2 -25 0 0 308 5035_2 +PRL_Young_Final 5035 2 83 1 -25 1 1 460 5035_2 +PRL_Young_Final 5035 2 84 1 25 0 1 408 5035_2 +PRL_Young_Final 5035 2 85 1 25 0 1 307 5035_2 +PRL_Young_Final 5035 2 86 1 -25 0 1 635 5035_2 +PRL_Young_Final 5035 2 87 1 25 0 1 643 5035_2 +PRL_Young_Final 5035 2 88 1 25 0 1 331 5035_2 +PRL_Young_Final 5035 2 89 1 25 0 1 599 5035_2 +PRL_Young_Final 5035 2 90 1 25 0 1 288 5035_2 +PRL_Young_Final 5035 2 91 1 -25 0 
0 271 5035_2 +PRL_Young_Final 5035 2 92 1 -25 0 0 324 5035_2 +PRL_Young_Final 5035 2 93 2 25 1 1 356 5035_2 +PRL_Young_Final 5035 2 94 2 -25 0 1 812 5035_2 +PRL_Young_Final 5035 2 95 2 -25 0 1 767 5035_2 +PRL_Young_Final 5035 2 96 1 25 1 0 309 5035_2 +PRL_Young_Final 5035 2 97 1 25 0 0 278 5035_2 +PRL_Young_Final 5035 2 98 1 -25 0 0 367 5035_2 +PRL_Young_Final 5035 2 99 1 -25 0 0 279 5035_2 +PRL_Young_Final 5035 2 100 1 -25 0 0 489 5035_2 +PRL_Young_Final 5035 2 101 2 25 1 1 336 5035_2 +PRL_Young_Final 5035 2 102 2 25 0 1 285 5035_2 +PRL_Young_Final 5035 2 103 2 25 0 1 299 5035_2 +PRL_Young_Final 5035 2 104 2 25 0 1 455 5035_2 +PRL_Young_Final 5035 2 105 2 25 0 1 381 5035_2 +PRL_Young_Final 5035 2 106 2 25 0 1 327 5035_2 +PRL_Young_Final 5035 2 107 2 -25 0 1 644 5035_2 +PRL_Young_Final 5035 2 108 2 -25 0 0 608 5035_2 +PRL_Young_Final 5035 2 109 2 -25 0 0 280 5035_2 +PRL_Young_Final 5035 2 110 1 25 1 1 315 5035_2 +PRL_Young_Final 5035 2 111 1 25 0 1 677 5035_2 +PRL_Young_Final 5035 2 112 1 25 0 1 260 5035_2 +PRL_Young_Final 5035 2 113 1 25 0 1 652 5035_2 +PRL_Young_Final 5035 2 114 1 25 0 1 565 5035_2 +PRL_Young_Final 5035 2 115 1 -25 0 0 283 5035_2 +PRL_Young_Final 5035 2 116 1 25 0 0 321 5035_2 +PRL_Young_Final 5035 2 117 1 -25 0 0 617 5035_2 +PRL_Young_Final 5035 2 118 1 -25 0 0 477 5035_2 +PRL_Young_Final 5035 2 119 2 25 1 1 336 5035_2 +PRL_Young_Final 5035 2 120 2 25 0 1 379 5035_2 +PRL_Young_Final 5035 2 121 2 -25 0 1 341 5035_2 +PRL_Young_Final 5035 2 122 2 25 0 1 494 5035_2 +PRL_Young_Final 5035 2 123 2 25 0 1 412 5035_2 +PRL_Young_Final 5035 2 124 2 -25 0 0 344 5035_2 +PRL_Young_Final 5035 2 125 2 -25 0 0 654 5035_2 +PRL_Young_Final 5035 2 126 1 -25 1 1 348 5035_2 +PRL_Young_Final 5035 2 127 1 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 128 1 25 0 1 622 5035_2 +PRL_Young_Final 5035 2 129 1 25 0 1 298 5035_2 +PRL_Young_Final 5035 2 130 1 25 0 1 406 5035_2 +PRL_Young_Final 5035 2 131 1 25 0 1 413 5035_2 +PRL_Young_Final 5035 2 132 1 -25 0 1 479 5035_2 +PRL_Young_Final 5035 2 133 1 -25 0 0 390 5035_2 +PRL_Young_Final 5035 2 134 2 -25 1 1 1168 5035_2 +PRL_Young_Final 5035 2 135 2 25 0 1 1025 5035_2 +PRL_Young_Final 5035 2 136 2 25 0 1 383 5035_2 +PRL_Young_Final 5035 2 137 2 25 0 1 415 5035_2 +PRL_Young_Final 5035 2 138 2 25 0 1 334 5035_2 +PRL_Young_Final 5035 2 139 2 25 0 1 369 5035_2 +PRL_Young_Final 5035 2 140 2 25 0 1 428 5035_2 +PRL_Young_Final 5035 2 141 2 -25 0 1 345 5035_2 +PRL_Young_Final 5035 2 142 2 -25 0 0 326 5035_2 +PRL_Young_Final 5035 2 143 1 25 1 1 548 5035_2 +PRL_Young_Final 5035 2 144 1 25 0 1 690 5035_2 +PRL_Young_Final 5035 2 145 1 25 0 1 635 5035_2 +PRL_Young_Final 5035 2 146 1 25 0 1 1661 5035_2 +PRL_Young_Final 5035 2 147 1 25 0 1 358 5035_2 +PRL_Young_Final 5035 2 148 1 25 0 1 443 5035_2 +PRL_Young_Final 5035 2 149 1 25 0 1 353 5035_2 +PRL_Young_Final 5035 2 150 1 -25 0 0 310 5035_2 +PRL_Young_Final 5035 2 151 1 25 0 0 664 5035_2 +PRL_Young_Final 5035 2 152 1 -25 0 0 320 5035_2 +PRL_Young_Final 5035 2 153 1 25 0 0 318 5035_2 +PRL_Young_Final 5035 2 154 1 -25 0 0 630 5035_2 +PRL_Young_Final 5035 2 155 1 -25 0 0 373 5035_2 +PRL_Young_Final 5035 2 156 2 -25 1 1 385 5035_2 +PRL_Young_Final 5035 2 157 2 25 0 1 477 5035_2 +PRL_Young_Final 5035 2 158 2 25 0 1 360 5035_2 +PRL_Young_Final 5035 2 159 2 -25 0 1 524 5035_2 +PRL_Young_Final 5035 2 160 2 25 0 1 495 5035_2 +PRL_Young_Final 5035 2 161 2 -25 0 0 447 5035_2 +PRL_Young_Final 5035 2 162 2 -25 0 0 596 5035_2 +PRL_Young_Final 5035 2 163 1 25 1 1 598 5035_2 +PRL_Young_Final 5035 2 164 1 25 0 1 246 5035_2 
+PRL_Young_Final 5035 2 165 1 25 0 1 283 5035_2 +PRL_Young_Final 5035 2 166 1 25 0 1 604 5035_2 +PRL_Young_Final 5035 2 167 1 -25 0 1 261 5035_2 +PRL_Young_Final 5035 2 168 1 -25 0 1 343 5035_2 +PRL_Young_Final 5035 2 169 2 -25 1 0 182 5035_2 +PRL_Young_Final 5035 2 170 2 -25 0 0 346 5035_2 +PRL_Young_Final 5035 2 171 1 25 1 1 432 5035_2 +PRL_Young_Final 5035 2 172 1 25 0 1 264 5035_2 +PRL_Young_Final 5035 2 173 1 25 0 1 347 5035_2 +PRL_Young_Final 5035 2 174 1 25 0 1 724 5035_2 +PRL_Young_Final 5035 2 175 1 25 0 1 607 5035_2 +PRL_Young_Final 5035 2 176 1 25 0 1 298 5035_2 +PRL_Young_Final 5035 2 177 1 25 0 0 292 5035_2 +PRL_Young_Final 5035 2 178 1 25 0 0 377 5035_2 +PRL_Young_Final 5035 2 179 1 -25 0 0 368 5035_2 +PRL_Young_Final 5035 2 180 1 -25 0 0 579 5035_2 +PRL_Young_Final 5035 2 181 2 -25 1 1 580 5035_2 +PRL_Young_Final 5035 2 182 2 25 0 1 371 5035_2 +PRL_Young_Final 5035 2 183 2 25 0 1 840 5035_2 +PRL_Young_Final 5035 2 184 2 25 0 1 642 5035_2 +PRL_Young_Final 5035 2 185 2 25 0 1 14 5035_2 +PRL_Young_Final 5035 2 186 2 25 0 1 262 5035_2 +PRL_Young_Final 5035 2 187 2 25 0 1 532 5035_2 +PRL_Young_Final 5035 2 188 2 25 0 1 379 5035_2 +PRL_Young_Final 5035 2 189 2 -25 0 0 327 5035_2 +PRL_Young_Final 5035 2 190 2 -25 0 0 616 5035_2 +PRL_Young_Final 5035 2 191 1 -25 1 1 319 5035_2 +PRL_Young_Final 5035 2 192 1 25 0 1 292 5035_2 +PRL_Young_Final 5035 2 193 1 25 0 1 620 5035_2 +PRL_Young_Final 5035 2 194 1 -25 0 1 318 5035_2 +PRL_Young_Final 5035 2 195 1 25 0 1 349 5035_2 +PRL_Young_Final 5035 2 196 1 -25 0 0 320 5035_2 +PRL_Young_Final 5035 2 197 1 -25 0 0 289 5035_2 +PRL_Young_Final 5035 2 198 2 25 1 1 641 5035_2 +PRL_Young_Final 5035 2 199 2 25 0 1 600 5035_2 +PRL_Young_Final 5035 2 200 2 25 0 1 597 5035_2 +PRL_Young_Final 5035 3 1 1 25 1 0 553 5035_3 +PRL_Young_Final 5035 3 2 1 -25 0 0 296 5035_3 +PRL_Young_Final 5035 3 3 1 -25 0 0 572 5035_3 +PRL_Young_Final 5035 3 4 2 25 1 1 278 5035_3 +PRL_Young_Final 5035 3 5 2 -25 0 1 527 5035_3 +PRL_Young_Final 5035 3 6 2 -25 0 1 313 5035_3 +PRL_Young_Final 5035 3 7 1 -25 1 0 293 5035_3 +PRL_Young_Final 5035 3 8 1 -25 0 0 267 5035_3 +PRL_Young_Final 5035 3 9 2 25 1 1 345 5035_3 +PRL_Young_Final 5035 3 10 2 25 0 1 314 5035_3 +PRL_Young_Final 5035 3 11 2 25 0 1 611 5035_3 +PRL_Young_Final 5035 3 12 2 25 0 1 280 5035_3 +PRL_Young_Final 5035 3 13 2 25 0 1 250 5035_3 +PRL_Young_Final 5035 3 14 2 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 15 2 -25 0 1 267 5035_3 +PRL_Young_Final 5035 3 16 2 25 0 1 333 5035_3 +PRL_Young_Final 5035 3 17 2 -25 0 0 297 5035_3 +PRL_Young_Final 5035 3 18 2 -25 0 0 701 5035_3 +PRL_Young_Final 5035 3 19 1 25 1 1 311 5035_3 +PRL_Young_Final 5035 3 20 1 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 21 1 25 0 1 470 5035_3 +PRL_Young_Final 5035 3 22 1 25 0 1 1365 5035_3 +PRL_Young_Final 5035 3 23 1 25 0 1 261 5035_3 +PRL_Young_Final 5035 3 24 1 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 25 1 -25 0 1 298 5035_3 +PRL_Young_Final 5035 3 26 1 -25 0 0 412 5035_3 +PRL_Young_Final 5035 3 27 2 25 1 1 283 5035_3 +PRL_Young_Final 5035 3 28 2 25 0 1 499 5035_3 +PRL_Young_Final 5035 3 29 2 -25 0 1 51 5035_3 +PRL_Young_Final 5035 3 30 2 25 0 1 425 5035_3 +PRL_Young_Final 5035 3 31 2 25 0 1 597 5035_3 +PRL_Young_Final 5035 3 32 2 25 0 1 354 5035_3 +PRL_Young_Final 5035 3 33 2 25 0 1 318 5035_3 +PRL_Young_Final 5035 3 34 2 25 0 1 270 5035_3 +PRL_Young_Final 5035 3 35 2 25 0 0 322 5035_3 +PRL_Young_Final 5035 3 36 2 -25 0 0 319 5035_3 +PRL_Young_Final 5035 3 37 2 25 0 0 332 5035_3 +PRL_Young_Final 5035 3 38 2 -25 0 0 340 5035_3 +PRL_Young_Final 5035 3 
39 2 -25 0 0 306 5035_3 +PRL_Young_Final 5035 3 40 1 -25 1 1 357 5035_3 +PRL_Young_Final 5035 3 41 1 -25 0 1 311 5035_3 +PRL_Young_Final 5035 3 42 2 -25 1 0 289 5035_3 +PRL_Young_Final 5035 3 43 2 -25 0 0 348 5035_3 +PRL_Young_Final 5035 3 44 1 25 1 1 397 5035_3 +PRL_Young_Final 5035 3 45 1 25 0 1 664 5035_3 +PRL_Young_Final 5035 3 46 1 25 0 1 965 5035_3 +PRL_Young_Final 5035 3 47 1 25 0 1 301 5035_3 +PRL_Young_Final 5035 3 48 1 25 0 1 277 5035_3 +PRL_Young_Final 5035 3 49 1 25 0 1 430 5035_3 +PRL_Young_Final 5035 3 50 1 -25 0 1 399 5035_3 +PRL_Young_Final 5035 3 51 1 25 0 1 398 5035_3 +PRL_Young_Final 5035 3 52 1 -25 0 0 718 5035_3 +PRL_Young_Final 5035 3 53 1 -25 0 0 388 5035_3 +PRL_Young_Final 5035 3 54 1 25 0 0 395 5035_3 +PRL_Young_Final 5035 3 55 1 25 0 0 506 5035_3 +PRL_Young_Final 5035 3 56 1 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 57 1 -25 0 0 923 5035_3 +PRL_Young_Final 5035 3 58 2 25 1 1 522 5035_3 +PRL_Young_Final 5035 3 59 2 25 0 1 294 5035_3 +PRL_Young_Final 5035 3 60 2 25 0 1 322 5035_3 +PRL_Young_Final 5035 3 61 2 25 0 1 607 5035_3 +PRL_Young_Final 5035 3 62 2 25 0 1 284 5035_3 +PRL_Young_Final 5035 3 63 2 25 0 1 461 5035_3 +PRL_Young_Final 5035 3 64 2 -25 0 1 318 5035_3 +PRL_Young_Final 5035 3 65 2 25 0 1 531 5035_3 +PRL_Young_Final 5035 3 66 2 -25 0 0 274 5035_3 +PRL_Young_Final 5035 3 67 2 -25 0 0 341 5035_3 +PRL_Young_Final 5035 3 68 1 25 1 1 390 5035_3 +PRL_Young_Final 5035 3 69 1 -25 0 1 356 5035_3 +PRL_Young_Final 5035 3 70 1 25 0 1 521 5035_3 +PRL_Young_Final 5035 3 71 1 25 0 1 302 5035_3 +PRL_Young_Final 5035 3 72 1 25 0 1 308 5035_3 +PRL_Young_Final 5035 3 73 1 25 0 1 338 5035_3 +PRL_Young_Final 5035 3 74 1 25 0 1 268 5035_3 +PRL_Young_Final 5035 3 75 1 -25 0 1 128 5035_3 +PRL_Young_Final 5035 3 76 1 -25 0 0 445 5035_3 +PRL_Young_Final 5035 3 77 2 -25 1 1 277 5035_3 +PRL_Young_Final 5035 3 78 2 25 0 1 584 5035_3 +PRL_Young_Final 5035 3 79 2 25 0 1 487 5035_3 +PRL_Young_Final 5035 3 80 2 25 0 1 368 5035_3 +PRL_Young_Final 5035 3 81 2 25 0 1 584 5035_3 +PRL_Young_Final 5035 3 82 2 25 0 1 300 5035_3 +PRL_Young_Final 5035 3 83 2 25 0 1 553 5035_3 +PRL_Young_Final 5035 3 84 2 -25 0 1 296 5035_3 +PRL_Young_Final 5035 3 85 2 -25 0 0 320 5035_3 +PRL_Young_Final 5035 3 86 2 25 0 0 299 5035_3 +PRL_Young_Final 5035 3 87 2 -25 0 0 661 5035_3 +PRL_Young_Final 5035 3 88 2 -25 0 0 304 5035_3 +PRL_Young_Final 5035 3 89 1 25 1 1 312 5035_3 +PRL_Young_Final 5035 3 90 1 25 0 1 631 5035_3 +PRL_Young_Final 5035 3 91 1 25 0 1 658 5035_3 +PRL_Young_Final 5035 3 92 1 25 0 1 248 5035_3 +PRL_Young_Final 5035 3 93 1 25 0 1 301 5035_3 +PRL_Young_Final 5035 3 94 1 25 0 1 551 5035_3 +PRL_Young_Final 5035 3 95 1 -25 0 0 597 5035_3 +PRL_Young_Final 5035 3 96 1 -25 0 0 605 5035_3 +PRL_Young_Final 5035 3 97 2 25 1 1 294 5035_3 +PRL_Young_Final 5035 3 98 2 -25 0 1 461 5035_3 +PRL_Young_Final 5035 3 99 2 25 0 1 313 5035_3 +PRL_Young_Final 5035 3 100 2 25 0 1 370 5035_3 +PRL_Young_Final 5035 3 101 2 -25 0 1 144 5035_3 +PRL_Young_Final 5035 3 102 2 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 103 1 25 1 1 334 5035_3 +PRL_Young_Final 5035 3 104 1 25 0 1 333 5035_3 +PRL_Young_Final 5035 3 105 1 25 0 1 645 5035_3 +PRL_Young_Final 5035 3 106 1 25 0 1 308 5035_3 +PRL_Young_Final 5035 3 107 1 25 0 1 334 5035_3 +PRL_Young_Final 5035 3 108 1 -25 0 0 305 5035_3 +PRL_Young_Final 5035 3 109 1 -25 0 0 313 5035_3 +PRL_Young_Final 5035 3 110 2 -25 1 1 614 5035_3 +PRL_Young_Final 5035 3 111 2 -25 0 1 585 5035_3 +PRL_Young_Final 5035 3 112 2 25 0 1 273 5035_3 +PRL_Young_Final 5035 3 113 2 25 0 1 626 5035_3 
+PRL_Young_Final 5035 3 114 2 25 0 1 790 5035_3 +PRL_Young_Final 5035 3 115 2 25 0 1 402 5035_3 +PRL_Young_Final 5035 3 116 2 25 0 0 591 5035_3 +PRL_Young_Final 5035 3 117 2 -25 0 0 289 5035_3 +PRL_Young_Final 5035 3 118 2 25 0 0 404 5035_3 +PRL_Young_Final 5035 3 119 2 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 120 2 -25 0 0 635 5035_3 +PRL_Young_Final 5035 3 121 1 25 1 1 298 5035_3 +PRL_Young_Final 5035 3 122 1 25 0 1 804 5035_3 +PRL_Young_Final 5035 3 123 1 -25 0 1 304 5035_3 +PRL_Young_Final 5035 3 124 1 25 0 1 336 5035_3 +PRL_Young_Final 5035 3 125 1 25 0 1 683 5035_3 +PRL_Young_Final 5035 3 126 1 -25 0 0 290 5035_3 +PRL_Young_Final 5035 3 127 1 -25 0 0 403 5035_3 +PRL_Young_Final 5035 3 128 2 25 1 1 291 5035_3 +PRL_Young_Final 5035 3 129 2 25 0 1 311 5035_3 +PRL_Young_Final 5035 3 130 2 25 0 1 327 5035_3 +PRL_Young_Final 5035 3 131 2 25 0 1 303 5035_3 +PRL_Young_Final 5035 3 132 2 25 0 1 267 5035_3 +PRL_Young_Final 5035 3 133 2 -25 0 1 360 5035_3 +PRL_Young_Final 5035 3 134 2 25 0 1 351 5035_3 +PRL_Young_Final 5035 3 135 2 -25 0 0 358 5035_3 +PRL_Young_Final 5035 3 136 2 -25 0 0 354 5035_3 +PRL_Young_Final 5035 3 137 1 25 1 1 615 5035_3 +PRL_Young_Final 5035 3 138 1 -25 0 1 329 5035_3 +PRL_Young_Final 5035 3 139 1 25 0 1 314 5035_3 +PRL_Young_Final 5035 3 140 1 25 0 1 351 5035_3 +PRL_Young_Final 5035 3 141 1 25 0 1 356 5035_3 +PRL_Young_Final 5035 3 142 1 25 0 0 304 5035_3 +PRL_Young_Final 5035 3 143 1 25 0 0 278 5035_3 +PRL_Young_Final 5035 3 144 1 -25 0 0 346 5035_3 +PRL_Young_Final 5035 3 145 1 -25 0 0 378 5035_3 +PRL_Young_Final 5035 3 146 2 25 1 1 253 5035_3 +PRL_Young_Final 5035 3 147 2 25 0 1 336 5035_3 +PRL_Young_Final 5035 3 148 2 -25 0 1 796 5035_3 +PRL_Young_Final 5035 3 149 2 -25 0 1 621 5035_3 +PRL_Young_Final 5035 3 150 1 -25 1 0 329 5035_3 +PRL_Young_Final 5035 3 151 2 25 1 1 249 5035_3 +PRL_Young_Final 5035 3 152 2 25 0 1 302 5035_3 +PRL_Young_Final 5035 3 153 2 25 0 1 390 5035_3 +PRL_Young_Final 5035 3 154 2 25 0 1 341 5035_3 +PRL_Young_Final 5035 3 155 2 25 0 1 260 5035_3 +PRL_Young_Final 5035 3 156 2 25 0 1 278 5035_3 +PRL_Young_Final 5035 3 157 2 -25 0 1 432 5035_3 +PRL_Young_Final 5035 3 158 2 -25 0 0 276 5035_3 +PRL_Young_Final 5035 3 159 1 25 1 1 558 5035_3 +PRL_Young_Final 5035 3 160 1 25 0 1 313 5035_3 +PRL_Young_Final 5035 3 161 1 25 0 1 360 5035_3 +PRL_Young_Final 5035 3 162 1 25 0 1 557 5035_3 +PRL_Young_Final 5035 3 163 1 25 0 1 612 5035_3 +PRL_Young_Final 5035 3 164 1 25 0 1 388 5035_3 +PRL_Young_Final 5035 3 165 1 25 0 1 613 5035_3 +PRL_Young_Final 5035 3 166 1 -25 0 1 260 5035_3 +PRL_Young_Final 5035 3 167 1 -25 0 0 856 5035_3 +PRL_Young_Final 5035 3 168 2 25 1 1 586 5035_3 +PRL_Young_Final 5035 3 169 2 25 0 1 705 5035_3 +PRL_Young_Final 5035 3 170 2 -25 0 1 446 5035_3 +PRL_Young_Final 5035 3 171 2 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 172 2 25 0 1 365 5035_3 +PRL_Young_Final 5035 3 173 2 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 174 2 -25 0 0 268 5035_3 +PRL_Young_Final 5035 3 175 2 25 0 0 255 5035_3 +PRL_Young_Final 5035 3 176 2 -25 0 0 533 5035_3 +PRL_Young_Final 5035 3 177 1 25 1 1 320 5035_3 +PRL_Young_Final 5035 3 178 1 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 179 1 -25 0 1 271 5035_3 +PRL_Young_Final 5035 3 180 1 -25 0 1 553 5035_3 +PRL_Young_Final 5035 3 181 2 -25 1 0 275 5035_3 +PRL_Young_Final 5035 3 182 1 25 1 1 293 5035_3 +PRL_Young_Final 5035 3 183 1 25 0 1 554 5035_3 +PRL_Young_Final 5035 3 184 1 25 0 1 300 5035_3 +PRL_Young_Final 5035 3 185 1 25 0 1 274 5035_3 +PRL_Young_Final 5035 3 186 1 25 0 1 289 5035_3 +PRL_Young_Final 5035 3 
187 1 25 0 1 320 5035_3 +PRL_Young_Final 5035 3 188 1 -25 0 1 303 5035_3 +PRL_Young_Final 5035 3 189 1 -25 0 0 390 5035_3 +PRL_Young_Final 5035 3 190 2 25 1 1 272 5035_3 +PRL_Young_Final 5035 3 191 2 25 0 1 673 5035_3 +PRL_Young_Final 5035 3 192 2 25 0 1 263 5035_3 +PRL_Young_Final 5035 3 193 2 25 0 1 274 5035_3 +PRL_Young_Final 5035 3 194 2 25 0 1 578 5035_3 +PRL_Young_Final 5035 3 195 2 25 0 1 483 5035_3 +PRL_Young_Final 5035 3 196 2 -25 0 0 324 5035_3 +PRL_Young_Final 5035 3 197 2 -25 0 0 324 5035_3 +PRL_Young_Final 5035 3 198 1 25 1 1 299 5035_3 +PRL_Young_Final 5035 3 199 1 -25 0 1 406 5035_3 +PRL_Young_Final 5035 3 200 1 25 0 1 272 5035_3 diff --git a/R/inst/extdata/pst_exampleData.txt b/R/inst/extdata/pst_exampleData.txt new file mode 100644 index 00000000..76f91700 --- /dev/null +++ b/R/inst/extdata/pst_exampleData.txt @@ -0,0 +1,1021 @@ +subjID type choice reward +1 12 0 0 +1 56 1 0 +1 34 0 0 +1 34 1 1 +1 12 1 1 +1 56 1 0 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 34 0 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 56 0 1 +1 34 1 1 +1 12 1 0 +1 56 0 0 +1 12 0 0 +1 34 1 0 +1 56 0 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 0 0 +1 34 1 1 +1 56 0 1 +1 12 0 0 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 56 0 1 +1 34 1 0 +1 12 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 56 0 1 +1 12 0 1 +1 34 0 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 12 1 0 +1 56 0 1 +1 12 1 0 +1 56 0 0 +1 34 1 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 0 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 1 +1 12 0 0 +1 34 1 0 +1 34 0 0 +1 12 0 0 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 0 0 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 34 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 12 1 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 1 +1 34 1 0 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 56 1 1 +1 12 0 0 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 12 0 1 +1 12 1 0 +1 56 0 1 +1 34 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 56 1 1 +1 34 1 1 +1 12 1 1 +1 12 1 0 +1 56 0 0 +1 34 0 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 0 0 +1 56 0 1 +1 34 1 0 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 0 0 +1 56 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 0 1 +1 12 1 1 +1 56 1 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 56 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 1 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 
+1 34 1 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 12 1 0 +1 34 0 0 +1 56 0 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 56 0 0 +1 34 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 1 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 34 1 1 +1 12 1 0 +1 56 0 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 12 0 0 +2 56 0 0 +2 56 1 0 +2 34 1 1 +2 12 1 1 +2 56 1 0 +2 12 1 0 +2 34 1 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 12 1 0 +2 56 1 0 +2 56 0 0 +2 34 1 1 +2 12 0 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 56 0 0 +2 12 1 0 +2 34 1 1 +2 56 1 0 +2 34 1 1 +2 12 0 0 +2 34 1 1 +2 56 1 1 +2 12 0 1 +2 56 0 1 +2 12 1 1 +2 34 1 0 +2 34 1 1 +2 56 0 1 +2 12 1 0 +2 34 1 0 +2 12 1 0 +2 56 1 1 +2 12 1 1 +2 34 1 0 +2 56 1 0 +2 12 1 1 +2 34 0 0 +2 56 1 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 56 1 1 +2 12 1 1 +2 56 1 1 +2 34 1 1 +2 12 1 1 +2 56 1 0 +2 12 1 1 +2 34 1 0 +3 34 1 0 +3 56 1 1 +3 12 0 0 +3 56 1 1 +3 12 0 0 +3 34 1 1 +3 56 0 1 +3 34 1 1 +3 12 0 1 +3 12 0 0 +3 34 1 1 +3 56 0 0 +3 12 0 0 +3 34 1 1 +3 56 0 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 34 1 1 +3 56 0 0 +3 12 0 1 +3 34 1 1 +3 56 1 0 +3 12 0 1 +3 12 1 1 +3 56 0 0 +3 34 1 0 +3 56 0 0 +3 12 0 0 +3 34 1 0 +3 56 1 0 +3 34 0 0 +3 12 0 1 +3 12 0 0 +3 56 0 0 +3 34 1 1 +3 34 1 1 +3 12 1 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 56 1 0 +3 12 0 0 +3 56 0 0 +3 34 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 1 +3 34 1 0 +3 56 0 0 +3 34 1 0 +3 56 0 0 +3 12 1 0 +3 56 1 1 +3 34 1 0 +3 12 0 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 12 1 1 +3 34 1 0 +3 56 1 1 +3 56 1 1 +3 34 0 0 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 1 1 +3 56 1 0 +3 34 1 0 +3 34 1 0 +3 12 1 0 +3 56 1 1 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 56 1 0 +3 12 1 1 +3 34 1 1 +3 12 1 1 +3 34 1 1 +3 56 1 1 +3 56 1 1 +3 12 0 0 +3 34 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 34 1 0 +3 12 1 1 +3 56 1 1 +3 34 1 0 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 56 1 1 +3 12 1 1 +3 34 1 1 +3 12 1 1 +3 56 1 0 +3 34 1 1 +3 56 1 1 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 56 1 1 +3 12 0 0 +3 34 1 0 +3 12 1 1 +3 56 0 0 +3 34 1 1 +4 12 0 0 +4 34 0 0 +4 56 1 1 +4 34 1 1 +4 56 0 1 +4 12 0 0 +4 56 1 0 +4 34 0 1 +4 12 1 1 +4 34 1 0 +4 12 0 1 +4 56 0 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 34 0 1 +4 56 1 1 +4 12 0 1 +4 34 0 0 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 56 1 0 +4 12 0 0 +4 56 0 1 +4 34 0 0 +4 56 1 1 +4 34 0 0 +4 12 1 0 +4 12 1 1 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 56 1 0 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 34 1 1 +4 56 0 1 +4 12 0 0 +4 34 1 1 +4 12 1 1 +4 34 1 0 +4 56 0 0 +4 56 0 0 +4 34 0 0 +4 12 1 1 +4 12 1 1 +4 56 0 1 +4 34 1 0 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 34 1 0 +4 56 0 0 +4 12 1 1 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 34 1 0 +4 12 1 1 +4 56 1 0 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 34 1 1 +4 56 0 0 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 1 0 +4 12 1 1 +4 34 1 0 +4 56 0 1 +4 34 0 0 +4 12 0 1 +4 56 0 1 +4 34 0 0 +4 
56 0 0 +4 12 0 0 +4 34 0 0 +4 12 0 0 +4 56 0 0 +4 56 0 0 +4 34 1 1 +4 12 1 1 +4 12 1 0 +4 34 1 1 +4 56 0 1 +4 12 1 1 +4 34 1 0 +4 56 0 1 +4 34 1 0 +4 12 1 0 +4 56 0 1 +4 12 0 1 +4 56 0 1 +4 34 0 0 +4 34 1 1 +4 12 0 0 +4 56 0 1 +4 12 0 0 +4 34 1 0 +4 56 0 0 +4 34 1 1 +4 12 1 0 +4 56 0 1 +4 12 0 0 +4 56 0 1 +4 34 1 0 +4 12 0 0 +4 56 0 0 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 34 0 1 +4 56 0 1 +4 12 0 0 +4 34 0 1 +4 34 0 0 +4 12 1 1 +4 56 0 0 +4 56 1 0 +4 12 1 0 +4 34 0 1 +4 56 1 1 +4 12 1 1 +4 34 0 1 +4 12 1 1 +4 56 1 1 +4 34 0 1 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 12 0 0 +4 56 1 1 +4 34 0 0 +4 56 1 0 +4 12 0 0 +4 34 1 1 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 56 0 0 +4 34 1 1 +4 12 0 0 +4 56 0 1 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 56 0 0 +4 12 0 1 +4 34 0 0 +4 56 1 0 +4 12 1 0 +4 12 0 0 +4 56 1 1 +4 34 0 1 +4 56 1 1 +4 12 1 0 +4 34 0 0 +4 56 1 1 +4 12 0 0 +4 34 1 1 +4 34 1 0 +4 56 0 1 +4 12 1 0 +4 34 1 0 +4 56 0 0 +4 12 0 0 +4 34 0 1 +4 56 0 1 +4 12 1 1 +4 12 0 1 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 34 0 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 1 0 +4 34 0 0 +4 56 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 0 +4 12 1 1 +4 12 1 1 +4 56 1 1 +4 34 0 0 +4 56 1 1 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 34 1 0 +4 12 0 0 +4 56 1 1 +4 34 1 0 +4 56 1 0 +4 12 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 1 +4 34 0 1 +4 56 1 1 +4 12 1 1 +4 12 1 0 +4 34 0 1 +4 56 1 1 +4 56 1 0 +4 12 0 0 +4 34 0 0 +4 56 1 1 +4 34 1 1 +4 12 1 1 +4 12 1 0 +4 34 1 0 +4 56 1 0 +4 34 0 0 +4 12 1 1 +4 56 1 0 +4 56 0 0 +4 12 1 1 +4 34 1 1 +4 34 1 0 +4 12 0 1 +4 56 0 0 +4 34 0 1 +4 56 0 0 +4 12 1 1 +4 12 0 0 +4 34 0 1 +4 56 1 1 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 56 0 1 +4 12 1 1 +4 34 0 0 +4 12 1 0 +4 56 0 0 +4 34 1 0 +4 56 0 0 +4 34 1 0 +4 12 0 0 +4 56 0 1 +4 12 1 1 +4 34 0 0 +4 56 1 0 +4 34 1 0 +4 12 0 0 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 0 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 1 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 56 0 1 +4 34 0 0 +4 12 1 1 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 0 +4 12 1 0 +4 34 0 0 +4 56 0 1 +4 56 0 0 +4 12 1 0 +4 34 1 1 +4 34 1 1 +4 12 0 0 +4 56 1 1 +4 56 1 0 +4 12 1 0 +4 34 0 0 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 56 0 0 +4 34 0 0 +4 12 1 0 +4 56 0 0 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 56 0 0 +4 34 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 0 +4 34 1 0 +4 12 1 1 +4 56 1 1 +4 56 1 1 +4 34 1 0 +4 12 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 12 1 1 +4 34 1 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 0 +4 12 1 1 +4 56 1 0 +4 34 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 1 +4 12 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 12 1 1 +4 56 1 0 +5 12 1 1 +5 34 0 0 +5 56 0 1 +5 34 0 1 +5 56 0 1 +5 12 0 0 +5 34 0 0 +5 12 1 1 +5 56 0 0 +5 12 1 1 +5 56 0 0 +5 34 0 0 +5 34 0 0 +5 12 1 0 +5 56 0 0 +5 56 0 1 +5 34 0 1 +5 12 1 1 +5 34 0 0 +5 12 1 1 +5 56 1 1 +5 34 0 1 +5 12 1 1 +5 56 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 34 0 1 +5 12 0 0 +5 56 0 0 +5 34 0 0 +5 56 0 1 +5 12 1 1 +5 12 1 1 +5 56 0 0 +5 34 0 0 +5 56 1 0 +5 12 1 0 +5 34 0 1 +5 34 0 0 +5 12 1 1 +5 56 1 1 +5 56 0 1 +5 34 0 0 +5 12 1 1 +5 34 0 1 +5 12 1 1 +5 56 0 0 +5 12 1 0 +5 56 0 0 +5 34 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 12 1 1 +5 56 1 1 +5 34 1 0 +5 56 0 1 +5 12 1 1 +5 34 0 1 +5 34 0 1 +5 56 0 0 +5 12 1 0 +5 12 1 1 +5 56 0 1 +5 34 0 0 +5 12 1 1 +5 34 1 0 +5 56 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 56 1 0 +5 34 1 1 +5 12 1 1 +5 56 1 1 +5 12 1 0 +5 34 1 
1 +5 34 0 1 +5 56 0 0 +5 12 1 0 +5 34 0 0 +5 56 0 0 +5 12 1 1 +5 12 1 1 +5 34 0 0 +5 56 0 0 +5 56 0 0 +5 12 1 1 +5 34 1 0 +5 12 1 1 +5 56 1 0 +5 34 0 0 +5 34 0 0 +5 56 0 0 +5 12 1 1 +5 12 1 0 +5 56 0 0 +5 34 1 0 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 0 1 +5 12 1 1 +5 34 1 0 +5 56 1 0 +5 12 1 1 +5 34 1 1 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 1 0 +5 34 1 1 +5 12 1 1 +5 34 1 0 +5 56 1 1 +5 12 1 0 diff --git a/R/inst/extdata/ra_data_attend.txt b/R/inst/extdata/ra_data_attend.txt new file mode 100644 index 00000000..131612dd --- /dev/null +++ b/R/inst/extdata/ra_data_attend.txt @@ -0,0 +1,4192 @@ +gain loss cert gamble outcome cond subjID +2 -1 0 1 2 0 1 +9 -13.5 0 0 0 0 1 +5 -6.88 0 0 0 0 1 +10 -10 0 0 0 0 1 +6 -2.25 0 1 6 0 1 +6 -6.75 0 0 0 0 1 +9 -4.5 0 1 -4.5 0 1 +10 -13.75 0 0 0 0 1 +6 -8.25 0 0 0 0 1 +5 -10 0 0 0 0 1 +10 -6.25 0 1 10 0 1 +12 -3 0 1 -3 0 1 +12 -9 0 0 0 0 1 +8 -7 0 0 0 0 1 +6 -12 0 0 0 0 1 +8 -2 0 1 -2 0 1 +12 -6 0 1 -6 0 1 +3 0 1 0 1 0 1 +10 -20 0 1 10 0 1 +5 -3.75 0 0 0 0 1 +2 -1.75 0 0 0 0 1 +6 -3.75 0 1 -3.75 0 1 +9 -12.38 0 0 0 0 1 +5 -6.25 0 0 0 0 1 +12 0 4 1 12 0 1 +2 -1.5 0 0 0 0 1 +6 -5.25 0 0 0 0 1 +10 -18.75 0 0 0 0 1 +6 -6 0 0 0 0 1 +12 0 5 1 12 0 1 +4 -2 0 1 4 0 1 +2 -4 0 0 0 0 1 +5 -2.5 0 1 5 0 1 +2 -3.75 0 0 0 0 1 +9 -15.75 0 0 0 0 1 +8 -4 0 1 8 0 1 +26 0 12 1 26 0 1 +6 -1.5 0 1 6 0 1 +4 -6 0 0 0 0 1 +10 -2.5 0 1 -2.5 0 1 +8 -12 0 0 0 0 1 +2 -3.5 0 0 0 0 1 +5 -5.63 0 0 0 0 1 +12 -24 0 0 0 0 1 +25 0 10 1 0 0 1 +4 -6.5 0 0 0 0 1 +5 -9.38 0 0 0 0 1 +5 -7.5 0 0 0 0 1 +4 -4 0 1 -4 0 1 +6 -10.5 0 0 0 0 1 +13 0 6 1 13 0 1 +12 -22.5 0 0 0 0 1 +4 -7.5 0 0 0 0 1 +5 0 2 1 5 0 1 +10 -15 0 0 0 0 1 +9 -16.88 0 0 0 0 1 +2 -2.5 0 0 0 0 1 +10 -16.25 0 0 0 0 1 +6 -11.25 0 0 0 0 1 +4 -1.5 0 1 4 0 1 +6 -9 0 0 0 0 1 +12 -19.5 0 0 0 0 1 +10 -12.5 0 0 0 0 1 +2 -3 0 0 0 0 1 +8 -16 0 0 0 0 1 +4 0 2 1 4 0 1 +12 -7.5 0 1 -7.5 0 1 +12 -13.5 0 0 0 0 1 +22 0 10 1 22 0 1 +12 -21 0 0 0 0 1 +7 0 3 1 7 0 1 +10 -8.75 0 0 0 0 1 +2 -1.25 0 0 0 0 1 +9 -6.75 0 0 0 0 1 +12 0 6 1 12 0 1 +28 0 13 1 28 0 1 +9 -10.13 0 0 0 0 1 +2 -0.5 0 1 2 0 1 +25 0 9 1 25 0 1 +6 -7.5 0 0 0 0 1 +4 -3 0 0 0 0 1 +10 -3.75 0 1 10 0 1 +12 -4.5 0 1 -4.5 0 1 +12 -15 0 0 0 0 1 +6 -3 0 0 0 0 1 +9 -14.63 0 0 0 0 1 +5 -1.25 0 1 -1.25 0 1 +8 -11 0 0 0 0 1 +10 -17.5 0 0 0 0 1 +8 -10 0 0 0 0 1 +9 -9 0 0 0 0 1 +10 -11.25 0 0 0 0 1 +12 -12 0 0 0 0 1 +8 -14 0 0 0 0 1 +12 -16.5 0 1 -16.5 0 1 +4 -7 0 1 -7 0 1 +4 -1 0 1 -1 0 1 +5 -1.88 0 1 5 0 1 +8 0 3 1 0 0 1 +2 -3.25 0 0 0 0 1 +5 -5 0 0 0 0 1 +26 0 10 1 0 0 1 +12 -10.5 0 0 0 0 1 +2 0 1 1 0 0 1 +6 -9.75 0 0 0 0 1 +8 -3 0 1 8 0 1 +13 0 5 1 13 0 1 +10 -7.5 0 0 0 0 1 +8 -13 0 0 0 0 1 +9 -3.38 0 1 -3.38 0 1 +30 0 12 1 0 0 1 +8 -8 0 0 0 0 1 +8 -5 0 0 0 0 1 +12 -18 0 0 0 0 1 +10 -5 0 1 -5 0 1 +9 -11.25 0 0 0 0 1 +9 -7.88 0 0 0 0 1 +8 -6 0 1 -6 0 1 +6 -4.5 0 0 0 0 1 +8 -9 0 0 0 0 1 +4 -5.5 0 0 0 0 1 +4 -5 0 0 0 0 1 +9 -2.25 0 1 -2.25 0 1 +23 0 10 1 0 0 1 +9 -5.63 0 1 -5.63 0 1 +4 -8 0 0 0 0 1 +19 0 8 1 19 0 1 +2 -2 0 0 0 0 1 +5 -8.13 0 0 0 0 1 +5 -4.38 0 0 0 0 1 +2 -2.25 0 0 0 0 1 +2 -0.75 0 1 -0.75 0 1 +2 -2.75 0 0 0 0 1 +5 -8.75 0 0 0 0 1 +9 -18 0 0 0 0 1 +4 -3.5 0 0 0 0 1 +9 -6.75 0 1 -6.75 0 2 +6 -6.75 0 0 0 0 2 +6 -3 0 1 6 0 2 +2 -1.5 0 0 0 0 2 +4 -3 0 0 0 0 2 +5 -6.88 0 0 0 0 2 +12 -9 0 1 12 0 2 +4 -5 0 0 0 0 2 +5 -7.5 0 0 0 0 2 +4 -4 0 1 -4 0 2 +9 -5.63 0 1 -5.63 0 2 +9 -14.63 0 0 0 0 2 +5 -9.38 0 0 0 0 2 +6 -4.5 0 1 6 0 2 +8 -7 0 0 0 0 2 +10 -16.25 0 0 0 0 2 +10 -17.5 0 0 0 0 2 +9 -16.88 0 0 0 0 2 +8 -5 0 1 8 0 2 +6 -1.5 0 1 6 0 2 +12 -18 0 0 0 0 2 +5 -6.25 0 
0 0 0 2 +8 -4 0 1 8 0 2 +9 -15.75 0 0 0 0 2 +9 -13.5 0 0 0 0 2 +5 -8.13 0 0 0 0 2 +2 0 1 1 0 0 2 +2 -3.75 0 0 0 0 2 +4 -6.5 0 0 0 0 2 +10 -5 0 1 -5 0 2 +12 -22.5 0 0 0 0 2 +2 -1 0 1 2 0 2 +13 0 6 1 13 0 2 +5 -2.5 0 0 0 0 2 +2 -0.5 0 1 2 0 2 +2 -3.25 0 1 -3.25 0 2 +30 0 12 1 0 0 2 +8 -8 0 1 8 0 2 +4 -5.5 0 0 0 0 2 +23 0 10 1 0 0 2 +4 -3.5 0 0 0 0 2 +5 0 2 1 5 0 2 +8 0 3 1 0 0 2 +9 -10.13 0 0 0 0 2 +8 -16 0 0 0 0 2 +12 -24 0 0 0 0 2 +9 -3.38 0 1 -3.38 0 2 +6 -5.25 0 1 6 0 2 +2 -4 0 0 0 0 2 +4 -1 0 1 -1 0 2 +6 -11.25 0 0 0 0 2 +5 -4.38 0 1 -4.38 0 2 +6 -2.25 0 1 6 0 2 +12 -10.5 0 1 12 0 2 +9 -18 0 0 0 0 2 +10 -20 0 0 0 0 2 +4 -4.5 0 0 0 0 2 +9 -2.25 0 1 -2.25 0 2 +4 -6 0 0 0 0 2 +8 -10 0 1 -10 0 2 +5 -5 0 1 -5 0 2 +5 -8.75 0 0 0 0 2 +8 -6 0 1 -6 0 2 +10 -13.75 0 0 0 0 2 +2 -2.5 0 0 0 0 2 +8 -11 0 1 -11 0 2 +4 -2 0 1 4 0 2 +10 -7.5 0 1 -7.5 0 2 +22 0 10 1 22 0 2 +25 0 10 1 0 0 2 +6 -9.75 0 0 0 0 2 +12 0 5 1 12 0 2 +4 -2.5 0 1 -2.5 0 2 +8 -3 0 1 8 0 2 +10 -11.25 0 1 -11.25 0 2 +5 -10 0 1 5 0 2 +10 -15 0 0 0 0 2 +2 -3.5 0 0 0 0 2 +12 0 4 1 12 0 2 +13 0 5 0 5 0 2 +5 -3.75 0 1 5 0 2 +26 0 12 0 12 0 2 +5 -5.63 0 0 0 0 2 +8 -2 0 1 -2 0 2 +2 -3 0 0 0 0 2 +6 -9 0 0 0 0 2 +9 -7.88 0 0 0 0 2 +8 -14 0 0 0 0 2 +28 0 13 1 28 0 2 +9 -12.38 0 0 0 0 2 +8 -15 0 1 -15 0 2 +10 -2.5 0 1 -2.5 0 2 +4 0 2 1 4 0 2 +12 -6 0 1 -6 0 2 +12 -16.5 0 1 -16.5 0 2 +4 -7.5 0 0 0 0 2 +10 -8.75 0 1 -8.75 0 2 +10 -18.75 0 1 10 0 2 +26 0 10 1 0 0 2 +12 -21 0 1 12 0 2 +2 -0.75 0 1 -0.75 0 2 +9 -9 0 1 -9 0 2 +10 -6.25 0 1 10 0 2 +8 -12 0 1 -12 0 2 +3 0 1 1 0 0 2 +5 -1.88 0 1 5 0 2 +6 -7.5 0 1 -7.5 0 2 +12 -13.5 0 1 12 0 2 +4 -7 0 0 0 0 2 +6 -8.25 0 1 -8.25 0 2 +6 -12 0 0 0 0 2 +6 -10.5 0 0 0 0 2 +4 -8 0 0 0 0 2 +6 -6 0 1 -6 0 2 +12 0 6 1 12 0 2 +12 -19.5 0 1 12 0 2 +19 0 8 1 19 0 2 +12 -15 0 0 0 0 2 +2 -1.75 0 0 0 0 2 +6 -3.75 0 0 0 0 2 +2 -1.25 0 0 0 0 2 +5 -1.25 0 1 -1.25 0 2 +4 -1.5 0 1 4 0 2 +8 -13 0 0 0 0 2 +12 -7.5 0 1 -7.5 0 2 +12 -3 0 1 -3 0 2 +2 -2.75 0 0 0 0 2 +7 0 3 1 7 0 2 +25 0 9 1 25 0 2 +2 -2 0 0 0 0 2 +12 -4.5 0 1 -4.5 0 2 +12 -12 0 1 12 0 2 +5 -3.13 0 1 5 0 2 +9 -11.25 0 0 0 0 2 +8 -9 0 1 -9 0 2 +2 -2.25 0 0 0 0 2 +9 -4.5 0 1 -4.5 0 2 +10 -3.75 0 1 10 0 2 +10 -10 0 1 10 0 2 +10 -12.5 0 0 0 0 2 +2 -2.5 0 0 0 0 3 +5 -5.63 0 0 0 0 3 +6 -7.5 0 0 0 0 3 +26 0 10 1 0 0 3 +9 -4.5 0 0 0 0 3 +2 -1.25 0 0 0 0 3 +8 -3 0 0 0 0 3 +25 0 9 0 9 0 3 +4 -4.5 0 0 0 0 3 +5 -10 0 0 0 0 3 +6 -9 0 0 0 0 3 +10 -6.25 0 0 0 0 3 +4 -4 0 0 0 0 3 +12 -3 0 0 0 0 3 +5 -5 0 0 0 0 3 +12 0 5 0 5 0 3 +6 -9.75 0 0 0 0 3 +19 0 8 0 8 0 3 +4 -7.5 0 0 0 0 3 +12 -9 0 0 0 0 3 +4 -6.5 0 0 0 0 3 +9 -5.63 0 0 0 0 3 +9 -18 0 0 0 0 3 +10 -11.25 0 0 0 0 3 +10 -13.75 0 0 0 0 3 +6 -12 0 0 0 0 3 +10 -12.5 0 0 0 0 3 +4 -7 0 0 0 0 3 +10 -7.5 0 0 0 0 3 +4 -8 0 0 0 0 3 +8 -11 0 0 0 0 3 +12 0 4 1 12 0 3 +9 -3.38 0 0 0 0 3 +10 -18.75 0 0 0 0 3 +2 -3.5 0 0 0 0 3 +2 -1 0 0 0 0 3 +2 -3.25 0 0 0 0 3 +2 0 1 0 1 0 3 +7 0 3 0 3 0 3 +8 0 3 0 3 0 3 +12 -6 0 0 0 0 3 +2 -0.5 0 1 2 0 3 +9 -7.88 0 0 0 0 3 +8 -15 0 0 0 0 3 +2 -1.5 0 0 0 0 3 +12 -22.5 0 0 0 0 3 +8 -7 0 0 0 0 3 +4 -5.5 0 0 0 0 3 +10 -8.75 0 0 0 0 3 +8 -9 0 0 0 0 3 +2 -4 0 0 0 0 3 +4 0 2 1 4 0 3 +8 -8 0 0 0 0 3 +9 -13.5 0 0 0 0 3 +9 -9 0 0 0 0 3 +6 -3.75 0 0 0 0 3 +13 0 6 0 6 0 3 +5 -1.88 0 1 5 0 3 +6 -6 0 0 0 0 3 +5 -6.88 0 0 0 0 3 +8 -16 0 0 0 0 3 +12 -7.5 0 0 0 0 3 +5 -1.25 0 1 -1.25 0 3 +9 -14.63 0 0 0 0 3 +8 -4 0 0 0 0 3 +10 -17.5 0 0 0 0 3 +5 -3.75 0 0 0 0 3 +6 -10.5 0 0 0 0 3 +13 0 5 1 13 0 3 +10 -16.25 0 0 0 0 3 +5 -7.5 0 0 0 0 3 +2 -1.75 0 0 0 0 3 +5 -9.38 0 0 0 0 3 +2 -2.75 0 0 0 0 3 +2 -0.75 0 1 -0.75 0 3 +5 
-8.13 0 0 0 0 3 +9 -11.25 0 0 0 0 3 +8 -13 0 0 0 0 3 +9 -16.88 0 0 0 0 3 +2 -2 0 0 0 0 3 +12 -18 0 0 0 0 3 +8 -2 0 1 -2 0 3 +2 -3 0 0 0 0 3 +6 -4.5 0 0 0 0 3 +5 0 2 1 5 0 3 +12 -19.5 0 0 0 0 3 +9 -15.75 0 0 0 0 3 +8 -6 0 0 0 0 3 +10 -2.5 0 1 -2.5 0 3 +9 -6.75 0 0 0 0 3 +6 -6.75 0 0 0 0 3 +2 -3.75 0 0 0 0 3 +10 -5 0 0 0 0 3 +2 -2.25 0 0 0 0 3 +26 0 12 0 12 0 3 +12 -13.5 0 0 0 0 3 +8 -5 0 0 0 0 3 +6 -3 0 0 0 0 3 +10 -3.75 0 0 0 0 3 +12 -10.5 0 0 0 0 3 +4 -5 0 0 0 0 3 +9 -2.25 0 0 0 0 3 +4 -3 0 0 0 0 3 +9 -10.13 0 0 0 0 3 +28 0 13 0 13 0 3 +22 0 10 0 10 0 3 +10 -10 0 0 0 0 3 +4 -1 0 0 0 0 3 +4 -2.5 0 0 0 0 3 +12 -24 0 0 0 0 3 +8 -12 0 0 0 0 3 +3 0 1 1 0 0 3 +9 -12.38 0 0 0 0 3 +23 0 10 0 10 0 3 +4 -3.5 0 0 0 0 3 +4 -1.5 0 0 0 0 3 +8 -10 0 0 0 0 3 +8 -14 0 0 0 0 3 +4 -6 0 0 0 0 3 +25 0 10 0 10 0 3 +12 -16.5 0 0 0 0 3 +12 -12 0 0 0 0 3 +5 -2.5 0 0 0 0 3 +5 -8.75 0 0 0 0 3 +12 -4.5 0 0 0 0 3 +12 -15 0 0 0 0 3 +5 -3.13 0 0 0 0 3 +12 -21 0 0 0 0 3 +5 -4.38 0 0 0 0 3 +6 -11.25 0 0 0 0 3 +30 0 12 0 12 0 3 +6 -1.5 0 1 6 0 3 +12 0 6 0 6 0 3 +4 -2 0 0 0 0 3 +10 -15 0 0 0 0 3 +6 -2.25 0 0 0 0 3 +10 -20 0 0 0 0 3 +6 -5.25 0 0 0 0 3 +5 -6.25 0 0 0 0 3 +6 -8.25 0 0 0 0 3 +4 -4.5 0 1 -4.5 0 4 +10 -12.5 0 0 0 0 4 +26 0 12 1 26 0 4 +6 -7.5 0 1 -7.5 0 4 +4 -6.5 0 0 0 0 4 +12 -4.5 0 1 -4.5 0 4 +5 -2.5 0 1 5 0 4 +6 -12 0 0 0 0 4 +9 -14.63 0 1 9 0 4 +6 -6 0 0 0 0 4 +22 0 10 1 22 0 4 +2 -1 0 1 2 0 4 +8 -3 0 1 8 0 4 +12 -9 0 0 0 0 4 +5 -3.75 0 1 5 0 4 +6 -3 0 1 6 0 4 +4 0 2 0 2 0 4 +28 0 13 1 28 0 4 +12 -15 0 0 0 0 4 +9 -11.25 0 0 0 0 4 +12 -10.5 0 1 12 0 4 +5 -1.88 0 1 5 0 4 +2 -2.75 0 0 0 0 4 +4 -7 0 0 0 0 4 +8 -4 0 1 8 0 4 +2 0 1 1 0 0 4 +2 -3.5 0 0 0 0 4 +2 -1.75 0 1 2 0 4 +5 -5 0 0 0 0 4 +12 -12 0 1 12 0 4 +12 0 6 1 12 0 4 +6 -4.5 0 0 0 0 4 +30 0 12 0 12 0 4 +12 -16.5 0 0 0 0 4 +6 -9.75 0 1 6 0 4 +12 -22.5 0 0 0 0 4 +6 -9 0 1 -9 0 4 +5 -3.13 0 0 0 0 4 +5 -9.38 0 0 0 0 4 +12 -7.5 0 1 -7.5 0 4 +5 0 2 1 5 0 4 +10 -15 0 0 0 0 4 +12 -3 0 1 -3 0 4 +13 0 6 0 6 0 4 +9 -16.88 0 0 0 0 4 +6 -11.25 0 0 0 0 4 +8 -5 0 1 8 0 4 +8 -14 0 0 0 0 4 +12 -24 0 1 -24 0 4 +12 0 5 1 12 0 4 +9 -13.5 0 0 0 0 4 +6 -1.5 0 1 6 0 4 +2 -3 0 0 0 0 4 +10 -2.5 0 1 -2.5 0 4 +2 -0.75 0 0 0 0 4 +6 -10.5 0 0 0 0 4 +2 -0.5 0 1 2 0 4 +10 -10 0 0 0 0 4 +8 -10 0 1 -10 0 4 +9 -12.38 0 0 0 0 4 +4 -6 0 0 0 0 4 +6 -2.25 0 1 6 0 4 +9 -15.75 0 0 0 0 4 +12 -13.5 0 0 0 0 4 +8 -6 0 0 0 0 4 +10 -18.75 0 0 0 0 4 +4 -2 0 0 0 0 4 +5 -1.25 0 1 -1.25 0 4 +6 -5.25 0 0 0 0 4 +4 -8 0 1 4 0 4 +25 0 9 1 25 0 4 +2 -3.25 0 0 0 0 4 +10 -11.25 0 1 -11.25 0 4 +4 -7.5 0 0 0 0 4 +9 -5.63 0 1 -5.63 0 4 +6 -6.75 0 0 0 0 4 +8 -2 0 1 -2 0 4 +5 -6.25 0 0 0 0 4 +23 0 10 0 10 0 4 +8 -13 0 0 0 0 4 +10 -13.75 0 0 0 0 4 +5 -10 0 1 5 0 4 +12 0 4 1 12 0 4 +2 -2.5 0 0 0 0 4 +19 0 8 1 19 0 4 +4 -4 0 0 0 0 4 +4 -1 0 1 -1 0 4 +4 -2.5 0 1 -2.5 0 4 +5 -8.13 0 0 0 0 4 +10 -3.75 0 1 10 0 4 +5 -8.75 0 0 0 0 4 +10 -7.5 0 1 -7.5 0 4 +10 -5 0 1 -5 0 4 +10 -20 0 0 0 0 4 +13 0 5 0 5 0 4 +8 -9 0 0 0 0 4 +8 -12 0 0 0 0 4 +10 -16.25 0 0 0 0 4 +5 -6.88 0 1 5 0 4 +4 -5.5 0 0 0 0 4 +5 -7.5 0 0 0 0 4 +9 -10.13 0 0 0 0 4 +6 -8.25 0 0 0 0 4 +26 0 10 0 10 0 4 +4 -5 0 0 0 0 4 +2 -2.25 0 1 2 0 4 +6 -3.75 0 1 -3.75 0 4 +8 -8 0 1 8 0 4 +9 -6.75 0 0 0 0 4 +8 -15 0 1 -15 0 4 +12 -6 0 1 -6 0 4 +25 0 10 0 10 0 4 +12 -19.5 0 0 0 0 4 +9 -7.88 0 0 0 0 4 +4 -1.5 0 1 4 0 4 +8 -7 0 0 0 0 4 +12 -18 0 1 -18 0 4 +2 -2 0 1 2 0 4 +9 -18 0 0 0 0 4 +2 -1.25 0 0 0 0 4 +8 -16 0 0 0 0 4 +5 -4.38 0 0 0 0 4 +2 -4 0 0 0 0 4 +5 -5.63 0 0 0 0 4 +8 0 3 1 0 0 4 +10 -17.5 0 0 0 0 4 +8 -11 0 0 0 0 4 +2 -1.5 0 1 2 0 4 +4 -3.5 0 0 0 0 4 +2 -3.75 0 
0 0 0 4 +3 0 1 1 0 0 4 +12 -21 0 0 0 0 4 +10 -8.75 0 0 0 0 4 +9 -9 0 1 -9 0 4 +4 -3 0 0 0 0 4 +7 0 3 1 7 0 4 +9 -3.38 0 1 -3.38 0 4 +9 -2.25 0 1 -2.25 0 4 +10 -6.25 0 0 0 0 4 +9 -4.5 0 1 -4.5 0 4 +2 -1 0 1 2 0 5 +9 -13.5 0 0 0 0 5 +5 -6.88 0 1 5 0 5 +10 -10 0 1 10 0 5 +6 -2.25 0 0 0 0 5 +6 -6.75 0 1 -6.75 0 5 +9 -4.5 0 0 0 0 5 +10 -13.75 0 0 0 0 5 +6 -8.25 0 0 0 0 5 +5 -10 0 0 0 0 5 +10 -6.25 0 1 10 0 5 +12 -3 0 1 -3 0 5 +12 -9 0 0 0 0 5 +8 -7 0 1 -7 0 5 +6 -12 0 0 0 0 5 +8 -2 0 1 -2 0 5 +12 -6 0 1 -6 0 5 +3 0 1 1 0 0 5 +10 -20 0 1 10 0 5 +5 -3.75 0 1 5 0 5 +2 -1.75 0 0 0 0 5 +6 -3.75 0 0 0 0 5 +9 -12.38 0 0 0 0 5 +5 -6.25 0 0 0 0 5 +12 0 4 0 4 0 5 +2 -1.5 0 1 2 0 5 +6 -5.25 0 0 0 0 5 +10 -18.75 0 0 0 0 5 +6 -6 0 1 -6 0 5 +12 0 5 0 5 0 5 +4 -2 0 1 4 0 5 +2 -4 0 0 0 0 5 +5 -2.5 0 1 5 0 5 +2 -3.75 0 0 0 0 5 +9 -15.75 0 0 0 0 5 +8 -4 0 1 8 0 5 +26 0 12 0 12 0 5 +6 -1.5 0 1 6 0 5 +4 -6 0 0 0 0 5 +10 -2.5 0 1 -2.5 0 5 +8 -12 0 0 0 0 5 +2 -3.5 0 0 0 0 5 +5 -5.63 0 1 -5.63 0 5 +12 -24 0 0 0 0 5 +25 0 10 0 10 0 5 +4 -6.5 0 0 0 0 5 +5 -9.38 0 0 0 0 5 +5 -7.5 0 1 -7.5 0 5 +4 -4 0 1 -4 0 5 +6 -10.5 0 1 -10.5 0 5 +13 0 6 1 13 0 5 +12 -22.5 0 0 0 0 5 +4 -7.5 0 0 0 0 5 +5 0 2 1 5 0 5 +10 -15 0 0 0 0 5 +9 -16.88 0 0 0 0 5 +2 -2.5 0 1 2 0 5 +10 -16.25 0 0 0 0 5 +6 -11.25 0 0 0 0 5 +4 -1.5 0 1 4 0 5 +5 -3.13 0 1 5 0 5 +6 -9 0 0 0 0 5 +12 -19.5 0 0 0 0 5 +10 -12.5 0 0 0 0 5 +2 -3 0 0 0 0 5 +8 -16 0 0 0 0 5 +4 0 2 1 4 0 5 +12 -7.5 0 0 0 0 5 +12 -13.5 0 0 0 0 5 +22 0 10 0 10 0 5 +12 -21 0 0 0 0 5 +7 0 3 0 3 0 5 +10 -8.75 0 1 -8.75 0 5 +2 -1.25 0 1 2 0 5 +9 -6.75 0 1 -6.75 0 5 +12 0 6 0 6 0 5 +28 0 13 0 13 0 5 +9 -10.13 0 0 0 0 5 +2 -0.5 0 1 2 0 5 +25 0 9 0 9 0 5 +6 -7.5 0 1 -7.5 0 5 +4 -3 0 1 4 0 5 +10 -3.75 0 1 10 0 5 +12 -4.5 0 1 -4.5 0 5 +12 -15 0 0 0 0 5 +6 -3 0 1 6 0 5 +9 -14.63 0 0 0 0 5 +5 -1.25 0 1 -1.25 0 5 +8 -11 0 0 0 0 5 +10 -17.5 0 0 0 0 5 +8 -10 0 0 0 0 5 +9 -9 0 0 0 0 5 +10 -11.25 0 0 0 0 5 +12 -12 0 0 0 0 5 +8 -14 0 0 0 0 5 +12 -16.5 0 0 0 0 5 +4 -7 0 0 0 0 5 +4 -1 0 1 -1 0 5 +5 -1.88 0 1 5 0 5 +8 0 3 0 3 0 5 +2 -3.25 0 1 -3.25 0 5 +5 -5 0 1 -5 0 5 +26 0 10 0 10 0 5 +12 -10.5 0 0 0 0 5 +2 0 1 0 1 0 5 +6 -9.75 0 0 0 0 5 +8 -3 0 1 8 0 5 +13 0 5 1 13 0 5 +10 -7.5 0 0 0 0 5 +8 -13 0 0 0 0 5 +9 -3.38 0 1 -3.38 0 5 +8 -15 0 0 0 0 5 +30 0 12 0 12 0 5 +8 -8 0 0 0 0 5 +8 -5 0 1 8 0 5 +12 -18 0 0 0 0 5 +10 -5 0 1 -5 0 5 +9 -11.25 0 1 9 0 5 +9 -7.88 0 0 0 0 5 +8 -6 0 1 -6 0 5 +6 -4.5 0 1 6 0 5 +8 -9 0 0 0 0 5 +4 -5.5 0 0 0 0 5 +4 -5 0 1 4 0 5 +9 -2.25 0 1 -2.25 0 5 +9 -5.63 0 1 -5.63 0 5 +4 -4.5 0 0 0 0 5 +4 -8 0 0 0 0 5 +19 0 8 0 8 0 5 +2 -2 0 1 2 0 5 +5 -8.13 0 1 5 0 5 +5 -4.38 0 1 -4.38 0 5 +2 -2.25 0 0 0 0 5 +2 -0.75 0 1 -0.75 0 5 +2 -2.75 0 0 0 0 5 +5 -8.75 0 0 0 0 5 +9 -18 0 0 0 0 5 +4 -3.5 0 1 4 0 5 +4 -2.5 0 1 -2.5 0 5 +9 -6.75 0 1 -6.75 0 6 +6 -6.75 0 1 -6.75 0 6 +6 -3 0 1 6 0 6 +2 -1.5 0 1 2 0 6 +4 -3 0 1 4 0 6 +5 -6.88 0 0 0 0 6 +12 -9 0 0 0 0 6 +4 -5 0 0 0 0 6 +5 -7.5 0 0 0 0 6 +4 -4 0 1 -4 0 6 +9 -5.63 0 1 -5.63 0 6 +9 -14.63 0 0 0 0 6 +5 -9.38 0 0 0 0 6 +6 -4.5 0 1 6 0 6 +8 -7 0 1 -7 0 6 +10 -16.25 0 0 0 0 6 +10 -17.5 0 0 0 0 6 +9 -16.88 0 0 0 0 6 +8 -5 0 1 8 0 6 +6 -1.5 0 1 6 0 6 +12 -18 0 0 0 0 6 +5 -6.25 0 0 0 0 6 +8 -4 0 1 8 0 6 +9 -15.75 0 0 0 0 6 +9 -13.5 0 0 0 0 6 +5 -8.13 0 0 0 0 6 +2 0 1 1 0 0 6 +2 -3.75 0 1 -3.75 0 6 +4 -6.5 0 0 0 0 6 +10 -5 0 1 -5 0 6 +12 -22.5 0 0 0 0 6 +2 -1 0 1 2 0 6 +13 0 6 0 6 0 6 +5 -2.5 0 1 5 0 6 +2 -0.5 0 1 2 0 6 +2 -3.25 0 0 0 0 6 +30 0 12 1 0 0 6 +8 -8 0 0 0 0 6 +4 -5.5 0 0 0 0 6 +23 0 10 1 0 0 6 +4 -3.5 0 1 4 0 6 +5 0 2 1 5 0 6 +8 0 3 1 0 0 6 +9 -10.13 
0 0 0 0 6 +8 -16 0 0 0 0 6 +12 -24 0 0 0 0 6 +9 -3.38 0 1 -3.38 0 6 +6 -5.25 0 0 0 0 6 +2 -4 0 0 0 0 6 +4 -1 0 1 -1 0 6 +6 -11.25 0 0 0 0 6 +5 -4.38 0 1 -4.38 0 6 +6 -2.25 0 1 6 0 6 +12 -10.5 0 0 0 0 6 +9 -18 0 0 0 0 6 +10 -20 0 0 0 0 6 +4 -4.5 0 1 -4.5 0 6 +9 -2.25 0 1 -2.25 0 6 +4 -6 0 0 0 0 6 +8 -10 0 0 0 0 6 +5 -5 0 1 -5 0 6 +5 -8.75 0 0 0 0 6 +8 -6 0 1 -6 0 6 +10 -13.75 0 0 0 0 6 +2 -2.5 0 1 2 0 6 +8 -11 0 0 0 0 6 +4 -2 0 1 4 0 6 +10 -7.5 0 1 -7.5 0 6 +22 0 10 0 10 0 6 +25 0 10 1 0 0 6 +6 -9.75 0 0 0 0 6 +12 0 5 0 5 0 6 +4 -2.5 0 1 -2.5 0 6 +8 -3 0 1 8 0 6 +10 -11.25 0 0 0 0 6 +5 -10 0 0 0 0 6 +10 -15 0 0 0 0 6 +2 -3.5 0 1 -3.5 0 6 +12 0 4 1 12 0 6 +13 0 5 0 5 0 6 +5 -3.75 0 1 5 0 6 +26 0 12 1 26 0 6 +5 -5.63 0 1 -5.63 0 6 +8 -2 0 1 -2 0 6 +2 -3 0 1 -3 0 6 +6 -9 0 0 0 0 6 +9 -7.88 0 1 -7.88 0 6 +8 -14 0 0 0 0 6 +28 0 13 0 13 0 6 +9 -12.38 0 0 0 0 6 +8 -15 0 0 0 0 6 +10 -2.5 0 1 -2.5 0 6 +4 0 2 1 4 0 6 +12 -6 0 1 -6 0 6 +12 -16.5 0 0 0 0 6 +4 -7.5 0 0 0 0 6 +10 -8.75 0 1 -8.75 0 6 +10 -18.75 0 0 0 0 6 +26 0 10 1 0 0 6 +12 -21 0 0 0 0 6 +2 -0.75 0 1 -0.75 0 6 +9 -9 0 1 -9 0 6 +10 -6.25 0 1 10 0 6 +8 -12 0 0 0 0 6 +3 0 1 1 0 0 6 +5 -1.88 0 1 5 0 6 +6 -7.5 0 0 0 0 6 +12 -13.5 0 1 12 0 6 +4 -7 0 0 0 0 6 +6 -8.25 0 0 0 0 6 +6 -12 0 0 0 0 6 +6 -10.5 0 0 0 0 6 +4 -8 0 0 0 0 6 +6 -6 0 1 -6 0 6 +12 0 6 0 6 0 6 +12 -19.5 0 0 0 0 6 +19 0 8 1 19 0 6 +12 -15 0 0 0 0 6 +2 -1.75 0 1 2 0 6 +6 -3.75 0 1 -3.75 0 6 +2 -1.25 0 1 2 0 6 +5 -1.25 0 1 -1.25 0 6 +4 -1.5 0 1 4 0 6 +8 -13 0 0 0 0 6 +12 -7.5 0 1 -7.5 0 6 +12 -3 0 1 -3 0 6 +2 -2.75 0 1 2 0 6 +7 0 3 1 7 0 6 +25 0 9 1 25 0 6 +2 -2 0 1 2 0 6 +12 -4.5 0 1 -4.5 0 6 +12 -12 0 0 0 0 6 +5 -3.13 0 1 5 0 6 +9 -11.25 0 0 0 0 6 +8 -9 0 0 0 0 6 +2 -2.25 0 1 2 0 6 +9 -4.5 0 1 -4.5 0 6 +10 -3.75 0 1 10 0 6 +10 -10 0 0 0 0 6 +10 -12.5 0 0 0 0 6 +2 -2.5 0 1 2 0 7 +5 -5.63 0 0 0 0 7 +6 -7.5 0 0 0 0 7 +26 0 10 1 0 0 7 +9 -4.5 0 1 -4.5 0 7 +2 -1.25 0 1 2 0 7 +8 -3 0 1 8 0 7 +25 0 9 1 25 0 7 +4 -4.5 0 1 -4.5 0 7 +5 -10 0 0 0 0 7 +6 -9 0 0 0 0 7 +10 -6.25 0 0 0 0 7 +4 -4 0 1 -4 0 7 +12 -3 0 1 -3 0 7 +5 -5 0 0 0 0 7 +12 0 5 1 12 0 7 +6 -9.75 0 0 0 0 7 +19 0 8 1 19 0 7 +4 -7.5 0 0 0 0 7 +12 -9 0 0 0 0 7 +4 -6.5 0 0 0 0 7 +9 -5.63 0 1 -5.63 0 7 +9 -18 0 0 0 0 7 +10 -11.25 0 0 0 0 7 +10 -13.75 0 0 0 0 7 +6 -12 0 0 0 0 7 +10 -12.5 0 0 0 0 7 +4 -7 0 0 0 0 7 +10 -7.5 0 0 0 0 7 +4 -8 0 0 0 0 7 +8 -11 0 0 0 0 7 +12 0 4 1 12 0 7 +9 -3.38 0 1 -3.38 0 7 +10 -18.75 0 0 0 0 7 +2 -3.5 0 0 0 0 7 +2 -1 0 1 2 0 7 +2 -3.25 0 0 0 0 7 +2 0 1 1 0 0 7 +7 0 3 1 7 0 7 +8 0 3 1 0 0 7 +12 -6 0 1 -6 0 7 +2 -0.5 0 1 2 0 7 +9 -7.88 0 0 0 0 7 +8 -15 0 0 0 0 7 +2 -1.5 0 1 2 0 7 +12 -22.5 0 0 0 0 7 +8 -7 0 1 -7 0 7 +4 -5.5 0 0 0 0 7 +10 -8.75 0 0 0 0 7 +8 -9 0 0 0 0 7 +2 -4 0 0 0 0 7 +4 0 2 1 4 0 7 +8 -8 0 0 0 0 7 +9 -13.5 0 0 0 0 7 +9 -9 0 0 0 0 7 +6 -3.75 0 1 -3.75 0 7 +13 0 6 0 6 0 7 +5 -1.88 0 1 5 0 7 +6 -6 0 0 0 0 7 +5 -6.88 0 0 0 0 7 +8 -16 0 0 0 0 7 +12 -7.5 0 1 -7.5 0 7 +5 -1.25 0 1 -1.25 0 7 +9 -14.63 0 0 0 0 7 +8 -4 0 1 8 0 7 +10 -17.5 0 0 0 0 7 +5 -3.75 0 1 5 0 7 +6 -10.5 0 0 0 0 7 +13 0 5 1 13 0 7 +10 -16.25 0 0 0 0 7 +5 -7.5 0 0 0 0 7 +2 -1.75 0 1 2 0 7 +5 -9.38 0 0 0 0 7 +2 -2.75 0 0 0 0 7 +2 -0.75 0 1 -0.75 0 7 +5 -8.13 0 0 0 0 7 +9 -11.25 0 0 0 0 7 +8 -13 0 0 0 0 7 +9 -16.88 0 0 0 0 7 +2 -2 0 0 0 0 7 +12 -18 0 0 0 0 7 +8 -2 0 1 -2 0 7 +2 -3 0 0 0 0 7 +6 -4.5 0 1 6 0 7 +5 0 2 1 5 0 7 +12 -19.5 0 0 0 0 7 +9 -15.75 0 0 0 0 7 +8 -6 0 0 0 0 7 +10 -2.5 0 1 -2.5 0 7 +9 -6.75 0 0 0 0 7 +6 -6.75 0 0 0 0 7 +2 -3.75 0 0 0 0 7 +10 -5 0 1 -5 0 7 +2 -2.25 0 0 0 0 7 +26 0 12 1 26 0 7 +12 -13.5 0 0 0 
0 7 +8 -5 0 0 0 0 7 +6 -3 0 1 6 0 7 +10 -3.75 0 1 10 0 7 +12 -10.5 0 0 0 0 7 +4 -5 0 0 0 0 7 +9 -2.25 0 1 -2.25 0 7 +4 -3 0 0 0 0 7 +9 -10.13 0 0 0 0 7 +28 0 13 0 13 0 7 +22 0 10 1 22 0 7 +10 -10 0 0 0 0 7 +4 -1 0 1 -1 0 7 +4 -2.5 0 0 0 0 7 +12 -24 0 0 0 0 7 +8 -12 0 0 0 0 7 +3 0 1 1 0 0 7 +9 -12.38 0 0 0 0 7 +23 0 10 1 0 0 7 +4 -3.5 0 0 0 0 7 +4 -1.5 0 1 4 0 7 +8 -10 0 0 0 0 7 +8 -14 0 0 0 0 7 +4 -6 0 0 0 0 7 +25 0 10 1 0 0 7 +12 -16.5 0 0 0 0 7 +12 -12 0 0 0 0 7 +5 -2.5 0 1 5 0 7 +5 -8.75 0 0 0 0 7 +12 -4.5 0 1 -4.5 0 7 +12 -15 0 0 0 0 7 +5 -3.13 0 0 0 0 7 +12 -21 0 1 12 0 7 +5 -4.38 0 0 0 0 7 +6 -11.25 0 0 0 0 7 +30 0 12 1 0 0 7 +6 -1.5 0 1 6 0 7 +12 0 6 1 12 0 7 +4 -2 0 1 4 0 7 +10 -15 0 0 0 0 7 +6 -2.25 0 1 6 0 7 +10 -20 0 0 0 0 7 +6 -5.25 0 0 0 0 7 +5 -6.25 0 0 0 0 7 +6 -8.25 0 0 0 0 7 +4 -4.5 0 1 -4.5 0 8 +10 -12.5 0 0 0 0 8 +26 0 12 0 12 0 8 +6 -7.5 0 0 0 0 8 +4 -6.5 0 0 0 0 8 +12 -4.5 0 1 -4.5 0 8 +5 -2.5 0 1 5 0 8 +6 -12 0 0 0 0 8 +9 -14.63 0 0 0 0 8 +6 -6 0 1 -6 0 8 +22 0 10 0 10 0 8 +2 -1 0 1 2 0 8 +8 -3 0 1 8 0 8 +12 -9 0 1 12 0 8 +5 -3.75 0 1 5 0 8 +6 -3 0 1 6 0 8 +4 0 2 0 2 0 8 +28 0 13 1 28 0 8 +12 -15 0 0 0 0 8 +9 -11.25 0 0 0 0 8 +12 -10.5 0 1 12 0 8 +5 -1.88 0 1 5 0 8 +2 -2.75 0 0 0 0 8 +4 -7 0 0 0 0 8 +8 -4 0 1 8 0 8 +2 0 1 0 1 0 8 +2 -3.5 0 0 0 0 8 +2 -1.75 0 1 2 0 8 +5 -5 0 1 -5 0 8 +12 -12 0 0 0 0 8 +12 0 6 0 6 0 8 +6 -4.5 0 1 6 0 8 +30 0 12 1 0 0 8 +12 -16.5 0 0 0 0 8 +6 -9.75 0 0 0 0 8 +12 -22.5 0 0 0 0 8 +6 -9 0 0 0 0 8 +5 -3.13 0 1 5 0 8 +5 -9.38 0 0 0 0 8 +12 -7.5 0 1 -7.5 0 8 +5 0 2 1 5 0 8 +10 -15 0 0 0 0 8 +12 -3 0 1 -3 0 8 +13 0 6 0 6 0 8 +9 -16.88 0 0 0 0 8 +6 -11.25 0 0 0 0 8 +8 -5 0 1 8 0 8 +8 -14 0 0 0 0 8 +12 -24 0 0 0 0 8 +12 0 5 0 5 0 8 +9 -13.5 0 0 0 0 8 +6 -1.5 0 1 6 0 8 +2 -3 0 0 0 0 8 +10 -2.5 0 1 -2.5 0 8 +2 -0.75 0 1 -0.75 0 8 +6 -10.5 0 0 0 0 8 +2 -0.5 0 1 2 0 8 +10 -10 0 0 0 0 8 +8 -10 0 0 0 0 8 +9 -12.38 0 0 0 0 8 +4 -6 0 0 0 0 8 +6 -2.25 0 1 6 0 8 +9 -15.75 0 0 0 0 8 +12 -13.5 0 0 0 0 8 +8 -6 0 1 -6 0 8 +10 -18.75 0 0 0 0 8 +4 -2 0 1 4 0 8 +5 -1.25 0 1 -1.25 0 8 +6 -5.25 0 1 6 0 8 +4 -8 0 0 0 0 8 +25 0 9 1 25 0 8 +2 -3.25 0 0 0 0 8 +10 -11.25 0 0 0 0 8 +4 -7.5 0 0 0 0 8 +9 -5.63 0 1 -5.63 0 8 +6 -6.75 0 1 -6.75 0 8 +8 -2 0 1 -2 0 8 +5 -6.25 0 0 0 0 8 +23 0 10 0 10 0 8 +8 -13 0 0 0 0 8 +10 -13.75 0 0 0 0 8 +5 -10 0 0 0 0 8 +12 0 4 1 12 0 8 +2 -2.5 0 1 2 0 8 +19 0 8 1 19 0 8 +4 -4 0 0 0 0 8 +4 -1 0 1 -1 0 8 +4 -2.5 0 1 -2.5 0 8 +5 -8.13 0 0 0 0 8 +10 -3.75 0 1 10 0 8 +5 -8.75 0 0 0 0 8 +10 -7.5 0 1 -7.5 0 8 +10 -5 0 1 -5 0 8 +10 -20 0 0 0 0 8 +13 0 5 0 5 0 8 +8 -9 0 0 0 0 8 +8 -12 0 0 0 0 8 +10 -16.25 0 0 0 0 8 +5 -6.88 0 0 0 0 8 +4 -5.5 0 0 0 0 8 +5 -7.5 0 0 0 0 8 +9 -10.13 0 0 0 0 8 +6 -8.25 0 0 0 0 8 +26 0 10 0 10 0 8 +4 -5 0 1 4 0 8 +2 -2.25 0 1 2 0 8 +6 -3.75 0 1 -3.75 0 8 +8 -8 0 1 8 0 8 +9 -6.75 0 1 -6.75 0 8 +8 -15 0 0 0 0 8 +12 -6 0 1 -6 0 8 +25 0 10 1 0 0 8 +12 -19.5 0 0 0 0 8 +9 -7.88 0 1 -7.88 0 8 +4 -1.5 0 1 4 0 8 +8 -7 0 1 -7 0 8 +12 -18 0 0 0 0 8 +2 -2 0 1 2 0 8 +9 -18 0 0 0 0 8 +2 -1.25 0 1 2 0 8 +8 -16 0 0 0 0 8 +5 -4.38 0 1 -4.38 0 8 +2 -4 0 0 0 0 8 +5 -5.63 0 1 -5.63 0 8 +8 0 3 0 3 0 8 +10 -17.5 0 0 0 0 8 +8 -11 0 0 0 0 8 +2 -1.5 0 1 2 0 8 +4 -3.5 0 1 4 0 8 +2 -3.75 0 0 0 0 8 +3 0 1 1 0 0 8 +12 -21 0 0 0 0 8 +10 -8.75 0 1 -8.75 0 8 +9 -9 0 1 -9 0 8 +4 -3 0 1 4 0 8 +7 0 3 0 3 0 8 +9 -3.38 0 1 -3.38 0 8 +9 -2.25 0 1 -2.25 0 8 +10 -6.25 0 1 10 0 8 +9 -4.5 0 1 -4.5 0 8 +2 -1 0 1 2 0 9 +9 -13.5 0 0 0 0 9 +5 -6.88 0 0 0 0 9 +10 -10 0 0 0 0 9 +6 -2.25 0 1 6 0 9 +6 -6.75 0 1 -6.75 0 9 +9 -4.5 0 1 -4.5 0 9 +10 -13.75 0 0 0 0 9 +6 
-8.25 0 0 0 0 9 +5 -10 0 0 0 0 9 +10 -6.25 0 1 10 0 9 +12 -3 0 1 -3 0 9 +12 -9 0 1 12 0 9 +8 -7 0 1 -7 0 9 +6 -12 0 0 0 0 9 +8 -2 0 1 -2 0 9 +12 -6 0 1 -6 0 9 +3 0 1 1 0 0 9 +10 -20 0 0 0 0 9 +5 -3.75 0 1 5 0 9 +2 -1.75 0 1 2 0 9 +6 -3.75 0 1 -3.75 0 9 +9 -12.38 0 0 0 0 9 +5 -6.25 0 0 0 0 9 +12 0 4 1 12 0 9 +2 -1.5 0 1 2 0 9 +6 -5.25 0 1 6 0 9 +10 -18.75 0 0 0 0 9 +6 -6 0 1 -6 0 9 +12 0 5 1 12 0 9 +4 -2 0 1 4 0 9 +2 -4 0 0 0 0 9 +5 -2.5 0 1 5 0 9 +9 -15.75 0 0 0 0 9 +8 -4 0 1 8 0 9 +26 0 12 1 26 0 9 +6 -1.5 0 1 6 0 9 +4 -6 0 1 4 0 9 +10 -2.5 0 1 -2.5 0 9 +8 -12 0 0 0 0 9 +2 -3.5 0 0 0 0 9 +5 -5.63 0 0 0 0 9 +12 -24 0 0 0 0 9 +25 0 10 1 0 0 9 +4 -6.5 0 0 0 0 9 +5 -9.38 0 0 0 0 9 +5 -7.5 0 0 0 0 9 +4 -4 0 1 -4 0 9 +6 -10.5 0 0 0 0 9 +13 0 6 1 13 0 9 +12 -22.5 0 0 0 0 9 +4 -7.5 0 0 0 0 9 +5 0 2 1 5 0 9 +10 -15 0 0 0 0 9 +9 -16.88 0 0 0 0 9 +2 -2.5 0 0 0 0 9 +10 -16.25 0 0 0 0 9 +6 -11.25 0 0 0 0 9 +4 -1.5 0 1 4 0 9 +5 -3.13 0 1 5 0 9 +6 -9 0 0 0 0 9 +12 -19.5 0 0 0 0 9 +10 -12.5 0 0 0 0 9 +2 -3 0 1 -3 0 9 +8 -16 0 0 0 0 9 +4 0 2 1 4 0 9 +12 -7.5 0 1 -7.5 0 9 +12 -13.5 0 0 0 0 9 +22 0 10 1 22 0 9 +12 -21 0 0 0 0 9 +7 0 3 1 7 0 9 +10 -8.75 0 0 0 0 9 +2 -1.25 0 1 2 0 9 +9 -6.75 0 1 -6.75 0 9 +12 0 6 1 12 0 9 +28 0 13 1 28 0 9 +9 -10.13 0 0 0 0 9 +2 -0.5 0 1 2 0 9 +25 0 9 1 25 0 9 +6 -7.5 0 1 -7.5 0 9 +4 -3 0 1 4 0 9 +10 -3.75 0 1 10 0 9 +12 -4.5 0 1 -4.5 0 9 +12 -15 0 0 0 0 9 +6 -3 0 1 6 0 9 +9 -14.63 0 0 0 0 9 +5 -1.25 0 1 -1.25 0 9 +8 -11 0 0 0 0 9 +10 -17.5 0 0 0 0 9 +8 -10 0 0 0 0 9 +9 -9 0 0 0 0 9 +10 -11.25 0 0 0 0 9 +12 -12 0 0 0 0 9 +8 -14 0 0 0 0 9 +12 -16.5 0 0 0 0 9 +4 -7 0 0 0 0 9 +4 -1 0 1 -1 0 9 +5 -1.88 0 1 5 0 9 +8 0 3 1 0 0 9 +2 -3.25 0 1 -3.25 0 9 +5 -5 0 1 -5 0 9 +26 0 10 1 0 0 9 +12 -10.5 0 0 0 0 9 +2 0 1 1 0 0 9 +6 -9.75 0 0 0 0 9 +8 -3 0 1 8 0 9 +13 0 5 1 13 0 9 +10 -7.5 0 1 -7.5 0 9 +8 -13 0 0 0 0 9 +9 -3.38 0 1 -3.38 0 9 +8 -15 0 0 0 0 9 +30 0 12 1 0 0 9 +8 -8 0 1 8 0 9 +8 -5 0 1 8 0 9 +12 -18 0 0 0 0 9 +10 -5 0 1 -5 0 9 +9 -11.25 0 0 0 0 9 +9 -7.88 0 1 -7.88 0 9 +8 -6 0 1 -6 0 9 +6 -4.5 0 1 6 0 9 +8 -9 0 0 0 0 9 +4 -5.5 0 1 -5.5 0 9 +4 -5 0 1 4 0 9 +9 -2.25 0 1 -2.25 0 9 +23 0 10 1 0 0 9 +9 -5.63 0 1 -5.63 0 9 +4 -4.5 0 1 -4.5 0 9 +4 -8 0 0 0 0 9 +19 0 8 1 19 0 9 +2 -2 0 1 2 0 9 +5 -8.13 0 0 0 0 9 +5 -4.38 0 1 -4.38 0 9 +2 -2.25 0 1 2 0 9 +2 -0.75 0 1 -0.75 0 9 +2 -2.75 0 0 0 0 9 +5 -8.75 0 0 0 0 9 +9 -18 0 0 0 0 9 +4 -3.5 0 1 4 0 9 +4 -2.5 0 1 -2.5 0 9 +4 -4.5 0 0 0 0 10 +10 -12.5 0 1 -12.5 0 10 +26 0 12 1 26 0 10 +6 -7.5 0 1 -7.5 0 10 +4 -6.5 0 1 4 0 10 +12 -4.5 0 1 -4.5 0 10 +5 -2.5 0 1 5 0 10 +6 -12 0 0 0 0 10 +9 -14.63 0 1 9 0 10 +6 -6 0 0 0 0 10 +22 0 10 0 10 0 10 +2 -1 0 1 2 0 10 +8 -3 0 1 8 0 10 +12 -9 0 1 12 0 10 +5 -3.75 0 1 5 0 10 +6 -3 0 1 6 0 10 +4 0 2 0 2 0 10 +28 0 13 0 13 0 10 +12 -15 0 1 -15 0 10 +9 -11.25 0 1 9 0 10 +12 -10.5 0 1 12 0 10 +5 -1.88 0 1 5 0 10 +2 -2.75 0 1 2 0 10 +4 -7 0 1 -7 0 10 +8 -4 0 1 8 0 10 +2 0 1 0 1 0 10 +2 -3.5 0 1 -3.5 0 10 +2 -1.75 0 1 2 0 10 +5 -5 0 1 -5 0 10 +12 -12 0 1 12 0 10 +12 0 6 0 6 0 10 +6 -4.5 0 1 6 0 10 +30 0 12 0 12 0 10 +12 -16.5 0 1 -16.5 0 10 +6 -9.75 0 1 6 0 10 +12 -22.5 0 0 0 0 10 +6 -9 0 1 -9 0 10 +5 -3.13 0 1 5 0 10 +5 -9.38 0 0 0 0 10 +12 -7.5 0 1 -7.5 0 10 +5 0 2 0 2 0 10 +10 -15 0 1 -15 0 10 +12 -3 0 1 -3 0 10 +13 0 6 1 13 0 10 +9 -16.88 0 1 9 0 10 +6 -11.25 0 1 6 0 10 +8 -5 0 1 8 0 10 +8 -14 0 1 8 0 10 +12 -24 0 1 -24 0 10 +12 0 5 1 12 0 10 +9 -13.5 0 1 9 0 10 +6 -1.5 0 1 6 0 10 +2 -3 0 1 -3 0 10 +10 -2.5 0 1 -2.5 0 10 +2 -0.75 0 1 -0.75 0 10 +6 -10.5 0 1 -10.5 0 10 +2 -0.5 0 1 2 0 10 +10 -10 0 1 10 0 10 
+8 -10 0 1 -10 0 10 +9 -12.38 0 1 -12.38 0 10 +4 -6 0 1 4 0 10 +6 -2.25 0 1 6 0 10 +9 -15.75 0 1 -15.75 0 10 +12 -13.5 0 1 12 0 10 +8 -6 0 1 -6 0 10 +10 -18.75 0 1 10 0 10 +4 -2 0 1 4 0 10 +5 -1.25 0 1 -1.25 0 10 +6 -5.25 0 1 6 0 10 +4 -8 0 1 4 0 10 +25 0 9 0 9 0 10 +2 -3.25 0 0 0 0 10 +10 -11.25 0 1 -11.25 0 10 +4 -7.5 0 1 -7.5 0 10 +9 -5.63 0 1 -5.63 0 10 +6 -6.75 0 1 -6.75 0 10 +8 -2 0 1 -2 0 10 +5 -6.25 0 1 5 0 10 +23 0 10 1 0 0 10 +8 -13 0 1 -13 0 10 +10 -13.75 0 1 -13.75 0 10 +5 -10 0 1 5 0 10 +12 0 4 1 12 0 10 +2 -2.5 0 1 2 0 10 +19 0 8 1 19 0 10 +4 -4 0 1 -4 0 10 +4 -1 0 1 -1 0 10 +4 -2.5 0 1 -2.5 0 10 +5 -8.13 0 1 5 0 10 +10 -3.75 0 1 10 0 10 +5 -8.75 0 1 5 0 10 +10 -7.5 0 1 -7.5 0 10 +10 -5 0 1 -5 0 10 +10 -20 0 1 10 0 10 +13 0 5 1 13 0 10 +8 -9 0 1 -9 0 10 +8 -12 0 0 0 0 10 +10 -16.25 0 0 0 0 10 +5 -6.88 0 1 5 0 10 +4 -5.5 0 1 -5.5 0 10 +5 -7.5 0 0 0 0 10 +9 -10.13 0 0 0 0 10 +6 -8.25 0 1 -8.25 0 10 +26 0 10 1 0 0 10 +4 -5 0 1 4 0 10 +2 -2.25 0 1 2 0 10 +6 -3.75 0 1 -3.75 0 10 +9 -6.75 0 1 -6.75 0 10 +8 -15 0 0 0 0 10 +12 -6 0 1 -6 0 10 +25 0 10 1 0 0 10 +12 -19.5 0 0 0 0 10 +9 -7.88 0 1 -7.88 0 10 +4 -1.5 0 1 4 0 10 +8 -7 0 1 -7 0 10 +12 -18 0 1 -18 0 10 +2 -2 0 1 2 0 10 +9 -18 0 1 -18 0 10 +2 -1.25 0 1 2 0 10 +8 -16 0 1 -16 0 10 +5 -4.38 0 1 -4.38 0 10 +2 -4 0 1 2 0 10 +5 -5.63 0 1 -5.63 0 10 +8 0 3 1 0 0 10 +10 -17.5 0 1 -17.5 0 10 +8 -11 0 1 -11 0 10 +2 -1.5 0 1 2 0 10 +4 -3.5 0 1 4 0 10 +2 -3.75 0 1 -3.75 0 10 +3 0 1 1 0 0 10 +12 -21 0 1 12 0 10 +10 -8.75 0 1 -8.75 0 10 +9 -9 0 1 -9 0 10 +4 -3 0 1 4 0 10 +7 0 3 1 7 0 10 +9 -3.38 0 1 -3.38 0 10 +9 -2.25 0 1 -2.25 0 10 +10 -6.25 0 1 10 0 10 +9 -4.5 0 1 -4.5 0 10 +2 -1 0 1 2 0 11 +9 -13.5 0 0 0 0 11 +5 -6.88 0 0 0 0 11 +10 -10 0 1 10 0 11 +6 -2.25 0 1 6 0 11 +6 -6.75 0 0 0 0 11 +9 -4.5 0 1 -4.5 0 11 +10 -13.75 0 0 0 0 11 +6 -8.25 0 0 0 0 11 +5 -10 0 0 0 0 11 +10 -6.25 0 1 10 0 11 +12 -3 0 1 -3 0 11 +12 -9 0 1 12 0 11 +8 -7 0 0 0 0 11 +6 -12 0 0 0 0 11 +8 -2 0 1 -2 0 11 +12 -6 0 1 -6 0 11 +3 0 1 1 0 0 11 +10 -20 0 0 0 0 11 +5 -3.75 0 1 5 0 11 +2 -1.75 0 1 2 0 11 +6 -3.75 0 1 -3.75 0 11 +9 -12.38 0 0 0 0 11 +5 -6.25 0 1 5 0 11 +12 0 4 0 4 0 11 +2 -1.5 0 1 2 0 11 +6 -5.25 0 0 0 0 11 +10 -18.75 0 0 0 0 11 +6 -6 0 1 -6 0 11 +12 0 5 0 5 0 11 +4 -2 0 1 4 0 11 +2 -4 0 0 0 0 11 +5 -2.5 0 1 5 0 11 +2 -3.75 0 0 0 0 11 +9 -15.75 0 0 0 0 11 +8 -4 0 1 8 0 11 +26 0 12 0 12 0 11 +6 -1.5 0 1 6 0 11 +4 -6 0 0 0 0 11 +10 -2.5 0 1 -2.5 0 11 +8 -12 0 0 0 0 11 +2 -3.5 0 0 0 0 11 +5 -5.63 0 0 0 0 11 +12 -24 0 0 0 0 11 +25 0 10 1 0 0 11 +4 -6.5 0 0 0 0 11 +5 -9.38 0 0 0 0 11 +5 -7.5 0 0 0 0 11 +4 -4 0 1 -4 0 11 +6 -10.5 0 0 0 0 11 +13 0 6 0 6 0 11 +12 -22.5 0 0 0 0 11 +4 -7.5 0 0 0 0 11 +10 -15 0 0 0 0 11 +9 -16.88 0 0 0 0 11 +2 -2.5 0 0 0 0 11 +10 -16.25 0 0 0 0 11 +6 -11.25 0 0 0 0 11 +4 -1.5 0 1 4 0 11 +5 -3.13 0 1 5 0 11 +6 -9 0 0 0 0 11 +12 -19.5 0 0 0 0 11 +10 -12.5 0 0 0 0 11 +2 -3 0 1 -3 0 11 +8 -16 0 0 0 0 11 +4 0 2 0 2 0 11 +12 -7.5 0 1 -7.5 0 11 +12 -13.5 0 0 0 0 11 +22 0 10 0 10 0 11 +12 -21 0 0 0 0 11 +7 0 3 0 3 0 11 +10 -8.75 0 1 -8.75 0 11 +2 -1.25 0 1 2 0 11 +9 -6.75 0 1 -6.75 0 11 +12 0 6 1 12 0 11 +28 0 13 0 13 0 11 +9 -10.13 0 0 0 0 11 +2 -0.5 0 1 2 0 11 +25 0 9 1 25 0 11 +6 -7.5 0 0 0 0 11 +4 -3 0 1 4 0 11 +10 -3.75 0 1 10 0 11 +12 -4.5 0 1 -4.5 0 11 +12 -15 0 0 0 0 11 +6 -3 0 1 6 0 11 +9 -14.63 0 0 0 0 11 +5 -1.25 0 1 -1.25 0 11 +8 -11 0 1 -11 0 11 +10 -17.5 0 0 0 0 11 +8 -10 0 0 0 0 11 +9 -9 0 1 -9 0 11 +10 -11.25 0 0 0 0 11 +12 -12 0 0 0 0 11 +8 -14 0 0 0 0 11 +12 -16.5 0 0 0 0 11 +4 -7 0 0 0 0 11 +4 -1 0 1 -1 0 11 +5 -1.88 0 1 
5 0 11 +8 0 3 1 0 0 11 +2 -3.25 0 0 0 0 11 +5 -5 0 1 -5 0 11 +26 0 10 0 10 0 11 +12 -10.5 0 1 12 0 11 +2 0 1 0 1 0 11 +6 -9.75 0 0 0 0 11 +8 -3 0 1 8 0 11 +13 0 5 1 13 0 11 +10 -7.5 0 1 -7.5 0 11 +8 -13 0 0 0 0 11 +9 -3.38 0 1 -3.38 0 11 +8 -15 0 0 0 0 11 +30 0 12 1 0 0 11 +8 -8 0 1 8 0 11 +8 -5 0 1 8 0 11 +12 -18 0 0 0 0 11 +10 -5 0 1 -5 0 11 +9 -11.25 0 0 0 0 11 +9 -7.88 0 1 -7.88 0 11 +8 -6 0 1 -6 0 11 +6 -4.5 0 1 6 0 11 +8 -9 0 0 0 0 11 +4 -5.5 0 0 0 0 11 +4 -5 0 0 0 0 11 +9 -2.25 0 1 -2.25 0 11 +23 0 10 0 10 0 11 +9 -5.63 0 1 -5.63 0 11 +4 -4.5 0 0 0 0 11 +4 -8 0 0 0 0 11 +19 0 8 1 19 0 11 +2 -2 0 1 2 0 11 +5 -8.13 0 0 0 0 11 +5 -4.38 0 1 -4.38 0 11 +2 -2.25 0 0 0 0 11 +2 -0.75 0 1 -0.75 0 11 +2 -2.75 0 0 0 0 11 +5 -8.75 0 0 0 0 11 +9 -18 0 0 0 0 11 +4 -3.5 0 1 4 0 11 +4 -2.5 0 1 -2.5 0 11 +9 -6.75 0 0 0 0 12 +6 -6.75 0 0 0 0 12 +6 -3 0 1 6 0 12 +2 -1.5 0 1 2 0 12 +4 -3 0 0 0 0 12 +5 -6.88 0 0 0 0 12 +12 -9 0 0 0 0 12 +4 -5 0 0 0 0 12 +5 -7.5 0 0 0 0 12 +4 -4 0 0 0 0 12 +9 -5.63 0 1 -5.63 0 12 +9 -14.63 0 1 9 0 12 +5 -9.38 0 0 0 0 12 +6 -4.5 0 0 0 0 12 +8 -7 0 0 0 0 12 +10 -16.25 0 0 0 0 12 +10 -17.5 0 0 0 0 12 +9 -16.88 0 0 0 0 12 +8 -5 0 1 8 0 12 +6 -1.5 0 1 6 0 12 +12 -18 0 0 0 0 12 +5 -6.25 0 0 0 0 12 +8 -4 0 1 8 0 12 +9 -15.75 0 0 0 0 12 +9 -13.5 0 0 0 0 12 +5 -8.13 0 0 0 0 12 +2 0 1 0 1 0 12 +2 -3.75 0 0 0 0 12 +4 -6.5 0 0 0 0 12 +10 -5 0 1 -5 0 12 +12 -22.5 0 0 0 0 12 +2 -1 0 1 2 0 12 +13 0 6 0 6 0 12 +5 -2.5 0 1 5 0 12 +2 -0.5 0 1 2 0 12 +2 -3.25 0 0 0 0 12 +30 0 12 0 12 0 12 +8 -8 0 0 0 0 12 +4 -5.5 0 0 0 0 12 +23 0 10 0 10 0 12 +4 -3.5 0 0 0 0 12 +5 0 2 0 2 0 12 +8 0 3 0 3 0 12 +9 -10.13 0 0 0 0 12 +8 -16 0 0 0 0 12 +12 -24 0 0 0 0 12 +9 -3.38 0 1 -3.38 0 12 +6 -5.25 0 0 0 0 12 +2 -4 0 0 0 0 12 +4 -1 0 1 -1 0 12 +6 -11.25 0 0 0 0 12 +5 -4.38 0 0 0 0 12 +6 -2.25 0 1 6 0 12 +12 -10.5 0 0 0 0 12 +9 -18 0 1 -18 0 12 +10 -20 0 1 10 0 12 +4 -4.5 0 0 0 0 12 +9 -2.25 0 1 -2.25 0 12 +4 -6 0 0 0 0 12 +8 -10 0 0 0 0 12 +5 -5 0 0 0 0 12 +5 -8.75 0 0 0 0 12 +8 -6 0 0 0 0 12 +10 -13.75 0 0 0 0 12 +2 -2.5 0 0 0 0 12 +8 -11 0 0 0 0 12 +4 -2 0 1 4 0 12 +10 -7.5 0 1 -7.5 0 12 +22 0 10 1 22 0 12 +25 0 10 0 10 0 12 +6 -9.75 0 0 0 0 12 +12 0 5 0 5 0 12 +4 -2.5 0 1 -2.5 0 12 +8 -3 0 1 8 0 12 +10 -11.25 0 0 0 0 12 +5 -10 0 1 5 0 12 +10 -15 0 1 -15 0 12 +2 -3.5 0 0 0 0 12 +12 0 4 1 12 0 12 +13 0 5 0 5 0 12 +5 -3.75 0 1 5 0 12 +26 0 12 0 12 0 12 +5 -5.63 0 0 0 0 12 +8 -2 0 1 -2 0 12 +2 -3 0 0 0 0 12 +6 -9 0 0 0 0 12 +9 -7.88 0 0 0 0 12 +8 -14 0 0 0 0 12 +28 0 13 1 28 0 12 +9 -12.38 0 0 0 0 12 +8 -15 0 0 0 0 12 +10 -2.5 0 1 -2.5 0 12 +4 0 2 0 2 0 12 +12 -6 0 1 -6 0 12 +12 -16.5 0 1 -16.5 0 12 +4 -7.5 0 0 0 0 12 +10 -8.75 0 1 -8.75 0 12 +10 -18.75 0 1 10 0 12 +26 0 10 0 10 0 12 +12 -21 0 0 0 0 12 +2 -0.75 0 1 -0.75 0 12 +9 -9 0 0 0 0 12 +10 -6.25 0 0 0 0 12 +8 -12 0 0 0 0 12 +3 0 1 1 0 0 12 +5 -1.88 0 1 5 0 12 +6 -7.5 0 0 0 0 12 +12 -13.5 0 0 0 0 12 +4 -7 0 0 0 0 12 +6 -8.25 0 0 0 0 12 +6 -12 0 0 0 0 12 +6 -10.5 0 0 0 0 12 +4 -8 0 0 0 0 12 +6 -6 0 0 0 0 12 +12 0 6 0 6 0 12 +12 -19.5 0 0 0 0 12 +19 0 8 0 8 0 12 +12 -15 0 0 0 0 12 +2 -1.75 0 0 0 0 12 +6 -3.75 0 1 -3.75 0 12 +2 -1.25 0 0 0 0 12 +5 -1.25 0 1 -1.25 0 12 +4 -1.5 0 1 4 0 12 +8 -13 0 0 0 0 12 +12 -7.5 0 0 0 0 12 +12 -3 0 0 0 0 12 +2 -2.75 0 0 0 0 12 +7 0 3 1 7 0 12 +25 0 9 0 9 0 12 +2 -2 0 0 0 0 12 +12 -4.5 0 1 -4.5 0 12 +12 -12 0 1 12 0 12 +5 -3.13 0 0 0 0 12 +9 -11.25 0 0 0 0 12 +8 -9 0 0 0 0 12 +2 -2.25 0 0 0 0 12 +9 -4.5 0 0 0 0 12 +10 -3.75 0 1 10 0 12 +10 -10 0 0 0 0 12 +10 -12.5 0 0 0 0 12 +2 -2.5 0 0 0 0 13 +5 -5.63 0 0 0 0 13 +6 -7.5 
0 0 0 0 13 +26 0 10 1 0 0 13 +9 -4.5 0 1 -4.5 0 13 +2 -1.25 0 1 2 0 13 +8 -3 0 1 8 0 13 +25 0 9 0 9 0 13 +4 -4.5 0 0 0 0 13 +5 -10 0 0 0 0 13 +6 -9 0 0 0 0 13 +10 -6.25 0 1 10 0 13 +4 -4 0 1 -4 0 13 +12 -3 0 1 -3 0 13 +5 -5 0 1 -5 0 13 +12 0 5 0 5 0 13 +6 -9.75 0 0 0 0 13 +19 0 8 0 8 0 13 +4 -7.5 0 0 0 0 13 +12 -9 0 1 12 0 13 +4 -6.5 0 0 0 0 13 +9 -5.63 0 1 -5.63 0 13 +9 -18 0 0 0 0 13 +10 -11.25 0 0 0 0 13 +10 -13.75 0 0 0 0 13 +6 -12 0 0 0 0 13 +10 -12.5 0 0 0 0 13 +4 -7 0 0 0 0 13 +10 -7.5 0 0 0 0 13 +4 -8 0 0 0 0 13 +8 -11 0 0 0 0 13 +12 0 4 0 4 0 13 +9 -3.38 0 1 -3.38 0 13 +10 -18.75 0 0 0 0 13 +2 -3.5 0 0 0 0 13 +2 -1 0 0 0 0 13 +2 -3.25 0 0 0 0 13 +2 0 1 0 1 0 13 +7 0 3 0 3 0 13 +8 0 3 0 3 0 13 +12 -6 0 1 -6 0 13 +2 -0.5 0 1 2 0 13 +9 -7.88 0 0 0 0 13 +8 -15 0 0 0 0 13 +2 -1.5 0 0 0 0 13 +12 -22.5 0 0 0 0 13 +8 -7 0 1 -7 0 13 +4 -5.5 0 0 0 0 13 +10 -8.75 0 0 0 0 13 +8 -9 0 0 0 0 13 +2 -4 0 0 0 0 13 +4 0 2 0 2 0 13 +8 -8 0 0 0 0 13 +9 -13.5 0 0 0 0 13 +9 -9 0 0 0 0 13 +6 -3.75 0 0 0 0 13 +13 0 6 0 6 0 13 +5 -1.88 0 1 5 0 13 +6 -6 0 0 0 0 13 +5 -6.88 0 0 0 0 13 +8 -16 0 0 0 0 13 +12 -7.5 0 0 0 0 13 +5 -1.25 0 0 0 0 13 +9 -14.63 0 0 0 0 13 +8 -4 0 1 8 0 13 +10 -17.5 0 0 0 0 13 +5 -3.75 0 0 0 0 13 +6 -10.5 0 0 0 0 13 +13 0 5 0 5 0 13 +10 -16.25 0 1 10 0 13 +5 -7.5 0 0 0 0 13 +2 -1.75 0 0 0 0 13 +5 -9.38 0 0 0 0 13 +2 -2.75 0 0 0 0 13 +2 -0.75 0 1 -0.75 0 13 +5 -8.13 0 0 0 0 13 +9 -11.25 0 0 0 0 13 +8 -13 0 0 0 0 13 +9 -16.88 0 0 0 0 13 +2 -2 0 0 0 0 13 +12 -18 0 0 0 0 13 +8 -2 0 0 0 0 13 +2 -3 0 0 0 0 13 +6 -4.5 0 1 6 0 13 +5 0 2 0 2 0 13 +12 -19.5 0 0 0 0 13 +9 -15.75 0 0 0 0 13 +8 -6 0 0 0 0 13 +10 -2.5 0 1 -2.5 0 13 +9 -6.75 0 1 -6.75 0 13 +6 -6.75 0 0 0 0 13 +2 -3.75 0 0 0 0 13 +10 -5 0 0 0 0 13 +2 -2.25 0 0 0 0 13 +26 0 12 0 12 0 13 +12 -13.5 0 0 0 0 13 +8 -5 0 0 0 0 13 +6 -3 0 1 6 0 13 +10 -3.75 0 0 0 0 13 +12 -10.5 0 0 0 0 13 +4 -5 0 0 0 0 13 +9 -2.25 0 1 -2.25 0 13 +4 -3 0 0 0 0 13 +9 -10.13 0 0 0 0 13 +28 0 13 0 13 0 13 +22 0 10 0 10 0 13 +10 -10 0 0 0 0 13 +4 -1 0 1 -1 0 13 +4 -2.5 0 0 0 0 13 +12 -24 0 0 0 0 13 +8 -12 0 0 0 0 13 +3 0 1 0 1 0 13 +9 -12.38 0 0 0 0 13 +23 0 10 0 10 0 13 +4 -3.5 0 0 0 0 13 +4 -1.5 0 0 0 0 13 +8 -10 0 0 0 0 13 +8 -14 0 0 0 0 13 +4 -6 0 0 0 0 13 +25 0 10 0 10 0 13 +12 -16.5 0 0 0 0 13 +12 -12 0 0 0 0 13 +5 -2.5 0 0 0 0 13 +5 -8.75 0 0 0 0 13 +12 -4.5 0 0 0 0 13 +12 -15 0 0 0 0 13 +5 -3.13 0 0 0 0 13 +12 -21 0 0 0 0 13 +5 -4.38 0 0 0 0 13 +6 -11.25 0 0 0 0 13 +30 0 12 0 12 0 13 +6 -1.5 0 0 0 0 13 +12 0 6 0 6 0 13 +4 -2 0 0 0 0 13 +10 -15 0 0 0 0 13 +6 -2.25 0 1 6 0 13 +10 -20 0 0 0 0 13 +6 -5.25 0 0 0 0 13 +5 -6.25 0 0 0 0 13 +6 -8.25 0 0 0 0 13 +2 -1 0 1 2 0 14 +9 -13.5 0 0 0 0 14 +5 -6.88 0 1 5 0 14 +10 -10 0 1 10 0 14 +6 -2.25 0 1 6 0 14 +6 -6.75 0 0 0 0 14 +9 -4.5 0 1 -4.5 0 14 +10 -13.75 0 1 -13.75 0 14 +6 -8.25 0 0 0 0 14 +5 -10 0 0 0 0 14 +10 -6.25 0 1 10 0 14 +12 -3 0 1 -3 0 14 +12 -9 0 0 0 0 14 +8 -7 0 1 -7 0 14 +6 -12 0 1 6 0 14 +8 -2 0 1 -2 0 14 +12 -6 0 1 -6 0 14 +3 0 1 0 1 0 14 +10 -20 0 1 10 0 14 +5 -3.75 0 1 5 0 14 +2 -1.75 0 1 2 0 14 +6 -3.75 0 1 -3.75 0 14 +9 -12.38 0 0 0 0 14 +5 -6.25 0 0 0 0 14 +12 0 4 1 12 0 14 +2 -1.5 0 1 2 0 14 +6 -5.25 0 0 0 0 14 +10 -18.75 0 0 0 0 14 +6 -6 0 1 -6 0 14 +12 0 5 0 5 0 14 +4 -2 0 1 4 0 14 +2 -4 0 0 0 0 14 +5 -2.5 0 1 5 0 14 +2 -3.75 0 0 0 0 14 +9 -15.75 0 1 -15.75 0 14 +8 -4 0 1 8 0 14 +26 0 12 0 12 0 14 +6 -1.5 0 1 6 0 14 +4 -6 0 0 0 0 14 +10 -2.5 0 1 -2.5 0 14 +8 -12 0 0 0 0 14 +2 -3.5 0 0 0 0 14 +5 -5.63 0 1 -5.63 0 14 +12 -24 0 0 0 0 14 +25 0 10 1 0 0 14 +4 -6.5 0 0 0 0 14 +5 -9.38 0 0 0 0 14 
+5 -7.5 0 0 0 0 14 +4 -4 0 0 0 0 14 +6 -10.5 0 1 -10.5 0 14 +13 0 6 0 6 0 14 +12 -22.5 0 0 0 0 14 +4 -7.5 0 0 0 0 14 +5 0 2 1 5 0 14 +10 -15 0 0 0 0 14 +9 -16.88 0 0 0 0 14 +2 -2.5 0 0 0 0 14 +10 -16.25 0 1 10 0 14 +6 -11.25 0 0 0 0 14 +4 -1.5 0 1 4 0 14 +5 -3.13 0 1 5 0 14 +6 -9 0 0 0 0 14 +12 -19.5 0 0 0 0 14 +10 -12.5 0 1 -12.5 0 14 +2 -3 0 0 0 0 14 +8 -16 0 1 -16 0 14 +4 0 2 1 4 0 14 +12 -7.5 0 1 -7.5 0 14 +12 -13.5 0 1 12 0 14 +22 0 10 0 10 0 14 +12 -21 0 0 0 0 14 +7 0 3 1 7 0 14 +10 -8.75 0 1 -8.75 0 14 +2 -1.25 0 0 0 0 14 +9 -6.75 0 1 -6.75 0 14 +12 0 6 1 12 0 14 +28 0 13 0 13 0 14 +9 -10.13 0 1 -10.13 0 14 +2 -0.5 0 1 2 0 14 +25 0 9 1 25 0 14 +6 -7.5 0 0 0 0 14 +4 -3 0 1 4 0 14 +10 -3.75 0 1 10 0 14 +12 -4.5 0 1 -4.5 0 14 +12 -15 0 1 -15 0 14 +6 -3 0 1 6 0 14 +9 -14.63 0 0 0 0 14 +5 -1.25 0 1 -1.25 0 14 +8 -11 0 0 0 0 14 +10 -17.5 0 0 0 0 14 +8 -10 0 0 0 0 14 +9 -9 0 1 -9 0 14 +10 -11.25 0 0 0 0 14 +12 -12 0 1 12 0 14 +8 -14 0 0 0 0 14 +12 -16.5 0 0 0 0 14 +4 -7 0 1 -7 0 14 +4 -1 0 1 -1 0 14 +5 -1.88 0 1 5 0 14 +8 0 3 1 0 0 14 +2 -3.25 0 0 0 0 14 +5 -5 0 1 -5 0 14 +26 0 10 1 0 0 14 +12 -10.5 0 1 12 0 14 +2 0 1 0 1 0 14 +6 -9.75 0 0 0 0 14 +8 -3 0 1 8 0 14 +13 0 5 0 5 0 14 +10 -7.5 0 1 -7.5 0 14 +8 -13 0 0 0 0 14 +9 -3.38 0 1 -3.38 0 14 +8 -15 0 0 0 0 14 +30 0 12 1 0 0 14 +8 -8 0 0 0 0 14 +8 -5 0 1 8 0 14 +12 -18 0 0 0 0 14 +10 -5 0 1 -5 0 14 +9 -11.25 0 0 0 0 14 +9 -7.88 0 1 -7.88 0 14 +8 -6 0 1 -6 0 14 +6 -4.5 0 1 6 0 14 +8 -9 0 1 -9 0 14 +4 -5.5 0 0 0 0 14 +4 -5 0 0 0 0 14 +9 -2.25 0 1 -2.25 0 14 +23 0 10 1 0 0 14 +9 -5.63 0 1 -5.63 0 14 +4 -4.5 0 1 -4.5 0 14 +4 -8 0 1 4 0 14 +19 0 8 0 8 0 14 +2 -2 0 1 2 0 14 +5 -8.13 0 1 5 0 14 +5 -4.38 0 1 -4.38 0 14 +2 -2.25 0 0 0 0 14 +2 -0.75 0 1 -0.75 0 14 +2 -2.75 0 0 0 0 14 +5 -8.75 0 0 0 0 14 +9 -18 0 0 0 0 14 +4 -3.5 0 1 4 0 14 +4 -2.5 0 1 -2.5 0 14 +9 -6.75 0 1 -6.75 0 15 +6 -6.75 0 1 -6.75 0 15 +6 -3 0 1 6 0 15 +2 -1.5 0 1 2 0 15 +4 -3 0 1 4 0 15 +5 -6.88 0 1 5 0 15 +12 -9 0 1 12 0 15 +4 -5 0 1 4 0 15 +5 -7.5 0 1 -7.5 0 15 +4 -4 0 1 -4 0 15 +9 -5.63 0 1 -5.63 0 15 +9 -14.63 0 0 0 0 15 +5 -9.38 0 1 5 0 15 +6 -4.5 0 1 6 0 15 +8 -7 0 1 -7 0 15 +10 -16.25 0 0 0 0 15 +10 -17.5 0 0 0 0 15 +9 -16.88 0 0 0 0 15 +8 -5 0 1 8 0 15 +6 -1.5 0 1 6 0 15 +12 -18 0 1 -18 0 15 +5 -6.25 0 1 5 0 15 +8 -4 0 1 8 0 15 +9 -15.75 0 0 0 0 15 +9 -13.5 0 1 9 0 15 +5 -8.13 0 1 5 0 15 +2 0 1 0 1 0 15 +2 -3.75 0 1 -3.75 0 15 +4 -6.5 0 1 4 0 15 +10 -5 0 1 -5 0 15 +12 -22.5 0 0 0 0 15 +2 -1 0 1 2 0 15 +13 0 6 0 6 0 15 +5 -2.5 0 1 5 0 15 +2 -0.5 0 1 2 0 15 +2 -3.25 0 1 -3.25 0 15 +30 0 12 1 0 0 15 +8 -8 0 1 8 0 15 +4 -5.5 0 1 -5.5 0 15 +23 0 10 1 0 0 15 +4 -3.5 0 1 4 0 15 +5 0 2 1 5 0 15 +8 0 3 1 0 0 15 +9 -10.13 0 0 0 0 15 +8 -16 0 0 0 0 15 +12 -24 0 0 0 0 15 +9 -3.38 0 1 -3.38 0 15 +6 -5.25 0 1 6 0 15 +2 -4 0 0 0 0 15 +4 -1 0 1 -1 0 15 +6 -11.25 0 0 0 0 15 +5 -4.38 0 1 -4.38 0 15 +6 -2.25 0 1 6 0 15 +12 -10.5 0 0 0 0 15 +9 -18 0 0 0 0 15 +10 -20 0 0 0 0 15 +4 -4.5 0 1 -4.5 0 15 +9 -2.25 0 1 -2.25 0 15 +4 -6 0 1 4 0 15 +8 -10 0 1 -10 0 15 +5 -5 0 1 -5 0 15 +5 -8.75 0 1 5 0 15 +8 -6 0 1 -6 0 15 +10 -13.75 0 0 0 0 15 +2 -2.5 0 1 2 0 15 +8 -11 0 0 0 0 15 +4 -2 0 1 4 0 15 +10 -7.5 0 1 -7.5 0 15 +22 0 10 1 22 0 15 +25 0 10 0 10 0 15 +6 -9.75 0 0 0 0 15 +12 0 5 1 12 0 15 +4 -2.5 0 1 -2.5 0 15 +8 -3 0 1 8 0 15 +10 -11.25 0 0 0 0 15 +5 -10 0 0 0 0 15 +10 -15 0 0 0 0 15 +2 -3.5 0 1 -3.5 0 15 +12 0 4 1 12 0 15 +13 0 5 1 13 0 15 +5 -3.75 0 1 5 0 15 +26 0 12 0 12 0 15 +5 -5.63 0 1 -5.63 0 15 +8 -2 0 1 -2 0 15 +2 -3 0 1 -3 0 15 +6 -9 0 0 0 0 15 +9 -7.88 0 1 -7.88 0 15 +8 -14 0 1 
8 0 15 +28 0 13 1 28 0 15 +9 -12.38 0 0 0 0 15 +8 -15 0 0 0 0 15 +10 -2.5 0 1 -2.5 0 15 +4 0 2 0 2 0 15 +12 -6 0 1 -6 0 15 +12 -16.5 0 0 0 0 15 +4 -7.5 0 1 -7.5 0 15 +10 -8.75 0 1 -8.75 0 15 +10 -18.75 0 1 10 0 15 +26 0 10 1 0 0 15 +12 -21 0 0 0 0 15 +2 -0.75 0 1 -0.75 0 15 +9 -9 0 1 -9 0 15 +10 -6.25 0 1 10 0 15 +8 -12 0 0 0 0 15 +3 0 1 1 0 0 15 +5 -1.88 0 1 5 0 15 +6 -7.5 0 1 -7.5 0 15 +12 -13.5 0 1 12 0 15 +4 -7 0 0 0 0 15 +6 -8.25 0 0 0 0 15 +6 -12 0 0 0 0 15 +6 -10.5 0 1 -10.5 0 15 +4 -8 0 1 4 0 15 +6 -6 0 1 -6 0 15 +12 0 6 0 6 0 15 +12 -19.5 0 0 0 0 15 +19 0 8 1 19 0 15 +12 -15 0 0 0 0 15 +2 -1.75 0 1 2 0 15 +6 -3.75 0 1 -3.75 0 15 +2 -1.25 0 1 2 0 15 +5 -1.25 0 1 -1.25 0 15 +4 -1.5 0 1 4 0 15 +8 -13 0 0 0 0 15 +12 -7.5 0 1 -7.5 0 15 +12 -3 0 1 -3 0 15 +2 -2.75 0 1 2 0 15 +7 0 3 1 7 0 15 +25 0 9 1 25 0 15 +2 -2 0 1 2 0 15 +12 -4.5 0 1 -4.5 0 15 +12 -12 0 1 12 0 15 +5 -3.13 0 1 5 0 15 +9 -11.25 0 0 0 0 15 +8 -9 0 1 -9 0 15 +2 -2.25 0 1 2 0 15 +9 -4.5 0 1 -4.5 0 15 +10 -3.75 0 1 10 0 15 +10 -10 0 0 0 0 15 +10 -12.5 0 0 0 0 15 +2 -2.5 0 1 2 0 16 +5 -5.63 0 0 0 0 16 +6 -7.5 0 0 0 0 16 +26 0 10 1 0 0 16 +9 -4.5 0 1 -4.5 0 16 +2 -1.25 0 1 2 0 16 +8 -3 0 1 8 0 16 +25 0 9 1 25 0 16 +4 -4.5 0 0 0 0 16 +5 -10 0 0 0 0 16 +6 -9 0 0 0 0 16 +10 -6.25 0 1 10 0 16 +4 -4 0 0 0 0 16 +12 -3 0 1 -3 0 16 +5 -5 0 0 0 0 16 +12 0 5 1 12 0 16 +6 -9.75 0 0 0 0 16 +19 0 8 1 19 0 16 +4 -7.5 0 0 0 0 16 +12 -9 0 0 0 0 16 +4 -6.5 0 0 0 0 16 +9 -5.63 0 1 -5.63 0 16 +9 -18 0 1 -18 0 16 +10 -11.25 0 1 -11.25 0 16 +10 -13.75 0 1 -13.75 0 16 +6 -12 0 0 0 0 16 +10 -12.5 0 0 0 0 16 +4 -7 0 0 0 0 16 +10 -7.5 0 1 -7.5 0 16 +4 -8 0 0 0 0 16 +8 -11 0 1 -11 0 16 +12 0 4 1 12 0 16 +9 -3.38 0 1 -3.38 0 16 +10 -18.75 0 0 0 0 16 +2 -3.5 0 0 0 0 16 +2 -1 0 1 2 0 16 +2 -3.25 0 1 -3.25 0 16 +2 0 1 1 0 0 16 +7 0 3 1 7 0 16 +8 0 3 0 3 0 16 +12 -6 0 1 -6 0 16 +2 -0.5 0 1 2 0 16 +9 -7.88 0 0 0 0 16 +8 -15 0 0 0 0 16 +2 -1.5 0 1 2 0 16 +12 -22.5 0 0 0 0 16 +8 -7 0 1 -7 0 16 +4 -5.5 0 1 -5.5 0 16 +10 -8.75 0 1 -8.75 0 16 +8 -9 0 0 0 0 16 +2 -4 0 0 0 0 16 +4 0 2 1 4 0 16 +8 -8 0 0 0 0 16 +9 -13.5 0 0 0 0 16 +9 -9 0 0 0 0 16 +6 -3.75 0 1 -3.75 0 16 +13 0 6 1 13 0 16 +5 -1.88 0 1 5 0 16 +6 -6 0 0 0 0 16 +5 -6.88 0 0 0 0 16 +8 -16 0 0 0 0 16 +12 -7.5 0 1 -7.5 0 16 +5 -1.25 0 1 -1.25 0 16 +9 -14.63 0 0 0 0 16 +8 -4 0 1 8 0 16 +10 -17.5 0 0 0 0 16 +5 -3.75 0 1 5 0 16 +6 -10.5 0 0 0 0 16 +13 0 5 1 13 0 16 +10 -16.25 0 0 0 0 16 +5 -7.5 0 1 -7.5 0 16 +2 -1.75 0 1 2 0 16 +5 -9.38 0 0 0 0 16 +2 -2.75 0 1 2 0 16 +2 -0.75 0 1 -0.75 0 16 +5 -8.13 0 1 5 0 16 +9 -11.25 0 1 9 0 16 +8 -13 0 0 0 0 16 +9 -16.88 0 1 9 0 16 +2 -2 0 1 2 0 16 +12 -18 0 1 -18 0 16 +8 -2 0 1 -2 0 16 +2 -3 0 0 0 0 16 +6 -4.5 0 1 6 0 16 +5 0 2 1 5 0 16 +12 -19.5 0 1 12 0 16 +9 -15.75 0 1 -15.75 0 16 +8 -6 0 1 -6 0 16 +10 -2.5 0 1 -2.5 0 16 +9 -6.75 0 1 -6.75 0 16 +6 -6.75 0 0 0 0 16 +2 -3.75 0 1 -3.75 0 16 +10 -5 0 1 -5 0 16 +2 -2.25 0 1 2 0 16 +26 0 12 1 26 0 16 +12 -13.5 0 1 12 0 16 +8 -5 0 1 8 0 16 +6 -3 0 1 6 0 16 +10 -3.75 0 1 10 0 16 +12 -10.5 0 1 12 0 16 +4 -5 0 1 4 0 16 +9 -2.25 0 1 -2.25 0 16 +4 -3 0 1 4 0 16 +9 -10.13 0 1 -10.13 0 16 +28 0 13 1 28 0 16 +22 0 10 0 10 0 16 +10 -10 0 0 0 0 16 +4 -1 0 1 -1 0 16 +4 -2.5 0 1 -2.5 0 16 +12 -24 0 0 0 0 16 +8 -12 0 0 0 0 16 +3 0 1 1 0 0 16 +9 -12.38 0 0 0 0 16 +23 0 10 1 0 0 16 +4 -3.5 0 1 4 0 16 +4 -1.5 0 1 4 0 16 +8 -10 0 1 -10 0 16 +8 -14 0 0 0 0 16 +4 -6 0 1 4 0 16 +25 0 10 1 0 0 16 +12 -16.5 0 0 0 0 16 +12 -12 0 1 12 0 16 +5 -2.5 0 1 5 0 16 +5 -8.75 0 1 5 0 16 +12 -4.5 0 1 -4.5 0 16 +12 -15 0 1 -15 0 16 +5 -3.13 0 1 5 0 16 +12 
-21 0 0 0 0 16 +5 -4.38 0 1 -4.38 0 16 +6 -11.25 0 0 0 0 16 +30 0 12 1 0 0 16 +6 -1.5 0 1 6 0 16 +12 0 6 1 12 0 16 +4 -2 0 1 4 0 16 +10 -15 0 0 0 0 16 +6 -2.25 0 1 6 0 16 +10 -20 0 0 0 0 16 +6 -5.25 0 1 6 0 16 +5 -6.25 0 1 5 0 16 +6 -8.25 0 1 -8.25 0 16 +4 -4.5 0 0 0 0 17 +10 -12.5 0 0 0 0 17 +26 0 12 0 12 0 17 +6 -7.5 0 0 0 0 17 +4 -6.5 0 0 0 0 17 +12 -4.5 0 0 0 0 17 +5 -2.5 0 0 0 0 17 +6 -12 0 0 0 0 17 +9 -14.63 0 0 0 0 17 +6 -6 0 0 0 0 17 +22 0 10 1 22 0 17 +2 -1 0 0 0 0 17 +8 -3 0 0 0 0 17 +12 -9 0 0 0 0 17 +5 -3.75 0 0 0 0 17 +6 -3 0 0 0 0 17 +4 0 2 1 4 0 17 +28 0 13 1 28 0 17 +12 -15 0 0 0 0 17 +9 -11.25 0 0 0 0 17 +12 -10.5 0 0 0 0 17 +5 -1.88 0 0 0 0 17 +2 -2.75 0 0 0 0 17 +4 -7 0 0 0 0 17 +8 -4 0 0 0 0 17 +2 0 1 1 0 0 17 +2 -3.5 0 0 0 0 17 +2 -1.75 0 0 0 0 17 +5 -5 0 0 0 0 17 +12 -12 0 0 0 0 17 +12 0 6 1 12 0 17 +6 -4.5 0 0 0 0 17 +30 0 12 1 0 0 17 +12 -16.5 0 0 0 0 17 +6 -9.75 0 0 0 0 17 +12 -22.5 0 0 0 0 17 +6 -9 0 0 0 0 17 +5 -3.13 0 0 0 0 17 +5 -9.38 0 0 0 0 17 +12 -7.5 0 1 -7.5 0 17 +5 0 2 1 5 0 17 +10 -15 0 0 0 0 17 +12 -3 0 0 0 0 17 +13 0 6 1 13 0 17 +9 -16.88 0 0 0 0 17 +6 -11.25 0 0 0 0 17 +8 -5 0 0 0 0 17 +8 -14 0 0 0 0 17 +12 -24 0 0 0 0 17 +12 0 5 1 12 0 17 +9 -13.5 0 0 0 0 17 +6 -1.5 0 1 6 0 17 +2 -3 0 0 0 0 17 +10 -2.5 0 1 -2.5 0 17 +2 -0.75 0 0 0 0 17 +6 -10.5 0 0 0 0 17 +2 -0.5 0 0 0 0 17 +10 -10 0 0 0 0 17 +8 -10 0 0 0 0 17 +9 -12.38 0 0 0 0 17 +4 -6 0 0 0 0 17 +6 -2.25 0 0 0 0 17 +9 -15.75 0 0 0 0 17 +12 -13.5 0 0 0 0 17 +8 -6 0 0 0 0 17 +10 -18.75 0 0 0 0 17 +4 -2 0 0 0 0 17 +5 -1.25 0 1 -1.25 0 17 +6 -5.25 0 0 0 0 17 +4 -8 0 0 0 0 17 +25 0 9 1 25 0 17 +2 -3.25 0 0 0 0 17 +10 -11.25 0 0 0 0 17 +4 -7.5 0 0 0 0 17 +9 -5.63 0 0 0 0 17 +6 -6.75 0 0 0 0 17 +8 -2 0 0 0 0 17 +5 -6.25 0 0 0 0 17 +23 0 10 1 0 0 17 +8 -13 0 0 0 0 17 +10 -13.75 0 0 0 0 17 +5 -10 0 0 0 0 17 +12 0 4 1 12 0 17 +2 -2.5 0 0 0 0 17 +19 0 8 1 19 0 17 +4 -4 0 0 0 0 17 +4 -1 0 1 -1 0 17 +4 -2.5 0 0 0 0 17 +5 -8.13 0 0 0 0 17 +10 -3.75 0 0 0 0 17 +5 -8.75 0 0 0 0 17 +10 -7.5 0 0 0 0 17 +10 -5 0 0 0 0 17 +10 -20 0 0 0 0 17 +13 0 5 1 13 0 17 +8 -9 0 0 0 0 17 +8 -12 0 0 0 0 17 +10 -16.25 0 0 0 0 17 +5 -6.88 0 0 0 0 17 +4 -5.5 0 0 0 0 17 +5 -7.5 0 0 0 0 17 +9 -10.13 0 0 0 0 17 +6 -8.25 0 0 0 0 17 +26 0 10 1 0 0 17 +4 -5 0 0 0 0 17 +2 -2.25 0 0 0 0 17 +6 -3.75 0 0 0 0 17 +8 -8 0 0 0 0 17 +9 -6.75 0 0 0 0 17 +8 -15 0 0 0 0 17 +12 -6 0 0 0 0 17 +25 0 10 1 0 0 17 +12 -19.5 0 0 0 0 17 +9 -7.88 0 0 0 0 17 +4 -1.5 0 0 0 0 17 +8 -7 0 0 0 0 17 +12 -18 0 0 0 0 17 +2 -2 0 0 0 0 17 +9 -18 0 0 0 0 17 +2 -1.25 0 0 0 0 17 +8 -16 0 0 0 0 17 +5 -4.38 0 0 0 0 17 +2 -4 0 0 0 0 17 +5 -5.63 0 0 0 0 17 +8 0 3 1 0 0 17 +10 -17.5 0 0 0 0 17 +8 -11 0 0 0 0 17 +2 -1.5 0 0 0 0 17 +4 -3.5 0 0 0 0 17 +2 -3.75 0 0 0 0 17 +3 0 1 1 0 0 17 +12 -21 0 0 0 0 17 +10 -8.75 0 0 0 0 17 +9 -9 0 0 0 0 17 +4 -3 0 0 0 0 17 +7 0 3 1 7 0 17 +9 -3.38 0 0 0 0 17 +9 -2.25 0 0 0 0 17 +10 -6.25 0 0 0 0 17 +9 -4.5 0 0 0 0 17 +2 -1 0 1 2 0 18 +9 -13.5 0 0 0 0 18 +5 -6.88 0 1 5 0 18 +10 -10 0 0 0 0 18 +6 -2.25 0 1 6 0 18 +6 -6.75 0 0 0 0 18 +9 -4.5 0 1 -4.5 0 18 +10 -13.75 0 1 -13.75 0 18 +6 -8.25 0 1 -8.25 0 18 +5 -10 0 0 0 0 18 +10 -6.25 0 1 10 0 18 +12 -3 0 1 -3 0 18 +12 -9 0 1 12 0 18 +8 -7 0 0 0 0 18 +6 -12 0 0 0 0 18 +8 -2 0 1 -2 0 18 +12 -6 0 1 -6 0 18 +3 0 1 1 0 0 18 +10 -20 0 0 0 0 18 +5 -3.75 0 1 5 0 18 +2 -1.75 0 1 2 0 18 +6 -3.75 0 1 -3.75 0 18 +9 -12.38 0 0 0 0 18 +5 -6.25 0 0 0 0 18 +12 0 4 0 4 0 18 +2 -1.5 0 1 2 0 18 +6 -5.25 0 1 6 0 18 +10 -18.75 0 0 0 0 18 +6 -6 0 0 0 0 18 +12 0 5 0 5 0 18 +4 -2 0 1 4 0 18 +2 -4 0 0 0 0 18 +5 -2.5 0 1 5 0 
18 +2 -3.75 0 0 0 0 18 +9 -15.75 0 0 0 0 18 +8 -4 0 1 8 0 18 +26 0 12 0 12 0 18 +6 -1.5 0 1 6 0 18 +4 -6 0 0 0 0 18 +10 -2.5 0 1 -2.5 0 18 +8 -12 0 0 0 0 18 +2 -3.5 0 1 -3.5 0 18 +5 -5.63 0 0 0 0 18 +12 -24 0 0 0 0 18 +25 0 10 0 10 0 18 +4 -6.5 0 0 0 0 18 +5 -9.38 0 0 0 0 18 +5 -7.5 0 0 0 0 18 +4 -4 0 0 0 0 18 +6 -10.5 0 0 0 0 18 +13 0 6 0 6 0 18 +12 -22.5 0 0 0 0 18 +4 -7.5 0 0 0 0 18 +5 0 2 1 5 0 18 +10 -15 0 0 0 0 18 +9 -16.88 0 0 0 0 18 +2 -2.5 0 1 2 0 18 +10 -16.25 0 0 0 0 18 +6 -11.25 0 0 0 0 18 +4 -1.5 0 1 4 0 18 +5 -3.13 0 1 5 0 18 +6 -9 0 0 0 0 18 +12 -19.5 0 0 0 0 18 +10 -12.5 0 0 0 0 18 +8 -16 0 0 0 0 18 +4 0 2 1 4 0 18 +12 -7.5 0 1 -7.5 0 18 +12 -13.5 0 0 0 0 18 +22 0 10 0 10 0 18 +12 -21 0 0 0 0 18 +7 0 3 1 7 0 18 +10 -8.75 0 1 -8.75 0 18 +2 -1.25 0 1 2 0 18 +9 -6.75 0 1 -6.75 0 18 +12 0 6 0 6 0 18 +28 0 13 0 13 0 18 +9 -10.13 0 0 0 0 18 +2 -0.5 0 1 2 0 18 +25 0 9 1 25 0 18 +6 -7.5 0 0 0 0 18 +4 -3 0 1 4 0 18 +10 -3.75 0 1 10 0 18 +12 -4.5 0 1 -4.5 0 18 +12 -15 0 0 0 0 18 +6 -3 0 1 6 0 18 +9 -14.63 0 0 0 0 18 +5 -1.25 0 1 -1.25 0 18 +8 -11 0 0 0 0 18 +10 -17.5 0 0 0 0 18 +8 -10 0 0 0 0 18 +9 -9 0 1 -9 0 18 +10 -11.25 0 0 0 0 18 +12 -12 0 1 12 0 18 +8 -14 0 0 0 0 18 +12 -16.5 0 0 0 0 18 +4 -7 0 0 0 0 18 +4 -1 0 1 -1 0 18 +5 -1.88 0 1 5 0 18 +8 0 3 1 0 0 18 +2 -3.25 0 0 0 0 18 +5 -5 0 0 0 0 18 +26 0 10 0 10 0 18 +12 -10.5 0 1 12 0 18 +2 0 1 1 0 0 18 +6 -9.75 0 0 0 0 18 +8 -3 0 1 8 0 18 +13 0 5 0 5 0 18 +10 -7.5 0 0 0 0 18 +8 -13 0 0 0 0 18 +9 -3.38 0 1 -3.38 0 18 +8 -15 0 0 0 0 18 +30 0 12 1 0 0 18 +8 -8 0 0 0 0 18 +8 -5 0 1 8 0 18 +12 -18 0 0 0 0 18 +10 -5 0 1 -5 0 18 +9 -11.25 0 0 0 0 18 +9 -7.88 0 1 -7.88 0 18 +8 -6 0 1 -6 0 18 +6 -4.5 0 1 6 0 18 +8 -9 0 0 0 0 18 +4 -5.5 0 0 0 0 18 +4 -5 0 0 0 0 18 +9 -2.25 0 1 -2.25 0 18 +23 0 10 0 10 0 18 +9 -5.63 0 1 -5.63 0 18 +4 -4.5 0 0 0 0 18 +4 -8 0 0 0 0 18 +19 0 8 0 8 0 18 +2 -2 0 0 0 0 18 +5 -8.13 0 0 0 0 18 +5 -4.38 0 1 -4.38 0 18 +2 -2.25 0 1 2 0 18 +2 -0.75 0 1 -0.75 0 18 +2 -2.75 0 0 0 0 18 +5 -8.75 0 0 0 0 18 +9 -18 0 0 0 0 18 +4 -3.5 0 0 0 0 18 +4 -2.5 0 1 -2.5 0 18 +9 -6.75 0 1 -6.75 0 19 +6 -6.75 0 0 0 0 19 +6 -3 0 1 6 0 19 +2 -1.5 0 0 0 0 19 +4 -3 0 0 0 0 19 +5 -6.88 0 0 0 0 19 +12 -9 0 1 12 0 19 +4 -5 0 0 0 0 19 +5 -7.5 0 0 0 0 19 +4 -4 0 1 -4 0 19 +9 -5.63 0 1 -5.63 0 19 +9 -14.63 0 0 0 0 19 +5 -9.38 0 0 0 0 19 +6 -4.5 0 1 6 0 19 +8 -7 0 1 -7 0 19 +10 -16.25 0 0 0 0 19 +10 -17.5 0 0 0 0 19 +9 -16.88 0 0 0 0 19 +8 -5 0 1 8 0 19 +6 -1.5 0 1 6 0 19 +12 -18 0 0 0 0 19 +5 -6.25 0 0 0 0 19 +8 -4 0 1 8 0 19 +9 -15.75 0 0 0 0 19 +9 -13.5 0 0 0 0 19 +5 -8.13 0 0 0 0 19 +2 0 1 0 1 0 19 +2 -3.75 0 0 0 0 19 +4 -6.5 0 0 0 0 19 +10 -5 0 1 -5 0 19 +12 -22.5 0 0 0 0 19 +2 -1 0 1 2 0 19 +13 0 6 1 13 0 19 +5 -2.5 0 1 5 0 19 +2 -0.5 0 1 2 0 19 +2 -3.25 0 0 0 0 19 +30 0 12 1 0 0 19 +8 -8 0 0 0 0 19 +4 -5.5 0 0 0 0 19 +23 0 10 1 0 0 19 +4 -3.5 0 0 0 0 19 +5 0 2 1 5 0 19 +8 0 3 1 0 0 19 +9 -10.13 0 0 0 0 19 +8 -16 0 0 0 0 19 +12 -24 0 0 0 0 19 +9 -3.38 0 1 -3.38 0 19 +6 -5.25 0 0 0 0 19 +2 -4 0 0 0 0 19 +4 -1 0 1 -1 0 19 +6 -11.25 0 0 0 0 19 +5 -4.38 0 0 0 0 19 +6 -2.25 0 1 6 0 19 +12 -10.5 0 1 12 0 19 +9 -18 0 0 0 0 19 +10 -20 0 0 0 0 19 +4 -4.5 0 0 0 0 19 +9 -2.25 0 1 -2.25 0 19 +4 -6 0 0 0 0 19 +8 -10 0 1 -10 0 19 +5 -5 0 0 0 0 19 +5 -8.75 0 0 0 0 19 +8 -6 0 1 -6 0 19 +10 -13.75 0 0 0 0 19 +2 -2.5 0 0 0 0 19 +8 -11 0 0 0 0 19 +4 -2 0 1 4 0 19 +10 -7.5 0 1 -7.5 0 19 +22 0 10 1 22 0 19 +25 0 10 1 0 0 19 +6 -9.75 0 0 0 0 19 +12 0 5 1 12 0 19 +4 -2.5 0 0 0 0 19 +8 -3 0 1 8 0 19 +10 -11.25 0 0 0 0 19 +5 -10 0 0 0 0 19 +10 -15 0 0 0 0 19 +2 -3.5 
0 0 0 0 19 +12 0 4 1 12 0 19 +13 0 5 1 13 0 19 +5 -3.75 0 0 0 0 19 +26 0 12 1 26 0 19 +5 -5.63 0 0 0 0 19 +8 -2 0 1 -2 0 19 +2 -3 0 0 0 0 19 +6 -9 0 0 0 0 19 +9 -7.88 0 0 0 0 19 +8 -14 0 0 0 0 19 +28 0 13 1 28 0 19 +9 -12.38 0 0 0 0 19 +8 -15 0 0 0 0 19 +10 -2.5 0 1 -2.5 0 19 +4 0 2 1 4 0 19 +12 -6 0 1 -6 0 19 +12 -16.5 0 0 0 0 19 +4 -7.5 0 0 0 0 19 +10 -8.75 0 0 0 0 19 +10 -18.75 0 0 0 0 19 +26 0 10 1 0 0 19 +12 -21 0 0 0 0 19 +2 -0.75 0 1 -0.75 0 19 +9 -9 0 0 0 0 19 +10 -6.25 0 1 10 0 19 +8 -12 0 0 0 0 19 +3 0 1 1 0 0 19 +5 -1.88 0 1 5 0 19 +6 -7.5 0 0 0 0 19 +12 -13.5 0 0 0 0 19 +4 -7 0 0 0 0 19 +6 -8.25 0 0 0 0 19 +6 -12 0 0 0 0 19 +6 -10.5 0 0 0 0 19 +4 -8 0 0 0 0 19 +6 -6 0 0 0 0 19 +12 0 6 0 6 0 19 +12 -19.5 0 0 0 0 19 +19 0 8 1 19 0 19 +12 -15 0 0 0 0 19 +2 -1.75 0 1 2 0 19 +6 -3.75 0 0 0 0 19 +2 -1.25 0 0 0 0 19 +5 -1.25 0 1 -1.25 0 19 +4 -1.5 0 1 4 0 19 +8 -13 0 0 0 0 19 +12 -7.5 0 1 -7.5 0 19 +12 -3 0 1 -3 0 19 +2 -2.75 0 0 0 0 19 +7 0 3 1 7 0 19 +25 0 9 1 25 0 19 +2 -2 0 0 0 0 19 +12 -4.5 0 1 -4.5 0 19 +12 -12 0 0 0 0 19 +5 -3.13 0 1 5 0 19 +9 -11.25 0 0 0 0 19 +8 -9 0 0 0 0 19 +2 -2.25 0 0 0 0 19 +9 -4.5 0 1 -4.5 0 19 +10 -3.75 0 1 10 0 19 +10 -10 0 0 0 0 19 +10 -12.5 0 0 0 0 19 +2 -2.5 0 1 2 0 20 +5 -5.63 0 1 -5.63 0 20 +6 -7.5 0 0 0 0 20 +26 0 10 0 10 0 20 +9 -4.5 0 1 -4.5 0 20 +2 -1.25 0 1 2 0 20 +8 -3 0 1 8 0 20 +25 0 9 0 9 0 20 +4 -4.5 0 1 -4.5 0 20 +5 -10 0 0 0 0 20 +6 -9 0 1 -9 0 20 +10 -6.25 0 1 10 0 20 +4 -4 0 1 -4 0 20 +12 -3 0 1 -3 0 20 +5 -5 0 0 0 0 20 +12 0 5 1 12 0 20 +6 -9.75 0 0 0 0 20 +19 0 8 0 8 0 20 +4 -7.5 0 0 0 0 20 +12 -9 0 1 12 0 20 +4 -6.5 0 0 0 0 20 +9 -5.63 0 1 -5.63 0 20 +9 -18 0 1 -18 0 20 +10 -11.25 0 1 -11.25 0 20 +10 -13.75 0 0 0 0 20 +6 -12 0 0 0 0 20 +10 -12.5 0 1 -12.5 0 20 +4 -7 0 0 0 0 20 +10 -7.5 0 1 -7.5 0 20 +4 -8 0 0 0 0 20 +8 -11 0 1 -11 0 20 +12 0 4 0 4 0 20 +9 -3.38 0 1 -3.38 0 20 +10 -18.75 0 0 0 0 20 +2 -3.5 0 1 -3.5 0 20 +2 -1 0 1 2 0 20 +2 -3.25 0 0 0 0 20 +2 0 1 0 1 0 20 +7 0 3 1 7 0 20 +8 0 3 0 3 0 20 +12 -6 0 1 -6 0 20 +2 -0.5 0 1 2 0 20 +9 -7.88 0 0 0 0 20 +8 -15 0 0 0 0 20 +2 -1.5 0 0 0 0 20 +12 -22.5 0 1 -22.5 0 20 +8 -7 0 1 -7 0 20 +4 -5.5 0 0 0 0 20 +10 -8.75 0 0 0 0 20 +8 -9 0 1 -9 0 20 +2 -4 0 0 0 0 20 +4 0 2 0 2 0 20 +8 -8 0 1 8 0 20 +9 -13.5 0 0 0 0 20 +9 -9 0 0 0 0 20 +6 -3.75 0 1 -3.75 0 20 +13 0 6 0 6 0 20 +5 -1.88 0 1 5 0 20 +6 -6 0 1 -6 0 20 +5 -6.88 0 0 0 0 20 +8 -16 0 0 0 0 20 +12 -7.5 0 1 -7.5 0 20 +5 -1.25 0 1 -1.25 0 20 +9 -14.63 0 0 0 0 20 +8 -4 0 1 8 0 20 +10 -17.5 0 0 0 0 20 +5 -3.75 0 0 0 0 20 +6 -10.5 0 0 0 0 20 +13 0 5 0 5 0 20 +10 -16.25 0 0 0 0 20 +5 -7.5 0 0 0 0 20 +2 -1.75 0 0 0 0 20 +5 -9.38 0 0 0 0 20 +2 -2.75 0 1 2 0 20 +2 -0.75 0 1 -0.75 0 20 +5 -8.13 0 0 0 0 20 +9 -11.25 0 0 0 0 20 +8 -13 0 0 0 0 20 +9 -16.88 0 0 0 0 20 +2 -2 0 1 2 0 20 +12 -18 0 0 0 0 20 +8 -2 0 1 -2 0 20 +2 -3 0 1 -3 0 20 +6 -4.5 0 1 6 0 20 +5 0 2 0 2 0 20 +12 -19.5 0 0 0 0 20 +9 -15.75 0 0 0 0 20 +8 -6 0 1 -6 0 20 +10 -2.5 0 1 -2.5 0 20 +9 -6.75 0 0 0 0 20 +6 -6.75 0 0 0 0 20 +2 -3.75 0 0 0 0 20 +10 -5 0 1 -5 0 20 +2 -2.25 0 1 2 0 20 +26 0 12 0 12 0 20 +12 -13.5 0 0 0 0 20 +8 -5 0 0 0 0 20 +6 -3 0 1 6 0 20 +10 -3.75 0 1 10 0 20 +12 -10.5 0 0 0 0 20 +4 -5 0 0 0 0 20 +9 -2.25 0 1 -2.25 0 20 +4 -3 0 0 0 0 20 +9 -10.13 0 0 0 0 20 +28 0 13 0 13 0 20 +22 0 10 1 22 0 20 +10 -10 0 0 0 0 20 +4 -1 0 1 -1 0 20 +4 -2.5 0 1 -2.5 0 20 +12 -24 0 1 -24 0 20 +8 -12 0 1 -12 0 20 +3 0 1 0 1 0 20 +9 -12.38 0 0 0 0 20 +23 0 10 0 10 0 20 +4 -3.5 0 0 0 0 20 +4 -1.5 0 1 4 0 20 +8 -10 0 0 0 0 20 +8 -14 0 0 0 0 20 +4 -6 0 0 0 0 20 +25 0 10 0 10 0 20 
+12 -16.5 0 0 0 0 20 +12 -12 0 1 12 0 20 +5 -2.5 0 0 0 0 20 +5 -8.75 0 0 0 0 20 +12 -4.5 0 1 -4.5 0 20 +12 -15 0 0 0 0 20 +5 -3.13 0 0 0 0 20 +12 -21 0 0 0 0 20 +5 -4.38 0 0 0 0 20 +6 -11.25 0 0 0 0 20 +30 0 12 0 12 0 20 +6 -1.5 0 1 6 0 20 +12 0 6 0 6 0 20 +4 -2 0 1 4 0 20 +10 -15 0 0 0 0 20 +6 -2.25 0 1 6 0 20 +10 -20 0 0 0 0 20 +6 -5.25 0 0 0 0 20 +5 -6.25 0 0 0 0 20 +6 -8.25 0 0 0 0 20 +4 -4.5 0 0 0 0 21 +10 -12.5 0 0 0 0 21 +26 0 12 1 26 0 21 +6 -7.5 0 0 0 0 21 +4 -6.5 0 0 0 0 21 +12 -4.5 0 1 -4.5 0 21 +5 -2.5 0 1 5 0 21 +6 -12 0 0 0 0 21 +9 -14.63 0 0 0 0 21 +6 -6 0 0 0 0 21 +22 0 10 1 22 0 21 +2 -1 0 1 2 0 21 +8 -3 0 1 8 0 21 +12 -9 0 0 0 0 21 +5 -3.75 0 0 0 0 21 +6 -3 0 1 6 0 21 +4 0 2 1 4 0 21 +28 0 13 1 28 0 21 +12 -15 0 0 0 0 21 +9 -11.25 0 0 0 0 21 +12 -10.5 0 0 0 0 21 +5 -1.88 0 1 5 0 21 +2 -2.75 0 0 0 0 21 +4 -7 0 0 0 0 21 +8 -4 0 1 8 0 21 +2 0 1 0 1 0 21 +2 -3.5 0 0 0 0 21 +2 -1.75 0 0 0 0 21 +5 -5 0 0 0 0 21 +12 -12 0 0 0 0 21 +12 0 6 1 12 0 21 +6 -4.5 0 0 0 0 21 +30 0 12 1 0 0 21 +12 -16.5 0 0 0 0 21 +6 -9.75 0 0 0 0 21 +12 -22.5 0 0 0 0 21 +6 -9 0 0 0 0 21 +5 -3.13 0 0 0 0 21 +5 -9.38 0 0 0 0 21 +12 -7.5 0 0 0 0 21 +5 0 2 1 5 0 21 +10 -15 0 0 0 0 21 +12 -3 0 1 -3 0 21 +13 0 6 1 13 0 21 +9 -16.88 0 0 0 0 21 +6 -11.25 0 0 0 0 21 +8 -5 0 0 0 0 21 +8 -14 0 0 0 0 21 +12 -24 0 0 0 0 21 +12 0 5 1 12 0 21 +9 -13.5 0 0 0 0 21 +6 -1.5 0 1 6 0 21 +2 -3 0 0 0 0 21 +10 -2.5 0 1 -2.5 0 21 +2 -0.75 0 1 -0.75 0 21 +6 -10.5 0 0 0 0 21 +2 -0.5 0 1 2 0 21 +10 -10 0 0 0 0 21 +8 -10 0 0 0 0 21 +9 -12.38 0 0 0 0 21 +4 -6 0 0 0 0 21 +6 -2.25 0 1 6 0 21 +9 -15.75 0 0 0 0 21 +12 -13.5 0 0 0 0 21 +8 -6 0 0 0 0 21 +10 -18.75 0 0 0 0 21 +4 -2 0 1 4 0 21 +5 -1.25 0 1 -1.25 0 21 +6 -5.25 0 0 0 0 21 +4 -8 0 0 0 0 21 +25 0 9 1 25 0 21 +2 -3.25 0 0 0 0 21 +10 -11.25 0 0 0 0 21 +4 -7.5 0 0 0 0 21 +9 -5.63 0 0 0 0 21 +6 -6.75 0 0 0 0 21 +8 -2 0 1 -2 0 21 +5 -6.25 0 0 0 0 21 +23 0 10 1 0 0 21 +8 -13 0 0 0 0 21 +10 -13.75 0 0 0 0 21 +5 -10 0 0 0 0 21 +12 0 4 1 12 0 21 +2 -2.5 0 0 0 0 21 +19 0 8 1 19 0 21 +4 -4 0 0 0 0 21 +4 -1 0 1 -1 0 21 +4 -2.5 0 1 -2.5 0 21 +5 -8.13 0 0 0 0 21 +10 -3.75 0 1 10 0 21 +5 -8.75 0 0 0 0 21 +10 -7.5 0 0 0 0 21 +10 -5 0 0 0 0 21 +10 -20 0 0 0 0 21 +13 0 5 0 5 0 21 +8 -9 0 0 0 0 21 +8 -12 0 0 0 0 21 +10 -16.25 0 0 0 0 21 +5 -6.88 0 0 0 0 21 +4 -5.5 0 0 0 0 21 +5 -7.5 0 0 0 0 21 +9 -10.13 0 0 0 0 21 +6 -8.25 0 0 0 0 21 +26 0 10 0 10 0 21 +4 -5 0 0 0 0 21 +2 -2.25 0 0 0 0 21 +6 -3.75 0 1 -3.75 0 21 +8 -8 0 0 0 0 21 +9 -6.75 0 0 0 0 21 +8 -15 0 0 0 0 21 +12 -6 0 1 -6 0 21 +25 0 10 0 10 0 21 +12 -19.5 0 0 0 0 21 +9 -7.88 0 0 0 0 21 +4 -1.5 0 1 4 0 21 +8 -7 0 0 0 0 21 +12 -18 0 0 0 0 21 +2 -2 0 0 0 0 21 +9 -18 0 0 0 0 21 +2 -1.25 0 0 0 0 21 +8 -16 0 0 0 0 21 +5 -4.38 0 0 0 0 21 +2 -4 0 0 0 0 21 +5 -5.63 0 0 0 0 21 +8 0 3 1 0 0 21 +10 -17.5 0 0 0 0 21 +8 -11 0 0 0 0 21 +2 -1.5 0 0 0 0 21 +4 -3.5 0 0 0 0 21 +2 -3.75 0 0 0 0 21 +3 0 1 1 0 0 21 +12 -21 0 0 0 0 21 +10 -8.75 0 0 0 0 21 +9 -9 0 0 0 0 21 +4 -3 0 0 0 0 21 +7 0 3 1 7 0 21 +9 -3.38 0 1 -3.38 0 21 +9 -2.25 0 1 -2.25 0 21 +10 -6.25 0 0 0 0 21 +9 -4.5 0 0 0 0 21 +2 -1 0 0 0 0 22 +9 -13.5 0 0 0 0 22 +5 -6.88 0 0 0 0 22 +10 -10 0 0 0 0 22 +6 -2.25 0 1 6 0 22 +6 -6.75 0 0 0 0 22 +9 -4.5 0 1 -4.5 0 22 +10 -13.75 0 0 0 0 22 +6 -8.25 0 0 0 0 22 +5 -10 0 0 0 0 22 +10 -6.25 0 1 10 0 22 +12 -3 0 1 -3 0 22 +12 -9 0 0 0 0 22 +8 -7 0 0 0 0 22 +6 -12 0 0 0 0 22 +8 -2 0 1 -2 0 22 +12 -6 0 1 -6 0 22 +3 0 1 1 0 0 22 +10 -20 0 0 0 0 22 +5 -3.75 0 0 0 0 22 +2 -1.75 0 0 0 0 22 +6 -3.75 0 0 0 0 22 +9 -12.38 0 0 0 0 22 +5 -6.25 0 0 0 0 22 +12 0 4 1 12 0 
22 +2 -1.5 0 0 0 0 22 +6 -5.25 0 0 0 0 22 +10 -18.75 0 0 0 0 22 +6 -6 0 0 0 0 22 +12 0 5 1 12 0 22 +4 -2 0 0 0 0 22 +2 -4 0 0 0 0 22 +5 -2.5 0 0 0 0 22 +2 -3.75 0 0 0 0 22 +9 -15.75 0 0 0 0 22 +8 -4 0 0 0 0 22 +26 0 12 1 26 0 22 +6 -1.5 0 0 0 0 22 +4 -6 0 0 0 0 22 +10 -2.5 0 1 -2.5 0 22 +8 -12 0 0 0 0 22 +2 -3.5 0 0 0 0 22 +5 -5.63 0 1 -5.63 0 22 +12 -24 0 0 0 0 22 +25 0 10 1 0 0 22 +4 -6.5 0 0 0 0 22 +5 -9.38 0 0 0 0 22 +5 -7.5 0 0 0 0 22 +4 -4 0 1 -4 0 22 +6 -10.5 0 0 0 0 22 +13 0 6 1 13 0 22 +12 -22.5 0 0 0 0 22 +4 -7.5 0 0 0 0 22 +5 0 2 1 5 0 22 +10 -15 0 0 0 0 22 +9 -16.88 0 0 0 0 22 +2 -2.5 0 0 0 0 22 +10 -16.25 0 0 0 0 22 +6 -11.25 0 0 0 0 22 +4 -1.5 0 0 0 0 22 +5 -3.13 0 0 0 0 22 +6 -9 0 0 0 0 22 +12 -19.5 0 0 0 0 22 +10 -12.5 0 0 0 0 22 +2 -3 0 0 0 0 22 +8 -16 0 0 0 0 22 +4 0 2 1 4 0 22 +12 -7.5 0 1 -7.5 0 22 +12 -13.5 0 0 0 0 22 +22 0 10 1 22 0 22 +12 -21 0 0 0 0 22 +7 0 3 1 7 0 22 +10 -8.75 0 0 0 0 22 +2 -1.25 0 0 0 0 22 +9 -6.75 0 0 0 0 22 +12 0 6 1 12 0 22 +28 0 13 0 13 0 22 +9 -10.13 0 0 0 0 22 +2 -0.5 0 1 2 0 22 +25 0 9 1 25 0 22 +6 -7.5 0 0 0 0 22 +4 -3 0 0 0 0 22 +10 -3.75 0 1 10 0 22 +12 -4.5 0 0 0 0 22 +12 -15 0 1 -15 0 22 +6 -3 0 0 0 0 22 +9 -14.63 0 0 0 0 22 +5 -1.25 0 1 -1.25 0 22 +8 -11 0 0 0 0 22 +10 -17.5 0 0 0 0 22 +8 -10 0 0 0 0 22 +9 -9 0 0 0 0 22 +10 -11.25 0 0 0 0 22 +12 -12 0 1 12 0 22 +8 -14 0 0 0 0 22 +12 -16.5 0 0 0 0 22 +4 -7 0 0 0 0 22 +4 -1 0 1 -1 0 22 +5 -1.88 0 1 5 0 22 +8 0 3 1 0 0 22 +2 -3.25 0 0 0 0 22 +5 -5 0 0 0 0 22 +26 0 10 1 0 0 22 +12 -10.5 0 1 12 0 22 +2 0 1 0 1 0 22 +6 -9.75 0 0 0 0 22 +8 -3 0 1 8 0 22 +13 0 5 1 13 0 22 +10 -7.5 0 0 0 0 22 +8 -13 0 0 0 0 22 +9 -3.38 0 1 -3.38 0 22 +8 -15 0 0 0 0 22 +30 0 12 1 0 0 22 +8 -8 0 0 0 0 22 +8 -5 0 0 0 0 22 +12 -18 0 0 0 0 22 +10 -5 0 1 -5 0 22 +9 -11.25 0 0 0 0 22 +9 -7.88 0 0 0 0 22 +8 -6 0 0 0 0 22 +6 -4.5 0 0 0 0 22 +8 -9 0 0 0 0 22 +4 -5.5 0 0 0 0 22 +4 -5 0 0 0 0 22 +9 -2.25 0 1 -2.25 0 22 +23 0 10 1 0 0 22 +9 -5.63 0 0 0 0 22 +4 -4.5 0 0 0 0 22 +4 -8 0 0 0 0 22 +19 0 8 1 19 0 22 +2 -2 0 0 0 0 22 +5 -8.13 0 0 0 0 22 +5 -4.38 0 0 0 0 22 +2 -2.25 0 0 0 0 22 +2 -0.75 0 0 0 0 22 +2 -2.75 0 0 0 0 22 +5 -8.75 0 0 0 0 22 +9 -18 0 0 0 0 22 +4 -3.5 0 1 4 0 22 +4 -2.5 0 0 0 0 22 +9 -6.75 0 1 -6.75 0 23 +6 -6.75 0 0 0 0 23 +6 -3 0 1 6 0 23 +2 -1.5 0 1 2 0 23 +4 -3 0 1 4 0 23 +5 -6.88 0 0 0 0 23 +12 -9 0 1 12 0 23 +4 -5 0 0 0 0 23 +5 -7.5 0 0 0 0 23 +4 -4 0 1 -4 0 23 +9 -5.63 0 1 -5.63 0 23 +9 -14.63 0 0 0 0 23 +5 -9.38 0 0 0 0 23 +6 -4.5 0 1 6 0 23 +8 -7 0 0 0 0 23 +10 -16.25 0 1 10 0 23 +10 -17.5 0 1 -17.5 0 23 +9 -16.88 0 0 0 0 23 +8 -5 0 1 8 0 23 +6 -1.5 0 1 6 0 23 +12 -18 0 0 0 0 23 +5 -6.25 0 0 0 0 23 +8 -4 0 1 8 0 23 +9 -15.75 0 0 0 0 23 +9 -13.5 0 0 0 0 23 +5 -8.13 0 0 0 0 23 +2 0 1 1 0 0 23 +2 -3.75 0 1 -3.75 0 23 +4 -6.5 0 0 0 0 23 +10 -5 0 1 -5 0 23 +12 -22.5 0 0 0 0 23 +2 -1 0 1 2 0 23 +13 0 6 1 13 0 23 +5 -2.5 0 1 5 0 23 +2 -0.5 0 1 2 0 23 +2 -3.25 0 1 -3.25 0 23 +30 0 12 1 0 0 23 +8 -8 0 0 0 0 23 +4 -5.5 0 0 0 0 23 +23 0 10 1 0 0 23 +4 -3.5 0 1 4 0 23 +5 0 2 1 5 0 23 +8 0 3 1 0 0 23 +9 -10.13 0 0 0 0 23 +8 -16 0 0 0 0 23 +12 -24 0 0 0 0 23 +9 -3.38 0 1 -3.38 0 23 +6 -5.25 0 0 0 0 23 +2 -4 0 0 0 0 23 +4 -1 0 1 -1 0 23 +6 -11.25 0 0 0 0 23 +5 -4.38 0 1 -4.38 0 23 +6 -2.25 0 1 6 0 23 +12 -10.5 0 1 12 0 23 +9 -18 0 0 0 0 23 +10 -20 0 0 0 0 23 +4 -4.5 0 1 -4.5 0 23 +9 -2.25 0 1 -2.25 0 23 +4 -6 0 0 0 0 23 +8 -10 0 0 0 0 23 +5 -5 0 1 -5 0 23 +5 -8.75 0 0 0 0 23 +8 -6 0 0 0 0 23 +10 -13.75 0 0 0 0 23 +2 -2.5 0 1 2 0 23 +8 -11 0 0 0 0 23 +4 -2 0 1 4 0 23 +10 -7.5 0 0 0 0 23 +22 0 10 1 22 0 23 +25 0 
10 1 0 0 23 +6 -9.75 0 0 0 0 23 +12 0 5 1 12 0 23 +4 -2.5 0 1 -2.5 0 23 +8 -3 0 1 8 0 23 +10 -11.25 0 0 0 0 23 +5 -10 0 0 0 0 23 +10 -15 0 0 0 0 23 +2 -3.5 0 1 -3.5 0 23 +12 0 4 1 12 0 23 +13 0 5 0 5 0 23 +5 -3.75 0 1 5 0 23 +26 0 12 0 12 0 23 +5 -5.63 0 0 0 0 23 +8 -2 0 1 -2 0 23 +2 -3 0 1 -3 0 23 +6 -9 0 0 0 0 23 +9 -7.88 0 0 0 0 23 +8 -14 0 0 0 0 23 +28 0 13 0 13 0 23 +9 -12.38 0 0 0 0 23 +8 -15 0 0 0 0 23 +10 -2.5 0 1 -2.5 0 23 +4 0 2 0 2 0 23 +12 -6 0 1 -6 0 23 +12 -16.5 0 0 0 0 23 +4 -7.5 0 0 0 0 23 +10 -8.75 0 0 0 0 23 +10 -18.75 0 0 0 0 23 +26 0 10 0 10 0 23 +12 -21 0 0 0 0 23 +2 -0.75 0 1 -0.75 0 23 +9 -9 0 0 0 0 23 +10 -6.25 0 0 0 0 23 +8 -12 0 0 0 0 23 +3 0 1 1 0 0 23 +5 -1.88 0 1 5 0 23 +6 -7.5 0 0 0 0 23 +12 -13.5 0 0 0 0 23 +4 -7 0 0 0 0 23 +6 -8.25 0 0 0 0 23 +6 -12 0 0 0 0 23 +6 -10.5 0 0 0 0 23 +4 -8 0 0 0 0 23 +6 -6 0 0 0 0 23 +12 0 6 0 6 0 23 +12 -19.5 0 0 0 0 23 +19 0 8 1 19 0 23 +12 -15 0 0 0 0 23 +2 -1.75 0 1 2 0 23 +6 -3.75 0 1 -3.75 0 23 +2 -1.25 0 1 2 0 23 +5 -1.25 0 1 -1.25 0 23 +4 -1.5 0 1 4 0 23 +8 -13 0 0 0 0 23 +12 -7.5 0 0 0 0 23 +12 -3 0 1 -3 0 23 +2 -2.75 0 1 2 0 23 +7 0 3 0 3 0 23 +25 0 9 0 9 0 23 +2 -2 0 1 2 0 23 +12 -4.5 0 1 -4.5 0 23 +12 -12 0 0 0 0 23 +5 -3.13 0 1 5 0 23 +9 -11.25 0 0 0 0 23 +8 -9 0 0 0 0 23 +2 -2.25 0 1 2 0 23 +9 -4.5 0 1 -4.5 0 23 +10 -3.75 0 1 10 0 23 +10 -10 0 0 0 0 23 +10 -12.5 0 0 0 0 23 +2 -2.5 0 0 0 0 24 +5 -5.63 0 1 -5.63 0 24 +6 -7.5 0 1 -7.5 0 24 +26 0 10 1 0 0 24 +9 -4.5 0 1 -4.5 0 24 +2 -1.25 0 1 2 0 24 +8 -3 0 1 8 0 24 +25 0 9 1 25 0 24 +4 -4.5 0 1 -4.5 0 24 +5 -10 0 0 0 0 24 +6 -9 0 0 0 0 24 +10 -6.25 0 1 10 0 24 +4 -4 0 0 0 0 24 +12 -3 0 1 -3 0 24 +5 -5 0 1 -5 0 24 +12 0 5 1 12 0 24 +6 -9.75 0 0 0 0 24 +19 0 8 1 19 0 24 +4 -7.5 0 0 0 0 24 +12 -9 0 1 12 0 24 +4 -6.5 0 0 0 0 24 +9 -5.63 0 1 -5.63 0 24 +9 -18 0 0 0 0 24 +10 -11.25 0 1 -11.25 0 24 +10 -13.75 0 0 0 0 24 +6 -12 0 0 0 0 24 +10 -12.5 0 1 -12.5 0 24 +4 -7 0 0 0 0 24 +10 -7.5 0 1 -7.5 0 24 +4 -8 0 0 0 0 24 +8 -11 0 0 0 0 24 +12 0 4 1 12 0 24 +9 -3.38 0 1 -3.38 0 24 +10 -18.75 0 0 0 0 24 +2 -3.5 0 0 0 0 24 +2 -1 0 1 2 0 24 +2 -3.25 0 0 0 0 24 +2 0 1 1 0 0 24 +7 0 3 1 7 0 24 +8 0 3 1 0 0 24 +12 -6 0 1 -6 0 24 +2 -0.5 0 1 2 0 24 +9 -7.88 0 1 -7.88 0 24 +8 -15 0 0 0 0 24 +2 -1.5 0 1 2 0 24 +12 -22.5 0 0 0 0 24 +8 -7 0 1 -7 0 24 +4 -5.5 0 0 0 0 24 +10 -8.75 0 1 -8.75 0 24 +8 -9 0 0 0 0 24 +2 -4 0 0 0 0 24 +4 0 2 1 4 0 24 +8 -8 0 1 8 0 24 +9 -13.5 0 0 0 0 24 +9 -9 0 1 -9 0 24 +6 -3.75 0 1 -3.75 0 24 +13 0 6 1 13 0 24 +5 -1.88 0 1 5 0 24 +6 -6 0 1 -6 0 24 +5 -6.88 0 0 0 0 24 +8 -16 0 0 0 0 24 +12 -7.5 0 1 -7.5 0 24 +5 -1.25 0 1 -1.25 0 24 +9 -14.63 0 0 0 0 24 +8 -4 0 1 8 0 24 +10 -17.5 0 0 0 0 24 +5 -3.75 0 1 5 0 24 +6 -10.5 0 0 0 0 24 +13 0 5 1 13 0 24 +10 -16.25 0 0 0 0 24 +5 -7.5 0 0 0 0 24 +2 -1.75 0 1 2 0 24 +5 -9.38 0 0 0 0 24 +2 -2.75 0 0 0 0 24 +2 -0.75 0 1 -0.75 0 24 +5 -8.13 0 0 0 0 24 +9 -11.25 0 0 0 0 24 +8 -13 0 0 0 0 24 +9 -16.88 0 0 0 0 24 +2 -2 0 1 2 0 24 +12 -18 0 0 0 0 24 +8 -2 0 1 -2 0 24 +2 -3 0 0 0 0 24 +6 -4.5 0 1 6 0 24 +5 0 2 1 5 0 24 +12 -19.5 0 0 0 0 24 +9 -15.75 0 0 0 0 24 +8 -6 0 1 -6 0 24 +10 -2.5 0 1 -2.5 0 24 +9 -6.75 0 1 -6.75 0 24 +6 -6.75 0 0 0 0 24 +2 -3.75 0 0 0 0 24 +10 -5 0 1 -5 0 24 +2 -2.25 0 0 0 0 24 +26 0 12 1 26 0 24 +12 -13.5 0 0 0 0 24 +8 -5 0 1 8 0 24 +6 -3 0 1 6 0 24 +10 -3.75 0 1 10 0 24 +12 -10.5 0 1 12 0 24 +4 -5 0 1 4 0 24 +9 -2.25 0 1 -2.25 0 24 +4 -3 0 1 4 0 24 +9 -10.13 0 0 0 0 24 +28 0 13 1 28 0 24 +22 0 10 1 22 0 24 +10 -10 0 1 10 0 24 +4 -1 0 1 -1 0 24 +4 -2.5 0 1 -2.5 0 24 +12 -24 0 0 0 0 24 +8 -12 0 0 0 0 24 +3 
0 1 1 0 0 24 +9 -12.38 0 0 0 0 24 +23 0 10 1 0 0 24 +4 -3.5 0 1 4 0 24 +4 -1.5 0 1 4 0 24 +8 -10 0 0 0 0 24 +8 -14 0 0 0 0 24 +4 -6 0 0 0 0 24 +25 0 10 1 0 0 24 +12 -16.5 0 0 0 0 24 +12 -12 0 1 12 0 24 +5 -2.5 0 1 5 0 24 +5 -8.75 0 0 0 0 24 +12 -4.5 0 1 -4.5 0 24 +12 -15 0 0 0 0 24 +5 -3.13 0 1 5 0 24 +12 -21 0 0 0 0 24 +5 -4.38 0 1 -4.38 0 24 +6 -11.25 0 0 0 0 24 +30 0 12 1 0 0 24 +6 -1.5 0 1 6 0 24 +12 0 6 1 12 0 24 +4 -2 0 1 4 0 24 +10 -15 0 0 0 0 24 +6 -2.25 0 1 6 0 24 +10 -20 0 0 0 0 24 +6 -5.25 0 1 6 0 24 +5 -6.25 0 1 5 0 24 +6 -8.25 0 0 0 0 24 +4 -4.5 0 0 0 0 25 +10 -12.5 0 1 -12.5 0 25 +26 0 12 1 26 0 25 +6 -7.5 0 0 0 0 25 +4 -6.5 0 0 0 0 25 +12 -4.5 0 1 -4.5 0 25 +5 -2.5 0 1 5 0 25 +6 -12 0 0 0 0 25 +9 -14.63 0 0 0 0 25 +6 -6 0 1 -6 0 25 +22 0 10 1 22 0 25 +2 -1 0 1 2 0 25 +8 -3 0 1 8 0 25 +12 -9 0 0 0 0 25 +5 -3.75 0 1 5 0 25 +6 -3 0 1 6 0 25 +4 0 2 1 4 0 25 +28 0 13 1 28 0 25 +12 -15 0 0 0 0 25 +9 -11.25 0 0 0 0 25 +12 -10.5 0 0 0 0 25 +5 -1.88 0 1 5 0 25 +2 -2.75 0 1 2 0 25 +4 -7 0 0 0 0 25 +8 -4 0 1 8 0 25 +2 0 1 1 0 0 25 +2 -3.5 0 1 -3.5 0 25 +2 -1.75 0 1 2 0 25 +5 -5 0 1 -5 0 25 +12 -12 0 0 0 0 25 +12 0 6 1 12 0 25 +6 -4.5 0 1 6 0 25 +30 0 12 1 0 0 25 +12 -16.5 0 0 0 0 25 +6 -9.75 0 0 0 0 25 +12 -22.5 0 0 0 0 25 +6 -9 0 0 0 0 25 +5 -3.13 0 1 5 0 25 +5 -9.38 0 1 5 0 25 +12 -7.5 0 1 -7.5 0 25 +5 0 2 1 5 0 25 +10 -15 0 0 0 0 25 +12 -3 0 1 -3 0 25 +13 0 6 1 13 0 25 +9 -16.88 0 0 0 0 25 +6 -11.25 0 0 0 0 25 +8 -5 0 1 8 0 25 +8 -14 0 0 0 0 25 +12 -24 0 0 0 0 25 +12 0 5 1 12 0 25 +9 -13.5 0 0 0 0 25 +6 -1.5 0 1 6 0 25 +2 -3 0 1 -3 0 25 +10 -2.5 0 1 -2.5 0 25 +2 -0.75 0 1 -0.75 0 25 +6 -10.5 0 0 0 0 25 +2 -0.5 0 1 2 0 25 +10 -10 0 1 10 0 25 +8 -10 0 0 0 0 25 +9 -12.38 0 0 0 0 25 +4 -6 0 0 0 0 25 +6 -2.25 0 1 6 0 25 +9 -15.75 0 0 0 0 25 +12 -13.5 0 0 0 0 25 +8 -6 0 1 -6 0 25 +10 -18.75 0 0 0 0 25 +4 -2 0 1 4 0 25 +5 -1.25 0 1 -1.25 0 25 +6 -5.25 0 1 6 0 25 +4 -8 0 0 0 0 25 +25 0 9 1 25 0 25 +2 -3.25 0 1 -3.25 0 25 +10 -11.25 0 1 -11.25 0 25 +4 -7.5 0 1 -7.5 0 25 +9 -5.63 0 1 -5.63 0 25 +6 -6.75 0 1 -6.75 0 25 +8 -2 0 1 -2 0 25 +5 -6.25 0 1 5 0 25 +23 0 10 1 0 0 25 +8 -13 0 0 0 0 25 +10 -13.75 0 0 0 0 25 +5 -10 0 0 0 0 25 +12 0 4 1 12 0 25 +2 -2.5 0 1 2 0 25 +19 0 8 1 19 0 25 +4 -4 0 1 -4 0 25 +4 -1 0 1 -1 0 25 +4 -2.5 0 1 -2.5 0 25 +5 -8.13 0 1 5 0 25 +10 -3.75 0 1 10 0 25 +5 -8.75 0 0 0 0 25 +10 -7.5 0 1 -7.5 0 25 +10 -5 0 1 -5 0 25 +10 -20 0 0 0 0 25 +13 0 5 1 13 0 25 +8 -9 0 0 0 0 25 +8 -12 0 0 0 0 25 +10 -16.25 0 0 0 0 25 +5 -6.88 0 0 0 0 25 +4 -5.5 0 1 -5.5 0 25 +5 -7.5 0 0 0 0 25 +9 -10.13 0 0 0 0 25 +6 -8.25 0 0 0 0 25 +26 0 10 1 0 0 25 +4 -5 0 1 4 0 25 +2 -2.25 0 1 2 0 25 +6 -3.75 0 1 -3.75 0 25 +8 -8 0 1 8 0 25 +9 -6.75 0 1 -6.75 0 25 +8 -15 0 0 0 0 25 +12 -6 0 1 -6 0 25 +25 0 10 0 10 0 25 +12 -19.5 0 0 0 0 25 +9 -7.88 0 1 -7.88 0 25 +4 -1.5 0 1 4 0 25 +8 -7 0 1 -7 0 25 +12 -18 0 0 0 0 25 +2 -2 0 1 2 0 25 +9 -18 0 0 0 0 25 +2 -1.25 0 1 2 0 25 +8 -16 0 0 0 0 25 +5 -4.38 0 1 -4.38 0 25 +2 -4 0 0 0 0 25 +5 -5.63 0 1 -5.63 0 25 +8 0 3 0 3 0 25 +10 -17.5 0 0 0 0 25 +8 -11 0 0 0 0 25 +2 -1.5 0 1 2 0 25 +4 -3.5 0 1 4 0 25 +2 -3.75 0 1 -3.75 0 25 +3 0 1 0 1 0 25 +12 -21 0 0 0 0 25 +10 -8.75 0 1 -8.75 0 25 +9 -9 0 1 -9 0 25 +4 -3 0 1 4 0 25 +7 0 3 1 7 0 25 +9 -3.38 0 1 -3.38 0 25 +9 -2.25 0 1 -2.25 0 25 +10 -6.25 0 1 10 0 25 +9 -4.5 0 1 -4.5 0 25 +2 -1 0 1 2 0 26 +9 -13.5 0 0 0 0 26 +5 -6.88 0 0 0 0 26 +10 -10 0 1 10 0 26 +6 -2.25 0 1 6 0 26 +6 -6.75 0 0 0 0 26 +9 -4.5 0 1 -4.5 0 26 +10 -13.75 0 0 0 0 26 +6 -8.25 0 0 0 0 26 +5 -10 0 1 5 0 26 +10 -6.25 0 1 10 0 26 +12 -3 0 1 -3 0 26 
+12 -9 0 0 0 0 26 +8 -7 0 0 0 0 26 +6 -12 0 0 0 0 26 +8 -2 0 1 -2 0 26 +12 -6 0 1 -6 0 26 +3 0 1 0 1 0 26 +10 -20 0 0 0 0 26 +5 -3.75 0 1 5 0 26 +2 -1.75 0 1 2 0 26 +6 -3.75 0 0 0 0 26 +9 -12.38 0 0 0 0 26 +5 -6.25 0 0 0 0 26 +12 0 4 0 4 0 26 +2 -1.5 0 1 2 0 26 +6 -5.25 0 0 0 0 26 +10 -18.75 0 0 0 0 26 +6 -6 0 0 0 0 26 +12 0 5 1 12 0 26 +4 -2 0 1 4 0 26 +2 -4 0 0 0 0 26 +5 -2.5 0 1 5 0 26 +2 -3.75 0 0 0 0 26 +9 -15.75 0 0 0 0 26 +8 -4 0 1 8 0 26 +26 0 12 0 12 0 26 +6 -1.5 0 1 6 0 26 +4 -6 0 0 0 0 26 +10 -2.5 0 1 -2.5 0 26 +8 -12 0 0 0 0 26 +2 -3.5 0 0 0 0 26 +5 -5.63 0 1 -5.63 0 26 +12 -24 0 0 0 0 26 +25 0 10 1 0 0 26 +4 -6.5 0 0 0 0 26 +5 -9.38 0 0 0 0 26 +5 -7.5 0 1 -7.5 0 26 +4 -4 0 1 -4 0 26 +6 -10.5 0 0 0 0 26 +13 0 6 0 6 0 26 +12 -22.5 0 0 0 0 26 +4 -7.5 0 0 0 0 26 +5 0 2 1 5 0 26 +10 -15 0 0 0 0 26 +9 -16.88 0 0 0 0 26 +2 -2.5 0 0 0 0 26 +10 -16.25 0 0 0 0 26 +6 -11.25 0 0 0 0 26 +4 -1.5 0 1 4 0 26 +5 -3.13 0 1 5 0 26 +6 -9 0 0 0 0 26 +12 -19.5 0 0 0 0 26 +10 -12.5 0 0 0 0 26 +2 -3 0 0 0 0 26 +8 -16 0 0 0 0 26 +4 0 2 1 4 0 26 +12 -7.5 0 1 -7.5 0 26 +12 -13.5 0 0 0 0 26 +22 0 10 1 22 0 26 +12 -21 0 0 0 0 26 +7 0 3 0 3 0 26 +10 -8.75 0 0 0 0 26 +2 -1.25 0 1 2 0 26 +9 -6.75 0 0 0 0 26 +12 0 6 0 6 0 26 +28 0 13 1 28 0 26 +9 -10.13 0 0 0 0 26 +2 -0.5 0 1 2 0 26 +25 0 9 0 9 0 26 +6 -7.5 0 0 0 0 26 +4 -3 0 1 4 0 26 +10 -3.75 0 0 0 0 26 +12 -4.5 0 1 -4.5 0 26 +12 -15 0 0 0 0 26 +6 -3 0 1 6 0 26 +9 -14.63 0 0 0 0 26 +5 -1.25 0 1 -1.25 0 26 +8 -11 0 0 0 0 26 +10 -17.5 0 0 0 0 26 +8 -10 0 0 0 0 26 +9 -9 0 0 0 0 26 +10 -11.25 0 0 0 0 26 +12 -12 0 0 0 0 26 +8 -14 0 0 0 0 26 +12 -16.5 0 0 0 0 26 +4 -7 0 0 0 0 26 +4 -1 0 1 -1 0 26 +5 -1.88 0 1 5 0 26 +8 0 3 1 0 0 26 +2 -3.25 0 0 0 0 26 +5 -5 0 0 0 0 26 +26 0 10 0 10 0 26 +12 -10.5 0 0 0 0 26 +2 0 1 0 1 0 26 +6 -9.75 0 0 0 0 26 +8 -3 0 0 0 0 26 +13 0 5 0 5 0 26 +10 -7.5 0 0 0 0 26 +8 -13 0 0 0 0 26 +9 -3.38 0 1 -3.38 0 26 +8 -15 0 0 0 0 26 +30 0 12 1 0 0 26 +8 -8 0 0 0 0 26 +8 -5 0 1 8 0 26 +12 -18 0 0 0 0 26 +10 -5 0 1 -5 0 26 +9 -11.25 0 0 0 0 26 +9 -7.88 0 0 0 0 26 +8 -6 0 0 0 0 26 +6 -4.5 0 1 6 0 26 +8 -9 0 0 0 0 26 +4 -5.5 0 0 0 0 26 +4 -5 0 0 0 0 26 +9 -2.25 0 1 -2.25 0 26 +23 0 10 1 0 0 26 +9 -5.63 0 1 -5.63 0 26 +4 -4.5 0 0 0 0 26 +4 -8 0 1 4 0 26 +19 0 8 1 19 0 26 +2 -2 0 0 0 0 26 +5 -8.13 0 0 0 0 26 +5 -4.38 0 0 0 0 26 +2 -2.25 0 0 0 0 26 +2 -0.75 0 1 -0.75 0 26 +2 -2.75 0 0 0 0 26 +5 -8.75 0 0 0 0 26 +9 -18 0 0 0 0 26 +4 -3.5 0 0 0 0 26 +4 -2.5 0 1 -2.5 0 26 +9 -6.75 0 1 -6.75 0 27 +6 -6.75 0 1 -6.75 0 27 +6 -3 0 1 6 0 27 +2 -1.5 0 1 2 0 27 +4 -3 0 0 0 0 27 +5 -6.88 0 1 5 0 27 +12 -9 0 1 12 0 27 +4 -5 0 0 0 0 27 +5 -7.5 0 1 -7.5 0 27 +4 -4 0 1 -4 0 27 +9 -5.63 0 1 -5.63 0 27 +9 -14.63 0 1 9 0 27 +5 -9.38 0 0 0 0 27 +6 -4.5 0 1 6 0 27 +8 -7 0 1 -7 0 27 +10 -16.25 0 0 0 0 27 +10 -17.5 0 1 -17.5 0 27 +9 -16.88 0 0 0 0 27 +8 -5 0 1 8 0 27 +6 -1.5 0 1 6 0 27 +12 -18 0 1 -18 0 27 +5 -6.25 0 1 5 0 27 +8 -4 0 1 8 0 27 +9 -15.75 0 1 -15.75 0 27 +9 -13.5 0 0 0 0 27 +5 -8.13 0 1 5 0 27 +2 0 1 1 0 0 27 +2 -3.75 0 0 0 0 27 +4 -6.5 0 1 4 0 27 +10 -5 0 1 -5 0 27 +12 -22.5 0 0 0 0 27 +2 -1 0 1 2 0 27 +13 0 6 1 13 0 27 +5 -2.5 0 1 5 0 27 +2 -0.5 0 1 2 0 27 +2 -3.25 0 1 -3.25 0 27 +30 0 12 1 0 0 27 +8 -8 0 1 8 0 27 +4 -5.5 0 0 0 0 27 +23 0 10 1 0 0 27 +4 -3.5 0 0 0 0 27 +5 0 2 1 5 0 27 +8 0 3 0 3 0 27 +9 -10.13 0 1 -10.13 0 27 +8 -16 0 1 -16 0 27 +12 -24 0 1 -24 0 27 +9 -3.38 0 1 -3.38 0 27 +6 -5.25 0 1 6 0 27 +2 -4 0 1 2 0 27 +4 -1 0 1 -1 0 27 +6 -11.25 0 0 0 0 27 +5 -4.38 0 1 -4.38 0 27 +6 -2.25 0 1 6 0 27 +12 -10.5 0 1 12 0 27 +9 -18 0 1 -18 0 27 +10 
-20 0 0 0 0 27 +4 -4.5 0 1 -4.5 0 27 +9 -2.25 0 1 -2.25 0 27 +4 -6 0 1 4 0 27 +8 -10 0 1 -10 0 27 +5 -5 0 1 -5 0 27 +5 -8.75 0 0 0 0 27 +8 -6 0 1 -6 0 27 +10 -13.75 0 0 0 0 27 +2 -2.5 0 1 2 0 27 +8 -11 0 1 -11 0 27 +4 -2 0 1 4 0 27 +10 -7.5 0 1 -7.5 0 27 +22 0 10 1 22 0 27 +25 0 10 1 0 0 27 +6 -9.75 0 1 6 0 27 +12 0 5 1 12 0 27 +4 -2.5 0 1 -2.5 0 27 +8 -3 0 1 8 0 27 +10 -11.25 0 1 -11.25 0 27 +5 -10 0 1 5 0 27 +10 -15 0 0 0 0 27 +2 -3.5 0 0 0 0 27 +12 0 4 0 4 0 27 +13 0 5 1 13 0 27 +5 -3.75 0 1 5 0 27 +26 0 12 1 26 0 27 +5 -5.63 0 1 -5.63 0 27 +8 -2 0 1 -2 0 27 +2 -3 0 1 -3 0 27 +6 -9 0 1 -9 0 27 +9 -7.88 0 1 -7.88 0 27 +8 -14 0 0 0 0 27 +28 0 13 1 28 0 27 +9 -12.38 0 0 0 0 27 +8 -15 0 0 0 0 27 +10 -2.5 0 1 -2.5 0 27 +4 0 2 0 2 0 27 +12 -6 0 1 -6 0 27 +12 -16.5 0 1 -16.5 0 27 +4 -7.5 0 1 -7.5 0 27 +10 -8.75 0 1 -8.75 0 27 +10 -18.75 0 0 0 0 27 +26 0 10 1 0 0 27 +12 -21 0 1 12 0 27 +2 -0.75 0 1 -0.75 0 27 +9 -9 0 1 -9 0 27 +10 -6.25 0 1 10 0 27 +8 -12 0 1 -12 0 27 +3 0 1 1 0 0 27 +5 -1.88 0 1 5 0 27 +6 -7.5 0 1 -7.5 0 27 +12 -13.5 0 1 12 0 27 +4 -7 0 0 0 0 27 +6 -8.25 0 1 -8.25 0 27 +6 -12 0 0 0 0 27 +6 -10.5 0 0 0 0 27 +4 -8 0 1 4 0 27 +6 -6 0 1 -6 0 27 +12 0 6 0 6 0 27 +12 -19.5 0 0 0 0 27 +19 0 8 1 19 0 27 +12 -15 0 0 0 0 27 +2 -1.75 0 1 2 0 27 +6 -3.75 0 1 -3.75 0 27 +2 -1.25 0 1 2 0 27 +5 -1.25 0 1 -1.25 0 27 +4 -1.5 0 1 4 0 27 +8 -13 0 0 0 0 27 +12 -7.5 0 1 -7.5 0 27 +12 -3 0 1 -3 0 27 +2 -2.75 0 0 0 0 27 +7 0 3 1 7 0 27 +25 0 9 1 25 0 27 +2 -2 0 0 0 0 27 +12 -4.5 0 1 -4.5 0 27 +12 -12 0 1 12 0 27 +5 -3.13 0 1 5 0 27 +9 -11.25 0 1 9 0 27 +8 -9 0 0 0 0 27 +2 -2.25 0 1 2 0 27 +9 -4.5 0 1 -4.5 0 27 +10 -3.75 0 1 10 0 27 +10 -10 0 1 10 0 27 +10 -12.5 0 0 0 0 27 +2 -2.5 0 0 0 0 28 +5 -5.63 0 0 0 0 28 +6 -7.5 0 0 0 0 28 +26 0 10 1 0 0 28 +9 -4.5 0 0 0 0 28 +2 -1.25 0 0 0 0 28 +8 -3 0 1 8 0 28 +25 0 9 0 9 0 28 +4 -4.5 0 0 0 0 28 +5 -10 0 0 0 0 28 +6 -9 0 0 0 0 28 +10 -6.25 0 1 10 0 28 +4 -4 0 1 -4 0 28 +12 -3 0 1 -3 0 28 +5 -5 0 0 0 0 28 +12 0 5 0 5 0 28 +6 -9.75 0 0 0 0 28 +19 0 8 0 8 0 28 +4 -7.5 0 0 0 0 28 +12 -9 0 0 0 0 28 +4 -6.5 0 0 0 0 28 +9 -5.63 0 0 0 0 28 +9 -18 0 0 0 0 28 +10 -11.25 0 0 0 0 28 +10 -13.75 0 0 0 0 28 +6 -12 0 0 0 0 28 +10 -12.5 0 0 0 0 28 +4 -7 0 0 0 0 28 +10 -7.5 0 1 -7.5 0 28 +4 -8 0 0 0 0 28 +8 -11 0 0 0 0 28 +12 0 4 0 4 0 28 +9 -3.38 0 1 -3.38 0 28 +10 -18.75 0 0 0 0 28 +2 -3.5 0 0 0 0 28 +2 -1 0 0 0 0 28 +2 -3.25 0 0 0 0 28 +2 0 1 0 1 0 28 +7 0 3 0 3 0 28 +8 0 3 0 3 0 28 +12 -6 0 1 -6 0 28 +2 -0.5 0 1 2 0 28 +9 -7.88 0 0 0 0 28 +8 -15 0 0 0 0 28 +2 -1.5 0 0 0 0 28 +12 -22.5 0 0 0 0 28 +8 -7 0 0 0 0 28 +4 -5.5 0 0 0 0 28 +10 -8.75 0 0 0 0 28 +8 -9 0 0 0 0 28 +2 -4 0 0 0 0 28 +4 0 2 1 4 0 28 +8 -8 0 0 0 0 28 +9 -13.5 0 0 0 0 28 +9 -9 0 0 0 0 28 +6 -3.75 0 0 0 0 28 +13 0 6 0 6 0 28 +5 -1.88 0 1 5 0 28 +6 -6 0 0 0 0 28 +5 -6.88 0 0 0 0 28 +8 -16 0 0 0 0 28 +12 -7.5 0 0 0 0 28 +5 -1.25 0 0 0 0 28 +9 -14.63 0 0 0 0 28 +8 -4 0 0 0 0 28 +10 -17.5 0 0 0 0 28 +5 -3.75 0 0 0 0 28 +6 -10.5 0 0 0 0 28 +13 0 5 0 5 0 28 +10 -16.25 0 0 0 0 28 +5 -7.5 0 0 0 0 28 +2 -1.75 0 0 0 0 28 +5 -9.38 0 0 0 0 28 +2 -2.75 0 0 0 0 28 +2 -0.75 0 1 -0.75 0 28 +5 -8.13 0 0 0 0 28 +9 -11.25 0 0 0 0 28 +8 -13 0 0 0 0 28 +9 -16.88 0 0 0 0 28 +2 -2 0 0 0 0 28 +12 -18 0 0 0 0 28 +8 -2 0 1 -2 0 28 +2 -3 0 0 0 0 28 +6 -4.5 0 0 0 0 28 +5 0 2 0 2 0 28 +12 -19.5 0 0 0 0 28 +9 -15.75 0 0 0 0 28 +8 -6 0 0 0 0 28 +10 -2.5 0 1 -2.5 0 28 +9 -6.75 0 0 0 0 28 +6 -6.75 0 0 0 0 28 +2 -3.75 0 0 0 0 28 +10 -5 0 0 0 0 28 +2 -2.25 0 0 0 0 28 +26 0 12 0 12 0 28 +12 -13.5 0 0 0 0 28 +8 -5 0 0 0 0 28 +6 -3 0 0 0 0 28 
+10 -3.75 0 1 10 0 28 +12 -10.5 0 0 0 0 28 +4 -5 0 0 0 0 28 +9 -2.25 0 1 -2.25 0 28 +4 -3 0 0 0 0 28 +9 -10.13 0 0 0 0 28 +28 0 13 0 13 0 28 +22 0 10 0 10 0 28 +10 -10 0 0 0 0 28 +4 -1 0 1 -1 0 28 +4 -2.5 0 0 0 0 28 +12 -24 0 0 0 0 28 +8 -12 0 0 0 0 28 +3 0 1 1 0 0 28 +9 -12.38 0 0 0 0 28 +23 0 10 0 10 0 28 +4 -3.5 0 0 0 0 28 +4 -1.5 0 1 4 0 28 +8 -10 0 0 0 0 28 +8 -14 0 0 0 0 28 +4 -6 0 0 0 0 28 +25 0 10 1 0 0 28 +12 -16.5 0 0 0 0 28 +12 -12 0 0 0 0 28 +5 -2.5 0 0 0 0 28 +5 -8.75 0 0 0 0 28 +12 -4.5 0 0 0 0 28 +12 -15 0 0 0 0 28 +5 -3.13 0 0 0 0 28 +12 -21 0 0 0 0 28 +5 -4.38 0 0 0 0 28 +6 -11.25 0 0 0 0 28 +30 0 12 0 12 0 28 +6 -1.5 0 1 6 0 28 +12 0 6 0 6 0 28 +4 -2 0 0 0 0 28 +10 -15 0 0 0 0 28 +6 -2.25 0 0 0 0 28 +10 -20 0 0 0 0 28 +6 -5.25 0 0 0 0 28 +5 -6.25 0 0 0 0 28 +6 -8.25 0 0 0 0 28 +4 -4.5 0 1 -4.5 0 29 +10 -12.5 0 1 -12.5 0 29 +26 0 12 1 26 0 29 +6 -7.5 0 0 0 0 29 +4 -6.5 0 0 0 0 29 +12 -4.5 0 1 -4.5 0 29 +5 -2.5 0 1 5 0 29 +6 -12 0 0 0 0 29 +9 -14.63 0 1 9 0 29 +6 -6 0 0 0 0 29 +22 0 10 1 22 0 29 +2 -1 0 1 2 0 29 +8 -3 0 0 0 0 29 +12 -9 0 0 0 0 29 +5 -3.75 0 1 5 0 29 +6 -3 0 0 0 0 29 +4 0 2 0 2 0 29 +28 0 13 0 13 0 29 +12 -15 0 0 0 0 29 +9 -11.25 0 0 0 0 29 +12 -10.5 0 0 0 0 29 +5 -1.88 0 1 5 0 29 +2 -2.75 0 0 0 0 29 +4 -7 0 0 0 0 29 +8 -4 0 1 8 0 29 +2 0 1 0 1 0 29 +2 -3.5 0 0 0 0 29 +2 -1.75 0 1 2 0 29 +5 -5 0 0 0 0 29 +12 -12 0 0 0 0 29 +12 0 6 1 12 0 29 +6 -4.5 0 0 0 0 29 +30 0 12 1 0 0 29 +12 -16.5 0 0 0 0 29 +6 -9.75 0 0 0 0 29 +12 -22.5 0 0 0 0 29 +6 -9 0 0 0 0 29 +5 -3.13 0 1 5 0 29 +5 -9.38 0 0 0 0 29 +12 -7.5 0 1 -7.5 0 29 +5 0 2 0 2 0 29 +10 -15 0 0 0 0 29 +12 -3 0 1 -3 0 29 +13 0 6 1 13 0 29 +9 -16.88 0 0 0 0 29 +6 -11.25 0 0 0 0 29 +8 -5 0 0 0 0 29 +8 -14 0 0 0 0 29 +12 -24 0 0 0 0 29 +12 0 5 0 5 0 29 +9 -13.5 0 0 0 0 29 +6 -1.5 0 1 6 0 29 +2 -3 0 0 0 0 29 +10 -2.5 0 1 -2.5 0 29 +2 -0.75 0 1 -0.75 0 29 +6 -10.5 0 0 0 0 29 +2 -0.5 0 1 2 0 29 +10 -10 0 0 0 0 29 +8 -10 0 1 -10 0 29 +9 -12.38 0 0 0 0 29 +4 -6 0 0 0 0 29 +6 -2.25 0 1 6 0 29 +9 -15.75 0 0 0 0 29 +12 -13.5 0 0 0 0 29 +8 -6 0 0 0 0 29 +10 -18.75 0 0 0 0 29 +4 -2 0 1 4 0 29 +5 -1.25 0 1 -1.25 0 29 +6 -5.25 0 1 6 0 29 +4 -8 0 0 0 0 29 +25 0 9 0 9 0 29 +2 -3.25 0 1 -3.25 0 29 +10 -11.25 0 1 -11.25 0 29 +4 -7.5 0 1 -7.5 0 29 +9 -5.63 0 1 -5.63 0 29 +6 -6.75 0 1 -6.75 0 29 +8 -2 0 1 -2 0 29 +5 -6.25 0 0 0 0 29 +23 0 10 1 0 0 29 +8 -13 0 0 0 0 29 +10 -13.75 0 0 0 0 29 +5 -10 0 0 0 0 29 +12 0 4 0 4 0 29 +2 -2.5 0 0 0 0 29 +19 0 8 0 8 0 29 +4 -4 0 0 0 0 29 +4 -1 0 1 -1 0 29 +4 -2.5 0 1 -2.5 0 29 +5 -8.13 0 0 0 0 29 +10 -3.75 0 1 10 0 29 +5 -8.75 0 0 0 0 29 +10 -7.5 0 0 0 0 29 +10 -5 0 1 -5 0 29 +10 -20 0 0 0 0 29 +13 0 5 0 5 0 29 +8 -9 0 0 0 0 29 +8 -12 0 0 0 0 29 +10 -16.25 0 0 0 0 29 +5 -6.88 0 1 5 0 29 +4 -5.5 0 0 0 0 29 +5 -7.5 0 0 0 0 29 +9 -10.13 0 0 0 0 29 +6 -8.25 0 0 0 0 29 +26 0 10 0 10 0 29 +4 -5 0 1 4 0 29 +2 -2.25 0 0 0 0 29 +6 -3.75 0 0 0 0 29 +8 -8 0 0 0 0 29 +9 -6.75 0 0 0 0 29 +8 -15 0 0 0 0 29 +12 -6 0 1 -6 0 29 +25 0 10 0 10 0 29 +12 -19.5 0 0 0 0 29 +9 -7.88 0 0 0 0 29 +4 -1.5 0 1 4 0 29 +8 -7 0 0 0 0 29 +12 -18 0 0 0 0 29 +2 -2 0 0 0 0 29 +9 -18 0 0 0 0 29 +2 -1.25 0 1 2 0 29 +8 -16 0 0 0 0 29 +5 -4.38 0 1 -4.38 0 29 +2 -4 0 0 0 0 29 +5 -5.63 0 0 0 0 29 +8 0 3 0 3 0 29 +10 -17.5 0 0 0 0 29 +8 -11 0 0 0 0 29 +2 -1.5 0 0 0 0 29 +4 -3.5 0 0 0 0 29 +2 -3.75 0 0 0 0 29 +3 0 1 0 1 0 29 +12 -21 0 0 0 0 29 +10 -8.75 0 0 0 0 29 +9 -9 0 0 0 0 29 +4 -3 0 0 0 0 29 +7 0 3 0 3 0 29 +9 -3.38 0 0 0 0 29 +9 -2.25 0 1 -2.25 0 29 +10 -6.25 0 1 10 0 29 +9 -4.5 0 0 0 0 29 +2 -1 0 0 0 0 30 +9 -13.5 0 0 0 0 30 +5 
-6.88 0 1 5 0 30 +10 -10 0 0 0 0 30 +6 -2.25 0 1 6 0 30 +6 -6.75 0 0 0 0 30 +9 -4.5 0 1 -4.5 0 30 +10 -13.75 0 0 0 0 30 +6 -8.25 0 0 0 0 30 +5 -10 0 0 0 0 30 +10 -6.25 0 1 10 0 30 +12 -3 0 1 -3 0 30 +12 -9 0 0 0 0 30 +8 -7 0 1 -7 0 30 +6 -12 0 0 0 0 30 +8 -2 0 1 -2 0 30 +12 -6 0 0 0 0 30 +3 0 1 0 1 0 30 +10 -20 0 0 0 0 30 +5 -3.75 0 1 5 0 30 +2 -1.75 0 0 0 0 30 +6 -3.75 0 1 -3.75 0 30 +9 -12.38 0 0 0 0 30 +5 -6.25 0 0 0 0 30 +12 0 4 0 4 0 30 +2 -1.5 0 0 0 0 30 +6 -5.25 0 0 0 0 30 +10 -18.75 0 0 0 0 30 +6 -6 0 0 0 0 30 +12 0 5 0 5 0 30 +4 -2 0 0 0 0 30 +2 -4 0 0 0 0 30 +5 -2.5 0 1 5 0 30 +2 -3.75 0 0 0 0 30 +9 -15.75 0 0 0 0 30 +8 -4 0 1 8 0 30 +26 0 12 1 26 0 30 +6 -1.5 0 0 0 0 30 +4 -6 0 0 0 0 30 +10 -2.5 0 1 -2.5 0 30 +8 -12 0 0 0 0 30 +2 -3.5 0 0 0 0 30 +5 -5.63 0 0 0 0 30 +12 -24 0 0 0 0 30 +25 0 10 1 0 0 30 +4 -6.5 0 0 0 0 30 +5 -9.38 0 0 0 0 30 +5 -7.5 0 0 0 0 30 +4 -4 0 0 0 0 30 +6 -10.5 0 0 0 0 30 +13 0 6 1 13 0 30 +12 -22.5 0 0 0 0 30 +4 -7.5 0 0 0 0 30 +5 0 2 0 2 0 30 +10 -15 0 0 0 0 30 +9 -16.88 0 0 0 0 30 +2 -2.5 0 0 0 0 30 +10 -16.25 0 0 0 0 30 +6 -11.25 0 0 0 0 30 +4 -1.5 0 1 4 0 30 +5 -3.13 0 0 0 0 30 +6 -9 0 0 0 0 30 +12 -19.5 0 0 0 0 30 +10 -12.5 0 0 0 0 30 +2 -3 0 0 0 0 30 +8 -16 0 0 0 0 30 +4 0 2 0 2 0 30 +12 -7.5 0 0 0 0 30 +12 -13.5 0 0 0 0 30 +22 0 10 0 10 0 30 +12 -21 0 0 0 0 30 +7 0 3 0 3 0 30 +10 -8.75 0 0 0 0 30 +2 -1.25 0 0 0 0 30 +9 -6.75 0 0 0 0 30 +12 0 6 1 12 0 30 +28 0 13 0 13 0 30 +9 -10.13 0 0 0 0 30 +2 -0.5 0 1 2 0 30 +25 0 9 0 9 0 30 +6 -7.5 0 0 0 0 30 +4 -3 0 0 0 0 30 +10 -3.75 0 1 10 0 30 +12 -4.5 0 1 -4.5 0 30 +12 -15 0 0 0 0 30 +6 -3 0 0 0 0 30 +9 -14.63 0 0 0 0 30 +5 -1.25 0 0 0 0 30 +8 -11 0 0 0 0 30 +10 -17.5 0 0 0 0 30 +8 -10 0 0 0 0 30 +9 -9 0 0 0 0 30 +10 -11.25 0 0 0 0 30 +12 -12 0 0 0 0 30 +8 -14 0 0 0 0 30 +12 -16.5 0 0 0 0 30 +4 -7 0 0 0 0 30 +4 -1 0 0 0 0 30 +5 -1.88 0 0 0 0 30 +8 0 3 0 3 0 30 +2 -3.25 0 0 0 0 30 +5 -5 0 0 0 0 30 +26 0 10 0 10 0 30 +12 -10.5 0 0 0 0 30 +2 0 1 0 1 0 30 +6 -9.75 0 0 0 0 30 +8 -3 0 0 0 0 30 +13 0 5 0 5 0 30 +10 -7.5 0 0 0 0 30 +8 -13 0 0 0 0 30 +9 -3.38 0 0 0 0 30 +8 -15 0 0 0 0 30 +30 0 12 0 12 0 30 +8 -8 0 0 0 0 30 +8 -5 0 0 0 0 30 +12 -18 0 0 0 0 30 +10 -5 0 0 0 0 30 +9 -11.25 0 0 0 0 30 +9 -7.88 0 0 0 0 30 +8 -6 0 0 0 0 30 +6 -4.5 0 0 0 0 30 +8 -9 0 0 0 0 30 +4 -5.5 0 0 0 0 30 +4 -5 0 0 0 0 30 +9 -2.25 0 1 -2.25 0 30 +23 0 10 0 10 0 30 +9 -5.63 0 0 0 0 30 +4 -4.5 0 0 0 0 30 +4 -8 0 0 0 0 30 +19 0 8 0 8 0 30 +2 -2 0 0 0 0 30 +5 -8.13 0 0 0 0 30 +5 -4.38 0 0 0 0 30 +2 -2.25 0 0 0 0 30 +2 -0.75 0 0 0 0 30 +2 -2.75 0 0 0 0 30 +5 -8.75 0 0 0 0 30 +9 -18 0 0 0 0 30 +4 -3.5 0 0 0 0 30 +4 -2.5 0 1 -2.5 0 30
\ No newline at end of file
diff --git a/R/inst/extdata/ra_data_reappraisal.txt b/R/inst/extdata/ra_data_reappraisal.txt
new file mode 100644
index 00000000..b67f642b
--- /dev/null
+++ b/R/inst/extdata/ra_data_reappraisal.txt
@@ -0,0 +1,4190 @@
+gain loss cert gamble outcome cond subjID
+9 -11.25 0 1 9 1 1 +8 -16 0 0 0 1 1 +9 -5.63 0 1 -5.63 1 1 +9 -4.5 0 1 9 1 1 +2 -2 0 1 2 1 1 +12 -19.5 0 0 0 1 1 +4 -4.5 0 1 4 1 1 +2 -3.75 0 1 -3.75 1 1 +2 -2.25 0 0 0 1 1 +12 -4.5 0 1 -4.5 1 1 +9 -10.13 0 0 0 1 1 +12 -3 0 1 12 1 1 +10 -17.5 0 0 0 1 1 +5 -4.38 0 1 -4.38 1 1 +5 -7.5 0 0 0 1 1 +6 -11.25 0 0 0 1 1 +2 -1.5 0 1 -1.5 1 1 +9 -6.75 0 1 -6.75 1 1 +4 -7 0 0 0 1 1 +8 -7 0 1 8 1 1 +2 -1.75 0 1 2 1 1 +2 -1 0 1 2 1 1 +10 -6.25 0 1 -6.25 1 1 +6 -6.75 0 0 0 1 1 +9 -2.25 0 1 -2.25 1 1 +2 -0.75 0 1 2 1 1 +12 0 4 1 12 1 1 +6 -3 0 1 -3 1 1 +3 0 1 1 3 1 1 +2 -3 0 0 0 1 1 +10 -13.75 0 1 -13.75 1 1 +6 -2.25 0 1 6 1 1 +5 -1.88 0 1
-1.88 1 1 +12 -13.5 0 1 12 1 1 +22 0 10 1 22 1 1 +9 -12.38 0 0 0 1 1 +26 0 10 1 26 1 1 +12 -10.5 0 1 -10.5 1 1 +10 -2.5 0 1 -2.5 1 1 +25 0 10 1 25 1 1 +9 -15.75 0 1 9 1 1 +7 0 3 1 0 1 1 +10 -10 0 1 10 1 1 +12 -15 0 0 0 1 1 +12 0 6 1 0 1 1 +6 -4.5 0 1 -4.5 1 1 +8 -13 0 0 0 1 1 +10 -16.25 0 0 0 1 1 +5 -1.25 0 1 5 1 1 +4 -4 0 1 4 1 1 +5 -3.75 0 1 5 1 1 +6 -8.25 0 0 0 1 1 +8 -15 0 0 0 1 1 +8 -8 0 1 -8 1 1 +2 -2.75 0 1 -2.75 1 1 +6 -12 0 0 0 1 1 +2 0 1 1 2 1 1 +2 -1.25 0 1 -1.25 1 1 +9 -18 0 0 0 1 1 +6 -9 0 1 -9 1 1 +10 -8.75 0 1 -8.75 1 1 +4 -7.5 0 0 0 1 1 +13 0 6 1 0 1 1 +10 -11.25 0 0 0 1 1 +4 -3 0 1 4 1 1 +10 -5 0 1 10 1 1 +8 -2 0 1 -2 1 1 +4 -2.5 0 0 0 1 1 +2 -3.5 0 0 0 1 1 +2 -2.5 0 1 2 1 1 +6 -3.75 0 0 0 1 1 +8 -3 0 1 8 1 1 +2 -3.25 0 0 0 1 1 +8 -9 0 0 0 1 1 +6 -6 0 0 0 1 1 +8 -11 0 0 0 1 1 +5 -8.75 0 0 0 1 1 +6 -9.75 0 0 0 1 1 +12 -24 0 0 0 1 1 +4 -6.5 0 0 0 1 1 +5 -10 0 0 0 1 1 +30 0 12 1 0 1 1 +12 -18 0 0 0 1 1 +9 -9 0 0 0 1 1 +5 -5 0 1 -5 1 1 +5 -9.38 0 0 0 1 1 +10 -12.5 0 0 0 1 1 +10 -18.75 0 0 0 1 1 +5 -2.5 0 1 -2.5 1 1 +9 -14.63 0 0 0 1 1 +28 0 13 1 0 1 1 +5 -6.88 0 0 0 1 1 +4 -3.5 0 0 0 1 1 +12 -16.5 0 0 0 1 1 +5 -8.13 0 0 0 1 1 +9 -16.88 0 0 0 1 1 +9 -3.38 0 1 -3.38 1 1 +12 0 5 1 0 1 1 +4 -8 0 0 0 1 1 +8 -12 0 0 0 1 1 +8 -4 0 0 0 1 1 +2 -4 0 0 0 1 1 +12 -9 0 1 -9 1 1 +4 -1.5 0 1 4 1 1 +6 -10.5 0 0 0 1 1 +5 -3.13 0 1 5 1 1 +10 -15 0 0 0 1 1 +23 0 10 0 10 1 1 +12 -7.5 0 1 -7.5 1 1 +2 -0.5 0 1 -0.5 1 1 +4 0 2 0 2 1 1 +6 -1.5 0 1 -1.5 1 1 +4 -1 0 1 4 1 1 +10 -20 0 0 0 1 1 +12 -22.5 0 0 0 1 1 +25 0 9 1 0 1 1 +13 0 5 0 5 1 1 +6 -5.25 0 0 0 1 1 +9 -13.5 0 0 0 1 1 +5 0 2 0 2 1 1 +12 -6 0 1 -6 1 1 +5 -6.25 0 0 0 1 1 +10 -3.75 0 1 10 1 1 +9 -7.88 0 0 0 1 1 +8 -6 0 0 0 1 1 +4 -5.5 0 0 0 1 1 +19 0 8 0 8 1 1 +10 -7.5 0 0 0 1 1 +4 -6 0 0 0 1 1 +8 0 3 0 3 1 1 +12 -21 0 0 0 1 1 +4 -2 0 0 0 1 1 +4 -5 0 0 0 1 1 +12 -12 0 0 0 1 1 +8 -5 0 1 -5 1 1 +26 0 12 1 0 1 1 +8 -10 0 0 0 1 1 +5 -5.63 0 0 0 1 1 +2 -1 0 1 2 1 2 +9 -6.75 0 1 -6.75 1 2 +2 -4 0 0 0 1 2 +2 -3.25 0 0 0 1 2 +4 -6.5 0 1 -6.5 1 2 +5 -5.63 0 0 0 1 2 +8 -8 0 1 -8 1 2 +12 -18 0 1 12 1 2 +2 -2.5 0 0 0 1 2 +3 0 1 1 3 1 2 +12 -16.5 0 1 12 1 2 +10 -12.5 0 1 -12.5 1 2 +5 -1.25 0 1 5 1 2 +19 0 8 1 19 1 2 +8 -9 0 0 0 1 2 +5 -10 0 0 0 1 2 +25 0 10 1 25 1 2 +7 0 3 0 3 1 2 +6 -11.25 0 0 0 1 2 +6 -1.5 0 1 -1.5 1 2 +4 -1.5 0 1 4 1 2 +10 -5 0 1 10 1 2 +10 -3.75 0 1 10 1 2 +6 -4.5 0 0 0 1 2 +12 -19.5 0 0 0 1 2 +5 -4.38 0 0 0 1 2 +8 -11 0 0 0 1 2 +2 -0.75 0 1 2 1 2 +2 -1.5 0 0 0 1 2 +6 -6.75 0 0 0 1 2 +4 -6 0 0 0 1 2 +10 -16.25 0 1 -16.25 1 2 +12 -15 0 1 -15 1 2 +6 -5.25 0 1 6 1 2 +12 -21 0 1 12 1 2 +4 -3 0 1 4 1 2 +12 -22.5 0 1 12 1 2 +2 -3.75 0 0 0 1 2 +6 -12 0 1 -12 1 2 +5 -8.13 0 1 5 1 2 +10 -8.75 0 1 -8.75 1 2 +12 -6 0 1 -6 1 2 +5 -5 0 1 -5 1 2 +22 0 10 1 22 1 2 +12 -13.5 0 1 12 1 2 +8 -7 0 1 8 1 2 +4 -3.5 0 0 0 1 2 +9 -12.38 0 1 9 1 2 +10 -7.5 0 1 -7.5 1 2 +26 0 10 1 26 1 2 +12 -4.5 0 1 -4.5 1 2 +8 -15 0 0 0 1 2 +2 -1.75 0 0 0 1 2 +12 0 6 1 0 1 2 +9 -3.38 0 1 -3.38 1 2 +2 -3 0 0 0 1 2 +9 -5.63 0 0 0 1 2 +2 -3.5 0 0 0 1 2 +8 -12 0 0 0 1 2 +10 -18.75 0 1 10 1 2 +4 0 2 1 4 1 2 +2 -2.25 0 0 0 1 2 +9 -2.25 0 1 -2.25 1 2 +10 -13.75 0 1 -13.75 1 2 +28 0 13 1 0 1 2 +4 -2.5 0 1 4 1 2 +9 -15.75 0 1 9 1 2 +10 -15 0 0 0 1 2 +10 -10 0 1 10 1 2 +9 -18 0 0 0 1 2 +12 -24 0 1 -24 1 2 +13 0 5 1 13 1 2 +5 -1.88 0 1 -1.88 1 2 +4 -4.5 0 1 4 1 2 +9 -7.88 0 1 9 1 2 +9 -9 0 1 9 1 2 +25 0 9 1 0 1 2 +12 -12 0 1 -12 1 2 +6 -2.25 0 1 6 1 2 +8 -5 0 1 -5 1 2 +4 -5.5 0 1 -5.5 1 2 +2 -1.25 0 1 -1.25 1 2 +9 -13.5 0 1 -13.5 1 2 +9 -4.5 0 1 9 1 2 +10 -11.25 0 1 10 1 2 +6 -3 0 1 -3 
1 2 +10 -2.5 0 1 -2.5 1 2 +12 0 4 1 12 1 2 +10 -20 0 1 10 1 2 +5 -3.75 0 1 5 1 2 +9 -10.13 0 1 -10.13 1 2 +4 -7 0 1 -7 1 2 +12 -10.5 0 1 -10.5 1 2 +8 -16 0 1 8 1 2 +4 -7.5 0 0 0 1 2 +8 0 3 1 8 1 2 +6 -10.5 0 0 0 1 2 +6 -9.75 0 0 0 1 2 +5 -8.75 0 0 0 1 2 +5 -2.5 0 1 -2.5 1 2 +13 0 6 1 0 1 2 +23 0 10 1 0 1 2 +8 -4 0 1 -4 1 2 +9 -11.25 0 1 9 1 2 +5 -6.88 0 0 0 1 2 +4 -4 0 1 4 1 2 +10 -17.5 0 0 0 1 2 +8 -13 0 0 0 1 2 +26 0 12 1 0 1 2 +6 -8.25 0 1 -8.25 1 2 +9 -14.63 0 1 9 1 2 +8 -2 0 1 -2 1 2 +10 -6.25 0 1 -6.25 1 2 +8 -14 0 0 0 1 2 +12 0 5 1 0 1 2 +8 -10 0 0 0 1 2 +30 0 12 1 0 1 2 +5 -7.5 0 0 0 1 2 +5 0 2 1 0 1 2 +6 -3.75 0 1 6 1 2 +6 -6 0 1 -6 1 2 +4 -2 0 1 -2 1 2 +12 -7.5 0 1 -7.5 1 2 +5 -6.25 0 1 5 1 2 +4 -5 0 1 4 1 2 +2 -2.75 0 1 -2.75 1 2 +2 -2 0 1 2 1 2 +6 -9 0 1 -9 1 2 +5 -3.13 0 1 5 1 2 +12 -9 0 1 -9 1 2 +4 -8 0 1 -8 1 2 +4 -1 0 1 4 1 2 +2 0 1 1 2 1 2 +9 -16.88 0 1 9 1 2 +8 -6 0 1 -6 1 2 +2 -0.5 0 1 -0.5 1 2 +6 -7.5 0 1 -7.5 1 2 +8 -3 0 1 8 1 2 +12 -3 0 1 12 1 2 +5 -9.38 0 1 -9.38 1 2 +6 -9.75 0 0 0 1 3 +12 -13.5 0 0 0 1 3 +8 -7 0 1 8 1 3 +10 -7.5 0 0 0 1 3 +2 -2.25 0 0 0 1 3 +6 -8.25 0 0 0 1 3 +10 -16.25 0 0 0 1 3 +3 0 1 1 3 1 3 +4 -3 0 0 0 1 3 +8 -2 0 1 -2 1 3 +4 -2.5 0 0 0 1 3 +5 -5.63 0 0 0 1 3 +5 0 2 1 0 1 3 +30 0 12 1 0 1 3 +9 -4.5 0 0 0 1 3 +4 -7.5 0 0 0 1 3 +26 0 10 0 10 1 3 +10 -6.25 0 0 0 1 3 +2 -4 0 0 0 1 3 +4 -5 0 0 0 1 3 +5 -1.88 0 1 -1.88 1 3 +23 0 10 1 0 1 3 +8 -3 0 0 0 1 3 +8 -12 0 0 0 1 3 +10 -2.5 0 0 0 1 3 +5 -8.13 0 0 0 1 3 +8 -9 0 0 0 1 3 +2 -3 0 0 0 1 3 +9 -11.25 0 0 0 1 3 +9 -12.38 0 0 0 1 3 +12 -15 0 0 0 1 3 +8 -10 0 0 0 1 3 +4 -1 0 1 4 1 3 +8 0 3 1 8 1 3 +4 -3.5 0 0 0 1 3 +8 -8 0 0 0 1 3 +10 -11.25 0 0 0 1 3 +10 -5 0 1 10 1 3 +9 -13.5 0 0 0 1 3 +2 -0.75 0 1 2 1 3 +5 -4.38 0 0 0 1 3 +2 -1.5 0 0 0 1 3 +2 -3.75 0 0 0 1 3 +5 -3.75 0 0 0 1 3 +9 -16.88 0 0 0 1 3 +9 -3.38 0 1 -3.38 1 3 +5 -10 0 0 0 1 3 +26 0 12 0 12 1 3 +5 -9.38 0 0 0 1 3 +6 -1.5 0 1 -1.5 1 3 +10 -10 0 0 0 1 3 +2 -1.25 0 1 -1.25 1 3 +9 -14.63 0 0 0 1 3 +6 -4.5 0 0 0 1 3 +5 -5 0 0 0 1 3 +5 -7.5 0 0 0 1 3 +8 -13 0 0 0 1 3 +5 -3.13 0 0 0 1 3 +8 -5 0 0 0 1 3 +8 -11 0 0 0 1 3 +6 -6.75 0 0 0 1 3 +5 -8.75 0 0 0 1 3 +2 0 1 1 2 1 3 +9 -5.63 0 0 0 1 3 +6 -6 0 0 0 1 3 +4 -5.5 0 0 0 1 3 +6 -3 0 0 0 1 3 +12 -19.5 0 0 0 1 3 +10 -13.75 0 0 0 1 3 +10 -8.75 0 0 0 1 3 +5 -6.88 0 0 0 1 3 +6 -7.5 0 0 0 1 3 +10 -12.5 0 0 0 1 3 +9 -6.75 0 0 0 1 3 +4 -6 0 0 0 1 3 +8 -4 0 1 -4 1 3 +2 -1 0 1 2 1 3 +12 -24 0 0 0 1 3 +12 -6 0 0 0 1 3 +2 -2 0 0 0 1 3 +4 -7 0 0 0 1 3 +12 -9 0 0 0 1 3 +6 -11.25 0 0 0 1 3 +25 0 10 0 10 1 3 +28 0 13 0 13 1 3 +2 -2.75 0 0 0 1 3 +12 -10.5 0 0 0 1 3 +8 -14 0 0 0 1 3 +4 -6.5 0 0 0 1 3 +4 0 2 1 4 1 3 +10 -15 0 0 0 1 3 +12 0 5 1 0 1 3 +10 -18.75 0 0 0 1 3 +12 -3 0 1 12 1 3 +4 -4 0 0 0 1 3 +9 -7.88 0 0 0 1 3 +9 -2.25 0 1 -2.25 1 3 +2 -1.75 0 0 0 1 3 +12 0 6 1 0 1 3 +5 -2.5 0 0 0 1 3 +4 -4.5 0 0 0 1 3 +8 -6 0 0 0 1 3 +12 -18 0 0 0 1 3 +12 -16.5 0 0 0 1 3 +22 0 10 0 10 1 3 +12 -21 0 0 0 1 3 +12 -4.5 0 0 0 1 3 +12 -12 0 0 0 1 3 +19 0 8 0 8 1 3 +2 -2.5 0 0 0 1 3 +12 0 4 1 12 1 3 +4 -2 0 0 0 1 3 +9 -9 0 0 0 1 3 +9 -10.13 0 0 0 1 3 +6 -2.25 0 1 6 1 3 +2 -0.5 0 1 -0.5 1 3 +10 -3.75 0 1 10 1 3 +13 0 5 1 13 1 3 +4 -1.5 0 1 4 1 3 +5 -1.25 0 1 5 1 3 +6 -9 0 0 0 1 3 +10 -17.5 0 0 0 1 3 +6 -12 0 0 0 1 3 +6 -5.25 0 0 0 1 3 +12 -22.5 0 0 0 1 3 +8 -16 0 0 0 1 3 +9 -15.75 0 0 0 1 3 +10 -20 0 0 0 1 3 +13 0 6 1 0 1 3 +4 -8 0 0 0 1 3 +12 -7.5 0 0 0 1 3 +9 -18 0 0 0 1 3 +2 -3.25 0 0 0 1 3 +7 0 3 0 3 1 3 +6 -3.75 0 0 0 1 3 +5 -6.25 0 0 0 1 3 +8 -15 0 0 0 1 3 +25 0 9 0 9 1 3 +2 -3.5 0 0 0 1 3 +6 -10.5 0 0 0 1 3 +9 -10.13 0 1 
-10.13 1 4 +12 -10.5 0 0 0 1 4 +25 0 10 1 25 1 4 +4 -7 0 1 -7 1 4 +9 -7.88 0 0 0 1 4 +5 -3.13 0 1 5 1 4 +5 -8.13 0 1 5 1 4 +8 -7 0 0 0 1 4 +12 -6 0 1 -6 1 4 +12 -24 0 0 0 1 4 +12 -21 0 0 0 1 4 +4 -2.5 0 1 4 1 4 +6 -9 0 0 0 1 4 +10 -15 0 1 10 1 4 +8 -6 0 1 -6 1 4 +13 0 6 1 0 1 4 +6 -12 0 1 -12 1 4 +6 -4.5 0 0 0 1 4 +9 -16.88 0 0 0 1 4 +10 -18.75 0 1 10 1 4 +9 -3.38 0 1 -3.38 1 4 +6 -9.75 0 1 -9.75 1 4 +2 -1.75 0 0 0 1 4 +5 0 2 0 2 1 4 +8 -5 0 1 -5 1 4 +8 -9 0 0 0 1 4 +12 0 6 1 0 1 4 +12 0 4 1 12 1 4 +2 -2.5 0 0 0 1 4 +6 -3 0 1 -3 1 4 +10 -7.5 0 1 -7.5 1 4 +5 -2.5 0 1 -2.5 1 4 +5 -3.75 0 1 5 1 4 +10 -3.75 0 1 10 1 4 +2 -3 0 0 0 1 4 +10 -6.25 0 1 -6.25 1 4 +4 -7.5 0 0 0 1 4 +8 -16 0 0 0 1 4 +5 -6.25 0 1 5 1 4 +4 0 2 1 4 1 4 +10 -11.25 0 1 10 1 4 +5 -6.88 0 0 0 1 4 +5 -7.5 0 1 5 1 4 +26 0 12 0 12 1 4 +8 -13 0 1 8 1 4 +4 -4.5 0 0 0 1 4 +8 -10 0 1 -10 1 4 +6 -3.75 0 1 6 1 4 +5 -5.63 0 0 0 1 4 +9 -18 0 0 0 1 4 +12 -13.5 0 1 12 1 4 +7 0 3 0 3 1 4 +8 -14 0 1 8 1 4 +2 -1.5 0 0 0 1 4 +10 -2.5 0 1 -2.5 1 4 +13 0 5 1 13 1 4 +9 -15.75 0 0 0 1 4 +8 -12 0 0 0 1 4 +28 0 13 1 0 1 4 +6 -7.5 0 0 0 1 4 +10 -16.25 0 0 0 1 4 +12 -7.5 0 1 -7.5 1 4 +5 -5 0 0 0 1 4 +2 -2 0 1 2 1 4 +22 0 10 0 10 1 4 +2 -1 0 1 2 1 4 +3 0 1 1 3 1 4 +4 -5.5 0 0 0 1 4 +2 -2.25 0 1 2 1 4 +6 -2.25 0 1 6 1 4 +4 -6.5 0 1 -6.5 1 4 +9 -12.38 0 0 0 1 4 +10 -13.75 0 1 -13.75 1 4 +10 -17.5 0 0 0 1 4 +4 -5 0 1 4 1 4 +9 -11.25 0 1 9 1 4 +10 -10 0 0 0 1 4 +2 -3.25 0 0 0 1 4 +5 -8.75 0 1 5 1 4 +5 -10 0 0 0 1 4 +9 -2.25 0 1 -2.25 1 4 +6 -6.75 0 1 6 1 4 +12 -16.5 0 1 12 1 4 +9 -14.63 0 0 0 1 4 +4 -8 0 0 0 1 4 +6 -5.25 0 1 6 1 4 +9 -6.75 0 1 -6.75 1 4 +12 -12 0 1 -12 1 4 +4 -1 0 1 4 1 4 +12 -15 0 1 -15 1 4 +4 -3.5 0 1 -3.5 1 4 +2 -1.25 0 1 -1.25 1 4 +30 0 12 0 12 1 4 +12 -19.5 0 1 12 1 4 +12 -3 0 1 12 1 4 +5 -1.25 0 0 0 1 4 +5 -1.88 0 1 -1.88 1 4 +2 -3.5 0 0 0 1 4 +12 -9 0 1 -9 1 4 +10 -20 0 0 0 1 4 +8 -4 0 1 -4 1 4 +12 0 5 1 0 1 4 +2 0 1 0 1 1 4 +4 -1.5 0 1 4 1 4 +2 -3.75 0 0 0 1 4 +6 -10.5 0 1 -10.5 1 4 +4 -2 0 1 -2 1 4 +23 0 10 1 0 1 4 +12 -18 0 1 12 1 4 +6 -8.25 0 0 0 1 4 +26 0 10 1 26 1 4 +10 -8.75 0 1 -8.75 1 4 +2 -0.75 0 1 2 1 4 +5 -9.38 0 1 -9.38 1 4 +25 0 9 1 0 1 4 +9 -4.5 0 0 0 1 4 +10 -5 0 1 10 1 4 +2 -4 0 0 0 1 4 +2 -2.75 0 1 -2.75 1 4 +4 -6 0 0 0 1 4 +10 -12.5 0 1 -12.5 1 4 +12 -22.5 0 0 0 1 4 +4 -4 0 1 4 1 4 +2 -0.5 0 1 -0.5 1 4 +8 -2 0 1 -2 1 4 +4 -3 0 0 0 1 4 +6 -11.25 0 1 6 1 4 +8 -15 0 1 -15 1 4 +8 -11 0 0 0 1 4 +12 -4.5 0 1 -4.5 1 4 +19 0 8 1 19 1 4 +6 -6 0 1 -6 1 4 +5 -4.38 0 0 0 1 4 +9 -9 0 1 9 1 4 +6 -1.5 0 1 -1.5 1 4 +9 -13.5 0 0 0 1 4 +9 -5.63 0 1 -5.63 1 4 +8 -8 0 1 -8 1 4 +8 0 3 0 3 1 4 +8 -3 0 0 0 1 4 +9 -11.25 0 1 9 1 5 +8 -16 0 0 0 1 5 +9 -5.63 0 0 0 1 5 +9 -4.5 0 1 9 1 5 +2 -2 0 1 2 1 5 +12 -19.5 0 0 0 1 5 +4 -4.5 0 0 0 1 5 +2 -3.75 0 1 -3.75 1 5 +2 -2.25 0 0 0 1 5 +12 -4.5 0 1 -4.5 1 5 +9 -10.13 0 0 0 1 5 +12 -3 0 1 12 1 5 +10 -17.5 0 0 0 1 5 +5 -4.38 0 1 -4.38 1 5 +5 -7.5 0 1 5 1 5 +6 -11.25 0 0 0 1 5 +2 -1.5 0 1 -1.5 1 5 +9 -6.75 0 1 -6.75 1 5 +4 -7 0 1 -7 1 5 +8 -7 0 1 8 1 5 +2 -1.75 0 0 0 1 5 +2 -1 0 1 2 1 5 +10 -6.25 0 1 -6.25 1 5 +6 -6.75 0 1 6 1 5 +9 -2.25 0 1 -2.25 1 5 +2 -0.75 0 1 2 1 5 +12 0 4 0 4 1 5 +6 -3 0 1 -3 1 5 +3 0 1 1 3 1 5 +2 -3 0 0 0 1 5 +10 -13.75 0 0 0 1 5 +6 -2.25 0 1 6 1 5 +5 -1.88 0 1 -1.88 1 5 +12 -13.5 0 0 0 1 5 +22 0 10 0 10 1 5 +9 -12.38 0 0 0 1 5 +26 0 10 0 10 1 5 +12 -10.5 0 0 0 1 5 +10 -2.5 0 1 -2.5 1 5 +25 0 10 0 10 1 5 +9 -15.75 0 0 0 1 5 +7 0 3 0 3 1 5 +10 -10 0 0 0 1 5 +12 -15 0 0 0 1 5 +12 0 6 0 6 1 5 +6 -4.5 0 0 0 1 5 +8 -13 0 0 0 1 5 +10 -16.25 0 0 0 1 5 +5 -1.25 0 1 5 1 5 +4 -4 0 1 
4 1 5 +5 -3.75 0 1 5 1 5 +6 -8.25 0 0 0 1 5 +8 -15 0 0 0 1 5 +8 -8 0 1 -8 1 5 +2 -2.75 0 1 -2.75 1 5 +6 -12 0 1 -12 1 5 +2 0 1 1 2 1 5 +2 -1.25 0 1 -1.25 1 5 +9 -18 0 0 0 1 5 +6 -9 0 0 0 1 5 +10 -8.75 0 1 -8.75 1 5 +4 -7.5 0 0 0 1 5 +13 0 6 1 0 1 5 +10 -11.25 0 0 0 1 5 +4 -3 0 1 4 1 5 +10 -5 0 1 10 1 5 +8 -2 0 1 -2 1 5 +4 -2.5 0 1 4 1 5 +2 -3.5 0 0 0 1 5 +2 -2.5 0 0 0 1 5 +6 -3.75 0 1 6 1 5 +8 -3 0 1 8 1 5 +2 -3.25 0 0 0 1 5 +8 -9 0 0 0 1 5 +6 -6 0 1 -6 1 5 +8 -11 0 0 0 1 5 +5 -8.75 0 1 5 1 5 +6 -9.75 0 0 0 1 5 +12 -24 0 0 0 1 5 +4 -6.5 0 0 0 1 5 +5 -10 0 0 0 1 5 +30 0 12 0 12 1 5 +12 -18 0 0 0 1 5 +9 -9 0 1 9 1 5 +5 -5 0 1 -5 1 5 +5 -9.38 0 0 0 1 5 +10 -12.5 0 0 0 1 5 +10 -18.75 0 0 0 1 5 +5 -2.5 0 1 -2.5 1 5 +9 -14.63 0 0 0 1 5 +28 0 13 0 13 1 5 +5 -6.88 0 0 0 1 5 +4 -3.5 0 1 -3.5 1 5 +12 -16.5 0 0 0 1 5 +5 -8.13 0 0 0 1 5 +9 -16.88 0 0 0 1 5 +9 -3.38 0 1 -3.38 1 5 +12 0 5 1 0 1 5 +4 -8 0 0 0 1 5 +8 -12 0 1 8 1 5 +8 -4 0 1 -4 1 5 +2 -4 0 0 0 1 5 +12 -9 0 1 -9 1 5 +4 -1.5 0 1 4 1 5 +6 -10.5 0 0 0 1 5 +5 -3.13 0 1 5 1 5 +10 -15 0 0 0 1 5 +23 0 10 0 10 1 5 +12 -7.5 0 1 -7.5 1 5 +2 -0.5 0 1 -0.5 1 5 +4 0 2 0 2 1 5 +6 -1.5 0 1 -1.5 1 5 +4 -1 0 1 4 1 5 +10 -20 0 0 0 1 5 +12 -22.5 0 0 0 1 5 +25 0 9 0 9 1 5 +13 0 5 1 13 1 5 +6 -5.25 0 0 0 1 5 +9 -13.5 0 0 0 1 5 +5 0 2 0 2 1 5 +12 -6 0 1 -6 1 5 +5 -6.25 0 1 5 1 5 +10 -3.75 0 1 10 1 5 +9 -7.88 0 0 0 1 5 +8 -6 0 1 -6 1 5 +4 -5.5 0 0 0 1 5 +19 0 8 0 8 1 5 +10 -7.5 0 1 -7.5 1 5 +4 -6 0 0 0 1 5 +8 -14 0 0 0 1 5 +8 0 3 0 3 1 5 +12 -21 0 0 0 1 5 +4 -2 0 1 -2 1 5 +4 -5 0 0 0 1 5 +6 -7.5 0 1 -7.5 1 5 +12 -12 0 1 -12 1 5 +8 -5 0 1 -5 1 5 +26 0 12 0 12 1 5 +8 -10 0 0 0 1 5 +5 -5.63 0 0 0 1 5 +2 -1 0 1 2 1 6 +9 -6.75 0 1 -6.75 1 6 +2 -4 0 0 0 1 6 +2 -3.25 0 1 2 1 6 +4 -6.5 0 0 0 1 6 +5 -5.63 0 1 -5.63 1 6 +8 -8 0 1 -8 1 6 +12 -18 0 0 0 1 6 +2 -2.5 0 1 2 1 6 +3 0 1 1 3 1 6 +12 -16.5 0 1 12 1 6 +10 -12.5 0 1 -12.5 1 6 +5 -1.25 0 1 5 1 6 +19 0 8 1 19 1 6 +8 -9 0 1 -9 1 6 +5 -10 0 0 0 1 6 +25 0 10 1 25 1 6 +7 0 3 1 0 1 6 +6 -11.25 0 1 6 1 6 +6 -1.5 0 1 -1.5 1 6 +4 -1.5 0 1 4 1 6 +10 -5 0 1 10 1 6 +10 -3.75 0 1 10 1 6 +6 -4.5 0 1 -4.5 1 6 +12 -19.5 0 1 12 1 6 +5 -4.38 0 1 -4.38 1 6 +8 -11 0 0 0 1 6 +2 -0.75 0 1 2 1 6 +2 -1.5 0 1 -1.5 1 6 +6 -6.75 0 1 6 1 6 +4 -6 0 1 4 1 6 +10 -16.25 0 1 -16.25 1 6 +12 -15 0 1 -15 1 6 +6 -5.25 0 1 6 1 6 +12 -21 0 0 0 1 6 +4 -3 0 1 4 1 6 +12 -22.5 0 0 0 1 6 +2 -3.75 0 1 -3.75 1 6 +6 -12 0 1 -12 1 6 +5 -8.13 0 1 5 1 6 +10 -8.75 0 1 -8.75 1 6 +12 -6 0 1 -6 1 6 +5 -5 0 1 -5 1 6 +22 0 10 0 10 1 6 +12 -13.5 0 0 0 1 6 +8 -7 0 1 8 1 6 +4 -3.5 0 1 -3.5 1 6 +9 -12.38 0 1 9 1 6 +10 -7.5 0 1 -7.5 1 6 +26 0 10 1 26 1 6 +12 -4.5 0 1 -4.5 1 6 +8 -15 0 1 -15 1 6 +2 -1.75 0 1 2 1 6 +12 0 6 1 0 1 6 +9 -3.38 0 1 -3.38 1 6 +2 -3 0 1 -3 1 6 +9 -5.63 0 1 -5.63 1 6 +2 -3.5 0 1 -3.5 1 6 +8 -12 0 0 0 1 6 +10 -18.75 0 0 0 1 6 +4 0 2 1 4 1 6 +2 -2.25 0 1 2 1 6 +9 -2.25 0 1 -2.25 1 6 +10 -13.75 0 0 0 1 6 +28 0 13 0 13 1 6 +4 -2.5 0 1 4 1 6 +9 -15.75 0 0 0 1 6 +10 -15 0 1 10 1 6 +10 -10 0 1 10 1 6 +9 -18 0 0 0 1 6 +12 -24 0 0 0 1 6 +13 0 5 1 13 1 6 +5 -1.88 0 1 -1.88 1 6 +4 -4.5 0 1 4 1 6 +9 -7.88 0 1 9 1 6 +9 -9 0 1 9 1 6 +25 0 9 1 0 1 6 +12 -12 0 1 -12 1 6 +6 -2.25 0 1 6 1 6 +8 -5 0 1 -5 1 6 +4 -5.5 0 1 -5.5 1 6 +2 -1.25 0 1 -1.25 1 6 +9 -13.5 0 0 0 1 6 +9 -4.5 0 1 9 1 6 +10 -11.25 0 1 10 1 6 +6 -3 0 1 -3 1 6 +10 -2.5 0 1 -2.5 1 6 +12 0 4 1 12 1 6 +10 -20 0 0 0 1 6 +5 -3.75 0 1 5 1 6 +9 -10.13 0 1 -10.13 1 6 +4 -7 0 0 0 1 6 +12 -10.5 0 1 -10.5 1 6 +8 -16 0 0 0 1 6 +4 -7.5 0 1 4 1 6 +8 0 3 1 8 1 6 +6 -10.5 0 0 0 1 6 +6 -9.75 0 1 -9.75 1 6 +5 -8.75 0 0 0 1 6 
+5 -2.5 0 1 -2.5 1 6 +13 0 6 1 0 1 6 +23 0 10 1 0 1 6 +8 -4 0 1 -4 1 6 +9 -11.25 0 1 9 1 6 +5 -6.88 0 1 -6.88 1 6 +4 -4 0 1 4 1 6 +10 -17.5 0 0 0 1 6 +26 0 12 0 12 1 6 +6 -8.25 0 1 -8.25 1 6 +9 -14.63 0 1 9 1 6 +8 -2 0 1 -2 1 6 +10 -6.25 0 1 -6.25 1 6 +8 -14 0 1 8 1 6 +12 0 5 0 5 1 6 +8 -10 0 1 -10 1 6 +30 0 12 1 0 1 6 +5 -7.5 0 1 5 1 6 +5 0 2 1 0 1 6 +6 -3.75 0 1 6 1 6 +6 -6 0 1 -6 1 6 +4 -2 0 1 -2 1 6 +12 -7.5 0 1 -7.5 1 6 +5 -6.25 0 0 0 1 6 +4 -5 0 1 4 1 6 +2 -2.75 0 1 -2.75 1 6 +2 -2 0 1 2 1 6 +6 -9 0 1 -9 1 6 +5 -3.13 0 1 5 1 6 +12 -9 0 1 -9 1 6 +4 -8 0 1 -8 1 6 +4 -1 0 1 4 1 6 +2 0 1 1 2 1 6 +9 -16.88 0 0 0 1 6 +8 -6 0 1 -6 1 6 +2 -0.5 0 1 -0.5 1 6 +6 -7.5 0 1 -7.5 1 6 +8 -3 0 1 8 1 6 +12 -3 0 1 12 1 6 +5 -9.38 0 1 -9.38 1 6 +6 -9.75 0 0 0 1 7 +12 -13.5 0 0 0 1 7 +8 -7 0 1 8 1 7 +10 -7.5 0 1 -7.5 1 7 +2 -2.25 0 1 2 1 7 +6 -8.25 0 0 0 1 7 +10 -16.25 0 0 0 1 7 +3 0 1 1 3 1 7 +4 -3 0 1 4 1 7 +8 -2 0 1 -2 1 7 +4 -2.5 0 1 4 1 7 +5 -5.63 0 1 -5.63 1 7 +5 0 2 0 2 1 7 +30 0 12 1 0 1 7 +9 -4.5 0 1 9 1 7 +4 -7.5 0 0 0 1 7 +26 0 10 1 26 1 7 +10 -6.25 0 1 -6.25 1 7 +2 -4 0 0 0 1 7 +4 -5 0 0 0 1 7 +5 -1.88 0 1 -1.88 1 7 +23 0 10 0 10 1 7 +8 -3 0 1 8 1 7 +8 -12 0 0 0 1 7 +10 -2.5 0 1 -2.5 1 7 +5 -8.13 0 0 0 1 7 +8 -9 0 1 -9 1 7 +2 -3 0 0 0 1 7 +9 -11.25 0 0 0 1 7 +9 -12.38 0 0 0 1 7 +12 -15 0 1 -15 1 7 +8 -10 0 0 0 1 7 +4 -1 0 1 4 1 7 +8 0 3 1 8 1 7 +4 -3.5 0 0 0 1 7 +8 -8 0 1 -8 1 7 +10 -11.25 0 0 0 1 7 +10 -5 0 1 10 1 7 +9 -13.5 0 0 0 1 7 +2 -0.75 0 1 2 1 7 +5 -4.38 0 0 0 1 7 +2 -1.5 0 1 -1.5 1 7 +2 -3.75 0 0 0 1 7 +5 -3.75 0 1 5 1 7 +9 -16.88 0 0 0 1 7 +9 -3.38 0 1 -3.38 1 7 +5 -10 0 0 0 1 7 +26 0 12 1 0 1 7 +5 -9.38 0 0 0 1 7 +6 -1.5 0 1 -1.5 1 7 +10 -10 0 1 10 1 7 +2 -1.25 0 1 -1.25 1 7 +9 -14.63 0 0 0 1 7 +6 -4.5 0 1 -4.5 1 7 +5 -5 0 0 0 1 7 +5 -7.5 0 0 0 1 7 +8 -13 0 0 0 1 7 +5 -3.13 0 1 5 1 7 +8 -5 0 1 -5 1 7 +8 -11 0 0 0 1 7 +6 -6.75 0 0 0 1 7 +2 0 1 1 2 1 7 +9 -5.63 0 0 0 1 7 +6 -6 0 0 0 1 7 +4 -5.5 0 0 0 1 7 +6 -3 0 1 -3 1 7 +12 -19.5 0 0 0 1 7 +10 -13.75 0 0 0 1 7 +10 -8.75 0 0 0 1 7 +5 -6.88 0 0 0 1 7 +6 -7.5 0 0 0 1 7 +10 -12.5 0 0 0 1 7 +9 -6.75 0 1 -6.75 1 7 +4 -6 0 0 0 1 7 +8 -4 0 1 -4 1 7 +2 -1 0 1 2 1 7 +12 -24 0 0 0 1 7 +12 -6 0 1 -6 1 7 +2 -2 0 0 0 1 7 +4 -7 0 0 0 1 7 +12 -9 0 1 -9 1 7 +6 -11.25 0 0 0 1 7 +25 0 10 1 25 1 7 +28 0 13 0 13 1 7 +2 -2.75 0 1 -2.75 1 7 +12 -10.5 0 1 -10.5 1 7 +8 -14 0 0 0 1 7 +4 -6.5 0 0 0 1 7 +4 0 2 1 4 1 7 +10 -15 0 0 0 1 7 +12 0 5 1 0 1 7 +10 -18.75 0 0 0 1 7 +12 -3 0 1 12 1 7 +4 -4 0 0 0 1 7 +9 -7.88 0 0 0 1 7 +9 -2.25 0 1 -2.25 1 7 +2 -1.75 0 0 0 1 7 +12 0 6 1 0 1 7 +5 -2.5 0 1 -2.5 1 7 +4 -4.5 0 0 0 1 7 +8 -6 0 0 0 1 7 +12 -18 0 0 0 1 7 +12 -16.5 0 0 0 1 7 +22 0 10 1 22 1 7 +12 -21 0 0 0 1 7 +12 -4.5 0 1 -4.5 1 7 +12 -12 0 0 0 1 7 +19 0 8 1 19 1 7 +2 -2.5 0 0 0 1 7 +12 0 4 1 12 1 7 +4 -2 0 0 0 1 7 +9 -9 0 1 9 1 7 +9 -10.13 0 0 0 1 7 +6 -2.25 0 1 6 1 7 +2 -0.5 0 1 -0.5 1 7 +10 -3.75 0 1 10 1 7 +13 0 5 1 13 1 7 +4 -1.5 0 1 4 1 7 +5 -1.25 0 1 5 1 7 +6 -9 0 0 0 1 7 +10 -17.5 0 0 0 1 7 +6 -12 0 0 0 1 7 +6 -5.25 0 0 0 1 7 +12 -22.5 0 0 0 1 7 +8 -16 0 0 0 1 7 +9 -15.75 0 0 0 1 7 +10 -20 0 0 0 1 7 +13 0 6 1 0 1 7 +4 -8 0 0 0 1 7 +12 -7.5 0 1 -7.5 1 7 +9 -18 0 0 0 1 7 +2 -3.25 0 0 0 1 7 +7 0 3 1 0 1 7 +6 -3.75 0 0 0 1 7 +5 -6.25 0 0 0 1 7 +8 -15 0 0 0 1 7 +25 0 9 1 0 1 7 +2 -3.5 0 0 0 1 7 +6 -10.5 0 0 0 1 7 +9 -10.13 0 0 0 1 8 +12 -10.5 0 1 -10.5 1 8 +25 0 10 1 25 1 8 +4 -7 0 0 0 1 8 +9 -7.88 0 1 9 1 8 +5 -3.13 0 1 5 1 8 +5 -8.13 0 0 0 1 8 +8 -7 0 1 8 1 8 +12 -6 0 1 -6 1 8 +12 -24 0 0 0 1 8 +12 -21 0 0 0 1 8 +4 -2.5 0 1 4 1 8 +6 -9 0 0 0 1 8 +10 -15 0 
0 0 1 8 +8 -6 0 1 -6 1 8 +13 0 6 1 0 1 8 +6 -12 0 0 0 1 8 +6 -4.5 0 1 -4.5 1 8 +9 -16.88 0 0 0 1 8 +10 -18.75 0 0 0 1 8 +9 -3.38 0 1 -3.38 1 8 +6 -9.75 0 0 0 1 8 +2 -1.75 0 1 2 1 8 +5 0 2 1 0 1 8 +8 -5 0 1 -5 1 8 +8 -9 0 0 0 1 8 +12 0 6 0 6 1 8 +12 0 4 1 12 1 8 +2 -2.5 0 0 0 1 8 +6 -3 0 1 -3 1 8 +10 -7.5 0 1 -7.5 1 8 +5 -2.5 0 1 -2.5 1 8 +5 -3.75 0 0 0 1 8 +10 -3.75 0 1 10 1 8 +2 -3 0 0 0 1 8 +10 -6.25 0 1 -6.25 1 8 +4 -7.5 0 0 0 1 8 +8 -16 0 0 0 1 8 +5 -6.25 0 0 0 1 8 +4 0 2 1 4 1 8 +10 -11.25 0 0 0 1 8 +5 -6.88 0 0 0 1 8 +5 -7.5 0 0 0 1 8 +26 0 12 1 0 1 8 +8 -13 0 0 0 1 8 +4 -4.5 0 1 4 1 8 +8 -10 0 0 0 1 8 +6 -3.75 0 1 6 1 8 +5 -5.63 0 1 -5.63 1 8 +9 -18 0 0 0 1 8 +12 -13.5 0 1 12 1 8 +7 0 3 1 0 1 8 +8 -14 0 0 0 1 8 +2 -1.5 0 1 -1.5 1 8 +10 -2.5 0 1 -2.5 1 8 +13 0 5 1 13 1 8 +9 -15.75 0 0 0 1 8 +8 -12 0 0 0 1 8 +28 0 13 1 0 1 8 +6 -7.5 0 1 -7.5 1 8 +10 -16.25 0 0 0 1 8 +12 -7.5 0 1 -7.5 1 8 +5 -5 0 1 -5 1 8 +2 -2 0 1 2 1 8 +22 0 10 1 22 1 8 +2 -1 0 1 2 1 8 +3 0 1 1 3 1 8 +4 -5.5 0 1 -5.5 1 8 +2 -2.25 0 1 2 1 8 +6 -2.25 0 1 6 1 8 +4 -6.5 0 1 -6.5 1 8 +9 -12.38 0 0 0 1 8 +10 -13.75 0 0 0 1 8 +10 -17.5 0 1 10 1 8 +4 -5 0 1 4 1 8 +9 -11.25 0 0 0 1 8 +10 -10 0 1 10 1 8 +2 -3.25 0 0 0 1 8 +5 -8.75 0 0 0 1 8 +5 -10 0 0 0 1 8 +9 -2.25 0 1 -2.25 1 8 +6 -6.75 0 1 6 1 8 +12 -16.5 0 1 12 1 8 +9 -14.63 0 0 0 1 8 +4 -8 0 0 0 1 8 +6 -5.25 0 1 6 1 8 +9 -6.75 0 1 -6.75 1 8 +12 -12 0 1 -12 1 8 +4 -1 0 1 4 1 8 +12 -15 0 1 -15 1 8 +4 -3.5 0 1 -3.5 1 8 +2 -1.25 0 1 -1.25 1 8 +30 0 12 1 0 1 8 +12 -19.5 0 0 0 1 8 +12 -3 0 1 12 1 8 +5 -1.25 0 1 5 1 8 +5 -1.88 0 1 -1.88 1 8 +2 -3.5 0 0 0 1 8 +12 -9 0 1 -9 1 8 +10 -20 0 0 0 1 8 +8 -4 0 1 -4 1 8 +12 0 5 1 0 1 8 +2 0 1 0 1 1 8 +4 -1.5 0 1 4 1 8 +2 -3.75 0 1 -3.75 1 8 +6 -10.5 0 0 0 1 8 +4 -2 0 1 -2 1 8 +23 0 10 0 10 1 8 +12 -18 0 1 12 1 8 +6 -8.25 0 1 -8.25 1 8 +26 0 10 1 26 1 8 +10 -8.75 0 1 -8.75 1 8 +2 -0.75 0 1 2 1 8 +5 -9.38 0 0 0 1 8 +25 0 9 1 0 1 8 +9 -4.5 0 1 9 1 8 +10 -5 0 1 10 1 8 +2 -4 0 1 -4 1 8 +2 -2.75 0 1 -2.75 1 8 +4 -6 0 1 4 1 8 +10 -12.5 0 1 -12.5 1 8 +12 -22.5 0 0 0 1 8 +4 -4 0 1 4 1 8 +2 -0.5 0 1 -0.5 1 8 +8 -2 0 1 -2 1 8 +4 -3 0 1 4 1 8 +6 -11.25 0 0 0 1 8 +8 -15 0 0 0 1 8 +8 -11 0 1 8 1 8 +12 -4.5 0 1 -4.5 1 8 +19 0 8 1 19 1 8 +6 -6 0 1 -6 1 8 +5 -4.38 0 1 -4.38 1 8 +9 -9 0 1 9 1 8 +6 -1.5 0 1 -1.5 1 8 +9 -13.5 0 0 0 1 8 +9 -5.63 0 1 -5.63 1 8 +8 -8 0 1 -8 1 8 +8 0 3 1 8 1 8 +8 -3 0 1 8 1 8 +9 -11.25 0 1 9 1 9 +8 -16 0 0 0 1 9 +9 -5.63 0 1 -5.63 1 9 +9 -4.5 0 1 9 1 9 +2 -2 0 1 2 1 9 +12 -19.5 0 0 0 1 9 +4 -4.5 0 1 4 1 9 +2 -3.75 0 1 -3.75 1 9 +2 -2.25 0 1 2 1 9 +12 -4.5 0 1 -4.5 1 9 +9 -10.13 0 0 0 1 9 +12 -3 0 1 12 1 9 +10 -17.5 0 0 0 1 9 +5 -4.38 0 1 -4.38 1 9 +5 -7.5 0 1 5 1 9 +6 -11.25 0 0 0 1 9 +2 -1.5 0 1 -1.5 1 9 +9 -6.75 0 1 -6.75 1 9 +4 -7 0 0 0 1 9 +8 -7 0 1 8 1 9 +2 -1.75 0 1 2 1 9 +2 -1 0 1 2 1 9 +10 -6.25 0 1 -6.25 1 9 +6 -6.75 0 1 6 1 9 +9 -2.25 0 1 -2.25 1 9 +2 -0.75 0 1 2 1 9 +12 0 4 1 12 1 9 +6 -3 0 1 -3 1 9 +3 0 1 1 3 1 9 +2 -3 0 1 -3 1 9 +10 -13.75 0 0 0 1 9 +6 -2.25 0 1 6 1 9 +5 -1.88 0 1 -1.88 1 9 +12 -13.5 0 0 0 1 9 +22 0 10 1 22 1 9 +9 -12.38 0 0 0 1 9 +26 0 10 1 26 1 9 +12 -10.5 0 0 0 1 9 +10 -2.5 0 1 -2.5 1 9 +25 0 10 1 25 1 9 +9 -15.75 0 0 0 1 9 +7 0 3 1 0 1 9 +10 -10 0 0 0 1 9 +12 -15 0 0 0 1 9 +12 0 6 1 0 1 9 +6 -4.5 0 1 -4.5 1 9 +8 -13 0 0 0 1 9 +10 -16.25 0 0 0 1 9 +5 -1.25 0 1 5 1 9 +4 -4 0 1 4 1 9 +5 -3.75 0 1 5 1 9 +6 -8.25 0 0 0 1 9 +8 -15 0 0 0 1 9 +8 -8 0 1 -8 1 9 +2 -2.75 0 1 -2.75 1 9 +6 -12 0 0 0 1 9 +2 0 1 1 2 1 9 +2 -1.25 0 1 -1.25 1 9 +9 -18 0 0 0 1 9 +6 -9 0 0 0 1 9 +10 -8.75 0 0 0 1 9 +4 -7.5 0 0 0 1 
9 +13 0 6 1 0 1 9 +10 -11.25 0 0 0 1 9 +4 -3 0 1 4 1 9 +10 -5 0 1 10 1 9 +8 -2 0 1 -2 1 9 +4 -2.5 0 1 4 1 9 +2 -3.5 0 1 -3.5 1 9 +2 -2.5 0 1 2 1 9 +6 -3.75 0 1 6 1 9 +8 -3 0 1 8 1 9 +2 -3.25 0 1 2 1 9 +8 -9 0 1 -9 1 9 +6 -6 0 1 -6 1 9 +8 -11 0 0 0 1 9 +5 -8.75 0 0 0 1 9 +6 -9.75 0 0 0 1 9 +12 -24 0 0 0 1 9 +5 -10 0 0 0 1 9 +30 0 12 1 0 1 9 +12 -18 0 0 0 1 9 +9 -9 0 1 9 1 9 +5 -5 0 1 -5 1 9 +5 -9.38 0 0 0 1 9 +10 -12.5 0 0 0 1 9 +10 -18.75 0 0 0 1 9 +5 -2.5 0 1 -2.5 1 9 +9 -14.63 0 0 0 1 9 +28 0 13 1 0 1 9 +5 -6.88 0 1 -6.88 1 9 +4 -3.5 0 1 -3.5 1 9 +12 -16.5 0 0 0 1 9 +5 -8.13 0 0 0 1 9 +9 -16.88 0 0 0 1 9 +9 -3.38 0 1 -3.38 1 9 +12 0 5 1 0 1 9 +4 -8 0 0 0 1 9 +8 -12 0 0 0 1 9 +8 -4 0 1 -4 1 9 +2 -4 0 1 -4 1 9 +12 -9 0 1 -9 1 9 +4 -1.5 0 1 4 1 9 +6 -10.5 0 0 0 1 9 +5 -3.13 0 1 5 1 9 +10 -15 0 0 0 1 9 +23 0 10 1 0 1 9 +12 -7.5 0 1 -7.5 1 9 +2 -0.5 0 1 -0.5 1 9 +4 0 2 1 4 1 9 +6 -1.5 0 1 -1.5 1 9 +4 -1 0 1 4 1 9 +10 -20 0 0 0 1 9 +12 -22.5 0 0 0 1 9 +25 0 9 1 0 1 9 +13 0 5 1 13 1 9 +6 -5.25 0 1 6 1 9 +9 -13.5 0 0 0 1 9 +5 0 2 1 0 1 9 +12 -6 0 1 -6 1 9 +5 -6.25 0 1 5 1 9 +10 -3.75 0 1 10 1 9 +9 -7.88 0 1 9 1 9 +8 -6 0 1 -6 1 9 +4 -5.5 0 1 -5.5 1 9 +19 0 8 1 19 1 9 +10 -7.5 0 1 -7.5 1 9 +4 -6 0 1 4 1 9 +8 -14 0 0 0 1 9 +8 0 3 1 8 1 9 +12 -21 0 0 0 1 9 +4 -2 0 1 -2 1 9 +4 -5 0 1 4 1 9 +6 -7.5 0 0 0 1 9 +12 -12 0 0 0 1 9 +8 -5 0 1 -5 1 9 +26 0 12 1 0 1 9 +8 -10 0 0 0 1 9 +5 -5.63 0 0 0 1 9 +9 -10.13 0 1 -10.13 1 10 +12 -10.5 0 1 -10.5 1 10 +25 0 10 1 25 1 10 +4 -7 0 1 -7 1 10 +9 -7.88 0 1 9 1 10 +5 -3.13 0 1 5 1 10 +5 -8.13 0 0 0 1 10 +8 -7 0 1 8 1 10 +12 -6 0 1 -6 1 10 +12 -24 0 0 0 1 10 +12 -21 0 0 0 1 10 +4 -2.5 0 1 4 1 10 +6 -9 0 0 0 1 10 +10 -15 0 0 0 1 10 +8 -6 0 1 -6 1 10 +13 0 6 1 0 1 10 +6 -12 0 1 -12 1 10 +6 -4.5 0 1 -4.5 1 10 +9 -16.88 0 1 9 1 10 +10 -18.75 0 1 10 1 10 +9 -3.38 0 1 -3.38 1 10 +6 -9.75 0 1 -9.75 1 10 +2 -1.75 0 1 2 1 10 +5 0 2 1 0 1 10 +8 -5 0 1 -5 1 10 +8 -9 0 1 -9 1 10 +12 0 6 1 0 1 10 +12 0 4 1 12 1 10 +2 -2.5 0 1 2 1 10 +6 -3 0 1 -3 1 10 +10 -7.5 0 1 -7.5 1 10 +5 -2.5 0 1 -2.5 1 10 +5 -3.75 0 1 5 1 10 +10 -3.75 0 1 10 1 10 +2 -3 0 1 -3 1 10 +10 -6.25 0 1 -6.25 1 10 +4 -7.5 0 1 4 1 10 +8 -16 0 1 8 1 10 +5 -6.25 0 1 5 1 10 +4 0 2 1 4 1 10 +10 -11.25 0 1 10 1 10 +5 -6.88 0 1 -6.88 1 10 +5 -7.5 0 1 5 1 10 +26 0 12 0 12 1 10 +8 -13 0 0 0 1 10 +4 -4.5 0 1 4 1 10 +8 -10 0 1 -10 1 10 +6 -3.75 0 1 6 1 10 +5 -5.63 0 1 -5.63 1 10 +9 -18 0 1 9 1 10 +12 -13.5 0 1 12 1 10 +7 0 3 1 0 1 10 +8 -14 0 1 8 1 10 +2 -1.5 0 1 -1.5 1 10 +10 -2.5 0 1 -2.5 1 10 +13 0 5 0 5 1 10 +9 -15.75 0 1 9 1 10 +8 -12 0 1 8 1 10 +28 0 13 0 13 1 10 +6 -7.5 0 1 -7.5 1 10 +10 -16.25 0 1 -16.25 1 10 +12 -7.5 0 1 -7.5 1 10 +5 -5 0 1 -5 1 10 +2 -2 0 1 2 1 10 +22 0 10 1 22 1 10 +2 -1 0 1 2 1 10 +3 0 1 1 3 1 10 +4 -5.5 0 1 -5.5 1 10 +6 -2.25 0 1 6 1 10 +4 -6.5 0 1 -6.5 1 10 +9 -12.38 0 1 9 1 10 +10 -13.75 0 1 -13.75 1 10 +10 -17.5 0 1 10 1 10 +4 -5 0 1 4 1 10 +9 -11.25 0 1 9 1 10 +10 -10 0 1 10 1 10 +2 -3.25 0 1 2 1 10 +5 -8.75 0 1 5 1 10 +5 -10 0 1 5 1 10 +9 -2.25 0 1 -2.25 1 10 +12 -16.5 0 0 0 1 10 +9 -14.63 0 0 0 1 10 +4 -8 0 1 -8 1 10 +6 -5.25 0 1 6 1 10 +9 -6.75 0 1 -6.75 1 10 +12 -12 0 1 -12 1 10 +4 -1 0 1 4 1 10 +12 -15 0 0 0 1 10 +4 -3.5 0 1 -3.5 1 10 +2 -1.25 0 1 -1.25 1 10 +30 0 12 0 12 1 10 +12 -19.5 0 0 0 1 10 +12 -3 0 1 12 1 10 +5 -1.25 0 1 5 1 10 +5 -1.88 0 1 -1.88 1 10 +2 -3.5 0 1 -3.5 1 10 +12 -9 0 1 -9 1 10 +10 -20 0 0 0 1 10 +8 -4 0 1 -4 1 10 +12 0 5 1 0 1 10 +2 0 1 1 2 1 10 +4 -1.5 0 1 4 1 10 +2 -3.75 0 1 -3.75 1 10 +6 -10.5 0 1 -10.5 1 10 +4 -2 0 1 -2 1 10 +23 0 10 1 0 1 10 +12 -18 0 0 0 1 
10 +6 -8.25 0 1 -8.25 1 10 +26 0 10 1 26 1 10 +10 -8.75 0 1 -8.75 1 10 +2 -0.75 0 1 2 1 10 +5 -9.38 0 1 -9.38 1 10 +25 0 9 1 0 1 10 +9 -4.5 0 1 9 1 10 +10 -5 0 1 10 1 10 +2 -4 0 1 -4 1 10 +2 -2.75 0 1 -2.75 1 10 +4 -6 0 1 4 1 10 +10 -12.5 0 1 -12.5 1 10 +12 -22.5 0 1 12 1 10 +4 -4 0 1 4 1 10 +2 -0.5 0 1 -0.5 1 10 +8 -2 0 1 -2 1 10 +4 -3 0 1 4 1 10 +6 -11.25 0 1 6 1 10 +8 -15 0 1 -15 1 10 +8 -11 0 1 8 1 10 +12 -4.5 0 1 -4.5 1 10 +19 0 8 1 19 1 10 +6 -6 0 1 -6 1 10 +5 -4.38 0 1 -4.38 1 10 +9 -9 0 1 9 1 10 +6 -1.5 0 1 -1.5 1 10 +9 -13.5 0 1 -13.5 1 10 +9 -5.63 0 1 -5.63 1 10 +8 -8 0 1 -8 1 10 +8 0 3 1 8 1 10 +8 -3 0 1 8 1 10 +9 -11.25 0 0 0 1 11 +8 -16 0 0 0 1 11 +9 -5.63 0 1 -5.63 1 11 +9 -4.5 0 1 9 1 11 +2 -2 0 1 2 1 11 +12 -19.5 0 0 0 1 11 +4 -4.5 0 0 0 1 11 +2 -3.75 0 0 0 1 11 +2 -2.25 0 0 0 1 11 +12 -4.5 0 1 -4.5 1 11 +9 -10.13 0 0 0 1 11 +12 -3 0 1 12 1 11 +10 -17.5 0 0 0 1 11 +5 -4.38 0 1 -4.38 1 11 +5 -7.5 0 0 0 1 11 +6 -11.25 0 0 0 1 11 +2 -1.5 0 1 -1.5 1 11 +9 -6.75 0 1 -6.75 1 11 +4 -7 0 0 0 1 11 +8 -7 0 1 8 1 11 +2 -1.75 0 1 2 1 11 +2 -1 0 1 2 1 11 +10 -6.25 0 1 -6.25 1 11 +6 -6.75 0 1 6 1 11 +9 -2.25 0 1 -2.25 1 11 +2 -0.75 0 1 2 1 11 +12 0 4 1 12 1 11 +6 -3 0 1 -3 1 11 +3 0 1 0 1 1 11 +2 -3 0 1 -3 1 11 +10 -13.75 0 0 0 1 11 +6 -2.25 0 1 6 1 11 +5 -1.88 0 1 -1.88 1 11 +12 -13.5 0 1 12 1 11 +22 0 10 1 22 1 11 +9 -12.38 0 0 0 1 11 +26 0 10 0 10 1 11 +10 -2.5 0 1 -2.5 1 11 +25 0 10 1 25 1 11 +9 -15.75 0 0 0 1 11 +7 0 3 0 3 1 11 +10 -10 0 1 10 1 11 +12 -15 0 0 0 1 11 +12 0 6 0 6 1 11 +6 -4.5 0 1 -4.5 1 11 +8 -13 0 0 0 1 11 +10 -16.25 0 0 0 1 11 +5 -1.25 0 1 5 1 11 +4 -4 0 1 4 1 11 +5 -3.75 0 1 5 1 11 +6 -8.25 0 0 0 1 11 +8 -15 0 0 0 1 11 +8 -8 0 0 0 1 11 +2 -2.75 0 0 0 1 11 +6 -12 0 0 0 1 11 +2 0 1 0 1 1 11 +2 -1.25 0 0 0 1 11 +9 -18 0 0 0 1 11 +6 -9 0 0 0 1 11 +10 -8.75 0 1 -8.75 1 11 +4 -7.5 0 0 0 1 11 +13 0 6 1 0 1 11 +10 -11.25 0 0 0 1 11 +4 -3 0 1 4 1 11 +10 -5 0 1 10 1 11 +8 -2 0 1 -2 1 11 +4 -2.5 0 1 4 1 11 +2 -3.5 0 0 0 1 11 +2 -2.5 0 0 0 1 11 +6 -3.75 0 1 6 1 11 +8 -3 0 1 8 1 11 +2 -3.25 0 0 0 1 11 +8 -9 0 0 0 1 11 +6 -6 0 1 -6 1 11 +8 -11 0 0 0 1 11 +5 -8.75 0 0 0 1 11 +6 -9.75 0 0 0 1 11 +12 -24 0 0 0 1 11 +4 -6.5 0 0 0 1 11 +5 -10 0 0 0 1 11 +30 0 12 1 0 1 11 +12 -18 0 0 0 1 11 +9 -9 0 0 0 1 11 +5 -5 0 1 -5 1 11 +5 -9.38 0 0 0 1 11 +10 -12.5 0 0 0 1 11 +10 -18.75 0 0 0 1 11 +5 -2.5 0 1 -2.5 1 11 +9 -14.63 0 0 0 1 11 +28 0 13 0 13 1 11 +5 -6.88 0 0 0 1 11 +4 -3.5 0 1 -3.5 1 11 +12 -16.5 0 0 0 1 11 +5 -8.13 0 0 0 1 11 +9 -16.88 0 0 0 1 11 +9 -3.38 0 1 -3.38 1 11 +12 0 5 0 5 1 11 +4 -8 0 0 0 1 11 +8 -12 0 0 0 1 11 +8 -4 0 1 -4 1 11 +2 -4 0 0 0 1 11 +12 -9 0 1 -9 1 11 +4 -1.5 0 1 4 1 11 +6 -10.5 0 0 0 1 11 +5 -3.13 0 1 5 1 11 +10 -15 0 0 0 1 11 +23 0 10 0 10 1 11 +12 -7.5 0 1 -7.5 1 11 +2 -0.5 0 1 -0.5 1 11 +4 0 2 0 2 1 11 +6 -1.5 0 1 -1.5 1 11 +4 -1 0 1 4 1 11 +10 -20 0 0 0 1 11 +12 -22.5 0 0 0 1 11 +25 0 9 1 0 1 11 +13 0 5 0 5 1 11 +6 -5.25 0 1 6 1 11 +9 -13.5 0 0 0 1 11 +5 0 2 1 0 1 11 +12 -6 0 1 -6 1 11 +5 -6.25 0 0 0 1 11 +10 -3.75 0 1 10 1 11 +9 -7.88 0 1 9 1 11 +8 -6 0 1 -6 1 11 +4 -5.5 0 0 0 1 11 +19 0 8 1 19 1 11 +10 -7.5 0 1 -7.5 1 11 +4 -6 0 0 0 1 11 +8 -14 0 0 0 1 11 +8 0 3 1 8 1 11 +12 -21 0 0 0 1 11 +4 -2 0 1 -2 1 11 +4 -5 0 0 0 1 11 +6 -7.5 0 0 0 1 11 +12 -12 0 0 0 1 11 +8 -5 0 1 -5 1 11 +26 0 12 0 12 1 11 +8 -10 0 0 0 1 11 +5 -5.63 0 0 0 1 11 +2 -1 0 1 2 1 12 +9 -6.75 0 1 -6.75 1 12 +2 -4 0 0 0 1 12 +2 -3.25 0 0 0 1 12 +4 -6.5 0 0 0 1 12 +5 -5.63 0 0 0 1 12 +8 -8 0 1 -8 1 12 +12 -18 0 1 12 1 12 +2 -2.5 0 0 0 1 12 +3 0 1 0 1 1 12 +12 -16.5 0 1 12 1 12 +10 -12.5 
0 0 0 1 12 +5 -1.25 0 1 5 1 12 +19 0 8 0 8 1 12 +8 -9 0 0 0 1 12 +5 -10 0 0 0 1 12 +25 0 10 0 10 1 12 +7 0 3 1 0 1 12 +6 -11.25 0 0 0 1 12 +6 -1.5 0 1 -1.5 1 12 +4 -1.5 0 1 4 1 12 +10 -5 0 1 10 1 12 +10 -3.75 0 0 0 1 12 +6 -4.5 0 0 0 1 12 +12 -19.5 0 0 0 1 12 +5 -4.38 0 0 0 1 12 +8 -11 0 0 0 1 12 +2 -0.75 0 1 2 1 12 +2 -1.5 0 1 -1.5 1 12 +6 -6.75 0 1 6 1 12 +4 -6 0 1 4 1 12 +10 -16.25 0 1 -16.25 1 12 +12 -15 0 1 -15 1 12 +6 -5.25 0 1 6 1 12 +12 -21 0 0 0 1 12 +4 -3 0 0 0 1 12 +12 -22.5 0 1 12 1 12 +2 -3.75 0 0 0 1 12 +6 -12 0 0 0 1 12 +5 -8.13 0 1 5 1 12 +10 -8.75 0 1 -8.75 1 12 +12 -6 0 1 -6 1 12 +5 -5 0 1 -5 1 12 +22 0 10 0 10 1 12 +12 -13.5 0 1 12 1 12 +8 -7 0 1 8 1 12 +4 -3.5 0 0 0 1 12 +9 -12.38 0 0 0 1 12 +10 -7.5 0 1 -7.5 1 12 +26 0 10 1 26 1 12 +12 -4.5 0 1 -4.5 1 12 +8 -15 0 1 -15 1 12 +2 -1.75 0 1 2 1 12 +12 0 6 0 6 1 12 +9 -3.38 0 1 -3.38 1 12 +2 -3 0 0 0 1 12 +9 -5.63 0 1 -5.63 1 12 +2 -3.5 0 0 0 1 12 +8 -12 0 1 8 1 12 +10 -18.75 0 1 10 1 12 +4 0 2 0 2 1 12 +2 -2.25 0 0 0 1 12 +9 -2.25 0 1 -2.25 1 12 +10 -13.75 0 1 -13.75 1 12 +28 0 13 1 0 1 12 +4 -2.5 0 1 4 1 12 +9 -15.75 0 1 9 1 12 +10 -15 0 0 0 1 12 +10 -10 0 1 10 1 12 +9 -18 0 0 0 1 12 +12 -24 0 1 -24 1 12 +13 0 5 1 13 1 12 +5 -1.88 0 1 -1.88 1 12 +4 -4.5 0 0 0 1 12 +9 -7.88 0 1 9 1 12 +9 -9 0 0 0 1 12 +25 0 9 0 9 1 12 +12 -12 0 0 0 1 12 +6 -2.25 0 1 6 1 12 +8 -5 0 0 0 1 12 +4 -5.5 0 0 0 1 12 +2 -1.25 0 0 0 1 12 +9 -13.5 0 0 0 1 12 +9 -4.5 0 1 9 1 12 +10 -11.25 0 0 0 1 12 +6 -3 0 1 -3 1 12 +10 -2.5 0 1 -2.5 1 12 +12 0 4 1 12 1 12 +10 -20 0 0 0 1 12 +5 -3.75 0 0 0 1 12 +9 -10.13 0 0 0 1 12 +4 -7 0 0 0 1 12 +12 -10.5 0 1 -10.5 1 12 +8 -16 0 1 8 1 12 +4 -7.5 0 0 0 1 12 +8 0 3 1 8 1 12 +6 -10.5 0 0 0 1 12 +6 -9.75 0 0 0 1 12 +5 -8.75 0 0 0 1 12 +5 -2.5 0 1 -2.5 1 12 +13 0 6 0 6 1 12 +23 0 10 0 10 1 12 +8 -4 0 1 -4 1 12 +9 -11.25 0 0 0 1 12 +5 -6.88 0 0 0 1 12 +4 -4 0 0 0 1 12 +10 -17.5 0 0 0 1 12 +8 -13 0 0 0 1 12 +26 0 12 0 12 1 12 +6 -8.25 0 0 0 1 12 +9 -14.63 0 0 0 1 12 +8 -2 0 1 -2 1 12 +10 -6.25 0 1 -6.25 1 12 +8 -14 0 0 0 1 12 +12 0 5 1 0 1 12 +8 -10 0 0 0 1 12 +30 0 12 1 0 1 12 +5 -7.5 0 0 0 1 12 +5 0 2 1 0 1 12 +6 -3.75 0 1 6 1 12 +6 -6 0 0 0 1 12 +4 -2 0 1 -2 1 12 +12 -7.5 0 1 -7.5 1 12 +5 -6.25 0 1 5 1 12 +4 -5 0 0 0 1 12 +2 -2.75 0 0 0 1 12 +2 -2 0 0 0 1 12 +6 -9 0 0 0 1 12 +5 -3.13 0 1 5 1 12 +12 -9 0 0 0 1 12 +4 -8 0 0 0 1 12 +4 -1 0 1 4 1 12 +2 0 1 0 1 1 12 +9 -16.88 0 1 9 1 12 +8 -6 0 0 0 1 12 +2 -0.5 0 1 -0.5 1 12 +6 -7.5 0 0 0 1 12 +8 -3 0 1 8 1 12 +12 -3 0 1 12 1 12 +5 -9.38 0 0 0 1 12 +6 -9.75 0 0 0 1 13 +12 -13.5 0 1 12 1 13 +8 -7 0 1 8 1 13 +10 -7.5 0 1 -7.5 1 13 +2 -2.25 0 0 0 1 13 +6 -8.25 0 0 0 1 13 +10 -16.25 0 0 0 1 13 +3 0 1 1 3 1 13 +4 -3 0 1 4 1 13 +8 -2 0 1 -2 1 13 +4 -2.5 0 1 4 1 13 +5 -5.63 0 0 0 1 13 +5 0 2 1 0 1 13 +30 0 12 0 12 1 13 +9 -4.5 0 1 9 1 13 +4 -7.5 0 0 0 1 13 +26 0 10 0 10 1 13 +10 -6.25 0 1 -6.25 1 13 +2 -4 0 0 0 1 13 +4 -5 0 0 0 1 13 +5 -1.88 0 1 -1.88 1 13 +23 0 10 0 10 1 13 +8 -3 0 1 8 1 13 +8 -12 0 0 0 1 13 +10 -2.5 0 1 -2.5 1 13 +5 -8.13 0 0 0 1 13 +8 -9 0 0 0 1 13 +2 -3 0 0 0 1 13 +9 -11.25 0 0 0 1 13 +9 -12.38 0 0 0 1 13 +12 -15 0 0 0 1 13 +8 -10 0 0 0 1 13 +4 -1 0 1 4 1 13 +8 0 3 0 3 1 13 +4 -3.5 0 0 0 1 13 +8 -8 0 1 -8 1 13 +10 -11.25 0 0 0 1 13 +10 -5 0 1 10 1 13 +9 -13.5 0 0 0 1 13 +2 -0.75 0 1 2 1 13 +5 -4.38 0 1 -4.38 1 13 +2 -1.5 0 1 -1.5 1 13 +2 -3.75 0 0 0 1 13 +5 -3.75 0 1 5 1 13 +9 -16.88 0 0 0 1 13 +9 -3.38 0 1 -3.38 1 13 +5 -10 0 0 0 1 13 +26 0 12 0 12 1 13 +5 -9.38 0 0 0 1 13 +6 -1.5 0 1 -1.5 1 13 +10 -10 0 0 0 1 13 +2 -1.25 0 0 0 1 13 +9 -14.63 0 0 0 1 13 +6 
-4.5 0 1 -4.5 1 13 +5 -5 0 1 -5 1 13 +5 -7.5 0 0 0 1 13 +8 -13 0 0 0 1 13 +5 -3.13 0 0 0 1 13 +8 -5 0 0 0 1 13 +8 -11 0 0 0 1 13 +6 -6.75 0 0 0 1 13 +5 -8.75 0 0 0 1 13 +2 0 1 0 1 1 13 +6 -6 0 0 0 1 13 +4 -5.5 0 0 0 1 13 +6 -3 0 1 -3 1 13 +12 -19.5 0 0 0 1 13 +10 -13.75 0 0 0 1 13 +10 -8.75 0 0 0 1 13 +5 -6.88 0 0 0 1 13 +6 -7.5 0 0 0 1 13 +10 -12.5 0 0 0 1 13 +9 -6.75 0 1 -6.75 1 13 +8 -4 0 1 -4 1 13 +2 -1 0 1 2 1 13 +12 -24 0 0 0 1 13 +12 -6 0 1 -6 1 13 +2 -2 0 0 0 1 13 +4 -7 0 0 0 1 13 +12 -9 0 1 -9 1 13 +6 -11.25 0 0 0 1 13 +25 0 10 0 10 1 13 +28 0 13 0 13 1 13 +2 -2.75 0 0 0 1 13 +12 -10.5 0 0 0 1 13 +8 -14 0 0 0 1 13 +4 -6.5 0 0 0 1 13 +4 0 2 0 2 1 13 +10 -15 0 0 0 1 13 +12 0 5 0 5 1 13 +10 -18.75 0 0 0 1 13 +12 -3 0 1 12 1 13 +4 -4 0 0 0 1 13 +9 -7.88 0 0 0 1 13 +9 -2.25 0 0 0 1 13 +2 -1.75 0 0 0 1 13 +12 0 6 0 6 1 13 +5 -2.5 0 0 0 1 13 +4 -4.5 0 0 0 1 13 +8 -6 0 0 0 1 13 +12 -18 0 0 0 1 13 +12 -16.5 0 0 0 1 13 +22 0 10 0 10 1 13 +12 -21 0 0 0 1 13 +12 -4.5 0 0 0 1 13 +12 -12 0 0 0 1 13 +19 0 8 0 8 1 13 +2 -2.5 0 0 0 1 13 +12 0 4 0 4 1 13 +4 -2 0 0 0 1 13 +9 -9 0 0 0 1 13 +9 -10.13 0 0 0 1 13 +6 -2.25 0 1 6 1 13 +2 -0.5 0 1 -0.5 1 13 +10 -3.75 0 1 10 1 13 +13 0 5 0 5 1 13 +4 -1.5 0 1 4 1 13 +5 -1.25 0 1 5 1 13 +6 -9 0 0 0 1 13 +10 -17.5 0 0 0 1 13 +6 -12 0 0 0 1 13 +6 -5.25 0 0 0 1 13 +12 -22.5 0 0 0 1 13 +8 -16 0 0 0 1 13 +9 -15.75 0 0 0 1 13 +10 -20 0 0 0 1 13 +13 0 6 0 6 1 13 +4 -8 0 0 0 1 13 +12 -7.5 0 0 0 1 13 +9 -18 0 0 0 1 13 +2 -3.25 0 0 0 1 13 +7 0 3 0 3 1 13 +6 -3.75 0 1 6 1 13 +5 -6.25 0 0 0 1 13 +8 -15 0 0 0 1 13 +25 0 9 0 9 1 13 +2 -3.5 0 0 0 1 13 +6 -10.5 0 0 0 1 13 +9 -11.25 0 0 0 1 14 +8 -16 0 0 0 1 14 +9 -5.63 0 1 -5.63 1 14 +9 -4.5 0 1 9 1 14 +2 -2 0 1 2 1 14 +12 -19.5 0 0 0 1 14 +4 -4.5 0 0 0 1 14 +2 -3.75 0 0 0 1 14 +2 -2.25 0 0 0 1 14 +12 -4.5 0 1 -4.5 1 14 +9 -10.13 0 0 0 1 14 +12 -3 0 1 12 1 14 +10 -17.5 0 0 0 1 14 +5 -4.38 0 1 -4.38 1 14 +5 -7.5 0 1 5 1 14 +6 -11.25 0 0 0 1 14 +2 -1.5 0 1 -1.5 1 14 +9 -6.75 0 1 -6.75 1 14 +4 -7 0 0 0 1 14 +8 -7 0 1 8 1 14 +2 -1.75 0 1 2 1 14 +2 -1 0 1 2 1 14 +10 -6.25 0 1 -6.25 1 14 +6 -6.75 0 1 6 1 14 +9 -2.25 0 1 -2.25 1 14 +2 -0.75 0 1 2 1 14 +12 0 4 1 12 1 14 +6 -3 0 1 -3 1 14 +3 0 1 1 3 1 14 +2 -3 0 0 0 1 14 +10 -13.75 0 1 -13.75 1 14 +6 -2.25 0 1 6 1 14 +5 -1.88 0 1 -1.88 1 14 +12 -13.5 0 1 12 1 14 +22 0 10 0 10 1 14 +9 -12.38 0 1 9 1 14 +26 0 10 1 26 1 14 +12 -10.5 0 1 -10.5 1 14 +10 -2.5 0 1 -2.5 1 14 +25 0 10 0 10 1 14 +9 -15.75 0 1 9 1 14 +7 0 3 1 0 1 14 +10 -10 0 1 10 1 14 +12 -15 0 0 0 1 14 +12 0 6 1 0 1 14 +6 -4.5 0 1 -4.5 1 14 +8 -13 0 0 0 1 14 +10 -16.25 0 0 0 1 14 +5 -1.25 0 1 5 1 14 +4 -4 0 1 4 1 14 +5 -3.75 0 1 5 1 14 +6 -8.25 0 1 -8.25 1 14 +8 -15 0 1 -15 1 14 +8 -8 0 1 -8 1 14 +2 -2.75 0 0 0 1 14 +6 -12 0 1 -12 1 14 +2 0 1 1 2 1 14 +2 -1.25 0 1 -1.25 1 14 +9 -18 0 1 9 1 14 +6 -9 0 0 0 1 14 +10 -8.75 0 1 -8.75 1 14 +4 -7.5 0 0 0 1 14 +13 0 6 1 0 1 14 +10 -11.25 0 1 10 1 14 +4 -3 0 0 0 1 14 +10 -5 0 1 10 1 14 +8 -2 0 1 -2 1 14 +4 -2.5 0 0 0 1 14 +2 -3.5 0 0 0 1 14 +2 -2.5 0 1 2 1 14 +6 -3.75 0 1 6 1 14 +8 -3 0 1 8 1 14 +2 -3.25 0 0 0 1 14 +8 -9 0 1 -9 1 14 +6 -6 0 1 -6 1 14 +8 -11 0 1 8 1 14 +5 -8.75 0 0 0 1 14 +6 -9.75 0 0 0 1 14 +12 -24 0 1 -24 1 14 +4 -6.5 0 1 -6.5 1 14 +5 -10 0 1 5 1 14 +30 0 12 1 0 1 14 +12 -18 0 1 12 1 14 +9 -9 0 0 0 1 14 +5 -5 0 1 -5 1 14 +5 -9.38 0 1 -9.38 1 14 +10 -12.5 0 1 -12.5 1 14 +10 -18.75 0 0 0 1 14 +5 -2.5 0 1 -2.5 1 14 +9 -14.63 0 1 9 1 14 +28 0 13 1 0 1 14 +5 -6.88 0 1 -6.88 1 14 +4 -3.5 0 1 -3.5 1 14 +12 -16.5 0 1 12 1 14 +5 -8.13 0 0 0 1 14 +9 -16.88 0 0 0 1 14 +9 -3.38 
0 1 -3.38 1 14 +12 0 5 1 0 1 14 +4 -8 0 0 0 1 14 +8 -12 0 1 8 1 14 +8 -4 0 1 -4 1 14 +2 -4 0 0 0 1 14 +12 -9 0 1 -9 1 14 +4 -1.5 0 1 4 1 14 +6 -10.5 0 0 0 1 14 +5 -3.13 0 1 5 1 14 +10 -15 0 1 10 1 14 +23 0 10 0 10 1 14 +12 -7.5 0 1 -7.5 1 14 +2 -0.5 0 1 -0.5 1 14 +4 0 2 0 2 1 14 +6 -1.5 0 1 -1.5 1 14 +4 -1 0 1 4 1 14 +10 -20 0 0 0 1 14 +12 -22.5 0 1 12 1 14 +25 0 9 0 9 1 14 +13 0 5 1 13 1 14 +6 -5.25 0 1 6 1 14 +9 -13.5 0 0 0 1 14 +5 0 2 0 2 1 14 +12 -6 0 1 -6 1 14 +5 -6.25 0 1 5 1 14 +10 -3.75 0 1 10 1 14 +9 -7.88 0 0 0 1 14 +8 -6 0 1 -6 1 14 +4 -5.5 0 0 0 1 14 +19 0 8 0 8 1 14 +10 -7.5 0 1 -7.5 1 14 +4 -6 0 0 0 1 14 +8 -14 0 0 0 1 14 +8 0 3 0 3 1 14 +12 -21 0 0 0 1 14 +4 -2 0 1 -2 1 14 +4 -5 0 1 4 1 14 +6 -7.5 0 1 -7.5 1 14 +12 -12 0 1 -12 1 14 +8 -5 0 1 -5 1 14 +26 0 12 0 12 1 14 +8 -10 0 1 -10 1 14 +5 -5.63 0 1 -5.63 1 14 +2 -1 0 1 2 1 15 +9 -6.75 0 1 -6.75 1 15 +2 -4 0 0 0 1 15 +2 -3.25 0 1 2 1 15 +4 -6.5 0 1 -6.5 1 15 +5 -5.63 0 1 -5.63 1 15 +8 -8 0 1 -8 1 15 +12 -18 0 1 12 1 15 +2 -2.5 0 1 2 1 15 +3 0 1 0 1 1 15 +12 -16.5 0 1 12 1 15 +10 -12.5 0 1 -12.5 1 15 +5 -1.25 0 1 5 1 15 +19 0 8 0 8 1 15 +8 -9 0 1 -9 1 15 +5 -10 0 1 5 1 15 +25 0 10 1 25 1 15 +7 0 3 0 3 1 15 +6 -11.25 0 0 0 1 15 +6 -1.5 0 1 -1.5 1 15 +4 -1.5 0 1 4 1 15 +10 -5 0 1 10 1 15 +10 -3.75 0 1 10 1 15 +6 -4.5 0 1 -4.5 1 15 +12 -19.5 0 1 12 1 15 +5 -4.38 0 1 -4.38 1 15 +8 -11 0 0 0 1 15 +2 -0.75 0 1 2 1 15 +2 -1.5 0 1 -1.5 1 15 +6 -6.75 0 1 6 1 15 +4 -6 0 1 4 1 15 +10 -16.25 0 1 -16.25 1 15 +12 -15 0 1 -15 1 15 +6 -5.25 0 1 6 1 15 +12 -21 0 1 12 1 15 +4 -3 0 1 4 1 15 +12 -22.5 0 0 0 1 15 +2 -3.75 0 1 -3.75 1 15 +6 -12 0 0 0 1 15 +5 -8.13 0 1 5 1 15 +10 -8.75 0 1 -8.75 1 15 +12 -6 0 1 -6 1 15 +5 -5 0 1 -5 1 15 +22 0 10 1 22 1 15 +12 -13.5 0 1 12 1 15 +8 -7 0 0 0 1 15 +4 -3.5 0 1 -3.5 1 15 +9 -12.38 0 1 9 1 15 +10 -7.5 0 1 -7.5 1 15 +26 0 10 0 10 1 15 +12 -4.5 0 1 -4.5 1 15 +8 -15 0 1 -15 1 15 +2 -1.75 0 1 2 1 15 +12 0 6 1 0 1 15 +9 -3.38 0 1 -3.38 1 15 +2 -3 0 1 -3 1 15 +9 -5.63 0 1 -5.63 1 15 +2 -3.5 0 1 -3.5 1 15 +8 -12 0 1 8 1 15 +10 -18.75 0 1 10 1 15 +4 0 2 1 4 1 15 +2 -2.25 0 1 2 1 15 +9 -2.25 0 1 -2.25 1 15 +10 -13.75 0 1 -13.75 1 15 +28 0 13 1 0 1 15 +4 -2.5 0 1 4 1 15 +9 -15.75 0 0 0 1 15 +10 -15 0 0 0 1 15 +10 -10 0 1 10 1 15 +9 -18 0 0 0 1 15 +12 -24 0 0 0 1 15 +13 0 5 0 5 1 15 +5 -1.88 0 1 -1.88 1 15 +4 -4.5 0 1 4 1 15 +9 -7.88 0 1 9 1 15 +9 -9 0 1 9 1 15 +25 0 9 1 0 1 15 +12 -12 0 1 -12 1 15 +6 -2.25 0 1 6 1 15 +8 -5 0 1 -5 1 15 +4 -5.5 0 1 -5.5 1 15 +2 -1.25 0 1 -1.25 1 15 +9 -13.5 0 1 -13.5 1 15 +9 -4.5 0 1 9 1 15 +10 -11.25 0 1 10 1 15 +6 -3 0 1 -3 1 15 +10 -2.5 0 1 -2.5 1 15 +12 0 4 1 12 1 15 +10 -20 0 0 0 1 15 +5 -3.75 0 1 5 1 15 +9 -10.13 0 1 -10.13 1 15 +4 -7 0 1 -7 1 15 +12 -10.5 0 1 -10.5 1 15 +8 -16 0 1 8 1 15 +4 -7.5 0 1 4 1 15 +8 0 3 0 3 1 15 +6 -10.5 0 0 0 1 15 +6 -9.75 0 1 -9.75 1 15 +5 -8.75 0 1 5 1 15 +5 -2.5 0 1 -2.5 1 15 +13 0 6 0 6 1 15 +23 0 10 1 0 1 15 +8 -4 0 1 -4 1 15 +9 -11.25 0 1 9 1 15 +5 -6.88 0 1 -6.88 1 15 +4 -4 0 1 4 1 15 +10 -17.5 0 0 0 1 15 +8 -13 0 1 8 1 15 +26 0 12 0 12 1 15 +6 -8.25 0 0 0 1 15 +9 -14.63 0 0 0 1 15 +8 -2 0 1 -2 1 15 +10 -6.25 0 1 -6.25 1 15 +8 -14 0 1 8 1 15 +12 0 5 1 0 1 15 +8 -10 0 1 -10 1 15 +30 0 12 1 0 1 15 +5 -7.5 0 1 5 1 15 +5 0 2 1 0 1 15 +6 -3.75 0 1 6 1 15 +6 -6 0 1 -6 1 15 +4 -2 0 1 -2 1 15 +12 -7.5 0 1 -7.5 1 15 +5 -6.25 0 1 5 1 15 +4 -5 0 1 4 1 15 +2 -2.75 0 1 -2.75 1 15 +2 -2 0 1 2 1 15 +6 -9 0 1 -9 1 15 +5 -3.13 0 1 5 1 15 +12 -9 0 1 -9 1 15 +4 -8 0 1 -8 1 15 +4 -1 0 1 4 1 15 +2 0 1 0 1 1 15 +9 -16.88 0 1 9 1 15 +8 -6 0 1 -6 1 15 +2 -0.5 0 1 
-0.5 1 15 +6 -7.5 0 1 -7.5 1 15 +8 -3 0 1 8 1 15 +12 -3 0 1 12 1 15 +5 -9.38 0 0 0 1 15 +6 -9.75 0 0 0 1 16 +12 -13.5 0 1 12 1 16 +8 -7 0 0 0 1 16 +10 -7.5 0 1 -7.5 1 16 +2 -2.25 0 1 2 1 16 +6 -8.25 0 0 0 1 16 +10 -16.25 0 1 -16.25 1 16 +3 0 1 1 3 1 16 +4 -3 0 1 4 1 16 +8 -2 0 1 -2 1 16 +4 -2.5 0 1 4 1 16 +5 -5.63 0 0 0 1 16 +5 0 2 0 2 1 16 +30 0 12 1 0 1 16 +9 -4.5 0 1 9 1 16 +4 -7.5 0 0 0 1 16 +26 0 10 1 26 1 16 +10 -6.25 0 1 -6.25 1 16 +2 -4 0 1 -4 1 16 +4 -5 0 0 0 1 16 +5 -1.88 0 1 -1.88 1 16 +23 0 10 1 0 1 16 +8 -3 0 1 8 1 16 +8 -12 0 1 8 1 16 +10 -2.5 0 0 0 1 16 +5 -8.13 0 0 0 1 16 +8 -9 0 1 -9 1 16 +2 -3 0 1 -3 1 16 +9 -11.25 0 1 9 1 16 +9 -12.38 0 0 0 1 16 +12 -15 0 1 -15 1 16 +8 -10 0 0 0 1 16 +4 -1 0 1 4 1 16 +8 0 3 1 8 1 16 +4 -3.5 0 0 0 1 16 +8 -8 0 0 0 1 16 +10 -11.25 0 0 0 1 16 +10 -5 0 1 10 1 16 +9 -13.5 0 0 0 1 16 +2 -0.75 0 1 2 1 16 +5 -4.38 0 1 -4.38 1 16 +2 -1.5 0 1 -1.5 1 16 +2 -3.75 0 1 -3.75 1 16 +5 -3.75 0 1 5 1 16 +9 -16.88 0 0 0 1 16 +9 -3.38 0 1 -3.38 1 16 +5 -10 0 0 0 1 16 +26 0 12 1 0 1 16 +5 -9.38 0 0 0 1 16 +6 -1.5 0 1 -1.5 1 16 +10 -10 0 1 10 1 16 +2 -1.25 0 1 -1.25 1 16 +9 -14.63 0 0 0 1 16 +6 -4.5 0 1 -4.5 1 16 +5 -5 0 1 -5 1 16 +5 -7.5 0 0 0 1 16 +8 -13 0 0 0 1 16 +5 -3.13 0 1 5 1 16 +8 -5 0 1 -5 1 16 +8 -11 0 1 8 1 16 +6 -6.75 0 1 6 1 16 +5 -8.75 0 1 5 1 16 +2 0 1 1 2 1 16 +9 -5.63 0 1 -5.63 1 16 +6 -6 0 1 -6 1 16 +4 -5.5 0 0 0 1 16 +6 -3 0 1 -3 1 16 +12 -19.5 0 1 12 1 16 +10 -13.75 0 0 0 1 16 +10 -8.75 0 1 -8.75 1 16 +5 -6.88 0 1 -6.88 1 16 +6 -7.5 0 0 0 1 16 +10 -12.5 0 1 -12.5 1 16 +9 -6.75 0 1 -6.75 1 16 +4 -6 0 1 4 1 16 +8 -4 0 1 -4 1 16 +2 -1 0 1 2 1 16 +12 -24 0 0 0 1 16 +12 -6 0 1 -6 1 16 +2 -2 0 1 2 1 16 +4 -7 0 0 0 1 16 +12 -9 0 1 -9 1 16 +6 -11.25 0 0 0 1 16 +25 0 10 1 25 1 16 +28 0 13 1 0 1 16 +2 -2.75 0 1 -2.75 1 16 +12 -10.5 0 0 0 1 16 +8 -14 0 0 0 1 16 +4 -6.5 0 0 0 1 16 +4 0 2 1 4 1 16 +10 -15 0 0 0 1 16 +12 0 5 1 0 1 16 +10 -18.75 0 0 0 1 16 +12 -3 0 1 12 1 16 +4 -4 0 1 4 1 16 +9 -7.88 0 1 9 1 16 +9 -2.25 0 1 -2.25 1 16 +2 -1.75 0 1 2 1 16 +12 0 6 1 0 1 16 +5 -2.5 0 1 -2.5 1 16 +4 -4.5 0 1 4 1 16 +8 -6 0 1 -6 1 16 +12 -18 0 0 0 1 16 +12 -16.5 0 1 12 1 16 +22 0 10 0 10 1 16 +12 -21 0 0 0 1 16 +12 -4.5 0 1 -4.5 1 16 +12 -12 0 1 -12 1 16 +19 0 8 1 19 1 16 +2 -2.5 0 1 2 1 16 +12 0 4 1 12 1 16 +4 -2 0 1 -2 1 16 +9 -9 0 0 0 1 16 +9 -10.13 0 0 0 1 16 +6 -2.25 0 1 6 1 16 +2 -0.5 0 1 -0.5 1 16 +10 -3.75 0 1 10 1 16 +13 0 5 1 13 1 16 +4 -1.5 0 1 4 1 16 +5 -1.25 0 1 5 1 16 +6 -9 0 1 -9 1 16 +10 -17.5 0 0 0 1 16 +6 -12 0 0 0 1 16 +6 -5.25 0 1 6 1 16 +12 -22.5 0 1 12 1 16 +8 -16 0 0 0 1 16 +9 -15.75 0 1 9 1 16 +10 -20 0 1 10 1 16 +13 0 6 1 0 1 16 +4 -8 0 0 0 1 16 +12 -7.5 0 1 -7.5 1 16 +9 -18 0 0 0 1 16 +2 -3.25 0 1 2 1 16 +7 0 3 1 0 1 16 +6 -3.75 0 1 6 1 16 +5 -6.25 0 1 5 1 16 +8 -15 0 0 0 1 16 +25 0 9 1 0 1 16 +2 -3.5 0 1 -3.5 1 16 +6 -10.5 0 1 -10.5 1 16 +9 -10.13 0 0 0 1 17 +12 -10.5 0 0 0 1 17 +25 0 10 0 10 1 17 +4 -7 0 0 0 1 17 +9 -7.88 0 0 0 1 17 +5 -3.13 0 0 0 1 17 +5 -8.13 0 0 0 1 17 +8 -7 0 0 0 1 17 +12 -6 0 0 0 1 17 +12 -24 0 0 0 1 17 +12 -21 0 0 0 1 17 +4 -2.5 0 0 0 1 17 +6 -9 0 0 0 1 17 +10 -15 0 0 0 1 17 +8 -6 0 0 0 1 17 +13 0 6 1 0 1 17 +6 -12 0 0 0 1 17 +6 -4.5 0 0 0 1 17 +9 -16.88 0 0 0 1 17 +10 -18.75 0 0 0 1 17 +9 -3.38 0 1 -3.38 1 17 +6 -9.75 0 0 0 1 17 +2 -1.75 0 0 0 1 17 +5 0 2 1 0 1 17 +8 -5 0 0 0 1 17 +8 -9 0 0 0 1 17 +12 0 6 1 0 1 17 +12 0 4 1 12 1 17 +2 -2.5 0 0 0 1 17 +6 -3 0 0 0 1 17 +10 -7.5 0 0 0 1 17 +5 -2.5 0 0 0 1 17 +5 -3.75 0 0 0 1 17 +10 -3.75 0 0 0 1 17 +2 -3 0 0 0 1 17 +10 -6.25 0 0 0 1 17 +4 -7.5 0 0 0 1 17 +8 
-16 0 0 0 1 17 +5 -6.25 0 0 0 1 17 +4 0 2 1 4 1 17 +10 -11.25 0 0 0 1 17 +5 -6.88 0 0 0 1 17 +5 -7.5 0 0 0 1 17 +26 0 12 1 0 1 17 +8 -13 0 0 0 1 17 +4 -4.5 0 0 0 1 17 +8 -10 0 0 0 1 17 +6 -3.75 0 0 0 1 17 +5 -5.63 0 0 0 1 17 +9 -18 0 0 0 1 17 +12 -13.5 0 0 0 1 17 +7 0 3 1 0 1 17 +8 -14 0 0 0 1 17 +2 -1.5 0 0 0 1 17 +10 -2.5 0 1 -2.5 1 17 +13 0 5 1 13 1 17 +9 -15.75 0 0 0 1 17 +8 -12 0 0 0 1 17 +28 0 13 1 0 1 17 +6 -7.5 0 0 0 1 17 +10 -16.25 0 0 0 1 17 +12 -7.5 0 0 0 1 17 +5 -5 0 0 0 1 17 +2 -2 0 0 0 1 17 +22 0 10 1 22 1 17 +2 -1 0 0 0 1 17 +3 0 1 1 3 1 17 +4 -5.5 0 0 0 1 17 +2 -2.25 0 0 0 1 17 +6 -2.25 0 0 0 1 17 +4 -6.5 0 0 0 1 17 +9 -12.38 0 0 0 1 17 +10 -13.75 0 0 0 1 17 +10 -17.5 0 0 0 1 17 +4 -5 0 0 0 1 17 +9 -11.25 0 0 0 1 17 +10 -10 0 0 0 1 17 +2 -3.25 0 0 0 1 17 +5 -8.75 0 0 0 1 17 +5 -10 0 0 0 1 17 +9 -2.25 0 1 -2.25 1 17 +6 -6.75 0 0 0 1 17 +12 -16.5 0 0 0 1 17 +9 -14.63 0 0 0 1 17 +4 -8 0 0 0 1 17 +6 -5.25 0 0 0 1 17 +9 -6.75 0 0 0 1 17 +12 -12 0 0 0 1 17 +4 -1 0 1 4 1 17 +12 -15 0 0 0 1 17 +4 -3.5 0 0 0 1 17 +2 -1.25 0 0 0 1 17 +30 0 12 1 0 1 17 +12 -19.5 0 0 0 1 17 +12 -3 0 0 0 1 17 +5 -1.25 0 0 0 1 17 +5 -1.88 0 0 0 1 17 +2 -3.5 0 0 0 1 17 +12 -9 0 0 0 1 17 +10 -20 0 0 0 1 17 +8 -4 0 0 0 1 17 +12 0 5 1 0 1 17 +2 0 1 1 2 1 17 +4 -1.5 0 0 0 1 17 +2 -3.75 0 0 0 1 17 +6 -10.5 0 0 0 1 17 +4 -2 0 0 0 1 17 +23 0 10 1 0 1 17 +12 -18 0 0 0 1 17 +6 -8.25 0 0 0 1 17 +26 0 10 1 26 1 17 +10 -8.75 0 0 0 1 17 +2 -0.75 0 0 0 1 17 +5 -9.38 0 0 0 1 17 +25 0 9 1 0 1 17 +9 -4.5 0 0 0 1 17 +10 -5 0 0 0 1 17 +2 -4 0 0 0 1 17 +2 -2.75 0 0 0 1 17 +4 -6 0 0 0 1 17 +10 -12.5 0 0 0 1 17 +12 -22.5 0 0 0 1 17 +4 -4 0 0 0 1 17 +2 -0.5 0 0 0 1 17 +8 -2 0 1 -2 1 17 +4 -3 0 0 0 1 17 +6 -11.25 0 0 0 1 17 +8 -15 0 0 0 1 17 +8 -11 0 0 0 1 17 +12 -4.5 0 0 0 1 17 +19 0 8 1 19 1 17 +6 -6 0 0 0 1 17 +5 -4.38 0 0 0 1 17 +9 -9 0 0 0 1 17 +6 -1.5 0 0 0 1 17 +9 -13.5 0 0 0 1 17 +9 -5.63 0 0 0 1 17 +8 -8 0 0 0 1 17 +8 0 3 1 8 1 17 +8 -3 0 0 0 1 17 +9 -11.25 0 1 9 1 18 +8 -16 0 0 0 1 18 +9 -5.63 0 1 -5.63 1 18 +9 -4.5 0 1 9 1 18 +2 -2 0 1 2 1 18 +12 -19.5 0 0 0 1 18 +4 -4.5 0 1 4 1 18 +2 -3.75 0 0 0 1 18 +2 -2.25 0 0 0 1 18 +12 -4.5 0 1 -4.5 1 18 +9 -10.13 0 1 -10.13 1 18 +12 -3 0 1 12 1 18 +10 -17.5 0 0 0 1 18 +5 -4.38 0 1 -4.38 1 18 +5 -7.5 0 0 0 1 18 +6 -11.25 0 0 0 1 18 +2 -1.5 0 1 -1.5 1 18 +9 -6.75 0 1 -6.75 1 18 +4 -7 0 1 -7 1 18 +8 -7 0 1 8 1 18 +2 -1.75 0 1 2 1 18 +2 -1 0 1 2 1 18 +10 -6.25 0 1 -6.25 1 18 +6 -6.75 0 1 6 1 18 +9 -2.25 0 1 -2.25 1 18 +2 -0.75 0 1 2 1 18 +12 0 4 0 4 1 18 +6 -3 0 1 -3 1 18 +3 0 1 1 3 1 18 +2 -3 0 0 0 1 18 +10 -13.75 0 0 0 1 18 +6 -2.25 0 1 6 1 18 +5 -1.88 0 1 -1.88 1 18 +12 -13.5 0 1 12 1 18 +22 0 10 0 10 1 18 +9 -12.38 0 0 0 1 18 +26 0 10 0 10 1 18 +12 -10.5 0 1 -10.5 1 18 +10 -2.5 0 1 -2.5 1 18 +25 0 10 0 10 1 18 +9 -15.75 0 0 0 1 18 +7 0 3 1 0 1 18 +10 -10 0 0 0 1 18 +12 -15 0 0 0 1 18 +12 0 6 0 6 1 18 +6 -4.5 0 1 -4.5 1 18 +8 -13 0 0 0 1 18 +10 -16.25 0 0 0 1 18 +5 -1.25 0 1 5 1 18 +4 -4 0 1 4 1 18 +5 -3.75 0 1 5 1 18 +6 -8.25 0 0 0 1 18 +8 -15 0 0 0 1 18 +8 -8 0 1 -8 1 18 +2 -2.75 0 1 -2.75 1 18 +6 -12 0 0 0 1 18 +2 0 1 1 2 1 18 +2 -1.25 0 1 -1.25 1 18 +9 -18 0 0 0 1 18 +6 -9 0 0 0 1 18 +10 -8.75 0 1 -8.75 1 18 +4 -7.5 0 0 0 1 18 +13 0 6 0 6 1 18 +10 -11.25 0 1 10 1 18 +4 -3 0 1 4 1 18 +10 -5 0 1 10 1 18 +8 -2 0 1 -2 1 18 +4 -2.5 0 1 4 1 18 +2 -3.5 0 0 0 1 18 +2 -2.5 0 0 0 1 18 +6 -3.75 0 1 6 1 18 +8 -3 0 1 8 1 18 +2 -3.25 0 0 0 1 18 +8 -9 0 0 0 1 18 +6 -6 0 0 0 1 18 +8 -11 0 0 0 1 18 +5 -8.75 0 0 0 1 18 +6 -9.75 0 0 0 1 18 +12 -24 0 0 0 1 18 +4 -6.5 0 0 0 1 18 +5 -10 0 0 0 1 
18 +30 0 12 1 0 1 18 +12 -18 0 0 0 1 18 +9 -9 0 0 0 1 18 +5 -5 0 1 -5 1 18 +5 -9.38 0 0 0 1 18 +10 -12.5 0 0 0 1 18 +10 -18.75 0 0 0 1 18 +5 -2.5 0 1 -2.5 1 18 +9 -14.63 0 0 0 1 18 +28 0 13 0 13 1 18 +5 -6.88 0 0 0 1 18 +4 -3.5 0 1 -3.5 1 18 +12 -16.5 0 0 0 1 18 +5 -8.13 0 0 0 1 18 +9 -16.88 0 0 0 1 18 +9 -3.38 0 1 -3.38 1 18 +12 0 5 1 0 1 18 +4 -8 0 0 0 1 18 +8 -12 0 0 0 1 18 +8 -4 0 1 -4 1 18 +2 -4 0 1 -4 1 18 +12 -9 0 1 -9 1 18 +4 -1.5 0 1 4 1 18 +6 -10.5 0 0 0 1 18 +5 -3.13 0 1 5 1 18 +10 -15 0 0 0 1 18 +23 0 10 0 10 1 18 +12 -7.5 0 1 -7.5 1 18 +2 -0.5 0 1 -0.5 1 18 +4 0 2 1 4 1 18 +6 -1.5 0 1 -1.5 1 18 +4 -1 0 1 4 1 18 +10 -20 0 0 0 1 18 +12 -22.5 0 0 0 1 18 +25 0 9 1 0 1 18 +13 0 5 1 13 1 18 +6 -5.25 0 1 6 1 18 +9 -13.5 0 0 0 1 18 +5 0 2 1 0 1 18 +12 -6 0 1 -6 1 18 +5 -6.25 0 0 0 1 18 +10 -3.75 0 1 10 1 18 +9 -7.88 0 1 9 1 18 +8 -6 0 1 -6 1 18 +4 -5.5 0 0 0 1 18 +19 0 8 0 8 1 18 +10 -7.5 0 1 -7.5 1 18 +4 -6 0 0 0 1 18 +8 -14 0 0 0 1 18 +8 0 3 1 8 1 18 +12 -21 0 0 0 1 18 +4 -2 0 1 -2 1 18 +4 -5 0 0 0 1 18 +6 -7.5 0 0 0 1 18 +12 -12 0 0 0 1 18 +8 -5 0 1 -5 1 18 +26 0 12 0 12 1 18 +8 -10 0 0 0 1 18 +5 -5.63 0 0 0 1 18 +2 -1 0 1 2 1 19 +9 -6.75 0 1 -6.75 1 19 +2 -4 0 0 0 1 19 +2 -3.25 0 0 0 1 19 +4 -6.5 0 1 -6.5 1 19 +5 -5.63 0 1 -5.63 1 19 +8 -8 0 1 -8 1 19 +12 -18 0 1 12 1 19 +2 -2.5 0 0 0 1 19 +3 0 1 1 3 1 19 +12 -16.5 0 1 12 1 19 +10 -12.5 0 0 0 1 19 +5 -1.25 0 1 5 1 19 +19 0 8 1 19 1 19 +8 -9 0 0 0 1 19 +5 -10 0 0 0 1 19 +25 0 10 1 25 1 19 +7 0 3 1 0 1 19 +6 -11.25 0 0 0 1 19 +6 -1.5 0 1 -1.5 1 19 +4 -1.5 0 1 4 1 19 +10 -5 0 1 10 1 19 +10 -3.75 0 1 10 1 19 +6 -4.5 0 1 -4.5 1 19 +12 -19.5 0 0 0 1 19 +5 -4.38 0 1 -4.38 1 19 +8 -11 0 0 0 1 19 +2 -0.75 0 1 2 1 19 +2 -1.5 0 1 -1.5 1 19 +6 -6.75 0 0 0 1 19 +4 -6 0 0 0 1 19 +10 -16.25 0 1 -16.25 1 19 +12 -15 0 1 -15 1 19 +6 -5.25 0 1 6 1 19 +12 -21 0 0 0 1 19 +4 -3 0 1 4 1 19 +12 -22.5 0 1 12 1 19 +2 -3.75 0 0 0 1 19 +6 -12 0 0 0 1 19 +5 -8.13 0 0 0 1 19 +10 -8.75 0 1 -8.75 1 19 +12 -6 0 1 -6 1 19 +5 -5 0 1 -5 1 19 +22 0 10 1 22 1 19 +12 -13.5 0 1 12 1 19 +8 -7 0 1 8 1 19 +4 -3.5 0 1 -3.5 1 19 +9 -12.38 0 0 0 1 19 +10 -7.5 0 1 -7.5 1 19 +26 0 10 1 26 1 19 +12 -4.5 0 1 -4.5 1 19 +8 -15 0 0 0 1 19 +2 -1.75 0 1 2 1 19 +12 0 6 1 0 1 19 +9 -3.38 0 1 -3.38 1 19 +2 -3 0 0 0 1 19 +9 -5.63 0 1 -5.63 1 19 +2 -3.5 0 0 0 1 19 +8 -12 0 0 0 1 19 +10 -18.75 0 0 0 1 19 +4 0 2 1 4 1 19 +2 -2.25 0 0 0 1 19 +9 -2.25 0 1 -2.25 1 19 +10 -13.75 0 0 0 1 19 +28 0 13 1 0 1 19 +4 -2.5 0 1 4 1 19 +9 -15.75 0 0 0 1 19 +10 -15 0 0 0 1 19 +10 -10 0 1 10 1 19 +9 -18 0 0 0 1 19 +12 -24 0 0 0 1 19 +13 0 5 1 13 1 19 +5 -1.88 0 1 -1.88 1 19 +4 -4.5 0 0 0 1 19 +9 -7.88 0 0 0 1 19 +9 -9 0 0 0 1 19 +25 0 9 1 0 1 19 +12 -12 0 0 0 1 19 +6 -2.25 0 1 6 1 19 +8 -5 0 1 -5 1 19 +4 -5.5 0 0 0 1 19 +2 -1.25 0 0 0 1 19 +9 -13.5 0 0 0 1 19 +9 -4.5 0 1 9 1 19 +10 -11.25 0 0 0 1 19 +6 -3 0 1 -3 1 19 +10 -2.5 0 1 -2.5 1 19 +12 0 4 1 12 1 19 +10 -20 0 0 0 1 19 +5 -3.75 0 0 0 1 19 +9 -10.13 0 0 0 1 19 +4 -7 0 0 0 1 19 +12 -10.5 0 1 -10.5 1 19 +8 -16 0 0 0 1 19 +4 -7.5 0 0 0 1 19 +8 0 3 1 8 1 19 +6 -10.5 0 0 0 1 19 +6 -9.75 0 0 0 1 19 +5 -8.75 0 0 0 1 19 +5 -2.5 0 1 -2.5 1 19 +13 0 6 1 0 1 19 +23 0 10 0 10 1 19 +8 -4 0 1 -4 1 19 +9 -11.25 0 0 0 1 19 +5 -6.88 0 0 0 1 19 +4 -4 0 1 4 1 19 +10 -17.5 0 0 0 1 19 +8 -13 0 0 0 1 19 +26 0 12 1 0 1 19 +6 -8.25 0 0 0 1 19 +9 -14.63 0 0 0 1 19 +8 -2 0 1 -2 1 19 +10 -6.25 0 1 -6.25 1 19 +8 -14 0 0 0 1 19 +12 0 5 1 0 1 19 +8 -10 0 0 0 1 19 +30 0 12 1 0 1 19 +5 -7.5 0 0 0 1 19 +5 0 2 1 0 1 19 +6 -3.75 0 1 6 1 19 +6 -6 0 1 -6 1 19 +4 -2 0 1 -2 1 19 +12 -7.5 0 
1 -7.5 1 19 +5 -6.25 0 0 0 1 19 +4 -5 0 0 0 1 19 +2 -2.75 0 0 0 1 19 +2 -2 0 1 2 1 19 +6 -9 0 0 0 1 19 +5 -3.13 0 1 5 1 19 +12 -9 0 1 -9 1 19 +4 -8 0 0 0 1 19 +4 -1 0 1 4 1 19 +2 0 1 1 2 1 19 +9 -16.88 0 0 0 1 19 +8 -6 0 1 -6 1 19 +2 -0.5 0 1 -0.5 1 19 +6 -7.5 0 0 0 1 19 +8 -3 0 1 8 1 19 +12 -3 0 1 12 1 19 +5 -9.38 0 0 0 1 19 +6 -9.75 0 0 0 1 20 +12 -13.5 0 0 0 1 20 +8 -7 0 1 8 1 20 +10 -7.5 0 1 -7.5 1 20 +2 -2.25 0 1 2 1 20 +6 -8.25 0 1 -8.25 1 20 +10 -16.25 0 0 0 1 20 +3 0 1 0 1 1 20 +4 -3 0 1 4 1 20 +8 -2 0 1 -2 1 20 +4 -2.5 0 1 4 1 20 +5 -5.63 0 1 -5.63 1 20 +5 0 2 0 2 1 20 +30 0 12 0 12 1 20 +9 -4.5 0 1 9 1 20 +4 -7.5 0 0 0 1 20 +26 0 10 1 26 1 20 +10 -6.25 0 1 -6.25 1 20 +2 -4 0 0 0 1 20 +4 -5 0 1 4 1 20 +5 -1.88 0 1 -1.88 1 20 +23 0 10 0 10 1 20 +8 -3 0 1 8 1 20 +8 -12 0 0 0 1 20 +10 -2.5 0 1 -2.5 1 20 +5 -8.13 0 0 0 1 20 +8 -9 0 1 -9 1 20 +2 -3 0 1 -3 1 20 +9 -11.25 0 1 9 1 20 +9 -12.38 0 0 0 1 20 +12 -15 0 1 -15 1 20 +8 -10 0 0 0 1 20 +4 -1 0 1 4 1 20 +8 0 3 0 3 1 20 +4 -3.5 0 0 0 1 20 +8 -8 0 1 -8 1 20 +10 -11.25 0 1 10 1 20 +10 -5 0 1 10 1 20 +9 -13.5 0 0 0 1 20 +2 -0.75 0 1 2 1 20 +5 -4.38 0 0 0 1 20 +2 -1.5 0 1 -1.5 1 20 +2 -3.75 0 0 0 1 20 +5 -3.75 0 1 5 1 20 +9 -16.88 0 0 0 1 20 +9 -3.38 0 1 -3.38 1 20 +5 -10 0 0 0 1 20 +26 0 12 0 12 1 20 +5 -9.38 0 0 0 1 20 +6 -1.5 0 1 -1.5 1 20 +10 -10 0 1 10 1 20 +2 -1.25 0 0 0 1 20 +9 -14.63 0 0 0 1 20 +6 -4.5 0 1 -4.5 1 20 +5 -5 0 1 -5 1 20 +5 -7.5 0 0 0 1 20 +8 -13 0 0 0 1 20 +5 -3.13 0 1 5 1 20 +8 -5 0 1 -5 1 20 +8 -11 0 1 8 1 20 +6 -6.75 0 1 6 1 20 +5 -8.75 0 0 0 1 20 +2 0 1 0 1 1 20 +9 -5.63 0 1 -5.63 1 20 +6 -6 0 1 -6 1 20 +4 -5.5 0 0 0 1 20 +6 -3 0 1 -3 1 20 +12 -19.5 0 0 0 1 20 +10 -13.75 0 0 0 1 20 +10 -8.75 0 1 -8.75 1 20 +5 -6.88 0 1 -6.88 1 20 +6 -7.5 0 0 0 1 20 +10 -12.5 0 1 -12.5 1 20 +9 -6.75 0 1 -6.75 1 20 +4 -6 0 0 0 1 20 +8 -4 0 1 -4 1 20 +2 -1 0 0 0 1 20 +12 -24 0 0 0 1 20 +12 -6 0 1 -6 1 20 +2 -2 0 1 2 1 20 +4 -7 0 0 0 1 20 +12 -9 0 0 0 1 20 +6 -11.25 0 1 6 1 20 +25 0 10 0 10 1 20 +28 0 13 0 13 1 20 +2 -2.75 0 1 -2.75 1 20 +12 -10.5 0 0 0 1 20 +8 -14 0 0 0 1 20 +4 -6.5 0 0 0 1 20 +4 0 2 0 2 1 20 +10 -15 0 1 10 1 20 +12 0 5 0 5 1 20 +10 -18.75 0 0 0 1 20 +12 -3 0 1 12 1 20 +4 -4 0 0 0 1 20 +9 -7.88 0 0 0 1 20 +9 -2.25 0 1 -2.25 1 20 +2 -1.75 0 0 0 1 20 +12 0 6 0 6 1 20 +5 -2.5 0 1 -2.5 1 20 +4 -4.5 0 1 4 1 20 +8 -6 0 0 0 1 20 +12 -18 0 0 0 1 20 +12 -16.5 0 0 0 1 20 +22 0 10 0 10 1 20 +12 -21 0 0 0 1 20 +12 -4.5 0 1 -4.5 1 20 +12 -12 0 1 -12 1 20 +19 0 8 0 8 1 20 +2 -2.5 0 1 2 1 20 +12 0 4 0 4 1 20 +4 -2 0 1 -2 1 20 +9 -9 0 0 0 1 20 +9 -10.13 0 0 0 1 20 +6 -2.25 0 1 6 1 20 +2 -0.5 0 1 -0.5 1 20 +10 -3.75 0 1 10 1 20 +13 0 5 0 5 1 20 +4 -1.5 0 1 4 1 20 +5 -1.25 0 1 5 1 20 +6 -9 0 0 0 1 20 +10 -17.5 0 0 0 1 20 +6 -12 0 0 0 1 20 +6 -5.25 0 0 0 1 20 +12 -22.5 0 0 0 1 20 +8 -16 0 0 0 1 20 +9 -15.75 0 0 0 1 20 +10 -20 0 0 0 1 20 +13 0 6 0 6 1 20 +4 -8 0 0 0 1 20 +12 -7.5 0 1 -7.5 1 20 +9 -18 0 0 0 1 20 +2 -3.25 0 1 2 1 20 +7 0 3 0 3 1 20 +6 -3.75 0 1 6 1 20 +5 -6.25 0 0 0 1 20 +8 -15 0 0 0 1 20 +25 0 9 0 9 1 20 +2 -3.5 0 0 0 1 20 +6 -10.5 0 0 0 1 20 +9 -10.13 0 0 0 1 21 +12 -10.5 0 0 0 1 21 +25 0 10 1 25 1 21 +4 -7 0 0 0 1 21 +9 -7.88 0 0 0 1 21 +5 -3.13 0 1 5 1 21 +5 -8.13 0 0 0 1 21 +8 -7 0 0 0 1 21 +12 -6 0 1 -6 1 21 +12 -24 0 0 0 1 21 +12 -21 0 0 0 1 21 +4 -2.5 0 1 4 1 21 +6 -9 0 0 0 1 21 +10 -15 0 0 0 1 21 +8 -6 0 1 -6 1 21 +13 0 6 1 0 1 21 +6 -12 0 0 0 1 21 +6 -4.5 0 1 -4.5 1 21 +9 -16.88 0 0 0 1 21 +10 -18.75 0 0 0 1 21 +9 -3.38 0 1 -3.38 1 21 +6 -9.75 0 0 0 1 21 +2 -1.75 0 0 0 1 21 +5 0 2 1 0 1 21 +8 -5 0 1 -5 1 21 +8 
-9 0 0 0 1 21 +12 0 6 1 0 1 21 +12 0 4 1 12 1 21 +2 -2.5 0 0 0 1 21 +6 -3 0 1 -3 1 21 +10 -7.5 0 0 0 1 21 +5 -2.5 0 1 -2.5 1 21 +5 -3.75 0 0 0 1 21 +10 -3.75 0 1 10 1 21 +2 -3 0 0 0 1 21 +10 -6.25 0 0 0 1 21 +4 -7.5 0 0 0 1 21 +8 -16 0 0 0 1 21 +5 -6.25 0 0 0 1 21 +4 0 2 1 4 1 21 +10 -11.25 0 0 0 1 21 +5 -6.88 0 0 0 1 21 +5 -7.5 0 0 0 1 21 +26 0 12 1 0 1 21 +8 -13 0 0 0 1 21 +4 -4.5 0 0 0 1 21 +8 -10 0 0 0 1 21 +6 -3.75 0 1 6 1 21 +5 -5.63 0 0 0 1 21 +9 -18 0 0 0 1 21 +12 -13.5 0 0 0 1 21 +7 0 3 1 0 1 21 +8 -14 0 0 0 1 21 +2 -1.5 0 1 -1.5 1 21 +10 -2.5 0 1 -2.5 1 21 +13 0 5 1 13 1 21 +9 -15.75 0 0 0 1 21 +8 -12 0 0 0 1 21 +28 0 13 1 0 1 21 +6 -7.5 0 0 0 1 21 +10 -16.25 0 0 0 1 21 +12 -7.5 0 0 0 1 21 +5 -5 0 0 0 1 21 +2 -2 0 0 0 1 21 +22 0 10 1 22 1 21 +2 -1 0 1 2 1 21 +3 0 1 1 3 1 21 +4 -5.5 0 0 0 1 21 +2 -2.25 0 0 0 1 21 +6 -2.25 0 1 6 1 21 +4 -6.5 0 0 0 1 21 +9 -12.38 0 0 0 1 21 +10 -13.75 0 0 0 1 21 +10 -17.5 0 0 0 1 21 +4 -5 0 0 0 1 21 +9 -11.25 0 0 0 1 21 +10 -10 0 0 0 1 21 +2 -3.25 0 0 0 1 21 +5 -8.75 0 0 0 1 21 +5 -10 0 0 0 1 21 +9 -2.25 0 1 -2.25 1 21 +6 -6.75 0 0 0 1 21 +12 -16.5 0 0 0 1 21 +9 -14.63 0 0 0 1 21 +4 -8 0 0 0 1 21 +6 -5.25 0 0 0 1 21 +12 -12 0 0 0 1 21 +4 -1 0 1 4 1 21 +12 -15 0 0 0 1 21 +4 -3.5 0 0 0 1 21 +2 -1.25 0 1 -1.25 1 21 +30 0 12 1 0 1 21 +12 -19.5 0 0 0 1 21 +12 -3 0 1 12 1 21 +5 -1.25 0 1 5 1 21 +5 -1.88 0 1 -1.88 1 21 +2 -3.5 0 0 0 1 21 +12 -9 0 0 0 1 21 +10 -20 0 0 0 1 21 +8 -4 0 1 -4 1 21 +12 0 5 1 0 1 21 +2 0 1 1 2 1 21 +4 -1.5 0 1 4 1 21 +2 -3.75 0 0 0 1 21 +6 -10.5 0 0 0 1 21 +4 -2 0 1 -2 1 21 +23 0 10 1 0 1 21 +12 -18 0 0 0 1 21 +6 -8.25 0 0 0 1 21 +26 0 10 1 26 1 21 +10 -8.75 0 0 0 1 21 +2 -0.75 0 1 2 1 21 +5 -9.38 0 0 0 1 21 +25 0 9 1 0 1 21 +9 -4.5 0 1 9 1 21 +10 -5 0 1 10 1 21 +2 -4 0 0 0 1 21 +2 -2.75 0 0 0 1 21 +4 -6 0 0 0 1 21 +10 -12.5 0 0 0 1 21 +12 -22.5 0 0 0 1 21 +4 -4 0 0 0 1 21 +2 -0.5 0 1 -0.5 1 21 +8 -2 0 1 -2 1 21 +4 -3 0 0 0 1 21 +6 -11.25 0 0 0 1 21 +8 -15 0 0 0 1 21 +8 -11 0 0 0 1 21 +12 -4.5 0 1 -4.5 1 21 +19 0 8 1 19 1 21 +6 -6 0 0 0 1 21 +5 -4.38 0 0 0 1 21 +9 -9 0 0 0 1 21 +6 -1.5 0 1 -1.5 1 21 +9 -13.5 0 0 0 1 21 +9 -5.63 0 0 0 1 21 +8 -8 0 0 0 1 21 +8 0 3 1 8 1 21 +8 -3 0 1 8 1 21 +9 -11.25 0 0 0 1 22 +8 -16 0 0 0 1 22 +9 -5.63 0 0 0 1 22 +9 -4.5 0 1 9 1 22 +2 -2 0 0 0 1 22 +12 -19.5 0 0 0 1 22 +4 -4.5 0 0 0 1 22 +2 -3.75 0 0 0 1 22 +2 -2.25 0 0 0 1 22 +12 -4.5 0 1 -4.5 1 22 +9 -10.13 0 0 0 1 22 +12 -3 0 1 12 1 22 +10 -17.5 0 0 0 1 22 +5 -4.38 0 0 0 1 22 +5 -7.5 0 0 0 1 22 +6 -11.25 0 0 0 1 22 +2 -1.5 0 0 0 1 22 +9 -6.75 0 0 0 1 22 +4 -7 0 0 0 1 22 +8 -7 0 0 0 1 22 +2 -1.75 0 0 0 1 22 +2 -1 0 0 0 1 22 +10 -6.25 0 1 -6.25 1 22 +6 -6.75 0 0 0 1 22 +9 -2.25 0 1 -2.25 1 22 +2 -0.75 0 1 2 1 22 +12 0 4 1 12 1 22 +6 -3 0 0 0 1 22 +3 0 1 1 3 1 22 +2 -3 0 0 0 1 22 +10 -13.75 0 1 -13.75 1 22 +6 -2.25 0 1 6 1 22 +5 -1.88 0 1 -1.88 1 22 +12 -13.5 0 0 0 1 22 +22 0 10 1 22 1 22 +9 -12.38 0 0 0 1 22 +26 0 10 0 10 1 22 +12 -10.5 0 0 0 1 22 +10 -2.5 0 1 -2.5 1 22 +25 0 10 1 25 1 22 +9 -15.75 0 0 0 1 22 +7 0 3 1 0 1 22 +10 -10 0 1 10 1 22 +12 -15 0 0 0 1 22 +12 0 6 0 6 1 22 +6 -4.5 0 0 0 1 22 +8 -13 0 0 0 1 22 +10 -16.25 0 0 0 1 22 +5 -1.25 0 0 0 1 22 +4 -4 0 0 0 1 22 +5 -3.75 0 0 0 1 22 +6 -8.25 0 0 0 1 22 +8 -15 0 0 0 1 22 +8 -8 0 0 0 1 22 +2 -2.75 0 0 0 1 22 +6 -12 0 0 0 1 22 +2 0 1 0 1 1 22 +2 -1.25 0 1 -1.25 1 22 +9 -18 0 0 0 1 22 +6 -9 0 0 0 1 22 +10 -8.75 0 0 0 1 22 +4 -7.5 0 0 0 1 22 +13 0 6 1 0 1 22 +10 -11.25 0 0 0 1 22 +4 -3 0 1 4 1 22 +10 -5 0 0 0 1 22 +8 -2 0 1 -2 1 22 +4 -2.5 0 1 4 1 22 +2 -3.5 0 0 0 1 22 +2 -2.5 0 0 0 1 22 +6 
-3.75 0 0 0 1 22 +8 -3 0 1 8 1 22 +2 -3.25 0 1 2 1 22 +8 -9 0 0 0 1 22 +6 -6 0 0 0 1 22 +8 -11 0 0 0 1 22 +5 -8.75 0 0 0 1 22 +6 -9.75 0 0 0 1 22 +12 -24 0 0 0 1 22 +4 -6.5 0 0 0 1 22 +5 -10 0 0 0 1 22 +30 0 12 1 0 1 22 +12 -18 0 0 0 1 22 +9 -9 0 1 9 1 22 +5 -5 0 0 0 1 22 +5 -9.38 0 0 0 1 22 +10 -12.5 0 0 0 1 22 +10 -18.75 0 0 0 1 22 +5 -2.5 0 1 -2.5 1 22 +9 -14.63 0 0 0 1 22 +28 0 13 0 13 1 22 +5 -6.88 0 0 0 1 22 +4 -3.5 0 1 -3.5 1 22 +12 -16.5 0 1 12 1 22 +5 -8.13 0 0 0 1 22 +9 -16.88 0 0 0 1 22 +9 -3.38 0 1 -3.38 1 22 +12 0 5 1 0 1 22 +4 -8 0 0 0 1 22 +8 -12 0 0 0 1 22 +8 -4 0 0 0 1 22 +2 -4 0 0 0 1 22 +12 -9 0 0 0 1 22 +4 -1.5 0 1 4 1 22 +6 -10.5 0 0 0 1 22 +5 -3.13 0 0 0 1 22 +10 -15 0 1 10 1 22 +23 0 10 0 10 1 22 +12 -7.5 0 0 0 1 22 +2 -0.5 0 1 -0.5 1 22 +4 0 2 1 4 1 22 +6 -1.5 0 0 0 1 22 +4 -1 0 1 4 1 22 +10 -20 0 0 0 1 22 +12 -22.5 0 0 0 1 22 +25 0 9 1 0 1 22 +13 0 5 1 13 1 22 +6 -5.25 0 0 0 1 22 +9 -13.5 0 0 0 1 22 +5 0 2 0 2 1 22 +12 -6 0 1 -6 1 22 +5 -6.25 0 0 0 1 22 +10 -3.75 0 1 10 1 22 +9 -7.88 0 0 0 1 22 +8 -6 0 0 0 1 22 +4 -5.5 0 0 0 1 22 +19 0 8 1 19 1 22 +10 -7.5 0 0 0 1 22 +4 -6 0 1 4 1 22 +8 -14 0 0 0 1 22 +8 0 3 1 8 1 22 +12 -21 0 0 0 1 22 +4 -2 0 0 0 1 22 +4 -5 0 0 0 1 22 +6 -7.5 0 0 0 1 22 +12 -12 0 0 0 1 22 +8 -5 0 1 -5 1 22 +26 0 12 1 0 1 22 +8 -10 0 1 -10 1 22 +5 -5.63 0 1 -5.63 1 22 +2 -1 0 1 2 1 23 +9 -6.75 0 1 -6.75 1 23 +2 -4 0 0 0 1 23 +2 -3.25 0 0 0 1 23 +4 -6.5 0 1 -6.5 1 23 +5 -5.63 0 0 0 1 23 +8 -8 0 1 -8 1 23 +12 -18 0 1 12 1 23 +2 -2.5 0 1 2 1 23 +3 0 1 1 3 1 23 +12 -16.5 0 1 12 1 23 +10 -12.5 0 0 0 1 23 +5 -1.25 0 1 5 1 23 +19 0 8 1 19 1 23 +8 -9 0 0 0 1 23 +5 -10 0 0 0 1 23 +25 0 10 1 25 1 23 +7 0 3 1 0 1 23 +6 -11.25 0 0 0 1 23 +6 -1.5 0 1 -1.5 1 23 +4 -1.5 0 1 4 1 23 +10 -5 0 1 10 1 23 +10 -3.75 0 1 10 1 23 +6 -4.5 0 1 -4.5 1 23 +12 -19.5 0 0 0 1 23 +5 -4.38 0 1 -4.38 1 23 +8 -11 0 0 0 1 23 +2 -0.75 0 1 2 1 23 +2 -1.5 0 1 -1.5 1 23 +6 -6.75 0 0 0 1 23 +4 -6 0 1 4 1 23 +10 -16.25 0 0 0 1 23 +12 -15 0 0 0 1 23 +6 -5.25 0 1 6 1 23 +12 -21 0 0 0 1 23 +4 -3 0 1 4 1 23 +12 -22.5 0 0 0 1 23 +2 -3.75 0 1 -3.75 1 23 +6 -12 0 0 0 1 23 +5 -8.13 0 0 0 1 23 +10 -8.75 0 1 -8.75 1 23 +12 -6 0 0 0 1 23 +5 -5 0 1 -5 1 23 +22 0 10 0 10 1 23 +12 -13.5 0 0 0 1 23 +8 -7 0 0 0 1 23 +4 -3.5 0 1 -3.5 1 23 +9 -12.38 0 0 0 1 23 +10 -7.5 0 0 0 1 23 +26 0 10 1 26 1 23 +12 -4.5 0 1 -4.5 1 23 +8 -15 0 0 0 1 23 +2 -1.75 0 1 2 1 23 +12 0 6 0 6 1 23 +9 -3.38 0 1 -3.38 1 23 +2 -3 0 1 -3 1 23 +9 -5.63 0 0 0 1 23 +2 -3.5 0 1 -3.5 1 23 +8 -12 0 1 8 1 23 +10 -18.75 0 0 0 1 23 +4 0 2 1 4 1 23 +2 -2.25 0 1 2 1 23 +9 -2.25 0 1 -2.25 1 23 +10 -13.75 0 0 0 1 23 +28 0 13 0 13 1 23 +4 -2.5 0 1 4 1 23 +9 -15.75 0 0 0 1 23 +10 -15 0 0 0 1 23 +10 -10 0 1 10 1 23 +9 -18 0 0 0 1 23 +12 -24 0 0 0 1 23 +13 0 5 1 13 1 23 +5 -1.88 0 1 -1.88 1 23 +4 -4.5 0 1 4 1 23 +9 -7.88 0 0 0 1 23 +9 -9 0 0 0 1 23 +25 0 9 1 0 1 23 +12 -12 0 0 0 1 23 +6 -2.25 0 1 6 1 23 +8 -5 0 1 -5 1 23 +4 -5.5 0 0 0 1 23 +2 -1.25 0 1 -1.25 1 23 +9 -13.5 0 0 0 1 23 +9 -4.5 0 1 9 1 23 +10 -11.25 0 0 0 1 23 +6 -3 0 1 -3 1 23 +10 -2.5 0 1 -2.5 1 23 +12 0 4 0 4 1 23 +10 -20 0 0 0 1 23 +5 -3.75 0 1 5 1 23 +9 -10.13 0 0 0 1 23 +4 -7 0 0 0 1 23 +12 -10.5 0 0 0 1 23 +8 -16 0 0 0 1 23 +4 -7.5 0 0 0 1 23 +8 0 3 1 8 1 23 +6 -10.5 0 0 0 1 23 +6 -9.75 0 0 0 1 23 +5 -8.75 0 0 0 1 23 +5 -2.5 0 1 -2.5 1 23 +13 0 6 0 6 1 23 +23 0 10 0 10 1 23 +8 -4 0 1 -4 1 23 +9 -11.25 0 0 0 1 23 +5 -6.88 0 0 0 1 23 +4 -4 0 1 4 1 23 +10 -17.5 0 0 0 1 23 +8 -13 0 0 0 1 23 +26 0 12 0 12 1 23 +6 -8.25 0 0 0 1 23 +9 -14.63 0 0 0 1 23 +8 -2 0 1 -2 1 23 +10 -6.25 0 0 0 1 
23 +8 -14 0 0 0 1 23 +12 0 5 1 0 1 23 +8 -10 0 0 0 1 23 +30 0 12 0 12 1 23 +5 -7.5 0 0 0 1 23 +5 0 2 1 0 1 23 +6 -3.75 0 0 0 1 23 +6 -6 0 0 0 1 23 +4 -2 0 1 -2 1 23 +12 -7.5 0 0 0 1 23 +5 -6.25 0 0 0 1 23 +4 -5 0 1 4 1 23 +2 -2.75 0 0 0 1 23 +2 -2 0 1 2 1 23 +6 -9 0 0 0 1 23 +5 -3.13 0 1 5 1 23 +12 -9 0 0 0 1 23 +4 -8 0 0 0 1 23 +4 -1 0 1 4 1 23 +2 0 1 1 2 1 23 +9 -16.88 0 0 0 1 23 +8 -6 0 0 0 1 23 +2 -0.5 0 1 -0.5 1 23 +6 -7.5 0 0 0 1 23 +8 -3 0 1 8 1 23 +12 -3 0 1 12 1 23 +5 -9.38 0 0 0 1 23 +6 -9.75 0 0 0 1 24 +12 -13.5 0 1 12 1 24 +8 -7 0 1 8 1 24 +10 -7.5 0 1 -7.5 1 24 +2 -2.25 0 0 0 1 24 +6 -8.25 0 0 0 1 24 +10 -16.25 0 0 0 1 24 +3 0 1 1 3 1 24 +4 -3 0 1 4 1 24 +8 -2 0 1 -2 1 24 +4 -2.5 0 1 4 1 24 +5 -5.63 0 1 -5.63 1 24 +5 0 2 1 0 1 24 +30 0 12 1 0 1 24 +9 -4.5 0 1 9 1 24 +4 -7.5 0 0 0 1 24 +26 0 10 1 26 1 24 +10 -6.25 0 1 -6.25 1 24 +2 -4 0 0 0 1 24 +4 -5 0 0 0 1 24 +5 -1.88 0 1 -1.88 1 24 +23 0 10 0 10 1 24 +8 -3 0 1 8 1 24 +8 -12 0 0 0 1 24 +10 -2.5 0 1 -2.5 1 24 +5 -8.13 0 0 0 1 24 +8 -9 0 1 -9 1 24 +2 -3 0 1 -3 1 24 +9 -11.25 0 0 0 1 24 +9 -12.38 0 0 0 1 24 +12 -15 0 1 -15 1 24 +8 -10 0 0 0 1 24 +4 -1 0 1 4 1 24 +8 0 3 1 8 1 24 +4 -3.5 0 1 -3.5 1 24 +8 -8 0 1 -8 1 24 +10 -11.25 0 0 0 1 24 +10 -5 0 1 10 1 24 +9 -13.5 0 0 0 1 24 +2 -0.75 0 1 2 1 24 +5 -4.38 0 1 -4.38 1 24 +2 -1.5 0 1 -1.5 1 24 +2 -3.75 0 0 0 1 24 +5 -3.75 0 1 5 1 24 +9 -16.88 0 0 0 1 24 +9 -3.38 0 1 -3.38 1 24 +5 -10 0 0 0 1 24 +26 0 12 1 0 1 24 +5 -9.38 0 0 0 1 24 +6 -1.5 0 1 -1.5 1 24 +10 -10 0 1 10 1 24 +2 -1.25 0 1 -1.25 1 24 +9 -14.63 0 0 0 1 24 +6 -4.5 0 1 -4.5 1 24 +5 -5 0 1 -5 1 24 +5 -7.5 0 0 0 1 24 +8 -13 0 0 0 1 24 +5 -3.13 0 1 5 1 24 +8 -5 0 1 -5 1 24 +8 -11 0 0 0 1 24 +6 -6.75 0 0 0 1 24 +5 -8.75 0 0 0 1 24 +2 0 1 1 2 1 24 +9 -5.63 0 1 -5.63 1 24 +6 -6 0 1 -6 1 24 +4 -5.5 0 0 0 1 24 +6 -3 0 1 -3 1 24 +12 -19.5 0 0 0 1 24 +10 -13.75 0 0 0 1 24 +10 -8.75 0 1 -8.75 1 24 +5 -6.88 0 0 0 1 24 +6 -7.5 0 0 0 1 24 +10 -12.5 0 0 0 1 24 +9 -6.75 0 1 -6.75 1 24 +4 -6 0 0 0 1 24 +8 -4 0 1 -4 1 24 +2 -1 0 1 2 1 24 +12 -24 0 0 0 1 24 +12 -6 0 1 -6 1 24 +2 -2 0 1 2 1 24 +4 -7 0 0 0 1 24 +12 -9 0 1 -9 1 24 +6 -11.25 0 0 0 1 24 +25 0 10 1 25 1 24 +28 0 13 1 0 1 24 +2 -2.75 0 0 0 1 24 +12 -10.5 0 1 -10.5 1 24 +8 -14 0 0 0 1 24 +4 -6.5 0 0 0 1 24 +4 0 2 1 4 1 24 +10 -15 0 0 0 1 24 +12 0 5 1 0 1 24 +10 -18.75 0 0 0 1 24 +12 -3 0 1 12 1 24 +4 -4 0 1 4 1 24 +9 -7.88 0 1 9 1 24 +9 -2.25 0 1 -2.25 1 24 +2 -1.75 0 1 2 1 24 +12 0 6 1 0 1 24 +5 -2.5 0 1 -2.5 1 24 +4 -4.5 0 0 0 1 24 +8 -6 0 1 -6 1 24 +12 -18 0 0 0 1 24 +12 -16.5 0 0 0 1 24 +22 0 10 1 22 1 24 +12 -21 0 0 0 1 24 +12 -4.5 0 1 -4.5 1 24 +12 -12 0 1 -12 1 24 +19 0 8 1 19 1 24 +2 -2.5 0 0 0 1 24 +12 0 4 1 12 1 24 +4 -2 0 1 -2 1 24 +9 -9 0 1 9 1 24 +9 -10.13 0 0 0 1 24 +6 -2.25 0 1 6 1 24 +2 -0.5 0 1 -0.5 1 24 +10 -3.75 0 1 10 1 24 +13 0 5 1 13 1 24 +4 -1.5 0 1 4 1 24 +5 -1.25 0 1 5 1 24 +6 -9 0 0 0 1 24 +10 -17.5 0 0 0 1 24 +6 -12 0 0 0 1 24 +6 -5.25 0 1 6 1 24 +12 -22.5 0 0 0 1 24 +8 -16 0 0 0 1 24 +9 -15.75 0 0 0 1 24 +10 -20 0 0 0 1 24 +13 0 6 1 0 1 24 +4 -8 0 0 0 1 24 +12 -7.5 0 1 -7.5 1 24 +9 -18 0 0 0 1 24 +2 -3.25 0 0 0 1 24 +7 0 3 1 0 1 24 +6 -3.75 0 1 6 1 24 +5 -6.25 0 0 0 1 24 +8 -15 0 0 0 1 24 +25 0 9 1 0 1 24 +2 -3.5 0 0 0 1 24 +6 -10.5 0 0 0 1 24 +9 -10.13 0 1 -10.13 1 25 +12 -10.5 0 1 -10.5 1 25 +25 0 10 1 25 1 25 +4 -7 0 0 0 1 25 +9 -7.88 0 1 9 1 25 +5 -3.13 0 1 5 1 25 +5 -8.13 0 0 0 1 25 +8 -7 0 1 8 1 25 +12 -6 0 1 -6 1 25 +12 -24 0 0 0 1 25 +12 -21 0 0 0 1 25 +4 -2.5 0 1 4 1 25 +6 -9 0 1 -9 1 25 +10 -15 0 1 10 1 25 +8 -6 0 1 -6 1 25 +13 0 6 1 0 1 25 
+6 -12 0 0 0 1 25 +6 -4.5 0 0 0 1 25 +9 -16.88 0 1 9 1 25 +10 -18.75 0 0 0 1 25 +9 -3.38 0 1 -3.38 1 25 +6 -9.75 0 1 -9.75 1 25 +2 -1.75 0 1 2 1 25 +5 0 2 1 0 1 25 +8 -5 0 1 -5 1 25 +8 -9 0 1 -9 1 25 +12 0 6 1 0 1 25 +12 0 4 1 12 1 25 +2 -2.5 0 1 2 1 25 +6 -3 0 0 0 1 25 +10 -7.5 0 1 -7.5 1 25 +5 -2.5 0 1 -2.5 1 25 +5 -3.75 0 1 5 1 25 +10 -3.75 0 1 10 1 25 +2 -3 0 0 0 1 25 +10 -6.25 0 1 -6.25 1 25 +4 -7.5 0 0 0 1 25 +8 -16 0 0 0 1 25 +5 -6.25 0 0 0 1 25 +4 0 2 1 4 1 25 +10 -11.25 0 1 10 1 25 +5 -6.88 0 1 -6.88 1 25 +5 -7.5 0 1 5 1 25 +26 0 12 1 0 1 25 +8 -13 0 0 0 1 25 +4 -4.5 0 1 4 1 25 +8 -10 0 1 -10 1 25 +6 -3.75 0 1 6 1 25 +5 -5.63 0 1 -5.63 1 25 +9 -18 0 0 0 1 25 +12 -13.5 0 0 0 1 25 +7 0 3 1 0 1 25 +8 -14 0 1 8 1 25 +2 -1.5 0 1 -1.5 1 25 +10 -2.5 0 1 -2.5 1 25 +13 0 5 1 13 1 25 +9 -15.75 0 0 0 1 25 +8 -12 0 1 8 1 25 +28 0 13 1 0 1 25 +6 -7.5 0 1 -7.5 1 25 +10 -16.25 0 0 0 1 25 +12 -7.5 0 1 -7.5 1 25 +5 -5 0 1 -5 1 25 +2 -2 0 1 2 1 25 +22 0 10 0 10 1 25 +2 -1 0 1 2 1 25 +3 0 1 1 3 1 25 +4 -5.5 0 1 -5.5 1 25 +2 -2.25 0 1 2 1 25 +6 -2.25 0 1 6 1 25 +4 -6.5 0 1 -6.5 1 25 +9 -12.38 0 0 0 1 25 +10 -13.75 0 0 0 1 25 +10 -17.5 0 1 10 1 25 +4 -5 0 1 4 1 25 +9 -11.25 0 1 9 1 25 +10 -10 0 0 0 1 25 +2 -3.25 0 1 2 1 25 +5 -8.75 0 1 5 1 25 +5 -10 0 0 0 1 25 +9 -2.25 0 1 -2.25 1 25 +6 -6.75 0 1 6 1 25 +12 -16.5 0 0 0 1 25 +9 -14.63 0 1 9 1 25 +4 -8 0 0 0 1 25 +6 -5.25 0 1 6 1 25 +9 -6.75 0 1 -6.75 1 25 +12 -12 0 1 -12 1 25 +4 -1 0 1 4 1 25 +12 -15 0 1 -15 1 25 +4 -3.5 0 1 -3.5 1 25 +2 -1.25 0 1 -1.25 1 25 +30 0 12 1 0 1 25 +12 -19.5 0 0 0 1 25 +12 -3 0 1 12 1 25 +5 -1.25 0 1 5 1 25 +5 -1.88 0 1 -1.88 1 25 +2 -3.5 0 1 -3.5 1 25 +12 -9 0 0 0 1 25 +10 -20 0 0 0 1 25 +8 -4 0 1 -4 1 25 +12 0 5 1 0 1 25 +2 0 1 0 1 1 25 +4 -1.5 0 1 4 1 25 +2 -3.75 0 0 0 1 25 +6 -10.5 0 0 0 1 25 +4 -2 0 1 -2 1 25 +23 0 10 0 10 1 25 +12 -18 0 1 12 1 25 +6 -8.25 0 0 0 1 25 +26 0 10 1 26 1 25 +10 -8.75 0 1 -8.75 1 25 +2 -0.75 0 1 2 1 25 +5 -9.38 0 1 -9.38 1 25 +25 0 9 1 0 1 25 +9 -4.5 0 1 9 1 25 +10 -5 0 1 10 1 25 +2 -4 0 1 -4 1 25 +2 -2.75 0 1 -2.75 1 25 +4 -6 0 0 0 1 25 +10 -12.5 0 1 -12.5 1 25 +12 -22.5 0 0 0 1 25 +4 -4 0 1 4 1 25 +2 -0.5 0 1 -0.5 1 25 +8 -2 0 1 -2 1 25 +4 -3 0 1 4 1 25 +6 -11.25 0 0 0 1 25 +8 -15 0 0 0 1 25 +8 -11 0 1 8 1 25 +12 -4.5 0 1 -4.5 1 25 +19 0 8 1 19 1 25 +6 -6 0 1 -6 1 25 +5 -4.38 0 1 -4.38 1 25 +9 -9 0 1 9 1 25 +6 -1.5 0 1 -1.5 1 25 +9 -13.5 0 1 -13.5 1 25 +9 -5.63 0 1 -5.63 1 25 +8 -8 0 1 -8 1 25 +8 0 3 1 8 1 25 +8 -3 0 1 8 1 25 +9 -11.25 0 1 9 1 26 +8 -16 0 0 0 1 26 +9 -5.63 0 1 -5.63 1 26 +9 -4.5 0 1 9 1 26 +2 -2 0 0 0 1 26 +12 -19.5 0 0 0 1 26 +4 -4.5 0 1 4 1 26 +2 -3.75 0 0 0 1 26 +2 -2.25 0 0 0 1 26 +12 -4.5 0 1 -4.5 1 26 +9 -10.13 0 0 0 1 26 +12 -3 0 1 12 1 26 +10 -17.5 0 0 0 1 26 +5 -4.38 0 1 -4.38 1 26 +5 -7.5 0 0 0 1 26 +6 -11.25 0 0 0 1 26 +2 -1.5 0 1 -1.5 1 26 +9 -6.75 0 1 -6.75 1 26 +4 -7 0 0 0 1 26 +8 -7 0 0 0 1 26 +2 -1.75 0 1 2 1 26 +2 -1 0 1 2 1 26 +10 -6.25 0 0 0 1 26 +6 -6.75 0 0 0 1 26 +9 -2.25 0 1 -2.25 1 26 +2 -0.75 0 1 2 1 26 +12 0 4 1 12 1 26 +6 -3 0 1 -3 1 26 +3 0 1 1 3 1 26 +2 -3 0 1 -3 1 26 +10 -13.75 0 1 -13.75 1 26 +6 -2.25 0 1 6 1 26 +5 -1.88 0 1 -1.88 1 26 +12 -13.5 0 0 0 1 26 +22 0 10 0 10 1 26 +9 -12.38 0 0 0 1 26 +26 0 10 0 10 1 26 +12 -10.5 0 0 0 1 26 +10 -2.5 0 1 -2.5 1 26 +25 0 10 0 10 1 26 +9 -15.75 0 0 0 1 26 +7 0 3 1 0 1 26 +10 -10 0 0 0 1 26 +12 -15 0 0 0 1 26 +12 0 6 0 6 1 26 +6 -4.5 0 1 -4.5 1 26 +8 -13 0 0 0 1 26 +10 -16.25 0 0 0 1 26 +5 -1.25 0 1 5 1 26 +4 -4 0 0 0 1 26 +5 -3.75 0 1 5 1 26 +6 -8.25 0 0 0 1 26 +8 -15 0 0 0 1 26 +8 -8 0 0 0 1 26 +2 
-2.75 0 1 -2.75 1 26 +6 -12 0 0 0 1 26 +2 0 1 1 2 1 26 +2 -1.25 0 1 -1.25 1 26 +9 -18 0 0 0 1 26 +6 -9 0 0 0 1 26 +10 -8.75 0 1 -8.75 1 26 +4 -7.5 0 0 0 1 26 +13 0 6 1 0 1 26 +10 -11.25 0 0 0 1 26 +4 -3 0 0 0 1 26 +10 -5 0 1 10 1 26 +8 -2 0 1 -2 1 26 +4 -2.5 0 1 4 1 26 +2 -3.5 0 0 0 1 26 +2 -2.5 0 0 0 1 26 +6 -3.75 0 1 6 1 26 +8 -3 0 1 8 1 26 +2 -3.25 0 0 0 1 26 +8 -9 0 0 0 1 26 +6 -6 0 1 -6 1 26 +8 -11 0 0 0 1 26 +5 -8.75 0 0 0 1 26 +6 -9.75 0 0 0 1 26 +12 -24 0 0 0 1 26 +4 -6.5 0 0 0 1 26 +5 -10 0 0 0 1 26 +30 0 12 1 0 1 26 +12 -18 0 0 0 1 26 +9 -9 0 0 0 1 26 +5 -5 0 0 0 1 26 +5 -9.38 0 0 0 1 26 +10 -12.5 0 1 -12.5 1 26 +10 -18.75 0 0 0 1 26 +5 -2.5 0 1 -2.5 1 26 +9 -14.63 0 0 0 1 26 +28 0 13 1 0 1 26 +5 -6.88 0 0 0 1 26 +4 -3.5 0 0 0 1 26 +12 -16.5 0 1 12 1 26 +5 -8.13 0 0 0 1 26 +9 -16.88 0 0 0 1 26 +9 -3.38 0 1 -3.38 1 26 +12 0 5 1 0 1 26 +4 -8 0 0 0 1 26 +8 -12 0 0 0 1 26 +8 -4 0 1 -4 1 26 +2 -4 0 0 0 1 26 +12 -9 0 1 -9 1 26 +4 -1.5 0 1 4 1 26 +6 -10.5 0 0 0 1 26 +5 -3.13 0 1 5 1 26 +10 -15 0 0 0 1 26 +23 0 10 0 10 1 26 +12 -7.5 0 1 -7.5 1 26 +2 -0.5 0 1 -0.5 1 26 +4 0 2 1 4 1 26 +6 -1.5 0 1 -1.5 1 26 +4 -1 0 1 4 1 26 +10 -20 0 0 0 1 26 +12 -22.5 0 0 0 1 26 +25 0 9 1 0 1 26 +13 0 5 1 13 1 26 +6 -5.25 0 0 0 1 26 +9 -13.5 0 0 0 1 26 +5 0 2 1 0 1 26 +12 -6 0 1 -6 1 26 +5 -6.25 0 0 0 1 26 +10 -3.75 0 1 10 1 26 +9 -7.88 0 0 0 1 26 +8 -6 0 0 0 1 26 +4 -5.5 0 0 0 1 26 +19 0 8 1 19 1 26 +10 -7.5 0 0 0 1 26 +4 -6 0 0 0 1 26 +8 -14 0 0 0 1 26 +8 0 3 0 3 1 26 +12 -21 0 0 0 1 26 +4 -2 0 1 -2 1 26 +4 -5 0 0 0 1 26 +6 -7.5 0 0 0 1 26 +12 -12 0 0 0 1 26 +8 -5 0 1 -5 1 26 +26 0 12 0 12 1 26 +8 -10 0 0 0 1 26 +5 -5.63 0 0 0 1 26 +2 -1 0 1 2 1 27 +9 -6.75 0 1 -6.75 1 27 +2 -4 0 1 -4 1 27 +2 -3.25 0 1 2 1 27 +4 -6.5 0 1 -6.5 1 27 +5 -5.63 0 1 -5.63 1 27 +8 -8 0 1 -8 1 27 +12 -18 0 1 12 1 27 +2 -2.5 0 0 0 1 27 +3 0 1 1 3 1 27 +12 -16.5 0 0 0 1 27 +10 -12.5 0 1 -12.5 1 27 +5 -1.25 0 1 5 1 27 +19 0 8 1 19 1 27 +8 -9 0 1 -9 1 27 +5 -10 0 1 5 1 27 +25 0 10 0 10 1 27 +7 0 3 1 0 1 27 +6 -11.25 0 1 6 1 27 +6 -1.5 0 1 -1.5 1 27 +4 -1.5 0 1 4 1 27 +10 -5 0 1 10 1 27 +10 -3.75 0 1 10 1 27 +6 -4.5 0 1 -4.5 1 27 +12 -19.5 0 1 12 1 27 +5 -4.38 0 0 0 1 27 +8 -11 0 1 8 1 27 +2 -0.75 0 1 2 1 27 +2 -1.5 0 0 0 1 27 +6 -6.75 0 1 6 1 27 +4 -6 0 1 4 1 27 +10 -16.25 0 1 -16.25 1 27 +12 -15 0 1 -15 1 27 +6 -5.25 0 1 6 1 27 +12 -21 0 1 12 1 27 +4 -3 0 1 4 1 27 +12 -22.5 0 1 12 1 27 +2 -3.75 0 1 -3.75 1 27 +6 -12 0 1 -12 1 27 +5 -8.13 0 0 0 1 27 +10 -8.75 0 1 -8.75 1 27 +12 -6 0 1 -6 1 27 +5 -5 0 1 -5 1 27 +22 0 10 1 22 1 27 +12 -13.5 0 1 12 1 27 +8 -7 0 1 8 1 27 +4 -3.5 0 1 -3.5 1 27 +9 -12.38 0 1 9 1 27 +10 -7.5 0 1 -7.5 1 27 +26 0 10 1 26 1 27 +12 -4.5 0 1 -4.5 1 27 +8 -15 0 1 -15 1 27 +2 -1.75 0 1 2 1 27 +12 0 6 1 0 1 27 +9 -3.38 0 1 -3.38 1 27 +2 -3 0 1 -3 1 27 +9 -5.63 0 1 -5.63 1 27 +2 -3.5 0 0 0 1 27 +8 -12 0 1 8 1 27 +10 -18.75 0 0 0 1 27 +4 0 2 1 4 1 27 +2 -2.25 0 1 2 1 27 +9 -2.25 0 1 -2.25 1 27 +10 -13.75 0 1 -13.75 1 27 +28 0 13 1 0 1 27 +4 -2.5 0 1 4 1 27 +9 -15.75 0 1 9 1 27 +10 -15 0 1 10 1 27 +10 -10 0 1 10 1 27 +9 -18 0 0 0 1 27 +12 -24 0 0 0 1 27 +13 0 5 0 5 1 27 +5 -1.88 0 1 -1.88 1 27 +4 -4.5 0 1 4 1 27 +9 -7.88 0 1 9 1 27 +9 -9 0 1 9 1 27 +25 0 9 1 0 1 27 +12 -12 0 1 -12 1 27 +6 -2.25 0 1 6 1 27 +8 -5 0 1 -5 1 27 +4 -5.5 0 1 -5.5 1 27 +2 -1.25 0 1 -1.25 1 27 +9 -13.5 0 1 -13.5 1 27 +9 -4.5 0 1 9 1 27 +10 -11.25 0 1 10 1 27 +6 -3 0 1 -3 1 27 +10 -2.5 0 1 -2.5 1 27 +12 0 4 1 12 1 27 +10 -20 0 0 0 1 27 +5 -3.75 0 1 5 1 27 +9 -10.13 0 1 -10.13 1 27 +4 -7 0 1 -7 1 27 +12 -10.5 0 1 -10.5 1 27 +8 -16 0 0 0 1 27 
+4 -7.5 0 1 4 1 27 +8 0 3 1 8 1 27 +6 -10.5 0 1 -10.5 1 27 +6 -9.75 0 1 -9.75 1 27 +5 -8.75 0 1 5 1 27 +5 -2.5 0 1 -2.5 1 27 +13 0 6 0 6 1 27 +23 0 10 1 0 1 27 +8 -4 0 1 -4 1 27 +9 -11.25 0 1 9 1 27 +5 -6.88 0 1 -6.88 1 27 +4 -4 0 1 4 1 27 +10 -17.5 0 1 10 1 27 +8 -13 0 0 0 1 27 +26 0 12 1 0 1 27 +6 -8.25 0 1 -8.25 1 27 +9 -14.63 0 1 9 1 27 +8 -2 0 1 -2 1 27 +10 -6.25 0 1 -6.25 1 27 +8 -14 0 0 0 1 27 +12 0 5 1 0 1 27 +8 -10 0 1 -10 1 27 +30 0 12 1 0 1 27 +5 -7.5 0 0 0 1 27 +5 0 2 1 0 1 27 +6 -3.75 0 1 6 1 27 +6 -6 0 1 -6 1 27 +4 -2 0 1 -2 1 27 +12 -7.5 0 1 -7.5 1 27 +5 -6.25 0 1 5 1 27 +4 -5 0 1 4 1 27 +2 -2.75 0 1 -2.75 1 27 +2 -2 0 1 2 1 27 +6 -9 0 1 -9 1 27 +5 -3.13 0 1 5 1 27 +12 -9 0 1 -9 1 27 +4 -8 0 1 -8 1 27 +4 -1 0 1 4 1 27 +2 0 1 0 1 1 27 +9 -16.88 0 0 0 1 27 +8 -6 0 1 -6 1 27 +2 -0.5 0 1 -0.5 1 27 +6 -7.5 0 1 -7.5 1 27 +8 -3 0 1 8 1 27 +12 -3 0 1 12 1 27 +5 -9.38 0 1 -9.38 1 27 +6 -9.75 0 0 0 1 28 +12 -13.5 0 1 12 1 28 +8 -7 0 1 8 1 28 +10 -7.5 0 1 -7.5 1 28 +2 -2.25 0 0 0 1 28 +6 -8.25 0 1 -8.25 1 28 +10 -16.25 0 0 0 1 28 +3 0 1 0 1 1 28 +4 -3 0 1 4 1 28 +8 -2 0 1 -2 1 28 +4 -2.5 0 1 4 1 28 +5 -5.63 0 0 0 1 28 +5 0 2 1 0 1 28 +30 0 12 1 0 1 28 +9 -4.5 0 0 0 1 28 +4 -7.5 0 0 0 1 28 +26 0 10 1 26 1 28 +10 -6.25 0 1 -6.25 1 28 +2 -4 0 0 0 1 28 +4 -5 0 0 0 1 28 +5 -1.88 0 1 -1.88 1 28 +23 0 10 0 10 1 28 +8 -3 0 1 8 1 28 +8 -12 0 0 0 1 28 +10 -2.5 0 0 0 1 28 +5 -8.13 0 0 0 1 28 +8 -9 0 0 0 1 28 +2 -3 0 0 0 1 28 +9 -11.25 0 0 0 1 28 +9 -12.38 0 0 0 1 28 +12 -15 0 0 0 1 28 +8 -10 0 0 0 1 28 +4 -1 0 1 4 1 28 +8 0 3 1 8 1 28 +4 -3.5 0 0 0 1 28 +8 -8 0 0 0 1 28 +10 -11.25 0 0 0 1 28 +10 -5 0 1 10 1 28 +9 -13.5 0 0 0 1 28 +2 -0.75 0 1 2 1 28 +5 -4.38 0 0 0 1 28 +2 -1.5 0 0 0 1 28 +2 -3.75 0 0 0 1 28 +5 -3.75 0 1 5 1 28 +9 -16.88 0 0 0 1 28 +9 -3.38 0 1 -3.38 1 28 +5 -10 0 0 0 1 28 +26 0 12 0 12 1 28 +5 -9.38 0 0 0 1 28 +6 -1.5 0 1 -1.5 1 28 +10 -10 0 0 0 1 28 +2 -1.25 0 0 0 1 28 +9 -14.63 0 0 0 1 28 +6 -4.5 0 0 0 1 28 +5 -5 0 0 0 1 28 +5 -7.5 0 0 0 1 28 +8 -13 0 0 0 1 28 +5 -3.13 0 0 0 1 28 +8 -5 0 1 -5 1 28 +8 -11 0 0 0 1 28 +6 -6.75 0 0 0 1 28 +5 -8.75 0 0 0 1 28 +2 0 1 1 2 1 28 +9 -5.63 0 0 0 1 28 +6 -6 0 0 0 1 28 +4 -5.5 0 0 0 1 28 +6 -3 0 1 -3 1 28 +12 -19.5 0 0 0 1 28 +10 -13.75 0 0 0 1 28 +10 -8.75 0 0 0 1 28 +5 -6.88 0 0 0 1 28 +6 -7.5 0 0 0 1 28 +10 -12.5 0 0 0 1 28 +9 -6.75 0 0 0 1 28 +4 -6 0 0 0 1 28 +8 -4 0 0 0 1 28 +2 -1 0 1 2 1 28 +12 -24 0 0 0 1 28 +12 -6 0 0 0 1 28 +2 -2 0 0 0 1 28 +4 -7 0 0 0 1 28 +12 -9 0 0 0 1 28 +6 -11.25 0 0 0 1 28 +25 0 10 1 25 1 28 +28 0 13 1 0 1 28 +2 -2.75 0 0 0 1 28 +12 -10.5 0 0 0 1 28 +8 -14 0 0 0 1 28 +4 -6.5 0 0 0 1 28 +4 0 2 1 4 1 28 +10 -15 0 0 0 1 28 +12 0 5 1 0 1 28 +10 -18.75 0 0 0 1 28 +12 -3 0 1 12 1 28 +4 -4 0 0 0 1 28 +9 -7.88 0 0 0 1 28 +9 -2.25 0 1 -2.25 1 28 +2 -1.75 0 0 0 1 28 +12 0 6 1 0 1 28 +5 -2.5 0 0 0 1 28 +4 -4.5 0 0 0 1 28 +8 -6 0 0 0 1 28 +12 -18 0 0 0 1 28 +12 -16.5 0 0 0 1 28 +22 0 10 1 22 1 28 +12 -21 0 0 0 1 28 +12 -4.5 0 0 0 1 28 +12 -12 0 0 0 1 28 +19 0 8 1 19 1 28 +2 -2.5 0 0 0 1 28 +12 0 4 1 12 1 28 +4 -2 0 0 0 1 28 +9 -9 0 0 0 1 28 +9 -10.13 0 0 0 1 28 +6 -2.25 0 1 6 1 28 +2 -0.5 0 1 -0.5 1 28 +10 -3.75 0 0 0 1 28 +13 0 5 1 13 1 28 +4 -1.5 0 1 4 1 28 +5 -1.25 0 1 5 1 28 +6 -9 0 0 0 1 28 +10 -17.5 0 0 0 1 28 +6 -12 0 0 0 1 28 +6 -5.25 0 0 0 1 28 +12 -22.5 0 0 0 1 28 +8 -16 0 0 0 1 28 +9 -15.75 0 0 0 1 28 +10 -20 0 0 0 1 28 +13 0 6 0 6 1 28 +4 -8 0 0 0 1 28 +12 -7.5 0 0 0 1 28 +9 -18 0 0 0 1 28 +2 -3.25 0 0 0 1 28 +7 0 3 1 0 1 28 +6 -3.75 0 0 0 1 28 +5 -6.25 0 0 0 1 28 +8 -15 0 0 0 1 28 +25 0 9 1 0 1 28 +2 -3.5 0 
0 0 1 28 +6 -10.5 0 0 0 1 28 +9 -10.13 0 0 0 1 29 +12 -10.5 0 1 -10.5 1 29 +25 0 10 1 25 1 29 +4 -7 0 0 0 1 29 +9 -7.88 0 0 0 1 29 +5 -3.13 0 1 5 1 29 +5 -8.13 0 0 0 1 29 +8 -7 0 1 8 1 29 +12 -6 0 1 -6 1 29 +12 -24 0 0 0 1 29 +12 -21 0 0 0 1 29 +4 -2.5 0 1 4 1 29 +6 -9 0 0 0 1 29 +10 -15 0 0 0 1 29 +8 -6 0 1 -6 1 29 +13 0 6 1 0 1 29 +6 -12 0 0 0 1 29 +6 -4.5 0 1 -4.5 1 29 +9 -16.88 0 0 0 1 29 +10 -18.75 0 0 0 1 29 +9 -3.38 0 1 -3.38 1 29 +6 -9.75 0 1 -9.75 1 29 +2 -1.75 0 1 2 1 29 +5 0 2 1 0 1 29 +8 -5 0 0 0 1 29 +8 -9 0 1 -9 1 29 +12 0 6 1 0 1 29 +12 0 4 1 12 1 29 +2 -2.5 0 0 0 1 29 +6 -3 0 1 -3 1 29 +10 -7.5 0 1 -7.5 1 29 +5 -2.5 0 1 -2.5 1 29 +5 -3.75 0 1 5 1 29 +10 -3.75 0 1 10 1 29 +2 -3 0 0 0 1 29 +10 -6.25 0 0 0 1 29 +4 -7.5 0 0 0 1 29 +8 -16 0 0 0 1 29 +5 -6.25 0 0 0 1 29 +4 0 2 1 4 1 29 +10 -11.25 0 0 0 1 29 +5 -6.88 0 0 0 1 29 +5 -7.5 0 1 5 1 29 +26 0 12 0 12 1 29 +8 -13 0 0 0 1 29 +4 -4.5 0 1 4 1 29 +8 -10 0 0 0 1 29 +6 -3.75 0 1 6 1 29 +5 -5.63 0 0 0 1 29 +9 -18 0 0 0 1 29 +12 -13.5 0 1 12 1 29 +7 0 3 0 3 1 29 +8 -14 0 0 0 1 29 +2 -1.5 0 1 -1.5 1 29 +10 -2.5 0 1 -2.5 1 29 +13 0 5 0 5 1 29 +9 -15.75 0 0 0 1 29 +8 -12 0 0 0 1 29 +28 0 13 1 0 1 29 +6 -7.5 0 0 0 1 29 +10 -16.25 0 0 0 1 29 +12 -7.5 0 0 0 1 29 +5 -5 0 1 -5 1 29 +2 -2 0 1 2 1 29 +22 0 10 0 10 1 29 +2 -1 0 1 2 1 29 +3 0 1 0 1 1 29 +4 -5.5 0 0 0 1 29 +2 -2.25 0 0 0 1 29 +6 -2.25 0 1 6 1 29 +4 -6.5 0 0 0 1 29 +9 -12.38 0 0 0 1 29 +10 -13.75 0 0 0 1 29 +10 -17.5 0 0 0 1 29 +4 -5 0 1 4 1 29 +9 -11.25 0 0 0 1 29 +10 -10 0 0 0 1 29 +2 -3.25 0 1 2 1 29 +5 -8.75 0 0 0 1 29 +5 -10 0 0 0 1 29 +9 -2.25 0 1 -2.25 1 29 +6 -6.75 0 0 0 1 29 +12 -16.5 0 0 0 1 29 +9 -14.63 0 0 0 1 29 +4 -8 0 0 0 1 29 +6 -5.25 0 0 0 1 29 +9 -6.75 0 1 -6.75 1 29 +12 -12 0 1 -12 1 29 +4 -1 0 1 4 1 29 +12 -15 0 0 0 1 29 +4 -3.5 0 0 0 1 29 +2 -1.25 0 0 0 1 29 +30 0 12 0 12 1 29 +12 -19.5 0 0 0 1 29 +12 -3 0 0 0 1 29 +5 -1.25 0 0 0 1 29 +5 -1.88 0 1 -1.88 1 29 +2 -3.5 0 1 -3.5 1 29 +12 -9 0 0 0 1 29 +10 -20 0 0 0 1 29 +8 -4 0 1 -4 1 29 +12 0 5 0 5 1 29 +2 0 1 0 1 1 29 +4 -1.5 0 1 4 1 29 +2 -3.75 0 0 0 1 29 +6 -10.5 0 0 0 1 29 +4 -2 0 0 0 1 29 +23 0 10 0 10 1 29 +12 -18 0 0 0 1 29 +6 -8.25 0 0 0 1 29 +26 0 10 0 10 1 29 +10 -8.75 0 0 0 1 29 +2 -0.75 0 1 2 1 29 +5 -9.38 0 0 0 1 29 +25 0 9 0 9 1 29 +9 -4.5 0 0 0 1 29 +10 -5 0 1 10 1 29 +2 -4 0 0 0 1 29 +2 -2.75 0 1 -2.75 1 29 +4 -6 0 0 0 1 29 +10 -12.5 0 0 0 1 29 +12 -22.5 0 0 0 1 29 +4 -4 0 1 4 1 29 +2 -0.5 0 1 -0.5 1 29 +8 -2 0 1 -2 1 29 +4 -3 0 1 4 1 29 +6 -11.25 0 0 0 1 29 +8 -15 0 0 0 1 29 +8 -11 0 0 0 1 29 +12 -4.5 0 1 -4.5 1 29 +19 0 8 0 8 1 29 +6 -6 0 0 0 1 29 +5 -4.38 0 0 0 1 29 +9 -9 0 0 0 1 29 +6 -1.5 0 1 -1.5 1 29 +9 -13.5 0 1 -13.5 1 29 +9 -5.63 0 0 0 1 29 +8 -8 0 0 0 1 29 +8 0 3 0 3 1 29 +8 -3 0 1 8 1 29 +9 -11.25 0 0 0 1 30 +8 -16 0 0 0 1 30 +9 -5.63 0 1 -5.63 1 30 +9 -4.5 0 1 9 1 30 +2 -2 0 0 0 1 30 +12 -19.5 0 0 0 1 30 +4 -4.5 0 0 0 1 30 +2 -3.75 0 1 -3.75 1 30 +2 -2.25 0 0 0 1 30 +12 -4.5 0 1 -4.5 1 30 +9 -10.13 0 0 0 1 30 +12 -3 0 1 12 1 30 +10 -17.5 0 0 0 1 30 +5 -4.38 0 0 0 1 30 +5 -7.5 0 0 0 1 30 +6 -11.25 0 0 0 1 30 +2 -1.5 0 0 0 1 30 +9 -6.75 0 1 -6.75 1 30 +4 -7 0 0 0 1 30 +8 -7 0 0 0 1 30 +2 -1.75 0 0 0 1 30 +2 -1 0 0 0 1 30 +10 -6.25 0 1 -6.25 1 30 +6 -6.75 0 0 0 1 30 +9 -2.25 0 1 -2.25 1 30 +2 -0.75 0 0 0 1 30 +12 0 4 0 4 1 30 +6 -3 0 1 -3 1 30 +3 0 1 0 1 1 30 +2 -3 0 0 0 1 30 +10 -13.75 0 0 0 1 30 +6 -2.25 0 1 6 1 30 +5 -1.88 0 1 -1.88 1 30 +12 -13.5 0 0 0 1 30 +22 0 10 0 10 1 30 +9 -12.38 0 0 0 1 30 +26 0 10 0 10 1 30 +12 -10.5 0 0 0 1 30 +10 -2.5 0 1 -2.5 1 30 +25 0 10 0 10 1 30 +9 
-15.75 0 0 0 1 30 +7 0 3 0 3 1 30 +10 -10 0 0 0 1 30 +12 -15 0 0 0 1 30 +12 0 6 0 6 1 30 +6 -4.5 0 0 0 1 30 +8 -13 0 0 0 1 30 +10 -16.25 0 0 0 1 30 +5 -1.25 0 1 5 1 30 +4 -4 0 0 0 1 30 +5 -3.75 0 0 0 1 30 +6 -8.25 0 0 0 1 30 +8 -15 0 0 0 1 30 +8 -8 0 0 0 1 30 +2 -2.75 0 0 0 1 30 +6 -12 0 0 0 1 30 +2 0 1 0 1 1 30 +2 -1.25 0 0 0 1 30 +9 -18 0 0 0 1 30 +6 -9 0 0 0 1 30 +10 -8.75 0 0 0 1 30 +4 -7.5 0 0 0 1 30 +13 0 6 0 6 1 30 +10 -11.25 0 0 0 1 30 +4 -3 0 0 0 1 30 +10 -5 0 0 0 1 30 +8 -2 0 0 0 1 30 +4 -2.5 0 0 0 1 30 +2 -3.5 0 0 0 1 30 +2 -2.5 0 0 0 1 30 +6 -3.75 0 0 0 1 30 +8 -3 0 0 0 1 30 +2 -3.25 0 0 0 1 30 +8 -9 0 0 0 1 30 +6 -6 0 0 0 1 30 +8 -11 0 0 0 1 30 +5 -8.75 0 0 0 1 30 +6 -9.75 0 0 0 1 30 +12 -24 0 0 0 1 30 +4 -6.5 0 0 0 1 30 +5 -10 0 0 0 1 30 +30 0 12 1 0 1 30 +12 -18 0 0 0 1 30 +9 -9 0 0 0 1 30 +5 -5 0 0 0 1 30 +5 -9.38 0 0 0 1 30 +10 -12.5 0 0 0 1 30 +10 -18.75 0 0 0 1 30 +5 -2.5 0 1 -2.5 1 30 +9 -14.63 0 0 0 1 30 +28 0 13 1 0 1 30 +5 -6.88 0 1 -6.88 1 30 +4 -3.5 0 0 0 1 30 +12 -16.5 0 0 0 1 30 +5 -8.13 0 0 0 1 30 +9 -16.88 0 0 0 1 30 +9 -3.38 0 0 0 1 30 +12 0 5 1 0 1 30 +4 -8 0 0 0 1 30 +8 -12 0 0 0 1 30 +8 -4 0 0 0 1 30 +2 -4 0 0 0 1 30 +12 -9 0 1 -9 1 30 +4 -1.5 0 0 0 1 30 +6 -10.5 0 0 0 1 30 +5 -3.13 0 0 0 1 30 +10 -15 0 0 0 1 30 +23 0 10 0 10 1 30 +12 -7.5 0 0 0 1 30 +2 -0.5 0 0 0 1 30 +4 0 2 0 2 1 30 +6 -1.5 0 0 0 1 30 +4 -1 0 0 0 1 30 +10 -20 0 0 0 1 30 +12 -22.5 0 0 0 1 30 +25 0 9 0 9 1 30 +13 0 5 0 5 1 30 +6 -5.25 0 0 0 1 30 +9 -13.5 0 0 0 1 30 +5 0 2 0 2 1 30 +12 -6 0 0 0 1 30 +5 -6.25 0 0 0 1 30 +10 -3.75 0 0 0 1 30 +9 -7.88 0 0 0 1 30 +8 -6 0 0 0 1 30 +4 -5.5 0 0 0 1 30 +19 0 8 0 8 1 30 +10 -7.5 0 0 0 1 30 +4 -6 0 0 0 1 30 +8 -14 0 0 0 1 30 +8 0 3 0 3 1 30 +12 -21 0 0 0 1 30 +4 -2 0 0 0 1 30 +4 -5 0 0 0 1 30 +6 -7.5 0 0 0 1 30 +12 -12 0 0 0 1 30 +8 -5 0 0 0 1 30 +26 0 12 1 0 1 30 +8 -10 0 0 0 1 30 +5 -5.63 0 0 0 1 30
\ No newline at end of file
diff --git a/R/inst/extdata/ra_exampleData.txt b/R/inst/extdata/ra_exampleData.txt
new file mode 100644
index 00000000..dd6e3536
--- /dev/null
+++ b/R/inst/extdata/ra_exampleData.txt
@@ -0,0 +1,701 @@
+gain loss cert gamble outcome cond subjID
+9 -6.75 0 1 -6.75 0 2 +6 -6.75 0 0 0 0 2 +6 -3 0 1 6 0 2 +2 -1.5 0 0 0 0 2 +4 -3 0 0 0 0 2 +5 -6.88 0 0 0 0 2 +12 -9 0 1 12 0 2 +4 -5 0 0 0 0 2 +5 -7.5 0 0 0 0 2 +4 -4 0 1 -4 0 2 +9 -5.63 0 1 -5.63 0 2 +9 -14.63 0 0 0 0 2 +5 -9.38 0 0 0 0 2 +6 -4.5 0 1 6 0 2 +8 -7 0 0 0 0 2 +10 -16.25 0 0 0 0 2 +10 -17.5 0 0 0 0 2 +9 -16.88 0 0 0 0 2 +8 -5 0 1 8 0 2 +6 -1.5 0 1 6 0 2 +12 -18 0 0 0 0 2 +5 -6.25 0 0 0 0 2 +8 -4 0 1 8 0 2 +9 -15.75 0 0 0 0 2 +9 -13.5 0 0 0 0 2 +5 -8.13 0 0 0 0 2 +2 0 1 1 0 0 2 +2 -3.75 0 0 0 0 2 +4 -6.5 0 0 0 0 2 +10 -5 0 1 -5 0 2 +12 -22.5 0 0 0 0 2 +2 -1 0 1 2 0 2 +13 0 6 1 13 0 2 +5 -2.5 0 0 0 0 2 +2 -0.5 0 1 2 0 2 +2 -3.25 0 1 -3.25 0 2 +30 0 12 1 0 0 2 +8 -8 0 1 8 0 2 +4 -5.5 0 0 0 0 2 +23 0 10 1 0 0 2 +4 -3.5 0 0 0 0 2 +5 0 2 1 5 0 2 +8 0 3 1 0 0 2 +9 -10.13 0 0 0 0 2 +8 -16 0 0 0 0 2 +12 -24 0 0 0 0 2 +9 -3.38 0 1 -3.38 0 2 +6 -5.25 0 1 6 0 2 +2 -4 0 0 0 0 2 +4 -1 0 1 -1 0 2 +6 -11.25 0 0 0 0 2 +5 -4.38 0 1 -4.38 0 2 +6 -2.25 0 1 6 0 2 +12 -10.5 0 1 12 0 2 +9 -18 0 0 0 0 2 +10 -20 0 0 0 0 2 +4 -4.5 0 0 0 0 2 +9 -2.25 0 1 -2.25 0 2 +4 -6 0 0 0 0 2 +8 -10 0 1 -10 0 2 +5 -5 0 1 -5 0 2 +5 -8.75 0 0 0 0 2 +8 -6 0 1 -6 0 2 +10 -13.75 0 0 0 0 2 +2 -2.5 0 0 0 0 2 +8 -11 0 1 -11 0 2 +4 -2 0 1 4 0 2 +10 -7.5 0 1 -7.5 0 2 +22 0 10 1 22 0 2 +25 0 10 1 0 0 2 +6 -9.75 0 0 0 0 2 +12 0 5 1 12 0 2 +4 -2.5 0 1 -2.5 0 2 +8 -3 0 1 8 0 2 +10 -11.25 0 1 -11.25 0 2 +5 -10 0 1 5 0 2 +10
-15 0 0 0 0 2 +2 -3.5 0 0 0 0 2 +12 0 4 1 12 0 2 +13 0 5 0 5 0 2 +5 -3.75 0 1 5 0 2 +26 0 12 0 12 0 2 +5 -5.63 0 0 0 0 2 +8 -2 0 1 -2 0 2 +2 -3 0 0 0 0 2 +6 -9 0 0 0 0 2 +9 -7.88 0 0 0 0 2 +8 -14 0 0 0 0 2 +28 0 13 1 28 0 2 +9 -12.38 0 0 0 0 2 +8 -15 0 1 -15 0 2 +10 -2.5 0 1 -2.5 0 2 +4 0 2 1 4 0 2 +12 -6 0 1 -6 0 2 +12 -16.5 0 1 -16.5 0 2 +4 -7.5 0 0 0 0 2 +10 -8.75 0 1 -8.75 0 2 +10 -18.75 0 1 10 0 2 +26 0 10 1 0 0 2 +12 -21 0 1 12 0 2 +2 -0.75 0 1 -0.75 0 2 +9 -9 0 1 -9 0 2 +10 -6.25 0 1 10 0 2 +8 -12 0 1 -12 0 2 +3 0 1 1 0 0 2 +5 -1.88 0 1 5 0 2 +6 -7.5 0 1 -7.5 0 2 +12 -13.5 0 1 12 0 2 +4 -7 0 0 0 0 2 +6 -8.25 0 1 -8.25 0 2 +6 -12 0 0 0 0 2 +6 -10.5 0 0 0 0 2 +4 -8 0 0 0 0 2 +6 -6 0 1 -6 0 2 +12 0 6 1 12 0 2 +12 -19.5 0 1 12 0 2 +19 0 8 1 19 0 2 +12 -15 0 0 0 0 2 +2 -1.75 0 0 0 0 2 +6 -3.75 0 0 0 0 2 +2 -1.25 0 0 0 0 2 +5 -1.25 0 1 -1.25 0 2 +4 -1.5 0 1 4 0 2 +8 -13 0 0 0 0 2 +12 -7.5 0 1 -7.5 0 2 +12 -3 0 1 -3 0 2 +2 -2.75 0 0 0 0 2 +7 0 3 1 7 0 2 +25 0 9 1 25 0 2 +2 -2 0 0 0 0 2 +12 -4.5 0 1 -4.5 0 2 +12 -12 0 1 12 0 2 +5 -3.13 0 1 5 0 2 +9 -11.25 0 0 0 0 2 +8 -9 0 1 -9 0 2 +2 -2.25 0 0 0 0 2 +9 -4.5 0 1 -4.5 0 2 +10 -3.75 0 1 10 0 2 +10 -10 0 1 10 0 2 +10 -12.5 0 0 0 0 2 +2 -2.5 0 0 0 0 3 +5 -5.63 0 0 0 0 3 +6 -7.5 0 0 0 0 3 +26 0 10 1 0 0 3 +9 -4.5 0 0 0 0 3 +2 -1.25 0 0 0 0 3 +8 -3 0 0 0 0 3 +25 0 9 0 9 0 3 +4 -4.5 0 0 0 0 3 +5 -10 0 0 0 0 3 +6 -9 0 0 0 0 3 +10 -6.25 0 0 0 0 3 +4 -4 0 0 0 0 3 +12 -3 0 0 0 0 3 +5 -5 0 0 0 0 3 +12 0 5 0 5 0 3 +6 -9.75 0 0 0 0 3 +19 0 8 0 8 0 3 +4 -7.5 0 0 0 0 3 +12 -9 0 0 0 0 3 +4 -6.5 0 0 0 0 3 +9 -5.63 0 0 0 0 3 +9 -18 0 0 0 0 3 +10 -11.25 0 0 0 0 3 +10 -13.75 0 0 0 0 3 +6 -12 0 0 0 0 3 +10 -12.5 0 0 0 0 3 +4 -7 0 0 0 0 3 +10 -7.5 0 0 0 0 3 +4 -8 0 0 0 0 3 +8 -11 0 0 0 0 3 +12 0 4 1 12 0 3 +9 -3.38 0 0 0 0 3 +10 -18.75 0 0 0 0 3 +2 -3.5 0 0 0 0 3 +2 -1 0 0 0 0 3 +2 -3.25 0 0 0 0 3 +2 0 1 0 1 0 3 +7 0 3 0 3 0 3 +8 0 3 0 3 0 3 +12 -6 0 0 0 0 3 +2 -0.5 0 1 2 0 3 +9 -7.88 0 0 0 0 3 +8 -15 0 0 0 0 3 +2 -1.5 0 0 0 0 3 +12 -22.5 0 0 0 0 3 +8 -7 0 0 0 0 3 +4 -5.5 0 0 0 0 3 +10 -8.75 0 0 0 0 3 +8 -9 0 0 0 0 3 +2 -4 0 0 0 0 3 +4 0 2 1 4 0 3 +8 -8 0 0 0 0 3 +9 -13.5 0 0 0 0 3 +9 -9 0 0 0 0 3 +6 -3.75 0 0 0 0 3 +13 0 6 0 6 0 3 +5 -1.88 0 1 5 0 3 +6 -6 0 0 0 0 3 +5 -6.88 0 0 0 0 3 +8 -16 0 0 0 0 3 +12 -7.5 0 0 0 0 3 +5 -1.25 0 1 -1.25 0 3 +9 -14.63 0 0 0 0 3 +8 -4 0 0 0 0 3 +10 -17.5 0 0 0 0 3 +5 -3.75 0 0 0 0 3 +6 -10.5 0 0 0 0 3 +13 0 5 1 13 0 3 +10 -16.25 0 0 0 0 3 +5 -7.5 0 0 0 0 3 +2 -1.75 0 0 0 0 3 +5 -9.38 0 0 0 0 3 +2 -2.75 0 0 0 0 3 +2 -0.75 0 1 -0.75 0 3 +5 -8.13 0 0 0 0 3 +9 -11.25 0 0 0 0 3 +8 -13 0 0 0 0 3 +9 -16.88 0 0 0 0 3 +2 -2 0 0 0 0 3 +12 -18 0 0 0 0 3 +8 -2 0 1 -2 0 3 +2 -3 0 0 0 0 3 +6 -4.5 0 0 0 0 3 +5 0 2 1 5 0 3 +12 -19.5 0 0 0 0 3 +9 -15.75 0 0 0 0 3 +8 -6 0 0 0 0 3 +10 -2.5 0 1 -2.5 0 3 +9 -6.75 0 0 0 0 3 +6 -6.75 0 0 0 0 3 +2 -3.75 0 0 0 0 3 +10 -5 0 0 0 0 3 +2 -2.25 0 0 0 0 3 +26 0 12 0 12 0 3 +12 -13.5 0 0 0 0 3 +8 -5 0 0 0 0 3 +6 -3 0 0 0 0 3 +10 -3.75 0 0 0 0 3 +12 -10.5 0 0 0 0 3 +4 -5 0 0 0 0 3 +9 -2.25 0 0 0 0 3 +4 -3 0 0 0 0 3 +9 -10.13 0 0 0 0 3 +28 0 13 0 13 0 3 +22 0 10 0 10 0 3 +10 -10 0 0 0 0 3 +4 -1 0 0 0 0 3 +4 -2.5 0 0 0 0 3 +12 -24 0 0 0 0 3 +8 -12 0 0 0 0 3 +3 0 1 1 0 0 3 +9 -12.38 0 0 0 0 3 +23 0 10 0 10 0 3 +4 -3.5 0 0 0 0 3 +4 -1.5 0 0 0 0 3 +8 -10 0 0 0 0 3 +8 -14 0 0 0 0 3 +4 -6 0 0 0 0 3 +25 0 10 0 10 0 3 +12 -16.5 0 0 0 0 3 +12 -12 0 0 0 0 3 +5 -2.5 0 0 0 0 3 +5 -8.75 0 0 0 0 3 +12 -4.5 0 0 0 0 3 +12 -15 0 0 0 0 3 +5 -3.13 0 0 0 0 3 +12 -21 0 0 0 0 3 +5 -4.38 0 0 0 0 3 +6 -11.25 0 0 0 0 3 +30 0 12 0 12 0 3 
+6 -1.5 0 1 6 0 3 +12 0 6 0 6 0 3 +4 -2 0 0 0 0 3 +10 -15 0 0 0 0 3 +6 -2.25 0 0 0 0 3 +10 -20 0 0 0 0 3 +6 -5.25 0 0 0 0 3 +5 -6.25 0 0 0 0 3 +6 -8.25 0 0 0 0 3 +4 -4.5 0 1 -4.5 0 4 +10 -12.5 0 0 0 0 4 +26 0 12 1 26 0 4 +6 -7.5 0 1 -7.5 0 4 +4 -6.5 0 0 0 0 4 +12 -4.5 0 1 -4.5 0 4 +5 -2.5 0 1 5 0 4 +6 -12 0 0 0 0 4 +9 -14.63 0 1 9 0 4 +6 -6 0 0 0 0 4 +22 0 10 1 22 0 4 +2 -1 0 1 2 0 4 +8 -3 0 1 8 0 4 +12 -9 0 0 0 0 4 +5 -3.75 0 1 5 0 4 +6 -3 0 1 6 0 4 +4 0 2 0 2 0 4 +28 0 13 1 28 0 4 +12 -15 0 0 0 0 4 +9 -11.25 0 0 0 0 4 +12 -10.5 0 1 12 0 4 +5 -1.88 0 1 5 0 4 +2 -2.75 0 0 0 0 4 +4 -7 0 0 0 0 4 +8 -4 0 1 8 0 4 +2 0 1 1 0 0 4 +2 -3.5 0 0 0 0 4 +2 -1.75 0 1 2 0 4 +5 -5 0 0 0 0 4 +12 -12 0 1 12 0 4 +12 0 6 1 12 0 4 +6 -4.5 0 0 0 0 4 +30 0 12 0 12 0 4 +12 -16.5 0 0 0 0 4 +6 -9.75 0 1 6 0 4 +12 -22.5 0 0 0 0 4 +6 -9 0 1 -9 0 4 +5 -3.13 0 0 0 0 4 +5 -9.38 0 0 0 0 4 +12 -7.5 0 1 -7.5 0 4 +5 0 2 1 5 0 4 +10 -15 0 0 0 0 4 +12 -3 0 1 -3 0 4 +13 0 6 0 6 0 4 +9 -16.88 0 0 0 0 4 +6 -11.25 0 0 0 0 4 +8 -5 0 1 8 0 4 +8 -14 0 0 0 0 4 +12 -24 0 1 -24 0 4 +12 0 5 1 12 0 4 +9 -13.5 0 0 0 0 4 +6 -1.5 0 1 6 0 4 +2 -3 0 0 0 0 4 +10 -2.5 0 1 -2.5 0 4 +2 -0.75 0 0 0 0 4 +6 -10.5 0 0 0 0 4 +2 -0.5 0 1 2 0 4 +10 -10 0 0 0 0 4 +8 -10 0 1 -10 0 4 +9 -12.38 0 0 0 0 4 +4 -6 0 0 0 0 4 +6 -2.25 0 1 6 0 4 +9 -15.75 0 0 0 0 4 +12 -13.5 0 0 0 0 4 +8 -6 0 0 0 0 4 +10 -18.75 0 0 0 0 4 +4 -2 0 0 0 0 4 +5 -1.25 0 1 -1.25 0 4 +6 -5.25 0 0 0 0 4 +4 -8 0 1 4 0 4 +25 0 9 1 25 0 4 +2 -3.25 0 0 0 0 4 +10 -11.25 0 1 -11.25 0 4 +4 -7.5 0 0 0 0 4 +9 -5.63 0 1 -5.63 0 4 +6 -6.75 0 0 0 0 4 +8 -2 0 1 -2 0 4 +5 -6.25 0 0 0 0 4 +23 0 10 0 10 0 4 +8 -13 0 0 0 0 4 +10 -13.75 0 0 0 0 4 +5 -10 0 1 5 0 4 +12 0 4 1 12 0 4 +2 -2.5 0 0 0 0 4 +19 0 8 1 19 0 4 +4 -4 0 0 0 0 4 +4 -1 0 1 -1 0 4 +4 -2.5 0 1 -2.5 0 4 +5 -8.13 0 0 0 0 4 +10 -3.75 0 1 10 0 4 +5 -8.75 0 0 0 0 4 +10 -7.5 0 1 -7.5 0 4 +10 -5 0 1 -5 0 4 +10 -20 0 0 0 0 4 +13 0 5 0 5 0 4 +8 -9 0 0 0 0 4 +8 -12 0 0 0 0 4 +10 -16.25 0 0 0 0 4 +5 -6.88 0 1 5 0 4 +4 -5.5 0 0 0 0 4 +5 -7.5 0 0 0 0 4 +9 -10.13 0 0 0 0 4 +6 -8.25 0 0 0 0 4 +26 0 10 0 10 0 4 +4 -5 0 0 0 0 4 +2 -2.25 0 1 2 0 4 +6 -3.75 0 1 -3.75 0 4 +8 -8 0 1 8 0 4 +9 -6.75 0 0 0 0 4 +8 -15 0 1 -15 0 4 +12 -6 0 1 -6 0 4 +25 0 10 0 10 0 4 +12 -19.5 0 0 0 0 4 +9 -7.88 0 0 0 0 4 +4 -1.5 0 1 4 0 4 +8 -7 0 0 0 0 4 +12 -18 0 1 -18 0 4 +2 -2 0 1 2 0 4 +9 -18 0 0 0 0 4 +2 -1.25 0 0 0 0 4 +8 -16 0 0 0 0 4 +5 -4.38 0 0 0 0 4 +2 -4 0 0 0 0 4 +5 -5.63 0 0 0 0 4 +8 0 3 1 0 0 4 +10 -17.5 0 0 0 0 4 +8 -11 0 0 0 0 4 +2 -1.5 0 1 2 0 4 +4 -3.5 0 0 0 0 4 +2 -3.75 0 0 0 0 4 +3 0 1 1 0 0 4 +12 -21 0 0 0 0 4 +10 -8.75 0 0 0 0 4 +9 -9 0 1 -9 0 4 +4 -3 0 0 0 0 4 +7 0 3 1 7 0 4 +9 -3.38 0 1 -3.38 0 4 +9 -2.25 0 1 -2.25 0 4 +10 -6.25 0 0 0 0 4 +9 -4.5 0 1 -4.5 0 4 +9 -6.75 0 1 -6.75 0 6 +6 -6.75 0 1 -6.75 0 6 +6 -3 0 1 6 0 6 +2 -1.5 0 1 2 0 6 +4 -3 0 1 4 0 6 +5 -6.88 0 0 0 0 6 +12 -9 0 0 0 0 6 +4 -5 0 0 0 0 6 +5 -7.5 0 0 0 0 6 +4 -4 0 1 -4 0 6 +9 -5.63 0 1 -5.63 0 6 +9 -14.63 0 0 0 0 6 +5 -9.38 0 0 0 0 6 +6 -4.5 0 1 6 0 6 +8 -7 0 1 -7 0 6 +10 -16.25 0 0 0 0 6 +10 -17.5 0 0 0 0 6 +9 -16.88 0 0 0 0 6 +8 -5 0 1 8 0 6 +6 -1.5 0 1 6 0 6 +12 -18 0 0 0 0 6 +5 -6.25 0 0 0 0 6 +8 -4 0 1 8 0 6 +9 -15.75 0 0 0 0 6 +9 -13.5 0 0 0 0 6 +5 -8.13 0 0 0 0 6 +2 0 1 1 0 0 6 +2 -3.75 0 1 -3.75 0 6 +4 -6.5 0 0 0 0 6 +10 -5 0 1 -5 0 6 +12 -22.5 0 0 0 0 6 +2 -1 0 1 2 0 6 +13 0 6 0 6 0 6 +5 -2.5 0 1 5 0 6 +2 -0.5 0 1 2 0 6 +2 -3.25 0 0 0 0 6 +30 0 12 1 0 0 6 +8 -8 0 0 0 0 6 +4 -5.5 0 0 0 0 6 +23 0 10 1 0 0 6 +4 -3.5 0 1 4 0 6 +5 0 2 1 5 0 6 +8 0 3 1 0 0 6 +9 -10.13 0 0 0 0 6 +8 -16 0 0 0 
0 6 +12 -24 0 0 0 0 6 +9 -3.38 0 1 -3.38 0 6 +6 -5.25 0 0 0 0 6 +2 -4 0 0 0 0 6 +4 -1 0 1 -1 0 6 +6 -11.25 0 0 0 0 6 +5 -4.38 0 1 -4.38 0 6 +6 -2.25 0 1 6 0 6 +12 -10.5 0 0 0 0 6 +9 -18 0 0 0 0 6 +10 -20 0 0 0 0 6 +4 -4.5 0 1 -4.5 0 6 +9 -2.25 0 1 -2.25 0 6 +4 -6 0 0 0 0 6 +8 -10 0 0 0 0 6 +5 -5 0 1 -5 0 6 +5 -8.75 0 0 0 0 6 +8 -6 0 1 -6 0 6 +10 -13.75 0 0 0 0 6 +2 -2.5 0 1 2 0 6 +8 -11 0 0 0 0 6 +4 -2 0 1 4 0 6 +10 -7.5 0 1 -7.5 0 6 +22 0 10 0 10 0 6 +25 0 10 1 0 0 6 +6 -9.75 0 0 0 0 6 +12 0 5 0 5 0 6 +4 -2.5 0 1 -2.5 0 6 +8 -3 0 1 8 0 6 +10 -11.25 0 0 0 0 6 +5 -10 0 0 0 0 6 +10 -15 0 0 0 0 6 +2 -3.5 0 1 -3.5 0 6 +12 0 4 1 12 0 6 +13 0 5 0 5 0 6 +5 -3.75 0 1 5 0 6 +26 0 12 1 26 0 6 +5 -5.63 0 1 -5.63 0 6 +8 -2 0 1 -2 0 6 +2 -3 0 1 -3 0 6 +6 -9 0 0 0 0 6 +9 -7.88 0 1 -7.88 0 6 +8 -14 0 0 0 0 6 +28 0 13 0 13 0 6 +9 -12.38 0 0 0 0 6 +8 -15 0 0 0 0 6 +10 -2.5 0 1 -2.5 0 6 +4 0 2 1 4 0 6 +12 -6 0 1 -6 0 6 +12 -16.5 0 0 0 0 6 +4 -7.5 0 0 0 0 6 +10 -8.75 0 1 -8.75 0 6 +10 -18.75 0 0 0 0 6 +26 0 10 1 0 0 6 +12 -21 0 0 0 0 6 +2 -0.75 0 1 -0.75 0 6 +9 -9 0 1 -9 0 6 +10 -6.25 0 1 10 0 6 +8 -12 0 0 0 0 6 +3 0 1 1 0 0 6 +5 -1.88 0 1 5 0 6 +6 -7.5 0 0 0 0 6 +12 -13.5 0 1 12 0 6 +4 -7 0 0 0 0 6 +6 -8.25 0 0 0 0 6 +6 -12 0 0 0 0 6 +6 -10.5 0 0 0 0 6 +4 -8 0 0 0 0 6 +6 -6 0 1 -6 0 6 +12 0 6 0 6 0 6 +12 -19.5 0 0 0 0 6 +19 0 8 1 19 0 6 +12 -15 0 0 0 0 6 +2 -1.75 0 1 2 0 6 +6 -3.75 0 1 -3.75 0 6 +2 -1.25 0 1 2 0 6 +5 -1.25 0 1 -1.25 0 6 +4 -1.5 0 1 4 0 6 +8 -13 0 0 0 0 6 +12 -7.5 0 1 -7.5 0 6 +12 -3 0 1 -3 0 6 +2 -2.75 0 1 2 0 6 +7 0 3 1 7 0 6 +25 0 9 1 25 0 6 +2 -2 0 1 2 0 6 +12 -4.5 0 1 -4.5 0 6 +12 -12 0 0 0 0 6 +5 -3.13 0 1 5 0 6 +9 -11.25 0 0 0 0 6 +8 -9 0 0 0 0 6 +2 -2.25 0 1 2 0 6 +9 -4.5 0 1 -4.5 0 6 +10 -3.75 0 1 10 0 6 +10 -10 0 0 0 0 6 +10 -12.5 0 0 0 0 6 +2 -2.5 0 1 2 0 7 +5 -5.63 0 0 0 0 7 +6 -7.5 0 0 0 0 7 +26 0 10 1 0 0 7 +9 -4.5 0 1 -4.5 0 7 +2 -1.25 0 1 2 0 7 +8 -3 0 1 8 0 7 +25 0 9 1 25 0 7 +4 -4.5 0 1 -4.5 0 7 +5 -10 0 0 0 0 7 +6 -9 0 0 0 0 7 +10 -6.25 0 0 0 0 7 +4 -4 0 1 -4 0 7 +12 -3 0 1 -3 0 7 +5 -5 0 0 0 0 7 +12 0 5 1 12 0 7 +6 -9.75 0 0 0 0 7 +19 0 8 1 19 0 7 +4 -7.5 0 0 0 0 7 +12 -9 0 0 0 0 7 +4 -6.5 0 0 0 0 7 +9 -5.63 0 1 -5.63 0 7 +9 -18 0 0 0 0 7 +10 -11.25 0 0 0 0 7 +10 -13.75 0 0 0 0 7 +6 -12 0 0 0 0 7 +10 -12.5 0 0 0 0 7 +4 -7 0 0 0 0 7 +10 -7.5 0 0 0 0 7 +4 -8 0 0 0 0 7 +8 -11 0 0 0 0 7 +12 0 4 1 12 0 7 +9 -3.38 0 1 -3.38 0 7 +10 -18.75 0 0 0 0 7 +2 -3.5 0 0 0 0 7 +2 -1 0 1 2 0 7 +2 -3.25 0 0 0 0 7 +2 0 1 1 0 0 7 +7 0 3 1 7 0 7 +8 0 3 1 0 0 7 +12 -6 0 1 -6 0 7 +2 -0.5 0 1 2 0 7 +9 -7.88 0 0 0 0 7 +8 -15 0 0 0 0 7 +2 -1.5 0 1 2 0 7 +12 -22.5 0 0 0 0 7 +8 -7 0 1 -7 0 7 +4 -5.5 0 0 0 0 7 +10 -8.75 0 0 0 0 7 +8 -9 0 0 0 0 7 +2 -4 0 0 0 0 7 +4 0 2 1 4 0 7 +8 -8 0 0 0 0 7 +9 -13.5 0 0 0 0 7 +9 -9 0 0 0 0 7 +6 -3.75 0 1 -3.75 0 7 +13 0 6 0 6 0 7 +5 -1.88 0 1 5 0 7 +6 -6 0 0 0 0 7 +5 -6.88 0 0 0 0 7 +8 -16 0 0 0 0 7 +12 -7.5 0 1 -7.5 0 7 +5 -1.25 0 1 -1.25 0 7 +9 -14.63 0 0 0 0 7 +8 -4 0 1 8 0 7 +10 -17.5 0 0 0 0 7 +5 -3.75 0 1 5 0 7 +6 -10.5 0 0 0 0 7 +13 0 5 1 13 0 7 +10 -16.25 0 0 0 0 7 +5 -7.5 0 0 0 0 7 +2 -1.75 0 1 2 0 7 +5 -9.38 0 0 0 0 7 +2 -2.75 0 0 0 0 7 +2 -0.75 0 1 -0.75 0 7 +5 -8.13 0 0 0 0 7 +9 -11.25 0 0 0 0 7 +8 -13 0 0 0 0 7 +9 -16.88 0 0 0 0 7 +2 -2 0 0 0 0 7 +12 -18 0 0 0 0 7 +8 -2 0 1 -2 0 7 +2 -3 0 0 0 0 7 +6 -4.5 0 1 6 0 7 +5 0 2 1 5 0 7 +12 -19.5 0 0 0 0 7 +9 -15.75 0 0 0 0 7 +8 -6 0 0 0 0 7 +10 -2.5 0 1 -2.5 0 7 +9 -6.75 0 0 0 0 7 +6 -6.75 0 0 0 0 7 +2 -3.75 0 0 0 0 7 +10 -5 0 1 -5 0 7 +2 -2.25 0 0 0 0 7 +26 0 12 1 26 0 7 +12 -13.5 0 0 0 0 7 +8 -5 0 0 0 0 7 +6 
-3 0 1 6 0 7 +10 -3.75 0 1 10 0 7 +12 -10.5 0 0 0 0 7 +4 -5 0 0 0 0 7 +9 -2.25 0 1 -2.25 0 7 +4 -3 0 0 0 0 7 +9 -10.13 0 0 0 0 7 +28 0 13 0 13 0 7 +22 0 10 1 22 0 7 +10 -10 0 0 0 0 7 +4 -1 0 1 -1 0 7 +4 -2.5 0 0 0 0 7 +12 -24 0 0 0 0 7 +8 -12 0 0 0 0 7 +3 0 1 1 0 0 7 +9 -12.38 0 0 0 0 7 +23 0 10 1 0 0 7 +4 -3.5 0 0 0 0 7 +4 -1.5 0 1 4 0 7 +8 -10 0 0 0 0 7 +8 -14 0 0 0 0 7 +4 -6 0 0 0 0 7 +25 0 10 1 0 0 7 +12 -16.5 0 0 0 0 7 +12 -12 0 0 0 0 7 +5 -2.5 0 1 5 0 7 +5 -8.75 0 0 0 0 7 +12 -4.5 0 1 -4.5 0 7 +12 -15 0 0 0 0 7 +5 -3.13 0 0 0 0 7 +12 -21 0 1 12 0 7 +5 -4.38 0 0 0 0 7 +6 -11.25 0 0 0 0 7 +30 0 12 1 0 0 7 +6 -1.5 0 1 6 0 7 +12 0 6 1 12 0 7 +4 -2 0 1 4 0 7 +10 -15 0 0 0 0 7 +6 -2.25 0 1 6 0 7 +10 -20 0 0 0 0 7 +6 -5.25 0 0 0 0 7 +5 -6.25 0 0 0 0 7 +6 -8.25 0 0 0 0 7
\ No newline at end of file
diff --git a/R/inst/extdata/rdt_exampleData.txt b/R/inst/extdata/rdt_exampleData.txt
new file mode 100644
index 00000000..79d99830
--- /dev/null
+++ b/R/inst/extdata/rdt_exampleData.txt
@@ -0,0 +1,901 @@
+subjID trial_number gamble_cha RT cert gain loss type_cha trial_payoff outcome happy RT_happy gamble type
+1 1 safe 1935 40 0 88 loss 0 -40 0 689 0 -1 +1 2 safe 5581 0 103 198 mixed 0 0 0 689 0 0 +1 3 safe 5871 56 0 116 loss 0 -56 0 689 0 -1 +1 4 safe 3932 0 61 124 mixed 0 0 -1 3353 0 0 +1 5 risky 3838 0 60 48 mixed 0 60 -1 3353 1 0 +1 6 risky 1228 0 304 302 mixed 0 -302 -1 3353 1 0 +1 7 safe 2443 76 0 255 loss 0 -76 -1 1064 0 -1 +1 8 safe 1024 96 197 0 gain 0 96 -1 1064 0 1 +1 9 safe 1107 60 190 0 gain 0 60 1 692 0 1 +1 10 safe 1546 80 254 0 gain 0 80 1 692 0 1 +1 11 safe 3902 37 0 70 loss 0 -37 1 944 0 -1 +1 12 risky 1349 0 158 79 mixed 0 158 1 944 1 0 +1 13 safe 794 116 0 598 loss 0 -116 2 811 0 -1 +1 14 safe 1330 60 0 144 loss 0 -60 2 811 0 -1 +1 15 risky 1210 81 228 0 gain 0 0 0 411 1 1 +1 16 risky 1138 0 303 247 mixed 0 303 0 411 1 0 +1 17 risky 996 81 148 0 gain 0 0 1 600 1 1 +1 18 risky 3145 0 101 50 mixed 0 -50 1 600 1 0 +1 19 risky 138 82 335 0 gain 0 0 1 600 1 1 +1 20 risky 3909 104 0 182 loss 0 0 -1 1103 1 -1 +1 21 safe 1575 0 301 449 mixed 0 0 -1 1103 0 0 +1 22 safe 2616 36 74 0 gain 0 36 1 756 0 1 +1 23 risky 2635 59 0 106 loss 0 0 1 756 1 -1 +1 24 risky 3355 0 102 41 mixed 0 -41 1 756 1 0 +1 25 safe 1038 101 0 419 loss 0 -101 -1 955 0 -1 +1 26 safe 893 83 0 284 loss 0 -83 -1 955 0 -1 +1 27 risky 636 39 197 0 gain 0 197 -1 955 1 1 +1 28 safe 139 100 0 503 loss 0 -100 1 629 0 -1 +1 29 risky 333 103 357 0 gain 0 0 1 629 1 1 +1 30 safe 636 117 220 0 gain 0 117 -1 611 0 1 +1 31 risky 1001 64 0 101 loss 0 0 -1 611 1 -1 +1 32 safe 2614 99 182 0 gain 0 99 1 503 0 1 +1 33 safe 596 97 0 281 loss 0 -97 1 503 0 -1 +1 34 risky 96 77 401 0 gain 0 0 0 589 1 1 +1 35 safe 215 98 0 222 loss 0 -98 0 589 0 -1 +1 36 risky 920 0 58 15 mixed 0 -15 -1 490 1 0 +1 37 safe 537 40 0 143 loss 0 -40 -1 490 0 -1 +1 38 risky 1164 0 223 113 mixed 0 -113 -1 939 1 0 +1 39 risky 3247 124 268 0 gain 0 268 -1 939 1 1 +1 40 risky 42 0 63 16 mixed 0 -16 -1 939 1 0 +1 41 risky 131 96 225 0 gain 0 0 -1 667 1 1 +1 42 risky 920 0 223 149 mixed 0 -149 -1 667 1 0 +1 43 risky 612 0 104 28 mixed 0 -28 -1 667 1 0 +1 44 risky 3925 77 0 133 loss 0 -133 -2 639 1 -1 +1 45 safe 1912 64 0 136 loss 0 -64 -2 639 0 -1 +1 46 safe 162 120 0 433 loss 0 -120 -2 639 0 -1 +1 47 risky 369 104 319 0 gain 0 0 -2 641 1 1 +1 48 risky 1531 43 0 77 loss 0 0 -2 641 1 -1 +1 49 risky 640 0 61 40 mixed 0 -40 -1 772 1 0 +1 50 risky 635 0 160 131 mixed 0 -131 -1 772 1 0 +1 51 risky 41 58 151 0 gain 0 151 -1 772 1 1 +1 52 risky 305 0 304 201 mixed 0 304 -2
684 1 0 +1 53 safe 2651 78 0 178 loss 0 -78 -2 684 0 -1 +1 54 risky 239 0 304 153 mixed 0 -153 0 248 1 0 +1 55 risky 805 116 0 200 loss 0 0 0 248 1 -1 +1 56 risky 222 0 59 34 mixed 0 -34 0 463 1 0 +1 57 safe 338 80 0 336 loss 0 -80 0 463 0 -1 +1 58 risky 248 62 252 0 gain 0 0 0 1483 1 1 +1 59 safe 216 80 0 162 loss 0 -80 0 1483 0 -1 +1 60 risky 538 0 97 19 mixed 0 97 0 1129 1 0 +1 61 safe 351 0 102 120 mixed 0 0 0 1129 0 0 +1 62 risky 2484 119 381 0 gain 0 381 0 262 1 1 +1 63 safe 217 123 0 383 loss 0 -123 0 262 0 -1 +1 64 risky 18 117 298 0 gain 0 298 0 1059 1 1 +1 65 safe 71 0 98 154 mixed 0 0 0 1059 0 0 +1 66 safe 373 83 0 221 loss 0 -83 2 753 0 -1 +1 67 risky 619 62 217 0 gain 0 217 2 753 1 1 +1 68 safe 1612 0 61 58 mixed 0 0 2 753 0 0 +1 69 risky 34 37 84 0 gain 0 0 1 874 1 1 +1 70 safe 134 100 0 252 loss 0 -100 1 874 0 -1 +1 71 safe 714 99 169 0 gain 0 99 1 874 0 1 +1 72 safe 828 0 217 179 mixed 0 0 0 3052 0 0 +1 73 safe 88 0 297 364 mixed 0 0 0 3052 0 0 +1 74 risky 558 41 171 0 gain 0 171 0 3052 1 1 +1 75 risky 364 44 79 0 gain 0 0 0 865 1 1 +1 76 risky 501 123 200 0 gain 0 200 0 865 1 1 +1 77 risky 43 82 181 0 gain 0 181 0 552 1 1 +1 78 risky 120 0 61 24 mixed 0 61 0 552 1 0 +1 79 safe 2328 102 0 203 loss 0 -102 2 583 0 -1 +1 80 risky 263 0 303 92 mixed 0 -92 2 583 1 0 +1 81 safe 358 37 0 199 loss 0 -37 -1 611 0 -1 +1 82 safe 1306 82 0 144 loss 0 -82 -1 611 0 -1 +1 83 safe 101 0 218 438 mixed 0 0 0 824 0 0 +1 84 risky 598 119 430 0 gain 0 430 0 824 1 1 +1 85 risky 541 39 110 0 gain 0 0 0 279 1 1 +1 86 risky 6345 0 103 62 mixed 0 -62 0 279 1 0 +1 87 risky 208 44 143 0 gain 0 0 0 279 1 1 +1 88 safe 142 81 0 398 loss 0 -81 -1 618 0 -1 +1 89 safe 105 63 0 215 loss 0 -63 -1 618 0 -1 +1 90 safe 1436 0 218 263 mixed 0 0 -1 618 0 0 +1 91 safe 1136 43 0 108 loss 0 -43 -1 579 0 -1 +1 92 safe 229 79 0 202 loss 0 -79 -1 579 0 -1 +1 93 safe 595 97 0 318 loss 0 -97 0 1311 0 -1 +1 94 safe 5488 0 158 191 mixed 0 0 0 1311 0 0 +1 95 safe 1038 56 0 302 loss 0 -56 0 1311 0 -1 +1 96 safe 124 103 0 364 loss 0 -103 -1 1420 0 -1 +1 97 safe 788 117 0 293 loss 0 -117 -1 1420 0 -1 +1 98 safe 270 0 301 59 mixed 0 0 -1 1420 0 0 +1 99 risky 348 99 248 0 gain 0 248 0 476 1 1 +1 100 risky 2651 0 163 45 mixed 0 -45 0 476 1 0 +1 101 safe 84 42 0 172 loss 0 -42 -1 1537 0 -1 +1 102 safe 40 0 156 243 mixed 0 0 -1 1537 0 0 +1 103 risky 204 0 157 101 mixed 0 157 -1 1537 1 0 +1 104 risky 24 57 133 0 gain 0 133 0 5156 1 1 +1 105 safe 3897 99 417 0 gain 0 99 0 5156 0 1 +1 106 safe 3165 78 130 0 gain 0 78 0 5156 0 1 +1 107 risky 157 83 202 0 gain 0 0 1 595 1 1 +1 108 risky 628 39 123 0 gain 0 0 1 595 1 1 +1 109 safe 225 0 102 78 mixed 0 0 1 595 0 0 +1 110 safe 512 61 0 252 loss 0 -61 -2 700 0 -1 +1 111 safe 185 118 0 219 loss 0 -118 -2 700 0 -1 +1 112 risky 381 56 303 0 gain 0 303 -2 700 1 1 +1 113 risky 410 83 157 0 gain 0 157 -1 1091 1 1 +1 114 risky 1205 0 303 124 mixed 0 303 -1 1091 1 0 +1 115 risky 2050 98 0 166 loss 0 -166 -1 1091 1 -1 +1 116 risky 171 0 159 61 mixed 0 159 0 258 1 0 +1 117 safe 147 0 162 161 mixed 0 0 0 258 0 0 +1 118 safe 410 0 216 334 mixed 0 0 0 258 0 0 +1 119 safe 372 61 0 185 loss 0 -61 0 2066 0 -1 +1 120 safe 1952 62 103 0 gain 0 62 0 2066 0 1 +1 121 safe 516 117 0 503 loss 0 -117 0 2621 0 -1 +1 122 safe 479 0 296 604 mixed 0 0 0 2621 0 0 +1 123 risky 154 119 335 0 gain 0 0 -1 557 1 1 +1 124 safe 3132 0 64 92 mixed 0 0 -1 557 0 0 +1 125 risky 266 0 224 65 mixed 0 224 -1 557 1 0 +1 126 risky 4022 116 240 0 gain 1 240 0 1164 1 1 +1 127 risky 123 56 166 0 gain 0 0 0 1164 1 1 +1 128 safe 499 39 71 0 
gain 0 39 0 1164 0 1 +1 129 risky 611 44 101 0 gain 0 101 -1 1045 1 1 +1 130 safe 267 0 104 97 mixed 0 0 -1 1045 0 0 +1 131 safe 423 39 0 100 loss 0 -39 -1 1045 0 -1 +1 132 risky 129 0 219 40 mixed 0 -40 -1 626 1 0 +1 133 safe 903 122 0 339 loss 0 -122 -1 626 0 -1 +1 134 safe 771 0 58 73 mixed 0 0 -1 442 0 0 +1 135 risky 1178 101 497 0 gain 0 497 -1 442 1 1 +1 136 safe 156 123 0 239 loss 0 -123 -1 442 0 -1 +1 137 risky 50 123 601 0 gain 0 0 1 826 1 1 +1 138 risky 4906 83 291 0 gain 0 291 1 826 1 1 +1 139 risky 11109 0 156 31 mixed 0 -31 1 826 1 0 +1 140 risky 795 121 504 0 gain 0 0 -1 651 1 1 +1 141 risky 715 40 0 68 loss 0 0 -1 651 1 -1 +1 142 safe 449 43 0 126 loss 0 -43 -1 651 0 -1 +1 143 risky 13105 0 222 84 mixed 0 -84 0 5028 1 0 +1 144 safe 188 0 158 318 mixed 0 0 0 5028 0 0 +1 145 risky 2599 61 111 0 gain 0 111 0 5028 1 1 +1 146 risky 546 59 124 0 gain 0 124 0 816 1 1 +1 147 safe 405 0 220 223 mixed 0 0 0 816 0 0 +1 148 safe 787 61 0 172 loss 0 -61 0 816 0 -1 +1 149 safe 742 124 0 263 loss 0 -124 -1 1037 0 -1 +1 150 risky 786 103 280 0 gain 0 280 -1 1037 1 1 +2 1 risky 923 64 113 0 gain 0 113 1 4009 1 1 +2 2 safe 854 44 0 75 loss 0 -44 1 4009 0 -1 +2 3 safe 1204 0 220 440 mixed 0 0 1 4009 0 0 +2 4 risky 207 104 416 0 gain 0 416 1 2004 1 1 +2 5 safe 328 124 340 0 gain 0 124 1 2004 0 1 +2 6 risky 521 0 298 87 mixed 0 298 2 1635 1 0 +2 7 safe 488 83 136 0 gain 0 83 2 1635 0 1 +2 8 safe 49 64 145 0 gain 0 64 2 1635 0 1 +2 9 safe 420 42 0 91 loss 0 -42 3 1663 0 -1 +2 10 risky 668 0 223 149 mixed 0 223 3 1663 1 0 +2 11 safe 124 0 223 216 mixed 0 0 3 1663 0 0 +2 12 safe 304 0 224 183 mixed 0 0 4 2299 0 0 +2 13 safe 64 0 220 332 mixed 0 0 4 2299 0 0 +2 14 safe 551 0 102 41 mixed 0 0 4 2299 0 0 +2 15 risky 253 0 161 128 mixed 0 -128 3 1977 1 0 +2 16 safe 717 82 0 143 loss 0 -82 3 1977 0 -1 +2 17 risky 263 82 397 0 gain 0 0 3 1977 1 1 +2 18 safe 35 0 97 199 mixed 0 0 2 1359 0 0 +2 19 safe 251 124 297 0 gain 0 124 2 1359 0 1 +2 20 safe 339 119 0 244 loss 0 -119 3 1376 0 -1 +2 21 safe 252 100 0 503 loss 0 -100 3 1376 0 -1 +2 22 safe 305 79 253 0 gain 0 79 3 1376 0 1 +2 23 safe 50 57 0 296 loss 0 -57 2 1831 0 -1 +2 24 risky 734 77 201 0 gain 0 0 2 1831 1 1 +2 25 safe 496 122 0 505 loss 0 -122 1 1231 0 -1 +2 26 safe 484 98 0 224 loss 0 -98 1 1231 0 -1 +2 27 risky 813 0 303 364 mixed 0 -364 1 1231 1 0 +2 28 safe 447 0 161 194 mixed 0 0 -1 923 0 0 +2 29 safe 297 98 182 0 gain 0 98 -1 923 0 1 +2 30 safe 438 101 318 0 gain 0 101 -1 923 0 1 +2 31 risky 357 62 96 0 gain 0 0 0 1046 1 1 +2 32 safe 369 0 57 124 mixed 0 0 0 1046 0 0 +2 33 risky 357 117 205 0 gain 0 205 0 585 1 1 +2 34 safe 548 0 62 73 mixed 0 0 0 585 0 0 +2 35 safe 354 44 64 0 gain 0 44 1 800 0 1 +2 36 safe 955 0 156 51 mixed 0 0 1 800 0 0 +2 37 safe 752 116 0 215 loss 0 -116 0 693 0 -1 +2 38 safe 323 40 0 67 loss 0 -40 0 693 0 -1 +2 39 safe 286 79 161 0 gain 0 79 1 1575 0 1 +2 40 safe 563 0 60 92 mixed 0 0 1 1575 0 0 +2 41 safe 380 0 301 118 mixed 0 0 1 1575 0 0 +2 42 safe 374 56 0 147 loss 0 -56 1 1684 0 -1 +2 43 safe 309 99 203 0 gain 0 99 1 1684 0 1 +2 44 safe 518 103 362 0 gain 0 103 2 947 0 1 +2 45 safe 255 0 96 51 mixed 0 0 2 947 0 0 +2 46 safe 305 0 63 64 mixed 0 0 2 947 0 0 +2 47 safe 1508 61 0 193 loss 0 -61 2 1786 0 -1 +2 48 safe 241 123 500 0 gain 0 123 2 1786 0 1 +2 49 safe 131 0 102 64 mixed 0 0 2 6084 0 0 +2 50 safe 219 103 0 417 loss 0 -103 2 6084 0 -1 +2 51 safe 502 0 219 48 mixed 0 0 2 6084 0 0 +2 52 safe 326 0 57 18 mixed 0 0 2 1639 0 0 +2 53 safe 292 59 0 117 loss 0 -59 2 1639 0 -1 +2 54 safe 684 76 333 0 gain 0 76 -1 
5341 0 1 +2 55 safe 3649 117 218 0 gain 0 117 -1 5341 0 1 +2 56 safe 792 98 0 167 loss 0 -98 1 1275 0 -1 +2 57 safe 296 0 223 67 mixed 0 0 1 1275 0 0 +2 58 risky 323 121 0 380 loss 0 -380 1 1275 1 -1 +2 59 safe 667 40 0 123 loss 0 -40 -2 2709 0 -1 +2 60 safe 2437 40 78 0 gain 0 40 -2 2709 0 1 +2 61 safe 338 98 0 199 loss 0 -98 -2 2709 0 -1 +2 62 safe 264 100 0 282 loss 0 -100 1 2729 0 -1 +2 63 safe 181 0 162 320 mixed 0 0 1 2729 0 0 +2 64 safe 321 0 300 156 mixed 0 0 1 2728 0 0 +2 65 safe 1519 0 299 194 mixed 0 0 1 2728 0 0 +2 66 safe 452 100 0 312 loss 0 -100 1 2728 0 -1 +2 67 safe 215 57 304 0 gain 0 57 2 3944 0 1 +2 68 safe 224 122 0 262 loss 0 -122 2 3944 0 -1 +2 69 safe 911 43 108 0 gain 0 43 1 8538 0 1 +2 70 safe 263 99 276 0 gain 0 99 1 8538 0 1 +2 71 safe 140 0 220 261 mixed 0 0 1 1240 0 0 +2 72 safe 290 81 0 251 loss 0 -81 1 1240 0 -1 +2 73 safe 262 98 496 0 gain 0 98 0 536 0 1 +2 74 safe 452 0 299 304 mixed 0 0 0 536 0 0 +2 75 safe 3302 61 0 250 loss 0 -61 0 536 0 -1 +2 76 safe 281 40 0 199 loss 0 -40 -5 2275 0 -1 +2 77 safe 413 121 0 435 loss 0 -121 -5 2275 0 -1 +2 78 safe 5118 39 127 0 gain 0 39 -5 2275 0 1 +2 79 safe 95 123 236 0 gain 0 123 -2 2182 0 1 +2 80 safe 274 39 0 167 loss 0 -39 -2 2182 0 -1 +2 81 safe 616 62 213 0 gain 0 62 -2 2182 0 1 +2 82 safe 4258 37 0 111 loss 0 -37 3 4483 0 -1 +2 83 safe 273 118 431 0 gain 0 118 3 4483 0 1 +2 84 safe 5294 0 100 104 mixed 0 0 3 4483 0 0 +2 85 safe 264 0 161 67 mixed 0 0 1 5178 0 0 +2 86 safe 344 121 381 0 gain 0 121 1 5178 0 1 +2 87 safe 311 0 158 34 mixed 0 0 -4 970 0 0 +2 88 safe 263 78 0 179 loss 1 -78 -4 970 0 -1 +2 89 safe 298 84 284 0 gain 0 84 -3 949 0 1 +2 90 risky 39 61 0 165 loss 0 -165 -3 949 1 -1 +2 91 safe 4045 43 95 0 gain 0 43 -3 949 0 1 +2 92 safe 269 0 163 79 mixed 0 0 -3 885 0 0 +2 93 safe 41 44 72 0 gain 0 44 -3 885 0 1 +2 94 safe 3629 62 189 0 gain 0 62 -3 885 0 1 +2 95 safe 924 0 103 148 mixed 0 0 -4 1299 0 0 +2 96 safe 268 81 0 404 loss 0 -81 -4 1299 0 -1 +2 97 safe 348 0 98 86 mixed 0 0 0 1213 0 0 +2 98 safe 286 37 148 0 gain 0 37 0 1213 0 1 +2 99 safe 282 0 156 104 mixed 0 0 0 1213 0 0 +2 100 safe 1223 38 0 140 loss 0 -38 -2 2135 0 -1 +2 101 safe 322 118 0 600 loss 0 -118 -2 2135 0 -1 +2 102 safe 425 0 58 42 mixed 0 0 -2 2135 0 0 +2 103 safe 288 0 96 27 mixed 0 0 -4 1296 0 0 +2 104 safe 267 0 100 20 mixed 0 0 -4 1296 0 0 +2 105 risky 957 121 0 295 loss 0 0 -4 1296 1 -1 +2 106 safe 451 0 96 121 mixed 0 0 0 1416 0 0 +2 107 safe 188 0 61 24 mixed 0 0 0 1416 0 0 +2 108 risky 1784 37 0 81 loss 0 -81 0 1416 1 -1 +2 109 safe 327 0 296 454 mixed 0 0 -1 1369 0 0 +2 110 safe 278 38 0 95 loss 0 -38 -1 1369 0 -1 +2 111 safe 406 104 0 182 loss 0 -104 -1 774 0 -1 +2 112 safe 757 56 137 0 gain 0 56 -1 774 0 1 +2 113 safe 295 0 299 249 mixed 0 0 -1 774 0 0 +2 114 safe 2575 80 224 0 gain 0 80 -1 596 0 1 +2 115 safe 499 83 0 340 loss 0 -83 -1 596 0 -1 +2 116 safe 261 60 250 0 gain 0 60 -1 596 0 1 +2 117 safe 174 60 0 112 loss 0 -60 1 1222 0 -1 +2 118 safe 224 44 197 0 gain 0 44 1 1222 0 1 +2 119 safe 272 83 0 227 loss 0 -83 -1 505 0 -1 +2 120 safe 267 0 299 57 mixed 0 0 -1 505 0 0 +2 121 safe 341 56 116 0 gain 0 56 -1 505 0 1 +2 122 safe 155 119 0 204 loss 0 -119 1 1054 0 -1 +2 123 safe 480 41 86 0 gain 0 41 1 1054 0 1 +2 124 safe 271 39 168 0 gain 0 39 1 1054 0 1 +2 125 safe 250 117 263 0 gain 0 117 -5 993 0 1 +2 126 safe 334 0 223 88 mixed 0 0 -5 993 0 0 +2 127 safe 308 63 171 0 gain 0 63 -4 1144 0 1 +2 128 safe 260 0 220 112 mixed 0 0 -4 1144 0 0 +2 129 safe 245 98 219 0 gain 0 98 -4 1023 0 1 +2 130 safe 322 103 169 0 
gain 0 103 -4 1023 0 1 +2 131 safe 314 83 0 290 loss 0 -83 -4 1023 0 -1 +2 132 safe 265 0 62 46 mixed 0 0 -5 1193 0 0 +2 133 safe 454 117 0 337 loss 0 -117 -5 1193 0 -1 +2 134 safe 1290 82 146 0 gain 0 82 0 268 0 1 +2 135 safe 9 59 0 135 loss 0 -59 0 268 0 -1 +2 136 safe 233 83 174 0 gain 0 83 -2 779 0 1 +2 137 safe 314 57 0 98 loss 0 -57 -2 779 0 -1 +2 138 safe 267 99 0 249 loss 0 -99 -2 779 0 -1 +2 139 safe 245 104 252 0 gain 0 104 -3 870 0 1 +2 140 safe 79 58 0 215 loss 0 -58 -3 870 0 -1 +2 141 safe 280 76 0 158 loss 0 -76 -3 809 0 -1 +2 142 safe 255 0 164 157 mixed 0 0 -3 809 0 0 +2 143 safe 550 77 0 196 loss 0 -77 -3 969 0 -1 +2 144 safe 77 79 0 135 loss 0 -79 -3 969 0 -1 +2 145 safe 265 0 303 604 mixed 0 0 -2 1203 0 0 +2 146 safe 292 0 156 244 mixed 0 0 -2 1203 0 0 +2 147 safe 300 100 0 364 loss 0 -100 -5 2008 0 -1 +2 148 safe 369 0 64 12 mixed 0 0 -5 2008 0 0 +2 149 risky 300 0 62 27 mixed 0 -27 -3 967 1 0 +2 150 safe 125 123 603 0 gain 0 123 -3 967 0 1 +3 1 risky 1331 0 161 80 mixed 0 -80 0 2245 1 0 +3 2 risky 791 39 69 0 gain 0 0 0 2245 1 1 +3 3 risky 774 120 266 0 gain 0 0 -1 3256 1 1 +3 4 risky 818 63 169 0 gain 0 0 -1 3256 1 1 +3 5 safe 1108 0 304 89 mixed 0 0 -2 2171 0 0 +3 6 safe 1853 0 303 356 mixed 0 0 -2 2171 0 0 +3 7 safe 672 38 103 0 gain 0 38 -2 2171 0 1 +3 8 risky 1258 124 222 0 gain 0 0 1 1230 1 1 +3 9 safe 1401 0 61 30 mixed 0 0 1 1230 0 0 +3 10 risky 2620 116 0 222 loss 0 -222 1 1230 1 -1 +3 11 risky 4004 76 0 292 loss 0 0 0 2470 1 -1 +3 12 safe 2848 96 0 245 loss 0 -96 0 2470 0 -1 +3 13 safe 496 116 198 0 gain 0 116 0 1497 0 1 +3 14 risky 3183 103 0 496 loss 0 0 0 1497 1 -1 +3 15 safe 1640 123 0 300 loss 0 -123 0 1497 0 -1 +3 16 risky 2010 36 201 0 gain 0 0 1 966 1 1 +3 17 safe 1860 79 159 0 gain 0 79 1 966 0 1 +3 18 safe 731 41 170 0 gain 0 41 1 966 0 1 +3 19 safe 3531 118 0 338 loss 0 -118 2 1359 0 -1 +3 20 safe 1121 0 158 68 mixed 0 0 2 1359 0 0 +3 21 risky 642 62 0 101 loss 0 -101 2 1359 1 -1 +3 22 safe 892 64 0 130 loss 0 -64 2 1610 0 -1 +3 23 safe 2057 102 315 0 gain 0 102 2 1610 0 1 +3 24 safe 369 96 172 0 gain 0 96 2 2510 0 1 +3 25 safe 675 37 0 113 loss 0 -37 2 2510 0 -1 +3 26 risky 1180 0 102 17 mixed 0 -17 2 2510 1 0 +3 27 safe 484 0 99 62 mixed 0 0 2 1207 0 0 +3 28 safe 1374 36 0 143 loss 0 -36 2 1207 0 -1 +3 29 safe 243 42 0 167 loss 0 -42 2 1207 0 -1 +3 30 risky 5007 0 102 55 mixed 0 102 1 1043 1 0 +3 31 safe 382 43 0 197 loss 0 -43 1 1043 0 -1 +3 32 safe 1432 118 501 0 gain 0 118 1 1043 0 1 +3 33 safe 3694 60 0 149 loss 0 -60 2 1278 0 -1 +3 34 risky 930 0 160 102 mixed 0 -102 2 1278 1 0 +3 35 safe 2289 81 221 0 gain 0 81 1 1652 0 1 +3 36 safe 1958 61 218 0 gain 0 61 1 1652 0 1 +3 37 safe 2900 0 58 88 mixed 0 0 2 3032 0 0 +3 38 safe 772 122 0 506 loss 0 -122 2 3032 0 -1 +3 39 safe 560 0 223 224 mixed 0 0 2 938 0 0 +3 40 safe 691 0 300 602 mixed 0 0 2 938 0 0 +3 41 safe 843 77 142 0 gain 0 77 2 938 0 1 +3 42 risky 2174 0 300 243 mixed 0 300 2 1234 1 0 +3 43 safe 2380 44 110 0 gain 0 44 2 1234 0 1 +3 44 safe 887 83 253 0 gain 0 83 2 931 0 1 +3 45 safe 329 119 433 0 gain 0 119 2 931 0 1 +3 46 safe 2179 0 297 157 mixed 0 0 2 931 0 0 +3 47 risky 2493 59 111 0 gain 0 0 3 1445 1 1 +3 48 safe 250 0 221 443 mixed 0 0 3 1445 0 0 +3 49 safe 909 37 0 85 loss 0 -37 3 1445 0 -1 +3 50 risky 2574 57 0 117 loss 0 -117 2 1088 1 -1 +3 51 risky 2379 0 102 34 mixed 0 -34 2 1088 1 0 +3 52 risky 902 0 156 29 mixed 0 156 1 839 1 0 +3 53 safe 2553 63 0 166 loss 0 -63 1 839 0 -1 +3 54 risky 2147 100 0 178 loss 0 -178 1 839 1 -1 +3 55 safe 816 84 0 141 loss 0 -84 0 1472 0 -1 +3 
56 safe 557 0 99 198 mixed 0 0 0 1472 0 0 +3 57 safe 703 120 0 244 loss 0 -120 0 1472 0 -1 +3 58 safe 404 78 0 401 loss 0 -78 0 1263 0 -1 +3 59 safe 2504 0 220 111 mixed 0 0 0 1263 0 0 +3 60 safe 2346 116 298 0 gain 0 116 -1 2409 0 1 +3 61 safe 2756 63 249 0 gain 0 63 -1 2409 0 1 +3 62 safe 1211 0 220 178 mixed 0 0 -1 2409 0 0 +3 63 safe 1053 0 304 297 mixed 0 0 1 852 0 0 +3 64 risky 1740 116 0 201 loss 0 0 1 852 1 -1 +3 65 safe 1039 83 338 0 gain 0 83 2 1081 0 1 +3 66 safe 522 82 0 197 loss 0 -82 2 1081 0 -1 +3 67 safe 1987 38 123 0 gain 0 38 2 1081 0 1 +3 68 safe 475 61 0 108 loss 0 -61 2 1125 0 -1 +3 69 safe 198 43 0 128 loss 0 -43 2 1125 0 -1 +3 70 safe 4435 0 104 97 mixed 0 0 1 876 0 0 +3 71 safe 477 122 0 429 loss 0 -122 1 876 0 -1 +3 72 safe 2442 0 157 188 mixed 0 0 1 876 0 0 +3 73 risky 2927 38 91 0 gain 0 0 1 8932 1 1 +3 74 safe 641 41 0 68 loss 0 -41 1 8932 0 -1 +3 75 safe 7035 0 101 150 mixed 0 0 2 1656 0 0 +3 76 risky 4390 104 0 165 loss 0 0 2 1656 1 -1 +3 77 risky 3113 0 57 9 mixed 0 -9 3 1419 1 0 +3 78 risky 839 0 223 45 mixed 0 -45 3 1419 1 0 +3 79 safe 576 62 134 0 gain 0 62 3 1419 0 1 +3 80 safe 774 100 0 422 loss 0 -100 3 1130 0 -1 +3 81 safe 1030 57 0 190 loss 0 -57 3 1130 0 -1 +3 82 safe 713 0 58 76 mixed 0 0 2 2386 0 0 +3 83 safe 1322 0 62 63 mixed 0 0 2 2386 0 0 +3 84 risky 2147 98 501 0 gain 0 501 3 1623 1 1 +3 85 safe 1100 120 0 376 loss 0 -120 3 1623 0 -1 +3 86 safe 417 79 0 336 loss 0 -79 3 1445 0 -1 +3 87 safe 851 0 58 23 mixed 0 0 3 1445 0 0 +3 88 safe 168 0 61 124 mixed 0 0 3 1445 0 0 +3 89 safe 93 119 336 0 gain 0 119 3 1825 0 1 +3 90 safe 993 0 156 158 mixed 0 0 3 1825 0 0 +3 91 safe 299 101 178 0 gain 0 101 3 1825 0 1 +3 92 safe 1636 0 304 123 mixed 0 0 4 5059 0 0 +3 93 risky 3777 0 62 21 mixed 0 62 4 5059 1 0 +3 94 safe 525 0 299 447 mixed 0 0 3 1872 0 0 +3 95 safe 588 102 222 0 gain 0 102 3 1872 0 1 +3 96 safe 551 0 217 87 mixed 0 0 2 1089 0 0 +3 97 safe 630 42 68 0 gain 0 42 2 1089 0 1 +3 98 risky 1675 0 96 120 mixed 0 -120 3 1358 1 0 +3 99 safe 133 83 0 137 loss 0 -83 3 1358 0 -1 +3 100 safe 210 118 598 0 gain 0 118 3 1358 0 1 +3 101 safe 355 100 0 219 loss 0 -100 3 1032 0 -1 +3 102 safe 656 63 100 0 gain 0 63 3 1032 0 1 +3 103 safe 210 0 164 324 mixed 0 0 3 1032 0 0 +3 104 safe 4184 83 0 174 loss 0 -83 2 4160 0 -1 +3 105 risky 1178 56 297 0 gain 0 297 2 4160 1 1 +3 106 risky 2517 36 143 0 gain 0 143 3 2632 1 1 +3 107 safe 1485 101 283 0 gain 0 101 3 2632 0 1 +3 108 safe 388 41 0 66 loss 0 -41 3 2632 0 -1 +3 109 safe 1077 0 303 197 mixed 0 0 3 3534 0 0 +3 110 safe 330 41 0 98 loss 0 -41 3 3534 0 -1 +3 111 risky 975 59 191 0 gain 0 0 2 956 1 1 +3 112 risky 568 56 120 0 gain 0 0 2 956 1 1 +3 113 safe 397 58 0 248 loss 0 -58 2 956 0 -1 +3 114 risky 645 0 304 59 mixed 0 304 1 804 1 0 +3 115 safe 940 0 99 38 mixed 0 0 1 804 0 0 +3 116 risky 2733 102 0 281 loss 0 -281 2 1208 1 -1 +3 117 safe 1662 104 0 357 loss 0 -104 2 1208 0 -1 +3 118 risky 2275 83 197 0 gain 0 197 2 1208 1 1 +3 119 safe 846 0 58 38 mixed 0 0 2 1291 0 0 +3 120 risky 1609 81 179 0 gain 0 179 2 1291 1 1 +3 121 safe 163 103 0 316 loss 0 -103 2 1291 0 -1 +3 122 risky 787 98 416 0 gain 0 416 3 1814 1 1 +3 123 safe 237 102 0 202 loss 0 -102 3 1814 0 -1 +3 124 safe 1772 0 161 131 mixed 0 0 3 1104 0 0 +3 125 safe 303 102 248 0 gain 0 102 3 1104 0 1 +3 126 safe 654 124 0 600 loss 0 -124 3 1195 0 -1 +3 127 safe 842 81 0 156 loss 0 -81 3 1195 0 -1 +3 128 risky 1931 0 217 70 mixed 0 217 3 2163 1 0 +3 129 safe 450 0 57 52 mixed 0 0 3 2163 0 0 +3 130 risky 1135 0 159 50 mixed 0 -50 3 2163 1 0 +3 131 risky 
522 79 396 0 gain 0 396 2 1997 1 1 +3 132 safe 678 42 78 0 gain 0 42 2 1997 0 1 +3 133 safe 296 0 158 236 mixed 0 0 2 1997 0 0 +3 134 risky 560 98 364 0 gain 0 364 2 1955 1 1 +3 135 safe 617 118 236 0 gain 0 118 2 1955 0 1 +3 136 safe 618 81 0 227 loss 0 -81 2 1955 0 -1 +3 137 safe 1294 0 219 327 mixed 0 0 2 1935 0 0 +3 138 safe 407 61 0 304 loss 0 -61 2 1935 0 -1 +3 139 safe 2348 120 378 0 gain 0 120 2 1630 0 1 +3 140 safe 527 76 284 0 gain 0 76 2 1630 0 1 +3 141 safe 1267 76 137 0 gain 0 76 2 1588 0 1 +3 142 safe 19 36 0 83 loss 0 -36 2 1588 0 -1 +3 143 safe 168 82 0 256 loss 0 -82 2 1588 0 -1 +3 144 safe 540 0 99 79 mixed 0 0 3 1509 0 0 +3 145 safe 601 59 149 0 gain 0 59 3 1509 0 1 +3 146 safe 27 0 216 266 mixed 0 0 3 1509 0 0 +3 147 safe 863 61 0 217 loss 0 -61 2 1697 0 -1 +3 148 safe 326 0 218 146 mixed 0 0 2 1697 0 0 +3 149 risky 2092 101 196 0 gain 0 0 2 1563 1 1 +3 150 safe 451 124 0 265 loss 1 -124 2 1563 0 -1 +4 1 risky 1858 103 0 200 loss 0 0 0 750 1 -1 +4 2 risky 579 101 498 0 gain 0 498 0 750 1 1 +4 3 safe 898 0 100 103 mixed 0 0 0 750 0 0 +4 4 safe 903 99 0 363 loss 0 -99 2 3404 0 -1 +4 5 risky 444 57 300 0 gain 0 300 2 3404 1 1 +4 6 risky 207 79 337 0 gain 0 0 2 2347 1 1 +4 7 safe 584 36 0 82 loss 0 -36 2 2347 0 -1 +4 8 risky 261 42 99 0 gain 0 0 0 1647 1 1 +4 9 risky 77 0 220 326 mixed 0 -326 0 1647 1 0 +4 10 safe 1259 122 0 435 loss 0 -122 0 1647 0 -1 +4 11 safe 848 77 0 143 loss 0 -77 -2 1605 0 -1 +4 12 risky 93 0 303 599 mixed 0 -599 -2 1605 1 0 +4 13 safe 460 38 0 128 loss 0 -38 -2 911 0 -1 +4 14 risky 21 0 299 244 mixed 0 -244 -2 911 1 0 +4 15 risky 865 116 0 500 loss 0 -500 -2 911 1 -1 +4 16 risky 253 120 265 0 gain 0 0 0 1210 1 1 +4 17 risky 837 39 87 0 gain 0 87 0 1210 1 1 +4 18 risky 1292 56 0 215 loss 0 -215 0 1210 1 -1 +4 19 risky 904 124 0 198 loss 0 0 -3 1807 1 -1 +4 20 risky 769 124 0 294 loss 0 0 -3 1807 1 -1 +4 21 risky 1280 116 0 376 loss 0 0 0 606 1 -1 +4 22 risky 1474 64 0 102 loss 0 -102 0 606 1 -1 +4 23 risky 91 0 161 49 mixed 0 161 0 3006 1 0 +4 24 risky 558 119 430 0 gain 0 0 0 3006 1 1 +4 25 risky 393 122 377 0 gain 0 377 0 554 1 1 +4 26 risky 426 96 169 0 gain 0 169 0 554 1 1 +4 27 risky 171 98 247 0 gain 0 0 0 554 1 1 +4 28 risky 890 62 99 0 gain 0 99 1 1038 1 1 +4 29 risky 611 82 227 0 gain 0 0 1 1038 1 1 +4 30 risky 464 0 222 178 mixed 0 222 1 1038 1 0 +4 31 risky 1210 0 103 16 mixed 0 103 1 1159 1 0 +4 32 risky 41 0 300 124 mixed 0 300 1 1159 1 0 +4 33 risky 474 0 300 300 mixed 0 -300 1 1159 1 0 +4 34 safe 438 0 63 58 mixed 0 0 -1 617 0 0 +4 35 risky 437 64 0 252 loss 0 0 -1 617 1 -1 +4 36 risky 507 0 222 260 mixed 0 -260 0 315 1 0 +4 37 risky 1028 63 0 121 loss 0 0 0 315 1 -1 +4 38 risky 420 77 195 0 gain 0 0 0 315 1 1 +4 39 risky 944 0 61 71 mixed 0 61 0 1350 1 0 +4 40 risky 623 39 0 90 loss 0 0 0 1350 1 -1 +4 41 risky 320 79 400 0 gain 0 400 0 469 1 1 +4 42 risky 477 63 187 0 gain 0 187 0 469 1 1 +4 43 risky 605 96 182 0 gain 0 182 0 469 1 1 +4 44 risky 1141 96 0 277 loss 0 -277 2 1110 1 -1 +4 45 risky 452 120 240 0 gain 0 240 2 1110 1 1 +4 46 risky 694 0 60 17 mixed 0 -17 2 1110 1 0 +4 47 risky 861 44 0 68 loss 0 0 0 815 1 -1 +4 48 risky 52 0 156 30 mixed 0 156 0 815 1 0 +4 49 risky 429 0 157 133 mixed 0 -133 0 815 1 0 +4 50 risky 579 0 61 15 mixed 0 61 0 1373 1 0 +4 51 risky 533 120 508 0 gain 0 508 0 1373 1 1 +4 52 risky 112 101 277 0 gain 0 277 0 1373 1 1 +4 53 risky 642 123 596 0 gain 0 0 2 907 1 1 +4 54 safe 707 0 164 237 mixed 0 0 2 907 0 0 +4 55 risky 1610 0 98 80 mixed 0 98 0 497 1 0 +4 56 safe 1042 63 0 152 loss 0 -63 0 497 0 -1 +4 
57 safe 1029 79 0 201 loss 0 -79 0 497 0 -1 +4 58 risky 385 0 162 82 mixed 0 -82 0 424 1 0 +4 59 risky 399 38 201 0 gain 0 201 0 424 1 1 +4 60 risky 549 0 301 358 mixed 0 301 0 976 1 0 +4 61 risky 453 79 251 0 gain 0 251 0 976 1 1 +4 62 risky 662 56 111 0 gain 0 111 2 894 1 1 +4 63 risky 613 103 360 0 gain 0 0 2 894 1 1 +4 64 safe 891 36 0 172 loss 0 -36 2 894 0 -1 +4 65 risky 1229 76 0 179 loss 0 -179 0 1002 1 -1 +4 66 safe 1915 0 98 149 mixed 0 0 0 1002 0 0 +4 67 risky 928 123 0 243 loss 0 0 0 1002 1 -1 +4 68 risky 883 0 159 158 mixed 0 159 1 571 1 0 +4 69 risky 411 37 127 0 gain 0 0 1 571 1 1 +4 70 risky 1488 83 0 249 loss 0 0 0 1771 1 -1 +4 71 safe 436 37 0 203 loss 0 -37 0 1771 0 -1 +4 72 risky 630 0 156 106 mixed 0 -106 0 1771 1 0 +4 73 risky 497 0 223 144 mixed 0 -144 0 593 1 0 +4 74 risky 477 0 221 117 mixed 0 221 0 593 1 0 +4 75 risky 447 81 173 0 gain 0 0 0 593 1 1 +4 76 risky 491 124 299 0 gain 0 0 0 448 1 1 +4 77 risky 3490 0 101 67 mixed 0 -67 0 448 1 0 +4 78 risky 603 0 57 34 mixed 0 -34 0 808 1 0 +4 79 risky 1740 96 0 245 loss 0 -245 0 808 1 -1 +4 80 risky 60 0 62 117 mixed 0 62 -1 2861 1 0 +4 81 risky 503 60 213 0 gain 1 0 -1 2861 1 1 +4 82 risky 184 41 108 0 gain 0 0 -1 1771 1 1 +4 83 safe 1134 57 0 105 loss 0 -57 -1 1771 0 -1 +4 84 risky 1143 39 0 74 loss 0 -74 -1 1771 1 -1 +4 85 safe 776 84 0 333 loss 0 -84 -2 650 0 -1 +4 86 risky 441 57 256 0 gain 0 0 -2 650 1 1 +4 87 risky 2638 77 131 0 gain 0 0 -2 2262 1 1 +4 88 risky 376 0 104 56 mixed 0 -56 -2 2262 1 0 +4 89 risky 1022 97 0 315 loss 0 -315 -3 827 1 -1 +4 90 safe 261 99 0 219 loss 0 -99 -3 827 0 -1 +4 91 risky 847 98 0 184 loss 0 0 -2 938 1 -1 +4 92 risky 1344 57 133 0 gain 0 133 -2 938 1 1 +4 93 risky 409 0 101 42 mixed 0 -42 -2 938 1 0 +4 94 risky 425 82 156 0 gain 0 0 0 1091 1 1 +4 95 risky 499 0 222 441 mixed 0 222 0 1091 1 0 +4 96 safe 951 76 147 0 gain 0 76 0 1091 0 1 +4 97 risky 82 0 300 92 mixed 0 300 1 1894 1 0 +4 98 risky 460 58 149 0 gain 0 0 1 1894 1 1 +4 99 safe 91 0 104 197 mixed 0 0 1 1894 0 0 +4 100 risky 876 56 165 0 gain 0 0 -1 1129 1 1 +4 101 risky 1097 43 168 0 gain 0 168 -1 1129 1 1 +4 102 safe 860 58 0 186 loss 0 -58 1 1686 0 -1 +4 103 safe 1226 81 0 286 loss 0 -81 1 1686 0 -1 +4 104 risky 1043 0 63 41 mixed 0 -41 0 1775 1 0 +4 105 risky 543 0 218 45 mixed 0 218 0 1775 1 0 +4 106 risky 414 0 217 65 mixed 0 -65 0 1205 1 0 +4 107 risky 408 42 67 0 gain 0 0 0 1205 1 1 +4 108 safe 412 63 0 171 loss 0 -63 0 1205 0 -1 +4 109 risky 428 0 163 191 mixed 0 163 -1 742 1 0 +4 110 risky 528 0 302 160 mixed 0 302 -1 742 1 0 +4 111 risky 368 103 197 0 gain 0 197 -1 742 1 1 +4 112 safe 565 117 0 601 loss 0 -117 3 912 0 -1 +4 113 risky 435 0 163 66 mixed 0 -66 3 912 1 0 +4 114 risky 1003 0 299 59 mixed 0 -59 -1 695 1 0 +4 115 risky 595 99 423 0 gain 0 423 -1 695 1 1 +4 116 risky 324 0 221 88 mixed 0 221 1 1538 1 0 +4 117 risky 1551 123 0 264 loss 0 0 1 1538 1 -1 +4 118 safe 1445 43 0 98 loss 0 -43 2 661 0 -1 +4 119 risky 364 102 226 0 gain 0 226 2 661 1 1 +4 120 risky 1307 80 0 134 loss 0 -134 1 1858 1 -1 +4 121 risky 394 0 302 447 mixed 0 -447 1 1858 1 0 +4 122 risky 519 0 299 196 mixed 0 299 -2 1036 1 0 +4 123 risky 1050 0 57 26 mixed 0 57 -2 1036 1 0 +4 124 safe 624 80 0 221 loss 0 -80 -2 1036 0 -1 +4 125 risky 720 118 220 0 gain 0 0 0 725 1 1 +4 126 risky 449 118 340 0 gain 0 340 0 725 1 1 +4 127 safe 669 0 96 121 mixed 0 0 0 725 0 0 +4 128 safe 337 37 0 116 loss 0 -37 0 490 0 -1 +4 129 risky 1343 0 60 86 mixed 0 60 0 490 1 0 +4 130 safe 484 80 0 397 loss 0 -80 0 533 0 -1 +4 131 risky 335 0 217 224 mixed 0 
-224 0 533 1 0 +4 132 safe 368 56 0 302 loss 0 -56 0 327 0 -1 +4 133 safe 331 77 0 160 loss 0 -77 0 327 0 -1 +4 134 risky 24 43 84 0 gain 0 84 0 327 1 1 +4 135 safe 389 97 0 422 loss 0 -97 0 327 0 -1 +4 136 safe 781 60 0 131 loss 0 -60 0 327 0 -1 +4 137 safe 250 121 0 220 loss 0 -121 0 327 0 -1 +4 138 risky 353 58 122 0 gain 0 0 0 480 1 1 +4 139 risky 447 99 0 170 loss 0 -170 0 480 1 -1 +4 140 risky 266 123 198 0 gain 0 0 0 480 1 1 +4 141 safe 643 38 0 145 loss 0 -38 0 527 0 -1 +4 142 risky 321 79 289 0 gain 0 0 0 527 1 1 +4 143 risky 370 40 75 0 gain 0 0 0 527 1 1 +4 144 risky 347 0 62 53 mixed 0 62 -2 2527 1 0 +4 145 risky 468 103 313 0 gain 0 0 -2 2527 1 1 +4 146 risky 743 0 162 323 mixed 0 -323 0 1309 1 0 +4 147 risky 2941 40 141 0 gain 0 141 0 1309 1 1 +4 148 risky 1030 0 103 27 mixed 0 103 0 446 1 0 +4 149 safe 611 99 0 503 loss 0 -99 0 446 0 -1 +4 150 safe 1960 122 0 334 loss 0 -122 0 446 0 -1 +5 1 risky 1413 103 0 501 loss 0 0 0 2372 1 -1 +5 2 safe 288 41 0 85 loss 0 -41 0 2372 0 -1 +5 3 safe 915 79 0 291 loss 0 -79 0 2372 0 -1 +5 4 risky 990 80 145 0 gain 0 145 0 1917 1 1 +5 5 risky 704 0 221 224 mixed 0 -224 0 1917 1 0 +5 6 risky 141 0 303 596 mixed 0 -596 -1 1218 1 0 +5 7 risky 434 0 157 44 mixed 0 -44 -1 1218 1 0 +5 8 safe 734 36 86 0 gain 0 36 -1 1218 0 1 +5 9 risky 750 84 0 141 loss 0 0 -1 1715 1 -1 +5 10 safe 844 63 169 0 gain 0 63 -1 1715 0 1 +5 11 risky 435 0 220 149 mixed 0 220 -1 1715 1 0 +5 12 risky 640 0 303 200 mixed 0 -200 1 1348 1 0 +5 13 risky 1055 0 161 31 mixed 0 161 1 1348 1 0 +5 14 risky 539 84 226 0 gain 0 0 1 1814 1 1 +5 15 risky 149 0 100 48 mixed 0 -48 1 1814 1 0 +5 16 safe 772 0 158 159 mixed 0 0 1 1814 0 0 +5 17 risky 890 123 0 238 loss 0 0 0 1531 1 -1 +5 18 risky 636 0 301 244 mixed 0 301 0 1531 1 0 +5 19 risky 1118 0 164 190 mixed 0 164 0 1531 1 0 +5 20 risky 220 0 104 41 mixed 0 104 2 1240 1 0 +5 21 safe 849 0 63 70 mixed 0 0 2 1240 0 0 +5 22 risky 885 60 0 171 loss 0 0 1 1120 1 -1 +5 23 risky 222 38 200 0 gain 0 0 1 1120 1 1 +5 24 risky 590 0 220 41 mixed 0 -41 1 1120 1 0 +5 25 risky 816 76 177 0 gain 0 0 0 2236 1 1 +5 26 safe 579 0 223 182 mixed 0 0 0 2236 0 0 +5 27 risky 656 84 287 0 gain 0 287 0 1454 1 1 +5 28 safe 864 97 0 361 loss 0 -97 0 1454 0 -1 +5 29 risky 970 37 0 76 loss 0 0 -1 1625 1 -1 +5 30 risky 1232 123 0 221 loss 0 0 -1 1625 1 -1 +5 31 safe 821 38 0 164 loss 0 -38 3 1293 0 -1 +5 32 safe 938 39 0 203 loss 0 -39 3 1293 0 -1 +5 33 safe 789 123 0 503 loss 0 -123 3 1293 0 -1 +5 34 risky 931 0 60 88 mixed 0 60 -1 1131 1 0 +5 35 risky 430 60 118 0 gain 0 0 -1 1131 1 1 +5 36 safe 1011 116 0 340 loss 0 -116 -1 1131 0 -1 +5 37 safe 895 79 0 173 loss 0 -79 1 1222 0 -1 +5 38 risky 1029 101 0 314 loss 0 -314 1 1222 1 -1 +5 39 risky 790 0 224 110 mixed 0 224 1 1222 1 0 +5 40 risky 1217 60 0 120 loss 0 -120 -1 1166 1 -1 +5 41 risky 676 0 218 261 mixed 0 218 -1 1166 1 0 +5 42 risky 725 43 71 0 gain 0 71 -1 1166 1 1 +5 43 safe 839 98 0 284 loss 0 -98 2 1245 0 -1 +5 44 risky 1430 58 0 187 loss 0 0 2 1245 1 -1 +5 45 risky 786 98 423 0 gain 0 0 2 1245 1 1 +5 46 safe 997 0 156 130 mixed 0 0 1 2042 0 0 +5 47 risky 601 43 165 0 gain 0 0 1 2042 1 1 +5 48 risky 18 59 215 0 gain 0 215 1 2042 1 1 +5 49 risky 1843 120 0 301 loss 0 0 3 2216 1 -1 +5 50 safe 976 0 63 42 mixed 0 0 3 2216 0 0 +5 51 risky 742 42 116 0 gain 0 0 0 1216 1 1 +5 52 safe 1086 39 0 112 loss 0 -39 0 1216 0 -1 +5 53 risky 942 43 0 64 loss 0 -64 0 1216 1 -1 +5 54 risky 1535 77 160 0 gain 0 160 -1 573 1 1 +5 55 risky 1100 37 128 0 gain 0 128 -1 573 1 1 +5 56 risky 1199 121 265 0 gain 0 265 3 
1015 1 1 +5 57 risky 800 123 205 0 gain 0 0 3 1015 1 1 +5 58 safe 1077 0 162 322 mixed 0 0 1 1827 0 0 +5 59 risky 705 116 335 0 gain 0 335 1 1827 1 1 +5 60 risky 817 58 252 0 gain 0 252 1 1827 1 1 +5 61 safe 1068 0 98 62 mixed 0 0 3 1060 0 0 +5 62 safe 1034 117 0 428 loss 0 -117 3 1060 0 -1 +5 63 risky 1105 0 103 151 mixed 0 -151 3 1060 1 0 +5 64 risky 1023 124 293 0 gain 0 293 -2 707 1 1 +5 65 safe 857 0 60 61 mixed 0 0 -2 707 0 0 +5 66 safe 1198 0 300 298 mixed 0 0 0 1657 0 0 +5 67 risky 917 100 497 0 gain 0 497 0 1657 1 1 +5 68 safe 839 63 0 301 loss 0 -63 0 1657 0 -1 +5 69 safe 1020 119 0 379 loss 0 -119 0 662 0 -1 +5 70 risky 830 78 335 0 gain 0 335 0 662 1 1 +5 71 risky 1145 0 164 83 mixed 0 164 1 1074 1 0 +5 72 risky 946 0 102 23 mixed 0 -23 1 1074 1 0 +5 73 risky 1198 60 0 129 loss 0 0 0 947 1 -1 +5 74 risky 189 0 301 156 mixed 0 -156 0 947 1 0 +5 75 risky 889 0 63 28 mixed 0 63 0 947 1 0 +5 76 risky 607 83 138 0 gain 0 138 1 1973 1 1 +5 77 safe 1003 99 0 222 loss 0 -99 1 1973 0 -1 +5 78 risky 875 80 400 0 gain 0 0 1 1973 1 1 +5 79 risky 1292 100 0 179 loss 0 0 1 2038 1 -1 +5 80 safe 1613 80 0 164 loss 0 -80 1 2038 0 -1 +5 81 risky 623 0 218 66 mixed 0 218 1 2038 1 0 +5 82 safe 1006 57 0 110 loss 0 -57 1 543 0 -1 +5 83 risky 849 116 428 0 gain 0 0 1 543 1 1 +5 84 safe 1007 43 0 130 loss 0 -43 1 2127 0 -1 +5 85 risky 1154 39 0 96 loss 0 0 1 2127 1 -1 +5 86 risky 954 61 190 0 gain 0 0 1 2127 1 1 +5 87 safe 832 96 0 244 loss 0 -96 1 720 0 -1 +5 88 risky 688 59 148 0 gain 0 0 1 720 1 1 +5 89 risky 835 44 143 0 gain 0 0 1 720 1 1 +5 90 safe 686 0 57 118 mixed 0 0 -1 839 0 0 +5 91 risky 600 96 170 0 gain 0 0 -1 839 1 1 +5 92 risky 717 0 62 19 mixed 0 -19 0 1046 1 0 +5 93 safe 885 0 158 240 mixed 0 0 0 1046 0 0 +5 94 risky 840 40 68 0 gain 0 68 0 1046 1 1 +5 95 risky 905 120 0 205 loss 0 0 1 1086 1 -1 +5 96 risky 766 104 222 0 gain 0 0 1 1086 1 1 +5 97 safe 763 60 0 249 loss 0 -60 1 1344 0 -1 +5 98 safe 939 0 217 443 mixed 0 0 1 1344 0 0 +5 99 risky 815 0 56 9 mixed 0 56 1 1344 1 0 +5 100 safe 631 76 0 403 loss 0 -76 1 1411 0 -1 +5 101 risky 756 0 102 199 mixed 0 102 1 1411 1 0 +5 102 risky 1026 0 222 331 mixed 0 -331 1 1411 1 0 +5 103 safe 1070 104 0 419 loss 0 -104 -2 1054 0 -1 +5 104 risky 811 62 132 0 gain 0 132 -2 1054 1 1 +5 105 risky 1091 64 303 0 gain 0 303 2 1088 1 1 +5 106 risky 667 0 298 116 mixed 0 298 2 1088 1 0 +5 107 safe 880 80 0 194 loss 0 -80 1 587 0 -1 +5 108 safe 1294 0 97 81 mixed 0 0 1 587 0 0 +5 109 risky 689 0 100 31 mixed 0 -31 -1 808 1 0 +5 110 risky 820 57 104 0 gain 0 0 -1 808 1 1 +5 111 safe 639 0 61 46 mixed 0 0 -2 961 0 0 +5 112 risky 783 118 377 0 gain 0 0 -2 961 1 1 +5 113 risky 816 101 358 0 gain 0 358 -1 862 1 1 +5 114 risky 747 84 0 136 loss 0 -136 -1 862 1 -1 +5 115 safe 954 76 0 253 loss 0 -76 -1 862 0 -1 +5 116 risky 1464 122 242 0 gain 0 0 -3 1262 1 1 +5 117 risky 845 82 194 0 gain 0 0 -3 1262 1 1 +5 118 safe 907 58 0 214 loss 0 -58 -3 1262 0 -1 +5 119 safe 973 96 0 164 loss 0 -96 -4 1169 0 -1 +5 120 risky 801 122 503 0 gain 0 0 -4 1169 1 1 +5 121 risky 839 0 157 64 mixed 0 157 -4 1169 1 0 +5 122 safe 1103 36 0 141 loss 0 -36 0 1215 0 -1 +5 123 risky 875 0 100 119 mixed 0 -119 0 1215 1 0 +5 124 risky 873 41 98 0 gain 0 0 -1 796 1 1 +5 125 risky 803 96 283 0 gain 0 0 -1 796 1 1 +5 126 risky 734 103 250 0 gain 0 0 -1 796 1 1 +5 127 safe 994 83 0 221 loss 0 -83 -1 576 0 -1 +5 128 risky 574 80 251 0 gain 0 0 -1 576 1 1 +5 129 risky 1024 40 0 79 loss 0 0 -2 929 1 -1 +5 130 risky 582 97 314 0 gain 0 314 -2 929 1 1 +5 131 risky 594 61 107 0 gain 0 107 -2 
929 1 1 +5 132 risky 688 100 198 0 gain 0 0 2 1097 1 1 +5 133 safe 971 0 99 99 mixed 0 0 2 1097 0 0 +5 134 risky 726 0 301 90 mixed 0 301 0 734 1 0 +5 135 risky 1385 0 163 109 mixed 0 -109 0 734 1 0 +5 136 risky 753 122 0 269 loss 1 0 -1 970 1 -1 +5 137 risky 844 60 0 98 loss 0 -98 -1 970 1 -1 +5 138 risky 832 103 184 0 gain 0 0 -1 731 1 1 +5 139 safe 737 97 0 201 loss 0 -97 -1 731 0 -1 +5 140 risky 770 0 304 60 mixed 0 -60 -2 890 1 0 +5 141 safe 832 64 0 151 loss 0 -64 -2 890 0 -1 +5 142 safe 740 77 0 334 loss 0 -77 -3 1066 0 -1 +5 143 risky 696 0 61 24 mixed 0 61 -3 1066 1 0 +5 144 risky 818 0 219 84 mixed 0 219 -1 680 1 0 +5 145 safe 707 120 0 600 loss 0 -120 -1 680 0 -1 +5 146 risky 910 0 303 359 mixed 0 303 -1 489 1 0 +5 147 risky 651 122 598 0 gain 0 598 -1 489 1 1 +5 148 safe 967 0 301 454 mixed 0 0 -1 489 0 0 +5 149 risky 826 119 216 0 gain 0 216 2 808 1 1 +5 150 risky 876 41 80 0 gain 0 80 2 808 1 1 +6 1 risky 8683 0 63 10 mixed 0 -10 0 38019 1 0 +6 2 risky 6548 119 236 0 gain 0 0 0 38019 1 1 +6 3 risky 11388 99 312 0 gain 0 312 -1 2893 1 1 +6 4 risky 4920 0 302 56 mixed 0 -56 -1 2893 1 0 +6 5 risky 5889 0 221 48 mixed 0 -48 -1 1767 1 0 +6 6 safe 1711 0 299 356 mixed 0 0 -1 1767 0 0 +6 7 safe 2687 63 120 0 gain 0 63 0 3312 0 1 +6 8 risky 2482 77 340 0 gain 0 0 0 3312 1 1 +6 9 risky 1313 79 222 0 gain 0 222 -1 4237 1 1 +6 10 risky 3219 43 0 70 loss 0 -70 -1 4237 1 -1 +6 11 risky 840 101 416 0 gain 0 0 -1 2893 1 1 +6 12 risky 2690 0 304 158 mixed 0 304 -1 2893 1 0 +6 13 risky 5480 57 170 0 gain 0 170 1 3609 1 1 +6 14 risky 6195 117 506 0 gain 0 0 1 3609 1 1 +6 15 safe 1799 44 0 204 loss 0 -44 1 3609 0 -1 +6 16 risky 3339 101 0 182 loss 0 -182 0 1761 1 -1 +6 17 safe 1679 37 0 108 loss 0 -37 0 1761 0 -1 +6 18 safe 2923 121 0 378 loss 0 -121 0 1761 0 -1 +6 19 safe 3155 0 64 76 mixed 0 0 -2 2754 0 0 +6 20 safe 4114 58 0 192 loss 0 -58 -2 2754 0 -1 +6 21 risky 3842 59 191 0 gain 0 191 -2 2754 1 1 +6 22 risky 1839 83 396 0 gain 0 396 0 2197 1 1 +6 23 risky 3984 0 101 84 mixed 0 101 0 2197 1 0 +6 24 safe 1262 57 0 254 loss 0 -57 0 2197 0 -1 +6 25 safe 2746 42 0 147 loss 0 -42 0 2171 0 -1 +6 26 risky 2941 0 220 111 mixed 0 -111 0 2171 1 0 +6 27 risky 3376 37 130 0 gain 0 130 0 2171 1 1 +6 28 risky 1224 116 602 0 gain 0 0 1 1356 1 1 +6 29 risky 2118 56 212 0 gain 0 212 1 1356 1 1 +6 30 risky 3519 100 0 165 loss 0 0 1 1356 1 -1 +6 31 safe 1538 97 0 501 loss 0 -97 2 959 0 -1 +6 32 risky 1818 119 378 0 gain 0 0 2 959 1 1 +6 33 safe 2836 98 0 359 loss 0 -98 -1 1881 0 -1 +6 34 risky 6887 83 0 179 loss 0 -179 -1 1881 1 -1 +6 35 risky 972 36 114 0 gain 0 0 -1 1176 1 1 +6 36 risky 4252 0 97 67 mixed 0 97 -1 1176 1 0 +6 37 safe 2055 0 156 164 mixed 0 0 0 1240 0 0 +6 38 risky 1016 0 158 35 mixed 0 158 0 1240 1 0 +6 39 safe 1023 58 0 217 loss 0 -58 0 1240 0 -1 +6 40 risky 3274 44 0 71 loss 0 -71 0 3700 1 -1 +6 41 risky 1661 102 276 0 gain 1 276 0 3700 1 1 +6 42 risky 2404 0 304 247 mixed 0 304 1 884 1 0 +6 43 safe 5414 38 66 0 gain 0 38 1 884 0 1 +6 44 risky 4757 119 263 0 gain 0 263 1 884 1 1 +6 45 risky 2906 100 0 223 loss 0 0 2 5593 1 -1 +6 46 risky 2634 0 104 44 mixed 0 104 2 5593 1 0 +6 47 risky 900 43 142 0 gain 0 0 2 5593 1 1 +6 48 risky 804 0 296 94 mixed 0 296 1 1667 1 0 +6 49 risky 7162 0 224 264 mixed 0 224 1 1667 1 0 +6 50 safe 1344 80 0 252 loss 0 -80 2 1556 0 -1 +6 51 risky 2165 103 356 0 gain 0 0 2 1556 1 1 +6 52 risky 2000 44 197 0 gain 0 0 -1 836 1 1 +6 53 safe 2276 101 198 0 gain 0 101 -1 836 0 1 +6 54 risky 4122 38 103 0 gain 0 0 -1 836 1 1 +6 55 risky 3216 117 0 215 loss 0 0 0 2517 
1 -1 +6 56 risky 2745 42 88 0 gain 0 0 0 2517 1 1 +6 57 risky 4050 80 0 156 loss 0 0 0 2517 1 -1 +6 58 safe 4801 98 165 0 gain 0 98 1 1145 0 1 +6 59 risky 4523 104 502 0 gain 0 502 1 1145 1 1 +6 60 risky 2626 0 100 23 mixed 0 100 1 1145 1 0 +6 61 safe 2020 0 59 89 mixed 0 0 2 927 0 0 +6 62 safe 1931 0 96 117 mixed 0 0 2 927 0 0 +6 63 safe 1416 77 0 334 loss 0 -77 1 1456 0 -1 +6 64 safe 1401 97 0 282 loss 0 -97 1 1456 0 -1 +6 65 risky 3129 102 0 198 loss 0 -198 1 1456 1 -1 +6 66 safe 1867 122 0 505 loss 0 -122 0 3611 0 -1 +6 67 risky 3273 0 104 98 mixed 0 104 0 3611 1 0 +6 68 risky 2041 99 218 0 gain 0 218 2 2511 1 1 +6 69 risky 1201 0 58 29 mixed 0 -29 2 2511 1 0 +6 70 safe 5182 63 0 134 loss 0 -63 1 1679 0 -1 +6 71 safe 3177 124 218 0 gain 0 124 1 1679 0 1 +6 72 safe 584 77 0 287 loss 0 -77 1 1261 0 -1 +6 73 risky 2418 82 291 0 gain 0 0 1 1261 1 1 +6 74 risky 3964 44 0 102 loss 0 0 0 1136 1 -1 +6 75 risky 3794 82 0 148 loss 0 0 0 1136 1 -1 +6 76 safe 2515 36 0 89 loss 0 -36 2 1383 0 -1 +6 77 risky 265 117 435 0 gain 0 0 2 1383 1 1 +6 78 risky 6888 57 0 164 loss 0 -164 2 1383 1 -1 +6 79 safe 2396 0 57 63 mixed 0 0 -1 1026 0 0 +6 80 risky 1278 0 297 121 mixed 0 -121 -1 1026 1 0 +6 81 risky 5505 0 296 297 mixed 0 -297 -1 1026 1 0 +6 82 risky 3784 0 61 36 mixed 0 61 0 1676 1 0 +6 83 risky 1794 77 0 136 loss 0 0 0 1676 1 -1 +6 84 risky 2456 119 0 264 loss 0 -264 1 3992 1 -1 +6 85 safe 4586 101 0 313 loss 0 -101 1 3992 0 -1 +6 86 risky 1296 96 244 0 gain 0 0 1 3992 1 1 +6 87 safe 9041 0 98 197 mixed 0 0 1 5878 0 0 +6 88 safe 1707 103 0 252 loss 0 -103 1 5878 0 -1 +6 89 safe 1719 0 99 149 mixed 0 0 1 5878 0 0 +6 90 safe 5513 36 69 0 gain 0 36 -1 1086 0 1 +6 91 safe 2391 61 105 0 gain 0 61 -1 1086 0 1 +6 92 safe 857 64 0 304 loss 0 -64 -1 1086 0 -1 +6 93 safe 5282 118 0 299 loss 0 -118 0 4927 0 -1 +6 94 risky 6335 79 181 0 gain 0 0 0 4927 1 1 +6 95 risky 2827 123 0 198 loss 0 0 -2 1551 1 -1 +6 96 risky 1180 0 159 86 mixed 0 -86 -2 1551 1 0 +6 97 risky 1852 0 56 46 mixed 0 -46 0 7386 1 0 +6 98 risky 2467 0 220 224 mixed 0 220 0 7386 1 0 +6 99 safe 3255 99 178 0 gain 0 99 2 941 0 1 +6 100 safe 1427 59 0 102 loss 0 -59 2 941 0 -1 +6 101 safe 3837 83 0 197 loss 0 -83 2 941 0 -1 +6 102 safe 1837 0 217 436 mixed 0 0 -1 843 0 0 +6 103 risky 3683 0 220 70 mixed 0 220 -1 843 1 0 +6 104 safe 747 61 104 0 gain 0 61 1 1050 0 1 +6 105 safe 2730 0 160 190 mixed 0 0 1 1050 0 0 +6 106 safe 1027 83 144 0 gain 0 83 -1 1258 0 1 +6 107 safe 1506 0 300 603 mixed 0 0 -1 1258 0 0 +6 108 safe 2397 84 161 0 gain 0 84 0 704 0 1 +6 109 safe 852 120 0 432 loss 0 -120 0 704 0 -1 +6 110 risky 1251 64 301 0 gain 0 301 -1 1932 1 1 +6 111 risky 1520 43 166 0 gain 0 166 -1 1932 1 1 +6 112 risky 5107 39 0 77 loss 0 -77 -1 1932 1 -1 +6 113 safe 1632 123 0 597 loss 0 -123 -1 1352 0 -1 +6 114 risky 3292 81 196 0 gain 0 0 -1 1352 1 1 +6 115 safe 789 0 60 119 mixed 0 0 -1 1352 0 0 +6 116 risky 164 0 297 198 mixed 0 297 -2 1108 1 0 +6 117 risky 1374 0 64 19 mixed 0 64 -2 1108 1 0 +6 118 safe 4612 119 0 334 loss 0 -119 -2 1108 0 -1 +6 119 risky 441 0 158 134 mixed 0 158 -1 5816 1 0 +6 120 risky 224 0 56 23 mixed 0 56 -1 5816 1 0 +6 121 safe 6709 62 0 151 loss 0 -62 1 992 0 -1 +6 122 risky 1360 0 161 45 mixed 0 -45 1 992 1 0 +6 123 risky 223 0 156 68 mixed 0 -68 1 992 1 0 +6 124 safe 2146 40 0 165 loss 0 -40 -1 928 0 -1 +6 125 risky 4720 0 219 146 mixed 0 -146 -1 928 1 0 +6 126 risky 1186 0 161 102 mixed 0 161 -1 820 1 0 +6 127 safe 1117 123 201 0 gain 0 123 -1 820 0 1 +6 128 safe 3741 101 0 422 loss 0 -101 -1 820 0 -1 +6 129 risky 3362 
59 0 116 loss 0 -116 -1 1713 1 -1
+6 130 risky 3963 0 216 327 mixed 0 -327 -1 1713 1 0
+6 131 risky 2562 57 146 0 gain 0 0 -1 1713 1 1
+6 132 risky 54 58 250 0 gain 0 250 -1 3406 1 1
+6 133 risky 2451 40 83 0 gain 0 0 -1 3406 1 1
+6 134 risky 56 116 340 0 gain 0 0 -1 3406 1 1
+6 135 risky 3118 124 0 244 loss 0 -244 -1 994 1 -1
+6 136 safe 1001 0 300 453 mixed 0 0 -1 994 0 0
+6 137 safe 558 77 0 225 loss 0 -77 0 548 0 -1
+6 138 risky 2346 0 100 51 mixed 0 -51 0 548 1 0
+6 139 safe 5850 0 158 242 mixed 0 0 0 548 0 0
+6 140 risky 1415 0 222 85 mixed 0 222 -1 1771 1 0
+6 141 safe 2947 36 0 129 loss 0 -36 -1 1771 0 -1
+6 142 risky 3290 119 299 0 gain 0 299 -1 1771 1 1
+6 143 safe 6930 78 130 0 gain 0 78 1 1182 0 1
+6 144 risky 5721 59 0 110 loss 0 -110 1 1182 1 -1
+6 145 risky 641 0 221 177 mixed 0 221 1 1182 1 0
+6 146 safe 1530 0 161 323 mixed 0 0 1 1031 0 0
+6 147 risky 336 0 99 30 mixed 0 -30 1 1031 1 0
+6 148 risky 190 61 133 0 gain 0 0 1 1031 1 1
+6 149 risky 26 76 255 0 gain 0 255 -2 823 1 1
+6 150 safe 1139 80 0 396 loss 0 -80 -2 823 0 -1
diff --git a/R/inst/extdata/ts_exampleData.txt b/R/inst/extdata/ts_exampleData.txt
new file mode 100644
index 00000000..648f94b9
--- /dev/null
+++ b/R/inst/extdata/ts_exampleData.txt
@@ -0,0 +1,2191 @@
+subjID trial level1_choice level2_choice reward A1prob A2prob B1prob B2prob
+1 2 1 4 1 0.73174 0.44094 0.28525 0.42124
+1 3 1 1 1 0.72582 0.3864 0.30663 0.39319
+1 4 2 1 1 0.7296 0.41459 0.30549 0.34948
+1 5 1 3 0 0.77339 0.40618 0.31232 0.3926
+1 6 1 1 1 0.75457 0.45989 0.30146 0.39908
+1 7 1 1 1 0.799 0.47671 0.30695 0.4193
+1 8 1 3 1 0.8 0.4705 0.28921 0.43012
+1 9 1 4 1 0.8 0.4414 0.32746 0.40748
+1 10 2 4 0 0.79121 0.44951 0.34192 0.4238
+1 11 2 1 0 0.8 0.45063 0.30527 0.41502
+1 12 1 3 0 0.8 0.46023 0.30255 0.43582
+1 13 1 2 0 0.7713 0.45539 0.3145 0.41748
+1 14 2 1 1 0.77967 0.46743 0.33255 0.41147
+1 15 2 4 1 0.8 0.44997 0.33142 0.43247
+1 16 1 1 1 0.8 0.46545 0.38953 0.40187
+1 17 2 4 0 0.78989 0.43383 0.44462 0.39286
+1 18 1 1 1 0.8 0.45304 0.45707 0.41177
+1 19 1 1 1 0.8 0.46451 0.4644 0.35639
+1 20 1 1 1 0.8 0.46125 0.49334 0.33543
+1 21 1 1 0 0.8 0.49285 0.47484 0.36058
+1 22 1 4 0 0.8 0.49623 0.48841 0.34768
+1 23 2 3 1 0.77469 0.54065 0.50539 0.32396
+1 24 2 2 1 0.77481 0.58668 0.50524 0.32207
+1 25 2 3 1 0.78178 0.62035 0.46226 0.32988
+1 26 2 3 1 0.7996 0.59698 0.5076 0.37398
+1 27 2 3 0 0.8 0.61101 0.51855 0.37097
+1 28 2 4 0 0.8 0.57941 0.49362 0.33811
+1 29 1 1 1 0.75907 0.58061 0.49262 0.34061
+1 30 1 1 1 0.78157 0.60034 0.47932 0.32465
+1 31 1 4 1 0.73941 0.57595 0.41336 0.31351
+1 32 1 1 1 0.78407 0.57293 0.40238 0.31508
+1 33 1 1 1 0.7673 0.55497 0.44794 0.32404
+1 34 1 1 1 0.74815 0.57301 0.45619 0.30755
+1 35 1 1 1 0.76077 0.55076 0.45351 0.23356
+1 36 1 1 0 0.78983 0.53785 0.45 0.2218
+1 37 1 4 0 0.79931 0.53644 0.43941 0.25251
+1 38 1 2 0 0.78409 0.52744 0.44277 0.25328
+1 39 2 3 0 0.79235 0.54545 0.42458 0.28172
+1 40 2 4 1 0.7884 0.53537 0.40774 0.30555
+1 41 2 4 0 0.8 0.5217 0.44137 0.30486
+1 42 1 3 0 0.7987 0.53313 0.44258 0.29581
+1 43 1 1 0 0.75319 0.5575 0.46962 0.29889
+1 44 1 2 0 0.75826 0.57211 0.49623 0.34481
+1 45 2 4 0 0.8 0.59358 0.50784 0.33974
+1 46 1 2 1 0.8 0.58261 0.49178 0.31495
+1 47 1 2 0 0.76387 0.51143 0.50769 0.34591
+1 48 1 3 1 0.7373 0.55849 0.4958 0.34391
+1 49 2 3 1 0.71163 0.55437 0.50188 0.37737
+1 50 2 3 1 0.7274 0.55684 0.49608 0.42051
+1 51 2 3 1 0.74133 0.51026 0.50806 0.39224
+1 52 2 3 0 0.78899 0.52159 0.53676 0.39005
+1 53 2 4 0 0.8 0.5142 0.57107 0.33701
+1 54 1 3 1 0.8 0.55215 0.56694
0.31545 +1 55 1 2 1 0.8 0.53609 0.53305 0.30683 +1 56 1 1 1 0.8 0.51736 0.51624 0.29661 +1 57 1 1 1 0.8 0.55649 0.57046 0.30073 +1 58 1 4 0 0.77863 0.54926 0.57542 0.31415 +1 59 1 3 0 0.78765 0.57095 0.5805 0.28316 +1 60 1 1 0 0.7736 0.54228 0.58221 0.23798 +1 61 1 2 0 0.8 0.55273 0.52453 0.2241 +1 62 2 2 1 0.77377 0.54429 0.52093 0.24853 +1 63 1 2 1 0.8 0.53118 0.48452 0.22815 +1 64 1 3 0 0.8 0.5621 0.5142 0.24439 +1 65 1 3 0 0.8 0.58121 0.52545 0.24843 +1 66 1 2 1 0.8 0.59505 0.53803 0.23704 +1 67 2 4 0 0.8 0.61952 0.54213 0.20897 +1 68 1 2 1 0.8 0.5983 0.5531 0.24432 +1 69 1 2 1 0.78218 0.65305 0.57632 0.26855 +1 70 1 2 1 0.74435 0.68187 0.58155 0.30696 +1 71 1 2 1 0.75476 0.68078 0.57166 0.31697 +1 72 1 2 0 0.7518 0.67198 0.59557 0.30499 +1 73 1 1 1 0.77418 0.6968 0.58319 0.32965 +1 74 1 1 1 0.74976 0.71575 0.64715 0.2999 +1 75 1 4 1 0.76123 0.70332 0.63275 0.30766 +1 76 2 4 0 0.75946 0.70432 0.61657 0.30659 +1 77 2 1 1 0.8 0.69223 0.64135 0.32633 +1 78 1 1 1 0.8 0.67848 0.62949 0.29921 +1 79 1 1 0 0.76968 0.66689 0.64594 0.31559 +1 80 1 4 0 0.767 0.66963 0.62129 0.32788 +1 81 2 3 1 0.75012 0.63656 0.60248 0.34237 +1 82 2 3 1 0.7351 0.68337 0.63189 0.30771 +1 83 2 3 1 0.74526 0.67142 0.6594 0.30594 +1 84 2 1 1 0.76226 0.68819 0.6318 0.27628 +1 85 1 1 1 0.7758 0.73023 0.58491 0.29002 +1 86 1 1 1 0.77074 0.74821 0.58291 0.28925 +1 87 1 1 1 0.77089 0.79434 0.57504 0.32894 +1 88 1 1 1 0.74567 0.8 0.55285 0.30923 +1 89 1 1 1 0.7727 0.8 0.59163 0.31176 +1 90 1 1 1 0.79157 0.8 0.5741 0.33049 +1 91 1 1 1 0.8 0.8 0.56745 0.33548 +1 92 1 3 0 0.8 0.77512 0.59173 0.36604 +1 93 1 1 0 0.77964 0.77689 0.65552 0.29529 +1 94 1 4 0 0.72323 0.77346 0.68053 0.28964 +1 95 1 4 0 0.7587 0.79182 0.68303 0.28661 +1 96 1 2 1 0.76904 0.78153 0.69918 0.25219 +1 97 1 2 1 0.77612 0.8 0.7122 0.27558 +1 98 1 4 1 0.79077 0.79734 0.71788 0.28339 +1 99 2 4 1 0.76885 0.778 0.73227 0.29194 +1 100 2 2 0 0.72235 0.76099 0.72207 0.28469 +1 101 2 1 1 0.75343 0.75863 0.68128 0.29834 +1 102 1 1 1 0.77836 0.75896 0.6992 0.29074 +1 103 1 1 0 0.76782 0.74809 0.67502 0.27929 +1 104 2 4 0 0.76299 0.79317 0.66158 0.31297 +1 105 2 3 1 0.76924 0.8 0.64813 0.30434 +1 106 2 1 0 0.79236 0.76987 0.63234 0.29248 +1 107 2 3 0 0.76225 0.74234 0.62737 0.34844 +1 108 1 2 1 0.75963 0.71965 0.63631 0.31392 +1 109 1 2 0 0.78157 0.65906 0.63594 0.29344 +1 110 1 4 0 0.8 0.6691 0.63189 0.33999 +1 111 2 3 1 0.76426 0.64471 0.60207 0.27577 +1 112 2 2 1 0.74667 0.66462 0.62046 0.26335 +1 113 1 3 1 0.78458 0.63884 0.64195 0.27218 +1 114 2 3 0 0.79243 0.63824 0.63688 0.27592 +1 115 1 2 1 0.79322 0.65028 0.62034 0.25584 +1 116 1 4 1 0.79914 0.66745 0.60886 0.25548 +1 117 2 3 1 0.79739 0.61932 0.61802 0.28086 +1 118 2 4 0 0.79022 0.61075 0.61969 0.26407 +1 119 2 3 1 0.8 0.62074 0.62673 0.27659 +1 120 2 1 1 0.8 0.62032 0.57944 0.28841 +1 121 1 4 1 0.79253 0.61165 0.555 0.26186 +1 122 2 4 0 0.8 0.62946 0.54182 0.25526 +1 123 1 3 1 0.79597 0.60834 0.5357 0.2 +1 124 2 3 0 0.78078 0.60309 0.55323 0.22367 +1 125 1 1 1 0.78059 0.59006 0.5389 0.20545 +1 126 1 1 1 0.7415 0.5477 0.53843 0.2 +1 127 1 1 1 0.72498 0.55081 0.54774 0.2 +1 128 1 3 0 0.7273 0.53482 0.54397 0.23411 +1 129 1 1 1 0.6983 0.53396 0.57112 0.26527 +1 130 1 3 0 0.67184 0.55217 0.54923 0.26093 +1 131 1 1 1 0.64299 0.4833 0.56131 0.27607 +1 132 1 4 0 0.64678 0.48409 0.55659 0.26744 +1 133 1 1 1 0.66958 0.48672 0.55672 0.28704 +1 134 1 4 0 0.71353 0.43812 0.54296 0.26765 +1 135 1 1 1 0.72913 0.45831 0.55595 0.26157 +1 136 1 3 1 0.71214 0.40894 0.57912 0.27759 +1 137 2 3 1 0.72246 0.3716 0.5666 0.25731 
+1 138 2 3 0 0.70016 0.33562 0.53811 0.26686 +1 139 1 1 0 0.68348 0.29021 0.5032 0.2907 +1 141 1 2 1 0.70413 0.24533 0.53268 0.31855 +1 142 1 2 0 0.74585 0.23758 0.54789 0.32516 +1 143 2 3 0 0.75878 0.20683 0.54172 0.32643 +1 144 1 1 1 0.75508 0.2 0.54123 0.33066 +1 145 1 1 1 0.75405 0.2 0.50283 0.33762 +1 146 1 4 0 0.72616 0.21818 0.51489 0.34734 +1 147 1 1 1 0.72165 0.2146 0.52902 0.33863 +1 148 1 1 1 0.76338 0.22901 0.53995 0.32508 +1 149 1 3 0 0.8 0.24977 0.55147 0.34688 +1 150 1 4 1 0.8 0.22491 0.55515 0.38301 +1 151 2 4 0 0.76821 0.26234 0.54065 0.37305 +1 152 2 3 0 0.77307 0.22488 0.58349 0.37869 +1 153 1 1 1 0.77173 0.21431 0.53551 0.42413 +1 154 1 1 0 0.75927 0.20014 0.50704 0.42257 +1 155 1 3 0 0.75921 0.21264 0.50199 0.38167 +1 156 1 2 0 0.74445 0.22054 0.51196 0.33042 +1 157 2 1 0 0.72395 0.21222 0.48676 0.33988 +1 158 2 1 0 0.71999 0.2298 0.51039 0.3507 +1 159 2 4 1 0.72939 0.2308 0.54111 0.32357 +1 160 2 2 0 0.69386 0.21052 0.54663 0.27117 +1 161 2 4 1 0.69174 0.2 0.53472 0.28176 +1 162 2 4 0 0.71402 0.2 0.59491 0.26687 +1 163 2 3 0 0.71077 0.2 0.5787 0.29751 +1 164 1 4 0 0.70963 0.2 0.60455 0.28655 +1 165 1 1 1 0.73785 0.2 0.60482 0.27845 +1 166 1 1 1 0.75026 0.2 0.60278 0.29223 +1 167 1 1 1 0.78057 0.2 0.59516 0.29242 +1 168 1 1 1 0.7938 0.20923 0.53569 0.27625 +1 169 1 4 0 0.77124 0.25164 0.47943 0.29059 +1 170 1 3 0 0.77023 0.2788 0.50377 0.25799 +1 171 1 1 1 0.76646 0.27905 0.51914 0.26122 +1 172 1 1 1 0.74042 0.24415 0.5069 0.27107 +1 173 1 1 1 0.73021 0.27041 0.4785 0.26917 +1 174 1 4 0 0.71286 0.28303 0.4701 0.29255 +1 175 1 1 0 0.67608 0.30914 0.48553 0.27482 +1 176 1 2 0 0.72568 0.28528 0.46698 0.28983 +1 177 2 3 0 0.75068 0.32288 0.51553 0.32661 +1 178 1 3 1 0.68976 0.33437 0.57487 0.30929 +1 179 2 3 1 0.63552 0.32788 0.56683 0.28999 +1 180 2 3 0 0.65651 0.29706 0.64643 0.32216 +1 181 1 1 0 0.63992 0.28636 0.65593 0.30065 +1 182 1 1 1 0.63118 0.29203 0.61181 0.24868 +1 183 1 1 0 0.61433 0.30691 0.58943 0.26967 +1 184 1 1 1 0.64362 0.28234 0.59775 0.25273 +1 185 2 4 0 0.65589 0.2 0.63046 0.22552 +1 186 2 4 0 0.64753 0.21033 0.62343 0.23167 +1 187 2 3 1 0.6708 0.23303 0.58866 0.24963 +1 188 2 3 1 0.68793 0.2 0.59113 0.30878 +1 189 2 3 1 0.70132 0.2 0.57037 0.30299 +1 191 1 4 1 0.70615 0.23807 0.57935 0.30751 +1 192 2 4 0 0.69038 0.24958 0.56007 0.27807 +1 193 2 4 0 0.72402 0.24868 0.58419 0.29444 +1 194 1 1 1 0.74722 0.22597 0.57091 0.27845 +1 195 1 1 1 0.77007 0.25026 0.59727 0.26951 +1 196 1 3 0 0.75861 0.24017 0.58072 0.24954 +1 197 2 4 0 0.74568 0.2 0.58408 0.24979 +1 198 1 1 1 0.78681 0.21341 0.56264 0.20372 +1 199 1 1 1 0.7694 0.24506 0.54298 0.2 +1 200 1 1 1 0.8 0.22759 0.49432 0.2 +1 201 1 1 0 0.8 0.22705 0.48005 0.2179 +2 1 2 1 1 0.24366 0.21338 0.7897 0.36247 +2 3 2 1 0 0.24195 0.22465 0.7635 0.37649 +2 4 2 2 1 0.24137 0.22427 0.79877 0.3744 +2 5 2 2 0 0.24103 0.2 0.8 0.38687 +2 6 2 3 1 0.2 0.2 0.79295 0.35462 +2 7 2 3 1 0.21009 0.22935 0.79064 0.34995 +2 8 2 2 0 0.2 0.25825 0.79677 0.32497 +2 9 2 1 0 0.2 0.27439 0.77263 0.31415 +2 10 2 2 0 0.25693 0.28699 0.8 0.35165 +2 11 2 3 1 0.23686 0.27897 0.8 0.33176 +2 12 2 3 0 0.2 0.29644 0.78883 0.34925 +2 13 2 2 0 0.21085 0.29313 0.78698 0.38282 +2 14 2 4 0 0.20371 0.30914 0.78273 0.39991 +2 15 2 1 0 0.2 0.27436 0.79031 0.37668 +2 16 2 1 0 0.2 0.31162 0.783 0.38107 +2 17 2 4 1 0.2 0.33142 0.78508 0.39967 +2 18 2 3 1 0.20132 0.3441 0.79349 0.41119 +2 19 2 3 1 0.2 0.2921 0.7947 0.39435 +2 20 2 3 0 0.2 0.28001 0.8 0.38265 +2 21 2 3 0 0.23446 0.29161 0.7848 0.40374 +2 22 2 1 1 0.24324 0.30684 0.78655 0.36654 +2 23 1 
3 1 0.25357 0.28896 0.8 0.36812 +2 24 1 2 0 0.247 0.31968 0.778 0.39979 +2 25 2 1 0 0.26191 0.29039 0.78188 0.42514 +2 26 2 4 0 0.24009 0.26705 0.77572 0.43339 +2 27 1 2 0 0.23637 0.27463 0.8 0.44448 +2 28 2 3 1 0.2 0.26527 0.79768 0.43536 +2 29 2 3 0 0.2 0.2249 0.8 0.45377 +2 30 2 1 0 0.27119 0.24548 0.77507 0.47467 +2 31 1 2 0 0.25741 0.25583 0.8 0.43019 +2 32 1 1 0 0.25833 0.25345 0.7833 0.45546 +2 33 1 2 1 0.29274 0.2548 0.75592 0.48444 +2 34 1 2 0 0.24411 0.2674 0.69707 0.50089 +2 35 2 1 0 0.25087 0.29031 0.69606 0.51711 +2 36 1 1 1 0.29422 0.24655 0.7281 0.55837 +2 37 2 2 0 0.28983 0.24619 0.73075 0.64885 +2 38 1 1 0 0.28961 0.22933 0.76907 0.64365 +2 39 2 3 1 0.32305 0.2115 0.72785 0.66863 +2 40 2 3 1 0.32795 0.21391 0.75703 0.68245 +2 41 2 3 1 0.33668 0.2 0.8 0.69042 +2 42 1 4 1 0.32341 0.2 0.7744 0.76419 +2 43 2 3 1 0.2924 0.2 0.77229 0.77877 +2 44 2 2 0 0.29488 0.21148 0.8 0.77328 +2 45 2 3 1 0.32204 0.25048 0.77766 0.8 +2 46 2 2 0 0.29959 0.27915 0.78361 0.8 +2 47 2 2 1 0.30354 0.35484 0.77031 0.77172 +2 48 2 4 1 0.32089 0.33943 0.76879 0.8 +2 49 2 4 1 0.31639 0.30386 0.71735 0.8 +2 50 2 4 0 0.32926 0.34595 0.68555 0.7724 +2 51 1 1 0 0.29947 0.30318 0.6959 0.78212 +2 52 2 1 0 0.29323 0.29421 0.69798 0.8 +2 53 2 4 1 0.31145 0.28711 0.67731 0.8 +2 54 2 4 1 0.35715 0.26453 0.66623 0.8 +2 55 2 4 1 0.34242 0.25018 0.65922 0.76883 +2 56 2 4 1 0.34459 0.25371 0.68819 0.76716 +2 57 2 2 0 0.39018 0.26396 0.63748 0.78614 +2 58 1 1 1 0.3358 0.23748 0.60919 0.8 +2 59 1 1 0 0.31958 0.21064 0.63817 0.8 +2 60 1 2 0 0.29338 0.2 0.68027 0.79001 +2 61 2 1 1 0.27116 0.2 0.709 0.8 +2 62 2 3 0 0.25717 0.2 0.69624 0.77628 +2 63 2 4 1 0.27483 0.2 0.66719 0.75931 +2 64 2 4 1 0.23855 0.2 0.61004 0.74309 +2 65 2 2 1 0.21736 0.2 0.65247 0.77225 +2 66 1 1 0 0.25099 0.2 0.70211 0.74655 +2 67 2 4 1 0.2702 0.2 0.71121 0.7433 +2 68 2 2 0 0.27338 0.2358 0.65203 0.71806 +2 69 2 4 1 0.2925 0.2 0.65285 0.72883 +2 70 2 2 0 0.31246 0.22217 0.65929 0.75781 +2 71 2 1 0 0.32305 0.2 0.66168 0.75266 +2 72 2 1 0 0.28378 0.2 0.65774 0.78056 +2 73 2 2 0 0.26524 0.20141 0.59448 0.77223 +2 74 1 4 0 0.27387 0.2 0.57972 0.76982 +2 75 1 1 1 0.33482 0.2 0.5624 0.8 +2 76 1 3 1 0.30843 0.22087 0.52495 0.77129 +2 77 2 4 1 0.29104 0.24487 0.53711 0.7695 +2 78 1 2 0 0.26102 0.24152 0.50456 0.77789 +2 79 2 4 1 0.2445 0.24204 0.50356 0.75557 +2 80 2 1 1 0.26642 0.23341 0.50453 0.72099 +2 81 2 4 1 0.27563 0.23117 0.51365 0.73239 +2 82 2 2 0 0.24556 0.23887 0.49212 0.76062 +2 83 1 2 0 0.21118 0.22106 0.54552 0.79201 +2 84 1 2 1 0.2 0.26054 0.52037 0.79404 +2 85 2 4 1 0.23536 0.24661 0.57319 0.8 +2 86 2 2 0 0.23971 0.21726 0.60673 0.7575 +2 87 2 4 1 0.27447 0.21378 0.58475 0.7807 +2 88 2 1 0 0.23447 0.22887 0.53945 0.8 +2 89 2 4 1 0.23122 0.2 0.56969 0.8 +2 90 2 4 1 0.21434 0.2 0.58063 0.8 +2 91 2 4 1 0.20412 0.2 0.5776 0.77905 +2 92 2 1 0 0.23715 0.20107 0.59502 0.78801 +2 93 1 2 0 0.2 0.20172 0.56694 0.8 +2 94 2 4 1 0.2 0.23888 0.56918 0.8 +2 95 2 1 0 0.2 0.22836 0.54608 0.79578 +2 96 2 4 1 0.21792 0.22493 0.55862 0.8 +2 97 2 4 0 0.25765 0.26661 0.57298 0.76303 +2 98 2 1 1 0.25462 0.26054 0.58158 0.76424 +2 99 2 4 1 0.25058 0.2355 0.56115 0.77487 +2 100 2 4 1 0.2352 0.2 0.57613 0.77472 +2 101 2 4 0 0.24936 0.20905 0.55364 0.75352 +2 102 2 1 0 0.2433 0.2 0.55993 0.78065 +2 103 2 2 0 0.25461 0.23537 0.58316 0.7884 +2 104 2 2 0 0.25684 0.24005 0.54965 0.7952 +2 105 1 1 0 0.29907 0.2506 0.55251 0.8 +2 106 1 2 1 0.26851 0.2435 0.54227 0.8 +2 107 1 4 1 0.24851 0.22888 0.55616 0.79765 +2 108 2 4 1 0.26537 0.25165 0.56028 0.77126 +2 109 2 1 0 
0.26116 0.25402 0.55846 0.73255 +2 110 2 4 0 0.2603 0.24673 0.58361 0.7276 +2 111 2 2 0 0.28591 0.22322 0.64084 0.7201 +2 112 1 4 0 0.26526 0.20484 0.6863 0.712 +2 113 1 4 1 0.26692 0.2 0.70522 0.72084 +2 114 2 4 1 0.27249 0.21392 0.68892 0.72746 +2 115 2 1 0 0.22902 0.20045 0.74818 0.71253 +2 116 2 4 1 0.2353 0.2 0.77855 0.69805 +2 117 2 4 1 0.20838 0.2 0.78606 0.68928 +2 118 2 1 0 0.20182 0.20659 0.79165 0.67785 +2 119 2 4 1 0.21032 0.247 0.77601 0.74302 +2 120 2 4 1 0.20034 0.25251 0.8 0.70396 +2 121 2 4 1 0.2 0.24629 0.79537 0.68448 +2 122 2 4 1 0.21398 0.29466 0.75251 0.66879 +2 123 2 1 0 0.2 0.31706 0.76204 0.6732 +2 124 2 4 1 0.2 0.30489 0.7534 0.71219 +2 125 2 4 1 0.2 0.32492 0.76137 0.71172 +2 126 2 1 0 0.2 0.35076 0.7997 0.71048 +2 127 2 4 0 0.20503 0.31678 0.79524 0.70346 +2 128 2 4 1 0.20516 0.29861 0.76553 0.69496 +2 129 2 4 1 0.22588 0.30163 0.7683 0.72198 +2 130 2 2 0 0.21011 0.32075 0.77334 0.72815 +2 131 2 4 1 0.21068 0.30684 0.76088 0.73397 +2 132 2 4 1 0.2087 0.30048 0.79883 0.74999 +2 133 2 4 1 0.22202 0.30679 0.8 0.7297 +2 134 2 4 0 0.20441 0.28039 0.77104 0.6871 +2 135 2 4 0 0.2029 0.26801 0.75639 0.66139 +2 136 2 4 0 0.20636 0.2252 0.741 0.63109 +2 137 2 1 0 0.24226 0.2 0.78649 0.65203 +2 138 2 4 1 0.25766 0.2 0.7582 0.643 +2 139 1 4 0 0.29617 0.2 0.7412 0.59132 +2 140 2 4 1 0.30146 0.2 0.76005 0.61217 +2 141 2 4 0 0.27104 0.2159 0.75701 0.60006 +2 142 2 4 0 0.26798 0.24948 0.7371 0.61118 +2 143 2 4 0 0.25651 0.23851 0.73358 0.60815 +2 144 2 1 0 0.26757 0.27016 0.72062 0.64522 +2 145 2 4 1 0.28294 0.2391 0.75141 0.62282 +2 146 2 1 0 0.28259 0.23563 0.69756 0.61478 +2 147 2 4 1 0.2582 0.24803 0.70625 0.58711 +2 148 2 2 1 0.28571 0.26536 0.70991 0.60658 +2 149 2 2 0 0.29377 0.23557 0.72483 0.59885 +2 150 2 4 0 0.3194 0.25725 0.74524 0.59905 +2 151 2 4 0 0.30979 0.2444 0.74963 0.58005 +2 152 2 4 1 0.35056 0.22948 0.73684 0.58931 +2 153 2 4 0 0.34977 0.22911 0.72578 0.58484 +2 154 2 4 1 0.34519 0.21168 0.71921 0.60472 +2 155 2 4 1 0.36661 0.23326 0.72028 0.57828 +2 156 2 3 0 0.40117 0.25436 0.71302 0.56412 +2 157 2 4 1 0.40102 0.27823 0.66922 0.56995 +2 158 2 3 1 0.35642 0.26836 0.67426 0.55094 +2 159 1 1 1 0.37148 0.29016 0.67501 0.51965 +2 160 2 1 1 0.3358 0.24635 0.66468 0.50215 +2 161 1 4 0 0.35501 0.24552 0.69507 0.50197 +2 162 2 3 0 0.31346 0.23161 0.66735 0.51181 +2 163 1 2 1 0.30964 0.232 0.6475 0.53865 +2 164 2 3 0 0.30373 0.22914 0.62935 0.55306 +2 165 2 3 1 0.31736 0.22369 0.62071 0.54398 +2 166 2 4 0 0.30014 0.25322 0.61517 0.55492 +2 167 2 4 1 0.34385 0.2456 0.58311 0.5534 +2 168 2 4 0 0.3473 0.2477 0.58684 0.57142 +2 169 2 4 0 0.34401 0.27733 0.59587 0.55711 +2 170 1 1 1 0.33799 0.29646 0.62267 0.58141 +2 171 2 2 0 0.36342 0.31122 0.63888 0.60783 +2 172 2 2 0 0.34621 0.32128 0.63943 0.54333 +2 173 2 4 0 0.32895 0.34686 0.68134 0.49852 +2 174 1 1 1 0.37522 0.31644 0.61196 0.4386 +2 175 2 4 0 0.39076 0.33159 0.65 0.44614 +2 176 2 2 0 0.4096 0.34605 0.68745 0.44148 +2 177 2 2 0 0.46425 0.33531 0.66985 0.44431 +2 178 2 4 0 0.48127 0.34427 0.65921 0.43196 +2 179 2 4 0 0.46951 0.32875 0.66862 0.42214 +2 180 2 4 0 0.45978 0.3009 0.65382 0.42035 +2 181 2 1 1 0.46639 0.31441 0.66291 0.41407 +2 182 2 4 0 0.49453 0.3332 0.6395 0.40546 +2 183 2 2 0 0.48048 0.32783 0.637 0.39346 +2 184 1 1 0 0.50093 0.33951 0.60778 0.42871 +2 185 1 4 1 0.47675 0.33238 0.61487 0.43485 +2 186 2 4 0 0.46652 0.35543 0.62031 0.40333 +2 187 1 4 1 0.50299 0.34544 0.60978 0.38389 +2 188 1 1 1 0.51908 0.35843 0.61294 0.38385 +2 189 2 4 1 0.56691 0.37283 0.60469 0.39722 +2 190 2 4 0 0.57641 
0.40698 0.65272 0.40517 +2 191 1 1 0 0.61806 0.40434 0.62457 0.38315 +2 192 2 4 1 0.6387 0.43436 0.59972 0.37162 +2 193 2 4 0 0.6537 0.47132 0.56371 0.36873 +2 194 1 4 1 0.64354 0.44272 0.53871 0.37205 +2 195 1 1 0 0.68281 0.4423 0.53232 0.37961 +2 196 1 1 0 0.68423 0.48885 0.52515 0.38681 +2 197 2 4 0 0.69172 0.49761 0.51816 0.37109 +2 198 1 4 0 0.68823 0.49309 0.51419 0.36965 +2 199 1 2 1 0.68377 0.4935 0.50005 0.35935 +2 200 2 4 0 0.67325 0.48124 0.48284 0.34656 +2 201 2 4 1 0.68844 0.47268 0.52266 0.36539 +3 1 1 4 1 0.66883 0.37325 0.76919 0.69293 +3 3 1 2 0 0.67015 0.3856 0.76941 0.72175 +3 4 2 4 1 0.65867 0.38996 0.73512 0.76353 +3 5 2 4 1 0.61271 0.4136 0.70859 0.77052 +3 6 2 4 0 0.61433 0.42465 0.70933 0.8 +3 7 2 4 1 0.5804 0.39622 0.69341 0.8 +3 8 2 4 0 0.51841 0.38227 0.73289 0.8 +3 9 1 2 0 0.53659 0.3558 0.74592 0.8 +3 10 1 4 1 0.52065 0.38466 0.78221 0.8 +3 11 1 1 0 0.5127 0.37854 0.7661 0.78401 +3 12 2 4 1 0.49501 0.43971 0.7905 0.7796 +3 13 2 4 1 0.49142 0.46183 0.74579 0.78366 +3 14 2 4 1 0.49081 0.46637 0.74794 0.77315 +3 15 2 2 1 0.50132 0.47586 0.74207 0.8 +3 16 2 4 0 0.56473 0.46072 0.79825 0.79796 +3 17 1 4 0 0.54207 0.46664 0.8 0.74878 +3 18 1 2 0 0.58164 0.44106 0.79297 0.72317 +3 19 2 4 1 0.59149 0.45774 0.79293 0.76953 +3 20 2 4 1 0.61672 0.45676 0.77379 0.79815 +3 21 2 2 1 0.62121 0.44059 0.76258 0.8 +3 22 2 2 1 0.63551 0.4599 0.75005 0.76542 +3 23 2 4 1 0.63114 0.46266 0.75579 0.8 +3 24 2 4 0 0.61963 0.49526 0.7527 0.79561 +3 25 2 2 0 0.57841 0.49419 0.72627 0.8 +3 26 2 2 1 0.56152 0.52013 0.78467 0.8 +3 27 2 3 1 0.57869 0.51671 0.78265 0.8 +3 28 2 3 1 0.56639 0.50541 0.75377 0.76181 +3 29 2 2 1 0.53607 0.53711 0.73607 0.77493 +3 30 2 3 1 0.50742 0.57529 0.74619 0.74033 +3 31 2 3 0 0.49056 0.52378 0.73618 0.71541 +3 32 1 3 1 0.4976 0.52854 0.72432 0.70405 +3 33 1 2 1 0.53769 0.5559 0.68652 0.70031 +3 34 1 2 0 0.53378 0.54603 0.67969 0.70818 +3 35 2 3 1 0.52224 0.53683 0.70012 0.73016 +3 36 2 3 0 0.54336 0.51652 0.69302 0.7253 +3 37 1 1 0 0.50921 0.56155 0.67768 0.72735 +3 38 1 2 0 0.52346 0.5659 0.67873 0.73461 +3 39 2 2 1 0.56296 0.54234 0.64272 0.72261 +3 40 2 4 1 0.57085 0.5206 0.67906 0.72352 +3 41 2 4 1 0.58499 0.53196 0.69191 0.72011 +3 42 2 2 0 0.57616 0.51196 0.674 0.74266 +3 43 2 2 0 0.576 0.53392 0.65332 0.75823 +3 44 1 1 1 0.57044 0.52995 0.61126 0.7968 +3 45 1 1 1 0.60101 0.54231 0.60942 0.78605 +3 46 1 1 0 0.57728 0.55258 0.59843 0.8 +3 47 2 1 0 0.55056 0.54806 0.56974 0.8 +3 48 2 4 1 0.55445 0.59867 0.58828 0.7958 +3 49 2 4 0 0.56397 0.57727 0.55507 0.7543 +3 50 2 3 1 0.57406 0.59639 0.54868 0.76199 +3 51 2 3 1 0.5561 0.59867 0.58165 0.75913 +3 52 2 1 1 0.48821 0.63845 0.58467 0.79374 +3 53 1 2 0 0.47204 0.62393 0.60018 0.75774 +3 54 2 4 0 0.48959 0.6457 0.62181 0.73965 +3 55 1 3 1 0.52759 0.60195 0.61241 0.70988 +3 56 1 3 1 0.52772 0.62054 0.57173 0.7234 +3 57 1 1 0 0.50986 0.59709 0.54509 0.73144 +3 58 1 3 0 0.46038 0.60037 0.52496 0.75924 +3 59 2 3 0 0.4769 0.6381 0.50502 0.73557 +3 60 1 1 1 0.4638 0.63734 0.53088 0.73204 +3 61 1 1 0 0.44397 0.62479 0.55098 0.7317 +3 62 1 1 1 0.45771 0.64205 0.56085 0.7122 +3 63 1 3 1 0.41829 0.61723 0.53791 0.65224 +3 64 1 1 0 0.44906 0.58146 0.55191 0.66344 +3 65 2 3 1 0.47217 0.57877 0.525 0.6597 +3 66 2 1 1 0.48396 0.57911 0.48678 0.66715 +3 67 2 3 0 0.48087 0.55254 0.46851 0.68836 +3 68 2 1 0 0.40167 0.54104 0.40646 0.67455 +3 69 1 2 1 0.41253 0.55343 0.41672 0.65517 +3 70 1 2 1 0.42959 0.59563 0.41995 0.68402 +3 71 1 2 1 0.43857 0.59709 0.42729 0.70901 +3 72 1 2 1 0.44418 0.62363 0.4246 0.71959 +3 73 2 4 1 
0.4767 0.64183 0.38548 0.78097 +3 74 1 2 1 0.5276 0.62719 0.3852 0.8 +3 75 1 2 1 0.49319 0.624 0.35591 0.8 +3 76 2 4 1 0.54732 0.5904 0.29778 0.8 +3 77 2 4 1 0.54944 0.58123 0.32742 0.77967 +3 78 1 2 1 0.56733 0.53663 0.30483 0.8 +3 79 1 2 0 0.57654 0.53186 0.30929 0.76943 +3 80 2 2 1 0.59232 0.54615 0.32875 0.77195 +3 81 2 4 1 0.6407 0.52331 0.29697 0.8 +3 82 2 4 1 0.63453 0.50234 0.2913 0.76079 +3 83 1 2 1 0.63164 0.53699 0.30748 0.77895 +3 84 1 2 0 0.6282 0.5123 0.30934 0.77445 +3 85 2 4 1 0.60935 0.49884 0.33065 0.74279 +3 86 2 2 0 0.61729 0.54562 0.34929 0.74988 +3 87 1 4 1 0.63495 0.52927 0.31141 0.73159 +3 88 1 1 1 0.6246 0.52432 0.34703 0.73015 +3 89 1 1 1 0.64368 0.48815 0.27377 0.73239 +3 90 2 1 0 0.59542 0.45566 0.26969 0.72239 +3 91 2 1 0 0.59224 0.4519 0.27504 0.69281 +3 92 2 4 1 0.59509 0.5055 0.24022 0.66945 +3 93 2 4 1 0.64672 0.53689 0.22287 0.66914 +3 94 2 4 1 0.63177 0.54698 0.21258 0.68408 +3 95 1 2 0 0.67391 0.57384 0.2072 0.68711 +3 96 2 4 1 0.66292 0.52497 0.2 0.70323 +3 97 2 2 1 0.6416 0.53087 0.20378 0.6965 +3 98 2 4 1 0.60641 0.50909 0.26903 0.6806 +3 99 1 2 1 0.6134 0.48996 0.27622 0.70435 +3 100 2 2 1 0.62637 0.48483 0.31202 0.73029 +3 101 2 4 1 0.58895 0.44496 0.3198 0.72504 +3 102 1 4 1 0.59891 0.50268 0.29841 0.72913 +3 103 1 2 0 0.63238 0.5181 0.2929 0.73254 +3 104 2 2 1 0.64532 0.51598 0.29077 0.79193 +3 105 2 2 0 0.64278 0.47902 0.28531 0.79905 +3 106 2 4 1 0.62308 0.49617 0.30022 0.8 +3 107 2 1 0 0.66055 0.47591 0.30855 0.78427 +3 108 2 4 1 0.66069 0.49633 0.31414 0.8 +3 109 2 4 1 0.66532 0.51261 0.33326 0.8 +3 110 2 1 1 0.66496 0.51259 0.30694 0.79976 +3 111 2 4 1 0.63477 0.50855 0.35965 0.8 +3 112 2 4 0 0.64212 0.47413 0.32055 0.7694 +3 113 2 4 1 0.60057 0.42494 0.35101 0.77125 +3 114 1 1 1 0.56903 0.38249 0.35041 0.76236 +3 115 1 1 1 0.6015 0.39316 0.36371 0.77496 +3 116 1 1 1 0.60273 0.42415 0.42261 0.77538 +3 117 1 4 1 0.64753 0.36608 0.46082 0.74709 +3 118 2 4 1 0.64442 0.40509 0.48388 0.71915 +3 119 1 4 1 0.65391 0.42951 0.48458 0.7488 +3 120 1 1 1 0.68116 0.4308 0.49861 0.71676 +3 121 1 1 0 0.65563 0.46113 0.47371 0.72506 +3 122 2 1 1 0.69349 0.49043 0.47868 0.73556 +3 123 2 4 1 0.66198 0.48623 0.51209 0.74302 +3 124 2 1 0 0.62501 0.50053 0.52244 0.73455 +3 125 2 4 1 0.65673 0.44638 0.51138 0.75814 +3 126 2 2 1 0.64113 0.45613 0.4999 0.7822 +3 127 2 2 1 0.61183 0.47796 0.47914 0.78129 +3 128 2 4 0 0.62885 0.48371 0.46325 0.76828 +3 129 1 2 0 0.65825 0.46961 0.48531 0.7496 +3 130 1 3 0 0.6435 0.48994 0.53024 0.72654 +3 131 1 2 1 0.66244 0.51286 0.52535 0.7488 +3 132 1 2 0 0.68476 0.54099 0.51799 0.7379 +3 133 2 4 1 0.68301 0.55496 0.51328 0.74206 +3 134 2 2 0 0.67316 0.55361 0.48301 0.75786 +3 135 2 2 1 0.67376 0.53684 0.49156 0.76391 +3 136 2 4 0 0.70431 0.5375 0.49248 0.72144 +3 137 2 2 0 0.73911 0.51031 0.50981 0.69143 +3 138 2 3 0 0.73501 0.54236 0.48455 0.65323 +3 139 1 2 1 0.70711 0.53633 0.51912 0.68392 +3 140 1 2 0 0.68128 0.55276 0.48967 0.66202 +3 141 1 2 1 0.66796 0.51312 0.48063 0.67974 +3 142 1 2 0 0.68706 0.52262 0.45528 0.69269 +3 143 2 4 1 0.67081 0.50414 0.40634 0.68221 +3 144 2 2 1 0.60688 0.4994 0.38689 0.68965 +3 145 2 4 1 0.64122 0.47853 0.39266 0.71406 +3 146 2 4 1 0.66933 0.47368 0.37491 0.69829 +3 147 2 2 0 0.6751 0.52406 0.38091 0.70497 +3 148 2 4 1 0.66144 0.49961 0.37475 0.69052 +3 149 2 4 0 0.63735 0.5103 0.36973 0.69204 +3 150 1 2 1 0.63358 0.48455 0.37815 0.68046 +3 151 1 4 1 0.65883 0.47061 0.3947 0.65703 +3 152 1 2 1 0.6302 0.50495 0.39799 0.65565 +3 153 2 4 1 0.62789 0.48344 0.39312 0.63916 +3 154 1 2 1 0.6335 
0.46165 0.41299 0.64529 +3 155 1 2 1 0.64593 0.46122 0.38794 0.66622 +3 156 1 2 0 0.646 0.44097 0.3853 0.6999 +3 157 1 2 0 0.63902 0.45708 0.35352 0.70509 +3 158 2 4 1 0.66877 0.4357 0.31695 0.71684 +3 159 2 4 1 0.66383 0.44026 0.28375 0.73352 +3 160 2 4 1 0.6475 0.43008 0.26323 0.68252 +3 161 2 4 1 0.62258 0.43133 0.24392 0.69062 +3 162 2 1 1 0.65065 0.46271 0.22707 0.71892 +3 163 2 4 1 0.60723 0.44933 0.2092 0.71241 +3 164 1 4 1 0.59875 0.43997 0.21956 0.6914 +3 165 1 1 0 0.55818 0.40711 0.2 0.72182 +3 166 1 1 1 0.60092 0.38929 0.25299 0.74315 +3 167 1 1 1 0.6077 0.36729 0.2275 0.74274 +3 168 1 1 1 0.58144 0.36602 0.24947 0.70624 +3 169 1 1 1 0.58884 0.34827 0.2796 0.71898 +3 170 1 4 1 0.61215 0.37417 0.2637 0.74439 +3 171 1 1 1 0.63596 0.36185 0.26624 0.73248 +3 172 1 1 1 0.61559 0.37883 0.22076 0.73546 +3 173 1 1 1 0.58784 0.39491 0.20025 0.73755 +3 174 1 2 1 0.6596 0.38477 0.24322 0.77936 +3 175 1 1 1 0.64983 0.43784 0.27238 0.77963 +3 176 1 1 0 0.63608 0.43822 0.26457 0.78278 +3 177 2 4 1 0.61948 0.43996 0.23311 0.75512 +3 178 2 4 1 0.61418 0.38824 0.23349 0.75632 +3 179 2 1 0 0.6146 0.37627 0.25115 0.8 +3 180 2 2 0 0.62001 0.34678 0.2671 0.79487 +3 181 1 2 1 0.615 0.33971 0.28171 0.79225 +3 182 2 2 0 0.61682 0.32518 0.34198 0.79845 +3 183 2 4 1 0.63967 0.31349 0.3434 0.78929 +3 184 2 4 1 0.62524 0.30235 0.32921 0.78309 +3 185 2 4 1 0.65432 0.28414 0.3005 0.77878 +3 186 2 4 1 0.6499 0.28287 0.3494 0.7755 +3 187 2 4 1 0.6312 0.29965 0.3589 0.739 +3 188 2 4 1 0.60689 0.31089 0.35521 0.74163 +3 189 2 4 0 0.62744 0.29311 0.34019 0.75455 +3 190 2 2 0 0.62018 0.30403 0.37572 0.75018 +3 191 1 1 1 0.59118 0.32691 0.3682 0.74053 +3 192 1 3 0 0.62218 0.31464 0.37339 0.72332 +3 193 1 1 1 0.60768 0.30155 0.3907 0.73393 +3 194 1 1 1 0.62445 0.25367 0.40889 0.7381 +3 195 1 4 1 0.58264 0.27604 0.38269 0.73848 +3 196 1 4 1 0.5586 0.23074 0.38086 0.77833 +3 197 1 4 0 0.54563 0.22598 0.36843 0.73306 +3 198 1 1 0 0.5992 0.24965 0.35665 0.72907 +3 199 2 4 0 0.63541 0.24274 0.35439 0.68775 +3 200 1 2 0 0.64018 0.24858 0.36565 0.6627 +3 201 2 3 1 0.65081 0.25388 0.39391 0.67241 +4 1 2 4 0 0.21199 0.54628 0.68794 0.47466 +4 2 2 4 0 0.2 0.534 0.65541 0.47102 +4 3 2 3 1 0.2 0.57876 0.65958 0.47067 +4 4 2 3 1 0.2 0.56797 0.63188 0.42063 +4 6 2 1 1 0.2 0.54476 0.60146 0.47798 +4 7 2 1 0 0.2 0.52605 0.60722 0.47527 +4 8 2 3 0 0.2 0.52271 0.63572 0.47881 +4 9 2 1 0 0.21722 0.55743 0.64484 0.49461 +4 10 2 3 0 0.22466 0.5777 0.67382 0.49864 +4 11 1 4 0 0.22561 0.58815 0.68153 0.5709 +4 12 1 4 0 0.21568 0.56781 0.67591 0.5935 +4 13 1 2 1 0.22308 0.52535 0.71702 0.60735 +4 14 1 2 1 0.20689 0.53131 0.70323 0.60971 +4 15 1 2 1 0.21517 0.57119 0.70512 0.60726 +4 16 1 2 1 0.2067 0.56055 0.72736 0.59942 +4 17 1 2 0 0.2 0.55337 0.74002 0.60865 +4 18 2 3 1 0.2 0.57323 0.78656 0.53756 +4 19 2 3 0 0.24501 0.57013 0.8 0.55964 +4 20 1 1 0 0.22025 0.59802 0.78074 0.58471 +4 21 1 2 1 0.24619 0.58994 0.78157 0.62366 +4 22 1 2 1 0.2425 0.60365 0.77134 0.67168 +4 23 2 4 1 0.26134 0.62914 0.73476 0.66238 +4 24 1 2 1 0.2523 0.67328 0.76341 0.6803 +4 25 2 4 1 0.21905 0.66907 0.74752 0.67259 +4 26 2 1 0 0.2 0.63148 0.74129 0.64534 +4 27 1 2 1 0.22706 0.64474 0.7103 0.64379 +4 28 2 3 1 0.24795 0.71816 0.73498 0.65137 +4 29 2 3 0 0.25824 0.72237 0.71915 0.64535 +4 30 2 4 1 0.2023 0.73493 0.72564 0.63744 +4 31 1 4 1 0.21389 0.75741 0.72671 0.64195 +4 32 2 2 1 0.2 0.77333 0.72097 0.6328 +4 33 2 2 1 0.2 0.77558 0.69994 0.6418 +4 34 2 2 1 0.2 0.78144 0.67943 0.63996 +4 35 1 3 0 0.2 0.8 0.67301 0.61008 +4 36 1 2 0 0.2 0.8 0.68831 0.63528 +4 
37 1 2 1 0.2 0.8 0.69131 0.6034 +4 38 2 3 1 0.20971 0.8 0.70393 0.57568 +4 39 2 3 1 0.21621 0.76178 0.71917 0.55994 +4 40 2 1 0 0.22745 0.74196 0.70603 0.55248 +4 41 2 4 1 0.23812 0.75206 0.68627 0.56138 +4 42 1 2 1 0.26515 0.73895 0.69746 0.57138 +4 43 2 4 0 0.28398 0.76918 0.73134 0.59407 +4 44 2 1 1 0.30592 0.75416 0.70629 0.5629 +4 45 1 4 1 0.31918 0.76789 0.70929 0.56458 +4 46 2 3 1 0.29707 0.71374 0.71305 0.56137 +4 47 1 4 0 0.30046 0.66943 0.72947 0.55543 +4 48 1 2 1 0.27898 0.69381 0.70074 0.5995 +4 49 2 4 1 0.26535 0.69816 0.68161 0.58912 +4 50 1 2 1 0.30351 0.72021 0.67091 0.56377 +4 51 2 4 1 0.33934 0.65248 0.66959 0.56201 +4 52 2 2 1 0.3872 0.63757 0.68541 0.58033 +4 53 1 2 0 0.3944 0.6497 0.67534 0.57672 +4 54 2 3 0 0.41049 0.65797 0.6736 0.57501 +4 55 1 2 1 0.36902 0.61196 0.70441 0.62472 +4 56 1 2 1 0.39708 0.658 0.73746 0.67287 +4 57 1 3 0 0.33752 0.69203 0.75871 0.63325 +4 58 1 4 1 0.33235 0.68372 0.79774 0.65964 +4 59 2 1 0 0.29321 0.66003 0.8 0.68977 +4 60 2 2 1 0.2689 0.68852 0.79386 0.6942 +4 61 1 4 0 0.27333 0.70509 0.78656 0.68412 +4 62 1 1 1 0.29007 0.6768 0.8 0.72379 +4 63 1 1 1 0.27771 0.69015 0.79024 0.74027 +4 64 1 1 0 0.27473 0.72906 0.76889 0.73509 +4 65 1 1 1 0.27454 0.7235 0.75293 0.74844 +4 66 2 3 1 0.27372 0.71364 0.77559 0.75522 +4 67 2 4 1 0.29453 0.64955 0.77966 0.74915 +4 68 2 3 1 0.28646 0.64986 0.79155 0.78968 +4 69 2 4 1 0.26537 0.63016 0.77741 0.78975 +4 70 2 3 1 0.28141 0.66991 0.74299 0.79249 +4 71 1 1 0 0.29099 0.66493 0.79439 0.79014 +4 72 1 4 1 0.31207 0.64723 0.79159 0.78607 +4 73 2 3 1 0.26992 0.64794 0.762 0.79788 +4 74 1 1 0 0.28006 0.57867 0.78492 0.78075 +4 75 2 3 1 0.25879 0.61897 0.77092 0.7282 +4 76 2 1 0 0.21374 0.6422 0.77857 0.7214 +4 77 1 2 1 0.2 0.66219 0.76089 0.71271 +4 78 1 4 1 0.23095 0.63052 0.78842 0.74988 +4 79 2 1 0 0.21211 0.67373 0.75575 0.77481 +4 80 2 4 1 0.22245 0.67839 0.71743 0.78994 +4 81 1 3 1 0.22854 0.67643 0.72384 0.76479 +4 82 2 2 1 0.2 0.65877 0.69777 0.8 +4 83 2 3 1 0.2 0.66073 0.69603 0.8 +4 84 1 1 0 0.2 0.68394 0.70717 0.8 +4 85 2 2 1 0.20384 0.66684 0.75085 0.8 +4 86 1 3 1 0.21624 0.64553 0.77035 0.76305 +4 87 2 4 1 0.22371 0.66605 0.71853 0.79022 +4 88 2 3 1 0.22751 0.70415 0.75329 0.76656 +4 89 2 2 1 0.226 0.71427 0.73792 0.75358 +4 90 2 4 1 0.25551 0.73673 0.75205 0.73508 +4 91 2 1 0 0.24871 0.75519 0.77856 0.70971 +4 92 2 4 1 0.22538 0.69685 0.77893 0.72328 +4 93 2 1 0 0.24222 0.68194 0.77438 0.65775 +4 94 2 4 1 0.25815 0.70205 0.8 0.63861 +4 95 1 1 0 0.27333 0.6861 0.8 0.65481 +4 96 1 4 0 0.27917 0.73356 0.7416 0.67907 +4 97 1 3 1 0.28182 0.71244 0.72781 0.65051 +4 98 1 2 1 0.29413 0.72278 0.7606 0.68453 +4 99 1 2 1 0.2932 0.73863 0.75846 0.68132 +4 100 1 3 1 0.31532 0.69763 0.75898 0.69651 +4 101 1 2 1 0.31612 0.70769 0.74336 0.70307 +4 102 1 2 1 0.3108 0.75304 0.76022 0.6906 +4 103 1 1 0 0.33191 0.79851 0.7261 0.709 +4 104 1 2 1 0.34414 0.79383 0.74593 0.71874 +4 105 1 2 1 0.34368 0.8 0.77512 0.71896 +4 106 1 2 1 0.34419 0.77415 0.78079 0.71189 +4 107 1 2 1 0.37746 0.79259 0.78847 0.70569 +4 108 1 2 1 0.37835 0.79968 0.77385 0.69216 +4 109 1 4 1 0.38553 0.8 0.70916 0.66968 +4 110 1 2 1 0.38058 0.8 0.69244 0.67389 +4 111 1 2 1 0.41382 0.79577 0.70813 0.67588 +4 112 1 2 0 0.36934 0.8 0.66458 0.68569 +4 113 2 3 0 0.35152 0.79807 0.65552 0.63742 +4 114 1 2 0 0.34184 0.8 0.66402 0.60133 +4 115 2 4 1 0.32713 0.8 0.70044 0.5724 +4 116 2 4 1 0.34862 0.8 0.76034 0.54769 +4 117 2 2 1 0.38828 0.8 0.79676 0.5328 +4 118 2 3 1 0.39307 0.8 0.8 0.53451 +4 119 2 4 1 0.39582 0.79676 0.79137 0.50423 +4 120 2 4 0 
0.40118 0.8 0.75272 0.46582 +4 121 2 4 0 0.43031 0.8 0.74693 0.48711 +4 122 2 3 1 0.4908 0.8 0.7287 0.48293 +4 123 2 3 0 0.46163 0.8 0.68921 0.46915 +4 124 1 2 1 0.46082 0.76616 0.6904 0.44279 +4 125 1 2 1 0.46621 0.77326 0.68577 0.45188 +4 126 1 2 1 0.41896 0.77596 0.6704 0.42 +4 127 1 3 1 0.40602 0.76215 0.63875 0.37658 +4 128 1 2 1 0.42846 0.78743 0.63211 0.36063 +4 129 2 4 1 0.41213 0.75659 0.6251 0.33481 +4 130 2 4 0 0.41481 0.77493 0.59454 0.28814 +4 131 2 3 1 0.41472 0.79236 0.61594 0.2509 +4 132 2 3 1 0.39245 0.8 0.56165 0.30671 +4 133 1 1 0 0.40761 0.8 0.60698 0.33748 +4 134 1 3 1 0.42713 0.79083 0.64562 0.28783 +4 135 1 4 0 0.42478 0.7864 0.64 0.3166 +4 136 2 2 1 0.41485 0.76076 0.63457 0.29308 +4 137 2 3 1 0.4325 0.79865 0.6661 0.27684 +4 138 1 4 1 0.4363 0.75789 0.66885 0.25926 +4 139 1 2 1 0.4181 0.77731 0.68794 0.28972 +4 140 2 4 0 0.40627 0.76115 0.7093 0.31961 +4 141 2 2 1 0.37519 0.76241 0.69879 0.28667 +4 142 1 3 0 0.3901 0.7591 0.69174 0.30751 +4 143 2 4 1 0.36338 0.71133 0.69568 0.33997 +4 144 1 1 1 0.39841 0.73696 0.72225 0.34169 +4 145 1 1 1 0.41371 0.69938 0.72395 0.36836 +4 146 1 1 0 0.37714 0.71863 0.68051 0.39311 +4 147 1 1 0 0.32263 0.78138 0.72232 0.35715 +4 148 2 2 1 0.3333 0.76573 0.69665 0.40039 +4 149 1 4 0 0.3169 0.77223 0.65767 0.42938 +4 150 1 3 1 0.27789 0.78937 0.68047 0.46507 +4 151 1 2 1 0.23163 0.77209 0.72142 0.47408 +4 152 2 3 1 0.23568 0.76247 0.73256 0.46965 +4 153 1 2 1 0.26304 0.7484 0.73707 0.48612 +4 154 1 4 0 0.21324 0.72897 0.73612 0.50978 +4 155 1 2 1 0.21614 0.74213 0.72873 0.47975 +4 156 1 2 1 0.22546 0.75149 0.73982 0.51567 +4 157 2 4 0 0.2 0.76702 0.73213 0.50302 +4 158 1 2 0 0.2 0.75791 0.76103 0.49764 +4 159 1 2 0 0.24648 0.74262 0.75323 0.48225 +4 160 2 1 0 0.29166 0.7449 0.75737 0.49812 +4 161 1 2 1 0.29722 0.74881 0.78415 0.49579 +4 162 1 2 1 0.29739 0.748 0.75971 0.49409 +4 163 1 2 1 0.29061 0.74109 0.75713 0.47148 +4 164 1 2 1 0.3241 0.8 0.7562 0.49 +4 165 1 1 0 0.30304 0.8 0.74852 0.43331 +4 166 1 3 1 0.30686 0.8 0.79876 0.4492 +4 167 1 2 1 0.27929 0.79193 0.8 0.45587 +4 168 1 2 1 0.27502 0.8 0.8 0.47165 +4 169 1 2 1 0.27626 0.76784 0.8 0.45688 +4 170 1 2 1 0.26103 0.8 0.79842 0.50098 +4 171 2 1 0 0.25009 0.8 0.8 0.52076 +4 172 2 4 0 0.27084 0.76792 0.79399 0.53205 +4 173 1 4 0 0.25985 0.8 0.8 0.52452 +4 174 1 2 1 0.3034 0.8 0.76969 0.53788 +4 175 1 3 1 0.31203 0.8 0.76893 0.55553 +4 176 1 4 1 0.29759 0.79857 0.8 0.54718 +4 177 1 3 1 0.30694 0.8 0.77632 0.50738 +4 178 1 2 1 0.32205 0.79845 0.75379 0.51164 +4 179 1 2 1 0.3711 0.79228 0.75654 0.49837 +4 180 1 2 0 0.36351 0.75311 0.76007 0.52005 +4 181 1 2 1 0.43037 0.78817 0.7648 0.52297 +4 182 1 2 1 0.38317 0.8 0.76207 0.53649 +4 183 1 2 1 0.40583 0.76667 0.7691 0.51703 +4 184 1 2 1 0.37856 0.74345 0.78541 0.54304 +4 185 1 2 0 0.35465 0.75525 0.76958 0.50629 +4 186 1 1 0 0.34375 0.75051 0.8 0.51524 +4 187 1 2 1 0.32132 0.75855 0.79423 0.53117 +4 188 1 2 1 0.3636 0.77127 0.78654 0.58878 +4 189 1 2 0 0.3275 0.78351 0.77677 0.58923 +4 190 1 2 0 0.27943 0.77737 0.76301 0.61983 +4 191 1 2 1 0.27087 0.77048 0.76726 0.63355 +4 192 2 2 1 0.2608 0.7859 0.79498 0.67274 +4 193 1 2 0 0.24295 0.77068 0.8 0.6974 +4 194 1 1 1 0.21104 0.76327 0.74363 0.68911 +4 195 1 2 1 0.2056 0.77968 0.75447 0.67363 +4 196 1 2 1 0.2 0.78194 0.71332 0.67214 +4 197 1 3 1 0.2 0.79051 0.73342 0.72048 +4 198 1 3 1 0.2 0.8 0.75775 0.73538 +4 199 1 3 1 0.2 0.8 0.71951 0.74666 +4 200 1 1 0 0.2 0.79957 0.72178 0.77312 +4 201 1 3 0 0.2 0.77904 0.76431 0.79704 +5 1 2 3 0 0.52965 0.6281 0.39177 0.2627 +5 2 2 1 1 0.50844 
0.64534 0.43629 0.25243 +5 3 2 3 1 0.49916 0.65298 0.43716 0.25631 +5 4 1 1 1 0.46066 0.65858 0.45227 0.24514 +5 5 1 4 0 0.46583 0.67651 0.42093 0.28374 +5 6 1 3 1 0.48888 0.66179 0.441 0.31545 +5 7 1 1 1 0.46957 0.67537 0.39708 0.31115 +5 8 1 1 0 0.47773 0.63281 0.41152 0.30077 +5 9 1 1 0 0.42138 0.60612 0.41382 0.35085 +5 10 2 3 0 0.39058 0.65772 0.43496 0.34415 +5 11 2 4 0 0.36632 0.65864 0.38564 0.32864 +5 12 1 2 1 0.3353 0.67799 0.36702 0.30257 +5 13 1 3 1 0.33005 0.67692 0.37394 0.31 +5 14 1 3 0 0.30704 0.70034 0.38948 0.29147 +5 15 1 2 0 0.30188 0.67684 0.37848 0.30749 +5 16 2 3 0 0.31777 0.70184 0.37308 0.33226 +5 17 2 4 0 0.30146 0.68226 0.36574 0.35462 +5 18 1 2 0 0.30953 0.70445 0.36379 0.35352 +5 19 1 1 0 0.29945 0.71922 0.36604 0.36233 +5 21 1 1 0 0.26136 0.76773 0.33342 0.40977 +5 22 1 2 1 0.25017 0.79726 0.26784 0.44439 +5 23 1 4 1 0.25924 0.8 0.24602 0.39868 +5 24 1 2 1 0.25417 0.8 0.26601 0.39396 +5 25 1 2 1 0.24727 0.8 0.28345 0.40097 +5 26 1 2 1 0.24463 0.8 0.27493 0.41779 +5 27 1 2 1 0.22767 0.75664 0.25281 0.37704 +5 28 1 4 1 0.24347 0.75487 0.25652 0.36365 +5 29 1 2 1 0.25231 0.72268 0.27731 0.35213 +5 30 1 4 0 0.25335 0.6809 0.32021 0.34899 +5 31 1 4 1 0.26974 0.64092 0.25591 0.36438 +5 32 1 2 1 0.26745 0.66799 0.2717 0.34281 +5 33 1 2 1 0.28884 0.69135 0.26879 0.34217 +5 34 1 4 0 0.29497 0.69864 0.25664 0.33734 +5 35 1 2 1 0.30562 0.64968 0.24518 0.32997 +5 36 1 2 1 0.28868 0.66533 0.30171 0.30097 +5 37 1 2 0 0.28809 0.7375 0.3034 0.30363 +5 38 1 4 0 0.31865 0.71161 0.30639 0.28397 +5 39 1 1 0 0.31807 0.64858 0.29697 0.30764 +5 40 1 1 0 0.31593 0.65613 0.33298 0.31225 +5 41 1 3 1 0.33026 0.61258 0.33138 0.32014 +5 42 1 2 1 0.32697 0.61912 0.34696 0.33464 +5 43 1 3 0 0.27858 0.63891 0.35506 0.31018 +5 44 1 4 1 0.32086 0.66656 0.41123 0.28709 +5 45 1 2 1 0.34782 0.66129 0.41197 0.2836 +5 46 1 2 1 0.3288 0.70515 0.42619 0.30467 +5 47 1 2 1 0.31461 0.7261 0.46665 0.28781 +5 48 1 2 1 0.29798 0.75841 0.45923 0.24544 +5 49 1 1 0 0.32415 0.74721 0.45376 0.23062 +5 50 1 4 1 0.30859 0.73631 0.42276 0.25451 +5 51 1 4 1 0.30114 0.70529 0.43194 0.24206 +5 52 1 4 0 0.29249 0.67129 0.43607 0.20447 +5 53 1 2 1 0.28941 0.65402 0.47464 0.20202 +5 54 1 4 0 0.28255 0.65782 0.44258 0.24802 +5 55 1 2 0 0.29205 0.65442 0.42603 0.2763 +5 56 1 2 1 0.28681 0.68052 0.43304 0.25667 +5 57 1 2 1 0.28534 0.69036 0.43969 0.30449 +5 58 1 4 1 0.28727 0.72614 0.40972 0.28317 +5 59 1 2 0 0.29809 0.73427 0.40003 0.25991 +5 60 1 2 1 0.32128 0.72385 0.38134 0.25928 +5 61 1 2 1 0.2904 0.77418 0.40214 0.22237 +5 62 1 2 1 0.31291 0.76574 0.39228 0.23189 +5 63 1 2 0 0.31813 0.74611 0.37152 0.21661 +5 64 1 4 1 0.34169 0.72641 0.37578 0.23515 +5 65 1 1 0 0.39352 0.70822 0.32018 0.23678 +5 66 1 4 0 0.35239 0.70569 0.33043 0.25038 +5 67 1 1 0 0.31002 0.73202 0.30254 0.22323 +5 68 1 2 1 0.32702 0.73928 0.32406 0.22419 +5 69 1 2 1 0.32569 0.74191 0.3323 0.2288 +5 70 1 2 0 0.31631 0.75926 0.35622 0.20484 +5 71 1 2 1 0.34697 0.7608 0.3981 0.2 +5 72 1 4 0 0.36965 0.74103 0.41356 0.20749 +5 73 1 4 1 0.33203 0.75547 0.40478 0.24049 +5 74 1 2 0 0.3359 0.8 0.41224 0.23604 +5 75 1 2 1 0.38071 0.77505 0.40267 0.23514 +5 76 1 2 0 0.35913 0.7656 0.44632 0.22138 +5 77 1 2 1 0.32985 0.79312 0.47177 0.24763 +5 78 1 2 1 0.34528 0.72516 0.45731 0.25059 +5 79 1 2 1 0.42887 0.70956 0.52762 0.22566 +5 80 1 2 1 0.50375 0.70408 0.55354 0.24068 +5 81 1 2 1 0.49584 0.69185 0.52126 0.21029 +5 82 1 2 1 0.49765 0.68081 0.51965 0.21723 +5 83 1 4 0 0.47827 0.70016 0.5245 0.22204 +5 84 1 2 0 0.49644 0.72369 0.54001 0.22711 +5 85 1 2 1 0.50782 
0.73512 0.5403 0.24375 +5 86 1 2 1 0.48393 0.6719 0.54166 0.22529 +5 87 1 2 0 0.48789 0.6832 0.54899 0.23012 +5 88 1 2 1 0.45357 0.68183 0.54698 0.23454 +5 89 1 2 1 0.43108 0.6934 0.50771 0.28144 +5 90 1 2 1 0.41876 0.69745 0.50987 0.29576 +5 91 1 3 0 0.38172 0.67906 0.49969 0.29294 +5 92 1 2 1 0.37691 0.68526 0.47025 0.2863 +5 93 1 2 1 0.3854 0.66665 0.42952 0.27794 +5 94 1 2 1 0.4366 0.66658 0.43534 0.29518 +5 95 1 2 0 0.42289 0.69998 0.41894 0.35091 +5 96 1 4 0 0.40661 0.68082 0.40679 0.35538 +5 97 1 4 0 0.40432 0.69434 0.43249 0.38228 +5 98 1 4 0 0.39867 0.7264 0.39862 0.32107 +5 99 1 2 1 0.41113 0.6887 0.459 0.32047 +5 100 2 3 0 0.37729 0.72178 0.47418 0.33235 +5 101 1 2 1 0.3844 0.77754 0.48317 0.28709 +5 102 1 2 1 0.36407 0.79627 0.47854 0.29967 +5 103 1 2 1 0.37211 0.8 0.49278 0.26266 +5 104 1 4 0 0.36476 0.8 0.51316 0.2918 +5 105 1 2 1 0.37656 0.7832 0.52443 0.31781 +5 106 1 4 0 0.361 0.75417 0.51713 0.3391 +5 107 1 2 1 0.34127 0.69674 0.51345 0.33678 +5 108 1 3 1 0.36536 0.69128 0.54557 0.37853 +5 109 1 2 0 0.36782 0.68772 0.50025 0.3886 +5 110 1 3 1 0.37694 0.66622 0.52168 0.3531 +5 111 2 3 0 0.40396 0.67503 0.51225 0.35866 +5 112 1 3 0 0.39044 0.77402 0.48213 0.36963 +5 113 1 2 1 0.41819 0.76111 0.42435 0.36787 +5 114 1 3 0 0.43218 0.74342 0.45394 0.37659 +5 115 1 2 1 0.41543 0.7167 0.43029 0.37865 +5 116 1 2 1 0.43204 0.74695 0.47116 0.35511 +5 117 1 2 0 0.42545 0.73504 0.48081 0.38071 +5 118 1 4 1 0.40956 0.76826 0.48392 0.37526 +5 119 1 2 1 0.44331 0.7724 0.493 0.35941 +5 120 1 2 1 0.42941 0.74261 0.48721 0.32865 +5 121 1 2 1 0.46223 0.7079 0.495 0.34236 +5 122 1 4 1 0.45196 0.74791 0.51239 0.30726 +5 123 1 2 1 0.46976 0.7212 0.50553 0.29633 +5 124 1 2 1 0.49744 0.72772 0.47922 0.32832 +5 125 1 4 1 0.48511 0.70999 0.44181 0.35508 +5 126 1 2 1 0.49698 0.72154 0.4094 0.33259 +5 127 1 2 0 0.55174 0.72168 0.34913 0.29959 +5 128 1 2 1 0.56839 0.74423 0.36314 0.29836 +5 129 1 2 1 0.56329 0.74977 0.30709 0.29901 +5 130 1 4 0 0.53117 0.71506 0.30289 0.29889 +5 131 1 2 1 0.53059 0.72266 0.29907 0.27074 +5 132 2 4 0 0.52097 0.73037 0.31229 0.26118 +5 133 1 2 1 0.52505 0.73778 0.30595 0.26641 +5 134 1 2 1 0.51804 0.74373 0.29208 0.22722 +5 135 1 4 0 0.50817 0.69914 0.29086 0.2444 +5 136 1 2 0 0.46426 0.64347 0.29607 0.24786 +5 137 1 2 1 0.45112 0.65173 0.28418 0.2684 +5 138 1 2 1 0.46582 0.63202 0.27425 0.2506 +5 139 1 2 1 0.52614 0.64221 0.30124 0.26622 +5 140 1 2 1 0.48691 0.62286 0.24835 0.26166 +5 141 1 2 1 0.49674 0.65637 0.26224 0.26184 +5 142 1 2 1 0.50251 0.66832 0.27825 0.22906 +5 143 1 2 0 0.52561 0.63081 0.2888 0.25572 +5 144 1 2 0 0.5582 0.63455 0.31322 0.2142 +5 145 1 1 0 0.56369 0.67699 0.28798 0.25044 +5 146 1 2 0 0.59611 0.68733 0.30053 0.26272 +5 147 2 4 0 0.62406 0.68451 0.27671 0.28084 +5 148 2 3 0 0.62594 0.66005 0.2575 0.23694 +5 149 1 2 1 0.60473 0.64258 0.26584 0.2 +5 150 1 2 0 0.55418 0.64354 0.25955 0.2 +5 151 1 2 1 0.49225 0.65877 0.22367 0.2 +5 152 1 4 0 0.48977 0.67444 0.22502 0.2 +5 153 1 2 1 0.49144 0.68793 0.23937 0.2251 +5 154 1 2 1 0.51661 0.67634 0.25181 0.23167 +5 155 1 3 0 0.49595 0.61461 0.27478 0.25776 +5 156 1 2 0 0.51275 0.58055 0.29726 0.29554 +5 157 1 2 1 0.54337 0.57698 0.34097 0.29997 +5 158 1 2 0 0.54004 0.56746 0.33991 0.34567 +5 159 1 2 1 0.5505 0.58749 0.37857 0.33782 +5 160 1 2 0 0.55734 0.58047 0.31952 0.32997 +5 161 1 2 1 0.57778 0.58586 0.32935 0.29741 +5 162 1 3 0 0.61467 0.56721 0.34121 0.29956 +5 163 1 2 0 0.61045 0.60386 0.33623 0.31601 +5 164 1 2 1 0.61667 0.65245 0.37916 0.34916 +5 165 1 4 0 0.60631 0.64049 0.37032 0.32187 +5 
166 1 4 1 0.56741 0.6463 0.39292 0.26194 +5 167 1 4 1 0.55779 0.66226 0.35642 0.30488 +5 168 1 2 1 0.60508 0.65471 0.33749 0.31078 +5 169 2 4 0 0.58856 0.68126 0.3558 0.29629 +5 170 1 4 0 0.6058 0.67863 0.36828 0.29849 +5 171 1 4 0 0.63924 0.72809 0.37309 0.27935 +5 172 1 2 0 0.61086 0.76142 0.32803 0.31131 +5 173 1 4 0 0.56857 0.72348 0.4022 0.2991 +5 174 1 2 1 0.57425 0.75776 0.38847 0.31192 +5 175 1 2 1 0.58034 0.73465 0.38833 0.33734 +5 176 1 3 1 0.54472 0.70354 0.39372 0.35991 +5 177 1 3 0 0.58202 0.70963 0.34559 0.35314 +5 178 1 2 1 0.55976 0.69322 0.34919 0.33378 +5 179 1 2 0 0.58258 0.70533 0.3781 0.31117 +5 180 1 2 1 0.55612 0.70875 0.43954 0.32156 +5 181 1 3 1 0.56855 0.67537 0.44562 0.32888 +5 182 1 4 1 0.59863 0.68329 0.42186 0.35163 +5 183 1 3 0 0.60997 0.68519 0.4303 0.37683 +5 184 1 2 0 0.60248 0.71019 0.41902 0.38277 +5 185 1 2 1 0.59809 0.76062 0.43002 0.38323 +5 186 1 4 1 0.6081 0.77644 0.47993 0.38275 +5 187 1 2 1 0.61408 0.79223 0.45948 0.39387 +5 188 1 2 0 0.60928 0.8 0.45733 0.43246 +5 189 1 3 1 0.60294 0.78355 0.42614 0.43557 +5 190 1 3 0 0.5558 0.78433 0.36131 0.4455 +5 191 2 4 0 0.60722 0.77838 0.36265 0.45639 +5 192 1 4 1 0.56958 0.74974 0.39301 0.46816 +5 193 1 2 1 0.57706 0.73897 0.38343 0.45477 +5 194 1 2 1 0.58157 0.70094 0.39122 0.42132 +5 195 1 2 1 0.60293 0.70089 0.38323 0.39178 +5 196 1 2 1 0.57593 0.66786 0.43664 0.4349 +5 197 1 2 0 0.60465 0.64527 0.44414 0.40635 +5 198 1 4 0 0.5616 0.64003 0.46539 0.42425 +5 199 1 2 1 0.58718 0.58884 0.45605 0.43693 +5 200 1 2 0 0.58107 0.59477 0.40883 0.42763 +5 201 1 2 0 0.60801 0.56536 0.38925 0.43773 +6 2 2 4 0 0.73844 0.64629 0.26467 0.27395 +6 3 2 3 0 0.69228 0.64001 0.24449 0.2622 +6 4 1 2 0 0.68433 0.59579 0.25212 0.29312 +6 5 1 1 1 0.69212 0.58937 0.24595 0.3045 +6 6 1 1 1 0.6607 0.53947 0.24873 0.32325 +6 7 1 1 1 0.66944 0.47849 0.24066 0.2849 +6 8 1 1 1 0.63452 0.50521 0.25215 0.28294 +6 9 1 3 0 0.65345 0.53719 0.26617 0.28657 +6 10 2 4 0 0.64699 0.52901 0.22662 0.27622 +6 11 1 1 1 0.61869 0.5252 0.2135 0.26357 +6 12 1 1 0 0.60212 0.53859 0.24008 0.24272 +6 13 1 2 1 0.65511 0.58001 0.26076 0.25046 +6 14 1 2 0 0.63622 0.5112 0.20641 0.28391 +6 15 1 2 1 0.61028 0.53644 0.2 0.26336 +6 16 1 2 1 0.60128 0.53024 0.22805 0.27728 +6 17 1 2 0 0.65173 0.56066 0.2 0.25872 +6 18 1 2 1 0.67288 0.5652 0.2 0.21137 +6 19 1 2 1 0.66971 0.49274 0.2218 0.22404 +6 20 1 2 0 0.68494 0.53305 0.22901 0.22827 +6 21 1 2 0 0.70233 0.55296 0.2461 0.22885 +6 22 2 2 1 0.70664 0.58177 0.2493 0.27281 +6 23 2 4 0 0.68894 0.55935 0.20401 0.2927 +6 24 1 2 0 0.63312 0.52932 0.2 0.25501 +6 26 2 3 1 0.67007 0.49809 0.2 0.29855 +6 27 2 1 1 0.70116 0.47996 0.20695 0.28267 +6 28 2 1 1 0.71325 0.49454 0.2 0.26681 +6 29 1 1 1 0.72645 0.43744 0.2 0.3196 +6 30 1 1 1 0.74659 0.44562 0.2 0.31482 +6 31 1 1 1 0.76222 0.42447 0.2 0.32081 +6 32 1 1 1 0.73448 0.43376 0.2 0.34296 +6 33 1 1 1 0.72797 0.43597 0.2 0.35101 +6 34 1 1 1 0.75074 0.46387 0.2 0.37183 +6 36 1 3 0 0.7328 0.45374 0.20442 0.42417 +6 37 2 4 0 0.7252 0.48332 0.2 0.40393 +6 38 1 1 1 0.73625 0.48365 0.2 0.41363 +6 39 1 4 1 0.70231 0.49133 0.21054 0.38507 +6 40 1 1 1 0.72107 0.49519 0.2 0.39185 +6 41 1 4 0 0.71986 0.48078 0.24709 0.37263 +6 42 1 1 1 0.7031 0.49023 0.2 0.3371 +6 43 1 1 1 0.72264 0.50759 0.2 0.37898 +6 44 1 1 1 0.71507 0.49493 0.20433 0.39462 +6 45 1 1 1 0.7487 0.48231 0.21516 0.36209 +6 46 1 4 1 0.77837 0.4936 0.2 0.37251 +6 47 1 1 0 0.8 0.4782 0.21425 0.36112 +6 48 1 4 0 0.79292 0.46445 0.21974 0.38153 +6 49 2 4 0 0.79228 0.43 0.21181 0.33966 +6 50 1 1 1 0.8 0.42849 0.21877 
0.30777 +6 51 1 1 1 0.8 0.41144 0.21003 0.32592 +6 52 1 1 1 0.74326 0.40335 0.2 0.33955 +6 53 1 1 0 0.75236 0.42683 0.2552 0.33861 +6 54 1 1 0 0.77285 0.43779 0.28617 0.32257 +6 55 1 2 1 0.78413 0.42864 0.31439 0.26735 +6 56 1 2 1 0.75446 0.43688 0.29642 0.21333 +6 57 1 4 0 0.75827 0.46732 0.29081 0.2 +6 58 1 2 0 0.76712 0.44268 0.29863 0.20607 +6 59 1 1 1 0.76093 0.46029 0.29629 0.22507 +6 60 1 1 0 0.75172 0.4466 0.27871 0.20603 +6 61 1 4 0 0.76716 0.45762 0.29448 0.20525 +6 62 1 2 1 0.77346 0.48289 0.27256 0.2 +6 63 1 2 0 0.78575 0.49385 0.28216 0.20283 +6 64 1 3 0 0.8 0.53941 0.2776 0.2 +6 65 1 3 0 0.79417 0.55878 0.26483 0.20866 +6 66 1 2 1 0.8 0.55414 0.30446 0.21184 +6 67 1 2 0 0.76477 0.53706 0.30028 0.21075 +6 68 2 2 0 0.79557 0.50808 0.32894 0.24184 +6 69 1 1 1 0.79729 0.50847 0.34599 0.2038 +6 70 1 1 1 0.77915 0.52111 0.36398 0.2 +6 71 1 1 1 0.75315 0.4976 0.37342 0.2 +6 72 1 4 0 0.79673 0.52013 0.36636 0.2 +6 73 1 1 1 0.79215 0.53026 0.36133 0.20624 +6 74 1 1 1 0.79701 0.4543 0.3809 0.2 +6 75 1 1 1 0.78056 0.44464 0.38713 0.2141 +6 76 1 1 1 0.76446 0.46656 0.34142 0.2 +6 77 1 3 1 0.74452 0.47535 0.3358 0.2 +6 78 2 3 0 0.76853 0.48442 0.32546 0.21197 +6 79 1 1 1 0.8 0.43554 0.32101 0.22591 +6 80 1 1 1 0.8 0.41821 0.30145 0.23128 +6 81 1 1 1 0.7939 0.41732 0.32749 0.23821 +6 82 1 3 0 0.8 0.42489 0.40595 0.23603 +6 83 1 1 1 0.772 0.37394 0.36878 0.2 +6 84 1 1 1 0.8 0.38051 0.35345 0.20285 +6 85 1 1 1 0.76923 0.37481 0.35245 0.21921 +6 86 1 4 0 0.78747 0.3609 0.3549 0.21975 +6 87 1 1 1 0.7787 0.36849 0.38117 0.21094 +6 88 1 1 1 0.77307 0.36926 0.38108 0.22898 +6 89 1 3 1 0.75185 0.354 0.37251 0.21602 +6 90 1 3 1 0.73574 0.34884 0.37385 0.23082 +6 91 1 1 1 0.75935 0.35762 0.36155 0.22633 +6 92 1 3 0 0.7433 0.35534 0.38664 0.2 +6 93 1 1 1 0.73787 0.38545 0.38875 0.24564 +6 94 1 1 1 0.75313 0.388 0.36282 0.23557 +6 95 1 1 1 0.76958 0.40559 0.35864 0.20505 +6 96 1 1 0 0.74335 0.37624 0.33644 0.21493 +6 97 1 3 1 0.76881 0.38935 0.31529 0.23183 +6 98 1 3 1 0.8 0.44251 0.3035 0.29254 +6 99 1 1 1 0.79365 0.46767 0.2849 0.29204 +6 100 1 1 1 0.76134 0.45193 0.25515 0.333 +6 101 1 1 1 0.75759 0.47852 0.28939 0.3302 +6 102 1 1 1 0.74885 0.45144 0.32843 0.29179 +6 103 1 3 0 0.746 0.47158 0.35982 0.25733 +6 104 1 3 1 0.69118 0.51695 0.38424 0.2 +6 105 1 1 1 0.70272 0.4799 0.41213 0.2 +6 106 1 1 1 0.74316 0.49883 0.42373 0.22409 +6 107 1 1 1 0.76885 0.49184 0.44116 0.2596 +6 108 1 1 1 0.72596 0.45403 0.42147 0.29699 +6 109 1 1 1 0.73449 0.46472 0.43617 0.25409 +6 110 1 3 1 0.74143 0.43229 0.45642 0.26774 +6 111 1 1 1 0.73912 0.41536 0.47221 0.28046 +6 112 1 1 1 0.76965 0.4156 0.48204 0.28448 +6 113 1 1 0 0.8 0.43366 0.4671 0.25635 +6 114 1 3 0 0.8 0.44463 0.45515 0.23894 +6 115 1 1 1 0.76283 0.42759 0.44516 0.26106 +6 116 1 3 1 0.76396 0.39602 0.43198 0.26465 +6 117 1 1 1 0.72628 0.40751 0.44361 0.23025 +6 118 1 1 1 0.71607 0.41871 0.44249 0.22063 +6 119 1 3 1 0.70541 0.43852 0.43407 0.20998 +6 120 1 3 0 0.70211 0.43268 0.39145 0.21564 +6 121 1 1 1 0.70631 0.43664 0.41911 0.21776 +6 122 1 1 0 0.68467 0.50343 0.397 0.24363 +6 123 2 1 0 0.66341 0.43698 0.40924 0.22622 +6 124 2 3 0 0.66634 0.40102 0.37127 0.24413 +6 125 1 1 1 0.67439 0.3999 0.35503 0.21975 +6 126 2 3 0 0.64118 0.41188 0.36435 0.22777 +6 127 1 4 0 0.61951 0.41806 0.29839 0.2 +6 128 1 1 1 0.61455 0.40946 0.28627 0.20417 +6 129 1 1 0 0.63085 0.36319 0.3193 0.23203 +6 130 1 1 0 0.65582 0.38843 0.35002 0.22153 +6 131 1 2 0 0.66217 0.33886 0.35366 0.2 +6 132 1 1 1 0.68178 0.35875 0.3396 0.21985 +6 133 1 1 1 0.65448 0.34927 0.361 0.21653 +6 
134 1 1 0 0.64916 0.3723 0.35015 0.2 +6 135 1 4 0 0.62925 0.37902 0.38279 0.20361 +6 136 1 1 0 0.59687 0.40627 0.42257 0.2 +6 137 1 1 1 0.60549 0.42107 0.39855 0.2 +6 138 1 1 0 0.62875 0.43311 0.41428 0.20039 +6 139 1 4 0 0.59721 0.3936 0.43017 0.2 +6 140 1 4 0 0.59071 0.34912 0.48425 0.20947 +6 141 2 3 0 0.60092 0.34219 0.47539 0.2 +6 142 1 3 0 0.59808 0.34948 0.43629 0.2 +6 143 1 1 0 0.62534 0.41234 0.3883 0.20448 +6 144 1 2 1 0.53923 0.44827 0.40399 0.20877 +6 145 1 2 1 0.54857 0.46654 0.40463 0.24354 +6 146 1 4 0 0.52858 0.4551 0.3712 0.30885 +6 147 1 4 1 0.52064 0.43257 0.35697 0.30294 +6 148 1 2 0 0.54765 0.41182 0.37944 0.29994 +6 149 1 1 0 0.54236 0.37342 0.38255 0.29 +6 150 1 4 0 0.54737 0.41134 0.43858 0.30156 +6 151 2 3 1 0.54518 0.44957 0.48541 0.29498 +6 152 2 3 0 0.54049 0.45317 0.52725 0.27868 +6 153 1 2 0 0.53167 0.48579 0.53491 0.32645 +6 154 1 3 0 0.56726 0.53268 0.52742 0.3149 +6 155 2 3 1 0.52608 0.52041 0.53536 0.36929 +6 156 2 2 1 0.5284 0.49763 0.55458 0.3797 +6 157 1 3 0 0.50124 0.48963 0.54688 0.38372 +6 158 1 2 0 0.49942 0.5344 0.5345 0.33314 +6 159 1 3 1 0.53981 0.51409 0.52742 0.3483 +6 160 2 3 0 0.53189 0.45857 0.54323 0.35816 +6 161 1 3 0 0.5403 0.41014 0.56219 0.37759 +6 162 2 3 0 0.48437 0.37569 0.56655 0.38003 +6 163 2 3 1 0.48562 0.37159 0.52849 0.40983 +6 164 2 2 1 0.51432 0.34938 0.5522 0.36539 +6 165 1 2 0 0.50373 0.31786 0.55346 0.37918 +6 166 1 3 1 0.50697 0.32369 0.53195 0.36926 +6 167 1 3 0 0.50027 0.31776 0.55123 0.40146 +6 168 2 3 1 0.49311 0.36849 0.54299 0.4182 +6 169 2 3 0 0.46946 0.39455 0.55376 0.39565 +6 170 1 2 1 0.45765 0.41354 0.53639 0.36544 +6 171 1 2 0 0.48573 0.43632 0.57664 0.31938 +6 172 2 3 0 0.49735 0.42353 0.52452 0.29386 +6 173 1 2 1 0.48042 0.42558 0.49732 0.29756 +6 174 1 4 0 0.48046 0.36183 0.5306 0.3231 +6 175 1 3 0 0.42026 0.35278 0.49506 0.30487 +6 176 2 3 0 0.35582 0.392 0.51456 0.30989 +6 177 2 3 0 0.40432 0.4218 0.49302 0.3005 +6 178 1 3 0 0.41173 0.45867 0.51064 0.34797 +6 179 1 2 1 0.41262 0.47837 0.52873 0.34487 +6 180 1 2 0 0.45323 0.49058 0.51499 0.33723 +6 181 1 2 1 0.45695 0.48239 0.52702 0.32631 +6 182 1 3 1 0.4087 0.48923 0.52477 0.29357 +6 183 1 3 1 0.37603 0.45321 0.51015 0.29681 +6 184 1 3 0 0.34802 0.45232 0.52372 0.29781 +6 185 1 4 1 0.33154 0.43127 0.55039 0.28834 +6 186 1 2 0 0.30693 0.43095 0.56428 0.31368 +6 187 2 2 0 0.32836 0.44521 0.56101 0.324 +6 188 1 1 0 0.33058 0.44933 0.59314 0.34103 +6 189 2 4 0 0.37315 0.48423 0.55358 0.32311 +6 190 2 3 1 0.38488 0.4916 0.53274 0.35724 +6 191 2 3 0 0.31661 0.52983 0.50062 0.36964 +6 192 1 1 1 0.30432 0.53627 0.48407 0.3694 +6 193 1 1 0 0.31336 0.60341 0.45643 0.38103 +6 194 2 4 0 0.2759 0.58892 0.51222 0.35864 +6 195 1 3 0 0.29853 0.59536 0.49481 0.29334 +6 196 1 1 0 0.29928 0.61646 0.47757 0.26909 +6 197 1 2 0 0.32504 0.56206 0.48146 0.26988 +6 198 1 1 1 0.31099 0.53143 0.4545 0.26122 +6 199 1 1 0 0.31326 0.55127 0.45337 0.26451 +6 200 2 4 0 0.35566 0.53366 0.46122 0.22951 +6 201 2 1 0 0.31696 0.49235 0.45925 0.21454 +7 2 1 2 1 0.73652 0.77382 0.28119 0.41581 +7 3 1 2 1 0.73617 0.72485 0.33184 0.39733 +7 4 1 2 1 0.73024 0.74529 0.34602 0.40709 +7 5 1 2 0 0.75685 0.75935 0.35679 0.43301 +7 6 1 2 1 0.72626 0.7789 0.3369 0.40037 +7 7 1 4 0 0.74553 0.76397 0.32571 0.4309 +7 8 1 3 1 0.77443 0.72212 0.35922 0.41842 +7 9 1 1 1 0.77449 0.75619 0.34035 0.4397 +7 10 1 3 0 0.8 0.7567 0.35428 0.46373 +7 11 2 3 1 0.77508 0.69086 0.37354 0.47258 +7 12 2 3 0 0.76829 0.67967 0.40001 0.50458 +7 13 1 1 0 0.76672 0.69934 0.41569 0.5421 +7 14 2 4 0 0.77127 0.71321 0.37702 
0.5448 +7 15 1 2 0 0.77979 0.68525 0.36089 0.55744 +7 16 1 1 0 0.76202 0.71828 0.41109 0.5703 +7 17 1 2 0 0.78828 0.71386 0.44286 0.60376 +7 18 2 1 1 0.78199 0.74743 0.44158 0.59931 +7 19 2 1 1 0.8 0.7754 0.47841 0.56621 +7 20 2 1 1 0.77974 0.78389 0.42621 0.54334 +7 21 2 3 0 0.77194 0.8 0.42647 0.55248 +7 22 2 1 1 0.77586 0.78019 0.42254 0.56588 +7 23 2 3 0 0.76585 0.77611 0.42221 0.54927 +7 24 2 1 1 0.76275 0.78298 0.39476 0.53483 +7 25 2 4 1 0.7644 0.8 0.36015 0.50291 +7 26 2 4 1 0.6958 0.77858 0.37691 0.47691 +7 27 2 4 1 0.68262 0.8 0.36378 0.46858 +7 28 2 4 0 0.6817 0.78741 0.34606 0.45713 +7 29 2 4 1 0.63409 0.7914 0.34804 0.42898 +7 30 2 4 0 0.62849 0.72416 0.34811 0.38981 +7 31 2 4 1 0.62123 0.72053 0.3406 0.42393 +7 32 2 4 0 0.60729 0.72962 0.3686 0.43583 +7 33 1 1 0 0.63289 0.73121 0.36294 0.44422 +7 34 1 2 1 0.66452 0.70818 0.38126 0.50299 +7 35 1 4 1 0.65276 0.69137 0.34886 0.4978 +7 36 1 4 0 0.67635 0.67695 0.36245 0.48723 +7 37 1 2 1 0.6364 0.72169 0.37522 0.46262 +7 38 1 2 0 0.62136 0.68738 0.37165 0.50667 +7 39 1 2 0 0.62684 0.66038 0.3311 0.4098 +7 40 2 4 1 0.62214 0.66569 0.30715 0.39592 +7 41 2 1 1 0.64773 0.65643 0.32032 0.41724 +7 42 2 4 0 0.62275 0.63917 0.30505 0.42642 +7 43 2 2 0 0.62059 0.60141 0.28548 0.37703 +7 44 2 4 0 0.61911 0.58772 0.32664 0.37639 +7 45 1 1 1 0.59969 0.56746 0.32019 0.41392 +7 46 1 3 0 0.60794 0.56318 0.33423 0.41813 +7 47 1 1 1 0.58478 0.55692 0.30522 0.42107 +7 48 1 1 1 0.59251 0.52936 0.31046 0.42059 +7 49 1 1 0 0.62059 0.52717 0.28554 0.43034 +7 50 1 3 0 0.6537 0.58194 0.31337 0.4118 +7 51 1 1 1 0.67234 0.59347 0.33659 0.42892 +7 52 1 4 0 0.68842 0.60563 0.31471 0.44322 +7 53 1 3 0 0.64103 0.6188 0.3388 0.46047 +7 54 1 1 0 0.64114 0.62285 0.37748 0.42213 +7 55 1 3 0 0.59619 0.62455 0.40678 0.41438 +7 56 2 4 0 0.61538 0.57985 0.42666 0.43971 +7 57 2 3 0 0.60028 0.56839 0.42793 0.45859 +7 58 1 2 1 0.60161 0.55339 0.43555 0.46169 +7 59 1 4 0 0.56162 0.53272 0.42529 0.4359 +7 60 1 2 0 0.55868 0.52705 0.4066 0.45736 +7 61 1 4 1 0.58758 0.56185 0.43095 0.48555 +7 62 1 2 1 0.63743 0.55867 0.43084 0.49401 +7 63 1 2 0 0.63078 0.52857 0.47546 0.4925 +7 64 2 1 1 0.66642 0.52672 0.47409 0.51444 +7 65 2 4 1 0.63518 0.50386 0.52161 0.46751 +7 66 2 4 1 0.64562 0.46239 0.51872 0.47957 +7 67 2 4 0 0.64799 0.47066 0.52357 0.45758 +7 68 2 4 0 0.667 0.46017 0.53543 0.48102 +7 69 1 3 0 0.63757 0.46475 0.51101 0.47566 +7 70 1 1 0 0.64153 0.46559 0.5019 0.49674 +7 71 1 2 1 0.65082 0.46107 0.50177 0.46355 +7 72 1 2 0 0.69254 0.45669 0.48715 0.4624 +7 73 1 2 1 0.70784 0.45809 0.45837 0.45079 +7 74 2 4 1 0.68494 0.50599 0.4562 0.46241 +7 75 1 2 1 0.665 0.48582 0.41982 0.45064 +7 76 1 2 0 0.62657 0.47462 0.40381 0.49046 +7 77 1 2 1 0.63548 0.45809 0.43495 0.51459 +7 78 1 2 1 0.62649 0.45812 0.44179 0.52481 +7 79 1 2 0 0.63757 0.44622 0.43598 0.49922 +7 80 1 4 1 0.67383 0.43832 0.4147 0.49838 +7 81 2 4 0 0.67993 0.40577 0.46088 0.45564 +7 82 1 2 0 0.67973 0.40914 0.49205 0.4278 +7 83 1 1 0 0.70219 0.4152 0.51664 0.38622 +7 84 2 3 0 0.67299 0.4017 0.52281 0.38776 +7 85 2 4 0 0.68044 0.43412 0.51532 0.37247 +7 86 1 2 0 0.68635 0.37774 0.51596 0.37094 +7 87 1 1 0 0.70487 0.42532 0.50949 0.33338 +7 88 1 1 1 0.71384 0.40387 0.57109 0.33162 +7 89 1 3 1 0.71681 0.41858 0.56217 0.35235 +7 90 1 1 1 0.71665 0.43533 0.56571 0.3589 +7 91 1 1 0 0.69869 0.4295 0.56662 0.34501 +7 92 1 1 1 0.6688 0.42876 0.56326 0.33795 +7 93 1 1 1 0.65901 0.40379 0.51785 0.31292 +7 94 1 3 0 0.69655 0.47237 0.52794 0.28493 +7 95 1 4 0 0.70413 0.48688 0.5342 0.28189 +7 96 1 1 1 0.72048 0.47089 
0.60726 0.28497 +7 97 1 1 1 0.72608 0.48122 0.62543 0.32596 +7 98 1 1 1 0.719 0.52294 0.63479 0.31364 +7 99 1 1 0 0.75845 0.54401 0.6308 0.30227 +7 100 1 1 1 0.77583 0.50055 0.58914 0.31341 +7 101 1 1 1 0.7629 0.5101 0.60475 0.30806 +7 102 1 1 1 0.75136 0.53288 0.60111 0.28629 +7 103 1 4 0 0.7449 0.53476 0.63764 0.26279 +7 104 1 1 1 0.75735 0.5178 0.60022 0.26465 +7 105 1 1 1 0.74006 0.5208 0.6246 0.28593 +7 106 1 4 0 0.74655 0.50826 0.62744 0.27347 +7 107 2 1 0 0.73213 0.49834 0.6107 0.26635 +7 108 1 1 0 0.74252 0.49545 0.58829 0.29655 +7 109 2 3 1 0.75246 0.50399 0.57983 0.30671 +7 110 2 3 1 0.75356 0.53978 0.58352 0.3279 +7 111 2 3 1 0.74623 0.54892 0.61842 0.38178 +7 112 2 1 1 0.73508 0.53497 0.62784 0.38015 +7 113 2 3 1 0.67071 0.53694 0.60079 0.38059 +7 114 2 3 1 0.65294 0.56661 0.61227 0.41993 +7 115 2 3 0 0.6813 0.53256 0.57688 0.42939 +7 116 2 3 0 0.65403 0.48876 0.56384 0.42955 +7 117 1 2 0 0.65483 0.45313 0.58652 0.44558 +7 118 2 3 0 0.62252 0.40745 0.56623 0.45398 +7 119 2 4 0 0.61672 0.43266 0.54611 0.40321 +7 120 1 1 0 0.62597 0.47728 0.55853 0.42995 +7 121 1 1 1 0.62411 0.52183 0.51648 0.40554 +7 122 1 1 0 0.61963 0.46424 0.51519 0.40227 +7 123 1 4 1 0.64348 0.4852 0.5114 0.42042 +7 124 1 1 1 0.64939 0.459 0.51596 0.44302 +7 125 1 1 1 0.62903 0.46474 0.5682 0.42705 +7 126 1 1 1 0.64059 0.44304 0.54478 0.43085 +7 127 1 1 1 0.64861 0.45858 0.59115 0.45264 +7 128 1 1 1 0.67555 0.44122 0.57081 0.45509 +7 129 1 1 1 0.69338 0.48416 0.57055 0.41559 +7 130 1 3 1 0.706 0.52255 0.58431 0.44688 +7 131 1 1 1 0.68307 0.53763 0.58693 0.44044 +7 132 1 1 1 0.74351 0.54766 0.56273 0.43455 +7 133 1 1 1 0.79692 0.52775 0.58414 0.41248 +7 134 1 3 1 0.8 0.54525 0.59477 0.39839 +7 135 1 1 1 0.78379 0.51418 0.54199 0.39777 +7 136 1 1 1 0.76691 0.50612 0.56284 0.41139 +7 137 1 1 1 0.77781 0.51617 0.5476 0.36561 +7 138 1 1 0 0.8 0.52935 0.55226 0.38513 +7 139 1 1 1 0.77705 0.5329 0.50855 0.39285 +7 140 1 1 1 0.8 0.53265 0.55625 0.42593 +7 141 1 1 1 0.78731 0.54815 0.55623 0.4615 +7 142 1 3 0 0.8 0.5041 0.54073 0.42958 +7 143 1 1 1 0.79062 0.47687 0.57157 0.44331 +7 144 1 1 1 0.79089 0.51494 0.59257 0.40092 +7 145 1 4 0 0.8 0.54651 0.57069 0.40706 +7 146 1 3 1 0.8 0.51781 0.5596 0.4409 +7 147 1 1 1 0.8 0.51138 0.56482 0.42916 +7 148 1 1 1 0.79077 0.51298 0.5507 0.4566 +7 149 1 1 1 0.8 0.50618 0.5763 0.46124 +7 150 1 1 1 0.78315 0.5093 0.54557 0.41467 +7 151 1 1 1 0.76184 0.50378 0.51372 0.4155 +7 152 1 1 1 0.8 0.51599 0.54876 0.4234 +7 153 1 4 1 0.77191 0.50295 0.54092 0.41225 +7 154 1 1 0 0.73744 0.53739 0.56046 0.41314 +7 155 1 1 1 0.73295 0.52323 0.60109 0.39622 +7 156 1 1 1 0.71701 0.56102 0.57213 0.37529 +7 157 1 1 1 0.68905 0.56982 0.58759 0.36813 +7 158 1 1 1 0.66678 0.5476 0.54347 0.39532 +7 159 1 1 1 0.64494 0.49831 0.56258 0.41057 +7 160 1 4 1 0.67528 0.52884 0.5608 0.42352 +7 161 1 1 1 0.65777 0.52094 0.52869 0.40421 +7 162 1 4 1 0.66188 0.5178 0.51787 0.40349 +7 163 1 1 1 0.6381 0.5756 0.48972 0.38619 +7 164 1 4 0 0.6234 0.58792 0.50411 0.40694 +7 165 1 1 0 0.59122 0.56722 0.51712 0.40979 +7 166 1 1 1 0.56838 0.58774 0.50826 0.41299 +7 167 1 1 1 0.5784 0.59008 0.54381 0.40637 +7 168 1 1 0 0.60385 0.60007 0.53611 0.4377 +7 169 1 1 0 0.58775 0.58172 0.5346 0.43027 +7 170 2 2 1 0.62321 0.55708 0.52841 0.46651 +7 171 1 3 0 0.63522 0.53954 0.49567 0.44104 +7 172 1 2 1 0.61149 0.53715 0.49165 0.39184 +7 173 1 2 1 0.59542 0.5716 0.50583 0.36408 +7 174 1 2 1 0.59577 0.55592 0.49924 0.35093 +7 175 1 2 0 0.57984 0.54968 0.50682 0.35664 +7 176 1 3 0 0.55362 0.52279 0.50539 0.38891 +7 177 1 2 0 
0.53257 0.51765 0.49096 0.37394 +7 178 2 3 1 0.51265 0.48652 0.47513 0.33797 +7 179 1 1 1 0.54152 0.48191 0.49638 0.28258 +7 180 2 2 0 0.51037 0.44645 0.50537 0.29273 +7 181 2 4 0 0.51899 0.47024 0.49558 0.30212 +7 182 2 3 0 0.55107 0.44902 0.49999 0.31915 +7 184 2 3 0 0.53488 0.41045 0.50085 0.24677 +7 185 1 2 1 0.51874 0.40118 0.4794 0.23449 +7 186 1 4 0 0.50501 0.35356 0.48161 0.21752 +7 187 1 2 0 0.51149 0.30729 0.4853 0.24047 +7 188 1 3 1 0.51032 0.32082 0.51876 0.24294 +7 189 1 3 1 0.49661 0.33353 0.54357 0.22793 +7 190 2 2 0 0.507 0.32073 0.57431 0.22494 +7 191 2 3 0 0.52917 0.31669 0.53771 0.22422 +7 192 2 4 0 0.4745 0.28164 0.58907 0.23844 +7 193 1 3 0 0.46175 0.31539 0.56068 0.25404 +7 194 1 1 1 0.46292 0.35015 0.49346 0.26066 +7 195 1 1 1 0.47478 0.30222 0.49708 0.24947 +7 196 1 1 1 0.45377 0.31007 0.50207 0.23864 +7 197 1 1 1 0.43098 0.27325 0.5128 0.2 +7 198 1 3 1 0.44254 0.29017 0.50301 0.2 +7 199 1 1 0 0.4086 0.28842 0.50102 0.20853 +7 200 2 3 1 0.40769 0.24911 0.51045 0.20608 +7 201 2 3 1 0.41026 0.23245 0.5347 0.2 +8 2 2 1 1 0.72272 0.36159 0.66489 0.27837 +8 3 1 2 1 0.71968 0.36712 0.68895 0.30518 +8 4 2 4 0 0.72708 0.34154 0.69245 0.3467 +8 5 2 3 0 0.75828 0.38545 0.69597 0.34304 +8 6 1 2 1 0.76986 0.38524 0.72058 0.32163 +8 7 1 2 1 0.76545 0.40963 0.73387 0.34507 +8 8 1 2 1 0.7734 0.39018 0.76278 0.31589 +8 9 1 2 0 0.78821 0.40092 0.72354 0.33535 +8 10 2 4 0 0.74599 0.38238 0.71034 0.33525 +8 11 1 2 0 0.73145 0.3622 0.71513 0.34081 +8 12 1 1 1 0.76426 0.39595 0.77204 0.32657 +8 13 2 2 1 0.76519 0.34091 0.77712 0.31212 +8 14 2 2 0 0.8 0.35002 0.71746 0.31764 +8 15 2 4 0 0.8 0.39403 0.75616 0.2627 +8 16 1 2 1 0.7785 0.35638 0.72783 0.28618 +8 17 1 4 0 0.75761 0.35088 0.73161 0.26425 +8 18 1 2 0 0.71263 0.33098 0.77363 0.21803 +8 19 1 1 1 0.71657 0.35143 0.77443 0.20496 +8 20 1 1 1 0.72809 0.39404 0.75392 0.22908 +8 21 1 4 0 0.71511 0.39616 0.74218 0.2512 +8 22 1 4 0 0.71898 0.42119 0.73278 0.20837 +8 23 1 1 1 0.69772 0.43582 0.74177 0.2 +8 24 1 1 0 0.68991 0.46405 0.70513 0.20777 +8 25 1 1 0 0.68773 0.42401 0.68911 0.2 +8 26 1 1 1 0.72672 0.40715 0.70028 0.2 +8 27 1 1 1 0.73579 0.46605 0.64975 0.22909 +8 28 1 1 1 0.78535 0.49908 0.62345 0.29034 +8 29 1 1 1 0.78645 0.53396 0.63025 0.26708 +8 30 1 1 0 0.8 0.57474 0.62491 0.27928 +8 31 1 2 0 0.8 0.55664 0.59219 0.25888 +8 32 1 1 1 0.77736 0.51533 0.56281 0.28132 +8 33 1 3 1 0.78572 0.49382 0.58043 0.28552 +8 34 1 2 1 0.8 0.48829 0.60231 0.26588 +8 35 1 2 0 0.7994 0.51443 0.58259 0.22521 +8 36 1 2 1 0.8 0.51975 0.57607 0.27626 +8 37 1 1 1 0.73863 0.45143 0.56481 0.30938 +8 38 1 3 0 0.74349 0.46602 0.53322 0.32414 +8 39 2 1 1 0.78366 0.45325 0.49959 0.3419 +8 40 2 3 0 0.75988 0.47175 0.49019 0.37422 +8 41 2 4 0 0.75219 0.46767 0.48061 0.37156 +8 42 1 1 1 0.79631 0.46215 0.50231 0.3728 +8 43 1 1 1 0.79028 0.4427 0.51929 0.40784 +8 44 1 4 1 0.77799 0.41938 0.46929 0.44665 +8 45 1 1 1 0.8 0.46747 0.44668 0.44329 +8 46 1 1 1 0.7941 0.42631 0.45533 0.45917 +8 47 1 1 1 0.7867 0.45833 0.48318 0.44614 +8 48 1 2 1 0.78057 0.47779 0.49572 0.46559 +8 49 1 2 1 0.75523 0.4499 0.52687 0.44407 +8 50 1 1 1 0.76751 0.45794 0.52563 0.46388 +8 51 1 4 0 0.73705 0.40908 0.55823 0.47995 +8 52 1 1 1 0.74353 0.3851 0.57003 0.45614 +8 53 1 1 1 0.74428 0.35783 0.5685 0.50084 +8 54 1 1 1 0.73822 0.34854 0.55082 0.48659 +8 55 1 1 0 0.70796 0.35846 0.56603 0.50308 +8 56 1 1 1 0.70021 0.33682 0.55176 0.50902 +8 57 2 4 0 0.68065 0.38184 0.54209 0.51248 +8 58 1 1 0 0.70286 0.38027 0.55712 0.5348 +8 59 1 3 1 0.73473 0.36571 0.56786 0.53329 +8 60 1 3 0 0.73215 
0.39102 0.57982 0.55559 +8 61 2 1 0 0.75962 0.3911 0.54845 0.54965 +8 62 2 4 0 0.8 0.44048 0.5297 0.53859 +8 63 1 1 1 0.8 0.40466 0.52765 0.54916 +8 64 1 1 0 0.77938 0.41566 0.5246 0.59296 +8 65 1 3 0 0.75575 0.37063 0.51426 0.60145 +8 66 1 4 0 0.74651 0.40092 0.5261 0.58375 +8 67 1 1 0 0.72621 0.37283 0.53563 0.56422 +8 68 1 1 1 0.75576 0.38204 0.53792 0.55336 +8 69 1 1 0 0.75841 0.41184 0.53299 0.53414 +8 70 1 4 1 0.77846 0.39225 0.51454 0.58763 +8 71 1 2 1 0.74423 0.38544 0.5424 0.58411 +8 72 1 1 1 0.73713 0.42122 0.5801 0.57288 +8 73 1 4 1 0.74995 0.41 0.55732 0.61644 +8 74 1 1 1 0.71944 0.44072 0.59852 0.63972 +8 75 1 1 1 0.70632 0.43435 0.62256 0.67195 +8 76 1 1 0 0.7103 0.46444 0.61044 0.66563 +8 77 1 1 1 0.7446 0.4446 0.62344 0.68054 +8 78 1 1 1 0.75614 0.41919 0.6287 0.68762 +8 79 1 1 1 0.75506 0.38343 0.62226 0.65367 +8 80 1 1 1 0.75653 0.38598 0.60912 0.66087 +8 81 1 4 0 0.73538 0.42836 0.62309 0.66879 +8 82 1 1 1 0.75823 0.44314 0.6169 0.62751 +8 83 2 4 0 0.79074 0.456 0.63763 0.61644 +8 84 1 1 1 0.78747 0.44861 0.65821 0.61868 +8 85 1 1 0 0.79994 0.44257 0.63397 0.61364 +8 86 1 4 0 0.79493 0.44211 0.60813 0.59908 +8 87 1 1 0 0.79839 0.40118 0.54373 0.57622 +8 88 1 3 1 0.78054 0.38787 0.53546 0.57043 +8 89 1 2 0 0.8 0.40677 0.51008 0.55589 +8 90 1 4 0 0.8 0.39744 0.49598 0.54377 +8 91 1 1 1 0.76859 0.38977 0.49369 0.54716 +8 92 1 1 1 0.8 0.41353 0.49306 0.53437 +8 93 1 1 1 0.74998 0.4016 0.48395 0.56719 +8 94 1 1 1 0.71606 0.42625 0.42105 0.54828 +8 95 1 1 0 0.76205 0.4315 0.36728 0.54724 +8 96 1 1 1 0.76019 0.45926 0.40119 0.499 +8 97 1 1 1 0.72664 0.45703 0.39698 0.49327 +8 98 2 3 0 0.72215 0.47076 0.34439 0.49322 +8 99 1 3 1 0.69887 0.45542 0.28961 0.49125 +8 100 1 3 0 0.7013 0.44834 0.24887 0.485 +8 101 1 3 0 0.69343 0.45486 0.20955 0.4761 +8 102 1 1 1 0.65582 0.46996 0.2 0.46113 +8 103 1 1 0 0.63285 0.47212 0.2 0.48287 +8 104 1 1 1 0.62979 0.47559 0.2 0.45542 +8 105 1 1 1 0.63382 0.46579 0.2 0.461 +8 106 2 4 1 0.61825 0.46631 0.2 0.50011 +8 107 1 1 0 0.58195 0.48072 0.21164 0.50888 +8 108 1 2 1 0.58192 0.47029 0.26596 0.54347 +8 109 1 4 0 0.54768 0.47038 0.2649 0.52854 +8 110 1 1 0 0.60211 0.48349 0.22934 0.53953 +8 111 1 1 1 0.59398 0.44655 0.23532 0.53892 +8 112 2 3 0 0.57221 0.49309 0.24537 0.52478 +8 113 2 3 0 0.59708 0.47542 0.27066 0.53401 +8 114 1 1 1 0.5744 0.44636 0.29851 0.51063 +8 115 1 4 1 0.58123 0.42844 0.28975 0.5042 +8 116 1 1 0 0.55918 0.40677 0.30295 0.48247 +8 117 1 4 0 0.60221 0.39264 0.25755 0.48985 +8 118 1 1 1 0.62322 0.38096 0.25997 0.47628 +8 119 1 1 0 0.60216 0.40376 0.31645 0.47469 +8 120 1 1 1 0.63668 0.38762 0.32407 0.50604 +8 121 1 4 0 0.64281 0.37437 0.29351 0.49155 +8 122 1 1 1 0.64757 0.40178 0.30047 0.53058 +8 123 1 1 1 0.64206 0.48089 0.34117 0.51659 +8 124 1 1 0 0.61338 0.48418 0.34044 0.52231 +8 125 1 3 0 0.63249 0.48818 0.33356 0.54288 +8 126 1 1 0 0.64622 0.50893 0.3428 0.56352 +8 127 2 4 0 0.65173 0.5071 0.34521 0.56125 +8 128 1 1 1 0.6364 0.49866 0.31433 0.49489 +8 129 1 1 1 0.6426 0.51138 0.30761 0.49526 +8 130 1 3 0 0.65601 0.51237 0.33386 0.49921 +8 132 1 1 1 0.70711 0.53832 0.30634 0.51059 +8 133 1 1 0 0.69874 0.56144 0.3293 0.51964 +8 134 1 4 1 0.67403 0.56679 0.34472 0.55133 +8 135 1 4 0 0.6696 0.58442 0.34797 0.54135 +8 136 1 1 1 0.63877 0.59711 0.34372 0.51172 +8 137 1 1 1 0.61784 0.63393 0.36052 0.52338 +8 138 2 4 1 0.68505 0.66969 0.36838 0.51195 +8 139 1 1 1 0.66854 0.66013 0.37246 0.52443 +8 140 1 1 0 0.67228 0.64484 0.38002 0.51714 +8 141 1 1 0 0.65464 0.58997 0.39632 0.56173 +8 142 1 1 0 0.61896 0.5957 0.37331 0.58496 
+8 143 1 1 1 0.64018 0.59859 0.35183 0.58706 +8 144 1 4 0 0.62543 0.63388 0.33652 0.561 +8 145 1 3 1 0.61792 0.61865 0.30557 0.56444 +8 146 1 1 1 0.64088 0.65857 0.36269 0.55997 +8 147 1 3 1 0.66423 0.67844 0.31305 0.55433 +8 148 1 1 1 0.6649 0.75876 0.30007 0.56184 +8 149 1 1 0 0.64727 0.70167 0.27646 0.51868 +8 150 2 4 1 0.66209 0.68147 0.25647 0.51973 +8 151 1 1 1 0.64634 0.65699 0.26901 0.49803 +8 152 1 4 1 0.65724 0.65722 0.25335 0.4673 +8 153 1 2 1 0.65488 0.6575 0.26338 0.47573 +8 154 1 3 0 0.65342 0.68414 0.25197 0.49104 +8 155 2 1 1 0.64961 0.69631 0.27227 0.50288 +8 156 1 3 0 0.64466 0.64614 0.29655 0.50133 +8 157 1 1 1 0.63516 0.66127 0.26392 0.45544 +8 158 1 1 1 0.65012 0.69597 0.26444 0.44891 +8 159 1 1 1 0.63283 0.72946 0.25341 0.44042 +8 160 1 1 0 0.62373 0.74285 0.26646 0.4657 +8 161 1 2 0 0.64012 0.6942 0.25127 0.42213 +8 162 1 1 1 0.62625 0.72194 0.27347 0.38954 +8 163 1 1 1 0.65363 0.73949 0.25458 0.42168 +8 164 1 1 1 0.62054 0.70957 0.25526 0.39074 +8 165 2 4 1 0.61797 0.72743 0.24834 0.39418 +8 166 1 1 1 0.62556 0.73738 0.22064 0.40057 +8 167 1 1 0 0.60339 0.71756 0.2 0.39676 +8 168 1 1 1 0.58168 0.71372 0.21836 0.38182 +8 169 1 4 0 0.62985 0.75977 0.2 0.38573 +8 170 1 1 1 0.65444 0.72687 0.2 0.44398 +8 171 1 1 0 0.67974 0.71431 0.20057 0.4605 +8 172 1 1 1 0.72836 0.69795 0.2 0.42323 +8 173 1 1 1 0.69442 0.67182 0.2 0.41072 +8 174 1 1 0 0.68041 0.71198 0.2 0.42484 +8 175 1 1 1 0.68333 0.72709 0.2 0.39269 +8 176 1 1 1 0.67668 0.71537 0.2 0.38646 +8 177 1 1 0 0.65651 0.72791 0.2 0.39812 +8 178 1 4 1 0.711 0.75634 0.2 0.38376 +8 179 1 1 1 0.71544 0.72615 0.2 0.34603 +8 180 1 1 0 0.72974 0.74606 0.22999 0.31491 +8 181 2 2 1 0.70108 0.7698 0.25176 0.30004 +8 182 1 1 1 0.67697 0.74005 0.24558 0.29188 +8 183 1 1 1 0.76022 0.73588 0.27982 0.29037 +8 184 2 2 1 0.72328 0.73178 0.29401 0.27617 +8 185 2 3 0 0.73633 0.7141 0.3185 0.32137 +8 186 1 1 1 0.71002 0.7047 0.3316 0.28957 +8 187 1 1 0 0.75527 0.70136 0.34886 0.30559 +8 188 1 1 1 0.78373 0.68291 0.3439 0.28045 +8 189 1 1 0 0.7765 0.64914 0.36649 0.25248 +8 190 1 1 1 0.78414 0.6469 0.3685 0.25279 +8 191 1 3 1 0.78078 0.61753 0.33387 0.26636 +8 192 1 1 1 0.77767 0.63941 0.36395 0.27625 +8 193 1 3 0 0.76366 0.6321 0.35046 0.24559 +8 194 1 1 1 0.7597 0.64028 0.36988 0.21377 +8 195 1 4 0 0.73823 0.66169 0.32258 0.2 +8 196 1 1 1 0.73748 0.64822 0.31795 0.2 +8 197 1 1 1 0.75941 0.62476 0.2908 0.2 +8 198 1 1 1 0.77609 0.66111 0.32098 0.2278 +8 199 1 1 0 0.77967 0.62898 0.34737 0.22289 +8 200 1 4 0 0.73928 0.63717 0.33916 0.22653 +8 201 1 1 1 0.78518 0.6633 0.30215 0.23086 +9 1 1 3 0 0.75589 0.74958 0.4954 0.50785 +9 2 1 4 0 0.77074 0.74753 0.49091 0.51417 +9 3 2 3 0 0.75312 0.7488 0.51292 0.51973 +9 4 2 4 1 0.75142 0.73553 0.49914 0.51276 +9 5 2 2 1 0.75532 0.74618 0.51337 0.54812 +9 6 2 4 0 0.7778 0.75498 0.50815 0.57844 +9 7 2 2 1 0.8 0.79755 0.54204 0.61636 +9 8 2 3 1 0.8 0.8 0.5334 0.58682 +9 9 1 2 1 0.79834 0.8 0.49191 0.61194 +9 10 1 2 1 0.8 0.8 0.49474 0.59715 +9 11 1 3 0 0.79249 0.8 0.51127 0.59645 +9 12 1 2 1 0.75369 0.8 0.50994 0.59665 +9 13 2 4 0 0.76151 0.79994 0.53482 0.58578 +9 14 1 2 1 0.74082 0.8 0.53074 0.54457 +9 15 1 3 0 0.72549 0.79943 0.52863 0.5383 +9 16 1 2 1 0.70023 0.8 0.53768 0.52082 +9 17 1 3 1 0.73274 0.8 0.53512 0.50637 +9 18 1 2 1 0.72325 0.8 0.51422 0.47233 +9 19 1 2 1 0.6987 0.7603 0.52669 0.57786 +9 20 1 2 1 0.70606 0.7665 0.50784 0.54725 +9 21 1 3 0 0.74568 0.8 0.47528 0.48847 +9 22 1 2 1 0.73602 0.71019 0.47623 0.46379 +9 23 1 2 1 0.6995 0.78311 0.50349 0.45863 +9 24 2 3 0 0.73815 0.78553 0.48827 
0.51755 +9 25 1 2 0 0.73645 0.8 0.46058 0.51965 +9 26 1 2 1 0.70921 0.8 0.43961 0.46746 +9 27 1 2 1 0.70937 0.79914 0.40737 0.52818 +9 28 1 2 1 0.72713 0.79376 0.409 0.51479 +9 29 1 2 1 0.74288 0.8 0.41613 0.5544 +9 30 1 2 1 0.7556 0.78558 0.39214 0.61406 +9 31 1 2 1 0.76292 0.8 0.40288 0.60137 +9 32 1 2 1 0.78922 0.8 0.38361 0.56391 +9 33 2 4 1 0.8 0.75956 0.33854 0.54354 +9 34 2 1 1 0.8 0.74209 0.34263 0.53183 +9 35 2 1 1 0.76504 0.6834 0.30965 0.49896 +9 36 2 4 0 0.77918 0.67152 0.31268 0.50109 +9 37 1 4 0 0.79817 0.67787 0.33283 0.51364 +9 38 1 3 0 0.8 0.65923 0.32099 0.51672 +9 39 1 2 1 0.79847 0.66575 0.28866 0.49839 +9 40 1 2 1 0.8 0.62933 0.24114 0.46222 +9 41 1 3 1 0.8 0.58321 0.25503 0.45842 +9 42 1 3 0 0.8 0.55711 0.26149 0.43178 +9 43 1 2 0 0.76933 0.54428 0.26785 0.45191 +9 44 1 2 0 0.76406 0.56701 0.25289 0.4396 +9 45 2 1 1 0.74421 0.53342 0.26598 0.44684 +9 46 2 3 1 0.8 0.52758 0.2609 0.45764 +9 47 2 3 1 0.77372 0.53752 0.28265 0.40529 +9 48 2 3 1 0.7658 0.52954 0.28731 0.40651 +9 49 2 3 1 0.78144 0.52903 0.29352 0.37285 +9 50 2 3 0 0.76062 0.50885 0.27632 0.38626 +9 51 1 2 1 0.77829 0.50797 0.25896 0.43098 +9 52 1 3 0 0.79212 0.48195 0.2946 0.42884 +9 53 2 3 1 0.7989 0.48888 0.27181 0.44709 +9 54 1 1 0 0.8 0.47165 0.28961 0.47398 +9 55 2 3 0 0.76304 0.48877 0.28486 0.45241 +9 56 2 3 1 0.76383 0.48391 0.2639 0.48111 +9 57 2 3 0 0.77142 0.42317 0.21136 0.47006 +9 58 1 3 0 0.78932 0.43569 0.20504 0.4815 +9 59 1 4 1 0.8 0.44924 0.21098 0.46179 +9 60 1 2 0 0.8 0.42929 0.2 0.49275 +9 61 1 2 0 0.8 0.4061 0.20125 0.50836 +9 62 2 2 1 0.7939 0.43189 0.2 0.45065 +9 63 2 2 0 0.77599 0.42752 0.2 0.43872 +9 64 1 2 1 0.7732 0.41677 0.22647 0.40868 +9 65 2 4 0 0.78238 0.43176 0.22137 0.41566 +9 66 2 3 0 0.76354 0.45853 0.24257 0.41647 +9 67 1 4 0 0.74726 0.45721 0.2302 0.42024 +9 68 1 3 1 0.71946 0.39431 0.25842 0.3981 +9 69 1 1 1 0.7615 0.3867 0.23508 0.41493 +9 70 1 3 0 0.769 0.40698 0.21787 0.4461 +9 71 1 1 1 0.7758 0.38461 0.27463 0.50449 +9 72 2 4 1 0.75578 0.3849 0.28129 0.51699 +9 73 1 4 0 0.75883 0.37006 0.27127 0.51168 +9 74 1 1 0 0.77596 0.34578 0.20299 0.4647 +9 75 2 3 0 0.79787 0.36426 0.2 0.45844 +9 76 1 1 1 0.79981 0.34511 0.20752 0.45614 +9 77 2 4 1 0.8 0.27499 0.2198 0.4812 +9 78 1 1 1 0.8 0.27516 0.20862 0.49358 +9 79 2 1 1 0.8 0.24672 0.2 0.47946 +9 80 2 1 1 0.76502 0.24829 0.21118 0.4635 +9 81 2 4 0 0.77632 0.27321 0.20222 0.45215 +9 82 2 3 1 0.77724 0.24133 0.22068 0.46303 +9 83 2 3 0 0.75954 0.25195 0.2 0.46168 +9 84 2 4 1 0.75151 0.23705 0.22881 0.49423 +9 85 1 1 1 0.75631 0.2 0.2 0.44735 +9 86 1 1 1 0.7591 0.2 0.20426 0.48861 +9 87 1 1 1 0.69645 0.2 0.21528 0.50524 +9 88 1 4 1 0.64578 0.2 0.2 0.51599 +9 89 1 1 0 0.61847 0.24198 0.2 0.51325 +9 90 1 4 1 0.60891 0.22231 0.20642 0.52709 +9 91 2 1 1 0.56935 0.2 0.2 0.53635 +9 92 2 4 0 0.59362 0.22588 0.20084 0.50491 +9 93 1 4 0 0.58413 0.22833 0.20064 0.52309 +9 94 1 1 0 0.59485 0.28355 0.2 0.53893 +9 95 1 3 0 0.63423 0.26507 0.2 0.52302 +9 96 2 3 0 0.60383 0.2807 0.2 0.51331 +9 97 1 2 0 0.58153 0.28407 0.2 0.50771 +9 98 1 3 0 0.58763 0.30448 0.20177 0.51951 +9 99 1 1 0 0.60168 0.29137 0.22474 0.5328 +9 100 2 4 0 0.60534 0.30633 0.22142 0.5267 +9 101 2 4 0 0.61744 0.32021 0.2 0.51496 +9 102 1 2 0 0.60371 0.33654 0.2 0.48967 +9 103 1 2 1 0.61568 0.31611 0.24811 0.48848 +9 104 2 2 1 0.64658 0.29741 0.23902 0.49939 +9 105 2 4 0 0.63724 0.34588 0.24255 0.54398 +9 106 1 3 1 0.68848 0.34246 0.21635 0.59986 +9 107 1 2 0 0.70055 0.35615 0.22945 0.65007 +9 108 1 2 1 0.72668 0.4019 0.22144 0.67431 +9 109 1 2 0 0.73614 0.37601 
0.22303 0.65517 +9 110 2 4 0 0.70871 0.38318 0.21633 0.64814 +9 111 2 3 0 0.72082 0.4259 0.22911 0.64106 +9 112 1 1 1 0.74365 0.39062 0.2159 0.67835 +9 113 2 4 0 0.74488 0.42965 0.21564 0.64292 +9 114 1 1 0 0.76232 0.43526 0.23636 0.66444 +9 115 2 1 1 0.77108 0.43833 0.21982 0.72201 +9 116 1 4 1 0.7764 0.38469 0.2197 0.68629 +9 117 2 4 1 0.76204 0.37327 0.22346 0.73047 +9 118 2 4 1 0.73289 0.37751 0.24291 0.76032 +9 119 2 4 1 0.68179 0.37169 0.26213 0.7443 +9 120 2 4 1 0.66835 0.38497 0.29941 0.77099 +9 121 2 1 0 0.69142 0.35369 0.32598 0.75279 +9 122 2 4 1 0.67488 0.37777 0.34756 0.7462 +9 123 2 2 0 0.68119 0.34416 0.31133 0.73815 +9 124 2 4 1 0.66546 0.35682 0.29042 0.77882 +9 125 2 4 0 0.67253 0.39233 0.31288 0.8 +9 127 1 1 1 0.66914 0.36629 0.3453 0.73957 +9 128 2 4 1 0.61275 0.37034 0.35161 0.75057 +9 129 2 4 1 0.62213 0.36436 0.33584 0.72704 +9 130 2 4 1 0.62269 0.34953 0.30498 0.76824 +9 131 1 2 0 0.60851 0.32652 0.30339 0.72964 +9 132 2 4 1 0.60786 0.32857 0.29987 0.72062 +9 133 2 4 1 0.58617 0.33267 0.3049 0.69372 +9 134 2 1 1 0.56321 0.33668 0.30934 0.63765 +9 135 2 1 1 0.56034 0.34 0.32951 0.62249 +9 136 2 4 1 0.59695 0.35349 0.33372 0.64054 +9 137 2 4 0 0.5942 0.33099 0.2794 0.68589 +9 138 2 4 1 0.55607 0.27542 0.26988 0.68944 +9 139 2 1 0 0.55672 0.2687 0.26835 0.71414 +9 140 2 4 1 0.561 0.27377 0.26538 0.71706 +9 141 2 4 1 0.57517 0.25666 0.28595 0.71656 +9 142 2 1 1 0.61529 0.2424 0.26603 0.70067 +9 143 2 4 1 0.64528 0.27415 0.22106 0.73397 +9 144 2 1 0 0.61906 0.28121 0.23817 0.75352 +9 145 2 4 1 0.66645 0.25274 0.27652 0.78179 +9 146 2 2 1 0.63504 0.22433 0.27043 0.78352 +9 147 2 4 1 0.65923 0.22031 0.24342 0.8 +9 148 2 2 0 0.66101 0.24213 0.2582 0.79169 +9 149 2 4 0 0.68021 0.24563 0.27224 0.74907 +9 150 2 2 0 0.64328 0.24075 0.23307 0.70195 +9 151 2 2 0 0.65067 0.22962 0.23184 0.70443 +9 152 1 3 0 0.67629 0.2 0.26824 0.6873 +9 153 1 2 0 0.70799 0.20293 0.26597 0.68027 +9 154 1 1 1 0.72808 0.2 0.2558 0.69619 +9 155 1 3 0 0.67266 0.2 0.26307 0.75591 +9 156 1 4 1 0.66292 0.2 0.23587 0.79649 +9 157 1 1 0 0.66732 0.20346 0.22775 0.79178 +9 158 1 1 1 0.68327 0.20605 0.22413 0.78059 +9 159 1 4 1 0.69984 0.2 0.20639 0.79022 +9 160 1 1 1 0.67476 0.2 0.21937 0.76964 +9 161 2 4 1 0.68327 0.21611 0.20076 0.7332 +9 162 2 4 1 0.67228 0.20292 0.20451 0.72464 +9 163 2 4 1 0.67515 0.25669 0.2 0.76848 +9 164 2 4 1 0.63301 0.25602 0.2 0.77518 +9 165 2 4 1 0.6279 0.28699 0.25009 0.76979 +9 166 2 1 0 0.65378 0.28915 0.21989 0.76559 +9 167 2 4 1 0.6211 0.31583 0.24256 0.79443 +9 168 2 4 1 0.61409 0.30287 0.28766 0.8 +9 169 2 4 1 0.65255 0.35784 0.29354 0.8 +9 170 1 1 0 0.6344 0.31885 0.31769 0.79089 +9 171 2 4 1 0.65216 0.33764 0.36155 0.76434 +9 172 2 4 1 0.67819 0.39544 0.3474 0.74471 +9 173 2 4 1 0.68263 0.40543 0.33819 0.74302 +9 174 2 1 1 0.66787 0.37755 0.3207 0.77943 +9 175 2 1 1 0.65956 0.36316 0.29664 0.76993 +9 176 2 4 1 0.65754 0.35879 0.28283 0.7549 +9 177 2 1 0 0.63822 0.38018 0.27172 0.8 +9 178 1 4 0 0.63985 0.42095 0.2254 0.8 +9 179 1 2 0 0.64619 0.42085 0.21304 0.75356 +9 180 1 1 1 0.59991 0.42416 0.2385 0.75872 +9 181 2 3 0 0.59123 0.44978 0.2418 0.7741 +9 182 1 1 0 0.58685 0.47005 0.26321 0.76732 +9 183 2 1 0 0.59901 0.50363 0.23276 0.75541 +9 184 2 1 0 0.61453 0.50371 0.28101 0.75982 +9 185 1 2 1 0.6131 0.53879 0.26269 0.74025 +9 186 1 2 1 0.62768 0.50382 0.26046 0.71628 +9 187 1 3 0 0.66219 0.52766 0.26739 0.70902 +9 188 1 2 0 0.63736 0.56407 0.29745 0.72158 +9 189 1 4 0 0.62284 0.55832 0.31837 0.72099 +9 190 1 1 1 0.65609 0.53865 0.2909 0.69863 +9 191 1 4 1 0.63598 
0.56449 0.29165 0.74445 +9 192 1 4 1 0.65425 0.55375 0.28118 0.76522 +9 193 2 4 1 0.68603 0.57074 0.27263 0.76083 +9 194 2 2 1 0.64706 0.56614 0.2661 0.76867 +9 195 2 4 1 0.68318 0.51971 0.26064 0.74966 +9 196 2 4 0 0.68059 0.497 0.25007 0.75177 +9 197 2 2 1 0.68456 0.51478 0.30823 0.74594 +9 198 1 2 0 0.71844 0.52649 0.33788 0.74639 +9 199 2 4 1 0.71123 0.551 0.35148 0.74464 +9 200 2 3 0 0.72001 0.52342 0.33865 0.72155 +9 201 2 2 1 0.74859 0.49433 0.34542 0.70368 +10 2 1 2 0 0.79533 0.61486 0.33702 0.64806 +10 3 1 3 0 0.77911 0.60512 0.33215 0.66138 +10 4 1 1 1 0.78337 0.61632 0.35911 0.63626 +10 5 1 4 1 0.7892 0.61803 0.31638 0.62637 +10 6 1 4 0 0.78802 0.6319 0.33504 0.61932 +10 7 1 1 1 0.8 0.62275 0.35163 0.64329 +10 8 1 1 1 0.8 0.67669 0.35841 0.6561 +10 9 1 1 0 0.79212 0.63642 0.32298 0.65235 +10 10 2 1 1 0.79185 0.65624 0.33595 0.61702 +10 11 1 1 0 0.74926 0.72931 0.34746 0.58345 +10 12 1 2 0 0.75632 0.76559 0.34628 0.58341 +10 13 1 2 1 0.7639 0.74425 0.36349 0.58836 +10 14 1 4 1 0.76054 0.77998 0.3378 0.60271 +10 15 1 1 1 0.8 0.78498 0.35739 0.61118 +10 16 1 1 1 0.8 0.8 0.35731 0.59052 +10 17 1 1 1 0.8 0.78324 0.35449 0.58059 +10 18 1 1 1 0.8 0.8 0.37265 0.5926 +10 19 1 1 1 0.8 0.79256 0.37965 0.59253 +10 20 1 2 1 0.8 0.8 0.36541 0.59428 +10 21 1 3 1 0.8 0.77415 0.41601 0.60457 +10 22 1 1 1 0.77449 0.77168 0.42524 0.55168 +10 23 1 1 0 0.8 0.75067 0.38646 0.55036 +10 24 2 3 1 0.8 0.7915 0.40166 0.48547 +10 25 1 2 1 0.76914 0.8 0.42448 0.49218 +10 26 1 4 0 0.77758 0.79562 0.44998 0.51029 +10 27 1 1 1 0.8 0.8 0.42856 0.52042 +10 28 1 4 0 0.8 0.8 0.4345 0.50575 +10 29 1 1 1 0.8 0.75882 0.4448 0.48349 +10 30 1 3 0 0.78175 0.8 0.40258 0.44428 +10 31 1 1 1 0.76652 0.8 0.39384 0.39106 +10 32 1 1 0 0.79291 0.78602 0.37873 0.40843 +10 33 1 4 1 0.8 0.76245 0.39005 0.41561 +10 34 1 1 1 0.76776 0.8 0.35845 0.37649 +10 35 1 1 0 0.76479 0.8 0.38451 0.39538 +10 36 1 1 1 0.78675 0.8 0.43522 0.37399 +10 37 1 2 1 0.76881 0.8 0.42214 0.3937 +10 38 1 1 1 0.76719 0.77319 0.46031 0.41895 +10 39 1 1 1 0.76835 0.76873 0.48054 0.4191 +10 40 1 3 1 0.73738 0.8 0.48358 0.43343 +10 41 1 1 1 0.72322 0.8 0.47422 0.44369 +10 42 1 2 1 0.71153 0.8 0.46335 0.42345 +10 43 2 2 1 0.68255 0.8 0.45305 0.44001 +10 44 1 1 1 0.69089 0.78104 0.47197 0.42788 +10 45 1 4 0 0.66732 0.79469 0.44878 0.44591 +10 46 1 3 1 0.65766 0.79189 0.46314 0.44544 +10 47 1 1 0 0.5976 0.7684 0.47052 0.42774 +10 48 1 1 0 0.55005 0.74119 0.42341 0.48673 +10 49 1 1 0 0.53023 0.72798 0.41138 0.52417 +10 50 2 4 1 0.52611 0.77921 0.39979 0.53658 +10 51 1 3 0 0.49055 0.8 0.42019 0.55675 +10 52 1 2 0 0.52201 0.8 0.38315 0.53647 +10 53 1 4 1 0.48311 0.79404 0.38362 0.52962 +10 54 1 1 1 0.47851 0.8 0.37604 0.53622 +10 55 2 3 0 0.47114 0.78186 0.32555 0.54864 +10 56 1 3 0 0.52498 0.8 0.34821 0.50809 +10 57 2 3 1 0.5568 0.73854 0.32226 0.50379 +10 58 1 1 1 0.59107 0.77508 0.35782 0.48549 +10 59 1 2 1 0.57859 0.75152 0.36376 0.45497 +10 60 1 2 1 0.58538 0.78769 0.35096 0.45682 +10 61 1 2 1 0.5831 0.77991 0.36414 0.42087 +10 62 1 2 0 0.62814 0.73505 0.34981 0.38763 +10 63 2 2 1 0.6263 0.70483 0.30452 0.36641 +10 64 2 4 1 0.59054 0.67293 0.31781 0.38955 +10 65 1 1 1 0.59062 0.68057 0.37094 0.38191 +10 66 2 2 0 0.6 0.69509 0.37937 0.3568 +10 67 2 2 1 0.56786 0.66458 0.3822 0.36675 +10 68 1 1 1 0.5604 0.70035 0.38482 0.35478 +10 69 2 2 1 0.57104 0.71245 0.39957 0.35664 +10 70 2 1 0 0.54359 0.71196 0.42378 0.3571 +10 72 2 4 0 0.52826 0.71692 0.47957 0.39249 +10 73 1 1 0 0.52679 0.7008 0.51601 0.41836 +10 74 1 4 0 0.51832 0.69455 0.52682 0.45528 +10 75 1 1 1 
0.51664 0.69787 0.52532 0.41025 +10 76 1 3 1 0.47841 0.70222 0.54519 0.40828 +10 77 1 4 1 0.52181 0.7075 0.58058 0.4413 +10 78 1 1 0 0.53371 0.70709 0.61293 0.43702 +10 79 1 3 0 0.51187 0.69805 0.59148 0.41795 +10 80 1 1 0 0.54662 0.71333 0.60554 0.38272 +10 81 1 1 1 0.56164 0.69111 0.62202 0.35641 +10 82 1 3 1 0.58091 0.68219 0.60688 0.36051 +10 83 2 1 1 0.57962 0.68056 0.65021 0.36674 +10 84 1 3 1 0.55223 0.72185 0.6166 0.38495 +10 85 1 3 1 0.53855 0.73672 0.61654 0.40548 +10 86 1 3 0 0.57337 0.74868 0.60743 0.41265 +10 87 2 1 0 0.58888 0.77335 0.60741 0.42571 +10 88 1 2 1 0.57303 0.74299 0.64668 0.39972 +10 89 1 4 0 0.59801 0.76723 0.67844 0.37176 +10 90 1 1 0 0.59176 0.76852 0.67096 0.35297 +10 91 1 4 0 0.57924 0.77291 0.6587 0.31125 +10 92 1 1 1 0.51333 0.72898 0.67478 0.32844 +10 93 1 1 1 0.48641 0.7256 0.70118 0.32468 +10 94 1 1 0 0.5281 0.75115 0.7171 0.31097 +10 95 1 1 0 0.54681 0.72528 0.66602 0.3031 +10 96 1 3 1 0.55095 0.75749 0.68133 0.27906 +10 97 1 1 1 0.51773 0.75759 0.70518 0.28213 +10 98 1 1 0 0.55385 0.74856 0.71976 0.32485 +10 99 1 1 0 0.52698 0.73911 0.72153 0.34305 +10 100 1 3 0 0.52457 0.68834 0.6949 0.33497 +10 101 1 1 1 0.50922 0.70878 0.68 0.32982 +10 102 2 1 1 0.48483 0.64737 0.69028 0.34193 +10 103 2 1 0 0.45722 0.6843 0.7382 0.38723 +10 104 1 1 0 0.40999 0.68738 0.71106 0.40523 +10 105 2 4 1 0.40716 0.6782 0.72897 0.428 +10 106 1 1 1 0.37528 0.64565 0.72724 0.41672 +10 107 1 3 1 0.34385 0.62057 0.75223 0.40683 +10 108 1 2 1 0.34604 0.63244 0.75108 0.39212 +10 109 1 2 0 0.33945 0.59741 0.76341 0.40183 +10 110 2 4 1 0.33353 0.64323 0.77437 0.41816 +10 111 1 1 1 0.31872 0.64159 0.78477 0.39964 +10 112 1 4 0 0.33592 0.63011 0.75495 0.40405 +10 113 1 1 1 0.31717 0.66784 0.76968 0.42643 +10 114 2 1 0 0.29305 0.64843 0.77733 0.43549 +10 115 1 3 1 0.31068 0.65657 0.75338 0.4475 +10 116 1 4 0 0.30067 0.63712 0.75722 0.44616 +10 117 2 4 0 0.29281 0.63177 0.77055 0.45544 +10 118 2 4 1 0.27562 0.63623 0.74456 0.49864 +10 119 2 4 1 0.28123 0.6148 0.7298 0.50748 +10 120 1 1 0 0.29372 0.60176 0.74845 0.48459 +10 121 1 4 1 0.31879 0.61007 0.75153 0.51382 +10 122 1 1 0 0.34325 0.6292 0.78053 0.49692 +10 123 1 3 0 0.33735 0.59963 0.78052 0.45578 +10 124 2 4 0 0.3061 0.6266 0.78756 0.43671 +10 125 1 2 0 0.2692 0.61377 0.78148 0.46674 +10 126 2 4 0 0.28086 0.60337 0.74509 0.49568 +10 127 1 2 1 0.25623 0.61038 0.72673 0.50664 +10 128 1 2 1 0.22991 0.60778 0.69596 0.50548 +10 129 1 2 0 0.21403 0.62861 0.74918 0.56429 +10 130 2 3 1 0.2 0.61951 0.74461 0.5241 +10 131 1 2 0 0.2016 0.57322 0.73803 0.53287 +10 132 2 2 1 0.2 0.57338 0.76688 0.57414 +10 133 2 3 1 0.24192 0.54513 0.78845 0.5805 +10 134 2 3 1 0.22449 0.53931 0.76575 0.54908 +10 135 1 1 0 0.23756 0.49219 0.79371 0.58829 +10 136 2 4 0 0.2 0.51495 0.8 0.59757 +10 137 1 4 1 0.25076 0.50844 0.79613 0.56615 +10 138 1 1 0 0.21233 0.51514 0.79756 0.56617 +10 139 2 3 1 0.21396 0.53692 0.8 0.5165 +10 140 1 2 1 0.24112 0.5471 0.79205 0.53283 +10 141 1 1 0 0.2593 0.56942 0.79459 0.5476 +10 142 1 2 1 0.26206 0.52408 0.8 0.54105 +10 143 1 2 0 0.27497 0.5153 0.78909 0.49856 +10 144 2 4 1 0.27226 0.48823 0.8 0.51845 +10 145 1 3 1 0.25567 0.45553 0.79816 0.52488 +10 146 1 1 1 0.25052 0.42584 0.8 0.5299 +10 147 1 1 0 0.26973 0.42681 0.75384 0.53273 +10 148 2 2 0 0.2731 0.42783 0.77287 0.53278 +10 149 1 1 0 0.27367 0.44849 0.79407 0.55838 +10 150 2 1 0 0.2931 0.49084 0.8 0.5316 +10 151 1 3 0 0.28059 0.52164 0.74324 0.53733 +10 152 1 1 0 0.28957 0.53893 0.75354 0.54824 +10 153 2 3 1 0.28571 0.5029 0.75907 0.54966 +10 154 1 1 0 0.31502 0.49518 
0.76867 0.53628 +10 155 1 2 1 0.32785 0.49781 0.7489 0.53514 +10 156 1 1 1 0.40513 0.47091 0.77671 0.51453 +10 157 1 1 1 0.43223 0.45748 0.77637 0.51789 +10 158 1 3 1 0.46163 0.47294 0.74615 0.50768 +10 159 2 4 0 0.46601 0.48429 0.74986 0.49598 +10 160 2 2 0 0.47188 0.49195 0.71257 0.45374 +10 161 2 1 1 0.44031 0.49121 0.68641 0.47482 +10 162 2 3 0 0.41901 0.49526 0.70796 0.44658 +10 163 2 4 1 0.43251 0.46366 0.70508 0.42232 +10 164 1 2 1 0.41364 0.48043 0.70468 0.42756 +10 165 1 2 1 0.40867 0.48836 0.7352 0.39318 +10 166 1 2 1 0.38058 0.5313 0.7467 0.39111 +10 167 2 2 1 0.36313 0.57037 0.70606 0.40742 +10 168 2 1 1 0.35806 0.52469 0.72541 0.39842 +10 169 2 1 1 0.3965 0.55552 0.70164 0.39636 +10 170 1 4 1 0.39881 0.58211 0.7039 0.39538 +10 171 1 2 0 0.40161 0.64315 0.66595 0.43253 +10 172 1 2 1 0.40527 0.63142 0.68344 0.47814 +10 173 1 2 1 0.41818 0.64688 0.68375 0.49872 +10 174 2 4 1 0.43695 0.66292 0.67543 0.48242 +10 175 2 3 1 0.39064 0.70592 0.72162 0.50853 +10 176 2 3 1 0.41418 0.72227 0.7404 0.50848 +10 177 1 4 0 0.42828 0.73365 0.74399 0.50729 +10 178 1 1 0 0.4385 0.714 0.75182 0.52906 +10 179 1 1 1 0.38462 0.71399 0.78125 0.51557 +10 180 1 1 0 0.38243 0.6897 0.8 0.56269 +10 181 1 2 1 0.41956 0.6537 0.78119 0.53999 +10 182 1 3 1 0.46125 0.60926 0.78334 0.5355 +10 183 1 4 1 0.5079 0.58234 0.79041 0.53978 +10 185 2 3 1 0.58773 0.61974 0.77567 0.58005 +10 186 2 1 1 0.56994 0.61598 0.77697 0.5845 +10 187 1 4 1 0.58265 0.65108 0.75528 0.55322 +10 188 1 1 1 0.61641 0.63525 0.73075 0.53583 +10 189 2 3 0 0.61089 0.68513 0.74215 0.55119 +10 190 1 4 0 0.67254 0.67502 0.74304 0.52344 +10 191 2 3 1 0.71849 0.66315 0.75994 0.54305 +10 192 1 2 1 0.73545 0.6294 0.79004 0.53427 +10 194 2 1 0 0.76957 0.61338 0.74831 0.52514 +10 195 1 3 0 0.73544 0.61721 0.75629 0.4949 +10 196 1 2 1 0.7462 0.65698 0.72142 0.46658 +10 197 1 2 0 0.74487 0.67712 0.70299 0.4334 +10 198 1 4 0 0.75813 0.63902 0.7132 0.38684 +10 199 1 3 1 0.77195 0.65874 0.73815 0.4148 +10 200 2 4 0 0.75457 0.63628 0.72188 0.43059 +10 201 1 2 1 0.72843 0.646 0.71037 0.46274 +11 1 2 3 0 0.55043 0.5779 0.35359 0.35152 +11 3 2 3 1 0.56558 0.52928 0.27501 0.34064 +11 4 2 1 0 0.53392 0.562 0.31016 0.31646 +11 5 2 3 1 0.53131 0.54329 0.26991 0.33176 +11 6 2 2 0 0.51301 0.54949 0.31655 0.3123 +11 7 2 2 1 0.49465 0.56101 0.30935 0.29295 +11 8 2 3 1 0.49977 0.59981 0.32269 0.29142 +11 9 2 2 1 0.5337 0.64168 0.27847 0.23984 +11 10 2 2 1 0.54034 0.62302 0.28238 0.25339 +11 11 2 3 0 0.54457 0.62412 0.2874 0.25673 +11 12 2 3 0 0.56074 0.61232 0.31517 0.23104 +11 13 2 1 0 0.52356 0.59777 0.32553 0.237 +11 14 2 3 1 0.51113 0.58218 0.2904 0.24704 +11 15 2 4 0 0.49545 0.58171 0.29121 0.22882 +11 16 2 3 1 0.4855 0.57166 0.29957 0.24484 +11 17 2 3 0 0.46832 0.58533 0.29036 0.26997 +11 18 2 3 0 0.48141 0.5609 0.29432 0.20644 +11 19 2 2 0 0.48431 0.57657 0.27478 0.22073 +11 20 2 4 0 0.5241 0.57124 0.29602 0.24342 +11 21 1 1 0 0.52299 0.60777 0.32382 0.27311 +11 22 1 2 1 0.55617 0.6093 0.31869 0.25054 +11 23 2 3 0 0.53125 0.56702 0.37266 0.23961 +11 24 2 3 1 0.54199 0.54908 0.40232 0.24116 +11 25 1 3 0 0.57052 0.54133 0.35003 0.22522 +11 26 2 2 0 0.55783 0.57273 0.32456 0.2 +11 27 1 1 1 0.58077 0.60062 0.32838 0.2236 +11 28 1 1 1 0.55735 0.57087 0.29145 0.22587 +11 29 1 1 1 0.57661 0.59907 0.24968 0.25374 +11 30 1 4 0 0.54437 0.63611 0.24601 0.24774 +11 31 1 1 0 0.52996 0.60176 0.21448 0.27302 +11 32 2 3 0 0.50178 0.63469 0.2 0.24459 +11 33 1 1 1 0.52212 0.64377 0.20389 0.2 +11 34 1 4 0 0.49338 0.67082 0.2 0.2053 +11 35 1 3 0 0.47172 0.66299 0.2 0.25588 +11 36 1 
1 1 0.48227 0.66312 0.21198 0.24343 +11 37 1 1 1 0.4579 0.66234 0.2 0.2323 +11 38 1 1 0 0.45205 0.60848 0.24113 0.28058 +11 39 1 1 0 0.46492 0.65123 0.21024 0.2792 +11 40 1 2 0 0.46757 0.63588 0.2 0.27274 +11 41 2 2 0 0.46378 0.61741 0.2 0.20137 +11 42 2 1 1 0.43549 0.60662 0.2191 0.25492 +11 43 1 1 0 0.44882 0.58923 0.20251 0.23947 +11 44 2 4 0 0.44739 0.61681 0.2209 0.26471 +11 45 2 4 1 0.46384 0.59305 0.25319 0.24613 +11 46 2 2 1 0.51119 0.59576 0.24912 0.24026 +11 47 2 2 1 0.51119 0.61554 0.27732 0.25702 +11 48 2 4 0 0.46991 0.64183 0.23582 0.22444 +11 49 1 2 1 0.48296 0.63369 0.20535 0.27083 +11 50 1 2 0 0.5426 0.70366 0.2 0.32619 +11 51 2 4 0 0.50892 0.69992 0.2 0.32438 +11 52 1 4 1 0.4954 0.71494 0.23481 0.31973 +11 53 1 4 0 0.45152 0.75401 0.22295 0.31486 +11 54 1 4 1 0.44303 0.76902 0.2 0.33248 +11 55 1 2 1 0.44439 0.76276 0.2 0.35535 +11 56 1 2 1 0.47164 0.77372 0.20991 0.30229 +11 57 1 4 1 0.42252 0.76981 0.20823 0.27657 +11 58 1 2 0 0.4626 0.77064 0.22131 0.23807 +11 59 2 4 0 0.47876 0.74553 0.21937 0.2 +11 60 1 2 0 0.47492 0.77258 0.21553 0.20372 +11 61 2 1 1 0.50097 0.797 0.22088 0.2 +11 62 2 1 0 0.54239 0.79098 0.2 0.21974 +11 63 2 3 0 0.52331 0.79352 0.23114 0.23316 +11 64 1 1 0 0.49528 0.76334 0.22954 0.25331 +11 65 1 4 0 0.49876 0.73095 0.26264 0.27285 +11 66 1 1 1 0.53242 0.72036 0.23656 0.25391 +11 67 1 1 1 0.52189 0.67882 0.27964 0.2393 +11 68 1 1 0 0.53176 0.68164 0.25737 0.27294 +11 69 2 3 0 0.54291 0.73061 0.2078 0.27341 +11 70 2 3 1 0.54529 0.72522 0.24896 0.28794 +11 71 2 3 0 0.55668 0.75084 0.26641 0.26938 +11 72 1 1 0 0.53982 0.71284 0.25302 0.27195 +11 73 2 4 0 0.57059 0.72955 0.21981 0.28124 +11 74 2 3 0 0.60017 0.75338 0.2 0.26189 +11 75 2 3 0 0.56987 0.79254 0.23086 0.26063 +11 76 2 4 1 0.53471 0.77639 0.25951 0.27258 +11 77 1 2 1 0.577 0.75346 0.31801 0.28561 +11 78 1 2 1 0.54832 0.71056 0.29922 0.29029 +11 79 1 1 0 0.53751 0.7274 0.31287 0.29789 +11 80 1 2 1 0.54485 0.71156 0.31588 0.2826 +11 81 1 2 0 0.54433 0.69123 0.36089 0.26533 +11 82 2 4 1 0.56687 0.76312 0.36796 0.23672 +11 83 2 4 0 0.60786 0.7831 0.38363 0.23398 +11 84 2 2 1 0.60936 0.78368 0.36804 0.23087 +11 85 1 2 1 0.6211 0.74615 0.36371 0.20084 +11 86 2 4 0 0.5845 0.77236 0.3 0.2 +11 87 1 2 1 0.60376 0.8 0.32359 0.2 +11 88 1 2 1 0.62269 0.77985 0.32173 0.2031 +11 89 1 2 1 0.67831 0.79703 0.35091 0.20138 +11 90 1 2 1 0.69167 0.8 0.37196 0.2 +11 91 1 2 1 0.69856 0.7645 0.36825 0.21451 +11 92 1 2 1 0.69443 0.73286 0.37839 0.24046 +11 93 1 2 1 0.69619 0.72385 0.376 0.28831 +11 94 1 2 1 0.69996 0.7384 0.34066 0.30731 +11 95 1 2 1 0.69694 0.77351 0.34487 0.33476 +11 96 1 4 0 0.66827 0.79268 0.35615 0.27297 +11 97 1 2 0 0.67878 0.8 0.33973 0.27352 +11 98 1 1 0 0.6799 0.8 0.34456 0.25026 +11 99 2 3 0 0.62187 0.8 0.28067 0.22623 +11 100 2 2 1 0.6054 0.77345 0.25924 0.2 +11 101 1 4 0 0.65383 0.77114 0.27204 0.2008 +11 102 1 3 0 0.64727 0.76172 0.25281 0.21862 +11 103 2 4 0 0.6635 0.77711 0.305 0.2 +11 104 2 3 0 0.67197 0.79161 0.28616 0.23714 +11 105 1 2 1 0.64376 0.8 0.30672 0.23704 +11 106 1 2 0 0.63178 0.8 0.29251 0.23955 +11 107 1 2 1 0.6538 0.79894 0.29693 0.24725 +11 108 1 3 0 0.64423 0.7928 0.28867 0.21739 +11 109 1 2 0 0.61983 0.76918 0.29843 0.27407 +11 111 1 4 0 0.54281 0.76111 0.30887 0.26801 +11 112 1 2 1 0.52811 0.74021 0.30316 0.29007 +11 113 1 3 0 0.53029 0.77255 0.2792 0.28303 +11 114 1 2 1 0.53651 0.79728 0.29597 0.30773 +11 115 1 2 0 0.55624 0.78973 0.25582 0.31867 +11 116 1 2 1 0.53997 0.8 0.22837 0.31713 +11 117 1 2 1 0.52671 0.8 0.24036 0.38512 +11 118 1 2 1 0.51438 0.8 0.23973 
0.37043 +11 119 1 3 1 0.53182 0.79605 0.27918 0.36907 +11 120 1 2 1 0.50286 0.8 0.28253 0.35504 +11 121 1 2 1 0.50235 0.8 0.29495 0.29598 +11 122 1 3 0 0.48894 0.79331 0.30969 0.31204 +11 123 1 2 1 0.47871 0.79948 0.3263 0.31954 +11 124 1 2 1 0.45365 0.8 0.29099 0.33413 +11 125 1 2 1 0.46635 0.8 0.26215 0.31968 +11 126 1 2 1 0.45381 0.74542 0.23663 0.31977 +11 127 1 2 1 0.44701 0.77734 0.26106 0.32745 +11 128 1 3 0 0.42234 0.8 0.25779 0.32023 +11 129 1 2 1 0.40726 0.79306 0.29614 0.30255 +11 130 1 2 1 0.36714 0.79442 0.30191 0.2763 +11 131 1 4 0 0.36376 0.79681 0.32273 0.27065 +11 132 1 2 1 0.3524 0.79521 0.28949 0.2766 +11 133 1 2 1 0.32952 0.78195 0.24297 0.25312 +11 134 1 2 1 0.28499 0.8 0.24966 0.28736 +11 135 1 2 0 0.26727 0.79224 0.22348 0.3366 +11 136 1 2 1 0.25006 0.79156 0.26094 0.27667 +11 137 1 2 1 0.21646 0.76325 0.24907 0.25544 +11 138 1 2 1 0.2 0.78589 0.26922 0.27831 +11 139 1 2 0 0.2 0.76936 0.2926 0.24553 +11 140 1 2 1 0.23753 0.79722 0.33088 0.24868 +11 141 1 2 0 0.22556 0.8 0.31513 0.27674 +11 142 1 2 0 0.21033 0.8 0.30497 0.28001 +11 143 2 3 1 0.22537 0.8 0.27327 0.29592 +11 144 2 3 0 0.24274 0.79907 0.27427 0.28701 +11 145 2 3 0 0.21851 0.8 0.26859 0.30582 +11 146 2 1 0 0.2178 0.8 0.22666 0.31128 +11 147 2 1 1 0.24484 0.8 0.20365 0.30276 +11 148 2 3 0 0.22017 0.8 0.21582 0.29858 +11 149 2 4 1 0.20838 0.8 0.20549 0.3077 +11 150 1 1 1 0.26729 0.8 0.24835 0.33951 +11 151 2 4 1 0.25459 0.78232 0.22098 0.37519 +11 152 1 1 0 0.30189 0.77343 0.22356 0.39847 +11 153 2 2 0 0.277 0.77971 0.21763 0.41577 +11 154 2 1 0 0.33926 0.74634 0.2 0.37846 +11 155 2 4 1 0.33888 0.74843 0.2 0.38359 +11 156 2 4 1 0.36489 0.74266 0.2 0.33555 +11 157 2 4 0 0.36922 0.72864 0.21039 0.36898 +11 158 2 4 0 0.37651 0.71557 0.25706 0.3556 +11 159 2 1 0 0.3562 0.70798 0.27144 0.39188 +11 160 2 4 0 0.37324 0.73197 0.24524 0.39553 +11 161 2 4 1 0.33247 0.71528 0.25213 0.40268 +11 162 2 4 0 0.32664 0.75746 0.24262 0.39697 +11 163 2 4 0 0.32737 0.75497 0.20079 0.37946 +11 164 2 1 1 0.32241 0.8 0.2 0.40464 +11 165 1 1 1 0.33485 0.8 0.20678 0.40402 +11 166 1 1 1 0.35907 0.8 0.22123 0.40926 +11 167 2 4 1 0.34949 0.8 0.27264 0.41539 +11 168 2 4 0 0.37267 0.8 0.30797 0.436 +11 169 1 4 0 0.39837 0.76644 0.29919 0.41091 +11 170 1 3 0 0.38419 0.74126 0.2761 0.42169 +11 171 1 4 0 0.38649 0.73896 0.24769 0.45184 +11 172 1 1 1 0.3735 0.79988 0.21732 0.43546 +11 173 1 3 0 0.36009 0.8 0.22321 0.44074 +11 174 1 4 0 0.38648 0.8 0.21086 0.45846 +11 175 1 1 0 0.37753 0.7835 0.20936 0.47693 +11 176 1 1 0 0.32905 0.77017 0.20358 0.42754 +11 177 1 1 0 0.3851 0.78473 0.20506 0.44052 +11 178 2 3 1 0.36796 0.78051 0.2 0.3813 +11 179 2 2 1 0.35199 0.79315 0.22019 0.36803 +11 180 2 2 1 0.31801 0.8 0.24509 0.37891 +11 181 2 3 1 0.30674 0.8 0.23432 0.38882 +11 182 2 3 0 0.33107 0.75848 0.2 0.38555 +11 183 2 3 0 0.33389 0.73813 0.2101 0.38101 +11 184 2 3 0 0.3633 0.73729 0.2 0.40103 +11 185 1 2 1 0.35394 0.73968 0.2 0.36388 +11 186 1 2 1 0.3625 0.78628 0.2 0.38816 +11 187 1 2 1 0.37426 0.8 0.2 0.46508 +11 188 1 3 1 0.34876 0.8 0.22985 0.47632 +11 189 1 3 0 0.33813 0.78029 0.2056 0.47281 +11 190 1 4 0 0.37915 0.79114 0.2 0.45265 +11 191 1 2 1 0.40233 0.8 0.24541 0.49344 +11 192 1 2 1 0.42529 0.78379 0.25813 0.51788 +11 193 1 3 0 0.44473 0.8 0.26037 0.55478 +11 194 1 2 1 0.42335 0.77949 0.25251 0.56739 +11 195 1 2 0 0.41433 0.74312 0.25601 0.56701 +11 196 1 2 0 0.3768 0.76441 0.25915 0.56346 +11 197 1 2 1 0.36343 0.78095 0.20219 0.55189 +11 198 1 2 1 0.32245 0.76431 0.2 0.52809 +11 199 1 3 0 0.28258 0.8 0.23392 0.52097 +11 200 1 4 0 
0.29243 0.79748 0.22832 0.51401 +11 201 1 2 1 0.30384 0.79552 0.23835 0.52741 \ No newline at end of file diff --git a/R/inst/extdata/ug_exampleData.txt b/R/inst/extdata/ug_exampleData.txt new file mode 100644 index 00000000..257795cc --- /dev/null +++ b/R/inst/extdata/ug_exampleData.txt @@ -0,0 +1,1801 @@ +trial offer accept subjID group +1 3 0 1 LM +2 3 0 1 LM +3 5 0 1 LM +4 4 0 1 LM +5 2 0 1 LM +6 4 0 1 LM +7 3 0 1 LM +8 4 0 1 LM +9 3 0 1 LM +10 4 0 1 LM +11 5 1 1 LM +12 3 0 1 LM +13 5 1 1 LM +14 3 0 1 LM +15 1 0 1 LM +16 2 0 1 LM +17 3 0 1 LM +18 6 1 1 LM +19 2 0 1 LM +20 2 0 1 LM +21 4 1 1 LM +22 3 0 1 LM +23 5 1 1 LM +24 2 0 1 LM +25 4 1 1 LM +26 4 1 1 LM +27 2 0 1 LM +28 6 1 1 LM +29 4 1 1 LM +30 7 1 1 LM +31 9 1 1 LM +32 7 1 1 LM +33 10 1 1 LM +34 7 1 1 LM +35 8 1 1 LM +36 8 1 1 LM +37 11 1 1 LM +38 7 1 1 LM +39 6 1 1 LM +40 6 1 1 LM +41 12 1 1 LM +42 9 1 1 LM +43 5 1 1 LM +44 8 1 1 LM +45 6 1 1 LM +46 7 1 1 LM +47 8 1 1 LM +48 7 1 1 LM +49 8 1 1 LM +50 6 1 1 LM +51 8 1 1 LM +52 7 1 1 LM +53 9 1 1 LM +54 9 1 1 LM +55 8 1 1 LM +56 10 1 1 LM +57 6 1 1 LM +58 10 1 1 LM +59 10 1 1 LM +60 8 1 1 LM +1 3 0 2 LM +2 3 0 2 LM +3 5 0 2 LM +4 4 0 2 LM +5 2 0 2 LM +6 4 0 2 LM +7 3 0 2 LM +8 4 0 2 LM +9 3 0 2 LM +10 4 0 2 LM +11 5 1 2 LM +12 3 0 2 LM +13 5 1 2 LM +14 3 0 2 LM +15 1 0 2 LM +16 2 0 2 LM +17 3 0 2 LM +18 6 1 2 LM +19 2 0 2 LM +20 2 0 2 LM +21 4 0 2 LM +22 3 0 2 LM +23 5 0 2 LM +24 2 0 2 LM +25 4 0 2 LM +26 4 0 2 LM +27 2 0 2 LM +28 6 1 2 LM +29 4 0 2 LM +30 7 1 2 LM +31 9 1 2 LM +32 7 1 2 LM +33 10 1 2 LM +34 7 1 2 LM +35 8 1 2 LM +36 8 1 2 LM +37 11 1 2 LM +38 7 1 2 LM +39 6 1 2 LM +40 6 1 2 LM +41 12 1 2 LM +42 9 1 2 LM +43 5 1 2 LM +44 8 1 2 LM +45 6 1 2 LM +46 7 1 2 LM +47 8 1 2 LM +48 7 1 2 LM +49 8 1 2 LM +50 6 1 2 LM +51 8 1 2 LM +52 7 1 2 LM +53 9 1 2 LM +54 9 1 2 LM +55 8 1 2 LM +56 10 1 2 LM +57 6 1 2 LM +58 10 1 2 LM +59 10 1 2 LM +60 8 1 2 LM +1 3 0 3 LM +2 3 0 3 LM +3 5 1 3 LM +4 4 0 3 LM +5 2 0 3 LM +6 4 0 3 LM +7 3 0 3 LM +8 4 0 3 LM +9 3 0 3 LM +10 4 0 3 LM +11 5 1 3 LM +12 3 0 3 LM +13 5 1 3 LM +14 3 0 3 LM +15 1 0 3 LM +16 2 0 3 LM +17 3 0 3 LM +18 6 1 3 LM +19 2 0 3 LM +20 2 0 3 LM +21 4 0 3 LM +22 3 0 3 LM +23 5 1 3 LM +24 2 0 3 LM +25 4 0 3 LM +26 4 0 3 LM +27 2 0 3 LM +28 6 1 3 LM +29 4 0 3 LM +30 7 1 3 LM +31 9 1 3 LM +32 7 1 3 LM +33 10 1 3 LM +34 7 1 3 LM +35 8 1 3 LM +36 8 1 3 LM +37 11 1 3 LM +38 7 1 3 LM +39 6 1 3 LM +40 6 1 3 LM +41 12 1 3 LM +42 9 1 3 LM +43 5 1 3 LM +44 8 1 3 LM +45 6 1 3 LM +46 7 1 3 LM +47 8 1 3 LM +48 7 1 3 LM +49 8 1 3 LM +50 6 1 3 LM +51 8 1 3 LM +52 7 1 3 LM +53 9 1 3 LM +54 9 1 3 LM +55 8 1 3 LM +56 10 1 3 LM +57 6 1 3 LM +58 10 1 3 LM +59 10 1 3 LM +60 8 1 3 LM +1 3 0 4 LM +2 3 0 4 LM +3 5 1 4 LM +4 4 0 4 LM +5 2 0 4 LM +6 4 0 4 LM +7 3 0 4 LM +8 4 0 4 LM +9 3 0 4 LM +10 4 0 4 LM +11 5 1 4 LM +12 3 0 4 LM +13 5 1 4 LM +14 3 0 4 LM +15 1 0 4 LM +16 2 0 4 LM +17 3 0 4 LM +18 6 1 4 LM +19 2 0 4 LM +20 2 0 4 LM +21 4 0 4 LM +22 3 0 4 LM +23 5 1 4 LM +24 2 0 4 LM +25 4 0 4 LM +26 4 0 4 LM +27 2 0 4 LM +28 6 1 4 LM +29 4 0 4 LM +30 7 1 4 LM +31 9 1 4 LM +32 7 1 4 LM +33 10 1 4 LM +34 7 1 4 LM +35 8 1 4 LM +36 8 1 4 LM +37 11 1 4 LM +38 7 1 4 LM +39 6 1 4 LM +40 6 1 4 LM +41 12 1 4 LM +42 9 1 4 LM +43 5 1 4 LM +44 8 1 4 LM +45 6 0 4 LM +46 7 1 4 LM +47 8 1 4 LM +48 7 1 4 LM +49 8 1 4 LM +50 6 1 4 LM +51 8 1 4 LM +52 7 1 4 LM +53 9 1 4 LM +54 9 1 4 LM +55 8 1 4 LM +56 10 1 4 LM +57 6 1 4 LM +58 10 1 4 LM +59 10 1 4 LM +60 8 1 4 LM +1 3 0 5 LM +2 3 0 5 LM +3 5 1 5 LM +4 4 1 5 LM +5 2 0 5 LM +6 4 0 5 LM +7 3 0 5 LM +8 4 0 5 LM +9 3 0 5 
LM +10 4 0 5 LM +11 5 1 5 LM +12 3 0 5 LM +13 5 1 5 LM +14 3 0 5 LM +15 1 0 5 LM +16 2 0 5 LM +17 3 0 5 LM +18 6 1 5 LM +19 2 0 5 LM +20 2 0 5 LM +21 4 0 5 LM +22 3 0 5 LM +23 5 1 5 LM +24 2 0 5 LM +25 4 0 5 LM +26 4 0 5 LM +27 2 0 5 LM +28 6 1 5 LM +29 4 0 5 LM +30 7 1 5 LM +31 9 1 5 LM +32 7 1 5 LM +33 10 1 5 LM +34 7 1 5 LM +35 8 1 5 LM +36 8 1 5 LM +37 11 1 5 LM +38 7 1 5 LM +39 6 1 5 LM +40 6 1 5 LM +41 12 1 5 LM +42 9 1 5 LM +43 5 1 5 LM +44 8 1 5 LM +45 6 1 5 LM +46 7 1 5 LM +47 8 1 5 LM +48 7 1 5 LM +49 8 1 5 LM +50 6 1 5 LM +51 8 1 5 LM +52 7 1 5 LM +53 9 1 5 LM +54 9 1 5 LM +55 8 1 5 LM +56 10 1 5 LM +57 6 1 5 LM +58 10 1 5 LM +59 10 1 5 LM +60 8 1 5 LM +1 3 0 6 LM +2 3 0 6 LM +3 5 1 6 LM +4 4 0 6 LM +5 2 0 6 LM +6 4 0 6 LM +7 3 0 6 LM +8 4 0 6 LM +9 3 0 6 LM +10 4 0 6 LM +11 5 1 6 LM +12 3 0 6 LM +13 5 1 6 LM +14 3 0 6 LM +15 1 0 6 LM +16 2 0 6 LM +17 3 0 6 LM +18 6 1 6 LM +19 2 0 6 LM +20 2 0 6 LM +21 4 0 6 LM +22 3 0 6 LM +23 5 1 6 LM +24 2 0 6 LM +25 4 0 6 LM +26 4 0 6 LM +27 2 0 6 LM +28 6 1 6 LM +29 4 0 6 LM +30 7 1 6 LM +31 9 1 6 LM +32 7 1 6 LM +33 10 1 6 LM +34 7 1 6 LM +35 8 1 6 LM +36 8 1 6 LM +37 11 1 6 LM +38 7 1 6 LM +39 6 1 6 LM +40 6 1 6 LM +41 12 1 6 LM +42 9 1 6 LM +43 5 0 6 LM +44 8 1 6 LM +45 6 1 6 LM +46 7 1 6 LM +47 8 1 6 LM +48 7 1 6 LM +49 8 1 6 LM +50 6 1 6 LM +51 8 1 6 LM +52 7 1 6 LM +53 9 1 6 LM +54 9 1 6 LM +55 8 1 6 LM +56 10 1 6 LM +57 6 1 6 LM +58 10 1 6 LM +59 10 1 6 LM +60 8 1 6 LM +1 3 0 7 LM +2 3 0 7 LM +3 5 0 7 LM +4 4 0 7 LM +5 2 0 7 LM +6 4 0 7 LM +7 3 0 7 LM +8 4 0 7 LM +9 3 0 7 LM +10 4 0 7 LM +11 5 0 7 LM +12 3 0 7 LM +13 5 0 7 LM +14 3 0 7 LM +15 1 0 7 LM +16 2 0 7 LM +17 3 0 7 LM +18 6 1 7 LM +19 2 0 7 LM +20 2 0 7 LM +21 4 0 7 LM +22 3 0 7 LM +23 5 0 7 LM +24 2 0 7 LM +25 4 0 7 LM +26 4 0 7 LM +27 2 0 7 LM +28 6 1 7 LM +29 4 0 7 LM +30 7 1 7 LM +31 9 1 7 LM +32 7 1 7 LM +33 10 1 7 LM +34 7 1 7 LM +35 8 1 7 LM +36 8 1 7 LM +37 11 1 7 LM +38 7 1 7 LM +39 6 1 7 LM +40 6 1 7 LM +41 12 1 7 LM +42 9 1 7 LM +43 5 1 7 LM +44 8 1 7 LM +45 6 1 7 LM +46 7 1 7 LM +47 8 1 7 LM +48 7 1 7 LM +49 8 1 7 LM +50 6 1 7 LM +51 8 1 7 LM +52 7 1 7 LM +53 9 1 7 LM +54 9 1 7 LM +55 8 1 7 LM +56 10 1 7 LM +57 6 1 7 LM +58 10 1 7 LM +59 10 1 7 LM +60 8 1 7 LM +1 3 0 8 LM +2 3 0 8 LM +3 5 1 8 LM +4 4 0 8 LM +5 2 0 8 LM +6 4 0 8 LM +7 3 0 8 LM +8 4 0 8 LM +9 3 0 8 LM +10 4 0 8 LM +11 5 1 8 LM +12 3 0 8 LM +13 5 0 8 LM +14 3 0 8 LM +15 1 0 8 LM +16 2 0 8 LM +17 3 0 8 LM +18 6 1 8 LM +19 2 0 8 LM +20 2 0 8 LM +21 4 0 8 LM +22 3 0 8 LM +23 5 1 8 LM +24 2 0 8 LM +25 4 0 8 LM +26 4 0 8 LM +27 2 0 8 LM +28 6 1 8 LM +29 4 0 8 LM +30 7 1 8 LM +31 9 1 8 LM +32 7 1 8 LM +33 10 1 8 LM +34 7 1 8 LM +35 8 1 8 LM +36 8 1 8 LM +37 11 1 8 LM +38 7 1 8 LM +39 6 1 8 LM +40 6 1 8 LM +41 12 1 8 LM +42 9 1 8 LM +43 5 1 8 LM +44 8 1 8 LM +45 6 1 8 LM +46 7 1 8 LM +47 8 1 8 LM +48 7 1 8 LM +49 8 1 8 LM +50 6 1 8 LM +51 8 1 8 LM +52 7 1 8 LM +53 9 1 8 LM +54 9 1 8 LM +55 8 1 8 LM +56 10 1 8 LM +57 6 1 8 LM +58 10 1 8 LM +59 10 1 8 LM +60 8 1 8 LM +1 3 0 9 LM +2 3 0 9 LM +3 5 1 9 LM +4 4 0 9 LM +5 2 0 9 LM +6 4 0 9 LM +7 3 0 9 LM +8 4 0 9 LM +9 3 0 9 LM +10 4 0 9 LM +11 5 0 9 LM +12 3 0 9 LM +13 5 1 9 LM +14 3 0 9 LM +15 1 0 9 LM +16 2 0 9 LM +17 3 0 9 LM +18 6 1 9 LM +19 2 0 9 LM +20 2 0 9 LM +21 4 0 9 LM +22 3 0 9 LM +23 5 1 9 LM +24 2 0 9 LM +25 4 0 9 LM +26 4 0 9 LM +27 2 0 9 LM +28 6 1 9 LM +29 4 0 9 LM +30 7 1 9 LM +31 9 1 9 LM +32 7 1 9 LM +33 10 1 9 LM +34 7 1 9 LM +35 8 1 9 LM +36 8 1 9 LM +37 11 1 9 LM +38 7 1 9 LM +39 6 1 9 LM +40 6 1 9 LM +41 12 1 9 LM +42 9 1 9 LM +43 5 1 9 
LM +44 8 1 9 LM +45 6 1 9 LM +46 7 1 9 LM +47 8 1 9 LM +48 7 1 9 LM +49 8 1 9 LM +50 6 1 9 LM +51 8 1 9 LM +52 7 1 9 LM +53 9 1 9 LM +54 9 1 9 LM +55 8 1 9 LM +56 10 1 9 LM +57 6 1 9 LM +58 10 1 9 LM +59 10 1 9 LM +60 8 1 9 LM +1 3 0 10 LM +2 3 0 10 LM +3 5 0 10 LM +4 4 0 10 LM +5 2 0 10 LM +6 4 0 10 LM +7 3 0 10 LM +8 4 0 10 LM +9 3 0 10 LM +10 4 0 10 LM +11 5 0 10 LM +12 3 0 10 LM +13 5 1 10 LM +14 3 0 10 LM +15 1 0 10 LM +16 2 0 10 LM +17 3 0 10 LM +18 6 1 10 LM +19 2 0 10 LM +20 2 0 10 LM +21 4 0 10 LM +22 3 0 10 LM +23 5 1 10 LM +24 2 0 10 LM +25 4 0 10 LM +26 4 0 10 LM +27 2 0 10 LM +28 6 1 10 LM +29 4 0 10 LM +30 7 1 10 LM +31 9 1 10 LM +32 7 1 10 LM +33 10 1 10 LM +34 7 1 10 LM +35 8 1 10 LM +36 8 1 10 LM +37 11 1 10 LM +38 7 1 10 LM +39 6 1 10 LM +40 6 1 10 LM +41 12 1 10 LM +42 9 1 10 LM +43 5 1 10 LM +44 8 1 10 LM +45 6 1 10 LM +46 7 1 10 LM +47 8 1 10 LM +48 7 1 10 LM +49 8 1 10 LM +50 6 1 10 LM +51 8 1 10 LM +52 7 1 10 LM +53 9 1 10 LM +54 9 1 10 LM +55 8 1 10 LM +56 10 1 10 LM +57 6 1 10 LM +58 10 1 10 LM +59 10 1 10 LM +60 8 1 10 LM +1 3 0 11 LM +2 3 0 11 LM +3 5 1 11 LM +4 4 0 11 LM +5 2 0 11 LM +6 4 0 11 LM +7 3 0 11 LM +8 4 0 11 LM +9 3 0 11 LM +10 4 0 11 LM +11 5 1 11 LM +12 3 0 11 LM +13 5 1 11 LM +14 3 0 11 LM +15 1 0 11 LM +16 2 0 11 LM +17 3 0 11 LM +18 6 1 11 LM +19 2 0 11 LM +20 2 0 11 LM +21 4 0 11 LM +22 3 0 11 LM +23 5 1 11 LM +24 2 0 11 LM +25 4 0 11 LM +26 4 0 11 LM +27 2 0 11 LM +28 6 1 11 LM +29 4 0 11 LM +30 7 1 11 LM +31 9 1 11 LM +32 7 1 11 LM +33 10 1 11 LM +34 7 1 11 LM +35 8 1 11 LM +36 8 1 11 LM +37 11 1 11 LM +38 7 1 11 LM +39 6 1 11 LM +40 6 1 11 LM +41 12 1 11 LM +42 9 1 11 LM +43 5 1 11 LM +44 8 1 11 LM +45 6 1 11 LM +46 7 1 11 LM +47 8 1 11 LM +48 7 1 11 LM +49 8 1 11 LM +50 6 1 11 LM +51 8 1 11 LM +52 7 1 11 LM +53 9 1 11 LM +54 9 1 11 LM +55 8 1 11 LM +56 10 1 11 LM +57 6 1 11 LM +58 10 1 11 LM +59 10 1 11 LM +60 8 1 11 LM +1 3 0 12 LM +2 3 0 12 LM +3 5 1 12 LM +4 4 0 12 LM +5 2 0 12 LM +6 4 0 12 LM +7 3 0 12 LM +8 4 0 12 LM +9 3 0 12 LM +10 4 0 12 LM +11 5 1 12 LM +12 3 0 12 LM +13 5 1 12 LM +14 3 0 12 LM +15 1 0 12 LM +16 2 0 12 LM +17 3 0 12 LM +18 6 1 12 LM +19 2 0 12 LM +20 2 0 12 LM +21 4 0 12 LM +22 3 0 12 LM +23 5 1 12 LM +24 2 0 12 LM +25 4 0 12 LM +26 4 0 12 LM +27 2 0 12 LM +28 6 1 12 LM +29 4 0 12 LM +30 7 1 12 LM +31 9 1 12 LM +32 7 1 12 LM +33 10 1 12 LM +34 7 1 12 LM +35 8 1 12 LM +36 8 1 12 LM +37 11 1 12 LM +38 7 1 12 LM +39 6 1 12 LM +40 6 1 12 LM +41 12 1 12 LM +42 9 1 12 LM +43 5 1 12 LM +44 8 1 12 LM +45 6 1 12 LM +46 7 1 12 LM +47 8 1 12 LM +48 7 1 12 LM +49 8 1 12 LM +50 6 1 12 LM +51 8 1 12 LM +52 7 1 12 LM +53 9 1 12 LM +54 9 1 12 LM +55 8 1 12 LM +56 10 1 12 LM +57 6 1 12 LM +58 10 1 12 LM +59 10 1 12 LM +60 8 1 12 LM +1 3 0 13 LM +2 3 0 13 LM +3 5 0 13 LM +4 4 0 13 LM +5 2 0 13 LM +6 4 0 13 LM +7 3 0 13 LM +8 4 0 13 LM +9 3 0 13 LM +10 4 0 13 LM +11 5 1 13 LM +12 3 0 13 LM +13 5 0 13 LM +14 3 0 13 LM +15 1 0 13 LM +16 2 0 13 LM +17 3 0 13 LM +18 6 1 13 LM +19 2 0 13 LM +20 2 0 13 LM +21 4 0 13 LM +22 3 0 13 LM +23 5 1 13 LM +24 2 0 13 LM +25 4 0 13 LM +26 4 0 13 LM +27 2 0 13 LM +28 6 1 13 LM +29 4 0 13 LM +30 7 1 13 LM +31 9 1 13 LM +32 7 1 13 LM +33 10 1 13 LM +34 7 1 13 LM +35 8 1 13 LM +36 8 1 13 LM +37 11 1 13 LM +38 7 1 13 LM +39 6 1 13 LM +40 6 1 13 LM +41 12 1 13 LM +42 9 1 13 LM +43 5 1 13 LM +44 8 1 13 LM +45 6 1 13 LM +46 7 1 13 LM +47 8 1 13 LM +48 7 1 13 LM +49 8 1 13 LM +50 6 1 13 LM +51 8 1 13 LM +52 7 1 13 LM +53 9 1 13 LM +54 9 1 13 LM +55 8 1 13 LM +56 10 1 13 LM +57 6 1 13 LM +58 10 1 13 LM +59 10 1 
13 LM +60 8 1 13 LM +1 3 0 14 LM +2 3 0 14 LM +3 5 1 14 LM +4 4 0 14 LM +5 2 0 14 LM +6 4 0 14 LM +7 3 0 14 LM +8 4 0 14 LM +9 3 0 14 LM +10 4 0 14 LM +11 5 1 14 LM +12 3 0 14 LM +13 5 1 14 LM +14 3 0 14 LM +15 1 0 14 LM +16 2 0 14 LM +17 3 0 14 LM +18 6 1 14 LM +19 2 0 14 LM +20 2 0 14 LM +21 4 0 14 LM +22 3 0 14 LM +23 5 1 14 LM +24 2 0 14 LM +25 4 0 14 LM +26 4 0 14 LM +27 2 0 14 LM +28 6 1 14 LM +29 4 0 14 LM +30 7 1 14 LM +31 9 1 14 LM +32 7 1 14 LM +33 10 1 14 LM +34 7 1 14 LM +35 8 1 14 LM +36 8 1 14 LM +37 11 1 14 LM +38 7 1 14 LM +39 6 1 14 LM +40 6 1 14 LM +41 12 1 14 LM +42 9 1 14 LM +43 5 1 14 LM +44 8 1 14 LM +45 6 1 14 LM +46 7 1 14 LM +47 8 1 14 LM +48 7 1 14 LM +49 8 1 14 LM +50 6 1 14 LM +51 8 1 14 LM +52 7 1 14 LM +53 9 1 14 LM +54 9 1 14 LM +55 8 1 14 LM +56 10 1 14 LM +57 6 1 14 LM +58 10 1 14 LM +59 10 1 14 LM +60 8 1 14 LM +1 3 0 15 LM +2 3 0 15 LM +3 5 1 15 LM +4 4 0 15 LM +5 2 0 15 LM +6 4 0 15 LM +7 3 0 15 LM +8 4 0 15 LM +9 3 0 15 LM +10 4 0 15 LM +11 5 1 15 LM +12 3 0 15 LM +13 5 1 15 LM +14 3 0 15 LM +15 1 0 15 LM +16 2 0 15 LM +17 3 0 15 LM +18 6 1 15 LM +19 2 0 15 LM +20 2 0 15 LM +21 4 0 15 LM +22 3 0 15 LM +23 5 0 15 LM +24 2 0 15 LM +25 4 0 15 LM +26 4 0 15 LM +27 2 0 15 LM +28 6 1 15 LM +29 4 0 15 LM +30 7 1 15 LM +31 9 1 15 LM +32 7 1 15 LM +33 10 1 15 LM +34 7 1 15 LM +35 8 1 15 LM +36 8 1 15 LM +37 11 1 15 LM +38 7 1 15 LM +39 6 1 15 LM +40 6 1 15 LM +41 12 1 15 LM +42 9 1 15 LM +43 5 1 15 LM +44 8 1 15 LM +45 6 1 15 LM +46 7 1 15 LM +47 8 1 15 LM +48 7 1 15 LM +49 8 1 15 LM +50 6 1 15 LM +51 8 1 15 LM +52 7 1 15 LM +53 9 1 15 LM +54 9 1 15 LM +55 8 1 15 LM +56 10 1 15 LM +57 6 1 15 LM +58 10 1 15 LM +59 10 1 15 LM +60 8 1 15 LM +1 3 0 16 LM +2 3 0 16 LM +3 5 1 16 LM +4 4 0 16 LM +5 2 0 16 LM +6 4 0 16 LM +7 3 0 16 LM +8 4 0 16 LM +9 3 0 16 LM +10 4 0 16 LM +11 5 0 16 LM +12 3 0 16 LM +13 5 1 16 LM +14 3 0 16 LM +15 1 0 16 LM +16 2 0 16 LM +17 3 0 16 LM +18 6 1 16 LM +19 2 0 16 LM +20 2 0 16 LM +21 4 0 16 LM +22 3 0 16 LM +23 5 1 16 LM +24 2 0 16 LM +25 4 0 16 LM +26 4 0 16 LM +27 2 0 16 LM +28 6 1 16 LM +29 4 0 16 LM +30 7 1 16 LM +31 9 1 16 LM +32 7 1 16 LM +33 10 1 16 LM +34 7 1 16 LM +35 8 1 16 LM +36 8 1 16 LM +37 11 1 16 LM +38 7 1 16 LM +39 6 1 16 LM +40 6 1 16 LM +41 12 1 16 LM +42 9 1 16 LM +43 5 1 16 LM +44 8 1 16 LM +45 6 1 16 LM +46 7 1 16 LM +47 8 1 16 LM +48 7 1 16 LM +49 8 1 16 LM +50 6 1 16 LM +51 8 1 16 LM +52 7 1 16 LM +53 9 1 16 LM +54 9 1 16 LM +55 8 1 16 LM +56 10 1 16 LM +57 6 1 16 LM +58 10 1 16 LM +59 10 1 16 LM +60 8 1 16 LM +1 3 0 17 LM +2 3 0 17 LM +3 5 1 17 LM +4 4 0 17 LM +5 2 0 17 LM +6 4 0 17 LM +7 3 0 17 LM +8 4 0 17 LM +9 3 0 17 LM +10 4 0 17 LM +11 5 1 17 LM +12 3 0 17 LM +13 5 1 17 LM +14 3 0 17 LM +15 1 0 17 LM +16 2 0 17 LM +17 3 0 17 LM +18 6 1 17 LM +19 2 0 17 LM +20 2 0 17 LM +21 4 0 17 LM +22 3 0 17 LM +23 5 1 17 LM +24 2 0 17 LM +25 4 0 17 LM +26 4 0 17 LM +27 2 0 17 LM +28 6 1 17 LM +29 4 0 17 LM +30 7 1 17 LM +31 9 1 17 LM +32 7 1 17 LM +33 10 1 17 LM +34 7 1 17 LM +35 8 1 17 LM +36 8 1 17 LM +37 11 1 17 LM +38 7 1 17 LM +39 6 1 17 LM +40 6 1 17 LM +41 12 1 17 LM +42 9 1 17 LM +43 5 1 17 LM +44 8 1 17 LM +45 6 1 17 LM +46 7 1 17 LM +47 8 1 17 LM +48 7 1 17 LM +49 8 1 17 LM +50 6 1 17 LM +51 8 1 17 LM +52 7 1 17 LM +53 9 1 17 LM +54 9 1 17 LM +55 8 1 17 LM +56 10 1 17 LM +57 6 1 17 LM +58 10 1 17 LM +59 10 1 17 LM +60 8 1 17 LM +1 3 0 18 LM +2 3 0 18 LM +3 5 1 18 LM +4 4 0 18 LM +5 2 0 18 LM +6 4 0 18 LM +7 3 0 18 LM +8 4 0 18 LM +9 3 0 18 LM +10 4 0 18 LM +11 5 0 18 LM +12 3 0 18 LM +13 5 1 18 LM +14 3 0 18 LM 
+15 1 0 18 LM +16 2 0 18 LM +17 3 0 18 LM +18 6 1 18 LM +19 2 0 18 LM +20 2 0 18 LM +21 4 0 18 LM +22 3 0 18 LM +23 5 1 18 LM +24 2 0 18 LM +25 4 0 18 LM +26 4 0 18 LM +27 2 0 18 LM +28 6 1 18 LM +29 4 0 18 LM +30 7 1 18 LM +31 9 1 18 LM +32 7 1 18 LM +33 10 1 18 LM +34 7 1 18 LM +35 8 1 18 LM +36 8 1 18 LM +37 11 1 18 LM +38 7 1 18 LM +39 6 1 18 LM +40 6 1 18 LM +41 12 1 18 LM +42 9 1 18 LM +43 5 1 18 LM +44 8 1 18 LM +45 6 1 18 LM +46 7 1 18 LM +47 8 1 18 LM +48 7 1 18 LM +49 8 0 18 LM +50 6 1 18 LM +51 8 1 18 LM +52 7 1 18 LM +53 9 1 18 LM +54 9 1 18 LM +55 8 1 18 LM +56 10 1 18 LM +57 6 1 18 LM +58 10 1 18 LM +59 10 1 18 LM +60 8 1 18 LM +1 3 0 19 LM +2 3 0 19 LM +3 5 1 19 LM +4 4 0 19 LM +5 2 0 19 LM +6 4 0 19 LM +7 3 0 19 LM +8 4 0 19 LM +9 3 0 19 LM +10 4 0 19 LM +11 5 1 19 LM +12 3 0 19 LM +13 5 1 19 LM +14 3 0 19 LM +15 1 0 19 LM +16 2 0 19 LM +17 3 0 19 LM +18 6 1 19 LM +19 2 0 19 LM +20 2 0 19 LM +21 4 0 19 LM +22 3 0 19 LM +23 5 1 19 LM +24 2 0 19 LM +25 4 0 19 LM +26 4 0 19 LM +27 2 0 19 LM +28 6 1 19 LM +29 4 0 19 LM +30 7 1 19 LM +31 9 1 19 LM +32 7 1 19 LM +33 10 1 19 LM +34 7 1 19 LM +35 8 1 19 LM +36 8 1 19 LM +37 11 1 19 LM +38 7 1 19 LM +39 6 1 19 LM +40 6 1 19 LM +41 12 1 19 LM +42 9 1 19 LM +43 5 1 19 LM +44 8 1 19 LM +45 6 1 19 LM +46 7 1 19 LM +47 8 1 19 LM +48 7 1 19 LM +49 8 1 19 LM +50 6 1 19 LM +51 8 1 19 LM +52 7 1 19 LM +53 9 1 19 LM +54 9 1 19 LM +55 8 1 19 LM +56 10 1 19 LM +57 6 1 19 LM +58 10 1 19 LM +59 10 1 19 LM +60 8 1 19 LM +1 3 0 20 LM +2 3 0 20 LM +3 5 0 20 LM +4 4 0 20 LM +5 2 0 20 LM +6 4 0 20 LM +7 3 0 20 LM +8 4 0 20 LM +9 3 0 20 LM +10 4 0 20 LM +11 5 1 20 LM +12 3 0 20 LM +13 5 1 20 LM +14 3 0 20 LM +15 1 0 20 LM +16 2 0 20 LM +17 3 0 20 LM +18 6 1 20 LM +19 2 0 20 LM +20 2 0 20 LM +21 4 0 20 LM +22 3 0 20 LM +23 5 1 20 LM +24 2 0 20 LM +25 4 0 20 LM +26 4 0 20 LM +27 2 0 20 LM +28 6 1 20 LM +29 4 0 20 LM +30 7 1 20 LM +31 9 1 20 LM +32 7 1 20 LM +33 10 1 20 LM +34 7 1 20 LM +35 8 1 20 LM +36 8 1 20 LM +37 11 1 20 LM +38 7 1 20 LM +39 6 1 20 LM +40 6 1 20 LM +41 12 1 20 LM +42 9 1 20 LM +43 5 1 20 LM +44 8 1 20 LM +45 6 1 20 LM +46 7 1 20 LM +47 8 1 20 LM +48 7 1 20 LM +49 8 1 20 LM +50 6 1 20 LM +51 8 1 20 LM +52 7 1 20 LM +53 9 1 20 LM +54 9 1 20 LM +55 8 1 20 LM +56 10 1 20 LM +57 6 1 20 LM +58 10 1 20 LM +59 10 1 20 LM +60 8 1 20 LM +1 3 0 21 LM +2 3 0 21 LM +3 5 1 21 LM +4 4 0 21 LM +5 2 0 21 LM +6 4 0 21 LM +7 3 0 21 LM +8 4 0 21 LM +9 3 0 21 LM +10 4 0 21 LM +11 5 1 21 LM +12 3 0 21 LM +13 5 1 21 LM +14 3 0 21 LM +15 1 0 21 LM +16 2 0 21 LM +17 3 0 21 LM +18 6 1 21 LM +19 2 0 21 LM +20 2 0 21 LM +21 4 0 21 LM +22 3 0 21 LM +23 5 0 21 LM +24 2 0 21 LM +25 4 0 21 LM +26 4 0 21 LM +27 2 0 21 LM +28 6 1 21 LM +29 4 0 21 LM +30 7 1 21 LM +31 9 1 21 LM +32 7 1 21 LM +33 10 1 21 LM +34 7 1 21 LM +35 8 1 21 LM +36 8 1 21 LM +37 11 1 21 LM +38 7 1 21 LM +39 6 1 21 LM +40 6 1 21 LM +41 12 1 21 LM +42 9 1 21 LM +43 5 1 21 LM +44 8 1 21 LM +45 6 1 21 LM +46 7 1 21 LM +47 8 1 21 LM +48 7 1 21 LM +49 8 1 21 LM +50 6 1 21 LM +51 8 1 21 LM +52 7 1 21 LM +53 9 1 21 LM +54 9 1 21 LM +55 8 1 21 LM +56 10 1 21 LM +57 6 1 21 LM +58 10 1 21 LM +59 10 1 21 LM +60 8 1 21 LM +1 3 0 22 LM +2 3 0 22 LM +3 5 1 22 LM +4 4 0 22 LM +5 2 0 22 LM +6 4 0 22 LM +7 3 0 22 LM +8 4 0 22 LM +9 3 0 22 LM +10 4 0 22 LM +11 5 1 22 LM +12 3 0 22 LM +13 5 1 22 LM +14 3 0 22 LM +15 1 0 22 LM +16 2 0 22 LM +17 3 0 22 LM +18 6 1 22 LM +19 2 0 22 LM +20 2 0 22 LM +21 4 0 22 LM +22 3 0 22 LM +23 5 1 22 LM +24 2 0 22 LM +25 4 0 22 LM +26 4 0 22 LM +27 2 0 22 LM +28 6 1 22 LM +29 4 0 22 
LM +30 7 1 22 LM +31 9 1 22 LM +32 7 1 22 LM +33 10 1 22 LM +34 7 1 22 LM +35 8 1 22 LM +36 8 1 22 LM +37 11 1 22 LM +38 7 1 22 LM +39 6 1 22 LM +40 6 1 22 LM +41 12 1 22 LM +42 9 1 22 LM +43 5 0 22 LM +44 8 1 22 LM +45 6 1 22 LM +46 7 1 22 LM +47 8 1 22 LM +48 7 1 22 LM +49 8 1 22 LM +50 6 1 22 LM +51 8 1 22 LM +52 7 1 22 LM +53 9 1 22 LM +54 9 1 22 LM +55 8 1 22 LM +56 10 1 22 LM +57 6 1 22 LM +58 10 1 22 LM +59 10 1 22 LM +60 8 1 22 LM +1 3 0 23 LM +2 3 0 23 LM +3 5 1 23 LM +4 4 0 23 LM +5 2 0 23 LM +6 4 0 23 LM +7 3 0 23 LM +8 4 0 23 LM +9 3 0 23 LM +10 4 0 23 LM +11 5 1 23 LM +12 3 0 23 LM +13 5 1 23 LM +14 3 0 23 LM +15 1 0 23 LM +16 2 0 23 LM +17 3 0 23 LM +18 6 1 23 LM +19 2 0 23 LM +20 2 0 23 LM +21 4 0 23 LM +22 3 0 23 LM +23 5 1 23 LM +24 2 0 23 LM +25 4 0 23 LM +26 4 0 23 LM +27 2 0 23 LM +28 6 1 23 LM +29 4 0 23 LM +30 7 1 23 LM +31 9 1 23 LM +32 7 1 23 LM +33 10 1 23 LM +34 7 1 23 LM +35 8 1 23 LM +36 8 1 23 LM +37 11 1 23 LM +38 7 1 23 LM +39 6 1 23 LM +40 6 1 23 LM +41 12 1 23 LM +42 9 1 23 LM +43 5 1 23 LM +44 8 1 23 LM +45 6 1 23 LM +46 7 1 23 LM +47 8 1 23 LM +48 7 1 23 LM +49 8 1 23 LM +50 6 1 23 LM +51 8 1 23 LM +52 7 1 23 LM +53 9 1 23 LM +54 9 1 23 LM +55 8 1 23 LM +56 10 1 23 LM +57 6 1 23 LM +58 10 1 23 LM +59 10 1 23 LM +60 8 1 23 LM +1 3 0 24 LM +2 3 0 24 LM +3 5 1 24 LM +4 4 0 24 LM +5 2 0 24 LM +6 4 0 24 LM +7 3 0 24 LM +8 4 0 24 LM +9 3 0 24 LM +10 4 0 24 LM +11 5 0 24 LM +12 3 0 24 LM +13 5 1 24 LM +14 3 0 24 LM +15 1 0 24 LM +16 2 0 24 LM +17 3 0 24 LM +18 6 1 24 LM +19 2 0 24 LM +20 2 0 24 LM +21 4 0 24 LM +22 3 0 24 LM +23 5 0 24 LM +24 2 0 24 LM +25 4 0 24 LM +26 4 0 24 LM +27 2 0 24 LM +28 6 1 24 LM +29 4 0 24 LM +30 7 1 24 LM +31 9 1 24 LM +32 7 1 24 LM +33 10 1 24 LM +34 7 1 24 LM +35 8 1 24 LM +36 8 1 24 LM +37 11 1 24 LM +38 7 1 24 LM +39 6 1 24 LM +40 6 1 24 LM +41 12 1 24 LM +42 9 1 24 LM +43 5 1 24 LM +44 8 1 24 LM +45 6 1 24 LM +46 7 1 24 LM +47 8 1 24 LM +48 7 1 24 LM +49 8 1 24 LM +50 6 1 24 LM +51 8 1 24 LM +52 7 1 24 LM +53 9 1 24 LM +54 9 1 24 LM +55 8 1 24 LM +56 10 1 24 LM +57 6 1 24 LM +58 10 1 24 LM +59 10 1 24 LM +60 8 1 24 LM +1 3 0 25 LM +2 3 0 25 LM +3 5 1 25 LM +4 4 0 25 LM +5 2 0 25 LM +6 4 0 25 LM +7 3 0 25 LM +8 4 0 25 LM +9 3 0 25 LM +10 4 0 25 LM +11 5 0 25 LM +12 3 0 25 LM +13 5 0 25 LM +14 3 0 25 LM +15 1 0 25 LM +16 2 0 25 LM +17 3 0 25 LM +18 6 1 25 LM +19 2 0 25 LM +20 2 0 25 LM +21 4 0 25 LM +22 3 0 25 LM +23 5 1 25 LM +24 2 0 25 LM +25 4 0 25 LM +26 4 0 25 LM +27 2 0 25 LM +28 6 1 25 LM +29 4 0 25 LM +30 7 1 25 LM +31 9 1 25 LM +32 7 1 25 LM +33 10 1 25 LM +34 7 1 25 LM +35 8 1 25 LM +36 8 1 25 LM +37 11 1 25 LM +38 7 1 25 LM +39 6 1 25 LM +40 6 1 25 LM +41 12 1 25 LM +42 9 1 25 LM +43 5 1 25 LM +44 8 1 25 LM +45 6 1 25 LM +46 7 1 25 LM +47 8 1 25 LM +48 7 1 25 LM +49 8 1 25 LM +50 6 1 25 LM +51 8 1 25 LM +52 7 1 25 LM +53 9 1 25 LM +54 9 1 25 LM +55 8 1 25 LM +56 10 1 25 LM +57 6 1 25 LM +58 10 1 25 LM +59 10 1 25 LM +60 8 1 25 LM +1 3 0 26 LM +2 3 0 26 LM +3 5 0 26 LM +4 4 0 26 LM +5 2 0 26 LM +6 4 0 26 LM +7 3 0 26 LM +8 4 0 26 LM +9 3 0 26 LM +10 4 0 26 LM +11 5 1 26 LM +12 3 0 26 LM +13 5 1 26 LM +14 3 0 26 LM +15 1 0 26 LM +16 2 0 26 LM +17 3 0 26 LM +18 6 1 26 LM +19 2 0 26 LM +20 2 0 26 LM +21 4 0 26 LM +22 3 0 26 LM +23 5 1 26 LM +24 2 0 26 LM +25 4 0 26 LM +26 4 0 26 LM +27 2 0 26 LM +28 6 1 26 LM +29 4 0 26 LM +30 7 1 26 LM +31 9 1 26 LM +32 7 1 26 LM +33 10 1 26 LM +34 7 1 26 LM +35 8 1 26 LM +36 8 1 26 LM +37 11 1 26 LM +38 7 1 26 LM +39 6 1 26 LM +40 6 1 26 LM +41 12 1 26 LM +42 9 1 26 LM +43 5 1 26 LM +44 
8 1 26 LM +45 6 1 26 LM +46 7 1 26 LM +47 8 1 26 LM +48 7 1 26 LM +49 8 1 26 LM +50 6 1 26 LM +51 8 1 26 LM +52 7 1 26 LM +53 9 1 26 LM +54 9 1 26 LM +55 8 1 26 LM +56 10 1 26 LM +57 6 1 26 LM +58 10 1 26 LM +59 10 1 26 LM +60 8 1 26 LM +1 3 0 27 LM +2 3 0 27 LM +3 5 1 27 LM +4 4 0 27 LM +5 2 0 27 LM +6 4 0 27 LM +7 3 0 27 LM +8 4 0 27 LM +9 3 0 27 LM +10 4 0 27 LM +11 5 1 27 LM +12 3 0 27 LM +13 5 1 27 LM +14 3 0 27 LM +15 1 0 27 LM +16 2 0 27 LM +17 3 0 27 LM +18 6 1 27 LM +19 2 0 27 LM +20 2 0 27 LM +21 4 0 27 LM +22 3 0 27 LM +23 5 1 27 LM +24 2 0 27 LM +25 4 0 27 LM +26 4 0 27 LM +27 2 0 27 LM +28 6 1 27 LM +29 4 0 27 LM +30 7 1 27 LM +31 9 1 27 LM +32 7 1 27 LM +33 10 1 27 LM +34 7 1 27 LM +35 8 1 27 LM +36 8 1 27 LM +37 11 1 27 LM +38 7 1 27 LM +39 6 1 27 LM +40 6 1 27 LM +41 12 1 27 LM +42 9 1 27 LM +43 5 1 27 LM +44 8 1 27 LM +45 6 1 27 LM +46 7 1 27 LM +47 8 1 27 LM +48 7 1 27 LM +49 8 1 27 LM +50 6 1 27 LM +51 8 1 27 LM +52 7 1 27 LM +53 9 1 27 LM +54 9 1 27 LM +55 8 1 27 LM +56 10 1 27 LM +57 6 1 27 LM +58 10 1 27 LM +59 10 1 27 LM +60 8 1 27 LM +1 3 0 28 LM +2 3 0 28 LM +3 5 0 28 LM +4 4 0 28 LM +5 2 0 28 LM +6 4 0 28 LM +7 3 0 28 LM +8 4 0 28 LM +9 3 0 28 LM +10 4 0 28 LM +11 5 0 28 LM +12 3 0 28 LM +13 5 1 28 LM +14 3 0 28 LM +15 1 0 28 LM +16 2 0 28 LM +17 3 0 28 LM +18 6 1 28 LM +19 2 0 28 LM +20 2 0 28 LM +21 4 0 28 LM +22 3 0 28 LM +23 5 1 28 LM +24 2 0 28 LM +25 4 0 28 LM +26 4 0 28 LM +27 2 0 28 LM +28 6 1 28 LM +29 4 0 28 LM +30 7 1 28 LM +31 9 1 28 LM +32 7 1 28 LM +33 10 1 28 LM +34 7 1 28 LM +35 8 1 28 LM +36 8 1 28 LM +37 11 1 28 LM +38 7 1 28 LM +39 6 1 28 LM +40 6 1 28 LM +41 12 1 28 LM +42 9 1 28 LM +43 5 0 28 LM +44 8 1 28 LM +45 6 1 28 LM +46 7 1 28 LM +47 8 1 28 LM +48 7 1 28 LM +49 8 1 28 LM +50 6 1 28 LM +51 8 1 28 LM +52 7 1 28 LM +53 9 1 28 LM +54 9 1 28 LM +55 8 1 28 LM +56 10 1 28 LM +57 6 1 28 LM +58 10 1 28 LM +59 10 1 28 LM +60 8 1 28 LM +1 3 0 29 LM +2 3 0 29 LM +3 5 1 29 LM +4 4 0 29 LM +5 2 0 29 LM +6 4 0 29 LM +7 3 0 29 LM +8 4 0 29 LM +9 3 0 29 LM +10 4 0 29 LM +11 5 1 29 LM +12 3 0 29 LM +13 5 1 29 LM +14 3 0 29 LM +15 1 0 29 LM +16 2 0 29 LM +17 3 0 29 LM +18 6 1 29 LM +19 2 0 29 LM +20 2 0 29 LM +21 4 0 29 LM +22 3 0 29 LM +23 5 0 29 LM +24 2 0 29 LM +25 4 0 29 LM +26 4 0 29 LM +27 2 0 29 LM +28 6 1 29 LM +29 4 0 29 LM +30 7 1 29 LM +31 9 1 29 LM +32 7 1 29 LM +33 10 1 29 LM +34 7 1 29 LM +35 8 1 29 LM +36 8 1 29 LM +37 11 1 29 LM +38 7 1 29 LM +39 6 1 29 LM +40 6 1 29 LM +41 12 1 29 LM +42 9 1 29 LM +43 5 0 29 LM +44 8 1 29 LM +45 6 1 29 LM +46 7 1 29 LM +47 8 1 29 LM +48 7 1 29 LM +49 8 1 29 LM +50 6 1 29 LM +51 8 1 29 LM +52 7 1 29 LM +53 9 1 29 LM +54 9 1 29 LM +55 8 1 29 LM +56 10 1 29 LM +57 6 1 29 LM +58 10 1 29 LM +59 10 1 29 LM +60 8 1 29 LM +1 3 0 30 LM +2 3 0 30 LM +3 5 1 30 LM +4 4 0 30 LM +5 2 0 30 LM +6 4 0 30 LM +7 3 0 30 LM +8 4 0 30 LM +9 3 0 30 LM +10 4 0 30 LM +11 5 1 30 LM +12 3 0 30 LM +13 5 1 30 LM +14 3 0 30 LM +15 1 0 30 LM +16 2 0 30 LM +17 3 0 30 LM +18 6 1 30 LM +19 2 0 30 LM +20 2 0 30 LM +21 4 0 30 LM +22 3 0 30 LM +23 5 1 30 LM +24 2 0 30 LM +25 4 0 30 LM +26 4 0 30 LM +27 2 0 30 LM +28 6 1 30 LM +29 4 0 30 LM +30 7 1 30 LM +31 9 1 30 LM +32 7 1 30 LM +33 10 1 30 LM +34 7 1 30 LM +35 8 1 30 LM +36 8 1 30 LM +37 11 1 30 LM +38 7 1 30 LM +39 6 0 30 LM +40 6 1 30 LM +41 12 1 30 LM +42 9 1 30 LM +43 5 1 30 LM +44 8 1 30 LM +45 6 1 30 LM +46 7 1 30 LM +47 8 1 30 LM +48 7 1 30 LM +49 8 1 30 LM +50 6 1 30 LM +51 8 1 30 LM +52 7 1 30 LM +53 9 1 30 LM +54 9 1 30 LM +55 8 1 30 LM +56 10 1 30 LM +57 6 1 30 LM +58 10 1 30 
LM +59 10 1 30 LM +60 8 1 30 LM diff --git a/R/inst/extdata/wcs_answersheet.txt b/R/inst/extdata/wcs_answersheet.txt new file mode 100644 index 00000000..207ac3a1 --- /dev/null +++ b/R/inst/extdata/wcs_answersheet.txt @@ -0,0 +1,4 @@ + 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 +Color 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2 +Form 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3 +Number 1 4 2 1 4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1 1 4 2 1 4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1 diff --git a/R/inst/extdata/wcs_exampleData.txt b/R/inst/extdata/wcs_exampleData.txt new file mode 100644 index 00000000..80cf86db --- /dev/null +++ b/R/inst/extdata/wcs_exampleData.txt @@ -0,0 +1,1158 @@ +choice outcome subjID trial +1 0 1 1 +1 1 1 2 +4 1 1 3 +1 1 1 4 +2 1 1 5 +3 1 1 6 +4 1 1 7 +1 1 1 8 +2 1 1 9 +3 1 1 10 +4 1 1 11 +1 1 1 12 +4 0 1 13 +3 0 1 14 +1 1 1 15 +4 1 1 16 +2 1 1 17 +4 1 1 18 +1 1 1 19 +4 1 1 20 +2 1 1 21 +3 1 1 22 +2 1 1 23 +1 1 1 24 +2 0 1 25 +2 1 1 26 +3 1 1 27 +4 1 1 28 +2 1 1 29 +1 1 1 30 +4 1 1 31 +1 1 1 32 +3 1 1 33 +4 1 1 34 +1 1 1 35 +4 0 1 36 +2 0 1 37 +3 1 1 38 +1 0 1 39 +4 1 1 40 +1 1 1 41 +3 0 1 42 +4 0 1 43 +2 1 1 44 +1 1 1 45 +3 1 1 46 +4 1 1 47 +3 1 1 48 +2 1 1 49 +3 1 1 50 +4 1 1 51 +2 1 1 52 +1 1 1 53 +2 0 1 54 +4 0 1 55 +2 0 1 56 +1 0 1 57 +3 0 1 58 +3 1 1 59 +2 0 1 60 +4 0 1 61 +3 1 1 62 +1 1 1 63 +3 1 1 64 +1 1 1 65 +3 1 1 66 +1 1 1 67 +4 1 1 68 +2 1 1 69 +3 1 1 70 +1 1 1 71 +3 1 1 72 +3 0 1 73 +2 1 1 74 +1 1 1 75 +3 1 1 76 +2 1 1 77 +1 1 1 78 +3 1 1 79 +4 1 1 80 +2 1 1 81 +3 1 1 82 +4 1 1 83 +1 0 2 1 +1 1 2 2 +1 0 2 3 +1 1 2 4 +2 1 2 5 +3 1 2 6 +4 1 2 7 +1 1 2 8 +2 1 2 9 +3 1 2 10 +4 1 2 11 +1 1 2 12 +3 0 2 13 +3 1 2 14 +2 1 2 15 +4 1 2 16 +1 1 2 17 +3 1 2 18 +1 1 2 19 +3 1 2 20 +4 1 2 21 +2 1 2 22 +3 1 2 23 +4 0 2 24 +1 0 2 25 +3 0 2 26 +4 0 2 27 +1 0 2 28 +2 1 2 29 +3 1 2 30 +4 1 2 31 +1 0 2 32 +3 0 2 33 +3 1 2 34 +2 1 2 35 +4 0 2 36 +4 1 2 37 +3 0 2 38 +4 1 2 39 +3 1 2 40 +1 1 2 41 +4 0 2 42 +3 0 2 43 +1 1 2 44 +2 1 2 45 +3 1 2 46 +4 1 2 47 +1 1 2 48 +2 0 2 49 +2 1 2 50 +1 1 2 51 +2 1 2 52 +4 1 2 53 +2 0 2 54 +3 1 2 55 +4 1 2 56 +3 1 2 57 +1 1 2 58 +3 1 2 59 +4 1 2 60 +2 1 2 61 +3 1 2 62 +1 1 2 63 +3 1 2 64 +2 0 2 65 +3 0 2 66 +1 0 2 67 +4 0 2 68 +2 0 2 69 +3 0 2 70 +4 1 2 71 +3 1 2 72 +3 0 2 73 +4 0 2 74 +2 0 2 75 +3 1 2 76 +4 0 2 77 +1 1 2 78 +1 0 2 79 +4 1 2 80 +2 1 2 81 +3 1 2 82 +4 1 2 83 +1 1 2 84 +4 0 2 85 +3 1 2 86 +2 1 2 87 +4 0 2 88 +1 0 2 89 +3 0 2 90 +4 0 2 91 +1 0 2 92 +2 1 2 93 +1 1 2 94 +4 1 2 95 +1 1 2 96 +4 0 2 97 +3 0 2 98 +2 0 2 99 +3 0 2 100 +4 0 2 101 +3 1 2 102 +2 0 2 103 +3 0 2 104 +1 1 2 105 +4 0 2 106 +3 0 2 107 
+1 0 2 108 +1 0 2 109 +3 0 2 110 +4 0 2 111 +3 1 2 112 +3 0 2 113 +2 0 2 114 +1 1 2 115 +2 0 2 116 +1 0 2 117 +1 0 2 118 +3 0 2 119 +4 0 2 120 +3 0 2 121 +3 0 2 122 +3 1 2 123 +4 0 2 124 +2 0 2 125 +3 1 2 126 +1 0 2 127 +3 0 2 128 +1 0 3 1 +1 1 3 2 +4 1 3 3 +1 1 3 4 +2 1 3 5 +3 1 3 6 +4 1 3 7 +1 1 3 8 +2 1 3 9 +3 1 3 10 +4 1 3 11 +1 1 3 12 +4 0 3 13 +3 0 3 14 +2 0 3 15 +4 1 3 16 +1 0 3 17 +3 0 3 18 +1 1 3 19 +3 0 3 20 +4 0 3 21 +3 1 3 22 +2 1 3 23 +1 1 3 24 +4 0 3 25 +1 1 3 26 +3 1 3 27 +4 1 3 28 +2 1 3 29 +3 1 3 30 +4 1 3 31 +2 1 3 32 +4 1 3 33 +3 1 3 34 +1 0 3 35 +4 0 3 36 +2 0 3 37 +3 0 3 38 +1 0 3 39 +4 0 3 40 +1 1 3 41 +3 0 3 42 +4 1 3 43 +2 0 3 44 +3 0 3 45 +3 1 3 46 +4 1 3 47 +3 0 3 48 +2 0 3 49 +3 0 3 50 +4 0 3 51 +2 1 3 52 +1 0 3 53 +2 0 3 54 +4 0 3 55 +2 0 3 56 +1 0 3 57 +1 1 3 58 +3 1 3 59 +4 1 3 60 +2 1 3 61 +3 1 3 62 +1 1 3 63 +3 1 3 64 +1 1 3 65 +3 1 3 66 +1 1 3 67 +1 1 3 68 +2 0 3 69 +1 1 3 70 +4 1 3 71 +1 0 3 72 +4 1 3 73 +2 1 3 74 +1 1 3 75 +3 1 3 76 +2 1 3 77 +1 1 3 78 +3 1 3 79 +4 1 3 80 +2 1 3 81 +3 1 3 82 +4 0 3 83 +1 0 3 84 +4 1 3 85 +2 1 3 86 +3 1 3 87 +4 1 3 88 +1 1 3 89 +3 1 3 90 +4 1 3 91 +1 1 3 92 +2 1 3 93 +1 1 3 94 +2 0 3 95 +1 0 3 96 +4 1 3 97 +3 1 3 98 +2 1 3 99 +1 1 3 100 +4 1 3 101 +2 1 3 102 +4 1 3 103 +3 1 3 104 +1 1 3 105 +2 1 3 106 +3 0 3 107 +2 1 3 108 +2 0 3 109 +2 1 3 110 +1 1 3 111 +3 1 3 112 +2 1 3 113 +4 1 3 114 +1 1 3 115 +3 1 3 116 +2 1 3 117 +4 1 3 118 +1 1 3 119 +1 0 4 1 +1 1 4 2 +4 1 4 3 +1 1 4 4 +2 1 4 5 +3 1 4 6 +4 1 4 7 +1 1 4 8 +2 1 4 9 +3 1 4 10 +4 1 4 11 +1 1 4 12 +3 1 4 13 +2 1 4 14 +1 1 4 15 +4 1 4 16 +2 1 4 17 +4 1 4 18 +1 1 4 19 +4 1 4 20 +2 1 4 21 +3 1 4 22 +2 1 4 23 +1 0 4 24 +4 1 4 25 +3 0 4 26 +3 1 4 27 +4 1 4 28 +2 1 4 29 +1 1 4 30 +4 1 4 31 +1 1 4 32 +3 1 4 33 +4 1 4 34 +1 1 4 35 +4 1 4 36 +2 0 4 37 +3 1 4 38 +1 0 4 39 +4 1 4 40 +1 1 4 41 +3 0 4 42 +4 0 4 43 +2 1 4 44 +3 0 4 45 +2 0 4 46 +4 1 4 47 +3 1 4 48 +2 1 4 49 +3 1 4 50 +4 1 4 51 +2 1 4 52 +1 1 4 53 +2 1 4 54 +4 1 4 55 +2 1 4 56 +1 0 4 57 +1 1 4 58 +3 1 4 59 +2 0 4 60 +2 1 4 61 +3 1 4 62 +1 1 4 63 +3 1 4 64 +1 1 4 65 +3 1 4 66 +1 1 4 67 +4 1 4 68 +2 1 4 69 +1 0 4 70 +4 0 4 71 +3 0 4 72 +4 0 4 73 +2 0 4 74 +1 0 4 75 +3 0 4 76 +2 0 4 77 +1 0 4 78 +3 0 4 79 +4 1 4 80 +2 1 4 81 +3 0 4 82 +1 1 4 83 +3 0 4 84 +4 0 4 85 +2 0 4 86 +3 0 4 87 +4 0 4 88 +1 0 4 89 +3 0 4 90 +4 0 4 91 +1 0 4 92 +2 1 4 93 +1 0 4 94 +2 0 4 95 +2 1 4 96 +4 1 4 97 +3 1 4 98 +2 1 4 99 +1 1 4 100 +4 1 4 101 +2 1 4 102 +4 1 4 103 +3 1 4 104 +1 1 4 105 +2 0 4 106 +3 0 4 107 +1 0 4 108 +3 1 4 109 +2 1 4 110 +1 1 4 111 +3 1 4 112 +2 1 4 113 +4 1 4 114 +1 1 4 115 +3 1 4 116 +2 1 4 117 +1 0 4 118 +4 0 4 119 +2 0 4 120 +1 0 4 121 +3 0 4 122 +1 0 4 123 +2 1 4 124 +4 1 4 125 +3 1 4 126 +1 0 4 127 +2 0 4 128 +1 0 5 1 +1 1 5 2 +4 1 5 3 +1 1 5 4 +2 1 5 5 +3 1 5 6 +4 1 5 7 +1 1 5 8 +2 1 5 9 +3 1 5 10 +4 1 5 11 +1 1 5 12 +3 1 5 13 +2 1 5 14 +1 1 5 15 +4 1 5 16 +2 1 5 17 +4 1 5 18 +1 1 5 19 +4 1 5 20 +2 1 5 21 +3 1 5 22 +2 1 5 23 +1 0 5 24 +2 0 5 25 +2 1 5 26 +4 0 5 27 +1 0 5 28 +2 1 5 29 +1 1 5 30 +4 1 5 31 +1 1 5 32 +3 1 5 33 +4 1 5 34 +1 1 5 35 +4 1 5 36 +2 1 5 37 +3 1 5 38 +1 0 5 39 +4 1 5 40 +1 1 5 41 +3 0 5 42 +4 0 5 43 +2 1 5 44 +3 0 5 45 +2 0 5 46 +4 1 5 47 +1 0 5 48 +2 1 5 49 +2 0 5 50 +4 1 5 51 +2 1 5 52 +1 1 5 53 +2 1 5 54 +4 1 5 55 +2 1 5 56 +1 1 5 57 +3 1 5 58 +1 1 5 59 +2 1 5 60 +4 0 5 61 +3 1 5 62 +1 1 5 63 +3 1 5 64 +1 1 5 65 +4 0 5 66 +1 1 5 67 +4 1 5 68 +2 1 5 69 +3 1 5 70 +1 1 5 71 +4 1 5 72 +3 1 5 73 +4 1 5 74 +2 1 5 75 +1 1 5 76 +3 0 5 77 +2 0 5 78 +1 0 5 79 +4 1 5 80 +2 1 5 81 +4 0 5 82 +1 0 5 83 
+4 0 5 84 +2 1 5 85 +3 1 5 86 +2 1 5 87 +1 0 5 88 +2 0 5 89 +2 1 5 90 +3 1 5 91 +4 1 5 92 +2 1 5 93 +1 1 5 94 +4 1 5 95 +1 1 5 96 +3 1 5 97 +4 1 5 98 +1 1 5 99 +1 0 6 1 +4 0 6 2 +2 0 6 3 +1 1 6 4 +2 1 6 5 +3 1 6 6 +4 1 6 7 +1 1 6 8 +2 1 6 9 +3 1 6 10 +4 1 6 11 +1 1 6 12 +4 1 6 13 +3 0 6 14 +2 0 6 15 +4 1 6 16 +1 0 6 17 +3 0 6 18 +1 1 6 19 +4 1 6 20 +2 1 6 21 +3 1 6 22 +2 1 6 23 +1 1 6 24 +2 1 6 25 +1 1 6 26 +3 1 6 27 +4 1 6 28 +2 1 6 29 +3 0 6 30 +4 1 6 31 +2 0 6 32 +4 0 6 33 +3 0 6 34 +1 1 6 35 +4 1 6 36 +2 1 6 37 +3 1 6 38 +1 1 6 39 +4 1 6 40 +1 1 6 41 +3 1 6 42 +4 1 6 43 +2 1 6 44 +3 0 6 45 +2 0 6 46 +1 0 6 47 +3 1 6 48 +2 1 6 49 +4 0 6 50 +1 0 6 51 +2 1 6 52 +2 0 6 53 +2 1 6 54 +4 1 6 55 +2 1 6 56 +1 1 6 57 +3 1 6 58 +1 1 6 59 +2 1 6 60 +4 1 6 61 +3 1 6 62 +1 1 6 63 +2 0 6 64 +1 1 6 65 +3 1 6 66 +4 0 6 67 +4 1 6 68 +2 1 6 69 +3 1 6 70 +1 1 6 71 +4 1 6 72 +3 1 6 73 +4 1 6 74 +2 1 6 75 +1 1 6 76 +3 1 6 77 +2 0 6 78 +2 0 6 79 +4 1 6 80 +2 1 6 81 +3 1 6 82 +4 1 6 83 +1 1 6 84 +2 1 6 85 +3 1 6 86 +2 1 6 87 +3 1 6 88 +4 1 6 89 +2 1 7 1 +3 0 7 2 +4 1 7 3 +4 0 7 4 +2 1 7 5 +3 1 7 6 +4 1 7 7 +1 1 7 8 +2 1 7 9 +3 1 7 10 +4 1 7 11 +1 1 7 12 +4 1 7 13 +3 1 7 14 +2 0 7 15 +4 1 7 16 +1 0 7 17 +3 0 7 18 +1 1 7 19 +4 1 7 20 +2 1 7 21 +2 0 7 22 +2 1 7 23 +1 1 7 24 +2 1 7 25 +1 1 7 26 +3 1 7 27 +4 1 7 28 +2 1 7 29 +1 0 7 30 +4 1 7 31 +2 1 7 32 +4 1 7 33 +3 1 7 34 +1 0 7 35 +4 0 7 36 +2 0 7 37 +3 0 7 38 +1 0 7 39 +4 0 7 40 +1 1 7 41 +4 0 7 42 +4 1 7 43 +2 0 7 44 +3 0 7 45 +3 1 7 46 +4 1 7 47 +3 0 7 48 +3 1 7 49 +2 1 7 50 +1 1 7 51 +2 1 7 52 +4 1 7 53 +1 1 7 54 +4 0 7 55 +2 0 7 56 +1 0 7 57 +3 0 7 58 +1 0 7 59 +2 0 7 60 +4 0 7 61 +3 1 7 62 +1 1 7 63 +2 0 7 64 +2 0 7 65 +1 0 7 66 +4 0 7 67 +4 1 7 68 +2 1 7 69 +3 1 7 70 +1 1 7 71 +1 0 7 72 +2 0 7 73 +4 1 7 74 +2 1 7 75 +1 1 7 76 +3 1 7 77 +2 1 7 78 +1 1 7 79 +4 1 7 80 +2 1 7 81 +3 0 7 82 +1 1 7 83 +1 0 7 84 +2 1 7 85 +3 1 7 86 +2 1 7 87 +3 0 7 88 +4 0 7 89 +2 0 7 90 +3 1 7 91 +4 1 7 92 +2 1 7 93 +1 0 7 94 +4 1 7 95 +1 0 7 96 +3 0 7 97 +4 0 7 98 +1 0 7 99 +4 0 7 100 +2 0 7 101 +3 0 7 102 +4 1 7 103 +4 0 7 104 +1 1 7 105 +4 0 7 106 +3 0 7 107 +2 0 7 108 +1 0 7 109 +2 0 7 110 +4 1 7 111 +3 0 7 112 +2 0 7 113 +3 0 7 114 +1 1 7 115 +2 1 7 116 +1 0 7 117 +1 1 7 118 +3 1 7 119 +4 1 7 120 +3 1 7 121 +1 1 7 122 +3 1 7 123 +4 1 7 124 +2 1 7 125 +3 1 7 126 +2 0 7 127 +2 0 7 128 +1 0 8 1 +3 0 8 2 +4 1 8 3 +1 1 8 4 +2 1 8 5 +3 1 8 6 +4 1 8 7 +1 1 8 8 +2 1 8 9 +3 1 8 10 +4 1 8 11 +1 1 8 12 +3 1 8 13 +3 0 8 14 +1 1 8 15 +4 1 8 16 +1 0 8 17 +4 1 8 18 +1 1 8 19 +4 1 8 20 +2 1 8 21 +3 1 8 22 +2 1 8 23 +1 1 8 24 +1 0 8 25 +1 1 8 26 +4 0 8 27 +4 1 8 28 +2 1 8 29 +1 0 8 30 +4 1 8 31 +1 0 8 32 +3 0 8 33 +4 0 8 34 +1 0 8 35 +4 0 8 36 +2 0 8 37 +3 0 8 38 +1 0 8 39 +4 0 8 40 +1 1 8 41 +3 0 8 42 +4 1 8 43 +2 0 8 44 +1 0 8 45 +2 0 8 46 +4 1 8 47 +3 0 8 48 +2 0 8 49 +3 0 8 50 +4 0 8 51 +2 1 8 52 +1 0 8 53 +2 0 8 54 +4 0 8 55 +2 0 8 56 +3 1 8 57 +1 1 8 58 +3 1 8 59 +4 1 8 60 +2 1 8 61 +3 1 8 62 +1 1 8 63 +3 1 8 64 +1 1 8 65 +3 1 8 66 +1 0 8 67 +1 1 8 68 +4 1 8 69 +1 1 8 70 +4 1 8 71 +3 1 8 72 +4 1 8 73 +2 1 8 74 +1 1 8 75 +3 1 8 76 +2 1 8 77 +1 0 8 78 +3 0 8 79 +4 1 8 80 +2 0 8 81 +3 1 8 82 +4 0 8 83 +1 0 8 84 +4 1 8 85 +2 1 8 86 +3 1 8 87 +4 1 8 88 +1 1 8 89 +3 1 8 90 +4 1 8 91 +1 1 8 92 +2 1 8 93 +1 1 8 94 +2 0 8 95 +1 0 8 96 +4 1 8 97 +3 1 8 98 +2 1 8 99 +1 1 8 100 +4 1 8 101 +2 1 8 102 +4 1 8 103 +3 1 8 104 +1 1 8 105 +4 0 8 106 +4 1 8 107 +1 1 8 108 +2 1 8 109 +3 1 8 110 +4 1 8 111 +1 1 8 112 +3 1 8 113 +2 1 8 114 +1 1 8 115 +2 1 8 116 +4 0 8 117 +4 1 8 118 +1 1 8 119 +3 1 8 120 
+2 1 8 121 +1 1 8 122 +3 1 8 123 +2 1 8 124 +4 1 8 125 +3 1 8 126 +2 1 8 127 +2 1 9 1 +1 1 9 2 +4 1 9 3 +1 1 9 4 +2 1 9 5 +3 1 9 6 +4 1 9 7 +1 1 9 8 +2 1 9 9 +3 1 9 10 +4 0 9 11 +1 1 9 12 +4 0 9 13 +3 0 9 14 +1 1 9 15 +4 1 9 16 +1 0 9 17 +4 1 9 18 +1 1 9 19 +4 1 9 20 +2 1 9 21 +3 1 9 22 +2 1 9 23 +1 1 9 24 +2 1 9 25 +1 1 9 26 +3 1 9 27 +4 1 9 28 +2 1 9 29 +3 0 9 30 +4 1 9 31 +1 1 9 32 +3 1 9 33 +4 1 9 34 +1 1 9 35 +4 1 9 36 +4 0 9 37 +3 1 9 38 +1 1 9 39 +4 1 9 40 +1 1 9 41 +3 1 9 42 +4 1 9 43 +2 1 9 44 +1 0 9 45 +2 1 9 46 +4 0 9 47 +3 1 9 48 +2 1 9 49 +3 0 9 50 +4 0 9 51 +2 0 9 52 +1 0 9 53 +1 0 9 54 +4 0 9 55 +2 0 9 56 +1 0 9 57 +3 0 9 58 +1 0 9 59 +2 1 9 60 +4 1 9 61 +3 1 9 62 +1 0 9 63 +2 0 9 64 +2 0 9 65 +1 0 9 66 +4 0 9 67 +4 0 9 68 +4 1 9 69 +1 1 9 70 +1 0 9 71 +4 0 9 72 +3 0 9 73 +4 0 9 74 +2 0 9 75 +1 0 9 76 +3 0 9 77 +2 0 9 78 +1 0 9 79 +4 1 9 80 +2 1 9 81 +4 0 9 82 +1 0 9 83 +4 0 9 84 +2 1 9 85 +3 1 9 86 +2 1 9 87 +1 0 9 88 +4 1 9 89 +2 1 9 90 +3 1 9 91 +4 1 9 92 +2 1 9 93 +1 1 9 94 +4 1 9 95 +1 1 9 96 +3 1 9 97 +4 1 9 98 +1 0 9 99 +4 0 9 100 +2 0 9 101 +3 1 9 102 +4 0 9 103 +4 1 9 104 +1 1 9 105 +4 1 9 106 +3 1 9 107 +2 1 9 108 +1 1 9 109 +3 1 9 110 +4 1 9 111 +3 1 9 112 +2 1 9 113 +3 0 9 114 +1 1 9 115 +3 0 9 116 +2 0 9 117 +1 1 9 118 +4 0 9 119 +4 1 9 120 +3 1 9 121 +1 1 9 122 +3 1 9 123 +4 1 9 124 +2 1 9 125 +3 1 9 126 +1 1 9 127 +1 0 9 128 +1 0 10 1 +1 1 10 2 +2 0 10 3 +1 1 10 4 +4 0 10 5 +3 1 10 6 +4 1 10 7 +1 1 10 8 +2 1 10 9 +3 1 10 10 +2 0 10 11 +1 1 10 12 +4 1 10 13 +3 1 10 14 +2 1 10 15 +4 1 10 16 +1 1 10 17 +3 1 10 18 +1 1 10 19 +4 0 10 20 +2 0 10 21 +2 1 10 22 +2 0 10 23 +1 0 10 24 +4 0 10 25 +1 0 10 26 +3 0 10 27 +4 0 10 28 +2 1 10 29 +3 0 10 30 +4 0 10 31 +2 0 10 32 +4 1 10 33 +3 1 10 34 +2 1 10 35 +1 0 10 36 +4 1 10 37 +2 0 10 38 +4 0 10 39 +3 0 10 40 +1 1 10 41 +2 0 10 42 +3 1 10 43 +1 0 10 44 +2 0 10 45 +3 1 10 46 +1 0 10 47 +3 1 10 48 +2 1 10 49 +2 0 10 50 +4 1 10 51 +3 0 10 52 +2 0 10 53 +1 0 10 54 +1 0 10 55 +4 0 10 56 +2 0 10 57 +1 0 10 58 +3 0 10 59 +2 1 10 60 +4 1 10 61 +3 1 10 62 +2 0 10 63 +1 0 10 64 +1 0 10 65 +4 0 10 66 +2 0 10 67 +1 1 10 68 +4 0 10 69 +3 1 10 70 +4 1 10 71 +1 1 10 72 +2 1 10 73 +3 1 10 74 +4 1 10 75 +1 1 10 76 +4 1 10 77 +3 1 10 78 +2 1 10 79 +4 1 10 80 +1 0 10 81 +3 0 10 82 +1 1 10 83 +3 0 10 84 +4 0 10 85 +3 1 10 86 +3 0 10 87 +4 0 10 88 +4 0 10 89 +3 0 10 90 +4 0 10 91 +1 0 10 92 +2 1 10 93 +1 0 10 94 +4 1 10 95 +1 0 10 96 +4 1 10 97 +3 1 10 98 +2 1 10 99 +3 0 10 100 +4 1 10 101 +3 0 10 102 +2 0 10 103 +4 0 10 104 +1 1 10 105 +3 0 10 106 +4 1 10 107 +2 0 10 108 +2 1 10 109 +3 1 10 110 +4 1 10 111 +1 1 10 112 +3 1 10 113 +2 1 10 114 +1 1 10 115 +2 1 10 116 +4 1 10 117 +1 1 10 118 +3 0 10 119 +4 0 10 120 +3 0 10 121 +1 1 10 122 +3 1 10 123 +4 0 10 124 +4 1 10 125 +3 1 10 126 +2 1 10 127 +2 0 10 128 diff --git a/inst/include/meta_header.hpp b/R/inst/include/meta_header.hpp similarity index 100% rename from inst/include/meta_header.hpp rename to R/inst/include/meta_header.hpp diff --git a/inst/plotting/plot_functions.R b/R/inst/plotting/plot_functions.R similarity index 100% rename from inst/plotting/plot_functions.R rename to R/inst/plotting/plot_functions.R diff --git a/R/inst/stan_files/bandit2arm_delta.stan b/R/inst/stan_files/bandit2arm_delta.stan new file mode 100644 index 00000000..3c44ddde --- /dev/null +++ b/R/inst/stan_files/bandit2arm_delta.stan @@ -0,0 +1,109 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; // no lower and upper bounds +} +transformed data 
{
+  vector[2] initV;  // initial values for EV
+  initV = rep_vector(0.0, 2);
+}
+parameters {
+// Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[2] mu_pr;
+  vector[2] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] A_pr;    // learning rate
+  vector[N] tau_pr;  // inverse temperature
+}
+transformed parameters {
+  // subject-level parameters
+  vector[N] A;
+  vector[N] tau;
+
+  for (i in 1:N) {
+    A[i]   = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]);
+    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5;
+  }
+}
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  A_pr ~ normal(0, 1);
+  tau_pr ~ normal(0, 1);
+
+  // subject loop and trial loop
+  for (i in 1:N) {
+    vector[2] ev;  // expected value
+    real PE;       // prediction error
+
+    ev = initV;
+
+    for (t in 1:(Tsubj[i])) {
+      // compute action probabilities
+      choice[i, t] ~ categorical_logit(tau[i] * ev);
+
+      // prediction error
+      PE = outcome[i, t] - ev[choice[i, t]];
+
+      // value updating (learning)
+      ev[choice[i, t]] += A[i] * PE;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real mu_A;
+  real mu_tau;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to -1 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_A   = Phi_approx(mu_pr[1]);
+  mu_tau = Phi_approx(mu_pr[2]) * 5;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      vector[2] ev;  // expected value
+      real PE;       // prediction error
+
+      // Initialize values
+      ev = initV;
+
+      log_lik[i] = 0;
+
+      for (t in 1:(Tsubj[i])) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_logit_lpmf(choice[i, t] | tau[i] * ev);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(tau[i] * ev));
+
+        // prediction error
+        PE = outcome[i, t] - ev[choice[i, t]];
+
+        // value updating (learning)
+        ev[choice[i, t]] += A[i] * PE;
+      }
+    }
+  }
+}
+
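The delta-rule core of bandit2arm_delta.stan can be sanity-checked outside Stan. Below is a minimal R sketch of the same update and softmax choice rule; the reward probabilities and parameter values are hypothetical, the names A, tau, ev, and PE mirror the Stan code, and nothing here is part of the package:

# Minimal sketch (not hBayesDM code): one simulated subject under the delta
# rule, with hypothetical true reward probabilities rew_prob.
set.seed(1)
A        <- 0.3          # learning rate, mapped to [0, 1] in the Stan model
tau      <- 2.0          # inverse temperature, mapped to [0, 5]
ev       <- c(0, 0)      # expected values, matching initV
rew_prob <- c(0.7, 0.3)  # hypothetical reward probabilities of the two arms
for (t in 1:100) {
  p       <- exp(tau * ev) / sum(exp(tau * ev))  # softmax, as in categorical_logit
  choice  <- sample(1:2, size = 1, prob = p)
  outcome <- rbinom(1, 1, rew_prob[choice])
  PE      <- outcome - ev[choice]                # prediction error
  ev[choice] <- ev[choice] + A * PE              # delta-rule update
}
ev  # the learned EVs should track rew_prob after enough trials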
diff --git a/R/inst/stan_files/bandit4arm2_kalman_filter.stan b/R/inst/stan_files/bandit4arm2_kalman_filter.stan
new file mode 100644
index 00000000..15d36c63
--- /dev/null
+++ b/R/inst/stan_files/bandit4arm2_kalman_filter.stan
@@ -0,0 +1,163 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  int choice[N,T];
+  real outcome[N,T];
+}
+
+transformed data {
+  real sigmaO;  // sigma_O = 4
+  sigmaO = 4;
+}
+
+parameters {
+  // group-level parameters
+  vector[6] mu_pr;
+  vector[6] sigma;
+
+  // subject-level raw parameters, follows norm(0,1), for later Matt Trick
+  vector[N] lambda_pr;  // decay factor
+  vector[N] theta_pr;   // decay center
+  vector[N] beta_pr;    // inverse softmax temperature
+  vector[N] mu0_pr;     // anticipated initial mean of all 4 options
+  vector[N] sigma0_pr;  // anticipated initial sd^2 (uncertainty factor) of all 4 options
+  vector[N] sigmaD_pr;  // sd^2 of diffusion noise
+}
+
+transformed parameters {
+  // subject-level parameters
+  vector[N] lambda;
+  vector[N] theta;
+  vector[N] beta;
+  vector[N] mu0;
+  vector[N] sigma0;
+  vector[N] sigmaD;
+
+  // Matt Trick
+  for (i in 1:N) {
+    lambda[i] = Phi_approx( mu_pr[1] + sigma[1] * lambda_pr[i] );
+    theta[i]  = Phi_approx( mu_pr[2] + sigma[2] * theta_pr[i] ) * 100;
+    beta[i]   = Phi_approx( mu_pr[3] + sigma[3] * beta_pr[i] );
+    mu0[i]    = Phi_approx( mu_pr[4] + sigma[4] * mu0_pr[i] ) * 100;
+    sigma0[i] = Phi_approx( mu_pr[5] + sigma[5] * sigma0_pr[i] ) * 15;
+    sigmaD[i] = Phi_approx( mu_pr[6] + sigma[6] * sigmaD_pr[i] ) * 15;
+  }
+}
+
+model {
+  // prior: hyperparameters
+  mu_pr ~ normal(0,1);
+  sigma ~ cauchy(0,5);
+
+  // prior: individual parameters
+  lambda_pr ~ normal(0,1);
+  theta_pr  ~ normal(0,1);
+  beta_pr   ~ normal(0,1);
+  mu0_pr    ~ normal(0,1);
+  sigma0_pr ~ normal(0,1);
+  sigmaD_pr ~ normal(0,1);
+
+  // subject loop and trial loop
+  for (i in 1:N) {
+    vector[4] mu_ev;     // estimated mean for each option
+    vector[4] sd_ev_sq;  // estimated sd^2 for each option
+    real pe;             // prediction error
+    real k;              // learning rate
+
+    mu_ev    = rep_vector(mu0[i], 4);
+    sd_ev_sq = rep_vector(sigma0[i]^2, 4);
+
+    for (t in 1:(Tsubj[i])) {
+      // compute action probabilities
+      choice[i,t] ~ categorical_logit( beta[i] * mu_ev );
+
+      // learning rate
+      k = sd_ev_sq[choice[i,t]] / ( sd_ev_sq[choice[i,t]] + sigmaO^2 );
+
+      // prediction error
+      pe = outcome[i,t] - mu_ev[choice[i,t]];
+
+      // value updating (learning)
+      mu_ev[choice[i,t]]    += k * pe;
+      sd_ev_sq[choice[i,t]] *= (1-k);
+
+      // diffusion process
+      {
+        mu_ev *= lambda[i];
+        mu_ev += (1 - lambda[i]) * theta[i];
+      }
+      {
+        sd_ev_sq *= lambda[i]^2;
+        sd_ev_sq += sigmaD[i]^2;
+      }
+    }
+  }
+}
+
+generated quantities {
+  real mu_lambda;
+  real mu_theta;
+  real mu_beta;
+  real mu_mu0;
+  real mu_sigma0;
+  real mu_sigmaD;
+  real log_lik[N];
+  real y_pred[N,T];
+
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_lambda = Phi_approx(mu_pr[1]);
+  mu_theta  = Phi_approx(mu_pr[2]) * 100;
+  mu_beta   = Phi_approx(mu_pr[3]);
+  mu_mu0    = Phi_approx(mu_pr[4]) * 100;
+  mu_sigma0 = Phi_approx(mu_pr[5]) * 15;
+  mu_sigmaD = Phi_approx(mu_pr[6]) * 15;
+
+  { // local block
+    for (i in 1:N) {
+      vector[4] mu_ev;     // estimated mean for each option
+      vector[4] sd_ev_sq;  // estimated sd^2 for each option
+      real pe;             // prediction error
+      real k;              // learning rate
+
+      log_lik[i] = 0;
+      mu_ev    = rep_vector(mu0[i], 4);
+      sd_ev_sq = rep_vector(sigma0[i]^2, 4);
+
+      for (t in 1:(Tsubj[i])) {
+        // compute action probabilities
+        log_lik[i] += categorical_logit_lpmf( choice[i,t] | beta[i] * mu_ev );
+        y_pred[i, t] = categorical_rng(softmax(beta[i] * mu_ev));
+
+        // learning rate
+        k = sd_ev_sq[choice[i,t]] / ( sd_ev_sq[choice[i,t]] + sigmaO^2 );
+
+        // prediction error
+        pe = outcome[i,t] - mu_ev[choice[i,t]];
+
+        // value updating (learning)
+        mu_ev[choice[i,t]]    += k * pe;
+        sd_ev_sq[choice[i,t]] *= (1-k);
+
+        // diffusion process
+        {
+          mu_ev *= lambda[i];
+          mu_ev += (1 - lambda[i]) * theta[i];
+        }
+        {
+          sd_ev_sq *= lambda[i]^2;
+          sd_ev_sq += sigmaD[i]^2;
+        }
+      }
+    }
+  } // local block END
+}
+
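The per-trial Kalman update plus the between-trial diffusion step above reduce to a few lines of arithmetic. A minimal R sketch of a single trial follows; all numeric values (means, variances, lambda, theta, sigmaD, the chosen arm, and the outcome) are hypothetical and only illustrate the update equations:

# Minimal sketch (not hBayesDM code): one Kalman-filter trial, then diffusion.
mu_ev    <- rep(50, 4)    # posterior means of the 4 options (from mu0)
sd_ev_sq <- rep(15^2, 4)  # posterior variances (from sigma0^2)
sigmaO   <- 4             # observation noise sd, fixed at 4 as in the Stan file
lambda   <- 0.9           # decay factor
theta    <- 50            # decay center
sigmaD   <- 3             # diffusion noise sd
choice   <- 2
outcome  <- 70
k  <- sd_ev_sq[choice] / (sd_ev_sq[choice] + sigmaO^2)  # Kalman gain
pe <- outcome - mu_ev[choice]                           # prediction error
mu_ev[choice]    <- mu_ev[choice] + k * pe              # mean moves toward outcome
sd_ev_sq[choice] <- sd_ev_sq[choice] * (1 - k)          # chosen-arm variance shrinks
mu_ev    <- lambda * mu_ev + (1 - lambda) * theta       # all means decay toward theta
sd_ev_sq <- lambda^2 * sd_ev_sq + sigmaD^2              # all variances grow again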
xi_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Arew; + vector[N] Apun; + vector[N] xi; + + for (i in 1:N) { + Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); + Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); + xi[i] = Phi_approx(mu_pr[3] + sigma[3] * xi_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Arew_pr ~ normal(0, 1.0); + Apun_pr ~ normal(0, 1.0); + xi_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + xi (noise) + choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = rew[i, t] - Qr[choice[i, t]]; + PEp = los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } +} +generated quantities { + // For group level parameters + real mu_Arew; + real mu_Apun; + real mu_xi; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_Arew = Phi_approx(mu_pr[1]); + mu_Apun = Phi_approx(mu_pr[2]); + mu_xi = Phi_approx(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // compute log likelihood of current trial + log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = rew[i, t] - Qr[choice[i, t]]; + PEp = los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q 
values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } + } +} + diff --git a/R/inst/stan_files/bandit4arm_4par.stan b/R/inst/stan_files/bandit4arm_4par.stan new file mode 100644 index 00000000..18d6acf9 --- /dev/null +++ b/R/inst/stan_files/bandit4arm_4par.stan @@ -0,0 +1,176 @@ +#include /pre/license.stan + +// Seymour et al 2012 J neuro model, w/o C (chioce perseveration) +data { + int N; + int T; + int Tsubj[N]; + real rew[N, T]; + real los[N, T]; + int choice[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Arew_pr; + vector[N] Apun_pr; + vector[N] R_pr; + vector[N] P_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Arew; + vector[N] Apun; + vector[N] R; + vector[N] P; + + for (i in 1:N) { + Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); + Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); + R[i] = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30; + P[i] = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Arew_pr ~ normal(0, 1.0); + Apun_pr ~ normal(0, 1.0); + R_pr ~ normal(0, 1.0); + P_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(Qsum); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } +} +generated quantities { + // For group level parameters + real mu_Arew; + real mu_Apun; + real mu_R; + real mu_P; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_Arew = Phi_approx(mu_pr[1]); + mu_Apun = Phi_approx(mu_pr[2]); + mu_R = Phi_approx(mu_pr[3]) * 30; + mu_P = Phi_approx(mu_pr[4]) * 30; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + 
vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // compute log likelihood of current trial + log_lik[i] += categorical_logit_lpmf(choice[i, t] | Qsum); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(Qsum)); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } + } +} + diff --git a/R/inst/stan_files/bandit4arm_lapse.stan b/R/inst/stan_files/bandit4arm_lapse.stan new file mode 100644 index 00000000..161ce311 --- /dev/null +++ b/R/inst/stan_files/bandit4arm_lapse.stan @@ -0,0 +1,182 @@ +#include /pre/license.stan + +// Seymour et al 2012 J neuro model, w/o C (chioce perseveration) but with xi (lapse rate) +data { + int N; + int T; + int Tsubj[N]; + real rew[N, T]; + real los[N, T]; + int choice[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[5] mu_pr; + vector[5] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Arew_pr; + vector[N] Apun_pr; + vector[N] R_pr; + vector[N] P_pr; + vector[N] xi_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Arew; + vector[N] Apun; + vector[N] R; + vector[N] P; + vector[N] xi; + + for (i in 1:N) { + Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); + Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); + R[i] = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30; + P[i] = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30; + xi[i] = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Arew_pr ~ normal(0, 1.0); + Apun_pr ~ normal(0, 1.0); + R_pr ~ normal(0, 1.0); + P_pr ~ normal(0, 1.0); + xi_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + xi (noise) + choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] 
* los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } +} +generated quantities { + // For group level parameters + real mu_Arew; + real mu_Apun; + real mu_R; + real mu_P; + real mu_xi; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_Arew = Phi_approx(mu_pr[1]); + mu_Apun = Phi_approx(mu_pr[2]); + mu_R = Phi_approx(mu_pr[3]) * 30; + mu_P = Phi_approx(mu_pr[4]) * 30; + mu_xi = Phi_approx(mu_pr[5]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // compute log likelihood of current trial + log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += Arew[i] * PEr_fic; + Qp += Apun[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } + } +} + diff --git a/R/inst/stan_files/bandit4arm_lapse_decay.stan b/R/inst/stan_files/bandit4arm_lapse_decay.stan new file mode 100644 index 00000000..b089ee21 --- /dev/null +++ b/R/inst/stan_files/bandit4arm_lapse_decay.stan @@ -0,0 +1,201 @@ +#include /pre/license.stan + +// Seymour et al 2012 J neuro model, w/o C (chioce perseveration) but with xi (lapse rate). Added decay rate (Niv et al., 2015, J. 
Neuro) +// Aylward et al., 2018, PsyArXiv +data { + int N; + int T; + int Tsubj[N]; + real rew[N, T]; + real los[N, T]; + int choice[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[6] mu_pr; + vector[6] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Arew_pr; + vector[N] Apun_pr; + vector[N] R_pr; + vector[N] P_pr; + vector[N] xi_pr; + vector[N] d_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Arew; + vector[N] Apun; + vector[N] R; + vector[N] P; + vector[N] xi; + vector[N] d; + + for (i in 1:N) { + Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); + Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); + R[i] = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30; + P[i] = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30; + xi[i] = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]); + d[i] = Phi_approx(mu_pr[6] + sigma[6] * d_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Arew_pr ~ normal(0, 1.0); + Apun_pr ~ normal(0, 1.0); + R_pr ~ normal(0, 1.0); + P_pr ~ normal(0, 1.0); + xi_pr ~ normal(0, 1.0); + d_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + vector[4] tmp; // temporary vector for Qr and Qp + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + xi (noise) + choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + //PEr_fic = -Qr; + //PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ decay rate + //Qr += Arew[i] * PEr_fic; + //Qp += Apun[i] * PEp_fic; + tmp = (1-d[i]) * Qr; + Qr = tmp; + tmp = (1-d[i]) * Qp; + Qp = tmp; + + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } +} +generated quantities { + // For group level parameters + real mu_Arew; + real mu_Apun; + real mu_R; + real mu_P; + real mu_xi; + real mu_d; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_Arew = Phi_approx(mu_pr[1]); + mu_Apun = Phi_approx(mu_pr[2]); + mu_R = Phi_approx(mu_pr[3]) * 30; + mu_P = Phi_approx(mu_pr[4]) * 30; + mu_xi = Phi_approx(mu_pr[5]); + mu_d = Phi_approx(mu_pr[6]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) 
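The decay variant above drops fictive updating: every arm's Q-value decays multiplicatively toward zero each trial, and only the chosen arm then receives a learning update. As a rough illustration, one trial of this update can be written in R as below; this is a sketch, not package code, and the argument names (Qr, Qp, Arew, Apun, d, xi) are placeholders chosen here, with rew and los assumed to be already scaled by the sensitivities R and P.

# Minimal R sketch of one bandit4arm_lapse_decay trial (illustrative only)
lapse_decay_step <- function(Qr, Qp, choice, rew, los, Arew, Apun, d, xi) {
  # choice rule: softmax over Qr + Qp, mixed with a uniform lapse xi
  p <- exp(Qr + Qp) / sum(exp(Qr + Qp))
  p <- p * (1 - xi) + xi / 4

  # prediction errors and stored values for the chosen arm (pre-decay)
  PEr <- rew - Qr[choice]
  PEp <- los - Qp[choice]
  Qr_chosen <- Qr[choice]
  Qp_chosen <- Qp[choice]

  # all four arms decay toward zero, then the chosen arm is updated
  Qr <- (1 - d) * Qr
  Qp <- (1 - d) * Qp
  Qr[choice] <- Qr_chosen + Arew * PEr
  Qp[choice] <- Qp_chosen + Apun * PEp

  list(p = p, Qr = Qr, Qp = Qp)
}

# e.g., starting from zero values:
lapse_decay_step(Qr = rep(0, 4), Qp = rep(0, 4), choice = 2,
                 rew = 1, los = -1, Arew = 0.2, Apun = 0.1, d = 0.3, xi = 0.05)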
+      //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;    // Qsum = Qrew + Qpun + perseverance
+      vector[4] tmp;     // temporary vector for Qr and Qp
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr; // prediction error - for reward of the chosen option
+      real PEp; // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // Prediction error signals
+        PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+        PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+        //PEr_fic = -Qr;
+        //PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ decay rate
+        //Qr += Arew[i] * PEr_fic;
+        //Qp += Apun[i] * PEp_fic;
+        tmp = (1-d[i]) * Qr;
+        Qr = tmp;
+        tmp = (1-d[i]) * Qp;
+        Qp = tmp;
+
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
diff --git a/R/inst/stan_files/bandit4arm_singleA_lapse.stan b/R/inst/stan_files/bandit4arm_singleA_lapse.stan
new file mode 100644
index 00000000..b383f389
--- /dev/null
+++ b/R/inst/stan_files/bandit4arm_singleA_lapse.stan
@@ -0,0 +1,177 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate). Single learning rate both for R and P.
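Every model in this family constrains subject-level parameters the same way: a raw normal(0, 1) parameter is shifted by the group mean, scaled by the group SD, pushed through Phi_approx, and multiplied up to the target range. A rough R equivalent, with pnorm standing in for Stan's Phi_approx (a fast approximation to the same standard-normal CDF) and made-up numbers:

# R sketch of the non-centered ("Matt trick") transform used throughout (illustrative only)
mu_pr    <- 0.3   # hypothetical group-level mean (unconstrained scale)
sigma    <- 0.5   # hypothetical group-level SD
theta_pr <- -1.2  # hypothetical raw subject-level draw, ~ normal(0, 1)

A <- pnorm(mu_pr + sigma * theta_pr)       # e.g. a learning rate, constrained to (0, 1)
R <- pnorm(mu_pr + sigma * theta_pr) * 30  # e.g. a reward sensitivity, constrained to (0, 30)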
+// Aylward et al., 2018, PsyArXiv +data { + int N; + int T; + int Tsubj[N]; + real rew[N, T]; + real los[N, T]; + int choice[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] R_pr; + vector[N] P_pr; + vector[N] xi_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] R; + vector[N] P; + vector[N] xi; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + R[i] = Phi_approx(mu_pr[2] + sigma[2] * R_pr[i]) * 30; + P[i] = Phi_approx(mu_pr[3] + sigma[3] * P_pr[i]) * 30; + xi[i] = Phi_approx(mu_pr[4] + sigma[4] * xi_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1.0); + R_pr ~ normal(0, 1.0); + P_pr ~ normal(0, 1.0); + xi_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + xi (noise) + choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += A[i] * PEr_fic; + Qp += A[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + A[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + A[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_R; + real mu_P; + real mu_xi; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_R = Phi_approx(mu_pr[2]) * 30; + mu_P = Phi_approx(mu_pr[3]) * 30; + mu_xi = Phi_approx(mu_pr[4]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] Qr; + vector[4] Qp; + vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options) + vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options) + vector[4] Qsum; // Qsum = Qrew + Qpun + perseverance + + real Qr_chosen; + real Qp_chosen; + real PEr; // prediction error - for reward of the chosen option + real PEp; // prediction error - for punishment of the chosen option + + // Initialize values + Qr = initV; + Qp = initV; + Qsum = initV; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // compute log likelihood of current trial + 
log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4); + + // Prediction error signals + PEr = R[i] * rew[i, t] - Qr[choice[i, t]]; + PEp = P[i] * los[i, t] - Qp[choice[i, t]]; + PEr_fic = -Qr; + PEp_fic = -Qp; + + // store chosen deck Q values (rew and pun) + Qr_chosen = Qr[choice[i, t]]; + Qp_chosen = Qp[choice[i, t]]; + + // First, update Qr & Qp for all decks w/ fictive updating + Qr += A[i] * PEr_fic; + Qp += A[i] * PEp_fic; + // Replace Q values of chosen deck with correct values using stored values + Qr[choice[i, t]] = Qr_chosen + A[i] * PEr; + Qp[choice[i, t]] = Qp_chosen + A[i] * PEp; + + // Q(sum) + Qsum = Qr + Qp; + } + } + } +} + diff --git a/R/inst/stan_files/bart_par4.stan b/R/inst/stan_files/bart_par4.stan new file mode 100644 index 00000000..f66ca8f0 --- /dev/null +++ b/R/inst/stan_files/bart_par4.stan @@ -0,0 +1,129 @@ +#include /pre/license.stan + +data { + int N; // Number of subjects + int T; // Maximum number of trials + int Tsubj[N]; // Number of trials for each subject + int P; // Number of max pump + 1 ** CAUTION ** + int pumps[N, T]; // Number of pump + int explosion[N, T]; // Whether the balloon exploded (0 or 1) +} + +transformed data{ + // Whether a subject pump the button or not (0 or 1) + int d[N, T, P]; + + for (j in 1:N) { + for (k in 1:Tsubj[j]) { + for (l in 1:P) { + if (l <= pumps[j, k]) + d[j, k, l] = 1; + else + d[j, k, l] = 0; + } + } + } +} + +parameters { + // Group-level parameters + vector[4] mu_pr; + vector[4] sigma; + + // Normally distributed error for Matt trick + vector[N] phi_pr; + vector[N] eta_pr; + vector[N] gam_pr; + vector[N] tau_pr; +} + +transformed parameters { + // Subject-level parameters with Matt trick + vector[N] phi; + vector[N] eta; + vector[N] gam; + vector[N] tau; + + phi = Phi_approx(mu_pr[1] + sigma[1] * phi_pr); + eta = exp(mu_pr[2] + sigma[2] * eta_pr); + gam = exp(mu_pr[3] + sigma[3] * gam_pr); + tau = exp(mu_pr[4] + sigma[4] * tau_pr); +} + +model { + // Prior + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + phi_pr ~ normal(0, 1); + eta_pr ~ normal(0, 1); + gam_pr ~ normal(0, 1); + tau_pr ~ normal(0, 1); + + // Likelihood + for (j in 1:N) { + // Initialize n_succ and n_pump for a subject + int n_succ = 0; // Number of successful pumps + int n_pump = 0; // Number of total pumps + + for (k in 1:Tsubj[j]) { + real p_burst; // Belief on a balloon to be burst + real omega; // Optimal number of pumps + + p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump)); + omega = -gam[j] / log1m(p_burst); + + // Calculate likelihood with bernoulli distribution + for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) + d[j, k, l] ~ bernoulli_logit(tau[j] * (omega - l)); + + // Update n_succ and n_pump after each trial ends + n_succ += pumps[j, k] - explosion[j, k]; + n_pump += pumps[j, k]; + } + } +} + +generated quantities { + // Actual group-level mean + real mu_phi = Phi_approx(mu_pr[1]); + real mu_eta = exp(mu_pr[2]); + real mu_gam = exp(mu_pr[3]); + real mu_tau = exp(mu_pr[4]); + + // Log-likelihood for model fit + real log_lik = 0; + + // For posterior predictive check + real y_pred[N, T, P]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (j in 1:N) + for (k in 1:T) + for(l in 1:P) + y_pred[j, k, l] = -1; + + { // Local section to save time and space + for (j in 1:N) { + int n_succ = 0; + int n_pump = 0; + + for (k in 1:Tsubj[j]) { + real p_burst; 
// Belief that the balloon will burst
+        real omega;    // Optimal number of pumps
+
+        p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump));
+        omega = -gam[j] / log1m(p_burst);
+
+        for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) {
+          log_lik += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l));
+          y_pred[j, k, l] = bernoulli_logit_rng(tau[j] * (omega - l));
+        }
+
+        n_succ += pumps[j, k] - explosion[j, k];
+        n_pump += pumps[j, k];
+      }
+    }
+  }
+}
+
diff --git a/R/inst/stan_files/choiceRT_ddm.stan b/R/inst/stan_files/choiceRT_ddm.stan
new file mode 100644
index 00000000..58baaec6
--- /dev/null
+++ b/R/inst/stan_files/choiceRT_ddm.stan
@@ -0,0 +1,98 @@
+#include /pre/license.stan
+
+// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists
+data {
+  int N;       // Number of subjects
+  int Nu_max;  // Max (across subjects) number of upper boundary responses
+  int Nl_max;  // Max (across subjects) number of lower boundary responses
+  int Nu[N];   // Number of upper boundary responses for each subj
+  int Nl[N];   // Number of lower boundary responses for each subj
+  real RTu[N, Nu_max];  // upper boundary response times
+  real RTl[N, Nl_max];  // lower boundary response times
+  real minRT[N];        // minimum RT for each subject of the observed data
+  real RTbound;         // lower bound of RT across all subjects (e.g., 0.1 second)
+}
+
+parameters {
+  // parameters of the DDM (parameter names in Ratcliff's DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R
+  // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ
+  // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0
+  // beta (b): Initial bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1
+  // delta (v): Drift rate; quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta
+  // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 0 < ter (in seconds)
+  ///* upper boundary of tau must be smaller than minimum RT
+  //to avoid zero likelihood for fast responses.
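The bound described in this comment block is enforced below in transformed parameters by rescaling a probit-transformed draw into the open interval (RTbound, minRT). Roughly, in R, with pnorm in place of Phi_approx and made-up numbers:

# R sketch of the non-decision-time constraint (illustrative only)
RTbound <- 0.1   # hypothetical lower bound on RT, in seconds
minRT   <- 0.45  # hypothetical subject-level minimum observed RT
tau_raw <- 0.7   # hypothetical unconstrained draw

tau <- pnorm(tau_raw) * (minRT - RTbound) + RTbound
stopifnot(tau > RTbound, tau < minRT)  # tau always lands inside (RTbound, minRT)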
+ //tau can for physiological reasone not be faster than 0.1 s.*/ + + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; + vector[N] beta_pr; + vector[N] delta_pr; + vector[N] tau_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] alpha; // boundary separation + vector[N] beta; // initial bias + vector[N] delta; // drift rate + vector[N] tau; // nondecision time + + for (i in 1:N) { + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]); + tau[i] = Phi_approx(mu_pr[4] + sigma[4] * tau_pr[i]) * (minRT[i] - RTbound) + RTbound; + } + alpha = exp(mu_pr[1] + sigma[1] * alpha_pr); + delta = exp(mu_pr[3] + sigma[3] * delta_pr); +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Individual parameters for non-centered parameterization + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + delta_pr ~ normal(0, 1); + tau_pr ~ normal(0, 1); + + // Begin subject loop + for (i in 1:N) { + // Response time distributed along wiener first passage time distribution + RTu[i, :Nu[i]] ~ wiener(alpha[i], tau[i], beta[i], delta[i]); + RTl[i, :Nl[i]] ~ wiener(alpha[i], tau[i], 1-beta[i], -delta[i]); + + } // end of subject loop +} + +generated quantities { + // For group level parameters + real mu_alpha; // boundary separation + real mu_beta; // initial bias + real mu_delta; // drift rate + real mu_tau; // nondecision time + + // For log likelihood calculation + real log_lik[N]; + + // Assign group level parameter values + mu_alpha = exp(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]); + mu_delta = exp(mu_pr[3]); + mu_tau = Phi_approx(mu_pr[4]) * (mean(minRT)-RTbound) + RTbound; + + { // local section, this saves time and space + // Begin subject loop + for (i in 1:N) { + log_lik[i] = wiener_lpdf(RTu[i, :Nu[i]] | alpha[i], tau[i], beta[i], delta[i]); + log_lik[i] += wiener_lpdf(RTl[i, :Nl[i]] | alpha[i], tau[i], 1-beta[i], -delta[i]); + } + } +} + diff --git a/R/inst/stan_files/choiceRT_ddm_single.stan b/R/inst/stan_files/choiceRT_ddm_single.stan new file mode 100644 index 00000000..6bacd18a --- /dev/null +++ b/R/inst/stan_files/choiceRT_ddm_single.stan @@ -0,0 +1,58 @@ +#include /pre/license.stan + +// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists +data { + int Nu; // of upper boundary responses + int Nl; // of lower boundary responses + real RTu[Nu]; // upper boundary response times + real RTl[Nl]; // lower boundary response times + real minRT; // minimum RT of the observed data + real RTbound; // lower bound or RT (e.g., 0.1 second) +} + +parameters { + // parameters of the DDM (parameter names in Ratcliffs DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R + // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ + // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0 + // beta (b): Initial bias Bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 + // delta (v): Drift rate Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta + // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 
0 < ter (in seconds) + ///* upper boundary of tau must be smaller than minimum RT + //to avoid zero likelihood for fast responses. + //tau can for physiological reasone not be faster than 0.1 s.*/ + + real alpha; // boundary separation + real beta; // initial bias + real delta; // drift rate + real tau; // nondecision time +} + +model { + alpha ~ uniform(0, 5); + beta ~ uniform(0, 1); + delta ~ normal(0, 2); + tau ~ uniform(RTbound, minRT); + + RTu ~ wiener(alpha, tau, beta, delta); + RTl ~ wiener(alpha, tau, 1-beta, -delta); +} + +generated quantities { + + // For log likelihood calculation + real log_lik; + + // For posterior predictive check (Not implementeed yet) + // vector[Nu] y_pred_upper; + // vector[Nl] y_pred_lower; + + { // local section, this saves time and space + log_lik = wiener_lpdf(RTu | alpha, tau, beta, delta); + log_lik += wiener_lpdf(RTl | alpha, tau, 1-beta, -delta); + + // generate posterior predictions (Not implemented yet) + // y_pred_upper = wiener_rng(alpha, tau, beta, delta); + // y_pred_lower = wiener_rng(alpha, tau, 1-beta, -delta); + } +} + diff --git a/R/inst/stan_files/choiceRT_lba.stan b/R/inst/stan_files/choiceRT_lba.stan new file mode 100644 index 00000000..222e5a27 --- /dev/null +++ b/R/inst/stan_files/choiceRT_lba.stan @@ -0,0 +1,278 @@ +#include /pre/license.stan + +// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). +// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. +functions { + real lba_pdf(real t, real b, real A, real v_pdf, real s) { + //PDF of the LBA model + real b_A_tv_ts; + real b_tv_ts; + real term_1b; + real term_2b; + real term_3b; + real term_4b; + real pdf; + + b_A_tv_ts = (b - A - t * v_pdf)/(t * s); + b_tv_ts = (b - t * v_pdf)/(t * s); + + term_1b = v_pdf * Phi(b_A_tv_ts); + term_2b = s * exp(normal_lpdf(fabs(b_A_tv_ts) | 0, 1)); + term_3b = v_pdf * Phi(b_tv_ts); + term_4b = s * exp(normal_lpdf(fabs(b_tv_ts) | 0, 1)); + + pdf = (1/A) * (-term_1b + term_2b + term_3b - term_4b); + + return pdf; + } + + real lba_cdf(real t, real b, real A, real v_cdf, real s) { + //CDF of the LBA model + real b_A_tv; + real b_tv; + real ts; + real term_1a; + real term_2a; + real term_3a; + real term_4a; + real cdf; + + b_A_tv = b - A - t * v_cdf; + b_tv = b - t * v_cdf; + ts = t * s; + + term_1a = b_A_tv/A * Phi(b_A_tv/ts); + term_2a = b_tv/A * Phi(b_tv/ts); + term_3a = ts/A * exp(normal_lpdf(fabs(b_A_tv/ts) | 0, 1)); + term_4a = ts/A * exp(normal_lpdf(fabs(b_tv/ts) | 0, 1)); + + cdf = 1 + term_1a - term_2a + term_3a - term_4a; + + return cdf; + } + + real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { + + real t; + real b; + real cdf; + real pdf; + vector[cols(RT)] prob; + real out; + real prob_neg; + + b = A + d; + for (i in 1:cols(RT)) { + t = RT[1, i] - tau; + if (t > 0) { + cdf = 1; + for (j in 1:num_elements(v)) { + if (RT[2, i] == j) { + pdf = lba_pdf(t, b, A, v[j], s); + } else { + cdf *= lba_cdf(t, b, A, v[j], s); + } + } + prob_neg = 1; + for (j in 1:num_elements(v)) { + prob_neg *= Phi(-v[j]/s); + } + prob[i] = pdf * (1-cdf); + prob[i] /= (1-prob_neg); + if (prob[i] < 1e-10) { + prob[i] = 1e-10; + } + + } else { + prob[i] = 1e-10; + } + } + out = sum(log(prob)); + return out; + } + + vector lba_rng(real d, real A, vector v, real s, real tau) { + + int get_pos_drift; + int no_pos_drift; + int get_first_pos; + vector[num_elements(v)] drift; + int max_iter; + int iter; + real start[num_elements(v)]; + real ttf[num_elements(v)]; + int 
resp[num_elements(v)]; + real rt; + vector[2] pred; + real b; + + //try to get a positive drift rate + get_pos_drift = 1; + no_pos_drift = 0; + max_iter = 1000; + iter = 0; + while(get_pos_drift) { + for (j in 1:num_elements(v)) { + drift[j] = normal_rng(v[j], s); + if (drift[j] > 0) { + get_pos_drift = 0; + } + } + iter += 1; + if (iter > max_iter) { + get_pos_drift = 0; + no_pos_drift = 1; + } + } + //if both drift rates are <= 0 + //return an infinite response time + if (no_pos_drift) { + pred[1] = -1; + pred[2] = -1; + } else { + b = A + d; + for (i in 1:num_elements(v)) { + //start time of each accumulator + start[i] = uniform_rng(0, A); + //finish times + ttf[i] = (b-start[i])/drift[i]; + } + //rt is the fastest accumulator finish time + //if one is negative get the positive drift + resp = sort_indices_asc(ttf); + { + real temp_ttf[num_elements(v)]; + temp_ttf = sort_asc(ttf); + ttf = temp_ttf; + } + get_first_pos = 1; + iter = 1; + while(get_first_pos) { + if (ttf[iter] > 0) { + pred[1] = ttf[iter]; + pred[2] = resp[iter]; + get_first_pos = 0; + } + iter += 1; + } + } + return pred; + } +} +data { + int N; + int Max_tr; + int N_choices; + int N_cond; + int N_tr_cond[N, N_cond]; + matrix[2, Max_tr] RT[N, N_cond]; + +} + +parameters { + // Hyperparameter means + real mu_d; + real mu_A; + real mu_tau; + vector[N_choices] mu_v[N_cond]; + + // Hyperparameter sigmas + real sigma_d; + real sigma_A; + real sigma_tau; + vector[N_choices] sigma_v[N_cond]; + + // Individual parameters + real d[N]; + real A[N]; + real tau[N]; + vector[N_choices] v[N, N_cond]; +} +transformed parameters { + // s is set to 1 to make model identifiable + real s; + s = 1; +} +model { + // Hyperparameter means + mu_d ~ normal(.5, 1)T[0,]; + mu_A ~ normal(.5, 1)T[0,]; + mu_tau ~ normal(.5, .5)T[0,]; + + // Hyperparameter sigmas + sigma_d ~ gamma(1, 1); + sigma_A ~ gamma(1, 1); + sigma_tau ~ gamma(1, 1); + + // Hyperparameter means and sigmas for multiple drift rates + for (j in 1:N_cond) { + for (n in 1:N_choices) { + mu_v[j, n] ~ normal(2, 1)T[0,]; + sigma_v[j, n] ~ gamma(1, 1); + } + } + + for (i in 1:N) { + // Declare variables + int n_trials; + + // Individual parameters + d[i] ~ normal(mu_d, sigma_d)T[0,]; + A[i] ~ normal(mu_A, sigma_A)T[0,]; + tau[i] ~ normal(mu_tau, sigma_tau)T[0,]; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = N_tr_cond[i, j]; + + for (n in 1:N_choices) { + // Drift rate is normally distributed + v[i, j, n] ~ normal(mu_v[j, n], sigma_v[j, n])T[0,]; + } + // Likelihood of RT x Choice + RT[i, j, , 1:n_trials] ~ lba(d[i], A[i], v[i, j,], s, tau[i]); + } + } +} + +generated quantities { + // Declare variables + int n_trials; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + matrix[2, Max_tr] y_pred[N, N_cond]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (j in 1:N_cond) { + for (t in 1:Max_tr) { + y_pred[i, j, , t] = rep_vector(-1, 2); + } + } + } + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize variables + log_lik[i] = 0; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = N_tr_cond[i, j]; + + // Sum likelihood over conditions within subjects + log_lik[i] += lba_lpdf(RT[i, j, , 1:n_trials] | d[i], A[i], v[i, j,], s, tau[i]); + + for (t in 1:n_trials) { + // generate posterior predictions + y_pred[i, j, , t] = lba_rng(d[i], A[i], v[i, j,], s, tau[i]); + } + } + } + // end of subject 
loop + } +} + diff --git a/R/inst/stan_files/choiceRT_lba_single.stan b/R/inst/stan_files/choiceRT_lba_single.stan new file mode 100644 index 00000000..1d5fd992 --- /dev/null +++ b/R/inst/stan_files/choiceRT_lba_single.stan @@ -0,0 +1,239 @@ +#include /pre/license.stan + +// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). +// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. +functions { + real lba_pdf(real t, real b, real A, real v, real s) { + //PDF of the LBA model + real b_A_tv_ts; + real b_tv_ts; + real term_1; + real term_2; + real term_3; + real term_4; + real pdf; + + b_A_tv_ts = (b - A - t * v)/(t * s); + b_tv_ts = (b - t * v)/(t * s); + + term_1 = v * Phi(b_A_tv_ts); + term_2 = s * exp(normal_lpdf(b_A_tv_ts | 0, 1)); + term_3 = v * Phi(b_tv_ts); + term_4 = s * exp(normal_lpdf(b_tv_ts | 0, 1)); + + pdf = (1/A) * (-term_1 + term_2 + term_3 - term_4); + + return pdf; + } + + real lba_cdf(real t, real b, real A, real v, real s) { + //CDF of the LBA model + real b_A_tv; + real b_tv; + real ts; + real term_1; + real term_2; + real term_3; + real term_4; + real cdf; + + b_A_tv = b - A - t * v; + b_tv = b - t * v; + ts = t * s; + + term_1 = b_A_tv/A * Phi(b_A_tv/ts); + term_2 = b_tv/A * Phi(b_tv/ts); + term_3 = ts/A * exp(normal_lpdf(b_A_tv/ts | 0, 1)); + term_4 = ts/A * exp(normal_lpdf(b_tv/ts | 0, 1)); + + cdf = 1 + term_1 - term_2 + term_3 - term_4; + + return cdf; + + } + + real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { + + real t; + real b; + real cdf; + real pdf; + vector[rows(RT)] prob; + real out; + real prob_neg; + + b = A + d; + for (i in 1:rows(RT)) { + t = RT[1, i] - tau; + if (t > 0) { + cdf = 1; + + for (j in 1:num_elements(v)) { + if (RT[2, i] == j) { + pdf = lba_pdf(t, b, A, v[j], s); + } else { + cdf *= (1-lba_cdf(t, b, A, v[j], s)); + } + } + prob_neg = 1; + for (j in 1:num_elements(v)) { + prob_neg *= Phi(-v[j]/s); + } + prob[i] = pdf * cdf; + prob[i] /= (1-prob_neg); + if (prob[i] < 1e-10) { + prob[i] = 1e-10; + } + + } else { + prob[i] = 1e-10; + } + } + out = sum(log(prob)); + return out; + } + + vector lba_rng(real d, real A, vector v, real s, real tau) { + + int get_pos_drift; + int no_pos_drift; + int get_first_pos; + vector[num_elements(v)] drift; + int max_iter; + int iter; + real start[num_elements(v)]; + real ttf[num_elements(v)]; + int resp[num_elements(v)]; + real rt; + vector[2] pred; + real b; + + //try to get a positive drift rate + get_pos_drift = 1; + no_pos_drift = 0; + max_iter = 1000; + iter = 0; + while(get_pos_drift) { + for (j in 1:num_elements(v)) { + drift[j] = normal_rng(v[j], s); + if (drift[j] > 0) { + get_pos_drift = 0; + } + } + iter += 1; + if (iter > max_iter) { + get_pos_drift = 0; + no_pos_drift = 1; + } + } + //if both drift rates are <= 0 + //return an infinite response time + if (no_pos_drift) { + pred[1] = -1; + pred[2] = -1; + } else { + b = A + d; + for (i in 1:num_elements(v)) { + //start time of each accumulator + start[i] = uniform_rng(0, A); + //finish times + ttf[i] = (b-start[i])/drift[i]; + } + //rt is the fastest accumulator finish time + //if one is negative get the positive drift + resp = sort_indices_asc(ttf); + { + real temp_ttf[num_elements(v)]; + temp_ttf = sort_asc(ttf); + ttf = temp_ttf; + } + get_first_pos = 1; + iter = 1; + while(get_first_pos) { + if (ttf[iter] > 0) { + pred[1] = ttf[iter] + tau; + pred[2] = resp[iter]; + get_first_pos = 0; + } + iter += 1; + } + } + return pred; + } +} +data { + int 
N_choice; + int N_cond; + int tr_cond[N_cond]; + int max_tr; + matrix[2, max_tr] RT[N_cond]; +} + +parameters { + real d; + real A; + real tau; + vector[N_choice] v[N_cond]; +} +transformed parameters { + real s; + s = 1; +} +model { + // Declare variables + int n_trials; + + // Individual parameters + d ~ normal(.5, 1)T[0,]; + A ~ normal(.5, 1)T[0,]; + tau ~ normal(.5, .5)T[0,]; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = tr_cond[j]; + + for (n in 1:N_choice) { + // Drift rate is normally distributed + v[j, n] ~ normal(2, 1)T[0,]; + } + // Likelihood of RT x Choice + RT[j, , 1:n_trials] ~ lba(d, A, v[j,], s, tau); + } +} + +generated quantities { + // Declare variables + int n_trials; + + // For log likelihood calculation + real log_lik; + + // For posterior predictive check + matrix[2, max_tr] y_pred[N_cond]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (j in 1:N_cond) { + for (t in 1:max_tr) { + y_pred[j, , t] = rep_vector(-1, 2); + } + } + + // initialize log_lik + log_lik = 0; + + { // local section, this saves time and space + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = tr_cond[j]; + + // Sum likelihood over conditions within subjects + log_lik += lba_lpdf(RT[j, , 1:n_trials] | d, A, v[j,], s, tau); + + for (t in 1:n_trials) { + // generate posterior predictions + y_pred[j, , t] = lba_rng(d, A, v[j,], s, tau); + } + } + } +} + diff --git a/R/inst/stan_files/cra_exp.stan b/R/inst/stan_files/cra_exp.stan new file mode 100644 index 00000000..86a44a0e --- /dev/null +++ b/R/inst/stan_files/cra_exp.stan @@ -0,0 +1,134 @@ +#include /pre/license.stan + +/** + * Choice under Risk and Ambiguity Task + * + * Exponential model in Hsu et al. 
(2005) Science + */ + +functions { + /** + * Subjective value function with the exponential equation form + */ + real subjective_value(real alpha, real beta, real p, real a, real v) { + return pow(p, 1 + beta * a) * pow(v, alpha); + } +} + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/block for each subject + + int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) + real prob[N, T]; // The objective probability of the variable lottery + real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) + real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) + real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // risk attitude parameter + vector[N] beta_pr; // ambiguity attitude parameter + vector[N] gamma_pr; // inverse temperature parameter +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] alpha; + vector[N] beta; + vector[N] gamma; + + alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; + beta = mu_pr[2] + sigma[2] * beta_pr; + gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); +} + +model { + // hyper parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 5); + + // individual parameters w/ Matt trick + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + gamma_pr ~ normal(0, 1); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + real p_var; // probability of choosing the variable option + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var = inv_logit(gamma[i] * (u_var - u_fix)); + + target += bernoulli_lpmf(choice[i, t] | p_var); + } + } +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_gamma; + + // For log likelihood calculation for each subject + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Model regressors + real sv[N, T]; + real sv_fix[N, T]; + real sv_var[N, T]; + real p_var[N, T]; + + // Set all posterior predictions to -1 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + sv[i, t] = 0; + sv_fix[i, t] = 0; + sv_var[i, t] = 0; + p_var[i, t] = 0; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 2; + mu_beta = mu_pr[2]; + mu_gamma = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize the log likelihood variable to 0. + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); + + sv_fix[i, t] = u_fix; + sv_var[i, t] = u_var; + sv[i, t] = (choice[i, t] == 1) ? 
u_var : u_fix; + + log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); + y_pred[i, t] = bernoulli_rng(p_var[i, t]); + } + } + } +} + diff --git a/R/inst/stan_files/cra_linear.stan b/R/inst/stan_files/cra_linear.stan new file mode 100644 index 00000000..b8653c85 --- /dev/null +++ b/R/inst/stan_files/cra_linear.stan @@ -0,0 +1,130 @@ +#include /pre/license.stan + +/** + * Choice under Risk and Ambiguity Task + * + * Linear model in Levy et al. (2010) J Neurophysiol + */ + +functions { + /** + * Subjective value function with the linear equation form + */ + real subjective_value(real alpha, real beta, real p, real a, real v) { + return (p - beta * a / 2) * pow(v, alpha); + } +} + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/block for each subject + + int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) + real prob[N, T]; // The objective probability of the variable lottery + real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) + real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) + real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // risk attitude parameter + vector[N] beta_pr; // ambiguity attitude parameter + vector[N] gamma_pr; // inverse temperature parameter +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] alpha; + vector[N] beta; + vector[N] gamma; + + alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; + beta = mu_pr[2] + sigma[2] * beta_pr; + gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); +} + +model { + // hyper parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 5); + + // individual parameters w/ Matt trick + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + gamma_pr ~ normal(0, 1); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + real p_var; // probability of choosing the variable option + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var = inv_logit(gamma[i] * (u_var - u_fix)); + + target += bernoulli_lpmf(choice[i, t] | p_var); + } + } +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_gamma; + + // For log likelihood calculation for each subject + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Model regressors + real sv[N, T]; + real sv_fix[N, T]; + real sv_var[N, T]; + real p_var[N, T]; + + // Set all posterior predictions to -1 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 2; + mu_beta = mu_pr[2]; + mu_gamma = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize the log likelihood variable to 0. 
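The two CRA files differ only in the subjective-value function they declare; side by side in R (illustrative, names chosen here):

# R sketch of the two CRA subjective-value forms (illustrative only)
sv_exp    <- function(alpha, beta, p, a, v) p^(1 + beta * a) * v^alpha    # exponential form, Hsu et al. (2005)
sv_linear <- function(alpha, beta, p, a, v) (p - beta * a / 2) * v^alpha  # linear form, Levy et al. (2010)

# With no ambiguity (a = 0) the two forms agree exactly:
sv_exp(alpha = 0.8, beta = 0.6, p = 0.5, a = 0, v = 20)     # 0.5 * 20^0.8
sv_linear(alpha = 0.8, beta = 0.6, p = 0.5, a = 0, v = 20)  # identical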
+ log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); + + sv_fix[i, t] = u_fix; + sv_var[i, t] = u_var; + sv[i, t] = (choice[i, t] == 1) ? u_var : u_fix; + + log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); + y_pred[i, t] = bernoulli_rng(p_var[i, t]); + } + } + } +} + diff --git a/R/inst/stan_files/dbdm_prob_weight.stan b/R/inst/stan_files/dbdm_prob_weight.stan new file mode 100644 index 00000000..ee248835 --- /dev/null +++ b/R/inst/stan_files/dbdm_prob_weight.stan @@ -0,0 +1,154 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real opt1hprob[N, T]; + real opt2hprob[N, T]; + real opt1hval[N, T]; + real opt1lval[N, T]; + real opt2hval[N, T]; + real opt2lval[N, T]; +} +transformed data { +} +parameters{ + //group-level parameters + vector[4] mu_pr; + vector[4] sigma; + + //subject-level raw parameters, follows norm(0,1), for later Matt Trick + vector[N] tau_pr; //probability weight parameter + vector[N] rho_pr; //subject utility parameter + vector[N] lambda_pr; //loss aversion parameter + vector[N] beta_pr; //inverse softmax temperature +} + +transformed parameters { + //subject-level parameters + vector[N] tau; + vector[N] rho; + vector[N] lambda; + vector[N] beta; + + //Matt Trick + for (i in 1:N) { + tau[i] = Phi_approx( mu_pr[1] + sigma[1] * tau_pr[i] ); + rho[i] = Phi_approx( mu_pr[2] + sigma[2] * rho_pr[i] )*2; + lambda[i] = Phi_approx( mu_pr[3] + sigma[3] * lambda_pr[i] )*5; + beta[i] = Phi_approx( mu_pr[4] + sigma[4] * beta_pr[i] ); + } +} + +model { + //prior : hyperparameters + mu_pr ~ normal(0,1); + sigma ~ cauchy(0,5); + + //prior : individual parameters + tau_pr ~ normal(0,1); + rho_pr ~ normal(0,1); + lambda_pr ~ normal(0,1); + beta_pr ~ normal(0,1); + + //subject loop and trial loop + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + vector[4] w_prob; + vector[2] U_opt; + + //probability weight function + w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); + w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); + w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); + w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); + + if (opt1hval[i,t]>0) { + if (opt1lval[i,t]>= 0) { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); + } else { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + + if (opt2hval[i,t] > 0) { + if (opt2lval[i,t] >= 0) { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); + } else { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + // compute action probabilities + choice[i, t] ~ categorical_logit(U_opt*beta[i]); + } + } +} + +generated quantities { + real mu_tau; + real mu_rho; + real mu_lambda; + real mu_beta; + real log_lik[N]; + // For posterior predictive check + real y_pred[N,T]; + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + 
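dbdm_prob_weight distorts objective probabilities with a one-parameter Prelec-style weighting function, w(p) = exp(-(-log p)^tau), before combining them with the utilities. A small R sketch (illustrative):

# R sketch of the probability weighting used above (illustrative only)
w <- function(p, tau) exp(-(-log(p))^tau)

w(0.5, tau = 1)    # 0.5: tau = 1 leaves probabilities undistorted
w(0.1, tau = 0.6)  # ~0.19: small probabilities are over-weighted when tau < 1
w(0.9, tau = 0.6)  # ~0.77: large probabilities are under-weighted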
y_pred[i, t] = -1; + } + } + + mu_tau = Phi_approx(mu_pr[1]); + mu_rho = Phi_approx(mu_pr[2])*2; + mu_lambda = Phi_approx(mu_pr[3])*5; + mu_beta = Phi_approx(mu_pr[4]); + + { // local section, this saves time and space + for (i in 1:N) { + log_lik[i] = 0; + for (t in 1:Tsubj[i]) { + vector[4] w_prob; + vector[2] U_opt; + + //probability weight function + w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); + w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); + w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); + w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); + + if (opt1hval[i,t]>0) { + if (opt1lval[i,t]>= 0) { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); + } else { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + + if (opt2hval[i,t] > 0) { + if (opt2lval[i,t] >= 0) { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); + } else { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + + // compute action probabilities + log_lik[i] += categorical_logit_lpmf(choice[i,t] | U_opt*beta[i]); + y_pred[i, t] = categorical_rng(softmax(U_opt*beta[i])); + + } + } + } +} + diff --git a/R/inst/stan_files/dd_cs.stan b/R/inst/stan_files/dd_cs.stan new file mode 100644 index 00000000..d221d34a --- /dev/null +++ b/R/inst/stan_files/dd_cs.stan @@ -0,0 +1,107 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] r_pr; // (exponential) discounting rate (Impatience) + vector[N] s_pr; // time-sensitivity + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] r; + vector[N] s; + vector[N] beta; + + for (i in 1:N) { + r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); + s[i] = Phi_approx(mu_pr[2] + sigma[2] * s_pr[i]) * 10; + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 5; + } +} + +model { +// Constant-sensitivity model (Ebert & Prelec, 2007) + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + s_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); + ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_r; + real mu_s; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + 
} + + mu_r = Phi_approx(mu_pr[1]); + mu_s = Phi_approx(mu_pr[2]) * 10; + mu_beta = Phi_approx(mu_pr[3]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); + ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git a/R/inst/stan_files/dd_cs_single.stan b/R/inst/stan_files/dd_cs_single.stan new file mode 100644 index 00000000..2436b8b1 --- /dev/null +++ b/R/inst/stan_files/dd_cs_single.stan @@ -0,0 +1,63 @@ +#include /pre/license.stan + +data { + int Tsubj; + real delay_later[Tsubj]; + real amount_later[Tsubj]; + real delay_sooner[Tsubj]; + real amount_sooner[Tsubj]; + int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { + real r; // (exponential) discounting rate + real s; // impatience + real beta; // inverse temperature +} + +transformed parameters { + real ev_later[Tsubj]; + real ev_sooner[Tsubj]; + + for (t in 1:Tsubj) { + ev_later[t] = amount_later[t] * exp(-1* (pow(r * delay_later[t], s))); + ev_sooner[t] = amount_sooner[t] * exp(-1* (pow(r * delay_sooner[t], s))); + } +} + +model { + // constant-sensitivity model (Ebert & Prelec, 2007) + // hyperparameters + r ~ uniform(0, 1); + s ~ uniform(0, 10); + beta ~ uniform(0, 5); + + for (t in 1:Tsubj) { + choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); + } +} + +generated quantities { + real logR; + real log_lik; + + // For posterior predictive check + real y_pred[Tsubj]; + + logR = log(r); + + { // local section, this saves time and space + log_lik = 0; + + for (t in 1:Tsubj) { + log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); + + // generate posterior prediction for current trial + y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); + } + } +} + diff --git a/R/inst/stan_files/dd_exp.stan b/R/inst/stan_files/dd_exp.stan new file mode 100644 index 00000000..3d772a5a --- /dev/null +++ b/R/inst/stan_files/dd_exp.stan @@ -0,0 +1,101 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] r_pr; + vector[N] beta_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] r; + vector[N] beta; + + for (i in 1:N) { + r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; + } +} + +model { +// Exponential function + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); + ev_sooner = 
amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_r; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_r = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git a/R/inst/stan_files/dd_hyperbolic.stan b/R/inst/stan_files/dd_hyperbolic.stan new file mode 100644 index 00000000..1551304a --- /dev/null +++ b/R/inst/stan_files/dd_hyperbolic.stan @@ -0,0 +1,101 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] k_pr; + vector[N] beta_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] k; + vector[N] beta; + + for (i in 1:N) { + k[i] = Phi_approx(mu_pr[1] + sigma[1] * k_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; + } +} + +model { +// Hyperbolic function + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + k_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_k; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_k = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git 
a/R/inst/stan_files/dd_hyperbolic_single.stan b/R/inst/stan_files/dd_hyperbolic_single.stan new file mode 100644 index 00000000..be3011f0 --- /dev/null +++ b/R/inst/stan_files/dd_hyperbolic_single.stan @@ -0,0 +1,57 @@ +#include /pre/license.stan + +data { + int Tsubj; + real delay_later[Tsubj]; + real amount_later[Tsubj]; + real delay_sooner[Tsubj]; + real amount_sooner[Tsubj]; + int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { + real k; // discounting rate + real beta; // inverse temperature +} + +transformed parameters { + real ev_later[Tsubj]; + real ev_sooner[Tsubj]; + + for (t in 1:Tsubj) { + ev_later[t] = amount_later[t] / (1 + k * delay_later[t]); + ev_sooner[t] = amount_sooner[t] / (1 + k * delay_sooner[t]); + } +} + +model { + k ~ uniform(0, 1); + beta ~ uniform(0, 5); + + for (t in 1:Tsubj) { + choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); + } +} +generated quantities { + real logK; + real log_lik; + + // For posterior predictive check + real y_pred[Tsubj]; + + logK = log(k); + + { // local section, this saves time and space + log_lik = 0; + for (t in 1:Tsubj) { + log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); + + // generate posterior prediction for current trial + y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); + } + } +} + diff --git a/R/inst/stan_files/gng_m1.stan b/R/inst/stan_files/gng_m1.stan new file mode 100644 index 00000000..5ac8abd0 --- /dev/null +++ b/R/inst/stan_files/gng_m1.stan @@ -0,0 +1,149 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[3] mu_pr; + vector[3] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] rho_pr; // rho, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] rho; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + rho = exp(mu_pr[3] + sigma[3] * rho_pr); +} + +model { +// gng_m1: RW + noise model in Guitart-Masip et al 2012 + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + rho_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_rho; + real log_lik[N]; + real 
Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_xi = Phi_approx(mu_pr[1]); + mu_ep = Phi_approx(mu_pr[2]); + mu_rho = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); + + // Model regressors --> store values before being updated + Qgo[i, t] = qv_g[cue[i, t]]; + Qnogo[i, t] = qv_ng[cue[i, t]]; + Wgo[i, t] = wv_g[cue[i, t]]; + Wnogo[i, t] = wv_ng[cue[i, t]]; + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/R/inst/stan_files/gng_m2.stan b/R/inst/stan_files/gng_m2.stan new file mode 100644 index 00000000..c9a8ced8 --- /dev/null +++ b/R/inst/stan_files/gng_m2.stan @@ -0,0 +1,160 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[4] mu_pr; + vector[4] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] b_pr; // go bias + vector[N] rho_pr; // rho, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] b; + vector[N] rho; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + b = mu_pr[3] + sigma[3] * b_pr; // vectorization + rho = exp(mu_pr[4] + sigma[4] * rho_pr); +} + +model { +// gng_m2: RW + noise + bias model in Guitart-Masip et al 2012 + // hyper parameters + mu_pr[1] ~ normal(0, 1.0); + mu_pr[2] ~ normal(0, 1.0); + mu_pr[3] ~ normal(0, 10.0); + mu_pr[4] ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + sigma[4] ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + b_pr ~ normal(0, 1.0); + rho_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of 
action)
+      pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
+      { // noise
+        pGo[cue[i, t]] *= (1 - xi[i]);
+        pGo[cue[i, t]] += xi[i]/2;
+      }
+      pressed[i, t] ~ bernoulli(pGo[cue[i, t]]);
+
+      // update action values
+      if (pressed[i, t]) { // update go value
+        qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]);
+      } else { // update no-go value
+        qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]);
+      }
+    } // end of t loop
+  } // end of i loop
+}
+
+generated quantities {
+  real mu_xi;
+  real mu_ep;
+  real mu_b;
+  real mu_rho;
+  real log_lik[N];
+  real Qgo[N, T];
+  real Qnogo[N, T];
+  real Wgo[N, T];
+  real Wnogo[N, T];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_xi  = Phi_approx(mu_pr[1]);
+  mu_ep  = Phi_approx(mu_pr[2]);
+  mu_b   = mu_pr[3];
+  mu_rho = exp(mu_pr[4]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      vector[4] wv_g;  // action weight for go
+      vector[4] wv_ng; // action weight for nogo
+      vector[4] qv_g;  // Q value for go
+      vector[4] qv_ng; // Q value for nogo
+      vector[4] pGo;   // prob of go (press)
+
+      wv_g  = initV;
+      wv_ng = initV;
+      qv_g  = initV;
+      qv_ng = initV;
+
+      log_lik[i] = 0;
+
+      for (t in 1:Tsubj[i]) {
+        wv_g[cue[i, t]]  = qv_g[cue[i, t]] + b[i];
+        wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action)
+        pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
+        { // noise
+          pGo[cue[i, t]] *= (1 - xi[i]);
+          pGo[cue[i, t]] += xi[i]/2;
+        }
+        log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]);
+
+        // Model regressors --> store values before being updated
+        Qgo[i, t]   = qv_g[cue[i, t]];
+        Qnogo[i, t] = qv_ng[cue[i, t]];
+        Wgo[i, t]   = wv_g[cue[i, t]];
+        Wnogo[i, t] = wv_ng[cue[i, t]];
+
+        // update action values
+        if (pressed[i, t]) { // update go value
+          qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]);
+        } else { // update no-go value
+          qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]);
+        }
+      } // end of t loop
+    } // end of i loop
+  } // end of local section
+}
+
diff --git a/R/inst/stan_files/gng_m3.stan b/R/inst/stan_files/gng_m3.stan
new file mode 100644
index 00000000..2368ea1a
--- /dev/null
+++ b/R/inst/stan_files/gng_m3.stan
@@ -0,0 +1,179 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  int cue[N, T];
+  int pressed[N, T];
+  real outcome[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // declare as vectors for vectorizing
+  vector[5] mu_pr;
+  vector[5] sigma;
+  vector[N] xi_pr;  // noise
+  vector[N] ep_pr;  // learning rate
+  vector[N] b_pr;   // go bias
+  vector[N] pi_pr;  // pavlovian bias
+  vector[N] rho_pr; // rho, inv temp
+}
+
+transformed parameters {
+  vector[N] xi;
+  vector[N] ep;
+  vector[N] b;
+  vector[N] pi;
+  vector[N] rho;
+
+  for (i in 1:N) {
+    xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]);
+    ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]);
+  }
+  b   = mu_pr[3] + sigma[3] * b_pr; // vectorization
+  pi  = mu_pr[4] + sigma[4] * pi_pr;
+  rho = exp(mu_pr[5] + sigma[5] * rho_pr);
+}
+
+model {
+// gng_m3: RW + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro)
+  // hyper parameters
+  mu_pr[1] ~ normal(0, 1.0);
+  mu_pr[2] ~ normal(0, 1.0);
+
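/*
   All four gng models in this patch share one choice rule: the go/no-go
   action-weight difference passes through a logistic and is then mixed with
   a lapse term xi, i.e. p(go) = (1 - xi) * inv_logit(w_go - w_nogo) + xi/2.
   A minimal R sketch with made-up weights; p_go is a hypothetical helper and
   the numbers are not fitted values:

     p_go <- function(w_go, w_nogo, xi) {
       # with probability xi the agent guesses at random (p = 1/2)
       (1 - xi) * plogis(w_go - w_nogo) + xi / 2
     }
     p_go(w_go = 0.8, w_nogo = 0.2, xi = 0.1)  # approx. 0.63

   This is exactly what the small { // noise } blocks compute in place via
   pGo *= (1 - xi); pGo += xi/2.
*/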
mu_pr[3] ~ normal(0, 10.0); + mu_pr[4] ~ normal(0, 10.0); + mu_pr[5] ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3:4] ~ cauchy(0, 1.0); + sigma[5] ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + b_pr ~ normal(0, 1.0); + pi_pr ~ normal(0, 1.0); + rho_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] sv; // stimulus value + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + sv = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // after receiving feedback, update sv[t + 1] + sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_b; + real mu_pi; + real mu_rho; + real log_lik[N]; + real Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + real SV[N, T]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_xi = Phi_approx(mu_pr[1]); + mu_ep = Phi_approx(mu_pr[2]); + mu_b = mu_pr[3]; + mu_pi = mu_pr[4]; + mu_rho = exp(mu_pr[5]); + + { // local section, this saves time and space + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] sv; // stimulus value + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + sv = initV; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); + + // Model regressors --> store values before being updated + Qgo[i, t] = qv_g[cue[i, t]]; + Qnogo[i, t] = qv_ng[cue[i, t]]; + Wgo[i, t] = wv_g[cue[i, t]]; + Wnogo[i, t] = wv_ng[cue[i, t]]; + SV[i, t] = sv[cue[i, t]]; + + // after receiving feedback, update sv[t + 1] + sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end 
of i loop + } // end of local section +} + diff --git a/R/inst/stan_files/gng_m4.stan b/R/inst/stan_files/gng_m4.stan new file mode 100644 index 00000000..73e30cb1 --- /dev/null +++ b/R/inst/stan_files/gng_m4.stan @@ -0,0 +1,210 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[6] mu_pr; + vector[6] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] b_pr; // go bias + vector[N] pi_pr; // pavlovian bias + vector[N] rhoRew_pr; // rho reward, inv temp + vector[N] rhoPun_pr; // rho punishment, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] b; + vector[N] pi; + vector[N] rhoRew; + vector[N] rhoPun; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + b = mu_pr[3] + sigma[3] * b_pr; // vectorization + pi = mu_pr[4] + sigma[4] * pi_pr; + rhoRew = exp(mu_pr[5] + sigma[5] * rhoRew_pr); + rhoPun = exp(mu_pr[6] + sigma[6] * rhoPun_pr); +} + +model { +// gng_m4: RW(rew/pun) + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) + // hyper parameters + mu_pr[1] ~ normal(0, 1.0); + mu_pr[2] ~ normal(0, 1.0); + mu_pr[3] ~ normal(0, 10.0); + mu_pr[4] ~ normal(0, 10.0); + mu_pr[5] ~ normal(0, 1.0); + mu_pr[6] ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3:4] ~ cauchy(0, 1.0); + sigma[5:6] ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + b_pr ~ normal(0, 1.0); + pi_pr ~ normal(0, 1.0); + rhoRew_pr ~ normal(0, 1.0); + rhoPun_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] sv; // stimulus value + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + sv = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // after receiving feedback, update sv[t + 1] + if (outcome[i, t] >= 0) { + sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); + } else { + sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); + } + + // update action values + if (pressed[i, t]) { // update go value + if (outcome[i, t] >=0) { + qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { + qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); + } + } else { // update no-go value + if (outcome[i, t] >=0) { + qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } else { + qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_b; + real mu_pi; + real mu_rhoRew; + real mu_rhoPun; + real log_lik[N]; + real Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + real SV[N, T]; + + // For 
posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_xi     = Phi_approx(mu_pr[1]);
+  mu_ep     = Phi_approx(mu_pr[2]);
+  mu_b      = mu_pr[3];
+  mu_pi     = mu_pr[4];
+  mu_rhoRew = exp(mu_pr[5]);
+  mu_rhoPun = exp(mu_pr[6]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      vector[4] wv_g;  // action weight for go
+      vector[4] wv_ng; // action weight for nogo
+      vector[4] qv_g;  // Q value for go
+      vector[4] qv_ng; // Q value for nogo
+      vector[4] sv;    // stimulus value
+      vector[4] pGo;   // prob of go (press)
+
+      wv_g  = initV;
+      wv_ng = initV;
+      qv_g  = initV;
+      qv_ng = initV;
+      sv    = initV;
+
+      log_lik[i] = 0;
+
+      for (t in 1:Tsubj[i]) {
+        wv_g[cue[i, t]]  = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]];
+        wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action)
+        pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
+        { // noise
+          pGo[cue[i, t]] *= (1 - xi[i]);
+          pGo[cue[i, t]] += xi[i]/2;
+        }
+        log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]);
+
+        // Model regressors --> store values before being updated
+        Qgo[i, t]   = qv_g[cue[i, t]];
+        Qnogo[i, t] = qv_ng[cue[i, t]];
+        Wgo[i, t]   = wv_g[cue[i, t]];
+        Wnogo[i, t] = wv_ng[cue[i, t]];
+        SV[i, t]    = sv[cue[i, t]];
+
+        // after receiving feedback, update sv[t + 1]
+        if (outcome[i, t] >= 0) {
+          sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]);
+        } else {
+          sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]);
+        }
+
+        // update action values
+        if (pressed[i, t]) { // update go value
+          if (outcome[i, t] >= 0) {
+            qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]);
+          } else {
+            qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]);
+          }
+        } else { // update no-go value
+          if (outcome[i, t] >= 0) {
+            qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]);
+          } else {
+            qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]);
+          }
+        }
+      } // end of t loop
+    } // end of i loop
+  } // end of local section
+}
+
diff --git a/R/inst/stan_files/igt_orl.stan b/R/inst/stan_files/igt_orl.stan
new file mode 100644
index 00000000..a560de27
--- /dev/null
+++ b/R/inst/stan_files/igt_orl.stan
@@ -0,0 +1,207 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  int choice[N, T];
+  real outcome[N, T];
+  real sign_out[N, T];
+}
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+parameters {
+// Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[5] mu_pr;
+  vector[5] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] Arew_pr;
+  vector[N] Apun_pr;
+  vector[N] K_pr;
+  vector[N] betaF_pr;
+  vector[N] betaP_pr;
+}
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector[N] Arew;
+  vector[N] Apun;
+  vector[N] K;
+  vector[N] betaF;
+  vector[N] betaP;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx( mu_pr[1] + sigma[1] * Arew_pr[i] );
+    Apun[i] = Phi_approx( mu_pr[2] + sigma[2] * Apun_pr[i] );
+    K[i]    = Phi_approx(mu_pr[3] + sigma[3] * K_pr[i]) * 5;
+  }
+  betaF = mu_pr[4] + sigma[4] * betaF_pr;
+  betaP = mu_pr[5] + sigma[5] * betaP_pr;
+}
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma[1:3] ~ normal(0, 0.2);
+  sigma[4:5] ~ cauchy(0, 1.0);
+
+  //
individual parameters + Arew_pr ~ normal(0, 1.0); + Apun_pr ~ normal(0, 1.0); + K_pr ~ normal(0, 1.0); + betaF_pr ~ normal(0, 1.0); + betaP_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] ef; + vector[4] ev; + vector[4] PEfreq_fic; + vector[4] PEval_fic; + vector[4] pers; // perseverance + vector[4] util; + + real PEval; + real PEfreq; + real efChosen; + real evChosen; + real K_tr; + + // Initialize values + ef = initV; + ev = initV; + pers = initV; // initial pers values + util = initV; + K_tr = pow(3, K[i]) - 1; + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit( util ); + + // Prediction error + PEval = outcome[i,t] - ev[ choice[i,t]]; + PEfreq = sign_out[i,t] - ef[ choice[i,t]]; + PEfreq_fic = -sign_out[i,t]/3 - ef; + + // store chosen deck ev + efChosen = ef[ choice[i,t]]; + evChosen = ev[ choice[i,t]]; + + if (outcome[i,t] >= 0) { + // Update ev for all decks + ef += Apun[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Arew[i] * PEval; + } else { + // Update ev for all decks + ef += Arew[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Apun[i] * PEval; + } + + // Perseverance updating + pers[ choice[i,t] ] = 1; // perseverance term + pers /= (1 + K_tr); // decay + + // Utility of expected value and perseverance + util = ev + ef * betaF[i] + pers * betaP[i]; + } + } +} + +generated quantities { + // For group level parameters + real mu_Arew; + real mu_Apun; + real mu_K; + real mu_betaF; + real mu_betaP; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N,T]; + + // Set all posterior predictions to -1 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i,t] = -1; + } + } + + mu_Arew = Phi_approx(mu_pr[1]); + mu_Apun = Phi_approx(mu_pr[2]); + mu_K = Phi_approx(mu_pr[3]) * 5; + mu_betaF = mu_pr[4]; + mu_betaP = mu_pr[5]; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ef; + vector[4] ev; + vector[4] PEfreq_fic; + vector[4] PEval_fic; + vector[4] pers; // perseverance + vector[4] util; + + real PEval; + real PEfreq; + real efChosen; + real evChosen; + real K_tr; + + // Initialize values + log_lik[i] = 0; + ef = initV; + ev = initV; + pers = initV; // initial pers values + util = initV; + K_tr = pow(3, K[i]) - 1; + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf( choice[i, t] | util ); + + // generate posterior prediction for current trial + y_pred[i,t] = categorical_rng(softmax(util)); + + // Prediction error + PEval = outcome[i,t] - ev[ choice[i,t]]; + PEfreq = sign_out[i,t] - ef[ choice[i,t]]; + PEfreq_fic = -sign_out[i,t]/3 - ef; + + // store chosen deck ev + efChosen = ef[ choice[i,t]]; + evChosen = ev[ choice[i,t]]; + + if (outcome[i,t] >= 0) { + // Update ev for all decks + ef += Apun[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Arew[i] * PEval; + } else { + // Update ev for all decks + ef += Arew[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Apun[i] * PEval; + } + + // Perseverance updating + pers[ choice[i,t] ] = 1; // perseverance term + pers /= (1 + K_tr); // decay + + // Utility of expected value 
and perseverance + util = ev + ef * betaF[i] + pers * betaP[i]; + } + } + } +} + diff --git a/R/inst/stan_files/igt_pvl_decay.stan b/R/inst/stan_files/igt_pvl_decay.stan new file mode 100644 index 00000000..2d908a19 --- /dev/null +++ b/R/inst/stan_files/igt_pvl_decay.stan @@ -0,0 +1,134 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + cons_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * ev); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // decay-RI + ev *= A[i]; + ev[choice[i, t]] += curUtil; + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * ev)); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // decay-RI + ev *= A[i]; + ev[choice[i, t]] += curUtil; + } + } + } +} + diff --git a/R/inst/stan_files/igt_pvl_delta.stan b/R/inst/stan_files/igt_pvl_delta.stan new file mode 100644 index 00000000..05c6e870 --- /dev/null +++ b/R/inst/stan_files/igt_pvl_delta.stan @@ -0,0 +1,132 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} 
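/*
   igt_pvl_decay above, igt_pvl_delta here, and igt_vpp below all share the
   prospect-valuation step: gains are raised to the shape parameter alpha,
   losses are additionally scaled by the loss-aversion weight lambda, and
   choice sensitivity is theta = 3^cons - 1. A minimal R sketch with
   illustrative inputs; pvl_utility is a hypothetical helper, not package code:

     pvl_utility <- function(x, alpha, lambda) {
       # gains: x^alpha; losses: -lambda * |x|^alpha
       ifelse(x >= 0, x^alpha, -lambda * (-x)^alpha)
     }
     pvl_utility(c(100, -250), alpha = 0.5, lambda = 1.5)  # 10.0, -23.7
     theta <- 3^0.8 - 1  # sensitivity implied by cons = 0.8

   The models then differ only in how ev is carried forward: decay-RI
   multiplies all decks by A each trial, while the delta rule moves only the
   chosen deck toward the current utility.
*/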
+transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + } +} +model { +// Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + cons_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * ev); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // delta + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * ev)); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // delta + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + } + } + } +} + diff --git a/R/inst/stan_files/igt_vpp.stan b/R/inst/stan_files/igt_vpp.stan new file mode 100644 index 00000000..61c2b831 --- /dev/null +++ b/R/inst/stan_files/igt_vpp.stan @@ -0,0 +1,188 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[8] mu_pr; + vector[8] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; + vector[N] epP_pr; + vector[N] epN_pr; + vector[N] K_pr; + 
vector[N] w_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + vector[N] epP; + vector[N] epN; + vector[N] K; + vector[N] w; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + K[i] = Phi_approx(mu_pr[7] + sigma[7] * K_pr[i]); + w[i] = Phi_approx(mu_pr[8] + sigma[8] * w_pr[i]); + } + epP = mu_pr[5] + sigma[5] * epP_pr; + epN = mu_pr[6] + sigma[6] * epN_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1.0); + sigma[1:4] ~ normal(0, 0.2); + sigma[5:6] ~ cauchy(0, 1.0); + sigma[7:8] ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1.0); + alpha_pr ~ normal(0, 1.0); + cons_pr ~ normal(0, 1.0); + lambda_pr ~ normal(0, 1.0); + epP_pr ~ normal(0, 1.0); + epN_pr ~ normal(0, 1.0); + K_pr ~ normal(0, 1.0); + w_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] ev; + vector[4] p_next; + vector[4] str; + vector[4] pers; // perseverance + vector[4] V; // weighted sum of ev and pers + + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + pers = initV; // initial pers values + V = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * V); + + // perseverance decay + pers *= K[i]; // decay + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + pers[choice[i, t]] += epP[i]; // perseverance term + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + pers[choice[i, t]] += epN[i]; // perseverance term + } + + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + // calculate V + V = w[i] * ev + (1-w[i]) * pers; + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + real mu_epP; + real mu_epN; + real mu_K; + real mu_w; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + mu_epP = mu_pr[5]; + mu_epN = mu_pr[6]; + mu_K = Phi_approx(mu_pr[7]); + mu_w = Phi_approx(mu_pr[8]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + vector[4] p_next; + vector[4] str; + vector[4] pers; // perseverance + vector[4] V; // weighted sum of ev and pers + + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + pers = initV; // initial pers values + V = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * V); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * V)); + + // perseverance decay + pers *= K[i]; // decay + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + pers[choice[i, t]] += epP[i]; // perseverance term 
+ } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + pers[choice[i, t]] += epN[i]; // perseverance term + } + + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + // calculate V + V = w[i] * ev + (1-w[i]) * pers; + } + } + } +} + diff --git a/R/inst/stan_files/peer_ocu.stan b/R/inst/stan_files/peer_ocu.stan new file mode 100644 index 00000000..cd0c52d5 --- /dev/null +++ b/R/inst/stan_files/peer_ocu.stan @@ -0,0 +1,115 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int condition[N, T]; // 0: solo, 1: ss, 2: mix, 3: rr + real p_gamble[N, T]; + real safe_Hpayoff[N, T]; + real safe_Lpayoff[N, T]; + real risky_Hpayoff[N, T]; + real risky_Lpayoff[N, T]; + int choice[N, T]; +} + +transformed data { +} + +parameters { + vector[3] mu_pr; + vector[3] sigma; + vector[N] rho_pr; + vector[N] tau_pr; + vector[N] ocu_pr; +} + +transformed parameters { + vector[N] rho; + vector[N] tau; + vector[N] ocu; + + for (i in 1:N) { + rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; + } + tau = exp(mu_pr[2] + sigma[2] * tau_pr); + ocu = mu_pr[3] + sigma[3] * ocu_pr; +} + +model { + // peer_ocu + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + + // individual parameters w/ Matt trick + rho_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + ocu_pr ~ normal(0, 1.0); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real U_safe; + real U_risky; + + U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); + U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); + if (condition[i, t] == 1) { // safe-safe + U_safe += ocu[i]; + } + if (condition[i, t] == 3) { // risky-risky + U_risky += ocu[i]; + } + choice[i, t] ~ bernoulli_logit(tau[i] * (U_risky - U_safe)); + } + } +} +generated quantities { + real mu_rho; + real mu_tau; + real mu_ocu; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_rho = Phi_approx(mu_pr[1]) * 2; + mu_tau = exp(mu_pr[2]); + mu_ocu = mu_pr[3]; + + { // local section, this saves time and space + for (i in 1:N) { + + // Initialize values + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + real U_safe; + real U_risky; + + U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); + U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); + if (condition[i, t] == 1) { // safe-safe + U_safe += ocu[i]; + } + if (condition[i, t] == 3) { // risky-risky + U_risky += ocu[i]; + } + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | tau[i] * (U_risky - U_safe)); + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(tau[i] * (U_risky - U_safe))); + } + } + } +} + diff --git a/R/inst/stan_files/pre/license.stan b/R/inst/stan_files/pre/license.stan new file mode 100644 index 00000000..dec428a6 --- /dev/null +++ b/R/inst/stan_files/pre/license.stan @@ -0,0 +1,14 @@ +/* + hBayesDM is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. 
+
+  hBayesDM is distributed in the hope that it will be useful,
+  but WITHOUT ANY WARRANTY; without even the implied warranty of
+  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+  GNU General Public License for more details.
+
+  You should have received a copy of the GNU General Public License
+  along with hBayesDM. If not, see <http://www.gnu.org/licenses/>.
+*/
diff --git a/R/inst/stan_files/prl_ewa.stan b/R/inst/stan_files/prl_ewa.stan
new file mode 100644
index 00000000..234cf467
--- /dev/null
+++ b/R/inst/stan_files/prl_ewa.stan
@@ -0,0 +1,179 @@
+#include /pre/license.stan
+
+/**
+ * Probabilistic Reversal Learning (PRL) Task
+ *
+ * Experience-Weighted Attraction model by den Ouden et al. (2013) Neuron
+ */
+
+data {
+  int N;              // Number of subjects
+  int T;              // Maximum number of trials across subjects
+  int Tsubj[N];       // Number of trials/blocks for each subject
+
+  int choice[N, T];   // The choices subjects made
+  real outcome[N, T]; // The outcome
+}
+
+transformed data {
+  // Default value for (re-)initializing parameter vectors
+  vector[2] initV;
+  initV = rep_vector(0.0, 2);
+}
+
+// Declare all parameters as vectors for vectorizing
+parameters {
+  // Hyper(group)-parameters
+  vector[3] mu_pr;
+  vector[3] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] phi_pr;  // 1-learning rate
+  vector[N] rho_pr;  // experience decay factor
+  vector[N] beta_pr; // inverse temperature
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector[N] phi;
+  vector[N] rho;
+  vector[N] beta;
+
+  for (i in 1:N) {
+    phi[i]  = Phi_approx(mu_pr[1] + sigma[1] * phi_pr[i]);
+    rho[i]  = Phi_approx(mu_pr[2] + sigma[2] * rho_pr[i]);
+    beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10;
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // Individual parameters
+  phi_pr ~ normal(0, 1);
+  rho_pr ~ normal(0, 1);
+  beta_pr ~ normal(0, 1);
+
+  for (i in 1:N) {
+    // Define values
+    vector[2] ev; // Expected value
+    vector[2] ew; // Experience weight
+
+    real ewt1; // Experience weight of trial (t - 1)
+
+    // Initialize values
+    ev = initV; // initial ev values
+    ew = initV; // initial ew values
+
+    for (t in 1:Tsubj[i]) {
+      // Softmax choice
+      choice[i, t] ~ categorical_logit(ev * beta[i]);
+
+      // Store previous experience weight value
+      ewt1 = ew[choice[i, t]];
+
+      // Update experience weight for chosen stimulus
+      {
+        ew[choice[i, t]] *= rho[i];
+        ew[choice[i, t]] += 1;
+      }
+
+      // Update expected value of chosen stimulus
+      {
+        ev[choice[i, t]] *= phi[i] * ewt1;
+        ev[choice[i, t]] += outcome[i, t];
+        ev[choice[i, t]] /= ew[choice[i, t]];
+      }
+    }
+  }
+}
+
+generated quantities {
+  // For group level parameters
+  real mu_phi;
+  real mu_rho;
+  real mu_beta;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For model regressors
+  //real mr_ev[N, T, 2]; // Expected value
+  real ev_c[N, T];  // Expected value of the chosen option
+  real ev_nc[N, T]; // Expected value of the non-chosen option
+
+  //real mr_ew[N, T, 2]; // Experience weight
+  real ew_c[N, T];  // Experience weight of the chosen option
+  real ew_nc[N, T]; // Experience weight of the non-chosen option
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions, model regressors to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      ev_c[i, t]  = 0;
+      ev_nc[i, t] = 0;
+      ew_c[i, t]  = 0;
+      ew_nc[i, t] = 0;
+
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_phi  = Phi_approx(mu_pr[1]);
+  mu_rho  = Phi_approx(mu_pr[2]);
+  mu_beta =
Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // Expected value + vector[2] ew; // Experience weight + + real ewt1; // Experience weight of trial (t-1) + + // Initialize values + ev = initV; // initial ev values + ew = initV; // initial ew values + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); + + // Store values for model regressors + //mr_ev[i, t] = ev; + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + //mr_ew[i, t] = ew; + ew_c[i, t] = ew[choice[i, t]]; + ew_nc[i, t] = ew[3 - choice[i, t]]; + + // Store previous experience weight value + ewt1 = ew[choice[i, t]]; + + // Update experience weight for chosen stimulus + { + ew[choice[i, t]] *= rho[i]; + ew[choice[i, t]] += 1; + } + + // Update expected value of chosen stimulus + { + ev[choice[i, t]] *= phi[i] * ewt1; + ev[choice[i, t]] += outcome[i, t]; + ev[choice[i, t]] /= ew[choice[i, t]]; + } + } + } + } +} + diff --git a/R/inst/stan_files/prl_fictitious.stan b/R/inst/stan_files/prl_fictitious.stan new file mode 100644 index 00000000..0fb8d486 --- /dev/null +++ b/R/inst/stan_files/prl_fictitious.stan @@ -0,0 +1,173 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } + alpha = mu_pr[2] + sigma[2] * alpha_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1] ~ normal(0, 0.2); + sigma[2] ~ cauchy(0, 1.0); + sigma[3] ~ normal(0, 0.2); + + // Individual parameters + eta_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // Compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // Prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } +} + +generated 
quantities { + // For group level parameters + real mu_eta; + real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; //Prediction error of the chosen option + real pe_nc[N, T]; //Prediction error of the non-chosen option + real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_alpha = mu_pr[2]; + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } + } +} + diff --git a/R/inst/stan_files/prl_fictitious_multipleB.stan b/R/inst/stan_files/prl_fictitious_multipleB.stan new file mode 100644 index 00000000..264d6c8f --- /dev/null +++ b/R/inst/stan_files/prl_fictitious_multipleB.stan @@ -0,0 +1,185 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + + int B; // Max number of blocks across subjects + int Bsubj[N]; // Number of blocks for each subject + + int T; // Max number of trials across subjects + int Tsubj[N, B]; // Number of trials/block for each subject + + int choice[N, B, T]; // Choice for each subject-block-trial + real outcome[N, B, T]; // Outcome (reward/loss) for each subject-block-trial +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } + alpha = mu_pr[2] + sigma[2] * alpha_pr; +} +model { + 
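/*
   The fictitious-update models turn the EV difference into a choice
   probability via p(option 1) = 1 / (1 + exp(beta * (alpha - (ev1 - ev2)))),
   then update the unchosen option with the sign-flipped outcome. A minimal R
   sketch of one trial; fict_update is a hypothetical helper and the numbers
   are toy values, not estimates:

     fict_update <- function(ev, chosen, outcome, eta) {
       pe  <- outcome - ev[chosen]       # prediction error, chosen option
       pen <- -outcome - ev[3 - chosen]  # fictitious PE, unchosen option
       ev[chosen]     <- ev[chosen] + eta * pe
       ev[3 - chosen] <- ev[3 - chosen] + eta * pen
       ev
     }
     fict_update(c(0, 0), chosen = 1, outcome = 1, eta = 0.3)  # 0.3, -0.3

   The 3 - chosen indexing maps option 1 to option 2 and vice versa; the
   multipleB variant simply re-initializes ev at the start of every block.
*/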
// Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1] ~ normal(0, 0.2); + sigma[2] ~ cauchy(0, 1.0); + sigma[3] ~ normal(0, 0.2); + + // individual parameters + eta_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + for (bIdx in 1:Bsubj[i]) { // new + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i, bIdx])) { // new + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, bIdx, t] ~ categorical(prob); + //choice[i, t] ~ bernoulli(prob); + + // prediction error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new + PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new + + // value updating (learning) + ev[choice[i, bIdx, t]] += eta[i] * PE; //new + ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new + } // end of t loop + } // end of bIdx loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_eta; + real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, B, T]; // Expected value of the chosen option + real ev_nc[N, B, T]; // Expected value of the non-chosen option + + real pe_c[N, B, T]; //Prediction error of the chosen option + real pe_nc[N, B, T]; //Prediction error of the non-chosen option + real dv[N, B, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, B, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (b in 1:B) { + for (t in 1:T) { + ev_c[i, b, t] = 0; + ev_nc[i, b, t] = 0; + + pe_c[i, b, t] = 0; + pe_nc[i, b, t] = 0; + dv[i, b, t] = 0; + + y_pred[i, b, t] = -1; + } + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_alpha = mu_pr[2]; + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + + log_lik[i] = 0; + + for (bIdx in 1:Bsubj[i]) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i, bIdx])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, bIdx, t] | prob); //new + + // generate posterior prediction for current trial + y_pred[i, bIdx, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new + PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new + + // Store values for model regressors + ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; + ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; + + pe_c[i, bIdx, t] = PE; + pe_nc[i, bIdx, t] = PEnc; + dv[i, bIdx, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, bIdx, t]] += eta[i] * PE; //new + ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new + } // end of t loop + } // end of bIdx loop + } + } +} + diff --git a/R/inst/stan_files/prl_fictitious_rp.stan b/R/inst/stan_files/prl_fictitious_rp.stan new file mode 100644 index 00000000..daa0779c --- 
/dev/null +++ b/R/inst/stan_files/prl_fictitious_rp.stan @@ -0,0 +1,188 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pos_pr; // learning rate, positive PE + vector[N] eta_neg_pr; // learning rate, negative PE + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta_pos; + vector[N] eta_neg; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); + eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); + beta[i] = Phi_approx(mu_pr[4] + sigma[4] * beta_pr[i]) * 10; + } + alpha = mu_pr[3] + sigma[3] * alpha_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + sigma[4] ~ normal(0, 0.2); + + // individual parameters + eta_pos_pr ~ normal(0, 1); + eta_neg_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_eta_pos; + real mu_eta_neg; + real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; // Prediction error of the chosen option + real pe_nc[N, T]; // Prediction error of the non-chosen option + + real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta_pos = Phi_approx(mu_pr[1]); + mu_eta_neg = Phi_approx(mu_pr[2]); + mu_alpha = mu_pr[3]; + mu_beta = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // 
Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // Value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } + } +} + diff --git a/R/inst/stan_files/prl_fictitious_rp_woa.stan b/R/inst/stan_files/prl_fictitious_rp_woa.stan new file mode 100644 index 00000000..48f78a42 --- /dev/null +++ b/R/inst/stan_files/prl_fictitious_rp_woa.stan @@ -0,0 +1,180 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) with separate learning rates for +PE and -PE & without alpha (indecision point) + */ + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pos_pr; // learning rate, positive PE + vector[N] eta_neg_pr; // learning rate, negative PE + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta_pos; + vector[N] eta_neg; + vector[N] beta; + + for (i in 1:N) { + eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); + eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + eta_pos_pr ~ normal(0, 1); + eta_neg_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; 
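+        // 3 - choice[i, t] indexes the non-chosen option (1 <-> 2), so the line
+        // below applies the fictitious update PEnc at the same gain-domain rate eta_pos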
+ ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_eta_pos; + real mu_eta_neg; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; // Prediction error of the chosen option + real pe_nc[N, T]; // Prediction error of the non-chosen option + + real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta_pos = Phi_approx(mu_pr[1]); + mu_eta_neg = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // Value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } + } +} + diff --git a/R/inst/stan_files/prl_fictitious_woa.stan b/R/inst/stan_files/prl_fictitious_woa.stan new file mode 100644 index 00000000..58a4053f --- /dev/null +++ b/R/inst/stan_files/prl_fictitious_woa.stan @@ -0,0 +1,165 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) without alpha (indecision point) + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * 
beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Individual parameters + eta_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // Compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // Prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } +} + +generated quantities { + // For group level parameters + real mu_eta; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; //Prediction error of the chosen option + real pe_nc[N, T]; //Prediction error of the non-chosen option + real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] =0; + + y_pred[i, t] = -1; + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } + } +} + diff --git a/R/inst/stan_files/prl_rp.stan b/R/inst/stan_files/prl_rp.stan new file mode 100644 index 00000000..a7303744 --- /dev/null +++ b/R/inst/stan_files/prl_rp.stan @@ -0,0 +1,149 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Reward-Punishment Model by Ouden et al. 
(2013) Neuron + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Apun_pr; // learning rate (punishment) + vector[N] Arew_pr; // learning rate (reward) + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Apun; + vector[N] Arew; + vector[N] beta; + + for (i in 1:N) { + Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); + Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Apun_pr ~ normal(0, 1); + Arew_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define Values + vector[2] ev; // Expected value + real PE; // prediction error + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // Softmax choice + choice[i, t] ~ categorical_logit(ev * beta[i]); + + // Prediction Error + PE = outcome[i, t] - ev[choice[i, t]]; + + // Update expected value of chosen stimulus + if (outcome[i, t] > 0) + ev[choice[i, t]] += Arew[i] * PE; + else + ev[choice[i, t]] += Apun[i] * PE; + } + } +} + +generated quantities { + // For group level parameters + real mu_Apun; + real mu_Arew; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + real pe[N, T]; // Prediction error + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_Apun = Phi_approx(mu_pr[1]); + mu_Arew = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // Expected value + real PE; // Prediction error + + // Initialize values + ev = initV; // initial ev values + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); + + // Prediction Error + PE = outcome[i, t] - ev[choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe[i, t] = PE; + + // Update expected value of chosen stimulus + if (outcome[i, t] > 0) + ev[choice[i, t]] += Arew[i] * PE; + else + ev[choice[i, t]] += Apun[i] * PE; + } + } + } +} + diff --git a/R/inst/stan_files/prl_rp_multipleB.stan b/R/inst/stan_files/prl_rp_multipleB.stan new file mode 100644 index 00000000..8cd77c43 --- /dev/null +++ b/R/inst/stan_files/prl_rp_multipleB.stan @@ -0,0 +1,161 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning 
(PRL) Task + * + * Reward-Punishment Model with multiple blocks per subject by Ouden et al. (2013) Neuron + */ + +data { + int N; // Number of subjects + + int B; // Maximum number of blocks across subjects + int Bsubj[N]; // Number of blocks for each subject + + int T; // Maximum number of trials across subjects + int Tsubj[N, B]; // Number of trials/blocks for each subject + + int choice[N, B, T]; // The choices subjects made + real outcome[N, B, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Apun_pr; // learning rate (punishment) + vector[N] Arew_pr; // learning rate (reward) + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Apun; + vector[N] Arew; + vector[N] beta; + + for (i in 1:N) { + Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); + Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Apun_pr ~ normal(0, 1); + Arew_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + for (bIdx in 1:Bsubj[i]) { // new + // Define Values + vector[2] ev; // Expected value + real PE; // Prediction error + + // Initialize values + ev = initV; // Initial ev values + + for (t in 1:Tsubj[i, bIdx]) { + // Softmax choice + choice[i, bIdx, t] ~ categorical_logit(ev * beta[i]); + + // Prediction Error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; + + // Update expected value of chosen stimulus + if (outcome[i, bIdx, t] > 0) + ev[choice[i, bIdx, t]] += Arew[i] * PE; + else + ev[choice[i, bIdx, t]] += Apun[i] * PE; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_Apun; + real mu_Arew; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, B, T]; // Expected value of the chosen option + real ev_nc[N, B, T]; // Expected value of the non-chosen option + real pe[N, B, T]; // Prediction error + + // For posterior predictive check + real y_pred[N, B, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (b in 1:B) { + for (t in 1:T) { + ev_c[i, b, t] = 0; + ev_nc[i, b, t] = 0; + pe[i, b, t] = 0; + + y_pred[i, b, t] = -1; + } + } + } + + mu_Apun = Phi_approx(mu_pr[1]); + mu_Arew = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + + log_lik[i] = 0; + + for (bIdx in 1:Bsubj[i]) { // new + // Define values + vector[2] ev; // Expected value + real PE; // prediction error + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:Tsubj[i, bIdx]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, bIdx, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, bIdx, t] = categorical_rng(softmax(ev * beta[i])); + + // Prediction Error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; + + // Store values for model regressors + ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; + ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; + pe[i, bIdx, t] = PE; + + // Update 
expected value of chosen stimulus + if (outcome[i, bIdx, t] > 0) + ev[choice[i, bIdx, t]] += Arew[i] * PE; + else + ev[choice[i, bIdx, t]] += Apun[i] * PE; + } + } + } + } +} + diff --git a/R/inst/stan_files/pst_gainloss_Q.stan b/R/inst/stan_files/pst_gainloss_Q.stan new file mode 100644 index 00000000..788b9a4e --- /dev/null +++ b/R/inst/stan_files/pst_gainloss_Q.stan @@ -0,0 +1,114 @@ +#include /pre/license.stan + +data { + int N; // Number of subjects + int T; // Maximum # of trials + int Tsubj[N]; // # of trials for acquisition phase + + int option1[N, T]; + int option2[N, T]; + int choice[N, T]; + real reward[N, T]; +} + +transformed data { + // Default values to initialize the vector of expected values + vector[6] initial_values; + initial_values = rep_vector(0, 6); +} + +parameters { + // Group-level parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level parameters for Matt trick + vector[N] alpha_pos_pr; + vector[N] alpha_neg_pr; + vector[N] beta_pr; +} + +transformed parameters { + vector[N] alpha_pos; + vector[N] alpha_neg; + vector[N] beta; + + alpha_pos = Phi_approx(mu_pr[1] + sigma[1] * alpha_pos_pr); + alpha_neg = Phi_approx(mu_pr[2] + sigma[2] * alpha_neg_pr); + beta = Phi_approx(mu_pr[3] + sigma[3] * beta_pr) * 10; +} + +model { + // Priors for group-level parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Priors for subject-level parameters + alpha_pos_pr ~ normal(0, 1); + alpha_neg_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + int co; // Chosen option + real delta; // Difference between two options + real pe; // Prediction error + real alpha; + vector[6] ev; // Expected values + + ev = initial_values; + + // Acquisition Phase + for (t in 1:Tsubj[i]) { + co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; + + // Luce choice rule + delta = ev[option1[i, t]] - ev[option2[i, t]]; + target += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); + + pe = reward[i, t] - ev[co]; + alpha = (pe >= 0) ? alpha_pos[i] : alpha_neg[i]; + ev[co] += alpha * pe; + } + } +} + +generated quantities { + // For group-level parameters + real mu_alpha_pos; + real mu_alpha_neg; + real mu_beta; + + // For log-likelihood calculation + real log_lik[N]; + + mu_alpha_pos = Phi_approx(mu_pr[1]); + mu_alpha_neg = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { + for (i in 1:N) { + int co; // Chosen option + real delta; // Difference between two options + real pe; // Prediction error + real alpha; + vector[6] ev; // Expected values + + ev = initial_values; + log_lik[i] = 0; + + // Acquisition Phase + for (t in 1:Tsubj[i]) { + co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; + + // Luce choice rule + delta = ev[option1[i, t]] - ev[option2[i, t]]; + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); + + pe = reward[i, t] - ev[co]; + alpha = (pe >= 0) ? 
alpha_pos[i] : alpha_neg[i];
+        ev[co] += alpha * pe;
+      }
+    }
+  }
+}
+
diff --git a/R/inst/stan_files/ra_noLA.stan b/R/inst/stan_files/ra_noLA.stan
new file mode 100644
index 00000000..c5c599c4
--- /dev/null
+++ b/R/inst/stan_files/ra_noLA.stan
@@ -0,0 +1,95 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  real gain[N, T];
+  real loss[N, T]; // absolute loss amount
+  real cert[N, T];
+  int gamble[N, T];
+}
+
+transformed data {
+}
+
+parameters {
+  vector[2] mu_pr;
+  vector[2] sigma;
+  vector[N] rho_pr;
+  vector[N] tau_pr;
+}
+
+transformed parameters {
+  vector[N] rho;
+  vector[N] tau;
+
+  for (i in 1:N) {
+    rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
+    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
+  }
+}
+
+model {
+  // ra_noLA: Sokol-Hessner et al. 2009 PNAS model without loss aversion (lambda)
+  // hyper parameters
+  mu_pr ~ normal(0, 1.0);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters w/ Matt trick
+  rho_pr ~ normal(0, 1.0);
+  tau_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    for (t in 1:Tsubj[i]) {
+      real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+      real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
+      real pGamble;
+
+      evSafe   = pow(cert[i, t], rho[i]);
+      evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i]));
+      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
+      gamble[i, t] ~ bernoulli(pGamble);
+    }
+  }
+}
+generated quantities {
+  real mu_rho;
+  real mu_tau;
+
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_rho = Phi_approx(mu_pr[1]) * 2;
+  mu_tau = Phi_approx(mu_pr[2]) * 30;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      log_lik[i] = 0;
+      for (t in 1:Tsubj[i]) {
+        real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+        real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
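+        // pGamble below reproduces the model block's choice rule,
+        // inv_logit(tau * (evGamble - evSafe)), so log_lik and y_pred match the fitted model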
+        real pGamble;
+
+        // loss[i, t]=absolute amount of loss (pre-converted in R)
+        evSafe   = pow(cert[i, t], rho[i]);
+        evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i]));
+        pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
+        log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = bernoulli_rng(pGamble);
+      }
+    }
+  }
+}
+
diff --git a/R/inst/stan_files/ra_noRA.stan b/R/inst/stan_files/ra_noRA.stan
new file mode 100644
index 00000000..0f36c3be
--- /dev/null
+++ b/R/inst/stan_files/ra_noRA.stan
@@ -0,0 +1,95 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  real gain[N, T];
+  real loss[N, T]; // absolute loss amount
+  real cert[N, T];
+  int gamble[N, T];
+}
+
+transformed data {
+}
+
+parameters {
+  vector[2] mu_pr;
+  vector[2] sigma;
+  vector[N] lambda_pr;
+  vector[N] tau_pr;
+}
+
+transformed parameters {
+  vector[N] lambda;
+  vector[N] tau;
+
+  for (i in 1:N) {
+    lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]) * 5;
+    tau[i]    = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30;
+  }
+}
+
+model {
+  // ra_noRA: Sokol-Hessner et al. 2009 PNAS model without risk aversion (rho)
+  // hyper parameters
+  mu_pr ~ normal(0, 1.0);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters w/ Matt trick
+  lambda_pr ~ normal(0, 1.0);
+  tau_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    for (t in 1:Tsubj[i]) {
+      real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+      real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
+      real pGamble;
+
+      // loss[i, t]=absolute amount of loss (pre-converted in R)
+      evSafe   = cert[i, t];
+      evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]);
+      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
+      gamble[i, t] ~ bernoulli(pGamble);
+    }
+  }
+}
+generated quantities {
+  real mu_lambda;
+  real mu_tau;
+
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_lambda = Phi_approx(mu_pr[1]) * 5;
+  mu_tau    = Phi_approx(mu_pr[2]) * 30;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      log_lik[i] = 0;
+      for (t in 1:Tsubj[i]) {
+        real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+        real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
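+        // with risk aversion removed, the expected values below are linear in the
+        // amounts: evSafe = cert and evGamble = 0.5 * (gain - lambda * loss)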
+        real pGamble;
+
+        evSafe   = cert[i, t];
+        evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]);
+        pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
+        log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = bernoulli_rng(pGamble);
+      }
+    }
+  }
+}
+
diff --git a/R/inst/stan_files/ra_prospect.stan b/R/inst/stan_files/ra_prospect.stan
new file mode 100644
index 00000000..542ea460
--- /dev/null
+++ b/R/inst/stan_files/ra_prospect.stan
@@ -0,0 +1,97 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  real gain[N, T];
+  real loss[N, T]; // absolute loss amount
+  real cert[N, T];
+  int gamble[N, T];
+}
+transformed data {
+}
+parameters {
+  vector[3] mu_pr;
+  vector[3] sigma;
+  vector[N] rho_pr;
+  vector[N] lambda_pr;
+  vector[N] tau_pr;
+}
+transformed parameters {
+  vector[N] rho;
+  vector[N] lambda;
+  vector[N] tau;
+
+  for (i in 1:N) {
+    rho[i]    = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2;
+    lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5;
+    tau[i]    = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 30;
+  }
+}
+model {
+  // ra_prospect: Original model in Sokol-Hessner et al. 2009 PNAS
+  // hyper parameters
+  mu_pr ~ normal(0, 1.0);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters w/ Matt trick
+  rho_pr ~ normal(0, 1.0);
+  lambda_pr ~ normal(0, 1.0);
+  tau_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    for (t in 1:Tsubj[i]) {
+      real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+      real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
+      real pGamble;
+
+      // loss[i, t]=absolute amount of loss (pre-converted in R)
+      evSafe   = pow(cert[i, t], rho[i]);
+      evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(loss[i, t], rho[i]));
+      pGamble  = inv_logit(tau[i] * (evGamble - evSafe));
+      gamble[i, t] ~ bernoulli(pGamble);
+    }
+  }
+}
+generated quantities {
+  real mu_rho;
+  real mu_lambda;
+  real mu_tau;
+
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_rho    = Phi_approx(mu_pr[1]) * 2;
+  mu_lambda = Phi_approx(mu_pr[2]) * 5;
+  mu_tau    = Phi_approx(mu_pr[3]) * 30;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      log_lik[i] = 0;
+      for (t in 1:Tsubj[i]) {
+        real evSafe;   // evSafe, evGamble, pGamble are scalars re-used across trials
+        real evGamble; // to save memory and increase speed (an RL model would typically store them per trial)
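+        // full prospect-theory valuation below: amounts are curved by rho and the
+        // loss term is weighted by lambda before the inv_logit choice rule on tau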
+ real pGamble; + + evSafe = pow(cert[i, t], rho[i]); + evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(fabs(loss[i, t]), rho[i])); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGamble); + } + } + } +} + diff --git a/R/inst/stan_files/rdt_happiness.stan b/R/inst/stan_files/rdt_happiness.stan new file mode 100644 index 00000000..3abb9e18 --- /dev/null +++ b/R/inst/stan_files/rdt_happiness.stan @@ -0,0 +1,146 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real gain[N, T]; + real loss[N, T]; // absolute loss amount + real cert[N, T]; + int type[N, T]; + int gamble[N, T]; + real outcome[N, T]; + real happy[N, T]; + real RT_happy[N, T]; +} +transformed data { +} +parameters { + vector[6] mu_pr; + vector[6] sigma; + vector[N] w0_pr; + vector[N] w1_pr; + vector[N] w2_pr; + vector[N] w3_pr; + vector[N] gam_pr; + vector[N] sig_pr; +} +transformed parameters { + vector[N] w0; + vector[N] w1; + vector[N] w2; + vector[N] w3; + vector[N] gam; + vector[N] sig; + + w0 = mu_pr[1] + sigma[1] * w0_pr; + w1 = mu_pr[2] + sigma[2] * w1_pr; + w2 = mu_pr[3] + sigma[3] * w2_pr; + w3 = mu_pr[4] + sigma[4] * w3_pr; + + for (i in 1:N) { + gam[i] = Phi_approx(mu_pr[5] + sigma[5] * gam_pr[i]); + } + sig = exp(mu_pr[6] + sigma[6] * sig_pr); +} +model { + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + w0_pr ~ normal(0, 1.0); + w1_pr ~ normal(0, 1.0); + w2_pr ~ normal(0, 1.0); + w3_pr ~ normal(0, 1.0); + gam_pr ~ normal(0, 1.0); + sig_pr ~ normal(0, 1.0); + + for (i in 1:N) { + real cert_sum; + real ev_sum; + real rpe_sum; + + + cert_sum = 0; + ev_sum = 0; + rpe_sum = 0; + + for (t in 1:Tsubj[i]) { + if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ + happy[i,t] ~ normal(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + } + + if(gamble[i,t] == 0){ + cert_sum += type[i,t] * cert[i,t]; + } else { + ev_sum += 0.5 * (gain[i,t] - loss[i,t]); + rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); + } + + cert_sum *= gam[i]; + ev_sum *= gam[i]; + rpe_sum *= gam[i]; + } + } +} +generated quantities { + real mu_w0; + real mu_w1; + real mu_w2; + real mu_w3; + real mu_gam; + real mu_sig; + + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_w0 = mu_pr[1]; + mu_w1 = mu_pr[2]; + mu_w2 = mu_pr[3]; + mu_w3 = mu_pr[4]; + mu_gam = Phi_approx(mu_pr[5]); + mu_sig = exp(mu_pr[6]); + + + { // local section, this saves time and space + for (i in 1:N) { + real cert_sum; + real ev_sum; + real rpe_sum; + + log_lik[i] = 0; + + cert_sum = 0; + ev_sum = 0; + rpe_sum = 0; + + for (t in 1:Tsubj[i]) { + if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ + log_lik[i] += normal_lpdf(happy[i, t] | w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + y_pred[i, t] = normal_rng(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + } + + if(gamble[i,t] == 0){ + cert_sum += type[i,t] * cert[i,t]; + } else { + ev_sum += 0.5 * (gain[i,t] - loss[i,t]); + rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); + } + + cert_sum *= gam[i]; + ev_sum *= gam[i]; + rpe_sum *= gam[i]; + } + } + } +} + diff --git a/R/inst/stan_files/ts_par4.stan b/R/inst/stan_files/ts_par4.stan new file mode 100644 
index 00000000..c615f6d0 --- /dev/null +++ b/R/inst/stan_files/ts_par4.stan @@ -0,0 +1,204 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int level1_choice[N,T]; // 1: left, 2: right + int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 + int reward[N,T]; + real trans_prob; +} +transformed data { +} +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] a_pr; + vector[N] beta_pr; + vector[N] pi_pr; + vector[N] w_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] a; + vector[N] beta; + vector[N] pi; + vector[N] w; + + for (i in 1:N) { + a[i] = Phi_approx( mu_pr[1] + sigma[1] * a_pr[i] ); + beta[i] = exp( mu_pr[2] + sigma[2] * beta_pr[i] ); + pi[i] = Phi_approx( mu_pr[3] + sigma[3] * pi_pr[i] ) * 5; + w[i] = Phi_approx( mu_pr[4] + sigma[4] * w_pr[i] ); + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + a_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + pi_pr ~ normal(0, 1); + w_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... 
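+        // both reward-driven updates below share the single learning rate a;
+        // ts_par6 and ts_par7 later in this series split it into a1 (level 1) and a2 (level 2)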
+        // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward
+        v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] );
+
+        // Update Level 1 v_mf
+        v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]);
+
+      } // end of t loop
+    } // end of i loop
+}
+
+generated quantities {
+  // For group level parameters
+  real mu_a;
+  real mu_beta;
+  real mu_pi;
+  real mu_w;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred_step1[N,T];
+  real y_pred_step2[N,T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred_step1[i,t] = -1;
+      y_pred_step2[i,t] = -1;
+    }
+  }
+
+  // Generate group level parameter values
+  mu_a    = Phi_approx( mu_pr[1] );
+  mu_beta = exp( mu_pr[2] );
+  mu_pi   = Phi_approx( mu_pr[3] ) * 5;
+  mu_w    = Phi_approx( mu_pr[4] );
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[2] v_mb;    // model-based stimulus values for level 1 (2 stimuli)
+      vector[6] v_mf;    // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2)
+      vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli)
+      real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1
+      real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2
+      int level1_choice_01;
+      int level2_choice_01;
+
+      // Initialize values
+      v_mb = rep_vector(0.0, 2);
+      v_mf = rep_vector(0.0, 6);
+      v_hybrid = rep_vector(0.0, 2);
+
+      log_lik[i] = 0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute v_mb
+        v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1
+        v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2
+
+        // compute v_hybrid
+        v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum
+        v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum
+
+        // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial
+        // level1_choice=1 --> -1, level1_choice=2 --> 1
+        level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1
+        if(t == 1){
+          level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]));
+        } else{
+          level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) );
+        }
+        log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 );
+
+        // Observe Level2 and update Level1 of the chosen option (applied once,
+        // exactly as in the model block, so log_lik matches the fitted likelihood)
+        v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]);
+
+        // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 **
+        level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4
+        // Level 2 --> choose one of two level 2 options
+        if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4
+          level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) );
+        } else { // level2_choice = 1 or 2
+          level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) );
+        }
+        log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 );
+
+        // generate posterior prediction for current trial
+        y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2);
+        y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2);
+
+        // After observing the reward at Level 2...
+        // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward
+        v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] );
+
+        // Update Level 1 v_mf
+        v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]);
+
+      } // end of t loop
+    } // end of i loop
+  }
+}
+
diff --git a/R/inst/stan_files/ts_par6.stan b/R/inst/stan_files/ts_par6.stan
new file mode 100644
index 00000000..b472afa0
--- /dev/null
+++ b/R/inst/stan_files/ts_par6.stan
@@ -0,0 +1,213 @@
+#include /pre/license.stan
+
+data {
+  int N;
+  int T;
+  int Tsubj[N];
+  int level1_choice[N,T]; // 1: left, 2: right
+  int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2
+  int reward[N,T];
+  real trans_prob;
+}
+transformed data {
+}
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[6] mu_pr;
+  vector[6] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] a1_pr;
+  vector[N] beta1_pr;
+  vector[N] a2_pr;
+  vector[N] beta2_pr;
+  vector[N] pi_pr;
+  vector[N] w_pr;
+}
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector[N] a1;
+  vector[N] beta1;
+  vector[N] a2;
+  vector[N] beta2;
+  vector[N] pi;
+  vector[N] w;
+
+  for (i in 1:N) {
+    a1[i]    = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] );
+    beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] );
+    a2[i]    = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] );
+    beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] );
+    pi[i]    = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5;
+    w[i]     = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] );
+  }
+}
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  a1_pr ~ normal(0, 1);
+  beta1_pr ~ normal(0, 1);
+  a2_pr ~ normal(0, 1);
+  beta2_pr ~ normal(0, 1);
+  pi_pr ~ normal(0, 1);
+  w_pr ~ normal(0, 1);
+
+  for (i in 1:N) {
+    // Define values
+    vector[2] v_mb;    // model-based stimulus values for level 1 (2 stimuli)
+    vector[6] v_mf;    // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2)
+    vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli)
+    real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1
+    real level2_prob_choice2; // Initialize prob.
of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_a1; + real mu_beta1; + real mu_a2; + real mu_beta2; + real mu_pi; + real mu_w; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred_step1[N,T]; + real y_pred_step2[N,T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred_step1[i,t] = -1; + y_pred_step2[i,t] = -1; + } + } + + // Generate group level parameter values + mu_a1 = Phi_approx( mu_pr[1] ); + mu_beta1 = exp( mu_pr[2] ); + mu_a2 = Phi_approx( mu_pr[3] ); + mu_beta2 = exp( mu_pr[4] ); + mu_pi = Phi_approx( mu_pr[5] ) * 5; + mu_w = Phi_approx( mu_pr[6] ); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 + // Level 2 --> choose one of two level 2 options + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); + + // generate posterior prediction for current trial + y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); + y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); + + // After observing the reward at Level 2... 
+ // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop + } +} + diff --git a/R/inst/stan_files/ts_par7.stan b/R/inst/stan_files/ts_par7.stan new file mode 100644 index 00000000..089042c2 --- /dev/null +++ b/R/inst/stan_files/ts_par7.stan @@ -0,0 +1,217 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int level1_choice[N,T]; // 1: left, 2: right + int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 + int reward[N,T]; + real trans_prob; +} +transformed data { +} +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[7] mu_pr; + vector[7] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] a1_pr; + vector[N] beta1_pr; + vector[N] a2_pr; + vector[N] beta2_pr; + vector[N] pi_pr; + vector[N] w_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] a1; + vector[N] beta1; + vector[N] a2; + vector[N] beta2; + vector[N] pi; + vector[N] w; + vector[N] lambda; + + for (i in 1:N) { + a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); + beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); + a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); + beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); + pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; + w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); + lambda[i] = Phi_approx( mu_pr[7] + sigma[7] * lambda_pr[i] ); + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + a1_pr ~ normal(0, 1); + beta1_pr ~ normal(0, 1); + a2_pr ~ normal(0, 1); + beta2_pr ~ normal(0, 1); + pi_pr ~ normal(0, 1); + w_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_a1; + real mu_beta1; + real mu_a2; + real mu_beta2; + real mu_pi; + real mu_w; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred_step1[N,T]; + real y_pred_step2[N,T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred_step1[i,t] = -1; + y_pred_step2[i,t] = -1; + } + } + + // Generate group level parameter values + mu_a1 = Phi_approx( mu_pr[1] ); + mu_beta1 = exp( mu_pr[2] ); + mu_a2 = Phi_approx( mu_pr[3] ); + mu_beta2 = exp( mu_pr[4] ); + mu_pi = Phi_approx( mu_pr[5] ) * 5; + mu_w = Phi_approx( mu_pr[6] ); + mu_lambda = Phi_approx( mu_pr[7] ); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 + // Level 2 --> choose one of two level 2 options + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); + + // generate posterior prediction for current trial + y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); + y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); + + // After 
observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + } // end of t loop + } // end of i loop + } +} + diff --git a/R/inst/stan_files/ug_bayes.stan b/R/inst/stan_files/ug_bayes.stan new file mode 100644 index 00000000..6136e708 --- /dev/null +++ b/R/inst/stan_files/ug_bayes.stan @@ -0,0 +1,167 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real offer[N, T]; + int accept[N, T]; +} + +transformed data { + real initV; + real mu0; + real k0; + real sig20; + real nu0; + + initV = 0.0; + mu0 = 10.0; // initial expectation + k0 = 4.0; + sig20 = 4.0; + nu0 = 10.0; +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // alpha: envy + vector[N] beta_pr; // beta: guilt + vector[N] tau_pr; // tau: inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + real alpha[N]; + real beta[N]; + real tau[N]; + + for (i in 1:N) { + alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; + tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + alpha_pr ~ normal(0, 1.0); + beta_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + real util; + real mu_old; + real mu_new; + real k_old; + real k_new; + real sig2_old; + real sig2_new; + real nu_old; + real nu_new; + real PE; // not required for computation + + // Initialize values + mu_old = mu0; + k_old = k0; + sig2_old = sig20; + nu_old = nu0; + + for (t in 1:Tsubj[i]) { + k_new = k_old + 1; + nu_new = nu_old + 1; + mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; + sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); + + PE = offer[i, t] - mu_old; + util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); + + accept[i, t] ~ bernoulli_logit(util * tau[i]); + + // replace old ones with new ones + mu_old = mu_new; + sig2_old = sig2_new; + k_old = k_new; + nu_old = nu_new; + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_tau; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 20; + mu_beta = Phi_approx(mu_pr[2]) * 10; + mu_tau = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real util; + real mu_old; + real mu_new; + real k_old; + real k_new; + real sig2_old; + real sig2_new; + real nu_old; + real nu_new; + real PE; // not required for computation + + // Initialize values + mu_old = mu0; + k_old = k0; + sig2_old = sig20; + nu_old = nu0; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + k_new = k_old + 1; + nu_new = nu_old + 
1; + mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; + sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); + + PE = offer[i, t] - mu_old; + util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); + + log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); + + // replace old ones with new ones + mu_old = mu_new; + sig2_old = sig2_new; + k_old = k_new; + nu_old = nu_new; + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/R/inst/stan_files/ug_delta.stan b/R/inst/stan_files/ug_delta.stan new file mode 100644 index 00000000..9bb70e0a --- /dev/null +++ b/R/inst/stan_files/ug_delta.stan @@ -0,0 +1,129 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real offer[N, T]; + int accept[N, T]; +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // alpha: Envy (sensitivity to norm prediction error) + vector[N] tau_pr; // tau: Inverse temperature + vector[N] ep_pr; // ep: Norm adaptation rate +} + +transformed parameters { + // Transform subject-level raw parameters + real alpha[N]; + real tau[N]; + real ep[N]; + + for (i in 1:N) { + alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; + tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 10; + ep[i] = Phi_approx(mu_pr[3] + sigma[3] * ep_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + alpha_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + real f; // Internal norm + real PE; // Prediction error + real util; // Utility of offer + + // Initialize values + f = 10.0; + + for (t in 1:Tsubj[i]) { + // calculate prediction error + PE = offer[i, t] - f; + + // Update utility + util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); + + // Sampling statement + accept[i, t] ~ bernoulli_logit(util * tau[i]); + + // Update internal norm + f += ep[i] * PE; + + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_tau; + real mu_ep; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 20; + mu_tau = Phi_approx(mu_pr[2]) * 10; + mu_ep = Phi_approx(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real f; // Internal norm + real PE; // prediction error + real util; // Utility of offer + + // Initialize values + f = 10.0; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // calculate prediction error + PE = offer[i, t] - f; + + // Update utility + util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); + + // Calculate log likelihood + log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); + + // Update internal norm + f += ep[i] * PE; + + } // end of t loop 
+ } // end of i loop + } // end of local section +} + diff --git a/R/inst/stan_files/wcs_sql.stan b/R/inst/stan_files/wcs_sql.stan new file mode 100644 index 00000000..81b8ce17 --- /dev/null +++ b/R/inst/stan_files/wcs_sql.stan @@ -0,0 +1,176 @@ +#include /pre/license.stan + +data { + int N; // number of subjects + int T; // max trial + int Tsubj[N]; // number of max trials per subject + + int choice[N, 4, T]; // subject's deck choice within a trial (1, 2, 3 and 4) + int outcome[N, T]; // whether subject's choice is correct or not within a trial (1 and 0) + matrix[1, 3] choice_match_att[N, T]; // indicates which dimension the chosen card matches to within a trial + matrix[3, 4] deck_match_rule[T]; // indicates which dimension(color, form, number) each of the 4 decks matches to within a trial +} + +transformed data { + matrix[1, 3] initAtt; // each subject start with an even attention to each dimension + matrix[1, 3] unit; // used to flip attention after punishing feedback inside the model + + initAtt = rep_matrix(1.0/3.0, 1, 3); + unit = rep_matrix(1.0, 1, 3); +} + +parameters { + // hyper parameters + vector[3] mu_pr; + vector[3] sigma; + + // subject-level raw parameters (for Matt trick) + vector[N] r_pr; // sensitivity to rewarding feedback (reward learning rate) + vector[N] p_pr; // sensitivity to punishing feedback (punishment learning rate) + vector[N] d_pr; // decision consistency (inverse temperature) +} + +transformed parameters { + // transform subject-level raw parameters + vector[N] r; + vector[N] p; + vector[N] d; + + for (i in 1:N) { + r[i] = Phi_approx( mu_pr[1] + sigma[1] * r_pr[i] ); + p[i] = Phi_approx( mu_pr[2] + sigma[2] * p_pr[i] ); + d[i] = Phi_approx( mu_pr[3] + sigma[3] * d_pr[i] ) * 5; + } +} + +model { + // hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + p_pr ~ normal(0, 1); + d_pr ~ normal(0, 1); + + for (i in 1:N) { + // define values + vector[4] pred_prob_mat; // predicted probability of choosing a deck in each trial based on attention + matrix[1, 3] subj_att; // subject's attention to each dimension + matrix[1, 3] att_signal; // signal where a subject has to pay attention after reward/punishment + real sum_att_signal; // temporary variable to calculate sum(att_signal) + matrix[1, 3] tmpatt; // temporary variable to calculate subj_att + vector[4] tmpp; // temporary variable to calculate pred_prob_mat + + // initiate values + subj_att = initAtt; + pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); + + for (t in 1:Tsubj[i]) { + // multinomial choice + choice[i,,t] ~ multinomial(pred_prob_mat); + + // re-distribute attention after getting a feedback + if (outcome[i,t] == 1) { + att_signal = subj_att .* choice_match_att[i,t]; + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; + } else { + att_signal = subj_att .* (unit - choice_match_att[i,t]); + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; + } + + // scaling to avoid log(0) + subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; + + tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); + tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); + tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); + + // repeat until the final trial + if (t < Tsubj[i]) { + tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; + pred_prob_mat = tmpp/sum(tmpp); + } + + } // end of trial loop + } // end of subject loop +} +generated quantities { + // for 
group level parameters + real mu_r; + real mu_p; + real mu_d; + + // for log-likelihood calculation + real log_lik[N]; + + // for posterior predictive check + int y_pred[N, 4, T]; + + // initiate the variable to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + for (deck in 1:4) { + y_pred[i,deck,t] = -1; + } + } + } + + mu_r = Phi_approx(mu_pr[1]); + mu_p = Phi_approx(mu_pr[2]); + mu_d = Phi_approx(mu_pr[3]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + matrix[1, 3] subj_att; + matrix[1, 3] att_signal; + vector[4] pred_prob_mat; + + matrix[1, 3] tmpatt; + vector[4] tmpp; + + real sum_att_signal; + + subj_att = initAtt; + pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + + log_lik[i] += multinomial_lpmf(choice[i,,t] | pred_prob_mat); + + y_pred[i,,t] = multinomial_rng(pred_prob_mat, 1); + + if(outcome[i,t] == 1) { + att_signal = subj_att .* choice_match_att[i,t]; + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; + } else { + att_signal = subj_att .* (unit - choice_match_att[i,t]); + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; + } + + subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; + + tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); + tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); + tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); + + if(t < Tsubj[i]) { + tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; + pred_prob_mat = tmpp/sum(tmpp); + } + + } // end of trial loop + } // end of subject loop + } // end of local section +} + diff --git a/man-roxygen/model-documentation.md b/R/man-roxygen/README.md similarity index 100% rename from man-roxygen/model-documentation.md rename to R/man-roxygen/README.md diff --git a/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R similarity index 100% rename from man-roxygen/model-documentation.R rename to R/man-roxygen/model-documentation.R diff --git a/man/HDIofMCMC.Rd b/R/man/HDIofMCMC.Rd similarity index 100% rename from man/HDIofMCMC.Rd rename to R/man/HDIofMCMC.Rd diff --git a/man/bandit2arm_delta.Rd b/R/man/bandit2arm_delta.Rd similarity index 100% rename from man/bandit2arm_delta.Rd rename to R/man/bandit2arm_delta.Rd diff --git a/man/bandit4arm2_kalman_filter.Rd b/R/man/bandit4arm2_kalman_filter.Rd similarity index 100% rename from man/bandit4arm2_kalman_filter.Rd rename to R/man/bandit4arm2_kalman_filter.Rd diff --git a/man/bandit4arm_2par_lapse.Rd b/R/man/bandit4arm_2par_lapse.Rd similarity index 100% rename from man/bandit4arm_2par_lapse.Rd rename to R/man/bandit4arm_2par_lapse.Rd diff --git a/man/bandit4arm_4par.Rd b/R/man/bandit4arm_4par.Rd similarity index 100% rename from man/bandit4arm_4par.Rd rename to R/man/bandit4arm_4par.Rd diff --git a/man/bandit4arm_lapse.Rd b/R/man/bandit4arm_lapse.Rd similarity index 100% rename from man/bandit4arm_lapse.Rd rename to R/man/bandit4arm_lapse.Rd diff --git a/man/bandit4arm_lapse_decay.Rd b/R/man/bandit4arm_lapse_decay.Rd similarity index 100% rename from man/bandit4arm_lapse_decay.Rd rename to R/man/bandit4arm_lapse_decay.Rd diff --git a/man/bandit4arm_singleA_lapse.Rd b/R/man/bandit4arm_singleA_lapse.Rd similarity index 100% rename from man/bandit4arm_singleA_lapse.Rd rename to R/man/bandit4arm_singleA_lapse.Rd diff --git a/man/bart_par4.Rd b/R/man/bart_par4.Rd similarity index 100% rename from man/bart_par4.Rd rename to R/man/bart_par4.Rd diff --git 
a/man/choiceRT_ddm.Rd b/R/man/choiceRT_ddm.Rd similarity index 100% rename from man/choiceRT_ddm.Rd rename to R/man/choiceRT_ddm.Rd diff --git a/man/choiceRT_ddm_single.Rd b/R/man/choiceRT_ddm_single.Rd similarity index 100% rename from man/choiceRT_ddm_single.Rd rename to R/man/choiceRT_ddm_single.Rd diff --git a/man/choiceRT_lba.Rd b/R/man/choiceRT_lba.Rd similarity index 100% rename from man/choiceRT_lba.Rd rename to R/man/choiceRT_lba.Rd diff --git a/man/choiceRT_lba_single.Rd b/R/man/choiceRT_lba_single.Rd similarity index 100% rename from man/choiceRT_lba_single.Rd rename to R/man/choiceRT_lba_single.Rd diff --git a/man/cra_exp.Rd b/R/man/cra_exp.Rd similarity index 100% rename from man/cra_exp.Rd rename to R/man/cra_exp.Rd diff --git a/man/cra_linear.Rd b/R/man/cra_linear.Rd similarity index 100% rename from man/cra_linear.Rd rename to R/man/cra_linear.Rd diff --git a/man/dbdm_prob_weight.Rd b/R/man/dbdm_prob_weight.Rd similarity index 100% rename from man/dbdm_prob_weight.Rd rename to R/man/dbdm_prob_weight.Rd diff --git a/man/dd_cs.Rd b/R/man/dd_cs.Rd similarity index 100% rename from man/dd_cs.Rd rename to R/man/dd_cs.Rd diff --git a/man/dd_cs_single.Rd b/R/man/dd_cs_single.Rd similarity index 100% rename from man/dd_cs_single.Rd rename to R/man/dd_cs_single.Rd diff --git a/man/dd_exp.Rd b/R/man/dd_exp.Rd similarity index 100% rename from man/dd_exp.Rd rename to R/man/dd_exp.Rd diff --git a/man/dd_hyperbolic.Rd b/R/man/dd_hyperbolic.Rd similarity index 100% rename from man/dd_hyperbolic.Rd rename to R/man/dd_hyperbolic.Rd diff --git a/man/dd_hyperbolic_single.Rd b/R/man/dd_hyperbolic_single.Rd similarity index 100% rename from man/dd_hyperbolic_single.Rd rename to R/man/dd_hyperbolic_single.Rd diff --git a/man/estimate_mode.Rd b/R/man/estimate_mode.Rd similarity index 100% rename from man/estimate_mode.Rd rename to R/man/estimate_mode.Rd diff --git a/man/extract_ic.Rd b/R/man/extract_ic.Rd similarity index 100% rename from man/extract_ic.Rd rename to R/man/extract_ic.Rd diff --git a/man/gng_m1.Rd b/R/man/gng_m1.Rd similarity index 100% rename from man/gng_m1.Rd rename to R/man/gng_m1.Rd diff --git a/man/gng_m2.Rd b/R/man/gng_m2.Rd similarity index 100% rename from man/gng_m2.Rd rename to R/man/gng_m2.Rd diff --git a/man/gng_m3.Rd b/R/man/gng_m3.Rd similarity index 100% rename from man/gng_m3.Rd rename to R/man/gng_m3.Rd diff --git a/man/gng_m4.Rd b/R/man/gng_m4.Rd similarity index 100% rename from man/gng_m4.Rd rename to R/man/gng_m4.Rd diff --git a/man/hBayesDM-package.Rd b/R/man/hBayesDM-package.Rd similarity index 100% rename from man/hBayesDM-package.Rd rename to R/man/hBayesDM-package.Rd diff --git a/man/hBayesDM_model.Rd b/R/man/hBayesDM_model.Rd similarity index 100% rename from man/hBayesDM_model.Rd rename to R/man/hBayesDM_model.Rd diff --git a/man/igt_orl.Rd b/R/man/igt_orl.Rd similarity index 100% rename from man/igt_orl.Rd rename to R/man/igt_orl.Rd diff --git a/man/igt_pvl_decay.Rd b/R/man/igt_pvl_decay.Rd similarity index 100% rename from man/igt_pvl_decay.Rd rename to R/man/igt_pvl_decay.Rd diff --git a/man/igt_pvl_delta.Rd b/R/man/igt_pvl_delta.Rd similarity index 100% rename from man/igt_pvl_delta.Rd rename to R/man/igt_pvl_delta.Rd diff --git a/man/igt_vpp.Rd b/R/man/igt_vpp.Rd similarity index 100% rename from man/igt_vpp.Rd rename to R/man/igt_vpp.Rd diff --git a/man/multiplot.Rd b/R/man/multiplot.Rd similarity index 100% rename from man/multiplot.Rd rename to R/man/multiplot.Rd diff --git a/man/peer_ocu.Rd b/R/man/peer_ocu.Rd similarity index 100% rename from 
man/peer_ocu.Rd rename to R/man/peer_ocu.Rd diff --git a/man/plot.hBayesDM.Rd b/R/man/plot.hBayesDM.Rd similarity index 100% rename from man/plot.hBayesDM.Rd rename to R/man/plot.hBayesDM.Rd diff --git a/man/plotDist.Rd b/R/man/plotDist.Rd similarity index 100% rename from man/plotDist.Rd rename to R/man/plotDist.Rd diff --git a/man/plotHDI.Rd b/R/man/plotHDI.Rd similarity index 100% rename from man/plotHDI.Rd rename to R/man/plotHDI.Rd diff --git a/man/plotInd.Rd b/R/man/plotInd.Rd similarity index 100% rename from man/plotInd.Rd rename to R/man/plotInd.Rd diff --git a/man/printFit.Rd b/R/man/printFit.Rd similarity index 100% rename from man/printFit.Rd rename to R/man/printFit.Rd diff --git a/man/prl_ewa.Rd b/R/man/prl_ewa.Rd similarity index 100% rename from man/prl_ewa.Rd rename to R/man/prl_ewa.Rd diff --git a/man/prl_fictitious.Rd b/R/man/prl_fictitious.Rd similarity index 100% rename from man/prl_fictitious.Rd rename to R/man/prl_fictitious.Rd diff --git a/man/prl_fictitious_multipleB.Rd b/R/man/prl_fictitious_multipleB.Rd similarity index 100% rename from man/prl_fictitious_multipleB.Rd rename to R/man/prl_fictitious_multipleB.Rd diff --git a/man/prl_fictitious_rp.Rd b/R/man/prl_fictitious_rp.Rd similarity index 100% rename from man/prl_fictitious_rp.Rd rename to R/man/prl_fictitious_rp.Rd diff --git a/man/prl_fictitious_rp_woa.Rd b/R/man/prl_fictitious_rp_woa.Rd similarity index 100% rename from man/prl_fictitious_rp_woa.Rd rename to R/man/prl_fictitious_rp_woa.Rd diff --git a/man/prl_fictitious_woa.Rd b/R/man/prl_fictitious_woa.Rd similarity index 100% rename from man/prl_fictitious_woa.Rd rename to R/man/prl_fictitious_woa.Rd diff --git a/man/prl_rp.Rd b/R/man/prl_rp.Rd similarity index 100% rename from man/prl_rp.Rd rename to R/man/prl_rp.Rd diff --git a/man/prl_rp_multipleB.Rd b/R/man/prl_rp_multipleB.Rd similarity index 100% rename from man/prl_rp_multipleB.Rd rename to R/man/prl_rp_multipleB.Rd diff --git a/man/pst_gainloss_Q.Rd b/R/man/pst_gainloss_Q.Rd similarity index 100% rename from man/pst_gainloss_Q.Rd rename to R/man/pst_gainloss_Q.Rd diff --git a/man/ra_noLA.Rd b/R/man/ra_noLA.Rd similarity index 100% rename from man/ra_noLA.Rd rename to R/man/ra_noLA.Rd diff --git a/man/ra_noRA.Rd b/R/man/ra_noRA.Rd similarity index 100% rename from man/ra_noRA.Rd rename to R/man/ra_noRA.Rd diff --git a/man/ra_prospect.Rd b/R/man/ra_prospect.Rd similarity index 100% rename from man/ra_prospect.Rd rename to R/man/ra_prospect.Rd diff --git a/man/rdt_happiness.Rd b/R/man/rdt_happiness.Rd similarity index 100% rename from man/rdt_happiness.Rd rename to R/man/rdt_happiness.Rd diff --git a/man/rhat.Rd b/R/man/rhat.Rd similarity index 100% rename from man/rhat.Rd rename to R/man/rhat.Rd diff --git a/man/ts_par4.Rd b/R/man/ts_par4.Rd similarity index 100% rename from man/ts_par4.Rd rename to R/man/ts_par4.Rd diff --git a/man/ts_par6.Rd b/R/man/ts_par6.Rd similarity index 100% rename from man/ts_par6.Rd rename to R/man/ts_par6.Rd diff --git a/man/ts_par7.Rd b/R/man/ts_par7.Rd similarity index 100% rename from man/ts_par7.Rd rename to R/man/ts_par7.Rd diff --git a/man/ug_bayes.Rd b/R/man/ug_bayes.Rd similarity index 100% rename from man/ug_bayes.Rd rename to R/man/ug_bayes.Rd diff --git a/man/ug_delta.Rd b/R/man/ug_delta.Rd similarity index 100% rename from man/ug_delta.Rd rename to R/man/ug_delta.Rd diff --git a/man/wcs_sql.Rd b/R/man/wcs_sql.Rd similarity index 100% rename from man/wcs_sql.Rd rename to R/man/wcs_sql.Rd diff --git a/src/Makevars b/R/src/Makevars similarity index 100% rename 
from src/Makevars rename to R/src/Makevars diff --git a/src/Makevars.win b/R/src/Makevars.win similarity index 100% rename from src/Makevars.win rename to R/src/Makevars.win diff --git a/src/init.cpp b/R/src/init.cpp similarity index 100% rename from src/init.cpp rename to R/src/init.cpp diff --git a/tools/make_cc.R b/R/tools/make_cc.R similarity index 100% rename from tools/make_cc.R rename to R/tools/make_cc.R diff --git a/inst/common b/inst/common deleted file mode 160000 index bf544254..00000000 --- a/inst/common +++ /dev/null @@ -1 +0,0 @@ -Subproject commit bf544254404e303f43355180018e6534fe133bb0 From 5e1ce3bf78e8abdde219c1cb83d5a62717ae9e27 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 30 Apr 2019 18:15:32 +0900 Subject: [PATCH 009/163] Move JSON-related into JSON/ --- JSON/ModelInformation.schema.json | 171 ++++++++++++++++++++ JSON/PY_CODE_TEMPLATE.txt | 77 +++++++++ JSON/PY_DOCSTRING_TEMPLATE.txt | 155 ++++++++++++++++++ JSON/README.md | 152 ++++++++++++++++++ JSON/ValidateAll.sh | 7 + JSON/WritePython.py | 238 ++++++++++++++++++++++++++++ JSON/bandit2arm_delta.json | 39 +++++ JSON/bandit4arm2_kalman_filter.json | 60 +++++++ JSON/bandit4arm_2par_lapse.json | 43 +++++ JSON/bandit4arm_4par.json | 47 ++++++ JSON/bandit4arm_lapse.json | 51 ++++++ JSON/bandit4arm_lapse_decay.json | 55 +++++++ JSON/bandit4arm_singleA_lapse.json | 47 ++++++ JSON/bart_par4.json | 77 +++++++++ JSON/choiceRT_ddm.json | 55 +++++++ JSON/choiceRT_ddm_single.json | 55 +++++++ JSON/cra_exp.json | 56 +++++++ JSON/cra_linear.json | 56 +++++++ JSON/dbdm_prob_weight.json | 59 +++++++ JSON/dd_cs.json | 45 ++++++ JSON/dd_cs_single.json | 45 ++++++ JSON/dd_exp.json | 41 +++++ JSON/dd_hyperbolic.json | 41 +++++ JSON/dd_hyperbolic_single.json | 41 +++++ JSON/gng_m1.json | 48 ++++++ JSON/gng_m2.json | 52 ++++++ JSON/gng_m3.json | 57 +++++++ JSON/gng_m4.json | 61 +++++++ JSON/igt_orl.json | 65 ++++++++ JSON/igt_pvl_decay.json | 55 +++++++ JSON/igt_pvl_delta.json | 55 +++++++ JSON/igt_vpp.json | 71 +++++++++ JSON/peer_ocu.json | 53 +++++++ JSON/prl_ewa.json | 58 +++++++ JSON/prl_fictitious.json | 59 +++++++ JSON/prl_fictitious_multipleB.json | 60 +++++++ JSON/prl_fictitious_rp.json | 64 ++++++++ JSON/prl_fictitious_rp_woa.json | 60 +++++++ JSON/prl_fictitious_woa.json | 55 +++++++ JSON/prl_rp.json | 57 +++++++ JSON/prl_rp_multipleB.json | 58 +++++++ JSON/pst_gainloss_Q.json | 49 ++++++ JSON/ra_noLA.json | 40 +++++ JSON/ra_noRA.json | 40 +++++ JSON/ra_prospect.json | 44 +++++ JSON/rdt_happiness.json | 66 ++++++++ JSON/ts_par4.json | 62 ++++++++ JSON/ts_par6.json | 69 ++++++++ JSON/ts_par7.json | 73 +++++++++ JSON/ug_bayes.json | 42 +++++ JSON/ug_delta.json | 42 +++++ JSON/wcs_sql.json | 48 ++++++ 52 files changed, 3276 insertions(+) create mode 100644 JSON/ModelInformation.schema.json create mode 100644 JSON/PY_CODE_TEMPLATE.txt create mode 100644 JSON/PY_DOCSTRING_TEMPLATE.txt create mode 100644 JSON/README.md create mode 100755 JSON/ValidateAll.sh create mode 100755 JSON/WritePython.py create mode 100644 JSON/bandit2arm_delta.json create mode 100644 JSON/bandit4arm2_kalman_filter.json create mode 100644 JSON/bandit4arm_2par_lapse.json create mode 100644 JSON/bandit4arm_4par.json create mode 100644 JSON/bandit4arm_lapse.json create mode 100644 JSON/bandit4arm_lapse_decay.json create mode 100644 JSON/bandit4arm_singleA_lapse.json create mode 100644 JSON/bart_par4.json create mode 100644 JSON/choiceRT_ddm.json create mode 100644 JSON/choiceRT_ddm_single.json create mode 100644 JSON/cra_exp.json create mode 100644 
JSON/cra_linear.json create mode 100644 JSON/dbdm_prob_weight.json create mode 100644 JSON/dd_cs.json create mode 100644 JSON/dd_cs_single.json create mode 100644 JSON/dd_exp.json create mode 100644 JSON/dd_hyperbolic.json create mode 100644 JSON/dd_hyperbolic_single.json create mode 100644 JSON/gng_m1.json create mode 100644 JSON/gng_m2.json create mode 100644 JSON/gng_m3.json create mode 100644 JSON/gng_m4.json create mode 100644 JSON/igt_orl.json create mode 100644 JSON/igt_pvl_decay.json create mode 100644 JSON/igt_pvl_delta.json create mode 100644 JSON/igt_vpp.json create mode 100644 JSON/peer_ocu.json create mode 100644 JSON/prl_ewa.json create mode 100644 JSON/prl_fictitious.json create mode 100644 JSON/prl_fictitious_multipleB.json create mode 100644 JSON/prl_fictitious_rp.json create mode 100644 JSON/prl_fictitious_rp_woa.json create mode 100644 JSON/prl_fictitious_woa.json create mode 100644 JSON/prl_rp.json create mode 100644 JSON/prl_rp_multipleB.json create mode 100644 JSON/pst_gainloss_Q.json create mode 100644 JSON/ra_noLA.json create mode 100644 JSON/ra_noRA.json create mode 100644 JSON/ra_prospect.json create mode 100644 JSON/rdt_happiness.json create mode 100644 JSON/ts_par4.json create mode 100644 JSON/ts_par6.json create mode 100644 JSON/ts_par7.json create mode 100644 JSON/ug_bayes.json create mode 100644 JSON/ug_delta.json create mode 100644 JSON/wcs_sql.json diff --git a/JSON/ModelInformation.schema.json b/JSON/ModelInformation.schema.json new file mode 100644 index 00000000..dd760b3f --- /dev/null +++ b/JSON/ModelInformation.schema.json @@ -0,0 +1,171 @@ +{ + "title": "Model Information Schema", + "description": "Written by Jethro Lee", + "type": "object", + "required": ["task_name", "model_name", "model_type", "notes", "contributors", "data_columns", "parameters", "regressors", "postpreds", "additional_args"], + "properties": { + "task_name": { + "$ref": "#/definitions/_name" + }, + "model_name": { + "$ref": "#/definitions/_name" + }, + "model_type": { + "type": "object", + "enum": [ + { + "code": "", + "desc": "Hierarchical" + }, + { + "code": "single", + "desc": "Individual" + }, + { + "code": "multipleB", + "desc": "Multiple-Block Hierarchical" + } + ] + }, + "notes": { + "type": "array", + "items": { + "type": "string", + "minLength": 1 + } + }, + "contributors": { + "type": "array", + "items": { + "type": "object", + "required": ["name", "email", "link"], + "properties": { + "name": { + "type": "string", + "minLength": 1 + }, + "email": { + "type": "string", + "minLength": 1, + "format": "email" + }, + "link": { + "type": "string", + "minLength": 1, + "format": "uri" + } + }, + "additionalProperties": false + } + }, + "data_columns": { + "type": "object", + "required": ["subjID"], + "patternProperties": { + "^[a-zA-Z0-9_]+$": { + "type": "string", + "minLength": 1 + } + }, + "additionalProperties": false + }, + "parameters": { + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9_]+$": { + "type": "object", + "required": ["desc", "info"], + "properties": { + "desc": { + "type": "string", + "minLength": 1 + }, + "info": { + "type": "array", + "minItems": 3, + "maxItems": 3, + "items": { + "type": ["number", "string", "null"], + "description": "**Edit below to add more allowed patterns**", + "pattern": "^(-?Inf|exp\\([0-9.]+\\))$" + } + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + "regressors": { + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9_]+$": { + "type": "integer", + "description": "**Edit below 
to add more allowed values**", + "enum": [2, 3] + } + }, + "additionalProperties": false + }, + "postpreds": { + "type": "array", + "items": { + "type": "string", + "description": "**Edit below to add more allowed values**", + "enum": ["y_pred", "y_pred_step1", "y_pred_step2"] + } + }, + "additional_args": { + "type": "array", + "items": { + "type": "object", + "required": ["code", "default", "desc"], + "properties": { + "code": { + "type": "string", + "pattern": "^[a-zA-Z0-9_]+$" + }, + "default": { + "type": "number" + }, + "desc": { + "type": "string", + "minLength": 1 + } + }, + "additionalProperties": false + } + } + }, + "additionalProperties": false, + "if": { + "properties": {"model_type": {"properties": {"code": {"const": "multipleB"}}}} + }, + "then": { + "properties": {"data_columns": {"required": ["block"]}} + }, + "definitions": { + "_name": { + "type": "object", + "required": ["code", "desc", "cite"], + "properties": { + "code": { + "type": "string", + "pattern": "^[a-zA-Z0-9_]+$" + }, + "desc": { + "type": "string", + "minLength": 1 + }, + "cite": { + "type": "array", + "items": { + "type": "string", + "minLength": 1 + } + } + }, + "additionalProperties": false + } + } +} diff --git a/JSON/PY_CODE_TEMPLATE.txt b/JSON/PY_CODE_TEMPLATE.txt new file mode 100644 index 00000000..c90f8cf2 --- /dev/null +++ b/JSON/PY_CODE_TEMPLATE.txt @@ -0,0 +1,77 @@ +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import {prefix_preprocess_func}_preprocess_func + +__all__ = ['{model_function}'] + + +class {class_name}(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='{task_name}', + model_name='{model_name}', + model_type='{model_type}', + data_columns=( + {data_columns} + ), + parameters=OrderedDict([ + {parameters} + ]), + regressors=OrderedDict([ + {regressors} + ]), + postpreds=[{postpreds}], + parameters_desc=OrderedDict([ + {parameters_desc} + ]), + additional_args_desc=OrderedDict([ + {additional_args_desc} + ]), + **kwargs, + ) + + _preprocess_func = {prefix_preprocess_func}_preprocess_func + + +def {model_function}( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """{docstring_template} """ + return {class_name}( + example=example, + datafile=datafile, + data=data, + niter=niter, + nwarmup=nwarmup, + nchain=nchain, + ncore=ncore, + nthin=nthin, + inits=inits, + ind_pars=ind_pars, + model_regressor=model_regressor, + vb=vb, + inc_postpred=inc_postpred, + adapt_delta=adapt_delta, + stepsize=stepsize, + max_treedepth=max_treedepth, + **additional_args) diff --git a/JSON/PY_DOCSTRING_TEMPLATE.txt b/JSON/PY_DOCSTRING_TEMPLATE.txt new file mode 100644 index 00000000..6517f605 --- /dev/null +++ b/JSON/PY_DOCSTRING_TEMPLATE.txt @@ -0,0 +1,155 @@ +{task_name} - {model_name} + + {model_type} Bayesian Modeling of the {task_name} {task_cite_short} + using {model_name} {model_cite_short} with the following parameters: + {parameters}. 
+ + {notes} + + {task_cite_long} + {model_cite_long} + + {contributors} + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the {task_name}, there should be {data_columns_len} columns of data + with the labels {data_columns}. It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + {data_columns_details} + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: {data_columns}. + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: {data_columns}. + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. {model_regressor_parameter}. 
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        {postpreds}Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Integer value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying how many leapfrog steps the MCMC sampler can take
+        on each new iteration. See note below.
+    **additional_args
+        {additional_args}
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('{model_function}').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        {model_regressor_return}
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = {model_function}(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
diff --git a/JSON/README.md b/JSON/README.md
new file mode 100644
index 00000000..fc89c0a6
--- /dev/null
+++ b/JSON/README.md
@@ -0,0 +1,152 @@
+# Model Information JSON Files
+
+Contributed by [Jethro Lee][jethro-lee].
+
+[jethro-lee]: https://github.com/dlemfh
+
+## JSON Schema
+
+The schema for the Model Information JSON files is stored in `ModelInformation.schema.json`, in JSON Schema format.
+
+| Property | Type | Description
+|-------------------|---------------------|----------------------------------|
+| `task_name` | Object | Information about the task. *See below for **Keys** and **Values**.*
+| `model_name` | Object | Information about the model. *See below for **Keys** and **Values**.*
+| `model_type` | Object | Modeling-type information. Should be one of the following three:<br>- `{"code": "", "desc": "Hierarchical"}`<br>- `{"code": "single", "desc": "Individual"}`<br>- `{"code": "multipleB", "desc": "Multiple-Block Hierarchical"}`
- `{"code": "", "desc": "Hierarchical"}`
- `{"code": "single", "desc": "Individual"}`
- `{"code": "multipleB", "desc": "Multiple-Block Hierarchical"}` +| `notes` | Array of Strings | Optional notes about the task/model. Give empty array `[]` if unused. +| `contributors` | Array of Objects | Optional specifying of contributors. Give empty array `[]` if unused. +| `data_columns` | Object | **Keys**: names of the necessary data columns for user data.
- `"subjID"` must always be included.
- Also include `"block"`, if modeling-type is "multipleB".
**Values**: one-line descriptions about each data column. +| `parameters` | Object (of Objects) | **Keys**: names of the parameters of this model.
**Values**: inner-level Object specifying desc and info for each parameter. +| `regressors` | Object | *(Give empty object `{}` if not supported.)*
**Keys**: names of the regressors of this model.
**Values**: extracted dimension-size for each regressor. +| `postpreds` | Array of Strings | Name(s) of posterior predictions. Give empty array `[]` if not supported. +| `additional_args` | Array of Objects | Specifying of additional arguments, if any. Give empty array `[]` if unused. + +*\* Note that all outermost-level properties are required properties. Assign empty values (`[]` or `{}`) to them if unused.* +*\* Refer below for inner-level Object specifications.* + +
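+To make the outer structure concrete before the inner-level objects are detailed below, here is a minimal sketch of a conforming file written as a Python dict. The `mytask`/`mymodel` codes and the single `tau` parameter are hypothetical placeholders, not a real hBayesDM model:
+
+```python
+import json
+
+# Illustrative skeleton: every required outermost property is present,
+# with empty values ([] or {}) assigned to the unused ones.
+model_info = {
+    "task_name": {"code": "mytask", "desc": "My Task", "cite": []},
+    "model_name": {"code": "mymodel", "desc": "My Model", "cite": []},
+    "model_type": {"code": "", "desc": "Hierarchical"},
+    "notes": [],
+    "contributors": [],
+    "data_columns": {"subjID": "A unique identifier for each subject in the data-set."},
+    "parameters": {"tau": {"desc": "inverse temperature", "info": [0, 1, 5]}},
+    "regressors": {},
+    "postpreds": ["y_pred"],
+    "additional_args": [],
+}
+
+with open("mytask_mymodel.json", "w") as f:
+    json.dump(model_info, f, indent=2)
+```
+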
task_name & model_name Object

+ +| Keys | Values +|----------|-------------------------------------| +| `"code"` | *(String)* Code for the task/model. +| `"desc"` | *(String)* Name of the task/model in title-case. +| `"cite"` | *(Array of Strings)* Citation(s) for the task/model. + +

+ +
model_type Object

+ +One of the following three: + +```json +{ + "code": "", + "desc": "Hierarchical" +} +``` +```json +{ + "code": "single", + "desc": "Individual" +} +``` +```json +{ + "code": "multipleB", + "desc": "Multiple-Block Hierarchical" +} +``` + +

+ +
(Inner-level) Contributor Object

+ +| Keys | Values +|-----------|-------------------------------------| +| `"name"` | *(String)* Name of the contributor. +| `"email"` | *(String)* Email address of the contributor. +| `"link"` | *(String)* Link to the contributor's page. + +

+ +
(Inner-level) Parameter Object

+ +| Keys | Values +|----------|---------------------------------------------------------| +| `"desc"` | *(String)* Description of the parameter in a few words. +| `"info"` | *(Length-3-Array)* **Lower bound**, **plausible value**, and **upper bound** of the parameter.
*\* See right below for allowed values.* + +*\* Allowed values (lower bound, plausible value, upper bound):* +- Numbers +- Strings: `"Inf"`, `"-Inf"`, `"exp([0-9.]+)"` +- `null` + +

+ +
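+Note that string values such as `"Inf"` or `"exp(0.7)"` are interpolated verbatim into the generated model code, which is why `PY_CODE_TEMPLATE.txt` imports `Inf` and `exp` from numpy. A rough sketch of the effect (the `eval` call here only mimics what happens when the generated module is imported; `WritePython.py` itself just writes text):
+
+```python
+from numpy import Inf, exp  # the same names the generated code imports
+
+info = [0, 1, "Inf"]                     # e.g. the 'tau' parameter of bart_par4
+emitted = ", ".join(map(str, info))      # the template writes: 0, 1, Inf
+lower, plausible, upper = eval(emitted)  # evaluated as Python on import
+print(lower, plausible, upper)           # -> 0 1 inf
+```
+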
+### (Inner-level) Additional_arg Object
+
+| Keys | Values
+|-------------|----------------------------------------------|
+| `"code"` | *(String)* Code for the additional argument.
+| `"default"` | *(Number)* Default value of the additional argument.
+| `"desc"` | *(String)* One-line description of the additional argument.
+

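+Beyond reading the tables above, a model JSON file can also be checked programmatically. The JSON Validation section below covers the `jsonschema` command-line tool; the same check is available through the library's Python API. A small sketch, assuming the schema and a model file (here `gng_m1.json`) sit in the current directory:
+
+```python
+import json
+
+from jsonschema import ValidationError, validate  # pip install jsonschema
+
+with open("ModelInformation.schema.json") as f:
+    schema = json.load(f)
+with open("gng_m1.json") as f:
+    model_info = json.load(f)
+
+try:
+    validate(instance=model_info, schema=schema)
+    print("gng_m1.json: OK")
+except ValidationError as err:
+    print("gng_m1.json: INVALID -", err.message)
+```
+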
+## JSON Examples
+
+These are some good examples to start with, if you are completely new.
+
+| [`gng_m1.json`](./gng_m1.json) | [`choiceRT_ddm_single.json`](./choiceRT_ddm_single.json) | [`prl_fictitious_multipleB.json`](./prl_fictitious_multipleB.json) | [`ts_par4.json`](./ts_par4.json)
+|-|-|-|-|
+|`task_name`<br>`model_name`<br>`model_type`<br>~~`notes`~~<br>~~`contributors`~~<br>`data_columns`<br>`parameters`<br>`regressors`<br>`postpreds`<br>~~`additional_args`~~ |`task_name`<br>`model_name`<br>`model_type`<br>`notes`<br>~~`contributors`~~<br>`data_columns`<br>`parameters`<br>~~`regressors`~~<br>~~`postpreds`~~<br>`additional_args` |`task_name`<br>`model_name`<br>`model_type`<br>~~`notes`~~<br>`contributors`<br>`data_columns`<br>`parameters`<br>`regressors`<br>`postpreds`<br>~~`additional_args`~~ |`task_name`<br>`model_name`<br>`model_type`<br>~~`notes`~~<br>`contributors`<br>`data_columns`<br>`parameters`<br>~~`regressors`~~<br>`postpreds`<br>`additional_args`
+
+## JSON Validation
+
+Validating against the current schema file is a good way to check that you've written a model JSON file correctly.
+To validate JSON files, you need to have [`jsonschema`][jsonschema] installed; you can install it with `pip install jsonschema`.
+
+[jsonschema]: https://github.com/Julian/jsonschema
+
+To validate a single JSON file (e.g. `gng_m1.json`):
+```
+$ jsonschema -i gng_m1.json ModelInformation.schema.json
+```
+
+To validate all JSON files in the directory, use the following shell script:
+```
+$ ./ValidateAll.sh
+```
+
+## Automated Python Code Generation
+
+Once you've (correctly) written the JSON file for a new model,
+you can automatically generate the corresponding Python code
+using the script `WritePython.py`:
+
+```
+$ ./WritePython.py -h
+usage: WritePython.py [-h] [-a] [-v] json_file
+
+positional arguments:
+  json_file      JSON file of the model to generate corresponding python code
+
+optional arguments:
+  -h, --help     show this help message and exit
+  -a, --all      write for all json files in directory
+  -v, --verbose  print output to stdout instead of writing to file
+```
+
+E.g. (to generate `_gng_m1.py` from `gng_m1.json`):
+```
+$ ./WritePython.py gng_m1.json
+Created file: _gng_m1.py
+```
+
+To generate Python code for all JSON files in the directory:
+```
+$ ./WritePython.py --all .
+Created file: _bandit2arm_delta.py
+...
+Created file: _wcs_sql.py
+```
diff --git a/JSON/ValidateAll.sh b/JSON/ValidateAll.sh
new file mode 100755
index 00000000..d228e151
--- /dev/null
+++ b/JSON/ValidateAll.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+# Written by Jethro Lee
+
+for i in `ls [a-z]*.json`; do
+  echo "========== $i =========="
+  jsonschema -i "$i" ModelInformation.schema.json
+done
diff --git a/JSON/WritePython.py b/JSON/WritePython.py
new file mode 100755
index 00000000..631c4b29
--- /dev/null
+++ b/JSON/WritePython.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python3
+"""
+Written by Jethro Lee.
+""" +import sys +import argparse +import glob +import json +import re +from pathlib import Path +from typing import List, Iterable, Callable +from collections import OrderedDict + + +def main(json_file, verbose): + # Make Path object for given filename + path_fn = Path(json_file) + + # Check if file exists + if not path_fn.exists(): + print('FileNotFound: Please specify existing json_file as argument.') + sys.exit(1) + + # Load json_file + with open(path_fn, 'r') as f: + model_info = json.load(f, object_pairs_hook=OrderedDict) + + # Model full name (Snake-case) + model_function = path_fn.name.replace('.json', '') + + # Model class name (Pascal-case) + class_name = model_function.title().replace('_', '') + + # Prefix to preprocess_func + prefix_preprocess_func = model_info['task_name']['code'] + if model_info['model_type']['code']: + prefix_preprocess_func += '_' + model_info['model_type']['code'] + + # Preprocess citations + def shortify(cite: str) -> str: + last_name = cite[:cite.find(',')].replace(' ', '_') + m = re.search('\\((\\d{4})\\)', cite) + year = m.group(1) if m else '' + return last_name + year + task_cite = OrderedDict( + (shortify(cite), cite) for cite in model_info['task_name']['cite']) + model_cite = OrderedDict( + (shortify(cite), cite) for cite in model_info['model_name']['cite']) + + # Read template for docstring + with open('PY_DOCSTRING_TEMPLATE.txt', 'r') as f: + docstring_template = f.read().format( + model_function=model_function, + task_name=model_info['task_name']['desc'], + task_cite_short=format_list( + task_cite, + fmt='[{}]_', + sep=', '), + task_cite_long=format_dict( + task_cite, + fmt='.. [{}] {}', + sep='\n '), + model_name=model_info['model_name']['desc'], + model_cite_short=format_list( + model_cite, + fmt='[{}]_', + sep=', '), + model_cite_long=format_dict( + OrderedDict((k, v) for k, v in model_cite.items() + if k not in task_cite), + fmt='.. [{}] {}', + sep='\n '), + model_type=model_info['model_type']['desc'], + notes=format_list( + model_info['notes'], + fmt='.. note::\n {}', + sep='\n\n '), + contributors=format_list_of_dict( + model_info['contributors'], + 'name', 'email', + fmt='.. 
codeauthor:: {} <{}>', + sep='\n '), + data_columns=format_list( + model_info['data_columns'], + fmt='"{}"', + sep=', '), + data_columns_len=len(model_info['data_columns']), + data_columns_details=format_dict( + model_info['data_columns'], + fmt='- "{}": {}', + sep='\n '), + parameters=format_dict( + model_info['parameters'], + fmt='"{}" ({})', + sep=', ', + pre=lambda v: v['desc']), + model_regressor_parameter=message_model_regressor_parameter( + model_info['regressors']), + model_regressor_return=message_model_regressor_return( + model_info['regressors']), + postpreds=message_postpreds(model_info['postpreds']), + additional_args=message_additional_args( + model_info['additional_args']), + ) + + # Read template for model python code + with open('PY_CODE_TEMPLATE.txt', 'r') as f: + code_template = f.read().format( + docstring_template=docstring_template, + model_function=model_function, + class_name=class_name, + prefix_preprocess_func=prefix_preprocess_func, + task_name=model_info['task_name']['code'], + model_name=model_info['model_name']['code'], + model_type=model_info['model_type']['code'], + data_columns=format_list( + model_info['data_columns'], + fmt="'{}',", + sep='\n '), + parameters=format_dict( + model_info['parameters'], + fmt="('{}', ({})),", + sep='\n ', + pre=lambda v: ', '.join(map(str, v['info']))), + regressors=format_dict( + model_info['regressors'], + fmt="('{}', {}),", + sep='\n '), + postpreds=format_list( + model_info['postpreds'], + fmt="'{}'", + sep=', '), + parameters_desc=format_dict( + model_info['parameters'], + fmt="('{}', '{}'),", + sep='\n ', + pre=lambda v: v['desc']), + additional_args_desc=format_list_of_dict( + model_info['additional_args'], + 'code', 'default', + fmt="('{}', {}),", + sep='\n '), + ) + + if verbose: + # Print code string to stdout + print(code_template) + else: + # Write model python code + code_fn = '_' + model_function + '.py' + with open(code_fn, 'w') as f: + f.write('"""\nGenerated by template. Do not edit by hand.\n"""\n') + f.write(code_template) + print('Created file: ' + code_fn) + + +def format_list(data: Iterable, + fmt: str, + sep: str) -> str: + return sep.join(map(fmt.format, data)) + + +def format_dict(data: OrderedDict, + fmt: str, + sep: str, + pre: Callable = lambda v: v) -> str: + return sep.join(fmt.format(k, pre(v)) for k, v in data.items()) + + +def format_list_of_dict(data: List[OrderedDict], + *keys: str, + fmt: str, + sep: str) -> str: + return sep.join(fmt.format(*(d[k] for k in keys)) for d in data) + + +def message_model_regressor_parameter(regressors: OrderedDict) -> str: + if regressors: + return 'For this model they are: ' + format_list( + regressors, fmt='"{}"', sep=', ') + else: + return 'Currently not available for this model' + + +def message_model_regressor_return(regressors: OrderedDict) -> str: + if regressors: + return ( + '- ``model_regressor``: ' + + 'Dict holding the extracted model-based regressors.') + else: + return '' + + +def message_postpreds(postpreds: List) -> str: + if not postpreds: + return '**(Currently not available.)** ' + else: + return '' + + +def message_additional_args(additional_args: List) -> str: + if additional_args: + return ( + 'For this model, it\'s possible to set the following model-' + + 'specific argument to a value that you may prefer.\n\n ' + + format_list_of_dict( + additional_args, + 'code', 'desc', + fmt='- ``{}``: {}', + sep='\n ')) + else: + return 'Not used for this model.' 
+ + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument( + '-a', '--all', + help='write for all json files in directory', + action='store_true') + parser.add_argument( + '-v', '--verbose', + help='print output to stdout instead of writing to file', + action='store_true') + parser.add_argument( + 'json_file', + help='JSON file of the model to generate corresponding python code', + type=str) + + args = parser.parse_args() + + if args.all: + # `all` flag overrides `json_file` & `verbose` + all_json_files = glob.glob('[a-z]*.json') + for json_fn in all_json_files: + main(json_fn, False) + else: + main(args.json_file, args.verbose) diff --git a/JSON/bandit2arm_delta.json b/JSON/bandit2arm_delta.json new file mode 100644 index 00000000..371ba5cb --- /dev/null +++ b/JSON/bandit2arm_delta.json @@ -0,0 +1,39 @@ +{ + "task_name": { + "code": "bandit2arm", + "desc": "2-Armed Bandit Task", + "cite": [ + "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683", + "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x" + ] + }, + "model_name": { + "code": "delta", + "desc": "Rescorla-Wagner (Delta) Model", + "cite": [] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1 or 2.", + "outcome": "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." + }, + "parameters": { + "A": { + "desc": "learning rate", + "info": [0, 0.5, 1] + }, + "tau": { + "desc": "inverse temperature", + "info": [0, 1, 5] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/bandit4arm2_kalman_filter.json b/JSON/bandit4arm2_kalman_filter.json new file mode 100644 index 00000000..3b20d664 --- /dev/null +++ b/JSON/bandit4arm2_kalman_filter.json @@ -0,0 +1,60 @@ +{ + "task_name": { + "code": "bandit4arm2", + "desc": "4-Armed Bandit Task (modified)", + "cite": [] + }, + "model_name": { + "code": "kalman_filter", + "desc": "Kalman Filter", + "cite": [ + "Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Yoonseo Zoh", + "email": "zohyos7@gmail.com", + "link": "https://zohyos7.github.io" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", + "outcome": "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." 
+ }, + "parameters": { + "lambda": { + "desc": "decay factor", + "info": [0, 0.9, 1] + }, + "theta": { + "desc": "decay center", + "info": [0, 50, 100] + }, + "beta": { + "desc": "inverse softmax temperature", + "info": [0, 0.1, 1] + }, + "mu0": { + "desc": "anticipated initial mean of all 4 options", + "info": [0, 85, 100] + }, + "sigma0": { + "desc": "anticipated initial sd (uncertainty factor) of all 4 options", + "info": [0, 6, 15] + }, + "sigmaD": { + "desc": "sd of diffusion noise", + "info": [0, 3, 15] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/bandit4arm_2par_lapse.json b/JSON/bandit4arm_2par_lapse.json new file mode 100644 index 00000000..93d99289 --- /dev/null +++ b/JSON/bandit4arm_2par_lapse.json @@ -0,0 +1,43 @@ +{ + "task_name": { + "code": "bandit4arm", + "desc": "4-Armed Bandit Task", + "cite": [] + }, + "model_name": { + "code": "2par_lapse", + "desc": "3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)", + "cite": [ + "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", + "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." + }, + "parameters": { + "Arew": { + "desc": "reward learning rate", + "info": [0, 0.1, 1] + }, + "Apun": { + "desc": "punishment learning rate", + "info": [0, 0.1, 1] + }, + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/bandit4arm_4par.json b/JSON/bandit4arm_4par.json new file mode 100644 index 00000000..81d7b8ee --- /dev/null +++ b/JSON/bandit4arm_4par.json @@ -0,0 +1,47 @@ +{ + "task_name": { + "code": "bandit4arm", + "desc": "4-Armed Bandit Task", + "cite": [] + }, + "model_name": { + "code": "4par", + "desc": "4 Parameter Model, without C (choice perseveration)", + "cite": [ + "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", + "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." 
+ }, + "parameters": { + "Arew": { + "desc": "reward learning rate", + "info": [0, 0.1, 1] + }, + "Apun": { + "desc": "punishment learning rate", + "info": [0, 0.1, 1] + }, + "R": { + "desc": "reward sensitivity", + "info": [0, 1, 30] + }, + "P": { + "desc": "punishment sensitivity", + "info": [0, 1, 30] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/bandit4arm_lapse.json b/JSON/bandit4arm_lapse.json new file mode 100644 index 00000000..cd981d67 --- /dev/null +++ b/JSON/bandit4arm_lapse.json @@ -0,0 +1,51 @@ +{ + "task_name": { + "code": "bandit4arm", + "desc": "4-Armed Bandit Task", + "cite": [] + }, + "model_name": { + "code": "lapse", + "desc": "5 Parameter Model, without C (choice perseveration) but with xi (noise)", + "cite": [ + "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", + "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." + }, + "parameters": { + "Arew": { + "desc": "reward learning rate", + "info": [0, 0.1, 1] + }, + "Apun": { + "desc": "punishment learning rate", + "info": [0, 0.1, 1] + }, + "R": { + "desc": "reward sensitivity", + "info": [0, 1, 30] + }, + "P": { + "desc": "punishment sensitivity", + "info": [0, 1, 30] + }, + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/bandit4arm_lapse_decay.json b/JSON/bandit4arm_lapse_decay.json new file mode 100644 index 00000000..81145a39 --- /dev/null +++ b/JSON/bandit4arm_lapse_decay.json @@ -0,0 +1,55 @@ +{ + "task_name": { + "code": "bandit4arm", + "desc": "4-Armed Bandit Task", + "cite": [] + }, + "model_name": { + "code": "lapse_decay", + "desc": "5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).", + "cite": [ + "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", + "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." 
+ },
+ "parameters": {
+ "Arew": {
+ "desc": "reward learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "Apun": {
+ "desc": "punishment learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "R": {
+ "desc": "reward sensitivity",
+ "info": [0, 1, 30]
+ },
+ "P": {
+ "desc": "punishment sensitivity",
+ "info": [0, 1, 30]
+ },
+ "xi": {
+ "desc": "noise",
+ "info": [0, 0.1, 1]
+ },
+ "d": {
+ "desc": "decay rate",
+ "info": [0, 0.1, 1]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/bandit4arm_singleA_lapse.json b/JSON/bandit4arm_singleA_lapse.json
new file mode 100644
index 00000000..07583331
--- /dev/null
+++ b/JSON/bandit4arm_singleA_lapse.json
@@ -0,0 +1,47 @@
+{
+ "task_name": {
+ "code": "bandit4arm",
+ "desc": "4-Armed Bandit Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "singleA_lapse",
+ "desc": "4 Parameter Model, without C (choice perseveration) but with xi (noise). A single learning rate A is used for both reward and punishment.",
+ "cite": [
+ "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. https://doi.org/10.31234/osf.io/k5b8m"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.",
+ "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).",
+ "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)."
+ },
+ "parameters": {
+ "A": {
+ "desc": "learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "R": {
+ "desc": "reward sensitivity",
+ "info": [0, 1, 30]
+ },
+ "P": {
+ "desc": "punishment sensitivity",
+ "info": [0, 1, 30]
+ },
+ "xi": {
+ "desc": "noise",
+ "info": [0, 0.1, 1]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/bart_par4.json b/JSON/bart_par4.json
new file mode 100644
index 00000000..a0d0db39
--- /dev/null
+++ b/JSON/bart_par4.json
@@ -0,0 +1,77 @@
+{
+ "task_name": {
+ "code": "bart",
+ "desc": "Balloon Analogue Risk Task",
+ "cite": [
+ "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105."
+ ]
+ },
+ "model_name": {
+ "code": "par4",
+ "desc": "Re-parameterized version (by Harhim Park & Jaeyeong Yang) of the BART model (van Ravenzwaaij et al., 2011) with 4 parameters",
+ "cite": []
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Harhim Park",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ },
+ {
+ "name": "Jaeyeong Yang",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Ayoung Lee",
+ "email": "aylee2008@naver.com",
+ "link": "https://ccs-lab.github.io/team/ayoung-lee/"
+ },
+ {
+ "name": "Jeongbin Oh",
+ "email": "ows0104@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jeongbin-oh/"
+ },
+ {
+ "name": "Jiyoon Lee",
+ "email": "nicole.lee2001@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jiyoon-lee/"
+ },
+ {
+ "name": "Junha Jang",
+ "email": "andy627robo@naver.com",
+ "link": "https://ccs-lab.github.io/team/junha-jang/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "pumps": "The number of pumps for the given balloon (trial).",
+ "explosion": "Whether the balloon burst on the given trial (0: intact, 1: burst)."
+ },
+ "parameters": {
+ "phi": {
+ "desc": "prior belief of balloon not bursting",
+ "info": [0, 0.5, 1]
+ },
+ "eta": {
+ "desc": "updating rate",
+ "info": [0, 1, "Inf"]
+ },
+ "gam": {
+ "desc": "risk-taking parameter",
+ "info": [0, 1, "Inf"]
+ },
+ "tau": {
+ "desc": "inverse temperature",
+ "info": [0, 1, "Inf"]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/choiceRT_ddm.json b/JSON/choiceRT_ddm.json
new file mode 100644
index 00000000..6050c43a
--- /dev/null
+++ b/JSON/choiceRT_ddm.json
@@ -0,0 +1,55 @@
+{
+ "task_name": {
+ "code": "choiceRT",
+ "desc": "Choice Reaction Time Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "ddm",
+ "desc": "Drift Diffusion Model",
+ "cite": [
+ "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [
+ "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.",
+ "Code for this model is based on code/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list."
+ ],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).",
+ "RT": "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)."
+ },
+ "parameters": {
+ "alpha": {
+ "desc": "boundary separation",
+ "info": [0, 0.5, "Inf"]
+ },
+ "beta": {
+ "desc": "bias",
+ "info": [0, 0.5, 1]
+ },
+ "delta": {
+ "desc": "drift rate",
+ "info": [0, 0.5, "Inf"]
+ },
+ "tau": {
+ "desc": "non-decision time",
+ "info": [0, 0.15, 1]
+ }
+ },
+ "regressors": {},
+ "postpreds": [],
+ "additional_args": [
+ {
+ "code": "RTbound",
+ "default": 0.1,
+ "desc": "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)."
+ }
+ ]
+}
diff --git a/JSON/choiceRT_ddm_single.json b/JSON/choiceRT_ddm_single.json
new file mode 100644
index 00000000..70954155
--- /dev/null
+++ b/JSON/choiceRT_ddm_single.json
@@ -0,0 +1,55 @@
+{
+ "task_name": {
+ "code": "choiceRT",
+ "desc": "Choice Reaction Time Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "ddm",
+ "desc": "Drift Diffusion Model",
+ "cite": [
+ "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59"
+ ]
+ },
+ "model_type": {
+ "code": "single",
+ "desc": "Individual"
+ },
+ "notes": [
+ "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.",
+ "Code for this model is based on code/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list."
+ ],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).",
+ "RT": "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)."
+ },
+ "parameters": {
+ "alpha": {
+ "desc": "boundary separation",
+ "info": [null, 0.5, null]
+ },
+ "beta": {
+ "desc": "bias",
+ "info": [null, 0.5, null]
+ },
+ "delta": {
+ "desc": "drift rate",
+ "info": [null, 0.5, null]
+ },
+ "tau": {
+ "desc": "non-decision time",
+ "info": [null, 0.15, null]
+ }
+ },
+ "regressors": {},
+ "postpreds": [],
+ "additional_args": [
+ {
+ "code": "RTbound",
+ "default": 0.1,
+ "desc": "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)."
+ }
+ ]
+}
diff --git a/JSON/cra_exp.json b/JSON/cra_exp.json
new file mode 100644
index 00000000..9dacc77b
--- /dev/null
+++ b/JSON/cra_exp.json
@@ -0,0 +1,56 @@
+{
+ "task_name": {
+ "code": "cra",
+ "desc": "Choice Under Risk and Ambiguity Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "exp",
+ "desc": "Exponential Subjective Value Model",
+ "cite": [
+ "Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "prob": "Objective probability of the variable lottery.",
+ "ambig": "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).",
+ "reward_var": "Amount of reward in variable lottery. Assumed to be greater than zero.",
+ "reward_fix": "Amount of reward in fixed lottery. Assumed to be greater than zero.",
+ "choice": "If the variable lottery was selected, choice == 1; otherwise choice == 0."
+ },
+ "parameters": {
+ "alpha": {
+ "desc": "risk attitude",
+ "info": [0, 1, 2]
+ },
+ "beta": {
+ "desc": "ambiguity attitude",
+ "info": ["-Inf", 0, "Inf"]
+ },
+ "gamma": {
+ "desc": "inverse temperature",
+ "info": [0, 1, "Inf"]
+ }
+ },
+ "regressors": {
+ "sv": 2,
+ "sv_fix": 2,
+ "sv_var": 2,
+ "p_var": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/cra_linear.json b/JSON/cra_linear.json
new file mode 100644
index 00000000..d8ce1cfa
--- /dev/null
+++ b/JSON/cra_linear.json
@@ -0,0 +1,56 @@
+{
+ "task_name": {
+ "code": "cra",
+ "desc": "Choice Under Risk and Ambiguity Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "linear",
+ "desc": "Linear Subjective Value Model",
+ "cite": [
+ "Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047."
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "prob": "Objective probability of the variable lottery.",
+ "ambig": "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).",
+ "reward_var": "Amount of reward in variable lottery. Assumed to be greater than zero.",
+ "reward_fix": "Amount of reward in fixed lottery. Assumed to be greater than zero.",
+ "choice": "If the variable lottery was selected, choice == 1; otherwise choice == 0."
+ },
+ "parameters": {
+ "alpha": {
+ "desc": "risk attitude",
+ "info": [0, 1, 2]
+ },
+ "beta": {
+ "desc": "ambiguity attitude",
+ "info": ["-Inf", 0, "Inf"]
+ },
+ "gamma": {
+ "desc": "inverse temperature",
+ "info": [0, 1, "Inf"]
+ }
+ },
+ "regressors": {
+ "sv": 2,
+ "sv_fix": 2,
+ "sv_var": 2,
+ "p_var": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/dbdm_prob_weight.json b/JSON/dbdm_prob_weight.json
new file mode 100644
index 00000000..d9650466
--- /dev/null
+++ b/JSON/dbdm_prob_weight.json
@@ -0,0 +1,59 @@
+{
+ "task_name": {
+ "code": "dbdm",
+ "desc": "Description Based Decision Making Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "prob_weight",
+ "desc": "Probability Weight Function",
+ "cite": [
+ "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.",
+ "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.",
+ "Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022."
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Yoonseo Zoh",
+ "email": "zohyos7@gmail.com",
+ "link": "https://ccs-lab.github.io/team/yoonseo-zoh/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "opt1hprob": "Probability of getting the higher outcome value (opt1hval) when choosing option 1.",
+ "opt2hprob": "Probability of getting the higher outcome value (opt2hval) when choosing option 2.",
+ "opt1hval": "Possible (with opt1hprob probability) outcome of option 1.",
+ "opt1lval": "Possible (with (1 - opt1hprob) probability) outcome of option 1.",
+ "opt2hval": "Possible (with opt2hprob probability) outcome of option 2.",
+ "opt2lval": "Possible (with (1 - opt2hprob) probability) outcome of option 2.",
+ "choice": "If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2."
+ },
+ "parameters": {
+ "tau": {
+ "desc": "probability weight function",
+ "info": [0, 0.8, 1]
+ },
+ "rho": {
+ "desc": "subjective utility function",
+ "info": [0, 0.7, 2]
+ },
+ "lambda": {
+ "desc": "loss aversion parameter",
+ "info": [0, 2.5, 5]
+ },
+ "beta": {
+ "desc": "inverse softmax temperature",
+ "info": [0, 0.2, 1]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/dd_cs.json b/JSON/dd_cs.json
new file mode 100644
index 00000000..96ff42fb
--- /dev/null
+++ b/JSON/dd_cs.json
@@ -0,0 +1,45 @@
+{
+ "task_name": {
+ "code": "dd",
+ "desc": "Delay Discounting Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "cs",
+ "desc": "Constant-Sensitivity (CS) Model",
+ "cite": [
+ "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).",
+ "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).",
+ "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).",
+ "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).",
+ "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0."
+ },
+ "parameters": {
+ "r": {
+ "desc": "exponential discounting rate",
+ "info": [0, 0.1, 1]
+ },
+ "s": {
+ "desc": "impatience",
+ "info": [0, 1, 10]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 5]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/dd_cs_single.json b/JSON/dd_cs_single.json
new file mode 100644
index 00000000..58d4ccb7
--- /dev/null
+++ b/JSON/dd_cs_single.json
@@ -0,0 +1,45 @@
+{
+ "task_name": {
+ "code": "dd",
+ "desc": "Delay Discounting Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "cs",
+ "desc": "Constant-Sensitivity (CS) Model",
+ "cite": [
+ "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. 
http://doi.org/10.1287/mnsc.1060.0671" + ] + }, + "model_type": { + "code": "single", + "desc": "Individual" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", + "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", + "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", + "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", + "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + }, + "parameters": { + "r": { + "desc": "exponential discounting rate", + "info": [null, 0.1, null] + }, + "s": { + "desc": "impatience", + "info": [null, 1, null] + }, + "beta": { + "desc": "inverse temperature", + "info": [null, 1, null] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/dd_exp.json b/JSON/dd_exp.json new file mode 100644 index 00000000..9f9794d5 --- /dev/null +++ b/JSON/dd_exp.json @@ -0,0 +1,41 @@ +{ + "task_name": { + "code": "dd", + "desc": "Delay Discounting Task", + "cite": [] + }, + "model_name": { + "code": "exp", + "desc": "Exponential Model", + "cite": [ + "Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", + "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", + "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", + "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", + "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + }, + "parameters": { + "r": { + "desc": "exponential discounting rate", + "info": [0, 0.1, 1] + }, + "beta": { + "desc": "inverse temperature", + "info": [0, 1, 5] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/dd_hyperbolic.json b/JSON/dd_hyperbolic.json new file mode 100644 index 00000000..809e131f --- /dev/null +++ b/JSON/dd_hyperbolic.json @@ -0,0 +1,41 @@ +{ + "task_name": { + "code": "dd", + "desc": "Delay Discounting Task", + "cite": [] + }, + "model_name": { + "code": "hyperbolic", + "desc": "Hyperbolic Model", + "cite": [ + "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", + "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", + "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", + "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 
10).", + "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + }, + "parameters": { + "k": { + "desc": "discounting rate", + "info": [0, 0.1, 1] + }, + "beta": { + "desc": "inverse temperature", + "info": [0, 1, 5] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/dd_hyperbolic_single.json b/JSON/dd_hyperbolic_single.json new file mode 100644 index 00000000..e89ba3f4 --- /dev/null +++ b/JSON/dd_hyperbolic_single.json @@ -0,0 +1,41 @@ +{ + "task_name": { + "code": "dd", + "desc": "Delay Discounting Task", + "cite": [] + }, + "model_name": { + "code": "hyperbolic", + "desc": "Hyperbolic Model", + "cite": [ + "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." + ] + }, + "model_type": { + "code": "single", + "desc": "Individual" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", + "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", + "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", + "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", + "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + }, + "parameters": { + "k": { + "desc": "discounting rate", + "info": [null, 0.1, null] + }, + "beta": { + "desc": "inverse temperature", + "info": [null, 1, null] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/gng_m1.json b/JSON/gng_m1.json new file mode 100644 index 00000000..99b7ac01 --- /dev/null +++ b/JSON/gng_m1.json @@ -0,0 +1,48 @@ +{ + "task_name": { + "code": "gng", + "desc": "Orthogonalized Go/Nogo Task", + "cite": [] + }, + "model_name": { + "code": "m1", + "desc": "RW + noise", + "cite": [ + "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", + "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", + "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
+ }, + "parameters": { + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + }, + "ep": { + "desc": "learning rate", + "info": [0, 0.2, 1] + }, + "rho": { + "desc": "effective size", + "info": [0, "exp(2)", "Inf"] + } + }, + "regressors": { + "Qgo": 2, + "Qnogo": 2, + "Wgo": 2, + "Wnogo": 2 + }, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/gng_m2.json b/JSON/gng_m2.json new file mode 100644 index 00000000..3f97a1ed --- /dev/null +++ b/JSON/gng_m2.json @@ -0,0 +1,52 @@ +{ + "task_name": { + "code": "gng", + "desc": "Orthogonalized Go/Nogo Task", + "cite": [] + }, + "model_name": { + "code": "m2", + "desc": "RW + noise + bias", + "cite": [ + "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", + "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", + "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." + }, + "parameters": { + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + }, + "ep": { + "desc": "learning rate", + "info": [0, 0.2, 1] + }, + "b": { + "desc": "action bias", + "info": ["-Inf", 0, "Inf"] + }, + "rho": { + "desc": "effective size", + "info": [0, "exp(2)", "Inf"] + } + }, + "regressors": { + "Qgo": 2, + "Qnogo": 2, + "Wgo": 2, + "Wnogo": 2 + }, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/gng_m3.json b/JSON/gng_m3.json new file mode 100644 index 00000000..b4fdcbba --- /dev/null +++ b/JSON/gng_m3.json @@ -0,0 +1,57 @@ +{ + "task_name": { + "code": "gng", + "desc": "Orthogonalized Go/Nogo Task", + "cite": [] + }, + "model_name": { + "code": "m3", + "desc": "RW + noise + bias + pi", + "cite": [ + "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", + "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", + "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
+ }, + "parameters": { + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + }, + "ep": { + "desc": "learning rate", + "info": [0, 0.2, 1] + }, + "b": { + "desc": "action bias", + "info": ["-Inf", 0, "Inf"] + }, + "pi": { + "desc": "Pavlovian bias", + "info": ["-Inf", 0, "Inf"] + }, + "rho": { + "desc": "effective size", + "info": [0, "exp(2)", "Inf"] + } + }, + "regressors": { + "Qgo": 2, + "Qnogo": 2, + "Wgo": 2, + "Wnogo": 2, + "SV": 2 + }, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/gng_m4.json b/JSON/gng_m4.json new file mode 100644 index 00000000..8e3626ec --- /dev/null +++ b/JSON/gng_m4.json @@ -0,0 +1,61 @@ +{ + "task_name": { + "code": "gng", + "desc": "Orthogonalized Go/Nogo Task", + "cite": [] + }, + "model_name": { + "code": "m4", + "desc": "RW (rew/pun) + noise + bias + pi", + "cite": [ + "Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", + "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", + "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." + }, + "parameters": { + "xi": { + "desc": "noise", + "info": [0, 0.1, 1] + }, + "ep": { + "desc": "learning rate", + "info": [0, 0.2, 1] + }, + "b": { + "desc": "action bias", + "info": ["-Inf", 0, "Inf"] + }, + "pi": { + "desc": "Pavlovian bias", + "info": ["-Inf", 0, "Inf"] + }, + "rhoRew": { + "desc": "reward sensitivity", + "info": [0, "exp(2)", "Inf"] + }, + "rhoPun": { + "desc": "punishment sensitivity", + "info": [0, "exp(2)", "Inf"] + } + }, + "regressors": { + "Qgo": 2, + "Qnogo": 2, + "Wgo": 2, + "Wnogo": 2, + "SV": 2 + }, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/igt_orl.json b/JSON/igt_orl.json new file mode 100644 index 00000000..8f399b46 --- /dev/null +++ b/JSON/igt_orl.json @@ -0,0 +1,65 @@ +{ + "task_name": { + "code": "igt", + "desc": "Iowa Gambling Task", + "cite": [ + "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + ] + }, + "model_name": { + "code": "orl", + "desc": "Outcome-Representation Learning Model", + "cite": [ + "Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Nate Haines", + "email": "haines.175@osu.edu", + "link": "https://ccs-lab.github.io/team/nate-haines/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", + "gain": "Floating point value representing the amount of currency won on that trial (e.g. 
50, 100).", + "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + }, + "parameters": { + "Arew": { + "desc": "reward learning rate", + "info": [0, 0.1, 1] + }, + "Apun": { + "desc": "punishment learning rate", + "info": [0, 0.1, 1] + }, + "K": { + "desc": "perseverance decay", + "info": [0, 0.1, 5] + }, + "betaF": { + "desc": "outcome frequency weight", + "info": ["-Inf", 0.1, "Inf"] + }, + "betaP": { + "desc": "perseverance weight", + "info": ["-Inf", 1, "Inf"] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [ + { + "code": "payscale", + "default": 100, + "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + } + ] +} diff --git a/JSON/igt_pvl_decay.json b/JSON/igt_pvl_decay.json new file mode 100644 index 00000000..8886e5f5 --- /dev/null +++ b/JSON/igt_pvl_decay.json @@ -0,0 +1,55 @@ +{ + "task_name": { + "code": "igt", + "desc": "Iowa Gambling Task", + "cite": [ + "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + ] + }, + "model_name": { + "code": "pvl_decay", + "desc": "Prospect Valence Learning (PVL) Decay-RI", + "cite": [ + "Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", + "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + }, + "parameters": { + "A": { + "desc": "decay rate", + "info": [0, 0.5, 1] + }, + "alpha": { + "desc": "outcome sensitivity", + "info": [0, 0.5, 2] + }, + "cons": { + "desc": "response consistency", + "info": [0, 1, 5] + }, + "lambda": { + "desc": "loss aversion", + "info": [0, 1, 10] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [ + { + "code": "payscale", + "default": 100, + "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + } + ] +} diff --git a/JSON/igt_pvl_delta.json b/JSON/igt_pvl_delta.json new file mode 100644 index 00000000..dade6353 --- /dev/null +++ b/JSON/igt_pvl_delta.json @@ -0,0 +1,55 @@ +{ + "task_name": { + "code": "igt", + "desc": "Iowa Gambling Task", + "cite": [ + "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + ] + }, + "model_name": { + "code": "pvl_delta", + "desc": "Prospect Valence Learning (PVL) Delta", + "cite": [ + "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. 
http://doi.org/10.1080/03640210802352992" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", + "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + }, + "parameters": { + "A": { + "desc": "learning rate", + "info": [0, 0.5, 1] + }, + "alpha": { + "desc": "outcome sensitivity", + "info": [0, 0.5, 2] + }, + "cons": { + "desc": "response consistency", + "info": [0, 1, 5] + }, + "lambda": { + "desc": "loss aversion", + "info": [0, 1, 10] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [ + { + "code": "payscale", + "default": 100, + "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + } + ] +} diff --git a/JSON/igt_vpp.json b/JSON/igt_vpp.json new file mode 100644 index 00000000..70c18b34 --- /dev/null +++ b/JSON/igt_vpp.json @@ -0,0 +1,71 @@ +{ + "task_name": { + "code": "igt", + "desc": "Iowa Gambling Task", + "cite": [ + "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + ] + }, + "model_name": { + "code": "vpp", + "desc": "Value-Plus-Perseverance", + "cite": [ + "Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", + "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", + "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + }, + "parameters": { + "A": { + "desc": "learning rate", + "info": [0, 0.5, 1] + }, + "alpha": { + "desc": "outcome sensitivity", + "info": [0, 0.5, 2] + }, + "cons": { + "desc": "response consistency", + "info": [0, 1, 5] + }, + "lambda": { + "desc": "loss aversion", + "info": [0, 1, 10] + }, + "epP": { + "desc": "gain impact", + "info": ["-Inf", 0, "Inf"] + }, + "epN": { + "desc": "loss impact", + "info": ["-Inf", 0, "Inf"] + }, + "K": { + "desc": "decay rate", + "info": [0, 0.5, 1] + }, + "w": { + "desc": "RL weight", + "info": [0, 0.5, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [ + { + "code": "payscale", + "default": 100, + "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + } + ] +} diff --git a/JSON/peer_ocu.json b/JSON/peer_ocu.json new file mode 100644 index 00000000..ff82a77c --- /dev/null +++ b/JSON/peer_ocu.json @@ -0,0 +1,53 @@ +{ + "task_name": { + "code": "peer", + "desc": "Peer Influence Task", + "cite": [ + "Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). 
Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916."
+ ]
+ },
+ "model_name": {
+ "code": "ocu",
+ "desc": "Other-Conferred Utility (OCU) Model",
+ "cite": []
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Harhim Park",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "condition": "0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).",
+ "p_gamble": "Probability of receiving a high payoff (same for both options).",
+ "safe_Hpayoff": "High payoff of the safe option.",
+ "safe_Lpayoff": "Low payoff of the safe option.",
+ "risky_Hpayoff": "High payoff of the risky option.",
+ "risky_Lpayoff": "Low payoff of the risky option.",
+ "choice": "Which option was chosen? 0: safe, 1: risky."
+ },
+ "parameters": {
+ "rho": {
+ "desc": "risk preference",
+ "info": [0, 1, 2]
+ },
+ "tau": {
+ "desc": "inverse temperature",
+ "info": [0, 1, "Inf"]
+ },
+ "ocu": {
+ "desc": "other-conferred utility",
+ "info": ["-Inf", 0, "Inf"]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_ewa.json b/JSON/prl_ewa.json
new file mode 100644
index 00000000..14f249e1
--- /dev/null
+++ b/JSON/prl_ewa.json
@@ -0,0 +1,58 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "ewa",
+ "desc": "Experience-Weighted Attraction Model",
+ "cite": [
+ "den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "phi": {
+ "desc": "1 - learning rate",
+ "info": [0, 0.5, 1]
+ },
+ "rho": {
+ "desc": "experience decay factor",
+ "info": [0, 0.1, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 2,
+ "ev_nc": 2,
+ "ew_c": 2,
+ "ew_nc": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_fictitious.json b/JSON/prl_fictitious.json
new file mode 100644
index 00000000..102b32c2
--- /dev/null
+++ b/JSON/prl_fictitious.json
@@ -0,0 +1,59 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "fictitious",
+ "desc": "Fictitious Update Model",
+ "cite": [
+ "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). 
Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Jaeyeong Yang (for model-based regressors)", + "email": "jaeyeong.yang1125@gmail.com", + "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" + }, + { + "name": "Harhim Park (for model-based regressors)", + "email": "hrpark12@gmail.com", + "link": "https://ccs-lab.github.io/team/harhim-park/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value representing the option chosen on that trial: 1 or 2.", + "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + }, + "parameters": { + "eta": { + "desc": "learning rate", + "info": [0, 0.5, 1] + }, + "alpha": { + "desc": "indecision point", + "info": ["-Inf", 0, "Inf"] + }, + "beta": { + "desc": "inverse temperature", + "info": [0, 1, 10] + } + }, + "regressors": { + "ev_c": 2, + "ev_nc": 2, + "pe_c": 2, + "pe_nc": 2, + "dv": 2 + }, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/prl_fictitious_multipleB.json b/JSON/prl_fictitious_multipleB.json new file mode 100644 index 00000000..e4792f7a --- /dev/null +++ b/JSON/prl_fictitious_multipleB.json @@ -0,0 +1,60 @@ +{ + "task_name": { + "code": "prl", + "desc": "Probabilistic Reversal Learning Task", + "cite": [] + }, + "model_name": { + "code": "fictitious", + "desc": "Fictitious Update Model", + "cite": [ + "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + ] + }, + "model_type": { + "code": "multipleB", + "desc": "Multiple-Block Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Jaeyeong Yang (for model-based regressors)", + "email": "jaeyeong.yang1125@gmail.com", + "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" + }, + { + "name": "Harhim Park (for model-based regressors)", + "email": "hrpark12@gmail.com", + "link": "https://ccs-lab.github.io/team/harhim-park/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "block": "A unique identifier for each of the multiple blocks within each subject.", + "choice": "Integer value representing the option chosen on that trial: 1 or 2.", + "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
+ },
+ "parameters": {
+ "eta": {
+ "desc": "learning rate",
+ "info": [0, 0.5, 1]
+ },
+ "alpha": {
+ "desc": "indecision point",
+ "info": ["-Inf", 0, "Inf"]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 3,
+ "ev_nc": 3,
+ "pe_c": 3,
+ "pe_nc": 3,
+ "dv": 3
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_fictitious_rp.json b/JSON/prl_fictitious_rp.json
new file mode 100644
index 00000000..0684fa12
--- /dev/null
+++ b/JSON/prl_fictitious_rp.json
@@ -0,0 +1,64 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "fictitious_rp",
+ "desc": "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)",
+ "cite": [
+ "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098",
+ "den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "eta_pos": {
+ "desc": "learning rate, +PE",
+ "info": [0, 0.5, 1]
+ },
+ "eta_neg": {
+ "desc": "learning rate, -PE",
+ "info": [0, 0.5, 1]
+ },
+ "alpha": {
+ "desc": "indecision point",
+ "info": ["-Inf", 0, "Inf"]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 2,
+ "ev_nc": 2,
+ "pe_c": 2,
+ "pe_nc": 2,
+ "dv": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_fictitious_rp_woa.json b/JSON/prl_fictitious_rp_woa.json
new file mode 100644
index 00000000..1a942c2e
--- /dev/null
+++ b/JSON/prl_fictitious_rp_woa.json
@@ -0,0 +1,60 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "fictitious_rp_woa",
+ "desc": "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)",
+ "cite": [
+ "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098",
+ "den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "eta_pos": {
+ "desc": "learning rate, +PE",
+ "info": [0, 0.5, 1]
+ },
+ "eta_neg": {
+ "desc": "learning rate, -PE",
+ "info": [0, 0.5, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 2,
+ "ev_nc": 2,
+ "pe_c": 2,
+ "pe_nc": 2,
+ "dv": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_fictitious_woa.json b/JSON/prl_fictitious_woa.json
new file mode 100644
index 00000000..69b35d55
--- /dev/null
+++ b/JSON/prl_fictitious_woa.json
@@ -0,0 +1,55 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "fictitious_woa",
+ "desc": "Fictitious Update Model, without alpha (indecision point)",
+ "cite": [
+ "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "eta": {
+ "desc": "learning rate",
+ "info": [0, 0.5, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 2,
+ "ev_nc": 2,
+ "pe_c": 2,
+ "pe_nc": 2,
+ "dv": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_rp.json b/JSON/prl_rp.json
new file mode 100644
index 00000000..6272765c
--- /dev/null
+++ b/JSON/prl_rp.json
@@ -0,0 +1,57 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "rp",
+ "desc": "Reward-Punishment Model",
+ "cite": [
+ "den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "Apun": {
+ "desc": "punishment learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "Arew": {
+ "desc": "reward learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 2,
+ "ev_nc": 2,
+ "pe": 2
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/prl_rp_multipleB.json b/JSON/prl_rp_multipleB.json
new file mode 100644
index 00000000..f300b69f
--- /dev/null
+++ b/JSON/prl_rp_multipleB.json
@@ -0,0 +1,58 @@
+{
+ "task_name": {
+ "code": "prl",
+ "desc": "Probabilistic Reversal Learning Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "rp",
+ "desc": "Reward-Punishment Model",
+ "cite": [
+ "den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030"
+ ]
+ },
+ "model_type": {
+ "code": "multipleB",
+ "desc": "Multiple-Block Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang (for model-based regressors)",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ },
+ {
+ "name": "Harhim Park (for model-based regressors)",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "block": "A unique identifier for each of the multiple blocks within each subject.",
+ "choice": "Integer value representing the option chosen on that trial: 1 or 2.",
+ "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)."
+ },
+ "parameters": {
+ "Apun": {
+ "desc": "punishment learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "Arew": {
+ "desc": "reward learning rate",
+ "info": [0, 0.1, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {
+ "ev_c": 3,
+ "ev_nc": 3,
+ "pe": 3
+ },
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/pst_gainloss_Q.json b/JSON/pst_gainloss_Q.json
new file mode 100644
index 00000000..f9fcfa65
--- /dev/null
+++ b/JSON/pst_gainloss_Q.json
@@ -0,0 +1,49 @@
+{
+ "task_name": {
+ "code": "pst",
+ "desc": "Probabilistic Selection Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "gainloss_Q",
+ "desc": "Gain-Loss Q Learning Model",
+ "cite": [
+ "Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316."
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Jaeyeong Yang",
+ "email": "jaeyeong.yang1125@gmail.com",
+ "link": "https://ccs-lab.github.io/team/jaeyeong-yang/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "type": "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below:\n\n ===== ======== ==================\n Code Stimulus Probability to win\n ===== ======== ==================\n 1 A 80%\n 2 B 20%\n 3 C 70%\n 4 D 30%\n 5 E 60%\n 6 F 40%\n ===== ======== ==================\n\n The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.",
+ "choice": "Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).",
+ "reward": "Amount of reward earned as a result of the trial."
+ },
+ "parameters": {
+ "alpha_pos": {
+ "desc": "learning rate for positive feedback",
+ "info": [0, 0.5, 1]
+ },
+ "alpha_neg": {
+ "desc": "learning rate for negative feedback",
+ "info": [0, 0.5, 1]
+ },
+ "beta": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 10]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/ra_noLA.json b/JSON/ra_noLA.json
new file mode 100644
index 00000000..1c6ad3c5
--- /dev/null
+++ b/JSON/ra_noLA.json
@@ -0,0 +1,40 @@
+{
+ "task_name": {
+ "code": "ra",
+ "desc": "Risk Aversion Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "noLA",
+ "desc": "Prospect Theory, without loss aversion (LA) parameter",
+ "cite": [
+ "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
+ "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
+ "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be greater than or equal to zero.",
+ "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
+ },
+ "parameters": {
+ "rho": {
+ "desc": "risk aversion",
+ "info": [0, 1, 2]
+ },
+ "tau": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 30]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/ra_noRA.json b/JSON/ra_noRA.json
new file mode 100644
index 00000000..7dd2f329
--- /dev/null
+++ b/JSON/ra_noRA.json
@@ -0,0 +1,40 @@
+{
+ "task_name": {
+ "code": "ra",
+ "desc": "Risk Aversion Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "noRA",
+ "desc": "Prospect Theory, without risk aversion (RA) parameter",
+ "cite": [
+ "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
+ "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
+ "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be greater than or equal to zero.",
+ "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
+ },
+ "parameters": {
+ "lambda": {
+ "desc": "loss aversion",
+ "info": [0, 1, 5]
+ },
+ "tau": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 30]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/ra_prospect.json b/JSON/ra_prospect.json
new file mode 100644
index 00000000..471c91da
--- /dev/null
+++ b/JSON/ra_prospect.json
@@ -0,0 +1,44 @@
+{
+ "task_name": {
+ "code": "ra",
+ "desc": "Risk Aversion Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "prospect",
+ "desc": "Prospect Theory",
+ "cite": [
+ "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
+ "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
+ "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be greater than or equal to zero.",
+ "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
+ },
+ "parameters": {
+ "rho": {
+ "desc": "risk aversion",
+ "info": [0, 1, 2]
+ },
+ "lambda": {
+ "desc": "loss aversion",
+ "info": [0, 1, 5]
+ },
+ "tau": {
+ "desc": "inverse temperature",
+ "info": [0, 1, 30]
+ }
+ },
+ "regressors": {},
+ "postpreds": ["y_pred"],
+ "additional_args": []
+}
diff --git a/JSON/rdt_happiness.json b/JSON/rdt_happiness.json
new file mode 100644
index 00000000..d8d77706
--- /dev/null
+++ b/JSON/rdt_happiness.json
@@ -0,0 +1,66 @@
+{
+ "task_name": {
+ "code": "rdt",
+ "desc": "Risky Decision Task",
+ "cite": []
+ },
+ "model_name": {
+ "code": "happiness",
+ "desc": "Happiness Computational Model",
+ "cite": [
+ "Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257."
+ ]
+ },
+ "model_type": {
+ "code": "",
+ "desc": "Hierarchical"
+ },
+ "notes": [],
+ "contributors": [
+ {
+ "name": "Harhim Park",
+ "email": "hrpark12@gmail.com",
+ "link": "https://ccs-lab.github.io/team/harhim-park/"
+ }
+ ],
+ "data_columns": {
+ "subjID": "A unique identifier for each subject in the data-set.",
+ "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
+ "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 
5, or -5).", + "cert": "Guaranteed amount of a safe option.", + "type": "loss == -1, mixed == 0, gain == 1", + "gamble": "If gamble was taken, gamble == 1; else gamble == 0.", + "outcome": "Result of the trial.", + "happy": "Happiness score.", + "RT_happy": "Reaction time for answering the happiness score." + }, + "parameters": { + "w0": { + "desc": "baseline", + "info": ["-Inf", 1, "Inf"] + }, + "w1": { + "desc": "weight of certain rewards", + "info": ["-Inf", 1, "Inf"] + }, + "w2": { + "desc": "weight of expected values", + "info": ["-Inf", 1, "Inf"] + }, + "w3": { + "desc": "weight of reward prediction errors", + "info": ["-Inf", 1, "Inf"] + }, + "gam": { + "desc": "forgetting factor", + "info": [0, 0.5, 1] + }, + "sig": { + "desc": "standard deviation of error", + "info": [0, 1, "Inf"] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/ts_par4.json b/JSON/ts_par4.json new file mode 100644 index 00000000..fd7d3512 --- /dev/null +++ b/JSON/ts_par4.json @@ -0,0 +1,62 @@ +{ + "task_name": { + "code": "ts", + "desc": "Two-Step Task", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + ] + }, + "model_name": { + "code": "par4", + "desc": "Hybrid Model, with 4 parameters", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027", + "Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Harhim Park", + "email": "hrpark12@gmail.com", + "link": "https://ccs-lab.github.io/team/harhim-park/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", + "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", + "reward": "Reward after Level 2 (0 or 1)." + }, + "parameters": { + "a": { + "desc": "learning rate for both stages 1 & 2", + "info": [0, 0.5, 1] + }, + "beta": { + "desc": "inverse temperature for both stages 1 & 2", + "info": [0, 1, "Inf"] + }, + "pi": { + "desc": "perseverance", + "info": [0, 1, 5] + }, + "w": { + "desc": "model-based weight", + "info": [0, 0.5, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred_step1", "y_pred_step2"], + "additional_args": [ + { + "code": "trans_prob", + "default": 0.7, + "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." 
+ } + ] +} diff --git a/JSON/ts_par6.json b/JSON/ts_par6.json new file mode 100644 index 00000000..02a5e3c6 --- /dev/null +++ b/JSON/ts_par6.json @@ -0,0 +1,69 @@ +{ + "task_name": { + "code": "ts", + "desc": "Two-Step Task", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + ] + }, + "model_name": { + "code": "par6", + "desc": "Hybrid Model, with 6 parameters", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Harhim Park", + "email": "hrpark12@gmail.com", + "link": "https://ccs-lab.github.io/team/harhim-park/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", + "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", + "reward": "Reward after Level 2 (0 or 1)." + }, + "parameters": { + "a1": { + "desc": "learning rate in stage 1", + "info": [0, 0.5, 1] + }, + "beta1": { + "desc": "inverse temperature in stage 1", + "info": [0, 1, "Inf"] + }, + "a2": { + "desc": "learning rate in stage 2", + "info": [0, 0.5, 1] + }, + "beta2": { + "desc": "inverse temperature in stage 2", + "info": [0, 1, "Inf"] + }, + "pi": { + "desc": "perseverance", + "info": [0, 1, 5] + }, + "w": { + "desc": "model-based weight", + "info": [0, 0.5, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred_step1", "y_pred_step2"], + "additional_args": [ + { + "code": "trans_prob", + "default": 0.7, + "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." + } + ] +} diff --git a/JSON/ts_par7.json b/JSON/ts_par7.json new file mode 100644 index 00000000..9e3b5e32 --- /dev/null +++ b/JSON/ts_par7.json @@ -0,0 +1,73 @@ +{ + "task_name": { + "code": "ts", + "desc": "Two-Step Task", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + ] + }, + "model_name": { + "code": "par7", + "desc": "Hybrid Model, with 7 parameters (original model)", + "cite": [ + "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. 
http://doi.org/10.1016/j.neuron.2011.02.027" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Harhim Park", + "email": "hrpark12@gmail.com", + "link": "https://ccs-lab.github.io/team/harhim-park/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", + "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", + "reward": "Reward after Level 2 (0 or 1)." + }, + "parameters": { + "a1": { + "desc": "learning rate in stage 1", + "info": [0, 0.5, 1] + }, + "beta1": { + "desc": "inverse temperature in stage 1", + "info": [0, 1, "Inf"] + }, + "a2": { + "desc": "learning rate in stage 2", + "info": [0, 0.5, 1] + }, + "beta2": { + "desc": "inverse temperature in stage 2", + "info": [0, 1, "Inf"] + }, + "pi": { + "desc": "perseverance", + "info": [0, 1, 5] + }, + "w": { + "desc": "model-based weight", + "info": [0, 0.5, 1] + }, + "lambda": { + "desc": "eligibility trace", + "info": [0, 0.5, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred_step1", "y_pred_step2"], + "additional_args": [ + { + "code": "trans_prob", + "default": 0.7, + "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." + } + ] +} diff --git a/JSON/ug_bayes.json b/JSON/ug_bayes.json new file mode 100644 index 00000000..d34f9078 --- /dev/null +++ b/JSON/ug_bayes.json @@ -0,0 +1,42 @@ +{ + "task_name": { + "code": "ug", + "desc": "Norm-Training Ultimatum Game", + "cite": [] + }, + "model_name": { + "code": "bayes", + "desc": "Ideal Observer Model", + "cite": [ + "Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "offer": "Floating point value representing the offer made in that trial (e.g. 4, 10, 11).", + "accept": "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." + }, + "parameters": { + "alpha": { + "desc": "envy", + "info": [0, 1, 20] + }, + "beta": { + "desc": "guilt", + "info": [0, 0.5, 10] + }, + "tau": { + "desc": "inverse temperature", + "info": [0, 1, 10] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/ug_delta.json b/JSON/ug_delta.json new file mode 100644 index 00000000..6c797f51 --- /dev/null +++ b/JSON/ug_delta.json @@ -0,0 +1,42 @@ +{ + "task_name": { + "code": "ug", + "desc": "Norm-Training Ultimatum Game", + "cite": [] + }, + "model_name": { + "code": "delta", + "desc": "Rescorla-Wagner (Delta) Model", + "cite": [ + "Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). 
Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015" + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "offer": "Floating point value representing the offer made in that trial (e.g. 4, 10, 11).", + "accept": "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." + }, + "parameters": { + "alpha": { + "desc": "envy", + "info": [0, 1, 20] + }, + "tau": { + "desc": "inverse temperature", + "info": [0, 1, 10] + }, + "ep": { + "desc": "norm adaptation rate", + "info": [0, 0.5, 1] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} diff --git a/JSON/wcs_sql.json b/JSON/wcs_sql.json new file mode 100644 index 00000000..126ae5af --- /dev/null +++ b/JSON/wcs_sql.json @@ -0,0 +1,48 @@ +{ + "task_name": { + "code": "wcs", + "desc": "Wisconsin Card Sorting Task", + "cite": [] + }, + "model_name": { + "code": "sql", + "desc": "Sequential Learning Model", + "cite": [ + "Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13." + ] + }, + "model_type": { + "code": "", + "desc": "Hierarchical" + }, + "notes": [], + "contributors": [ + { + "name": "Dayeong Min", + "email": "mindy2801@snu.ac.kr", + "link": "https://ccs-lab.github.io/team/dayeong-min/" + } + ], + "data_columns": { + "subjID": "A unique identifier for each subject in the data-set.", + "choice": "Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.", + "outcome": "1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0." 
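Throughout these model cards, each parameter's `info` triple encodes a lower bound, a plausible value, and an upper bound, with the string "Inf" standing in for an unbounded limit (the Python base class below unpacks these triples the same way). A small parsing sketch; the function name is illustrative, not part of the package:

.. code:: python

    import math

    def parse_info(info):
        """Parse one [lower, plausible, upper] triple from a model card."""
        def to_num(v):
            return {'Inf': math.inf, '-Inf': -math.inf}.get(v, v)
        lb, plausible, ub = (to_num(v) for v in info)
        assert lb <= plausible <= ub, 'plausible value must lie within bounds'
        return lb, plausible, ub

    print(parse_info([0, 0.1, 1]))    # a bounded parameter
    print(parse_info([0, 1, 'Inf']))  # an unbounded upper limit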
+ }, + "parameters": { + "r": { + "desc": "reward sensitivity", + "info": [0, 0.1, 1] + }, + "p": { + "desc": "punishment sensitivity", + "info": [0, 0.1, 1] + }, + "d": { + "desc": "decision consistency or inverse temperature", + "info": [0, 1, 5] + } + }, + "regressors": {}, + "postpreds": ["y_pred"], + "additional_args": [] +} From be45857bc917b96db9b450d03e2d7ba82039b072 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Wed, 1 May 2019 18:35:48 +0900 Subject: [PATCH 010/163] Move Py-related into Python/ --- Python/MANIFEST.in | 4 + Python/Makefile | 23 + Python/Pipfile | 30 + Python/README.rst | 58 + Python/docs/Makefile | 24 + Python/docs/source/conf.py | 187 + Python/docs/source/index.rst | 19 + Python/hbayesdm/__init__.py | 4 + Python/hbayesdm/base.py | 881 + .../common/extdata/bandit2arm_exampleData.txt | 2001 +++ .../extdata/bandit4arm2_exampleData.txt | 3001 ++++ .../common/extdata/bandit4arm_exampleData.txt | 2001 +++ .../common/extdata/bart_exampleData.txt | 91 + .../common/extdata/choiceRT_exampleData.txt | 5001 ++++++ .../extdata/choiceRT_single_exampleData.txt | 1001 ++ .../common/extdata/cra_exampleData.txt | 541 + .../common/extdata/dbdm_exampleData.txt | 15001 ++++++++++++++++ .../common/extdata/dd_exampleData.txt | 2161 +++ .../common/extdata/dd_single_exampleData.txt | 109 + .../common/extdata/gng_exampleData.txt | 2401 +++ .../common/extdata/igt_exampleData.txt | 401 + .../common/extdata/peer_exampleData.txt | 361 + .../common/extdata/prl_exampleData.txt | 2001 +++ .../extdata/prl_multipleB_exampleData.txt | 1801 ++ .../common/extdata/pst_exampleData.txt | 1021 ++ .../common/extdata/ra_data_attend.txt | 4192 +++++ .../common/extdata/ra_data_reappraisal.txt | 4190 +++++ .../common/extdata/ra_exampleData.txt | 701 + .../common/extdata/rdt_exampleData.txt | 901 + .../common/extdata/ts_exampleData.txt | 2191 +++ .../common/extdata/ug_exampleData.txt | 1801 ++ .../common/extdata/wcs_answersheet.txt | 4 + .../common/extdata/wcs_exampleData.txt | 1158 ++ .../common/stan_files/bandit2arm_delta.stan | 109 + .../stan_files/bandit4arm2_kalman_filter.stan | 163 + .../stan_files/bandit4arm_2par_lapse.stan | 173 + .../common/stan_files/bandit4arm_4par.stan | 176 + .../common/stan_files/bandit4arm_lapse.stan | 182 + .../stan_files/bandit4arm_lapse_decay.stan | 201 + .../stan_files/bandit4arm_singleA_lapse.stan | 177 + .../hbayesdm/common/stan_files/bart_par4.stan | 129 + .../common/stan_files/choiceRT_ddm.stan | 98 + .../stan_files/choiceRT_ddm_single.stan | 58 + .../common/stan_files/choiceRT_lba.stan | 278 + .../stan_files/choiceRT_lba_single.stan | 239 + .../hbayesdm/common/stan_files/cra_exp.stan | 134 + .../common/stan_files/cra_linear.stan | 130 + .../common/stan_files/dbdm_prob_weight.stan | 154 + Python/hbayesdm/common/stan_files/dd_cs.stan | 107 + .../common/stan_files/dd_cs_single.stan | 63 + Python/hbayesdm/common/stan_files/dd_exp.stan | 101 + .../common/stan_files/dd_hyperbolic.stan | 101 + .../stan_files/dd_hyperbolic_single.stan | 57 + Python/hbayesdm/common/stan_files/gng_m1.stan | 149 + Python/hbayesdm/common/stan_files/gng_m2.stan | 160 + Python/hbayesdm/common/stan_files/gng_m3.stan | 179 + Python/hbayesdm/common/stan_files/gng_m4.stan | 210 + .../hbayesdm/common/stan_files/igt_orl.stan | 207 + .../common/stan_files/igt_pvl_decay.stan | 134 + .../common/stan_files/igt_pvl_delta.stan | 132 + .../hbayesdm/common/stan_files/igt_vpp.stan | 188 + .../hbayesdm/common/stan_files/peer_ocu.stan | 115 + .../common/stan_files/pre/license.stan | 14 + 
.../hbayesdm/common/stan_files/prl_ewa.stan | 179 + .../common/stan_files/prl_fictitious.stan | 173 + .../stan_files/prl_fictitious_multipleB.stan | 185 + .../common/stan_files/prl_fictitious_rp.stan | 188 + .../stan_files/prl_fictitious_rp_woa.stan | 180 + .../common/stan_files/prl_fictitious_woa.stan | 165 + Python/hbayesdm/common/stan_files/prl_rp.stan | 149 + .../common/stan_files/prl_rp_multipleB.stan | 161 + .../common/stan_files/pst_gainloss_Q.stan | 114 + .../hbayesdm/common/stan_files/ra_noLA.stan | 95 + .../hbayesdm/common/stan_files/ra_noRA.stan | 95 + .../common/stan_files/ra_prospect.stan | 97 + .../common/stan_files/rdt_happiness.stan | 146 + .../hbayesdm/common/stan_files/ts_par4.stan | 204 + .../hbayesdm/common/stan_files/ts_par6.stan | 213 + .../hbayesdm/common/stan_files/ts_par7.stan | 217 + .../hbayesdm/common/stan_files/ug_bayes.stan | 167 + .../hbayesdm/common/stan_files/ug_delta.stan | 129 + .../hbayesdm/common/stan_files/wcs_sql.stan | 176 + Python/hbayesdm/diagnostics.py | 136 + Python/hbayesdm/models/__init__.py | 95 + Python/hbayesdm/models/_bandit2arm_delta.py | 242 + .../models/_bandit4arm2_kalman_filter.py | 249 + .../hbayesdm/models/_bandit4arm_2par_lapse.py | 245 + Python/hbayesdm/models/_bandit4arm_4par.py | 247 + Python/hbayesdm/models/_bandit4arm_lapse.py | 249 + .../models/_bandit4arm_lapse_decay.py | 251 + .../models/_bandit4arm_singleA_lapse.py | 247 + Python/hbayesdm/models/_bart_par4.py | 250 + Python/hbayesdm/models/_choiceRT_ddm.py | 251 + .../hbayesdm/models/_choiceRT_ddm_single.py | 251 + Python/hbayesdm/models/_cra_exp.py | 252 + Python/hbayesdm/models/_cra_linear.py | 252 + Python/hbayesdm/models/_dbdm_prob_weight.py | 257 + Python/hbayesdm/models/_dd_cs.py | 249 + Python/hbayesdm/models/_dd_cs_single.py | 249 + Python/hbayesdm/models/_dd_exp.py | 247 + Python/hbayesdm/models/_dd_hyperbolic.py | 247 + .../hbayesdm/models/_dd_hyperbolic_single.py | 247 + Python/hbayesdm/models/_gng_m1.py | 248 + Python/hbayesdm/models/_gng_m2.py | 250 + Python/hbayesdm/models/_gng_m3.py | 253 + Python/hbayesdm/models/_gng_m4.py | 255 + Python/hbayesdm/models/_igt_orl.py | 251 + Python/hbayesdm/models/_igt_pvl_decay.py | 249 + Python/hbayesdm/models/_igt_pvl_delta.py | 249 + Python/hbayesdm/models/_igt_vpp.py | 257 + Python/hbayesdm/models/_peer_ocu.py | 253 + Python/hbayesdm/models/_prl_ewa.py | 247 + Python/hbayesdm/models/_prl_fictitious.py | 248 + .../models/_prl_fictitious_multipleB.py | 250 + Python/hbayesdm/models/_prl_fictitious_rp.py | 251 + .../hbayesdm/models/_prl_fictitious_rp_woa.py | 249 + Python/hbayesdm/models/_prl_fictitious_woa.py | 246 + Python/hbayesdm/models/_prl_rp.py | 246 + Python/hbayesdm/models/_prl_rp_multipleB.py | 248 + Python/hbayesdm/models/_pst_gainloss_Q.py | 258 + Python/hbayesdm/models/_ra_noLA.py | 245 + Python/hbayesdm/models/_ra_noRA.py | 245 + Python/hbayesdm/models/_ra_prospect.py | 247 + Python/hbayesdm/models/_rdt_happiness.py | 261 + Python/hbayesdm/models/_ts_par4.py | 250 + Python/hbayesdm/models/_ts_par6.py | 254 + Python/hbayesdm/models/_ts_par7.py | 256 + Python/hbayesdm/models/_ug_bayes.py | 243 + Python/hbayesdm/models/_ug_delta.py | 243 + Python/hbayesdm/models/_wcs_sql.py | 243 + Python/hbayesdm/preprocess_funcs.py | 854 + Python/setup.cfg | 40 + Python/setup.py | 140 + 133 files changed, 75355 insertions(+) create mode 100644 Python/MANIFEST.in create mode 100644 Python/Makefile create mode 100644 Python/Pipfile create mode 100644 Python/README.rst create mode 100644 Python/docs/Makefile create mode 100644 
Python/docs/source/conf.py create mode 100644 Python/docs/source/index.rst create mode 100644 Python/hbayesdm/__init__.py create mode 100644 Python/hbayesdm/base.py create mode 100644 Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/bart_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/choiceRT_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/cra_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/dbdm_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/dd_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/dd_single_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/gng_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/igt_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/peer_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/prl_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/pst_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/ra_data_attend.txt create mode 100644 Python/hbayesdm/common/extdata/ra_data_reappraisal.txt create mode 100644 Python/hbayesdm/common/extdata/ra_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/rdt_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/ts_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/ug_exampleData.txt create mode 100644 Python/hbayesdm/common/extdata/wcs_answersheet.txt create mode 100644 Python/hbayesdm/common/extdata/wcs_exampleData.txt create mode 100644 Python/hbayesdm/common/stan_files/bandit2arm_delta.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm_4par.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan create mode 100644 Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan create mode 100644 Python/hbayesdm/common/stan_files/bart_par4.stan create mode 100644 Python/hbayesdm/common/stan_files/choiceRT_ddm.stan create mode 100644 Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan create mode 100644 Python/hbayesdm/common/stan_files/choiceRT_lba.stan create mode 100644 Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan create mode 100644 Python/hbayesdm/common/stan_files/cra_exp.stan create mode 100644 Python/hbayesdm/common/stan_files/cra_linear.stan create mode 100644 Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan create mode 100644 Python/hbayesdm/common/stan_files/dd_cs.stan create mode 100644 Python/hbayesdm/common/stan_files/dd_cs_single.stan create mode 100644 Python/hbayesdm/common/stan_files/dd_exp.stan create mode 100644 Python/hbayesdm/common/stan_files/dd_hyperbolic.stan create mode 100644 Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan create mode 100644 Python/hbayesdm/common/stan_files/gng_m1.stan create mode 100644 Python/hbayesdm/common/stan_files/gng_m2.stan create mode 100644 
Python/hbayesdm/common/stan_files/gng_m3.stan create mode 100644 Python/hbayesdm/common/stan_files/gng_m4.stan create mode 100644 Python/hbayesdm/common/stan_files/igt_orl.stan create mode 100644 Python/hbayesdm/common/stan_files/igt_pvl_decay.stan create mode 100644 Python/hbayesdm/common/stan_files/igt_pvl_delta.stan create mode 100644 Python/hbayesdm/common/stan_files/igt_vpp.stan create mode 100644 Python/hbayesdm/common/stan_files/peer_ocu.stan create mode 100644 Python/hbayesdm/common/stan_files/pre/license.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_ewa.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_fictitious.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_rp.stan create mode 100644 Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan create mode 100644 Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan create mode 100644 Python/hbayesdm/common/stan_files/ra_noLA.stan create mode 100644 Python/hbayesdm/common/stan_files/ra_noRA.stan create mode 100644 Python/hbayesdm/common/stan_files/ra_prospect.stan create mode 100644 Python/hbayesdm/common/stan_files/rdt_happiness.stan create mode 100644 Python/hbayesdm/common/stan_files/ts_par4.stan create mode 100644 Python/hbayesdm/common/stan_files/ts_par6.stan create mode 100644 Python/hbayesdm/common/stan_files/ts_par7.stan create mode 100644 Python/hbayesdm/common/stan_files/ug_bayes.stan create mode 100644 Python/hbayesdm/common/stan_files/ug_delta.stan create mode 100644 Python/hbayesdm/common/stan_files/wcs_sql.stan create mode 100644 Python/hbayesdm/diagnostics.py create mode 100644 Python/hbayesdm/models/__init__.py create mode 100644 Python/hbayesdm/models/_bandit2arm_delta.py create mode 100644 Python/hbayesdm/models/_bandit4arm2_kalman_filter.py create mode 100644 Python/hbayesdm/models/_bandit4arm_2par_lapse.py create mode 100644 Python/hbayesdm/models/_bandit4arm_4par.py create mode 100644 Python/hbayesdm/models/_bandit4arm_lapse.py create mode 100644 Python/hbayesdm/models/_bandit4arm_lapse_decay.py create mode 100644 Python/hbayesdm/models/_bandit4arm_singleA_lapse.py create mode 100644 Python/hbayesdm/models/_bart_par4.py create mode 100644 Python/hbayesdm/models/_choiceRT_ddm.py create mode 100644 Python/hbayesdm/models/_choiceRT_ddm_single.py create mode 100644 Python/hbayesdm/models/_cra_exp.py create mode 100644 Python/hbayesdm/models/_cra_linear.py create mode 100644 Python/hbayesdm/models/_dbdm_prob_weight.py create mode 100644 Python/hbayesdm/models/_dd_cs.py create mode 100644 Python/hbayesdm/models/_dd_cs_single.py create mode 100644 Python/hbayesdm/models/_dd_exp.py create mode 100644 Python/hbayesdm/models/_dd_hyperbolic.py create mode 100644 Python/hbayesdm/models/_dd_hyperbolic_single.py create mode 100644 Python/hbayesdm/models/_gng_m1.py create mode 100644 Python/hbayesdm/models/_gng_m2.py create mode 100644 Python/hbayesdm/models/_gng_m3.py create mode 100644 Python/hbayesdm/models/_gng_m4.py create mode 100644 Python/hbayesdm/models/_igt_orl.py create mode 100644 Python/hbayesdm/models/_igt_pvl_decay.py create mode 100644 Python/hbayesdm/models/_igt_pvl_delta.py create mode 100644 Python/hbayesdm/models/_igt_vpp.py create mode 100644 
Python/hbayesdm/models/_peer_ocu.py create mode 100644 Python/hbayesdm/models/_prl_ewa.py create mode 100644 Python/hbayesdm/models/_prl_fictitious.py create mode 100644 Python/hbayesdm/models/_prl_fictitious_multipleB.py create mode 100644 Python/hbayesdm/models/_prl_fictitious_rp.py create mode 100644 Python/hbayesdm/models/_prl_fictitious_rp_woa.py create mode 100644 Python/hbayesdm/models/_prl_fictitious_woa.py create mode 100644 Python/hbayesdm/models/_prl_rp.py create mode 100644 Python/hbayesdm/models/_prl_rp_multipleB.py create mode 100644 Python/hbayesdm/models/_pst_gainloss_Q.py create mode 100644 Python/hbayesdm/models/_ra_noLA.py create mode 100644 Python/hbayesdm/models/_ra_noRA.py create mode 100644 Python/hbayesdm/models/_ra_prospect.py create mode 100644 Python/hbayesdm/models/_rdt_happiness.py create mode 100644 Python/hbayesdm/models/_ts_par4.py create mode 100644 Python/hbayesdm/models/_ts_par6.py create mode 100644 Python/hbayesdm/models/_ts_par7.py create mode 100644 Python/hbayesdm/models/_ug_bayes.py create mode 100644 Python/hbayesdm/models/_ug_delta.py create mode 100644 Python/hbayesdm/models/_wcs_sql.py create mode 100644 Python/hbayesdm/preprocess_funcs.py create mode 100644 Python/setup.cfg create mode 100644 Python/setup.py diff --git a/Python/MANIFEST.in b/Python/MANIFEST.in new file mode 100644 index 00000000..0999da74 --- /dev/null +++ b/Python/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst +include LICENSE +graft hbayesdm/common +global-exclude .git diff --git a/Python/Makefile b/Python/Makefile new file mode 100644 index 00000000..6de60898 --- /dev/null +++ b/Python/Makefile @@ -0,0 +1,23 @@ +PIPENV_PATH = $(shell which pipenv) + +init: +ifeq (, $(PIPENV_PATH)) + pip install pipenv +endif + pipenv install --dev --skip-lock + +test: + pipenv run py.test tests + +test-travis: + pipenv run py.test tests --doctest-modules + +lint: + pipenv run flake8 hbayesdm --format=pylint --statistics --exit-zero + pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero + +docs-travis: + pipenv run travis-sphinx build + pipenv run travis-sphinx deploy + +.PHONY: init test test-travis lint docs-travis diff --git a/Python/Pipfile b/Python/Pipfile new file mode 100644 index 00000000..48dc698c --- /dev/null +++ b/Python/Pipfile @@ -0,0 +1,30 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +autopep8 = "*" +pylint = "*" +flake8 = "*" +mypy = "*" +pytest = "*" +sphinx = "*" +sphinx-rtd-theme = "*" +travis-sphinx = "*" +sphinx-autodoc-typehints = "*" +sphinx-autobuild = "*" +jupyter = "*" +jupyterlab = "*" + +[packages] +numpy = "*" +scipy = "*" +pandas = "*" +pystan = "*" +matplotlib = "*" +arviz = "*" +hbayesdm = {editable = true,path = "."} + +[requires] +python_version = "3" diff --git a/Python/README.rst b/Python/README.rst new file mode 100644 index 00000000..c4dc2bce --- /dev/null +++ b/Python/README.rst @@ -0,0 +1,58 @@ +hBayesDM-py +=========== + +.. image:: https://www.repostatus.org/badges/latest/wip.svg + :alt: Project Status: WIP – Initial development is in progress, + but there has not yet been a stable, usable release suitable + for the public. + :target: https://www.repostatus.org/#wip +..
image:: https://travis-ci.com/CCS-Lab/hBayesDM-py.svg?token=gbyEQoyAYgexeSRwBwj6&branch=master + :alt: Travis CI + :target: https://travis-ci.com/CCS-Lab/hBayesDM-py + +This is the Python version of *hBayesDM* (hierarchical Bayesian modeling of +Decision-Making tasks), a user-friendly package that offers hierarchical +Bayesian analysis of various computational models on an array of +decision-making tasks. *hBayesDM* uses `PyStan`_ (Python interface for +`Stan`_) for Bayesian inference. + +.. _PyStan: https://github.com/stan-dev/pystan +.. _Stan: http://mc-stan.org/ + +hBayesDM-py supports Python 3.5 or higher. It requires several packages including: + +* `NumPy`_, `SciPy`_, `Pandas`_, `PyStan`_, `Matplotlib`_, `ArviZ`_ + +.. _NumPy: https://www.numpy.org/ +.. _SciPy: https://www.scipy.org/ +.. _Pandas: https://pandas.pydata.org/ +.. _Matplotlib: https://matplotlib.org/ +.. _ArviZ: https://arviz-devs.github.io/arviz/ + +Installation +------------ + +You can install hBayesDM-py from PyPI with the following line: + +.. code:: bash + + pip install hbayesdm + +If you want to install from source (via cloning from GitHub): + +.. code:: bash + + git clone --recursive https://github.com/CCS-Lab/hBayesDM-py.git + cd hBayesDM-py + python setup.py install + +If you want to make a virtual environment using `pipenv`_, +you can do so with the following command: + +.. _pipenv: https://pipenv.readthedocs.io/en/latest/ + +.. code:: bash + + # After cloning (recursively) & cd-ing into hBayesDM-py + pipenv install + pipenv install --dev # For developmental purpose diff --git a/Python/docs/Makefile b/Python/docs/Makefile new file mode 100644 index 00000000..783edd8f --- /dev/null +++ b/Python/docs/Makefile @@ -0,0 +1,24 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXAUTOBUILD = sphinx-autobuild +SOURCEDIR = source +BUILDDIR = build +PACKAGEDIR = ../hbayesdm + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +livehtml: + @$(SPHINXAUTOBUILD) -b html -z "$(PACKAGEDIR)" "$(SOURCEDIR)" "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile livehtml + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/Python/docs/source/conf.py b/Python/docs/source/conf.py new file mode 100644 index 00000000..deef2b62 --- /dev/null +++ b/Python/docs/source/conf.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + +import hbayesdm # noqa: E402 + +# -- Project information ----------------------------------------------------- + +project = 'hBayesDM' +copyright = '2019, hBayesDM Developers' +author = 'hBayesDM Developers' + +# The short X.Y version +version = hbayesdm.version.version +# The full version, including alpha/beta/rc tags +release = hbayesdm.version.release + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.todo', + 'sphinx.ext.mathjax', + 'sphinx.ext.githubpages', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'hBayesDMdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'hBayesDM.tex', 'hBayesDM Documentation', + 'hBayesDM Developers', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'hbayesdm', 'hBayesDM Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'hBayesDM', 'hBayesDM Documentation', + author, 'hBayesDM', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for todo extension ---------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True diff --git a/Python/docs/source/index.rst b/Python/docs/source/index.rst new file mode 100644 index 00000000..7330aea7 --- /dev/null +++ b/Python/docs/source/index.rst @@ -0,0 +1,19 @@ +.. hBayesDM documentation master file, created by + sphinx-quickstart on Thu Feb 14 10:06:38 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +.. include:: ../../README.rst + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/Python/hbayesdm/__init__.py b/Python/hbayesdm/__init__.py new file mode 100644 index 00000000..d48d8c2c --- /dev/null +++ b/Python/hbayesdm/__init__.py @@ -0,0 +1,4 @@ +from hbayesdm.version import version as __version__ +from hbayesdm.diagnostics import rhat, print_fit, hdi, plot_hdi + +__all__ = ['__version__', 'rhat', 'print_fit', 'hdi', 'plot_hdi'] diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py new file mode 100644 index 00000000..3720a0b8 --- /dev/null +++ b/Python/hbayesdm/base.py @@ -0,0 +1,881 @@ +import os +import pickle +import multiprocessing +from abc import ABCMeta, abstractmethod +from typing import Tuple, List, Sequence, Dict, Union, Callable, Any +from collections import OrderedDict + +import numpy as np +import pandas as pd +from scipy import stats +from pystan import StanModel +import matplotlib.pyplot as plt +import arviz as az + +from hbayesdm import __version__ as _hbayesdm_version +from pystan import __version__ as _pystan_version + +__all__ = ['TaskModel'] + +_common = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'common') + + +class TaskModel(metaclass=ABCMeta): + """HBayesDM TaskModel Base Class. + + The base class that is inherited by all hBayesDM task-models. Child classes + should implement (i.e. override) the abstract method: `_preprocess_func`. 
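Concretely, each task-model class only has to supply the data-munging step; sampling, summarising, and plotting are all inherited from this base class. A simplified, hypothetical subclass for illustration only (the real per-model classes live under hbayesdm/models/ and also pass the task metadata to __init__):

.. code:: python

    from hbayesdm.base import TaskModel

    class MyTaskModel(TaskModel):
        """Illustrative subclass; not part of the package."""

        def _preprocess_func(self, raw_data, general_info, additional_args):
            # Shape the raw trial data into the dict handed to Stan.
            return {
                'N': general_info['n_subj'],      # number of subjects
                'T': general_info['t_max'],       # max trials per subject
                'Tsubj': general_info['t_subjs'],
                # ...plus the task-specific trial-level arrays
            }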
+ """ + + def __init__(self, + task_name: str, + model_name: str, + model_type: str, + data_columns: Sequence[str], + parameters: 'OrderedDict[str, Sequence[float]]', + regressors: 'OrderedDict[str, int]', + postpreds: Sequence[str], + parameters_desc: 'OrderedDict[str, str]', + additional_args_desc: 'OrderedDict[str, float]', + **kwargs): + # Assign attributes + self.__task_name = task_name + self.__model_name = model_name + self.__model_type = model_type + self.__data_columns = data_columns + self.__parameters = parameters + self.__regressors = regressors + self.__postpreds = postpreds + self.__parameters_desc = parameters_desc + self.__additional_args_desc = additional_args_desc + + # Handle special case (dd_single) + if self.task_name == 'dd' and self.model_type == 'single': + p = list(self.parameters_desc)[0] + self.__parameters_desc['log' + p.upper()] = 'log(%s)' % p + + # Run model function + model, all_ind_pars, par_vals, fit, raw_data, model_regressor \ + = self._run(**kwargs) + + # Assign results as attributes + self.__model = model + self.__all_ind_pars = all_ind_pars + self.__par_vals = par_vals + self.__fit = fit + self.__raw_data = raw_data + self.__model_regressor = model_regressor + + @property + def task_name(self) -> str: + return self.__task_name + + @property + def model_name(self) -> str: + return self.__model_name + + @property + def model_type(self) -> str: + return self.__model_type + + @property + def data_columns(self) -> Sequence[str]: + return self.__data_columns + + @property + def parameters(self) -> 'OrderedDict[str, Sequence[float]]': + return self.__parameters + + @property + def regressors(self) -> 'OrderedDict[str, int]': + return self.__regressors + + @property + def postpreds(self) -> Sequence[str]: + return self.__postpreds + + @property + def parameters_desc(self) -> 'OrderedDict[str, str]': + return self.__parameters_desc + + @property + def additional_args_desc(self) -> 'OrderedDict[str, float]': + return self.__additional_args_desc + + @property + def model(self) -> str: + return self.__model + + @property + def all_ind_pars(self) -> pd.DataFrame: + return self.__all_ind_pars + + @property + def par_vals(self) -> OrderedDict: + return self.__par_vals + + @property + def fit(self) -> Any: + return self.__fit + + @property + def raw_data(self) -> pd.DataFrame: + return self.__raw_data + + @property + def model_regressor(self) -> Dict: + return self.__model_regressor + + def _run(self, + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> Tuple[str, + pd.DataFrame, + OrderedDict, + Any, + pd.DataFrame, + Dict]: + """Run the hbayesdm modeling function.""" + self._check_regressor(model_regressor) + self._check_postpred(inc_postpred) + + raw_data, initial_columns = self._handle_data_args( + example, datafile, data) + insensitive_data_columns = self._get_insensitive_data_columns() + + self._check_data_columns(raw_data, insensitive_data_columns) + self._check_missing_values(raw_data, insensitive_data_columns) + + general_info = self._prepare_general_info(raw_data) + + data_dict = self._preprocess_func( + raw_data, general_info, additional_args) + pars = 
self._prepare_pars(model_regressor, inc_postpred) + gen_init = self._prepare_gen_init(inits, general_info['n_subj']) + + model = self._get_model_full_name() + ncore = self._set_number_of_cores(ncore) + + self._print_for_user( + model, example, datafile, data, vb, nchain, ncore, niter, nwarmup, + general_info, additional_args, model_regressor) + + sm = self._designate_stan_model(model) + fit = self._fit_stan_model( + vb, sm, data_dict, pars, gen_init, nchain, niter, nwarmup, nthin, + adapt_delta, stepsize, max_treedepth, ncore) + + measure = self._define_measure_function(ind_pars) + par_vals = self._extract_from_fit(fit, inc_postpred) + all_ind_pars = self._measure_all_ind_pars( + measure, par_vals, general_info['subjs']) + model_regressor = self._extract_model_regressor( + measure, par_vals) if model_regressor else None + + self._revert_initial_columns(raw_data, initial_columns) + self._inform_completion() + + return model, all_ind_pars, par_vals, fit, raw_data, model_regressor + + def _check_regressor(self, requested_by_user: bool): + """Check if regressors are available for this model. + + Parameters + ---------- + requested_by_user + Whether model regressors are requested by user. + """ + if requested_by_user and not self.regressors: + raise RuntimeError( + 'Model-based regressors are not available for this model.') + + def _check_postpred(self, requested_by_user: bool): + """Check if posterior predictive check is available for this model. + + Parameters + ---------- + requested_by_user + Whether PPC is requested by user. + """ + if requested_by_user and not self.postpreds: + raise RuntimeError( + 'Posterior predictions are not yet available for this model.') + + def _handle_data_args(self, + example: bool, + datafile: str, + data: pd.DataFrame) -> Tuple[pd.DataFrame, List]: + """Handle user data arguments and return raw_data. + + Parameters + ---------- + example : bool + Whether to use example data. + datafile : str + String of filepath for the data file. + data : pandas.DataFrame + Pandas DataFrame object that holds the data. + + Returns + ------- + raw_data : pandas.DataFrame + Properly imported raw data as a Pandas DataFrame. + initial_columns : List + Initial column names of raw data, as given by the user. 
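These three data arguments are mutually exclusive: exactly one of `example`, `datafile`, or `data` must be supplied per call, as the check that follows enforces. A sketch of the three call styles, assuming the packaged `ra_prospect` model function (the name is taken from the models/ listing above; sampling settings and file paths are illustrative):

.. code:: python

    import pandas as pd
    from hbayesdm.models import ra_prospect

    # 1) the bundled example data
    out1 = ra_prospect(example=True, niter=2000, nwarmup=1000, nchain=2)

    # 2) a file on disk (.csv, or read as tab-separated otherwise)
    out2 = ra_prospect(datafile='ra_exampleData.txt')

    # 3) an in-memory DataFrame with the required columns
    df = pd.read_csv('ra_exampleData.txt', sep='\t')
    out3 = ra_prospect(data=df)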
+ """ + # Check the number of valid arguments (which should be 1) + if int(example) \ + + int(datafile is not None) \ + + int(data is not None) != 1: + raise RuntimeError( + 'Please give one of these arguments: ' + 'example, datafile, or data.') + + if data is not None: # Use given data as raw_data + if not isinstance(data, pd.DataFrame): + raise RuntimeError( + 'Please provide `data` argument as a pandas.DataFrame.') + raw_data = data + + elif datafile is not None: # Load data from given filepath + if datafile.endswith('.csv'): + raw_data = pd.read_csv(datafile) + else: # Read the file as a tsv format + raw_data = pd.read_csv(datafile, sep='\t') + + else: # (example == True) Load example data + if self.model_type == '': + filename = '%s_exampleData.txt' % self.task_name + else: + filename = '%s_%s_exampleData.txt' % ( + self.task_name, self.model_type) + example_data = os.path.join(_common, 'extdata', filename) + raw_data = pd.read_csv(example_data, sep='\t') + + # Save initial column names of raw data for later + initial_columns = list(raw_data.columns) + + # Assign case- & underscore-insensitive column names + raw_data.columns = [ + col.replace('_', '').lower() for col in raw_data.columns] + + return raw_data, initial_columns + + def _get_insensitive_data_columns(self) -> List: + """Return list of case- & underscore-insensitive data column names. + + Returns + ------- + insensitive_data_columns + List of data columns, with underscores removed and case ignored. + """ + return [col.replace('_', '').lower() for col in self.data_columns] + + def _check_data_columns(self, + raw_data: pd.DataFrame, + insensitive_data_columns: List): + """Check if necessary data columns all exist in raw data, + while ignoring case and underscores in column names. + + Parameters + ---------- + raw_data + The raw behavioral data as a Pandas DataFrame. + insensitive_data_columns + Case- & underscore-insensitive data columns of this model. + """ + if not set(insensitive_data_columns).issubset(set(raw_data.columns)): + raise RuntimeError( + 'Data is missing one or more necessary data columns.\n' + + 'Necessary data columns are: ' + repr(self.data_columns)) + + def _check_missing_values(self, + raw_data: pd.DataFrame, + insensitive_data_columns: List): + """Remove rows containing NaNs in necessary columns. + + Parameters + ---------- + raw_data + The raw behavioral data as a Pandas DataFrame. + insensitive_data_columns + Case- & underscore-insensitive data columns of this model. + """ + initial = raw_data.copy() + raw_data.dropna(subset=insensitive_data_columns, inplace=True) + nan_rows = set(initial.index).difference(raw_data.index) + if nan_rows: + print() + print('Following rows of data have NaNs in necessary columns:') + print(initial.loc[nan_rows, ]) + print('These rows are removed prior to modeling the data.') + + def _prepare_general_info(self, raw_data: pd.DataFrame) -> Dict: + """Prepare general infos about the raw data. + + Parameters + ---------- + raw_data + The raw behavioral data as a Pandas DataFrame. + + Returns + ------- + general_info : Dict + 'grouped_data': data grouped by subjs (& blocks) - Pandas GroupBy, + 'subjs': list of all subjects in data, + 'n_subj': total number of subjects in data, + 'b_subjs': number of blocks per subj, + 'b_max': max number of blocks across all subjs, + 't_subjs': number of trials (per block) per subj, + 't_max': max number of trials across all subjs (& blocks). 
+ """ + if self.model_type == '' or self.model_type == 'single': + grouped_data = raw_data.groupby('subjid', sort=False) + trials_per_subj = grouped_data.size() + subjs = list(trials_per_subj.index) + n_subj = len(subjs) + t_subjs = list(trials_per_subj) + t_max = max(t_subjs) + b_subjs, b_max = None, None + if self.model_type == 'single' and n_subj != 1: + raise RuntimeError( + 'More than 1 unique subjects exist in data, ' + + 'while using \'single\' type model.') + else: + grouped_data = raw_data.groupby(['subjid', 'block'], sort=False) + trials_per_block = grouped_data.size() + subj_block = trials_per_block.index.to_frame(index=False) + blocks_per_subj = subj_block.groupby('subjid', sort=False).size() + subjs = list(blocks_per_subj.index) + n_subj = len(subjs) + b_subjs = list(blocks_per_subj) + b_max = max(b_subjs) + t_subjs = [list(trials_per_block[subj]) for subj in subjs] + t_max = max(max(t_subjs)) + return {'grouped_data': grouped_data, + 'subjs': subjs, 'n_subj': n_subj, + 'b_subjs': b_subjs, 'b_max': b_max, + 't_subjs': t_subjs, 't_max': t_max} + + @abstractmethod + def _preprocess_func(self, + raw_data: pd.DataFrame, + general_info: Dict, + additional_args: Dict) -> Dict: + """Preprocess the raw data to pass to pystan. + + This function should be overridden in each specific model class. + + Parameters + ---------- + raw_data + The raw behavioral data as a Pandas DataFrame. + general_info + Holds general infos about the raw data. + additional_args + Optional additional argument(s) that may be used. + + Returns + ------- + data_dict + Will directly be passed to pystan. + """ + pass + + def _prepare_pars(self, model_regressor: bool, inc_postpred: bool) -> List: + """Prepare the parameters of interest for pystan. + + Parameters + ---------- + model_regressor + Whether user requested to extract model-based regressors. + inc_postpred + Whether user requested to include posterior predictive checks. + + Returns + ------- + pars + List of parameters of interest for pystan. + """ + pars = [] + if self.model_type != 'single': + pars += ['mu_' + p for p in self.parameters] + pars += ['sigma'] + pars += self.parameters_desc + pars += ['log_lik'] + if model_regressor: + pars += self.regressors + if inc_postpred: + pars += self.postpreds + return pars + + def _prepare_gen_init(self, + inits: Union[str, Sequence[float]], + n_subj: int) -> Union[str, Callable]: + """Prepare initial values for the parameters. + + Parameters + ---------- + inits + User-defined inits. Can be the strings 'random' or 'fixed', or a + list of float values to use as initial values for the parameters. + n_subj + Total number of subjects in data. + + Returns + ------- + gen_init : Union[str, Callable] + Either a string 'random', or a function that returns a Dict + holding the initial values for each parameter. 
+ """ + if inits == 'random': + return 'random' + + if inits == 'fixed': + inits = [plausible for _, plausible, _ in self.parameters.values()] + elif len(inits) != len(self.parameters): + raise RuntimeError( + 'Length of `inits` must be %d ' % len(self.parameters) + + '(= the number of parameters of this model).') + + def gen_init(): + if self.model_type == 'single': + return dict(zip(self.parameters, inits)) + else: + def get_prime(v, lb, ub): + if np.isinf(lb): + return v + elif np.isinf(ub): + return np.log(v - lb) + else: + return stats.norm.ppf((v - lb) / (ub - lb)) + + primes = [get_prime(inits[i], lb, ub) for i, (lb, _, ub) in + enumerate(self.parameters.values())] + group_level = {'mu_pr': primes, 'sigma': [1.0] * len(primes)} + indiv_level = {param + '_pr': [prime] * n_subj for + param, prime in zip(self.parameters, primes)} + return {**group_level, **indiv_level} + + return gen_init + + def _get_model_full_name(self) -> str: + """Return full name of model. + + Returns + ------- + model + Full name of the model. + """ + if self.model_type == '': + return '%s_%s' % (self.task_name, self.model_name) + else: + return '%s_%s_%s' % ( + self.task_name, self.model_name, self.model_type) + + def _set_number_of_cores(self, ncore: int) -> int: + """Set number of cores for parallel computing. + + Parameters + ---------- + ncore + Number of cores to use, specified by user. + + Returns + ------- + ncore + Actual number of cores to use (value to be passed to pystan). + """ + local_cores = multiprocessing.cpu_count() + if ncore == -1 or ncore > local_cores: + return local_cores + return ncore + + def _print_for_user(self, model: str, example: bool, datafile: str, + data: pd.DataFrame, vb: bool, nchain: int, ncore: int, + niter: int, nwarmup: int, general_info: Dict, + additional_args: Dict, model_regressor: bool): + """Print information for user. + + Parameters + ---------- + model + Full name of model. + example + Whether to use example data. + datafile + String of filepath for data file. + data + Pandas DataFrame object holding user data. + vb + Whether to use variational Bayesian analysis. + nchain + Number of chains to run. + ncore + Number of cores to use. + niter + Number of iterations per chain. + nwarmup + Number of warm-up iterations. + general_info + Dict holding general infos about the raw data. + additional_args + Optional additional arguments that may be used. + model_regressor + Whether to extract model-based regressors. 
+ """ + print() + print('Model =', model) + if example: + print('Data = example') + elif datafile: + print('Data =', datafile) + else: + print('Data =', object.__repr__(data)) + print() + print('Details:') + if vb: + print(' Using variational inference') + else: + print(' # of chains =', nchain) + print(' # of cores used =', ncore) + print(' # of MCMC samples (per chain) =', niter) + print(' # of burn-in samples =', nwarmup) + print(' # of subjects =', general_info['n_subj']) + if self.model_type == 'multipleB': + print(' # of (max) blocks per subject =', general_info['b_max']) + if self.model_type == '': + print(' # of (max) trials per subject =', general_info['t_max']) + elif self.model_type == 'multipleB': + print(' # of (max) trials...') + print(' ...per block per subject =', general_info['t_max']) + else: # (self.model_type == 'single') + print(' # of trials (for this subject) =', general_info['t_max']) + + # Models with additional arguments + if self.additional_args_desc: + for arg, default_value in self.additional_args_desc.items(): + print(' `{}` is set to '.format(arg)[:31], + '= {}'.format(additional_args.get(arg, default_value))) + + # When extracting model-based regressors + if model_regressor: + print() + print('**************************************') + print('** Extract model-based regressors **') + print('**************************************') + + # An empty newline before Stan begins + print() + + def _designate_stan_model(self, model: str) -> StanModel: + """Designate the stan model to use for sampling. + + Parameters + ---------- + model + Full name of the model. + + Returns + ------- + sm + Compiled StanModel obj to use for sampling & fitting. + """ + stan_files = os.path.join(_common, 'stan_files') + model_path = os.path.join(stan_files, model + '.stan') + cache_file = 'cached-%s-hbayesdm=%s-pystan=%s.pkl' % ( + model, _hbayesdm_version, _pystan_version) + try: + with open(cache_file, 'rb') as cached_stan_model: + sm = pickle.load(cached_stan_model) + with open(model_path, 'r') as model_stan_code: + assert sm.model_code == model_stan_code.read() + except (FileNotFoundError, AssertionError): + sm = StanModel( + file=model_path, model_name=model, include_paths=[stan_files]) + with open(cache_file, 'wb') as f: + pickle.dump(sm, f) + except: # All other exceptions + raise RuntimeError( + 'Cache file is corrupted. Please remove file `' + + cache_file + '` and run again.') + else: + print('Using cached StanModel:', cache_file) + return sm + + def _fit_stan_model(self, vb: bool, sm: StanModel, data_dict: Dict, + pars: List, gen_init: Union[str, Callable], + nchain: int, niter: int, nwarmup: int, nthin: int, + adapt_delta: float, stepsize: float, + max_treedepth: int, ncore: int) -> Any: + """Fit the stan model. + + Parameters + ---------- + vb + Whether to perform variational Bayesian analysis. + sm + The StanModel object to use to fit the model. + data_dict + Dict holding the data to pass to Stan. + pars + List specifying the parameters of interest. + gen_init + String or function to specify how to generate the initial values. + nchain + Number of chains to run. + niter + Number of iterations per chain. + nwarmup + Number of warm-up iterations. + nthin + Use every `i == nthin` sample to generate posterior distribution. + adapt_delta + Advanced control argument for sampler. + stepsize + Advanced control argument for sampler. + max_treedepth + Advanced control argument for sampler. + ncore + Argument for parallel computing while sampling multiple chains. 
+
+    def _fit_stan_model(self, vb: bool, sm: StanModel, data_dict: Dict,
+                        pars: List, gen_init: Union[str, Callable],
+                        nchain: int, niter: int, nwarmup: int, nthin: int,
+                        adapt_delta: float, stepsize: float,
+                        max_treedepth: int, ncore: int) -> Any:
+        """Fit the Stan model.
+
+        Parameters
+        ----------
+        vb
+            Whether to perform variational Bayesian analysis.
+        sm
+            The StanModel object to use to fit the model.
+        data_dict
+            Dict holding the data to pass to Stan.
+        pars
+            List specifying the parameters of interest.
+        gen_init
+            String or function to specify how to generate the initial values.
+        nchain
+            Number of chains to run.
+        niter
+            Number of iterations per chain.
+        nwarmup
+            Number of warm-up iterations.
+        nthin
+            Keep every `nthin`-th sample when generating the posterior
+            distribution.
+        adapt_delta
+            Advanced control argument for the sampler.
+        stepsize
+            Advanced control argument for the sampler.
+        max_treedepth
+            Advanced control argument for the sampler.
+        ncore
+            Argument for parallel computing while sampling multiple chains.
+
+        Returns
+        -------
+        fit
+            The fitted result returned by the `vb` or `sampling` function.
+        """
+        if vb:
+            return sm.vb(data=data_dict,
+                         pars=pars,
+                         init=gen_init)
+        else:
+            return sm.sampling(data=data_dict,
+                               pars=pars,
+                               init=gen_init,
+                               chains=nchain,
+                               iter=niter,
+                               warmup=nwarmup,
+                               thin=nthin,
+                               control={'adapt_delta': adapt_delta,
+                                        'stepsize': stepsize,
+                                        'max_treedepth': max_treedepth},
+                               n_jobs=ncore)
+
+    def _define_measure_function(self, ind_pars: str) -> Callable:
+        """Define which function to use to summarize results.
+
+        Parameters
+        ----------
+        ind_pars
+            String specifying how to summarize results.
+
+        Returns
+        -------
+        measure
+            Function to use to summarize (measure) the results.
+        """
+        return {
+            'mean': np.mean,
+            'median': np.median,
+            'mode': stats.mode,
+        }[ind_pars]
+
+    def _extract_from_fit(self, fit: Any, inc_postpred: bool) -> OrderedDict:
+        """Extract draws from the Stan fit object.
+
+        Parameters
+        ----------
+        fit
+            Fitted result of sampling the Stan model.
+        inc_postpred
+            Whether the user requested to include posterior predictive checks.
+
+        Returns
+        -------
+        par_vals
+            Entire raw draws of the MCMC sampler, per parameter (& subject).
+        """
+        par_vals = fit.extract(permuted=True)
+        if inc_postpred:
+            for pp in self.postpreds:
+                par_vals[pp][par_vals[pp] == -1] = np.nan
+        return par_vals
+
+    def _measure_all_ind_pars(self,
+                              measure: Callable,
+                              par_vals: OrderedDict,
+                              subjs: List) -> pd.DataFrame:
+        """Measure all individual parameters (per subject).
+
+        Parameters
+        ----------
+        measure
+            Function to use to summarize the samples.
+        par_vals
+            Raw draws of the MCMC sampler (per parameter & subject).
+        subjs
+            List of all the subjects in the data.
+
+        Returns
+        -------
+        all_ind_pars
+            Pandas DataFrame summarizing the draws per parameter (& subject).
+        """
+        # Define which parameters to measure
+        which_pars = list(self.parameters_desc)
+
+        # Measure all individual parameters
+        if self.model_type == 'single':
+            return pd.DataFrame(
+                {p: measure(par_vals[p]) for p in which_pars},
+                index=subjs
+            )
+        else:
+            return pd.DataFrame(
+                {p: list(map(measure, par_vals[p].T)) for p in which_pars},
+                index=subjs
+            )
+
+    def _extract_model_regressor(
+            self, measure: Callable, par_vals: OrderedDict) -> Dict:
+        """Extract model regressors (for model-based neuroimaging, etc.).
+
+        Parameters
+        ----------
+        measure
+            Function to use to summarize the samples.
+        par_vals
+            Raw draws of the MCMC sampler.
+
+        Returns
+        -------
+        model_regressor
+            Dict containing the summarized model regressor values.
+        """
+        return {r: np.apply_over_axes(
+            measure,
+            par_vals[r],
+            [i + 1 for i in range(dim_size)]
+        ).squeeze() for r, dim_size in self.regressors.items()}
+
+    def _revert_initial_columns(self,
+                                raw_data: pd.DataFrame,
+                                initial_columns: List):
+        """Restore the initial column names of the raw data.
+
+        Parameters
+        ----------
+        raw_data
+            Data used to fit the model, as specified by the user.
+        initial_columns
+            Initial column names of the raw data, as given by the user.
+        """
+        raw_data.columns = initial_columns
+
+    def _inform_completion(self):
+        """Inform the user of completion."""
+        print('************************************')
+        print('**** Model fitting is complete! ****')
+        print('************************************')
+
+    def __str__(self):
+        return self.fit.stansummary()
+
+    def plot(self,
+             type: str = 'dist',
+             credible_interval: float = 0.94,
+             point_estimate: str = 'mean',
+             bins: Union[int, Sequence, str] = 'auto',
+             round_to: int = 2,
+             **kwargs):
+        """General-purpose plotting for hbayesdm-py.
+
+        This function plots hyper-parameters.
+
+        Parameters
+        ----------
+        type
+            Current options are: 'dist', 'trace'. Defaults to 'dist'.
+        credible_interval
+            Credible interval to plot. Defaults to 0.94.
+        point_estimate
+            Show the point estimate on the plot.
+            Options are: 'mean', 'median' or 'mode'. Defaults to 'mean'.
+        bins
+            Controls the number of bins. Defaults to 'auto'.
+            Accepts the same values (or keywords) as plt.hist() does.
+        round_to
+            Controls formatting for floating-point numbers. Defaults to 2.
+        **kwargs
+            Passed as-is to plt.hist().
+        """
+        type_options = ('dist', 'trace')
+        if type not in type_options:
+            raise RuntimeError(
+                'Plot type must be one of ' + repr(type_options))
+
+        if self.model_type == 'single':
+            var_names = list(self.parameters_desc)
+        else:
+            var_names = ['mu_' + p for p in self.parameters_desc]
+
+        if type == 'dist':
+            kwargs.setdefault('color', 'black')
+            axes = az.plot_posterior(self.fit,
+                                     kind='hist',
+                                     var_names=var_names,
+                                     credible_interval=credible_interval,
+                                     point_estimate=point_estimate,
+                                     bins=bins,
+                                     round_to=round_to,
+                                     **kwargs)
+            for ax, (p, desc) in zip(axes, self.parameters_desc.items()):
+                ax.set_title('{} ({})'.format(p, desc))
+        elif type == 'trace':
+            az.plot_trace(self.fit, var_names=var_names)
+
+        plt.show()
+
+    def plot_ind(self,
+                 var_names: Union[str, List[str]] = None,
+                 show_density: bool = True,
+                 credible_interval: float = 0.94):
+        """Plot individual posterior distributions, using ArviZ.
+
+        Parameters
+        ----------
+        var_names
+            Parameter(s) to plot. If not specified, show all model parameters.
+        show_density
+            Whether to show density. True or False. Defaults to True.
+        credible_interval
+            Credible interval to plot. Defaults to 0.94.
+        """
+        if var_names is None:
+            var_names = list(self.parameters_desc)
+
+        if show_density:
+            kind = 'ridgeplot'
+        else:
+            kind = 'forestplot'
+
+        az.plot_forest(self.fit,
+                       kind=kind,
+                       var_names=var_names,
+                       credible_interval=credible_interval,
+                       combined=True,
+                       colors='gray', ridgeplot_alpha=0.8)
+        plt.show()
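For orientation, a hedged usage sketch of the fitting and plotting interface assembled above; the `ra_prospect` entry point, its module path, and the keyword arguments shown are assumptions inferred from the internal signatures in this hunk, not confirmed public API:

    from hbayesdm.models import ra_prospect

    output = ra_prospect(example=True, niter=2000, nwarmup=1000,
                         nchain=4, ncore=4)
    print(output)                      # Stan summary via __str__
    output.plot(type='trace')          # trace plots of the hyper-parameters
    output.plot_ind(var_names='rho')   # per-subject posteriors (ridgeplot)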
diff --git a/Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt b/Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt
new file mode 100644
index 00000000..d28e2ca2
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt
@@ -0,0 +1,2001 @@
+subjID trial choice outcome
+1 1 1 1 +1 2 2 -1 +1 3 2 -1 +1 4 2 -1 +1 5 1 -1 +1 6 2 -1 +1 7 2 1 +1 8 1 1 +1 9 1 1 +1 10 1 -1 +1 11 2 1 +1 12 1 1 +1 13 2 -1 +1 14 1 -1 +1 15 1 1 +1 16 1 1 +1 17 2 -1 +1 18 2 -1 +1 19 2 -1 +1 20 2 1 +1 21 1 -1 +1 22 1 -1 +1 23 2 -1 +1 24 2 -1 +1 25 2 -1 +1 26 1 1 +1 27 1 -1 +1 28 2 -1 +1 29 1 1 +1 30 1 1 +1 31 1 1 +1 32 1 1 +1 33 2 -1 +1 34 1 1 +1 35 1 1 +1 36 1 -1 +1 37 2 -1 +1 38 1 1 +1 39 2 -1 +1 40 1 1 +1 41 1 1 +1 42 1 1 +1 43 2 -1 +1 44 1 1 +1 45 1 1 +1 46 1 1 +1 47 1 1 +1 48 2 1 +1 49 1 1 +1 50 1 1 +1 51 1 1 +1 52 2 -1 +1 53 1 1 +1 54 1 -1 +1 55 1 -1 +1 56 2 -1 +1 57 1 -1 +1 58 2 1 +1 59 2 1 +1 60 2 -1 +1 61 1 1 +1 62 1 -1 +1 63 2 1 +1 64 1 1 +1 65 1 1 +1 66 2 -1 +1 67 1 -1 +1 68 2 1 +1 69 2 -1 +1 70 2 -1 +1 71 2 -1 +1 72 1 1 +1 73 2 1 +1 74 1 1 +1 75 2 -1 +1 76 1 -1 +1 77 1 -1 +1 78 2 -1 +1 79 1 -1 +1 80 2 -1 +1 81 1 1 +1 82 2 -1 +1 83 2 -1 +1 84 1 -1 +1 85 1 1 +1 86 1 1 +1 87 1 1 +1 88 1 1 +1 89 1 1 +1 90 1 -1 +1 91 2 1 +1 92 2 1 +1 93 1 1 +1 94 2 1 +1 95 2 -1 +1 96 1 -1 +1 97 1 1 +1 98 1 -1 +1 99 1 -1 +1 100 1 -1 +2 1 2 1 +2 2 2 -1 +2 3 2 -1 +2 4 1 1 +2 5 2 1 +2 6 1 1 +2 7 1 1 +2 8 1 -1 +2 9 1 -1 +2 10 2 1 +2 11 1 1 +2 12 2 -1 +2 13 2 -1 +2 14 1 1 +2 15 2 1 +2 16 1 1 +2 17 1 1 +2 18
1 1 +2 19 2 1 +2 20 1 1 +2 21 2 1 +2 22 1 1 +2 23 2 1 +2 24 2 -1 +2 25 1 1 +2 26 2 1 +2 27 1 1 +2 28 2 1 +2 29 2 -1 +2 30 1 1 +2 31 2 1 +2 32 1 1 +2 33 2 1 +2 34 2 1 +2 35 2 1 +2 36 2 -1 +2 37 1 1 +2 38 1 -1 +2 39 2 1 +2 40 1 1 +2 41 2 1 +2 42 2 -1 +2 43 1 1 +2 44 2 -1 +2 45 2 -1 +2 46 1 1 +2 47 1 -1 +2 48 1 1 +2 49 2 -1 +2 50 1 1 +2 51 1 1 +2 52 1 1 +2 53 2 -1 +2 54 1 1 +2 55 1 -1 +2 56 1 -1 +2 57 2 1 +2 58 1 1 +2 59 2 -1 +2 60 1 1 +2 61 1 1 +2 62 1 1 +2 63 1 1 +2 64 1 1 +2 65 1 -1 +2 66 1 1 +2 67 2 -1 +2 68 1 -1 +2 69 2 1 +2 70 1 1 +2 71 2 -1 +2 72 2 1 +2 73 1 1 +2 74 2 -1 +2 75 1 -1 +2 76 2 1 +2 77 1 1 +2 78 1 1 +2 79 1 1 +2 80 1 -1 +2 81 2 -1 +2 82 2 -1 +2 83 1 1 +2 84 2 1 +2 85 1 -1 +2 86 2 1 +2 87 1 1 +2 88 1 1 +2 89 1 -1 +2 90 1 -1 +2 91 1 1 +2 92 1 1 +2 93 2 1 +2 94 2 -1 +2 95 1 -1 +2 96 1 1 +2 97 2 1 +2 98 1 1 +2 99 1 -1 +2 100 2 -1 +3 1 1 1 +3 2 2 1 +3 3 1 1 +3 4 2 -1 +3 5 1 1 +3 6 1 1 +3 7 1 1 +3 8 2 -1 +3 9 1 1 +3 10 1 1 +3 11 1 1 +3 12 1 1 +3 13 2 1 +3 14 2 1 +3 15 1 1 +3 16 2 -1 +3 17 2 -1 +3 18 1 1 +3 19 2 1 +3 20 2 -1 +3 21 2 1 +3 22 2 -1 +3 23 1 1 +3 24 2 -1 +3 25 1 1 +3 26 2 -1 +3 27 1 -1 +3 28 1 1 +3 29 2 1 +3 30 1 -1 +3 31 2 -1 +3 32 1 1 +3 33 1 -1 +3 34 2 1 +3 35 2 1 +3 36 1 1 +3 37 2 1 +3 38 1 1 +3 39 2 1 +3 40 1 -1 +3 41 2 -1 +3 42 2 -1 +3 43 2 -1 +3 44 1 1 +3 45 1 -1 +3 46 1 1 +3 47 1 1 +3 48 1 1 +3 49 1 1 +3 50 1 1 +3 51 2 -1 +3 52 1 1 +3 53 2 -1 +3 54 1 -1 +3 55 1 -1 +3 56 1 1 +3 57 1 -1 +3 58 1 1 +3 59 1 1 +3 60 1 -1 +3 61 1 1 +3 62 2 -1 +3 63 1 1 +3 64 1 1 +3 65 1 1 +3 66 2 -1 +3 67 1 -1 +3 68 1 -1 +3 69 2 -1 +3 70 2 -1 +3 71 2 1 +3 72 2 -1 +3 73 1 1 +3 74 2 1 +3 75 2 -1 +3 76 1 -1 +3 77 1 1 +3 78 1 1 +3 79 2 -1 +3 80 1 1 +3 81 1 -1 +3 82 1 -1 +3 83 1 1 +3 84 1 1 +3 85 2 1 +3 86 1 1 +3 87 1 1 +3 88 1 1 +3 89 1 1 +3 90 2 -1 +3 91 1 -1 +3 92 2 -1 +3 93 2 -1 +3 94 2 -1 +3 95 2 -1 +3 96 1 1 +3 97 1 -1 +3 98 1 -1 +3 99 2 1 +3 100 1 1 +4 1 2 -1 +4 2 2 1 +4 3 2 1 +4 4 2 1 +4 5 1 1 +4 6 2 1 +4 7 1 1 +4 8 1 1 +4 9 1 1 +4 10 2 -1 +4 11 2 -1 +4 12 1 1 +4 13 1 -1 +4 14 2 -1 +4 15 1 1 +4 16 1 1 +4 17 1 -1 +4 18 2 1 +4 19 1 1 +4 20 2 -1 +4 21 2 1 +4 22 1 1 +4 23 1 -1 +4 24 2 -1 +4 25 1 1 +4 26 1 -1 +4 27 1 -1 +4 28 2 -1 +4 29 2 1 +4 30 2 -1 +4 31 2 1 +4 32 2 -1 +4 33 2 -1 +4 34 1 1 +4 35 1 -1 +4 36 2 -1 +4 37 1 -1 +4 38 2 1 +4 39 2 -1 +4 40 2 -1 +4 41 1 1 +4 42 2 1 +4 43 1 -1 +4 44 1 -1 +4 45 2 1 +4 46 1 -1 +4 47 2 1 +4 48 2 1 +4 49 2 -1 +4 50 2 -1 +4 51 1 1 +4 52 1 1 +4 53 1 1 +4 54 2 1 +4 55 1 1 +4 56 1 1 +4 57 1 1 +4 58 1 1 +4 59 2 1 +4 60 1 1 +4 61 2 1 +4 62 1 -1 +4 63 2 -1 +4 64 2 -1 +4 65 2 -1 +4 66 1 1 +4 67 2 -1 +4 68 1 -1 +4 69 1 -1 +4 70 1 1 +4 71 2 1 +4 72 2 -1 +4 73 2 1 +4 74 1 -1 +4 75 2 -1 +4 76 1 1 +4 77 1 1 +4 78 1 -1 +4 79 2 -1 +4 80 1 1 +4 81 2 -1 +4 82 1 1 +4 83 1 -1 +4 84 1 -1 +4 85 2 1 +4 86 1 1 +4 87 1 1 +4 88 2 1 +4 89 2 -1 +4 90 2 -1 +4 91 1 1 +4 92 1 1 +4 93 2 1 +4 94 1 1 +4 95 2 1 +4 96 2 -1 +4 97 2 1 +4 98 1 1 +4 99 2 -1 +4 100 2 1 +5 1 2 -1 +5 2 2 1 +5 3 1 -1 +5 4 2 1 +5 5 2 -1 +5 6 1 1 +5 7 1 -1 +5 8 1 -1 +5 9 2 1 +5 10 1 -1 +5 11 1 -1 +5 12 2 -1 +5 13 1 1 +5 14 1 -1 +5 15 1 1 +5 16 2 -1 +5 17 1 -1 +5 18 1 -1 +5 19 1 1 +5 20 1 1 +5 21 1 -1 +5 22 1 1 +5 23 2 -1 +5 24 2 1 +5 25 1 1 +5 26 1 1 +5 27 2 -1 +5 28 1 1 +5 29 1 1 +5 30 2 -1 +5 31 1 -1 +5 32 2 1 +5 33 1 -1 +5 34 2 -1 +5 35 2 -1 +5 36 1 1 +5 37 1 -1 +5 38 2 1 +5 39 1 1 +5 40 2 -1 +5 41 1 1 +5 42 1 1 +5 43 1 1 +5 44 1 -1 +5 45 1 1 +5 46 2 -1 +5 47 1 1 +5 48 2 1 +5 49 1 1 +5 50 1 1 +5 51 1 1 +5 52 2 -1 +5 53 1 1 +5 54 2 -1 +5 55 1 1 +5 56 1 -1 +5 57 1 1 +5 58 1 -1 +5 59 2 1 +5 60 2 1 +5 61 2 -1 
+5 62 1 -1 +5 63 2 1 +5 64 1 1 +5 65 2 1 +5 66 2 1 +5 67 1 1 +5 68 1 -1 +5 69 2 -1 +5 70 1 -1 +5 71 2 1 +5 72 1 1 +5 73 2 -1 +5 74 2 -1 +5 75 2 -1 +5 76 2 -1 +5 77 1 -1 +5 78 1 1 +5 79 1 1 +5 80 2 -1 +5 81 1 1 +5 82 2 -1 +5 83 2 1 +5 84 2 1 +5 85 1 1 +5 86 1 1 +5 87 2 1 +5 88 1 1 +5 89 1 1 +5 90 2 -1 +5 91 1 1 +5 92 2 -1 +5 93 1 -1 +5 94 1 1 +5 95 1 1 +5 96 1 1 +5 97 1 -1 +5 98 1 1 +5 99 1 1 +5 100 1 -1 +6 1 1 -1 +6 2 2 -1 +6 3 2 -1 +6 4 1 -1 +6 5 1 1 +6 6 1 1 +6 7 2 1 +6 8 1 -1 +6 9 2 -1 +6 10 2 -1 +6 11 2 1 +6 12 1 1 +6 13 2 -1 +6 14 2 -1 +6 15 2 1 +6 16 1 1 +6 17 1 -1 +6 18 2 -1 +6 19 2 1 +6 20 1 -1 +6 21 2 -1 +6 22 1 -1 +6 23 2 -1 +6 24 1 -1 +6 25 1 1 +6 26 1 1 +6 27 1 1 +6 28 1 1 +6 29 2 -1 +6 30 1 -1 +6 31 1 1 +6 32 1 1 +6 33 1 -1 +6 34 1 -1 +6 35 1 -1 +6 36 1 1 +6 37 1 -1 +6 38 2 1 +6 39 2 1 +6 40 1 1 +6 41 2 1 +6 42 1 -1 +6 43 2 -1 +6 44 1 1 +6 45 1 1 +6 46 2 -1 +6 47 1 1 +6 48 1 1 +6 49 1 1 +6 50 2 -1 +6 51 1 1 +6 52 1 1 +6 53 1 1 +6 54 1 -1 +6 55 1 1 +6 56 1 -1 +6 57 2 1 +6 58 2 -1 +6 59 1 1 +6 60 1 1 +6 61 1 1 +6 62 2 -1 +6 63 1 1 +6 64 1 -1 +6 65 1 1 +6 66 1 -1 +6 67 1 -1 +6 68 2 1 +6 69 2 -1 +6 70 1 -1 +6 71 1 1 +6 72 1 1 +6 73 2 -1 +6 74 1 -1 +6 75 1 -1 +6 76 2 -1 +6 77 2 -1 +6 78 1 1 +6 79 1 1 +6 80 1 1 +6 81 1 -1 +6 82 2 1 +6 83 1 1 +6 84 1 -1 +6 85 1 -1 +6 86 2 -1 +6 87 2 -1 +6 88 1 1 +6 89 1 1 +6 90 2 -1 +6 91 2 1 +6 92 1 1 +6 93 2 -1 +6 94 1 1 +6 95 2 1 +6 96 1 1 +6 97 1 -1 +6 98 1 -1 +6 99 1 -1 +6 100 1 1 +7 1 2 -1 +7 2 1 -1 +7 3 1 1 +7 4 2 -1 +7 5 2 -1 +7 6 1 1 +7 7 1 1 +7 8 1 -1 +7 9 1 1 +7 10 1 1 +7 11 1 -1 +7 12 1 -1 +7 13 1 1 +7 14 1 -1 +7 15 1 -1 +7 16 1 1 +7 17 1 1 +7 18 2 -1 +7 19 1 -1 +7 20 1 -1 +7 21 1 1 +7 22 2 1 +7 23 2 -1 +7 24 1 1 +7 25 1 1 +7 26 1 1 +7 27 1 -1 +7 28 2 -1 +7 29 1 1 +7 30 1 1 +7 31 2 -1 +7 32 1 1 +7 33 1 -1 +7 34 1 1 +7 35 1 1 +7 36 1 -1 +7 37 2 -1 +7 38 1 1 +7 39 1 -1 +7 40 2 -1 +7 41 1 1 +7 42 1 1 +7 43 1 1 +7 44 1 1 +7 45 1 1 +7 46 1 1 +7 47 1 -1 +7 48 1 -1 +7 49 2 1 +7 50 1 1 +7 51 2 1 +7 52 2 1 +7 53 2 -1 +7 54 2 1 +7 55 2 1 +7 56 1 1 +7 57 1 1 +7 58 1 -1 +7 59 2 -1 +7 60 1 -1 +7 61 2 -1 +7 62 1 1 +7 63 1 1 +7 64 1 1 +7 65 1 1 +7 66 1 -1 +7 67 1 1 +7 68 1 1 +7 69 1 1 +7 70 1 1 +7 71 1 1 +7 72 2 1 +7 73 1 1 +7 74 1 1 +7 75 1 1 +7 76 2 -1 +7 77 1 1 +7 78 1 -1 +7 79 2 -1 +7 80 1 1 +7 81 1 1 +7 82 2 -1 +7 83 1 -1 +7 84 1 1 +7 85 2 -1 +7 86 1 1 +7 87 1 1 +7 88 1 -1 +7 89 1 -1 +7 90 2 -1 +7 91 1 1 +7 92 1 1 +7 93 1 -1 +7 94 1 1 +7 95 1 -1 +7 96 1 1 +7 97 1 1 +7 98 2 1 +7 99 2 -1 +7 100 1 1 +8 1 2 1 +8 2 2 -1 +8 3 2 -1 +8 4 1 1 +8 5 2 1 +8 6 1 -1 +8 7 2 -1 +8 8 2 1 +8 9 1 1 +8 10 2 1 +8 11 1 1 +8 12 1 1 +8 13 2 -1 +8 14 1 1 +8 15 1 -1 +8 16 2 1 +8 17 2 -1 +8 18 2 -1 +8 19 2 1 +8 20 1 1 +8 21 2 1 +8 22 2 -1 +8 23 1 -1 +8 24 1 -1 +8 25 2 1 +8 26 2 -1 +8 27 2 1 +8 28 2 1 +8 29 1 1 +8 30 2 -1 +8 31 1 1 +8 32 1 -1 +8 33 1 1 +8 34 1 1 +8 35 2 -1 +8 36 2 -1 +8 37 1 -1 +8 38 2 -1 +8 39 1 1 +8 40 1 1 +8 41 1 1 +8 42 1 -1 +8 43 1 1 +8 44 1 1 +8 45 1 1 +8 46 1 -1 +8 47 2 -1 +8 48 2 -1 +8 49 1 1 +8 50 2 -1 +8 51 1 -1 +8 52 2 -1 +8 53 2 -1 +8 54 2 -1 +8 55 1 1 +8 56 2 1 +8 57 1 1 +8 58 1 -1 +8 59 1 -1 +8 60 2 1 +8 61 2 -1 +8 62 2 1 +8 63 2 -1 +8 64 1 -1 +8 65 2 -1 +8 66 1 1 +8 67 1 -1 +8 68 1 -1 +8 69 1 1 +8 70 2 -1 +8 71 2 -1 +8 72 2 1 +8 73 1 1 +8 74 1 -1 +8 75 1 -1 +8 76 1 1 +8 77 1 1 +8 78 1 -1 +8 79 2 -1 +8 80 2 1 +8 81 2 -1 +8 82 1 1 +8 83 1 1 +8 84 1 1 +8 85 1 1 +8 86 1 1 +8 87 1 1 +8 88 1 1 +8 89 2 -1 +8 90 1 -1 +8 91 2 1 +8 92 2 -1 +8 93 1 1 +8 94 1 1 +8 95 2 -1 +8 96 1 1 +8 97 1 1 +8 98 1 1 +8 99 1 1 +8 100 1 1 +9 1 1 1 +9 2 1 1 +9 3 1 1 +9 4 1 
1 +9 5 1 -1 +9 6 1 1 +9 7 1 1 +9 8 1 1 +9 9 1 1 +9 10 2 -1 +9 11 1 1 +9 12 2 -1 +9 13 2 -1 +9 14 1 -1 +9 15 1 1 +9 16 1 -1 +9 17 1 1 +9 18 1 1 +9 19 1 1 +9 20 1 1 +9 21 1 -1 +9 22 1 1 +9 23 2 -1 +9 24 2 1 +9 25 1 1 +9 26 1 -1 +9 27 2 -1 +9 28 1 -1 +9 29 1 -1 +9 30 2 -1 +9 31 1 -1 +9 32 1 1 +9 33 1 1 +9 34 1 -1 +9 35 1 -1 +9 36 2 -1 +9 37 2 1 +9 38 1 1 +9 39 1 1 +9 40 2 -1 +9 41 1 -1 +9 42 1 1 +9 43 1 1 +9 44 2 1 +9 45 1 1 +9 46 2 -1 +9 47 1 1 +9 48 1 1 +9 49 1 1 +9 50 2 -1 +9 51 1 -1 +9 52 1 -1 +9 53 1 1 +9 54 2 -1 +9 55 1 -1 +9 56 2 -1 +9 57 1 1 +9 58 1 -1 +9 59 1 1 +9 60 2 -1 +9 61 1 -1 +9 62 1 -1 +9 63 1 1 +9 64 1 -1 +9 65 1 1 +9 66 1 -1 +9 67 1 -1 +9 68 2 -1 +9 69 2 -1 +9 70 2 -1 +9 71 2 1 +9 72 2 1 +9 73 1 1 +9 74 1 1 +9 75 1 1 +9 76 2 1 +9 77 2 -1 +9 78 1 -1 +9 79 1 1 +9 80 1 1 +9 81 1 1 +9 82 2 1 +9 83 2 1 +9 84 1 1 +9 85 2 -1 +9 86 2 1 +9 87 2 -1 +9 88 2 -1 +9 89 1 -1 +9 90 1 -1 +9 91 2 1 +9 92 2 -1 +9 93 2 -1 +9 94 2 -1 +9 95 1 1 +9 96 1 1 +9 97 1 1 +9 98 2 -1 +9 99 1 -1 +9 100 2 1 +10 1 1 -1 +10 2 2 -1 +10 3 2 -1 +10 4 2 -1 +10 5 1 1 +10 6 1 1 +10 7 1 1 +10 8 2 -1 +10 9 1 1 +10 10 1 1 +10 11 2 -1 +10 12 1 -1 +10 13 2 -1 +10 14 2 1 +10 15 2 -1 +10 16 1 -1 +10 17 1 -1 +10 18 2 -1 +10 19 1 1 +10 20 1 1 +10 21 2 1 +10 22 2 -1 +10 23 1 -1 +10 24 1 -1 +10 25 2 -1 +10 26 1 1 +10 27 2 -1 +10 28 1 1 +10 29 2 1 +10 30 1 1 +10 31 1 1 +10 32 2 -1 +10 33 1 1 +10 34 1 1 +10 35 1 -1 +10 36 1 1 +10 37 1 1 +10 38 2 -1 +10 39 1 -1 +10 40 1 1 +10 41 1 -1 +10 42 1 -1 +10 43 1 1 +10 44 1 1 +10 45 1 1 +10 46 1 -1 +10 47 1 1 +10 48 1 -1 +10 49 2 -1 +10 50 1 1 +10 51 1 1 +10 52 1 -1 +10 53 1 1 +10 54 2 -1 +10 55 1 -1 +10 56 1 1 +10 57 1 -1 +10 58 2 -1 +10 59 2 -1 +10 60 1 1 +10 61 1 -1 +10 62 1 1 +10 63 1 1 +10 64 1 1 +10 65 1 -1 +10 66 2 -1 +10 67 2 -1 +10 68 1 1 +10 69 1 1 +10 70 1 1 +10 71 1 1 +10 72 1 1 +10 73 1 -1 +10 74 1 1 +10 75 1 -1 +10 76 1 -1 +10 77 1 1 +10 78 1 1 +10 79 2 -1 +10 80 1 1 +10 81 1 -1 +10 82 2 -1 +10 83 1 1 +10 84 1 1 +10 85 1 1 +10 86 1 -1 +10 87 1 1 +10 88 1 -1 +10 89 1 1 +10 90 2 1 +10 91 1 1 +10 92 2 -1 +10 93 1 -1 +10 94 1 1 +10 95 1 1 +10 96 2 -1 +10 97 1 -1 +10 98 2 -1 +10 99 2 -1 +10 100 2 1 +11 1 2 -1 +11 2 1 1 +11 3 2 1 +11 4 1 -1 +11 5 2 -1 +11 6 2 1 +11 7 2 -1 +11 8 1 1 +11 9 1 -1 +11 10 2 -1 +11 11 2 -1 +11 12 1 1 +11 13 1 1 +11 14 2 -1 +11 15 2 -1 +11 16 2 -1 +11 17 1 -1 +11 18 1 1 +11 19 1 1 +11 20 1 -1 +11 21 2 -1 +11 22 1 1 +11 23 1 -1 +11 24 2 -1 +11 25 1 -1 +11 26 1 1 +11 27 2 -1 +11 28 1 1 +11 29 1 1 +11 30 1 1 +11 31 1 -1 +11 32 1 1 +11 33 2 -1 +11 34 2 -1 +11 35 1 1 +11 36 1 -1 +11 37 1 1 +11 38 1 -1 +11 39 2 -1 +11 40 2 -1 +11 41 1 1 +11 42 2 -1 +11 43 1 -1 +11 44 1 -1 +11 45 1 1 +11 46 1 1 +11 47 1 -1 +11 48 1 1 +11 49 1 1 +11 50 1 1 +11 51 1 -1 +11 52 1 -1 +11 53 1 1 +11 54 1 -1 +11 55 1 1 +11 56 1 -1 +11 57 1 -1 +11 58 1 1 +11 59 2 1 +11 60 1 -1 +11 61 1 1 +11 62 1 -1 +11 63 1 1 +11 64 1 1 +11 65 1 -1 +11 66 1 1 +11 67 1 -1 +11 68 1 1 +11 69 1 1 +11 70 2 -1 +11 71 1 -1 +11 72 2 -1 +11 73 1 1 +11 74 1 1 +11 75 1 1 +11 76 2 -1 +11 77 1 -1 +11 78 2 -1 +11 79 1 -1 +11 80 1 -1 +11 81 1 -1 +11 82 2 -1 +11 83 2 -1 +11 84 2 -1 +11 85 1 -1 +11 86 1 -1 +11 87 2 -1 +11 88 1 -1 +11 89 1 -1 +11 90 2 -1 +11 91 1 1 +11 92 1 1 +11 93 1 1 +11 94 1 -1 +11 95 1 1 +11 96 1 1 +11 97 1 1 +11 98 1 1 +11 99 1 1 +11 100 1 1 +12 1 2 1 +12 2 2 -1 +12 3 2 -1 +12 4 2 -1 +12 5 1 1 +12 6 2 1 +12 7 2 1 +12 8 2 -1 +12 9 1 1 +12 10 2 -1 +12 11 2 -1 +12 12 2 1 +12 13 2 1 +12 14 1 1 +12 15 2 -1 +12 16 2 1 +12 17 2 -1 +12 18 1 -1 +12 19 1 -1 +12 20 2 1 +12 21 2 1 +12 22 1 1 +12 23 1 1 +12 
24 1 1 +12 25 2 -1 +12 26 1 -1 +12 27 2 -1 +12 28 2 -1 +12 29 2 -1 +12 30 1 1 +12 31 1 1 +12 32 1 1 +12 33 1 -1 +12 34 1 1 +12 35 2 -1 +12 36 1 1 +12 37 2 -1 +12 38 1 -1 +12 39 2 -1 +12 40 1 -1 +12 41 1 1 +12 42 1 1 +12 43 1 1 +12 44 1 -1 +12 45 1 1 +12 46 1 -1 +12 47 1 1 +12 48 1 -1 +12 49 1 1 +12 50 1 -1 +12 51 2 -1 +12 52 1 1 +12 53 1 1 +12 54 1 1 +12 55 1 1 +12 56 1 1 +12 57 2 -1 +12 58 1 -1 +12 59 2 -1 +12 60 1 1 +12 61 1 1 +12 62 1 1 +12 63 1 -1 +12 64 1 1 +12 65 1 1 +12 66 1 1 +12 67 1 1 +12 68 1 1 +12 69 1 1 +12 70 2 -1 +12 71 1 1 +12 72 1 1 +12 73 1 1 +12 74 1 -1 +12 75 1 1 +12 76 1 1 +12 77 1 1 +12 78 1 1 +12 79 1 1 +12 80 1 1 +12 81 1 1 +12 82 1 1 +12 83 1 -1 +12 84 2 -1 +12 85 2 -1 +12 86 2 -1 +12 87 2 -1 +12 88 1 1 +12 89 1 -1 +12 90 2 -1 +12 91 2 1 +12 92 2 -1 +12 93 2 1 +12 94 1 -1 +12 95 2 -1 +12 96 1 -1 +12 97 2 -1 +12 98 2 -1 +12 99 1 1 +12 100 2 -1 +13 1 2 -1 +13 2 1 1 +13 3 1 1 +13 4 1 -1 +13 5 2 -1 +13 6 1 1 +13 7 1 -1 +13 8 1 -1 +13 9 1 -1 +13 10 1 1 +13 11 2 -1 +13 12 2 -1 +13 13 1 1 +13 14 2 1 +13 15 2 -1 +13 16 2 -1 +13 17 1 1 +13 18 1 -1 +13 19 2 -1 +13 20 1 1 +13 21 1 1 +13 22 1 -1 +13 23 1 -1 +13 24 2 1 +13 25 1 1 +13 26 1 1 +13 27 1 -1 +13 28 1 1 +13 29 1 -1 +13 30 2 1 +13 31 1 -1 +13 32 2 -1 +13 33 2 -1 +13 34 2 -1 +13 35 2 1 +13 36 1 1 +13 37 1 -1 +13 38 2 -1 +13 39 2 -1 +13 40 1 -1 +13 41 1 1 +13 42 2 -1 +13 43 2 1 +13 44 1 1 +13 45 2 -1 +13 46 2 1 +13 47 1 1 +13 48 1 1 +13 49 2 -1 +13 50 2 -1 +13 51 2 -1 +13 52 1 1 +13 53 2 1 +13 54 1 1 +13 55 1 1 +13 56 1 1 +13 57 1 1 +13 58 1 1 +13 59 1 -1 +13 60 1 -1 +13 61 2 1 +13 62 2 1 +13 63 2 1 +13 64 2 -1 +13 65 2 -1 +13 66 1 1 +13 67 2 -1 +13 68 2 1 +13 69 1 1 +13 70 2 1 +13 71 2 1 +13 72 2 -1 +13 73 2 1 +13 74 1 -1 +13 75 1 -1 +13 76 1 1 +13 77 1 -1 +13 78 1 1 +13 79 1 1 +13 80 2 1 +13 81 2 -1 +13 82 2 1 +13 83 1 1 +13 84 2 -1 +13 85 1 1 +13 86 2 -1 +13 87 1 1 +13 88 1 1 +13 89 1 -1 +13 90 1 1 +13 91 1 -1 +13 92 1 1 +13 93 1 1 +13 94 1 1 +13 95 1 1 +13 96 1 1 +13 97 1 1 +13 98 1 1 +13 99 1 1 +13 100 1 1 +14 1 2 -1 +14 2 1 1 +14 3 1 1 +14 4 1 1 +14 5 1 -1 +14 6 1 1 +14 7 2 -1 +14 8 1 1 +14 9 1 1 +14 10 2 -1 +14 11 1 -1 +14 12 2 -1 +14 13 2 1 +14 14 2 -1 +14 15 2 -1 +14 16 1 1 +14 17 1 1 +14 18 1 -1 +14 19 1 1 +14 20 1 1 +14 21 1 1 +14 22 2 -1 +14 23 2 -1 +14 24 1 1 +14 25 1 1 +14 26 2 -1 +14 27 2 1 +14 28 2 1 +14 29 2 -1 +14 30 2 -1 +14 31 1 1 +14 32 2 -1 +14 33 1 -1 +14 34 1 -1 +14 35 2 1 +14 36 2 1 +14 37 1 -1 +14 38 2 1 +14 39 2 -1 +14 40 2 1 +14 41 1 -1 +14 42 2 -1 +14 43 2 -1 +14 44 2 -1 +14 45 2 -1 +14 46 2 -1 +14 47 2 -1 +14 48 1 -1 +14 49 2 1 +14 50 2 1 +14 51 2 1 +14 52 2 -1 +14 53 1 1 +14 54 2 -1 +14 55 1 1 +14 56 2 -1 +14 57 1 1 +14 58 1 1 +14 59 2 -1 +14 60 2 -1 +14 61 2 1 +14 62 1 -1 +14 63 1 1 +14 64 1 -1 +14 65 1 -1 +14 66 1 1 +14 67 1 1 +14 68 1 1 +14 69 1 1 +14 70 2 -1 +14 71 2 -1 +14 72 2 1 +14 73 2 -1 +14 74 1 1 +14 75 2 -1 +14 76 1 1 +14 77 1 1 +14 78 1 -1 +14 79 2 -1 +14 80 2 -1 +14 81 1 1 +14 82 1 1 +14 83 1 1 +14 84 1 -1 +14 85 1 1 +14 86 2 -1 +14 87 2 1 +14 88 1 1 +14 89 1 1 +14 90 2 -1 +14 91 1 1 +14 92 1 -1 +14 93 1 1 +14 94 1 1 +14 95 1 1 +14 96 2 1 +14 97 1 -1 +14 98 1 1 +14 99 1 1 +14 100 1 1 +15 1 1 -1 +15 2 2 -1 +15 3 1 1 +15 4 1 1 +15 5 1 -1 +15 6 2 1 +15 7 2 1 +15 8 2 -1 +15 9 2 -1 +15 10 1 1 +15 11 1 -1 +15 12 1 1 +15 13 1 1 +15 14 1 -1 +15 15 2 1 +15 16 1 -1 +15 17 2 1 +15 18 1 -1 +15 19 2 1 +15 20 1 1 +15 21 2 1 +15 22 1 1 +15 23 1 1 +15 24 2 -1 +15 25 2 -1 +15 26 2 -1 +15 27 2 1 +15 28 2 1 +15 29 2 1 +15 30 1 1 +15 31 1 1 +15 32 1 1 +15 33 2 -1 +15 34 1 1 +15 35 1 1 +15 36 1 1 
+15 37 2 1 +15 38 2 -1 +15 39 1 -1 +15 40 2 -1 +15 41 1 -1 +15 42 1 -1 +15 43 1 1 +15 44 1 1 +15 45 1 -1 +15 46 1 1 +15 47 1 1 +15 48 2 -1 +15 49 1 -1 +15 50 1 1 +15 51 2 1 +15 52 1 -1 +15 53 1 -1 +15 54 1 1 +15 55 1 -1 +15 56 1 1 +15 57 1 -1 +15 58 1 1 +15 59 2 1 +15 60 1 -1 +15 61 2 1 +15 62 2 1 +15 63 1 1 +15 64 2 -1 +15 65 2 -1 +15 66 1 1 +15 67 1 -1 +15 68 1 1 +15 69 1 1 +15 70 1 1 +15 71 1 -1 +15 72 1 1 +15 73 1 -1 +15 74 1 1 +15 75 1 1 +15 76 2 -1 +15 77 1 -1 +15 78 2 -1 +15 79 2 1 +15 80 1 -1 +15 81 2 -1 +15 82 2 1 +15 83 1 -1 +15 84 2 -1 +15 85 1 1 +15 86 1 1 +15 87 1 -1 +15 88 2 -1 +15 89 2 -1 +15 90 1 1 +15 91 1 1 +15 92 1 -1 +15 93 1 1 +15 94 1 1 +15 95 1 -1 +15 96 2 1 +15 97 1 1 +15 98 2 1 +15 99 1 1 +15 100 2 1 +16 1 2 -1 +16 2 2 1 +16 3 2 1 +16 4 1 1 +16 5 2 -1 +16 6 2 -1 +16 7 2 -1 +16 8 1 1 +16 9 2 -1 +16 10 1 1 +16 11 2 1 +16 12 1 -1 +16 13 1 1 +16 14 2 1 +16 15 1 1 +16 16 1 -1 +16 17 2 -1 +16 18 1 -1 +16 19 2 -1 +16 20 1 1 +16 21 1 1 +16 22 1 1 +16 23 2 1 +16 24 1 1 +16 25 1 1 +16 26 2 -1 +16 27 1 1 +16 28 2 -1 +16 29 1 1 +16 30 1 1 +16 31 1 -1 +16 32 1 -1 +16 33 1 -1 +16 34 2 -1 +16 35 1 1 +16 36 1 -1 +16 37 1 -1 +16 38 1 -1 +16 39 1 -1 +16 40 1 -1 +16 41 2 -1 +16 42 1 1 +16 43 2 -1 +16 44 1 1 +16 45 2 -1 +16 46 2 -1 +16 47 1 1 +16 48 1 1 +16 49 2 -1 +16 50 1 1 +16 51 1 1 +16 52 1 1 +16 53 1 1 +16 54 1 -1 +16 55 1 1 +16 56 1 1 +16 57 1 -1 +16 58 2 -1 +16 59 2 1 +16 60 2 -1 +16 61 1 1 +16 62 1 -1 +16 63 1 -1 +16 64 1 1 +16 65 1 1 +16 66 1 1 +16 67 1 1 +16 68 1 1 +16 69 1 -1 +16 70 1 1 +16 71 1 1 +16 72 1 -1 +16 73 1 1 +16 74 2 -1 +16 75 1 1 +16 76 1 -1 +16 77 2 1 +16 78 2 1 +16 79 1 -1 +16 80 2 -1 +16 81 1 1 +16 82 2 -1 +16 83 2 -1 +16 84 2 -1 +16 85 1 1 +16 86 1 1 +16 87 1 1 +16 88 2 1 +16 89 1 -1 +16 90 2 1 +16 91 2 -1 +16 92 2 1 +16 93 1 -1 +16 94 1 -1 +16 95 1 1 +16 96 2 -1 +16 97 1 -1 +16 98 2 -1 +16 99 2 -1 +16 100 1 1 +17 1 1 1 +17 2 1 1 +17 3 1 1 +17 4 1 1 +17 5 1 1 +17 6 1 1 +17 7 1 1 +17 8 1 1 +17 9 2 -1 +17 10 1 1 +17 11 2 1 +17 12 1 1 +17 13 1 1 +17 14 1 -1 +17 15 1 -1 +17 16 2 -1 +17 17 2 -1 +17 18 2 -1 +17 19 2 -1 +17 20 1 1 +17 21 1 1 +17 22 1 -1 +17 23 2 1 +17 24 1 -1 +17 25 1 1 +17 26 1 1 +17 27 1 1 +17 28 1 -1 +17 29 2 -1 +17 30 2 -1 +17 31 1 -1 +17 32 1 1 +17 33 1 -1 +17 34 1 1 +17 35 2 -1 +17 36 1 -1 +17 37 2 1 +17 38 2 -1 +17 39 2 -1 +17 40 1 1 +17 41 1 1 +17 42 1 1 +17 43 1 1 +17 44 1 -1 +17 45 1 1 +17 46 1 -1 +17 47 1 -1 +17 48 1 -1 +17 49 1 1 +17 50 2 1 +17 51 1 1 +17 52 1 1 +17 53 2 -1 +17 54 1 -1 +17 55 2 1 +17 56 2 1 +17 57 2 -1 +17 58 2 -1 +17 59 1 -1 +17 60 2 -1 +17 61 2 -1 +17 62 1 1 +17 63 2 1 +17 64 1 -1 +17 65 2 -1 +17 66 2 -1 +17 67 2 -1 +17 68 2 -1 +17 69 1 1 +17 70 1 1 +17 71 1 1 +17 72 1 1 +17 73 1 -1 +17 74 2 -1 +17 75 1 1 +17 76 1 1 +17 77 2 -1 +17 78 1 1 +17 79 1 -1 +17 80 1 1 +17 81 1 1 +17 82 1 -1 +17 83 1 1 +17 84 1 -1 +17 85 1 1 +17 86 1 1 +17 87 1 1 +17 88 1 1 +17 89 1 -1 +17 90 1 -1 +17 91 1 1 +17 92 1 1 +17 93 1 1 +17 94 2 -1 +17 95 1 -1 +17 96 1 -1 +17 97 1 1 +17 98 1 1 +17 99 1 1 +17 100 2 -1 +18 1 2 -1 +18 2 1 -1 +18 3 2 -1 +18 4 2 1 +18 5 2 -1 +18 6 2 -1 +18 7 1 -1 +18 8 1 1 +18 9 1 1 +18 10 2 -1 +18 11 1 1 +18 12 2 1 +18 13 1 1 +18 14 1 1 +18 15 1 1 +18 16 1 -1 +18 17 2 -1 +18 18 2 1 +18 19 1 -1 +18 20 1 1 +18 21 1 1 +18 22 1 -1 +18 23 2 -1 +18 24 2 -1 +18 25 1 1 +18 26 1 1 +18 27 2 -1 +18 28 1 1 +18 29 1 -1 +18 30 2 1 +18 31 1 1 +18 32 2 -1 +18 33 1 1 +18 34 2 1 +18 35 2 -1 +18 36 1 -1 +18 37 1 1 +18 38 1 1 +18 39 2 1 +18 40 2 -1 +18 41 2 1 +18 42 2 1 +18 43 1 -1 +18 44 2 1 +18 45 2 1 +18 46 2 -1 +18 47 1 1 +18 48 2 -1 
+18 49 2 -1 +18 50 2 1 +18 51 1 1 +18 52 1 1 +18 53 2 -1 +18 54 2 1 +18 55 2 -1 +18 56 1 1 +18 57 1 1 +18 58 2 1 +18 59 1 1 +18 60 1 -1 +18 61 2 1 +18 62 2 -1 +18 63 1 -1 +18 64 2 -1 +18 65 1 1 +18 66 2 1 +18 67 1 -1 +18 68 1 -1 +18 69 1 1 +18 70 1 -1 +18 71 2 -1 +18 72 2 -1 +18 73 1 1 +18 74 1 -1 +18 75 2 1 +18 76 1 1 +18 77 2 -1 +18 78 1 1 +18 79 2 -1 +18 80 2 -1 +18 81 1 1 +18 82 1 1 +18 83 1 1 +18 84 2 1 +18 85 2 -1 +18 86 2 1 +18 87 1 1 +18 88 1 1 +18 89 2 -1 +18 90 1 1 +18 91 1 1 +18 92 1 -1 +18 93 1 -1 +18 94 1 -1 +18 95 1 1 +18 96 1 1 +18 97 1 1 +18 98 1 -1 +18 99 2 -1 +18 100 1 -1 +19 1 1 1 +19 2 1 -1 +19 3 2 -1 +19 4 1 1 +19 5 1 1 +19 6 1 -1 +19 7 1 1 +19 8 2 -1 +19 9 1 1 +19 10 1 1 +19 11 1 -1 +19 12 2 1 +19 13 2 1 +19 14 2 -1 +19 15 1 1 +19 16 2 1 +19 17 2 -1 +19 18 2 -1 +19 19 1 1 +19 20 2 -1 +19 21 1 1 +19 22 2 -1 +19 23 1 -1 +19 24 1 1 +19 25 1 1 +19 26 1 -1 +19 27 1 1 +19 28 2 1 +19 29 1 1 +19 30 1 1 +19 31 2 1 +19 32 2 1 +19 33 2 -1 +19 34 1 -1 +19 35 1 1 +19 36 1 1 +19 37 1 1 +19 38 1 -1 +19 39 1 1 +19 40 2 1 +19 41 2 1 +19 42 2 1 +19 43 2 1 +19 44 2 -1 +19 45 1 -1 +19 46 1 1 +19 47 2 -1 +19 48 2 -1 +19 49 1 1 +19 50 1 1 +19 51 1 1 +19 52 2 -1 +19 53 1 1 +19 54 1 1 +19 55 2 -1 +19 56 1 -1 +19 57 1 -1 +19 58 1 1 +19 59 1 1 +19 60 1 1 +19 61 1 1 +19 62 1 -1 +19 63 1 -1 +19 64 1 1 +19 65 1 -1 +19 66 1 1 +19 67 1 1 +19 68 1 1 +19 69 1 1 +19 70 1 -1 +19 71 2 -1 +19 72 1 -1 +19 73 2 1 +19 74 2 -1 +19 75 2 1 +19 76 1 -1 +19 77 1 1 +19 78 2 -1 +19 79 2 -1 +19 80 1 1 +19 81 1 -1 +19 82 1 -1 +19 83 2 1 +19 84 2 -1 +19 85 1 -1 +19 86 1 1 +19 87 2 -1 +19 88 2 1 +19 89 2 1 +19 90 1 1 +19 91 2 -1 +19 92 1 -1 +19 93 2 -1 +19 94 1 1 +19 95 1 -1 +19 96 2 1 +19 97 1 1 +19 98 2 -1 +19 99 1 1 +19 100 2 -1 +20 1 1 1 +20 2 1 -1 +20 3 1 -1 +20 4 2 -1 +20 5 2 -1 +20 6 1 -1 +20 7 1 -1 +20 8 2 -1 +20 9 1 1 +20 10 1 -1 +20 11 1 -1 +20 12 1 -1 +20 13 2 -1 +20 14 2 1 +20 15 2 1 +20 16 2 -1 +20 17 2 1 +20 18 2 -1 +20 19 1 -1 +20 20 2 -1 +20 21 1 1 +20 22 1 1 +20 23 1 1 +20 24 1 1 +20 25 1 1 +20 26 1 1 +20 27 1 1 +20 28 1 1 +20 29 1 1 +20 30 1 -1 +20 31 1 -1 +20 32 2 -1 +20 33 1 -1 +20 34 1 1 +20 35 2 -1 +20 36 2 1 +20 37 1 1 +20 38 1 -1 +20 39 1 -1 +20 40 1 1 +20 41 2 1 +20 42 1 1 +20 43 2 1 +20 44 1 -1 +20 45 2 -1 +20 46 2 -1 +20 47 2 1 +20 48 1 1 +20 49 1 1 +20 50 1 1 +20 51 1 -1 +20 52 1 1 +20 53 1 -1 +20 54 1 1 +20 55 1 1 +20 56 2 1 +20 57 1 1 +20 58 1 -1 +20 59 2 -1 +20 60 2 -1 +20 61 1 1 +20 62 1 1 +20 63 1 -1 +20 64 2 1 +20 65 2 -1 +20 66 2 -1 +20 67 1 -1 +20 68 1 1 +20 69 1 -1 +20 70 2 -1 +20 71 1 1 +20 72 1 -1 +20 73 2 -1 +20 74 1 -1 +20 75 1 1 +20 76 1 -1 +20 77 1 1 +20 78 1 -1 +20 79 1 1 +20 80 1 -1 +20 81 2 -1 +20 82 1 -1 +20 83 2 -1 +20 84 1 1 +20 85 1 1 +20 86 1 -1 +20 87 2 -1 +20 88 2 1 +20 89 1 1 +20 90 1 -1 +20 91 2 -1 +20 92 1 -1 +20 93 2 -1 +20 94 2 1 +20 95 1 -1 +20 96 2 1 +20 97 2 -1 +20 98 1 1 +20 99 1 -1 +20 100 2 1 diff --git a/Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt b/Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt new file mode 100644 index 00000000..68ac99e3 --- /dev/null +++ b/Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt @@ -0,0 +1,3001 @@ +subjID choice outcome +1 4 33 +1 3 84 +1 3 88 +1 2 36 +1 1 67 +1 2 28 +1 1 74 +1 1 76 +1 1 79 +1 1 84 +1 1 82 +1 3 87 +1 3 85 +1 4 48 +1 1 79 +1 3 76 +1 3 73 +1 1 75 +1 1 71 +1 3 61 +1 3 60 +1 1 72 +1 1 66 +1 1 61 +1 4 42 +1 2 41 +1 1 57 +1 4 32 +1 3 67 +1 1 57 +1 1 58 +1 3 71 +1 3 66 +1 2 41 +1 1 70 +1 1 73 +1 1 74 +1 4 45 +1 1 72 +1 1 73 +1 1 70 +1 1 66 +1 1 69 +1 4 42 +1 3 58 +1 1 68 +1 2 53 
+1 3 58 +1 4 50 +1 1 73 +1 3 65 +1 2 44 +1 3 64 +1 4 44 +1 2 41 +1 1 61 +1 1 56 +1 4 44 +1 1 51 +1 1 55 +1 4 47 +1 4 48 +1 1 59 +1 3 55 +1 3 44 +1 1 59 +1 2 25 +1 1 59 +1 2 31 +1 3 45 +1 1 58 +1 1 56 +1 1 58 +1 3 51 +1 4 52 +1 1 55 +1 4 56 +1 4 55 +1 1 54 +1 1 50 +1 3 58 +1 1 53 +1 1 51 +1 2 31 +1 3 58 +1 4 55 +1 4 55 +1 3 59 +1 1 50 +1 1 54 +1 1 53 +1 4 52 +1 3 57 +1 4 60 +1 4 60 +1 4 58 +1 4 60 +1 4 55 +1 4 56 +1 4 59 +1 2 36 +1 4 59 +1 3 48 +1 2 41 +1 4 62 +1 4 62 +1 1 44 +1 2 48 +1 4 66 +1 2 53 +1 3 56 +1 2 56 +1 2 61 +1 3 49 +1 2 58 +1 4 68 +1 2 54 +1 3 49 +1 4 70 +1 4 69 +1 4 73 +1 4 77 +1 2 54 +1 4 70 +1 1 49 +1 4 69 +1 1 50 +1 2 63 +1 1 55 +1 2 57 +1 3 53 +1 2 57 +1 4 85 +1 4 85 +1 4 88 +1 4 82 +1 2 62 +1 4 75 +1 3 61 +1 3 61 +1 3 62 +1 4 77 +1 2 66 +1 3 59 +1 4 74 +1 4 79 +1 4 79 +1 1 73 +1 4 76 +1 4 76 +1 4 78 +1 4 77 +1 4 78 +1 2 66 +1 4 80 +1 4 74 +1 1 69 +1 4 69 +1 4 73 +1 3 70 +1 3 67 +1 1 69 +1 1 71 +1 2 79 +1 1 68 +1 2 82 +1 2 84 +1 2 86 +1 2 84 +1 2 82 +1 2 84 +1 2 85 +1 2 82 +1 3 76 +1 1 62 +1 2 85 +1 3 74 +1 3 70 +1 3 69 +1 2 86 +1 3 60 +1 2 89 +1 2 86 +1 2 81 +1 2 80 +1 3 64 +1 3 67 +1 2 86 +1 1 56 +1 2 91 +1 2 91 +1 4 55 +1 2 87 +1 4 52 +1 2 85 +1 3 63 +1 2 92 +1 2 90 +1 2 90 +1 3 56 +1 2 89 +1 3 60 +1 3 60 +1 2 88 +1 2 84 +1 2 86 +1 3 53 +1 2 81 +1 2 82 +1 3 63 +1 2 78 +1 1 57 +1 2 80 +1 4 48 +1 4 44 +1 3 61 +1 3 64 +1 4 40 +1 3 67 +1 4 39 +1 1 60 +1 1 59 +1 3 68 +1 2 73 +1 3 70 +1 2 70 +1 2 70 +1 3 77 +1 2 74 +1 1 62 +1 3 82 +1 4 29 +1 4 29 +1 4 34 +1 3 73 +1 2 71 +1 2 67 +1 2 61 +1 2 60 +1 1 76 +1 1 70 +1 2 60 +1 2 57 +1 1 71 +1 1 69 +1 1 67 +1 3 58 +1 3 55 +1 2 50 +1 1 62 +1 2 52 +1 4 47 +1 2 54 +1 3 50 +1 2 53 +1 1 66 +1 4 51 +1 1 65 +1 3 50 +1 2 50 +1 3 51 +1 2 47 +1 1 60 +1 3 52 +1 4 47 +1 2 32 +1 3 56 +1 4 46 +1 1 63 +1 4 43 +1 2 24 +1 2 27 +1 1 63 +1 1 61 +1 4 40 +1 1 65 +1 1 69 +1 3 70 +1 2 25 +1 1 71 +1 1 73 +1 3 62 +1 3 69 +1 3 65 +1 1 72 +1 3 63 +1 1 75 +1 3 68 +1 2 28 +1 3 61 +1 3 64 +1 3 63 +1 4 36 +1 3 58 +1 2 35 +1 1 64 +1 3 67 +1 3 68 +1 3 66 +1 3 64 +2 1 62 +2 4 34 +2 2 36 +2 3 88 +2 3 91 +2 3 97 +2 3 94 +2 3 94 +2 3 93 +2 3 93 +2 3 88 +2 3 87 +2 3 85 +2 3 85 +2 3 81 +2 3 76 +2 3 73 +2 3 65 +2 3 64 +2 3 61 +2 3 60 +2 1 72 +2 1 66 +2 1 61 +2 1 62 +2 1 61 +2 3 62 +2 2 43 +2 1 56 +2 3 68 +2 3 68 +2 1 62 +2 4 41 +2 3 64 +2 1 70 +2 1 73 +2 1 74 +2 1 74 +2 1 72 +2 3 59 +2 1 70 +2 1 66 +2 1 69 +2 1 69 +2 2 46 +2 1 68 +2 1 70 +2 1 70 +2 1 72 +2 1 73 +2 1 70 +2 3 61 +2 3 64 +2 3 64 +2 1 66 +2 1 61 +2 1 56 +2 1 53 +2 1 51 +2 3 58 +2 4 47 +2 3 60 +2 3 57 +2 3 55 +2 3 44 +2 1 59 +2 3 44 +2 1 59 +2 4 50 +2 1 59 +2 1 58 +2 1 56 +2 2 20 +2 1 59 +2 4 52 +2 4 55 +2 1 53 +2 3 61 +2 3 58 +2 3 57 +2 3 58 +2 3 56 +2 4 57 +2 3 58 +2 4 57 +2 3 58 +2 3 59 +2 1 51 +2 4 61 +2 3 54 +2 3 50 +2 1 47 +2 4 57 +2 3 62 +2 1 49 +2 3 61 +2 1 46 +2 4 55 +2 1 42 +2 3 55 +2 3 53 +2 2 39 +2 1 43 +2 4 57 +2 3 50 +2 4 62 +2 1 44 +2 4 63 +2 4 66 +2 4 71 +2 4 71 +2 4 67 +2 4 66 +2 4 69 +2 4 72 +2 4 68 +2 4 70 +2 4 71 +2 4 70 +2 4 69 +2 4 73 +2 4 77 +2 4 72 +2 4 70 +2 4 68 +2 4 69 +2 4 72 +2 4 75 +2 2 58 +2 4 80 +2 4 81 +2 4 84 +2 4 85 +2 4 85 +2 4 88 +2 4 82 +2 4 80 +2 4 75 +2 4 73 +2 4 75 +2 4 76 +2 4 77 +2 4 77 +2 4 77 +2 4 74 +2 4 79 +2 4 79 +2 4 79 +2 4 76 +2 4 76 +2 4 78 +2 4 77 +2 4 78 +2 4 76 +2 4 80 +2 4 74 +2 4 72 +2 4 69 +2 4 73 +2 2 76 +2 2 76 +2 4 68 +2 2 78 +2 2 79 +2 2 82 +2 2 82 +2 2 84 +2 2 86 +2 2 84 +2 2 82 +2 2 84 +2 2 85 +2 2 82 +2 2 84 +2 2 84 +2 2 85 +2 2 88 +2 2 90 +2 2 87 +2 2 86 +2 2 86 +2 2 89 +2 2 86 +2 2 81 +2 2 80 +2 2 77 +2 2 81 +2 2 86 +2 2 85 +2 2 91 +2 2 91 +2 
2 87 +2 2 87 +2 2 83 +2 4 51 +2 2 91 +2 2 92 +2 2 90 +2 2 90 +2 2 91 +2 2 89 +2 2 88 +2 2 90 +2 2 88 +2 2 84 +2 2 86 +2 2 81 +2 2 81 +2 2 82 +2 2 81 +2 2 78 +2 2 78 +2 2 80 +2 2 80 +2 2 80 +2 2 81 +2 2 78 +2 2 80 +2 2 78 +2 2 76 +2 2 78 +2 2 74 +2 2 73 +2 2 73 +2 2 73 +2 2 70 +2 2 70 +2 2 74 +2 2 74 +2 2 76 +2 2 74 +2 2 75 +2 2 73 +2 2 73 +2 2 69 +2 2 71 +2 4 32 +2 2 61 +2 2 60 +2 1 76 +2 1 70 +2 1 72 +2 1 69 +2 1 71 +2 2 52 +2 1 67 +2 1 63 +2 2 49 +2 1 63 +2 1 62 +2 2 52 +2 1 56 +2 3 52 +2 1 60 +2 1 62 +2 1 66 +2 1 67 +2 1 65 +2 1 63 +2 1 60 +2 1 61 +2 1 59 +2 2 45 +2 1 62 +2 1 64 +2 1 64 +2 1 63 +2 1 63 +2 1 63 +2 1 63 +2 1 63 +2 1 64 +2 1 63 +2 3 68 +2 3 68 +2 3 69 +2 3 69 +2 3 70 +2 3 67 +2 3 64 +2 3 60 +2 3 62 +2 1 73 +2 1 73 +2 1 72 +2 1 73 +2 1 75 +2 1 72 +2 1 72 +2 1 67 +2 1 64 +2 1 65 +2 1 65 +2 3 58 +2 1 63 +2 3 57 +2 4 27 +2 1 65 +2 1 62 +2 3 64 +3 3 85 +3 1 60 +3 3 88 +3 2 36 +3 3 91 +3 3 97 +3 3 94 +3 3 94 +3 3 93 +3 3 93 +3 4 37 +3 3 87 +3 4 46 +3 1 82 +3 3 81 +3 3 76 +3 3 73 +3 1 75 +3 3 64 +3 1 71 +3 1 70 +3 1 72 +3 1 66 +3 3 67 +3 1 62 +3 3 68 +3 1 57 +3 3 61 +3 3 67 +3 3 68 +3 4 37 +3 3 71 +3 2 37 +3 1 65 +3 3 68 +3 4 41 +3 1 74 +3 1 74 +3 3 64 +3 4 48 +3 4 43 +3 3 56 +3 1 69 +3 4 42 +3 4 44 +3 3 59 +3 1 70 +3 1 70 +3 1 72 +3 3 64 +3 3 65 +3 1 67 +3 2 45 +3 1 65 +3 1 66 +3 2 38 +3 3 67 +3 3 65 +3 2 29 +3 4 48 +3 3 62 +3 1 57 +3 1 59 +3 1 57 +3 3 44 +3 4 55 +3 4 51 +3 1 59 +3 4 50 +3 1 59 +3 1 58 +3 2 22 +3 4 50 +3 4 53 +3 3 51 +3 4 55 +3 4 56 +3 3 61 +3 3 58 +3 1 50 +3 4 52 +3 4 53 +3 3 55 +3 3 58 +3 3 58 +3 4 55 +3 3 59 +3 1 51 +3 1 50 +3 4 61 +3 4 56 +3 4 52 +3 4 57 +3 2 31 +3 1 49 +3 3 61 +3 3 57 +3 4 55 +3 3 58 +3 2 32 +3 3 53 +3 3 51 +3 4 60 +3 4 57 +3 4 62 +3 1 47 +3 3 47 +3 2 48 +3 3 53 +3 1 38 +3 1 41 +3 4 67 +3 4 66 +3 2 58 +3 2 58 +3 2 55 +3 3 49 +3 2 53 +3 2 54 +3 3 51 +3 2 52 +3 3 48 +3 3 45 +3 4 70 +3 2 55 +3 2 59 +3 4 72 +3 4 75 +3 1 55 +3 4 80 +3 2 61 +3 1 64 +3 4 85 +3 4 85 +3 2 60 +3 4 82 +3 4 80 +3 4 75 +3 4 73 +3 4 75 +3 4 76 +3 4 77 +3 4 77 +3 4 77 +3 4 74 +3 4 79 +3 4 79 +3 1 73 +3 1 72 +3 4 76 +3 4 78 +3 1 66 +3 3 68 +3 4 76 +3 3 68 +3 1 65 +3 3 68 +3 4 69 +3 2 78 +3 1 72 +3 4 69 +3 1 69 +3 1 71 +3 2 79 +3 2 82 +3 1 68 +3 1 69 +3 2 86 +3 2 84 +3 1 62 +3 3 64 +3 2 85 +3 2 82 +3 2 84 +3 2 84 +3 2 85 +3 2 88 +3 2 90 +3 1 60 +3 2 86 +3 1 53 +3 2 89 +3 2 86 +3 1 62 +3 2 80 +3 2 77 +3 2 81 +3 3 67 +3 2 85 +3 2 91 +3 2 91 +3 1 52 +3 2 87 +3 2 83 +3 2 85 +3 2 91 +3 2 92 +3 2 90 +3 2 90 +3 1 47 +3 2 89 +3 2 88 +3 2 90 +3 2 88 +3 2 84 +3 2 86 +3 2 81 +3 2 81 +3 3 59 +3 4 49 +3 2 78 +3 2 78 +3 2 80 +3 2 80 +3 2 80 +3 2 81 +3 2 78 +3 2 80 +3 2 78 +3 1 60 +3 4 35 +3 3 68 +3 2 73 +3 1 64 +3 3 70 +3 3 72 +3 3 75 +3 2 74 +3 2 74 +3 2 76 +3 2 74 +3 3 79 +3 2 73 +3 1 69 +3 3 73 +3 3 69 +3 3 67 +3 2 61 +3 3 66 +3 3 63 +3 3 62 +3 2 60 +3 2 57 +3 3 66 +3 3 63 +3 2 52 +3 2 51 +3 2 49 +3 4 45 +3 2 46 +3 1 61 +3 2 49 +3 3 52 +3 4 52 +3 1 62 +3 3 47 +3 3 49 +3 3 51 +3 4 48 +3 1 60 +3 4 50 +3 4 53 +3 4 52 +3 1 62 +3 1 64 +3 3 52 +3 2 26 +3 4 46 +3 2 31 +3 4 43 +3 3 67 +3 1 64 +3 3 65 +3 3 68 +3 1 62 +3 1 65 +3 3 69 +3 3 70 +3 4 46 +3 3 64 +3 3 60 +3 4 45 +3 4 45 +3 1 73 +3 3 65 +3 2 25 +3 1 75 +3 3 68 +3 4 40 +3 4 37 +3 1 64 +3 1 65 +3 3 63 +3 1 66 +3 3 57 +3 1 64 +3 3 67 +3 1 65 +3 1 62 +3 1 56 +4 2 38 +4 3 84 +4 3 88 +4 3 88 +4 3 91 +4 3 97 +4 3 94 +4 3 94 +4 3 93 +4 3 93 +4 3 88 +4 3 87 +4 3 85 +4 3 85 +4 3 81 +4 3 76 +4 3 73 +4 3 65 +4 3 64 +4 1 71 +4 1 70 +4 3 61 +4 3 68 +4 2 31 +4 3 69 +4 3 68 +4 1 57 +4 3 61 +4 3 67 +4 4 33 +4 3 68 +4 1 62 +4 1 63 +4 1 65 +4 1 70 +4 1 
73 +4 1 74 +4 1 74 +4 1 72 +4 4 48 +4 4 43 +4 1 66 +4 2 48 +4 1 69 +4 3 58 +4 3 59 +4 3 59 +4 3 58 +4 3 62 +4 1 73 +4 1 70 +4 1 67 +4 1 67 +4 2 43 +4 3 65 +4 1 61 +4 1 56 +4 1 53 +4 1 51 +4 4 48 +4 3 62 +4 4 48 +4 1 59 +4 2 31 +4 3 44 +4 3 44 +4 1 59 +4 3 47 +4 2 31 +4 1 59 +4 4 46 +4 1 56 +4 1 58 +4 3 51 +4 2 28 +4 4 55 +4 3 58 +4 1 52 +4 3 58 +4 2 35 +4 4 52 +4 3 56 +4 1 51 +4 3 58 +4 3 58 +4 4 55 +4 3 59 +4 3 59 +4 4 61 +4 3 54 +4 4 56 +4 4 52 +4 2 33 +4 4 60 +4 1 49 +4 3 61 +4 3 57 +4 3 57 +4 3 58 +4 4 59 +4 3 53 +4 3 51 +4 4 60 +4 4 57 +4 4 62 +4 1 47 +4 3 47 +4 4 63 +4 4 66 +4 1 38 +4 2 56 +4 4 67 +4 4 66 +4 4 69 +4 4 72 +4 4 68 +4 4 70 +4 4 71 +4 4 70 +4 1 51 +4 4 73 +4 4 77 +4 4 72 +4 4 70 +4 4 68 +4 4 69 +4 4 72 +4 3 48 +4 4 79 +4 1 57 +4 4 81 +4 4 84 +4 4 85 +4 2 58 +4 3 53 +4 4 82 +4 4 80 +4 4 75 +4 2 64 +4 4 75 +4 4 76 +4 1 73 +4 3 59 +4 4 77 +4 3 61 +4 3 65 +4 4 79 +4 2 66 +4 1 72 +4 2 68 +4 1 69 +4 4 77 +4 4 78 +4 3 68 +4 4 80 +4 2 68 +4 3 68 +4 2 75 +4 3 71 +4 2 76 +4 3 67 +4 2 72 +4 4 69 +4 2 79 +4 4 66 +4 2 82 +4 2 84 +4 2 86 +4 4 70 +4 2 82 +4 1 66 +4 4 78 +4 2 82 +4 3 76 +4 2 84 +4 4 72 +4 2 88 +4 2 90 +4 3 69 +4 4 76 +4 1 53 +4 4 73 +4 4 69 +4 3 64 +4 2 80 +4 3 64 +4 2 81 +4 2 86 +4 2 85 +4 2 91 +4 2 91 +4 2 87 +4 2 87 +4 2 83 +4 2 85 +4 2 91 +4 2 92 +4 4 49 +4 2 90 +4 1 47 +4 3 59 +4 2 88 +4 3 60 +4 2 88 +4 3 57 +4 2 86 +4 2 81 +4 2 81 +4 2 82 +4 2 81 +4 2 78 +4 2 78 +4 2 80 +4 2 80 +4 2 80 +4 2 81 +4 2 78 +4 2 80 +4 1 57 +4 2 76 +4 2 78 +4 2 74 +4 2 73 +4 2 73 +4 2 73 +4 2 70 +4 3 75 +4 3 77 +4 3 76 +4 3 79 +4 2 74 +4 2 75 +4 3 75 +4 3 76 +4 2 69 +4 3 69 +4 2 67 +4 4 32 +4 2 60 +4 3 63 +4 1 70 +4 1 72 +4 2 57 +4 4 36 +4 3 63 +4 1 67 +4 2 51 +4 2 49 +4 1 63 +4 4 46 +4 3 51 +4 4 47 +4 3 52 +4 3 50 +4 1 62 +4 2 55 +4 2 55 +4 3 51 +4 1 63 +4 1 60 +4 1 61 +4 4 53 +4 1 60 +4 2 41 +4 4 47 +4 1 64 +4 1 63 +4 1 63 +4 3 58 +4 1 63 +4 3 67 +4 1 64 +4 1 63 +4 3 68 +4 3 68 +4 4 44 +4 3 69 +4 3 70 +4 3 67 +4 4 45 +4 3 60 +4 1 73 +4 1 73 +4 1 73 +4 4 39 +4 3 63 +4 4 37 +4 2 23 +4 3 67 +4 3 61 +4 4 36 +4 1 65 +4 3 63 +4 3 58 +4 1 63 +4 3 57 +4 3 67 +4 1 65 +4 3 66 +4 1 56 +5 3 85 +5 2 40 +5 3 88 +5 4 40 +5 1 67 +5 3 97 +5 3 94 +5 3 94 +5 3 93 +5 1 84 +5 3 88 +5 1 81 +5 3 85 +5 3 85 +5 3 81 +5 3 76 +5 3 73 +5 1 75 +5 3 64 +5 3 61 +5 3 60 +5 1 72 +5 1 66 +5 1 61 +5 1 62 +5 4 42 +5 3 62 +5 3 61 +5 1 56 +5 2 40 +5 4 37 +5 3 71 +5 1 63 +5 3 64 +5 1 70 +5 1 73 +5 1 74 +5 1 74 +5 3 64 +5 1 73 +5 1 70 +5 1 66 +5 1 69 +5 3 57 +5 1 71 +5 3 59 +5 2 53 +5 1 70 +5 1 72 +5 1 73 +5 4 46 +5 1 67 +5 3 64 +5 2 43 +5 1 66 +5 2 38 +5 3 67 +5 3 65 +5 3 59 +5 1 55 +5 2 29 +5 3 60 +5 4 51 +5 4 53 +5 3 44 +5 4 55 +5 1 59 +5 1 59 +5 4 50 +5 4 48 +5 1 58 +5 1 56 +5 3 46 +5 1 59 +5 1 61 +5 1 55 +5 3 58 +5 2 30 +5 4 57 +5 1 50 +5 1 48 +5 4 53 +5 3 55 +5 1 57 +5 1 51 +5 3 58 +5 3 59 +5 1 51 +5 4 61 +5 4 61 +5 1 53 +5 3 53 +5 4 57 +5 2 31 +5 4 60 +5 4 58 +5 3 57 +5 1 44 +5 1 42 +5 2 32 +5 4 59 +5 3 51 +5 4 60 +5 3 47 +5 4 62 +5 4 62 +5 3 47 +5 2 48 +5 4 66 +5 4 71 +5 4 71 +5 4 67 +5 3 48 +5 4 69 +5 4 72 +5 4 68 +5 2 54 +5 2 53 +5 4 70 +5 2 53 +5 1 51 +5 3 48 +5 2 54 +5 4 70 +5 4 68 +5 4 69 +5 2 60 +5 4 75 +5 4 79 +5 4 80 +5 2 61 +5 2 57 +5 4 85 +5 4 85 +5 2 60 +5 2 62 +5 4 80 +5 3 60 +5 3 61 +5 2 60 +5 4 76 +5 2 64 +5 4 77 +5 4 77 +5 4 74 +5 2 66 +5 4 79 +5 4 79 +5 4 76 +5 2 68 +5 4 78 +5 4 77 +5 2 69 +5 4 76 +5 4 80 +5 2 68 +5 2 72 +5 2 75 +5 4 73 +5 4 74 +5 4 69 +5 2 72 +5 4 69 +5 1 72 +5 1 68 +5 4 62 +5 3 71 +5 1 67 +5 4 70 +5 2 82 +5 2 84 +5 2 85 +5 2 82 +5 4 76 +5 3 73 +5 2 85 +5 2 88 +5 2 90 +5 1 60 +5 2 86 
+5 1 53 +5 2 89 +5 2 86 +5 2 81 +5 3 66 +5 2 77 +5 2 81 +5 2 86 +5 2 85 +5 2 91 +5 1 54 +5 2 87 +5 4 56 +5 2 83 +5 2 85 +5 2 91 +5 2 92 +5 2 90 +5 2 90 +5 2 91 +5 2 89 +5 2 88 +5 2 90 +5 2 88 +5 2 84 +5 2 86 +5 2 81 +5 2 81 +5 2 82 +5 2 81 +5 1 55 +5 4 50 +5 1 54 +5 2 80 +5 2 80 +5 2 81 +5 2 78 +5 2 80 +5 2 78 +5 2 76 +5 2 78 +5 3 68 +5 2 73 +5 1 64 +5 1 61 +5 4 26 +5 2 70 +5 2 74 +5 2 74 +5 3 79 +5 3 82 +5 3 79 +5 3 75 +5 3 76 +5 2 69 +5 3 69 +5 3 67 +5 2 61 +5 4 36 +5 3 63 +5 3 62 +5 4 34 +5 3 59 +5 2 55 +5 4 39 +5 2 52 +5 2 51 +5 2 49 +5 2 50 +5 4 46 +5 3 51 +5 2 49 +5 2 54 +5 3 50 +5 3 52 +5 1 66 +5 1 67 +5 1 65 +5 3 50 +5 2 50 +5 1 61 +5 1 59 +5 1 60 +5 4 50 +5 4 47 +5 2 32 +5 3 56 +5 1 63 +5 3 58 +5 2 26 +5 1 63 +5 4 43 +5 1 63 +5 4 38 +5 1 62 +5 2 20 +5 1 69 +5 1 70 +5 4 46 +5 1 71 +5 1 73 +5 3 62 +5 4 45 +5 1 73 +5 4 39 +5 4 38 +5 3 66 +5 3 68 +5 1 72 +5 1 67 +5 3 64 +5 3 63 +5 1 65 +5 3 58 +5 3 57 +5 1 64 +5 3 67 +5 2 42 +5 3 66 +5 2 46 +6 2 38 +6 1 60 +6 3 88 +6 3 88 +6 4 40 +6 3 97 +6 3 94 +6 1 76 +6 3 93 +6 3 93 +6 2 29 +6 3 87 +6 3 85 +6 2 30 +6 4 49 +6 1 77 +6 1 76 +6 3 65 +6 3 64 +6 1 71 +6 1 70 +6 3 61 +6 3 68 +6 1 61 +6 1 62 +6 4 42 +6 2 38 +6 3 61 +6 1 56 +6 4 33 +6 3 68 +6 3 71 +6 2 37 +6 3 64 +6 3 68 +6 1 73 +6 1 74 +6 1 74 +6 1 72 +6 3 59 +6 1 70 +6 1 66 +6 4 41 +6 1 69 +6 1 71 +6 1 68 +6 4 53 +6 3 58 +6 1 72 +6 1 73 +6 4 46 +6 1 67 +6 1 67 +6 1 65 +6 3 65 +6 1 61 +6 1 56 +6 1 53 +6 1 51 +6 2 28 +6 4 47 +6 4 48 +6 1 59 +6 1 57 +6 4 56 +6 4 55 +6 4 51 +6 2 31 +6 1 59 +6 1 59 +6 1 58 +6 3 48 +6 3 46 +6 1 59 +6 1 61 +6 1 55 +6 1 53 +6 2 30 +6 4 57 +6 1 50 +6 3 58 +6 3 56 +6 4 57 +6 4 59 +6 2 34 +6 4 55 +6 3 59 +6 3 59 +6 1 50 +6 1 54 +6 4 56 +6 4 52 +6 3 57 +6 3 62 +6 4 60 +6 4 58 +6 3 57 +6 3 57 +6 3 58 +6 4 59 +6 4 59 +6 2 39 +6 4 60 +6 2 41 +6 1 42 +6 3 52 +6 2 47 +6 4 63 +6 3 53 +6 4 71 +6 1 41 +6 3 50 +6 4 66 +6 1 51 +6 3 54 +6 2 55 +6 2 54 +6 1 52 +6 3 47 +6 1 51 +6 2 52 +6 1 49 +6 2 54 +6 1 47 +6 4 68 +6 2 59 +6 3 47 +6 3 48 +6 1 55 +6 2 57 +6 2 61 +6 2 57 +6 2 56 +6 1 63 +6 4 88 +6 4 82 +6 2 62 +6 3 60 +6 4 73 +6 4 75 +6 4 76 +6 2 64 +6 2 66 +6 3 59 +6 2 67 +6 1 70 +6 3 61 +6 2 66 +6 3 65 +6 1 71 +6 1 69 +6 2 66 +6 1 67 +6 2 66 +6 4 80 +6 4 74 +6 4 72 +6 2 75 +6 4 73 +6 1 72 +6 1 70 +6 1 69 +6 1 71 +6 1 72 +6 4 66 +6 3 71 +6 4 61 +6 3 66 +6 3 65 +6 3 61 +6 2 84 +6 3 64 +6 3 70 +6 1 68 +6 1 62 +6 1 60 +6 1 64 +6 1 62 +6 4 70 +6 2 86 +6 2 86 +6 1 55 +6 4 69 +6 2 81 +6 2 80 +6 3 64 +6 1 63 +6 2 86 +6 2 85 +6 4 55 +6 1 54 +6 1 52 +6 3 60 +6 3 63 +6 4 51 +6 1 47 +6 4 49 +6 3 62 +6 2 90 +6 2 91 +6 2 89 +6 2 88 +6 2 90 +6 1 51 +6 2 84 +6 4 52 +6 2 81 +6 2 81 +6 2 82 +6 3 63 +6 3 60 +6 1 57 +6 2 80 +6 4 48 +6 2 80 +6 2 81 +6 1 53 +6 2 80 +6 2 78 +6 3 65 +6 1 60 +6 4 33 +6 3 68 +6 2 73 +6 2 73 +6 2 70 +6 4 26 +6 2 74 +6 2 74 +6 2 76 +6 2 74 +6 2 75 +6 3 75 +6 3 76 +6 3 73 +6 2 71 +6 1 70 +6 3 65 +6 3 66 +6 4 32 +6 2 62 +6 1 72 +6 4 35 +6 3 66 +6 1 69 +6 1 67 +6 2 51 +6 3 55 +6 2 50 +6 1 62 +6 2 52 +6 1 56 +6 4 48 +6 2 59 +6 3 52 +6 2 55 +6 2 55 +6 1 65 +6 3 50 +6 4 47 +6 2 51 +6 1 59 +6 2 45 +6 1 62 +6 3 54 +6 3 52 +6 1 63 +6 3 60 +6 2 31 +6 1 63 +6 4 42 +6 4 43 +6 3 65 +6 3 68 +6 2 23 +6 3 69 +6 1 69 +6 1 70 +6 3 67 +6 1 71 +6 3 60 +6 1 73 +6 3 69 +6 3 65 +6 1 72 +6 3 63 +6 1 75 +6 1 72 +6 2 28 +6 1 67 +6 2 37 +6 4 37 +6 4 36 +6 2 33 +6 3 57 +6 1 64 +6 3 67 +6 4 27 +6 1 62 +6 3 64 +7 4 33 +7 1 60 +7 2 36 +7 3 88 +7 3 91 +7 3 97 +7 3 94 +7 3 94 +7 3 93 +7 3 93 +7 3 88 +7 3 87 +7 3 85 +7 1 82 +7 3 81 +7 1 77 +7 3 73 +7 3 65 +7 1 71 +7 3 61 +7 2 23 +7 1 72 +7 4 45 +7 1 61 +7 
3 69 +7 2 41 +7 1 57 +7 3 61 +7 1 56 +7 2 40 +7 3 68 +7 3 71 +7 3 66 +7 1 65 +7 1 70 +7 1 73 +7 1 74 +7 1 74 +7 1 72 +7 1 73 +7 2 45 +7 3 56 +7 1 69 +7 4 42 +7 1 71 +7 1 68 +7 1 70 +7 1 70 +7 4 50 +7 3 64 +7 3 65 +7 1 67 +7 3 64 +7 1 65 +7 3 65 +7 1 61 +7 1 56 +7 3 65 +7 3 59 +7 2 28 +7 4 47 +7 1 57 +7 3 57 +7 3 55 +7 1 57 +7 1 59 +7 3 44 +7 4 51 +7 4 50 +7 4 48 +7 1 58 +7 4 48 +7 1 58 +7 1 59 +7 4 52 +7 3 55 +7 1 53 +7 2 30 +7 1 54 +7 4 55 +7 3 58 +7 3 56 +7 1 51 +7 4 59 +7 4 57 +7 1 49 +7 1 51 +7 4 60 +7 1 50 +7 4 61 +7 1 53 +7 4 52 +7 3 57 +7 3 62 +7 3 63 +7 3 61 +7 3 57 +7 2 28 +7 4 56 +7 1 43 +7 4 59 +7 3 51 +7 4 60 +7 4 57 +7 1 42 +7 3 52 +7 3 47 +7 4 63 +7 4 66 +7 4 71 +7 2 56 +7 1 44 +7 3 48 +7 4 69 +7 4 72 +7 2 55 +7 1 54 +7 2 53 +7 3 47 +7 4 69 +7 2 52 +7 4 77 +7 4 72 +7 3 46 +7 4 68 +7 4 69 +7 4 72 +7 4 75 +7 3 49 +7 4 80 +7 4 81 +7 4 84 +7 4 85 +7 4 85 +7 4 88 +7 4 82 +7 4 80 +7 4 75 +7 1 67 +7 4 75 +7 4 76 +7 4 77 +7 4 77 +7 4 77 +7 4 74 +7 1 70 +7 4 79 +7 4 79 +7 1 72 +7 4 76 +7 4 78 +7 4 77 +7 4 78 +7 4 76 +7 4 80 +7 4 74 +7 1 69 +7 4 69 +7 4 73 +7 4 74 +7 4 69 +7 1 69 +7 1 71 +7 4 68 +7 1 68 +7 4 62 +7 1 69 +7 1 67 +7 4 70 +7 3 61 +7 3 64 +7 1 60 +7 3 70 +7 1 68 +7 1 62 +7 3 72 +7 3 74 +7 1 62 +7 4 70 +7 1 61 +7 3 60 +7 4 73 +7 1 56 +7 4 67 +7 3 66 +7 4 65 +7 4 59 +7 3 67 +7 3 68 +7 3 63 +7 4 56 +7 2 87 +7 2 87 +7 1 46 +7 2 85 +7 3 63 +7 2 92 +7 2 90 +7 2 90 +7 2 91 +7 2 89 +7 2 88 +7 2 90 +7 2 88 +7 2 84 +7 2 86 +7 2 81 +7 2 81 +7 3 59 +7 2 81 +7 2 78 +7 2 78 +7 2 80 +7 2 80 +7 2 80 +7 3 61 +7 2 78 +7 2 80 +7 2 78 +7 3 65 +7 2 78 +7 2 74 +7 3 68 +7 1 64 +7 2 73 +7 2 70 +7 2 70 +7 2 74 +7 3 76 +7 1 62 +7 3 82 +7 1 64 +7 3 75 +7 2 73 +7 2 69 +7 2 71 +7 3 67 +7 3 65 +7 4 36 +7 3 63 +7 3 62 +7 2 60 +7 1 69 +7 2 55 +7 1 69 +7 2 52 +7 1 63 +7 3 55 +7 1 63 +7 3 51 +7 1 61 +7 1 56 +7 1 58 +7 1 60 +7 2 53 +7 1 66 +7 1 67 +7 2 52 +7 1 63 +7 3 54 +7 2 51 +7 1 59 +7 4 52 +7 4 50 +7 2 32 +7 2 32 +7 1 63 +7 1 63 +7 1 63 +7 4 43 +7 1 63 +7 3 62 +7 3 65 +7 3 68 +7 3 68 +7 3 69 +7 1 69 +7 1 70 +7 1 72 +7 1 71 +7 3 60 +7 2 27 +7 1 73 +7 1 73 +7 1 72 +7 1 73 +7 1 75 +7 1 72 +7 4 40 +7 1 67 +7 4 36 +7 1 65 +7 3 63 +7 1 66 +7 2 35 +7 2 41 +7 1 66 +7 3 68 +7 1 62 +7 3 64 +8 3 85 +8 3 84 +8 3 88 +8 4 40 +8 1 67 +8 3 97 +8 2 26 +8 3 94 +8 3 93 +8 3 93 +8 3 88 +8 3 87 +8 1 85 +8 3 85 +8 3 81 +8 1 77 +8 3 73 +8 1 75 +8 1 71 +8 3 61 +8 1 70 +8 1 72 +8 1 66 +8 3 67 +8 3 69 +8 1 61 +8 4 36 +8 3 61 +8 3 67 +8 1 57 +8 2 39 +8 1 62 +8 3 66 +8 3 64 +8 3 68 +8 3 71 +8 3 69 +8 3 68 +8 1 72 +8 3 59 +8 4 43 +8 1 66 +8 1 69 +8 3 57 +8 3 58 +8 1 68 +8 1 70 +8 3 58 +8 1 72 +8 1 73 +8 4 46 +8 4 44 +8 1 67 +8 4 44 +8 1 66 +8 3 67 +8 3 67 +8 1 53 +8 3 59 +8 1 55 +8 4 47 +8 4 48 +8 3 57 +8 1 57 +8 3 44 +8 4 55 +8 4 51 +8 4 51 +8 1 59 +8 2 27 +8 3 45 +8 1 56 +8 4 50 +8 3 51 +8 4 52 +8 4 55 +8 3 58 +8 4 55 +8 3 58 +8 1 50 +8 3 58 +8 3 56 +8 4 57 +8 1 57 +8 3 58 +8 3 58 +8 3 59 +8 1 51 +8 4 61 +8 4 61 +8 4 56 +8 3 53 +8 3 57 +8 4 60 +8 3 63 +8 3 61 +8 3 57 +8 4 55 +8 3 58 +8 1 43 +8 2 36 +8 4 59 +8 1 43 +8 2 41 +8 4 62 +8 4 62 +8 4 63 +8 4 63 +8 4 66 +8 4 71 +8 4 71 +8 3 50 +8 4 66 +8 1 51 +8 4 72 +8 1 52 +8 4 70 +8 4 71 +8 2 54 +8 1 51 +8 4 73 +8 4 77 +8 4 72 +8 4 70 +8 4 68 +8 4 69 +8 1 50 +8 4 75 +8 4 79 +8 3 51 +8 3 53 +8 4 84 +8 4 85 +8 4 85 +8 4 88 +8 4 82 +8 4 80 +8 4 75 +8 4 73 +8 4 75 +8 2 60 +8 4 77 +8 3 59 +8 4 77 +8 4 74 +8 4 79 +8 4 79 +8 4 79 +8 4 76 +8 4 76 +8 4 78 +8 4 77 +8 4 78 +8 4 76 +8 4 80 +8 4 74 +8 4 72 +8 4 69 +8 4 73 +8 4 74 +8 4 69 +8 4 68 +8 3 72 +8 3 70 +8 4 66 +8 2 82 +8 2 84 +8 3 66 +8 2 
84 +8 3 61 +8 2 84 +8 2 85 +8 2 82 +8 2 84 +8 2 84 +8 2 85 +8 2 88 +8 2 90 +8 2 87 +8 2 86 +8 2 86 +8 2 89 +8 2 86 +8 2 81 +8 2 80 +8 2 77 +8 4 59 +8 2 86 +8 2 85 +8 2 91 +8 2 91 +8 2 87 +8 2 87 +8 2 83 +8 2 85 +8 2 91 +8 4 49 +8 2 90 +8 2 90 +8 2 91 +8 3 59 +8 2 88 +8 2 90 +8 2 88 +8 2 84 +8 2 86 +8 2 81 +8 1 51 +8 2 82 +8 2 81 +8 2 78 +8 2 78 +8 2 80 +8 2 80 +8 2 80 +8 2 81 +8 2 78 +8 2 80 +8 2 78 +8 2 76 +8 2 78 +8 2 74 +8 2 73 +8 2 73 +8 4 28 +8 2 70 +8 1 61 +8 2 74 +8 2 74 +8 2 76 +8 2 74 +8 2 75 +8 2 73 +8 2 73 +8 2 69 +8 2 71 +8 2 67 +8 1 70 +8 2 60 +8 1 76 +8 3 62 +8 3 62 +8 2 57 +8 3 66 +8 1 69 +8 3 58 +8 1 63 +8 1 63 +8 4 45 +8 1 62 +8 2 52 +8 1 56 +8 1 58 +8 4 52 +8 4 52 +8 1 66 +8 3 49 +8 1 65 +8 3 50 +8 3 54 +8 1 61 +8 1 59 +8 1 60 +8 4 50 +8 1 64 +8 1 64 +8 1 63 +8 1 63 +8 3 58 +8 2 26 +8 1 63 +8 1 64 +8 1 63 +8 3 68 +8 4 40 +8 3 69 +8 3 69 +8 3 70 +8 3 67 +8 1 71 +8 1 73 +8 1 73 +8 3 69 +8 1 73 +8 1 72 +8 1 73 +8 1 75 +8 1 72 +8 1 72 +8 3 61 +8 2 37 +8 3 63 +8 3 63 +8 1 66 +8 4 30 +8 3 57 +8 1 66 +8 1 65 +8 3 66 +8 1 56 +9 4 33 +9 1 60 +9 1 61 +9 3 88 +9 3 91 +9 3 97 +9 3 94 +9 3 94 +9 3 93 +9 3 93 +9 3 88 +9 4 43 +9 3 85 +9 3 85 +9 4 49 +9 1 77 +9 1 76 +9 2 20 +9 3 64 +9 1 71 +9 1 70 +9 3 61 +9 1 66 +9 3 67 +9 1 62 +9 3 68 +9 4 36 +9 3 61 +9 1 56 +9 3 68 +9 3 68 +9 2 38 +9 4 41 +9 1 65 +9 1 70 +9 1 73 +9 1 74 +9 1 74 +9 1 72 +9 1 73 +9 3 55 +9 1 66 +9 1 69 +9 1 69 +9 4 44 +9 3 59 +9 2 53 +9 1 70 +9 1 72 +9 4 50 +9 1 70 +9 4 44 +9 4 41 +9 2 43 +9 1 66 +9 1 61 +9 4 44 +9 2 34 +9 1 51 +9 1 55 +9 1 58 +9 3 60 +9 1 59 +9 4 53 +9 3 44 +9 1 59 +9 4 51 +9 4 51 +9 1 59 +9 4 48 +9 2 23 +9 2 22 +9 4 50 +9 1 59 +9 3 51 +9 3 55 +9 1 53 +9 3 61 +9 4 57 +9 1 50 +9 2 31 +9 3 56 +9 4 57 +9 4 59 +9 2 34 +9 3 58 +9 1 51 +9 3 59 +9 3 58 +9 1 54 +9 4 56 +9 3 53 +9 1 49 +9 1 48 +9 1 49 +9 4 58 +9 4 60 +9 4 55 +9 4 56 +9 4 59 +9 4 59 +9 4 59 +9 4 60 +9 4 57 +9 2 45 +9 3 52 +9 4 63 +9 4 63 +9 2 51 +9 2 53 +9 4 71 +9 2 56 +9 4 66 +9 1 51 +9 4 72 +9 3 48 +9 4 70 +9 2 53 +9 2 54 +9 2 53 +9 2 52 +9 1 49 +9 3 45 +9 4 70 +9 4 68 +9 4 69 +9 4 72 +9 3 48 +9 4 79 +9 4 80 +9 1 59 +9 4 84 +9 4 85 +9 4 85 +9 4 88 +9 4 82 +9 4 80 +9 1 65 +9 4 73 +9 1 68 +9 4 76 +9 2 64 +9 4 77 +9 1 71 +9 1 70 +9 4 79 +9 4 79 +9 4 79 +9 4 76 +9 4 76 +9 4 78 +9 4 77 +9 4 78 +9 4 76 +9 2 67 +9 1 65 +9 1 69 +9 1 73 +9 4 73 +9 4 74 +9 4 69 +9 4 68 +9 2 78 +9 2 79 +9 2 82 +9 4 62 +9 2 84 +9 4 59 +9 2 84 +9 4 72 +9 2 84 +9 4 78 +9 1 64 +9 4 76 +9 4 73 +9 2 85 +9 4 70 +9 4 73 +9 2 87 +9 2 86 +9 2 86 +9 2 89 +9 2 86 +9 2 81 +9 2 80 +9 2 77 +9 3 67 +9 4 57 +9 3 68 +9 2 91 +9 2 91 +9 2 87 +9 2 87 +9 2 83 +9 1 45 +9 3 63 +9 2 92 +9 2 90 +9 2 90 +9 2 91 +9 4 49 +9 2 88 +9 2 90 +9 2 88 +9 2 84 +9 2 86 +9 2 81 +9 2 81 +9 2 82 +9 2 81 +9 1 55 +9 2 78 +9 2 80 +9 2 80 +9 2 80 +9 2 81 +9 2 78 +9 2 80 +9 2 78 +9 3 65 +9 2 78 +9 3 68 +9 2 73 +9 3 68 +9 3 70 +9 2 70 +9 3 75 +9 3 77 +9 2 74 +9 3 79 +9 3 82 +9 3 79 +9 3 75 +9 2 73 +9 1 67 +9 3 69 +9 4 32 +9 3 65 +9 1 74 +9 3 63 +9 1 70 +9 3 62 +9 2 57 +9 3 66 +9 2 52 +9 1 67 +9 1 63 +9 3 55 +9 4 45 +9 3 51 +9 3 51 +9 3 54 +9 1 58 +9 1 60 +9 1 62 +9 1 66 +9 1 67 +9 1 65 +9 2 52 +9 4 47 +9 1 61 +9 1 59 +9 1 60 +9 3 52 +9 1 64 +9 1 64 +9 4 46 +9 4 46 +9 1 63 +9 1 63 +9 4 42 +9 2 27 +9 1 63 +9 1 61 +9 3 68 +9 3 69 +9 3 69 +9 3 70 +9 3 67 +9 1 71 +9 1 73 +9 3 62 +9 3 69 +9 1 73 +9 3 65 +9 4 38 +9 3 66 +9 3 68 +9 1 72 +9 1 67 +9 3 64 +9 3 63 +9 4 36 +9 1 66 +9 3 57 +9 1 64 +9 1 66 +9 3 68 +9 1 62 +9 3 64 +10 1 62 +10 4 34 +10 3 88 +10 2 36 +10 3 91 +10 3 97 +10 3 94 +10 3 94 +10 3 93 +10 3 93 +10 3 88 +10 
3 87 +10 3 85 +10 3 85 +10 3 81 +10 3 76 +10 3 73 +10 3 65 +10 3 64 +10 3 61 +10 3 60 +10 3 61 +10 3 68 +10 2 31 +10 3 69 +10 3 68 +10 3 62 +10 3 61 +10 4 32 +10 3 68 +10 3 68 +10 1 62 +10 3 66 +10 3 64 +10 3 68 +10 3 71 +10 3 69 +10 2 44 +10 1 72 +10 3 59 +10 1 70 +10 3 56 +10 1 69 +10 1 69 +10 1 71 +10 1 68 +10 1 70 +10 1 70 +10 1 72 +10 3 64 +10 1 70 +10 1 67 +10 1 67 +10 1 65 +10 1 66 +10 2 38 +10 2 36 +10 1 53 +10 1 51 +10 3 58 +10 1 58 +10 3 60 +10 2 32 +10 4 53 +10 3 44 +10 4 55 +10 1 59 +10 3 47 +10 3 48 +10 4 48 +10 2 23 +10 4 48 +10 1 58 +10 1 59 +10 1 61 +10 4 55 +10 1 53 +10 1 52 +10 4 57 +10 4 55 +10 2 31 +10 4 53 +10 4 57 +10 3 58 +10 3 58 +10 3 58 +10 3 59 +10 4 60 +10 3 58 +10 4 61 +10 2 37 +10 3 53 +10 1 49 +10 4 60 +10 1 49 +10 4 58 +10 4 60 +10 4 55 +10 3 58 +10 3 55 +10 3 53 +10 3 51 +10 4 60 +10 3 47 +10 4 62 +10 4 62 +10 4 63 +10 1 48 +10 4 66 +10 1 38 +10 4 71 +10 4 67 +10 3 48 +10 4 69 +10 4 72 +10 4 68 +10 4 70 +10 4 71 +10 4 70 +10 4 69 +10 1 51 +10 4 77 +10 4 72 +10 4 70 +10 2 55 +10 3 49 +10 1 50 +10 4 75 +10 4 79 +10 4 80 +10 4 81 +10 4 84 +10 4 85 +10 4 85 +10 4 88 +10 4 82 +10 4 80 +10 4 75 +10 4 73 +10 4 75 +10 4 76 +10 4 77 +10 4 77 +10 4 77 +10 4 74 +10 4 79 +10 4 79 +10 4 79 +10 4 76 +10 4 76 +10 4 78 +10 4 77 +10 2 69 +10 4 76 +10 4 80 +10 4 74 +10 4 72 +10 4 69 +10 4 73 +10 4 74 +10 4 69 +10 4 68 +10 1 71 +10 1 72 +10 1 68 +10 4 62 +10 2 84 +10 4 59 +10 1 65 +10 1 62 +10 2 84 +10 2 85 +10 2 82 +10 2 84 +10 2 84 +10 2 85 +10 2 88 +10 2 90 +10 2 87 +10 2 86 +10 2 86 +10 2 89 +10 2 86 +10 2 81 +10 2 80 +10 2 77 +10 2 81 +10 2 86 +10 2 85 +10 2 91 +10 2 91 +10 2 87 +10 2 87 +10 2 83 +10 2 85 +10 2 91 +10 2 92 +10 2 90 +10 2 90 +10 2 91 +10 2 89 +10 2 88 +10 2 90 +10 2 88 +10 2 84 +10 2 86 +10 2 81 +10 2 81 +10 2 82 +10 2 81 +10 2 78 +10 2 78 +10 2 80 +10 2 80 +10 2 80 +10 2 81 +10 2 78 +10 2 80 +10 2 78 +10 2 76 +10 2 78 +10 2 74 +10 2 73 +10 2 73 +10 3 70 +10 3 72 +10 4 26 +10 3 77 +10 3 76 +10 3 79 +10 2 74 +10 2 75 +10 3 75 +10 2 73 +10 2 69 +10 4 32 +10 2 67 +10 3 65 +10 2 60 +10 3 63 +10 2 62 +10 3 62 +10 3 59 +10 2 55 +10 2 52 +10 3 58 +10 1 63 +10 2 49 +10 1 63 +10 3 51 +10 3 51 +10 1 56 +10 1 58 +10 3 50 +10 2 53 +10 1 66 +10 1 67 +10 1 65 +10 1 63 +10 1 60 +10 1 61 +10 1 59 +10 1 60 +10 1 62 +10 1 64 +10 1 64 +10 1 63 +10 3 60 +10 1 63 +10 1 63 +10 1 63 +10 1 64 +10 4 42 +10 1 61 +10 1 62 +10 3 69 +10 3 69 +10 4 42 +10 3 67 +10 4 45 +10 3 60 +10 3 62 +10 3 69 +10 3 65 +10 3 65 +10 3 63 +10 3 66 +10 3 68 +10 3 67 +10 3 61 +10 3 64 +10 2 32 +10 3 63 +10 3 58 +10 3 57 +10 3 57 +10 3 67 +10 3 68 +10 3 66 +10 3 64 diff --git a/Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt b/Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt new file mode 100644 index 00000000..92ffad0d --- /dev/null +++ b/Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt @@ -0,0 +1,2001 @@ +subjID gain loss choice +102 0 0 2 +102 1 0 1 +102 0 -1 1 +102 1 0 3 +102 0 -1 3 +102 0 -1 4 +102 0 -1 4 +102 0 0 2 +102 1 0 2 +102 0 0 1 +102 0 0 1 +102 0 0 2 +102 0 -1 3 +102 0 0 1 +102 1 0 1 +102 0 0 2 +102 0 0 2 +102 1 0 1 +102 0 0 1 +102 0 0 1 +102 0 0 1 +102 0 -1 2 +102 1 0 3 +102 0 0 1 +102 1 0 3 +102 0 -1 3 +102 0 0 1 +102 0 -1 3 +102 0 0 1 +102 0 0 1 +102 1 0 2 +102 0 -1 3 +102 0 0 2 +102 1 0 1 +102 1 0 3 +102 0 0 2 +102 0 0 1 +102 0 -1 3 +102 0 0 3 +102 0 0 1 +102 0 0 2 +102 1 -1 3 +102 0 0 1 +102 0 0 3 +102 0 0 1 +102 0 -1 3 +102 0 0 1 +102 0 -1 2 +102 0 -1 1 +102 0 -1 3 +102 0 0 2 +102 1 -1 1 +102 0 0 2 +102 1 0 1 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 0 0 3 
+102 1 0 4 +102 0 0 1 +102 0 0 4 +102 0 0 1 +102 0 0 2 +102 0 0 4 +102 0 0 3 +102 0 -1 1 +102 0 -1 4 +102 0 0 2 +102 1 -1 3 +102 0 -1 2 +102 0 -1 3 +102 0 0 1 +102 0 -1 1 +102 1 0 2 +102 0 0 2 +102 0 0 2 +102 0 0 1 +102 0 0 1 +102 0 -1 2 +102 0 0 1 +102 1 0 1 +102 1 0 1 +102 0 0 1 +102 0 0 3 +102 0 0 1 +102 0 0 1 +102 0 0 2 +102 0 0 2 +102 0 -1 1 +102 1 0 2 +102 0 0 2 +102 0 0 2 +102 0 0 2 +102 0 0 1 +102 1 0 2 +102 0 0 2 +102 0 0 2 +102 0 0 1 +102 0 0 2 +102 0 -1 3 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 1 0 3 +102 0 0 2 +102 0 0 3 +102 0 0 3 +102 0 0 4 +102 1 0 2 +102 0 -1 2 +102 0 0 1 +102 1 0 1 +102 1 -1 1 +102 0 0 2 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 0 0 3 +102 0 0 4 +102 0 -1 2 +102 0 0 1 +102 0 0 4 +102 0 -1 3 +102 1 0 1 +102 0 0 1 +102 0 0 1 +102 0 0 2 +102 0 0 4 +102 0 0 4 +102 0 -1 3 +102 0 0 2 +102 0 0 1 +102 1 -1 4 +102 0 0 2 +102 1 0 1 +102 0 0 1 +102 1 0 2 +102 0 -1 2 +102 0 0 1 +102 0 -1 4 +102 0 0 2 +102 0 -1 2 +102 1 0 1 +102 0 0 1 +102 1 0 2 +102 0 -1 2 +102 0 0 1 +102 1 0 2 +102 1 0 2 +102 0 0 2 +102 0 0 4 +102 0 0 2 +102 0 0 1 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 0 0 3 +102 0 -1 4 +102 0 0 1 +102 0 0 1 +102 0 0 2 +102 0 0 3 +102 0 0 4 +102 0 0 2 +102 0 0 1 +102 0 0 1 +102 0 0 2 +102 1 0 4 +102 0 -1 4 +102 0 0 3 +102 0 0 3 +102 1 0 1 +102 0 0 1 +102 0 0 2 +102 0 0 3 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 0 0 3 +102 0 0 1 +102 1 0 1 +102 0 -1 1 +102 1 0 3 +102 1 0 3 +102 0 -1 3 +102 0 0 4 +102 0 0 2 +102 1 0 1 +102 0 0 1 +102 0 0 1 +102 0 0 1 +102 0 0 1 +102 0 -1 2 +102 0 0 1 +102 0 -1 2 +102 0 0 1 +102 0 0 1 +102 1 0 1 +102 0 0 1 +102 0 0 3 +201 0 0 1 +201 0 -1 2 +201 0 -1 3 +201 0 -1 4 +201 0 0 1 +201 1 -1 1 +201 0 0 4 +201 0 0 3 +201 0 0 3 +201 1 0 1 +201 0 0 4 +201 0 0 1 +201 0 0 2 +201 1 -1 3 +201 1 0 3 +201 1 -1 2 +201 1 0 2 +201 0 0 4 +201 0 0 3 +201 0 -1 1 +201 0 -1 4 +201 0 0 2 +201 0 -1 3 +201 0 0 2 +201 0 0 3 +201 0 -1 1 +201 1 0 4 +201 0 -1 4 +201 0 -1 1 +201 1 0 2 +201 1 -1 3 +201 0 0 3 +201 1 0 3 +201 0 0 2 +201 1 0 4 +201 0 -1 1 +201 0 0 2 +201 1 0 3 +201 0 0 2 +201 0 -1 3 +201 0 -1 1 +201 0 0 4 +201 0 0 3 +201 0 -1 2 +201 0 0 3 +201 0 0 3 +201 0 0 3 +201 0 0 3 +201 1 0 1 +201 0 -1 4 +201 0 0 1 +201 0 0 4 +201 1 -1 4 +201 0 0 4 +201 1 0 1 +201 0 -1 2 +201 0 -1 2 +201 0 0 1 +201 0 0 3 +201 1 0 3 +201 0 -1 2 +201 0 0 4 +201 0 0 1 +201 0 0 4 +201 0 0 1 +201 0 0 4 +201 0 0 2 +201 1 -1 3 +201 0 -1 3 +201 0 0 2 +201 1 -1 3 +201 1 -1 3 +201 0 -1 3 +201 1 -1 2 +201 0 -1 3 +201 1 0 4 +201 0 -1 1 +201 0 0 4 +201 1 0 4 +201 0 0 4 +201 0 0 4 +201 0 0 2 +201 0 0 4 +201 0 0 2 +201 0 0 1 +201 0 0 4 +201 0 0 3 +201 0 0 4 +201 1 0 1 +201 1 0 1 +201 0 -1 2 +201 0 0 4 +201 1 0 1 +201 0 0 4 +201 0 0 1 +201 0 -1 4 +201 0 0 2 +201 0 -1 3 +201 0 0 1 +201 1 -1 4 +201 1 0 1 +201 0 0 1 +201 0 0 4 +201 0 0 2 +201 0 -1 3 +201 1 0 2 +201 0 -1 2 +201 0 -1 1 +201 1 -1 4 +201 0 -1 4 +201 1 0 1 +201 1 0 4 +201 0 0 2 +201 0 0 4 +201 0 -1 3 +201 0 0 2 +201 1 -1 4 +201 1 -1 1 +201 0 0 4 +201 0 0 1 +201 1 0 4 +201 0 0 1 +201 1 0 1 +201 1 0 2 +201 1 0 4 +201 0 0 3 +201 1 0 1 +201 0 -1 2 +201 0 0 1 +201 1 0 4 +201 0 0 3 +201 0 0 2 +201 1 -1 1 +201 1 0 4 +201 0 0 1 +201 0 0 1 +201 0 0 1 +201 0 0 1 +201 0 0 4 +201 0 -1 1 +201 0 -1 4 +201 0 0 2 +201 1 0 1 +201 0 -1 4 +201 0 -1 1 +201 0 0 3 +201 0 -1 3 +201 0 -1 4 +201 0 -1 3 +201 1 0 2 +201 1 0 3 +201 0 0 1 +201 1 0 2 +201 0 0 4 +201 0 0 2 +201 0 0 1 +201 0 -1 3 +201 1 0 2 +201 0 -1 4 +201 0 -1 2 +201 1 0 4 +201 0 -1 2 +201 1 0 3 +201 0 0 4 +201 0 0 1 +201 0 0 4 +201 0 -1 2 +201 0 -1 3 +201 0 0 4 +201 0 0 1 +201 0 0 4 +201 0 0 2 +201 0 -1 3 
+201 0 -1 2 +201 0 -1 2 +201 1 0 1 +201 1 -1 3 +201 0 0 2 +201 1 0 3 +201 1 0 1 +201 0 0 3 +201 0 0 4 +201 0 0 1 +201 1 -1 3 +201 1 -1 2 +201 0 0 3 +201 1 0 2 +201 0 0 3 +201 0 0 4 +201 0 0 1 +201 0 -1 4 +201 0 0 1 +201 0 0 3 +201 0 0 2 +201 0 0 3 +201 0 -1 2 +201 0 0 2 +201 0 0 3 +201 0 0 4 +201 0 0 4 +202 0 0 3 +202 0 0 1 +202 1 -1 1 +202 0 0 3 +202 0 0 2 +202 0 0 4 +202 1 0 1 +202 0 0 1 +202 0 0 3 +202 1 -1 2 +202 0 0 2 +202 0 0 4 +202 0 -1 3 +202 1 0 1 +202 0 0 4 +202 1 0 1 +202 0 0 1 +202 0 0 2 +202 1 -1 3 +202 0 0 3 +202 0 0 1 +202 0 0 4 +202 0 0 3 +202 0 0 1 +202 0 0 2 +202 0 0 1 +202 0 -1 3 +202 0 0 1 +202 1 0 4 +202 0 -1 1 +202 1 0 4 +202 0 -1 1 +202 0 -1 3 +202 0 0 2 +202 0 0 4 +202 0 0 1 +202 0 -1 3 +202 0 0 1 +202 0 0 2 +202 0 0 4 +202 0 -1 1 +202 1 0 3 +202 0 0 1 +202 0 0 2 +202 0 -1 3 +202 0 0 1 +202 0 0 2 +202 0 0 4 +202 0 0 1 +202 0 0 3 +202 0 0 1 +202 0 0 3 +202 1 0 2 +202 0 0 3 +202 0 -1 2 +202 0 0 1 +202 0 0 4 +202 0 0 4 +202 0 0 1 +202 1 0 3 +202 0 0 2 +202 0 0 1 +202 1 -1 4 +202 0 0 1 +202 0 -1 4 +202 0 -1 2 +202 1 -1 3 +202 1 0 1 +202 0 -1 3 +202 0 0 4 +202 0 0 2 +202 0 0 4 +202 0 0 3 +202 0 0 2 +202 0 -1 1 +202 1 0 4 +202 1 0 3 +202 0 0 1 +202 1 -1 4 +202 0 -1 3 +202 0 0 2 +202 0 -1 2 +202 0 0 2 +202 0 0 3 +202 0 0 1 +202 1 0 4 +202 0 0 4 +202 0 0 1 +202 0 0 3 +202 0 0 2 +202 0 0 2 +202 1 -1 1 +202 0 0 3 +202 0 0 4 +202 1 0 2 +202 0 0 1 +202 0 0 4 +202 0 0 3 +202 0 0 2 +202 0 0 2 +202 0 0 3 +202 0 0 2 +202 0 0 4 +202 0 0 1 +202 0 0 3 +202 0 -1 2 +202 0 -1 1 +202 1 0 4 +202 0 -1 3 +202 1 0 4 +202 1 0 4 +202 0 0 4 +202 0 -1 1 +202 0 0 2 +202 0 0 3 +202 0 0 3 +202 0 0 4 +202 1 0 1 +202 1 0 1 +202 0 -1 2 +202 0 0 3 +202 0 0 4 +202 0 0 2 +202 0 -1 1 +202 1 0 4 +202 1 0 4 +202 0 -1 3 +202 1 0 4 +202 1 -1 4 +202 0 0 4 +202 0 -1 2 +202 0 0 4 +202 0 -1 1 +202 0 0 3 +202 1 0 4 +202 0 0 4 +202 0 -1 1 +202 1 0 4 +202 1 0 4 +202 0 0 3 +202 1 0 2 +202 1 0 2 +202 0 -1 1 +202 0 0 4 +202 1 0 2 +202 0 0 2 +202 0 -1 3 +202 0 0 2 +202 0 0 4 +202 0 -1 1 +202 0 0 3 +202 0 0 1 +202 0 -1 2 +202 0 0 3 +202 0 0 1 +202 0 0 4 +202 1 0 3 +202 0 0 1 +202 0 -1 2 +202 0 0 3 +202 0 0 1 +202 0 0 1 +202 0 0 4 +202 0 -1 1 +202 0 0 4 +202 0 -1 2 +202 0 -1 3 +202 0 0 1 +202 0 0 4 +202 1 0 2 +202 1 0 2 +202 0 0 2 +202 1 0 3 +202 0 0 3 +202 0 0 1 +202 0 0 1 +202 0 0 4 +202 0 0 2 +202 1 0 3 +202 0 0 1 +202 0 0 4 +202 1 0 3 +202 1 0 3 +202 1 0 3 +202 0 0 3 +202 0 0 1 +202 0 -1 4 +202 0 -1 2 +202 0 0 2 +202 0 -1 3 +202 0 0 1 +202 0 0 4 +202 0 0 3 +202 0 0 2 +202 0 0 4 +202 0 0 1 +202 0 0 3 +202 0 0 2 +202 1 0 3 +202 0 0 4 +203 0 0 3 +203 0 0 2 +203 0 0 3 +203 0 -1 1 +203 0 0 3 +203 1 0 4 +203 0 -1 4 +203 0 -1 2 +203 0 -1 1 +203 0 0 3 +203 1 0 2 +203 1 0 2 +203 0 -1 2 +203 0 0 4 +203 0 0 3 +203 0 -1 1 +203 0 0 3 +203 0 -1 2 +203 0 0 4 +203 0 -1 3 +203 1 -1 1 +203 0 0 1 +203 0 -1 3 +203 0 0 2 +203 0 0 1 +203 1 0 4 +203 1 0 4 +203 0 0 4 +203 1 0 3 +203 0 -1 3 +203 1 0 1 +203 1 0 1 +203 0 0 2 +203 1 -1 4 +203 0 -1 3 +203 0 0 1 +203 0 0 2 +203 0 -1 3 +203 0 -1 4 +203 0 -1 1 +203 1 -1 2 +203 1 0 4 +203 0 0 3 +203 0 0 4 +203 0 0 1 +203 1 0 2 +203 0 -1 3 +203 0 -1 4 +203 1 0 2 +203 0 -1 1 +203 0 0 3 +203 0 -1 1 +203 0 0 2 +203 0 -1 4 +203 0 0 3 +203 0 -1 1 +203 0 0 2 +203 0 0 1 +203 0 0 4 +203 0 -1 3 +203 0 0 2 +203 0 -1 3 +203 0 0 1 +203 1 -1 4 +203 0 -1 4 +203 1 0 2 +203 0 0 2 +203 0 0 2 +203 1 0 1 +203 0 0 1 +203 0 0 2 +203 1 0 1 +203 0 -1 3 +203 0 -1 1 +203 0 -1 2 +203 0 0 4 +203 0 -1 3 +203 1 0 1 +203 0 0 1 +203 0 0 1 +203 1 0 2 +203 1 0 2 +203 0 0 2 +203 0 0 3 +203 0 0 4 +203 0 0 2 +203 1 -1 3 +203 1 0 1 +203 0 
0 1 +203 0 0 1 +203 0 0 4 +203 1 0 3 +203 1 0 3 +203 0 0 3 +203 1 0 3 +203 1 -1 3 +203 0 -1 2 +203 0 0 3 +203 0 -1 1 +203 0 0 4 +203 0 0 2 +203 0 0 3 +203 0 0 4 +203 0 0 2 +203 1 -1 1 +203 1 -1 2 +203 1 0 3 +203 0 0 1 +203 0 -1 3 +203 0 0 4 +203 0 0 2 +203 1 0 3 +203 0 0 1 +203 0 0 3 +203 0 0 4 +203 0 0 2 +203 0 -1 1 +203 1 0 3 +203 0 0 2 +203 1 0 3 +203 0 -1 3 +203 1 -1 4 +203 0 0 1 +203 0 -1 3 +203 0 0 2 +203 0 0 4 +203 1 -1 3 +203 0 0 1 +203 0 0 2 +203 0 0 3 +203 0 -1 4 +203 0 0 2 +203 0 0 3 +203 1 0 1 +203 0 0 1 +203 0 0 3 +203 0 0 2 +203 1 -1 4 +203 0 0 3 +203 0 0 1 +203 0 0 3 +203 1 0 2 +203 0 0 1 +203 0 0 2 +203 0 -1 4 +203 0 0 3 +203 0 -1 1 +203 0 0 2 +203 1 -1 3 +203 1 -1 2 +203 0 0 1 +203 0 -1 3 +203 0 0 4 +203 0 -1 2 +203 0 0 3 +203 0 -1 1 +203 1 0 2 +203 0 -1 4 +203 1 -1 3 +203 0 -1 2 +203 0 -1 1 +203 0 0 4 +203 1 0 3 +203 1 -1 1 +203 1 0 3 +203 1 0 3 +203 1 0 3 +203 0 0 3 +203 1 0 3 +203 0 0 3 +203 0 0 3 +203 0 0 3 +203 0 0 3 +203 0 -1 1 +203 0 0 3 +203 1 -1 4 +203 0 -1 2 +203 1 0 3 +203 0 0 3 +203 0 -1 3 +203 0 0 1 +203 1 -1 4 +203 0 0 2 +203 1 0 3 +203 0 0 1 +203 0 -1 3 +203 1 -1 3 +203 0 -1 3 +203 1 0 2 +203 0 0 2 +203 0 0 1 +203 1 0 4 +203 1 0 4 +203 0 0 4 +203 0 0 3 +203 0 -1 4 +203 0 0 2 +203 0 0 3 +203 0 0 1 +203 1 0 4 +204 0 0 1 +204 0 -1 4 +204 1 0 3 +204 1 -1 2 +204 1 0 2 +204 0 0 2 +204 0 0 2 +204 0 0 1 +204 0 -1 3 +204 0 0 4 +204 0 0 2 +204 0 0 4 +204 0 0 2 +204 0 -1 1 +204 0 0 2 +204 0 0 4 +204 1 0 3 +204 1 0 3 +204 1 0 2 +204 0 -1 3 +204 1 0 1 +204 0 0 2 +204 0 0 4 +204 0 0 2 +204 1 0 3 +204 0 0 1 +204 0 -1 2 +204 0 0 3 +204 0 0 2 +204 0 0 4 +204 0 0 3 +204 1 0 2 +204 0 -1 1 +204 0 0 2 +204 0 -1 3 +204 0 -1 3 +204 0 0 2 +204 0 0 2 +204 0 0 1 +204 1 -1 4 +204 0 0 4 +204 1 0 3 +204 0 0 2 +204 1 0 2 +204 0 -1 4 +204 0 -1 3 +204 0 0 2 +204 0 0 2 +204 1 -1 3 +204 0 0 2 +204 1 0 1 +204 0 0 2 +204 0 -1 1 +204 0 -1 4 +204 1 0 3 +204 0 -1 2 +204 1 0 3 +204 0 0 1 +204 0 0 1 +204 1 0 3 +204 0 0 1 +204 0 0 4 +204 1 0 3 +204 0 0 2 +204 1 0 1 +204 1 -1 2 +204 1 0 3 +204 0 0 1 +204 1 0 4 +204 0 0 2 +204 0 -1 4 +204 0 0 1 +204 1 0 3 +204 0 0 2 +204 0 -1 3 +204 0 -1 1 +204 1 0 3 +204 0 -1 2 +204 1 0 4 +204 1 0 3 +204 1 0 3 +204 0 0 1 +204 0 0 2 +204 0 0 3 +204 0 0 4 +204 0 0 2 +204 0 -1 1 +204 0 0 3 +204 0 0 2 +204 1 -1 3 +204 0 0 1 +204 0 0 3 +204 0 -1 4 +204 1 0 2 +204 0 0 3 +204 0 0 1 +204 0 0 2 +204 1 0 3 +204 0 0 2 +204 0 -1 3 +204 1 0 1 +204 0 0 1 +204 0 0 2 +204 0 0 3 +204 0 -1 4 +204 0 0 2 +204 0 0 4 +204 0 0 3 +204 0 -1 1 +204 0 -1 2 +204 1 0 4 +204 1 0 4 +204 1 -1 3 +204 0 0 3 +204 0 -1 3 +204 1 -1 3 +204 0 -1 2 +204 1 0 2 +204 0 -1 2 +204 1 0 4 +204 1 0 4 +204 0 0 1 +204 0 -1 1 +204 0 -1 1 +204 0 0 4 +204 0 0 4 +204 1 0 4 +204 0 0 2 +204 0 -1 2 +204 1 -1 2 +204 1 -1 3 +204 0 0 4 +204 1 0 4 +204 0 0 4 +204 0 0 4 +204 0 0 4 +204 0 -1 1 +204 0 -1 2 +204 0 0 2 +204 0 -1 3 +204 1 -1 4 +204 0 0 4 +204 0 0 4 +204 0 -1 2 +204 1 0 1 +204 0 0 4 +204 0 0 3 +204 0 -1 3 +204 0 0 3 +204 0 -1 1 +204 0 -1 2 +204 0 -1 1 +204 0 0 3 +204 0 0 4 +204 0 0 4 +204 0 -1 4 +204 0 0 4 +204 0 0 3 +204 0 0 3 +204 0 -1 3 +204 0 -1 2 +204 0 0 2 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 4 +204 0 0 4 +204 0 -1 4 +204 0 0 3 +204 1 0 3 +204 0 -1 3 +204 0 -1 3 +204 1 0 3 +204 0 0 2 +204 0 -1 4 +204 1 0 4 +204 0 0 3 +204 0 0 1 +204 0 0 1 +204 0 0 1 +204 0 0 3 +204 0 0 4 +204 0 0 4 +204 0 -1 3 +204 0 -1 3 +204 0 0 3 +204 0 0 4 +204 0 0 3 +204 0 0 4 +204 0 0 2 +204 1 0 1 +204 0 -1 4 +204 0 -1 4 +204 1 0 3 +205 1 0 1 +205 0 -1 4 +205 0 0 3 
+205 0 0 1 +205 0 0 2 +205 1 0 4 +205 1 0 4 +205 0 -1 4 +205 0 0 3 +205 0 0 4 +205 0 -1 1 +205 1 0 2 +205 1 0 4 +205 0 -1 2 +205 0 0 4 +205 0 0 3 +205 0 0 4 +205 0 -1 1 +205 1 -1 3 +205 0 0 4 +205 0 0 2 +205 0 0 2 +205 0 0 3 +205 0 -1 4 +205 0 0 1 +205 1 0 1 +205 1 0 3 +205 0 0 4 +205 0 -1 1 +205 1 -1 3 +205 0 0 2 +205 1 0 2 +205 0 0 3 +205 1 -1 3 +205 1 0 4 +205 0 0 4 +205 0 0 2 +205 0 0 1 +205 0 -1 3 +205 0 -1 4 +205 1 0 2 +205 0 0 2 +205 0 -1 1 +205 0 -1 3 +205 0 0 4 +205 0 -1 1 +205 0 -1 3 +205 0 -1 4 +205 0 0 2 +205 0 0 2 +205 0 0 4 +205 0 0 2 +205 0 -1 1 +205 0 0 3 +205 1 0 4 +205 0 0 4 +205 0 0 2 +205 0 0 1 +205 0 0 3 +205 0 -1 4 +205 0 0 2 +205 1 0 1 +205 1 0 3 +205 0 0 1 +205 0 0 3 +205 0 -1 4 +205 0 0 2 +205 0 0 1 +205 0 -1 4 +205 0 0 4 +205 0 0 3 +205 0 -1 3 +205 0 -1 2 +205 0 0 4 +205 0 0 4 +205 0 0 3 +205 0 0 4 +205 0 0 4 +205 0 -1 1 +205 0 0 2 +205 1 0 4 +205 0 0 4 +205 0 -1 3 +205 1 0 1 +205 0 0 2 +205 0 -1 3 +205 0 -1 1 +205 0 -1 4 +205 0 0 2 +205 0 0 3 +205 0 0 1 +205 0 0 4 +205 1 0 2 +205 1 0 3 +205 1 -1 2 +205 0 0 2 +205 1 0 3 +205 0 0 3 +205 0 0 1 +205 0 0 4 +205 0 0 4 +205 0 0 3 +205 1 0 2 +205 0 0 2 +205 0 0 4 +205 0 0 1 +205 1 0 3 +205 1 -1 3 +205 0 0 4 +205 0 -1 2 +205 0 0 2 +205 0 0 3 +205 0 0 4 +205 0 0 1 +205 0 0 1 +205 1 -1 4 +205 0 0 3 +205 1 0 4 +205 1 -1 2 +205 0 0 4 +205 0 0 3 +205 1 -1 2 +205 0 0 1 +205 0 0 3 +205 0 0 4 +205 1 0 2 +205 1 -1 4 +205 0 0 3 +205 0 0 2 +205 0 0 1 +205 1 0 2 +205 1 0 2 +205 0 0 2 +205 0 0 3 +205 0 0 4 +205 0 -1 1 +205 0 0 4 +205 0 0 3 +205 0 -1 2 +205 0 0 4 +205 1 -1 1 +205 1 0 1 +205 1 0 1 +205 0 0 1 +205 1 -1 1 +205 0 -1 3 +205 0 0 2 +205 0 0 4 +205 0 0 4 +205 0 0 1 +205 1 0 1 +205 0 0 1 +205 1 -1 3 +205 0 0 2 +205 0 0 1 +205 0 -1 4 +205 0 0 1 +205 0 -1 2 +205 0 0 1 +205 1 0 3 +205 0 -1 3 +205 0 0 2 +205 0 0 1 +205 1 0 4 +205 0 0 1 +205 0 0 4 +205 0 0 2 +205 0 0 1 +205 1 -1 3 +205 0 0 2 +205 1 0 1 +205 0 0 1 +205 0 0 4 +205 1 0 1 +205 0 0 3 +205 0 -1 2 +205 0 0 1 +205 0 -1 4 +205 0 -1 3 +205 0 0 2 +205 1 0 1 +205 0 0 1 +205 0 -1 3 +205 0 0 1 +205 0 0 2 +205 0 0 3 +205 0 0 4 +205 0 0 1 +205 0 0 4 +205 0 -1 3 +205 0 0 2 +205 1 -1 2 +205 1 0 1 +205 0 0 1 +205 0 0 4 +205 0 0 1 +205 0 0 3 +205 1 -1 1 +205 0 0 1 +205 0 0 2 +206 1 0 1 +206 0 0 2 +206 0 0 1 +206 1 -1 1 +206 0 -1 3 +206 0 0 1 +206 0 0 2 +206 0 0 4 +206 0 0 1 +206 1 -1 2 +206 1 0 3 +206 0 0 3 +206 0 0 4 +206 0 -1 1 +206 0 -1 2 +206 0 0 3 +206 1 0 3 +206 0 0 4 +206 1 0 3 +206 1 -1 3 +206 0 0 3 +206 0 0 1 +206 0 0 3 +206 0 0 2 +206 1 0 3 +206 0 -1 4 +206 1 0 3 +206 1 0 1 +206 1 0 1 +206 0 0 1 +206 0 0 3 +206 0 0 3 +206 0 0 3 +206 1 -1 1 +206 0 -1 2 +206 0 0 4 +206 0 0 3 +206 1 0 3 +206 1 0 3 +206 0 0 3 +206 0 -1 1 +206 0 0 3 +206 0 0 3 +206 0 0 3 +206 0 0 3 +206 1 0 1 +206 0 0 3 +206 0 0 2 +206 0 -1 4 +206 0 0 1 +206 1 0 1 +206 0 0 3 +206 0 0 2 +206 0 0 4 +206 0 -1 1 +206 0 0 3 +206 0 -1 4 +206 0 0 1 +206 0 0 3 +206 0 0 1 +206 0 0 4 +206 0 0 1 +206 1 0 3 +206 1 -1 3 +206 0 0 1 +206 0 -1 3 +206 0 -1 4 +206 0 -1 2 +206 0 -1 3 +206 1 0 1 +206 1 0 1 +206 1 0 1 +206 1 0 1 +206 1 -1 1 +206 0 -1 1 +206 0 -1 1 +206 0 0 3 +206 0 0 1 +206 0 0 4 +206 1 0 2 +206 0 0 1 +206 1 0 1 +206 1 0 3 +206 1 0 1 +206 1 0 3 +206 0 0 1 +206 0 0 3 +206 1 -1 1 +206 1 0 3 +206 0 -1 1 +206 0 -1 3 +206 0 -1 1 +206 1 0 2 +206 0 0 2 +206 0 -1 1 +206 0 -1 4 +206 0 0 3 +206 0 0 1 +206 0 0 3 +206 1 0 1 +206 0 0 1 +206 0 0 3 +206 1 0 4 +206 0 0 1 +206 0 0 4 +206 1 -1 2 +206 1 0 4 +206 1 0 4 +206 1 0 4 +206 0 -1 4 +206 0 0 3 +206 1 0 4 +206 0 0 4 +206 0 0 3 +206 0 0 4 +206 1 0 2 +206 0 0 2 +206 0 0 4 +206 0 0 
2 +206 0 0 1 +206 0 -1 2 +206 0 0 4 +206 0 0 3 +206 0 0 2 +206 1 0 2 +206 0 0 2 +206 0 0 1 +206 1 0 4 +206 0 0 4 +206 0 -1 3 +206 0 -1 2 +206 0 -1 1 +206 0 0 4 +206 0 0 1 +206 0 -1 3 +206 1 0 2 +206 1 -1 2 +206 1 -1 2 +206 0 0 2 +206 1 -1 4 +206 1 0 4 +206 1 0 4 +206 0 0 4 +206 0 0 3 +206 0 0 1 +206 0 0 2 +206 0 0 4 +206 0 0 3 +206 0 0 1 +206 0 0 2 +206 0 0 1 +206 0 0 3 +206 0 -1 3 +206 0 0 4 +206 0 -1 4 +206 0 0 1 +206 0 -1 1 +206 0 -1 2 +206 0 -1 2 +206 0 0 1 +206 0 0 1 +206 0 -1 1 +206 0 -1 3 +206 1 0 4 +206 0 0 4 +206 0 0 4 +206 0 0 4 +206 0 0 4 +206 0 0 2 +206 0 0 4 +206 0 0 2 +206 0 0 4 +206 0 0 4 +206 0 0 2 +206 0 0 4 +206 0 0 3 +206 0 -1 4 +206 0 0 2 +206 1 0 2 +206 0 -1 2 +206 0 -1 4 +206 0 0 3 +206 0 0 3 +206 0 0 2 +206 1 0 2 +206 0 0 2 +206 1 0 2 +206 0 0 2 +206 1 -1 2 +206 1 0 2 +206 1 0 2 +206 0 0 2 +206 1 0 2 +206 0 -1 2 +206 0 -1 2 +206 0 0 4 +206 1 0 2 +206 0 -1 2 +206 0 0 2 +206 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 1 +207 1 0 1 +207 0 0 1 +207 0 0 3 +207 0 0 4 +207 0 -1 3 +207 0 0 1 +207 0 0 1 +207 0 0 4 +207 1 -1 3 +207 0 0 3 +207 0 0 1 +207 0 0 1 +207 0 0 1 +207 0 -1 2 +207 0 0 4 +207 0 -1 4 +207 0 0 3 +207 1 -1 1 +207 0 0 3 +207 0 0 3 +207 0 0 3 +207 1 0 3 +207 0 0 3 +207 1 0 4 +207 0 -1 4 +207 1 -1 1 +207 0 0 4 +207 0 -1 3 +207 0 0 3 +207 0 0 3 +207 0 0 3 +207 1 0 4 +207 1 0 3 +207 0 0 3 +207 0 -1 1 +207 0 0 3 +207 1 0 3 +207 0 -1 3 +207 0 0 4 +207 0 -1 3 +207 0 -1 4 +207 0 0 1 +207 0 -1 3 +207 1 0 1 +207 1 0 1 +207 0 0 1 +207 0 -1 1 +207 0 0 1 +207 0 -1 2 +207 1 0 3 +207 0 0 3 +207 0 -1 3 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 1 +207 1 -1 1 +207 0 0 4 +207 0 -1 4 +207 0 0 3 +207 0 0 3 +207 1 0 3 +207 0 0 3 +207 0 -1 3 +207 1 -1 1 +207 0 -1 1 +207 0 0 2 +207 0 0 2 +207 1 0 2 +207 0 0 2 +207 0 -1 2 +207 1 0 4 +207 0 -1 4 +207 0 -1 3 +207 0 0 1 +207 0 0 1 +207 1 0 1 +207 0 0 1 +207 1 -1 1 +207 0 -1 1 +207 0 0 4 +207 0 -1 3 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 1 0 2 +207 0 0 2 +207 0 -1 2 +207 0 -1 1 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 2 +207 0 0 2 +207 0 -1 2 +207 0 -1 1 +207 1 0 1 +207 0 0 1 +207 0 -1 3 +207 0 0 3 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 1 0 4 +207 0 0 4 +207 0 -1 4 +207 1 0 2 +207 0 -1 2 +207 0 0 2 +207 0 -1 2 +207 1 0 1 +207 0 -1 1 +207 1 -1 1 +207 1 0 1 +207 0 0 3 +207 0 -1 3 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 0 0 4 +207 1 -1 4 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 1 0 2 +207 0 0 2 +207 0 0 2 +207 1 0 2 +207 0 0 2 +207 0 0 2 +207 1 0 2 +207 0 -1 2 +207 1 0 4 +207 0 0 4 +207 1 0 4 +207 0 0 4 +207 1 0 4 +207 0 0 4 +207 0 0 4 +207 0 -1 4 +207 0 0 4 +207 1 0 4 +207 0 0 4 +207 0 0 4 +207 1 -1 4 +207 0 -1 4 +207 0 0 3 +207 1 0 3 +207 0 -1 3 +207 0 0 3 +207 1 -1 3 +207 0 0 3 +207 0 0 3 +207 0 0 3 +207 0 0 1 +207 0 0 1 +207 0 0 1 +207 0 -1 1 +207 0 0 1 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 -1 2 +207 1 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 2 +207 0 0 4 +207 1 -1 4 +207 0 0 4 +207 0 -1 4 +207 0 -1 4 +207 0 0 3 +207 1 -1 3 +207 1 -1 3 +207 1 0 3 +207 1 -1 3 +207 0 0 3 +207 0 -1 3 +207 0 -1 2 +207 0 0 1 +207 0 -1 1 +207 0 -1 1 +207 0 0 4 +207 0 0 4 +207 0 -1 4 +208 0 0 1 +208 0 0 2 +208 1 0 3 +208 0 0 3 +208 0 0 1 +208 0 -1 2 +208 1 -1 3 +208 0 0 4 +208 0 0 4 +208 0 -1 3 +208 0 0 4 +208 0 0 1 +208 0 0 1 +208 0 0 4 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 3 +208 1 -1 3 +208 1 0 3 +208 1 0 3 +208 1 0 3 +208 1 0 3 +208 1 0 3 +208 1 -1 3 +208 1 0 3 +208 1 -1 3 +208 0 -1 3 +208 0 -1 3 +208 0 -1 3 +208 0 0 2 +208 1 0 4 +208 0 0 
4 +208 0 -1 4 +208 0 0 1 +208 0 -1 1 +208 1 0 1 +208 0 0 1 +208 0 0 1 +208 1 0 1 +208 0 0 1 +208 0 -1 1 +208 0 0 1 +208 0 -1 1 +208 0 -1 3 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 0 0 2 +208 0 0 1 +208 0 0 2 +208 0 0 3 +208 0 -1 4 +208 0 0 1 +208 0 0 2 +208 0 0 3 +208 1 0 1 +208 0 0 1 +208 0 0 1 +208 1 0 1 +208 1 0 1 +208 0 0 1 +208 0 -1 1 +208 0 0 2 +208 0 0 3 +208 1 0 4 +208 0 0 4 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 0 0 1 +208 0 0 2 +208 0 -1 3 +208 1 -1 1 +208 0 0 1 +208 0 0 1 +208 0 0 1 +208 0 0 1 +208 1 0 1 +208 0 0 1 +208 0 -1 1 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 3 +208 1 0 4 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 0 0 1 +208 0 0 2 +208 0 -1 3 +208 0 0 1 +208 0 0 2 +208 0 -1 1 +208 0 -1 2 +208 0 0 3 +208 0 0 4 +208 1 -1 3 +208 0 -1 3 +208 1 0 4 +208 1 0 4 +208 0 0 4 +208 0 0 4 +208 1 -1 4 +208 0 0 4 +208 1 -1 4 +208 0 -1 4 +208 0 0 4 +208 1 -1 4 +208 0 0 4 +208 0 0 4 +208 1 -1 4 +208 1 0 4 +208 1 0 4 +208 0 -1 4 +208 0 0 4 +208 0 0 4 +208 1 0 4 +208 1 0 4 +208 1 0 4 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 0 0 4 +208 0 0 4 +208 0 0 4 +208 1 0 4 +208 0 0 4 +208 1 0 4 +208 1 0 4 +208 1 -1 4 +208 1 0 4 +208 1 -1 4 +208 0 0 4 +208 1 0 4 +208 1 0 4 +208 0 -1 4 +208 0 0 4 +208 1 -1 4 +208 0 -1 4 +208 0 0 4 +208 0 -1 4 +208 0 0 4 +208 0 0 1 +208 0 -1 4 +208 0 0 4 +208 0 -1 4 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 0 0 4 +208 0 -1 4 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 -1 3 +208 0 0 3 +208 0 -1 3 +208 0 0 1 +208 0 0 1 +208 0 0 1 +208 0 0 1 +208 0 0 2 +208 0 0 2 +208 1 -1 1 +208 0 0 2 +208 0 0 1 +208 0 -1 2 +208 0 0 1 +208 0 0 1 +208 0 0 1 +208 0 -1 3 +208 0 0 4 +208 0 0 4 +208 0 -1 4 +208 1 0 2 +208 0 0 2 +208 0 0 2 +208 1 0 2 +208 0 -1 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 1 -1 2 +208 0 0 2 +208 0 0 2 +208 0 0 2 +208 1 -1 2 +208 0 0 1 +208 0 -1 2 +208 0 0 2 +208 0 0 2 +209 0 0 3 +209 0 0 1 +209 0 0 4 +209 0 0 2 +209 0 0 4 +209 0 0 1 +209 1 0 3 +209 0 0 2 +209 1 -1 2 +209 0 0 4 +209 0 0 1 +209 0 0 3 +209 0 0 2 +209 0 0 1 +209 1 0 3 +209 1 0 3 +209 0 0 3 +209 0 0 2 +209 0 0 4 +209 1 -1 1 +209 1 0 1 +209 1 -1 1 +209 0 0 1 +209 1 0 2 +209 0 0 2 +209 1 0 4 +209 0 0 4 +209 0 -1 1 +209 0 0 3 +209 1 -1 2 +209 0 -1 2 +209 0 0 2 +209 0 -1 4 +209 0 -1 3 +209 0 -1 1 +209 1 -1 4 +209 1 -1 3 +209 0 0 3 +209 0 0 2 +209 1 -1 4 +209 0 0 4 +209 0 0 1 +209 0 0 3 +209 0 0 2 +209 0 0 4 +209 0 0 3 +209 1 0 1 +209 1 -1 1 +209 0 0 1 +209 0 0 2 +209 0 -1 3 +209 1 0 4 +209 0 0 2 +209 0 0 1 +209 0 0 4 +209 0 -1 3 +209 0 -1 3 +209 0 0 1 +209 0 0 2 +209 0 0 3 +209 0 0 4 +209 0 0 1 +209 0 0 2 +209 0 0 2 +209 1 0 4 +209 0 0 3 +209 0 0 1 +209 0 0 4 +209 0 -1 2 +209 1 0 3 +209 1 -1 3 +209 0 0 4 +209 1 0 4 +209 0 -1 4 +209 0 -1 1 +209 0 -1 3 +209 0 0 2 +209 1 0 2 +209 0 -1 2 +209 1 0 4 +209 0 -1 4 +209 1 0 3 +209 0 0 3 +209 0 0 3 +209 0 0 1 +209 1 0 1 +209 0 -1 2 +209 0 0 2 +209 0 -1 3 +209 0 0 4 +209 0 -1 4 +209 0 0 3 +209 1 -1 3 +209 1 0 4 +209 0 0 2 +209 0 0 1 +209 0 0 4 +209 1 0 3 +209 0 0 2 +209 1 0 3 +209 0 0 2 +209 0 0 1 +209 0 -1 2 +209 0 0 3 +209 0 0 1 +209 1 0 4 +209 0 0 3 +209 0 -1 1 +209 0 0 2 +209 1 -1 4 +209 0 0 1 +209 0 0 4 +209 1 0 2 +209 0 0 2 +209 0 0 1 +209 0 -1 3 +209 1 0 1 +209 0 0 4 +209 0 0 4 +209 0 0 4 +209 0 -1 2 +209 0 0 2 +209 0 0 2 +209 0 0 1 +209 0 0 3 +209 0 0 3 +209 0 -1 3 +209 0 0 3 +209 0 0 3 +209 1 0 1 +209 1 0 1 +209 1 0 4 +209 0 0 4 +209 0 0 3 +209 1 -1 2 +209 0 0 2 +209 0 0 3 +209 1 -1 1 +209 0 0 4 +209 0 -1 1 +209 0 0 1 +209 0 -1 2 +209 0 -1 4 +209 1 -1 4 +209 0 0 4 +209 0 -1 3 +209 0 0 3 +209 1 0 2 +209 0 0 2 
+209 0 0 1 +209 0 -1 3 +209 0 0 2 +209 0 0 1 +209 0 0 1 +209 0 0 4 +209 0 0 1 +209 1 0 3 +209 0 0 3 +209 0 -1 3 +209 0 0 2 +209 0 0 1 +209 0 0 4 +209 0 0 3 +209 0 -1 1 +209 0 -1 3 +209 0 0 3 +209 0 0 1 +209 0 -1 4 +209 0 -1 4 +209 0 0 2 +209 0 0 1 +209 0 0 4 +209 1 0 2 +209 0 0 1 +209 0 -1 4 +209 1 0 1 +209 0 0 3 +209 0 -1 4 +209 0 -1 3 +209 0 0 3 +209 0 0 4 +209 0 0 2 +209 0 0 3 +209 0 0 3 +209 0 0 1 +209 0 -1 4 +209 0 0 3 +209 0 -1 3 +209 0 -1 2 +209 0 0 4 +209 0 0 1 +209 1 0 2 +209 0 0 4 +209 1 0 2 +209 1 0 1 +209 0 0 2 +209 0 0 3 +209 0 -1 4 +209 0 0 4 +209 0 0 1
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/bart_exampleData.txt b/Python/hbayesdm/common/extdata/bart_exampleData.txt
new file mode 100644
index 00000000..4890955f
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/bart_exampleData.txt
@@ -0,0 +1,91 @@
+subjID group trial reward pumps explosion
+101 0 1 1 14 1
+101 0 2 1 39 0
+101 0 3 1 50 0
+101 0 4 1 81 0
+101 0 5 1 45 1
+101 0 6 1 80 0
+101 0 7 1 45 1
+101 0 8 1 90 0
+101 0 9 1 65 1
+101 0 10 1 27 1
+101 0 11 1 40 1
+101 0 12 1 80 1
+101 0 13 1 95 1
+101 0 14 1 12 1
+101 0 15 1 36 1
+101 0 16 1 75 0
+101 0 17 1 55 1
+101 0 18 1 70 0
+101 0 19 1 26 1
+101 0 20 1 68 0
+101 0 21 1 55 1
+101 0 22 1 75 0
+101 0 23 1 64 1
+101 0 24 1 5 1
+101 0 25 1 38 1
+101 0 26 1 76 0
+101 0 27 1 57 0
+101 0 28 1 56 1
+101 0 29 1 48 1
+101 0 30 1 88 1
+103 0 1 1 88 1
+103 0 2 1 36 1
+103 0 3 1 30 0
+103 0 4 1 50 0
+103 0 5 1 56 1
+103 0 6 1 40 0
+103 0 7 1 6 1
+103 0 8 1 33 1
+103 0 9 1 30 0
+103 0 10 1 50 0
+103 0 11 1 22 1
+103 0 12 1 56 1
+103 0 13 1 20 0
+103 0 14 1 30 0
+103 0 15 1 40 0
+103 0 16 1 30 0
+103 0 17 1 30 0
+103 0 18 1 40 0
+103 0 19 1 45 0
+103 0 20 1 50 0
+103 0 21 1 55 1
+103 0 22 1 50 0
+103 0 23 1 60 0
+103 0 24 1 3 1
+103 0 25 1 40 0
+103 0 26 1 50 0
+103 0 27 1 30 0
+103 0 28 1 50 0
+103 0 29 1 30 0
+103 0 30 1 50 0
+104 0 1 1 50 0
+104 0 2 1 38 1
+104 0 3 1 50 0
+104 0 4 1 24 1
+104 0 5 1 46 1
+104 0 6 1 42 0
+104 0 7 1 11 1
+104 0 8 1 50 0
+104 0 9 1 28 1
+104 0 10 1 50 0
+104 0 11 1 50 1
+104 0 12 1 40 0
+104 0 13 1 40 0
+104 0 14 1 2 1
+104 0 15 1 40 0
+104 0 16 1 40 0
+104 0 17 1 40 0
+104 0 18 1 40 0
+104 0 19 1 25 1
+104 0 20 1 40 0
+104 0 21 1 40 0
+104 0 22 1 40 0
+104 0 23 1 36 1
+104 0 24 1 38 0
+104 0 25 1 50 0
+104 0 26 1 38 1
+104 0 27 1 40 0
+104 0 28 1 40 0
+104 0 29 1 30 0
+104 0 30 1 40 0
diff --git a/Python/hbayesdm/common/extdata/choiceRT_exampleData.txt b/Python/hbayesdm/common/extdata/choiceRT_exampleData.txt
new file mode 100644
index 00000000..bbe2b474
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/choiceRT_exampleData.txt
@@ -0,0 +1,5001 @@
+RT choice subjID condition +0.238126253704183 1 1 1 +0.788334139249308 2 1 1 +0.524351202388138 2 1 1 +1.30852451859186 1 1 1 +0.244177006142252 1 1 1 +0.512534281943979 1 1 1 +0.570872020376975 2 1 1 +0.552056452179357 1 1 1 +0.298121361381527 2 1 1 +0.323864684737407 1 1 1 +0.542476237007045 1 1 1 +0.457829931981559 2 1 1 +0.214443816443766 1 1 1 +0.282641758197282 2 1 1 +0.577296397953241 1 1 1 +0.80363268095685 2 1 1 +0.630866151842371 2 1 1 +0.561537877283935 2 1 1 +0.447864619700588 1 1 1 +0.271079966516117 2 1 1 +0.286558308483825 2 1 1 +0.402853789793329 2 1 1 +0.261247265870358 2 1 1 +0.954323974954787 1 1 1 +0.233982750292549 2 1 1 +0.534509968347321 2 1 1 +1.38489463892966 1 1 1 +0.51382752398596 2 1 1 +0.877226598584423 2 1 1 +0.59661096895894 2 1 1 +0.653486235884601 2 1 1 +0.499754559401486 2 1 1 +0.234607668817517 2 1 1 +0.531596228343812 2 1 1
+0.517067421390557 2 1 1 +0.286714432990514 2 1 1 +0.280389415416944 1 1 1 +0.770848791728697 2 1 1 +0.242534242474749 2 1 1 +1.21402951161598 1 1 1 +0.254230773115822 1 1 1 +0.235607609409862 1 1 1 +0.893859490775577 2 1 1 +0.4248828895841 2 1 1 +0.806633683066691 1 1 1 +0.52846751057204 2 1 1 +0.283404274358359 2 1 1 +0.38261147359119 1 1 1 +0.366467333270928 2 1 1 +0.89906087165271 2 1 1 +0.473523175525898 2 1 1 +0.61052334774835 2 1 1 +0.348877038822898 2 1 1 +0.509848343105319 2 1 1 +0.714362767211544 2 1 1 +0.366653361634071 1 1 1 +0.504639516528354 2 1 1 +0.789291266027802 1 1 1 +0.220496731951155 2 1 1 +0.225368494671686 2 1 1 +0.935425512110651 1 1 1 +0.596093103065834 2 1 1 +0.751187828634478 2 1 1 +0.398369973292919 2 1 1 +0.803192132747886 1 1 1 +0.653642313281921 2 1 1 +0.759465190620081 2 1 1 +0.301158475484036 2 1 1 +0.468546635484975 2 1 1 +0.45136376067397 2 1 1 +0.225730206953994 1 1 1 +0.871541732294617 2 1 1 +1.02231746439083 2 1 1 +0.453015412970327 1 1 1 +0.198424664401742 2 1 1 +0.300531454438104 2 1 1 +0.473723469079576 1 1 1 +0.397993417619097 2 1 1 +0.990744721453659 2 1 1 +0.576175729949669 2 1 1 +0.200104343196362 2 1 1 +0.397950225292451 2 1 1 +0.595871677587168 1 1 1 +0.631283245367399 2 1 1 +0.225640535433198 2 1 1 +0.671278939344137 2 1 1 +0.562888330598081 1 1 1 +0.713201556333214 1 1 1 +0.429372024083033 2 1 1 +0.559437949496943 1 1 1 +0.747758954300599 2 1 1 +0.668556572370471 2 1 1 +0.179933868089705 1 1 1 +0.557946405103375 2 1 1 +0.781925159045207 2 1 1 +0.629998909619026 2 1 1 +0.73419031432803 1 1 1 +0.917048954570217 2 1 1 +1.27326330493077 1 1 1 +0.715099066135782 1 1 1 +0.561629162179203 2 1 1 +0.439342876745989 1 1 1 +0.212480989248291 2 1 1 +0.431997523692581 2 1 1 +0.504823085985375 1 1 1 +0.209443682735351 2 1 1 +0.535478168252645 1 1 1 +0.523309589143815 2 1 1 +0.292526841667345 2 1 1 +0.292598915819633 1 1 1 +0.383304045988112 2 1 1 +0.220801631101784 2 1 1 +0.240025256471961 2 1 1 +0.374617088048471 2 1 1 +0.225139772246513 2 1 1 +0.504765209525881 1 1 1 +0.536719069622199 1 1 1 +0.265730079523484 2 1 1 +0.788176797412021 2 1 1 +0.317054055572024 1 1 1 +0.984288372815029 1 1 1 +0.242704368769227 1 1 1 +1.16310843477133 1 1 1 +0.671512143534472 2 1 1 +0.235624281398265 2 1 1 +0.585278561981407 1 1 1 +0.313710683818167 1 1 1 +0.512453071354528 2 1 1 +0.318816084203735 2 1 1 +0.492290766723273 2 1 1 +0.869267244819061 2 1 1 +0.416347372277426 2 1 1 +1.25171209855063 1 1 1 +0.421124063985099 2 1 1 +0.330265759909128 2 1 1 +0.32442759213596 2 1 1 +0.499405834143408 2 1 1 +0.605809814064198 2 1 1 +0.441813584555195 1 1 1 +0.355018580197292 2 1 1 +0.284917824978601 2 1 1 +0.712509291577718 2 1 1 +0.360087543146394 2 1 1 +0.593758109292972 2 1 1 +0.21793928877364 2 1 1 +0.511916501085172 1 1 1 +1.65665966055448 2 1 1 +0.462252095429733 1 1 1 +0.375694324193756 2 1 1 +0.573330510111478 1 1 1 +0.624806212238662 1 1 1 +0.3221554867038 1 1 1 +0.394184550616579 1 1 1 +0.233898257977356 2 1 1 +0.616451835954318 2 1 1 +0.435745652986984 2 1 1 +0.309831870195393 1 1 1 +0.303567774481905 1 1 1 +0.268141575894932 2 1 1 +0.685546680374616 2 1 1 +0.315857448984633 2 1 1 +0.358148021225324 1 1 1 +0.561597978729496 2 1 1 +0.575763837785002 2 1 1 +0.408409797786314 2 1 1 +0.456470478096314 2 1 1 +0.211008154807298 1 1 1 +0.537560426488747 1 1 1 +0.474119050536192 1 1 1 +0.349680702914349 2 1 1 +0.43874642118394 2 1 1 +0.741099937281951 2 1 1 +0.397490501092685 1 1 1 +0.455993632903328 2 1 1 +0.531917883353318 2 1 1 +0.544592749033783 2 1 1 +0.74575081631549 1 1 1 
+0.482830763020483 2 1 1 +0.280104823458282 2 1 1 +0.674827163589054 2 1 1 +0.27232449929437 1 1 1 +0.33609945965603 2 1 1 +0.642687813456977 2 1 1 +0.45152584390343 2 1 1 +0.393612819207325 1 1 1 +0.403513480920972 2 1 1 +0.55270209232572 2 1 1 +0.282474350101989 1 1 1 +0.225686494015142 1 1 1 +0.3032960404285 1 1 1 +0.741695387202929 1 1 1 +0.23627922113503 1 1 1 +0.317661404771517 2 1 1 +0.365881950379812 1 1 1 +0.671407911504626 2 1 1 +0.6327672361385 2 1 1 +0.408730216599132 2 1 1 +1.05295329016947 2 1 1 +0.647929253014634 2 1 1 +0.272505386795946 1 1 1 +0.477000937785718 2 1 1 +0.593679670773664 2 1 1 +0.485804513765726 1 1 1 +0.685108031619407 2 1 1 +0.463863491717212 2 1 1 +0.280869562583906 2 1 1 +0.484442256816249 1 1 1 +0.374203282894535 1 1 1 +0.205270568757322 2 1 1 +0.285730023779721 1 1 1 +0.420031671350127 2 1 1 +0.304140334800815 2 1 1 +0.455400240565684 1 1 1 +0.319295225911816 2 1 1 +0.853456173431349 2 1 1 +1.28296521539738 2 1 1 +0.402276812108308 2 1 1 +1.60365089898574 2 1 1 +0.513436951554669 2 1 1 +0.635287982445216 2 1 1 +0.756725913746622 1 1 1 +0.538411817875012 2 1 1 +0.252807751300543 1 1 1 +0.306493263230248 2 1 1 +0.328940637779731 1 1 1 +0.295149174376265 2 1 1 +0.428772773247104 2 1 1 +0.710257617392816 2 1 1 +2.1398843380733 2 1 1 +0.390964230021283 1 1 1 +0.30264732818644 1 1 1 +0.24604561365542 2 1 1 +0.326118394989355 2 1 1 +0.444302762917929 2 1 1 +0.994994120515054 2 1 1 +0.329747734506691 2 1 1 +0.244820417609073 2 1 1 +0.434344901812039 2 1 1 +0.245526039713125 2 1 1 +0.371387027622059 2 1 1 +0.396016682526436 2 1 1 +0.868293655068221 2 1 1 +0.339580118779972 2 1 1 +0.377321305638716 1 1 1 +0.352058350011174 2 1 1 +0.523222420484193 1 1 1 +1.63006360968846 2 1 1 +0.403780279358626 1 1 1 +0.33450821318739 2 1 1 +0.246049648436144 1 1 1 +0.73900563703035 2 1 1 +0.70659002598455 1 1 1 +0.251224036209508 1 1 1 +0.279682884105716 2 1 1 +0.446835970242547 2 1 1 +0.344773155307199 2 1 1 +1.18156313011751 2 1 1 +0.40159469187599 1 1 1 +0.662618250249293 2 1 1 +0.484088636200293 2 1 1 +0.249177412018199 1 1 1 +0.635565342005854 2 1 1 +0.237344366033974 2 1 1 +0.262837667936303 2 1 1 +0.223824529758815 2 1 1 +0.544851049052962 1 1 1 +1.22941904309934 1 1 1 +1.18790150456476 2 1 1 +0.397012831119724 2 1 1 +0.542500816372649 1 1 1 +0.215934076714995 2 1 1 +1.30455859046761 2 1 1 +0.536099297245265 2 1 1 +0.414264536316934 2 1 1 +0.777679859044325 2 1 1 +0.697575719549679 1 1 1 +0.704761484394448 2 1 1 +0.286893353427223 1 1 1 +0.681973124438239 2 1 1 +0.397462829482937 2 1 1 +0.257670640245336 2 1 1 +0.236649584180499 2 1 1 +0.436790435094707 1 1 1 +0.574656753851278 1 1 1 +0.253082319735779 1 1 1 +0.61806692862892 1 1 1 +0.46661603680114 2 1 1 +0.195332992231242 1 1 1 +0.229629897436442 1 1 1 +0.992646398039104 2 1 1 +0.94136783174252 2 1 1 +0.837333099387364 1 1 1 +0.430204780391451 2 1 1 +1.37515921760222 2 1 1 +0.89875556054097 2 1 1 +0.696864042518777 2 1 1 +0.332933586834615 2 1 1 +0.334070550417085 2 1 1 +0.38676132253602 2 1 1 +0.306404665389991 2 1 1 +0.478254432945422 2 1 1 +0.601997570889218 2 1 1 +0.373642558748753 2 1 1 +0.29388256861859 1 1 1 +0.403146732540824 2 1 1 +0.754379822737839 2 1 1 +0.20827688411218 2 1 1 +0.211975975201092 2 1 1 +0.591340246795799 1 1 1 +0.263322621163444 2 1 1 +0.525170614901281 1 1 1 +0.206823345071543 1 1 1 +0.935520204615524 1 1 1 +0.550910831841 2 1 1 +0.232504114652867 2 1 1 +0.391975720570035 1 1 1 +0.52537232580037 2 1 1 +0.604827669281913 2 1 1 +0.440173374557048 2 1 1 +0.294878838994327 2 1 1 
+0.323868811622971 1 1 1 +0.240824506056104 2 1 1 +0.423271049333481 2 1 1 +0.849356591210965 2 1 1 +0.335818515496422 2 1 1 +0.538745656799135 2 1 1 +0.4208751745964 2 1 1 +0.55146359110108 2 1 1 +0.467882029849217 2 1 1 +0.567777388073783 2 1 1 +0.311394332684366 1 1 1 +1.33531192845093 1 1 1 +0.368867535882799 2 1 1 +0.340092989922591 2 1 1 +0.299811445088077 2 1 1 +0.198131285653104 2 1 1 +1.57688580580023 2 1 1 +0.671467937043381 2 1 1 +0.422481878776226 2 1 1 +0.48313672040092 1 1 1 +0.473697344635179 1 1 1 +0.68951966048344 2 1 1 +0.633967567703449 2 1 1 +0.382761102393661 1 1 1 +0.68736056335558 1 1 1 +0.677810537621417 2 1 1 +0.249614606331914 2 1 1 +0.755321813543998 2 1 1 +0.370198385669219 1 1 1 +0.697638915493631 1 1 1 +0.684828719775979 2 1 1 +0.502349799392655 1 1 1 +0.583605969114717 1 1 1 +0.517041977935336 1 1 1 +0.411670106317747 1 1 1 +0.207610898625408 1 1 1 +0.414006631133478 2 1 1 +0.921999310392829 1 1 1 +0.409507167245215 2 1 1 +0.584716070617761 2 1 1 +0.666513112126972 2 1 1 +0.233005827550518 2 1 1 +0.60770657746225 1 1 1 +0.221784346267773 2 1 1 +0.771391695716424 2 1 1 +1.21988159355549 1 1 1 +0.408933678664394 2 1 1 +1.03374983542661 2 1 1 +0.939198644733114 2 1 1 +0.962067734082042 2 1 1 +0.473406448845882 2 1 1 +0.223644602219167 2 1 1 +1.12139515597077 1 1 1 +0.299025722625131 2 1 1 +0.55009896091157 1 1 1 +0.578799507502895 1 1 1 +0.962097234341087 1 1 1 +0.348861796367042 2 1 1 +0.35773121339554 2 1 1 +0.246740510307971 1 1 1 +0.407111627051893 1 1 1 +0.550930871978825 1 1 1 +0.422754497543909 2 1 1 +0.758063342099552 2 1 1 +0.380847347114823 2 1 1 +0.377055603284598 2 1 1 +0.65608839650545 1 1 1 +1.42357385911498 2 1 1 +0.232798506755752 2 1 1 +0.539867634108279 2 1 1 +0.392489725525737 2 1 1 +0.367110223983889 2 1 1 +0.839639947757427 2 1 1 +0.592327430792799 2 1 1 +0.239126262427817 2 1 1 +0.328625329636268 2 1 1 +0.284257957756146 1 1 1 +0.331590178883346 1 1 1 +0.620620411662111 2 1 1 +0.755967038010479 2 1 1 +0.331900743408574 2 1 1 +0.421146686045199 1 1 1 +1.07476503410067 2 1 1 +0.507112598176372 1 1 1 +0.311566718621004 2 1 1 +0.301319320070233 2 1 1 +0.393257034342845 2 1 1 +0.673521481008061 2 1 1 +0.291567562966672 2 1 1 +1.05249328382332 2 1 1 +0.694698152076518 2 1 1 +0.733055920143737 2 1 1 +0.334350894107303 2 1 1 +0.542807697456418 1 1 1 +0.579281169009386 1 1 1 +0.320837583848137 1 1 1 +0.488074071042795 1 1 1 +0.213060081069537 2 1 1 +0.237230647833275 2 1 1 +0.237572229668373 1 1 1 +0.241805498724672 1 1 1 +0.21505246069559 2 1 1 +0.625069689033177 2 1 1 +0.391789762960315 1 1 1 +0.360924641936915 2 1 1 +0.434831888026175 1 1 1 +1.53947356804897 2 1 1 +0.390459073072731 2 1 1 +0.327186719063663 1 1 1 +0.451681415339723 1 1 1 +0.551841771615269 2 1 1 +0.41039773179749 1 1 1 +0.926634118987433 2 1 1 +0.813362027443744 2 1 1 +0.632371052186083 2 1 1 +1.07271976627787 1 1 1 +0.347281073927582 1 1 1 +0.44423560152159 1 1 1 +0.576366534316911 2 1 1 +0.279713029952993 2 1 1 +0.881466843024701 2 1 1 +0.374654223890455 1 1 1 +0.246340230252564 1 1 1 +0.46051090791758 2 1 1 +0.610478508455545 1 1 1 +0.290070606427311 2 1 1 +0.544420557842503 1 1 1 +0.776693279362721 1 1 1 +0.235406028367375 2 1 1 +0.239531675743827 1 1 1 +0.44775078332261 2 1 1 +0.272084709816774 1 1 1 +0.490027056594032 2 1 1 +1.11466956380519 2 1 1 +0.270448404879725 2 1 1 +0.442949902437612 1 1 1 +0.570651632322539 1 1 1 +0.32265845661882 2 1 1 +0.407435441210764 2 1 1 +0.200085052390358 2 1 1 +0.358511835895485 2 1 1 +1.2431214333383 2 1 1 +0.696171754957839 1 1 1 
+0.2777627469669 1 1 1 +0.429359856138122 2 1 1 +0.340524177360971 2 1 1 +0.199944337376957 2 1 1 +0.398334292684942 2 1 1 +0.388541579168816 2 1 1 +0.398547679838622 1 1 1 +0.839309822360769 2 1 1 +0.280253849702043 2 1 1 +0.547345720269382 2 1 1 +0.376647832731017 2 1 1 +0.455530332435412 2 1 1 +0.334196466045242 2 1 1 +0.759777271734527 2 1 1 +1.10869967729068 2 1 1 +0.222920909328599 2 1 1 +0.243727194101031 2 1 1 +0.331283374352904 2 1 1 +0.489803545251022 1 1 1 +0.2736011848833 2 1 1 +0.432409628386385 1 1 1 +0.447747022319498 2 1 1 +0.736283852147818 2 1 1 +0.461500847594122 1 1 1 +0.359367876631285 1 1 1 +0.418098062593873 2 1 1 +0.502693165924066 1 1 1 +0.260188072876792 1 1 1 +0.348437996297828 1 1 1 +1.57562306974174 2 1 1 +0.316108820930013 2 1 1 +0.421685918698271 2 1 1 +0.578695918727619 2 1 1 +1.12879309366769 2 1 1 +1.03916993441652 2 1 1 +0.492207222672778 1 1 1 +0.33283217994747 2 1 1 +0.39422420306568 2 1 2 +0.362300838201913 1 1 2 +0.469662901313467 2 1 2 +0.820030023322582 1 1 2 +0.234551440695508 2 1 2 +0.331679248955791 1 1 2 +0.527229640837085 2 1 2 +0.91734807805308 2 1 2 +0.319175515877037 2 1 2 +0.651053459158852 1 1 2 +0.661459624685597 2 1 2 +0.281279784597852 2 1 2 +0.342078529279457 1 1 2 +0.3636800828231 2 1 2 +0.484151346003298 1 1 2 +0.658827635325395 1 1 2 +0.622208937699232 1 1 2 +0.580811030835409 2 1 2 +0.441808620117506 1 1 2 +0.36060243933493 2 1 2 +0.831194064165385 2 1 2 +0.361776006347027 2 1 2 +0.777351339265196 1 1 2 +0.278293909155803 2 1 2 +0.278507100800553 2 1 2 +0.884402648451047 1 1 2 +0.342560342613834 2 1 2 +0.809676649841315 2 1 2 +0.516858099569803 1 1 2 +0.634645370682583 2 1 2 +0.249686099229778 1 1 2 +1.33141985698474 1 1 2 +0.453726915386914 2 1 2 +0.290504549136735 2 1 2 +0.487095756746479 2 1 2 +0.346501172556082 2 1 2 +0.393430828426059 1 1 2 +0.504449494787339 2 1 2 +0.367999687491587 2 1 2 +0.352469038071531 1 1 2 +0.234560015153837 2 1 2 +0.940841504372444 1 1 2 +0.2046902513565 2 1 2 +0.461341997193658 1 1 2 +0.610339950737745 2 1 2 +0.446921029186028 1 1 2 +0.515591108864551 2 1 2 +1.58260395843454 2 1 2 +0.344764743329778 2 1 2 +0.427254054893139 2 1 2 +0.516158776880019 1 1 2 +1.2612303673015 2 1 2 +0.613528615965816 2 1 2 +0.267963577139406 2 1 2 +0.307594651280269 2 1 2 +0.24101706884499 1 1 2 +0.455753268732021 2 1 2 +0.405040912881131 2 1 2 +0.288094483330521 1 1 2 +0.545610622237084 2 1 2 +0.452142838999807 1 1 2 +0.594527943497764 1 1 2 +0.88116621589308 2 1 2 +0.277767297820233 2 1 2 +0.279551393619652 2 1 2 +0.365460511604365 2 1 2 +0.556212898406868 2 1 2 +0.328560209842821 1 1 2 +0.531013993625691 1 1 2 +0.231888430468412 1 1 2 +0.677110774143983 2 1 2 +0.453921989085917 2 1 2 +0.459571696136957 2 1 2 +0.393356837769246 1 1 2 +0.511202810478497 1 1 2 +0.693614307574487 1 1 2 +0.240863923388269 2 1 2 +0.321852817508144 1 1 2 +0.270908403919833 2 1 2 +0.820724000663825 1 1 2 +0.235189573689813 2 1 2 +0.326155088030317 2 1 2 +0.631590224724998 1 1 2 +0.441990726662034 1 1 2 +0.84336570752273 2 1 2 +0.359995826600722 2 1 2 +0.251400135935091 1 1 2 +0.412798716611553 1 1 2 +0.257997459005081 1 1 2 +0.324911808695266 2 1 2 +0.869954063020224 2 1 2 +0.316774804913553 1 1 2 +0.802438949561354 2 1 2 +0.753010120858102 2 1 2 +0.50447570028204 1 1 2 +0.472994968867572 2 1 2 +0.365558799398694 2 1 2 +0.355836646801112 1 1 2 +0.571157381310202 2 1 2 +0.634686215618027 2 1 2 +0.270208965991148 2 1 2 +0.328585338874615 1 1 2 +0.384434393299423 2 1 2 +0.316023575731398 1 1 2 +0.494817395995112 2 1 2 +0.300504460120145 2 1 2 
+0.347783059904907 2 1 2 +1.02851702876777 2 1 2 +0.364863367923789 2 1 2 +0.460777943415657 2 1 2 +0.382793622325279 2 1 2 +0.273403607994913 2 1 2 +0.609426470046583 2 1 2 +0.297792901344866 1 1 2 +0.370479141756967 2 1 2 +0.882238434259769 1 1 2 +0.496857265474561 2 1 2 +0.277702369672893 2 1 2 +0.446926962878622 1 1 2 +0.36757607051588 1 1 2 +0.557136267106436 2 1 2 +1.00333007744122 2 1 2 +0.760219976689289 2 1 2 +0.332068843559009 2 1 2 +0.548961093445682 2 1 2 +0.313465233961872 2 1 2 +0.550216771807154 1 1 2 +0.29794278574353 1 1 2 +0.234198048951483 2 1 2 +0.273445183254746 2 1 2 +0.574886295740124 2 1 2 +0.258382409058055 1 1 2 +0.409845586460725 2 1 2 +0.326206723132256 2 1 2 +0.642595268751117 2 1 2 +0.232356531769144 2 1 2 +1.70736951927255 1 1 2 +0.274687338325608 2 1 2 +0.40877430223826 2 1 2 +0.365729356985064 2 1 2 +0.6050000403314 2 1 2 +0.592011487134505 2 1 2 +0.557179211825432 1 1 2 +0.873296855773591 1 1 2 +0.216826762785491 1 1 2 +0.517886780128018 2 1 2 +0.398323720600925 1 1 2 +1.12139464302831 2 1 2 +0.249538486660475 2 1 2 +0.360304338880141 1 1 2 +0.627773044075362 2 1 2 +0.996274959906684 2 1 2 +0.202797819180771 1 1 2 +0.383153769101205 1 1 2 +0.324797856324902 1 1 2 +0.239421301531662 2 1 2 +0.24289898785908 1 1 2 +0.547746136913622 1 1 2 +0.386255965400912 2 1 2 +0.60223673049116 2 1 2 +0.549261776998216 2 1 2 +0.395992071688511 2 1 2 +0.217402932038072 2 1 2 +0.295305459515413 2 1 2 +0.447909826549637 2 1 2 +0.71950962867128 2 1 2 +0.794816583397332 1 1 2 +0.241318968932987 2 1 2 +0.556293493098233 2 1 2 +0.238208378562322 2 1 2 +0.499247181746743 1 1 2 +0.317050968536836 2 1 2 +0.322686857249444 2 1 2 +0.71276761076242 2 1 2 +0.301030966624334 2 1 2 +0.336641004565653 2 1 2 +0.812046026214206 2 1 2 +0.270220261704131 2 1 2 +0.701954145112022 2 1 2 +0.43964095073941 2 1 2 +0.384704421988213 2 1 2 +0.501487364681699 2 1 2 +0.455023781459671 2 1 2 +0.332474164305816 2 1 2 +0.567142874907982 2 1 2 +0.253324335182053 2 1 2 +0.444329558298367 2 1 2 +0.750457236950695 2 1 2 +0.292500297080332 2 1 2 +0.319745451630673 2 1 2 +0.286210384865368 2 1 2 +0.283637752128579 1 1 2 +0.236044970372654 2 1 2 +0.606532173767213 1 1 2 +1.32620595835061 2 1 2 +0.49881945892801 2 1 2 +1.00559201100603 2 1 2 +0.498129494834216 2 1 2 +0.682007132416635 2 1 2 +0.521249610973914 2 1 2 +0.229929750671033 2 1 2 +1.12814610238938 1 1 2 +0.74135939367203 2 1 2 +1.14362542630031 2 1 2 +0.261969169934014 1 1 2 +0.240668217312327 1 1 2 +1.59220860546119 2 1 2 +0.459005868330534 2 1 2 +0.290018768199601 2 1 2 +0.204589440835719 1 1 2 +0.619039312673667 2 1 2 +0.667083334382893 1 1 2 +0.359845320132008 1 1 2 +0.912937103767445 2 1 2 +0.522430834145349 2 1 2 +0.297762304149053 1 1 2 +0.276240304783596 2 1 2 +0.399051717562123 2 1 2 +0.404254481667734 2 1 2 +1.23765251352633 1 1 2 +1.2213528437925 2 1 2 +0.554106620313858 2 1 2 +0.513543854359058 2 1 2 +0.718560875752879 2 1 2 +0.299045404005468 1 1 2 +0.197161504481574 2 1 2 +0.355424533393654 2 1 2 +0.601322385280793 2 1 2 +0.31408110064814 1 1 2 +0.681928297252204 2 1 2 +0.257899160580357 1 1 2 +0.331853308281021 2 1 2 +0.932271244383807 2 1 2 +0.762290747363875 1 1 2 +0.610315223598599 2 1 2 +0.508310743979851 2 1 2 +0.293542339726516 1 1 2 +0.249532498898509 2 1 2 +0.240661946068682 1 1 2 +0.480573774515142 2 1 2 +0.26503112695042 1 1 2 +0.745033574361612 2 1 2 +0.313418912457887 2 1 2 +0.428468490020874 2 1 2 +0.619836697801129 2 1 2 +0.404856983338945 2 1 2 +0.225135719018744 2 1 2 +0.247203725168153 2 1 2 +0.473126435201081 2 1 2 
+0.758881984366834 2 1 2 +0.530103620429835 2 1 2 +0.609787747426196 2 1 2 +0.42023331047044 1 1 2 +0.294545387085857 1 1 2 +0.311952071319945 2 1 2 +0.793299410776987 2 1 2 +0.376179978035794 2 1 2 +0.230418084856786 2 1 2 +0.25879024565358 2 1 2 +0.264796453159985 2 1 2 +0.745485785923675 2 1 2 +0.224026456721164 2 1 2 +0.6030135494348 1 1 2 +0.489733962171922 2 1 2 +0.39466687509252 1 1 2 +0.552205654391275 2 1 2 +0.575332864606377 2 1 2 +0.673079198373531 1 1 2 +0.346849143283538 1 1 2 +0.384205850032696 1 1 2 +0.382157410278578 2 1 2 +0.294710963958947 2 1 2 +0.487164402385991 1 1 2 +0.571768796864126 2 1 2 +0.243155946253846 1 1 2 +0.366816988109117 1 1 2 +0.556560232965345 2 1 2 +0.842238270178048 2 1 2 +0.630587019970835 1 1 2 +0.849346128585895 2 1 2 +0.301910596058078 1 1 2 +0.494388435823995 1 1 2 +0.635279696032103 2 1 2 +0.244333041807845 1 1 2 +0.462722638825509 2 1 2 +0.355580520263025 1 1 2 +0.419159925222802 2 1 2 +0.229441499742296 2 1 2 +0.272011206196529 2 1 2 +0.457900548880182 1 1 2 +0.42581270796691 2 1 2 +0.192946477357373 2 1 2 +0.920226167527353 2 1 2 +0.870356567626495 2 1 2 +0.506429857300226 1 1 2 +1.35129991323996 2 1 2 +0.81782131154567 1 1 2 +0.312753351203148 2 1 2 +0.240147726849663 2 1 2 +0.331594506915926 2 1 2 +0.303605405427918 2 1 2 +1.3692312598303 2 1 2 +0.464969591870211 2 1 2 +0.365081121121442 2 1 2 +0.53678523283272 1 1 2 +0.362543809949933 2 1 2 +0.300077415892361 2 1 2 +0.565255726546058 2 1 2 +0.508969800017276 1 1 2 +0.197931347436034 2 1 2 +0.425448002968464 2 1 2 +0.917689004198981 2 1 2 +0.265209389680314 1 1 2 +0.399534037154238 1 1 2 +0.292118455947818 2 1 2 +0.352289208022807 2 1 2 +0.800608594982045 2 1 2 +0.251768687680971 1 1 2 +0.280448242529617 2 1 2 +0.243473452337431 1 1 2 +0.286916333216323 2 1 2 +0.838049565027792 1 1 2 +0.244529227888586 1 1 2 +0.71863102671265 2 1 2 +1.98916958946463 2 1 2 +0.238806320688673 2 1 2 +0.693785935221629 2 1 2 +0.495890282805749 1 1 2 +0.191174545766406 2 1 2 +0.836607694563896 2 1 2 +0.391165841939288 2 1 2 +0.566993167116615 2 1 2 +0.240715729525045 2 1 2 +0.354039896192607 1 1 2 +1.21434836670206 2 1 2 +0.48154154974369 2 1 2 +0.798332748413893 2 1 2 +0.650917466844914 2 1 2 +0.384224495536896 1 1 2 +0.945026137940947 2 1 2 +0.418508744931679 2 1 2 +0.659365265496408 1 1 2 +0.271823834279208 2 1 2 +0.43920360190222 2 1 2 +0.449532948575899 2 1 2 +1.02510373135742 2 1 2 +0.49889822568904 1 1 2 +1.23542122006285 1 1 2 +0.274287087904293 2 1 2 +0.673318487358746 2 1 2 +0.397619138281994 2 1 2 +1.0555886269523 2 1 2 +0.229089202292979 1 1 2 +0.697871360095817 2 1 2 +0.614287525537126 1 1 2 +0.322578991679628 1 1 2 +0.605688688250448 2 1 2 +0.534481750546624 2 1 2 +0.565101205666666 1 1 2 +0.378499737439249 1 1 2 +0.52995012536057 2 1 2 +2.45554336829165 1 1 2 +0.744067895318506 2 1 2 +0.673980171567151 2 1 2 +0.33496535179204 2 1 2 +0.703852317870538 2 1 2 +0.623851718541645 2 1 2 +0.275936871629696 2 1 2 +0.245843960416957 2 1 2 +0.220780887604494 2 1 2 +0.585098991357547 2 1 2 +0.343992796279959 1 1 2 +0.580010432096859 2 1 2 +0.377174286172397 2 1 2 +0.614794867960386 1 1 2 +0.235740390671863 1 1 2 +0.498093604359181 2 1 2 +0.422668225465882 2 1 2 +0.85458472173833 2 1 2 +0.318077105190021 2 1 2 +0.660599386236034 2 1 2 +0.44253879597235 2 1 2 +0.482452162905769 2 1 2 +0.569360166827625 2 1 2 +1.5195957937337 1 1 2 +0.335177741698269 2 1 2 +0.241392133198455 2 1 2 +0.503619286339201 2 1 2 +0.578489345701315 2 1 2 +0.327768204464024 2 1 2 +0.436095089114902 2 1 2 +0.424989568541 1 1 2 
+0.214045608149353 2 1 2 +0.306116924393253 1 1 2 +0.476975246716608 2 1 2 +0.756750965776553 2 1 2 +0.312822136071239 2 1 2 +0.470827169455528 1 1 2 +0.37315029361616 2 1 2 +0.685155315108113 2 1 2 +0.959530818891534 2 1 2 +0.913595420136271 2 1 2 +0.595438752846243 2 1 2 +0.36802176344941 2 1 2 +0.418544504566566 2 1 2 +0.363048213885529 2 1 2 +0.254338756573215 1 1 2 +0.730957519992839 1 1 2 +0.263046554830887 1 1 2 +0.46094182659418 2 1 2 +0.531059000798822 2 1 2 +0.353280265477637 2 1 2 +0.464342980616116 2 1 2 +0.229724823533327 2 1 2 +0.504945673660676 2 1 2 +0.532493395334424 2 1 2 +0.423801763698387 1 1 2 +0.787113721614964 2 1 2 +0.223160559034952 2 1 2 +0.419595856308554 2 1 2 +0.396648316145306 2 1 2 +0.308908241587595 2 1 2 +0.627802576140553 2 1 2 +0.64888860721256 2 1 2 +0.738730808101364 2 1 2 +0.658745774579089 1 1 2 +0.679191956616965 1 1 2 +0.278164538209912 1 1 2 +0.205911141408479 2 1 2 +0.241638399787725 2 1 2 +0.24858355547484 1 1 2 +0.73740496979995 2 1 2 +0.247935082999496 2 1 2 +0.826311099617232 2 1 2 +0.590607775557781 1 1 2 +0.351249908681046 1 1 2 +0.370792468725378 2 1 2 +0.389722068994738 1 1 2 +0.251157837165118 2 1 2 +0.663087218040623 2 1 2 +0.454359737429872 2 1 2 +0.435474095638232 2 1 2 +0.284410206592962 2 1 2 +0.344506290138683 2 1 2 +1.01768620078799 1 1 2 +0.331330031800195 1 1 2 +0.277021859762052 1 1 2 +0.347332671037543 1 1 2 +0.286836805838407 1 1 2 +0.340934631295205 2 1 2 +1.22270556676254 1 1 2 +0.360534849486478 1 1 2 +0.359892263518994 2 1 2 +0.552595743599511 2 1 2 +0.301744081404754 2 1 2 +0.416037514267758 2 1 2 +0.541344562283886 2 1 2 +0.579986637345764 2 1 2 +0.221912718773351 2 1 2 +0.465245817277264 2 1 2 +0.474738754014913 2 1 2 +1.70409538281312 2 1 2 +1.02235518855245 2 1 2 +0.301214497598036 1 1 2 +0.991176433131545 2 1 2 +0.401432084705109 2 1 2 +1.11715380433533 2 1 2 +0.81719064511715 2 1 2 +0.549517654685354 1 1 2 +0.251345033237621 2 1 2 +0.357859075575934 2 1 2 +0.90132423193762 2 1 2 +0.272936669704676 1 1 2 +0.455508577827349 2 1 2 +0.861185664428614 2 1 2 +0.266987292082781 2 1 2 +0.578879341650739 2 1 2 +0.649256823455797 2 1 2 +0.418711362089519 2 1 2 +0.433426379919396 2 1 2 +0.642462173639701 1 1 2 +0.406446379518523 2 1 2 +0.290863063788828 1 1 2 +0.395803052313048 2 1 2 +0.311087619708231 2 1 2 +0.279185686505835 1 1 2 +0.412823984876793 1 1 2 +0.314508721309633 2 1 2 +0.417280760034167 2 1 2 +0.357813047077128 2 1 2 +0.256161295149574 2 1 2 +0.240326641914136 2 1 2 +0.469105961018824 2 1 2 +0.23311026462364 2 1 2 +0.219699590325278 2 1 2 +0.267828103451759 2 1 2 +0.324090708482963 1 1 2 +0.882370084866449 2 1 2 +0.296556033418114 2 1 2 +0.535028311840886 1 1 2 +0.43175137215661 2 2 1 +0.644941841007773 2 2 1 +0.454575049110823 1 2 1 +0.279695948494544 1 2 1 +0.426171246559654 2 2 1 +0.610510950165697 2 2 1 +0.230689244897577 1 2 1 +0.548095008243392 1 2 1 +0.98541029605035 2 2 1 +0.468950980918864 2 2 1 +0.684795778239259 2 2 1 +0.24699387402904 2 2 1 +0.27748506685569 2 2 1 +0.223809391596467 2 2 1 +0.400670922523929 2 2 1 +0.281960311355797 2 2 1 +0.231152419723023 2 2 1 +0.222978006492174 2 2 1 +0.524142717204131 1 2 1 +0.668726417947444 2 2 1 +0.683251780945197 1 2 1 +0.40863429831843 2 2 1 +0.276342613901865 2 2 1 +0.213193944799305 1 2 1 +0.40877289939876 2 2 1 +0.331817008251402 2 2 1 +0.258591934173063 1 2 1 +0.630903225088589 2 2 1 +0.987067038242542 2 2 1 +0.388841679090983 2 2 1 +0.50346695279445 2 2 1 +0.806091271285255 2 2 1 +0.492528933755195 2 2 1 +0.333653132977917 2 2 1 +0.385279766168457 2 2 1 
+0.595417467221174 2 2 1 +0.365549592893083 2 2 1 +0.433959310098169 2 2 1 +0.573461315434403 2 2 1 +0.344529930746843 2 2 1 +0.291958498883562 1 2 1 +0.70457842689903 1 2 1 +0.806575731336316 2 2 1 +0.398225114239535 1 2 1 +0.377149378210516 2 2 1 +0.4258972597472 2 2 1 +0.345195995455676 1 2 1 +0.351061263845004 1 2 1 +0.608134033587742 2 2 1 +0.59067213970621 2 2 1 +0.422680728979774 1 2 1 +0.365154436343711 2 2 1 +0.329038190670398 2 2 1 +0.50673589153492 1 2 1 +0.370741158040407 2 2 1 +0.326087804776541 2 2 1 +0.255452470867296 2 2 1 +0.390552106391914 2 2 1 +0.26482744433547 2 2 1 +0.33730434871047 2 2 1 +0.394549621635902 2 2 1 +0.309813859295583 2 2 1 +0.63449833976955 2 2 1 +1.06805758473031 2 2 1 +0.421454806981705 2 2 1 +0.281636679807289 2 2 1 +0.554306471235953 2 2 1 +0.604870136671619 2 2 1 +1.08217269575099 1 2 1 +0.379299151823082 2 2 1 +0.689740592470931 2 2 1 +0.254395650138749 2 2 1 +0.414172534052434 2 2 1 +0.520113577322035 2 2 1 +0.431996596446885 2 2 1 +1.08053726808782 2 2 1 +0.3988833679393 1 2 1 +0.353686935845557 2 2 1 +0.260219916678923 2 2 1 +0.86593681879694 2 2 1 +0.225590599418998 2 2 1 +0.349164601087427 2 2 1 +0.458075994072888 2 2 1 +0.294956639081246 2 2 1 +0.313858826908574 2 2 1 +0.31162986062042 1 2 1 +0.507204360257467 2 2 1 +0.352032600138167 1 2 1 +0.663557530182887 2 2 1 +0.199857632822527 1 2 1 +0.514502853877809 2 2 1 +0.469535266171427 2 2 1 +0.355020801164096 2 2 1 +0.683808271711007 2 2 1 +0.301603299502107 2 2 1 +0.269580060746496 2 2 1 +0.299073546316696 2 2 1 +1.10983985933577 2 2 1 +0.360307123921532 2 2 1 +0.442261294563025 2 2 1 +0.435168028548888 1 2 1 +0.270793007645254 2 2 1 +0.607563481133469 2 2 1 +0.242718448543483 2 2 1 +0.760580795652265 2 2 1 +0.431996589129307 2 2 1 +0.374282624407255 2 2 1 +0.416206432567024 2 2 1 +0.519571791432021 2 2 1 +0.196032953071603 2 2 1 +0.315596729977301 1 2 1 +0.679912355835528 2 2 1 +0.264862579134914 1 2 1 +0.218987554044978 2 2 1 +0.355620540652982 1 2 1 +0.543244665580406 1 2 1 +0.457512624736921 2 2 1 +1.3226460471116 1 2 1 +0.525011653461871 1 2 1 +0.370965283148772 2 2 1 +0.347902925695899 1 2 1 +0.398586878749805 2 2 1 +0.548961196694153 1 2 1 +0.410882145807631 2 2 1 +0.198715843872579 1 2 1 +0.225346382503031 2 2 1 +0.42578338272523 2 2 1 +0.270583704112055 2 2 1 +0.219387522590806 1 2 1 +0.342735985144739 2 2 1 +0.553884896165182 2 2 1 +0.34639458884139 2 2 1 +0.283955583130347 2 2 1 +0.223220560804016 2 2 1 +0.243212165170184 1 2 1 +0.417166480278331 2 2 1 +0.339544388395638 2 2 1 +0.546503987947626 1 2 1 +0.318410466893085 1 2 1 +0.339234853728487 1 2 1 +0.340516936127161 2 2 1 +0.518403351170541 2 2 1 +0.195409190162283 1 2 1 +0.358173332839706 2 2 1 +0.699954775815217 1 2 1 +0.348838538015419 2 2 1 +0.270023193413005 2 2 1 +0.266646456805835 2 2 1 +1.16775233367232 1 2 1 +0.432285328985634 2 2 1 +0.392918105200082 2 2 1 +0.280124804921595 2 2 1 +0.339253528717098 1 2 1 +0.612654152551717 2 2 1 +1.09354543233683 1 2 1 +0.253131913451641 1 2 1 +0.407884093764528 2 2 1 +0.729923816383264 2 2 1 +0.412724482985278 1 2 1 +0.422136530830045 1 2 1 +0.195907626308766 1 2 1 +0.250168829070227 2 2 1 +0.361614330778561 2 2 1 +0.477174944954289 1 2 1 +0.630016603117949 1 2 1 +0.465295384344847 2 2 1 +0.294143656815915 2 2 1 +0.264628027587206 2 2 1 +0.207214488692379 2 2 1 +0.275361465213498 2 2 1 +0.390244603648003 2 2 1 +0.392019933911482 2 2 1 +0.419421788233775 2 2 1 +0.398851313639373 2 2 1 +0.222684342528921 1 2 1 +0.309535304324601 2 2 1 +0.518814359944856 1 2 1 +0.852128247986192 2 2 1 
+0.281410288223871 2 2 1 +1.04526845496218 1 2 1 +0.299671718323509 2 2 1 +1.2206902247079 2 2 1 +0.266245221651821 2 2 1 +0.585856917539606 2 2 1 +0.246713344194944 2 2 1 +0.243221392767397 1 2 1 +0.466101604938217 2 2 1 +0.681358629889813 2 2 1 +0.316032029193665 2 2 1 +0.37711415731265 2 2 1 +0.321756308895114 2 2 1 +0.339426132611737 1 2 1 +0.462776164378388 2 2 1 +0.242651330034962 1 2 1 +0.246338808493067 2 2 1 +0.351443718315621 2 2 1 +0.879179474160666 2 2 1 +0.275892228592311 2 2 1 +1.19843207513575 2 2 1 +0.492212434904309 2 2 1 +0.235334077460408 1 2 1 +0.658781985580715 2 2 1 +0.496722023486868 2 2 1 +0.389333111607481 1 2 1 +0.290682514514568 1 2 1 +0.296078339261109 2 2 1 +1.13573146386925 2 2 1 +0.321243222665541 1 2 1 +0.47852089377703 2 2 1 +0.706173198859061 2 2 1 +0.342262066554139 1 2 1 +0.229700613420935 2 2 1 +0.262779311846245 1 2 1 +0.292772839155619 2 2 1 +0.412064699877953 2 2 1 +0.459836048826499 2 2 1 +0.490523892738996 2 2 1 +0.279524933441799 2 2 1 +0.369615897239809 2 2 1 +0.230758234694844 2 2 1 +0.252861805612104 2 2 1 +0.491841566756603 2 2 1 +0.381802651857499 2 2 1 +0.347186500235918 2 2 1 +0.608385233814657 2 2 1 +1.06849187905581 2 2 1 +0.312240566682192 1 2 1 +0.846639420443936 1 2 1 +0.712399792744085 2 2 1 +0.25012681536537 2 2 1 +0.474130073908507 2 2 1 +0.669096572588102 2 2 1 +0.245614214665135 1 2 1 +1.09046461098125 2 2 1 +0.865104839101706 2 2 1 +0.725212442091508 2 2 1 +0.328860852235035 2 2 1 +0.526209106175903 2 2 1 +0.421498807641989 2 2 1 +0.293969113717582 2 2 1 +0.390793667483304 2 2 1 +0.74262292487233 2 2 1 +0.308167280867968 2 2 1 +0.359026772195073 1 2 1 +0.328927185365953 2 2 1 +1.21905584255683 1 2 1 +0.500288047433814 2 2 1 +0.224842633452238 2 2 1 +0.382005686607667 2 2 1 +0.300634446023351 1 2 1 +0.417876867416724 2 2 1 +0.371249215012469 2 2 1 +0.788689811346923 1 2 1 +0.662689531590809 1 2 1 +0.471005868314423 2 2 1 +0.594444358601939 2 2 1 +1.55077240941125 1 2 1 +0.927706317276666 1 2 1 +0.649826050593124 1 2 1 +0.28075741006474 2 2 1 +0.505810290842985 2 2 1 +0.49711754981939 2 2 1 +0.317978096635881 2 2 1 +0.684248959928731 2 2 1 +0.24282378340995 2 2 1 +0.481707664140375 2 2 1 +0.373537373349082 1 2 1 +0.405447957366669 2 2 1 +0.748014256841301 2 2 1 +0.711834286991734 2 2 1 +0.907962085626992 1 2 1 +0.369967811242 2 2 1 +0.295993682640687 2 2 1 +0.373284266243751 2 2 1 +0.34166217722553 2 2 1 +0.42937750854584 1 2 1 +0.470915823976768 2 2 1 +0.740950067010803 2 2 1 +0.674240772478605 2 2 1 +0.424450122438996 2 2 1 +0.26745277302258 2 2 1 +1.16123242588962 1 2 1 +0.579767054462027 2 2 1 +1.13724912004989 2 2 1 +0.310882482715339 2 2 1 +0.510927903962018 2 2 1 +0.344111958329695 2 2 1 +0.618226135802301 2 2 1 +0.351479460921543 2 2 1 +0.286917418430935 2 2 1 +0.297983520129636 2 2 1 +0.56257712830786 2 2 1 +0.716682250022604 2 2 1 +0.414592630645323 1 2 1 +0.238976677322081 2 2 1 +0.272443854818692 2 2 1 +1.16955807004935 2 2 1 +1.09939549809574 2 2 1 +0.270028442968248 2 2 1 +0.788721847905805 2 2 1 +0.4191755150052 2 2 1 +0.316400373681771 2 2 1 +0.609802807606279 2 2 1 +0.242772508104779 2 2 1 +0.793135194837104 1 2 1 +0.225673630294491 2 2 1 +0.368031893686271 2 2 1 +0.276628839207783 2 2 1 +0.431489483371041 2 2 1 +0.389776699040007 1 2 1 +0.561033032142085 2 2 1 +0.330526167790471 2 2 1 +0.420110538629517 2 2 1 +0.270319448711143 2 2 1 +0.531423698665226 1 2 1 +0.476628212169931 2 2 1 +0.232314221820144 1 2 1 +0.941428986722565 2 2 1 +0.212025112102429 2 2 1 +0.368427723696994 2 2 1 +0.70992072587502 2 2 1 
+0.925525840482286 2 2 1 +0.377883521547475 1 2 1 +0.78063938574767 2 2 1 +1.74503683482489 1 2 1 +0.251612907306528 2 2 1 +0.301650511821631 2 2 1 +0.813066289415148 2 2 1 +0.578407966843961 1 2 1 +0.348273146613647 2 2 1 +0.626193739500669 2 2 1 +0.253140397879093 2 2 1 +0.259307456267337 1 2 1 +0.415832848798801 1 2 1 +0.556832384556447 1 2 1 +0.673572632335394 1 2 1 +0.799853330760023 2 2 1 +0.930742156958785 1 2 1 +0.535762437608495 1 2 1 +0.473189488245964 2 2 1 +0.524542993511125 2 2 1 +0.739545131805635 1 2 1 +0.235355785766015 2 2 1 +0.260584654694577 1 2 1 +0.629506660259397 2 2 1 +0.231557754200238 2 2 1 +0.441319321469825 2 2 1 +0.583215313492174 2 2 1 +0.716830295625359 2 2 1 +0.199491993130699 2 2 1 +0.431091799266252 2 2 1 +0.206797413339198 2 2 1 +0.891303968036612 2 2 1 +0.418088670691812 2 2 1 +0.695686622676713 2 2 1 +0.509442640220052 2 2 1 +0.534434162394219 1 2 1 +0.22957675981285 2 2 1 +0.237510411071828 2 2 1 +0.384742470864086 2 2 1 +1.13440323753284 2 2 1 +0.371282462508375 2 2 1 +0.586952463908924 2 2 1 +0.751807274502031 2 2 1 +0.411626801231686 2 2 1 +0.788795034331271 2 2 1 +0.20733393183141 1 2 1 +0.638857588359423 1 2 1 +0.261472367531119 2 2 1 +0.373277752845772 2 2 1 +1.02436843366298 2 2 1 +0.302502960194587 1 2 1 +0.661168427682398 2 2 1 +0.233395542415348 2 2 1 +0.294733463977297 1 2 1 +0.26544588339993 2 2 1 +0.279478601813994 1 2 1 +0.926988733721204 2 2 1 +0.47577107073081 2 2 1 +0.265434794900874 2 2 1 +0.285106533088262 2 2 1 +0.888150923648132 1 2 1 +0.533632864862185 2 2 1 +0.816980040369266 2 2 1 +0.453517844009076 2 2 1 +0.32540514132032 1 2 1 +0.755359450830742 2 2 1 +0.388781842189814 2 2 1 +0.411602949797336 2 2 1 +0.269196234885745 2 2 1 +0.403147780188977 2 2 1 +0.815435476047168 2 2 1 +0.384261600014836 2 2 1 +0.267710822428141 2 2 1 +0.376570816086018 2 2 1 +0.364120979475635 2 2 1 +0.217074883970687 2 2 1 +0.354010708705527 2 2 1 +2.19563497894271 2 2 1 +0.269514173923494 2 2 1 +0.589071950609085 2 2 1 +0.343631456123552 2 2 1 +0.567816994849473 2 2 1 +0.510957888944779 2 2 1 +0.608732197392097 2 2 1 +0.312065520452347 2 2 1 +0.719826312987153 2 2 1 +0.991538495850398 2 2 1 +0.590296881002275 2 2 1 +0.585207716920772 2 2 1 +0.513932742073003 1 2 1 +0.29300310455318 2 2 1 +0.395229070645386 2 2 1 +0.251238693438004 1 2 1 +0.358100976516223 2 2 1 +0.604658428518133 2 2 1 +0.28898691264998 1 2 1 +0.616268731936217 1 2 1 +0.241534354644201 2 2 1 +0.586222445209675 2 2 1 +0.530578635018236 1 2 1 +0.400144208555685 2 2 1 +0.385729124722071 2 2 1 +0.397295110458581 2 2 1 +1.1980503220687 2 2 1 +0.284244205357729 2 2 1 +0.241347374440344 2 2 1 +0.379496078440646 1 2 1 +0.313029336995714 1 2 1 +0.233874351279794 2 2 1 +0.592645188650851 2 2 1 +0.380520487588823 2 2 1 +0.273459017317749 2 2 1 +0.381468845247399 2 2 1 +0.50996411763119 2 2 1 +0.26417116674038 2 2 1 +0.445725904718431 1 2 1 +0.284100667163705 2 2 1 +0.372087396465745 2 2 1 +0.259337317980368 1 2 1 +1.90238201143875 2 2 1 +0.420439527811802 2 2 1 +0.676790701130044 2 2 1 +0.491676544128052 2 2 1 +0.41814454475348 1 2 1 +0.325129499365181 2 2 1 +0.250560441839221 2 2 1 +0.42121953335634 2 2 1 +0.720448454867087 2 2 1 +0.32286005578195 2 2 1 +0.401634215363402 2 2 1 +0.823101550994882 1 2 1 +0.22652814141488 2 2 1 +0.574012915325073 2 2 1 +0.303814059799115 2 2 1 +0.38868794499924 2 2 1 +0.468185403754697 1 2 1 +0.64039931341012 2 2 1 +0.4922813732329 2 2 1 +0.685240592163985 2 2 1 +0.460729493739463 2 2 1 +0.454264406687532 2 2 1 +0.292098065982487 2 2 1 +0.259711243117317 2 2 1 
[... data rows continue, one "+"-prefixed record per line in the source patch; each record is a positive real value followed by three integer columns, e.g. "+0.476819420143709 2 2 1" ...]
5 1 +0.506418492133261 1 5 1 +1.0882032936743 2 5 1 +0.561957049307115 1 5 1 +0.818825222243945 2 5 1 +0.53741814818001 2 5 1 +0.398894432067162 1 5 1 +0.370547579851629 2 5 1 +0.441101002850091 1 5 1 +0.335051494643735 1 5 1 +0.359857472939855 1 5 1 +1.23898294637209 2 5 1 +0.303629801372895 2 5 1 +1.27191285267288 2 5 1 +0.255294810871718 2 5 1 +0.387744030748659 2 5 1 +0.950171765183971 2 5 1 +0.338933464143833 2 5 1 +0.374472149784474 1 5 1 +0.850121988549967 1 5 1 +0.368357738596241 2 5 1 +0.422292677162516 1 5 1 +0.670555601613663 2 5 1 +0.443253833059252 2 5 1 +0.445156135335308 1 5 1 +0.570124671736916 1 5 1 +0.715586041727328 1 5 1 +0.59944925951305 1 5 2 +0.571270118524135 1 5 2 +0.478721803809417 2 5 2 +0.677464125838552 2 5 2 +0.663628439043173 2 5 2 +0.269338514686921 1 5 2 +0.602552243929772 1 5 2 +0.291299463050662 1 5 2 +0.44101267885359 1 5 2 +0.311931727273563 2 5 2 +0.352091009578926 1 5 2 +0.313916230690944 1 5 2 +0.374014526157238 1 5 2 +0.300236548555279 1 5 2 +0.361852622471219 1 5 2 +0.290120165572002 1 5 2 +1.3190023778617 1 5 2 +0.813585821304588 2 5 2 +0.563167577163749 2 5 2 +0.424847079638711 1 5 2 +0.431682396745519 1 5 2 +0.742666504831229 2 5 2 +1.3016483866513 1 5 2 +0.355061924288677 1 5 2 +0.645449822853174 2 5 2 +0.288378961868379 1 5 2 +0.374463137914422 2 5 2 +0.405984822829934 2 5 2 +0.460634380609883 2 5 2 +0.750352906162385 1 5 2 +0.644664165310704 1 5 2 +0.300369902496596 1 5 2 +0.443395281476769 1 5 2 +0.242967135467797 2 5 2 +0.286922136386556 2 5 2 +0.287290418411787 2 5 2 +0.521549068525531 2 5 2 +0.292427955172831 1 5 2 +0.689267068868006 2 5 2 +0.518413816193551 2 5 2 +0.318270239089209 1 5 2 +0.382457012636577 2 5 2 +1.01440803729853 1 5 2 +1.00158066089162 1 5 2 +0.568667032400329 2 5 2 +0.414610843415938 1 5 2 +0.400258366212628 2 5 2 +1.00809741464603 2 5 2 +1.27133812224764 2 5 2 +0.743864210325877 2 5 2 +0.775841632110899 2 5 2 +0.300321917136189 2 5 2 +0.3583802317039 2 5 2 +0.460038260487213 1 5 2 +0.364671001753739 2 5 2 +0.452730720520749 2 5 2 +0.650435164535667 1 5 2 +1.30257818709608 2 5 2 +0.467408775207611 2 5 2 +0.413873488779555 2 5 2 +0.362883875858316 1 5 2 +0.706627565831075 2 5 2 +0.965894478924112 1 5 2 +0.364415938903456 1 5 2 +0.302357207565824 2 5 2 +0.523675741606119 1 5 2 +0.246098564525208 1 5 2 +0.534354328367928 2 5 2 +0.305317088730255 2 5 2 +0.360877156880599 2 5 2 +0.269313918771234 1 5 2 +0.522107720783827 2 5 2 +0.798496955992481 1 5 2 +0.774270654545548 1 5 2 +0.689189662698451 2 5 2 +0.541317395948974 1 5 2 +0.758336347994286 2 5 2 +1.04260254410127 1 5 2 +0.298448066365288 2 5 2 +0.687244199835044 1 5 2 +0.321017344151699 2 5 2 +1.30053982037248 1 5 2 +0.871190362112565 2 5 2 +0.854567944471819 1 5 2 +0.406633311451941 2 5 2 +0.595529004717776 2 5 2 +0.932485076424955 2 5 2 +0.670527589251614 2 5 2 +0.349539633417549 2 5 2 +0.527533696703081 2 5 2 +0.604204077013109 1 5 2 +0.396372501238897 2 5 2 +0.54363245627552 2 5 2 +1.01374770072993 2 5 2 +0.442516401166061 1 5 2 +0.337787899261388 2 5 2 +0.611607046647056 2 5 2 +0.582231962637285 2 5 2 +0.531731222343829 2 5 2 +0.70961760628681 2 5 2 +0.983672547433239 2 5 2 +0.853752013607103 2 5 2 +0.299100847268621 2 5 2 +0.458144991493894 2 5 2 +0.646448464231831 2 5 2 +0.349113081280827 2 5 2 +0.371396695556209 2 5 2 +0.380762038115444 2 5 2 +0.626193352278793 2 5 2 +0.319957343473038 2 5 2 +0.894827474594344 1 5 2 +0.371085339061208 2 5 2 +0.274700685448708 2 5 2 +0.472512525590188 2 5 2 +0.428272329032601 1 5 2 +0.81108343332377 2 5 2 +0.29226124190246 1 5 2 
+0.709302851449251 1 5 2 +0.441502263285431 2 5 2 +0.377865986040148 1 5 2 +0.299141714875424 2 5 2 +0.852037775518583 1 5 2 +0.367443619829809 2 5 2 +0.833467723342496 1 5 2 +0.518949570328891 2 5 2 +1.08071399168627 2 5 2 +1.45803253458175 1 5 2 +0.403737991042524 2 5 2 +0.430597937237068 1 5 2 +0.693327507928039 2 5 2 +0.409427255768774 2 5 2 +0.415266426974346 2 5 2 +0.399348890241505 1 5 2 +1.00807008741471 1 5 2 +0.600334249477256 2 5 2 +0.723090984967466 1 5 2 +0.30124593772841 2 5 2 +0.526226731553072 1 5 2 +0.909950421520731 2 5 2 +0.327267873435672 1 5 2 +1.07209919243311 2 5 2 +0.29726599356851 1 5 2 +0.552066919100173 2 5 2 +0.494894143356916 2 5 2 +0.32978714763287 2 5 2 +0.590111357521564 2 5 2 +0.453342223889263 2 5 2 +0.402928162250864 2 5 2 +1.28649448740156 2 5 2 +0.307709380863996 1 5 2 +0.28828133716514 2 5 2 +0.293039690509469 2 5 2 +0.416060987991466 2 5 2 +0.475558200663385 2 5 2 +1.05487716699584 2 5 2 +0.338172024306927 1 5 2 +0.595539455240045 1 5 2 +0.579146103187199 2 5 2 +0.786799876060352 1 5 2 +0.241065859944711 2 5 2 +0.582321691985273 2 5 2 +0.460354760620268 1 5 2 +0.841722330305237 1 5 2 +1.01167714961156 2 5 2 +0.424713060969898 1 5 2 +0.394167127869212 2 5 2 +1.03416630772583 1 5 2 +1.32959777810628 1 5 2 +1.75972860107492 1 5 2 +0.615179768965791 2 5 2 +0.319854717980425 2 5 2 +0.489657096763377 1 5 2 +0.670661576197628 2 5 2 +0.472203770212263 2 5 2 +0.765998706988045 1 5 2 +0.333182293252276 1 5 2 +0.570043234180048 1 5 2 +0.30363661516842 1 5 2 +0.332466771451815 2 5 2 +0.997841282553196 2 5 2 +0.45787243741055 1 5 2 +0.334190036053108 2 5 2 +0.419647452096217 2 5 2 +0.477289639435407 2 5 2 +0.319493821935502 2 5 2 +0.602105491492108 2 5 2 +0.901453621588105 2 5 2 +0.302006597660369 2 5 2 +0.370541701419962 1 5 2 +0.632484359068564 2 5 2 +0.675493181471213 1 5 2 +0.702604527810739 2 5 2 +0.457494041206726 1 5 2 +1.02133348990189 1 5 2 +1.02366570258029 2 5 2 +0.6241274220945 2 5 2 +1.13319965338725 1 5 2 +0.608397741451449 2 5 2 +0.275232263227218 2 5 2 +0.646078828525116 1 5 2 +0.392384020972726 2 5 2 +0.611608179642105 1 5 2 +0.403732771272542 2 5 2 +0.475709943826484 1 5 2 +0.644107463291593 1 5 2 +0.529061673835631 2 5 2 +0.76205402727557 1 5 2 +0.478372023349786 1 5 2 +0.721916054782157 2 5 2 +0.298281970063148 2 5 2 +0.308419274846538 2 5 2 +1.13676249928442 2 5 2 +0.489633518133111 1 5 2 +0.81718265267318 2 5 2 +0.296510774521216 1 5 2 +0.41545087025183 2 5 2 +0.858883400769863 1 5 2 +0.588983529667847 2 5 2 +1.05271712018426 2 5 2 +0.305721646872819 1 5 2 +0.639265026387045 2 5 2 +0.640474642443045 1 5 2 +0.896096517036621 2 5 2 +0.42307255863364 2 5 2 +0.415674104307401 1 5 2 +0.27547910375578 1 5 2 +0.348853636625538 2 5 2 +0.283695041149401 2 5 2 +0.305964485294963 2 5 2 +0.993384639595699 2 5 2 +0.747661562638797 1 5 2 +0.374983766761583 2 5 2 +0.709902782274922 1 5 2 +0.39029996774982 2 5 2 +0.443342047659481 1 5 2 +0.25232121385486 2 5 2 +0.606258771370627 2 5 2 +0.56630014842487 2 5 2 +0.407573540497359 2 5 2 +0.488426329742728 2 5 2 +0.530548943298116 1 5 2 +0.836491434275815 2 5 2 +0.368718555457284 2 5 2 +0.627508832177755 1 5 2 +0.658507784089307 2 5 2 +0.494633132243137 2 5 2 +0.767210656356636 1 5 2 +0.279455460728518 2 5 2 +0.680268781163734 1 5 2 +0.434657646694503 1 5 2 +0.975121716315162 2 5 2 +0.454458052785016 2 5 2 +0.870719661036972 2 5 2 +0.601447265395704 2 5 2 +0.883151345159082 2 5 2 +0.79883814953365 2 5 2 +0.520533085849907 2 5 2 +0.740289828056042 1 5 2 +0.808434675051423 2 5 2 +0.323990809323793 1 5 2 
+0.402569321518717 1 5 2 +0.520691303484606 1 5 2 +0.401064022355165 2 5 2 +0.402344022560083 1 5 2 +0.532289549565749 2 5 2 +0.706963165521545 2 5 2 +0.784955813139153 2 5 2 +0.360194468075243 2 5 2 +0.409815687475514 1 5 2 +0.82043050263301 2 5 2 +0.460274040204098 2 5 2 +0.419051670972866 2 5 2 +0.599443515950589 2 5 2 +0.966096764539077 2 5 2 +0.366186511338898 1 5 2 +1.4222044721659 2 5 2 +0.777184212128937 2 5 2 +0.591852836588032 2 5 2 +0.770749892926039 2 5 2 +0.70434735829414 1 5 2 +0.765666276417329 2 5 2 +0.40346241426283 1 5 2 +0.391165632121021 2 5 2 +0.370020173988749 2 5 2 +1.58683703850196 1 5 2 +1.57793133770567 2 5 2 +0.80075289464325 2 5 2 +0.256904564618549 1 5 2 +0.53622262912349 1 5 2 +0.453580971173257 2 5 2 +0.410069535718748 1 5 2 +0.515797332567113 2 5 2 +0.952842898198181 1 5 2 +0.363748661621775 2 5 2 +0.428437274072119 2 5 2 +0.370569493908707 1 5 2 +1.55504308977282 1 5 2 +0.2910968027665 2 5 2 +0.889633822477091 2 5 2 +1.06768254922828 1 5 2 +0.737740843880572 2 5 2 +0.999020093280879 2 5 2 +0.332193354553405 2 5 2 +0.489468556048885 1 5 2 +0.276614446525954 2 5 2 +0.411812431184961 1 5 2 +0.645266731187802 2 5 2 +0.355603707761157 2 5 2 +0.390302407266954 1 5 2 +0.77292099479717 2 5 2 +0.252389739406451 2 5 2 +0.876313308318194 2 5 2 +0.58287646271364 1 5 2 +0.288751011208501 1 5 2 +0.431566553814174 2 5 2 +0.589008102697159 2 5 2 +0.829161903382978 2 5 2 +0.628538983915815 2 5 2 +0.621394296269468 2 5 2 +0.579181298723461 1 5 2 +0.727901955182036 2 5 2 +0.631355875603683 2 5 2 +0.860874327171326 2 5 2 +0.343433754720578 2 5 2 +0.5773516679935 2 5 2 +0.279980088015754 1 5 2 +0.297786857526651 2 5 2 +0.352028385521676 2 5 2 +0.718284977347952 1 5 2 +0.386491843574301 1 5 2 +0.733817922945248 2 5 2 +0.330039961374457 1 5 2 +0.562137897292054 2 5 2 +0.493718153186244 2 5 2 +0.384521948665274 2 5 2 +0.541882345607494 2 5 2 +0.899433484810609 1 5 2 +0.239671549366562 1 5 2 +0.903508962409293 2 5 2 +0.437062486670204 1 5 2 +0.47204968825503 2 5 2 +0.463054778904269 1 5 2 +0.337988558333662 1 5 2 +0.945352936382255 2 5 2 +0.319331081252348 1 5 2 +1.10841845342301 1 5 2 +0.727028251710372 1 5 2 +0.418174877683897 2 5 2 +0.83459503151359 2 5 2 +0.341783851166967 2 5 2 +0.409549042090065 2 5 2 +1.39638569008014 2 5 2 +0.282660568650718 2 5 2 +0.377102854745555 2 5 2 +0.559130208630371 1 5 2 +0.406889638299026 2 5 2 +0.874392505642916 2 5 2 +0.549370228408864 1 5 2 +0.702882081610178 1 5 2 +0.387196237366316 2 5 2 +0.254396302589893 2 5 2 +0.661141881945967 2 5 2 +0.353850139949898 2 5 2 +0.940367165724872 2 5 2 +0.257306998632217 2 5 2 +0.25948459184901 1 5 2 +0.282447060372156 2 5 2 +0.434471601810715 2 5 2 +0.531847599879585 2 5 2 +0.831639598690597 1 5 2 +0.476057177723281 1 5 2 +0.689960964595721 2 5 2 +0.762885904963058 2 5 2 +0.622547245815982 2 5 2 +0.467121910624321 2 5 2 +0.444998878563145 2 5 2 +0.610618477959826 2 5 2 +0.406946642496399 1 5 2 +1.0606594018364 2 5 2 +0.554673891728355 2 5 2 +0.249462989686065 2 5 2 +0.391072779178118 1 5 2 +0.502851072240924 2 5 2 +0.375337554826657 2 5 2 +0.459877784492948 1 5 2 +0.37669560180294 2 5 2 +0.35853976242436 2 5 2 +0.299654614882035 2 5 2 +0.722167944915479 2 5 2 +0.470173817048549 2 5 2 +0.434030303678653 1 5 2 +0.38807847057575 2 5 2 +0.461429537822728 1 5 2 +0.810700838760469 2 5 2 +0.249051828431355 1 5 2 +0.317554462771952 2 5 2 +0.262256455504448 1 5 2 +0.2953873757043 2 5 2 +0.696729236528574 2 5 2 +0.528266303919385 2 5 2 +0.55142571005823 2 5 2 +0.586822599864067 2 5 2 +0.418740386790856 2 5 2 
+0.335528681201811 1 5 2 +0.842500045429954 2 5 2 +0.285946789650486 2 5 2 +0.408435800240321 2 5 2 +0.413125087979462 2 5 2 +0.393886958711384 1 5 2 +0.253356738206904 2 5 2 +0.312045370960966 1 5 2 +0.798452098494563 2 5 2 +0.492707665345048 2 5 2 +0.716083098282908 2 5 2 +0.234541570552336 2 5 2 +1.35239146034105 2 5 2 +0.718129009054262 1 5 2 +0.718707390761021 2 5 2 +0.491926442341928 2 5 2 +0.654723295742436 2 5 2 +2.2535330863484 2 5 2 +0.427000474398908 2 5 2 +0.709004302987488 1 5 2 +0.516478985375353 2 5 2 +0.399159476675353 2 5 2 +0.442334074498277 2 5 2 +0.305764408172937 1 5 2 +0.374730267131031 2 5 2 +0.258402933869162 2 5 2 +0.356729146842492 2 5 2 +0.249659208975827 2 5 2 +0.513939361328391 2 5 2 +0.273198932158475 2 5 2 +0.299847483659362 1 5 2 +0.541237531522651 2 5 2 +0.392932766582102 1 5 2 +0.525638020825498 2 5 2 +0.333414128837149 1 5 2 +0.576494101054249 2 5 2 +0.511222521868291 1 5 2 +0.412380296323655 2 5 2 +0.399606860754613 2 5 2 +0.997015772263903 2 5 2 +0.443550015156711 2 5 2 +0.837716892291427 2 5 2 +0.390478993062678 1 5 2 +0.636766756207244 2 5 2 +0.737585807116948 2 5 2 +0.295250014308323 2 5 2 +0.608066528187843 2 5 2 +1.24729266077018 2 5 2 +0.41288102715652 1 5 2 +1.24953422410599 1 5 2 +0.351993007234192 2 5 2 +0.335694580769538 1 5 2 +0.423647638118759 1 5 2 +0.734734535205897 1 5 2 +0.260015569529333 1 5 2 +0.750757643884208 2 5 2 +0.595132916679284 2 5 2 +0.522805168311647 2 5 2 +0.843659849398215 2 5 2 +1.10699652185756 2 5 2 +0.391076744361603 2 5 2 +0.312026720740498 2 5 2 +0.607969730004942 2 5 2 +1.00812364162894 2 5 2 +0.43084197949303 2 5 2 +0.453046882496868 1 5 2 +0.370369606134876 1 5 2 +0.775483364298362 1 5 2 +0.396231522637068 2 5 2 +0.413528320853371 2 5 2 +1.46772948611848 2 5 2 +0.341708188807674 2 5 2 +0.419394806122751 1 5 2 +0.257895217630086 2 5 2 +0.415728977418159 2 5 2 +0.316442984226336 2 5 2 +0.968259065135459 2 5 2 +0.387870086772944 1 5 2 +0.716049265883702 2 5 2 +0.633645416807576 1 5 2 +0.953173953972706 2 5 2 +0.690728024005709 2 5 2 +1.10833203046202 2 5 2 +2.21338412745891 2 5 2 +0.324098654160468 2 5 2 +0.673932874285758 2 5 2 +0.800813516749607 2 5 2 +0.696084502169422 1 5 2 +0.355840028465312 1 5 2 +0.683207616367023 1 5 2 +0.320920277031855 2 5 2 +1.03878518793101 1 5 2 +0.609374353605396 2 5 2 +0.578037696967778 2 5 2 +0.322774773347465 2 5 2 +0.444321937393125 2 5 2 +0.706403834907649 2 5 2
diff --git a/Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt b/Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt
new file mode 100644
index 00000000..c925a82a
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt
@@ -0,0 +1,1001 @@
+RT choice subjID condition
+0.238126253704183 1 1 1 +0.788334139249308 2 1 1 +0.524351202388138 2 1 1 +1.30852451859186 1 1 1 +0.244177006142252 1 1 1 +0.512534281943979 1 1 1 +0.570872020376975 2 1 1 +0.552056452179357 1 1 1 +0.298121361381527 2 1 1 +0.323864684737407 1 1 1 +0.542476237007045 1 1 1 +0.457829931981559 2 1 1 +0.214443816443766 1 1 1 +0.282641758197282 2 1 1 +0.577296397953241 1 1 1 +0.80363268095685 2 1 1 +0.630866151842371 2 1 1 +0.561537877283935 2 1 1 +0.447864619700588 1 1 1 +0.271079966516117 2 1 1 +0.286558308483825 2 1 1 +0.402853789793329 2 1 1 +0.261247265870358 2 1 1 +0.954323974954787 1 1 1 +0.233982750292549 2 1 1 +0.534509968347321 2 1 1 +1.38489463892966 1 1 1 +0.51382752398596 2 1 1 +0.877226598584423 2 1 1 +0.59661096895894 2 1 1 +0.653486235884601 2 1 1 +0.499754559401486 2 1 1 +0.234607668817517 2 1 1
+0.531596228343812 2 1 1 +0.517067421390557 2 1 1 +0.286714432990514 2 1 1 +0.280389415416944 1 1 1 +0.770848791728697 2 1 1 +0.242534242474749 2 1 1 +1.21402951161598 1 1 1 +0.254230773115822 1 1 1 +0.235607609409862 1 1 1 +0.893859490775577 2 1 1 +0.4248828895841 2 1 1 +0.806633683066691 1 1 1 +0.52846751057204 2 1 1 +0.283404274358359 2 1 1 +0.38261147359119 1 1 1 +0.366467333270928 2 1 1 +0.89906087165271 2 1 1 +0.473523175525898 2 1 1 +0.61052334774835 2 1 1 +0.348877038822898 2 1 1 +0.509848343105319 2 1 1 +0.714362767211544 2 1 1 +0.366653361634071 1 1 1 +0.504639516528354 2 1 1 +0.789291266027802 1 1 1 +0.220496731951155 2 1 1 +0.225368494671686 2 1 1 +0.935425512110651 1 1 1 +0.596093103065834 2 1 1 +0.751187828634478 2 1 1 +0.398369973292919 2 1 1 +0.803192132747886 1 1 1 +0.653642313281921 2 1 1 +0.759465190620081 2 1 1 +0.301158475484036 2 1 1 +0.468546635484975 2 1 1 +0.45136376067397 2 1 1 +0.225730206953994 1 1 1 +0.871541732294617 2 1 1 +1.02231746439083 2 1 1 +0.453015412970327 1 1 1 +0.198424664401742 2 1 1 +0.300531454438104 2 1 1 +0.473723469079576 1 1 1 +0.397993417619097 2 1 1 +0.990744721453659 2 1 1 +0.576175729949669 2 1 1 +0.200104343196362 2 1 1 +0.397950225292451 2 1 1 +0.595871677587168 1 1 1 +0.631283245367399 2 1 1 +0.225640535433198 2 1 1 +0.671278939344137 2 1 1 +0.562888330598081 1 1 1 +0.713201556333214 1 1 1 +0.429372024083033 2 1 1 +0.559437949496943 1 1 1 +0.747758954300599 2 1 1 +0.668556572370471 2 1 1 +0.179933868089705 1 1 1 +0.557946405103375 2 1 1 +0.781925159045207 2 1 1 +0.629998909619026 2 1 1 +0.73419031432803 1 1 1 +0.917048954570217 2 1 1 +1.27326330493077 1 1 1 +0.715099066135782 1 1 1 +0.561629162179203 2 1 1 +0.439342876745989 1 1 1 +0.212480989248291 2 1 1 +0.431997523692581 2 1 1 +0.504823085985375 1 1 1 +0.209443682735351 2 1 1 +0.535478168252645 1 1 1 +0.523309589143815 2 1 1 +0.292526841667345 2 1 1 +0.292598915819633 1 1 1 +0.383304045988112 2 1 1 +0.220801631101784 2 1 1 +0.240025256471961 2 1 1 +0.374617088048471 2 1 1 +0.225139772246513 2 1 1 +0.504765209525881 1 1 1 +0.536719069622199 1 1 1 +0.265730079523484 2 1 1 +0.788176797412021 2 1 1 +0.317054055572024 1 1 1 +0.984288372815029 1 1 1 +0.242704368769227 1 1 1 +1.16310843477133 1 1 1 +0.671512143534472 2 1 1 +0.235624281398265 2 1 1 +0.585278561981407 1 1 1 +0.313710683818167 1 1 1 +0.512453071354528 2 1 1 +0.318816084203735 2 1 1 +0.492290766723273 2 1 1 +0.869267244819061 2 1 1 +0.416347372277426 2 1 1 +1.25171209855063 1 1 1 +0.421124063985099 2 1 1 +0.330265759909128 2 1 1 +0.32442759213596 2 1 1 +0.499405834143408 2 1 1 +0.605809814064198 2 1 1 +0.441813584555195 1 1 1 +0.355018580197292 2 1 1 +0.284917824978601 2 1 1 +0.712509291577718 2 1 1 +0.360087543146394 2 1 1 +0.593758109292972 2 1 1 +0.21793928877364 2 1 1 +0.511916501085172 1 1 1 +1.65665966055448 2 1 1 +0.462252095429733 1 1 1 +0.375694324193756 2 1 1 +0.573330510111478 1 1 1 +0.624806212238662 1 1 1 +0.3221554867038 1 1 1 +0.394184550616579 1 1 1 +0.233898257977356 2 1 1 +0.616451835954318 2 1 1 +0.435745652986984 2 1 1 +0.309831870195393 1 1 1 +0.303567774481905 1 1 1 +0.268141575894932 2 1 1 +0.685546680374616 2 1 1 +0.315857448984633 2 1 1 +0.358148021225324 1 1 1 +0.561597978729496 2 1 1 +0.575763837785002 2 1 1 +0.408409797786314 2 1 1 +0.456470478096314 2 1 1 +0.211008154807298 1 1 1 +0.537560426488747 1 1 1 +0.474119050536192 1 1 1 +0.349680702914349 2 1 1 +0.43874642118394 2 1 1 +0.741099937281951 2 1 1 +0.397490501092685 1 1 1 +0.455993632903328 2 1 1 +0.531917883353318 2 1 1 +0.544592749033783 2 1 
1 +0.74575081631549 1 1 1 +0.482830763020483 2 1 1 +0.280104823458282 2 1 1 +0.674827163589054 2 1 1 +0.27232449929437 1 1 1 +0.33609945965603 2 1 1 +0.642687813456977 2 1 1 +0.45152584390343 2 1 1 +0.393612819207325 1 1 1 +0.403513480920972 2 1 1 +0.55270209232572 2 1 1 +0.282474350101989 1 1 1 +0.225686494015142 1 1 1 +0.3032960404285 1 1 1 +0.741695387202929 1 1 1 +0.23627922113503 1 1 1 +0.317661404771517 2 1 1 +0.365881950379812 1 1 1 +0.671407911504626 2 1 1 +0.6327672361385 2 1 1 +0.408730216599132 2 1 1 +1.05295329016947 2 1 1 +0.647929253014634 2 1 1 +0.272505386795946 1 1 1 +0.477000937785718 2 1 1 +0.593679670773664 2 1 1 +0.485804513765726 1 1 1 +0.685108031619407 2 1 1 +0.463863491717212 2 1 1 +0.280869562583906 2 1 1 +0.484442256816249 1 1 1 +0.374203282894535 1 1 1 +0.205270568757322 2 1 1 +0.285730023779721 1 1 1 +0.420031671350127 2 1 1 +0.304140334800815 2 1 1 +0.455400240565684 1 1 1 +0.319295225911816 2 1 1 +0.853456173431349 2 1 1 +1.28296521539738 2 1 1 +0.402276812108308 2 1 1 +1.60365089898574 2 1 1 +0.513436951554669 2 1 1 +0.635287982445216 2 1 1 +0.756725913746622 1 1 1 +0.538411817875012 2 1 1 +0.252807751300543 1 1 1 +0.306493263230248 2 1 1 +0.328940637779731 1 1 1 +0.295149174376265 2 1 1 +0.428772773247104 2 1 1 +0.710257617392816 2 1 1 +2.1398843380733 2 1 1 +0.390964230021283 1 1 1 +0.30264732818644 1 1 1 +0.24604561365542 2 1 1 +0.326118394989355 2 1 1 +0.444302762917929 2 1 1 +0.994994120515054 2 1 1 +0.329747734506691 2 1 1 +0.244820417609073 2 1 1 +0.434344901812039 2 1 1 +0.245526039713125 2 1 1 +0.371387027622059 2 1 1 +0.396016682526436 2 1 1 +0.868293655068221 2 1 1 +0.339580118779972 2 1 1 +0.377321305638716 1 1 1 +0.352058350011174 2 1 1 +0.523222420484193 1 1 1 +1.63006360968846 2 1 1 +0.403780279358626 1 1 1 +0.33450821318739 2 1 1 +0.246049648436144 1 1 1 +0.73900563703035 2 1 1 +0.70659002598455 1 1 1 +0.251224036209508 1 1 1 +0.279682884105716 2 1 1 +0.446835970242547 2 1 1 +0.344773155307199 2 1 1 +1.18156313011751 2 1 1 +0.40159469187599 1 1 1 +0.662618250249293 2 1 1 +0.484088636200293 2 1 1 +0.249177412018199 1 1 1 +0.635565342005854 2 1 1 +0.237344366033974 2 1 1 +0.262837667936303 2 1 1 +0.223824529758815 2 1 1 +0.544851049052962 1 1 1 +1.22941904309934 1 1 1 +1.18790150456476 2 1 1 +0.397012831119724 2 1 1 +0.542500816372649 1 1 1 +0.215934076714995 2 1 1 +1.30455859046761 2 1 1 +0.536099297245265 2 1 1 +0.414264536316934 2 1 1 +0.777679859044325 2 1 1 +0.697575719549679 1 1 1 +0.704761484394448 2 1 1 +0.286893353427223 1 1 1 +0.681973124438239 2 1 1 +0.397462829482937 2 1 1 +0.257670640245336 2 1 1 +0.236649584180499 2 1 1 +0.436790435094707 1 1 1 +0.574656753851278 1 1 1 +0.253082319735779 1 1 1 +0.61806692862892 1 1 1 +0.46661603680114 2 1 1 +0.195332992231242 1 1 1 +0.229629897436442 1 1 1 +0.992646398039104 2 1 1 +0.94136783174252 2 1 1 +0.837333099387364 1 1 1 +0.430204780391451 2 1 1 +1.37515921760222 2 1 1 +0.89875556054097 2 1 1 +0.696864042518777 2 1 1 +0.332933586834615 2 1 1 +0.334070550417085 2 1 1 +0.38676132253602 2 1 1 +0.306404665389991 2 1 1 +0.478254432945422 2 1 1 +0.601997570889218 2 1 1 +0.373642558748753 2 1 1 +0.29388256861859 1 1 1 +0.403146732540824 2 1 1 +0.754379822737839 2 1 1 +0.20827688411218 2 1 1 +0.211975975201092 2 1 1 +0.591340246795799 1 1 1 +0.263322621163444 2 1 1 +0.525170614901281 1 1 1 +0.206823345071543 1 1 1 +0.935520204615524 1 1 1 +0.550910831841 2 1 1 +0.232504114652867 2 1 1 +0.391975720570035 1 1 1 +0.52537232580037 2 1 1 +0.604827669281913 2 1 1 +0.440173374557048 2 1 1 
+0.294878838994327 2 1 1 +0.323868811622971 1 1 1 +0.240824506056104 2 1 1 +0.423271049333481 2 1 1 +0.849356591210965 2 1 1 +0.335818515496422 2 1 1 +0.538745656799135 2 1 1 +0.4208751745964 2 1 1 +0.55146359110108 2 1 1 +0.467882029849217 2 1 1 +0.567777388073783 2 1 1 +0.311394332684366 1 1 1 +1.33531192845093 1 1 1 +0.368867535882799 2 1 1 +0.340092989922591 2 1 1 +0.299811445088077 2 1 1 +0.198131285653104 2 1 1 +1.57688580580023 2 1 1 +0.671467937043381 2 1 1 +0.422481878776226 2 1 1 +0.48313672040092 1 1 1 +0.473697344635179 1 1 1 +0.68951966048344 2 1 1 +0.633967567703449 2 1 1 +0.382761102393661 1 1 1 +0.68736056335558 1 1 1 +0.677810537621417 2 1 1 +0.249614606331914 2 1 1 +0.755321813543998 2 1 1 +0.370198385669219 1 1 1 +0.697638915493631 1 1 1 +0.684828719775979 2 1 1 +0.502349799392655 1 1 1 +0.583605969114717 1 1 1 +0.517041977935336 1 1 1 +0.411670106317747 1 1 1 +0.207610898625408 1 1 1 +0.414006631133478 2 1 1 +0.921999310392829 1 1 1 +0.409507167245215 2 1 1 +0.584716070617761 2 1 1 +0.666513112126972 2 1 1 +0.233005827550518 2 1 1 +0.60770657746225 1 1 1 +0.221784346267773 2 1 1 +0.771391695716424 2 1 1 +1.21988159355549 1 1 1 +0.408933678664394 2 1 1 +1.03374983542661 2 1 1 +0.939198644733114 2 1 1 +0.962067734082042 2 1 1 +0.473406448845882 2 1 1 +0.223644602219167 2 1 1 +1.12139515597077 1 1 1 +0.299025722625131 2 1 1 +0.55009896091157 1 1 1 +0.578799507502895 1 1 1 +0.962097234341087 1 1 1 +0.348861796367042 2 1 1 +0.35773121339554 2 1 1 +0.246740510307971 1 1 1 +0.407111627051893 1 1 1 +0.550930871978825 1 1 1 +0.422754497543909 2 1 1 +0.758063342099552 2 1 1 +0.380847347114823 2 1 1 +0.377055603284598 2 1 1 +0.65608839650545 1 1 1 +1.42357385911498 2 1 1 +0.232798506755752 2 1 1 +0.539867634108279 2 1 1 +0.392489725525737 2 1 1 +0.367110223983889 2 1 1 +0.839639947757427 2 1 1 +0.592327430792799 2 1 1 +0.239126262427817 2 1 1 +0.328625329636268 2 1 1 +0.284257957756146 1 1 1 +0.331590178883346 1 1 1 +0.620620411662111 2 1 1 +0.755967038010479 2 1 1 +0.331900743408574 2 1 1 +0.421146686045199 1 1 1 +1.07476503410067 2 1 1 +0.507112598176372 1 1 1 +0.311566718621004 2 1 1 +0.301319320070233 2 1 1 +0.393257034342845 2 1 1 +0.673521481008061 2 1 1 +0.291567562966672 2 1 1 +1.05249328382332 2 1 1 +0.694698152076518 2 1 1 +0.733055920143737 2 1 1 +0.334350894107303 2 1 1 +0.542807697456418 1 1 1 +0.579281169009386 1 1 1 +0.320837583848137 1 1 1 +0.488074071042795 1 1 1 +0.213060081069537 2 1 1 +0.237230647833275 2 1 1 +0.237572229668373 1 1 1 +0.241805498724672 1 1 1 +0.21505246069559 2 1 1 +0.625069689033177 2 1 1 +0.391789762960315 1 1 1 +0.360924641936915 2 1 1 +0.434831888026175 1 1 1 +1.53947356804897 2 1 1 +0.390459073072731 2 1 1 +0.327186719063663 1 1 1 +0.451681415339723 1 1 1 +0.551841771615269 2 1 1 +0.41039773179749 1 1 1 +0.926634118987433 2 1 1 +0.813362027443744 2 1 1 +0.632371052186083 2 1 1 +1.07271976627787 1 1 1 +0.347281073927582 1 1 1 +0.44423560152159 1 1 1 +0.576366534316911 2 1 1 +0.279713029952993 2 1 1 +0.881466843024701 2 1 1 +0.374654223890455 1 1 1 +0.246340230252564 1 1 1 +0.46051090791758 2 1 1 +0.610478508455545 1 1 1 +0.290070606427311 2 1 1 +0.544420557842503 1 1 1 +0.776693279362721 1 1 1 +0.235406028367375 2 1 1 +0.239531675743827 1 1 1 +0.44775078332261 2 1 1 +0.272084709816774 1 1 1 +0.490027056594032 2 1 1 +1.11466956380519 2 1 1 +0.270448404879725 2 1 1 +0.442949902437612 1 1 1 +0.570651632322539 1 1 1 +0.32265845661882 2 1 1 +0.407435441210764 2 1 1 +0.200085052390358 2 1 1 +0.358511835895485 2 1 1 +1.2431214333383 2 1 1 
+0.696171754957839 1 1 1 +0.2777627469669 1 1 1 +0.429359856138122 2 1 1 +0.340524177360971 2 1 1 +0.199944337376957 2 1 1 +0.398334292684942 2 1 1 +0.388541579168816 2 1 1 +0.398547679838622 1 1 1 +0.839309822360769 2 1 1 +0.280253849702043 2 1 1 +0.547345720269382 2 1 1 +0.376647832731017 2 1 1 +0.455530332435412 2 1 1 +0.334196466045242 2 1 1 +0.759777271734527 2 1 1 +1.10869967729068 2 1 1 +0.222920909328599 2 1 1 +0.243727194101031 2 1 1 +0.331283374352904 2 1 1 +0.489803545251022 1 1 1 +0.2736011848833 2 1 1 +0.432409628386385 1 1 1 +0.447747022319498 2 1 1 +0.736283852147818 2 1 1 +0.461500847594122 1 1 1 +0.359367876631285 1 1 1 +0.418098062593873 2 1 1 +0.502693165924066 1 1 1 +0.260188072876792 1 1 1 +0.348437996297828 1 1 1 +1.57562306974174 2 1 1 +0.316108820930013 2 1 1 +0.421685918698271 2 1 1 +0.578695918727619 2 1 1 +1.12879309366769 2 1 1 +1.03916993441652 2 1 1 +0.492207222672778 1 1 1 +0.33283217994747 2 1 1 +0.39422420306568 2 1 2 +0.362300838201913 1 1 2 +0.469662901313467 2 1 2 +0.820030023322582 1 1 2 +0.234551440695508 2 1 2 +0.331679248955791 1 1 2 +0.527229640837085 2 1 2 +0.91734807805308 2 1 2 +0.319175515877037 2 1 2 +0.651053459158852 1 1 2 +0.661459624685597 2 1 2 +0.281279784597852 2 1 2 +0.342078529279457 1 1 2 +0.3636800828231 2 1 2 +0.484151346003298 1 1 2 +0.658827635325395 1 1 2 +0.622208937699232 1 1 2 +0.580811030835409 2 1 2 +0.441808620117506 1 1 2 +0.36060243933493 2 1 2 +0.831194064165385 2 1 2 +0.361776006347027 2 1 2 +0.777351339265196 1 1 2 +0.278293909155803 2 1 2 +0.278507100800553 2 1 2 +0.884402648451047 1 1 2 +0.342560342613834 2 1 2 +0.809676649841315 2 1 2 +0.516858099569803 1 1 2 +0.634645370682583 2 1 2 +0.249686099229778 1 1 2 +1.33141985698474 1 1 2 +0.453726915386914 2 1 2 +0.290504549136735 2 1 2 +0.487095756746479 2 1 2 +0.346501172556082 2 1 2 +0.393430828426059 1 1 2 +0.504449494787339 2 1 2 +0.367999687491587 2 1 2 +0.352469038071531 1 1 2 +0.234560015153837 2 1 2 +0.940841504372444 1 1 2 +0.2046902513565 2 1 2 +0.461341997193658 1 1 2 +0.610339950737745 2 1 2 +0.446921029186028 1 1 2 +0.515591108864551 2 1 2 +1.58260395843454 2 1 2 +0.344764743329778 2 1 2 +0.427254054893139 2 1 2 +0.516158776880019 1 1 2 +1.2612303673015 2 1 2 +0.613528615965816 2 1 2 +0.267963577139406 2 1 2 +0.307594651280269 2 1 2 +0.24101706884499 1 1 2 +0.455753268732021 2 1 2 +0.405040912881131 2 1 2 +0.288094483330521 1 1 2 +0.545610622237084 2 1 2 +0.452142838999807 1 1 2 +0.594527943497764 1 1 2 +0.88116621589308 2 1 2 +0.277767297820233 2 1 2 +0.279551393619652 2 1 2 +0.365460511604365 2 1 2 +0.556212898406868 2 1 2 +0.328560209842821 1 1 2 +0.531013993625691 1 1 2 +0.231888430468412 1 1 2 +0.677110774143983 2 1 2 +0.453921989085917 2 1 2 +0.459571696136957 2 1 2 +0.393356837769246 1 1 2 +0.511202810478497 1 1 2 +0.693614307574487 1 1 2 +0.240863923388269 2 1 2 +0.321852817508144 1 1 2 +0.270908403919833 2 1 2 +0.820724000663825 1 1 2 +0.235189573689813 2 1 2 +0.326155088030317 2 1 2 +0.631590224724998 1 1 2 +0.441990726662034 1 1 2 +0.84336570752273 2 1 2 +0.359995826600722 2 1 2 +0.251400135935091 1 1 2 +0.412798716611553 1 1 2 +0.257997459005081 1 1 2 +0.324911808695266 2 1 2 +0.869954063020224 2 1 2 +0.316774804913553 1 1 2 +0.802438949561354 2 1 2 +0.753010120858102 2 1 2 +0.50447570028204 1 1 2 +0.472994968867572 2 1 2 +0.365558799398694 2 1 2 +0.355836646801112 1 1 2 +0.571157381310202 2 1 2 +0.634686215618027 2 1 2 +0.270208965991148 2 1 2 +0.328585338874615 1 1 2 +0.384434393299423 2 1 2 +0.316023575731398 1 1 2 +0.494817395995112 2 1 2 
+0.300504460120145 2 1 2 +0.347783059904907 2 1 2 +1.02851702876777 2 1 2 +0.364863367923789 2 1 2 +0.460777943415657 2 1 2 +0.382793622325279 2 1 2 +0.273403607994913 2 1 2 +0.609426470046583 2 1 2 +0.297792901344866 1 1 2 +0.370479141756967 2 1 2 +0.882238434259769 1 1 2 +0.496857265474561 2 1 2 +0.277702369672893 2 1 2 +0.446926962878622 1 1 2 +0.36757607051588 1 1 2 +0.557136267106436 2 1 2 +1.00333007744122 2 1 2 +0.760219976689289 2 1 2 +0.332068843559009 2 1 2 +0.548961093445682 2 1 2 +0.313465233961872 2 1 2 +0.550216771807154 1 1 2 +0.29794278574353 1 1 2 +0.234198048951483 2 1 2 +0.273445183254746 2 1 2 +0.574886295740124 2 1 2 +0.258382409058055 1 1 2 +0.409845586460725 2 1 2 +0.326206723132256 2 1 2 +0.642595268751117 2 1 2 +0.232356531769144 2 1 2 +1.70736951927255 1 1 2 +0.274687338325608 2 1 2 +0.40877430223826 2 1 2 +0.365729356985064 2 1 2 +0.6050000403314 2 1 2 +0.592011487134505 2 1 2 +0.557179211825432 1 1 2 +0.873296855773591 1 1 2 +0.216826762785491 1 1 2 +0.517886780128018 2 1 2 +0.398323720600925 1 1 2 +1.12139464302831 2 1 2 +0.249538486660475 2 1 2 +0.360304338880141 1 1 2 +0.627773044075362 2 1 2 +0.996274959906684 2 1 2 +0.202797819180771 1 1 2 +0.383153769101205 1 1 2 +0.324797856324902 1 1 2 +0.239421301531662 2 1 2 +0.24289898785908 1 1 2 +0.547746136913622 1 1 2 +0.386255965400912 2 1 2 +0.60223673049116 2 1 2 +0.549261776998216 2 1 2 +0.395992071688511 2 1 2 +0.217402932038072 2 1 2 +0.295305459515413 2 1 2 +0.447909826549637 2 1 2 +0.71950962867128 2 1 2 +0.794816583397332 1 1 2 +0.241318968932987 2 1 2 +0.556293493098233 2 1 2 +0.238208378562322 2 1 2 +0.499247181746743 1 1 2 +0.317050968536836 2 1 2 +0.322686857249444 2 1 2 +0.71276761076242 2 1 2 +0.301030966624334 2 1 2 +0.336641004565653 2 1 2 +0.812046026214206 2 1 2 +0.270220261704131 2 1 2 +0.701954145112022 2 1 2 +0.43964095073941 2 1 2 +0.384704421988213 2 1 2 +0.501487364681699 2 1 2 +0.455023781459671 2 1 2 +0.332474164305816 2 1 2 +0.567142874907982 2 1 2 +0.253324335182053 2 1 2 +0.444329558298367 2 1 2 +0.750457236950695 2 1 2 +0.292500297080332 2 1 2 +0.319745451630673 2 1 2 +0.286210384865368 2 1 2 +0.283637752128579 1 1 2 +0.236044970372654 2 1 2 +0.606532173767213 1 1 2 +1.32620595835061 2 1 2 +0.49881945892801 2 1 2 +1.00559201100603 2 1 2 +0.498129494834216 2 1 2 +0.682007132416635 2 1 2 +0.521249610973914 2 1 2 +0.229929750671033 2 1 2 +1.12814610238938 1 1 2 +0.74135939367203 2 1 2 +1.14362542630031 2 1 2 +0.261969169934014 1 1 2 +0.240668217312327 1 1 2 +1.59220860546119 2 1 2 +0.459005868330534 2 1 2 +0.290018768199601 2 1 2 +0.204589440835719 1 1 2 +0.619039312673667 2 1 2 +0.667083334382893 1 1 2 +0.359845320132008 1 1 2 +0.912937103767445 2 1 2 +0.522430834145349 2 1 2 +0.297762304149053 1 1 2 +0.276240304783596 2 1 2 +0.399051717562123 2 1 2 +0.404254481667734 2 1 2 +1.23765251352633 1 1 2 +1.2213528437925 2 1 2 +0.554106620313858 2 1 2 +0.513543854359058 2 1 2 +0.718560875752879 2 1 2 +0.299045404005468 1 1 2 +0.197161504481574 2 1 2 +0.355424533393654 2 1 2 +0.601322385280793 2 1 2 +0.31408110064814 1 1 2 +0.681928297252204 2 1 2 +0.257899160580357 1 1 2 +0.331853308281021 2 1 2 +0.932271244383807 2 1 2 +0.762290747363875 1 1 2 +0.610315223598599 2 1 2 +0.508310743979851 2 1 2 +0.293542339726516 1 1 2 +0.249532498898509 2 1 2 +0.240661946068682 1 1 2 +0.480573774515142 2 1 2 +0.26503112695042 1 1 2 +0.745033574361612 2 1 2 +0.313418912457887 2 1 2 +0.428468490020874 2 1 2 +0.619836697801129 2 1 2 +0.404856983338945 2 1 2 +0.225135719018744 2 1 2 +0.247203725168153 2 1 2 
+0.473126435201081 2 1 2 +0.758881984366834 2 1 2 +0.530103620429835 2 1 2 +0.609787747426196 2 1 2 +0.42023331047044 1 1 2 +0.294545387085857 1 1 2 +0.311952071319945 2 1 2 +0.793299410776987 2 1 2 +0.376179978035794 2 1 2 +0.230418084856786 2 1 2 +0.25879024565358 2 1 2 +0.264796453159985 2 1 2 +0.745485785923675 2 1 2 +0.224026456721164 2 1 2 +0.6030135494348 1 1 2 +0.489733962171922 2 1 2 +0.39466687509252 1 1 2 +0.552205654391275 2 1 2 +0.575332864606377 2 1 2 +0.673079198373531 1 1 2 +0.346849143283538 1 1 2 +0.384205850032696 1 1 2 +0.382157410278578 2 1 2 +0.294710963958947 2 1 2 +0.487164402385991 1 1 2 +0.571768796864126 2 1 2 +0.243155946253846 1 1 2 +0.366816988109117 1 1 2 +0.556560232965345 2 1 2 +0.842238270178048 2 1 2 +0.630587019970835 1 1 2 +0.849346128585895 2 1 2 +0.301910596058078 1 1 2 +0.494388435823995 1 1 2 +0.635279696032103 2 1 2 +0.244333041807845 1 1 2 +0.462722638825509 2 1 2 +0.355580520263025 1 1 2 +0.419159925222802 2 1 2 +0.229441499742296 2 1 2 +0.272011206196529 2 1 2 +0.457900548880182 1 1 2 +0.42581270796691 2 1 2 +0.192946477357373 2 1 2 +0.920226167527353 2 1 2 +0.870356567626495 2 1 2 +0.506429857300226 1 1 2 +1.35129991323996 2 1 2 +0.81782131154567 1 1 2 +0.312753351203148 2 1 2 +0.240147726849663 2 1 2 +0.331594506915926 2 1 2 +0.303605405427918 2 1 2 +1.3692312598303 2 1 2 +0.464969591870211 2 1 2 +0.365081121121442 2 1 2 +0.53678523283272 1 1 2 +0.362543809949933 2 1 2 +0.300077415892361 2 1 2 +0.565255726546058 2 1 2 +0.508969800017276 1 1 2 +0.197931347436034 2 1 2 +0.425448002968464 2 1 2 +0.917689004198981 2 1 2 +0.265209389680314 1 1 2 +0.399534037154238 1 1 2 +0.292118455947818 2 1 2 +0.352289208022807 2 1 2 +0.800608594982045 2 1 2 +0.251768687680971 1 1 2 +0.280448242529617 2 1 2 +0.243473452337431 1 1 2 +0.286916333216323 2 1 2 +0.838049565027792 1 1 2 +0.244529227888586 1 1 2 +0.71863102671265 2 1 2 +1.98916958946463 2 1 2 +0.238806320688673 2 1 2 +0.693785935221629 2 1 2 +0.495890282805749 1 1 2 +0.191174545766406 2 1 2 +0.836607694563896 2 1 2 +0.391165841939288 2 1 2 +0.566993167116615 2 1 2 +0.240715729525045 2 1 2 +0.354039896192607 1 1 2 +1.21434836670206 2 1 2 +0.48154154974369 2 1 2 +0.798332748413893 2 1 2 +0.650917466844914 2 1 2 +0.384224495536896 1 1 2 +0.945026137940947 2 1 2 +0.418508744931679 2 1 2 +0.659365265496408 1 1 2 +0.271823834279208 2 1 2 +0.43920360190222 2 1 2 +0.449532948575899 2 1 2 +1.02510373135742 2 1 2 +0.49889822568904 1 1 2 +1.23542122006285 1 1 2 +0.274287087904293 2 1 2 +0.673318487358746 2 1 2 +0.397619138281994 2 1 2 +1.0555886269523 2 1 2 +0.229089202292979 1 1 2 +0.697871360095817 2 1 2 +0.614287525537126 1 1 2 +0.322578991679628 1 1 2 +0.605688688250448 2 1 2 +0.534481750546624 2 1 2 +0.565101205666666 1 1 2 +0.378499737439249 1 1 2 +0.52995012536057 2 1 2 +2.45554336829165 1 1 2 +0.744067895318506 2 1 2 +0.673980171567151 2 1 2 +0.33496535179204 2 1 2 +0.703852317870538 2 1 2 +0.623851718541645 2 1 2 +0.275936871629696 2 1 2 +0.245843960416957 2 1 2 +0.220780887604494 2 1 2 +0.585098991357547 2 1 2 +0.343992796279959 1 1 2 +0.580010432096859 2 1 2 +0.377174286172397 2 1 2 +0.614794867960386 1 1 2 +0.235740390671863 1 1 2 +0.498093604359181 2 1 2 +0.422668225465882 2 1 2 +0.85458472173833 2 1 2 +0.318077105190021 2 1 2 +0.660599386236034 2 1 2 +0.44253879597235 2 1 2 +0.482452162905769 2 1 2 +0.569360166827625 2 1 2 +1.5195957937337 1 1 2 +0.335177741698269 2 1 2 +0.241392133198455 2 1 2 +0.503619286339201 2 1 2 +0.578489345701315 2 1 2 +0.327768204464024 2 1 2 +0.436095089114902 2 1 2 
+0.424989568541 1 1 2 +0.214045608149353 2 1 2 +0.306116924393253 1 1 2 +0.476975246716608 2 1 2 +0.756750965776553 2 1 2 +0.312822136071239 2 1 2 +0.470827169455528 1 1 2 +0.37315029361616 2 1 2 +0.685155315108113 2 1 2 +0.959530818891534 2 1 2 +0.913595420136271 2 1 2 +0.595438752846243 2 1 2 +0.36802176344941 2 1 2 +0.418544504566566 2 1 2 +0.363048213885529 2 1 2 +0.254338756573215 1 1 2 +0.730957519992839 1 1 2 +0.263046554830887 1 1 2 +0.46094182659418 2 1 2 +0.531059000798822 2 1 2 +0.353280265477637 2 1 2 +0.464342980616116 2 1 2 +0.229724823533327 2 1 2 +0.504945673660676 2 1 2 +0.532493395334424 2 1 2 +0.423801763698387 1 1 2 +0.787113721614964 2 1 2 +0.223160559034952 2 1 2 +0.419595856308554 2 1 2 +0.396648316145306 2 1 2 +0.308908241587595 2 1 2 +0.627802576140553 2 1 2 +0.64888860721256 2 1 2 +0.738730808101364 2 1 2 +0.658745774579089 1 1 2 +0.679191956616965 1 1 2 +0.278164538209912 1 1 2 +0.205911141408479 2 1 2 +0.241638399787725 2 1 2 +0.24858355547484 1 1 2 +0.73740496979995 2 1 2 +0.247935082999496 2 1 2 +0.826311099617232 2 1 2 +0.590607775557781 1 1 2 +0.351249908681046 1 1 2 +0.370792468725378 2 1 2 +0.389722068994738 1 1 2 +0.251157837165118 2 1 2 +0.663087218040623 2 1 2 +0.454359737429872 2 1 2 +0.435474095638232 2 1 2 +0.284410206592962 2 1 2 +0.344506290138683 2 1 2 +1.01768620078799 1 1 2 +0.331330031800195 1 1 2 +0.277021859762052 1 1 2 +0.347332671037543 1 1 2 +0.286836805838407 1 1 2 +0.340934631295205 2 1 2 +1.22270556676254 1 1 2 +0.360534849486478 1 1 2 +0.359892263518994 2 1 2 +0.552595743599511 2 1 2 +0.301744081404754 2 1 2 +0.416037514267758 2 1 2 +0.541344562283886 2 1 2 +0.579986637345764 2 1 2 +0.221912718773351 2 1 2 +0.465245817277264 2 1 2 +0.474738754014913 2 1 2 +1.70409538281312 2 1 2 +1.02235518855245 2 1 2 +0.301214497598036 1 1 2 +0.991176433131545 2 1 2 +0.401432084705109 2 1 2 +1.11715380433533 2 1 2 +0.81719064511715 2 1 2 +0.549517654685354 1 1 2 +0.251345033237621 2 1 2 +0.357859075575934 2 1 2 +0.90132423193762 2 1 2 +0.272936669704676 1 1 2 +0.455508577827349 2 1 2 +0.861185664428614 2 1 2 +0.266987292082781 2 1 2 +0.578879341650739 2 1 2 +0.649256823455797 2 1 2 +0.418711362089519 2 1 2 +0.433426379919396 2 1 2 +0.642462173639701 1 1 2 +0.406446379518523 2 1 2 +0.290863063788828 1 1 2 +0.395803052313048 2 1 2 +0.311087619708231 2 1 2 +0.279185686505835 1 1 2 +0.412823984876793 1 1 2 +0.314508721309633 2 1 2 +0.417280760034167 2 1 2 +0.357813047077128 2 1 2 +0.256161295149574 2 1 2 +0.240326641914136 2 1 2 +0.469105961018824 2 1 2 +0.23311026462364 2 1 2 +0.219699590325278 2 1 2 +0.267828103451759 2 1 2 +0.324090708482963 1 1 2 +0.882370084866449 2 1 2 +0.296556033418114 2 1 2 +0.535028311840886 1 1 2
diff --git a/Python/hbayesdm/common/extdata/cra_exampleData.txt b/Python/hbayesdm/common/extdata/cra_exampleData.txt
new file mode 100644
index 00000000..a658ea69
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/cra_exampleData.txt
@@ -0,0 +1,541 @@
+subjID trial_number RT prob reward_var reward_fix outcome types ambig choice
+1 1 2579 0.5 342 50 0 ambiguous 0.75 0 +1 2 1736 0.375 91 50 0 low 0 1 +1 3 1006 0.5 342 50 342 ambiguous 0.5 1 +1 4 1374 0.375 183 50 0 low 0 1 +1 5 1119 0.25 648 50 648 low 0 1 +1 6 1147 0.375 648 50 0 low 0 1 +1 7 1034 0.375 99 50 99 low 0 1 +1 8 953 0.375 98 50 98 low 0 1 +1 9 1114 0.5 54 50 50 ambiguous 0.5 0 +1 10 3243 0.5 99 50 50 ambiguous 0.5 0 +1 11 2955 0.5 340 50 50 ambiguous 0.75 0 +1 12 1105 0.5 91 50 50 ambiguous 0.5 0 +1 13 920 0.375 342 50 0 low 0 1 +1 14 242 0.125 98 50 0 low 0 0 +1 15 1665
0.25 181 50 0 low 0 0 +1 16 801 0.5 183 50 183 ambiguous 0.25 1 +1 17 793 0.5 183 50 0 ambiguous 0.75 0 +1 18 816 0.5 46 50 0 ambiguous 0.5 0 +1 19 1009 0.375 340 50 0 low 0 1 +1 20 191 0.25 46 50 50 low 0 0 +1 21 64 0.25 342 50 0 low 0 0 +1 22 807 0.125 648 50 0 low 0 1 +1 23 1047 0.25 98 50 50 low 0 0 +1 24 401 0.125 91 50 0 low 0 0 +1 25 1009 0.125 342 50 50 low 0 0 +1 26 707 0.5 99 50 0 ambiguous 0.25 1 +1 27 516 0.5 181 50 181 ambiguous 0.25 1 +1 28 66 0.375 48 50 0 low 0 0 +1 29 2206 0.5 340 50 0 ambiguous 0.5 0 +1 30 826 0.5 343 50 343 ambiguous 0.5 1 +1 31 391 0.125 343 50 50 low 0 0 +1 32 293 0.25 54 50 0 low 0 0 +1 33 310 0.5 648 50 648 ambiguous 0.5 1 +1 34 923 0.375 648 50 0 low 0 1 +1 35 744 0.5 48 50 0 ambiguous 0.25 0 +1 36 278 0.5 48 50 0 ambiguous 0.75 0 +1 37 450 0.375 46 50 0 low 0 0 +1 38 267 0.5 654 50 0 ambiguous 0.5 1 +1 39 169 0.5 54 50 50 ambiguous 0.25 0 +1 40 179 0.5 46 50 0 ambiguous 0.75 0 +1 41 142 0.5 648 50 0 ambiguous 0.25 1 +1 42 863 0.5 648 50 0 ambiguous 0.75 0 +1 43 75 0.25 183 50 0 low 0 0 +1 44 183 0.25 91 50 0 low 0 0 +1 45 84 0.125 181 50 50 low 0 0 +1 46 2191 0.375 343 50 0 low 0 1 +1 47 269 0.125 648 50 0 low 0 0 +1 48 396 0.125 99 50 50 low 0 0 +1 49 137 0.5 654 50 0 ambiguous 0.25 1 +1 50 124 0.5 342 50 0 ambiguous 0.25 1 +1 51 1926 0.5 91 50 50 ambiguous 0.75 0 +1 52 96 0.125 183 50 50 low 0 0 +1 53 59 0.5 98 50 0 ambiguous 0.75 0 +1 54 342 0.125 340 50 0 low 0 0 +1 55 157 0.375 54 50 50 low 0 0 +1 56 122 0.5 183 50 0 ambiguous 0.75 0 +1 57 29 0.125 48 50 50 low 0 0 +1 58 985 0.375 183 50 0 low 0 1 +1 59 142 0.5 183 50 183 ambiguous 0.25 1 +1 60 246 0.125 54 50 50 low 0 0 +1 61 254 0.5 99 50 50 ambiguous 0.75 0 +1 62 107 0.5 648 50 648 ambiguous 0.25 1 +1 63 86 0.5 343 50 0 ambiguous 0.25 1 +1 64 50 0.25 48 50 50 low 0 0 +1 65 1507 0.125 183 50 50 low 0 0 +1 66 247 0.25 99 50 50 low 0 0 +1 67 21 0.5 98 50 98 ambiguous 0.25 1 +1 68 276 0.5 183 50 0 ambiguous 0.5 0 +1 69 1697 0.25 343 50 0 low 0 0 +1 70 208 0.5 648 50 648 ambiguous 0.5 1 +1 71 874 0.5 183 50 50 ambiguous 0.5 0 +1 72 4451 0.25 654 50 50 low 0 0 +1 73 255 0.5 181 50 0 ambiguous 0.75 0 +1 74 220 0.5 654 50 50 ambiguous 0.75 0 +1 75 2058 0.5 46 50 0 ambiguous 0.25 0 +1 76 198 0.125 46 50 50 low 0 0 +1 77 293 0.5 91 50 91 ambiguous 0.25 1 +1 78 133 0.5 54 50 50 ambiguous 0.75 0 +1 79 281 0.375 181 50 181 low 0 1 +1 80 63 0.5 48 50 0 ambiguous 0.5 0 +1 81 1945 0.5 181 50 181 ambiguous 0.5 1 +1 82 238 0.25 183 50 50 low 0 0 +1 83 210 0.25 340 50 0 low 0 0 +1 84 3110 0.5 648 50 648 ambiguous 0.75 1 +1 85 660 0.5 343 50 0 ambiguous 0.75 0 +1 86 13 0.5 98 50 0 ambiguous 0.5 0 +1 87 744 0.375 654 50 0 low 0 1 +1 88 3835 0.125 654 50 0 low 0 0 +1 89 72 0.25 648 50 0 low 0 1 +1 90 90 0.5 340 50 340 ambiguous 0.25 1 +2 1 857 0.375 647 50 0 low 0 1 +2 2 437 0.5 99 50 0 ambiguous 0.75 0 +2 3 289 0.5 96 50 0 ambiguous 0.25 1 +2 4 514 0.5 184 50 184 ambiguous 0.75 1 +2 5 233 0.5 336 50 336 ambiguous 0.25 1 +2 6 321 0.375 180 50 0 low 0 1 +2 7 266 0.5 47 50 50 ambiguous 0.75 0 +2 8 288 0.375 181 50 0 low 0 0 +2 9 480 0.25 647 50 50 low 0 0 +2 10 330 0.5 180 50 180 ambiguous 0.25 1 +2 11 421 0.25 180 50 0 low 0 1 +2 12 290 0.5 47 50 0 ambiguous 0.25 0 +2 13 540 0.125 91 50 0 low 0 0 +2 14 71 0.5 91 50 0 ambiguous 0.5 0 +2 15 184 0.5 647 50 0 ambiguous 0.25 0 +2 16 236 0.5 649 50 649 ambiguous 0.5 1 +2 17 364 0.375 336 50 0 low 0 1 +2 18 241 0.375 91 50 0 low 0 1 +2 19 62 0.375 96 50 50 low 0 0 +2 20 456 0.25 649 50 50 low 0 0 +2 21 653 0.5 91 50 0 ambiguous 0.75 1 +2 22 282 0.25 184 50 50 low 0 0 +2 
23 42 0.125 49 50 0 low 0 0 +2 24 52 0.125 181 50 0 low 0 0 +2 25 443 0.125 180 50 0 low 0 0 +2 26 353 0.25 181 50 181 low 0 1 +2 27 265 0.375 48 50 0 low 0 0 +2 28 245 0.5 647 50 0 ambiguous 0.5 1 +2 29 286 0.125 647 50 0 low 0 0 +2 30 198 0.25 336 50 0 low 0 0 +2 31 76 0.5 49 50 0 ambiguous 0.5 0 +2 32 261 0.5 340 50 0 ambiguous 0.25 1 +2 33 166 0.5 99 50 50 ambiguous 0.5 0 +2 34 333 0.125 336 50 0 low 0 0 +2 35 99 0.125 340 50 0 low 0 0 +2 36 255 0.5 647 50 0 ambiguous 0.25 0 +2 37 257 0.5 647 50 0 ambiguous 0.75 1 +2 38 199 0.375 184 50 0 low 0 1 +2 39 118 0.375 49 50 0 low 0 0 +2 40 233 0.5 180 50 50 ambiguous 0.5 0 +2 41 49 0.5 49 50 49 ambiguous 0.25 1 +2 42 102 0.25 48 50 0 low 0 0 +2 43 512 0.5 181 50 181 ambiguous 0.75 1 +2 44 20 0.125 336 50 50 low 0 0 +2 45 198 0.5 48 50 50 ambiguous 0.5 0 +2 46 201 0.25 340 50 50 low 0 0 +2 47 17 0.5 96 50 96 ambiguous 0.5 1 +2 48 74 0.25 47 50 50 low 0 0 +2 49 211 0.25 99 50 50 low 0 0 +2 50 109 0.5 49 50 0 ambiguous 0.75 0 +2 51 410 0.125 649 50 50 low 0 0 +2 52 304 0.5 649 50 50 ambiguous 0.25 0 +2 53 220 0.25 91 50 0 low 0 0 +2 54 21 0.5 336 50 50 ambiguous 0.75 0 +2 55 271 0.5 48 50 0 ambiguous 0.75 0 +2 56 1458 0.125 99 50 50 low 0 0 +2 57 254 0.25 49 50 50 low 0 0 +2 58 216 0.5 340 50 0 ambiguous 0.75 1 +2 59 241 0.375 647 50 0 low 0 1 +2 60 21 0.5 647 50 0 ambiguous 0.75 1 +2 61 8 0.5 340 50 340 ambiguous 0.5 1 +2 62 168 0.5 336 50 0 ambiguous 0.5 1 +2 63 387 0.5 184 50 50 ambiguous 0.5 0 +2 64 266 0.375 99 50 0 low 0 0 +2 65 277 0.5 91 50 0 ambiguous 0.25 0 +2 66 350 0.5 647 50 0 ambiguous 0.5 0 +2 67 358 0.5 47 50 0 ambiguous 0.5 0 +2 68 407 0.5 184 50 0 ambiguous 0.25 0 +2 69 5 0.125 647 50 50 low 0 0 +2 70 369 0.125 48 50 50 low 0 0 +2 71 175 0.375 649 50 649 low 0 1 +2 72 650 0.25 647 50 50 low 0 0 +2 73 459 0.5 336 50 0 ambiguous 0.5 1 +2 74 129 0.5 96 50 0 ambiguous 0.75 0 +2 75 443 0.125 96 50 50 low 0 0 +2 76 398 0.375 340 50 50 low 0 0 +2 77 105 0.5 99 50 0 ambiguous 0.25 1 +2 78 239 0.125 47 50 0 low 0 0 +2 79 76 0.5 48 50 50 ambiguous 0.25 0 +2 80 198 0.25 336 50 336 low 0 1 +2 81 186 0.5 649 50 649 ambiguous 0.75 1 +2 82 130 0.5 181 50 0 ambiguous 0.25 1 +2 83 211 0.5 336 50 336 ambiguous 0.75 1 +2 84 231 0.5 180 50 50 ambiguous 0.75 0 +2 85 75 0.5 181 50 0 ambiguous 0.5 1 +2 86 41 0.375 47 50 50 low 0 0 +2 87 406 0.125 184 50 0 low 0 1 +2 88 367 0.25 96 50 0 low 0 1 +2 89 100 0.5 336 50 336 ambiguous 0.25 1 +2 90 967 0.375 336 50 0 low 0 0 +3 1 2755 0.5 341 50 0 ambiguous 0.25 1 +3 2 1695 0.125 183 50 0 low 0 1 +3 3 1291 0.5 92 50 0 ambiguous 0.75 0 +3 4 940 0.25 341 50 0 low 0 1 +3 5 1716 0.25 342 50 0 low 0 1 +3 6 1165 0.375 653 50 0 low 0 1 +3 7 1306 0.5 343 50 343 ambiguous 0.75 1 +3 8 1815 0.5 182 50 0 ambiguous 0.25 1 +3 9 1467 0.125 653 50 0 low 0 1 +3 10 1420 0.5 343 50 0 ambiguous 0.25 1 +3 11 1625 0.25 653 50 0 low 0 1 +3 12 1157 0.5 646 50 646 ambiguous 0.5 1 +3 13 1225 0.5 183 50 0 ambiguous 0.25 1 +3 14 1438 0.25 183 50 183 low 0 1 +3 15 1683 0.5 653 50 0 ambiguous 0.75 1 +3 16 1838 0.5 50 50 0 ambiguous 0.5 0 +3 17 1618 0.25 50 50 0 low 0 0 +3 18 1708 0.5 183 50 183 ambiguous 0.75 1 +3 19 970 0.5 94 50 0 ambiguous 0.75 1 +3 20 1151 0.5 653 50 0 ambiguous 0.75 1 +3 21 1928 0.5 646 50 0 ambiguous 0.25 1 +3 22 1758 0.5 653 50 0 ambiguous 0.5 1 +3 23 2629 0.125 653 50 0 low 0 1 +3 24 1439 0.5 183 50 0 ambiguous 0.25 1 +3 25 1328 0.5 50 50 0 ambiguous 0.75 0 +3 26 1193 0.5 342 50 0 ambiguous 0.75 1 +3 27 1290 0.5 94 50 94 ambiguous 0.5 1 +3 28 1487 0.5 183 50 0 ambiguous 0.5 1 +3 29 1154 0.5 94 50 50 ambiguous 
0.25 0 +3 30 1205 0.375 94 50 50 low 0 0 +3 31 1449 0.25 182 50 0 low 0 1 +3 32 1497 0.5 342 50 0 ambiguous 0.25 1 +3 33 1430 0.25 183 50 0 low 0 1 +3 34 1514 0.375 92 50 0 low 0 0 +3 35 992 0.5 653 50 0 ambiguous 0.25 1 +3 36 1920 0.5 343 50 0 ambiguous 0.5 1 +3 37 1612 0.5 653 50 653 ambiguous 0.5 1 +3 38 1224 0.5 341 50 0 ambiguous 0.5 1 +3 39 549 0.375 342 50 342 low 0 1 +3 40 617 0.5 94 50 0 ambiguous 0.5 0 +3 41 1139 0.125 341 50 341 low 0 1 +3 42 1991 0.375 50 50 50 low 0 1 +3 43 1678 0.125 94 50 0 low 0 0 +3 44 1776 0.375 94 50 50 low 0 0 +3 45 1733 0.125 183 50 0 low 0 1 +3 46 1216 0.25 343 50 0 low 0 1 +3 47 1125 0.375 182 50 0 low 0 1 +3 48 1618 0.125 342 50 0 low 0 1 +3 49 1828 0.375 51 50 50 low 0 0 +3 50 1781 0.25 646 50 0 low 0 1 +3 51 553 0.375 183 50 0 low 0 1 +3 52 899 0.5 183 50 183 ambiguous 0.75 1 +3 53 388 0.125 47 50 50 low 0 0 +3 54 615 0.5 47 50 0 ambiguous 0.75 0 +3 55 594 0.375 343 50 0 low 0 1 +3 56 346 0.25 47 50 0 low 0 0 +3 57 1069 0.125 343 50 0 low 0 1 +3 58 894 0.5 51 50 50 ambiguous 0.25 0 +3 59 576 0.5 646 50 0 ambiguous 0.75 1 +3 60 592 0.125 182 50 0 low 0 1 +3 61 1508 0.5 92 50 0 ambiguous 0.5 0 +3 62 383 0.375 646 50 646 low 0 1 +3 63 428 0.5 51 50 50 ambiguous 0.5 0 +3 64 432 0.125 51 50 50 low 0 0 +3 65 454 0.375 47 50 0 low 0 0 +3 66 926 0.5 92 50 0 ambiguous 0.25 0 +3 67 346 0.375 341 50 341 low 0 1 +3 68 355 0.5 51 50 0 ambiguous 0.75 0 +3 69 879 0.25 94 50 50 low 0 0 +3 70 827 0.125 92 50 0 low 0 0 +3 71 437 0.5 182 50 182 ambiguous 0.5 1 +3 72 432 0.5 47 50 0 ambiguous 0.25 0 +3 73 411 0.5 341 50 0 ambiguous 0.75 0 +3 74 1125 0.375 183 50 183 low 0 1 +3 75 422 0.125 646 50 0 low 0 1 +3 76 290 0.5 47 50 50 ambiguous 0.5 0 +3 77 366 0.25 94 50 0 low 0 0 +3 78 360 0.25 653 50 0 low 0 1 +3 79 396 0.375 653 50 653 low 0 1 +3 80 408 0.125 94 50 0 low 0 0 +3 81 442 0.5 183 50 0 ambiguous 0.5 1 +3 82 419 0.25 92 50 50 low 0 0 +3 83 1415 0.5 50 50 0 ambiguous 0.25 0 +3 84 1163 0.5 182 50 50 ambiguous 0.75 0 +3 85 717 0.5 94 50 50 ambiguous 0.25 0 +3 86 537 0.5 342 50 0 ambiguous 0.5 1 +3 87 1530 0.5 94 50 50 ambiguous 0.75 0 +3 88 1024 0.25 51 50 0 low 0 0 +3 89 375 0.5 653 50 653 ambiguous 0.25 1 +3 90 777 0.125 50 50 0 low 0 0 +4 1 940 0.5 339 50 339 ambiguous 0.75 1 +4 2 3222 0.5 337 50 337 ambiguous 0.75 1 +4 3 1295 0.25 184 50 0 low 0 1 +4 4 1943 0.5 182 50 0 ambiguous 0.25 1 +4 5 1176 0.375 652 50 652 low 0 1 +4 6 918 0.5 337 50 0 ambiguous 0.25 1 +4 7 1404 0.25 99 50 0 low 0 1 +4 8 1259 0.125 52 50 0 low 0 1 +4 9 1847 0.125 337 50 0 low 0 1 +4 10 952 0.5 182 50 182 ambiguous 0.75 1 +4 11 1341 0.5 52 50 0 ambiguous 0.25 1 +4 12 2206 0.5 93 50 0 ambiguous 0.75 1 +4 13 4242 0.375 182 50 0 low 0 1 +4 14 13020 0.125 339 50 0 low 0 0 +4 15 1142 0.375 179 50 0 low 0 1 +4 16 1633 0.5 339 50 0 ambiguous 0.5 1 +4 17 1077 0.25 94 50 50 low 0 0 +4 18 2892 0.5 48 50 0 ambiguous 0.75 0 +4 19 524 0.5 652 50 652 ambiguous 0.5 1 +4 20 797 0.5 337 50 337 ambiguous 0.5 1 +4 21 1576 0.5 650 50 650 ambiguous 0.5 1 +4 22 1018 0.25 339 50 0 low 0 1 +4 23 1626 0.25 339 50 0 low 0 1 +4 24 766 0.5 94 50 0 ambiguous 0.25 1 +4 25 1089 0.5 94 50 94 ambiguous 0.75 1 +4 26 546 0.5 650 50 650 ambiguous 0.75 1 +4 27 982 0.125 93 50 0 low 0 0 +4 28 1950 0.125 650 50 650 low 0 1 +4 29 663 0.125 179 50 0 low 0 1 +4 30 482 0.375 650 50 650 low 0 1 +4 31 634 0.25 337 50 337 low 0 1 +4 32 466 0.5 94 50 94 ambiguous 0.5 1 +4 33 1844 0.25 182 50 0 low 0 1 +4 34 576 0.375 339 50 0 low 0 1 +4 35 618 0.125 182 50 50 low 0 0 +4 36 659 0.5 48 50 0 ambiguous 0.75 0 +4 37 389 0.125 652 50 0 
low 0 1 +4 38 1116 0.375 99 50 0 low 0 1 +4 39 2504 0.25 93 50 0 low 0 1 +4 40 374 0.5 650 50 650 ambiguous 0.25 1 +4 41 342 0.5 179 50 179 ambiguous 0.5 1 +4 42 409 0.375 48 50 48 low 0 1 +4 43 2010 0.125 48 50 50 low 0 0 +4 44 445 0.5 179 50 0 ambiguous 0.25 1 +4 45 412 0.5 184 50 184 ambiguous 0.25 1 +4 46 404 0.375 650 50 0 low 0 1 +4 47 392 0.125 184 50 50 low 0 0 +4 48 1678 0.125 339 50 339 low 0 1 +4 49 428 0.5 339 50 0 ambiguous 0.75 1 +4 50 385 0.5 99 50 99 ambiguous 0.25 1 +4 51 370 0.5 93 50 93 ambiguous 0.25 1 +4 52 537 0.25 48 50 50 low 0 0 +4 53 1625 0.5 52 50 0 ambiguous 0.5 1 +4 54 355 0.25 650 50 0 low 0 1 +4 55 400 0.25 650 50 650 low 0 1 +4 56 381 0.5 48 50 50 ambiguous 0.5 0 +4 57 339 0.5 339 50 339 ambiguous 0.25 1 +4 58 320 0.375 339 50 0 low 0 1 +4 59 375 0.375 48 50 50 low 0 0 +4 60 525 0.5 184 50 184 ambiguous 0.75 1 +4 61 1071 0.125 99 50 50 low 0 0 +4 62 1389 0.5 652 50 652 ambiguous 0.75 1 +4 63 359 0.5 652 50 0 ambiguous 0.25 1 +4 64 412 0.375 337 50 0 low 0 1 +4 65 434 0.5 650 50 0 ambiguous 0.75 1 +4 66 566 0.5 339 50 0 ambiguous 0.5 1 +4 67 875 0.375 94 50 0 low 0 1 +4 68 361 0.375 93 50 93 low 0 1 +4 69 381 0.125 48 50 0 low 0 0 +4 70 671 0.5 650 50 650 ambiguous 0.5 1 +4 71 900 0.25 48 50 0 low 0 0 +4 72 394 0.5 48 50 0 ambiguous 0.25 0 +4 73 294 0.5 179 50 0 ambiguous 0.75 1 +4 74 249 0.5 93 50 93 ambiguous 0.5 1 +4 75 341 0.375 184 50 0 low 0 1 +4 76 1096 0.5 182 50 0 ambiguous 0.5 1 +4 77 1049 0.25 52 50 0 low 0 0 +4 78 339 0.5 48 50 0 ambiguous 0.25 0 +4 79 418 0.5 650 50 0 ambiguous 0.25 1 +4 80 415 0.375 52 50 50 low 0 0 +4 81 354 0.5 339 50 0 ambiguous 0.25 1 +4 82 1097 0.25 652 50 0 low 0 1 +4 83 580 0.125 94 50 50 low 0 0 +4 84 360 0.5 99 50 0 ambiguous 0.5 1 +4 85 1281 0.25 179 50 0 low 0 1 +4 86 642 0.125 650 50 0 low 0 1 +4 87 279 0.5 99 50 0 ambiguous 0.75 1 +4 88 926 0.5 52 50 0 ambiguous 0.75 0 +4 89 906 0.5 48 50 0 ambiguous 0.5 0 +4 90 326 0.5 184 50 0 ambiguous 0.5 1 +5 1 459 0.5 340 50 340 ambiguous 0.5 1 +5 2 762 0.5 52 50 0 ambiguous 0.5 0 +5 3 623 0.5 97 50 97 ambiguous 0.75 1 +5 4 722 0.5 337 50 337 ambiguous 0.5 1 +5 5 1220 0.5 183 50 0 ambiguous 0.25 1 +5 6 983 0.25 52 50 0 low 0 0 +5 7 919 0.375 650 50 0 low 0 1 +5 8 802 0.375 183 50 183 low 0 1 +5 9 834 0.5 339 50 339 ambiguous 0.75 1 +5 10 810 0.5 52 50 0 ambiguous 0.75 0 +5 11 657 0.5 649 50 649 ambiguous 0.75 1 +5 12 801 0.25 650 50 0 low 0 1 +5 13 803 0.5 50 50 50 ambiguous 0.75 0 +5 14 839 0.125 50 50 0 low 0 0 +5 15 824 0.125 50 50 0 low 0 0 +5 16 950 0.5 50 50 50 ambiguous 0.25 0 +5 17 870 0.5 183 50 183 ambiguous 0.5 1 +5 18 776 0.375 92 50 92 low 0 1 +5 19 854 0.125 97 50 50 low 0 0 +5 20 760 0.5 92 50 92 ambiguous 0.75 1 +5 21 713 0.125 649 50 0 low 0 1 +5 22 821 0.5 337 50 337 ambiguous 0.25 1 +5 23 810 0.5 650 50 0 ambiguous 0.75 1 +5 24 1050 0.375 340 50 0 low 0 1 +5 25 928 0.375 654 50 0 low 0 1 +5 26 725 0.5 50 50 50 ambiguous 0.75 0 +5 27 728 0.25 337 50 0 low 0 1 +5 28 657 0.5 654 50 654 ambiguous 0.25 1 +5 29 703 0.5 92 50 92 ambiguous 0.5 1 +5 30 823 0.375 183 50 183 low 0 1 +5 31 852 0.5 99 50 0 ambiguous 0.5 1 +5 32 638 0.5 649 50 649 ambiguous 0.5 1 +5 33 861 0.5 339 50 0 ambiguous 0.5 1 +5 34 768 0.375 184 50 184 low 0 1 +5 35 641 0.25 340 50 0 low 0 0 +5 36 741 0.375 339 50 339 low 0 1 +5 37 829 0.5 183 50 183 ambiguous 0.5 1 +5 38 782 0.5 340 50 0 ambiguous 0.75 1 +5 39 909 0.5 97 50 97 ambiguous 0.5 1 +5 40 736 0.125 654 50 0 low 0 0 +5 41 883 0.5 649 50 649 ambiguous 0.25 1 +5 42 681 0.25 97 50 50 low 0 0 +5 43 893 0.25 92 50 0 low 0 1 +5 44 810 0.375 
50 50 50 low 0 0 +5 45 1219 0.5 52 50 0 ambiguous 0.25 0 +5 46 911 0.25 649 50 50 low 0 0 +5 47 781 0.5 340 50 0 ambiguous 0.25 1 +5 48 763 0.375 337 50 0 low 0 1 +5 49 810 0.5 184 50 184 ambiguous 0.5 1 +5 50 756 0.25 654 50 50 low 0 0 +5 51 735 0.375 97 50 0 low 0 1 +5 52 728 0.375 649 50 0 low 0 1 +5 53 1035 0.5 337 50 0 ambiguous 0.75 1 +5 54 743 0.5 183 50 183 ambiguous 0.25 1 +5 55 857 0.5 99 50 50 ambiguous 0.75 0 +5 56 742 0.5 339 50 339 ambiguous 0.25 1 +5 57 652 0.5 650 50 0 ambiguous 0.5 1 +5 58 777 0.5 92 50 0 ambiguous 0.25 1 +5 59 837 0.5 50 50 50 ambiguous 0.25 0 +5 60 775 0.5 50 50 50 ambiguous 0.5 0 +5 61 872 0.25 183 50 50 low 0 0 +5 62 789 0.5 654 50 654 ambiguous 0.75 1 +5 63 793 0.375 99 50 0 low 0 1 +5 64 888 0.125 650 50 50 low 0 0 +5 65 851 0.5 99 50 50 ambiguous 0.25 0 +5 66 878 0.25 50 50 0 low 0 0 +5 67 920 0.375 52 50 50 low 0 0 +5 68 772 0.25 183 50 0 low 0 1 +5 69 784 0.25 184 50 0 low 0 1 +5 70 957 0.5 650 50 650 ambiguous 0.25 1 +5 71 746 0.5 183 50 183 ambiguous 0.75 1 +5 72 784 0.5 184 50 0 ambiguous 0.25 1 +5 73 750 0.125 340 50 50 low 0 0 +5 74 746 0.5 50 50 0 ambiguous 0.5 0 +5 75 937 0.125 184 50 50 low 0 0 +5 76 836 0.125 339 50 0 low 0 1 +5 77 720 0.25 50 50 50 low 0 0 +5 78 729 0.25 99 50 0 low 0 1 +5 79 639 0.5 183 50 183 ambiguous 0.75 1 +5 80 784 0.125 99 50 0 low 0 0 +5 81 599 0.25 339 50 0 low 0 1 +5 82 705 0.375 50 50 0 low 0 0 +5 83 817 0.125 183 50 0 low 0 0 +5 84 785 0.5 97 50 0 ambiguous 0.25 1 +5 85 726 0.125 183 50 50 low 0 0 +5 86 1112 0.125 92 50 0 low 0 0 +5 87 799 0.125 52 50 0 low 0 0 +5 88 818 0.5 654 50 0 ambiguous 0.5 1 +5 89 847 0.5 184 50 0 ambiguous 0.75 1 +5 90 778 0.125 337 50 0 low 0 0 +6 1 7265 0.25 648 50 0 low 0 1 +6 2 8033 0.375 651 50 0 low 0 1 +6 3 5415 0.375 338 50 338 low 0 1 +6 4 5183 0.5 337 50 0 ambiguous 0.5 1 +6 5 1609 0.375 54 50 0 low 0 0 +6 6 3036 0.5 646 50 0 ambiguous 0.75 1 +6 7 10138 0.5 49 50 50 ambiguous 0.75 0 +6 8 3121 0.375 648 50 0 low 0 1 +6 9 2224 0.25 176 50 50 low 0 0 +6 10 3415 0.125 49 50 0 low 0 0 +6 11 3309 0.5 646 50 0 ambiguous 0.25 1 +6 12 5624 0.25 184 50 184 low 0 1 +6 13 5032 0.5 54 50 50 ambiguous 0.75 0 +6 14 5991 0.5 53 50 0 ambiguous 0.5 0 +6 15 2220 0.25 176 50 0 low 0 1 +6 16 665 0.25 49 50 0 low 0 0 +6 17 6233 0.125 651 50 651 low 0 1 +6 18 6381 0.125 91 50 0 low 0 0 +6 19 15254 0.5 338 50 0 ambiguous 0.25 1 +6 20 8786 0.375 337 50 0 low 0 1 +6 21 11423 0.5 91 50 91 ambiguous 0.25 1 +6 22 5114 0.125 99 50 50 low 0 0 +6 23 2545 0.125 53 50 50 low 0 0 +6 24 13957 0.5 341 50 50 ambiguous 0.5 0 +6 25 1837 0.5 648 50 0 ambiguous 0.25 1 +6 26 4679 0.375 91 50 0 low 0 1 +6 27 2697 0.125 91 50 50 low 0 0 +6 28 12661 0.5 651 50 0 ambiguous 0.75 1 +6 29 1942 0.5 99 50 99 ambiguous 0.5 1 +6 30 3170 0.5 99 50 99 ambiguous 0.25 1 +6 31 6455 0.375 99 50 99 low 0 1 +6 32 3171 0.25 651 50 0 low 0 1 +6 33 5667 0.375 176 50 0 low 0 1 +6 34 4606 0.5 91 50 0 ambiguous 0.75 1 +6 35 9317 0.125 646 50 0 low 0 0 +6 36 1734 0.5 651 50 651 ambiguous 0.5 1 +6 37 6134 0.5 91 50 0 ambiguous 0.5 1 +6 38 1547 0.375 91 50 0 low 0 1 +6 39 729 0.5 176 50 176 ambiguous 0.25 1 +6 40 4438 0.5 49 50 0 ambiguous 0.25 0 +6 41 4940 0.25 54 50 50 low 0 0 +6 42 1126 0.5 49 50 50 ambiguous 0.5 0 +6 43 1726 0.5 176 50 50 ambiguous 0.75 0 +6 44 611 0.5 341 50 341 ambiguous 0.25 1 +6 45 982 0.5 91 50 91 ambiguous 0.25 1 +6 46 3389 0.5 184 50 184 ambiguous 0.5 1 +6 47 372 0.375 184 50 0 low 0 1 +6 48 54 0.125 341 50 0 low 0 1 +6 49 5306 0.25 91 50 50 low 0 0 +6 50 806 0.25 91 50 50 low 0 0 +6 51 2225 0.25 341 50 0 low 0 0 +6 
52 1382 0.5 651 50 0 ambiguous 0.25 1
+6 53 4960 0.5 176 50 0 ambiguous 0.25 1
+6 54 641 0.375 646 50 0 low 0 1
+6 55 1525 0.5 646 50 646 ambiguous 0.5 1
+6 56 1188 0.25 646 50 0 low 0 0
+6 57 2095 0.375 53 50 50 low 0 0
+6 58 346 0.125 54 50 50 low 0 0
+6 59 4855 0.25 338 50 50 low 0 0
+6 60 4182 0.25 337 50 50 low 0 0
+6 61 788 0.125 338 50 50 low 0 0
+6 62 2593 0.5 91 50 0 ambiguous 0.75 0
+6 63 163 0.125 184 50 50 low 0 0
+6 64 965 0.5 184 50 0 ambiguous 0.25 1
+6 65 927 0.5 176 50 0 ambiguous 0.5 1
+6 66 2085 0.125 176 50 50 low 0 0
+6 67 826 0.375 341 50 341 low 0 1
+6 68 5905 0.5 54 50 50 ambiguous 0.5 0
+6 69 27 0.375 49 50 0 low 0 0
+6 70 2324 0.25 53 50 0 low 0 0
+6 71 606 0.5 648 50 648 ambiguous 0.5 1
+6 72 8977 0.5 53 50 50 ambiguous 0.25 0
+6 73 3788 0.125 337 50 0 low 0 0
+6 74 3013 0.375 176 50 176 low 0 1
+6 75 732 0.5 53 50 50 ambiguous 0.75 0
+6 76 2932 0.5 648 50 0 ambiguous 0.75 1
+6 77 520 0.5 337 50 0 ambiguous 0.25 1
+6 78 4407 0.25 99 50 0 low 0 0
+6 79 5193 0.5 54 50 50 ambiguous 0.25 0
+6 80 3191 0.5 91 50 0 ambiguous 0.5 1
+6 81 390 0.5 176 50 0 ambiguous 0.5 1
+6 82 7450 0.5 341 50 341 ambiguous 0.75 1
+6 83 2018 0.5 337 50 50 ambiguous 0.75 0
+6 84 1206 0.5 184 50 0 ambiguous 0.75 0
+6 85 1363 0.125 648 50 0 low 0 0
+6 86 3957 0.5 338 50 0 ambiguous 0.5 1
+6 87 6344 0.125 176 50 0 low 0 0
+6 88 5897 0.5 99 50 0 ambiguous 0.75 0
+6 89 1421 0.5 338 50 50 ambiguous 0.75 0
+6 90 885 0.5 176 50 0 ambiguous 0.75 0
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/dbdm_exampleData.txt b/Python/hbayesdm/common/extdata/dbdm_exampleData.txt
new file mode 100644
index 00000000..0bb2520d
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/dbdm_exampleData.txt
@@ -0,0 +1,15001 @@
+subjID opt1hprob opt2hprob opt1hval opt1lval opt2hval opt2lval choice
+1 0.9 0.5 -14 -30 30 -43 2
+1 0.3 0.3 18 -15 46 36 2
+1 0.5 0.5 -26 -44 10 -5 2
+1 0.2 0.4 -8 -43 26 17 2
+1 0.3 0.3 30 -37 44 24 2
+1 0.6 0.2 46 -26 10 -14 1
+1 0.8 0.9 48 -49 -12 -30 1
+1 0.8 0.1 -8 -16 48 0 2
+1 0.2 0.5 27 -30 28 27 2
+1 0.1 0.3 -3 -48 2 -34 2
+1 0.6 0.4 -30 -39 49 -31 2
+1 0.1 0.9 29 -4 8 7 2
+1 0.9 0.9 12 -21 27 -13 1
+1 0.9 0.1 -1 -39 43 11 2
+1 0.5 0.2 22 -18 22 -12 2
+1 0.5 0.2 -9 -50 -4 -12 2
+1 0.4 0.2 -22 -45 -12 -49 2
+1 0.2 0.7 39 -4 19 -36 2
+1 0.8 0.3 32 -24 3 -25 1
+1 0.7 0.6 41 0 38 31 2
+1 0.7 0.4 28 5 43 -4 2
+1 0.5 0.3 28 -24 33 -22 1
+1 0.6 0.8 23 -15 -7 -35 1
+1 0.1 0.2 25 -42 -31 -35 1
+1 0.1 0.5 49 -34 3 -9 2
+1 0.6 0.5 38 -16 -42 -49 1
+1 0.6 0.5 6 -46 21 -3 2
+1 0.5 0.9 -18 -50 32 -42 2
+1 0.8 0.3 9 4 42 13 2
+1 0.3 0.8 41 34 -23 -25 1
+1 0.1 0.6 38 30 21 -7 1
+1 0.4 0.3 21 -32 -3 -40 1
+1 0.8 0.6 43 4 33 -40 1
+1 0.2 0.4 2 -4 5 -30 1
+1 0.5 0.9 -11 -37 6 -28 2
+1 0.8 0.8 31 -4 31 9 2
+1 0.2 0.6 33 -26 -4 -44 2
+1 0.8 0.3 43 14 49 9 2
+1 0.8 0.4 33 20 30 18 2
+1 0.8 0.2 19 -35 -5 -41 1
+1 0.4 0.8 7 -9 16 -11 2
+1 0.1 0.3 -8 -41 34 -35 2
+1 0.8 0.5 47 15 8 7 1
+1 0.4 0.9 -1 -8 22 -6 2
+1 0.8 0.8 16 6 34 -43 1
+1 0.5 0.4 22 -44 1 -29 1
+1 0.5 0.4 17 -22 -15 -20 1
+1 0.6 0.7 14 -33 -3 -14 1
+1 0.3 0.6 19 10 -34 -45 1
+1 0.4 0.6 30 18 35 28 2
+1 0.3 0.4 -4 -5 43 -13 2
+1 0.7 0.4 -7 -43 11 0 2
+1 0.4 0.7 24 5 14 -47 1
+1 0.3 0.7 42 14 22 13 1
+1 0.7 0.9 44 8 32 -11 1
+1 0.9 0.4 47 36 -36 -41 1
+1 0.3 0.9 7 -47 15 -26 2
+1 0.9 0.8 4 -39 47 0 2
+1 0.6 0.8 48 -8 28 -46 1
+1 0.6 0.5 47 35 38 12 2
+1 0.9 0.9 35 9 11 -37 1
+1 0.4 0.1 30 -16 -29 -40 1
+1 0.6 0.7 17 -31 -32 -39 1
+1 0.6 0.1 40 32 31 11 1
+1 0.1 0.1 -20 -38 49 -17 2
+1 0.7 0.6 36 -2 -42 -48 1
+1 0.5 0.5 9 -39
-1 -18 2 +1 0.6 0.5 46 -48 49 33 2 +1 0.6 0.7 -34 -46 49 38 2 +1 0.8 0.5 47 39 -5 -44 1 +1 0.5 0.9 41 -32 44 -35 2 +1 0.8 0.4 50 -41 38 6 1 +1 0.8 0.5 14 -24 -30 -43 1 +1 0.7 0.7 27 -32 17 -3 2 +1 0.6 0.1 48 -4 8 4 1 +1 0.6 0.3 10 -10 -22 -30 1 +1 0.3 0.7 3 -45 0 -39 2 +1 0.5 0.4 41 33 45 12 1 +1 0.5 0.1 39 -32 -34 -41 1 +1 0.9 0.5 40 33 10 8 1 +1 0.2 0.3 -2 -17 -4 -35 1 +1 0.6 0.2 25 -13 45 5 1 +1 0.2 0.1 10 -7 19 -23 1 +1 0.9 0.1 49 -21 29 25 2 +1 0.8 0.1 45 19 39 -44 1 +1 0.7 0.3 48 40 48 1 1 +1 0.8 0.7 37 -37 41 28 2 +1 0.3 0.8 26 -20 35 30 2 +1 0.2 0.2 0 -17 14 -36 2 +1 0.8 0.2 20 -19 -4 -29 1 +1 0.5 0.7 -7 -11 -16 -29 2 +1 0.8 0.4 48 -27 -1 -39 1 +1 0.3 0.9 15 -33 18 -14 2 +1 0.6 0.2 -12 -21 -34 -44 1 +1 0.5 0.7 26 1 10 -6 1 +1 0.9 0.1 35 -48 35 -9 1 +1 0.5 0.6 32 1 -4 -5 1 +1 0.7 0.7 28 2 42 -19 2 +1 0.6 0.6 20 3 42 7 2 +1 0.2 0.4 36 -25 16 -28 2 +1 0.1 0.4 12 -7 -10 -48 1 +1 0.7 0.2 -1 -24 47 -4 2 +1 0.3 0.7 -24 -35 33 27 2 +1 0.9 0.8 19 -47 23 -43 1 +1 0.1 0.7 38 -24 15 2 2 +1 0.1 0.4 48 -9 34 -40 1 +1 0.7 0.1 32 -35 23 -14 1 +1 0.1 0.4 23 4 -15 -34 1 +1 0.2 0.8 -9 -13 38 -42 2 +1 0.9 0.3 -35 -44 15 -44 1 +1 0.4 0.2 4 -8 18 -39 1 +1 0.7 0.4 22 17 42 -14 1 +1 0.5 0.5 25 -16 -4 -19 2 +1 0.8 0.2 41 -24 0 -22 1 +1 0.4 0.3 27 22 23 7 1 +1 0.1 0.3 17 5 15 -16 1 +1 0.8 0.5 -7 -41 49 16 2 +1 0.2 0.7 35 3 -6 -36 1 +1 0.3 0.6 19 -15 16 14 2 +1 0.7 0.3 34 14 22 -27 1 +1 0.7 0.3 39 -42 45 43 2 +1 0.2 0.5 47 -32 41 -39 2 +1 0.8 0.1 20 -4 43 29 2 +1 0.4 0.3 42 -3 8 -26 1 +1 0.6 0.9 24 2 -27 -44 1 +1 0.7 0.4 39 -44 47 16 2 +1 0.2 0.9 -2 -14 0 -24 2 +1 0.9 0.7 -24 -50 45 27 2 +1 0.9 0.4 -22 -35 26 -21 2 +1 0.2 0.1 16 11 26 -49 1 +1 0.2 0.6 1 -21 32 16 2 +1 0.1 0.7 -25 -42 50 -8 2 +1 0.7 0.1 -2 -37 -7 -10 1 +1 0.9 0.6 -24 -26 -7 -25 1 +1 0.8 0.1 33 -35 43 -47 1 +1 0.7 0.8 24 -23 49 15 2 +1 0.8 0.8 -15 -20 26 16 2 +1 0.3 0.4 40 -18 14 -47 2 +1 0.1 0.3 23 -17 49 -36 2 +1 0.1 0.2 14 -41 17 -36 2 +1 0.4 0.7 34 3 -20 -44 1 +1 0.5 0.7 -48 -50 38 12 2 +1 0.1 0.2 -20 -47 -20 -45 2 +1 0.4 0.3 41 -16 13 -27 1 +1 0.8 0.4 40 20 29 -12 1 +1 0.6 0.1 -9 -15 -6 -46 1 +1 0.1 0.1 16 -41 48 -24 2 +1 0.9 0.7 7 -50 11 -33 1 +1 0.3 0.1 39 -34 7 -19 1 +1 0.1 0.5 17 -35 -33 -35 1 +1 0.1 0.1 46 38 10 -13 1 +1 0.2 0.2 35 -30 7 3 2 +1 0.7 0.3 -44 -48 6 5 2 +1 0.6 0.8 -33 -36 5 -7 2 +1 0.2 0.4 -42 -45 0 -22 2 +1 0.9 0.1 -15 -49 -6 -33 1 +1 0.4 0.2 46 3 -26 -33 1 +1 0.5 0.9 7 -21 7 -9 2 +1 0.6 0.9 45 3 37 25 2 +1 0.3 0.1 9 1 25 -41 1 +1 0.4 0.6 -3 -10 22 15 2 +1 0.5 0.1 39 -22 4 -28 1 +1 0.2 0.7 4 -35 -12 -14 2 +1 0.4 0.4 17 0 24 -49 1 +1 0.3 0.7 28 6 19 -18 1 +1 0.9 0.7 26 -28 28 -47 1 +1 0.9 0.5 37 -34 16 10 1 +1 0.8 0.2 0 -47 45 43 2 +1 0.1 0.3 44 39 -6 -47 1 +1 0.7 0.6 -24 -33 35 1 2 +1 0.5 0.1 24 5 23 16 2 +1 0.2 0.4 -8 -41 22 -46 2 +1 0.3 0.7 16 4 36 35 2 +1 0.6 0.1 -11 -26 45 44 2 +1 0.7 0.7 -8 -49 48 -48 2 +1 0.2 0.4 36 1 3 -29 1 +1 0.6 0.4 13 -16 18 -17 1 +1 0.9 0.1 43 26 -15 -41 1 +1 0.6 0.7 12 -35 43 29 2 +1 0.9 0.7 36 1 23 -23 1 +1 0.3 0.4 4 -10 28 -26 1 +1 0.8 0.1 -19 -46 33 -30 2 +1 0.2 0.3 43 -31 50 3 2 +1 0.2 0.5 42 -6 1 -40 2 +1 0.8 0.5 24 -31 43 33 2 +1 0.2 0.1 -20 -47 26 -25 2 +1 0.5 0.3 4 -19 50 -48 1 +1 0.7 0.7 31 30 -16 -26 1 +1 0.2 0.2 42 -7 13 -13 2 +1 0.2 0.8 47 -37 25 -23 2 +1 0.6 0.6 46 -4 19 -1 1 +1 0.2 0.5 22 16 31 8 1 +1 0.5 0.8 11 2 -26 -47 1 +1 0.4 0.6 -3 -27 4 -47 1 +1 0.7 0.3 3 -30 46 4 2 +1 0.5 0.2 40 14 50 15 2 +1 0.6 0.6 26 -3 23 -42 1 +1 0.2 0.4 17 -5 48 38 2 +1 0.7 0.1 -41 -50 33 5 2 +1 0.3 0.3 36 -3 38 -16 1 +1 0.8 0.4 17 -37 7 -11 1 +1 0.8 0.4 44 -15 -8 -47 1 +1 0.1 0.9 37 10 34 21 2 
+1 0.8 0.5 -18 -29 5 -16 2 +1 0.8 0.7 48 -25 -5 -8 1 +1 0.6 0.1 36 -25 36 -37 1 +1 0.6 0.6 29 19 8 -19 1 +1 0.8 0.9 16 -26 38 -33 2 +1 0.4 0.4 9 1 42 8 2 +1 0.6 0.3 36 -19 49 43 2 +1 0.3 0.9 23 12 -9 -24 1 +1 0.5 0.2 -2 -34 -9 -32 1 +1 0.9 0.2 -3 -44 42 -3 2 +1 0.6 0.9 41 -47 15 -34 1 +1 0.1 0.5 38 33 -23 -48 1 +1 0.9 0.7 15 -5 23 -19 2 +1 0.5 0.7 34 -29 23 19 2 +1 0.4 0.1 44 -25 3 -27 1 +1 0.4 0.9 26 25 -27 -37 1 +1 0.6 0.4 32 -9 31 -18 1 +1 0.1 0.5 -22 -29 32 -10 2 +1 0.1 0.3 26 10 31 -47 1 +1 0.6 0.5 42 -40 42 -41 1 +1 0.7 0.9 47 -34 40 -28 1 +1 0.8 0.6 -12 -36 20 -16 2 +1 0.9 0.6 25 -31 27 10 1 +1 0.5 0.6 21 -29 -4 -8 1 +1 0.5 0.8 -2 -19 47 41 2 +1 0.4 0.7 37 -14 -5 -8 1 +1 0.1 0.4 4 -17 -4 -27 2 +1 0.5 0.1 0 -49 40 12 2 +1 0.2 0.1 -9 -18 17 -49 1 +1 0.3 0.3 43 -47 30 -16 2 +1 0.8 0.7 39 -3 43 -21 1 +1 0.8 0.2 -28 -33 9 -25 2 +1 0.5 0.2 24 -50 50 5 2 +1 0.2 0.5 33 0 44 -18 2 +1 0.9 0.8 34 1 38 36 2 +1 0.2 0.7 -22 -36 15 -6 2 +1 0.6 0.3 42 16 31 -29 1 +1 0.7 0.9 9 -11 49 30 2 +1 0.6 0.6 43 -22 32 -22 2 +1 0.3 0.4 38 37 41 -39 1 +1 0.9 0.2 32 25 42 -33 1 +1 0.8 0.3 32 30 48 -30 1 +1 0.3 0.7 -4 -30 10 8 2 +1 0.7 0.7 -12 -14 -34 -50 1 +1 0.9 0.8 42 38 31 -40 1 +1 0.4 0.2 4 -43 -8 -11 2 +1 0.1 0.5 13 -16 27 10 2 +1 0.5 0.4 7 -22 5 -46 1 +1 0.3 0.4 45 -31 32 4 2 +1 0.8 0.7 38 -26 45 -27 1 +1 0.9 0.5 -4 -10 48 -7 2 +1 0.6 0.9 20 -43 38 18 2 +1 0.9 0.2 -1 -6 34 -44 1 +1 0.7 0.2 37 2 49 -2 1 +1 0.2 0.6 14 -43 21 -40 2 +1 0.5 0.3 22 -16 42 39 2 +1 0.7 0.1 -15 -45 16 -4 2 +1 0.9 0.6 2 -14 50 -25 2 +1 0.1 0.1 -33 -38 9 0 2 +1 0.2 0.2 -13 -28 26 -28 2 +1 0.9 0.2 35 -38 37 6 1 +1 0.7 0.5 -2 -50 39 -27 2 +1 0.8 0.3 42 -47 40 -20 1 +1 0.4 0.1 9 -9 -10 -46 1 +1 0.4 0.9 -27 -28 45 12 2 +1 0.2 0.8 23 21 40 -18 2 +1 0.8 0.8 9 -49 46 6 2 +1 0.7 0.8 -12 -13 -35 -50 1 +1 0.4 0.2 37 -8 27 -24 1 +1 0.3 0.6 -19 -28 45 -31 2 +1 0.4 0.4 -26 -50 -14 -16 2 +1 0.6 0.9 18 -9 24 19 2 +1 0.2 0.6 17 7 -10 -27 1 +1 0.5 0.8 47 -40 15 -33 2 +1 0.5 0.1 19 6 46 2 1 +1 0.7 0.2 12 -30 27 -8 2 +1 0.5 0.9 31 -32 43 -41 2 +1 0.4 0.7 -35 -45 -27 -45 2 +1 0.2 0.6 45 -13 47 -13 2 +1 0.7 0.2 19 -27 -12 -48 1 +1 0.9 0.9 26 -31 20 -8 2 +1 0.8 0.6 27 24 35 12 1 +1 0.4 0.1 22 -20 30 3 2 +1 0.5 0.5 16 -31 38 -19 1 +1 0.4 0.2 47 44 38 18 1 +1 0.7 0.4 8 -39 50 -18 2 +1 0.7 0.4 19 -25 33 -41 1 +1 0.7 0.9 39 15 23 -42 1 +1 0.8 0.4 8 -39 21 -40 1 +1 0.2 0.7 5 4 47 13 2 +1 0.2 0.5 4 -4 20 -43 1 +1 0.5 0.6 -3 -34 48 34 2 +1 0.5 0.7 16 -11 34 14 2 +1 0.5 0.2 35 -2 27 -44 1 +1 0.4 0.6 -9 -35 24 -36 2 +1 0.8 0.2 28 -21 30 8 1 +1 0.4 0.6 43 -31 13 -33 1 +2 0.2 0.2 8 -22 43 35 2 +2 0.2 0.6 18 -12 -19 -32 1 +2 0.3 0.2 29 -37 28 19 2 +2 0.3 0.1 -39 -45 -7 -16 2 +2 0.3 0.9 34 12 49 25 2 +2 0.9 0.6 43 -25 50 -29 1 +2 0.5 0.1 -13 -35 21 -19 2 +2 0.2 0.5 22 -12 25 -28 2 +2 0.8 0.5 -12 -50 15 8 2 +2 0.2 0.5 50 -5 6 -22 1 +2 0.7 0.2 33 22 4 -37 1 +2 0.1 0.1 3 -47 -15 -25 1 +2 0.5 0.9 19 -34 39 -49 2 +2 0.8 0.3 29 19 4 -41 1 +2 0.9 0.9 26 2 17 5 2 +2 0.1 0.1 -12 -16 45 37 2 +2 0.2 0.9 7 -44 9 -42 2 +2 0.9 0.1 23 -24 39 14 2 +2 0.4 0.8 32 21 29 10 1 +2 0.8 0.3 -37 -49 18 -49 2 +2 0.5 0.4 26 -31 18 -30 1 +2 0.6 0.4 15 -27 19 10 2 +2 0.9 0.9 8 -21 38 33 2 +2 0.8 0.7 30 4 -14 -31 1 +2 0.7 0.4 20 17 21 -38 1 +2 0.4 0.9 21 -40 -11 -29 2 +2 0.2 0.4 25 -8 30 -3 2 +2 0.9 0.6 24 11 7 -12 1 +2 0.3 0.7 44 -17 -14 -48 1 +2 0.8 0.4 25 1 19 -14 1 +2 0.3 0.1 35 -2 4 -20 1 +2 0.2 0.7 2 -42 8 -14 2 +2 0.9 0.3 35 1 39 -50 1 +2 0.3 0.8 13 -4 33 -49 1 +2 0.9 0.2 24 -12 15 -5 1 +2 0.7 0.1 23 -19 -20 -27 1 +2 0.4 0.5 24 -39 49 -6 2 +2 0.9 0.9 50 28 41 -19 1 +2 0.9 0.6 15 -28 -13 
-22 1 +2 0.2 0.2 50 -6 47 42 2 +2 0.4 0.6 10 -38 0 -12 2 +2 0.1 0.7 39 -17 26 4 2 +2 0.2 0.1 38 -49 28 -22 1 +2 0.9 0.2 -11 -25 50 37 2 +2 0.4 0.7 30 -32 34 -39 2 +2 0.7 0.8 6 -22 -22 -50 1 +2 0.9 0.9 9 -14 40 -23 2 +2 0.8 0.6 -43 -46 20 13 2 +2 0.6 0.1 31 -39 15 -16 1 +2 0.8 0.9 -12 -23 15 0 2 +2 0.1 0.3 -4 -19 38 10 2 +2 0.9 0.6 37 -46 24 -27 1 +2 0.4 0.7 16 -32 34 -31 2 +2 0.9 0.9 36 27 14 -32 1 +2 0.2 0.7 1 -49 33 -48 2 +2 0.6 0.8 -7 -29 33 7 2 +2 0.4 0.2 9 -8 29 -24 1 +2 0.3 0.6 -16 -35 45 10 2 +2 0.5 0.6 49 -14 17 -13 1 +2 0.3 0.6 -4 -11 18 -10 2 +2 0.6 0.4 -20 -43 -8 -18 1 +2 0.5 0.7 15 -16 16 11 2 +2 0.1 0.7 32 8 -10 -12 1 +2 0.8 0.3 10 -35 2 -35 1 +2 0.9 0.4 -30 -41 9 -25 2 +2 0.1 0.2 -25 -37 -9 -17 2 +2 0.9 0.7 22 -5 34 -35 1 +2 0.9 0.3 -28 -37 -4 -42 2 +2 0.1 0.7 27 -34 9 -3 2 +2 0.6 0.9 16 6 -45 -49 1 +2 0.6 0.3 33 11 -1 -7 1 +2 0.7 0.5 42 -1 15 -42 1 +2 0.6 0.8 49 25 20 -29 1 +2 0.4 0.3 27 -38 -13 -19 1 +2 0.8 0.5 11 -11 -5 -41 1 +2 0.2 0.8 22 -3 1 -35 1 +2 0.1 0.9 15 -18 -14 -48 1 +2 0.1 0.6 47 33 48 -23 1 +2 0.7 0.1 39 -8 -24 -27 1 +2 0.9 0.7 16 -22 48 -27 2 +2 0.3 0.7 28 14 37 -33 1 +2 0.2 0.8 40 22 28 25 2 +2 0.9 0.2 9 -35 34 -50 1 +2 0.9 0.9 37 -45 40 -45 2 +2 0.9 0.5 -13 -33 30 -26 2 +2 0.5 0.6 32 -23 -3 -45 1 +2 0.1 0.5 34 9 47 -41 1 +2 0.5 0.4 -26 -35 35 24 2 +2 0.5 0.4 6 -15 -4 -47 1 +2 0.1 0.5 45 -28 16 -17 2 +2 0.2 0.4 47 -5 38 -39 2 +2 0.4 0.5 15 -41 40 -12 2 +2 0.5 0.5 49 -21 49 -38 1 +2 0.5 0.5 10 -5 45 -37 2 +2 0.5 0.7 24 19 3 -39 1 +2 0.5 0.9 19 3 -1 -37 1 +2 0.8 0.7 -9 -43 44 -32 2 +2 0.9 0.1 47 27 50 -22 1 +2 0.3 0.8 35 30 27 -32 1 +2 0.4 0.2 25 -25 29 28 2 +2 0.5 0.9 -38 -42 24 11 2 +2 0.5 0.1 -1 -38 46 -47 2 +2 0.2 0.6 26 2 12 -14 1 +2 0.3 0.2 -35 -42 28 7 2 +2 0.9 0.2 7 -37 21 6 1 +2 0.3 0.1 2 -29 40 -38 1 +2 0.1 0.5 7 -47 3 -18 2 +2 0.6 0.4 -14 -50 6 -49 2 +2 0.5 0.3 32 17 45 -31 1 +2 0.5 0.8 -10 -26 5 -48 2 +2 0.7 0.1 -8 -33 26 -10 2 +2 0.7 0.4 43 -26 32 -41 1 +2 0.1 0.1 30 -24 38 -28 1 +2 0.7 0.4 -21 -26 10 -22 2 +2 0.6 0.4 23 17 7 -32 1 +2 0.5 0.2 41 30 33 -37 1 +2 0.2 0.8 -28 -48 34 3 2 +2 0.4 0.2 -13 -30 47 33 2 +2 0.9 0.2 47 25 20 -43 1 +2 0.6 0.4 -3 -32 -7 -34 1 +2 0.2 0.5 -20 -49 2 -22 2 +2 0.7 0.9 9 -19 -12 -44 1 +2 0.7 0.8 1 -20 17 -1 2 +2 0.4 0.9 19 -38 -9 -48 1 +2 0.7 0.3 8 -5 47 38 2 +2 0.7 0.2 35 -4 16 6 1 +2 0.8 0.4 46 9 -27 -43 1 +2 0.2 0.7 -6 -28 40 31 2 +2 0.2 0.2 31 -17 44 21 2 +2 0.7 0.5 9 5 9 -2 1 +2 0.9 0.3 26 -23 14 9 1 +2 0.8 0.3 27 -12 -31 -35 1 +2 0.1 0.7 -23 -25 16 -27 2 +2 0.9 0.9 8 -48 2 -49 1 +2 0.3 0.6 19 -34 -20 -31 1 +2 0.4 0.3 26 -5 8 -31 1 +2 0.4 0.2 40 25 39 -17 1 +2 0.2 0.2 33 -33 40 31 2 +2 0.1 0.1 28 -43 10 -26 2 +2 0.6 0.9 14 6 3 -14 1 +2 0.9 0.1 -2 -37 12 -33 1 +2 0.9 0.5 35 26 44 39 2 +2 0.5 0.9 50 36 35 -13 1 +2 0.3 0.1 6 -44 -10 -36 1 +2 0.8 0.9 47 10 3 -12 1 +2 0.2 0.2 48 -43 -23 -31 2 +2 0.1 0.1 -27 -50 12 4 2 +2 0.3 0.7 33 2 48 7 2 +2 0.4 0.8 24 23 34 -36 1 +2 0.9 0.6 22 -30 25 21 2 +2 0.3 0.5 48 43 -11 -49 1 +2 0.6 0.4 10 -16 1 -9 2 +2 0.4 0.7 41 -32 28 -21 2 +2 0.6 0.9 45 25 14 -5 1 +2 0.4 0.3 43 -49 44 6 2 +2 0.1 0.9 36 -9 12 -30 2 +2 0.9 0.5 -37 -48 42 -29 2 +2 0.8 0.6 42 -42 2 -13 1 +2 0.8 0.3 30 -10 4 -40 1 +2 0.2 0.9 33 9 -21 -46 1 +2 0.3 0.7 -24 -33 -2 -20 2 +2 0.8 0.3 30 18 17 8 1 +2 0.1 0.6 23 -37 26 -39 2 +2 0.5 0.4 31 -50 49 -3 2 +2 0.4 0.1 -4 -33 41 -8 2 +2 0.7 0.1 16 -46 14 -35 1 +2 0.6 0.9 -4 -34 46 2 2 +2 0.6 0.1 -13 -21 10 -8 1 +2 0.9 0.9 3 2 48 -35 2 +2 0.4 0.6 13 -4 43 -43 2 +2 0.6 0.6 -19 -43 24 -18 2 +2 0.2 0.2 20 -14 50 -1 2 +2 0.6 0.5 -42 -48 -24 -27 2 +2 0.8 0.2 31 -20 4 -44 1 +2 0.8 
0.1 8 2 27 -28 1 +2 0.4 0.1 29 1 3 -22 1 +2 0.1 0.8 27 -34 9 -7 2 +2 0.4 0.4 14 -7 45 -43 2 +2 0.9 0.2 7 5 38 -33 1 +2 0.5 0.9 37 -2 42 -26 2 +2 0.4 0.1 45 3 13 -34 1 +2 0.9 0.6 47 -28 39 -15 1 +2 0.4 0.2 1 -3 26 -8 2 +2 0.9 0.7 34 22 23 13 1 +2 0.5 0.9 -2 -34 6 1 2 +2 0.1 0.2 35 -36 44 -29 2 +2 0.9 0.1 -5 -20 42 36 2 +2 0.1 0.3 2 1 35 -20 1 +2 0.6 0.6 -24 -39 39 -9 2 +2 0.9 0.8 6 -35 13 -49 1 +2 0.5 0.7 17 -30 1 -46 2 +2 0.5 0.2 30 -43 34 9 2 +2 0.3 0.1 49 -42 13 -1 2 +2 0.6 0.2 50 19 -16 -29 1 +2 0.1 0.2 36 6 34 3 1 +2 0.9 0.7 -8 -26 12 1 2 +2 0.7 0.3 -15 -35 8 -35 2 +2 0.1 0.3 -27 -47 40 28 2 +2 0.7 0.8 12 -15 -29 -38 1 +2 0.7 0.9 26 -25 -5 -22 1 +2 0.1 0.3 50 35 49 17 1 +2 0.6 0.7 0 -28 26 -12 2 +2 0.6 0.9 8 -8 8 1 2 +2 0.4 0.4 15 13 35 -16 1 +2 0.6 0.8 32 8 47 -8 2 +2 0.4 0.4 48 -7 13 -1 1 +2 0.9 0.9 7 -12 28 -24 2 +2 0.2 0.3 36 11 34 -24 1 +2 0.2 0.1 -36 -46 -34 -46 2 +2 0.3 0.4 41 -28 11 -5 2 +2 0.3 0.9 35 31 22 12 1 +2 0.2 0.7 34 -47 7 4 2 +2 0.3 0.5 -4 -7 23 -36 1 +2 0.6 0.7 30 12 -6 -22 1 +2 0.4 0.5 28 27 12 -35 1 +2 0.5 0.2 -7 -28 27 17 2 +2 0.1 0.8 50 -17 4 -16 2 +2 0.1 0.6 -15 -30 34 19 2 +2 0.8 0.7 19 -19 -17 -32 1 +2 0.3 0.7 24 14 -8 -31 1 +2 0.4 0.3 34 -16 12 -17 1 +2 0.3 0.7 25 -35 43 41 2 +2 0.7 0.8 37 11 39 26 2 +2 0.1 0.7 21 -29 39 -41 2 +2 0.4 0.5 25 14 50 -19 2 +2 0.7 0.9 24 -14 18 -10 1 +2 0.7 0.7 37 -31 13 -1 1 +2 0.8 0.4 3 -26 -20 -48 1 +2 0.9 0.8 35 -14 24 -40 1 +2 0.3 0.5 7 -47 31 -21 2 +2 0.1 0.6 28 -27 -34 -46 2 +2 0.9 0.6 -18 -41 37 -44 2 +2 0.8 0.2 -22 -28 -5 -46 1 +2 0.2 0.4 12 -39 12 -3 2 +2 0.1 0.7 29 -13 -17 -42 1 +2 0.9 0.1 31 26 6 -23 1 +2 0.4 0.2 42 -11 47 -29 1 +2 0.9 0.8 -9 -29 -4 -47 2 +2 0.1 0.9 31 -50 42 2 2 +2 0.6 0.1 6 -14 26 -46 1 +2 0.8 0.9 35 -46 -10 -35 1 +2 0.1 0.8 11 -6 26 -13 2 +2 0.8 0.8 31 -2 22 -15 1 +2 0.7 0.8 22 6 41 -22 2 +2 0.8 0.6 -42 -47 33 -9 2 +2 0.5 0.9 15 -25 1 -22 1 +2 0.8 0.8 -40 -50 11 4 2 +2 0.2 0.7 50 34 5 -24 1 +2 0.1 0.6 7 -17 49 -1 2 +2 0.7 0.8 27 -50 17 -10 2 +2 0.2 0.7 47 -33 23 -46 2 +2 0.6 0.5 27 -37 -18 -31 1 +2 0.5 0.9 -1 -49 -2 -17 2 +2 0.5 0.2 -19 -28 46 -47 2 +2 0.5 0.4 -7 -20 38 4 2 +2 0.3 0.4 20 -4 27 13 2 +2 0.8 0.1 -15 -16 45 -10 2 +2 0.6 0.5 19 1 -21 -42 1 +2 0.1 0.3 5 -4 20 -8 2 +2 0.1 0.7 -36 -37 29 22 2 +2 0.4 0.5 24 0 11 -20 1 +2 0.5 0.3 -42 -48 24 -7 2 +2 0.1 0.7 29 6 15 -35 2 +2 0.4 0.6 -13 -47 48 -2 2 +2 0.5 0.1 36 4 21 -24 1 +2 0.8 0.4 -13 -38 38 -8 2 +2 0.5 0.1 41 -22 -1 -12 1 +2 0.7 0.4 40 30 16 -18 1 +2 0.4 0.3 11 -9 47 38 2 +2 0.4 0.7 -10 -17 2 -11 2 +2 0.9 0.1 -2 -20 28 13 2 +2 0.9 0.6 -8 -38 45 -14 2 +2 0.9 0.1 5 -8 0 -37 2 +2 0.3 0.7 -10 -24 46 19 2 +2 0.9 0.7 -22 -25 -14 -16 2 +2 0.8 0.4 -21 -28 44 -4 2 +2 0.2 0.7 18 -49 46 -17 2 +2 0.2 0.6 50 -49 26 -18 2 +2 0.5 0.6 30 17 50 38 2 +2 0.4 0.7 43 -31 0 -27 1 +2 0.3 0.3 21 -22 35 -49 1 +2 0.2 0.3 25 -43 43 -50 2 +2 0.8 0.5 8 -22 4 -39 1 +2 0.8 0.6 -13 -20 21 -18 2 +2 0.4 0.6 -8 -31 1 -9 2 +2 0.2 0.1 20 -25 -3 -23 1 +2 0.7 0.3 41 -47 46 -35 1 +2 0.2 0.8 23 -48 31 -9 2 +2 0.9 0.8 20 -38 -19 -38 1 +2 0.8 0.5 27 12 47 35 2 +2 0.5 0.8 43 8 19 -45 1 +2 0.7 0.4 -7 -14 7 5 2 +2 0.9 0.5 41 -12 48 29 1 +2 0.5 0.8 29 -34 -21 -49 1 +2 0.1 0.4 1 -6 49 -18 2 +2 0.2 0.2 49 -40 -15 -20 1 +2 0.8 0.6 40 -15 37 -8 1 +2 0.5 0.9 7 -48 -17 -50 1 +2 0.4 0.3 40 15 -6 -49 1 +2 0.4 0.9 36 14 13 -29 1 +3 0.5 0.5 46 32 36 10 1 +3 0.3 0.1 0 -26 -34 -42 1 +3 0.7 0.1 49 25 -29 -39 1 +3 0.5 0.5 3 -18 8 -11 2 +3 0.4 0.8 -12 -40 -17 -50 2 +3 0.2 0.3 49 -14 -33 -39 1 +3 0.3 0.1 -18 -19 30 8 2 +3 0.2 0.4 48 -4 49 30 2 +3 0.1 0.3 42 29 14 -7 1 +3 0.1 0.3 -20 -40 39 1 2 +3 0.9 
0.2 37 -25 4 -9 1 +3 0.8 0.7 -12 -46 25 -11 2 +3 0.6 0.6 15 -41 17 -40 2 +3 0.7 0.8 0 -4 32 9 2 +3 0.2 0.6 47 -4 13 -41 1 +3 0.4 0.7 11 -7 -11 -30 1 +3 0.8 0.3 38 -42 -26 -39 1 +3 0.1 0.7 47 -32 -11 -34 2 +3 0.3 0.3 33 -7 32 -27 2 +3 0.7 0.6 -9 -10 33 -47 2 +3 0.1 0.9 17 -23 11 -2 2 +3 0.9 0.8 -2 -49 36 4 2 +3 0.3 0.3 11 -19 2 -38 1 +3 0.6 0.8 -16 -20 25 -16 2 +3 0.2 0.1 49 -43 18 -39 1 +3 0.3 0.4 31 -49 -12 -50 1 +3 0.5 0.3 42 1 49 -27 1 +3 0.5 0.4 23 -21 29 -47 1 +3 0.3 0.8 0 -35 6 -17 2 +3 0.3 0.4 29 -22 35 6 2 +3 0.7 0.7 -28 -35 -20 -34 1 +3 0.8 0.1 2 -25 39 -5 1 +3 0.4 0.5 18 -37 39 -40 2 +3 0.8 0.7 19 -35 22 1 2 +3 0.5 0.6 -32 -50 -2 -19 2 +3 0.7 0.5 25 -47 35 0 1 +3 0.8 0.5 -12 -27 36 10 2 +3 0.9 0.3 28 9 12 -18 1 +3 0.7 0.2 38 20 42 25 2 +3 0.4 0.7 36 -20 21 -12 2 +3 0.7 0.4 25 -36 -9 -24 1 +3 0.9 0.6 34 -24 29 27 2 +3 0.7 0.5 42 -14 49 30 2 +3 0.7 0.6 -12 -18 -10 -49 1 +3 0.1 0.3 -8 -30 29 -11 2 +3 0.6 0.2 -13 -42 42 -1 2 +3 0.4 0.1 -14 -31 -23 -46 1 +3 0.5 0.9 37 -32 33 15 2 +3 0.9 0.8 -6 -9 43 -27 2 +3 0.3 0.3 11 -49 39 -27 1 +3 0.7 0.8 19 -28 8 -23 1 +3 0.5 0.4 -32 -46 22 -48 2 +3 0.7 0.2 22 -30 36 30 2 +3 0.3 0.3 13 -4 10 -36 1 +3 0.5 0.6 19 -42 35 -34 2 +3 0.1 0.2 49 -19 -16 -47 1 +3 0.1 0.5 50 37 32 -17 1 +3 0.1 0.3 -43 -45 28 -25 2 +3 0.3 0.3 24 -7 34 -45 1 +3 0.1 0.1 39 21 -22 -36 1 +3 0.4 0.7 28 24 -26 -33 1 +3 0.6 0.8 -13 -32 13 -34 2 +3 0.4 0.2 -42 -50 20 3 2 +3 0.4 0.9 41 32 35 9 1 +3 0.5 0.2 18 -38 -48 -50 1 +3 0.4 0.6 49 26 32 14 1 +3 0.3 0.1 -3 -49 18 -36 2 +3 0.9 0.7 42 33 -20 -30 1 +3 0.4 0.9 -5 -27 -15 -42 2 +3 0.8 0.9 -15 -43 3 -10 2 +3 0.7 0.3 -7 -43 -23 -27 1 +3 0.1 0.6 40 -50 10 -43 2 +3 0.2 0.3 11 6 37 29 2 +3 0.7 0.1 50 -3 28 -5 1 +3 0.5 0.6 47 43 16 0 1 +3 0.9 0.1 21 -2 48 -50 1 +3 0.5 0.4 9 -5 -1 -2 2 +3 0.5 0.1 -4 -36 28 16 2 +3 0.8 0.5 -1 -3 44 5 2 +3 0.5 0.3 5 -7 33 11 2 +3 0.8 0.8 50 -21 47 -8 1 +3 0.6 0.5 -11 -12 -5 -14 2 +3 0.8 0.1 49 -23 8 -17 1 +3 0.4 0.8 48 -15 13 -8 1 +3 0.3 0.6 46 -47 -9 -44 2 +3 0.5 0.1 -34 -44 37 -25 2 +3 0.7 0.3 49 -43 26 -3 1 +3 0.8 0.1 15 -30 37 -9 1 +3 0.6 0.3 49 -26 50 -41 1 +3 0.1 0.2 8 -6 11 -31 1 +3 0.9 0.3 34 29 -7 -50 1 +3 0.3 0.4 46 12 44 7 1 +3 0.7 0.7 31 5 37 -5 1 +3 0.5 0.4 15 -24 50 -33 2 +3 0.6 0.6 -12 -17 13 -20 2 +3 0.3 0.3 40 -50 28 13 2 +3 0.1 0.5 21 -6 38 -32 2 +3 0.2 0.8 -30 -40 -3 -5 2 +3 0.4 0.6 12 -50 21 -43 2 +3 0.3 0.5 -5 -32 30 -4 2 +3 0.3 0.5 33 -6 11 -3 2 +3 0.6 0.4 20 -6 -11 -40 1 +3 0.5 0.9 19 -19 38 30 2 +3 0.2 0.5 41 24 14 -44 1 +3 0.8 0.4 -26 -49 20 -20 2 +3 0.8 0.4 11 -5 -22 -26 1 +3 0.8 0.6 44 -34 5 4 1 +3 0.7 0.2 25 10 32 -7 1 +3 0.5 0.2 45 31 24 11 1 +3 0.9 0.6 47 -23 27 -21 1 +3 0.7 0.9 42 27 32 16 2 +3 0.9 0.2 37 26 14 -19 1 +3 0.3 0.2 28 -44 47 -3 2 +3 0.2 0.6 42 10 30 23 1 +3 0.7 0.2 10 -48 -8 -18 1 +3 0.1 0.7 39 21 45 42 1 +3 0.1 0.8 28 -36 32 -46 2 +3 0.9 0.1 -35 -48 12 -15 2 +3 0.3 0.6 -1 -14 35 28 2 +3 0.2 0.5 34 1 48 -44 2 +3 0.3 0.2 34 -27 49 21 2 +3 0.7 0.6 -18 -49 -18 -46 1 +3 0.5 0.1 8 -47 29 -7 2 +3 0.9 0.7 47 -45 7 -7 1 +3 0.8 0.7 48 -30 5 -39 1 +3 0.3 0.7 9 -26 32 -35 2 +3 0.1 0.6 0 -16 33 -9 2 +3 0.1 0.6 35 19 21 -46 1 +3 0.3 0.6 -10 -44 -5 -9 2 +3 0.8 0.5 -24 -44 20 -43 2 +3 0.7 0.2 21 -37 19 -35 1 +3 0.6 0.6 39 31 -23 -26 1 +3 0.2 0.9 40 -47 45 36 2 +3 0.6 0.8 27 -6 24 22 1 +3 0.1 0.5 38 -45 39 -7 2 +3 0.3 0.5 -1 -2 11 -50 1 +3 0.5 0.7 -38 -42 30 -31 2 +3 0.2 0.1 29 8 28 -12 1 +3 0.1 0.5 10 -3 44 1 2 +3 0.2 0.2 29 27 41 -21 1 +3 0.6 0.1 24 -35 -6 -37 1 +3 0.1 0.7 14 -39 21 -45 2 +3 0.3 0.6 -26 -39 26 -43 2 +3 0.1 0.4 7 -17 -35 -45 1 +3 0.2 0.8 16 -50 46 -30 2 +3 0.6 
0.9 29 -47 -15 -17 1 +3 0.8 0.9 46 -27 45 -5 2 +3 0.4 0.4 12 11 -5 -47 1 +3 0.4 0.1 48 42 18 -36 1 +3 0.5 0.3 -11 -23 50 -35 2 +3 0.4 0.5 29 -35 47 -21 2 +3 0.3 0.5 19 -21 -40 -49 1 +3 0.4 0.1 42 -1 -37 -50 1 +3 0.4 0.9 13 -17 -2 -11 1 +3 0.2 0.7 37 -4 -19 -22 1 +3 0.9 0.5 33 -28 33 -6 1 +3 0.1 0.7 15 -28 -41 -49 1 +3 0.4 0.7 41 -40 48 39 2 +3 0.4 0.1 28 0 32 26 2 +3 0.8 0.1 -13 -27 -19 -28 1 +3 0.6 0.6 4 -13 31 1 2 +3 0.2 0.5 -18 -42 47 26 2 +3 0.4 0.1 43 40 -20 -28 1 +3 0.6 0.6 14 -50 -34 -35 1 +3 0.3 0.3 -2 -37 50 -41 2 +3 0.9 0.8 24 18 46 -48 1 +3 0.5 0.5 7 2 41 -23 2 +3 0.8 0.6 26 20 29 24 1 +3 0.4 0.7 31 -26 31 -8 2 +3 0.9 0.1 14 -1 -33 -50 1 +3 0.4 0.2 -28 -30 37 -41 2 +3 0.3 0.6 41 -42 37 29 2 +3 0.5 0.6 29 -50 42 6 2 +3 0.1 0.7 31 -42 47 -12 2 +3 0.2 0.9 22 -36 -2 -5 1 +3 0.2 0.7 49 21 24 -25 1 +3 0.8 0.5 48 31 7 -21 1 +3 0.2 0.3 -12 -38 14 -22 2 +3 0.7 0.5 39 27 12 -28 1 +3 0.9 0.7 4 -26 9 -16 1 +3 0.4 0.6 -8 -37 34 16 2 +3 0.7 0.4 3 -50 2 -27 1 +3 0.2 0.1 4 -14 27 26 2 +3 0.9 0.7 -33 -36 43 -36 2 +3 0.1 0.2 -27 -40 22 5 2 +3 0.4 0.7 31 -13 6 -45 1 +3 0.3 0.5 24 -3 -38 -44 1 +3 0.7 0.7 48 -46 3 -41 1 +3 0.5 0.7 2 -9 30 2 2 +3 0.1 0.7 46 14 26 -49 1 +3 0.8 0.5 -34 -42 48 24 2 +3 0.8 0.1 37 -25 37 -14 1 +3 0.1 0.2 38 30 12 -2 1 +3 0.2 0.3 -3 -50 35 -27 2 +3 0.2 0.9 -28 -39 48 5 2 +3 0.4 0.9 27 -9 -12 -30 1 +3 0.6 0.1 -7 -27 25 7 2 +3 0.7 0.7 -36 -42 9 -27 2 +3 0.2 0.6 22 -38 40 11 2 +3 0.4 0.4 42 -45 50 -34 2 +3 0.3 0.5 50 -6 38 -40 1 +3 0.9 0.4 40 -11 40 -18 1 +3 0.3 0.6 -7 -13 34 -25 2 +3 0.5 0.6 36 -14 -7 -15 1 +3 0.5 0.6 36 5 -11 -28 1 +3 0.2 0.2 43 31 9 -5 1 +3 0.9 0.2 0 -42 34 -49 1 +3 0.4 0.4 31 13 4 -4 1 +3 0.8 0.1 -1 -30 -21 -38 1 +3 0.6 0.1 28 -9 24 -43 1 +3 0.9 0.9 -4 -21 13 -24 2 +3 0.2 0.7 7 -50 48 33 2 +3 0.7 0.7 -17 -46 33 -40 2 +3 0.2 0.5 22 -43 31 -14 2 +3 0.5 0.9 -4 -14 18 -18 2 +3 0.6 0.4 -10 -13 -45 -48 1 +3 0.3 0.4 9 -32 -15 -44 2 +3 0.3 0.7 41 7 0 -1 1 +3 0.3 0.9 5 -19 -20 -28 1 +3 0.5 0.1 41 -42 -30 -31 1 +3 0.8 0.7 -5 -45 30 -48 2 +3 0.8 0.4 -8 -44 23 -5 2 +3 0.5 0.1 -22 -28 -36 -39 1 +3 0.7 0.6 -2 -48 33 -28 2 +3 0.3 0.7 15 -11 7 -36 2 +3 0.6 0.4 25 -25 50 16 2 +3 0.6 0.2 19 -37 34 -8 1 +3 0.2 0.9 -8 -10 30 14 2 +3 0.4 0.1 31 -23 44 -45 1 +3 0.6 0.3 2 -28 44 -47 1 +3 0.7 0.6 -21 -47 -4 -9 2 +3 0.1 0.3 47 33 -45 -47 1 +3 0.2 0.1 43 12 34 2 1 +3 0.3 0.3 26 6 -21 -27 1 +3 0.2 0.2 -30 -41 48 9 2 +3 0.6 0.5 32 29 27 25 1 +3 0.3 0.5 18 -16 4 -34 1 +3 0.8 0.3 39 33 3 -13 1 +3 0.7 0.2 5 -45 1 -3 1 +3 0.1 0.1 -33 -34 32 7 2 +3 0.5 0.7 40 23 4 -15 1 +3 0.1 0.7 26 -21 16 -27 2 +3 0.7 0.4 -18 -27 42 -40 2 +3 0.6 0.4 -8 -49 48 -36 2 +3 0.9 0.8 29 -3 -43 -49 1 +3 0.5 0.9 34 -5 41 -15 2 +3 0.2 0.1 -10 -16 34 -22 2 +3 0.2 0.8 27 22 31 -37 2 +3 0.9 0.9 -17 -29 37 -22 2 +3 0.5 0.9 -9 -32 45 38 2 +3 0.8 0.6 36 10 12 -22 1 +3 0.3 0.2 -6 -32 19 -36 2 +3 0.2 0.7 7 -6 16 -35 2 +3 0.2 0.6 45 -43 37 36 2 +3 0.3 0.6 41 -33 23 -14 2 +3 0.5 0.6 -7 -28 -2 -24 2 +3 0.8 0.7 44 12 14 -14 1 +3 0.9 0.6 32 -49 46 -39 1 +3 0.8 0.2 -34 -37 -6 -41 2 +3 0.3 0.1 47 4 -6 -42 1 +3 0.9 0.9 37 -17 29 -26 1 +3 0.6 0.3 -16 -18 44 -46 1 +3 0.4 0.5 4 -45 7 -12 2 +3 0.5 0.8 -27 -48 38 -50 2 +3 0.6 0.6 10 -27 28 -30 2 +3 0.3 0.8 -18 -42 -22 -31 1 +3 0.8 0.7 39 -12 -34 -49 1 +3 0.6 0.7 46 -37 43 4 2 +3 0.2 0.6 30 -6 -30 -32 1 +3 0.5 0.2 38 22 36 -26 1 +3 0.3 0.6 -27 -39 50 46 2 +3 0.8 0.2 -20 -27 50 -25 2 +3 0.9 0.5 27 -33 38 -23 1 +3 0.4 0.9 24 -43 -14 -15 1 +3 0.4 0.4 33 17 33 -1 1 +3 0.8 0.7 28 25 -30 -48 1 +3 0.7 0.4 -12 -31 5 -46 2 +3 0.8 0.2 31 9 47 3 1 +3 0.4 0.8 -9 -18 20 5 2 +3 0.9 0.9 -2 -49 8 -25 
2 +3 0.8 0.9 33 -19 -3 -35 1 +3 0.6 0.3 36 32 25 17 1 +3 0.4 0.5 12 4 28 12 2 +3 0.3 0.6 22 6 5 -16 1 +3 0.1 0.4 44 0 15 -35 1 +3 0.6 0.9 17 14 -8 -10 1 +3 0.6 0.4 40 15 16 -32 1 +3 0.2 0.4 -1 -50 -4 -17 2 +3 0.4 0.4 -15 -39 44 5 2 +3 0.6 0.7 41 -48 14 -9 2 +3 0.2 0.6 11 -42 16 -2 2 +3 0.6 0.3 42 -5 33 -8 1 +3 0.2 0.5 45 36 20 -31 1 +3 0.8 0.8 24 18 35 32 2 +3 0.2 0.3 47 22 33 -19 1 +3 0.8 0.8 9 0 -13 -20 1 +3 0.8 0.6 -27 -42 42 -45 2 +3 0.3 0.4 32 8 31 18 2 +3 0.6 0.6 49 -21 39 12 2 +3 0.5 0.8 48 24 -5 -49 1 +4 0.9 0.3 -20 -48 17 6 2 +4 0.8 0.7 13 -33 -2 -13 1 +4 0.8 0.6 -24 -26 28 -48 2 +4 0.6 0.2 1 -19 -12 -46 1 +4 0.3 0.2 10 -44 -26 -38 1 +4 0.9 0.8 -12 -20 15 -50 2 +4 0.1 0.2 12 -27 34 13 2 +4 0.2 0.3 10 -30 23 -15 2 +4 0.7 0.5 29 -13 38 9 2 +4 0.1 0.9 23 16 45 -27 2 +4 0.6 0.3 11 -46 9 -46 1 +4 0.1 0.5 -36 -43 32 15 2 +4 0.8 0.5 -7 -35 22 -30 2 +4 0.6 0.7 50 24 -3 -43 1 +4 0.6 0.5 49 4 -41 -46 1 +4 0.7 0.5 39 38 45 -15 1 +4 0.1 0.8 50 -2 7 -26 2 +4 0.8 0.3 23 -14 34 6 2 +4 0.2 0.8 22 3 -8 -23 2 +4 0.5 0.5 33 -9 -19 -25 1 +4 0.8 0.7 48 6 32 8 1 +4 0.7 0.6 48 -26 48 5 2 +4 0.7 0.6 9 -25 -22 -39 1 +4 0.5 0.6 41 -21 -28 -41 1 +4 0.8 0.7 19 11 47 -32 1 +4 0.6 0.7 22 0 -21 -44 1 +4 0.9 0.1 -9 -44 50 -32 2 +4 0.3 0.3 1 -22 32 12 2 +4 0.4 0.7 22 -16 27 -12 2 +4 0.7 0.6 14 -10 27 -8 2 +4 0.4 0.3 50 31 2 -2 1 +4 0.1 0.8 47 -36 -22 -32 2 +4 0.7 0.9 1 -3 37 -30 2 +4 0.3 0.9 36 8 23 10 1 +4 0.1 0.3 28 27 -36 -45 1 +4 0.5 0.3 42 -27 46 1 2 +4 0.9 0.5 28 10 29 20 2 +4 0.1 0.9 10 -17 25 18 2 +4 0.2 0.3 37 23 -24 -41 1 +4 0.2 0.2 18 -40 42 -48 2 +4 0.3 0.6 18 -27 41 39 2 +4 0.4 0.7 21 6 -1 -50 1 +4 0.4 0.4 15 14 20 -35 1 +4 0.1 0.8 13 -49 44 33 2 +4 0.3 0.2 -31 -42 30 -45 2 +4 0.6 0.4 -12 -23 22 -42 2 +4 0.9 0.1 -26 -40 -25 -30 2 +4 0.7 0.2 38 -27 34 -13 1 +4 0.9 0.1 42 -31 45 -1 1 +4 0.5 0.8 41 -3 37 -12 1 +4 0.2 0.5 31 2 17 -44 1 +4 0.8 0.8 -4 -31 16 -15 2 +4 0.9 0.3 24 1 15 -4 1 +4 0.8 0.4 9 -22 28 -5 2 +4 0.9 0.3 47 -24 17 -16 1 +4 0.8 0.4 -12 -14 -13 -21 2 +4 0.1 0.9 1 -50 4 -36 2 +4 0.6 0.7 10 -36 -16 -22 1 +4 0.5 0.2 26 13 24 -31 1 +4 0.4 0.9 1 -15 18 -41 2 +4 0.1 0.5 41 32 -33 -47 1 +4 0.6 0.4 4 -25 -12 -50 1 +4 0.6 0.2 -2 -22 25 5 2 +4 0.5 0.3 -2 -8 34 31 2 +4 0.9 0.9 6 -26 -6 -45 1 +4 0.1 0.2 23 0 38 -6 2 +4 0.8 0.7 -25 -40 9 -37 1 +4 0.5 0.2 -5 -14 48 -18 2 +4 0.2 0.1 6 -34 0 -50 1 +4 0.9 0.1 22 -2 -7 -47 1 +4 0.5 0.5 41 29 41 23 1 +4 0.3 0.5 2 -4 20 -24 2 +4 0.5 0.9 -14 -42 2 -13 2 +4 0.9 0.9 46 16 49 2 1 +4 0.9 0.3 49 39 -16 -29 1 +4 0.1 0.2 36 -28 40 -15 2 +4 0.5 0.9 3 -7 35 15 2 +4 0.1 0.4 3 -18 32 -12 2 +4 0.7 0.6 22 -34 18 4 2 +4 0.8 0.2 5 -6 16 -14 1 +4 0.3 0.9 24 9 -9 -32 1 +4 0.4 0.9 23 13 11 -46 1 +4 0.9 0.5 38 -6 -5 -32 1 +4 0.7 0.5 -23 -48 -26 -32 2 +4 0.2 0.8 19 -14 48 31 2 +4 0.3 0.2 50 39 33 30 1 +4 0.5 0.8 -33 -50 35 -45 2 +4 0.8 0.3 30 -5 32 -33 1 +4 0.1 0.8 -31 -40 35 4 2 +4 0.9 0.1 41 10 4 -49 1 +4 0.7 0.3 -9 -30 41 -26 2 +4 0.9 0.5 -20 -31 -25 -41 1 +4 0.3 0.8 45 -15 28 -17 2 +4 0.5 0.4 -24 -37 36 32 2 +4 0.8 0.1 18 -45 19 -10 1 +4 0.8 0.7 37 -19 -10 -40 1 +4 0.1 0.6 46 13 -30 -39 1 +4 0.3 0.4 15 -4 46 17 2 +4 0.4 0.4 -12 -32 45 38 2 +4 0.5 0.4 36 -19 -11 -13 1 +4 0.3 0.5 47 32 -21 -47 1 +4 0.3 0.3 -2 -25 -33 -34 1 +4 0.4 0.2 39 1 25 -35 1 +4 0.3 0.9 32 -32 35 21 2 +4 0.2 0.3 -9 -26 36 -4 2 +4 0.9 0.1 -10 -21 9 -3 2 +4 0.2 0.7 47 28 11 -22 1 +4 0.1 0.8 38 -4 39 38 2 +4 0.8 0.2 -28 -36 36 17 2 +4 0.6 0.6 10 -50 -2 -42 1 +4 0.7 0.4 37 -5 5 -44 1 +4 0.9 0.6 22 -36 26 -12 1 +4 0.8 0.5 11 -2 49 -41 1 +4 0.2 0.9 8 -25 29 -49 2 +4 0.6 0.3 -39 -49 6 -42 2 +4 0.7 0.9 44 -3 44 
-33 2 +4 0.2 0.4 48 43 46 -34 1 +4 0.2 0.4 33 -33 -16 -36 1 +4 0.5 0.4 47 -48 19 12 2 +4 0.3 0.8 50 49 18 -3 1 +4 0.2 0.6 -11 -34 24 -21 2 +4 0.2 0.3 -33 -39 -20 -21 2 +4 0.5 0.8 -23 -46 -30 -43 2 +4 0.3 0.5 32 23 34 7 2 +4 0.3 0.5 -15 -50 -32 -37 2 +4 0.1 0.9 7 -25 46 7 2 +4 0.2 0.9 -40 -48 19 -1 2 +4 0.4 0.6 41 -21 -23 -25 1 +4 0.9 0.5 -29 -36 13 -27 2 +4 0.4 0.7 37 34 32 21 1 +4 0.2 0.3 21 2 -39 -49 1 +4 0.5 0.9 39 3 6 -24 1 +4 0.3 0.9 37 -19 -2 -17 1 +4 0.4 0.2 50 -8 -18 -41 1 +4 0.5 0.7 -7 -29 16 -41 2 +4 0.2 0.2 27 -32 20 -31 1 +4 0.3 0.5 32 -7 24 -11 2 +4 0.5 0.6 -25 -40 -28 -33 2 +4 0.4 0.3 42 30 45 -19 1 +4 0.8 0.4 25 -26 -4 -20 1 +4 0.4 0.3 9 -50 36 -14 2 +4 0.2 0.1 6 -30 -8 -33 1 +4 0.7 0.1 4 -14 3 -12 1 +4 0.4 0.7 32 10 47 -36 2 +4 0.5 0.4 36 15 24 -3 1 +4 0.8 0.2 23 7 10 -16 1 +4 0.6 0.7 14 -44 19 14 2 +4 0.7 0.4 -27 -39 -41 -43 2 +4 0.4 0.6 50 -19 -25 -42 1 +4 0.2 0.6 1 -34 29 -29 2 +4 0.2 0.3 16 -49 7 -32 1 +4 0.6 0.6 5 -39 12 -43 1 +4 0.4 0.2 28 -36 32 -24 1 +4 0.9 0.9 17 -9 5 0 1 +4 0.4 0.4 23 -47 -9 -37 1 +4 0.4 0.9 45 -44 28 -37 2 +4 0.8 0.6 21 12 -14 -17 1 +4 0.1 0.2 33 16 37 -24 1 +4 0.7 0.9 -31 -35 50 13 2 +4 0.5 0.7 -4 -17 -24 -35 1 +4 0.2 0.3 45 40 14 -15 1 +4 0.1 0.5 39 -10 -13 -21 1 +4 0.4 0.8 -3 -24 27 15 2 +4 0.2 0.5 16 0 37 -46 2 +4 0.6 0.4 3 -41 49 27 2 +4 0.7 0.6 21 5 11 -8 1 +4 0.6 0.4 48 -4 -12 -37 1 +4 0.9 0.7 18 -49 30 -22 2 +4 0.2 0.6 -28 -32 23 -35 2 +4 0.3 0.9 48 -41 40 -27 2 +4 0.6 0.9 1 -1 4 -30 1 +4 0.1 0.5 1 0 33 3 2 +4 0.5 0.9 -8 -33 33 -46 2 +4 0.1 0.4 -29 -37 22 4 2 +4 0.4 0.3 20 -8 48 -12 2 +4 0.9 0.2 15 -38 36 2 1 +4 0.1 0.3 3 -37 34 -22 2 +4 0.1 0.4 25 -48 26 -20 2 +4 0.7 0.5 -1 -50 36 -11 2 +4 0.8 0.3 50 -25 49 48 2 +4 0.3 0.9 -4 -36 29 -5 2 +4 0.8 0.1 47 21 12 -33 1 +4 0.1 0.6 45 -31 33 26 2 +4 0.5 0.7 37 -8 7 3 1 +4 0.1 0.5 12 -29 19 -36 1 +4 0.1 0.4 34 27 -14 -26 1 +4 0.1 0.1 32 30 23 -46 1 +4 0.7 0.7 45 -14 33 -40 1 +4 0.3 0.7 -1 -22 19 -46 2 +4 0.7 0.9 -30 -45 -14 -33 2 +4 0.3 0.9 -39 -44 15 -36 2 +4 0.7 0.1 23 -35 13 -35 1 +4 0.2 0.3 16 -42 48 -1 2 +4 0.2 0.1 20 -38 38 -7 2 +4 0.1 0.9 -33 -38 47 -2 2 +4 0.3 0.9 47 -44 19 -36 2 +4 0.7 0.4 40 27 49 46 2 +4 0.5 0.3 -3 -42 24 9 2 +4 0.5 0.9 9 -44 15 -19 2 +4 0.6 0.3 -19 -33 -24 -26 2 +4 0.4 0.5 -32 -48 27 -1 2 +4 0.1 0.4 38 -15 -26 -35 2 +4 0.1 0.2 39 -6 -14 -22 1 +4 0.8 0.5 40 -35 17 -21 1 +4 0.6 0.1 24 17 7 -28 1 +4 0.4 0.5 36 12 45 -10 1 +4 0.2 0.2 49 -23 -6 -18 2 +4 0.7 0.1 47 -22 45 -43 1 +4 0.1 0.3 29 17 21 16 1 +4 0.2 0.7 16 -33 16 -37 2 +4 0.5 0.8 7 -9 15 -44 2 +4 0.8 0.2 40 -24 -4 -45 1 +4 0.2 0.6 -11 -34 21 -17 2 +4 0.8 0.6 7 -32 34 27 2 +4 0.2 0.3 -17 -40 46 -45 2 +4 0.6 0.8 16 -6 6 -44 1 +4 0.2 0.7 17 6 12 8 1 +4 0.5 0.4 -2 -23 34 -38 2 +4 0.9 0.1 -30 -34 19 -45 2 +4 0.9 0.6 42 -23 44 -46 1 +4 0.5 0.8 40 -24 -16 -35 1 +4 0.4 0.1 11 -1 -11 -38 1 +4 0.4 0.2 -6 -40 27 -34 2 +4 0.7 0.7 21 -12 23 -38 1 +4 0.6 0.1 47 -14 9 -24 1 +4 0.3 0.9 36 -23 16 -26 1 +4 0.8 0.3 29 -20 35 -14 1 +4 0.4 0.1 29 -33 21 -42 1 +4 0.9 0.2 -27 -43 11 -48 2 +4 0.2 0.1 22 -25 29 -13 2 +4 0.9 0.6 14 -8 -12 -23 1 +4 0.6 0.1 -13 -18 1 -23 1 +4 0.3 0.5 22 -20 -2 -48 1 +4 0.4 0.1 24 13 38 -43 1 +4 0.9 0.7 40 -30 44 38 2 +4 0.3 0.7 1 -26 -4 -41 1 +4 0.2 0.6 20 -6 -22 -29 1 +4 0.6 0.9 -9 -16 37 32 2 +4 0.7 0.7 6 -8 40 2 2 +4 0.6 0.6 10 -21 39 18 2 +4 0.1 0.5 -3 -33 2 -47 2 +4 0.9 0.3 -20 -43 37 11 2 +4 0.9 0.1 27 -20 5 -14 1 +4 0.9 0.2 3 -19 -41 -49 1 +4 0.2 0.1 18 -5 45 -5 2 +4 0.8 0.1 -13 -36 -10 -38 2 +4 0.4 0.6 45 34 40 -17 1 +4 0.5 0.9 36 17 -2 -36 1 +4 0.1 0.1 14 1 -17 -38 1 +4 0.4 0.1 6 -10 -1 -49 1 +4 0.4 
0.2 -10 -45 16 -13 2 +4 0.8 0.9 -11 -18 4 -38 2 +4 0.2 0.5 4 -49 8 1 2 +4 0.1 0.4 -32 -39 41 27 2 +4 0.1 0.5 14 1 -8 -21 1 +4 0.3 0.8 7 0 -18 -24 1 +4 0.9 0.1 24 -15 13 -40 1 +4 0.8 0.9 33 -36 -2 -48 1 +4 0.8 0.7 -42 -45 -17 -36 2 +4 0.8 0.8 31 20 -29 -31 1 +4 0.8 0.1 31 -25 -14 -45 1 +4 0.5 0.8 29 -28 28 -37 2 +4 0.6 0.3 -4 -23 -38 -43 1 +4 0.8 0.2 36 -6 47 -35 1 +4 0.4 0.5 -16 -41 48 22 2 +4 0.5 0.3 -10 -20 -21 -23 1 +4 0.9 0.3 17 -17 35 -17 1 +4 0.4 0.2 12 5 47 11 2 +4 0.8 0.8 10 -36 -23 -47 1 +4 0.6 0.6 24 -20 11 2 2 +4 0.9 0.5 35 -38 17 -26 1 +4 0.1 0.9 9 -5 -32 -40 1 +4 0.9 0.8 -37 -45 11 -28 2 +4 0.1 0.2 13 -35 29 -46 1 +4 0.2 0.2 36 23 33 -4 1 +4 0.9 0.2 38 5 -4 -39 1 +4 0.6 0.7 8 -35 43 40 2 +4 0.6 0.2 16 -14 23 19 2 +4 0.8 0.2 43 15 -21 -23 1 +4 0.5 0.3 50 -45 11 -42 1 +4 0.3 0.9 8 -18 6 -8 2 +4 0.8 0.7 -34 -49 11 -7 2 +4 0.5 0.6 22 -12 44 19 2 +4 0.8 0.1 2 -15 22 -29 2 +4 0.4 0.9 14 -20 27 -36 2 +4 0.8 0.7 6 -18 42 -23 2 +4 0.7 0.6 3 -17 33 19 2 +4 0.3 0.3 7 -23 3 -17 1 +4 0.4 0.6 29 -21 -26 -32 1 +4 0.6 0.3 15 -4 14 -45 1 +4 0.4 0.9 -10 -25 24 17 2 +4 0.7 0.5 5 -41 15 -29 2 +4 0.3 0.9 40 -23 10 -3 2 +4 0.4 0.5 -1 -20 -3 -11 2 +4 0.7 0.5 45 9 32 -22 1 +4 0.2 0.7 18 -37 42 -15 2 +4 0.6 0.9 16 -36 29 -32 2 +4 0.3 0.2 5 -41 44 -43 2 +4 0.2 0.6 23 -31 43 -33 2 +4 0.3 0.4 43 9 -4 -31 1 +5 0.5 0.9 6 -44 -11 -14 1 +5 0.1 0.6 43 -4 34 -9 2 +5 0.7 0.7 24 -25 -19 -21 1 +5 0.9 0.3 -9 -14 22 -31 1 +5 0.7 0.3 2 -16 30 -18 1 +5 0.4 0.3 28 -27 35 26 2 +5 0.8 0.3 27 -15 -2 -39 1 +5 0.5 0.2 16 -40 18 -43 1 +5 0.5 0.4 26 -33 20 15 2 +5 0.7 0.1 8 -33 -18 -34 1 +5 0.7 0.9 15 -24 32 9 1 +5 0.7 0.4 -1 -50 30 -2 2 +5 0.9 0.6 43 2 42 -10 2 +5 0.6 0.7 12 -48 46 40 2 +5 0.9 0.7 49 -40 49 26 1 +5 0.2 0.3 47 -20 27 -12 2 +5 0.6 0.3 41 20 -22 -44 1 +5 0.5 0.4 32 -5 7 4 2 +5 0.3 0.4 1 -10 15 -9 2 +5 0.6 0.6 45 43 37 30 2 +5 0.6 0.1 39 33 -31 -45 1 +5 0.5 0.6 48 -47 39 -32 2 +5 0.1 0.9 46 40 27 -9 1 +5 0.6 0.3 -15 -28 16 -38 1 +5 0.9 0.3 50 -24 -9 -21 1 +5 0.3 0.2 44 -26 23 -5 1 +5 0.1 0.6 46 16 32 4 2 +5 0.8 0.4 30 8 -6 -33 1 +5 0.5 0.4 50 -32 -15 -19 1 +5 0.9 0.4 1 -34 9 -22 2 +5 0.6 0.3 43 5 -5 -30 1 +5 0.4 0.3 44 0 45 21 2 +5 0.8 0.4 9 -5 47 -1 2 +5 0.6 0.1 17 7 47 -5 1 +5 0.5 0.7 2 -2 29 -20 2 +5 0.7 0.7 35 32 38 7 1 +5 0.6 0.3 15 -18 25 -31 1 +5 0.3 0.3 32 -45 28 26 2 +5 0.6 0.2 14 -42 15 -6 1 +5 0.3 0.3 29 -18 35 17 2 +5 0.9 0.6 44 -14 24 14 1 +5 0.3 0.2 38 3 -20 -21 1 +5 0.7 0.1 3 -7 2 -46 1 +5 0.5 0.7 32 -45 0 -45 1 +5 0.5 0.1 -2 -29 28 -37 2 +5 0.4 0.4 31 -24 -7 -12 1 +5 0.5 0.6 -21 -28 -27 -49 1 +5 0.7 0.9 21 12 34 21 2 +5 0.9 0.2 -3 -29 -43 -46 1 +5 0.8 0.6 19 7 50 24 2 +5 0.7 0.5 38 15 30 -40 1 +5 0.3 0.4 0 -26 49 -19 2 +5 0.9 0.6 23 -10 48 -1 1 +5 0.9 0.8 16 -17 -2 -48 1 +5 0.7 0.4 46 10 42 10 2 +5 0.9 0.8 23 22 35 -2 1 +5 0.1 0.6 47 14 22 -43 2 +5 0.9 0.4 12 -41 27 18 2 +5 0.3 0.2 27 25 36 16 1 +5 0.1 0.4 7 -47 17 -15 2 +5 0.4 0.7 33 32 36 -1 2 +5 0.1 0.3 48 -8 45 -35 1 +5 0.6 0.9 -23 -33 35 -18 2 +5 0.8 0.5 12 -47 26 -46 1 +5 0.3 0.8 19 -17 21 3 2 +5 0.2 0.9 30 14 30 -9 1 +5 0.5 0.8 44 -38 30 -11 2 +5 0.1 0.1 -23 -40 47 44 2 +5 0.2 0.9 40 7 24 6 1 +5 0.6 0.4 26 7 21 12 2 +5 0.7 0.6 41 -15 -16 -46 1 +5 0.3 0.4 12 1 2 -40 1 +5 0.6 0.2 -6 -13 4 -9 2 +5 0.5 0.9 49 47 -16 -41 1 +5 0.2 0.6 28 -22 10 -14 2 +5 0.5 0.3 -9 -28 -13 -46 2 +5 0.7 0.6 -1 -44 24 -10 2 +5 0.5 0.4 50 -41 49 -8 2 +5 0.4 0.7 -14 -33 -2 -9 2 +5 0.4 0.7 15 -50 -2 -26 1 +5 0.5 0.1 -38 -41 34 -46 1 +5 0.3 0.5 19 -45 4 -33 2 +5 0.5 0.1 48 32 36 -14 1 +5 0.5 0.9 -18 -37 45 26 2 +5 0.6 0.5 24 17 -21 -36 1 +5 0.9 0.3 -5 -35 8 -33 2 +5 
0.5 0.8 40 26 35 29 2 +5 0.2 0.3 14 -34 -3 -23 2 +5 0.9 0.4 12 -22 5 -29 1 +5 0.2 0.6 48 43 25 -40 1 +5 0.4 0.8 -26 -41 23 5 2 +5 0.6 0.8 37 2 -13 -40 2 +5 0.4 0.2 12 10 47 -11 1 +5 0.8 0.6 3 -19 16 -21 1 +5 0.8 0.5 -2 -46 -16 -37 2 +5 0.3 0.2 41 36 9 -28 1 +5 0.6 0.4 0 -50 -15 -22 1 +5 0.1 0.8 23 -27 -9 -22 1 +5 0.8 0.7 13 -48 44 -50 1 +5 0.9 0.8 2 1 32 29 2 +5 0.9 0.1 40 -39 -10 -30 1 +5 0.8 0.7 -15 -46 -25 -27 1 +5 0.6 0.7 3 -5 -34 -49 1 +5 0.2 0.8 47 45 -12 -30 1 +5 0.4 0.7 19 -7 -20 -36 2 +5 0.1 0.3 15 -32 3 -9 2 +5 0.7 0.6 -15 -35 14 -21 2 +5 0.2 0.4 -42 -43 -7 -40 1 +5 0.9 0.2 12 -6 33 -37 1 +5 0.8 0.4 49 -39 25 12 2 +5 0.7 0.1 43 -3 -29 -45 1 +5 0.9 0.5 25 -3 -6 -14 1 +5 0.7 0.3 39 -36 42 -33 1 +5 0.9 0.8 -6 -30 46 -21 2 +5 0.5 0.7 41 24 11 -18 1 +5 0.7 0.9 23 -19 30 19 2 +5 0.4 0.1 -3 -36 37 14 2 +5 0.5 0.1 9 -24 16 -19 1 +5 0.8 0.3 43 -44 28 -27 1 +5 0.9 0.6 16 -43 47 36 2 +5 0.9 0.9 43 -14 -2 -12 1 +5 0.6 0.5 -11 -14 39 -40 1 +5 0.1 0.8 5 -13 45 -48 2 +5 0.5 0.7 37 36 21 -44 1 +5 0.4 0.5 49 8 -13 -25 1 +5 0.1 0.1 17 7 29 -25 1 +5 0.7 0.1 39 -46 15 -8 1 +5 0.8 0.4 42 -23 8 -45 1 +5 0.1 0.8 -43 -44 18 -19 2 +5 0.1 0.5 42 25 13 -8 1 +5 0.7 0.5 42 -40 41 -32 1 +5 0.7 0.9 46 -43 40 19 1 +5 0.9 0.9 2 -21 44 -42 2 +5 0.3 0.1 25 -7 40 4 1 +5 0.1 0.4 28 -27 -27 -31 1 +5 0.1 0.1 18 -17 22 -15 1 +5 0.7 0.5 17 7 28 -7 2 +5 0.2 0.2 44 5 34 -3 2 +5 0.2 0.9 16 -6 7 -7 2 +5 0.3 0.8 46 44 31 18 1 +5 0.6 0.4 9 -43 -3 -6 2 +5 0.9 0.8 43 -41 18 -38 1 +5 0.1 0.2 47 -49 45 37 2 +5 0.8 0.3 9 -22 8 4 1 +5 0.9 0.2 12 -17 -13 -47 1 +5 0.5 0.2 -20 -24 7 -9 1 +5 0.5 0.4 31 -46 -1 -37 1 +5 0.1 0.8 49 14 43 -13 1 +5 0.7 0.4 38 -24 37 -31 1 +5 0.2 0.2 17 -50 -5 -23 1 +5 0.6 0.8 -12 -36 14 -22 2 +5 0.3 0.9 48 16 21 -33 2 +5 0.1 0.1 -6 -9 45 -50 2 +5 0.2 0.5 41 12 -22 -31 1 +5 0.6 0.2 43 -30 -14 -20 1 +5 0.3 0.7 27 -49 -3 -49 1 +5 0.3 0.9 34 -47 49 19 2 +5 0.6 0.2 21 -46 28 -32 2 +5 0.8 0.5 -12 -49 17 -21 2 +5 0.2 0.8 8 -44 20 -47 2 +5 0.1 0.4 30 -24 33 -28 2 +5 0.9 0.2 5 -30 10 0 1 +5 0.5 0.3 -17 -25 38 31 2 +5 0.4 0.6 -8 -37 29 14 2 +5 0.8 0.7 40 3 6 -32 1 +5 0.5 0.7 -5 -38 40 1 2 +5 0.4 0.4 -3 -5 3 -36 1 +5 0.7 0.8 13 -36 46 -24 2 +5 0.6 0.6 47 -10 8 -30 1 +5 0.6 0.3 49 -30 -1 -3 1 +5 0.4 0.8 8 -18 27 12 2 +5 0.5 0.4 -11 -23 -3 -32 2 +5 0.4 0.8 43 -44 19 -7 1 +5 0.1 0.5 43 -2 16 -15 1 +5 0.9 0.6 -3 -28 9 -44 2 +5 0.4 0.7 -24 -28 7 -27 2 +5 0.1 0.6 16 -9 -31 -46 2 +5 0.8 0.7 43 27 46 -27 1 +5 0.3 0.4 -10 -25 41 -9 2 +5 0.8 0.1 36 -1 23 -6 1 +5 0.8 0.1 25 24 8 -13 1 +5 0.2 0.7 -14 -36 -25 -50 1 +5 0.5 0.3 -9 -44 40 -6 2 +5 0.5 0.2 35 26 -8 -36 1 +5 0.4 0.7 -18 -34 4 -28 2 +5 0.4 0.3 34 -49 -9 -16 1 +5 0.4 0.6 23 -7 -10 -19 1 +5 0.9 0.7 31 12 47 -37 1 +5 0.9 0.8 4 -25 9 -7 1 +5 0.2 0.1 19 -44 47 4 2 +5 0.8 0.8 -4 -12 20 -22 2 +5 0.2 0.1 15 -47 7 -40 2 +5 0.8 0.7 17 9 31 -32 2 +5 0.5 0.2 25 0 -35 -37 1 +5 0.1 0.6 -3 -40 -9 -35 1 +5 0.3 0.6 42 25 36 -2 2 +5 0.2 0.7 -12 -13 44 -10 2 +5 0.3 0.4 34 -2 4 -5 1 +5 0.6 0.9 9 -43 -3 -25 1 +5 0.1 0.7 25 -14 38 -18 2 +5 0.7 0.9 -18 -32 25 -20 2 +5 0.5 0.9 32 -39 41 -19 2 +5 0.3 0.2 -26 -28 21 -5 2 +5 0.2 0.2 48 6 35 -6 1 +5 0.9 0.4 -13 -34 19 2 2 +5 0.6 0.7 26 -26 11 -23 1 +5 0.2 0.8 34 -19 4 -20 2 +5 0.6 0.6 -10 -42 6 -30 2 +5 0.3 0.9 -17 -34 -7 -10 2 +5 0.2 0.2 8 -30 27 23 2 +5 0.5 0.3 33 4 1 -36 1 +5 0.1 0.7 -14 -34 -4 -26 2 +5 0.1 0.5 -26 -27 35 -37 2 +5 0.1 0.7 46 -50 42 0 2 +5 0.3 0.4 23 -50 13 -3 2 +5 0.1 0.6 28 -14 39 37 2 +5 0.2 0.8 9 -11 15 5 2 +5 0.2 0.5 -29 -44 12 -10 2 +5 0.4 0.6 -31 -40 -5 -40 2 +5 0.2 0.5 44 -22 36 -29 2 +5 0.7 0.5 11 2 14 1 1 +5 0.9 0.8 18 
-22 26 -12 2 +5 0.8 0.7 -11 -35 14 -11 2 +5 0.6 0.4 2 -46 36 27 1 +5 0.5 0.3 -42 -50 24 13 2 +5 0.1 0.6 48 -36 31 -27 2 +5 0.4 0.4 49 -11 -35 -49 1 +5 0.4 0.2 45 25 44 42 2 +5 0.3 0.5 -7 -36 12 3 2 +5 0.6 0.6 -4 -23 45 38 2 +5 0.5 0.5 33 -46 -15 -21 1 +5 0.6 0.6 -1 -20 -4 -30 2 +5 0.4 0.7 41 -14 24 -23 2 +5 0.9 0.7 -30 -48 29 18 2 +5 0.7 0.6 -12 -22 13 -35 2 +5 0.7 0.7 -10 -31 48 -10 2 +5 0.6 0.3 37 20 -39 -47 1 +5 0.1 0.4 -6 -14 16 -32 2 +5 0.5 0.9 45 15 11 -13 1 +5 0.2 0.3 -20 -32 44 -19 2 +5 0.4 0.6 49 -44 17 -25 2 +5 0.6 0.2 27 -46 -9 -32 1 +5 0.1 0.5 -2 -19 26 -12 2 +5 0.4 0.9 35 3 50 -36 2 +5 0.7 0.7 -22 -33 4 -28 2 +5 0.3 0.1 9 -43 49 -38 1 +5 0.7 0.1 3 -45 49 -11 1 +5 0.6 0.2 48 32 35 -39 1 +5 0.8 0.3 -18 -49 28 24 2 +5 0.1 0.6 40 -35 -11 -22 2 +5 0.3 0.4 3 -46 20 13 2 +5 0.4 0.8 -11 -36 -25 -26 2 +5 0.9 0.1 34 -28 30 0 1 +5 0.2 0.4 41 -3 39 -20 2 +5 0.4 0.4 -35 -49 -9 -31 1 +5 0.2 0.8 27 20 23 6 2 +5 0.8 0.3 -1 -44 -43 -49 1 +5 0.5 0.1 42 24 44 -26 1 +5 0.4 0.9 50 20 49 25 1 +5 0.6 0.6 47 38 -9 -24 1 +5 0.5 0.4 23 -33 35 -37 2 +5 0.5 0.8 23 -15 -1 -48 1 +5 0.1 0.8 -14 -47 17 -39 2 +5 0.4 0.4 33 -12 20 -23 1 +5 0.6 0.5 49 10 47 43 1 +5 0.4 0.8 23 -2 21 -35 1 +5 0.3 0.8 -20 -36 16 -23 2 +5 0.6 0.7 37 34 47 -26 1 +5 0.2 0.4 14 -2 42 14 2 +5 0.6 0.6 10 -42 -28 -48 1 +5 0.2 0.7 43 -27 -1 -38 1 +5 0.6 0.3 40 -41 -3 -47 1 +5 0.1 0.7 -7 -35 41 39 2 +5 0.5 0.1 13 -7 -22 -40 1 +5 0.8 0.1 20 -19 0 -17 1 +5 0.6 0.4 34 22 22 -6 1 +5 0.3 0.9 46 -20 23 3 2 +5 0.4 0.3 20 -12 11 10 1 +5 0.5 0.1 35 28 37 28 1 +5 0.3 0.8 16 -30 14 -4 2 +5 0.9 0.9 13 -43 -6 -28 1 +5 0.4 0.8 0 -17 -5 -49 2 +5 0.5 0.3 -21 -47 32 -5 2 +5 0.1 0.6 47 25 3 -48 1 +5 0.6 0.3 7 -42 -5 -47 2 +5 0.2 0.9 44 34 -22 -33 1 +5 0.3 0.5 48 -19 17 -23 1 +5 0.6 0.9 -12 -24 45 40 2 +5 0.1 0.1 46 21 33 -22 1 +5 0.2 0.8 33 -16 -15 -28 1 +5 0.5 0.1 20 -19 36 11 2 +5 0.5 0.8 6 -5 10 -2 2 +5 0.6 0.1 6 -3 44 11 2 +5 0.8 0.8 29 -21 2 -25 1 +5 0.4 0.6 48 -29 18 -18 1 +5 0.7 0.5 46 -12 16 -25 1 +5 0.9 0.1 42 18 -34 -48 1 +5 0.9 0.7 -36 -39 19 -2 2 +5 0.8 0.6 41 10 30 -50 1 +5 0.3 0.1 49 -20 47 26 2 +6 0.8 0.6 43 -30 11 3 1 +6 0.2 0.6 8 2 29 7 2 +6 0.8 0.4 -18 -50 21 8 2 +6 0.7 0.1 -12 -24 34 -39 1 +6 0.9 0.4 49 -9 1 -38 1 +6 0.9 0.7 28 -12 33 -41 1 +6 0.5 0.1 30 22 40 28 2 +6 0.5 0.6 -26 -28 1 -1 2 +6 0.5 0.7 15 7 23 -25 2 +6 0.1 0.8 43 18 -13 -22 1 +6 0.7 0.8 -2 -35 28 1 2 +6 0.4 0.6 21 -20 36 11 2 +6 0.7 0.9 -39 -40 -11 -43 2 +6 0.5 0.3 1 -41 45 16 2 +6 0.6 0.7 8 -34 30 -43 2 +6 0.6 0.2 8 -43 18 -50 1 +6 0.9 0.2 21 -39 27 -37 1 +6 0.8 0.1 -18 -41 46 16 2 +6 0.1 0.2 3 -17 15 -30 1 +6 0.3 0.8 7 -39 4 -38 2 +6 0.9 0.5 41 -11 -41 -43 1 +6 0.3 0.8 -2 -50 1 -4 2 +6 0.2 0.3 48 0 34 9 2 +6 0.8 0.1 -29 -41 47 10 2 +6 0.1 0.9 21 -9 -23 -35 1 +6 0.4 0.9 -36 -39 25 -17 2 +6 0.1 0.6 50 13 -1 -20 1 +6 0.3 0.6 35 16 50 8 2 +6 0.8 0.3 48 -40 43 -39 1 +6 0.4 0.7 -6 -16 45 22 2 +6 0.7 0.9 28 1 33 -16 2 +6 0.4 0.6 30 -22 33 20 2 +6 0.6 0.1 47 -14 -2 -47 1 +6 0.1 0.5 16 -16 46 5 2 +6 0.4 0.2 43 27 -9 -27 1 +6 0.4 0.2 -1 -17 6 -35 1 +6 0.3 0.3 19 9 19 -41 1 +6 0.6 0.7 50 -23 -9 -22 1 +6 0.8 0.4 -16 -46 -34 -46 1 +6 0.7 0.3 35 23 49 -1 1 +6 0.3 0.2 27 13 5 -29 1 +6 0.6 0.7 44 2 15 -18 1 +6 0.5 0.5 31 10 -10 -38 1 +6 0.2 0.4 45 1 16 -11 1 +6 0.2 0.6 -40 -43 -4 -36 2 +6 0.3 0.2 20 -20 20 -46 1 +6 0.6 0.4 24 17 44 -9 1 +6 0.3 0.6 35 0 45 -24 2 +6 0.6 0.2 15 -3 23 -26 1 +6 0.8 0.8 31 10 34 32 2 +6 0.4 0.5 37 -40 8 -36 2 +6 0.3 0.5 13 -42 28 -6 2 +6 0.3 0.1 9 4 26 -38 1 +6 0.4 0.6 35 -50 42 35 2 +6 0.3 0.7 1 -44 36 30 2 +6 0.5 0.3 12 -18 -14 -46 1 +6 0.4 0.2 32 -21 44 8 
2 +6 0.8 0.8 6 -22 44 15 2 +6 0.7 0.6 -8 -30 -8 -25 1 +6 0.5 0.8 18 15 -6 -7 1 +6 0.3 0.4 -31 -42 39 20 2 +6 0.1 0.3 33 6 22 -40 1 +6 0.2 0.6 26 -7 31 -42 2 +6 0.1 0.5 22 -10 41 -5 2 +6 0.4 0.2 6 -6 33 27 2 +6 0.3 0.8 29 -4 48 -37 2 +6 0.5 0.4 -22 -29 8 -36 2 +6 0.7 0.7 1 -13 10 -11 2 +6 0.1 0.2 -11 -25 15 -28 2 +6 0.4 0.1 27 7 11 1 1 +6 0.7 0.5 -11 -35 0 -7 2 +6 0.9 0.2 50 32 -6 -25 1 +6 0.2 0.2 -11 -23 34 -27 2 +6 0.7 0.6 30 -15 44 -37 1 +6 0.4 0.5 31 22 13 0 1 +6 0.6 0.6 44 -3 38 36 2 +6 0.5 0.5 8 2 38 -7 2 +6 0.3 0.2 -11 -30 18 -21 2 +6 0.7 0.4 -37 -38 -21 -48 1 +6 0.5 0.8 24 -33 38 32 2 +6 0.7 0.5 33 6 31 12 1 +6 0.9 0.8 39 -16 40 -35 1 +6 0.2 0.7 35 6 -35 -46 1 +6 0.8 0.3 18 -45 -16 -17 1 +6 0.8 0.9 41 -45 45 3 2 +6 0.4 0.2 38 28 21 -10 1 +6 0.5 0.6 -17 -43 6 -8 2 +6 0.7 0.3 43 -12 48 19 2 +6 0.2 0.9 39 15 37 -26 2 +6 0.5 0.9 17 -20 -11 -48 1 +6 0.3 0.6 -6 -13 36 -21 2 +6 0.6 0.2 12 -49 14 -4 1 +6 0.8 0.6 30 20 18 -47 1 +6 0.1 0.2 7 -10 29 -46 1 +6 0.8 0.2 4 -16 -7 -9 1 +6 0.6 0.7 22 -38 -8 -19 1 +6 0.1 0.8 18 -13 -26 -45 1 +6 0.3 0.3 -34 -38 22 -38 2 +6 0.1 0.9 32 -24 -25 -34 1 +6 0.3 0.5 16 -23 20 -11 2 +6 0.6 0.6 38 -36 -13 -24 1 +6 0.9 0.5 -6 -20 -27 -42 1 +6 0.4 0.5 1 -24 34 -18 2 +6 0.8 0.4 44 29 -3 -37 1 +6 0.2 0.8 49 -33 50 42 2 +6 0.4 0.9 2 -25 36 -8 2 +6 0.3 0.8 35 -40 22 -44 2 +6 0.8 0.1 27 -50 -34 -40 1 +6 0.3 0.4 45 -49 21 10 2 +6 0.5 0.6 20 -7 -34 -45 1 +6 0.3 0.9 -5 -21 35 33 2 +6 0.6 0.6 41 -11 -20 -35 1 +6 0.3 0.5 34 -43 -8 -16 1 +6 0.6 0.2 28 -36 33 -4 1 +6 0.9 0.4 49 -21 30 -11 1 +6 0.3 0.9 9 -23 28 -18 2 +6 0.3 0.9 31 20 4 -22 1 +6 0.3 0.1 31 10 -35 -44 1 +6 0.2 0.8 10 1 27 -2 2 +6 0.7 0.4 29 -29 5 -33 1 +6 0.6 0.9 4 -47 40 26 2 +6 0.1 0.9 8 -26 -22 -30 1 +6 0.2 0.3 26 -29 26 24 2 +6 0.2 0.4 41 -20 -18 -30 2 +6 0.7 0.1 6 -7 6 -3 1 +6 0.7 0.1 50 17 33 31 1 +6 0.5 0.9 15 -7 22 -17 2 +6 0.2 0.2 48 30 4 -13 1 +6 0.6 0.7 37 -39 32 -39 1 +6 0.6 0.7 28 -26 49 47 2 +6 0.7 0.5 46 -11 18 -38 1 +6 0.9 0.4 45 -7 47 42 2 +6 0.2 0.6 25 21 -21 -33 1 +6 0.1 0.6 37 -44 -13 -26 2 +6 0.5 0.5 41 33 27 -20 1 +6 0.7 0.9 14 2 35 28 1 +6 0.2 0.2 2 -26 23 2 2 +6 0.6 0.1 32 10 -12 -46 1 +6 0.1 0.1 -29 -43 39 4 2 +6 0.7 0.5 42 20 -37 -39 1 +6 0.2 0.3 17 4 22 3 1 +6 0.4 0.6 -12 -30 22 11 2 +6 0.8 0.9 41 24 35 -17 1 +6 0.6 0.7 -5 -38 36 24 2 +6 0.6 0.8 -37 -38 34 15 2 +6 0.6 0.8 6 -8 -7 -15 1 +6 0.8 0.5 39 33 45 -33 1 +6 0.8 0.7 22 -6 4 -14 1 +6 0.7 0.9 -19 -22 -33 -41 1 +6 0.6 0.6 39 -25 -18 -27 1 +6 0.7 0.3 -11 -24 -5 -33 1 +6 0.6 0.1 45 25 28 22 1 +6 0.3 0.9 28 -2 -2 -37 1 +6 0.9 0.8 10 -44 20 9 1 +6 0.2 0.9 50 -16 13 -21 2 +6 0.8 0.4 -14 -33 -42 -44 2 +6 0.8 0.2 31 -10 0 -7 1 +6 0.9 0.9 11 -38 36 -39 2 +6 0.3 0.5 15 -4 -16 -20 1 +6 0.3 0.5 16 -43 34 -43 2 +6 0.9 0.6 -30 -41 -31 -36 1 +6 0.1 0.2 -18 -49 18 5 2 +6 0.1 0.1 25 -19 35 -49 1 +6 0.2 0.1 14 4 -7 -30 1 +6 0.5 0.9 42 21 29 -19 1 +6 0.2 0.8 17 1 -1 -16 1 +6 0.7 0.3 -17 -31 44 -37 2 +6 0.9 0.4 0 -10 20 13 2 +6 0.4 0.9 16 -4 22 -44 2 +6 0.1 0.9 25 -40 -17 -33 2 +6 0.3 0.5 -7 -48 21 -24 2 +6 0.7 0.1 -16 -31 34 7 2 +6 0.9 0.2 22 -12 28 24 1 +6 0.1 0.5 28 3 27 -50 2 +6 0.9 0.7 -13 -38 -17 -18 2 +6 0.9 0.3 3 -41 37 -1 2 +6 0.6 0.2 -16 -36 -2 -37 2 +6 0.9 0.5 30 26 41 -40 1 +6 0.3 0.7 24 -18 -3 -24 1 +6 0.1 0.2 -16 -41 39 17 2 +6 0.7 0.7 40 36 -37 -46 1 +6 0.2 0.4 27 -46 41 8 2 +6 0.7 0.3 27 2 1 -16 1 +6 0.1 0.9 49 -36 10 -21 2 +6 0.5 0.2 22 11 30 -8 1 +6 0.8 0.8 0 -7 24 -3 2 +6 0.5 0.5 -7 -11 36 6 2 +6 0.4 0.4 39 14 -27 -44 1 +6 0.3 0.2 36 -14 21 6 2 +6 0.7 0.8 4 -25 46 17 2 +6 0.1 0.8 20 -10 25 -26 2 +6 0.6 0.6 12 0 11 -46 1 +6 0.1 0.5 
43 23 32 -28 1 +6 0.6 0.9 -3 -18 47 38 2 +6 0.9 0.8 -18 -36 -14 -42 2 +6 0.8 0.4 10 -3 -9 -36 1 +6 0.2 0.8 3 -31 29 14 2 +6 0.6 0.1 11 -23 -25 -34 1 +6 0.7 0.5 39 -9 46 19 2 +6 0.8 0.6 44 -50 13 6 1 +6 0.2 0.9 27 -46 -8 -48 1 +6 0.2 0.7 50 -32 -28 -43 1 +6 0.8 0.5 32 -41 -1 -18 1 +6 0.6 0.5 -27 -33 38 -38 2 +6 0.8 0.8 31 -23 -35 -37 1 +6 0.5 0.9 -37 -38 10 -24 2 +6 0.3 0.6 11 10 33 -20 1 +6 0.3 0.5 -21 -22 11 -17 2 +6 0.7 0.7 29 12 46 -34 1 +6 0.7 0.5 17 -30 26 -3 1 +6 0.5 0.2 8 -15 1 -4 1 +6 0.5 0.7 35 20 36 25 2 +6 0.1 0.4 -25 -44 -8 -28 2 +6 0.8 0.9 46 -17 -6 -12 1 +6 0.3 0.4 -5 -40 -24 -36 1 +6 0.2 0.7 27 13 50 -50 2 +6 0.9 0.8 49 39 48 -2 2 +6 0.6 0.2 -15 -41 -7 -9 1 +6 0.5 0.2 49 22 38 -50 1 +6 0.3 0.7 35 17 48 12 2 +6 0.3 0.9 15 -44 35 -11 2 +6 0.1 0.9 27 7 46 25 2 +6 0.8 0.4 38 -46 15 -32 1 +6 0.6 0.7 27 -10 44 21 2 +6 0.2 0.7 10 -49 41 -10 2 +6 0.7 0.3 29 19 28 -28 1 +6 0.3 0.6 49 -6 26 -14 2 +6 0.7 0.4 20 -29 -5 -17 1 +6 0.8 0.9 20 -15 5 -1 1 +6 0.9 0.6 -19 -45 32 -6 2 +6 0.5 0.6 2 -29 -15 -48 1 +6 0.1 0.1 45 -40 7 -4 2 +6 0.3 0.1 -8 -39 30 -38 2 +6 0.8 0.8 37 1 -28 -32 1 +6 0.7 0.7 0 -26 35 -14 2 +6 0.7 0.1 -6 -19 10 -45 1 +6 0.5 0.7 42 -44 11 -17 2 +6 0.2 0.7 40 -33 8 -18 2 +6 0.8 0.2 25 -23 48 43 2 +6 0.7 0.2 10 1 20 5 1 +6 0.4 0.5 22 -39 -8 -19 2 +6 0.2 0.5 42 -45 17 -17 2 +6 0.9 0.5 -20 -39 48 -34 2 +6 0.8 0.7 -25 -26 11 -26 2 +6 0.5 0.3 20 -15 36 -16 1 +6 0.8 0.1 -18 -33 15 -1 2 +6 0.3 0.4 -3 -21 30 -8 2 +6 0.9 0.8 -4 -25 49 -24 2 +6 0.1 0.8 47 -13 33 -45 2 +6 0.6 0.6 -34 -37 2 -6 2 +6 0.8 0.3 0 -5 29 -1 2 +6 0.3 0.7 30 -25 45 -8 2 +6 0.7 0.1 14 -24 1 -3 1 +6 0.1 0.7 15 -10 23 -11 2 +6 0.1 0.7 -6 -48 -9 -30 2 +6 0.9 0.5 38 -8 -25 -39 1 +6 0.7 0.9 -32 -39 2 0 2 +6 0.2 0.9 -1 -45 14 5 2 +6 0.7 0.7 -3 -36 41 -8 2 +6 0.3 0.9 -2 -26 47 -11 2 +6 0.8 0.6 47 8 45 12 2 +6 0.9 0.1 -39 -44 43 37 2 +6 0.3 0.7 -3 -28 -18 -33 1 +6 0.3 0.5 23 13 14 -18 1 +6 0.2 0.4 11 6 35 -21 2 +6 0.3 0.2 50 19 -17 -47 1 +6 0.1 0.3 49 37 16 8 1 +6 0.5 0.1 -7 -23 -7 -34 1 +6 0.3 0.6 25 -11 50 35 2 +6 0.2 0.3 44 41 37 -23 1 +6 0.8 0.6 39 -18 2 -45 1 +6 0.3 0.5 18 -10 40 -15 2 +6 0.6 0.8 23 -37 2 1 1 +6 0.5 0.2 17 -50 1 -22 1 +6 0.9 0.2 20 8 -1 -12 1 +6 0.8 0.8 40 -32 44 1 2 +6 0.7 0.4 36 -50 -17 -22 1 +6 0.8 0.8 -27 -41 30 29 2 +6 0.5 0.9 19 11 -3 -34 1 +6 0.6 0.3 2 -38 -11 -20 1 +6 0.8 0.5 -3 -30 23 -6 2 +6 0.7 0.9 -4 -19 33 -8 2 +6 0.3 0.6 4 2 26 -37 1 +6 0.3 0.2 -33 -36 3 -13 2 +6 0.9 0.3 -31 -32 -15 -21 2 +6 0.9 0.8 35 -29 27 -42 1 +6 0.2 0.7 44 34 -3 -26 1 +6 0.3 0.4 -27 -39 -33 -50 1 +6 0.1 0.6 20 13 14 9 1 +6 0.8 0.4 40 -19 -1 -10 1 +6 0.3 0.8 44 5 -6 -37 1 +6 0.1 0.9 42 37 33 -46 2 +6 0.2 0.9 7 -25 -15 -24 2 +6 0.9 0.1 39 -46 -3 -4 1 +6 0.5 0.9 -21 -24 -27 -36 1 +6 0.7 0.3 37 -22 -12 -31 1 +6 0.9 0.9 4 -50 43 -13 2 +6 0.2 0.7 40 1 50 23 2 +6 0.7 0.9 6 -11 -5 -47 1 +6 0.1 0.4 -26 -28 48 -35 2 +7 0.5 0.3 37 7 -2 -6 1 +7 0.7 0.6 -10 -35 16 -38 2 +7 0.5 0.4 40 14 48 -7 1 +7 0.6 0.9 20 -27 22 -1 2 +7 0.3 0.5 46 -2 -5 -30 1 +7 0.8 0.3 -1 -6 17 -18 2 +7 0.5 0.5 39 23 -19 -47 1 +7 0.3 0.2 40 -16 -17 -40 1 +7 0.5 0.1 6 -24 50 -3 2 +7 0.7 0.1 35 6 -5 -42 1 +7 0.3 0.3 41 19 49 -44 1 +7 0.4 0.7 -16 -48 24 -24 2 +7 0.5 0.5 18 -9 37 15 2 +7 0.7 0.7 42 21 25 -50 1 +7 0.8 0.9 16 -35 -3 -15 1 +7 0.2 0.7 45 41 32 15 1 +7 0.3 0.4 39 -4 45 38 2 +7 0.8 0.4 -19 -34 15 -39 2 +7 0.8 0.7 27 -21 -15 -30 1 +7 0.6 0.6 38 -32 47 -49 2 +7 0.5 0.1 6 -38 7 -43 1 +7 0.8 0.9 36 18 44 24 2 +7 0.7 0.3 36 -16 37 -29 1 +7 0.6 0.9 33 27 45 -32 1 +7 0.1 0.4 -19 -41 13 -13 2 +7 0.9 0.3 8 -25 32 -20 1 +7 0.9 0.2 8 -39 44 -38 1 +7 0.4 0.3 
20 -46 10 -37 2 +7 0.9 0.2 39 24 17 0 1 +7 0.7 0.1 39 -22 3 -40 1 +7 0.7 0.5 -22 -25 49 21 2 +7 0.8 0.9 -2 -41 -1 -26 2 +7 0.3 0.9 40 -25 17 -28 2 +7 0.9 0.4 43 23 21 -31 1 +7 0.3 0.2 -8 -34 2 -44 2 +7 0.1 0.6 27 -48 39 21 2 +7 0.8 0.9 6 -7 -42 -46 1 +7 0.8 0.8 -10 -35 45 14 2 +7 0.2 0.7 27 11 36 -42 2 +7 0.1 0.8 14 -19 -2 -20 2 +7 0.9 0.3 33 27 19 7 1 +7 0.2 0.8 15 -3 38 -7 2 +7 0.3 0.5 48 4 23 -5 1 +7 0.7 0.8 13 10 10 -23 1 +7 0.4 0.9 33 0 32 14 2 +7 0.2 0.6 43 -26 50 -7 2 +7 0.9 0.2 43 35 -24 -44 1 +7 0.8 0.4 30 21 20 -12 1 +7 0.5 0.2 6 2 11 -3 1 +7 0.3 0.8 2 -17 19 -50 2 +7 0.5 0.3 -28 -43 -17 -35 2 +7 0.9 0.7 36 13 27 10 1 +7 0.4 0.8 44 -4 27 -42 2 +7 0.9 0.3 2 -9 -23 -27 1 +7 0.9 0.5 18 -2 38 -50 1 +7 0.1 0.2 48 47 16 -18 1 +7 0.5 0.4 6 -2 26 15 2 +7 0.4 0.1 -16 -48 -3 -23 2 +7 0.1 0.9 12 -41 17 -25 2 +7 0.4 0.1 -1 -15 33 1 2 +7 0.2 0.1 -14 -39 34 -30 2 +7 0.3 0.5 -3 -11 29 -41 2 +7 0.9 0.3 41 17 -11 -13 1 +7 0.3 0.6 21 15 49 -44 1 +7 0.3 0.7 46 -1 42 39 2 +7 0.3 0.1 -11 -37 13 -49 2 +7 0.4 0.6 33 -9 41 -50 2 +7 0.5 0.1 16 -10 40 -43 1 +7 0.7 0.6 20 10 -6 -47 1 +7 0.7 0.6 21 -18 42 40 2 +7 0.8 0.8 -12 -28 -12 -43 1 +7 0.7 0.5 28 -48 0 -41 1 +7 0.3 0.1 36 -43 29 -19 2 +7 0.9 0.4 31 -15 12 -45 1 +7 0.8 0.3 -8 -25 48 47 2 +7 0.4 0.5 32 -27 41 23 2 +7 0.8 0.9 45 -36 20 8 1 +7 0.6 0.3 39 30 44 16 1 +7 0.7 0.6 4 -39 33 -48 2 +7 0.9 0.3 50 28 38 -27 1 +7 0.9 0.2 12 -25 -1 -26 1 +7 0.1 0.2 28 -6 4 -2 2 +7 0.7 0.5 29 -37 40 14 2 +7 0.6 0.9 16 -43 10 -3 2 +7 0.2 0.8 21 -24 31 -24 2 +7 0.3 0.9 50 -9 -3 -24 1 +7 0.1 0.2 4 -12 44 5 2 +7 0.9 0.8 47 -42 10 -16 1 +7 0.1 0.7 29 -24 -27 -43 1 +7 0.8 0.6 45 37 37 -2 1 +7 0.8 0.5 37 -9 49 -36 1 +7 0.2 0.6 10 -8 -45 -47 1 +7 0.9 0.4 37 36 12 -49 1 +7 0.9 0.3 17 -14 -14 -16 1 +7 0.8 0.1 33 30 -24 -46 1 +7 0.7 0.5 32 -34 15 -16 1 +7 0.8 0.4 30 19 43 -18 1 +7 0.8 0.3 38 -15 -7 -11 1 +7 0.1 0.4 35 -39 -17 -25 2 +7 0.8 0.5 34 25 -10 -11 1 +7 0.4 0.6 -1 -26 34 -34 2 +7 0.7 0.7 30 -21 4 -11 1 +7 0.5 0.1 13 -33 3 -45 1 +7 0.6 0.2 34 -23 35 -24 1 +7 0.2 0.3 24 -14 -6 -47 1 +7 0.5 0.2 28 -34 50 -19 1 +7 0.7 0.7 35 26 -7 -16 1 +7 0.5 0.5 34 13 17 -10 1 +7 0.1 0.3 -30 -49 10 -44 2 +7 0.9 0.7 23 1 44 22 2 +7 0.6 0.9 50 6 -29 -36 1 +7 0.1 0.9 24 -5 32 5 2 +7 0.4 0.3 31 3 1 -50 1 +7 0.4 0.3 0 -37 -16 -32 1 +7 0.5 0.8 34 4 31 -31 2 +7 0.2 0.1 50 -45 50 -49 1 +7 0.2 0.1 22 14 33 -36 1 +7 0.2 0.8 11 -26 -3 -25 2 +7 0.8 0.8 23 -1 32 5 2 +7 0.7 0.4 -24 -47 28 -32 2 +7 0.9 0.4 -16 -18 -1 -33 2 +7 0.6 0.2 -14 -42 36 -3 2 +7 0.7 0.4 49 19 -12 -37 1 +7 0.7 0.6 -3 -22 24 5 2 +7 0.8 0.6 15 -16 22 -37 1 +7 0.4 0.3 40 22 32 26 2 +7 0.4 0.8 20 -24 -33 -37 1 +7 0.5 0.5 48 -35 -17 -50 1 +7 0.1 0.5 8 0 44 23 2 +7 0.8 0.8 14 -33 48 -30 2 +7 0.4 0.7 31 -35 -1 -50 1 +7 0.8 0.9 -8 -49 -14 -24 1 +7 0.9 0.5 42 -11 -22 -41 1 +7 0.7 0.6 29 -34 25 12 2 +7 0.2 0.4 35 -24 37 15 2 +7 0.6 0.3 48 12 21 18 1 +7 0.2 0.9 -22 -34 12 -11 2 +7 0.3 0.8 48 -10 7 -23 2 +7 0.4 0.2 2 -19 38 -8 2 +7 0.7 0.9 24 -8 41 -26 2 +7 0.7 0.3 -3 -12 -3 -5 2 +7 0.6 0.2 40 9 40 -38 1 +7 0.7 0.2 3 -15 46 12 2 +7 0.4 0.8 4 -27 46 -40 2 +7 0.8 0.4 24 -7 -24 -29 1 +7 0.8 0.3 32 -1 41 -29 1 +7 0.9 0.4 -3 -34 35 3 2 +7 0.9 0.3 40 -27 29 -11 1 +7 0.2 0.9 -3 -35 -13 -49 2 +7 0.2 0.8 37 28 25 -13 1 +7 0.1 0.2 -15 -36 50 25 2 +7 0.3 0.5 -7 -30 4 -25 2 +7 0.6 0.7 21 10 13 8 1 +7 0.3 0.4 2 -22 41 -31 2 +7 0.5 0.1 0 -47 29 -45 1 +7 0.3 0.5 23 6 14 -25 1 +7 0.2 0.3 -24 -27 6 -5 2 +7 0.2 0.6 14 -1 43 -8 2 +7 0.6 0.2 -40 -41 30 17 2 +7 0.7 0.7 7 -50 9 -34 1 +7 0.9 0.4 37 -41 15 -1 1 +7 0.9 0.8 26 19 22 -45 1 +7 0.1 0.2 10 -31 30 13 2 +7 
+[... example-data rows for subjects 7-15: one "+"-added record per line, each with eight whitespace-separated fields (subject ID 7-15, two probabilities between 0.1 and 0.9, four integer outcome values between -50 and 50, and a binary choice coded 1 or 2) ...]
20 -40 39 -14 2 +15 0.2 0.4 18 -36 44 40 2 +15 0.3 0.1 -6 -22 30 -22 1 +15 0.3 0.5 34 -21 48 -31 2 +15 0.4 0.1 5 -33 29 10 2 +15 0.4 0.2 48 -26 38 -26 1 +15 0.3 0.6 16 -33 21 -16 2 +15 0.1 0.2 -21 -45 36 13 2 +15 0.6 0.8 35 -14 5 -39 1 +15 0.7 0.7 39 -28 21 6 2 +15 0.8 0.6 -18 -25 35 -21 2 +15 0.2 0.9 30 -34 33 -36 2 +15 0.3 0.3 47 22 37 -47 1 +15 0.1 0.7 18 -47 -15 -28 1 +15 0.3 0.5 7 4 27 -40 1 +15 0.1 0.7 42 -35 -9 -50 1 +15 0.7 0.6 50 -12 23 14 1 +15 0.1 0.5 21 -31 16 -17 2 +15 0.8 0.4 -1 -10 24 11 2 +15 0.2 0.5 45 -37 -14 -28 1 +15 0.5 0.2 -24 -48 3 -21 1 +15 0.7 0.6 29 -1 40 10 2 +15 0.7 0.5 41 -20 38 -26 2 +15 0.1 0.2 33 1 41 -16 1 +15 0.2 0.2 32 21 42 -8 1 +15 0.9 0.3 40 29 7 -29 1 +15 0.3 0.3 10 -47 39 37 2 +15 0.7 0.8 46 22 17 -22 1 +16 0.1 0.3 -23 -45 -1 -32 2 +16 0.7 0.1 41 8 4 -2 1 +16 0.6 0.7 44 -23 -11 -17 1 +16 0.3 0.8 45 17 27 25 1 +16 0.4 0.9 10 -7 29 16 2 +16 0.4 0.4 16 3 31 -30 1 +16 0.6 0.5 49 -12 40 29 2 +16 0.7 0.5 10 -9 -36 -43 1 +16 0.7 0.6 37 -47 29 -23 1 +16 0.8 0.6 32 -18 48 -40 1 +16 0.9 0.3 17 -26 45 31 2 +16 0.2 0.8 24 -5 -1 -19 1 +16 0.1 0.4 17 -18 -5 -19 2 +16 0.2 0.5 42 -31 23 -38 2 +16 0.5 0.7 27 -18 39 -8 2 +16 0.4 0.1 24 3 30 -30 1 +16 0.1 0.4 21 -10 29 5 2 +16 0.8 0.4 6 -42 50 22 2 +16 0.7 0.3 29 -32 14 -8 1 +16 0.8 0.3 38 36 26 -7 1 +16 0.2 0.2 12 -18 21 -8 2 +16 0.2 0.3 -33 -42 33 -4 2 +16 0.3 0.9 14 -33 36 11 2 +16 0.7 0.5 19 -15 -36 -44 1 +16 0.5 0.2 28 -47 28 -48 1 +16 0.5 0.8 24 -45 13 -8 2 +16 0.6 0.8 43 -24 35 -32 2 +16 0.9 0.7 12 -41 17 -14 1 +16 0.4 0.9 20 -8 4 -38 1 +16 0.9 0.4 44 10 28 -44 1 +16 0.2 0.4 2 -27 42 5 2 +16 0.9 0.5 2 -1 38 -30 1 +16 0.1 0.4 19 -3 -5 -23 1 +16 0.1 0.6 29 26 43 -7 1 +16 0.6 0.8 -25 -29 9 6 2 +16 0.2 0.6 26 -31 24 -22 2 +16 0.7 0.8 19 -37 32 -36 2 +16 0.7 0.7 19 -30 50 -34 2 +16 0.9 0.4 50 15 -26 -44 1 +16 0.9 0.7 1 -8 -24 -29 1 +16 0.8 0.7 4 -14 4 -12 1 +16 0.6 0.7 34 32 40 0 2 +16 0.5 0.2 -26 -50 -20 -46 1 +16 0.9 0.7 44 -47 3 -14 1 +16 0.9 0.9 33 12 32 -33 1 +16 0.4 0.3 50 -2 -17 -28 1 +16 0.9 0.1 22 -32 49 5 1 +16 0.9 0.4 29 18 -38 -39 1 +16 0.6 0.1 31 0 47 41 2 +16 0.5 0.9 23 -3 -4 -16 1 +16 0.4 0.2 34 17 35 -47 1 +16 0.3 0.3 35 -3 -7 -40 1 +16 0.5 0.6 4 -19 -28 -48 1 +16 0.6 0.7 11 10 6 -41 1 +16 0.8 0.3 14 -45 -19 -50 1 +16 0.1 0.9 27 -35 2 -43 2 +16 0.1 0.8 10 9 -13 -45 1 +16 0.5 0.4 -33 -45 45 -25 2 +16 0.1 0.5 -5 -31 -26 -35 2 +16 0.5 0.7 -1 -19 27 -7 2 +16 0.7 0.9 12 1 -37 -49 1 +16 0.7 0.2 38 34 4 -6 1 +16 0.6 0.1 22 -32 32 8 2 +16 0.4 0.4 31 -7 43 19 2 +16 0.3 0.5 25 -38 22 -26 2 +16 0.4 0.8 -3 -19 44 -2 2 +16 0.5 0.3 37 -23 18 4 2 +16 0.9 0.1 30 -12 9 -48 1 +16 0.6 0.9 4 -7 30 -25 2 +16 0.2 0.9 34 -46 9 -34 2 +16 0.5 0.2 20 -26 40 -12 2 +16 0.9 0.2 -29 -50 34 -33 2 +16 0.3 0.6 44 23 20 -30 1 +16 0.1 0.9 3 -15 20 -2 2 +16 0.4 0.4 -22 -42 -27 -38 2 +16 0.7 0.1 -39 -48 45 -2 2 +16 0.1 0.8 43 8 45 22 2 +16 0.5 0.4 -11 -43 -33 -48 1 +16 0.9 0.7 8 -8 24 -18 2 +16 0.1 0.4 0 -25 40 -21 2 +16 0.7 0.5 42 34 22 -45 1 +16 0.5 0.3 28 2 31 -16 1 +16 0.4 0.8 2 -4 36 24 2 +16 0.1 0.2 48 -38 47 27 2 +16 0.8 0.1 -1 -4 -32 -44 1 +16 0.4 0.8 -4 -44 39 -8 2 +16 0.2 0.9 28 -34 7 -43 2 +16 0.1 0.3 -13 -24 16 -34 2 +16 0.9 0.1 41 26 15 -31 1 +16 0.6 0.6 48 -33 -32 -33 1 +16 0.8 0.1 42 -40 22 -48 1 +16 0.4 0.9 -1 -19 49 46 2 +16 0.2 0.5 29 -49 3 -21 2 +16 0.8 0.8 43 11 -34 -35 1 +16 0.1 0.5 43 -6 44 42 2 +16 0.2 0.6 -28 -35 -38 -39 2 +16 0.4 0.1 -26 -38 21 -38 2 +16 0.6 0.3 14 2 32 -30 1 +16 0.1 0.1 25 7 -12 -32 1 +16 0.7 0.4 43 10 49 41 2 +16 0.1 0.8 42 -38 41 2 2 +16 0.2 0.8 -11 -23 -3 -8 2 +16 0.5 0.1 -15 -38 38 -45 2 +16 
0.3 0.7 -27 -49 -8 -18 2 +16 0.2 0.4 18 -34 40 -37 2 +16 0.5 0.7 0 -18 41 16 2 +16 0.1 0.1 28 -50 40 24 2 +16 0.5 0.4 18 -4 -10 -16 1 +16 0.1 0.7 -1 -38 23 -17 2 +16 0.5 0.5 -10 -15 12 -31 2 +16 0.5 0.1 1 -11 50 -27 1 +16 0.2 0.6 48 25 41 8 1 +16 0.3 0.8 -35 -42 -2 -3 2 +16 0.4 0.4 42 18 42 37 2 +16 0.7 0.6 9 -19 -2 -20 1 +16 0.6 0.3 -29 -33 14 -8 2 +16 0.2 0.8 5 -2 44 17 2 +16 0.2 0.5 38 -41 -4 -19 2 +16 0.8 0.6 48 -14 -31 -41 1 +16 0.9 0.5 2 -7 46 19 2 +16 0.4 0.7 11 -14 37 -19 2 +16 0.3 0.8 45 -33 41 -28 2 +16 0.3 0.3 11 -39 6 -24 2 +16 0.9 0.8 47 -27 -5 -19 1 +16 0.7 0.8 30 -23 48 26 2 +16 0.4 0.3 46 -17 28 -19 1 +16 0.3 0.9 -22 -23 -14 -37 1 +16 0.7 0.9 50 -24 -1 -15 1 +16 0.4 0.9 -5 -50 50 -16 2 +16 0.6 0.7 26 -23 50 -48 2 +16 0.4 0.1 49 3 15 -39 1 +16 0.9 0.4 29 23 -13 -33 1 +16 0.2 0.6 33 -25 13 -44 2 +16 0.9 0.6 24 -25 27 -5 1 +16 0.6 0.1 28 -42 21 -35 1 +16 0.4 0.3 1 -6 9 -12 1 +16 0.3 0.2 -9 -33 42 33 2 +16 0.7 0.4 -4 -46 17 -5 2 +16 0.7 0.9 21 -12 32 26 2 +16 0.2 0.4 -31 -46 49 12 2 +16 0.2 0.6 20 -44 46 -28 2 +16 0.1 0.1 3 -49 -6 -35 2 +16 0.7 0.5 37 11 -21 -38 1 +16 0.3 0.8 36 -42 -9 -25 1 +16 0.7 0.2 16 -6 18 17 2 +16 0.7 0.8 15 -37 35 -12 2 +16 0.6 0.4 19 -11 48 46 2 +16 0.3 0.6 46 -39 19 -29 2 +16 0.9 0.3 -16 -21 -26 -31 1 +16 0.4 0.3 11 -21 -6 -41 1 +16 0.7 0.2 -2 -22 38 -37 1 +16 0.7 0.4 44 -20 21 -22 1 +16 0.3 0.1 28 27 41 -26 1 +16 0.9 0.8 31 -46 -23 -26 1 +16 0.1 0.7 -1 -34 14 -45 2 +16 0.6 0.6 6 -46 23 -8 2 +16 0.9 0.8 15 -20 23 -40 1 +16 0.2 0.5 -1 -10 34 29 2 +16 0.9 0.8 -10 -14 30 -40 2 +16 0.1 0.6 0 -5 10 2 2 +16 0.8 0.2 3 -28 -5 -45 1 +16 0.6 0.9 20 -17 36 -32 2 +16 0.5 0.6 20 -47 47 -41 2 +16 0.6 0.6 18 -23 40 -10 2 +16 0.3 0.6 7 -29 -6 -24 1 +16 0.5 0.7 42 -2 41 -20 1 +16 0.7 0.8 26 -30 18 -27 1 +16 0.8 0.5 -11 -30 -31 -42 1 +16 0.1 0.9 19 12 35 7 2 +16 0.2 0.8 25 -8 37 -2 2 +16 0.5 0.9 16 -29 32 -42 2 +16 0.1 0.2 26 -29 20 -27 2 +16 0.9 0.9 39 34 42 16 2 +16 0.8 0.6 -2 -16 38 -22 2 +16 0.1 0.1 -12 -49 39 17 2 +16 0.5 0.2 44 -22 34 33 2 +16 0.5 0.8 37 -9 4 -31 1 +16 0.8 0.1 13 -21 44 10 2 +16 0.6 0.5 42 -37 40 33 2 +16 0.7 0.5 10 -8 26 -7 2 +16 0.3 0.6 30 0 38 1 2 +16 0.5 0.5 9 8 19 17 2 +16 0.9 0.5 8 -36 49 -15 2 +16 0.5 0.6 -15 -16 46 24 2 +16 0.2 0.6 21 15 31 -48 1 +16 0.9 0.2 -44 -47 32 -43 2 +16 0.2 0.2 14 1 40 2 2 +16 0.8 0.5 35 28 32 -11 1 +16 0.5 0.5 -38 -46 -31 -44 2 +16 0.4 0.1 -16 -41 18 -1 2 +16 0.4 0.6 28 -31 -14 -48 1 +16 0.7 0.1 -3 -41 -4 -41 1 +16 0.8 0.9 14 -21 31 -15 2 +16 0.5 0.6 40 -29 48 -15 2 +16 0.8 0.2 41 24 34 -28 1 +16 0.1 0.2 26 3 -9 -15 1 +16 0.1 0.8 18 -24 -3 -27 2 +16 0.9 0.8 33 -21 44 41 2 +16 0.8 0.2 45 -17 29 -16 1 +16 0.2 0.2 35 32 50 -18 1 +16 0.8 0.1 48 -28 36 -32 1 +16 0.8 0.2 35 -9 5 -25 1 +16 0.4 0.6 21 -5 15 -50 1 +16 0.5 0.7 50 16 37 -49 1 +16 0.8 0.9 -7 -24 18 -42 2 +16 0.8 0.8 45 -23 32 24 1 +16 0.3 0.7 -5 -44 45 42 2 +16 0.3 0.9 -35 -49 37 -2 2 +16 0.5 0.4 25 -21 26 -46 1 +16 0.7 0.3 20 -46 38 26 2 +16 0.5 0.8 16 -3 46 -13 2 +16 0.6 0.6 17 -22 48 18 2 +16 0.4 0.6 26 13 -36 -50 1 +16 0.4 0.6 40 23 41 -20 1 +16 0.2 0.6 -24 -50 -17 -23 2 +16 0.1 0.6 25 12 23 -37 1 +16 0.4 0.2 22 -22 49 -47 1 +16 0.6 0.2 -6 -42 -11 -32 1 +16 0.3 0.5 40 0 -5 -23 1 +16 0.3 0.7 16 5 24 -6 2 +16 0.7 0.7 36 21 46 -33 1 +16 0.9 0.3 12 -45 43 13 2 +16 0.7 0.2 -3 -27 32 0 2 +16 0.2 0.5 4 -23 8 0 2 +16 0.9 0.3 47 -18 48 7 1 +16 0.8 0.1 22 -23 30 -1 1 +16 0.4 0.2 20 -34 30 -40 1 +16 0.4 0.3 49 -27 -38 -46 1 +16 0.7 0.7 44 14 -7 -26 1 +16 0.3 0.6 50 -3 21 20 2 +16 0.4 0.9 8 -5 -35 -46 1 +16 0.4 0.6 24 -45 -18 -29 1 +16 0.9 0.1 21 -14 20 3 1 +16 0.9 
0.6 9 -15 -16 -27 1 +16 0.6 0.6 29 -44 40 -17 2 +16 0.2 0.8 4 -16 9 -8 2 +16 0.5 0.5 40 -19 37 -28 1 +16 0.2 0.3 41 -23 12 -21 1 +16 0.5 0.3 27 -8 9 -11 1 +16 0.9 0.8 27 -10 22 -37 1 +16 0.4 0.6 29 -43 17 5 2 +16 0.7 0.8 4 -35 42 22 2 +16 0.4 0.3 45 14 -11 -16 1 +16 0.2 0.7 11 -37 7 5 2 +16 0.7 0.3 17 -26 34 -9 2 +16 0.9 0.5 42 26 38 6 1 +16 0.2 0.1 8 -30 17 -24 1 +16 0.5 0.5 36 0 28 -41 1 +16 0.6 0.7 15 -23 32 2 2 +16 0.5 0.7 0 -42 -23 -44 1 +16 0.5 0.1 -11 -38 -24 -36 1 +16 0.4 0.9 42 33 -2 -28 1 +16 0.3 0.7 49 40 34 31 1 +16 0.8 0.1 45 -23 -37 -48 1 +16 0.5 0.6 7 5 11 -26 1 +16 0.6 0.8 -42 -47 21 9 2 +16 0.2 0.9 49 -23 -27 -31 1 +16 0.5 0.8 32 4 21 -15 1 +16 0.7 0.4 38 -30 3 2 1 +16 0.9 0.5 44 9 37 13 1 +16 0.4 0.4 50 -11 40 5 2 +16 0.3 0.9 25 -22 30 24 2 +16 0.3 0.1 -1 -9 -30 -38 1 +16 0.9 0.7 18 -21 5 -18 1 +16 0.7 0.8 20 12 5 -25 1 +16 0.5 0.2 15 -37 8 -34 1 +16 0.6 0.3 41 9 47 -10 1 +16 0.1 0.2 22 -5 42 -25 1 +16 0.9 0.7 8 -7 10 -37 1 +16 0.3 0.3 -32 -43 43 -7 2 +16 0.7 0.4 30 -7 24 -40 1 +16 0.4 0.6 44 -43 -37 -40 1 +16 0.9 0.7 48 18 29 -7 1 +16 0.3 0.5 38 2 10 4 1 +16 0.5 0.8 13 -45 4 -9 2 +16 0.1 0.6 43 -33 23 16 2 +16 0.9 0.3 15 -23 22 15 2 +16 0.1 0.7 34 -48 -16 -20 2 +16 0.5 0.6 -6 -14 48 -35 2 +16 0.1 0.2 -36 -49 -2 -29 2 +16 0.2 0.9 9 -2 -8 -19 1 +16 0.7 0.1 8 -40 23 -30 1 +16 0.9 0.9 49 -44 32 -48 1 +16 0.9 0.8 -24 -32 12 -24 2 +16 0.9 0.4 29 17 28 -44 1 +16 0.5 0.6 1 -50 48 40 2 +16 0.2 0.6 27 -36 43 14 2 +16 0.3 0.7 -28 -46 42 12 2 +16 0.4 0.2 -15 -37 39 22 2 +16 0.1 0.8 17 -24 -39 -50 1 +16 0.1 0.6 38 -16 42 -49 2 +16 0.3 0.8 39 24 -1 -43 1 +16 0.9 0.6 29 -13 32 -19 1 +16 0.4 0.4 37 -9 8 -23 1 +16 0.7 0.5 38 -17 12 8 1 +16 0.8 0.4 8 2 -21 -36 1 +16 0.8 0.5 9 8 -7 -42 1 +16 0.1 0.1 -1 -49 -42 -46 1 +16 0.7 0.5 47 39 2 -1 1 +16 0.6 0.6 48 17 28 -48 1 +17 0.6 0.3 -6 -13 46 -19 2 +17 0.4 0.7 32 -2 -15 -40 1 +17 0.3 0.9 33 -24 44 41 2 +17 0.4 0.1 13 -42 -11 -24 1 +17 0.5 0.6 25 -18 38 3 2 +17 0.7 0.5 45 -1 31 -35 1 +17 0.3 0.2 24 -19 -14 -21 1 +17 0.1 0.4 -27 -44 -6 -50 2 +17 0.4 0.6 30 -13 17 -48 1 +17 0.5 0.1 35 28 1 -5 1 +17 0.2 0.2 18 -16 16 -15 1 +17 0.8 0.2 12 -24 -11 -41 1 +17 0.5 0.9 38 -41 27 11 2 +17 0.3 0.3 23 -3 -26 -43 1 +17 0.3 0.8 18 -46 41 22 2 +17 0.4 0.5 -26 -33 18 14 2 +17 0.7 0.7 -9 -23 38 -14 2 +17 0.7 0.7 34 32 16 -7 1 +17 0.4 0.2 17 -44 7 -26 1 +17 0.4 0.6 -5 -25 -33 -46 1 +17 0.1 0.8 47 32 39 -28 1 +17 0.9 0.5 30 -36 20 -9 1 +17 0.3 0.6 2 -4 33 15 2 +17 0.8 0.2 -27 -44 38 18 2 +17 0.2 0.4 3 -47 50 -31 2 +17 0.2 0.2 -20 -41 11 -2 2 +17 0.3 0.3 24 -18 39 -18 2 +17 0.4 0.1 -29 -47 47 31 2 +17 0.7 0.6 -12 -13 18 -44 2 +17 0.9 0.2 33 -38 49 41 2 +17 0.7 0.5 -3 -8 -33 -42 1 +17 0.6 0.3 47 -34 15 -1 1 +17 0.5 0.9 -22 -36 38 -23 2 +17 0.6 0.2 13 7 -16 -17 1 +17 0.4 0.7 -27 -29 16 -4 2 +17 0.2 0.8 49 -7 -3 -21 1 +17 0.5 0.6 3 -31 10 -33 2 +17 0.3 0.4 -23 -38 49 37 2 +17 0.2 0.6 45 41 24 -28 1 +17 0.8 0.3 45 36 25 -25 1 +17 0.8 0.1 35 11 -7 -13 1 +17 0.3 0.7 -12 -39 11 -7 2 +17 0.9 0.3 45 14 -17 -47 1 +17 0.4 0.4 3 -2 -5 -14 1 +17 0.2 0.3 19 -28 5 -28 2 +17 0.2 0.2 42 -16 47 33 2 +17 0.7 0.7 -34 -42 37 -24 2 +17 0.4 0.1 28 -5 6 -20 1 +17 0.4 0.8 -21 -23 19 -16 2 +17 0.1 0.8 11 -27 27 -14 2 +17 0.8 0.5 49 1 5 2 1 +17 0.9 0.9 -20 -36 44 25 2 +17 0.8 0.2 -4 -41 39 -48 2 +17 0.2 0.1 42 33 29 -29 1 +17 0.9 0.8 46 13 -5 -35 1 +17 0.8 0.8 18 -17 47 -11 2 +17 0.4 0.4 42 16 45 30 1 +17 0.3 0.6 42 30 15 -46 1 +17 0.3 0.2 47 41 13 -11 1 +17 0.2 0.4 43 25 46 16 2 +17 0.1 0.5 0 -45 0 -46 2 +17 0.8 0.2 49 -31 48 -4 1 +17 0.3 0.3 -9 -20 41 26 2 +17 0.2 0.1 30 -28 -9 -42 1 +17 
0.2 0.2 -14 -17 46 -37 2 +17 0.6 0.2 37 22 48 16 1 +17 0.9 0.7 17 -12 -10 -22 1 +17 0.4 0.8 31 10 30 15 2 +17 0.8 0.7 -6 -32 44 3 2 +17 0.1 0.3 34 33 49 -2 1 +17 0.6 0.3 -42 -43 40 4 2 +17 0.2 0.6 27 -1 32 16 2 +17 0.7 0.1 -2 -17 -23 -28 1 +17 0.9 0.7 -45 -50 32 -35 2 +17 0.1 0.8 31 -36 42 -15 2 +17 0.4 0.9 34 -48 45 -22 2 +17 0.1 0.6 27 -6 15 -27 2 +17 0.2 0.3 12 0 27 0 2 +17 0.4 0.3 33 28 45 -43 1 +17 0.7 0.9 11 -50 30 26 2 +17 0.8 0.6 -9 -38 23 -30 2 +17 0.3 0.3 22 -38 19 -45 1 +17 0.9 0.5 43 11 29 -2 1 +17 0.9 0.2 -46 -49 -37 -49 1 +17 0.9 0.5 -1 -38 23 -11 2 +17 0.3 0.8 14 2 44 22 2 +17 0.8 0.4 24 -1 40 -36 1 +17 0.1 0.6 4 -13 18 -22 2 +17 0.1 0.5 22 15 49 44 2 +17 0.4 0.4 17 12 13 -28 1 +17 0.8 0.8 36 -26 19 -5 1 +17 0.6 0.8 12 -7 13 -14 2 +17 0.3 0.8 8 -26 -2 -32 2 +17 0.4 0.6 9 -45 10 -25 2 +17 0.2 0.2 32 -14 -44 -47 1 +17 0.4 0.1 -22 -38 -15 -39 1 +17 0.3 0.7 -40 -47 43 33 2 +17 0.5 0.5 6 -38 -9 -37 1 +17 0.4 0.2 -45 -48 -4 -26 2 +17 0.8 0.4 8 -50 40 -36 1 +17 0.5 0.2 15 -36 41 -3 2 +17 0.5 0.8 38 -33 35 -41 2 +17 0.3 0.1 -17 -35 -16 -46 1 +17 0.8 0.2 22 -17 -1 -31 1 +17 0.9 0.1 -14 -42 37 -22 2 +17 0.6 0.8 -25 -27 41 -9 2 +17 0.5 0.1 -6 -7 35 1 2 +17 0.2 0.4 19 -30 50 -32 2 +17 0.3 0.6 -7 -34 -5 -13 2 +17 0.9 0.5 35 14 33 0 1 +17 0.7 0.3 48 -35 32 24 2 +17 0.1 0.5 21 -4 8 -35 1 +17 0.7 0.6 39 -40 21 -30 1 +17 0.7 0.4 -40 -49 12 -8 2 +17 0.3 0.9 48 16 39 -39 2 +17 0.9 0.5 36 -42 42 6 1 +17 0.8 0.1 14 -10 42 37 2 +17 0.4 0.7 17 -31 42 -9 2 +17 0.8 0.1 43 8 49 -37 1 +17 0.1 0.2 30 -44 33 -5 2 +17 0.8 0.1 21 -49 9 -23 1 +17 0.3 0.6 38 29 50 36 2 +17 0.2 0.4 10 -15 43 13 2 +17 0.1 0.8 15 -9 33 17 2 +17 0.2 0.1 39 -32 42 37 2 +17 0.6 0.1 45 -47 26 -30 1 +17 0.1 0.9 36 10 38 11 2 +17 0.8 0.2 7 -35 38 -20 2 +17 0.1 0.8 27 -19 -16 -20 2 +17 0.2 0.9 0 -9 33 -32 2 +17 0.1 0.9 27 -50 9 -21 2 +17 0.1 0.7 -31 -47 41 -31 2 +17 0.5 0.1 -1 -30 49 -27 2 +17 0.9 0.5 -9 -43 -24 -47 1 +17 0.5 0.2 -8 -35 36 8 2 +17 0.1 0.4 20 -40 -12 -27 1 +17 0.6 0.9 34 15 49 -30 2 +17 0.6 0.9 -4 -6 48 14 2 +17 0.9 0.4 26 -20 -33 -39 1 +17 0.2 0.3 40 -43 -20 -47 1 +17 0.5 0.6 -15 -22 43 30 2 +17 0.8 0.2 19 2 -21 -26 1 +17 0.7 0.9 33 29 24 -35 1 +17 0.5 0.1 48 37 19 -36 1 +17 0.2 0.2 35 -24 -2 -27 1 +17 0.1 0.1 -19 -32 33 8 2 +17 0.4 0.1 -12 -36 21 18 2 +17 0.9 0.7 -13 -25 33 -27 2 +17 0.2 0.1 40 -19 -20 -26 1 +17 0.2 0.9 23 -8 -8 -13 1 +17 0.8 0.7 24 7 -31 -45 1 +17 0.8 0.1 15 -28 -23 -49 1 +17 0.1 0.5 28 -22 3 -22 2 +17 0.8 0.4 22 -40 43 -4 2 +17 0.7 0.2 28 22 43 4 1 +17 0.3 0.9 36 -12 19 -22 2 +17 0.7 0.7 28 -47 -9 -11 1 +17 0.2 0.1 45 15 5 -36 1 +17 0.2 0.1 -32 -41 38 -39 2 +17 0.8 0.9 34 -13 -29 -47 1 +17 0.7 0.7 31 -28 45 -2 2 +17 0.8 0.2 -3 -13 49 -21 2 +17 0.3 0.3 38 9 45 -30 1 +17 0.5 0.3 -24 -44 38 -12 2 +17 0.8 0.8 23 -29 48 -9 2 +17 0.7 0.4 -4 -34 3 -8 1 +17 0.1 0.1 5 -42 30 -44 2 +17 0.5 0.8 -34 -43 5 -32 2 +17 0.9 0.9 31 -37 -36 -38 1 +17 0.9 0.7 -11 -38 33 -41 2 +17 0.2 0.7 31 -20 3 -24 1 +17 0.2 0.5 -26 -41 14 9 2 +17 0.8 0.8 -43 -49 21 -10 2 +17 0.6 0.7 15 11 24 5 2 +17 0.8 0.4 39 -5 27 -19 1 +17 0.8 0.3 22 -31 49 -8 1 +17 0.8 0.8 18 7 33 6 2 +17 0.4 0.8 44 38 30 -26 1 +17 0.6 0.2 41 36 50 0 1 +17 0.9 0.2 37 -27 47 -7 1 +17 0.9 0.9 35 -41 -13 -44 1 +17 0.8 0.8 3 -17 21 -22 1 +17 0.3 0.4 -4 -47 -9 -21 2 +17 0.9 0.3 23 -15 0 -19 1 +17 0.1 0.7 27 -4 -23 -48 1 +17 0.3 0.8 46 -44 45 -16 2 +17 0.2 0.5 -22 -33 37 -45 2 +17 0.7 0.6 -14 -15 44 -49 2 +17 0.2 0.4 23 -10 25 -41 2 +17 0.9 0.1 -4 -20 12 6 2 +17 0.7 0.4 28 -6 36 18 2 +17 0.2 0.1 20 -22 48 -48 1 +17 0.8 0.8 17 -26 30 15 2 +17 0.8 0.9 37 -16 -10 -19 1 
+17 0.4 0.5 -9 -40 21 -6 2 +17 0.1 0.9 41 6 48 35 2 +17 0.9 0.1 -12 -40 17 4 2 +17 0.7 0.8 34 -15 30 22 2 +17 0.8 0.4 25 10 4 -35 1 +17 0.8 0.1 9 -49 -16 -32 1 +17 0.7 0.7 10 -48 45 -13 2 +17 0.8 0.8 26 -20 8 -34 1 +17 0.8 0.1 -8 -20 2 -17 1 +17 0.3 0.5 -12 -50 45 -27 2 +17 0.7 0.8 -36 -37 -40 -46 1 +17 0.1 0.5 -14 -50 37 33 2 +17 0.2 0.7 5 -46 18 -4 2 +17 0.6 0.5 32 -45 26 24 2 +17 0.2 0.8 -13 -25 36 -36 2 +17 0.6 0.8 -20 -39 32 -19 2 +17 0.2 0.2 41 -34 -35 -46 1 +17 0.6 0.5 7 -35 16 13 2 +17 0.2 0.3 47 32 22 -35 1 +17 0.1 0.3 9 -43 36 -14 2 +17 0.9 0.4 25 -18 48 -5 1 +17 0.6 0.3 3 1 -8 -11 1 +17 0.9 0.1 28 3 -17 -45 1 +17 0.4 0.7 22 -4 -35 -38 1 +17 0.7 0.6 45 -2 -4 -49 1 +17 0.3 0.1 17 -20 49 31 2 +17 0.1 0.6 16 12 16 -15 1 +17 0.4 0.2 -2 -36 18 -30 2 +17 0.7 0.5 11 -29 19 -14 2 +17 0.1 0.4 -26 -33 -11 -50 2 +17 0.1 0.8 -35 -47 33 -36 2 +17 0.7 0.3 49 -45 27 -35 1 +17 0.9 0.6 40 -38 36 -15 1 +17 0.1 0.4 32 -16 -18 -22 1 +17 0.1 0.3 27 -23 47 30 2 +17 0.4 0.8 42 12 34 8 2 +17 0.4 0.1 10 -28 -9 -23 1 +17 0.5 0.4 -9 -48 -20 -39 2 +17 0.4 0.4 -20 -50 37 -14 2 +17 0.7 0.5 18 4 15 -15 1 +17 0.4 0.7 -25 -42 34 12 2 +17 0.9 0.7 48 18 22 9 1 +17 0.7 0.7 44 36 1 -34 1 +17 0.1 0.5 5 -17 45 -40 2 +17 0.5 0.3 22 -38 12 4 2 +17 0.1 0.8 39 -19 3 -48 2 +17 0.3 0.5 47 -49 -17 -50 1 +17 0.9 0.1 -20 -38 -2 -16 2 +17 0.2 0.1 42 4 26 -26 1 +17 0.9 0.7 50 -24 19 -38 1 +17 0.9 0.2 49 13 -8 -46 1 +17 0.3 0.1 45 -13 -35 -38 1 +17 0.3 0.4 -5 -30 13 -35 2 +17 0.9 0.4 27 -31 43 36 2 +17 0.4 0.5 33 -22 -29 -33 1 +17 0.6 0.2 40 -40 8 -12 1 +17 0.1 0.3 43 -22 0 -39 1 +17 0.1 0.5 13 -40 18 -34 2 +17 0.5 0.8 -1 -26 41 -46 2 +17 0.5 0.9 -2 -49 25 5 2 +17 0.4 0.8 -32 -36 40 -3 2 +17 0.2 0.8 32 -17 37 3 2 +17 0.7 0.4 18 16 44 -50 1 +17 0.5 0.2 -5 -36 -20 -27 1 +17 0.8 0.6 -12 -17 20 -30 2 +17 0.5 0.1 -36 -41 8 7 2 +17 0.3 0.1 43 -50 13 10 2 +17 0.4 0.5 -16 -39 34 13 2 +17 0.9 0.8 37 8 28 8 2 +17 0.3 0.9 1 -8 8 -13 2 +17 0.7 0.8 4 -31 5 -16 2 +17 0.8 0.9 -17 -36 35 -22 2 +17 0.7 0.8 -23 -25 6 -28 2 +17 0.5 0.2 43 -34 26 -2 2 +17 0.7 0.8 14 -32 17 2 2 +17 0.7 0.9 8 -33 4 -32 2 +17 0.9 0.5 23 -43 -1 -6 1 +17 0.9 0.7 -10 -50 32 3 2 +17 0.7 0.3 6 -30 -12 -19 1 +17 0.2 0.6 48 -4 30 -15 2 +17 0.6 0.1 5 -8 37 14 2 +17 0.7 0.8 17 -4 25 -49 2 +17 0.9 0.4 -10 -47 -5 -46 1 +17 0.6 0.5 45 -4 -26 -38 1 +17 0.3 0.8 -13 -44 31 6 2 +17 0.4 0.9 4 2 -10 -35 1 +17 0.5 0.9 42 16 44 27 2 +17 0.4 0.1 50 0 -27 -38 1 +17 0.7 0.1 37 26 9 -33 1 +17 0.2 0.9 47 -14 36 35 2 +17 0.7 0.3 37 15 47 -32 1 +17 0.1 0.7 -6 -19 36 27 2 +17 0.9 0.1 46 17 2 -47 1 +17 0.4 0.6 8 -12 -28 -45 1 +17 0.4 0.6 8 -44 38 -24 2 +17 0.3 0.7 19 -5 -18 -29 1 +17 0.2 0.8 22 -9 23 -20 2 +17 0.1 0.7 21 -13 23 0 2 +17 0.2 0.4 -6 -8 34 -41 2 +17 0.6 0.6 10 -32 45 28 2 +17 0.1 0.7 39 23 49 16 2 +17 0.1 0.7 29 7 30 -27 2 +17 0.8 0.3 6 -46 -8 -29 1 +17 0.2 0.7 46 -44 28 -26 2 +17 0.7 0.6 19 -20 13 -49 1 +17 0.3 0.2 28 21 -22 -48 1 +18 0.5 0.5 21 -3 35 -17 2 +18 0.4 0.7 9 -38 4 -42 2 +18 0.3 0.4 -13 -42 35 13 2 +18 0.8 0.8 30 -41 32 -47 2 +18 0.1 0.8 34 -4 -20 -35 1 +18 0.4 0.8 14 -25 8 -43 2 +18 0.5 0.6 42 27 21 -38 1 +18 0.5 0.2 -8 -41 49 -17 2 +18 0.7 0.4 45 4 -11 -16 1 +18 0.5 0.7 1 -2 -7 -41 1 +18 0.6 0.3 -1 -35 10 1 2 +18 0.4 0.2 0 -12 -21 -31 2 +18 0.2 0.7 0 -21 -7 -12 2 +18 0.7 0.3 27 -16 -15 -19 1 +18 0.2 0.3 9 -10 25 -22 2 +18 0.5 0.9 -21 -29 48 11 2 +18 0.2 0.1 38 -13 -23 -46 1 +18 0.3 0.5 23 9 15 -47 1 +18 0.9 0.8 9 -1 39 4 2 +18 0.1 0.4 17 9 31 -28 1 +18 0.4 0.6 3 -15 27 -23 2 +18 0.3 0.6 21 -6 48 -10 2 +18 0.1 0.8 46 14 42 19 2 +18 0.6 0.4 47 -19 46 -34 1 +18 0.1 0.3 -32 
-50 -9 -11 2 +18 0.2 0.2 9 -38 -32 -33 1 +18 0.9 0.4 -10 -23 25 -40 2 +18 0.4 0.1 -24 -31 -33 -39 2 +18 0.6 0.8 9 -41 -8 -50 1 +18 0.2 0.5 50 38 22 -24 1 +18 0.1 0.3 -10 -44 22 19 2 +18 0.9 0.4 35 17 28 13 1 +18 0.4 0.8 22 5 30 11 2 +18 0.1 0.7 36 9 8 4 1 +18 0.3 0.5 -7 -14 11 0 2 +18 0.9 0.7 33 -46 21 -32 1 +18 0.8 0.2 -16 -24 30 0 2 +18 0.8 0.4 50 8 19 -8 1 +18 0.5 0.7 21 -10 49 10 2 +18 0.6 0.7 48 14 49 9 1 +18 0.3 0.9 -30 -47 -4 -39 2 +18 0.3 0.4 -22 -28 35 -28 2 +18 0.1 0.4 27 11 -25 -29 1 +18 0.9 0.1 20 17 35 6 1 +18 0.5 0.4 44 -16 -32 -46 1 +18 0.7 0.6 35 -17 -30 -33 1 +18 0.3 0.6 36 24 25 -34 1 +18 0.4 0.9 50 -26 8 -17 2 +18 0.9 0.2 20 11 30 -41 1 +18 0.5 0.6 -14 -49 22 -42 2 +18 0.9 0.9 36 -34 21 -23 1 +18 0.8 0.7 29 -3 23 17 2 +18 0.5 0.8 35 14 -37 -49 1 +18 0.2 0.6 35 -1 44 -40 2 +18 0.9 0.8 47 37 -28 -42 1 +18 0.6 0.5 -40 -43 39 -27 2 +18 0.9 0.1 26 8 44 12 1 +18 0.3 0.7 39 -17 -2 -27 1 +18 0.7 0.6 -30 -40 39 -28 2 +18 0.2 0.9 32 -17 47 -43 2 +18 0.7 0.7 17 -47 21 -14 2 +18 0.6 0.7 -34 -40 47 -15 2 +18 0.9 0.1 20 16 -17 -32 1 +18 0.2 0.4 47 39 47 25 1 +18 0.5 0.4 9 -50 10 -31 1 +18 0.2 0.8 17 1 45 10 2 +18 0.4 0.5 -28 -33 33 -19 2 +18 0.9 0.8 19 5 17 -7 1 +18 0.6 0.5 28 25 49 -2 1 +18 0.4 0.1 33 29 -10 -46 1 +18 0.4 0.9 -15 -31 44 26 2 +18 0.1 0.1 -26 -28 11 -40 1 +18 0.8 0.3 41 -33 41 8 2 +18 0.4 0.7 1 -35 13 -46 2 +18 0.8 0.2 31 -36 19 -23 1 +18 0.4 0.9 24 -30 21 -45 2 +18 0.2 0.1 41 -26 7 -46 1 +18 0.9 0.9 -34 -39 29 -46 2 +18 0.9 0.9 12 -39 13 -32 1 +18 0.4 0.5 39 -18 5 -34 1 +18 0.2 0.2 8 -27 -38 -42 1 +18 0.6 0.3 36 -16 12 -38 1 +18 0.2 0.5 10 -48 24 20 2 +18 0.7 0.2 34 28 49 39 2 +18 0.3 0.5 44 -47 -20 -47 1 +18 0.3 0.5 21 3 16 -6 1 +18 0.6 0.1 21 19 -18 -47 1 +18 0.7 0.7 47 -23 29 -20 2 +18 0.4 0.6 25 18 16 -40 1 +18 0.6 0.4 -39 -50 -11 -13 2 +18 0.4 0.9 28 -35 12 -49 2 +18 0.1 0.4 14 -40 32 -28 2 +18 0.5 0.6 35 -17 36 -40 1 +18 0.6 0.6 31 15 28 24 1 +18 0.5 0.2 43 -21 -19 -36 1 +18 0.7 0.9 30 14 29 -41 1 +18 0.8 0.3 21 -42 48 35 2 +18 0.9 0.5 38 32 32 -31 1 +18 0.9 0.6 47 -34 35 20 1 +18 0.4 0.2 -25 -48 -40 -47 1 +18 0.2 0.5 -24 -28 46 44 2 +18 0.4 0.6 46 -35 18 -26 2 +18 0.7 0.4 11 -20 30 27 2 +18 0.4 0.3 37 -33 -3 -36 1 +18 0.7 0.2 -18 -31 40 -1 2 +18 0.9 0.4 17 -40 -26 -43 1 +18 0.1 0.6 12 4 -5 -35 1 +18 0.1 0.8 35 24 42 -3 2 +18 0.5 0.9 19 -2 5 -29 2 +18 0.8 0.9 42 -19 23 9 1 +18 0.3 0.9 37 -15 -6 -11 1 +18 0.4 0.5 46 36 -17 -40 1 +18 0.1 0.9 39 -23 38 -50 2 +18 0.3 0.3 24 18 -1 -19 1 +18 0.5 0.3 22 10 6 -50 1 +18 0.7 0.8 14 -41 25 -30 2 +18 0.8 0.2 47 42 41 29 1 +18 0.3 0.1 17 -2 47 -5 1 +18 0.6 0.9 50 37 39 -29 1 +18 0.9 0.1 14 11 6 -38 1 +18 0.8 0.3 29 -20 15 5 1 +18 0.6 0.1 -7 -33 47 -27 2 +18 0.4 0.6 33 -24 40 -6 2 +18 0.7 0.3 -35 -40 48 -36 2 +18 0.1 0.5 32 16 24 -20 1 +18 0.7 0.7 47 -47 32 30 2 +18 0.8 0.9 49 22 38 -48 1 +18 0.2 0.3 38 -27 6 -24 2 +18 0.5 0.3 21 -25 41 -16 1 +18 0.6 0.9 26 -10 32 -48 2 +18 0.7 0.5 -16 -21 45 39 2 +18 0.2 0.3 2 -30 29 -8 2 +18 0.5 0.3 16 5 40 6 2 +18 0.6 0.3 19 -18 21 -29 1 +18 0.8 0.9 48 -1 13 8 1 +18 0.9 0.3 19 18 -5 -46 1 +18 0.1 0.7 30 -15 19 -8 2 +18 0.3 0.5 -28 -50 -16 -47 1 +18 0.1 0.1 50 -44 35 12 2 +18 0.1 0.8 20 -43 35 26 2 +18 0.3 0.4 33 -37 23 -25 2 +18 0.7 0.5 8 2 -8 -37 1 +18 0.3 0.2 3 -15 -10 -39 1 +18 0.4 0.6 -15 -50 19 -13 2 +18 0.3 0.3 18 12 43 21 2 +18 0.7 0.5 40 38 11 -7 1 +18 0.9 0.4 16 -34 -16 -36 1 +18 0.2 0.8 44 -32 -38 -48 1 +18 0.7 0.5 50 26 42 39 2 +18 0.4 0.5 37 -18 45 -4 2 +18 0.3 0.8 22 -40 17 -12 2 +18 0.5 0.3 5 -11 36 -21 2 +18 0.5 0.3 2 -42 38 23 2 +18 0.7 0.8 -11 -39 33 4 2 +18 0.4 0.4 39 11 
-28 -37 1 +18 0.5 0.1 -12 -36 -41 -49 1 +18 0.8 0.3 -5 -13 -29 -32 1 +18 0.2 0.8 12 4 42 0 2 +18 0.8 0.1 3 -14 6 -19 1 +18 0.1 0.4 -8 -40 -22 -48 2 +18 0.3 0.4 36 -27 20 -46 1 +18 0.3 0.2 41 6 42 39 2 +18 0.2 0.4 19 -7 9 -27 1 +18 0.9 0.6 31 5 16 -46 1 +18 0.3 0.5 -9 -18 10 -38 2 +18 0.8 0.9 34 -4 34 11 2 +18 0.3 0.3 38 14 14 -48 1 +18 0.3 0.7 28 3 25 7 2 +18 0.2 0.8 11 -17 12 -27 2 +18 0.1 0.2 1 -32 -22 -24 2 +18 0.4 0.2 -5 -30 19 -37 1 +18 0.9 0.8 -20 -22 -23 -25 1 +18 0.5 0.1 8 -42 19 -25 1 +18 0.1 0.7 41 31 13 -42 1 +18 0.5 0.5 -6 -32 -17 -39 1 +18 0.4 0.7 36 -45 44 33 2 +18 0.6 0.6 19 -41 -11 -12 1 +18 0.4 0.5 8 -50 24 -8 2 +18 0.5 0.8 5 -12 5 -9 2 +18 0.5 0.4 48 -5 26 -23 1 +18 0.3 0.9 19 12 10 -30 1 +18 0.6 0.8 12 -27 45 42 2 +18 0.4 0.3 46 5 39 -20 1 +18 0.2 0.7 33 25 31 27 1 +18 0.5 0.5 17 -7 3 -5 2 +18 0.9 0.8 13 -36 43 14 2 +18 0.6 0.7 20 -8 47 -34 2 +18 0.6 0.3 -5 -16 -23 -38 1 +18 0.2 0.7 25 -29 29 18 2 +18 0.2 0.7 -20 -45 44 -40 2 +18 0.9 0.2 50 -38 21 -46 1 +18 0.2 0.2 3 -27 35 7 2 +18 0.8 0.1 43 -34 46 13 2 +18 0.1 0.4 27 -18 -33 -49 1 +18 0.7 0.5 21 -4 41 -2 2 +18 0.2 0.1 46 -6 42 -16 1 +18 0.7 0.6 27 12 0 -21 1 +18 0.3 0.9 38 -47 41 -48 2 +18 0.3 0.4 50 21 45 18 2 +18 0.4 0.6 -17 -49 -9 -44 2 +18 0.5 0.4 37 -12 30 -28 1 +18 0.1 0.3 35 -10 -9 -28 1 +18 0.7 0.5 5 -27 -25 -38 1 +18 0.7 0.8 48 -11 27 -11 1 +18 0.7 0.7 20 -7 41 -22 2 +18 0.3 0.6 9 -38 -14 -16 2 +18 0.6 0.8 12 -25 -11 -46 1 +18 0.1 0.4 38 -4 25 -20 2 +18 0.5 0.7 34 18 -12 -34 1 +18 0.1 0.1 -8 -21 40 -36 2 +18 0.5 0.8 21 7 18 4 2 +18 0.9 0.7 35 22 21 -21 1 +18 0.6 0.2 49 33 35 -8 1 +18 0.5 0.4 18 -7 35 -2 2 +18 0.8 0.3 36 -41 41 -40 1 +18 0.2 0.2 5 -47 -8 -9 2 +18 0.3 0.7 -20 -46 31 19 2 +18 0.1 0.7 -14 -50 49 37 2 +18 0.6 0.6 9 -41 50 16 2 +18 0.7 0.6 7 -31 49 -19 2 +18 0.8 0.2 -18 -29 20 -27 2 +18 0.1 0.3 16 -4 -8 -41 1 +18 0.2 0.2 45 -29 43 19 2 +18 0.4 0.3 6 -22 50 7 2 +18 0.8 0.1 -33 -44 -7 -23 2 +18 0.3 0.2 49 14 -33 -47 1 +18 0.1 0.1 0 -30 32 27 2 +18 0.4 0.6 36 -45 38 -32 1 +18 0.4 0.9 42 25 50 -34 2 +18 0.3 0.2 43 5 20 -45 1 +18 0.1 0.2 -8 -30 45 27 2 +18 0.5 0.4 -10 -27 25 -22 2 +18 0.2 0.6 46 41 34 -29 1 +18 0.6 0.9 13 4 -12 -27 1 +18 0.8 0.4 6 -8 -29 -48 1 +18 0.3 0.9 26 -45 -34 -35 1 +18 0.8 0.4 48 -10 -26 -48 1 +18 0.6 0.1 1 -8 5 -12 2 +18 0.8 0.1 -47 -48 43 37 2 +18 0.9 0.7 27 -17 38 3 1 +18 0.5 0.2 44 8 40 -42 1 +18 0.1 0.7 30 28 33 26 2 +18 0.7 0.9 -5 -12 -4 -28 1 +18 0.3 0.3 43 10 11 -5 1 +18 0.4 0.9 41 -33 25 -48 2 +18 0.2 0.9 32 13 21 -31 1 +18 0.6 0.4 -28 -39 27 16 2 +18 0.5 0.3 -4 -38 40 12 2 +18 0.2 0.5 -3 -31 5 -39 2 +18 0.6 0.2 -18 -32 17 -11 2 +18 0.5 0.1 6 -43 18 -47 1 +18 0.6 0.5 6 -41 35 16 2 +18 0.3 0.4 24 -24 10 -6 2 +18 0.8 0.8 -14 -27 7 -14 2 +18 0.2 0.7 30 -47 5 -13 2 +18 0.8 0.5 -25 -49 23 -9 2 +18 0.2 0.1 39 18 22 -38 1 +18 0.2 0.7 -14 -31 37 -39 2 +18 0.3 0.6 46 -6 8 1 2 +18 0.6 0.1 23 -26 34 5 1 +18 0.5 0.3 49 36 40 -43 1 +18 0.5 0.2 45 18 42 40 2 +18 0.7 0.4 0 -33 20 -12 2 +18 0.4 0.3 46 39 28 -44 1 +18 0.5 0.5 45 -10 30 10 2 +18 0.2 0.3 29 -25 19 -16 2 +18 0.1 0.5 50 -5 43 -37 2 +18 0.9 0.5 30 25 -10 -34 1 +18 0.2 0.8 39 -26 -26 -34 1 +18 0.1 0.1 27 -8 19 0 1 +18 0.5 0.8 50 40 38 34 2 +18 0.7 0.8 9 -6 35 -31 2 +18 0.5 0.9 -47 -48 43 -48 2 +18 0.1 0.6 23 5 -11 -42 1 +18 0.8 0.1 11 -25 34 18 2 +18 0.7 0.2 45 -43 21 -39 1 +18 0.5 0.8 47 26 37 21 2 +18 0.3 0.7 50 43 23 -30 1 +18 0.9 0.7 30 -34 -30 -37 1 +18 0.2 0.6 46 -30 -12 -39 1 +18 0.3 0.4 -23 -47 -14 -36 1 +18 0.4 0.5 -20 -50 11 -45 2 +18 0.4 0.4 33 -31 -1 -3 1 +18 0.5 0.5 25 -29 17 11 2 +18 0.3 0.6 21 -42 46 27 2 +18 0.9 
0.5 -16 -25 -23 -26 2 +18 0.9 0.8 8 -49 -5 -24 1 +18 0.2 0.3 9 -50 0 -14 2 +18 0.6 0.2 24 -13 39 -7 1 +18 0.8 0.2 -3 -39 35 2 2 +18 0.9 0.7 44 38 41 -21 1 +18 0.8 0.6 26 -21 14 -28 1 +18 0.9 0.7 38 -9 45 -28 1 +18 0.3 0.9 49 20 26 -1 1 +18 0.2 0.9 36 26 -20 -29 1 +18 0.3 0.5 -31 -32 17 4 2 +18 0.6 0.3 34 -26 50 44 2 +18 0.8 0.5 35 18 41 7 1 +18 0.9 0.8 -21 -24 10 -33 2 +18 0.8 0.5 38 -41 49 28 2 +19 0.5 0.5 49 -4 -7 -50 1 +19 0.6 0.5 3 -17 45 -39 2 +19 0.2 0.9 19 18 27 8 1 +19 0.9 0.2 50 20 -10 -19 1 +19 0.3 0.6 42 9 13 -40 1 +19 0.6 0.4 28 -14 18 6 2 +19 0.3 0.3 9 -31 -11 -33 1 +19 0.6 0.8 45 21 -18 -50 1 +19 0.7 0.2 50 0 -21 -38 1 +19 0.7 0.2 12 -41 -14 -37 1 +19 0.4 0.6 31 15 -6 -18 1 +19 0.2 0.4 32 -44 47 20 2 +19 0.8 0.5 21 -15 3 -35 1 +19 0.2 0.5 21 4 20 -48 1 +19 0.1 0.5 28 -47 -16 -18 2 +19 0.9 0.5 40 29 33 -26 1 +19 0.7 0.1 44 36 8 -3 1 +19 0.7 0.6 4 -34 18 -29 2 +19 0.3 0.7 16 12 -32 -49 1 +19 0.8 0.3 -23 -35 24 -20 2 +19 0.5 0.1 -20 -30 37 -3 2 +19 0.8 0.9 21 -48 28 6 2 +19 0.7 0.7 21 12 38 -34 2 +19 0.9 0.6 44 -19 44 -49 1 +19 0.2 0.9 6 -8 21 10 2 +19 0.7 0.7 23 -21 50 -43 1 +19 0.1 0.2 -17 -35 16 -7 2 +19 0.5 0.2 14 -45 -30 -48 1 +19 0.4 0.3 21 -24 -21 -22 1 +19 0.3 0.1 13 -46 9 -30 1 +19 0.3 0.4 -22 -38 -25 -41 2 +19 0.7 0.9 48 3 25 12 2 +19 0.1 0.3 50 43 29 -37 1 +19 0.8 0.4 16 8 -33 -40 1 +19 0.2 0.3 49 -5 19 -41 1 +19 0.4 0.7 49 46 50 -9 1 +19 0.2 0.5 -18 -29 9 -14 2 +19 0.4 0.7 23 -45 21 -18 2 +19 0.6 0.4 30 -13 33 -7 1 +19 0.5 0.3 15 -14 42 11 2 +19 0.9 0.6 -15 -21 45 -40 2 +19 0.5 0.7 23 -35 26 24 2 +19 0.1 0.3 27 -40 -5 -21 2 +19 0.1 0.2 -16 -20 43 -45 2 +19 0.8 0.1 33 14 27 -9 1 +19 0.9 0.5 -22 -44 -15 -18 1 +19 0.4 0.1 21 -20 42 -46 1 +19 0.6 0.5 17 -28 45 -50 2 +19 0.2 0.1 8 -2 45 30 2 +19 0.5 0.6 -42 -50 31 -18 2 +19 0.2 0.8 29 3 28 -6 1 +19 0.4 0.2 25 -29 1 -34 1 +19 0.9 0.3 -34 -42 31 -37 2 +19 0.4 0.3 49 -23 -13 -19 1 +19 0.3 0.2 49 34 -6 -41 1 +19 0.4 0.4 0 -45 13 5 2 +19 0.8 0.5 47 -36 41 -5 1 +19 0.5 0.7 -29 -46 -7 -38 2 +19 0.8 0.6 30 24 17 -32 1 +19 0.9 0.1 -18 -49 31 1 2 +19 0.7 0.4 40 -43 50 -36 1 +19 0.2 0.4 38 30 -14 -45 1 +19 0.2 0.7 3 -41 -27 -38 2 +19 0.7 0.9 36 -36 -10 -46 1 +19 0.5 0.5 -9 -13 32 -41 2 +19 0.6 0.7 37 36 -20 -44 1 +19 0.1 0.2 30 1 43 39 2 +19 0.6 0.8 35 -31 46 -25 2 +19 0.2 0.5 24 -12 -29 -48 1 +19 0.5 0.6 -32 -44 -32 -35 1 +19 0.7 0.2 50 -50 39 -34 1 +19 0.5 0.3 4 -27 32 -6 2 +19 0.8 0.9 46 -10 15 -4 1 +19 0.7 0.5 12 -43 43 13 2 +19 0.3 0.8 18 -21 24 12 2 +19 0.4 0.1 25 24 44 -40 1 +19 0.6 0.6 -23 -46 44 -42 2 +19 0.7 0.2 -13 -18 20 -15 2 +19 0.1 0.5 -19 -28 -25 -41 1 +19 0.9 0.7 -14 -19 29 -35 2 +19 0.1 0.1 43 41 20 0 1 +19 0.5 0.7 -7 -35 23 -23 2 +19 0.2 0.7 24 22 43 -26 2 +19 0.2 0.8 32 -19 -7 -41 2 +19 0.3 0.9 15 -30 22 -49 2 +19 0.5 0.5 37 28 -34 -48 1 +19 0.5 0.4 26 -32 -24 -43 1 +19 0.4 0.8 34 33 23 -50 1 +19 0.7 0.2 34 -41 31 -6 1 +19 0.8 0.1 28 -15 32 -16 1 +19 0.2 0.9 41 4 32 -50 2 +19 0.4 0.8 43 -35 49 -22 2 +19 0.6 0.9 43 27 32 22 1 +19 0.1 0.1 36 -50 8 -50 1 +19 0.6 0.1 35 -35 -19 -44 1 +19 0.8 0.4 33 -2 -6 -7 1 +19 0.4 0.4 46 25 6 2 1 +19 0.3 0.2 18 -11 -33 -39 1 +19 0.7 0.5 36 29 -9 -44 1 +19 0.3 0.8 13 -7 1 -25 2 +19 0.6 0.7 -24 -32 21 -35 2 +19 0.7 0.2 9 -20 32 26 2 +19 0.5 0.3 47 4 -31 -33 1 +19 0.8 0.7 -10 -45 6 -35 2 +19 0.4 0.2 -10 -30 39 -33 2 +19 0.6 0.1 -25 -34 28 -45 2 +19 0.7 0.7 23 7 -17 -27 1 +19 0.2 0.2 15 -32 7 -36 2 +19 0.9 0.3 34 28 -1 -10 1 +19 0.3 0.2 49 22 16 -7 1 +19 0.3 0.1 27 2 -7 -24 1 +19 0.5 0.1 25 -48 22 -35 1 +19 0.5 0.5 8 3 49 -48 1 +19 0.1 0.3 -2 -45 12 -45 2 +19 0.1 0.1 -23 -31 29 -48 1 
+19 0.7 0.5 18 0 49 11 2 +19 0.8 0.2 -2 -23 42 9 2 +19 0.2 0.8 9 -10 -8 -50 1 +19 0.4 0.1 -23 -39 8 -12 2 +19 0.7 0.9 21 15 45 28 2 +19 0.5 0.8 -25 -46 16 -42 2 +19 0.5 0.4 -1 -16 22 -39 1 +19 0.5 0.5 5 -37 -4 -24 1 +19 0.2 0.8 -16 -19 -32 -35 1 +19 0.7 0.9 11 -4 -12 -34 1 +19 0.6 0.3 39 -37 25 15 2 +19 0.1 0.8 13 -37 7 -16 2 +19 0.5 0.8 -36 -50 49 -18 2 +19 0.2 0.9 47 1 15 -3 2 +19 0.2 0.2 49 31 6 -35 1 +19 0.9 0.9 45 2 -31 -33 1 +19 0.9 0.9 5 -31 35 -33 2 +19 0.1 0.5 19 16 19 -3 1 +19 0.2 0.5 43 -6 38 -16 2 +19 0.8 0.3 -33 -47 0 -49 2 +19 0.9 0.8 -31 -49 48 -49 2 +19 0.4 0.8 25 -23 24 -11 2 +19 0.5 0.6 42 37 -12 -20 1 +19 0.6 0.4 47 -33 50 38 2 +19 0.7 0.2 -19 -21 17 0 2 +19 0.7 0.2 13 1 -1 -36 1 +19 0.1 0.5 43 -36 -2 -12 2 +19 0.1 0.4 47 42 13 -14 1 +19 0.7 0.1 45 9 32 -6 1 +19 0.8 0.9 33 -34 3 -48 1 +19 0.8 0.2 -14 -25 -1 -34 2 +19 0.4 0.3 16 4 -34 -36 1 +19 0.4 0.5 -2 -47 33 16 2 +19 0.2 0.9 20 -39 -16 -30 1 +19 0.1 0.3 22 17 3 -44 1 +19 0.8 0.3 -8 -42 -23 -31 1 +19 0.7 0.1 48 6 43 5 1 +19 0.6 0.3 9 -35 -47 -48 1 +19 0.9 0.3 36 -43 26 -28 1 +19 0.4 0.1 -40 -48 25 -26 2 +19 0.5 0.2 45 -39 25 -7 2 +19 0.2 0.3 45 -46 11 -15 2 +19 0.3 0.2 43 4 -37 -39 1 +19 0.6 0.5 10 9 45 -36 1 +19 0.6 0.7 30 -12 26 -39 2 +19 0.1 0.3 49 1 19 -4 2 +19 0.8 0.4 24 -46 47 32 2 +19 0.8 0.9 -7 -41 6 -26 2 +19 0.1 0.9 -31 -47 20 -1 2 +19 0.9 0.6 -28 -41 9 0 2 +19 0.7 0.9 -32 -48 44 -11 2 +19 0.3 0.4 47 41 49 -12 1 +19 0.9 0.3 15 -12 18 6 1 +19 0.1 0.3 37 -8 31 11 2 +19 0.7 0.6 4 -41 19 -12 2 +19 0.7 0.7 43 -30 26 -3 1 +19 0.5 0.7 -6 -8 -11 -35 1 +19 0.4 0.8 -20 -21 34 -3 2 +19 0.9 0.1 32 10 48 -3 1 +19 0.9 0.8 16 15 41 21 2 +19 0.7 0.2 46 -15 -10 -45 1 +19 0.9 0.3 30 16 3 -22 1 +19 0.3 0.8 -1 -3 23 -17 2 +19 0.1 0.1 15 -49 44 30 2 +19 0.7 0.5 27 -35 40 -44 1 +19 0.4 0.8 14 -25 48 -41 2 +19 0.5 0.5 31 -24 35 5 2 +19 0.9 0.7 48 1 -32 -37 1 +19 0.2 0.6 50 2 8 -16 1 +19 0.7 0.8 -27 -46 41 -16 2 +19 0.3 0.5 13 -30 25 -5 2 +19 0.1 0.3 23 0 12 -19 1 +19 0.7 0.4 45 16 50 -20 1 +19 0.8 0.2 7 -39 -14 -32 1 +19 0.2 0.7 39 -49 13 -2 2 +19 0.6 0.9 21 -3 33 -1 2 +19 0.9 0.2 48 -3 0 -47 1 +19 0.5 0.1 -16 -28 35 -13 2 +19 0.3 0.3 44 -1 49 -34 1 +19 0.7 0.2 -11 -21 -19 -20 2 +19 0.7 0.4 3 -33 47 7 2 +19 0.7 0.8 -28 -46 37 -36 2 +19 0.7 0.5 -1 -25 15 11 2 +19 0.7 0.2 48 21 -27 -50 1 +19 0.2 0.8 -5 -25 16 -8 2 +19 0.4 0.9 50 22 48 44 1 +19 0.8 0.2 36 -49 14 13 1 +19 0.8 0.9 40 -21 27 -6 1 +19 0.3 0.7 49 3 12 9 1 +19 0.4 0.7 46 -23 17 -29 1 +19 0.9 0.2 -30 -49 41 -9 2 +19 0.1 0.6 23 0 -21 -39 1 +19 0.5 0.1 14 -30 -16 -19 1 +19 0.6 0.1 46 -46 -4 -26 1 +19 0.2 0.4 40 12 39 19 2 +19 0.5 0.9 4 -10 40 4 2 +19 0.9 0.7 -23 -26 18 -45 2 +19 0.7 0.3 14 -22 -30 -38 1 +19 0.6 0.2 32 -35 16 9 2 +19 0.9 0.2 -27 -47 -23 -50 1 +19 0.2 0.2 -23 -25 49 -44 2 +19 0.5 0.7 42 -48 19 -45 2 +19 0.7 0.4 -33 -43 43 -44 2 +19 0.2 0.5 10 -20 8 -19 2 +19 0.2 0.3 1 -17 -19 -47 1 +19 0.1 0.6 48 -27 -12 -48 1 +19 0.7 0.3 30 -40 36 -2 1 +19 0.1 0.5 38 -32 14 -24 2 +19 0.2 0.6 10 -35 0 -27 2 +19 0.6 0.8 25 -6 -19 -27 1 +19 0.4 0.1 -41 -47 -7 -47 1 +19 0.9 0.8 27 -50 49 37 2 +19 0.2 0.7 -43 -50 37 -21 2 +19 0.5 0.2 8 -27 44 42 2 +19 0.6 0.9 49 -32 35 -28 2 +19 0.7 0.9 27 25 2 -39 1 +19 0.7 0.2 45 -2 -5 -30 1 +19 0.2 0.4 46 26 11 -44 1 +19 0.2 0.2 32 -24 25 -31 1 +19 0.3 0.7 40 33 23 -8 1 +19 0.3 0.7 9 -16 5 -23 2 +19 0.1 0.4 38 -45 9 -18 2 +19 0.2 0.2 41 -9 1 -39 1 +19 0.6 0.7 15 -12 -9 -48 1 +19 0.6 0.8 -14 -45 5 -44 2 +19 0.9 0.4 -19 -47 7 -15 2 +19 0.9 0.6 44 -4 27 -29 1 +19 0.6 0.5 0 -15 8 -43 1 +19 0.1 0.3 19 -1 30 25 2 +19 0.8 0.5 19 7 -25 -41 1 +19 0.8 0.2 
-5 -33 44 -24 2 +19 0.8 0.5 20 -34 -22 -40 1 +19 0.9 0.2 -17 -48 -11 -39 1 +19 0.7 0.4 7 -37 43 -21 2 +19 0.2 0.7 35 -20 38 29 2 +19 0.1 0.9 48 40 48 14 2 +19 0.4 0.5 -12 -33 -2 -16 1 +19 0.1 0.9 14 -45 19 17 2 +19 0.5 0.1 17 -33 24 -47 1 +19 0.7 0.6 -7 -23 25 5 2 +19 0.8 0.3 41 34 -12 -20 1 +19 0.6 0.3 13 3 8 -18 1 +19 0.3 0.5 43 12 -10 -22 1 +19 0.7 0.3 48 -5 35 -33 1 +19 0.3 0.7 -1 -5 -22 -41 1 +19 0.6 0.3 50 22 14 -8 1 +19 0.2 0.5 30 -26 39 -6 2 +19 0.5 0.5 12 -7 21 11 2 +19 0.4 0.4 41 -50 -13 -50 1 +19 0.2 0.9 35 -49 -1 -31 2 +19 0.2 0.2 11 -44 16 -16 2 +19 0.1 0.2 40 -9 10 -50 1 +19 0.1 0.4 -25 -42 -15 -20 2 +19 0.8 0.8 30 -43 29 -8 1 +19 0.3 0.5 -9 -25 50 45 2 +19 0.2 0.4 30 -33 20 -29 2 +19 0.2 0.8 45 25 -5 -8 1 +19 0.6 0.5 22 -21 0 -21 1 +19 0.8 0.2 -12 -36 28 -34 2 +19 0.8 0.9 27 -26 -31 -40 1 +19 0.2 0.8 -21 -44 28 -23 2 +19 0.9 0.6 -10 -16 28 27 2 +19 0.8 0.5 45 -45 -15 -39 1 +19 0.4 0.8 5 2 -10 -13 2 +19 0.4 0.1 44 -7 44 -25 1 +19 0.8 0.4 18 5 19 3 1 +19 0.6 0.4 48 12 -48 -50 1 +19 0.6 0.5 49 -47 37 -38 1 +19 0.1 0.4 -36 -49 15 -49 2 +19 0.2 0.1 41 -5 -27 -35 1 +19 0.6 0.7 12 -5 5 -32 1 +19 0.3 0.3 -9 -32 18 -22 2 +19 0.8 0.5 25 11 -31 -32 1 +19 0.5 0.8 8 -27 41 -20 2 +19 0.5 0.6 8 -31 47 22 2 +19 0.9 0.1 37 13 38 -24 1 +19 0.6 0.2 -4 -50 18 -42 2 +19 0.7 0.7 10 -1 3 -2 2 +19 0.3 0.3 7 -35 12 -15 2 +19 0.3 0.6 42 -5 28 -50 1 +19 0.5 0.6 12 -27 26 20 2 +19 0.7 0.2 -7 -50 31 -50 1 +19 0.9 0.9 2 -35 47 37 2 +19 0.5 0.4 23 18 25 0 1 +19 0.4 0.8 18 -6 7 -35 1 +20 0.9 0.3 27 0 40 20 2 +20 0.4 0.8 4 -12 39 -9 2 +20 0.8 0.6 -18 -48 26 -37 2 +20 0.6 0.4 43 -21 20 -26 1 +20 0.1 0.9 26 13 5 1 1 +20 0.8 0.3 45 39 41 8 1 +20 0.8 0.1 39 -41 27 -5 1 +20 0.5 0.2 49 -10 -7 -15 1 +20 0.1 0.6 49 -35 48 -19 2 +20 0.2 0.8 26 -8 10 -20 1 +20 0.6 0.1 34 5 35 8 1 +20 0.3 0.5 -17 -47 31 -4 2 +20 0.5 0.2 48 -29 14 -3 1 +20 0.2 0.5 25 2 38 26 2 +20 0.6 0.1 31 -6 30 -8 1 +20 0.6 0.2 10 -27 -6 -14 1 +20 0.6 0.4 28 17 37 20 2 +20 0.1 0.7 36 13 20 0 1 +20 0.1 0.9 12 -37 -11 -25 2 +20 0.8 0.9 -9 -15 47 11 2 +20 0.9 0.1 43 1 -7 -31 1 +20 0.6 0.4 -13 -41 9 -25 2 +20 0.4 0.6 36 -26 14 -9 1 +20 0.2 0.8 -1 -36 2 -42 2 +20 0.4 0.4 32 5 45 30 2 +20 0.5 0.3 -22 -43 -6 -8 2 +20 0.6 0.2 21 13 -23 -43 1 +20 0.7 0.6 28 20 16 -30 1 +20 0.3 0.9 16 -32 24 14 2 +20 0.6 0.6 -26 -29 -6 -31 2 +20 0.1 0.5 43 33 22 -43 1 +20 0.3 0.2 7 -39 3 -19 2 +20 0.4 0.7 38 -3 22 3 2 +20 0.7 0.5 45 12 27 -16 1 +20 0.3 0.8 -18 -43 43 30 2 +20 0.6 0.2 37 17 25 -16 1 +20 0.4 0.7 31 -29 40 -44 2 +20 0.2 0.3 -1 -4 -3 -20 1 +20 0.1 0.3 33 19 33 15 1 +20 0.4 0.6 45 41 -3 -25 1 +20 0.3 0.3 34 -16 34 -11 1 +20 0.6 0.5 19 17 17 -40 1 +20 0.2 0.6 24 8 26 -31 2 +20 0.9 0.5 31 29 42 4 1 +20 0.7 0.1 -11 -23 14 -13 2 +20 0.4 0.6 -23 -43 13 -7 2 +20 0.8 0.2 36 -18 34 -30 1 +20 0.6 0.8 -33 -50 26 -8 2 +20 0.9 0.6 3 -31 34 -14 2 +20 0.5 0.6 27 6 21 -10 1 +20 0.7 0.2 -11 -29 13 -47 1 +20 0.5 0.6 -9 -48 19 -50 2 +20 0.7 0.9 3 -14 -34 -38 1 +20 0.1 0.4 34 -14 39 -2 2 +20 0.3 0.2 8 -36 37 -3 2 +20 0.7 0.2 2 -43 -30 -46 1 +20 0.5 0.7 37 -11 36 -9 2 +20 0.8 0.7 39 -4 25 -38 1 +20 0.6 0.7 25 -34 -16 -18 1 +20 0.6 0.2 21 5 23 21 2 +20 0.9 0.8 -16 -22 25 -25 2 +20 0.5 0.2 -11 -27 19 0 2 +20 0.1 0.9 20 6 31 -9 2 +20 0.6 0.1 19 -45 6 -37 1 +20 0.8 0.4 -4 -46 20 -48 1 +20 0.5 0.7 20 4 44 5 2 +20 0.9 0.1 1 -49 24 5 2 +20 0.4 0.7 35 9 33 -8 1 +20 0.5 0.6 26 -44 -12 -21 1 +20 0.6 0.2 12 -40 44 -29 1 +20 0.8 0.8 31 -24 25 15 2 +20 0.9 0.4 27 9 31 -47 1 +20 0.8 0.5 -45 -46 8 -1 2 +20 0.4 0.8 50 -49 20 -35 2 +20 0.9 0.5 -8 -17 43 -44 2 +20 0.9 0.6 43 -10 32 -13 1 +20 0.9 0.3 
31 9 38 -12 1 +20 0.4 0.3 46 -38 -30 -37 1 +20 0.1 0.4 47 -44 13 4 2 +20 0.5 0.7 14 -18 5 -10 2 +20 0.6 0.6 6 -8 10 7 2 +20 0.6 0.4 -20 -28 -25 -41 1 +20 0.6 0.5 37 5 -31 -48 1 +20 0.2 0.9 3 -21 -31 -38 1 +20 0.7 0.6 -9 -22 3 -27 2 +20 0.2 0.7 -18 -48 0 -25 2 +20 0.3 0.7 47 46 24 -31 1 +20 0.5 0.4 -40 -43 -16 -39 1 +20 0.8 0.3 37 -42 45 16 2 +20 0.9 0.8 21 -3 34 3 2 +20 0.5 0.2 21 -6 -11 -21 1 +20 0.7 0.7 48 -9 -35 -37 1 +20 0.3 0.8 22 -34 -14 -50 1 +20 0.5 0.7 4 3 19 -2 2 +20 0.3 0.2 -7 -42 20 -21 2 +20 0.5 0.3 -9 -20 -20 -30 2 +20 0.8 0.1 36 -39 -1 -45 1 +20 0.8 0.8 18 -36 -34 -40 1 +20 0.4 0.3 33 1 -14 -20 1 +20 0.8 0.7 37 -21 46 -40 1 +20 0.8 0.7 -37 -42 33 27 2 +20 0.2 0.6 46 -22 45 14 2 +20 0.5 0.6 -4 -39 11 -46 2 +20 0.4 0.2 25 -22 48 47 2 +20 0.8 0.4 38 -13 27 -47 1 +20 0.2 0.9 -26 -48 -20 -22 2 +20 0.6 0.8 -13 -25 30 -4 2 +20 0.4 0.6 -6 -28 29 27 2 +20 0.5 0.6 38 -46 17 -24 2 +20 0.8 0.3 27 -43 27 -1 1 +20 0.4 0.1 29 -2 -2 -7 1 +20 0.5 0.9 -26 -32 14 -11 2 +20 0.8 0.2 40 -47 9 2 1 +20 0.5 0.2 50 17 -21 -25 1 +20 0.8 0.4 38 -44 19 -13 1 +20 0.9 0.3 41 17 28 -25 1 +20 0.7 0.5 34 -3 48 -33 1 +20 0.4 0.8 42 41 44 41 2 +20 0.2 0.7 15 7 3 -31 1 +20 0.7 0.6 -13 -27 40 21 2 +20 0.8 0.5 -5 -12 -14 -33 1 +20 0.6 0.3 3 -23 45 18 2 +20 0.7 0.1 48 45 44 -10 1 +20 0.8 0.8 29 -10 29 -8 1 +20 0.3 0.9 -10 -23 25 -8 2 +20 0.3 0.8 18 -40 48 -34 2 +20 0.3 0.7 30 -33 7 -44 1 +20 0.7 0.4 40 -3 -5 -16 1 +20 0.7 0.1 -25 -38 -24 -26 2 +20 0.7 0.2 49 10 -14 -37 1 +20 0.1 0.5 6 -44 36 -33 2 +20 0.9 0.1 45 -40 38 -29 1 +20 0.4 0.7 34 11 49 20 1 +20 0.5 0.4 12 -48 -15 -35 1 +20 0.2 0.9 -7 -9 -26 -30 1 +20 0.4 0.4 -21 -26 40 -7 2 +20 0.8 0.4 18 -10 33 -1 2 +20 0.2 0.7 25 -14 15 -16 2 +20 0.9 0.4 46 16 42 -25 1 +20 0.9 0.3 44 25 -21 -32 1 +20 0.1 0.1 -4 -41 5 -37 1 +20 0.1 0.2 39 -28 22 -2 2 +20 0.8 0.5 35 -13 2 -7 1 +20 0.5 0.2 32 -47 42 -29 1 +20 0.8 0.6 -3 -25 6 -33 2 +20 0.4 0.4 1 -15 36 -44 1 +20 0.2 0.5 46 2 -1 -43 1 +20 0.9 0.7 29 -3 22 10 1 +20 0.4 0.3 13 -37 19 15 2 +20 0.3 0.2 28 -4 35 19 2 +20 0.1 0.5 -20 -36 19 -32 2 +20 0.9 0.5 18 10 -1 -29 1 +20 0.8 0.7 39 8 8 -10 1 +20 0.4 0.1 3 -24 31 -34 1 +20 0.4 0.3 29 -29 25 14 2 +20 0.7 0.1 -25 -30 2 -19 2 +20 0.1 0.6 22 -17 21 -11 2 +20 0.4 0.9 15 -31 -1 -36 1 +20 0.9 0.2 -3 -47 32 5 2 +20 0.1 0.9 43 -9 43 -44 2 +20 0.6 0.1 40 -40 27 2 1 +20 0.8 0.3 26 -1 49 -6 1 +20 0.6 0.5 48 3 -35 -44 1 +20 0.1 0.1 13 5 40 39 2 +20 0.1 0.5 20 -50 -13 -29 2 +20 0.1 0.6 -4 -19 -28 -34 1 +20 0.5 0.1 47 43 35 4 1 +20 0.4 0.8 -8 -39 48 28 2 +20 0.8 0.5 7 3 49 23 2 +20 0.4 0.9 2 -36 38 12 2 +20 0.6 0.9 47 23 28 -25 1 +20 0.3 0.9 -1 -5 34 -10 2 +20 0.4 0.7 34 -15 27 13 2 +20 0.8 0.3 50 13 32 -45 1 +20 0.8 0.8 13 8 14 -26 1 +20 0.8 0.2 5 -8 48 41 2 +20 0.7 0.6 34 -10 49 37 2 +20 0.7 0.3 40 33 21 -22 1 +20 0.5 0.6 12 -17 45 27 2 +20 0.4 0.1 -30 -41 48 27 2 +20 0.9 0.1 -24 -39 3 -41 2 +20 0.2 0.6 43 -3 34 27 2 +20 0.7 0.9 41 -16 38 12 2 +20 0.4 0.6 18 -30 38 -24 2 +20 0.7 0.5 36 21 28 -43 1 +20 0.5 0.2 43 -15 -18 -20 1 +20 0.6 0.5 2 -39 -15 -32 1 +20 0.1 0.5 -42 -49 -17 -36 2 +20 0.1 0.9 36 -40 -2 -24 2 +20 0.1 0.5 17 -34 10 -38 2 +20 0.7 0.4 11 -18 35 30 2 +20 0.7 0.4 20 -36 -35 -44 1 +20 0.8 0.8 -21 -30 -6 -39 2 +20 0.2 0.8 48 27 -24 -48 1 +20 0.4 0.7 14 -24 -30 -49 1 +20 0.2 0.4 -9 -23 45 -49 2 +20 0.2 0.8 36 -48 25 19 2 +20 0.6 0.9 1 -48 33 16 2 +20 0.7 0.6 27 -31 37 -47 1 +20 0.4 0.5 -5 -49 31 20 2 +20 0.3 0.2 49 -2 46 23 2 +20 0.5 0.9 45 -16 44 -47 2 +20 0.8 0.8 1 -26 49 -1 2 +20 0.9 0.8 -20 -30 3 -9 2 +20 0.1 0.1 29 -47 43 40 2 +20 0.2 0.7 8 -8 -8 -34 1 +20 0.4 0.5 26 13 
28 1 1 +20 0.9 0.4 48 18 25 -2 1 +20 0.5 0.4 47 28 -34 -45 1 +20 0.1 0.9 -40 -43 15 -3 2 +20 0.6 0.5 10 4 32 -25 1 +20 0.3 0.3 11 -14 30 -10 2 +20 0.4 0.7 24 -36 29 -50 2 +20 0.9 0.8 40 -1 39 -35 1 +20 0.7 0.2 -14 -17 -46 -49 1 +20 0.5 0.9 21 17 34 22 2 +20 0.7 0.1 30 -43 36 -35 1 +20 0.5 0.6 8 -28 31 17 2 +20 0.7 0.8 38 21 44 18 1 +20 0.3 0.3 0 -37 29 -26 2 +20 0.7 0.8 23 6 46 -26 2 +20 0.8 0.4 41 8 39 -42 1 +20 0.7 0.5 -12 -27 10 -24 2 +20 0.9 0.1 -29 -49 37 11 2 +20 0.6 0.1 -16 -18 6 -45 1 +20 0.8 0.2 -4 -30 19 3 2 +20 0.7 0.3 9 -43 33 27 2 +20 0.1 0.8 50 30 12 10 1 +20 0.3 0.1 -40 -42 33 -25 2 +20 0.9 0.7 17 -29 36 10 2 +20 0.3 0.8 33 -37 18 16 2 +20 0.5 0.1 -21 -25 13 -27 2 +20 0.9 0.7 -21 -36 -17 -28 1 +20 0.6 0.1 50 37 43 -42 1 +20 0.5 0.2 46 7 29 -17 1 +20 0.3 0.4 19 -40 36 26 2 +20 0.7 0.3 34 -40 43 2 2 +20 0.8 0.3 20 1 26 -6 1 +20 0.2 0.9 6 -26 39 -36 2 +20 0.6 0.7 48 9 29 24 2 +20 0.8 0.5 9 -16 -4 -15 1 +20 0.9 0.3 -9 -48 24 -22 2 +20 0.5 0.5 14 -3 -8 -15 1 +20 0.2 0.3 20 -14 23 -12 2 +20 0.9 0.9 48 -46 24 -29 1 +20 0.7 0.3 41 12 34 -20 1 +20 0.9 0.4 13 -32 31 -37 1 +20 0.2 0.6 30 -22 26 -49 2 +20 0.5 0.9 46 -16 39 -50 2 +20 0.5 0.7 42 34 -37 -39 1 +20 0.4 0.4 24 7 5 -42 1 +20 0.4 0.2 18 -42 11 -21 2 +20 0.8 0.8 -28 -45 13 -1 2 +20 0.3 0.9 20 -38 19 -6 2 +20 0.3 0.5 49 8 -19 -22 1 +20 0.5 0.2 40 -30 -10 -13 1 +20 0.3 0.8 40 29 28 26 1 +20 0.7 0.1 -6 -21 2 -22 1 +20 0.8 0.4 40 -35 -1 -22 1 +20 0.4 0.2 38 -31 1 -7 1 +20 0.7 0.9 13 -6 41 5 2 +20 0.5 0.1 46 31 36 -42 1 +20 0.7 0.7 -27 -49 47 -46 2 +20 0.8 0.3 -14 -33 -28 -40 1 +20 0.8 0.1 7 -31 24 -34 1 +20 0.3 0.4 28 3 26 -37 1 +20 0.8 0.6 22 -4 -14 -18 1 +20 0.7 0.8 -3 -7 14 -45 2 +20 0.3 0.4 46 -32 37 8 2 +20 0.1 0.6 38 -8 47 -16 2 +20 0.6 0.6 25 -33 28 -17 2 +20 0.9 0.8 42 25 18 -44 1 +20 0.8 0.7 44 -50 49 38 2 +20 0.3 0.4 28 -28 22 6 2 +20 0.4 0.6 -21 -47 43 -14 2 +20 0.5 0.2 23 -39 -15 -16 1 +20 0.1 0.3 7 -3 45 20 2 +20 0.7 0.1 21 11 38 -11 1 +20 0.6 0.2 6 -4 43 -17 1 +20 0.9 0.7 48 25 25 16 1 +20 0.4 0.7 6 -29 12 -30 2 +20 0.2 0.7 -47 -49 -20 -49 2 +20 0.7 0.8 -13 -26 -21 -39 1 +20 0.6 0.4 41 33 -20 -30 1 +20 0.8 0.7 -14 -43 19 -47 2 +20 0.1 0.3 5 -48 24 -8 2 +20 0.7 0.5 -8 -48 -3 -19 2 +20 0.5 0.5 40 20 -6 -17 1 +20 0.3 0.9 4 3 41 31 2 +20 0.6 0.6 -10 -18 40 -10 2 +20 0.4 0.4 -13 -36 -23 -48 1 +20 0.7 0.4 -18 -29 28 -27 2 +20 0.1 0.1 40 29 -28 -36 1 +20 0.6 0.6 33 -38 0 -48 1 +20 0.1 0.7 27 -34 5 -15 2 +20 0.7 0.7 -10 -11 3 -12 2 +20 0.6 0.7 24 -36 31 -17 2 +20 0.8 0.2 -29 -50 20 -46 2 +20 0.1 0.3 -9 -29 -26 -46 1 +20 0.6 0.9 15 -41 21 4 2 +21 0.3 0.3 -1 -35 6 -34 2 +21 0.2 0.9 45 25 45 -3 1 +21 0.2 0.1 -18 -29 -11 -46 1 +21 0.7 0.3 -7 -37 18 6 2 +21 0.4 0.4 34 -27 1 -17 1 +21 0.1 0.8 32 20 -2 -23 1 +21 0.8 0.7 42 13 50 31 2 +21 0.8 0.9 23 -15 15 -40 2 +21 0.7 0.6 -27 -42 -16 -39 2 +21 0.7 0.5 40 30 43 -5 1 +21 0.4 0.6 30 13 26 -36 1 +21 0.4 0.6 -4 -46 -9 -37 1 +21 0.9 0.1 42 -48 47 22 1 +21 0.6 0.9 24 2 11 -48 1 +21 0.8 0.5 46 20 40 16 1 +21 0.7 0.3 34 -29 45 6 2 +21 0.1 0.1 22 -22 38 -43 1 +21 0.8 0.7 -21 -30 50 -35 2 +21 0.2 0.9 45 -38 50 -42 2 +21 0.1 0.7 -7 -29 0 -7 2 +21 0.8 0.4 9 -50 -20 -26 1 +21 0.7 0.6 33 31 28 -21 1 +21 0.6 0.7 -17 -49 20 -43 2 +21 0.7 0.9 36 23 -8 -14 1 +21 0.2 0.9 49 -42 10 -13 2 +21 0.3 0.3 -11 -30 -20 -36 2 +21 0.8 0.5 31 7 14 -43 1 +21 0.7 0.7 24 4 12 -14 1 +21 0.7 0.2 -14 -23 -11 -38 1 +21 0.8 0.3 47 -18 15 -24 1 +21 0.5 0.1 -10 -35 44 -14 2 +21 0.3 0.5 41 10 37 -30 1 +21 0.8 0.5 -16 -45 45 39 2 +21 0.3 0.4 -44 -49 17 -40 2 +21 0.1 0.1 32 24 0 -10 1 +21 0.5 0.2 -6 -47 9 -28 2 +21 0.2 0.3 49 
-16 43 -44 2 +21 0.5 0.4 47 -21 -22 -45 1 +21 0.5 0.3 48 41 7 -25 1 +21 0.9 0.2 -20 -41 31 -2 2 +21 0.8 0.9 48 4 36 -37 2 +21 0.1 0.9 -43 -48 -13 -30 2 +21 0.7 0.7 -3 -44 7 -36 2 +21 0.3 0.2 31 0 -21 -27 1 +21 0.4 0.8 6 -41 44 12 2 +21 0.6 0.7 33 -22 25 0 1 +21 0.8 0.8 -24 -43 12 -35 2 +21 0.1 0.4 30 4 -11 -44 1 +21 0.3 0.4 33 -24 24 -18 1 +21 0.1 0.7 31 -23 27 -16 2 +21 0.9 0.8 27 1 27 -21 1 +21 0.4 0.4 -21 -31 -17 -35 2 +21 0.4 0.5 0 -39 20 -49 2 +21 0.9 0.5 25 22 3 -46 1 +21 0.8 0.7 -38 -42 5 -50 2 +21 0.3 0.6 40 23 1 -20 1 +21 0.3 0.9 33 -18 -8 -19 1 +21 0.7 0.9 36 18 -40 -41 1 +21 0.3 0.7 10 -20 50 37 2 +21 0.5 0.4 39 -41 45 -12 1 +21 0.8 0.4 12 -8 -29 -33 1 +21 0.9 0.6 18 2 49 -10 1 +21 0.7 0.1 25 -8 -6 -11 1 +21 0.6 0.7 44 -14 1 -14 1 +21 0.4 0.6 -30 -39 8 -27 2 +21 0.2 0.3 -1 -49 48 -29 2 +21 0.1 0.3 50 -49 10 6 2 +21 0.7 0.7 34 31 1 -27 1 +21 0.1 0.4 28 -19 45 20 2 +21 0.1 0.3 -35 -48 1 -7 2 +21 0.2 0.6 -29 -32 18 -16 2 +21 0.3 0.2 5 -19 34 -8 2 +21 0.6 0.2 -16 -36 -22 -23 2 +21 0.1 0.1 14 -39 23 14 2 +21 0.1 0.9 47 39 -15 -29 1 +21 0.8 0.8 15 -46 34 7 2 +21 0.2 0.7 38 -34 -11 -19 1 +21 0.1 0.5 -12 -47 16 -33 2 +21 0.6 0.1 -14 -24 -7 -40 1 +21 0.8 0.8 9 -13 35 -44 2 +21 0.5 0.8 -11 -14 13 -47 2 +21 0.6 0.1 1 -10 36 18 2 +21 0.6 0.9 7 -25 -35 -41 1 +21 0.7 0.9 3 -12 24 5 2 +21 0.4 0.9 19 -49 45 16 2 +21 0.9 0.6 4 -4 21 -32 2 +21 0.6 0.7 46 -22 30 -21 1 +21 0.9 0.4 -4 -5 30 -41 2 +21 0.1 0.5 40 -35 45 33 2 +21 0.7 0.1 49 -49 45 5 1 +21 0.1 0.3 -34 -48 0 -44 2 +21 0.2 0.2 -4 -45 16 -2 2 +21 0.1 0.1 14 1 -37 -46 2 +21 0.5 0.2 -9 -31 -27 -29 1 +21 0.9 0.4 -2 -20 -22 -29 1 +21 0.2 0.9 13 -10 -7 -43 1 +21 0.5 0.1 -12 -26 -28 -38 2 +21 0.9 0.9 16 -40 47 -4 2 +21 0.5 0.4 -3 -18 49 27 2 +21 0.6 0.1 44 35 2 -9 1 +21 0.1 0.5 -10 -45 16 -19 2 +21 0.9 0.3 30 2 23 -19 1 +21 0.4 0.7 -2 -25 -16 -48 2 +21 0.3 0.9 0 -18 8 -7 2 +21 0.6 0.7 -5 -42 37 17 2 +21 0.1 0.3 48 -39 50 -20 2 +21 0.1 0.8 40 24 15 -34 1 +21 0.5 0.4 36 -48 41 -37 1 +21 0.8 0.4 9 -28 41 11 2 +21 0.2 0.9 -8 -47 -29 -33 2 +21 0.5 0.8 24 9 -22 -48 1 +21 0.7 0.2 28 -42 18 -8 1 +21 0.7 0.2 42 -33 -6 -12 1 +21 0.3 0.3 35 21 35 -6 2 +21 0.3 0.5 13 -40 30 26 2 +21 0.9 0.4 50 35 34 0 1 +21 0.8 0.3 -3 -7 43 -5 1 +21 0.2 0.9 -20 -25 7 1 2 +21 0.1 0.1 -9 -45 40 -34 2 +21 0.5 0.9 -13 -39 50 9 2 +21 0.5 0.7 39 24 23 -44 1 +21 0.1 0.9 48 35 17 1 1 +21 0.2 0.9 -10 -38 18 -30 2 +21 0.5 0.2 -2 -42 3 -4 2 +21 0.7 0.1 15 10 8 -27 1 +21 0.1 0.7 31 -39 -32 -40 2 +21 0.3 0.8 33 0 -39 -46 1 +21 0.3 0.1 50 0 29 -25 1 +21 0.8 0.3 30 -21 -34 -48 1 +21 0.5 0.1 32 -41 35 34 2 +21 0.2 0.6 2 -13 -8 -26 2 +21 0.1 0.8 37 25 1 -3 1 +21 0.1 0.2 18 -6 24 -45 1 +21 0.1 0.3 42 37 -19 -23 1 +21 0.9 0.6 35 31 4 -37 1 +21 0.6 0.3 7 -19 26 16 2 +21 0.8 0.4 11 -18 23 -44 1 +21 0.9 0.3 -6 -31 20 -42 2 +21 0.3 0.7 42 35 19 -47 1 +21 0.7 0.1 22 -38 26 20 2 +21 0.4 0.4 -5 -40 0 -7 2 +21 0.6 0.2 30 -6 33 -49 1 +21 0.1 0.6 43 32 -14 -44 1 +21 0.6 0.2 48 9 2 -4 1 +21 0.1 0.5 -13 -26 21 -30 2 +21 0.9 0.5 19 -2 42 -19 1 +21 0.5 0.3 -26 -42 49 9 2 +21 0.2 0.8 16 -9 32 1 2 +21 0.7 0.2 38 21 42 1 1 +21 0.2 0.5 -37 -50 -37 -40 2 +21 0.5 0.1 -27 -47 45 -28 2 +21 0.1 0.2 5 -13 5 -23 2 +21 0.3 0.4 19 -36 32 12 2 +21 0.8 0.7 -2 -42 40 -20 1 +21 0.2 0.1 -14 -17 11 -38 1 +21 0.7 0.5 -2 -23 49 48 2 +21 0.3 0.4 31 -47 27 8 2 +21 0.1 0.9 32 -31 36 17 2 +21 0.7 0.6 50 0 47 -24 1 +21 0.6 0.2 -42 -43 33 10 2 +21 0.8 0.7 38 -6 -38 -48 1 +21 0.6 0.4 10 -40 32 5 2 +21 0.5 0.1 22 12 -2 -17 1 +21 0.3 0.4 29 -38 -20 -37 1 +21 0.3 0.6 50 40 -5 -41 1 +21 0.9 0.3 36 -50 43 -34 1 +21 0.9 0.8 36 -24 -12 -47 
1 +21 0.9 0.3 20 -20 44 17 2 +21 0.9 0.4 39 -6 37 -43 1 +21 0.6 0.9 -13 -28 13 -47 2 +21 0.3 0.4 -9 -23 29 -34 2 +21 0.3 0.3 26 -40 23 -42 2 +21 0.5 0.7 38 35 36 20 1 +21 0.9 0.1 25 -22 37 23 1 +21 0.5 0.1 30 -38 33 -33 1 +21 0.4 0.9 18 -7 -14 -47 1 +21 0.2 0.2 -20 -36 -9 -50 2 +21 0.6 0.9 19 16 50 -28 2 +21 0.7 0.8 36 -6 -6 -20 1 +21 0.6 0.7 50 -34 46 15 2 +21 0.5 0.6 33 -36 -18 -25 1 +21 0.3 0.7 50 -46 25 -14 2 +21 0.4 0.5 -9 -46 50 -3 2 +21 0.2 0.1 11 -38 26 -12 2 +21 0.7 0.8 38 19 33 11 1 +21 0.5 0.2 39 -23 7 -4 2 +21 0.6 0.9 9 -11 -22 -39 1 +21 0.3 0.7 -21 -44 24 0 2 +21 0.6 0.4 29 -37 7 -34 1 +21 0.9 0.1 16 -26 37 -33 1 +21 0.3 0.4 24 6 20 19 1 +21 0.1 0.1 -17 -50 47 1 2 +21 0.6 0.3 45 -44 -37 -40 1 +21 0.4 0.8 16 -5 49 25 2 +21 0.7 0.7 38 -12 22 14 2 +21 0.9 0.3 -16 -27 -28 -31 1 +21 0.4 0.6 30 -8 -27 -42 1 +21 0.2 0.9 13 5 7 -13 2 +21 0.7 0.2 -22 -41 50 -15 2 +21 0.3 0.5 -35 -43 10 -1 2 +21 0.1 0.7 17 -49 7 -35 1 +21 0.9 0.6 21 7 -31 -45 1 +21 0.6 0.2 19 -8 -4 -22 2 +21 0.4 0.7 41 18 15 -38 1 +21 0.9 0.3 35 -14 50 -21 1 +21 0.5 0.6 41 18 27 -28 1 +21 0.8 0.1 -3 -17 29 -16 1 +21 0.7 0.6 -29 -32 -16 -21 2 +21 0.1 0.3 -17 -18 -4 -33 1 +21 0.3 0.4 34 -17 30 -15 2 +21 0.7 0.4 48 41 38 20 1 +21 0.8 0.1 -15 -27 37 -15 1 +21 0.5 0.2 -20 -43 25 0 2 +21 0.4 0.9 28 15 3 -36 1 +21 0.4 0.9 43 -17 -16 -21 1 +21 0.3 0.4 34 -17 23 -36 1 +21 0.5 0.9 5 -30 40 24 2 +21 0.9 0.2 49 10 0 -17 1 +21 0.3 0.3 46 -27 24 19 2 +21 0.7 0.9 44 2 -19 -25 1 +21 0.9 0.5 -9 -49 37 -5 2 +21 0.1 0.6 40 -19 -30 -42 1 +21 0.6 0.7 15 -14 -3 -45 1 +21 0.3 0.9 48 40 50 28 1 +21 0.1 0.5 -13 -16 8 -35 2 +21 0.8 0.4 50 -4 45 -31 1 +21 0.9 0.3 38 22 50 30 1 +21 0.8 0.9 -2 -46 -17 -37 2 +21 0.3 0.9 23 -17 11 -23 2 +21 0.7 0.8 -9 -50 37 -32 2 +21 0.4 0.9 43 22 1 -37 1 +21 0.2 0.3 19 -25 -35 -48 2 +21 0.3 0.2 26 9 -15 -50 1 +21 0.9 0.5 29 -14 -11 -17 1 +21 0.6 0.9 45 2 -29 -38 1 +21 0.2 0.9 -23 -27 41 -9 2 +21 0.6 0.7 36 8 4 1 1 +21 0.1 0.4 4 -5 11 -21 1 +21 0.5 0.1 49 -18 -35 -43 1 +21 0.1 0.7 16 -36 33 -39 2 +21 0.9 0.6 48 -44 38 -9 1 +21 0.8 0.2 22 -29 -42 -44 1 +21 0.5 0.3 14 -32 -17 -37 1 +21 0.5 0.7 18 -30 20 -32 2 +21 0.3 0.4 40 38 41 28 1 +21 0.3 0.4 47 -43 20 -20 2 +21 0.2 0.9 21 6 26 -19 2 +21 0.8 0.3 24 -18 44 28 2 +21 0.4 0.4 -8 -23 1 -45 2 +21 0.1 0.6 16 -46 49 -3 2 +21 0.9 0.6 19 -42 -20 -42 1 +21 0.8 0.5 -13 -23 39 30 2 +21 0.1 0.6 49 8 46 -18 1 +21 0.1 0.9 32 5 3 -14 2 +21 0.2 0.4 36 35 22 -17 1 +21 0.7 0.8 -9 -43 26 -38 2 +21 0.8 0.7 13 -26 1 -29 1 +21 0.3 0.3 34 27 26 -21 1 +21 0.3 0.7 48 41 -11 -40 1 +21 0.3 0.1 11 -32 -30 -38 1 +21 0.3 0.3 42 -2 -5 -7 2 +21 0.1 0.8 43 4 20 -47 1 +21 0.1 0.5 -9 -31 11 -5 2 +21 0.5 0.9 1 -4 17 5 2 +21 0.7 0.2 26 9 50 49 2 +21 0.9 0.7 0 -47 12 8 2 +21 0.7 0.6 1 -25 48 -36 2 +21 0.6 0.5 19 -4 32 1 1 +21 0.3 0.7 23 5 46 31 2 +21 0.3 0.4 30 -33 7 -10 2 +21 0.8 0.9 7 -47 42 -20 2 +21 0.9 0.6 46 -28 16 -19 1 +21 0.3 0.6 49 41 50 -14 1 +21 0.7 0.5 25 -48 -13 -34 1 +21 0.3 0.9 27 -37 9 -12 2 +21 0.2 0.1 11 -5 -35 -36 1 +21 0.1 0.6 30 -9 0 -23 1 +21 0.1 0.2 -33 -44 34 21 2 +21 0.8 0.2 42 39 6 -26 1 +21 0.4 0.4 -4 -40 -48 -49 1 +21 0.5 0.3 37 19 41 -22 1 +21 0.1 0.6 16 -31 31 -24 2 +21 0.4 0.5 9 -1 49 44 2 +21 0.8 0.7 35 -47 50 48 2 +21 0.6 0.2 5 -18 -11 -15 1 +21 0.2 0.1 27 -13 9 -41 1 +21 0.2 0.4 -5 -31 28 -7 2 +21 0.5 0.8 29 -49 50 -16 2 +21 0.6 0.1 -23 -43 8 -17 2 +21 0.5 0.5 -20 -35 18 9 2 +21 0.4 0.8 23 -33 32 -6 2 +21 0.7 0.4 48 -16 12 11 1 +21 0.2 0.8 41 -2 25 24 2 +21 0.8 0.2 -19 -40 -15 -33 1 +21 0.1 0.9 50 31 20 -19 1 +21 0.8 0.2 7 -38 47 7 2 +21 0.9 0.7 27 6 -17 -23 1 +21 0.2 
0.9 11 -37 12 -7 2 +21 0.4 0.6 36 -13 -15 -36 1 +21 0.3 0.2 -24 -48 4 -42 2 +22 0.9 0.2 23 12 32 -49 1 +22 0.1 0.6 -4 -17 47 -17 2 +22 0.7 0.8 -17 -25 32 -16 2 +22 0.3 0.8 31 -29 13 -7 2 +22 0.9 0.6 47 -28 46 -44 1 +22 0.8 0.4 29 -29 35 34 2 +22 0.4 0.4 -20 -47 13 -3 2 +22 0.4 0.3 24 9 1 -30 1 +22 0.3 0.2 22 -25 27 -34 1 +22 0.1 0.8 -16 -25 0 -27 2 +22 0.5 0.5 12 -19 -2 -14 1 +22 0.3 0.8 32 -19 5 -1 2 +22 0.8 0.3 30 13 3 -23 1 +22 0.8 0.2 39 35 34 30 1 +22 0.3 0.2 3 -4 15 -5 2 +22 0.6 0.5 33 7 26 21 1 +22 0.1 0.3 48 45 18 -44 1 +22 0.7 0.1 16 -7 45 33 2 +22 0.3 0.4 27 -46 20 -26 2 +22 0.7 0.6 39 -13 -14 -22 1 +22 0.2 0.7 22 15 42 -31 1 +22 0.1 0.5 48 5 32 -37 1 +22 0.8 0.8 -19 -35 42 34 2 +22 0.6 0.5 45 -17 23 -35 1 +22 0.2 0.7 4 -34 30 0 2 +22 0.8 0.4 13 8 43 7 2 +22 0.3 0.9 33 19 48 -46 1 +22 0.4 0.3 43 -20 29 10 2 +22 0.3 0.6 45 -17 16 -29 2 +22 0.3 0.2 13 11 41 21 1 +22 0.9 0.9 37 23 -1 -46 1 +22 0.4 0.9 6 -25 -15 -22 1 +22 0.1 0.8 -21 -32 23 5 2 +22 0.6 0.5 -12 -48 -16 -28 2 +22 0.8 0.6 32 6 -9 -31 1 +22 0.9 0.6 -12 -29 25 16 2 +22 0.1 0.7 43 13 13 -25 1 +22 0.9 0.7 41 13 42 -6 2 +22 0.9 0.1 38 -43 12 -19 1 +22 0.3 0.6 50 22 46 -6 1 +22 0.1 0.8 16 -19 -18 -49 2 +22 0.3 0.4 1 -9 46 31 2 +22 0.1 0.2 42 36 8 -2 1 +22 0.9 0.7 5 -19 -18 -49 1 +22 0.2 0.3 13 -3 5 -48 2 +22 0.6 0.8 4 -25 -10 -26 2 +22 0.8 0.6 31 -23 7 -21 1 +22 0.9 0.1 41 2 4 -24 1 +22 0.7 0.8 1 -32 44 8 2 +22 0.7 0.3 40 -49 17 -26 1 +22 0.6 0.3 -11 -33 36 -39 2 +22 0.5 0.7 -27 -34 25 15 2 +22 0.6 0.4 9 -3 26 -26 1 +22 0.1 0.4 -6 -35 28 26 2 +22 0.1 0.6 32 9 49 -44 2 +22 0.9 0.5 29 -12 39 -47 1 +22 0.8 0.8 47 2 -34 -35 1 +22 0.6 0.5 46 -39 41 -17 2 +22 0.3 0.7 38 -13 9 -22 1 +22 0.8 0.3 -17 -39 25 21 2 +22 0.8 0.2 -9 -38 -11 -31 1 +22 0.2 0.8 33 -6 45 40 2 +22 0.1 0.3 -16 -21 17 -46 1 +22 0.1 0.9 23 5 46 7 2 +22 0.8 0.8 24 -26 -2 -27 1 +22 0.4 0.8 23 -19 43 -45 2 +22 0.6 0.5 20 -18 1 -48 1 +22 0.2 0.2 -8 -30 32 -11 2 +22 0.7 0.6 17 16 -15 -21 1 +22 0.7 0.7 46 -5 17 -35 1 +22 0.4 0.8 12 -43 -16 -48 1 +22 0.6 0.2 35 14 -22 -36 2 +22 0.4 0.9 -26 -48 -21 -49 1 +22 0.1 0.9 14 -50 4 -2 2 +22 0.4 0.2 47 -30 41 -38 1 +22 0.8 0.6 13 -7 -9 -19 1 +22 0.4 0.5 50 20 39 -34 1 +22 0.6 0.3 22 5 2 -36 1 +22 0.8 0.8 31 -24 10 -46 1 +22 0.9 0.1 -26 -43 15 -2 2 +22 0.1 0.1 5 2 5 -43 1 +22 0.8 0.2 6 -1 37 3 2 +22 0.2 0.9 50 -40 22 7 2 +22 0.3 0.6 -6 -40 -39 -42 1 +22 0.2 0.6 -4 -44 41 13 2 +22 0.6 0.2 -16 -26 29 -9 2 +22 0.4 0.2 21 16 -27 -43 1 +22 0.3 0.8 49 26 36 -4 1 +22 0.2 0.5 -8 -45 -13 -37 1 +22 0.1 0.1 14 -27 8 -40 2 +22 0.4 0.9 4 -49 14 -42 2 +22 0.7 0.2 35 -47 -14 -40 1 +22 0.5 0.4 37 -44 17 14 2 +22 0.2 0.8 24 8 5 -3 2 +22 0.3 0.6 -20 -31 34 13 2 +22 0.8 0.7 48 24 -12 -13 1 +22 0.8 0.7 41 -36 36 26 2 +22 0.4 0.1 -18 -41 2 -24 2 +22 0.1 0.9 -25 -36 -3 -49 1 +22 0.1 0.3 40 32 -12 -21 1 +22 0.7 0.3 5 -42 15 -9 1 +22 0.3 0.5 21 -13 9 -45 2 +22 0.4 0.1 -5 -10 -22 -35 2 +22 0.4 0.4 35 8 -31 -46 1 +22 0.4 0.2 -11 -28 36 -45 1 +22 0.7 0.1 16 -11 49 19 2 +22 0.7 0.9 16 -21 47 7 2 +22 0.8 0.1 25 -49 9 -49 1 +22 0.2 0.6 3 0 46 -31 2 +22 0.8 0.3 -9 -20 3 -5 2 +22 0.2 0.3 36 -34 12 1 2 +22 0.7 0.2 -18 -39 43 -48 2 +22 0.7 0.6 23 -38 -15 -35 1 +22 0.5 0.1 37 -10 34 -22 1 +22 0.7 0.2 43 -1 7 -30 1 +22 0.4 0.7 42 -22 32 -2 2 +22 0.8 0.9 6 -36 34 9 2 +22 0.1 0.8 -12 -50 29 -2 2 +22 0.5 0.5 -19 -46 -36 -48 1 +22 0.4 0.4 9 -7 -34 -49 1 +22 0.5 0.4 41 22 50 -35 1 +22 0.7 0.4 -39 -45 49 -36 2 +22 0.5 0.1 49 40 28 2 1 +22 0.7 0.1 -13 -27 41 30 2 +22 0.9 0.9 43 33 50 -15 1 +22 0.6 0.5 27 -19 36 -25 1 +22 0.3 0.6 -17 -50 -24 -27 1 +22 0.4 0.5 20 16 32 16 2 
+22 0.8 0.8 24 -20 7 -17 1
+22 0.1 0.2 19 10 22 -18 1
+22 0.1 0.4 8 -12 11 -21 2
+22 0.1 0.2 -8 -33 -29 -36 1
+22 0.2 0.8 23 -38 3 -9 1
[remaining added data rows omitted: several thousand further `+`-prefixed records for subjects 22-31 in the same whitespace-separated 8-column format (subject ID, two probabilities, four signed integer payoffs, choice 1 or 2); the original one-record-per-line diff formatting was lost in extraction, and the span ends truncated mid-record]
-38 18 5 2 +31 0.2 0.2 39 37 12 2 1 +31 0.6 0.2 -26 -34 10 -40 2 +31 0.5 0.2 -17 -46 15 14 2 +31 0.8 0.5 22 -29 31 14 2 +31 0.8 0.4 16 6 29 -5 1 +31 0.8 0.3 -11 -26 22 -19 2 +31 0.6 0.2 20 -41 26 -45 1 +31 0.3 0.9 13 3 34 10 2 +31 0.6 0.9 20 -34 7 -39 1 +31 0.9 0.8 49 21 42 -10 1 +31 0.2 0.7 29 -11 21 0 2 +31 0.3 0.2 40 -9 34 -26 2 +31 0.2 0.8 25 -24 16 -26 2 +31 0.7 0.8 -26 -40 26 -22 2 +31 0.8 0.9 -21 -38 12 -36 2 +31 0.4 0.5 39 28 46 -10 1 +31 0.1 0.8 38 27 10 1 1 +31 0.6 0.5 41 -7 10 3 1 +31 0.4 0.3 -1 -44 -25 -35 1 +31 0.2 0.1 32 -7 50 -14 1 +31 0.4 0.5 47 -38 22 -13 2 +31 0.3 0.5 30 12 -3 -33 1 +31 0.6 0.5 29 11 -7 -50 1 +31 0.4 0.6 19 -19 6 -46 1 +31 0.2 0.4 39 29 20 -33 1 +31 0.7 0.5 10 -36 -29 -34 1 +31 0.5 0.7 48 -19 18 -12 2 +31 0.2 0.4 22 -3 19 -44 1 +31 0.3 0.3 -8 -13 27 15 2 +31 0.7 0.1 6 -4 29 -1 1 +31 0.6 0.3 1 -8 25 -20 1 +31 0.2 0.2 -24 -27 15 -16 2 +31 0.5 0.8 27 10 34 16 2 +31 0.5 0.6 32 7 24 -13 2 +31 0.8 0.7 37 16 15 12 1 +31 0.4 0.4 33 -27 -11 -49 1 +31 0.3 0.9 43 3 -43 -50 1 +31 0.2 0.4 20 -34 46 -10 2 +31 0.5 0.3 22 -12 38 -39 1 +31 0.8 0.6 -18 -46 -22 -45 1 +31 0.1 0.8 46 -23 24 21 2 +31 0.3 0.4 9 -41 40 16 2 +31 0.2 0.6 16 -13 -13 -31 1 +31 0.9 0.8 12 -26 -13 -23 1 +31 0.6 0.9 31 -36 47 5 2 +31 0.4 0.5 14 -19 40 31 2 +31 0.4 0.9 7 -40 -1 -40 2 +31 0.8 0.7 38 17 -29 -34 1 +31 0.5 0.8 24 9 14 3 2 +31 0.2 0.3 43 2 41 -12 1 +31 0.5 0.2 43 12 50 -50 1 +31 0.4 0.2 46 -4 18 -47 1 +31 0.4 0.7 4 -38 -8 -38 2 +31 0.2 0.6 47 -24 38 7 2 +31 0.3 0.4 48 5 27 15 1 +31 0.3 0.8 41 -35 20 -41 2 +31 0.4 0.2 0 -44 5 -9 2 +31 0.9 0.1 37 -20 -1 -29 1 +31 0.3 0.9 48 -17 31 11 2 +31 0.8 0.8 47 42 31 -9 1 +31 0.1 0.6 50 49 -4 -33 1 +31 0.8 0.8 -36 -39 36 -2 2 +31 0.5 0.1 30 -16 -8 -19 1 +31 0.2 0.6 12 2 6 -38 1 +31 0.6 0.3 -13 -49 6 -47 1 +31 0.7 0.1 -20 -43 45 -28 2 +31 0.8 0.9 31 5 7 -31 1 +31 0.1 0.7 -1 -6 39 -26 2 +31 0.5 0.4 27 -5 35 -40 1 +31 0.7 0.1 36 28 49 -13 1 +31 0.9 0.1 24 -3 39 -23 1 +31 0.3 0.1 -2 -25 40 32 2 +31 0.9 0.7 47 25 34 -35 1 +31 0.7 0.2 6 1 21 -4 1 +31 0.2 0.6 -34 -41 12 4 2 +31 0.8 0.6 15 -38 19 -5 2 +31 0.5 0.9 17 -34 34 -8 2 +31 0.6 0.8 44 34 20 -30 1 +31 0.1 0.6 46 24 15 -13 1 +31 0.2 0.6 29 10 49 5 2 +31 0.1 0.7 18 -1 -5 -41 1 +31 0.1 0.9 48 42 -30 -36 1 +31 0.4 0.2 -1 -19 -37 -49 1 +31 0.9 0.4 48 -48 49 45 2 +31 0.6 0.6 -3 -47 0 -24 2 +31 0.7 0.2 -21 -47 23 9 2 +31 0.3 0.2 24 -7 44 -25 1 +31 0.3 0.6 -37 -39 33 -48 2 +31 0.8 0.8 -9 -36 -24 -46 1 +31 0.8 0.4 1 -47 -15 -49 1 +31 0.3 0.1 44 29 19 -23 1 +31 0.7 0.9 28 -21 22 0 2 +31 0.9 0.8 -31 -38 48 3 2 +31 0.6 0.3 47 -7 31 -42 1 +31 0.2 0.7 -4 -23 6 -46 2 +31 0.4 0.8 -4 -44 10 -4 2 +31 0.5 0.1 41 -41 42 -2 1 +31 0.8 0.3 -9 -48 23 -38 2 +31 0.5 0.1 44 -30 38 8 1 +31 0.2 0.8 23 -12 18 -12 2 +31 0.8 0.3 39 -37 16 -43 1 +31 0.7 0.9 41 -43 11 -23 1 +31 0.6 0.3 4 -11 -6 -50 1 +31 0.6 0.8 11 -10 9 -26 2 +31 0.9 0.8 9 0 14 -14 2 +31 0.3 0.6 2 -25 14 -36 2 +31 0.9 0.1 -38 -39 38 12 2 +31 0.7 0.1 35 -24 49 30 2 +31 0.4 0.2 18 -14 11 -27 1 +31 0.7 0.3 -24 -37 38 3 2 +31 0.9 0.1 3 -34 2 -18 1 +31 0.6 0.8 26 -26 23 -26 2 +31 0.6 0.1 8 -41 -5 -26 1 +31 0.1 0.9 39 -31 -20 -42 1 +31 0.9 0.3 40 10 -6 -40 1 +31 0.8 0.4 28 -21 49 -36 1 +31 0.2 0.2 24 -38 -6 -45 1 +31 0.5 0.9 43 -23 -7 -18 1 +31 0.8 0.8 -20 -25 38 -26 2 +31 0.8 0.1 10 -13 18 -50 1 +31 0.9 0.9 -42 -50 41 3 2 +31 0.6 0.8 -8 -44 16 -36 2 +31 0.5 0.2 43 40 16 11 1 +31 0.3 0.6 28 2 33 -47 1 +31 0.6 0.7 12 -34 50 -19 2 +31 0.8 0.5 23 -45 -1 -28 1 +31 0.5 0.2 17 3 0 -3 1 +31 0.2 0.4 15 -26 11 -13 2 +31 0.8 0.5 44 -24 28 -40 1 +31 0.8 0.4 16 -17 11 -26 1 +31 0.1 0.6 8 
-21 35 2 2 +31 0.2 0.9 10 -38 49 -8 2 +31 0.2 0.8 14 -30 -17 -35 1 +31 0.6 0.8 41 -27 45 9 1 +31 0.7 0.8 0 -8 24 3 2 +31 0.1 0.8 11 9 -22 -49 1 +31 0.3 0.6 34 28 38 9 1 +31 0.5 0.9 -5 -50 8 -3 2 +31 0.3 0.7 -31 -37 42 -16 2 +31 0.8 0.7 8 -5 44 -24 2 +31 0.4 0.1 10 -31 23 2 2 +31 0.4 0.9 48 43 42 27 2 +31 0.6 0.9 38 -38 46 -23 2 +31 0.5 0.7 13 3 -32 -42 1 +31 0.5 0.4 45 -4 34 -37 1 +31 0.6 0.3 25 -11 -38 -45 1 +31 0.1 0.9 -16 -30 19 -9 2 +31 0.1 0.2 40 -1 36 0 2 +31 0.9 0.2 32 -8 47 27 2 +31 0.6 0.9 43 -22 35 -39 1 +31 0.6 0.5 25 17 -3 -41 1 +31 0.5 0.5 28 -24 50 -42 1 +31 0.1 0.7 12 -18 45 9 2 +31 0.4 0.8 -43 -48 44 -14 2 +31 0.6 0.6 29 -44 5 2 2 +31 0.8 0.9 -24 -48 36 -7 2 +31 0.8 0.3 3 -1 17 6 2 +31 0.5 0.2 -19 -49 42 -12 2 +31 0.5 0.3 48 -3 -33 -44 1 +31 0.3 0.4 20 -43 50 19 2 +31 0.4 0.5 13 5 35 -22 1 +31 0.9 0.4 33 6 -28 -46 1 +31 0.2 0.8 -15 -25 43 -31 2 +31 0.6 0.1 27 14 23 -46 1 +31 0.9 0.8 18 -40 20 -46 1 +31 0.9 0.7 28 -47 -10 -19 1 +31 0.6 0.3 4 -17 -38 -44 1 +31 0.2 0.2 -11 -38 -19 -32 1 +31 0.7 0.1 11 -31 11 -42 1 +31 0.6 0.6 16 -6 0 -14 1 +31 0.9 0.6 30 12 19 -4 1 +31 0.9 0.3 29 13 36 -9 1 +31 0.4 0.6 43 32 31 -12 1 +31 0.3 0.7 24 -6 19 -46 1 +31 0.6 0.7 -1 -18 33 12 2 +31 0.2 0.7 33 -32 -11 -17 2 +31 0.5 0.1 19 15 12 -37 1 +31 0.8 0.3 -1 -49 10 -20 1 +31 0.5 0.5 -2 -47 15 10 2 +31 0.9 0.7 43 18 49 12 2 +31 0.8 0.4 -5 -46 19 -8 2 +31 0.4 0.6 15 12 20 -2 1 +31 0.5 0.5 -18 -33 25 -14 2 +31 0.1 0.2 23 -17 -4 -35 1 +31 0.1 0.3 42 -23 2 -6 1 +31 0.8 0.9 46 18 30 6 2 +31 0.8 0.8 40 8 27 -3 1 +31 0.6 0.7 31 24 35 28 2 +31 0.3 0.7 31 -27 -6 -35 1 +31 0.8 0.2 -33 -44 16 -41 2 +31 0.1 0.9 16 -13 33 2 2 +31 0.8 0.7 23 19 30 -47 1 +31 0.6 0.6 18 -38 -5 -8 1 +31 0.2 0.5 4 -44 39 -15 2 +31 0.3 0.1 41 -42 -5 -35 1 +31 0.7 0.5 47 -36 28 4 1 +31 0.6 0.5 14 3 -4 -37 1 +31 0.1 0.1 39 7 42 15 2 +31 0.6 0.3 46 17 14 2 1 +31 0.2 0.3 47 -11 38 1 2 +31 0.1 0.8 45 -37 34 -13 2 +31 0.3 0.2 -18 -21 -7 -15 1 +31 0.8 0.4 1 -45 -13 -19 2 +31 0.4 0.2 5 -7 32 12 2 +31 0.3 0.9 21 12 14 -34 1 +31 0.3 0.2 4 -11 25 -34 2 +31 0.8 0.2 50 -35 -16 -38 1 +31 0.4 0.8 44 -9 46 -1 2 +31 0.4 0.5 24 -10 9 -27 2 +31 0.6 0.7 -26 -49 34 -14 2 +31 0.6 0.8 49 7 49 41 1 +31 0.4 0.1 13 -43 3 -17 2 +31 0.1 0.3 29 17 4 -3 1 +31 0.2 0.3 40 -30 36 16 2 +31 0.3 0.8 26 -7 4 -20 2 +31 0.3 0.1 25 -21 -14 -39 1 +31 0.5 0.1 -19 -45 43 -26 2 +31 0.8 0.4 -19 -34 -7 -44 1 +31 0.8 0.1 -43 -48 -34 -39 1 +31 0.2 0.1 12 7 -9 -32 1 +31 0.7 0.5 42 33 27 -33 1 +31 0.8 0.9 -30 -38 -3 -19 2 +31 0.5 0.8 -6 -40 20 18 2 +31 0.8 0.9 28 10 -14 -28 1 +31 0.8 0.6 37 -31 34 -12 1 +31 0.4 0.4 43 -47 16 3 2 +31 0.4 0.1 27 -47 8 -43 1 +31 0.2 0.6 13 -27 -16 -48 2 +31 0.5 0.1 16 -15 32 -35 1 +31 0.8 0.5 -24 -41 40 35 2 +31 0.2 0.5 32 12 38 -1 2 +31 0.8 0.5 -16 -45 46 -20 2 +31 0.3 0.4 -22 -23 42 15 2 +31 0.5 0.6 -7 -29 41 15 2 +31 0.3 0.9 -29 -49 4 -36 2 +31 0.5 0.7 48 12 45 44 1 +31 0.1 0.7 10 -44 31 -37 2 +31 0.3 0.9 36 -31 38 -40 2 +31 0.8 0.3 34 -15 11 -19 1 +31 0.8 0.2 47 -33 2 -23 1 +31 0.3 0.2 -32 -35 23 -31 2 +31 0.5 0.9 10 7 28 -13 2 +31 0.3 0.8 -9 -32 2 -43 2 +31 0.2 0.6 25 -40 -2 -38 1 +31 0.5 0.8 31 -13 27 -28 2 +31 0.7 0.9 -1 -26 49 16 2 +31 0.1 0.5 -44 -45 16 11 2 +31 0.2 0.3 19 -33 43 41 2 +31 0.5 0.5 21 6 38 -10 2 +31 0.4 0.4 7 -5 -13 -45 1 +31 0.8 0.8 36 15 25 16 1 +31 0.1 0.4 -32 -45 -13 -42 2 +31 0.2 0.1 21 17 37 36 2 +31 0.2 0.5 6 -37 47 34 2 +31 0.8 0.6 12 -16 36 -7 2 +31 0.4 0.5 -18 -27 -36 -46 1 +31 0.9 0.9 35 32 48 33 2 +31 0.1 0.7 27 -50 44 25 2 +31 0.9 0.2 -41 -49 29 5 2 +31 0.8 0.5 41 19 17 -18 1 +31 0.8 0.6 19 -40 -9 -18 1 +31 0.7 
0.2 46 -49 32 -43 1 +31 0.1 0.2 11 -36 47 43 2 +31 0.2 0.6 -32 -47 13 -32 2 +31 0.4 0.2 43 -42 24 2 1 +32 0.4 0.6 -30 -39 32 -41 2 +32 0.3 0.8 12 8 45 -35 2 +32 0.8 0.1 13 -12 9 -48 1 +32 0.3 0.4 18 -18 8 -33 1 +32 0.2 0.7 46 8 -29 -35 1 +32 0.4 0.9 24 -18 7 -5 2 +32 0.8 0.2 -37 -39 4 -25 2 +32 0.7 0.6 -19 -48 29 22 2 +32 0.1 0.6 15 -27 46 -45 2 +32 0.8 0.1 26 -4 9 -22 1 +32 0.1 0.1 32 11 17 -20 1 +32 0.4 0.2 48 -47 12 -48 1 +32 0.1 0.6 43 -16 20 -30 2 +32 0.1 0.5 23 -5 7 3 2 +32 0.5 0.4 -12 -20 42 -15 2 +32 0.1 0.4 48 -30 35 15 2 +32 0.4 0.9 -2 -5 43 -25 2 +32 0.5 0.1 -15 -21 49 24 2 +32 0.7 0.5 21 -17 14 -4 1 +32 0.1 0.9 48 14 30 -31 1 +32 0.7 0.2 26 -31 4 -11 1 +32 0.5 0.6 -1 -20 31 14 2 +32 0.7 0.3 10 -9 23 22 2 +32 0.7 0.7 19 -45 -31 -48 1 +32 0.9 0.8 48 -27 49 30 2 +32 0.2 0.4 19 -2 17 -6 2 +32 0.1 0.1 25 18 28 17 1 +32 0.5 0.6 44 -12 47 -28 2 +32 0.7 0.2 50 -27 9 -49 1 +32 0.3 0.3 26 -15 -7 -16 1 +32 0.1 0.6 23 -45 -38 -47 1 +32 0.7 0.4 23 -35 44 -43 1 +32 0.1 0.2 -19 -40 34 17 2 +32 0.4 0.9 45 -30 42 -32 2 +32 0.9 0.8 30 -15 36 -2 2 +32 0.1 0.5 35 31 47 -50 1 +32 0.8 0.8 -13 -21 -17 -45 1 +32 0.3 0.3 23 -23 33 19 2 +32 0.9 0.7 47 45 15 -14 1 +32 0.6 0.6 -13 -25 -5 -16 2 +32 0.1 0.9 12 -41 23 -43 2 +32 0.4 0.7 25 -30 14 5 2 +32 0.2 0.7 35 16 8 -43 1 +32 0.6 0.8 38 -27 -5 -46 1 +32 0.8 0.6 37 -47 23 2 2 +32 0.5 0.2 18 -27 43 22 2 +32 0.7 0.5 29 18 30 18 2 +32 0.9 0.7 50 26 -27 -44 1 +32 0.3 0.6 40 29 44 2 1 +32 0.1 0.1 44 -39 26 8 2 +32 0.2 0.1 48 9 48 19 2 +32 0.1 0.5 -5 -37 39 36 2 +32 0.2 0.9 41 30 16 4 1 +32 0.1 0.9 -11 -17 32 -37 2 +32 0.9 0.8 -28 -29 49 14 2 +32 0.5 0.4 25 14 0 -16 1 +32 0.2 0.7 4 -30 -22 -24 2 +32 0.8 0.7 -14 -23 49 -33 2 +32 0.2 0.4 39 22 49 -49 1 +32 0.9 0.6 25 14 -19 -46 1 +32 0.4 0.1 -19 -33 47 42 2 +32 0.3 0.4 19 -17 33 32 2 +32 0.2 0.5 31 13 1 -33 1 +32 0.2 0.4 -31 -46 3 -34 2 +32 0.2 0.4 -4 -20 -20 -48 1 +32 0.8 0.4 30 8 14 -39 1 +32 0.9 0.2 37 22 7 4 1 +32 0.2 0.6 -26 -29 45 -40 2 +32 0.7 0.8 23 -23 46 -21 2 +32 0.2 0.1 0 -27 -24 -50 1 +32 0.9 0.8 41 -18 4 -50 1 +32 0.9 0.9 29 14 46 -42 2 +32 0.4 0.6 35 15 25 -39 1 +32 0.9 0.7 40 -4 -9 -46 1 +32 0.1 0.5 -30 -41 42 -29 2 +32 0.3 0.6 46 15 45 19 2 +32 0.6 0.1 -13 -34 13 -13 2 +32 0.3 0.4 34 -25 33 -31 2 +32 0.9 0.8 9 -16 36 30 2 +32 0.1 0.9 27 -23 7 -29 2 +32 0.4 0.3 50 47 34 23 2 +32 0.1 0.6 -18 -22 1 -47 2 +32 0.5 0.8 13 -35 -1 -15 2 +32 0.9 0.6 39 -33 -6 -44 1 +32 0.3 0.3 39 -36 42 5 2 +32 0.5 0.6 39 -48 45 -2 2 +32 0.3 0.1 -33 -48 45 -40 1 +32 0.6 0.7 23 -14 33 -36 2 +32 0.1 0.2 48 -41 31 14 2 +32 0.8 0.2 31 -21 50 -12 1 +32 0.3 0.4 23 -42 12 -14 2 +32 0.1 0.1 24 -47 13 -35 2 +32 0.4 0.6 48 -5 26 -39 1 +32 0.4 0.7 4 -6 -40 -42 1 +32 0.5 0.4 37 18 -1 -18 1 +32 0.6 0.1 28 -38 42 6 2 +32 0.8 0.2 33 -27 40 1 1 +32 0.9 0.8 37 -1 9 -42 1 +32 0.7 0.3 27 -34 31 -31 1 +32 0.1 0.7 20 -9 6 -48 2 +32 0.4 0.6 24 -27 -26 -42 1 +32 0.6 0.1 21 -19 45 36 2 +32 0.3 0.7 48 26 -27 -43 1 +32 0.4 0.7 16 13 4 -37 1 +32 0.5 0.1 50 40 27 -45 1 +32 0.8 0.7 42 -38 14 -20 1 +32 0.6 0.8 1 -44 -1 -42 2 +32 0.8 0.4 -16 -26 27 -35 1 +32 0.3 0.1 46 45 10 -11 1 +32 0.6 0.5 33 -50 39 -10 1 +32 0.7 0.8 29 -43 46 -3 2 +32 0.2 0.9 40 -18 -12 -48 1 +32 0.7 0.9 13 -46 49 20 2 +32 0.6 0.8 -25 -47 38 -24 2 +32 0.7 0.5 35 -10 22 -3 1 +32 0.7 0.1 33 6 -19 -23 1 +32 0.9 0.9 -12 -20 19 -17 2 +32 0.8 0.2 30 -32 21 -37 1 +32 0.7 0.6 12 -36 33 -39 2 +32 0.1 0.2 22 -26 -24 -32 1 +32 0.9 0.9 34 -28 -17 -45 1 +32 0.6 0.3 21 -20 23 -39 1 +32 0.6 0.8 6 -9 5 -37 2 +32 0.5 0.4 0 -2 -4 -6 1 +32 0.2 0.7 26 -31 28 10 2 +32 0.1 0.3 27 6 24 -32 1 +32 0.1 0.1 
48 -42 -11 -46 1 +32 0.8 0.8 48 -37 -11 -13 1 +32 0.8 0.7 -19 -20 1 -21 1 +32 0.9 0.4 14 -11 36 25 2 +32 0.2 0.5 34 4 8 -4 1 +32 0.5 0.8 32 -1 6 -38 1 +32 0.3 0.9 31 25 41 -20 2 +32 0.1 0.1 4 -34 46 -50 2 +32 0.3 0.5 14 -11 8 -24 2 +32 0.5 0.7 10 4 48 -38 2 +32 0.8 0.7 35 -13 21 8 1 +32 0.1 0.2 11 -22 37 -27 2 +32 0.6 0.1 8 -40 6 -32 1 +32 0.6 0.9 21 -24 39 -19 2 +32 0.3 0.5 22 -33 41 -19 2 +32 0.8 0.7 21 -40 50 -9 2 +32 0.5 0.3 -7 -41 14 -4 2 +32 0.4 0.7 20 -42 -32 -39 1 +32 0.2 0.3 41 29 -12 -26 1 +32 0.6 0.6 30 -25 47 -17 2 +32 0.3 0.6 41 39 15 -15 1 +32 0.9 0.8 -1 -23 30 10 2 +32 0.2 0.7 29 -40 34 -12 2 +32 0.1 0.1 7 -33 34 -16 2 +32 0.1 0.3 15 -22 7 -14 2 +32 0.9 0.2 38 -16 -17 -31 1 +32 0.8 0.7 29 -7 30 -6 1 +32 0.8 0.4 19 6 18 4 1 +32 0.7 0.3 -4 -38 22 -28 2 +32 0.1 0.3 49 7 23 -39 1 +32 0.2 0.7 31 1 -21 -44 1 +32 0.4 0.9 48 10 38 19 2 +32 0.3 0.7 33 30 -4 -50 1 +32 0.3 0.5 42 -5 -22 -31 1 +32 0.6 0.7 50 -17 -38 -45 1 +32 0.7 0.3 27 -47 40 36 2 +32 0.4 0.4 28 -15 30 -41 1 +32 0.7 0.9 3 2 12 -15 2 +32 0.3 0.8 17 -35 -2 -40 2 +32 0.8 0.3 44 -23 45 3 1 +32 0.6 0.9 40 11 44 43 2 +32 0.9 0.9 31 28 45 3 2 +32 0.5 0.5 -14 -48 12 -16 2 +32 0.6 0.7 18 4 13 5 1 +32 0.8 0.7 41 18 28 -32 1 +32 0.3 0.6 -8 -28 0 -17 2 +32 0.9 0.6 48 -26 20 -26 1 +32 0.6 0.9 21 -16 16 -27 1 +32 0.5 0.9 26 -29 40 39 2 +32 0.3 0.9 36 -44 12 -12 2 +32 0.2 0.4 40 -1 19 10 2 +32 0.5 0.7 45 -38 44 -21 2 +32 0.5 0.4 39 -10 -3 -38 1 +32 0.5 0.6 -16 -29 29 -27 2 +32 0.4 0.3 47 -11 19 -8 1 +32 0.6 0.2 18 -29 7 -26 1 +32 0.5 0.3 36 -19 7 -17 1 +32 0.3 0.5 34 26 -28 -29 1 +32 0.6 0.8 20 -36 40 25 2 +32 0.8 0.8 -27 -37 24 17 2 +32 0.2 0.5 40 3 50 22 2 +32 0.1 0.2 24 -39 -39 -41 1 +32 0.8 0.6 -6 -40 14 3 2 +32 0.1 0.8 32 11 40 8 2 +32 0.6 0.5 9 -20 47 -4 2 +32 0.5 0.6 44 -47 -30 -42 1 +32 0.8 0.5 -21 -35 1 -23 2 +32 0.4 0.9 1 -44 3 -15 2 +32 0.5 0.5 -7 -30 10 -42 2 +32 0.6 0.6 -2 -30 32 -29 2 +32 0.3 0.1 -8 -40 17 -9 2 +32 0.4 0.2 13 -21 22 -10 1 +32 0.7 0.7 -1 -48 -39 -42 2 +32 0.3 0.5 8 -14 35 -4 2 +32 0.2 0.2 25 17 12 -17 1 +32 0.8 0.2 41 31 -4 -49 1 +32 0.7 0.1 3 -3 31 -2 2 +32 0.1 0.1 49 -19 45 39 2 +32 0.2 0.4 -1 -45 -15 -38 2 +32 0.9 0.6 -27 -45 -27 -38 2 +32 0.7 0.4 20 -45 -17 -29 1 +32 0.7 0.5 49 -36 39 18 2 +32 0.9 0.5 45 16 33 -36 1 +32 0.3 0.7 47 41 25 -30 1 +32 0.5 0.8 -13 -43 4 -29 2 +32 0.1 0.3 -8 -11 -25 -42 1 +32 0.4 0.2 36 -32 -16 -20 1 +32 0.7 0.5 -31 -40 40 1 2 +32 0.8 0.8 26 -9 -12 -22 1 +32 0.2 0.6 28 -46 -25 -29 1 +32 0.8 0.2 36 1 -6 -34 1 +32 0.6 0.9 42 -39 48 -19 2 +32 0.8 0.3 -10 -49 2 -43 2 +32 0.8 0.2 16 -30 12 -24 1 +32 0.9 0.8 0 -2 -16 -43 1 +32 0.1 0.5 3 -21 -43 -49 1 +32 0.7 0.5 36 26 47 -50 1 +32 0.1 0.1 26 -29 -20 -24 1 +32 0.7 0.3 27 -44 12 1 1 +32 0.7 0.9 -8 -45 36 -43 2 +32 0.1 0.6 25 2 -7 -26 1 +32 0.8 0.1 46 -9 27 -35 1 +32 0.9 0.4 -1 -4 -15 -37 2 +32 0.2 0.6 22 -31 -13 -49 1 +32 0.5 0.1 42 -38 -26 -30 1 +32 0.7 0.3 48 9 1 -25 1 +32 0.6 0.4 22 4 28 -4 1 +32 0.6 0.2 23 15 2 -28 1 +32 0.5 0.1 19 9 46 16 1 +32 0.1 0.1 7 1 37 -37 1 +32 0.3 0.4 39 -13 25 4 2 +32 0.2 0.3 35 26 6 -21 1 +32 0.8 0.8 38 -32 42 -30 2 +32 0.9 0.3 26 -16 -28 -43 1 +32 0.1 0.3 22 -10 -39 -43 1 +32 0.9 0.4 -4 -26 39 34 2 +32 0.4 0.8 18 -41 25 -46 2 +32 0.7 0.2 -4 -15 50 -46 1 +32 0.6 0.1 -34 -36 30 -23 2 +32 0.5 0.1 32 -1 7 -35 1 +32 0.7 0.7 8 -37 42 -2 2 +32 0.9 0.7 47 5 19 12 1 +32 0.7 0.8 42 7 10 -34 1 +32 0.9 0.1 34 23 -6 -26 1 +32 0.2 0.5 28 -48 8 -42 2 +32 0.9 0.3 7 -6 33 -33 1 +32 0.3 0.3 -16 -48 42 -17 2 +32 0.3 0.1 37 25 5 -42 1 +32 0.8 0.1 46 39 39 13 1 +32 0.4 0.5 -7 -48 36 -7 2 +32 0.8 0.1 -8 -39 37 13 2 +32 
0.1 0.7 -17 -40 45 -23 2 +32 0.1 0.4 37 -8 21 -35 2 +32 0.9 0.3 13 -10 34 -14 2 +32 0.1 0.9 25 10 34 11 1 +32 0.3 0.8 -21 -33 31 -11 2 +32 0.5 0.7 23 4 49 37 2 +32 0.8 0.2 1 -9 25 12 2 +32 0.1 0.1 -1 -50 -26 -37 2 +32 0.2 0.7 5 -21 -4 -34 2 +32 0.6 0.8 -19 -35 23 -6 2 +32 0.1 0.4 45 40 18 -30 1 +32 0.3 0.2 18 -39 4 -1 2 +32 0.9 0.9 23 -11 44 1 2 +32 0.5 0.9 21 -17 10 -41 2 +32 0.3 0.6 34 -47 44 -26 2 +32 0.4 0.1 13 -24 38 29 2 +32 0.1 0.2 42 -1 -37 -49 1 +32 0.9 0.4 27 -8 39 -23 1 +32 0.4 0.5 22 18 13 -12 1 +32 0.5 0.9 37 5 2 -32 1 +32 0.2 0.7 40 19 4 -42 1 +32 0.4 0.9 34 -21 -22 -33 1 +32 0.4 0.3 11 -12 46 38 2 +32 0.3 0.7 39 -11 23 -49 2 +32 0.1 0.2 29 1 14 5 1 +32 0.5 0.9 18 8 27 -48 2 +32 0.9 0.8 25 8 27 4 1 +32 0.2 0.5 28 -4 37 8 2 +32 0.7 0.8 36 10 16 -28 1 +32 0.6 0.1 13 -44 46 38 2 +32 0.8 0.6 -11 -39 12 -24 2 +32 0.7 0.5 10 7 26 18 2 +32 0.7 0.9 -8 -13 23 -23 2 +32 0.1 0.8 23 -2 2 -33 1 +32 0.6 0.4 38 -13 -4 -11 1 +32 0.3 0.4 43 -34 25 -49 2 +32 0.8 0.2 9 -17 -1 -46 1 +32 0.6 0.4 34 -47 12 -15 2 +32 0.1 0.8 -33 -46 -1 -13 2 +32 0.6 0.8 -9 -29 45 -7 2 +32 0.9 0.5 37 -49 42 -18 1 +32 0.9 0.2 40 -32 33 3 1 +32 0.2 0.9 13 -43 5 -35 2 +33 0.8 0.3 3 -33 0 -34 1 +33 0.6 0.4 -12 -42 -8 -18 2 +33 0.6 0.5 13 -40 13 -26 1 +33 0.2 0.4 -20 -35 -7 -14 2 +33 0.7 0.9 32 -11 42 38 2 +33 0.2 0.1 -5 -33 13 -45 1 +33 0.6 0.3 28 -48 -46 -50 1 +33 0.1 0.5 26 -2 48 41 2 +33 0.2 0.9 33 -43 32 -34 2 +33 0.3 0.9 50 -4 41 -7 2 +33 0.7 0.4 -12 -29 0 -22 2 +33 0.7 0.7 38 34 1 -47 1 +33 0.4 0.5 27 -15 21 -11 2 +33 0.5 0.6 12 -39 -3 -11 2 +33 0.3 0.9 36 -34 41 24 2 +33 0.1 0.6 31 -2 21 9 2 +33 0.9 0.3 39 16 -12 -28 1 +33 0.6 0.4 40 -34 8 -16 1 +33 0.5 0.8 16 -37 3 0 2 +33 0.6 0.9 -16 -24 11 -20 2 +33 0.7 0.2 20 -14 21 -31 1 +33 0.2 0.6 -2 -49 -22 -49 2 +33 0.5 0.8 34 31 -2 -22 1 +33 0.4 0.9 32 25 15 -34 1 +33 0.9 0.7 14 -10 37 -23 2 +33 0.1 0.7 14 -24 -31 -43 1 +33 0.9 0.1 -5 -40 39 18 2 +33 0.8 0.4 31 2 -20 -45 1 +33 0.2 0.1 36 -6 -5 -26 1 +33 0.4 0.9 16 -48 -14 -36 1 +33 0.5 0.7 47 -17 -5 -10 1 +33 0.7 0.7 49 -40 -31 -44 1 +33 0.1 0.9 20 -28 22 -15 2 +33 0.3 0.2 -15 -42 33 -17 2 +33 0.3 0.1 22 -26 -8 -24 1 +33 0.4 0.3 42 -19 45 44 2 +33 0.9 0.7 42 -21 40 2 1 +33 0.8 0.1 9 -3 -6 -8 1 +33 0.9 0.2 38 10 -9 -18 1 +33 0.8 0.3 42 27 -13 -49 1 +33 0.6 0.2 43 -28 23 3 1 +33 0.9 0.2 -24 -31 35 -35 2 +33 0.9 0.6 -12 -49 4 -48 2 +33 0.6 0.1 28 1 19 -48 1 +33 0.8 0.2 6 -24 50 -11 2 +33 0.1 0.9 36 28 49 23 2 +33 0.4 0.2 49 -2 2 -44 1 +33 0.4 0.1 40 -14 45 28 2 +33 0.7 0.9 32 -2 45 2 2 +33 0.6 0.1 34 -39 49 32 2 +33 0.5 0.5 -29 -42 -23 -46 1 +33 0.5 0.9 7 -8 3 -3 2 +33 0.4 0.9 29 -33 43 -27 2 +33 0.5 0.7 17 -46 27 -1 2 +33 0.6 0.6 47 -17 -25 -34 1 +33 0.4 0.6 41 1 46 -42 1 +33 0.4 0.3 17 -23 27 -49 1 +33 0.3 0.8 11 -21 29 -10 2 +33 0.9 0.9 43 -48 1 -4 1 +33 0.5 0.6 -27 -41 48 43 2 +33 0.6 0.4 26 -37 -23 -31 1 +33 0.7 0.6 38 0 -16 -31 1 +33 0.9 0.8 32 -48 20 -46 1 +33 0.3 0.2 40 -48 6 -6 2 +33 0.3 0.7 -5 -34 42 31 2 +33 0.7 0.4 25 -21 19 11 2 +33 0.9 0.9 38 32 21 -3 1 +33 0.8 0.6 40 -27 29 13 2 +33 0.8 0.2 43 -19 44 -32 1 +33 0.6 0.8 5 -23 18 7 2 +33 0.4 0.5 -25 -32 33 -38 2 +33 0.7 0.4 25 -31 20 -36 1 +33 0.4 0.9 29 -25 41 3 2 +33 0.9 0.7 -20 -34 46 29 2 +33 0.4 0.4 44 -30 34 22 2 +33 0.8 0.2 32 -49 19 14 2 +33 0.9 0.5 -2 -32 -4 -44 1 +33 0.6 0.8 19 -40 34 0 2 +33 0.4 0.3 -4 -10 -5 -36 1 +33 0.2 0.2 43 5 18 12 1 +33 0.8 0.3 17 -8 13 -1 1 +33 0.9 0.4 12 -22 34 -29 1 +33 0.5 0.4 5 -13 37 -36 2 +33 0.3 0.3 44 34 32 -49 1 +33 0.7 0.9 9 7 30 -42 2 +33 0.7 0.6 37 -42 37 -38 1 +33 0.3 0.4 35 -15 41 -41 1 +33 0.7 0.8 50 -24 39 -23 
1 +33 0.6 0.6 38 -23 -1 -3 1 +33 0.8 0.2 -27 -30 -6 -25 2 +33 0.7 0.2 18 11 0 -11 1 +33 0.7 0.1 20 -11 50 24 2 +33 0.1 0.1 38 -47 26 -41 2 +33 0.5 0.5 -32 -44 22 20 2 +33 0.1 0.4 -49 -50 -39 -47 2 +33 0.3 0.4 10 -47 48 -18 2 +33 0.1 0.5 -2 -16 41 2 2 +33 0.9 0.2 39 36 32 -22 1 +33 0.7 0.9 40 -6 46 -33 2 +33 0.2 0.2 46 -20 43 35 2 +33 0.6 0.4 48 6 47 14 1 +33 0.4 0.2 50 -29 6 -27 1 +33 0.1 0.1 40 -32 31 -20 2 +33 0.5 0.9 21 13 -28 -43 1 +33 0.7 0.1 34 -19 46 -11 1 +33 0.8 0.5 47 38 -14 -32 1 +33 0.7 0.3 34 32 29 -46 1 +33 0.3 0.6 22 -7 -1 -41 1 +33 0.8 0.6 45 40 -23 -42 1 +33 0.8 0.9 45 34 48 1 1 +33 0.9 0.4 13 -35 44 7 2 +33 0.7 0.5 49 10 10 5 1 +33 0.4 0.3 41 -45 21 -38 1 +33 0.5 0.9 38 22 -17 -35 1 +33 0.6 0.1 -22 -33 -7 -22 2 +33 0.4 0.8 -27 -37 40 -8 2 +33 0.2 0.3 -4 -16 11 -25 1 +33 0.7 0.2 29 8 19 -31 1 +33 0.7 0.9 50 -6 13 -48 1 +33 0.2 0.6 37 -4 27 -21 2 +33 0.8 0.2 -26 -28 36 18 2 +33 0.7 0.8 -4 -17 13 -24 2 +33 0.2 0.6 24 -2 36 5 2 +33 0.1 0.1 37 -11 47 -22 1 +33 0.7 0.9 -23 -48 47 31 2 +33 0.4 0.7 -20 -21 27 -42 2 +33 0.8 0.4 45 -26 37 -38 1 +33 0.8 0.2 -1 -25 34 31 2 +33 0.1 0.1 24 -24 -27 -44 1 +33 0.7 0.8 -11 -27 44 41 2 +33 0.4 0.3 39 -1 43 23 2 +33 0.1 0.1 24 -8 -18 -29 1 +33 0.6 0.6 5 -41 -28 -49 1 +33 0.1 0.6 6 -42 46 22 2 +33 0.5 0.5 -32 -43 5 -1 2 +33 0.4 0.3 1 -33 -14 -31 1 +33 0.1 0.4 -21 -45 35 -13 2 +33 0.3 0.3 -30 -39 48 41 2 +33 0.7 0.2 34 -16 48 2 1 +33 0.4 0.1 37 -11 42 -37 1 +33 0.3 0.7 29 -9 -30 -31 1 +33 0.7 0.1 17 6 31 12 1 +33 0.7 0.2 44 -14 -43 -50 1 +33 0.9 0.7 -15 -25 36 -4 2 +33 0.3 0.6 33 22 18 16 1 +33 0.1 0.9 -12 -35 32 9 2 +33 0.6 0.1 13 -25 43 -2 2 +33 0.6 0.8 48 -40 8 -17 1 +33 0.1 0.7 14 1 35 18 2 +33 0.2 0.3 -30 -45 9 5 2 +33 0.7 0.5 8 -24 48 15 2 +33 0.4 0.1 -10 -13 17 -49 1 +33 0.7 0.4 11 -7 21 -34 1 +33 0.2 0.9 16 -33 39 8 2 +33 0.1 0.6 20 -27 43 -48 2 +33 0.5 0.5 34 11 -18 -30 1 +33 0.9 0.6 9 -24 49 -15 2 +33 0.1 0.7 28 -9 45 -11 2 +33 0.8 0.5 43 -41 -26 -40 1 +33 0.1 0.7 25 -25 42 14 2 +33 0.8 0.9 10 5 36 31 2 +33 0.5 0.9 24 -29 -44 -50 1 +33 0.5 0.2 30 -19 16 -19 1 +33 0.6 0.6 32 10 -2 -25 1 +33 0.6 0.5 6 -28 32 -20 2 +33 0.6 0.9 -5 -15 34 23 2 +33 0.6 0.9 21 -40 2 -30 1 +33 0.2 0.2 12 -31 -1 -5 2 +33 0.4 0.3 42 -4 -5 -30 1 +33 0.5 0.2 20 -23 -6 -13 1 +33 0.7 0.2 46 40 -4 -5 1 +33 0.7 0.8 23 15 18 11 1 +33 0.8 0.7 11 -50 7 -14 2 +33 0.6 0.5 -20 -39 32 1 2 +33 0.2 0.7 43 -35 14 -6 2 +33 0.4 0.2 28 1 20 -50 1 +33 0.8 0.9 38 -20 42 4 2 +33 0.9 0.3 41 -30 27 14 1 +33 0.7 0.5 -22 -34 -45 -46 1 +33 0.4 0.8 44 -24 11 -41 1 +33 0.9 0.9 37 1 9 -16 1 +33 0.8 0.9 -5 -16 1 -44 2 +33 0.2 0.4 30 -3 37 -31 1 +33 0.2 0.3 14 -43 6 -28 2 +33 0.8 0.8 7 -43 27 0 2 +33 0.5 0.9 5 -6 45 30 2 +33 0.8 0.5 -10 -45 15 3 2 +33 0.5 0.6 30 -2 34 3 2 +33 0.5 0.9 37 -44 21 19 2 +33 0.2 0.9 30 -45 34 -6 2 +33 0.7 0.6 32 -38 -10 -37 1 +33 0.5 0.4 -11 -26 -19 -49 1 +33 0.1 0.1 20 -40 34 -30 2 +33 0.7 0.1 35 11 6 -35 1 +33 0.9 0.7 18 -36 -7 -23 1 +33 0.6 0.2 30 18 25 -28 1 +33 0.8 0.3 -28 -32 45 -44 2 +33 0.6 0.9 -32 -39 40 -39 2 +33 0.6 0.1 43 5 -38 -43 1 +33 0.5 0.4 42 33 -38 -40 1 +33 0.7 0.9 -5 -10 38 -39 2 +33 0.5 0.6 22 -7 32 16 2 +33 0.1 0.4 41 34 9 -15 1 +33 0.1 0.7 29 -28 31 -48 2 +33 0.8 0.7 27 -26 31 -19 1 +33 0.5 0.4 19 15 50 -31 1 +33 0.5 0.4 -14 -35 9 -31 2 +33 0.3 0.7 41 -47 -26 -34 1 +33 0.5 0.3 48 29 39 -24 1 +33 0.9 0.1 34 26 19 -30 1 +33 0.8 0.5 49 -31 43 25 2 +33 0.8 0.1 34 33 -17 -21 1 +33 0.9 0.9 22 -48 48 -22 2 +33 0.3 0.1 21 14 15 -20 1 +33 0.6 0.5 -16 -46 17 -12 2 +33 0.3 0.2 -41 -49 -7 -40 2 +33 0.9 0.8 24 -10 17 -50 1 +33 0.9 0.5 50 -37 -27 -31 1 
+33 0.3 0.8 -26 -29 -22 -44 2 +33 0.3 0.2 41 -19 33 -16 1 +33 0.6 0.1 42 -5 -23 -38 1 +33 0.2 0.9 -28 -50 28 2 2 +33 0.2 0.6 25 -8 -9 -32 1 +33 0.9 0.6 16 -43 14 -29 1 +33 0.1 0.7 28 2 46 39 2 +33 0.3 0.9 22 10 -9 -43 1 +33 0.5 0.1 35 15 23 12 1 +33 0.6 0.5 48 -44 -15 -43 1 +33 0.9 0.6 44 10 -30 -38 1 +33 0.1 0.6 39 9 48 -8 2 +33 0.9 0.1 -27 -30 32 11 2 +33 0.9 0.5 -6 -11 40 -29 2 +33 0.6 0.6 -23 -42 41 16 2 +33 0.3 0.7 -19 -38 -9 -25 2 +33 0.8 0.7 -4 -24 14 0 2 +33 0.1 0.1 12 1 11 -4 1 +33 0.8 0.4 -18 -30 29 -50 1 +33 0.7 0.5 -4 -50 32 -8 2 +33 0.8 0.5 21 5 27 2 1 +33 0.2 0.7 33 2 28 -39 2 +33 0.7 0.8 10 -25 12 -49 2 +33 0.3 0.6 36 -38 22 3 2 +33 0.8 0.2 48 28 39 9 1 +33 0.9 0.4 19 -11 34 -45 1 +33 0.1 0.6 -19 -48 9 -22 2 +33 0.3 0.2 13 -13 44 -30 1 +33 0.9 0.3 20 4 -15 -50 1 +33 0.2 0.1 10 9 24 8 1 +33 0.7 0.5 -17 -34 33 20 2 +33 0.4 0.2 -29 -32 -16 -45 1 +33 0.6 0.9 -11 -32 25 -43 2 +33 0.9 0.8 5 -28 33 -22 2 +33 0.7 0.9 50 -37 45 -24 2 +33 0.3 0.1 -10 -38 6 -11 2 +33 0.8 0.6 10 -34 50 -18 2 +33 0.6 0.7 24 -47 -16 -17 1 +33 0.8 0.4 36 24 5 -38 1 +33 0.3 0.3 -23 -31 -29 -34 1 +33 0.6 0.9 10 -10 22 9 2 +33 0.3 0.6 32 16 48 23 2 +33 0.5 0.6 42 -50 40 -18 2 +33 0.5 0.3 47 30 46 -9 1 +33 0.3 0.3 -40 -45 40 -21 2 +33 0.8 0.2 29 -43 39 37 2 +33 0.1 0.9 1 -33 46 -35 2 +33 0.2 0.9 -9 -28 -20 -35 1 +33 0.1 0.5 -2 -3 28 -41 2 +33 0.5 0.9 -3 -14 28 18 2 +33 0.5 0.3 -4 -37 26 -37 2 +33 0.3 0.5 -26 -29 -3 -31 2 +33 0.7 0.1 1 -31 19 -33 1 +33 0.9 0.2 0 -24 10 -27 1 +33 0.4 0.2 2 -26 10 -43 1 +33 0.1 0.5 37 -33 -6 -19 2 +33 0.2 0.7 37 -50 7 -26 2 +33 0.7 0.6 36 33 13 -7 1 +33 0.7 0.3 6 -10 -6 -29 1 +33 0.6 0.2 36 -39 0 -20 1 +33 0.6 0.4 -27 -37 -20 -49 1 +33 0.4 0.9 48 -5 -45 -46 1 +33 0.5 0.4 48 21 35 -33 1 +33 0.8 0.3 -4 -18 13 -16 2 +33 0.9 0.8 42 19 40 -37 1 +33 0.1 0.3 11 -2 5 -37 1 +33 0.2 0.8 -21 -38 45 39 2 +33 0.8 0.6 -19 -36 21 -10 2 +33 0.5 0.4 41 -32 -23 -40 1 +33 0.2 0.2 25 -46 28 -12 2 +33 0.4 0.2 -12 -48 6 -40 2 +33 0.3 0.1 -26 -38 13 -34 2 +33 0.6 0.4 47 2 -23 -45 1 +33 0.8 0.2 30 2 -7 -12 1 +33 0.1 0.1 2 -49 -35 -45 1 +33 0.7 0.2 40 9 -29 -32 1 +33 0.4 0.8 24 -47 13 -26 2 +33 0.3 0.4 48 1 17 -22 1 +33 0.1 0.9 -11 -24 29 24 2 +33 0.8 0.8 21 -35 -26 -46 1 +33 0.9 0.7 38 28 -9 -28 1 +33 0.7 0.3 -1 -13 -3 -41 1 +34 0.5 0.4 30 -27 14 -1 2 +34 0.8 0.3 -3 -41 21 10 2 +34 0.6 0.7 35 -36 19 -11 1 +34 0.3 0.6 -14 -50 34 -21 2 +34 0.3 0.8 -12 -38 47 -10 2 +34 0.7 0.2 40 -9 34 -44 1 +34 0.4 0.1 -17 -38 28 26 2 +34 0.1 0.9 -16 -39 11 4 2 +34 0.5 0.3 -45 -49 -26 -29 2 +34 0.1 0.3 1 -35 21 -6 2 +34 0.5 0.3 35 18 30 -34 1 +34 0.7 0.5 -5 -30 29 25 2 +34 0.7 0.6 23 -2 16 -28 1 +34 0.3 0.1 -8 -25 24 20 2 +34 0.7 0.6 36 -33 -5 -14 1 +34 0.6 0.6 21 -48 -1 -41 1 +34 0.6 0.2 -7 -44 11 -29 1 +34 0.4 0.6 49 46 23 -5 1 +34 0.2 0.2 34 26 26 21 1 +34 0.3 0.6 41 39 16 10 1 +34 0.5 0.5 18 -30 -16 -35 1 +34 0.8 0.8 49 -48 19 -1 1 +34 0.8 0.7 19 -9 46 -43 2 +34 0.5 0.4 -14 -20 -28 -36 1 +34 0.5 0.8 -10 -49 26 -39 2 +34 0.2 0.6 18 -36 -19 -46 1 +34 0.6 0.2 -7 -38 10 -21 2 +34 0.9 0.4 24 -13 42 40 2 +34 0.8 0.2 24 22 28 -31 1 +34 0.9 0.3 -8 -19 22 -43 1 +34 0.4 0.9 -10 -45 47 23 2 +34 0.9 0.1 37 -4 1 -29 1 +34 0.7 0.8 36 16 44 -23 1 +34 0.4 0.8 19 18 -32 -50 1 +34 0.7 0.1 14 9 29 -1 1 +34 0.8 0.4 -10 -22 14 -22 2 +34 0.3 0.5 -2 -5 -15 -48 1 +34 0.1 0.7 45 15 18 1 1 +34 0.1 0.1 10 -24 45 -43 1 +34 0.9 0.8 19 -44 17 14 2 +34 0.8 0.1 43 -45 48 25 2 +34 0.3 0.9 31 13 45 40 2 +34 0.8 0.4 24 -29 -24 -37 1 +34 0.4 0.8 5 -42 34 10 2 +34 0.3 0.1 31 26 22 -32 1 +34 0.5 0.3 22 -47 -9 -13 2 +34 0.5 0.9 35 -41 3 -16 1 +34 0.7 0.6 
20 -4 46 41 2 +34 0.4 0.4 41 -34 27 13 2 +34 0.8 0.9 20 17 28 15 2 +34 0.3 0.3 33 -50 39 -25 2 +34 0.3 0.5 28 -31 48 42 2 +34 0.4 0.5 44 6 -4 -50 1 +34 0.2 0.4 44 -2 16 11 2 +34 0.8 0.1 18 -12 -5 -43 1 +34 0.9 0.1 41 -40 25 -13 1 +34 0.1 0.3 7 -29 32 15 2 +34 0.4 0.6 -1 -33 17 16 2 +34 0.7 0.1 30 -19 27 18 2 +34 0.5 0.4 44 18 26 14 1 +34 0.3 0.5 29 -42 30 -47 2 +34 0.4 0.3 27 24 4 -40 1 +34 0.9 0.3 26 20 38 28 2 +34 0.3 0.7 31 6 38 35 2 +34 0.4 0.8 35 -37 29 6 2 +34 0.4 0.1 20 5 5 -18 1 +34 0.2 0.7 45 31 -30 -45 1 +34 0.4 0.3 47 -10 -15 -50 1 +34 0.4 0.3 -38 -48 5 -20 2 +34 0.3 0.9 34 -37 31 -46 2 +34 0.2 0.1 -34 -42 6 -8 2 +34 0.9 0.2 37 -43 -21 -48 1 +34 0.6 0.7 6 0 -6 -19 1 +34 0.1 0.9 7 5 19 -18 2 +34 0.2 0.8 -22 -23 37 36 2 +34 0.7 0.5 10 -12 14 -49 1 +34 0.4 0.8 28 12 9 4 1 +34 0.2 0.6 13 4 18 -43 1 +34 0.1 0.5 -8 -38 30 -39 2 +34 0.8 0.4 -1 -16 23 10 2 +34 0.6 0.7 46 -33 15 -2 1 +34 0.6 0.5 50 33 -12 -14 1 +34 0.1 0.5 34 -25 2 -50 1 +34 0.7 0.6 33 14 8 7 1 +34 0.3 0.7 39 -21 29 -36 2 +34 0.3 0.6 41 18 20 -29 1 +34 0.2 0.1 28 -2 5 -40 1 +34 0.6 0.9 43 10 12 -32 1 +34 0.2 0.7 43 -46 -7 -45 2 +34 0.6 0.5 26 -38 23 -42 1 +34 0.8 0.7 -13 -31 40 15 2 +34 0.4 0.3 -32 -35 50 -6 2 +34 0.4 0.2 40 -2 -40 -42 1 +34 0.4 0.7 27 -2 12 10 2 +34 0.7 0.1 -11 -25 37 -40 1 +34 0.5 0.9 49 -47 -43 -46 1 +34 0.7 0.3 11 -44 44 2 2 +34 0.7 0.9 -2 -23 42 -21 2 +34 0.8 0.6 36 -43 -14 -22 1 +34 0.4 0.2 28 27 25 -8 1 +34 0.7 0.9 -14 -34 31 -22 2 +34 0.3 0.6 5 -15 -20 -50 1 +34 0.7 0.4 26 0 50 7 1 +34 0.2 0.7 2 -27 46 -20 2 +34 0.6 0.7 1 -28 14 12 2 +34 0.4 0.3 -8 -9 34 -20 1 +34 0.3 0.4 49 48 27 -21 1 +34 0.4 0.5 48 -35 36 24 2 +34 0.8 0.7 9 -24 26 22 2 +34 0.2 0.5 38 -20 -11 -41 1 +34 0.5 0.1 16 -15 10 -8 1 +34 0.7 0.1 10 -5 50 -32 1 +34 0.8 0.4 -26 -44 29 -47 2 +34 0.7 0.8 42 0 48 9 2 +34 0.2 0.3 -21 -33 46 -38 2 +34 0.8 0.7 39 18 -4 -48 1 +34 0.9 0.4 -14 -41 -32 -48 1 +34 0.1 0.5 36 22 -34 -39 1 +34 0.9 0.9 50 -3 -16 -39 1 +34 0.1 0.1 -39 -49 -15 -32 2 +34 0.1 0.2 -17 -28 -28 -30 1 +34 0.8 0.6 22 -48 13 5 2 +34 0.6 0.9 8 -6 47 11 2 +34 0.4 0.9 -7 -10 -5 -17 1 +34 0.7 0.8 -10 -19 5 -4 2 +34 0.9 0.4 26 -17 -34 -42 1 +34 0.4 0.7 36 14 39 -49 1 +34 0.2 0.3 19 11 32 22 2 +34 0.7 0.8 34 -20 48 -3 2 +34 0.5 0.2 22 -17 -8 -49 1 +34 0.3 0.5 45 -25 26 -2 2 +34 0.4 0.4 -28 -46 35 -50 2 +34 0.7 0.1 -11 -29 22 -44 1 +34 0.4 0.9 10 -15 32 -22 2 +34 0.3 0.6 -3 -20 -23 -31 1 +34 0.5 0.6 36 15 22 6 1 +34 0.5 0.7 -12 -23 -9 -25 2 +34 0.2 0.4 42 25 -14 -37 1 +34 0.2 0.6 22 3 36 -29 2 +34 0.3 0.8 2 -15 21 19 2 +34 0.6 0.5 13 -13 35 -28 2 +34 0.7 0.9 32 28 -3 -10 1 +34 0.5 0.4 -36 -42 44 32 2 +34 0.6 0.4 -20 -31 39 -34 2 +34 0.4 0.2 15 -45 32 16 2 +34 0.4 0.2 46 43 40 20 1 +34 0.5 0.8 34 -9 -32 -37 1 +34 0.4 0.1 -19 -33 -12 -36 1 +34 0.7 0.1 12 -44 18 12 2 +34 0.6 0.3 -20 -45 -11 -19 2 +34 0.5 0.4 43 -35 31 -10 1 +34 0.5 0.1 -6 -35 -12 -24 1 +34 0.6 0.1 44 -18 31 5 2 +34 0.3 0.2 -4 -44 -8 -31 2 +34 0.5 0.3 27 -37 10 8 2 +34 0.5 0.8 21 -19 -1 -12 1 +34 0.7 0.3 8 5 48 12 2 +34 0.6 0.9 46 44 32 15 1 +34 0.1 0.2 -41 -44 41 37 2 +34 0.7 0.4 13 -18 17 -41 1 +34 0.3 0.9 -9 -44 23 7 2 +34 0.6 0.9 -23 -34 26 12 2 +34 0.9 0.1 5 -50 4 -17 1 +34 0.1 0.2 48 37 3 -5 1 +34 0.9 0.8 37 5 -1 -50 1 +34 0.4 0.3 48 -48 -26 -38 1 +34 0.5 0.8 43 -21 -20 -25 1 +34 0.3 0.5 45 40 35 4 1 +34 0.1 0.1 20 -15 -18 -32 1 +34 0.3 0.3 32 -43 12 -14 2 +34 0.6 0.8 26 -9 36 -34 2 +34 0.8 0.6 3 -14 9 -5 2 +34 0.8 0.3 42 -21 39 -5 1 +34 0.6 0.8 30 29 -2 -13 1 +34 0.3 0.3 38 30 14 -21 1 +34 0.5 0.4 6 -31 23 0 2 +34 0.7 0.6 18 -32 36 17 2 +34 0.3 0.7 19 14 47 6 2 +34 
0.5 0.6 1 -15 39 -44 2 +34 0.8 0.1 -28 -40 48 38 2 +34 0.8 0.7 5 -24 -17 -35 1 +34 0.7 0.5 15 -33 3 -22 1 +34 0.8 0.5 -6 -48 -20 -36 1 +34 0.2 0.4 44 -7 48 -47 1 +34 0.2 0.6 -13 -44 40 25 2 +34 0.5 0.7 -16 -39 40 -27 2 +34 0.4 0.2 -13 -30 -24 -44 1 +34 0.8 0.9 15 -19 39 -20 2 +34 0.2 0.5 0 -9 33 -7 2 +34 0.4 0.7 21 -40 37 -23 2 +34 0.9 0.4 31 -1 17 -7 1 +34 0.6 0.6 -15 -20 -26 -32 1 +34 0.1 0.1 -18 -49 30 0 2 +34 0.1 0.6 -35 -48 35 -42 2 +34 0.3 0.2 38 -46 18 -3 2 +34 0.5 0.2 -34 -46 37 -2 2 +34 0.8 0.8 25 -3 13 -8 1 +34 0.2 0.1 13 -16 23 -8 2 +34 0.8 0.9 38 -32 -6 -31 1 +34 0.5 0.8 22 -23 32 -2 2 +34 0.5 0.1 41 35 -18 -34 1 +34 0.1 0.4 37 -33 24 7 2 +34 0.4 0.1 33 -36 18 -22 1 +34 0.3 0.1 -21 -29 -35 -49 1 +34 0.1 0.5 13 -40 8 -27 2 +34 0.1 0.7 -14 -41 -10 -16 2 +34 0.3 0.9 31 -7 30 10 2 +34 0.3 0.6 -15 -31 48 -12 2 +34 0.6 0.2 30 17 43 -13 1 +34 0.7 0.4 50 -31 -3 -25 1 +34 0.3 0.4 28 12 27 -12 1 +34 0.2 0.4 26 -24 6 -40 1 +34 0.6 0.4 47 -23 49 20 2 +34 0.2 0.5 2 -37 45 -29 2 +34 0.8 0.3 17 -15 -8 -46 1 +34 0.4 0.3 41 -38 43 -43 1 +34 0.7 0.9 36 6 -15 -17 1 +34 0.9 0.2 8 -29 18 -22 1 +34 0.9 0.7 46 8 6 -49 1 +34 0.2 0.2 4 3 45 31 2 +34 0.9 0.4 -36 -46 48 -48 2 +34 0.2 0.2 -1 -3 18 -41 1 +34 0.8 0.5 39 -33 0 -38 1 +34 0.8 0.8 33 20 27 26 1 +34 0.3 0.6 24 14 8 1 1 +34 0.5 0.3 -7 -48 42 -3 2 +34 0.9 0.4 37 34 36 30 1 +34 0.5 0.7 -19 -25 3 -32 2 +34 0.6 0.3 26 17 32 -38 1 +34 0.1 0.4 48 15 47 -6 1 +34 0.9 0.2 32 -28 8 -46 1 +34 0.6 0.6 -19 -28 38 -15 2 +34 0.4 0.5 1 -31 -4 -19 2 +34 0.4 0.3 18 -49 50 -44 2 +34 0.5 0.5 1 -39 -21 -32 1 +34 0.2 0.5 48 32 9 -1 1 +34 0.3 0.9 20 -47 -25 -29 1 +34 0.1 0.5 43 -26 -38 -41 1 +34 0.9 0.3 31 -17 24 -11 1 +34 0.1 0.3 -23 -27 44 -18 2 +34 0.1 0.5 46 -5 25 5 2 +34 0.5 0.3 49 28 -10 -38 1 +34 0.8 0.3 -5 -32 32 -50 2 +34 0.9 0.4 22 19 37 11 1 +34 0.2 0.6 6 -9 42 40 2 +34 0.4 0.5 8 -16 -8 -11 1 +34 0.3 0.4 47 -32 3 -26 1 +34 0.4 0.3 46 -2 -4 -24 1 +34 0.4 0.6 43 35 7 -5 1 +34 0.5 0.3 8 -6 39 -5 2 +34 0.2 0.3 33 19 46 -48 1 +34 0.7 0.1 36 28 12 8 1 +34 0.8 0.6 45 -32 -31 -50 1 +34 0.5 0.1 6 -43 -17 -41 1 +34 0.5 0.8 24 -7 47 -22 2 +34 0.9 0.1 -16 -19 -16 -50 1 +34 0.2 0.8 -11 -34 15 -14 2 +34 0.7 0.2 28 13 42 6 1 +34 0.3 0.5 17 -26 24 6 2 +34 0.5 0.5 -26 -37 -8 -37 2 +34 0.7 0.7 -3 -9 1 -43 2 +34 0.5 0.1 31 -21 -17 -39 1 +34 0.6 0.2 42 -44 23 -28 1 +34 0.5 0.2 3 -25 9 -11 2 +34 0.7 0.8 28 -5 19 -6 2 +34 0.4 0.6 22 -47 26 6 2 +34 0.7 0.2 42 -7 24 23 1 +34 0.9 0.6 42 36 -25 -27 1 +34 0.9 0.4 28 -19 41 -14 1 +34 0.1 0.5 31 -33 2 -23 2 +34 0.3 0.6 18 -45 44 8 2 +34 0.4 0.7 37 22 33 5 1 +34 0.1 0.8 7 -47 -11 -42 2 +34 0.9 0.1 17 -45 33 18 2 +34 0.2 0.1 19 -16 33 -47 1 +34 0.7 0.8 1 -42 17 10 2 +34 0.4 0.4 -15 -26 8 -14 2 +34 0.7 0.3 36 -29 13 -43 1 +34 0.2 0.5 38 19 34 -5 2 +34 0.9 0.8 48 -4 11 -17 1 +34 0.8 0.3 -1 -36 43 31 2 +34 0.8 0.3 9 -2 8 -50 1 +34 0.5 0.5 50 -29 39 5 2 +34 0.8 0.9 25 15 5 -35 1 +34 0.7 0.9 35 -20 -2 -24 1 +34 0.7 0.4 30 -40 48 -15 1 +34 0.6 0.4 8 -44 36 -10 2 +34 0.4 0.3 17 -9 26 1 2 +34 0.5 0.3 -22 -25 48 30 2 +34 0.6 0.7 1 -39 28 -43 2 +34 0.6 0.2 36 -11 31 -15 1 +34 0.4 0.5 38 0 -29 -31 1 +34 0.6 0.3 9 -14 11 4 2 +34 0.2 0.5 -34 -50 41 -27 2 +34 0.9 0.7 28 -4 33 11 2 +34 0.8 0.5 -13 -31 49 -26 2 +34 0.1 0.5 45 16 8 -40 1 +34 0.3 0.3 15 -46 9 0 2 +34 0.4 0.7 35 -48 34 2 2 +34 0.8 0.3 33 -42 10 4 1 +35 0.9 0.1 49 -42 26 -3 1 +35 0.3 0.3 9 0 25 -42 1 +35 0.4 0.9 23 -48 37 13 2 +35 0.3 0.4 8 -7 -12 -17 1 +35 0.9 0.7 6 -14 6 -35 2 +35 0.9 0.6 18 -7 11 -14 1 +35 0.8 0.9 -19 -37 28 -34 2 +35 0.1 0.1 5 -11 41 33 2 +35 0.6 0.2 45 26 -17 -28 1 +35 
0.8 0.1 4 -10 43 8 2 +35 0.6 0.8 18 -45 -15 -34 1 +35 0.3 0.2 33 -32 15 -19 1 +35 0.4 0.1 47 28 5 -36 1 +35 0.7 0.1 31 -35 41 31 2 +35 0.2 0.2 6 -4 -28 -49 1 +35 0.1 0.1 -11 -40 31 -9 2 +35 0.3 0.9 26 -49 9 -25 2 +35 0.2 0.3 6 -30 46 13 2 +35 0.9 0.9 13 -2 48 -16 2 +35 0.2 0.8 50 49 36 -13 1 +35 0.1 0.8 11 -45 43 -33 2 +35 0.2 0.7 39 -23 -36 -49 1 +35 0.9 0.6 -20 -21 16 -23 2 +35 0.7 0.2 -8 -18 40 -30 2 +35 0.2 0.5 33 24 23 -31 1 +35 0.7 0.6 13 -3 42 -50 1 +35 0.7 0.6 38 -12 -6 -15 1 +35 0.3 0.5 25 -38 32 5 2 +35 0.2 0.8 35 -44 15 -31 2 +35 0.4 0.1 37 -45 5 -28 1 +35 0.5 0.4 38 -37 -19 -49 1 +35 0.5 0.1 27 18 27 -16 1 +35 0.4 0.1 -27 -33 49 26 2 +35 0.5 0.7 9 -10 50 9 2 +35 0.8 0.8 49 -16 38 19 2 +35 0.4 0.3 -17 -42 2 -43 1 +35 0.4 0.9 -33 -35 18 -40 2 +35 0.5 0.8 -6 -11 39 15 2 +35 0.1 0.5 45 -13 -14 -19 1 +35 0.4 0.7 31 -15 45 -24 2 +35 0.2 0.5 4 -21 50 9 2 +35 0.1 0.9 45 -8 -14 -33 1 +35 0.6 0.9 21 -25 26 -45 2 +35 0.2 0.6 -25 -26 -6 -9 2 +35 0.6 0.6 46 27 12 -8 1 +35 0.9 0.8 40 36 7 5 1 +35 0.9 0.7 23 -10 36 -16 1 +35 0.5 0.4 18 -25 -17 -29 1 +35 0.8 0.5 6 -24 26 -21 2 +35 0.4 0.9 48 -39 9 -48 2 +35 0.1 0.9 -27 -46 40 -34 2 +35 0.6 0.1 32 -39 -39 -42 1 +35 0.2 0.1 36 -46 -10 -26 1 +35 0.6 0.7 -35 -42 23 -34 2 +35 0.1 0.3 33 11 2 -30 1 +35 0.2 0.6 46 -23 21 14 2 +35 0.9 0.1 35 -25 -29 -50 1 +35 0.2 0.4 -6 -11 -16 -44 1 +35 0.4 0.8 -19 -24 36 5 2 +35 0.6 0.4 32 23 19 0 1 +35 0.3 0.3 -3 -24 1 -50 1 +35 0.6 0.1 24 16 2 -43 1 +35 0.3 0.8 25 -16 20 -44 2 +35 0.3 0.9 46 -8 45 -24 2 +35 0.1 0.7 38 -26 -16 -21 1 +35 0.7 0.6 9 -9 19 1 2 +35 0.1 0.1 43 -44 1 -23 2 +35 0.3 0.7 15 -26 30 -2 2 +35 0.2 0.9 -19 -50 19 -16 2 +35 0.2 0.2 28 -4 -22 -23 1 +35 0.4 0.8 48 0 38 30 2 +35 0.5 0.7 17 9 23 -11 1 +35 0.9 0.2 36 -6 45 7 1 +35 0.5 0.7 -12 -45 -21 -39 2 +35 0.2 0.7 29 2 43 -26 2 +35 0.8 0.6 17 14 36 4 1 +35 0.7 0.3 9 -3 23 15 2 +35 0.7 0.7 29 -31 11 -28 1 +35 0.8 0.6 10 -38 28 17 2 +35 0.4 0.2 -6 -13 24 -25 1 +35 0.2 0.3 23 -24 21 19 2 +35 0.5 0.1 40 10 28 -18 1 +35 0.5 0.6 -27 -32 13 -40 2 +35 0.3 0.2 39 16 24 17 1 +35 0.6 0.5 49 -35 25 -5 1 +35 0.5 0.5 50 17 33 -48 1 +35 0.6 0.8 49 -11 15 -31 1 +35 0.7 0.2 -4 -16 -3 -48 1 +35 0.4 0.7 38 -23 15 11 2 +35 0.7 0.7 14 -19 20 -45 1 +35 0.4 0.5 43 13 28 -25 1 +35 0.7 0.8 17 -45 42 7 2 +35 0.6 0.3 -11 -37 -3 -40 1 +35 0.2 0.6 38 -29 33 4 2 +35 0.7 0.3 48 -21 49 6 1 +35 0.3 0.6 46 -45 35 -25 2 +35 0.7 0.6 -20 -42 3 -16 2 +35 0.7 0.2 40 -39 36 32 2 +35 0.5 0.2 13 -47 22 9 2 +35 0.2 0.9 25 19 -27 -49 1 +35 0.6 0.6 -9 -49 43 -29 2 +35 0.4 0.3 27 16 21 -7 1 +35 0.2 0.1 20 1 22 -27 1 +35 0.9 0.1 -40 -41 37 26 2 +35 0.3 0.6 -24 -48 9 -19 2 +35 0.9 0.5 35 20 28 -13 1 +35 0.5 0.1 -21 -43 32 10 2 +35 0.9 0.1 46 -30 37 7 1 +35 0.2 0.4 30 -15 -4 -29 1 +35 0.2 0.9 -15 -35 6 -12 2 +35 0.6 0.2 32 -48 -4 -9 1 +35 0.3 0.1 -18 -30 49 31 2 +35 0.2 0.6 9 5 -3 -36 1 +35 0.9 0.1 46 -7 28 -39 1 +35 0.1 0.7 40 -44 11 -29 2 +35 0.8 0.8 15 0 43 -22 2 +35 0.4 0.5 46 -38 45 -31 1 +35 0.3 0.1 24 -17 28 -27 1 +35 0.8 0.2 -11 -35 50 37 2 +35 0.7 0.1 -5 -48 -13 -37 1 +35 0.9 0.8 30 -11 -13 -17 1 +35 0.9 0.6 22 2 37 -8 2 +35 0.2 0.3 20 -7 34 -32 2 +35 0.3 0.5 36 -12 -27 -43 1 +35 0.3 0.3 48 30 -2 -28 1 +35 0.1 0.9 -14 -45 8 -14 2 +35 0.9 0.5 6 -41 13 -40 1 +35 0.7 0.1 36 -43 -27 -38 1 +35 0.5 0.6 9 8 12 -37 1 +35 0.3 0.3 47 -7 -4 -25 1 +35 0.7 0.7 4 -31 46 -49 2 +35 0.8 0.5 43 40 44 -47 1 +35 0.4 0.3 -13 -37 37 -37 2 +35 0.3 0.2 34 -10 -1 -32 1 +35 0.6 0.9 30 -13 49 -15 2 +35 0.5 0.6 -31 -46 -6 -28 2 +35 0.4 0.3 -32 -43 -43 -48 2 +35 0.8 0.8 14 -42 17 2 2 +35 0.1 0.8 -18 -38 -17 -48 2 
+35 0.9 0.1 24 -26 0 -22 1 +35 0.5 0.5 41 -28 -1 -31 1 +35 0.1 0.5 33 -22 50 -28 2 +35 0.2 0.3 23 20 7 -46 1 +35 0.4 0.6 -17 -33 48 -5 2 +35 0.3 0.1 -17 -44 47 46 2 +35 0.4 0.8 2 -15 47 15 2 +35 0.4 0.1 41 0 35 -40 1 +35 0.9 0.1 44 23 47 -17 1 +35 0.5 0.9 -6 -36 15 -16 2 +35 0.9 0.8 43 -11 -5 -20 1 +35 0.2 0.6 7 3 -7 -18 1 +35 0.9 0.6 31 -31 34 1 1 +35 0.1 0.7 40 17 -16 -33 1 +35 0.1 0.1 8 -30 44 -36 2 +35 0.9 0.9 12 -48 13 8 2 +35 0.8 0.5 -16 -38 46 -5 2 +35 0.9 0.7 17 5 29 -26 1 +35 0.5 0.7 50 39 33 -44 1 +35 0.7 0.7 8 -23 9 -50 1 +35 0.9 0.9 48 2 33 12 1 +35 0.5 0.9 38 -32 14 -46 2 +35 0.7 0.9 20 -40 47 8 2 +35 0.1 0.2 37 -9 46 -16 2 +35 0.4 0.8 -3 -16 47 35 2 +35 0.2 0.3 21 -50 23 1 2 +35 0.7 0.8 0 -5 30 20 2 +35 0.7 0.5 39 27 -7 -21 1 +35 0.4 0.8 -8 -49 39 3 2 +35 0.6 0.3 44 8 -1 -12 1 +35 0.2 0.7 12 -4 -14 -38 1 +35 0.7 0.4 0 -26 4 -17 2 +35 0.9 0.8 33 -15 6 -10 1 +35 0.9 0.3 38 -1 31 -24 1 +35 0.3 0.4 -2 -6 21 -1 2 +35 0.6 0.8 -11 -14 19 -10 2 +35 0.5 0.4 -19 -32 17 13 2 +35 0.7 0.8 21 -37 9 -44 1 +35 0.1 0.3 -26 -50 14 11 2 +35 0.4 0.8 24 -6 1 -18 1 +35 0.5 0.2 -26 -29 12 -36 2 +35 0.4 0.4 41 -47 39 -5 2 +35 0.9 0.1 -45 -48 27 -3 2 +35 0.4 0.8 30 -7 50 20 2 +35 0.4 0.3 12 -50 27 -44 2 +35 0.6 0.6 24 -7 32 -7 2 +35 0.9 0.5 46 34 -15 -24 1 +35 0.2 0.7 20 5 7 -23 1 +35 0.4 0.5 29 18 -42 -46 1 +35 0.3 0.6 28 -6 -19 -21 1 +35 0.8 0.6 6 2 -13 -16 1 +35 0.2 0.3 38 -47 -28 -33 1 +35 0.5 0.1 -7 -12 -8 -34 1 +35 0.4 0.2 -30 -45 15 -50 2 +35 0.8 0.4 19 -49 -3 -35 1 +35 0.5 0.4 9 -42 32 -12 2 +35 0.1 0.8 38 -40 46 -49 2 +35 0.7 0.5 4 -17 2 -40 1 +35 0.1 0.2 26 12 1 -14 1 +35 0.6 0.3 -19 -34 -20 -25 1 +35 0.7 0.3 23 -10 -5 -50 1 +35 0.1 0.5 -9 -25 -6 -48 1 +35 0.2 0.6 12 -29 28 21 2 +35 0.6 0.6 -1 -9 -15 -36 1 +35 0.7 0.9 48 29 34 -8 1 +35 0.3 0.6 22 -43 -3 -20 2 +35 0.9 0.2 35 24 43 2 1 +35 0.1 0.3 35 26 45 -43 1 +35 0.5 0.1 46 -40 3 2 1 +35 0.3 0.8 -1 -13 34 -14 2 +35 0.3 0.1 35 -9 45 22 2 +35 0.1 0.5 -35 -41 32 -33 2 +35 0.6 0.4 2 -44 -2 -23 1 +35 0.9 0.6 -44 -50 12 -28 2 +35 0.8 0.6 29 -5 5 -13 1 +35 0.9 0.7 35 25 44 7 2 +35 0.5 0.1 20 -34 33 24 2 +35 0.6 0.5 41 -7 37 26 2 +35 0.2 0.2 34 4 13 -32 1 +35 0.4 0.6 -22 -32 32 11 2 +35 0.9 0.8 8 2 48 24 2 +35 0.8 0.7 -25 -29 -2 -25 2 +35 0.6 0.2 39 -15 48 47 2 +35 0.7 0.5 10 -24 40 34 2 +35 0.3 0.6 -2 -37 42 -38 2 +35 0.7 0.1 37 -38 50 -46 1 +35 0.5 0.2 -7 -44 9 -30 2 +35 0.1 0.8 26 -35 37 11 2 +35 0.5 0.3 38 -11 49 9 2 +35 0.2 0.5 8 -27 31 -27 2 +35 0.7 0.2 36 -29 47 15 2 +35 0.5 0.9 35 26 38 -10 1 +35 0.5 0.8 7 -13 47 -44 2 +35 0.5 0.5 7 -24 -2 -7 2 +35 0.9 0.1 0 -50 26 14 2 +35 0.5 0.7 31 12 18 6 1 +35 0.7 0.7 12 2 17 -21 2 +35 0.3 0.8 -4 -47 14 5 2 +35 0.5 0.6 4 -2 8 -41 1 +35 0.5 0.4 -15 -38 39 -19 2 +35 0.5 0.4 37 0 28 -37 1 +35 0.1 0.9 47 8 -7 -46 1 +35 0.6 0.7 -11 -35 20 -33 2 +35 0.6 0.6 44 -31 34 8 2 +35 0.4 0.9 -30 -32 11 -49 2 +35 0.8 0.9 12 -25 31 -3 2 +35 0.4 0.3 -17 -32 30 -23 2 +35 0.8 0.5 15 6 24 20 2 +35 0.1 0.9 26 -48 33 3 2 +35 0.6 0.9 1 -32 30 -20 2 +35 0.8 0.8 -2 -21 15 -21 2 +35 0.7 0.9 31 -35 -6 -24 1 +35 0.3 0.7 6 -11 39 -1 2 +35 0.7 0.1 0 -47 47 -8 2 +35 0.3 0.7 41 -49 42 -48 2 +35 0.6 0.4 12 -12 -48 -49 1 +35 0.5 0.9 22 -29 38 -1 2 +35 0.9 0.2 -20 -38 23 -44 2 +35 0.8 0.8 10 -15 -20 -31 1 +35 0.6 0.5 18 -37 15 -10 2 +35 0.9 0.2 41 5 16 -19 1 +35 0.3 0.8 25 -14 10 -18 2 +35 0.9 0.7 8 -41 27 4 2 +35 0.8 0.1 46 -49 30 23 1 +35 0.4 0.3 17 -44 44 37 2 +35 0.2 0.8 29 -19 -2 -27 1 +35 0.1 0.9 11 -45 8 -46 2 +35 0.3 0.1 20 -13 5 -44 1 +35 0.1 0.1 44 -6 5 -23 1 +35 0.5 0.9 20 -43 43 -19 2 +35 0.1 0.8 45 -33 15 -43 2 +35 0.7 0.2 
30 18 38 -9 1 +35 0.4 0.2 28 24 5 -10 1 +35 0.8 0.2 -2 -23 42 32 2 +35 0.9 0.9 42 -15 -27 -47 1 +35 0.9 0.9 25 -16 27 -17 2 +35 0.2 0.4 42 23 -2 -40 1 +35 0.8 0.5 43 -48 -34 -40 1 +35 0.4 0.5 6 -17 6 -27 1 +35 0.4 0.7 14 -36 -19 -40 1 +35 0.8 0.9 34 -36 30 12 2 +35 0.7 0.5 32 -37 34 -42 1 +35 0.7 0.1 37 0 -18 -45 1 +35 0.3 0.7 -15 -38 6 -6 2 +35 0.2 0.4 31 -5 -5 -8 1 +35 0.9 0.2 13 12 -30 -47 1 +35 0.6 0.8 44 41 17 -47 1 +35 0.4 0.6 11 -45 -26 -45 1 +35 0.5 0.1 10 6 16 -22 1 +35 0.8 0.5 -9 -45 37 4 2 +35 0.1 0.4 36 29 7 -9 1 +35 0.2 0.2 33 -4 28 -45 1 +35 0.7 0.6 48 7 22 -24 1 +35 0.4 0.2 37 -39 27 -13 2 +35 0.4 0.5 17 -16 16 1 2 +35 0.2 0.4 24 -8 42 -49 1 +35 0.5 0.8 40 21 1 -5 1 +35 0.8 0.3 36 19 -21 -38 1 +35 0.8 0.5 -6 -22 -30 -44 1 +35 0.3 0.2 31 15 -12 -44 1 +35 0.5 0.6 -5 -13 49 -34 2 +36 0.4 0.6 21 -7 -19 -46 1 +36 0.2 0.2 -10 -46 37 -31 2 +36 0.5 0.6 -21 -25 10 -33 2 +36 0.6 0.4 -1 -47 20 -36 2 +36 0.7 0.4 45 -44 1 -20 1 +36 0.6 0.5 32 -14 -21 -45 1 +36 0.9 0.6 20 -45 -6 -15 1 +36 0.5 0.7 -9 -34 11 -9 2 +36 0.3 0.6 32 16 -13 -23 1 +36 0.6 0.2 47 -14 13 -10 1 +36 0.4 0.2 47 1 2 -28 1 +36 0.5 0.8 34 -23 5 -10 1 +36 0.4 0.8 36 -24 1 -42 1 +36 0.1 0.5 14 -31 25 21 2 +36 0.4 0.5 48 18 10 -5 1 +36 0.2 0.7 34 -12 -6 -34 1 +36 0.8 0.5 19 -42 -38 -39 1 +36 0.2 0.5 4 -32 22 -42 2 +36 0.8 0.3 26 -30 -4 -6 1 +36 0.3 0.3 4 -41 39 2 2 +36 0.9 0.3 13 -35 7 -31 1 +36 0.7 0.8 50 -20 -25 -48 1 +36 0.2 0.2 -14 -18 -29 -37 1 +36 0.6 0.3 46 10 12 -37 1 +36 0.6 0.2 49 4 -17 -47 1 +36 0.1 0.6 5 2 23 -36 2 +36 0.3 0.3 26 21 -22 -32 1 +36 0.4 0.4 14 -20 40 -35 2 +36 0.3 0.2 -35 -38 47 31 2 +36 0.2 0.8 6 -24 -3 -35 2 +36 0.9 0.7 40 34 28 -44 1 +36 0.2 0.6 -15 -16 11 -14 2 +36 0.2 0.9 -11 -30 -5 -13 2 +36 0.4 0.8 43 20 42 -48 1 +36 0.7 0.5 28 19 30 -17 1 +36 0.9 0.6 27 -42 42 7 2 +36 0.8 0.7 -9 -36 23 -26 2 +36 0.5 0.9 35 9 30 21 2 +36 0.6 0.6 11 8 2 -12 1 +36 0.3 0.5 33 -30 48 -16 2 +36 0.4 0.6 -29 -49 43 -40 2 +36 0.9 0.1 49 5 -38 -41 1 +36 0.8 0.5 31 -13 49 -14 1 +36 0.8 0.7 10 -42 31 29 2 +36 0.7 0.1 35 -4 29 4 1 +36 0.9 0.8 48 -37 -16 -24 1 +36 0.8 0.6 23 -19 44 31 2 +36 0.8 0.3 7 -9 8 -31 1 +36 0.1 0.4 -20 -49 30 -35 2 +36 0.2 0.9 -25 -27 30 -17 2 +36 0.1 0.3 42 -14 44 43 2 +36 0.5 0.6 46 -26 -11 -31 1 +36 0.8 0.1 -24 -39 16 -24 2 +36 0.3 0.8 23 -14 -4 -27 1 +36 0.7 0.1 15 9 50 -9 1 +36 0.9 0.5 40 11 21 -29 1 +36 0.4 0.7 24 -50 41 -18 2 +36 0.5 0.5 14 -14 -18 -48 1 +36 0.8 0.1 21 -25 -1 -22 1 +36 0.1 0.7 3 -17 46 -22 2 +36 0.4 0.5 30 9 45 -1 1 +36 0.5 0.5 5 -32 26 -39 2 +36 0.6 0.3 43 40 38 -8 1 +36 0.7 0.2 -24 -27 23 -4 2 +36 0.1 0.5 12 -32 20 -38 2 +36 0.8 0.8 4 -50 36 -12 2 +36 0.2 0.2 40 26 47 -43 1 +36 0.4 0.8 -25 -44 32 -39 2 +36 0.4 0.9 24 8 23 -15 2 +36 0.4 0.7 23 0 -14 -44 1 +36 0.7 0.5 2 -43 -43 -47 1 +36 0.8 0.2 36 -14 5 -17 1 +36 0.7 0.9 29 15 -27 -40 1 +36 0.6 0.7 13 -49 0 -17 2 +36 0.9 0.3 3 1 50 41 2 +36 0.2 0.2 -18 -29 40 -17 2 +36 0.7 0.9 24 -29 17 -1 2 +36 0.7 0.9 -33 -41 -27 -34 2 +36 0.9 0.9 9 8 7 2 1 +36 0.9 0.3 39 26 -32 -46 1 +36 0.1 0.9 -11 -33 32 23 2 +36 0.4 0.1 28 -38 -22 -31 1 +36 0.1 0.3 36 -6 32 -23 2 +36 0.4 0.3 20 14 -12 -50 1 +36 0.8 0.3 41 7 -6 -20 1 +36 0.9 0.9 -6 -11 -2 -6 2 +36 0.3 0.7 50 -18 45 -20 2 +36 0.9 0.4 48 38 -1 -24 1 +36 0.4 0.1 48 43 26 11 1 +36 0.2 0.3 50 -9 -41 -43 1 +36 0.5 0.7 16 -13 3 -46 1 +36 0.9 0.1 21 -22 25 23 2 +36 0.5 0.8 -23 -49 40 -18 2 +36 0.5 0.9 4 -12 2 -7 2 +36 0.7 0.8 -25 -44 38 -23 2 +36 0.5 0.3 -3 -22 45 -17 2 +36 0.3 0.7 49 -46 9 -6 2 +36 0.8 0.5 32 -17 40 -7 1 +36 0.6 0.8 8 -30 -33 -47 1 +36 0.5 0.5 45 -30 36 -19 2 +36 0.3 0.3 26 
-3 11 -1 2 +36 0.7 0.5 -19 -24 3 -47 2 +36 0.9 0.6 26 -22 41 -18 1 +36 0.1 0.7 22 -33 48 23 2 +36 0.3 0.8 28 -18 37 -28 2 +36 0.8 0.3 27 16 -13 -32 1 +36 0.3 0.9 6 3 26 -26 2 +36 0.9 0.7 41 -22 -4 -21 1 +36 0.9 0.7 39 -3 30 21 1 +36 0.7 0.8 48 44 23 -7 1 +36 0.1 0.8 -2 -9 -23 -27 1 +36 0.3 0.6 45 1 -21 -41 1 +36 0.8 0.5 23 -10 -6 -22 1 +36 0.5 0.9 40 6 0 -27 1 +36 0.1 0.9 33 19 33 22 2 +36 0.9 0.2 28 -7 31 -43 1 +36 0.3 0.9 32 -46 -41 -50 1 +36 0.7 0.8 -3 -50 -2 -38 2 +36 0.3 0.6 46 7 30 -36 1 +36 0.8 0.9 -8 -50 49 -12 2 +36 0.1 0.8 20 9 46 5 2 +36 0.7 0.7 -12 -19 25 3 2 +36 0.5 0.6 35 2 -3 -32 1 +36 0.2 0.8 -12 -44 -9 -34 2 +36 0.4 0.2 -5 -7 48 -45 1 +36 0.7 0.2 42 -43 32 25 2 +36 0.6 0.1 36 20 50 -50 1 +36 0.2 0.3 38 3 3 -27 1 +36 0.7 0.3 15 -13 37 -37 1 +36 0.2 0.2 40 30 16 2 1 +36 0.1 0.8 46 40 30 -20 1 +36 0.2 0.5 22 1 34 7 2 +36 0.1 0.2 33 3 13 -45 1 +36 0.7 0.8 34 18 -39 -40 1 +36 0.7 0.9 40 -15 49 -29 2 +36 0.4 0.9 -2 -11 31 27 2 +36 0.7 0.6 -6 -33 36 2 2 +36 0.5 0.7 20 -7 46 -15 2 +36 0.9 0.4 25 -9 41 14 2 +36 0.9 0.9 37 -14 0 -6 1 +36 0.9 0.6 10 -22 21 -42 1 +36 0.2 0.4 31 7 -10 -15 1 +36 0.5 0.3 11 5 50 -18 1 +36 0.1 0.1 34 -20 22 -43 1 +36 0.8 0.4 22 -26 -36 -48 1 +36 0.4 0.4 13 9 42 -41 1 +36 0.5 0.2 -24 -38 49 -27 2 +36 0.1 0.5 -21 -41 34 -33 2 +36 0.4 0.6 29 -20 10 3 2 +36 0.7 0.2 3 -30 29 -14 1 +36 0.7 0.3 -28 -37 26 -33 2 +36 0.4 0.4 -10 -20 24 7 2 +36 0.6 0.1 1 -29 48 4 2 +36 0.5 0.2 -27 -40 19 -18 2 +36 0.9 0.2 -2 -11 39 3 2 +36 0.9 0.1 36 -47 -12 -39 1 +36 0.9 0.4 19 11 41 1 1 +36 0.6 0.1 36 -43 38 25 2 +36 0.1 0.5 -13 -25 -21 -23 2 +36 0.1 0.3 -4 -30 49 28 2 +36 0.2 0.1 -26 -50 -8 -24 2 +36 0.2 0.1 13 -36 49 22 2 +36 0.1 0.5 21 -17 36 19 2 +36 0.2 0.3 3 -26 40 13 2 +36 0.7 0.9 19 -21 29 16 2 +36 0.5 0.8 22 -46 29 -41 2 +36 0.1 0.7 16 14 -34 -43 1 +36 0.4 0.5 -36 -50 17 -3 2 +36 0.3 0.2 36 5 24 -23 1 +36 0.6 0.6 -8 -44 50 -24 2 +36 0.5 0.2 31 -40 -5 -33 1 +36 0.5 0.4 39 5 -3 -36 1 +36 0.8 0.5 4 -47 31 18 2 +36 0.4 0.5 21 -37 43 32 2 +36 0.5 0.2 -16 -36 40 -28 2 +36 0.4 0.4 38 37 26 -45 1 +36 0.2 0.6 16 -30 -3 -7 2 +36 0.3 0.5 -12 -41 48 8 2 +36 0.6 0.6 17 7 42 -6 2 +36 0.1 0.1 38 21 47 15 2 +36 0.3 0.5 20 -10 28 -15 2 +36 0.1 0.9 7 -11 18 -35 2 +36 0.4 0.8 45 -25 30 -41 2 +36 0.7 0.6 -32 -40 34 -4 2 +36 0.9 0.3 33 -2 26 -29 1 +36 0.3 0.5 40 -32 38 -24 2 +36 0.1 0.8 16 -10 -44 -50 1 +36 0.3 0.8 50 -40 17 14 2 +36 0.9 0.8 19 11 13 -28 1 +36 0.7 0.5 6 -14 40 -28 2 +36 0.8 0.5 -47 -48 0 -37 2 +36 0.6 0.3 25 18 -36 -43 1 +36 0.8 0.7 48 -30 46 17 2 +36 0.8 0.9 19 -19 -37 -46 1 +36 0.5 0.9 -21 -29 41 -16 2 +36 0.3 0.9 48 4 35 0 2 +36 0.6 0.3 39 -42 28 -30 1 +36 0.9 0.2 36 -15 9 -24 1 +36 0.7 0.2 8 -46 -24 -41 1 +36 0.8 0.6 16 -25 25 -8 2 +36 0.8 0.4 38 -7 25 -4 1 +36 0.5 0.3 46 -23 -1 -8 1 +36 0.5 0.5 43 -32 0 -50 1 +36 0.2 0.1 10 -46 16 -25 2 +36 0.1 0.1 -38 -48 -38 -44 2 +36 0.7 0.2 8 -2 32 -34 1 +36 0.2 0.2 31 -41 33 12 2 +36 0.8 0.1 19 -21 45 34 2 +36 0.4 0.2 -36 -43 49 -13 2 +36 0.7 0.9 12 -40 15 3 2 +36 0.9 0.5 15 12 20 19 2 +36 0.6 0.1 38 27 33 -44 1 +36 0.5 0.9 38 26 44 40 2 +36 0.5 0.5 -6 -34 34 -21 2 +36 0.8 0.5 22 -19 38 0 2 +36 0.7 0.1 -28 -42 46 17 2 +36 0.2 0.3 40 20 13 -38 1 +36 0.8 0.5 48 -27 -35 -44 1 +36 0.9 0.8 -27 -50 -19 -28 2 +36 0.6 0.9 43 15 35 7 2 +36 0.9 0.4 41 -19 -26 -39 1 +36 0.1 0.5 4 -33 44 -28 2 +36 0.2 0.5 8 -26 45 -39 2 +36 0.9 0.7 42 34 14 -14 1 +36 0.3 0.8 32 -32 -32 -38 1 +36 0.5 0.1 8 -36 38 -3 2 +36 0.7 0.8 13 -19 28 6 2 +36 0.3 0.8 -20 -40 -16 -41 2 +36 0.1 0.5 14 -8 34 -37 2 +36 0.3 0.8 7 -11 -24 -41 1 +36 0.3 0.4 19 -11 -25 -26 1 +36 
0.4 0.3 -1 -16 47 46 2 +36 0.8 0.8 38 7 -13 -19 1 +36 0.3 0.2 20 4 27 -45 1 +36 0.8 0.3 0 -2 35 10 2 +36 0.4 0.5 -13 -15 27 -45 2 +36 0.4 0.6 33 -2 11 -42 1 +36 0.8 0.7 46 24 28 -45 1 +36 0.4 0.1 46 36 12 7 1 +36 0.6 0.4 -11 -29 14 -29 2 +36 0.5 0.7 29 2 34 -14 2 +36 0.3 0.7 17 -43 25 -41 2 +36 0.4 0.2 45 9 49 1 1 +36 0.2 0.3 27 23 -10 -26 1 +36 0.6 0.4 26 -26 3 -17 1 +36 0.2 0.2 33 28 20 -25 1 +36 0.2 0.4 47 -2 -7 -18 1 +36 0.2 0.6 5 -39 -11 -16 2 +36 0.2 0.7 2 -45 35 -36 2 +36 0.9 0.4 16 5 -5 -19 1 +36 0.7 0.5 7 4 16 -40 1 +36 0.9 0.7 -12 -30 23 -34 2 +36 0.2 0.3 46 37 29 -20 1 +36 0.2 0.5 40 -12 26 -24 2 +36 0.1 0.3 20 -4 30 -42 1 +36 0.7 0.1 16 -28 -5 -49 1 +36 0.8 0.6 46 9 22 -34 1 +36 0.9 0.7 47 20 -27 -28 1 +36 0.8 0.2 47 -41 27 21 1 +36 0.7 0.5 26 4 -25 -39 1 +36 0.4 0.8 -22 -39 49 30 2 +36 0.7 0.4 2 -5 27 -6 2 +36 0.6 0.1 46 -18 25 13 2 +36 0.6 0.3 8 -31 -4 -9 2 +36 0.3 0.4 48 -40 28 13 2 +36 0.3 0.4 19 -17 0 -8 2 +36 0.1 0.1 -44 -48 19 -16 2 +36 0.4 0.8 49 -37 -30 -38 1 +36 0.2 0.5 -24 -49 14 -14 2 +36 0.5 0.2 48 35 29 15 1 +36 0.3 0.9 -19 -38 13 -33 2 +36 0.7 0.2 7 -34 44 10 2 +36 0.7 0.7 44 -47 -31 -39 1 +36 0.5 0.4 40 -44 -25 -50 1 +36 0.2 0.3 50 -3 -10 -31 1 +36 0.8 0.9 -40 -49 35 -25 2 +36 0.2 0.5 39 9 30 -20 1 +36 0.5 0.7 24 3 21 -26 1 +36 0.6 0.1 5 -20 -6 -7 2 +36 0.8 0.5 23 -10 15 -16 1 +36 0.6 0.5 28 -8 -31 -40 1 +36 0.5 0.1 12 -34 4 -24 1 +36 0.7 0.1 -15 -37 -7 -36 1 +36 0.7 0.8 24 -33 29 -40 2 +36 0.1 0.3 43 25 28 7 1 +36 0.8 0.9 41 -17 23 -26 1 +36 0.5 0.8 -11 -46 15 -39 2 +36 0.9 0.5 -21 -42 34 -21 2 +36 0.3 0.1 -2 -34 2 -36 1 +36 0.2 0.2 29 -40 -10 -33 1 +36 0.5 0.2 36 23 5 -29 1 +36 0.8 0.1 18 -4 -3 -16 1 +36 0.9 0.2 43 -16 47 38 1 +36 0.1 0.3 28 -16 28 -6 2 +36 0.7 0.3 25 -35 49 4 2 +36 0.9 0.3 18 -20 30 6 2 +36 0.2 0.7 42 -13 21 17 2 +36 0.8 0.2 37 -37 29 27 2 +36 0.9 0.7 48 14 45 -41 1 +36 0.5 0.9 35 -48 20 -46 2 +37 0.5 0.5 -12 -16 45 43 2 +37 0.3 0.5 -13 -17 3 -45 2 +37 0.7 0.6 29 -7 16 -25 1 +37 0.2 0.7 43 33 26 -14 1 +37 0.1 0.7 24 -30 46 -21 2 +37 0.7 0.4 25 -28 33 -45 1 +37 0.3 0.5 36 5 43 22 2 +37 0.5 0.9 10 -14 7 -38 1 +37 0.5 0.6 16 -24 46 -4 2 +37 0.4 0.3 16 -9 22 -2 1 +37 0.5 0.2 20 -7 22 16 2 +37 0.4 0.7 -26 -32 8 -10 2 +37 0.4 0.6 -40 -47 9 -4 2 +37 0.9 0.9 -1 -4 -20 -24 1 +37 0.6 0.5 -10 -45 -20 -49 1 +37 0.4 0.3 39 28 49 -4 1 +37 0.5 0.8 47 -37 44 -23 2 +37 0.1 0.2 19 -16 25 -15 1 +37 0.1 0.2 35 8 35 9 1 +37 0.3 0.1 25 0 35 -27 1 +37 0.3 0.3 -12 -21 5 -44 2 +37 0.1 0.2 -27 -34 15 -13 2 +37 0.5 0.7 42 31 4 -17 1 +37 0.7 0.7 -10 -16 45 41 2 +37 0.5 0.7 29 20 -3 -36 1 +37 0.3 0.8 1 -18 -16 -39 1 +37 0.5 0.2 17 -40 -40 -43 1 +37 0.2 0.6 29 -22 -1 -35 1 +37 0.7 0.3 10 -28 20 -4 1 +37 0.4 0.7 23 -48 -8 -26 1 +37 0.2 0.5 13 -32 1 -46 1 +37 0.9 0.9 -26 -42 48 26 2 +37 0.6 0.3 15 -38 25 -4 2 +37 0.6 0.3 -22 -30 32 -28 2 +37 0.1 0.6 37 -16 39 -10 2 +37 0.7 0.9 22 -19 48 -11 2 +37 0.4 0.2 43 -35 29 -35 1 +37 0.4 0.4 11 -37 4 -2 2 +37 0.4 0.7 -3 -39 31 -5 2 +37 0.6 0.3 22 10 49 31 1 +37 0.5 0.6 19 -40 48 -3 2 +37 0.5 0.1 -4 -19 38 -6 2 +37 0.6 0.9 12 -7 7 -31 1 +37 0.7 0.2 -39 -48 44 -30 2 +37 0.8 0.3 49 -11 19 -30 1 +37 0.6 0.7 -11 -36 25 18 2 +37 0.8 0.1 22 -17 -8 -24 1 +37 0.3 0.5 -15 -26 22 -44 2 +37 0.7 0.2 20 -23 36 11 2 +37 0.4 0.4 14 -10 -16 -42 1 +37 0.9 0.4 28 -1 -25 -46 1 +37 0.3 0.8 9 8 27 7 2 +37 0.3 0.5 25 -38 45 -8 2 +37 0.8 0.9 10 1 4 -15 1 +37 0.8 0.6 -43 -47 -3 -42 2 +37 0.6 0.1 49 29 43 15 1 +37 0.5 0.4 42 6 42 5 1 +37 0.3 0.8 44 -40 15 13 2 +37 0.1 0.5 21 -33 36 -22 2 +37 0.4 0.6 30 6 -4 -13 1 +37 0.2 0.3 -33 -37 41 10 2 +37 0.5 0.7 41 24 
[example-data rows, flattened in extraction: the patch adds one "+"-prefixed diff line per trial for subjects 37-45, each of the form "+<subjID> <p1> <p2> <v1> <v2> <v3> <v4> <choice>", with the two probability fields ranging over 0.1-0.9, the four outcome values over -50 to 50, and choice coded 1 or 2]
0.9 18 11 -22 -38 1 +45 0.6 0.2 8 -50 50 11 2 +45 0.8 0.5 23 -4 -20 -26 1 +45 0.1 0.1 -6 -22 26 -48 1 +45 0.6 0.2 17 -46 35 -20 1 +45 0.6 0.7 15 -18 45 -49 2 +45 0.5 0.2 -34 -39 8 -23 2 +45 0.2 0.3 4 -23 -7 -45 1 +45 0.4 0.4 17 -3 39 -11 2 +45 0.5 0.2 44 11 40 -15 1 +45 0.4 0.1 6 -45 39 -27 2 +45 0.3 0.8 20 -7 18 -50 2 +45 0.2 0.5 25 -2 31 -46 1 +45 0.3 0.4 42 -9 46 -4 2 +45 0.6 0.6 -16 -44 -17 -43 1 +45 0.3 0.4 36 -49 25 -45 2 +45 0.7 0.6 1 -8 6 -31 1 +45 0.8 0.1 11 -39 22 11 2 +45 0.2 0.6 -27 -45 19 18 2 +45 0.1 0.1 31 -41 -16 -32 1 +45 0.6 0.1 33 -29 12 8 1 +45 0.5 0.8 44 2 22 -7 1 +45 0.7 0.6 50 -30 40 -31 1 +45 0.1 0.9 14 -30 44 36 2 +45 0.9 0.5 -38 -45 35 -40 2 +45 0.3 0.6 47 8 46 -3 2 +45 0.7 0.3 24 -21 10 -49 1 +45 0.7 0.1 19 -1 -23 -37 1 +45 0.9 0.2 -14 -35 28 -48 1 +45 0.7 0.2 -16 -21 45 -13 2 +45 0.8 0.6 1 -49 31 -23 2 +45 0.9 0.9 27 -32 21 11 2 +45 0.8 0.2 34 20 28 -28 1 +46 0.9 0.7 3 -50 22 1 2 +46 0.8 0.7 34 15 -6 -37 1 +46 0.3 0.7 30 -32 23 -49 2 +46 0.4 0.7 -13 -47 13 -29 2 +46 0.3 0.5 20 -30 49 -12 2 +46 0.5 0.9 45 42 20 -17 1 +46 0.3 0.8 45 -2 -22 -50 1 +46 0.3 0.2 19 0 19 5 2 +46 0.1 0.8 5 -35 -18 -43 2 +46 0.1 0.7 45 -40 -24 -31 1 +46 0.4 0.3 37 16 41 27 1 +46 0.6 0.1 14 -15 -34 -47 1 +46 0.2 0.8 32 31 30 -30 1 +46 0.4 0.3 48 -9 -17 -47 1 +46 0.6 0.6 49 7 23 -24 2 +46 0.2 0.6 19 -39 25 -34 2 +46 0.3 0.3 15 -9 13 -35 1 +46 0.6 0.7 -3 -48 -5 -43 1 +46 0.8 0.8 35 -25 30 -49 1 +46 0.2 0.3 40 13 18 -21 1 +46 0.3 0.6 40 20 11 -27 1 +46 0.6 0.1 8 5 27 -29 1 +46 0.9 0.5 15 -12 40 10 2 +46 0.4 0.5 -1 -25 -13 -31 2 +46 0.9 0.1 -45 -47 19 -41 1 +46 0.9 0.2 6 -49 9 1 1 +46 0.2 0.7 44 -40 48 19 2 +46 0.5 0.5 50 -29 19 -13 1 +46 0.8 0.8 50 -41 -27 -39 1 +46 0.2 0.1 43 -29 -20 -26 2 +46 0.4 0.3 34 20 -47 -49 1 +46 0.7 0.2 -2 -47 20 -9 2 +46 0.1 0.5 44 6 44 33 2 +46 0.8 0.5 34 -43 15 -21 1 +46 0.8 0.1 50 -22 13 -24 1 +46 0.1 0.8 10 2 48 37 2 +46 0.2 0.8 27 -1 50 -45 2 +46 0.2 0.8 -28 -50 -29 -47 2 +46 0.1 0.3 12 -29 -16 -28 1 +46 0.5 0.7 39 -3 32 -14 2 +46 0.9 0.3 19 -7 48 39 2 +46 0.5 0.5 43 -18 40 -11 2 +46 0.2 0.1 -7 -17 -22 -37 1 +46 0.4 0.5 -32 -38 38 -18 2 +46 0.3 0.5 -2 -3 -18 -28 2 +46 0.2 0.8 24 23 21 15 2 +46 0.2 0.8 25 -41 -24 -27 1 +46 0.3 0.6 44 -21 17 11 2 +46 0.4 0.4 44 5 -11 -39 2 +46 0.9 0.4 25 17 28 -15 1 +46 0.8 0.5 31 -38 49 -37 1 +46 0.7 0.4 6 -7 21 20 2 +46 0.4 0.3 -7 -43 18 4 2 +46 0.4 0.2 49 -2 30 -21 1 +46 0.6 0.1 7 -3 43 -26 2 +46 0.2 0.7 48 8 25 -47 1 +46 0.9 0.9 34 -19 -31 -44 1 +46 0.8 0.1 -1 -47 41 31 2 +46 0.7 0.8 44 19 38 -44 1 +46 0.3 0.1 11 -34 33 -35 1 +46 0.7 0.2 43 -31 7 -18 1 +46 0.6 0.1 -8 -36 28 22 2 +46 0.8 0.2 48 -12 -20 -39 1 +46 0.3 0.9 11 -30 39 -14 2 +46 0.1 0.3 41 -46 1 -38 2 +46 0.9 0.4 46 -7 1 -42 1 +46 0.1 0.3 1 -17 28 -9 2 +46 0.1 0.3 4 -35 39 -48 2 +46 0.3 0.5 7 -35 -17 -33 1 +46 0.5 0.7 15 -48 42 -9 2 +46 0.2 0.5 28 13 -14 -27 1 +46 0.2 0.1 24 -13 50 46 2 +46 0.5 0.2 -9 -20 18 -26 1 +46 0.5 0.4 46 18 45 20 1 +46 0.5 0.1 27 25 -27 -29 1 +46 0.8 0.2 14 -48 37 -50 1 +46 0.5 0.8 42 -31 26 14 2 +46 0.6 0.8 46 -9 45 -14 2 +46 0.8 0.3 -10 -25 48 -28 1 +46 0.1 0.8 17 -1 43 18 2 +46 0.9 0.4 44 -47 4 -47 1 +46 0.8 0.3 29 -35 35 -44 1 +46 0.6 0.4 6 -8 39 -31 1 +46 0.7 0.2 -4 -23 30 -17 2 +46 0.5 0.6 36 -31 23 -37 1 +46 0.7 0.5 46 18 29 20 2 +46 0.8 0.5 11 -37 -18 -28 1 +46 0.1 0.5 30 -20 25 -34 2 +46 0.2 0.6 -37 -43 5 -23 2 +46 0.6 0.9 50 30 49 47 1 +46 0.8 0.7 27 -49 24 -36 1 +46 0.5 0.5 25 11 27 19 2 +46 0.6 0.1 30 20 -10 -25 1 +46 0.3 0.7 33 3 33 -22 1 +46 0.1 0.6 8 -21 37 -37 2 +46 0.5 0.3 -4 -24 -25 -27 2 +46 0.5 0.3 34 -23 31 -13 1 +46 
0.8 0.8 -21 -36 49 -14 2 +46 0.9 0.6 17 -4 -18 -37 1 +46 0.7 0.4 -4 -15 27 -11 2 +46 0.4 0.7 33 -49 27 0 2 +46 0.4 0.3 27 -22 37 -24 1 +46 0.9 0.6 -6 -41 28 -42 2 +46 0.9 0.6 20 -15 41 8 1 +46 0.4 0.4 -1 -18 15 10 2 +46 0.8 0.3 47 -28 -7 -39 1 +46 0.6 0.3 11 8 -18 -40 1 +46 0.6 0.4 27 -37 29 3 2 +46 0.4 0.5 41 17 12 -45 1 +46 0.4 0.1 40 13 -7 -45 1 +46 0.7 0.5 47 16 19 -13 1 +46 0.2 0.3 39 -30 36 12 2 +46 0.2 0.9 49 -36 22 1 2 +46 0.9 0.1 10 -35 -8 -24 1 +46 0.7 0.1 16 -49 36 -6 1 +46 0.1 0.4 43 -41 0 -44 2 +46 0.2 0.2 44 13 20 -34 1 +46 0.2 0.3 -4 -24 37 -5 1 +46 0.6 0.2 -24 -39 33 11 2 +46 0.7 0.4 50 10 -12 -39 1 +46 0.1 0.9 26 -39 -41 -43 2 +46 0.8 0.2 17 -13 -11 -26 1 +46 0.5 0.3 41 -50 41 -17 2 +46 0.3 0.6 -4 -41 11 -36 2 +46 0.8 0.6 20 -20 25 15 2 +46 0.2 0.2 18 -33 45 -41 1 +46 0.9 0.3 29 6 43 23 2 +46 0.6 0.7 -3 -21 -10 -50 1 +46 0.5 0.3 9 -28 0 -15 2 +46 0.6 0.1 -4 -17 18 -10 1 +46 0.8 0.9 12 -43 -17 -36 1 +46 0.4 0.4 49 -41 28 21 2 +46 0.1 0.9 2 -20 32 -39 2 +46 0.6 0.4 21 10 46 30 2 +46 0.5 0.7 37 -21 40 -4 2 +46 0.5 0.6 44 -9 -37 -41 1 +46 0.2 0.9 29 -14 47 -6 2 +46 0.5 0.4 26 -16 -15 -39 1 +46 0.3 0.5 46 21 -3 -29 1 +46 0.7 0.1 7 -43 23 -19 1 +46 0.5 0.9 46 -24 39 -29 2 +46 0.4 0.8 32 21 49 13 2 +46 0.3 0.1 32 8 7 -48 1 +46 0.4 0.1 0 -24 -7 -23 2 +46 0.8 0.8 50 3 -5 -20 1 +46 0.7 0.7 -10 -39 45 -29 2 +46 0.9 0.3 24 19 -37 -49 1 +46 0.9 0.6 43 36 38 -43 1 +46 0.9 0.9 34 -23 42 6 2 +46 0.4 0.2 -17 -18 47 3 2 +46 0.6 0.2 50 22 22 -5 1 +46 0.6 0.7 -6 -20 30 -46 2 +46 0.3 0.2 48 19 49 -45 1 +46 0.5 0.8 -15 -26 10 -20 2 +46 0.8 0.6 49 -24 43 9 1 +46 0.1 0.6 32 -46 33 -2 2 +46 0.9 0.8 12 -11 37 -1 2 +46 0.1 0.1 37 10 24 22 2 +46 0.6 0.1 10 -48 39 32 2 +46 0.3 0.6 30 -41 5 1 2 +46 0.7 0.4 41 18 2 -31 1 +46 0.5 0.8 44 5 23 7 1 +46 0.4 0.6 -2 -10 -3 -6 2 +46 0.5 0.8 2 -27 -16 -20 1 +46 0.4 0.9 -7 -45 7 -42 2 +46 0.9 0.4 -2 -23 45 33 2 +46 0.8 0.4 34 25 1 -11 1 +46 0.7 0.7 22 -30 18 9 2 +46 0.6 0.5 38 0 44 -48 1 +46 0.6 0.7 -12 -31 24 -5 2 +46 0.6 0.8 30 -44 47 -19 2 +46 0.4 0.6 7 -28 -24 -48 1 +46 0.2 0.7 8 -4 29 -21 2 +46 0.4 0.1 16 -8 39 -19 1 +46 0.3 0.3 40 2 4 -11 1 +46 0.5 0.7 37 3 -45 -50 1 +46 0.3 0.9 -26 -28 35 -36 2 +46 0.2 0.4 26 -30 31 -35 2 +46 0.7 0.2 3 -23 25 -35 2 +46 0.6 0.6 0 -28 0 -10 1 +46 0.1 0.5 41 16 41 23 2 +46 0.3 0.2 -8 -23 -29 -38 1 +46 0.6 0.8 -33 -45 28 -30 2 +46 0.5 0.1 14 -22 39 -32 2 +46 0.5 0.7 45 -32 -27 -46 2 +46 0.6 0.1 22 -15 19 7 1 +46 0.9 0.5 45 24 45 14 1 +46 0.8 0.2 47 -2 20 -6 1 +46 0.7 0.4 18 -45 29 -6 2 +46 0.7 0.7 -9 -39 -18 -32 2 +46 0.7 0.6 20 -14 2 -46 1 +46 0.7 0.3 25 -18 37 6 2 +46 0.1 0.1 -9 -15 -30 -47 2 +46 0.7 0.7 -22 -30 -21 -47 2 +46 0.3 0.2 5 -38 12 -2 2 +46 0.7 0.8 34 -17 41 27 2 +46 0.3 0.7 45 -45 42 11 2 +46 0.4 0.2 29 24 43 25 1 +46 0.3 0.6 24 -34 21 -16 2 +46 0.4 0.2 41 26 44 15 2 +46 0.9 0.5 21 6 23 20 1 +46 0.9 0.7 -6 -29 21 -24 2 +46 0.2 0.2 19 13 47 22 2 +46 0.7 0.3 45 22 25 -7 1 +46 0.9 0.7 -13 -44 48 10 2 +46 0.4 0.6 42 0 33 -15 1 +46 0.9 0.2 25 -21 9 -4 1 +46 0.6 0.3 42 32 29 25 1 +46 0.1 0.7 33 -46 46 37 2 +46 0.2 0.9 16 0 -24 -48 1 +46 0.6 0.5 21 -1 21 -37 1 +46 0.2 0.1 36 19 3 -49 1 +46 0.1 0.9 -39 -40 29 2 2 +46 0.5 0.8 6 -24 20 9 2 +46 0.3 0.4 2 -4 34 11 2 +46 0.6 0.4 49 -18 -15 -18 1 +46 0.6 0.2 19 -35 42 -18 1 +46 0.6 0.8 4 -38 23 18 2 +46 0.5 0.9 5 -5 -25 -33 2 +46 0.2 0.7 14 13 33 -32 2 +46 0.5 0.7 5 -33 50 25 2 +46 0.8 0.7 45 -31 43 26 2 +46 0.3 0.5 28 -31 48 -7 2 +46 0.8 0.9 20 -38 22 -23 1 +46 0.6 0.8 6 -45 -7 -15 2 +46 0.7 0.5 22 -24 -5 -41 1 +46 0.8 0.7 43 35 -26 -36 1 +46 0.6 0.6 -32 -48 -20 -31 1 +46 0.6 
0.7 48 1 -9 -31 1 +46 0.4 0.2 47 -31 37 -8 2 +46 0.2 0.4 19 7 25 19 2 +46 0.5 0.7 35 -31 0 -18 2 +46 0.1 0.9 9 -17 26 0 2 +46 0.8 0.5 40 12 48 -46 1 +46 0.9 0.9 38 26 12 -31 1 +46 0.9 0.8 -24 -36 26 5 2 +46 0.4 0.2 -47 -49 -36 -37 1 +46 0.7 0.6 2 -26 5 -49 1 +46 0.8 0.2 43 -47 -28 -40 1 +46 0.4 0.9 6 -31 7 -35 2 +46 0.6 0.4 47 -18 48 -26 2 +46 0.2 0.4 49 33 35 -28 1 +46 0.3 0.9 28 19 16 -8 2 +46 0.3 0.2 35 -31 1 -22 1 +46 0.7 0.8 16 -1 33 -36 2 +46 0.7 0.2 25 -15 37 21 2 +46 0.9 0.1 42 -27 49 -48 1 +46 0.8 0.3 44 -45 -18 -19 2 +46 0.3 0.9 -45 -47 -26 -44 1 +46 0.1 0.5 30 4 31 12 2 +46 0.3 0.5 7 -19 32 -17 1 +46 0.9 0.4 41 3 32 -12 1 +46 0.1 0.6 38 19 -16 -19 1 +46 0.1 0.1 11 -39 50 -37 2 +46 0.7 0.7 36 -19 18 -34 1 +46 0.1 0.9 33 -38 12 -23 2 +46 0.1 0.3 28 -6 -1 -10 2 +46 0.8 0.7 40 24 -10 -33 1 +46 0.2 0.4 4 -1 11 3 2 +46 0.6 0.8 48 37 29 -4 2 +46 0.3 0.1 -9 -23 43 -25 2 +46 0.6 0.4 8 -18 20 -36 1 +46 0.1 0.8 22 0 50 13 2 +46 0.4 0.9 23 -45 -19 -43 1 +46 0.8 0.4 38 32 -41 -47 1 +46 0.8 0.6 -29 -38 40 -12 2 +46 0.9 0.4 10 -27 36 9 2 +46 0.5 0.4 -28 -29 16 15 2 +46 0.5 0.2 46 2 37 -27 1 +46 0.8 0.8 30 16 37 -45 1 +46 0.3 0.6 33 -28 38 36 2 +46 0.4 0.6 24 -26 23 -5 1 +46 0.2 0.7 -40 -49 -6 -33 2 +46 0.4 0.9 -29 -30 1 -20 2 +46 0.6 0.5 40 -19 41 7 1 +46 0.9 0.2 48 31 -22 -44 1 +46 0.1 0.7 -6 -34 22 -41 2 +46 0.9 0.7 10 -30 9 -24 1 +46 0.4 0.1 -2 -3 9 -30 1 +46 0.6 0.8 -40 -42 18 0 2 +46 0.1 0.8 -6 -37 30 -5 2 +46 0.9 0.9 24 13 23 2 1 +46 0.2 0.5 -18 -38 35 27 2 +46 0.5 0.1 28 -50 -40 -49 1 +46 0.5 0.8 -36 -46 -26 -27 2 +46 0.7 0.3 -44 -47 10 -29 2 +46 0.4 0.9 35 -50 -10 -17 1 +46 0.2 0.6 44 20 45 -50 1 +46 0.7 0.9 50 -45 46 39 2 +46 0.1 0.3 48 -34 48 -33 1 +46 0.5 0.6 34 -37 -6 -40 1 +46 0.4 0.5 50 2 7 -36 1 +46 0.2 0.7 -3 -24 36 9 2 +46 0.8 0.6 27 -37 -10 -33 1 +46 0.4 0.8 36 -12 30 -27 1 +46 0.9 0.7 41 -5 46 43 1 +46 0.3 0.6 -3 -6 -6 -20 2 +46 0.9 0.7 40 8 7 -35 1 +46 0.9 0.5 13 -20 -18 -38 1 +46 0.9 0.5 38 20 34 -26 1 +47 0.8 0.7 -25 -26 44 6 2 +47 0.9 0.2 25 11 -31 -39 1 +47 0.6 0.7 7 -23 47 -19 2 +47 0.4 0.6 49 40 18 -45 2 +47 0.1 0.1 -45 -50 45 1 2 +47 0.6 0.4 34 7 17 -46 1 +47 0.7 0.3 -15 -35 30 -22 2 +47 0.1 0.2 4 3 -3 -8 1 +47 0.2 0.5 48 -40 -7 -37 1 +47 0.6 0.3 16 -44 -15 -18 1 +47 0.4 0.6 34 -25 36 -9 2 +47 0.5 0.1 49 27 37 36 1 +47 0.2 0.7 35 -37 11 5 2 +47 0.3 0.5 -19 -35 -18 -36 2 +47 0.8 0.1 43 35 12 -5 1 +47 0.9 0.5 36 -32 12 -35 1 +47 0.1 0.5 -11 -43 25 -23 2 +47 0.8 0.7 27 -49 8 -46 2 +47 0.8 0.4 -10 -17 -1 -45 1 +47 0.6 0.6 -1 -10 7 -47 2 +47 0.8 0.9 -17 -23 -7 -39 2 +47 0.7 0.7 -3 -14 48 -49 1 +47 0.5 0.8 -27 -34 26 14 2 +47 0.1 0.4 31 30 28 -43 1 +47 0.9 0.6 42 11 19 -6 1 +47 0.1 0.5 45 -27 -12 -24 1 +47 0.2 0.7 25 -18 -39 -47 1 +47 0.4 0.9 14 -21 -6 -41 1 +47 0.1 0.4 44 11 48 22 1 +47 0.6 0.8 36 -32 -3 -39 1 +47 0.2 0.3 44 -30 39 23 2 +47 0.6 0.2 -29 -32 12 -31 1 +47 0.9 0.2 43 -25 26 16 1 +47 0.8 0.9 14 13 -2 -44 1 +47 0.5 0.9 36 -13 21 -38 1 +47 0.4 0.1 41 35 22 7 1 +47 0.1 0.1 14 -1 -3 -11 2 +47 0.9 0.9 27 -14 36 -30 1 +47 0.1 0.7 34 -19 15 1 1 +47 0.7 0.5 37 -26 39 -2 2 +47 0.4 0.5 43 12 45 28 1 +47 0.1 0.5 -18 -26 17 -4 2 +47 0.8 0.2 8 4 17 -16 1 +47 0.2 0.9 4 -12 -7 -21 1 +47 0.8 0.3 -5 -17 38 -3 1 +47 0.9 0.2 50 5 9 -30 1 +47 0.9 0.1 26 -35 18 -28 1 +47 0.5 0.4 42 38 -10 -22 1 +47 0.9 0.3 14 -21 29 -50 1 +47 0.3 0.4 -30 -45 25 -14 1 +47 0.7 0.6 45 4 34 7 1 +47 0.3 0.7 26 5 -25 -47 1 +47 0.9 0.2 6 -20 49 15 2 +47 0.2 0.6 35 33 32 -28 1 +47 0.8 0.8 38 27 4 -14 1 +47 0.6 0.2 -2 -27 -4 -10 1 +47 0.7 0.1 -28 -29 -14 -35 2 +47 0.9 0.6 49 8 11 5 1 +47 0.9 0.6 35 -22 28 
17 2 +47 0.3 0.7 34 26 49 48 2 +47 0.6 0.3 -28 -35 -1 -21 1 +47 0.9 0.5 11 -21 41 30 2 +47 0.4 0.6 48 4 0 -48 1 +47 0.6 0.7 -13 -18 -34 -39 1 +47 0.1 0.8 50 13 34 -34 1 +47 0.7 0.6 -31 -33 47 -47 2 +47 0.9 0.3 -10 -45 17 -38 2 +47 0.9 0.1 20 -8 -9 -17 1 +47 0.8 0.3 -34 -41 19 -48 2 +47 0.8 0.7 30 -2 38 -25 1 +47 0.7 0.7 24 22 -29 -32 1 +47 0.5 0.6 39 20 36 12 2 +47 0.8 0.4 14 7 45 -6 1 +47 0.4 0.8 32 12 49 -24 2 +47 0.5 0.6 40 -17 32 -17 1 +47 0.8 0.4 38 -28 1 -32 1 +47 0.9 0.3 32 -9 -20 -31 1 +47 0.4 0.8 46 33 15 10 2 +47 0.9 0.1 20 -31 48 -50 1 +47 0.7 0.5 35 -4 46 37 2 +47 0.4 0.4 11 -28 -9 -21 2 +47 0.5 0.9 35 24 8 -11 2 +47 0.1 0.6 24 2 32 -15 2 +47 0.5 0.2 8 -22 37 15 2 +47 0.6 0.9 20 -44 42 -11 2 +47 0.5 0.6 5 -19 34 -17 2 +47 0.8 0.1 43 -8 26 -40 2 +47 0.4 0.9 6 -45 30 2 2 +47 0.1 0.4 40 -17 29 4 2 +47 0.3 0.2 -14 -18 5 -31 2 +47 0.9 0.5 -13 -15 -14 -35 1 +47 0.6 0.1 -22 -29 12 1 2 +47 0.8 0.5 6 -44 25 -49 1 +47 0.3 0.1 5 -33 41 -16 2 +47 0.2 0.2 35 25 -30 -37 1 +47 0.4 0.1 -4 -16 38 -8 2 +47 0.5 0.8 50 22 28 -25 1 +47 0.9 0.4 -14 -17 -23 -35 1 +47 0.7 0.7 45 5 45 -48 2 +47 0.7 0.5 42 -42 24 -15 2 +47 0.8 0.1 12 -19 12 -30 1 +47 0.3 0.4 27 -44 -9 -36 2 +47 0.8 0.5 21 -7 -5 -11 1 +47 0.1 0.4 49 -18 15 6 2 +47 0.9 0.9 33 0 -8 -16 1 +47 0.7 0.3 5 -41 36 -26 1 +47 0.7 0.2 37 -8 12 6 1 +47 0.4 0.1 37 -18 13 -1 1 +47 0.9 0.5 39 -29 20 12 1 +47 0.2 0.2 -37 -45 49 -17 2 +47 0.1 0.5 12 -49 -36 -48 2 +47 0.9 0.2 9 -9 10 -34 2 +47 0.7 0.8 37 6 -15 -28 1 +47 0.4 0.4 11 -16 18 -35 1 +47 0.2 0.8 37 -2 28 -49 1 +47 0.4 0.4 23 -47 22 -24 1 +47 0.4 0.5 29 3 45 -8 1 +47 0.8 0.6 31 -4 31 -29 1 +47 0.2 0.9 29 -49 -10 -48 2 +47 0.7 0.4 2 -15 32 -25 2 +47 0.7 0.7 48 41 19 -39 1 +47 0.1 0.1 -8 -37 21 -17 2 +47 0.8 0.7 28 16 46 28 2 +47 0.9 0.1 50 7 45 -24 1 +47 0.9 0.5 -7 -24 6 -1 2 +47 0.4 0.8 35 6 -30 -33 1 +47 0.4 0.8 -27 -44 -14 -37 2 +47 0.4 0.2 -10 -28 8 -8 2 +47 0.5 0.3 42 -10 6 -42 1 +47 0.9 0.9 15 -49 40 -22 2 +47 0.3 0.7 21 -36 41 11 2 +47 0.6 0.7 44 -45 46 44 2 +47 0.5 0.5 21 18 41 40 2 +47 0.4 0.9 33 16 -4 -41 2 +47 0.3 0.7 49 47 33 -30 1 +47 0.9 0.9 36 -1 -4 -40 1 +47 0.2 0.9 1 -45 19 -40 2 +47 0.8 0.3 22 -37 50 0 1 +47 0.1 0.3 32 -9 49 -38 1 +47 0.5 0.3 36 -33 44 6 2 +47 0.1 0.6 -20 -26 -33 -39 1 +47 0.1 0.6 19 -19 5 -32 2 +47 0.6 0.1 0 -32 -23 -49 2 +47 0.7 0.5 8 -38 31 27 2 +47 0.9 0.6 11 -46 30 -41 1 +47 0.5 0.5 27 -31 20 -41 1 +47 0.6 0.9 4 -20 39 3 2 +47 0.1 0.9 43 40 30 -21 1 +47 0.8 0.8 24 -11 -24 -32 1 +47 0.3 0.4 22 -36 47 -30 2 +47 0.2 0.9 44 -24 42 -31 2 +47 0.9 0.1 18 -4 -30 -32 1 +47 0.6 0.4 11 -5 47 -15 2 +47 0.5 0.2 -1 -28 9 -21 1 +47 0.4 0.7 9 3 34 19 2 +47 0.3 0.4 45 -9 45 0 2 +47 0.9 0.8 25 9 36 -1 2 +47 0.8 0.5 41 19 16 0 1 +47 0.7 0.3 2 -14 4 -44 2 +47 0.4 0.6 21 -10 21 -12 2 +47 0.3 0.5 36 25 4 -32 1 +47 0.5 0.1 35 -7 15 -34 1 +47 0.4 0.7 22 9 -5 -41 1 +47 0.8 0.8 20 -35 6 -12 2 +47 0.6 0.4 -29 -48 12 -17 2 +47 0.6 0.8 -15 -32 -30 -48 1 +47 0.9 0.8 49 41 47 8 1 +47 0.6 0.4 11 -27 25 -34 2 +47 0.9 0.3 14 7 -15 -22 1 +47 0.3 0.4 41 35 24 -10 1 +47 0.5 0.1 -5 -17 -1 -25 1 +47 0.8 0.2 -41 -44 18 -20 2 +47 0.7 0.6 2 -9 15 -27 1 +47 0.7 0.3 -21 -30 34 -31 1 +47 0.9 0.5 -2 -17 10 -23 2 +47 0.8 0.7 49 -36 17 -4 2 +47 0.7 0.2 32 4 1 -9 1 +47 0.1 0.4 -30 -44 -2 -15 2 +47 0.4 0.2 46 -30 28 22 2 +47 0.8 0.3 38 8 39 32 2 +47 0.1 0.7 3 -18 -16 -49 2 +47 0.7 0.1 -29 -46 4 -4 2 +47 0.1 0.9 30 19 -39 -40 1 +47 0.8 0.4 26 -43 29 -8 1 +47 0.9 0.9 29 18 50 7 2 +47 0.3 0.2 9 -38 -30 -44 1 +47 0.8 0.2 43 -25 -28 -43 1 +47 0.1 0.3 25 -13 45 -24 2 +47 0.3 0.6 31 -18 -11 -18 1 +47 0.3 0.9 -36 
-40 13 -30 2 +47 0.1 0.6 1 -49 17 12 2 +47 0.3 0.2 -22 -48 41 -9 2 +47 0.1 0.9 24 -43 24 -42 2 +47 0.3 0.1 34 -28 12 -45 1 +47 0.9 0.7 32 -22 36 -50 1 +47 0.3 0.4 -15 -34 39 34 2 +47 0.4 0.9 49 -18 27 -43 2 +47 0.6 0.6 48 12 46 -15 1 +47 0.6 0.8 47 7 -19 -25 1 +47 0.8 0.1 45 41 9 -36 1 +47 0.5 0.1 33 -6 4 -25 1 +47 0.1 0.1 47 -45 43 20 2 +47 0.7 0.5 21 -17 19 -16 1 +47 0.6 0.8 41 -10 -1 -21 1 +47 0.4 0.2 45 -22 12 -34 1 +47 0.5 0.4 47 -43 15 -23 2 +47 0.5 0.8 18 -40 38 -47 2 +47 0.6 0.5 38 -11 -14 -37 1 +47 0.4 0.4 -11 -33 45 -5 2 +47 0.5 0.3 -17 -42 22 -6 2 +47 0.2 0.4 24 -46 35 6 2 +47 0.3 0.5 28 7 45 -30 2 +47 0.8 0.2 12 -13 -23 -44 1 +47 0.6 0.4 26 19 38 -9 2 +47 0.7 0.8 -10 -45 21 -7 2 +47 0.3 0.9 39 18 50 -36 2 +47 0.2 0.5 20 -18 35 -5 2 +47 0.2 0.5 50 5 3 -1 1 +47 0.5 0.6 50 -29 25 -27 1 +47 0.7 0.5 38 11 47 -40 1 +47 0.6 0.4 50 -48 41 16 2 +47 0.7 0.1 20 7 11 -48 1 +47 0.4 0.5 24 -23 10 -16 2 +47 0.1 0.7 37 -17 37 -16 2 +47 0.8 0.3 7 -10 -2 -10 1 +47 0.2 0.8 -15 -18 -25 -29 1 +47 0.6 0.6 44 2 9 -34 1 +47 0.2 0.3 41 -13 30 -43 1 +47 0.1 0.4 25 -14 -12 -46 1 +47 0.6 0.7 40 -11 -13 -16 1 +47 0.8 0.2 41 6 0 -12 1 +47 0.9 0.8 -14 -43 34 -11 2 +47 0.4 0.1 -38 -48 27 -4 2 +47 0.5 0.1 20 -46 32 -40 1 +47 0.1 0.9 27 -37 30 6 2 +47 0.2 0.5 -2 -33 -18 -29 2 +47 0.7 0.4 36 35 36 -48 1 +47 0.7 0.8 35 28 -13 -36 1 +47 0.8 0.3 34 10 -9 -16 1 +47 0.7 0.6 44 -25 9 -4 1 +47 0.5 0.4 22 -11 45 38 1 +47 0.9 0.7 -14 -36 24 10 2 +47 0.5 0.5 43 -29 50 46 2 +47 0.1 0.1 26 -44 26 15 2 +47 0.4 0.7 2 -50 6 -42 1 +47 0.7 0.4 35 -49 37 -11 1 +47 0.1 0.7 -27 -49 25 -3 2 +47 0.4 0.5 -14 -16 43 2 2 +47 0.9 0.8 38 -38 43 -14 1 +47 0.1 0.4 -3 -7 -1 -23 2 +47 0.5 0.2 18 -11 14 13 2 +47 0.3 0.1 8 -39 38 -34 1 +47 0.2 0.7 46 -10 24 -17 1 +47 0.6 0.1 24 4 32 -10 1 +47 0.6 0.6 41 -2 10 -25 1 +47 0.3 0.2 7 -46 49 12 2 +47 0.4 0.1 37 30 -21 -32 1 +47 0.3 0.6 50 21 -31 -35 1 +47 0.9 0.8 -28 -42 15 -15 2 +47 0.1 0.5 -6 -45 5 -23 2 +47 0.5 0.3 -12 -23 -2 -24 1 +47 0.9 0.6 38 5 34 25 1 +47 0.9 0.1 14 -12 6 -34 1 +47 0.4 0.9 45 10 8 -43 1 +47 0.2 0.6 8 -36 40 -49 2 +47 0.4 0.8 33 28 37 -5 2 +47 0.5 0.3 -10 -44 50 -9 1 +47 0.2 0.3 -4 -49 45 -4 2 +47 0.8 0.6 39 14 32 2 1 +47 0.4 0.7 -2 -16 9 6 2 +47 0.4 0.3 -16 -30 7 -8 2 +47 0.7 0.9 19 -50 17 9 2 +47 0.2 0.7 28 -38 4 -49 1 +47 0.4 0.1 48 33 1 -47 1 +47 0.3 0.9 -3 -16 35 -30 2 +47 0.6 0.2 11 1 44 -13 1 +47 0.2 0.9 7 -29 -1 -44 1 +47 0.5 0.9 43 8 -26 -42 1 +47 0.1 0.1 50 -25 2 -39 1 +47 0.8 0.1 47 44 46 39 2 +47 0.1 0.7 25 -46 32 -48 2 +47 0.1 0.4 50 -40 7 -33 2 +47 0.9 0.4 28 -22 8 6 2 +47 0.7 0.2 32 -14 41 24 2 +47 0.5 0.1 43 0 -25 -31 1 +47 0.9 0.4 20 -19 42 23 2 +47 0.9 0.7 40 7 20 -15 1 +47 0.9 0.4 -17 -26 34 -31 2 +47 0.7 0.6 -10 -50 -3 -35 2 +47 0.4 0.8 33 25 12 -11 2 +47 0.3 0.8 22 -36 43 -40 2 +47 0.5 0.9 28 14 -1 -20 1 +47 0.5 0.5 -30 -46 20 -20 1 +47 0.7 0.7 -21 -35 9 -24 1 +47 0.1 0.5 18 11 27 -41 1 +47 0.6 0.4 10 -4 32 -30 2 +47 0.4 0.2 8 -39 40 7 2 +47 0.9 0.4 49 1 11 -31 1 +47 0.1 0.1 -41 -45 -13 -25 1 +47 0.5 0.4 29 -33 -19 -38 1 +48 0.5 0.9 -1 -43 -35 -41 1 +48 0.9 0.7 -19 -31 43 27 2 +48 0.6 0.7 -41 -48 6 -40 2 +48 0.9 0.6 27 -38 -5 -24 1 +48 0.4 0.5 28 5 -47 -50 1 +48 0.8 0.6 -8 -33 33 2 2 +48 0.8 0.1 38 10 48 17 1 +48 0.4 0.4 50 15 -34 -48 1 +48 0.5 0.8 -47 -48 13 -38 2 +48 0.3 0.6 -33 -47 38 15 2 +48 0.9 0.6 3 -14 43 12 2 +48 0.5 0.5 -26 -28 41 -41 2 +48 0.5 0.5 37 -39 41 -6 2 +48 0.7 0.1 18 -3 43 -24 1 +48 0.5 0.9 -15 -21 19 0 2 +48 0.3 0.6 -17 -42 -11 -13 2 +48 0.7 0.8 29 23 23 16 1 +48 0.3 0.1 23 -30 42 -4 2 +48 0.2 0.9 33 -44 36 23 2 +48 0.8 0.1 -37 -41 27 
-35 2 +48 0.6 0.4 -28 -48 50 -18 2 +48 0.3 0.5 -7 -18 35 28 2 +48 0.5 0.9 19 -28 33 -11 2 +48 0.8 0.7 39 -13 39 3 1 +48 0.6 0.2 36 8 -15 -17 1 +48 0.4 0.4 41 3 9 -22 1 +48 0.4 0.6 -16 -21 -24 -35 1 +48 0.2 0.9 35 11 26 -8 2 +48 0.2 0.5 -34 -42 6 -2 2 +48 0.5 0.8 9 -12 -5 -21 1 +48 0.8 0.8 10 -2 42 3 2 +48 0.2 0.1 -15 -43 46 -32 2 +48 0.3 0.1 -18 -37 35 23 2 +48 0.8 0.6 38 -48 50 -14 2 +48 0.3 0.5 43 -49 40 8 2 +48 0.2 0.3 -12 -40 -37 -48 1 +48 0.9 0.7 50 -14 45 25 1 +48 0.5 0.4 -40 -44 17 -28 2 +48 0.3 0.4 4 -27 14 13 2 +48 0.5 0.6 18 -7 26 4 2 +48 0.2 0.9 47 -28 16 -41 2 +48 0.3 0.7 8 -21 26 15 2 +48 0.2 0.6 -20 -27 -17 -26 1 +48 0.4 0.6 47 -13 8 -45 1 +48 0.3 0.9 19 1 -1 -39 1 +48 0.6 0.6 6 -21 41 -8 2 +48 0.9 0.9 26 -2 36 -33 1 +48 0.4 0.5 -24 -32 8 -43 2 +48 0.7 0.4 45 21 49 29 1 +48 0.9 0.8 40 -44 18 -11 1 +48 0.3 0.5 45 35 11 -30 1 +48 0.8 0.5 8 -37 -16 -31 1 +48 0.1 0.1 46 -37 35 -38 2 +48 0.3 0.7 30 29 18 -30 1 +48 0.6 0.3 0 -40 26 -33 1 +48 0.3 0.6 49 10 30 24 2 +48 0.1 0.1 27 26 41 -37 1 +48 0.1 0.8 38 33 17 -9 1 +48 0.7 0.8 48 -12 12 -36 1 +48 0.2 0.7 -7 -22 30 8 2 +48 0.5 0.4 5 -44 -19 -48 1 +48 0.8 0.5 19 11 26 17 2 +48 0.4 0.4 1 0 25 -22 1 +48 0.5 0.4 39 -33 -15 -35 1 +48 0.4 0.5 -6 -49 -26 -34 2 +48 0.7 0.8 7 -36 -14 -37 1 +48 0.3 0.7 37 7 36 35 2 +48 0.8 0.5 26 -11 20 -34 1 +48 0.9 0.2 -1 -20 12 -50 1 +48 0.1 0.5 -11 -41 24 -29 2 +48 0.1 0.8 38 33 39 -41 1 +48 0.5 0.6 34 -18 -7 -9 1 +48 0.7 0.1 40 -39 5 -26 1 +48 0.8 0.9 12 -34 -32 -50 1 +48 0.8 0.2 37 21 18 3 1 +48 0.4 0.3 17 -22 -21 -37 1 +48 0.7 0.4 13 -1 38 -50 1 +48 0.6 0.1 -44 -48 38 30 2 +48 0.7 0.9 33 7 -25 -30 1 +48 0.2 0.4 44 -14 49 24 2 +48 0.1 0.8 -29 -50 10 3 2 +48 0.2 0.4 -15 -20 29 -42 2 +48 0.5 0.3 -23 -29 -3 -31 2 +48 0.1 0.5 26 -38 13 -25 2 +48 0.8 0.9 6 -44 41 -6 2 +48 0.6 0.1 46 42 -15 -33 1 +48 0.4 0.2 -9 -21 32 -13 2 +48 0.2 0.2 31 1 31 18 2 +48 0.4 0.8 12 -19 11 -42 2 +48 0.2 0.4 -9 -21 42 12 2 +48 0.2 0.5 8 -18 27 -48 2 +48 0.2 0.6 -16 -29 45 11 2 +48 0.8 0.9 24 -25 -24 -38 1 +48 0.2 0.5 22 -3 -35 -36 1 +48 0.5 0.2 -29 -38 44 -16 2 +48 0.1 0.7 38 13 16 -21 1 +48 0.1 0.9 -9 -45 -13 -18 2 +48 0.9 0.4 22 -41 38 10 2 +48 0.3 0.7 21 -41 23 -37 2 +48 0.5 0.4 26 4 45 39 2 +48 0.1 0.4 34 -6 19 -11 2 +48 0.1 0.4 -2 -44 -11 -40 2 +48 0.7 0.3 11 -6 43 23 2 +48 0.5 0.3 16 15 -33 -42 1 +48 0.3 0.2 -32 -44 -33 -50 1 +48 0.6 0.3 26 -32 10 -46 1 +48 0.4 0.2 35 22 11 -19 1 +48 0.2 0.3 18 -28 24 -39 2 +48 0.8 0.3 17 -8 -14 -41 1 +48 0.1 0.4 44 -29 34 11 2 +48 0.5 0.7 39 11 43 -17 2 +48 0.4 0.4 -27 -38 46 -19 2 +48 0.3 0.8 0 -4 40 -22 2 +48 0.9 0.2 34 -21 6 -50 1 +48 0.4 0.6 -21 -22 -7 -32 2 +48 0.1 0.4 26 18 -26 -44 1 +48 0.3 0.9 49 15 22 -16 1 +48 0.1 0.3 41 20 1 -34 1 +48 0.7 0.8 25 18 -4 -29 1 +48 0.3 0.9 -4 -35 24 -10 2 +48 0.5 0.2 11 -17 10 -32 1 +48 0.2 0.2 -27 -32 36 -23 2 +48 0.5 0.3 -28 -50 5 -37 2 +48 0.6 0.1 8 -24 -29 -47 1 +48 0.8 0.8 14 -1 -23 -32 1 +48 0.1 0.5 -11 -12 40 27 2 +48 0.6 0.5 49 -34 2 -24 1 +48 0.6 0.3 -7 -22 50 -36 2 +48 0.8 0.5 37 1 42 10 1 +48 0.5 0.9 43 -26 14 4 1 +48 0.4 0.2 50 -31 19 11 2 +48 0.9 0.5 29 -31 7 -4 1 +48 0.7 0.2 29 8 -39 -44 1 +48 0.3 0.7 49 42 -32 -50 1 +48 0.1 0.6 6 -37 -15 -28 2 +48 0.5 0.1 37 -1 15 14 2 +48 0.8 0.8 -5 -25 9 -43 2 +48 0.3 0.1 17 5 10 -13 1 +48 0.8 0.2 -4 -41 4 -33 1 +48 0.1 0.4 -3 -49 -4 -33 2 +48 0.6 0.2 35 -6 49 -39 1 +48 0.7 0.9 -2 -6 -22 -44 1 +48 0.4 0.5 -17 -48 47 -11 2 +48 0.9 0.9 47 -33 -10 -31 1 +48 0.9 0.7 10 -16 -21 -42 1 +48 0.7 0.5 48 -39 42 -38 1 +48 0.1 0.8 -12 -47 27 16 2 +48 0.6 0.5 1 -30 3 -42 1 +48 0.1 0.5 45 39 29 16 1 +48 0.7 0.7 45 
39 12 -6 1 +48 0.8 0.7 37 -31 -37 -40 1 +48 0.9 0.5 46 -8 39 -35 1 +48 0.7 0.2 20 2 22 2 1 +48 0.2 0.8 22 -49 28 -41 2 +48 0.3 0.4 -12 -25 27 -6 2 +48 0.6 0.3 50 -12 -21 -30 1 +48 0.3 0.5 15 -26 4 -1 2 +48 0.5 0.7 16 13 14 -21 1 +48 0.8 0.5 42 -41 48 34 2 +48 0.1 0.5 20 -35 42 -26 2 +48 0.4 0.2 18 -30 20 -41 1 +48 0.5 0.5 33 -35 18 -48 1 +48 0.4 0.2 -17 -39 43 11 2 +48 0.3 0.7 27 -30 3 -21 2 +48 0.4 0.1 39 20 34 18 1 +48 0.7 0.6 48 24 6 0 1 +48 0.4 0.1 39 -23 -6 -28 1 +48 0.6 0.8 -4 -29 3 2 2 +48 0.1 0.4 3 -35 14 -19 2 +48 0.5 0.5 -23 -32 11 -44 2 +48 0.6 0.5 -5 -10 26 -1 2 +48 0.7 0.4 23 -21 46 -34 1 +48 0.3 0.1 -2 -43 -15 -21 2 +48 0.9 0.4 49 39 49 26 1 +48 0.7 0.9 30 10 8 -20 1 +48 0.2 0.9 47 -13 17 -37 2 +48 0.5 0.9 4 2 -25 -32 1 +48 0.4 0.5 24 -8 46 0 2 +48 0.9 0.1 33 29 42 -47 1 +48 0.2 0.1 4 -18 -12 -17 1 +48 0.6 0.8 47 -40 10 -9 1 +48 0.9 0.8 -18 -41 23 -14 2 +48 0.5 0.4 21 15 11 -13 1 +48 0.8 0.9 39 17 46 -27 1 +48 0.7 0.6 40 -18 45 -34 1 +48 0.3 0.7 50 18 12 8 1 +48 0.9 0.5 45 41 -2 -23 1 +48 0.7 0.2 49 -47 25 -14 1 +48 0.4 0.3 35 16 28 20 1 +48 0.5 0.8 16 6 17 -10 1 +48 0.3 0.7 33 -8 5 -9 1 +48 0.6 0.8 35 -8 42 21 2 +48 0.4 0.3 37 -17 39 24 2 +48 0.3 0.9 -6 -45 31 7 2 +48 0.7 0.1 33 13 -3 -6 1 +48 0.2 0.1 32 1 3 -18 1 +48 0.6 0.2 -4 -39 45 23 2 +48 0.4 0.1 25 -13 -9 -19 1 +48 0.4 0.3 37 -25 39 -14 1 +48 0.8 0.7 35 -25 12 5 1 +48 0.3 0.1 29 22 -11 -27 1 +48 0.5 0.9 36 22 42 -15 1 +48 0.6 0.1 39 -37 -2 -5 1 +48 0.4 0.2 23 -18 -26 -45 1 +48 0.4 0.8 26 -28 -24 -28 1 +48 0.4 0.4 23 -1 1 -17 1 +48 0.9 0.7 6 -27 -5 -6 1 +48 0.7 0.9 -21 -29 29 -3 2 +48 0.7 0.4 9 -2 11 -46 1 +48 0.9 0.3 31 -13 43 21 1 +48 0.5 0.9 27 -35 2 -11 1 +48 0.1 0.8 10 -4 16 -41 1 +48 0.7 0.6 -19 -45 48 22 2 +48 0.2 0.9 16 -50 45 -3 2 +48 0.6 0.5 28 -2 50 -22 1 +48 0.1 0.2 40 -40 -36 -48 1 +48 0.8 0.4 48 -32 15 -32 1 +48 0.8 0.9 47 -45 -43 -45 1 +48 0.8 0.8 -16 -49 11 5 2 +48 0.7 0.4 13 -22 16 12 2 +48 0.9 0.8 -25 -45 -4 -50 2 +48 0.4 0.5 39 32 -17 -43 1 +48 0.7 0.2 -29 -30 15 14 2 +48 0.7 0.3 -2 -26 1 -5 2 +48 0.5 0.9 -11 -43 44 37 2 +48 0.4 0.1 35 -19 47 -41 1 +48 0.3 0.1 8 -44 10 6 2 +48 0.6 0.7 21 -46 27 18 2 +48 0.5 0.6 39 -39 15 -7 2 +48 0.3 0.5 2 -18 50 23 2 +48 0.7 0.5 19 -9 -18 -45 1 +48 0.5 0.9 38 -6 13 -5 1 +48 0.3 0.5 19 -23 -42 -49 1 +48 0.2 0.4 13 8 45 -1 2 +48 0.6 0.7 -6 -47 -2 -21 2 +48 0.2 0.4 -24 -32 25 19 2 +48 0.5 0.9 7 -41 -14 -30 1 +48 0.2 0.9 17 2 21 6 2 +48 0.1 0.3 -27 -43 24 -33 2 +48 0.6 0.8 -28 -34 18 -28 2 +48 0.4 0.3 -3 -46 10 -43 2 +48 0.2 0.7 16 -5 -27 -37 1 +48 0.9 0.3 -13 -24 -5 -41 1 +48 0.2 0.3 -13 -41 23 10 2 +48 0.5 0.7 37 -17 -19 -49 1 +48 0.9 0.9 -11 -18 -19 -47 1 +48 0.7 0.4 -3 -20 48 37 2 +48 0.6 0.7 17 -47 38 -9 2 +48 0.8 0.2 38 -21 19 6 1 +48 0.6 0.6 -3 -50 35 -3 2 +48 0.4 0.4 16 8 30 29 2 +48 0.5 0.2 38 -47 38 36 2 +48 0.8 0.4 13 -10 -24 -50 1 +48 0.9 0.6 -10 -40 4 -36 2 +48 0.2 0.1 38 -32 7 -33 1 +48 0.6 0.6 24 5 11 -23 1 +48 0.9 0.8 -5 -22 42 39 2 +48 0.5 0.1 12 -48 38 21 2 +48 0.7 0.7 45 13 32 -4 1 +48 0.5 0.6 41 3 20 9 1 +48 0.9 0.7 -5 -10 22 19 2 +48 0.1 0.8 -6 -24 0 -34 2 +48 0.5 0.7 -27 -33 44 -11 2 +48 0.8 0.8 8 -6 37 -11 2 +48 0.3 0.1 36 -23 -35 -50 1 +48 0.6 0.1 21 -1 11 -17 1 +48 0.6 0.6 -7 -26 41 10 2 +48 0.8 0.3 18 -7 42 -32 1 +48 0.3 0.9 -17 -45 18 -7 2 +48 0.1 0.9 17 -14 -39 -41 1 +48 0.4 0.8 -9 -15 14 -39 2 +48 0.8 0.4 -28 -32 -39 -41 1 +48 0.9 0.6 40 24 29 28 1 +48 0.8 0.7 48 18 15 -40 1 +48 0.4 0.7 -3 -23 -12 -15 2 +48 0.4 0.2 46 -9 15 -25 1 +48 0.8 0.9 4 -12 20 -16 2 +48 0.7 0.1 47 42 11 -6 1 +48 0.1 0.4 6 2 44 -19 2 +48 0.8 0.6 -15 -33 36 -35 2 +48 
0.1 0.6 40 -24 30 -34 2 +48 0.5 0.2 48 29 48 12 1 +48 0.5 0.8 34 -14 -1 -45 1 +48 0.8 0.3 49 -18 8 -35 1 +48 0.5 0.2 -18 -48 -1 -26 1 +48 0.9 0.2 13 -35 47 -38 1 +48 0.5 0.3 33 -28 35 -12 1 +48 0.1 0.8 35 -31 9 -11 2 +48 0.6 0.4 7 -50 37 -31 2 +48 0.1 0.4 0 -39 -9 -44 1 +48 0.7 0.8 -4 -11 -1 -13 1 +48 0.4 0.6 -25 -31 -3 -14 2 +48 0.7 0.1 39 8 37 16 1 +48 0.4 0.1 6 -31 9 -31 1 +48 0.1 0.9 8 -49 -1 -39 2 +48 0.5 0.2 -2 -36 -12 -33 1 +48 0.3 0.2 12 -45 48 -18 2 +48 0.5 0.5 49 -36 -10 -14 1 +48 0.1 0.7 49 -39 46 8 2 +48 0.1 0.5 -12 -46 32 -22 2 +49 0.9 0.7 22 -29 20 -32 1 +49 0.1 0.2 37 17 25 4 1 +49 0.7 0.1 22 7 19 13 2 +49 0.4 0.6 -16 -40 -2 -8 2 +49 0.8 0.1 6 -20 -22 -36 1 +49 0.5 0.9 42 12 46 -14 1 +49 0.5 0.5 9 -11 26 3 2 +49 0.2 0.6 -25 -37 48 -12 2 +49 0.9 0.4 2 -22 8 -37 1 +49 0.4 0.2 -8 -21 9 -49 1 +49 0.1 0.7 36 -15 29 -14 2 +49 0.3 0.6 36 -27 30 -33 2 +49 0.7 0.8 37 -8 32 -35 2 +49 0.3 0.2 21 -42 31 17 2 +49 0.4 0.2 -22 -38 29 -44 1 +49 0.1 0.1 15 -18 44 33 2 +49 0.6 0.5 0 -4 19 -17 2 +49 0.3 0.3 40 35 -13 -44 1 +49 0.5 0.4 23 -32 -5 -48 1 +49 0.1 0.9 1 -29 30 -32 2 +49 0.7 0.9 50 -42 39 -18 1 +49 0.9 0.6 -7 -14 23 -12 2 +49 0.1 0.6 50 -2 -13 -33 1 +49 0.5 0.8 -20 -36 50 10 2 +49 0.9 0.9 43 28 -12 -35 1 +49 0.9 0.4 -29 -32 5 -1 2 +49 0.5 0.6 30 -35 15 -15 2 +49 0.4 0.6 7 6 39 -27 1 +49 0.5 0.9 -23 -34 27 -48 2 +49 0.6 0.7 41 -49 -11 -48 1 +49 0.6 0.6 30 26 34 16 1 +49 0.5 0.5 29 13 -24 -45 1 +49 0.6 0.8 46 22 47 12 1 +49 0.6 0.6 26 3 15 -37 1 +49 0.8 0.7 34 -27 -13 -34 1 +49 0.7 0.8 -35 -41 49 47 2 +49 0.3 0.1 26 10 33 30 2 +49 0.8 0.9 37 24 17 -30 1 +49 0.6 0.5 -25 -41 -34 -49 2 +49 0.4 0.6 22 -28 16 -37 1 +49 0.9 0.9 20 -32 31 25 2 +49 0.6 0.9 6 -13 -44 -45 1 +49 0.5 0.6 -7 -34 -33 -39 1 +49 0.7 0.5 35 -4 -14 -40 1 +49 0.3 0.4 43 -22 8 -36 1 +49 0.8 0.8 41 -9 26 20 2 +49 0.4 0.3 -8 -25 -32 -46 1 +49 0.6 0.9 45 -37 19 -45 1 +49 0.3 0.9 39 11 45 36 2 +49 0.3 0.9 25 -45 20 9 2 +49 0.9 0.5 29 -37 14 -47 1 +49 0.9 0.4 -4 -50 26 22 2 +49 0.8 0.5 -39 -43 35 -19 2 +49 0.8 0.2 2 -34 32 24 2 +49 0.5 0.3 -1 -32 17 -18 2 +49 0.8 0.2 -45 -50 10 -29 2 +49 0.9 0.6 41 12 7 -17 1 +49 0.8 0.2 -10 -39 34 -24 1 +49 0.2 0.1 49 1 -12 -29 1 +49 0.8 0.1 19 -25 25 -44 1 +49 0.4 0.5 21 -4 41 -26 2 +49 0.1 0.5 -4 -36 44 37 2 +49 0.7 0.2 30 23 -39 -50 1 +49 0.1 0.9 28 -42 -14 -46 1 +49 0.6 0.2 0 -17 48 21 2 +49 0.4 0.7 45 -13 38 -36 2 +49 0.3 0.6 0 -44 -8 -30 1 +49 0.1 0.3 -23 -47 24 -27 2 +49 0.8 0.3 0 -6 -5 -30 1 +49 0.8 0.9 46 -3 32 -35 2 +49 0.7 0.9 45 41 10 -12 1 +49 0.8 0.9 30 -22 35 34 2 +49 0.6 0.6 35 -6 18 -32 1 +49 0.2 0.2 47 35 9 -45 1 +49 0.8 0.3 32 -34 15 -24 1 +49 0.6 0.3 39 23 46 -31 1 +49 0.7 0.8 18 -4 39 35 2 +49 0.7 0.9 48 -36 17 -7 1 +49 0.7 0.7 21 2 50 17 2 +49 0.7 0.3 45 -33 17 -28 1 +49 0.3 0.2 -37 -49 39 6 2 +49 0.1 0.2 38 26 37 -21 1 +49 0.7 0.3 34 -46 44 -29 1 +49 0.2 0.9 46 -16 -6 -34 1 +49 0.4 0.8 2 -5 40 -13 2 +49 0.5 0.7 -4 -42 18 16 2 +49 0.5 0.7 21 -7 -29 -47 1 +49 0.6 0.4 48 23 18 -5 1 +49 0.5 0.5 16 -19 -30 -40 1 +49 0.6 0.8 27 26 30 -30 1 +49 0.8 0.8 17 16 30 -8 2 +49 0.3 0.6 37 4 31 23 2 +49 0.3 0.8 17 -18 31 2 2 +49 0.7 0.2 -32 -50 48 -29 2 +49 0.1 0.5 22 -16 -4 -21 2 +49 0.2 0.4 -14 -36 -18 -23 2 +49 0.9 0.1 20 -47 37 -32 1 +49 0.8 0.5 12 -6 33 22 2 +49 0.3 0.1 37 -47 -9 -38 1 +49 0.4 0.6 32 -14 -15 -32 1 +49 0.4 0.3 12 -25 38 -34 1 +49 0.5 0.3 -5 -27 10 -4 2 +49 0.8 0.8 9 -7 43 -27 2 +49 0.7 0.6 45 23 27 -41 1 +49 0.9 0.5 8 4 29 -41 1 +49 0.7 0.6 7 -21 -1 -41 1 +49 0.1 0.2 42 9 40 -27 1 +49 0.6 0.7 40 34 47 -33 2 +49 0.9 0.4 34 -35 -1 -24 1 +49 0.5 0.4 38 28 33 -50 1 +49 
0.9 0.3 -2 -11 45 -28 2 +49 0.6 0.3 40 -28 5 -36 1 +49 0.3 0.6 37 36 30 11 1 +49 0.7 0.7 17 12 1 -15 1 +49 0.1 0.2 15 -14 17 -26 1 +49 0.6 0.2 43 -21 -21 -27 1 +49 0.4 0.6 10 0 48 9 2 +49 0.7 0.3 34 -43 36 35 2 +49 0.1 0.3 29 4 32 7 1 +49 0.7 0.8 -31 -45 10 -44 2 +49 0.4 0.5 36 31 2 -1 1 +49 0.6 0.8 39 28 -28 -48 1 +49 0.7 0.9 -21 -25 23 2 2 +49 0.4 0.4 24 -12 30 -24 2 +49 0.7 0.6 30 -40 -3 -21 1 +49 0.6 0.1 -28 -30 24 -42 1 +49 0.8 0.8 49 31 6 -7 1 +49 0.7 0.5 47 20 20 12 1 +49 0.3 0.8 42 -36 23 -43 2 +49 0.7 0.6 49 -8 -26 -39 1 +49 0.9 0.3 44 -34 5 -47 1 +49 0.9 0.1 39 5 44 28 2 +49 0.6 0.1 24 -38 18 2 2 +49 0.2 0.7 40 37 1 -29 1 +49 0.5 0.8 -2 -44 -9 -34 1 +49 0.9 0.3 49 -21 -24 -39 1 +49 0.1 0.2 30 -50 24 -27 2 +49 0.4 0.2 -4 -10 -5 -45 1 +49 0.6 0.5 8 -48 7 -25 2 +49 0.5 0.5 40 15 8 7 1 +49 0.2 0.7 40 4 10 -26 1 +49 0.1 0.5 -44 -46 46 25 2 +49 0.8 0.2 33 5 11 -35 1 +49 0.8 0.3 -2 -26 -13 -20 1 +49 0.9 0.5 29 -34 14 -12 1 +49 0.1 0.7 37 -16 20 -32 2 +49 0.6 0.9 21 3 14 -25 1 +49 0.2 0.9 39 -1 7 3 2 +49 0.9 0.9 7 -23 36 14 2 +49 0.7 0.5 30 26 41 -39 1 +49 0.8 0.1 5 -26 -5 -42 1 +49 0.2 0.5 1 -17 38 30 2 +49 0.3 0.4 -38 -46 30 -22 2 +49 0.6 0.4 36 -13 -7 -15 1 +49 0.8 0.7 17 -27 42 -48 2 +49 0.2 0.3 38 -34 34 9 2 +49 0.8 0.3 34 -5 -18 -44 1 +49 0.9 0.1 42 -34 41 10 1 +49 0.5 0.6 -6 -29 4 -5 2 +49 0.2 0.4 16 -3 5 -32 1 +49 0.9 0.7 45 4 26 -27 1 +49 0.8 0.6 40 3 15 -14 1 +49 0.6 0.2 7 -3 4 -13 1 +49 0.1 0.6 40 -48 -28 -30 1 +49 0.6 0.4 8 -49 35 -12 2 +49 0.2 0.4 47 -11 38 -10 2 +49 0.2 0.5 14 -47 21 -23 2 +49 0.9 0.5 -2 -50 5 -41 1 +49 0.7 0.5 5 -6 30 -47 2 +49 0.6 0.3 46 -6 14 -35 1 +49 0.8 0.5 41 -10 -9 -39 1 +49 0.8 0.5 27 2 27 -32 1 +49 0.4 0.9 -11 -47 50 -37 2 +49 0.2 0.8 24 21 -33 -43 1 +49 0.4 0.3 -41 -42 -15 -47 2 +49 0.2 0.5 -2 -18 -25 -29 1 +49 0.6 0.3 -2 -32 30 11 2 +49 0.7 0.3 15 -14 -18 -42 1 +49 0.2 0.6 33 -10 26 2 2 +49 0.2 0.4 26 -29 15 -19 2 +49 0.6 0.6 23 -14 32 -41 2 +49 0.2 0.2 37 -28 36 18 2 +49 0.5 0.3 -3 -11 -9 -37 2 +49 0.8 0.2 25 -38 37 22 2 +49 0.1 0.2 15 -13 -1 -30 1 +49 0.8 0.2 7 6 -25 -49 1 +49 0.3 0.9 23 6 -5 -9 1 +49 0.3 0.3 49 -19 42 31 2 +49 0.4 0.3 8 -46 -15 -16 1 +49 0.3 0.6 16 10 -14 -36 1 +49 0.1 0.8 40 -46 30 -47 2 +49 0.5 0.2 9 -50 -8 -14 2 +49 0.3 0.6 -23 -46 -22 -29 2 +49 0.2 0.5 43 34 -21 -24 1 +49 0.8 0.7 -20 -33 13 -30 2 +49 0.2 0.2 23 9 34 -47 1 +49 0.5 0.9 -22 -49 42 -1 2 +49 0.4 0.5 -24 -42 8 -1 2 +49 0.1 0.7 30 29 14 -41 1 +49 0.4 0.5 4 -8 -4 -27 1 +49 0.4 0.2 11 9 -5 -37 1 +49 0.2 0.8 4 -37 -7 -32 2 +49 0.8 0.7 27 -26 15 -25 1 +49 0.1 0.1 -40 -45 34 -42 2 +49 0.8 0.3 -8 -28 -38 -45 1 +49 0.8 0.1 28 -6 50 20 2 +49 0.9 0.8 -24 -44 -19 -27 1 +49 0.8 0.8 -27 -37 -3 -33 2 +49 0.1 0.3 -1 -31 -12 -21 2 +49 0.1 0.5 46 -35 23 8 2 +49 0.3 0.4 -3 -44 31 16 2 +49 0.4 0.9 8 -2 -15 -21 1 +49 0.1 0.7 47 -3 -25 -30 1 +49 0.9 0.6 24 -9 27 16 2 +49 0.3 0.2 0 -28 41 -32 2 +49 0.4 0.1 -6 -11 3 -48 1 +49 0.4 0.9 43 -47 48 -46 2 +49 0.5 0.8 9 -50 13 -7 2 +49 0.2 0.4 -11 -15 29 -31 2 +49 0.2 0.5 10 -5 37 12 2 +49 0.1 0.8 33 -44 -3 -13 2 +49 0.6 0.8 35 -20 2 -49 1 +49 0.5 0.1 -8 -46 47 -44 1 +49 0.7 0.6 -11 -44 29 -22 2 +49 0.6 0.2 -31 -47 37 -11 2 +49 0.1 0.9 -26 -28 35 21 2 +49 0.3 0.3 26 -44 39 -40 1 +49 0.6 0.4 38 -23 -17 -30 1 +49 0.8 0.8 38 14 39 31 2 +49 0.8 0.7 1 -40 48 -34 2 +49 0.1 0.7 -12 -34 45 -45 2 +49 0.1 0.4 9 -29 7 6 2 +49 0.2 0.4 21 14 0 -11 1 +49 0.8 0.6 -13 -37 36 -42 2 +49 0.4 0.1 38 5 17 -45 1 +49 0.6 0.7 47 37 -34 -44 1 +49 0.7 0.7 -13 -41 48 39 2 +49 0.6 0.2 32 -20 -9 -18 1 +49 0.8 0.6 43 -24 5 -5 1 +49 0.2 0.2 8 -20 12 -27 1 +49 0.3 0.8 13 
-42 20 15 2 +49 0.7 0.9 -4 -13 24 -50 2 +49 0.5 0.9 40 31 50 34 2 +49 0.5 0.3 -39 -42 -35 -39 1 +49 0.6 0.7 -12 -31 25 11 2 +49 0.2 0.5 -6 -29 9 -36 2 +49 0.6 0.8 7 -43 -7 -49 2 +49 0.4 0.3 50 -28 26 16 2 +49 0.5 0.6 37 -2 43 8 2 +49 0.3 0.7 35 9 -26 -38 1 +49 0.5 0.5 47 38 -8 -24 1 +49 0.8 0.1 30 23 30 -47 1 +49 0.1 0.2 24 12 38 -14 1 +49 0.6 0.6 -10 -20 -35 -47 1 +49 0.7 0.4 1 -35 -6 -44 1 +49 0.7 0.3 36 -19 -38 -44 1 +49 0.6 0.5 8 -22 4 -46 1 +49 0.9 0.3 35 27 1 -3 1 +49 0.9 0.2 1 -10 47 35 2 +49 0.9 0.1 13 5 43 11 2 +49 0.3 0.6 10 -18 -22 -40 1 +49 0.3 0.2 -14 -40 29 9 2 +49 0.4 0.3 30 21 48 16 1 +49 0.9 0.2 42 -50 13 3 1 +49 0.3 0.9 38 19 20 1 1 +49 0.6 0.9 -10 -27 48 -45 2 +49 0.2 0.1 22 1 -7 -24 1 +49 0.9 0.8 50 -42 38 12 1 +49 0.2 0.9 -27 -41 0 -50 2 +49 0.2 0.7 -19 -21 -29 -43 2 +49 0.6 0.7 -15 -43 -9 -48 1 +49 0.3 0.2 36 11 -38 -40 1 +49 0.8 0.5 49 20 -18 -22 1 +49 0.1 0.5 22 -5 -5 -9 1 +49 0.2 0.4 10 -25 7 -42 1 +49 0.3 0.2 27 -20 48 -36 1 +49 0.8 0.5 24 -1 42 -31 2 +49 0.7 0.9 12 -4 20 14 2 +49 0.4 0.3 22 -14 -3 -28 1 +49 0.8 0.5 30 -23 1 -12 1 +49 0.4 0.4 -11 -44 23 -15 2 +49 0.5 0.8 13 -42 21 -12 2 +49 0.5 0.5 27 -36 -5 -22 1 +49 0.6 0.4 48 19 5 -39 1 +49 0.6 0.7 25 -14 -35 -48 1 +49 0.1 0.3 48 -22 -3 -6 2 +49 0.4 0.2 -11 -39 -34 -43 1 +49 0.2 0.3 -1 -24 -1 -49 1 +49 0.3 0.8 -29 -43 7 -9 2 +49 0.7 0.5 33 18 -15 -34 1 +49 0.2 0.5 30 -33 26 20 2 +49 0.2 0.7 29 -41 44 3 2 +49 0.5 0.5 43 37 46 20 1 +49 0.9 0.6 40 -40 -32 -46 1 +49 0.7 0.7 26 0 12 -40 1 +49 0.7 0.9 30 13 10 -24 1 +49 0.4 0.5 -19 -29 9 -9 2 +49 0.2 0.5 -26 -29 10 3 2 +49 0.2 0.4 -12 -39 16 -4 2 +49 0.9 0.8 -4 -38 -23 -41 1 +50 0.7 0.4 12 4 39 -2 1 +50 0.4 0.4 18 -27 17 -10 2 +50 0.3 0.4 -6 -21 -15 -38 1 +50 0.6 0.4 32 1 -37 -50 1 +50 0.1 0.6 45 31 22 -39 1 +50 0.3 0.7 -45 -49 8 3 2 +50 0.2 0.1 12 -4 -37 -49 1 +50 0.3 0.3 -9 -46 -6 -44 1 +50 0.1 0.6 33 -43 42 5 2 +50 0.9 0.1 45 -24 49 -12 1 +50 0.9 0.8 23 -16 -18 -25 1 +50 0.9 0.5 42 -35 26 -13 1 +50 0.4 0.4 19 -41 37 -39 2 +50 0.1 0.8 -29 -37 -37 -40 1 +50 0.2 0.4 44 22 33 10 1 +50 0.6 0.6 -36 -50 37 22 2 +50 0.2 0.1 9 -42 41 -22 2 +50 0.7 0.4 43 -31 -33 -46 1 +50 0.5 0.4 -20 -32 36 -39 2 +50 0.5 0.2 32 -17 37 13 2 +50 0.9 0.2 7 -8 48 7 2 +50 0.1 0.5 35 -6 32 -7 2 +50 0.8 0.5 36 -27 -24 -32 1 +50 0.4 0.9 17 -47 -34 -39 1 +50 0.3 0.4 11 -45 -38 -49 1 +50 0.6 0.3 -7 -8 49 10 2 +50 0.8 0.5 5 -18 35 6 2 +50 0.6 0.8 17 -11 25 -30 2 +50 0.1 0.8 48 -29 47 40 2 +50 0.6 0.4 44 -2 48 -15 1 +50 0.1 0.7 30 -1 -3 -25 1 +50 0.7 0.8 44 -10 -4 -26 1 +50 0.8 0.2 17 4 -13 -21 1 +50 0.9 0.3 11 -33 22 -15 1 +50 0.7 0.2 -33 -43 39 0 2 +50 0.7 0.6 25 -25 38 -32 1 +50 0.1 0.4 -2 -29 12 -40 2 +50 0.5 0.2 -28 -39 31 -28 2 +50 0.5 0.5 46 -1 13 11 1 +50 0.8 0.9 18 2 26 -41 2 +50 0.3 0.4 21 12 49 -13 1 +50 0.5 0.3 42 -33 31 -24 1 +50 0.1 0.9 -37 -48 43 22 2 +50 0.7 0.1 47 30 21 -3 1 +50 0.8 0.8 19 -50 39 -42 2 +50 0.4 0.7 16 1 -7 -35 1 +50 0.6 0.7 34 -13 22 12 2 +50 0.2 0.8 -11 -32 20 15 2 +50 0.9 0.4 -17 -25 35 -47 2 +50 0.4 0.5 14 -27 -22 -36 1 +50 0.6 0.8 6 -39 -29 -45 1 +50 0.6 0.1 -12 -20 21 10 2 +50 0.8 0.5 35 -35 45 28 2 +50 0.1 0.7 22 -47 26 -12 2 +50 0.3 0.8 1 -15 -10 -36 2 +50 0.1 0.4 15 -38 40 18 2 +50 0.6 0.4 -37 -44 36 -39 2 +50 0.2 0.5 46 39 29 -9 1 +50 0.5 0.2 37 -33 20 -9 1 +50 0.9 0.2 -9 -35 26 -35 1 +50 0.6 0.4 19 -22 -23 -32 1 +50 0.1 0.8 28 -5 47 46 2 +50 0.5 0.2 45 9 -5 -14 1 +50 0.3 0.3 37 -50 32 -50 1 +50 0.3 0.1 41 9 -22 -47 1 +50 0.9 0.3 2 -23 48 -8 1 +50 0.9 0.6 34 7 23 -49 1 +50 0.7 0.7 38 -34 15 -28 1 +50 0.7 0.6 10 -14 30 -45 2 +50 0.9 0.7 14 -4 -15 -33 1 +50 0.6 
0.3 -23 -35 46 36 2 +50 0.6 0.2 35 -38 8 -11 1 +50 0.4 0.4 31 -33 27 25 2 +50 0.3 0.8 0 -28 31 4 2 +50 0.3 0.3 22 -6 23 -35 1 +50 0.1 0.3 32 6 34 33 2 +50 0.6 0.9 27 -40 -1 -33 1 +50 0.2 0.9 28 -28 27 6 2 +50 0.7 0.7 45 27 -10 -47 1 +50 0.8 0.6 -1 -5 -21 -26 1 +50 0.6 0.9 44 18 33 -43 1 +50 0.8 0.5 27 -24 29 -8 1 +50 0.8 0.3 -22 -39 47 29 2 +50 0.1 0.9 -3 -14 27 -19 2 +50 0.5 0.2 -37 -44 -6 -11 2 +50 0.1 0.2 8 -41 11 -40 1 +50 0.8 0.4 46 42 21 -12 1 +50 0.1 0.6 25 -38 48 15 2 +50 0.5 0.8 36 -44 37 -41 2 +50 0.9 0.2 34 1 15 -5 1 +50 0.2 0.9 -12 -19 4 -17 2 +50 0.5 0.8 20 -26 30 -1 2 +50 0.8 0.7 19 -33 -2 -3 1 +50 0.3 0.1 -19 -24 13 -4 2 +50 0.6 0.4 48 -33 -35 -42 1 +50 0.9 0.7 31 5 45 22 2 +50 0.1 0.5 7 6 12 -41 1 +50 0.5 0.4 3 -46 -5 -6 2 +50 0.2 0.1 18 6 10 -42 1 +50 0.9 0.2 -3 -50 -15 -23 1 +50 0.2 0.6 41 30 -1 -7 1 +50 0.3 0.6 41 22 28 -26 1 +50 0.6 0.8 37 -29 38 -5 2 +50 0.8 0.3 14 1 31 30 2 +50 0.9 0.4 -4 -15 15 -8 2 +50 0.9 0.8 41 -17 10 3 1 +50 0.2 0.9 36 32 20 -11 1 +50 0.7 0.3 43 -37 26 24 1 +50 0.1 0.7 7 -25 35 -49 2 +50 0.9 0.5 23 1 2 -7 1 +50 0.1 0.2 -22 -38 48 20 2 +50 0.3 0.9 -12 -50 20 13 2 +50 0.1 0.1 39 -35 -43 -44 1 +50 0.1 0.5 17 -40 16 -2 2 +50 0.3 0.3 32 -10 26 -14 2 +50 0.4 0.3 24 19 25 19 1 +50 0.8 0.8 40 -5 10 8 1 +50 0.6 0.7 14 -23 21 10 2 +50 0.3 0.4 39 -20 44 -3 2 +50 0.1 0.4 27 -9 42 17 2 +50 0.4 0.9 22 -36 20 -2 2 +50 0.6 0.3 22 -12 32 -39 1 +50 0.1 0.3 32 -17 26 -15 2 +50 0.5 0.2 49 36 28 -50 1 +50 0.8 0.9 26 -20 43 40 2 +50 0.5 0.5 43 29 24 1 1 +50 0.5 0.8 -22 -27 50 29 2 +50 0.2 0.2 20 -50 28 -11 2 +50 0.5 0.3 21 -30 37 24 2 +50 0.9 0.3 15 13 6 -39 1 +50 0.9 0.7 -25 -26 26 -8 2 +50 0.7 0.7 25 7 -11 -19 1 +50 0.6 0.7 -5 -38 29 25 2 +50 0.3 0.3 40 13 25 -1 1 +50 0.9 0.8 8 -46 -18 -32 1 +50 0.8 0.7 -15 -38 27 5 2 +50 0.2 0.2 49 -33 -1 -6 2 +50 0.6 0.2 39 -3 -14 -25 1 +50 0.3 0.8 3 -39 4 -37 2 +50 0.3 0.9 39 37 48 -8 1 +50 0.2 0.6 43 -47 50 4 2 +50 0.6 0.7 -25 -30 -8 -47 2 +50 0.3 0.1 -8 -29 43 -7 2 +50 0.7 0.1 29 -18 -6 -26 1 +50 0.7 0.2 15 -19 24 -5 1 +50 0.1 0.6 -3 -17 9 -36 2 +50 0.4 0.4 -12 -30 21 -35 2 +50 0.6 0.7 0 -50 9 -45 2 +50 0.1 0.8 44 6 23 -25 2 +50 0.1 0.1 25 -11 40 -13 1 +50 0.8 0.4 15 -13 0 -43 1 +50 0.3 0.1 -28 -40 18 -36 1 +50 0.6 0.6 38 -5 -6 -16 1 +50 0.9 0.4 17 12 -7 -28 1 +50 0.9 0.9 48 -23 49 44 2 +50 0.6 0.7 -20 -29 32 -3 2 +50 0.7 0.6 -3 -33 1 -21 2 +50 0.8 0.8 6 -36 37 32 2 +50 0.3 0.9 18 -42 47 31 2 +50 0.7 0.4 28 -49 34 -18 1 +50 0.1 0.6 -7 -43 41 15 2 +50 0.7 0.1 14 -28 -1 -16 1 +50 0.7 0.3 44 0 12 -21 1 +50 0.9 0.8 6 -41 20 -37 2 +50 0.6 0.2 31 -31 42 27 2 +50 0.2 0.2 35 -2 27 8 2 +50 0.2 0.5 -31 -32 44 5 2 +50 0.1 0.6 49 -24 40 -6 2 +50 0.3 0.8 7 -45 40 -31 2 +50 0.4 0.3 43 13 35 13 1 +50 0.9 0.8 23 -9 -5 -39 1 +50 0.8 0.4 42 -37 -8 -28 1 +50 0.4 0.9 -16 -19 30 24 2 +50 0.3 0.6 35 33 39 -12 1 +50 0.2 0.6 28 -36 5 -25 2 +50 0.3 0.3 50 20 9 -38 1 +50 0.4 0.1 2 -14 -2 -42 1 +50 0.2 0.3 -32 -40 9 -11 2 +50 0.7 0.7 39 33 31 2 1 +50 0.3 0.8 23 -50 -21 -49 1 +50 0.6 0.3 22 -33 0 -8 1 +50 0.9 0.1 -5 -48 -17 -26 1 +50 0.7 0.7 46 -30 -30 -40 1 +50 0.8 0.9 12 -9 12 -9 1 +50 0.9 0.4 37 -27 -1 -22 1 +50 0.9 0.2 -16 -38 36 -37 2 +50 0.3 0.2 -14 -25 8 -31 2 +50 0.5 0.9 2 -14 43 20 2 +50 0.8 0.6 0 -3 28 -19 2 +50 0.6 0.5 16 11 44 -2 1 +50 0.8 0.2 6 -39 43 40 2 +50 0.5 0.7 50 3 21 -14 1 +50 0.9 0.3 42 -14 38 -45 1 +50 0.8 0.7 19 -11 18 -25 1 +50 0.7 0.6 22 -13 39 31 2 +50 0.5 0.7 -30 -42 40 -22 2 +50 0.3 0.1 49 9 34 29 2 +50 0.6 0.3 -17 -36 35 0 2 +50 0.7 0.6 11 -43 46 -2 2 +50 0.2 0.1 -30 -49 40 3 2 +50 0.9 0.3 25 24 45 14 1 +50 0.2 0.4 47 29 -2 
-7 1 +50 0.2 0.4 34 33 39 -14 1 +50 0.4 0.5 1 -33 18 -34 2 +50 0.3 0.6 49 -31 49 35 2 +50 0.5 0.2 -6 -37 36 30 2 +50 0.3 0.6 9 3 4 3 2 +50 0.3 0.6 -11 -19 3 -33 2 +50 0.6 0.2 43 17 17 -31 1 +50 0.3 0.8 -32 -36 -10 -14 2 +50 0.1 0.8 15 -2 0 -30 1 +50 0.7 0.4 13 -26 32 15 2 +50 0.2 0.8 -9 -18 43 -3 2 +50 0.3 0.4 17 -48 46 13 2 +50 0.9 0.5 46 -7 44 -26 1 +50 0.1 0.7 47 17 26 -27 1 +50 0.9 0.4 -13 -50 41 -17 2 +50 0.5 0.5 28 14 1 -6 1 +50 0.4 0.6 26 -16 37 25 2 +50 0.1 0.5 41 14 -6 -32 1 +50 0.8 0.4 28 -31 45 -18 2 +50 0.8 0.1 33 -6 20 19 1 +50 0.4 0.4 -2 -47 20 3 2 +50 0.8 0.9 32 -9 6 -47 1 +50 0.3 0.2 -11 -30 42 16 2 +50 0.2 0.4 15 2 9 4 1 +50 0.5 0.9 7 -15 38 -45 2 +50 0.4 0.3 31 26 49 -41 1 +50 0.9 0.2 -17 -23 33 -37 2 +50 0.9 0.3 -21 -45 -15 -38 1 +50 0.3 0.1 35 -13 46 24 2 +50 0.8 0.9 38 -46 -12 -17 1 +50 0.5 0.5 22 -22 -3 -33 1 +50 0.6 0.6 -6 -27 20 12 2 +50 0.1 0.7 2 -33 29 -31 2 +50 0.5 0.3 36 -47 2 -29 1 +50 0.2 0.2 -2 -24 -32 -49 1 +50 0.7 0.6 28 26 2 -17 1 +50 0.6 0.9 49 42 31 -50 1 +50 0.9 0.2 -7 -48 42 7 2 +50 0.9 0.6 -1 -12 13 -1 2 +50 0.9 0.1 22 -39 17 -34 1 +50 0.7 0.9 1 -25 25 20 2 +50 0.1 0.1 -20 -48 39 -32 2 +50 0.3 0.8 18 -2 8 -4 2 +50 0.5 0.7 49 -27 -12 -33 1 +50 0.9 0.7 -11 -32 8 -1 2 +50 0.1 0.5 -4 -27 -1 -44 1 +50 0.3 0.5 37 35 17 -4 1 +50 0.8 0.5 23 4 -9 -50 1 +50 0.6 0.2 25 -41 50 41 2 +50 0.8 0.4 40 1 -1 -36 1 +50 0.7 0.5 -16 -48 27 -46 2 +50 0.6 0.8 -29 -42 4 3 2 +50 0.3 0.8 -8 -13 -19 -22 1 +50 0.1 0.8 18 -5 29 -1 2 +50 0.5 0.2 18 14 42 -39 1 +50 0.1 0.1 -1 -37 13 0 2 +50 0.2 0.7 -28 -30 5 -35 2 +50 0.6 0.1 -29 -33 47 -31 2 +50 0.8 0.3 0 -29 50 32 2 +50 0.8 0.4 -18 -26 27 -26 2 +50 0.8 0.2 10 -17 6 -20 1 +50 0.7 0.8 24 -39 27 17 2 +50 0.6 0.3 -3 -43 -26 -43 1 +50 0.2 0.8 38 -11 -1 -38 1 +50 0.5 0.7 5 -1 16 15 2 +50 0.7 0.4 36 23 26 -44 1 +50 0.7 0.7 37 -48 -7 -35 1 +50 0.7 0.5 -25 -44 38 0 2 +50 0.8 0.7 -26 -35 -38 -41 1 +50 0.9 0.4 -9 -43 46 -19 2 +50 0.2 0.8 31 -19 33 -23 2 +50 0.6 0.3 30 23 46 -24 1 +50 0.2 0.9 48 -10 32 -31 2 +50 0.1 0.8 25 7 27 -47 2 +50 0.3 0.4 2 -4 9 -38 1 +50 0.4 0.9 -4 -48 27 -27 2 +50 0.1 0.1 1 -47 11 3 2 +50 0.3 0.7 21 13 21 -18 2 +50 0.9 0.6 28 5 26 10 1 +50 0.2 0.2 35 29 49 -14 1 +50 0.3 0.6 39 12 50 17 2 +50 0.4 0.2 -18 -33 0 -26 2 +50 0.7 0.1 34 32 -18 -32 1 +50 0.9 0.7 21 -28 17 -7 1 +50 0.9 0.5 -7 -25 10 -48 1 +50 0.4 0.8 -11 -28 6 -14 2 +50 0.9 0.4 33 -16 38 -44 1 +50 0.1 0.9 13 11 31 -9 2 +50 0.1 0.1 -3 -44 39 -23 2 +50 0.9 0.2 15 -23 34 -38 1 +50 0.1 0.3 43 -3 21 -19 1 +50 0.2 0.5 -13 -34 33 -23 2 +50 0.5 0.3 28 25 43 21 2 +50 0.2 0.6 32 20 25 -2 1 +50 0.1 0.1 22 7 40 -32 1 +50 0.6 0.7 29 -21 -34 -46 1 +50 0.9 0.3 -23 -46 -4 -49 1 +50 0.9 0.8 42 -26 13 -38 1
diff --git a/Python/hbayesdm/common/extdata/dd_exampleData.txt b/Python/hbayesdm/common/extdata/dd_exampleData.txt
new file mode 100644
index 00000000..d90c64c1
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/dd_exampleData.txt
@@ -0,0 +1,2161 @@
+subjID trial delay_later amount_later delay_sooner amount_sooner choice
+1 1 6 10.5 0 10 1 +1 2 170 38.3 0 10 1 +1 3 28 13.4 0 10 1 +1 4 28 31.4 0 10 1 +1 5 85 30.9 0 10 1 +1 6 28 21.1 0 10 1 +1 7 28 13 0 10 1 +1 8 1 21.3 0 10 1 +1 9 28 21.1 0 10 1 +1 10 15 30.1 0 10 1 +1 11 1 10.7 0 10 1 +1 12 85 36.1 0 10 1 +1 13 15 10.5 0 10 1 +1 14 6 16.7 0 10 1 +1 15 1 11 0 10 1 +1 16 15 14.2 0 10 1 +1 17 15 12.5 0 10 1 +1 18 15 20.7 0 10 1 +1 19 6 11 0 10 0 +1 20 28 16.9 0 10 1 +1 21 15 30.1 0 10 1 +1 22 85 24.4 0 10 1 +1 23 170 41.3 0 10 1 +1 24 15 14.2 0 10 1 +1 25 6 10.5 0 10 1 +1 26 170 24.4 0 10 1 +1 27 15 49 0 10 1 +1 28 170 29.7 0
10 1 +1 29 1 11.8 0 10 0 +1 30 6 13.2 0 10 0 +1 31 85 30.9 0 10 1 +1 32 6 44 0 10 1 +1 33 6 35.1 0 10 1 +1 34 28 15.5 0 10 1 +1 35 170 43.3 0 10 1 +1 36 170 33.9 0 10 1 +1 37 1 11 0 10 1 +1 38 1 21.3 0 10 1 +1 39 85 45 0 10 1 +1 40 15 39.6 0 10 1 +1 41 85 10.5 0 10 0 +1 42 170 15 0 10 1 +1 43 170 49.8 0 10 1 +1 44 170 24.4 0 10 1 +1 45 28 13.4 0 10 1 +1 46 1 31.6 0 10 1 +1 47 170 35.6 0 10 1 +1 48 1 41.9 0 10 1 +1 49 6 17.4 0 10 1 +1 50 85 18.4 0 10 1 +1 51 85 27.3 0 10 1 +1 52 85 26 0 10 1 +1 53 170 38.3 0 10 1 +1 54 28 21.7 0 10 1 +1 55 1 10.7 0 10 1 +1 56 170 49.8 0 10 1 +1 57 1 11.2 0 10 1 +1 58 15 20.7 0 10 1 +1 59 6 44 0 10 1 +1 60 28 41.1 0 10 1 +1 61 28 16.9 0 10 1 +1 62 6 14 0 10 1 +1 63 1 31.6 0 10 1 +1 64 15 18.6 0 10 1 +1 65 28 12 0 10 1 +1 66 6 13.2 0 10 1 +1 67 170 43.3 0 10 1 +1 68 28 31.4 0 10 1 +1 69 85 19.5 0 10 1 +1 70 170 35.6 0 10 1 +1 71 85 18.4 0 10 1 +1 72 1 12.5 0 10 1 +1 73 170 41.3 0 10 1 +1 74 170 15 0 10 0 +1 75 28 12 0 10 0 +1 76 85 36.1 0 10 1 +1 77 1 18 0 10 1 +1 78 85 10.5 0 10 0 +1 79 170 33.9 0 10 1 +1 80 6 26.3 0 10 1 +1 81 85 45 0 10 1 +1 82 28 21.7 0 10 1 +1 83 28 13 0 10 0 +1 84 85 27.3 0 10 1 +1 85 15 18.6 0 10 1 +1 86 15 12.5 0 10 1 +1 87 6 26.3 0 10 1 +1 88 6 11 0 10 1 +1 89 15 10.7 0 10 0 +1 90 6 16.7 0 10 1 +1 91 28 41.1 0 10 1 +1 92 85 26 0 10 1 +1 93 85 24.4 0 10 1 +1 94 1 12.5 0 10 1 +1 95 6 17.4 0 10 1 +1 96 6 35.1 0 10 1 +1 97 6 14 0 10 1 +1 98 15 10.5 0 10 0 +1 99 1 11.8 0 10 1 +1 100 15 10.7 0 10 1 +1 101 15 39.6 0 10 1 +1 102 85 19.5 0 10 1 +1 103 1 11.2 0 10 1 +1 104 170 29.7 0 10 1 +1 105 15 49 0 10 1 +1 106 1 41.9 0 10 1 +1 107 1 18 0 10 1 +1 108 28 15.5 0 10 1 +2 1 1 11.8 0 10 0 +2 2 170 35.6 0 10 0 +2 3 85 10.5 0 10 0 +2 4 28 21.1 0 10 1 +2 5 28 13 0 10 0 +2 6 6 10.5 0 10 0 +2 7 15 10.5 0 10 0 +2 8 6 17.4 0 10 1 +2 9 85 26 0 10 1 +2 10 6 35.1 0 10 1 +2 11 28 21.7 0 10 1 +2 12 6 14 0 10 1 +2 13 15 14.2 0 10 0 +2 14 1 12.5 0 10 1 +2 15 170 38.3 0 10 0 +2 16 1 18 0 10 1 +2 17 15 39.6 0 10 1 +2 18 85 18.4 0 10 0 +2 19 28 21.1 0 10 0 +2 20 85 19.5 0 10 0 +2 21 6 11 0 10 1 +2 22 85 30.9 0 10 1 +2 23 1 10.7 0 10 1 +2 24 28 16.9 0 10 0 +2 25 170 29.7 0 10 1 +2 26 170 43.3 0 10 0 +2 27 6 14 0 10 1 +2 28 6 11 0 10 1 +2 29 28 41.1 0 10 1 +2 30 1 31.6 0 10 1 +2 31 15 18.6 0 10 1 +2 32 15 14.2 0 10 1 +2 33 28 12 0 10 0 +2 34 1 21.3 0 10 1 +2 35 85 36.1 0 10 1 +2 36 85 26 0 10 1 +2 37 15 49 0 10 1 +2 38 1 41.9 0 10 1 +2 39 1 21.3 0 10 1 +2 40 170 41.3 0 10 0 +2 41 170 43.3 0 10 0 +2 42 15 18.6 0 10 1 +2 43 15 49 0 10 1 +2 44 170 15 0 10 0 +2 45 85 24.4 0 10 1 +2 46 15 30.1 0 10 1 +2 47 85 18.4 0 10 0 +2 48 170 41.3 0 10 0 +2 49 28 41.1 0 10 1 +2 50 28 31.4 0 10 1 +2 51 6 35.1 0 10 1 +2 52 1 11.2 0 10 0 +2 53 170 33.9 0 10 0 +2 54 28 15.5 0 10 0 +2 55 1 18 0 10 1 +2 56 15 10.7 0 10 0 +2 57 85 45 0 10 1 +2 58 85 19.5 0 10 0 +2 59 6 44 0 10 1 +2 60 85 30.9 0 10 0 +2 61 1 11 0 10 0 +2 62 170 35.6 0 10 1 +2 63 170 29.7 0 10 0 +2 64 6 16.7 0 10 1 +2 65 28 15.5 0 10 0 +2 66 6 44 0 10 1 +2 67 85 10.5 0 10 0 +2 68 85 45 0 10 1 +2 69 1 11 0 10 0 +2 70 15 10.5 0 10 0 +2 71 170 49.8 0 10 0 +2 72 15 20.7 0 10 0 +2 73 6 13.2 0 10 1 +2 74 15 12.5 0 10 1 +2 75 28 13 0 10 1 +2 76 1 10.7 0 10 1 +2 77 28 13.4 0 10 1 +2 78 15 39.6 0 10 1 +2 79 15 20.7 0 10 1 +2 80 1 11.2 0 10 1 +2 81 85 24.4 0 10 1 +2 82 1 12.5 0 10 0 +2 83 170 49.8 0 10 1 +2 84 170 33.9 0 10 1 +2 85 85 27.3 0 10 0 +2 86 170 24.4 0 10 0 +2 87 15 10.7 0 10 0 +2 88 6 16.7 0 10 1 +2 89 1 11.8 0 10 0 +2 90 6 10.5 0 10 0 +2 91 28 12 0 10 0 +2 92 6 17.4 0 10 1 +2 93 28 16.9 0 10 0 +2 94 28 13.4 0 10 1 +2 
95 1 31.6 0 10 1 +2 96 85 36.1 0 10 1 +2 97 15 30.1 0 10 0 +2 98 170 15 0 10 0 +2 99 85 27.3 0 10 0 +2 100 170 38.3 0 10 0 +2 101 15 12.5 0 10 0 +2 102 6 26.3 0 10 1 +2 103 1 41.9 0 10 1 +2 104 6 13.2 0 10 1 +2 105 28 21.7 0 10 0 +2 106 170 24.4 0 10 0 +2 107 28 31.4 0 10 0 +2 108 6 26.3 0 10 1 +3 1 28 16.9 0 10 0 +3 2 1 21.3 0 10 1 +3 3 6 44 0 10 1 +3 4 170 49.8 0 10 1 +3 5 28 13.4 0 10 1 +3 6 28 21.1 0 10 1 +3 7 15 14.2 0 10 1 +3 8 6 26.3 0 10 1 +3 9 85 24.4 0 10 0 +3 10 170 41.3 0 10 1 +3 11 28 12 0 10 0 +3 12 15 39.6 0 10 1 +3 13 85 30.9 0 10 1 +3 14 28 31.4 0 10 1 +3 15 85 10.5 0 10 1 +3 16 1 10.7 0 10 1 +3 17 28 31.4 0 10 1 +3 18 6 26.3 0 10 1 +3 19 1 41.9 0 10 1 +3 20 6 13.2 0 10 0 +3 21 28 41.1 0 10 1 +3 22 15 12.5 0 10 1 +3 23 15 39.6 0 10 1 +3 24 85 10.5 0 10 0 +3 25 28 12 0 10 1 +3 26 170 38.3 0 10 1 +3 27 85 36.1 0 10 1 +3 28 1 41.9 0 10 1 +3 29 15 10.5 0 10 0 +3 30 85 19.5 0 10 0 +3 31 85 26 0 10 1 +3 32 85 45 0 10 1 +3 33 1 12.5 0 10 1 +3 34 6 13.2 0 10 1 +3 35 15 10.7 0 10 1 +3 36 1 11 0 10 1 +3 37 15 30.1 0 10 0 +3 38 15 20.7 0 10 1 +3 39 6 17.4 0 10 1 +3 40 6 10.5 0 10 0 +3 41 170 15 0 10 1 +3 42 15 12.5 0 10 1 +3 43 1 31.6 0 10 1 +3 44 15 10.5 0 10 0 +3 45 170 41.3 0 10 1 +3 46 170 15 0 10 0 +3 47 15 18.6 0 10 1 +3 48 6 17.4 0 10 1 +3 49 85 18.4 0 10 0 +3 50 170 43.3 0 10 1 +3 51 28 21.7 0 10 0 +3 52 6 16.7 0 10 0 +3 53 170 33.9 0 10 0 +3 54 1 18 0 10 1 +3 55 1 18 0 10 1 +3 56 15 30.1 0 10 1 +3 57 1 10.7 0 10 0 +3 58 85 27.3 0 10 1 +3 59 6 35.1 0 10 1 +3 60 85 30.9 0 10 1 +3 61 85 24.4 0 10 1 +3 62 85 19.5 0 10 0 +3 63 170 33.9 0 10 1 +3 64 6 10.5 0 10 0 +3 65 85 27.3 0 10 1 +3 66 28 16.9 0 10 0 +3 67 6 35.1 0 10 1 +3 68 15 49 0 10 1 +3 69 85 26 0 10 1 +3 70 85 45 0 10 1 +3 71 1 11.8 0 10 1 +3 72 170 35.6 0 10 1 +3 73 1 31.6 0 10 1 +3 74 28 13 0 10 0 +3 75 28 21.1 0 10 1 +3 76 15 20.7 0 10 1 +3 77 15 10.7 0 10 0 +3 78 28 15.5 0 10 0 +3 79 1 21.3 0 10 1 +3 80 6 14 0 10 1 +3 81 170 49.8 0 10 1 +3 82 85 36.1 0 10 1 +3 83 1 11.2 0 10 0 +3 84 28 15.5 0 10 0 +3 85 170 29.7 0 10 1 +3 86 170 24.4 0 10 1 +3 87 170 24.4 0 10 1 +3 88 28 13.4 0 10 0 +3 89 15 18.6 0 10 1 +3 90 28 21.7 0 10 1 +3 91 85 18.4 0 10 1 +3 92 6 16.7 0 10 1 +3 93 6 11 0 10 1 +3 94 28 41.1 0 10 1 +3 95 170 43.3 0 10 1 +3 96 6 44 0 10 1 +3 97 1 11.2 0 10 1 +3 98 6 11 0 10 1 +3 99 170 35.6 0 10 1 +3 100 15 49 0 10 1 +3 101 170 38.3 0 10 1 +3 102 28 13 0 10 0 +3 103 170 29.7 0 10 1 +3 104 1 12.5 0 10 1 +3 105 1 11 0 10 1 +3 106 1 11.8 0 10 1 +3 107 6 14 0 10 0 +3 108 15 14.2 0 10 1 +4 1 170 41.3 0 10 1 +4 2 170 38.3 0 10 1 +4 3 28 21.1 0 10 1 +4 4 15 20.7 0 10 1 +4 5 85 45 0 10 1 +4 6 85 45 0 10 1 +4 7 28 21.7 0 10 1 +4 8 1 11.2 0 10 1 +4 9 170 49.8 0 10 1 +4 10 6 14 0 10 0 +4 11 28 21.7 0 10 1 +4 12 1 11.2 0 10 1 +4 13 1 31.6 0 10 1 +4 14 6 10.5 0 10 1 +4 15 1 21.3 0 10 1 +4 16 170 43.3 0 10 1 +4 17 1 18 0 10 1 +4 18 15 10.5 0 10 0 +4 19 15 20.7 0 10 1 +4 20 15 39.6 0 10 1 +4 21 170 33.9 0 10 1 +4 22 1 21.3 0 10 1 +4 23 85 30.9 0 10 1 +4 24 15 18.6 0 10 1 +4 25 28 13.4 0 10 1 +4 26 170 15 0 10 1 +4 27 170 41.3 0 10 1 +4 28 85 27.3 0 10 0 +4 29 1 11.8 0 10 0 +4 30 85 24.4 0 10 1 +4 31 15 49 0 10 1 +4 32 6 17.4 0 10 1 +4 33 6 35.1 0 10 1 +4 34 170 15 0 10 0 +4 35 6 26.3 0 10 1 +4 36 170 35.6 0 10 0 +4 37 6 13.2 0 10 1 +4 38 28 15.5 0 10 1 +4 39 1 11 0 10 1 +4 40 15 12.5 0 10 1 +4 41 6 13.2 0 10 0 +4 42 1 10.7 0 10 1 +4 43 6 17.4 0 10 1 +4 44 85 10.5 0 10 0 +4 45 28 13.4 0 10 1 +4 46 1 41.9 0 10 1 +4 47 28 13 0 10 1 +4 48 28 16.9 0 10 0 +4 49 85 36.1 0 10 1 +4 50 15 18.6 0 10 1 +4 51 85 27.3 0 10 1 +4 52 15 49 0 10 
1 +4 53 15 30.1 0 10 1 +4 54 170 29.7 0 10 1 +4 55 6 14 0 10 1 +4 56 28 41.1 0 10 1 +4 57 15 30.1 0 10 1 +4 58 15 12.5 0 10 1 +4 59 85 30.9 0 10 1 +4 60 28 21.1 0 10 1 +4 61 6 44 0 10 1 +4 62 28 16.9 0 10 1 +4 63 6 11 0 10 0 +4 64 170 38.3 0 10 1 +4 65 85 18.4 0 10 1 +4 66 85 19.5 0 10 1 +4 67 170 33.9 0 10 0 +4 68 170 35.6 0 10 1 +4 69 15 14.2 0 10 1 +4 70 28 13 0 10 0 +4 71 28 31.4 0 10 1 +4 72 1 11.8 0 10 0 +4 73 1 12.5 0 10 0 +4 74 28 31.4 0 10 1 +4 75 1 12.5 0 10 1 +4 76 28 41.1 0 10 1 +4 77 1 10.7 0 10 1 +4 78 170 24.4 0 10 1 +4 79 6 16.7 0 10 1 +4 80 170 24.4 0 10 1 +4 81 6 35.1 0 10 1 +4 82 1 11 0 10 0 +4 83 28 12 0 10 0 +4 84 15 10.5 0 10 0 +4 85 15 10.7 0 10 0 +4 86 28 12 0 10 1 +4 87 85 19.5 0 10 1 +4 88 6 16.7 0 10 1 +4 89 6 11 0 10 0 +4 90 15 39.6 0 10 1 +4 91 85 24.4 0 10 0 +4 92 6 26.3 0 10 1 +4 93 85 18.4 0 10 1 +4 94 15 14.2 0 10 0 +4 95 6 10.5 0 10 0 +4 96 1 41.9 0 10 1 +4 97 85 36.1 0 10 1 +4 98 85 26 0 10 1 +4 99 28 15.5 0 10 0 +4 100 1 31.6 0 10 1 +4 101 6 44 0 10 1 +4 102 85 26 0 10 1 +4 103 170 29.7 0 10 1 +4 104 170 43.3 0 10 1 +4 105 170 49.8 0 10 1 +4 106 85 10.5 0 10 0 +4 107 1 18 0 10 1 +4 108 15 10.7 0 10 1 +5 1 170 41.3 0 10 1 +5 2 85 18.4 0 10 1 +5 3 28 21.7 0 10 1 +5 4 85 10.5 0 10 0 +5 5 15 14.2 0 10 1 +5 6 28 21.7 0 10 1 +5 7 85 30.9 0 10 1 +5 8 85 26 0 10 1 +5 9 1 10.7 0 10 1 +5 10 28 13 0 10 0 +5 11 170 33.9 0 10 1 +5 12 85 36.1 0 10 0 +5 13 15 30.1 0 10 1 +5 14 1 31.6 0 10 1 +5 15 6 13.2 0 10 1 +5 16 1 11 0 10 1 +5 17 85 24.4 0 10 1 +5 18 1 41.9 0 10 1 +5 19 15 14.2 0 10 0 +5 20 15 20.7 0 10 1 +5 21 15 10.5 0 10 0 +5 22 6 10.5 0 10 1 +5 23 85 45 0 10 1 +5 24 28 16.9 0 10 1 +5 25 1 21.3 0 10 1 +5 26 6 14 0 10 1 +5 27 28 13.4 0 10 0 +5 28 6 17.4 0 10 1 +5 29 170 33.9 0 10 0 +5 30 15 18.6 0 10 1 +5 31 85 45 0 10 1 +5 32 28 13.4 0 10 0 +5 33 15 10.5 0 10 0 +5 34 15 49 0 10 1 +5 35 170 43.3 0 10 1 +5 36 15 39.6 0 10 1 +5 37 85 18.4 0 10 0 +5 38 170 49.8 0 10 1 +5 39 15 10.7 0 10 0 +5 40 170 24.4 0 10 0 +5 41 15 39.6 0 10 1 +5 42 28 41.1 0 10 1 +5 43 85 27.3 0 10 1 +5 44 1 18 0 10 1 +5 45 1 12.5 0 10 1 +5 46 1 11.8 0 10 0 +5 47 28 15.5 0 10 0 +5 48 170 15 0 10 0 +5 49 28 21.1 0 10 1 +5 50 6 11 0 10 0 +5 51 28 31.4 0 10 1 +5 52 1 31.6 0 10 1 +5 53 15 20.7 0 10 1 +5 54 28 31.4 0 10 1 +5 55 1 11.2 0 10 1 +5 56 6 11 0 10 1 +5 57 6 10.5 0 10 1 +5 58 15 10.7 0 10 1 +5 59 28 13 0 10 0 +5 60 85 26 0 10 1 +5 61 6 35.1 0 10 1 +5 62 170 35.6 0 10 1 +5 63 85 27.3 0 10 1 +5 64 85 30.9 0 10 1 +5 65 1 41.9 0 10 1 +5 66 170 35.6 0 10 1 +5 67 28 15.5 0 10 1 +5 68 1 11.2 0 10 1 +5 69 170 49.8 0 10 1 +5 70 15 12.5 0 10 0 +5 71 85 19.5 0 10 1 +5 72 6 16.7 0 10 1 +5 73 1 10.7 0 10 1 +5 74 6 44 0 10 1 +5 75 170 29.7 0 10 1 +5 76 6 17.4 0 10 1 +5 77 1 21.3 0 10 1 +5 78 170 38.3 0 10 0 +5 79 170 24.4 0 10 0 +5 80 6 35.1 0 10 1 +5 81 1 12.5 0 10 1 +5 82 1 11.8 0 10 1 +5 83 28 12 0 10 1 +5 84 28 12 0 10 1 +5 85 85 36.1 0 10 1 +5 86 170 29.7 0 10 1 +5 87 170 43.3 0 10 1 +5 88 1 11 0 10 0 +5 89 85 24.4 0 10 0 +5 90 15 30.1 0 10 1 +5 91 6 14 0 10 0 +5 92 170 38.3 0 10 1 +5 93 6 44 0 10 1 +5 94 6 16.7 0 10 1 +5 95 6 26.3 0 10 1 +5 96 28 16.9 0 10 0 +5 97 85 10.5 0 10 0 +5 98 15 18.6 0 10 1 +5 99 28 21.1 0 10 1 +5 100 170 15 0 10 0 +5 101 15 49 0 10 1 +5 102 170 41.3 0 10 1 +5 103 6 13.2 0 10 1 +5 104 85 19.5 0 10 1 +5 105 6 26.3 0 10 1 +5 106 28 41.1 0 10 1 +5 107 1 18 0 10 1 +5 108 15 12.5 0 10 0 +6 1 15 18.6 0 10 1 +6 2 1 10.7 0 10 1 +6 3 1 11.2 0 10 1 +6 4 15 18.6 0 10 1 +6 5 28 16.9 0 10 1 +6 6 85 27.3 0 10 1 +6 7 28 13 0 10 1 +6 8 15 10.7 0 10 0 +6 9 170 33.9 0 10 1 +6 10 15 14.2 0 
10 1 +6 11 15 10.5 0 10 1 +6 12 170 33.9 0 10 1 +6 13 15 39.6 0 10 1 +6 14 1 11.8 0 10 1 +6 15 15 10.7 0 10 0 +6 16 28 21.1 0 10 1 +6 17 85 18.4 0 10 0 +6 18 1 18 0 10 1 +6 19 1 11 0 10 1 +6 20 15 12.5 0 10 1 +6 21 170 38.3 0 10 1 +6 22 1 11 0 10 0 +6 23 6 16.7 0 10 0 +6 24 28 16.9 0 10 1 +6 25 6 17.4 0 10 1 +6 26 1 12.5 0 10 1 +6 27 85 18.4 0 10 0 +6 28 28 31.4 0 10 1 +6 29 6 26.3 0 10 1 +6 30 85 45 0 10 1 +6 31 85 24.4 0 10 1 +6 32 6 16.7 0 10 1 +6 33 85 10.5 0 10 0 +6 34 6 44 0 10 1 +6 35 1 12.5 0 10 1 +6 36 170 15 0 10 0 +6 37 170 15 0 10 0 +6 38 15 39.6 0 10 1 +6 39 85 19.5 0 10 1 +6 40 15 10.5 0 10 1 +6 41 85 27.3 0 10 1 +6 42 170 29.7 0 10 1 +6 43 170 24.4 0 10 1 +6 44 15 14.2 0 10 0 +6 45 6 11 0 10 1 +6 46 1 41.9 0 10 1 +6 47 1 31.6 0 10 1 +6 48 28 13.4 0 10 1 +6 49 15 30.1 0 10 1 +6 50 28 41.1 0 10 1 +6 51 28 13 0 10 1 +6 52 85 19.5 0 10 1 +6 53 170 43.3 0 10 1 +6 54 28 41.1 0 10 1 +6 55 6 17.4 0 10 1 +6 56 15 20.7 0 10 1 +6 57 15 30.1 0 10 1 +6 58 170 49.8 0 10 1 +6 59 85 36.1 0 10 1 +6 60 85 30.9 0 10 1 +6 61 170 35.6 0 10 1 +6 62 15 20.7 0 10 1 +6 63 1 11.2 0 10 0 +6 64 170 24.4 0 10 1 +6 65 28 21.7 0 10 1 +6 66 1 10.7 0 10 1 +6 67 85 45 0 10 1 +6 68 6 10.5 0 10 1 +6 69 15 12.5 0 10 1 +6 70 28 31.4 0 10 1 +6 71 170 38.3 0 10 1 +6 72 1 18 0 10 1 +6 73 1 21.3 0 10 1 +6 74 6 35.1 0 10 1 +6 75 28 13.4 0 10 0 +6 76 85 10.5 0 10 0 +6 77 28 12 0 10 1 +6 78 6 10.5 0 10 1 +6 79 1 11.8 0 10 1 +6 80 6 13.2 0 10 1 +6 81 1 41.9 0 10 1 +6 82 85 36.1 0 10 1 +6 83 28 15.5 0 10 1 +6 84 85 30.9 0 10 1 +6 85 170 43.3 0 10 1 +6 86 85 26 0 10 1 +6 87 28 21.1 0 10 1 +6 88 28 15.5 0 10 0 +6 89 6 11 0 10 1 +6 90 1 31.6 0 10 1 +6 91 170 49.8 0 10 1 +6 92 1 21.3 0 10 1 +6 93 28 21.7 0 10 1 +6 94 170 41.3 0 10 1 +6 95 15 49 0 10 1 +6 96 6 35.1 0 10 1 +6 97 15 49 0 10 1 +6 98 6 26.3 0 10 1 +6 99 28 12 0 10 1 +6 100 6 14 0 10 1 +6 101 6 44 0 10 1 +6 102 170 29.7 0 10 1 +6 103 6 14 0 10 1 +6 104 170 35.6 0 10 1 +6 105 85 26 0 10 1 +6 106 6 13.2 0 10 1 +6 107 170 41.3 0 10 1 +6 108 85 24.4 0 10 1 +7 1 28 13 0 10 1 +7 2 28 41.1 0 10 1 +7 3 170 29.7 0 10 0 +7 4 1 10.7 0 10 1 +7 5 6 17.4 0 10 1 +7 6 15 12.5 0 10 1 +7 7 15 18.6 0 10 1 +7 8 170 24.4 0 10 0 +7 9 1 11 0 10 1 +7 10 28 16.9 0 10 1 +7 11 170 41.3 0 10 1 +7 12 15 10.5 0 10 0 +7 13 6 10.5 0 10 1 +7 14 28 12 0 10 1 +7 15 170 24.4 0 10 1 +7 16 1 10.7 0 10 0 +7 17 6 35.1 0 10 1 +7 18 85 19.5 0 10 0 +7 19 6 26.3 0 10 1 +7 20 85 26 0 10 1 +7 21 1 11.2 0 10 1 +7 22 6 16.7 0 10 1 +7 23 28 31.4 0 10 1 +7 24 170 35.6 0 10 0 +7 25 1 21.3 0 10 1 +7 26 15 20.7 0 10 1 +7 27 15 14.2 0 10 1 +7 28 85 24.4 0 10 1 +7 29 1 11 0 10 1 +7 30 85 27.3 0 10 1 +7 31 15 18.6 0 10 1 +7 32 6 16.7 0 10 1 +7 33 28 21.1 0 10 1 +7 34 15 39.6 0 10 1 +7 35 28 31.4 0 10 1 +7 36 1 11.8 0 10 1 +7 37 170 38.3 0 10 1 +7 38 1 12.5 0 10 1 +7 39 1 11.8 0 10 1 +7 40 28 21.7 0 10 1 +7 41 28 21.1 0 10 1 +7 42 170 33.9 0 10 0 +7 43 6 14 0 10 1 +7 44 15 12.5 0 10 1 +7 45 15 10.7 0 10 1 +7 46 1 41.9 0 10 1 +7 47 1 18 0 10 1 +7 48 15 14.2 0 10 1 +7 49 6 11 0 10 0 +7 50 85 30.9 0 10 1 +7 51 170 49.8 0 10 1 +7 52 6 44 0 10 1 +7 53 85 45 0 10 1 +7 54 170 49.8 0 10 1 +7 55 85 10.5 0 10 0 +7 56 15 49 0 10 1 +7 57 170 15 0 10 0 +7 58 6 13.2 0 10 1 +7 59 170 35.6 0 10 1 +7 60 170 29.7 0 10 0 +7 61 170 15 0 10 0 +7 62 28 15.5 0 10 1 +7 63 28 21.7 0 10 1 +7 64 85 45 0 10 1 +7 65 28 13.4 0 10 0 +7 66 6 44 0 10 1 +7 67 6 10.5 0 10 1 +7 68 85 36.1 0 10 1 +7 69 6 14 0 10 1 +7 70 170 43.3 0 10 1 +7 71 28 12 0 10 0 +7 72 85 24.4 0 10 1 +7 73 85 18.4 0 10 0 +7 74 15 10.7 0 10 0 +7 75 6 35.1 0 10 1 +7 76 15 49 0 10 1 
+7 77 85 19.5 0 10 0 +7 78 1 12.5 0 10 1 +7 79 1 18 0 10 1 +7 80 28 13 0 10 0 +7 81 6 17.4 0 10 1 +7 82 1 21.3 0 10 1 +7 83 15 30.1 0 10 1 +7 84 85 26 0 10 0 +7 85 85 30.9 0 10 1 +7 86 170 33.9 0 10 0 +7 87 15 39.6 0 10 1 +7 88 1 41.9 0 10 1 +7 89 170 43.3 0 10 1 +7 90 28 16.9 0 10 0 +7 91 85 10.5 0 10 0 +7 92 1 31.6 0 10 1 +7 93 6 26.3 0 10 1 +7 94 15 30.1 0 10 1 +7 95 1 31.6 0 10 1 +7 96 6 13.2 0 10 1 +7 97 170 38.3 0 10 1 +7 98 85 36.1 0 10 1 +7 99 170 41.3 0 10 1 +7 100 28 13.4 0 10 1 +7 101 28 15.5 0 10 0 +7 102 15 10.5 0 10 0 +7 103 6 11 0 10 0 +7 104 15 20.7 0 10 1 +7 105 85 27.3 0 10 0 +7 106 28 41.1 0 10 1 +7 107 85 18.4 0 10 1 +7 108 1 11.2 0 10 0 +8 1 85 19.5 0 10 0 +8 2 85 19.5 0 10 0 +8 3 28 21.1 0 10 0 +8 4 1 11.2 0 10 0 +8 5 170 33.9 0 10 0 +8 6 85 18.4 0 10 1 +8 7 15 20.7 0 10 1 +8 8 1 21.3 0 10 1 +8 9 15 14.2 0 10 0 +8 10 85 30.9 0 10 0 +8 11 1 11 0 10 1 +8 12 170 49.8 0 10 1 +8 13 1 41.9 0 10 1 +8 14 6 44 0 10 1 +8 15 170 38.3 0 10 1 +8 16 28 12 0 10 0 +8 17 6 10.5 0 10 0 +8 18 28 13 0 10 0 +8 19 6 14 0 10 1 +8 20 170 43.3 0 10 0 +8 21 6 17.4 0 10 1 +8 22 1 18 0 10 1 +8 23 85 36.1 0 10 0 +8 24 15 10.5 0 10 0 +8 25 85 24.4 0 10 1 +8 26 170 29.7 0 10 0 +8 27 6 14 0 10 1 +8 28 15 12.5 0 10 0 +8 29 28 15.5 0 10 0 +8 30 85 45 0 10 1 +8 31 28 13.4 0 10 0 +8 32 6 16.7 0 10 1 +8 33 170 49.8 0 10 0 +8 34 6 17.4 0 10 1 +8 35 85 26 0 10 1 +8 36 1 10.7 0 10 0 +8 37 6 11 0 10 1 +8 38 1 11.8 0 10 1 +8 39 1 12.5 0 10 0 +8 40 85 26 0 10 0 +8 41 15 10.7 0 10 0 +8 42 170 35.6 0 10 1 +8 43 85 27.3 0 10 1 +8 44 170 43.3 0 10 0 +8 45 28 13.4 0 10 0 +8 46 28 12 0 10 0 +8 47 1 31.6 0 10 1 +8 48 6 13.2 0 10 1 +8 49 85 36.1 0 10 1 +8 50 28 21.7 0 10 1 +8 51 15 18.6 0 10 0 +8 52 85 27.3 0 10 0 +8 53 6 26.3 0 10 1 +8 54 1 41.9 0 10 1 +8 55 15 30.1 0 10 1 +8 56 1 10.7 0 10 0 +8 57 170 15 0 10 0 +8 58 6 10.5 0 10 0 +8 59 28 31.4 0 10 1 +8 60 28 41.1 0 10 1 +8 61 170 29.7 0 10 0 +8 62 1 11.8 0 10 0 +8 63 15 18.6 0 10 0 +8 64 1 11 0 10 0 +8 65 170 41.3 0 10 1 +8 66 15 39.6 0 10 1 +8 67 28 31.4 0 10 0 +8 68 6 16.7 0 10 1 +8 69 15 49 0 10 1 +8 70 85 45 0 10 1 +8 71 170 24.4 0 10 1 +8 72 85 24.4 0 10 1 +8 73 1 18 0 10 1 +8 74 85 10.5 0 10 0 +8 75 28 21.7 0 10 1 +8 76 28 16.9 0 10 0 +8 77 6 44 0 10 1 +8 78 170 33.9 0 10 1 +8 79 6 11 0 10 1 +8 80 28 13 0 10 1 +8 81 28 41.1 0 10 1 +8 82 6 13.2 0 10 1 +8 83 28 15.5 0 10 0 +8 84 15 49 0 10 1 +8 85 15 14.2 0 10 1 +8 86 170 41.3 0 10 1 +8 87 15 12.5 0 10 0 +8 88 85 18.4 0 10 1 +8 89 1 12.5 0 10 1 +8 90 15 20.7 0 10 0 +8 91 6 26.3 0 10 1 +8 92 170 24.4 0 10 0 +8 93 28 21.1 0 10 1 +8 94 15 10.5 0 10 0 +8 95 6 35.1 0 10 1 +8 96 85 30.9 0 10 1 +8 97 1 21.3 0 10 1 +8 98 15 39.6 0 10 1 +8 99 170 35.6 0 10 1 +8 100 15 10.7 0 10 1 +8 101 85 10.5 0 10 0 +8 102 28 16.9 0 10 0 +8 103 170 15 0 10 0 +8 104 170 38.3 0 10 0 +8 105 6 35.1 0 10 1 +8 106 1 31.6 0 10 1 +8 107 15 30.1 0 10 1 +8 108 1 11.2 0 10 1 +9 1 1 11.2 0 10 1 +9 2 6 10.5 0 10 0 +9 3 28 31.4 0 10 1 +9 4 15 49 0 10 1 +9 5 15 12.5 0 10 1 +9 6 170 33.9 0 10 1 +9 7 170 35.6 0 10 0 +9 8 6 17.4 0 10 1 +9 9 1 21.3 0 10 1 +9 10 1 10.7 0 10 0 +9 11 1 11.8 0 10 1 +9 12 1 31.6 0 10 1 +9 13 6 16.7 0 10 0 +9 14 1 10.7 0 10 1 +9 15 170 15 0 10 0 +9 16 170 43.3 0 10 1 +9 17 85 27.3 0 10 0 +9 18 28 21.7 0 10 1 +9 19 1 11 0 10 0 +9 20 1 11.8 0 10 1 +9 21 1 12.5 0 10 1 +9 22 6 16.7 0 10 1 +9 23 170 35.6 0 10 1 +9 24 6 11 0 10 1 +9 25 85 30.9 0 10 0 +9 26 28 13 0 10 0 +9 27 28 41.1 0 10 1 +9 28 85 10.5 0 10 0 +9 29 1 11.2 0 10 1 +9 30 85 36.1 0 10 1 +9 31 1 12.5 0 10 1 +9 32 6 26.3 0 10 1 +9 33 170 33.9 0 10 1 +9 34 170 43.3 0 10 
0 +9 35 85 10.5 0 10 0 +9 36 170 49.8 0 10 0 +9 37 15 18.6 0 10 1 +9 38 6 14 0 10 1 +9 39 6 11 0 10 0 +9 40 15 39.6 0 10 1 +9 41 85 19.5 0 10 0 +9 42 15 10.7 0 10 0 +9 43 85 36.1 0 10 1 +9 44 1 18 0 10 0 +9 45 170 49.8 0 10 1 +9 46 15 20.7 0 10 1 +9 47 1 11 0 10 1 +9 48 28 13.4 0 10 1 +9 49 15 20.7 0 10 1 +9 50 1 18 0 10 1 +9 51 85 18.4 0 10 1 +9 52 85 18.4 0 10 0 +9 53 85 26 0 10 1 +9 54 28 31.4 0 10 1 +9 55 6 44 0 10 1 +9 56 6 13.2 0 10 0 +9 57 6 10.5 0 10 0 +9 58 28 12 0 10 0 +9 59 15 10.5 0 10 0 +9 60 6 17.4 0 10 1 +9 61 170 24.4 0 10 0 +9 62 15 30.1 0 10 1 +9 63 6 35.1 0 10 1 +9 64 15 10.7 0 10 1 +9 65 15 14.2 0 10 1 +9 66 170 41.3 0 10 1 +9 67 28 21.1 0 10 1 +9 68 6 26.3 0 10 1 +9 69 15 14.2 0 10 1 +9 70 85 24.4 0 10 0 +9 71 85 27.3 0 10 0 +9 72 28 13.4 0 10 1 +9 73 170 29.7 0 10 0 +9 74 28 15.5 0 10 0 +9 75 85 45 0 10 1 +9 76 170 38.3 0 10 0 +9 77 28 16.9 0 10 1 +9 78 6 35.1 0 10 1 +9 79 85 19.5 0 10 0 +9 80 15 18.6 0 10 1 +9 81 15 12.5 0 10 1 +9 82 85 30.9 0 10 0 +9 83 28 12 0 10 1 +9 84 28 21.7 0 10 1 +9 85 28 13 0 10 0 +9 86 1 41.9 0 10 1 +9 87 15 39.6 0 10 1 +9 88 6 13.2 0 10 0 +9 89 1 21.3 0 10 1 +9 90 170 15 0 10 0 +9 91 15 30.1 0 10 1 +9 92 85 26 0 10 0 +9 93 15 49 0 10 1 +9 94 85 45 0 10 1 +9 95 6 14 0 10 0 +9 96 170 38.3 0 10 1 +9 97 170 29.7 0 10 0 +9 98 28 16.9 0 10 0 +9 99 6 44 0 10 1 +9 100 1 31.6 0 10 1 +9 101 15 10.5 0 10 0 +9 102 28 41.1 0 10 1 +9 103 85 24.4 0 10 0 +9 104 28 15.5 0 10 0 +9 105 28 21.1 0 10 1 +9 106 1 41.9 0 10 1 +9 107 170 41.3 0 10 1 +9 108 170 24.4 0 10 0 +10 1 170 41.3 0 10 0 +10 2 6 10.5 0 10 0 +10 3 170 15 0 10 0 +10 4 85 27.3 0 10 0 +10 5 170 15 0 10 0 +10 6 28 13 0 10 0 +10 7 6 35.1 0 10 1 +10 8 15 14.2 0 10 1 +10 9 85 19.5 0 10 0 +10 10 170 43.3 0 10 1 +10 11 85 45 0 10 1 +10 12 1 41.9 0 10 1 +10 13 15 30.1 0 10 1 +10 14 85 26 0 10 1 +10 15 28 12 0 10 1 +10 16 1 11.8 0 10 1 +10 17 15 10.7 0 10 0 +10 18 6 44 0 10 1 +10 19 1 18 0 10 1 +10 20 28 12 0 10 0 +10 21 15 20.7 0 10 1 +10 22 28 41.1 0 10 1 +10 23 15 39.6 0 10 1 +10 24 85 26 0 10 0 +10 25 6 26.3 0 10 1 +10 26 6 35.1 0 10 1 +10 27 6 26.3 0 10 1 +10 28 15 10.5 0 10 0 +10 29 1 31.6 0 10 1 +10 30 170 41.3 0 10 0 +10 31 6 10.5 0 10 1 +10 32 1 11.2 0 10 0 +10 33 170 29.7 0 10 0 +10 34 85 45 0 10 0 +10 35 15 12.5 0 10 0 +10 36 170 38.3 0 10 0 +10 37 85 19.5 0 10 0 +10 38 28 13.4 0 10 0 +10 39 28 13.4 0 10 0 +10 40 15 30.1 0 10 1 +10 41 28 41.1 0 10 1 +10 42 15 10.5 0 10 0 +10 43 170 33.9 0 10 0 +10 44 6 14 0 10 1 +10 45 170 35.6 0 10 1 +10 46 85 10.5 0 10 0 +10 47 85 30.9 0 10 1 +10 48 28 15.5 0 10 0 +10 49 15 39.6 0 10 1 +10 50 6 13.2 0 10 1 +10 51 1 10.7 0 10 1 +10 52 15 14.2 0 10 1 +10 53 6 11 0 10 0 +10 54 6 17.4 0 10 1 +10 55 170 24.4 0 10 1 +10 56 85 18.4 0 10 1 +10 57 28 31.4 0 10 1 +10 58 28 21.7 0 10 1 +10 59 15 18.6 0 10 1 +10 60 85 10.5 0 10 0 +10 61 6 16.7 0 10 1 +10 62 85 18.4 0 10 0 +10 63 6 44 0 10 1 +10 64 1 18 0 10 1 +10 65 28 16.9 0 10 0 +10 66 15 10.7 0 10 0 +10 67 1 10.7 0 10 1 +10 68 15 49 0 10 1 +10 69 170 38.3 0 10 1 +10 70 28 15.5 0 10 0 +10 71 28 31.4 0 10 1 +10 72 6 14 0 10 1 +10 73 170 35.6 0 10 0 +10 74 1 12.5 0 10 1 +10 75 15 18.6 0 10 0 +10 76 1 31.6 0 10 1 +10 77 28 16.9 0 10 1 +10 78 1 21.3 0 10 1 +10 79 15 12.5 0 10 0 +10 80 170 49.8 0 10 0 +10 81 85 27.3 0 10 0 +10 82 6 16.7 0 10 1 +10 83 85 36.1 0 10 0 +10 84 85 36.1 0 10 1 +10 85 6 17.4 0 10 1 +10 86 1 11 0 10 0 +10 87 6 13.2 0 10 0 +10 88 170 29.7 0 10 0 +10 89 1 11.2 0 10 0 +10 90 1 41.9 0 10 1 +10 91 170 33.9 0 10 0 +10 92 1 11.8 0 10 0 +10 93 15 49 0 10 1 +10 94 1 21.3 0 10 0 +10 95 85 30.9 0 10 0 +10 96 
15 20.7 0 10 1 +10 97 28 21.1 0 10 0 +10 98 170 24.4 0 10 0 +10 99 85 24.4 0 10 0 +10 100 85 24.4 0 10 0 +10 101 28 21.1 0 10 0 +10 102 28 21.7 0 10 1 +10 103 170 49.8 0 10 1 +10 104 6 11 0 10 1 +10 105 1 12.5 0 10 1 +10 106 28 13 0 10 0 +10 107 170 43.3 0 10 0 +10 108 1 11 0 10 0 +11 1 6 10.5 0 10 0 +11 2 85 36.1 0 10 1 +11 3 85 27.3 0 10 0 +11 4 6 16.7 0 10 1 +11 5 1 31.6 0 10 1 +11 6 170 33.9 0 10 0 +11 7 15 10.5 0 10 0 +11 8 170 35.6 0 10 0 +11 9 15 10.7 0 10 0 +11 10 15 10.7 0 10 1 +11 11 170 15 0 10 0 +11 12 85 26 0 10 0 +11 13 28 21.1 0 10 1 +11 14 170 24.4 0 10 0 +11 15 28 13 0 10 0 +11 16 15 12.5 0 10 1 +11 17 85 19.5 0 10 0 +11 18 85 26 0 10 0 +11 19 6 11 0 10 0 +11 20 6 13.2 0 10 0 +11 21 28 15.5 0 10 0 +11 22 170 41.3 0 10 0 +11 23 6 14 0 10 1 +11 24 1 21.3 0 10 1 +11 25 85 18.4 0 10 1 +11 26 28 12 0 10 1 +11 27 15 49 0 10 1 +11 28 85 45 0 10 1 +11 29 170 41.3 0 10 0 +11 30 170 33.9 0 10 0 +11 31 28 21.7 0 10 1 +11 32 15 18.6 0 10 1 +11 33 1 12.5 0 10 0 +11 34 1 10.7 0 10 1 +11 35 28 21.1 0 10 0 +11 36 170 35.6 0 10 0 +11 37 1 11.2 0 10 1 +11 38 85 19.5 0 10 1 +11 39 1 41.9 0 10 1 +11 40 28 16.9 0 10 0 +11 41 15 30.1 0 10 1 +11 42 15 20.7 0 10 0 +11 43 15 14.2 0 10 1 +11 44 28 13 0 10 1 +11 45 15 12.5 0 10 1 +11 46 170 43.3 0 10 1 +11 47 170 49.8 0 10 1 +11 48 6 10.5 0 10 1 +11 49 15 30.1 0 10 1 +11 50 28 41.1 0 10 1 +11 51 28 41.1 0 10 1 +11 52 6 26.3 0 10 1 +11 53 85 10.5 0 10 0 +11 54 6 26.3 0 10 1 +11 55 6 44 0 10 1 +11 56 85 30.9 0 10 1 +11 57 85 24.4 0 10 0 +11 58 15 39.6 0 10 1 +11 59 1 41.9 0 10 1 +11 60 170 49.8 0 10 0 +11 61 28 31.4 0 10 1 +11 62 28 15.5 0 10 1 +11 63 28 12 0 10 0 +11 64 6 35.1 0 10 1 +11 65 85 24.4 0 10 0 +11 66 15 49 0 10 1 +11 67 15 39.6 0 10 1 +11 68 1 31.6 0 10 1 +11 69 85 36.1 0 10 0 +11 70 15 14.2 0 10 1 +11 71 28 16.9 0 10 0 +11 72 6 35.1 0 10 1 +11 73 170 15 0 10 0 +11 74 1 12.5 0 10 1 +11 75 15 20.7 0 10 0 +11 76 170 24.4 0 10 0 +11 77 85 18.4 0 10 0 +11 78 6 17.4 0 10 1 +11 79 28 31.4 0 10 1 +11 80 1 10.7 0 10 1 +11 81 6 11 0 10 1 +11 82 1 11.8 0 10 0 +11 83 170 43.3 0 10 1 +11 84 1 18 0 10 1 +11 85 1 11.8 0 10 1 +11 86 6 14 0 10 1 +11 87 85 10.5 0 10 0 +11 88 85 30.9 0 10 0 +11 89 85 27.3 0 10 0 +11 90 28 13.4 0 10 0 +11 91 6 17.4 0 10 1 +11 92 170 38.3 0 10 0 +11 93 6 16.7 0 10 1 +11 94 170 38.3 0 10 0 +11 95 1 18 0 10 1 +11 96 1 11 0 10 1 +11 97 170 29.7 0 10 0 +11 98 170 29.7 0 10 0 +11 99 15 18.6 0 10 1 +11 100 15 10.5 0 10 0 +11 101 1 21.3 0 10 1 +11 102 1 11.2 0 10 0 +11 103 28 13.4 0 10 0 +11 104 85 45 0 10 1 +11 105 28 21.7 0 10 1 +11 106 1 11 0 10 0 +11 107 6 13.2 0 10 1 +11 108 6 44 0 10 1 +12 1 1 11.2 0 10 0 +12 2 15 20.7 0 10 1 +12 3 6 10.5 0 10 0 +12 4 6 35.1 0 10 1 +12 5 28 13 0 10 0 +12 6 1 21.3 0 10 1 +12 7 170 35.6 0 10 0 +12 8 1 11 0 10 0 +12 9 1 31.6 0 10 1 +12 10 85 10.5 0 10 0 +12 11 28 13 0 10 0 +12 12 170 43.3 0 10 0 +12 13 170 29.7 0 10 0 +12 14 85 24.4 0 10 0 +12 15 85 27.3 0 10 1 +12 16 85 27.3 0 10 1 +12 17 28 16.9 0 10 1 +12 18 170 41.3 0 10 0 +12 19 28 13.4 0 10 0 +12 20 170 38.3 0 10 0 +12 21 170 43.3 0 10 1 +12 22 15 12.5 0 10 0 +12 23 15 10.7 0 10 0 +12 24 85 45 0 10 1 +12 25 170 15 0 10 0 +12 26 28 12 0 10 1 +12 27 1 41.9 0 10 1 +12 28 15 39.6 0 10 1 +12 29 6 11 0 10 1 +12 30 170 29.7 0 10 0 +12 31 170 49.8 0 10 1 +12 32 15 10.7 0 10 1 +12 33 85 10.5 0 10 1 +12 34 170 15 0 10 0 +12 35 170 41.3 0 10 1 +12 36 6 16.7 0 10 1 +12 37 15 18.6 0 10 1 +12 38 15 14.2 0 10 1 +12 39 6 35.1 0 10 1 +12 40 6 13.2 0 10 1 +12 41 1 12.5 0 10 1 +12 42 6 17.4 0 10 1 +12 43 1 18 0 10 1 +12 44 1 21.3 0 10 1 +12 45 1 11.2 0 10 0 
+12 46 1 12.5 0 10 1 +12 47 1 41.9 0 10 1 +12 48 15 30.1 0 10 1 +12 49 6 17.4 0 10 1 +12 50 15 10.5 0 10 0 +12 51 15 14.2 0 10 0 +12 52 28 41.1 0 10 1 +12 53 85 45 0 10 1 +12 54 15 39.6 0 10 1 +12 55 28 15.5 0 10 0 +12 56 85 30.9 0 10 0 +12 57 85 36.1 0 10 1 +12 58 170 35.6 0 10 0 +12 59 6 16.7 0 10 1 +12 60 6 13.2 0 10 0 +12 61 85 30.9 0 10 1 +12 62 15 10.5 0 10 0 +12 63 28 12 0 10 0 +12 64 1 11 0 10 1 +12 65 15 18.6 0 10 1 +12 66 6 10.5 0 10 1 +12 67 6 11 0 10 0 +12 68 15 20.7 0 10 1 +12 69 28 13.4 0 10 1 +12 70 1 10.7 0 10 1 +12 71 6 44 0 10 1 +12 72 170 38.3 0 10 0 +12 73 28 31.4 0 10 1 +12 74 15 12.5 0 10 1 +12 75 170 33.9 0 10 0 +12 76 15 49 0 10 1 +12 77 85 26 0 10 0 +12 78 85 18.4 0 10 0 +12 79 1 11.8 0 10 0 +12 80 85 18.4 0 10 0 +12 81 85 24.4 0 10 1 +12 82 170 49.8 0 10 0 +12 83 28 21.7 0 10 1 +12 84 28 16.9 0 10 1 +12 85 1 18 0 10 0 +12 86 6 26.3 0 10 0 +12 87 28 21.7 0 10 1 +12 88 6 26.3 0 10 1 +12 89 6 44 0 10 1 +12 90 28 21.1 0 10 1 +12 91 85 36.1 0 10 1 +12 92 85 26 0 10 0 +12 93 28 41.1 0 10 1 +12 94 28 21.1 0 10 1 +12 95 28 31.4 0 10 1 +12 96 1 10.7 0 10 0 +12 97 15 30.1 0 10 1 +12 98 1 31.6 0 10 1 +12 99 85 19.5 0 10 0 +12 100 170 24.4 0 10 0 +12 101 15 49 0 10 1 +12 102 6 14 0 10 1 +12 103 85 19.5 0 10 1 +12 104 28 15.5 0 10 0 +12 105 170 24.4 0 10 0 +12 106 1 11.8 0 10 0 +12 107 6 14 0 10 1 +12 108 170 33.9 0 10 0 +13 1 170 41.3 0 10 0 +13 2 15 10.5 0 10 0 +13 3 170 15 0 10 0 +13 4 15 12.5 0 10 1 +13 5 85 45 0 10 1 +13 6 6 44 0 10 1 +13 7 1 11.2 0 10 1 +13 8 170 29.7 0 10 0 +13 9 85 27.3 0 10 1 +13 10 1 12.5 0 10 1 +13 11 15 20.7 0 10 1 +13 12 1 18 0 10 1 +13 13 6 16.7 0 10 1 +13 14 28 12 0 10 0 +13 15 6 35.1 0 10 1 +13 16 15 39.6 0 10 1 +13 17 28 41.1 0 10 1 +13 18 15 18.6 0 10 1 +13 19 1 11.2 0 10 1 +13 20 85 36.1 0 10 0 +13 21 15 10.5 0 10 0 +13 22 170 41.3 0 10 1 +13 23 28 16.9 0 10 1 +13 24 85 26 0 10 0 +13 25 28 16.9 0 10 1 +13 26 6 35.1 0 10 1 +13 27 85 24.4 0 10 1 +13 28 85 45 0 10 1 +13 29 1 11.8 0 10 1 +13 30 170 49.8 0 10 1 +13 31 170 33.9 0 10 0 +13 32 28 13.4 0 10 1 +13 33 1 41.9 0 10 1 +13 34 6 26.3 0 10 1 +13 35 170 35.6 0 10 1 +13 36 6 13.2 0 10 1 +13 37 170 29.7 0 10 0 +13 38 1 11.8 0 10 0 +13 39 85 27.3 0 10 1 +13 40 28 21.7 0 10 1 +13 41 6 14 0 10 0 +13 42 1 11 0 10 1 +13 43 6 14 0 10 1 +13 44 170 43.3 0 10 1 +13 45 15 10.7 0 10 1 +13 46 170 24.4 0 10 0 +13 47 28 21.1 0 10 1 +13 48 6 11 0 10 1 +13 49 15 39.6 0 10 1 +13 50 6 13.2 0 10 1 +13 51 15 10.7 0 10 0 +13 52 85 10.5 0 10 0 +13 53 85 18.4 0 10 1 +13 54 1 12.5 0 10 1 +13 55 15 30.1 0 10 1 +13 56 85 24.4 0 10 0 +13 57 28 12 0 10 0 +13 58 15 49 0 10 1 +13 59 28 41.1 0 10 1 +13 60 170 15 0 10 0 +13 61 85 26 0 10 1 +13 62 15 18.6 0 10 1 +13 63 28 13 0 10 0 +13 64 28 15.5 0 10 0 +13 65 28 31.4 0 10 1 +13 66 85 30.9 0 10 1 +13 67 28 13.4 0 10 0 +13 68 85 10.5 0 10 0 +13 69 1 18 0 10 1 +13 70 28 31.4 0 10 1 +13 71 170 33.9 0 10 0 +13 72 1 31.6 0 10 1 +13 73 28 21.1 0 10 1 +13 74 6 17.4 0 10 1 +13 75 1 21.3 0 10 1 +13 76 6 44 0 10 1 +13 77 85 36.1 0 10 1 +13 78 170 38.3 0 10 0 +13 79 85 30.9 0 10 1 +13 80 170 24.4 0 10 0 +13 81 15 14.2 0 10 0 +13 82 85 19.5 0 10 0 +13 83 85 19.5 0 10 1 +13 84 1 11 0 10 0 +13 85 170 49.8 0 10 1 +13 86 1 41.9 0 10 1 +13 87 6 11 0 10 0 +13 88 28 13 0 10 1 +13 89 15 14.2 0 10 1 +13 90 15 20.7 0 10 1 +13 91 170 35.6 0 10 0 +13 92 28 21.7 0 10 1 +13 93 15 49 0 10 1 +13 94 1 10.7 0 10 1 +13 95 15 12.5 0 10 1 +13 96 28 15.5 0 10 1 +13 97 170 43.3 0 10 1 +13 98 1 21.3 0 10 1 +13 99 6 10.5 0 10 0 +13 100 15 30.1 0 10 1 +13 101 6 17.4 0 10 1 +13 102 6 10.5 0 10 1 +13 103 1 10.7 0 10 
1 +13 104 1 31.6 0 10 1 +13 105 6 16.7 0 10 1 +13 106 6 26.3 0 10 1 +13 107 170 38.3 0 10 0 +13 108 85 18.4 0 10 0 +14 1 28 21.7 0 10 1 +14 2 15 14.2 0 10 0 +14 3 6 11 0 10 0 +14 4 15 14.2 0 10 0 +14 5 15 10.7 0 10 1 +14 6 85 30.9 0 10 1 +14 7 6 16.7 0 10 0 +14 8 1 11.8 0 10 1 +14 9 28 13.4 0 10 1 +14 10 1 18 0 10 1 +14 11 15 39.6 0 10 1 +14 12 15 30.1 0 10 1 +14 13 1 11 0 10 0 +14 14 170 41.3 0 10 0 +14 15 6 16.7 0 10 1 +14 16 170 43.3 0 10 0 +14 17 6 35.1 0 10 1 +14 18 15 20.7 0 10 1 +14 19 85 26 0 10 1 +14 20 28 16.9 0 10 1 +14 21 85 19.5 0 10 0 +14 22 28 21.1 0 10 1 +14 23 1 31.6 0 10 1 +14 24 6 26.3 0 10 1 +14 25 28 21.7 0 10 1 +14 26 6 10.5 0 10 0 +14 27 85 24.4 0 10 0 +14 28 85 10.5 0 10 0 +14 29 15 49 0 10 1 +14 30 85 45 0 10 1 +14 31 170 29.7 0 10 1 +14 32 85 27.3 0 10 1 +14 33 170 35.6 0 10 0 +14 34 1 11.8 0 10 1 +14 35 1 18 0 10 1 +14 36 85 27.3 0 10 1 +14 37 6 14 0 10 0 +14 38 28 15.5 0 10 0 +14 39 28 12 0 10 0 +14 40 170 38.3 0 10 1 +14 41 6 13.2 0 10 1 +14 42 85 45 0 10 1 +14 43 6 17.4 0 10 1 +14 44 85 10.5 0 10 0 +14 45 15 10.5 0 10 0 +14 46 15 30.1 0 10 1 +14 47 170 24.4 0 10 0 +14 48 1 12.5 0 10 1 +14 49 15 10.5 0 10 0 +14 50 170 38.3 0 10 1 +14 51 85 18.4 0 10 1 +14 52 1 11 0 10 1 +14 53 170 24.4 0 10 1 +14 54 1 11.2 0 10 1 +14 55 6 10.5 0 10 0 +14 56 1 10.7 0 10 1 +14 57 6 35.1 0 10 1 +14 58 28 13 0 10 1 +14 59 170 29.7 0 10 0 +14 60 28 12 0 10 0 +14 61 85 36.1 0 10 1 +14 62 15 10.7 0 10 1 +14 63 28 21.1 0 10 1 +14 64 15 18.6 0 10 1 +14 65 170 43.3 0 10 1 +14 66 15 18.6 0 10 0 +14 67 85 26 0 10 1 +14 68 28 13.4 0 10 1 +14 69 1 21.3 0 10 1 +14 70 6 11 0 10 1 +14 71 170 35.6 0 10 1 +14 72 170 49.8 0 10 1 +14 73 1 41.9 0 10 1 +14 74 15 12.5 0 10 0 +14 75 1 10.7 0 10 1 +14 76 170 49.8 0 10 1 +14 77 1 31.6 0 10 1 +14 78 85 36.1 0 10 1 +14 79 28 15.5 0 10 1 +14 80 6 44 0 10 1 +14 81 28 13 0 10 0 +14 82 6 14 0 10 1 +14 83 85 18.4 0 10 0 +14 84 15 12.5 0 10 0 +14 85 1 11.2 0 10 0 +14 86 15 49 0 10 1 +14 87 170 33.9 0 10 1 +14 88 85 19.5 0 10 0 +14 89 6 17.4 0 10 1 +14 90 28 41.1 0 10 1 +14 91 6 44 0 10 1 +14 92 170 15 0 10 0 +14 93 28 31.4 0 10 1 +14 94 1 12.5 0 10 1 +14 95 28 16.9 0 10 1 +14 96 85 24.4 0 10 1 +14 97 15 39.6 0 10 1 +14 98 170 41.3 0 10 1 +14 99 1 21.3 0 10 1 +14 100 170 15 0 10 0 +14 101 170 33.9 0 10 1 +14 102 85 30.9 0 10 1 +14 103 28 41.1 0 10 1 +14 104 6 26.3 0 10 1 +14 105 28 31.4 0 10 1 +14 106 6 13.2 0 10 1 +14 107 15 20.7 0 10 1 +14 108 1 41.9 0 10 1 +15 1 15 10.7 0 10 0 +15 2 28 13.4 0 10 0 +15 3 170 33.9 0 10 0 +15 4 15 49 0 10 1 +15 5 28 21.7 0 10 0 +15 6 170 15 0 10 0 +15 7 28 41.1 0 10 1 +15 8 85 45 0 10 1 +15 9 28 13 0 10 1 +15 10 170 33.9 0 10 1 +15 11 6 11 0 10 0 +15 12 85 27.3 0 10 1 +15 13 1 11.8 0 10 1 +15 14 1 10.7 0 10 1 +15 15 28 12 0 10 0 +15 16 6 14 0 10 0 +15 17 1 11.2 0 10 0 +15 18 15 39.6 0 10 1 +15 19 15 30.1 0 10 0 +15 20 15 20.7 0 10 1 +15 21 28 13 0 10 0 +15 22 6 44 0 10 1 +15 23 170 38.3 0 10 0 +15 24 15 18.6 0 10 1 +15 25 15 14.2 0 10 1 +15 26 15 18.6 0 10 1 +15 27 170 41.3 0 10 0 +15 28 28 21.1 0 10 1 +15 29 6 14 0 10 1 +15 30 28 15.5 0 10 0 +15 31 170 24.4 0 10 0 +15 32 1 31.6 0 10 1 +15 33 6 35.1 0 10 1 +15 34 15 30.1 0 10 1 +15 35 170 49.8 0 10 1 +15 36 85 18.4 0 10 0 +15 37 15 10.5 0 10 1 +15 38 170 38.3 0 10 0 +15 39 6 26.3 0 10 1 +15 40 170 41.3 0 10 1 +15 41 85 10.5 0 10 0 +15 42 1 18 0 10 1 +15 43 6 10.5 0 10 1 +15 44 85 19.5 0 10 0 +15 45 1 21.3 0 10 1 +15 46 28 13.4 0 10 1 +15 47 15 39.6 0 10 1 +15 48 170 15 0 10 0 +15 49 85 24.4 0 10 0 +15 50 15 12.5 0 10 0 +15 51 85 30.9 0 10 0 +15 52 28 12 0 10 0 +15 53 85 18.4 0 10 
0 +15 54 28 31.4 0 10 1 +15 55 170 35.6 0 10 0 +15 56 1 41.9 0 10 1 +15 57 15 10.7 0 10 0 +15 58 6 44 0 10 1 +15 59 85 26 0 10 0 +15 60 6 26.3 0 10 1 +15 61 170 29.7 0 10 0 +15 62 6 17.4 0 10 1 +15 63 85 36.1 0 10 0 +15 64 1 11 0 10 1 +15 65 1 11.2 0 10 1 +15 66 15 20.7 0 10 1 +15 67 6 10.5 0 10 0 +15 68 28 16.9 0 10 0 +15 69 170 43.3 0 10 0 +15 70 1 21.3 0 10 0 +15 71 1 31.6 0 10 1 +15 72 170 24.4 0 10 0 +15 73 170 35.6 0 10 0 +15 74 1 10.7 0 10 1 +15 75 170 29.7 0 10 0 +15 76 85 36.1 0 10 0 +15 77 6 11 0 10 1 +15 78 1 12.5 0 10 1 +15 79 15 49 0 10 1 +15 80 85 45 0 10 1 +15 81 28 41.1 0 10 1 +15 82 85 10.5 0 10 0 +15 83 1 12.5 0 10 1 +15 84 85 30.9 0 10 1 +15 85 28 16.9 0 10 0 +15 86 85 24.4 0 10 1 +15 87 1 41.9 0 10 1 +15 88 6 16.7 0 10 1 +15 89 170 43.3 0 10 1 +15 90 1 11 0 10 1 +15 91 170 49.8 0 10 1 +15 92 15 12.5 0 10 0 +15 93 1 11.8 0 10 0 +15 94 6 17.4 0 10 0 +15 95 28 21.1 0 10 1 +15 96 28 21.7 0 10 1 +15 97 85 27.3 0 10 0 +15 98 28 31.4 0 10 1 +15 99 6 13.2 0 10 0 +15 100 28 15.5 0 10 1 +15 101 1 18 0 10 0 +15 102 85 19.5 0 10 0 +15 103 6 16.7 0 10 1 +15 104 15 14.2 0 10 0 +15 105 6 13.2 0 10 1 +15 106 6 35.1 0 10 1 +15 107 15 10.5 0 10 1 +15 108 85 26 0 10 0 +16 1 85 10.5 0 10 0 +16 2 85 36.1 0 10 0 +16 3 28 41.1 0 10 1 +16 4 15 12.5 0 10 0 +16 5 6 17.4 0 10 1 +16 6 6 44 0 10 1 +16 7 6 14 0 10 0 +16 8 28 12 0 10 0 +16 9 28 41.1 0 10 1 +16 10 15 18.6 0 10 0 +16 11 85 27.3 0 10 0 +16 12 1 31.6 0 10 1 +16 13 85 45 0 10 1 +16 14 170 38.3 0 10 0 +16 15 28 16.9 0 10 0 +16 16 170 29.7 0 10 0 +16 17 170 15 0 10 0 +16 18 6 14 0 10 1 +16 19 85 18.4 0 10 0 +16 20 170 43.3 0 10 0 +16 21 170 33.9 0 10 0 +16 22 85 26 0 10 0 +16 23 15 10.7 0 10 0 +16 24 15 10.5 0 10 1 +16 25 6 13.2 0 10 0 +16 26 1 10.7 0 10 1 +16 27 28 15.5 0 10 0 +16 28 28 13.4 0 10 0 +16 29 170 35.6 0 10 0 +16 30 170 41.3 0 10 1 +16 31 1 31.6 0 10 1 +16 32 28 15.5 0 10 0 +16 33 85 10.5 0 10 0 +16 34 28 21.7 0 10 0 +16 35 1 21.3 0 10 1 +16 36 170 43.3 0 10 0 +16 37 15 49 0 10 1 +16 38 85 30.9 0 10 0 +16 39 1 11 0 10 0 +16 40 170 41.3 0 10 1 +16 41 6 13.2 0 10 0 +16 42 85 24.4 0 10 0 +16 43 170 15 0 10 1 +16 44 1 11.8 0 10 0 +16 45 85 26 0 10 0 +16 46 15 39.6 0 10 1 +16 47 15 39.6 0 10 1 +16 48 6 26.3 0 10 1 +16 49 1 10.7 0 10 0 +16 50 85 24.4 0 10 1 +16 51 15 20.7 0 10 0 +16 52 1 11 0 10 1 +16 53 1 12.5 0 10 1 +16 54 1 11.2 0 10 1 +16 55 28 21.1 0 10 0 +16 56 170 49.8 0 10 1 +16 57 1 21.3 0 10 1 +16 58 28 13.4 0 10 0 +16 59 15 10.5 0 10 0 +16 60 6 17.4 0 10 1 +16 61 28 31.4 0 10 1 +16 62 85 19.5 0 10 0 +16 63 85 36.1 0 10 1 +16 64 15 14.2 0 10 0 +16 65 6 35.1 0 10 1 +16 66 6 10.5 0 10 1 +16 67 15 18.6 0 10 0 +16 68 1 41.9 0 10 1 +16 69 1 18 0 10 1 +16 70 28 21.1 0 10 1 +16 71 170 24.4 0 10 1 +16 72 15 10.7 0 10 1 +16 73 6 16.7 0 10 0 +16 74 170 49.8 0 10 0 +16 75 15 30.1 0 10 1 +16 76 15 14.2 0 10 0 +16 77 15 20.7 0 10 1 +16 78 28 21.7 0 10 1 +16 79 85 27.3 0 10 0 +16 80 170 35.6 0 10 0 +16 81 28 16.9 0 10 0 +16 82 85 18.4 0 10 0 +16 83 28 13 0 10 0 +16 84 6 11 0 10 0 +16 85 6 35.1 0 10 1 +16 86 1 41.9 0 10 1 +16 87 1 12.5 0 10 1 +16 88 6 11 0 10 1 +16 89 6 26.3 0 10 1 +16 90 170 24.4 0 10 0 +16 91 15 30.1 0 10 1 +16 92 6 44 0 10 1 +16 93 15 12.5 0 10 0 +16 94 85 45 0 10 1 +16 95 15 49 0 10 1 +16 96 170 29.7 0 10 0 +16 97 1 11.2 0 10 1 +16 98 6 10.5 0 10 1 +16 99 170 33.9 0 10 0 +16 100 28 13 0 10 0 +16 101 85 19.5 0 10 0 +16 102 170 38.3 0 10 0 +16 103 28 31.4 0 10 1 +16 104 1 18 0 10 1 +16 105 28 12 0 10 0 +16 106 6 16.7 0 10 1 +16 107 1 11.8 0 10 0 +16 108 85 30.9 0 10 0 +17 1 28 21.7 0 10 0 +17 2 170 43.3 0 10 0 +17 3 28 
21.7 0 10 0 +17 4 170 15 0 10 0 +17 5 170 43.3 0 10 0 +17 6 15 18.6 0 10 1 +17 7 85 27.3 0 10 0 +17 8 6 11 0 10 0 +17 9 28 16.9 0 10 0 +17 10 15 30.1 0 10 1 +17 11 15 20.7 0 10 1 +17 12 6 26.3 0 10 1 +17 13 28 12 0 10 0 +17 14 6 10.5 0 10 1 +17 15 1 21.3 0 10 1 +17 16 85 36.1 0 10 1 +17 17 15 18.6 0 10 1 +17 18 28 12 0 10 0 +17 19 170 15 0 10 0 +17 20 28 41.1 0 10 1 +17 21 28 31.4 0 10 1 +17 22 85 45 0 10 1 +17 23 15 12.5 0 10 0 +17 24 6 16.7 0 10 1 +17 25 15 20.7 0 10 0 +17 26 1 11.2 0 10 1 +17 27 15 39.6 0 10 1 +17 28 6 35.1 0 10 1 +17 29 1 10.7 0 10 1 +17 30 15 30.1 0 10 1 +17 31 28 13.4 0 10 0 +17 32 6 16.7 0 10 1 +17 33 170 41.3 0 10 1 +17 34 6 10.5 0 10 0 +17 35 85 19.5 0 10 0 +17 36 6 13.2 0 10 0 +17 37 6 26.3 0 10 1 +17 38 170 49.8 0 10 0 +17 39 1 31.6 0 10 1 +17 40 15 10.7 0 10 1 +17 41 170 24.4 0 10 0 +17 42 6 11 0 10 0 +17 43 15 10.5 0 10 1 +17 44 170 29.7 0 10 0 +17 45 28 15.5 0 10 0 +17 46 85 18.4 0 10 0 +17 47 85 18.4 0 10 0 +17 48 6 14 0 10 1 +17 49 170 38.3 0 10 0 +17 50 15 39.6 0 10 1 +17 51 1 18 0 10 1 +17 52 1 18 0 10 1 +17 53 1 11.8 0 10 1 +17 54 85 45 0 10 1 +17 55 170 33.9 0 10 0 +17 56 170 35.6 0 10 0 +17 57 1 12.5 0 10 0 +17 58 6 44 0 10 1 +17 59 1 11 0 10 0 +17 60 28 15.5 0 10 0 +17 61 15 49 0 10 1 +17 62 170 33.9 0 10 0 +17 63 85 26 0 10 0 +17 64 1 10.7 0 10 1 +17 65 28 16.9 0 10 0 +17 66 6 14 0 10 1 +17 67 15 10.5 0 10 1 +17 68 15 49 0 10 1 +17 69 85 36.1 0 10 0 +17 70 1 31.6 0 10 1 +17 71 1 11 0 10 1 +17 72 28 21.1 0 10 0 +17 73 85 30.9 0 10 0 +17 74 6 44 0 10 1 +17 75 15 12.5 0 10 1 +17 76 170 49.8 0 10 0 +17 77 28 13 0 10 1 +17 78 85 10.5 0 10 0 +17 79 28 13.4 0 10 0 +17 80 1 12.5 0 10 1 +17 81 28 41.1 0 10 1 +17 82 170 38.3 0 10 0 +17 83 170 35.6 0 10 0 +17 84 28 21.1 0 10 1 +17 85 15 10.7 0 10 1 +17 86 1 41.9 0 10 1 +17 87 28 31.4 0 10 1 +17 88 85 10.5 0 10 0 +17 89 1 11.8 0 10 1 +17 90 15 14.2 0 10 1 +17 91 85 24.4 0 10 0 +17 92 6 13.2 0 10 1 +17 93 85 19.5 0 10 0 +17 94 6 17.4 0 10 1 +17 95 85 30.9 0 10 1 +17 96 170 24.4 0 10 0 +17 97 28 13 0 10 0 +17 98 6 17.4 0 10 1 +17 99 170 41.3 0 10 0 +17 100 85 26 0 10 1 +17 101 85 24.4 0 10 0 +17 102 1 11.2 0 10 1 +17 103 85 27.3 0 10 1 +17 104 6 35.1 0 10 1 +17 105 170 29.7 0 10 0 +17 106 1 41.9 0 10 1 +17 107 1 21.3 0 10 1 +17 108 15 14.2 0 10 1 +18 1 170 43.3 0 10 1 +18 2 85 30.9 0 10 1 +18 3 6 14 0 10 1 +18 4 28 31.4 0 10 1 +18 5 170 38.3 0 10 1 +18 6 15 14.2 0 10 1 +18 7 6 44 0 10 1 +18 8 6 11 0 10 1 +18 9 85 19.5 0 10 1 +18 10 15 20.7 0 10 1 +18 11 6 13.2 0 10 1 +18 12 170 15 0 10 0 +18 13 85 26 0 10 1 +18 14 1 18 0 10 1 +18 15 15 14.2 0 10 1 +18 16 85 36.1 0 10 1 +18 17 1 18 0 10 0 +18 18 15 49 0 10 1 +18 19 170 49.8 0 10 1 +18 20 6 35.1 0 10 1 +18 21 85 10.5 0 10 0 +18 22 28 13.4 0 10 0 +18 23 15 20.7 0 10 1 +18 24 85 45 0 10 1 +18 25 15 39.6 0 10 1 +18 26 15 12.5 0 10 1 +18 27 1 11.8 0 10 1 +18 28 1 21.3 0 10 1 +18 29 6 26.3 0 10 1 +18 30 15 12.5 0 10 1 +18 31 6 17.4 0 10 1 +18 32 28 16.9 0 10 1 +18 33 170 41.3 0 10 0 +18 34 170 24.4 0 10 0 +18 35 15 10.7 0 10 0 +18 36 1 10.7 0 10 0 +18 37 6 35.1 0 10 1 +18 38 170 38.3 0 10 1 +18 39 6 44 0 10 1 +18 40 15 30.1 0 10 1 +18 41 28 13 0 10 0 +18 42 15 49 0 10 1 +18 43 6 11 0 10 0 +18 44 15 39.6 0 10 1 +18 45 15 10.7 0 10 0 +18 46 1 11 0 10 1 +18 47 28 21.1 0 10 1 +18 48 28 13 0 10 0 +18 49 1 11.2 0 10 1 +18 50 28 12 0 10 1 +18 51 6 16.7 0 10 1 +18 52 85 27.3 0 10 1 +18 53 170 49.8 0 10 1 +18 54 28 21.7 0 10 1 +18 55 15 10.5 0 10 0 +18 56 170 29.7 0 10 0 +18 57 85 10.5 0 10 0 +18 58 1 11 0 10 1 +18 59 6 14 0 10 1 +18 60 170 33.9 0 10 0 +18 61 170 35.6 0 10 1 +18 
62 15 18.6 0 10 1 +18 63 6 26.3 0 10 1 +18 64 85 18.4 0 10 0 +18 65 1 41.9 0 10 1 +18 66 28 12 0 10 1 +18 67 6 16.7 0 10 1 +18 68 170 24.4 0 10 1 +18 69 15 18.6 0 10 1 +18 70 6 17.4 0 10 1 +18 71 85 18.4 0 10 0 +18 72 1 21.3 0 10 1 +18 73 28 41.1 0 10 1 +18 74 85 27.3 0 10 0 +18 75 85 36.1 0 10 1 +18 76 170 35.6 0 10 0 +18 77 28 21.1 0 10 1 +18 78 170 43.3 0 10 1 +18 79 28 21.7 0 10 1 +18 80 85 24.4 0 10 1 +18 81 28 31.4 0 10 1 +18 82 85 45 0 10 1 +18 83 15 10.5 0 10 0 +18 84 6 13.2 0 10 1 +18 85 1 31.6 0 10 1 +18 86 1 31.6 0 10 1 +18 87 85 30.9 0 10 1 +18 88 85 19.5 0 10 1 +18 89 85 24.4 0 10 1 +18 90 28 13.4 0 10 0 +18 91 170 29.7 0 10 1 +18 92 170 33.9 0 10 1 +18 93 28 41.1 0 10 1 +18 94 170 15 0 10 0 +18 95 85 26 0 10 0 +18 96 170 41.3 0 10 1 +18 97 1 12.5 0 10 1 +18 98 1 12.5 0 10 1 +18 99 28 15.5 0 10 1 +18 100 1 11.2 0 10 0 +18 101 6 10.5 0 10 0 +18 102 1 10.7 0 10 1 +18 103 1 11.8 0 10 1 +18 104 28 16.9 0 10 1 +18 105 6 10.5 0 10 1 +18 106 1 41.9 0 10 1 +18 107 28 15.5 0 10 0 +18 108 15 30.1 0 10 1 +19 1 28 41.1 0 10 1 +19 2 170 41.3 0 10 0 +19 3 6 10.5 0 10 0 +19 4 6 44 0 10 1 +19 5 15 12.5 0 10 1 +19 6 28 16.9 0 10 1 +19 7 6 14 0 10 0 +19 8 6 16.7 0 10 1 +19 9 1 31.6 0 10 1 +19 10 1 10.7 0 10 1 +19 11 85 19.5 0 10 1 +19 12 28 16.9 0 10 0 +19 13 170 24.4 0 10 0 +19 14 15 14.2 0 10 1 +19 15 85 26 0 10 0 +19 16 85 36.1 0 10 1 +19 17 15 30.1 0 10 1 +19 18 6 10.5 0 10 0 +19 19 170 24.4 0 10 0 +19 20 15 12.5 0 10 0 +19 21 28 21.7 0 10 0 +19 22 170 15 0 10 0 +19 23 85 10.5 0 10 0 +19 24 1 11.8 0 10 0 +19 25 1 12.5 0 10 1 +19 26 28 15.5 0 10 0 +19 27 6 26.3 0 10 1 +19 28 6 35.1 0 10 1 +19 29 15 18.6 0 10 0 +19 30 170 29.7 0 10 0 +19 31 85 19.5 0 10 0 +19 32 170 43.3 0 10 1 +19 33 28 31.4 0 10 1 +19 34 28 13.4 0 10 1 +19 35 85 26 0 10 0 +19 36 85 10.5 0 10 0 +19 37 85 45 0 10 1 +19 38 28 13 0 10 0 +19 39 170 38.3 0 10 0 +19 40 1 11 0 10 0 +19 41 1 10.7 0 10 0 +19 42 170 29.7 0 10 1 +19 43 6 26.3 0 10 1 +19 44 1 11.2 0 10 0 +19 45 28 41.1 0 10 1 +19 46 1 12.5 0 10 0 +19 47 85 30.9 0 10 1 +19 48 170 33.9 0 10 0 +19 49 28 13 0 10 0 +19 50 170 33.9 0 10 1 +19 51 170 49.8 0 10 1 +19 52 170 35.6 0 10 0 +19 53 15 49 0 10 1 +19 54 1 11.2 0 10 1 +19 55 6 11 0 10 0 +19 56 6 17.4 0 10 1 +19 57 15 49 0 10 1 +19 58 1 11 0 10 0 +19 59 85 18.4 0 10 0 +19 60 15 20.7 0 10 1 +19 61 170 38.3 0 10 0 +19 62 15 39.6 0 10 1 +19 63 6 35.1 0 10 1 +19 64 28 21.1 0 10 1 +19 65 15 39.6 0 10 1 +19 66 15 10.7 0 10 0 +19 67 1 31.6 0 10 1 +19 68 1 41.9 0 10 1 +19 69 170 49.8 0 10 0 +19 70 170 35.6 0 10 0 +19 71 85 36.1 0 10 0 +19 72 28 13.4 0 10 1 +19 73 1 18 0 10 1 +19 74 85 18.4 0 10 1 +19 75 85 24.4 0 10 0 +19 76 170 43.3 0 10 1 +19 77 15 18.6 0 10 1 +19 78 6 13.2 0 10 0 +19 79 6 44 0 10 1 +19 80 15 10.5 0 10 0 +19 81 6 14 0 10 1 +19 82 85 27.3 0 10 1 +19 83 15 30.1 0 10 1 +19 84 6 16.7 0 10 1 +19 85 28 31.4 0 10 1 +19 86 28 21.1 0 10 1 +19 87 15 10.7 0 10 0 +19 88 6 13.2 0 10 1 +19 89 170 41.3 0 10 0 +19 90 28 21.7 0 10 1 +19 91 85 24.4 0 10 1 +19 92 28 12 0 10 0 +19 93 1 11.8 0 10 0 +19 94 28 12 0 10 0 +19 95 1 18 0 10 1 +19 96 28 15.5 0 10 0 +19 97 1 21.3 0 10 1 +19 98 1 21.3 0 10 1 +19 99 1 41.9 0 10 1 +19 100 85 45 0 10 1 +19 101 15 10.5 0 10 1 +19 102 6 11 0 10 1 +19 103 15 14.2 0 10 0 +19 104 15 20.7 0 10 1 +19 105 85 30.9 0 10 0 +19 106 85 27.3 0 10 1 +19 107 6 17.4 0 10 1 +19 108 170 15 0 10 0 +20 1 6 14 0 10 1 +20 2 1 12.5 0 10 0 +20 3 6 16.7 0 10 1 +20 4 15 14.2 0 10 0 +20 5 170 24.4 0 10 0 +20 6 85 18.4 0 10 0 +20 7 28 41.1 0 10 1 +20 8 170 43.3 0 10 1 +20 9 1 21.3 0 10 1 +20 10 85 26 0 10 0 +20 11 1 11 0 
10 0
+20 12 6 10.5 0 10 0
+20 13 15 20.7 0 10 1
+20 14 28 13.4 0 10 1
+20 15 170 35.6 0 10 1
+20 16 1 11 0 10 1
+20 17 6 44 0 10 1
+20 18 6 26.3 0 10 1
+20 19 15 39.6 0 10 1
+20 20 28 41.1 0 10 1
+20 21 85 10.5 0 10 0
+20 22 6 16.7 0 10 0
+20 23 1 11.8 0 10 1
+20 24 28 12 0 10 1
+20 25 1 18 0 10 1
+20 26 170 29.7 0 10 0
+20 27 28 21.7 0 10 1
+20 28 15 10.7 0 10 1
+20 29 170 41.3 0 10 1
+20 30 85 19.5 0 10 0
+20 31 85 45 0 10 1
+20 32 170 33.9 0 10 1
+20 33 28 13.4 0 10 0
+20 34 85 27.3 0 10 1
+20 35 28 13 0 10 0
+20 36 15 18.6 0 10 0
+20 37 15 12.5 0 10 1
+20 38 170 24.4 0 10 0
+20 39 6 44 0 10 1
+20 40 85 30.9 0 10 1
+20 41 6 35.1 0 10 1
+20 42 6 26.3 0 10 1
+20 43 6 13.2 0 10 1
+20 44 15 10.7 0 10 1
+20 45 28 21.7 0 10 1
+20 46 170 33.9 0 10 1
+20 47 15 20.7 0 10 1
+20 48 1 10.7 0 10 1
+20 49 28 16.9 0 10 1
+20 50 1 11.2 0 10 0
+20 51 1 12.5 0 10 1
+20 52 15 18.6 0 10 0
+20 53 28 21.1 0 10 1
+20 54 15 14.2 0 10 1
+20 55 85 18.4 0 10 0
+20 56 170 29.7 0 10 0
+20 57 85 45 0 10 1
+20 58 28 31.4 0 10 1
+20 59 15 30.1 0 10 1
+20 60 1 11.8 0 10 1
+20 61 28 31.4 0 10 1
+20 62 85 19.5 0 10 0
+20 63 6 14 0 10 1
+20 64 1 31.6 0 10 1
+20 65 1 10.7 0 10 1
+20 66 15 49 0 10 1
+20 67 1 21.3 0 10 1
+20 68 6 35.1 0 10 1
+20 69 15 10.5 0 10 1
+20 70 85 10.5 0 10 0
+20 71 6 13.2 0 10 1
+20 72 170 49.8 0 10 1
+20 73 170 35.6 0 10 1
+20 74 85 24.4 0 10 1
+20 75 6 11 0 10 1
+20 76 170 49.8 0 10 1
+20 77 15 30.1 0 10 1
+20 78 85 36.1 0 10 1
+20 79 85 26 0 10 1
+20 80 6 17.4 0 10 1
+20 81 170 15 0 10 0
+20 82 15 12.5 0 10 1
+20 83 85 30.9 0 10 0
+20 84 6 10.5 0 10 1
+20 85 1 41.9 0 10 1
+20 86 15 39.6 0 10 1
+20 87 170 43.3 0 10 1
+20 88 28 13 0 10 0
+20 89 28 15.5 0 10 0
+20 90 85 27.3 0 10 1
+20 91 28 15.5 0 10 1
+20 92 170 38.3 0 10 1
+20 93 15 10.5 0 10 1
+20 94 170 15 0 10 1
+20 95 1 41.9 0 10 1
+20 96 1 31.6 0 10 1
+20 97 6 11 0 10 0
+20 98 1 11.2 0 10 0
+20 99 170 41.3 0 10 1
+20 100 1 18 0 10 1
+20 101 28 12 0 10 1
+20 102 28 21.1 0 10 1
+20 103 28 16.9 0 10 1
+20 104 85 36.1 0 10 1
+20 105 85 24.4 0 10 1
+20 106 6 17.4 0 10 1
+20 107 170 38.3 0 10 1
+20 108 15 49 0 10 1
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/dd_single_exampleData.txt b/Python/hbayesdm/common/extdata/dd_single_exampleData.txt
new file mode 100644
index 00000000..a729477e
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/dd_single_exampleData.txt
@@ -0,0 +1,109 @@
+subjID trial delay_later amount_later delay_sooner amount_sooner choice
+1 1 6 10.5 0 10 1
+1 2 170 38.3 0 10 1
+1 3 28 13.4 0 10 1
+1 4 28 31.4 0 10 1
+1 5 85 30.9 0 10 1
+1 6 28 21.1 0 10 1
+1 7 28 13 0 10 1
+1 8 1 21.3 0 10 1
+1 9 28 21.1 0 10 1
+1 10 15 30.1 0 10 1
+1 11 1 10.7 0 10 1
+1 12 85 36.1 0 10 1
+1 13 15 10.5 0 10 1
+1 14 6 16.7 0 10 1
+1 15 1 11 0 10 1
+1 16 15 14.2 0 10 1
+1 17 15 12.5 0 10 1
+1 18 15 20.7 0 10 1
+1 19 6 11 0 10 0
+1 20 28 16.9 0 10 1
+1 21 15 30.1 0 10 1
+1 22 85 24.4 0 10 1
+1 23 170 41.3 0 10 1
+1 24 15 14.2 0 10 1
+1 25 6 10.5 0 10 1
+1 26 170 24.4 0 10 1
+1 27 15 49 0 10 1
+1 28 170 29.7 0 10 1
+1 29 1 11.8 0 10 0
+1 30 6 13.2 0 10 0
+1 31 85 30.9 0 10 1
+1 32 6 44 0 10 1
+1 33 6 35.1 0 10 1
+1 34 28 15.5 0 10 1
+1 35 170 43.3 0 10 1
+1 36 170 33.9 0 10 1
+1 37 1 11 0 10 1
+1 38 1 21.3 0 10 1
+1 39 85 45 0 10 1
+1 40 15 39.6 0 10 1
+1 41 85 10.5 0 10 0
+1 42 170 15 0 10 1
+1 43 170 49.8 0 10 1
+1 44 170 24.4 0 10 1
+1 45 28 13.4 0 10 1
+1 46 1 31.6 0 10 1
+1 47 170 35.6 0 10 1
+1 48 1 41.9 0 10 1
+1 49 6 17.4 0 10 1
+1 50 85 18.4 0 10 1
+1 51 85 27.3 0 10 1
+1 52 85 26 0 10 1
+1 53 170 38.3 0 10 1
+1 54 28 21.7 0 10 1
+1 55 1 10.7 0 10 1
+1 56 170 49.8 0 10 1
+1 57 1 11.2 0 10 1
+1 58 15 20.7 0 10 1
+1 59 6 44 0 10 1
+1 60 28 41.1 0 10 1
+1 61 28 16.9 0 10 1
+1 62 6 14 0 10 1
+1 63 1 31.6 0 10 1
+1 64 15 18.6 0 10 1
+1 65 28 12 0 10 1
+1 66 6 13.2 0 10 1
+1 67 170 43.3 0 10 1
+1 68 28 31.4 0 10 1
+1 69 85 19.5 0 10 1
+1 70 170 35.6 0 10 1
+1 71 85 18.4 0 10 1
+1 72 1 12.5 0 10 1
+1 73 170 41.3 0 10 1
+1 74 170 15 0 10 0
+1 75 28 12 0 10 0
+1 76 85 36.1 0 10 1
+1 77 1 18 0 10 1
+1 78 85 10.5 0 10 0
+1 79 170 33.9 0 10 1
+1 80 6 26.3 0 10 1
+1 81 85 45 0 10 1
+1 82 28 21.7 0 10 1
+1 83 28 13 0 10 0
+1 84 85 27.3 0 10 1
+1 85 15 18.6 0 10 1
+1 86 15 12.5 0 10 1
+1 87 6 26.3 0 10 1
+1 88 6 11 0 10 1
+1 89 15 10.7 0 10 0
+1 90 6 16.7 0 10 1
+1 91 28 41.1 0 10 1
+1 92 85 26 0 10 1
+1 93 85 24.4 0 10 1
+1 94 1 12.5 0 10 1
+1 95 6 17.4 0 10 1
+1 96 6 35.1 0 10 1
+1 97 6 14 0 10 1
+1 98 15 10.5 0 10 0
+1 99 1 11.8 0 10 1
+1 100 15 10.7 0 10 1
+1 101 15 39.6 0 10 1
+1 102 85 19.5 0 10 1
+1 103 1 11.2 0 10 1
+1 104 170 29.7 0 10 1
+1 105 15 49 0 10 1
+1 106 1 41.9 0 10 1
+1 107 1 18 0 10 1
+1 108 28 15.5 0 10 1
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/gng_exampleData.txt b/Python/hbayesdm/common/extdata/gng_exampleData.txt
new file mode 100644
index 00000000..40e0982a
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/gng_exampleData.txt
@@ -0,0 +1,2401 @@
+trialNum cue keyPressed success congruentOutcome outcome subjID
+1 1 1 1 2 0 1
+2 2 0 1 1 1 1
+3 4 0 1 1 0 1
+4 4 1 0 1 -1 1
+5 4 0 1 1 0 1
+6 1 1 1 1 1 1
+7 3 0 0 1 -1 1
+8 1 1 1 1 1 1
+9 3 1 1 1 0 1
+10 3 0 0 1 -1 1
+11 4 0 1 1 0 1
+12 4 0 1 1 0 1
+13 4 0 1 1 0 1
+14 1 1 1 1 1 1
+15 1 1 1 1 1 1
+16 2 0 1 1 1 1
+17 2 0 1 1 1 1
+18 4 0 1 1 0 1
+19 2 0 1 1 1 1
+20 3 1 1 1 0 1
+21 3 1 1 1 0 1
+22 4 1 0 2 0 1
+23 2 0 1 1 1 1
+24 3 0 0 1 -1 1
+25 1 1 1 1 1 1
+26 3 0 0 1 -1 1
+27 4 0 1 1 0 1
+28 1 1 1 1 1 1
+29 4 1 0 2 0 1
+30 2 0 1 2 0 1
+31 2 0 1 1 1 1
+32 4 1 0 2 0 1
+33 2 0 1 1 1 1
+34 2 0 1 1 1 1
+35 3 1 1 1 0 1
+36 2 0 1 1 1 1
+37 1 1 1 1 1 1
+38 4 0 1 1 0 1
+39 4 0 1 1 0 1
+40 4 1 0 1 -1 1
+41 3 1 1 1 0 1
+42 2 0 1 1 1 1
+43 2 0 1 1 1 1
+44 2 0 1 1 1 1
+45 2 0 1 1 1 1
+46 3 1 1 1 0 1
+47 2 0 1 1 1 1
+48 2 0 1 1 1 1
+49 1 1 1 1 1 1
+50 3 1 1 2 -1 1
+51 2 1 0 1 0 1
+52 1 1 1 1 1 1
+53 3 1 1 1 0 1
+54 4 0 1 1 0 1
+55 3 1 1 2 -1 1
+56 1 1 1 1 1 1
+57 3 0 0 1 -1 1
+58 1 1 1 1 1 1
+59 3 1 1 2 -1 1
+60 1 1 1 1 1 1
+61 3 1 1 1 0 1
+62 4 1 0 1 -1 1
+63 1 1 1 1 1 1
+64 1 1 1 1 1 1
+65 4 0 1 1 0 1
+66 1 1 1 1 1 1
+67 3 1 1 1 0 1
+68 2 0 1 1 1 1
+69 3 1 1 2 -1 1
+70 1 1 1 1 1 1
+71 2 0 1 1 1 1
+72 2 0 1 1 1 1
+73 1 1 1 1 1 1
+74 4 0 1 2 -1 1
+75 2 0 1 2 0 1
+76 1 1 1 2 0 1
+77 4 1 0 1 -1 1
+78 1 1 1 1 1 1
+79 3 1 1 2 -1 1
+80 3 1 1 1 0 1
+81 1 1 1 1 1 1
+82 1 1 1 1 1 1
+83 3 0 0 1 -1 1
+84 2 0 1 1 1 1
+85 4 0 1 1 0 1
+86 3 1 1 1 0 1
+87 4 0 1 1 0 1
+88 2 0 1 1 1 1
+89 1 1 1 1 1 1
+90 4 0 1 1 0 1
+91 1 1 1 2 0 1
+92 2 0 1 2 0 1
+93 1 1 1 1 1 1
+94 4 0 1 1 0 1
+95 2 0 1 1 1 1
+96 4 1 0 1 -1 1
+97 3 1 1 1 0 1
+98 3 1 1 1 0 1
+99 3 1 1 1 0 1
+100 1 1 1 1 1 1
+101 2 0 1 1 1 1
+102 4 0 1 2 -1 1
+103 4 0 1 1 0 1
+104 3 0 0 1 -1 1
+105 1 1 1 1 1 1
+106 4 0 1 1 0 1
+107 2 0 1 1 1 1
+108 2 0 1 1 1 1
+109 3 1 1 1 0 1
+110 4 0 1 1 0 1
+111 3 1 1 1 0 1
+112 3 1 1 1 0 1
+113 1 1 1 1 1 1
+114 3 1 1 1 0 1
+115 4 0 1 2 -1 1
+116 1 0 0 1 0 1
+117 1 1 1 1 1 1
+118 1 1 1 1 1 1
+119 3 0 0 1 -1 1
+120 2 0 1 1 1 1
+121 2 0 1 2 0 1
+122 4 0 1 1 0 1
+123 1 1 1 2 0 1
+124 4 0 1 1 0 1
+125 3 1 1 2 -1 1
+126 2 0 1 1 1 1
+127 4 0 1 1 0 1
+128 4 0 1 1 0 1
+129 4 0 1 1 0 1 +130 4 1 0 1 -1 1 +131 2 0 1 1 1 1 +132 3 1 1 2 -1 1 +133 1 0 0 1 0 1 +134 1 1 1 1 1 1 +135 3 1 1 1 0 1 +136 3 1 1 1 0 1 +137 4 0 1 2 -1 1 +138 4 0 1 1 0 1 +139 3 1 1 1 0 1 +140 3 1 1 2 -1 1 +141 3 0 0 1 -1 1 +142 2 0 1 2 0 1 +143 2 0 1 2 0 1 +144 2 0 1 2 0 1 +145 4 0 1 1 0 1 +146 1 1 1 2 0 1 +147 3 1 1 1 0 1 +148 3 1 1 1 0 1 +149 2 0 1 2 0 1 +150 1 1 1 1 1 1 +151 1 1 1 2 0 1 +152 1 1 1 1 1 1 +153 3 1 1 1 0 1 +154 4 0 1 1 0 1 +155 1 1 1 1 1 1 +156 3 1 1 1 0 1 +157 4 1 0 1 -1 1 +158 4 0 1 1 0 1 +159 3 1 1 1 0 1 +160 2 0 1 1 1 1 +161 2 0 1 1 1 1 +162 2 0 1 2 0 1 +163 3 1 1 1 0 1 +164 4 0 1 2 -1 1 +165 3 1 1 1 0 1 +166 4 0 1 2 -1 1 +167 2 0 1 1 1 1 +168 2 0 1 2 0 1 +169 1 1 1 1 1 1 +170 4 0 1 1 0 1 +171 3 1 1 2 -1 1 +172 3 1 1 2 -1 1 +173 2 0 1 1 1 1 +174 3 1 1 1 0 1 +175 4 1 0 1 -1 1 +176 2 0 1 1 1 1 +177 4 0 1 1 0 1 +178 2 0 1 2 0 1 +179 4 0 1 1 0 1 +180 1 1 1 2 0 1 +181 1 1 1 1 1 1 +182 3 1 1 1 0 1 +183 2 0 1 1 1 1 +184 1 1 1 1 1 1 +185 4 0 1 1 0 1 +186 3 1 1 1 0 1 +187 1 1 1 1 1 1 +188 3 1 1 2 -1 1 +189 4 0 1 1 0 1 +190 4 0 1 1 0 1 +191 2 0 1 1 1 1 +192 2 0 1 1 1 1 +193 1 1 1 1 1 1 +194 2 0 1 1 1 1 +195 2 0 1 2 0 1 +196 2 0 1 1 1 1 +197 1 1 1 1 1 1 +198 4 0 1 1 0 1 +199 3 1 1 1 0 1 +200 3 1 1 1 0 1 +201 3 1 1 1 0 1 +202 1 1 1 1 1 1 +203 3 1 1 1 0 1 +204 2 0 1 1 1 1 +205 4 0 1 2 -1 1 +206 2 0 1 1 1 1 +207 4 0 1 1 0 1 +208 4 0 1 1 0 1 +209 1 1 1 1 1 1 +210 3 1 1 1 0 1 +211 3 1 1 1 0 1 +212 1 1 1 1 1 1 +213 1 1 1 1 1 1 +214 2 0 1 1 1 1 +215 1 1 1 1 1 1 +216 3 1 1 1 0 1 +217 4 0 1 2 -1 1 +218 2 0 1 1 1 1 +219 2 0 1 1 1 1 +220 1 1 1 2 0 1 +221 2 0 1 1 1 1 +222 1 1 1 1 1 1 +223 1 1 1 1 1 1 +224 4 0 1 2 -1 1 +225 1 1 1 1 1 1 +226 2 0 1 1 1 1 +227 4 1 0 1 -1 1 +228 3 1 1 1 0 1 +229 4 1 0 1 -1 1 +230 1 1 1 1 1 1 +231 2 0 1 1 1 1 +232 1 1 1 1 1 1 +233 3 1 1 1 0 1 +234 2 0 1 1 1 1 +235 1 1 1 1 1 1 +236 4 1 0 1 -1 1 +237 2 0 1 1 1 1 +238 1 1 1 1 1 1 +239 4 0 1 1 0 1 +240 1 1 1 1 1 1 +1 1 1 1 1 1 2 +2 1 1 1 1 1 2 +3 3 1 1 1 0 2 +4 3 0 0 1 -1 2 +5 1 1 1 1 1 2 +6 4 1 0 1 -1 2 +7 4 0 1 1 0 2 +8 3 1 1 1 0 2 +9 3 1 1 1 0 2 +10 3 0 0 1 -1 2 +11 1 1 1 1 1 2 +12 4 0 1 1 0 2 +13 1 1 1 1 1 2 +14 4 0 1 1 0 2 +15 4 0 1 1 0 2 +16 3 1 1 1 0 2 +17 2 0 1 1 1 2 +18 4 0 1 2 -1 2 +19 1 1 1 1 1 2 +20 2 0 1 1 1 2 +21 4 0 1 1 0 2 +22 4 1 0 2 0 2 +23 1 1 1 2 0 2 +24 4 0 1 1 0 2 +25 2 0 1 1 1 2 +26 2 0 1 1 1 2 +27 2 0 1 2 0 2 +28 1 1 1 1 1 2 +29 2 0 1 1 1 2 +30 1 1 1 1 1 2 +31 4 0 1 2 -1 2 +32 2 0 1 2 0 2 +33 3 1 1 2 -1 2 +34 3 1 1 2 -1 2 +35 2 0 1 1 1 2 +36 3 0 0 1 -1 2 +37 4 1 0 1 -1 2 +38 4 1 0 1 -1 2 +39 4 0 1 1 0 2 +40 1 1 1 1 1 2 +41 4 0 1 1 0 2 +42 3 1 1 1 0 2 +43 3 0 0 1 -1 2 +44 1 1 1 2 0 2 +45 3 1 1 1 0 2 +46 4 0 1 1 0 2 +47 4 0 1 1 0 2 +48 2 0 1 1 1 2 +49 2 0 1 1 1 2 +50 2 0 1 1 1 2 +51 1 1 1 1 1 2 +52 3 1 1 1 0 2 +53 3 1 1 1 0 2 +54 4 1 0 1 -1 2 +55 1 1 1 1 1 2 +56 1 1 1 1 1 2 +57 2 0 1 1 1 2 +58 1 1 1 1 1 2 +59 1 1 1 2 0 2 +60 3 1 1 1 0 2 +61 2 0 1 1 1 2 +62 1 1 1 1 1 2 +63 3 1 1 2 -1 2 +64 3 1 1 1 0 2 +65 1 1 1 2 0 2 +66 2 0 1 2 0 2 +67 2 0 1 2 0 2 +68 4 0 1 1 0 2 +69 3 1 1 1 0 2 +70 2 0 1 1 1 2 +71 4 0 1 1 0 2 +72 4 0 1 1 0 2 +73 4 0 1 2 -1 2 +74 1 1 1 1 1 2 +75 4 1 0 1 -1 2 +76 4 0 1 1 0 2 +77 3 1 1 1 0 2 +78 4 0 1 2 -1 2 +79 3 0 0 1 -1 2 +80 4 0 1 1 0 2 +81 3 1 1 2 -1 2 +82 3 1 1 1 0 2 +83 3 1 1 2 -1 2 +84 3 1 1 1 0 2 +85 3 1 1 1 0 2 +86 3 1 1 1 0 2 +87 2 0 1 1 1 2 +88 4 0 1 1 0 2 +89 4 0 1 1 0 2 +90 2 0 1 2 0 2 +91 4 1 0 1 -1 2 +92 1 1 1 2 0 2 +93 4 0 1 2 -1 2 +94 2 0 1 2 0 2 +95 2 0 1 1 1 2 +96 3 1 1 2 -1 2 +97 2 0 1 1 1 2 +98 1 1 1 1 1 2 +99 1 1 1 1 1 2 +100 1 1 1 2 0 2 +101 1 1 1 1 1 2 
+102 1 1 1 2 0 2 +103 1 1 1 1 1 2 +104 4 1 0 2 0 2 +105 4 0 1 1 0 2 +106 2 0 1 2 0 2 +107 3 1 1 1 0 2 +108 3 1 1 2 -1 2 +109 3 1 1 1 0 2 +110 1 1 1 2 0 2 +111 3 1 1 1 0 2 +112 2 0 1 1 1 2 +113 3 1 1 2 -1 2 +114 1 1 1 1 1 2 +115 3 1 1 2 -1 2 +116 3 0 0 2 0 2 +117 4 1 0 1 -1 2 +118 2 0 1 1 1 2 +119 2 0 1 2 0 2 +120 4 0 1 1 0 2 +121 1 1 1 1 1 2 +122 2 0 1 1 1 2 +123 4 0 1 2 -1 2 +124 3 0 0 1 -1 2 +125 3 1 1 1 0 2 +126 4 0 1 1 0 2 +127 2 0 1 2 0 2 +128 3 1 1 1 0 2 +129 4 1 0 1 -1 2 +130 4 0 1 1 0 2 +131 2 0 1 1 1 2 +132 2 0 1 2 0 2 +133 3 1 1 1 0 2 +134 3 0 0 1 -1 2 +135 1 1 1 1 1 2 +136 4 0 1 2 -1 2 +137 2 0 1 1 1 2 +138 4 0 1 1 0 2 +139 4 0 1 1 0 2 +140 1 1 1 1 1 2 +141 3 1 1 2 -1 2 +142 2 0 1 1 1 2 +143 2 1 0 2 1 2 +144 4 0 1 1 0 2 +145 2 0 1 1 1 2 +146 4 0 1 2 -1 2 +147 2 0 1 2 0 2 +148 2 0 1 2 0 2 +149 1 1 1 1 1 2 +150 3 1 1 2 -1 2 +151 2 0 1 1 1 2 +152 4 1 0 2 0 2 +153 4 1 0 2 0 2 +154 3 1 1 1 0 2 +155 3 1 1 1 0 2 +156 2 0 1 1 1 2 +157 1 1 1 1 1 2 +158 2 0 1 2 0 2 +159 1 1 1 2 0 2 +160 1 1 1 1 1 2 +161 1 1 1 1 1 2 +162 1 1 1 1 1 2 +163 3 1 1 2 -1 2 +164 3 0 0 1 -1 2 +165 1 1 1 2 0 2 +166 3 1 1 2 -1 2 +167 2 0 1 1 1 2 +168 4 1 0 1 -1 2 +169 2 0 1 1 1 2 +170 1 1 1 1 1 2 +171 3 1 1 1 0 2 +172 1 1 1 1 1 2 +173 4 1 0 1 -1 2 +174 1 1 1 1 1 2 +175 3 1 1 2 -1 2 +176 1 1 1 1 1 2 +177 4 0 1 1 0 2 +178 2 0 1 1 1 2 +179 3 1 1 2 -1 2 +180 2 0 1 1 1 2 +181 3 1 1 2 -1 2 +182 1 1 1 1 1 2 +183 3 0 0 1 -1 2 +184 4 0 1 1 0 2 +185 3 1 1 2 -1 2 +186 2 0 1 1 1 2 +187 2 0 1 1 1 2 +188 1 1 1 1 1 2 +189 1 1 1 1 1 2 +190 1 1 1 2 0 2 +191 2 0 1 1 1 2 +192 2 0 1 1 1 2 +193 3 1 1 2 -1 2 +194 1 1 1 1 1 2 +195 2 0 1 1 1 2 +196 2 0 1 1 1 2 +197 1 1 1 1 1 2 +198 3 0 0 1 -1 2 +199 1 1 1 1 1 2 +200 4 0 1 1 0 2 +201 2 0 1 1 1 2 +202 3 0 0 1 -1 2 +203 4 0 1 2 -1 2 +204 1 1 1 1 1 2 +205 1 1 1 1 1 2 +206 2 0 1 1 1 2 +207 3 1 1 1 0 2 +208 1 1 1 1 1 2 +209 2 0 1 1 1 2 +210 1 1 1 1 1 2 +211 4 1 0 2 0 2 +212 4 0 1 1 0 2 +213 4 1 0 1 -1 2 +214 1 1 1 1 1 2 +215 3 1 1 1 0 2 +216 2 0 1 1 1 2 +217 1 1 1 1 1 2 +218 2 0 1 1 1 2 +219 4 0 1 1 0 2 +220 4 0 1 2 -1 2 +221 4 1 0 1 -1 2 +222 4 1 0 1 -1 2 +223 1 1 1 2 0 2 +224 2 0 1 1 1 2 +225 1 1 1 1 1 2 +226 1 1 1 2 0 2 +227 1 1 1 1 1 2 +228 2 1 0 1 0 2 +229 2 0 1 1 1 2 +230 2 0 1 1 1 2 +231 2 0 1 1 1 2 +232 4 1 0 1 -1 2 +233 3 1 1 1 0 2 +234 3 1 1 1 0 2 +235 4 0 1 2 -1 2 +236 1 1 1 1 1 2 +237 4 0 1 1 0 2 +238 2 0 1 2 0 2 +239 3 1 1 1 0 2 +240 2 0 1 1 1 2 +1 3 0 0 1 -1 3 +2 2 0 1 1 1 3 +3 1 0 0 1 0 3 +4 3 1 1 1 0 3 +5 2 0 1 1 1 3 +6 1 0 0 1 0 3 +7 1 1 1 1 1 3 +8 1 1 1 1 1 3 +9 1 1 1 2 0 3 +10 1 1 1 1 1 3 +11 1 0 0 2 1 3 +12 4 1 0 1 -1 3 +13 1 1 1 1 1 3 +14 4 0 1 1 0 3 +15 2 0 1 2 0 3 +16 3 1 1 1 0 3 +17 2 0 1 1 1 3 +18 3 1 1 2 -1 3 +19 3 1 1 2 -1 3 +20 1 1 1 1 1 3 +21 1 1 1 1 1 3 +22 1 1 1 1 1 3 +23 3 0 0 2 0 3 +24 3 0 0 2 0 3 +25 1 1 1 1 1 3 +26 4 0 1 1 0 3 +27 4 0 1 1 0 3 +28 1 1 1 1 1 3 +29 3 1 1 1 0 3 +30 4 0 1 1 0 3 +31 2 0 1 1 1 3 +32 3 0 0 1 -1 3 +33 2 0 1 1 1 3 +34 4 0 1 1 0 3 +35 4 1 0 1 -1 3 +36 3 0 0 2 0 3 +37 1 1 1 1 1 3 +38 1 1 1 1 1 3 +39 1 0 0 1 0 3 +40 3 0 0 1 -1 3 +41 4 1 0 1 -1 3 +42 1 1 1 1 1 3 +43 3 0 0 1 -1 3 +44 2 0 1 1 1 3 +45 1 1 1 1 1 3 +46 2 1 0 2 1 3 +47 2 0 1 1 1 3 +48 4 0 1 2 -1 3 +49 2 0 1 1 1 3 +50 3 1 1 2 -1 3 +51 1 1 1 2 0 3 +52 4 0 1 2 -1 3 +53 1 1 1 1 1 3 +54 4 1 0 1 -1 3 +55 2 0 1 1 1 3 +56 3 1 1 1 0 3 +57 3 1 1 1 0 3 +58 2 0 1 1 1 3 +59 3 0 0 2 0 3 +60 4 0 1 1 0 3 +61 1 1 1 1 1 3 +62 2 0 1 1 1 3 +63 3 1 1 1 0 3 +64 2 1 0 1 0 3 +65 2 0 1 1 1 3 +66 4 0 1 2 -1 3 +67 2 0 1 1 1 3 +68 2 0 1 1 1 3 +69 4 1 0 1 -1 3 +70 4 0 1 1 0 3 +71 2 0 1 2 0 3 +72 1 1 1 1 1 3 
+73 4 0 1 1 0 3 +74 3 0 0 2 0 3 +75 3 1 1 1 0 3 +76 2 0 1 1 1 3 +77 3 1 1 1 0 3 +78 4 1 0 1 -1 3 +79 3 1 1 2 -1 3 +80 4 0 1 2 -1 3 +81 3 1 1 1 0 3 +82 1 1 1 1 1 3 +83 2 0 1 1 1 3 +84 3 0 0 1 -1 3 +85 2 0 1 1 1 3 +86 3 0 0 1 -1 3 +87 2 0 1 1 1 3 +88 2 0 1 1 1 3 +89 3 1 1 1 0 3 +90 4 0 1 2 -1 3 +91 4 1 0 1 -1 3 +92 2 0 1 1 1 3 +93 4 0 1 1 0 3 +94 1 1 1 1 1 3 +95 2 0 1 2 0 3 +96 1 1 1 1 1 3 +97 2 0 1 2 0 3 +98 4 0 1 1 0 3 +99 4 0 1 1 0 3 +100 4 0 1 2 -1 3 +101 3 1 1 1 0 3 +102 2 0 1 1 1 3 +103 2 0 1 1 1 3 +104 4 1 0 2 0 3 +105 2 0 1 1 1 3 +106 1 1 1 2 0 3 +107 1 1 1 1 1 3 +108 2 0 1 1 1 3 +109 2 0 1 2 0 3 +110 2 0 1 2 0 3 +111 2 0 1 2 0 3 +112 1 1 1 1 1 3 +113 2 0 1 1 1 3 +114 4 0 1 1 0 3 +115 1 1 1 1 1 3 +116 2 0 1 1 1 3 +117 4 0 1 2 -1 3 +118 3 0 0 1 -1 3 +119 3 1 1 2 -1 3 +120 1 1 1 1 1 3 +121 4 0 1 1 0 3 +122 1 1 1 1 1 3 +123 2 0 1 2 0 3 +124 1 1 1 1 1 3 +125 4 0 1 2 -1 3 +126 3 1 1 1 0 3 +127 2 0 1 2 0 3 +128 3 1 1 1 0 3 +129 4 0 1 1 0 3 +130 3 1 1 2 -1 3 +131 2 0 1 1 1 3 +132 3 1 1 1 0 3 +133 2 0 1 1 1 3 +134 2 0 1 2 0 3 +135 3 1 1 1 0 3 +136 3 1 1 2 -1 3 +137 1 1 1 1 1 3 +138 2 0 1 1 1 3 +139 1 1 1 2 0 3 +140 4 0 1 2 -1 3 +141 2 0 1 1 1 3 +142 1 1 1 2 0 3 +143 3 1 1 2 -1 3 +144 3 1 1 1 0 3 +145 2 0 1 1 1 3 +146 3 1 1 1 0 3 +147 2 1 0 1 0 3 +148 4 0 1 1 0 3 +149 1 1 1 1 1 3 +150 1 1 1 2 0 3 +151 1 1 1 1 1 3 +152 2 0 1 1 1 3 +153 3 0 0 1 -1 3 +154 1 1 1 1 1 3 +155 4 1 0 2 0 3 +156 1 1 1 2 0 3 +157 4 1 0 1 -1 3 +158 3 1 1 1 0 3 +159 1 1 1 1 1 3 +160 4 0 1 1 0 3 +161 1 1 1 1 1 3 +162 4 1 0 1 -1 3 +163 1 1 1 2 0 3 +164 4 0 1 1 0 3 +165 4 0 1 1 0 3 +166 1 1 1 2 0 3 +167 3 1 1 1 0 3 +168 2 0 1 1 1 3 +169 4 0 1 1 0 3 +170 2 0 1 1 1 3 +171 4 1 0 1 -1 3 +172 3 0 0 1 -1 3 +173 4 0 1 2 -1 3 +174 2 1 0 1 0 3 +175 2 0 1 1 1 3 +176 1 1 1 1 1 3 +177 4 0 1 1 0 3 +178 3 1 1 1 0 3 +179 3 1 1 1 0 3 +180 2 0 1 1 1 3 +181 1 1 1 1 1 3 +182 4 0 1 1 0 3 +183 3 0 0 1 -1 3 +184 3 1 1 1 0 3 +185 4 0 1 1 0 3 +186 4 0 1 1 0 3 +187 1 1 1 1 1 3 +188 4 0 1 1 0 3 +189 3 1 1 2 -1 3 +190 2 0 1 1 1 3 +191 1 1 1 1 1 3 +192 3 1 1 1 0 3 +193 4 0 1 1 0 3 +194 3 1 1 1 0 3 +195 2 0 1 1 1 3 +196 2 0 1 2 0 3 +197 2 0 1 1 1 3 +198 1 1 1 1 1 3 +199 4 0 1 2 -1 3 +200 4 1 0 1 -1 3 +201 2 0 1 2 0 3 +202 3 1 1 1 0 3 +203 3 1 1 1 0 3 +204 1 1 1 1 1 3 +205 4 0 1 1 0 3 +206 1 1 1 1 1 3 +207 3 0 0 1 -1 3 +208 3 1 1 2 -1 3 +209 3 1 1 1 0 3 +210 1 1 1 1 1 3 +211 4 0 1 1 0 3 +212 4 0 1 2 -1 3 +213 4 0 1 1 0 3 +214 1 1 1 1 1 3 +215 1 1 1 1 1 3 +216 4 0 1 2 -1 3 +217 2 0 1 1 1 3 +218 3 1 1 1 0 3 +219 4 0 1 1 0 3 +220 3 1 1 2 -1 3 +221 1 1 1 2 0 3 +222 1 1 1 1 1 3 +223 3 1 1 1 0 3 +224 1 1 1 1 1 3 +225 2 0 1 1 1 3 +226 1 1 1 2 0 3 +227 3 0 0 1 -1 3 +228 2 0 1 1 1 3 +229 3 0 0 1 -1 3 +230 4 0 1 1 0 3 +231 3 1 1 2 -1 3 +232 4 0 1 1 0 3 +233 4 0 1 1 0 3 +234 1 1 1 1 1 3 +235 4 0 1 1 0 3 +236 4 0 1 1 0 3 +237 2 0 1 1 1 3 +238 2 0 1 1 1 3 +239 3 1 1 2 -1 3 +240 1 1 1 2 0 3 +1 3 1 1 1 0 4 +2 3 0 0 1 -1 4 +3 2 1 0 2 1 4 +4 4 0 1 1 0 4 +5 1 0 0 1 0 4 +6 4 1 0 1 -1 4 +7 2 1 0 1 0 4 +8 2 0 1 1 1 4 +9 3 1 1 1 0 4 +10 4 1 0 1 -1 4 +11 2 0 1 1 1 4 +12 4 0 1 2 -1 4 +13 1 0 0 1 0 4 +14 4 0 1 1 0 4 +15 4 1 0 1 -1 4 +16 3 1 1 1 0 4 +17 1 1 1 1 1 4 +18 3 1 1 1 0 4 +19 2 0 1 2 0 4 +20 2 0 1 1 1 4 +21 2 1 0 1 0 4 +22 3 1 1 2 -1 4 +23 3 0 0 1 -1 4 +24 4 0 1 1 0 4 +25 1 1 1 1 1 4 +26 3 1 1 1 0 4 +27 2 0 1 1 1 4 +28 3 1 1 2 -1 4 +29 4 0 1 1 0 4 +30 4 0 1 1 0 4 +31 3 1 1 1 0 4 +32 1 1 1 2 0 4 +33 3 1 1 1 0 4 +34 2 0 1 1 1 4 +35 4 1 0 1 -1 4 +36 3 0 0 2 0 4 +37 3 0 0 1 -1 4 +38 2 0 1 1 1 4 +39 4 0 1 1 0 4 +40 2 0 1 2 0 4 +41 1 1 1 1 1 4 +42 4 0 1 1 0 4 +43 3 1 1 1 0 4 
+44 2 0 1 2 0 4 +45 1 1 1 1 1 4 +46 3 1 1 1 0 4 +47 3 1 1 1 0 4 +48 2 1 0 1 0 4 +49 1 1 1 1 1 4 +50 1 1 1 1 1 4 +51 1 1 1 1 1 4 +52 2 0 1 2 0 4 +53 3 1 1 1 0 4 +54 2 0 1 1 1 4 +55 1 1 1 1 1 4 +56 1 1 1 1 1 4 +57 3 0 0 1 -1 4 +58 4 0 1 2 -1 4 +59 2 0 1 1 1 4 +60 1 1 1 2 0 4 +61 1 1 1 1 1 4 +62 2 0 1 1 1 4 +63 1 1 1 2 0 4 +64 4 0 1 1 0 4 +65 2 0 1 1 1 4 +66 1 1 1 1 1 4 +67 2 0 1 1 1 4 +68 3 1 1 2 -1 4 +69 2 0 1 1 1 4 +70 4 0 1 1 0 4 +71 4 0 1 2 -1 4 +72 1 1 1 2 0 4 +73 2 0 1 1 1 4 +74 2 0 1 1 1 4 +75 4 0 1 1 0 4 +76 4 0 1 1 0 4 +77 1 1 1 1 1 4 +78 1 1 1 1 1 4 +79 2 0 1 1 1 4 +80 4 1 0 1 -1 4 +81 4 0 1 1 0 4 +82 1 1 1 1 1 4 +83 2 0 1 1 1 4 +84 1 1 1 1 1 4 +85 1 1 1 2 0 4 +86 4 0 1 1 0 4 +87 4 0 1 1 0 4 +88 1 1 1 1 1 4 +89 2 0 1 1 1 4 +90 3 0 0 1 -1 4 +91 3 1 1 1 0 4 +92 1 1 1 1 1 4 +93 3 1 1 2 -1 4 +94 4 0 1 1 0 4 +95 2 0 1 1 1 4 +96 3 1 1 1 0 4 +97 2 0 1 1 1 4 +98 1 1 1 2 0 4 +99 3 1 1 1 0 4 +100 3 1 1 1 0 4 +101 2 0 1 1 1 4 +102 4 0 1 1 0 4 +103 2 0 1 1 1 4 +104 3 1 1 1 0 4 +105 4 0 1 1 0 4 +106 3 1 1 1 0 4 +107 1 1 1 1 1 4 +108 3 1 1 1 0 4 +109 2 0 1 1 1 4 +110 2 0 1 2 0 4 +111 3 1 1 2 -1 4 +112 1 1 1 1 1 4 +113 4 0 1 1 0 4 +114 2 0 1 1 1 4 +115 3 0 0 1 -1 4 +116 1 1 1 1 1 4 +117 3 1 1 1 0 4 +118 1 1 1 1 1 4 +119 2 0 1 1 1 4 +120 3 1 1 1 0 4 +121 3 0 0 1 -1 4 +122 4 0 1 2 -1 4 +123 2 0 1 1 1 4 +124 2 0 1 1 1 4 +125 4 0 1 1 0 4 +126 4 1 0 1 -1 4 +127 1 1 1 1 1 4 +128 1 1 1 1 1 4 +129 2 0 1 1 1 4 +130 1 1 1 1 1 4 +131 4 0 1 1 0 4 +132 3 1 1 1 0 4 +133 4 0 1 1 0 4 +134 1 1 1 1 1 4 +135 4 0 1 1 0 4 +136 2 0 1 2 0 4 +137 4 0 1 1 0 4 +138 1 1 1 1 1 4 +139 2 0 1 1 1 4 +140 1 1 1 1 1 4 +141 2 0 1 1 1 4 +142 3 1 1 1 0 4 +143 2 0 1 1 1 4 +144 4 0 1 1 0 4 +145 2 0 1 1 1 4 +146 1 1 1 2 0 4 +147 3 1 1 1 0 4 +148 2 0 1 1 1 4 +149 2 0 1 1 1 4 +150 1 1 1 1 1 4 +151 3 1 1 2 -1 4 +152 3 1 1 2 -1 4 +153 1 1 1 1 1 4 +154 1 1 1 1 1 4 +155 3 1 1 1 0 4 +156 3 1 1 1 0 4 +157 2 0 1 1 1 4 +158 1 1 1 1 1 4 +159 4 0 1 1 0 4 +160 4 0 1 1 0 4 +161 3 1 1 1 0 4 +162 3 1 1 1 0 4 +163 4 0 1 1 0 4 +164 2 0 1 1 1 4 +165 4 0 1 1 0 4 +166 4 0 1 1 0 4 +167 3 1 1 1 0 4 +168 1 1 1 1 1 4 +169 4 0 1 1 0 4 +170 2 0 1 1 1 4 +171 1 1 1 2 0 4 +172 4 0 1 1 0 4 +173 1 1 1 1 1 4 +174 4 0 1 2 -1 4 +175 3 1 1 2 -1 4 +176 4 0 1 1 0 4 +177 4 0 1 1 0 4 +178 4 0 1 1 0 4 +179 2 0 1 1 1 4 +180 3 1 1 2 -1 4 +181 2 0 1 1 1 4 +182 1 1 1 1 1 4 +183 1 1 1 1 1 4 +184 2 0 1 1 1 4 +185 3 1 1 2 -1 4 +186 4 0 1 2 -1 4 +187 2 0 1 2 0 4 +188 1 1 1 1 1 4 +189 2 0 1 1 1 4 +190 2 0 1 1 1 4 +191 4 0 1 1 0 4 +192 1 1 1 1 1 4 +193 2 0 1 1 1 4 +194 2 0 1 1 1 4 +195 2 0 1 1 1 4 +196 4 0 1 2 -1 4 +197 3 0 0 2 0 4 +198 1 1 1 1 1 4 +199 3 1 1 1 0 4 +200 3 1 1 1 0 4 +201 2 0 1 1 1 4 +202 3 1 1 1 0 4 +203 3 1 1 1 0 4 +204 2 0 1 1 1 4 +205 1 1 1 1 1 4 +206 1 1 1 2 0 4 +207 4 0 1 1 0 4 +208 4 0 1 2 -1 4 +209 4 1 0 1 -1 4 +210 1 1 1 1 1 4 +211 3 1 1 2 -1 4 +212 4 1 0 1 -1 4 +213 3 0 0 1 -1 4 +214 1 1 1 2 0 4 +215 3 1 1 1 0 4 +216 1 1 1 1 1 4 +217 2 0 1 1 1 4 +218 1 1 1 2 0 4 +219 4 1 0 1 -1 4 +220 3 1 1 1 0 4 +221 3 1 1 1 0 4 +222 3 1 1 1 0 4 +223 4 0 1 1 0 4 +224 3 1 1 1 0 4 +225 4 1 0 1 -1 4 +226 3 1 1 2 -1 4 +227 4 0 1 2 -1 4 +228 4 1 0 1 -1 4 +229 2 0 1 1 1 4 +230 1 1 1 1 1 4 +231 4 0 1 1 0 4 +232 1 1 1 1 1 4 +233 4 0 1 1 0 4 +234 1 1 1 1 1 4 +235 2 0 1 1 1 4 +236 3 1 1 2 -1 4 +237 4 0 1 1 0 4 +238 1 1 1 1 1 4 +239 1 1 1 1 1 4 +240 1 1 1 1 1 4 +1 2 0 1 1 1 5 +2 1 1 1 1 1 5 +3 4 1 0 1 -1 5 +4 1 1 1 1 1 5 +5 4 1 0 2 0 5 +6 1 1 1 1 1 5 +7 4 0 1 2 -1 5 +8 3 0 0 1 -1 5 +9 3 1 1 1 0 5 +10 4 1 0 2 0 5 +11 1 1 1 2 0 5 +12 1 1 1 1 1 5 +13 4 1 0 1 -1 5 +14 2 0 1 1 1 5 +15 2 0 1 1 
1 5 +16 3 1 1 1 0 5 +17 2 0 1 1 1 5 +18 4 1 0 1 -1 5 +19 2 0 1 1 1 5 +20 1 1 1 1 1 5 +21 1 1 1 1 1 5 +22 2 0 1 1 1 5 +23 1 1 1 1 1 5 +24 3 1 1 1 0 5 +25 4 0 1 1 0 5 +26 3 1 1 1 0 5 +27 4 0 1 2 -1 5 +28 4 0 1 1 0 5 +29 1 1 1 1 1 5 +30 4 0 1 1 0 5 +31 2 0 1 1 1 5 +32 3 1 1 2 -1 5 +33 3 1 1 1 0 5 +34 4 0 1 1 0 5 +35 2 0 1 1 1 5 +36 4 0 1 1 0 5 +37 3 0 0 1 -1 5 +38 1 1 1 1 1 5 +39 3 0 0 1 -1 5 +40 3 1 1 1 0 5 +41 1 1 1 2 0 5 +42 4 0 1 1 0 5 +43 4 0 1 1 0 5 +44 1 1 1 1 1 5 +45 3 1 1 1 0 5 +46 2 0 1 2 0 5 +47 4 0 1 1 0 5 +48 4 0 1 1 0 5 +49 4 0 1 2 -1 5 +50 3 1 1 1 0 5 +51 2 0 1 1 1 5 +52 1 1 1 2 0 5 +53 4 0 1 1 0 5 +54 4 0 1 1 0 5 +55 1 1 1 1 1 5 +56 3 1 1 1 0 5 +57 2 0 1 1 1 5 +58 1 1 1 1 1 5 +59 2 0 1 1 1 5 +60 3 1 1 2 -1 5 +61 1 1 1 1 1 5 +62 1 1 1 2 0 5 +63 3 0 0 1 -1 5 +64 2 0 1 1 1 5 +65 4 1 0 2 0 5 +66 3 0 0 1 -1 5 +67 4 1 0 1 -1 5 +68 2 0 1 1 1 5 +69 1 1 1 2 0 5 +70 1 1 1 1 1 5 +71 4 0 1 1 0 5 +72 3 0 0 1 -1 5 +73 2 0 1 2 0 5 +74 3 1 1 1 0 5 +75 4 0 1 1 0 5 +76 4 0 1 2 -1 5 +77 1 1 1 2 0 5 +78 3 1 1 1 0 5 +79 2 0 1 1 1 5 +80 1 1 1 1 1 5 +81 4 0 1 2 -1 5 +82 1 1 1 1 1 5 +83 4 1 0 1 -1 5 +84 2 0 1 1 1 5 +85 1 1 1 1 1 5 +86 1 1 1 1 1 5 +87 2 0 1 2 0 5 +88 3 1 1 2 -1 5 +89 3 0 0 1 -1 5 +90 4 0 1 1 0 5 +91 2 0 1 1 1 5 +92 3 1 1 1 0 5 +93 2 0 1 1 1 5 +94 1 1 1 1 1 5 +95 2 0 1 1 1 5 +96 1 1 1 2 0 5 +97 3 1 1 1 0 5 +98 3 0 0 1 -1 5 +99 4 0 1 1 0 5 +100 1 1 1 2 0 5 +101 4 0 1 1 0 5 +102 1 1 1 1 1 5 +103 4 0 1 1 0 5 +104 1 1 1 1 1 5 +105 1 1 1 1 1 5 +106 4 1 0 1 -1 5 +107 2 0 1 1 1 5 +108 1 1 1 1 1 5 +109 1 1 1 1 1 5 +110 3 1 1 2 -1 5 +111 2 0 1 1 1 5 +112 3 1 1 2 -1 5 +113 1 1 1 1 1 5 +114 1 1 1 1 1 5 +115 2 0 1 1 1 5 +116 1 1 1 1 1 5 +117 4 0 1 1 0 5 +118 4 0 1 1 0 5 +119 4 0 1 1 0 5 +120 1 1 1 1 1 5 +121 4 0 1 2 -1 5 +122 2 0 1 1 1 5 +123 1 1 1 1 1 5 +124 2 0 1 1 1 5 +125 3 1 1 2 -1 5 +126 4 0 1 1 0 5 +127 2 0 1 1 1 5 +128 3 1 1 1 0 5 +129 3 1 1 1 0 5 +130 3 1 1 1 0 5 +131 3 1 1 1 0 5 +132 3 1 1 2 -1 5 +133 1 1 1 1 1 5 +134 2 0 1 1 1 5 +135 4 0 1 1 0 5 +136 1 1 1 2 0 5 +137 2 0 1 1 1 5 +138 2 0 1 1 1 5 +139 2 0 1 1 1 5 +140 2 0 1 2 0 5 +141 2 0 1 1 1 5 +142 2 0 1 1 1 5 +143 3 1 1 1 0 5 +144 1 1 1 2 0 5 +145 1 1 1 1 1 5 +146 1 1 1 2 0 5 +147 3 1 1 1 0 5 +148 2 0 1 1 1 5 +149 1 1 1 2 0 5 +150 2 0 1 1 1 5 +151 4 0 1 2 -1 5 +152 4 0 1 2 -1 5 +153 1 1 1 1 1 5 +154 2 0 1 2 0 5 +155 2 0 1 1 1 5 +156 4 0 1 1 0 5 +157 1 1 1 1 1 5 +158 4 0 1 1 0 5 +159 1 1 1 1 1 5 +160 2 0 1 1 1 5 +161 3 1 1 1 0 5 +162 2 0 1 1 1 5 +163 4 0 1 1 0 5 +164 1 1 1 1 1 5 +165 2 0 1 1 1 5 +166 3 1 1 2 -1 5 +167 3 1 1 2 -1 5 +168 3 1 1 1 0 5 +169 4 0 1 1 0 5 +170 2 0 1 1 1 5 +171 2 0 1 2 0 5 +172 4 1 0 2 0 5 +173 3 1 1 1 0 5 +174 4 1 0 2 0 5 +175 2 0 1 1 1 5 +176 1 1 1 1 1 5 +177 2 0 1 1 1 5 +178 3 1 1 1 0 5 +179 2 0 1 1 1 5 +180 1 0 0 2 1 5 +181 1 1 1 1 1 5 +182 4 0 1 1 0 5 +183 1 1 1 1 1 5 +184 1 1 1 1 1 5 +185 1 1 1 1 1 5 +186 3 1 1 1 0 5 +187 3 1 1 1 0 5 +188 3 1 1 2 -1 5 +189 4 0 1 1 0 5 +190 4 0 1 1 0 5 +191 4 0 1 2 -1 5 +192 2 0 1 2 0 5 +193 2 0 1 1 1 5 +194 1 1 1 1 1 5 +195 2 0 1 1 1 5 +196 3 1 1 1 0 5 +197 3 1 1 1 0 5 +198 2 0 1 1 1 5 +199 2 0 1 1 1 5 +200 3 1 1 1 0 5 +201 4 0 1 1 0 5 +202 3 1 1 2 -1 5 +203 2 0 1 1 1 5 +204 2 0 1 1 1 5 +205 3 1 1 1 0 5 +206 2 0 1 1 1 5 +207 2 0 1 2 0 5 +208 3 1 1 1 0 5 +209 2 0 1 1 1 5 +210 1 1 1 1 1 5 +211 3 1 1 1 0 5 +212 4 1 0 1 -1 5 +213 4 1 0 2 0 5 +214 4 0 1 1 0 5 +215 1 1 1 1 1 5 +216 3 0 0 2 0 5 +217 1 1 1 2 0 5 +218 2 0 1 1 1 5 +219 4 0 1 1 0 5 +220 3 0 0 2 0 5 +221 3 0 0 1 -1 5 +222 4 0 1 1 0 5 +223 3 1 1 1 0 5 +224 4 0 1 1 0 5 +225 3 1 1 1 0 5 +226 3 1 1 1 0 5 +227 1 1 1 1 1 
5 +228 4 0 1 1 0 5 +229 1 1 1 2 0 5 +230 2 0 1 2 0 5 +231 3 1 1 1 0 5 +232 2 0 1 1 1 5 +233 3 1 1 1 0 5 +234 4 0 1 1 0 5 +235 1 1 1 2 0 5 +236 2 0 1 1 1 5 +237 3 1 1 1 0 5 +238 3 1 1 1 0 5 +239 4 1 0 2 0 5 +240 4 1 0 1 -1 5 +1 3 1 1 2 -1 6 +2 1 0 0 2 1 6 +3 2 1 0 2 1 6 +4 1 0 0 1 0 6 +5 4 1 0 1 -1 6 +6 4 0 1 2 -1 6 +7 2 1 0 2 1 6 +8 4 1 0 1 -1 6 +9 1 0 0 1 0 6 +10 2 1 0 2 1 6 +11 2 1 0 2 1 6 +12 2 1 0 1 0 6 +13 2 1 0 1 0 6 +14 4 0 1 2 -1 6 +15 3 0 0 1 -1 6 +16 3 0 0 1 -1 6 +17 4 1 0 1 -1 6 +18 1 1 1 1 1 6 +19 1 1 1 1 1 6 +20 3 1 1 1 0 6 +21 4 0 1 1 0 6 +22 3 1 1 1 0 6 +23 4 0 1 1 0 6 +24 1 1 1 1 1 6 +25 3 1 1 1 0 6 +26 1 1 1 2 0 6 +27 1 1 1 1 1 6 +28 1 1 1 2 0 6 +29 1 1 1 1 1 6 +30 1 1 1 1 1 6 +31 4 0 1 1 0 6 +32 1 1 1 1 1 6 +33 2 1 0 1 0 6 +34 3 1 1 1 0 6 +35 4 0 1 2 -1 6 +36 3 1 1 2 -1 6 +37 4 1 0 2 0 6 +38 4 1 0 2 0 6 +39 3 1 1 2 -1 6 +40 4 1 0 2 0 6 +41 2 1 0 1 0 6 +42 1 1 1 1 1 6 +43 3 1 1 2 -1 6 +44 1 1 1 1 1 6 +45 4 1 0 1 -1 6 +46 2 1 0 2 1 6 +47 3 0 0 1 -1 6 +48 2 1 0 2 1 6 +49 1 1 1 2 0 6 +50 1 1 1 2 0 6 +51 4 0 1 1 0 6 +52 3 1 1 1 0 6 +53 1 1 1 1 1 6 +54 2 1 0 1 0 6 +55 2 1 0 2 1 6 +56 2 1 0 1 0 6 +57 1 1 1 1 1 6 +58 1 1 1 1 1 6 +59 3 1 1 1 0 6 +60 2 1 0 1 0 6 +61 4 1 0 2 0 6 +62 2 1 0 1 0 6 +63 3 1 1 2 -1 6 +64 3 0 0 2 0 6 +65 2 1 0 1 0 6 +66 3 1 1 1 0 6 +67 4 1 0 1 -1 6 +68 4 0 1 1 0 6 +69 4 0 1 1 0 6 +70 1 1 1 2 0 6 +71 2 1 0 1 0 6 +72 4 0 1 1 0 6 +73 3 1 1 1 0 6 +74 1 1 1 2 0 6 +75 4 1 0 1 -1 6 +76 1 1 1 2 0 6 +77 3 1 1 1 0 6 +78 2 1 0 1 0 6 +79 4 0 1 1 0 6 +80 4 1 0 2 0 6 +81 2 1 0 1 0 6 +82 1 1 1 1 1 6 +83 4 0 1 2 -1 6 +84 2 1 0 2 1 6 +85 2 1 0 1 0 6 +86 4 1 0 1 -1 6 +87 3 1 1 1 0 6 +88 4 0 1 1 0 6 +89 2 1 0 2 1 6 +90 1 1 1 1 1 6 +91 1 1 1 1 1 6 +92 3 1 1 1 0 6 +93 1 1 1 1 1 6 +94 1 1 1 1 1 6 +95 4 0 1 1 0 6 +96 3 1 1 1 0 6 +97 4 0 1 1 0 6 +98 4 0 1 2 -1 6 +99 2 1 0 1 0 6 +100 1 1 1 1 1 6 +101 4 0 1 1 0 6 +102 4 0 1 1 0 6 +103 3 1 1 1 0 6 +104 4 0 1 1 0 6 +105 2 1 0 1 0 6 +106 3 1 1 1 0 6 +107 2 1 0 1 0 6 +108 3 1 1 1 0 6 +109 3 1 1 1 0 6 +110 4 0 1 1 0 6 +111 1 1 1 2 0 6 +112 2 1 0 1 0 6 +113 1 1 1 1 1 6 +114 4 1 0 1 -1 6 +115 1 1 1 2 0 6 +116 4 1 0 1 -1 6 +117 4 0 1 1 0 6 +118 3 1 1 1 0 6 +119 3 0 0 1 -1 6 +120 2 1 0 1 0 6 +121 4 0 1 2 -1 6 +122 3 1 1 1 0 6 +123 4 1 0 1 -1 6 +124 3 1 1 2 -1 6 +125 2 0 1 1 1 6 +126 2 1 0 1 0 6 +127 2 1 0 1 0 6 +128 1 1 1 1 1 6 +129 4 1 0 1 -1 6 +130 3 1 1 1 0 6 +131 4 0 1 1 0 6 +132 2 1 0 1 0 6 +133 2 0 1 1 1 6 +134 2 0 1 1 1 6 +135 3 1 1 1 0 6 +136 3 1 1 1 0 6 +137 2 0 1 1 1 6 +138 4 0 1 1 0 6 +139 1 1 1 2 0 6 +140 2 0 1 1 1 6 +141 2 0 1 2 0 6 +142 4 0 1 1 0 6 +143 1 1 1 1 1 6 +144 4 0 1 1 0 6 +145 4 0 1 2 -1 6 +146 1 1 1 1 1 6 +147 3 0 0 1 -1 6 +148 4 0 1 1 0 6 +149 1 0 0 2 1 6 +150 1 1 1 2 0 6 +151 4 0 1 1 0 6 +152 1 1 1 2 0 6 +153 3 1 1 1 0 6 +154 3 1 1 1 0 6 +155 2 0 1 2 0 6 +156 2 0 1 1 1 6 +157 1 1 1 2 0 6 +158 3 1 1 1 0 6 +159 3 0 0 1 -1 6 +160 3 1 1 1 0 6 +161 3 1 1 1 0 6 +162 1 0 0 1 0 6 +163 4 0 1 1 0 6 +164 3 0 0 1 -1 6 +165 3 1 1 1 0 6 +166 3 1 1 1 0 6 +167 2 0 1 2 0 6 +168 3 1 1 2 -1 6 +169 2 0 1 1 1 6 +170 2 0 1 2 0 6 +171 1 1 1 1 1 6 +172 2 0 1 1 1 6 +173 1 1 1 1 1 6 +174 1 1 1 1 1 6 +175 2 0 1 1 1 6 +176 2 0 1 1 1 6 +177 1 1 1 1 1 6 +178 2 0 1 1 1 6 +179 4 0 1 1 0 6 +180 1 1 1 1 1 6 +181 3 1 1 1 0 6 +182 3 1 1 2 -1 6 +183 3 1 1 1 0 6 +184 4 1 0 1 -1 6 +185 3 1 1 1 0 6 +186 4 0 1 1 0 6 +187 3 1 1 2 -1 6 +188 4 0 1 1 0 6 +189 1 1 1 1 1 6 +190 4 0 1 2 -1 6 +191 1 1 1 1 1 6 +192 3 1 1 1 0 6 +193 3 1 1 2 -1 6 +194 2 0 1 1 1 6 +195 1 1 1 1 1 6 +196 1 1 1 1 1 6 +197 2 0 1 2 0 6 +198 1 1 1 2 0 6 +199 2 1 0 1 0 6 +200 3 1 1 1 0 6 
+201 2 0 1 1 1 6 +202 3 1 1 1 0 6 +203 1 1 1 1 1 6 +204 3 1 1 1 0 6 +205 1 1 1 2 0 6 +206 3 1 1 1 0 6 +207 2 0 1 1 1 6 +208 3 1 1 1 0 6 +209 2 0 1 1 1 6 +210 4 1 0 1 -1 6 +211 2 0 1 1 1 6 +212 2 0 1 1 1 6 +213 1 1 1 1 1 6 +214 3 1 1 1 0 6 +215 1 1 1 1 1 6 +216 3 1 1 1 0 6 +217 1 1 1 1 1 6 +218 2 0 1 1 1 6 +219 2 0 1 1 1 6 +220 1 1 1 1 1 6 +221 1 1 1 1 1 6 +222 4 0 1 2 -1 6 +223 1 1 1 1 1 6 +224 4 0 1 1 0 6 +225 4 0 1 1 0 6 +226 4 0 1 1 0 6 +227 3 1 1 1 0 6 +228 2 0 1 1 1 6 +229 2 0 1 2 0 6 +230 3 1 1 1 0 6 +231 2 0 1 1 1 6 +232 2 0 1 1 1 6 +233 4 0 1 1 0 6 +234 2 0 1 1 1 6 +235 1 1 1 2 0 6 +236 4 0 1 2 -1 6 +237 4 0 1 1 0 6 +238 4 0 1 1 0 6 +239 3 0 0 1 -1 6 +240 1 1 1 1 1 6 +1 2 0 1 1 1 7 +2 4 1 0 1 -1 7 +3 4 0 1 1 0 7 +4 3 1 1 1 0 7 +5 3 1 1 1 0 7 +6 3 0 0 1 -1 7 +7 4 0 1 1 0 7 +8 2 0 1 1 1 7 +9 3 1 1 1 0 7 +10 4 0 1 2 -1 7 +11 2 0 1 1 1 7 +12 4 0 1 1 0 7 +13 3 1 1 2 -1 7 +14 1 1 1 1 1 7 +15 1 1 1 1 1 7 +16 1 1 1 1 1 7 +17 1 1 1 1 1 7 +18 2 0 1 1 1 7 +19 1 1 1 1 1 7 +20 3 1 1 1 0 7 +21 2 0 1 1 1 7 +22 3 0 0 1 -1 7 +23 2 1 0 1 0 7 +24 4 0 1 1 0 7 +25 4 1 0 1 -1 7 +26 3 1 1 1 0 7 +27 4 1 0 1 -1 7 +28 1 1 1 1 1 7 +29 1 1 1 1 1 7 +30 3 1 1 2 -1 7 +31 4 0 1 1 0 7 +32 2 0 1 1 1 7 +33 4 0 1 1 0 7 +34 3 1 1 1 0 7 +35 3 0 0 1 -1 7 +36 3 1 1 1 0 7 +37 1 1 1 1 1 7 +38 3 1 1 1 0 7 +39 3 0 0 1 -1 7 +40 4 1 0 1 -1 7 +41 4 0 1 1 0 7 +42 1 1 1 1 1 7 +43 4 0 1 1 0 7 +44 2 0 1 1 1 7 +45 1 1 1 1 1 7 +46 2 0 1 2 0 7 +47 1 1 1 1 1 7 +48 3 1 1 1 0 7 +49 2 0 1 2 0 7 +50 3 1 1 1 0 7 +51 2 0 1 1 1 7 +52 2 0 1 2 0 7 +53 2 0 1 1 1 7 +54 2 1 0 1 0 7 +55 1 1 1 1 1 7 +56 1 1 1 1 1 7 +57 4 0 1 1 0 7 +58 2 0 1 1 1 7 +59 4 0 1 1 0 7 +60 1 1 1 1 1 7 +61 3 1 1 2 -1 7 +62 2 0 1 1 1 7 +63 3 0 0 1 -1 7 +64 4 0 1 1 0 7 +65 3 1 1 1 0 7 +66 4 0 1 1 0 7 +67 2 0 1 2 0 7 +68 4 0 1 1 0 7 +69 2 0 1 2 0 7 +70 1 1 1 1 1 7 +71 4 1 0 2 0 7 +72 2 0 1 2 0 7 +73 3 1 1 1 0 7 +74 4 0 1 1 0 7 +75 3 1 1 1 0 7 +76 1 1 1 1 1 7 +77 2 0 1 1 1 7 +78 4 0 1 1 0 7 +79 2 0 1 1 1 7 +80 4 1 0 2 0 7 +81 3 1 1 2 -1 7 +82 3 1 1 2 -1 7 +83 2 0 1 1 1 7 +84 3 1 1 2 -1 7 +85 2 0 1 2 0 7 +86 3 1 1 2 -1 7 +87 2 0 1 1 1 7 +88 2 0 1 2 0 7 +89 1 1 1 1 1 7 +90 4 0 1 1 0 7 +91 2 0 1 1 1 7 +92 1 1 1 1 1 7 +93 4 1 0 1 -1 7 +94 1 0 0 1 0 7 +95 3 1 1 2 -1 7 +96 1 1 1 1 1 7 +97 3 0 0 1 -1 7 +98 1 1 1 1 1 7 +99 4 0 1 1 0 7 +100 1 1 1 1 1 7 +101 3 1 1 1 0 7 +102 2 0 1 1 1 7 +103 1 1 1 1 1 7 +104 3 1 1 1 0 7 +105 1 1 1 1 1 7 +106 2 1 0 1 0 7 +107 3 1 1 1 0 7 +108 3 1 1 1 0 7 +109 4 0 1 1 0 7 +110 4 1 0 1 -1 7 +111 2 0 1 1 1 7 +112 4 0 1 1 0 7 +113 2 0 1 1 1 7 +114 1 1 1 1 1 7 +115 4 0 1 1 0 7 +116 1 1 1 2 0 7 +117 2 0 1 1 1 7 +118 2 0 1 1 1 7 +119 4 0 1 1 0 7 +120 3 1 1 1 0 7 +121 1 1 1 1 1 7 +122 1 1 1 1 1 7 +123 2 0 1 1 1 7 +124 1 1 1 2 0 7 +125 4 0 1 1 0 7 +126 1 1 1 1 1 7 +127 3 0 0 1 -1 7 +128 4 0 1 1 0 7 +129 3 1 1 2 -1 7 +130 2 0 1 2 0 7 +131 1 1 1 2 0 7 +132 2 1 0 1 0 7 +133 4 0 1 1 0 7 +134 1 1 1 1 1 7 +135 1 1 1 2 0 7 +136 3 1 1 1 0 7 +137 2 0 1 1 1 7 +138 3 1 1 1 0 7 +139 4 0 1 1 0 7 +140 2 0 1 2 0 7 +141 3 1 1 1 0 7 +142 4 0 1 1 0 7 +143 1 1 1 1 1 7 +144 3 1 1 2 -1 7 +145 1 1 1 2 0 7 +146 1 1 1 1 1 7 +147 2 0 1 1 1 7 +148 2 0 1 1 1 7 +149 3 1 1 1 0 7 +150 4 0 1 1 0 7 +151 4 0 1 1 0 7 +152 4 0 1 1 0 7 +153 2 0 1 1 1 7 +154 4 0 1 2 -1 7 +155 4 0 1 2 -1 7 +156 4 0 1 2 -1 7 +157 1 1 1 2 0 7 +158 3 1 1 1 0 7 +159 2 0 1 2 0 7 +160 2 0 1 1 1 7 +161 3 1 1 1 0 7 +162 1 1 1 2 0 7 +163 1 1 1 1 1 7 +164 4 1 0 1 -1 7 +165 4 1 0 1 -1 7 +166 1 1 1 1 1 7 +167 4 1 0 1 -1 7 +168 1 1 1 2 0 7 +169 4 0 1 1 0 7 +170 4 0 1 1 0 7 +171 2 0 1 1 1 7 +172 4 0 1 1 0 7 +173 2 0 1 1 1 7 +174 1 
1 1 1 1 7 +175 4 0 1 1 0 7 +176 4 0 1 1 0 7 +177 2 0 1 2 0 7 +178 4 0 1 1 0 7 +179 2 0 1 1 1 7 +180 3 1 1 1 0 7 +181 1 1 1 2 0 7 +182 3 1 1 2 -1 7 +183 3 1 1 1 0 7 +184 1 1 1 1 1 7 +185 3 1 1 2 -1 7 +186 4 0 1 1 0 7 +187 1 1 1 1 1 7 +188 1 1 1 1 1 7 +189 3 0 0 1 -1 7 +190 2 0 1 1 1 7 +191 1 1 1 1 1 7 +192 1 1 1 2 0 7 +193 4 0 1 1 0 7 +194 4 0 1 2 -1 7 +195 1 1 1 2 0 7 +196 4 0 1 1 0 7 +197 2 0 1 1 1 7 +198 2 0 1 1 1 7 +199 2 0 1 1 1 7 +200 1 1 1 1 1 7 +201 4 0 1 2 -1 7 +202 2 0 1 1 1 7 +203 2 0 1 1 1 7 +204 3 0 0 1 -1 7 +205 3 1 1 1 0 7 +206 1 1 1 2 0 7 +207 2 0 1 1 1 7 +208 3 1 1 1 0 7 +209 2 0 1 1 1 7 +210 3 1 1 1 0 7 +211 3 1 1 2 -1 7 +212 4 0 1 2 -1 7 +213 1 1 1 1 1 7 +214 3 1 1 2 -1 7 +215 1 1 1 2 0 7 +216 2 0 1 1 1 7 +217 3 1 1 1 0 7 +218 1 1 1 2 0 7 +219 1 1 1 1 1 7 +220 2 0 1 1 1 7 +221 3 1 1 1 0 7 +222 2 0 1 1 1 7 +223 2 0 1 1 1 7 +224 2 0 1 2 0 7 +225 1 0 0 1 0 7 +226 3 1 1 1 0 7 +227 1 1 1 1 1 7 +228 3 1 1 1 0 7 +229 1 1 1 2 0 7 +230 1 0 0 1 0 7 +231 4 0 1 2 -1 7 +232 2 0 1 1 1 7 +233 3 1 1 2 -1 7 +234 3 0 0 1 -1 7 +235 3 0 0 1 -1 7 +236 4 0 1 1 0 7 +237 3 1 1 1 0 7 +238 4 0 1 1 0 7 +239 1 1 1 1 1 7 +240 4 0 1 1 0 7 +1 3 1 1 1 0 8 +2 2 0 1 1 1 8 +3 3 0 0 1 -1 8 +4 3 1 1 1 0 8 +5 1 0 0 2 1 8 +6 3 1 1 2 -1 8 +7 2 1 0 1 0 8 +8 1 0 0 1 0 8 +9 2 0 1 2 0 8 +10 2 1 0 2 1 8 +11 1 1 1 2 0 8 +12 3 0 0 1 -1 8 +13 4 1 0 2 0 8 +14 3 0 0 1 -1 8 +15 4 1 0 2 0 8 +16 3 1 1 2 -1 8 +17 1 0 0 1 0 8 +18 2 1 0 2 1 8 +19 2 1 0 1 0 8 +20 3 1 1 1 0 8 +21 2 1 0 1 0 8 +22 4 1 0 1 -1 8 +23 2 0 1 2 0 8 +24 3 1 1 1 0 8 +25 2 1 0 1 0 8 +26 3 1 1 1 0 8 +27 3 1 1 1 0 8 +28 4 0 1 2 -1 8 +29 1 1 1 1 1 8 +30 3 1 1 2 -1 8 +31 1 1 1 1 1 8 +32 1 1 1 1 1 8 +33 3 1 1 1 0 8 +34 4 1 0 1 -1 8 +35 4 0 1 1 0 8 +36 2 1 0 2 1 8 +37 3 1 1 2 -1 8 +38 1 1 1 1 1 8 +39 4 1 0 1 -1 8 +40 2 1 0 1 0 8 +41 2 1 0 1 0 8 +42 4 0 1 1 0 8 +43 3 1 1 1 0 8 +44 1 1 1 1 1 8 +45 1 1 1 1 1 8 +46 4 0 1 2 -1 8 +47 3 0 0 1 -1 8 +48 2 1 0 2 1 8 +49 2 1 0 1 0 8 +50 3 1 1 1 0 8 +51 3 1 1 1 0 8 +52 1 1 1 1 1 8 +53 4 0 1 1 0 8 +54 4 1 0 2 0 8 +55 3 1 1 2 -1 8 +56 2 1 0 2 1 8 +57 4 0 1 1 0 8 +58 2 1 0 2 1 8 +59 1 1 1 2 0 8 +60 1 1 1 1 1 8 +61 1 1 1 2 0 8 +62 2 1 0 1 0 8 +63 3 1 1 1 0 8 +64 3 1 1 2 -1 8 +65 4 0 1 1 0 8 +66 3 1 1 1 0 8 +67 3 0 0 2 0 8 +68 1 1 1 1 1 8 +69 4 0 1 1 0 8 +70 1 1 1 2 0 8 +71 4 1 0 1 -1 8 +72 4 0 1 2 -1 8 +73 3 1 1 1 0 8 +74 3 1 1 2 -1 8 +75 4 1 0 1 -1 8 +76 1 1 1 1 1 8 +77 4 0 1 1 0 8 +78 2 1 0 1 0 8 +79 1 1 1 1 1 8 +80 1 1 1 1 1 8 +81 1 1 1 2 0 8 +82 3 0 0 1 -1 8 +83 4 0 1 1 0 8 +84 2 1 0 2 1 8 +85 3 0 0 1 -1 8 +86 4 0 1 1 0 8 +87 2 1 0 1 0 8 +88 1 1 1 1 1 8 +89 2 1 0 1 0 8 +90 4 0 1 1 0 8 +91 4 0 1 1 0 8 +92 4 0 1 1 0 8 +93 1 1 1 2 0 8 +94 2 1 0 1 0 8 +95 4 1 0 2 0 8 +96 2 1 0 1 0 8 +97 2 1 0 2 1 8 +98 4 0 1 1 0 8 +99 2 1 0 1 0 8 +100 4 0 1 1 0 8 +101 1 1 1 1 1 8 +102 2 1 0 2 1 8 +103 1 1 1 1 1 8 +104 4 0 1 1 0 8 +105 4 0 1 1 0 8 +106 4 0 1 1 0 8 +107 1 1 1 1 1 8 +108 2 1 0 1 0 8 +109 2 1 0 1 0 8 +110 3 1 1 1 0 8 +111 3 1 1 1 0 8 +112 1 1 1 1 1 8 +113 3 1 1 1 0 8 +114 4 0 1 1 0 8 +115 2 0 1 1 1 8 +116 2 1 0 1 0 8 +117 4 1 0 1 -1 8 +118 4 0 1 2 -1 8 +119 1 1 1 1 1 8 +120 1 1 1 1 1 8 +121 1 1 1 2 0 8 +122 3 1 1 1 0 8 +123 3 1 1 1 0 8 +124 3 1 1 2 -1 8 +125 2 0 1 1 1 8 +126 2 0 1 1 1 8 +127 1 1 1 1 1 8 +128 1 1 1 1 1 8 +129 2 0 1 1 1 8 +130 1 1 1 1 1 8 +131 2 0 1 1 1 8 +132 3 1 1 1 0 8 +133 4 1 0 1 -1 8 +134 1 1 1 1 1 8 +135 3 1 1 1 0 8 +136 4 0 1 1 0 8 +137 1 1 1 1 1 8 +138 2 0 1 1 1 8 +139 4 0 1 1 0 8 +140 4 0 1 2 -1 8 +141 2 0 1 1 1 8 +142 1 1 1 1 1 8 +143 3 1 1 1 0 8 +144 3 1 1 1 0 8 +145 3 1 1 2 -1 8 +146 3 1 1 1 0 8 +147 4 0 
1 1 0 8 +148 1 1 1 2 0 8 +149 4 1 0 2 0 8 +150 2 0 1 1 1 8 +151 4 0 1 1 0 8 +152 1 1 1 1 1 8 +153 2 0 1 1 1 8 +154 4 1 0 1 -1 8 +155 1 1 1 1 1 8 +156 4 0 1 1 0 8 +157 2 0 1 1 1 8 +158 2 0 1 2 0 8 +159 2 0 1 1 1 8 +160 3 1 1 1 0 8 +161 1 1 1 1 1 8 +162 4 0 1 1 0 8 +163 3 1 1 1 0 8 +164 1 1 1 1 1 8 +165 2 0 1 2 0 8 +166 4 0 1 1 0 8 +167 2 0 1 1 1 8 +168 2 0 1 2 0 8 +169 2 0 1 1 1 8 +170 3 1 1 1 0 8 +171 3 1 1 1 0 8 +172 4 0 1 1 0 8 +173 1 1 1 1 1 8 +174 3 1 1 2 -1 8 +175 1 1 1 1 1 8 +176 3 1 1 1 0 8 +177 3 1 1 1 0 8 +178 3 1 1 1 0 8 +179 2 0 1 1 1 8 +180 1 1 1 1 1 8 +181 1 1 1 1 1 8 +182 3 1 1 1 0 8 +183 2 0 1 1 1 8 +184 4 1 0 1 -1 8 +185 4 0 1 2 -1 8 +186 4 0 1 1 0 8 +187 3 1 1 1 0 8 +188 2 0 1 1 1 8 +189 1 1 1 1 1 8 +190 2 0 1 1 1 8 +191 1 1 1 1 1 8 +192 2 0 1 1 1 8 +193 2 0 1 2 0 8 +194 2 0 1 1 1 8 +195 1 1 1 2 0 8 +196 3 1 1 1 0 8 +197 2 0 1 1 1 8 +198 4 0 1 2 -1 8 +199 4 0 1 2 -1 8 +200 2 0 1 1 1 8 +201 3 1 1 2 -1 8 +202 4 0 1 1 0 8 +203 3 1 1 1 0 8 +204 4 0 1 1 0 8 +205 4 1 0 1 -1 8 +206 3 1 1 1 0 8 +207 1 1 1 2 0 8 +208 3 1 1 1 0 8 +209 1 1 1 2 0 8 +210 3 1 1 1 0 8 +211 4 0 1 1 0 8 +212 2 0 1 2 0 8 +213 3 1 1 1 0 8 +214 1 1 1 1 1 8 +215 2 0 1 1 1 8 +216 1 1 1 1 1 8 +217 2 1 0 1 0 8 +218 1 1 1 1 1 8 +219 1 1 1 2 0 8 +220 1 1 1 2 0 8 +221 4 0 1 2 -1 8 +222 3 1 1 1 0 8 +223 1 1 1 1 1 8 +224 3 1 1 1 0 8 +225 4 0 1 2 -1 8 +226 1 1 1 2 0 8 +227 4 1 0 1 -1 8 +228 2 0 1 1 1 8 +229 1 1 1 1 1 8 +230 2 0 1 1 1 8 +231 4 0 1 1 0 8 +232 4 0 1 2 -1 8 +233 2 0 1 1 1 8 +234 1 1 1 1 1 8 +235 4 0 1 1 0 8 +236 1 1 1 1 1 8 +237 3 1 1 2 -1 8 +238 4 0 1 2 -1 8 +239 1 1 1 2 0 8 +240 3 0 0 1 -1 8 +1 2 1 0 1 0 9 +2 3 0 0 2 0 9 +3 3 1 1 1 0 9 +4 1 1 1 1 1 9 +5 4 0 1 1 0 9 +6 1 1 1 1 1 9 +7 3 1 1 1 0 9 +8 4 1 0 1 -1 9 +9 3 1 1 1 0 9 +10 4 0 1 1 0 9 +11 4 1 0 1 -1 9 +12 2 1 0 1 0 9 +13 1 0 0 1 0 9 +14 3 0 0 1 -1 9 +15 3 1 1 1 0 9 +16 2 1 0 1 0 9 +17 2 1 0 1 0 9 +18 1 1 1 1 1 9 +19 2 0 1 2 0 9 +20 4 0 1 2 -1 9 +21 2 1 0 2 1 9 +22 2 1 0 1 0 9 +23 4 1 0 1 -1 9 +24 2 1 0 1 0 9 +25 1 1 1 1 1 9 +26 3 1 1 2 -1 9 +27 2 1 0 1 0 9 +28 3 0 0 1 -1 9 +29 4 1 0 1 -1 9 +30 1 1 1 1 1 9 +31 2 0 1 2 0 9 +32 3 0 0 1 -1 9 +33 3 1 1 1 0 9 +34 3 1 1 1 0 9 +35 2 1 0 1 0 9 +36 2 1 0 1 0 9 +37 2 1 0 1 0 9 +38 4 0 1 1 0 9 +39 1 1 1 1 1 9 +40 2 1 0 1 0 9 +41 4 0 1 1 0 9 +42 3 1 1 1 0 9 +43 1 1 1 1 1 9 +44 4 0 1 1 0 9 +45 4 1 0 1 -1 9 +46 3 1 1 1 0 9 +47 2 0 1 1 1 9 +48 3 1 1 2 -1 9 +49 3 1 1 1 0 9 +50 4 0 1 2 -1 9 +51 2 0 1 1 1 9 +52 4 0 1 1 0 9 +53 4 0 1 1 0 9 +54 1 1 1 1 1 9 +55 1 1 1 1 1 9 +56 1 1 1 2 0 9 +57 4 0 1 1 0 9 +58 1 1 1 2 0 9 +59 4 0 1 1 0 9 +60 1 1 1 1 1 9 +61 4 0 1 1 0 9 +62 2 0 1 1 1 9 +63 3 0 0 1 -1 9 +64 3 1 1 2 -1 9 +65 3 1 1 2 -1 9 +66 4 1 0 1 -1 9 +67 2 0 1 2 0 9 +68 3 0 0 2 0 9 +69 4 0 1 2 -1 9 +70 4 0 1 1 0 9 +71 3 1 1 1 0 9 +72 1 1 1 1 1 9 +73 2 0 1 1 1 9 +74 3 1 1 1 0 9 +75 3 1 1 1 0 9 +76 1 1 1 2 0 9 +77 2 0 1 1 1 9 +78 1 1 1 1 1 9 +79 4 0 1 1 0 9 +80 2 0 1 1 1 9 +81 3 1 1 2 -1 9 +82 2 0 1 1 1 9 +83 2 0 1 1 1 9 +84 1 1 1 1 1 9 +85 2 0 1 2 0 9 +86 3 0 0 1 -1 9 +87 4 0 1 1 0 9 +88 3 0 0 1 -1 9 +89 3 1 1 1 0 9 +90 2 0 1 2 0 9 +91 2 0 1 2 0 9 +92 2 0 1 2 0 9 +93 4 0 1 1 0 9 +94 2 0 1 1 1 9 +95 3 1 1 1 0 9 +96 4 0 1 1 0 9 +97 2 0 1 1 1 9 +98 3 0 0 1 -1 9 +99 4 0 1 1 0 9 +100 3 1 1 1 0 9 +101 2 0 1 1 1 9 +102 4 0 1 1 0 9 +103 2 0 1 1 1 9 +104 4 0 1 1 0 9 +105 2 0 1 2 0 9 +106 4 1 0 2 0 9 +107 1 1 1 1 1 9 +108 4 0 1 1 0 9 +109 4 0 1 1 0 9 +110 2 0 1 2 0 9 +111 2 0 1 1 1 9 +112 3 1 1 1 0 9 +113 2 0 1 2 0 9 +114 3 1 1 1 0 9 +115 3 1 1 2 -1 9 +116 4 0 1 1 0 9 +117 3 1 1 1 0 9 +118 1 0 0 1 0 9 +119 2 0 1 1 1 9 +120 3 1 1 1 
0 9 +121 1 1 1 2 0 9 +122 1 1 1 1 1 9 +123 2 0 1 1 1 9 +124 2 0 1 1 1 9 +125 2 0 1 1 1 9 +126 1 1 1 1 1 9 +127 1 1 1 2 0 9 +128 4 0 1 1 0 9 +129 4 0 1 1 0 9 +130 4 0 1 2 -1 9 +131 2 0 1 2 0 9 +132 1 1 1 1 1 9 +133 1 1 1 1 1 9 +134 2 0 1 1 1 9 +135 1 1 1 2 0 9 +136 3 1 1 1 0 9 +137 2 0 1 1 1 9 +138 3 1 1 1 0 9 +139 1 1 1 1 1 9 +140 1 1 1 1 1 9 +141 4 0 1 1 0 9 +142 1 1 1 1 1 9 +143 1 1 1 1 1 9 +144 4 0 1 1 0 9 +145 3 1 1 1 0 9 +146 4 1 0 1 -1 9 +147 3 1 1 2 -1 9 +148 4 0 1 1 0 9 +149 1 1 1 1 1 9 +150 3 1 1 1 0 9 +151 1 1 1 2 0 9 +152 2 0 1 1 1 9 +153 1 0 0 2 1 9 +154 2 0 1 1 1 9 +155 1 0 0 1 0 9 +156 4 0 1 2 -1 9 +157 2 0 1 1 1 9 +158 4 0 1 1 0 9 +159 1 1 1 1 1 9 +160 3 1 1 2 -1 9 +161 2 0 1 1 1 9 +162 3 1 1 1 0 9 +163 2 0 1 1 1 9 +164 2 0 1 1 1 9 +165 4 0 1 1 0 9 +166 2 0 1 1 1 9 +167 4 0 1 1 0 9 +168 1 1 1 1 1 9 +169 3 1 1 1 0 9 +170 1 1 1 2 0 9 +171 2 0 1 2 0 9 +172 4 0 1 1 0 9 +173 4 0 1 1 0 9 +174 4 0 1 1 0 9 +175 3 1 1 1 0 9 +176 2 0 1 1 1 9 +177 4 0 1 1 0 9 +178 1 1 1 2 0 9 +179 4 0 1 1 0 9 +180 1 1 1 1 1 9 +181 3 1 1 1 0 9 +182 4 0 1 1 0 9 +183 4 0 1 1 0 9 +184 1 1 1 1 1 9 +185 3 0 0 1 -1 9 +186 4 0 1 1 0 9 +187 4 0 1 1 0 9 +188 3 1 1 1 0 9 +189 1 1 1 1 1 9 +190 4 1 0 1 -1 9 +191 3 1 1 1 0 9 +192 4 0 1 2 -1 9 +193 3 1 1 1 0 9 +194 4 0 1 1 0 9 +195 2 0 1 1 1 9 +196 1 1 1 1 1 9 +197 3 1 1 1 0 9 +198 1 1 1 1 1 9 +199 2 0 1 1 1 9 +200 2 0 1 2 0 9 +201 2 0 1 1 1 9 +202 3 1 1 1 0 9 +203 4 1 0 1 -1 9 +204 3 1 1 1 0 9 +205 3 1 1 1 0 9 +206 2 0 1 2 0 9 +207 2 0 1 1 1 9 +208 1 1 1 1 1 9 +209 2 0 1 1 1 9 +210 3 1 1 1 0 9 +211 1 1 1 1 1 9 +212 3 1 1 1 0 9 +213 1 1 1 2 0 9 +214 3 1 1 1 0 9 +215 3 1 1 1 0 9 +216 4 0 1 1 0 9 +217 3 1 1 1 0 9 +218 1 1 1 1 1 9 +219 1 1 1 1 1 9 +220 4 0 1 1 0 9 +221 1 1 1 1 1 9 +222 3 1 1 1 0 9 +223 4 1 0 1 -1 9 +224 3 1 1 1 0 9 +225 1 1 1 1 1 9 +226 4 0 1 1 0 9 +227 1 1 1 1 1 9 +228 1 1 1 2 0 9 +229 1 1 1 1 1 9 +230 4 0 1 1 0 9 +231 2 0 1 1 1 9 +232 1 1 1 1 1 9 +233 1 1 1 1 1 9 +234 1 1 1 1 1 9 +235 1 1 1 1 1 9 +236 1 1 1 2 0 9 +237 3 1 1 1 0 9 +238 1 1 1 1 1 9 +239 2 0 1 1 1 9 +240 1 1 1 1 1 9 +1 1 0 0 1 0 10 +2 1 1 1 1 1 10 +3 1 1 1 1 1 10 +4 4 1 0 1 -1 10 +5 4 1 0 1 -1 10 +6 1 1 1 1 1 10 +7 4 0 1 1 0 10 +8 4 0 1 1 0 10 +9 2 0 1 1 1 10 +10 4 0 1 1 0 10 +11 1 1 1 1 1 10 +12 4 0 1 1 0 10 +13 1 1 1 1 1 10 +14 1 1 1 1 1 10 +15 4 1 0 2 0 10 +16 4 1 0 1 -1 10 +17 1 1 1 2 0 10 +18 1 1 1 1 1 10 +19 4 0 1 1 0 10 +20 4 0 1 1 0 10 +21 1 1 1 1 1 10 +22 3 0 0 1 -1 10 +23 3 1 1 2 -1 10 +24 4 0 1 2 -1 10 +25 2 0 1 1 1 10 +26 4 1 0 1 -1 10 +27 1 1 1 1 1 10 +28 3 1 1 1 0 10 +29 3 0 0 1 -1 10 +30 2 1 0 1 0 10 +31 1 1 1 1 1 10 +32 3 1 1 1 0 10 +33 2 1 0 1 0 10 +34 3 1 1 1 0 10 +35 2 0 1 1 1 10 +36 2 0 1 1 1 10 +37 2 0 1 1 1 10 +38 4 1 0 1 -1 10 +39 3 1 1 2 -1 10 +40 1 1 1 1 1 10 +41 3 0 0 1 -1 10 +42 3 0 0 2 0 10 +43 3 0 0 2 0 10 +44 1 1 1 1 1 10 +45 2 0 1 1 1 10 +46 3 0 0 1 -1 10 +47 3 1 1 1 0 10 +48 2 0 1 1 1 10 +49 4 1 0 1 -1 10 +50 3 0 0 1 -1 10 +51 2 1 0 1 0 10 +52 3 1 1 1 0 10 +53 4 0 1 1 0 10 +54 3 1 1 1 0 10 +55 2 0 1 1 1 10 +56 1 1 1 1 1 10 +57 4 0 1 1 0 10 +58 3 1 1 2 -1 10 +59 1 1 1 1 1 10 +60 3 1 1 1 0 10 +61 4 0 1 1 0 10 +62 3 1 1 1 0 10 +63 2 0 1 1 1 10 +64 2 0 1 1 1 10 +65 2 0 1 2 0 10 +66 1 1 1 1 1 10 +67 3 0 0 2 0 10 +68 1 1 1 1 1 10 +69 2 0 1 2 0 10 +70 4 0 1 1 0 10 +71 2 0 1 1 1 10 +72 1 1 1 2 0 10 +73 1 1 1 1 1 10 +74 2 0 1 1 1 10 +75 1 1 1 1 1 10 +76 4 0 1 1 0 10 +77 4 0 1 1 0 10 +78 4 0 1 1 0 10 +79 1 1 1 1 1 10 +80 2 0 1 1 1 10 +81 2 0 1 2 0 10 +82 3 1 1 2 -1 10 +83 2 1 0 1 0 10 +84 3 0 0 2 0 10 +85 3 0 0 2 0 10 +86 3 0 0 1 -1 10 +87 2 0 1 2 0 10 +88 4 0 1 2 
-1 10 +89 2 0 1 1 1 10 +90 4 0 1 1 0 10 +91 4 1 0 1 -1 10 +92 3 1 1 1 0 10 +93 4 1 0 2 0 10 +94 3 1 1 1 0 10 +95 1 1 1 1 1 10 +96 3 1 1 1 0 10 +97 2 0 1 1 1 10 +98 2 0 1 1 1 10 +99 2 0 1 1 1 10 +100 1 1 1 1 1 10 +101 4 0 1 1 0 10 +102 2 0 1 1 1 10 +103 1 1 1 1 1 10 +104 2 0 1 1 1 10 +105 1 1 1 1 1 10 +106 3 1 1 1 0 10 +107 4 1 0 1 -1 10 +108 2 0 1 1 1 10 +109 1 1 1 2 0 10 +110 3 1 1 1 0 10 +111 4 0 1 1 0 10 +112 4 0 1 1 0 10 +113 2 0 1 2 0 10 +114 3 0 0 1 -1 10 +115 1 1 1 2 0 10 +116 1 1 1 1 1 10 +117 2 0 1 1 1 10 +118 4 0 1 1 0 10 +119 4 0 1 1 0 10 +120 4 0 1 1 0 10 +121 4 0 1 2 -1 10 +122 2 0 1 2 0 10 +123 4 0 1 1 0 10 +124 3 1 1 1 0 10 +125 2 0 1 2 0 10 +126 3 1 1 1 0 10 +127 1 1 1 1 1 10 +128 4 0 1 1 0 10 +129 2 0 1 1 1 10 +130 4 0 1 2 -1 10 +131 4 0 1 1 0 10 +132 1 1 1 1 1 10 +133 3 0 0 1 -1 10 +134 4 0 1 1 0 10 +135 1 1 1 1 1 10 +136 2 0 1 1 1 10 +137 1 1 1 1 1 10 +138 1 1 1 1 1 10 +139 4 0 1 1 0 10 +140 3 1 1 1 0 10 +141 2 0 1 1 1 10 +142 4 0 1 1 0 10 +143 1 1 1 2 0 10 +144 2 0 1 1 1 10 +145 3 1 1 1 0 10 +146 4 0 1 1 0 10 +147 1 1 1 1 1 10 +148 2 0 1 1 1 10 +149 1 1 1 1 1 10 +150 3 0 0 1 -1 10 +151 2 0 1 1 1 10 +152 2 0 1 1 1 10 +153 3 1 1 1 0 10 +154 3 0 0 2 0 10 +155 2 0 1 2 0 10 +156 2 0 1 1 1 10 +157 4 0 1 1 0 10 +158 3 1 1 1 0 10 +159 4 1 0 2 0 10 +160 3 1 1 1 0 10 +161 1 1 1 1 1 10 +162 2 0 1 1 1 10 +163 1 1 1 1 1 10 +164 2 0 1 1 1 10 +165 1 1 1 1 1 10 +166 4 0 1 1 0 10 +167 3 1 1 1 0 10 +168 3 1 1 1 0 10 +169 1 0 0 1 0 10 +170 3 1 1 1 0 10 +171 3 1 1 1 0 10 +172 2 1 0 1 0 10 +173 4 0 1 1 0 10 +174 1 1 1 1 1 10 +175 2 0 1 1 1 10 +176 4 0 1 2 -1 10 +177 3 1 1 1 0 10 +178 1 1 1 1 1 10 +179 1 1 1 1 1 10 +180 1 1 1 2 0 10 +181 1 1 1 1 1 10 +182 1 1 1 1 1 10 +183 4 0 1 2 -1 10 +184 4 1 0 1 -1 10 +185 2 0 1 1 1 10 +186 1 0 0 1 0 10 +187 2 0 1 2 0 10 +188 2 0 1 1 1 10 +189 3 1 1 1 0 10 +190 4 1 0 1 -1 10 +191 2 0 1 1 1 10 +192 4 0 1 1 0 10 +193 1 1 1 2 0 10 +194 2 0 1 1 1 10 +195 3 1 1 1 0 10 +196 2 0 1 1 1 10 +197 3 1 1 1 0 10 +198 4 0 1 1 0 10 +199 3 1 1 1 0 10 +200 2 0 1 1 1 10 +201 1 1 1 1 1 10 +202 3 1 1 1 0 10 +203 1 1 1 2 0 10 +204 3 1 1 1 0 10 +205 4 0 1 1 0 10 +206 3 1 1 2 -1 10 +207 1 1 1 2 0 10 +208 1 1 1 1 1 10 +209 1 1 1 1 1 10 +210 4 0 1 1 0 10 +211 1 1 1 1 1 10 +212 3 1 1 1 0 10 +213 2 0 1 2 0 10 +214 2 0 1 2 0 10 +215 4 0 1 1 0 10 +216 3 1 1 1 0 10 +217 4 0 1 1 0 10 +218 1 1 1 1 1 10 +219 4 0 1 1 0 10 +220 1 1 1 2 0 10 +221 3 0 0 1 -1 10 +222 2 0 1 2 0 10 +223 2 0 1 1 1 10 +224 1 1 1 1 1 10 +225 4 0 1 1 0 10 +226 1 1 1 2 0 10 +227 2 0 1 1 1 10 +228 4 0 1 1 0 10 +229 2 0 1 1 1 10 +230 3 1 1 1 0 10 +231 3 1 1 1 0 10 +232 3 1 1 1 0 10 +233 3 1 1 1 0 10 +234 2 1 0 1 0 10 +235 3 1 1 2 -1 10 +236 1 1 1 1 1 10 +237 3 0 0 1 -1 10 +238 4 0 1 1 0 10 +239 4 0 1 1 0 10 +240 2 0 1 1 1 10 diff --git a/Python/hbayesdm/common/extdata/igt_exampleData.txt b/Python/hbayesdm/common/extdata/igt_exampleData.txt new file mode 100644 index 00000000..3a6252af --- /dev/null +++ b/Python/hbayesdm/common/extdata/igt_exampleData.txt @@ -0,0 +1,401 @@ +trial choice gain loss subjID +1 3 50 0 1001 +2 2 100 0 1001 +3 3 50 0 1001 +4 4 50 0 1001 +5 4 50 0 1001 +6 4 50 0 1001 +7 4 50 0 1001 +8 3 50 -50 1001 +9 4 50 0 1001 +10 4 50 0 1001 +11 3 50 0 1001 +12 4 50 0 1001 +13 4 50 0 1001 +14 4 50 0 1001 +15 4 50 -250 1001 +16 4 50 0 1001 +17 2 100 0 1001 +18 4 50 0 1001 +19 1 100 0 1001 +20 2 100 0 1001 +21 2 100 0 1001 +22 2 100 0 1001 +23 3 50 -50 1001 +24 2 100 0 1001 +25 4 50 0 1001 +26 1 100 0 1001 +27 1 100 -150 1001 +28 2 100 0 1001 +29 2 100 0 1001 +30 2 100 -1250 1001 +31 1 100 0 1001 +32 4 50 0 
1001 +33 1 100 -300 1001 +34 4 50 0 1001 +35 1 100 0 1001 +36 4 50 0 1001 +37 1 100 -200 1001 +38 2 100 0 1001 +39 1 100 0 1001 +40 4 50 0 1001 +41 4 50 0 1001 +42 2 100 0 1001 +43 4 50 0 1001 +44 4 50 -250 1001 +45 4 50 0 1001 +46 2 100 0 1001 +47 4 50 0 1001 +48 1 100 -250 1001 +49 4 50 0 1001 +50 4 50 0 1001 +51 4 50 0 1001 +52 3 50 0 1001 +53 3 50 -50 1001 +54 3 50 0 1001 +55 3 50 -50 1001 +56 3 50 -50 1001 +57 2 100 0 1001 +58 2 100 -1250 1001 +59 4 50 0 1001 +60 4 50 0 1001 +61 4 50 0 1001 +62 4 50 -250 1001 +63 4 50 0 1001 +64 4 50 0 1001 +65 3 50 0 1001 +66 3 50 -25 1001 +67 3 50 -75 1001 +68 4 50 0 1001 +69 4 50 0 1001 +70 4 50 0 1001 +71 4 50 -250 1001 +72 4 50 0 1001 +73 4 50 0 1001 +74 4 50 0 1001 +75 4 50 0 1001 +76 4 50 0 1001 +77 4 50 0 1001 +78 3 50 0 1001 +79 4 50 0 1001 +80 4 50 0 1001 +81 4 50 0 1001 +82 4 50 0 1001 +83 4 50 0 1001 +84 4 50 0 1001 +85 4 50 0 1001 +86 4 50 0 1001 +87 4 50 -250 1001 +88 4 50 0 1001 +89 4 50 0 1001 +90 4 50 0 1001 +91 4 50 0 1001 +92 4 50 0 1001 +93 4 50 0 1001 +94 4 50 0 1001 +95 4 50 0 1001 +96 4 50 0 1001 +97 4 50 -250 1001 +98 4 50 0 1001 +99 4 50 0 1001 +100 4 50 0 1001 +1 3 50 0 1002 +2 3 50 0 1002 +3 3 50 -50 1002 +4 3 50 0 1002 +5 3 50 -50 1002 +6 1 100 0 1002 +7 3 50 0 1002 +8 2 100 0 1002 +9 3 50 -50 1002 +10 3 50 0 1002 +11 4 50 0 1002 +12 3 50 -50 1002 +13 3 50 -50 1002 +14 1 100 0 1002 +15 1 100 -150 1002 +16 3 50 0 1002 +17 4 50 0 1002 +18 4 50 0 1002 +19 4 50 0 1002 +20 4 50 0 1002 +21 4 50 0 1002 +22 3 50 -25 1002 +23 4 50 0 1002 +24 4 50 0 1002 +25 3 50 -75 1002 +26 3 50 0 1002 +27 4 50 0 1002 +28 4 50 -250 1002 +29 4 50 0 1002 +30 4 50 0 1002 +31 4 50 0 1002 +32 4 50 0 1002 +33 4 50 0 1002 +34 4 50 0 1002 +35 4 50 0 1002 +36 4 50 0 1002 +37 4 50 0 1002 +38 4 50 -250 1002 +39 1 100 0 1002 +40 3 50 0 1002 +41 3 50 0 1002 +42 3 50 -25 1002 +43 3 50 -75 1002 +44 1 100 -300 1002 +45 1 100 0 1002 +46 3 50 0 1002 +47 4 50 0 1002 +48 4 50 0 1002 +49 4 50 0 1002 +50 4 50 0 1002 +51 4 50 0 1002 +52 4 50 0 1002 +53 4 50 0 1002 +54 4 50 0 1002 +55 4 50 -250 1002 +56 4 50 0 1002 +57 4 50 0 1002 +58 4 50 0 1002 +59 4 50 0 1002 +60 4 50 0 1002 +61 4 50 -250 1002 +62 4 50 0 1002 +63 4 50 0 1002 +64 4 50 0 1002 +65 4 50 0 1002 +66 4 50 0 1002 +67 4 50 0 1002 +68 4 50 0 1002 +69 4 50 0 1002 +70 4 50 0 1002 +71 4 50 0 1002 +72 4 50 0 1002 +73 4 50 0 1002 +74 4 50 0 1002 +75 4 50 0 1002 +76 1 100 -200 1002 +77 4 50 -250 1002 +78 4 50 0 1002 +79 4 50 0 1002 +80 4 50 0 1002 +81 4 50 0 1002 +82 4 50 0 1002 +83 4 50 0 1002 +84 4 50 0 1002 +85 4 50 0 1002 +86 4 50 0 1002 +87 4 50 -250 1002 +88 4 50 0 1002 +89 4 50 0 1002 +90 4 50 0 1002 +91 4 50 0 1002 +92 4 50 0 1002 +93 4 50 0 1002 +94 4 50 0 1002 +95 4 50 0 1002 +96 4 50 -250 1002 +97 4 50 0 1002 +98 4 50 0 1002 +99 4 50 0 1002 +100 4 50 0 1002 +1 4 50 0 1003 +2 4 50 0 1003 +3 4 50 0 1003 +4 4 50 0 1003 +5 4 50 0 1003 +6 4 50 0 1003 +7 2 100 0 1003 +8 4 50 0 1003 +9 2 100 0 1003 +10 4 50 0 1003 +11 4 50 0 1003 +12 4 50 -250 1003 +13 4 50 0 1003 +14 2 100 0 1003 +15 1 100 0 1003 +16 3 50 0 1003 +17 2 100 0 1003 +18 1 100 0 1003 +19 2 100 0 1003 +20 2 100 0 1003 +21 2 100 0 1003 +22 2 100 0 1003 +23 2 100 -1250 1003 +24 2 100 0 1003 +25 1 100 -150 1003 +26 4 50 0 1003 +27 2 100 0 1003 +28 2 100 0 1003 +29 4 50 0 1003 +30 2 100 0 1003 +31 4 50 0 1003 +32 1 100 0 1003 +33 1 100 -300 1003 +34 4 50 0 1003 +35 4 50 0 1003 +36 3 50 0 1003 +37 4 50 0 1003 +38 4 50 0 1003 +39 4 50 0 1003 +40 4 50 -250 1003 +41 2 100 -1250 1003 +42 3 50 -50 1003 +43 1 100 0 1003 +44 3 50 0 1003 +45 3 50 -50 1003 +46 4 50 
0 1003 +47 4 50 0 1003 +48 4 50 0 1003 +49 3 50 0 1003 +50 4 50 0 1003 +51 2 100 0 1003 +52 4 50 0 1003 +53 1 100 -200 1003 +54 4 50 0 1003 +55 4 50 0 1003 +56 2 100 0 1003 +57 4 50 0 1003 +58 4 50 -250 1003 +59 4 50 0 1003 +60 1 100 0 1003 +61 1 100 -250 1003 +62 2 100 0 1003 +63 4 50 0 1003 +64 3 50 -50 1003 +65 1 100 -350 1003 +66 4 50 0 1003 +67 4 50 0 1003 +68 3 50 0 1003 +69 3 50 -50 1003 +70 4 50 0 1003 +71 3 50 -50 1003 +72 3 50 0 1003 +73 4 50 -250 1003 +74 3 50 -25 1003 +75 1 100 0 1003 +76 1 100 -350 1003 +77 2 100 0 1003 +78 3 50 -75 1003 +79 2 100 0 1003 +80 2 100 0 1003 +81 3 50 0 1003 +82 2 100 -1250 1003 +83 3 50 0 1003 +84 3 50 0 1003 +85 4 50 0 1003 +86 3 50 -25 1003 +87 4 50 0 1003 +88 1 100 0 1003 +89 3 50 -75 1003 +90 3 50 0 1003 +91 3 50 -50 1003 +92 3 50 0 1003 +93 3 50 0 1003 +94 3 50 0 1003 +95 3 50 -50 1003 +96 1 100 -250 1003 +97 3 50 -25 1003 +98 3 50 -50 1003 +99 3 50 0 1003 +100 4 50 0 1003 +1 3 50 0 1004 +2 4 50 0 1004 +3 1 100 0 1004 +4 4 50 0 1004 +5 4 50 0 1004 +6 4 50 0 1004 +7 4 50 0 1004 +8 1 100 0 1004 +9 3 50 0 1004 +10 3 50 -50 1004 +11 1 100 -150 1004 +12 1 100 0 1004 +13 1 100 -300 1004 +14 4 50 0 1004 +15 1 100 0 1004 +16 4 50 0 1004 +17 4 50 0 1004 +18 2 100 0 1004 +19 4 50 0 1004 +20 4 50 -250 1004 +21 1 100 -200 1004 +22 2 100 0 1004 +23 3 50 0 1004 +24 4 50 0 1004 +25 2 100 0 1004 +26 2 100 0 1004 +27 2 100 0 1004 +28 2 100 0 1004 +29 2 100 0 1004 +30 2 100 0 1004 +31 2 100 -1250 1004 +32 3 50 -50 1004 +33 4 50 0 1004 +34 1 100 0 1004 +35 2 100 0 1004 +36 3 50 0 1004 +37 1 100 -250 1004 +38 3 50 -50 1004 +39 3 50 0 1004 +40 3 50 -50 1004 +41 4 50 0 1004 +42 4 50 0 1004 +43 4 50 0 1004 +44 4 50 0 1004 +45 4 50 0 1004 +46 4 50 0 1004 +47 4 50 0 1004 +48 4 50 -250 1004 +49 1 100 -350 1004 +50 4 50 0 1004 +51 4 50 0 1004 +52 4 50 0 1004 +53 3 50 -50 1004 +54 4 50 0 1004 +55 3 50 0 1004 +56 3 50 -25 1004 +57 4 50 0 1004 +58 4 50 0 1004 +59 4 50 0 1004 +60 4 50 0 1004 +61 4 50 -250 1004 +62 3 50 -75 1004 +63 3 50 0 1004 +64 3 50 0 1004 +65 4 50 0 1004 +66 3 50 0 1004 +67 2 100 0 1004 +68 4 50 0 1004 +69 1 100 0 1004 +70 4 50 0 1004 +71 3 50 -25 1004 +72 3 50 -75 1004 +73 4 50 0 1004 +74 4 50 0 1004 +75 3 50 0 1004 +76 4 50 -250 1004 +77 3 50 -50 1004 +78 3 50 0 1004 +79 3 50 0 1004 +80 2 100 0 1004 +81 4 50 0 1004 +82 4 50 0 1004 +83 3 50 0 1004 +84 3 50 -50 1004 +85 2 100 0 1004 +86 2 100 -1250 1004 +87 3 50 -25 1004 +88 2 100 0 1004 +89 3 50 -50 1004 +90 3 50 0 1004 +91 3 50 0 1004 +92 4 50 0 1004 +93 4 50 0 1004 +94 4 50 0 1004 +95 3 50 -75 1004 +96 4 50 0 1004 +97 4 50 0 1004 +98 3 50 -50 1004 +99 4 50 0 1004 +100 1 100 -350 1004 \ No newline at end of file diff --git a/Python/hbayesdm/common/extdata/peer_exampleData.txt b/Python/hbayesdm/common/extdata/peer_exampleData.txt new file mode 100644 index 00000000..d4e222eb --- /dev/null +++ b/Python/hbayesdm/common/extdata/peer_exampleData.txt @@ -0,0 +1,361 @@ +trial condition p_gamble risky_Lpayoff risky_Hpayoff safe_Lpayoff safe_Hpayoff risky_color total_presses choice bonus subjID +1 1 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 1 +2 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +3 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +4 3 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 1 +5 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +6 3 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 1 +7 3 0.7 0.9 38.6 18 19.6 orange 1 1 0.25 1 +8 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +9 1 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +10 1 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 +11 0 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +12 0 0.5 1.7 56.8 23.1 
33.2 orange 1 1 0.25 1 +13 1 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 +14 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +15 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +16 2 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +17 2 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 +18 1 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +19 0 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +20 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +21 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +22 1 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 +23 0 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +24 3 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 +25 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +26 1 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +27 2 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 1 +28 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +29 1 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 +30 2 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +31 2 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 1 +32 3 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +33 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +34 0 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 +35 0 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 +36 2 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +37 3 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +38 0 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 1 +39 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +40 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +41 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +42 0 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +43 1 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 1 +44 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +45 3 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +46 0 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 1 +47 2 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 +48 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +49 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +50 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +51 3 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +52 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +53 3 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 1 +54 0 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 1 +55 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +56 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +57 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +58 2 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 +59 2 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +60 2 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +61 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +62 0 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +63 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +64 3 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +65 0 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 1 +66 0 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 +67 3 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 1 +68 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 1 +69 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 1 +70 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +71 2 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 1 +72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 1 +1 1 0.7 1.2 51.1 23 24.4 orange 1 0 2.68 2 +2 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +3 3 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +4 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +5 1 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 +6 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +7 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +8 3 0.5 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +9 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +10 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +11 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +12 0 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 +13 0 0.5 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +14 3 0.8 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +15 0 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 +16 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +17 0 0.4 1.3 
50.8 24.9 25.5 orange 1 0 2.68 2 +18 2 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +19 0 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 +20 3 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 +21 1 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +22 2 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 +23 1 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 +24 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +25 0 0.7 1.2 51.1 23 24.4 orange 1 1 2.68 2 +26 0 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +27 2 0.6 1.2 51.1 23 24.4 orange 1 0 2.68 2 +28 1 0.4 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +29 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +30 3 0.4 1.2 51.1 23 24.4 orange 1 1 2.68 2 +31 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +32 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +33 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +34 1 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 +35 2 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +36 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +37 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +38 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +39 0 0.8 1.2 51.1 23 24.4 orange 1 0 2.68 2 +40 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +41 0 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +42 1 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 +43 0 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +44 2 0.8 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +45 3 0.7 1.2 51.1 23 24.4 orange 1 1 2.68 2 +46 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +47 3 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 +48 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +49 2 0.5 1.2 51.1 23 24.4 orange 1 0 2.68 2 +50 1 0.5 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +51 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +52 2 0.6 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +53 1 0.8 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +54 2 0.6 1.7 56.8 23.1 33.2 orange 1 0 2.68 2 +55 2 0.7 1.2 51.1 23 24.4 orange 1 0 2.68 2 +56 3 0.6 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +57 2 0.9 1.2 51.1 23 24.4 orange 1 1 2.68 2 +58 2 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +59 1 0.9 1.2 51.1 23 24.4 orange 1 0 2.68 2 +60 2 0.4 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +61 3 0.8 1.2 51.1 23 24.4 orange 1 1 2.68 2 +62 0 0.9 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +63 0 0.7 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +64 3 0.8 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +65 2 0.9 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +66 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +67 0 0.7 1.3 50.8 24.9 25.5 orange 1 1 2.68 2 +68 2 0.4 1.2 51.1 23 24.4 orange 1 0 2.68 2 +69 0 0.6 1.2 51.1 23 24.4 orange 1 1 2.68 2 +70 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 2.68 2 +71 3 0.5 1.2 51.1 23 24.4 orange 1 1 2.68 2 +72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 2.68 2 +1 0 0.8 1.2 51.1 23 24.4 orange 2 0 0.25 3 +2 1 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +3 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +4 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +5 3 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +6 0 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 +7 3 0.6 1.2 51.1 23 24.4 orange 1 1 0.25 3 +8 0 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 +9 2 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +10 0 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +11 2 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 +12 2 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +13 1 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 +14 0 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +15 2 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +16 1 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +17 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +18 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +19 0 0.4 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +20 3 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 +21 3 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +22 2 0.7 
1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +23 3 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +24 0 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +25 2 0.6 1.2 51.1 23 24.4 orange 1 1 0.25 3 +26 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +27 1 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +28 2 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +29 0 0.6 1.2 51.1 23 24.4 orange 2 1 0.25 3 +30 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +31 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +32 2 0.9 1.2 51.1 23 24.4 orange 1 0 0.25 3 +33 0 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 +34 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +35 1 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +36 3 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 +37 2 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 +38 2 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +39 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +40 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +41 3 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 +42 0 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +43 0 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +44 3 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +45 3 0.7 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +46 2 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +47 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +48 3 0.8 1.2 51.1 23 24.4 orange 1 1 0.25 3 +49 1 0.6 1.2 51.1 23 24.4 orange 1 1 0.25 3 +50 1 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 +51 1 0.5 1.2 51.1 23 24.4 orange 1 0 0.25 3 +52 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +53 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +54 1 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +55 1 0.9 1.2 51.1 23 24.4 orange 1 1 0.25 3 +56 3 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 3 +57 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +58 2 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 +59 0 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +60 2 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 +61 1 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 +62 1 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +63 0 0.7 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +64 2 0.4 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +65 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +66 0 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +67 0 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +68 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 3 +69 3 0.4 1.2 51.1 23 24.4 orange 1 0 0.25 3 +70 0 0.7 1.2 51.1 23 24.4 orange 1 1 0.25 3 +71 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 3 +72 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 3 +1 1 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 4 +2 1 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +3 1 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +4 3 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 4 +5 2 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +6 3 0.8 0.9 38.6 18 19.6 orange 1 1 0.25 4 +7 3 0.7 0.9 38.6 18 19.6 orange 1 1 0.25 4 +8 0 0.6 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +9 1 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +10 1 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 +11 0 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +12 0 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +13 1 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 +14 3 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +15 2 0.5 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +16 2 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +17 2 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 +18 1 0.8 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +19 0 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +20 0 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +21 3 0.9 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +22 1 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 +23 0 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +24 3 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 +25 3 0.4 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +26 1 0.5 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +27 2 
0.5 0.9 38.6 18 19.6 orange 1 0 0.25 4 +28 1 0.6 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +29 1 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 +30 2 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +31 2 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 4 +32 3 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +33 0 0.8 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +34 0 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 +35 0 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 +36 2 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +37 3 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +38 0 0.6 0.9 38.6 18 19.6 orange 1 0 0.25 4 +39 2 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +40 2 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +41 0 0.4 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +42 0 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +43 1 0.5 0.9 38.6 18 19.6 orange 1 0 0.25 4 +44 1 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +45 3 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +46 0 0.8 0.9 38.6 18 19.6 orange 1 0 0.25 4 +47 2 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 +48 1 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +49 1 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +50 1 0.6 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +51 3 0.5 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +52 3 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +53 3 0.4 0.9 38.6 18 19.6 orange 1 0 0.25 4 +54 0 0.5 0.9 38.6 18 19.6 orange 1 1 0.25 4 +55 1 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +56 2 0.5 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +57 2 0.7 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +58 2 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 +59 2 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +60 2 0.7 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +61 3 0.9 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +62 0 0.9 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +63 1 0.9 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +64 3 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +65 0 0.4 1.3 50.8 24.9 25.5 orange 1 1 0.25 4 +66 0 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 +67 3 0.9 0.9 38.6 18 19.6 orange 1 0 0.25 4 +68 2 0.8 1.3 50.8 24.9 25.5 orange 1 0 0.25 4 +69 0 0.8 1.7 56.8 23.1 33.2 orange 1 0 0.25 4 +70 3 0.7 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +71 2 0.7 0.9 38.6 18 19.6 orange 1 0 0.25 4 +72 3 0.6 1.7 56.8 23.1 33.2 orange 1 1 0.25 4 +1 1 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +2 1 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +3 1 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +4 1 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +5 2 0.7 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +6 1 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +7 3 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +8 0 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +9 2 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +10 3 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +11 2 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +12 0 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +13 1 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +14 3 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +15 2 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +16 2 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +17 2 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +18 0 0.7 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +19 0 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +20 2 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +21 2 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +22 1 0.7 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +23 2 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +24 0 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +25 1 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +26 3 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +27 0 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +28 1 0.6 1.6 55.5 26.6 28.3 darkcyan 2 0 0.25 5 +29 3 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +30 0 0.8 1.2 51.1 23 24.4 darkcyan 
1 1 0.25 5 +31 0 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +32 1 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +33 1 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +34 0 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +35 2 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +36 3 0.5 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +37 3 0.6 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +38 3 0.8 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +39 3 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +40 3 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +41 1 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +42 2 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +43 1 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +44 3 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +45 3 0.7 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +46 0 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +47 1 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +48 2 0.9 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +49 2 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +50 0 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +51 2 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +52 3 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +53 0 0.6 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +54 1 0.4 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +55 0 0.7 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +56 2 0.6 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +57 3 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +58 3 0.9 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +59 2 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +60 0 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +61 2 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +62 0 0.8 1.3 50.8 24.9 25.5 darkcyan 1 1 0.25 5 +63 0 0.4 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +64 0 0.7 1.6 55.5 26.6 28.3 darkcyan 1 1 0.25 5 +65 3 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +66 3 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +67 1 0.4 1.6 55.5 26.6 28.3 darkcyan 1 0 0.25 5 +68 2 0.5 1.2 51.1 23 24.4 darkcyan 1 0 0.25 5 +69 0 0.5 1.3 50.8 24.9 25.5 darkcyan 1 0 0.25 5 +70 3 0.9 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +71 1 0.7 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 +72 1 0.8 1.2 51.1 23 24.4 darkcyan 1 1 0.25 5 diff --git a/Python/hbayesdm/common/extdata/prl_exampleData.txt b/Python/hbayesdm/common/extdata/prl_exampleData.txt new file mode 100644 index 00000000..21abeae7 --- /dev/null +++ b/Python/hbayesdm/common/extdata/prl_exampleData.txt @@ -0,0 +1,2001 @@ +subjID trial choice outcome +1 1 1 1 +1 2 2 1 +1 3 1 1 +1 4 1 1 +1 5 1 1 +1 6 1 1 +1 7 1 1 +1 8 1 1 +1 9 1 -1 +1 10 2 1 +1 11 2 1 +1 12 2 1 +1 13 2 1 +1 14 2 -1 +1 15 1 1 +1 16 2 1 +1 17 1 -1 +1 18 2 1 +1 19 2 -1 +1 20 2 -1 +1 21 1 1 +1 22 1 -1 +1 23 1 -1 +1 24 2 -1 +1 25 2 1 +1 26 2 1 +1 27 2 -1 +1 28 1 -1 +1 29 2 1 +1 30 2 1 +1 31 2 1 +1 32 2 1 +1 33 2 -1 +1 34 2 1 +1 35 2 -1 +1 36 2 1 +1 37 2 1 +1 38 1 -1 +1 39 1 -1 +1 40 2 -1 +1 41 2 -1 +1 42 2 -1 +1 43 1 1 +1 44 1 1 +1 45 1 -1 +1 46 1 1 +1 47 1 1 +1 48 1 1 +1 49 1 -1 +1 50 1 1 +1 51 1 1 +1 52 1 1 +1 53 1 1 +1 54 1 1 +1 55 1 1 +1 56 1 -1 +1 57 1 -1 +1 58 2 1 +1 59 2 1 +1 60 2 -1 +1 61 1 1 +1 62 2 -1 +1 63 1 1 +1 64 1 1 +1 65 1 1 +1 66 1 1 +1 67 1 1 +1 68 1 1 +1 69 1 -1 +1 70 2 1 +1 71 1 -1 +1 72 1 1 +1 73 1 -1 +1 74 2 1 +1 75 2 1 +1 76 2 -1 +1 77 1 1 +1 78 2 1 +1 79 1 1 +1 80 1 -1 +1 81 1 1 +1 82 1 1 +1 83 2 -1 +1 84 1 -1 +1 85 2 -1 +1 86 2 1 +1 87 2 -1 +1 88 1 1 +1 89 1 1 +1 90 1 -1 +1 91 1 -1 +1 92 2 1 +1 93 2 1 +1 94 1 1 +1 95 2 1 +1 96 2 1 +1 97 2 1 +1 98 2 -1 +1 99 1 -1 +1 100 2 1 +2 1 1 1 +2 2 1 -1 +2 3 1 -1 +2 4 2 -1 +2 5 2 -1 +2 6 1 1 +2 7 2 -1 +2 8 2 1 +2 9 1 -1 +2 10 1 1 +2 11 1 -1 +2 12 2 -1 +2 13 2 1 +2 14 2 -1 +2 15 1 1 +2 16 2 1 +2 17 1 1 +2 18 1 1 +2 19 2 1 +2 20 1 1 
+2 21 1 1 +2 22 2 1 +2 23 1 1 +2 24 1 -1 +2 25 2 -1 +2 26 1 1 +2 27 1 -1 +2 28 1 1 +2 29 1 -1 +2 30 2 1 +2 31 1 -1 +2 32 1 1 +2 33 2 -1 +2 34 1 -1 +2 35 2 1 +2 36 2 1 +2 37 2 1 +2 38 1 -1 +2 39 2 1 +2 40 1 -1 +2 41 2 -1 +2 42 2 1 +2 43 2 -1 +2 44 2 -1 +2 45 2 1 +2 46 2 -1 +2 47 2 1 +2 48 2 1 +2 49 2 1 +2 50 2 1 +2 51 2 1 +2 52 2 1 +2 53 2 1 +2 54 2 1 +2 55 2 1 +2 56 2 1 +2 57 2 -1 +2 58 2 -1 +2 59 2 1 +2 60 1 -1 +2 61 2 1 +2 62 2 1 +2 63 2 -1 +2 64 2 -1 +2 65 1 1 +2 66 1 1 +2 67 1 1 +2 68 1 -1 +2 69 1 -1 +2 70 1 -1 +2 71 2 -1 +2 72 2 1 +2 73 1 -1 +2 74 1 1 +2 75 2 1 +2 76 2 -1 +2 77 1 1 +2 78 1 -1 +2 79 1 1 +2 80 2 -1 +2 81 1 -1 +2 82 1 -1 +2 83 1 1 +2 84 1 1 +2 85 1 1 +2 86 1 1 +2 87 2 -1 +2 88 1 1 +2 89 1 1 +2 90 1 -1 +2 91 1 -1 +2 92 1 -1 +2 93 1 1 +2 94 2 1 +2 95 1 -1 +2 96 2 -1 +2 97 1 -1 +2 98 1 -1 +2 99 1 1 +2 100 1 1 +3 1 2 -1 +3 2 1 -1 +3 3 1 1 +3 4 1 1 +3 5 2 1 +3 6 2 1 +3 7 1 -1 +3 8 2 1 +3 9 2 -1 +3 10 2 1 +3 11 2 1 +3 12 2 1 +3 13 2 1 +3 14 1 -1 +3 15 2 -1 +3 16 2 -1 +3 17 1 1 +3 18 1 -1 +3 19 2 1 +3 20 2 -1 +3 21 2 1 +3 22 1 -1 +3 23 2 1 +3 24 2 -1 +3 25 1 1 +3 26 1 1 +3 27 2 -1 +3 28 1 1 +3 29 1 -1 +3 30 2 1 +3 31 1 -1 +3 32 2 -1 +3 33 2 -1 +3 34 1 1 +3 35 2 1 +3 36 2 1 +3 37 2 -1 +3 38 2 1 +3 39 2 -1 +3 40 1 -1 +3 41 1 1 +3 42 1 1 +3 43 1 1 +3 44 1 1 +3 45 1 1 +3 46 1 1 +3 47 1 1 +3 48 1 -1 +3 49 1 1 +3 50 2 -1 +3 51 1 -1 +3 52 1 -1 +3 53 1 -1 +3 54 2 1 +3 55 2 -1 +3 56 2 1 +3 57 2 -1 +3 58 1 -1 +3 59 2 1 +3 60 1 -1 +3 61 2 -1 +3 62 1 1 +3 63 1 1 +3 64 2 1 +3 65 2 -1 +3 66 1 1 +3 67 1 1 +3 68 1 -1 +3 69 2 1 +3 70 2 1 +3 71 2 1 +3 72 2 -1 +3 73 2 1 +3 74 1 1 +3 75 2 -1 +3 76 2 -1 +3 77 1 -1 +3 78 1 1 +3 79 1 1 +3 80 2 1 +3 81 2 -1 +3 82 1 1 +3 83 1 1 +3 84 1 1 +3 85 1 -1 +3 86 2 1 +3 87 2 -1 +3 88 1 1 +3 89 1 1 +3 90 2 1 +3 91 2 -1 +3 92 1 1 +3 93 1 -1 +3 94 1 -1 +3 95 1 -1 +3 96 2 1 +3 97 2 -1 +3 98 1 -1 +3 99 2 1 +3 100 2 1 +4 1 2 -1 +4 2 1 1 +4 3 1 -1 +4 4 1 -1 +4 5 1 -1 +4 6 2 -1 +4 7 2 -1 +4 8 2 -1 +4 9 1 -1 +4 10 2 -1 +4 11 1 -1 +4 12 1 -1 +4 13 2 1 +4 14 2 -1 +4 15 1 1 +4 16 1 -1 +4 17 2 -1 +4 18 2 1 +4 19 2 1 +4 20 1 1 +4 21 2 1 +4 22 1 1 +4 23 2 -1 +4 24 1 1 +4 25 1 1 +4 26 1 -1 +4 27 2 -1 +4 28 1 1 +4 29 1 1 +4 30 2 1 +4 31 2 1 +4 32 2 -1 +4 33 2 1 +4 34 2 -1 +4 35 2 1 +4 36 2 1 +4 37 2 1 +4 38 1 1 +4 39 1 -1 +4 40 1 1 +4 41 1 -1 +4 42 2 1 +4 43 2 -1 +4 44 1 -1 +4 45 2 -1 +4 46 1 1 +4 47 1 -1 +4 48 1 -1 +4 49 1 1 +4 50 1 1 +4 51 1 1 +4 52 2 1 +4 53 1 -1 +4 54 1 1 +4 55 2 1 +4 56 2 -1 +4 57 1 1 +4 58 1 1 +4 59 1 -1 +4 60 1 1 +4 61 1 1 +4 62 1 1 +4 63 1 1 +4 64 1 1 +4 65 1 1 +4 66 1 1 +4 67 1 -1 +4 68 1 -1 +4 69 1 1 +4 70 1 1 +4 71 1 -1 +4 72 1 -1 +4 73 2 -1 +4 74 1 -1 +4 75 1 1 +4 76 2 -1 +4 77 1 1 +4 78 1 -1 +4 79 1 1 +4 80 1 1 +4 81 1 -1 +4 82 1 1 +4 83 1 1 +4 84 1 1 +4 85 1 -1 +4 86 2 1 +4 87 2 -1 +4 88 2 -1 +4 89 1 -1 +4 90 2 1 +4 91 2 -1 +4 92 1 1 +4 93 1 -1 +4 94 1 1 +4 95 1 -1 +4 96 2 1 +4 97 2 -1 +4 98 1 1 +4 99 1 -1 +4 100 2 1 +5 1 1 1 +5 2 1 1 +5 3 1 1 +5 4 1 1 +5 5 1 -1 +5 6 1 1 +5 7 1 1 +5 8 1 -1 +5 9 1 1 +5 10 1 -1 +5 11 2 1 +5 12 1 -1 +5 13 2 1 +5 14 2 1 +5 15 2 1 +5 16 2 1 +5 17 2 -1 +5 18 2 -1 +5 19 2 -1 +5 20 1 -1 +5 21 2 1 +5 22 2 -1 +5 23 1 1 +5 24 2 -1 +5 25 1 1 +5 26 1 -1 +5 27 1 1 +5 28 1 -1 +5 29 2 1 +5 30 2 1 +5 31 2 1 +5 32 2 -1 +5 33 1 -1 +5 34 2 1 +5 35 2 1 +5 36 1 1 +5 37 1 -1 +5 38 2 -1 +5 39 2 1 +5 40 2 -1 +5 41 1 -1 +5 42 2 1 +5 43 1 1 +5 44 2 -1 +5 45 2 -1 +5 46 1 1 +5 47 1 1 +5 48 1 -1 +5 49 1 1 +5 50 1 1 +5 51 1 -1 +5 52 1 -1 +5 53 2 -1 +5 54 1 1 +5 55 2 -1 +5 56 1 -1 +5 57 2 1 +5 58 2 -1 +5 59 2 1 +5 60 2 -1 +5 61 1 1 +5 62 2 -1 +5 
63 1 1 +5 64 1 1 +5 65 1 1 +5 66 1 -1 +5 67 1 -1 +5 68 1 -1 +5 69 2 1 +5 70 2 1 +5 71 2 -1 +5 72 1 -1 +5 73 2 1 +5 74 2 -1 +5 75 2 1 +5 76 1 1 +5 77 1 -1 +5 78 1 -1 +5 79 2 -1 +5 80 1 1 +5 81 2 -1 +5 82 1 1 +5 83 1 1 +5 84 1 -1 +5 85 1 1 +5 86 1 1 +5 87 2 1 +5 88 1 1 +5 89 2 -1 +5 90 1 -1 +5 91 1 -1 +5 92 1 -1 +5 93 2 1 +5 94 2 1 +5 95 1 1 +5 96 2 -1 +5 97 1 -1 +5 98 1 -1 +5 99 2 -1 +5 100 2 1 +6 1 1 -1 +6 2 1 1 +6 3 1 1 +6 4 1 -1 +6 5 2 -1 +6 6 1 1 +6 7 1 1 +6 8 1 1 +6 9 1 -1 +6 10 1 -1 +6 11 2 -1 +6 12 1 -1 +6 13 2 -1 +6 14 2 1 +6 15 2 -1 +6 16 1 -1 +6 17 1 -1 +6 18 2 -1 +6 19 1 1 +6 20 1 1 +6 21 1 -1 +6 22 2 1 +6 23 2 1 +6 24 2 -1 +6 25 1 -1 +6 26 2 -1 +6 27 1 -1 +6 28 2 -1 +6 29 2 -1 +6 30 1 -1 +6 31 2 1 +6 32 2 -1 +6 33 1 -1 +6 34 2 1 +6 35 2 1 +6 36 2 1 +6 37 2 -1 +6 38 2 -1 +6 39 1 -1 +6 40 1 1 +6 41 2 -1 +6 42 1 1 +6 43 2 -1 +6 44 1 1 +6 45 1 1 +6 46 2 -1 +6 47 1 1 +6 48 2 1 +6 49 2 -1 +6 50 2 -1 +6 51 1 1 +6 52 1 1 +6 53 1 -1 +6 54 1 -1 +6 55 2 1 +6 56 2 -1 +6 57 2 -1 +6 58 1 -1 +6 59 1 1 +6 60 2 -1 +6 61 1 -1 +6 62 1 1 +6 63 2 -1 +6 64 2 -1 +6 65 2 -1 +6 66 1 1 +6 67 1 1 +6 68 1 1 +6 69 1 -1 +6 70 2 1 +6 71 2 1 +6 72 1 1 +6 73 1 -1 +6 74 1 1 +6 75 1 -1 +6 76 2 -1 +6 77 2 -1 +6 78 1 1 +6 79 1 1 +6 80 2 1 +6 81 2 1 +6 82 2 -1 +6 83 2 1 +6 84 2 -1 +6 85 1 1 +6 86 2 -1 +6 87 1 -1 +6 88 2 -1 +6 89 1 1 +6 90 1 1 +6 91 1 1 +6 92 1 -1 +6 93 1 -1 +6 94 1 1 +6 95 1 1 +6 96 1 -1 +6 97 2 1 +6 98 2 -1 +6 99 1 -1 +6 100 1 1 +7 1 2 1 +7 2 2 -1 +7 3 1 -1 +7 4 1 1 +7 5 1 1 +7 6 2 -1 +7 7 1 1 +7 8 1 1 +7 9 1 -1 +7 10 1 -1 +7 11 2 1 +7 12 1 -1 +7 13 2 -1 +7 14 1 1 +7 15 1 -1 +7 16 2 1 +7 17 2 -1 +7 18 1 1 +7 19 1 -1 +7 20 1 -1 +7 21 2 1 +7 22 2 -1 +7 23 1 1 +7 24 1 1 +7 25 1 -1 +7 26 2 -1 +7 27 2 -1 +7 28 1 -1 +7 29 1 -1 +7 30 2 1 +7 31 2 1 +7 32 2 -1 +7 33 2 1 +7 34 1 -1 +7 35 2 1 +7 36 2 -1 +7 37 1 -1 +7 38 2 -1 +7 39 1 1 +7 40 2 -1 +7 41 1 1 +7 42 1 1 +7 43 1 1 +7 44 1 1 +7 45 1 1 +7 46 1 1 +7 47 1 1 +7 48 1 1 +7 49 1 -1 +7 50 1 -1 +7 51 2 1 +7 52 2 1 +7 53 2 1 +7 54 2 1 +7 55 2 1 +7 56 1 -1 +7 57 2 -1 +7 58 2 1 +7 59 2 -1 +7 60 1 -1 +7 61 1 1 +7 62 1 -1 +7 63 2 -1 +7 64 2 -1 +7 65 1 1 +7 66 1 1 +7 67 1 1 +7 68 1 -1 +7 69 2 1 +7 70 2 1 +7 71 2 1 +7 72 2 -1 +7 73 2 1 +7 74 1 -1 +7 75 2 1 +7 76 1 -1 +7 77 2 -1 +7 78 2 1 +7 79 2 1 +7 80 2 -1 +7 81 1 1 +7 82 1 1 +7 83 1 1 +7 84 1 1 +7 85 1 1 +7 86 1 1 +7 87 1 -1 +7 88 2 1 +7 89 2 1 +7 90 2 -1 +7 91 1 1 +7 92 1 -1 +7 93 2 1 +7 94 1 -1 +7 95 1 -1 +7 96 1 -1 +7 97 2 -1 +7 98 2 1 +7 99 2 1 +7 100 2 1 +8 1 2 -1 +8 2 2 -1 +8 3 2 -1 +8 4 1 1 +8 5 1 1 +8 6 1 1 +8 7 1 -1 +8 8 2 -1 +8 9 1 -1 +8 10 1 1 +8 11 1 1 +8 12 1 1 +8 13 2 1 +8 14 2 -1 +8 15 1 -1 +8 16 2 1 +8 17 2 -1 +8 18 2 -1 +8 19 1 1 +8 20 1 -1 +8 21 1 -1 +8 22 2 -1 +8 23 1 -1 +8 24 1 -1 +8 25 2 -1 +8 26 1 1 +8 27 1 1 +8 28 1 1 +8 29 1 -1 +8 30 1 -1 +8 31 2 -1 +8 32 1 -1 +8 33 1 -1 +8 34 2 -1 +8 35 1 1 +8 36 1 -1 +8 37 2 1 +8 38 2 -1 +8 39 2 -1 +8 40 1 1 +8 41 1 1 +8 42 1 1 +8 43 1 -1 +8 44 1 1 +8 45 1 -1 +8 46 2 1 +8 47 1 -1 +8 48 2 -1 +8 49 1 1 +8 50 1 1 +8 51 1 -1 +8 52 1 -1 +8 53 2 1 +8 54 2 1 +8 55 2 1 +8 56 2 1 +8 57 2 1 +8 58 2 1 +8 59 2 1 +8 60 2 1 +8 61 2 -1 +8 62 2 -1 +8 63 1 1 +8 64 1 1 +8 65 1 1 +8 66 1 1 +8 67 1 -1 +8 68 1 -1 +8 69 2 -1 +8 70 2 1 +8 71 2 1 +8 72 1 -1 +8 73 2 1 +8 74 2 -1 +8 75 2 1 +8 76 2 -1 +8 77 2 -1 +8 78 1 -1 +8 79 2 -1 +8 80 1 -1 +8 81 1 -1 +8 82 1 1 +8 83 1 -1 +8 84 1 -1 +8 85 1 -1 +8 86 1 1 +8 87 1 1 +8 88 1 -1 +8 89 1 -1 +8 90 1 1 +8 91 1 1 +8 92 1 -1 +8 93 2 1 +8 94 2 1 +8 95 2 1 +8 96 2 1 +8 97 2 1 +8 98 2 1 +8 99 2 1 +8 100 2 1 +9 1 1 1 +9 2 2 -1 +9 3 1 1 +9 
4 1 1 +9 5 1 1 +9 6 1 -1 +9 7 1 -1 +9 8 2 -1 +9 9 1 -1 +9 10 2 1 +9 11 2 -1 +9 12 2 1 +9 13 2 1 +9 14 1 1 +9 15 1 1 +9 16 2 -1 +9 17 2 1 +9 18 2 -1 +9 19 1 -1 +9 20 2 1 +9 21 2 -1 +9 22 2 1 +9 23 2 -1 +9 24 1 1 +9 25 2 -1 +9 26 1 1 +9 27 1 -1 +9 28 2 -1 +9 29 1 1 +9 30 2 1 +9 31 2 1 +9 32 2 1 +9 33 2 1 +9 34 2 1 +9 35 2 1 +9 36 1 1 +9 37 2 -1 +9 38 1 1 +9 39 1 1 +9 40 1 1 +9 41 1 -1 +9 42 2 1 +9 43 2 -1 +9 44 2 1 +9 45 2 1 +9 46 1 -1 +9 47 2 -1 +9 48 1 -1 +9 49 2 1 +9 50 1 -1 +9 51 2 1 +9 52 2 1 +9 53 2 1 +9 54 2 1 +9 55 2 1 +9 56 1 1 +9 57 1 1 +9 58 2 -1 +9 59 1 1 +9 60 1 1 +9 61 2 1 +9 62 1 1 +9 63 2 -1 +9 64 1 1 +9 65 1 -1 +9 66 2 -1 +9 67 1 1 +9 68 2 1 +9 69 2 -1 +9 70 1 -1 +9 71 2 -1 +9 72 2 1 +9 73 2 -1 +9 74 2 1 +9 75 1 1 +9 76 1 1 +9 77 2 1 +9 78 1 -1 +9 79 1 1 +9 80 1 -1 +9 81 1 -1 +9 82 2 -1 +9 83 1 1 +9 84 1 1 +9 85 1 1 +9 86 2 -1 +9 87 1 -1 +9 88 1 1 +9 89 1 -1 +9 90 1 -1 +9 91 2 1 +9 92 2 1 +9 93 2 1 +9 94 2 1 +9 95 2 1 +9 96 1 -1 +9 97 2 1 +9 98 2 -1 +9 99 2 -1 +9 100 1 -1 +10 1 2 -1 +10 2 1 1 +10 3 1 -1 +10 4 1 -1 +10 5 2 -1 +10 6 2 1 +10 7 2 -1 +10 8 1 -1 +10 9 2 1 +10 10 1 1 +10 11 1 1 +10 12 1 -1 +10 13 1 -1 +10 14 2 1 +10 15 2 1 +10 16 2 1 +10 17 2 -1 +10 18 2 -1 +10 19 1 1 +10 20 1 1 +10 21 1 1 +10 22 1 1 +10 23 1 -1 +10 24 2 1 +10 25 2 -1 +10 26 2 1 +10 27 2 -1 +10 28 1 1 +10 29 1 -1 +10 30 1 -1 +10 31 2 -1 +10 32 1 -1 +10 33 2 1 +10 34 2 1 +10 35 1 1 +10 36 1 1 +10 37 2 -1 +10 38 1 1 +10 39 1 1 +10 40 1 1 +10 41 1 1 +10 42 1 1 +10 43 2 -1 +10 44 2 1 +10 45 2 -1 +10 46 1 -1 +10 47 1 -1 +10 48 2 1 +10 49 2 1 +10 50 2 1 +10 51 2 -1 +10 52 1 1 +10 53 1 -1 +10 54 1 -1 +10 55 2 1 +10 56 2 1 +10 57 2 1 +10 58 2 1 +10 59 2 -1 +10 60 1 1 +10 61 1 -1 +10 62 1 1 +10 63 1 1 +10 64 1 1 +10 65 1 1 +10 66 1 -1 +10 67 1 -1 +10 68 2 1 +10 69 2 1 +10 70 2 -1 +10 71 2 1 +10 72 2 1 +10 73 2 1 +10 74 2 1 +10 75 2 1 +10 76 2 -1 +10 77 2 1 +10 78 2 -1 +10 79 2 -1 +10 80 2 1 +10 81 1 -1 +10 82 2 -1 +10 83 1 1 +10 84 1 1 +10 85 1 -1 +10 86 2 -1 +10 87 1 1 +10 88 2 1 +10 89 1 1 +10 90 1 1 +10 91 1 -1 +10 92 1 1 +10 93 1 -1 +10 94 2 1 +10 95 2 1 +10 96 2 1 +10 97 2 -1 +10 98 2 -1 +10 99 1 1 +10 100 1 -1 +11 1 1 -1 +11 2 1 1 +11 3 1 1 +11 4 1 1 +11 5 2 -1 +11 6 1 1 +11 7 1 -1 +11 8 2 1 +11 9 1 -1 +11 10 2 -1 +11 11 1 1 +11 12 1 1 +11 13 1 -1 +11 14 1 1 +11 15 2 -1 +11 16 2 -1 +11 17 1 1 +11 18 1 1 +11 19 1 1 +11 20 1 1 +11 21 2 -1 +11 22 1 1 +11 23 2 1 +11 24 1 1 +11 25 1 1 +11 26 2 1 +11 27 1 1 +11 28 1 1 +11 29 2 -1 +11 30 2 -1 +11 31 1 -1 +11 32 1 -1 +11 33 1 -1 +11 34 2 1 +11 35 2 1 +11 36 2 1 +11 37 2 -1 +11 38 1 1 +11 39 1 1 +11 40 1 1 +11 41 1 1 +11 42 2 -1 +11 43 1 1 +11 44 1 -1 +11 45 2 -1 +11 46 1 -1 +11 47 1 1 +11 48 2 1 +11 49 2 1 +11 50 2 -1 +11 51 2 -1 +11 52 2 -1 +11 53 1 -1 +11 54 1 1 +11 55 1 1 +11 56 1 -1 +11 57 1 -1 +11 58 2 -1 +11 59 1 -1 +11 60 2 -1 +11 61 2 -1 +11 62 2 1 +11 63 2 -1 +11 64 2 1 +11 65 2 1 +11 66 2 1 +11 67 2 1 +11 68 2 1 +11 69 2 -1 +11 70 2 1 +11 71 2 -1 +11 72 2 1 +11 73 2 1 +11 74 1 -1 +11 75 2 1 +11 76 2 1 +11 77 2 -1 +11 78 2 -1 +11 79 1 1 +11 80 1 -1 +11 81 1 1 +11 82 2 -1 +11 83 1 1 +11 84 1 1 +11 85 1 1 +11 86 1 1 +11 87 1 -1 +11 88 1 -1 +11 89 2 -1 +11 90 2 -1 +11 91 1 -1 +11 92 2 1 +11 93 2 -1 +11 94 2 1 +11 95 1 -1 +11 96 2 -1 +11 97 2 1 +11 98 2 1 +11 99 1 -1 +11 100 2 -1 +12 1 1 1 +12 2 2 1 +12 3 1 1 +12 4 2 -1 +12 5 1 1 +12 6 2 -1 +12 7 1 -1 +12 8 2 -1 +12 9 1 1 +12 10 1 -1 +12 11 1 -1 +12 12 2 1 +12 13 2 1 +12 14 1 -1 +12 15 2 1 +12 16 2 1 +12 17 1 -1 +12 18 1 1 +12 19 1 -1 +12 20 2 -1 +12 21 2 -1 +12 22 2 -1 +12 23 1 1 +12 24 2 -1 +12 25 1 1 
+12 26 1 1 +12 27 1 1 +12 28 1 1 +12 29 1 -1 +12 30 2 -1 +12 31 1 -1 +12 32 1 1 +12 33 1 1 +12 34 1 1 +12 35 2 1 +12 36 1 1 +12 37 2 -1 +12 38 1 1 +12 39 1 1 +12 40 1 -1 +12 41 2 -1 +12 42 1 -1 +12 43 2 1 +12 44 1 1 +12 45 1 1 +12 46 1 -1 +12 47 2 -1 +12 48 1 -1 +12 49 1 -1 +12 50 1 1 +12 51 1 -1 +12 52 1 1 +12 53 1 1 +12 54 1 -1 +12 55 1 -1 +12 56 2 -1 +12 57 2 1 +12 58 1 -1 +12 59 2 1 +12 60 2 -1 +12 61 2 -1 +12 62 2 -1 +12 63 2 -1 +12 64 1 1 +12 65 2 1 +12 66 2 -1 +12 67 1 -1 +12 68 2 1 +12 69 1 -1 +12 70 2 1 +12 71 1 1 +12 72 2 -1 +12 73 1 1 +12 74 1 -1 +12 75 2 -1 +12 76 1 -1 +12 77 2 1 +12 78 2 1 +12 79 2 -1 +12 80 2 -1 +12 81 1 -1 +12 82 2 -1 +12 83 1 1 +12 84 1 1 +12 85 1 -1 +12 86 1 1 +12 87 2 -1 +12 88 1 1 +12 89 1 1 +12 90 1 1 +12 91 1 -1 +12 92 1 -1 +12 93 1 1 +12 94 1 1 +12 95 1 1 +12 96 1 -1 +12 97 2 1 +12 98 1 -1 +12 99 2 1 +12 100 1 -1 +13 1 1 1 +13 2 2 -1 +13 3 1 1 +13 4 2 1 +13 5 2 -1 +13 6 1 1 +13 7 1 1 +13 8 1 -1 +13 9 1 -1 +13 10 2 -1 +13 11 1 -1 +13 12 1 -1 +13 13 2 1 +13 14 2 1 +13 15 2 1 +13 16 2 1 +13 17 2 1 +13 18 2 1 +13 19 2 -1 +13 20 2 -1 +13 21 2 1 +13 22 2 1 +13 23 2 1 +13 24 2 -1 +13 25 1 -1 +13 26 2 -1 +13 27 2 1 +13 28 2 1 +13 29 2 1 +13 30 2 1 +13 31 2 -1 +13 32 1 1 +13 33 2 1 +13 34 2 1 +13 35 2 -1 +13 36 1 -1 +13 37 2 1 +13 38 1 1 +13 39 2 -1 +13 40 2 -1 +13 41 2 -1 +13 42 1 1 +13 43 1 1 +13 44 1 1 +13 45 1 1 +13 46 1 1 +13 47 2 -1 +13 48 1 1 +13 49 1 1 +13 50 1 -1 +13 51 1 1 +13 52 1 -1 +13 53 2 -1 +13 54 1 1 +13 55 1 -1 +13 56 1 -1 +13 57 2 -1 +13 58 2 1 +13 59 2 1 +13 60 2 -1 +13 61 1 -1 +13 62 2 1 +13 63 2 -1 +13 64 1 -1 +13 65 2 -1 +13 66 2 1 +13 67 1 1 +13 68 1 1 +13 69 1 -1 +13 70 1 -1 +13 71 2 1 +13 72 2 1 +13 73 2 1 +13 74 1 -1 +13 75 2 1 +13 76 2 -1 +13 77 2 1 +13 78 2 1 +13 79 2 -1 +13 80 2 -1 +13 81 1 1 +13 82 1 1 +13 83 1 -1 +13 84 2 -1 +13 85 1 -1 +13 86 2 1 +13 87 2 -1 +13 88 1 -1 +13 89 1 1 +13 90 1 1 +13 91 1 -1 +13 92 2 1 +13 93 1 1 +13 94 2 1 +13 95 2 1 +13 96 1 -1 +13 97 1 -1 +13 98 2 1 +13 99 2 1 +13 100 2 -1 +14 1 2 -1 +14 2 1 -1 +14 3 1 1 +14 4 2 -1 +14 5 1 1 +14 6 1 -1 +14 7 1 1 +14 8 1 1 +14 9 1 -1 +14 10 2 -1 +14 11 2 -1 +14 12 1 1 +14 13 1 -1 +14 14 2 1 +14 15 2 -1 +14 16 2 1 +14 17 2 -1 +14 18 1 1 +14 19 1 -1 +14 20 1 -1 +14 21 2 -1 +14 22 2 -1 +14 23 2 1 +14 24 1 -1 +14 25 2 1 +14 26 1 1 +14 27 2 -1 +14 28 1 1 +14 29 1 -1 +14 30 1 -1 +14 31 2 -1 +14 32 1 1 +14 33 1 -1 +14 34 2 -1 +14 35 1 -1 +14 36 1 1 +14 37 2 -1 +14 38 2 -1 +14 39 1 -1 +14 40 1 1 +14 41 1 -1 +14 42 1 1 +14 43 1 1 +14 44 1 1 +14 45 1 -1 +14 46 1 1 +14 47 2 -1 +14 48 2 -1 +14 49 2 1 +14 50 2 -1 +14 51 2 1 +14 52 1 -1 +14 53 2 1 +14 54 2 1 +14 55 2 1 +14 56 2 -1 +14 57 2 1 +14 58 1 -1 +14 59 2 -1 +14 60 1 1 +14 61 1 1 +14 62 1 -1 +14 63 2 -1 +14 64 1 1 +14 65 2 -1 +14 66 1 -1 +14 67 1 -1 +14 68 1 -1 +14 69 2 1 +14 70 2 1 +14 71 2 -1 +14 72 2 1 +14 73 2 -1 +14 74 2 -1 +14 75 1 1 +14 76 2 1 +14 77 2 1 +14 78 2 1 +14 79 2 -1 +14 80 1 1 +14 81 2 -1 +14 82 1 1 +14 83 1 1 +14 84 1 -1 +14 85 2 1 +14 86 1 1 +14 87 2 -1 +14 88 1 1 +14 89 1 -1 +14 90 1 -1 +14 91 2 -1 +14 92 1 1 +14 93 1 1 +14 94 1 -1 +14 95 1 1 +14 96 1 -1 +14 97 1 1 +14 98 1 1 +14 99 1 -1 +14 100 2 1 +15 1 1 1 +15 2 1 -1 +15 3 1 1 +15 4 2 -1 +15 5 1 1 +15 6 1 -1 +15 7 2 -1 +15 8 1 1 +15 9 1 -1 +15 10 2 -1 +15 11 1 -1 +15 12 2 1 +15 13 2 1 +15 14 1 1 +15 15 2 -1 +15 16 1 -1 +15 17 1 1 +15 18 1 1 +15 19 1 1 +15 20 2 1 +15 21 2 -1 +15 22 2 -1 +15 23 1 -1 +15 24 1 -1 +15 25 1 -1 +15 26 2 -1 +15 27 2 -1 +15 28 1 -1 +15 29 2 1 +15 30 2 1 +15 31 2 1 +15 32 2 -1 +15 33 1 -1 +15 34 1 -1 +15 35 2 -1 +15 36 2 1 
+15 37 2 1 +15 38 2 -1 +15 39 2 1 +15 40 1 1 +15 41 1 -1 +15 42 2 -1 +15 43 1 1 +15 44 2 -1 +15 45 1 1 +15 46 1 1 +15 47 1 1 +15 48 1 -1 +15 49 1 -1 +15 50 2 1 +15 51 2 1 +15 52 2 -1 +15 53 1 -1 +15 54 2 1 +15 55 2 1 +15 56 1 1 +15 57 2 -1 +15 58 1 -1 +15 59 2 1 +15 60 2 1 +15 61 1 1 +15 62 2 1 +15 63 2 -1 +15 64 2 -1 +15 65 1 -1 +15 66 1 1 +15 67 2 -1 +15 68 1 -1 +15 69 1 1 +15 70 1 -1 +15 71 1 -1 +15 72 2 1 +15 73 2 1 +15 74 1 1 +15 75 2 1 +15 76 2 -1 +15 77 2 1 +15 78 2 -1 +15 79 1 1 +15 80 1 1 +15 81 1 1 +15 82 1 1 +15 83 1 1 +15 84 1 1 +15 85 1 -1 +15 86 1 1 +15 87 1 1 +15 88 1 -1 +15 89 2 -1 +15 90 1 -1 +15 91 1 -1 +15 92 2 1 +15 93 2 1 +15 94 2 1 +15 95 1 -1 +15 96 2 -1 +15 97 2 1 +15 98 1 -1 +15 99 2 1 +15 100 2 1 +16 1 2 1 +16 2 2 -1 +16 3 1 1 +16 4 2 1 +16 5 1 1 +16 6 1 1 +16 7 2 -1 +16 8 1 1 +16 9 1 1 +16 10 1 -1 +16 11 1 1 +16 12 1 -1 +16 13 2 -1 +16 14 1 -1 +16 15 2 1 +16 16 2 1 +16 17 2 -1 +16 18 2 -1 +16 19 1 1 +16 20 1 1 +16 21 1 -1 +16 22 1 1 +16 23 2 1 +16 24 2 -1 +16 25 1 -1 +16 26 1 1 +16 27 1 1 +16 28 1 -1 +16 29 2 -1 +16 30 2 -1 +16 31 1 1 +16 32 1 -1 +16 33 2 1 +16 34 2 1 +16 35 2 1 +16 36 2 -1 +16 37 2 -1 +16 38 1 -1 +16 39 2 -1 +16 40 1 1 +16 41 1 -1 +16 42 2 -1 +16 43 1 1 +16 44 1 1 +16 45 1 -1 +16 46 2 -1 +16 47 1 1 +16 48 1 -1 +16 49 1 1 +16 50 1 -1 +16 51 1 -1 +16 52 2 -1 +16 53 2 1 +16 54 2 1 +16 55 2 -1 +16 56 2 -1 +16 57 1 -1 +16 58 2 1 +16 59 2 1 +16 60 2 -1 +16 61 2 -1 +16 62 1 1 +16 63 1 -1 +16 64 1 1 +16 65 1 -1 +16 66 2 -1 +16 67 1 -1 +16 68 2 -1 +16 69 2 1 +16 70 2 1 +16 71 1 1 +16 72 2 1 +16 73 2 1 +16 74 2 1 +16 75 2 -1 +16 76 2 -1 +16 77 1 -1 +16 78 1 -1 +16 79 2 -1 +16 80 2 1 +16 81 2 -1 +16 82 1 -1 +16 83 2 1 +16 84 2 -1 +16 85 1 1 +16 86 1 1 +16 87 1 -1 +16 88 1 1 +16 89 1 1 +16 90 1 1 +16 91 1 1 +16 92 1 -1 +16 93 1 1 +16 94 2 1 +16 95 2 -1 +16 96 1 1 +16 97 1 -1 +16 98 1 -1 +16 99 2 1 +16 100 2 1 +17 1 2 -1 +17 2 1 1 +17 3 1 1 +17 4 1 -1 +17 5 1 1 +17 6 1 1 +17 7 1 -1 +17 8 1 -1 +17 9 1 -1 +17 10 2 1 +17 11 2 -1 +17 12 2 1 +17 13 2 1 +17 14 2 1 +17 15 1 -1 +17 16 2 1 +17 17 1 1 +17 18 2 1 +17 19 1 1 +17 20 2 -1 +17 21 2 -1 +17 22 1 1 +17 23 1 1 +17 24 1 1 +17 25 1 1 +17 26 1 1 +17 27 1 1 +17 28 1 1 +17 29 1 -1 +17 30 2 -1 +17 31 1 -1 +17 32 1 -1 +17 33 1 -1 +17 34 2 1 +17 35 2 1 +17 36 2 1 +17 37 2 -1 +17 38 1 1 +17 39 1 1 +17 40 1 1 +17 41 1 -1 +17 42 1 1 +17 43 1 1 +17 44 1 1 +17 45 1 1 +17 46 1 -1 +17 47 1 -1 +17 48 1 1 +17 49 2 1 +17 50 1 1 +17 51 2 -1 +17 52 2 1 +17 53 2 1 +17 54 2 -1 +17 55 2 1 +17 56 2 1 +17 57 1 1 +17 58 2 1 +17 59 2 1 +17 60 2 -1 +17 61 2 -1 +17 62 2 1 +17 63 1 1 +17 64 1 -1 +17 65 1 -1 +17 66 2 -1 +17 67 1 1 +17 68 2 1 +17 69 2 -1 +17 70 1 -1 +17 71 1 1 +17 72 1 -1 +17 73 2 -1 +17 74 1 -1 +17 75 2 -1 +17 76 1 -1 +17 77 2 -1 +17 78 2 -1 +17 79 1 1 +17 80 1 1 +17 81 1 1 +17 82 1 1 +17 83 1 1 +17 84 1 -1 +17 85 1 1 +17 86 1 1 +17 87 1 1 +17 88 1 1 +17 89 1 1 +17 90 1 1 +17 91 1 1 +17 92 1 -1 +17 93 1 1 +17 94 1 -1 +17 95 1 -1 +17 96 2 1 +17 97 2 -1 +17 98 2 1 +17 99 2 -1 +17 100 1 -1 +18 1 1 -1 +18 2 1 1 +18 3 1 1 +18 4 1 1 +18 5 1 1 +18 6 1 -1 +18 7 1 -1 +18 8 2 1 +18 9 2 1 +18 10 2 -1 +18 11 1 1 +18 12 1 1 +18 13 2 1 +18 14 2 1 +18 15 1 1 +18 16 1 1 +18 17 1 1 +18 18 1 1 +18 19 1 1 +18 20 1 1 +18 21 1 1 +18 22 2 -1 +18 23 2 -1 +18 24 1 1 +18 25 1 1 +18 26 1 1 +18 27 1 1 +18 28 1 1 +18 29 1 1 +18 30 1 -1 +18 31 1 1 +18 32 1 -1 +18 33 2 1 +18 34 2 1 +18 35 2 1 +18 36 2 -1 +18 37 2 1 +18 38 2 -1 +18 39 2 1 +18 40 2 -1 +18 41 2 -1 +18 42 2 -1 +18 43 1 -1 +18 44 2 1 +18 45 2 -1 +18 46 1 1 +18 47 1 1 +18 48 1 -1 +18 49 2 1 
+18 50 2 -1 +18 51 1 1 +18 52 1 -1 +18 53 1 -1 +18 54 2 1 +18 55 2 1 +18 56 2 1 +18 57 2 -1 +18 58 2 1 +18 59 2 1 +18 60 2 -1 +18 61 2 -1 +18 62 2 -1 +18 63 1 1 +18 64 1 -1 +18 65 1 1 +18 66 1 1 +18 67 2 1 +18 68 1 -1 +18 69 1 1 +18 70 2 -1 +18 71 1 1 +18 72 2 -1 +18 73 1 -1 +18 74 1 -1 +18 75 2 1 +18 76 2 1 +18 77 1 -1 +18 78 2 1 +18 79 2 -1 +18 80 2 -1 +18 81 1 1 +18 82 1 -1 +18 83 2 -1 +18 84 2 1 +18 85 1 -1 +18 86 2 1 +18 87 2 -1 +18 88 1 -1 +18 89 1 -1 +18 90 2 -1 +18 91 1 -1 +18 92 1 -1 +18 93 1 -1 +18 94 1 1 +18 95 2 1 +18 96 2 1 +18 97 2 1 +18 98 1 1 +18 99 2 -1 +18 100 1 1 +19 1 2 -1 +19 2 2 -1 +19 3 2 -1 +19 4 1 -1 +19 5 2 -1 +19 6 1 1 +19 7 1 -1 +19 8 1 -1 +19 9 1 1 +19 10 1 -1 +19 11 2 -1 +19 12 2 1 +19 13 2 1 +19 14 2 1 +19 15 2 1 +19 16 2 -1 +19 17 2 -1 +19 18 1 1 +19 19 1 -1 +19 20 1 1 +19 21 1 -1 +19 22 1 1 +19 23 1 1 +19 24 1 1 +19 25 2 1 +19 26 2 -1 +19 27 1 -1 +19 28 1 1 +19 29 1 -1 +19 30 1 -1 +19 31 1 1 +19 32 2 1 +19 33 1 -1 +19 34 1 -1 +19 35 2 1 +19 36 2 1 +19 37 2 1 +19 38 1 1 +19 39 2 -1 +19 40 2 -1 +19 41 2 -1 +19 42 2 -1 +19 43 1 1 +19 44 1 1 +19 45 1 1 +19 46 1 1 +19 47 1 1 +19 48 1 1 +19 49 1 -1 +19 50 1 -1 +19 51 1 -1 +19 52 2 -1 +19 53 1 1 +19 54 1 -1 +19 55 1 -1 +19 56 2 -1 +19 57 2 1 +19 58 2 1 +19 59 2 1 +19 60 2 -1 +19 61 2 -1 +19 62 1 1 +19 63 1 1 +19 64 2 -1 +19 65 2 -1 +19 66 2 1 +19 67 1 1 +19 68 1 -1 +19 69 2 1 +19 70 2 1 +19 71 2 1 +19 72 1 -1 +19 73 2 1 +19 74 2 -1 +19 75 1 1 +19 76 1 -1 +19 77 2 1 +19 78 2 1 +19 79 2 -1 +19 80 1 1 +19 81 2 -1 +19 82 1 1 +19 83 1 1 +19 84 2 -1 +19 85 1 -1 +19 86 1 1 +19 87 2 1 +19 88 1 1 +19 89 2 -1 +19 90 1 1 +19 91 1 -1 +19 92 1 -1 +19 93 1 -1 +19 94 2 1 +19 95 2 1 +19 96 2 -1 +19 97 1 -1 +19 98 2 1 +19 99 2 1 +19 100 2 1 +20 1 1 1 +20 2 1 -1 +20 3 2 -1 +20 4 1 1 +20 5 1 -1 +20 6 1 1 +20 7 1 -1 +20 8 2 1 +20 9 2 1 +20 10 1 -1 +20 11 2 1 +20 12 2 1 +20 13 1 1 +20 14 2 -1 +20 15 2 1 +20 16 1 1 +20 17 1 1 +20 18 1 1 +20 19 1 -1 +20 20 2 -1 +20 21 1 1 +20 22 1 -1 +20 23 2 -1 +20 24 1 1 +20 25 1 1 +20 26 1 1 +20 27 1 1 +20 28 1 1 +20 29 1 1 +20 30 1 -1 +20 31 2 -1 +20 32 1 -1 +20 33 2 1 +20 34 2 1 +20 35 2 -1 +20 36 2 1 +20 37 2 -1 +20 38 1 -1 +20 39 2 -1 +20 40 1 1 +20 41 1 -1 +20 42 1 1 +20 43 1 1 +20 44 2 1 +20 45 2 -1 +20 46 1 -1 +20 47 2 -1 +20 48 1 -1 +20 49 1 1 +20 50 2 1 +20 51 1 -1 +20 52 2 -1 +20 53 1 -1 +20 54 2 1 +20 55 2 -1 +20 56 2 1 +20 57 2 1 +20 58 2 -1 +20 59 2 1 +20 60 2 -1 +20 61 2 1 +20 62 2 -1 +20 63 2 -1 +20 64 1 1 +20 65 1 1 +20 66 1 1 +20 67 1 1 +20 68 1 -1 +20 69 1 -1 +20 70 2 1 +20 71 2 1 +20 72 2 -1 +20 73 1 1 +20 74 1 -1 +20 75 1 -1 +20 76 1 -1 +20 77 2 1 +20 78 2 1 +20 79 2 1 +20 80 2 1 +20 81 2 1 +20 82 2 1 +20 83 2 1 +20 84 2 -1 +20 85 2 -1 +20 86 1 1 +20 87 1 -1 +20 88 1 1 +20 89 1 -1 +20 90 2 1 +20 91 2 -1 +20 92 1 1 +20 93 2 -1 +20 94 1 -1 +20 95 1 -1 +20 96 2 1 +20 97 2 1 +20 98 2 1 +20 99 2 1 +20 100 2 1
diff --git a/Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt b/Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt
new file mode 100644
index 00000000..16725497
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt
@@ -0,0 +1,1801 @@
+ExperimentName subjID block trial choice outcome choiceSwitch choice.ACC choice.RT Subject_Block
+PRL_Young_Final 5038 1 1 2 25 1 1 1430 5038_1 +PRL_Young_Final 5038 1 2 2 25 0 1 439 5038_1 +PRL_Young_Final 5038 1 3 2 -25 0 1 374 5038_1 +PRL_Young_Final 5038 1 4 2 25 0 1 267 5038_1 +PRL_Young_Final 5038 1 5 2 25 0 1 331 5038_1 +PRL_Young_Final 5038 1 6 2 -25 0 1 316 5038_1 +PRL_Young_Final 5038 1 7 2
25 0 1 325 5038_1 +PRL_Young_Final 5038 1 8 2 25 0 0 264 5038_1 +PRL_Young_Final 5038 1 9 2 -25 0 0 343 5038_1 +PRL_Young_Final 5038 1 10 2 -25 0 0 292 5038_1 +PRL_Young_Final 5038 1 11 2 -25 0 0 288 5038_1 +PRL_Young_Final 5038 1 12 1 25 1 1 308 5038_1 +PRL_Young_Final 5038 1 13 1 25 0 1 383 5038_1 +PRL_Young_Final 5038 1 14 1 25 0 1 322 5038_1 +PRL_Young_Final 5038 1 15 1 25 0 1 297 5038_1 +PRL_Young_Final 5038 1 16 1 -25 0 1 350 5038_1 +PRL_Young_Final 5038 1 17 1 -25 0 0 484 5038_1 +PRL_Young_Final 5038 1 18 2 -25 1 1 442 5038_1 +PRL_Young_Final 5038 1 19 1 -25 1 0 298 5038_1 +PRL_Young_Final 5038 1 20 1 -25 0 0 312 5038_1 +PRL_Young_Final 5038 1 21 2 25 1 1 358 5038_1 +PRL_Young_Final 5038 1 22 2 25 0 1 397 5038_1 +PRL_Young_Final 5038 1 23 2 25 0 1 563 5038_1 +PRL_Young_Final 5038 1 24 2 25 0 1 351 5038_1 +PRL_Young_Final 5038 1 25 2 25 0 1 278 5038_1 +PRL_Young_Final 5038 1 26 2 25 0 1 222 5038_1 +PRL_Young_Final 5038 1 27 2 -25 0 1 391 5038_1 +PRL_Young_Final 5038 1 28 2 25 0 1 231 5038_1 +PRL_Young_Final 5038 1 29 2 25 0 0 281 5038_1 +PRL_Young_Final 5038 1 30 2 25 0 0 363 5038_1 +PRL_Young_Final 5038 1 31 2 -25 0 0 282 5038_1 +PRL_Young_Final 5038 1 32 2 -25 0 0 308 5038_1 +PRL_Young_Final 5038 1 33 2 -25 0 0 270 5038_1 +PRL_Young_Final 5038 1 34 1 25 1 1 291 5038_1 +PRL_Young_Final 5038 1 35 1 25 0 1 350 5038_1 +PRL_Young_Final 5038 1 36 1 25 0 1 271 5038_1 +PRL_Young_Final 5038 1 37 1 25 0 1 310 5038_1 +PRL_Young_Final 5038 1 38 1 25 0 1 341 5038_1 +PRL_Young_Final 5038 1 39 1 25 0 1 291 5038_1 +PRL_Young_Final 5038 1 40 1 -25 0 1 338 5038_1 +PRL_Young_Final 5038 1 41 1 25 0 1 296 5038_1 +PRL_Young_Final 5038 1 42 1 -25 0 0 419 5038_1 +PRL_Young_Final 5038 1 43 1 -25 0 0 356 5038_1 +PRL_Young_Final 5038 1 44 2 25 1 1 239 5038_1 +PRL_Young_Final 5038 1 45 2 -25 0 1 241 5038_1 +PRL_Young_Final 5038 1 46 2 25 0 1 386 5038_1 +PRL_Young_Final 5038 1 47 2 25 0 1 282 5038_1 +PRL_Young_Final 5038 1 48 2 25 0 1 276 5038_1 +PRL_Young_Final 5038 1 49 2 25 0 1 353 5038_1 +PRL_Young_Final 5038 1 50 2 25 0 1 264 5038_1 +PRL_Young_Final 5038 1 51 2 -25 0 0 412 5038_1 +PRL_Young_Final 5038 1 52 2 25 0 0 381 5038_1 +PRL_Young_Final 5038 1 53 2 -25 0 0 272 5038_1 +PRL_Young_Final 5038 1 54 2 -25 0 0 315 5038_1 +PRL_Young_Final 5038 1 55 2 -25 0 0 343 5038_1 +PRL_Young_Final 5038 1 56 1 -25 1 1 339 5038_1 +PRL_Young_Final 5038 1 57 1 -25 0 1 276 5038_1 +PRL_Young_Final 5038 1 58 2 -25 1 0 302 5038_1 +PRL_Young_Final 5038 1 59 2 -25 0 0 294 5038_1 +PRL_Young_Final 5038 1 60 1 25 1 1 382 5038_1 +PRL_Young_Final 5038 1 61 1 25 0 1 408 5038_1 +PRL_Young_Final 5038 1 62 1 25 0 1 475 5038_1 +PRL_Young_Final 5038 1 63 1 25 0 1 279 5038_1 +PRL_Young_Final 5038 1 64 1 25 0 1 678 5038_1 +PRL_Young_Final 5038 1 65 1 -25 0 0 319 5038_1 +PRL_Young_Final 5038 1 66 1 -25 0 0 1048 5038_1 +PRL_Young_Final 5038 1 67 2 25 1 1 385 5038_1 +PRL_Young_Final 5038 1 68 2 -25 0 1 465 5038_1 +PRL_Young_Final 5038 1 69 2 25 0 1 347 5038_1 +PRL_Young_Final 5038 1 70 2 25 0 1 462 5038_1 +PRL_Young_Final 5038 1 71 2 25 0 1 402 5038_1 +PRL_Young_Final 5038 1 72 2 25 0 1 426 5038_1 +PRL_Young_Final 5038 1 73 2 25 0 0 330 5038_1 +PRL_Young_Final 5038 1 74 2 -25 0 0 337 5038_1 +PRL_Young_Final 5038 1 75 2 25 0 0 236 5038_1 +PRL_Young_Final 5038 1 76 2 -25 0 0 385 5038_1 +PRL_Young_Final 5038 1 77 2 -25 0 0 391 5038_1 +PRL_Young_Final 5038 1 78 1 25 1 1 647 5038_1 +PRL_Young_Final 5038 1 79 1 25 0 1 410 5038_1 +PRL_Young_Final 5038 1 80 1 25 0 1 351 5038_1 +PRL_Young_Final 5038 1 81 1 -25 0 1 286 5038_1 +PRL_Young_Final 5038 1 82 1 
25 0 1 359 5038_1 +PRL_Young_Final 5038 1 83 1 25 0 1 295 5038_1 +PRL_Young_Final 5038 1 84 1 -25 0 0 344 5038_1 +PRL_Young_Final 5038 1 85 1 -25 0 0 282 5038_1 +PRL_Young_Final 5038 1 86 2 -25 1 1 667 5038_1 +PRL_Young_Final 5038 1 87 2 25 0 1 331 5038_1 +PRL_Young_Final 5038 1 88 2 25 0 1 382 5038_1 +PRL_Young_Final 5038 1 89 2 25 0 1 300 5038_1 +PRL_Young_Final 5038 1 90 2 25 0 1 307 5038_1 +PRL_Young_Final 5038 1 91 2 25 0 1 329 5038_1 +PRL_Young_Final 5038 1 92 2 -25 0 1 375 5038_1 +PRL_Young_Final 5038 1 93 2 -25 0 1 193 5038_1 +PRL_Young_Final 5038 1 94 1 25 1 1 658 5038_1 +PRL_Young_Final 5038 1 95 1 25 0 1 466 5038_1 +PRL_Young_Final 5038 1 96 1 25 0 1 394 5038_1 +PRL_Young_Final 5038 1 97 1 25 0 1 272 5038_1 +PRL_Young_Final 5038 1 98 1 25 0 1 336 5038_1 +PRL_Young_Final 5038 1 99 1 25 0 1 446 5038_1 +PRL_Young_Final 5038 1 100 1 -25 0 1 387 5038_1 +PRL_Young_Final 5038 1 101 1 25 0 1 415 5038_1 +PRL_Young_Final 5038 1 102 1 -25 0 0 434 5038_1 +PRL_Young_Final 5038 1 103 1 -25 0 0 383 5038_1 +PRL_Young_Final 5038 1 104 2 25 1 1 341 5038_1 +PRL_Young_Final 5038 1 105 2 25 0 1 575 5038_1 +PRL_Young_Final 5038 1 106 2 25 0 1 332 5038_1 +PRL_Young_Final 5038 1 107 2 25 0 1 411 5038_1 +PRL_Young_Final 5038 1 108 2 25 0 1 408 5038_1 +PRL_Young_Final 5038 1 109 2 25 0 1 364 5038_1 +PRL_Young_Final 5038 1 110 2 25 0 0 429 5038_1 +PRL_Young_Final 5038 1 111 2 25 0 0 342 5038_1 +PRL_Young_Final 5038 1 112 2 -25 0 0 56 5038_1 +PRL_Young_Final 5038 1 113 2 -25 0 0 339 5038_1 +PRL_Young_Final 5038 1 114 1 -25 1 1 369 5038_1 +PRL_Young_Final 5038 1 115 1 25 0 1 779 5038_1 +PRL_Young_Final 5038 1 116 1 25 0 1 529 5038_1 +PRL_Young_Final 5038 1 117 1 -25 0 1 397 5038_1 +PRL_Young_Final 5038 1 118 1 25 0 1 414 5038_1 +PRL_Young_Final 5038 1 119 2 -25 1 0 392 5038_1 +PRL_Young_Final 5038 1 120 1 25 1 1 518 5038_1 +PRL_Young_Final 5038 1 121 1 25 0 1 470 5038_1 +PRL_Young_Final 5038 1 122 1 25 0 1 587 5038_1 +PRL_Young_Final 5038 1 123 1 25 0 1 377 5038_1 +PRL_Young_Final 5038 1 124 1 -25 0 1 351 5038_1 +PRL_Young_Final 5038 1 125 1 -25 0 1 331 5038_1 +PRL_Young_Final 5038 1 126 2 -25 1 0 265 5038_1 +PRL_Young_Final 5038 1 127 2 -25 0 0 327 5038_1 +PRL_Young_Final 5038 1 128 1 25 1 1 244 5038_1 +PRL_Young_Final 5038 1 129 1 25 0 1 363 5038_1 +PRL_Young_Final 5038 1 130 1 25 0 1 639 5038_1 +PRL_Young_Final 5038 1 131 1 25 0 1 435 5038_1 +PRL_Young_Final 5038 1 132 1 25 0 1 436 5038_1 +PRL_Young_Final 5038 1 133 1 25 0 1 559 5038_1 +PRL_Young_Final 5038 1 134 1 -25 0 1 388 5038_1 +PRL_Young_Final 5038 1 135 1 25 0 1 271 5038_1 +PRL_Young_Final 5038 1 136 1 -25 0 0 430 5038_1 +PRL_Young_Final 5038 1 137 1 25 0 0 536 5038_1 +PRL_Young_Final 5038 1 138 1 -25 0 0 281 5038_1 +PRL_Young_Final 5038 1 139 2 25 1 1 370 5038_1 +PRL_Young_Final 5038 1 140 2 25 0 1 335 5038_1 +PRL_Young_Final 5038 1 141 2 25 0 1 409 5038_1 +PRL_Young_Final 5038 1 142 2 25 0 1 358 5038_1 +PRL_Young_Final 5038 1 143 2 25 0 1 261 5038_1 +PRL_Young_Final 5038 1 144 2 -25 0 0 294 5038_1 +PRL_Young_Final 5038 1 145 2 -25 0 0 395 5038_1 +PRL_Young_Final 5038 1 146 1 25 1 1 417 5038_1 +PRL_Young_Final 5038 1 147 1 -25 0 1 410 5038_1 +PRL_Young_Final 5038 1 148 2 -25 1 0 348 5038_1 +PRL_Young_Final 5038 1 149 2 -25 0 0 336 5038_1 +PRL_Young_Final 5038 1 150 1 25 1 1 322 5038_1 +PRL_Young_Final 5038 1 151 1 25 0 1 363 5038_1 +PRL_Young_Final 5038 1 152 1 -25 0 1 353 5038_1 +PRL_Young_Final 5038 1 153 1 25 0 1 247 5038_1 +PRL_Young_Final 5038 1 154 1 25 0 1 347 5038_1 +PRL_Young_Final 5038 1 155 1 -25 0 0 341 5038_1 +PRL_Young_Final 5038 1 
156 1 -25 0 0 358 5038_1 +PRL_Young_Final 5038 1 157 2 25 1 1 268 5038_1 +PRL_Young_Final 5038 1 158 2 25 0 1 312 5038_1 +PRL_Young_Final 5038 1 159 2 25 0 1 559 5038_1 +PRL_Young_Final 5038 1 160 2 -25 0 1 468 5038_1 +PRL_Young_Final 5038 1 161 2 -25 0 1 938 5038_1 +PRL_Young_Final 5038 1 162 1 25 1 0 277 5038_1 +PRL_Young_Final 5038 1 163 1 -25 0 0 385 5038_1 +PRL_Young_Final 5038 1 164 1 25 0 0 642 5038_1 +PRL_Young_Final 5038 1 165 1 -25 0 0 420 5038_1 +PRL_Young_Final 5038 1 166 1 -25 0 0 307 5038_1 +PRL_Young_Final 5038 1 167 2 25 1 1 260 5038_1 +PRL_Young_Final 5038 1 168 2 25 0 1 312 5038_1 +PRL_Young_Final 5038 1 169 2 25 0 1 305 5038_1 +PRL_Young_Final 5038 1 170 2 25 0 1 354 5038_1 +PRL_Young_Final 5038 1 171 2 25 0 1 341 5038_1 +PRL_Young_Final 5038 1 172 2 25 0 1 355 5038_1 +PRL_Young_Final 5038 1 173 2 -25 0 1 305 5038_1 +PRL_Young_Final 5038 1 174 2 25 0 1 279 5038_1 +PRL_Young_Final 5038 1 175 2 -25 0 0 342 5038_1 +PRL_Young_Final 5038 1 176 2 -25 0 0 116 5038_1 +PRL_Young_Final 5038 1 177 1 25 1 1 38 5038_1 +PRL_Young_Final 5038 1 178 1 25 0 1 326 5038_1 +PRL_Young_Final 5038 1 179 1 25 0 1 368 5038_1 +PRL_Young_Final 5038 1 180 1 25 0 1 373 5038_1 +PRL_Young_Final 5038 1 181 1 25 0 1 313 5038_1 +PRL_Young_Final 5038 1 182 1 25 0 1 300 5038_1 +PRL_Young_Final 5038 1 183 1 -25 0 1 296 5038_1 +PRL_Young_Final 5038 1 184 1 25 0 1 355 5038_1 +PRL_Young_Final 5038 1 185 1 -25 0 0 314 5038_1 +PRL_Young_Final 5038 1 186 2 25 1 1 286 5038_1 +PRL_Young_Final 5038 1 187 2 -25 0 1 442 5038_1 +PRL_Young_Final 5038 1 188 2 25 0 1 364 5038_1 +PRL_Young_Final 5038 1 189 2 25 0 1 336 5038_1 +PRL_Young_Final 5038 1 190 2 25 0 1 464 5038_1 +PRL_Young_Final 5038 1 191 2 25 0 1 367 5038_1 +PRL_Young_Final 5038 1 192 2 25 0 1 356 5038_1 +PRL_Young_Final 5038 1 193 2 -25 0 1 577 5038_1 +PRL_Young_Final 5038 1 194 2 -25 0 0 327 5038_1 +PRL_Young_Final 5038 1 195 1 -25 1 1 919 5038_1 +PRL_Young_Final 5038 1 196 1 25 0 1 292 5038_1 +PRL_Young_Final 5038 1 197 1 25 0 1 570 5038_1 +PRL_Young_Final 5038 1 198 1 25 0 1 397 5038_1 +PRL_Young_Final 5038 1 199 1 25 0 1 611 5038_1 +PRL_Young_Final 5038 1 200 1 25 0 0 373 5038_1 +PRL_Young_Final 5038 2 1 2 25 1 1 884 5038_2 +PRL_Young_Final 5038 2 2 2 25 0 1 435 5038_2 +PRL_Young_Final 5038 2 3 2 -25 0 1 376 5038_2 +PRL_Young_Final 5038 2 4 2 25 0 1 342 5038_2 +PRL_Young_Final 5038 2 5 2 25 0 1 300 5038_2 +PRL_Young_Final 5038 2 6 2 25 0 1 394 5038_2 +PRL_Young_Final 5038 2 7 2 25 0 0 363 5038_2 +PRL_Young_Final 5038 2 8 2 -25 0 0 356 5038_2 +PRL_Young_Final 5038 2 9 2 -25 0 0 348 5038_2 +PRL_Young_Final 5038 2 10 1 25 1 1 305 5038_2 +PRL_Young_Final 5038 2 11 1 25 0 1 448 5038_2 +PRL_Young_Final 5038 2 12 1 25 0 1 328 5038_2 +PRL_Young_Final 5038 2 13 1 25 0 1 752 5038_2 +PRL_Young_Final 5038 2 14 1 -25 0 1 647 5038_2 +PRL_Young_Final 5038 2 15 1 25 0 1 386 5038_2 +PRL_Young_Final 5038 2 16 1 25 0 1 579 5038_2 +PRL_Young_Final 5038 2 17 1 -25 0 1 530 5038_2 +PRL_Young_Final 5038 2 18 1 -25 0 0 646 5038_2 +PRL_Young_Final 5038 2 19 2 25 1 1 279 5038_2 +PRL_Young_Final 5038 2 20 2 25 0 1 401 5038_2 +PRL_Young_Final 5038 2 21 2 25 0 1 476 5038_2 +PRL_Young_Final 5038 2 22 2 25 0 1 363 5038_2 +PRL_Young_Final 5038 2 23 2 25 0 1 435 5038_2 +PRL_Young_Final 5038 2 24 2 -25 0 1 363 5038_2 +PRL_Young_Final 5038 2 25 2 -25 0 1 268 5038_2 +PRL_Young_Final 5038 2 26 1 -25 1 0 426 5038_2 +PRL_Young_Final 5038 2 27 1 -25 0 0 259 5038_2 +PRL_Young_Final 5038 2 28 2 25 1 1 315 5038_2 +PRL_Young_Final 5038 2 29 2 25 0 1 372 5038_2 +PRL_Young_Final 5038 2 30 2 25 0 1 432 
5038_2 +PRL_Young_Final 5038 2 31 2 25 0 1 349 5038_2 +PRL_Young_Final 5038 2 32 2 25 0 1 346 5038_2 +PRL_Young_Final 5038 2 33 2 -25 0 0 340 5038_2 +PRL_Young_Final 5038 2 34 2 25 0 0 332 5038_2 +PRL_Young_Final 5038 2 35 2 -25 0 0 348 5038_2 +PRL_Young_Final 5038 2 36 2 -25 0 0 362 5038_2 +PRL_Young_Final 5038 2 37 1 25 1 1 245 5038_2 +PRL_Young_Final 5038 2 38 1 -25 0 1 316 5038_2 +PRL_Young_Final 5038 2 39 1 25 0 1 336 5038_2 +PRL_Young_Final 5038 2 40 1 25 0 1 312 5038_2 +PRL_Young_Final 5038 2 41 1 25 0 1 423 5038_2 +PRL_Young_Final 5038 2 42 1 25 0 1 461 5038_2 +PRL_Young_Final 5038 2 43 1 25 0 1 332 5038_2 +PRL_Young_Final 5038 2 44 1 25 0 1 336 5038_2 +PRL_Young_Final 5038 2 45 1 -25 0 0 361 5038_2 +PRL_Young_Final 5038 2 46 1 -25 0 0 738 5038_2 +PRL_Young_Final 5038 2 47 2 25 1 1 256 5038_2 +PRL_Young_Final 5038 2 48 2 -25 0 1 293 5038_2 +PRL_Young_Final 5038 2 49 2 25 0 1 732 5038_2 +PRL_Young_Final 5038 2 50 2 25 0 1 346 5038_2 +PRL_Young_Final 5038 2 51 2 -25 0 1 503 5038_2 +PRL_Young_Final 5038 2 52 2 -25 0 0 338 5038_2 +PRL_Young_Final 5038 2 53 1 25 1 1 381 5038_2 +PRL_Young_Final 5038 2 54 1 25 0 1 279 5038_2 +PRL_Young_Final 5038 2 55 1 25 0 1 393 5038_2 +PRL_Young_Final 5038 2 56 1 25 0 1 425 5038_2 +PRL_Young_Final 5038 2 57 1 25 0 1 296 5038_2 +PRL_Young_Final 5038 2 58 1 -25 0 1 445 5038_2 +PRL_Young_Final 5038 2 59 1 -25 0 1 279 5038_2 +PRL_Young_Final 5038 2 60 2 -25 1 0 23 5038_2 +PRL_Young_Final 5038 2 61 2 -25 0 0 342 5038_2 +PRL_Young_Final 5038 2 62 1 25 1 1 411 5038_2 +PRL_Young_Final 5038 2 63 1 25 0 1 359 5038_2 +PRL_Young_Final 5038 2 64 1 25 0 1 265 5038_2 +PRL_Young_Final 5038 2 65 1 25 0 1 421 5038_2 +PRL_Young_Final 5038 2 66 1 25 0 1 561 5038_2 +PRL_Young_Final 5038 2 67 1 25 0 0 325 5038_2 +PRL_Young_Final 5038 2 68 1 -25 0 0 356 5038_2 +PRL_Young_Final 5038 2 69 1 25 0 0 343 5038_2 +PRL_Young_Final 5038 2 70 1 -25 0 0 411 5038_2 +PRL_Young_Final 5038 2 71 1 -25 0 0 278 5038_2 +PRL_Young_Final 5038 2 72 2 25 1 1 329 5038_2 +PRL_Young_Final 5038 2 73 2 -25 0 1 347 5038_2 +PRL_Young_Final 5038 2 74 1 -25 1 0 314 5038_2 +PRL_Young_Final 5038 2 75 2 25 1 1 271 5038_2 +PRL_Young_Final 5038 2 76 2 25 0 1 357 5038_2 +PRL_Young_Final 5038 2 77 2 25 0 1 391 5038_2 +PRL_Young_Final 5038 2 78 2 25 0 1 371 5038_2 +PRL_Young_Final 5038 2 79 2 25 0 1 263 5038_2 +PRL_Young_Final 5038 2 80 2 25 0 1 306 5038_2 +PRL_Young_Final 5038 2 81 2 25 0 1 366 5038_2 +PRL_Young_Final 5038 2 82 2 -25 0 1 313 5038_2 +PRL_Young_Final 5038 2 83 2 -25 0 0 379 5038_2 +PRL_Young_Final 5038 2 84 1 25 1 1 328 5038_2 +PRL_Young_Final 5038 2 85 1 25 0 1 388 5038_2 +PRL_Young_Final 5038 2 86 1 -25 0 1 273 5038_2 +PRL_Young_Final 5038 2 87 1 25 0 1 324 5038_2 +PRL_Young_Final 5038 2 88 1 25 0 1 592 5038_2 +PRL_Young_Final 5038 2 89 1 25 0 1 467 5038_2 +PRL_Young_Final 5038 2 90 1 25 0 1 336 5038_2 +PRL_Young_Final 5038 2 91 1 -25 0 0 347 5038_2 +PRL_Young_Final 5038 2 92 1 -25 0 0 320 5038_2 +PRL_Young_Final 5038 2 93 2 25 1 1 447 5038_2 +PRL_Young_Final 5038 2 94 2 -25 0 1 494 5038_2 +PRL_Young_Final 5038 2 95 2 -25 0 1 456 5038_2 +PRL_Young_Final 5038 2 96 1 25 1 0 309 5038_2 +PRL_Young_Final 5038 2 97 1 25 0 0 430 5038_2 +PRL_Young_Final 5038 2 98 1 -25 0 0 315 5038_2 +PRL_Young_Final 5038 2 99 1 -25 0 0 471 5038_2 +PRL_Young_Final 5038 2 100 2 25 1 1 344 5038_2 +PRL_Young_Final 5038 2 101 2 25 0 1 325 5038_2 +PRL_Young_Final 5038 2 102 2 25 0 1 367 5038_2 +PRL_Young_Final 5038 2 103 2 25 0 1 353 5038_2 +PRL_Young_Final 5038 2 104 2 25 0 1 262 5038_2 +PRL_Young_Final 5038 2 105 2 -25 0 0 
225 5038_2 +PRL_Young_Final 5038 2 106 2 -25 0 0 435 5038_2 +PRL_Young_Final 5038 2 107 1 25 1 1 319 5038_2 +PRL_Young_Final 5038 2 108 1 -25 0 1 330 5038_2 +PRL_Young_Final 5038 2 109 1 25 0 1 161 5038_2 +PRL_Young_Final 5038 2 110 1 25 0 1 347 5038_2 +PRL_Young_Final 5038 2 111 1 25 0 1 374 5038_2 +PRL_Young_Final 5038 2 112 1 25 0 1 358 5038_2 +PRL_Young_Final 5038 2 113 1 25 0 1 260 5038_2 +PRL_Young_Final 5038 2 114 1 -25 0 0 297 5038_2 +PRL_Young_Final 5038 2 115 1 -25 0 0 329 5038_2 +PRL_Young_Final 5038 2 116 2 25 1 1 295 5038_2 +PRL_Young_Final 5038 2 117 2 25 0 1 318 5038_2 +PRL_Young_Final 5038 2 118 2 -25 0 1 322 5038_2 +PRL_Young_Final 5038 2 119 2 25 0 1 16 5038_2 +PRL_Young_Final 5038 2 120 2 25 0 1 310 5038_2 +PRL_Young_Final 5038 2 121 2 -25 0 1 327 5038_2 +PRL_Young_Final 5038 2 122 1 25 1 0 334 5038_2 +PRL_Young_Final 5038 2 123 1 -25 0 0 330 5038_2 +PRL_Young_Final 5038 2 124 1 -25 0 0 355 5038_2 +PRL_Young_Final 5038 2 125 2 25 1 1 378 5038_2 +PRL_Young_Final 5038 2 126 2 25 0 1 411 5038_2 +PRL_Young_Final 5038 2 127 2 25 0 1 357 5038_2 +PRL_Young_Final 5038 2 128 2 25 0 1 400 5038_2 +PRL_Young_Final 5038 2 129 2 25 0 1 516 5038_2 +PRL_Young_Final 5038 2 130 2 -25 0 1 392 5038_2 +PRL_Young_Final 5038 2 131 2 -25 0 1 294 5038_2 +PRL_Young_Final 5038 2 132 1 -25 1 0 299 5038_2 +PRL_Young_Final 5038 2 133 2 25 1 1 506 5038_2 +PRL_Young_Final 5038 2 134 2 25 0 1 329 5038_2 +PRL_Young_Final 5038 2 135 2 25 0 1 379 5038_2 +PRL_Young_Final 5038 2 136 2 25 0 1 314 5038_2 +PRL_Young_Final 5038 2 137 2 25 0 1 398 5038_2 +PRL_Young_Final 5038 2 138 2 25 0 1 425 5038_2 +PRL_Young_Final 5038 2 139 2 -25 0 1 351 5038_2 +PRL_Young_Final 5038 2 140 2 25 0 1 342 5038_2 +PRL_Young_Final 5038 2 141 2 -25 0 0 335 5038_2 +PRL_Young_Final 5038 2 142 2 -25 0 0 392 5038_2 +PRL_Young_Final 5038 2 143 2 -25 0 0 776 5038_2 +PRL_Young_Final 5038 2 144 1 25 1 1 310 5038_2 +PRL_Young_Final 5038 2 145 1 25 0 1 304 5038_2 +PRL_Young_Final 5038 2 146 1 25 0 1 329 5038_2 +PRL_Young_Final 5038 2 147 1 25 0 1 448 5038_2 +PRL_Young_Final 5038 2 148 1 25 0 1 943 5038_2 +PRL_Young_Final 5038 2 149 1 25 0 1 370 5038_2 +PRL_Young_Final 5038 2 150 1 -25 0 1 356 5038_2 +PRL_Young_Final 5038 2 151 1 -25 0 0 431 5038_2 +PRL_Young_Final 5038 2 152 2 25 1 1 275 5038_2 +PRL_Young_Final 5038 2 153 2 25 0 1 345 5038_2 +PRL_Young_Final 5038 2 154 2 -25 0 1 506 5038_2 +PRL_Young_Final 5038 2 155 2 25 0 1 376 5038_2 +PRL_Young_Final 5038 2 156 2 25 0 1 346 5038_2 +PRL_Young_Final 5038 2 157 2 25 0 1 227 5038_2 +PRL_Young_Final 5038 2 158 2 25 0 0 512 5038_2 +PRL_Young_Final 5038 2 159 2 -25 0 0 383 5038_2 +PRL_Young_Final 5038 2 160 1 25 1 1 1125 5038_2 +PRL_Young_Final 5038 2 161 1 25 0 1 393 5038_2 +PRL_Young_Final 5038 2 162 1 -25 0 1 497 5038_2 +PRL_Young_Final 5038 2 163 1 -25 0 1 375 5038_2 +PRL_Young_Final 5038 2 164 2 25 1 0 291 5038_2 +PRL_Young_Final 5038 2 165 2 -25 0 0 390 5038_2 +PRL_Young_Final 5038 2 166 2 -25 0 0 389 5038_2 +PRL_Young_Final 5038 2 167 1 25 1 1 333 5038_2 +PRL_Young_Final 5038 2 168 1 25 0 1 383 5038_2 +PRL_Young_Final 5038 2 169 1 25 0 1 1005 5038_2 +PRL_Young_Final 5038 2 170 1 25 0 1 618 5038_2 +PRL_Young_Final 5038 2 171 1 25 0 1 448 5038_2 +PRL_Young_Final 5038 2 172 1 25 0 1 391 5038_2 +PRL_Young_Final 5038 2 173 1 -25 0 1 448 5038_2 +PRL_Young_Final 5038 2 174 1 -25 0 0 131 5038_2 +PRL_Young_Final 5038 2 175 2 25 1 1 364 5038_2 +PRL_Young_Final 5038 2 176 2 25 0 1 335 5038_2 +PRL_Young_Final 5038 2 177 2 25 0 1 473 5038_2 +PRL_Young_Final 5038 2 178 2 25 0 1 376 5038_2 
+PRL_Young_Final 5038 2 179 2 25 0 1 423 5038_2 +PRL_Young_Final 5038 2 180 2 25 0 1 509 5038_2 +PRL_Young_Final 5038 2 181 2 25 0 1 659 5038_2 +PRL_Young_Final 5038 2 182 2 -25 0 1 450 5038_2 +PRL_Young_Final 5038 2 183 2 -25 0 0 415 5038_2 +PRL_Young_Final 5038 2 184 2 -25 0 0 610 5038_2 +PRL_Young_Final 5038 2 185 1 25 1 1 328 5038_2 +PRL_Young_Final 5038 2 186 1 25 0 1 313 5038_2 +PRL_Young_Final 5038 2 187 1 -25 0 1 344 5038_2 +PRL_Young_Final 5038 2 188 1 25 0 1 399 5038_2 +PRL_Young_Final 5038 2 189 1 25 0 1 436 5038_2 +PRL_Young_Final 5038 2 190 1 25 0 1 588 5038_2 +PRL_Young_Final 5038 2 191 1 25 0 1 523 5038_2 +PRL_Young_Final 5038 2 192 1 25 0 1 735 5038_2 +PRL_Young_Final 5038 2 193 1 -25 0 0 2022 5038_2 +PRL_Young_Final 5038 2 194 1 25 0 0 338 5038_2 +PRL_Young_Final 5038 2 195 1 25 0 0 458 5038_2 +PRL_Young_Final 5038 2 196 1 -25 0 0 535 5038_2 +PRL_Young_Final 5038 2 197 1 -25 0 0 325 5038_2 +PRL_Young_Final 5038 2 198 2 -25 1 1 286 5038_2 +PRL_Young_Final 5038 2 199 2 -25 0 1 355 5038_2 +PRL_Young_Final 5038 2 200 1 -25 1 0 360 5038_2 +PRL_Young_Final 5038 3 1 2 25 1 1 486 5038_3 +PRL_Young_Final 5038 3 2 2 25 0 1 366 5038_3 +PRL_Young_Final 5038 3 3 2 25 0 1 364 5038_3 +PRL_Young_Final 5038 3 4 2 25 0 1 396 5038_3 +PRL_Young_Final 5038 3 5 2 25 0 1 324 5038_3 +PRL_Young_Final 5038 3 6 2 25 0 1 460 5038_3 +PRL_Young_Final 5038 3 7 2 -25 0 1 320 5038_3 +PRL_Young_Final 5038 3 8 2 25 0 1 377 5038_3 +PRL_Young_Final 5038 3 9 2 -25 0 0 370 5038_3 +PRL_Young_Final 5038 3 10 2 -25 0 0 1010 5038_3 +PRL_Young_Final 5038 3 11 1 25 1 1 369 5038_3 +PRL_Young_Final 5038 3 12 1 25 0 1 358 5038_3 +PRL_Young_Final 5038 3 13 1 25 0 1 373 5038_3 +PRL_Young_Final 5038 3 14 1 25 0 1 56 5038_3 +PRL_Young_Final 5038 3 15 1 25 0 1 285 5038_3 +PRL_Young_Final 5038 3 16 1 25 0 1 320 5038_3 +PRL_Young_Final 5038 3 17 1 -25 0 1 319 5038_3 +PRL_Young_Final 5038 3 18 1 -25 0 0 306 5038_3 +PRL_Young_Final 5038 3 19 2 25 1 1 321 5038_3 +PRL_Young_Final 5038 3 20 2 25 0 1 351 5038_3 +PRL_Young_Final 5038 3 21 2 -25 0 1 375 5038_3 +PRL_Young_Final 5038 3 22 2 25 0 1 360 5038_3 +PRL_Young_Final 5038 3 23 2 25 0 1 387 5038_3 +PRL_Young_Final 5038 3 24 2 25 0 1 321 5038_3 +PRL_Young_Final 5038 3 25 2 25 0 1 339 5038_3 +PRL_Young_Final 5038 3 26 2 25 0 1 299 5038_3 +PRL_Young_Final 5038 3 27 2 25 0 0 214 5038_3 +PRL_Young_Final 5038 3 28 2 -25 0 0 441 5038_3 +PRL_Young_Final 5038 3 29 2 -25 0 0 476 5038_3 +PRL_Young_Final 5038 3 30 1 -25 1 1 307 5038_3 +PRL_Young_Final 5038 3 31 2 -25 1 0 597 5038_3 +PRL_Young_Final 5038 3 32 1 -25 1 1 631 5038_3 +PRL_Young_Final 5038 3 33 1 25 0 1 419 5038_3 +PRL_Young_Final 5038 3 34 1 25 0 1 289 5038_3 +PRL_Young_Final 5038 3 35 1 25 0 1 279 5038_3 +PRL_Young_Final 5038 3 36 1 25 0 1 424 5038_3 +PRL_Young_Final 5038 3 37 1 25 0 1 335 5038_3 +PRL_Young_Final 5038 3 38 1 25 0 1 522 5038_3 +PRL_Young_Final 5038 3 39 1 -25 0 0 485 5038_3 +PRL_Young_Final 5038 3 40 1 -25 0 0 401 5038_3 +PRL_Young_Final 5038 3 41 2 -25 1 1 377 5038_3 +PRL_Young_Final 5038 3 42 2 25 0 1 305 5038_3 +PRL_Young_Final 5038 3 43 2 25 0 1 19 5038_3 +PRL_Young_Final 5038 3 44 2 25 0 1 296 5038_3 +PRL_Young_Final 5038 3 45 2 25 0 1 254 5038_3 +PRL_Young_Final 5038 3 46 2 25 0 1 212 5038_3 +PRL_Young_Final 5038 3 47 2 -25 0 0 201 5038_3 +PRL_Young_Final 5038 3 48 2 -25 0 0 164 5038_3 +PRL_Young_Final 5038 3 49 1 25 1 1 727 5038_3 +PRL_Young_Final 5038 3 50 1 25 0 1 323 5038_3 +PRL_Young_Final 5038 3 51 1 -25 0 1 440 5038_3 +PRL_Young_Final 5038 3 52 2 25 1 0 705 5038_3 +PRL_Young_Final 5038 3 53 2 -25 0 0 
320 5038_3 +PRL_Young_Final 5038 3 54 2 25 0 0 329 5038_3 +PRL_Young_Final 5038 3 55 2 -25 0 0 349 5038_3 +PRL_Young_Final 5038 3 56 2 -25 0 0 528 5038_3 +PRL_Young_Final 5038 3 57 1 25 1 1 338 5038_3 +PRL_Young_Final 5038 3 58 1 25 0 1 380 5038_3 +PRL_Young_Final 5038 3 59 1 -25 0 1 406 5038_3 +PRL_Young_Final 5038 3 60 1 25 0 1 419 5038_3 +PRL_Young_Final 5038 3 61 1 25 0 1 381 5038_3 +PRL_Young_Final 5038 3 62 1 25 0 1 432 5038_3 +PRL_Young_Final 5038 3 63 1 25 0 1 443 5038_3 +PRL_Young_Final 5038 3 64 1 -25 0 0 273 5038_3 +PRL_Young_Final 5038 3 65 1 -25 0 0 246 5038_3 +PRL_Young_Final 5038 3 66 2 25 1 1 321 5038_3 +PRL_Young_Final 5038 3 67 2 -25 0 1 317 5038_3 +PRL_Young_Final 5038 3 68 2 -25 0 1 409 5038_3 +PRL_Young_Final 5038 3 69 1 -25 1 0 293 5038_3 +PRL_Young_Final 5038 3 70 2 25 1 1 963 5038_3 +PRL_Young_Final 5038 3 71 2 25 0 1 398 5038_3 +PRL_Young_Final 5038 3 72 2 25 0 1 395 5038_3 +PRL_Young_Final 5038 3 73 2 25 0 1 355 5038_3 +PRL_Young_Final 5038 3 74 2 25 0 1 315 5038_3 +PRL_Young_Final 5038 3 75 2 25 0 1 467 5038_3 +PRL_Young_Final 5038 3 76 2 -25 0 0 758 5038_3 +PRL_Young_Final 5038 3 77 2 25 0 0 547 5038_3 +PRL_Young_Final 5038 3 78 2 25 0 0 339 5038_3 +PRL_Young_Final 5038 3 79 2 -25 0 0 442 5038_3 +PRL_Young_Final 5038 3 80 2 -25 0 0 471 5038_3 +PRL_Young_Final 5038 3 81 1 -25 1 1 497 5038_3 +PRL_Young_Final 5038 3 82 2 -25 1 0 2279 5038_3 +PRL_Young_Final 5038 3 83 1 25 1 1 328 5038_3 +PRL_Young_Final 5038 3 84 1 25 0 1 397 5038_3 +PRL_Young_Final 5038 3 85 1 25 0 1 531 5038_3 +PRL_Young_Final 5038 3 86 1 25 0 1 343 5038_3 +PRL_Young_Final 5038 3 87 1 25 0 1 472 5038_3 +PRL_Young_Final 5038 3 88 1 25 0 1 543 5038_3 +PRL_Young_Final 5038 3 89 1 -25 0 0 574 5038_3 +PRL_Young_Final 5038 3 90 1 -25 0 0 975 5038_3 +PRL_Young_Final 5038 3 91 2 25 1 1 1035 5038_3 +PRL_Young_Final 5038 3 92 2 -25 0 1 454 5038_3 +PRL_Young_Final 5038 3 93 2 25 0 1 370 5038_3 +PRL_Young_Final 5038 3 94 2 25 0 1 583 5038_3 +PRL_Young_Final 5038 3 95 2 -25 0 1 333 5038_3 +PRL_Young_Final 5038 3 96 2 25 0 1 508 5038_3 +PRL_Young_Final 5038 3 97 2 25 0 1 262 5038_3 +PRL_Young_Final 5038 3 98 2 -25 0 0 645 5038_3 +PRL_Young_Final 5038 3 99 2 25 0 0 1085 5038_3 +PRL_Young_Final 5038 3 100 2 -25 0 0 423 5038_3 +PRL_Young_Final 5038 3 101 2 -25 0 0 1003 5038_3 +PRL_Young_Final 5038 3 102 1 25 1 1 530 5038_3 +PRL_Young_Final 5038 3 103 1 25 0 1 388 5038_3 +PRL_Young_Final 5038 3 104 1 25 0 1 424 5038_3 +PRL_Young_Final 5038 3 105 1 -25 0 1 536 5038_3 +PRL_Young_Final 5038 3 106 1 -25 0 1 748 5038_3 +PRL_Young_Final 5038 3 107 2 -25 1 0 1117 5038_3 +PRL_Young_Final 5038 3 108 1 25 1 1 1623 5038_3 +PRL_Young_Final 5038 3 109 1 25 0 1 553 5038_3 +PRL_Young_Final 5038 3 110 1 25 0 1 348 5038_3 +PRL_Young_Final 5038 3 111 1 25 0 1 325 5038_3 +PRL_Young_Final 5038 3 112 1 25 0 1 388 5038_3 +PRL_Young_Final 5038 3 113 1 25 0 1 349 5038_3 +PRL_Young_Final 5038 3 114 1 -25 0 0 406 5038_3 +PRL_Young_Final 5038 3 115 1 -25 0 0 1710 5038_3 +PRL_Young_Final 5038 3 116 2 -25 1 1 553 5038_3 +PRL_Young_Final 5038 3 117 1 -25 1 0 356 5038_3 +PRL_Young_Final 5038 3 118 2 25 1 1 290 5038_3 +PRL_Young_Final 5038 3 119 2 25 0 1 167 5038_3 +PRL_Young_Final 5038 3 120 2 25 0 1 250 5038_3 +PRL_Young_Final 5038 3 121 2 25 0 1 278 5038_3 +PRL_Young_Final 5038 3 122 2 25 0 1 344 5038_3 +PRL_Young_Final 5038 3 123 2 -25 0 0 348 5038_3 +PRL_Young_Final 5038 3 124 2 25 0 0 511 5038_3 +PRL_Young_Final 5038 3 125 2 -25 0 0 660 5038_3 +PRL_Young_Final 5038 3 126 2 25 0 0 509 5038_3 +PRL_Young_Final 5038 3 127 2 -25 0 0 293 5038_3 
+PRL_Young_Final 5038 3 128 1 25 1 1 492 5038_3 +PRL_Young_Final 5038 3 129 1 25 0 1 353 5038_3 +PRL_Young_Final 5038 3 130 1 -25 0 1 412 5038_3 +PRL_Young_Final 5038 3 131 1 25 0 1 683 5038_3 +PRL_Young_Final 5038 3 132 1 25 0 1 1084 5038_3 +PRL_Young_Final 5038 3 133 1 -25 0 1 1205 5038_3 +PRL_Young_Final 5038 3 134 1 25 0 1 292 5038_3 +PRL_Young_Final 5038 3 135 1 -25 0 0 496 5038_3 +PRL_Young_Final 5038 3 136 2 25 1 1 882 5038_3 +PRL_Young_Final 5038 3 137 2 25 0 1 419 5038_3 +PRL_Young_Final 5038 3 138 2 25 0 1 425 5038_3 +PRL_Young_Final 5038 3 139 2 25 0 1 488 5038_3 +PRL_Young_Final 5038 3 140 2 -25 0 1 625 5038_3 +PRL_Young_Final 5038 3 141 2 -25 0 1 149 5038_3 +PRL_Young_Final 5038 3 142 1 25 1 1 726 5038_3 +PRL_Young_Final 5038 3 143 1 25 0 1 479 5038_3 +PRL_Young_Final 5038 3 144 1 25 0 1 640 5038_3 +PRL_Young_Final 5038 3 145 1 25 0 1 547 5038_3 +PRL_Young_Final 5038 3 146 1 25 0 1 1157 5038_3 +PRL_Young_Final 5038 3 147 1 25 0 1 610 5038_3 +PRL_Young_Final 5038 3 148 1 -25 0 0 398 5038_3 +PRL_Young_Final 5038 3 149 1 -25 0 0 443 5038_3 +PRL_Young_Final 5038 3 150 2 -25 1 1 341 5038_3 +PRL_Young_Final 5038 3 151 2 25 0 1 453 5038_3 +PRL_Young_Final 5038 3 152 2 25 0 1 847 5038_3 +PRL_Young_Final 5038 3 153 2 25 0 1 394 5038_3 +PRL_Young_Final 5038 3 154 2 25 0 1 323 5038_3 +PRL_Young_Final 5038 3 155 2 -25 0 0 465 5038_3 +PRL_Young_Final 5038 3 156 2 -25 0 0 528 5038_3 +PRL_Young_Final 5038 3 157 1 25 1 1 628 5038_3 +PRL_Young_Final 5038 3 158 1 25 0 1 369 5038_3 +PRL_Young_Final 5038 3 159 1 25 0 1 366 5038_3 +PRL_Young_Final 5038 3 160 1 -25 0 1 420 5038_3 +PRL_Young_Final 5038 3 161 1 25 0 1 497 5038_3 +PRL_Young_Final 5038 3 162 1 25 0 0 1019 5038_3 +PRL_Young_Final 5038 3 163 1 25 0 0 468 5038_3 +PRL_Young_Final 5038 3 164 1 -25 0 0 319 5038_3 +PRL_Young_Final 5038 3 165 1 -25 0 0 819 5038_3 +PRL_Young_Final 5038 3 166 2 25 1 1 683 5038_3 +PRL_Young_Final 5038 3 167 2 -25 0 1 434 5038_3 +PRL_Young_Final 5038 3 168 2 25 0 1 417 5038_3 +PRL_Young_Final 5038 3 169 2 25 0 1 564 5038_3 +PRL_Young_Final 5038 3 170 2 25 0 1 431 5038_3 +PRL_Young_Final 5038 3 171 2 25 0 1 391 5038_3 +PRL_Young_Final 5038 3 172 2 25 0 1 331 5038_3 +PRL_Young_Final 5038 3 173 2 -25 0 1 332 5038_3 +PRL_Young_Final 5038 3 174 2 -25 0 0 561 5038_3 +PRL_Young_Final 5038 3 175 1 -25 1 1 345 5038_3 +PRL_Young_Final 5038 3 176 1 25 0 1 290 5038_3 +PRL_Young_Final 5038 3 177 1 25 0 1 514 5038_3 +PRL_Young_Final 5038 3 178 1 25 0 1 451 5038_3 +PRL_Young_Final 5038 3 179 1 25 0 1 459 5038_3 +PRL_Young_Final 5038 3 180 1 25 0 1 90 5038_3 +PRL_Young_Final 5038 3 181 1 25 0 1 449 5038_3 +PRL_Young_Final 5038 3 182 1 -25 0 1 452 5038_3 +PRL_Young_Final 5038 3 183 1 -25 0 0 161 5038_3 +PRL_Young_Final 5038 3 184 2 25 1 1 1073 5038_3 +PRL_Young_Final 5038 3 185 2 25 0 1 702 5038_3 +PRL_Young_Final 5038 3 186 2 25 0 1 1401 5038_3 +PRL_Young_Final 5038 3 187 2 25 0 1 567 5038_3 +PRL_Young_Final 5038 3 188 2 25 0 1 1081 5038_3 +PRL_Young_Final 5038 3 189 2 -25 0 0 659 5038_3 +PRL_Young_Final 5038 3 190 2 -25 0 0 977 5038_3 +PRL_Young_Final 5038 3 191 1 25 1 1 361 5038_3 +PRL_Young_Final 5038 3 192 1 25 0 1 625 5038_3 +PRL_Young_Final 5038 3 193 1 -25 0 1 355 5038_3 +PRL_Young_Final 5038 3 194 2 25 1 0 519 5038_3 +PRL_Young_Final 5038 3 195 2 -25 0 0 348 5038_3 +PRL_Young_Final 5038 3 196 1 25 1 1 616 5038_3 +PRL_Young_Final 5038 3 197 1 25 0 1 322 5038_3 +PRL_Young_Final 5038 3 198 1 -25 0 1 652 5038_3 +PRL_Young_Final 5038 3 199 2 -25 1 0 321 5038_3 +PRL_Young_Final 5038 3 200 1 25 1 1 863 5038_3 +PRL_Young_Final 
5036 1 1 1 25 1 1 1282 5036_1 +PRL_Young_Final 5036 1 2 1 25 0 1 1282 5036_1 +PRL_Young_Final 5036 1 3 1 25 0 1 628 5036_1 +PRL_Young_Final 5036 1 4 1 -25 0 1 595 5036_1 +PRL_Young_Final 5036 1 5 1 25 0 1 817 5036_1 +PRL_Young_Final 5036 1 6 1 25 0 1 437 5036_1 +PRL_Young_Final 5036 1 7 1 -25 0 1 472 5036_1 +PRL_Young_Final 5036 1 8 1 25 0 0 459 5036_1 +PRL_Young_Final 5036 1 9 1 -25 0 0 739 5036_1 +PRL_Young_Final 5036 1 10 1 -25 0 0 541 5036_1 +PRL_Young_Final 5036 1 11 1 -25 0 0 538 5036_1 +PRL_Young_Final 5036 1 12 2 25 1 1 1258 5036_1 +PRL_Young_Final 5036 1 13 2 25 0 1 441 5036_1 +PRL_Young_Final 5036 1 14 2 25 0 1 485 5036_1 +PRL_Young_Final 5036 1 15 2 25 0 1 463 5036_1 +PRL_Young_Final 5036 1 16 2 25 0 1 466 5036_1 +PRL_Young_Final 5036 1 17 2 -25 0 1 610 5036_1 +PRL_Young_Final 5036 1 18 2 -25 0 0 421 5036_1 +PRL_Young_Final 5036 1 19 2 -25 0 0 455 5036_1 +PRL_Young_Final 5036 1 20 2 -25 0 0 1076 5036_1 +PRL_Young_Final 5036 1 21 1 -25 1 1 653 5036_1 +PRL_Young_Final 5036 1 22 2 25 1 0 433 5036_1 +PRL_Young_Final 5036 1 23 2 25 0 0 406 5036_1 +PRL_Young_Final 5036 1 24 2 -25 0 0 468 5036_1 +PRL_Young_Final 5036 1 25 2 -25 0 0 422 5036_1 +PRL_Young_Final 5036 1 26 2 -25 0 0 352 5036_1 +PRL_Young_Final 5036 1 27 2 -25 0 0 265 5036_1 +PRL_Young_Final 5036 1 28 2 -25 0 0 475 5036_1 +PRL_Young_Final 5036 1 29 1 25 1 1 454 5036_1 +PRL_Young_Final 5036 1 30 1 25 0 1 310 5036_1 +PRL_Young_Final 5036 1 31 1 25 0 1 289 5036_1 +PRL_Young_Final 5036 1 32 1 25 0 1 330 5036_1 +PRL_Young_Final 5036 1 33 1 25 0 1 494 5036_1 +PRL_Young_Final 5036 1 34 1 25 0 1 305 5036_1 +PRL_Young_Final 5036 1 35 1 -25 0 1 478 5036_1 +PRL_Young_Final 5036 1 36 1 25 0 1 433 5036_1 +PRL_Young_Final 5036 1 37 1 -25 0 0 172 5036_1 +PRL_Young_Final 5036 1 38 1 25 0 0 400 5036_1 +PRL_Young_Final 5036 1 39 1 -25 0 0 402 5036_1 +PRL_Young_Final 5036 1 40 1 -25 0 0 195 5036_1 +PRL_Young_Final 5036 1 41 1 -25 0 0 333 5036_1 +PRL_Young_Final 5036 1 42 1 -25 0 0 197 5036_1 +PRL_Young_Final 5036 1 43 1 -25 0 0 281 5036_1 +PRL_Young_Final 5036 1 44 1 -25 0 0 85 5036_1 +PRL_Young_Final 5036 1 45 1 -25 0 0 160 5036_1 +PRL_Young_Final 5036 1 46 2 25 1 1 857 5036_1 +PRL_Young_Final 5036 1 47 2 25 0 1 598 5036_1 +PRL_Young_Final 5036 1 48 2 25 0 1 217 5036_1 +PRL_Young_Final 5036 1 49 2 25 0 1 93 5036_1 +PRL_Young_Final 5036 1 50 2 25 0 1 450 5036_1 +PRL_Young_Final 5036 1 51 2 25 0 0 459 5036_1 +PRL_Young_Final 5036 1 52 2 -25 0 0 514 5036_1 +PRL_Young_Final 5036 1 53 2 25 0 0 1401 5036_1 +PRL_Young_Final 5036 1 54 2 -25 0 0 503 5036_1 +PRL_Young_Final 5036 1 55 2 -25 0 0 116 5036_1 +PRL_Young_Final 5036 1 56 1 25 1 1 463 5036_1 +PRL_Young_Final 5036 1 57 1 -25 0 1 377 5036_1 +PRL_Young_Final 5036 1 58 1 25 0 1 447 5036_1 +PRL_Young_Final 5036 1 59 1 25 0 1 274 5036_1 +PRL_Young_Final 5036 1 60 1 -25 0 1 434 5036_1 +PRL_Young_Final 5036 1 61 1 25 0 1 251 5036_1 +PRL_Young_Final 5036 1 62 1 25 0 1 301 5036_1 +PRL_Young_Final 5036 1 63 1 25 0 1 319 5036_1 +PRL_Young_Final 5036 1 64 1 -25 0 0 24 5036_1 +PRL_Young_Final 5036 1 65 1 -25 0 0 219 5036_1 +PRL_Young_Final 5036 1 66 1 -25 0 0 463 5036_1 +PRL_Young_Final 5036 1 67 2 25 1 1 541 5036_1 +PRL_Young_Final 5036 1 68 2 25 0 1 243 5036_1 +PRL_Young_Final 5036 1 69 2 -25 0 1 109 5036_1 +PRL_Young_Final 5036 1 70 2 -25 0 1 415 5036_1 +PRL_Young_Final 5036 1 71 1 -25 1 0 557 5036_1 +PRL_Young_Final 5036 1 72 1 25 0 0 331 5036_1 +PRL_Young_Final 5036 1 73 1 25 0 0 495 5036_1 +PRL_Young_Final 5036 1 74 1 -25 0 0 216 5036_1 +PRL_Young_Final 5036 1 75 1 -25 0 0 356 5036_1 +PRL_Young_Final 
5036 1 76 1 -25 0 0 417 5036_1 +PRL_Young_Final 5036 1 77 2 25 1 1 457 5036_1 +PRL_Young_Final 5036 1 78 2 25 0 1 490 5036_1 +PRL_Young_Final 5036 1 79 2 25 0 1 196 5036_1 +PRL_Young_Final 5036 1 80 2 25 0 1 452 5036_1 +PRL_Young_Final 5036 1 81 2 25 0 1 224 5036_1 +PRL_Young_Final 5036 1 82 2 25 0 1 583 5036_1 +PRL_Young_Final 5036 1 83 2 -25 0 1 500 5036_1 +PRL_Young_Final 5036 1 84 1 -25 1 0 1289 5036_1 +PRL_Young_Final 5036 1 85 2 25 1 1 604 5036_1 +PRL_Young_Final 5036 1 86 2 25 0 1 485 5036_1 +PRL_Young_Final 5036 1 87 2 25 0 1 513 5036_1 +PRL_Young_Final 5036 1 88 2 25 0 1 1284 5036_1 +PRL_Young_Final 5036 1 89 2 25 0 1 801 5036_1 +PRL_Young_Final 5036 1 90 2 -25 0 0 686 5036_1 +PRL_Young_Final 5036 1 91 1 25 1 1 1769 5036_1 +PRL_Young_Final 5036 1 92 1 25 0 1 301 5036_1 +PRL_Young_Final 5036 1 93 1 -25 0 1 402 5036_1 +PRL_Young_Final 5036 1 94 2 -25 1 0 1137 5036_1 +PRL_Young_Final 5036 1 95 1 25 1 1 591 5036_1 +PRL_Young_Final 5036 1 96 1 25 0 1 199 5036_1 +PRL_Young_Final 5036 1 97 1 -25 0 1 263 5036_1 +PRL_Young_Final 5036 1 98 2 25 1 0 678 5036_1 +PRL_Young_Final 5036 1 99 2 -25 0 0 434 5036_1 +PRL_Young_Final 5036 1 100 2 -25 0 0 1157 5036_1 +PRL_Young_Final 5036 1 101 2 -25 0 0 1457 5036_1 +PRL_Young_Final 5036 1 102 1 25 1 1 492 5036_1 +PRL_Young_Final 5036 1 103 1 25 0 1 1344 5036_1 +PRL_Young_Final 5036 1 104 1 25 0 1 586 5036_1 +PRL_Young_Final 5036 1 105 1 25 0 1 666 5036_1 +PRL_Young_Final 5036 1 106 1 25 0 1 710 5036_1 +PRL_Young_Final 5036 1 107 1 -25 0 1 449 5036_1 +PRL_Young_Final 5036 1 108 2 -25 1 1 1025 5036_1 +PRL_Young_Final 5036 1 109 1 -25 1 0 484 5036_1 +PRL_Young_Final 5036 1 110 1 -25 0 0 427 5036_1 +PRL_Young_Final 5036 1 111 1 -25 0 0 9 5036_1 +PRL_Young_Final 5036 1 112 2 25 1 1 225 5036_1 +PRL_Young_Final 5036 1 113 2 25 0 1 519 5036_1 +PRL_Young_Final 5036 1 114 2 25 0 1 457 5036_1 +PRL_Young_Final 5036 1 115 2 25 0 1 91 5036_1 +PRL_Young_Final 5036 1 116 2 25 0 1 268 5036_1 +PRL_Young_Final 5036 1 117 2 25 0 1 535 5036_1 +PRL_Young_Final 5036 1 118 2 -25 0 1 590 5036_1 +PRL_Young_Final 5036 1 119 1 -25 1 0 727 5036_1 +PRL_Young_Final 5036 1 120 2 25 1 1 980 5036_1 +PRL_Young_Final 5036 1 121 2 25 0 1 399 5036_1 +PRL_Young_Final 5036 1 122 2 25 0 1 386 5036_1 +PRL_Young_Final 5036 1 123 2 25 0 1 294 5036_1 +PRL_Young_Final 5036 1 124 2 25 0 1 1345 5036_1 +PRL_Young_Final 5036 1 125 2 25 0 1 555 5036_1 +PRL_Young_Final 5036 1 126 2 25 0 1 516 5036_1 +PRL_Young_Final 5036 1 127 2 -25 0 1 707 5036_1 +PRL_Young_Final 5036 1 128 2 25 0 0 496 5036_1 +PRL_Young_Final 5036 1 129 2 -25 0 0 487 5036_1 +PRL_Young_Final 5036 1 130 2 25 0 0 237 5036_1 +PRL_Young_Final 5036 1 131 2 -25 0 0 455 5036_1 +PRL_Young_Final 5036 1 132 2 -25 0 0 537 5036_1 +PRL_Young_Final 5036 1 133 2 -25 0 0 514 5036_1 +PRL_Young_Final 5036 1 134 2 -25 0 0 1835 5036_1 +PRL_Young_Final 5036 1 135 2 -25 0 0 456 5036_1 +PRL_Young_Final 5036 1 136 2 -25 0 0 534 5036_1 +PRL_Young_Final 5036 1 137 1 25 1 1 1129 5036_1 +PRL_Young_Final 5036 1 138 1 25 0 1 140 5036_1 +PRL_Young_Final 5036 1 139 1 -25 0 1 409 5036_1 +PRL_Young_Final 5036 1 140 1 25 0 1 210 5036_1 +PRL_Young_Final 5036 1 141 1 25 0 1 242 5036_1 +PRL_Young_Final 5036 1 142 1 25 0 0 57 5036_1 +PRL_Young_Final 5036 1 143 1 25 0 0 49 5036_1 +PRL_Young_Final 5036 1 144 1 -25 0 0 167 5036_1 +PRL_Young_Final 5036 1 145 1 -25 0 0 1150 5036_1 +PRL_Young_Final 5036 1 146 1 -25 0 0 272 5036_1 +PRL_Young_Final 5036 1 147 1 -25 0 0 448 5036_1 +PRL_Young_Final 5036 1 148 1 -25 0 0 112 5036_1 +PRL_Young_Final 5036 1 149 1 -25 0 0 697 5036_1 
+PRL_Young_Final 5036 1 150 1 25 0 0 566 5036_1 +PRL_Young_Final 5036 1 151 1 -25 0 0 570 5036_1 +PRL_Young_Final 5036 1 152 1 -25 0 0 425 5036_1 +PRL_Young_Final 5036 1 153 1 -25 0 0 551 5036_1 +PRL_Young_Final 5036 1 154 2 25 1 1 382 5036_1 +PRL_Young_Final 5036 1 155 2 25 0 1 1614 5036_1 +PRL_Young_Final 5036 1 156 2 25 0 1 297 5036_1 +PRL_Young_Final 5036 1 157 2 -25 0 1 118 5036_1 +PRL_Young_Final 5036 1 158 2 -25 0 1 445 5036_1 +PRL_Young_Final 5036 1 159 2 25 0 1 145 5036_1 +PRL_Young_Final 5036 1 160 2 25 0 1 400 5036_1 +PRL_Young_Final 5036 1 161 2 -25 0 0 112 5036_1 +PRL_Young_Final 5036 1 162 1 25 1 1 491 5036_1 +PRL_Young_Final 5036 1 163 1 25 0 1 157 5036_1 +PRL_Young_Final 5036 1 164 1 25 0 1 433 5036_1 +PRL_Young_Final 5036 1 165 1 25 0 1 401 5036_1 +PRL_Young_Final 5036 1 166 1 -25 0 1 433 5036_1 +PRL_Young_Final 5036 1 167 2 25 1 1 484 5036_1 +PRL_Young_Final 5036 1 168 2 25 0 1 595 5036_1 +PRL_Young_Final 5036 1 169 2 25 0 1 422 5036_1 +PRL_Young_Final 5036 1 170 2 25 0 1 369 5036_1 +PRL_Young_Final 5036 1 171 2 25 0 1 411 5036_1 +PRL_Young_Final 5036 1 172 2 25 0 1 450 5036_1 +PRL_Young_Final 5036 1 173 2 25 0 1 161 5036_1 +PRL_Young_Final 5036 1 174 2 -25 0 1 1909 5036_1 +PRL_Young_Final 5036 1 175 1 25 1 1 1234 5036_1 +PRL_Young_Final 5036 1 176 1 25 0 1 477 5036_1 +PRL_Young_Final 5036 1 177 1 -25 0 1 406 5036_1 +PRL_Young_Final 5036 1 178 2 -25 1 0 495 5036_1 +PRL_Young_Final 5036 1 179 2 -25 0 0 475 5036_1 +PRL_Young_Final 5036 1 180 2 -25 0 0 776 5036_1 +PRL_Young_Final 5036 1 181 2 25 0 0 410 5036_1 +PRL_Young_Final 5036 1 182 2 -25 0 0 626 5036_1 +PRL_Young_Final 5036 1 183 2 25 0 0 2067 5036_1 +PRL_Young_Final 5036 1 184 2 -25 0 0 160 5036_1 +PRL_Young_Final 5036 1 185 2 -25 0 0 633 5036_1 +PRL_Young_Final 5036 1 186 2 -25 0 0 1419 5036_1 +PRL_Young_Final 5036 1 187 1 25 1 1 1555 5036_1 +PRL_Young_Final 5036 1 188 1 25 0 1 410 5036_1 +PRL_Young_Final 5036 1 189 1 25 0 1 542 5036_1 +PRL_Young_Final 5036 1 190 1 25 0 1 441 5036_1 +PRL_Young_Final 5036 1 191 1 25 0 1 189 5036_1 +PRL_Young_Final 5036 1 192 1 -25 0 0 395 5036_1 +PRL_Young_Final 5036 1 193 1 -25 0 0 130 5036_1 +PRL_Young_Final 5036 1 194 2 -25 1 1 67 5036_1 +PRL_Young_Final 5036 1 195 2 -25 0 1 55 5036_1 +PRL_Young_Final 5036 1 196 2 25 0 1 1132 5036_1 +PRL_Young_Final 5036 1 197 2 25 0 1 2044 5036_1 +PRL_Young_Final 5036 1 198 2 25 0 1 481 5036_1 +PRL_Young_Final 5036 1 199 2 -25 0 0 482 5036_1 +PRL_Young_Final 5036 1 200 2 25 0 0 950 5036_1 +PRL_Young_Final 5036 2 1 1 25 1 1 3389 5036_2 +PRL_Young_Final 5036 2 2 1 25 0 1 450 5036_2 +PRL_Young_Final 5036 2 3 1 25 0 1 452 5036_2 +PRL_Young_Final 5036 2 4 1 -25 0 1 400 5036_2 +PRL_Young_Final 5036 2 5 1 25 0 1 391 5036_2 +PRL_Young_Final 5036 2 6 1 25 0 1 2144 5036_2 +PRL_Young_Final 5036 2 7 1 25 0 1 429 5036_2 +PRL_Young_Final 5036 2 8 1 25 0 1 502 5036_2 +PRL_Young_Final 5036 2 9 1 25 0 0 372 5036_2 +PRL_Young_Final 5036 2 10 1 -25 0 0 107 5036_2 +PRL_Young_Final 5036 2 11 1 -25 0 0 486 5036_2 +PRL_Young_Final 5036 2 12 2 25 1 1 674 5036_2 +PRL_Young_Final 5036 2 13 2 25 0 1 478 5036_2 +PRL_Young_Final 5036 2 14 2 25 0 1 506 5036_2 +PRL_Young_Final 5036 2 15 2 -25 0 1 505 5036_2 +PRL_Young_Final 5036 2 16 2 25 0 1 285 5036_2 +PRL_Young_Final 5036 2 17 2 -25 0 0 485 5036_2 +PRL_Young_Final 5036 2 18 2 -25 0 0 497 5036_2 +PRL_Young_Final 5036 2 19 1 25 1 1 392 5036_2 +PRL_Young_Final 5036 2 20 1 -25 0 1 546 5036_2 +PRL_Young_Final 5036 2 21 1 25 0 1 107 5036_2 +PRL_Young_Final 5036 2 22 1 25 0 1 1539 5036_2 +PRL_Young_Final 5036 2 23 1 25 0 1 485 
5036_2 +PRL_Young_Final 5036 2 24 1 25 0 1 490 5036_2 +PRL_Young_Final 5036 2 25 1 25 0 1 272 5036_2 +PRL_Young_Final 5036 2 26 1 -25 0 1 321 5036_2 +PRL_Young_Final 5036 2 27 2 -25 1 1 1991 5036_2 +PRL_Young_Final 5036 2 28 1 -25 1 0 1638 5036_2 +PRL_Young_Final 5036 2 29 2 25 1 1 968 5036_2 +PRL_Young_Final 5036 2 30 2 25 0 1 514 5036_2 +PRL_Young_Final 5036 2 31 2 25 0 1 508 5036_2 +PRL_Young_Final 5036 2 32 2 25 0 1 308 5036_2 +PRL_Young_Final 5036 2 33 2 25 0 1 311 5036_2 +PRL_Young_Final 5036 2 34 2 25 0 1 240 5036_2 +PRL_Young_Final 5036 2 35 2 -25 0 0 499 5036_2 +PRL_Young_Final 5036 2 36 2 25 0 0 880 5036_2 +PRL_Young_Final 5036 2 37 2 -25 0 0 42 5036_2 +PRL_Young_Final 5036 2 38 1 -25 1 1 1118 5036_2 +PRL_Young_Final 5036 2 39 1 25 0 1 2073 5036_2 +PRL_Young_Final 5036 2 40 1 25 0 1 1534 5036_2 +PRL_Young_Final 5036 2 41 1 25 0 1 1537 5036_2 +PRL_Young_Final 5036 2 42 1 25 0 1 1447 5036_2 +PRL_Young_Final 5036 2 43 1 25 0 1 392 5036_2 +PRL_Young_Final 5036 2 44 1 25 0 1 447 5036_2 +PRL_Young_Final 5036 2 45 1 25 0 1 1778 5036_2 +PRL_Young_Final 5036 2 46 1 -25 0 0 1085 5036_2 +PRL_Young_Final 5036 2 47 1 -25 0 0 1708 5036_2 +PRL_Young_Final 5036 2 48 2 -25 1 1 469 5036_2 +PRL_Young_Final 5036 2 49 2 25 0 1 1336 5036_2 +PRL_Young_Final 5036 2 50 2 25 0 1 723 5036_2 +PRL_Young_Final 5036 2 51 2 -25 0 1 507 5036_2 +PRL_Young_Final 5036 2 52 2 25 0 1 261 5036_2 +PRL_Young_Final 5036 2 53 2 25 0 1 506 5036_2 +PRL_Young_Final 5036 2 54 2 25 0 1 437 5036_2 +PRL_Young_Final 5036 2 55 2 -25 0 0 178 5036_2 +PRL_Young_Final 5036 2 56 2 -25 0 0 1540 5036_2 +PRL_Young_Final 5036 2 57 1 25 1 1 458 5036_2 +PRL_Young_Final 5036 2 58 1 25 0 1 445 5036_2 +PRL_Young_Final 5036 2 59 1 -25 0 1 506 5036_2 +PRL_Young_Final 5036 2 60 1 -25 0 1 300 5036_2 +PRL_Young_Final 5036 2 61 2 -25 1 0 1634 5036_2 +PRL_Young_Final 5036 2 62 2 -25 0 0 268 5036_2 +PRL_Young_Final 5036 2 63 2 25 0 0 408 5036_2 +PRL_Young_Final 5036 2 64 2 -25 0 0 525 5036_2 +PRL_Young_Final 5036 2 65 2 25 0 0 88 5036_2 +PRL_Young_Final 5036 2 66 2 -25 0 0 1491 5036_2 +PRL_Young_Final 5036 2 67 2 -25 0 0 815 5036_2 +PRL_Young_Final 5036 2 68 1 25 1 1 829 5036_2 +PRL_Young_Final 5036 2 69 1 25 0 1 459 5036_2 +PRL_Young_Final 5036 2 70 1 25 0 1 808 5036_2 +PRL_Young_Final 5036 2 71 1 25 0 1 798 5036_2 +PRL_Young_Final 5036 2 72 1 25 0 1 541 5036_2 +PRL_Young_Final 5036 2 73 1 25 0 1 710 5036_2 +PRL_Young_Final 5036 2 74 1 -25 0 1 629 5036_2 +PRL_Young_Final 5036 2 75 1 -25 0 0 547 5036_2 +PRL_Young_Final 5036 2 76 2 25 1 1 2264 5036_2 +PRL_Young_Final 5036 2 77 2 25 0 1 443 5036_2 +PRL_Young_Final 5036 2 78 2 25 0 1 569 5036_2 +PRL_Young_Final 5036 2 79 2 25 0 1 371 5036_2 +PRL_Young_Final 5036 2 80 2 25 0 1 495 5036_2 +PRL_Young_Final 5036 2 81 2 25 0 1 464 5036_2 +PRL_Young_Final 5036 2 82 2 25 0 1 24 5036_2 +PRL_Young_Final 5036 2 83 2 -25 0 1 517 5036_2 +PRL_Young_Final 5036 2 84 2 -25 0 0 562 5036_2 +PRL_Young_Final 5036 2 85 1 25 1 1 1933 5036_2 +PRL_Young_Final 5036 2 86 1 25 0 1 485 5036_2 +PRL_Young_Final 5036 2 87 1 -25 0 1 79 5036_2 +PRL_Young_Final 5036 2 88 1 25 0 1 874 5036_2 +PRL_Young_Final 5036 2 89 1 25 0 1 125 5036_2 +PRL_Young_Final 5036 2 90 1 25 0 1 602 5036_2 +PRL_Young_Final 5036 2 91 1 25 0 1 622 5036_2 +PRL_Young_Final 5036 2 92 1 25 0 1 425 5036_2 +PRL_Young_Final 5036 2 93 1 -25 0 0 512 5036_2 +PRL_Young_Final 5036 2 94 1 -25 0 0 318 5036_2 +PRL_Young_Final 5036 2 95 2 -25 1 1 654 5036_2 +PRL_Young_Final 5036 2 96 2 -25 0 1 83 5036_2 +PRL_Young_Final 5036 2 97 2 25 0 1 195 5036_2 +PRL_Young_Final 5036 2 98 2 
25 0 1 301 5036_2 +PRL_Young_Final 5036 2 99 2 25 0 1 201 5036_2 +PRL_Young_Final 5036 2 100 2 25 0 0 498 5036_2 +PRL_Young_Final 5036 2 101 2 25 0 0 467 5036_2 +PRL_Young_Final 5036 2 102 2 -25 0 0 521 5036_2 +PRL_Young_Final 5036 2 103 2 -25 0 0 529 5036_2 +PRL_Young_Final 5036 2 104 2 -25 0 0 252 5036_2 +PRL_Young_Final 5036 2 105 1 25 1 1 424 5036_2 +PRL_Young_Final 5036 2 106 1 25 0 1 448 5036_2 +PRL_Young_Final 5036 2 107 1 25 0 1 403 5036_2 +PRL_Young_Final 5036 2 108 1 -25 0 1 130 5036_2 +PRL_Young_Final 5036 2 109 1 25 0 1 200 5036_2 +PRL_Young_Final 5036 2 110 1 -25 0 0 308 5036_2 +PRL_Young_Final 5036 2 111 1 -25 0 0 566 5036_2 +PRL_Young_Final 5036 2 112 2 25 1 1 196 5036_2 +PRL_Young_Final 5036 2 113 2 25 0 1 387 5036_2 +PRL_Young_Final 5036 2 114 2 25 0 1 1008 5036_2 +PRL_Young_Final 5036 2 115 2 25 0 1 1355 5036_2 +PRL_Young_Final 5036 2 116 2 25 0 1 153 5036_2 +PRL_Young_Final 5036 2 117 2 25 0 1 319 5036_2 +PRL_Young_Final 5036 2 118 2 -25 0 1 453 5036_2 +PRL_Young_Final 5036 2 119 2 25 0 1 228 5036_2 +PRL_Young_Final 5036 2 120 2 -25 0 0 1982 5036_2 +PRL_Young_Final 5036 2 121 2 25 0 0 247 5036_2 +PRL_Young_Final 5036 2 122 2 -25 0 0 1437 5036_2 +PRL_Young_Final 5036 2 123 2 -25 0 0 287 5036_2 +PRL_Young_Final 5036 2 124 2 -25 0 0 898 5036_2 +PRL_Young_Final 5036 2 125 2 -25 0 0 451 5036_2 +PRL_Young_Final 5036 2 126 1 25 1 1 416 5036_2 +PRL_Young_Final 5036 2 127 1 -25 0 1 1363 5036_2 +PRL_Young_Final 5036 2 128 1 25 0 1 383 5036_2 +PRL_Young_Final 5036 2 129 1 25 0 1 508 5036_2 +PRL_Young_Final 5036 2 130 1 25 0 1 562 5036_2 +PRL_Young_Final 5036 2 131 1 25 0 1 546 5036_2 +PRL_Young_Final 5036 2 132 1 -25 0 0 162 5036_2 +PRL_Young_Final 5036 2 133 1 -25 0 0 88 5036_2 +PRL_Young_Final 5036 2 134 2 25 1 1 560 5036_2 +PRL_Young_Final 5036 2 135 2 -25 0 1 730 5036_2 +PRL_Young_Final 5036 2 136 2 -25 0 1 163 5036_2 +PRL_Young_Final 5036 2 137 1 -25 1 0 1807 5036_2 +PRL_Young_Final 5036 2 138 1 25 0 0 506 5036_2 +PRL_Young_Final 5036 2 139 1 -25 0 0 521 5036_2 +PRL_Young_Final 5036 2 140 1 25 0 0 350 5036_2 +PRL_Young_Final 5036 2 141 1 -25 0 0 73 5036_2 +PRL_Young_Final 5036 2 142 2 25 1 1 600 5036_2 +PRL_Young_Final 5036 2 143 2 25 0 1 441 5036_2 +PRL_Young_Final 5036 2 144 2 25 0 1 131 5036_2 +PRL_Young_Final 5036 2 145 2 25 0 1 360 5036_2 +PRL_Young_Final 5036 2 146 2 25 0 1 553 5036_2 +PRL_Young_Final 5036 2 147 2 25 0 1 36 5036_2 +PRL_Young_Final 5036 2 148 2 -25 0 1 460 5036_2 +PRL_Young_Final 5036 2 149 1 -25 1 0 1844 5036_2 +PRL_Young_Final 5036 2 150 2 25 1 1 761 5036_2 +PRL_Young_Final 5036 2 151 2 25 0 1 334 5036_2 +PRL_Young_Final 5036 2 152 2 25 0 1 407 5036_2 +PRL_Young_Final 5036 2 153 2 25 0 1 255 5036_2 +PRL_Young_Final 5036 2 154 2 25 0 1 1566 5036_2 +PRL_Young_Final 5036 2 155 2 25 0 1 656 5036_2 +PRL_Young_Final 5036 2 156 2 25 0 1 514 5036_2 +PRL_Young_Final 5036 2 157 2 -25 0 1 111 5036_2 +PRL_Young_Final 5036 2 158 1 25 1 1 1447 5036_2 +PRL_Young_Final 5036 2 159 1 25 0 1 494 5036_2 +PRL_Young_Final 5036 2 160 1 -25 0 1 629 5036_2 +PRL_Young_Final 5036 2 161 1 25 0 1 973 5036_2 +PRL_Young_Final 5036 2 162 1 25 0 1 183 5036_2 +PRL_Young_Final 5036 2 163 1 -25 0 0 75 5036_2 +PRL_Young_Final 5036 2 164 2 25 1 1 1017 5036_2 +PRL_Young_Final 5036 2 165 2 25 0 1 513 5036_2 +PRL_Young_Final 5036 2 166 2 25 0 1 1553 5036_2 +PRL_Young_Final 5036 2 167 2 -25 0 1 920 5036_2 +PRL_Young_Final 5036 2 168 2 -25 0 1 509 5036_2 +PRL_Young_Final 5036 2 169 1 -25 1 0 1115 5036_2 +PRL_Young_Final 5036 2 170 1 -25 0 0 409 5036_2 +PRL_Young_Final 5036 2 171 2 25 1 1 737 
5036_2 +PRL_Young_Final 5036 2 172 2 25 0 1 19 5036_2 +PRL_Young_Final 5036 2 173 2 25 0 1 556 5036_2 +PRL_Young_Final 5036 2 174 2 25 0 1 461 5036_2 +PRL_Young_Final 5036 2 175 2 25 0 1 740 5036_2 +PRL_Young_Final 5036 2 176 2 25 0 1 483 5036_2 +PRL_Young_Final 5036 2 177 2 -25 0 1 488 5036_2 +PRL_Young_Final 5036 2 178 2 25 0 1 143 5036_2 +PRL_Young_Final 5036 2 179 2 -25 0 0 701 5036_2 +PRL_Young_Final 5036 2 180 1 25 1 1 1436 5036_2 +PRL_Young_Final 5036 2 181 1 25 0 1 471 5036_2 +PRL_Young_Final 5036 2 182 1 25 0 1 213 5036_2 +PRL_Young_Final 5036 2 183 1 25 0 1 377 5036_2 +PRL_Young_Final 5036 2 184 1 25 0 1 490 5036_2 +PRL_Young_Final 5036 2 185 1 25 0 1 631 5036_2 +PRL_Young_Final 5036 2 186 1 25 0 0 245 5036_2 +PRL_Young_Final 5036 2 187 1 25 0 0 995 5036_2 +PRL_Young_Final 5036 2 188 1 -25 0 0 974 5036_2 +PRL_Young_Final 5036 2 189 2 -25 1 1 1840 5036_2 +PRL_Young_Final 5036 2 190 1 -25 1 0 1510 5036_2 +PRL_Young_Final 5036 2 191 1 -25 0 0 412 5036_2 +PRL_Young_Final 5036 2 192 2 25 1 1 377 5036_2 +PRL_Young_Final 5036 2 193 2 25 0 1 160 5036_2 +PRL_Young_Final 5036 2 194 2 -25 0 1 410 5036_2 +PRL_Young_Final 5036 2 195 2 25 0 1 82 5036_2 +PRL_Young_Final 5036 2 196 2 25 0 1 509 5036_2 +PRL_Young_Final 5036 2 197 2 25 0 1 430 5036_2 +PRL_Young_Final 5036 2 198 2 -25 0 0 414 5036_2 +PRL_Young_Final 5036 2 199 1 25 1 1 282 5036_2 +PRL_Young_Final 5036 2 200 1 25 0 1 400 5036_2 +PRL_Young_Final 5036 3 1 1 -25 0 1 2267 5036_3 +PRL_Young_Final 5036 3 2 1 -25 0 1 628 5036_3 +PRL_Young_Final 5036 3 3 2 -25 1 0 1419 5036_3 +PRL_Young_Final 5036 3 4 2 -25 0 0 940 5036_3 +PRL_Young_Final 5036 3 5 2 25 0 0 556 5036_3 +PRL_Young_Final 5036 3 6 2 -25 0 0 378 5036_3 +PRL_Young_Final 5036 3 7 2 -25 0 0 304 5036_3 +PRL_Young_Final 5036 3 8 2 -25 0 0 819 5036_3 +PRL_Young_Final 5036 3 9 1 25 1 1 770 5036_3 +PRL_Young_Final 5036 3 10 1 25 0 1 1243 5036_3 +PRL_Young_Final 5036 3 11 1 25 0 1 587 5036_3 +PRL_Young_Final 5036 3 12 1 25 0 1 109 5036_3 +PRL_Young_Final 5036 3 13 1 25 0 1 710 5036_3 +PRL_Young_Final 5036 3 14 1 -25 0 0 446 5036_3 +PRL_Young_Final 5036 3 15 1 -25 0 0 174 5036_3 +PRL_Young_Final 5036 3 16 1 -25 0 0 946 5036_3 +PRL_Young_Final 5036 3 17 2 25 1 1 453 5036_3 +PRL_Young_Final 5036 3 18 2 -25 0 1 496 5036_3 +PRL_Young_Final 5036 3 19 2 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 20 2 25 0 1 464 5036_3 +PRL_Young_Final 5036 3 21 2 25 0 1 263 5036_3 +PRL_Young_Final 5036 3 22 2 25 0 1 321 5036_3 +PRL_Young_Final 5036 3 23 2 25 0 1 326 5036_3 +PRL_Young_Final 5036 3 24 2 -25 0 0 421 5036_3 +PRL_Young_Final 5036 3 25 1 25 1 1 801 5036_3 +PRL_Young_Final 5036 3 26 1 25 0 1 452 5036_3 +PRL_Young_Final 5036 3 27 1 -25 0 1 510 5036_3 +PRL_Young_Final 5036 3 28 1 25 0 1 490 5036_3 +PRL_Young_Final 5036 3 29 1 25 0 1 464 5036_3 +PRL_Young_Final 5036 3 30 1 25 0 0 476 5036_3 +PRL_Young_Final 5036 3 31 1 -25 0 0 610 5036_3 +PRL_Young_Final 5036 3 32 2 -25 1 1 877 5036_3 +PRL_Young_Final 5036 3 33 2 25 0 1 769 5036_3 +PRL_Young_Final 5036 3 34 2 25 0 1 1131 5036_3 +PRL_Young_Final 5036 3 35 2 25 0 1 512 5036_3 +PRL_Young_Final 5036 3 36 2 25 0 1 465 5036_3 +PRL_Young_Final 5036 3 37 2 25 0 1 486 5036_3 +PRL_Young_Final 5036 3 38 2 -25 0 1 669 5036_3 +PRL_Young_Final 5036 3 39 2 -25 0 1 949 5036_3 +PRL_Young_Final 5036 3 40 1 25 1 1 830 5036_3 +PRL_Young_Final 5036 3 41 1 25 0 1 490 5036_3 +PRL_Young_Final 5036 3 42 1 25 0 1 229 5036_3 +PRL_Young_Final 5036 3 43 1 25 0 1 331 5036_3 +PRL_Young_Final 5036 3 44 1 25 0 1 462 5036_3 +PRL_Young_Final 5036 3 45 1 25 0 1 272 5036_3 +PRL_Young_Final 
5036 3 46 1 25 0 0 1480 5036_3 +PRL_Young_Final 5036 3 47 1 -25 0 0 562 5036_3 +PRL_Young_Final 5036 3 48 1 -25 0 0 908 5036_3 +PRL_Young_Final 5036 3 49 2 -25 1 1 467 5036_3 +PRL_Young_Final 5036 3 50 2 25 0 1 392 5036_3 +PRL_Young_Final 5036 3 51 2 25 0 1 457 5036_3 +PRL_Young_Final 5036 3 52 2 25 0 1 667 5036_3 +PRL_Young_Final 5036 3 53 2 25 0 1 576 5036_3 +PRL_Young_Final 5036 3 54 2 -25 0 0 196 5036_3 +PRL_Young_Final 5036 3 55 2 -25 0 0 213 5036_3 +PRL_Young_Final 5036 3 56 1 25 1 1 867 5036_3 +PRL_Young_Final 5036 3 57 1 25 0 1 183 5036_3 +PRL_Young_Final 5036 3 58 1 25 0 1 403 5036_3 +PRL_Young_Final 5036 3 59 1 -25 0 1 460 5036_3 +PRL_Young_Final 5036 3 60 1 25 0 1 747 5036_3 +PRL_Young_Final 5036 3 61 1 25 0 1 758 5036_3 +PRL_Young_Final 5036 3 62 1 -25 0 0 62 5036_3 +PRL_Young_Final 5036 3 63 1 -25 0 0 715 5036_3 +PRL_Young_Final 5036 3 64 1 25 0 0 231 5036_3 +PRL_Young_Final 5036 3 65 1 25 0 0 458 5036_3 +PRL_Young_Final 5036 3 66 1 -25 0 0 487 5036_3 +PRL_Young_Final 5036 3 67 1 -25 0 0 59 5036_3 +PRL_Young_Final 5036 3 68 2 -25 1 1 52 5036_3 +PRL_Young_Final 5036 3 69 2 25 0 1 789 5036_3 +PRL_Young_Final 5036 3 70 2 25 0 1 432 5036_3 +PRL_Young_Final 5036 3 71 2 25 0 1 759 5036_3 +PRL_Young_Final 5036 3 72 2 25 0 1 3790 5036_3 +PRL_Young_Final 5036 3 73 2 -25 0 0 638 5036_3 +PRL_Young_Final 5036 3 74 2 -25 0 0 1516 5036_3 +PRL_Young_Final 5036 3 75 1 25 1 1 759 5036_3 +PRL_Young_Final 5036 3 76 1 -25 0 1 455 5036_3 +PRL_Young_Final 5036 3 77 1 -25 0 1 582 5036_3 +PRL_Young_Final 5036 3 78 1 25 0 1 133 5036_3 +PRL_Young_Final 5036 3 79 1 25 0 1 456 5036_3 +PRL_Young_Final 5036 3 80 1 25 0 1 619 5036_3 +PRL_Young_Final 5036 3 81 1 25 0 1 513 5036_3 +PRL_Young_Final 5036 3 82 1 25 0 1 985 5036_3 +PRL_Young_Final 5036 3 83 1 -25 0 0 134 5036_3 +PRL_Young_Final 5036 3 84 1 -25 0 0 569 5036_3 +PRL_Young_Final 5036 3 85 2 25 1 1 473 5036_3 +PRL_Young_Final 5036 3 86 2 -25 0 1 891 5036_3 +PRL_Young_Final 5036 3 87 2 25 0 1 487 5036_3 +PRL_Young_Final 5036 3 88 2 25 0 1 333 5036_3 +PRL_Young_Final 5036 3 89 2 25 0 1 424 5036_3 +PRL_Young_Final 5036 3 90 2 25 0 1 474 5036_3 +PRL_Young_Final 5036 3 91 2 25 0 1 407 5036_3 +PRL_Young_Final 5036 3 92 2 25 0 1 236 5036_3 +PRL_Young_Final 5036 3 93 2 25 0 0 57 5036_3 +PRL_Young_Final 5036 3 94 2 -25 0 0 785 5036_3 +PRL_Young_Final 5036 3 95 1 25 1 1 933 5036_3 +PRL_Young_Final 5036 3 96 1 -25 0 1 522 5036_3 +PRL_Young_Final 5036 3 97 1 25 0 1 243 5036_3 +PRL_Young_Final 5036 3 98 1 25 0 1 58 5036_3 +PRL_Young_Final 5036 3 99 1 -25 0 1 498 5036_3 +PRL_Young_Final 5036 3 100 2 -25 1 0 587 5036_3 +PRL_Young_Final 5036 3 101 2 -25 0 0 89 5036_3 +PRL_Young_Final 5036 3 102 2 -25 0 0 201 5036_3 +PRL_Young_Final 5036 3 103 1 25 1 1 446 5036_3 +PRL_Young_Final 5036 3 104 1 25 0 1 540 5036_3 +PRL_Young_Final 5036 3 105 1 25 0 1 507 5036_3 +PRL_Young_Final 5036 3 106 1 25 0 1 432 5036_3 +PRL_Young_Final 5036 3 107 1 25 0 1 525 5036_3 +PRL_Young_Final 5036 3 108 1 -25 0 1 47 5036_3 +PRL_Young_Final 5036 3 109 1 -25 0 1 210 5036_3 +PRL_Young_Final 5036 3 110 2 25 1 1 661 5036_3 +PRL_Young_Final 5036 3 111 2 25 0 1 225 5036_3 +PRL_Young_Final 5036 3 112 2 25 0 1 443 5036_3 +PRL_Young_Final 5036 3 113 2 25 0 1 289 5036_3 +PRL_Young_Final 5036 3 114 2 25 0 1 596 5036_3 +PRL_Young_Final 5036 3 115 2 -25 0 0 505 5036_3 +PRL_Young_Final 5036 3 116 2 -25 0 0 649 5036_3 +PRL_Young_Final 5036 3 117 1 25 1 1 1022 5036_3 +PRL_Young_Final 5036 3 118 1 -25 0 1 1342 5036_3 +PRL_Young_Final 5036 3 119 1 25 0 1 354 5036_3 +PRL_Young_Final 5036 3 120 1 25 0 1 1119 
5036_3 +PRL_Young_Final 5036 3 121 1 25 0 1 911 5036_3 +PRL_Young_Final 5036 3 122 1 25 0 1 2367 5036_3 +PRL_Young_Final 5036 3 123 1 25 0 1 834 5036_3 +PRL_Young_Final 5036 3 124 1 25 0 1 1194 5036_3 +PRL_Young_Final 5036 3 125 1 -25 0 0 2371 5036_3 +PRL_Young_Final 5036 3 126 1 25 0 0 2397 5036_3 +PRL_Young_Final 5036 3 127 1 -25 0 0 313 5036_3 +PRL_Young_Final 5036 3 128 1 25 0 0 547 5036_3 +PRL_Young_Final 5036 3 129 1 -25 0 0 1113 5036_3 +PRL_Young_Final 5036 3 130 1 -25 0 0 283 5036_3 +PRL_Young_Final 5036 3 131 2 25 1 1 73 5036_3 +PRL_Young_Final 5036 3 132 2 -25 0 1 2660 5036_3 +PRL_Young_Final 5036 3 133 2 25 0 1 426 5036_3 +PRL_Young_Final 5036 3 134 2 25 0 1 950 5036_3 +PRL_Young_Final 5036 3 135 2 -25 0 1 744 5036_3 +PRL_Young_Final 5036 3 136 2 25 0 1 637 5036_3 +PRL_Young_Final 5036 3 137 2 -25 0 0 1133 5036_3 +PRL_Young_Final 5036 3 138 1 25 1 1 1031 5036_3 +PRL_Young_Final 5036 3 139 1 25 0 1 540 5036_3 +PRL_Young_Final 5036 3 140 1 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 141 1 25 0 1 594 5036_3 +PRL_Young_Final 5036 3 142 1 -25 0 1 400 5036_3 +PRL_Young_Final 5036 3 143 1 -25 0 1 509 5036_3 +PRL_Young_Final 5036 3 144 2 -25 1 0 1377 5036_3 +PRL_Young_Final 5036 3 145 2 -25 0 0 503 5036_3 +PRL_Young_Final 5036 3 146 1 25 1 1 861 5036_3 +PRL_Young_Final 5036 3 147 1 25 0 1 457 5036_3 +PRL_Young_Final 5036 3 148 1 25 0 1 486 5036_3 +PRL_Young_Final 5036 3 149 1 25 0 1 470 5036_3 +PRL_Young_Final 5036 3 150 1 25 0 1 438 5036_3 +PRL_Young_Final 5036 3 151 1 25 0 1 977 5036_3 +PRL_Young_Final 5036 3 152 1 -25 0 1 762 5036_3 +PRL_Young_Final 5036 3 153 1 25 0 1 1138 5036_3 +PRL_Young_Final 5036 3 154 1 -25 0 0 578 5036_3 +PRL_Young_Final 5036 3 155 2 25 1 1 754 5036_3 +PRL_Young_Final 5036 3 156 2 25 0 1 482 5036_3 +PRL_Young_Final 5036 3 157 2 25 0 1 447 5036_3 +PRL_Young_Final 5036 3 158 2 25 0 1 952 5036_3 +PRL_Young_Final 5036 3 159 2 25 0 1 1078 5036_3 +PRL_Young_Final 5036 3 160 2 25 0 1 934 5036_3 +PRL_Young_Final 5036 3 161 2 -25 0 1 481 5036_3 +PRL_Young_Final 5036 3 162 2 25 0 0 563 5036_3 +PRL_Young_Final 5036 3 163 2 25 0 0 699 5036_3 +PRL_Young_Final 5036 3 164 2 -25 0 0 509 5036_3 +PRL_Young_Final 5036 3 165 1 25 1 1 1000 5036_3 +PRL_Young_Final 5036 3 166 1 25 0 1 488 5036_3 +PRL_Young_Final 5036 3 167 1 -25 0 1 1019 5036_3 +PRL_Young_Final 5036 3 168 1 25 0 1 613 5036_3 +PRL_Young_Final 5036 3 169 1 25 0 1 470 5036_3 +PRL_Young_Final 5036 3 170 1 25 0 1 436 5036_3 +PRL_Young_Final 5036 3 171 1 -25 0 0 269 5036_3 +PRL_Young_Final 5036 3 172 2 25 1 1 1473 5036_3 +PRL_Young_Final 5036 3 173 2 25 0 1 458 5036_3 +PRL_Young_Final 5036 3 174 2 -25 0 1 433 5036_3 +PRL_Young_Final 5036 3 175 2 -25 0 1 556 5036_3 +PRL_Young_Final 5036 3 176 1 -25 1 0 532 5036_3 +PRL_Young_Final 5036 3 177 1 -25 0 0 395 5036_3 +PRL_Young_Final 5036 3 178 1 -25 0 0 393 5036_3 +PRL_Young_Final 5036 3 179 2 25 1 1 421 5036_3 +PRL_Young_Final 5036 3 180 2 25 0 1 342 5036_3 +PRL_Young_Final 5036 3 181 2 25 0 1 436 5036_3 +PRL_Young_Final 5036 3 182 2 25 0 1 126 5036_3 +PRL_Young_Final 5036 3 183 2 25 0 1 533 5036_3 +PRL_Young_Final 5036 3 184 2 -25 0 0 474 5036_3 +PRL_Young_Final 5036 3 185 2 25 0 0 524 5036_3 +PRL_Young_Final 5036 3 186 2 -25 0 0 805 5036_3 +PRL_Young_Final 5036 3 187 1 25 1 1 1009 5036_3 +PRL_Young_Final 5036 3 188 1 -25 0 1 490 5036_3 +PRL_Young_Final 5036 3 189 1 25 0 1 795 5036_3 +PRL_Young_Final 5036 3 190 1 25 0 1 487 5036_3 +PRL_Young_Final 5036 3 191 1 25 0 1 946 5036_3 +PRL_Young_Final 5036 3 192 1 25 0 1 1127 5036_3 +PRL_Young_Final 5036 3 193 1 -25 0 0 677 5036_3 
+PRL_Young_Final 5036 3 194 1 -25 0 0 782 5036_3 +PRL_Young_Final 5036 3 195 2 25 1 1 521 5036_3 +PRL_Young_Final 5036 3 196 2 25 0 1 480 5036_3 +PRL_Young_Final 5036 3 197 2 25 0 1 450 5036_3 +PRL_Young_Final 5036 3 198 2 -25 0 1 429 5036_3 +PRL_Young_Final 5036 3 199 2 25 0 1 585 5036_3 +PRL_Young_Final 5036 3 200 2 -25 0 0 102 5036_3 +PRL_Young_Final 5035 1 1 1 25 0 0 753 5035_1 +PRL_Young_Final 5035 1 2 1 -25 0 0 321 5035_1 +PRL_Young_Final 5035 1 3 1 -25 0 0 283 5035_1 +PRL_Young_Final 5035 1 4 2 25 1 1 300 5035_1 +PRL_Young_Final 5035 1 5 2 25 0 1 337 5035_1 +PRL_Young_Final 5035 1 6 2 -25 0 1 285 5035_1 +PRL_Young_Final 5035 1 7 2 25 0 1 363 5035_1 +PRL_Young_Final 5035 1 8 2 25 0 1 281 5035_1 +PRL_Young_Final 5035 1 9 2 -25 0 0 287 5035_1 +PRL_Young_Final 5035 1 10 1 -25 1 1 310 5035_1 +PRL_Young_Final 5035 1 11 2 -25 1 0 906 5035_1 +PRL_Young_Final 5035 1 12 2 -25 0 0 584 5035_1 +PRL_Young_Final 5035 1 13 1 25 1 1 239 5035_1 +PRL_Young_Final 5035 1 14 1 25 0 1 273 5035_1 +PRL_Young_Final 5035 1 15 1 25 0 1 698 5035_1 +PRL_Young_Final 5035 1 16 1 25 0 1 365 5035_1 +PRL_Young_Final 5035 1 17 1 25 0 1 295 5035_1 +PRL_Young_Final 5035 1 18 1 -25 0 0 305 5035_1 +PRL_Young_Final 5035 1 19 1 25 0 0 284 5035_1 +PRL_Young_Final 5035 1 20 1 25 0 0 278 5035_1 +PRL_Young_Final 5035 1 21 1 -25 0 0 276 5035_1 +PRL_Young_Final 5035 1 22 1 -25 0 0 239 5035_1 +PRL_Young_Final 5035 1 23 2 -25 1 1 342 5035_1 +PRL_Young_Final 5035 1 24 2 -25 0 1 536 5035_1 +PRL_Young_Final 5035 1 25 1 -25 1 0 464 5035_1 +PRL_Young_Final 5035 1 26 1 -25 0 0 277 5035_1 +PRL_Young_Final 5035 1 27 1 -25 0 0 412 5035_1 +PRL_Young_Final 5035 1 28 2 25 1 1 371 5035_1 +PRL_Young_Final 5035 1 29 2 25 0 1 311 5035_1 +PRL_Young_Final 5035 1 30 2 25 0 1 303 5035_1 +PRL_Young_Final 5035 1 31 2 25 0 1 410 5035_1 +PRL_Young_Final 5035 1 32 2 25 0 1 293 5035_1 +PRL_Young_Final 5035 1 33 2 25 0 1 706 5035_1 +PRL_Young_Final 5035 1 34 2 -25 0 1 484 5035_1 +PRL_Young_Final 5035 1 35 2 25 0 1 349 5035_1 +PRL_Young_Final 5035 1 36 2 -25 0 0 482 5035_1 +PRL_Young_Final 5035 1 37 2 25 0 0 649 5035_1 +PRL_Young_Final 5035 1 38 2 -25 0 0 543 5035_1 +PRL_Young_Final 5035 1 39 2 -25 0 0 419 5035_1 +PRL_Young_Final 5035 1 40 1 25 1 1 337 5035_1 +PRL_Young_Final 5035 1 41 1 25 0 1 331 5035_1 +PRL_Young_Final 5035 1 42 1 25 0 1 654 5035_1 +PRL_Young_Final 5035 1 43 1 25 0 1 301 5035_1 +PRL_Young_Final 5035 1 44 1 25 0 1 278 5035_1 +PRL_Young_Final 5035 1 45 1 -25 0 0 610 5035_1 +PRL_Young_Final 5035 1 46 1 -25 0 0 427 5035_1 +PRL_Young_Final 5035 1 47 2 25 1 1 336 5035_1 +PRL_Young_Final 5035 1 48 2 -25 0 1 271 5035_1 +PRL_Young_Final 5035 1 49 2 25 0 1 244 5035_1 +PRL_Young_Final 5035 1 50 2 25 0 1 577 5035_1 +PRL_Young_Final 5035 1 51 2 -25 0 1 291 5035_1 +PRL_Young_Final 5035 1 52 2 25 0 1 653 5035_1 +PRL_Young_Final 5035 1 53 2 25 0 1 327 5035_1 +PRL_Young_Final 5035 1 54 2 -25 0 0 306 5035_1 +PRL_Young_Final 5035 1 55 2 -25 0 0 295 5035_1 +PRL_Young_Final 5035 1 56 1 25 1 1 294 5035_1 +PRL_Young_Final 5035 1 57 1 25 0 1 350 5035_1 +PRL_Young_Final 5035 1 58 1 25 0 1 737 5035_1 +PRL_Young_Final 5035 1 59 1 -25 0 1 577 5035_1 +PRL_Young_Final 5035 1 60 1 -25 0 1 331 5035_1 +PRL_Young_Final 5035 1 61 2 -25 1 0 271 5035_1 +PRL_Young_Final 5035 1 62 2 25 0 0 381 5035_1 +PRL_Young_Final 5035 1 63 2 -25 0 0 332 5035_1 +PRL_Young_Final 5035 1 64 2 25 0 0 425 5035_1 +PRL_Young_Final 5035 1 65 2 -25 0 0 290 5035_1 +PRL_Young_Final 5035 1 66 2 -25 0 0 300 5035_1 +PRL_Young_Final 5035 1 67 1 25 1 1 625 5035_1 +PRL_Young_Final 5035 1 68 1 25 0 1 834 
5035_1 +PRL_Young_Final 5035 1 69 1 25 0 1 448 5035_1 +PRL_Young_Final 5035 1 70 1 25 0 1 348 5035_1 +PRL_Young_Final 5035 1 71 1 25 0 1 367 5035_1 +PRL_Young_Final 5035 1 72 1 -25 0 0 296 5035_1 +PRL_Young_Final 5035 1 73 1 -25 0 0 624 5035_1 +PRL_Young_Final 5035 1 74 2 25 1 1 371 5035_1 +PRL_Young_Final 5035 1 75 2 -25 0 1 282 5035_1 +PRL_Young_Final 5035 1 76 2 25 0 1 302 5035_1 +PRL_Young_Final 5035 1 77 2 25 0 1 647 5035_1 +PRL_Young_Final 5035 1 78 2 25 0 1 374 5035_1 +PRL_Young_Final 5035 1 79 2 25 0 1 461 5035_1 +PRL_Young_Final 5035 1 80 2 25 0 1 260 5035_1 +PRL_Young_Final 5035 1 81 2 -25 0 0 342 5035_1 +PRL_Young_Final 5035 1 82 2 -25 0 0 438 5035_1 +PRL_Young_Final 5035 1 83 1 25 1 1 290 5035_1 +PRL_Young_Final 5035 1 84 1 25 0 1 350 5035_1 +PRL_Young_Final 5035 1 85 1 -25 0 1 202 5035_1 +PRL_Young_Final 5035 1 86 1 25 0 1 333 5035_1 +PRL_Young_Final 5035 1 87 1 25 0 1 687 5035_1 +PRL_Young_Final 5035 1 88 1 25 0 0 280 5035_1 +PRL_Young_Final 5035 1 89 1 25 0 0 358 5035_1 +PRL_Young_Final 5035 1 90 1 -25 0 0 570 5035_1 +PRL_Young_Final 5035 1 91 1 -25 0 0 632 5035_1 +PRL_Young_Final 5035 1 92 2 -25 1 1 334 5035_1 +PRL_Young_Final 5035 1 93 2 25 0 1 546 5035_1 +PRL_Young_Final 5035 1 94 2 25 0 1 649 5035_1 +PRL_Young_Final 5035 1 95 2 25 0 1 570 5035_1 +PRL_Young_Final 5035 1 96 2 25 0 1 651 5035_1 +PRL_Young_Final 5035 1 97 2 25 0 1 598 5035_1 +PRL_Young_Final 5035 1 98 2 -25 0 0 548 5035_1 +PRL_Young_Final 5035 1 99 2 -25 0 0 505 5035_1 +PRL_Young_Final 5035 1 100 1 -25 1 1 305 5035_1 +PRL_Young_Final 5035 1 101 1 -25 0 1 489 5035_1 +PRL_Young_Final 5035 1 102 2 -25 1 0 250 5035_1 +PRL_Young_Final 5035 1 103 2 -25 0 0 311 5035_1 +PRL_Young_Final 5035 1 104 1 25 1 1 342 5035_1 +PRL_Young_Final 5035 1 105 1 25 0 1 305 5035_1 +PRL_Young_Final 5035 1 106 1 25 0 1 310 5035_1 +PRL_Young_Final 5035 1 107 1 25 0 1 251 5035_1 +PRL_Young_Final 5035 1 108 1 25 0 1 254 5035_1 +PRL_Young_Final 5035 1 109 1 25 0 1 561 5035_1 +PRL_Young_Final 5035 1 110 1 25 0 0 287 5035_1 +PRL_Young_Final 5035 1 111 1 -25 0 0 261 5035_1 +PRL_Young_Final 5035 1 112 1 -25 0 0 276 5035_1 +PRL_Young_Final 5035 1 113 1 -25 0 0 74 5035_1 +PRL_Young_Final 5035 1 114 2 -25 1 1 257 5035_1 +PRL_Young_Final 5035 1 115 2 25 0 1 593 5035_1 +PRL_Young_Final 5035 1 116 2 25 0 1 565 5035_1 +PRL_Young_Final 5035 1 117 2 25 0 1 276 5035_1 +PRL_Young_Final 5035 1 118 2 25 0 1 614 5035_1 +PRL_Young_Final 5035 1 119 2 25 0 1 290 5035_1 +PRL_Young_Final 5035 1 120 2 25 0 1 385 5035_1 +PRL_Young_Final 5035 1 121 2 25 0 1 280 5035_1 +PRL_Young_Final 5035 1 122 2 -25 0 0 426 5035_1 +PRL_Young_Final 5035 1 123 2 -25 0 0 278 5035_1 +PRL_Young_Final 5035 1 124 2 -25 0 0 298 5035_1 +PRL_Young_Final 5035 1 125 1 -25 1 1 283 5035_1 +PRL_Young_Final 5035 1 126 1 25 0 1 338 5035_1 +PRL_Young_Final 5035 1 127 1 25 0 1 284 5035_1 +PRL_Young_Final 5035 1 128 1 -25 0 1 316 5035_1 +PRL_Young_Final 5035 1 129 1 25 0 1 287 5035_1 +PRL_Young_Final 5035 1 130 1 25 0 1 259 5035_1 +PRL_Young_Final 5035 1 131 1 -25 0 0 293 5035_1 +PRL_Young_Final 5035 1 132 1 25 0 0 301 5035_1 +PRL_Young_Final 5035 1 133 1 -25 0 0 360 5035_1 +PRL_Young_Final 5035 1 134 1 25 0 0 285 5035_1 +PRL_Young_Final 5035 1 135 1 -25 0 0 308 5035_1 +PRL_Young_Final 5035 1 136 1 -25 0 0 579 5035_1 +PRL_Young_Final 5035 1 137 1 -25 0 0 447 5035_1 +PRL_Young_Final 5035 1 138 2 25 1 1 257 5035_1 +PRL_Young_Final 5035 1 139 2 25 0 1 263 5035_1 +PRL_Young_Final 5035 1 140 2 25 0 1 268 5035_1 +PRL_Young_Final 5035 1 141 2 -25 0 1 268 5035_1 +PRL_Young_Final 5035 1 142 2 -25 0 1 268 
5035_1 +PRL_Young_Final 5035 1 143 2 25 0 1 528 5035_1 +PRL_Young_Final 5035 1 144 2 25 0 1 267 5035_1 +PRL_Young_Final 5035 1 145 2 -25 0 0 314 5035_1 +PRL_Young_Final 5035 1 146 2 -25 0 0 567 5035_1 +PRL_Young_Final 5035 1 147 2 -25 0 0 797 5035_1 +PRL_Young_Final 5035 1 148 1 25 1 1 338 5035_1 +PRL_Young_Final 5035 1 149 1 25 0 1 296 5035_1 +PRL_Young_Final 5035 1 150 1 25 0 1 567 5035_1 +PRL_Young_Final 5035 1 151 1 25 0 1 579 5035_1 +PRL_Young_Final 5035 1 152 1 -25 0 1 303 5035_1 +PRL_Young_Final 5035 1 153 1 25 0 0 286 5035_1 +PRL_Young_Final 5035 1 154 1 25 0 0 278 5035_1 +PRL_Young_Final 5035 1 155 1 -25 0 0 359 5035_1 +PRL_Young_Final 5035 1 156 1 -25 0 0 571 5035_1 +PRL_Young_Final 5035 1 157 2 25 1 1 517 5035_1 +PRL_Young_Final 5035 1 158 2 25 0 1 354 5035_1 +PRL_Young_Final 5035 1 159 2 25 0 1 342 5035_1 +PRL_Young_Final 5035 1 160 2 25 0 1 289 5035_1 +PRL_Young_Final 5035 1 161 2 25 0 1 273 5035_1 +PRL_Young_Final 5035 1 162 2 25 0 1 286 5035_1 +PRL_Young_Final 5035 1 163 2 25 0 1 579 5035_1 +PRL_Young_Final 5035 1 164 2 -25 0 1 571 5035_1 +PRL_Young_Final 5035 1 165 2 -25 0 0 349 5035_1 +PRL_Young_Final 5035 1 166 1 25 1 1 289 5035_1 +PRL_Young_Final 5035 1 167 1 25 0 1 510 5035_1 +PRL_Young_Final 5035 1 168 1 -25 0 1 287 5035_1 +PRL_Young_Final 5035 1 169 1 25 0 1 289 5035_1 +PRL_Young_Final 5035 1 170 1 25 0 1 281 5035_1 +PRL_Young_Final 5035 1 171 1 25 0 1 258 5035_1 +PRL_Young_Final 5035 1 172 1 25 0 1 302 5035_1 +PRL_Young_Final 5035 1 173 1 25 0 1 590 5035_1 +PRL_Young_Final 5035 1 174 1 -25 0 0 298 5035_1 +PRL_Young_Final 5035 1 175 1 -25 0 0 261 5035_1 +PRL_Young_Final 5035 1 176 2 -25 1 1 387 5035_1 +PRL_Young_Final 5035 1 177 2 -25 0 1 362 5035_1 +PRL_Young_Final 5035 1 178 1 -25 1 0 258 5035_1 +PRL_Young_Final 5035 1 179 1 25 0 0 346 5035_1 +PRL_Young_Final 5035 1 180 1 -25 0 0 299 5035_1 +PRL_Young_Final 5035 1 181 1 -25 0 0 311 5035_1 +PRL_Young_Final 5035 1 182 2 25 1 1 274 5035_1 +PRL_Young_Final 5035 1 183 2 25 0 1 295 5035_1 +PRL_Young_Final 5035 1 184 2 25 0 1 325 5035_1 +PRL_Young_Final 5035 1 185 2 25 0 1 330 5035_1 +PRL_Young_Final 5035 1 186 2 25 0 1 278 5035_1 +PRL_Young_Final 5035 1 187 2 -25 0 0 602 5035_1 +PRL_Young_Final 5035 1 188 2 -25 0 0 594 5035_1 +PRL_Young_Final 5035 1 189 1 25 1 1 269 5035_1 +PRL_Young_Final 5035 1 190 1 -25 0 1 274 5035_1 +PRL_Young_Final 5035 1 191 1 25 0 1 271 5035_1 +PRL_Young_Final 5035 1 192 1 25 0 1 301 5035_1 +PRL_Young_Final 5035 1 193 1 25 0 1 322 5035_1 +PRL_Young_Final 5035 1 194 1 25 0 1 332 5035_1 +PRL_Young_Final 5035 1 195 1 25 0 1 337 5035_1 +PRL_Young_Final 5035 1 196 1 25 0 1 274 5035_1 +PRL_Young_Final 5035 1 197 1 -25 0 0 279 5035_1 +PRL_Young_Final 5035 1 198 1 -25 0 0 577 5035_1 +PRL_Young_Final 5035 1 199 2 25 1 1 350 5035_1 +PRL_Young_Final 5035 1 200 2 -25 0 1 262 5035_1 +PRL_Young_Final 5035 2 1 1 -25 1 0 838 5035_2 +PRL_Young_Final 5035 2 2 1 25 0 0 413 5035_2 +PRL_Young_Final 5035 2 3 1 -25 0 0 491 5035_2 +PRL_Young_Final 5035 2 4 1 25 0 0 276 5035_2 +PRL_Young_Final 5035 2 5 1 -25 0 0 381 5035_2 +PRL_Young_Final 5035 2 6 1 -25 0 0 279 5035_2 +PRL_Young_Final 5035 2 7 2 25 1 1 556 5035_2 +PRL_Young_Final 5035 2 8 2 25 0 1 297 5035_2 +PRL_Young_Final 5035 2 9 2 -25 0 1 344 5035_2 +PRL_Young_Final 5035 2 10 2 25 0 1 285 5035_2 +PRL_Young_Final 5035 2 11 2 25 0 1 306 5035_2 +PRL_Young_Final 5035 2 12 2 -25 0 0 567 5035_2 +PRL_Young_Final 5035 2 13 2 -25 0 0 597 5035_2 +PRL_Young_Final 5035 2 14 1 25 1 1 266 5035_2 +PRL_Young_Final 5035 2 15 1 25 0 1 482 5035_2 +PRL_Young_Final 5035 2 16 1 25 0 1 
346 5035_2 +PRL_Young_Final 5035 2 17 1 -25 0 1 1135 5035_2 +PRL_Young_Final 5035 2 18 1 -25 0 1 294 5035_2 +PRL_Young_Final 5035 2 19 2 -25 1 0 283 5035_2 +PRL_Young_Final 5035 2 20 2 -25 0 0 356 5035_2 +PRL_Young_Final 5035 2 21 1 25 1 1 312 5035_2 +PRL_Young_Final 5035 2 22 1 25 0 1 322 5035_2 +PRL_Young_Final 5035 2 23 1 25 0 1 664 5035_2 +PRL_Young_Final 5035 2 24 1 25 0 1 586 5035_2 +PRL_Young_Final 5035 2 25 1 25 0 1 429 5035_2 +PRL_Young_Final 5035 2 26 1 25 0 0 479 5035_2 +PRL_Young_Final 5035 2 27 1 25 0 0 348 5035_2 +PRL_Young_Final 5035 2 28 1 -25 0 0 619 5035_2 +PRL_Young_Final 5035 2 29 1 -25 0 0 293 5035_2 +PRL_Young_Final 5035 2 30 2 25 1 1 272 5035_2 +PRL_Young_Final 5035 2 31 2 -25 0 1 306 5035_2 +PRL_Young_Final 5035 2 32 2 25 0 1 452 5035_2 +PRL_Young_Final 5035 2 33 2 25 0 1 262 5035_2 +PRL_Young_Final 5035 2 34 2 25 0 1 269 5035_2 +PRL_Young_Final 5035 2 35 2 25 0 1 272 5035_2 +PRL_Young_Final 5035 2 36 2 -25 0 0 294 5035_2 +PRL_Young_Final 5035 2 37 2 -25 0 0 416 5035_2 +PRL_Young_Final 5035 2 38 2 -25 0 0 368 5035_2 +PRL_Young_Final 5035 2 39 1 25 1 1 252 5035_2 +PRL_Young_Final 5035 2 40 1 25 0 1 241 5035_2 +PRL_Young_Final 5035 2 41 1 25 0 1 309 5035_2 +PRL_Young_Final 5035 2 42 1 -25 0 1 342 5035_2 +PRL_Young_Final 5035 2 43 1 25 0 1 318 5035_2 +PRL_Young_Final 5035 2 44 1 25 0 1 94 5035_2 +PRL_Young_Final 5035 2 45 1 -25 0 0 285 5035_2 +PRL_Young_Final 5035 2 46 1 25 0 0 258 5035_2 +PRL_Young_Final 5035 2 47 1 -25 0 0 363 5035_2 +PRL_Young_Final 5035 2 48 1 -25 0 0 316 5035_2 +PRL_Young_Final 5035 2 49 2 -25 1 1 310 5035_2 +PRL_Young_Final 5035 2 50 2 25 0 1 373 5035_2 +PRL_Young_Final 5035 2 51 2 25 0 1 571 5035_2 +PRL_Young_Final 5035 2 52 2 25 0 1 294 5035_2 +PRL_Young_Final 5035 2 53 2 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 54 2 -25 0 0 628 5035_2 +PRL_Young_Final 5035 2 55 2 -25 0 0 545 5035_2 +PRL_Young_Final 5035 2 56 1 25 1 1 304 5035_2 +PRL_Young_Final 5035 2 57 1 -25 0 1 323 5035_2 +PRL_Young_Final 5035 2 58 1 -25 0 1 295 5035_2 +PRL_Young_Final 5035 2 59 2 -25 1 0 366 5035_2 +PRL_Young_Final 5035 2 60 2 -25 0 0 276 5035_2 +PRL_Young_Final 5035 2 61 1 25 1 1 282 5035_2 +PRL_Young_Final 5035 2 62 1 25 0 1 399 5035_2 +PRL_Young_Final 5035 2 63 1 25 0 1 334 5035_2 +PRL_Young_Final 5035 2 64 1 25 0 1 310 5035_2 +PRL_Young_Final 5035 2 65 1 25 0 1 416 5035_2 +PRL_Young_Final 5035 2 66 1 25 0 1 651 5035_2 +PRL_Young_Final 5035 2 67 1 -25 0 0 739 5035_2 +PRL_Young_Final 5035 2 68 1 25 0 0 368 5035_2 +PRL_Young_Final 5035 2 69 1 -25 0 0 274 5035_2 +PRL_Young_Final 5035 2 70 1 25 0 0 583 5035_2 +PRL_Young_Final 5035 2 71 1 -25 0 0 618 5035_2 +PRL_Young_Final 5035 2 72 1 -25 0 0 350 5035_2 +PRL_Young_Final 5035 2 73 2 -25 1 1 256 5035_2 +PRL_Young_Final 5035 2 74 2 25 0 1 289 5035_2 +PRL_Young_Final 5035 2 75 2 25 0 1 356 5035_2 +PRL_Young_Final 5035 2 76 2 25 0 1 399 5035_2 +PRL_Young_Final 5035 2 77 2 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 78 2 25 0 1 325 5035_2 +PRL_Young_Final 5035 2 79 2 25 0 1 272 5035_2 +PRL_Young_Final 5035 2 80 2 25 0 1 536 5035_2 +PRL_Young_Final 5035 2 81 2 -25 0 0 372 5035_2 +PRL_Young_Final 5035 2 82 2 -25 0 0 308 5035_2 +PRL_Young_Final 5035 2 83 1 -25 1 1 460 5035_2 +PRL_Young_Final 5035 2 84 1 25 0 1 408 5035_2 +PRL_Young_Final 5035 2 85 1 25 0 1 307 5035_2 +PRL_Young_Final 5035 2 86 1 -25 0 1 635 5035_2 +PRL_Young_Final 5035 2 87 1 25 0 1 643 5035_2 +PRL_Young_Final 5035 2 88 1 25 0 1 331 5035_2 +PRL_Young_Final 5035 2 89 1 25 0 1 599 5035_2 +PRL_Young_Final 5035 2 90 1 25 0 1 288 5035_2 +PRL_Young_Final 5035 2 91 1 -25 0 
0 271 5035_2 +PRL_Young_Final 5035 2 92 1 -25 0 0 324 5035_2 +PRL_Young_Final 5035 2 93 2 25 1 1 356 5035_2 +PRL_Young_Final 5035 2 94 2 -25 0 1 812 5035_2 +PRL_Young_Final 5035 2 95 2 -25 0 1 767 5035_2 +PRL_Young_Final 5035 2 96 1 25 1 0 309 5035_2 +PRL_Young_Final 5035 2 97 1 25 0 0 278 5035_2 +PRL_Young_Final 5035 2 98 1 -25 0 0 367 5035_2 +PRL_Young_Final 5035 2 99 1 -25 0 0 279 5035_2 +PRL_Young_Final 5035 2 100 1 -25 0 0 489 5035_2 +PRL_Young_Final 5035 2 101 2 25 1 1 336 5035_2 +PRL_Young_Final 5035 2 102 2 25 0 1 285 5035_2 +PRL_Young_Final 5035 2 103 2 25 0 1 299 5035_2 +PRL_Young_Final 5035 2 104 2 25 0 1 455 5035_2 +PRL_Young_Final 5035 2 105 2 25 0 1 381 5035_2 +PRL_Young_Final 5035 2 106 2 25 0 1 327 5035_2 +PRL_Young_Final 5035 2 107 2 -25 0 1 644 5035_2 +PRL_Young_Final 5035 2 108 2 -25 0 0 608 5035_2 +PRL_Young_Final 5035 2 109 2 -25 0 0 280 5035_2 +PRL_Young_Final 5035 2 110 1 25 1 1 315 5035_2 +PRL_Young_Final 5035 2 111 1 25 0 1 677 5035_2 +PRL_Young_Final 5035 2 112 1 25 0 1 260 5035_2 +PRL_Young_Final 5035 2 113 1 25 0 1 652 5035_2 +PRL_Young_Final 5035 2 114 1 25 0 1 565 5035_2 +PRL_Young_Final 5035 2 115 1 -25 0 0 283 5035_2 +PRL_Young_Final 5035 2 116 1 25 0 0 321 5035_2 +PRL_Young_Final 5035 2 117 1 -25 0 0 617 5035_2 +PRL_Young_Final 5035 2 118 1 -25 0 0 477 5035_2 +PRL_Young_Final 5035 2 119 2 25 1 1 336 5035_2 +PRL_Young_Final 5035 2 120 2 25 0 1 379 5035_2 +PRL_Young_Final 5035 2 121 2 -25 0 1 341 5035_2 +PRL_Young_Final 5035 2 122 2 25 0 1 494 5035_2 +PRL_Young_Final 5035 2 123 2 25 0 1 412 5035_2 +PRL_Young_Final 5035 2 124 2 -25 0 0 344 5035_2 +PRL_Young_Final 5035 2 125 2 -25 0 0 654 5035_2 +PRL_Young_Final 5035 2 126 1 -25 1 1 348 5035_2 +PRL_Young_Final 5035 2 127 1 25 0 1 314 5035_2 +PRL_Young_Final 5035 2 128 1 25 0 1 622 5035_2 +PRL_Young_Final 5035 2 129 1 25 0 1 298 5035_2 +PRL_Young_Final 5035 2 130 1 25 0 1 406 5035_2 +PRL_Young_Final 5035 2 131 1 25 0 1 413 5035_2 +PRL_Young_Final 5035 2 132 1 -25 0 1 479 5035_2 +PRL_Young_Final 5035 2 133 1 -25 0 0 390 5035_2 +PRL_Young_Final 5035 2 134 2 -25 1 1 1168 5035_2 +PRL_Young_Final 5035 2 135 2 25 0 1 1025 5035_2 +PRL_Young_Final 5035 2 136 2 25 0 1 383 5035_2 +PRL_Young_Final 5035 2 137 2 25 0 1 415 5035_2 +PRL_Young_Final 5035 2 138 2 25 0 1 334 5035_2 +PRL_Young_Final 5035 2 139 2 25 0 1 369 5035_2 +PRL_Young_Final 5035 2 140 2 25 0 1 428 5035_2 +PRL_Young_Final 5035 2 141 2 -25 0 1 345 5035_2 +PRL_Young_Final 5035 2 142 2 -25 0 0 326 5035_2 +PRL_Young_Final 5035 2 143 1 25 1 1 548 5035_2 +PRL_Young_Final 5035 2 144 1 25 0 1 690 5035_2 +PRL_Young_Final 5035 2 145 1 25 0 1 635 5035_2 +PRL_Young_Final 5035 2 146 1 25 0 1 1661 5035_2 +PRL_Young_Final 5035 2 147 1 25 0 1 358 5035_2 +PRL_Young_Final 5035 2 148 1 25 0 1 443 5035_2 +PRL_Young_Final 5035 2 149 1 25 0 1 353 5035_2 +PRL_Young_Final 5035 2 150 1 -25 0 0 310 5035_2 +PRL_Young_Final 5035 2 151 1 25 0 0 664 5035_2 +PRL_Young_Final 5035 2 152 1 -25 0 0 320 5035_2 +PRL_Young_Final 5035 2 153 1 25 0 0 318 5035_2 +PRL_Young_Final 5035 2 154 1 -25 0 0 630 5035_2 +PRL_Young_Final 5035 2 155 1 -25 0 0 373 5035_2 +PRL_Young_Final 5035 2 156 2 -25 1 1 385 5035_2 +PRL_Young_Final 5035 2 157 2 25 0 1 477 5035_2 +PRL_Young_Final 5035 2 158 2 25 0 1 360 5035_2 +PRL_Young_Final 5035 2 159 2 -25 0 1 524 5035_2 +PRL_Young_Final 5035 2 160 2 25 0 1 495 5035_2 +PRL_Young_Final 5035 2 161 2 -25 0 0 447 5035_2 +PRL_Young_Final 5035 2 162 2 -25 0 0 596 5035_2 +PRL_Young_Final 5035 2 163 1 25 1 1 598 5035_2 +PRL_Young_Final 5035 2 164 1 25 0 1 246 5035_2 
+PRL_Young_Final 5035 2 165 1 25 0 1 283 5035_2 +PRL_Young_Final 5035 2 166 1 25 0 1 604 5035_2 +PRL_Young_Final 5035 2 167 1 -25 0 1 261 5035_2 +PRL_Young_Final 5035 2 168 1 -25 0 1 343 5035_2 +PRL_Young_Final 5035 2 169 2 -25 1 0 182 5035_2 +PRL_Young_Final 5035 2 170 2 -25 0 0 346 5035_2 +PRL_Young_Final 5035 2 171 1 25 1 1 432 5035_2 +PRL_Young_Final 5035 2 172 1 25 0 1 264 5035_2 +PRL_Young_Final 5035 2 173 1 25 0 1 347 5035_2 +PRL_Young_Final 5035 2 174 1 25 0 1 724 5035_2 +PRL_Young_Final 5035 2 175 1 25 0 1 607 5035_2 +PRL_Young_Final 5035 2 176 1 25 0 1 298 5035_2 +PRL_Young_Final 5035 2 177 1 25 0 0 292 5035_2 +PRL_Young_Final 5035 2 178 1 25 0 0 377 5035_2 +PRL_Young_Final 5035 2 179 1 -25 0 0 368 5035_2 +PRL_Young_Final 5035 2 180 1 -25 0 0 579 5035_2 +PRL_Young_Final 5035 2 181 2 -25 1 1 580 5035_2 +PRL_Young_Final 5035 2 182 2 25 0 1 371 5035_2 +PRL_Young_Final 5035 2 183 2 25 0 1 840 5035_2 +PRL_Young_Final 5035 2 184 2 25 0 1 642 5035_2 +PRL_Young_Final 5035 2 185 2 25 0 1 14 5035_2 +PRL_Young_Final 5035 2 186 2 25 0 1 262 5035_2 +PRL_Young_Final 5035 2 187 2 25 0 1 532 5035_2 +PRL_Young_Final 5035 2 188 2 25 0 1 379 5035_2 +PRL_Young_Final 5035 2 189 2 -25 0 0 327 5035_2 +PRL_Young_Final 5035 2 190 2 -25 0 0 616 5035_2 +PRL_Young_Final 5035 2 191 1 -25 1 1 319 5035_2 +PRL_Young_Final 5035 2 192 1 25 0 1 292 5035_2 +PRL_Young_Final 5035 2 193 1 25 0 1 620 5035_2 +PRL_Young_Final 5035 2 194 1 -25 0 1 318 5035_2 +PRL_Young_Final 5035 2 195 1 25 0 1 349 5035_2 +PRL_Young_Final 5035 2 196 1 -25 0 0 320 5035_2 +PRL_Young_Final 5035 2 197 1 -25 0 0 289 5035_2 +PRL_Young_Final 5035 2 198 2 25 1 1 641 5035_2 +PRL_Young_Final 5035 2 199 2 25 0 1 600 5035_2 +PRL_Young_Final 5035 2 200 2 25 0 1 597 5035_2 +PRL_Young_Final 5035 3 1 1 25 1 0 553 5035_3 +PRL_Young_Final 5035 3 2 1 -25 0 0 296 5035_3 +PRL_Young_Final 5035 3 3 1 -25 0 0 572 5035_3 +PRL_Young_Final 5035 3 4 2 25 1 1 278 5035_3 +PRL_Young_Final 5035 3 5 2 -25 0 1 527 5035_3 +PRL_Young_Final 5035 3 6 2 -25 0 1 313 5035_3 +PRL_Young_Final 5035 3 7 1 -25 1 0 293 5035_3 +PRL_Young_Final 5035 3 8 1 -25 0 0 267 5035_3 +PRL_Young_Final 5035 3 9 2 25 1 1 345 5035_3 +PRL_Young_Final 5035 3 10 2 25 0 1 314 5035_3 +PRL_Young_Final 5035 3 11 2 25 0 1 611 5035_3 +PRL_Young_Final 5035 3 12 2 25 0 1 280 5035_3 +PRL_Young_Final 5035 3 13 2 25 0 1 250 5035_3 +PRL_Young_Final 5035 3 14 2 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 15 2 -25 0 1 267 5035_3 +PRL_Young_Final 5035 3 16 2 25 0 1 333 5035_3 +PRL_Young_Final 5035 3 17 2 -25 0 0 297 5035_3 +PRL_Young_Final 5035 3 18 2 -25 0 0 701 5035_3 +PRL_Young_Final 5035 3 19 1 25 1 1 311 5035_3 +PRL_Young_Final 5035 3 20 1 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 21 1 25 0 1 470 5035_3 +PRL_Young_Final 5035 3 22 1 25 0 1 1365 5035_3 +PRL_Young_Final 5035 3 23 1 25 0 1 261 5035_3 +PRL_Young_Final 5035 3 24 1 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 25 1 -25 0 1 298 5035_3 +PRL_Young_Final 5035 3 26 1 -25 0 0 412 5035_3 +PRL_Young_Final 5035 3 27 2 25 1 1 283 5035_3 +PRL_Young_Final 5035 3 28 2 25 0 1 499 5035_3 +PRL_Young_Final 5035 3 29 2 -25 0 1 51 5035_3 +PRL_Young_Final 5035 3 30 2 25 0 1 425 5035_3 +PRL_Young_Final 5035 3 31 2 25 0 1 597 5035_3 +PRL_Young_Final 5035 3 32 2 25 0 1 354 5035_3 +PRL_Young_Final 5035 3 33 2 25 0 1 318 5035_3 +PRL_Young_Final 5035 3 34 2 25 0 1 270 5035_3 +PRL_Young_Final 5035 3 35 2 25 0 0 322 5035_3 +PRL_Young_Final 5035 3 36 2 -25 0 0 319 5035_3 +PRL_Young_Final 5035 3 37 2 25 0 0 332 5035_3 +PRL_Young_Final 5035 3 38 2 -25 0 0 340 5035_3 +PRL_Young_Final 5035 3 
39 2 -25 0 0 306 5035_3 +PRL_Young_Final 5035 3 40 1 -25 1 1 357 5035_3 +PRL_Young_Final 5035 3 41 1 -25 0 1 311 5035_3 +PRL_Young_Final 5035 3 42 2 -25 1 0 289 5035_3 +PRL_Young_Final 5035 3 43 2 -25 0 0 348 5035_3 +PRL_Young_Final 5035 3 44 1 25 1 1 397 5035_3 +PRL_Young_Final 5035 3 45 1 25 0 1 664 5035_3 +PRL_Young_Final 5035 3 46 1 25 0 1 965 5035_3 +PRL_Young_Final 5035 3 47 1 25 0 1 301 5035_3 +PRL_Young_Final 5035 3 48 1 25 0 1 277 5035_3 +PRL_Young_Final 5035 3 49 1 25 0 1 430 5035_3 +PRL_Young_Final 5035 3 50 1 -25 0 1 399 5035_3 +PRL_Young_Final 5035 3 51 1 25 0 1 398 5035_3 +PRL_Young_Final 5035 3 52 1 -25 0 0 718 5035_3 +PRL_Young_Final 5035 3 53 1 -25 0 0 388 5035_3 +PRL_Young_Final 5035 3 54 1 25 0 0 395 5035_3 +PRL_Young_Final 5035 3 55 1 25 0 0 506 5035_3 +PRL_Young_Final 5035 3 56 1 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 57 1 -25 0 0 923 5035_3 +PRL_Young_Final 5035 3 58 2 25 1 1 522 5035_3 +PRL_Young_Final 5035 3 59 2 25 0 1 294 5035_3 +PRL_Young_Final 5035 3 60 2 25 0 1 322 5035_3 +PRL_Young_Final 5035 3 61 2 25 0 1 607 5035_3 +PRL_Young_Final 5035 3 62 2 25 0 1 284 5035_3 +PRL_Young_Final 5035 3 63 2 25 0 1 461 5035_3 +PRL_Young_Final 5035 3 64 2 -25 0 1 318 5035_3 +PRL_Young_Final 5035 3 65 2 25 0 1 531 5035_3 +PRL_Young_Final 5035 3 66 2 -25 0 0 274 5035_3 +PRL_Young_Final 5035 3 67 2 -25 0 0 341 5035_3 +PRL_Young_Final 5035 3 68 1 25 1 1 390 5035_3 +PRL_Young_Final 5035 3 69 1 -25 0 1 356 5035_3 +PRL_Young_Final 5035 3 70 1 25 0 1 521 5035_3 +PRL_Young_Final 5035 3 71 1 25 0 1 302 5035_3 +PRL_Young_Final 5035 3 72 1 25 0 1 308 5035_3 +PRL_Young_Final 5035 3 73 1 25 0 1 338 5035_3 +PRL_Young_Final 5035 3 74 1 25 0 1 268 5035_3 +PRL_Young_Final 5035 3 75 1 -25 0 1 128 5035_3 +PRL_Young_Final 5035 3 76 1 -25 0 0 445 5035_3 +PRL_Young_Final 5035 3 77 2 -25 1 1 277 5035_3 +PRL_Young_Final 5035 3 78 2 25 0 1 584 5035_3 +PRL_Young_Final 5035 3 79 2 25 0 1 487 5035_3 +PRL_Young_Final 5035 3 80 2 25 0 1 368 5035_3 +PRL_Young_Final 5035 3 81 2 25 0 1 584 5035_3 +PRL_Young_Final 5035 3 82 2 25 0 1 300 5035_3 +PRL_Young_Final 5035 3 83 2 25 0 1 553 5035_3 +PRL_Young_Final 5035 3 84 2 -25 0 1 296 5035_3 +PRL_Young_Final 5035 3 85 2 -25 0 0 320 5035_3 +PRL_Young_Final 5035 3 86 2 25 0 0 299 5035_3 +PRL_Young_Final 5035 3 87 2 -25 0 0 661 5035_3 +PRL_Young_Final 5035 3 88 2 -25 0 0 304 5035_3 +PRL_Young_Final 5035 3 89 1 25 1 1 312 5035_3 +PRL_Young_Final 5035 3 90 1 25 0 1 631 5035_3 +PRL_Young_Final 5035 3 91 1 25 0 1 658 5035_3 +PRL_Young_Final 5035 3 92 1 25 0 1 248 5035_3 +PRL_Young_Final 5035 3 93 1 25 0 1 301 5035_3 +PRL_Young_Final 5035 3 94 1 25 0 1 551 5035_3 +PRL_Young_Final 5035 3 95 1 -25 0 0 597 5035_3 +PRL_Young_Final 5035 3 96 1 -25 0 0 605 5035_3 +PRL_Young_Final 5035 3 97 2 25 1 1 294 5035_3 +PRL_Young_Final 5035 3 98 2 -25 0 1 461 5035_3 +PRL_Young_Final 5035 3 99 2 25 0 1 313 5035_3 +PRL_Young_Final 5035 3 100 2 25 0 1 370 5035_3 +PRL_Young_Final 5035 3 101 2 -25 0 1 144 5035_3 +PRL_Young_Final 5035 3 102 2 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 103 1 25 1 1 334 5035_3 +PRL_Young_Final 5035 3 104 1 25 0 1 333 5035_3 +PRL_Young_Final 5035 3 105 1 25 0 1 645 5035_3 +PRL_Young_Final 5035 3 106 1 25 0 1 308 5035_3 +PRL_Young_Final 5035 3 107 1 25 0 1 334 5035_3 +PRL_Young_Final 5035 3 108 1 -25 0 0 305 5035_3 +PRL_Young_Final 5035 3 109 1 -25 0 0 313 5035_3 +PRL_Young_Final 5035 3 110 2 -25 1 1 614 5035_3 +PRL_Young_Final 5035 3 111 2 -25 0 1 585 5035_3 +PRL_Young_Final 5035 3 112 2 25 0 1 273 5035_3 +PRL_Young_Final 5035 3 113 2 25 0 1 626 5035_3 
+PRL_Young_Final 5035 3 114 2 25 0 1 790 5035_3 +PRL_Young_Final 5035 3 115 2 25 0 1 402 5035_3 +PRL_Young_Final 5035 3 116 2 25 0 0 591 5035_3 +PRL_Young_Final 5035 3 117 2 -25 0 0 289 5035_3 +PRL_Young_Final 5035 3 118 2 25 0 0 404 5035_3 +PRL_Young_Final 5035 3 119 2 -25 0 0 343 5035_3 +PRL_Young_Final 5035 3 120 2 -25 0 0 635 5035_3 +PRL_Young_Final 5035 3 121 1 25 1 1 298 5035_3 +PRL_Young_Final 5035 3 122 1 25 0 1 804 5035_3 +PRL_Young_Final 5035 3 123 1 -25 0 1 304 5035_3 +PRL_Young_Final 5035 3 124 1 25 0 1 336 5035_3 +PRL_Young_Final 5035 3 125 1 25 0 1 683 5035_3 +PRL_Young_Final 5035 3 126 1 -25 0 0 290 5035_3 +PRL_Young_Final 5035 3 127 1 -25 0 0 403 5035_3 +PRL_Young_Final 5035 3 128 2 25 1 1 291 5035_3 +PRL_Young_Final 5035 3 129 2 25 0 1 311 5035_3 +PRL_Young_Final 5035 3 130 2 25 0 1 327 5035_3 +PRL_Young_Final 5035 3 131 2 25 0 1 303 5035_3 +PRL_Young_Final 5035 3 132 2 25 0 1 267 5035_3 +PRL_Young_Final 5035 3 133 2 -25 0 1 360 5035_3 +PRL_Young_Final 5035 3 134 2 25 0 1 351 5035_3 +PRL_Young_Final 5035 3 135 2 -25 0 0 358 5035_3 +PRL_Young_Final 5035 3 136 2 -25 0 0 354 5035_3 +PRL_Young_Final 5035 3 137 1 25 1 1 615 5035_3 +PRL_Young_Final 5035 3 138 1 -25 0 1 329 5035_3 +PRL_Young_Final 5035 3 139 1 25 0 1 314 5035_3 +PRL_Young_Final 5035 3 140 1 25 0 1 351 5035_3 +PRL_Young_Final 5035 3 141 1 25 0 1 356 5035_3 +PRL_Young_Final 5035 3 142 1 25 0 0 304 5035_3 +PRL_Young_Final 5035 3 143 1 25 0 0 278 5035_3 +PRL_Young_Final 5035 3 144 1 -25 0 0 346 5035_3 +PRL_Young_Final 5035 3 145 1 -25 0 0 378 5035_3 +PRL_Young_Final 5035 3 146 2 25 1 1 253 5035_3 +PRL_Young_Final 5035 3 147 2 25 0 1 336 5035_3 +PRL_Young_Final 5035 3 148 2 -25 0 1 796 5035_3 +PRL_Young_Final 5035 3 149 2 -25 0 1 621 5035_3 +PRL_Young_Final 5035 3 150 1 -25 1 0 329 5035_3 +PRL_Young_Final 5035 3 151 2 25 1 1 249 5035_3 +PRL_Young_Final 5035 3 152 2 25 0 1 302 5035_3 +PRL_Young_Final 5035 3 153 2 25 0 1 390 5035_3 +PRL_Young_Final 5035 3 154 2 25 0 1 341 5035_3 +PRL_Young_Final 5035 3 155 2 25 0 1 260 5035_3 +PRL_Young_Final 5035 3 156 2 25 0 1 278 5035_3 +PRL_Young_Final 5035 3 157 2 -25 0 1 432 5035_3 +PRL_Young_Final 5035 3 158 2 -25 0 0 276 5035_3 +PRL_Young_Final 5035 3 159 1 25 1 1 558 5035_3 +PRL_Young_Final 5035 3 160 1 25 0 1 313 5035_3 +PRL_Young_Final 5035 3 161 1 25 0 1 360 5035_3 +PRL_Young_Final 5035 3 162 1 25 0 1 557 5035_3 +PRL_Young_Final 5035 3 163 1 25 0 1 612 5035_3 +PRL_Young_Final 5035 3 164 1 25 0 1 388 5035_3 +PRL_Young_Final 5035 3 165 1 25 0 1 613 5035_3 +PRL_Young_Final 5035 3 166 1 -25 0 1 260 5035_3 +PRL_Young_Final 5035 3 167 1 -25 0 0 856 5035_3 +PRL_Young_Final 5035 3 168 2 25 1 1 586 5035_3 +PRL_Young_Final 5035 3 169 2 25 0 1 705 5035_3 +PRL_Young_Final 5035 3 170 2 -25 0 1 446 5035_3 +PRL_Young_Final 5035 3 171 2 25 0 1 266 5035_3 +PRL_Young_Final 5035 3 172 2 25 0 1 365 5035_3 +PRL_Young_Final 5035 3 173 2 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 174 2 -25 0 0 268 5035_3 +PRL_Young_Final 5035 3 175 2 25 0 0 255 5035_3 +PRL_Young_Final 5035 3 176 2 -25 0 0 533 5035_3 +PRL_Young_Final 5035 3 177 1 25 1 1 320 5035_3 +PRL_Young_Final 5035 3 178 1 25 0 1 285 5035_3 +PRL_Young_Final 5035 3 179 1 -25 0 1 271 5035_3 +PRL_Young_Final 5035 3 180 1 -25 0 1 553 5035_3 +PRL_Young_Final 5035 3 181 2 -25 1 0 275 5035_3 +PRL_Young_Final 5035 3 182 1 25 1 1 293 5035_3 +PRL_Young_Final 5035 3 183 1 25 0 1 554 5035_3 +PRL_Young_Final 5035 3 184 1 25 0 1 300 5035_3 +PRL_Young_Final 5035 3 185 1 25 0 1 274 5035_3 +PRL_Young_Final 5035 3 186 1 25 0 1 289 5035_3 +PRL_Young_Final 5035 3 
187 1 25 0 1 320 5035_3 +PRL_Young_Final 5035 3 188 1 -25 0 1 303 5035_3 +PRL_Young_Final 5035 3 189 1 -25 0 0 390 5035_3 +PRL_Young_Final 5035 3 190 2 25 1 1 272 5035_3 +PRL_Young_Final 5035 3 191 2 25 0 1 673 5035_3 +PRL_Young_Final 5035 3 192 2 25 0 1 263 5035_3 +PRL_Young_Final 5035 3 193 2 25 0 1 274 5035_3 +PRL_Young_Final 5035 3 194 2 25 0 1 578 5035_3 +PRL_Young_Final 5035 3 195 2 25 0 1 483 5035_3 +PRL_Young_Final 5035 3 196 2 -25 0 0 324 5035_3 +PRL_Young_Final 5035 3 197 2 -25 0 0 324 5035_3 +PRL_Young_Final 5035 3 198 1 25 1 1 299 5035_3 +PRL_Young_Final 5035 3 199 1 -25 0 1 406 5035_3 +PRL_Young_Final 5035 3 200 1 25 0 1 272 5035_3
diff --git a/Python/hbayesdm/common/extdata/pst_exampleData.txt b/Python/hbayesdm/common/extdata/pst_exampleData.txt
new file mode 100644
index 00000000..76f91700
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/pst_exampleData.txt
@@ -0,0 +1,1021 @@
+subjID type choice reward
+1 12 0 0 +1 56 1 0 +1 34 0 0 +1 34 1 1 +1 12 1 1 +1 56 1 0 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 34 0 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 56 0 1 +1 34 1 1 +1 12 1 0 +1 56 0 0 +1 12 0 0 +1 34 1 0 +1 56 0 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 0 0 +1 34 1 1 +1 56 0 1 +1 12 0 0 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 56 0 1 +1 34 1 0 +1 12 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 56 0 1 +1 12 0 1 +1 34 0 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 12 1 0 +1 56 0 1 +1 12 1 0 +1 56 0 0 +1 34 1 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 0 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 1 +1 12 0 0 +1 34 1 0 +1 34 0 0 +1 12 0 0 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 0 0 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 34 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 12 1 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 1 +1 34 1 0 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 56 1 1 +1 12 0 0 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 12 0 1 +1 12 1 0 +1 56 0 1 +1 34 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 56 1 1 +1 34 1 1 +1 12 1 1 +1 12 1 0 +1 56 0 0 +1 34 0 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 0 0 +1 56 0 1 +1 34 1 0 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 0 0 +1 56 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 0 1 +1 12 1 1 +1 56 1 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 56 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 1 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 12 1 1
+1 56 0 0 +1 34 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 12 1 0 +1 34 0 0 +1 56 0 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 0 +1 56 0 0 +1 34 0 0 +1 34 1 1 +1 12 1 1 +1 56 0 0 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 34 1 0 +1 56 0 1 +1 12 1 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 1 +1 56 0 1 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 34 1 1 +1 12 1 0 +1 56 0 0 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 1 +1 56 0 1 +1 12 1 1 +1 56 0 1 +1 34 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 12 1 0 +1 34 1 0 +1 56 0 0 +1 12 1 0 +1 56 0 1 +1 34 1 1 +1 12 1 1 +1 34 1 1 +1 56 0 0 +1 34 1 0 +1 12 1 1 +1 56 0 0 +1 12 1 1 +1 34 1 0 +1 56 0 0 +1 12 1 1 +1 34 1 1 +1 56 0 0 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 12 0 0 +2 56 0 0 +2 56 1 0 +2 34 1 1 +2 12 1 1 +2 56 1 0 +2 12 1 0 +2 34 1 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 12 1 0 +2 56 1 0 +2 56 0 0 +2 34 1 1 +2 12 0 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 56 0 0 +2 12 1 0 +2 34 1 1 +2 56 1 0 +2 34 1 1 +2 12 0 0 +2 34 1 1 +2 56 1 1 +2 12 0 1 +2 56 0 1 +2 12 1 1 +2 34 1 0 +2 34 1 1 +2 56 0 1 +2 12 1 0 +2 34 1 0 +2 12 1 0 +2 56 1 1 +2 12 1 1 +2 34 1 0 +2 56 1 0 +2 12 1 1 +2 34 0 0 +2 56 1 1 +2 34 1 1 +2 56 0 0 +2 12 1 1 +2 34 1 1 +2 56 1 1 +2 12 1 1 +2 56 1 1 +2 34 1 1 +2 12 1 1 +2 56 1 0 +2 12 1 1 +2 34 1 0 +3 34 1 0 +3 56 1 1 +3 12 0 0 +3 56 1 1 +3 12 0 0 +3 34 1 1 +3 56 0 1 +3 34 1 1 +3 12 0 1 +3 12 0 0 +3 34 1 1 +3 56 0 0 +3 12 0 0 +3 34 1 1 +3 56 0 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 34 1 1 +3 56 0 0 +3 12 0 1 +3 34 1 1 +3 56 1 0 +3 12 0 1 +3 12 1 1 +3 56 0 0 +3 34 1 0 +3 56 0 0 +3 12 0 0 +3 34 1 0 +3 56 1 0 +3 34 0 0 +3 12 0 1 +3 12 0 0 +3 56 0 0 +3 34 1 1 +3 34 1 1 +3 12 1 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 56 1 0 +3 12 0 0 +3 56 0 0 +3 34 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 1 +3 34 1 0 +3 56 0 0 +3 34 1 0 +3 56 0 0 +3 12 1 0 +3 56 1 1 +3 34 1 0 +3 12 0 1 +3 56 0 0 +3 34 1 1 +3 12 0 0 +3 12 1 1 +3 34 1 0 +3 56 1 1 +3 56 1 1 +3 34 0 0 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 1 1 +3 56 1 0 +3 34 1 0 +3 34 1 0 +3 12 1 0 +3 56 1 1 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 56 1 0 +3 12 1 1 +3 34 1 1 +3 12 1 1 +3 34 1 1 +3 56 1 1 +3 56 1 1 +3 12 0 0 +3 34 1 1 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 34 1 0 +3 12 1 1 +3 56 1 1 +3 34 1 0 +3 12 1 1 +3 56 1 1 +3 34 1 1 +3 56 1 1 +3 12 1 1 +3 34 1 1 +3 12 1 1 +3 56 1 0 +3 34 1 1 +3 56 1 1 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 12 1 0 +3 12 0 0 +3 56 1 1 +3 34 1 1 +3 56 1 1 +3 12 0 0 +3 34 1 0 +3 12 1 1 +3 56 0 0 +3 34 1 1 +4 12 0 0 +4 34 0 0 +4 56 1 1 +4 34 1 1 +4 56 0 1 +4 12 0 0 +4 56 1 0 +4 34 0 1 +4 12 1 1 +4 34 1 0 +4 12 0 1 +4 56 0 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 34 0 1 +4 56 1 1 +4 12 0 1 +4 34 0 0 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 56 1 0 +4 12 0 0 +4 56 0 1 +4 34 0 0 +4 56 1 1 +4 34 0 0 +4 12 1 0 +4 12 1 1 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 56 1 0 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 34 1 1 +4 56 0 1 +4 12 0 0 +4 34 1 1 +4 12 1 1 +4 34 1 0 +4 56 0 0 +4 56 0 0 +4 34 0 0 +4 12 1 1 +4 12 1 1 +4 56 0 1 +4 34 1 0 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 34 1 0 +4 56 0 0 +4 12 1 1 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 34 1 0 +4 12 1 1 +4 56 1 0 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 34 1 1 +4 56 0 0 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 1 0 +4 12 1 1 +4 34 1 0 +4 
56 0 1 +4 34 0 0 +4 12 0 1 +4 56 0 1 +4 34 0 0 +4 56 0 0 +4 12 0 0 +4 34 0 0 +4 12 0 0 +4 56 0 0 +4 56 0 0 +4 34 1 1 +4 12 1 1 +4 12 1 0 +4 34 1 1 +4 56 0 1 +4 12 1 1 +4 34 1 0 +4 56 0 1 +4 34 1 0 +4 12 1 0 +4 56 0 1 +4 12 0 1 +4 56 0 1 +4 34 0 0 +4 34 1 1 +4 12 0 0 +4 56 0 1 +4 12 0 0 +4 34 1 0 +4 56 0 0 +4 34 1 1 +4 12 1 0 +4 56 0 1 +4 12 0 0 +4 56 0 1 +4 34 1 0 +4 12 0 0 +4 56 0 0 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 34 0 1 +4 56 0 1 +4 12 0 0 +4 34 0 1 +4 34 0 0 +4 12 1 1 +4 56 0 0 +4 56 1 0 +4 12 1 0 +4 34 0 1 +4 56 1 1 +4 12 1 1 +4 34 0 1 +4 12 1 1 +4 56 1 1 +4 34 0 1 +4 34 0 0 +4 12 0 0 +4 56 1 1 +4 12 0 0 +4 56 1 1 +4 34 0 0 +4 56 1 0 +4 12 0 0 +4 34 1 1 +4 12 0 0 +4 34 1 1 +4 56 0 1 +4 56 0 0 +4 34 1 1 +4 12 0 0 +4 56 0 1 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 56 0 0 +4 12 0 1 +4 34 0 0 +4 56 1 0 +4 12 1 0 +4 12 0 0 +4 56 1 1 +4 34 0 1 +4 56 1 1 +4 12 1 0 +4 34 0 0 +4 56 1 1 +4 12 0 0 +4 34 1 1 +4 34 1 0 +4 56 0 1 +4 12 1 0 +4 34 1 0 +4 56 0 0 +4 12 0 0 +4 34 0 1 +4 56 0 1 +4 12 1 1 +4 12 0 1 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 34 0 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 1 0 +4 34 0 0 +4 56 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 0 +4 12 1 1 +4 12 1 1 +4 56 1 1 +4 34 0 0 +4 56 1 1 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 34 1 0 +4 12 0 0 +4 56 1 1 +4 34 1 0 +4 56 1 0 +4 12 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 1 +4 34 0 1 +4 56 1 1 +4 12 1 1 +4 12 1 0 +4 34 0 1 +4 56 1 1 +4 56 1 0 +4 12 0 0 +4 34 0 0 +4 56 1 1 +4 34 1 1 +4 12 1 1 +4 12 1 0 +4 34 1 0 +4 56 1 0 +4 34 0 0 +4 12 1 1 +4 56 1 0 +4 56 0 0 +4 12 1 1 +4 34 1 1 +4 34 1 0 +4 12 0 1 +4 56 0 0 +4 34 0 1 +4 56 0 0 +4 12 1 1 +4 12 0 0 +4 34 0 1 +4 56 1 1 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 56 0 1 +4 12 1 1 +4 34 0 0 +4 12 1 0 +4 56 0 0 +4 34 1 0 +4 56 0 0 +4 34 1 0 +4 12 0 0 +4 56 0 1 +4 12 1 1 +4 34 0 0 +4 56 1 0 +4 34 1 0 +4 12 0 0 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 1 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 0 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 1 +4 34 0 0 +4 56 0 0 +4 12 1 1 +4 56 0 1 +4 34 0 0 +4 12 1 1 +4 34 0 1 +4 12 1 1 +4 56 0 0 +4 12 1 1 +4 34 0 0 +4 56 0 0 +4 12 1 0 +4 34 0 0 +4 56 0 1 +4 56 0 0 +4 12 1 0 +4 34 1 1 +4 34 1 1 +4 12 0 0 +4 56 1 1 +4 56 1 0 +4 12 1 0 +4 34 0 0 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 56 0 0 +4 34 0 0 +4 12 1 0 +4 56 0 0 +4 12 0 0 +4 34 0 0 +4 56 0 0 +4 12 0 0 +4 34 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 56 0 0 +4 34 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 0 +4 34 1 0 +4 12 1 1 +4 56 1 1 +4 56 1 1 +4 34 1 0 +4 12 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 12 1 1 +4 34 1 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 12 1 1 +4 34 1 0 +4 56 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 0 +4 12 1 1 +4 56 1 0 +4 34 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 12 1 1 +4 12 1 1 +4 34 1 1 +4 56 1 1 +4 12 1 1 +4 56 1 1 +4 34 1 1 +4 56 1 0 +4 12 1 1 +4 34 1 1 +4 34 1 1 +4 12 1 1 +4 56 1 0 +5 12 1 1 +5 34 0 0 +5 56 0 1 +5 34 0 1 +5 56 0 1 +5 12 0 0 +5 34 0 0 +5 12 1 1 +5 56 0 0 +5 12 1 1 +5 56 0 0 +5 34 0 0 +5 34 0 0 +5 12 1 0 +5 56 0 0 +5 56 0 1 +5 34 0 1 +5 12 1 1 +5 34 0 0 +5 12 1 1 +5 56 1 1 +5 34 0 1 +5 12 1 1 +5 56 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 34 0 1 +5 12 0 0 +5 56 0 0 +5 34 0 0 +5 56 0 1 +5 12 1 1 +5 12 1 1 +5 56 0 0 +5 34 0 0 +5 56 1 0 +5 12 1 0 +5 34 0 1 +5 34 0 0 +5 12 1 1 +5 56 1 1 +5 56 0 1 +5 34 0 0 +5 12 1 1 +5 34 0 1 +5 12 1 1 +5 56 0 0 +5 12 1 0 +5 56 0 0 +5 34 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 12 1 1 +5 56 1 1 +5 34 1 0 +5 56 0 1 +5 12 1 1 +5 34 0 1 +5 34 0 1 +5 56 0 0 +5 12 1 0 +5 12 1 1 +5 56 0 1 +5 34 0 0 +5 12 1 1 +5 34 1 0 +5 56 0 0 +5 34 0 1 +5 56 0 0 +5 12 1 1 +5 56 1 
0 +5 34 1 1 +5 12 1 1 +5 56 1 1 +5 12 1 0 +5 34 1 1 +5 34 0 1 +5 56 0 0 +5 12 1 0 +5 34 0 0 +5 56 0 0 +5 12 1 1 +5 12 1 1 +5 34 0 0 +5 56 0 0 +5 56 0 0 +5 12 1 1 +5 34 1 0 +5 12 1 1 +5 56 1 0 +5 34 0 0 +5 34 0 0 +5 56 0 0 +5 12 1 1 +5 12 1 0 +5 56 0 0 +5 34 1 0 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 0 1 +5 12 1 1 +5 34 1 0 +5 56 1 0 +5 12 1 1 +5 34 1 1 +5 56 1 1 +5 34 1 1 +5 12 1 1 +5 56 1 0 +5 34 1 1 +5 12 1 1 +5 34 1 0 +5 56 1 1 +5 12 1 0
diff --git a/Python/hbayesdm/common/extdata/ra_data_attend.txt b/Python/hbayesdm/common/extdata/ra_data_attend.txt
new file mode 100644
index 00000000..131612dd
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/ra_data_attend.txt
@@ -0,0 +1,4192 @@
+gain loss cert gamble outcome cond subjID
+2 -1 0 1 2 0 1 +9 -13.5 0 0 0 0 1 +5 -6.88 0 0 0 0 1 +10 -10 0 0 0 0 1 +6 -2.25 0 1 6 0 1 +6 -6.75 0 0 0 0 1 +9 -4.5 0 1 -4.5 0 1 +10 -13.75 0 0 0 0 1 +6 -8.25 0 0 0 0 1 +5 -10 0 0 0 0 1 +10 -6.25 0 1 10 0 1 +12 -3 0 1 -3 0 1 +12 -9 0 0 0 0 1 +8 -7 0 0 0 0 1 +6 -12 0 0 0 0 1 +8 -2 0 1 -2 0 1 +12 -6 0 1 -6 0 1 +3 0 1 0 1 0 1 +10 -20 0 1 10 0 1 +5 -3.75 0 0 0 0 1 +2 -1.75 0 0 0 0 1 +6 -3.75 0 1 -3.75 0 1 +9 -12.38 0 0 0 0 1 +5 -6.25 0 0 0 0 1 +12 0 4 1 12 0 1 +2 -1.5 0 0 0 0 1 +6 -5.25 0 0 0 0 1 +10 -18.75 0 0 0 0 1 +6 -6 0 0 0 0 1 +12 0 5 1 12 0 1 +4 -2 0 1 4 0 1 +2 -4 0 0 0 0 1 +5 -2.5 0 1 5 0 1 +2 -3.75 0 0 0 0 1 +9 -15.75 0 0 0 0 1 +8 -4 0 1 8 0 1 +26 0 12 1 26 0 1 +6 -1.5 0 1 6 0 1 +4 -6 0 0 0 0 1 +10 -2.5 0 1 -2.5 0 1 +8 -12 0 0 0 0 1 +2 -3.5 0 0 0 0 1 +5 -5.63 0 0 0 0 1 +12 -24 0 0 0 0 1 +25 0 10 1 0 0 1 +4 -6.5 0 0 0 0 1 +5 -9.38 0 0 0 0 1 +5 -7.5 0 0 0 0 1 +4 -4 0 1 -4 0 1 +6 -10.5 0 0 0 0 1 +13 0 6 1 13 0 1 +12 -22.5 0 0 0 0 1 +4 -7.5 0 0 0 0 1 +5 0 2 1 5 0 1 +10 -15 0 0 0 0 1 +9 -16.88 0 0 0 0 1 +2 -2.5 0 0 0 0 1 +10 -16.25 0 0 0 0 1 +6 -11.25 0 0 0 0 1 +4 -1.5 0 1 4 0 1 +6 -9 0 0 0 0 1 +12 -19.5 0 0 0 0 1 +10 -12.5 0 0 0 0 1 +2 -3 0 0 0 0 1 +8 -16 0 0 0 0 1 +4 0 2 1 4 0 1 +12 -7.5 0 1 -7.5 0 1 +12 -13.5 0 0 0 0 1 +22 0 10 1 22 0 1 +12 -21 0 0 0 0 1 +7 0 3 1 7 0 1 +10 -8.75 0 0 0 0 1 +2 -1.25 0 0 0 0 1 +9 -6.75 0 0 0 0 1 +12 0 6 1 12 0 1 +28 0 13 1 28 0 1 +9 -10.13 0 0 0 0 1 +2 -0.5 0 1 2 0 1 +25 0 9 1 25 0 1 +6 -7.5 0 0 0 0 1 +4 -3 0 0 0 0 1 +10 -3.75 0 1 10 0 1 +12 -4.5 0 1 -4.5 0 1 +12 -15 0 0 0 0 1 +6 -3 0 0 0 0 1 +9 -14.63 0 0 0 0 1 +5 -1.25 0 1 -1.25 0 1 +8 -11 0 0 0 0 1 +10 -17.5 0 0 0 0 1 +8 -10 0 0 0 0 1 +9 -9 0 0 0 0 1 +10 -11.25 0 0 0 0 1 +12 -12 0 0 0 0 1 +8 -14 0 0 0 0 1 +12 -16.5 0 1 -16.5 0 1 +4 -7 0 1 -7 0 1 +4 -1 0 1 -1 0 1 +5 -1.88 0 1 5 0 1 +8 0 3 1 0 0 1 +2 -3.25 0 0 0 0 1 +5 -5 0 0 0 0 1 +26 0 10 1 0 0 1 +12 -10.5 0 0 0 0 1 +2 0 1 1 0 0 1 +6 -9.75 0 0 0 0 1 +8 -3 0 1 8 0 1 +13 0 5 1 13 0 1 +10 -7.5 0 0 0 0 1 +8 -13 0 0 0 0 1 +9 -3.38 0 1 -3.38 0 1 +30 0 12 1 0 0 1 +8 -8 0 0 0 0 1 +8 -5 0 0 0 0 1 +12 -18 0 0 0 0 1 +10 -5 0 1 -5 0 1 +9 -11.25 0 0 0 0 1 +9 -7.88 0 0 0 0 1 +8 -6 0 1 -6 0 1 +6 -4.5 0 0 0 0 1 +8 -9 0 0 0 0 1 +4 -5.5 0 0 0 0 1 +4 -5 0 0 0 0 1 +9 -2.25 0 1 -2.25 0 1 +23 0 10 1 0 0 1 +9 -5.63 0 1 -5.63 0 1 +4 -8 0 0 0 0 1 +19 0 8 1 19 0 1 +2 -2 0 0 0 0 1 +5 -8.13 0 0 0 0 1 +5 -4.38 0 0 0 0 1 +2 -2.25 0 0 0 0 1 +2 -0.75 0 1 -0.75 0 1 +2 -2.75 0 0 0 0 1 +5 -8.75 0 0 0 0 1 +9 -18 0 0 0 0 1 +4 -3.5 0 0 0 0 1 +9 -6.75 0 1 -6.75 0 2 +6 -6.75 0 0 0 0 2 +6 -3 0 1 6 0 2 +2 -1.5 0 0 0 0 2 +4 -3 0 0 0 0 2 +5 -6.88 0 0 0 0 2 +12 -9 0 1 12 0 2 +4 -5 0 0 0 0 2 +5 -7.5 0 0 0 0 2 +4 -4 0 1 -4 0 2 +9 -5.63 0 1 -5.63 0 2 +9 -14.63 0 0 0 0 2 +5 -9.38 0 0 0 0 2 +6 -4.5 0 1 6 0 2 +8 -7 0 0 0 0 2 +10 -16.25 0 0 0 0 2 +10
-17.5 0 0 0 0 2 +9 -16.88 0 0 0 0 2 +8 -5 0 1 8 0 2 +6 -1.5 0 1 6 0 2 +12 -18 0 0 0 0 2 +5 -6.25 0 0 0 0 2 +8 -4 0 1 8 0 2 +9 -15.75 0 0 0 0 2 +9 -13.5 0 0 0 0 2 +5 -8.13 0 0 0 0 2 +2 0 1 1 0 0 2 +2 -3.75 0 0 0 0 2 +4 -6.5 0 0 0 0 2 +10 -5 0 1 -5 0 2 +12 -22.5 0 0 0 0 2 +2 -1 0 1 2 0 2 +13 0 6 1 13 0 2 +5 -2.5 0 0 0 0 2 +2 -0.5 0 1 2 0 2 +2 -3.25 0 1 -3.25 0 2 +30 0 12 1 0 0 2 +8 -8 0 1 8 0 2 +4 -5.5 0 0 0 0 2 +23 0 10 1 0 0 2 +4 -3.5 0 0 0 0 2 +5 0 2 1 5 0 2 +8 0 3 1 0 0 2 +9 -10.13 0 0 0 0 2 +8 -16 0 0 0 0 2 +12 -24 0 0 0 0 2 +9 -3.38 0 1 -3.38 0 2 +6 -5.25 0 1 6 0 2 +2 -4 0 0 0 0 2 +4 -1 0 1 -1 0 2 +6 -11.25 0 0 0 0 2 +5 -4.38 0 1 -4.38 0 2 +6 -2.25 0 1 6 0 2 +12 -10.5 0 1 12 0 2 +9 -18 0 0 0 0 2 +10 -20 0 0 0 0 2 +4 -4.5 0 0 0 0 2 +9 -2.25 0 1 -2.25 0 2 +4 -6 0 0 0 0 2 +8 -10 0 1 -10 0 2 +5 -5 0 1 -5 0 2 +5 -8.75 0 0 0 0 2 +8 -6 0 1 -6 0 2 +10 -13.75 0 0 0 0 2 +2 -2.5 0 0 0 0 2 +8 -11 0 1 -11 0 2 +4 -2 0 1 4 0 2 +10 -7.5 0 1 -7.5 0 2 +22 0 10 1 22 0 2 +25 0 10 1 0 0 2 +6 -9.75 0 0 0 0 2 +12 0 5 1 12 0 2 +4 -2.5 0 1 -2.5 0 2 +8 -3 0 1 8 0 2 +10 -11.25 0 1 -11.25 0 2 +5 -10 0 1 5 0 2 +10 -15 0 0 0 0 2 +2 -3.5 0 0 0 0 2 +12 0 4 1 12 0 2 +13 0 5 0 5 0 2 +5 -3.75 0 1 5 0 2 +26 0 12 0 12 0 2 +5 -5.63 0 0 0 0 2 +8 -2 0 1 -2 0 2 +2 -3 0 0 0 0 2 +6 -9 0 0 0 0 2 +9 -7.88 0 0 0 0 2 +8 -14 0 0 0 0 2 +28 0 13 1 28 0 2 +9 -12.38 0 0 0 0 2 +8 -15 0 1 -15 0 2 +10 -2.5 0 1 -2.5 0 2 +4 0 2 1 4 0 2 +12 -6 0 1 -6 0 2 +12 -16.5 0 1 -16.5 0 2 +4 -7.5 0 0 0 0 2 +10 -8.75 0 1 -8.75 0 2 +10 -18.75 0 1 10 0 2 +26 0 10 1 0 0 2 +12 -21 0 1 12 0 2 +2 -0.75 0 1 -0.75 0 2 +9 -9 0 1 -9 0 2 +10 -6.25 0 1 10 0 2 +8 -12 0 1 -12 0 2 +3 0 1 1 0 0 2 +5 -1.88 0 1 5 0 2 +6 -7.5 0 1 -7.5 0 2 +12 -13.5 0 1 12 0 2 +4 -7 0 0 0 0 2 +6 -8.25 0 1 -8.25 0 2 +6 -12 0 0 0 0 2 +6 -10.5 0 0 0 0 2 +4 -8 0 0 0 0 2 +6 -6 0 1 -6 0 2 +12 0 6 1 12 0 2 +12 -19.5 0 1 12 0 2 +19 0 8 1 19 0 2 +12 -15 0 0 0 0 2 +2 -1.75 0 0 0 0 2 +6 -3.75 0 0 0 0 2 +2 -1.25 0 0 0 0 2 +5 -1.25 0 1 -1.25 0 2 +4 -1.5 0 1 4 0 2 +8 -13 0 0 0 0 2 +12 -7.5 0 1 -7.5 0 2 +12 -3 0 1 -3 0 2 +2 -2.75 0 0 0 0 2 +7 0 3 1 7 0 2 +25 0 9 1 25 0 2 +2 -2 0 0 0 0 2 +12 -4.5 0 1 -4.5 0 2 +12 -12 0 1 12 0 2 +5 -3.13 0 1 5 0 2 +9 -11.25 0 0 0 0 2 +8 -9 0 1 -9 0 2 +2 -2.25 0 0 0 0 2 +9 -4.5 0 1 -4.5 0 2 +10 -3.75 0 1 10 0 2 +10 -10 0 1 10 0 2 +10 -12.5 0 0 0 0 2 +2 -2.5 0 0 0 0 3 +5 -5.63 0 0 0 0 3 +6 -7.5 0 0 0 0 3 +26 0 10 1 0 0 3 +9 -4.5 0 0 0 0 3 +2 -1.25 0 0 0 0 3 +8 -3 0 0 0 0 3 +25 0 9 0 9 0 3 +4 -4.5 0 0 0 0 3 +5 -10 0 0 0 0 3 +6 -9 0 0 0 0 3 +10 -6.25 0 0 0 0 3 +4 -4 0 0 0 0 3 +12 -3 0 0 0 0 3 +5 -5 0 0 0 0 3 +12 0 5 0 5 0 3 +6 -9.75 0 0 0 0 3 +19 0 8 0 8 0 3 +4 -7.5 0 0 0 0 3 +12 -9 0 0 0 0 3 +4 -6.5 0 0 0 0 3 +9 -5.63 0 0 0 0 3 +9 -18 0 0 0 0 3 +10 -11.25 0 0 0 0 3 +10 -13.75 0 0 0 0 3 +6 -12 0 0 0 0 3 +10 -12.5 0 0 0 0 3 +4 -7 0 0 0 0 3 +10 -7.5 0 0 0 0 3 +4 -8 0 0 0 0 3 +8 -11 0 0 0 0 3 +12 0 4 1 12 0 3 +9 -3.38 0 0 0 0 3 +10 -18.75 0 0 0 0 3 +2 -3.5 0 0 0 0 3 +2 -1 0 0 0 0 3 +2 -3.25 0 0 0 0 3 +2 0 1 0 1 0 3 +7 0 3 0 3 0 3 +8 0 3 0 3 0 3 +12 -6 0 0 0 0 3 +2 -0.5 0 1 2 0 3 +9 -7.88 0 0 0 0 3 +8 -15 0 0 0 0 3 +2 -1.5 0 0 0 0 3 +12 -22.5 0 0 0 0 3 +8 -7 0 0 0 0 3 +4 -5.5 0 0 0 0 3 +10 -8.75 0 0 0 0 3 +8 -9 0 0 0 0 3 +2 -4 0 0 0 0 3 +4 0 2 1 4 0 3 +8 -8 0 0 0 0 3 +9 -13.5 0 0 0 0 3 +9 -9 0 0 0 0 3 +6 -3.75 0 0 0 0 3 +13 0 6 0 6 0 3 +5 -1.88 0 1 5 0 3 +6 -6 0 0 0 0 3 +5 -6.88 0 0 0 0 3 +8 -16 0 0 0 0 3 +12 -7.5 0 0 0 0 3 +5 -1.25 0 1 -1.25 0 3 +9 -14.63 0 0 0 0 3 +8 -4 0 0 0 0 3 +10 -17.5 0 0 0 0 3 +5 -3.75 0 0 0 0 3 +6 -10.5 0 0 0 0 3 +13 0 5 1 13 0 3 +10 -16.25 0 0 0 0 3 +5 
-7.5 0 0 0 0 3 +2 -1.75 0 0 0 0 3 +5 -9.38 0 0 0 0 3 +2 -2.75 0 0 0 0 3 +2 -0.75 0 1 -0.75 0 3 +5 -8.13 0 0 0 0 3 +9 -11.25 0 0 0 0 3 +8 -13 0 0 0 0 3 +9 -16.88 0 0 0 0 3 +2 -2 0 0 0 0 3 +12 -18 0 0 0 0 3 +8 -2 0 1 -2 0 3 +2 -3 0 0 0 0 3 +6 -4.5 0 0 0 0 3 +5 0 2 1 5 0 3 +12 -19.5 0 0 0 0 3 +9 -15.75 0 0 0 0 3 +8 -6 0 0 0 0 3 +10 -2.5 0 1 -2.5 0 3 +9 -6.75 0 0 0 0 3 +6 -6.75 0 0 0 0 3 +2 -3.75 0 0 0 0 3 +10 -5 0 0 0 0 3 +2 -2.25 0 0 0 0 3 +26 0 12 0 12 0 3 +12 -13.5 0 0 0 0 3 +8 -5 0 0 0 0 3 +6 -3 0 0 0 0 3 +10 -3.75 0 0 0 0 3 +12 -10.5 0 0 0 0 3 +4 -5 0 0 0 0 3 +9 -2.25 0 0 0 0 3 +4 -3 0 0 0 0 3 +9 -10.13 0 0 0 0 3 +28 0 13 0 13 0 3 +22 0 10 0 10 0 3 +10 -10 0 0 0 0 3 +4 -1 0 0 0 0 3 +4 -2.5 0 0 0 0 3 +12 -24 0 0 0 0 3 +8 -12 0 0 0 0 3 +3 0 1 1 0 0 3 +9 -12.38 0 0 0 0 3 +23 0 10 0 10 0 3 +4 -3.5 0 0 0 0 3 +4 -1.5 0 0 0 0 3 +8 -10 0 0 0 0 3 +8 -14 0 0 0 0 3 +4 -6 0 0 0 0 3 +25 0 10 0 10 0 3 +12 -16.5 0 0 0 0 3 +12 -12 0 0 0 0 3 +5 -2.5 0 0 0 0 3 +5 -8.75 0 0 0 0 3 +12 -4.5 0 0 0 0 3 +12 -15 0 0 0 0 3 +5 -3.13 0 0 0 0 3 +12 -21 0 0 0 0 3 +5 -4.38 0 0 0 0 3 +6 -11.25 0 0 0 0 3 +30 0 12 0 12 0 3 +6 -1.5 0 1 6 0 3 +12 0 6 0 6 0 3 +4 -2 0 0 0 0 3 +10 -15 0 0 0 0 3 +6 -2.25 0 0 0 0 3 +10 -20 0 0 0 0 3 +6 -5.25 0 0 0 0 3 +5 -6.25 0 0 0 0 3 +6 -8.25 0 0 0 0 3 +4 -4.5 0 1 -4.5 0 4 +10 -12.5 0 0 0 0 4 +26 0 12 1 26 0 4 +6 -7.5 0 1 -7.5 0 4 +4 -6.5 0 0 0 0 4 +12 -4.5 0 1 -4.5 0 4 +5 -2.5 0 1 5 0 4 +6 -12 0 0 0 0 4 +9 -14.63 0 1 9 0 4 +6 -6 0 0 0 0 4 +22 0 10 1 22 0 4 +2 -1 0 1 2 0 4 +8 -3 0 1 8 0 4 +12 -9 0 0 0 0 4 +5 -3.75 0 1 5 0 4 +6 -3 0 1 6 0 4 +4 0 2 0 2 0 4 +28 0 13 1 28 0 4 +12 -15 0 0 0 0 4 +9 -11.25 0 0 0 0 4 +12 -10.5 0 1 12 0 4 +5 -1.88 0 1 5 0 4 +2 -2.75 0 0 0 0 4 +4 -7 0 0 0 0 4 +8 -4 0 1 8 0 4 +2 0 1 1 0 0 4 +2 -3.5 0 0 0 0 4 +2 -1.75 0 1 2 0 4 +5 -5 0 0 0 0 4 +12 -12 0 1 12 0 4 +12 0 6 1 12 0 4 +6 -4.5 0 0 0 0 4 +30 0 12 0 12 0 4 +12 -16.5 0 0 0 0 4 +6 -9.75 0 1 6 0 4 +12 -22.5 0 0 0 0 4 +6 -9 0 1 -9 0 4 +5 -3.13 0 0 0 0 4 +5 -9.38 0 0 0 0 4 +12 -7.5 0 1 -7.5 0 4 +5 0 2 1 5 0 4 +10 -15 0 0 0 0 4 +12 -3 0 1 -3 0 4 +13 0 6 0 6 0 4 +9 -16.88 0 0 0 0 4 +6 -11.25 0 0 0 0 4 +8 -5 0 1 8 0 4 +8 -14 0 0 0 0 4 +12 -24 0 1 -24 0 4 +12 0 5 1 12 0 4 +9 -13.5 0 0 0 0 4 +6 -1.5 0 1 6 0 4 +2 -3 0 0 0 0 4 +10 -2.5 0 1 -2.5 0 4 +2 -0.75 0 0 0 0 4 +6 -10.5 0 0 0 0 4 +2 -0.5 0 1 2 0 4 +10 -10 0 0 0 0 4 +8 -10 0 1 -10 0 4 +9 -12.38 0 0 0 0 4 +4 -6 0 0 0 0 4 +6 -2.25 0 1 6 0 4 +9 -15.75 0 0 0 0 4 +12 -13.5 0 0 0 0 4 +8 -6 0 0 0 0 4 +10 -18.75 0 0 0 0 4 +4 -2 0 0 0 0 4 +5 -1.25 0 1 -1.25 0 4 +6 -5.25 0 0 0 0 4 +4 -8 0 1 4 0 4 +25 0 9 1 25 0 4 +2 -3.25 0 0 0 0 4 +10 -11.25 0 1 -11.25 0 4 +4 -7.5 0 0 0 0 4 +9 -5.63 0 1 -5.63 0 4 +6 -6.75 0 0 0 0 4 +8 -2 0 1 -2 0 4 +5 -6.25 0 0 0 0 4 +23 0 10 0 10 0 4 +8 -13 0 0 0 0 4 +10 -13.75 0 0 0 0 4 +5 -10 0 1 5 0 4 +12 0 4 1 12 0 4 +2 -2.5 0 0 0 0 4 +19 0 8 1 19 0 4 +4 -4 0 0 0 0 4 +4 -1 0 1 -1 0 4 +4 -2.5 0 1 -2.5 0 4 +5 -8.13 0 0 0 0 4 +10 -3.75 0 1 10 0 4 +5 -8.75 0 0 0 0 4 +10 -7.5 0 1 -7.5 0 4 +10 -5 0 1 -5 0 4 +10 -20 0 0 0 0 4 +13 0 5 0 5 0 4 +8 -9 0 0 0 0 4 +8 -12 0 0 0 0 4 +10 -16.25 0 0 0 0 4 +5 -6.88 0 1 5 0 4 +4 -5.5 0 0 0 0 4 +5 -7.5 0 0 0 0 4 +9 -10.13 0 0 0 0 4 +6 -8.25 0 0 0 0 4 +26 0 10 0 10 0 4 +4 -5 0 0 0 0 4 +2 -2.25 0 1 2 0 4 +6 -3.75 0 1 -3.75 0 4 +8 -8 0 1 8 0 4 +9 -6.75 0 0 0 0 4 +8 -15 0 1 -15 0 4 +12 -6 0 1 -6 0 4 +25 0 10 0 10 0 4 +12 -19.5 0 0 0 0 4 +9 -7.88 0 0 0 0 4 +4 -1.5 0 1 4 0 4 +8 -7 0 0 0 0 4 +12 -18 0 1 -18 0 4 +2 -2 0 1 2 0 4 +9 -18 0 0 0 0 4 +2 -1.25 0 0 0 0 4 +8 -16 0 0 0 0 4 +5 -4.38 0 0 0 0 4 +2 -4 0 0 0 0 4 +5 -5.63 0 0 0 0 4 
+8 0 3 1 0 0 4 +10 -17.5 0 0 0 0 4 +8 -11 0 0 0 0 4 +2 -1.5 0 1 2 0 4 +4 -3.5 0 0 0 0 4 +2 -3.75 0 0 0 0 4 +3 0 1 1 0 0 4 +12 -21 0 0 0 0 4 +10 -8.75 0 0 0 0 4 +9 -9 0 1 -9 0 4 +4 -3 0 0 0 0 4 +7 0 3 1 7 0 4 +9 -3.38 0 1 -3.38 0 4 +9 -2.25 0 1 -2.25 0 4 +10 -6.25 0 0 0 0 4 +9 -4.5 0 1 -4.5 0 4 +2 -1 0 1 2 0 5 +9 -13.5 0 0 0 0 5 +5 -6.88 0 1 5 0 5 +10 -10 0 1 10 0 5 +6 -2.25 0 0 0 0 5 +6 -6.75 0 1 -6.75 0 5 +9 -4.5 0 0 0 0 5 +10 -13.75 0 0 0 0 5 +6 -8.25 0 0 0 0 5 +5 -10 0 0 0 0 5 +10 -6.25 0 1 10 0 5 +12 -3 0 1 -3 0 5 +12 -9 0 0 0 0 5 +8 -7 0 1 -7 0 5 +6 -12 0 0 0 0 5 +8 -2 0 1 -2 0 5 +12 -6 0 1 -6 0 5 +3 0 1 1 0 0 5 +10 -20 0 1 10 0 5 +5 -3.75 0 1 5 0 5 +2 -1.75 0 0 0 0 5 +6 -3.75 0 0 0 0 5 +9 -12.38 0 0 0 0 5 +5 -6.25 0 0 0 0 5 +12 0 4 0 4 0 5 +2 -1.5 0 1 2 0 5 +6 -5.25 0 0 0 0 5 +10 -18.75 0 0 0 0 5 +6 -6 0 1 -6 0 5 +12 0 5 0 5 0 5 +4 -2 0 1 4 0 5 +2 -4 0 0 0 0 5 +5 -2.5 0 1 5 0 5 +2 -3.75 0 0 0 0 5 +9 -15.75 0 0 0 0 5 +8 -4 0 1 8 0 5 +26 0 12 0 12 0 5 +6 -1.5 0 1 6 0 5 +4 -6 0 0 0 0 5 +10 -2.5 0 1 -2.5 0 5 +8 -12 0 0 0 0 5 +2 -3.5 0 0 0 0 5 +5 -5.63 0 1 -5.63 0 5 +12 -24 0 0 0 0 5 +25 0 10 0 10 0 5 +4 -6.5 0 0 0 0 5 +5 -9.38 0 0 0 0 5 +5 -7.5 0 1 -7.5 0 5 +4 -4 0 1 -4 0 5 +6 -10.5 0 1 -10.5 0 5 +13 0 6 1 13 0 5 +12 -22.5 0 0 0 0 5 +4 -7.5 0 0 0 0 5 +5 0 2 1 5 0 5 +10 -15 0 0 0 0 5 +9 -16.88 0 0 0 0 5 +2 -2.5 0 1 2 0 5 +10 -16.25 0 0 0 0 5 +6 -11.25 0 0 0 0 5 +4 -1.5 0 1 4 0 5 +5 -3.13 0 1 5 0 5 +6 -9 0 0 0 0 5 +12 -19.5 0 0 0 0 5 +10 -12.5 0 0 0 0 5 +2 -3 0 0 0 0 5 +8 -16 0 0 0 0 5 +4 0 2 1 4 0 5 +12 -7.5 0 0 0 0 5 +12 -13.5 0 0 0 0 5 +22 0 10 0 10 0 5 +12 -21 0 0 0 0 5 +7 0 3 0 3 0 5 +10 -8.75 0 1 -8.75 0 5 +2 -1.25 0 1 2 0 5 +9 -6.75 0 1 -6.75 0 5 +12 0 6 0 6 0 5 +28 0 13 0 13 0 5 +9 -10.13 0 0 0 0 5 +2 -0.5 0 1 2 0 5 +25 0 9 0 9 0 5 +6 -7.5 0 1 -7.5 0 5 +4 -3 0 1 4 0 5 +10 -3.75 0 1 10 0 5 +12 -4.5 0 1 -4.5 0 5 +12 -15 0 0 0 0 5 +6 -3 0 1 6 0 5 +9 -14.63 0 0 0 0 5 +5 -1.25 0 1 -1.25 0 5 +8 -11 0 0 0 0 5 +10 -17.5 0 0 0 0 5 +8 -10 0 0 0 0 5 +9 -9 0 0 0 0 5 +10 -11.25 0 0 0 0 5 +12 -12 0 0 0 0 5 +8 -14 0 0 0 0 5 +12 -16.5 0 0 0 0 5 +4 -7 0 0 0 0 5 +4 -1 0 1 -1 0 5 +5 -1.88 0 1 5 0 5 +8 0 3 0 3 0 5 +2 -3.25 0 1 -3.25 0 5 +5 -5 0 1 -5 0 5 +26 0 10 0 10 0 5 +12 -10.5 0 0 0 0 5 +2 0 1 0 1 0 5 +6 -9.75 0 0 0 0 5 +8 -3 0 1 8 0 5 +13 0 5 1 13 0 5 +10 -7.5 0 0 0 0 5 +8 -13 0 0 0 0 5 +9 -3.38 0 1 -3.38 0 5 +8 -15 0 0 0 0 5 +30 0 12 0 12 0 5 +8 -8 0 0 0 0 5 +8 -5 0 1 8 0 5 +12 -18 0 0 0 0 5 +10 -5 0 1 -5 0 5 +9 -11.25 0 1 9 0 5 +9 -7.88 0 0 0 0 5 +8 -6 0 1 -6 0 5 +6 -4.5 0 1 6 0 5 +8 -9 0 0 0 0 5 +4 -5.5 0 0 0 0 5 +4 -5 0 1 4 0 5 +9 -2.25 0 1 -2.25 0 5 +9 -5.63 0 1 -5.63 0 5 +4 -4.5 0 0 0 0 5 +4 -8 0 0 0 0 5 +19 0 8 0 8 0 5 +2 -2 0 1 2 0 5 +5 -8.13 0 1 5 0 5 +5 -4.38 0 1 -4.38 0 5 +2 -2.25 0 0 0 0 5 +2 -0.75 0 1 -0.75 0 5 +2 -2.75 0 0 0 0 5 +5 -8.75 0 0 0 0 5 +9 -18 0 0 0 0 5 +4 -3.5 0 1 4 0 5 +4 -2.5 0 1 -2.5 0 5 +9 -6.75 0 1 -6.75 0 6 +6 -6.75 0 1 -6.75 0 6 +6 -3 0 1 6 0 6 +2 -1.5 0 1 2 0 6 +4 -3 0 1 4 0 6 +5 -6.88 0 0 0 0 6 +12 -9 0 0 0 0 6 +4 -5 0 0 0 0 6 +5 -7.5 0 0 0 0 6 +4 -4 0 1 -4 0 6 +9 -5.63 0 1 -5.63 0 6 +9 -14.63 0 0 0 0 6 +5 -9.38 0 0 0 0 6 +6 -4.5 0 1 6 0 6 +8 -7 0 1 -7 0 6 +10 -16.25 0 0 0 0 6 +10 -17.5 0 0 0 0 6 +9 -16.88 0 0 0 0 6 +8 -5 0 1 8 0 6 +6 -1.5 0 1 6 0 6 +12 -18 0 0 0 0 6 +5 -6.25 0 0 0 0 6 +8 -4 0 1 8 0 6 +9 -15.75 0 0 0 0 6 +9 -13.5 0 0 0 0 6 +5 -8.13 0 0 0 0 6 +2 0 1 1 0 0 6 +2 -3.75 0 1 -3.75 0 6 +4 -6.5 0 0 0 0 6 +10 -5 0 1 -5 0 6 +12 -22.5 0 0 0 0 6 +2 -1 0 1 2 0 6 +13 0 6 0 6 0 6 +5 -2.5 0 1 5 0 6 +2 -0.5 0 1 2 0 6 +2 -3.25 0 0 0 0 6 +30 0 12 1 0 0 6 +8 -8 0 0 
0 0 6 +4 -5.5 0 0 0 0 6 +23 0 10 1 0 0 6 +4 -3.5 0 1 4 0 6 +5 0 2 1 5 0 6 +8 0 3 1 0 0 6 +9 -10.13 0 0 0 0 6 +8 -16 0 0 0 0 6 +12 -24 0 0 0 0 6 +9 -3.38 0 1 -3.38 0 6 +6 -5.25 0 0 0 0 6 +2 -4 0 0 0 0 6 +4 -1 0 1 -1 0 6 +6 -11.25 0 0 0 0 6 +5 -4.38 0 1 -4.38 0 6 +6 -2.25 0 1 6 0 6 +12 -10.5 0 0 0 0 6 +9 -18 0 0 0 0 6 +10 -20 0 0 0 0 6 +4 -4.5 0 1 -4.5 0 6 +9 -2.25 0 1 -2.25 0 6 +4 -6 0 0 0 0 6 +8 -10 0 0 0 0 6 +5 -5 0 1 -5 0 6 +5 -8.75 0 0 0 0 6 +8 -6 0 1 -6 0 6 +10 -13.75 0 0 0 0 6 +2 -2.5 0 1 2 0 6 +8 -11 0 0 0 0 6 +4 -2 0 1 4 0 6 +10 -7.5 0 1 -7.5 0 6 +22 0 10 0 10 0 6 +25 0 10 1 0 0 6 +6 -9.75 0 0 0 0 6 +12 0 5 0 5 0 6 +4 -2.5 0 1 -2.5 0 6 +8 -3 0 1 8 0 6 +10 -11.25 0 0 0 0 6 +5 -10 0 0 0 0 6 +10 -15 0 0 0 0 6 +2 -3.5 0 1 -3.5 0 6 +12 0 4 1 12 0 6 +13 0 5 0 5 0 6 +5 -3.75 0 1 5 0 6 +26 0 12 1 26 0 6 +5 -5.63 0 1 -5.63 0 6 +8 -2 0 1 -2 0 6 +2 -3 0 1 -3 0 6 +6 -9 0 0 0 0 6 +9 -7.88 0 1 -7.88 0 6 +8 -14 0 0 0 0 6 +28 0 13 0 13 0 6 +9 -12.38 0 0 0 0 6 +8 -15 0 0 0 0 6 +10 -2.5 0 1 -2.5 0 6 +4 0 2 1 4 0 6 +12 -6 0 1 -6 0 6 +12 -16.5 0 0 0 0 6 +4 -7.5 0 0 0 0 6 +10 -8.75 0 1 -8.75 0 6 +10 -18.75 0 0 0 0 6 +26 0 10 1 0 0 6 +12 -21 0 0 0 0 6 +2 -0.75 0 1 -0.75 0 6 +9 -9 0 1 -9 0 6 +10 -6.25 0 1 10 0 6 +8 -12 0 0 0 0 6 +3 0 1 1 0 0 6 +5 -1.88 0 1 5 0 6 +6 -7.5 0 0 0 0 6 +12 -13.5 0 1 12 0 6 +4 -7 0 0 0 0 6 +6 -8.25 0 0 0 0 6 +6 -12 0 0 0 0 6 +6 -10.5 0 0 0 0 6 +4 -8 0 0 0 0 6 +6 -6 0 1 -6 0 6 +12 0 6 0 6 0 6 +12 -19.5 0 0 0 0 6 +19 0 8 1 19 0 6 +12 -15 0 0 0 0 6 +2 -1.75 0 1 2 0 6 +6 -3.75 0 1 -3.75 0 6 +2 -1.25 0 1 2 0 6 +5 -1.25 0 1 -1.25 0 6 +4 -1.5 0 1 4 0 6 +8 -13 0 0 0 0 6 +12 -7.5 0 1 -7.5 0 6 +12 -3 0 1 -3 0 6 +2 -2.75 0 1 2 0 6 +7 0 3 1 7 0 6 +25 0 9 1 25 0 6 +2 -2 0 1 2 0 6 +12 -4.5 0 1 -4.5 0 6 +12 -12 0 0 0 0 6 +5 -3.13 0 1 5 0 6 +9 -11.25 0 0 0 0 6 +8 -9 0 0 0 0 6 +2 -2.25 0 1 2 0 6 +9 -4.5 0 1 -4.5 0 6 +10 -3.75 0 1 10 0 6 +10 -10 0 0 0 0 6 +10 -12.5 0 0 0 0 6 +2 -2.5 0 1 2 0 7 +5 -5.63 0 0 0 0 7 +6 -7.5 0 0 0 0 7 +26 0 10 1 0 0 7 +9 -4.5 0 1 -4.5 0 7 +2 -1.25 0 1 2 0 7 +8 -3 0 1 8 0 7 +25 0 9 1 25 0 7 +4 -4.5 0 1 -4.5 0 7 +5 -10 0 0 0 0 7 +6 -9 0 0 0 0 7 +10 -6.25 0 0 0 0 7 +4 -4 0 1 -4 0 7 +12 -3 0 1 -3 0 7 +5 -5 0 0 0 0 7 +12 0 5 1 12 0 7 +6 -9.75 0 0 0 0 7 +19 0 8 1 19 0 7 +4 -7.5 0 0 0 0 7 +12 -9 0 0 0 0 7 +4 -6.5 0 0 0 0 7 +9 -5.63 0 1 -5.63 0 7 +9 -18 0 0 0 0 7 +10 -11.25 0 0 0 0 7 +10 -13.75 0 0 0 0 7 +6 -12 0 0 0 0 7 +10 -12.5 0 0 0 0 7 +4 -7 0 0 0 0 7 +10 -7.5 0 0 0 0 7 +4 -8 0 0 0 0 7 +8 -11 0 0 0 0 7 +12 0 4 1 12 0 7 +9 -3.38 0 1 -3.38 0 7 +10 -18.75 0 0 0 0 7 +2 -3.5 0 0 0 0 7 +2 -1 0 1 2 0 7 +2 -3.25 0 0 0 0 7 +2 0 1 1 0 0 7 +7 0 3 1 7 0 7 +8 0 3 1 0 0 7 +12 -6 0 1 -6 0 7 +2 -0.5 0 1 2 0 7 +9 -7.88 0 0 0 0 7 +8 -15 0 0 0 0 7 +2 -1.5 0 1 2 0 7 +12 -22.5 0 0 0 0 7 +8 -7 0 1 -7 0 7 +4 -5.5 0 0 0 0 7 +10 -8.75 0 0 0 0 7 +8 -9 0 0 0 0 7 +2 -4 0 0 0 0 7 +4 0 2 1 4 0 7 +8 -8 0 0 0 0 7 +9 -13.5 0 0 0 0 7 +9 -9 0 0 0 0 7 +6 -3.75 0 1 -3.75 0 7 +13 0 6 0 6 0 7 +5 -1.88 0 1 5 0 7 +6 -6 0 0 0 0 7 +5 -6.88 0 0 0 0 7 +8 -16 0 0 0 0 7 +12 -7.5 0 1 -7.5 0 7 +5 -1.25 0 1 -1.25 0 7 +9 -14.63 0 0 0 0 7 +8 -4 0 1 8 0 7 +10 -17.5 0 0 0 0 7 +5 -3.75 0 1 5 0 7 +6 -10.5 0 0 0 0 7 +13 0 5 1 13 0 7 +10 -16.25 0 0 0 0 7 +5 -7.5 0 0 0 0 7 +2 -1.75 0 1 2 0 7 +5 -9.38 0 0 0 0 7 +2 -2.75 0 0 0 0 7 +2 -0.75 0 1 -0.75 0 7 +5 -8.13 0 0 0 0 7 +9 -11.25 0 0 0 0 7 +8 -13 0 0 0 0 7 +9 -16.88 0 0 0 0 7 +2 -2 0 0 0 0 7 +12 -18 0 0 0 0 7 +8 -2 0 1 -2 0 7 +2 -3 0 0 0 0 7 +6 -4.5 0 1 6 0 7 +5 0 2 1 5 0 7 +12 -19.5 0 0 0 0 7 +9 -15.75 0 0 0 0 7 +8 -6 0 0 0 0 7 +10 -2.5 0 1 -2.5 0 7 +9 -6.75 0 0 0 0 7 +6 -6.75 0 
[... remaining trial rows for subjects 7-30 of the preceding example-data file elided; in the source the diff's added lines are run together, but each is a "+"-prefixed row with seven whitespace-separated columns (gain loss cert gamble outcome cond subjID), e.g. "+2 -3.75 0 0 0 0 7" ...]
+4 -2.5 0 1 -2.5 0 30
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/ra_data_reappraisal.txt b/Python/hbayesdm/common/extdata/ra_data_reappraisal.txt
new file mode 100644
index 00000000..b67f642b
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/ra_data_reappraisal.txt
@@ -0,0 +1,4190 @@
+gain loss cert gamble outcome cond subjID
+9 -11.25 0 1 9 1 1
+8 -16 0 0 0 1 1
+9 -5.63 0 1 -5.63 1 1
+9 -4.5 0 1 9 1 1
+2 -2 0 1 2 1 1
[... remaining data rows elided; per the hunk header the new file adds 4,190 lines in total, and the source excerpt breaks off mid-row within subject 3's trials ...]
-3.25 0 0 0 1 3 +7 0 3 0 3 1 3 +6 -3.75 0 0 0 1 3 +5 -6.25 0 0 0 1 3 +8 -15 0 0 0 1 3 +25 0 9 0 9 1 3 +2 -3.5 0 0 0 1 3 +6 -10.5 0 0 0 1 3 +9 -10.13 0 1 -10.13 1 4 +12 -10.5 0 0 0 1 4 +25 0 10 1 25 1 4 +4 -7 0 1 -7 1 4 +9 -7.88 0 0 0 1 4 +5 -3.13 0 1 5 1 4 +5 -8.13 0 1 5 1 4 +8 -7 0 0 0 1 4 +12 -6 0 1 -6 1 4 +12 -24 0 0 0 1 4 +12 -21 0 0 0 1 4 +4 -2.5 0 1 4 1 4 +6 -9 0 0 0 1 4 +10 -15 0 1 10 1 4 +8 -6 0 1 -6 1 4 +13 0 6 1 0 1 4 +6 -12 0 1 -12 1 4 +6 -4.5 0 0 0 1 4 +9 -16.88 0 0 0 1 4 +10 -18.75 0 1 10 1 4 +9 -3.38 0 1 -3.38 1 4 +6 -9.75 0 1 -9.75 1 4 +2 -1.75 0 0 0 1 4 +5 0 2 0 2 1 4 +8 -5 0 1 -5 1 4 +8 -9 0 0 0 1 4 +12 0 6 1 0 1 4 +12 0 4 1 12 1 4 +2 -2.5 0 0 0 1 4 +6 -3 0 1 -3 1 4 +10 -7.5 0 1 -7.5 1 4 +5 -2.5 0 1 -2.5 1 4 +5 -3.75 0 1 5 1 4 +10 -3.75 0 1 10 1 4 +2 -3 0 0 0 1 4 +10 -6.25 0 1 -6.25 1 4 +4 -7.5 0 0 0 1 4 +8 -16 0 0 0 1 4 +5 -6.25 0 1 5 1 4 +4 0 2 1 4 1 4 +10 -11.25 0 1 10 1 4 +5 -6.88 0 0 0 1 4 +5 -7.5 0 1 5 1 4 +26 0 12 0 12 1 4 +8 -13 0 1 8 1 4 +4 -4.5 0 0 0 1 4 +8 -10 0 1 -10 1 4 +6 -3.75 0 1 6 1 4 +5 -5.63 0 0 0 1 4 +9 -18 0 0 0 1 4 +12 -13.5 0 1 12 1 4 +7 0 3 0 3 1 4 +8 -14 0 1 8 1 4 +2 -1.5 0 0 0 1 4 +10 -2.5 0 1 -2.5 1 4 +13 0 5 1 13 1 4 +9 -15.75 0 0 0 1 4 +8 -12 0 0 0 1 4 +28 0 13 1 0 1 4 +6 -7.5 0 0 0 1 4 +10 -16.25 0 0 0 1 4 +12 -7.5 0 1 -7.5 1 4 +5 -5 0 0 0 1 4 +2 -2 0 1 2 1 4 +22 0 10 0 10 1 4 +2 -1 0 1 2 1 4 +3 0 1 1 3 1 4 +4 -5.5 0 0 0 1 4 +2 -2.25 0 1 2 1 4 +6 -2.25 0 1 6 1 4 +4 -6.5 0 1 -6.5 1 4 +9 -12.38 0 0 0 1 4 +10 -13.75 0 1 -13.75 1 4 +10 -17.5 0 0 0 1 4 +4 -5 0 1 4 1 4 +9 -11.25 0 1 9 1 4 +10 -10 0 0 0 1 4 +2 -3.25 0 0 0 1 4 +5 -8.75 0 1 5 1 4 +5 -10 0 0 0 1 4 +9 -2.25 0 1 -2.25 1 4 +6 -6.75 0 1 6 1 4 +12 -16.5 0 1 12 1 4 +9 -14.63 0 0 0 1 4 +4 -8 0 0 0 1 4 +6 -5.25 0 1 6 1 4 +9 -6.75 0 1 -6.75 1 4 +12 -12 0 1 -12 1 4 +4 -1 0 1 4 1 4 +12 -15 0 1 -15 1 4 +4 -3.5 0 1 -3.5 1 4 +2 -1.25 0 1 -1.25 1 4 +30 0 12 0 12 1 4 +12 -19.5 0 1 12 1 4 +12 -3 0 1 12 1 4 +5 -1.25 0 0 0 1 4 +5 -1.88 0 1 -1.88 1 4 +2 -3.5 0 0 0 1 4 +12 -9 0 1 -9 1 4 +10 -20 0 0 0 1 4 +8 -4 0 1 -4 1 4 +12 0 5 1 0 1 4 +2 0 1 0 1 1 4 +4 -1.5 0 1 4 1 4 +2 -3.75 0 0 0 1 4 +6 -10.5 0 1 -10.5 1 4 +4 -2 0 1 -2 1 4 +23 0 10 1 0 1 4 +12 -18 0 1 12 1 4 +6 -8.25 0 0 0 1 4 +26 0 10 1 26 1 4 +10 -8.75 0 1 -8.75 1 4 +2 -0.75 0 1 2 1 4 +5 -9.38 0 1 -9.38 1 4 +25 0 9 1 0 1 4 +9 -4.5 0 0 0 1 4 +10 -5 0 1 10 1 4 +2 -4 0 0 0 1 4 +2 -2.75 0 1 -2.75 1 4 +4 -6 0 0 0 1 4 +10 -12.5 0 1 -12.5 1 4 +12 -22.5 0 0 0 1 4 +4 -4 0 1 4 1 4 +2 -0.5 0 1 -0.5 1 4 +8 -2 0 1 -2 1 4 +4 -3 0 0 0 1 4 +6 -11.25 0 1 6 1 4 +8 -15 0 1 -15 1 4 +8 -11 0 0 0 1 4 +12 -4.5 0 1 -4.5 1 4 +19 0 8 1 19 1 4 +6 -6 0 1 -6 1 4 +5 -4.38 0 0 0 1 4 +9 -9 0 1 9 1 4 +6 -1.5 0 1 -1.5 1 4 +9 -13.5 0 0 0 1 4 +9 -5.63 0 1 -5.63 1 4 +8 -8 0 1 -8 1 4 +8 0 3 0 3 1 4 +8 -3 0 0 0 1 4 +9 -11.25 0 1 9 1 5 +8 -16 0 0 0 1 5 +9 -5.63 0 0 0 1 5 +9 -4.5 0 1 9 1 5 +2 -2 0 1 2 1 5 +12 -19.5 0 0 0 1 5 +4 -4.5 0 0 0 1 5 +2 -3.75 0 1 -3.75 1 5 +2 -2.25 0 0 0 1 5 +12 -4.5 0 1 -4.5 1 5 +9 -10.13 0 0 0 1 5 +12 -3 0 1 12 1 5 +10 -17.5 0 0 0 1 5 +5 -4.38 0 1 -4.38 1 5 +5 -7.5 0 1 5 1 5 +6 -11.25 0 0 0 1 5 +2 -1.5 0 1 -1.5 1 5 +9 -6.75 0 1 -6.75 1 5 +4 -7 0 1 -7 1 5 +8 -7 0 1 8 1 5 +2 -1.75 0 0 0 1 5 +2 -1 0 1 2 1 5 +10 -6.25 0 1 -6.25 1 5 +6 -6.75 0 1 6 1 5 +9 -2.25 0 1 -2.25 1 5 +2 -0.75 0 1 2 1 5 +12 0 4 0 4 1 5 +6 -3 0 1 -3 1 5 +3 0 1 1 3 1 5 +2 -3 0 0 0 1 5 +10 -13.75 0 0 0 1 5 +6 -2.25 0 1 6 1 5 +5 -1.88 0 1 -1.88 1 5 +12 -13.5 0 0 0 1 5 +22 0 10 0 10 1 5 +9 -12.38 0 0 0 1 5 +26 0 10 0 10 1 5 +12 -10.5 0 0 0 1 5 +10 -2.5 0 1 -2.5 1 5 +25 0 10 0 10 1 5 +9 -15.75 0 0 0 1 
5 +7 0 3 0 3 1 5 +10 -10 0 0 0 1 5 +12 -15 0 0 0 1 5 +12 0 6 0 6 1 5 +6 -4.5 0 0 0 1 5 +8 -13 0 0 0 1 5 +10 -16.25 0 0 0 1 5 +5 -1.25 0 1 5 1 5 +4 -4 0 1 4 1 5 +5 -3.75 0 1 5 1 5 +6 -8.25 0 0 0 1 5 +8 -15 0 0 0 1 5 +8 -8 0 1 -8 1 5 +2 -2.75 0 1 -2.75 1 5 +6 -12 0 1 -12 1 5 +2 0 1 1 2 1 5 +2 -1.25 0 1 -1.25 1 5 +9 -18 0 0 0 1 5 +6 -9 0 0 0 1 5 +10 -8.75 0 1 -8.75 1 5 +4 -7.5 0 0 0 1 5 +13 0 6 1 0 1 5 +10 -11.25 0 0 0 1 5 +4 -3 0 1 4 1 5 +10 -5 0 1 10 1 5 +8 -2 0 1 -2 1 5 +4 -2.5 0 1 4 1 5 +2 -3.5 0 0 0 1 5 +2 -2.5 0 0 0 1 5 +6 -3.75 0 1 6 1 5 +8 -3 0 1 8 1 5 +2 -3.25 0 0 0 1 5 +8 -9 0 0 0 1 5 +6 -6 0 1 -6 1 5 +8 -11 0 0 0 1 5 +5 -8.75 0 1 5 1 5 +6 -9.75 0 0 0 1 5 +12 -24 0 0 0 1 5 +4 -6.5 0 0 0 1 5 +5 -10 0 0 0 1 5 +30 0 12 0 12 1 5 +12 -18 0 0 0 1 5 +9 -9 0 1 9 1 5 +5 -5 0 1 -5 1 5 +5 -9.38 0 0 0 1 5 +10 -12.5 0 0 0 1 5 +10 -18.75 0 0 0 1 5 +5 -2.5 0 1 -2.5 1 5 +9 -14.63 0 0 0 1 5 +28 0 13 0 13 1 5 +5 -6.88 0 0 0 1 5 +4 -3.5 0 1 -3.5 1 5 +12 -16.5 0 0 0 1 5 +5 -8.13 0 0 0 1 5 +9 -16.88 0 0 0 1 5 +9 -3.38 0 1 -3.38 1 5 +12 0 5 1 0 1 5 +4 -8 0 0 0 1 5 +8 -12 0 1 8 1 5 +8 -4 0 1 -4 1 5 +2 -4 0 0 0 1 5 +12 -9 0 1 -9 1 5 +4 -1.5 0 1 4 1 5 +6 -10.5 0 0 0 1 5 +5 -3.13 0 1 5 1 5 +10 -15 0 0 0 1 5 +23 0 10 0 10 1 5 +12 -7.5 0 1 -7.5 1 5 +2 -0.5 0 1 -0.5 1 5 +4 0 2 0 2 1 5 +6 -1.5 0 1 -1.5 1 5 +4 -1 0 1 4 1 5 +10 -20 0 0 0 1 5 +12 -22.5 0 0 0 1 5 +25 0 9 0 9 1 5 +13 0 5 1 13 1 5 +6 -5.25 0 0 0 1 5 +9 -13.5 0 0 0 1 5 +5 0 2 0 2 1 5 +12 -6 0 1 -6 1 5 +5 -6.25 0 1 5 1 5 +10 -3.75 0 1 10 1 5 +9 -7.88 0 0 0 1 5 +8 -6 0 1 -6 1 5 +4 -5.5 0 0 0 1 5 +19 0 8 0 8 1 5 +10 -7.5 0 1 -7.5 1 5 +4 -6 0 0 0 1 5 +8 -14 0 0 0 1 5 +8 0 3 0 3 1 5 +12 -21 0 0 0 1 5 +4 -2 0 1 -2 1 5 +4 -5 0 0 0 1 5 +6 -7.5 0 1 -7.5 1 5 +12 -12 0 1 -12 1 5 +8 -5 0 1 -5 1 5 +26 0 12 0 12 1 5 +8 -10 0 0 0 1 5 +5 -5.63 0 0 0 1 5 +2 -1 0 1 2 1 6 +9 -6.75 0 1 -6.75 1 6 +2 -4 0 0 0 1 6 +2 -3.25 0 1 2 1 6 +4 -6.5 0 0 0 1 6 +5 -5.63 0 1 -5.63 1 6 +8 -8 0 1 -8 1 6 +12 -18 0 0 0 1 6 +2 -2.5 0 1 2 1 6 +3 0 1 1 3 1 6 +12 -16.5 0 1 12 1 6 +10 -12.5 0 1 -12.5 1 6 +5 -1.25 0 1 5 1 6 +19 0 8 1 19 1 6 +8 -9 0 1 -9 1 6 +5 -10 0 0 0 1 6 +25 0 10 1 25 1 6 +7 0 3 1 0 1 6 +6 -11.25 0 1 6 1 6 +6 -1.5 0 1 -1.5 1 6 +4 -1.5 0 1 4 1 6 +10 -5 0 1 10 1 6 +10 -3.75 0 1 10 1 6 +6 -4.5 0 1 -4.5 1 6 +12 -19.5 0 1 12 1 6 +5 -4.38 0 1 -4.38 1 6 +8 -11 0 0 0 1 6 +2 -0.75 0 1 2 1 6 +2 -1.5 0 1 -1.5 1 6 +6 -6.75 0 1 6 1 6 +4 -6 0 1 4 1 6 +10 -16.25 0 1 -16.25 1 6 +12 -15 0 1 -15 1 6 +6 -5.25 0 1 6 1 6 +12 -21 0 0 0 1 6 +4 -3 0 1 4 1 6 +12 -22.5 0 0 0 1 6 +2 -3.75 0 1 -3.75 1 6 +6 -12 0 1 -12 1 6 +5 -8.13 0 1 5 1 6 +10 -8.75 0 1 -8.75 1 6 +12 -6 0 1 -6 1 6 +5 -5 0 1 -5 1 6 +22 0 10 0 10 1 6 +12 -13.5 0 0 0 1 6 +8 -7 0 1 8 1 6 +4 -3.5 0 1 -3.5 1 6 +9 -12.38 0 1 9 1 6 +10 -7.5 0 1 -7.5 1 6 +26 0 10 1 26 1 6 +12 -4.5 0 1 -4.5 1 6 +8 -15 0 1 -15 1 6 +2 -1.75 0 1 2 1 6 +12 0 6 1 0 1 6 +9 -3.38 0 1 -3.38 1 6 +2 -3 0 1 -3 1 6 +9 -5.63 0 1 -5.63 1 6 +2 -3.5 0 1 -3.5 1 6 +8 -12 0 0 0 1 6 +10 -18.75 0 0 0 1 6 +4 0 2 1 4 1 6 +2 -2.25 0 1 2 1 6 +9 -2.25 0 1 -2.25 1 6 +10 -13.75 0 0 0 1 6 +28 0 13 0 13 1 6 +4 -2.5 0 1 4 1 6 +9 -15.75 0 0 0 1 6 +10 -15 0 1 10 1 6 +10 -10 0 1 10 1 6 +9 -18 0 0 0 1 6 +12 -24 0 0 0 1 6 +13 0 5 1 13 1 6 +5 -1.88 0 1 -1.88 1 6 +4 -4.5 0 1 4 1 6 +9 -7.88 0 1 9 1 6 +9 -9 0 1 9 1 6 +25 0 9 1 0 1 6 +12 -12 0 1 -12 1 6 +6 -2.25 0 1 6 1 6 +8 -5 0 1 -5 1 6 +4 -5.5 0 1 -5.5 1 6 +2 -1.25 0 1 -1.25 1 6 +9 -13.5 0 0 0 1 6 +9 -4.5 0 1 9 1 6 +10 -11.25 0 1 10 1 6 +6 -3 0 1 -3 1 6 +10 -2.5 0 1 -2.5 1 6 +12 0 4 1 12 1 6 +10 -20 0 0 0 1 6 +5 -3.75 0 1 5 1 6 +9 -10.13 0 1 -10.13 1 
6 +4 -7 0 0 0 1 6 +12 -10.5 0 1 -10.5 1 6 +8 -16 0 0 0 1 6 +4 -7.5 0 1 4 1 6 +8 0 3 1 8 1 6 +6 -10.5 0 0 0 1 6 +6 -9.75 0 1 -9.75 1 6 +5 -8.75 0 0 0 1 6 +5 -2.5 0 1 -2.5 1 6 +13 0 6 1 0 1 6 +23 0 10 1 0 1 6 +8 -4 0 1 -4 1 6 +9 -11.25 0 1 9 1 6 +5 -6.88 0 1 -6.88 1 6 +4 -4 0 1 4 1 6 +10 -17.5 0 0 0 1 6 +26 0 12 0 12 1 6 +6 -8.25 0 1 -8.25 1 6 +9 -14.63 0 1 9 1 6 +8 -2 0 1 -2 1 6 +10 -6.25 0 1 -6.25 1 6 +8 -14 0 1 8 1 6 +12 0 5 0 5 1 6 +8 -10 0 1 -10 1 6 +30 0 12 1 0 1 6 +5 -7.5 0 1 5 1 6 +5 0 2 1 0 1 6 +6 -3.75 0 1 6 1 6 +6 -6 0 1 -6 1 6 +4 -2 0 1 -2 1 6 +12 -7.5 0 1 -7.5 1 6 +5 -6.25 0 0 0 1 6 +4 -5 0 1 4 1 6 +2 -2.75 0 1 -2.75 1 6 +2 -2 0 1 2 1 6 +6 -9 0 1 -9 1 6 +5 -3.13 0 1 5 1 6 +12 -9 0 1 -9 1 6 +4 -8 0 1 -8 1 6 +4 -1 0 1 4 1 6 +2 0 1 1 2 1 6 +9 -16.88 0 0 0 1 6 +8 -6 0 1 -6 1 6 +2 -0.5 0 1 -0.5 1 6 +6 -7.5 0 1 -7.5 1 6 +8 -3 0 1 8 1 6 +12 -3 0 1 12 1 6 +5 -9.38 0 1 -9.38 1 6 +6 -9.75 0 0 0 1 7 +12 -13.5 0 0 0 1 7 +8 -7 0 1 8 1 7 +10 -7.5 0 1 -7.5 1 7 +2 -2.25 0 1 2 1 7 +6 -8.25 0 0 0 1 7 +10 -16.25 0 0 0 1 7 +3 0 1 1 3 1 7 +4 -3 0 1 4 1 7 +8 -2 0 1 -2 1 7 +4 -2.5 0 1 4 1 7 +5 -5.63 0 1 -5.63 1 7 +5 0 2 0 2 1 7 +30 0 12 1 0 1 7 +9 -4.5 0 1 9 1 7 +4 -7.5 0 0 0 1 7 +26 0 10 1 26 1 7 +10 -6.25 0 1 -6.25 1 7 +2 -4 0 0 0 1 7 +4 -5 0 0 0 1 7 +5 -1.88 0 1 -1.88 1 7 +23 0 10 0 10 1 7 +8 -3 0 1 8 1 7 +8 -12 0 0 0 1 7 +10 -2.5 0 1 -2.5 1 7 +5 -8.13 0 0 0 1 7 +8 -9 0 1 -9 1 7 +2 -3 0 0 0 1 7 +9 -11.25 0 0 0 1 7 +9 -12.38 0 0 0 1 7 +12 -15 0 1 -15 1 7 +8 -10 0 0 0 1 7 +4 -1 0 1 4 1 7 +8 0 3 1 8 1 7 +4 -3.5 0 0 0 1 7 +8 -8 0 1 -8 1 7 +10 -11.25 0 0 0 1 7 +10 -5 0 1 10 1 7 +9 -13.5 0 0 0 1 7 +2 -0.75 0 1 2 1 7 +5 -4.38 0 0 0 1 7 +2 -1.5 0 1 -1.5 1 7 +2 -3.75 0 0 0 1 7 +5 -3.75 0 1 5 1 7 +9 -16.88 0 0 0 1 7 +9 -3.38 0 1 -3.38 1 7 +5 -10 0 0 0 1 7 +26 0 12 1 0 1 7 +5 -9.38 0 0 0 1 7 +6 -1.5 0 1 -1.5 1 7 +10 -10 0 1 10 1 7 +2 -1.25 0 1 -1.25 1 7 +9 -14.63 0 0 0 1 7 +6 -4.5 0 1 -4.5 1 7 +5 -5 0 0 0 1 7 +5 -7.5 0 0 0 1 7 +8 -13 0 0 0 1 7 +5 -3.13 0 1 5 1 7 +8 -5 0 1 -5 1 7 +8 -11 0 0 0 1 7 +6 -6.75 0 0 0 1 7 +2 0 1 1 2 1 7 +9 -5.63 0 0 0 1 7 +6 -6 0 0 0 1 7 +4 -5.5 0 0 0 1 7 +6 -3 0 1 -3 1 7 +12 -19.5 0 0 0 1 7 +10 -13.75 0 0 0 1 7 +10 -8.75 0 0 0 1 7 +5 -6.88 0 0 0 1 7 +6 -7.5 0 0 0 1 7 +10 -12.5 0 0 0 1 7 +9 -6.75 0 1 -6.75 1 7 +4 -6 0 0 0 1 7 +8 -4 0 1 -4 1 7 +2 -1 0 1 2 1 7 +12 -24 0 0 0 1 7 +12 -6 0 1 -6 1 7 +2 -2 0 0 0 1 7 +4 -7 0 0 0 1 7 +12 -9 0 1 -9 1 7 +6 -11.25 0 0 0 1 7 +25 0 10 1 25 1 7 +28 0 13 0 13 1 7 +2 -2.75 0 1 -2.75 1 7 +12 -10.5 0 1 -10.5 1 7 +8 -14 0 0 0 1 7 +4 -6.5 0 0 0 1 7 +4 0 2 1 4 1 7 +10 -15 0 0 0 1 7 +12 0 5 1 0 1 7 +10 -18.75 0 0 0 1 7 +12 -3 0 1 12 1 7 +4 -4 0 0 0 1 7 +9 -7.88 0 0 0 1 7 +9 -2.25 0 1 -2.25 1 7 +2 -1.75 0 0 0 1 7 +12 0 6 1 0 1 7 +5 -2.5 0 1 -2.5 1 7 +4 -4.5 0 0 0 1 7 +8 -6 0 0 0 1 7 +12 -18 0 0 0 1 7 +12 -16.5 0 0 0 1 7 +22 0 10 1 22 1 7 +12 -21 0 0 0 1 7 +12 -4.5 0 1 -4.5 1 7 +12 -12 0 0 0 1 7 +19 0 8 1 19 1 7 +2 -2.5 0 0 0 1 7 +12 0 4 1 12 1 7 +4 -2 0 0 0 1 7 +9 -9 0 1 9 1 7 +9 -10.13 0 0 0 1 7 +6 -2.25 0 1 6 1 7 +2 -0.5 0 1 -0.5 1 7 +10 -3.75 0 1 10 1 7 +13 0 5 1 13 1 7 +4 -1.5 0 1 4 1 7 +5 -1.25 0 1 5 1 7 +6 -9 0 0 0 1 7 +10 -17.5 0 0 0 1 7 +6 -12 0 0 0 1 7 +6 -5.25 0 0 0 1 7 +12 -22.5 0 0 0 1 7 +8 -16 0 0 0 1 7 +9 -15.75 0 0 0 1 7 +10 -20 0 0 0 1 7 +13 0 6 1 0 1 7 +4 -8 0 0 0 1 7 +12 -7.5 0 1 -7.5 1 7 +9 -18 0 0 0 1 7 +2 -3.25 0 0 0 1 7 +7 0 3 1 0 1 7 +6 -3.75 0 0 0 1 7 +5 -6.25 0 0 0 1 7 +8 -15 0 0 0 1 7 +25 0 9 1 0 1 7 +2 -3.5 0 0 0 1 7 +6 -10.5 0 0 0 1 7 +9 -10.13 0 0 0 1 8 +12 -10.5 0 1 -10.5 1 8 +25 0 10 1 25 1 8 +4 -7 0 0 0 1 8 +9 -7.88 0 1 9 1 8 
+5 -3.13 0 1 5 1 8 +5 -8.13 0 0 0 1 8 +8 -7 0 1 8 1 8 +12 -6 0 1 -6 1 8 +12 -24 0 0 0 1 8 +12 -21 0 0 0 1 8 +4 -2.5 0 1 4 1 8 +6 -9 0 0 0 1 8 +10 -15 0 0 0 1 8 +8 -6 0 1 -6 1 8 +13 0 6 1 0 1 8 +6 -12 0 0 0 1 8 +6 -4.5 0 1 -4.5 1 8 +9 -16.88 0 0 0 1 8 +10 -18.75 0 0 0 1 8 +9 -3.38 0 1 -3.38 1 8 +6 -9.75 0 0 0 1 8 +2 -1.75 0 1 2 1 8 +5 0 2 1 0 1 8 +8 -5 0 1 -5 1 8 +8 -9 0 0 0 1 8 +12 0 6 0 6 1 8 +12 0 4 1 12 1 8 +2 -2.5 0 0 0 1 8 +6 -3 0 1 -3 1 8 +10 -7.5 0 1 -7.5 1 8 +5 -2.5 0 1 -2.5 1 8 +5 -3.75 0 0 0 1 8 +10 -3.75 0 1 10 1 8 +2 -3 0 0 0 1 8 +10 -6.25 0 1 -6.25 1 8 +4 -7.5 0 0 0 1 8 +8 -16 0 0 0 1 8 +5 -6.25 0 0 0 1 8 +4 0 2 1 4 1 8 +10 -11.25 0 0 0 1 8 +5 -6.88 0 0 0 1 8 +5 -7.5 0 0 0 1 8 +26 0 12 1 0 1 8 +8 -13 0 0 0 1 8 +4 -4.5 0 1 4 1 8 +8 -10 0 0 0 1 8 +6 -3.75 0 1 6 1 8 +5 -5.63 0 1 -5.63 1 8 +9 -18 0 0 0 1 8 +12 -13.5 0 1 12 1 8 +7 0 3 1 0 1 8 +8 -14 0 0 0 1 8 +2 -1.5 0 1 -1.5 1 8 +10 -2.5 0 1 -2.5 1 8 +13 0 5 1 13 1 8 +9 -15.75 0 0 0 1 8 +8 -12 0 0 0 1 8 +28 0 13 1 0 1 8 +6 -7.5 0 1 -7.5 1 8 +10 -16.25 0 0 0 1 8 +12 -7.5 0 1 -7.5 1 8 +5 -5 0 1 -5 1 8 +2 -2 0 1 2 1 8 +22 0 10 1 22 1 8 +2 -1 0 1 2 1 8 +3 0 1 1 3 1 8 +4 -5.5 0 1 -5.5 1 8 +2 -2.25 0 1 2 1 8 +6 -2.25 0 1 6 1 8 +4 -6.5 0 1 -6.5 1 8 +9 -12.38 0 0 0 1 8 +10 -13.75 0 0 0 1 8 +10 -17.5 0 1 10 1 8 +4 -5 0 1 4 1 8 +9 -11.25 0 0 0 1 8 +10 -10 0 1 10 1 8 +2 -3.25 0 0 0 1 8 +5 -8.75 0 0 0 1 8 +5 -10 0 0 0 1 8 +9 -2.25 0 1 -2.25 1 8 +6 -6.75 0 1 6 1 8 +12 -16.5 0 1 12 1 8 +9 -14.63 0 0 0 1 8 +4 -8 0 0 0 1 8 +6 -5.25 0 1 6 1 8 +9 -6.75 0 1 -6.75 1 8 +12 -12 0 1 -12 1 8 +4 -1 0 1 4 1 8 +12 -15 0 1 -15 1 8 +4 -3.5 0 1 -3.5 1 8 +2 -1.25 0 1 -1.25 1 8 +30 0 12 1 0 1 8 +12 -19.5 0 0 0 1 8 +12 -3 0 1 12 1 8 +5 -1.25 0 1 5 1 8 +5 -1.88 0 1 -1.88 1 8 +2 -3.5 0 0 0 1 8 +12 -9 0 1 -9 1 8 +10 -20 0 0 0 1 8 +8 -4 0 1 -4 1 8 +12 0 5 1 0 1 8 +2 0 1 0 1 1 8 +4 -1.5 0 1 4 1 8 +2 -3.75 0 1 -3.75 1 8 +6 -10.5 0 0 0 1 8 +4 -2 0 1 -2 1 8 +23 0 10 0 10 1 8 +12 -18 0 1 12 1 8 +6 -8.25 0 1 -8.25 1 8 +26 0 10 1 26 1 8 +10 -8.75 0 1 -8.75 1 8 +2 -0.75 0 1 2 1 8 +5 -9.38 0 0 0 1 8 +25 0 9 1 0 1 8 +9 -4.5 0 1 9 1 8 +10 -5 0 1 10 1 8 +2 -4 0 1 -4 1 8 +2 -2.75 0 1 -2.75 1 8 +4 -6 0 1 4 1 8 +10 -12.5 0 1 -12.5 1 8 +12 -22.5 0 0 0 1 8 +4 -4 0 1 4 1 8 +2 -0.5 0 1 -0.5 1 8 +8 -2 0 1 -2 1 8 +4 -3 0 1 4 1 8 +6 -11.25 0 0 0 1 8 +8 -15 0 0 0 1 8 +8 -11 0 1 8 1 8 +12 -4.5 0 1 -4.5 1 8 +19 0 8 1 19 1 8 +6 -6 0 1 -6 1 8 +5 -4.38 0 1 -4.38 1 8 +9 -9 0 1 9 1 8 +6 -1.5 0 1 -1.5 1 8 +9 -13.5 0 0 0 1 8 +9 -5.63 0 1 -5.63 1 8 +8 -8 0 1 -8 1 8 +8 0 3 1 8 1 8 +8 -3 0 1 8 1 8 +9 -11.25 0 1 9 1 9 +8 -16 0 0 0 1 9 +9 -5.63 0 1 -5.63 1 9 +9 -4.5 0 1 9 1 9 +2 -2 0 1 2 1 9 +12 -19.5 0 0 0 1 9 +4 -4.5 0 1 4 1 9 +2 -3.75 0 1 -3.75 1 9 +2 -2.25 0 1 2 1 9 +12 -4.5 0 1 -4.5 1 9 +9 -10.13 0 0 0 1 9 +12 -3 0 1 12 1 9 +10 -17.5 0 0 0 1 9 +5 -4.38 0 1 -4.38 1 9 +5 -7.5 0 1 5 1 9 +6 -11.25 0 0 0 1 9 +2 -1.5 0 1 -1.5 1 9 +9 -6.75 0 1 -6.75 1 9 +4 -7 0 0 0 1 9 +8 -7 0 1 8 1 9 +2 -1.75 0 1 2 1 9 +2 -1 0 1 2 1 9 +10 -6.25 0 1 -6.25 1 9 +6 -6.75 0 1 6 1 9 +9 -2.25 0 1 -2.25 1 9 +2 -0.75 0 1 2 1 9 +12 0 4 1 12 1 9 +6 -3 0 1 -3 1 9 +3 0 1 1 3 1 9 +2 -3 0 1 -3 1 9 +10 -13.75 0 0 0 1 9 +6 -2.25 0 1 6 1 9 +5 -1.88 0 1 -1.88 1 9 +12 -13.5 0 0 0 1 9 +22 0 10 1 22 1 9 +9 -12.38 0 0 0 1 9 +26 0 10 1 26 1 9 +12 -10.5 0 0 0 1 9 +10 -2.5 0 1 -2.5 1 9 +25 0 10 1 25 1 9 +9 -15.75 0 0 0 1 9 +7 0 3 1 0 1 9 +10 -10 0 0 0 1 9 +12 -15 0 0 0 1 9 +12 0 6 1 0 1 9 +6 -4.5 0 1 -4.5 1 9 +8 -13 0 0 0 1 9 +10 -16.25 0 0 0 1 9 +5 -1.25 0 1 5 1 9 +4 -4 0 1 4 1 9 +5 -3.75 0 1 5 1 9 +6 -8.25 0 0 0 1 9 +8 -15 0 0 0 1 9 +8 -8 0 1 
-8 1 9 +2 -2.75 0 1 -2.75 1 9 +6 -12 0 0 0 1 9 +2 0 1 1 2 1 9 +2 -1.25 0 1 -1.25 1 9 +9 -18 0 0 0 1 9 +6 -9 0 0 0 1 9 +10 -8.75 0 0 0 1 9 +4 -7.5 0 0 0 1 9 +13 0 6 1 0 1 9 +10 -11.25 0 0 0 1 9 +4 -3 0 1 4 1 9 +10 -5 0 1 10 1 9 +8 -2 0 1 -2 1 9 +4 -2.5 0 1 4 1 9 +2 -3.5 0 1 -3.5 1 9 +2 -2.5 0 1 2 1 9 +6 -3.75 0 1 6 1 9 +8 -3 0 1 8 1 9 +2 -3.25 0 1 2 1 9 +8 -9 0 1 -9 1 9 +6 -6 0 1 -6 1 9 +8 -11 0 0 0 1 9 +5 -8.75 0 0 0 1 9 +6 -9.75 0 0 0 1 9 +12 -24 0 0 0 1 9 +5 -10 0 0 0 1 9 +30 0 12 1 0 1 9 +12 -18 0 0 0 1 9 +9 -9 0 1 9 1 9 +5 -5 0 1 -5 1 9 +5 -9.38 0 0 0 1 9 +10 -12.5 0 0 0 1 9 +10 -18.75 0 0 0 1 9 +5 -2.5 0 1 -2.5 1 9 +9 -14.63 0 0 0 1 9 +28 0 13 1 0 1 9 +5 -6.88 0 1 -6.88 1 9 +4 -3.5 0 1 -3.5 1 9 +12 -16.5 0 0 0 1 9 +5 -8.13 0 0 0 1 9 +9 -16.88 0 0 0 1 9 +9 -3.38 0 1 -3.38 1 9 +12 0 5 1 0 1 9 +4 -8 0 0 0 1 9 +8 -12 0 0 0 1 9 +8 -4 0 1 -4 1 9 +2 -4 0 1 -4 1 9 +12 -9 0 1 -9 1 9 +4 -1.5 0 1 4 1 9 +6 -10.5 0 0 0 1 9 +5 -3.13 0 1 5 1 9 +10 -15 0 0 0 1 9 +23 0 10 1 0 1 9 +12 -7.5 0 1 -7.5 1 9 +2 -0.5 0 1 -0.5 1 9 +4 0 2 1 4 1 9 +6 -1.5 0 1 -1.5 1 9 +4 -1 0 1 4 1 9 +10 -20 0 0 0 1 9 +12 -22.5 0 0 0 1 9 +25 0 9 1 0 1 9 +13 0 5 1 13 1 9 +6 -5.25 0 1 6 1 9 +9 -13.5 0 0 0 1 9 +5 0 2 1 0 1 9 +12 -6 0 1 -6 1 9 +5 -6.25 0 1 5 1 9 +10 -3.75 0 1 10 1 9 +9 -7.88 0 1 9 1 9 +8 -6 0 1 -6 1 9 +4 -5.5 0 1 -5.5 1 9 +19 0 8 1 19 1 9 +10 -7.5 0 1 -7.5 1 9 +4 -6 0 1 4 1 9 +8 -14 0 0 0 1 9 +8 0 3 1 8 1 9 +12 -21 0 0 0 1 9 +4 -2 0 1 -2 1 9 +4 -5 0 1 4 1 9 +6 -7.5 0 0 0 1 9 +12 -12 0 0 0 1 9 +8 -5 0 1 -5 1 9 +26 0 12 1 0 1 9 +8 -10 0 0 0 1 9 +5 -5.63 0 0 0 1 9 +9 -10.13 0 1 -10.13 1 10 +12 -10.5 0 1 -10.5 1 10 +25 0 10 1 25 1 10 +4 -7 0 1 -7 1 10 +9 -7.88 0 1 9 1 10 +5 -3.13 0 1 5 1 10 +5 -8.13 0 0 0 1 10 +8 -7 0 1 8 1 10 +12 -6 0 1 -6 1 10 +12 -24 0 0 0 1 10 +12 -21 0 0 0 1 10 +4 -2.5 0 1 4 1 10 +6 -9 0 0 0 1 10 +10 -15 0 0 0 1 10 +8 -6 0 1 -6 1 10 +13 0 6 1 0 1 10 +6 -12 0 1 -12 1 10 +6 -4.5 0 1 -4.5 1 10 +9 -16.88 0 1 9 1 10 +10 -18.75 0 1 10 1 10 +9 -3.38 0 1 -3.38 1 10 +6 -9.75 0 1 -9.75 1 10 +2 -1.75 0 1 2 1 10 +5 0 2 1 0 1 10 +8 -5 0 1 -5 1 10 +8 -9 0 1 -9 1 10 +12 0 6 1 0 1 10 +12 0 4 1 12 1 10 +2 -2.5 0 1 2 1 10 +6 -3 0 1 -3 1 10 +10 -7.5 0 1 -7.5 1 10 +5 -2.5 0 1 -2.5 1 10 +5 -3.75 0 1 5 1 10 +10 -3.75 0 1 10 1 10 +2 -3 0 1 -3 1 10 +10 -6.25 0 1 -6.25 1 10 +4 -7.5 0 1 4 1 10 +8 -16 0 1 8 1 10 +5 -6.25 0 1 5 1 10 +4 0 2 1 4 1 10 +10 -11.25 0 1 10 1 10 +5 -6.88 0 1 -6.88 1 10 +5 -7.5 0 1 5 1 10 +26 0 12 0 12 1 10 +8 -13 0 0 0 1 10 +4 -4.5 0 1 4 1 10 +8 -10 0 1 -10 1 10 +6 -3.75 0 1 6 1 10 +5 -5.63 0 1 -5.63 1 10 +9 -18 0 1 9 1 10 +12 -13.5 0 1 12 1 10 +7 0 3 1 0 1 10 +8 -14 0 1 8 1 10 +2 -1.5 0 1 -1.5 1 10 +10 -2.5 0 1 -2.5 1 10 +13 0 5 0 5 1 10 +9 -15.75 0 1 9 1 10 +8 -12 0 1 8 1 10 +28 0 13 0 13 1 10 +6 -7.5 0 1 -7.5 1 10 +10 -16.25 0 1 -16.25 1 10 +12 -7.5 0 1 -7.5 1 10 +5 -5 0 1 -5 1 10 +2 -2 0 1 2 1 10 +22 0 10 1 22 1 10 +2 -1 0 1 2 1 10 +3 0 1 1 3 1 10 +4 -5.5 0 1 -5.5 1 10 +6 -2.25 0 1 6 1 10 +4 -6.5 0 1 -6.5 1 10 +9 -12.38 0 1 9 1 10 +10 -13.75 0 1 -13.75 1 10 +10 -17.5 0 1 10 1 10 +4 -5 0 1 4 1 10 +9 -11.25 0 1 9 1 10 +10 -10 0 1 10 1 10 +2 -3.25 0 1 2 1 10 +5 -8.75 0 1 5 1 10 +5 -10 0 1 5 1 10 +9 -2.25 0 1 -2.25 1 10 +12 -16.5 0 0 0 1 10 +9 -14.63 0 0 0 1 10 +4 -8 0 1 -8 1 10 +6 -5.25 0 1 6 1 10 +9 -6.75 0 1 -6.75 1 10 +12 -12 0 1 -12 1 10 +4 -1 0 1 4 1 10 +12 -15 0 0 0 1 10 +4 -3.5 0 1 -3.5 1 10 +2 -1.25 0 1 -1.25 1 10 +30 0 12 0 12 1 10 +12 -19.5 0 0 0 1 10 +12 -3 0 1 12 1 10 +5 -1.25 0 1 5 1 10 +5 -1.88 0 1 -1.88 1 10 +2 -3.5 0 1 -3.5 1 10 +12 -9 0 1 -9 1 10 +10 -20 0 0 0 1 10 +8 -4 0 1 -4 1 10 
+12 0 5 1 0 1 10 +2 0 1 1 2 1 10 +4 -1.5 0 1 4 1 10 +2 -3.75 0 1 -3.75 1 10 +6 -10.5 0 1 -10.5 1 10 +4 -2 0 1 -2 1 10 +23 0 10 1 0 1 10 +12 -18 0 0 0 1 10 +6 -8.25 0 1 -8.25 1 10 +26 0 10 1 26 1 10 +10 -8.75 0 1 -8.75 1 10 +2 -0.75 0 1 2 1 10 +5 -9.38 0 1 -9.38 1 10 +25 0 9 1 0 1 10 +9 -4.5 0 1 9 1 10 +10 -5 0 1 10 1 10 +2 -4 0 1 -4 1 10 +2 -2.75 0 1 -2.75 1 10 +4 -6 0 1 4 1 10 +10 -12.5 0 1 -12.5 1 10 +12 -22.5 0 1 12 1 10 +4 -4 0 1 4 1 10 +2 -0.5 0 1 -0.5 1 10 +8 -2 0 1 -2 1 10 +4 -3 0 1 4 1 10 +6 -11.25 0 1 6 1 10 +8 -15 0 1 -15 1 10 +8 -11 0 1 8 1 10 +12 -4.5 0 1 -4.5 1 10 +19 0 8 1 19 1 10 +6 -6 0 1 -6 1 10 +5 -4.38 0 1 -4.38 1 10 +9 -9 0 1 9 1 10 +6 -1.5 0 1 -1.5 1 10 +9 -13.5 0 1 -13.5 1 10 +9 -5.63 0 1 -5.63 1 10 +8 -8 0 1 -8 1 10 +8 0 3 1 8 1 10 +8 -3 0 1 8 1 10 +9 -11.25 0 0 0 1 11 +8 -16 0 0 0 1 11 +9 -5.63 0 1 -5.63 1 11 +9 -4.5 0 1 9 1 11 +2 -2 0 1 2 1 11 +12 -19.5 0 0 0 1 11 +4 -4.5 0 0 0 1 11 +2 -3.75 0 0 0 1 11 +2 -2.25 0 0 0 1 11 +12 -4.5 0 1 -4.5 1 11 +9 -10.13 0 0 0 1 11 +12 -3 0 1 12 1 11 +10 -17.5 0 0 0 1 11 +5 -4.38 0 1 -4.38 1 11 +5 -7.5 0 0 0 1 11 +6 -11.25 0 0 0 1 11 +2 -1.5 0 1 -1.5 1 11 +9 -6.75 0 1 -6.75 1 11 +4 -7 0 0 0 1 11 +8 -7 0 1 8 1 11 +2 -1.75 0 1 2 1 11 +2 -1 0 1 2 1 11 +10 -6.25 0 1 -6.25 1 11 +6 -6.75 0 1 6 1 11 +9 -2.25 0 1 -2.25 1 11 +2 -0.75 0 1 2 1 11 +12 0 4 1 12 1 11 +6 -3 0 1 -3 1 11 +3 0 1 0 1 1 11 +2 -3 0 1 -3 1 11 +10 -13.75 0 0 0 1 11 +6 -2.25 0 1 6 1 11 +5 -1.88 0 1 -1.88 1 11 +12 -13.5 0 1 12 1 11 +22 0 10 1 22 1 11 +9 -12.38 0 0 0 1 11 +26 0 10 0 10 1 11 +10 -2.5 0 1 -2.5 1 11 +25 0 10 1 25 1 11 +9 -15.75 0 0 0 1 11 +7 0 3 0 3 1 11 +10 -10 0 1 10 1 11 +12 -15 0 0 0 1 11 +12 0 6 0 6 1 11 +6 -4.5 0 1 -4.5 1 11 +8 -13 0 0 0 1 11 +10 -16.25 0 0 0 1 11 +5 -1.25 0 1 5 1 11 +4 -4 0 1 4 1 11 +5 -3.75 0 1 5 1 11 +6 -8.25 0 0 0 1 11 +8 -15 0 0 0 1 11 +8 -8 0 0 0 1 11 +2 -2.75 0 0 0 1 11 +6 -12 0 0 0 1 11 +2 0 1 0 1 1 11 +2 -1.25 0 0 0 1 11 +9 -18 0 0 0 1 11 +6 -9 0 0 0 1 11 +10 -8.75 0 1 -8.75 1 11 +4 -7.5 0 0 0 1 11 +13 0 6 1 0 1 11 +10 -11.25 0 0 0 1 11 +4 -3 0 1 4 1 11 +10 -5 0 1 10 1 11 +8 -2 0 1 -2 1 11 +4 -2.5 0 1 4 1 11 +2 -3.5 0 0 0 1 11 +2 -2.5 0 0 0 1 11 +6 -3.75 0 1 6 1 11 +8 -3 0 1 8 1 11 +2 -3.25 0 0 0 1 11 +8 -9 0 0 0 1 11 +6 -6 0 1 -6 1 11 +8 -11 0 0 0 1 11 +5 -8.75 0 0 0 1 11 +6 -9.75 0 0 0 1 11 +12 -24 0 0 0 1 11 +4 -6.5 0 0 0 1 11 +5 -10 0 0 0 1 11 +30 0 12 1 0 1 11 +12 -18 0 0 0 1 11 +9 -9 0 0 0 1 11 +5 -5 0 1 -5 1 11 +5 -9.38 0 0 0 1 11 +10 -12.5 0 0 0 1 11 +10 -18.75 0 0 0 1 11 +5 -2.5 0 1 -2.5 1 11 +9 -14.63 0 0 0 1 11 +28 0 13 0 13 1 11 +5 -6.88 0 0 0 1 11 +4 -3.5 0 1 -3.5 1 11 +12 -16.5 0 0 0 1 11 +5 -8.13 0 0 0 1 11 +9 -16.88 0 0 0 1 11 +9 -3.38 0 1 -3.38 1 11 +12 0 5 0 5 1 11 +4 -8 0 0 0 1 11 +8 -12 0 0 0 1 11 +8 -4 0 1 -4 1 11 +2 -4 0 0 0 1 11 +12 -9 0 1 -9 1 11 +4 -1.5 0 1 4 1 11 +6 -10.5 0 0 0 1 11 +5 -3.13 0 1 5 1 11 +10 -15 0 0 0 1 11 +23 0 10 0 10 1 11 +12 -7.5 0 1 -7.5 1 11 +2 -0.5 0 1 -0.5 1 11 +4 0 2 0 2 1 11 +6 -1.5 0 1 -1.5 1 11 +4 -1 0 1 4 1 11 +10 -20 0 0 0 1 11 +12 -22.5 0 0 0 1 11 +25 0 9 1 0 1 11 +13 0 5 0 5 1 11 +6 -5.25 0 1 6 1 11 +9 -13.5 0 0 0 1 11 +5 0 2 1 0 1 11 +12 -6 0 1 -6 1 11 +5 -6.25 0 0 0 1 11 +10 -3.75 0 1 10 1 11 +9 -7.88 0 1 9 1 11 +8 -6 0 1 -6 1 11 +4 -5.5 0 0 0 1 11 +19 0 8 1 19 1 11 +10 -7.5 0 1 -7.5 1 11 +4 -6 0 0 0 1 11 +8 -14 0 0 0 1 11 +8 0 3 1 8 1 11 +12 -21 0 0 0 1 11 +4 -2 0 1 -2 1 11 +4 -5 0 0 0 1 11 +6 -7.5 0 0 0 1 11 +12 -12 0 0 0 1 11 +8 -5 0 1 -5 1 11 +26 0 12 0 12 1 11 +8 -10 0 0 0 1 11 +5 -5.63 0 0 0 1 11 +2 -1 0 1 2 1 12 +9 -6.75 0 1 -6.75 1 12 +2 -4 0 0 0 1 12 +2 -3.25 0 
0 0 1 12 +4 -6.5 0 0 0 1 12 +5 -5.63 0 0 0 1 12 +8 -8 0 1 -8 1 12 +12 -18 0 1 12 1 12 +2 -2.5 0 0 0 1 12 +3 0 1 0 1 1 12 +12 -16.5 0 1 12 1 12 +10 -12.5 0 0 0 1 12 +5 -1.25 0 1 5 1 12 +19 0 8 0 8 1 12 +8 -9 0 0 0 1 12 +5 -10 0 0 0 1 12 +25 0 10 0 10 1 12 +7 0 3 1 0 1 12 +6 -11.25 0 0 0 1 12 +6 -1.5 0 1 -1.5 1 12 +4 -1.5 0 1 4 1 12 +10 -5 0 1 10 1 12 +10 -3.75 0 0 0 1 12 +6 -4.5 0 0 0 1 12 +12 -19.5 0 0 0 1 12 +5 -4.38 0 0 0 1 12 +8 -11 0 0 0 1 12 +2 -0.75 0 1 2 1 12 +2 -1.5 0 1 -1.5 1 12 +6 -6.75 0 1 6 1 12 +4 -6 0 1 4 1 12 +10 -16.25 0 1 -16.25 1 12 +12 -15 0 1 -15 1 12 +6 -5.25 0 1 6 1 12 +12 -21 0 0 0 1 12 +4 -3 0 0 0 1 12 +12 -22.5 0 1 12 1 12 +2 -3.75 0 0 0 1 12 +6 -12 0 0 0 1 12 +5 -8.13 0 1 5 1 12 +10 -8.75 0 1 -8.75 1 12 +12 -6 0 1 -6 1 12 +5 -5 0 1 -5 1 12 +22 0 10 0 10 1 12 +12 -13.5 0 1 12 1 12 +8 -7 0 1 8 1 12 +4 -3.5 0 0 0 1 12 +9 -12.38 0 0 0 1 12 +10 -7.5 0 1 -7.5 1 12 +26 0 10 1 26 1 12 +12 -4.5 0 1 -4.5 1 12 +8 -15 0 1 -15 1 12 +2 -1.75 0 1 2 1 12 +12 0 6 0 6 1 12 +9 -3.38 0 1 -3.38 1 12 +2 -3 0 0 0 1 12 +9 -5.63 0 1 -5.63 1 12 +2 -3.5 0 0 0 1 12 +8 -12 0 1 8 1 12 +10 -18.75 0 1 10 1 12 +4 0 2 0 2 1 12 +2 -2.25 0 0 0 1 12 +9 -2.25 0 1 -2.25 1 12 +10 -13.75 0 1 -13.75 1 12 +28 0 13 1 0 1 12 +4 -2.5 0 1 4 1 12 +9 -15.75 0 1 9 1 12 +10 -15 0 0 0 1 12 +10 -10 0 1 10 1 12 +9 -18 0 0 0 1 12 +12 -24 0 1 -24 1 12 +13 0 5 1 13 1 12 +5 -1.88 0 1 -1.88 1 12 +4 -4.5 0 0 0 1 12 +9 -7.88 0 1 9 1 12 +9 -9 0 0 0 1 12 +25 0 9 0 9 1 12 +12 -12 0 0 0 1 12 +6 -2.25 0 1 6 1 12 +8 -5 0 0 0 1 12 +4 -5.5 0 0 0 1 12 +2 -1.25 0 0 0 1 12 +9 -13.5 0 0 0 1 12 +9 -4.5 0 1 9 1 12 +10 -11.25 0 0 0 1 12 +6 -3 0 1 -3 1 12 +10 -2.5 0 1 -2.5 1 12 +12 0 4 1 12 1 12 +10 -20 0 0 0 1 12 +5 -3.75 0 0 0 1 12 +9 -10.13 0 0 0 1 12 +4 -7 0 0 0 1 12 +12 -10.5 0 1 -10.5 1 12 +8 -16 0 1 8 1 12 +4 -7.5 0 0 0 1 12 +8 0 3 1 8 1 12 +6 -10.5 0 0 0 1 12 +6 -9.75 0 0 0 1 12 +5 -8.75 0 0 0 1 12 +5 -2.5 0 1 -2.5 1 12 +13 0 6 0 6 1 12 +23 0 10 0 10 1 12 +8 -4 0 1 -4 1 12 +9 -11.25 0 0 0 1 12 +5 -6.88 0 0 0 1 12 +4 -4 0 0 0 1 12 +10 -17.5 0 0 0 1 12 +8 -13 0 0 0 1 12 +26 0 12 0 12 1 12 +6 -8.25 0 0 0 1 12 +9 -14.63 0 0 0 1 12 +8 -2 0 1 -2 1 12 +10 -6.25 0 1 -6.25 1 12 +8 -14 0 0 0 1 12 +12 0 5 1 0 1 12 +8 -10 0 0 0 1 12 +30 0 12 1 0 1 12 +5 -7.5 0 0 0 1 12 +5 0 2 1 0 1 12 +6 -3.75 0 1 6 1 12 +6 -6 0 0 0 1 12 +4 -2 0 1 -2 1 12 +12 -7.5 0 1 -7.5 1 12 +5 -6.25 0 1 5 1 12 +4 -5 0 0 0 1 12 +2 -2.75 0 0 0 1 12 +2 -2 0 0 0 1 12 +6 -9 0 0 0 1 12 +5 -3.13 0 1 5 1 12 +12 -9 0 0 0 1 12 +4 -8 0 0 0 1 12 +4 -1 0 1 4 1 12 +2 0 1 0 1 1 12 +9 -16.88 0 1 9 1 12 +8 -6 0 0 0 1 12 +2 -0.5 0 1 -0.5 1 12 +6 -7.5 0 0 0 1 12 +8 -3 0 1 8 1 12 +12 -3 0 1 12 1 12 +5 -9.38 0 0 0 1 12 +6 -9.75 0 0 0 1 13 +12 -13.5 0 1 12 1 13 +8 -7 0 1 8 1 13 +10 -7.5 0 1 -7.5 1 13 +2 -2.25 0 0 0 1 13 +6 -8.25 0 0 0 1 13 +10 -16.25 0 0 0 1 13 +3 0 1 1 3 1 13 +4 -3 0 1 4 1 13 +8 -2 0 1 -2 1 13 +4 -2.5 0 1 4 1 13 +5 -5.63 0 0 0 1 13 +5 0 2 1 0 1 13 +30 0 12 0 12 1 13 +9 -4.5 0 1 9 1 13 +4 -7.5 0 0 0 1 13 +26 0 10 0 10 1 13 +10 -6.25 0 1 -6.25 1 13 +2 -4 0 0 0 1 13 +4 -5 0 0 0 1 13 +5 -1.88 0 1 -1.88 1 13 +23 0 10 0 10 1 13 +8 -3 0 1 8 1 13 +8 -12 0 0 0 1 13 +10 -2.5 0 1 -2.5 1 13 +5 -8.13 0 0 0 1 13 +8 -9 0 0 0 1 13 +2 -3 0 0 0 1 13 +9 -11.25 0 0 0 1 13 +9 -12.38 0 0 0 1 13 +12 -15 0 0 0 1 13 +8 -10 0 0 0 1 13 +4 -1 0 1 4 1 13 +8 0 3 0 3 1 13 +4 -3.5 0 0 0 1 13 +8 -8 0 1 -8 1 13 +10 -11.25 0 0 0 1 13 +10 -5 0 1 10 1 13 +9 -13.5 0 0 0 1 13 +2 -0.75 0 1 2 1 13 +5 -4.38 0 1 -4.38 1 13 +2 -1.5 0 1 -1.5 1 13 +2 -3.75 0 0 0 1 13 +5 -3.75 0 1 5 1 13 +9 -16.88 0 0 0 1 13 +9 -3.38 0 1 
-3.38 1 13 +5 -10 0 0 0 1 13 +26 0 12 0 12 1 13 +5 -9.38 0 0 0 1 13 +6 -1.5 0 1 -1.5 1 13 +10 -10 0 0 0 1 13 +2 -1.25 0 0 0 1 13 +9 -14.63 0 0 0 1 13 +6 -4.5 0 1 -4.5 1 13 +5 -5 0 1 -5 1 13 +5 -7.5 0 0 0 1 13 +8 -13 0 0 0 1 13 +5 -3.13 0 0 0 1 13 +8 -5 0 0 0 1 13 +8 -11 0 0 0 1 13 +6 -6.75 0 0 0 1 13 +5 -8.75 0 0 0 1 13 +2 0 1 0 1 1 13 +6 -6 0 0 0 1 13 +4 -5.5 0 0 0 1 13 +6 -3 0 1 -3 1 13 +12 -19.5 0 0 0 1 13 +10 -13.75 0 0 0 1 13 +10 -8.75 0 0 0 1 13 +5 -6.88 0 0 0 1 13 +6 -7.5 0 0 0 1 13 +10 -12.5 0 0 0 1 13 +9 -6.75 0 1 -6.75 1 13 +8 -4 0 1 -4 1 13 +2 -1 0 1 2 1 13 +12 -24 0 0 0 1 13 +12 -6 0 1 -6 1 13 +2 -2 0 0 0 1 13 +4 -7 0 0 0 1 13 +12 -9 0 1 -9 1 13 +6 -11.25 0 0 0 1 13 +25 0 10 0 10 1 13 +28 0 13 0 13 1 13 +2 -2.75 0 0 0 1 13 +12 -10.5 0 0 0 1 13 +8 -14 0 0 0 1 13 +4 -6.5 0 0 0 1 13 +4 0 2 0 2 1 13 +10 -15 0 0 0 1 13 +12 0 5 0 5 1 13 +10 -18.75 0 0 0 1 13 +12 -3 0 1 12 1 13 +4 -4 0 0 0 1 13 +9 -7.88 0 0 0 1 13 +9 -2.25 0 0 0 1 13 +2 -1.75 0 0 0 1 13 +12 0 6 0 6 1 13 +5 -2.5 0 0 0 1 13 +4 -4.5 0 0 0 1 13 +8 -6 0 0 0 1 13 +12 -18 0 0 0 1 13 +12 -16.5 0 0 0 1 13 +22 0 10 0 10 1 13 +12 -21 0 0 0 1 13 +12 -4.5 0 0 0 1 13 +12 -12 0 0 0 1 13 +19 0 8 0 8 1 13 +2 -2.5 0 0 0 1 13 +12 0 4 0 4 1 13 +4 -2 0 0 0 1 13 +9 -9 0 0 0 1 13 +9 -10.13 0 0 0 1 13 +6 -2.25 0 1 6 1 13 +2 -0.5 0 1 -0.5 1 13 +10 -3.75 0 1 10 1 13 +13 0 5 0 5 1 13 +4 -1.5 0 1 4 1 13 +5 -1.25 0 1 5 1 13 +6 -9 0 0 0 1 13 +10 -17.5 0 0 0 1 13 +6 -12 0 0 0 1 13 +6 -5.25 0 0 0 1 13 +12 -22.5 0 0 0 1 13 +8 -16 0 0 0 1 13 +9 -15.75 0 0 0 1 13 +10 -20 0 0 0 1 13 +13 0 6 0 6 1 13 +4 -8 0 0 0 1 13 +12 -7.5 0 0 0 1 13 +9 -18 0 0 0 1 13 +2 -3.25 0 0 0 1 13 +7 0 3 0 3 1 13 +6 -3.75 0 1 6 1 13 +5 -6.25 0 0 0 1 13 +8 -15 0 0 0 1 13 +25 0 9 0 9 1 13 +2 -3.5 0 0 0 1 13 +6 -10.5 0 0 0 1 13 +9 -11.25 0 0 0 1 14 +8 -16 0 0 0 1 14 +9 -5.63 0 1 -5.63 1 14 +9 -4.5 0 1 9 1 14 +2 -2 0 1 2 1 14 +12 -19.5 0 0 0 1 14 +4 -4.5 0 0 0 1 14 +2 -3.75 0 0 0 1 14 +2 -2.25 0 0 0 1 14 +12 -4.5 0 1 -4.5 1 14 +9 -10.13 0 0 0 1 14 +12 -3 0 1 12 1 14 +10 -17.5 0 0 0 1 14 +5 -4.38 0 1 -4.38 1 14 +5 -7.5 0 1 5 1 14 +6 -11.25 0 0 0 1 14 +2 -1.5 0 1 -1.5 1 14 +9 -6.75 0 1 -6.75 1 14 +4 -7 0 0 0 1 14 +8 -7 0 1 8 1 14 +2 -1.75 0 1 2 1 14 +2 -1 0 1 2 1 14 +10 -6.25 0 1 -6.25 1 14 +6 -6.75 0 1 6 1 14 +9 -2.25 0 1 -2.25 1 14 +2 -0.75 0 1 2 1 14 +12 0 4 1 12 1 14 +6 -3 0 1 -3 1 14 +3 0 1 1 3 1 14 +2 -3 0 0 0 1 14 +10 -13.75 0 1 -13.75 1 14 +6 -2.25 0 1 6 1 14 +5 -1.88 0 1 -1.88 1 14 +12 -13.5 0 1 12 1 14 +22 0 10 0 10 1 14 +9 -12.38 0 1 9 1 14 +26 0 10 1 26 1 14 +12 -10.5 0 1 -10.5 1 14 +10 -2.5 0 1 -2.5 1 14 +25 0 10 0 10 1 14 +9 -15.75 0 1 9 1 14 +7 0 3 1 0 1 14 +10 -10 0 1 10 1 14 +12 -15 0 0 0 1 14 +12 0 6 1 0 1 14 +6 -4.5 0 1 -4.5 1 14 +8 -13 0 0 0 1 14 +10 -16.25 0 0 0 1 14 +5 -1.25 0 1 5 1 14 +4 -4 0 1 4 1 14 +5 -3.75 0 1 5 1 14 +6 -8.25 0 1 -8.25 1 14 +8 -15 0 1 -15 1 14 +8 -8 0 1 -8 1 14 +2 -2.75 0 0 0 1 14 +6 -12 0 1 -12 1 14 +2 0 1 1 2 1 14 +2 -1.25 0 1 -1.25 1 14 +9 -18 0 1 9 1 14 +6 -9 0 0 0 1 14 +10 -8.75 0 1 -8.75 1 14 +4 -7.5 0 0 0 1 14 +13 0 6 1 0 1 14 +10 -11.25 0 1 10 1 14 +4 -3 0 0 0 1 14 +10 -5 0 1 10 1 14 +8 -2 0 1 -2 1 14 +4 -2.5 0 0 0 1 14 +2 -3.5 0 0 0 1 14 +2 -2.5 0 1 2 1 14 +6 -3.75 0 1 6 1 14 +8 -3 0 1 8 1 14 +2 -3.25 0 0 0 1 14 +8 -9 0 1 -9 1 14 +6 -6 0 1 -6 1 14 +8 -11 0 1 8 1 14 +5 -8.75 0 0 0 1 14 +6 -9.75 0 0 0 1 14 +12 -24 0 1 -24 1 14 +4 -6.5 0 1 -6.5 1 14 +5 -10 0 1 5 1 14 +30 0 12 1 0 1 14 +12 -18 0 1 12 1 14 +9 -9 0 0 0 1 14 +5 -5 0 1 -5 1 14 +5 -9.38 0 1 -9.38 1 14 +10 -12.5 0 1 -12.5 1 14 +10 -18.75 0 0 0 1 14 +5 -2.5 0 1 -2.5 1 14 +9 
-14.63 0 1 9 1 14 +28 0 13 1 0 1 14 +5 -6.88 0 1 -6.88 1 14 +4 -3.5 0 1 -3.5 1 14 +12 -16.5 0 1 12 1 14 +5 -8.13 0 0 0 1 14 +9 -16.88 0 0 0 1 14 +9 -3.38 0 1 -3.38 1 14 +12 0 5 1 0 1 14 +4 -8 0 0 0 1 14 +8 -12 0 1 8 1 14 +8 -4 0 1 -4 1 14 +2 -4 0 0 0 1 14 +12 -9 0 1 -9 1 14 +4 -1.5 0 1 4 1 14 +6 -10.5 0 0 0 1 14 +5 -3.13 0 1 5 1 14 +10 -15 0 1 10 1 14 +23 0 10 0 10 1 14 +12 -7.5 0 1 -7.5 1 14 +2 -0.5 0 1 -0.5 1 14 +4 0 2 0 2 1 14 +6 -1.5 0 1 -1.5 1 14 +4 -1 0 1 4 1 14 +10 -20 0 0 0 1 14 +12 -22.5 0 1 12 1 14 +25 0 9 0 9 1 14 +13 0 5 1 13 1 14 +6 -5.25 0 1 6 1 14 +9 -13.5 0 0 0 1 14 +5 0 2 0 2 1 14 +12 -6 0 1 -6 1 14 +5 -6.25 0 1 5 1 14 +10 -3.75 0 1 10 1 14 +9 -7.88 0 0 0 1 14 +8 -6 0 1 -6 1 14 +4 -5.5 0 0 0 1 14 +19 0 8 0 8 1 14 +10 -7.5 0 1 -7.5 1 14 +4 -6 0 0 0 1 14 +8 -14 0 0 0 1 14 +8 0 3 0 3 1 14 +12 -21 0 0 0 1 14 +4 -2 0 1 -2 1 14 +4 -5 0 1 4 1 14 +6 -7.5 0 1 -7.5 1 14 +12 -12 0 1 -12 1 14 +8 -5 0 1 -5 1 14 +26 0 12 0 12 1 14 +8 -10 0 1 -10 1 14 +5 -5.63 0 1 -5.63 1 14 +2 -1 0 1 2 1 15 +9 -6.75 0 1 -6.75 1 15 +2 -4 0 0 0 1 15 +2 -3.25 0 1 2 1 15 +4 -6.5 0 1 -6.5 1 15 +5 -5.63 0 1 -5.63 1 15 +8 -8 0 1 -8 1 15 +12 -18 0 1 12 1 15 +2 -2.5 0 1 2 1 15 +3 0 1 0 1 1 15 +12 -16.5 0 1 12 1 15 +10 -12.5 0 1 -12.5 1 15 +5 -1.25 0 1 5 1 15 +19 0 8 0 8 1 15 +8 -9 0 1 -9 1 15 +5 -10 0 1 5 1 15 +25 0 10 1 25 1 15 +7 0 3 0 3 1 15 +6 -11.25 0 0 0 1 15 +6 -1.5 0 1 -1.5 1 15 +4 -1.5 0 1 4 1 15 +10 -5 0 1 10 1 15 +10 -3.75 0 1 10 1 15 +6 -4.5 0 1 -4.5 1 15 +12 -19.5 0 1 12 1 15 +5 -4.38 0 1 -4.38 1 15 +8 -11 0 0 0 1 15 +2 -0.75 0 1 2 1 15 +2 -1.5 0 1 -1.5 1 15 +6 -6.75 0 1 6 1 15 +4 -6 0 1 4 1 15 +10 -16.25 0 1 -16.25 1 15 +12 -15 0 1 -15 1 15 +6 -5.25 0 1 6 1 15 +12 -21 0 1 12 1 15 +4 -3 0 1 4 1 15 +12 -22.5 0 0 0 1 15 +2 -3.75 0 1 -3.75 1 15 +6 -12 0 0 0 1 15 +5 -8.13 0 1 5 1 15 +10 -8.75 0 1 -8.75 1 15 +12 -6 0 1 -6 1 15 +5 -5 0 1 -5 1 15 +22 0 10 1 22 1 15 +12 -13.5 0 1 12 1 15 +8 -7 0 0 0 1 15 +4 -3.5 0 1 -3.5 1 15 +9 -12.38 0 1 9 1 15 +10 -7.5 0 1 -7.5 1 15 +26 0 10 0 10 1 15 +12 -4.5 0 1 -4.5 1 15 +8 -15 0 1 -15 1 15 +2 -1.75 0 1 2 1 15 +12 0 6 1 0 1 15 +9 -3.38 0 1 -3.38 1 15 +2 -3 0 1 -3 1 15 +9 -5.63 0 1 -5.63 1 15 +2 -3.5 0 1 -3.5 1 15 +8 -12 0 1 8 1 15 +10 -18.75 0 1 10 1 15 +4 0 2 1 4 1 15 +2 -2.25 0 1 2 1 15 +9 -2.25 0 1 -2.25 1 15 +10 -13.75 0 1 -13.75 1 15 +28 0 13 1 0 1 15 +4 -2.5 0 1 4 1 15 +9 -15.75 0 0 0 1 15 +10 -15 0 0 0 1 15 +10 -10 0 1 10 1 15 +9 -18 0 0 0 1 15 +12 -24 0 0 0 1 15 +13 0 5 0 5 1 15 +5 -1.88 0 1 -1.88 1 15 +4 -4.5 0 1 4 1 15 +9 -7.88 0 1 9 1 15 +9 -9 0 1 9 1 15 +25 0 9 1 0 1 15 +12 -12 0 1 -12 1 15 +6 -2.25 0 1 6 1 15 +8 -5 0 1 -5 1 15 +4 -5.5 0 1 -5.5 1 15 +2 -1.25 0 1 -1.25 1 15 +9 -13.5 0 1 -13.5 1 15 +9 -4.5 0 1 9 1 15 +10 -11.25 0 1 10 1 15 +6 -3 0 1 -3 1 15 +10 -2.5 0 1 -2.5 1 15 +12 0 4 1 12 1 15 +10 -20 0 0 0 1 15 +5 -3.75 0 1 5 1 15 +9 -10.13 0 1 -10.13 1 15 +4 -7 0 1 -7 1 15 +12 -10.5 0 1 -10.5 1 15 +8 -16 0 1 8 1 15 +4 -7.5 0 1 4 1 15 +8 0 3 0 3 1 15 +6 -10.5 0 0 0 1 15 +6 -9.75 0 1 -9.75 1 15 +5 -8.75 0 1 5 1 15 +5 -2.5 0 1 -2.5 1 15 +13 0 6 0 6 1 15 +23 0 10 1 0 1 15 +8 -4 0 1 -4 1 15 +9 -11.25 0 1 9 1 15 +5 -6.88 0 1 -6.88 1 15 +4 -4 0 1 4 1 15 +10 -17.5 0 0 0 1 15 +8 -13 0 1 8 1 15 +26 0 12 0 12 1 15 +6 -8.25 0 0 0 1 15 +9 -14.63 0 0 0 1 15 +8 -2 0 1 -2 1 15 +10 -6.25 0 1 -6.25 1 15 +8 -14 0 1 8 1 15 +12 0 5 1 0 1 15 +8 -10 0 1 -10 1 15 +30 0 12 1 0 1 15 +5 -7.5 0 1 5 1 15 +5 0 2 1 0 1 15 +6 -3.75 0 1 6 1 15 +6 -6 0 1 -6 1 15 +4 -2 0 1 -2 1 15 +12 -7.5 0 1 -7.5 1 15 +5 -6.25 0 1 5 1 15 +4 -5 0 1 4 1 15 +2 -2.75 0 1 -2.75 1 15 +2 -2 0 1 2 1 15 +6 -9 
0 1 -9 1 15 +5 -3.13 0 1 5 1 15 +12 -9 0 1 -9 1 15 +4 -8 0 1 -8 1 15 +4 -1 0 1 4 1 15 +2 0 1 0 1 1 15 +9 -16.88 0 1 9 1 15 +8 -6 0 1 -6 1 15 +2 -0.5 0 1 -0.5 1 15 +6 -7.5 0 1 -7.5 1 15 +8 -3 0 1 8 1 15 +12 -3 0 1 12 1 15 +5 -9.38 0 0 0 1 15 +6 -9.75 0 0 0 1 16 +12 -13.5 0 1 12 1 16 +8 -7 0 0 0 1 16 +10 -7.5 0 1 -7.5 1 16 +2 -2.25 0 1 2 1 16 +6 -8.25 0 0 0 1 16 +10 -16.25 0 1 -16.25 1 16 +3 0 1 1 3 1 16 +4 -3 0 1 4 1 16 +8 -2 0 1 -2 1 16 +4 -2.5 0 1 4 1 16 +5 -5.63 0 0 0 1 16 +5 0 2 0 2 1 16 +30 0 12 1 0 1 16 +9 -4.5 0 1 9 1 16 +4 -7.5 0 0 0 1 16 +26 0 10 1 26 1 16 +10 -6.25 0 1 -6.25 1 16 +2 -4 0 1 -4 1 16 +4 -5 0 0 0 1 16 +5 -1.88 0 1 -1.88 1 16 +23 0 10 1 0 1 16 +8 -3 0 1 8 1 16 +8 -12 0 1 8 1 16 +10 -2.5 0 0 0 1 16 +5 -8.13 0 0 0 1 16 +8 -9 0 1 -9 1 16 +2 -3 0 1 -3 1 16 +9 -11.25 0 1 9 1 16 +9 -12.38 0 0 0 1 16 +12 -15 0 1 -15 1 16 +8 -10 0 0 0 1 16 +4 -1 0 1 4 1 16 +8 0 3 1 8 1 16 +4 -3.5 0 0 0 1 16 +8 -8 0 0 0 1 16 +10 -11.25 0 0 0 1 16 +10 -5 0 1 10 1 16 +9 -13.5 0 0 0 1 16 +2 -0.75 0 1 2 1 16 +5 -4.38 0 1 -4.38 1 16 +2 -1.5 0 1 -1.5 1 16 +2 -3.75 0 1 -3.75 1 16 +5 -3.75 0 1 5 1 16 +9 -16.88 0 0 0 1 16 +9 -3.38 0 1 -3.38 1 16 +5 -10 0 0 0 1 16 +26 0 12 1 0 1 16 +5 -9.38 0 0 0 1 16 +6 -1.5 0 1 -1.5 1 16 +10 -10 0 1 10 1 16 +2 -1.25 0 1 -1.25 1 16 +9 -14.63 0 0 0 1 16 +6 -4.5 0 1 -4.5 1 16 +5 -5 0 1 -5 1 16 +5 -7.5 0 0 0 1 16 +8 -13 0 0 0 1 16 +5 -3.13 0 1 5 1 16 +8 -5 0 1 -5 1 16 +8 -11 0 1 8 1 16 +6 -6.75 0 1 6 1 16 +5 -8.75 0 1 5 1 16 +2 0 1 1 2 1 16 +9 -5.63 0 1 -5.63 1 16 +6 -6 0 1 -6 1 16 +4 -5.5 0 0 0 1 16 +6 -3 0 1 -3 1 16 +12 -19.5 0 1 12 1 16 +10 -13.75 0 0 0 1 16 +10 -8.75 0 1 -8.75 1 16 +5 -6.88 0 1 -6.88 1 16 +6 -7.5 0 0 0 1 16 +10 -12.5 0 1 -12.5 1 16 +9 -6.75 0 1 -6.75 1 16 +4 -6 0 1 4 1 16 +8 -4 0 1 -4 1 16 +2 -1 0 1 2 1 16 +12 -24 0 0 0 1 16 +12 -6 0 1 -6 1 16 +2 -2 0 1 2 1 16 +4 -7 0 0 0 1 16 +12 -9 0 1 -9 1 16 +6 -11.25 0 0 0 1 16 +25 0 10 1 25 1 16 +28 0 13 1 0 1 16 +2 -2.75 0 1 -2.75 1 16 +12 -10.5 0 0 0 1 16 +8 -14 0 0 0 1 16 +4 -6.5 0 0 0 1 16 +4 0 2 1 4 1 16 +10 -15 0 0 0 1 16 +12 0 5 1 0 1 16 +10 -18.75 0 0 0 1 16 +12 -3 0 1 12 1 16 +4 -4 0 1 4 1 16 +9 -7.88 0 1 9 1 16 +9 -2.25 0 1 -2.25 1 16 +2 -1.75 0 1 2 1 16 +12 0 6 1 0 1 16 +5 -2.5 0 1 -2.5 1 16 +4 -4.5 0 1 4 1 16 +8 -6 0 1 -6 1 16 +12 -18 0 0 0 1 16 +12 -16.5 0 1 12 1 16 +22 0 10 0 10 1 16 +12 -21 0 0 0 1 16 +12 -4.5 0 1 -4.5 1 16 +12 -12 0 1 -12 1 16 +19 0 8 1 19 1 16 +2 -2.5 0 1 2 1 16 +12 0 4 1 12 1 16 +4 -2 0 1 -2 1 16 +9 -9 0 0 0 1 16 +9 -10.13 0 0 0 1 16 +6 -2.25 0 1 6 1 16 +2 -0.5 0 1 -0.5 1 16 +10 -3.75 0 1 10 1 16 +13 0 5 1 13 1 16 +4 -1.5 0 1 4 1 16 +5 -1.25 0 1 5 1 16 +6 -9 0 1 -9 1 16 +10 -17.5 0 0 0 1 16 +6 -12 0 0 0 1 16 +6 -5.25 0 1 6 1 16 +12 -22.5 0 1 12 1 16 +8 -16 0 0 0 1 16 +9 -15.75 0 1 9 1 16 +10 -20 0 1 10 1 16 +13 0 6 1 0 1 16 +4 -8 0 0 0 1 16 +12 -7.5 0 1 -7.5 1 16 +9 -18 0 0 0 1 16 +2 -3.25 0 1 2 1 16 +7 0 3 1 0 1 16 +6 -3.75 0 1 6 1 16 +5 -6.25 0 1 5 1 16 +8 -15 0 0 0 1 16 +25 0 9 1 0 1 16 +2 -3.5 0 1 -3.5 1 16 +6 -10.5 0 1 -10.5 1 16 +9 -10.13 0 0 0 1 17 +12 -10.5 0 0 0 1 17 +25 0 10 0 10 1 17 +4 -7 0 0 0 1 17 +9 -7.88 0 0 0 1 17 +5 -3.13 0 0 0 1 17 +5 -8.13 0 0 0 1 17 +8 -7 0 0 0 1 17 +12 -6 0 0 0 1 17 +12 -24 0 0 0 1 17 +12 -21 0 0 0 1 17 +4 -2.5 0 0 0 1 17 +6 -9 0 0 0 1 17 +10 -15 0 0 0 1 17 +8 -6 0 0 0 1 17 +13 0 6 1 0 1 17 +6 -12 0 0 0 1 17 +6 -4.5 0 0 0 1 17 +9 -16.88 0 0 0 1 17 +10 -18.75 0 0 0 1 17 +9 -3.38 0 1 -3.38 1 17 +6 -9.75 0 0 0 1 17 +2 -1.75 0 0 0 1 17 +5 0 2 1 0 1 17 +8 -5 0 0 0 1 17 +8 -9 0 0 0 1 17 +12 0 6 1 0 1 17 +12 0 4 1 12 1 17 +2 -2.5 0 0 0 1 17 +6 -3 
0 0 0 1 17 +10 -7.5 0 0 0 1 17 +5 -2.5 0 0 0 1 17 +5 -3.75 0 0 0 1 17 +10 -3.75 0 0 0 1 17 +2 -3 0 0 0 1 17 +10 -6.25 0 0 0 1 17 +4 -7.5 0 0 0 1 17 +8 -16 0 0 0 1 17 +5 -6.25 0 0 0 1 17 +4 0 2 1 4 1 17 +10 -11.25 0 0 0 1 17 +5 -6.88 0 0 0 1 17 +5 -7.5 0 0 0 1 17 +26 0 12 1 0 1 17 +8 -13 0 0 0 1 17 +4 -4.5 0 0 0 1 17 +8 -10 0 0 0 1 17 +6 -3.75 0 0 0 1 17 +5 -5.63 0 0 0 1 17 +9 -18 0 0 0 1 17 +12 -13.5 0 0 0 1 17 +7 0 3 1 0 1 17 +8 -14 0 0 0 1 17 +2 -1.5 0 0 0 1 17 +10 -2.5 0 1 -2.5 1 17 +13 0 5 1 13 1 17 +9 -15.75 0 0 0 1 17 +8 -12 0 0 0 1 17 +28 0 13 1 0 1 17 +6 -7.5 0 0 0 1 17 +10 -16.25 0 0 0 1 17 +12 -7.5 0 0 0 1 17 +5 -5 0 0 0 1 17 +2 -2 0 0 0 1 17 +22 0 10 1 22 1 17 +2 -1 0 0 0 1 17 +3 0 1 1 3 1 17 +4 -5.5 0 0 0 1 17 +2 -2.25 0 0 0 1 17 +6 -2.25 0 0 0 1 17 +4 -6.5 0 0 0 1 17 +9 -12.38 0 0 0 1 17 +10 -13.75 0 0 0 1 17 +10 -17.5 0 0 0 1 17 +4 -5 0 0 0 1 17 +9 -11.25 0 0 0 1 17 +10 -10 0 0 0 1 17 +2 -3.25 0 0 0 1 17 +5 -8.75 0 0 0 1 17 +5 -10 0 0 0 1 17 +9 -2.25 0 1 -2.25 1 17 +6 -6.75 0 0 0 1 17 +12 -16.5 0 0 0 1 17 +9 -14.63 0 0 0 1 17 +4 -8 0 0 0 1 17 +6 -5.25 0 0 0 1 17 +9 -6.75 0 0 0 1 17 +12 -12 0 0 0 1 17 +4 -1 0 1 4 1 17 +12 -15 0 0 0 1 17 +4 -3.5 0 0 0 1 17 +2 -1.25 0 0 0 1 17 +30 0 12 1 0 1 17 +12 -19.5 0 0 0 1 17 +12 -3 0 0 0 1 17 +5 -1.25 0 0 0 1 17 +5 -1.88 0 0 0 1 17 +2 -3.5 0 0 0 1 17 +12 -9 0 0 0 1 17 +10 -20 0 0 0 1 17 +8 -4 0 0 0 1 17 +12 0 5 1 0 1 17 +2 0 1 1 2 1 17 +4 -1.5 0 0 0 1 17 +2 -3.75 0 0 0 1 17 +6 -10.5 0 0 0 1 17 +4 -2 0 0 0 1 17 +23 0 10 1 0 1 17 +12 -18 0 0 0 1 17 +6 -8.25 0 0 0 1 17 +26 0 10 1 26 1 17 +10 -8.75 0 0 0 1 17 +2 -0.75 0 0 0 1 17 +5 -9.38 0 0 0 1 17 +25 0 9 1 0 1 17 +9 -4.5 0 0 0 1 17 +10 -5 0 0 0 1 17 +2 -4 0 0 0 1 17 +2 -2.75 0 0 0 1 17 +4 -6 0 0 0 1 17 +10 -12.5 0 0 0 1 17 +12 -22.5 0 0 0 1 17 +4 -4 0 0 0 1 17 +2 -0.5 0 0 0 1 17 +8 -2 0 1 -2 1 17 +4 -3 0 0 0 1 17 +6 -11.25 0 0 0 1 17 +8 -15 0 0 0 1 17 +8 -11 0 0 0 1 17 +12 -4.5 0 0 0 1 17 +19 0 8 1 19 1 17 +6 -6 0 0 0 1 17 +5 -4.38 0 0 0 1 17 +9 -9 0 0 0 1 17 +6 -1.5 0 0 0 1 17 +9 -13.5 0 0 0 1 17 +9 -5.63 0 0 0 1 17 +8 -8 0 0 0 1 17 +8 0 3 1 8 1 17 +8 -3 0 0 0 1 17 +9 -11.25 0 1 9 1 18 +8 -16 0 0 0 1 18 +9 -5.63 0 1 -5.63 1 18 +9 -4.5 0 1 9 1 18 +2 -2 0 1 2 1 18 +12 -19.5 0 0 0 1 18 +4 -4.5 0 1 4 1 18 +2 -3.75 0 0 0 1 18 +2 -2.25 0 0 0 1 18 +12 -4.5 0 1 -4.5 1 18 +9 -10.13 0 1 -10.13 1 18 +12 -3 0 1 12 1 18 +10 -17.5 0 0 0 1 18 +5 -4.38 0 1 -4.38 1 18 +5 -7.5 0 0 0 1 18 +6 -11.25 0 0 0 1 18 +2 -1.5 0 1 -1.5 1 18 +9 -6.75 0 1 -6.75 1 18 +4 -7 0 1 -7 1 18 +8 -7 0 1 8 1 18 +2 -1.75 0 1 2 1 18 +2 -1 0 1 2 1 18 +10 -6.25 0 1 -6.25 1 18 +6 -6.75 0 1 6 1 18 +9 -2.25 0 1 -2.25 1 18 +2 -0.75 0 1 2 1 18 +12 0 4 0 4 1 18 +6 -3 0 1 -3 1 18 +3 0 1 1 3 1 18 +2 -3 0 0 0 1 18 +10 -13.75 0 0 0 1 18 +6 -2.25 0 1 6 1 18 +5 -1.88 0 1 -1.88 1 18 +12 -13.5 0 1 12 1 18 +22 0 10 0 10 1 18 +9 -12.38 0 0 0 1 18 +26 0 10 0 10 1 18 +12 -10.5 0 1 -10.5 1 18 +10 -2.5 0 1 -2.5 1 18 +25 0 10 0 10 1 18 +9 -15.75 0 0 0 1 18 +7 0 3 1 0 1 18 +10 -10 0 0 0 1 18 +12 -15 0 0 0 1 18 +12 0 6 0 6 1 18 +6 -4.5 0 1 -4.5 1 18 +8 -13 0 0 0 1 18 +10 -16.25 0 0 0 1 18 +5 -1.25 0 1 5 1 18 +4 -4 0 1 4 1 18 +5 -3.75 0 1 5 1 18 +6 -8.25 0 0 0 1 18 +8 -15 0 0 0 1 18 +8 -8 0 1 -8 1 18 +2 -2.75 0 1 -2.75 1 18 +6 -12 0 0 0 1 18 +2 0 1 1 2 1 18 +2 -1.25 0 1 -1.25 1 18 +9 -18 0 0 0 1 18 +6 -9 0 0 0 1 18 +10 -8.75 0 1 -8.75 1 18 +4 -7.5 0 0 0 1 18 +13 0 6 0 6 1 18 +10 -11.25 0 1 10 1 18 +4 -3 0 1 4 1 18 +10 -5 0 1 10 1 18 +8 -2 0 1 -2 1 18 +4 -2.5 0 1 4 1 18 +2 -3.5 0 0 0 1 18 +2 -2.5 0 0 0 1 18 +6 -3.75 0 1 6 1 18 +8 -3 0 1 8 1 18 +2 -3.25 0 0 0 
1 18 +8 -9 0 0 0 1 18 +6 -6 0 0 0 1 18 +8 -11 0 0 0 1 18 +5 -8.75 0 0 0 1 18 +6 -9.75 0 0 0 1 18 +12 -24 0 0 0 1 18 +4 -6.5 0 0 0 1 18 +5 -10 0 0 0 1 18 +30 0 12 1 0 1 18 +12 -18 0 0 0 1 18 +9 -9 0 0 0 1 18 +5 -5 0 1 -5 1 18 +5 -9.38 0 0 0 1 18 +10 -12.5 0 0 0 1 18 +10 -18.75 0 0 0 1 18 +5 -2.5 0 1 -2.5 1 18 +9 -14.63 0 0 0 1 18 +28 0 13 0 13 1 18 +5 -6.88 0 0 0 1 18 +4 -3.5 0 1 -3.5 1 18 +12 -16.5 0 0 0 1 18 +5 -8.13 0 0 0 1 18 +9 -16.88 0 0 0 1 18 +9 -3.38 0 1 -3.38 1 18 +12 0 5 1 0 1 18 +4 -8 0 0 0 1 18 +8 -12 0 0 0 1 18 +8 -4 0 1 -4 1 18 +2 -4 0 1 -4 1 18 +12 -9 0 1 -9 1 18 +4 -1.5 0 1 4 1 18 +6 -10.5 0 0 0 1 18 +5 -3.13 0 1 5 1 18 +10 -15 0 0 0 1 18 +23 0 10 0 10 1 18 +12 -7.5 0 1 -7.5 1 18 +2 -0.5 0 1 -0.5 1 18 +4 0 2 1 4 1 18 +6 -1.5 0 1 -1.5 1 18 +4 -1 0 1 4 1 18 +10 -20 0 0 0 1 18 +12 -22.5 0 0 0 1 18 +25 0 9 1 0 1 18 +13 0 5 1 13 1 18 +6 -5.25 0 1 6 1 18 +9 -13.5 0 0 0 1 18 +5 0 2 1 0 1 18 +12 -6 0 1 -6 1 18 +5 -6.25 0 0 0 1 18 +10 -3.75 0 1 10 1 18 +9 -7.88 0 1 9 1 18 +8 -6 0 1 -6 1 18 +4 -5.5 0 0 0 1 18 +19 0 8 0 8 1 18 +10 -7.5 0 1 -7.5 1 18 +4 -6 0 0 0 1 18 +8 -14 0 0 0 1 18 +8 0 3 1 8 1 18 +12 -21 0 0 0 1 18 +4 -2 0 1 -2 1 18 +4 -5 0 0 0 1 18 +6 -7.5 0 0 0 1 18 +12 -12 0 0 0 1 18 +8 -5 0 1 -5 1 18 +26 0 12 0 12 1 18 +8 -10 0 0 0 1 18 +5 -5.63 0 0 0 1 18 +2 -1 0 1 2 1 19 +9 -6.75 0 1 -6.75 1 19 +2 -4 0 0 0 1 19 +2 -3.25 0 0 0 1 19 +4 -6.5 0 1 -6.5 1 19 +5 -5.63 0 1 -5.63 1 19 +8 -8 0 1 -8 1 19 +12 -18 0 1 12 1 19 +2 -2.5 0 0 0 1 19 +3 0 1 1 3 1 19 +12 -16.5 0 1 12 1 19 +10 -12.5 0 0 0 1 19 +5 -1.25 0 1 5 1 19 +19 0 8 1 19 1 19 +8 -9 0 0 0 1 19 +5 -10 0 0 0 1 19 +25 0 10 1 25 1 19 +7 0 3 1 0 1 19 +6 -11.25 0 0 0 1 19 +6 -1.5 0 1 -1.5 1 19 +4 -1.5 0 1 4 1 19 +10 -5 0 1 10 1 19 +10 -3.75 0 1 10 1 19 +6 -4.5 0 1 -4.5 1 19 +12 -19.5 0 0 0 1 19 +5 -4.38 0 1 -4.38 1 19 +8 -11 0 0 0 1 19 +2 -0.75 0 1 2 1 19 +2 -1.5 0 1 -1.5 1 19 +6 -6.75 0 0 0 1 19 +4 -6 0 0 0 1 19 +10 -16.25 0 1 -16.25 1 19 +12 -15 0 1 -15 1 19 +6 -5.25 0 1 6 1 19 +12 -21 0 0 0 1 19 +4 -3 0 1 4 1 19 +12 -22.5 0 1 12 1 19 +2 -3.75 0 0 0 1 19 +6 -12 0 0 0 1 19 +5 -8.13 0 0 0 1 19 +10 -8.75 0 1 -8.75 1 19 +12 -6 0 1 -6 1 19 +5 -5 0 1 -5 1 19 +22 0 10 1 22 1 19 +12 -13.5 0 1 12 1 19 +8 -7 0 1 8 1 19 +4 -3.5 0 1 -3.5 1 19 +9 -12.38 0 0 0 1 19 +10 -7.5 0 1 -7.5 1 19 +26 0 10 1 26 1 19 +12 -4.5 0 1 -4.5 1 19 +8 -15 0 0 0 1 19 +2 -1.75 0 1 2 1 19 +12 0 6 1 0 1 19 +9 -3.38 0 1 -3.38 1 19 +2 -3 0 0 0 1 19 +9 -5.63 0 1 -5.63 1 19 +2 -3.5 0 0 0 1 19 +8 -12 0 0 0 1 19 +10 -18.75 0 0 0 1 19 +4 0 2 1 4 1 19 +2 -2.25 0 0 0 1 19 +9 -2.25 0 1 -2.25 1 19 +10 -13.75 0 0 0 1 19 +28 0 13 1 0 1 19 +4 -2.5 0 1 4 1 19 +9 -15.75 0 0 0 1 19 +10 -15 0 0 0 1 19 +10 -10 0 1 10 1 19 +9 -18 0 0 0 1 19 +12 -24 0 0 0 1 19 +13 0 5 1 13 1 19 +5 -1.88 0 1 -1.88 1 19 +4 -4.5 0 0 0 1 19 +9 -7.88 0 0 0 1 19 +9 -9 0 0 0 1 19 +25 0 9 1 0 1 19 +12 -12 0 0 0 1 19 +6 -2.25 0 1 6 1 19 +8 -5 0 1 -5 1 19 +4 -5.5 0 0 0 1 19 +2 -1.25 0 0 0 1 19 +9 -13.5 0 0 0 1 19 +9 -4.5 0 1 9 1 19 +10 -11.25 0 0 0 1 19 +6 -3 0 1 -3 1 19 +10 -2.5 0 1 -2.5 1 19 +12 0 4 1 12 1 19 +10 -20 0 0 0 1 19 +5 -3.75 0 0 0 1 19 +9 -10.13 0 0 0 1 19 +4 -7 0 0 0 1 19 +12 -10.5 0 1 -10.5 1 19 +8 -16 0 0 0 1 19 +4 -7.5 0 0 0 1 19 +8 0 3 1 8 1 19 +6 -10.5 0 0 0 1 19 +6 -9.75 0 0 0 1 19 +5 -8.75 0 0 0 1 19 +5 -2.5 0 1 -2.5 1 19 +13 0 6 1 0 1 19 +23 0 10 0 10 1 19 +8 -4 0 1 -4 1 19 +9 -11.25 0 0 0 1 19 +5 -6.88 0 0 0 1 19 +4 -4 0 1 4 1 19 +10 -17.5 0 0 0 1 19 +8 -13 0 0 0 1 19 +26 0 12 1 0 1 19 +6 -8.25 0 0 0 1 19 +9 -14.63 0 0 0 1 19 +8 -2 0 1 -2 1 19 +10 -6.25 0 1 -6.25 1 19 +8 -14 0 0 0 1 19 +12 
0 5 1 0 1 19 +8 -10 0 0 0 1 19 +30 0 12 1 0 1 19 +5 -7.5 0 0 0 1 19 +5 0 2 1 0 1 19 +6 -3.75 0 1 6 1 19 +6 -6 0 1 -6 1 19 +4 -2 0 1 -2 1 19 +12 -7.5 0 1 -7.5 1 19 +5 -6.25 0 0 0 1 19 +4 -5 0 0 0 1 19 +2 -2.75 0 0 0 1 19 +2 -2 0 1 2 1 19 +6 -9 0 0 0 1 19 +5 -3.13 0 1 5 1 19 +12 -9 0 1 -9 1 19 +4 -8 0 0 0 1 19 +4 -1 0 1 4 1 19 +2 0 1 1 2 1 19 +9 -16.88 0 0 0 1 19 +8 -6 0 1 -6 1 19 +2 -0.5 0 1 -0.5 1 19 +6 -7.5 0 0 0 1 19 +8 -3 0 1 8 1 19 +12 -3 0 1 12 1 19 +5 -9.38 0 0 0 1 19 +6 -9.75 0 0 0 1 20 +12 -13.5 0 0 0 1 20 +8 -7 0 1 8 1 20 +10 -7.5 0 1 -7.5 1 20 +2 -2.25 0 1 2 1 20 +6 -8.25 0 1 -8.25 1 20 +10 -16.25 0 0 0 1 20 +3 0 1 0 1 1 20 +4 -3 0 1 4 1 20 +8 -2 0 1 -2 1 20 +4 -2.5 0 1 4 1 20 +5 -5.63 0 1 -5.63 1 20 +5 0 2 0 2 1 20 +30 0 12 0 12 1 20 +9 -4.5 0 1 9 1 20 +4 -7.5 0 0 0 1 20 +26 0 10 1 26 1 20 +10 -6.25 0 1 -6.25 1 20 +2 -4 0 0 0 1 20 +4 -5 0 1 4 1 20 +5 -1.88 0 1 -1.88 1 20 +23 0 10 0 10 1 20 +8 -3 0 1 8 1 20 +8 -12 0 0 0 1 20 +10 -2.5 0 1 -2.5 1 20 +5 -8.13 0 0 0 1 20 +8 -9 0 1 -9 1 20 +2 -3 0 1 -3 1 20 +9 -11.25 0 1 9 1 20 +9 -12.38 0 0 0 1 20 +12 -15 0 1 -15 1 20 +8 -10 0 0 0 1 20 +4 -1 0 1 4 1 20 +8 0 3 0 3 1 20 +4 -3.5 0 0 0 1 20 +8 -8 0 1 -8 1 20 +10 -11.25 0 1 10 1 20 +10 -5 0 1 10 1 20 +9 -13.5 0 0 0 1 20 +2 -0.75 0 1 2 1 20 +5 -4.38 0 0 0 1 20 +2 -1.5 0 1 -1.5 1 20 +2 -3.75 0 0 0 1 20 +5 -3.75 0 1 5 1 20 +9 -16.88 0 0 0 1 20 +9 -3.38 0 1 -3.38 1 20 +5 -10 0 0 0 1 20 +26 0 12 0 12 1 20 +5 -9.38 0 0 0 1 20 +6 -1.5 0 1 -1.5 1 20 +10 -10 0 1 10 1 20 +2 -1.25 0 0 0 1 20 +9 -14.63 0 0 0 1 20 +6 -4.5 0 1 -4.5 1 20 +5 -5 0 1 -5 1 20 +5 -7.5 0 0 0 1 20 +8 -13 0 0 0 1 20 +5 -3.13 0 1 5 1 20 +8 -5 0 1 -5 1 20 +8 -11 0 1 8 1 20 +6 -6.75 0 1 6 1 20 +5 -8.75 0 0 0 1 20 +2 0 1 0 1 1 20 +9 -5.63 0 1 -5.63 1 20 +6 -6 0 1 -6 1 20 +4 -5.5 0 0 0 1 20 +6 -3 0 1 -3 1 20 +12 -19.5 0 0 0 1 20 +10 -13.75 0 0 0 1 20 +10 -8.75 0 1 -8.75 1 20 +5 -6.88 0 1 -6.88 1 20 +6 -7.5 0 0 0 1 20 +10 -12.5 0 1 -12.5 1 20 +9 -6.75 0 1 -6.75 1 20 +4 -6 0 0 0 1 20 +8 -4 0 1 -4 1 20 +2 -1 0 0 0 1 20 +12 -24 0 0 0 1 20 +12 -6 0 1 -6 1 20 +2 -2 0 1 2 1 20 +4 -7 0 0 0 1 20 +12 -9 0 0 0 1 20 +6 -11.25 0 1 6 1 20 +25 0 10 0 10 1 20 +28 0 13 0 13 1 20 +2 -2.75 0 1 -2.75 1 20 +12 -10.5 0 0 0 1 20 +8 -14 0 0 0 1 20 +4 -6.5 0 0 0 1 20 +4 0 2 0 2 1 20 +10 -15 0 1 10 1 20 +12 0 5 0 5 1 20 +10 -18.75 0 0 0 1 20 +12 -3 0 1 12 1 20 +4 -4 0 0 0 1 20 +9 -7.88 0 0 0 1 20 +9 -2.25 0 1 -2.25 1 20 +2 -1.75 0 0 0 1 20 +12 0 6 0 6 1 20 +5 -2.5 0 1 -2.5 1 20 +4 -4.5 0 1 4 1 20 +8 -6 0 0 0 1 20 +12 -18 0 0 0 1 20 +12 -16.5 0 0 0 1 20 +22 0 10 0 10 1 20 +12 -21 0 0 0 1 20 +12 -4.5 0 1 -4.5 1 20 +12 -12 0 1 -12 1 20 +19 0 8 0 8 1 20 +2 -2.5 0 1 2 1 20 +12 0 4 0 4 1 20 +4 -2 0 1 -2 1 20 +9 -9 0 0 0 1 20 +9 -10.13 0 0 0 1 20 +6 -2.25 0 1 6 1 20 +2 -0.5 0 1 -0.5 1 20 +10 -3.75 0 1 10 1 20 +13 0 5 0 5 1 20 +4 -1.5 0 1 4 1 20 +5 -1.25 0 1 5 1 20 +6 -9 0 0 0 1 20 +10 -17.5 0 0 0 1 20 +6 -12 0 0 0 1 20 +6 -5.25 0 0 0 1 20 +12 -22.5 0 0 0 1 20 +8 -16 0 0 0 1 20 +9 -15.75 0 0 0 1 20 +10 -20 0 0 0 1 20 +13 0 6 0 6 1 20 +4 -8 0 0 0 1 20 +12 -7.5 0 1 -7.5 1 20 +9 -18 0 0 0 1 20 +2 -3.25 0 1 2 1 20 +7 0 3 0 3 1 20 +6 -3.75 0 1 6 1 20 +5 -6.25 0 0 0 1 20 +8 -15 0 0 0 1 20 +25 0 9 0 9 1 20 +2 -3.5 0 0 0 1 20 +6 -10.5 0 0 0 1 20 +9 -10.13 0 0 0 1 21 +12 -10.5 0 0 0 1 21 +25 0 10 1 25 1 21 +4 -7 0 0 0 1 21 +9 -7.88 0 0 0 1 21 +5 -3.13 0 1 5 1 21 +5 -8.13 0 0 0 1 21 +8 -7 0 0 0 1 21 +12 -6 0 1 -6 1 21 +12 -24 0 0 0 1 21 +12 -21 0 0 0 1 21 +4 -2.5 0 1 4 1 21 +6 -9 0 0 0 1 21 +10 -15 0 0 0 1 21 +8 -6 0 1 -6 1 21 +13 0 6 1 0 1 21 +6 -12 0 0 0 1 21 +6 -4.5 0 1 
-4.5 1 21 +9 -16.88 0 0 0 1 21 +10 -18.75 0 0 0 1 21 +9 -3.38 0 1 -3.38 1 21 +6 -9.75 0 0 0 1 21 +2 -1.75 0 0 0 1 21 +5 0 2 1 0 1 21 +8 -5 0 1 -5 1 21 +8 -9 0 0 0 1 21 +12 0 6 1 0 1 21 +12 0 4 1 12 1 21 +2 -2.5 0 0 0 1 21 +6 -3 0 1 -3 1 21 +10 -7.5 0 0 0 1 21 +5 -2.5 0 1 -2.5 1 21 +5 -3.75 0 0 0 1 21 +10 -3.75 0 1 10 1 21 +2 -3 0 0 0 1 21 +10 -6.25 0 0 0 1 21 +4 -7.5 0 0 0 1 21 +8 -16 0 0 0 1 21 +5 -6.25 0 0 0 1 21 +4 0 2 1 4 1 21 +10 -11.25 0 0 0 1 21 +5 -6.88 0 0 0 1 21 +5 -7.5 0 0 0 1 21 +26 0 12 1 0 1 21 +8 -13 0 0 0 1 21 +4 -4.5 0 0 0 1 21 +8 -10 0 0 0 1 21 +6 -3.75 0 1 6 1 21 +5 -5.63 0 0 0 1 21 +9 -18 0 0 0 1 21 +12 -13.5 0 0 0 1 21 +7 0 3 1 0 1 21 +8 -14 0 0 0 1 21 +2 -1.5 0 1 -1.5 1 21 +10 -2.5 0 1 -2.5 1 21 +13 0 5 1 13 1 21 +9 -15.75 0 0 0 1 21 +8 -12 0 0 0 1 21 +28 0 13 1 0 1 21 +6 -7.5 0 0 0 1 21 +10 -16.25 0 0 0 1 21 +12 -7.5 0 0 0 1 21 +5 -5 0 0 0 1 21 +2 -2 0 0 0 1 21 +22 0 10 1 22 1 21 +2 -1 0 1 2 1 21 +3 0 1 1 3 1 21 +4 -5.5 0 0 0 1 21 +2 -2.25 0 0 0 1 21 +6 -2.25 0 1 6 1 21 +4 -6.5 0 0 0 1 21 +9 -12.38 0 0 0 1 21 +10 -13.75 0 0 0 1 21 +10 -17.5 0 0 0 1 21 +4 -5 0 0 0 1 21 +9 -11.25 0 0 0 1 21 +10 -10 0 0 0 1 21 +2 -3.25 0 0 0 1 21 +5 -8.75 0 0 0 1 21 +5 -10 0 0 0 1 21 +9 -2.25 0 1 -2.25 1 21 +6 -6.75 0 0 0 1 21 +12 -16.5 0 0 0 1 21 +9 -14.63 0 0 0 1 21 +4 -8 0 0 0 1 21 +6 -5.25 0 0 0 1 21 +12 -12 0 0 0 1 21 +4 -1 0 1 4 1 21 +12 -15 0 0 0 1 21 +4 -3.5 0 0 0 1 21 +2 -1.25 0 1 -1.25 1 21 +30 0 12 1 0 1 21 +12 -19.5 0 0 0 1 21 +12 -3 0 1 12 1 21 +5 -1.25 0 1 5 1 21 +5 -1.88 0 1 -1.88 1 21 +2 -3.5 0 0 0 1 21 +12 -9 0 0 0 1 21 +10 -20 0 0 0 1 21 +8 -4 0 1 -4 1 21 +12 0 5 1 0 1 21 +2 0 1 1 2 1 21 +4 -1.5 0 1 4 1 21 +2 -3.75 0 0 0 1 21 +6 -10.5 0 0 0 1 21 +4 -2 0 1 -2 1 21 +23 0 10 1 0 1 21 +12 -18 0 0 0 1 21 +6 -8.25 0 0 0 1 21 +26 0 10 1 26 1 21 +10 -8.75 0 0 0 1 21 +2 -0.75 0 1 2 1 21 +5 -9.38 0 0 0 1 21 +25 0 9 1 0 1 21 +9 -4.5 0 1 9 1 21 +10 -5 0 1 10 1 21 +2 -4 0 0 0 1 21 +2 -2.75 0 0 0 1 21 +4 -6 0 0 0 1 21 +10 -12.5 0 0 0 1 21 +12 -22.5 0 0 0 1 21 +4 -4 0 0 0 1 21 +2 -0.5 0 1 -0.5 1 21 +8 -2 0 1 -2 1 21 +4 -3 0 0 0 1 21 +6 -11.25 0 0 0 1 21 +8 -15 0 0 0 1 21 +8 -11 0 0 0 1 21 +12 -4.5 0 1 -4.5 1 21 +19 0 8 1 19 1 21 +6 -6 0 0 0 1 21 +5 -4.38 0 0 0 1 21 +9 -9 0 0 0 1 21 +6 -1.5 0 1 -1.5 1 21 +9 -13.5 0 0 0 1 21 +9 -5.63 0 0 0 1 21 +8 -8 0 0 0 1 21 +8 0 3 1 8 1 21 +8 -3 0 1 8 1 21 +9 -11.25 0 0 0 1 22 +8 -16 0 0 0 1 22 +9 -5.63 0 0 0 1 22 +9 -4.5 0 1 9 1 22 +2 -2 0 0 0 1 22 +12 -19.5 0 0 0 1 22 +4 -4.5 0 0 0 1 22 +2 -3.75 0 0 0 1 22 +2 -2.25 0 0 0 1 22 +12 -4.5 0 1 -4.5 1 22 +9 -10.13 0 0 0 1 22 +12 -3 0 1 12 1 22 +10 -17.5 0 0 0 1 22 +5 -4.38 0 0 0 1 22 +5 -7.5 0 0 0 1 22 +6 -11.25 0 0 0 1 22 +2 -1.5 0 0 0 1 22 +9 -6.75 0 0 0 1 22 +4 -7 0 0 0 1 22 +8 -7 0 0 0 1 22 +2 -1.75 0 0 0 1 22 +2 -1 0 0 0 1 22 +10 -6.25 0 1 -6.25 1 22 +6 -6.75 0 0 0 1 22 +9 -2.25 0 1 -2.25 1 22 +2 -0.75 0 1 2 1 22 +12 0 4 1 12 1 22 +6 -3 0 0 0 1 22 +3 0 1 1 3 1 22 +2 -3 0 0 0 1 22 +10 -13.75 0 1 -13.75 1 22 +6 -2.25 0 1 6 1 22 +5 -1.88 0 1 -1.88 1 22 +12 -13.5 0 0 0 1 22 +22 0 10 1 22 1 22 +9 -12.38 0 0 0 1 22 +26 0 10 0 10 1 22 +12 -10.5 0 0 0 1 22 +10 -2.5 0 1 -2.5 1 22 +25 0 10 1 25 1 22 +9 -15.75 0 0 0 1 22 +7 0 3 1 0 1 22 +10 -10 0 1 10 1 22 +12 -15 0 0 0 1 22 +12 0 6 0 6 1 22 +6 -4.5 0 0 0 1 22 +8 -13 0 0 0 1 22 +10 -16.25 0 0 0 1 22 +5 -1.25 0 0 0 1 22 +4 -4 0 0 0 1 22 +5 -3.75 0 0 0 1 22 +6 -8.25 0 0 0 1 22 +8 -15 0 0 0 1 22 +8 -8 0 0 0 1 22 +2 -2.75 0 0 0 1 22 +6 -12 0 0 0 1 22 +2 0 1 0 1 1 22 +2 -1.25 0 1 -1.25 1 22 +9 -18 0 0 0 1 22 +6 -9 0 0 0 1 22 +10 -8.75 0 0 0 1 22 +4 -7.5 0 0 0 1 
22
[... data rows elided: remaining trials for subjIDs 22-30, seven columns per row in the same gain/loss/cert/gamble/outcome/cond/subjID layout as below, cond = 1 throughout ...]
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/ra_exampleData.txt b/Python/hbayesdm/common/extdata/ra_exampleData.txt
new file mode 100644
index 00000000..dd6e3536
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/ra_exampleData.txt
@@ -0,0 +1,701 @@
+gain loss cert gamble outcome cond subjID
[... 700 data rows elided: risk-aversion (ra) example trials for subjIDs 2, 3, 4, 6 and 7 (apparently 140 trials each), cond = 0 throughout ...]
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/rdt_exampleData.txt b/Python/hbayesdm/common/extdata/rdt_exampleData.txt
new file mode 100644
index 00000000..79d99830
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/rdt_exampleData.txt
@@ -0,0 +1,901 @@
+subjID trial_number gamble_cha RT cert gain loss type_cha trial_payoff outcome happy RT_happy gamble type
[... 900 data rows elided: risky decision task (rdt) example trials, 150 per subject starting at subjID 1; the text resumes mid-row at subjID 5, trial 51, below ...]
+5 51 risky 742 42 116
0 gain 0 0 0 1216 1 1 +5 52 safe 1086 39 0 112 loss 0 -39 0 1216 0 -1 +5 53 risky 942 43 0 64 loss 0 -64 0 1216 1 -1 +5 54 risky 1535 77 160 0 gain 0 160 -1 573 1 1 +5 55 risky 1100 37 128 0 gain 0 128 -1 573 1 1 +5 56 risky 1199 121 265 0 gain 0 265 3 1015 1 1 +5 57 risky 800 123 205 0 gain 0 0 3 1015 1 1 +5 58 safe 1077 0 162 322 mixed 0 0 1 1827 0 0 +5 59 risky 705 116 335 0 gain 0 335 1 1827 1 1 +5 60 risky 817 58 252 0 gain 0 252 1 1827 1 1 +5 61 safe 1068 0 98 62 mixed 0 0 3 1060 0 0 +5 62 safe 1034 117 0 428 loss 0 -117 3 1060 0 -1 +5 63 risky 1105 0 103 151 mixed 0 -151 3 1060 1 0 +5 64 risky 1023 124 293 0 gain 0 293 -2 707 1 1 +5 65 safe 857 0 60 61 mixed 0 0 -2 707 0 0 +5 66 safe 1198 0 300 298 mixed 0 0 0 1657 0 0 +5 67 risky 917 100 497 0 gain 0 497 0 1657 1 1 +5 68 safe 839 63 0 301 loss 0 -63 0 1657 0 -1 +5 69 safe 1020 119 0 379 loss 0 -119 0 662 0 -1 +5 70 risky 830 78 335 0 gain 0 335 0 662 1 1 +5 71 risky 1145 0 164 83 mixed 0 164 1 1074 1 0 +5 72 risky 946 0 102 23 mixed 0 -23 1 1074 1 0 +5 73 risky 1198 60 0 129 loss 0 0 0 947 1 -1 +5 74 risky 189 0 301 156 mixed 0 -156 0 947 1 0 +5 75 risky 889 0 63 28 mixed 0 63 0 947 1 0 +5 76 risky 607 83 138 0 gain 0 138 1 1973 1 1 +5 77 safe 1003 99 0 222 loss 0 -99 1 1973 0 -1 +5 78 risky 875 80 400 0 gain 0 0 1 1973 1 1 +5 79 risky 1292 100 0 179 loss 0 0 1 2038 1 -1 +5 80 safe 1613 80 0 164 loss 0 -80 1 2038 0 -1 +5 81 risky 623 0 218 66 mixed 0 218 1 2038 1 0 +5 82 safe 1006 57 0 110 loss 0 -57 1 543 0 -1 +5 83 risky 849 116 428 0 gain 0 0 1 543 1 1 +5 84 safe 1007 43 0 130 loss 0 -43 1 2127 0 -1 +5 85 risky 1154 39 0 96 loss 0 0 1 2127 1 -1 +5 86 risky 954 61 190 0 gain 0 0 1 2127 1 1 +5 87 safe 832 96 0 244 loss 0 -96 1 720 0 -1 +5 88 risky 688 59 148 0 gain 0 0 1 720 1 1 +5 89 risky 835 44 143 0 gain 0 0 1 720 1 1 +5 90 safe 686 0 57 118 mixed 0 0 -1 839 0 0 +5 91 risky 600 96 170 0 gain 0 0 -1 839 1 1 +5 92 risky 717 0 62 19 mixed 0 -19 0 1046 1 0 +5 93 safe 885 0 158 240 mixed 0 0 0 1046 0 0 +5 94 risky 840 40 68 0 gain 0 68 0 1046 1 1 +5 95 risky 905 120 0 205 loss 0 0 1 1086 1 -1 +5 96 risky 766 104 222 0 gain 0 0 1 1086 1 1 +5 97 safe 763 60 0 249 loss 0 -60 1 1344 0 -1 +5 98 safe 939 0 217 443 mixed 0 0 1 1344 0 0 +5 99 risky 815 0 56 9 mixed 0 56 1 1344 1 0 +5 100 safe 631 76 0 403 loss 0 -76 1 1411 0 -1 +5 101 risky 756 0 102 199 mixed 0 102 1 1411 1 0 +5 102 risky 1026 0 222 331 mixed 0 -331 1 1411 1 0 +5 103 safe 1070 104 0 419 loss 0 -104 -2 1054 0 -1 +5 104 risky 811 62 132 0 gain 0 132 -2 1054 1 1 +5 105 risky 1091 64 303 0 gain 0 303 2 1088 1 1 +5 106 risky 667 0 298 116 mixed 0 298 2 1088 1 0 +5 107 safe 880 80 0 194 loss 0 -80 1 587 0 -1 +5 108 safe 1294 0 97 81 mixed 0 0 1 587 0 0 +5 109 risky 689 0 100 31 mixed 0 -31 -1 808 1 0 +5 110 risky 820 57 104 0 gain 0 0 -1 808 1 1 +5 111 safe 639 0 61 46 mixed 0 0 -2 961 0 0 +5 112 risky 783 118 377 0 gain 0 0 -2 961 1 1 +5 113 risky 816 101 358 0 gain 0 358 -1 862 1 1 +5 114 risky 747 84 0 136 loss 0 -136 -1 862 1 -1 +5 115 safe 954 76 0 253 loss 0 -76 -1 862 0 -1 +5 116 risky 1464 122 242 0 gain 0 0 -3 1262 1 1 +5 117 risky 845 82 194 0 gain 0 0 -3 1262 1 1 +5 118 safe 907 58 0 214 loss 0 -58 -3 1262 0 -1 +5 119 safe 973 96 0 164 loss 0 -96 -4 1169 0 -1 +5 120 risky 801 122 503 0 gain 0 0 -4 1169 1 1 +5 121 risky 839 0 157 64 mixed 0 157 -4 1169 1 0 +5 122 safe 1103 36 0 141 loss 0 -36 0 1215 0 -1 +5 123 risky 875 0 100 119 mixed 0 -119 0 1215 1 0 +5 124 risky 873 41 98 0 gain 0 0 -1 796 1 1 +5 125 risky 803 96 283 0 gain 0 0 -1 796 1 1 +5 126 risky 734 103 250 
0 gain 0 0 -1 796 1 1 +5 127 safe 994 83 0 221 loss 0 -83 -1 576 0 -1 +5 128 risky 574 80 251 0 gain 0 0 -1 576 1 1 +5 129 risky 1024 40 0 79 loss 0 0 -2 929 1 -1 +5 130 risky 582 97 314 0 gain 0 314 -2 929 1 1 +5 131 risky 594 61 107 0 gain 0 107 -2 929 1 1 +5 132 risky 688 100 198 0 gain 0 0 2 1097 1 1 +5 133 safe 971 0 99 99 mixed 0 0 2 1097 0 0 +5 134 risky 726 0 301 90 mixed 0 301 0 734 1 0 +5 135 risky 1385 0 163 109 mixed 0 -109 0 734 1 0 +5 136 risky 753 122 0 269 loss 1 0 -1 970 1 -1 +5 137 risky 844 60 0 98 loss 0 -98 -1 970 1 -1 +5 138 risky 832 103 184 0 gain 0 0 -1 731 1 1 +5 139 safe 737 97 0 201 loss 0 -97 -1 731 0 -1 +5 140 risky 770 0 304 60 mixed 0 -60 -2 890 1 0 +5 141 safe 832 64 0 151 loss 0 -64 -2 890 0 -1 +5 142 safe 740 77 0 334 loss 0 -77 -3 1066 0 -1 +5 143 risky 696 0 61 24 mixed 0 61 -3 1066 1 0 +5 144 risky 818 0 219 84 mixed 0 219 -1 680 1 0 +5 145 safe 707 120 0 600 loss 0 -120 -1 680 0 -1 +5 146 risky 910 0 303 359 mixed 0 303 -1 489 1 0 +5 147 risky 651 122 598 0 gain 0 598 -1 489 1 1 +5 148 safe 967 0 301 454 mixed 0 0 -1 489 0 0 +5 149 risky 826 119 216 0 gain 0 216 2 808 1 1 +5 150 risky 876 41 80 0 gain 0 80 2 808 1 1 +6 1 risky 8683 0 63 10 mixed 0 -10 0 38019 1 0 +6 2 risky 6548 119 236 0 gain 0 0 0 38019 1 1 +6 3 risky 11388 99 312 0 gain 0 312 -1 2893 1 1 +6 4 risky 4920 0 302 56 mixed 0 -56 -1 2893 1 0 +6 5 risky 5889 0 221 48 mixed 0 -48 -1 1767 1 0 +6 6 safe 1711 0 299 356 mixed 0 0 -1 1767 0 0 +6 7 safe 2687 63 120 0 gain 0 63 0 3312 0 1 +6 8 risky 2482 77 340 0 gain 0 0 0 3312 1 1 +6 9 risky 1313 79 222 0 gain 0 222 -1 4237 1 1 +6 10 risky 3219 43 0 70 loss 0 -70 -1 4237 1 -1 +6 11 risky 840 101 416 0 gain 0 0 -1 2893 1 1 +6 12 risky 2690 0 304 158 mixed 0 304 -1 2893 1 0 +6 13 risky 5480 57 170 0 gain 0 170 1 3609 1 1 +6 14 risky 6195 117 506 0 gain 0 0 1 3609 1 1 +6 15 safe 1799 44 0 204 loss 0 -44 1 3609 0 -1 +6 16 risky 3339 101 0 182 loss 0 -182 0 1761 1 -1 +6 17 safe 1679 37 0 108 loss 0 -37 0 1761 0 -1 +6 18 safe 2923 121 0 378 loss 0 -121 0 1761 0 -1 +6 19 safe 3155 0 64 76 mixed 0 0 -2 2754 0 0 +6 20 safe 4114 58 0 192 loss 0 -58 -2 2754 0 -1 +6 21 risky 3842 59 191 0 gain 0 191 -2 2754 1 1 +6 22 risky 1839 83 396 0 gain 0 396 0 2197 1 1 +6 23 risky 3984 0 101 84 mixed 0 101 0 2197 1 0 +6 24 safe 1262 57 0 254 loss 0 -57 0 2197 0 -1 +6 25 safe 2746 42 0 147 loss 0 -42 0 2171 0 -1 +6 26 risky 2941 0 220 111 mixed 0 -111 0 2171 1 0 +6 27 risky 3376 37 130 0 gain 0 130 0 2171 1 1 +6 28 risky 1224 116 602 0 gain 0 0 1 1356 1 1 +6 29 risky 2118 56 212 0 gain 0 212 1 1356 1 1 +6 30 risky 3519 100 0 165 loss 0 0 1 1356 1 -1 +6 31 safe 1538 97 0 501 loss 0 -97 2 959 0 -1 +6 32 risky 1818 119 378 0 gain 0 0 2 959 1 1 +6 33 safe 2836 98 0 359 loss 0 -98 -1 1881 0 -1 +6 34 risky 6887 83 0 179 loss 0 -179 -1 1881 1 -1 +6 35 risky 972 36 114 0 gain 0 0 -1 1176 1 1 +6 36 risky 4252 0 97 67 mixed 0 97 -1 1176 1 0 +6 37 safe 2055 0 156 164 mixed 0 0 0 1240 0 0 +6 38 risky 1016 0 158 35 mixed 0 158 0 1240 1 0 +6 39 safe 1023 58 0 217 loss 0 -58 0 1240 0 -1 +6 40 risky 3274 44 0 71 loss 0 -71 0 3700 1 -1 +6 41 risky 1661 102 276 0 gain 1 276 0 3700 1 1 +6 42 risky 2404 0 304 247 mixed 0 304 1 884 1 0 +6 43 safe 5414 38 66 0 gain 0 38 1 884 0 1 +6 44 risky 4757 119 263 0 gain 0 263 1 884 1 1 +6 45 risky 2906 100 0 223 loss 0 0 2 5593 1 -1 +6 46 risky 2634 0 104 44 mixed 0 104 2 5593 1 0 +6 47 risky 900 43 142 0 gain 0 0 2 5593 1 1 +6 48 risky 804 0 296 94 mixed 0 296 1 1667 1 0 +6 49 risky 7162 0 224 264 mixed 0 224 1 1667 1 0 +6 50 safe 1344 80 0 252 
loss 0 -80 2 1556 0 -1 +6 51 risky 2165 103 356 0 gain 0 0 2 1556 1 1 +6 52 risky 2000 44 197 0 gain 0 0 -1 836 1 1 +6 53 safe 2276 101 198 0 gain 0 101 -1 836 0 1 +6 54 risky 4122 38 103 0 gain 0 0 -1 836 1 1 +6 55 risky 3216 117 0 215 loss 0 0 0 2517 1 -1 +6 56 risky 2745 42 88 0 gain 0 0 0 2517 1 1 +6 57 risky 4050 80 0 156 loss 0 0 0 2517 1 -1 +6 58 safe 4801 98 165 0 gain 0 98 1 1145 0 1 +6 59 risky 4523 104 502 0 gain 0 502 1 1145 1 1 +6 60 risky 2626 0 100 23 mixed 0 100 1 1145 1 0 +6 61 safe 2020 0 59 89 mixed 0 0 2 927 0 0 +6 62 safe 1931 0 96 117 mixed 0 0 2 927 0 0 +6 63 safe 1416 77 0 334 loss 0 -77 1 1456 0 -1 +6 64 safe 1401 97 0 282 loss 0 -97 1 1456 0 -1 +6 65 risky 3129 102 0 198 loss 0 -198 1 1456 1 -1 +6 66 safe 1867 122 0 505 loss 0 -122 0 3611 0 -1 +6 67 risky 3273 0 104 98 mixed 0 104 0 3611 1 0 +6 68 risky 2041 99 218 0 gain 0 218 2 2511 1 1 +6 69 risky 1201 0 58 29 mixed 0 -29 2 2511 1 0 +6 70 safe 5182 63 0 134 loss 0 -63 1 1679 0 -1 +6 71 safe 3177 124 218 0 gain 0 124 1 1679 0 1 +6 72 safe 584 77 0 287 loss 0 -77 1 1261 0 -1 +6 73 risky 2418 82 291 0 gain 0 0 1 1261 1 1 +6 74 risky 3964 44 0 102 loss 0 0 0 1136 1 -1 +6 75 risky 3794 82 0 148 loss 0 0 0 1136 1 -1 +6 76 safe 2515 36 0 89 loss 0 -36 2 1383 0 -1 +6 77 risky 265 117 435 0 gain 0 0 2 1383 1 1 +6 78 risky 6888 57 0 164 loss 0 -164 2 1383 1 -1 +6 79 safe 2396 0 57 63 mixed 0 0 -1 1026 0 0 +6 80 risky 1278 0 297 121 mixed 0 -121 -1 1026 1 0 +6 81 risky 5505 0 296 297 mixed 0 -297 -1 1026 1 0 +6 82 risky 3784 0 61 36 mixed 0 61 0 1676 1 0 +6 83 risky 1794 77 0 136 loss 0 0 0 1676 1 -1 +6 84 risky 2456 119 0 264 loss 0 -264 1 3992 1 -1 +6 85 safe 4586 101 0 313 loss 0 -101 1 3992 0 -1 +6 86 risky 1296 96 244 0 gain 0 0 1 3992 1 1 +6 87 safe 9041 0 98 197 mixed 0 0 1 5878 0 0 +6 88 safe 1707 103 0 252 loss 0 -103 1 5878 0 -1 +6 89 safe 1719 0 99 149 mixed 0 0 1 5878 0 0 +6 90 safe 5513 36 69 0 gain 0 36 -1 1086 0 1 +6 91 safe 2391 61 105 0 gain 0 61 -1 1086 0 1 +6 92 safe 857 64 0 304 loss 0 -64 -1 1086 0 -1 +6 93 safe 5282 118 0 299 loss 0 -118 0 4927 0 -1 +6 94 risky 6335 79 181 0 gain 0 0 0 4927 1 1 +6 95 risky 2827 123 0 198 loss 0 0 -2 1551 1 -1 +6 96 risky 1180 0 159 86 mixed 0 -86 -2 1551 1 0 +6 97 risky 1852 0 56 46 mixed 0 -46 0 7386 1 0 +6 98 risky 2467 0 220 224 mixed 0 220 0 7386 1 0 +6 99 safe 3255 99 178 0 gain 0 99 2 941 0 1 +6 100 safe 1427 59 0 102 loss 0 -59 2 941 0 -1 +6 101 safe 3837 83 0 197 loss 0 -83 2 941 0 -1 +6 102 safe 1837 0 217 436 mixed 0 0 -1 843 0 0 +6 103 risky 3683 0 220 70 mixed 0 220 -1 843 1 0 +6 104 safe 747 61 104 0 gain 0 61 1 1050 0 1 +6 105 safe 2730 0 160 190 mixed 0 0 1 1050 0 0 +6 106 safe 1027 83 144 0 gain 0 83 -1 1258 0 1 +6 107 safe 1506 0 300 603 mixed 0 0 -1 1258 0 0 +6 108 safe 2397 84 161 0 gain 0 84 0 704 0 1 +6 109 safe 852 120 0 432 loss 0 -120 0 704 0 -1 +6 110 risky 1251 64 301 0 gain 0 301 -1 1932 1 1 +6 111 risky 1520 43 166 0 gain 0 166 -1 1932 1 1 +6 112 risky 5107 39 0 77 loss 0 -77 -1 1932 1 -1 +6 113 safe 1632 123 0 597 loss 0 -123 -1 1352 0 -1 +6 114 risky 3292 81 196 0 gain 0 0 -1 1352 1 1 +6 115 safe 789 0 60 119 mixed 0 0 -1 1352 0 0 +6 116 risky 164 0 297 198 mixed 0 297 -2 1108 1 0 +6 117 risky 1374 0 64 19 mixed 0 64 -2 1108 1 0 +6 118 safe 4612 119 0 334 loss 0 -119 -2 1108 0 -1 +6 119 risky 441 0 158 134 mixed 0 158 -1 5816 1 0 +6 120 risky 224 0 56 23 mixed 0 56 -1 5816 1 0 +6 121 safe 6709 62 0 151 loss 0 -62 1 992 0 -1 +6 122 risky 1360 0 161 45 mixed 0 -45 1 992 1 0 +6 123 risky 223 0 156 68 mixed 0 -68 1 992 1 0 +6 124 safe 2146 
40 0 165 loss 0 -40 -1 928 0 -1 +6 125 risky 4720 0 219 146 mixed 0 -146 -1 928 1 0 +6 126 risky 1186 0 161 102 mixed 0 161 -1 820 1 0 +6 127 safe 1117 123 201 0 gain 0 123 -1 820 0 1 +6 128 safe 3741 101 0 422 loss 0 -101 -1 820 0 -1 +6 129 risky 3362 59 0 116 loss 0 -116 -1 1713 1 -1 +6 130 risky 3963 0 216 327 mixed 0 -327 -1 1713 1 0 +6 131 risky 2562 57 146 0 gain 0 0 -1 1713 1 1 +6 132 risky 54 58 250 0 gain 0 250 -1 3406 1 1 +6 133 risky 2451 40 83 0 gain 0 0 -1 3406 1 1 +6 134 risky 56 116 340 0 gain 0 0 -1 3406 1 1 +6 135 risky 3118 124 0 244 loss 0 -244 -1 994 1 -1 +6 136 safe 1001 0 300 453 mixed 0 0 -1 994 0 0 +6 137 safe 558 77 0 225 loss 0 -77 0 548 0 -1 +6 138 risky 2346 0 100 51 mixed 0 -51 0 548 1 0 +6 139 safe 5850 0 158 242 mixed 0 0 0 548 0 0 +6 140 risky 1415 0 222 85 mixed 0 222 -1 1771 1 0 +6 141 safe 2947 36 0 129 loss 0 -36 -1 1771 0 -1 +6 142 risky 3290 119 299 0 gain 0 299 -1 1771 1 1 +6 143 safe 6930 78 130 0 gain 0 78 1 1182 0 1 +6 144 risky 5721 59 0 110 loss 0 -110 1 1182 1 -1 +6 145 risky 641 0 221 177 mixed 0 221 1 1182 1 0 +6 146 safe 1530 0 161 323 mixed 0 0 1 1031 0 0 +6 147 risky 336 0 99 30 mixed 0 -30 1 1031 1 0 +6 148 risky 190 61 133 0 gain 0 0 1 1031 1 1 +6 149 risky 26 76 255 0 gain 0 255 -2 823 1 1 +6 150 safe 1139 80 0 396 loss 0 -80 -2 823 0 -1 diff --git a/Python/hbayesdm/common/extdata/ts_exampleData.txt b/Python/hbayesdm/common/extdata/ts_exampleData.txt new file mode 100644 index 00000000..648f94b9 --- /dev/null +++ b/Python/hbayesdm/common/extdata/ts_exampleData.txt @@ -0,0 +1,2191 @@ +subjID trial level1_choice level2_choice reward A1prob A2prob B1prob B2prob +1 2 1 4 1 0.73174 0.44094 0.28525 0.42124 +1 3 1 1 1 0.72582 0.3864 0.30663 0.39319 +1 4 2 1 1 0.7296 0.41459 0.30549 0.34948 +1 5 1 3 0 0.77339 0.40618 0.31232 0.3926 +1 6 1 1 1 0.75457 0.45989 0.30146 0.39908 +1 7 1 1 1 0.799 0.47671 0.30695 0.4193 +1 8 1 3 1 0.8 0.4705 0.28921 0.43012 +1 9 1 4 1 0.8 0.4414 0.32746 0.40748 +1 10 2 4 0 0.79121 0.44951 0.34192 0.4238 +1 11 2 1 0 0.8 0.45063 0.30527 0.41502 +1 12 1 3 0 0.8 0.46023 0.30255 0.43582 +1 13 1 2 0 0.7713 0.45539 0.3145 0.41748 +1 14 2 1 1 0.77967 0.46743 0.33255 0.41147 +1 15 2 4 1 0.8 0.44997 0.33142 0.43247 +1 16 1 1 1 0.8 0.46545 0.38953 0.40187 +1 17 2 4 0 0.78989 0.43383 0.44462 0.39286 +1 18 1 1 1 0.8 0.45304 0.45707 0.41177 +1 19 1 1 1 0.8 0.46451 0.4644 0.35639 +1 20 1 1 1 0.8 0.46125 0.49334 0.33543 +1 21 1 1 0 0.8 0.49285 0.47484 0.36058 +1 22 1 4 0 0.8 0.49623 0.48841 0.34768 +1 23 2 3 1 0.77469 0.54065 0.50539 0.32396 +1 24 2 2 1 0.77481 0.58668 0.50524 0.32207 +1 25 2 3 1 0.78178 0.62035 0.46226 0.32988 +1 26 2 3 1 0.7996 0.59698 0.5076 0.37398 +1 27 2 3 0 0.8 0.61101 0.51855 0.37097 +1 28 2 4 0 0.8 0.57941 0.49362 0.33811 +1 29 1 1 1 0.75907 0.58061 0.49262 0.34061 +1 30 1 1 1 0.78157 0.60034 0.47932 0.32465 +1 31 1 4 1 0.73941 0.57595 0.41336 0.31351 +1 32 1 1 1 0.78407 0.57293 0.40238 0.31508 +1 33 1 1 1 0.7673 0.55497 0.44794 0.32404 +1 34 1 1 1 0.74815 0.57301 0.45619 0.30755 +1 35 1 1 1 0.76077 0.55076 0.45351 0.23356 +1 36 1 1 0 0.78983 0.53785 0.45 0.2218 +1 37 1 4 0 0.79931 0.53644 0.43941 0.25251 +1 38 1 2 0 0.78409 0.52744 0.44277 0.25328 +1 39 2 3 0 0.79235 0.54545 0.42458 0.28172 +1 40 2 4 1 0.7884 0.53537 0.40774 0.30555 +1 41 2 4 0 0.8 0.5217 0.44137 0.30486 +1 42 1 3 0 0.7987 0.53313 0.44258 0.29581 +1 43 1 1 0 0.75319 0.5575 0.46962 0.29889 +1 44 1 2 0 0.75826 0.57211 0.49623 0.34481 +1 45 2 4 0 0.8 0.59358 0.50784 0.33974 +1 46 1 2 1 0.8 0.58261 0.49178 0.31495 +1 47 1 2 0 0.76387 0.51143 
0.50769 0.34591 +1 48 1 3 1 0.7373 0.55849 0.4958 0.34391 +1 49 2 3 1 0.71163 0.55437 0.50188 0.37737 +1 50 2 3 1 0.7274 0.55684 0.49608 0.42051 +1 51 2 3 1 0.74133 0.51026 0.50806 0.39224 +1 52 2 3 0 0.78899 0.52159 0.53676 0.39005 +1 53 2 4 0 0.8 0.5142 0.57107 0.33701 +1 54 1 3 1 0.8 0.55215 0.56694 0.31545 +1 55 1 2 1 0.8 0.53609 0.53305 0.30683 +1 56 1 1 1 0.8 0.51736 0.51624 0.29661 +1 57 1 1 1 0.8 0.55649 0.57046 0.30073 +1 58 1 4 0 0.77863 0.54926 0.57542 0.31415 +1 59 1 3 0 0.78765 0.57095 0.5805 0.28316 +1 60 1 1 0 0.7736 0.54228 0.58221 0.23798 +1 61 1 2 0 0.8 0.55273 0.52453 0.2241 +1 62 2 2 1 0.77377 0.54429 0.52093 0.24853 +1 63 1 2 1 0.8 0.53118 0.48452 0.22815 +1 64 1 3 0 0.8 0.5621 0.5142 0.24439 +1 65 1 3 0 0.8 0.58121 0.52545 0.24843 +1 66 1 2 1 0.8 0.59505 0.53803 0.23704 +1 67 2 4 0 0.8 0.61952 0.54213 0.20897 +1 68 1 2 1 0.8 0.5983 0.5531 0.24432 +1 69 1 2 1 0.78218 0.65305 0.57632 0.26855 +1 70 1 2 1 0.74435 0.68187 0.58155 0.30696 +1 71 1 2 1 0.75476 0.68078 0.57166 0.31697 +1 72 1 2 0 0.7518 0.67198 0.59557 0.30499 +1 73 1 1 1 0.77418 0.6968 0.58319 0.32965 +1 74 1 1 1 0.74976 0.71575 0.64715 0.2999 +1 75 1 4 1 0.76123 0.70332 0.63275 0.30766 +1 76 2 4 0 0.75946 0.70432 0.61657 0.30659 +1 77 2 1 1 0.8 0.69223 0.64135 0.32633 +1 78 1 1 1 0.8 0.67848 0.62949 0.29921 +1 79 1 1 0 0.76968 0.66689 0.64594 0.31559 +1 80 1 4 0 0.767 0.66963 0.62129 0.32788 +1 81 2 3 1 0.75012 0.63656 0.60248 0.34237 +1 82 2 3 1 0.7351 0.68337 0.63189 0.30771 +1 83 2 3 1 0.74526 0.67142 0.6594 0.30594 +1 84 2 1 1 0.76226 0.68819 0.6318 0.27628 +1 85 1 1 1 0.7758 0.73023 0.58491 0.29002 +1 86 1 1 1 0.77074 0.74821 0.58291 0.28925 +1 87 1 1 1 0.77089 0.79434 0.57504 0.32894 +1 88 1 1 1 0.74567 0.8 0.55285 0.30923 +1 89 1 1 1 0.7727 0.8 0.59163 0.31176 +1 90 1 1 1 0.79157 0.8 0.5741 0.33049 +1 91 1 1 1 0.8 0.8 0.56745 0.33548 +1 92 1 3 0 0.8 0.77512 0.59173 0.36604 +1 93 1 1 0 0.77964 0.77689 0.65552 0.29529 +1 94 1 4 0 0.72323 0.77346 0.68053 0.28964 +1 95 1 4 0 0.7587 0.79182 0.68303 0.28661 +1 96 1 2 1 0.76904 0.78153 0.69918 0.25219 +1 97 1 2 1 0.77612 0.8 0.7122 0.27558 +1 98 1 4 1 0.79077 0.79734 0.71788 0.28339 +1 99 2 4 1 0.76885 0.778 0.73227 0.29194 +1 100 2 2 0 0.72235 0.76099 0.72207 0.28469 +1 101 2 1 1 0.75343 0.75863 0.68128 0.29834 +1 102 1 1 1 0.77836 0.75896 0.6992 0.29074 +1 103 1 1 0 0.76782 0.74809 0.67502 0.27929 +1 104 2 4 0 0.76299 0.79317 0.66158 0.31297 +1 105 2 3 1 0.76924 0.8 0.64813 0.30434 +1 106 2 1 0 0.79236 0.76987 0.63234 0.29248 +1 107 2 3 0 0.76225 0.74234 0.62737 0.34844 +1 108 1 2 1 0.75963 0.71965 0.63631 0.31392 +1 109 1 2 0 0.78157 0.65906 0.63594 0.29344 +1 110 1 4 0 0.8 0.6691 0.63189 0.33999 +1 111 2 3 1 0.76426 0.64471 0.60207 0.27577 +1 112 2 2 1 0.74667 0.66462 0.62046 0.26335 +1 113 1 3 1 0.78458 0.63884 0.64195 0.27218 +1 114 2 3 0 0.79243 0.63824 0.63688 0.27592 +1 115 1 2 1 0.79322 0.65028 0.62034 0.25584 +1 116 1 4 1 0.79914 0.66745 0.60886 0.25548 +1 117 2 3 1 0.79739 0.61932 0.61802 0.28086 +1 118 2 4 0 0.79022 0.61075 0.61969 0.26407 +1 119 2 3 1 0.8 0.62074 0.62673 0.27659 +1 120 2 1 1 0.8 0.62032 0.57944 0.28841 +1 121 1 4 1 0.79253 0.61165 0.555 0.26186 +1 122 2 4 0 0.8 0.62946 0.54182 0.25526 +1 123 1 3 1 0.79597 0.60834 0.5357 0.2 +1 124 2 3 0 0.78078 0.60309 0.55323 0.22367 +1 125 1 1 1 0.78059 0.59006 0.5389 0.20545 +1 126 1 1 1 0.7415 0.5477 0.53843 0.2 +1 127 1 1 1 0.72498 0.55081 0.54774 0.2 +1 128 1 3 0 0.7273 0.53482 0.54397 0.23411 +1 129 1 1 1 0.6983 0.53396 0.57112 0.26527 +1 130 1 3 0 0.67184 0.55217 0.54923 0.26093 +1 131 
1 1 1 0.64299 0.4833 0.56131 0.27607 +1 132 1 4 0 0.64678 0.48409 0.55659 0.26744 +1 133 1 1 1 0.66958 0.48672 0.55672 0.28704 +1 134 1 4 0 0.71353 0.43812 0.54296 0.26765 +1 135 1 1 1 0.72913 0.45831 0.55595 0.26157 +1 136 1 3 1 0.71214 0.40894 0.57912 0.27759 +1 137 2 3 1 0.72246 0.3716 0.5666 0.25731 +1 138 2 3 0 0.70016 0.33562 0.53811 0.26686 +1 139 1 1 0 0.68348 0.29021 0.5032 0.2907 +1 141 1 2 1 0.70413 0.24533 0.53268 0.31855 +1 142 1 2 0 0.74585 0.23758 0.54789 0.32516 +1 143 2 3 0 0.75878 0.20683 0.54172 0.32643 +1 144 1 1 1 0.75508 0.2 0.54123 0.33066 +1 145 1 1 1 0.75405 0.2 0.50283 0.33762 +1 146 1 4 0 0.72616 0.21818 0.51489 0.34734 +1 147 1 1 1 0.72165 0.2146 0.52902 0.33863 +1 148 1 1 1 0.76338 0.22901 0.53995 0.32508 +1 149 1 3 0 0.8 0.24977 0.55147 0.34688 +1 150 1 4 1 0.8 0.22491 0.55515 0.38301 +1 151 2 4 0 0.76821 0.26234 0.54065 0.37305 +1 152 2 3 0 0.77307 0.22488 0.58349 0.37869 +1 153 1 1 1 0.77173 0.21431 0.53551 0.42413 +1 154 1 1 0 0.75927 0.20014 0.50704 0.42257 +1 155 1 3 0 0.75921 0.21264 0.50199 0.38167 +1 156 1 2 0 0.74445 0.22054 0.51196 0.33042 +1 157 2 1 0 0.72395 0.21222 0.48676 0.33988 +1 158 2 1 0 0.71999 0.2298 0.51039 0.3507 +1 159 2 4 1 0.72939 0.2308 0.54111 0.32357 +1 160 2 2 0 0.69386 0.21052 0.54663 0.27117 +1 161 2 4 1 0.69174 0.2 0.53472 0.28176 +1 162 2 4 0 0.71402 0.2 0.59491 0.26687 +1 163 2 3 0 0.71077 0.2 0.5787 0.29751 +1 164 1 4 0 0.70963 0.2 0.60455 0.28655 +1 165 1 1 1 0.73785 0.2 0.60482 0.27845 +1 166 1 1 1 0.75026 0.2 0.60278 0.29223 +1 167 1 1 1 0.78057 0.2 0.59516 0.29242 +1 168 1 1 1 0.7938 0.20923 0.53569 0.27625 +1 169 1 4 0 0.77124 0.25164 0.47943 0.29059 +1 170 1 3 0 0.77023 0.2788 0.50377 0.25799 +1 171 1 1 1 0.76646 0.27905 0.51914 0.26122 +1 172 1 1 1 0.74042 0.24415 0.5069 0.27107 +1 173 1 1 1 0.73021 0.27041 0.4785 0.26917 +1 174 1 4 0 0.71286 0.28303 0.4701 0.29255 +1 175 1 1 0 0.67608 0.30914 0.48553 0.27482 +1 176 1 2 0 0.72568 0.28528 0.46698 0.28983 +1 177 2 3 0 0.75068 0.32288 0.51553 0.32661 +1 178 1 3 1 0.68976 0.33437 0.57487 0.30929 +1 179 2 3 1 0.63552 0.32788 0.56683 0.28999 +1 180 2 3 0 0.65651 0.29706 0.64643 0.32216 +1 181 1 1 0 0.63992 0.28636 0.65593 0.30065 +1 182 1 1 1 0.63118 0.29203 0.61181 0.24868 +1 183 1 1 0 0.61433 0.30691 0.58943 0.26967 +1 184 1 1 1 0.64362 0.28234 0.59775 0.25273 +1 185 2 4 0 0.65589 0.2 0.63046 0.22552 +1 186 2 4 0 0.64753 0.21033 0.62343 0.23167 +1 187 2 3 1 0.6708 0.23303 0.58866 0.24963 +1 188 2 3 1 0.68793 0.2 0.59113 0.30878 +1 189 2 3 1 0.70132 0.2 0.57037 0.30299 +1 191 1 4 1 0.70615 0.23807 0.57935 0.30751 +1 192 2 4 0 0.69038 0.24958 0.56007 0.27807 +1 193 2 4 0 0.72402 0.24868 0.58419 0.29444 +1 194 1 1 1 0.74722 0.22597 0.57091 0.27845 +1 195 1 1 1 0.77007 0.25026 0.59727 0.26951 +1 196 1 3 0 0.75861 0.24017 0.58072 0.24954 +1 197 2 4 0 0.74568 0.2 0.58408 0.24979 +1 198 1 1 1 0.78681 0.21341 0.56264 0.20372 +1 199 1 1 1 0.7694 0.24506 0.54298 0.2 +1 200 1 1 1 0.8 0.22759 0.49432 0.2 +1 201 1 1 0 0.8 0.22705 0.48005 0.2179 +2 1 2 1 1 0.24366 0.21338 0.7897 0.36247 +2 3 2 1 0 0.24195 0.22465 0.7635 0.37649 +2 4 2 2 1 0.24137 0.22427 0.79877 0.3744 +2 5 2 2 0 0.24103 0.2 0.8 0.38687 +2 6 2 3 1 0.2 0.2 0.79295 0.35462 +2 7 2 3 1 0.21009 0.22935 0.79064 0.34995 +2 8 2 2 0 0.2 0.25825 0.79677 0.32497 +2 9 2 1 0 0.2 0.27439 0.77263 0.31415 +2 10 2 2 0 0.25693 0.28699 0.8 0.35165 +2 11 2 3 1 0.23686 0.27897 0.8 0.33176 +2 12 2 3 0 0.2 0.29644 0.78883 0.34925 +2 13 2 2 0 0.21085 0.29313 0.78698 0.38282 +2 14 2 4 0 0.20371 0.30914 0.78273 0.39991 +2 15 2 1 0 0.2 0.27436 
0.79031 0.37668 +2 16 2 1 0 0.2 0.31162 0.783 0.38107 +2 17 2 4 1 0.2 0.33142 0.78508 0.39967 +2 18 2 3 1 0.20132 0.3441 0.79349 0.41119 +2 19 2 3 1 0.2 0.2921 0.7947 0.39435 +2 20 2 3 0 0.2 0.28001 0.8 0.38265 +2 21 2 3 0 0.23446 0.29161 0.7848 0.40374 +2 22 2 1 1 0.24324 0.30684 0.78655 0.36654 +2 23 1 3 1 0.25357 0.28896 0.8 0.36812 +2 24 1 2 0 0.247 0.31968 0.778 0.39979 +2 25 2 1 0 0.26191 0.29039 0.78188 0.42514 +2 26 2 4 0 0.24009 0.26705 0.77572 0.43339 +2 27 1 2 0 0.23637 0.27463 0.8 0.44448 +2 28 2 3 1 0.2 0.26527 0.79768 0.43536 +2 29 2 3 0 0.2 0.2249 0.8 0.45377 +2 30 2 1 0 0.27119 0.24548 0.77507 0.47467 +2 31 1 2 0 0.25741 0.25583 0.8 0.43019 +2 32 1 1 0 0.25833 0.25345 0.7833 0.45546 +2 33 1 2 1 0.29274 0.2548 0.75592 0.48444 +2 34 1 2 0 0.24411 0.2674 0.69707 0.50089 +2 35 2 1 0 0.25087 0.29031 0.69606 0.51711 +2 36 1 1 1 0.29422 0.24655 0.7281 0.55837 +2 37 2 2 0 0.28983 0.24619 0.73075 0.64885 +2 38 1 1 0 0.28961 0.22933 0.76907 0.64365 +2 39 2 3 1 0.32305 0.2115 0.72785 0.66863 +2 40 2 3 1 0.32795 0.21391 0.75703 0.68245 +2 41 2 3 1 0.33668 0.2 0.8 0.69042 +2 42 1 4 1 0.32341 0.2 0.7744 0.76419 +2 43 2 3 1 0.2924 0.2 0.77229 0.77877 +2 44 2 2 0 0.29488 0.21148 0.8 0.77328 +2 45 2 3 1 0.32204 0.25048 0.77766 0.8 +2 46 2 2 0 0.29959 0.27915 0.78361 0.8 +2 47 2 2 1 0.30354 0.35484 0.77031 0.77172 +2 48 2 4 1 0.32089 0.33943 0.76879 0.8 +2 49 2 4 1 0.31639 0.30386 0.71735 0.8 +2 50 2 4 0 0.32926 0.34595 0.68555 0.7724 +2 51 1 1 0 0.29947 0.30318 0.6959 0.78212 +2 52 2 1 0 0.29323 0.29421 0.69798 0.8 +2 53 2 4 1 0.31145 0.28711 0.67731 0.8 +2 54 2 4 1 0.35715 0.26453 0.66623 0.8 +2 55 2 4 1 0.34242 0.25018 0.65922 0.76883 +2 56 2 4 1 0.34459 0.25371 0.68819 0.76716 +2 57 2 2 0 0.39018 0.26396 0.63748 0.78614 +2 58 1 1 1 0.3358 0.23748 0.60919 0.8 +2 59 1 1 0 0.31958 0.21064 0.63817 0.8 +2 60 1 2 0 0.29338 0.2 0.68027 0.79001 +2 61 2 1 1 0.27116 0.2 0.709 0.8 +2 62 2 3 0 0.25717 0.2 0.69624 0.77628 +2 63 2 4 1 0.27483 0.2 0.66719 0.75931 +2 64 2 4 1 0.23855 0.2 0.61004 0.74309 +2 65 2 2 1 0.21736 0.2 0.65247 0.77225 +2 66 1 1 0 0.25099 0.2 0.70211 0.74655 +2 67 2 4 1 0.2702 0.2 0.71121 0.7433 +2 68 2 2 0 0.27338 0.2358 0.65203 0.71806 +2 69 2 4 1 0.2925 0.2 0.65285 0.72883 +2 70 2 2 0 0.31246 0.22217 0.65929 0.75781 +2 71 2 1 0 0.32305 0.2 0.66168 0.75266 +2 72 2 1 0 0.28378 0.2 0.65774 0.78056 +2 73 2 2 0 0.26524 0.20141 0.59448 0.77223 +2 74 1 4 0 0.27387 0.2 0.57972 0.76982 +2 75 1 1 1 0.33482 0.2 0.5624 0.8 +2 76 1 3 1 0.30843 0.22087 0.52495 0.77129 +2 77 2 4 1 0.29104 0.24487 0.53711 0.7695 +2 78 1 2 0 0.26102 0.24152 0.50456 0.77789 +2 79 2 4 1 0.2445 0.24204 0.50356 0.75557 +2 80 2 1 1 0.26642 0.23341 0.50453 0.72099 +2 81 2 4 1 0.27563 0.23117 0.51365 0.73239 +2 82 2 2 0 0.24556 0.23887 0.49212 0.76062 +2 83 1 2 0 0.21118 0.22106 0.54552 0.79201 +2 84 1 2 1 0.2 0.26054 0.52037 0.79404 +2 85 2 4 1 0.23536 0.24661 0.57319 0.8 +2 86 2 2 0 0.23971 0.21726 0.60673 0.7575 +2 87 2 4 1 0.27447 0.21378 0.58475 0.7807 +2 88 2 1 0 0.23447 0.22887 0.53945 0.8 +2 89 2 4 1 0.23122 0.2 0.56969 0.8 +2 90 2 4 1 0.21434 0.2 0.58063 0.8 +2 91 2 4 1 0.20412 0.2 0.5776 0.77905 +2 92 2 1 0 0.23715 0.20107 0.59502 0.78801 +2 93 1 2 0 0.2 0.20172 0.56694 0.8 +2 94 2 4 1 0.2 0.23888 0.56918 0.8 +2 95 2 1 0 0.2 0.22836 0.54608 0.79578 +2 96 2 4 1 0.21792 0.22493 0.55862 0.8 +2 97 2 4 0 0.25765 0.26661 0.57298 0.76303 +2 98 2 1 1 0.25462 0.26054 0.58158 0.76424 +2 99 2 4 1 0.25058 0.2355 0.56115 0.77487 +2 100 2 4 1 0.2352 0.2 0.57613 0.77472 +2 101 2 4 0 0.24936 0.20905 0.55364 0.75352 +2 102 
2 1 0 0.2433 0.2 0.55993 0.78065 +2 103 2 2 0 0.25461 0.23537 0.58316 0.7884 +2 104 2 2 0 0.25684 0.24005 0.54965 0.7952 +2 105 1 1 0 0.29907 0.2506 0.55251 0.8 +2 106 1 2 1 0.26851 0.2435 0.54227 0.8 +2 107 1 4 1 0.24851 0.22888 0.55616 0.79765 +2 108 2 4 1 0.26537 0.25165 0.56028 0.77126 +2 109 2 1 0 0.26116 0.25402 0.55846 0.73255 +2 110 2 4 0 0.2603 0.24673 0.58361 0.7276 +2 111 2 2 0 0.28591 0.22322 0.64084 0.7201 +2 112 1 4 0 0.26526 0.20484 0.6863 0.712 +2 113 1 4 1 0.26692 0.2 0.70522 0.72084 +2 114 2 4 1 0.27249 0.21392 0.68892 0.72746 +2 115 2 1 0 0.22902 0.20045 0.74818 0.71253 +2 116 2 4 1 0.2353 0.2 0.77855 0.69805 +2 117 2 4 1 0.20838 0.2 0.78606 0.68928 +2 118 2 1 0 0.20182 0.20659 0.79165 0.67785 +2 119 2 4 1 0.21032 0.247 0.77601 0.74302 +2 120 2 4 1 0.20034 0.25251 0.8 0.70396 +2 121 2 4 1 0.2 0.24629 0.79537 0.68448 +2 122 2 4 1 0.21398 0.29466 0.75251 0.66879 +2 123 2 1 0 0.2 0.31706 0.76204 0.6732 +2 124 2 4 1 0.2 0.30489 0.7534 0.71219 +2 125 2 4 1 0.2 0.32492 0.76137 0.71172 +2 126 2 1 0 0.2 0.35076 0.7997 0.71048 +2 127 2 4 0 0.20503 0.31678 0.79524 0.70346 +2 128 2 4 1 0.20516 0.29861 0.76553 0.69496 +2 129 2 4 1 0.22588 0.30163 0.7683 0.72198 +2 130 2 2 0 0.21011 0.32075 0.77334 0.72815 +2 131 2 4 1 0.21068 0.30684 0.76088 0.73397 +2 132 2 4 1 0.2087 0.30048 0.79883 0.74999 +2 133 2 4 1 0.22202 0.30679 0.8 0.7297 +2 134 2 4 0 0.20441 0.28039 0.77104 0.6871 +2 135 2 4 0 0.2029 0.26801 0.75639 0.66139 +2 136 2 4 0 0.20636 0.2252 0.741 0.63109 +2 137 2 1 0 0.24226 0.2 0.78649 0.65203 +2 138 2 4 1 0.25766 0.2 0.7582 0.643 +2 139 1 4 0 0.29617 0.2 0.7412 0.59132 +2 140 2 4 1 0.30146 0.2 0.76005 0.61217 +2 141 2 4 0 0.27104 0.2159 0.75701 0.60006 +2 142 2 4 0 0.26798 0.24948 0.7371 0.61118 +2 143 2 4 0 0.25651 0.23851 0.73358 0.60815 +2 144 2 1 0 0.26757 0.27016 0.72062 0.64522 +2 145 2 4 1 0.28294 0.2391 0.75141 0.62282 +2 146 2 1 0 0.28259 0.23563 0.69756 0.61478 +2 147 2 4 1 0.2582 0.24803 0.70625 0.58711 +2 148 2 2 1 0.28571 0.26536 0.70991 0.60658 +2 149 2 2 0 0.29377 0.23557 0.72483 0.59885 +2 150 2 4 0 0.3194 0.25725 0.74524 0.59905 +2 151 2 4 0 0.30979 0.2444 0.74963 0.58005 +2 152 2 4 1 0.35056 0.22948 0.73684 0.58931 +2 153 2 4 0 0.34977 0.22911 0.72578 0.58484 +2 154 2 4 1 0.34519 0.21168 0.71921 0.60472 +2 155 2 4 1 0.36661 0.23326 0.72028 0.57828 +2 156 2 3 0 0.40117 0.25436 0.71302 0.56412 +2 157 2 4 1 0.40102 0.27823 0.66922 0.56995 +2 158 2 3 1 0.35642 0.26836 0.67426 0.55094 +2 159 1 1 1 0.37148 0.29016 0.67501 0.51965 +2 160 2 1 1 0.3358 0.24635 0.66468 0.50215 +2 161 1 4 0 0.35501 0.24552 0.69507 0.50197 +2 162 2 3 0 0.31346 0.23161 0.66735 0.51181 +2 163 1 2 1 0.30964 0.232 0.6475 0.53865 +2 164 2 3 0 0.30373 0.22914 0.62935 0.55306 +2 165 2 3 1 0.31736 0.22369 0.62071 0.54398 +2 166 2 4 0 0.30014 0.25322 0.61517 0.55492 +2 167 2 4 1 0.34385 0.2456 0.58311 0.5534 +2 168 2 4 0 0.3473 0.2477 0.58684 0.57142 +2 169 2 4 0 0.34401 0.27733 0.59587 0.55711 +2 170 1 1 1 0.33799 0.29646 0.62267 0.58141 +2 171 2 2 0 0.36342 0.31122 0.63888 0.60783 +2 172 2 2 0 0.34621 0.32128 0.63943 0.54333 +2 173 2 4 0 0.32895 0.34686 0.68134 0.49852 +2 174 1 1 1 0.37522 0.31644 0.61196 0.4386 +2 175 2 4 0 0.39076 0.33159 0.65 0.44614 +2 176 2 2 0 0.4096 0.34605 0.68745 0.44148 +2 177 2 2 0 0.46425 0.33531 0.66985 0.44431 +2 178 2 4 0 0.48127 0.34427 0.65921 0.43196 +2 179 2 4 0 0.46951 0.32875 0.66862 0.42214 +2 180 2 4 0 0.45978 0.3009 0.65382 0.42035 +2 181 2 1 1 0.46639 0.31441 0.66291 0.41407 +2 182 2 4 0 0.49453 0.3332 0.6395 0.40546 +2 183 2 2 0 0.48048 0.32783 0.637 
0.39346 +2 184 1 1 0 0.50093 0.33951 0.60778 0.42871 +2 185 1 4 1 0.47675 0.33238 0.61487 0.43485 +2 186 2 4 0 0.46652 0.35543 0.62031 0.40333 +2 187 1 4 1 0.50299 0.34544 0.60978 0.38389 +2 188 1 1 1 0.51908 0.35843 0.61294 0.38385 +2 189 2 4 1 0.56691 0.37283 0.60469 0.39722 +2 190 2 4 0 0.57641 0.40698 0.65272 0.40517 +2 191 1 1 0 0.61806 0.40434 0.62457 0.38315 +2 192 2 4 1 0.6387 0.43436 0.59972 0.37162 +2 193 2 4 0 0.6537 0.47132 0.56371 0.36873 +2 194 1 4 1 0.64354 0.44272 0.53871 0.37205 +2 195 1 1 0 0.68281 0.4423 0.53232 0.37961 +2 196 1 1 0 0.68423 0.48885 0.52515 0.38681 +2 197 2 4 0 0.69172 0.49761 0.51816 0.37109 +2 198 1 4 0 0.68823 0.49309 0.51419 0.36965 +2 199 1 2 1 0.68377 0.4935 0.50005 0.35935 +2 200 2 4 0 0.67325 0.48124 0.48284 0.34656 +2 201 2 4 1 0.68844 0.47268 0.52266 0.36539 +3 1 1 4 1 0.66883 0.37325 0.76919 0.69293 +3 3 1 2 0 0.67015 0.3856 0.76941 0.72175 +3 4 2 4 1 0.65867 0.38996 0.73512 0.76353 +3 5 2 4 1 0.61271 0.4136 0.70859 0.77052 +3 6 2 4 0 0.61433 0.42465 0.70933 0.8 +3 7 2 4 1 0.5804 0.39622 0.69341 0.8 +3 8 2 4 0 0.51841 0.38227 0.73289 0.8 +3 9 1 2 0 0.53659 0.3558 0.74592 0.8 +3 10 1 4 1 0.52065 0.38466 0.78221 0.8 +3 11 1 1 0 0.5127 0.37854 0.7661 0.78401 +3 12 2 4 1 0.49501 0.43971 0.7905 0.7796 +3 13 2 4 1 0.49142 0.46183 0.74579 0.78366 +3 14 2 4 1 0.49081 0.46637 0.74794 0.77315 +3 15 2 2 1 0.50132 0.47586 0.74207 0.8 +3 16 2 4 0 0.56473 0.46072 0.79825 0.79796 +3 17 1 4 0 0.54207 0.46664 0.8 0.74878 +3 18 1 2 0 0.58164 0.44106 0.79297 0.72317 +3 19 2 4 1 0.59149 0.45774 0.79293 0.76953 +3 20 2 4 1 0.61672 0.45676 0.77379 0.79815 +3 21 2 2 1 0.62121 0.44059 0.76258 0.8 +3 22 2 2 1 0.63551 0.4599 0.75005 0.76542 +3 23 2 4 1 0.63114 0.46266 0.75579 0.8 +3 24 2 4 0 0.61963 0.49526 0.7527 0.79561 +3 25 2 2 0 0.57841 0.49419 0.72627 0.8 +3 26 2 2 1 0.56152 0.52013 0.78467 0.8 +3 27 2 3 1 0.57869 0.51671 0.78265 0.8 +3 28 2 3 1 0.56639 0.50541 0.75377 0.76181 +3 29 2 2 1 0.53607 0.53711 0.73607 0.77493 +3 30 2 3 1 0.50742 0.57529 0.74619 0.74033 +3 31 2 3 0 0.49056 0.52378 0.73618 0.71541 +3 32 1 3 1 0.4976 0.52854 0.72432 0.70405 +3 33 1 2 1 0.53769 0.5559 0.68652 0.70031 +3 34 1 2 0 0.53378 0.54603 0.67969 0.70818 +3 35 2 3 1 0.52224 0.53683 0.70012 0.73016 +3 36 2 3 0 0.54336 0.51652 0.69302 0.7253 +3 37 1 1 0 0.50921 0.56155 0.67768 0.72735 +3 38 1 2 0 0.52346 0.5659 0.67873 0.73461 +3 39 2 2 1 0.56296 0.54234 0.64272 0.72261 +3 40 2 4 1 0.57085 0.5206 0.67906 0.72352 +3 41 2 4 1 0.58499 0.53196 0.69191 0.72011 +3 42 2 2 0 0.57616 0.51196 0.674 0.74266 +3 43 2 2 0 0.576 0.53392 0.65332 0.75823 +3 44 1 1 1 0.57044 0.52995 0.61126 0.7968 +3 45 1 1 1 0.60101 0.54231 0.60942 0.78605 +3 46 1 1 0 0.57728 0.55258 0.59843 0.8 +3 47 2 1 0 0.55056 0.54806 0.56974 0.8 +3 48 2 4 1 0.55445 0.59867 0.58828 0.7958 +3 49 2 4 0 0.56397 0.57727 0.55507 0.7543 +3 50 2 3 1 0.57406 0.59639 0.54868 0.76199 +3 51 2 3 1 0.5561 0.59867 0.58165 0.75913 +3 52 2 1 1 0.48821 0.63845 0.58467 0.79374 +3 53 1 2 0 0.47204 0.62393 0.60018 0.75774 +3 54 2 4 0 0.48959 0.6457 0.62181 0.73965 +3 55 1 3 1 0.52759 0.60195 0.61241 0.70988 +3 56 1 3 1 0.52772 0.62054 0.57173 0.7234 +3 57 1 1 0 0.50986 0.59709 0.54509 0.73144 +3 58 1 3 0 0.46038 0.60037 0.52496 0.75924 +3 59 2 3 0 0.4769 0.6381 0.50502 0.73557 +3 60 1 1 1 0.4638 0.63734 0.53088 0.73204 +3 61 1 1 0 0.44397 0.62479 0.55098 0.7317 +3 62 1 1 1 0.45771 0.64205 0.56085 0.7122 +3 63 1 3 1 0.41829 0.61723 0.53791 0.65224 +3 64 1 1 0 0.44906 0.58146 0.55191 0.66344 +3 65 2 3 1 0.47217 0.57877 0.525 0.6597 +3 66 2 1 1 0.48396 
0.57911 0.48678 0.66715 +3 67 2 3 0 0.48087 0.55254 0.46851 0.68836 +3 68 2 1 0 0.40167 0.54104 0.40646 0.67455 +3 69 1 2 1 0.41253 0.55343 0.41672 0.65517 +3 70 1 2 1 0.42959 0.59563 0.41995 0.68402 +3 71 1 2 1 0.43857 0.59709 0.42729 0.70901 +3 72 1 2 1 0.44418 0.62363 0.4246 0.71959 +3 73 2 4 1 0.4767 0.64183 0.38548 0.78097 +3 74 1 2 1 0.5276 0.62719 0.3852 0.8 +3 75 1 2 1 0.49319 0.624 0.35591 0.8 +3 76 2 4 1 0.54732 0.5904 0.29778 0.8 +3 77 2 4 1 0.54944 0.58123 0.32742 0.77967 +3 78 1 2 1 0.56733 0.53663 0.30483 0.8 +3 79 1 2 0 0.57654 0.53186 0.30929 0.76943 +3 80 2 2 1 0.59232 0.54615 0.32875 0.77195 +3 81 2 4 1 0.6407 0.52331 0.29697 0.8 +3 82 2 4 1 0.63453 0.50234 0.2913 0.76079 +3 83 1 2 1 0.63164 0.53699 0.30748 0.77895 +3 84 1 2 0 0.6282 0.5123 0.30934 0.77445 +3 85 2 4 1 0.60935 0.49884 0.33065 0.74279 +3 86 2 2 0 0.61729 0.54562 0.34929 0.74988 +3 87 1 4 1 0.63495 0.52927 0.31141 0.73159 +3 88 1 1 1 0.6246 0.52432 0.34703 0.73015 +3 89 1 1 1 0.64368 0.48815 0.27377 0.73239 +3 90 2 1 0 0.59542 0.45566 0.26969 0.72239 +3 91 2 1 0 0.59224 0.4519 0.27504 0.69281 +3 92 2 4 1 0.59509 0.5055 0.24022 0.66945 +3 93 2 4 1 0.64672 0.53689 0.22287 0.66914 +3 94 2 4 1 0.63177 0.54698 0.21258 0.68408 +3 95 1 2 0 0.67391 0.57384 0.2072 0.68711 +3 96 2 4 1 0.66292 0.52497 0.2 0.70323 +3 97 2 2 1 0.6416 0.53087 0.20378 0.6965 +3 98 2 4 1 0.60641 0.50909 0.26903 0.6806 +3 99 1 2 1 0.6134 0.48996 0.27622 0.70435 +3 100 2 2 1 0.62637 0.48483 0.31202 0.73029 +3 101 2 4 1 0.58895 0.44496 0.3198 0.72504 +3 102 1 4 1 0.59891 0.50268 0.29841 0.72913 +3 103 1 2 0 0.63238 0.5181 0.2929 0.73254 +3 104 2 2 1 0.64532 0.51598 0.29077 0.79193 +3 105 2 2 0 0.64278 0.47902 0.28531 0.79905 +3 106 2 4 1 0.62308 0.49617 0.30022 0.8 +3 107 2 1 0 0.66055 0.47591 0.30855 0.78427 +3 108 2 4 1 0.66069 0.49633 0.31414 0.8 +3 109 2 4 1 0.66532 0.51261 0.33326 0.8 +3 110 2 1 1 0.66496 0.51259 0.30694 0.79976 +3 111 2 4 1 0.63477 0.50855 0.35965 0.8 +3 112 2 4 0 0.64212 0.47413 0.32055 0.7694 +3 113 2 4 1 0.60057 0.42494 0.35101 0.77125 +3 114 1 1 1 0.56903 0.38249 0.35041 0.76236 +3 115 1 1 1 0.6015 0.39316 0.36371 0.77496 +3 116 1 1 1 0.60273 0.42415 0.42261 0.77538 +3 117 1 4 1 0.64753 0.36608 0.46082 0.74709 +3 118 2 4 1 0.64442 0.40509 0.48388 0.71915 +3 119 1 4 1 0.65391 0.42951 0.48458 0.7488 +3 120 1 1 1 0.68116 0.4308 0.49861 0.71676 +3 121 1 1 0 0.65563 0.46113 0.47371 0.72506 +3 122 2 1 1 0.69349 0.49043 0.47868 0.73556 +3 123 2 4 1 0.66198 0.48623 0.51209 0.74302 +3 124 2 1 0 0.62501 0.50053 0.52244 0.73455 +3 125 2 4 1 0.65673 0.44638 0.51138 0.75814 +3 126 2 2 1 0.64113 0.45613 0.4999 0.7822 +3 127 2 2 1 0.61183 0.47796 0.47914 0.78129 +3 128 2 4 0 0.62885 0.48371 0.46325 0.76828 +3 129 1 2 0 0.65825 0.46961 0.48531 0.7496 +3 130 1 3 0 0.6435 0.48994 0.53024 0.72654 +3 131 1 2 1 0.66244 0.51286 0.52535 0.7488 +3 132 1 2 0 0.68476 0.54099 0.51799 0.7379 +3 133 2 4 1 0.68301 0.55496 0.51328 0.74206 +3 134 2 2 0 0.67316 0.55361 0.48301 0.75786 +3 135 2 2 1 0.67376 0.53684 0.49156 0.76391 +3 136 2 4 0 0.70431 0.5375 0.49248 0.72144 +3 137 2 2 0 0.73911 0.51031 0.50981 0.69143 +3 138 2 3 0 0.73501 0.54236 0.48455 0.65323 +3 139 1 2 1 0.70711 0.53633 0.51912 0.68392 +3 140 1 2 0 0.68128 0.55276 0.48967 0.66202 +3 141 1 2 1 0.66796 0.51312 0.48063 0.67974 +3 142 1 2 0 0.68706 0.52262 0.45528 0.69269 +3 143 2 4 1 0.67081 0.50414 0.40634 0.68221 +3 144 2 2 1 0.60688 0.4994 0.38689 0.68965 +3 145 2 4 1 0.64122 0.47853 0.39266 0.71406 +3 146 2 4 1 0.66933 0.47368 0.37491 0.69829 +3 147 2 2 0 0.6751 0.52406 0.38091 
0.70497 +3 148 2 4 1 0.66144 0.49961 0.37475 0.69052 +3 149 2 4 0 0.63735 0.5103 0.36973 0.69204 +3 150 1 2 1 0.63358 0.48455 0.37815 0.68046 +3 151 1 4 1 0.65883 0.47061 0.3947 0.65703 +3 152 1 2 1 0.6302 0.50495 0.39799 0.65565 +3 153 2 4 1 0.62789 0.48344 0.39312 0.63916 +3 154 1 2 1 0.6335 0.46165 0.41299 0.64529 +3 155 1 2 1 0.64593 0.46122 0.38794 0.66622 +3 156 1 2 0 0.646 0.44097 0.3853 0.6999 +3 157 1 2 0 0.63902 0.45708 0.35352 0.70509 +3 158 2 4 1 0.66877 0.4357 0.31695 0.71684 +3 159 2 4 1 0.66383 0.44026 0.28375 0.73352 +3 160 2 4 1 0.6475 0.43008 0.26323 0.68252 +3 161 2 4 1 0.62258 0.43133 0.24392 0.69062 +3 162 2 1 1 0.65065 0.46271 0.22707 0.71892 +3 163 2 4 1 0.60723 0.44933 0.2092 0.71241 +3 164 1 4 1 0.59875 0.43997 0.21956 0.6914 +3 165 1 1 0 0.55818 0.40711 0.2 0.72182 +3 166 1 1 1 0.60092 0.38929 0.25299 0.74315 +3 167 1 1 1 0.6077 0.36729 0.2275 0.74274 +3 168 1 1 1 0.58144 0.36602 0.24947 0.70624 +3 169 1 1 1 0.58884 0.34827 0.2796 0.71898 +3 170 1 4 1 0.61215 0.37417 0.2637 0.74439 +3 171 1 1 1 0.63596 0.36185 0.26624 0.73248 +3 172 1 1 1 0.61559 0.37883 0.22076 0.73546 +3 173 1 1 1 0.58784 0.39491 0.20025 0.73755 +3 174 1 2 1 0.6596 0.38477 0.24322 0.77936 +3 175 1 1 1 0.64983 0.43784 0.27238 0.77963 +3 176 1 1 0 0.63608 0.43822 0.26457 0.78278 +3 177 2 4 1 0.61948 0.43996 0.23311 0.75512 +3 178 2 4 1 0.61418 0.38824 0.23349 0.75632 +3 179 2 1 0 0.6146 0.37627 0.25115 0.8 +3 180 2 2 0 0.62001 0.34678 0.2671 0.79487 +3 181 1 2 1 0.615 0.33971 0.28171 0.79225 +3 182 2 2 0 0.61682 0.32518 0.34198 0.79845 +3 183 2 4 1 0.63967 0.31349 0.3434 0.78929 +3 184 2 4 1 0.62524 0.30235 0.32921 0.78309 +3 185 2 4 1 0.65432 0.28414 0.3005 0.77878 +3 186 2 4 1 0.6499 0.28287 0.3494 0.7755 +3 187 2 4 1 0.6312 0.29965 0.3589 0.739 +3 188 2 4 1 0.60689 0.31089 0.35521 0.74163 +3 189 2 4 0 0.62744 0.29311 0.34019 0.75455 +3 190 2 2 0 0.62018 0.30403 0.37572 0.75018 +3 191 1 1 1 0.59118 0.32691 0.3682 0.74053 +3 192 1 3 0 0.62218 0.31464 0.37339 0.72332 +3 193 1 1 1 0.60768 0.30155 0.3907 0.73393 +3 194 1 1 1 0.62445 0.25367 0.40889 0.7381 +3 195 1 4 1 0.58264 0.27604 0.38269 0.73848 +3 196 1 4 1 0.5586 0.23074 0.38086 0.77833 +3 197 1 4 0 0.54563 0.22598 0.36843 0.73306 +3 198 1 1 0 0.5992 0.24965 0.35665 0.72907 +3 199 2 4 0 0.63541 0.24274 0.35439 0.68775 +3 200 1 2 0 0.64018 0.24858 0.36565 0.6627 +3 201 2 3 1 0.65081 0.25388 0.39391 0.67241 +4 1 2 4 0 0.21199 0.54628 0.68794 0.47466 +4 2 2 4 0 0.2 0.534 0.65541 0.47102 +4 3 2 3 1 0.2 0.57876 0.65958 0.47067 +4 4 2 3 1 0.2 0.56797 0.63188 0.42063 +4 6 2 1 1 0.2 0.54476 0.60146 0.47798 +4 7 2 1 0 0.2 0.52605 0.60722 0.47527 +4 8 2 3 0 0.2 0.52271 0.63572 0.47881 +4 9 2 1 0 0.21722 0.55743 0.64484 0.49461 +4 10 2 3 0 0.22466 0.5777 0.67382 0.49864 +4 11 1 4 0 0.22561 0.58815 0.68153 0.5709 +4 12 1 4 0 0.21568 0.56781 0.67591 0.5935 +4 13 1 2 1 0.22308 0.52535 0.71702 0.60735 +4 14 1 2 1 0.20689 0.53131 0.70323 0.60971 +4 15 1 2 1 0.21517 0.57119 0.70512 0.60726 +4 16 1 2 1 0.2067 0.56055 0.72736 0.59942 +4 17 1 2 0 0.2 0.55337 0.74002 0.60865 +4 18 2 3 1 0.2 0.57323 0.78656 0.53756 +4 19 2 3 0 0.24501 0.57013 0.8 0.55964 +4 20 1 1 0 0.22025 0.59802 0.78074 0.58471 +4 21 1 2 1 0.24619 0.58994 0.78157 0.62366 +4 22 1 2 1 0.2425 0.60365 0.77134 0.67168 +4 23 2 4 1 0.26134 0.62914 0.73476 0.66238 +4 24 1 2 1 0.2523 0.67328 0.76341 0.6803 +4 25 2 4 1 0.21905 0.66907 0.74752 0.67259 +4 26 2 1 0 0.2 0.63148 0.74129 0.64534 +4 27 1 2 1 0.22706 0.64474 0.7103 0.64379 +4 28 2 3 1 0.24795 0.71816 0.73498 0.65137 +4 29 2 3 0 0.25824 0.72237 
0.71915 0.64535 +4 30 2 4 1 0.2023 0.73493 0.72564 0.63744 +4 31 1 4 1 0.21389 0.75741 0.72671 0.64195 +4 32 2 2 1 0.2 0.77333 0.72097 0.6328 +4 33 2 2 1 0.2 0.77558 0.69994 0.6418 +4 34 2 2 1 0.2 0.78144 0.67943 0.63996 +4 35 1 3 0 0.2 0.8 0.67301 0.61008 +4 36 1 2 0 0.2 0.8 0.68831 0.63528 +4 37 1 2 1 0.2 0.8 0.69131 0.6034 +4 38 2 3 1 0.20971 0.8 0.70393 0.57568 +4 39 2 3 1 0.21621 0.76178 0.71917 0.55994 +4 40 2 1 0 0.22745 0.74196 0.70603 0.55248 +4 41 2 4 1 0.23812 0.75206 0.68627 0.56138 +4 42 1 2 1 0.26515 0.73895 0.69746 0.57138 +4 43 2 4 0 0.28398 0.76918 0.73134 0.59407 +4 44 2 1 1 0.30592 0.75416 0.70629 0.5629 +4 45 1 4 1 0.31918 0.76789 0.70929 0.56458 +4 46 2 3 1 0.29707 0.71374 0.71305 0.56137 +4 47 1 4 0 0.30046 0.66943 0.72947 0.55543 +4 48 1 2 1 0.27898 0.69381 0.70074 0.5995 +4 49 2 4 1 0.26535 0.69816 0.68161 0.58912 +4 50 1 2 1 0.30351 0.72021 0.67091 0.56377 +4 51 2 4 1 0.33934 0.65248 0.66959 0.56201 +4 52 2 2 1 0.3872 0.63757 0.68541 0.58033 +4 53 1 2 0 0.3944 0.6497 0.67534 0.57672 +4 54 2 3 0 0.41049 0.65797 0.6736 0.57501 +4 55 1 2 1 0.36902 0.61196 0.70441 0.62472 +4 56 1 2 1 0.39708 0.658 0.73746 0.67287 +4 57 1 3 0 0.33752 0.69203 0.75871 0.63325 +4 58 1 4 1 0.33235 0.68372 0.79774 0.65964 +4 59 2 1 0 0.29321 0.66003 0.8 0.68977 +4 60 2 2 1 0.2689 0.68852 0.79386 0.6942 +4 61 1 4 0 0.27333 0.70509 0.78656 0.68412 +4 62 1 1 1 0.29007 0.6768 0.8 0.72379 +4 63 1 1 1 0.27771 0.69015 0.79024 0.74027 +4 64 1 1 0 0.27473 0.72906 0.76889 0.73509 +4 65 1 1 1 0.27454 0.7235 0.75293 0.74844 +4 66 2 3 1 0.27372 0.71364 0.77559 0.75522 +4 67 2 4 1 0.29453 0.64955 0.77966 0.74915 +4 68 2 3 1 0.28646 0.64986 0.79155 0.78968 +4 69 2 4 1 0.26537 0.63016 0.77741 0.78975 +4 70 2 3 1 0.28141 0.66991 0.74299 0.79249 +4 71 1 1 0 0.29099 0.66493 0.79439 0.79014 +4 72 1 4 1 0.31207 0.64723 0.79159 0.78607 +4 73 2 3 1 0.26992 0.64794 0.762 0.79788 +4 74 1 1 0 0.28006 0.57867 0.78492 0.78075 +4 75 2 3 1 0.25879 0.61897 0.77092 0.7282 +4 76 2 1 0 0.21374 0.6422 0.77857 0.7214 +4 77 1 2 1 0.2 0.66219 0.76089 0.71271 +4 78 1 4 1 0.23095 0.63052 0.78842 0.74988 +4 79 2 1 0 0.21211 0.67373 0.75575 0.77481 +4 80 2 4 1 0.22245 0.67839 0.71743 0.78994 +4 81 1 3 1 0.22854 0.67643 0.72384 0.76479 +4 82 2 2 1 0.2 0.65877 0.69777 0.8 +4 83 2 3 1 0.2 0.66073 0.69603 0.8 +4 84 1 1 0 0.2 0.68394 0.70717 0.8 +4 85 2 2 1 0.20384 0.66684 0.75085 0.8 +4 86 1 3 1 0.21624 0.64553 0.77035 0.76305 +4 87 2 4 1 0.22371 0.66605 0.71853 0.79022 +4 88 2 3 1 0.22751 0.70415 0.75329 0.76656 +4 89 2 2 1 0.226 0.71427 0.73792 0.75358 +4 90 2 4 1 0.25551 0.73673 0.75205 0.73508 +4 91 2 1 0 0.24871 0.75519 0.77856 0.70971 +4 92 2 4 1 0.22538 0.69685 0.77893 0.72328 +4 93 2 1 0 0.24222 0.68194 0.77438 0.65775 +4 94 2 4 1 0.25815 0.70205 0.8 0.63861 +4 95 1 1 0 0.27333 0.6861 0.8 0.65481 +4 96 1 4 0 0.27917 0.73356 0.7416 0.67907 +4 97 1 3 1 0.28182 0.71244 0.72781 0.65051 +4 98 1 2 1 0.29413 0.72278 0.7606 0.68453 +4 99 1 2 1 0.2932 0.73863 0.75846 0.68132 +4 100 1 3 1 0.31532 0.69763 0.75898 0.69651 +4 101 1 2 1 0.31612 0.70769 0.74336 0.70307 +4 102 1 2 1 0.3108 0.75304 0.76022 0.6906 +4 103 1 1 0 0.33191 0.79851 0.7261 0.709 +4 104 1 2 1 0.34414 0.79383 0.74593 0.71874 +4 105 1 2 1 0.34368 0.8 0.77512 0.71896 +4 106 1 2 1 0.34419 0.77415 0.78079 0.71189 +4 107 1 2 1 0.37746 0.79259 0.78847 0.70569 +4 108 1 2 1 0.37835 0.79968 0.77385 0.69216 +4 109 1 4 1 0.38553 0.8 0.70916 0.66968 +4 110 1 2 1 0.38058 0.8 0.69244 0.67389 +4 111 1 2 1 0.41382 0.79577 0.70813 0.67588 +4 112 1 2 0 0.36934 0.8 0.66458 0.68569 +4 113 2 
3 0 0.35152 0.79807 0.65552 0.63742 +4 114 1 2 0 0.34184 0.8 0.66402 0.60133 +4 115 2 4 1 0.32713 0.8 0.70044 0.5724 +4 116 2 4 1 0.34862 0.8 0.76034 0.54769 +4 117 2 2 1 0.38828 0.8 0.79676 0.5328 +4 118 2 3 1 0.39307 0.8 0.8 0.53451 +4 119 2 4 1 0.39582 0.79676 0.79137 0.50423 +4 120 2 4 0 0.40118 0.8 0.75272 0.46582 +4 121 2 4 0 0.43031 0.8 0.74693 0.48711 +4 122 2 3 1 0.4908 0.8 0.7287 0.48293 +4 123 2 3 0 0.46163 0.8 0.68921 0.46915 +4 124 1 2 1 0.46082 0.76616 0.6904 0.44279 +4 125 1 2 1 0.46621 0.77326 0.68577 0.45188 +4 126 1 2 1 0.41896 0.77596 0.6704 0.42 +4 127 1 3 1 0.40602 0.76215 0.63875 0.37658 +4 128 1 2 1 0.42846 0.78743 0.63211 0.36063 +4 129 2 4 1 0.41213 0.75659 0.6251 0.33481 +4 130 2 4 0 0.41481 0.77493 0.59454 0.28814 +4 131 2 3 1 0.41472 0.79236 0.61594 0.2509 +4 132 2 3 1 0.39245 0.8 0.56165 0.30671 +4 133 1 1 0 0.40761 0.8 0.60698 0.33748 +4 134 1 3 1 0.42713 0.79083 0.64562 0.28783 +4 135 1 4 0 0.42478 0.7864 0.64 0.3166 +4 136 2 2 1 0.41485 0.76076 0.63457 0.29308 +4 137 2 3 1 0.4325 0.79865 0.6661 0.27684 +4 138 1 4 1 0.4363 0.75789 0.66885 0.25926 +4 139 1 2 1 0.4181 0.77731 0.68794 0.28972 +4 140 2 4 0 0.40627 0.76115 0.7093 0.31961 +4 141 2 2 1 0.37519 0.76241 0.69879 0.28667 +4 142 1 3 0 0.3901 0.7591 0.69174 0.30751 +4 143 2 4 1 0.36338 0.71133 0.69568 0.33997 +4 144 1 1 1 0.39841 0.73696 0.72225 0.34169 +4 145 1 1 1 0.41371 0.69938 0.72395 0.36836 +4 146 1 1 0 0.37714 0.71863 0.68051 0.39311 +4 147 1 1 0 0.32263 0.78138 0.72232 0.35715 +4 148 2 2 1 0.3333 0.76573 0.69665 0.40039 +4 149 1 4 0 0.3169 0.77223 0.65767 0.42938 +4 150 1 3 1 0.27789 0.78937 0.68047 0.46507 +4 151 1 2 1 0.23163 0.77209 0.72142 0.47408 +4 152 2 3 1 0.23568 0.76247 0.73256 0.46965 +4 153 1 2 1 0.26304 0.7484 0.73707 0.48612 +4 154 1 4 0 0.21324 0.72897 0.73612 0.50978 +4 155 1 2 1 0.21614 0.74213 0.72873 0.47975 +4 156 1 2 1 0.22546 0.75149 0.73982 0.51567 +4 157 2 4 0 0.2 0.76702 0.73213 0.50302 +4 158 1 2 0 0.2 0.75791 0.76103 0.49764 +4 159 1 2 0 0.24648 0.74262 0.75323 0.48225 +4 160 2 1 0 0.29166 0.7449 0.75737 0.49812 +4 161 1 2 1 0.29722 0.74881 0.78415 0.49579 +4 162 1 2 1 0.29739 0.748 0.75971 0.49409 +4 163 1 2 1 0.29061 0.74109 0.75713 0.47148 +4 164 1 2 1 0.3241 0.8 0.7562 0.49 +4 165 1 1 0 0.30304 0.8 0.74852 0.43331 +4 166 1 3 1 0.30686 0.8 0.79876 0.4492 +4 167 1 2 1 0.27929 0.79193 0.8 0.45587 +4 168 1 2 1 0.27502 0.8 0.8 0.47165 +4 169 1 2 1 0.27626 0.76784 0.8 0.45688 +4 170 1 2 1 0.26103 0.8 0.79842 0.50098 +4 171 2 1 0 0.25009 0.8 0.8 0.52076 +4 172 2 4 0 0.27084 0.76792 0.79399 0.53205 +4 173 1 4 0 0.25985 0.8 0.8 0.52452 +4 174 1 2 1 0.3034 0.8 0.76969 0.53788 +4 175 1 3 1 0.31203 0.8 0.76893 0.55553 +4 176 1 4 1 0.29759 0.79857 0.8 0.54718 +4 177 1 3 1 0.30694 0.8 0.77632 0.50738 +4 178 1 2 1 0.32205 0.79845 0.75379 0.51164 +4 179 1 2 1 0.3711 0.79228 0.75654 0.49837 +4 180 1 2 0 0.36351 0.75311 0.76007 0.52005 +4 181 1 2 1 0.43037 0.78817 0.7648 0.52297 +4 182 1 2 1 0.38317 0.8 0.76207 0.53649 +4 183 1 2 1 0.40583 0.76667 0.7691 0.51703 +4 184 1 2 1 0.37856 0.74345 0.78541 0.54304 +4 185 1 2 0 0.35465 0.75525 0.76958 0.50629 +4 186 1 1 0 0.34375 0.75051 0.8 0.51524 +4 187 1 2 1 0.32132 0.75855 0.79423 0.53117 +4 188 1 2 1 0.3636 0.77127 0.78654 0.58878 +4 189 1 2 0 0.3275 0.78351 0.77677 0.58923 +4 190 1 2 0 0.27943 0.77737 0.76301 0.61983 +4 191 1 2 1 0.27087 0.77048 0.76726 0.63355 +4 192 2 2 1 0.2608 0.7859 0.79498 0.67274 +4 193 1 2 0 0.24295 0.77068 0.8 0.6974 +4 194 1 1 1 0.21104 0.76327 0.74363 0.68911 +4 195 1 2 1 0.2056 0.77968 0.75447 0.67363 +4 
196 1 2 1 0.2 0.78194 0.71332 0.67214 +4 197 1 3 1 0.2 0.79051 0.73342 0.72048 +4 198 1 3 1 0.2 0.8 0.75775 0.73538 +4 199 1 3 1 0.2 0.8 0.71951 0.74666 +4 200 1 1 0 0.2 0.79957 0.72178 0.77312 +4 201 1 3 0 0.2 0.77904 0.76431 0.79704 +5 1 2 3 0 0.52965 0.6281 0.39177 0.2627 +5 2 2 1 1 0.50844 0.64534 0.43629 0.25243 +5 3 2 3 1 0.49916 0.65298 0.43716 0.25631 +5 4 1 1 1 0.46066 0.65858 0.45227 0.24514 +5 5 1 4 0 0.46583 0.67651 0.42093 0.28374 +5 6 1 3 1 0.48888 0.66179 0.441 0.31545 +5 7 1 1 1 0.46957 0.67537 0.39708 0.31115 +5 8 1 1 0 0.47773 0.63281 0.41152 0.30077 +5 9 1 1 0 0.42138 0.60612 0.41382 0.35085 +5 10 2 3 0 0.39058 0.65772 0.43496 0.34415 +5 11 2 4 0 0.36632 0.65864 0.38564 0.32864 +5 12 1 2 1 0.3353 0.67799 0.36702 0.30257 +5 13 1 3 1 0.33005 0.67692 0.37394 0.31 +5 14 1 3 0 0.30704 0.70034 0.38948 0.29147 +5 15 1 2 0 0.30188 0.67684 0.37848 0.30749 +5 16 2 3 0 0.31777 0.70184 0.37308 0.33226 +5 17 2 4 0 0.30146 0.68226 0.36574 0.35462 +5 18 1 2 0 0.30953 0.70445 0.36379 0.35352 +5 19 1 1 0 0.29945 0.71922 0.36604 0.36233 +5 21 1 1 0 0.26136 0.76773 0.33342 0.40977 +5 22 1 2 1 0.25017 0.79726 0.26784 0.44439 +5 23 1 4 1 0.25924 0.8 0.24602 0.39868 +5 24 1 2 1 0.25417 0.8 0.26601 0.39396 +5 25 1 2 1 0.24727 0.8 0.28345 0.40097 +5 26 1 2 1 0.24463 0.8 0.27493 0.41779 +5 27 1 2 1 0.22767 0.75664 0.25281 0.37704 +5 28 1 4 1 0.24347 0.75487 0.25652 0.36365 +5 29 1 2 1 0.25231 0.72268 0.27731 0.35213 +5 30 1 4 0 0.25335 0.6809 0.32021 0.34899 +5 31 1 4 1 0.26974 0.64092 0.25591 0.36438 +5 32 1 2 1 0.26745 0.66799 0.2717 0.34281 +5 33 1 2 1 0.28884 0.69135 0.26879 0.34217 +5 34 1 4 0 0.29497 0.69864 0.25664 0.33734 +5 35 1 2 1 0.30562 0.64968 0.24518 0.32997 +5 36 1 2 1 0.28868 0.66533 0.30171 0.30097 +5 37 1 2 0 0.28809 0.7375 0.3034 0.30363 +5 38 1 4 0 0.31865 0.71161 0.30639 0.28397 +5 39 1 1 0 0.31807 0.64858 0.29697 0.30764 +5 40 1 1 0 0.31593 0.65613 0.33298 0.31225 +5 41 1 3 1 0.33026 0.61258 0.33138 0.32014 +5 42 1 2 1 0.32697 0.61912 0.34696 0.33464 +5 43 1 3 0 0.27858 0.63891 0.35506 0.31018 +5 44 1 4 1 0.32086 0.66656 0.41123 0.28709 +5 45 1 2 1 0.34782 0.66129 0.41197 0.2836 +5 46 1 2 1 0.3288 0.70515 0.42619 0.30467 +5 47 1 2 1 0.31461 0.7261 0.46665 0.28781 +5 48 1 2 1 0.29798 0.75841 0.45923 0.24544 +5 49 1 1 0 0.32415 0.74721 0.45376 0.23062 +5 50 1 4 1 0.30859 0.73631 0.42276 0.25451 +5 51 1 4 1 0.30114 0.70529 0.43194 0.24206 +5 52 1 4 0 0.29249 0.67129 0.43607 0.20447 +5 53 1 2 1 0.28941 0.65402 0.47464 0.20202 +5 54 1 4 0 0.28255 0.65782 0.44258 0.24802 +5 55 1 2 0 0.29205 0.65442 0.42603 0.2763 +5 56 1 2 1 0.28681 0.68052 0.43304 0.25667 +5 57 1 2 1 0.28534 0.69036 0.43969 0.30449 +5 58 1 4 1 0.28727 0.72614 0.40972 0.28317 +5 59 1 2 0 0.29809 0.73427 0.40003 0.25991 +5 60 1 2 1 0.32128 0.72385 0.38134 0.25928 +5 61 1 2 1 0.2904 0.77418 0.40214 0.22237 +5 62 1 2 1 0.31291 0.76574 0.39228 0.23189 +5 63 1 2 0 0.31813 0.74611 0.37152 0.21661 +5 64 1 4 1 0.34169 0.72641 0.37578 0.23515 +5 65 1 1 0 0.39352 0.70822 0.32018 0.23678 +5 66 1 4 0 0.35239 0.70569 0.33043 0.25038 +5 67 1 1 0 0.31002 0.73202 0.30254 0.22323 +5 68 1 2 1 0.32702 0.73928 0.32406 0.22419 +5 69 1 2 1 0.32569 0.74191 0.3323 0.2288 +5 70 1 2 0 0.31631 0.75926 0.35622 0.20484 +5 71 1 2 1 0.34697 0.7608 0.3981 0.2 +5 72 1 4 0 0.36965 0.74103 0.41356 0.20749 +5 73 1 4 1 0.33203 0.75547 0.40478 0.24049 +5 74 1 2 0 0.3359 0.8 0.41224 0.23604 +5 75 1 2 1 0.38071 0.77505 0.40267 0.23514 +5 76 1 2 0 0.35913 0.7656 0.44632 0.22138 +5 77 1 2 1 0.32985 0.79312 0.47177 0.24763 +5 78 1 2 1 0.34528 0.72516 
0.45731 0.25059 +5 79 1 2 1 0.42887 0.70956 0.52762 0.22566 +5 80 1 2 1 0.50375 0.70408 0.55354 0.24068 +5 81 1 2 1 0.49584 0.69185 0.52126 0.21029 +5 82 1 2 1 0.49765 0.68081 0.51965 0.21723 +5 83 1 4 0 0.47827 0.70016 0.5245 0.22204 +5 84 1 2 0 0.49644 0.72369 0.54001 0.22711 +5 85 1 2 1 0.50782 0.73512 0.5403 0.24375 +5 86 1 2 1 0.48393 0.6719 0.54166 0.22529 +5 87 1 2 0 0.48789 0.6832 0.54899 0.23012 +5 88 1 2 1 0.45357 0.68183 0.54698 0.23454 +5 89 1 2 1 0.43108 0.6934 0.50771 0.28144 +5 90 1 2 1 0.41876 0.69745 0.50987 0.29576 +5 91 1 3 0 0.38172 0.67906 0.49969 0.29294 +5 92 1 2 1 0.37691 0.68526 0.47025 0.2863 +5 93 1 2 1 0.3854 0.66665 0.42952 0.27794 +5 94 1 2 1 0.4366 0.66658 0.43534 0.29518 +5 95 1 2 0 0.42289 0.69998 0.41894 0.35091 +5 96 1 4 0 0.40661 0.68082 0.40679 0.35538 +5 97 1 4 0 0.40432 0.69434 0.43249 0.38228 +5 98 1 4 0 0.39867 0.7264 0.39862 0.32107 +5 99 1 2 1 0.41113 0.6887 0.459 0.32047 +5 100 2 3 0 0.37729 0.72178 0.47418 0.33235 +5 101 1 2 1 0.3844 0.77754 0.48317 0.28709 +5 102 1 2 1 0.36407 0.79627 0.47854 0.29967 +5 103 1 2 1 0.37211 0.8 0.49278 0.26266 +5 104 1 4 0 0.36476 0.8 0.51316 0.2918 +5 105 1 2 1 0.37656 0.7832 0.52443 0.31781 +5 106 1 4 0 0.361 0.75417 0.51713 0.3391 +5 107 1 2 1 0.34127 0.69674 0.51345 0.33678 +5 108 1 3 1 0.36536 0.69128 0.54557 0.37853 +5 109 1 2 0 0.36782 0.68772 0.50025 0.3886 +5 110 1 3 1 0.37694 0.66622 0.52168 0.3531 +5 111 2 3 0 0.40396 0.67503 0.51225 0.35866 +5 112 1 3 0 0.39044 0.77402 0.48213 0.36963 +5 113 1 2 1 0.41819 0.76111 0.42435 0.36787 +5 114 1 3 0 0.43218 0.74342 0.45394 0.37659 +5 115 1 2 1 0.41543 0.7167 0.43029 0.37865 +5 116 1 2 1 0.43204 0.74695 0.47116 0.35511 +5 117 1 2 0 0.42545 0.73504 0.48081 0.38071 +5 118 1 4 1 0.40956 0.76826 0.48392 0.37526 +5 119 1 2 1 0.44331 0.7724 0.493 0.35941 +5 120 1 2 1 0.42941 0.74261 0.48721 0.32865 +5 121 1 2 1 0.46223 0.7079 0.495 0.34236 +5 122 1 4 1 0.45196 0.74791 0.51239 0.30726 +5 123 1 2 1 0.46976 0.7212 0.50553 0.29633 +5 124 1 2 1 0.49744 0.72772 0.47922 0.32832 +5 125 1 4 1 0.48511 0.70999 0.44181 0.35508 +5 126 1 2 1 0.49698 0.72154 0.4094 0.33259 +5 127 1 2 0 0.55174 0.72168 0.34913 0.29959 +5 128 1 2 1 0.56839 0.74423 0.36314 0.29836 +5 129 1 2 1 0.56329 0.74977 0.30709 0.29901 +5 130 1 4 0 0.53117 0.71506 0.30289 0.29889 +5 131 1 2 1 0.53059 0.72266 0.29907 0.27074 +5 132 2 4 0 0.52097 0.73037 0.31229 0.26118 +5 133 1 2 1 0.52505 0.73778 0.30595 0.26641 +5 134 1 2 1 0.51804 0.74373 0.29208 0.22722 +5 135 1 4 0 0.50817 0.69914 0.29086 0.2444 +5 136 1 2 0 0.46426 0.64347 0.29607 0.24786 +5 137 1 2 1 0.45112 0.65173 0.28418 0.2684 +5 138 1 2 1 0.46582 0.63202 0.27425 0.2506 +5 139 1 2 1 0.52614 0.64221 0.30124 0.26622 +5 140 1 2 1 0.48691 0.62286 0.24835 0.26166 +5 141 1 2 1 0.49674 0.65637 0.26224 0.26184 +5 142 1 2 1 0.50251 0.66832 0.27825 0.22906 +5 143 1 2 0 0.52561 0.63081 0.2888 0.25572 +5 144 1 2 0 0.5582 0.63455 0.31322 0.2142 +5 145 1 1 0 0.56369 0.67699 0.28798 0.25044 +5 146 1 2 0 0.59611 0.68733 0.30053 0.26272 +5 147 2 4 0 0.62406 0.68451 0.27671 0.28084 +5 148 2 3 0 0.62594 0.66005 0.2575 0.23694 +5 149 1 2 1 0.60473 0.64258 0.26584 0.2 +5 150 1 2 0 0.55418 0.64354 0.25955 0.2 +5 151 1 2 1 0.49225 0.65877 0.22367 0.2 +5 152 1 4 0 0.48977 0.67444 0.22502 0.2 +5 153 1 2 1 0.49144 0.68793 0.23937 0.2251 +5 154 1 2 1 0.51661 0.67634 0.25181 0.23167 +5 155 1 3 0 0.49595 0.61461 0.27478 0.25776 +5 156 1 2 0 0.51275 0.58055 0.29726 0.29554 +5 157 1 2 1 0.54337 0.57698 0.34097 0.29997 +5 158 1 2 0 0.54004 0.56746 0.33991 0.34567 +5 159 1 2 1 0.5505 
0.58749 0.37857 0.33782 +5 160 1 2 0 0.55734 0.58047 0.31952 0.32997 +5 161 1 2 1 0.57778 0.58586 0.32935 0.29741 +5 162 1 3 0 0.61467 0.56721 0.34121 0.29956 +5 163 1 2 0 0.61045 0.60386 0.33623 0.31601 +5 164 1 2 1 0.61667 0.65245 0.37916 0.34916 +5 165 1 4 0 0.60631 0.64049 0.37032 0.32187 +5 166 1 4 1 0.56741 0.6463 0.39292 0.26194 +5 167 1 4 1 0.55779 0.66226 0.35642 0.30488 +5 168 1 2 1 0.60508 0.65471 0.33749 0.31078 +5 169 2 4 0 0.58856 0.68126 0.3558 0.29629 +5 170 1 4 0 0.6058 0.67863 0.36828 0.29849 +5 171 1 4 0 0.63924 0.72809 0.37309 0.27935 +5 172 1 2 0 0.61086 0.76142 0.32803 0.31131 +5 173 1 4 0 0.56857 0.72348 0.4022 0.2991 +5 174 1 2 1 0.57425 0.75776 0.38847 0.31192 +5 175 1 2 1 0.58034 0.73465 0.38833 0.33734 +5 176 1 3 1 0.54472 0.70354 0.39372 0.35991 +5 177 1 3 0 0.58202 0.70963 0.34559 0.35314 +5 178 1 2 1 0.55976 0.69322 0.34919 0.33378 +5 179 1 2 0 0.58258 0.70533 0.3781 0.31117 +5 180 1 2 1 0.55612 0.70875 0.43954 0.32156 +5 181 1 3 1 0.56855 0.67537 0.44562 0.32888 +5 182 1 4 1 0.59863 0.68329 0.42186 0.35163 +5 183 1 3 0 0.60997 0.68519 0.4303 0.37683 +5 184 1 2 0 0.60248 0.71019 0.41902 0.38277 +5 185 1 2 1 0.59809 0.76062 0.43002 0.38323 +5 186 1 4 1 0.6081 0.77644 0.47993 0.38275 +5 187 1 2 1 0.61408 0.79223 0.45948 0.39387 +5 188 1 2 0 0.60928 0.8 0.45733 0.43246 +5 189 1 3 1 0.60294 0.78355 0.42614 0.43557 +5 190 1 3 0 0.5558 0.78433 0.36131 0.4455 +5 191 2 4 0 0.60722 0.77838 0.36265 0.45639 +5 192 1 4 1 0.56958 0.74974 0.39301 0.46816 +5 193 1 2 1 0.57706 0.73897 0.38343 0.45477 +5 194 1 2 1 0.58157 0.70094 0.39122 0.42132 +5 195 1 2 1 0.60293 0.70089 0.38323 0.39178 +5 196 1 2 1 0.57593 0.66786 0.43664 0.4349 +5 197 1 2 0 0.60465 0.64527 0.44414 0.40635 +5 198 1 4 0 0.5616 0.64003 0.46539 0.42425 +5 199 1 2 1 0.58718 0.58884 0.45605 0.43693 +5 200 1 2 0 0.58107 0.59477 0.40883 0.42763 +5 201 1 2 0 0.60801 0.56536 0.38925 0.43773 +6 2 2 4 0 0.73844 0.64629 0.26467 0.27395 +6 3 2 3 0 0.69228 0.64001 0.24449 0.2622 +6 4 1 2 0 0.68433 0.59579 0.25212 0.29312 +6 5 1 1 1 0.69212 0.58937 0.24595 0.3045 +6 6 1 1 1 0.6607 0.53947 0.24873 0.32325 +6 7 1 1 1 0.66944 0.47849 0.24066 0.2849 +6 8 1 1 1 0.63452 0.50521 0.25215 0.28294 +6 9 1 3 0 0.65345 0.53719 0.26617 0.28657 +6 10 2 4 0 0.64699 0.52901 0.22662 0.27622 +6 11 1 1 1 0.61869 0.5252 0.2135 0.26357 +6 12 1 1 0 0.60212 0.53859 0.24008 0.24272 +6 13 1 2 1 0.65511 0.58001 0.26076 0.25046 +6 14 1 2 0 0.63622 0.5112 0.20641 0.28391 +6 15 1 2 1 0.61028 0.53644 0.2 0.26336 +6 16 1 2 1 0.60128 0.53024 0.22805 0.27728 +6 17 1 2 0 0.65173 0.56066 0.2 0.25872 +6 18 1 2 1 0.67288 0.5652 0.2 0.21137 +6 19 1 2 1 0.66971 0.49274 0.2218 0.22404 +6 20 1 2 0 0.68494 0.53305 0.22901 0.22827 +6 21 1 2 0 0.70233 0.55296 0.2461 0.22885 +6 22 2 2 1 0.70664 0.58177 0.2493 0.27281 +6 23 2 4 0 0.68894 0.55935 0.20401 0.2927 +6 24 1 2 0 0.63312 0.52932 0.2 0.25501 +6 26 2 3 1 0.67007 0.49809 0.2 0.29855 +6 27 2 1 1 0.70116 0.47996 0.20695 0.28267 +6 28 2 1 1 0.71325 0.49454 0.2 0.26681 +6 29 1 1 1 0.72645 0.43744 0.2 0.3196 +6 30 1 1 1 0.74659 0.44562 0.2 0.31482 +6 31 1 1 1 0.76222 0.42447 0.2 0.32081 +6 32 1 1 1 0.73448 0.43376 0.2 0.34296 +6 33 1 1 1 0.72797 0.43597 0.2 0.35101 +6 34 1 1 1 0.75074 0.46387 0.2 0.37183 +6 36 1 3 0 0.7328 0.45374 0.20442 0.42417 +6 37 2 4 0 0.7252 0.48332 0.2 0.40393 +6 38 1 1 1 0.73625 0.48365 0.2 0.41363 +6 39 1 4 1 0.70231 0.49133 0.21054 0.38507 +6 40 1 1 1 0.72107 0.49519 0.2 0.39185 +6 41 1 4 0 0.71986 0.48078 0.24709 0.37263 +6 42 1 1 1 0.7031 0.49023 0.2 0.3371 +6 43 1 1 1 0.72264 0.50759 
0.2 0.37898 +6 44 1 1 1 0.71507 0.49493 0.20433 0.39462 +6 45 1 1 1 0.7487 0.48231 0.21516 0.36209 +6 46 1 4 1 0.77837 0.4936 0.2 0.37251 +6 47 1 1 0 0.8 0.4782 0.21425 0.36112 +6 48 1 4 0 0.79292 0.46445 0.21974 0.38153 +6 49 2 4 0 0.79228 0.43 0.21181 0.33966 +6 50 1 1 1 0.8 0.42849 0.21877 0.30777 +6 51 1 1 1 0.8 0.41144 0.21003 0.32592 +6 52 1 1 1 0.74326 0.40335 0.2 0.33955 +6 53 1 1 0 0.75236 0.42683 0.2552 0.33861 +6 54 1 1 0 0.77285 0.43779 0.28617 0.32257 +6 55 1 2 1 0.78413 0.42864 0.31439 0.26735 +6 56 1 2 1 0.75446 0.43688 0.29642 0.21333 +6 57 1 4 0 0.75827 0.46732 0.29081 0.2 +6 58 1 2 0 0.76712 0.44268 0.29863 0.20607 +6 59 1 1 1 0.76093 0.46029 0.29629 0.22507 +6 60 1 1 0 0.75172 0.4466 0.27871 0.20603 +6 61 1 4 0 0.76716 0.45762 0.29448 0.20525 +6 62 1 2 1 0.77346 0.48289 0.27256 0.2 +6 63 1 2 0 0.78575 0.49385 0.28216 0.20283 +6 64 1 3 0 0.8 0.53941 0.2776 0.2 +6 65 1 3 0 0.79417 0.55878 0.26483 0.20866 +6 66 1 2 1 0.8 0.55414 0.30446 0.21184 +6 67 1 2 0 0.76477 0.53706 0.30028 0.21075 +6 68 2 2 0 0.79557 0.50808 0.32894 0.24184 +6 69 1 1 1 0.79729 0.50847 0.34599 0.2038 +6 70 1 1 1 0.77915 0.52111 0.36398 0.2 +6 71 1 1 1 0.75315 0.4976 0.37342 0.2 +6 72 1 4 0 0.79673 0.52013 0.36636 0.2 +6 73 1 1 1 0.79215 0.53026 0.36133 0.20624 +6 74 1 1 1 0.79701 0.4543 0.3809 0.2 +6 75 1 1 1 0.78056 0.44464 0.38713 0.2141 +6 76 1 1 1 0.76446 0.46656 0.34142 0.2 +6 77 1 3 1 0.74452 0.47535 0.3358 0.2 +6 78 2 3 0 0.76853 0.48442 0.32546 0.21197 +6 79 1 1 1 0.8 0.43554 0.32101 0.22591 +6 80 1 1 1 0.8 0.41821 0.30145 0.23128 +6 81 1 1 1 0.7939 0.41732 0.32749 0.23821 +6 82 1 3 0 0.8 0.42489 0.40595 0.23603 +6 83 1 1 1 0.772 0.37394 0.36878 0.2 +6 84 1 1 1 0.8 0.38051 0.35345 0.20285 +6 85 1 1 1 0.76923 0.37481 0.35245 0.21921 +6 86 1 4 0 0.78747 0.3609 0.3549 0.21975 +6 87 1 1 1 0.7787 0.36849 0.38117 0.21094 +6 88 1 1 1 0.77307 0.36926 0.38108 0.22898 +6 89 1 3 1 0.75185 0.354 0.37251 0.21602 +6 90 1 3 1 0.73574 0.34884 0.37385 0.23082 +6 91 1 1 1 0.75935 0.35762 0.36155 0.22633 +6 92 1 3 0 0.7433 0.35534 0.38664 0.2 +6 93 1 1 1 0.73787 0.38545 0.38875 0.24564 +6 94 1 1 1 0.75313 0.388 0.36282 0.23557 +6 95 1 1 1 0.76958 0.40559 0.35864 0.20505 +6 96 1 1 0 0.74335 0.37624 0.33644 0.21493 +6 97 1 3 1 0.76881 0.38935 0.31529 0.23183 +6 98 1 3 1 0.8 0.44251 0.3035 0.29254 +6 99 1 1 1 0.79365 0.46767 0.2849 0.29204 +6 100 1 1 1 0.76134 0.45193 0.25515 0.333 +6 101 1 1 1 0.75759 0.47852 0.28939 0.3302 +6 102 1 1 1 0.74885 0.45144 0.32843 0.29179 +6 103 1 3 0 0.746 0.47158 0.35982 0.25733 +6 104 1 3 1 0.69118 0.51695 0.38424 0.2 +6 105 1 1 1 0.70272 0.4799 0.41213 0.2 +6 106 1 1 1 0.74316 0.49883 0.42373 0.22409 +6 107 1 1 1 0.76885 0.49184 0.44116 0.2596 +6 108 1 1 1 0.72596 0.45403 0.42147 0.29699 +6 109 1 1 1 0.73449 0.46472 0.43617 0.25409 +6 110 1 3 1 0.74143 0.43229 0.45642 0.26774 +6 111 1 1 1 0.73912 0.41536 0.47221 0.28046 +6 112 1 1 1 0.76965 0.4156 0.48204 0.28448 +6 113 1 1 0 0.8 0.43366 0.4671 0.25635 +6 114 1 3 0 0.8 0.44463 0.45515 0.23894 +6 115 1 1 1 0.76283 0.42759 0.44516 0.26106 +6 116 1 3 1 0.76396 0.39602 0.43198 0.26465 +6 117 1 1 1 0.72628 0.40751 0.44361 0.23025 +6 118 1 1 1 0.71607 0.41871 0.44249 0.22063 +6 119 1 3 1 0.70541 0.43852 0.43407 0.20998 +6 120 1 3 0 0.70211 0.43268 0.39145 0.21564 +6 121 1 1 1 0.70631 0.43664 0.41911 0.21776 +6 122 1 1 0 0.68467 0.50343 0.397 0.24363 +6 123 2 1 0 0.66341 0.43698 0.40924 0.22622 +6 124 2 3 0 0.66634 0.40102 0.37127 0.24413 +6 125 1 1 1 0.67439 0.3999 0.35503 0.21975 +6 126 2 3 0 0.64118 0.41188 0.36435 0.22777 +6 127 1 4 
0 0.61951 0.41806 0.29839 0.2 +6 128 1 1 1 0.61455 0.40946 0.28627 0.20417 +6 129 1 1 0 0.63085 0.36319 0.3193 0.23203 +6 130 1 1 0 0.65582 0.38843 0.35002 0.22153 +6 131 1 2 0 0.66217 0.33886 0.35366 0.2 +6 132 1 1 1 0.68178 0.35875 0.3396 0.21985 +6 133 1 1 1 0.65448 0.34927 0.361 0.21653 +6 134 1 1 0 0.64916 0.3723 0.35015 0.2 +6 135 1 4 0 0.62925 0.37902 0.38279 0.20361 +6 136 1 1 0 0.59687 0.40627 0.42257 0.2 +6 137 1 1 1 0.60549 0.42107 0.39855 0.2 +6 138 1 1 0 0.62875 0.43311 0.41428 0.20039 +6 139 1 4 0 0.59721 0.3936 0.43017 0.2 +6 140 1 4 0 0.59071 0.34912 0.48425 0.20947 +6 141 2 3 0 0.60092 0.34219 0.47539 0.2 +6 142 1 3 0 0.59808 0.34948 0.43629 0.2 +6 143 1 1 0 0.62534 0.41234 0.3883 0.20448 +6 144 1 2 1 0.53923 0.44827 0.40399 0.20877 +6 145 1 2 1 0.54857 0.46654 0.40463 0.24354 +6 146 1 4 0 0.52858 0.4551 0.3712 0.30885 +6 147 1 4 1 0.52064 0.43257 0.35697 0.30294 +6 148 1 2 0 0.54765 0.41182 0.37944 0.29994 +6 149 1 1 0 0.54236 0.37342 0.38255 0.29 +6 150 1 4 0 0.54737 0.41134 0.43858 0.30156 +6 151 2 3 1 0.54518 0.44957 0.48541 0.29498 +6 152 2 3 0 0.54049 0.45317 0.52725 0.27868 +6 153 1 2 0 0.53167 0.48579 0.53491 0.32645 +6 154 1 3 0 0.56726 0.53268 0.52742 0.3149 +6 155 2 3 1 0.52608 0.52041 0.53536 0.36929 +6 156 2 2 1 0.5284 0.49763 0.55458 0.3797 +6 157 1 3 0 0.50124 0.48963 0.54688 0.38372 +6 158 1 2 0 0.49942 0.5344 0.5345 0.33314 +6 159 1 3 1 0.53981 0.51409 0.52742 0.3483 +6 160 2 3 0 0.53189 0.45857 0.54323 0.35816 +6 161 1 3 0 0.5403 0.41014 0.56219 0.37759 +6 162 2 3 0 0.48437 0.37569 0.56655 0.38003 +6 163 2 3 1 0.48562 0.37159 0.52849 0.40983 +6 164 2 2 1 0.51432 0.34938 0.5522 0.36539 +6 165 1 2 0 0.50373 0.31786 0.55346 0.37918 +6 166 1 3 1 0.50697 0.32369 0.53195 0.36926 +6 167 1 3 0 0.50027 0.31776 0.55123 0.40146 +6 168 2 3 1 0.49311 0.36849 0.54299 0.4182 +6 169 2 3 0 0.46946 0.39455 0.55376 0.39565 +6 170 1 2 1 0.45765 0.41354 0.53639 0.36544 +6 171 1 2 0 0.48573 0.43632 0.57664 0.31938 +6 172 2 3 0 0.49735 0.42353 0.52452 0.29386 +6 173 1 2 1 0.48042 0.42558 0.49732 0.29756 +6 174 1 4 0 0.48046 0.36183 0.5306 0.3231 +6 175 1 3 0 0.42026 0.35278 0.49506 0.30487 +6 176 2 3 0 0.35582 0.392 0.51456 0.30989 +6 177 2 3 0 0.40432 0.4218 0.49302 0.3005 +6 178 1 3 0 0.41173 0.45867 0.51064 0.34797 +6 179 1 2 1 0.41262 0.47837 0.52873 0.34487 +6 180 1 2 0 0.45323 0.49058 0.51499 0.33723 +6 181 1 2 1 0.45695 0.48239 0.52702 0.32631 +6 182 1 3 1 0.4087 0.48923 0.52477 0.29357 +6 183 1 3 1 0.37603 0.45321 0.51015 0.29681 +6 184 1 3 0 0.34802 0.45232 0.52372 0.29781 +6 185 1 4 1 0.33154 0.43127 0.55039 0.28834 +6 186 1 2 0 0.30693 0.43095 0.56428 0.31368 +6 187 2 2 0 0.32836 0.44521 0.56101 0.324 +6 188 1 1 0 0.33058 0.44933 0.59314 0.34103 +6 189 2 4 0 0.37315 0.48423 0.55358 0.32311 +6 190 2 3 1 0.38488 0.4916 0.53274 0.35724 +6 191 2 3 0 0.31661 0.52983 0.50062 0.36964 +6 192 1 1 1 0.30432 0.53627 0.48407 0.3694 +6 193 1 1 0 0.31336 0.60341 0.45643 0.38103 +6 194 2 4 0 0.2759 0.58892 0.51222 0.35864 +6 195 1 3 0 0.29853 0.59536 0.49481 0.29334 +6 196 1 1 0 0.29928 0.61646 0.47757 0.26909 +6 197 1 2 0 0.32504 0.56206 0.48146 0.26988 +6 198 1 1 1 0.31099 0.53143 0.4545 0.26122 +6 199 1 1 0 0.31326 0.55127 0.45337 0.26451 +6 200 2 4 0 0.35566 0.53366 0.46122 0.22951 +6 201 2 1 0 0.31696 0.49235 0.45925 0.21454 +7 2 1 2 1 0.73652 0.77382 0.28119 0.41581 +7 3 1 2 1 0.73617 0.72485 0.33184 0.39733 +7 4 1 2 1 0.73024 0.74529 0.34602 0.40709 +7 5 1 2 0 0.75685 0.75935 0.35679 0.43301 +7 6 1 2 1 0.72626 0.7789 0.3369 0.40037 +7 7 1 4 0 0.74553 0.76397 0.32571 0.4309 
+7 8 1 3 1 0.77443 0.72212 0.35922 0.41842 +7 9 1 1 1 0.77449 0.75619 0.34035 0.4397 +7 10 1 3 0 0.8 0.7567 0.35428 0.46373 +7 11 2 3 1 0.77508 0.69086 0.37354 0.47258 +7 12 2 3 0 0.76829 0.67967 0.40001 0.50458 +7 13 1 1 0 0.76672 0.69934 0.41569 0.5421 +7 14 2 4 0 0.77127 0.71321 0.37702 0.5448 +7 15 1 2 0 0.77979 0.68525 0.36089 0.55744 +7 16 1 1 0 0.76202 0.71828 0.41109 0.5703 +7 17 1 2 0 0.78828 0.71386 0.44286 0.60376 +7 18 2 1 1 0.78199 0.74743 0.44158 0.59931 +7 19 2 1 1 0.8 0.7754 0.47841 0.56621 +7 20 2 1 1 0.77974 0.78389 0.42621 0.54334 +7 21 2 3 0 0.77194 0.8 0.42647 0.55248 +7 22 2 1 1 0.77586 0.78019 0.42254 0.56588 +7 23 2 3 0 0.76585 0.77611 0.42221 0.54927 +7 24 2 1 1 0.76275 0.78298 0.39476 0.53483 +7 25 2 4 1 0.7644 0.8 0.36015 0.50291 +7 26 2 4 1 0.6958 0.77858 0.37691 0.47691 +7 27 2 4 1 0.68262 0.8 0.36378 0.46858 +7 28 2 4 0 0.6817 0.78741 0.34606 0.45713 +7 29 2 4 1 0.63409 0.7914 0.34804 0.42898 +7 30 2 4 0 0.62849 0.72416 0.34811 0.38981 +7 31 2 4 1 0.62123 0.72053 0.3406 0.42393 +7 32 2 4 0 0.60729 0.72962 0.3686 0.43583 +7 33 1 1 0 0.63289 0.73121 0.36294 0.44422 +7 34 1 2 1 0.66452 0.70818 0.38126 0.50299 +7 35 1 4 1 0.65276 0.69137 0.34886 0.4978 +7 36 1 4 0 0.67635 0.67695 0.36245 0.48723 +7 37 1 2 1 0.6364 0.72169 0.37522 0.46262 +7 38 1 2 0 0.62136 0.68738 0.37165 0.50667 +7 39 1 2 0 0.62684 0.66038 0.3311 0.4098 +7 40 2 4 1 0.62214 0.66569 0.30715 0.39592 +7 41 2 1 1 0.64773 0.65643 0.32032 0.41724 +7 42 2 4 0 0.62275 0.63917 0.30505 0.42642 +7 43 2 2 0 0.62059 0.60141 0.28548 0.37703 +7 44 2 4 0 0.61911 0.58772 0.32664 0.37639 +7 45 1 1 1 0.59969 0.56746 0.32019 0.41392 +7 46 1 3 0 0.60794 0.56318 0.33423 0.41813 +7 47 1 1 1 0.58478 0.55692 0.30522 0.42107 +7 48 1 1 1 0.59251 0.52936 0.31046 0.42059 +7 49 1 1 0 0.62059 0.52717 0.28554 0.43034 +7 50 1 3 0 0.6537 0.58194 0.31337 0.4118 +7 51 1 1 1 0.67234 0.59347 0.33659 0.42892 +7 52 1 4 0 0.68842 0.60563 0.31471 0.44322 +7 53 1 3 0 0.64103 0.6188 0.3388 0.46047 +7 54 1 1 0 0.64114 0.62285 0.37748 0.42213 +7 55 1 3 0 0.59619 0.62455 0.40678 0.41438 +7 56 2 4 0 0.61538 0.57985 0.42666 0.43971 +7 57 2 3 0 0.60028 0.56839 0.42793 0.45859 +7 58 1 2 1 0.60161 0.55339 0.43555 0.46169 +7 59 1 4 0 0.56162 0.53272 0.42529 0.4359 +7 60 1 2 0 0.55868 0.52705 0.4066 0.45736 +7 61 1 4 1 0.58758 0.56185 0.43095 0.48555 +7 62 1 2 1 0.63743 0.55867 0.43084 0.49401 +7 63 1 2 0 0.63078 0.52857 0.47546 0.4925 +7 64 2 1 1 0.66642 0.52672 0.47409 0.51444 +7 65 2 4 1 0.63518 0.50386 0.52161 0.46751 +7 66 2 4 1 0.64562 0.46239 0.51872 0.47957 +7 67 2 4 0 0.64799 0.47066 0.52357 0.45758 +7 68 2 4 0 0.667 0.46017 0.53543 0.48102 +7 69 1 3 0 0.63757 0.46475 0.51101 0.47566 +7 70 1 1 0 0.64153 0.46559 0.5019 0.49674 +7 71 1 2 1 0.65082 0.46107 0.50177 0.46355 +7 72 1 2 0 0.69254 0.45669 0.48715 0.4624 +7 73 1 2 1 0.70784 0.45809 0.45837 0.45079 +7 74 2 4 1 0.68494 0.50599 0.4562 0.46241 +7 75 1 2 1 0.665 0.48582 0.41982 0.45064 +7 76 1 2 0 0.62657 0.47462 0.40381 0.49046 +7 77 1 2 1 0.63548 0.45809 0.43495 0.51459 +7 78 1 2 1 0.62649 0.45812 0.44179 0.52481 +7 79 1 2 0 0.63757 0.44622 0.43598 0.49922 +7 80 1 4 1 0.67383 0.43832 0.4147 0.49838 +7 81 2 4 0 0.67993 0.40577 0.46088 0.45564 +7 82 1 2 0 0.67973 0.40914 0.49205 0.4278 +7 83 1 1 0 0.70219 0.4152 0.51664 0.38622 +7 84 2 3 0 0.67299 0.4017 0.52281 0.38776 +7 85 2 4 0 0.68044 0.43412 0.51532 0.37247 +7 86 1 2 0 0.68635 0.37774 0.51596 0.37094 +7 87 1 1 0 0.70487 0.42532 0.50949 0.33338 +7 88 1 1 1 0.71384 0.40387 0.57109 0.33162 +7 89 1 3 1 0.71681 0.41858 0.56217 0.35235 +7 
90 1 1 1 0.71665 0.43533 0.56571 0.3589 +7 91 1 1 0 0.69869 0.4295 0.56662 0.34501 +7 92 1 1 1 0.6688 0.42876 0.56326 0.33795 +7 93 1 1 1 0.65901 0.40379 0.51785 0.31292 +7 94 1 3 0 0.69655 0.47237 0.52794 0.28493 +7 95 1 4 0 0.70413 0.48688 0.5342 0.28189 +7 96 1 1 1 0.72048 0.47089 0.60726 0.28497 +7 97 1 1 1 0.72608 0.48122 0.62543 0.32596 +7 98 1 1 1 0.719 0.52294 0.63479 0.31364 +7 99 1 1 0 0.75845 0.54401 0.6308 0.30227 +7 100 1 1 1 0.77583 0.50055 0.58914 0.31341 +7 101 1 1 1 0.7629 0.5101 0.60475 0.30806 +7 102 1 1 1 0.75136 0.53288 0.60111 0.28629 +7 103 1 4 0 0.7449 0.53476 0.63764 0.26279 +7 104 1 1 1 0.75735 0.5178 0.60022 0.26465 +7 105 1 1 1 0.74006 0.5208 0.6246 0.28593 +7 106 1 4 0 0.74655 0.50826 0.62744 0.27347 +7 107 2 1 0 0.73213 0.49834 0.6107 0.26635 +7 108 1 1 0 0.74252 0.49545 0.58829 0.29655 +7 109 2 3 1 0.75246 0.50399 0.57983 0.30671 +7 110 2 3 1 0.75356 0.53978 0.58352 0.3279 +7 111 2 3 1 0.74623 0.54892 0.61842 0.38178 +7 112 2 1 1 0.73508 0.53497 0.62784 0.38015 +7 113 2 3 1 0.67071 0.53694 0.60079 0.38059 +7 114 2 3 1 0.65294 0.56661 0.61227 0.41993 +7 115 2 3 0 0.6813 0.53256 0.57688 0.42939 +7 116 2 3 0 0.65403 0.48876 0.56384 0.42955 +7 117 1 2 0 0.65483 0.45313 0.58652 0.44558 +7 118 2 3 0 0.62252 0.40745 0.56623 0.45398 +7 119 2 4 0 0.61672 0.43266 0.54611 0.40321 +7 120 1 1 0 0.62597 0.47728 0.55853 0.42995 +7 121 1 1 1 0.62411 0.52183 0.51648 0.40554 +7 122 1 1 0 0.61963 0.46424 0.51519 0.40227 +7 123 1 4 1 0.64348 0.4852 0.5114 0.42042 +7 124 1 1 1 0.64939 0.459 0.51596 0.44302 +7 125 1 1 1 0.62903 0.46474 0.5682 0.42705 +7 126 1 1 1 0.64059 0.44304 0.54478 0.43085 +7 127 1 1 1 0.64861 0.45858 0.59115 0.45264 +7 128 1 1 1 0.67555 0.44122 0.57081 0.45509 +7 129 1 1 1 0.69338 0.48416 0.57055 0.41559 +7 130 1 3 1 0.706 0.52255 0.58431 0.44688 +7 131 1 1 1 0.68307 0.53763 0.58693 0.44044 +7 132 1 1 1 0.74351 0.54766 0.56273 0.43455 +7 133 1 1 1 0.79692 0.52775 0.58414 0.41248 +7 134 1 3 1 0.8 0.54525 0.59477 0.39839 +7 135 1 1 1 0.78379 0.51418 0.54199 0.39777 +7 136 1 1 1 0.76691 0.50612 0.56284 0.41139 +7 137 1 1 1 0.77781 0.51617 0.5476 0.36561 +7 138 1 1 0 0.8 0.52935 0.55226 0.38513 +7 139 1 1 1 0.77705 0.5329 0.50855 0.39285 +7 140 1 1 1 0.8 0.53265 0.55625 0.42593 +7 141 1 1 1 0.78731 0.54815 0.55623 0.4615 +7 142 1 3 0 0.8 0.5041 0.54073 0.42958 +7 143 1 1 1 0.79062 0.47687 0.57157 0.44331 +7 144 1 1 1 0.79089 0.51494 0.59257 0.40092 +7 145 1 4 0 0.8 0.54651 0.57069 0.40706 +7 146 1 3 1 0.8 0.51781 0.5596 0.4409 +7 147 1 1 1 0.8 0.51138 0.56482 0.42916 +7 148 1 1 1 0.79077 0.51298 0.5507 0.4566 +7 149 1 1 1 0.8 0.50618 0.5763 0.46124 +7 150 1 1 1 0.78315 0.5093 0.54557 0.41467 +7 151 1 1 1 0.76184 0.50378 0.51372 0.4155 +7 152 1 1 1 0.8 0.51599 0.54876 0.4234 +7 153 1 4 1 0.77191 0.50295 0.54092 0.41225 +7 154 1 1 0 0.73744 0.53739 0.56046 0.41314 +7 155 1 1 1 0.73295 0.52323 0.60109 0.39622 +7 156 1 1 1 0.71701 0.56102 0.57213 0.37529 +7 157 1 1 1 0.68905 0.56982 0.58759 0.36813 +7 158 1 1 1 0.66678 0.5476 0.54347 0.39532 +7 159 1 1 1 0.64494 0.49831 0.56258 0.41057 +7 160 1 4 1 0.67528 0.52884 0.5608 0.42352 +7 161 1 1 1 0.65777 0.52094 0.52869 0.40421 +7 162 1 4 1 0.66188 0.5178 0.51787 0.40349 +7 163 1 1 1 0.6381 0.5756 0.48972 0.38619 +7 164 1 4 0 0.6234 0.58792 0.50411 0.40694 +7 165 1 1 0 0.59122 0.56722 0.51712 0.40979 +7 166 1 1 1 0.56838 0.58774 0.50826 0.41299 +7 167 1 1 1 0.5784 0.59008 0.54381 0.40637 +7 168 1 1 0 0.60385 0.60007 0.53611 0.4377 +7 169 1 1 0 0.58775 0.58172 0.5346 0.43027 +7 170 2 2 1 0.62321 0.55708 0.52841 0.46651 +7 
171 1 3 0 0.63522 0.53954 0.49567 0.44104 +7 172 1 2 1 0.61149 0.53715 0.49165 0.39184 +7 173 1 2 1 0.59542 0.5716 0.50583 0.36408 +7 174 1 2 1 0.59577 0.55592 0.49924 0.35093 +7 175 1 2 0 0.57984 0.54968 0.50682 0.35664 +7 176 1 3 0 0.55362 0.52279 0.50539 0.38891 +7 177 1 2 0 0.53257 0.51765 0.49096 0.37394 +7 178 2 3 1 0.51265 0.48652 0.47513 0.33797 +7 179 1 1 1 0.54152 0.48191 0.49638 0.28258 +7 180 2 2 0 0.51037 0.44645 0.50537 0.29273 +7 181 2 4 0 0.51899 0.47024 0.49558 0.30212 +7 182 2 3 0 0.55107 0.44902 0.49999 0.31915 +7 184 2 3 0 0.53488 0.41045 0.50085 0.24677 +7 185 1 2 1 0.51874 0.40118 0.4794 0.23449 +7 186 1 4 0 0.50501 0.35356 0.48161 0.21752 +7 187 1 2 0 0.51149 0.30729 0.4853 0.24047 +7 188 1 3 1 0.51032 0.32082 0.51876 0.24294 +7 189 1 3 1 0.49661 0.33353 0.54357 0.22793 +7 190 2 2 0 0.507 0.32073 0.57431 0.22494 +7 191 2 3 0 0.52917 0.31669 0.53771 0.22422 +7 192 2 4 0 0.4745 0.28164 0.58907 0.23844 +7 193 1 3 0 0.46175 0.31539 0.56068 0.25404 +7 194 1 1 1 0.46292 0.35015 0.49346 0.26066 +7 195 1 1 1 0.47478 0.30222 0.49708 0.24947 +7 196 1 1 1 0.45377 0.31007 0.50207 0.23864 +7 197 1 1 1 0.43098 0.27325 0.5128 0.2 +7 198 1 3 1 0.44254 0.29017 0.50301 0.2 +7 199 1 1 0 0.4086 0.28842 0.50102 0.20853 +7 200 2 3 1 0.40769 0.24911 0.51045 0.20608 +7 201 2 3 1 0.41026 0.23245 0.5347 0.2 +8 2 2 1 1 0.72272 0.36159 0.66489 0.27837 +8 3 1 2 1 0.71968 0.36712 0.68895 0.30518 +8 4 2 4 0 0.72708 0.34154 0.69245 0.3467 +8 5 2 3 0 0.75828 0.38545 0.69597 0.34304 +8 6 1 2 1 0.76986 0.38524 0.72058 0.32163 +8 7 1 2 1 0.76545 0.40963 0.73387 0.34507 +8 8 1 2 1 0.7734 0.39018 0.76278 0.31589 +8 9 1 2 0 0.78821 0.40092 0.72354 0.33535 +8 10 2 4 0 0.74599 0.38238 0.71034 0.33525 +8 11 1 2 0 0.73145 0.3622 0.71513 0.34081 +8 12 1 1 1 0.76426 0.39595 0.77204 0.32657 +8 13 2 2 1 0.76519 0.34091 0.77712 0.31212 +8 14 2 2 0 0.8 0.35002 0.71746 0.31764 +8 15 2 4 0 0.8 0.39403 0.75616 0.2627 +8 16 1 2 1 0.7785 0.35638 0.72783 0.28618 +8 17 1 4 0 0.75761 0.35088 0.73161 0.26425 +8 18 1 2 0 0.71263 0.33098 0.77363 0.21803 +8 19 1 1 1 0.71657 0.35143 0.77443 0.20496 +8 20 1 1 1 0.72809 0.39404 0.75392 0.22908 +8 21 1 4 0 0.71511 0.39616 0.74218 0.2512 +8 22 1 4 0 0.71898 0.42119 0.73278 0.20837 +8 23 1 1 1 0.69772 0.43582 0.74177 0.2 +8 24 1 1 0 0.68991 0.46405 0.70513 0.20777 +8 25 1 1 0 0.68773 0.42401 0.68911 0.2 +8 26 1 1 1 0.72672 0.40715 0.70028 0.2 +8 27 1 1 1 0.73579 0.46605 0.64975 0.22909 +8 28 1 1 1 0.78535 0.49908 0.62345 0.29034 +8 29 1 1 1 0.78645 0.53396 0.63025 0.26708 +8 30 1 1 0 0.8 0.57474 0.62491 0.27928 +8 31 1 2 0 0.8 0.55664 0.59219 0.25888 +8 32 1 1 1 0.77736 0.51533 0.56281 0.28132 +8 33 1 3 1 0.78572 0.49382 0.58043 0.28552 +8 34 1 2 1 0.8 0.48829 0.60231 0.26588 +8 35 1 2 0 0.7994 0.51443 0.58259 0.22521 +8 36 1 2 1 0.8 0.51975 0.57607 0.27626 +8 37 1 1 1 0.73863 0.45143 0.56481 0.30938 +8 38 1 3 0 0.74349 0.46602 0.53322 0.32414 +8 39 2 1 1 0.78366 0.45325 0.49959 0.3419 +8 40 2 3 0 0.75988 0.47175 0.49019 0.37422 +8 41 2 4 0 0.75219 0.46767 0.48061 0.37156 +8 42 1 1 1 0.79631 0.46215 0.50231 0.3728 +8 43 1 1 1 0.79028 0.4427 0.51929 0.40784 +8 44 1 4 1 0.77799 0.41938 0.46929 0.44665 +8 45 1 1 1 0.8 0.46747 0.44668 0.44329 +8 46 1 1 1 0.7941 0.42631 0.45533 0.45917 +8 47 1 1 1 0.7867 0.45833 0.48318 0.44614 +8 48 1 2 1 0.78057 0.47779 0.49572 0.46559 +8 49 1 2 1 0.75523 0.4499 0.52687 0.44407 +8 50 1 1 1 0.76751 0.45794 0.52563 0.46388 +8 51 1 4 0 0.73705 0.40908 0.55823 0.47995 +8 52 1 1 1 0.74353 0.3851 0.57003 0.45614 +8 53 1 1 1 0.74428 0.35783 0.5685 0.50084 +8 
54 1 1 1 0.73822 0.34854 0.55082 0.48659 +8 55 1 1 0 0.70796 0.35846 0.56603 0.50308 +8 56 1 1 1 0.70021 0.33682 0.55176 0.50902 +8 57 2 4 0 0.68065 0.38184 0.54209 0.51248 +8 58 1 1 0 0.70286 0.38027 0.55712 0.5348 +8 59 1 3 1 0.73473 0.36571 0.56786 0.53329 +8 60 1 3 0 0.73215 0.39102 0.57982 0.55559 +8 61 2 1 0 0.75962 0.3911 0.54845 0.54965 +8 62 2 4 0 0.8 0.44048 0.5297 0.53859 +8 63 1 1 1 0.8 0.40466 0.52765 0.54916 +8 64 1 1 0 0.77938 0.41566 0.5246 0.59296 +8 65 1 3 0 0.75575 0.37063 0.51426 0.60145 +8 66 1 4 0 0.74651 0.40092 0.5261 0.58375 +8 67 1 1 0 0.72621 0.37283 0.53563 0.56422 +8 68 1 1 1 0.75576 0.38204 0.53792 0.55336 +8 69 1 1 0 0.75841 0.41184 0.53299 0.53414 +8 70 1 4 1 0.77846 0.39225 0.51454 0.58763 +8 71 1 2 1 0.74423 0.38544 0.5424 0.58411 +8 72 1 1 1 0.73713 0.42122 0.5801 0.57288 +8 73 1 4 1 0.74995 0.41 0.55732 0.61644 +8 74 1 1 1 0.71944 0.44072 0.59852 0.63972 +8 75 1 1 1 0.70632 0.43435 0.62256 0.67195 +8 76 1 1 0 0.7103 0.46444 0.61044 0.66563 +8 77 1 1 1 0.7446 0.4446 0.62344 0.68054 +8 78 1 1 1 0.75614 0.41919 0.6287 0.68762 +8 79 1 1 1 0.75506 0.38343 0.62226 0.65367 +8 80 1 1 1 0.75653 0.38598 0.60912 0.66087 +8 81 1 4 0 0.73538 0.42836 0.62309 0.66879 +8 82 1 1 1 0.75823 0.44314 0.6169 0.62751 +8 83 2 4 0 0.79074 0.456 0.63763 0.61644 +8 84 1 1 1 0.78747 0.44861 0.65821 0.61868 +8 85 1 1 0 0.79994 0.44257 0.63397 0.61364 +8 86 1 4 0 0.79493 0.44211 0.60813 0.59908 +8 87 1 1 0 0.79839 0.40118 0.54373 0.57622 +8 88 1 3 1 0.78054 0.38787 0.53546 0.57043 +8 89 1 2 0 0.8 0.40677 0.51008 0.55589 +8 90 1 4 0 0.8 0.39744 0.49598 0.54377 +8 91 1 1 1 0.76859 0.38977 0.49369 0.54716 +8 92 1 1 1 0.8 0.41353 0.49306 0.53437 +8 93 1 1 1 0.74998 0.4016 0.48395 0.56719 +8 94 1 1 1 0.71606 0.42625 0.42105 0.54828 +8 95 1 1 0 0.76205 0.4315 0.36728 0.54724 +8 96 1 1 1 0.76019 0.45926 0.40119 0.499 +8 97 1 1 1 0.72664 0.45703 0.39698 0.49327 +8 98 2 3 0 0.72215 0.47076 0.34439 0.49322 +8 99 1 3 1 0.69887 0.45542 0.28961 0.49125 +8 100 1 3 0 0.7013 0.44834 0.24887 0.485 +8 101 1 3 0 0.69343 0.45486 0.20955 0.4761 +8 102 1 1 1 0.65582 0.46996 0.2 0.46113 +8 103 1 1 0 0.63285 0.47212 0.2 0.48287 +8 104 1 1 1 0.62979 0.47559 0.2 0.45542 +8 105 1 1 1 0.63382 0.46579 0.2 0.461 +8 106 2 4 1 0.61825 0.46631 0.2 0.50011 +8 107 1 1 0 0.58195 0.48072 0.21164 0.50888 +8 108 1 2 1 0.58192 0.47029 0.26596 0.54347 +8 109 1 4 0 0.54768 0.47038 0.2649 0.52854 +8 110 1 1 0 0.60211 0.48349 0.22934 0.53953 +8 111 1 1 1 0.59398 0.44655 0.23532 0.53892 +8 112 2 3 0 0.57221 0.49309 0.24537 0.52478 +8 113 2 3 0 0.59708 0.47542 0.27066 0.53401 +8 114 1 1 1 0.5744 0.44636 0.29851 0.51063 +8 115 1 4 1 0.58123 0.42844 0.28975 0.5042 +8 116 1 1 0 0.55918 0.40677 0.30295 0.48247 +8 117 1 4 0 0.60221 0.39264 0.25755 0.48985 +8 118 1 1 1 0.62322 0.38096 0.25997 0.47628 +8 119 1 1 0 0.60216 0.40376 0.31645 0.47469 +8 120 1 1 1 0.63668 0.38762 0.32407 0.50604 +8 121 1 4 0 0.64281 0.37437 0.29351 0.49155 +8 122 1 1 1 0.64757 0.40178 0.30047 0.53058 +8 123 1 1 1 0.64206 0.48089 0.34117 0.51659 +8 124 1 1 0 0.61338 0.48418 0.34044 0.52231 +8 125 1 3 0 0.63249 0.48818 0.33356 0.54288 +8 126 1 1 0 0.64622 0.50893 0.3428 0.56352 +8 127 2 4 0 0.65173 0.5071 0.34521 0.56125 +8 128 1 1 1 0.6364 0.49866 0.31433 0.49489 +8 129 1 1 1 0.6426 0.51138 0.30761 0.49526 +8 130 1 3 0 0.65601 0.51237 0.33386 0.49921 +8 132 1 1 1 0.70711 0.53832 0.30634 0.51059 +8 133 1 1 0 0.69874 0.56144 0.3293 0.51964 +8 134 1 4 1 0.67403 0.56679 0.34472 0.55133 +8 135 1 4 0 0.6696 0.58442 0.34797 0.54135 +8 136 1 1 1 0.63877 0.59711 
0.34372 0.51172 +8 137 1 1 1 0.61784 0.63393 0.36052 0.52338 +8 138 2 4 1 0.68505 0.66969 0.36838 0.51195 +8 139 1 1 1 0.66854 0.66013 0.37246 0.52443 +8 140 1 1 0 0.67228 0.64484 0.38002 0.51714 +8 141 1 1 0 0.65464 0.58997 0.39632 0.56173 +8 142 1 1 0 0.61896 0.5957 0.37331 0.58496 +8 143 1 1 1 0.64018 0.59859 0.35183 0.58706 +8 144 1 4 0 0.62543 0.63388 0.33652 0.561 +8 145 1 3 1 0.61792 0.61865 0.30557 0.56444 +8 146 1 1 1 0.64088 0.65857 0.36269 0.55997 +8 147 1 3 1 0.66423 0.67844 0.31305 0.55433 +8 148 1 1 1 0.6649 0.75876 0.30007 0.56184 +8 149 1 1 0 0.64727 0.70167 0.27646 0.51868 +8 150 2 4 1 0.66209 0.68147 0.25647 0.51973 +8 151 1 1 1 0.64634 0.65699 0.26901 0.49803 +8 152 1 4 1 0.65724 0.65722 0.25335 0.4673 +8 153 1 2 1 0.65488 0.6575 0.26338 0.47573 +8 154 1 3 0 0.65342 0.68414 0.25197 0.49104 +8 155 2 1 1 0.64961 0.69631 0.27227 0.50288 +8 156 1 3 0 0.64466 0.64614 0.29655 0.50133 +8 157 1 1 1 0.63516 0.66127 0.26392 0.45544 +8 158 1 1 1 0.65012 0.69597 0.26444 0.44891 +8 159 1 1 1 0.63283 0.72946 0.25341 0.44042 +8 160 1 1 0 0.62373 0.74285 0.26646 0.4657 +8 161 1 2 0 0.64012 0.6942 0.25127 0.42213 +8 162 1 1 1 0.62625 0.72194 0.27347 0.38954 +8 163 1 1 1 0.65363 0.73949 0.25458 0.42168 +8 164 1 1 1 0.62054 0.70957 0.25526 0.39074 +8 165 2 4 1 0.61797 0.72743 0.24834 0.39418 +8 166 1 1 1 0.62556 0.73738 0.22064 0.40057 +8 167 1 1 0 0.60339 0.71756 0.2 0.39676 +8 168 1 1 1 0.58168 0.71372 0.21836 0.38182 +8 169 1 4 0 0.62985 0.75977 0.2 0.38573 +8 170 1 1 1 0.65444 0.72687 0.2 0.44398 +8 171 1 1 0 0.67974 0.71431 0.20057 0.4605 +8 172 1 1 1 0.72836 0.69795 0.2 0.42323 +8 173 1 1 1 0.69442 0.67182 0.2 0.41072 +8 174 1 1 0 0.68041 0.71198 0.2 0.42484 +8 175 1 1 1 0.68333 0.72709 0.2 0.39269 +8 176 1 1 1 0.67668 0.71537 0.2 0.38646 +8 177 1 1 0 0.65651 0.72791 0.2 0.39812 +8 178 1 4 1 0.711 0.75634 0.2 0.38376 +8 179 1 1 1 0.71544 0.72615 0.2 0.34603 +8 180 1 1 0 0.72974 0.74606 0.22999 0.31491 +8 181 2 2 1 0.70108 0.7698 0.25176 0.30004 +8 182 1 1 1 0.67697 0.74005 0.24558 0.29188 +8 183 1 1 1 0.76022 0.73588 0.27982 0.29037 +8 184 2 2 1 0.72328 0.73178 0.29401 0.27617 +8 185 2 3 0 0.73633 0.7141 0.3185 0.32137 +8 186 1 1 1 0.71002 0.7047 0.3316 0.28957 +8 187 1 1 0 0.75527 0.70136 0.34886 0.30559 +8 188 1 1 1 0.78373 0.68291 0.3439 0.28045 +8 189 1 1 0 0.7765 0.64914 0.36649 0.25248 +8 190 1 1 1 0.78414 0.6469 0.3685 0.25279 +8 191 1 3 1 0.78078 0.61753 0.33387 0.26636 +8 192 1 1 1 0.77767 0.63941 0.36395 0.27625 +8 193 1 3 0 0.76366 0.6321 0.35046 0.24559 +8 194 1 1 1 0.7597 0.64028 0.36988 0.21377 +8 195 1 4 0 0.73823 0.66169 0.32258 0.2 +8 196 1 1 1 0.73748 0.64822 0.31795 0.2 +8 197 1 1 1 0.75941 0.62476 0.2908 0.2 +8 198 1 1 1 0.77609 0.66111 0.32098 0.2278 +8 199 1 1 0 0.77967 0.62898 0.34737 0.22289 +8 200 1 4 0 0.73928 0.63717 0.33916 0.22653 +8 201 1 1 1 0.78518 0.6633 0.30215 0.23086 +9 1 1 3 0 0.75589 0.74958 0.4954 0.50785 +9 2 1 4 0 0.77074 0.74753 0.49091 0.51417 +9 3 2 3 0 0.75312 0.7488 0.51292 0.51973 +9 4 2 4 1 0.75142 0.73553 0.49914 0.51276 +9 5 2 2 1 0.75532 0.74618 0.51337 0.54812 +9 6 2 4 0 0.7778 0.75498 0.50815 0.57844 +9 7 2 2 1 0.8 0.79755 0.54204 0.61636 +9 8 2 3 1 0.8 0.8 0.5334 0.58682 +9 9 1 2 1 0.79834 0.8 0.49191 0.61194 +9 10 1 2 1 0.8 0.8 0.49474 0.59715 +9 11 1 3 0 0.79249 0.8 0.51127 0.59645 +9 12 1 2 1 0.75369 0.8 0.50994 0.59665 +9 13 2 4 0 0.76151 0.79994 0.53482 0.58578 +9 14 1 2 1 0.74082 0.8 0.53074 0.54457 +9 15 1 3 0 0.72549 0.79943 0.52863 0.5383 +9 16 1 2 1 0.70023 0.8 0.53768 0.52082 +9 17 1 3 1 0.73274 0.8 0.53512 0.50637 +9 
18 1 2 1 0.72325 0.8 0.51422 0.47233 +9 19 1 2 1 0.6987 0.7603 0.52669 0.57786 +9 20 1 2 1 0.70606 0.7665 0.50784 0.54725 +9 21 1 3 0 0.74568 0.8 0.47528 0.48847 +9 22 1 2 1 0.73602 0.71019 0.47623 0.46379 +9 23 1 2 1 0.6995 0.78311 0.50349 0.45863 +9 24 2 3 0 0.73815 0.78553 0.48827 0.51755 +9 25 1 2 0 0.73645 0.8 0.46058 0.51965 +9 26 1 2 1 0.70921 0.8 0.43961 0.46746 +9 27 1 2 1 0.70937 0.79914 0.40737 0.52818 +9 28 1 2 1 0.72713 0.79376 0.409 0.51479 +9 29 1 2 1 0.74288 0.8 0.41613 0.5544 +9 30 1 2 1 0.7556 0.78558 0.39214 0.61406 +9 31 1 2 1 0.76292 0.8 0.40288 0.60137 +9 32 1 2 1 0.78922 0.8 0.38361 0.56391 +9 33 2 4 1 0.8 0.75956 0.33854 0.54354 +9 34 2 1 1 0.8 0.74209 0.34263 0.53183 +9 35 2 1 1 0.76504 0.6834 0.30965 0.49896 +9 36 2 4 0 0.77918 0.67152 0.31268 0.50109 +9 37 1 4 0 0.79817 0.67787 0.33283 0.51364 +9 38 1 3 0 0.8 0.65923 0.32099 0.51672 +9 39 1 2 1 0.79847 0.66575 0.28866 0.49839 +9 40 1 2 1 0.8 0.62933 0.24114 0.46222 +9 41 1 3 1 0.8 0.58321 0.25503 0.45842 +9 42 1 3 0 0.8 0.55711 0.26149 0.43178 +9 43 1 2 0 0.76933 0.54428 0.26785 0.45191 +9 44 1 2 0 0.76406 0.56701 0.25289 0.4396 +9 45 2 1 1 0.74421 0.53342 0.26598 0.44684 +9 46 2 3 1 0.8 0.52758 0.2609 0.45764 +9 47 2 3 1 0.77372 0.53752 0.28265 0.40529 +9 48 2 3 1 0.7658 0.52954 0.28731 0.40651 +9 49 2 3 1 0.78144 0.52903 0.29352 0.37285 +9 50 2 3 0 0.76062 0.50885 0.27632 0.38626 +9 51 1 2 1 0.77829 0.50797 0.25896 0.43098 +9 52 1 3 0 0.79212 0.48195 0.2946 0.42884 +9 53 2 3 1 0.7989 0.48888 0.27181 0.44709 +9 54 1 1 0 0.8 0.47165 0.28961 0.47398 +9 55 2 3 0 0.76304 0.48877 0.28486 0.45241 +9 56 2 3 1 0.76383 0.48391 0.2639 0.48111 +9 57 2 3 0 0.77142 0.42317 0.21136 0.47006 +9 58 1 3 0 0.78932 0.43569 0.20504 0.4815 +9 59 1 4 1 0.8 0.44924 0.21098 0.46179 +9 60 1 2 0 0.8 0.42929 0.2 0.49275 +9 61 1 2 0 0.8 0.4061 0.20125 0.50836 +9 62 2 2 1 0.7939 0.43189 0.2 0.45065 +9 63 2 2 0 0.77599 0.42752 0.2 0.43872 +9 64 1 2 1 0.7732 0.41677 0.22647 0.40868 +9 65 2 4 0 0.78238 0.43176 0.22137 0.41566 +9 66 2 3 0 0.76354 0.45853 0.24257 0.41647 +9 67 1 4 0 0.74726 0.45721 0.2302 0.42024 +9 68 1 3 1 0.71946 0.39431 0.25842 0.3981 +9 69 1 1 1 0.7615 0.3867 0.23508 0.41493 +9 70 1 3 0 0.769 0.40698 0.21787 0.4461 +9 71 1 1 1 0.7758 0.38461 0.27463 0.50449 +9 72 2 4 1 0.75578 0.3849 0.28129 0.51699 +9 73 1 4 0 0.75883 0.37006 0.27127 0.51168 +9 74 1 1 0 0.77596 0.34578 0.20299 0.4647 +9 75 2 3 0 0.79787 0.36426 0.2 0.45844 +9 76 1 1 1 0.79981 0.34511 0.20752 0.45614 +9 77 2 4 1 0.8 0.27499 0.2198 0.4812 +9 78 1 1 1 0.8 0.27516 0.20862 0.49358 +9 79 2 1 1 0.8 0.24672 0.2 0.47946 +9 80 2 1 1 0.76502 0.24829 0.21118 0.4635 +9 81 2 4 0 0.77632 0.27321 0.20222 0.45215 +9 82 2 3 1 0.77724 0.24133 0.22068 0.46303 +9 83 2 3 0 0.75954 0.25195 0.2 0.46168 +9 84 2 4 1 0.75151 0.23705 0.22881 0.49423 +9 85 1 1 1 0.75631 0.2 0.2 0.44735 +9 86 1 1 1 0.7591 0.2 0.20426 0.48861 +9 87 1 1 1 0.69645 0.2 0.21528 0.50524 +9 88 1 4 1 0.64578 0.2 0.2 0.51599 +9 89 1 1 0 0.61847 0.24198 0.2 0.51325 +9 90 1 4 1 0.60891 0.22231 0.20642 0.52709 +9 91 2 1 1 0.56935 0.2 0.2 0.53635 +9 92 2 4 0 0.59362 0.22588 0.20084 0.50491 +9 93 1 4 0 0.58413 0.22833 0.20064 0.52309 +9 94 1 1 0 0.59485 0.28355 0.2 0.53893 +9 95 1 3 0 0.63423 0.26507 0.2 0.52302 +9 96 2 3 0 0.60383 0.2807 0.2 0.51331 +9 97 1 2 0 0.58153 0.28407 0.2 0.50771 +9 98 1 3 0 0.58763 0.30448 0.20177 0.51951 +9 99 1 1 0 0.60168 0.29137 0.22474 0.5328 +9 100 2 4 0 0.60534 0.30633 0.22142 0.5267 +9 101 2 4 0 0.61744 0.32021 0.2 0.51496 +9 102 1 2 0 0.60371 0.33654 0.2 0.48967 +9 103 1 2 1 
0.61568 0.31611 0.24811 0.48848 +9 104 2 2 1 0.64658 0.29741 0.23902 0.49939 +9 105 2 4 0 0.63724 0.34588 0.24255 0.54398 +9 106 1 3 1 0.68848 0.34246 0.21635 0.59986 +9 107 1 2 0 0.70055 0.35615 0.22945 0.65007 +9 108 1 2 1 0.72668 0.4019 0.22144 0.67431 +9 109 1 2 0 0.73614 0.37601 0.22303 0.65517 +9 110 2 4 0 0.70871 0.38318 0.21633 0.64814 +9 111 2 3 0 0.72082 0.4259 0.22911 0.64106 +9 112 1 1 1 0.74365 0.39062 0.2159 0.67835 +9 113 2 4 0 0.74488 0.42965 0.21564 0.64292 +9 114 1 1 0 0.76232 0.43526 0.23636 0.66444 +9 115 2 1 1 0.77108 0.43833 0.21982 0.72201 +9 116 1 4 1 0.7764 0.38469 0.2197 0.68629 +9 117 2 4 1 0.76204 0.37327 0.22346 0.73047 +9 118 2 4 1 0.73289 0.37751 0.24291 0.76032 +9 119 2 4 1 0.68179 0.37169 0.26213 0.7443 +9 120 2 4 1 0.66835 0.38497 0.29941 0.77099 +9 121 2 1 0 0.69142 0.35369 0.32598 0.75279 +9 122 2 4 1 0.67488 0.37777 0.34756 0.7462 +9 123 2 2 0 0.68119 0.34416 0.31133 0.73815 +9 124 2 4 1 0.66546 0.35682 0.29042 0.77882 +9 125 2 4 0 0.67253 0.39233 0.31288 0.8 +9 127 1 1 1 0.66914 0.36629 0.3453 0.73957 +9 128 2 4 1 0.61275 0.37034 0.35161 0.75057 +9 129 2 4 1 0.62213 0.36436 0.33584 0.72704 +9 130 2 4 1 0.62269 0.34953 0.30498 0.76824 +9 131 1 2 0 0.60851 0.32652 0.30339 0.72964 +9 132 2 4 1 0.60786 0.32857 0.29987 0.72062 +9 133 2 4 1 0.58617 0.33267 0.3049 0.69372 +9 134 2 1 1 0.56321 0.33668 0.30934 0.63765 +9 135 2 1 1 0.56034 0.34 0.32951 0.62249 +9 136 2 4 1 0.59695 0.35349 0.33372 0.64054 +9 137 2 4 0 0.5942 0.33099 0.2794 0.68589 +9 138 2 4 1 0.55607 0.27542 0.26988 0.68944 +9 139 2 1 0 0.55672 0.2687 0.26835 0.71414 +9 140 2 4 1 0.561 0.27377 0.26538 0.71706 +9 141 2 4 1 0.57517 0.25666 0.28595 0.71656 +9 142 2 1 1 0.61529 0.2424 0.26603 0.70067 +9 143 2 4 1 0.64528 0.27415 0.22106 0.73397 +9 144 2 1 0 0.61906 0.28121 0.23817 0.75352 +9 145 2 4 1 0.66645 0.25274 0.27652 0.78179 +9 146 2 2 1 0.63504 0.22433 0.27043 0.78352 +9 147 2 4 1 0.65923 0.22031 0.24342 0.8 +9 148 2 2 0 0.66101 0.24213 0.2582 0.79169 +9 149 2 4 0 0.68021 0.24563 0.27224 0.74907 +9 150 2 2 0 0.64328 0.24075 0.23307 0.70195 +9 151 2 2 0 0.65067 0.22962 0.23184 0.70443 +9 152 1 3 0 0.67629 0.2 0.26824 0.6873 +9 153 1 2 0 0.70799 0.20293 0.26597 0.68027 +9 154 1 1 1 0.72808 0.2 0.2558 0.69619 +9 155 1 3 0 0.67266 0.2 0.26307 0.75591 +9 156 1 4 1 0.66292 0.2 0.23587 0.79649 +9 157 1 1 0 0.66732 0.20346 0.22775 0.79178 +9 158 1 1 1 0.68327 0.20605 0.22413 0.78059 +9 159 1 4 1 0.69984 0.2 0.20639 0.79022 +9 160 1 1 1 0.67476 0.2 0.21937 0.76964 +9 161 2 4 1 0.68327 0.21611 0.20076 0.7332 +9 162 2 4 1 0.67228 0.20292 0.20451 0.72464 +9 163 2 4 1 0.67515 0.25669 0.2 0.76848 +9 164 2 4 1 0.63301 0.25602 0.2 0.77518 +9 165 2 4 1 0.6279 0.28699 0.25009 0.76979 +9 166 2 1 0 0.65378 0.28915 0.21989 0.76559 +9 167 2 4 1 0.6211 0.31583 0.24256 0.79443 +9 168 2 4 1 0.61409 0.30287 0.28766 0.8 +9 169 2 4 1 0.65255 0.35784 0.29354 0.8 +9 170 1 1 0 0.6344 0.31885 0.31769 0.79089 +9 171 2 4 1 0.65216 0.33764 0.36155 0.76434 +9 172 2 4 1 0.67819 0.39544 0.3474 0.74471 +9 173 2 4 1 0.68263 0.40543 0.33819 0.74302 +9 174 2 1 1 0.66787 0.37755 0.3207 0.77943 +9 175 2 1 1 0.65956 0.36316 0.29664 0.76993 +9 176 2 4 1 0.65754 0.35879 0.28283 0.7549 +9 177 2 1 0 0.63822 0.38018 0.27172 0.8 +9 178 1 4 0 0.63985 0.42095 0.2254 0.8 +9 179 1 2 0 0.64619 0.42085 0.21304 0.75356 +9 180 1 1 1 0.59991 0.42416 0.2385 0.75872 +9 181 2 3 0 0.59123 0.44978 0.2418 0.7741 +9 182 1 1 0 0.58685 0.47005 0.26321 0.76732 +9 183 2 1 0 0.59901 0.50363 0.23276 0.75541 +9 184 2 1 0 0.61453 0.50371 0.28101 0.75982 +9 185 1 2 
1 0.6131 0.53879 0.26269 0.74025 +9 186 1 2 1 0.62768 0.50382 0.26046 0.71628 +9 187 1 3 0 0.66219 0.52766 0.26739 0.70902 +9 188 1 2 0 0.63736 0.56407 0.29745 0.72158 +9 189 1 4 0 0.62284 0.55832 0.31837 0.72099 +9 190 1 1 1 0.65609 0.53865 0.2909 0.69863 +9 191 1 4 1 0.63598 0.56449 0.29165 0.74445 +9 192 1 4 1 0.65425 0.55375 0.28118 0.76522 +9 193 2 4 1 0.68603 0.57074 0.27263 0.76083 +9 194 2 2 1 0.64706 0.56614 0.2661 0.76867 +9 195 2 4 1 0.68318 0.51971 0.26064 0.74966 +9 196 2 4 0 0.68059 0.497 0.25007 0.75177 +9 197 2 2 1 0.68456 0.51478 0.30823 0.74594 +9 198 1 2 0 0.71844 0.52649 0.33788 0.74639 +9 199 2 4 1 0.71123 0.551 0.35148 0.74464 +9 200 2 3 0 0.72001 0.52342 0.33865 0.72155 +9 201 2 2 1 0.74859 0.49433 0.34542 0.70368 +10 2 1 2 0 0.79533 0.61486 0.33702 0.64806 +10 3 1 3 0 0.77911 0.60512 0.33215 0.66138 +10 4 1 1 1 0.78337 0.61632 0.35911 0.63626 +10 5 1 4 1 0.7892 0.61803 0.31638 0.62637 +10 6 1 4 0 0.78802 0.6319 0.33504 0.61932 +10 7 1 1 1 0.8 0.62275 0.35163 0.64329 +10 8 1 1 1 0.8 0.67669 0.35841 0.6561 +10 9 1 1 0 0.79212 0.63642 0.32298 0.65235 +10 10 2 1 1 0.79185 0.65624 0.33595 0.61702 +10 11 1 1 0 0.74926 0.72931 0.34746 0.58345 +10 12 1 2 0 0.75632 0.76559 0.34628 0.58341 +10 13 1 2 1 0.7639 0.74425 0.36349 0.58836 +10 14 1 4 1 0.76054 0.77998 0.3378 0.60271 +10 15 1 1 1 0.8 0.78498 0.35739 0.61118 +10 16 1 1 1 0.8 0.8 0.35731 0.59052 +10 17 1 1 1 0.8 0.78324 0.35449 0.58059 +10 18 1 1 1 0.8 0.8 0.37265 0.5926 +10 19 1 1 1 0.8 0.79256 0.37965 0.59253 +10 20 1 2 1 0.8 0.8 0.36541 0.59428 +10 21 1 3 1 0.8 0.77415 0.41601 0.60457 +10 22 1 1 1 0.77449 0.77168 0.42524 0.55168 +10 23 1 1 0 0.8 0.75067 0.38646 0.55036 +10 24 2 3 1 0.8 0.7915 0.40166 0.48547 +10 25 1 2 1 0.76914 0.8 0.42448 0.49218 +10 26 1 4 0 0.77758 0.79562 0.44998 0.51029 +10 27 1 1 1 0.8 0.8 0.42856 0.52042 +10 28 1 4 0 0.8 0.8 0.4345 0.50575 +10 29 1 1 1 0.8 0.75882 0.4448 0.48349 +10 30 1 3 0 0.78175 0.8 0.40258 0.44428 +10 31 1 1 1 0.76652 0.8 0.39384 0.39106 +10 32 1 1 0 0.79291 0.78602 0.37873 0.40843 +10 33 1 4 1 0.8 0.76245 0.39005 0.41561 +10 34 1 1 1 0.76776 0.8 0.35845 0.37649 +10 35 1 1 0 0.76479 0.8 0.38451 0.39538 +10 36 1 1 1 0.78675 0.8 0.43522 0.37399 +10 37 1 2 1 0.76881 0.8 0.42214 0.3937 +10 38 1 1 1 0.76719 0.77319 0.46031 0.41895 +10 39 1 1 1 0.76835 0.76873 0.48054 0.4191 +10 40 1 3 1 0.73738 0.8 0.48358 0.43343 +10 41 1 1 1 0.72322 0.8 0.47422 0.44369 +10 42 1 2 1 0.71153 0.8 0.46335 0.42345 +10 43 2 2 1 0.68255 0.8 0.45305 0.44001 +10 44 1 1 1 0.69089 0.78104 0.47197 0.42788 +10 45 1 4 0 0.66732 0.79469 0.44878 0.44591 +10 46 1 3 1 0.65766 0.79189 0.46314 0.44544 +10 47 1 1 0 0.5976 0.7684 0.47052 0.42774 +10 48 1 1 0 0.55005 0.74119 0.42341 0.48673 +10 49 1 1 0 0.53023 0.72798 0.41138 0.52417 +10 50 2 4 1 0.52611 0.77921 0.39979 0.53658 +10 51 1 3 0 0.49055 0.8 0.42019 0.55675 +10 52 1 2 0 0.52201 0.8 0.38315 0.53647 +10 53 1 4 1 0.48311 0.79404 0.38362 0.52962 +10 54 1 1 1 0.47851 0.8 0.37604 0.53622 +10 55 2 3 0 0.47114 0.78186 0.32555 0.54864 +10 56 1 3 0 0.52498 0.8 0.34821 0.50809 +10 57 2 3 1 0.5568 0.73854 0.32226 0.50379 +10 58 1 1 1 0.59107 0.77508 0.35782 0.48549 +10 59 1 2 1 0.57859 0.75152 0.36376 0.45497 +10 60 1 2 1 0.58538 0.78769 0.35096 0.45682 +10 61 1 2 1 0.5831 0.77991 0.36414 0.42087 +10 62 1 2 0 0.62814 0.73505 0.34981 0.38763 +10 63 2 2 1 0.6263 0.70483 0.30452 0.36641 +10 64 2 4 1 0.59054 0.67293 0.31781 0.38955 +10 65 1 1 1 0.59062 0.68057 0.37094 0.38191 +10 66 2 2 0 0.6 0.69509 0.37937 0.3568 +10 67 2 2 1 0.56786 0.66458 0.3822 0.36675 +10 68 
1 1 1 0.5604 0.70035 0.38482 0.35478 +10 69 2 2 1 0.57104 0.71245 0.39957 0.35664 +10 70 2 1 0 0.54359 0.71196 0.42378 0.3571 +10 72 2 4 0 0.52826 0.71692 0.47957 0.39249 +10 73 1 1 0 0.52679 0.7008 0.51601 0.41836 +10 74 1 4 0 0.51832 0.69455 0.52682 0.45528 +10 75 1 1 1 0.51664 0.69787 0.52532 0.41025 +10 76 1 3 1 0.47841 0.70222 0.54519 0.40828 +10 77 1 4 1 0.52181 0.7075 0.58058 0.4413 +10 78 1 1 0 0.53371 0.70709 0.61293 0.43702 +10 79 1 3 0 0.51187 0.69805 0.59148 0.41795 +10 80 1 1 0 0.54662 0.71333 0.60554 0.38272 +10 81 1 1 1 0.56164 0.69111 0.62202 0.35641 +10 82 1 3 1 0.58091 0.68219 0.60688 0.36051 +10 83 2 1 1 0.57962 0.68056 0.65021 0.36674 +10 84 1 3 1 0.55223 0.72185 0.6166 0.38495 +10 85 1 3 1 0.53855 0.73672 0.61654 0.40548 +10 86 1 3 0 0.57337 0.74868 0.60743 0.41265 +10 87 2 1 0 0.58888 0.77335 0.60741 0.42571 +10 88 1 2 1 0.57303 0.74299 0.64668 0.39972 +10 89 1 4 0 0.59801 0.76723 0.67844 0.37176 +10 90 1 1 0 0.59176 0.76852 0.67096 0.35297 +10 91 1 4 0 0.57924 0.77291 0.6587 0.31125 +10 92 1 1 1 0.51333 0.72898 0.67478 0.32844 +10 93 1 1 1 0.48641 0.7256 0.70118 0.32468 +10 94 1 1 0 0.5281 0.75115 0.7171 0.31097 +10 95 1 1 0 0.54681 0.72528 0.66602 0.3031 +10 96 1 3 1 0.55095 0.75749 0.68133 0.27906 +10 97 1 1 1 0.51773 0.75759 0.70518 0.28213 +10 98 1 1 0 0.55385 0.74856 0.71976 0.32485 +10 99 1 1 0 0.52698 0.73911 0.72153 0.34305 +10 100 1 3 0 0.52457 0.68834 0.6949 0.33497 +10 101 1 1 1 0.50922 0.70878 0.68 0.32982 +10 102 2 1 1 0.48483 0.64737 0.69028 0.34193 +10 103 2 1 0 0.45722 0.6843 0.7382 0.38723 +10 104 1 1 0 0.40999 0.68738 0.71106 0.40523 +10 105 2 4 1 0.40716 0.6782 0.72897 0.428 +10 106 1 1 1 0.37528 0.64565 0.72724 0.41672 +10 107 1 3 1 0.34385 0.62057 0.75223 0.40683 +10 108 1 2 1 0.34604 0.63244 0.75108 0.39212 +10 109 1 2 0 0.33945 0.59741 0.76341 0.40183 +10 110 2 4 1 0.33353 0.64323 0.77437 0.41816 +10 111 1 1 1 0.31872 0.64159 0.78477 0.39964 +10 112 1 4 0 0.33592 0.63011 0.75495 0.40405 +10 113 1 1 1 0.31717 0.66784 0.76968 0.42643 +10 114 2 1 0 0.29305 0.64843 0.77733 0.43549 +10 115 1 3 1 0.31068 0.65657 0.75338 0.4475 +10 116 1 4 0 0.30067 0.63712 0.75722 0.44616 +10 117 2 4 0 0.29281 0.63177 0.77055 0.45544 +10 118 2 4 1 0.27562 0.63623 0.74456 0.49864 +10 119 2 4 1 0.28123 0.6148 0.7298 0.50748 +10 120 1 1 0 0.29372 0.60176 0.74845 0.48459 +10 121 1 4 1 0.31879 0.61007 0.75153 0.51382 +10 122 1 1 0 0.34325 0.6292 0.78053 0.49692 +10 123 1 3 0 0.33735 0.59963 0.78052 0.45578 +10 124 2 4 0 0.3061 0.6266 0.78756 0.43671 +10 125 1 2 0 0.2692 0.61377 0.78148 0.46674 +10 126 2 4 0 0.28086 0.60337 0.74509 0.49568 +10 127 1 2 1 0.25623 0.61038 0.72673 0.50664 +10 128 1 2 1 0.22991 0.60778 0.69596 0.50548 +10 129 1 2 0 0.21403 0.62861 0.74918 0.56429 +10 130 2 3 1 0.2 0.61951 0.74461 0.5241 +10 131 1 2 0 0.2016 0.57322 0.73803 0.53287 +10 132 2 2 1 0.2 0.57338 0.76688 0.57414 +10 133 2 3 1 0.24192 0.54513 0.78845 0.5805 +10 134 2 3 1 0.22449 0.53931 0.76575 0.54908 +10 135 1 1 0 0.23756 0.49219 0.79371 0.58829 +10 136 2 4 0 0.2 0.51495 0.8 0.59757 +10 137 1 4 1 0.25076 0.50844 0.79613 0.56615 +10 138 1 1 0 0.21233 0.51514 0.79756 0.56617 +10 139 2 3 1 0.21396 0.53692 0.8 0.5165 +10 140 1 2 1 0.24112 0.5471 0.79205 0.53283 +10 141 1 1 0 0.2593 0.56942 0.79459 0.5476 +10 142 1 2 1 0.26206 0.52408 0.8 0.54105 +10 143 1 2 0 0.27497 0.5153 0.78909 0.49856 +10 144 2 4 1 0.27226 0.48823 0.8 0.51845 +10 145 1 3 1 0.25567 0.45553 0.79816 0.52488 +10 146 1 1 1 0.25052 0.42584 0.8 0.5299 +10 147 1 1 0 0.26973 0.42681 0.75384 0.53273 +10 148 2 2 0 0.2731 
0.42783 0.77287 0.53278 +10 149 1 1 0 0.27367 0.44849 0.79407 0.55838 +10 150 2 1 0 0.2931 0.49084 0.8 0.5316 +10 151 1 3 0 0.28059 0.52164 0.74324 0.53733 +10 152 1 1 0 0.28957 0.53893 0.75354 0.54824 +10 153 2 3 1 0.28571 0.5029 0.75907 0.54966 +10 154 1 1 0 0.31502 0.49518 0.76867 0.53628 +10 155 1 2 1 0.32785 0.49781 0.7489 0.53514 +10 156 1 1 1 0.40513 0.47091 0.77671 0.51453 +10 157 1 1 1 0.43223 0.45748 0.77637 0.51789 +10 158 1 3 1 0.46163 0.47294 0.74615 0.50768 +10 159 2 4 0 0.46601 0.48429 0.74986 0.49598 +10 160 2 2 0 0.47188 0.49195 0.71257 0.45374 +10 161 2 1 1 0.44031 0.49121 0.68641 0.47482 +10 162 2 3 0 0.41901 0.49526 0.70796 0.44658 +10 163 2 4 1 0.43251 0.46366 0.70508 0.42232 +10 164 1 2 1 0.41364 0.48043 0.70468 0.42756 +10 165 1 2 1 0.40867 0.48836 0.7352 0.39318 +10 166 1 2 1 0.38058 0.5313 0.7467 0.39111 +10 167 2 2 1 0.36313 0.57037 0.70606 0.40742 +10 168 2 1 1 0.35806 0.52469 0.72541 0.39842 +10 169 2 1 1 0.3965 0.55552 0.70164 0.39636 +10 170 1 4 1 0.39881 0.58211 0.7039 0.39538 +10 171 1 2 0 0.40161 0.64315 0.66595 0.43253 +10 172 1 2 1 0.40527 0.63142 0.68344 0.47814 +10 173 1 2 1 0.41818 0.64688 0.68375 0.49872 +10 174 2 4 1 0.43695 0.66292 0.67543 0.48242 +10 175 2 3 1 0.39064 0.70592 0.72162 0.50853 +10 176 2 3 1 0.41418 0.72227 0.7404 0.50848 +10 177 1 4 0 0.42828 0.73365 0.74399 0.50729 +10 178 1 1 0 0.4385 0.714 0.75182 0.52906 +10 179 1 1 1 0.38462 0.71399 0.78125 0.51557 +10 180 1 1 0 0.38243 0.6897 0.8 0.56269 +10 181 1 2 1 0.41956 0.6537 0.78119 0.53999 +10 182 1 3 1 0.46125 0.60926 0.78334 0.5355 +10 183 1 4 1 0.5079 0.58234 0.79041 0.53978 +10 185 2 3 1 0.58773 0.61974 0.77567 0.58005 +10 186 2 1 1 0.56994 0.61598 0.77697 0.5845 +10 187 1 4 1 0.58265 0.65108 0.75528 0.55322 +10 188 1 1 1 0.61641 0.63525 0.73075 0.53583 +10 189 2 3 0 0.61089 0.68513 0.74215 0.55119 +10 190 1 4 0 0.67254 0.67502 0.74304 0.52344 +10 191 2 3 1 0.71849 0.66315 0.75994 0.54305 +10 192 1 2 1 0.73545 0.6294 0.79004 0.53427 +10 194 2 1 0 0.76957 0.61338 0.74831 0.52514 +10 195 1 3 0 0.73544 0.61721 0.75629 0.4949 +10 196 1 2 1 0.7462 0.65698 0.72142 0.46658 +10 197 1 2 0 0.74487 0.67712 0.70299 0.4334 +10 198 1 4 0 0.75813 0.63902 0.7132 0.38684 +10 199 1 3 1 0.77195 0.65874 0.73815 0.4148 +10 200 2 4 0 0.75457 0.63628 0.72188 0.43059 +10 201 1 2 1 0.72843 0.646 0.71037 0.46274 +11 1 2 3 0 0.55043 0.5779 0.35359 0.35152 +11 3 2 3 1 0.56558 0.52928 0.27501 0.34064 +11 4 2 1 0 0.53392 0.562 0.31016 0.31646 +11 5 2 3 1 0.53131 0.54329 0.26991 0.33176 +11 6 2 2 0 0.51301 0.54949 0.31655 0.3123 +11 7 2 2 1 0.49465 0.56101 0.30935 0.29295 +11 8 2 3 1 0.49977 0.59981 0.32269 0.29142 +11 9 2 2 1 0.5337 0.64168 0.27847 0.23984 +11 10 2 2 1 0.54034 0.62302 0.28238 0.25339 +11 11 2 3 0 0.54457 0.62412 0.2874 0.25673 +11 12 2 3 0 0.56074 0.61232 0.31517 0.23104 +11 13 2 1 0 0.52356 0.59777 0.32553 0.237 +11 14 2 3 1 0.51113 0.58218 0.2904 0.24704 +11 15 2 4 0 0.49545 0.58171 0.29121 0.22882 +11 16 2 3 1 0.4855 0.57166 0.29957 0.24484 +11 17 2 3 0 0.46832 0.58533 0.29036 0.26997 +11 18 2 3 0 0.48141 0.5609 0.29432 0.20644 +11 19 2 2 0 0.48431 0.57657 0.27478 0.22073 +11 20 2 4 0 0.5241 0.57124 0.29602 0.24342 +11 21 1 1 0 0.52299 0.60777 0.32382 0.27311 +11 22 1 2 1 0.55617 0.6093 0.31869 0.25054 +11 23 2 3 0 0.53125 0.56702 0.37266 0.23961 +11 24 2 3 1 0.54199 0.54908 0.40232 0.24116 +11 25 1 3 0 0.57052 0.54133 0.35003 0.22522 +11 26 2 2 0 0.55783 0.57273 0.32456 0.2 +11 27 1 1 1 0.58077 0.60062 0.32838 0.2236 +11 28 1 1 1 0.55735 0.57087 0.29145 0.22587 +11 29 1 1 1 0.57661 0.59907 
0.24968 0.25374 +11 30 1 4 0 0.54437 0.63611 0.24601 0.24774 +11 31 1 1 0 0.52996 0.60176 0.21448 0.27302 +11 32 2 3 0 0.50178 0.63469 0.2 0.24459 +11 33 1 1 1 0.52212 0.64377 0.20389 0.2 +11 34 1 4 0 0.49338 0.67082 0.2 0.2053 +11 35 1 3 0 0.47172 0.66299 0.2 0.25588 +11 36 1 1 1 0.48227 0.66312 0.21198 0.24343 +11 37 1 1 1 0.4579 0.66234 0.2 0.2323 +11 38 1 1 0 0.45205 0.60848 0.24113 0.28058 +11 39 1 1 0 0.46492 0.65123 0.21024 0.2792 +11 40 1 2 0 0.46757 0.63588 0.2 0.27274 +11 41 2 2 0 0.46378 0.61741 0.2 0.20137 +11 42 2 1 1 0.43549 0.60662 0.2191 0.25492 +11 43 1 1 0 0.44882 0.58923 0.20251 0.23947 +11 44 2 4 0 0.44739 0.61681 0.2209 0.26471 +11 45 2 4 1 0.46384 0.59305 0.25319 0.24613 +11 46 2 2 1 0.51119 0.59576 0.24912 0.24026 +11 47 2 2 1 0.51119 0.61554 0.27732 0.25702 +11 48 2 4 0 0.46991 0.64183 0.23582 0.22444 +11 49 1 2 1 0.48296 0.63369 0.20535 0.27083 +11 50 1 2 0 0.5426 0.70366 0.2 0.32619 +11 51 2 4 0 0.50892 0.69992 0.2 0.32438 +11 52 1 4 1 0.4954 0.71494 0.23481 0.31973 +11 53 1 4 0 0.45152 0.75401 0.22295 0.31486 +11 54 1 4 1 0.44303 0.76902 0.2 0.33248 +11 55 1 2 1 0.44439 0.76276 0.2 0.35535 +11 56 1 2 1 0.47164 0.77372 0.20991 0.30229 +11 57 1 4 1 0.42252 0.76981 0.20823 0.27657 +11 58 1 2 0 0.4626 0.77064 0.22131 0.23807 +11 59 2 4 0 0.47876 0.74553 0.21937 0.2 +11 60 1 2 0 0.47492 0.77258 0.21553 0.20372 +11 61 2 1 1 0.50097 0.797 0.22088 0.2 +11 62 2 1 0 0.54239 0.79098 0.2 0.21974 +11 63 2 3 0 0.52331 0.79352 0.23114 0.23316 +11 64 1 1 0 0.49528 0.76334 0.22954 0.25331 +11 65 1 4 0 0.49876 0.73095 0.26264 0.27285 +11 66 1 1 1 0.53242 0.72036 0.23656 0.25391 +11 67 1 1 1 0.52189 0.67882 0.27964 0.2393 +11 68 1 1 0 0.53176 0.68164 0.25737 0.27294 +11 69 2 3 0 0.54291 0.73061 0.2078 0.27341 +11 70 2 3 1 0.54529 0.72522 0.24896 0.28794 +11 71 2 3 0 0.55668 0.75084 0.26641 0.26938 +11 72 1 1 0 0.53982 0.71284 0.25302 0.27195 +11 73 2 4 0 0.57059 0.72955 0.21981 0.28124 +11 74 2 3 0 0.60017 0.75338 0.2 0.26189 +11 75 2 3 0 0.56987 0.79254 0.23086 0.26063 +11 76 2 4 1 0.53471 0.77639 0.25951 0.27258 +11 77 1 2 1 0.577 0.75346 0.31801 0.28561 +11 78 1 2 1 0.54832 0.71056 0.29922 0.29029 +11 79 1 1 0 0.53751 0.7274 0.31287 0.29789 +11 80 1 2 1 0.54485 0.71156 0.31588 0.2826 +11 81 1 2 0 0.54433 0.69123 0.36089 0.26533 +11 82 2 4 1 0.56687 0.76312 0.36796 0.23672 +11 83 2 4 0 0.60786 0.7831 0.38363 0.23398 +11 84 2 2 1 0.60936 0.78368 0.36804 0.23087 +11 85 1 2 1 0.6211 0.74615 0.36371 0.20084 +11 86 2 4 0 0.5845 0.77236 0.3 0.2 +11 87 1 2 1 0.60376 0.8 0.32359 0.2 +11 88 1 2 1 0.62269 0.77985 0.32173 0.2031 +11 89 1 2 1 0.67831 0.79703 0.35091 0.20138 +11 90 1 2 1 0.69167 0.8 0.37196 0.2 +11 91 1 2 1 0.69856 0.7645 0.36825 0.21451 +11 92 1 2 1 0.69443 0.73286 0.37839 0.24046 +11 93 1 2 1 0.69619 0.72385 0.376 0.28831 +11 94 1 2 1 0.69996 0.7384 0.34066 0.30731 +11 95 1 2 1 0.69694 0.77351 0.34487 0.33476 +11 96 1 4 0 0.66827 0.79268 0.35615 0.27297 +11 97 1 2 0 0.67878 0.8 0.33973 0.27352 +11 98 1 1 0 0.6799 0.8 0.34456 0.25026 +11 99 2 3 0 0.62187 0.8 0.28067 0.22623 +11 100 2 2 1 0.6054 0.77345 0.25924 0.2 +11 101 1 4 0 0.65383 0.77114 0.27204 0.2008 +11 102 1 3 0 0.64727 0.76172 0.25281 0.21862 +11 103 2 4 0 0.6635 0.77711 0.305 0.2 +11 104 2 3 0 0.67197 0.79161 0.28616 0.23714 +11 105 1 2 1 0.64376 0.8 0.30672 0.23704 +11 106 1 2 0 0.63178 0.8 0.29251 0.23955 +11 107 1 2 1 0.6538 0.79894 0.29693 0.24725 +11 108 1 3 0 0.64423 0.7928 0.28867 0.21739 +11 109 1 2 0 0.61983 0.76918 0.29843 0.27407 +11 111 1 4 0 0.54281 0.76111 0.30887 0.26801 +11 112 1 2 1 0.52811 
0.74021 0.30316 0.29007 +11 113 1 3 0 0.53029 0.77255 0.2792 0.28303 +11 114 1 2 1 0.53651 0.79728 0.29597 0.30773 +11 115 1 2 0 0.55624 0.78973 0.25582 0.31867 +11 116 1 2 1 0.53997 0.8 0.22837 0.31713 +11 117 1 2 1 0.52671 0.8 0.24036 0.38512 +11 118 1 2 1 0.51438 0.8 0.23973 0.37043 +11 119 1 3 1 0.53182 0.79605 0.27918 0.36907 +11 120 1 2 1 0.50286 0.8 0.28253 0.35504 +11 121 1 2 1 0.50235 0.8 0.29495 0.29598 +11 122 1 3 0 0.48894 0.79331 0.30969 0.31204 +11 123 1 2 1 0.47871 0.79948 0.3263 0.31954 +11 124 1 2 1 0.45365 0.8 0.29099 0.33413 +11 125 1 2 1 0.46635 0.8 0.26215 0.31968 +11 126 1 2 1 0.45381 0.74542 0.23663 0.31977 +11 127 1 2 1 0.44701 0.77734 0.26106 0.32745 +11 128 1 3 0 0.42234 0.8 0.25779 0.32023 +11 129 1 2 1 0.40726 0.79306 0.29614 0.30255 +11 130 1 2 1 0.36714 0.79442 0.30191 0.2763 +11 131 1 4 0 0.36376 0.79681 0.32273 0.27065 +11 132 1 2 1 0.3524 0.79521 0.28949 0.2766 +11 133 1 2 1 0.32952 0.78195 0.24297 0.25312 +11 134 1 2 1 0.28499 0.8 0.24966 0.28736 +11 135 1 2 0 0.26727 0.79224 0.22348 0.3366 +11 136 1 2 1 0.25006 0.79156 0.26094 0.27667 +11 137 1 2 1 0.21646 0.76325 0.24907 0.25544 +11 138 1 2 1 0.2 0.78589 0.26922 0.27831 +11 139 1 2 0 0.2 0.76936 0.2926 0.24553 +11 140 1 2 1 0.23753 0.79722 0.33088 0.24868 +11 141 1 2 0 0.22556 0.8 0.31513 0.27674 +11 142 1 2 0 0.21033 0.8 0.30497 0.28001 +11 143 2 3 1 0.22537 0.8 0.27327 0.29592 +11 144 2 3 0 0.24274 0.79907 0.27427 0.28701 +11 145 2 3 0 0.21851 0.8 0.26859 0.30582 +11 146 2 1 0 0.2178 0.8 0.22666 0.31128 +11 147 2 1 1 0.24484 0.8 0.20365 0.30276 +11 148 2 3 0 0.22017 0.8 0.21582 0.29858 +11 149 2 4 1 0.20838 0.8 0.20549 0.3077 +11 150 1 1 1 0.26729 0.8 0.24835 0.33951 +11 151 2 4 1 0.25459 0.78232 0.22098 0.37519 +11 152 1 1 0 0.30189 0.77343 0.22356 0.39847 +11 153 2 2 0 0.277 0.77971 0.21763 0.41577 +11 154 2 1 0 0.33926 0.74634 0.2 0.37846 +11 155 2 4 1 0.33888 0.74843 0.2 0.38359 +11 156 2 4 1 0.36489 0.74266 0.2 0.33555 +11 157 2 4 0 0.36922 0.72864 0.21039 0.36898 +11 158 2 4 0 0.37651 0.71557 0.25706 0.3556 +11 159 2 1 0 0.3562 0.70798 0.27144 0.39188 +11 160 2 4 0 0.37324 0.73197 0.24524 0.39553 +11 161 2 4 1 0.33247 0.71528 0.25213 0.40268 +11 162 2 4 0 0.32664 0.75746 0.24262 0.39697 +11 163 2 4 0 0.32737 0.75497 0.20079 0.37946 +11 164 2 1 1 0.32241 0.8 0.2 0.40464 +11 165 1 1 1 0.33485 0.8 0.20678 0.40402 +11 166 1 1 1 0.35907 0.8 0.22123 0.40926 +11 167 2 4 1 0.34949 0.8 0.27264 0.41539 +11 168 2 4 0 0.37267 0.8 0.30797 0.436 +11 169 1 4 0 0.39837 0.76644 0.29919 0.41091 +11 170 1 3 0 0.38419 0.74126 0.2761 0.42169 +11 171 1 4 0 0.38649 0.73896 0.24769 0.45184 +11 172 1 1 1 0.3735 0.79988 0.21732 0.43546 +11 173 1 3 0 0.36009 0.8 0.22321 0.44074 +11 174 1 4 0 0.38648 0.8 0.21086 0.45846 +11 175 1 1 0 0.37753 0.7835 0.20936 0.47693 +11 176 1 1 0 0.32905 0.77017 0.20358 0.42754 +11 177 1 1 0 0.3851 0.78473 0.20506 0.44052 +11 178 2 3 1 0.36796 0.78051 0.2 0.3813 +11 179 2 2 1 0.35199 0.79315 0.22019 0.36803 +11 180 2 2 1 0.31801 0.8 0.24509 0.37891 +11 181 2 3 1 0.30674 0.8 0.23432 0.38882 +11 182 2 3 0 0.33107 0.75848 0.2 0.38555 +11 183 2 3 0 0.33389 0.73813 0.2101 0.38101 +11 184 2 3 0 0.3633 0.73729 0.2 0.40103 +11 185 1 2 1 0.35394 0.73968 0.2 0.36388 +11 186 1 2 1 0.3625 0.78628 0.2 0.38816 +11 187 1 2 1 0.37426 0.8 0.2 0.46508 +11 188 1 3 1 0.34876 0.8 0.22985 0.47632 +11 189 1 3 0 0.33813 0.78029 0.2056 0.47281 +11 190 1 4 0 0.37915 0.79114 0.2 0.45265 +11 191 1 2 1 0.40233 0.8 0.24541 0.49344 +11 192 1 2 1 0.42529 0.78379 0.25813 0.51788 +11 193 1 3 0 0.44473 0.8 0.26037 0.55478 +11 
194 1 2 1 0.42335 0.77949 0.25251 0.56739 +11 195 1 2 0 0.41433 0.74312 0.25601 0.56701 +11 196 1 2 0 0.3768 0.76441 0.25915 0.56346 +11 197 1 2 1 0.36343 0.78095 0.20219 0.55189 +11 198 1 2 1 0.32245 0.76431 0.2 0.52809 +11 199 1 3 0 0.28258 0.8 0.23392 0.52097 +11 200 1 4 0 0.29243 0.79748 0.22832 0.51401 +11 201 1 2 1 0.30384 0.79552 0.23835 0.52741
\ No newline at end of file
diff --git a/Python/hbayesdm/common/extdata/ug_exampleData.txt b/Python/hbayesdm/common/extdata/ug_exampleData.txt
new file mode 100644
index 00000000..257795cc
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/ug_exampleData.txt
@@ -0,0 +1,1801 @@
+trial offer accept subjID group
+1 3 0 1 LM +2 3 0 1 LM +3 5 0 1 LM +4 4 0 1 LM +5 2 0 1 LM +6 4 0 1 LM +7 3 0 1 LM +8 4 0 1 LM +9 3 0 1 LM +10 4 0 1 LM +11 5 1 1 LM +12 3 0 1 LM +13 5 1 1 LM +14 3 0 1 LM +15 1 0 1 LM +16 2 0 1 LM +17 3 0 1 LM +18 6 1 1 LM +19 2 0 1 LM +20 2 0 1 LM +21 4 1 1 LM +22 3 0 1 LM +23 5 1 1 LM +24 2 0 1 LM +25 4 1 1 LM +26 4 1 1 LM +27 2 0 1 LM +28 6 1 1 LM +29 4 1 1 LM +30 7 1 1 LM +31 9 1 1 LM +32 7 1 1 LM +33 10 1 1 LM +34 7 1 1 LM +35 8 1 1 LM +36 8 1 1 LM +37 11 1 1 LM +38 7 1 1 LM +39 6 1 1 LM +40 6 1 1 LM +41 12 1 1 LM +42 9 1 1 LM +43 5 1 1 LM +44 8 1 1 LM +45 6 1 1 LM +46 7 1 1 LM +47 8 1 1 LM +48 7 1 1 LM +49 8 1 1 LM +50 6 1 1 LM +51 8 1 1 LM +52 7 1 1 LM +53 9 1 1 LM +54 9 1 1 LM +55 8 1 1 LM +56 10 1 1 LM +57 6 1 1 LM +58 10 1 1 LM +59 10 1 1 LM +60 8 1 1 LM +1 3 0 2 LM +2 3 0 2 LM +3 5 0 2 LM +4 4 0 2 LM +5 2 0 2 LM +6 4 0 2 LM +7 3 0 2 LM +8 4 0 2 LM +9 3 0 2 LM +10 4 0 2 LM +11 5 1 2 LM +12 3 0 2 LM +13 5 1 2 LM +14 3 0 2 LM +15 1 0 2 LM +16 2 0 2 LM +17 3 0 2 LM +18 6 1 2 LM +19 2 0 2 LM +20 2 0 2 LM +21 4 0 2 LM +22 3 0 2 LM +23 5 0 2 LM +24 2 0 2 LM +25 4 0 2 LM +26 4 0 2 LM +27 2 0 2 LM +28 6 1 2 LM +29 4 0 2 LM +30 7 1 2 LM +31 9 1 2 LM +32 7 1 2 LM +33 10 1 2 LM +34 7 1 2 LM +35 8 1 2 LM +36 8 1 2 LM +37 11 1 2 LM +38 7 1 2 LM +39 6 1 2 LM +40 6 1 2 LM +41 12 1 2 LM +42 9 1 2 LM +43 5 1 2 LM +44 8 1 2 LM +45 6 1 2 LM +46 7 1 2 LM +47 8 1 2 LM +48 7 1 2 LM +49 8 1 2 LM +50 6 1 2 LM +51 8 1 2 LM +52 7 1 2 LM +53 9 1 2 LM +54 9 1 2 LM +55 8 1 2 LM +56 10 1 2 LM +57 6 1 2 LM +58 10 1 2 LM +59 10 1 2 LM +60 8 1 2 LM +1 3 0 3 LM +2 3 0 3 LM +3 5 1 3 LM +4 4 0 3 LM +5 2 0 3 LM +6 4 0 3 LM +7 3 0 3 LM +8 4 0 3 LM +9 3 0 3 LM +10 4 0 3 LM +11 5 1 3 LM +12 3 0 3 LM +13 5 1 3 LM +14 3 0 3 LM +15 1 0 3 LM +16 2 0 3 LM +17 3 0 3 LM +18 6 1 3 LM +19 2 0 3 LM +20 2 0 3 LM +21 4 0 3 LM +22 3 0 3 LM +23 5 1 3 LM +24 2 0 3 LM +25 4 0 3 LM +26 4 0 3 LM +27 2 0 3 LM +28 6 1 3 LM +29 4 0 3 LM +30 7 1 3 LM +31 9 1 3 LM +32 7 1 3 LM +33 10 1 3 LM +34 7 1 3 LM +35 8 1 3 LM +36 8 1 3 LM +37 11 1 3 LM +38 7 1 3 LM +39 6 1 3 LM +40 6 1 3 LM +41 12 1 3 LM +42 9 1 3 LM +43 5 1 3 LM +44 8 1 3 LM +45 6 1 3 LM +46 7 1 3 LM +47 8 1 3 LM +48 7 1 3 LM +49 8 1 3 LM +50 6 1 3 LM +51 8 1 3 LM +52 7 1 3 LM +53 9 1 3 LM +54 9 1 3 LM +55 8 1 3 LM +56 10 1 3 LM +57 6 1 3 LM +58 10 1 3 LM +59 10 1 3 LM +60 8 1 3 LM +1 3 0 4 LM +2 3 0 4 LM +3 5 1 4 LM +4 4 0 4 LM +5 2 0 4 LM +6 4 0 4 LM +7 3 0 4 LM +8 4 0 4 LM +9 3 0 4 LM +10 4 0 4 LM +11 5 1 4 LM +12 3 0 4 LM +13 5 1 4 LM +14 3 0 4 LM +15 1 0 4 LM +16 2 0 4 LM +17 3 0 4 LM +18 6 1 4 LM +19 2 0 4 LM +20 2 0 4 LM +21 4 0 4 LM +22 3 0 4 LM +23 5 1 4 LM +24 2 0 4 LM +25 4 0 4 LM +26 4 0 4 LM +27 2 0 4 LM +28 6 1 4 LM +29 4 0 4 LM +30 7 1 4 LM +31 9 1 4 LM +32 7 1 4 LM +33 10 1 4 LM +34 7 1 4 LM +35 8 1 4 LM +36 8 1 4 LM +37 11 1 4 LM +38 7 1 4 LM +39 6 1 4 LM +40 6 1 4 LM +41 12 1 4 LM +42 9 1 4 LM +43 5 1 4 LM +44
[... remaining rows of this example-data file (subjects 4-30, trials 1-60; five columns: trial index, two task variables, subject ID, and group code "LM") omitted ...]
diff --git a/Python/hbayesdm/common/extdata/wcs_answersheet.txt b/Python/hbayesdm/common/extdata/wcs_answersheet.txt
new file mode 100644
index 00000000..207ac3a1
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/wcs_answersheet.txt
@@ -0,0 +1,4 @@
[... WCST answer key: a header row numbering the 128 trials, followed by three rows (Color, Form, Number) giving the correct deck (1-4) on every trial under each sorting rule ...]
diff --git a/Python/hbayesdm/common/extdata/wcs_exampleData.txt b/Python/hbayesdm/common/extdata/wcs_exampleData.txt
new file mode 100644
index 00000000..80cf86db
--- /dev/null
+++ b/Python/hbayesdm/common/extdata/wcs_exampleData.txt
@@ -0,0 +1,1158 @@
+choice outcome subjID trial
+1 0 1 1
+1 1 1 2
+4 1 1 3
[... 1,154 further rows (subjects 1-10, up to 128 trials each: chosen deck 1-4, outcome 0/1, subject ID, trial index) omitted ...]
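For orientation, the example file above is plain whitespace-delimited text with a header row, so it can be inspected with base R. This is a minimal sketch, assuming the file has been copied to the working directory (the path is hypothetical; only the filename comes from the diff):

    # Read the WCST example data; each row is one trial.
    wcs_data <- read.table("wcs_exampleData.txt", header = TRUE)
    # Columns: chosen deck (1-4), outcome (0 = wrong, 1 = correct),
    # subject ID, and trial index.
    str(wcs_data)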
diff --git a/Python/hbayesdm/common/stan_files/bandit2arm_delta.stan b/Python/hbayesdm/common/stan_files/bandit2arm_delta.stan
new file mode 100644
index 00000000..3c44ddde
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit2arm_delta.stan
@@ -0,0 +1,109 @@
+#include /pre/license.stan
+
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  int<lower=-1, upper=2> choice[N, T];
+  real outcome[N, T];  // no lower and upper bounds
+}
+transformed data {
+  vector[2] initV;  // initial values for EV
+  initV = rep_vector(0.0, 2);
+}
+parameters {
+// Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[2] mu_pr;
+  vector<lower=0>[2] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] A_pr;    // learning rate
+  vector[N] tau_pr;  // inverse temperature
+}
+transformed parameters {
+  // subject-level parameters
+  vector<lower=0, upper=1>[N] A;
+  vector<lower=0, upper=5>[N] tau;
+
+  for (i in 1:N) {
+    A[i]   = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]);
+    tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5;
+  }
+}
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  A_pr   ~ normal(0, 1);
+  tau_pr ~ normal(0, 1);
+
+  // subject loop and trial loop
+  for (i in 1:N) {
+    vector[2] ev;  // expected value
+    real PE;       // prediction error
+
+    ev = initV;
+
+    for (t in 1:(Tsubj[i])) {
+      // compute action probabilities
+      choice[i, t] ~ categorical_logit(tau[i] * ev);
+
+      // prediction error
+      PE = outcome[i, t] - ev[choice[i, t]];
+
+      // value updating (learning)
+      ev[choice[i, t]] += A[i] * PE;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_A;
+  real<lower=0, upper=5> mu_tau;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_A   = Phi_approx(mu_pr[1]);
+  mu_tau = Phi_approx(mu_pr[2]) * 5;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      vector[2] ev;  // expected value
+      real PE;       // prediction error
+
+      // Initialize values
+      ev = initV;
+
+      log_lik[i] = 0;
+
+      for (t in 1:(Tsubj[i])) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_logit_lpmf(choice[i, t] | tau[i] * ev);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(tau[i] * ev));
+
+        // prediction error
+        PE = outcome[i, t] - ev[choice[i, t]];
+
+        // value updating (learning)
+        ev[choice[i, t]] += A[i] * PE;
+      }
+    }
+  }
+}
+
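The model block above is a plain delta-rule (Rescorla-Wagner) learner with a softmax choice function. As a rough illustration of the same per-trial update outside Stan, here is a minimal base-R sketch for one simulated subject; all parameter values and payoff probabilities are made up for the example, not estimates:

    softmax <- function(x) exp(x) / sum(exp(x))
    A <- 0.3; tau <- 2.0              # learning rate, inverse temperature
    ev <- c(0, 0)                     # initial expected values (initV)
    for (t in 1:10) {
      p      <- softmax(tau * ev)             # action probabilities
      choice <- sample(1:2, 1, prob = p)      # simulated choice
      reward <- rbinom(1, 1, c(0.7, 0.3)[choice])  # hypothetical payoffs
      pe     <- reward - ev[choice]           # prediction error
      ev[choice] <- ev[choice] + A * pe       # delta-rule update
    }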
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan b/Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan
new file mode 100644
index 00000000..15d36c63
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan
@@ -0,0 +1,163 @@
+#include /pre/license.stan
+
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  int<lower=-1, upper=4> choice[N,T];
+  real outcome[N,T];
+}
+
+transformed data {
+  real sigmaO;  // sigma_O = 4
+  sigmaO = 4;
+}
+
+parameters {
+  // group-level parameters
+  vector[6] mu_pr;
+  vector<lower=0>[6] sigma;
+
+  // subject-level raw parameters, follows norm(0,1), for later Matt Trick
+  vector[N] lambda_pr;  // decay factor
+  vector[N] theta_pr;   // decay center
+  vector[N] beta_pr;    // inverse softmax temperature
+  vector[N] mu0_pr;     // anticipated initial mean of all 4 options
+  vector[N] sigma0_pr;  // anticipated initial sd^2 (uncertainty factor) of all 4 options
+  vector[N] sigmaD_pr;  // sd^2 of diffusion noise
+}
+
+transformed parameters {
+  // subject-level parameters
+  vector<lower=0, upper=1>[N] lambda;
+  vector<lower=0, upper=100>[N] theta;
+  vector<lower=0, upper=1>[N] beta;
+  vector<lower=0, upper=100>[N] mu0;
+  vector<lower=0, upper=15>[N] sigma0;
+  vector<lower=0, upper=15>[N] sigmaD;
+
+  // Matt Trick
+  for (i in 1:N) {
+    lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]);
+    theta[i]  = Phi_approx(mu_pr[2] + sigma[2] * theta_pr[i]) * 100;
+    beta[i]   = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]);
+    mu0[i]    = Phi_approx(mu_pr[4] + sigma[4] * mu0_pr[i]) * 100;
+    sigma0[i] = Phi_approx(mu_pr[5] + sigma[5] * sigma0_pr[i]) * 15;
+    sigmaD[i] = Phi_approx(mu_pr[6] + sigma[6] * sigmaD_pr[i]) * 15;
+  }
+}
+
+model {
+  // prior: hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ cauchy(0, 5);
+
+  // prior: individual parameters
+  lambda_pr ~ normal(0, 1);
+  theta_pr  ~ normal(0, 1);
+  beta_pr   ~ normal(0, 1);
+  mu0_pr    ~ normal(0, 1);
+  sigma0_pr ~ normal(0, 1);
+  sigmaD_pr ~ normal(0, 1);
+
+  // subject loop and trial loop
+  for (i in 1:N) {
+    vector[4] mu_ev;     // estimated mean for each option
+    vector[4] sd_ev_sq;  // estimated sd^2 for each option
+    real pe;             // prediction error
+    real k;              // learning rate
+
+    mu_ev    = rep_vector(mu0[i], 4);
+    sd_ev_sq = rep_vector(sigma0[i]^2, 4);
+
+    for (t in 1:(Tsubj[i])) {
+      // compute action probabilities
+      choice[i,t] ~ categorical_logit(beta[i] * mu_ev);
+
+      // learning rate
+      k = sd_ev_sq[choice[i,t]] / (sd_ev_sq[choice[i,t]] + sigmaO^2);
+
+      // prediction error
+      pe = outcome[i,t] - mu_ev[choice[i,t]];
+
+      // value updating (learning)
+      mu_ev[choice[i,t]]    += k * pe;
+      sd_ev_sq[choice[i,t]] *= (1-k);
+
+      // diffusion process
+      {
+        mu_ev *= lambda[i];
+        mu_ev += (1 - lambda[i]) * theta[i];
+      }
+      {
+        sd_ev_sq *= lambda[i]^2;
+        sd_ev_sq += sigmaD[i]^2;
+      }
+    }
+  }
+}
+
+generated quantities {
+  real<lower=0, upper=1> mu_lambda;
+  real<lower=0, upper=100> mu_theta;
+  real<lower=0, upper=1> mu_beta;
+  real<lower=0, upper=100> mu_mu0;
+  real<lower=0, upper=15> mu_sigma0;
+  real<lower=0, upper=15> mu_sigmaD;
+  real log_lik[N];
+  real y_pred[N,T];
+
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_lambda = Phi_approx(mu_pr[1]);
+  mu_theta  = Phi_approx(mu_pr[2]) * 100;
+  mu_beta   = Phi_approx(mu_pr[3]);
+  mu_mu0    = Phi_approx(mu_pr[4]) * 100;
+  mu_sigma0 = Phi_approx(mu_pr[5]) * 15;
+  mu_sigmaD = Phi_approx(mu_pr[6]) * 15;
+
+  { // local block
+    for (i in 1:N) {
+      vector[4] mu_ev;     // estimated mean for each option
+      vector[4] sd_ev_sq;  // estimated sd^2 for each option
+      real pe;             // prediction error
+      real k;              // learning rate
+
+      log_lik[i] = 0;
+      mu_ev    = rep_vector(mu0[i], 4);
+      sd_ev_sq = rep_vector(sigma0[i]^2, 4);
+
+      for (t in 1:(Tsubj[i])) {
+        // compute action probabilities
+        log_lik[i] += categorical_logit_lpmf(choice[i,t] | beta[i] * mu_ev);
+        y_pred[i, t] = categorical_rng(softmax(beta[i] * mu_ev));
+
+        // learning rate
+        k = sd_ev_sq[choice[i,t]] / (sd_ev_sq[choice[i,t]] + sigmaO^2);
+
+        // prediction error
+        pe = outcome[i,t] - mu_ev[choice[i,t]];
+
+        // value updating (learning)
+        mu_ev[choice[i,t]]    += k * pe;
+        sd_ev_sq[choice[i,t]] *= (1-k);
+
+        // diffusion process
+        {
+          mu_ev *= lambda[i];
+          mu_ev += (1 - lambda[i]) * theta[i];
+        }
+        {
+          sd_ev_sq *= lambda[i]^2;
+          sd_ev_sq += sigmaD[i]^2;
+        }
+      }
+    }
+  } // local block END
+}
+
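In the Kalman-filter model above, the learning rate is not a free per-trial parameter but the Kalman gain, which shrinks as the estimate of an option's value becomes more certain. A minimal base-R sketch of one update for a single chosen option follows; sigmaO is fixed at 4 as in the Stan file, and every other value is illustrative only:

    sigmaO <- 4                      # observation noise sd (fixed, as in Stan)
    mu_ev  <- 50; sd_ev_sq <- 15^2   # prior mean and variance for one option
    outcome <- 70
    k  <- sd_ev_sq / (sd_ev_sq + sigmaO^2)   # Kalman gain = learning rate
    pe <- outcome - mu_ev                    # prediction error
    mu_ev    <- mu_ev + k * pe               # posterior mean
    sd_ev_sq <- (1 - k) * sd_ev_sq           # posterior variance
    # diffusion toward the decay center between trials
    lambda <- 0.9; theta <- 50; sigmaD <- 5
    mu_ev    <- lambda * mu_ev + (1 - lambda) * theta
    sd_ev_sq <- lambda^2 * sd_ev_sq + sigmaD^2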
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan b/Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan
new file mode 100644
index 00000000..b95da5ce
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan
@@ -0,0 +1,173 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate)
+// w/o reward sensitivity and punishment sensitivity
+// in sum, there are three parameters - Arew, Apun, xi
+// Aylward et al., 2018, PsyArXiv
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  real rew[N, T];
+  real los[N, T];
+  int<lower=-1, upper=4> choice[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[3] mu_pr;
+  vector<lower=0>[3] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] Arew_pr;
+  vector[N] Apun_pr;
+  vector[N] xi_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector<lower=0, upper=1>[N] Arew;
+  vector<lower=0, upper=1>[N] Apun;
+  vector<lower=0, upper=1>[N] xi;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
+    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
+    xi[i]   = Phi_approx(mu_pr[3] + sigma[3] * xi_pr[i]);
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  Arew_pr ~ normal(0, 1.0);
+  Apun_pr ~ normal(0, 1.0);
+  xi_pr   ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] Qr;
+    vector[4] Qp;
+    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+    real Qr_chosen;
+    real Qp_chosen;
+    real PEr;  // prediction error - for reward of the chosen option
+    real PEp;  // prediction error - for punishment of the chosen option
+
+    // Initialize values
+    Qr = initV;
+    Qp = initV;
+    Qsum = initV;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice + xi (noise)
+      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+      // Prediction error signals
+      PEr = rew[i, t] - Qr[choice[i, t]];
+      PEp = los[i, t] - Qp[choice[i, t]];
+      PEr_fic = -Qr;
+      PEp_fic = -Qp;
+
+      // store chosen deck Q values (rew and pun)
+      Qr_chosen = Qr[choice[i, t]];
+      Qp_chosen = Qp[choice[i, t]];
+
+      // First, update Qr & Qp for all decks w/ fictive updating
+      Qr += Arew[i] * PEr_fic;
+      Qp += Apun[i] * PEp_fic;
+      // Replace Q values of chosen deck with correct values using stored values
+      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+      // Q(sum)
+      Qsum = Qr + Qp;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_Arew;
+  real<lower=0, upper=1> mu_Apun;
+  real<lower=0, upper=1> mu_xi;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_Arew = Phi_approx(mu_pr[1]);
+  mu_Apun = Phi_approx(mu_pr[2]);
+  mu_xi   = Phi_approx(mu_pr[3]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] Qr;
+      vector[4] Qp;
+      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr;  // prediction error - for reward of the chosen option
+      real PEp;  // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // Prediction error signals
+        PEr = rew[i, t] - Qr[choice[i, t]];
+        PEp = los[i, t] - Qp[choice[i, t]];
+        PEr_fic = -Qr;
+        PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ fictive updating
+        Qr += Arew[i] * PEr_fic;
+        Qp += Apun[i] * PEp_fic;
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
+
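The "softmax + xi" rule used throughout the lapse-family models mixes value-guided choice with uniform random lapses. A one-step base-R illustration of the resulting trial-wise choice probabilities (the Q values and xi below are placeholders):

    softmax <- function(x) exp(x) / sum(exp(x))
    Qsum <- c(0.5, -0.2, 0.1, 0.0); xi <- 0.05
    p <- softmax(Qsum) * (1 - xi) + xi / 4   # lapse-contaminated probabilities
    sum(p)                                   # still sums to 1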
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_4par.stan b/Python/hbayesdm/common/stan_files/bandit4arm_4par.stan
new file mode 100644
index 00000000..18d6acf9
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm_4par.stan
@@ -0,0 +1,176 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration)
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  real rew[N, T];
+  real los[N, T];
+  int<lower=-1, upper=4> choice[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[4] mu_pr;
+  vector<lower=0>[4] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] Arew_pr;
+  vector[N] Apun_pr;
+  vector[N] R_pr;
+  vector[N] P_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector<lower=0, upper=1>[N] Arew;
+  vector<lower=0, upper=1>[N] Apun;
+  vector<lower=0, upper=30>[N] R;
+  vector<lower=0, upper=30>[N] P;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
+    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
+    R[i]    = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30;
+    P[i]    = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30;
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  Arew_pr ~ normal(0, 1.0);
+  Apun_pr ~ normal(0, 1.0);
+  R_pr    ~ normal(0, 1.0);
+  P_pr    ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] Qr;
+    vector[4] Qp;
+    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+    real Qr_chosen;
+    real Qp_chosen;
+    real PEr;  // prediction error - for reward of the chosen option
+    real PEp;  // prediction error - for punishment of the chosen option
+
+    // Initialize values
+    Qr = initV;
+    Qp = initV;
+    Qsum = initV;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice
+      choice[i, t] ~ categorical_logit(Qsum);
+
+      // Prediction error signals
+      PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+      PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+      PEr_fic = -Qr;
+      PEp_fic = -Qp;
+
+      // store chosen deck Q values (rew and pun)
+      Qr_chosen = Qr[choice[i, t]];
+      Qp_chosen = Qp[choice[i, t]];
+
+      // First, update Qr & Qp for all decks w/ fictive updating
+      Qr += Arew[i] * PEr_fic;
+      Qp += Apun[i] * PEp_fic;
+      // Replace Q values of chosen deck with correct values using stored values
+      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+      // Q(sum)
+      Qsum = Qr + Qp;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_Arew;
+  real<lower=0, upper=1> mu_Apun;
+  real<lower=0, upper=30> mu_R;
+  real<lower=0, upper=30> mu_P;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_Arew = Phi_approx(mu_pr[1]);
+  mu_Apun = Phi_approx(mu_pr[2]);
+  mu_R    = Phi_approx(mu_pr[3]) * 30;
+  mu_P    = Phi_approx(mu_pr[4]) * 30;
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] Qr;
+      vector[4] Qp;
+      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr;  // prediction error - for reward of the chosen option
+      real PEp;  // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_logit_lpmf(choice[i, t] | Qsum);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum));
+
+        // Prediction error signals
+        PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+        PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+        PEr_fic = -Qr;
+        PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ fictive updating
+        Qr += Arew[i] * PEr_fic;
+        Qp += Apun[i] * PEp_fic;
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
+
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan b/Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan
new file mode 100644
index 00000000..161ce311
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan
@@ -0,0 +1,182 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate)
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  real rew[N, T];
+  real los[N, T];
+  int<lower=-1, upper=4> choice[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[5] mu_pr;
+  vector<lower=0>[5] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] Arew_pr;
+  vector[N] Apun_pr;
+  vector[N] R_pr;
+  vector[N] P_pr;
+  vector[N] xi_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector<lower=0, upper=1>[N] Arew;
+  vector<lower=0, upper=1>[N] Apun;
+  vector<lower=0, upper=30>[N] R;
+  vector<lower=0, upper=30>[N] P;
+  vector<lower=0, upper=1>[N] xi;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
+    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
+    R[i]    = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30;
+    P[i]    = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30;
+    xi[i]   = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]);
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  Arew_pr ~ normal(0, 1.0);
+  Apun_pr ~ normal(0, 1.0);
+  R_pr    ~ normal(0, 1.0);
+  P_pr    ~ normal(0, 1.0);
+  xi_pr   ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] Qr;
+    vector[4] Qp;
+    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+    real Qr_chosen;
+    real Qp_chosen;
+    real PEr;  // prediction error - for reward of the chosen option
+    real PEp;  // prediction error - for punishment of the chosen option
+
+    // Initialize values
+    Qr = initV;
+    Qp = initV;
+    Qsum = initV;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice + xi (noise)
+      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+      // Prediction error signals
+      PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+      PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+      PEr_fic = -Qr;
+      PEp_fic = -Qp;
+
+      // store chosen deck Q values (rew and pun)
+      Qr_chosen = Qr[choice[i, t]];
+      Qp_chosen = Qp[choice[i, t]];
+
+      // First, update Qr & Qp for all decks w/ fictive updating
+      Qr += Arew[i] * PEr_fic;
+      Qp += Apun[i] * PEp_fic;
+      // Replace Q values of chosen deck with correct values using stored values
+      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+      // Q(sum)
+      Qsum = Qr + Qp;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_Arew;
+  real<lower=0, upper=1> mu_Apun;
+  real<lower=0, upper=30> mu_R;
+  real<lower=0, upper=30> mu_P;
+  real<lower=0, upper=1> mu_xi;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_Arew = Phi_approx(mu_pr[1]);
+  mu_Apun = Phi_approx(mu_pr[2]);
+  mu_R    = Phi_approx(mu_pr[3]) * 30;
+  mu_P    = Phi_approx(mu_pr[4]) * 30;
+  mu_xi   = Phi_approx(mu_pr[5]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] Qr;
+      vector[4] Qp;
+      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr;  // prediction error - for reward of the chosen option
+      real PEp;  // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // Prediction error signals
+        PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+        PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+        PEr_fic = -Qr;
+        PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ fictive updating
+        Qr += Arew[i] * PEr_fic;
+        Qp += Apun[i] * PEp_fic;
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
+
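In the fictive-updating scheme shared by these models, all four decks are first updated with fictive prediction errors (as if each deck had been chosen and returned nothing), and the chosen deck's value is then overwritten with the real-feedback update. A compact base-R sketch of one such step, with illustrative parameter values:

    Arew <- 0.2; R <- 10
    Qr <- c(1, 2, 0, -1); choice <- 2; rew <- 1
    per      <- R * rew - Qr[choice]     # real PE for the chosen deck
    per_fic  <- -Qr                      # fictive PEs for all decks
    q_chosen <- Qr[choice]
    Qr <- Qr + Arew * per_fic            # fictive update everywhere
    Qr[choice] <- q_chosen + Arew * per  # chosen deck uses the real PE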
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan b/Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan
new file mode 100644
index 00000000..b089ee21
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan
@@ -0,0 +1,201 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate). Added decay rate (Niv et al., 2015, J. Neuro)
+// Aylward et al., 2018, PsyArXiv
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  real rew[N, T];
+  real los[N, T];
+  int<lower=-1, upper=4> choice[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[6] mu_pr;
+  vector<lower=0>[6] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] Arew_pr;
+  vector[N] Apun_pr;
+  vector[N] R_pr;
+  vector[N] P_pr;
+  vector[N] xi_pr;
+  vector[N] d_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector<lower=0, upper=1>[N] Arew;
+  vector<lower=0, upper=1>[N] Apun;
+  vector<lower=0, upper=30>[N] R;
+  vector<lower=0, upper=30>[N] P;
+  vector<lower=0, upper=1>[N] xi;
+  vector<lower=0, upper=1>[N] d;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]);
+    Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]);
+    R[i]    = Phi_approx(mu_pr[3] + sigma[3] * R_pr[i]) * 30;
+    P[i]    = Phi_approx(mu_pr[4] + sigma[4] * P_pr[i]) * 30;
+    xi[i]   = Phi_approx(mu_pr[5] + sigma[5] * xi_pr[i]);
+    d[i]    = Phi_approx(mu_pr[6] + sigma[6] * d_pr[i]);
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  Arew_pr ~ normal(0, 1.0);
+  Apun_pr ~ normal(0, 1.0);
+  R_pr    ~ normal(0, 1.0);
+  P_pr    ~ normal(0, 1.0);
+  xi_pr   ~ normal(0, 1.0);
+  d_pr    ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] Qr;
+    vector[4] Qp;
+    //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options)
+    //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options)
+    vector[4] Qsum;  // Qsum = Qrew + Qpun + perseverance
+    vector[4] tmp;   // temporary vector for Qr and Qp
+
+    real Qr_chosen;
+    real Qp_chosen;
+    real PEr;  // prediction error - for reward of the chosen option
+    real PEp;  // prediction error - for punishment of the chosen option
+
+    // Initialize values
+    Qr = initV;
+    Qp = initV;
+    Qsum = initV;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice + xi (noise)
+      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+      // Prediction error signals
+      PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+      PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+      //PEr_fic = -Qr;
+      //PEp_fic = -Qp;
+
+      // store chosen deck Q values (rew and pun)
+      Qr_chosen = Qr[choice[i, t]];
+      Qp_chosen = Qp[choice[i, t]];
+
+      // First, update Qr & Qp for all decks w/ decay rate
+      //Qr += Arew[i] * PEr_fic;
+      //Qp += Apun[i] * PEp_fic;
+      tmp = (1-d[i]) * Qr;
+      Qr = tmp;
+      tmp = (1-d[i]) * Qp;
+      Qp = tmp;
+
+      // Replace Q values of chosen deck with correct values using stored values
+      Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+      Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+      // Q(sum)
+      Qsum = Qr + Qp;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_Arew;
+  real<lower=0, upper=1> mu_Apun;
+  real<lower=0, upper=30> mu_R;
+  real<lower=0, upper=30> mu_P;
+  real<lower=0, upper=1> mu_xi;
+  real<lower=0, upper=1> mu_d;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_Arew = Phi_approx(mu_pr[1]);
+  mu_Apun = Phi_approx(mu_pr[2]);
+  mu_R    = Phi_approx(mu_pr[3]) * 30;
+  mu_P    = Phi_approx(mu_pr[4]) * 30;
+  mu_xi   = Phi_approx(mu_pr[5]);
+  mu_d    = Phi_approx(mu_pr[6]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] Qr;
+      vector[4] Qp;
+      //vector[4] PEr_fic; // prediction error - for reward fictive updating (for unchosen options)
+      //vector[4] PEp_fic; // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;  // Qsum = Qrew + Qpun + perseverance
+      vector[4] tmp;   // temporary vector for Qr and Qp
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr;  // prediction error - for reward of the chosen option
+      real PEp;  // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // Prediction error signals
+        PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+        PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+        //PEr_fic = -Qr;
+        //PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ decay rate
+        //Qr += Arew[i] * PEr_fic;
+        //Qp += Apun[i] * PEp_fic;
+        tmp = (1-d[i]) * Qr;
+        Qr = tmp;
+        tmp = (1-d[i]) * Qp;
+        Qp = tmp;
+
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + Arew[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + Apun[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
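In the decay variant just above, fictive updating is replaced by multiplicative forgetting: every deck's value shrinks toward zero by a factor (1 - d) before the chosen deck is updated with its real prediction error. The same step in base R (all values illustrative):

    d <- 0.1; Arew <- 0.2; R <- 10
    Qr <- c(1, 2, 0, -1); choice <- 2; rew <- 1
    q_chosen <- Qr[choice]
    per <- R * rew - q_chosen
    Qr <- (1 - d) * Qr                   # decay all deck values
    Qr[choice] <- q_chosen + Arew * per  # then update the chosen deck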
diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan b/Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan
new file mode 100644
index 00000000..b383f389
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan
@@ -0,0 +1,177 @@
+#include /pre/license.stan
+
+// Seymour et al 2012 J neuro model, w/o C (choice perseveration) but with xi (lapse rate). Single learning rate both for R and P.
+// Aylward et al., 2018, PsyArXiv
+data {
+  int<lower=1> N;
+  int<lower=1> T;
+  int<lower=1, upper=T> Tsubj[N];
+  real rew[N, T];
+  real los[N, T];
+  int<lower=-1, upper=4> choice[N, T];
+}
+
+transformed data {
+  vector[4] initV;
+  initV = rep_vector(0.0, 4);
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[4] mu_pr;
+  vector<lower=0>[4] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] A_pr;
+  vector[N] R_pr;
+  vector[N] P_pr;
+  vector[N] xi_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector<lower=0, upper=1>[N] A;
+  vector<lower=0, upper=30>[N] R;
+  vector<lower=0, upper=30>[N] P;
+  vector<lower=0, upper=1>[N] xi;
+
+  for (i in 1:N) {
+    A[i]  = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]);
+    R[i]  = Phi_approx(mu_pr[2] + sigma[2] * R_pr[i]) * 30;
+    P[i]  = Phi_approx(mu_pr[3] + sigma[3] * P_pr[i]) * 30;
+    xi[i] = Phi_approx(mu_pr[4] + sigma[4] * xi_pr[i]);
+  }
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // individual parameters
+  A_pr  ~ normal(0, 1.0);
+  R_pr  ~ normal(0, 1.0);
+  P_pr  ~ normal(0, 1.0);
+  xi_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] Qr;
+    vector[4] Qp;
+    vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+    vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+    vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+    real Qr_chosen;
+    real Qp_chosen;
+    real PEr;  // prediction error - for reward of the chosen option
+    real PEp;  // prediction error - for punishment of the chosen option
+
+    // Initialize values
+    Qr = initV;
+    Qp = initV;
+    Qsum = initV;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice + xi (noise)
+      choice[i, t] ~ categorical(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+      // Prediction error signals
+      PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+      PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+      PEr_fic = -Qr;
+      PEp_fic = -Qp;
+
+      // store chosen deck Q values (rew and pun)
+      Qr_chosen = Qr[choice[i, t]];
+      Qp_chosen = Qp[choice[i, t]];
+
+      // First, update Qr & Qp for all decks w/ fictive updating
+      Qr += A[i] * PEr_fic;
+      Qp += A[i] * PEp_fic;
+      // Replace Q values of chosen deck with correct values using stored values
+      Qr[choice[i, t]] = Qr_chosen + A[i] * PEr;
+      Qp[choice[i, t]] = Qp_chosen + A[i] * PEp;
+
+      // Q(sum)
+      Qsum = Qr + Qp;
+    }
+  }
+}
+generated quantities {
+  // For group level parameters
+  real<lower=0, upper=1> mu_A;
+  real<lower=0, upper=30> mu_R;
+  real<lower=0, upper=30> mu_P;
+  real<lower=0, upper=1> mu_xi;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_A  = Phi_approx(mu_pr[1]);
+  mu_R  = Phi_approx(mu_pr[2]) * 30;
+  mu_P  = Phi_approx(mu_pr[3]) * 30;
+  mu_xi = Phi_approx(mu_pr[4]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] Qr;
+      vector[4] Qp;
+      vector[4] PEr_fic;  // prediction error - for reward fictive updating (for unchosen options)
+      vector[4] PEp_fic;  // prediction error - for punishment fictive updating (for unchosen options)
+      vector[4] Qsum;     // Qsum = Qrew + Qpun + perseverance
+
+      real Qr_chosen;
+      real Qp_chosen;
+      real PEr;  // prediction error - for reward of the chosen option
+      real PEp;  // prediction error - for punishment of the chosen option
+
+      // Initialize values
+      Qr = initV;
+      Qp = initV;
+      Qsum = initV;
+      log_lik[i] = 0.0;
+
+      for (t in 1:Tsubj[i]) {
+        // compute log likelihood of current trial
+        log_lik[i] += categorical_lpmf(choice[i, t] | softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = categorical_rng(softmax(Qsum) * (1-xi[i]) + xi[i]/4);
+
+        // Prediction error signals
+        PEr = R[i] * rew[i, t] - Qr[choice[i, t]];
+        PEp = P[i] * los[i, t] - Qp[choice[i, t]];
+        PEr_fic = -Qr;
+        PEp_fic = -Qp;
+
+        // store chosen deck Q values (rew and pun)
+        Qr_chosen = Qr[choice[i, t]];
+        Qp_chosen = Qp[choice[i, t]];
+
+        // First, update Qr & Qp for all decks w/ fictive updating
+        Qr += A[i] * PEr_fic;
+        Qp += A[i] * PEp_fic;
+        // Replace Q values of chosen deck with correct values using stored values
+        Qr[choice[i, t]] = Qr_chosen + A[i] * PEr;
+        Qp[choice[i, t]] = Qp_chosen + A[i] * PEp;
+
+        // Q(sum)
+        Qsum = Qr + Qp;
+      }
+    }
+  }
+}
+
diff --git a/Python/hbayesdm/common/stan_files/bart_par4.stan b/Python/hbayesdm/common/stan_files/bart_par4.stan
new file mode 100644
index 00000000..f66ca8f0
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/bart_par4.stan
@@ -0,0 +1,129 @@
+#include /pre/license.stan
+
+data {
+  int<lower=1> N;             // Number of subjects
+  int<lower=1> T;             // Maximum number of trials
+  int<lower=1> Tsubj[N];      // Number of trials for each subject
+  int<lower=2> P;             // Number of max pump + 1 ** CAUTION **
+  int<lower=0> pumps[N, T];   // Number of pumps
+  int<lower=0, upper=1> explosion[N, T];  // Whether the balloon exploded (0 or 1)
+}
+
+transformed data{
+  // Whether a subject pumped the button or not (0 or 1)
+  int d[N, T, P];
+
+  for (j in 1:N) {
+    for (k in 1:Tsubj[j]) {
+      for (l in 1:P) {
+        if (l <= pumps[j, k])
+          d[j, k, l] = 1;
+        else
+          d[j, k, l] = 0;
+      }
+    }
+  }
+}
+
+parameters {
+  // Group-level parameters
+  vector[4] mu_pr;
+  vector<lower=0>[4] sigma;
+
+  // Normally distributed error for Matt trick
+  vector[N] phi_pr;
+  vector[N] eta_pr;
+  vector[N] gam_pr;
+  vector[N] tau_pr;
+}
+
+transformed parameters {
+  // Subject-level parameters with Matt trick
+  vector<lower=0, upper=1>[N] phi;
+  vector<lower=0>[N] eta;
+  vector<lower=0>[N] gam;
+  vector<lower=0>[N] tau;
+
+  phi = Phi_approx(mu_pr[1] + sigma[1] * phi_pr);
+  eta = exp(mu_pr[2] + sigma[2] * eta_pr);
+  gam = exp(mu_pr[3] + sigma[3] * gam_pr);
+  tau = exp(mu_pr[4] + sigma[4] * tau_pr);
+}
+
+model {
+  // Prior
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  phi_pr ~ normal(0, 1);
+  eta_pr ~ normal(0, 1);
+  gam_pr ~ normal(0, 1);
+  tau_pr ~ normal(0, 1);
+
+  // Likelihood
+  for (j in 1:N) {
+    // Initialize n_succ and n_pump for a subject
+    int n_succ = 0;  // Number of successful pumps
+    int n_pump = 0;  // Number of total pumps
+
+    for (k in 1:Tsubj[j]) {
+      real p_burst;  // Belief on a balloon to be burst
+      real omega;    // Optimal number of pumps
+
+      p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump));
+      omega = -gam[j] / log1m(p_burst);
+
+      // Calculate likelihood with bernoulli distribution
+      for (l in 1:(pumps[j, k] + 1 - explosion[j, k]))
+        d[j, k, l] ~ bernoulli_logit(tau[j] * (omega - l));
+
+      // Update n_succ and n_pump after each trial ends
+      n_succ += pumps[j, k] - explosion[j, k];
+      n_pump += pumps[j, k];
+    }
+  }
+}
+
+generated quantities {
+  // Actual group-level mean
+  real<lower=0, upper=1> mu_phi = Phi_approx(mu_pr[1]);
+  real<lower=0> mu_eta = exp(mu_pr[2]);
+  real<lower=0> mu_gam = exp(mu_pr[3]);
+  real<lower=0> mu_tau = exp(mu_pr[4]);
+
+  // Log-likelihood for model fit
+  real log_lik = 0;
+
+  // For posterior predictive check
+  real y_pred[N, T, P];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (j in 1:N)
+    for (k in 1:T)
+      for (l in 1:P)
+        y_pred[j, k, l] = -1;
+
+  { // Local section to save time and space
+    for (j in 1:N) {
+      int n_succ = 0;
+      int n_pump
= 0;
+
+      for (k in 1:Tsubj[j]) {
+        real p_burst;  // Belief that the balloon will burst
+        real omega;    // Optimal number of pumps
+
+        p_burst = 1 - ((phi[j] + eta[j] * n_succ) / (1 + eta[j] * n_pump));
+        omega = -gam[j] / log1m(p_burst);
+
+        for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) {
+          log_lik += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l));
+          y_pred[j, k, l] = bernoulli_logit_rng(tau[j] * (omega - l));
+        }
+
+        n_succ += pumps[j, k] - explosion[j, k];
+        n_pump += pumps[j, k];
+      }
+    }
+  }
+}
+
diff --git a/Python/hbayesdm/common/stan_files/choiceRT_ddm.stan b/Python/hbayesdm/common/stan_files/choiceRT_ddm.stan
new file mode 100644
index 00000000..58baaec6
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/choiceRT_ddm.stan
@@ -0,0 +1,98 @@
+#include /pre/license.stan
+
+// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists
+data {
+  int N;       // Number of subjects
+  int Nu_max;  // Max (across subjects) number of upper boundary responses
+  int Nl_max;  // Max (across subjects) number of lower boundary responses
+  int Nu[N];   // Number of upper boundary responses for each subj
+  int Nl[N];   // Number of lower boundary responses for each subj
+  real RTu[N, Nu_max];  // upper boundary response times
+  real RTl[N, Nl_max];  // lower boundary response times
+  real minRT[N];        // minimum RT for each subject of the observed data
+  real RTbound;         // lower bound of RT across all subjects (e.g., 0.1 second)
+}
+
+parameters {
+  // parameters of the DDM (parameter names in Ratcliff's DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R
+  // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ
+  // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0
+  // beta (b):  Initial bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1
+  // delta (v): Drift rate; quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta
+  // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 0 < ter (in seconds)
+  ///* upper boundary of tau must be smaller than minimum RT
+  //to avoid zero likelihood for fast responses.
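+  //In this implementation tau is additionally kept inside (RTbound, minRT[i])
+  //by the bounded transform in the transformed parameters block:
+  //  tau[i] = Phi_approx(mu_pr[4] + sigma[4] * tau_pr[i]) * (minRT[i] - RTbound) + RTbound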
+  //tau can for physiological reasons not be faster than 0.1 s.*/
+
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[4] mu_pr;
+  vector[4] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] alpha_pr;
+  vector[N] beta_pr;
+  vector[N] delta_pr;
+  vector[N] tau_pr;
+}
+
+transformed parameters {
+  // Transform subject-level raw parameters
+  vector[N] alpha; // boundary separation
+  vector[N] beta;  // initial bias
+  vector[N] delta; // drift rate
+  vector[N] tau;   // nondecision time
+
+  for (i in 1:N) {
+    beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]);
+    tau[i]  = Phi_approx(mu_pr[4] + sigma[4] * tau_pr[i]) * (minRT[i] - RTbound) + RTbound;
+  }
+  alpha = exp(mu_pr[1] + sigma[1] * alpha_pr);
+  delta = exp(mu_pr[3] + sigma[3] * delta_pr);
+}
+
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // Individual parameters for non-centered parameterization
+  alpha_pr ~ normal(0, 1);
+  beta_pr  ~ normal(0, 1);
+  delta_pr ~ normal(0, 1);
+  tau_pr   ~ normal(0, 1);
+
+  // Begin subject loop
+  for (i in 1:N) {
+    // Response time distributed along wiener first passage time distribution
+    RTu[i, :Nu[i]] ~ wiener(alpha[i], tau[i], beta[i], delta[i]);
+    RTl[i, :Nl[i]] ~ wiener(alpha[i], tau[i], 1-beta[i], -delta[i]);
+
+  } // end of subject loop
+}
+
+generated quantities {
+  // For group level parameters
+  real mu_alpha; // boundary separation
+  real mu_beta;  // initial bias
+  real mu_delta; // drift rate
+  real mu_tau;   // nondecision time
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // Assign group level parameter values
+  mu_alpha = exp(mu_pr[1]);
+  mu_beta  = Phi_approx(mu_pr[2]);
+  mu_delta = exp(mu_pr[3]);
+  mu_tau   = Phi_approx(mu_pr[4]) * (mean(minRT)-RTbound) + RTbound;
+
+  { // local section, this saves time and space
+    // Begin subject loop
+    for (i in 1:N) {
+      log_lik[i] = wiener_lpdf(RTu[i, :Nu[i]] | alpha[i], tau[i], beta[i], delta[i]);
+      log_lik[i] += wiener_lpdf(RTl[i, :Nl[i]] | alpha[i], tau[i], 1-beta[i], -delta[i]);
+    }
+  }
+}
+
diff --git a/Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan b/Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan
new file mode 100644
index 00000000..6bacd18a
--- /dev/null
+++ b/Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan
@@ -0,0 +1,58 @@
+#include /pre/license.stan
+
+// based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others @ Stan mailing lists
+data {
+  int Nu;        // Number of upper boundary responses
+  int Nl;        // Number of lower boundary responses
+  real RTu[Nu];  // upper boundary response times
+  real RTl[Nl];  // lower boundary response times
+  real minRT;    // minimum RT of the observed data
+  real RTbound;  // lower bound of RT (e.g., 0.1 second)
+}
+
+parameters {
+  // parameters of the DDM (parameter names in Ratcliff's DDM), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R
+  // also see: https://groups.google.com/forum///!searchin/stan-users/wiener%7Csort:relevance/stan-users/-6wJfA-t2cQ/Q8HS-DXgBgAJ
+  // alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). alpha > 0
+  // beta (b):  Initial bias for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1
+  // delta (v): Drift rate; quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta
+  // tau (ter): Nondecision time + Motor response time + encoding time (high means slow encoding, execution). 
0 < ter (in seconds) + ///* upper boundary of tau must be smaller than minimum RT + //to avoid zero likelihood for fast responses. + //tau can for physiological reasone not be faster than 0.1 s.*/ + + real alpha; // boundary separation + real beta; // initial bias + real delta; // drift rate + real tau; // nondecision time +} + +model { + alpha ~ uniform(0, 5); + beta ~ uniform(0, 1); + delta ~ normal(0, 2); + tau ~ uniform(RTbound, minRT); + + RTu ~ wiener(alpha, tau, beta, delta); + RTl ~ wiener(alpha, tau, 1-beta, -delta); +} + +generated quantities { + + // For log likelihood calculation + real log_lik; + + // For posterior predictive check (Not implementeed yet) + // vector[Nu] y_pred_upper; + // vector[Nl] y_pred_lower; + + { // local section, this saves time and space + log_lik = wiener_lpdf(RTu | alpha, tau, beta, delta); + log_lik += wiener_lpdf(RTl | alpha, tau, 1-beta, -delta); + + // generate posterior predictions (Not implemented yet) + // y_pred_upper = wiener_rng(alpha, tau, beta, delta); + // y_pred_lower = wiener_rng(alpha, tau, 1-beta, -delta); + } +} + diff --git a/Python/hbayesdm/common/stan_files/choiceRT_lba.stan b/Python/hbayesdm/common/stan_files/choiceRT_lba.stan new file mode 100644 index 00000000..222e5a27 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/choiceRT_lba.stan @@ -0,0 +1,278 @@ +#include /pre/license.stan + +// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). +// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. +functions { + real lba_pdf(real t, real b, real A, real v_pdf, real s) { + //PDF of the LBA model + real b_A_tv_ts; + real b_tv_ts; + real term_1b; + real term_2b; + real term_3b; + real term_4b; + real pdf; + + b_A_tv_ts = (b - A - t * v_pdf)/(t * s); + b_tv_ts = (b - t * v_pdf)/(t * s); + + term_1b = v_pdf * Phi(b_A_tv_ts); + term_2b = s * exp(normal_lpdf(fabs(b_A_tv_ts) | 0, 1)); + term_3b = v_pdf * Phi(b_tv_ts); + term_4b = s * exp(normal_lpdf(fabs(b_tv_ts) | 0, 1)); + + pdf = (1/A) * (-term_1b + term_2b + term_3b - term_4b); + + return pdf; + } + + real lba_cdf(real t, real b, real A, real v_cdf, real s) { + //CDF of the LBA model + real b_A_tv; + real b_tv; + real ts; + real term_1a; + real term_2a; + real term_3a; + real term_4a; + real cdf; + + b_A_tv = b - A - t * v_cdf; + b_tv = b - t * v_cdf; + ts = t * s; + + term_1a = b_A_tv/A * Phi(b_A_tv/ts); + term_2a = b_tv/A * Phi(b_tv/ts); + term_3a = ts/A * exp(normal_lpdf(fabs(b_A_tv/ts) | 0, 1)); + term_4a = ts/A * exp(normal_lpdf(fabs(b_tv/ts) | 0, 1)); + + cdf = 1 + term_1a - term_2a + term_3a - term_4a; + + return cdf; + } + + real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { + + real t; + real b; + real cdf; + real pdf; + vector[cols(RT)] prob; + real out; + real prob_neg; + + b = A + d; + for (i in 1:cols(RT)) { + t = RT[1, i] - tau; + if (t > 0) { + cdf = 1; + for (j in 1:num_elements(v)) { + if (RT[2, i] == j) { + pdf = lba_pdf(t, b, A, v[j], s); + } else { + cdf *= lba_cdf(t, b, A, v[j], s); + } + } + prob_neg = 1; + for (j in 1:num_elements(v)) { + prob_neg *= Phi(-v[j]/s); + } + prob[i] = pdf * (1-cdf); + prob[i] /= (1-prob_neg); + if (prob[i] < 1e-10) { + prob[i] = 1e-10; + } + + } else { + prob[i] = 1e-10; + } + } + out = sum(log(prob)); + return out; + } + + vector lba_rng(real d, real A, vector v, real s, real tau) { + + int get_pos_drift; + int no_pos_drift; + int get_first_pos; + vector[num_elements(v)] drift; + int max_iter; + int iter; + real 
start[num_elements(v)]; + real ttf[num_elements(v)]; + int resp[num_elements(v)]; + real rt; + vector[2] pred; + real b; + + //try to get a positive drift rate + get_pos_drift = 1; + no_pos_drift = 0; + max_iter = 1000; + iter = 0; + while(get_pos_drift) { + for (j in 1:num_elements(v)) { + drift[j] = normal_rng(v[j], s); + if (drift[j] > 0) { + get_pos_drift = 0; + } + } + iter += 1; + if (iter > max_iter) { + get_pos_drift = 0; + no_pos_drift = 1; + } + } + //if both drift rates are <= 0 + //return an infinite response time + if (no_pos_drift) { + pred[1] = -1; + pred[2] = -1; + } else { + b = A + d; + for (i in 1:num_elements(v)) { + //start time of each accumulator + start[i] = uniform_rng(0, A); + //finish times + ttf[i] = (b-start[i])/drift[i]; + } + //rt is the fastest accumulator finish time + //if one is negative get the positive drift + resp = sort_indices_asc(ttf); + { + real temp_ttf[num_elements(v)]; + temp_ttf = sort_asc(ttf); + ttf = temp_ttf; + } + get_first_pos = 1; + iter = 1; + while(get_first_pos) { + if (ttf[iter] > 0) { + pred[1] = ttf[iter]; + pred[2] = resp[iter]; + get_first_pos = 0; + } + iter += 1; + } + } + return pred; + } +} +data { + int N; + int Max_tr; + int N_choices; + int N_cond; + int N_tr_cond[N, N_cond]; + matrix[2, Max_tr] RT[N, N_cond]; + +} + +parameters { + // Hyperparameter means + real mu_d; + real mu_A; + real mu_tau; + vector[N_choices] mu_v[N_cond]; + + // Hyperparameter sigmas + real sigma_d; + real sigma_A; + real sigma_tau; + vector[N_choices] sigma_v[N_cond]; + + // Individual parameters + real d[N]; + real A[N]; + real tau[N]; + vector[N_choices] v[N, N_cond]; +} +transformed parameters { + // s is set to 1 to make model identifiable + real s; + s = 1; +} +model { + // Hyperparameter means + mu_d ~ normal(.5, 1)T[0,]; + mu_A ~ normal(.5, 1)T[0,]; + mu_tau ~ normal(.5, .5)T[0,]; + + // Hyperparameter sigmas + sigma_d ~ gamma(1, 1); + sigma_A ~ gamma(1, 1); + sigma_tau ~ gamma(1, 1); + + // Hyperparameter means and sigmas for multiple drift rates + for (j in 1:N_cond) { + for (n in 1:N_choices) { + mu_v[j, n] ~ normal(2, 1)T[0,]; + sigma_v[j, n] ~ gamma(1, 1); + } + } + + for (i in 1:N) { + // Declare variables + int n_trials; + + // Individual parameters + d[i] ~ normal(mu_d, sigma_d)T[0,]; + A[i] ~ normal(mu_A, sigma_A)T[0,]; + tau[i] ~ normal(mu_tau, sigma_tau)T[0,]; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = N_tr_cond[i, j]; + + for (n in 1:N_choices) { + // Drift rate is normally distributed + v[i, j, n] ~ normal(mu_v[j, n], sigma_v[j, n])T[0,]; + } + // Likelihood of RT x Choice + RT[i, j, , 1:n_trials] ~ lba(d[i], A[i], v[i, j,], s, tau[i]); + } + } +} + +generated quantities { + // Declare variables + int n_trials; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + matrix[2, Max_tr] y_pred[N, N_cond]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (j in 1:N_cond) { + for (t in 1:Max_tr) { + y_pred[i, j, , t] = rep_vector(-1, 2); + } + } + } + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize variables + log_lik[i] = 0; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = N_tr_cond[i, j]; + + // Sum likelihood over conditions within subjects + log_lik[i] += lba_lpdf(RT[i, j, , 1:n_trials] | d[i], A[i], v[i, j,], s, tau[i]); + + for (t in 1:n_trials) { + // generate posterior predictions + y_pred[i, j, , t] = lba_rng(d[i], 
A[i], v[i, j,], s, tau[i]); + } + } + } + // end of subject loop + } +} + diff --git a/Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan b/Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan new file mode 100644 index 00000000..1d5fd992 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan @@ -0,0 +1,239 @@ +#include /pre/license.stan + +// The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). +// Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24. +functions { + real lba_pdf(real t, real b, real A, real v, real s) { + //PDF of the LBA model + real b_A_tv_ts; + real b_tv_ts; + real term_1; + real term_2; + real term_3; + real term_4; + real pdf; + + b_A_tv_ts = (b - A - t * v)/(t * s); + b_tv_ts = (b - t * v)/(t * s); + + term_1 = v * Phi(b_A_tv_ts); + term_2 = s * exp(normal_lpdf(b_A_tv_ts | 0, 1)); + term_3 = v * Phi(b_tv_ts); + term_4 = s * exp(normal_lpdf(b_tv_ts | 0, 1)); + + pdf = (1/A) * (-term_1 + term_2 + term_3 - term_4); + + return pdf; + } + + real lba_cdf(real t, real b, real A, real v, real s) { + //CDF of the LBA model + real b_A_tv; + real b_tv; + real ts; + real term_1; + real term_2; + real term_3; + real term_4; + real cdf; + + b_A_tv = b - A - t * v; + b_tv = b - t * v; + ts = t * s; + + term_1 = b_A_tv/A * Phi(b_A_tv/ts); + term_2 = b_tv/A * Phi(b_tv/ts); + term_3 = ts/A * exp(normal_lpdf(b_A_tv/ts | 0, 1)); + term_4 = ts/A * exp(normal_lpdf(b_tv/ts | 0, 1)); + + cdf = 1 + term_1 - term_2 + term_3 - term_4; + + return cdf; + + } + + real lba_lpdf(matrix RT, real d, real A, vector v, real s, real tau) { + + real t; + real b; + real cdf; + real pdf; + vector[rows(RT)] prob; + real out; + real prob_neg; + + b = A + d; + for (i in 1:rows(RT)) { + t = RT[1, i] - tau; + if (t > 0) { + cdf = 1; + + for (j in 1:num_elements(v)) { + if (RT[2, i] == j) { + pdf = lba_pdf(t, b, A, v[j], s); + } else { + cdf *= (1-lba_cdf(t, b, A, v[j], s)); + } + } + prob_neg = 1; + for (j in 1:num_elements(v)) { + prob_neg *= Phi(-v[j]/s); + } + prob[i] = pdf * cdf; + prob[i] /= (1-prob_neg); + if (prob[i] < 1e-10) { + prob[i] = 1e-10; + } + + } else { + prob[i] = 1e-10; + } + } + out = sum(log(prob)); + return out; + } + + vector lba_rng(real d, real A, vector v, real s, real tau) { + + int get_pos_drift; + int no_pos_drift; + int get_first_pos; + vector[num_elements(v)] drift; + int max_iter; + int iter; + real start[num_elements(v)]; + real ttf[num_elements(v)]; + int resp[num_elements(v)]; + real rt; + vector[2] pred; + real b; + + //try to get a positive drift rate + get_pos_drift = 1; + no_pos_drift = 0; + max_iter = 1000; + iter = 0; + while(get_pos_drift) { + for (j in 1:num_elements(v)) { + drift[j] = normal_rng(v[j], s); + if (drift[j] > 0) { + get_pos_drift = 0; + } + } + iter += 1; + if (iter > max_iter) { + get_pos_drift = 0; + no_pos_drift = 1; + } + } + //if both drift rates are <= 0 + //return an infinite response time + if (no_pos_drift) { + pred[1] = -1; + pred[2] = -1; + } else { + b = A + d; + for (i in 1:num_elements(v)) { + //start time of each accumulator + start[i] = uniform_rng(0, A); + //finish times + ttf[i] = (b-start[i])/drift[i]; + } + //rt is the fastest accumulator finish time + //if one is negative get the positive drift + resp = sort_indices_asc(ttf); + { + real temp_ttf[num_elements(v)]; + temp_ttf = sort_asc(ttf); + ttf = temp_ttf; + } + get_first_pos = 1; + iter = 1; + while(get_first_pos) { + if (ttf[iter] > 0) { + pred[1] = ttf[iter] + tau; + 
pred[2] = resp[iter]; + get_first_pos = 0; + } + iter += 1; + } + } + return pred; + } +} +data { + int N_choice; + int N_cond; + int tr_cond[N_cond]; + int max_tr; + matrix[2, max_tr] RT[N_cond]; +} + +parameters { + real d; + real A; + real tau; + vector[N_choice] v[N_cond]; +} +transformed parameters { + real s; + s = 1; +} +model { + // Declare variables + int n_trials; + + // Individual parameters + d ~ normal(.5, 1)T[0,]; + A ~ normal(.5, 1)T[0,]; + tau ~ normal(.5, .5)T[0,]; + + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = tr_cond[j]; + + for (n in 1:N_choice) { + // Drift rate is normally distributed + v[j, n] ~ normal(2, 1)T[0,]; + } + // Likelihood of RT x Choice + RT[j, , 1:n_trials] ~ lba(d, A, v[j,], s, tau); + } +} + +generated quantities { + // Declare variables + int n_trials; + + // For log likelihood calculation + real log_lik; + + // For posterior predictive check + matrix[2, max_tr] y_pred[N_cond]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (j in 1:N_cond) { + for (t in 1:max_tr) { + y_pred[j, , t] = rep_vector(-1, 2); + } + } + + // initialize log_lik + log_lik = 0; + + { // local section, this saves time and space + for (j in 1:N_cond) { + // Store number of trials for subject/condition pair + n_trials = tr_cond[j]; + + // Sum likelihood over conditions within subjects + log_lik += lba_lpdf(RT[j, , 1:n_trials] | d, A, v[j,], s, tau); + + for (t in 1:n_trials) { + // generate posterior predictions + y_pred[j, , t] = lba_rng(d, A, v[j,], s, tau); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/cra_exp.stan b/Python/hbayesdm/common/stan_files/cra_exp.stan new file mode 100644 index 00000000..86a44a0e --- /dev/null +++ b/Python/hbayesdm/common/stan_files/cra_exp.stan @@ -0,0 +1,134 @@ +#include /pre/license.stan + +/** + * Choice under Risk and Ambiguity Task + * + * Exponential model in Hsu et al. 
(2005) Science + */ + +functions { + /** + * Subjective value function with the exponential equation form + */ + real subjective_value(real alpha, real beta, real p, real a, real v) { + return pow(p, 1 + beta * a) * pow(v, alpha); + } +} + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/block for each subject + + int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) + real prob[N, T]; // The objective probability of the variable lottery + real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) + real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) + real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // risk attitude parameter + vector[N] beta_pr; // ambiguity attitude parameter + vector[N] gamma_pr; // inverse temperature parameter +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] alpha; + vector[N] beta; + vector[N] gamma; + + alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; + beta = mu_pr[2] + sigma[2] * beta_pr; + gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); +} + +model { + // hyper parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 5); + + // individual parameters w/ Matt trick + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + gamma_pr ~ normal(0, 1); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + real p_var; // probability of choosing the variable option + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var = inv_logit(gamma[i] * (u_var - u_fix)); + + target += bernoulli_lpmf(choice[i, t] | p_var); + } + } +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_gamma; + + // For log likelihood calculation for each subject + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Model regressors + real sv[N, T]; + real sv_fix[N, T]; + real sv_var[N, T]; + real p_var[N, T]; + + // Set all posterior predictions to -1 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + sv[i, t] = 0; + sv_fix[i, t] = 0; + sv_var[i, t] = 0; + p_var[i, t] = 0; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 2; + mu_beta = mu_pr[2]; + mu_gamma = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize the log likelihood variable to 0. + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); + + sv_fix[i, t] = u_fix; + sv_var[i, t] = u_var; + sv[i, t] = (choice[i, t] == 1) ? 
u_var : u_fix; + + log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); + y_pred[i, t] = bernoulli_rng(p_var[i, t]); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/cra_linear.stan b/Python/hbayesdm/common/stan_files/cra_linear.stan new file mode 100644 index 00000000..b8653c85 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/cra_linear.stan @@ -0,0 +1,130 @@ +#include /pre/license.stan + +/** + * Choice under Risk and Ambiguity Task + * + * Linear model in Levy et al. (2010) J Neurophysiol + */ + +functions { + /** + * Subjective value function with the linear equation form + */ + real subjective_value(real alpha, real beta, real p, real a, real v) { + return (p - beta * a / 2) * pow(v, alpha); + } +} + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/block for each subject + + int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) + real prob[N, T]; // The objective probability of the variable lottery + real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) + real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) + real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // risk attitude parameter + vector[N] beta_pr; // ambiguity attitude parameter + vector[N] gamma_pr; // inverse temperature parameter +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] alpha; + vector[N] beta; + vector[N] gamma; + + alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; + beta = mu_pr[2] + sigma[2] * beta_pr; + gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); +} + +model { + // hyper parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 5); + + // individual parameters w/ Matt trick + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + gamma_pr ~ normal(0, 1); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + real p_var; // probability of choosing the variable option + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var = inv_logit(gamma[i] * (u_var - u_fix)); + + target += bernoulli_lpmf(choice[i, t] | p_var); + } + } +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_gamma; + + // For log likelihood calculation for each subject + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Model regressors + real sv[N, T]; + real sv_fix[N, T]; + real sv_var[N, T]; + real p_var[N, T]; + + // Set all posterior predictions to -1 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 2; + mu_beta = mu_pr[2]; + mu_gamma = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Initialize the log likelihood variable to 0. 
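+      // (u_fix, u_var, and p_var below are recomputed exactly as in the
+      //  model block, so the stored regressors match the fitted likelihood)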
+ log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + real u_fix; // subjective value of the fixed lottery + real u_var; // subjective value of the variable lottery + + u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); + u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); + p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); + + sv_fix[i, t] = u_fix; + sv_var[i, t] = u_var; + sv[i, t] = (choice[i, t] == 1) ? u_var : u_fix; + + log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); + y_pred[i, t] = bernoulli_rng(p_var[i, t]); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan b/Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan new file mode 100644 index 00000000..ee248835 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan @@ -0,0 +1,154 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real opt1hprob[N, T]; + real opt2hprob[N, T]; + real opt1hval[N, T]; + real opt1lval[N, T]; + real opt2hval[N, T]; + real opt2lval[N, T]; +} +transformed data { +} +parameters{ + //group-level parameters + vector[4] mu_pr; + vector[4] sigma; + + //subject-level raw parameters, follows norm(0,1), for later Matt Trick + vector[N] tau_pr; //probability weight parameter + vector[N] rho_pr; //subject utility parameter + vector[N] lambda_pr; //loss aversion parameter + vector[N] beta_pr; //inverse softmax temperature +} + +transformed parameters { + //subject-level parameters + vector[N] tau; + vector[N] rho; + vector[N] lambda; + vector[N] beta; + + //Matt Trick + for (i in 1:N) { + tau[i] = Phi_approx( mu_pr[1] + sigma[1] * tau_pr[i] ); + rho[i] = Phi_approx( mu_pr[2] + sigma[2] * rho_pr[i] )*2; + lambda[i] = Phi_approx( mu_pr[3] + sigma[3] * lambda_pr[i] )*5; + beta[i] = Phi_approx( mu_pr[4] + sigma[4] * beta_pr[i] ); + } +} + +model { + //prior : hyperparameters + mu_pr ~ normal(0,1); + sigma ~ cauchy(0,5); + + //prior : individual parameters + tau_pr ~ normal(0,1); + rho_pr ~ normal(0,1); + lambda_pr ~ normal(0,1); + beta_pr ~ normal(0,1); + + //subject loop and trial loop + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + vector[4] w_prob; + vector[2] U_opt; + + //probability weight function + w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); + w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); + w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); + w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); + + if (opt1hval[i,t]>0) { + if (opt1lval[i,t]>= 0) { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); + } else { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + + if (opt2hval[i,t] > 0) { + if (opt2lval[i,t] >= 0) { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); + } else { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + // compute action probabilities + choice[i, t] ~ categorical_logit(U_opt*beta[i]); + } + } +} + +generated quantities { + real mu_tau; + real mu_rho; + real mu_lambda; + real mu_beta; + real log_lik[N]; + // For posterior predictive check + real y_pred[N,T]; + // Set all posterior predictions to 0 (avoids NULL 
values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_tau = Phi_approx(mu_pr[1]); + mu_rho = Phi_approx(mu_pr[2])*2; + mu_lambda = Phi_approx(mu_pr[3])*5; + mu_beta = Phi_approx(mu_pr[4]); + + { // local section, this saves time and space + for (i in 1:N) { + log_lik[i] = 0; + for (t in 1:Tsubj[i]) { + vector[4] w_prob; + vector[2] U_opt; + + //probability weight function + w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); + w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); + w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); + w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); + + if (opt1hval[i,t]>0) { + if (opt1lval[i,t]>= 0) { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); + } else { + U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; + } + + if (opt2hval[i,t] > 0) { + if (opt2lval[i,t] >= 0) { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); + } else { + U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + } else { + U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; + } + + // compute action probabilities + log_lik[i] += categorical_logit_lpmf(choice[i,t] | U_opt*beta[i]); + y_pred[i, t] = categorical_rng(softmax(U_opt*beta[i])); + + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dd_cs.stan b/Python/hbayesdm/common/stan_files/dd_cs.stan new file mode 100644 index 00000000..d221d34a --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dd_cs.stan @@ -0,0 +1,107 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] r_pr; // (exponential) discounting rate (Impatience) + vector[N] s_pr; // time-sensitivity + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] r; + vector[N] s; + vector[N] beta; + + for (i in 1:N) { + r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); + s[i] = Phi_approx(mu_pr[2] + sigma[2] * s_pr[i]) * 10; + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 5; + } +} + +model { +// Constant-sensitivity model (Ebert & Prelec, 2007) + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + s_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); + ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_r; + real mu_s; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior 
predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_r = Phi_approx(mu_pr[1]); + mu_s = Phi_approx(mu_pr[2]) * 10; + mu_beta = Phi_approx(mu_pr[3]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); + ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dd_cs_single.stan b/Python/hbayesdm/common/stan_files/dd_cs_single.stan new file mode 100644 index 00000000..2436b8b1 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dd_cs_single.stan @@ -0,0 +1,63 @@ +#include /pre/license.stan + +data { + int Tsubj; + real delay_later[Tsubj]; + real amount_later[Tsubj]; + real delay_sooner[Tsubj]; + real amount_sooner[Tsubj]; + int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { + real r; // (exponential) discounting rate + real s; // impatience + real beta; // inverse temperature +} + +transformed parameters { + real ev_later[Tsubj]; + real ev_sooner[Tsubj]; + + for (t in 1:Tsubj) { + ev_later[t] = amount_later[t] * exp(-1* (pow(r * delay_later[t], s))); + ev_sooner[t] = amount_sooner[t] * exp(-1* (pow(r * delay_sooner[t], s))); + } +} + +model { + // constant-sensitivity model (Ebert & Prelec, 2007) + // hyperparameters + r ~ uniform(0, 1); + s ~ uniform(0, 10); + beta ~ uniform(0, 5); + + for (t in 1:Tsubj) { + choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); + } +} + +generated quantities { + real logR; + real log_lik; + + // For posterior predictive check + real y_pred[Tsubj]; + + logR = log(r); + + { // local section, this saves time and space + log_lik = 0; + + for (t in 1:Tsubj) { + log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); + + // generate posterior prediction for current trial + y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dd_exp.stan b/Python/hbayesdm/common/stan_files/dd_exp.stan new file mode 100644 index 00000000..3d772a5a --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dd_exp.stan @@ -0,0 +1,101 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] r_pr; + vector[N] beta_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] r; + vector[N] beta; + + for (i in 1:N) { + r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; + } +} + +model { +// Exponential function + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + beta_pr ~ 
normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_r; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_r = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dd_hyperbolic.stan b/Python/hbayesdm/common/stan_files/dd_hyperbolic.stan new file mode 100644 index 00000000..1551304a --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dd_hyperbolic.stan @@ -0,0 +1,101 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real delay_later[N, T]; + real amount_later[N, T]; + real delay_sooner[N, T]; + real amount_sooner[N, T]; + int choice[N, T]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] k_pr; + vector[N] beta_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] k; + vector[N] beta; + + for (i in 1:N) { + k[i] = Phi_approx(mu_pr[1] + sigma[1] * k_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; + } +} + +model { +// Hyperbolic function + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + k_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); + choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); + } + } +} +generated quantities { + // For group level parameters + real mu_k; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_k = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real ev_later; + real ev_sooner; + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); + ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); + log_lik[i] += 
bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan b/Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan new file mode 100644 index 00000000..be3011f0 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan @@ -0,0 +1,57 @@ +#include /pre/license.stan + +data { + int Tsubj; + real delay_later[Tsubj]; + real amount_later[Tsubj]; + real delay_sooner[Tsubj]; + real amount_sooner[Tsubj]; + int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward +} + +transformed data { +} + +parameters { + real k; // discounting rate + real beta; // inverse temperature +} + +transformed parameters { + real ev_later[Tsubj]; + real ev_sooner[Tsubj]; + + for (t in 1:Tsubj) { + ev_later[t] = amount_later[t] / (1 + k * delay_later[t]); + ev_sooner[t] = amount_sooner[t] / (1 + k * delay_sooner[t]); + } +} + +model { + k ~ uniform(0, 1); + beta ~ uniform(0, 5); + + for (t in 1:Tsubj) { + choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); + } +} +generated quantities { + real logK; + real log_lik; + + // For posterior predictive check + real y_pred[Tsubj]; + + logK = log(k); + + { // local section, this saves time and space + log_lik = 0; + for (t in 1:Tsubj) { + log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); + + // generate posterior prediction for current trial + y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/gng_m1.stan b/Python/hbayesdm/common/stan_files/gng_m1.stan new file mode 100644 index 00000000..5ac8abd0 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/gng_m1.stan @@ -0,0 +1,149 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[3] mu_pr; + vector[3] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] rho_pr; // rho, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] rho; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + rho = exp(mu_pr[3] + sigma[3] * rho_pr); +} + +model { +// gng_m1: RW + noise model in Guitart-Masip et al 2012 + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + rho_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + 
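+        // Rescorla-Wagner delta rule: Q <- Q + ep * (rho * outcome - Q),
+        // applied only to the value of the action actually taken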
qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_rho; + real log_lik[N]; + real Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_xi = Phi_approx(mu_pr[1]); + mu_ep = Phi_approx(mu_pr[2]); + mu_rho = exp(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); + + // Model regressors --> store values before being updated + Qgo[i, t] = qv_g[cue[i, t]]; + Qnogo[i, t] = qv_ng[cue[i, t]]; + Wgo[i, t] = wv_g[cue[i, t]]; + Wnogo[i, t] = wv_ng[cue[i, t]]; + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/gng_m2.stan b/Python/hbayesdm/common/stan_files/gng_m2.stan new file mode 100644 index 00000000..c9a8ced8 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/gng_m2.stan @@ -0,0 +1,160 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[4] mu_pr; + vector[4] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] b_pr; // go bias + vector[N] rho_pr; // rho, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] b; + vector[N] rho; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + b = mu_pr[3] + sigma[3] * b_pr; // vectorization + rho = exp(mu_pr[4] + sigma[4] * rho_pr); +} + +model { +// gng_m2: RW + noise + bias model in Guitart-Masip et al 2012 + // hyper parameters + mu_pr[1] ~ normal(0, 1.0); + mu_pr[2] ~ normal(0, 1.0); + mu_pr[3] ~ normal(0, 10.0); + mu_pr[4] ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + sigma[4] ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + b_pr ~ normal(0, 1.0); + rho_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; 
// action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_b; + real mu_rho; + real log_lik[N]; + real Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_xi = Phi_approx(mu_pr[1]); + mu_ep = Phi_approx(mu_pr[2]); + mu_b = mu_pr[3]; + mu_rho = exp(mu_pr[4]); + + { // local section, this saves time and space + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); + + // Model regressors --> store values before being updated + Qgo[i, t] = qv_g[cue[i, t]]; + Qnogo[i, t] = qv_ng[cue[i, t]]; + Wgo[i, t] = wv_g[cue[i, t]]; + Wnogo[i, t] = wv_ng[cue[i, t]]; + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/gng_m3.stan b/Python/hbayesdm/common/stan_files/gng_m3.stan new file mode 100644 index 00000000..2368ea1a --- /dev/null +++ b/Python/hbayesdm/common/stan_files/gng_m3.stan @@ -0,0 +1,179 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[5] mu_pr; + vector[5] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] b_pr; // go bias + vector[N] pi_pr; // pavlovian bias + vector[N] rho_pr; // rho, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] b; + vector[N] pi; + vector[N] rho; + + for (i in 1:N) { + xi[i] = 
Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]);
+    ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]);
+  }
+  b   = mu_pr[3] + sigma[3] * b_pr; // vectorization
+  pi  = mu_pr[4] + sigma[4] * pi_pr;
+  rho = exp(mu_pr[5] + sigma[5] * rho_pr);
+}
+
+model {
+// gng_m3: RW + noise + bias + pi model (Cavanagh et al 2013 J Neuro)
+  // hyper parameters
+  mu_pr[1]  ~ normal(0, 1.0);
+  mu_pr[2]  ~ normal(0, 1.0);
+  mu_pr[3]  ~ normal(0, 10.0);
+  mu_pr[4]  ~ normal(0, 10.0);
+  mu_pr[5]  ~ normal(0, 1.0);
+  sigma[1:2] ~ normal(0, 0.2);
+  sigma[3:4] ~ cauchy(0, 1.0);
+  sigma[5]   ~ normal(0, 0.2);
+
+  // individual parameters w/ Matt trick
+  xi_pr  ~ normal(0, 1.0);
+  ep_pr  ~ normal(0, 1.0);
+  b_pr   ~ normal(0, 1.0);
+  pi_pr  ~ normal(0, 1.0);
+  rho_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    vector[4] wv_g;  // action weight for go
+    vector[4] wv_ng; // action weight for nogo
+    vector[4] qv_g;  // Q value for go
+    vector[4] qv_ng; // Q value for nogo
+    vector[4] sv;    // stimulus value
+    vector[4] pGo;   // prob of go (press)
+
+    wv_g  = initV;
+    wv_ng = initV;
+    qv_g  = initV;
+    qv_ng = initV;
+    sv    = initV;
+
+    for (t in 1:Tsubj[i]) {
+      wv_g[cue[i, t]]  = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]];
+      wv_ng[cue[i, t]] = qv_ng[cue[i, t]];  // qv_ng is always equal to wv_ng (regardless of action)
+      pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
+      { // noise
+        pGo[cue[i, t]] *= (1 - xi[i]);
+        pGo[cue[i, t]] += xi[i]/2;
+      }
+      pressed[i, t] ~ bernoulli(pGo[cue[i, t]]);
+
+      // after receiving feedback, update sv[t + 1]
+      sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]);
+
+      // update action values
+      if (pressed[i, t]) { // update go value
+        qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]);
+      } else { // update no-go value
+        qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]);
+      }
+    } // end of t loop
+  } // end of i loop
+}
+
+generated quantities {
+  real mu_xi;
+  real mu_ep;
+  real mu_b;
+  real mu_pi;
+  real mu_rho;
+  real log_lik[N];
+  real Qgo[N, T];
+  real Qnogo[N, T];
+  real Wgo[N, T];
+  real Wnogo[N, T];
+  real SV[N, T];
+
+  // For posterior predictive check
+  real y_pred[N, T];
+
+  // Set all posterior predictions to 0 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i, t] = -1;
+    }
+  }
+
+  mu_xi  = Phi_approx(mu_pr[1]);
+  mu_ep  = Phi_approx(mu_pr[2]);
+  mu_b   = mu_pr[3];
+  mu_pi  = mu_pr[4];
+  mu_rho = exp(mu_pr[5]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      vector[4] wv_g;  // action weight for go
+      vector[4] wv_ng; // action weight for nogo
+      vector[4] qv_g;  // Q value for go
+      vector[4] qv_ng; // Q value for nogo
+      vector[4] sv;    // stimulus value
+      vector[4] pGo;   // prob of go (press)
+
+      wv_g  = initV;
+      wv_ng = initV;
+      qv_g  = initV;
+      qv_ng = initV;
+      sv    = initV;
+
+      log_lik[i] = 0;
+
+      for (t in 1:Tsubj[i]) {
+        wv_g[cue[i, t]]  = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]];
+        wv_ng[cue[i, t]] = qv_ng[cue[i, t]];  // qv_ng is always equal to wv_ng (regardless of action)
+        pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
+        { // noise
+          pGo[cue[i, t]] *= (1 - xi[i]);
+          pGo[cue[i, t]] += xi[i]/2;
+        }
+        log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]);
+
+        // generate posterior prediction for current trial
+        y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]);
+
+        // Model regressors --> store values before being updated
+        Qgo[i, t]   = qv_g[cue[i, t]];
+        Qnogo[i, t] = qv_ng[cue[i, t]];
+        Wgo[i, t]   = wv_g[cue[i, t]];
+        Wnogo[i, t] = wv_ng[cue[i, t]];
+        SV[i, t]    =
sv[cue[i, t]]; + + // after receiving feedback, update sv[t + 1] + sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); + + // update action values + if (pressed[i, t]) { // update go value + qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { // update no-go value + qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/gng_m4.stan b/Python/hbayesdm/common/stan_files/gng_m4.stan new file mode 100644 index 00000000..73e30cb1 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/gng_m4.stan @@ -0,0 +1,210 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int cue[N, T]; + int pressed[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { + // declare as vectors for vectorizing + vector[6] mu_pr; + vector[6] sigma; + vector[N] xi_pr; // noise + vector[N] ep_pr; // learning rate + vector[N] b_pr; // go bias + vector[N] pi_pr; // pavlovian bias + vector[N] rhoRew_pr; // rho reward, inv temp + vector[N] rhoPun_pr; // rho punishment, inv temp +} + +transformed parameters { + vector[N] xi; + vector[N] ep; + vector[N] b; + vector[N] pi; + vector[N] rhoRew; + vector[N] rhoPun; + + for (i in 1:N) { + xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); + ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); + } + b = mu_pr[3] + sigma[3] * b_pr; // vectorization + pi = mu_pr[4] + sigma[4] * pi_pr; + rhoRew = exp(mu_pr[5] + sigma[5] * rhoRew_pr); + rhoPun = exp(mu_pr[6] + sigma[6] * rhoPun_pr); +} + +model { +// gng_m4: RW(rew/pun) + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) + // hyper parameters + mu_pr[1] ~ normal(0, 1.0); + mu_pr[2] ~ normal(0, 1.0); + mu_pr[3] ~ normal(0, 10.0); + mu_pr[4] ~ normal(0, 10.0); + mu_pr[5] ~ normal(0, 1.0); + mu_pr[6] ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3:4] ~ cauchy(0, 1.0); + sigma[5:6] ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + xi_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + b_pr ~ normal(0, 1.0); + pi_pr ~ normal(0, 1.0); + rhoRew_pr ~ normal(0, 1.0); + rhoPun_pr ~ normal(0, 1.0); + + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] sv; // stimulus value + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + sv = initV; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); + + // after receiving feedback, update sv[t + 1] + if (outcome[i, t] >= 0) { + sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); + } else { + sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); + } + + // update action values + if (pressed[i, t]) { // update go value + if (outcome[i, t] >=0) { + qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { + qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); + } + } else { // update no-go value + if (outcome[i, t] 
>=0) { + qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } else { + qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } + } // end of t loop + } // end of i loop +} + +generated quantities { + real mu_xi; + real mu_ep; + real mu_b; + real mu_pi; + real mu_rhoRew; + real mu_rhoPun; + real log_lik[N]; + real Qgo[N, T]; + real Qnogo[N, T]; + real Wgo[N, T]; + real Wnogo[N, T]; + real SV[N, T]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_xi = Phi_approx(mu_pr[1]); + mu_ep = Phi_approx(mu_pr[2]); + mu_b = mu_pr[3]; + mu_pi = mu_pr[4]; + mu_rhoRew = exp(mu_pr[5]); + mu_rhoPun = exp(mu_pr[6]); + + { // local section, this saves time and space + for (i in 1:N) { + vector[4] wv_g; // action weight for go + vector[4] wv_ng; // action weight for nogo + vector[4] qv_g; // Q value for go + vector[4] qv_ng; // Q value for nogo + vector[4] sv; // stimulus value + vector[4] pGo; // prob of go (press) + + wv_g = initV; + wv_ng = initV; + qv_g = initV; + qv_ng = initV; + sv = initV; + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; + wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) + pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); + { // noise + pGo[cue[i, t]] *= (1 - xi[i]); + pGo[cue[i, t]] += xi[i]/2; + } + log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); + + // Model regressors --> store values before being updated + Qgo[i, t] = qv_g[cue[i, t]]; + Qnogo[i, t] = qv_ng[cue[i, t]]; + Wgo[i, t] = wv_g[cue[i, t]]; + Wnogo[i, t] = wv_ng[cue[i, t]]; + SV[i, t] = sv[cue[i, t]]; + + // after receiving feedback, update sv[t + 1] + if (outcome[i, t] >= 0) { + sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); + } else { + sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); + } + + // update action values + if (pressed[i, t]) { // update go value + if (outcome[i, t] >=0) { + qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); + } else { + qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); + } + } else { // update no-go value + if (outcome[i, t] >=0) { + qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } else { + qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); + } + } + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/igt_orl.stan b/Python/hbayesdm/common/stan_files/igt_orl.stan new file mode 100644 index 00000000..a560de27 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/igt_orl.stan @@ -0,0 +1,207 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; + real sign_out[N, T]; +} +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[5] mu_pr; + vector[5] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Arew_pr; + vector[N] Apun_pr; + vector[N] K_pr; + vector[N] betaF_pr; + vector[N] betaP_pr; +} +transformed parameters { + // Transform subject-level raw 
parameters
+  vector[N] Arew;
+  vector[N] Apun;
+  vector[N] K;
+  vector[N] betaF;
+  vector[N] betaP;
+
+  for (i in 1:N) {
+    Arew[i] = Phi_approx( mu_pr[1] + sigma[1] * Arew_pr[i] );
+    Apun[i] = Phi_approx( mu_pr[2] + sigma[2] * Apun_pr[i] );
+    K[i]    = Phi_approx(mu_pr[3] + sigma[3] * K_pr[i]) * 5;
+  }
+  betaF = mu_pr[4] + sigma[4] * betaF_pr;
+  betaP = mu_pr[5] + sigma[5] * betaP_pr;
+}
+model {
+  // Hyperparameters
+  mu_pr ~ normal(0, 1);
+  sigma[1:3] ~ normal(0, 0.2);
+  sigma[4:5] ~ cauchy(0, 1.0);
+
+  // individual parameters
+  Arew_pr  ~ normal(0, 1.0);
+  Apun_pr  ~ normal(0, 1.0);
+  K_pr     ~ normal(0, 1.0);
+  betaF_pr ~ normal(0, 1.0);
+  betaP_pr ~ normal(0, 1.0);
+
+  for (i in 1:N) {
+    // Define values
+    vector[4] ef;
+    vector[4] ev;
+    vector[4] PEfreq_fic;
+    vector[4] PEval_fic;
+    vector[4] pers; // perseverance
+    vector[4] util;
+
+    real PEval;
+    real PEfreq;
+    real efChosen;
+    real evChosen;
+    real K_tr;
+
+    // Initialize values
+    ef   = initV;
+    ev   = initV;
+    pers = initV; // initial pers values
+    util = initV;
+    K_tr = pow(3, K[i]) - 1;
+
+    for (t in 1:Tsubj[i]) {
+      // softmax choice
+      choice[i, t] ~ categorical_logit( util );
+
+      // Prediction error
+      PEval  = outcome[i,t] - ev[ choice[i,t]];
+      PEfreq = sign_out[i,t] - ef[ choice[i,t]];
+      PEfreq_fic = -sign_out[i,t]/3 - ef;
+
+      // store chosen deck ev
+      efChosen = ef[ choice[i,t]];
+      evChosen = ev[ choice[i,t]];
+
+      if (outcome[i,t] >= 0) {
+        // Update ev for all decks
+        ef += Apun[i] * PEfreq_fic;
+        // Update chosendeck with stored value
+        ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq;
+        ev[ choice[i,t]] = evChosen + Arew[i] * PEval;
+      } else {
+        // Update ev for all decks
+        ef += Arew[i] * PEfreq_fic;
+        // Update chosendeck with stored value
+        ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq;
+        ev[ choice[i,t]] = evChosen + Apun[i] * PEval;
+      }
+
+      // Perseverance updating
+      pers[ choice[i,t] ] = 1; // perseverance term
+      pers /= (1 + K_tr);      // decay
+
+      // Utility of expected value and perseverance
+      util = ev + ef * betaF[i] + pers * betaP[i];
+    }
+  }
+}
+
+generated quantities {
+  // For group level parameters
+  real mu_Arew;
+  real mu_Apun;
+  real mu_K;
+  real mu_betaF;
+  real mu_betaP;
+
+  // For log likelihood calculation
+  real log_lik[N];
+
+  // For posterior predictive check
+  real y_pred[N,T];
+
+  // Set all posterior predictions to -1 (avoids NULL values)
+  for (i in 1:N) {
+    for (t in 1:T) {
+      y_pred[i,t] = -1;
+    }
+  }
+
+  mu_Arew  = Phi_approx(mu_pr[1]);
+  mu_Apun  = Phi_approx(mu_pr[2]);
+  mu_K     = Phi_approx(mu_pr[3]) * 5;
+  mu_betaF = mu_pr[4];
+  mu_betaP = mu_pr[5];
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define values
+      vector[4] ef;
+      vector[4] ev;
+      vector[4] PEfreq_fic;
+      vector[4] PEval_fic;
+      vector[4] pers; // perseverance
+      vector[4] util;
+
+      real PEval;
+      real PEfreq;
+      real efChosen;
+      real evChosen;
+      real K_tr;
+
+      // Initialize values
+      log_lik[i] = 0;
+      ef   = initV;
+      ev   = initV;
+      pers = initV; // initial pers values
+      util = initV;
+      K_tr = pow(3, K[i]) - 1;
+
+      for (t in 1:Tsubj[i]) {
+        // softmax choice
+        log_lik[i] += categorical_logit_lpmf( choice[i, t] | util );
+
+        // generate posterior prediction for current trial
+        y_pred[i,t] = categorical_rng(softmax(util));
+
+        // Prediction error
+        PEval  = outcome[i,t] - ev[ choice[i,t]];
+        PEfreq = sign_out[i,t] - ef[ choice[i,t]];
+        PEfreq_fic = -sign_out[i,t]/3 - ef;
+
+        // store chosen deck ev
+        efChosen = ef[ choice[i,t]];
+        evChosen = ev[ choice[i,t]];
+
+        if (outcome[i,t] >= 0) {
+          // Update ev for all decks
+          ef += 
Apun[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Arew[i] * PEval; + } else { + // Update ev for all decks + ef += Arew[i] * PEfreq_fic; + // Update chosendeck with stored value + ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; + ev[ choice[i,t]] = evChosen + Apun[i] * PEval; + } + + // Perseverance updating + pers[ choice[i,t] ] = 1; // perseverance term + pers /= (1 + K_tr); // decay + + // Utility of expected value and perseverance + util = ev + ef * betaF[i] + pers * betaP[i]; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/igt_pvl_decay.stan b/Python/hbayesdm/common/stan_files/igt_pvl_decay.stan new file mode 100644 index 00000000..2d908a19 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/igt_pvl_decay.stan @@ -0,0 +1,134 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + cons_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * ev); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // decay-RI + ev *= A[i]; + ev[choice[i, t]] += curUtil; + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * ev)); + + if (outcome[i, t] >= 
0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // decay-RI + ev *= A[i]; + ev[choice[i, t]] += curUtil; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/igt_pvl_delta.stan b/Python/hbayesdm/common/stan_files/igt_pvl_delta.stan new file mode 100644 index 00000000..05c6e870 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/igt_pvl_delta.stan @@ -0,0 +1,132 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + } +} +model { +// Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + cons_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * ev); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // delta + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * ev)); + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + } + + // delta + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/igt_vpp.stan b/Python/hbayesdm/common/stan_files/igt_vpp.stan new file 
mode 100644 index 00000000..61c2b831 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/igt_vpp.stan @@ -0,0 +1,188 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int choice[N, T]; + real outcome[N, T]; +} + +transformed data { + vector[4] initV; + initV = rep_vector(0.0, 4); +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[8] mu_pr; + vector[8] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] A_pr; + vector[N] alpha_pr; + vector[N] cons_pr; + vector[N] lambda_pr; + vector[N] epP_pr; + vector[N] epN_pr; + vector[N] K_pr; + vector[N] w_pr; +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] A; + vector[N] alpha; + vector[N] cons; + vector[N] lambda; + vector[N] epP; + vector[N] epN; + vector[N] K; + vector[N] w; + + for (i in 1:N) { + A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); + alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; + cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; + lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; + K[i] = Phi_approx(mu_pr[7] + sigma[7] * K_pr[i]); + w[i] = Phi_approx(mu_pr[8] + sigma[8] * w_pr[i]); + } + epP = mu_pr[5] + sigma[5] * epP_pr; + epN = mu_pr[6] + sigma[6] * epN_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1.0); + sigma[1:4] ~ normal(0, 0.2); + sigma[5:6] ~ cauchy(0, 1.0); + sigma[7:8] ~ normal(0, 0.2); + + // individual parameters + A_pr ~ normal(0, 1.0); + alpha_pr ~ normal(0, 1.0); + cons_pr ~ normal(0, 1.0); + lambda_pr ~ normal(0, 1.0); + epP_pr ~ normal(0, 1.0); + epN_pr ~ normal(0, 1.0); + K_pr ~ normal(0, 1.0); + w_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + vector[4] ev; + vector[4] p_next; + vector[4] str; + vector[4] pers; // perseverance + vector[4] V; // weighted sum of ev and pers + + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + pers = initV; // initial pers values + V = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + choice[i, t] ~ categorical_logit(theta * V); + + // perseverance decay + pers *= K[i]; // decay + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + pers[choice[i, t]] += epP[i]; // perseverance term + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + pers[choice[i, t]] += epN[i]; // perseverance term + } + + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + // calculate V + V = w[i] * ev + (1-w[i]) * pers; + } + } +} +generated quantities { + // For group level parameters + real mu_A; + real mu_alpha; + real mu_cons; + real mu_lambda; + real mu_epP; + real mu_epN; + real mu_K; + real mu_w; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_A = Phi_approx(mu_pr[1]); + mu_alpha = Phi_approx(mu_pr[2]) * 2; + mu_cons = Phi_approx(mu_pr[3]) * 5; + mu_lambda = Phi_approx(mu_pr[4]) * 10; + mu_epP = mu_pr[5]; + mu_epN = mu_pr[6]; + mu_K = Phi_approx(mu_pr[7]); + mu_w = Phi_approx(mu_pr[8]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[4] ev; + vector[4] p_next; + vector[4] str; + vector[4] pers; // perseverance + vector[4] V; // weighted sum of ev and 
pers + + real curUtil; // utility of curFb + real theta; // theta = 3^c - 1 + + // Initialize values + log_lik[i] = 0; + theta = pow(3, cons[i]) -1; + ev = initV; // initial ev values + pers = initV; // initial pers values + V = initV; + + for (t in 1:Tsubj[i]) { + // softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * V); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(theta * V)); + + // perseverance decay + pers *= K[i]; // decay + + if (outcome[i, t] >= 0) { // x(t) >= 0 + curUtil = pow(outcome[i, t], alpha[i]); + pers[choice[i, t]] += epP[i]; // perseverance term + } else { // x(t) < 0 + curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); + pers[choice[i, t]] += epN[i]; // perseverance term + } + + ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); + // calculate V + V = w[i] * ev + (1-w[i]) * pers; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/peer_ocu.stan b/Python/hbayesdm/common/stan_files/peer_ocu.stan new file mode 100644 index 00000000..cd0c52d5 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/peer_ocu.stan @@ -0,0 +1,115 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int condition[N, T]; // 0: solo, 1: ss, 2: mix, 3: rr + real p_gamble[N, T]; + real safe_Hpayoff[N, T]; + real safe_Lpayoff[N, T]; + real risky_Hpayoff[N, T]; + real risky_Lpayoff[N, T]; + int choice[N, T]; +} + +transformed data { +} + +parameters { + vector[3] mu_pr; + vector[3] sigma; + vector[N] rho_pr; + vector[N] tau_pr; + vector[N] ocu_pr; +} + +transformed parameters { + vector[N] rho; + vector[N] tau; + vector[N] ocu; + + for (i in 1:N) { + rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; + } + tau = exp(mu_pr[2] + sigma[2] * tau_pr); + ocu = mu_pr[3] + sigma[3] * ocu_pr; +} + +model { + // peer_ocu + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + + // individual parameters w/ Matt trick + rho_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + ocu_pr ~ normal(0, 1.0); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real U_safe; + real U_risky; + + U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); + U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); + if (condition[i, t] == 1) { // safe-safe + U_safe += ocu[i]; + } + if (condition[i, t] == 3) { // risky-risky + U_risky += ocu[i]; + } + choice[i, t] ~ bernoulli_logit(tau[i] * (U_risky - U_safe)); + } + } +} +generated quantities { + real mu_rho; + real mu_tau; + real mu_ocu; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_rho = Phi_approx(mu_pr[1]) * 2; + mu_tau = exp(mu_pr[2]); + mu_ocu = mu_pr[3]; + + { // local section, this saves time and space + for (i in 1:N) { + + // Initialize values + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + real U_safe; + real U_risky; + + U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); + U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); + if (condition[i, t] == 1) { // safe-safe + U_safe += ocu[i]; + } + if (condition[i, t] == 3) { // risky-risky + U_risky 
+= ocu[i]; + } + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | tau[i] * (U_risky - U_safe)); + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(tau[i] * (U_risky - U_safe))); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/pre/license.stan b/Python/hbayesdm/common/stan_files/pre/license.stan new file mode 100644 index 00000000..dec428a6 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/pre/license.stan @@ -0,0 +1,14 @@ +/* + hBayesDM is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + hBayesDM is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with hBayesDM. If not, see . +*/ diff --git a/Python/hbayesdm/common/stan_files/prl_ewa.stan b/Python/hbayesdm/common/stan_files/prl_ewa.stan new file mode 100644 index 00000000..234cf467 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_ewa.stan @@ -0,0 +1,179 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Experience-Weighted Attraction model by Ouden et al. (2013) Neuron + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] phi_pr; // 1-learning rate + vector[N] rho_pr; // experience decay factor + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] phi; + vector[N] rho; + vector[N] beta; + + for (i in 1:N) { + phi[i] = Phi_approx(mu_pr[1] + sigma[1] * phi_pr[i]); + rho[i] = Phi_approx(mu_pr[2] + sigma[2] * rho_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Individual parameters + phi_pr ~ normal(0, 1); + rho_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // Expected value + vector[2] ew; // Experience weight + + real ewt1; // Experience weight of trial (t - 1) + + // Initialize values + ev = initV; // initial ev values + ew = initV; // initial ew values + + for (t in 1:Tsubj[i]) { + // Softmax choice + choice[i, t] ~ categorical_logit(ev * beta[i]); + + // Store previous experience weight value + ewt1 = ew[choice[i, t]]; + + // Update experience weight for chosen stimulus + { + ew[choice[i, t]] *= rho[i]; + ew[choice[i, t]] += 1; + } + + // Update expected value of chosen stimulus + { + ev[choice[i, t]] *= phi[i] * ewt1; + ev[choice[i, t]] += outcome[i, t]; + ev[choice[i, t]] /= ew[choice[i, t]]; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_phi; + real mu_rho; + real mu_beta; + + // For log 
likelihood calculation + real log_lik[N]; + + // For model regressors + //real mr_ev[N, T, 2]; // Expected value + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + //real mr_ew[N, T, 2]; // Experience weight + real ew_c[N, T]; // Experience weight of the chosen option + real ew_nc[N, T]; // Experience weight of the non-chosen option + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + ew_c[i, t] = 0; + ew_nc[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_phi = Phi_approx(mu_pr[1]); + mu_rho = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // Expected value + vector[2] ew; // Experience weight + + real ewt1; // Experience weight of trial (t-1) + + // Initialize values + ev = initV; // initial ev values + ew = initV; // initial ew values + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); + + // Store values for model regressors + //mr_ev[i, t] = ev; + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + //mr_ew[i, t] = ew; + ew_c[i, t] = ew[choice[i, t]]; + ew_nc[i, t] = ew[3 - choice[i, t]]; + + // Store previous experience weight value + ewt1 = ew[choice[i, t]]; + + // Update experience weight for chosen stimulus + { + ew[choice[i, t]] *= rho[i]; + ew[choice[i, t]] += 1; + } + + // Update expected value of chosen stimulus + { + ev[choice[i, t]] *= phi[i] * ewt1; + ev[choice[i, t]] += outcome[i, t]; + ev[choice[i, t]] /= ew[choice[i, t]]; + } + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious.stan b/Python/hbayesdm/common/stan_files/prl_fictitious.stan new file mode 100644 index 00000000..0fb8d486 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_fictitious.stan @@ -0,0 +1,173 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } + alpha = mu_pr[2] + sigma[2] * alpha_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1] ~ normal(0, 0.2); + sigma[2] ~ cauchy(0, 1.0); + sigma[3] ~ normal(0, 0.2); + + // Individual 
parameters + eta_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // Compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // Prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } +} + +generated quantities { + // For group level parameters + real mu_eta; + real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; //Prediction error of the chosen option + real pe_nc[N, T]; //Prediction error of the non-chosen option + real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_alpha = mu_pr[2]; + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan b/Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan new file mode 100644 index 00000000..264d6c8f --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan @@ -0,0 +1,185 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + + int B; // Max number of blocks across subjects + int Bsubj[N]; // Number of blocks for each subject + + int T; // Max number of trials across subjects + int Tsubj[N, B]; // Number of trials/block for each subject + + int choice[N, B, T]; // Choice for each 
subject-block-trial + real outcome[N, B, T]; // Outcome (reward/loss) for each subject-block-trial +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } + alpha = mu_pr[2] + sigma[2] * alpha_pr; +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1] ~ normal(0, 0.2); + sigma[2] ~ cauchy(0, 1.0); + sigma[3] ~ normal(0, 0.2); + + // individual parameters + eta_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + for (bIdx in 1:Bsubj[i]) { // new + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i, bIdx])) { // new + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, bIdx, t] ~ categorical(prob); + //choice[i, t] ~ bernoulli(prob); + + // prediction error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new + PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new + + // value updating (learning) + ev[choice[i, bIdx, t]] += eta[i] * PE; //new + ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new + } // end of t loop + } // end of bIdx loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_eta; + real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, B, T]; // Expected value of the chosen option + real ev_nc[N, B, T]; // Expected value of the non-chosen option + + real pe_c[N, B, T]; //Prediction error of the chosen option + real pe_nc[N, B, T]; //Prediction error of the non-chosen option + real dv[N, B, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, B, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (b in 1:B) { + for (t in 1:T) { + ev_c[i, b, t] = 0; + ev_nc[i, b, t] = 0; + + pe_c[i, b, t] = 0; + pe_nc[i, b, t] = 0; + dv[i, b, t] = 0; + + y_pred[i, b, t] = -1; + } + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_alpha = mu_pr[2]; + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + + log_lik[i] = 0; + + for (bIdx in 1:Bsubj[i]) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i, bIdx])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; 
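+          // equivalently, prob[1] = inv_logit(beta[i] * ((ev[1] - ev[2]) - alpha[i])):
+          // a two-option softmax whose indifference point is shifted by alpha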
+ + log_lik[i] += categorical_lpmf(choice[i, bIdx, t] | prob); //new + + // generate posterior prediction for current trial + y_pred[i, bIdx, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new + PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new + + // Store values for model regressors + ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; + ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; + + pe_c[i, bIdx, t] = PE; + pe_nc[i, bIdx, t] = PEnc; + dv[i, bIdx, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, bIdx, t]] += eta[i] * PE; //new + ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new + } // end of t loop + } // end of bIdx loop + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan b/Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan new file mode 100644 index 00000000..daa0779c --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan @@ -0,0 +1,188 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) + */ + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pos_pr; // learning rate, positive PE + vector[N] eta_neg_pr; // learning rate, negative PE + vector[N] alpha_pr; // indecision point + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta_pos; + vector[N] eta_neg; + vector[N] alpha; + vector[N] beta; + + for (i in 1:N) { + eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); + eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); + beta[i] = Phi_approx(mu_pr[4] + sigma[4] * beta_pr[i]) * 10; + } + alpha = mu_pr[3] + sigma[3] * alpha_pr; +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma[1:2] ~ normal(0, 0.2); + sigma[3] ~ cauchy(0, 1.0); + sigma[4] ~ normal(0, 0.2); + + // individual parameters + eta_pos_pr ~ normal(0, 1); + eta_neg_pr ~ normal(0, 1); + alpha_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_eta_pos; + real mu_eta_neg; 
+ real mu_alpha; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; // Prediction error of the chosen option + real pe_nc[N, T]; // Prediction error of the non-chosen option + + real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta_pos = Phi_approx(mu_pr[1]); + mu_eta_neg = Phi_approx(mu_pr[2]); + mu_alpha = mu_pr[3]; + mu_beta = Phi_approx(mu_pr[4]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // Value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan b/Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan new file mode 100644 index 00000000..48f78a42 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan @@ -0,0 +1,180 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) with separate learning rates for +PE and -PE & without alpha (indecision point) + */ + +data { + int N; // Number of subjects + int T; // Max number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pos_pr; // learning rate, positive PE + vector[N] eta_neg_pr; // learning rate, negative PE + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta_pos; + vector[N] eta_neg; + vector[N] beta; + + for (i in 1:N) { + eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); + eta_neg[i] = Phi_approx(mu_pr[2] + 
sigma[2] * eta_neg_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + eta_pos_pr ~ normal(0, 1); + eta_neg_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_eta_pos; + real mu_eta_neg; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; // Prediction error of the chosen option + real pe_nc[N, T]; // Prediction error of the non-chosen option + + real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_eta_pos = Phi_approx(mu_pr[1]); + mu_eta_neg = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // Value updating (learning) + if (PE >= 0) { + ev[choice[i, t]] += eta_pos[i] * PE; + ev[3 - choice[i, t]] += eta_pos[i] * PEnc; + } else { + ev[choice[i, t]] += eta_neg[i] * PE; + ev[3 - choice[i, t]] += eta_neg[i] * PEnc; + } + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan b/Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan new file mode 100644 index 00000000..58a4053f --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan @@ -0,0 +1,165 @@ +#include /pre/license.stan + +/** + * Probabilistic 
Reversal Learning (PRL) Task + * + * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) without alpha (indecision point) + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[2] mu_pr; + vector[2] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] eta_pr; // learning rate + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] eta; + vector[N] beta; + + for (i in 1:N) { + eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Individual parameters + eta_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:(Tsubj[i])) { + // Compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + choice[i, t] ~ categorical(prob); + + // Prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } +} + +generated quantities { + // For group level parameters + real mu_eta; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + + real pe_c[N, T]; //Prediction error of the chosen option + real pe_nc[N, T]; //Prediction error of the non-chosen option + real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions, model regressors to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + + pe_c[i, t] = 0; + pe_nc[i, t] = 0; + dv[i, t] =0; + + y_pred[i, t] = -1; + } + } + + mu_eta = Phi_approx(mu_pr[1]); + mu_beta = Phi_approx(mu_pr[2]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // expected value + vector[2] prob; // probability + real prob_1_; + + real PE; // prediction error + real PEnc; // fictitious prediction error (PE-non-chosen) + + // Initialize values + ev = initV; // initial ev values + + log_lik[i] = 0; + + for (t in 1:(Tsubj[i])) { + // compute action probabilities + prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); + prob_1_ = prob[1]; + prob[2] = 1 - prob_1_; + + log_lik[i] += categorical_lpmf(choice[i, t] | prob); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(prob); + + // prediction error + PE = outcome[i, t] - ev[choice[i, t]]; + PEnc = -outcome[i, t] - ev[3-choice[i, t]]; + + // Store 
values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + + pe_c[i, t] = PE; + pe_nc[i, t] = PEnc; + dv[i, t] = PE - PEnc; + + // value updating (learning) + ev[choice[i, t]] += eta[i] * PE; + ev[3-choice[i, t]] += eta[i] * PEnc; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_rp.stan b/Python/hbayesdm/common/stan_files/prl_rp.stan new file mode 100644 index 00000000..a7303744 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_rp.stan @@ -0,0 +1,149 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Reward-Punishment Model by Ouden et al. (2013) Neuron + */ + +data { + int N; // Number of subjects + int T; // Maximum number of trials across subjects + int Tsubj[N]; // Number of trials/blocks for each subject + + int choice[N, T]; // The choices subjects made + real outcome[N, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Apun_pr; // learning rate (punishment) + vector[N] Arew_pr; // learning rate (reward) + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Apun; + vector[N] Arew; + vector[N] beta; + + for (i in 1:N) { + Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); + Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Apun_pr ~ normal(0, 1); + Arew_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define Values + vector[2] ev; // Expected value + real PE; // prediction error + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:Tsubj[i]) { + // Softmax choice + choice[i, t] ~ categorical_logit(ev * beta[i]); + + // Prediction Error + PE = outcome[i, t] - ev[choice[i, t]]; + + // Update expected value of chosen stimulus + if (outcome[i, t] > 0) + ev[choice[i, t]] += Arew[i] * PE; + else + ev[choice[i, t]] += Apun[i] * PE; + } + } +} + +generated quantities { + // For group level parameters + real mu_Apun; + real mu_Arew; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, T]; // Expected value of the chosen option + real ev_nc[N, T]; // Expected value of the non-chosen option + real pe[N, T]; // Prediction error + + // For posterior predictive check + real y_pred[N, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + ev_c[i, t] = 0; + ev_nc[i, t] = 0; + pe[i, t] = 0; + + y_pred[i, t] = -1; + } + } + + mu_Apun = Phi_approx(mu_pr[1]); + mu_Arew = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] ev; // Expected value + real PE; // Prediction error + + // Initialize values + ev = initV; // initial ev values + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = categorical_rng(softmax(ev * 
beta[i])); + + // Prediction Error + PE = outcome[i, t] - ev[choice[i, t]]; + + // Store values for model regressors + ev_c[i, t] = ev[choice[i, t]]; + ev_nc[i, t] = ev[3 - choice[i, t]]; + pe[i, t] = PE; + + // Update expected value of chosen stimulus + if (outcome[i, t] > 0) + ev[choice[i, t]] += Arew[i] * PE; + else + ev[choice[i, t]] += Apun[i] * PE; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan b/Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan new file mode 100644 index 00000000..8cd77c43 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan @@ -0,0 +1,161 @@ +#include /pre/license.stan + +/** + * Probabilistic Reversal Learning (PRL) Task + * + * Reward-Punishment Model with multiple blocks per subject by Ouden et al. (2013) Neuron + */ + +data { + int N; // Number of subjects + + int B; // Maximum number of blocks across subjects + int Bsubj[N]; // Number of blocks for each subject + + int T; // Maximum number of trials across subjects + int Tsubj[N, B]; // Number of trials/blocks for each subject + + int choice[N, B, T]; // The choices subjects made + real outcome[N, B, T]; // The outcome +} + +transformed data { + // Default value for (re-)initializing parameter vectors + vector[2] initV; + initV = rep_vector(0.0, 2); +} + +// Declare all parameters as vectors for vectorizing +parameters { + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] Apun_pr; // learning rate (punishment) + vector[N] Arew_pr; // learning rate (reward) + vector[N] beta_pr; // inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + vector[N] Apun; + vector[N] Arew; + vector[N] beta; + + for (i in 1:N) { + Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); + Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); + beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + Apun_pr ~ normal(0, 1); + Arew_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + for (bIdx in 1:Bsubj[i]) { // new + // Define Values + vector[2] ev; // Expected value + real PE; // Prediction error + + // Initialize values + ev = initV; // Initial ev values + + for (t in 1:Tsubj[i, bIdx]) { + // Softmax choice + choice[i, bIdx, t] ~ categorical_logit(ev * beta[i]); + + // Prediction Error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; + + // Update expected value of chosen stimulus + if (outcome[i, bIdx, t] > 0) + ev[choice[i, bIdx, t]] += Arew[i] * PE; + else + ev[choice[i, bIdx, t]] += Apun[i] * PE; + } + } + } +} + +generated quantities { + // For group level parameters + real mu_Apun; + real mu_Arew; + real mu_beta; + + // For log likelihood calculation + real log_lik[N]; + + // For model regressors + real ev_c[N, B, T]; // Expected value of the chosen option + real ev_nc[N, B, T]; // Expected value of the non-chosen option + real pe[N, B, T]; // Prediction error + + // For posterior predictive check + real y_pred[N, B, T]; + + // Initialize all the variables to avoid NULL values + for (i in 1:N) { + for (b in 1:B) { + for (t in 1:T) { + ev_c[i, b, t] = 0; + ev_nc[i, b, t] = 0; + pe[i, b, t] = 0; + + y_pred[i, b, t] = -1; + } + } + } + + mu_Apun = Phi_approx(mu_pr[1]); + mu_Arew = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 
1:N) { + + log_lik[i] = 0; + + for (bIdx in 1:Bsubj[i]) { // new + // Define values + vector[2] ev; // Expected value + real PE; // prediction error + + // Initialize values + ev = initV; // initial ev values + + for (t in 1:Tsubj[i, bIdx]) { + // Softmax choice + log_lik[i] += categorical_logit_lpmf(choice[i, bIdx, t] | ev * beta[i]); + + // generate posterior prediction for current trial + y_pred[i, bIdx, t] = categorical_rng(softmax(ev * beta[i])); + + // Prediction Error + PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; + + // Store values for model regressors + ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; + ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; + pe[i, bIdx, t] = PE; + + // Update expected value of chosen stimulus + if (outcome[i, bIdx, t] > 0) + ev[choice[i, bIdx, t]] += Arew[i] * PE; + else + ev[choice[i, bIdx, t]] += Apun[i] * PE; + } + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan b/Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan new file mode 100644 index 00000000..788b9a4e --- /dev/null +++ b/Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan @@ -0,0 +1,114 @@ +#include /pre/license.stan + +data { + int N; // Number of subjects + int T; // Maximum # of trials + int Tsubj[N]; // # of trials for acquisition phase + + int option1[N, T]; + int option2[N, T]; + int choice[N, T]; + real reward[N, T]; +} + +transformed data { + // Default values to initialize the vector of expected values + vector[6] initial_values; + initial_values = rep_vector(0, 6); +} + +parameters { + // Group-level parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level parameters for Matt trick + vector[N] alpha_pos_pr; + vector[N] alpha_neg_pr; + vector[N] beta_pr; +} + +transformed parameters { + vector[N] alpha_pos; + vector[N] alpha_neg; + vector[N] beta; + + alpha_pos = Phi_approx(mu_pr[1] + sigma[1] * alpha_pos_pr); + alpha_neg = Phi_approx(mu_pr[2] + sigma[2] * alpha_neg_pr); + beta = Phi_approx(mu_pr[3] + sigma[3] * beta_pr) * 10; +} + +model { + // Priors for group-level parameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // Priors for subject-level parameters + alpha_pos_pr ~ normal(0, 1); + alpha_neg_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + + for (i in 1:N) { + int co; // Chosen option + real delta; // Difference between two options + real pe; // Prediction error + real alpha; + vector[6] ev; // Expected values + + ev = initial_values; + + // Acquisition Phase + for (t in 1:Tsubj[i]) { + co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; + + // Luce choice rule + delta = ev[option1[i, t]] - ev[option2[i, t]]; + target += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); + + pe = reward[i, t] - ev[co]; + alpha = (pe >= 0) ? alpha_pos[i] : alpha_neg[i]; + ev[co] += alpha * pe; + } + } +} + +generated quantities { + // For group-level parameters + real mu_alpha_pos; + real mu_alpha_neg; + real mu_beta; + + // For log-likelihood calculation + real log_lik[N]; + + mu_alpha_pos = Phi_approx(mu_pr[1]); + mu_alpha_neg = Phi_approx(mu_pr[2]); + mu_beta = Phi_approx(mu_pr[3]) * 10; + + { + for (i in 1:N) { + int co; // Chosen option + real delta; // Difference between two options + real pe; // Prediction error + real alpha; + vector[6] ev; // Expected values + + ev = initial_values; + log_lik[i] = 0; + + // Acquisition Phase + for (t in 1:Tsubj[i]) { + co = (choice[i, t] > 0) ? 
option1[i, t] : option2[i, t]; + + // Luce choice rule + delta = ev[option1[i, t]] - ev[option2[i, t]]; + log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); + + pe = reward[i, t] - ev[co]; + alpha = (pe >= 0) ? alpha_pos[i] : alpha_neg[i]; + ev[co] += alpha * pe; + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/ra_noLA.stan b/Python/hbayesdm/common/stan_files/ra_noLA.stan new file mode 100644 index 00000000..c5c599c4 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ra_noLA.stan @@ -0,0 +1,95 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real gain[N, T]; + real loss[N, T]; // absolute loss amount + real cert[N, T]; + int gamble[N, T]; +} + +transformed data { +} + +parameters { + vector[2] mu_pr; + vector[2] sigma; + vector[N] rho_pr; + vector[N] tau_pr; +} + +transformed parameters { + vector[N] rho; + vector[N] tau; + + for (i in 1:N) { + rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; + tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30; + } +} + +model { + // ra_noLA: ra_prospect model without loss aversion; original model in Sokol-Hessner et al. 2009 PNAS + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + rho_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble; + real pGamble; + + evSafe = pow(cert[i, t], rho[i]); + evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i])); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + gamble[i, t] ~ bernoulli(pGamble); + } + } +} +generated quantities { + real mu_rho; + real mu_tau; + + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_rho = Phi_approx(mu_pr[1]) * 2; + mu_tau = Phi_approx(mu_pr[2]) * 30; + + { // local section, this saves time and space + for (i in 1:N) { + log_lik[i] = 0; + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble;
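+ // Note: the local block below mirrors the model block. With utility u(x) = x^rho
+ // and no loss-aversion weight: evSafe = cert^rho, evGamble = 0.5 * (gain^rho - loss^rho),
+ // and P(gamble) = inv_logit(tau * (evGamble - evSafe)).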
+ real pGamble; + + // loss[i, t]=absolute amount of loss (pre-converted in R) + evSafe = pow(cert[i, t], rho[i]); + evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i])); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGamble); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/ra_noRA.stan b/Python/hbayesdm/common/stan_files/ra_noRA.stan new file mode 100644 index 00000000..0f36c3be --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ra_noRA.stan @@ -0,0 +1,95 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real gain[N, T]; + real loss[N, T]; // absolute loss amount + real cert[N, T]; + int gamble[N, T]; +} + +transformed data { +} + +parameters { + vector[2] mu_pr; + vector[2] sigma; + vector[N] lambda_pr; + vector[N] tau_pr; +} + +transformed parameters { + vector[N] lambda; + vector[N] tau; + + for (i in 1:N) { + lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]) * 5; + tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30; + } +} + +model { + // ra_noRA: ra_prospect model without risk aversion; original model in Sokol-Hessner et al. 2009 PNAS + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + lambda_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble; + real pGamble; + + // loss[i, t]=absolute amount of loss (pre-converted in R) + evSafe = cert[i, t]; + evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + gamble[i, t] ~ bernoulli(pGamble); + } + } +} +generated quantities { + real mu_lambda; + real mu_tau; + + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_lambda = Phi_approx(mu_pr[1]) * 5; + mu_tau = Phi_approx(mu_pr[2]) * 30; + + { // local section, this saves time and space + for (i in 1:N) { + log_lik[i] = 0; + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble;
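+ // Note: the local block below mirrors the model block. Utility is linear here
+ // (no curvature parameter rho): evSafe = cert, evGamble = 0.5 * (gain - lambda * loss),
+ // and P(gamble) = inv_logit(tau * (evGamble - evSafe)).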
+ real pGamble; + + evSafe = cert[i, t]; + evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGamble); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/ra_prospect.stan b/Python/hbayesdm/common/stan_files/ra_prospect.stan new file mode 100644 index 00000000..542ea460 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ra_prospect.stan @@ -0,0 +1,97 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real gain[N, T]; + real loss[N, T]; // absolute loss amount + real cert[N, T]; + int gamble[N, T]; +} +transformed data { +} +parameters { + vector[3] mu_pr; + vector[3] sigma; + vector[N] rho_pr; + vector[N] lambda_pr; + vector[N] tau_pr; +} +transformed parameters { + vector[N] rho; + vector[N] lambda; + vector[N] tau; + + for (i in 1:N) { + rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; + lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5; + tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 30; + } +} +model { + // ra_prospect: Original model in Sokol-Hessner et al. 2009 PNAS + // hyper parameters + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + rho_pr ~ normal(0, 1.0); + lambda_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + + for (i in 1:N) { + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble; + real pGamble; + + // loss[i, t]=absolute amount of loss (pre-converted in R) + evSafe = pow(cert[i, t], rho[i]); + evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(loss[i, t], rho[i])); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + gamble[i, t] ~ bernoulli(pGamble); + } + } +} +generated quantities { + real mu_rho; + real mu_lambda; + real mu_tau; + + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_rho = Phi_approx(mu_pr[1]) * 2; + mu_lambda = Phi_approx(mu_pr[2]) * 5; + mu_tau = Phi_approx(mu_pr[3]) * 30; + + { // local section, this saves time and space + for (i in 1:N) { + log_lik[i] = 0; + for (t in 1:Tsubj[i]) { + real evSafe; // evSafe, evGamble, pGamble are declared as scalars (not arrays) to save memory and increase speed. + real evGamble;
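+ // Note: the local block below mirrors the model block, with the full prospect-theory
+ // utility: evSafe = cert^rho, evGamble = 0.5 * (gain^rho - lambda * loss^rho), and
+ // P(gamble) = inv_logit(tau * (evGamble - evSafe)). loss[i, t] is already a positive
+ // (absolute) amount, so the fabs() below does not change its value.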
+ real pGamble; + + evSafe = pow(cert[i, t], rho[i]); + evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(fabs(loss[i, t]), rho[i])); + pGamble = inv_logit(tau[i] * (evGamble - evSafe)); + log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(pGamble); + } + } + } +} + diff --git a/Python/hbayesdm/common/stan_files/rdt_happiness.stan b/Python/hbayesdm/common/stan_files/rdt_happiness.stan new file mode 100644 index 00000000..3abb9e18 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/rdt_happiness.stan @@ -0,0 +1,146 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real gain[N, T]; + real loss[N, T]; // absolute loss amount + real cert[N, T]; + int type[N, T]; + int gamble[N, T]; + real outcome[N, T]; + real happy[N, T]; + real RT_happy[N, T]; +} +transformed data { +} +parameters { + vector[6] mu_pr; + vector[6] sigma; + vector[N] w0_pr; + vector[N] w1_pr; + vector[N] w2_pr; + vector[N] w3_pr; + vector[N] gam_pr; + vector[N] sig_pr; +} +transformed parameters { + vector[N] w0; + vector[N] w1; + vector[N] w2; + vector[N] w3; + vector[N] gam; + vector[N] sig; + + w0 = mu_pr[1] + sigma[1] * w0_pr; + w1 = mu_pr[2] + sigma[2] * w1_pr; + w2 = mu_pr[3] + sigma[3] * w2_pr; + w3 = mu_pr[4] + sigma[4] * w3_pr; + + for (i in 1:N) { + gam[i] = Phi_approx(mu_pr[5] + sigma[5] * gam_pr[i]); + } + sig = exp(mu_pr[6] + sigma[6] * sig_pr); +} +model { + mu_pr ~ normal(0, 1.0); + sigma ~ normal(0, 0.2); + + // individual parameters w/ Matt trick + w0_pr ~ normal(0, 1.0); + w1_pr ~ normal(0, 1.0); + w2_pr ~ normal(0, 1.0); + w3_pr ~ normal(0, 1.0); + gam_pr ~ normal(0, 1.0); + sig_pr ~ normal(0, 1.0); + + for (i in 1:N) { + real cert_sum; + real ev_sum; + real rpe_sum; + + + cert_sum = 0; + ev_sum = 0; + rpe_sum = 0; + + for (t in 1:Tsubj[i]) { + if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ + happy[i,t] ~ normal(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + } + + if(gamble[i,t] == 0){ + cert_sum += type[i,t] * cert[i,t]; + } else { + ev_sum += 0.5 * (gain[i,t] - loss[i,t]); + rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); + } + + cert_sum *= gam[i]; + ev_sum *= gam[i]; + rpe_sum *= gam[i]; + } + } +} +generated quantities { + real mu_w0; + real mu_w1; + real mu_w2; + real mu_w3; + real mu_gam; + real mu_sig; + + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_w0 = mu_pr[1]; + mu_w1 = mu_pr[2]; + mu_w2 = mu_pr[3]; + mu_w3 = mu_pr[4]; + mu_gam = Phi_approx(mu_pr[5]); + mu_sig = exp(mu_pr[6]); + + + { // local section, this saves time and space + for (i in 1:N) { + real cert_sum; + real ev_sum; + real rpe_sum; + + log_lik[i] = 0; + + cert_sum = 0; + ev_sum = 0; + rpe_sum = 0; + + for (t in 1:Tsubj[i]) { + if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ + log_lik[i] += normal_lpdf(happy[i, t] | w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + y_pred[i, t] = normal_rng(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); + } + + if(gamble[i,t] == 0){ + cert_sum += type[i,t] * cert[i,t]; + } else { + ev_sum += 0.5 * (gain[i,t] - loss[i,t]); + rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); + } + + cert_sum *= gam[i]; + ev_sum *= gam[i]; + rpe_sum *= gam[i]; + } + } + } +} + diff --git 
a/Python/hbayesdm/common/stan_files/ts_par4.stan b/Python/hbayesdm/common/stan_files/ts_par4.stan new file mode 100644 index 00000000..c615f6d0 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ts_par4.stan @@ -0,0 +1,204 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int level1_choice[N,T]; // 1: left, 2: right + int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 + int reward[N,T]; + real trans_prob; +} +transformed data { +} +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[4] mu_pr; + vector[4] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] a_pr; + vector[N] beta_pr; + vector[N] pi_pr; + vector[N] w_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] a; + vector[N] beta; + vector[N] pi; + vector[N] w; + + for (i in 1:N) { + a[i] = Phi_approx( mu_pr[1] + sigma[1] * a_pr[i] ); + beta[i] = exp( mu_pr[2] + sigma[2] * beta_pr[i] ); + pi[i] = Phi_approx( mu_pr[3] + sigma[3] * pi_pr[i] ) * 5; + w[i] = Phi_approx( mu_pr[4] + sigma[4] * w_pr[i] ); + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + a_pr ~ normal(0, 1); + beta_pr ~ normal(0, 1); + pi_pr ~ normal(0, 1); + w_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. 
of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_a; + real mu_beta; + real mu_pi; + real mu_w; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred_step1[N,T]; + real y_pred_step2[N,T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred_step1[i,t] = -1; + y_pred_step2[i,t] = -1; + } + } + + // Generate group level parameter values + mu_a = Phi_approx( mu_pr[1] ); + mu_beta = exp( mu_pr[2] ); + mu_pi = Phi_approx( mu_pr[3] ) * 5; + mu_w = Phi_approx( mu_pr[4] ); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + 
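+ // Note: modulus(x, 2) is Stan's integer remainder, so the next line recodes
+ // level2_choice 1,3 (odd) --> 0 and 2,4 (even) --> 1, i.e. an indicator for
+ // choosing the second stimulus of the presented level-2 pair.
+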
level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + // Level 2 --> choose one of two level 2 options + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); + } + log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); + + // generate posterior prediction for current trial + y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); + y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop + } +} + diff --git a/Python/hbayesdm/common/stan_files/ts_par6.stan b/Python/hbayesdm/common/stan_files/ts_par6.stan new file mode 100644 index 00000000..b472afa0 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ts_par6.stan @@ -0,0 +1,213 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int level1_choice[N,T]; // 1: left, 2: right + int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 + int reward[N,T]; + real trans_prob; +} +transformed data { +} +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[6] mu_pr; + vector[6] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] a1_pr; + vector[N] beta1_pr; + vector[N] a2_pr; + vector[N] beta2_pr; + vector[N] pi_pr; + vector[N] w_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] a1; + vector[N] beta1; + vector[N] a2; + vector[N] beta2; + vector[N] pi; + vector[N] w; + + for (i in 1:N) { + a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); + beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); + a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); + beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); + pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; + w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + a1_pr ~ normal(0, 1); + beta1_pr ~ normal(0, 1); + a2_pr ~ normal(0, 1); + beta2_pr ~ normal(0, 1); + pi_pr ~ normal(0, 1); + w_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // Initialize prob.
of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_a1; + real mu_beta1; + real mu_a2; + real mu_beta2; + real mu_pi; + real mu_w; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred_step1[N,T]; + real y_pred_step2[N,T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred_step1[i,t] = -1; + y_pred_step2[i,t] = -1; + } + } + + // Generate group level parameter values + mu_a1 = Phi_approx( mu_pr[1] ); + mu_beta1 = exp( mu_pr[2] ); + mu_a2 = Phi_approx( mu_pr[3] ); + mu_beta2 = exp( mu_pr[4] ); + mu_pi = Phi_approx( mu_pr[5] ) * 5; + mu_w = Phi_approx( mu_pr[6] ); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 + // Level 2 --> choose one of two level 2 options + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); + + // generate posterior prediction for current trial + y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); + y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); + + // After observing the reward at Level 2... 
+ // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + + } // end of t loop + } // end of i loop + } +} + diff --git a/Python/hbayesdm/common/stan_files/ts_par7.stan b/Python/hbayesdm/common/stan_files/ts_par7.stan new file mode 100644 index 00000000..089042c2 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ts_par7.stan @@ -0,0 +1,217 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + int level1_choice[N,T]; // 1: left, 2: right + int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 + int reward[N,T]; + real trans_prob; +} +transformed data { +} +parameters { + // Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[7] mu_pr; + vector[7] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] a1_pr; + vector[N] beta1_pr; + vector[N] a2_pr; + vector[N] beta2_pr; + vector[N] pi_pr; + vector[N] w_pr; + vector[N] lambda_pr; +} +transformed parameters { + // Transform subject-level raw parameters + vector[N] a1; + vector[N] beta1; + vector[N] a2; + vector[N] beta2; + vector[N] pi; + vector[N] w; + vector[N] lambda; + + for (i in 1:N) { + a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); + beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); + a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); + beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); + pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; + w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); + lambda[i] = Phi_approx( mu_pr[7] + sigma[7] * lambda_pr[i] ); + } +} +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + a1_pr ~ normal(0, 1); + beta1_pr ~ normal(0, 1); + a2_pr ~ normal(0, 1); + beta2_pr ~ normal(0, 1); + pi_pr ~ normal(0, 1); + w_pr ~ normal(0, 1); + lambda_pr ~ normal(0, 1); + + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 + + // After observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_a1; + real mu_beta1; + real mu_a2; + real mu_beta2; + real mu_pi; + real mu_w; + real mu_lambda; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred_step1[N,T]; + real y_pred_step2[N,T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred_step1[i,t] = -1; + y_pred_step2[i,t] = -1; + } + } + + // Generate group level parameter values + mu_a1 = Phi_approx( mu_pr[1] ); + mu_beta1 = exp( mu_pr[2] ); + mu_a2 = Phi_approx( mu_pr[3] ); + mu_beta2 = exp( mu_pr[4] ); + mu_pi = Phi_approx( mu_pr[5] ) * 5; + mu_w = Phi_approx( mu_pr[6] ); + mu_lambda = Phi_approx( mu_pr[7] ); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) + vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) + vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) + real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 + real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 + int level1_choice_01; + int level2_choice_01; + + // Initialize values + v_mb = rep_vector(0.0, 2); + v_mf = rep_vector(0.0, 6); + v_hybrid = rep_vector(0.0, 2); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + // compute v_mb + v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 + v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 + + // compute v_hybrid + v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum + v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum + + // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial + // level1_choice=1 --> -1, level1_choice=2 --> 1 + level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 + if(t == 1){ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); + } else{ + level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); + } + log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); + + // Observe Level2 and update Level1 of the chosen option + v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); + + // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** + level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 + // Level 2 --> choose one of two level 2 options + if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); + } else { // level2_choice = 1 or 2 + level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); + } + log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); + + // generate posterior prediction for current trial + y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); + y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); + + // After 
observing the reward at Level 2... + // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward + v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); + + // Update Level 1 v_mf + v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); + } // end of t loop + } // end of i loop + } +} + diff --git a/Python/hbayesdm/common/stan_files/ug_bayes.stan b/Python/hbayesdm/common/stan_files/ug_bayes.stan new file mode 100644 index 00000000..6136e708 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ug_bayes.stan @@ -0,0 +1,167 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real offer[N, T]; + int accept[N, T]; +} + +transformed data { + real initV; + real mu0; + real k0; + real sig20; + real nu0; + + initV = 0.0; + mu0 = 10.0; // initial expectation + k0 = 4.0; + sig20 = 4.0; + nu0 = 10.0; +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // alpha: envy + vector[N] beta_pr; // beta: guilt + vector[N] tau_pr; // tau: inverse temperature +} + +transformed parameters { + // Transform subject-level raw parameters + real alpha[N]; + real beta[N]; + real tau[N]; + + for (i in 1:N) { + alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; + beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; + tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10; + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + alpha_pr ~ normal(0, 1.0); + beta_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + real util; + real mu_old; + real mu_new; + real k_old; + real k_new; + real sig2_old; + real sig2_new; + real nu_old; + real nu_new; + real PE; // not required for computation + + // Initialize values + mu_old = mu0; + k_old = k0; + sig2_old = sig20; + nu_old = nu0; + + for (t in 1:Tsubj[i]) { + k_new = k_old + 1; + nu_new = nu_old + 1; + mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; + sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); + + PE = offer[i, t] - mu_old; + util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); + + accept[i, t] ~ bernoulli_logit(util * tau[i]); + + // replace old ones with new ones + mu_old = mu_new; + sig2_old = sig2_new; + k_old = k_new; + nu_old = nu_new; + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_beta; + real mu_tau; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 20; + mu_beta = Phi_approx(mu_pr[2]) * 10; + mu_tau = Phi_approx(mu_pr[3]) * 10; + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real util; + real mu_old; + real mu_new; + real k_old; + real k_new; + real sig2_old; + real sig2_new; + real nu_old; + real nu_new; + real PE; // not required for computation + + // Initialize values + mu_old = mu0; + k_old = k0; + sig2_old = sig20; + nu_old = nu0; + + log_lik[i] = 0; + + for (t in 
1:Tsubj[i]) { + k_new = k_old + 1; + nu_new = nu_old + 1; + mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; + sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); + + PE = offer[i, t] - mu_old; + util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); + + log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); + + // replace old ones with new ones + mu_old = mu_new; + sig2_old = sig2_new; + k_old = k_new; + nu_old = nu_new; + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/ug_delta.stan b/Python/hbayesdm/common/stan_files/ug_delta.stan new file mode 100644 index 00000000..9bb70e0a --- /dev/null +++ b/Python/hbayesdm/common/stan_files/ug_delta.stan @@ -0,0 +1,129 @@ +#include /pre/license.stan + +data { + int N; + int T; + int Tsubj[N]; + real offer[N, T]; + int accept[N, T]; +} + +transformed data { +} + +parameters { +// Declare all parameters as vectors for vectorizing + // Hyper(group)-parameters + vector[3] mu_pr; + vector[3] sigma; + + // Subject-level raw parameters (for Matt trick) + vector[N] alpha_pr; // alpha: Envy (sensitivity to norm prediction error) + vector[N] tau_pr; // tau: Inverse temperature + vector[N] ep_pr; // ep: Norm adaptation rate +} + +transformed parameters { + // Transform subject-level raw parameters + real alpha[N]; + real tau[N]; + real ep[N]; + + for (i in 1:N) { + alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; + tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 10; + ep[i] = Phi_approx(mu_pr[3] + sigma[3] * ep_pr[i]); + } +} + +model { + // Hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + alpha_pr ~ normal(0, 1.0); + tau_pr ~ normal(0, 1.0); + ep_pr ~ normal(0, 1.0); + + for (i in 1:N) { + // Define values + real f; // Internal norm + real PE; // Prediction error + real util; // Utility of offer + + // Initialize values + f = 10.0; + + for (t in 1:Tsubj[i]) { + // calculate prediction error + PE = offer[i, t] - f; + + // Update utility + util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); + + // Sampling statement + accept[i, t] ~ bernoulli_logit(util * tau[i]); + + // Update internal norm + f += ep[i] * PE; + + } // end of t loop + } // end of i loop +} + +generated quantities { + // For group level parameters + real mu_alpha; + real mu_tau; + real mu_ep; + + // For log likelihood calculation + real log_lik[N]; + + // For posterior predictive check + real y_pred[N, T]; + + // Set all posterior predictions to 0 (avoids NULL values) + for (i in 1:N) { + for (t in 1:T) { + y_pred[i, t] = -1; + } + } + + mu_alpha = Phi_approx(mu_pr[1]) * 20; + mu_tau = Phi_approx(mu_pr[2]) * 10; + mu_ep = Phi_approx(mu_pr[3]); + + { // local section, this saves time and space + for (i in 1:N) { + // Define values + real f; // Internal norm + real PE; // prediction error + real util; // Utility of offer + + // Initialize values + f = 10.0; + log_lik[i] = 0.0; + + for (t in 1:Tsubj[i]) { + // calculate prediction error + PE = offer[i, t] - f; + + // Update utility + util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); + + // Calculate log likelihood + log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); + + // generate posterior prediction for current trial + y_pred[i, t] = 
bernoulli_rng(inv_logit(util * tau[i])); + + // Update internal norm + f += ep[i] * PE; + + } // end of t loop + } // end of i loop + } // end of local section +} + diff --git a/Python/hbayesdm/common/stan_files/wcs_sql.stan b/Python/hbayesdm/common/stan_files/wcs_sql.stan new file mode 100644 index 00000000..81b8ce17 --- /dev/null +++ b/Python/hbayesdm/common/stan_files/wcs_sql.stan @@ -0,0 +1,176 @@ +#include /pre/license.stan + +data { + int N; // number of subjects + int T; // max trial + int Tsubj[N]; // number of max trials per subject + + int choice[N, 4, T]; // subject's deck choice within a trial (1, 2, 3 and 4) + int outcome[N, T]; // whether subject's choice is correct or not within a trial (1 and 0) + matrix[1, 3] choice_match_att[N, T]; // indicates which dimension the chosen card matches to within a trial + matrix[3, 4] deck_match_rule[T]; // indicates which dimension(color, form, number) each of the 4 decks matches to within a trial +} + +transformed data { + matrix[1, 3] initAtt; // each subject start with an even attention to each dimension + matrix[1, 3] unit; // used to flip attention after punishing feedback inside the model + + initAtt = rep_matrix(1.0/3.0, 1, 3); + unit = rep_matrix(1.0, 1, 3); +} + +parameters { + // hyper parameters + vector[3] mu_pr; + vector[3] sigma; + + // subject-level raw parameters (for Matt trick) + vector[N] r_pr; // sensitivity to rewarding feedback (reward learning rate) + vector[N] p_pr; // sensitivity to punishing feedback (punishment learning rate) + vector[N] d_pr; // decision consistency (inverse temperature) +} + +transformed parameters { + // transform subject-level raw parameters + vector[N] r; + vector[N] p; + vector[N] d; + + for (i in 1:N) { + r[i] = Phi_approx( mu_pr[1] + sigma[1] * r_pr[i] ); + p[i] = Phi_approx( mu_pr[2] + sigma[2] * p_pr[i] ); + d[i] = Phi_approx( mu_pr[3] + sigma[3] * d_pr[i] ) * 5; + } +} + +model { + // hyperparameters + mu_pr ~ normal(0, 1); + sigma ~ normal(0, 0.2); + + // individual parameters + r_pr ~ normal(0, 1); + p_pr ~ normal(0, 1); + d_pr ~ normal(0, 1); + + for (i in 1:N) { + // define values + vector[4] pred_prob_mat; // predicted probability of choosing a deck in each trial based on attention + matrix[1, 3] subj_att; // subject's attention to each dimension + matrix[1, 3] att_signal; // signal where a subject has to pay attention after reward/punishment + real sum_att_signal; // temporary variable to calculate sum(att_signal) + matrix[1, 3] tmpatt; // temporary variable to calculate subj_att + vector[4] tmpp; // temporary variable to calculate pred_prob_mat + + // initiate values + subj_att = initAtt; + pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); + + for (t in 1:Tsubj[i]) { + // multinomial choice + choice[i,,t] ~ multinomial(pred_prob_mat); + + // re-distribute attention after getting a feedback + if (outcome[i,t] == 1) { + att_signal = subj_att .* choice_match_att[i,t]; + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; + } else { + att_signal = subj_att .* (unit - choice_match_att[i,t]); + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; + } + + // scaling to avoid log(0) + subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; + + tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); + tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); + tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); + + // repeat until the final trial + if (t < Tsubj[i]) { + tmpp = 
to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; + pred_prob_mat = tmpp/sum(tmpp); + } + + } // end of trial loop + } // end of subject loop +} +generated quantities { + // for group level parameters + real mu_r; + real mu_p; + real mu_d; + + // for log-likelihood calculation + real log_lik[N]; + + // for posterior predictive check + int y_pred[N, 4, T]; + + // initiate the variable to avoid NULL values + for (i in 1:N) { + for (t in 1:T) { + for (deck in 1:4) { + y_pred[i,deck,t] = -1; + } + } + } + + mu_r = Phi_approx(mu_pr[1]); + mu_p = Phi_approx(mu_pr[2]); + mu_d = Phi_approx(mu_pr[3]) * 5; + + { // local section, this saves time and space + for (i in 1:N) { + matrix[1, 3] subj_att; + matrix[1, 3] att_signal; + vector[4] pred_prob_mat; + + matrix[1, 3] tmpatt; + vector[4] tmpp; + + real sum_att_signal; + + subj_att = initAtt; + pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); + + log_lik[i] = 0; + + for (t in 1:Tsubj[i]) { + + log_lik[i] += multinomial_lpmf(choice[i,,t] | pred_prob_mat); + + y_pred[i,,t] = multinomial_rng(pred_prob_mat, 1); + + if(outcome[i,t] == 1) { + att_signal = subj_att .* choice_match_att[i,t]; + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; + } else { + att_signal = subj_att .* (unit - choice_match_att[i,t]); + sum_att_signal = sum(att_signal); + att_signal /= sum_att_signal; + tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; + } + + subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; + + tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); + tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); + tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); + + if(t < Tsubj[i]) { + tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; + pred_prob_mat = tmpp/sum(tmpp); + } + + } // end of trial loop + } // end of subject loop + } // end of local section +} + diff --git a/Python/hbayesdm/diagnostics.py b/Python/hbayesdm/diagnostics.py new file mode 100644 index 00000000..4897a9a6 --- /dev/null +++ b/Python/hbayesdm/diagnostics.py @@ -0,0 +1,136 @@ +from typing import List, Dict, Sequence, Union + +import numpy as np +import pandas as pd +import matplotlib.pyplot as plt +import arviz as az + +from hbayesdm.base import TaskModel + +__all__ = ['rhat', 'print_fit', 'hdi', 'plot_hdi'] + + +def rhat(model_data: TaskModel, + less: float = None) -> Dict[str, Union[List, bool]]: + """Function for extracting Rhat values from hbayesdm output. + + Convenience function for extracting Rhat values from hbayesdm output. + Also possible to check if all Rhat values are less than a specified value. + + Parameters + ---------- + model_data + Output instance of running an hbayesdm model function. + less + [Optional] Upper-bound value to compare extracted Rhat values to. + + Returns + ------- + Dict + Keys are names of the parameters; values are their Rhat values. + Or if `less` was specified, the dictionary values will hold `True` if + all Rhat values (of that parameter) are less than or equal to `less`. + """ + rhat_data = az.rhat(model_data.fit) + if less is None: + return {v.name: v.values.tolist() + for v in rhat_data.data_vars.values()} + else: + return {v.name: v.values.item() + for v in (rhat_data.max() <= less).data_vars.values()} + + +def print_fit(*args: TaskModel, ic: str = 'loo') -> pd.DataFrame: + """Print model-fits (mean LOOIC or WAIC values) of hbayesdm models. + + Parameters + ---------- + args + Output instances of running hbayesdm model functions. + ic + Information criterion (defaults to 'loo'). 
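+ One of 'loo' (PSIS approximate leave-one-out cross-validation) or
+ 'waic'; these are the options accepted by `arviz.compare`.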
+ + Returns + ------- + pd.DataFrame + Model-fit info per each hbayesdm output given as argument(s). + """ + ic_options = ('loo', 'waic') + if ic not in ic_options: + raise RuntimeError( + 'Information Criterion (ic) must be one of ' + repr(ic_options)) + dataset_dict = { + model_data.model: + az.from_pystan(model_data.fit, log_likelihood='log_lik') + for model_data in args + } + return az.compare(dataset_dict=dataset_dict, ic=ic) + + +def hdi(x: np.ndarray, credible_interval: float = 0.94) -> np.ndarray: + """Calculate highest density interval (HDI). + + This function acts as an alias to `arviz.hpd` function. + + Parameters + ---------- + x + Array containing MCMC samples. + credible_interval + Credible interval to compute. Defaults to 0.94. + + Returns + ------- + np.ndarray + Array containing the lower and upper value of the computed interval. + """ + return az.hpd(x, credible_interval=credible_interval) + + +def plot_hdi(x: np.ndarray, + credible_interval: float = 0.94, + title: str = None, + xlabel: str = 'Value', + ylabel: str = 'Density', + point_estimate: str = None, + bins: Union[int, Sequence, str] = 'auto', + round_to: int = 2, + **kwargs): + """Plot highest density interval (HDI). + + This function redirects input to `arviz.plot_posterior` function. + + Parameters + ---------- + x + Array containing MCMC samples. + credible_interval + Credible interval to plot. Defaults to 0.94. + title + String to set as title of plot. + xlabel + String to set as the x-axis label. + ylabel + String to set as the y-axis label. + point_estimate + Defaults to None. Possible options are 'mean', 'median', 'mode'. + bins + Controls the number of bins. Defaults to 'auto'. + Accepts the same values (or keywords) as plt.hist() does. + round_to + Controls formatting for floating point numbers. Defaults to 2. + **kwargs + Passed as-is to plt.hist(). 
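+
+ Examples
+ --------
+ A minimal sketch; the samples below are a hypothetical stand-in for real
+ MCMC draws, used only for illustration:
+
+ .. code:: python
+
+ import numpy as np
+ samples = np.random.normal(0.5, 0.1, size=4000) # stand-in draws
+ plot_hdi(samples, credible_interval=0.94, title='alpha',
+ point_estimate='mean')
+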
+ """ + kwargs.setdefault('color', 'black') + ax = az.plot_posterior(x, + kind='hist', + credible_interval=credible_interval, + point_estimate=point_estimate, + bins=bins, + round_to=round_to, + **kwargs).item() + ax.set_title(title) + ax.set_xlabel(xlabel) + ax.set_ylabel(ylabel) + plt.show() diff --git a/Python/hbayesdm/models/__init__.py b/Python/hbayesdm/models/__init__.py new file mode 100644 index 00000000..6778b127 --- /dev/null +++ b/Python/hbayesdm/models/__init__.py @@ -0,0 +1,95 @@ +from hbayesdm.models._bandit2arm_delta import bandit2arm_delta +from hbayesdm.models._bandit4arm2_kalman_filter import bandit4arm2_kalman_filter +from hbayesdm.models._bandit4arm_2par_lapse import bandit4arm_2par_lapse +from hbayesdm.models._bandit4arm_4par import bandit4arm_4par +from hbayesdm.models._bandit4arm_lapse import bandit4arm_lapse +from hbayesdm.models._bandit4arm_lapse_decay import bandit4arm_lapse_decay +from hbayesdm.models._bandit4arm_singleA_lapse import bandit4arm_singleA_lapse +from hbayesdm.models._bart_par4 import bart_par4 +from hbayesdm.models._choiceRT_ddm import choiceRT_ddm +from hbayesdm.models._choiceRT_ddm_single import choiceRT_ddm_single +from hbayesdm.models._cra_exp import cra_exp +from hbayesdm.models._cra_linear import cra_linear +from hbayesdm.models._dbdm_prob_weight import dbdm_prob_weight +from hbayesdm.models._dd_cs import dd_cs +from hbayesdm.models._dd_cs_single import dd_cs_single +from hbayesdm.models._dd_exp import dd_exp +from hbayesdm.models._dd_hyperbolic import dd_hyperbolic +from hbayesdm.models._dd_hyperbolic_single import dd_hyperbolic_single +from hbayesdm.models._gng_m1 import gng_m1 +from hbayesdm.models._gng_m2 import gng_m2 +from hbayesdm.models._gng_m3 import gng_m3 +from hbayesdm.models._gng_m4 import gng_m4 +from hbayesdm.models._igt_orl import igt_orl +from hbayesdm.models._igt_pvl_decay import igt_pvl_decay +from hbayesdm.models._igt_pvl_delta import igt_pvl_delta +from hbayesdm.models._igt_vpp import igt_vpp +from hbayesdm.models._peer_ocu import peer_ocu +from hbayesdm.models._prl_ewa import prl_ewa +from hbayesdm.models._prl_fictitious import prl_fictitious +from hbayesdm.models._prl_fictitious_multipleB import prl_fictitious_multipleB +from hbayesdm.models._prl_fictitious_rp import prl_fictitious_rp +from hbayesdm.models._prl_fictitious_rp_woa import prl_fictitious_rp_woa +from hbayesdm.models._prl_fictitious_woa import prl_fictitious_woa +from hbayesdm.models._prl_rp import prl_rp +from hbayesdm.models._prl_rp_multipleB import prl_rp_multipleB +from hbayesdm.models._pst_gainloss_Q import pst_gainloss_Q +from hbayesdm.models._ra_noLA import ra_noLA +from hbayesdm.models._ra_noRA import ra_noRA +from hbayesdm.models._ra_prospect import ra_prospect +from hbayesdm.models._rdt_happiness import rdt_happiness +from hbayesdm.models._ts_par4 import ts_par4 +from hbayesdm.models._ts_par6 import ts_par6 +from hbayesdm.models._ts_par7 import ts_par7 +from hbayesdm.models._ug_bayes import ug_bayes +from hbayesdm.models._ug_delta import ug_delta +from hbayesdm.models._wcs_sql import wcs_sql + +__all__ = [ + 'bandit2arm_delta', + 'bandit4arm2_kalman_filter', + 'bandit4arm_2par_lapse', + 'bandit4arm_4par', + 'bandit4arm_lapse', + 'bandit4arm_lapse_decay', + 'bandit4arm_singleA_lapse', + 'bart_par4', + 'choiceRT_ddm', + 'choiceRT_ddm_single', + 'cra_exp', + 'cra_linear', + 'dbdm_prob_weight', + 'dd_cs', + 'dd_cs_single', + 'dd_exp', + 'dd_hyperbolic', + 'dd_hyperbolic_single', + 'gng_m1', + 'gng_m2', + 'gng_m3', + 'gng_m4', + 'igt_orl', + 
'igt_pvl_decay', + 'igt_pvl_delta', + 'igt_vpp', + 'peer_ocu', + 'prl_ewa', + 'prl_fictitious', + 'prl_fictitious_multipleB', + 'prl_fictitious_rp', + 'prl_fictitious_rp_woa', + 'prl_fictitious_woa', + 'prl_rp', + 'prl_rp_multipleB', + 'pst_gainloss_Q', + 'ra_noLA', + 'ra_noRA', + 'ra_prospect', + 'rdt_happiness', + 'ts_par4', + 'ts_par6', + 'ts_par7', + 'ug_bayes', + 'ug_delta', + 'wcs_sql', +] diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py new file mode 100644 index 00000000..d35ae2cf --- /dev/null +++ b/Python/hbayesdm/models/_bandit2arm_delta.py @@ -0,0 +1,242 @@ +""" +Generated by template. Do not edit by hand. +""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit2arm_preprocess_func + +__all__ = ['bandit2arm_delta'] + + +class Bandit2ArmDelta(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit2arm', + model_name='delta', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('A', (0, 0.5, 1)), + ('tau', (0, 1, 5)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('A', 'learning rate'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit2arm_preprocess_func + + +def bandit2arm_delta( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """2-Armed Bandit Task - Rescorla-Wagner (Delta) Model + + Hierarchical Bayesian Modeling of the 2-Armed Bandit Task [Erev2010]_, [Hertwig2004]_ + using Rescorla-Wagner (Delta) Model with the following parameters: + "A" (learning rate), "tau" (inverse temperature). + + + + .. [Erev2010] Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683 + .. [Hertwig2004] Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x + + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 2-Armed Bandit Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1 or 2. 
+ - "outcome": Integer value representing the outcome of the given trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. 
+    stepsize
+        Integer value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying how many leapfrog steps the MCMC sampler can take
+        on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit2arm_delta').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit2arm_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit2ArmDelta(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py
new file mode 100644
index 00000000..095d79f2
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm2_preprocess_func + +__all__ = ['bandit4arm2_kalman_filter'] + + +class Bandit4Arm2KalmanFilter(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm2', + model_name='kalman_filter', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('lambda', (0, 0.9, 1)), + ('theta', (0, 50, 100)), + ('beta', (0, 0.1, 1)), + ('mu0', (0, 85, 100)), + ('sigma0', (0, 6, 15)), + ('sigmaD', (0, 3, 15)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('lambda', 'decay factor'), + ('theta', 'decay center'), + ('beta', 'inverse softmax temperature'), + ('mu0', 'anticipated initial mean of all 4 options'), + ('sigma0', 'anticipated initial sd (uncertainty factor) of all 4 options'), + ('sigmaD', 'sd of diffusion noise'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm2_preprocess_func + + +def bandit4arm2_kalman_filter( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task (modified) - Kalman Filter + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) + using Kalman Filter [Daw2006]_ with the following parameters: + "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise). + + + + + .. [Daw2006] Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879. + + .. codeauthor:: Yoonseo Zoh + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task (modified), there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "outcome": Integer value representing the outcome of the given trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. 
It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm2_kalman_filter').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm2_kalman_filter(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4Arm2KalmanFilter(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py
new file mode 100644
index 00000000..7ab47599
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py
@@ -0,0 +1,245 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm_preprocess_func + +__all__ = ['bandit4arm_2par_lapse'] + + +class Bandit4Arm2ParLapse(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm', + model_name='2par_lapse', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('Arew', (0, 0.1, 1)), + ('Apun', (0, 0.1, 1)), + ('xi', (0, 0.1, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Arew', 'reward learning rate'), + ('Apun', 'punishment learning rate'), + ('xi', 'noise'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm_preprocess_func + + +def bandit4arm_2par_lapse( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task - 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task + using 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) [Aylward2018]_ with the following parameters: + "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise). + + + + + .. [Aylward2018] Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "gain": Floating point value representing the amount of currency won on the given trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+ Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm_2par_lapse').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm_2par_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4Arm2ParLapse(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py
new file mode 100644
index 00000000..fba9226e
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm_4par.py
@@ -0,0 +1,247 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm_preprocess_func + +__all__ = ['bandit4arm_4par'] + + +class Bandit4Arm4Par(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm', + model_name='4par', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('Arew', (0, 0.1, 1)), + ('Apun', (0, 0.1, 1)), + ('R', (0, 1, 30)), + ('P', (0, 1, 30)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Arew', 'reward learning rate'), + ('Apun', 'punishment learning rate'), + ('R', 'reward sensitivity'), + ('P', 'punishment sensitivity'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm_preprocess_func + + +def bandit4arm_4par( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task - 4 Parameter Model, without C (choice perseveration) + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task + using 4 Parameter Model, without C (choice perseveration) [Seymour2012]_ with the following parameters: + "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity). + + + + + .. [Seymour2012] Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842. + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "gain": Floating point value representing the amount of currency won on the given trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm_4par').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm_4par(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4Arm4Par(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py
new file mode 100644
index 00000000..d6a155d2
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm_lapse.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm_preprocess_func + +__all__ = ['bandit4arm_lapse'] + + +class Bandit4ArmLapse(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm', + model_name='lapse', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('Arew', (0, 0.1, 1)), + ('Apun', (0, 0.1, 1)), + ('R', (0, 1, 30)), + ('P', (0, 1, 30)), + ('xi', (0, 0.1, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Arew', 'reward learning rate'), + ('Apun', 'punishment learning rate'), + ('R', 'reward sensitivity'), + ('P', 'punishment sensitivity'), + ('xi', 'noise'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm_preprocess_func + + +def bandit4arm_lapse( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task - 5 Parameter Model, without C (choice perseveration) but with xi (noise) + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task + using 5 Parameter Model, without C (choice perseveration) but with xi (noise) [Seymour2012]_ with the following parameters: + "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise). + + + + + .. [Seymour2012] Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842. + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "gain": Floating point value representing the amount of currency won on the given trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+ Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm_lapse').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4ArmLapse(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py
new file mode 100644
index 00000000..6864a806
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py
@@ -0,0 +1,251 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm_preprocess_func + +__all__ = ['bandit4arm_lapse_decay'] + + +class Bandit4ArmLapseDecay(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm', + model_name='lapse_decay', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('Arew', (0, 0.1, 1)), + ('Apun', (0, 0.1, 1)), + ('R', (0, 1, 30)), + ('P', (0, 1, 30)), + ('xi', (0, 0.1, 1)), + ('d', (0, 0.1, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Arew', 'reward learning rate'), + ('Apun', 'punishment learning rate'), + ('R', 'reward sensitivity'), + ('P', 'punishment sensitivity'), + ('xi', 'noise'), + ('d', 'decay rate'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm_preprocess_func + + +def bandit4arm_lapse_decay( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task - 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task + using 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). [Aylward2018]_ with the following parameters: + "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate). + + + + + .. [Aylward2018] Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "gain": Floating point value representing the amount of currency won on the given trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. 
As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. 
note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm_lapse_decay').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm_lapse_decay(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4ArmLapseDecay(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py
new file mode 100644
index 00000000..6936562e
--- /dev/null
+++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py
@@ -0,0 +1,247 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bandit4arm_preprocess_func + +__all__ = ['bandit4arm_singleA_lapse'] + + +class Bandit4ArmSingleaLapse(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bandit4arm', + model_name='singleA_lapse', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('A', (0, 0.1, 1)), + ('R', (0, 1, 30)), + ('P', (0, 1, 30)), + ('xi', (0, 0.1, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('A', 'learning rate'), + ('R', 'reward sensitivity'), + ('P', 'punishment sensitivity'), + ('xi', 'noise'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bandit4arm_preprocess_func + + +def bandit4arm_singleA_lapse( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """4-Armed Bandit Task - 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. + + Hierarchical Bayesian Modeling of the 4-Armed Bandit Task + using 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. [Aylward2018]_ with the following parameters: + "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise). + + + + + .. [Aylward2018] Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the 4-Armed Bandit Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on the given trial: 1, 2, 3, or 4. + - "gain": Floating point value representing the amount of currency won on the given trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. 
+ datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. 
See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bandit4arm_singleA_lapse').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bandit4arm_singleA_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return Bandit4ArmSingleaLapse(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py
new file mode 100644
index 00000000..b20ff029
--- /dev/null
+++ b/Python/hbayesdm/models/_bart_par4.py
@@ -0,0 +1,250 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import bart_preprocess_func + +__all__ = ['bart_par4'] + + +class BartPar4(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='bart', + model_name='par4', + model_type='', + data_columns=( + 'subjID', + 'pumps', + 'explosion', + ), + parameters=OrderedDict([ + ('phi', (0, 0.5, 1)), + ('eta', (0, 1, Inf)), + ('gam', (0, 1, Inf)), + ('tau', (0, 1, Inf)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('phi', 'prior belief of balloon not bursting'), + ('eta', 'updating rate'), + ('gam', 'risk-taking parameter'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = bart_preprocess_func + + +def bart_par4( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Balloon Analogue Risk Task - Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters + + Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task [van_Ravenzwaaij2011]_ + using Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters with the following parameters: + "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature). + + + + .. [van_Ravenzwaaij2011] van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105. + + + .. codeauthor:: Harhim Park + .. codeauthor:: Jaeyeong Yang + .. codeauthor:: Ayoung Lee + .. codeauthor:: Jeongbin Oh + .. codeauthor:: Jiyoon Lee + .. codeauthor:: Junha Jang + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Balloon Analogue Risk Task, there should be 3 columns of data + with the labels "subjID", "pumps", "explosion". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "pumps": The number of pumps. + - "explosion": 0: intact, 1: burst + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. 
+ datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "pumps", "explosion". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "pumps", "explosion". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. 
See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('bart_par4').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = bart_par4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return BartPar4(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py
new file mode 100644
index 00000000..c5ca2181
--- /dev/null
+++ b/Python/hbayesdm/models/_choiceRT_ddm.py
@@ -0,0 +1,251 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import choiceRT_preprocess_func + +__all__ = ['choiceRT_ddm'] + + +class ChoicertDdm(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='choiceRT', + model_name='ddm', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'RT', + ), + parameters=OrderedDict([ + ('alpha', (0, 0.5, Inf)), + ('beta', (0, 0.5, 1)), + ('delta', (0, 0.5, Inf)), + ('tau', (0, 0.15, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=[], + parameters_desc=OrderedDict([ + ('alpha', 'boundary separation'), + ('beta', 'bias'), + ('delta', 'drift rate'), + ('tau', 'non-decision time'), + ]), + additional_args_desc=OrderedDict([ + ('RTbound', 0.1), + ]), + **kwargs, + ) + + _preprocess_func = choiceRT_preprocess_func + + +def choiceRT_ddm( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Choice Reaction Time Task - Drift Diffusion Model + + Hierarchical Bayesian Modeling of the Choice Reaction Time Task + using Drift Diffusion Model [Ratcliff1978]_ with the following parameters: + "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time). + + .. note:: + Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. + + .. note:: + Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. + + + .. [Ratcliff1978] Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Choice Reaction Time Task, there should be 3 columns of data + with the labels "subjID", "choice", "RT". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2). + - "RT": Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "RT". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "RT". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + **(Currently not available.)** Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + For this model, it's possible to set the following model-specific argument to a value that you may prefer. + + - ``RTbound``: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). 
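+
+        For example, the bound can be raised to 150 ms (an illustrative value;
+        pick one appropriate for your data):
+
+        .. code:: python
+
+            # RTbound is forwarded through **additional_args
+            output = choiceRT_ddm(example=True, RTbound=0.15)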
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('choiceRT_ddm').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = choiceRT_ddm(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return ChoicertDdm(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py
new file mode 100644
index 00000000..6a49139f
--- /dev/null
+++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py
@@ -0,0 +1,251 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import choiceRT_single_preprocess_func + +__all__ = ['choiceRT_ddm_single'] + + +class ChoicertDdmSingle(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='choiceRT', + model_name='ddm', + model_type='single', + data_columns=( + 'subjID', + 'choice', + 'RT', + ), + parameters=OrderedDict([ + ('alpha', (None, 0.5, None)), + ('beta', (None, 0.5, None)), + ('delta', (None, 0.5, None)), + ('tau', (None, 0.15, None)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=[], + parameters_desc=OrderedDict([ + ('alpha', 'boundary separation'), + ('beta', 'bias'), + ('delta', 'drift rate'), + ('tau', 'non-decision time'), + ]), + additional_args_desc=OrderedDict([ + ('RTbound', 0.1), + ]), + **kwargs, + ) + + _preprocess_func = choiceRT_single_preprocess_func + + +def choiceRT_ddm_single( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Choice Reaction Time Task - Drift Diffusion Model + + Individual Bayesian Modeling of the Choice Reaction Time Task + using Drift Diffusion Model [Ratcliff1978]_ with the following parameters: + "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time). + + .. note:: + Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. + + .. note:: + Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. + + + .. [Ratcliff1978] Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Choice Reaction Time Task, there should be 3 columns of data + with the labels "subjID", "choice", "RT". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2). + - "RT": Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. 
As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "RT". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "RT". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + **(Currently not available.)** Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. 
+
+    **additional_args
+        For this model, it's possible to set the following model-specific argument to a value that you may prefer.
+
+        - ``RTbound``: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('choiceRT_ddm_single').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = choiceRT_ddm_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return ChoicertDdmSingle(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py
new file mode 100644
index 00000000..bd0ba6be
--- /dev/null
+++ b/Python/hbayesdm/models/_cra_exp.py
@@ -0,0 +1,252 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import cra_preprocess_func + +__all__ = ['cra_exp'] + + +class CraExp(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='cra', + model_name='exp', + model_type='', + data_columns=( + 'subjID', + 'prob', + 'ambig', + 'reward_var', + 'reward_fix', + 'choice', + ), + parameters=OrderedDict([ + ('alpha', (0, 1, 2)), + ('beta', (-Inf, 0, Inf)), + ('gamma', (0, 1, Inf)), + ]), + regressors=OrderedDict([ + ('sv', 2), + ('sv_fix', 2), + ('sv_var', 2), + ('p_var', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('alpha', 'risk attitude'), + ('beta', 'ambiguity attitude'), + ('gamma', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = cra_preprocess_func + + +def cra_exp( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Choice Under Risk and Ambiguity Task - Exponential Subjective Value Model + + Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task + using Exponential Subjective Value Model [Hsu2005]_ with the following parameters: + "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature). + + + + + .. [Hsu2005] Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327 + + .. codeauthor:: Jaeyeong Yang + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data + with the labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "prob": Objective probability of the variable lottery. + - "ambig": Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery). + - "reward_var": Amount of reward in variable lottery. Assumed to be greater than zero. + - "reward_fix": Amount of reward in fixed lottery. Assumed to be greater than zero. + - "choice": If the variable lottery was selected, choice == 1; otherwise choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "sv", "sv_fix", "sv_var", "p_var". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. 
It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('cra_exp').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = cra_exp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return CraExp(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py
new file mode 100644
index 00000000..ea0e85d0
--- /dev/null
+++ b/Python/hbayesdm/models/_cra_linear.py
@@ -0,0 +1,252 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import cra_preprocess_func + +__all__ = ['cra_linear'] + + +class CraLinear(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='cra', + model_name='linear', + model_type='', + data_columns=( + 'subjID', + 'prob', + 'ambig', + 'reward_var', + 'reward_fix', + 'choice', + ), + parameters=OrderedDict([ + ('alpha', (0, 1, 2)), + ('beta', (-Inf, 0, Inf)), + ('gamma', (0, 1, Inf)), + ]), + regressors=OrderedDict([ + ('sv', 2), + ('sv_fix', 2), + ('sv_var', 2), + ('p_var', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('alpha', 'risk attitude'), + ('beta', 'ambiguity attitude'), + ('gamma', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = cra_preprocess_func + + +def cra_linear( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Choice Under Risk and Ambiguity Task - Linear Subjective Value Model + + Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task + using Linear Subjective Value Model [Levy2010]_ with the following parameters: + "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature). + + + + + .. [Levy2010] Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047. + + .. codeauthor:: Jaeyeong Yang + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data + with the labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "prob": Objective probability of the variable lottery. + - "ambig": Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery). + - "reward_var": Amount of reward in variable lottery. Assumed to be greater than zero. + - "reward_fix": Amount of reward in fixed lottery. Assumed to be greater than zero. + - "choice": If the variable lottery was selected, choice == 1; otherwise choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "sv", "sv_fix", "sv_var", "p_var". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. 
It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('cra_linear').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = cra_linear(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return CraLinear(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py
new file mode 100644
index 00000000..172bd0ab
--- /dev/null
+++ b/Python/hbayesdm/models/_dbdm_prob_weight.py
@@ -0,0 +1,257 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dbdm_preprocess_func + +__all__ = ['dbdm_prob_weight'] + + +class DbdmProbWeight(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dbdm', + model_name='prob_weight', + model_type='', + data_columns=( + 'subjID', + 'opt1hprob', + 'opt2hprob', + 'opt1hval', + 'opt1lval', + 'opt2hval', + 'opt2lval', + 'choice', + ), + parameters=OrderedDict([ + ('tau', (0, 0.8, 1)), + ('rho', (0, 0.7, 2)), + ('lambda', (0, 2.5, 5)), + ('beta', (0, 0.2, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('tau', 'probability weight function'), + ('rho', 'subject utility function'), + ('lambda', 'loss aversion parameter'), + ('beta', 'inverse softmax temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dbdm_preprocess_func + + +def dbdm_prob_weight( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Description Based Decison Making Task - Probability Weight Function + + Hierarchical Bayesian Modeling of the Description Based Decison Making Task + using Probability Weight Function [Erev2010]_, [Hertwig2004]_, [Jessup2008]_ with the following parameters: + "tau" (probability weight function), "rho" (subject utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature). + + + + + .. [Erev2010] Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. + .. [Hertwig2004] Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539. + .. [Jessup2008] Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022. + + .. codeauthor:: Yoonseo Zoh + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Description Based Decison Making Task, there should be 8 columns of data + with the labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "opt1hprob": Possiblity of getting higher value of outcome(opt1hval) when choosing option 1. + - "opt2hprob": Possiblity of getting higher value of outcome(opt2hval) when choosing option 2. 
+ - "opt1hval": Possible (with opt1hprob probability) outcome of option 1. + - "opt1lval": Possible (with (1 - opt1hprob) probability) outcome of option 1. + - "opt2hval": Possible (with opt2hprob probability) outcome of option 2. + - "opt2lval": Possible (with (1 - opt2hprob) probability) outcome of option 2. + - "choice": If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. 
Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying how many leapfrog steps the MCMC sampler can take
+        on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('dbdm_prob_weight').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = dbdm_prob_weight(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return DbdmProbWeight(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py
new file mode 100644
index 00000000..67482180
--- /dev/null
+++ b/Python/hbayesdm/models/_dd_cs.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
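+
+For intuition, the discounted value this model estimates follows the
+constant-sensitivity form of Ebert & Prelec (2007). The sketch below is
+illustrative only, not code used by the package:
+
+.. code:: python
+
+    import numpy as np
+
+    def cs_value(amount, delay, r, s):
+        # Constant-sensitivity discounting: V = A * exp(-(r * D) ** s).
+        # With s == 1 this reduces to standard exponential discounting.
+        return amount * np.exp(-((r * delay) ** s))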
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dd_preprocess_func + +__all__ = ['dd_cs'] + + +class DdCs(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dd', + model_name='cs', + model_type='', + data_columns=( + 'subjID', + 'delay_later', + 'amount_later', + 'delay_sooner', + 'amount_sooner', + 'choice', + ), + parameters=OrderedDict([ + ('r', (0, 0.1, 1)), + ('s', (0, 1, 10)), + ('beta', (0, 1, 5)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('r', 'exponential discounting rate'), + ('s', 'impatience'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dd_preprocess_func + + +def dd_cs( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Delay Discounting Task - Constant-Sensitivity (CS) Model + + Hierarchical Bayesian Modeling of the Delay Discounting Task + using Constant-Sensitivity (CS) Model [Ebert2007]_ with the following parameters: + "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature). + + + + + .. [Ebert2007] Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Delay Discounting Task, there should be 6 columns of data + with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28). + - "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9). + - "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0). + - "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10). + - "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. 
+    datafile
+        Path for a TSV file containing the data to be modeled.
+        Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. Currently not available for this model.
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying how many leapfrog steps the MCMC sampler can take
+        on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('dd_cs').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = dd_cs(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return DdCs(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py
new file mode 100644
index 00000000..8d5219fc
--- /dev/null
+++ b/Python/hbayesdm/models/_dd_cs_single.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dd_single_preprocess_func + +__all__ = ['dd_cs_single'] + + +class DdCsSingle(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dd', + model_name='cs', + model_type='single', + data_columns=( + 'subjID', + 'delay_later', + 'amount_later', + 'delay_sooner', + 'amount_sooner', + 'choice', + ), + parameters=OrderedDict([ + ('r', (None, 0.1, None)), + ('s', (None, 1, None)), + ('beta', (None, 1, None)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('r', 'exponential discounting rate'), + ('s', 'impatience'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dd_single_preprocess_func + + +def dd_cs_single( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Delay Discounting Task - Constant-Sensitivity (CS) Model + + Individual Bayesian Modeling of the Delay Discounting Task + using Constant-Sensitivity (CS) Model [Ebert2007]_ with the following parameters: + "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature). + + + + + .. [Ebert2007] Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Delay Discounting Task, there should be 6 columns of data + with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28). + - "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9). + - "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0). + - "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10). + - "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
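+
+    As an illustration, a minimal single-subject data set with the required
+    columns can be assembled in pandas and passed via the ``data`` argument
+    (the values below are made up for demonstration only; a real analysis
+    needs many more trials):
+
+    .. code:: python
+
+        import pandas as pd
+
+        df = pd.DataFrame({
+            'subjID': [1, 1],
+            'delay_later': [6, 28],         # days until the later reward
+            'amount_later': [13.4, 30.9],   # amount of the later reward
+            'delay_sooner': [0, 0],         # sooner reward available now
+            'amount_sooner': [10.0, 10.0],  # amount of the sooner reward
+            'choice': [1, 0],               # 1 = later, 0 = sooner
+        })
+
+        output = dd_cs_single(data=df)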
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. 
note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm + Parameters' of the `Stan User's Guide and Reference Manual`__. + + .. [Hoffman2014] + Hoffman, M. D., & Gelman, A. (2014). + The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. + Journal of Machine Learning Research, 15(1), 1593-1623. + + __ http://mc-stan.org/users/documentation/ + + Returns + ------- + model_data + An ``hbayesdm.TaskModel`` instance with the following components: + + - ``model``: String value that is the name of the model ('dd_cs_single'). + - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values + (as specified by ``ind_pars``) for each subject. + - ``par_vals``: OrderedDict holding the posterior samples over different parameters. + - ``fit``: A PyStan StanFit object that contains the fitted Stan model. + - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model, + as specified by the user. + + + Examples + -------- + + .. code:: python + + # Run the model and store results in "output" + output <- dd_cs_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + + # Visually check convergence of the sampling chains (should look like "hairy caterpillars") + output.plot(type='trace') + + # Check Rhat values (all Rhat values should be less than or equal to 1.1) + rhat(output, less=1.1) + + # Plot posterior distributions of the hyper-parameters (distributions should be unimodal) + output.plot() + + # Show the LOOIC and WAIC model fit estimates + print_fit(output) + """ + return DdCsSingle( + example=example, + datafile=datafile, + data=data, + niter=niter, + nwarmup=nwarmup, + nchain=nchain, + ncore=ncore, + nthin=nthin, + inits=inits, + ind_pars=ind_pars, + model_regressor=model_regressor, + vb=vb, + inc_postpred=inc_postpred, + adapt_delta=adapt_delta, + stepsize=stepsize, + max_treedepth=max_treedepth, + **additional_args) diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py new file mode 100644 index 00000000..d48b6a05 --- /dev/null +++ b/Python/hbayesdm/models/_dd_exp.py @@ -0,0 +1,247 @@ +""" +Generated by template. Do not edit by hand. 
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dd_preprocess_func + +__all__ = ['dd_exp'] + + +class DdExp(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dd', + model_name='exp', + model_type='', + data_columns=( + 'subjID', + 'delay_later', + 'amount_later', + 'delay_sooner', + 'amount_sooner', + 'choice', + ), + parameters=OrderedDict([ + ('r', (0, 0.1, 1)), + ('beta', (0, 1, 5)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('r', 'exponential discounting rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dd_preprocess_func + + +def dd_exp( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Delay Discounting Task - Exponential Model + + Hierarchical Bayesian Modeling of the Delay Discounting Task + using Exponential Model [Samuelson1937]_ with the following parameters: + "r" (exponential discounting rate), "beta" (inverse temperature). + + + + + .. [Samuelson1937] Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Delay Discounting Task, there should be 6 columns of data + with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28). + - "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9). + - "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0). + - "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10). + - "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+        Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. Currently not available for this model.
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying how many leapfrog steps the MCMC sampler can take
+        on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('dd_exp').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = dd_exp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return DdExp(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py
new file mode 100644
index 00000000..ed2c56ba
--- /dev/null
+++ b/Python/hbayesdm/models/_dd_hyperbolic.py
@@ -0,0 +1,247 @@
+"""
+Generated by template. Do not edit by hand.
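+
+For intuition, the discounted value this model estimates follows the
+hyperbolic form of Mazur (1987). The sketch below is illustrative only,
+not code used by the package:
+
+.. code:: python
+
+    def hyperbolic_value(amount, delay, k):
+        # Hyperbolic discounting: V = A / (1 + k * D).
+        # A larger discounting rate k devalues delayed rewards more steeply.
+        return amount / (1 + k * delay)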
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dd_preprocess_func + +__all__ = ['dd_hyperbolic'] + + +class DdHyperbolic(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dd', + model_name='hyperbolic', + model_type='', + data_columns=( + 'subjID', + 'delay_later', + 'amount_later', + 'delay_sooner', + 'amount_sooner', + 'choice', + ), + parameters=OrderedDict([ + ('k', (0, 0.1, 1)), + ('beta', (0, 1, 5)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('k', 'discounting rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dd_preprocess_func + + +def dd_hyperbolic( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Delay Discounting Task - Hyperbolic Model + + Hierarchical Bayesian Modeling of the Delay Discounting Task + using Hyperbolic Model [Mazur1987]_ with the following parameters: + "k" (discounting rate), "beta" (inverse temperature). + + + + + .. [Mazur1987] Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement. + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Delay Discounting Task, there should be 6 columns of data + with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28). + - "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9). + - "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0). + - "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10). + - "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm + Parameters' of the `Stan User's Guide and Reference Manual`__. + + .. [Hoffman2014] + Hoffman, M. D., & Gelman, A. (2014). + The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. + Journal of Machine Learning Research, 15(1), 1593-1623. + + __ http://mc-stan.org/users/documentation/ + + Returns + ------- + model_data + An ``hbayesdm.TaskModel`` instance with the following components: + + - ``model``: String value that is the name of the model ('dd_hyperbolic'). + - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values + (as specified by ``ind_pars``) for each subject. + - ``par_vals``: OrderedDict holding the posterior samples over different parameters. + - ``fit``: A PyStan StanFit object that contains the fitted Stan model. + - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model, + as specified by the user. + + + Examples + -------- + + .. code:: python + + # Run the model and store results in "output" + output <- dd_hyperbolic(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + + # Visually check convergence of the sampling chains (should look like "hairy caterpillars") + output.plot(type='trace') + + # Check Rhat values (all Rhat values should be less than or equal to 1.1) + rhat(output, less=1.1) + + # Plot posterior distributions of the hyper-parameters (distributions should be unimodal) + output.plot() + + # Show the LOOIC and WAIC model fit estimates + print_fit(output) + """ + return DdHyperbolic( + example=example, + datafile=datafile, + data=data, + niter=niter, + nwarmup=nwarmup, + nchain=nchain, + ncore=ncore, + nthin=nthin, + inits=inits, + ind_pars=ind_pars, + model_regressor=model_regressor, + vb=vb, + inc_postpred=inc_postpred, + adapt_delta=adapt_delta, + stepsize=stepsize, + max_treedepth=max_treedepth, + **additional_args) diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py new file mode 100644 index 00000000..afa4b78b --- /dev/null +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -0,0 +1,247 @@ +""" +Generated by template. Do not edit by hand. 
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import dd_single_preprocess_func + +__all__ = ['dd_hyperbolic_single'] + + +class DdHyperbolicSingle(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='dd', + model_name='hyperbolic', + model_type='single', + data_columns=( + 'subjID', + 'delay_later', + 'amount_later', + 'delay_sooner', + 'amount_sooner', + 'choice', + ), + parameters=OrderedDict([ + ('k', (None, 0.1, None)), + ('beta', (None, 1, None)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('k', 'discounting rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = dd_single_preprocess_func + + +def dd_hyperbolic_single( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Delay Discounting Task - Hyperbolic Model + + Individual Bayesian Modeling of the Delay Discounting Task + using Hyperbolic Model [Mazur1987]_ with the following parameters: + "k" (discounting rate), "beta" (inverse temperature). + + + + + .. [Mazur1987] Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement. + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Delay Discounting Task, there should be 6 columns of data + with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28). + - "amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9). + - "delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0). + - "amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10). + - "choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+ Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. 
See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm + Parameters' of the `Stan User's Guide and Reference Manual`__. + + .. [Hoffman2014] + Hoffman, M. D., & Gelman, A. (2014). + The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. + Journal of Machine Learning Research, 15(1), 1593-1623. + + __ http://mc-stan.org/users/documentation/ + + Returns + ------- + model_data + An ``hbayesdm.TaskModel`` instance with the following components: + + - ``model``: String value that is the name of the model ('dd_hyperbolic_single'). + - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values + (as specified by ``ind_pars``) for each subject. + - ``par_vals``: OrderedDict holding the posterior samples over different parameters. + - ``fit``: A PyStan StanFit object that contains the fitted Stan model. + - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model, + as specified by the user. + + + Examples + -------- + + .. code:: python + + # Run the model and store results in "output" + output <- dd_hyperbolic_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + + # Visually check convergence of the sampling chains (should look like "hairy caterpillars") + output.plot(type='trace') + + # Check Rhat values (all Rhat values should be less than or equal to 1.1) + rhat(output, less=1.1) + + # Plot posterior distributions of the hyper-parameters (distributions should be unimodal) + output.plot() + + # Show the LOOIC and WAIC model fit estimates + print_fit(output) + """ + return DdHyperbolicSingle( + example=example, + datafile=datafile, + data=data, + niter=niter, + nwarmup=nwarmup, + nchain=nchain, + ncore=ncore, + nthin=nthin, + inits=inits, + ind_pars=ind_pars, + model_regressor=model_regressor, + vb=vb, + inc_postpred=inc_postpred, + adapt_delta=adapt_delta, + stepsize=stepsize, + max_treedepth=max_treedepth, + **additional_args) diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py new file mode 100644 index 00000000..03d81a51 --- /dev/null +++ b/Python/hbayesdm/models/_gng_m1.py @@ -0,0 +1,248 @@ +""" +Generated by template. Do not edit by hand. 
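+
+A usage sketch for the model-based regressors this model can export
+(assuming, as the components listed in the function docstring below
+suggest, that the fitted ``TaskModel`` exposes them as a dict keyed by
+regressor name; the settings shown are illustrative):
+
+.. code:: python
+
+    from hbayesdm.models import gng_m1
+
+    # Fit on the bundled example data and request the regressors.
+    output = gng_m1(example=True, model_regressor=True)
+    qgo = output.model_regressor['Qgo']  # trial-by-trial Go action values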
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import gng_preprocess_func + +__all__ = ['gng_m1'] + + +class GngM1(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='gng', + model_name='m1', + model_type='', + data_columns=( + 'subjID', + 'cue', + 'keyPressed', + 'outcome', + ), + parameters=OrderedDict([ + ('xi', (0, 0.1, 1)), + ('ep', (0, 0.2, 1)), + ('rho', (0, exp(2), Inf)), + ]), + regressors=OrderedDict([ + ('Qgo', 2), + ('Qnogo', 2), + ('Wgo', 2), + ('Wnogo', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('xi', 'noise'), + ('ep', 'learning rate'), + ('rho', 'effective size'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = gng_preprocess_func + + +def gng_m1( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Orthogonalized Go/Nogo Task - RW + noise + + Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task + using RW + noise [Guitart-Masip2012]_ with the following parameters: + "xi" (noise), "ep" (learning rate), "rho" (effective size). + + + + + .. [Guitart-Masip2012] Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Orthogonalized Go/Nogo Task, there should be 4 columns of data + with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "cue": Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4. + - "keyPressed": Binary value representing the subject's response for that trial (where Press == 1; No press == 0). + - "outcome": Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('gng_m1').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = gng_m1(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return GngM1(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py
new file mode 100644
index 00000000..a19a7ab0
--- /dev/null
+++ b/Python/hbayesdm/models/_gng_m2.py
@@ -0,0 +1,250 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import gng_preprocess_func + +__all__ = ['gng_m2'] + + +class GngM2(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='gng', + model_name='m2', + model_type='', + data_columns=( + 'subjID', + 'cue', + 'keyPressed', + 'outcome', + ), + parameters=OrderedDict([ + ('xi', (0, 0.1, 1)), + ('ep', (0, 0.2, 1)), + ('b', (-Inf, 0, Inf)), + ('rho', (0, exp(2), Inf)), + ]), + regressors=OrderedDict([ + ('Qgo', 2), + ('Qnogo', 2), + ('Wgo', 2), + ('Wnogo', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('xi', 'noise'), + ('ep', 'learning rate'), + ('b', 'action bias'), + ('rho', 'effective size'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = gng_preprocess_func + + +def gng_m2( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Orthogonalized Go/Nogo Task - RW + noise + bias + + Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task + using RW + noise + bias [Guitart-Masip2012]_ with the following parameters: + "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size). + + + + + .. [Guitart-Masip2012] Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Orthogonalized Go/Nogo Task, there should be 4 columns of data + with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "cue": Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4. + - "keyPressed": Binary value representing the subject's response for that trial (where Press == 1; No press == 0). + - "outcome": Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+ Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. 
One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('gng_m2').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = gng_m2(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return GngM2(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py
new file mode 100644
index 00000000..21694a4d
--- /dev/null
+++ b/Python/hbayesdm/models/_gng_m3.py
@@ -0,0 +1,253 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import gng_preprocess_func + +__all__ = ['gng_m3'] + + +class GngM3(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='gng', + model_name='m3', + model_type='', + data_columns=( + 'subjID', + 'cue', + 'keyPressed', + 'outcome', + ), + parameters=OrderedDict([ + ('xi', (0, 0.1, 1)), + ('ep', (0, 0.2, 1)), + ('b', (-Inf, 0, Inf)), + ('pi', (-Inf, 0, Inf)), + ('rho', (0, exp(2), Inf)), + ]), + regressors=OrderedDict([ + ('Qgo', 2), + ('Qnogo', 2), + ('Wgo', 2), + ('Wnogo', 2), + ('SV', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('xi', 'noise'), + ('ep', 'learning rate'), + ('b', 'action bias'), + ('pi', 'Pavlovian bias'), + ('rho', 'effective size'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = gng_preprocess_func + + +def gng_m3( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Orthogonalized Go/Nogo Task - RW + noise + bias + pi + + Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task + using RW + noise + bias + pi [Guitart-Masip2012]_ with the following parameters: + "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size). + + + + + .. [Guitart-Masip2012] Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Orthogonalized Go/Nogo Task, there should be 4 columns of data + with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "cue": Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4. + - "keyPressed": Binary value representing the subject's response for that trial (where Press == 1; No press == 0). + - "outcome": Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. 
+ datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. 
See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('gng_m3').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = gng_m3(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return GngM3(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py
new file mode 100644
index 00000000..9800b79c
--- /dev/null
+++ b/Python/hbayesdm/models/_gng_m4.py
@@ -0,0 +1,255 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import gng_preprocess_func + +__all__ = ['gng_m4'] + + +class GngM4(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='gng', + model_name='m4', + model_type='', + data_columns=( + 'subjID', + 'cue', + 'keyPressed', + 'outcome', + ), + parameters=OrderedDict([ + ('xi', (0, 0.1, 1)), + ('ep', (0, 0.2, 1)), + ('b', (-Inf, 0, Inf)), + ('pi', (-Inf, 0, Inf)), + ('rhoRew', (0, exp(2), Inf)), + ('rhoPun', (0, exp(2), Inf)), + ]), + regressors=OrderedDict([ + ('Qgo', 2), + ('Qnogo', 2), + ('Wgo', 2), + ('Wnogo', 2), + ('SV', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('xi', 'noise'), + ('ep', 'learning rate'), + ('b', 'action bias'), + ('pi', 'Pavlovian bias'), + ('rhoRew', 'reward sensitivity'), + ('rhoPun', 'punishment sensitivity'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = gng_preprocess_func + + +def gng_m4( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Orthogonalized Go/Nogo Task - RW (rew/pun) + noise + bias + pi + + Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task + using RW (rew/pun) + noise + bias + pi [Cavanagh2013]_ with the following parameters: + "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity). + + + + + .. [Cavanagh2013] Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Orthogonalized Go/Nogo Task, there should be 4 columns of data + with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "cue": Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4. + - "keyPressed": Binary value representing the subject's response for that trial (where Press == 1; No press == 0). + - "outcome": Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+ + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. 
It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('gng_m4').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = gng_m4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return GngM4(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py
new file mode 100644
index 00000000..8f44b778
--- /dev/null
+++ b/Python/hbayesdm/models/_igt_orl.py
@@ -0,0 +1,251 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import igt_preprocess_func + +__all__ = ['igt_orl'] + + +class IgtOrl(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='igt', + model_name='orl', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('Arew', (0, 0.1, 1)), + ('Apun', (0, 0.1, 1)), + ('K', (0, 0.1, 5)), + ('betaF', (-Inf, 0.1, Inf)), + ('betaP', (-Inf, 1, Inf)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Arew', 'reward learning rate'), + ('Apun', 'punishment learning rate'), + ('K', 'perseverance decay'), + ('betaF', 'outcome frequency weight'), + ('betaP', 'perseverance weight'), + ]), + additional_args_desc=OrderedDict([ + ('payscale', 100), + ]), + **kwargs, + ) + + _preprocess_func = igt_preprocess_func + + +def igt_orl( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Iowa Gambling Task - Outcome-Representation Learning Model + + Hierarchical Bayesian Modeling of the Iowa Gambling Task [Ahn2008]_ + using Outcome-Representation Learning Model [Haines2018]_ with the following parameters: + "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight). + + + + .. [Ahn2008] Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + .. [Haines2018] Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688 + + .. codeauthor:: Nate Haines + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Iowa Gambling Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4). + - "gain": Floating point value representing the amount of currency won on that trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on that trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. 
As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + For this model, it's possible to set the following model-specific argument to a value that you may prefer. 
+
+        - ``payscale``: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('igt_orl').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = igt_orl(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return IgtOrl(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py
new file mode 100644
index 00000000..3026a60e
--- /dev/null
+++ b/Python/hbayesdm/models/_igt_pvl_decay.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import igt_preprocess_func + +__all__ = ['igt_pvl_decay'] + + +class IgtPvlDecay(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='igt', + model_name='pvl_decay', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('A', (0, 0.5, 1)), + ('alpha', (0, 0.5, 2)), + ('cons', (0, 1, 5)), + ('lambda', (0, 1, 10)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('A', 'decay rate'), + ('alpha', 'outcome sensitivity'), + ('cons', 'response consistency'), + ('lambda', 'loss aversion'), + ]), + additional_args_desc=OrderedDict([ + ('payscale', 100), + ]), + **kwargs, + ) + + _preprocess_func = igt_preprocess_func + + +def igt_pvl_decay( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Iowa Gambling Task - Prospect Valence Learning (PVL) Decay-RI + + Hierarchical Bayesian Modeling of the Iowa Gambling Task [Ahn2008]_ + using Prospect Valence Learning (PVL) Decay-RI [Ahn2014]_ with the following parameters: + "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion). + + + + .. [Ahn2008] Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + .. [Ahn2014] Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Iowa Gambling Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4). + - "gain": Floating point value representing the amount of currency won on that trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on that trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. 
As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + For this model, it's possible to set the following model-specific argument to a value that you may prefer. 
+
+        - ``payscale``: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('igt_pvl_decay').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = igt_pvl_decay(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return IgtPvlDecay(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py
new file mode 100644
index 00000000..59040338
--- /dev/null
+++ b/Python/hbayesdm/models/_igt_pvl_delta.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import igt_preprocess_func + +__all__ = ['igt_pvl_delta'] + + +class IgtPvlDelta(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='igt', + model_name='pvl_delta', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('A', (0, 0.5, 1)), + ('alpha', (0, 0.5, 2)), + ('cons', (0, 1, 5)), + ('lambda', (0, 1, 10)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('A', 'learning rate'), + ('alpha', 'outcome sensitivity'), + ('cons', 'response consistency'), + ('lambda', 'loss aversion'), + ]), + additional_args_desc=OrderedDict([ + ('payscale', 100), + ]), + **kwargs, + ) + + _preprocess_func = igt_preprocess_func + + +def igt_pvl_delta( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Iowa Gambling Task - Prospect Valence Learning (PVL) Delta + + Hierarchical Bayesian Modeling of the Iowa Gambling Task [Ahn2008]_ + using Prospect Valence Learning (PVL) Delta [Ahn2008]_ with the following parameters: + "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion). + + + + .. [Ahn2008] Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Iowa Gambling Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4). + - "gain": Floating point value representing the amount of currency won on that trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on that trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "gain", "loss". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + For this model, it's possible to set the following model-specific argument to a value that you may prefer. + + - ``payscale``: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. 
It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+    .. [Hoffman2014]
+        Hoffman, M. D., & Gelman, A. (2014).
+        The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+        Journal of Machine Learning Research, 15(1), 1593-1623.
+
+    __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('igt_pvl_delta').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = igt_pvl_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return IgtPvlDelta(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py
new file mode 100644
index 00000000..874b3523
--- /dev/null
+++ b/Python/hbayesdm/models/_igt_vpp.py
@@ -0,0 +1,257 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import igt_preprocess_func + +__all__ = ['igt_vpp'] + + +class IgtVpp(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='igt', + model_name='vpp', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'gain', + 'loss', + ), + parameters=OrderedDict([ + ('A', (0, 0.5, 1)), + ('alpha', (0, 0.5, 2)), + ('cons', (0, 1, 5)), + ('lambda', (0, 1, 10)), + ('epP', (-Inf, 0, Inf)), + ('epN', (-Inf, 0, Inf)), + ('K', (0, 0.5, 1)), + ('w', (0, 0.5, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('A', 'learning rate'), + ('alpha', 'outcome sensitivity'), + ('cons', 'response consistency'), + ('lambda', 'loss aversion'), + ('epP', 'gain impact'), + ('epN', 'loss impact'), + ('K', 'decay rate'), + ('w', 'RL weight'), + ]), + additional_args_desc=OrderedDict([ + ('payscale', 100), + ]), + **kwargs, + ) + + _preprocess_func = igt_preprocess_func + + +def igt_vpp( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Iowa Gambling Task - Value-Plus-Perseverance + + Hierarchical Bayesian Modeling of the Iowa Gambling Task [Ahn2008]_ + using Value-Plus-Perseverance [Worthy2013]_ with the following parameters: + "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight). + + + + .. [Ahn2008] Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + .. [Worthy2013] Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Iowa Gambling Task, there should be 4 columns of data + with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4). + - "gain": Floating point value representing the amount of currency won on that trial (e.g. 50, 100). + - "loss": Floating point value representing the amount of currency lost on that trial (e.g. 0, -50). + + .. note:: + User data may contain other columns of data (e.g. 
``ReactionTime``,
+        ``trial_number``, etc.), but only the data within the column names listed
+        above will be used during the modeling. As long as the necessary columns
+        mentioned above are present and labeled correctly, there is no need to
+        remove other miscellaneous data columns.
+
+    Parameters
+    ----------
+    example
+        Whether to use the example data provided by hBayesDM.
+    datafile
+        Path for a TSV file containing the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "gain", "loss".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "gain", "loss".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. Currently not available for this model.
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        For this model, it's possible to set the following model-specific argument to a value that you may prefer.
+
+        - ``payscale``: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('igt_vpp').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = igt_vpp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return IgtVpp(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py
new file mode 100644
index 00000000..2c185686
--- /dev/null
+++ b/Python/hbayesdm/models/_peer_ocu.py
@@ -0,0 +1,253 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import peer_preprocess_func + +__all__ = ['peer_ocu'] + + +class PeerOcu(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='peer', + model_name='ocu', + model_type='', + data_columns=( + 'subjID', + 'condition', + 'p_gamble', + 'safe_Hpayoff', + 'safe_Lpayoff', + 'risky_Hpayoff', + 'risky_Lpayoff', + 'choice', + ), + parameters=OrderedDict([ + ('rho', (0, 1, 2)), + ('tau', (0, 1, Inf)), + ('ocu', (-Inf, 0, Inf)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('rho', 'risk preference'), + ('tau', 'inverse temperature'), + ('ocu', 'other-conferred utility'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = peer_preprocess_func + + +def peer_ocu( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Peer Influence Task - Other-Conferred Utility (OCU) Model + + Hierarchical Bayesian Modeling of the Peer Influence Task [Chung2015]_ + using Other-Conferred Utility (OCU) Model with the following parameters: + "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility). + + + + .. [Chung2015] Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916. + + + .. codeauthor:: Harhim Park + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Peer Influence Task, there should be 8 columns of data + with the labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "condition": 0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky). + - "p_gamble": Probability of receiving a high payoff (same for both options). + - "safe_Hpayoff": High payoff of the safe option. + - "safe_Lpayoff": Low payoff of the safe option. + - "risky_Hpayoff": High payoff of the risky option. + - "risky_Lpayoff": Low payoff of the risky option. + - "choice": Which option was chosen? 0: safe, 1: risky. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+
+    Parameters
+    ----------
+    example
+        Whether to use the example data provided by hBayesDM.
+    datafile
+        Path for a TSV file containing the data to be modeled.
+        Data columns should be labeled as: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. Currently not available for this model.
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('peer_ocu').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = peer_ocu(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return PeerOcu(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py
new file mode 100644
index 00000000..e27179dd
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_ewa.py
@@ -0,0 +1,247 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_ewa'] + + +class PrlEwa(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='ewa', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('phi', (0, 0.5, 1)), + ('rho', (0, 0.1, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('ew_c', 2), + ('ew_nc', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('phi', '1 - learning rate'), + ('rho', 'experience decay factor'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_ewa( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Experience-Weighted Attraction Model + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Experience-Weighted Attraction Model [Ouden2013]_ with the following parameters: + "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature). + + + + + .. [Ouden2013] Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". 
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "outcome".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('prl_ewa').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = prl_ewa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return PrlEwa(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py
new file mode 100644
index 00000000..3a0f7bde
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_fictitious.py
@@ -0,0 +1,248 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_fictitious'] + + +class PrlFictitious(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='fictitious', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('eta', (0, 0.5, 1)), + ('alpha', (-Inf, 0, Inf)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('pe_c', 2), + ('pe_nc', 2), + ('dv', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('eta', 'learning rate'), + ('alpha', 'indecision point'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_fictitious( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Fictitious Update Model + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Fictitious Update Model [Glascher2009]_ with the following parameters: + "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature). + + + + + .. [Glascher2009] Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. 
+        Data columns should be labeled as: "subjID", "choice", "outcome".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "outcome".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('prl_fictitious').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = prl_fictitious(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return PrlFictitious(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py
new file mode 100644
index 00000000..6e5437f3
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py
@@ -0,0 +1,250 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_multipleB_preprocess_func + +__all__ = ['prl_fictitious_multipleB'] + + +class PrlFictitiousMultipleb(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='fictitious', + model_type='multipleB', + data_columns=( + 'subjID', + 'block', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('eta', (0, 0.5, 1)), + ('alpha', (-Inf, 0, Inf)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 3), + ('ev_nc', 3), + ('pe_c', 3), + ('pe_nc', 3), + ('dv', 3), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('eta', 'learning rate'), + ('alpha', 'indecision point'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_multipleB_preprocess_func + + +def prl_fictitious_multipleB( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Fictitious Update Model + + Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Fictitious Update Model [Glascher2009]_ with the following parameters: + "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature). + + + + + .. [Glascher2009] Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 4 columns of data + with the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "block": A unique identifier for each of the multiple blocks within each subject. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. 
+
+    Parameters
+    ----------
+    example
+        Whether to use the example data provided by hBayesDM.
+    datafile
+        Path for a TSV file containing the data to be modeled.
+        Data columns should be labeled as: "subjID", "block", "choice", "outcome".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "block", "choice", "outcome".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('prl_fictitious_multipleB').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = prl_fictitious_multipleB(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return PrlFictitiousMultipleb(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py
new file mode 100644
index 00000000..345eb275
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_fictitious_rp.py
@@ -0,0 +1,251 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_fictitious_rp'] + + +class PrlFictitiousRp(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='fictitious_rp', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('eta_pos', (0, 0.5, 1)), + ('eta_neg', (0, 0.5, 1)), + ('alpha', (-Inf, 0, Inf)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('pe_c', 2), + ('pe_nc', 2), + ('dv', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('eta_pos', 'learning rate, +PE'), + ('eta_neg', 'learning rate, -PE'), + ('alpha', 'indecision point'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_fictitious_rp( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) [Glascher2009]_, [Ouden2013]_ with the following parameters: + "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature). + + + + + .. [Glascher2009] Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 + .. [Ouden2013] Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. 
note::
+        User data may contain other columns of data (e.g. ``ReactionTime``,
+        ``trial_number``, etc.), but only the data within the column names listed
+        above will be used during the modeling. As long as the necessary columns
+        mentioned above are present and labeled correctly, there is no need to
+        remove other miscellaneous data columns.
+
+    Parameters
+    ----------
+    example
+        Whether to use the example data provided by hBayesDM.
+    datafile
+        Path for a TSV file containing the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "outcome".
+    data
+        Pandas DataFrame object holding the data to be modeled.
+        Data columns should be labeled as: "subjID", "choice", "outcome".
+    niter
+        Number of iterations, including warm-up. Defaults to 4000.
+    nwarmup
+        Number of iterations used for warm-up only. Defaults to 1000.
+
+        ``nwarmup`` is a numerical value that specifies how many MCMC samples
+        should not be stored upon the beginning of each chain. For those
+        familiar with Bayesian methods, this is equivalent to burn-in samples.
+        Due to the nature of the MCMC algorithm, initial values (i.e., where the
+        sampling chains begin) can have a heavy influence on the generated
+        posterior distributions. The ``nwarmup`` argument can be set to a
+        higher number in order to curb the effects that initial values have on
+        the resulting posteriors.
+    nchain
+        Number of Markov chains to run. Defaults to 4.
+
+        ``nchain`` is a numerical value that specifies how many chains (i.e.,
+        independent sampling sequences) should be used to draw samples from
+        the posterior distribution. Since the posteriors are generated from a
+        sampling process, it is good practice to run multiple chains to ensure
+        that a reasonably representative posterior is attained. When the
+        sampling is complete, it is possible to check the multiple chains for
+        convergence by running the following line of code:
+
+        .. code:: python
+
+            output.plot(type='trace')
+    ncore
+        Number of CPUs to be used for running. Defaults to 1.
+    nthin
+        Every ``nthin``-th sample will be used to generate the posterior
+        distribution. Defaults to 1. A higher number can be used when
+        auto-correlation within the MCMC sampling is high.
+
+        ``nthin`` is a numerical value that specifies the "skipping" behavior
+        of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+        generate posterior distributions. By default, ``nthin`` is equal to 1,
+        meaning that every sample is used to generate the posterior.
+    inits
+        String or list specifying how the initial values should be generated.
+        Options are ``'fixed'`` or ``'random'``, or your own initial values.
+    ind_pars
+        String specifying how to summarize the individual parameters.
+        Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+    model_regressor
+        Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".
+    vb
+        Whether to use variational inference to approximately draw from a
+        posterior distribution. Defaults to ``False``.
+    inc_postpred
+        Include trial-level posterior predictive simulations in
+        model output (may greatly increase file size). Defaults to ``False``.
+    adapt_delta
+        Floating point value representing the target acceptance probability of a new
+        sample in the MCMC chain. Must be between 0 and 1. See note below.
+    stepsize
+        Floating point value specifying the size of each leapfrog step that the MCMC sampler
+        can take on each new iteration. See note below.
+    max_treedepth
+        Integer value specifying the maximum tree depth, which bounds how many
+        leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+    **additional_args
+        Not used for this model.
+
+    .. note::
+
+        ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+        give the user more control over Stan's MCMC sampler. It is recommended that
+        only advanced users change the default values, as alterations can profoundly
+        change the sampler's behavior. See [Hoffman2014]_ for more information on the
+        sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+        Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+        .. [Hoffman2014]
+            Hoffman, M. D., & Gelman, A. (2014).
+            The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+            Journal of Machine Learning Research, 15(1), 1593-1623.
+
+        __ http://mc-stan.org/users/documentation/
+
+    Returns
+    -------
+    model_data
+        An ``hbayesdm.TaskModel`` instance with the following components:
+
+        - ``model``: String value that is the name of the model ('prl_fictitious_rp').
+        - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+          (as specified by ``ind_pars``) for each subject.
+        - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+        - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+        - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+          as specified by the user.
+        - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+    Examples
+    --------
+
+    .. code:: python
+
+        # Run the model and store results in "output"
+        output = prl_fictitious_rp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+        # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+        output.plot(type='trace')
+
+        # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+        rhat(output, less=1.1)
+
+        # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+        output.plot()
+
+        # Show the LOOIC and WAIC model fit estimates
+        print_fit(output)
+    """
+    return PrlFictitiousRp(
+        example=example,
+        datafile=datafile,
+        data=data,
+        niter=niter,
+        nwarmup=nwarmup,
+        nchain=nchain,
+        ncore=ncore,
+        nthin=nthin,
+        inits=inits,
+        ind_pars=ind_pars,
+        model_regressor=model_regressor,
+        vb=vb,
+        inc_postpred=inc_postpred,
+        adapt_delta=adapt_delta,
+        stepsize=stepsize,
+        max_treedepth=max_treedepth,
+        **additional_args)
diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py
new file mode 100644
index 00000000..fd8f5768
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py
@@ -0,0 +1,249 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_fictitious_rp_woa'] + + +class PrlFictitiousRpWoa(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='fictitious_rp_woa', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('eta_pos', (0, 0.5, 1)), + ('eta_neg', (0, 0.5, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('pe_c', 2), + ('pe_nc', 2), + ('dv', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('eta_pos', 'learning rate, +PE'), + ('eta_neg', 'learning rate, -PE'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_fictitious_rp_woa( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) [Glascher2009]_, [Ouden2013]_ with the following parameters: + "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature). + + + + + .. [Glascher2009] Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 + .. [Ouden2013] Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. 
note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. 
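+
+ Under Stan's default warm-up adaptation this value is only the initial
+ step size, which is then tuned automatically. A usage sketch (illustrative
+ values only):
+
+ .. code:: python
+
+ # Start from a smaller step size and a stricter acceptance target
+ output = prl_fictitious_rp_woa(example=True, stepsize=0.5, adapt_delta=0.99)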
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('prl_fictitious_rp_woa').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+ - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = prl_fictitious_rp_woa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return PrlFictitiousRpWoa(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py
new file mode 100644
index 00000000..f39e7fa2
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_fictitious_woa.py
@@ -0,0 +1,246 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_fictitious_woa'] + + +class PrlFictitiousWoa(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='fictitious_woa', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('eta', (0, 0.5, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('pe_c', 2), + ('pe_nc', 2), + ('dv', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('eta', 'learning rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_fictitious_woa( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Fictitious Update Model, without alpha (indecision point) + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Fictitious Update Model, without alpha (indecision point) [Glascher2009]_ with the following parameters: + "eta" (learning rate), "beta" (inverse temperature). + + + + + .. [Glascher2009] Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('prl_fictitious_woa').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+ - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = prl_fictitious_woa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return PrlFictitiousWoa(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py
new file mode 100644
index 00000000..4ec619ae
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_rp.py
@@ -0,0 +1,246 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_preprocess_func + +__all__ = ['prl_rp'] + + +class PrlRp(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='rp', + model_type='', + data_columns=( + 'subjID', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('Apun', (0, 0.1, 1)), + ('Arew', (0, 0.1, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 2), + ('ev_nc', 2), + ('pe', 2), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Apun', 'punishment learning rate'), + ('Arew', 'reward learning rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_preprocess_func + + +def prl_rp( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Reward-Punishment Model + + Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Reward-Punishment Model [Ouden2013]_ with the following parameters: + "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature). + + + + + .. [Ouden2013] Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('prl_rp').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+ - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = prl_rp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return PrlRp(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py
new file mode 100644
index 00000000..dffab98d
--- /dev/null
+++ b/Python/hbayesdm/models/_prl_rp_multipleB.py
@@ -0,0 +1,248 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import prl_multipleB_preprocess_func + +__all__ = ['prl_rp_multipleB'] + + +class PrlRpMultipleb(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='prl', + model_name='rp', + model_type='multipleB', + data_columns=( + 'subjID', + 'block', + 'choice', + 'outcome', + ), + parameters=OrderedDict([ + ('Apun', (0, 0.1, 1)), + ('Arew', (0, 0.1, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + ('ev_c', 3), + ('ev_nc', 3), + ('pe', 3), + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('Apun', 'punishment learning rate'), + ('Arew', 'reward learning rate'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = prl_multipleB_preprocess_func + + +def prl_rp_multipleB( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Reversal Learning Task - Reward-Punishment Model + + Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task + using Reward-Punishment Model [Ouden2013]_ with the following parameters: + "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature). + + + + + .. [Ouden2013] Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 + + .. codeauthor:: Jaeyeong Yang (for model-based regressors) + .. codeauthor:: Harhim Park (for model-based regressors) + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Reversal Learning Task, there should be 4 columns of data + with the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "block": A unique identifier for each of the multiple blocks within each subject. + - "choice": Integer value representing the option chosen on that trial: 1 or 2. + - "outcome": Integer value representing the outcome of that trial (where reward == 1, and loss == -1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. 
+ datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "block", "choice", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "block", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "ev_c", "ev_nc", "pe". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. 
See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('prl_rp_multipleB').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+ - ``model_regressor``: Dict holding the extracted model-based regressors.
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = prl_rp_multipleB(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return PrlRpMultipleb(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py
new file mode 100644
index 00000000..88697680
--- /dev/null
+++ b/Python/hbayesdm/models/_pst_gainloss_Q.py
@@ -0,0 +1,258 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import pst_preprocess_func + +__all__ = ['pst_gainloss_Q'] + + +class PstGainlossQ(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='pst', + model_name='gainloss_Q', + model_type='', + data_columns=( + 'subjID', + 'type', + 'choice', + 'reward', + ), + parameters=OrderedDict([ + ('alpha_pos', (0, 0.5, 1)), + ('alpha_neg', (0, 0.5, 1)), + ('beta', (0, 1, 10)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('alpha_pos', 'learning rate for positive feedbacks'), + ('alpha_neg', 'learning rate for negative feedbacks'), + ('beta', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = pst_preprocess_func + + +def pst_gainloss_Q( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Probabilistic Selection Task - Gain-Loss Q Learning Model + + Hierarchical Bayesian Modeling of the Probabilistic Selection Task + using Gain-Loss Q Learning Model [Frank2007]_ with the following parameters: + "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature). + + + + + .. [Frank2007] Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316. + + .. codeauthor:: Jaeyeong Yang + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Probabilistic Selection Task, there should be 4 columns of data + with the labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "type": Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below: + + ===== ======== ================== + Code Stimulus Probability to win + ===== ======== ================== + 1 A 80% + 2 B 20% + 3 C 70% + 4 D 30% + 5 E 60% + 6 F 40% + ===== ======== ================== + + The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6. + - "choice": Whether the subject chose the left option (option1) out of the given two options (i.e. 
if option1 was chosen, 1; if option2 was chosen, 0). + - "reward": Amount of reward earned as a result of the trial. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "type", "choice", "reward". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "type", "choice", "reward". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. 
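+
+ For example, if Stan warns about divergent transitions after warm-up,
+ raising ``adapt_delta`` toward 1 is a common first remedy. A usage sketch
+ (the value is illustrative only):
+
+ .. code:: python
+
+ output = pst_gainloss_Q(example=True, adapt_delta=0.99)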
+ stepsize
+ Integer value specifying the size of each leapfrog step that the MCMC sampler
+ can take on each new iteration. See note below.
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('pst_gainloss_Q').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = pst_gainloss_Q(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return PstGainlossQ(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py
new file mode 100644
index 00000000..4b3590a7
--- /dev/null
+++ b/Python/hbayesdm/models/_ra_noLA.py
@@ -0,0 +1,245 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ra_preprocess_func + +__all__ = ['ra_noLA'] + + +class RaNola(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ra', + model_name='noLA', + model_type='', + data_columns=( + 'subjID', + 'gain', + 'loss', + 'cert', + 'gamble', + ), + parameters=OrderedDict([ + ('rho', (0, 1, 2)), + ('tau', (0, 1, 30)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('rho', 'risk aversion'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = ra_preprocess_func + + +def ra_noLA( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Risk Aversion Task - Prospect Theory, without loss aversion (LA) parameter + + Hierarchical Bayesian Modeling of the Risk Aversion Task + using Prospect Theory, without loss aversion (LA) parameter [Sokol-Hessner2009]_ with the following parameters: + "rho" (risk aversion), "tau" (inverse temperature). + + + + + .. [Sokol-Hessner2009] Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Risk Aversion Task, there should be 5 columns of data + with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). + - "cert": Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero. + - "gamble": If gamble was taken, gamble == 1; else gamble == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ra_noLA').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ra_noLA(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return RaNola(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py
new file mode 100644
index 00000000..0a018226
--- /dev/null
+++ b/Python/hbayesdm/models/_ra_noRA.py
@@ -0,0 +1,245 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ra_preprocess_func + +__all__ = ['ra_noRA'] + + +class RaNora(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ra', + model_name='noRA', + model_type='', + data_columns=( + 'subjID', + 'gain', + 'loss', + 'cert', + 'gamble', + ), + parameters=OrderedDict([ + ('lambda', (0, 1, 5)), + ('tau', (0, 1, 30)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('lambda', 'loss aversion'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = ra_preprocess_func + + +def ra_noRA( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Risk Aversion Task - Prospect Theory, without risk aversion (RA) parameter + + Hierarchical Bayesian Modeling of the Risk Aversion Task + using Prospect Theory, without risk aversion (RA) parameter [Sokol-Hessner2009]_ with the following parameters: + "lambda" (loss aversion), "tau" (inverse temperature). + + + + + .. [Sokol-Hessner2009] Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Risk Aversion Task, there should be 5 columns of data + with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). + - "cert": Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero. + - "gamble": If gamble was taken, gamble == 1; else gamble == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ra_noRA').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ra_noRA(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return RaNora(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py
new file mode 100644
index 00000000..895280d7
--- /dev/null
+++ b/Python/hbayesdm/models/_ra_prospect.py
@@ -0,0 +1,247 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ra_preprocess_func + +__all__ = ['ra_prospect'] + + +class RaProspect(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ra', + model_name='prospect', + model_type='', + data_columns=( + 'subjID', + 'gain', + 'loss', + 'cert', + 'gamble', + ), + parameters=OrderedDict([ + ('rho', (0, 1, 2)), + ('lambda', (0, 1, 5)), + ('tau', (0, 1, 30)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('rho', 'risk aversion'), + ('lambda', 'loss aversion'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = ra_preprocess_func + + +def ra_prospect( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Risk Aversion Task - Prospect Theory + + Hierarchical Bayesian Modeling of the Risk Aversion Task + using Prospect Theory [Sokol-Hessner2009]_ with the following parameters: + "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature). + + + + + .. [Sokol-Hessner2009] Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Risk Aversion Task, there should be 5 columns of data + with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). + - "cert": Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero. + - "gamble": If gamble was taken, gamble == 1; else gamble == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". 
+ data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. 
HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ra_prospect').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ra_prospect(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return RaProspect(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py
new file mode 100644
index 00000000..be2e5070
--- /dev/null
+++ b/Python/hbayesdm/models/_rdt_happiness.py
@@ -0,0 +1,261 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import rdt_preprocess_func + +__all__ = ['rdt_happiness'] + + +class RdtHappiness(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='rdt', + model_name='happiness', + model_type='', + data_columns=( + 'subjID', + 'gain', + 'loss', + 'cert', + 'type', + 'gamble', + 'outcome', + 'happy', + 'RT_happy', + ), + parameters=OrderedDict([ + ('w0', (-Inf, 1, Inf)), + ('w1', (-Inf, 1, Inf)), + ('w2', (-Inf, 1, Inf)), + ('w3', (-Inf, 1, Inf)), + ('gam', (0, 0.5, 1)), + ('sig', (0, 1, Inf)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('w0', 'baseline'), + ('w1', 'weight of certain rewards'), + ('w2', 'weight of expected values'), + ('w3', 'weight of reward prediction errors'), + ('gam', 'forgetting factor'), + ('sig', 'standard deviation of error'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = rdt_preprocess_func + + +def rdt_happiness( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Risky Decision Task - Happiness Computational Model + + Hierarchical Bayesian Modeling of the Risky Decision Task + using Happiness Computational Model [Rutledge2014]_ with the following parameters: + "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error). + + + + + .. [Rutledge2014] Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257. + + .. codeauthor:: Harhim Park + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Risky Decision Task, there should be 9 columns of data + with the labels "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9). + - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). + - "cert": Guaranteed amount of a safe option. + - "type": loss == -1, mixed == 0, gain == 1 + - "gamble": If gamble was taken, gamble == 1; else gamble == 0. + - "outcome": Result of the trial. + - "happy": Happiness score. + - "RT_happy": Reaction time for answering the happiness score. + + .. note:: + User data may contain other columns of data (e.g. 
``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. 
+
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('rdt_happiness').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = rdt_happiness(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return RdtHappiness(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py
new file mode 100644
index 00000000..e5c68e12
--- /dev/null
+++ b/Python/hbayesdm/models/_ts_par4.py
@@ -0,0 +1,250 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ts_preprocess_func + +__all__ = ['ts_par4'] + + +class TsPar4(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ts', + model_name='par4', + model_type='', + data_columns=( + 'subjID', + 'level1_choice', + 'level2_choice', + 'reward', + ), + parameters=OrderedDict([ + ('a', (0, 0.5, 1)), + ('beta', (0, 1, Inf)), + ('pi', (0, 1, 5)), + ('w', (0, 0.5, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred_step1', 'y_pred_step2'], + parameters_desc=OrderedDict([ + ('a', 'learning rate for both stages 1 & 2'), + ('beta', 'inverse temperature for both stages 1 & 2'), + ('pi', 'perseverance'), + ('w', 'model-based weight'), + ]), + additional_args_desc=OrderedDict([ + ('trans_prob', 0.7), + ]), + **kwargs, + ) + + _preprocess_func = ts_preprocess_func + + +def ts_par4( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Two-Step Task - Hybrid Model, with 4 parameters + + Hierarchical Bayesian Modeling of the Two-Step Task [Daw2011]_ + using Hybrid Model, with 4 parameters [Daw2011]_, [Wunderlich2012]_ with the following parameters: + "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight). + + + + .. [Daw2011] Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 + .. [Wunderlich2012] Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424. + + .. codeauthor:: Harhim Park + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Two-Step Task, there should be 4 columns of data + with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "level1_choice": Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2). + - "level2_choice": Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6). + Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value. + - "reward": Reward after Level 2 (0 or 1). 
+ + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. 
+
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ For this model, the following model-specific argument can be set to a preferred value:
+
+ - ``trans_prob``: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ts_par4').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ts_par4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return TsPar4(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py
new file mode 100644
index 00000000..a691521a
--- /dev/null
+++ b/Python/hbayesdm/models/_ts_par6.py
@@ -0,0 +1,254 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ts_preprocess_func + +__all__ = ['ts_par6'] + + +class TsPar6(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ts', + model_name='par6', + model_type='', + data_columns=( + 'subjID', + 'level1_choice', + 'level2_choice', + 'reward', + ), + parameters=OrderedDict([ + ('a1', (0, 0.5, 1)), + ('beta1', (0, 1, Inf)), + ('a2', (0, 0.5, 1)), + ('beta2', (0, 1, Inf)), + ('pi', (0, 1, 5)), + ('w', (0, 0.5, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred_step1', 'y_pred_step2'], + parameters_desc=OrderedDict([ + ('a1', 'learning rate in stage 1'), + ('beta1', 'inverse temperature in stage 1'), + ('a2', 'learning rate in stage 2'), + ('beta2', 'inverse temperature in stage 2'), + ('pi', 'perseverance'), + ('w', 'model-based weight'), + ]), + additional_args_desc=OrderedDict([ + ('trans_prob', 0.7), + ]), + **kwargs, + ) + + _preprocess_func = ts_preprocess_func + + +def ts_par6( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Two-Step Task - Hybrid Model, with 6 parameters + + Hierarchical Bayesian Modeling of the Two-Step Task [Daw2011]_ + using Hybrid Model, with 6 parameters [Daw2011]_ with the following parameters: + "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight). + + + + .. [Daw2011] Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 + + + .. codeauthor:: Harhim Park + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Two-Step Task, there should be 4 columns of data + with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "level1_choice": Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2). + - "level2_choice": Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6). + Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value. + - "reward": Reward after Level 2 (0 or 1). + + .. 
note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. 
+
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ For this model, the following model-specific argument can be set to a preferred value:
+
+ - ``trans_prob``: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ts_par6').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ts_par6(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return TsPar6(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py
new file mode 100644
index 00000000..df68dc70
--- /dev/null
+++ b/Python/hbayesdm/models/_ts_par7.py
@@ -0,0 +1,256 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ts_preprocess_func + +__all__ = ['ts_par7'] + + +class TsPar7(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ts', + model_name='par7', + model_type='', + data_columns=( + 'subjID', + 'level1_choice', + 'level2_choice', + 'reward', + ), + parameters=OrderedDict([ + ('a1', (0, 0.5, 1)), + ('beta1', (0, 1, Inf)), + ('a2', (0, 0.5, 1)), + ('beta2', (0, 1, Inf)), + ('pi', (0, 1, 5)), + ('w', (0, 0.5, 1)), + ('lambda', (0, 0.5, 1)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred_step1', 'y_pred_step2'], + parameters_desc=OrderedDict([ + ('a1', 'learning rate in stage 1'), + ('beta1', 'inverse temperature in stage 1'), + ('a2', 'learning rate in stage 2'), + ('beta2', 'inverse temperature in stage 2'), + ('pi', 'perseverance'), + ('w', 'model-based weight'), + ('lambda', 'eligibility trace'), + ]), + additional_args_desc=OrderedDict([ + ('trans_prob', 0.7), + ]), + **kwargs, + ) + + _preprocess_func = ts_preprocess_func + + +def ts_par7( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Two-Step Task - Hybrid Model, with 7 parameters (original model) + + Hierarchical Bayesian Modeling of the Two-Step Task [Daw2011]_ + using Hybrid Model, with 7 parameters (original model) [Daw2011]_ with the following parameters: + "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace). + + + + .. [Daw2011] Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 + + + .. codeauthor:: Harhim Park + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Two-Step Task, there should be 4 columns of data + with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "level1_choice": Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2). + - "level2_choice": Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6). + Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. 
To change this default transition probability, set the function argument `trans_prob` to your preferred value. + - "reward": Reward after Level 2 (0 or 1). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. + inc_postpred + Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. 
+
+ stepsize
+ Integer value specifying the size of each leapfrog step that the MCMC sampler
+ can take on each new iteration. See note below.
+ max_treedepth
+ Integer value specifying how many leapfrog steps the MCMC sampler can take
+ on each new iteration. See note below.
+ **additional_args
+ For this model, the following model-specific argument can be set to a preferred value:
+
+ - ``trans_prob``: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ts_par7').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ts_par7(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return TsPar7(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py
new file mode 100644
index 00000000..9e0d7206
--- /dev/null
+++ b/Python/hbayesdm/models/_ug_bayes.py
@@ -0,0 +1,243 @@
+"""
+Generated by template. Do not edit by hand.
+""" +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import ug_preprocess_func + +__all__ = ['ug_bayes'] + + +class UgBayes(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='ug', + model_name='bayes', + model_type='', + data_columns=( + 'subjID', + 'offer', + 'accept', + ), + parameters=OrderedDict([ + ('alpha', (0, 1, 20)), + ('beta', (0, 0.5, 10)), + ('tau', (0, 1, 10)), + ]), + regressors=OrderedDict([ + + ]), + postpreds=['y_pred'], + parameters_desc=OrderedDict([ + ('alpha', 'envy'), + ('beta', 'guilt'), + ('tau', 'inverse temperature'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = ug_preprocess_func + + +def ug_bayes( + example: bool = False, + datafile: str = None, + data: pd.DataFrame = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'random', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Norm-Training Ultimatum Game - Ideal Observer Model + + Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game + using Ideal Observer Model [Xiang2013]_ with the following parameters: + "alpha" (envy), "beta" (guilt), "tau" (inverse temperature). + + + + + .. [Xiang2013] Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Norm-Training Ultimatum Game, there should be 3 columns of data + with the labels "subjID", "offer", "accept". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "offer": Floating point value representing the offer made in that trial (e.g. 4, 10, 11). + - "accept": 1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "offer", "accept". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "offer", "accept". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. 
+
+ ``nwarmup`` is a numerical value that specifies how many MCMC samples
+ should not be stored upon the beginning of each chain. For those
+ familiar with Bayesian methods, this is equivalent to burn-in samples.
+ Due to the nature of the MCMC algorithm, initial values (i.e., where the
+ sampling chains begin) can have a heavy influence on the generated
+ posterior distributions. The ``nwarmup`` argument can be set to a
+ higher number in order to curb the effects that initial values have on
+ the resulting posteriors.
+ nchain
+ Number of Markov chains to run. Defaults to 4.
+
+ ``nchain`` is a numerical value that specifies how many chains (i.e.,
+ independent sampling sequences) should be used to draw samples from
+ the posterior distribution. Since the posteriors are generated from a
+ sampling process, it is good practice to run multiple chains to ensure
+ that a reasonably representative posterior is attained. When the
+ sampling is complete, it is possible to check the multiple chains for
+ convergence by running the following line of code:
+
+ .. code:: python
+
+ output.plot(type='trace')
+ ncore
+ Number of CPUs to be used for running. Defaults to 1.
+ nthin
+ Every ``nthin``-th sample will be used to generate the posterior
+ distribution. Defaults to 1. A higher number can be used when
+ auto-correlation within the MCMC sampling is high.
+
+ ``nthin`` is a numerical value that specifies the "skipping" behavior
+ of the MCMC sampler. That is, only every ``nthin``-th sample is used to
+ generate posterior distributions. By default, ``nthin`` is equal to 1,
+ meaning that every sample is used to generate the posterior.
+ inits
+ String or list specifying how the initial values should be generated.
+ Options are ``'fixed'`` or ``'random'``, or your own initial values.
+ ind_pars
+ String specifying how to summarize the individual parameters.
+ Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+ model_regressor
+ Whether to export model-based regressors. Currently not available for this model.
+ vb
+ Whether to use variational inference to approximately draw from a
+ posterior distribution. Defaults to ``False``.
+ inc_postpred
+ Include trial-level posterior predictive simulations in
+ model output (may greatly increase file size). Defaults to ``False``.
+ adapt_delta
+ Floating point value representing the target acceptance probability of a new
+ sample in the MCMC chain. Must be between 0 and 1. See note below.
+ stepsize
+ Floating point value specifying the size of each leapfrog step that the MCMC sampler
+ can take on each new iteration. See note below.
+ max_treedepth
+ Integer value specifying the maximum tree depth for the NUTS sampler, which bounds
+ how many leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
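+
+ As a purely illustrative sketch (``df`` stands for a hypothetical
+ DataFrame in the format described above, not part of the generated
+ template), these defaults could be overridden like so:
+
+ .. code:: python
+
+ output = ug_bayes(data=df, adapt_delta=0.99, stepsize=0.5, max_treedepth=12)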
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ug_bayes').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ug_bayes(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return UgBayes(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py
new file mode 100644
index 00000000..4b93f0b3
--- /dev/null
+++ b/Python/hbayesdm/models/_ug_delta.py
@@ -0,0 +1,243 @@
+"""
+Generated by template. Do not edit by hand.
+"""
+from typing import Sequence, Union, Any
+from collections import OrderedDict
+
+from numpy import Inf, exp
+import pandas as pd
+
+from hbayesdm.base import TaskModel
+from hbayesdm.preprocess_funcs import ug_preprocess_func
+
+__all__ = ['ug_delta']
+
+
+class UgDelta(TaskModel):
+ def __init__(self, **kwargs):
+ super().__init__(
+ task_name='ug',
+ model_name='delta',
+ model_type='',
+ data_columns=(
+ 'subjID',
+ 'offer',
+ 'accept',
+ ),
+ parameters=OrderedDict([
+ ('alpha', (0, 1, 20)),
+ ('tau', (0, 1, 10)),
+ ('ep', (0, 0.5, 1)),
+ ]),
+ regressors=OrderedDict([
+
+ ]),
+ postpreds=['y_pred'],
+ parameters_desc=OrderedDict([
+ ('alpha', 'envy'),
+ ('tau', 'inverse temperature'),
+ ('ep', 'norm adaptation rate'),
+ ]),
+ additional_args_desc=OrderedDict([
+
+ ]),
+ **kwargs,
+ )
+
+ _preprocess_func = ug_preprocess_func
+
+
+def ug_delta(
+ example: bool = False,
+ datafile: str = None,
+ data: pd.DataFrame = None,
+ niter: int = 4000,
+ nwarmup: int = 1000,
+ nchain: int = 4,
+ ncore: int = 1,
+ nthin: int = 1,
+ inits: Union[str, Sequence[float]] = 'random',
+ ind_pars: str = 'mean',
+ model_regressor: bool = False,
+ vb: bool = False,
+ inc_postpred: bool = False,
+ adapt_delta: float = 0.95,
+ stepsize: float = 1,
+ max_treedepth: int = 10,
+ **additional_args: Any) -> TaskModel:
+ """Norm-Training Ultimatum Game - Rescorla-Wagner (Delta) Model
+
+ Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game
+ using Rescorla-Wagner (Delta) Model [Gu2015]_ with the following parameters:
+ "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate).
+
+
+
+
+ .. [Gu2015] Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015).
Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015 + + + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Norm-Training Ultimatum Game, there should be 3 columns of data + with the labels "subjID", "offer", "accept". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "offer": Floating point value representing the offer made in that trial (e.g. 4, 10, 11). + - "accept": 1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0). + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "offer", "accept". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "offer", "accept". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. 
By default, ``nthin`` is equal to 1,
+ meaning that every sample is used to generate the posterior.
+ inits
+ String or list specifying how the initial values should be generated.
+ Options are ``'fixed'`` or ``'random'``, or your own initial values.
+ ind_pars
+ String specifying how to summarize the individual parameters.
+ Current options are: ``'mean'``, ``'median'``, or ``'mode'``.
+ model_regressor
+ Whether to export model-based regressors. Currently not available for this model.
+ vb
+ Whether to use variational inference to approximately draw from a
+ posterior distribution. Defaults to ``False``.
+ inc_postpred
+ Include trial-level posterior predictive simulations in
+ model output (may greatly increase file size). Defaults to ``False``.
+ adapt_delta
+ Floating point value representing the target acceptance probability of a new
+ sample in the MCMC chain. Must be between 0 and 1. See note below.
+ stepsize
+ Floating point value specifying the size of each leapfrog step that the MCMC sampler
+ can take on each new iteration. See note below.
+ max_treedepth
+ Integer value specifying the maximum tree depth for the NUTS sampler, which bounds
+ how many leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('ug_delta').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = ug_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return UgDelta(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py
new file mode 100644
index 00000000..258a8e36
--- /dev/null
+++ b/Python/hbayesdm/models/_wcs_sql.py
@@ -0,0 +1,243 @@
+"""
+Generated by template. Do not edit by hand.
+"""
+from typing import Sequence, Union, Any
+from collections import OrderedDict
+
+from numpy import Inf, exp
+import pandas as pd
+
+from hbayesdm.base import TaskModel
+from hbayesdm.preprocess_funcs import wcs_preprocess_func
+
+__all__ = ['wcs_sql']
+
+
+class WcsSql(TaskModel):
+ def __init__(self, **kwargs):
+ super().__init__(
+ task_name='wcs',
+ model_name='sql',
+ model_type='',
+ data_columns=(
+ 'subjID',
+ 'choice',
+ 'outcome',
+ ),
+ parameters=OrderedDict([
+ ('r', (0, 0.1, 1)),
+ ('p', (0, 0.1, 1)),
+ ('d', (0, 1, 5)),
+ ]),
+ regressors=OrderedDict([
+
+ ]),
+ postpreds=['y_pred'],
+ parameters_desc=OrderedDict([
+ ('r', 'reward sensitivity'),
+ ('p', 'punishment sensitivity'),
+ ('d', 'decision consistency or inverse temperature'),
+ ]),
+ additional_args_desc=OrderedDict([
+
+ ]),
+ **kwargs,
+ )
+
+ _preprocess_func = wcs_preprocess_func
+
+
+def wcs_sql(
+ example: bool = False,
+ datafile: str = None,
+ data: pd.DataFrame = None,
+ niter: int = 4000,
+ nwarmup: int = 1000,
+ nchain: int = 4,
+ ncore: int = 1,
+ nthin: int = 1,
+ inits: Union[str, Sequence[float]] = 'random',
+ ind_pars: str = 'mean',
+ model_regressor: bool = False,
+ vb: bool = False,
+ inc_postpred: bool = False,
+ adapt_delta: float = 0.95,
+ stepsize: float = 1,
+ max_treedepth: int = 10,
+ **additional_args: Any) -> TaskModel:
+ """Wisconsin Card Sorting Task - Sequential Learning Model
+
+ Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task
+ using Sequential Learning Model [Bishara2010]_ with the following parameters:
+ "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature).
+
+
+
+
+ .. [Bishara2010] Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.
+
+ .. codeauthor:: Dayeong Min
+
+ User data should contain the behavioral data-set of all subjects of interest for
+ the current analysis. When loading from a file, the datafile should be a
+ **tab-delimited** text file, whose rows represent trial-by-trial observations
+ and columns represent variables.
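+
+ As an illustration only (all values below are hypothetical), a compatible
+ data object can also be assembled directly in pandas, using the three
+ columns described below:
+
+ .. code:: python
+
+ import pandas as pd
+
+ # Two hypothetical subjects with a few trials each
+ df = pd.DataFrame({
+ 'subjID': ['s01', 's01', 's02', 's02'],
+ 'choice': [1, 3, 2, 4],
+ 'outcome': [1, 0, 1, 0],
+ })
+ # df could then be passed as: wcs_sql(data=df)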
+ + For the Wisconsin Card Sorting Task, there should be 3 columns of data + with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. + - "choice": Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4. + - "outcome": 1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0. + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "choice", "outcome". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. Currently not available for this model. + vb + Whether to use variational inference to approximately draw from a + posterior distribution. 
Defaults to ``False``.
+ inc_postpred
+ Include trial-level posterior predictive simulations in
+ model output (may greatly increase file size). Defaults to ``False``.
+ adapt_delta
+ Floating point value representing the target acceptance probability of a new
+ sample in the MCMC chain. Must be between 0 and 1. See note below.
+ stepsize
+ Floating point value specifying the size of each leapfrog step that the MCMC sampler
+ can take on each new iteration. See note below.
+ max_treedepth
+ Integer value specifying the maximum tree depth for the NUTS sampler, which bounds
+ how many leapfrog steps the MCMC sampler can take on each new iteration. See note below.
+ **additional_args
+ Not used for this model.
+
+ .. note::
+
+ ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that
+ give the user more control over Stan's MCMC sampler. It is recommended that
+ only advanced users change the default values, as alterations can profoundly
+ change the sampler's behavior. See [Hoffman2014]_ for more information on the
+ sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm
+ Parameters' of the `Stan User's Guide and Reference Manual`__.
+
+ .. [Hoffman2014]
+ Hoffman, M. D., & Gelman, A. (2014).
+ The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo.
+ Journal of Machine Learning Research, 15(1), 1593-1623.
+
+ __ http://mc-stan.org/users/documentation/
+
+ Returns
+ -------
+ model_data
+ An ``hbayesdm.TaskModel`` instance with the following components:
+
+ - ``model``: String value that is the name of the model ('wcs_sql').
+ - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values
+ (as specified by ``ind_pars``) for each subject.
+ - ``par_vals``: OrderedDict holding the posterior samples over different parameters.
+ - ``fit``: A PyStan StanFit object that contains the fitted Stan model.
+ - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model,
+ as specified by the user.
+
+
+ Examples
+ --------
+
+ .. code:: python
+
+ # Run the model and store results in "output"
+ output = wcs_sql(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4)
+
+ # Visually check convergence of the sampling chains (should look like "hairy caterpillars")
+ output.plot(type='trace')
+
+ # Check Rhat values (all Rhat values should be less than or equal to 1.1)
+ rhat(output, less=1.1)
+
+ # Plot posterior distributions of the hyper-parameters (distributions should be unimodal)
+ output.plot()
+
+ # Show the LOOIC and WAIC model fit estimates
+ print_fit(output)
+ """
+ return WcsSql(
+ example=example,
+ datafile=datafile,
+ data=data,
+ niter=niter,
+ nwarmup=nwarmup,
+ nchain=nchain,
+ ncore=ncore,
+ nthin=nthin,
+ inits=inits,
+ ind_pars=ind_pars,
+ model_regressor=model_regressor,
+ vb=vb,
+ inc_postpred=inc_postpred,
+ adapt_delta=adapt_delta,
+ stepsize=stepsize,
+ max_treedepth=max_treedepth,
+ **additional_args)
diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py
new file mode 100644
index 00000000..72ed1305
--- /dev/null
+++ b/Python/hbayesdm/preprocess_funcs.py
@@ -0,0 +1,854 @@
+import os
+import numpy as np
+import pandas as pd
+
+from hbayesdm.base import _common
+
+
+def bandit2arm_preprocess_func(self, raw_data, general_info, additional_args):
+ # Iterate through grouped_data
+ subj_group = iter(general_info['grouped_data'])
+
+ # Use general_info(s) about raw_data
+ # subjs = general_info['subjs']
+ n_subj = general_info['n_subj']
+ t_subjs = general_info['t_subjs']
+ t_max = general_info['t_max']
+
+ # Initialize (model-specific) data arrays
+ choice = np.full((n_subj, t_max), -1, dtype=int)
+ outcome = np.full((n_subj, t_max), 0, dtype=float)
+
+ # Write from subj_data to the data arrays
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ t = t_subjs[s]
+ choice[s][:t] = subj_data['choice']
+ outcome[s][:t] = subj_data['outcome']
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'T': t_max,
+ 'Tsubj': t_subjs,
+ 'choice': choice,
+ 'outcome': outcome,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
+
+
+def bandit4arm_preprocess_func(self, raw_data, general_info, additional_args):
+ # Iterate through grouped_data
+ subj_group = iter(general_info['grouped_data'])
+
+ # Use general_info(s) about raw_data
+ # subjs = general_info['subjs']
+ n_subj = general_info['n_subj']
+ t_subjs = general_info['t_subjs']
+ t_max = general_info['t_max']
+
+ # Initialize (model-specific) data arrays
+ rew = np.full((n_subj, t_max), 0, dtype=float)
+ los = np.full((n_subj, t_max), 0, dtype=float)
+ choice = np.full((n_subj, t_max), -1, dtype=int)
+
+ # Write from subj_data to the data arrays
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ t = t_subjs[s]
+ rew[s][:t] = subj_data['gain']
+ los[s][:t] = -1 * np.abs(subj_data['loss']) # Use abs
+ choice[s][:t] = subj_data['choice']
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'T': t_max,
+ 'Tsubj': t_subjs,
+ 'rew': rew,
+ 'los': los,
+ 'choice': choice,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
+
+
+def bandit4arm2_preprocess_func(self, raw_data, general_info, additional_args):
+ # Iterate through grouped_data
+ subj_group = iter(general_info['grouped_data'])
+
+ # Use general_info(s) about raw_data
+ # subjs = general_info['subjs']
+ n_subj = general_info['n_subj']
+ t_subjs = general_info['t_subjs']
+ t_max = general_info['t_max']
+
+ # Initialize (model-specific) data arrays
+ choice = np.full((n_subj, t_max), -1, dtype=int)
+ outcome = np.full((n_subj, t_max), 0, dtype=float)
+
+ # Write from subj_data to the data arrays
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ t = t_subjs[s]
+ choice[s][:t] = subj_data['choice']
+ outcome[s][:t] = subj_data['outcome']
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'T': t_max,
+ 'Tsubj': t_subjs,
+ 'choice': choice,
+ 'outcome': outcome,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
+
+
+def bart_preprocess_func(self, raw_data, general_info, additional_args):
+ # Iterate through grouped_data
+ subj_group = iter(general_info['grouped_data'])
+
+ # Use general_info(s) about raw_data
+ # subjs = general_info['subjs']
+ n_subj = general_info['n_subj']
+ t_subjs = general_info['t_subjs']
+ t_max = general_info['t_max']
+
+ # Initialize (model-specific) data arrays
+ pumps = np.full((n_subj, t_max), 0, dtype=int)
+ explosion = np.full((n_subj, t_max), 0, dtype=int)
+
+ # Write from subj_data to the data arrays
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ t = t_subjs[s]
+ pumps[s][:t] = subj_data['pumps']
+ explosion[s][:t] = subj_data['explosion']
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'T': t_max,
+ 'Tsubj': t_subjs,
+ 'P': np.max(pumps) + 1, # global max over the 2-D array (built-in max is ambiguous here)
+ 'pumps': pumps,
+ 'explosion': explosion,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
+
+
+def choiceRT_preprocess_func(self, raw_data, general_info, additional_args):
+ # Use general_info(s) about raw_data
+ # subjs = general_info['subjs']
+ n_subj = general_info['n_subj']
+
+ # Number of upper/lower boundary responses
+ Nu = np.full(n_subj, 0, dtype=int)
+ Nl = np.full(n_subj, 0, dtype=int)
+
+ # Write Nu, Nl
+ subj_group = iter(general_info['grouped_data'])
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ value_counts = subj_data['choice'].value_counts()
+ Nu[s] = value_counts.at[2]
+ Nl[s] = value_counts.at[1]
+
+ # Reaction-times for upper/lower boundary responses
+ RTu = np.full((n_subj, max(Nu)), -1, dtype=float)
+ RTl = np.full((n_subj, max(Nl)), -1, dtype=float)
+
+ # Write RTu, RTl
+ subj_group = iter(general_info['grouped_data'])
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ RTu[s][:Nu[s]] = subj_data['rt'][subj_data['choice'] == 2]
+ RTl[s][:Nl[s]] = subj_data['rt'][subj_data['choice'] == 1]
+
+ # Minimum reaction time
+ minRT = np.full(n_subj, -1, dtype=float)
+
+ # Write minRT
+ subj_group = iter(general_info['grouped_data'])
+ for s in range(n_subj):
+ _, subj_data = next(subj_group)
+ minRT[s] = min(subj_data['rt'])
+
+ # Use additional_args if provided
+ RTbound = additional_args.get('RTbound', 0.1)
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'Nu_max': max(Nu),
+ 'Nl_max': max(Nl),
+ 'Nu': Nu,
+ 'Nl': Nl,
+ 'RTu': RTu,
+ 'RTl': RTl,
+ 'minRT': minRT,
+ 'RTbound': RTbound,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
+
+
+def choiceRT_single_preprocess_func(self, raw_data, general_info, additional_args):
+ # DataFrames per upper/lower boundary responses
+ df_upper = raw_data.loc[raw_data['choice'] == 2]
+ df_lower = raw_data.loc[raw_data['choice'] == 1]
+
+ # Number of upper/lower boundary responses
+ Nu = len(df_upper)
+ Nl = len(df_lower)
+
+ # Reaction-times for upper/lower boundary responses
+ RTu = df_upper['rt'].to_numpy()
+ RTl = df_lower['rt'].to_numpy()
+
+ # Minimum reaction time
+ minRT = min(raw_data['rt'])
+
+ # Use additional_args
if provided + RTbound = additional_args.get('RTbound', 0.1) + + # Wrap into a dict for pystan + data_dict = { + 'Nu': Nu, + 'Nl': Nl, + 'RTu': RTu, + 'RTl': RTl, + 'minRT': minRT, + 'RTbound': RTbound, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def cra_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + choice = np.full((n_subj, t_max), 0, dtype=int) + prob = np.full((n_subj, t_max), 0, dtype=float) + ambig = np.full((n_subj, t_max), 0, dtype=float) + reward_var = np.full((n_subj, t_max), 0, dtype=float) + reward_fix = np.full((n_subj, t_max), 0, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + choice[s][:t] = subj_data['choice'] + prob[s][:t] = subj_data['prob'] + ambig[s][:t] = subj_data['ambig'] + reward_var[s][:t] = subj_data['rewardvar'] + reward_fix[s][:t] = subj_data['rewardfix'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'choice': choice, + 'prob': prob, + 'ambig': ambig, + 'reward_var': reward_var, + 'reward_fix': reward_fix, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def dbdm_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + opt1hprob = np.full((n_subj, t_max), 0, dtype=float) + opt2hprob = np.full((n_subj, t_max), 0, dtype=float) + opt1hval = np.full((n_subj, t_max), 0, dtype=float) + opt1lval = np.full((n_subj, t_max), 0, dtype=float) + opt2hval = np.full((n_subj, t_max), 0, dtype=float) + opt2lval = np.full((n_subj, t_max), 0, dtype=float) + choice = np.full((n_subj, t_max), -1, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + opt1hprob[s][:t] = subj_data['opt1hprob'] + opt2hprob[s][:t] = subj_data['opt2hprob'] + opt1hval[s][:t] = subj_data['opt1hval'] + opt1lval[s][:t] = subj_data['opt1lval'] + opt2hval[s][:t] = subj_data['opt2hval'] + opt2lval[s][:t] = subj_data['opt2lval'] + choice[s][:t] = subj_data['choice'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'opt1hprob': opt1hprob, + 'opt2hprob': opt2hprob, + 'opt1hval': opt1hval, + 'opt1lval': opt1lval, + 'opt2hval': opt2hval, + 'opt2lval': opt2lval, + 'choice': choice, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def dd_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + delay_later = np.full((n_subj, t_max), 0, dtype=float) + amount_later = np.full((n_subj, t_max), 0, 
dtype=float) + delay_sooner = np.full((n_subj, t_max), 0, dtype=float) + amount_sooner = np.full((n_subj, t_max), 0, dtype=float) + choice = np.full((n_subj, t_max), -1, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + delay_later[s][:t] = subj_data['delaylater'] + amount_later[s][:t] = subj_data['amountlater'] + delay_sooner[s][:t] = subj_data['delaysooner'] + amount_sooner[s][:t] = subj_data['amountsooner'] + choice[s][:t] = subj_data['choice'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'delay_later': delay_later, + 'amount_later': amount_later, + 'delay_sooner': delay_sooner, + 'amount_sooner': amount_sooner, + 'choice': choice, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def dd_single_preprocess_func(self, raw_data, general_info, additional_args): + # Use general_info about raw_data + t_subjs = general_info['t_max'] # Note: use 't_max' not 't_subjs' + + # Extract from raw_data + delay_later = raw_data['delaylater'] + amount_later = raw_data['amountlater'] + delay_sooner = raw_data['delaysooner'] + amount_sooner = raw_data['amountsooner'] + choice = raw_data['choice'] + + # Wrap into a dict for pystan + data_dict = { + 'Tsubj': t_subjs, + 'delay_later': delay_later, + 'amount_later': amount_later, + 'delay_sooner': delay_sooner, + 'amount_sooner': amount_sooner, + 'choice': choice, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def gng_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + cue = np.full((n_subj, t_max), 1, dtype=int) + pressed = np.full((n_subj, t_max), -1, dtype=int) + outcome = np.full((n_subj, t_max), 0, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + cue[s][:t] = subj_data['cue'] + pressed[s][:t] = subj_data['keypressed'] + outcome[s][:t] = subj_data['outcome'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'cue': cue, + 'pressed': pressed, + 'outcome': outcome, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def igt_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + y_data = np.full((n_subj, t_max), -1, dtype=int) + rl_matrix = np.full((n_subj, t_max), 0, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + y_data[s][:t] = subj_data['choice'] + rl_matrix[s][:t] = subj_data['gain'] - np.abs(subj_data['loss']) + + # Use additional_args if provided + payscale = additional_args.get('payscale', 100) + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'choice': y_data, + 'outcome': rl_matrix / payscale, + 
'sign_out': np.sign(rl_matrix), + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def peer_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + condition = np.full((n_subj, t_max), 0, dtype=int) + p_gamble = np.full((n_subj, t_max), 0, dtype=float) + safe_Hpayoff = np.full((n_subj, t_max), 0, dtype=float) + safe_Lpayoff = np.full((n_subj, t_max), 0, dtype=float) + risky_Hpayoff = np.full((n_subj, t_max), 0, dtype=float) + risky_Lpayoff = np.full((n_subj, t_max), 0, dtype=float) + choice = np.full((n_subj, t_max), -1, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + condition[s][:t] = subj_data['condition'] + p_gamble[s][:t] = subj_data['pgamble'] + safe_Hpayoff[s][:t] = subj_data['safehpayoff'] + safe_Lpayoff[s][:t] = subj_data['safelpayoff'] + risky_Hpayoff[s][:t] = subj_data['riskyhpayoff'] + risky_Lpayoff[s][:t] = subj_data['riskylpayoff'] + choice[s][:t] = subj_data['choice'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'condition': condition, + 'p_gamble': p_gamble, + 'safe_Hpayoff': safe_Hpayoff, + 'safe_Lpayoff': safe_Lpayoff, + 'risky_Hpayoff': risky_Hpayoff, + 'risky_Lpayoff': risky_Lpayoff, + 'choice': choice, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def prl_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + choice = np.full((n_subj, t_max), -1, dtype=int) + outcome = np.full((n_subj, t_max), 0, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + choice[s][:t] = subj_data['choice'] + outcome[s][:t] = np.sign(subj_data['outcome']) # Use sign + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'choice': choice, + 'outcome': outcome, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def prl_multipleB_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_block_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + b_subjs = general_info['b_subjs'] + b_max = general_info['b_max'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + choice = np.full((n_subj, b_max, t_max), -1, dtype=int) + outcome = np.full((n_subj, b_max, t_max), 0, dtype=float) + + # Write from subj_block_data to the data arrays + for s in range(n_subj): + for b in range(b_subjs[s]): + _, subj_block_data = next(subj_block_group) + t = t_subjs[s][b] + choice[s][b][:t] = subj_block_data['choice'] + outcome[s][b][:t] = np.sign(subj_block_data['outcome']) # Use sign + + # Wrap into a dict for 
pystan + data_dict = { + 'N': n_subj, + 'B': b_max, + 'Bsubj': b_subjs, + 'T': t_max, + 'Tsubj': t_subjs, + 'choice': choice, + 'outcome': outcome, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def pst_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + option1 = np.full((n_subj, t_max), -1, dtype=int) + option2 = np.full((n_subj, t_max), -1, dtype=int) + choice = np.full((n_subj, t_max), -1, dtype=int) + reward = np.full((n_subj, t_max), -1, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + option1[s][:t] = subj_data['type'] // 10 + option2[s][:t] = subj_data['type'] % 10 + choice[s][:t] = subj_data['choice'] + reward[s][:t] = subj_data['reward'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'option1': option1, + 'option2': option2, + 'choice': choice, + 'reward': reward, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def ra_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + gain = np.full((n_subj, t_max), 0, dtype=float) + loss = np.full((n_subj, t_max), 0, dtype=float) + cert = np.full((n_subj, t_max), 0, dtype=float) + gamble = np.full((n_subj, t_max), -1, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + gain[s][:t] = subj_data['gain'] + loss[s][:t] = np.abs(subj_data['loss']) # Use abs + cert[s][:t] = subj_data['cert'] + gamble[s][:t] = subj_data['gamble'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'gain': gain, + 'loss': loss, + 'cert': cert, + 'gamble': gamble, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def rdt_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + gain = np.full((n_subj, t_max), 0, dtype=float) + loss = np.full((n_subj, t_max), 0, dtype=float) + cert = np.full((n_subj, t_max), 0, dtype=float) + type = np.full((n_subj, t_max), -1, dtype=int) + gamble = np.full((n_subj, t_max), -1, dtype=int) + outcome = np.full((n_subj, t_max), 0, dtype=float) + happy = np.full((n_subj, t_max), 0, dtype=float) + RT_happy = np.full((n_subj, t_max), 0, dtype=float) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + gain[s][:t] = subj_data['gain'] + loss[s][:t] = np.abs(subj_data['loss']) # Use abs + cert[s][:t] = subj_data['cert'] + type[s][:t] = subj_data['type'] + 
gamble[s][:t] = subj_data['gamble'] + outcome[s][:t] = subj_data['outcome'] + happy[s][:t] = subj_data['happy'] + RT_happy[s][:t] = subj_data['rthappy'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'gain': gain, + 'loss': loss, + 'cert': cert, + 'type': type, + 'gamble': gamble, + 'outcome': outcome, + 'happy': happy, + 'RT_happy': RT_happy, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def ts_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + level1_choice = np.full((n_subj, t_max), 1, dtype=int) + level2_choice = np.full((n_subj, t_max), 1, dtype=int) + reward = np.full((n_subj, t_max), 0, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + level1_choice[s][:t] = subj_data['level1choice'] + level2_choice[s][:t] = subj_data['level2choice'] + reward[s][:t] = subj_data['reward'] + + # Use additional_args if provided + trans_prob = additional_args.get('trans_prob', 0.7) + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'level1_choice': level1_choice, + 'level2_choice': level2_choice, + 'reward': reward, + 'trans_prob': trans_prob, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def ug_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + t_max = general_info['t_max'] + + # Initialize (model-specific) data arrays + offer = np.full((n_subj, t_max), 0, dtype=float) + accept = np.full((n_subj, t_max), -1, dtype=int) + + # Write from subj_data to the data arrays + for s in range(n_subj): + _, subj_data = next(subj_group) + t = t_subjs[s] + offer[s][:t] = subj_data['offer'] + accept[s][:t] = subj_data['accept'] + + # Wrap into a dict for pystan + data_dict = { + 'N': n_subj, + 'T': t_max, + 'Tsubj': t_subjs, + 'offer': offer, + 'accept': accept, + } + + # Returned data_dict will directly be passed to pystan + return data_dict + + +def wcs_preprocess_func(self, raw_data, general_info, additional_args): + # Iterate through grouped_data + subj_group = iter(general_info['grouped_data']) + + # Use general_info(s) about raw_data + # subjs = general_info['subjs'] + n_subj = general_info['n_subj'] + t_subjs = general_info['t_subjs'] + # t_max = general_info['t_max'] + t_max = 128 + + # Read from predefined answer sheet + answersheet = os.path.join(_common, 'extdata', 'wcs_answersheet.txt') + answer = pd.read_csv( + answersheet, sep='\t', header=0, index_col=0).to_numpy() - 1 + + # Initialize data arrays + choice = np.full((n_subj, 4, t_max), 0, dtype=int) + outcome = np.full((n_subj, t_max), -1, dtype=int) + choice_match_att = np.full((n_subj, t_max, 1, 3), 0, dtype=int) + deck_match_rule = np.full((t_max, 3, 4), 0, dtype=float) + + # Write choice, outcome, choice_match_att + for s in range(n_subj): + trials = t_subjs[s] + _, subj_data = next(subj_group) + subj_data_choice = 
subj_data['choice'].to_numpy() - 1
+ subj_data_outcome = subj_data['outcome'].to_numpy()
+ for t in range(trials):
+ c = subj_data_choice[t]
+ o = subj_data_outcome[t]
+ choice[s][c][t] = 1
+ outcome[s][t] = o
+ choice_match_att[s][t][0][:] = (c == answer[:, t])
+
+ # Write deck_match_rule
+ for t in range(t_max):
+ for r in range(3):
+ deck_match_rule[t][r][answer[r][t]] = 1
+
+ # Wrap into a dict for pystan
+ data_dict = {
+ 'N': n_subj,
+ 'T': t_max,
+ 'Tsubj': t_subjs,
+ 'choice': choice,
+ 'outcome': outcome,
+ 'choice_match_att': choice_match_att,
+ 'deck_match_rule': deck_match_rule,
+ }
+
+ # Returned data_dict will directly be passed to pystan
+ return data_dict
diff --git a/Python/setup.cfg b/Python/setup.cfg
new file mode 100644
index 00000000..aedd5d81
--- /dev/null
+++ b/Python/setup.cfg
@@ -0,0 +1,40 @@
+[flake8]
+ignore =
+ # F401: Package imported but unused
+ F401,
+ # F403: 'from module import *' used
+ F403
+max-line-length = 79
+
+[pylint]
+disable =
+ # C0103: Argument name doesn't conform to snake_case naming style
+ C0103,
+ # R0205: Class inherits from object
+ R0205,
+ # R0902: Too many instance attributes
+ R0902,
+ # R0903: Too few public methods
+ R0903,
+ # R0913: Too many arguments
+ R0913,
+ # W0401: Wildcard import
+ W0401,
+ # W0611: Unused import
+ W0611
+max-line-length = 79
+
+[mypy]
+show_column_numbers = True
+show_error_context = True
+follow_imports = skip
+cache_dir = /dev/null
+ignore_missing_imports = True
+disallow_untyped_calls = False
+warn_return_any = False
+strict_optional = True
+warn_no_return = True
+warn_redundant_casts = False
+warn_unused_ignores = False
+disallow_untyped_defs = False
+check_untyped_defs = False
diff --git a/Python/setup.py b/Python/setup.py
new file mode 100644
index 00000000..9d5d7de8
--- /dev/null
+++ b/Python/setup.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+import os
+import sys
+import subprocess
+from setuptools import setup, find_packages
+
+if sys.version_info[:2] < (3, 5):
+ raise RuntimeError("Python version >= 3.5 required.")
+
+
+MAJOR = 0
+MINOR = 7
+MICRO = 2
+ISRELEASED = False
+VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
+
+
+def git_version():
+ """
+ Return the git revision as a string
+ """
+ def _minimal_ext_cmd(cmd):
+ # construct minimal environment
+ env = {}
+ for k in ['SYSTEMROOT', 'PATH', 'HOME']:
+ v = os.environ.get(k)
+ if v is not None:
+ env[k] = v
+ # LANGUAGE is used on win32
+ env['LANGUAGE'] = 'C'
+ env['LANG'] = 'C'
+ env['LC_ALL'] = 'C'
+ out = subprocess.Popen(
+ cmd, stdout=subprocess.PIPE, env=env).communicate()[0]
+ return out
+
+ try:
+ out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
+ GIT_REVISION = out.strip().decode('ascii')
+ except OSError:
+ GIT_REVISION = "Unknown"
+
+ return GIT_REVISION
+
+
+def get_version_info():
+ # Adding the git rev number needs to be done inside write_version_py(),
+ # otherwise the import of hbayesdm.version messes up the build under Python 3.
+ FULLVERSION = VERSION
+ if os.path.exists('.git'):
+ GIT_REVISION = git_version()
+ elif os.path.exists('hbayesdm/version.py'):
+ # must be a source distribution, use existing version file
+ try:
+ from hbayesdm.version import git_revision as GIT_REVISION
+ except ImportError:
+ raise ImportError("Unable to import git_revision. 
Try removing " + "hbayesdm/version.py and the build directory " + "before building.") + else: + GIT_REVISION = "Unknown" + + if not ISRELEASED: + FULLVERSION += '.dev0+' + GIT_REVISION[:7] + + return FULLVERSION, GIT_REVISION + + +def write_version_py(filename='hbayesdm/version.py'): + cnt = """ +# THIS FILE IS GENERATED FROM HBAYESDM SETUP.PY +short_version = '%(version)s' +version = '%(version)s' +full_version = '%(full_version)s' +git_revision = '%(git_revision)s' +release = %(isrelease)s +if not release: + version = full_version +""" + FULLVERSION, GIT_REVISION = get_version_info() + + a = open(filename, 'w') + try: + a.write(cnt % {'version': VERSION, + 'full_version': FULLVERSION, + 'git_revision': GIT_REVISION, + 'isrelease': str(ISRELEASED)}) + finally: + a.close() + + +write_version_py() + + +DESC = 'Python interface for hBayesDM, hierarchical Bayesian modeling of RL-DM tasks' +with open('README.rst', 'r', encoding='utf-8') as f: + LONG_DESC = f.read() +LONG_DESC_TYPE = 'text/restructuredtext' +AUTHOR = 'hBayesDM Developers' +AUTHOR_EMAIL = 'hbayesdm-users@googlegroups.com' +URL = 'https://github.com/CCS-Lab/hBayesDM-py' +LICENSE = 'GPLv3' +CLASSIFIERS = [ + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', + 'Operating System :: OS Independent', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: Scientific/Engineering', +] + +setup( + name='hbayesdm', + version=VERSION, + author='hBayesDM Developers', + author_email='hbayesdm-users@googlegroups.com', + description=DESC, + long_description=LONG_DESC, + long_description_content_type=LONG_DESC_TYPE, + python_requires='>=3.5', + url=URL, + license=LICENSE, + classifiers=CLASSIFIERS, + packages=find_packages(), + install_requires=[ + 'numpy', + 'scipy', + 'pandas', + 'pystan', + 'matplotlib', + 'arviz', + ], + zip_safe=False, + include_package_data=True, +) From 8e244fd93ff59ac6c4f2da74ed5730db8f180644 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Wed, 1 May 2019 19:18:25 +0900 Subject: [PATCH 011/163] Update .gitignore --- .gitignore | 318 ++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 313 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index ab9d883d..6b41936f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,14 +1,275 @@ ## MEMO: While gitignore uses shell glob, Rbuildignore uses regex. 
-# RStudio files -.Rproj.user +# Created by https://www.gitignore.io/api/r,linux,macos,python,windows,pycharm,jupyternotebook,visualstudiocode +# Edit at https://www.gitignore.io/?templates=r,linux,macos,python,windows,pycharm,jupyternotebook,visualstudiocode + +### JupyterNotebook ### +.ipynb_checkpoints +*/.ipynb_checkpoints/* + +# Remove previous ipynb_checkpoints +# git rm -r .ipynb_checkpoints/ +# + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/modules.xml +# .idea/*.iml +# .idea/modules + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +# JetBrains templates +**___jb_tmp___ + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +.idea/sonarlint + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don’t work, or not +# install all needed dependencies. +#Pipfile.lock + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +### R ### # History files .Rhistory .Rapp.history # Session Data files .RData + +# User-specific files .Ruserdata # Example code in package build process @@ -20,6 +281,9 @@ # Output files from R CMD check /*.Rcheck/ +# RStudio files +.Rproj.user/ + # produced vignettes vignettes/*.html vignettes/*.pdf @@ -35,13 +299,57 @@ vignettes/*.pdf *.utf8.md *.knit.md -# Source files (new) +### R.Bookdown Stack ### +# R package: bookdown caching files +/*_files/ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# End of https://www.gitignore.io/api/r,linux,macos,python,windows,pycharm,jupyternotebook,visualstudiocode + +# R-related *.o *.o.tmp *.cc *.hpp *.so *.dll - -# CRAN-RELEASE CRAN-RELEASE + +# Py-related +hbayesdm/version.py +*.pkl From 497f0a1c5ee0f4fcb4e75c7864941a3bba56ada9 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Thu, 2 May 2019 15:36:05 +0900 Subject: [PATCH 012/163] Update .gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6b41936f..5d144e97 100644 --- a/.gitignore +++ b/.gitignore @@ -351,5 +351,5 @@ $RECYCLE.BIN/ CRAN-RELEASE # Py-related -hbayesdm/version.py +Python/hbayesdm/version.py *.pkl From eadefeb18aa08ed0317f51102a7257844a53bf94 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sun, 30 Jun 2019 14:19:34 +0900 Subject: [PATCH 013/163] Update .editorconfig to include py, json, Makefile --- .editorconfig | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.editorconfig b/.editorconfig index aad7e443..adde1695 100644 --- a/.editorconfig +++ b/.editorconfig @@ -8,6 +8,9 @@ insert_final_newline = true indent_style = space trim_trailing_whitespace = true +[Makefile] +indent_style = tab + [*.md] indent_size = 4 trim_trailing_whitespace = false @@ -17,3 +20,9 @@ 
indent_size = 2 [*.{c,h,cpp,hpp}] indent_size = 4 + +[*.py] +indent_size = 4 + +[*.json] +indent_size = 2 From 26c750dc7e26f3020827c8dd8fdd52b7b16e4b38 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Mon, 1 Jul 2019 00:04:17 +0900 Subject: [PATCH 014/163] Combine travis.yml to work for both R & Py --- .travis.yml | 122 +++++++++++++++++++++++++++++---------- Python/tests/test_tmp.py | 3 + 2 files changed, 95 insertions(+), 30 deletions(-) create mode 100644 Python/tests/test_tmp.py diff --git a/.travis.yml b/.travis.yml index c0c2bc08..44080ab8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,8 @@ -language: r -sudo: false +# Use cpp to enable both R & Python +language: cpp + +# NOTE: was `false` +sudo: required branches: only: @@ -9,18 +12,12 @@ branches: - /hotfix\/.*/ - /bugfix\/.*/ -r_build_args: '--no-build-vignettes' -r_check_args: '--ignore-vignettes' - # Use cache for packages cache: apt: true packages: true ccache: true - -env: - global: - - MAKEFLAGS="-j 2" + pip: true matrix: include: @@ -35,8 +32,38 @@ matrix: - gcc-7 - g++-7 - gfortran-7 + - libcurl4-openssl-dev + - libxml2-dev env: + - MAKEFLAGS="-j 2" - MATRIX_EVAL="CC=gcc-7 && CXX=g++-7" + before_install: + - sudo apt-get update + - sudo sh -c 'echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list' + - gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9 + - gpg -a --export E084DAB9 | sudo apt-key add - + - sudo apt-get update + - sudo apt-get install gcc + - sudo apt-get install r-base-core + - sudo apt-get install libssl-dev + - sudo apt-get install -y r-base r-base-dev + - eval "${MATRIX_EVAL}" + - mkdir -p ~/.R/ + - echo "CC = $CC" >> ~/.R/Makevars + - echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars + - echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars + - echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars + install: + - sudo R -e 'install.packages(c("devtools", "roxygen2"), dep = T, quiet = T,repos = "https://cran.rstudio.com")' + - sudo R -e 'library(devtools); library(methods); options(repos=c(CRAN="https://cran.rstudio.com")); devtools::install_deps(pkg = "./R/", dep = T, quiet = T)' + script: + - cd ./R/ + - travis_wait 42 R CMD build . --no-build-vignettes --no-manual + - travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran --no-build-vignettes --no-manual + r_binary_packages: + - testthat + after_failure: + - cat hBayesDM.Rcheck/00* - name: Ubuntu + g++-7 (BUILD_ALL) os: linux dist: trusty @@ -48,33 +75,68 @@ matrix: - gcc-7 - g++-7 - gfortran-7 + - libcurl4-openssl-dev + - libxml2-dev env: + - MAKEFLAGS="-j 2" - MATRIX_EVAL="CC=gcc-7 && CXX=g++-7" - BUILD_ALL="true" - -before_install: - - eval "${MATRIX_EVAL}" - - mkdir -p ~/.R/ - - echo "CC = $CC" >> ~/.R/Makevars - - echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars - - echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars - - echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars - -install: - - R -e 'install.packages("devtools", quiet = T)' -e 'devtools::install_deps(dep = T, quiet = T)' - -script: - - travis_wait 42 R CMD build . 
- - travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran - -r_binary_packages: - - testthat + before_install: + - sudo apt-get update + - sudo sh -c 'echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list' + - gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9 + - gpg -a --export E084DAB9 | sudo apt-key add - + - sudo apt-get update + - sudo apt-get install gcc + - sudo apt-get install r-base-core + - sudo apt-get install libssl-dev + - sudo apt-get install -y r-base r-base-dev + - eval "${MATRIX_EVAL}" + - mkdir -p ~/.R/ + - echo "CC = $CC" >> ~/.R/Makevars + - echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars + - echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars + - echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars + install: + - sudo R -e 'install.packages(c("devtools", "roxygen2"), dep = T, quiet = T,repos = "https://cran.rstudio.com")' + - sudo R -e 'library(devtools); library(methods); options(repos=c(CRAN="https://cran.rstudio.com")); devtools::install_deps(pkg = "./R/", dep = T, quiet = T)' + script: + - cd ./R/ + - travis_wait 42 R CMD build . --no-build-vignettes --no-manual + - travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran --no-build-vignettes --no-manual + r_binary_packages: + - testthat + after_failure: + - cat hBayesDM.Rcheck/00* + - name: Python-package + addons: + apt: + packages: + - libcurl4-openssl-dev + - libxml2-dev + before_install: + - sudo -H apt-get install gfortran + - sudo -H apt-get install python3-setuptools + - sudo -H easy_install3 pip + - sudo -H pip3 install --upgrade pip + - sudo -H apt-get install python3-tk + install: + - cd ./Python/ + - sudo -H pip install pipenv + - sudo -H pipenv --python 3.7 + - sudo -H pipenv install --dev --skip-lock + - sudo -H pipenv install -e . 
--skip-lock + - sudo -H pipenv run pytest tests + script: + - sudo -H pipenv run pytest tests --doctest-modules + after_success: + - sudo -H pipenv run flake8 hbayesdm --format=pylint --statistics --exit-zero + - sudo -H pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero + - sudo -H pipenv run travis-sphinx build + - sudo -H pipenv run travis-sphinx deploy # r_github_packages: # - r-lib/covr # after_success: # - Rscript -e 'covr::codecov()' - -after_failure: - - cat hBayesDM.Rcheck/00* diff --git a/Python/tests/test_tmp.py b/Python/tests/test_tmp.py new file mode 100644 index 00000000..aaf307c0 --- /dev/null +++ b/Python/tests/test_tmp.py @@ -0,0 +1,3 @@ +def test_tmp(): + print('Hello world!') + assert(True) From a448651db2090f94a7724d758ccd8d779b92b22a Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Mon, 1 Jul 2019 01:46:46 +0900 Subject: [PATCH 015/163] Edit codecov.yml to work correctly --- codecov.yml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/codecov.yml b/codecov.yml index 9a4dd6e6..68b65bdb 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,8 +1,12 @@ ignore: - - "docs/" - - "inst/" - - "man/" - - "src/" - - "tools/" + - "R/docs/**/*" + - "R/inst/**/*" + - "R/man/**/*" + - "R/src/**/*" + - "R/tools/**/*" + - "Python/docs/**/*" + - "Python/hbayesdm/models/**/*" + - "Python/hbayesdm/preprocess_funcs.py" + - "JSON/ValidateAll.sh" comment: false From cc85273b8daff0b3401a1ed2a1f2792309672376 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Mon, 1 Jul 2019 21:49:50 +0900 Subject: [PATCH 016/163] Edit MANIFEST.in --- Python/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/MANIFEST.in b/Python/MANIFEST.in index 0999da74..2d9dd84b 100644 --- a/Python/MANIFEST.in +++ b/Python/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst include LICENSE graft hbayesdm/common -global-exclude .git +exclude hbayesdm/version.py From 6ae38247a22de5e748302d6d5d9bcae5cbfe56f8 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 2 Jul 2019 09:28:50 +0900 Subject: [PATCH 017/163] Edit README's --- Python/README.rst | 34 ++++++------- README.md | 119 ++++++++++++++++++++++++++++++++++++++-------- 2 files changed, 114 insertions(+), 39 deletions(-) diff --git a/Python/README.rst b/Python/README.rst index c4dc2bce..b699f454 100644 --- a/Python/README.rst +++ b/Python/README.rst @@ -1,15 +1,6 @@ hBayesDM-py =========== -.. image:: https://www.repostatus.org/badges/latest/wip.svg - :alt: Project Status: WIP – Initial development is in progress, - but there has not yet been a stable, usable release suitable - for the public. - :target: https://www.repostatus.org/#wip -.. image:: https://travis-ci.com/CCS-Lab/hBayesDM-py.svg?token=gbyEQoyAYgexeSRwBwj6&branch=master - :alt: Travis CI - :target: https://travis-ci.com/CCS-Lab/hBayesDM-py - This is the Python version of *hBayesDM* (hierarchical Bayesian modeling of Decision-Making tasks), a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of @@ -36,23 +27,28 @@ You can install hBayesDM-py from PyPI with the following line: .. code:: bash - pip install hbayesdm + pip install hbayesdm # Install using pip -If you want to install from source (via cloning from GitHub): +If you want to install from source (by cloning from GitHub): .. 
code:: bash

-   git clone --recursive https://github.com/CCS-Lab/hBayesDM-py.git
-   cd hBayesDM-py
-   python setup.py install
+   git clone https://github.com/CCS-Lab/hBayesDM.git
+   cd hBayesDM
+   cd Python
+
+   python setup.py install  # Install from source

-If you want to make a virtual environment using `pipenv`_,
-you can do so with the following command:
+If you want to create a virtual environment using `pipenv`_:

 .. _pipenv: https://pipenv.readthedocs.io/en/latest/

 .. code:: bash

-   # After cloning (recursively) & cd-ing into hBayesDM-py
-   pipenv install
-   pipenv install --dev  # For developmental purpose
+   git clone https://github.com/CCS-Lab/hBayesDM.git
+   cd hBayesDM
+   cd Python
+
+   pipenv install --skip-lock        # Install using pipenv
+   pipenv install --dev --skip-lock  # For developmental purposes
+
diff --git a/README.md b/README.md
index 61610e8b..0842f705 100644
--- a/README.md
+++ b/README.md
@@ -7,58 +7,137 @@
 [![Downloads](https://cranlogs.r-pkg.org/badges/grand-total/hBayesDM)](https://cran.r-project.org/web/packages/hBayesDM/index.html)
 [![DOI](https://zenodo.org/badge/doi/10.1162/CPSY_a_00002.svg)](https://doi.org/10.1162/CPSY_a_00002)

-**hBayesDM** (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly R package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference.
+#### Now supporting *R* and *Python*!

-## Getting Started
+**hBayesDM** (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference.
+
+Please see the respective sections below for installing hBayesDM with R/Python.
+
+## Getting Started - R

 ### Prerequisite

-To install hBayesDM, **[RStan][rstan] should be properly installed before you proceed**.
-For detailed instructions, please go to this link:
+To install hBayesDM for R, **[RStan][rstan] needs to be properly installed before you proceed**.
+For detailed instructions on having RStan ready prior to installing hBayesDM, please go to this link:
 https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started

 [rstan]: https://github.com/stan-dev/rstan

 ### Installation

-hBayesDM can be installed from CRAN by running the following command in R:
+The latest **stable** version of hBayesDM can be installed from CRAN by running the following command in R:

 ```r
 install.packages("hBayesDM")  # Install hBayesDM from CRAN
 ```

-or you can also install via GitHub with:
+or you can install from GitHub with:
+
+```r
+# `devtools` is required to install hBayesDM from GitHub
+if (!require(devtools)) install.packages("devtools")
+
+devtools::install_github("CCS-Lab/hBayesDM/R")
+```
+
+If you want to use the latest *development* version of hBayesDM, run the following in R:

 ```r
 # `devtools` is required to install hBayesDM from GitHub
 if (!require(devtools)) install.packages("devtools")

-devtools::install_github("CCS-Lab/hBayesDM")
+devtools::install_github("CCS-Lab/hBayesDM/R@develop")
 ```

 #### Building at once

-In default, you should build a Stan file into a binary for the first time to use the
-model, so it can be quite bothersome.
-In order to build all the models at once, you should set an environmental variable
-`BUILD_ALL` to `true`. 
-We highly recommend you to use multiple cores for build, since it requires quite
-a long time to complete.
+By default, you will have to wait for compilation when you run each model for the first time.
+If you plan on running several different models and want to pre-build all models during installation time,
+set an environment variable `BUILD_ALL` to `true`, like the following.
+We highly recommend you only do so when you have multiple cores available,
+since building all models at once takes quite a long time to complete.

 ```r
-Sys.setenv(BUILD_ALL='true') # Build all the models on installation
-Sys.setenv(MAKEFLAGS='-j 4') # Use 4 cores for compilation (or the number you want)
+Sys.setenv(BUILD_ALL = "true") # Build *all* models at installation time
+Sys.setenv(MAKEFLAGS = "-j 4") # Use 4 cores for build (or any other number you want)

-install.packages("hBayesDM") # Install from CRAN
+install.packages("hBayesDM") # Install from CRAN
 # or
-devtools::install_github("CCS-Lab/hBayesDM") # Install from GitHub
+devtools::install_github("CCS-Lab/hBayesDM/R") # Install from GitHub
 ```
+
+## Getting Started - Python
+
+**hBayesDM-py** supports Python 3.5 or higher. It requires several packages including:
+[NumPy][numpy], [SciPy][scipy], [Pandas][pandas], [PyStan][pystan], [Matplotlib][matplotlib], [ArviZ][arviz].
+*(But there's no need to pre-install anything, as pip handles all the requirements for us.)*
+
+[numpy]: https://www.numpy.org/
+[scipy]: https://www.scipy.org/
+[pandas]: https://pandas.pydata.org/
+[pystan]: https://github.com/stan-dev/pystan
+[matplotlib]: https://matplotlib.org/
+[arviz]: https://arviz-devs.github.io/arviz/
+
+### Installation
+
+You can install the latest **stable** version of `hbayesdm` from PyPI with the following command:
+
+```sh
+pip install hbayesdm  # Install from PyPI
+```
+
+or, if you want to install from source by cloning the repo from GitHub:
+
+```sh
+git clone https://github.com/CCS-Lab/hBayesDM.git  # Clone repo
+cd hBayesDM  # Move into repo
+cd Python  # Move into Python subdirectory
+
+python setup.py install  # Install hbayesdm from source
+```
+
+or, if you want to install the latest *development* version of `hbayesdm`:
+
+```sh
+git clone https://github.com/CCS-Lab/hBayesDM.git  # Clone repo
+cd hBayesDM  # Move into repo
+git checkout develop  # Checkout develop branch
+cd Python  # Move into Python subdirectory
+
+python setup.py install  # Install hbayesdm *develop* version from source
+```
+
+If you want to create a virtual environment using [`pipenv`](https://pipenv.readthedocs.io/en/latest/)
+while installing `hbayesdm`:
+
+```sh
+git clone https://github.com/CCS-Lab/hBayesDM.git  # Clone repo
+cd hBayesDM  # Move into repo
+cd Python  # Move into Python subdirectory
+
+pipenv install --skip-lock  # Install hbayesdm inside pipenv
+```
+
+**[For contributors]** You can also install all dependencies (including dev) of `hbayesdm`:
+
+```sh
+git clone https://github.com/CCS-Lab/hBayesDM.git  # Clone repo
+cd hBayesDM  # Move into repo
+cd Python  # Move into Python subdirectory
+
+pipenv install --dev --skip-lock  # For developmental purposes
+```
+
+By the way, we encourage you to try out [`pipenv`](https://pipenv.readthedocs.io/en/latest/), a well-documented, rich, high-level virtual environment wrapper for Python & pip. 
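+After installation, you can give `hbayesdm` a quick test-run. Below is a minimal sanity-check sketch; it mirrors the calls used in the package's own test suite (`gng_m1` with `example=True` fits the go/no-go model M1 on the example data bundled with the package):
+
+```python
+from hbayesdm import rhat, print_fit
+from hbayesdm.models import gng_m1
+
+# A short single-chain run for illustration; use more chains/samples for real analyses.
+fit = gng_m1(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1)
+
+print(fit.all_ind_pars)     # Summarized individual-level parameters
+print(rhat(fit, less=1.1))  # Convergence check on R-hat values
+print_fit(fit)              # Model-fit summary
+```
+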
+ +## Quick Links -- **Tutorial**: http://rpubs.com/CCSL/hBayesDM +- **Tutorial-R**: http://rpubs.com/CCSL/hBayesDM +- **Tutorial-py**: *...on its way...* - **Mailing list**: https://groups.google.com/forum/#!forum/hbayesdm-users - **Bug reports**: https://github.com/CCS-Lab/hBayesDM/issues +- **Contributing**: See the [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository. ## Citation @@ -66,7 +145,7 @@ If you used hBayesDM or some of its codes for your research, please cite this pa > Ahn, W.-Y., Haines, N., & Zhang, L. (2017). Revealing neuro-computational mechanisms of reinforcement learning and decision-making with the hBayesDM package. Computational Psychiatry, 1, 24-57. doi:10.1162/CPSY_a_00002. -or for BibTeX: +or using BibTeX: ```bibtex @article{hBayesDM, From 750e05f86f5cce1ba9037e28d13a61403fd8bc15 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Tue, 2 Jul 2019 09:58:39 +0900 Subject: [PATCH 018/163] Edit link in setup.py --- Python/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/setup.py b/Python/setup.py index 9d5d7de8..172abc81 100644 --- a/Python/setup.py +++ b/Python/setup.py @@ -98,7 +98,7 @@ def write_version_py(filename='hbayesdm/version.py'): LONG_DESC_TYPE = 'text/restructuredtext' AUTHOR = 'hBayesDM Developers' AUTHOR_EMAIL = 'hbayesdm-users@googlegroups.com' -URL = 'https://github.com/CCS-Lab/hBayesDM-py' +URL = 'https://github.com/CCS-Lab/hBayesDM' LICENSE = 'GPLv3' CLASSIFIERS = [ 'Environment :: Console', From d97d4347e244ad5d33d3fbf73506730c4e3d9423 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 2 Jul 2019 14:26:29 +0900 Subject: [PATCH 019/163] Update test codes --- Python/tests/test_gng_m1.py | 11 +++++++++++ Python/tests/test_tmp.py | 3 --- 2 files changed, 11 insertions(+), 3 deletions(-) create mode 100644 Python/tests/test_gng_m1.py delete mode 100644 Python/tests/test_tmp.py diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py new file mode 100644 index 00000000..137e2e01 --- /dev/null +++ b/Python/tests/test_gng_m1.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import gng_m1 + + +def test_gng_m1(): + fit = gng_m1(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_tmp.py b/Python/tests/test_tmp.py deleted file mode 100644 index aaf307c0..00000000 --- a/Python/tests/test_tmp.py +++ /dev/null @@ -1,3 +0,0 @@ -def test_tmp(): - print('Hello world!') - assert(True) From f344eefc63bcda466b2e8b9c570dc60daa2d6e56 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sun, 7 Jul 2019 18:23:12 +0900 Subject: [PATCH 020/163] Minor edits --- .gitignore | 4 +++- .travis.yml | 4 ++-- Python/README.rst | 1 + Python/tests/test_gng_m1.py | 20 +++++++++++++++++--- 4 files changed, 23 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 5d144e97..54b3923e 100644 --- a/.gitignore +++ b/.gitignore @@ -351,5 +351,7 @@ $RECYCLE.BIN/ CRAN-RELEASE # Py-related -Python/hbayesdm/version.py *.pkl +Python/.idea +Python/hbayesdm/version.py + diff --git a/.travis.yml b/.travis.yml index 44080ab8..f33b6893 100644 --- a/.travis.yml +++ b/.travis.yml @@ -126,9 +126,9 @@ matrix: - sudo -H pipenv --python 3.7 - sudo -H pipenv install --dev --skip-lock - sudo -H pipenv install -e . 
--skip-lock - - sudo -H pipenv run pytest tests script: - - sudo -H pipenv run pytest tests --doctest-modules + - travis_wait 30 sudo -H pipenv run pytest tests + - travis_wait 30 sudo -H pipenv run pytest tests --doctest-modules after_success: - sudo -H pipenv run flake8 hbayesdm --format=pylint --statistics --exit-zero - sudo -H pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero diff --git a/Python/README.rst b/Python/README.rst index b699f454..6a749503 100644 --- a/Python/README.rst +++ b/Python/README.rst @@ -50,5 +50,6 @@ If you want to create a virtual environment using `pipenv`_: cd Python pipenv install --skip-lock # Install using pipenv + # or pipenv install --dev --skip-lock # For developmental purposes diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py index 137e2e01..a7c10912 100644 --- a/Python/tests/test_gng_m1.py +++ b/Python/tests/test_gng_m1.py @@ -1,10 +1,24 @@ import pytest +import pystan -from hbayesdm.models import gng_m1 +from hbayesdm.models import gng_m1, gng_m2, gng_m3, gng_m4 +from hbayesdm import rhat, print_fit -def test_gng_m1(): - fit = gng_m1(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) +def test_gng_models(): + print(pystan.__version__) + + fit = gng_m1(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + print(fit) + print(fit.all_ind_pars) + print(rhat(fit, less=1.1)) + + fit2 = gng_m2(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + fit3 = gng_m3(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + fit4 = gng_m4(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + + print_fit(fit, fit2, fit3, fit4) # ic='loo' + print_fit(fit, fit2, fit3, fit4, ic='waic') if __name__ == '__main__': From 65f200854619c8596aac6855f4b3d9d0c1147b0b Mon Sep 17 00:00:00 2001 From: Harhim Park Date: Wed, 17 Apr 2019 15:44:18 +0900 Subject: [PATCH 021/163] Make the loglikelihood as a vector --- Python/hbayesdm/common/stan_files/bart_par4.stan | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Python/hbayesdm/common/stan_files/bart_par4.stan b/Python/hbayesdm/common/stan_files/bart_par4.stan index f66ca8f0..2049a200 100644 --- a/Python/hbayesdm/common/stan_files/bart_par4.stan +++ b/Python/hbayesdm/common/stan_files/bart_par4.stan @@ -92,7 +92,7 @@ generated quantities { real mu_tau = exp(mu_pr[4]); // Log-likelihood for model fit - real log_lik = 0; + real log_lik[N]; // For posterior predictive check real y_pred[N, T, P]; @@ -108,6 +108,8 @@ generated quantities { int n_succ = 0; int n_pump = 0; + log_lik[j] = 0; + for (k in 1:Tsubj[j]) { real p_burst; // Belief on a balloon to be burst real omega; // Optimal number of pumps @@ -116,7 +118,7 @@ generated quantities { omega = -gam[j] / log1m(p_burst); for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) { - log_lik += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l)); + log_lik[j] += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l)); y_pred[j, k, l] = bernoulli_logit_rng(tau[j] * (omega - l)); } From 308c2cd3fc57fc19dceda097f83f1f0ffc4c7770 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sat, 27 Jul 2019 14:16:45 +0900 Subject: [PATCH 022/163] Apply change to both R and Python --- R/inst/stan_files/bart_par4.stan | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/R/inst/stan_files/bart_par4.stan b/R/inst/stan_files/bart_par4.stan index f66ca8f0..2049a200 100644 --- a/R/inst/stan_files/bart_par4.stan +++ b/R/inst/stan_files/bart_par4.stan @@ -92,7 +92,7 @@ generated quantities { real mu_tau = exp(mu_pr[4]); // 
Log-likelihood for model fit - real log_lik = 0; + real log_lik[N]; // For posterior predictive check real y_pred[N, T, P]; @@ -108,6 +108,8 @@ generated quantities { int n_succ = 0; int n_pump = 0; + log_lik[j] = 0; + for (k in 1:Tsubj[j]) { real p_burst; // Belief on a balloon to be burst real omega; // Optimal number of pumps @@ -116,7 +118,7 @@ generated quantities { omega = -gam[j] / log1m(p_burst); for (l in 1:(pumps[j, k] + 1 - explosion[j, k])) { - log_lik += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l)); + log_lik[j] += bernoulli_logit_lpmf(d[j, k, l] | tau[j] * (omega - l)); y_pred[j, k, l] = bernoulli_logit_rng(tau[j] * (omega - l)); } From 45ea96a226dbbabe46f16df6b296c3294db2b4c1 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sat, 27 Jul 2019 15:06:18 +0900 Subject: [PATCH 023/163] Resolve #88 --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index f33b6893..8f3cb05c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -134,6 +134,10 @@ matrix: - sudo -H pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero - sudo -H pipenv run travis-sphinx build - sudo -H pipenv run travis-sphinx deploy + - name: Test sync (R/Python packages) + script: + - diff -r Python/hbayesdm/common/extdata R/inst/extdata + - diff -r Python/hbayesdm/common/stan_files R/inst/stan_files # r_github_packages: # - r-lib/covr From 26d64d6a666c972aec3535a4f5935707f3ffffdf Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sat, 27 Jul 2019 15:12:44 +0900 Subject: [PATCH 024/163] Try un-synced R/Python stan_files --- Python/hbayesdm/common/stan_files/a.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 Python/hbayesdm/common/stan_files/a.txt diff --git a/Python/hbayesdm/common/stan_files/a.txt b/Python/hbayesdm/common/stan_files/a.txt new file mode 100644 index 00000000..e69de29b From f7bb5c29fff77954a4e6970cf69bf9196f515b17 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sat, 27 Jul 2019 15:19:39 +0900 Subject: [PATCH 025/163] Revert "Try un-synced R/Python stan_files" This reverts commit 26d64d6a666c972aec3535a4f5935707f3ffffdf. 
--- Python/hbayesdm/common/stan_files/a.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 Python/hbayesdm/common/stan_files/a.txt diff --git a/Python/hbayesdm/common/stan_files/a.txt b/Python/hbayesdm/common/stan_files/a.txt deleted file mode 100644 index e69de29b..00000000 From f73f502aedec7d6c3a068bfd2af8f642390165a8 Mon Sep 17 00:00:00 2001 From: Jethro Lee Date: Sat, 27 Jul 2019 15:19:53 +0900 Subject: [PATCH 026/163] Change order --- .travis.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8f3cb05c..a872bbbf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,6 +21,10 @@ cache: matrix: include: + - name: Test sync (R/Python packages) + script: + - diff -r Python/hbayesdm/common/extdata R/inst/extdata + - diff -r Python/hbayesdm/common/stan_files R/inst/stan_files - name: Ubuntu + g++-7 os: linux dist: trusty @@ -134,10 +138,6 @@ matrix: - sudo -H pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero - sudo -H pipenv run travis-sphinx build - sudo -H pipenv run travis-sphinx deploy - - name: Test sync (R/Python packages) - script: - - diff -r Python/hbayesdm/common/extdata R/inst/extdata - - diff -r Python/hbayesdm/common/stan_files R/inst/stan_files # r_github_packages: # - r-lib/covr From 523c132b7df9c70083945bbe6777c18bc584457a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 9 Aug 2019 00:01:03 +0900 Subject: [PATCH 027/163] Make it possible to use VB estimates as initial values --- R/R/hBayesDM_model.R | 110 +++++++++++++++++++++++++------------------ 1 file changed, 65 insertions(+), 45 deletions(-) diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R index 8b05952a..95e10ed7 100644 --- a/R/R/hBayesDM_model.R +++ b/R/R/hBayesDM_model.R @@ -241,7 +241,7 @@ hBayesDM_model <- function(task_name, ######################################################### ## Prepare: data_list ##### ## pars ##### - ## gen_init for passing to Stan ##### + ## model_name ##### ######################################################### # Preprocess the raw data to pass to Stan @@ -265,47 +265,6 @@ hBayesDM_model <- function(task_name, pars <- c(pars, postpreds) } - # Initial values for the parameters - if (inits[1] == "random") { - gen_init <- "random" - } else { - if (inits[1] == "fixed") { - inits <- unlist(lapply(parameters, "[", 2)) # plausible values of each parameter - } else if (length(inits) != length(parameters)) { - stop("** Length of 'inits' must be ", length(parameters), - " (= the number of parameters of this model). Please check again. 
**\n") - } - if (model_type == "single") { - gen_init <- function() { - individual_level <- as.list(inits) - names(individual_level) <- names(parameters) - return(individual_level) - } - } else { - gen_init <- function() { - primes <- numeric(length(parameters)) - for (i in 1:length(parameters)) { - lb <- parameters[[i]][1] # lower bound - ub <- parameters[[i]][3] # upper bound - if (is.infinite(lb)) { - primes[i] <- inits[i] # (-Inf, Inf) - } else if (is.infinite(ub)) { - primes[i] <- log(inits[i] - lb) # ( lb, Inf) - } else { - primes[i] <- qnorm((inits[i] - lb) / (ub - lb)) # ( lb, ub) - } - } - group_level <- list(mu_pr = primes, - sigma = rep(1.0, length(primes))) - individual_level <- lapply(primes, function(x) rep(x, n_subj)) - names(individual_level) <- paste0(names(parameters), "_pr") - return(c(group_level, individual_level)) - } - } - } - - ############### Print for user ############### - # Full name of model if (model_type == "") { model <- paste0(task_name, "_", model_name) @@ -326,7 +285,7 @@ hBayesDM_model <- function(task_name, } options(mc.cores = ncore) - # Print for user + ############### Print for user ############### cat("\n") cat("Model name =", model, "\n") cat("Data file =", data, "\n") @@ -380,8 +339,6 @@ hBayesDM_model <- function(task_name, cat("\n") } - ############### Fit & extract ############### - # Designate the Stan model if (is.null(stanmodel_arg)) { if (FLAG_BUILD_ALL) { @@ -395,6 +352,69 @@ hBayesDM_model <- function(task_name, stanmodel_arg <- rstan::stan_model(stanmodel_arg) } + # Initial values for the parameters + if (inits[1] == "vb") { + cat("\n") + cat("****************************************\n") + cat("** Use VB estimates as initial values **\n") + cat("****************************************\n") + + fit_vb <- rstan::vb(object = stanmodel_arg, data = data_list) + m_vb <- colMeans(as.data.frame(fit_vb)) + + gen_init <- function() { + ret <- list( + mu_pr = as.vector(m_vb[startsWith(names(m_vb), 'mu_pr')]), + sigma = as.vector(m_vb[startsWith(names(m_vb), 'sigma')]) + ) + + for (p in names(parameters)) { + ret[[p]] <- as.vector(m_vb[startsWith(names(m_vb), paste0(p, '_pr'))]) + } + + return(ret) + } + } else if (inits[1] == "random") { + gen_init <- "random" + } else { + if (inits[1] == "fixed") { + inits <- unlist(lapply(parameters, "[", 2)) # plausible values of each parameter + } else if (length(inits) != length(parameters)) { + stop("** Length of 'inits' must be ", length(parameters), + " (= the number of parameters of this model). Please check again. 
**\n") + } + if (model_type == "single") { + gen_init <- function() { + individual_level <- as.list(inits) + names(individual_level) <- names(parameters) + return(individual_level) + } + } else { + gen_init <- function() { + primes <- numeric(length(parameters)) + for (i in 1:length(parameters)) { + lb <- parameters[[i]][1] # lower bound + ub <- parameters[[i]][3] # upper bound + if (is.infinite(lb)) { + primes[i] <- inits[i] # (-Inf, Inf) + } else if (is.infinite(ub)) { + primes[i] <- log(inits[i] - lb) # ( lb, Inf) + } else { + primes[i] <- qnorm((inits[i] - lb) / (ub - lb)) # ( lb, ub) + } + } + group_level <- list(mu_pr = primes, + sigma = rep(1.0, length(primes))) + individual_level <- lapply(primes, function(x) rep(x, n_subj)) + names(individual_level) <- paste0(names(parameters), "_pr") + return(c(group_level, individual_level)) + } + } + } + + + ############### Fit & extract ############### + # Fit the Stan model if (vb) { # if variational Bayesian fit <- rstan::vb(object = stanmodel_arg, From 2928934e16529557fb1411d67c5954a03a42b767 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 9 Aug 2019 00:07:28 +0900 Subject: [PATCH 028/163] Set VB estimates as initial values by default --- R/R/hBayesDM_model.R | 2 +- R/man-roxygen/model-documentation.R | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R index 95e10ed7..e6fb21e3 100644 --- a/R/R/hBayesDM_model.R +++ b/R/R/hBayesDM_model.R @@ -120,7 +120,7 @@ hBayesDM_model <- function(task_name, nchain = 4, ncore = 1, nthin = 1, - inits = "random", + inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, diff --git a/R/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R index 19776ee3..60e2c042 100644 --- a/R/man-roxygen/model-documentation.R +++ b/R/man-roxygen/model-documentation.R @@ -18,8 +18,8 @@ #' @param nthin Every \code{i == nthin} sample will be used to generate the posterior distribution. #' Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is #' high. -#' @param inits Character value specifying how the initial values should be generated. Options are -#' "fixed" or "random", or your own initial values. +#' @param inits Character value specifying how the initial values should be generated. +#' Possible options are "vb" (default), "fixed", "random", or your own initial values. #' @param indPars Character value specifying how to summarize individual parameters. Current options #' are: "mean", "median", or "mode". #' @param modelRegressor From dec556dd96c9fe13e4a6fa37ac840d97946cec6a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 16 Aug 2019 21:18:33 +0900 Subject: [PATCH 029/163] Implement to use VB estimates as initial values --- Python/hbayesdm/base.py | 70 +++++++++++++++++++++++++++++++++++++---- 1 file changed, 64 insertions(+), 6 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 3720a0b8..e5d4c3da 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -21,7 +21,7 @@ class TaskModel(metaclass=ABCMeta): - """HBayesDM TaskModel Base Class. + """hBayesDM TaskModel Base Class. The base class that is inherited by all hBayesDM task-models. Child classes should implement (i.e. override) the abstract method: `_preprocess_func`. 
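# As a usage sketch of what this change enables (a hypothetical call: `gng_m1`
# and `example=True` come from the package's test suite, and the 'vb'/'random'/
# 'fixed' options are those documented in `_prepare_gen_init` below):
#
#     from hbayesdm.models import gng_m1
#
#     # inits='vb' runs a quick variational Bayes pass first and uses its
#     # posterior means to initialize the MCMC chains; pass inits='random',
#     # inits='fixed', or a list of floats to keep the previous behavior.
#     fit = gng_m1(example=True, inits='vb')
#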
@@ -165,7 +165,12 @@ def _run(self, data_dict = self._preprocess_func( raw_data, general_info, additional_args) pars = self._prepare_pars(model_regressor, inc_postpred) - gen_init = self._prepare_gen_init(inits, general_info['n_subj']) + + n_subj = general_info['n_subj'] + if inits == 'vb': + gen_init = self._prepare_gen_init_vb(data_dict, n_subj) + else: + gen_init = self._prepare_gen_init(inits, n_subj) model = self._get_model_full_name() ncore = self._set_number_of_cores(ncore) @@ -423,16 +428,68 @@ def _prepare_pars(self, model_regressor: bool, inc_postpred: bool) -> List: pars += self.postpreds return pars + def _prepare_gen_init_vb(self, + data_dict: Dict, + n_subj: int, + ) -> Union[str, Callable]: + """Prepare initial values for the parameters using Variational Bayesian + methods. + + Parameters + ---------- + data_dict + Dict holding the data to pass to Stan. + n_subj + Total number of subjects in data. + + Returns + ------- + gen_init : Union[str, Callable] + A function that returns initial values for each parameter, based on + the variational Bayesian method. + """ + model = self._get_model_full_name() + sm = self._designate_stan_model(model) + + try: + fit = sm.vb(data=data_dict) + except Exception: + raise RuntimeError( + 'Failed to get VB estimates for initial values. ' + 'Please re-run the code to try fitting model with VB.') + + len_param = len(self.parameters) + dict_vb = { + k: v + for k, v in zip(fit['mean_par_names'], fit['mean_pars']) + if k.startswith('sigma[') or '_pr[' in k + } + + dict_init = {} + dict_init['mu_pr'] = \ + [dict_vb['mu_pr[%d]' % (i + 1)] for i in range(len_param)] + dict_init['sigma'] = \ + [dict_vb['sigma[%d]' % (i + 1)] for i in range(len_param)] + for p in self.parameters: + dict_init['%s_pr' % p] = \ + [dict_vb['%s_pr[%d]' % (p, i + 1)] for i in range(n_subj)] + + def gen_init(): + return dict_init + + return gen_init + def _prepare_gen_init(self, inits: Union[str, Sequence[float]], - n_subj: int) -> Union[str, Callable]: + n_subj: int, + ) -> Union[str, Callable]: """Prepare initial values for the parameters. Parameters ---------- inits - User-defined inits. Can be the strings 'random' or 'fixed', or a - list of float values to use as initial values for the parameters. + User-defined inits. Can be the strings 'random' or 'fixed', + or a list of float values to use as initial values for parameters. n_subj Total number of subjects in data. 
@@ -825,7 +882,8 @@ def plot(self, """ type_options = ('dist', 'trace') if type not in type_options: - raise RuntimeError('Plot type must be one of ' + repr(type_options)) + raise RuntimeError( + 'Plot type must be one of ' + repr(type_options)) if self.model_type == 'single': var_names = list(self.parameters_desc) From 759078b053e581ffde1252d4c4bc6b44d1c6f769 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 16 Aug 2019 21:23:39 +0900 Subject: [PATCH 030/163] Use VB estimates as initial values by default for Python --- JSON/PY_CODE_TEMPLATE.txt | 2 +- Python/hbayesdm/models/_bandit2arm_delta.py | 2 +- Python/hbayesdm/models/_bandit4arm2_kalman_filter.py | 2 +- Python/hbayesdm/models/_bandit4arm_2par_lapse.py | 2 +- Python/hbayesdm/models/_bandit4arm_4par.py | 2 +- Python/hbayesdm/models/_bandit4arm_lapse.py | 2 +- Python/hbayesdm/models/_bandit4arm_lapse_decay.py | 2 +- Python/hbayesdm/models/_bandit4arm_singleA_lapse.py | 2 +- Python/hbayesdm/models/_bart_par4.py | 2 +- Python/hbayesdm/models/_choiceRT_ddm.py | 2 +- Python/hbayesdm/models/_choiceRT_ddm_single.py | 2 +- Python/hbayesdm/models/_cra_exp.py | 2 +- Python/hbayesdm/models/_cra_linear.py | 2 +- Python/hbayesdm/models/_dbdm_prob_weight.py | 2 +- Python/hbayesdm/models/_dd_cs.py | 2 +- Python/hbayesdm/models/_dd_cs_single.py | 2 +- Python/hbayesdm/models/_dd_exp.py | 2 +- Python/hbayesdm/models/_dd_hyperbolic.py | 2 +- Python/hbayesdm/models/_dd_hyperbolic_single.py | 2 +- Python/hbayesdm/models/_gng_m1.py | 2 +- Python/hbayesdm/models/_gng_m2.py | 2 +- Python/hbayesdm/models/_gng_m3.py | 2 +- Python/hbayesdm/models/_gng_m4.py | 2 +- Python/hbayesdm/models/_igt_orl.py | 2 +- Python/hbayesdm/models/_igt_pvl_decay.py | 2 +- Python/hbayesdm/models/_igt_pvl_delta.py | 2 +- Python/hbayesdm/models/_igt_vpp.py | 2 +- Python/hbayesdm/models/_peer_ocu.py | 2 +- Python/hbayesdm/models/_prl_ewa.py | 2 +- Python/hbayesdm/models/_prl_fictitious.py | 2 +- Python/hbayesdm/models/_prl_fictitious_multipleB.py | 2 +- Python/hbayesdm/models/_prl_fictitious_rp.py | 2 +- Python/hbayesdm/models/_prl_fictitious_rp_woa.py | 2 +- Python/hbayesdm/models/_prl_fictitious_woa.py | 2 +- Python/hbayesdm/models/_prl_rp.py | 2 +- Python/hbayesdm/models/_prl_rp_multipleB.py | 2 +- Python/hbayesdm/models/_pst_gainloss_Q.py | 2 +- Python/hbayesdm/models/_ra_noLA.py | 2 +- Python/hbayesdm/models/_ra_noRA.py | 2 +- Python/hbayesdm/models/_ra_prospect.py | 2 +- Python/hbayesdm/models/_rdt_happiness.py | 2 +- Python/hbayesdm/models/_ts_par4.py | 2 +- Python/hbayesdm/models/_ts_par6.py | 2 +- Python/hbayesdm/models/_ts_par7.py | 2 +- Python/hbayesdm/models/_ug_bayes.py | 2 +- Python/hbayesdm/models/_ug_delta.py | 2 +- Python/hbayesdm/models/_wcs_sql.py | 2 +- 47 files changed, 47 insertions(+), 47 deletions(-) diff --git a/JSON/PY_CODE_TEMPLATE.txt b/JSON/PY_CODE_TEMPLATE.txt index c90f8cf2..de2b12f8 100644 --- a/JSON/PY_CODE_TEMPLATE.txt +++ b/JSON/PY_CODE_TEMPLATE.txt @@ -47,7 +47,7 @@ def {model_function}( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py index d35ae2cf..c2e87e3e 100644 --- a/Python/hbayesdm/models/_bandit2arm_delta.py +++ b/Python/hbayesdm/models/_bandit2arm_delta.py @@ -54,7 +54,7 @@ def bandit2arm_delta( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, 
Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py index 095d79f2..74438f00 100644 --- a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py +++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py @@ -62,7 +62,7 @@ def bandit4arm2_kalman_filter( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py index 7ab47599..8b732d48 100644 --- a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py @@ -57,7 +57,7 @@ def bandit4arm_2par_lapse( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py index fba9226e..6ced4ae7 100644 --- a/Python/hbayesdm/models/_bandit4arm_4par.py +++ b/Python/hbayesdm/models/_bandit4arm_4par.py @@ -59,7 +59,7 @@ def bandit4arm_4par( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py index d6a155d2..6827e204 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse.py @@ -61,7 +61,7 @@ def bandit4arm_lapse( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py index 6864a806..f7f86e93 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py @@ -63,7 +63,7 @@ def bandit4arm_lapse_decay( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py index 6936562e..75163cf6 100644 --- a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py @@ -59,7 +59,7 @@ def bandit4arm_singleA_lapse( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index b20ff029..4917d06c 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -58,7 +58,7 @@ def bart_par4( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] 
= 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py index c5ca2181..df4ffd1e 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm.py +++ b/Python/hbayesdm/models/_choiceRT_ddm.py @@ -58,7 +58,7 @@ def choiceRT_ddm( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py index 6a49139f..029e9407 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm_single.py +++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py @@ -58,7 +58,7 @@ def choiceRT_ddm_single( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py index bd0ba6be..6d864840 100644 --- a/Python/hbayesdm/models/_cra_exp.py +++ b/Python/hbayesdm/models/_cra_exp.py @@ -62,7 +62,7 @@ def cra_exp( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py index ea0e85d0..1762e569 100644 --- a/Python/hbayesdm/models/_cra_linear.py +++ b/Python/hbayesdm/models/_cra_linear.py @@ -62,7 +62,7 @@ def cra_linear( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py index 172bd0ab..dd34c3ec 100644 --- a/Python/hbayesdm/models/_dbdm_prob_weight.py +++ b/Python/hbayesdm/models/_dbdm_prob_weight.py @@ -63,7 +63,7 @@ def dbdm_prob_weight( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py index 67482180..21583cb2 100644 --- a/Python/hbayesdm/models/_dd_cs.py +++ b/Python/hbayesdm/models/_dd_cs.py @@ -59,7 +59,7 @@ def dd_cs( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py index 8d5219fc..b0ce5544 100644 --- a/Python/hbayesdm/models/_dd_cs_single.py +++ b/Python/hbayesdm/models/_dd_cs_single.py @@ -59,7 +59,7 @@ def dd_cs_single( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py index d48b6a05..e17b457b 100644 --- 
a/Python/hbayesdm/models/_dd_exp.py +++ b/Python/hbayesdm/models/_dd_exp.py @@ -57,7 +57,7 @@ def dd_exp( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py index ed2c56ba..7ba44b13 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic.py +++ b/Python/hbayesdm/models/_dd_hyperbolic.py @@ -57,7 +57,7 @@ def dd_hyperbolic( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py index afa4b78b..63b27290 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic_single.py +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -57,7 +57,7 @@ def dd_hyperbolic_single( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py index 03d81a51..ea28bcd1 100644 --- a/Python/hbayesdm/models/_gng_m1.py +++ b/Python/hbayesdm/models/_gng_m1.py @@ -60,7 +60,7 @@ def gng_m1( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py index a19a7ab0..9b291993 100644 --- a/Python/hbayesdm/models/_gng_m2.py +++ b/Python/hbayesdm/models/_gng_m2.py @@ -62,7 +62,7 @@ def gng_m2( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py index 21694a4d..49f903ec 100644 --- a/Python/hbayesdm/models/_gng_m3.py +++ b/Python/hbayesdm/models/_gng_m3.py @@ -65,7 +65,7 @@ def gng_m3( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py index 9800b79c..2a8777bc 100644 --- a/Python/hbayesdm/models/_gng_m4.py +++ b/Python/hbayesdm/models/_gng_m4.py @@ -67,7 +67,7 @@ def gng_m4( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py index 8f44b778..1c77446f 100644 --- a/Python/hbayesdm/models/_igt_orl.py +++ b/Python/hbayesdm/models/_igt_orl.py @@ -61,7 +61,7 @@ def igt_orl( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git 
a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py index 3026a60e..57983a24 100644 --- a/Python/hbayesdm/models/_igt_pvl_decay.py +++ b/Python/hbayesdm/models/_igt_pvl_decay.py @@ -59,7 +59,7 @@ def igt_pvl_decay( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py index 59040338..5385ffe2 100644 --- a/Python/hbayesdm/models/_igt_pvl_delta.py +++ b/Python/hbayesdm/models/_igt_pvl_delta.py @@ -59,7 +59,7 @@ def igt_pvl_delta( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py index 874b3523..7b3db93f 100644 --- a/Python/hbayesdm/models/_igt_vpp.py +++ b/Python/hbayesdm/models/_igt_vpp.py @@ -67,7 +67,7 @@ def igt_vpp( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py index 2c185686..d8d8aa03 100644 --- a/Python/hbayesdm/models/_peer_ocu.py +++ b/Python/hbayesdm/models/_peer_ocu.py @@ -61,7 +61,7 @@ def peer_ocu( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py index e27179dd..531bf392 100644 --- a/Python/hbayesdm/models/_prl_ewa.py +++ b/Python/hbayesdm/models/_prl_ewa.py @@ -59,7 +59,7 @@ def prl_ewa( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py index 3a0f7bde..bc284841 100644 --- a/Python/hbayesdm/models/_prl_fictitious.py +++ b/Python/hbayesdm/models/_prl_fictitious.py @@ -60,7 +60,7 @@ def prl_fictitious( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py index 6e5437f3..00c54cf6 100644 --- a/Python/hbayesdm/models/_prl_fictitious_multipleB.py +++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py @@ -61,7 +61,7 @@ def prl_fictitious_multipleB( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py index 345eb275..e7bcb925 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp.py @@ -62,7 +62,7 @@ 
def prl_fictitious_rp( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py index fd8f5768..e821a1cb 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py @@ -60,7 +60,7 @@ def prl_fictitious_rp_woa( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py index f39e7fa2..cef8cd86 100644 --- a/Python/hbayesdm/models/_prl_fictitious_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_woa.py @@ -58,7 +58,7 @@ def prl_fictitious_woa( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py index 4ec619ae..b0aadc9e 100644 --- a/Python/hbayesdm/models/_prl_rp.py +++ b/Python/hbayesdm/models/_prl_rp.py @@ -58,7 +58,7 @@ def prl_rp( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py index dffab98d..d6dbef32 100644 --- a/Python/hbayesdm/models/_prl_rp_multipleB.py +++ b/Python/hbayesdm/models/_prl_rp_multipleB.py @@ -59,7 +59,7 @@ def prl_rp_multipleB( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index 88697680..78581bb1 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -57,7 +57,7 @@ def pst_gainloss_Q( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index 4b3590a7..3cdc79d8 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -56,7 +56,7 @@ def ra_noLA( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index 0a018226..5e90e29a 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -56,7 +56,7 @@ def ra_noRA( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = 
False, diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index 895280d7..d4877260 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -58,7 +58,7 @@ def ra_prospect( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py index be2e5070..8e909a68 100644 --- a/Python/hbayesdm/models/_rdt_happiness.py +++ b/Python/hbayesdm/models/_rdt_happiness.py @@ -68,7 +68,7 @@ def rdt_happiness( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py index e5c68e12..931c4142 100644 --- a/Python/hbayesdm/models/_ts_par4.py +++ b/Python/hbayesdm/models/_ts_par4.py @@ -59,7 +59,7 @@ def ts_par4( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py index a691521a..c13ef1dd 100644 --- a/Python/hbayesdm/models/_ts_par6.py +++ b/Python/hbayesdm/models/_ts_par6.py @@ -63,7 +63,7 @@ def ts_par6( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py index df68dc70..6ee9accd 100644 --- a/Python/hbayesdm/models/_ts_par7.py +++ b/Python/hbayesdm/models/_ts_par7.py @@ -65,7 +65,7 @@ def ts_par7( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py index 9e0d7206..8fda0893 100644 --- a/Python/hbayesdm/models/_ug_bayes.py +++ b/Python/hbayesdm/models/_ug_bayes.py @@ -56,7 +56,7 @@ def ug_bayes( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py index 4b93f0b3..1276dad6 100644 --- a/Python/hbayesdm/models/_ug_delta.py +++ b/Python/hbayesdm/models/_ug_delta.py @@ -56,7 +56,7 @@ def ug_delta( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] = 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py index 258a8e36..40ae7f15 100644 --- a/Python/hbayesdm/models/_wcs_sql.py +++ b/Python/hbayesdm/models/_wcs_sql.py @@ -56,7 +56,7 @@ def wcs_sql( nchain: int = 4, ncore: int = 1, nthin: int = 1, - inits: Union[str, Sequence[float]] = 'random', + inits: Union[str, Sequence[float]] 
= 'vb', ind_pars: str = 'mean', model_regressor: bool = False, vb: bool = False, From 4411e01adb4f4f76f47ee78de76fdb1a40b5eabb Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 19 Aug 2019 12:47:28 +0900 Subject: [PATCH 031/163] Update .travis.yml --- .travis.yml | 164 +++++++++--------------------------- Python/hbayesdm/base.py | 43 ++++++---- Python/requirements-dev.txt | 24 ++++++ Python/requirements.txt | 19 +++++ Python/tests/test_gng_m1.py | 9 +- travis/after-failure.sh | 19 +++++ travis/after-success.sh | 10 +++ travis/script.sh | 20 +++++ travis/setup.sh | 55 ++++++++++++ 9 files changed, 220 insertions(+), 143 deletions(-) create mode 100644 Python/requirements-dev.txt create mode 100644 Python/requirements.txt create mode 100755 travis/after-failure.sh create mode 100755 travis/after-success.sh create mode 100755 travis/script.sh create mode 100755 travis/setup.sh diff --git a/.travis.yml b/.travis.yml index a872bbbf..396b4c60 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,9 @@ language: cpp # NOTE: was `false` sudo: required +os: linux +dist: trusty + branches: only: - master @@ -16,131 +19,48 @@ branches: cache: apt: true packages: true - ccache: true pip: true + directories: + - $HOME/miniconda3 + +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - build-essential + - gcc-7 + - g++-7 + - gfortran-7 + - libcurl4-openssl-dev + - libxml2-dev matrix: include: - - name: Test sync (R/Python packages) - script: - - diff -r Python/hbayesdm/common/extdata R/inst/extdata - - diff -r Python/hbayesdm/common/stan_files R/inst/stan_files - - name: Ubuntu + g++-7 - os: linux - dist: trusty - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-7 - - g++-7 - - gfortran-7 - - libcurl4-openssl-dev - - libxml2-dev - env: - - MAKEFLAGS="-j 2" - - MATRIX_EVAL="CC=gcc-7 && CXX=g++-7" - before_install: - - sudo apt-get update - - sudo sh -c 'echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list' - - gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9 - - gpg -a --export E084DAB9 | sudo apt-key add - - - sudo apt-get update - - sudo apt-get install gcc - - sudo apt-get install r-base-core - - sudo apt-get install libssl-dev - - sudo apt-get install -y r-base r-base-dev - - eval "${MATRIX_EVAL}" - - mkdir -p ~/.R/ - - echo "CC = $CC" >> ~/.R/Makevars - - echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars - - echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars - - echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars - install: - - sudo R -e 'install.packages(c("devtools", "roxygen2"), dep = T, quiet = T,repos = "https://cran.rstudio.com")' - - sudo R -e 'library(devtools); library(methods); options(repos=c(CRAN="https://cran.rstudio.com")); devtools::install_deps(pkg = "./R/", dep = T, quiet = T)' - script: - - cd ./R/ - - travis_wait 42 R CMD build . 
--no-build-vignettes --no-manual - - travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran --no-build-vignettes --no-manual - r_binary_packages: - - testthat - after_failure: - - cat hBayesDM.Rcheck/00* - - name: Ubuntu + g++-7 (BUILD_ALL) - os: linux - dist: trusty - addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - gcc-7 - - g++-7 - - gfortran-7 - - libcurl4-openssl-dev - - libxml2-dev - env: - - MAKEFLAGS="-j 2" - - MATRIX_EVAL="CC=gcc-7 && CXX=g++-7" - - BUILD_ALL="true" - before_install: - - sudo apt-get update - - sudo sh -c 'echo "deb http://cran.rstudio.com/bin/linux/ubuntu trusty/" >> /etc/apt/sources.list' - - gpg --keyserver keyserver.ubuntu.com --recv-key E084DAB9 - - gpg -a --export E084DAB9 | sudo apt-key add - - - sudo apt-get update - - sudo apt-get install gcc - - sudo apt-get install r-base-core - - sudo apt-get install libssl-dev - - sudo apt-get install -y r-base r-base-dev - - eval "${MATRIX_EVAL}" - - mkdir -p ~/.R/ - - echo "CC = $CC" >> ~/.R/Makevars - - echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars - - echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars - - echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars - install: - - sudo R -e 'install.packages(c("devtools", "roxygen2"), dep = T, quiet = T,repos = "https://cran.rstudio.com")' - - sudo R -e 'library(devtools); library(methods); options(repos=c(CRAN="https://cran.rstudio.com")); devtools::install_deps(pkg = "./R/", dep = T, quiet = T)' - script: - - cd ./R/ - - travis_wait 42 R CMD build . --no-build-vignettes --no-manual - - travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran --no-build-vignettes --no-manual - r_binary_packages: - - testthat - after_failure: - - cat hBayesDM.Rcheck/00* - - name: Python-package - addons: - apt: - packages: - - libcurl4-openssl-dev - - libxml2-dev - before_install: - - sudo -H apt-get install gfortran - - sudo -H apt-get install python3-setuptools - - sudo -H easy_install3 pip - - sudo -H pip3 install --upgrade pip - - sudo -H apt-get install python3-tk - install: - - cd ./Python/ - - sudo -H pip install pipenv - - sudo -H pipenv --python 3.7 - - sudo -H pipenv install --dev --skip-lock - - sudo -H pipenv install -e . 
--skip-lock - script: - - travis_wait 30 sudo -H pipenv run pytest tests - - travis_wait 30 sudo -H pipenv run pytest tests --doctest-modules - after_success: - - sudo -H pipenv run flake8 hbayesdm --format=pylint --statistics --exit-zero - - sudo -H pipenv run pylint hbayesdm --rcfile=setup.cfg --exit-zero - - sudo -H pipenv run travis-sphinx build - - sudo -H pipenv run travis-sphinx deploy + - name: 'Test sync on models and data' + env: TARGET='Sync' + - name: 'Test R codes' + env: TARGET='R' && MAKEFLAGS='-j 2' && CC=gcc-7 && CXX=g++-7 + - name: 'Test Python codes (Python 3.5)' + env: TARGET='Python' && PYTHON_VERSION=3.5 +# - name: 'Test Python codes (Python 3.6)' +# env: TARGET='Python' && PYTHON_VERSION=3.6 +# - name: 'Test Python codes (Python 3.7)' +# env: TARGET='Python' && PYTHON_VERSION=3.7 + +before_install: + - export ROOTPATH=`pwd` + - [ "$TARGET" = "R" ] && cd ./R + - [ "$TARGET" = "Python" ] && cd ./Python + +install: + - bash $ROOTPATH/travis/setup.sh + +script: + - bash $ROOTPATH/travis/script.sh -# r_github_packages: -# - r-lib/covr +after_failure: + - bash $ROOTPATH/travis/after-failure.sh -# after_success: -# - Rscript -e 'covr::codecov()' +after_success: + - bash $ROOTPATH/travis/after-success.sh diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 3720a0b8..65205b3e 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -597,24 +597,32 @@ def _designate_stan_model(self, model: str) -> StanModel: """ stan_files = os.path.join(_common, 'stan_files') model_path = os.path.join(stan_files, model + '.stan') - cache_file = 'cached-%s-hbayesdm=%s-pystan=%s.pkl' % ( - model, _hbayesdm_version, _pystan_version) - try: - with open(cache_file, 'rb') as cached_stan_model: - sm = pickle.load(cached_stan_model) - with open(model_path, 'r') as model_stan_code: - assert sm.model_code == model_stan_code.read() - except (FileNotFoundError, AssertionError): - sm = StanModel( - file=model_path, model_name=model, include_paths=[stan_files]) - with open(cache_file, 'wb') as f: - pickle.dump(sm, f) - except: # All other exceptions - raise RuntimeError( - 'Cache file is corrupted. 
Please remove file `' + - cache_file + '` and run again.') + cache_file = 'cached-%s-hbayesdm=%s-pystan=%s.pkl' % \ + (model, _hbayesdm_version, _pystan_version) + + if os.path.exists(cache_file): + try: + with open(cache_file, 'rb') as cached_stan_model: + sm = pickle.load(cached_stan_model) + with open(model_path, 'r') as model_stan_code: + assert sm.model_code == model_stan_code.read() + does_exist = True + except Exception: + print('Invalid cached StanModel:', cache_file) + print('Remove the cached model...') + os.remove(cache_file) + does_exist = False else: + does_exist = False + + if does_exist: print('Using cached StanModel:', cache_file) + else: + sm = StanModel(file=model_path, model_name=model, + include_paths=[stan_files]) + with open(cache_file, 'wb') as f: + pickle.dump(sm, f) + return sm def _fit_stan_model(self, vb: bool, sm: StanModel, data_dict: Dict, @@ -825,7 +833,8 @@ def plot(self, """ type_options = ('dist', 'trace') if type not in type_options: - raise RuntimeError('Plot type must be one of ' + repr(type_options)) + raise RuntimeError( + 'Plot type must be one of ' + repr(type_options)) if self.model_type == 'single': var_names = list(self.parameters_desc) diff --git a/Python/requirements-dev.txt b/Python/requirements-dev.txt new file mode 100644 index 00000000..45d1fac4 --- /dev/null +++ b/Python/requirements-dev.txt @@ -0,0 +1,24 @@ +################################################################################ +# This requirements file has been automatically generated from `Pipfile` with +# `pipenv-to-requirements` +# +# +# This has been done to maintain backward compatibility with tools and services +# that do not support `Pipfile` yet. +# +# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and +# `Pipfile.lock` and then regenerate `requirements*.txt`. +################################################################################ + +autopep8 +flake8 +jupyter +jupyterlab +mypy +pylint +pytest +sphinx +sphinx-autobuild +sphinx-autodoc-typehints +sphinx-rtd-theme +travis-sphinx diff --git a/Python/requirements.txt b/Python/requirements.txt new file mode 100644 index 00000000..d249133d --- /dev/null +++ b/Python/requirements.txt @@ -0,0 +1,19 @@ +################################################################################ +# This requirements file has been automatically generated from `Pipfile` with +# `pipenv-to-requirements` +# +# +# This has been done to maintain backward compatibility with tools and services +# that do not support `Pipfile` yet. +# +# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and +# `Pipfile.lock` and then regenerate `requirements*.txt`. +################################################################################ + +-e . 
+arviz +matplotlib +numpy +pandas +pystan +scipy diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py index a7c10912..b463f8a7 100644 --- a/Python/tests/test_gng_m1.py +++ b/Python/tests/test_gng_m1.py @@ -8,14 +8,15 @@ def test_gng_models(): print(pystan.__version__) - fit = gng_m1(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + fit = gng_m1(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) + print(fit) print(fit.all_ind_pars) print(rhat(fit, less=1.1)) - fit2 = gng_m2(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) - fit3 = gng_m3(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) - fit4 = gng_m4(example=True, niter=2000, nwarmup=1000, nchain=1, ncore=1) + fit2 = gng_m2(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) + fit3 = gng_m3(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) + fit4 = gng_m4(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) print_fit(fit, fit2, fit3, fit4) # ic='loo' print_fit(fit, fit2, fit3, fit4, ic='waic') diff --git a/travis/after-failure.sh b/travis/after-failure.sh new file mode 100755 index 00000000..9168a884 --- /dev/null +++ b/travis/after-failure.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +# Scripts for R +if [ "$TARGET" = "R" ]; then + Rscript -e 'covr::codecov()' + cat hBayesDM.Rcheck/00* + +# Scripts for Python +elif [ "$TARGET" = "Python" ]; then + flake8 hbayesdm --format=pylint --statistics --exit-zero + pylint hbayesdm --rcfile=setup.cfg --exit-zero + # travis-sphinx build + # travis-sphinx deploy + +# Otherwise +else + echo 'No after-success job required' +fi + diff --git a/travis/after-success.sh b/travis/after-success.sh new file mode 100755 index 00000000..17d03a6e --- /dev/null +++ b/travis/after-success.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +# Scripts for R +if [ "$TARGET" = "R" ]; then + cat hBayesDM.Rcheck/00* + +# Otherwise +else + echo 'No after-failure job required' +fi diff --git a/travis/script.sh b/travis/script.sh new file mode 100755 index 00000000..08b9df36 --- /dev/null +++ b/travis/script.sh @@ -0,0 +1,20 @@ +#!/usr/bin/env bash + +# Scripts for R +if [ "$TARGET" = "R" ]; then + travis_wait 42 R CMD build . 
--no-build-vignettes --no-manual + travis_wait 59 R CMD check hBayesDM*.tar.gz --as-cran --no-build-vignettes --no-manual + +# Scripts for Python +elif [ "$TARGET" = "Python" ]; then + travis_wait 30 pytest tests + +# Check sync for models and data +elif [ "$TARGET" = "Sync" ]; then + diff -r Python/hbayesdm/common/extdata R/inst/extdata + diff -r Python/hbayesdm/common/stan_files R/inst/stan_files + +# Otherwise +else + echo 'No script required' +fi diff --git a/travis/setup.sh b/travis/setup.sh new file mode 100755 index 00000000..2df68332 --- /dev/null +++ b/travis/setup.sh @@ -0,0 +1,55 @@ +#!/usr/bin/env bash + +# Setup codes for R +if [ "$TARGET" = "R" ]; then + # Add CRAN as an APT source + sudo echo 'deb https://cloud.r-project.org/bin/linux/ubuntu trusty-cran35/' >> /etc/apt/sources.list + sudo apt-key adv --keyserver keys.gnupg.net --recv-key 'E19F5F87128899B192B1A2C2AD5F960A256A04AF' + sudo apt-get update + + # Install R with the latest version + sudo apt-get install -y --allow-unauthenticated r-base-dev + + # Setup a config for R + mkdir -p ~/.R/ + echo "CC = ${CC}" >> ~/.R/Makevars + echo "CXX = ${CXX} -fPIC " >> ~/.R/Makevars + echo "CXX14 = ${CXX} -fPIC -flto=2" >> ~/.R/Makevars + echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars + + # Install R packages + sudo R \ + -e 'install.packages(c("devtools", "roxygen2", "covr"), quiet = T, repos = "https://cran.rstudio.com")' \ + -e 'devtools::install_deps(dep = T, quiet = T)' + +# Setup codes for Python +elif [ "$TARGET" = "Python" ]; then + # Download Miniconda and install it + wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh + chmod +x miniconda.sh + ./miniconda.sh -b + + # Set PATH for Miniconda + export PATH=$HOME/miniconda3/bin:$PATH + conda init + + # Update conda + conda update --yes conda + + if [[ -z "$PYTHON_VERSION" ]]; then + echo "Use latest Python version" + conda create -y -n test python + else + echo "Use Python ${PYTHON_VERSION}" + conda create -y -n test python="$PYTHON_VERSION" + fi + conda activate test + + # Install dependencies + pip install -r requirements.txt + pip install -r requirements-dev.txt + +# Otherwise +else + echo 'No setup required' +fi From 363372cf0271cfaa373ef76b961da68ce86e0f44 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 19 Aug 2019 22:28:49 +0900 Subject: [PATCH 032/163] Fix Travis settings and related codes --- .travis.yml | 40 ++++++++++++++++++--------------- Python/Pipfile | 30 ------------------------- Python/requirements-dev.txt | 24 -------------------- Python/requirements.txt | 18 +++++---------- travis/after-failure.sh | 10 +-------- travis/after-success.sh | 8 ++++++- travis/script.sh | 1 + travis/setup.sh | 45 ++++++++++++++++++++++++------------- 8 files changed, 66 insertions(+), 110 deletions(-) delete mode 100644 Python/Pipfile delete mode 100644 Python/requirements-dev.txt diff --git a/.travis.yml b/.travis.yml index 396b4c60..5f8c9f9e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,10 +18,9 @@ branches: # Use cache for packages cache: apt: true - packages: true - pip: true directories: - $HOME/miniconda3 + - $HOME/R/Library addons: apt: @@ -37,30 +36,35 @@ addons: matrix: include: - - name: 'Test sync on models and data' - env: TARGET='Sync' - - name: 'Test R codes' - env: TARGET='R' && MAKEFLAGS='-j 2' && CC=gcc-7 && CXX=g++-7 - - name: 'Test Python codes (Python 3.5)' - env: TARGET='Python' && PYTHON_VERSION=3.5 -# - name: 'Test Python codes (Python 3.6)' -# env: 
TARGET='Python' && PYTHON_VERSION=3.6 -# - name: 'Test Python codes (Python 3.7)' -# env: TARGET='Python' && PYTHON_VERSION=3.7 + - name: 'Test sync on models and data' + env: TARGET='Sync' + - name: 'Test R codes' + env: TARGET='R' + - name: 'Test R codes (BUILD_ALL)' + env: TARGET='R' && BUILD_ALL=1 + - name: 'Test Python codes (Python 3.5)' + env: TARGET='Python' && PYTHON_VERSION=3.5 + - name: 'Test Python codes (Python 3.6)' + env: TARGET='Python' && PYTHON_VERSION=3.6 + - name: 'Test Python codes (Python 3.7)' + env: TARGET='Python' && PYTHON_VERSION=3.7 before_install: - export ROOTPATH=`pwd` - - [ "$TARGET" = "R" ] && cd ./R - - [ "$TARGET" = "Python" ] && cd ./Python + - export MAKEFLAGS='-j 2' + - export CC=gcc-7 + - export CXX=g++-7 + - if [ "$TARGET" = "R" ]; then cd ./R; fi + - if [ "$TARGET" = "Python" ]; then cd ./Python; fi install: - - bash $ROOTPATH/travis/setup.sh + - source $ROOTPATH/travis/setup.sh script: - - bash $ROOTPATH/travis/script.sh + - source $ROOTPATH/travis/script.sh after_failure: - - bash $ROOTPATH/travis/after-failure.sh + - source $ROOTPATH/travis/after-failure.sh after_success: - - bash $ROOTPATH/travis/after-success.sh + - source $ROOTPATH/travis/after-success.sh diff --git a/Python/Pipfile b/Python/Pipfile deleted file mode 100644 index 48dc698c..00000000 --- a/Python/Pipfile +++ /dev/null @@ -1,30 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.org/simple" -verify_ssl = true - -[dev-packages] -autopep8 = "*" -pylint = "*" -flake8 = "*" -mypy = "*" -pytest = "*" -sphinx = "*" -sphinx-rtd-theme = "*" -travis-sphinx = "*" -sphinx-autodoc-typehints = "*" -sphinx-autobuild = "*" -jupyter = "*" -jupyterlab = "*" - -[packages] -numpy = "*" -scipy = "*" -pandas = "*" -pystan = "*" -matplotlib = "*" -arviz = "*" -hbayesdm = {editable = true,path = "."} - -[requires] -python_version = "3" diff --git a/Python/requirements-dev.txt b/Python/requirements-dev.txt deleted file mode 100644 index 45d1fac4..00000000 --- a/Python/requirements-dev.txt +++ /dev/null @@ -1,24 +0,0 @@ -################################################################################ -# This requirements file has been automatically generated from `Pipfile` with -# `pipenv-to-requirements` -# -# -# This has been done to maintain backward compatibility with tools and services -# that do not support `Pipfile` yet. -# -# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and -# `Pipfile.lock` and then regenerate `requirements*.txt`. -################################################################################ - -autopep8 -flake8 -jupyter -jupyterlab -mypy -pylint -pytest -sphinx -sphinx-autobuild -sphinx-autodoc-typehints -sphinx-rtd-theme -travis-sphinx diff --git a/Python/requirements.txt b/Python/requirements.txt index d249133d..bfb2c0bb 100644 --- a/Python/requirements.txt +++ b/Python/requirements.txt @@ -1,19 +1,13 @@ -################################################################################ -# This requirements file has been automatically generated from `Pipfile` with -# `pipenv-to-requirements` -# -# -# This has been done to maintain backward compatibility with tools and services -# that do not support `Pipfile` yet. -# -# Do NOT edit it directly, use `pipenv install [-d]` to modify `Pipfile` and -# `Pipfile.lock` and then regenerate `requirements*.txt`. -################################################################################ - -e . 
arviz +flake8 matplotlib numpy pandas +pylint pystan +pytest scipy +sphinx +sphinx-autodoc-typehints +sphinx-rtd-theme diff --git a/travis/after-failure.sh b/travis/after-failure.sh index 9168a884..9b0609ac 100755 --- a/travis/after-failure.sh +++ b/travis/after-failure.sh @@ -2,18 +2,10 @@ # Scripts for R if [ "$TARGET" = "R" ]; then - Rscript -e 'covr::codecov()' cat hBayesDM.Rcheck/00* -# Scripts for Python -elif [ "$TARGET" = "Python" ]; then - flake8 hbayesdm --format=pylint --statistics --exit-zero - pylint hbayesdm --rcfile=setup.cfg --exit-zero - # travis-sphinx build - # travis-sphinx deploy - # Otherwise else - echo 'No after-success job required' + echo 'No after-failure job required' fi diff --git a/travis/after-success.sh b/travis/after-success.sh index 17d03a6e..e8074b46 100755 --- a/travis/after-success.sh +++ b/travis/after-success.sh @@ -2,9 +2,15 @@ # Scripts for R if [ "$TARGET" = "R" ]; then + Rscript -e 'covr::codecov()' cat hBayesDM.Rcheck/00* +# Scripts for Python +elif [ "$TARGET" = "Python" ]; then + flake8 hbayesdm --format=pylint --statistics --exit-zero + pylint hbayesdm --rcfile=setup.cfg --exit-zero + # Otherwise else - echo 'No after-failure job required' + echo 'No after-success job required' fi diff --git a/travis/script.sh b/travis/script.sh index 08b9df36..62afb3ec 100755 --- a/travis/script.sh +++ b/travis/script.sh @@ -18,3 +18,4 @@ elif [ "$TARGET" = "Sync" ]; then else echo 'No script required' fi + diff --git a/travis/setup.sh b/travis/setup.sh index 2df68332..d484fe0d 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -2,13 +2,20 @@ # Setup codes for R if [ "$TARGET" = "R" ]; then + export R_LIBS_USER=~/R/Library + export R_LIBS_SITE=/usr/local/lib/R/site-library:/usr/lib/R/site-library + export _R_CHECK_CRAN_INCOMING_=false + export NOT_CRAN=true + export R_PROFILE=~/.Rprofile.site + # Add CRAN as an APT source sudo echo 'deb https://cloud.r-project.org/bin/linux/ubuntu trusty-cran35/' >> /etc/apt/sources.list sudo apt-key adv --keyserver keys.gnupg.net --recv-key 'E19F5F87128899B192B1A2C2AD5F960A256A04AF' sudo apt-get update # Install R with the latest version - sudo apt-get install -y --allow-unauthenticated r-base-dev + sudo apt-get install -y -q --allow-unauthenticated r-base r-base-core r-base-dev + hash -r # Setup a config for R mkdir -p ~/.R/ @@ -18,36 +25,42 @@ if [ "$TARGET" = "R" ]; then echo "CXX14FLAGS = -mtune=native -march=native -Wno-ignored-attributes -O0" >> ~/.R/Makevars # Install R packages - sudo R \ + Rscript \ -e 'install.packages(c("devtools", "roxygen2", "covr"), quiet = T, repos = "https://cran.rstudio.com")' \ -e 'devtools::install_deps(dep = T, quiet = T)' # Setup codes for Python elif [ "$TARGET" = "Python" ]; then - # Download Miniconda and install it - wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - chmod +x miniconda.sh - ./miniconda.sh -b - # Set PATH for Miniconda - export PATH=$HOME/miniconda3/bin:$PATH - conda init + if [ ! 
-d "$HOME/miniconda" ]; then + # Download Miniconda & Install it + wget -q https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh; + bash miniconda.sh -b -p $HOME/miniconda + fi + + # Add PATH for Miniconda + export PATH="$HOME/miniconda/bin:$PATH" + hash -r + + # Conda config & Update conda + conda config --set always_yes yes --set changeps1 no + conda update -q conda - # Update conda - conda update --yes conda + # Debug + conda info -a if [[ -z "$PYTHON_VERSION" ]]; then echo "Use latest Python version" - conda create -y -n test python + conda create -q -n test-`echo $PYTHON_VERSION` python else echo "Use Python ${PYTHON_VERSION}" - conda create -y -n test python="$PYTHON_VERSION" + conda create -q -n test-`echo $PYTHON_VERSION` python="$PYTHON_VERSION" fi - conda activate test + conda activate test-`echo $PYTHON_VERSION` # Install dependencies - pip install -r requirements.txt - pip install -r requirements-dev.txt + pip install -r requirements.txt --upgrade + pip install -r requirements-dev.txt --upgrade # Otherwise else From 003fe13c9f3ea44a2fb1b7f7b51fbb2bb535491f Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 19 Aug 2019 22:35:28 +0900 Subject: [PATCH 033/163] Update shell script codes --- travis/setup.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/travis/setup.sh b/travis/setup.sh index d484fe0d..106767bb 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -51,12 +51,12 @@ elif [ "$TARGET" = "Python" ]; then if [[ -z "$PYTHON_VERSION" ]]; then echo "Use latest Python version" - conda create -q -n test-`echo $PYTHON_VERSION` python + conda create -q -n test-$PYTHON_VERSION python else echo "Use Python ${PYTHON_VERSION}" - conda create -q -n test-`echo $PYTHON_VERSION` python="$PYTHON_VERSION" + conda create -q -n test-$PYTHON_VERSION python="$PYTHON_VERSION" fi - conda activate test-`echo $PYTHON_VERSION` + conda activate test-$PYTHON_VERSION # Install dependencies pip install -r requirements.txt --upgrade From 85a88332c8760ce05affbd990366bb41efb7bec8 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 19 Aug 2019 22:39:17 +0900 Subject: [PATCH 034/163] Remove an unnecessary line --- travis/setup.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/travis/setup.sh b/travis/setup.sh index 106767bb..6f35c9eb 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -60,7 +60,6 @@ elif [ "$TARGET" = "Python" ]; then # Install dependencies pip install -r requirements.txt --upgrade - pip install -r requirements-dev.txt --upgrade # Otherwise else From d68010e1acfb45c22d6603e46aea9bc43011e61e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 19 Aug 2019 23:16:21 +0900 Subject: [PATCH 035/163] Reorganize models --- {JSON => commons}/ModelInformation.schema.json | 0 {JSON => commons}/README.md | 14 ++++++++------ {JSON => commons}/ValidateAll.sh | 2 +- {JSON => commons}/WritePython.py | 10 +++++++--- {JSON => commons/models}/bandit2arm_delta.json | 0 .../models}/bandit4arm2_kalman_filter.json | 0 .../models}/bandit4arm_2par_lapse.json | 0 {JSON => commons/models}/bandit4arm_4par.json | 0 {JSON => commons/models}/bandit4arm_lapse.json | 0 .../models}/bandit4arm_lapse_decay.json | 0 .../models}/bandit4arm_singleA_lapse.json | 0 {JSON => commons/models}/bart_par4.json | 0 {JSON => commons/models}/choiceRT_ddm.json | 0 {JSON => commons/models}/choiceRT_ddm_single.json | 0 {JSON => commons/models}/cra_exp.json | 0 {JSON => commons/models}/cra_linear.json | 0 {JSON => commons/models}/dbdm_prob_weight.json | 0 {JSON => 
commons/models}/dd_cs.json | 0 {JSON => commons/models}/dd_cs_single.json | 0 {JSON => commons/models}/dd_exp.json | 0 {JSON => commons/models}/dd_hyperbolic.json | 0 {JSON => commons/models}/dd_hyperbolic_single.json | 0 {JSON => commons/models}/gng_m1.json | 0 {JSON => commons/models}/gng_m2.json | 0 {JSON => commons/models}/gng_m3.json | 0 {JSON => commons/models}/gng_m4.json | 0 {JSON => commons/models}/igt_orl.json | 0 {JSON => commons/models}/igt_pvl_decay.json | 0 {JSON => commons/models}/igt_pvl_delta.json | 0 {JSON => commons/models}/igt_vpp.json | 0 {JSON => commons/models}/peer_ocu.json | 0 {JSON => commons/models}/prl_ewa.json | 0 {JSON => commons/models}/prl_fictitious.json | 0 .../models}/prl_fictitious_multipleB.json | 0 {JSON => commons/models}/prl_fictitious_rp.json | 0 .../models}/prl_fictitious_rp_woa.json | 0 {JSON => commons/models}/prl_fictitious_woa.json | 0 {JSON => commons/models}/prl_rp.json | 0 {JSON => commons/models}/prl_rp_multipleB.json | 0 {JSON => commons/models}/pst_gainloss_Q.json | 0 {JSON => commons/models}/ra_noLA.json | 0 {JSON => commons/models}/ra_noRA.json | 0 {JSON => commons/models}/ra_prospect.json | 0 {JSON => commons/models}/rdt_happiness.json | 0 {JSON => commons/models}/ts_par4.json | 0 {JSON => commons/models}/ts_par6.json | 0 {JSON => commons/models}/ts_par7.json | 0 {JSON => commons/models}/ug_bayes.json | 0 {JSON => commons/models}/ug_delta.json | 0 {JSON => commons/models}/wcs_sql.json | 0 {JSON => commons/templates}/PY_CODE_TEMPLATE.txt | 0 .../templates}/PY_DOCSTRING_TEMPLATE.txt | 0 52 files changed, 16 insertions(+), 10 deletions(-) rename {JSON => commons}/ModelInformation.schema.json (100%) rename {JSON => commons}/README.md (97%) rename {JSON => commons}/ValidateAll.sh (77%) rename {JSON => commons}/WritePython.py (96%) rename {JSON => commons/models}/bandit2arm_delta.json (100%) rename {JSON => commons/models}/bandit4arm2_kalman_filter.json (100%) rename {JSON => commons/models}/bandit4arm_2par_lapse.json (100%) rename {JSON => commons/models}/bandit4arm_4par.json (100%) rename {JSON => commons/models}/bandit4arm_lapse.json (100%) rename {JSON => commons/models}/bandit4arm_lapse_decay.json (100%) rename {JSON => commons/models}/bandit4arm_singleA_lapse.json (100%) rename {JSON => commons/models}/bart_par4.json (100%) rename {JSON => commons/models}/choiceRT_ddm.json (100%) rename {JSON => commons/models}/choiceRT_ddm_single.json (100%) rename {JSON => commons/models}/cra_exp.json (100%) rename {JSON => commons/models}/cra_linear.json (100%) rename {JSON => commons/models}/dbdm_prob_weight.json (100%) rename {JSON => commons/models}/dd_cs.json (100%) rename {JSON => commons/models}/dd_cs_single.json (100%) rename {JSON => commons/models}/dd_exp.json (100%) rename {JSON => commons/models}/dd_hyperbolic.json (100%) rename {JSON => commons/models}/dd_hyperbolic_single.json (100%) rename {JSON => commons/models}/gng_m1.json (100%) rename {JSON => commons/models}/gng_m2.json (100%) rename {JSON => commons/models}/gng_m3.json (100%) rename {JSON => commons/models}/gng_m4.json (100%) rename {JSON => commons/models}/igt_orl.json (100%) rename {JSON => commons/models}/igt_pvl_decay.json (100%) rename {JSON => commons/models}/igt_pvl_delta.json (100%) rename {JSON => commons/models}/igt_vpp.json (100%) rename {JSON => commons/models}/peer_ocu.json (100%) rename {JSON => commons/models}/prl_ewa.json (100%) rename {JSON => commons/models}/prl_fictitious.json (100%) rename {JSON => commons/models}/prl_fictitious_multipleB.json (100%) rename 
{JSON => commons/models}/prl_fictitious_rp.json (100%) rename {JSON => commons/models}/prl_fictitious_rp_woa.json (100%) rename {JSON => commons/models}/prl_fictitious_woa.json (100%) rename {JSON => commons/models}/prl_rp.json (100%) rename {JSON => commons/models}/prl_rp_multipleB.json (100%) rename {JSON => commons/models}/pst_gainloss_Q.json (100%) rename {JSON => commons/models}/ra_noLA.json (100%) rename {JSON => commons/models}/ra_noRA.json (100%) rename {JSON => commons/models}/ra_prospect.json (100%) rename {JSON => commons/models}/rdt_happiness.json (100%) rename {JSON => commons/models}/ts_par4.json (100%) rename {JSON => commons/models}/ts_par6.json (100%) rename {JSON => commons/models}/ts_par7.json (100%) rename {JSON => commons/models}/ug_bayes.json (100%) rename {JSON => commons/models}/ug_delta.json (100%) rename {JSON => commons/models}/wcs_sql.json (100%) rename {JSON => commons/templates}/PY_CODE_TEMPLATE.txt (100%) rename {JSON => commons/templates}/PY_DOCSTRING_TEMPLATE.txt (100%) diff --git a/JSON/ModelInformation.schema.json b/commons/ModelInformation.schema.json similarity index 100% rename from JSON/ModelInformation.schema.json rename to commons/ModelInformation.schema.json diff --git a/JSON/README.md b/commons/README.md similarity index 97% rename from JSON/README.md rename to commons/README.md index fc89c0a6..e400583d 100644 --- a/JSON/README.md +++ b/commons/README.md @@ -1,9 +1,5 @@ # Model Information JSON Files -Contributed by [Jethro Lee][jethro-lee]. - -[jethro-lee]: https://github.com/dlemfh - ## JSON Schema Schema for the Model Information JSON files is stored in `ModelInformation.schema.json` as a JSON Schema format. @@ -108,9 +104,9 @@ To validate JSON files, you need to have [`jsonschema`][jsonschema] installed; y [jsonschema]: https://github.com/Julian/jsonschema -To validate a single JSON file (e.g. `gng_m1.json`): +To validate a single JSON file (e.g. `models/gng_m1.json`): ``` -$ jsonschema -i gng_m1.json ModelInformation.schema.json +$ jsonschema -i models/gng_m1.json ModelInformation.schema.json ``` To validate all JSON files in directory, use following shell script: @@ -150,3 +146,9 @@ Created file: _bandit2arm_delta.py ... Created file: _wcs_sql.py ``` + +## Contribution + +Largely contributed by [Jethro Lee][jethro-lee]. 
+ +[jethro-lee]: https://github.com/dlemfh diff --git a/JSON/ValidateAll.sh b/commons/ValidateAll.sh similarity index 77% rename from JSON/ValidateAll.sh rename to commons/ValidateAll.sh index d228e151..3dbbc108 100755 --- a/JSON/ValidateAll.sh +++ b/commons/ValidateAll.sh @@ -1,7 +1,7 @@ #!/bin/bash # Written by Jetho Lee -for i in `ls [a-z]*.json`; do +for i in `ls models/[a-z]*.json`; do echo "========== $i ==========" jsonschema -i "$i" ModelInformation.schema.json done diff --git a/JSON/WritePython.py b/commons/WritePython.py similarity index 96% rename from JSON/WritePython.py rename to commons/WritePython.py index 631c4b29..a6f81070 100755 --- a/JSON/WritePython.py +++ b/commons/WritePython.py @@ -7,10 +7,14 @@ import glob import json import re + from pathlib import Path from typing import List, Iterable, Callable from collections import OrderedDict +PATH_TEMPLATE = Path(__file__) / 'templates' +PATH_OUTPUT = Path(__file__) / 'Python' + def main(json_file, verbose): # Make Path object for given filename @@ -48,7 +52,7 @@ def shortify(cite: str) -> str: (shortify(cite), cite) for cite in model_info['model_name']['cite']) # Read template for docstring - with open('PY_DOCSTRING_TEMPLATE.txt', 'r') as f: + with open(PATH_TEMPLATE / 'PY_DOCSTRING_TEMPLATE.txt', 'r') as f: docstring_template = f.read().format( model_function=model_function, task_name=model_info['task_name']['desc'], @@ -104,7 +108,7 @@ def shortify(cite: str) -> str: ) # Read template for model python code - with open('PY_CODE_TEMPLATE.txt', 'r') as f: + with open(PATH_TEMPLATE / 'PY_CODE_TEMPLATE.txt', 'r') as f: code_template = f.read().format( docstring_template=docstring_template, model_function=model_function, @@ -147,7 +151,7 @@ def shortify(cite: str) -> str: print(code_template) else: # Write model python code - code_fn = '_' + model_function + '.py' + code_fn = PATH_OUTPUT / ('_' + model_function + '.py') with open(code_fn, 'w') as f: f.write('"""\nGenerated by template. 
Do not edit by hand.\n"""\n') f.write(code_template) diff --git a/JSON/bandit2arm_delta.json b/commons/models/bandit2arm_delta.json similarity index 100% rename from JSON/bandit2arm_delta.json rename to commons/models/bandit2arm_delta.json diff --git a/JSON/bandit4arm2_kalman_filter.json b/commons/models/bandit4arm2_kalman_filter.json similarity index 100% rename from JSON/bandit4arm2_kalman_filter.json rename to commons/models/bandit4arm2_kalman_filter.json diff --git a/JSON/bandit4arm_2par_lapse.json b/commons/models/bandit4arm_2par_lapse.json similarity index 100% rename from JSON/bandit4arm_2par_lapse.json rename to commons/models/bandit4arm_2par_lapse.json diff --git a/JSON/bandit4arm_4par.json b/commons/models/bandit4arm_4par.json similarity index 100% rename from JSON/bandit4arm_4par.json rename to commons/models/bandit4arm_4par.json diff --git a/JSON/bandit4arm_lapse.json b/commons/models/bandit4arm_lapse.json similarity index 100% rename from JSON/bandit4arm_lapse.json rename to commons/models/bandit4arm_lapse.json diff --git a/JSON/bandit4arm_lapse_decay.json b/commons/models/bandit4arm_lapse_decay.json similarity index 100% rename from JSON/bandit4arm_lapse_decay.json rename to commons/models/bandit4arm_lapse_decay.json diff --git a/JSON/bandit4arm_singleA_lapse.json b/commons/models/bandit4arm_singleA_lapse.json similarity index 100% rename from JSON/bandit4arm_singleA_lapse.json rename to commons/models/bandit4arm_singleA_lapse.json diff --git a/JSON/bart_par4.json b/commons/models/bart_par4.json similarity index 100% rename from JSON/bart_par4.json rename to commons/models/bart_par4.json diff --git a/JSON/choiceRT_ddm.json b/commons/models/choiceRT_ddm.json similarity index 100% rename from JSON/choiceRT_ddm.json rename to commons/models/choiceRT_ddm.json diff --git a/JSON/choiceRT_ddm_single.json b/commons/models/choiceRT_ddm_single.json similarity index 100% rename from JSON/choiceRT_ddm_single.json rename to commons/models/choiceRT_ddm_single.json diff --git a/JSON/cra_exp.json b/commons/models/cra_exp.json similarity index 100% rename from JSON/cra_exp.json rename to commons/models/cra_exp.json diff --git a/JSON/cra_linear.json b/commons/models/cra_linear.json similarity index 100% rename from JSON/cra_linear.json rename to commons/models/cra_linear.json diff --git a/JSON/dbdm_prob_weight.json b/commons/models/dbdm_prob_weight.json similarity index 100% rename from JSON/dbdm_prob_weight.json rename to commons/models/dbdm_prob_weight.json diff --git a/JSON/dd_cs.json b/commons/models/dd_cs.json similarity index 100% rename from JSON/dd_cs.json rename to commons/models/dd_cs.json diff --git a/JSON/dd_cs_single.json b/commons/models/dd_cs_single.json similarity index 100% rename from JSON/dd_cs_single.json rename to commons/models/dd_cs_single.json diff --git a/JSON/dd_exp.json b/commons/models/dd_exp.json similarity index 100% rename from JSON/dd_exp.json rename to commons/models/dd_exp.json diff --git a/JSON/dd_hyperbolic.json b/commons/models/dd_hyperbolic.json similarity index 100% rename from JSON/dd_hyperbolic.json rename to commons/models/dd_hyperbolic.json diff --git a/JSON/dd_hyperbolic_single.json b/commons/models/dd_hyperbolic_single.json similarity index 100% rename from JSON/dd_hyperbolic_single.json rename to commons/models/dd_hyperbolic_single.json diff --git a/JSON/gng_m1.json b/commons/models/gng_m1.json similarity index 100% rename from JSON/gng_m1.json rename to commons/models/gng_m1.json diff --git a/JSON/gng_m2.json b/commons/models/gng_m2.json 
similarity index 100% rename from JSON/gng_m2.json rename to commons/models/gng_m2.json diff --git a/JSON/gng_m3.json b/commons/models/gng_m3.json similarity index 100% rename from JSON/gng_m3.json rename to commons/models/gng_m3.json diff --git a/JSON/gng_m4.json b/commons/models/gng_m4.json similarity index 100% rename from JSON/gng_m4.json rename to commons/models/gng_m4.json diff --git a/JSON/igt_orl.json b/commons/models/igt_orl.json similarity index 100% rename from JSON/igt_orl.json rename to commons/models/igt_orl.json diff --git a/JSON/igt_pvl_decay.json b/commons/models/igt_pvl_decay.json similarity index 100% rename from JSON/igt_pvl_decay.json rename to commons/models/igt_pvl_decay.json diff --git a/JSON/igt_pvl_delta.json b/commons/models/igt_pvl_delta.json similarity index 100% rename from JSON/igt_pvl_delta.json rename to commons/models/igt_pvl_delta.json diff --git a/JSON/igt_vpp.json b/commons/models/igt_vpp.json similarity index 100% rename from JSON/igt_vpp.json rename to commons/models/igt_vpp.json diff --git a/JSON/peer_ocu.json b/commons/models/peer_ocu.json similarity index 100% rename from JSON/peer_ocu.json rename to commons/models/peer_ocu.json diff --git a/JSON/prl_ewa.json b/commons/models/prl_ewa.json similarity index 100% rename from JSON/prl_ewa.json rename to commons/models/prl_ewa.json diff --git a/JSON/prl_fictitious.json b/commons/models/prl_fictitious.json similarity index 100% rename from JSON/prl_fictitious.json rename to commons/models/prl_fictitious.json diff --git a/JSON/prl_fictitious_multipleB.json b/commons/models/prl_fictitious_multipleB.json similarity index 100% rename from JSON/prl_fictitious_multipleB.json rename to commons/models/prl_fictitious_multipleB.json diff --git a/JSON/prl_fictitious_rp.json b/commons/models/prl_fictitious_rp.json similarity index 100% rename from JSON/prl_fictitious_rp.json rename to commons/models/prl_fictitious_rp.json diff --git a/JSON/prl_fictitious_rp_woa.json b/commons/models/prl_fictitious_rp_woa.json similarity index 100% rename from JSON/prl_fictitious_rp_woa.json rename to commons/models/prl_fictitious_rp_woa.json diff --git a/JSON/prl_fictitious_woa.json b/commons/models/prl_fictitious_woa.json similarity index 100% rename from JSON/prl_fictitious_woa.json rename to commons/models/prl_fictitious_woa.json diff --git a/JSON/prl_rp.json b/commons/models/prl_rp.json similarity index 100% rename from JSON/prl_rp.json rename to commons/models/prl_rp.json diff --git a/JSON/prl_rp_multipleB.json b/commons/models/prl_rp_multipleB.json similarity index 100% rename from JSON/prl_rp_multipleB.json rename to commons/models/prl_rp_multipleB.json diff --git a/JSON/pst_gainloss_Q.json b/commons/models/pst_gainloss_Q.json similarity index 100% rename from JSON/pst_gainloss_Q.json rename to commons/models/pst_gainloss_Q.json diff --git a/JSON/ra_noLA.json b/commons/models/ra_noLA.json similarity index 100% rename from JSON/ra_noLA.json rename to commons/models/ra_noLA.json diff --git a/JSON/ra_noRA.json b/commons/models/ra_noRA.json similarity index 100% rename from JSON/ra_noRA.json rename to commons/models/ra_noRA.json diff --git a/JSON/ra_prospect.json b/commons/models/ra_prospect.json similarity index 100% rename from JSON/ra_prospect.json rename to commons/models/ra_prospect.json diff --git a/JSON/rdt_happiness.json b/commons/models/rdt_happiness.json similarity index 100% rename from JSON/rdt_happiness.json rename to commons/models/rdt_happiness.json diff --git a/JSON/ts_par4.json b/commons/models/ts_par4.json 
similarity index 100% rename from JSON/ts_par4.json rename to commons/models/ts_par4.json diff --git a/JSON/ts_par6.json b/commons/models/ts_par6.json similarity index 100% rename from JSON/ts_par6.json rename to commons/models/ts_par6.json diff --git a/JSON/ts_par7.json b/commons/models/ts_par7.json similarity index 100% rename from JSON/ts_par7.json rename to commons/models/ts_par7.json diff --git a/JSON/ug_bayes.json b/commons/models/ug_bayes.json similarity index 100% rename from JSON/ug_bayes.json rename to commons/models/ug_bayes.json diff --git a/JSON/ug_delta.json b/commons/models/ug_delta.json similarity index 100% rename from JSON/ug_delta.json rename to commons/models/ug_delta.json diff --git a/JSON/wcs_sql.json b/commons/models/wcs_sql.json similarity index 100% rename from JSON/wcs_sql.json rename to commons/models/wcs_sql.json diff --git a/JSON/PY_CODE_TEMPLATE.txt b/commons/templates/PY_CODE_TEMPLATE.txt similarity index 100% rename from JSON/PY_CODE_TEMPLATE.txt rename to commons/templates/PY_CODE_TEMPLATE.txt diff --git a/JSON/PY_DOCSTRING_TEMPLATE.txt b/commons/templates/PY_DOCSTRING_TEMPLATE.txt similarity index 100% rename from JSON/PY_DOCSTRING_TEMPLATE.txt rename to commons/templates/PY_DOCSTRING_TEMPLATE.txt From e30336d7c4692e7b83338c2220c2c822e2b4a34c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 00:31:20 +0900 Subject: [PATCH 037/163] Use data.frame as the first argument --- R/R/hBayesDM_model.R | 51 ++++++++++++++++++++++++++------------------ 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R index 8b05952a..04d64296 100644 --- a/R/R/hBayesDM_model.R +++ b/R/R/hBayesDM_model.R @@ -114,7 +114,8 @@ hBayesDM_model <- function(task_name, preprocess_func) { # The resulting hBayesDM model function to be returned - function(data = "choose", + function(data = NULL, + datafile = "", niter = 4000, nwarmup = 1000, nchain = 4, @@ -128,6 +129,8 @@ hBayesDM_model <- function(task_name, adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, + use_example = FALSE, + choose_data = FALSE, ...) { ############### Stop checks ############### @@ -142,29 +145,35 @@ hBayesDM_model <- function(task_name, stop("** Posterior predictions are not yet available for this model. **\n") } - # For using "example" or "choose" data - if (data == "example") { - if (model_type == "") { - exampleData <- paste0(task_name, "_", "exampleData.txt") - } else { - exampleData <- paste0(task_name, "_", model_type, "_", "exampleData.txt") + if (!is.null(data)) { + # Use the given data object + raw_data <- data.table::as.data.table(data) + } else { + if (!is.na(datafile) && datafile != '') { + # Use the datafile to read data + } else if (use_example) { + if (model_type == "") { + exampleData <- paste0(task_name, "_", "exampleData.txt") + } else { + exampleData <- paste0(task_name, "_", model_type, "_", "exampleData.txt") + } + datafile <- system.file("extdata", exampleData, package = "hBayesDM") + } else if (choose_data) { + datafile <- file.choose() } - data <- system.file("extdata", exampleData, package = "hBayesDM") - } else if (data == "choose") { - data <- file.choose() - } - # Check if data file exists - if (!file.exists(data)) { - stop("** Data file does not exist. Please check again. **\n", - " e.g. data = \"MySubFolder/myData.txt\"\n") - } + # Check if data file exists + if (!file.exists(datafile)) { + stop("** Data file does not exist. Please check again. **\n", + " e.g. 
data = \"MySubFolder/myData.txt\"\n") + } - # Load the data - raw_data <- data.table::fread(file = data, header = TRUE, sep = "\t", data.table = TRUE, - fill = TRUE, stringsAsFactors = TRUE, logical01 = FALSE) - # NOTE: Separator is fixed to "\t" because fread() has trouble reading space delimited files - # that have missing values. + # Load the data + raw_data <- data.table::fread(file = datafile, header = TRUE, sep = "\t", data.table = TRUE, + fill = TRUE, stringsAsFactors = TRUE, logical01 = FALSE) + # NOTE: Separator is fixed to "\t" because fread() has trouble reading space delimited files + # that have missing values. + } # Save initial colnames of raw_data for later colnames_raw_data <- colnames(raw_data) From c6b7674a6d519ced6a24f8339156b53608d08e2f Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 00:31:36 +0900 Subject: [PATCH 038/163] Extract preprocessing functions --- R/R/Untitled.ipynb | 430 ++++++++++++++++++ R/R/preprocess_funcs.R | 958 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1388 insertions(+) create mode 100644 R/R/Untitled.ipynb create mode 100644 R/R/preprocess_funcs.R diff --git a/R/R/Untitled.ipynb b/R/R/Untitled.ipynb new file mode 100644 index 00000000..1b666442 --- /dev/null +++ b/R/R/Untitled.ipynb @@ -0,0 +1,430 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 73, + "metadata": {}, + "outputs": [], + "source": [ + "import re\n", + "\n", + "regex = r\"^(?P\\w+) <- hBayesDM_model\\(.+?preprocess_func = (?Pfunction.+?})\\n\\)\"" + ] + }, + { + "cell_type": "code", + "execution_count": 74, + "metadata": {}, + "outputs": [], + "source": [ + "from pathlib import Path\n", + "\n", + "path_cwd = Path.cwd()" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": {}, + "outputs": [], + "source": [ + "fns = sorted(path_cwd.glob('*.R'))" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": {}, + "outputs": [], + "source": [ + "matches = {}\n", + "for fn in fns:\n", + " with open(fn, 'r') as f:\n", + " codes = f.readlines()\n", + "\n", + " match = list(re.finditer(regex, ''.join(codes), re.MULTILINE | re.DOTALL))\n", + " if len(match) > 0:\n", + " matches[fn.name] = {\n", + " 'model': match[0].group('model'),\n", + " 'func': match[0].group('func').replace('\\n ', '\\n')\n", + " }" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": {}, + "outputs": [], + "source": [ + "tasks = sorted(set([k.split('.')[0].split('_')[0] for k in matches.keys()]))" + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "metadata": {}, + "outputs": [], + "source": [ + "funcs_task = {}\n", + "for k, v in matches.items():\n", + " task = k.split('.')[0].split('_')[0]\n", + " code = '{task}_preprocess_func <- {func}'\\\n", + " .format(model=v['model'], task=task, func=v['func'])\n", + " if task not in funcs_task:\n", + " funcs_task[task] = {v['model']: code}\n", + " elif code not in funcs_task[task].values():\n", + " funcs_task[task][v['model']] = code" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "metadata": {}, + "outputs": [], + "source": [ + "funcs = {t: [f'# From {k}\\n' + v for k, v in kv.items()] for t, kv in funcs_task.items()}" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": {}, + "outputs": [], + "source": [ + "with open('preprocess_funcs.R', 'w') as f:\n", + " for t, fs in funcs.items():\n", + " f.write(f'#### {t}\\n\\n')\n", + " for func in fs:\n", + " f.writelines(func)\n", + " f.write('\\n\\n')\n", + " 
f.write('\\n\\n\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "metadata": {}, + "outputs": [], + "source": [ + "import difflib" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "choiceRT\n", + "*** \n", + "\n", + "--- \n", + "\n", + "***************\n", + "\n", + "*** 1,40 ****\n", + "\n", + " choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) {\n", + "! # Use raw_data as a data.frame\n", + "! raw_data <- as.data.frame(raw_data)\n", + " \n", + "! # Use general_info of raw_data\n", + "! subjs <- general_info$subjs\n", + "! n_subj <- general_info$n_subj\n", + "! \n", + "! # Number of upper and lower boundary responses\n", + "! Nu <- with(raw_data, aggregate(choice == 2, by = list(y = subjid), FUN = sum)[[\"x\"]])\n", + "! Nl <- with(raw_data, aggregate(choice == 1, by = list(y = subjid), FUN = sum)[[\"x\"]])\n", + "! \n", + "! # Reaction times for upper and lower boundary responses\n", + "! RTu <- array(-1, c(n_subj, max(Nu)))\n", + "! RTl <- array(-1, c(n_subj, max(Nl)))\n", + "! for (i in 1:n_subj) {\n", + "! subj <- subjs[i]\n", + "! subj_data <- subset(raw_data, raw_data$subjid == subj)\n", + "! \n", + "! RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 0's\n", + "! RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation\n", + "! }\n", + "! \n", + "! # Minimum reaction time\n", + "! minRT <- with(raw_data, aggregate(rt, by = list(y = subjid), FUN = min)[[\"x\"]])\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + "! N = n_subj, # Number of subjects\n", + "! Nu_max = max(Nu), # Max (across subjects) number of upper boundary responses\n", + "! Nl_max = max(Nl), # Max (across subjects) number of lower boundary responses\n", + "! Nu = Nu, # Number of upper boundary responses for each subject\n", + "! Nl = Nl, # Number of lower boundary responses for each subject\n", + "! RTu = RTu, # Upper boundary response times\n", + "! RTl = RTl, # Lower boundary response times\n", + "! minRT = minRT, # Minimum RT for each subject\n", + "! RTbound = RTbound # Lower bound of RT across all subjects (e.g., 0.1 second)\n", + " )\n", + " \n", + " # Returned data_list will directly be passed to Stan\n", + "--- 1,18 ----\n", + "\n", + " choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) {\n", + "! # Currently class(raw_data) == \"data.table\"\n", + " \n", + "! # Data.tables for upper and lower boundary responses\n", + "! DT_upper <- raw_data[choice == 2]\n", + "! DT_lower <- raw_data[choice == 1]\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + "! Nu = nrow(DT_upper), # Number of upper boundary responses\n", + "! Nl = nrow(DT_lower), # Number of lower boundary responses\n", + "! RTu = DT_upper$rt, # Upper boundary response times\n", + "! RTl = DT_lower$rt, # Lower boundary response times\n", + "! minRT = min(raw_data$rt), # Minimum RT\n", + "! 
RTbound = RTbound # Lower bound of RT (e.g., 0.1 second)\n", + " )\n", + " \n", + " # Returned data_list will directly be passed to Stan\n", + "\n", + "\n", + "\n", + "################\n", + "\n", + "\n", + "\n", + "dd\n", + "*** \n", + "\n", + "--- \n", + "\n", + "***************\n", + "\n", + "*** 2,36 ****\n", + "\n", + " # Currently class(raw_data) == \"data.table\"\n", + " \n", + " # Use general_info of raw_data\n", + "- subjs <- general_info$subjs\n", + "- n_subj <- general_info$n_subj\n", + " t_subjs <- general_info$t_subjs\n", + "- t_max <- general_info$t_max\n", + " \n", + "! # Initialize (model-specific) data arrays\n", + "! delay_later <- array( 0, c(n_subj, t_max))\n", + "! amount_later <- array( 0, c(n_subj, t_max))\n", + "! delay_sooner <- array( 0, c(n_subj, t_max))\n", + "! amount_sooner <- array( 0, c(n_subj, t_max))\n", + "! choice <- array(-1, c(n_subj, t_max))\n", + "! \n", + "! # Write from raw_data to the data arrays\n", + "! for (i in 1:n_subj) {\n", + "! subj <- subjs[i]\n", + "! t <- t_subjs[i]\n", + "! DT_subj <- raw_data[subjid == subj]\n", + "! \n", + "! delay_later[i, 1:t] <- DT_subj$delaylater\n", + "! amount_later[i, 1:t] <- DT_subj$amountlater\n", + "! delay_sooner[i, 1:t] <- DT_subj$delaysooner\n", + "! amount_sooner[i, 1:t] <- DT_subj$amountsooner\n", + "! choice[i, 1:t] <- DT_subj$choice\n", + "! }\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + "- N = n_subj,\n", + "- T = t_max,\n", + " Tsubj = t_subjs,\n", + " delay_later = delay_later,\n", + " amount_later = amount_later,\n", + "--- 2,18 ----\n", + "\n", + " # Currently class(raw_data) == \"data.table\"\n", + " \n", + " # Use general_info of raw_data\n", + " t_subjs <- general_info$t_subjs\n", + " \n", + "! # Extract from raw_data\n", + "! delay_later <- raw_data$delaylater\n", + "! amount_later <- raw_data$amountlater\n", + "! delay_sooner <- raw_data$delaysooner\n", + "! amount_sooner <- raw_data$amountsooner\n", + "! choice <- raw_data$choice\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + " Tsubj = t_subjs,\n", + " delay_later = delay_later,\n", + " amount_later = amount_later,\n", + "\n", + "\n", + "\n", + "################\n", + "\n", + "\n", + "\n", + "igt\n", + "*** \n", + "\n", + "--- \n", + "\n", + "***************\n", + "\n", + "*** 23,34 ****\n", + "\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + "! N = n_subj,\n", + "! T = t_max,\n", + "! Tsubj = t_subjs,\n", + "! choice = Ydata,\n", + "! outcome = RLmatrix / payscale,\n", + "! sign_out = sign(RLmatrix)\n", + " )\n", + " \n", + " # Returned data_list will directly be passed to Stan\n", + "--- 23,33 ----\n", + "\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + "! N = n_subj,\n", + "! T = t_max,\n", + "! Tsubj = t_subjs,\n", + "! choice = Ydata,\n", + "! outcome = RLmatrix / payscale\n", + " )\n", + " \n", + " # Returned data_list will directly be passed to Stan\n", + "\n", + "\n", + "\n", + "################\n", + "\n", + "\n", + "\n", + "prl\n", + "*** \n", + "\n", + "--- \n", + "\n", + "***************\n", + "\n", + "*** 4,29 ****\n", + "\n", + " # Use general_info of raw_data\n", + " subjs <- general_info$subjs\n", + " n_subj <- general_info$n_subj\n", + " t_subjs <- general_info$t_subjs\n", + " t_max <- general_info$t_max\n", + " \n", + " # Initialize (model-specific) data arrays\n", + "! choice <- array(-1, c(n_subj, t_max))\n", + "! 
outcome <- array( 0, c(n_subj, t_max))\n", + " \n", + " # Write from raw_data to the data arrays\n", + " for (i in 1:n_subj) {\n", + " subj <- subjs[i]\n", + "- t <- t_subjs[i]\n", + " DT_subj <- raw_data[subjid == subj]\n", + " \n", + "! choice[i, 1:t] <- DT_subj$choice\n", + "! outcome[i, 1:t] <- sign(DT_subj$outcome) # use sign\n", + " }\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + " N = n_subj,\n", + " T = t_max,\n", + " Tsubj = t_subjs,\n", + " choice = choice,\n", + "--- 4,39 ----\n", + "\n", + " # Use general_info of raw_data\n", + " subjs <- general_info$subjs\n", + " n_subj <- general_info$n_subj\n", + "+ b_subjs <- general_info$b_subjs\n", + "+ b_max <- general_info$b_max\n", + " t_subjs <- general_info$t_subjs\n", + " t_max <- general_info$t_max\n", + " \n", + " # Initialize (model-specific) data arrays\n", + "! choice <- array(-1, c(n_subj, b_max, t_max))\n", + "! outcome <- array( 0, c(n_subj, b_max, t_max))\n", + " \n", + " # Write from raw_data to the data arrays\n", + " for (i in 1:n_subj) {\n", + " subj <- subjs[i]\n", + " DT_subj <- raw_data[subjid == subj]\n", + "+ blocks_of_subj <- unique(DT_subj$block)\n", + " \n", + "! for (b in 1:b_subjs[i]) {\n", + "! curr_block <- blocks_of_subj[b]\n", + "! DT_curr_block <- DT_subj[block == curr_block]\n", + "! t <- t_subjs[i, b]\n", + "! \n", + "! choice[i, b, 1:t] <- DT_curr_block$choice\n", + "! outcome[i, b, 1:t] <- sign(DT_curr_block$outcome) # use sign\n", + "! }\n", + " }\n", + " \n", + " # Wrap into a list for Stan\n", + " data_list <- list(\n", + " N = n_subj,\n", + "+ B = b_max,\n", + "+ Bsubj = b_subjs,\n", + " T = t_max,\n", + " Tsubj = t_subjs,\n", + " choice = choice,\n", + "\n", + "\n", + "\n", + "################\n", + "\n", + "\n", + "\n" + ] + } + ], + "source": [ + "for k, v in {k: v for k, v in funcs_task.items() if len(v) > 1}.items():\n", + " print(k)\n", + " before = funcs_task[k][0].split('\\n')\n", + " after = funcs_task[k][1].split('\\n')\n", + " for line in difflib.context_diff(before, after):\n", + " print(line)\n", + " print('\\n\\n\\n################\\n\\n\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.4" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R new file mode 100644 index 00000000..ba77ef33 --- /dev/null +++ b/R/R/preprocess_funcs.R @@ -0,0 +1,958 @@ +#### bandit2arm + +# From bandit2arm_delta +bandit2arm_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + choice <- array(-1, c(n_subj, t_max)) + outcome <- array( 0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + choice[i, 1:t] <- DT_subj$choice + outcome[i, 1:t] <- DT_subj$outcome + } + + # Wrap into a list for Stan + data_list <- list( + N = 
n_subj, + T = t_max, + Tsubj = t_subjs, + choice = choice, + outcome = outcome + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### bandit4arm2 + +# From bandit4arm2_kalman_filter +bandit4arm2_preprocess_func <- function(raw_data, general_info) { + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + choice <- array(-1, c(n_subj, t_max)) + outcome <- array( 0, c(n_subj, t_max)) + + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + choice[i, 1:t] <- DT_subj$choice + outcome[i, 1:t] <- DT_subj$outcome + } + + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + choice = choice, + outcome = outcome + ) + + return(data_list) +} + + + + +#### bandit4arm + +# From bandit4arm_2par_lapse +bandit4arm_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + rew <- array( 0, c(n_subj, t_max)) + los <- array( 0, c(n_subj, t_max)) + choice <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + rew[i, 1:t] <- DT_subj$gain + los[i, 1:t] <- -1 * abs(DT_subj$loss) + choice[i, 1:t] <- DT_subj$choice + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + rew = rew, + los = los, + choice = choice + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### bart + +# From bart_par4 +bart_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + pumps <- array(0, c(n_subj, t_max)) + explosion <- array(0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + pumps[i, 1:t] <- DT_subj$pumps + explosion[i, 1:t] <- DT_subj$explosion + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + P = max(pumps) + 1, + pumps = pumps, + explosion = explosion + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### choiceRT + +# From choiceRT_ddm +choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { + # Use raw_data as a data.frame + raw_data <- as.data.frame(raw_data) + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + + # Number of upper and lower boundary responses + Nu <- with(raw_data, aggregate(choice == 2, by = list(y = subjid), FUN = sum)[["x"]]) + Nl <- with(raw_data, aggregate(choice == 1, by = list(y = subjid), FUN = sum)[["x"]]) + + # Reaction times for upper and lower boundary responses + RTu <- array(-1, c(n_subj, max(Nu))) + RTl <- array(-1, c(n_subj, max(Nl))) + for (i in 1:n_subj) { + subj <- subjs[i] + subj_data <- subset(raw_data, raw_data$subjid == subj) + + RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 
0's + RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation + } + + # Minimum reaction time + minRT <- with(raw_data, aggregate(rt, by = list(y = subjid), FUN = min)[["x"]]) + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, # Number of subjects + Nu_max = max(Nu), # Max (across subjects) number of upper boundary responses + Nl_max = max(Nl), # Max (across subjects) number of lower boundary responses + Nu = Nu, # Number of upper boundary responses for each subject + Nl = Nl, # Number of lower boundary responses for each subject + RTu = RTu, # Upper boundary response times + RTl = RTl, # Lower boundary response times + minRT = minRT, # Minimum RT for each subject + RTbound = RTbound # Lower bound of RT across all subjects (e.g., 0.1 second) + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + +# From choiceRT_ddm_single +choiceRT_single_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { + # Currently class(raw_data) == "data.table" + + # Data.tables for upper and lower boundary responses + DT_upper <- raw_data[choice == 2] + DT_lower <- raw_data[choice == 1] + + # Wrap into a list for Stan + data_list <- list( + Nu = nrow(DT_upper), # Number of upper boundary responses + Nl = nrow(DT_lower), # Number of lower boundary responses + RTu = DT_upper$rt, # Upper boundary response times + RTl = DT_lower$rt, # Lower boundary response times + minRT = min(raw_data$rt), # Minimum RT + RTbound = RTbound # Lower bound of RT (e.g., 0.1 second) + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### cra + +# From cra_exp +cra_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + choice <- array(0, c(n_subj, t_max)) + prob <- array(0, c(n_subj, t_max)) + ambig <- array(0, c(n_subj, t_max)) + reward_var <- array(0, c(n_subj, t_max)) + reward_fix <- array(0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + choice[i, 1:t] <- DT_subj$choice + prob[i, 1:t] <- DT_subj$prob + ambig[i, 1:t] <- DT_subj$ambig + reward_var[i, 1:t] <- DT_subj$rewardvar + reward_fix[i, 1:t] <- DT_subj$rewardfix + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + choice = choice, + prob = prob, + ambig = ambig, + reward_var = reward_var, + reward_fix = reward_fix + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### dbdm + +# From dbdm_prob_weight +dbdm_preprocess_func <- function(raw_data, general_info) { + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + opt1hprob <- array( 0, c(n_subj, t_max)) + opt2hprob <- array( 0, c(n_subj, t_max)) + opt1hval <- array( 0, c(n_subj, t_max)) + opt1lval <- array( 0, c(n_subj, t_max)) + opt2hval <- array( 0, c(n_subj, t_max)) + opt2lval <- array( 0, c(n_subj, t_max)) + choice <- array(-1, c(n_subj, t_max)) + + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + opt1hprob[i, 1:t] <- DT_subj$opt1hprob + opt2hprob[i, 1:t] <- DT_subj$opt2hprob + opt1hval[i, 1:t] 
<- DT_subj$opt1hval + opt1lval[i, 1:t] <- DT_subj$opt1lval + opt2hval[i, 1:t] <- DT_subj$opt2hval + opt2lval[i, 1:t] <- DT_subj$opt2lval + choice[i, 1:t] <- DT_subj$choice + } + + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + opt1hprob = opt1hprob, + opt2hprob = opt2hprob, + opt1hval = opt1hval, + opt1lval = opt1lval, + opt2hval = opt2hval, + opt2lval = opt2lval, + choice = choice + ) + + return(data_list) +} + + + + +#### dd + +# From dd_cs +dd_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + delay_later <- array( 0, c(n_subj, t_max)) + amount_later <- array( 0, c(n_subj, t_max)) + delay_sooner <- array( 0, c(n_subj, t_max)) + amount_sooner <- array( 0, c(n_subj, t_max)) + choice <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + delay_later[i, 1:t] <- DT_subj$delaylater + amount_later[i, 1:t] <- DT_subj$amountlater + delay_sooner[i, 1:t] <- DT_subj$delaysooner + amount_sooner[i, 1:t] <- DT_subj$amountsooner + choice[i, 1:t] <- DT_subj$choice + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + delay_later = delay_later, + amount_later = amount_later, + delay_sooner = delay_sooner, + amount_sooner = amount_sooner, + choice = choice + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + +# From dd_cs_single +dd_single_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + t_subjs <- general_info$t_subjs + + # Extract from raw_data + delay_later <- raw_data$delaylater + amount_later <- raw_data$amountlater + delay_sooner <- raw_data$delaysooner + amount_sooner <- raw_data$amountsooner + choice <- raw_data$choice + + # Wrap into a list for Stan + data_list <- list( + Tsubj = t_subjs, + delay_later = delay_later, + amount_later = amount_later, + delay_sooner = delay_sooner, + amount_sooner = amount_sooner, + choice = choice + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### gng + +# From gng_m1 +gng_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + cue <- array( 1, c(n_subj, t_max)) + pressed <- array(-1, c(n_subj, t_max)) + outcome <- array( 0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + cue[i, 1:t] <- DT_subj$cue + pressed[i, 1:t] <- DT_subj$keypressed + outcome[i, 1:t] <- DT_subj$outcome + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + cue = cue, + pressed = pressed, + outcome = outcome + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### igt + +# From igt_orl +igt_preprocess_func <- function(raw_data, general_info, payscale = 100) { + # Currently class(raw_data) == "data.table" + + # Use general_info of 
raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize data arrays + Ydata <- array(-1, c(n_subj, t_max)) + RLmatrix <- array( 0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + Ydata[i, 1:t] <- DT_subj$choice + RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss) + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + choice = Ydata, + outcome = RLmatrix / payscale, + sign_out = sign(RLmatrix) + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + +#### peer + +# From peer_ocu +peer_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + condition <- array( 0, c(n_subj, t_max)) + p_gamble <- array( 0, c(n_subj, t_max)) + safe_Hpayoff <- array( 0, c(n_subj, t_max)) + safe_Lpayoff <- array( 0, c(n_subj, t_max)) + risky_Hpayoff <- array( 0, c(n_subj, t_max)) + risky_Lpayoff <- array( 0, c(n_subj, t_max)) + choice <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + condition[i, 1:t] <- DT_subj$condition + p_gamble[i, 1:t] <- DT_subj$pgamble + safe_Hpayoff[i, 1:t] <- DT_subj$safehpayoff + safe_Lpayoff[i, 1:t] <- DT_subj$safelpayoff + risky_Hpayoff[i, 1:t] <- DT_subj$riskyhpayoff + risky_Lpayoff[i, 1:t] <- DT_subj$riskylpayoff + choice[i, 1:t] <- DT_subj$choice + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + condition = condition, + p_gamble = p_gamble, + safe_Hpayoff = safe_Hpayoff, + safe_Lpayoff = safe_Lpayoff, + risky_Hpayoff = risky_Hpayoff, + risky_Lpayoff = risky_Lpayoff, + choice = choice + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### prl + +# From prl_ewa +prl_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + choice <- array(-1, c(n_subj, t_max)) + outcome <- array( 0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + choice[i, 1:t] <- DT_subj$choice + outcome[i, 1:t] <- sign(DT_subj$outcome) # use sign + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + choice = choice, + outcome = outcome + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + +# From prl_fictitious_multipleB +prl_multipleB_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + b_subjs <- general_info$b_subjs + b_max <- general_info$b_max + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + choice 
<- array(-1, c(n_subj, b_max, t_max)) + outcome <- array( 0, c(n_subj, b_max, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + DT_subj <- raw_data[subjid == subj] + blocks_of_subj <- unique(DT_subj$block) + + for (b in 1:b_subjs[i]) { + curr_block <- blocks_of_subj[b] + DT_curr_block <- DT_subj[block == curr_block] + t <- t_subjs[i, b] + + choice[i, b, 1:t] <- DT_curr_block$choice + outcome[i, b, 1:t] <- sign(DT_curr_block$outcome) # use sign + } + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + B = b_max, + Bsubj = b_subjs, + T = t_max, + Tsubj = t_subjs, + choice = choice, + outcome = outcome + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### pst + +# From pst_gainloss_Q +pst_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + option1 <- array(-1, c(n_subj, t_max)) + option2 <- array(-1, c(n_subj, t_max)) + choice <- array(-1, c(n_subj, t_max)) + reward <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + option1[i, 1:t] <- DT_subj$type %/% 10 + option2[i, 1:t] <- DT_subj$type %% 10 + choice[i, 1:t] <- DT_subj$choice + reward[i, 1:t] <- DT_subj$reward + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + option1 = option1, + option2 = option2, + choice = choice, + reward = reward + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### ra + +# From ra_noLA +ra_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + gain <- array( 0, c(n_subj, t_max)) + loss <- array( 0, c(n_subj, t_max)) + cert <- array( 0, c(n_subj, t_max)) + gamble <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + gain[i, 1:t] <- DT_subj$gain + loss[i, 1:t] <- abs(DT_subj$loss) # absolute loss amount + cert[i, 1:t] <- DT_subj$cert + gamble[i, 1:t] <- DT_subj$gamble + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + gain = gain, + loss = loss, + cert = cert, + gamble = gamble + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### rdt + +# From rdt_happiness +rdt_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + gain <- array( 0, c(n_subj, t_max)) + loss <- array( 0, c(n_subj, t_max)) + cert <- array( 0, c(n_subj, t_max)) + type <- array(-1, c(n_subj, t_max)) + gamble <- array(-1, c(n_subj, t_max)) + outcome <- array( 0, c(n_subj, t_max)) + happy <- array( 0, c(n_subj, t_max)) + RT_happy <- array( 0, c(n_subj, 
t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + gain[i, 1:t] <- DT_subj$gain + loss[i, 1:t] <- abs(DT_subj$loss) # absolute loss amount + cert[i, 1:t] <- DT_subj$cert + type[i, 1:t] <- DT_subj$type + gamble[i, 1:t] <- DT_subj$gamble + outcome[i, 1:t] <- DT_subj$outcome + happy[i, 1:t] <- DT_subj$happy + RT_happy[i, 1:t] <- DT_subj$rthappy + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + gain = gain, + loss = loss, + cert = cert, + type = type, + gamble = gamble, + outcome = outcome, + happy = happy, + RT_happy = RT_happy + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### ts + +# From ts_par4 +ts_preprocess_func <- function(raw_data, general_info, trans_prob = 0.7) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + level1_choice <- array(1, c(n_subj, t_max)) + level2_choice <- array(1, c(n_subj, t_max)) + reward <- array(0, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + level1_choice[i, 1:t] <- DT_subj$level1choice + level2_choice[i, 1:t] <- DT_subj$level2choice + reward[i, 1:t] <- DT_subj$reward + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + level1_choice = level1_choice, + level2_choice = level2_choice, + reward = reward, + trans_prob = trans_prob + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### ug + +# From ug_bayes +ug_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs + t_max <- general_info$t_max + + # Initialize (model-specific) data arrays + offer <- array( 0, c(n_subj, t_max)) + accept <- array(-1, c(n_subj, t_max)) + + # Write from raw_data to the data arrays + for (i in 1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + + offer[i, 1:t] <- DT_subj$offer + accept[i, 1:t] <- DT_subj$accept + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + offer = offer, + accept = accept + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + +#### wcs + +# From wcs_sql +wcs_preprocess_func <- function(raw_data, general_info) { + # Currently class(raw_data) == "data.table" + + # Use general_info of raw_data + subjs <- general_info$subjs + n_subj <- general_info$n_subj + t_subjs <- general_info$t_subjs +# t_max <- general_info$t_max + t_max <- 128 + + # Read predefined answer sheet + answersheet <- system.file("extdata", "wcs_answersheet.txt", package = "hBayesDM") + answer <- read.table(answersheet, header = TRUE) + + # Initialize data arrays + choice <- array( 0, c(n_subj, 4, t_max)) + outcome <- array(-1, c(n_subj, t_max)) + choice_match_att <- array( 0, c(n_subj, t_max, 1, 3)) # Info about chosen deck (per each trial) + deck_match_rule <- array( 0, c(t_max, 3, 4)) # Info about all 4 decks (per each trial) + + # Write: choice, outcome, choice_match_att + for (i in 
1:n_subj) { + subj <- subjs[i] + t <- t_subjs[i] + DT_subj <- raw_data[subjid == subj] + DT_subj_choice <- DT_subj$choice + DT_subj_outcome <- DT_subj$outcome + + for (tr in 1:t) { + ch <- DT_subj_choice[tr] + ou <- DT_subj_outcome[tr] + choice[i, ch, tr] <- 1 + outcome[i, tr] <- ou + choice_match_att[i, tr, 1, ] <- answer[, tr] == ch + } + } + + # Write: deck_match_rule + for (tr in 1:t_max) { + for (ru in 1:3) { + deck_match_rule[tr, ru, answer[ru, tr]] <- 1 + } + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + Tsubj = t_subjs, + choice = choice, + outcome = outcome, + choice_match_att = choice_match_att, + deck_match_rule = deck_match_rule + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} + + + + From c07eaa38f3054c9b5c08fa05333acfbe217be6b5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 16:36:08 +0900 Subject: [PATCH 039/163] Upload temporary template for R docstring --- commons/templates/R_DOCSTRING_TEMPLATE.txt | 166 +++++++++++++++++++++ 1 file changed, 166 insertions(+) create mode 100644 commons/templates/R_DOCSTRING_TEMPLATE.txt diff --git a/commons/templates/R_DOCSTRING_TEMPLATE.txt b/commons/templates/R_DOCSTRING_TEMPLATE.txt new file mode 100644 index 00000000..c5a49e28 --- /dev/null +++ b/commons/templates/R_DOCSTRING_TEMPLATE.txt @@ -0,0 +1,166 @@ +#' @title <%= TASK_NAME %> <%= get0("TASK_CITE") %> +#' +#' @description +#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> with the following parameters: +#' <%= PARAMETERS %>. +#' +#' <%= ifelse(exists("CONTRIBUTOR"), paste0("@description Contributor: ", CONTRIBUTOR), "") %> +#' +#' @description +#' \strong{MODEL:} <%= MODEL_NAME %> <%= get0("MODEL_CITE") %> +#' +#' @param data A .txt file containing the data to be modeled. Data columns should be labeled as: +#' <%= DATA_COLUMNS %>. See \bold{Details} below for more information. +#' @param niter Number of iterations, including warm-up. Defaults to 4000. +#' @param nwarmup Number of iterations used for warm-up only. Defaults to 1000. +#' @param nchain Number of Markov chains to run. Defaults to 4. +#' @param ncore Number of CPUs to be used for running. Defaults to 1. +#' @param nthin Every \code{i == nthin} sample will be used to generate the posterior distribution. +#' Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +#' high. +#' @param inits Character value specifying how the initial values should be generated. +#' Possible options are "vb" (default), "fixed", "random", or your own initial values. +#' @param indPars Character value specifying how to summarize individual parameters. Current options +#' are: "mean", "median", or "mode". +#' @param modelRegressor +#' <% EXISTS_REGRESSORS <- paste0("For this model they are: ", get0("REGRESSORS"), ".") %> +#' <% NOT_EXISTS_REGRESSORS <- "Currently not available for this model." %> +#' Export model-based regressors? TRUE or FALSE. +#' <%= ifelse(exists("REGRESSORS"), EXISTS_REGRESSORS, NOT_EXISTS_REGRESSORS) %> +#' @param vb Use variational inference to approximately draw from a posterior distribution. Defaults +#' to FALSE. +#' @param inc_postpred +#' <% POSTPREDS_NULL <- exists("IS_NULL_POSTPREDS") && (IS_NULL_POSTPREDS == "TRUE") %> +#' <%= ifelse(POSTPREDS_NULL, "\\strong{(Currently not available.)}", "") %> +#' Include trial-level posterior predictive simulations in model output (may greatly increase file +#' size). Defaults to FALSE. 
+#' @param adapt_delta Floating point value representing the target acceptance probability of a new +#' sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below. +#' @param stepsize Integer value specifying the size of each leapfrog step that the MCMC sampler can +#' take on each new iteration. See \bold{Details} below. +#' @param max_treedepth Integer value specifying how many leapfrog steps the MCMC sampler can take +#' on each new iteration. See \bold{Details} below. +#' @param ... +#' <% AA_EXP_1 <- "For this model, it's possible to set the following \\strong{model-specific " %> +#' <% AA_EXP_2 <- "argument} to a value that you may prefer. \\cr" %> +#' <%= ifelse(exists("ADDITIONAL_ARG"), paste0(AA_EXP_1, AA_EXP_2), "Not used for this model.") %> +#' <%= ifelse(exists("ADDITIONAL_ARG"), ADDITIONAL_ARG, "") %> +#' +#' @details +#' This section describes some of the function arguments in greater detail. +#' +#' \strong{data} should be assigned a character value specifying the full path and name (including +#' extension information, e.g. ".txt") of the file that contains the behavioral data-set of all +#' subjects of interest for the current analysis. The file should be a \strong{tab-delimited} text +#' file, whose rows represent trial-by-trial observations and columns represent variables.\cr +#' For the <%= TASK_NAME %>, there should be <%= LENGTH_DATA_COLUMNS %> columns of data with the +#' labels <%= DATA_COLUMNS %>. It is not necessary for the columns to be in this particular order, +#' however it is necessary that they be labeled correctly and contain the information below: +#' \describe{ +#' <%= DETAILS_DATA_1 %> +#' <%= get0("DETAILS_DATA_2") %> +#' <%= get0("DETAILS_DATA_3") %> +#' <%= get0("DETAILS_DATA_4") %> +#' <%= get0("DETAILS_DATA_5") %> +#' <%= get0("DETAILS_DATA_6") %> +#' <%= get0("DETAILS_DATA_7") %> +#' <%= get0("DETAILS_DATA_8") %> +#' <%= get0("DETAILS_DATA_9") %> +#' } +#' \strong{*}Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", +#' etc.), but only the data within the column names listed above will be used during the modeling. +#' As long as the necessary columns mentioned above are present and labeled correctly, there is no +#' need to remove other miscellaneous data columns. +#' +#' \strong{nwarmup} is a numerical value that specifies how many MCMC samples should not be stored +#' upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent +#' to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the +#' sampling chains begin) can have a heavy influence on the generated posterior distributions. The +#' \code{nwarmup} argument can be set to a high number in order to curb the effects that initial +#' values have on the resulting posteriors. +#' +#' \strong{nchain} is a numerical value that specifies how many chains (i.e. independent sampling +#' sequences) should be used to draw samples from the posterior distribution. Since the posteriors +#' are generated from a sampling process, it is good practice to run multiple chains to ensure +#' that a reasonably representative posterior is attained. When the sampling is complete, it is +#' possible to check the multiple chains for convergence by running the following line of code: +#' \code{plot(output, type = "trace")}. The trace-plot should resemble a "furry caterpillar". 
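+#'
+#' For instance, a minimal convergence check might look like the following
+#' (a sketch only; \code{<%= MODEL_FUNCTION %>} is substituted per model, and
+#' the calls mirror the examples at the bottom of this template):
+#' \preformatted{
+#' output <- <%= MODEL_FUNCTION %>("example", niter = 2000, nwarmup = 1000, nchain = 4)
+#' plot(output, type = "trace")  # visually check mixing of the chains
+#' rhat(output)                  # all Rhat values should be <= 1.1
+#' }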
+#' +#' \strong{nthin} is a numerical value that specifies the "skipping" behavior of the MCMC sampler, +#' using only every \code{i == nthin} samples to generate posterior distributions. By default, +#' \code{nthin} is equal to 1, meaning that every sample is used to generate the posterior. +#' +#' \strong{Control Parameters:} \code{adapt_delta}, \code{stepsize}, and \code{max_treedepth} are +#' advanced options that give the user more control over Stan's MCMC sampler. It is recommended +#' that only advanced users change the default values, as alterations can profoundly change the +#' sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in +#' Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for +#' more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC +#' Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide +#' and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical +#' description of these arguments. +#' +#' @return A class "hBayesDM" object \code{modelData} with the following components: +#' \describe{ +#' \item{\code{model}}{Character value that is the name of the model ("<%= MODEL_FUNCTION %>").} +#' \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by +#' \code{indPars}) for each subject.} +#' \item{\code{parVals}}{List object containing the posterior samples over different parameters.} +#' \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan +#' model.} +#' \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by +#' the user.} +#' <% RETURN_REGRESSORS <- "\\item{\\code{modelRegressor}}{List object containing the " %> +#' <% RETURN_REGRESSORS <- paste0(RETURN_REGRESSORS, "extracted model-based regressors.}") %> +#' <%= ifelse(exists("REGRESSORS"), RETURN_REGRESSORS, "") %> +#' } +#' +#' @seealso +#' We refer users to our in-depth tutorial for an example of using hBayesDM: +#' \url{https://rpubs.com/CCSL/hBayesDM} +#' +#' @examples +#' \dontrun{ +#' # Run the model and store results in "output" +#' output <- <%= MODEL_FUNCTION %>("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +#' +#' # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') +#' plot(output, type = "trace") +#' +#' # Check Rhat values (all Rhat values should be less than or equal to 1.1) +#' rhat(output) +#' +#' # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal) +#' plot(output) +#' +#' # Show the WAIC and LOOIC model fit estimates +#' printFit(output) +#' } + +#' @templateVar MODEL_FUNCTION {model_function} +#' @templateVar TASK_NAME {task_name} +#' @templateVar MODEL_NAME {model_name} +#' @templateVar MODEL_CITE {task_cite_short} +#' @templateVar MODEL_TYPE {model_type} +#' @templateVar DATA_COLUMNS {data_columns} +#' @templateVar PARAMETERS {parameters} +#' @templateVar LENGTH_DATA_COLUMNS {data_columns_len} +{data_columns_details} +#' +#' @template model-documentation +#' +#' @export +#' @include hBayesDM_model.R +#' @include preprocess_funcs.R +#' +#' @references +{model_cite_long} +{task_cite_long} +#' +#' @examples +#' +#' \dontrun{ +#' # Paths to data published in Sokol-Hessner et al. 
(2009) +#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") +#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") +#' } From d80d5ba2d6653ce2ce4d8d12c9dfb6055bb14cb1 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 16:36:21 +0900 Subject: [PATCH 040/163] Remove unnecessary comments --- R/R/preprocess_funcs.R | 110 +---------------------------------------- 1 file changed, 1 insertion(+), 109 deletions(-) diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R index ba77ef33..ccd8a8f1 100644 --- a/R/R/preprocess_funcs.R +++ b/R/R/preprocess_funcs.R @@ -1,6 +1,5 @@ -#### bandit2arm +#' -# From bandit2arm_delta bandit2arm_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -37,12 +36,6 @@ bandit2arm_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### bandit4arm2 - -# From bandit4arm2_kalman_filter bandit4arm2_preprocess_func <- function(raw_data, general_info) { subjs <- general_info$subjs n_subj <- general_info$n_subj @@ -72,12 +65,6 @@ bandit4arm2_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### bandit4arm - -# From bandit4arm_2par_lapse bandit4arm_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -117,12 +104,6 @@ bandit4arm_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### bart - -# From bart_par4 bart_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -160,12 +141,6 @@ bart_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### choiceRT - -# From choiceRT_ddm choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { # Use raw_data as a data.frame raw_data <- as.data.frame(raw_data) @@ -209,7 +184,6 @@ choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { return(data_list) } -# From choiceRT_ddm_single choiceRT_single_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { # Currently class(raw_data) == "data.table" @@ -231,12 +205,6 @@ choiceRT_single_preprocess_func <- function(raw_data, general_info, RTbound = 0. 
return(data_list) } - - - -#### cra - -# From cra_exp cra_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -282,12 +250,6 @@ cra_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### dbdm - -# From dbdm_prob_weight dbdm_preprocess_func <- function(raw_data, general_info) { subjs <- general_info$subjs n_subj <- general_info$n_subj @@ -332,12 +294,6 @@ dbdm_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### dd - -# From dd_cs dd_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -383,7 +339,6 @@ dd_preprocess_func <- function(raw_data, general_info) { return(data_list) } -# From dd_cs_single dd_single_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -411,12 +366,6 @@ dd_single_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### gng - -# From gng_m1 gng_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -456,12 +405,6 @@ gng_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### igt - -# From igt_orl igt_preprocess_func <- function(raw_data, general_info, payscale = 100) { # Currently class(raw_data) == "data.table" @@ -499,11 +442,6 @@ igt_preprocess_func <- function(raw_data, general_info, payscale = 100) { return(data_list) } - - -#### peer - -# From peer_ocu peer_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -555,12 +493,6 @@ peer_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### prl - -# From prl_ewa prl_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -597,7 +529,6 @@ prl_preprocess_func <- function(raw_data, general_info) { return(data_list) } -# From prl_fictitious_multipleB prl_multipleB_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -644,12 +575,6 @@ prl_multipleB_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### pst - -# From pst_gainloss_Q pst_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -692,12 +617,6 @@ pst_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### ra - -# From ra_noLA ra_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -740,12 +659,6 @@ ra_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### rdt - -# From rdt_happiness rdt_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -800,12 +713,6 @@ rdt_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### ts - -# From ts_par4 ts_preprocess_func <- function(raw_data, general_info, trans_prob = 0.7) { # Currently class(raw_data) == "data.table" @@ -846,12 +753,6 @@ ts_preprocess_func <- function(raw_data, general_info, trans_prob = 0.7) { return(data_list) } - - - -#### ug - -# From ug_bayes ug_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == "data.table" @@ -888,12 +789,6 @@ ug_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - -#### wcs - -# From wcs_sql wcs_preprocess_func <- function(raw_data, general_info) { # Currently class(raw_data) == 
"data.table" @@ -953,6 +848,3 @@ wcs_preprocess_func <- function(raw_data, general_info) { return(data_list) } - - - From c21b0852961c50d5f07fb5b3caa8aec503282d5b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 21:31:45 +0900 Subject: [PATCH 041/163] Write codes to generate R codes from JSON files --- R/man-roxygen/model-documentation.R | 38 ++- commons/.gitignore | 2 + ...ritePython.py => generate-python-codes.py} | 0 commons/generate-r-codes.py | 296 ++++++++++++++++++ ...RING_TEMPLATE.txt => PY_DOCS_TEMPLATE.txt} | 0 commons/templates/R_CODE_TEMPLATE.txt | 10 + commons/templates/R_DOCSTRING_TEMPLATE.txt | 166 ---------- commons/templates/R_DOCS_TEMPLATE.txt | 25 ++ 8 files changed, 361 insertions(+), 176 deletions(-) create mode 100644 commons/.gitignore rename commons/{WritePython.py => generate-python-codes.py} (100%) mode change 100755 => 100644 create mode 100644 commons/generate-r-codes.py rename commons/templates/{PY_DOCSTRING_TEMPLATE.txt => PY_DOCS_TEMPLATE.txt} (100%) create mode 100644 commons/templates/R_CODE_TEMPLATE.txt delete mode 100644 commons/templates/R_DOCSTRING_TEMPLATE.txt create mode 100644 commons/templates/R_DOCS_TEMPLATE.txt diff --git a/R/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R index 60e2c042..cbc6f134 100644 --- a/R/man-roxygen/model-documentation.R +++ b/R/man-roxygen/model-documentation.R @@ -1,14 +1,16 @@ -#' @title <%= TASK_NAME %> <%= get0("TASK_CITE") %> +#' @title <%= TASK_NAME %> - <%= MODEL_NAME %> #' #' @description -#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> with the following parameters: +#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> <%= get0("TASK_CITE") %> +#' with the following parameters: #' <%= PARAMETERS %>. #' -#' <%= ifelse(exists("CONTRIBUTOR"), paste0("@description Contributor: ", CONTRIBUTOR), "") %> -#' #' @description +#' \strong{TASK:} <%= TASK_NAME %> <%= get0("TASK_CITE") %> #' \strong{MODEL:} <%= MODEL_NAME %> <%= get0("MODEL_CITE") %> #' +#' <%= ifelse(exists("CONTRIBUTOR"), paste0("@description Contributor: ", CONTRIBUTOR), "") %> +#' #' @param data A .txt file containing the data to be modeled. Data columns should be labeled as: #' <%= DATA_COLUMNS %>. See \bold{Details} below for more information. #' @param niter Number of iterations, including warm-up. Defaults to 4000. @@ -30,10 +32,11 @@ #' @param vb Use variational inference to approximately draw from a posterior distribution. Defaults #' to FALSE. #' @param inc_postpred -#' <% POSTPREDS_NULL <- exists("IS_NULL_POSTPREDS") && (IS_NULL_POSTPREDS == "TRUE") %> -#' <%= ifelse(POSTPREDS_NULL, "\\strong{(Currently not available.)}", "") %> +#' <% HAS_POSTPREDS <- exists("POSTPREDS") && (POSTPREDS != "TRUE") %> +#' <%= ifelse(HAS_POSTPREDS, "", "\\strong{(Currently not available.)}") %> #' Include trial-level posterior predictive simulations in model output (may greatly increase file #' size). Defaults to FALSE. +#' <%= ifelse(HAS_POSTPREDS, paste0("If set to TRUE, it includes: ", POSTPREDS), "") %> #' @param adapt_delta Floating point value representing the target acceptance probability of a new #' sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below. #' @param stepsize Integer value specifying the size of each leapfrog step that the MCMC sampler can @@ -41,10 +44,7 @@ #' @param max_treedepth Integer value specifying how many leapfrog steps the MCMC sampler can take #' on each new iteration. See \bold{Details} below. #' @param ... 
-#' <% AA_EXP_1 <- "For this model, it's possible to set the following \\strong{model-specific " %> -#' <% AA_EXP_2 <- "argument} to a value that you may prefer. \\cr" %> -#' <%= ifelse(exists("ADDITIONAL_ARG"), paste0(AA_EXP_1, AA_EXP_2), "Not used for this model.") %> -#' <%= ifelse(exists("ADDITIONAL_ARG"), ADDITIONAL_ARG, "") %> +#' Additional arguments. See \bold{Details} below. #' #' @details #' This section describes some of the function arguments in greater detail. @@ -100,6 +100,24 @@ #' and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical #' description of these arguments. #' +#' \strong{...}: +#' <% AA_T1 <- "For this model, it's possible to set \\strong{model-specific argument(s)} " %> +#' <% AA_T2 <- "as follows: " %> +#' <% AA_T <- paste0(AA_T1, AA_T2) %> +#' <% AA_F <- "For this model, there is no model-specific argument." %> +#' <%= ifelse(as.integer(ADDITIONAL_ARGS_LEN) > 0, AA_T, AA_F) %> +#' \describe{ +#' <%= get0("ADDITIONAL_ARGS_1") %> +#' <%= get0("ADDITIONAL_ARGS_2") %> +#' <%= get0("ADDITIONAL_ARGS_3") %> +#' <%= get0("ADDITIONAL_ARGS_4") %> +#' <%= get0("ADDITIONAL_ARGS_5") %> +#' <%= get0("ADDITIONAL_ARGS_6") %> +#' <%= get0("ADDITIONAL_ARGS_7") %> +#' <%= get0("ADDITIONAL_ARGS_8") %> +#' <%= get0("ADDITIONAL_ARGS_9") %> +#' } +#' #' @return A class "hBayesDM" object \code{modelData} with the following components: #' \describe{ #' \item{\code{model}}{Character value that is the name of the model ("<%= MODEL_FUNCTION %>").} diff --git a/commons/.gitignore b/commons/.gitignore new file mode 100644 index 00000000..958aad28 --- /dev/null +++ b/commons/.gitignore @@ -0,0 +1,2 @@ +R/ +Python/ diff --git a/commons/WritePython.py b/commons/generate-python-codes.py old mode 100755 new mode 100644 similarity index 100% rename from commons/WritePython.py rename to commons/generate-python-codes.py diff --git a/commons/generate-r-codes.py b/commons/generate-r-codes.py new file mode 100644 index 00000000..53cf8dbb --- /dev/null +++ b/commons/generate-r-codes.py @@ -0,0 +1,296 @@ +#!/usr/bin/env python3 +""" +Generate R codes for hBayesDM using model information defined in a JSON file. +""" +import sys +import argparse +import json +import re +from pathlib import Path +from typing import List, Iterable, Callable +from collections import OrderedDict + +PATH_ROOT = Path(__file__).absolute().parent +PATH_MODELS = PATH_ROOT / 'models' +PATH_TEMPLATE = PATH_ROOT / 'templates' +PATH_OUTPUT = PATH_ROOT / 'R' + +TEMPLATE_DOCS = PATH_TEMPLATE / 'R_DOCS_TEMPLATE.txt' +TEMPLATE_CODE = PATH_TEMPLATE / 'R_CODE_TEMPLATE.txt' + + +def parse_cite_string(cite): + """Parse given APA citation string into a dict object""" + if cite == '': + return None + + regex_authoryear = r'(?P^.+?)\s\((?P\d+?)\)' + regex_author = r'(?=\s\&)?\s?(?P[^,&]+?,\s[^,&]+?)(?=,|\n|\r|$)' + + m_ay = re.search(regex_authoryear, cite) + year = m_ay.group('year') + + authors = [] + for m in re.finditer(regex_author, m_ay.group('authors')): + authors.append(m.group('author')) + + firstauthor = authors[0].split(',')[0] + shortcite = '{}{}'.format(firstauthor, year) + if len(authors) == 1: + barecite = '{}, {}'.format(firstauthor, year) + textcite = '{} ({})'.format(firstauthor, year) + else: + barecite = '{} et al., {}'.format(firstauthor, year) + textcite = '{} et al. 
({})'.format(firstauthor, year) + parencite = '({})'.format(barecite) + + return { + 'authors': authors, + 'year': year, + 'shortcite': shortcite, + 'barecite': barecite, + 'textcite': textcite, + 'parencite': parencite, + 'fullcite': cite + } + + +def format_parencite(cites): + if len(cites) == 0: + return '' + return '(' + '; '.join([c['barecite'] for c in cites if c]) + ')' + + +def format_fullcite(cites, sep='\n#\' '): + if len(cites) == 0: + return '' + return sep.join([c['fullcite'] for c in cites if c]) + + +def generate_docstring(info): + # Model full name (Snake-case) + model_function = [info['task_name']['code'], info['model_name']['code']] + if info['model_type']['code'] != '': + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) + + # Citations + task_cite = [parse_cite_string(c) for c in info['task_name']['cite']] + model_cite = [parse_cite_string(c) for c in info['model_name']['cite']] + + task_parencite = format_parencite(task_cite) + model_parencite = format_parencite(model_cite) + + references = format_fullcite(task_cite + model_cite, sep='\n#\'\n#\' ') + + # Notes + if len(info.get('notes', [])) > 0: + notes = '@description\n#\' \\strong{Notes:}\n#\' ' + \ + '\n#\' '.join(info['notes']) + notes = '\n#\' ' + notes + '\n#\'' + else: + notes = '' + + # Contributors + contributors = ', '.join([ + r'\href{%s}{%s} <%s>' % (c['link'], c['name'], c['email']) + for c in info.get('contributors', []) + ]) + + # Data columns + data_columns = ', '.join([ + r'"%s"' % k for k in info.get('data_columns', {}).keys() + ]) + data_columns_len = len(info['data_columns']) + data_columns_details = '\n#\' '.join([ + r'@templateVar DETAILS_DATA_%d \item{"%s"}{%s}' + % (i + 1, k, v.replace('\n', '\\cr')) + for i, (k, v) in enumerate(info['data_columns'].items()) + ]) + + # Parameters + parameters = ', '.join([ + '"%s" (%s)' % (k, v['desc']) + for k, v in info['parameters'].items() + ]) + + # Regressors + regressors = ', '.join([ + '"%s"' % k for k in info.get('regressors', {}).keys() + ]) + + # Postpreds + postpreds = ', '.join([ + '"%s"' % v for v in info.get('postpreds', []) + ]) + + # Additional arguments + additional_args = info.get('additional_args', {}) + additional_args_len = len(additional_args) + if additional_args_len > 0: + additional_args_details = '\n#\' '.join([ + r'@templateVar ADDITIONAL_ARGS_%d \strong{%s}: %s' + % (i + 1, v['code'], v['desc']) + for i, v in enumerate(additional_args) + ]) + additional_args_details += '\n#\'' + else: + additional_args_details = '' + + # Read template for docstring + with open(TEMPLATE_DOCS, 'r') as f: + docs_template = f.read() + + docs = docs_template % dict( + model_function=model_function, + task_name=info['task_name']['desc'], + task_parencite=task_parencite, + model_name=info['model_name']['desc'], + model_parencite=model_parencite, + model_type=info['model_type']['desc'], + notes=notes, + contributor=contributors, + data_columns=data_columns, + data_columns_len=data_columns_len, + data_columns_details=data_columns_details, + parameters=parameters, + regressors=regressors, + postpreds=postpreds, + additional_args_len=additional_args_len, + additional_args_details=additional_args_details, + references=references, + ) + + return docs + + +def generate_code(info): + # Model full name (Snake-case) + model_function = [info['task_name']['code'], info['model_name']['code']] + if info['model_type']['code'] != '': + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) 
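+    # e.g. task_name "choiceRT" + model_name "ddm" + model_type "single"
+    # yields "choiceRT_ddm_single"; an empty model_type gives two-part
+    # names such as "bandit2arm_delta".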
+
+    # Prefix to preprocess_func
+    prefix_preprocess_func = info['task_name']['code']
+    if info['model_type']['code']:
+        prefix_preprocess_func += '_' + info['model_type']['code']
+    preprocess_func = prefix_preprocess_func + '_preprocess_func'
+
+    # Data columns
+    data_columns = ', '.join([
+        r'"%s"' % k for k in info.get('data_columns', {}).keys()
+    ])
+
+    # Parameters
+    _params = info.get('parameters', {})
+    if len(_params) > 0:
+        parameters = ',\n    '.join([
+            '"{}" = c({}, {}, {})'
+            .format(k,
+                    v['info'][0] if v['info'][0] else 'NULL',
+                    v['info'][1] if v['info'][1] else 'NULL',
+                    v['info'][2] if v['info'][2] else 'NULL')
+            for k, v in _params.items()
+        ])
+        parameters = 'list(\n    ' + parameters + '\n  )'
+    else:
+        parameters = 'NULL'
+
+    # Regressors
+    _regs = info.get('regressors', {})
+    if len(_regs) > 0:
+        regressors = ',\n    '.join([
+            '"{}" = {}'.format(k, v) for k, v in _regs.items()
+        ])
+        regressors = 'list(\n    ' + regressors + '\n  )'
+    else:
+        regressors = 'NULL'
+
+    # Postpreds
+    _postpreds = info.get('postpreds', [])
+    if len(_postpreds) > 0:
+        postpreds = ', '.join(['"%s"' % v for v in _postpreds])
+        postpreds = 'c(' + postpreds + ')'
+    else:
+        postpreds = 'NULL'
+
+    # Read template for model codes
+    with open(TEMPLATE_CODE, 'r') as f:
+        code_template = f.read()
+
+    code = code_template % dict(
+        model_function=model_function,
+        task_name=info['task_name']['code'],
+        model_name=info['model_name']['code'],
+        model_type=info['model_type']['code'],
+        data_columns=data_columns,
+        parameters=parameters,
+        regressors=regressors,
+        postpreds=postpreds,
+        preprocess_func=preprocess_func,
+    )
+
+    return code
+
+
+def main(json_fn, verbose):
+    p = Path(json_fn)
+
+    # Check if file exists
+    if not p.exists():
+        print('FileNotFound: Please specify existing json_file as argument.')
+        sys.exit(1)
+
+    # Load json_file
+    with open(p, 'r') as f:
+        info = json.load(f, object_pairs_hook=OrderedDict)
+
+    docs = generate_docstring(info)
+    code = generate_code(info)
+    output = docs + code
+
+    if verbose:
+        # Print code string to stdout
+        print(output)
+    else:
+        # Model full name (Snake-case)
+        model_function = [info['task_name']['code'], info['model_name']['code']]
+        if info['model_type']['code'] != '':
+            model_function.append(info['model_type']['code'])
+        model_function = '_'.join(model_function)
+
+        if not PATH_OUTPUT.exists():
+            PATH_OUTPUT.mkdir(exist_ok=True)
+
+        # Write model R code
+        code_fn = PATH_OUTPUT / (model_function + '.R')
+        with open(code_fn, 'w') as f:
+            f.write(output)
+        print('Created file:', code_fn.name)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-a', '--all',
+        help='write for all json files in directory',
+        action='store_true')
+    parser.add_argument(
+        '-v', '--verbose',
+        help='print output to stdout instead of writing to file',
+        action='store_true')
+    parser.add_argument(
+        'json_file',
+        help='JSON file of the model to generate corresponding R code',
+        type=str, nargs='*')
+
+    args = parser.parse_args()
+
+    if args.all:
+        # `all` flag overrides `json_file` & `verbose`
+        all_json_files = PATH_MODELS.glob('*.json')
+        for json_fn in all_json_files:
+            main(json_fn, False)
+    else:
+        # `json_file` is a list (nargs='*'); generate each model in turn
+        for json_fn in args.json_file:
+            main(json_fn, args.verbose)
diff --git a/commons/templates/PY_DOCSTRING_TEMPLATE.txt b/commons/templates/PY_DOCS_TEMPLATE.txt
similarity index 100%
rename from commons/templates/PY_DOCSTRING_TEMPLATE.txt
rename to commons/templates/PY_DOCS_TEMPLATE.txt
diff --git a/commons/templates/R_CODE_TEMPLATE.txt
b/commons/templates/R_CODE_TEMPLATE.txt new file mode 100644 index 00000000..0de91d7a --- /dev/null +++ b/commons/templates/R_CODE_TEMPLATE.txt @@ -0,0 +1,10 @@ +%(model_function)s <- hBayesDM_model( + task_name = "%(task_name)s", + model_name = "%(model_name)s", + model_type = "%(model_type)s", + data_columns = c(%(data_columns)s), + parameters = %(parameters)s, + regressors = %(regressors)s, + postpreds = %(postpreds)s, + preprocess_func = %(preprocess_func)s) + diff --git a/commons/templates/R_DOCSTRING_TEMPLATE.txt b/commons/templates/R_DOCSTRING_TEMPLATE.txt deleted file mode 100644 index c5a49e28..00000000 --- a/commons/templates/R_DOCSTRING_TEMPLATE.txt +++ /dev/null @@ -1,166 +0,0 @@ -#' @title <%= TASK_NAME %> <%= get0("TASK_CITE") %> -#' -#' @description -#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> with the following parameters: -#' <%= PARAMETERS %>. -#' -#' <%= ifelse(exists("CONTRIBUTOR"), paste0("@description Contributor: ", CONTRIBUTOR), "") %> -#' -#' @description -#' \strong{MODEL:} <%= MODEL_NAME %> <%= get0("MODEL_CITE") %> -#' -#' @param data A .txt file containing the data to be modeled. Data columns should be labeled as: -#' <%= DATA_COLUMNS %>. See \bold{Details} below for more information. -#' @param niter Number of iterations, including warm-up. Defaults to 4000. -#' @param nwarmup Number of iterations used for warm-up only. Defaults to 1000. -#' @param nchain Number of Markov chains to run. Defaults to 4. -#' @param ncore Number of CPUs to be used for running. Defaults to 1. -#' @param nthin Every \code{i == nthin} sample will be used to generate the posterior distribution. -#' Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -#' high. -#' @param inits Character value specifying how the initial values should be generated. -#' Possible options are "vb" (default), "fixed", "random", or your own initial values. -#' @param indPars Character value specifying how to summarize individual parameters. Current options -#' are: "mean", "median", or "mode". -#' @param modelRegressor -#' <% EXISTS_REGRESSORS <- paste0("For this model they are: ", get0("REGRESSORS"), ".") %> -#' <% NOT_EXISTS_REGRESSORS <- "Currently not available for this model." %> -#' Export model-based regressors? TRUE or FALSE. -#' <%= ifelse(exists("REGRESSORS"), EXISTS_REGRESSORS, NOT_EXISTS_REGRESSORS) %> -#' @param vb Use variational inference to approximately draw from a posterior distribution. Defaults -#' to FALSE. -#' @param inc_postpred -#' <% POSTPREDS_NULL <- exists("IS_NULL_POSTPREDS") && (IS_NULL_POSTPREDS == "TRUE") %> -#' <%= ifelse(POSTPREDS_NULL, "\\strong{(Currently not available.)}", "") %> -#' Include trial-level posterior predictive simulations in model output (may greatly increase file -#' size). Defaults to FALSE. -#' @param adapt_delta Floating point value representing the target acceptance probability of a new -#' sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below. -#' @param stepsize Integer value specifying the size of each leapfrog step that the MCMC sampler can -#' take on each new iteration. See \bold{Details} below. -#' @param max_treedepth Integer value specifying how many leapfrog steps the MCMC sampler can take -#' on each new iteration. See \bold{Details} below. -#' @param ... -#' <% AA_EXP_1 <- "For this model, it's possible to set the following \\strong{model-specific " %> -#' <% AA_EXP_2 <- "argument} to a value that you may prefer. 
\\cr" %> -#' <%= ifelse(exists("ADDITIONAL_ARG"), paste0(AA_EXP_1, AA_EXP_2), "Not used for this model.") %> -#' <%= ifelse(exists("ADDITIONAL_ARG"), ADDITIONAL_ARG, "") %> -#' -#' @details -#' This section describes some of the function arguments in greater detail. -#' -#' \strong{data} should be assigned a character value specifying the full path and name (including -#' extension information, e.g. ".txt") of the file that contains the behavioral data-set of all -#' subjects of interest for the current analysis. The file should be a \strong{tab-delimited} text -#' file, whose rows represent trial-by-trial observations and columns represent variables.\cr -#' For the <%= TASK_NAME %>, there should be <%= LENGTH_DATA_COLUMNS %> columns of data with the -#' labels <%= DATA_COLUMNS %>. It is not necessary for the columns to be in this particular order, -#' however it is necessary that they be labeled correctly and contain the information below: -#' \describe{ -#' <%= DETAILS_DATA_1 %> -#' <%= get0("DETAILS_DATA_2") %> -#' <%= get0("DETAILS_DATA_3") %> -#' <%= get0("DETAILS_DATA_4") %> -#' <%= get0("DETAILS_DATA_5") %> -#' <%= get0("DETAILS_DATA_6") %> -#' <%= get0("DETAILS_DATA_7") %> -#' <%= get0("DETAILS_DATA_8") %> -#' <%= get0("DETAILS_DATA_9") %> -#' } -#' \strong{*}Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", -#' etc.), but only the data within the column names listed above will be used during the modeling. -#' As long as the necessary columns mentioned above are present and labeled correctly, there is no -#' need to remove other miscellaneous data columns. -#' -#' \strong{nwarmup} is a numerical value that specifies how many MCMC samples should not be stored -#' upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent -#' to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the -#' sampling chains begin) can have a heavy influence on the generated posterior distributions. The -#' \code{nwarmup} argument can be set to a high number in order to curb the effects that initial -#' values have on the resulting posteriors. -#' -#' \strong{nchain} is a numerical value that specifies how many chains (i.e. independent sampling -#' sequences) should be used to draw samples from the posterior distribution. Since the posteriors -#' are generated from a sampling process, it is good practice to run multiple chains to ensure -#' that a reasonably representative posterior is attained. When the sampling is complete, it is -#' possible to check the multiple chains for convergence by running the following line of code: -#' \code{plot(output, type = "trace")}. The trace-plot should resemble a "furry caterpillar". -#' -#' \strong{nthin} is a numerical value that specifies the "skipping" behavior of the MCMC sampler, -#' using only every \code{i == nthin} samples to generate posterior distributions. By default, -#' \code{nthin} is equal to 1, meaning that every sample is used to generate the posterior. -#' -#' \strong{Control Parameters:} \code{adapt_delta}, \code{stepsize}, and \code{max_treedepth} are -#' advanced options that give the user more control over Stan's MCMC sampler. It is recommended -#' that only advanced users change the default values, as alterations can profoundly change the -#' sampler's behavior. 
Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in -#' Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for -#' more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC -#' Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide -#' and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical -#' description of these arguments. -#' -#' @return A class "hBayesDM" object \code{modelData} with the following components: -#' \describe{ -#' \item{\code{model}}{Character value that is the name of the model ("<%= MODEL_FUNCTION %>").} -#' \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by -#' \code{indPars}) for each subject.} -#' \item{\code{parVals}}{List object containing the posterior samples over different parameters.} -#' \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan -#' model.} -#' \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by -#' the user.} -#' <% RETURN_REGRESSORS <- "\\item{\\code{modelRegressor}}{List object containing the " %> -#' <% RETURN_REGRESSORS <- paste0(RETURN_REGRESSORS, "extracted model-based regressors.}") %> -#' <%= ifelse(exists("REGRESSORS"), RETURN_REGRESSORS, "") %> -#' } -#' -#' @seealso -#' We refer users to our in-depth tutorial for an example of using hBayesDM: -#' \url{https://rpubs.com/CCSL/hBayesDM} -#' -#' @examples -#' \dontrun{ -#' # Run the model and store results in "output" -#' output <- <%= MODEL_FUNCTION %>("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) -#' -#' # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') -#' plot(output, type = "trace") -#' -#' # Check Rhat values (all Rhat values should be less than or equal to 1.1) -#' rhat(output) -#' -#' # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal) -#' plot(output) -#' -#' # Show the WAIC and LOOIC model fit estimates -#' printFit(output) -#' } - -#' @templateVar MODEL_FUNCTION {model_function} -#' @templateVar TASK_NAME {task_name} -#' @templateVar MODEL_NAME {model_name} -#' @templateVar MODEL_CITE {task_cite_short} -#' @templateVar MODEL_TYPE {model_type} -#' @templateVar DATA_COLUMNS {data_columns} -#' @templateVar PARAMETERS {parameters} -#' @templateVar LENGTH_DATA_COLUMNS {data_columns_len} -{data_columns_details} -#' -#' @template model-documentation -#' -#' @export -#' @include hBayesDM_model.R -#' @include preprocess_funcs.R -#' -#' @references -{model_cite_long} -{task_cite_long} -#' -#' @examples -#' -#' \dontrun{ -#' # Paths to data published in Sokol-Hessner et al. 
(2009) -#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") -#' } diff --git a/commons/templates/R_DOCS_TEMPLATE.txt b/commons/templates/R_DOCS_TEMPLATE.txt new file mode 100644 index 00000000..078430bb --- /dev/null +++ b/commons/templates/R_DOCS_TEMPLATE.txt @@ -0,0 +1,25 @@ +#' @templateVar MODEL_FUNCTION %(model_function)s +#' @templateVar CONTRIBUTOR %(contributor)s +#' @templateVar TASK_NAME %(task_name)s +#' @templateVar TASK_CITE %(task_parencite)s +#' @templateVar MODEL_NAME %(model_name)s +#' @templateVar MODEL_CITE %(model_parencite)s +#' @templateVar MODEL_TYPE %(model_type)s +#' @templateVar DATA_COLUMNS %(data_columns)s +#' @templateVar PARAMETERS %(parameters)s +#' @templateVar REGRESSORS %(regressors)s +#' @templateVar POSTPREDS %(postpreds)s +#' @templateVar LENGTH_DATA_COLUMNS %(data_columns_len)s +#' %(data_columns_details)s +#' @templateVar LENGTH_ADDITIONAL_ARGS %(additional_args_len)s +#' %(additional_args_details)s +#' @template model-documentation +#' +#' @export +#' @include hBayesDM_model.R +#' @include preprocess_funcs.R +#' %(notes)s +#' @references +#' %(references)s +#' + From d5bc1098ed5619bddcb7b9408c7fdb2bab02320a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Tue, 20 Aug 2019 21:52:26 +0900 Subject: [PATCH 042/163] Replace R codes with generated ones --- R/R/bandit2arm_delta.R | 65 +++++++----------------- R/R/bandit4arm2_kalman_filter.R | 64 +++++++++-------------- R/R/bandit4arm_2par_lapse.R | 67 ++++++++---------------- R/R/bandit4arm_4par.R | 71 ++++++++------------------ R/R/bandit4arm_lapse.R | 71 ++++++++------------------ R/R/bandit4arm_lapse_decay.R | 72 +++++++++----------------- R/R/bandit4arm_singleA_lapse.R | 68 ++++++++----------------- R/R/bart_par4.R | 68 ++++++++----------------- R/R/choiceRT_ddm.R | 85 +++++++++---------------------- R/R/choiceRT_ddm_single.R | 61 +++++++++------------- R/R/cra_exp.R | 83 +++++++++--------------------- R/R/cra_linear.R | 83 +++++++++--------------------- R/R/dbdm_prob_weight.R | 80 ++++++++--------------------- R/R/dd_cs.R | 73 ++++++++------------------ R/R/dd_cs_single.R | 54 +++++++------------- R/R/dd_exp.R | 71 +++++++------------------- R/R/dd_hyperbolic.R | 66 +++++++----------------- R/R/dd_hyperbolic_single.R | 47 ++++++----------- R/R/gng_m1.R | 76 +++++++++------------------- R/R/gng_m2.R | 78 +++++++++------------------- R/R/gng_m3.R | 82 ++++++++++-------------------- R/R/gng_m4.R | 84 ++++++++++-------------------- R/R/igt_orl.R | 73 +++++++++----------------- R/R/igt_pvl_decay.R | 70 +++++++++---------------- R/R/igt_pvl_delta.R | 68 ++++++++----------------- R/R/igt_vpp.R | 77 ++++++++++------------------ R/R/peer_ocu.R | 79 +++++++---------------------- R/R/preprocess_funcs.R | 2 - R/R/prl_ewa.R | 74 +++++++++------------------ R/R/prl_fictitious.R | 76 +++++++++------------------- R/R/prl_fictitious_multipleB.R | 85 +++++++++---------------------- R/R/prl_fictitious_rp.R | 83 ++++++++++-------------------- R/R/prl_fictitious_rp_woa.R | 81 ++++++++++------------------- R/R/prl_fictitious_woa.R | 75 +++++++++------------------ R/R/prl_rp.R | 72 +++++++++----------------- R/R/prl_rp_multipleB.R | 81 ++++++++--------------------- R/R/pst_gainloss_Q.R | 74 ++++++++------------------- R/R/ra_noLA.R | 79 ++++++++--------------------- R/R/ra_noRA.R | 79 ++++++++--------------------- R/R/ra_prospect.R | 80 ++++++++--------------------- 
R/R/rdt_happiness.R | 90 +++++++++------------------------ R/R/ts_par4.R | 78 +++++++++------------------- R/R/ts_par6.R | 79 +++++++++-------------------- R/R/ts_par7.R | 81 ++++++++++------------------- R/R/ug_bayes.R | 65 ++++++++---------------- R/R/ug_delta.R | 66 ++++++++---------------- R/R/wcs_sql.R | 90 ++++++++------------------------- 47 files changed, 1035 insertions(+), 2391 deletions(-) diff --git a/R/R/bandit2arm_delta.R b/R/R/bandit2arm_delta.R index d5343bcd..838e31fd 100644 --- a/R/R/bandit2arm_delta.R +++ b/R/R/bandit2arm_delta.R @@ -1,69 +1,42 @@ #' @templateVar MODEL_FUNCTION bandit2arm_delta +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 2-Armed Bandit Task #' @templateVar TASK_CITE (Erev et al., 2010; Hertwig et al., 2004) #' @templateVar MODEL_NAME Rescorla-Wagner (Delta) Model +#' @templateVar MODEL_CITE #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" #' @templateVar PARAMETERS "A" (learning rate), "tau" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1 or 2.} #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice -#' prediction competition: Choices from experience and from description. Journal of Behavioral -#' Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683 +#' Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683 +#' +#' Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x #' -#' Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the -#' Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. 
-#' http://doi.org/10.1111/j.0956-7976.2004.00715.x bandit2arm_delta <- hBayesDM_model( task_name = "bandit2arm", model_name = "delta", + model_type = "", data_columns = c("subjID", "choice", "outcome"), - parameters = list("A" = c(0, 0.5, 1), - "tau" = c(0, 1, 5)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - choice <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - choice[i, 1:t] <- DT_subj$choice - outcome[i, 1:t] <- DT_subj$outcome - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - choice = choice, - outcome = outcome - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "A" = c(NULL, 0.5, 1), + "tau" = c(NULL, 1, 5) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit2arm_preprocess_func) diff --git a/R/R/bandit4arm2_kalman_filter.R b/R/R/bandit4arm2_kalman_filter.R index 83b92f57..934df7aa 100644 --- a/R/R/bandit4arm2_kalman_filter.R +++ b/R/R/bandit4arm2_kalman_filter.R @@ -1,62 +1,44 @@ #' @templateVar MODEL_FUNCTION bandit4arm2_kalman_filter -#' @templateVar CONTRIBUTOR \href{https://zohyos7.github.io}{Yoonseo Zoh}, \href{https://lei-zhang.net/}{Lei Zhang} +#' @templateVar CONTRIBUTOR \href{https://zohyos7.github.io}{Yoonseo Zoh} #' @templateVar TASK_NAME 4-Armed Bandit Task (modified) +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Kalman Filter -#' @templateVar MODEL_CITE (Daw et al., 2006, Nature) +#' @templateVar MODEL_CITE (Daw et al., 2006) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" #' @templateVar PARAMETERS "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates -#' for exploratory decisions in humans. Nature, 441(7095), 876-879. +#' Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879. 
+#' bandit4arm2_kalman_filter <- hBayesDM_model( task_name = "bandit4arm2", model_name = "kalman_filter", + model_type = "", data_columns = c("subjID", "choice", "outcome"), - parameters = list("lambda" = c(0, 0.9, 1), - "theta" = c(0, 50, 100), - "beta" = c(0, 0.1, 1), - "mu0" = c(0, 85, 100), - "sigma0" = c(0, 6, 15), - "sigmaD" = c(0, 3, 15)), - preprocess_func = function(raw_data, general_info) { - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - choice <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - choice[i, 1:t] <- DT_subj$choice - outcome[i, 1:t] <- DT_subj$outcome - } - - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - choice = choice, - outcome = outcome - ) - - return(data_list) - } -) + parameters = list( + "lambda" = c(NULL, 0.9, 1), + "theta" = c(NULL, 50, 100), + "beta" = c(NULL, 0.1, 1), + "mu0" = c(NULL, 85, 100), + "sigma0" = c(NULL, 6, 15), + "sigmaD" = c(NULL, 3, 15) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm2_preprocess_func) diff --git a/R/R/bandit4arm_2par_lapse.R b/R/R/bandit4arm_2par_lapse.R index 19902a56..f5440d94 100644 --- a/R/R/bandit4arm_2par_lapse.R +++ b/R/R/bandit4arm_2par_lapse.R @@ -1,69 +1,42 @@ #' @templateVar MODEL_FUNCTION bandit4arm_2par_lapse +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) -#' @templateVar MODEL_CITE (Aylward et al., 2018, PsyArXiv) +#' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under -#' uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m +#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 
10.31234/osf.io/k5b8m +#' bandit4arm_2par_lapse <- hBayesDM_model( task_name = "bandit4arm", model_name = "2par_lapse", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("Arew" = c(0, 0.1, 1), - "Apun" = c(0, 0.1, 1), - "xi" = c(0, 0.1, 1)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - rew <- array( 0, c(n_subj, t_max)) - los <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - rew[i, 1:t] <- DT_subj$gain - los[i, 1:t] <- -1 * abs(DT_subj$loss) - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - rew = rew, - los = los, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "Arew" = c(NULL, 0.1, 1), + "Apun" = c(NULL, 0.1, 1), + "xi" = c(NULL, 0.1, 1) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm_preprocess_func) diff --git a/R/R/bandit4arm_4par.R b/R/R/bandit4arm_4par.R index 95a868e9..2c706192 100644 --- a/R/R/bandit4arm_4par.R +++ b/R/R/bandit4arm_4par.R @@ -1,70 +1,43 @@ #' @templateVar MODEL_FUNCTION bandit4arm_4par +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME 4 Parameter Model, without C (choice perseveration) -#' @templateVar MODEL_CITE (Seymour et al., 2012, J Neuro) +#' @templateVar MODEL_CITE (Seymour et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in -#' Human Decision-Making. J Neuro, 32(17), 5833-5842. +#' Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842. 
+#' bandit4arm_4par <- hBayesDM_model( task_name = "bandit4arm", model_name = "4par", - data_columns = c("subjID", "choice", "gain", "choice"), - parameters = list("Arew" = c(0, 0.1, 1), - "Apun" = c(0, 0.1, 1), - "R" = c(0, 1, 30), - "P" = c(0, 1, 30)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - rew <- array( 0, c(n_subj, t_max)) - los <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - rew[i, 1:t] <- DT_subj$gain - los[i, 1:t] <- -1 * abs(DT_subj$loss) - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - rew = rew, - los = los, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + model_type = "", + data_columns = c("subjID", "choice", "gain", "loss"), + parameters = list( + "Arew" = c(NULL, 0.1, 1), + "Apun" = c(NULL, 0.1, 1), + "R" = c(NULL, 1, 30), + "P" = c(NULL, 1, 30) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm_preprocess_func) diff --git a/R/R/bandit4arm_lapse.R b/R/R/bandit4arm_lapse.R index 85a13447..d8659032 100644 --- a/R/R/bandit4arm_lapse.R +++ b/R/R/bandit4arm_lapse.R @@ -1,71 +1,44 @@ #' @templateVar MODEL_FUNCTION bandit4arm_lapse +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME 5 Parameter Model, without C (choice perseveration) but with xi (noise) -#' @templateVar MODEL_CITE (Seymour et al., 2012, J Neuro) +#' @templateVar MODEL_CITE (Seymour et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in -#' Human Decision-Making. J Neuro, 32(17), 5833-5842. +#' Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842. 
+#' bandit4arm_lapse <- hBayesDM_model( task_name = "bandit4arm", model_name = "lapse", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("Arew" = c(0, 0.1, 1), - "Apun" = c(0, 0.1, 1), - "R" = c(0, 1, 30), - "P" = c(0, 1, 30), - "xi" = c(0, 0.1, 1)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - rew <- array( 0, c(n_subj, t_max)) - los <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - rew[i, 1:t] <- DT_subj$gain - los[i, 1:t] <- -1 * abs(DT_subj$loss) - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - rew = rew, - los = los, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "Arew" = c(NULL, 0.1, 1), + "Apun" = c(NULL, 0.1, 1), + "R" = c(NULL, 1, 30), + "P" = c(NULL, 1, 30), + "xi" = c(NULL, 0.1, 1) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm_preprocess_func) diff --git a/R/R/bandit4arm_lapse_decay.R b/R/R/bandit4arm_lapse_decay.R index 763558cc..2979dd67 100644 --- a/R/R/bandit4arm_lapse_decay.R +++ b/R/R/bandit4arm_lapse_decay.R @@ -1,71 +1,45 @@ #' @templateVar MODEL_FUNCTION bandit4arm_lapse_decay +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). -#' @templateVar MODEL_CITE (Aylward et al., 2018, PsyArXiv) +#' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under -#' uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m +#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 
10.31234/osf.io/k5b8m +#' bandit4arm_lapse_decay <- hBayesDM_model( task_name = "bandit4arm", model_name = "lapse_decay", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("Arew" = c(0, 0.1, 1), - "Apun" = c(0, 0.1, 1), - "R" = c(0, 1, 30), - "P" = c(0, 1, 30), - "xi" = c(0, 0.1, 1), - "d" = c(0, 0.1, 1)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - rew <- array( 0, c(n_subj, t_max)) - los <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - rew[i, 1:t] <- DT_subj$gain - los[i, 1:t] <- -1 * abs(DT_subj$loss) - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - rew = rew, - los = los, - choice = choice - ) + parameters = list( + "Arew" = c(NULL, 0.1, 1), + "Apun" = c(NULL, 0.1, 1), + "R" = c(NULL, 1, 30), + "P" = c(NULL, 1, 30), + "xi" = c(NULL, 0.1, 1), + "d" = c(NULL, 0.1, 1) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm_preprocess_func) - # Returned data_list will directly be passed to Stan - return(data_list) - } -) diff --git a/R/R/bandit4arm_singleA_lapse.R b/R/R/bandit4arm_singleA_lapse.R index 418e49cb..21a9da22 100644 --- a/R/R/bandit4arm_singleA_lapse.R +++ b/R/R/bandit4arm_singleA_lapse.R @@ -1,69 +1,43 @@ #' @templateVar MODEL_FUNCTION bandit4arm_singleA_lapse +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. -#' @templateVar MODEL_CITE (Aylward et al., 2018, PsyArXiv) +#' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under -#' uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m +#' Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 
10.31234/osf.io/k5b8m +#' bandit4arm_singleA_lapse <- hBayesDM_model( task_name = "bandit4arm", model_name = "singleA_lapse", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("A" = c(0, 0.1, 1), - "R" = c(0, 1, 30), - "P" = c(0, 1, 30), - "xi" = c(0, 0.1, 1)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - rew <- array( 0, c(n_subj, t_max)) - los <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - rew[i, 1:t] <- DT_subj$gain - los[i, 1:t] <- -1 * abs(DT_subj$loss) - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - rew = rew, - los = los, - choice = choice - ) + parameters = list( + "A" = c(NULL, 0.1, 1), + "R" = c(NULL, 1, 30), + "P" = c(NULL, 1, 30), + "xi" = c(NULL, 0.1, 1) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bandit4arm_preprocess_func) - # Returned data_list will directly be passed to Stan - return(data_list) - } -) diff --git a/R/R/bart_par4.R b/R/R/bart_par4.R index 9ad8e895..a21fbea1 100644 --- a/R/R/bart_par4.R +++ b/R/R/bart_par4.R @@ -1,68 +1,42 @@ #' @templateVar MODEL_FUNCTION bart_par4 -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}, \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}, \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee}, \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh}, \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee}, \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} , \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} , \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee} , \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh} , \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee} , \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang} #' @templateVar TASK_NAME Balloon Analogue Risk Task -#' @templateVar TASK_CITE (Ravenzwaaij et al., 2011) +#' @templateVar TASK_CITE (van Ravenzwaaij et al., 2011) #' @templateVar MODEL_NAME Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters +#' @templateVar MODEL_CITE #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "pumps", "explosion" #' @templateVar PARAMETERS "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"pumps"}{The number of pumps.} #' @templateVar DETAILS_DATA_3 \item{"explosion"}{0: intact, 1: burst} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' 
@references -#' van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the -#' BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105. +#' van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105. +#' bart_par4 <- hBayesDM_model( task_name = "bart", model_name = "par4", + model_type = "", data_columns = c("subjID", "pumps", "explosion"), - parameters = list("phi" = c(0, 0.5, 1), - "eta" = c(0, 1, Inf), - "gam" = c(0, 1, Inf), - "tau" = c(0, 1, Inf)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - pumps <- array(0, c(n_subj, t_max)) - explosion <- array(0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - pumps[i, 1:t] <- DT_subj$pumps - explosion[i, 1:t] <- DT_subj$explosion - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - P = max(pumps) + 1, - pumps = pumps, - explosion = explosion - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "phi" = c(NULL, 0.5, 1), + "eta" = c(NULL, 1, Inf), + "gam" = c(NULL, 1, Inf), + "tau" = c(NULL, 1, Inf) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = bart_preprocess_func) diff --git a/R/R/choiceRT_ddm.R b/R/R/choiceRT_ddm.R index 6acdea5d..1abe5889 100644 --- a/R/R/choiceRT_ddm.R +++ b/R/R/choiceRT_ddm.R @@ -1,85 +1,48 @@ #' @templateVar MODEL_FUNCTION choiceRT_ddm +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Choice Reaction Time Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Drift Diffusion Model -#' @templateVar MODEL_CITE (Ratcliff, 1978, Psychological Review)\cr *Note that this implementation is \strong{not} the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. +#' @templateVar MODEL_CITE (Ratcliff, 1978) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "RT" #' @templateVar PARAMETERS "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time) -#' @templateVar IS_NULL_POSTPREDS TRUE -#' @templateVar ADDITIONAL_ARG \code{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). 
+#' @templateVar REGRESSORS +#' @templateVar POSTPREDS #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as \code{1}/\code{2} to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} -#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in \strong{seconds} (e.g., 0.435 0.383 0.314 0.309, etc.).} +#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} +#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' @importFrom stats aggregate -#' +#' @include preprocess_funcs.R +#' #' @description -#' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing -#' -#' Parameters of the DDM (parameter names in Ratcliff), from \url{https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R} -#' \cr - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha -#' \cr - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 -#' \cr - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta -#' \cr - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds) +#' \strong{Notes:} +#' Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. +#' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. #' #' @references #' Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. 
http://doi.org/10.1037/0033-295X.85.2.59 +#' choiceRT_ddm <- hBayesDM_model( task_name = "choiceRT", model_name = "ddm", + model_type = "", data_columns = c("subjID", "choice", "RT"), - parameters = list("alpha" = c(0, 0.5, Inf), - "beta" = c(0, 0.5, 1), - "delta" = c(0, 0.5, Inf), - "tau" = c(0, 0.15, 1)), + parameters = list( + "alpha" = c(NULL, 0.5, Inf), + "beta" = c(NULL, 0.5, 1), + "delta" = c(NULL, 0.5, Inf), + "tau" = c(NULL, 0.15, 1) + ), + regressors = NULL, postpreds = NULL, - preprocess_func = function(raw_data, general_info, RTbound = 0.1) { - # Use raw_data as a data.frame - raw_data <- as.data.frame(raw_data) - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - - # Number of upper and lower boundary responses - Nu <- with(raw_data, aggregate(choice == 2, by = list(y = subjid), FUN = sum)[["x"]]) - Nl <- with(raw_data, aggregate(choice == 1, by = list(y = subjid), FUN = sum)[["x"]]) - - # Reaction times for upper and lower boundary responses - RTu <- array(-1, c(n_subj, max(Nu))) - RTl <- array(-1, c(n_subj, max(Nl))) - for (i in 1:n_subj) { - subj <- subjs[i] - subj_data <- subset(raw_data, raw_data$subjid == subj) - - RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 0's - RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation - } - - # Minimum reaction time - minRT <- with(raw_data, aggregate(rt, by = list(y = subjid), FUN = min)[["x"]]) - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, # Number of subjects - Nu_max = max(Nu), # Max (across subjects) number of upper boundary responses - Nl_max = max(Nl), # Max (across subjects) number of lower boundary responses - Nu = Nu, # Number of upper boundary responses for each subject - Nl = Nl, # Number of lower boundary responses for each subject - RTu = RTu, # Upper boundary response times - RTl = RTl, # Lower boundary response times - minRT = minRT, # Minimum RT for each subject - RTbound = RTbound # Lower bound of RT across all subjects (e.g., 0.1 second) - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + preprocess_func = choiceRT_preprocess_func) diff --git a/R/R/choiceRT_ddm_single.R b/R/R/choiceRT_ddm_single.R index 50067d46..dd426c6e 100644 --- a/R/R/choiceRT_ddm_single.R +++ b/R/R/choiceRT_ddm_single.R @@ -1,63 +1,48 @@ #' @templateVar MODEL_FUNCTION choiceRT_ddm_single +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Choice Reaction Time Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Drift Diffusion Model -#' @templateVar MODEL_CITE (Ratcliff, 1978, Psychological Review)\cr *Note that this implementation is \strong{not} the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. +#' @templateVar MODEL_CITE (Ratcliff, 1978) #' @templateVar MODEL_TYPE Individual #' @templateVar DATA_COLUMNS "subjID", "choice", "RT" #' @templateVar PARAMETERS "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time) -#' @templateVar IS_NULL_POSTPREDS TRUE -#' @templateVar ADDITIONAL_ARG \code{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). 
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as \code{1}/\code{2} to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).}
-#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in \strong{seconds} (e.g., 0.435 0.383 0.314 0.309, etc.).}
+#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).}
+#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).}
+#' @templateVar LENGTH_ADDITIONAL_ARGS 1
+#' @templateVar ADDITIONAL_ARGS_1 \strong{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).
 #'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @description
-#' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing
-#'
-#' Parameters of the DDM (parameter names in Ratcliff), from \url{https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R}
-#' \cr - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha
-#' \cr - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1
-#' \cr - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta
-#' \cr - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds)
+#' \strong{Notes:}
+#' Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.
+#' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.
 #'
 #' @references
 #' Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59
+#'
 
 choiceRT_ddm_single <- hBayesDM_model(
   task_name = "choiceRT",
   model_name = "ddm",
   model_type = "single",
   data_columns = c("subjID", "choice", "RT"),
-  parameters = list("alpha" = c(NA, 0.5, NA),
-                    "beta" = c(NA, 0.5, NA),
-                    "delta" = c(NA, 0.5, NA),
-                    "tau" = c(NA, 0.15, NA)),
+  parameters = list(
+    "alpha" = c(NA, 0.5, NA),
+    "beta" = c(NA, 0.5, NA),
+    "delta" = c(NA, 0.5, NA),
+    "tau" = c(NA, 0.15, NA)
+  ),
+  regressors = NULL,
   postpreds = NULL,
-  preprocess_func = function(raw_data, general_info, RTbound = 0.1) {
-    # Currently class(raw_data) == "data.table"
-
-    # Data.tables for upper and lower boundary responses
-    DT_upper <- raw_data[choice == 2]
-    DT_lower <- raw_data[choice == 1]
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      Nu = nrow(DT_upper),       # Number of upper boundary responses
-      Nl = nrow(DT_lower),       # Number of lower boundary responses
-      RTu = DT_upper$rt,         # Upper boundary response times
-      RTl = DT_lower$rt,         # Lower boundary response times
-      minRT = min(raw_data$rt),  # Minimum RT
-      RTbound = RTbound          # Lower bound of RT (e.g., 0.1 second)
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  preprocess_func = choiceRT_single_preprocess_func)
diff --git a/R/R/cra_exp.R b/R/R/cra_exp.R
index 79db92e6..0ef6c82f 100644
--- a/R/R/cra_exp.R
+++ b/R/R/cra_exp.R
@@ -1,12 +1,14 @@
 #' @templateVar MODEL_FUNCTION cra_exp
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
 #' @templateVar TASK_NAME Choice Under Risk and Ambiguity Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Exponential Subjective Value Model
-#' @templateVar MODEL_CITE (Hsu et al., 2005, Science)
+#' @templateVar MODEL_CITE (Hsu et al., 2005)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"
 #' @templateVar PARAMETERS "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature)
 #' @templateVar REGRESSORS "sv", "sv_fix", "sv_var", "p_var"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 6
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"prob"}{Objective probability of the variable lottery.}
@@ -14,71 +16,34 @@
 #' @templateVar DETAILS_DATA_4 \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.}
 #' @templateVar DETAILS_DATA_5 \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.}
 #' @templateVar DETAILS_DATA_6 \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding
-#' to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683.
-#' https://doi.org/10.1126/science.1115327
+#' Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327
+#'
 cra_exp <- hBayesDM_model(
   task_name = "cra",
   model_name = "exp",
+  model_type = "",
   data_columns = c("subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"),
-  parameters = list("alpha" = c(0, 1, 2),
-                    "beta" = c(-Inf, 0, Inf),
-                    "gamma" = c(0, 1, Inf)),
-  regressors = list("sv" = 2,
-                    "sv_fix" = 2,
-                    "sv_var" = 2,
-                    "p_var" = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice     <- array(0, c(n_subj, t_max))
-    prob       <- array(0, c(n_subj, t_max))
-    ambig      <- array(0, c(n_subj, t_max))
-    reward_var <- array(0, c(n_subj, t_max))
-    reward_fix <- array(0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]     <- DT_subj$choice
-      prob[i, 1:t]       <- DT_subj$prob
-      ambig[i, 1:t]      <- DT_subj$ambig
-      reward_var[i, 1:t] <- DT_subj$rewardvar
-      reward_fix[i, 1:t] <- DT_subj$rewardfix
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N          = n_subj,
-      T          = t_max,
-      Tsubj      = t_subjs,
-      choice     = choice,
-      prob       = prob,
-      ambig      = ambig,
-      reward_var = reward_var,
-      reward_fix = reward_fix
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "alpha" = c(0, 1, 2),
+    "beta" = c(-Inf, 0, Inf),
+    "gamma" = c(0, 1, Inf)
+  ),
+  regressors = list(
+    "sv" = 2,
+    "sv_fix" = 2,
+    "sv_var" = 2,
+    "p_var" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = cra_preprocess_func)
diff --git a/R/R/cra_linear.R b/R/R/cra_linear.R
index b6746d5f..df765af2 100644
--- a/R/R/cra_linear.R
+++ b/R/R/cra_linear.R
@@ -1,12 +1,14 @@
 #' @templateVar MODEL_FUNCTION cra_linear
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
 #' @templateVar TASK_NAME Choice Under Risk and Ambiguity Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Linear Subjective Value Model
-#' @templateVar MODEL_CITE (Levy et al., 2010, J Neurophysiol)
+#' @templateVar MODEL_CITE (Levy et al., 2010)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"
 #' @templateVar PARAMETERS "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature)
 #' @templateVar REGRESSORS "sv", "sv_fix", "sv_var", "p_var"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 6
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"prob"}{Objective probability of the variable lottery.}
@@ -14,71 +16,34 @@
 #' @templateVar DETAILS_DATA_4 \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.}
 #' @templateVar DETAILS_DATA_5 \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.}
 #' @templateVar DETAILS_DATA_6 \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural
-#' representation of subjective value under risk and ambiguity. Journal of Neurophysiology,
-#' 103(2), 1036-1047.
+#' Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047.
+#'
 cra_linear <- hBayesDM_model(
   task_name = "cra",
   model_name = "linear",
+  model_type = "",
   data_columns = c("subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"),
-  parameters = list("alpha" = c(0, 1, 2),
-                    "beta" = c(-Inf, 0, Inf),
-                    "gamma" = c(0, 1, Inf)),
-  regressors = list("sv" = 2,
-                    "sv_fix" = 2,
-                    "sv_var" = 2,
-                    "p_var" = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice     <- array(0, c(n_subj, t_max))
-    prob       <- array(0, c(n_subj, t_max))
-    ambig      <- array(0, c(n_subj, t_max))
-    reward_var <- array(0, c(n_subj, t_max))
-    reward_fix <- array(0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]     <- DT_subj$choice
-      prob[i, 1:t]       <- DT_subj$prob
-      ambig[i, 1:t]      <- DT_subj$ambig
-      reward_var[i, 1:t] <- DT_subj$rewardvar
-      reward_fix[i, 1:t] <- DT_subj$rewardfix
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N          = n_subj,
-      T          = t_max,
-      Tsubj      = t_subjs,
-      choice     = choice,
-      prob       = prob,
-      ambig      = ambig,
-      reward_var = reward_var,
-      reward_fix = reward_fix
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "alpha" = c(0, 1, 2),
+    "beta" = c(-Inf, 0, Inf),
+    "gamma" = c(0, 1, Inf)
+  ),
+  regressors = list(
+    "sv" = 2,
+    "sv_fix" = 2,
+    "sv_var" = 2,
+    "p_var" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = cra_preprocess_func)
diff --git a/R/R/dbdm_prob_weight.R b/R/R/dbdm_prob_weight.R
index fac6c63a..8ac39d55 100644
--- a/R/R/dbdm_prob_weight.R
+++ b/R/R/dbdm_prob_weight.R
@@ -1,11 +1,14 @@
 #' @templateVar MODEL_FUNCTION dbdm_prob_weight
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh}
 #' @templateVar TASK_NAME Description Based Decision Making Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Probability Weight Function
 #' @templateVar MODEL_CITE (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"
 #' @templateVar PARAMETERS "tau" (probability weight function), "rho" (subject utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 8
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"opt1hprob"}{Possibility of getting higher value of outcome (opt1hval) when choosing option 1.}
@@ -15,73 +18,34 @@
 #' @templateVar DETAILS_DATA_6 \item{"opt2hval"}{Possible (with opt2hprob probability) outcome of option 2.}
 #' @templateVar DETAILS_DATA_7 \item{"opt2lval"}{Possible (with (1 - opt2hprob) probability) outcome of option 2.}
 #' @templateVar DETAILS_DATA_8 \item{"choice"}{If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A
-#' choice prediction competition: Choices from experience and from description. Journal of
-#' Behavioral Decision Making, 23(1), 15-47.
+#' Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.
 #'
-#' Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the
-#' effect of rare events in risky choice. Psychological science, 15(8), 534-539.
+#' Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.
+#'
+#' Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.
 #'
-#' Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from
-#' prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.
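All of the preprocess functions being factored out in this patch share one convention, visible in the inline code removed just below: ragged per-subject trial data are padded into rectangular subject-by-trial arrays, with Tsubj recording each subject's real trial count so the Stan code can skip the padded cells. A minimal sketch of that convention (illustrative only; pad_trials is not an hBayesDM function):

    # Pad a list of per-subject vectors into an N x T matrix for Stan.
    # Cells beyond a subject's Tsubj keep the sentinel `fill` value.
    pad_trials <- function(values_by_subj, fill = 0) {
      t_subj <- vapply(values_by_subj, length, integer(1))
      out <- matrix(fill, nrow = length(values_by_subj), ncol = max(t_subj))
      for (i in seq_along(values_by_subj))
        out[i, seq_len(t_subj[i])] <- values_by_subj[[i]]
      list(X = out, Tsubj = t_subj)
    }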
dbdm_prob_weight <- hBayesDM_model(
  task_name = "dbdm",
  model_name = "prob_weight",
+  model_type = "",
  data_columns = c("subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"),
-  parameters = list("tau" = c(0, 0.8, 1),
-                    "rho" = c(0, 0.7, 2),
-                    "lambda" = c(0, 2.5, 5),
-                    "beta" = c(0, 0.2, 1)),
-  preprocess_func = function(raw_data, general_info) {
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    opt1hprob <- array( 0, c(n_subj, t_max))
-    opt2hprob <- array( 0, c(n_subj, t_max))
-    opt1hval  <- array( 0, c(n_subj, t_max))
-    opt1lval  <- array( 0, c(n_subj, t_max))
-    opt2hval  <- array( 0, c(n_subj, t_max))
-    opt2lval  <- array( 0, c(n_subj, t_max))
-    choice    <- array(-1, c(n_subj, t_max))
-
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      opt1hprob[i, 1:t] <- DT_subj$opt1hprob
-      opt2hprob[i, 1:t] <- DT_subj$opt2hprob
-      opt1hval[i, 1:t]  <- DT_subj$opt1hval
-      opt1lval[i, 1:t]  <- DT_subj$opt1lval
-      opt2hval[i, 1:t]  <- DT_subj$opt2hval
-      opt2lval[i, 1:t]  <- DT_subj$opt2lval
-      choice[i, 1:t]    <- DT_subj$choice
-    }
-
-    data_list <- list(
-      N         = n_subj,
-      T         = t_max,
-      Tsubj     = t_subjs,
-      opt1hprob = opt1hprob,
-      opt2hprob = opt2hprob,
-      opt1hval  = opt1hval,
-      opt1lval  = opt1lval,
-      opt2hval  = opt2hval,
-      opt2lval  = opt2lval,
-      choice    = choice
-    )
-
-    return(data_list)
-  }
-)
+  parameters = list(
+    "tau" = c(0, 0.8, 1),
+    "rho" = c(0, 0.7, 2),
+    "lambda" = c(0, 2.5, 5),
+    "beta" = c(0, 0.2, 1)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = dbdm_preprocess_func)
diff --git a/R/R/dd_cs.R b/R/R/dd_cs.R
index 47bef9d2..a27fd669 100644
--- a/R/R/dd_cs.R
+++ b/R/R/dd_cs.R
@@ -1,10 +1,14 @@
 #' @templateVar MODEL_FUNCTION dd_cs
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Delay Discounting Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Constant-Sensitivity (CS) Model
-#' @templateVar MODEL_CITE (Ebert & Prelec, 2007, Management Science)
+#' @templateVar MODEL_CITE (Ebert & Prelec, 2007)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"
 #' @templateVar PARAMETERS "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 6
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).}
@@ -12,66 +16,29 @@
 #' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).}
 #' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).}
 #' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of
-#' the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671
+#' Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671
+#'
 dd_cs <- hBayesDM_model(
  task_name = "dd",
  model_name = "cs",
+  model_type = "",
  data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"),
-  parameters = list("r" = c(0, 0.1, 1),
-                    "s" = c(0, 1, 10),
-                    "beta" = c(0, 1, 5)),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    delay_later   <- array( 0, c(n_subj, t_max))
-    amount_later  <- array( 0, c(n_subj, t_max))
-    delay_sooner  <- array( 0, c(n_subj, t_max))
-    amount_sooner <- array( 0, c(n_subj, t_max))
-    choice        <- array(-1, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      delay_later[i, 1:t]   <- DT_subj$delaylater
-      amount_later[i, 1:t]  <- DT_subj$amountlater
-      delay_sooner[i, 1:t]  <- DT_subj$delaysooner
-      amount_sooner[i, 1:t] <- DT_subj$amountsooner
-      choice[i, 1:t]        <- DT_subj$choice
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N             = n_subj,
-      T             = t_max,
-      Tsubj         = t_subjs,
-      delay_later   = delay_later,
-      amount_later  = amount_later,
-      delay_sooner  = delay_sooner,
-      amount_sooner = amount_sooner,
-      choice        = choice
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "r" = c(0, 0.1, 1),
+    "s" = c(0, 1, 10),
+    "beta" = c(0, 1, 5)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = dd_preprocess_func)
diff --git a/R/R/dd_cs_single.R b/R/R/dd_cs_single.R
index 54c37dc3..74699e6c 100644
--- a/R/R/dd_cs_single.R
+++ b/R/R/dd_cs_single.R
@@ -1,10 +1,14 @@
 #' @templateVar MODEL_FUNCTION dd_cs_single
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Delay Discounting Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Constant-Sensitivity (CS) Model
-#' @templateVar MODEL_CITE (Ebert & Prelec, 2007, Management Science)
+#' @templateVar MODEL_CITE (Ebert & Prelec, 2007)
 #' @templateVar MODEL_TYPE Individual
 #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"
 #' @templateVar PARAMETERS "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 6
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).}
@@ -12,49 +16,29 @@
 #' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).}
 #' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).}
 #' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ebert, J. E. J., & Prelec, D.
(2007). The Fragility of Time: Time-Insensitivity and Valuation of -#' the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 +#' Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 +#' dd_cs_single <- hBayesDM_model( task_name = "dd", model_name = "cs", model_type = "single", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), - parameters = list("r" = c(NA, 0.1, NA), - "s" = c(NA, 1, NA), - "beta" = c(NA, 1, NA)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - t_subjs <- general_info$t_subjs - - # Extract from raw_data - delay_later <- raw_data$delaylater - amount_later <- raw_data$amountlater - delay_sooner <- raw_data$delaysooner - amount_sooner <- raw_data$amountsooner - choice <- raw_data$choice - - # Wrap into a list for Stan - data_list <- list( - Tsubj = t_subjs, - delay_later = delay_later, - amount_later = amount_later, - delay_sooner = delay_sooner, - amount_sooner = amount_sooner, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "r" = c(NULL, 0.1, NULL), + "s" = c(NULL, 1, NULL), + "beta" = c(NULL, 1, NULL) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = dd_single_preprocess_func) diff --git a/R/R/dd_exp.R b/R/R/dd_exp.R index 9a70e708..311d4f0f 100644 --- a/R/R/dd_exp.R +++ b/R/R/dd_exp.R @@ -1,10 +1,14 @@ #' @templateVar MODEL_FUNCTION dd_exp +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Exponential Model -#' @templateVar MODEL_CITE (Samuelson, 1937, The Review of Economic Studies) +#' @templateVar MODEL_CITE (Samuelson, 1937) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" #' @templateVar PARAMETERS "r" (exponential discounting rate), "beta" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} @@ -12,65 +16,28 @@ #' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} #' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} #' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), -#' 155. http://doi.org/10.2307/2967612 +#' Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. 
http://doi.org/10.2307/2967612 +#' dd_exp <- hBayesDM_model( task_name = "dd", model_name = "exp", + model_type = "", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), - parameters = list("r" = c(0, 0.1, 1), - "beta" = c(0, 1, 5)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - delay_later <- array( 0, c(n_subj, t_max)) - amount_later <- array( 0, c(n_subj, t_max)) - delay_sooner <- array( 0, c(n_subj, t_max)) - amount_sooner <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - delay_later[i, 1:t] <- DT_subj$delaylater - amount_later[i, 1:t] <- DT_subj$amountlater - delay_sooner[i, 1:t] <- DT_subj$delaysooner - amount_sooner[i, 1:t] <- DT_subj$amountsooner - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - delay_later = delay_later, - amount_later = amount_later, - delay_sooner = delay_sooner, - amount_sooner = amount_sooner, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "r" = c(NULL, 0.1, 1), + "beta" = c(NULL, 1, 5) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = dd_preprocess_func) diff --git a/R/R/dd_hyperbolic.R b/R/R/dd_hyperbolic.R index 81adbf67..fa180494 100644 --- a/R/R/dd_hyperbolic.R +++ b/R/R/dd_hyperbolic.R @@ -1,10 +1,14 @@ #' @templateVar MODEL_FUNCTION dd_hyperbolic +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Hyperbolic Model #' @templateVar MODEL_CITE (Mazur, 1987) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" #' @templateVar PARAMETERS "k" (discounting rate), "beta" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} @@ -12,64 +16,28 @@ #' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} #' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} #' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references #' Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement. 
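With dd_exp just defined and the hyperbolic variants following, it may help to see the two discount functions side by side. Assuming the standard forms these models implement (dd_exp: SV = amount * exp(-r * delay); dd_hyperbolic: SV = amount / (1 + k * delay)), a hyperbolic discounter retains far more value at long delays for a matched rate parameter, which is what produces the well-known preference reversals:

    # Sketch of the discount functions assumed by dd_exp and dd_hyperbolic
    # (cf. Samuelson, 1937; Mazur, 1987); delays in days, as in the data columns.
    sv_exp <- function(amount, delay, r) amount * exp(-r * delay)
    sv_hyp <- function(amount, delay, k) amount / (1 + k * delay)

    delays <- c(0, 1, 6, 28)
    rbind(exponential = sv_exp(10, delays, r = 0.1),
          hyperbolic  = sv_hyp(10, delays, k = 0.1))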
+#' dd_hyperbolic <- hBayesDM_model( task_name = "dd", model_name = "hyperbolic", + model_type = "", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), - parameters = list("k" = c(0, 0.1, 1), - "beta" = c(0, 1, 5)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - delay_later <- array( 0, c(n_subj, t_max)) - amount_later <- array( 0, c(n_subj, t_max)) - delay_sooner <- array( 0, c(n_subj, t_max)) - amount_sooner <- array( 0, c(n_subj, t_max)) - choice <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - delay_later[i, 1:t] <- DT_subj$delaylater - amount_later[i, 1:t] <- DT_subj$amountlater - delay_sooner[i, 1:t] <- DT_subj$delaysooner - amount_sooner[i, 1:t] <- DT_subj$amountsooner - choice[i, 1:t] <- DT_subj$choice - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - delay_later = delay_later, - amount_later = amount_later, - delay_sooner = delay_sooner, - amount_sooner = amount_sooner, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "k" = c(NULL, 0.1, 1), + "beta" = c(NULL, 1, 5) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = dd_preprocess_func) diff --git a/R/R/dd_hyperbolic_single.R b/R/R/dd_hyperbolic_single.R index 10dd66e6..ea6e623d 100644 --- a/R/R/dd_hyperbolic_single.R +++ b/R/R/dd_hyperbolic_single.R @@ -1,10 +1,14 @@ #' @templateVar MODEL_FUNCTION dd_hyperbolic_single +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Hyperbolic Model #' @templateVar MODEL_CITE (Mazur, 1987) #' @templateVar MODEL_TYPE Individual #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" #' @templateVar PARAMETERS "k" (discounting rate), "beta" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} @@ -12,47 +16,28 @@ #' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} #' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} #' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references #' Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement. 
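All of the dd models then map the two subjective values onto a choice probability through a logistic rule with inverse temperature beta; assuming the bernoulli-logit form used in the dd Stan files, the link is simply:

    # Probability of choosing the later option, given subjective values
    # and inverse temperature beta (logistic link over the value difference).
    p_later <- function(sv_later, sv_sooner, beta) {
      plogis(beta * (sv_later - sv_sooner))
    }
    p_later(sv_later = 8, sv_sooner = 7, beta = 1)  # ~0.73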
+#' dd_hyperbolic_single <- hBayesDM_model( task_name = "dd", model_name = "hyperbolic", model_type = "single", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), - parameters = list("k" = c(NA, 0.1, NA), - "beta" = c(NA, 1, NA)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - t_subjs <- general_info$t_subjs - - # Extract from raw_data - delay_later <- raw_data$delaylater - amount_later <- raw_data$amountlater - delay_sooner <- raw_data$delaysooner - amount_sooner <- raw_data$amountsooner - choice <- raw_data$choice - - # Wrap into a list for Stan - data_list <- list( - Tsubj = t_subjs, - delay_later = delay_later, - amount_later = amount_later, - delay_sooner = delay_sooner, - amount_sooner = amount_sooner, - choice = choice - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "k" = c(NULL, 0.1, NULL), + "beta" = c(NULL, 1, NULL) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = dd_single_preprocess_func) diff --git a/R/R/gng_m1.R b/R/R/gng_m1.R index 78c789fd..f2bb49b4 100644 --- a/R/R/gng_m1.R +++ b/R/R/gng_m1.R @@ -1,75 +1,47 @@ #' @templateVar MODEL_FUNCTION gng_m1 +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise -#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012, Neuroimage) +#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" #' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "rho" (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo" +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). -#' Go and no-go learning in reward and punishment: Interactions between affect and effect. -#' Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. 
http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' gng_m1 <- hBayesDM_model( task_name = "gng", model_name = "m1", + model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), - parameters = list("xi" = c(0, 0.1, 1), - "ep" = c(0, 0.2, 1), - "rho" = c(0, exp(2), Inf)), - regressors = list("Qgo" = 2, - "Qnogo" = 2, - "Wgo" = 2, - "Wnogo" = 2), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - cue <- array( 1, c(n_subj, t_max)) - pressed <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - cue[i, 1:t] <- DT_subj$cue - pressed[i, 1:t] <- DT_subj$keypressed - outcome[i, 1:t] <- DT_subj$outcome - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - cue = cue, - pressed = pressed, - outcome = outcome - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "xi" = c(NULL, 0.1, 1), + "ep" = c(NULL, 0.2, 1), + "rho" = c(NULL, exp(2), Inf) + ), + regressors = list( + "Qgo" = 2, + "Qnogo" = 2, + "Wgo" = 2, + "Wnogo" = 2 + ), + postpreds = c("y_pred"), + preprocess_func = gng_preprocess_func) diff --git a/R/R/gng_m2.R b/R/R/gng_m2.R index 278eb6c5..278d8c8d 100644 --- a/R/R/gng_m2.R +++ b/R/R/gng_m2.R @@ -1,76 +1,48 @@ #' @templateVar MODEL_FUNCTION gng_m2 +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise + bias -#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012, Neuroimage) +#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" #' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo" +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). -#' Go and no-go learning in reward and punishment: Interactions between affect and effect. -#' Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. 
Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' gng_m2 <- hBayesDM_model( task_name = "gng", model_name = "m2", + model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), - parameters = list("xi" = c(0, 0.1, 1), - "ep" = c(0, 0.2, 1), - "b" = c(-Inf, 0, Inf), - "rho" = c(0, exp(2), Inf)), - regressors = list("Qgo" = 2, - "Qnogo" = 2, - "Wgo" = 2, - "Wnogo" = 2), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - cue <- array( 1, c(n_subj, t_max)) - pressed <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - cue[i, 1:t] <- DT_subj$cue - pressed[i, 1:t] <- DT_subj$keypressed - outcome[i, 1:t] <- DT_subj$outcome - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - cue = cue, - pressed = pressed, - outcome = outcome - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "xi" = c(NULL, 0.1, 1), + "ep" = c(NULL, 0.2, 1), + "b" = c(-Inf, NULL, Inf), + "rho" = c(NULL, exp(2), Inf) + ), + regressors = list( + "Qgo" = 2, + "Qnogo" = 2, + "Wgo" = 2, + "Wnogo" = 2 + ), + postpreds = c("y_pred"), + preprocess_func = gng_preprocess_func) diff --git a/R/R/gng_m3.R b/R/R/gng_m3.R index ba6b1583..443d8f47 100644 --- a/R/R/gng_m3.R +++ b/R/R/gng_m3.R @@ -1,78 +1,50 @@ #' @templateVar MODEL_FUNCTION gng_m3 +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise + bias + pi -#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012, Neuroimage) +#' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" #' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo", "SV" +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). -#' Go and no-go learning in reward and punishment: Interactions between affect and effect. -#' Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. 
J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +#' gng_m3 <- hBayesDM_model( task_name = "gng", model_name = "m3", + model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), - parameters = list("xi" = c(0, 0.1, 1), - "ep" = c(0, 0.2, 1), - "b" = c(-Inf, 0, Inf), - "pi" = c(-Inf, 0, Inf), - "rho" = c(0, exp(2), Inf)), - regressors = list("Qgo" = 2, - "Qnogo" = 2, - "Wgo" = 2, - "Wnogo" = 2, - "SV" = 2), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - cue <- array( 1, c(n_subj, t_max)) - pressed <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - cue[i, 1:t] <- DT_subj$cue - pressed[i, 1:t] <- DT_subj$keypressed - outcome[i, 1:t] <- DT_subj$outcome - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - cue = cue, - pressed = pressed, - outcome = outcome - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "xi" = c(NULL, 0.1, 1), + "ep" = c(NULL, 0.2, 1), + "b" = c(-Inf, NULL, Inf), + "pi" = c(-Inf, NULL, Inf), + "rho" = c(NULL, exp(2), Inf) + ), + regressors = list( + "Qgo" = 2, + "Qnogo" = 2, + "Wgo" = 2, + "Wnogo" = 2, + "SV" = 2 + ), + postpreds = c("y_pred"), + preprocess_func = gng_preprocess_func) diff --git a/R/R/gng_m4.R b/R/R/gng_m4.R index 501ad943..bd024c77 100644 --- a/R/R/gng_m4.R +++ b/R/R/gng_m4.R @@ -1,79 +1,51 @@ #' @templateVar MODEL_FUNCTION gng_m4 +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW (rew/pun) + noise + bias + pi -#' @templateVar MODEL_CITE (Cavanagh et al., 2013, J Neuro) +#' @templateVar MODEL_CITE (Cavanagh et al., 2013) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" #' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo", "SV" +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta -#' Overrides Pavlovian Learning Biases. 
Journal of Neuroscience, 33(19), 8541-8548. -#' http://doi.org/10.1523/JNEUROSCI.5754-12.2013 +#' Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013 +#' gng_m4 <- hBayesDM_model( task_name = "gng", model_name = "m4", + model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), - parameters = list("xi" = c(0, 0.1, 1), - "ep" = c(0, 0.2, 1), - "b" = c(-Inf, 0, Inf), - "pi" = c(-Inf, 0, Inf), - "rhoRew" = c(0, exp(2), Inf), - "rhoPun" = c(0, exp(2), Inf)), - regressors = list("Qgo" = 2, - "Qnogo" = 2, - "Wgo" = 2, - "Wnogo" = 2, - "SV" = 2), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - cue <- array( 1, c(n_subj, t_max)) - pressed <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - cue[i, 1:t] <- DT_subj$cue - pressed[i, 1:t] <- DT_subj$keypressed - outcome[i, 1:t] <- DT_subj$outcome - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - cue = cue, - pressed = pressed, - outcome = outcome - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "xi" = c(NULL, 0.1, 1), + "ep" = c(NULL, 0.2, 1), + "b" = c(-Inf, NULL, Inf), + "pi" = c(-Inf, NULL, Inf), + "rhoRew" = c(NULL, exp(2), Inf), + "rhoPun" = c(NULL, exp(2), Inf) + ), + regressors = list( + "Qgo" = 2, + "Qnogo" = 2, + "Wgo" = 2, + "Wnogo" = 2, + "SV" = 2 + ), + postpreds = c("y_pred"), + preprocess_func = gng_preprocess_func) diff --git a/R/R/igt_orl.R b/R/R/igt_orl.R index ec6abd6e..93003251 100644 --- a/R/R/igt_orl.R +++ b/R/R/igt_orl.R @@ -1,72 +1,47 @@ #' @templateVar MODEL_FUNCTION igt_orl -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines} #' @templateVar TASK_NAME Iowa Gambling Task +#' @templateVar TASK_CITE (Ahn et al., 2008) #' @templateVar MODEL_NAME Outcome-Representation Learning Model -#' @templateVar MODEL_CITE (Haines et al., 2018, Cognitive Science) +#' @templateVar MODEL_CITE (Haines et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight) -#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. 
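The payscale argument documented in the surrounding header only rescales outcomes before they reach Stan; in the igt preprocess step (cf. the inline code removed in these hunks) each trial's net outcome is computed as gain minus the absolute loss and then divided through:

    # What `payscale` does in the igt preprocessing, mirroring the removed
    # inline code: RLmatrix <- gain - abs(loss); outcome <- RLmatrix / payscale
    gain <- c(100, 50, 0)
    loss <- c(0, -50, -250)
    (gain - abs(loss)) / 100   #  1.0  0.0  -2.5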
+#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A -#' Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. -#' https://doi.org/10.1111/cogs.12688 +#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 +#' +#' Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688 +#' igt_orl <- hBayesDM_model( task_name = "igt", model_name = "orl", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("Arew" = c(0, 0.1, 1), - "Apun" = c(0, 0.1, 1), - "K" = c(0, 0.1, 5), - "betaF" = c(-Inf, 0.1, Inf), - "betaP" = c(-Inf, 1, Inf)), - preprocess_func = function(raw_data, general_info, payscale = 100) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize data arrays - Ydata <- array(-1, c(n_subj, t_max)) - RLmatrix <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - Ydata[i, 1:t] <- DT_subj$choice - RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss) - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - choice = Ydata, - outcome = RLmatrix / payscale, - sign_out = sign(RLmatrix) - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "Arew" = c(NULL, 0.1, 1), + "Apun" = c(NULL, 0.1, 1), + "K" = c(NULL, 0.1, 5), + "betaF" = c(-Inf, 0.1, Inf), + "betaP" = c(-Inf, 1, Inf) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = igt_preprocess_func) diff --git a/R/R/igt_pvl_decay.R b/R/R/igt_pvl_decay.R index d32e9154..859d6a1d 100644 --- a/R/R/igt_pvl_decay.R +++ b/R/R/igt_pvl_decay.R @@ -1,70 +1,46 @@ #' @templateVar MODEL_FUNCTION igt_pvl_decay +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Iowa Gambling Task +#' @templateVar TASK_CITE (Ahn et al., 2008) #' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Decay-RI -#' @templateVar MODEL_CITE (Ahn et al., 2014, Frontiers in Psychology) +#' @templateVar MODEL_CITE (Ahn et al., 2014) #' 
@templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion) -#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, -#' J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence -#' from computational modeling with pure users. Frontiers in Psychology, 5, 1376. -#' http://doi.org/10.3389/fpsyg.2014.00849 +#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 +#' +#' Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. 
http://doi.org/10.3389/fpsyg.2014.00849 +#' igt_pvl_decay <- hBayesDM_model( task_name = "igt", model_name = "pvl_decay", + model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), - parameters = list("A" = c(0, 0.5, 1), - "alpha" = c(0, 0.5, 2), - "cons" = c(0, 1, 5), - "lambda" = c(0, 1, 10)), - preprocess_func = function(raw_data, general_info, payscale = 100) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize data arrays - Ydata <- array(-1, c(n_subj, t_max)) - RLmatrix <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - Ydata[i, 1:t] <- DT_subj$choice - RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss) - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - choice = Ydata, - outcome = RLmatrix / payscale - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "A" = c(NULL, 0.5, 1), + "alpha" = c(NULL, 0.5, 2), + "cons" = c(NULL, 1, 5), + "lambda" = c(NULL, 1, 10) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = igt_preprocess_func) diff --git a/R/R/igt_pvl_delta.R b/R/R/igt_pvl_delta.R index d02b49ad..16782342 100644 --- a/R/R/igt_pvl_delta.R +++ b/R/R/igt_pvl_delta.R @@ -1,70 +1,46 @@ #' @templateVar MODEL_FUNCTION igt_pvl_delta +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Iowa Gambling Task #' @templateVar TASK_CITE (Ahn et al., 2008) #' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Delta -#' @templateVar MODEL_CITE (Ahn et al., 2008, Cognitive Science) +#' @templateVar MODEL_CITE (Ahn et al., 2008) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" #' @templateVar PARAMETERS "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion) -#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models -#' using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. -#' http://doi.org/10.1080/03640210802352992 +#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). 
diff --git a/R/R/igt_pvl_delta.R b/R/R/igt_pvl_delta.R
index d02b49ad..16782342 100644
--- a/R/R/igt_pvl_delta.R
+++ b/R/R/igt_pvl_delta.R
@@ -1,70 +1,46 @@
 #' @templateVar MODEL_FUNCTION igt_pvl_delta
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Iowa Gambling Task
 #' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Delta
-#' @templateVar MODEL_CITE (Ahn et al., 2008, Cognitive Science)
+#' @templateVar MODEL_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
 #' @templateVar PARAMETERS "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion)
-#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
 #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
 #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar LENGTH_ADDITIONAL_ARGS 1
+#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
 #'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models
-#' using the generalization criterion method. Cognitive Science, 32(8), 1376-1402.
-#' http://doi.org/10.1080/03640210802352992
+#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+#'
 igt_pvl_delta <- hBayesDM_model(
   task_name = "igt",
   model_name = "pvl_delta",
+  model_type = "",
   data_columns = c("subjID", "choice", "gain", "loss"),
-  parameters = list("A"      = c(0, 0.5, 1),
-                    "alpha"  = c(0, 0.5, 2),
-                    "cons"   = c(0, 1, 5),
-                    "lambda" = c(0, 1, 10)),
-  preprocess_func = function(raw_data, general_info, payscale = 100) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize data arrays
-    Ydata    <- array(-1, c(n_subj, t_max))
-    RLmatrix <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      Ydata[i, 1:t]    <- DT_subj$choice
-      RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss)
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = Ydata,
-      outcome = RLmatrix / payscale
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "A" = c(NULL, 0.5, 1),
+    "alpha" = c(NULL, 0.5, 2),
+    "cons" = c(NULL, 1, 5),
+    "lambda" = c(NULL, 1, 10)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = igt_preprocess_func)
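Each entry in the rewritten parameters lists keeps hBayesDM's triplet convention, c(lower bound, plausible value, upper bound). One R subtlety about the NULL placeholders above is worth flagging, shown here as a minimal sketch:

# c() drops NULL, so a NULL bound silently shortens the triplet:
length(c(0, 0.5, 1))                   # 3
length(c(NULL, 0.5, 1))                # 2
identical(c(NULL, 0.5, 1), c(0.5, 1))  # TRUE

Code that indexes these triplets by position therefore sees different shapes for bounded and NULL-bounded parameters.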
diff --git a/R/R/igt_vpp.R b/R/R/igt_vpp.R
index 908f5a5d..94cf6d86 100644
--- a/R/R/igt_vpp.R
+++ b/R/R/igt_vpp.R
@@ -1,73 +1,50 @@
 #' @templateVar MODEL_FUNCTION igt_vpp
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Iowa Gambling Task
+#' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Value-Plus-Perseverance
-#' @templateVar MODEL_CITE (Worthy et al., 2013, Frontiers in Psychology)
+#' @templateVar MODEL_CITE (Worthy et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
 #' @templateVar PARAMETERS "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight)
-#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
 #' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
 #' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar LENGTH_ADDITIONAL_ARGS 1
+#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
 #'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and
-#' win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical
-#' Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001
+#' Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+#'
+#' Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001
+#'
 igt_vpp <- hBayesDM_model(
   task_name = "igt",
   model_name = "vpp",
+  model_type = "",
   data_columns = c("subjID", "choice", "gain", "loss"),
-  parameters = list("A"      = c(0, 0.5, 1),
-                    "alpha"  = c(0, 0.5, 2),
-                    "cons"   = c(0, 1, 5),
-                    "lambda" = c(0, 1, 10),
-                    "epP"    = c(-Inf, 0, Inf),
-                    "epN"    = c(-Inf, 0, Inf),
-                    "K"      = c(0, 0.5, 1),
-                    "w"      = c(0, 0.5, 1)),
-  preprocess_func = function(raw_data, general_info, payscale = 100) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize data arrays
-    Ydata    <- array(-1, c(n_subj, t_max))
-    RLmatrix <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      Ydata[i, 1:t]    <- DT_subj$choice
-      RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss)
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = Ydata,
-      outcome = RLmatrix / payscale
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "A" = c(NULL, 0.5, 1),
+    "alpha" = c(NULL, 0.5, 2),
+    "cons" = c(NULL, 1, 5),
+    "lambda" = c(NULL, 1, 10),
+    "epP" = c(-Inf, NULL, Inf),
+    "epN" = c(-Inf, NULL, Inf),
+    "K" = c(NULL, 0.5, 1),
+    "w" = c(NULL, 0.5, 1)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = igt_preprocess_func)
diff --git a/R/R/peer_ocu.R b/R/R/peer_ocu.R
index f7f6a5a2..03f41942 100644
--- a/R/R/peer_ocu.R
+++ b/R/R/peer_ocu.R
@@ -1,11 +1,14 @@
 #' @templateVar MODEL_FUNCTION peer_ocu
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
 #' @templateVar TASK_NAME Peer Influence Task
 #' @templateVar TASK_CITE (Chung et al., 2015)
 #' @templateVar MODEL_NAME Other-Conferred Utility (OCU) Model
+#' @templateVar MODEL_CITE
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice"
 #' @templateVar PARAMETERS "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 8
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"condition"}{0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).}
@@ -15,73 +18,29 @@
 #' @templateVar DETAILS_DATA_6 \item{"risky_Hpayoff"}{High payoff of the risky option.}
 #' @templateVar DETAILS_DATA_7 \item{"risky_Lpayoff"}{Low payoff of the risky option.}
 #' @templateVar DETAILS_DATA_8 \item{"choice"}{Which option was chosen? 0: safe, 1: risky.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social
-#' signals of safety and risk confer utility and have asymmetric effects on observers' choices.
-#' Nature Neuroscience, 18(6), 912-916.
+#' Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916.
+#'
 peer_ocu <- hBayesDM_model(
   task_name = "peer",
   model_name = "ocu",
+  model_type = "",
   data_columns = c("subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff",
                    "risky_Hpayoff", "risky_Lpayoff", "choice"),
-  parameters = list("rho" = c(0, 1, 2),
-                    "tau" = c(0, 1, Inf),
-                    "ocu" = c(-Inf, 0, Inf)),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    condition     <- array( 0, c(n_subj, t_max))
-    p_gamble      <- array( 0, c(n_subj, t_max))
-    safe_Hpayoff  <- array( 0, c(n_subj, t_max))
-    safe_Lpayoff  <- array( 0, c(n_subj, t_max))
-    risky_Hpayoff <- array( 0, c(n_subj, t_max))
-    risky_Lpayoff <- array( 0, c(n_subj, t_max))
-    choice        <- array(-1, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      condition[i, 1:t]     <- DT_subj$condition
-      p_gamble[i, 1:t]      <- DT_subj$pgamble
-      safe_Hpayoff[i, 1:t]  <- DT_subj$safehpayoff
-      safe_Lpayoff[i, 1:t]  <- DT_subj$safelpayoff
-      risky_Hpayoff[i, 1:t] <- DT_subj$riskyhpayoff
-      risky_Lpayoff[i, 1:t] <- DT_subj$riskylpayoff
-      choice[i, 1:t]        <- DT_subj$choice
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N             = n_subj,
-      T             = t_max,
-      Tsubj         = t_subjs,
-      condition     = condition,
-      p_gamble      = p_gamble,
-      safe_Hpayoff  = safe_Hpayoff,
-      safe_Lpayoff  = safe_Lpayoff,
-      risky_Hpayoff = risky_Hpayoff,
-      risky_Lpayoff = risky_Lpayoff,
-      choice        = choice
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "rho" = c(NULL, 1, 2),
+    "tau" = c(NULL, 1, Inf),
+    "ocu" = c(-Inf, NULL, Inf)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = peer_preprocess_func)
diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R
index ccd8a8f1..ff02d14c 100644
--- a/R/R/preprocess_funcs.R
+++ b/R/R/preprocess_funcs.R
@@ -1,5 +1,3 @@
-#'
-
 bandit2arm_preprocess_func <- function(raw_data, general_info) {
   # Currently class(raw_data) == "data.table"
diff --git a/R/R/prl_ewa.R b/R/R/prl_ewa.R
index c83e8e68..fb8b1e42 100644
--- a/R/R/prl_ewa.R
+++ b/R/R/prl_ewa.R
@@ -1,72 +1,46 @@
 #' @templateVar MODEL_FUNCTION prl_ewa
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Experience-Weighted Attraction Model
-#' @templateVar MODEL_CITE (Ouden et al., 2013, Neuron)
+#' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "ew_c", "ew_nc"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
-#' (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
-#' 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#'
 prl_ewa <- hBayesDM_model(
   task_name = "prl",
   model_name = "ewa",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("phi"  = c(0, 0.5, 1),
-                    "rho"  = c(0, 0.1, 1),
-                    "beta" = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "ew_c"  = 2,
-                    "ew_nc" = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "phi" = c(NULL, 0.5, 1),
+    "rho" = c(NULL, 0.1, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "ew_c" = 2,
+    "ew_nc" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
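For readers tracing the ev_* and ew_* regressors, here is a sketch of one textbook experience-weighted attraction update (the Camerer-Ho form that the model name suggests). This is illustrative only: the function and variable names are invented here, and the Stan file, not this sketch, defines the model actually fitted.

# Illustrative EWA update for a two-option task. phi and rho are the documented
# parameters; ev is the attraction (value) vector and ew the experience weight.
ewa_update <- function(ev, ew, chosen, outcome, phi, rho) {
  ew_new <- rho * ew + 1  # decay old experience, count the new trial
  for (j in seq_along(ev)) {
    payoff <- if (j == chosen) outcome else 0
    ev[j] <- (phi * ew * ev[j] + payoff) / ew_new
  }
  list(ev = ev, ew = ew_new)
}
ewa_update(ev = c(0, 0), ew = 1, chosen = 1, outcome = 1, phi = 0.5, rho = 0.9)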
diff --git a/R/R/prl_fictitious.R b/R/R/prl_fictitious.R
index 39ed96bd..a4d0d966 100644
--- a/R/R/prl_fictitious.R
+++ b/R/R/prl_fictitious.R
@@ -1,73 +1,47 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model
-#' @templateVar MODEL_CITE (Glascher et al., 2009, Cerebral Cortex)
+#' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-#' Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-#' Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#'
 prl_fictitious <- hBayesDM_model(
   task_name = "prl",
   model_name = "fictitious",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("eta"   = c(0, 0.5, 1),
-                    "alpha" = c(-Inf, 0, Inf),
-                    "beta"  = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "pe_c"  = 2,
-                    "pe_nc" = 2,
-                    "dv"    = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "eta" = c(NULL, 0.5, 1),
+    "alpha" = c(-Inf, NULL, Inf),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "pe_c" = 2,
+    "pe_nc" = 2,
+    "dv" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
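The fictitious-update idea behind the pe_c and pe_nc regressors is that the unchosen option is updated as if it had been played and had returned the opposite outcome. A hedged sketch under that reading (illustrative names; the Stan file is authoritative):

# Illustrative fictitious update for options coded 1 and 2.
fictitious_update <- function(ev, chosen, outcome, eta) {
  nonchosen <- 3 - chosen
  pe_c  <- outcome - ev[chosen]       # prediction error, chosen ("pe_c")
  pe_nc <- -outcome - ev[nonchosen]   # fictive prediction error ("pe_nc")
  ev[chosen]    <- ev[chosen] + eta * pe_c
  ev[nonchosen] <- ev[nonchosen] + eta * pe_nc
  ev
}
fictitious_update(ev = c(0, 0), chosen = 1, outcome = 1, eta = 0.5)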
diff --git a/R/R/prl_fictitious_multipleB.R b/R/R/prl_fictitious_multipleB.R
index d6f71822..6d5c59f9 100644
--- a/R/R/prl_fictitious_multipleB.R
+++ b/R/R/prl_fictitious_multipleB.R
@@ -1,85 +1,48 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_multipleB
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model
-#' @templateVar MODEL_CITE (Glascher et al., 2009, Cerebral Cortex)
+#' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Multiple-Block Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "block", "choice", "outcome"
 #' @templateVar PARAMETERS "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"block"}{A unique identifier for each of the multiple blocks within each subject.}
 #' @templateVar DETAILS_DATA_3 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-#' Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-#' Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#'
 prl_fictitious_multipleB <- hBayesDM_model(
   task_name = "prl",
   model_name = "fictitious",
   model_type = "multipleB",
   data_columns = c("subjID", "block", "choice", "outcome"),
-  parameters = list("eta"   = c(0, 0.5, 1),
-                    "alpha" = c(-Inf, 0, Inf),
-                    "beta"  = c(0, 1, 10)),
-  regressors = list("ev_c"  = 3,
-                    "ev_nc" = 3,
-                    "pe_c"  = 3,
-                    "pe_nc" = 3,
-                    "dv"    = 3),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    b_subjs <- general_info$b_subjs
-    b_max   <- general_info$b_max
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, b_max, t_max))
-    outcome <- array( 0, c(n_subj, b_max, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-      blocks_of_subj <- unique(DT_subj$block)
-
-      for (b in 1:b_subjs[i]) {
-        curr_block <- blocks_of_subj[b]
-        DT_curr_block <- DT_subj[block == curr_block]
-        t <- t_subjs[i, b]
-
-        choice[i, b, 1:t]  <- DT_curr_block$choice
-        outcome[i, b, 1:t] <- sign(DT_curr_block$outcome)  # use sign
-      }
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      B       = b_max,
-      Bsubj   = b_subjs,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "eta" = c(NULL, 0.5, 1),
+    "alpha" = c(-Inf, NULL, Inf),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 3,
+    "ev_nc" = 3,
+    "pe_c" = 3,
+    "pe_nc" = 3,
+    "dv" = 3
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_multipleB_preprocess_func)
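The multipleB variants differ from the single-block models mainly in data shape: every array gains a block dimension, and Stan receives B and Bsubj alongside T and Tsubj, exactly as the removed inline code above shows. A minimal runnable sketch of that shaping (prl_multipleB_preprocess_func itself lives in R/R/preprocess_funcs.R and is not shown in this diff):

# Sketch: the block-aware arrays the multipleB preprocess presumably builds.
make_multiB_arrays <- function(n_subj, b_max, t_max) {
  list(
    choice  = array(-1, c(n_subj, b_max, t_max)),  # subject x block x trial
    outcome = array(0,  c(n_subj, b_max, t_max))
  )
}
str(make_multiB_arrays(n_subj = 2, b_max = 3, t_max = 10))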
diff --git a/R/R/prl_fictitious_rp.R b/R/R/prl_fictitious_rp.R
index a8ec6d6c..6ab1b766 100644
--- a/R/R/prl_fictitious_rp.R
+++ b/R/R/prl_fictitious_rp.R
@@ -1,77 +1,50 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_rp
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
-#' @templateVar MODEL_NAME Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE)
+#' @templateVar TASK_CITE
+#' @templateVar MODEL_NAME Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)
+#' @templateVar MODEL_CITE (Glascher et al., 2009; Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-#' Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-#' Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#'
+#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
 #'
-#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
-#' (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
-#' 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
 prl_fictitious_rp <- hBayesDM_model(
   task_name = "prl",
   model_name = "fictitious_rp",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("eta_pos" = c(0, 0.5, 1),
-                    "eta_neg" = c(0, 0.5, 1),
-                    "alpha"   = c(-Inf, 0, Inf),
-                    "beta"    = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "pe_c"  = 2,
-                    "pe_nc" = 2,
-                    "dv"    = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "eta_pos" = c(NULL, 0.5, 1),
+    "eta_neg" = c(NULL, 0.5, 1),
+    "alpha" = c(-Inf, NULL, Inf),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "pe_c" = 2,
+    "pe_nc" = 2,
+    "dv" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
diff --git a/R/R/prl_fictitious_rp_woa.R b/R/R/prl_fictitious_rp_woa.R
index 496e7e44..fc039653 100644
--- a/R/R/prl_fictitious_rp_woa.R
+++ b/R/R/prl_fictitious_rp_woa.R
@@ -1,76 +1,49 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_rp_woa
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
-#' @templateVar MODEL_NAME Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)
+#' @templateVar TASK_CITE
+#' @templateVar MODEL_NAME Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)
+#' @templateVar MODEL_CITE (Glascher et al., 2009; Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-#' Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-#' Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#'
+#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
 #'
-#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
-#' (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
-#' 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
 prl_fictitious_rp_woa <- hBayesDM_model(
   task_name = "prl",
   model_name = "fictitious_rp_woa",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("eta_pos" = c(0, 0.5, 1),
-                    "eta_neg" = c(0, 0.5, 1),
-                    "beta"    = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "pe_c"  = 2,
-                    "pe_nc" = 2,
-                    "dv"    = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "eta_pos" = c(NULL, 0.5, 1),
+    "eta_neg" = c(NULL, 0.5, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "pe_c" = 2,
+    "pe_nc" = 2,
+    "dv" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
diff --git a/R/R/prl_fictitious_woa.R b/R/R/prl_fictitious_woa.R
index 4d1d9250..2aabbb82 100644
--- a/R/R/prl_fictitious_woa.R
+++ b/R/R/prl_fictitious_woa.R
@@ -1,71 +1,46 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_woa
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
-#' @templateVar MODEL_NAME Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), without alpha (indecision point)
+#' @templateVar TASK_CITE
+#' @templateVar MODEL_NAME Fictitious Update Model, without alpha (indecision point)
+#' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "eta" (learning rate), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-#' Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-#' Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#' Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+#'
 prl_fictitious_woa <- hBayesDM_model(
   task_name = "prl",
   model_name = "fictitious_woa",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("eta"  = c(0, 0.5, 1),
-                    "beta" = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "pe_c"  = 2,
-                    "pe_nc" = 2,
-                    "dv"    = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "eta" = c(NULL, 0.5, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "pe_c" = 2,
+    "pe_nc" = 2,
+    "dv" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
diff --git a/R/R/prl_rp.R b/R/R/prl_rp.R
index 44e24341..9eec71b6 100644
--- a/R/R/prl_rp.R
+++ b/R/R/prl_rp.R
@@ -1,71 +1,45 @@
 #' @templateVar MODEL_FUNCTION prl_rp
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Reward-Punishment Model
-#' @templateVar MODEL_CITE (Ouden et al., 2013, Neuron)
+#' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
 #' @templateVar PARAMETERS "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
-#' (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
-#' 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#'
 prl_rp <- hBayesDM_model(
   task_name = "prl",
   model_name = "rp",
+  model_type = "",
   data_columns = c("subjID", "choice", "outcome"),
-  parameters = list("Apun" = c(0, 0.1, 1),
-                    "Arew" = c(0, 0.1, 1),
-                    "beta" = c(0, 1, 10)),
-  regressors = list("ev_c"  = 2,
-                    "ev_nc" = 2,
-                    "pe"    = 2),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, t_max))
-    outcome <- array( 0, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      choice[i, 1:t]  <- DT_subj$choice
-      outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "Apun" = c(NULL, 0.1, 1),
+    "Arew" = c(NULL, 0.1, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 2,
+    "ev_nc" = 2,
+    "pe" = 2
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_preprocess_func)
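The reward-punishment model's two learning rates simply gate the same prediction-error update on the sign of the outcome. A hedged sketch of that idea (illustrative names; the Stan file is authoritative):

# Illustrative reward-punishment update; outcome is coded 1 (reward) or -1 (loss).
rp_update <- function(ev, chosen, outcome, Arew, Apun) {
  pe <- outcome - ev[chosen]              # the "pe" regressor
  lr <- if (outcome > 0) Arew else Apun   # rate depends on reward vs. punishment
  ev[chosen] <- ev[chosen] + lr * pe
  ev
}
rp_update(ev = c(0, 0), chosen = 2, outcome = -1, Arew = 0.2, Apun = 0.1)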
diff --git a/R/R/prl_rp_multipleB.R b/R/R/prl_rp_multipleB.R
index 07541231..22c2ac33 100644
--- a/R/R/prl_rp_multipleB.R
+++ b/R/R/prl_rp_multipleB.R
@@ -1,83 +1,46 @@
 #' @templateVar MODEL_FUNCTION prl_rp_multipleB
-#' @templateVar CONTRIBUTOR (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Reward-Punishment Model
-#' @templateVar MODEL_CITE (Ouden et al., 2013, Neuron)
+#' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Multiple-Block Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "block", "choice", "outcome"
 #' @templateVar PARAMETERS "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe"
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
 #' @templateVar DETAILS_DATA_2 \item{"block"}{A unique identifier for each of the multiple blocks within each subject.}
 #' @templateVar DETAILS_DATA_3 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
 #' @templateVar DETAILS_DATA_4 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
-#' (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
-#' 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#' Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+#'
 prl_rp_multipleB <- hBayesDM_model(
   task_name = "prl",
   model_name = "rp",
   model_type = "multipleB",
   data_columns = c("subjID", "block", "choice", "outcome"),
-  parameters = list("Apun" = c(0, 0.1, 1),
-                    "Arew" = c(0, 0.1, 1),
-                    "beta" = c(0, 1, 10)),
-  regressors = list("ev_c"  = 3,
-                    "ev_nc" = 3,
-                    "pe"    = 3),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    b_subjs <- general_info$b_subjs
-    b_max   <- general_info$b_max
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    choice  <- array(-1, c(n_subj, b_max, t_max))
-    outcome <- array( 0, c(n_subj, b_max, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-      blocks_of_subj <- unique(DT_subj$block)
-
-      for (b in 1:b_subjs[i]) {
-        curr_block <- blocks_of_subj[b]
-        DT_curr_block <- DT_subj[block == curr_block]
-        t <- t_subjs[i, b]
-
-        choice[i, b, 1:t]  <- DT_curr_block$choice
-        outcome[i, b, 1:t] <- sign(DT_curr_block$outcome)  # use sign
-      }
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      B       = b_max,
-      Bsubj   = b_subjs,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      choice  = choice,
-      outcome = outcome
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "Apun" = c(NULL, 0.1, 1),
+    "Arew" = c(NULL, 0.1, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = list(
+    "ev_c" = 3,
+    "ev_nc" = 3,
+    "pe" = 3
+  ),
+  postpreds = c("y_pred"),
+  preprocess_func = prl_multipleB_preprocess_func)
diff --git a/R/R/pst_gainloss_Q.R b/R/R/pst_gainloss_Q.R
index 57f76944..d58eb67e 100644
--- a/R/R/pst_gainloss_Q.R
+++ b/R/R/pst_gainloss_Q.R
@@ -1,74 +1,42 @@
 #' @templateVar MODEL_FUNCTION pst_gainloss_Q
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
 #' @templateVar TASK_NAME Probabilistic Selection Task
+#' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Gain-Loss Q Learning Model
-#' @templateVar MODEL_CITE (Frank et al., 2007, PNAS)
+#' @templateVar MODEL_CITE (Frank et al., 2007)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "type", "choice", "reward"
 #' @templateVar PARAMETERS "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"type"}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. \code{12}, \code{34}, or \code{56}. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2.\cr Code for each stimulus type (1~6) is defined as below: \tabular{ccl}{Code \tab Stimulus \tab Probability to win \cr \code{1} \tab A \tab 80\% \cr \code{2} \tab B \tab 20\% \cr \code{3} \tab C \tab 70\% \cr \code{4} \tab D \tab 30\% \cr \code{5} \tab E \tab 60\% \cr \code{6} \tab F \tab 40\%} The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
+#' @templateVar DETAILS_DATA_2 \item{"type"}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below:\cr\cr ===== ======== ==================\cr Code Stimulus Probability to win\cr ===== ======== ==================\cr 1 A 80%\cr 2 B 20%\cr 3 C 70%\cr 4 D 30%\cr 5 E 60%\cr 6 F 40%\cr ===== ======== ==================\cr\cr The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
 #' @templateVar DETAILS_DATA_3 \item{"choice"}{Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).}
 #' @templateVar DETAILS_DATA_4 \item{"reward"}{Amount of reward earned as a result of the trial.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic
-#' triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings
-#' of the National Academy of Sciences, 104(41), 16311-16316.
+#' Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.
+#'
 pst_gainloss_Q <- hBayesDM_model(
   task_name = "pst",
   model_name = "gainloss_Q",
+  model_type = "",
   data_columns = c("subjID", "type", "choice", "reward"),
-  parameters = list("alpha_pos" = c(0, 0.5, 1),
-                    "alpha_neg" = c(0, 0.5, 1),
-                    "beta"      = c(0, 1, 10)),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    option1 <- array(-1, c(n_subj, t_max))
-    option2 <- array(-1, c(n_subj, t_max))
-    choice  <- array(-1, c(n_subj, t_max))
-    reward  <- array(-1, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      option1[i, 1:t] <- DT_subj$type %/% 10
-      option2[i, 1:t] <- DT_subj$type %% 10
-      choice[i, 1:t]  <- DT_subj$choice
-      reward[i, 1:t]  <- DT_subj$reward
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N       = n_subj,
-      T       = t_max,
-      Tsubj   = t_subjs,
-      option1 = option1,
-      option2 = option2,
-      choice  = choice,
-      reward  = reward
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "alpha_pos" = c(NULL, 0.5, 1),
+    "alpha_neg" = c(NULL, 0.5, 1),
+    "beta" = c(NULL, 1, 10)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = pst_preprocess_func)
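The tens-digit/ones-digit encoding of the "type" column documented above is exactly what the removed inline code decoded with integer division and modulo. As a standalone illustration:

# Decode the two-digit "type" column: tens digit = option1, ones digit = option2
# (same arithmetic as the removed inline preprocess code).
decode_type <- function(type) {
  list(option1 = type %/% 10, option2 = type %% 10)
}
decode_type(12)  # option1 = 1 (stimulus A, 80%), option2 = 2 (stimulus B, 20%)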
diff --git a/R/R/ra_noLA.R b/R/R/ra_noLA.R
index 8fa5de1d..1f6505ad 100644
--- a/R/R/ra_noLA.R
+++ b/R/R/ra_noLA.R
@@ -1,81 +1,42 @@
 #' @templateVar MODEL_FUNCTION ra_noLA
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Risk Aversion Task
-#' @templateVar MODEL_NAME Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without loss aversion (LA) parameter
+#' @templateVar TASK_CITE
+#' @templateVar MODEL_NAME Prospect Theory, without loss aversion (LA) parameter
+#' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "gamble"
 #' @templateVar PARAMETERS "rho" (risk aversion), "tau" (inverse temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 5
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
 #' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
 #' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
 #' @templateVar DETAILS_DATA_5 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., &
-#' Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion.
-#' Proceedings of the National Academy of Sciences of the United States of America, 106(13),
-#' 5035-5040. http://www.pnas.org/content/106/13/5035
-#'
-#' @examples
+#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
 #'
-#' \dontrun{
-#' # Paths to data published in Sokol-Hessner et al. (2009)
-#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
-#' }
 ra_noLA <- hBayesDM_model(
   task_name = "ra",
   model_name = "noLA",
+  model_type = "",
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
-  parameters = list("rho" = c(0, 1, 2),
-                    "tau" = c(0, 1, 30)),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    gain   <- array( 0, c(n_subj, t_max))
-    loss   <- array( 0, c(n_subj, t_max))
-    cert   <- array( 0, c(n_subj, t_max))
-    gamble <- array(-1, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      gain[i, 1:t]   <- DT_subj$gain
-      loss[i, 1:t]   <- abs(DT_subj$loss)  # absolute loss amount
-      cert[i, 1:t]   <- DT_subj$cert
-      gamble[i, 1:t] <- DT_subj$gamble
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N      = n_subj,
-      T      = t_max,
-      Tsubj  = t_subjs,
-      gain   = gain,
-      loss   = loss,
-      cert   = cert,
-      gamble = gamble
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "rho" = c(NULL, 1, 2),
+    "tau" = c(NULL, 1, 30)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = ra_preprocess_func)
diff --git a/R/R/ra_noRA.R b/R/R/ra_noRA.R
index 0b4b7995..94b4a54e 100644
--- a/R/R/ra_noRA.R
+++ b/R/R/ra_noRA.R
@@ -1,81 +1,42 @@
 #' @templateVar MODEL_FUNCTION ra_noRA
+#' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Risk Aversion Task
-#' @templateVar MODEL_NAME Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without risk aversion (RA) parameter
+#' @templateVar TASK_CITE
+#' @templateVar MODEL_NAME Prospect Theory, without risk aversion (RA) parameter
+#' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "gamble"
 #' @templateVar PARAMETERS "lambda" (loss aversion), "tau" (inverse temperature)
+#' @templateVar REGRESSORS
+#' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 5
 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
 #' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
 #' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
"cert" is assumed to be zero or greater than zero.} #' @templateVar DETAILS_DATA_5 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & -#' Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. -#' Proceedings of the National Academy of Sciences of the United States of America, 106(13), -#' 5035-5040. http://www.pnas.org/content/106/13/5035 -#' -#' @examples +#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035 #' -#' \dontrun{ -#' # Paths to data published in Sokol-Hessner et al. (2009) -#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") -#' } ra_noRA <- hBayesDM_model( task_name = "ra", model_name = "noRA", + model_type = "", data_columns = c("subjID", "gain", "loss", "cert", "gamble"), - parameters = list("lambda" = c(0, 1, 5), - "tau" = c(0, 1, 30)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - gain <- array( 0, c(n_subj, t_max)) - loss <- array( 0, c(n_subj, t_max)) - cert <- array( 0, c(n_subj, t_max)) - gamble <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - gain[i, 1:t] <- DT_subj$gain - loss[i, 1:t] <- abs(DT_subj$loss) # absolute loss amount - cert[i, 1:t] <- DT_subj$cert - gamble[i, 1:t] <- DT_subj$gamble - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - gain = gain, - loss = loss, - cert = cert, - gamble = gamble - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "lambda" = c(NULL, 1, 5), + "tau" = c(NULL, 1, 30) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = ra_preprocess_func) diff --git a/R/R/ra_prospect.R b/R/R/ra_prospect.R index 50175d98..58ffe947 100644 --- a/R/R/ra_prospect.R +++ b/R/R/ra_prospect.R @@ -1,83 +1,43 @@ #' @templateVar MODEL_FUNCTION ra_prospect +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Risk Aversion Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Prospect Theory -#' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009, PNAS) +#' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "gamble" #' @templateVar PARAMETERS "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 5 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each 
-#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
 #' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
 #' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
 #' @templateVar DETAILS_DATA_5 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.}
-#'
+#' @templateVar LENGTH_ADDITIONAL_ARGS 0
+#'
 #' @template model-documentation
 #'
 #' @export
 #' @include hBayesDM_model.R
-#'
+#' @include preprocess_funcs.R
+#'
 #' @references
-#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., &
-#' Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion.
-#' Proceedings of the National Academy of Sciences of the United States of America, 106(13),
-#' 5035-5040. http://www.pnas.org/content/106/13/5035
-#'
-#' @examples
+#' Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
 #'
-#' \dontrun{
-#' # Paths to data published in Sokol-Hessner et al. (2009)
-#' path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-#' path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
-#' }
 ra_prospect <- hBayesDM_model(
   task_name = "ra",
   model_name = "prospect",
+  model_type = "",
   data_columns = c("subjID", "gain", "loss", "cert", "gamble"),
-  parameters = list("rho"    = c(0, 1, 2),
-                    "lambda" = c(0, 1, 5),
-                    "tau"    = c(0, 1, 30)),
-  preprocess_func = function(raw_data, general_info) {
-    # Currently class(raw_data) == "data.table"
-
-    # Use general_info of raw_data
-    subjs   <- general_info$subjs
-    n_subj  <- general_info$n_subj
-    t_subjs <- general_info$t_subjs
-    t_max   <- general_info$t_max
-
-    # Initialize (model-specific) data arrays
-    gain   <- array( 0, c(n_subj, t_max))
-    loss   <- array( 0, c(n_subj, t_max))
-    cert   <- array( 0, c(n_subj, t_max))
-    gamble <- array(-1, c(n_subj, t_max))
-
-    # Write from raw_data to the data arrays
-    for (i in 1:n_subj) {
-      subj <- subjs[i]
-      t <- t_subjs[i]
-      DT_subj <- raw_data[subjid == subj]
-
-      gain[i, 1:t]   <- DT_subj$gain
-      loss[i, 1:t]   <- abs(DT_subj$loss)  # absolute loss amount
-      cert[i, 1:t]   <- DT_subj$cert
-      gamble[i, 1:t] <- DT_subj$gamble
-    }
-
-    # Wrap into a list for Stan
-    data_list <- list(
-      N      = n_subj,
-      T      = t_max,
-      Tsubj  = t_subjs,
-      gain   = gain,
-      loss   = loss,
-      cert   = cert,
-      gamble = gamble
-    )
-
-    # Returned data_list will directly be passed to Stan
-    return(data_list)
-  }
-)
+  parameters = list(
+    "rho" = c(NULL, 1, 2),
+    "lambda" = c(NULL, 1, 5),
+    "tau" = c(NULL, 1, 30)
+  ),
+  regressors = NULL,
+  postpreds = c("y_pred"),
+  preprocess_func = ra_preprocess_func)
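The @examples blocks removed from the three ra_* files above are still the quickest way to locate the bundled Sokol-Hessner et al. (2009) data. As a usage sketch taken from those removed examples:

# Paths to data published in Sokol-Hessner et al. (2009), shipped with hBayesDM
# (verbatim from the removed @examples block).
path_to_attend_data   <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
# Either path can then be supplied as the data argument to ra_prospect(),
# ra_noLA(), or ra_noRA().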
\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} #' @templateVar TASK_NAME Risky Decision Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Happiness Computational Model -#' @templateVar MODEL_CITE (Rutledge et al., 2014, PNAS) +#' @templateVar MODEL_CITE (Rutledge et al., 2014) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy" #' @templateVar PARAMETERS "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 9 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).} @@ -16,79 +19,32 @@ #' @templateVar DETAILS_DATA_7 \item{"outcome"}{Result of the trial.} #' @templateVar DETAILS_DATA_8 \item{"happy"}{Happiness score.} #' @templateVar DETAILS_DATA_9 \item{"RT_happy"}{Reaction time for answering the happiness score.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model -#' of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), -#' 12252-12257. +#' Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257. 
+#' rdt_happiness <- hBayesDM_model( task_name = "rdt", model_name = "happiness", + model_type = "", data_columns = c("subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy"), - parameters = list("w0" = c(-Inf, 1, Inf), - "w1" = c(-Inf, 1, Inf), - "w2" = c(-Inf, 1, Inf), - "w3" = c(-Inf, 1, Inf), - "gam" = c(0, 0.5, 1), - "sig" = c(0, 1, Inf)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - gain <- array( 0, c(n_subj, t_max)) - loss <- array( 0, c(n_subj, t_max)) - cert <- array( 0, c(n_subj, t_max)) - type <- array(-1, c(n_subj, t_max)) - gamble <- array(-1, c(n_subj, t_max)) - outcome <- array( 0, c(n_subj, t_max)) - happy <- array( 0, c(n_subj, t_max)) - RT_happy <- array( 0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - gain[i, 1:t] <- DT_subj$gain - loss[i, 1:t] <- abs(DT_subj$loss) # absolute loss amount - cert[i, 1:t] <- DT_subj$cert - type[i, 1:t] <- DT_subj$type - gamble[i, 1:t] <- DT_subj$gamble - outcome[i, 1:t] <- DT_subj$outcome - happy[i, 1:t] <- DT_subj$happy - RT_happy[i, 1:t] <- DT_subj$rthappy - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - gain = gain, - loss = loss, - cert = cert, - type = type, - gamble = gamble, - outcome = outcome, - happy = happy, - RT_happy = RT_happy - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "w0" = c(-Inf, 1, Inf), + "w1" = c(-Inf, 1, Inf), + "w2" = c(-Inf, 1, Inf), + "w3" = c(-Inf, 1, Inf), + "gam" = c(NULL, 0.5, 1), + "sig" = c(NULL, 1, Inf) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = rdt_preprocess_func) diff --git a/R/R/ts_par4.R b/R/R/ts_par4.R index 91337ea5..cf7ed1df 100644 --- a/R/R/ts_par4.R +++ b/R/R/ts_par4.R @@ -1,78 +1,48 @@ #' @templateVar MODEL_FUNCTION ts_par4 -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} #' @templateVar TASK_NAME Two-Step Task #' @templateVar TASK_CITE (Daw et al., 2011) -#' @templateVar MODEL_NAME Hybrid Model (Daw et al., 2011; Wunderlich et al., 2012), with 4 parameters +#' @templateVar MODEL_NAME Hybrid Model, with 4 parameters +#' @templateVar MODEL_CITE (Daw et al., 2011; Wunderlich et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "level1_choice", "level2_choice", "reward" #' @templateVar PARAMETERS "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight) -#' @templateVar ADDITIONAL_ARG \code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. 
+#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred_step1", "y_pred_step2" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).} -#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.} +#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.} #' @templateVar DETAILS_DATA_4 \item{"reward"}{Reward after Level 2 (0 or 1).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). -#' Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), -#' 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' +#' Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424. #' -#' Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over -#' model-free choice behavior. Neuron, 75(3), 418-424. 
ts_par4 <- hBayesDM_model( task_name = "ts", model_name = "par4", + model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), - parameters = list("a" = c(0, 0.5, 1), - "beta" = c(0, 1, Inf), - "pi" = c(0, 1, 5), - "w" = c(0, 0.5, 1)), + parameters = list( + "a" = c(NULL, 0.5, 1), + "beta" = c(NULL, 1, Inf), + "pi" = c(NULL, 1, 5), + "w" = c(NULL, 0.5, 1) + ), + regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), - preprocess_func = function(raw_data, general_info, trans_prob = 0.7) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - level1_choice <- array(1, c(n_subj, t_max)) - level2_choice <- array(1, c(n_subj, t_max)) - reward <- array(0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - level1_choice[i, 1:t] <- DT_subj$level1choice - level2_choice[i, 1:t] <- DT_subj$level2choice - reward[i, 1:t] <- DT_subj$reward - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - level1_choice = level1_choice, - level2_choice = level2_choice, - reward = reward, - trans_prob = trans_prob - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + preprocess_func = ts_preprocess_func) diff --git a/R/R/ts_par6.R b/R/R/ts_par6.R index 331678d5..5bddb323 100644 --- a/R/R/ts_par6.R +++ b/R/R/ts_par6.R @@ -1,77 +1,48 @@ #' @templateVar MODEL_FUNCTION ts_par6 -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} #' @templateVar TASK_NAME Two-Step Task #' @templateVar TASK_CITE (Daw et al., 2011) -#' @templateVar MODEL_NAME Hybrid Model (Daw et al., 2011, Neuron), with 6 parameters +#' @templateVar MODEL_NAME Hybrid Model, with 6 parameters +#' @templateVar MODEL_CITE (Daw et al., 2011) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "level1_choice", "level2_choice", "reward" #' @templateVar PARAMETERS "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight) -#' @templateVar ADDITIONAL_ARG \code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred_step1", "y_pred_step2" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).} -#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. 
To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.} +#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.} #' @templateVar DETAILS_DATA_4 \item{"reward"}{Reward after Level 2 (0 or 1).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). -#' Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), -#' 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' ts_par6 <- hBayesDM_model( task_name = "ts", model_name = "par6", + model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), - parameters = list("a1" = c(0, 0.5, 1), - "beta1" = c(0, 1, Inf), - "a2" = c(0, 0.5, 1), - "beta2" = c(0, 1, Inf), - "pi" = c(0, 1, 5), - "w" = c(0, 0.5, 1)), + parameters = list( + "a1" = c(NULL, 0.5, 1), + "beta1" = c(NULL, 1, Inf), + "a2" = c(NULL, 0.5, 1), + "beta2" = c(NULL, 1, Inf), + "pi" = c(NULL, 1, 5), + "w" = c(NULL, 0.5, 1) + ), + regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), - preprocess_func = function(raw_data, general_info, trans_prob = 0.7) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - level1_choice <- array(1, c(n_subj, t_max)) - level2_choice <- array(1, c(n_subj, t_max)) - reward <- array(0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - level1_choice[i, 1:t] <- DT_subj$level1choice - level2_choice[i, 1:t] <- DT_subj$level2choice - reward[i, 1:t] <- DT_subj$reward - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - level1_choice = level1_choice, - level2_choice = level2_choice, - reward = reward, - trans_prob = trans_prob - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + preprocess_func = ts_preprocess_func) diff --git a/R/R/ts_par7.R b/R/R/ts_par7.R index 5a0a9c66..cdc2cea9 100644 --- a/R/R/ts_par7.R +++ b/R/R/ts_par7.R @@ -1,78 +1,49 @@ #' 
@templateVar MODEL_FUNCTION ts_par7 -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} #' @templateVar TASK_NAME Two-Step Task #' @templateVar TASK_CITE (Daw et al., 2011) -#' @templateVar MODEL_NAME Hybrid Model (Daw et al., 2011, Neuron), with 7 parameters (original model) +#' @templateVar MODEL_NAME Hybrid Model, with 7 parameters (original model) +#' @templateVar MODEL_CITE (Daw et al., 2011) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "level1_choice", "level2_choice", "reward" #' @templateVar PARAMETERS "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace) -#' @templateVar ADDITIONAL_ARG \code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred_step1", "y_pred_step2" #' @templateVar LENGTH_DATA_COLUMNS 4 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).} -#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.} +#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.} #' @templateVar DETAILS_DATA_4 \item{"reward"}{Reward after Level 2 (0 or 1).} +#' @templateVar LENGTH_ADDITIONAL_ARGS 1 +#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. #' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). -#' Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), -#' 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027 +#' +#' Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. 
http://doi.org/10.1016/j.neuron.2011.02.027 +#' ts_par7 <- hBayesDM_model( task_name = "ts", model_name = "par7", + model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), - parameters = list("a1" = c(0, 0.5, 1), - "beta1" = c(0, 1, Inf), - "a2" = c(0, 0.5, 1), - "beta2" = c(0, 1, Inf), - "pi" = c(0, 1, 5), - "w" = c(0, 0.5, 1), - "lambda" = c(0, 0.5, 1)), + parameters = list( + "a1" = c(NULL, 0.5, 1), + "beta1" = c(NULL, 1, Inf), + "a2" = c(NULL, 0.5, 1), + "beta2" = c(NULL, 1, Inf), + "pi" = c(NULL, 1, 5), + "w" = c(NULL, 0.5, 1), + "lambda" = c(NULL, 0.5, 1) + ), + regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), - preprocess_func = function(raw_data, general_info, trans_prob = 0.7) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - level1_choice <- array(1, c(n_subj, t_max)) - level2_choice <- array(1, c(n_subj, t_max)) - reward <- array(0, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - level1_choice[i, 1:t] <- DT_subj$level1choice - level2_choice[i, 1:t] <- DT_subj$level2choice - reward[i, 1:t] <- DT_subj$reward - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - level1_choice = level1_choice, - level2_choice = level2_choice, - reward = reward, - trans_prob = trans_prob - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + preprocess_func = ts_preprocess_func) diff --git a/R/R/ug_bayes.R b/R/R/ug_bayes.R index eb356df3..7edc34a2 100644 --- a/R/R/ug_bayes.R +++ b/R/R/ug_bayes.R @@ -1,66 +1,41 @@ #' @templateVar MODEL_FUNCTION ug_bayes +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Norm-Training Ultimatum Game +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Ideal Observer Model -#' @templateVar MODEL_CITE (Xiang et al., 2013, J Neuro) +#' @templateVar MODEL_CITE (Xiang et al., 2013) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "offer", "accept" #' @templateVar PARAMETERS "alpha" (envy), "beta" (guilt), "tau" (inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).} #' @templateVar DETAILS_DATA_3 \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their -#' Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. -#' http://doi.org/10.1523/JNEUROSCI.1642-12.2013 +#' Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. 
http://doi.org/10.1523/JNEUROSCI.1642-12.2013 +#' ug_bayes <- hBayesDM_model( task_name = "ug", model_name = "bayes", + model_type = "", data_columns = c("subjID", "offer", "accept"), - parameters = list("alpha" = c(0, 1, 20), - "beta" = c(0, 0.5, 10), - "tau" = c(0, 1, 10)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - offer <- array( 0, c(n_subj, t_max)) - accept <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - offer[i, 1:t] <- DT_subj$offer - accept[i, 1:t] <- DT_subj$accept - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - offer = offer, - accept = accept - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "alpha" = c(NULL, 1, 20), + "beta" = c(NULL, 0.5, 10), + "tau" = c(NULL, 1, 10) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = ug_preprocess_func) diff --git a/R/R/ug_delta.R b/R/R/ug_delta.R index e285d543..ac127dff 100644 --- a/R/R/ug_delta.R +++ b/R/R/ug_delta.R @@ -1,67 +1,41 @@ #' @templateVar MODEL_FUNCTION ug_delta +#' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Norm-Training Ultimatum Game +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Rescorla-Wagner (Delta) Model -#' @templateVar MODEL_CITE (Gu et al., 2015, J Neuro) +#' @templateVar MODEL_CITE (Gu et al., 2015) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "offer", "accept" #' @templateVar PARAMETERS "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).} #' @templateVar DETAILS_DATA_3 \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' +#' @include preprocess_funcs.R +#' #' @references -#' Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet -#' Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm -#' Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), -#' 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015 +#' Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. 
http://doi.org/10.1523/JNEUROSCI.2906-14.2015 +#' ug_delta <- hBayesDM_model( task_name = "ug", model_name = "delta", + model_type = "", data_columns = c("subjID", "offer", "accept"), - parameters = list("alpha" = c(0, 1, 20), - "tau" = c(0, 1, 10), - "ep" = c(0, 0.5, 1)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs - t_max <- general_info$t_max - - # Initialize (model-specific) data arrays - offer <- array( 0, c(n_subj, t_max)) - accept <- array(-1, c(n_subj, t_max)) - - # Write from raw_data to the data arrays - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - - offer[i, 1:t] <- DT_subj$offer - accept[i, 1:t] <- DT_subj$accept - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - offer = offer, - accept = accept - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "alpha" = c(NULL, 1, 20), + "tau" = c(NULL, 1, 10), + "ep" = c(NULL, 0.5, 1) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = ug_preprocess_func) diff --git a/R/R/wcs_sql.R b/R/R/wcs_sql.R index 04ad768e..502283af 100644 --- a/R/R/wcs_sql.R +++ b/R/R/wcs_sql.R @@ -1,91 +1,41 @@ #' @templateVar MODEL_FUNCTION wcs_sql -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} #' @templateVar TASK_NAME Wisconsin Card Sorting Task +#' @templateVar TASK_CITE #' @templateVar MODEL_NAME Sequential Learning Model -#' @templateVar MODEL_CITE (Bishara et al., 2010, Journal of Mathematical Psychology) +#' @templateVar MODEL_CITE (Bishara et al., 2010) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" #' @templateVar PARAMETERS "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature) +#' @templateVar REGRESSORS +#' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 #' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} #' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.} #' @templateVar DETAILS_DATA_3 \item{"outcome"}{1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.} -#' +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' #' @template model-documentation #' #' @export #' @include hBayesDM_model.R -#' @importFrom utils read.table -#' +#' @include preprocess_funcs.R +#' #' @references -#' Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. -#' (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in -#' substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13. +#' Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13. 
+#' wcs_sql <- hBayesDM_model( task_name = "wcs", model_name = "sql", + model_type = "", data_columns = c("subjID", "choice", "outcome"), - parameters = list("r" = c(0, 0.1, 1), - "p" = c(0, 0.1, 1), - "d" = c(0, 1, 5)), - preprocess_func = function(raw_data, general_info) { - # Currently class(raw_data) == "data.table" - - # Use general_info of raw_data - subjs <- general_info$subjs - n_subj <- general_info$n_subj - t_subjs <- general_info$t_subjs -# t_max <- general_info$t_max - t_max <- 128 - - # Read predefined answer sheet - answersheet <- system.file("extdata", "wcs_answersheet.txt", package = "hBayesDM") - answer <- read.table(answersheet, header = TRUE) - - # Initialize data arrays - choice <- array( 0, c(n_subj, 4, t_max)) - outcome <- array(-1, c(n_subj, t_max)) - choice_match_att <- array( 0, c(n_subj, t_max, 1, 3)) # Info about chosen deck (per each trial) - deck_match_rule <- array( 0, c(t_max, 3, 4)) # Info about all 4 decks (per each trial) - - # Write: choice, outcome, choice_match_att - for (i in 1:n_subj) { - subj <- subjs[i] - t <- t_subjs[i] - DT_subj <- raw_data[subjid == subj] - DT_subj_choice <- DT_subj$choice - DT_subj_outcome <- DT_subj$outcome - - for (tr in 1:t) { - ch <- DT_subj_choice[tr] - ou <- DT_subj_outcome[tr] - choice[i, ch, tr] <- 1 - outcome[i, tr] <- ou - choice_match_att[i, tr, 1, ] <- answer[, tr] == ch - } - } - - # Write: deck_match_rule - for (tr in 1:t_max) { - for (ru in 1:3) { - deck_match_rule[tr, ru, answer[ru, tr]] <- 1 - } - } - - # Wrap into a list for Stan - data_list <- list( - N = n_subj, - T = t_max, - Tsubj = t_subjs, - choice = choice, - outcome = outcome, - choice_match_att = choice_match_att, - deck_match_rule = deck_match_rule - ) - - # Returned data_list will directly be passed to Stan - return(data_list) - } -) + parameters = list( + "r" = c(NULL, 0.1, 1), + "p" = c(NULL, 0.1, 1), + "d" = c(NULL, 1, 5) + ), + regressors = NULL, + postpreds = c("y_pred"), + preprocess_func = wcs_preprocess_func) From 69fae56a4743bed5a7048d7efac66126b726501f Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 02:26:49 +0900 Subject: [PATCH 043/163] Fix model information --- commons/generate-r-codes.py | 19 ++++++++++--------- commons/models/bart_par4.json | 10 +++++----- commons/models/pst_gainloss_Q.json | 2 +- commons/models/ra_noLA.json | 2 +- commons/models/ra_noRA.json | 2 +- commons/models/ra_prospect.json | 2 +- commons/templates/R_CODE_TEMPLATE.txt | 5 ++--- commons/templates/R_DOCS_TEMPLATE.txt | 2 ++ 8 files changed, 23 insertions(+), 21 deletions(-) diff --git a/commons/generate-r-codes.py b/commons/generate-r-codes.py index 53cf8dbb..a699a712 100644 --- a/commons/generate-r-codes.py +++ b/commons/generate-r-codes.py @@ -49,8 +49,6 @@ def parse_cite_string(cite): 'year': year, 'shortcite': shortcite, 'barecite': barecite, - 'textcite': textcite, - 'parencite': parencite, 'fullcite': cite } @@ -85,7 +83,7 @@ def generate_docstring(info): # Notes if len(info.get('notes', [])) > 0: - notes = '@description\n#\' \\strong{Notes:}\n#\' ' + \ + notes = '@note\n#\' \\strong{Notes:}\n#\' ' + \ '\n#\' '.join(info['notes']) notes = '\n#\' ' + notes + '\n#\'' else: @@ -93,7 +91,8 @@ def generate_docstring(info): # Contributors contributors = ', '.join([ - r'\href{%s}{%s} <%s>' % (c['link'], c['name'], c['email']) + r'\href{%s}{%s} <\email{%s}>' + % (c['link'], c['name'], c['email'].replace('@', '@@')) for c in info.get('contributors', []) ]) @@ -103,14 +102,14 @@ def generate_docstring(info): ]) data_columns_len = 
len(info['data_columns']) data_columns_details = '\n#\' '.join([ - r'@templateVar DETAILS_DATA_%d \item{"%s"}{%s}' + r'@templateVar DETAILS_DATA_%d \item{%s}{%s}' % (i + 1, k, v.replace('\n', '\\cr')) for i, (k, v) in enumerate(info['data_columns'].items()) ]) # Parameters parameters = ', '.join([ - '"%s" (%s)' % (k, v['desc']) + '\\code{%s} (%s)' % (k, v['desc']) for k, v in info['parameters'].items() ]) @@ -129,7 +128,7 @@ def generate_docstring(info): additional_args_len = len(additional_args) if additional_args_len > 0: additional_args_details = '\n#\' '.join([ - r'@templateVar ADDITIONAL_ARGS_%d \strong{%s}: %s' + r'@templateVar ADDITIONAL_ARGS_%d \item{%s}{%s}' % (i + 1, v['code'], v['desc']) for i, v in enumerate(additional_args) ]) @@ -144,8 +143,10 @@ def generate_docstring(info): docs = docs_template % dict( model_function=model_function, task_name=info['task_name']['desc'], + task_code=info['task_name']['code'], task_parencite=task_parencite, model_name=info['model_name']['desc'], + model_code=info['model_name']['code'], model_parencite=model_parencite, model_type=info['model_type']['desc'], notes=notes, @@ -221,8 +222,8 @@ def generate_code(info): code = code_template % dict( model_function=model_function, - task_name=info['task_name']['code'], - model_name=info['model_name']['code'], + task_code=info['task_name']['code'], + model_code=info['model_name']['code'], model_type=info['model_type']['code'], data_columns=data_columns, parameters=parameters, diff --git a/commons/models/bart_par4.json b/commons/models/bart_par4.json index a0d0db39..844d82a2 100644 --- a/commons/models/bart_par4.json +++ b/commons/models/bart_par4.json @@ -2,14 +2,14 @@ "task_name": { "code": "bart", "desc": "Balloon Analogue Risk Task", - "cite": [ - "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105." - ] + "cite": [] }, "model_name": { "code": "par4", - "desc": "Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters", - "cite": [] + "desc": "Re-parameterized version of BART model with 4 parameters", + "cite": [ + "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105." + ] }, "model_type": { "code": "", diff --git a/commons/models/pst_gainloss_Q.json b/commons/models/pst_gainloss_Q.json index f9fcfa65..f2fec053 100644 --- a/commons/models/pst_gainloss_Q.json +++ b/commons/models/pst_gainloss_Q.json @@ -25,7 +25,7 @@ ], "data_columns": { "subjID": "A unique identifier for each subject in the data-set.", - "type": "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below:\n\n ===== ======== ==================\n Code Stimulus Probability to win\n ===== ======== ==================\n 1 A 80%\n 2 B 20%\n 3 C 70%\n 4 D 30%\n 5 E 60%\n 6 F 40%\n ===== ======== ==================\n\n The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.", + "type": "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 
12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined by its win probability: 80\\% (type 1), 20\\% (type 2), 70\\% (type 3), 30\\% (type 4), 60\\% (type 5), 40\\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.",
     "choice": "Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).",
     "reward": "Amount of reward earned as a result of the trial."
   },
diff --git a/commons/models/ra_noLA.json b/commons/models/ra_noLA.json
index 1c6ad3c5..ea8ec291 100644
--- a/commons/models/ra_noLA.json
+++ b/commons/models/ra_noLA.json
@@ -19,7 +19,7 @@
   "contributors": [],
   "data_columns": {
     "subjID": "A unique identifier for each subject in the data-set.",
-    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).}",
+    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
     "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
     "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero.",
     "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
diff --git a/commons/models/ra_noRA.json b/commons/models/ra_noRA.json
index 7dd2f329..0d0ad7c0 100644
--- a/commons/models/ra_noRA.json
+++ b/commons/models/ra_noRA.json
@@ -19,7 +19,7 @@
   "contributors": [],
   "data_columns": {
     "subjID": "A unique identifier for each subject in the data-set.",
-    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).}",
+    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
     "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
     "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero.",
     "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
diff --git a/commons/models/ra_prospect.json b/commons/models/ra_prospect.json
index 471c91da..86f43e01 100644
--- a/commons/models/ra_prospect.json
+++ b/commons/models/ra_prospect.json
@@ -19,7 +19,7 @@
   "contributors": [],
   "data_columns": {
     "subjID": "A unique identifier for each subject in the data-set.",
-    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).}",
+    "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
     "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
     "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero.",
     "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
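
The JSON corrections above and the template changes just below both feed commons/generate-r-codes.py, which fills each `%(...)s` placeholder in the templates by name using plain Python `%`-formatting with a dict (visible in the `docs_template % dict(...)` and `code_template % dict(...)` calls earlier in this patch). A minimal sketch of that substitution step follows; the template string and `info` dict here are illustrative stand-ins, not the actual contents of R_CODE_TEMPLATE.txt or the model JSON files:

```python
# Illustrative stand-ins, NOT the real R_CODE_TEMPLATE.txt or model JSON.
CODE_TEMPLATE = '''%(model_function)s <- hBayesDM_model(
  task_name = "%(task_code)s",
  model_name = "%(model_code)s",
  model_type = "%(model_type)s")
'''

info = {
    'task_name':  {'code': 'ra', 'desc': 'Risk Aversion Task'},
    'model_name': {'code': 'prospect', 'desc': 'Prospect Theory'},
    'model_type': {'code': '', 'desc': 'Hierarchical'},
}

# '%' with a dict fills each %(key)s placeholder by name, so a template
# variable can be renamed by updating the template and this dict in sync.
code = CODE_TEMPLATE % dict(
    model_function=info['task_name']['code'] + '_' + info['model_name']['code'],
    task_code=info['task_name']['code'],
    model_code=info['model_name']['code'],
    model_type=info['model_type']['code'],
)
print(code)
```

This by-name lookup is what lets the commit rename `task_name`/`model_name` to `task_code`/`model_code` in the templates below without touching the substitution logic itself.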
diff --git a/commons/templates/R_CODE_TEMPLATE.txt b/commons/templates/R_CODE_TEMPLATE.txt index 0de91d7a..d8a3ba0d 100644 --- a/commons/templates/R_CODE_TEMPLATE.txt +++ b/commons/templates/R_CODE_TEMPLATE.txt @@ -1,10 +1,9 @@ %(model_function)s <- hBayesDM_model( - task_name = "%(task_name)s", - model_name = "%(model_name)s", + task_name = "%(task_code)s", + model_name = "%(model_code)s", model_type = "%(model_type)s", data_columns = c(%(data_columns)s), parameters = %(parameters)s, regressors = %(regressors)s, postpreds = %(postpreds)s, preprocess_func = %(preprocess_func)s) - diff --git a/commons/templates/R_DOCS_TEMPLATE.txt b/commons/templates/R_DOCS_TEMPLATE.txt index 078430bb..6fd41d53 100644 --- a/commons/templates/R_DOCS_TEMPLATE.txt +++ b/commons/templates/R_DOCS_TEMPLATE.txt @@ -1,8 +1,10 @@ #' @templateVar MODEL_FUNCTION %(model_function)s #' @templateVar CONTRIBUTOR %(contributor)s #' @templateVar TASK_NAME %(task_name)s +#' @templateVar TASK_CODE %(task_code)s #' @templateVar TASK_CITE %(task_parencite)s #' @templateVar MODEL_NAME %(model_name)s +#' @templateVar MODEL_CODE %(model_code)s #' @templateVar MODEL_CITE %(model_parencite)s #' @templateVar MODEL_TYPE %(model_type)s #' @templateVar DATA_COLUMNS %(data_columns)s From b0c344f9a3c61ef7e56ba6d2023d27f51cfa55fd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 02:27:26 +0900 Subject: [PATCH 044/163] Re-add README --- R/README.Rmd | 96 ++++++++++++++++++++++++++++++++++++++++++++++++ R/README.md | 102 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 198 insertions(+) create mode 100644 R/README.Rmd create mode 100644 R/README.md diff --git a/R/README.Rmd b/R/README.Rmd new file mode 100644 index 00000000..2d71b920 --- /dev/null +++ b/R/README.Rmd @@ -0,0 +1,96 @@ +--- +output: github_document +--- + +# hBayesDM + +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) +[![Build Status](https://travis-ci.org/CCS-Lab/hBayesDM.svg?branch=master)](https://travis-ci.org/CCS-Lab/hBayesDM) +[![CRAN Latest Release](https://www.r-pkg.org/badges/version-last-release/hBayesDM)](https://cran.r-project.org/package=hBayesDM) +[![Downloads](https://cranlogs.r-pkg.org/badges/grand-total/hBayesDM)](https://cran.r-project.org/web/packages/hBayesDM/index.html) +[![DOI](https://zenodo.org/badge/doi/10.1162/CPSY_a_00002.svg)](https://doi.org/10.1162/CPSY_a_00002) + +**hBayesDM** (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference. + +## Quick Links + + - **Tutorial**: (R) + - **Mailing list**: + + - **Bug reports**: + - **Contributing**: See the + [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository. + + +## Getting Started + +### Prerequisite + +To install hBayesDM for R, **[RStan][rstan] needs to be properly installed before you proceed**. 
+For detailed instructions on having RStan ready prior to installing hBayesDM, please go to this link:
+https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started
+
+[rstan]: https://github.com/stan-dev/rstan
+
+### Installation
+
+The latest **stable** version of hBayesDM can be installed from CRAN by running the following command in R:
+
+```r
+install.packages("hBayesDM")  # Install hBayesDM from CRAN
+```
+
+or you can install it from GitHub with:
+
+```r
+# `devtools` is required to install hBayesDM from GitHub
+if (!require(devtools)) install.packages("devtools")
+
+devtools::install_github("CCS-Lab/hBayesDM/R")
+```
+
+If you want to use the latest *development* version of hBayesDM, run the following in R:
+
+```r
+# `devtools` is required to install hBayesDM from GitHub
+if (!require(devtools)) install.packages("devtools")
+
+devtools::install_github("CCS-Lab/hBayesDM/R@develop")
+```
+
+### Building at once
+
+By default, you will have to wait for compilation when you run each model for the first time.
+If you plan on running several different models and want to pre-build all models during installation time,
+set an environment variable `BUILD_ALL` to `true`, like the following.
+We highly recommend you only do so when you have multiple cores available,
+since building all models at once takes quite a long time to complete.
+
+```r
+Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
+Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 cores for build (or any other number you want)
+
+install.packages("hBayesDM")  # Install from CRAN
+# or
+devtools::install_github("CCS-Lab/hBayesDM/R")  # Install from GitHub
+```
+
+## Citation
+
+If you used hBayesDM or some of its code for your research, please cite [this
+paper][paper]:
+
+```bibtex
+@article{hBayesDM,
+  title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package},
+  author = {Ahn, Woo-Young and Haines, Nathaniel and Zhang, Lei},
+  journal = {Computational Psychiatry},
+  year = {2017},
+  volume = {1},
+  pages = {24--57},
+  publisher = {MIT Press},
+  url = {doi:10.1162/CPSY_a_00002},
+}
+```
+
+[paper]: https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002
diff --git a/R/README.md b/R/README.md
new file mode 100644
index 00000000..203a5bfd
--- /dev/null
+++ b/R/README.md
@@ -0,0 +1,102 @@
+
+# hBayesDM
+
+[![Project Status: Active – The project has reached a stable, usable
+state and is being actively
+developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active)
+[![Build
+Status](https://travis-ci.org/CCS-Lab/hBayesDM.svg?branch=master)](https://travis-ci.org/CCS-Lab/hBayesDM)
+[![CRAN Latest
+Release](https://www.r-pkg.org/badges/version-last-release/hBayesDM)](https://cran.r-project.org/package=hBayesDM)
+[![Downloads](https://cranlogs.r-pkg.org/badges/grand-total/hBayesDM)](https://cran.r-project.org/web/packages/hBayesDM/index.html)
+[![DOI](https://zenodo.org/badge/doi/10.1162/CPSY_a_00002.svg)](https://doi.org/10.1162/CPSY_a_00002)
+
+**hBayesDM** (hierarchical Bayesian modeling of Decision-Making tasks)
+is a user-friendly package that offers hierarchical Bayesian analysis of
+various computational models on an array of decision-making tasks.
+hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference.
+
+## Quick Links
+
+  - **Tutorial**:  (R)
+  - **Mailing list**:
+    
+  - **Bug reports**: <https://github.com/CCS-Lab/hBayesDM/issues>
+  - **Contributing**: See the
+    [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository.
+
+## Getting Started
+
+### Prerequisite
+
+To install hBayesDM for R, **[RStan](https://github.com/stan-dev/rstan)
+needs to be properly installed before you proceed**. For detailed
+instructions on having RStan ready prior to installing hBayesDM, please
+go to this link:
+<https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started>
+
+### Installation
+
+The latest **stable** version of hBayesDM can be installed from CRAN by
+running the following command in R:
+
+``` r
+install.packages("hBayesDM")  # Install hBayesDM from CRAN
+```
+
+or you can install it from GitHub with:
+
+``` r
+# `devtools` is required to install hBayesDM from GitHub
+if (!require(devtools)) install.packages("devtools")
+
+devtools::install_github("CCS-Lab/hBayesDM/R")
+```
+
+If you want to use the latest *development* version of hBayesDM, run
+the following in R:
+
+``` r
+# `devtools` is required to install hBayesDM from GitHub
+if (!require(devtools)) install.packages("devtools")
+
+devtools::install_github("CCS-Lab/hBayesDM/R@develop")
+```
+
+### Building at once
+
+By default, you will have to wait for compilation when you run each
+model for the first time. If you plan on running several different
+models and want to pre-build all models during installation time, set an
+environment variable `BUILD_ALL` to `true`, like the following. We
+highly recommend you only do so when you have multiple cores available,
+since building all models at once takes quite a long time to
+complete.
+
+``` r
+Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
+Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 cores for build (or any other number you want)
+
+install.packages("hBayesDM")  # Install from CRAN
+# or
+devtools::install_github("CCS-Lab/hBayesDM/R")  # Install from GitHub
+```
+
+## Citation
+
+If you used hBayesDM or some of its code for your research, please cite
+[this
+paper](https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002):
+
+``` bibtex
+@article{hBayesDM,
+  title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package},
+  author = {Ahn, Woo-Young and Haines, Nathaniel and Zhang, Lei},
+  journal = {Computational Psychiatry},
+  year = {2017},
+  volume = {1},
+  pages = {24--57},
+  publisher = {MIT Press},
+  url = {doi:10.1162/CPSY_a_00002},
+}
+```
From 44dff0124952c9c215c03385a6faff3e3dc4964b Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Wed, 21 Aug 2019 02:29:31 +0900
Subject: [PATCH 045/163] Update R codes with generated ones

---
 R/DESCRIPTION                   |   5 +-
 R/NAMESPACE                     |   2 -
 R/R/Untitled.ipynb              | 430 --------------------------------
 R/R/bandit2arm_delta.R          |  11 +-
 R/R/bandit4arm2_kalman_filter.R |  13 +-
 R/R/bandit4arm_2par_lapse.R     |  13 +-
 R/R/bandit4arm_4par.R           |  13 +-
 R/R/bandit4arm_lapse.R          |  13 +-
 R/R/bandit4arm_lapse_decay.R    |  13 +-
 R/R/bandit4arm_singleA_lapse.R  |  13 +-
 R/R/bart_par4.R                 |  19 +-
 R/R/choiceRT_ddm.R              |  15 +-
 R/R/choiceRT_ddm_single.R       |  15 +-
 R/R/cra_exp.R                   |  19 +-
 R/R/cra_linear.R                |  19 +-
 R/R/dbdm_prob_weight.R          |  23 +-
 R/R/dd_cs.R                     |  17 +-
 R/R/dd_cs_single.R              |  17 +-
 R/R/dd_exp.R                    |  17 +-
 R/R/dd_hyperbolic.R             |  17 +-
 R/R/dd_hyperbolic_single.R      |  17 +-
 R/R/gng_m1.R                    |  13 +-
 R/R/gng_m2.R                    |  13 +-
 R/R/gng_m3.R                    |  13 +-
 R/R/gng_m4.R                    |  13 +-
 R/R/hBayesDM.R                  |   5 +-
 R/R/hBayesDM_model.R            |   1 -
 R/R/igt_orl.R                   |  17 +-
 R/R/igt_pvl_decay.R             |  15 +-
 R/R/igt_pvl_delta.R             |  15 +-
 R/R/igt_vpp.R                   |  15 +-
 R/R/peer_ocu.R                  |  23 +-
 R/R/preprocess_funcs.R          |   3 +
 R/R/prl_ewa.R                   |  13 +-
 R/R/prl_fictitious.R            |  13 +-
 R/R/prl_fictitious_multipleB.R  |  15 +-
 R/R/prl_fictitious_rp.R         |  13 +-
R/R/prl_fictitious_rp_woa.R | 13 +- R/R/prl_fictitious_woa.R | 13 +- R/R/prl_rp.R | 13 +- R/R/prl_rp_multipleB.R | 15 +- R/R/pst_gainloss_Q.R | 15 +- R/R/ra_noLA.R | 15 +- R/R/ra_noRA.R | 15 +- R/R/ra_prospect.R | 15 +- R/R/rdt_happiness.R | 25 +- R/R/ts_par4.R | 17 +- R/R/ts_par6.R | 17 +- R/R/ts_par7.R | 17 +- R/R/ug_bayes.R | 11 +- R/R/ug_delta.R | 11 +- R/R/wcs_sql.R | 13 +- R/R/zzz.R | 3 +- 53 files changed, 384 insertions(+), 765 deletions(-) delete mode 100644 R/R/Untitled.ipynb diff --git a/R/DESCRIPTION b/R/DESCRIPTION index 3933f9fb..4a5607e1 100644 --- a/R/DESCRIPTION +++ b/R/DESCRIPTION @@ -1,6 +1,6 @@ Package: hBayesDM Title: Hierarchical Bayesian Modeling of Decision-Making Tasks -Version: 0.7.2 +Version: 0.7.2.9000 Date: 2019-02-11 Author: Woo-Young Ahn [aut, cre], @@ -39,7 +39,7 @@ LinkingTo: RcppEigen (>= 0.3.3.3.0), rstan (>= 2.18.1), StanHeaders (>= 2.18.0) -URL: https://rpubs.com/CCSL/hBayesDM +URL: https://github.com/CCS-Lab/hBayesDM BugReports: https://github.com/CCS-Lab/hBayesDM/issues License: GPL-3 LazyData: true @@ -49,6 +49,7 @@ RoxygenNote: 6.1.1 SystemRequirements: GNU make Collate: 'HDIofMCMC.R' + 'preprocess_funcs.R' 'stanmodels.R' 'settings.R' 'hBayesDM_model.R' diff --git a/R/NAMESPACE b/R/NAMESPACE index 9e39cf9d..983d7a4f 100644 --- a/R/NAMESPACE +++ b/R/NAMESPACE @@ -28,7 +28,6 @@ export(gng_m1) export(gng_m2) export(gng_m3) export(gng_m4) -export(hBayesDM_model) export(igt_orl) export(igt_pvl_decay) export(igt_pvl_delta) @@ -87,7 +86,6 @@ importFrom(rstan,stan_plot) importFrom(rstan,summary) importFrom(rstan,traceplot) importFrom(rstan,vb) -importFrom(stats,aggregate) importFrom(stats,complete.cases) importFrom(stats,density) importFrom(stats,median) diff --git a/R/R/Untitled.ipynb b/R/R/Untitled.ipynb deleted file mode 100644 index 1b666442..00000000 --- a/R/R/Untitled.ipynb +++ /dev/null @@ -1,430 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 73, - "metadata": {}, - "outputs": [], - "source": [ - "import re\n", - "\n", - "regex = r\"^(?P\\w+) <- hBayesDM_model\\(.+?preprocess_func = (?Pfunction.+?})\\n\\)\"" - ] - }, - { - "cell_type": "code", - "execution_count": 74, - "metadata": {}, - "outputs": [], - "source": [ - "from pathlib import Path\n", - "\n", - "path_cwd = Path.cwd()" - ] - }, - { - "cell_type": "code", - "execution_count": 75, - "metadata": {}, - "outputs": [], - "source": [ - "fns = sorted(path_cwd.glob('*.R'))" - ] - }, - { - "cell_type": "code", - "execution_count": 76, - "metadata": {}, - "outputs": [], - "source": [ - "matches = {}\n", - "for fn in fns:\n", - " with open(fn, 'r') as f:\n", - " codes = f.readlines()\n", - "\n", - " match = list(re.finditer(regex, ''.join(codes), re.MULTILINE | re.DOTALL))\n", - " if len(match) > 0:\n", - " matches[fn.name] = {\n", - " 'model': match[0].group('model'),\n", - " 'func': match[0].group('func').replace('\\n ', '\\n')\n", - " }" - ] - }, - { - "cell_type": "code", - "execution_count": 79, - "metadata": {}, - "outputs": [], - "source": [ - "tasks = sorted(set([k.split('.')[0].split('_')[0] for k in matches.keys()]))" - ] - }, - { - "cell_type": "code", - "execution_count": 84, - "metadata": {}, - "outputs": [], - "source": [ - "funcs_task = {}\n", - "for k, v in matches.items():\n", - " task = k.split('.')[0].split('_')[0]\n", - " code = '{task}_preprocess_func <- {func}'\\\n", - " .format(model=v['model'], task=task, func=v['func'])\n", - " if task not in funcs_task:\n", - " funcs_task[task] = {v['model']: code}\n", - " elif code not in 
funcs_task[task].values():\n", - " funcs_task[task][v['model']] = code" - ] - }, - { - "cell_type": "code", - "execution_count": 88, - "metadata": {}, - "outputs": [], - "source": [ - "funcs = {t: [f'# From {k}\\n' + v for k, v in kv.items()] for t, kv in funcs_task.items()}" - ] - }, - { - "cell_type": "code", - "execution_count": 92, - "metadata": {}, - "outputs": [], - "source": [ - "with open('preprocess_funcs.R', 'w') as f:\n", - " for t, fs in funcs.items():\n", - " f.write(f'#### {t}\\n\\n')\n", - " for func in fs:\n", - " f.writelines(func)\n", - " f.write('\\n\\n')\n", - " f.write('\\n\\n\\n')" - ] - }, - { - "cell_type": "code", - "execution_count": 82, - "metadata": {}, - "outputs": [], - "source": [ - "import difflib" - ] - }, - { - "cell_type": "code", - "execution_count": 83, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "choiceRT\n", - "*** \n", - "\n", - "--- \n", - "\n", - "***************\n", - "\n", - "*** 1,40 ****\n", - "\n", - " choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) {\n", - "! # Use raw_data as a data.frame\n", - "! raw_data <- as.data.frame(raw_data)\n", - " \n", - "! # Use general_info of raw_data\n", - "! subjs <- general_info$subjs\n", - "! n_subj <- general_info$n_subj\n", - "! \n", - "! # Number of upper and lower boundary responses\n", - "! Nu <- with(raw_data, aggregate(choice == 2, by = list(y = subjid), FUN = sum)[[\"x\"]])\n", - "! Nl <- with(raw_data, aggregate(choice == 1, by = list(y = subjid), FUN = sum)[[\"x\"]])\n", - "! \n", - "! # Reaction times for upper and lower boundary responses\n", - "! RTu <- array(-1, c(n_subj, max(Nu)))\n", - "! RTl <- array(-1, c(n_subj, max(Nl)))\n", - "! for (i in 1:n_subj) {\n", - "! subj <- subjs[i]\n", - "! subj_data <- subset(raw_data, raw_data$subjid == subj)\n", - "! \n", - "! RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 0's\n", - "! RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation\n", - "! }\n", - "! \n", - "! # Minimum reaction time\n", - "! minRT <- with(raw_data, aggregate(rt, by = list(y = subjid), FUN = min)[[\"x\"]])\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - "! N = n_subj, # Number of subjects\n", - "! Nu_max = max(Nu), # Max (across subjects) number of upper boundary responses\n", - "! Nl_max = max(Nl), # Max (across subjects) number of lower boundary responses\n", - "! Nu = Nu, # Number of upper boundary responses for each subject\n", - "! Nl = Nl, # Number of lower boundary responses for each subject\n", - "! RTu = RTu, # Upper boundary response times\n", - "! RTl = RTl, # Lower boundary response times\n", - "! minRT = minRT, # Minimum RT for each subject\n", - "! RTbound = RTbound # Lower bound of RT across all subjects (e.g., 0.1 second)\n", - " )\n", - " \n", - " # Returned data_list will directly be passed to Stan\n", - "--- 1,18 ----\n", - "\n", - " choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) {\n", - "! # Currently class(raw_data) == \"data.table\"\n", - " \n", - "! # Data.tables for upper and lower boundary responses\n", - "! DT_upper <- raw_data[choice == 2]\n", - "! DT_lower <- raw_data[choice == 1]\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - "! Nu = nrow(DT_upper), # Number of upper boundary responses\n", - "! Nl = nrow(DT_lower), # Number of lower boundary responses\n", - "! 
RTu = DT_upper$rt, # Upper boundary response times\n", - "! RTl = DT_lower$rt, # Lower boundary response times\n", - "! minRT = min(raw_data$rt), # Minimum RT\n", - "! RTbound = RTbound # Lower bound of RT (e.g., 0.1 second)\n", - " )\n", - " \n", - " # Returned data_list will directly be passed to Stan\n", - "\n", - "\n", - "\n", - "################\n", - "\n", - "\n", - "\n", - "dd\n", - "*** \n", - "\n", - "--- \n", - "\n", - "***************\n", - "\n", - "*** 2,36 ****\n", - "\n", - " # Currently class(raw_data) == \"data.table\"\n", - " \n", - " # Use general_info of raw_data\n", - "- subjs <- general_info$subjs\n", - "- n_subj <- general_info$n_subj\n", - " t_subjs <- general_info$t_subjs\n", - "- t_max <- general_info$t_max\n", - " \n", - "! # Initialize (model-specific) data arrays\n", - "! delay_later <- array( 0, c(n_subj, t_max))\n", - "! amount_later <- array( 0, c(n_subj, t_max))\n", - "! delay_sooner <- array( 0, c(n_subj, t_max))\n", - "! amount_sooner <- array( 0, c(n_subj, t_max))\n", - "! choice <- array(-1, c(n_subj, t_max))\n", - "! \n", - "! # Write from raw_data to the data arrays\n", - "! for (i in 1:n_subj) {\n", - "! subj <- subjs[i]\n", - "! t <- t_subjs[i]\n", - "! DT_subj <- raw_data[subjid == subj]\n", - "! \n", - "! delay_later[i, 1:t] <- DT_subj$delaylater\n", - "! amount_later[i, 1:t] <- DT_subj$amountlater\n", - "! delay_sooner[i, 1:t] <- DT_subj$delaysooner\n", - "! amount_sooner[i, 1:t] <- DT_subj$amountsooner\n", - "! choice[i, 1:t] <- DT_subj$choice\n", - "! }\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - "- N = n_subj,\n", - "- T = t_max,\n", - " Tsubj = t_subjs,\n", - " delay_later = delay_later,\n", - " amount_later = amount_later,\n", - "--- 2,18 ----\n", - "\n", - " # Currently class(raw_data) == \"data.table\"\n", - " \n", - " # Use general_info of raw_data\n", - " t_subjs <- general_info$t_subjs\n", - " \n", - "! # Extract from raw_data\n", - "! delay_later <- raw_data$delaylater\n", - "! amount_later <- raw_data$amountlater\n", - "! delay_sooner <- raw_data$delaysooner\n", - "! amount_sooner <- raw_data$amountsooner\n", - "! choice <- raw_data$choice\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - " Tsubj = t_subjs,\n", - " delay_later = delay_later,\n", - " amount_later = amount_later,\n", - "\n", - "\n", - "\n", - "################\n", - "\n", - "\n", - "\n", - "igt\n", - "*** \n", - "\n", - "--- \n", - "\n", - "***************\n", - "\n", - "*** 23,34 ****\n", - "\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - "! N = n_subj,\n", - "! T = t_max,\n", - "! Tsubj = t_subjs,\n", - "! choice = Ydata,\n", - "! outcome = RLmatrix / payscale,\n", - "! sign_out = sign(RLmatrix)\n", - " )\n", - " \n", - " # Returned data_list will directly be passed to Stan\n", - "--- 23,33 ----\n", - "\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - "! N = n_subj,\n", - "! T = t_max,\n", - "! Tsubj = t_subjs,\n", - "! choice = Ydata,\n", - "! 
outcome = RLmatrix / payscale\n", - " )\n", - " \n", - " # Returned data_list will directly be passed to Stan\n", - "\n", - "\n", - "\n", - "################\n", - "\n", - "\n", - "\n", - "prl\n", - "*** \n", - "\n", - "--- \n", - "\n", - "***************\n", - "\n", - "*** 4,29 ****\n", - "\n", - " # Use general_info of raw_data\n", - " subjs <- general_info$subjs\n", - " n_subj <- general_info$n_subj\n", - " t_subjs <- general_info$t_subjs\n", - " t_max <- general_info$t_max\n", - " \n", - " # Initialize (model-specific) data arrays\n", - "! choice <- array(-1, c(n_subj, t_max))\n", - "! outcome <- array( 0, c(n_subj, t_max))\n", - " \n", - " # Write from raw_data to the data arrays\n", - " for (i in 1:n_subj) {\n", - " subj <- subjs[i]\n", - "- t <- t_subjs[i]\n", - " DT_subj <- raw_data[subjid == subj]\n", - " \n", - "! choice[i, 1:t] <- DT_subj$choice\n", - "! outcome[i, 1:t] <- sign(DT_subj$outcome) # use sign\n", - " }\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - " N = n_subj,\n", - " T = t_max,\n", - " Tsubj = t_subjs,\n", - " choice = choice,\n", - "--- 4,39 ----\n", - "\n", - " # Use general_info of raw_data\n", - " subjs <- general_info$subjs\n", - " n_subj <- general_info$n_subj\n", - "+ b_subjs <- general_info$b_subjs\n", - "+ b_max <- general_info$b_max\n", - " t_subjs <- general_info$t_subjs\n", - " t_max <- general_info$t_max\n", - " \n", - " # Initialize (model-specific) data arrays\n", - "! choice <- array(-1, c(n_subj, b_max, t_max))\n", - "! outcome <- array( 0, c(n_subj, b_max, t_max))\n", - " \n", - " # Write from raw_data to the data arrays\n", - " for (i in 1:n_subj) {\n", - " subj <- subjs[i]\n", - " DT_subj <- raw_data[subjid == subj]\n", - "+ blocks_of_subj <- unique(DT_subj$block)\n", - " \n", - "! for (b in 1:b_subjs[i]) {\n", - "! curr_block <- blocks_of_subj[b]\n", - "! DT_curr_block <- DT_subj[block == curr_block]\n", - "! t <- t_subjs[i, b]\n", - "! \n", - "! choice[i, b, 1:t] <- DT_curr_block$choice\n", - "! outcome[i, b, 1:t] <- sign(DT_curr_block$outcome) # use sign\n", - "! 
}\n", - " }\n", - " \n", - " # Wrap into a list for Stan\n", - " data_list <- list(\n", - " N = n_subj,\n", - "+ B = b_max,\n", - "+ Bsubj = b_subjs,\n", - " T = t_max,\n", - " Tsubj = t_subjs,\n", - " choice = choice,\n", - "\n", - "\n", - "\n", - "################\n", - "\n", - "\n", - "\n" - ] - } - ], - "source": [ - "for k, v in {k: v for k, v in funcs_task.items() if len(v) > 1}.items():\n", - " print(k)\n", - " before = funcs_task[k][0].split('\\n')\n", - " after = funcs_task[k][1].split('\\n')\n", - " for line in difflib.context_diff(before, after):\n", - " print(line)\n", - " print('\\n\\n\\n################\\n\\n\\n')" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.7.4" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/R/R/bandit2arm_delta.R b/R/R/bandit2arm_delta.R index 838e31fd..7afd87ab 100644 --- a/R/R/bandit2arm_delta.R +++ b/R/R/bandit2arm_delta.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION bandit2arm_delta #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 2-Armed Bandit Task +#' @templateVar TASK_CODE bandit2arm #' @templateVar TASK_CITE (Erev et al., 2010; Hertwig et al., 2004) #' @templateVar MODEL_NAME Rescorla-Wagner (Delta) Model +#' @templateVar MODEL_CODE delta #' @templateVar MODEL_CITE #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" -#' @templateVar PARAMETERS "A" (learning rate), "tau" (inverse temperature) +#' @templateVar PARAMETERS \code{A} (learning rate), \code{tau} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1 or 2.} -#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1 or 2.} +#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -39,4 +41,3 @@ bandit2arm_delta <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit2arm_preprocess_func) - diff --git a/R/R/bandit4arm2_kalman_filter.R b/R/R/bandit4arm2_kalman_filter.R index 934df7aa..8181b67d 100644 --- a/R/R/bandit4arm2_kalman_filter.R +++ b/R/R/bandit4arm2_kalman_filter.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION bandit4arm2_kalman_filter -#' @templateVar CONTRIBUTOR \href{https://zohyos7.github.io}{Yoonseo Zoh} +#' @templateVar CONTRIBUTOR \href{https://zohyos7.github.io}{Yoonseo Zoh} <\email{zohyos7@@gmail.com}> #' @templateVar TASK_NAME 4-Armed Bandit Task (modified) +#' @templateVar TASK_CODE bandit4arm2 #' 
@templateVar TASK_CITE #' @templateVar MODEL_NAME Kalman Filter +#' @templateVar MODEL_CODE kalman_filter #' @templateVar MODEL_CITE (Daw et al., 2006) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" -#' @templateVar PARAMETERS "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise) +#' @templateVar PARAMETERS \code{lambda} (decay factor), \code{theta} (decay center), \code{beta} (inverse softmax temperature), \code{mu0} (anticipated initial mean of all 4 options), \code{sigma0} (anticipated initial sd (uncertainty factor) of all 4 options), \code{sigmaD} (sd of diffusion noise) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -41,4 +43,3 @@ bandit4arm2_kalman_filter <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm2_preprocess_func) - diff --git a/R/R/bandit4arm_2par_lapse.R b/R/R/bandit4arm_2par_lapse.R index f5440d94..0db8e65e 100644 --- a/R/R/bandit4arm_2par_lapse.R +++ b/R/R/bandit4arm_2par_lapse.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION bandit4arm_2par_lapse #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CODE bandit4arm #' @templateVar TASK_CITE #' @templateVar MODEL_NAME 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) +#' @templateVar MODEL_CODE 2par_lapse #' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise) +#' @templateVar PARAMETERS \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{xi} (noise) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 
0, -50).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} +#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -39,4 +41,3 @@ bandit4arm_2par_lapse <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm_preprocess_func) - diff --git a/R/R/bandit4arm_4par.R b/R/R/bandit4arm_4par.R index 2c706192..7c61a6c1 100644 --- a/R/R/bandit4arm_4par.R +++ b/R/R/bandit4arm_4par.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION bandit4arm_4par #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CODE bandit4arm #' @templateVar TASK_CITE #' @templateVar MODEL_NAME 4 Parameter Model, without C (choice perseveration) +#' @templateVar MODEL_CODE 4par #' @templateVar MODEL_CITE (Seymour et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity) +#' @templateVar PARAMETERS \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} +#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 
0, -50).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ bandit4arm_4par <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm_preprocess_func) - diff --git a/R/R/bandit4arm_lapse.R b/R/R/bandit4arm_lapse.R index d8659032..e24c0baa 100644 --- a/R/R/bandit4arm_lapse.R +++ b/R/R/bandit4arm_lapse.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION bandit4arm_lapse #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CODE bandit4arm #' @templateVar TASK_CITE #' @templateVar MODEL_NAME 5 Parameter Model, without C (choice perseveration) but with xi (noise) +#' @templateVar MODEL_CODE lapse #' @templateVar MODEL_CITE (Seymour et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise) +#' @templateVar PARAMETERS \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} +#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -41,4 +43,3 @@ bandit4arm_lapse <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm_preprocess_func) - diff --git a/R/R/bandit4arm_lapse_decay.R b/R/R/bandit4arm_lapse_decay.R index 2979dd67..4f632c2a 100644 --- a/R/R/bandit4arm_lapse_decay.R +++ b/R/R/bandit4arm_lapse_decay.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION bandit4arm_lapse_decay #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CODE bandit4arm #' @templateVar TASK_CITE #' @templateVar MODEL_NAME 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). 
+#' @templateVar MODEL_CODE lapse_decay #' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate) +#' @templateVar PARAMETERS \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise), \code{d} (decay rate) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} +#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -42,4 +44,3 @@ bandit4arm_lapse_decay <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm_preprocess_func) - diff --git a/R/R/bandit4arm_singleA_lapse.R b/R/R/bandit4arm_singleA_lapse.R index 21a9da22..4da4ddf2 100644 --- a/R/R/bandit4arm_singleA_lapse.R +++ b/R/R/bandit4arm_singleA_lapse.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION bandit4arm_singleA_lapse #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME 4-Armed Bandit Task +#' @templateVar TASK_CODE bandit4arm #' @templateVar TASK_CITE #' @templateVar MODEL_NAME 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. +#' @templateVar MODEL_CODE singleA_lapse #' @templateVar MODEL_CITE (Aylward et al., 2018) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise) +#' @templateVar PARAMETERS \code{A} (learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 
0, -50).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} +#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ bandit4arm_singleA_lapse <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bandit4arm_preprocess_func) - diff --git a/R/R/bart_par4.R b/R/R/bart_par4.R index a21fbea1..7b152dea 100644 --- a/R/R/bart_par4.R +++ b/R/R/bart_par4.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION bart_par4 -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} , \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} , \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee} , \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh} , \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee} , \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}>, \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee} <\email{aylee2008@@naver.com}>, \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh} <\email{ows0104@@gmail.com}>, \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee} <\email{nicole.lee2001@@gmail.com}>, \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang} <\email{andy627robo@@naver.com}> #' @templateVar TASK_NAME Balloon Analogue Risk Task -#' @templateVar TASK_CITE (van Ravenzwaaij et al., 2011) -#' @templateVar MODEL_NAME Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters -#' @templateVar MODEL_CITE +#' @templateVar TASK_CODE bart +#' @templateVar TASK_CITE +#' @templateVar MODEL_NAME Re-parameterized version of BART model with 4 parameters +#' @templateVar MODEL_CODE par4 +#' @templateVar MODEL_CITE (van Ravenzwaaij et al., 2011) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "pumps", "explosion" -#' @templateVar PARAMETERS "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature) +#' @templateVar PARAMETERS \code{phi} (prior belief of balloon not bursting), \code{eta} (updating rate), \code{gam} (risk-taking parameter), \code{tau} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"pumps"}{The number of pumps.} -#' @templateVar DETAILS_DATA_3 \item{"explosion"}{0: intact, 1: burst} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{pumps}{The number of pumps.} +#' @templateVar DETAILS_DATA_3 \item{explosion}{0: intact, 1: burst} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' 
@template model-documentation @@ -39,4 +41,3 @@ bart_par4 <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = bart_preprocess_func) - diff --git a/R/R/choiceRT_ddm.R b/R/R/choiceRT_ddm.R index 1abe5889..2c7ca9c9 100644 --- a/R/R/choiceRT_ddm.R +++ b/R/R/choiceRT_ddm.R @@ -1,20 +1,22 @@ #' @templateVar MODEL_FUNCTION choiceRT_ddm #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Choice Reaction Time Task +#' @templateVar TASK_CODE choiceRT #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Drift Diffusion Model +#' @templateVar MODEL_CODE ddm #' @templateVar MODEL_CITE (Ratcliff, 1978) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "RT" -#' @templateVar PARAMETERS "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time) +#' @templateVar PARAMETERS \code{alpha} (boundary separation), \code{beta} (bias), \code{delta} (drift rate), \code{tau} (non-decision time) #' @templateVar REGRESSORS #' @templateVar POSTPREDS #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} -#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} +#' @templateVar DETAILS_DATA_3 \item{RT}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} #' @templateVar LENGTH_ADDITIONAL_ARGS 1 -#' @templateVar ADDITIONAL_ARGS_1 \strong{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). +#' @templateVar ADDITIONAL_ARGS_1 \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).} #' #' @template model-documentation #' @@ -22,7 +24,7 @@ #' @include hBayesDM_model.R #' @include preprocess_funcs.R #' -#' @description +#' @note #' \strong{Notes:} #' Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. #' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. 
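For orientation, the RTbound argument documented above is an additional model argument rather than a data column. A minimal call sketch, assuming the packaged "example" data set and placeholder sampler settings (illustrative values, not recommendations):

library(hBayesDM)

# Fit the hierarchical DDM; RTbound is the lower bound on reaction times
# in seconds and defaults to 0.1 (100 ms), as documented above.
fit <- choiceRT_ddm(
  data    = "example",
  niter   = 2000,
  nwarmup = 1000,
  nchain  = 4,
  ncore   = 4,
  RTbound = 0.1
)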
@@ -45,4 +47,3 @@ choiceRT_ddm <- hBayesDM_model( regressors = NULL, postpreds = NULL, preprocess_func = choiceRT_preprocess_func) - diff --git a/R/R/choiceRT_ddm_single.R b/R/R/choiceRT_ddm_single.R index dd426c6e..d80f84c8 100644 --- a/R/R/choiceRT_ddm_single.R +++ b/R/R/choiceRT_ddm_single.R @@ -1,20 +1,22 @@ #' @templateVar MODEL_FUNCTION choiceRT_ddm_single #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Choice Reaction Time Task +#' @templateVar TASK_CODE choiceRT #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Drift Diffusion Model +#' @templateVar MODEL_CODE ddm #' @templateVar MODEL_CITE (Ratcliff, 1978) #' @templateVar MODEL_TYPE Individual #' @templateVar DATA_COLUMNS "subjID", "choice", "RT" -#' @templateVar PARAMETERS "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time) +#' @templateVar PARAMETERS \code{alpha} (boundary separation), \code{beta} (bias), \code{delta} (drift rate), \code{tau} (non-decision time) #' @templateVar REGRESSORS #' @templateVar POSTPREDS #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} -#' @templateVar DETAILS_DATA_3 \item{"RT"}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} +#' @templateVar DETAILS_DATA_3 \item{RT}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} #' @templateVar LENGTH_ADDITIONAL_ARGS 1 -#' @templateVar ADDITIONAL_ARGS_1 \strong{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds). +#' @templateVar ADDITIONAL_ARGS_1 \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).} #' #' @template model-documentation #' @@ -22,7 +24,7 @@ #' @include hBayesDM_model.R #' @include preprocess_funcs.R #' -#' @description +#' @note #' \strong{Notes:} #' Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. #' Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. 
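The single-subject variant expects the same three columns described above. A toy input in that layout, with made-up values for illustration:

# Hypothetical data for the choiceRT models: one row per trial,
# choice coded 1/2 for lower/upper boundary, RT in seconds.
toy_data <- data.frame(
  subjID = rep(101, 6),
  choice = c(1, 2, 1, 2, 2, 1),
  RT     = c(0.435, 0.383, 0.314, 0.309, 0.412, 0.377)
)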
@@ -45,4 +47,3 @@ choiceRT_ddm_single <- hBayesDM_model( regressors = NULL, postpreds = NULL, preprocess_func = choiceRT_single_preprocess_func) - diff --git a/R/R/cra_exp.R b/R/R/cra_exp.R index 0ef6c82f..55e62bb8 100644 --- a/R/R/cra_exp.R +++ b/R/R/cra_exp.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION cra_exp -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@@gmail.com}> #' @templateVar TASK_NAME Choice Under Risk and Ambiguity Task +#' @templateVar TASK_CODE cra #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Exponential Subjective Value Model +#' @templateVar MODEL_CODE exp #' @templateVar MODEL_CITE (Hsu et al., 2005) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice" -#' @templateVar PARAMETERS "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature) +#' @templateVar PARAMETERS \code{alpha} (risk attitude), \code{beta} (ambiguity attitude), \code{gamma} (inverse temperature) #' @templateVar REGRESSORS "sv", "sv_fix", "sv_var", "p_var" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"prob"}{Objective probability of the variable lottery.} -#' @templateVar DETAILS_DATA_3 \item{"ambig"}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} -#' @templateVar DETAILS_DATA_4 \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.} -#' @templateVar DETAILS_DATA_5 \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{prob}{Objective probability of the variable lottery.} +#' @templateVar DETAILS_DATA_3 \item{ambig}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} +#' @templateVar DETAILS_DATA_4 \item{reward_var}{Amount of reward in variable lottery. Assumed to be greater than zero.} +#' @templateVar DETAILS_DATA_5 \item{reward_fix}{Amount of reward in fixed lottery. 
Assumed to be greater than zero.} +#' @templateVar DETAILS_DATA_6 \item{choice}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -46,4 +48,3 @@ cra_exp <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = cra_preprocess_func) - diff --git a/R/R/cra_linear.R b/R/R/cra_linear.R index df765af2..f34d4c89 100644 --- a/R/R/cra_linear.R +++ b/R/R/cra_linear.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION cra_linear -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@@gmail.com}> #' @templateVar TASK_NAME Choice Under Risk and Ambiguity Task +#' @templateVar TASK_CODE cra #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Linear Subjective Value Model +#' @templateVar MODEL_CODE linear #' @templateVar MODEL_CITE (Levy et al., 2010) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice" -#' @templateVar PARAMETERS "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature) +#' @templateVar PARAMETERS \code{alpha} (risk attitude), \code{beta} (ambiguity attitude), \code{gamma} (inverse temperature) #' @templateVar REGRESSORS "sv", "sv_fix", "sv_var", "p_var" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"prob"}{Objective probability of the variable lottery.} -#' @templateVar DETAILS_DATA_3 \item{"ambig"}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} -#' @templateVar DETAILS_DATA_4 \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.} -#' @templateVar DETAILS_DATA_5 \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{prob}{Objective probability of the variable lottery.} +#' @templateVar DETAILS_DATA_3 \item{ambig}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} +#' @templateVar DETAILS_DATA_4 \item{reward_var}{Amount of reward in variable lottery. Assumed to be greater than zero.} +#' @templateVar DETAILS_DATA_5 \item{reward_fix}{Amount of reward in fixed lottery. 
Assumed to be greater than zero.}
+#' @templateVar DETAILS_DATA_6 \item{choice}{If the variable lottery was selected, choice == 1; otherwise choice == 0.}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -46,4 +48,3 @@ cra_linear <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = cra_preprocess_func)
-
diff --git a/R/R/dbdm_prob_weight.R b/R/R/dbdm_prob_weight.R
index 8ac39d55..2ec687cc 100644
--- a/R/R/dbdm_prob_weight.R
+++ b/R/R/dbdm_prob_weight.R
@@ -1,23 +1,25 @@
 #' @templateVar MODEL_FUNCTION dbdm_prob_weight
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh} <\email{zohyos7@@gmail.com}>
 #' @templateVar TASK_NAME Description Based Decison Making Task
+#' @templateVar TASK_CODE dbdm
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Probability Weight Function
+#' @templateVar MODEL_CODE prob_weight
 #' @templateVar MODEL_CITE (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"
-#' @templateVar PARAMETERS "tau" (probability weight function), "rho" (subject utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature)
+#' @templateVar PARAMETERS \code{tau} (probability weight function), \code{rho} (subject utility function), \code{lambda} (loss aversion parameter), \code{beta} (inverse softmax temperature)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 8
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"opt1hprob"}{Possiblity of getting higher value of outcome(opt1hval) when choosing option 1.}
-#' @templateVar DETAILS_DATA_3 \item{"opt2hprob"}{Possiblity of getting higher value of outcome(opt2hval) when choosing option 2.}
-#' @templateVar DETAILS_DATA_4 \item{"opt1hval"}{Possible (with opt1hprob probability) outcome of option 1.}
-#' @templateVar DETAILS_DATA_5 \item{"opt1lval"}{Possible (with (1 - opt1hprob) probability) outcome of option 1.}
-#' @templateVar DETAILS_DATA_6 \item{"opt2hval"}{Possible (with opt2hprob probability) outcome of option 2.}
-#' @templateVar DETAILS_DATA_7 \item{"opt2lval"}{Possible (with (1 - opt2hprob) probability) outcome of option 2.}
-#' @templateVar DETAILS_DATA_8 \item{"choice"}{If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{opt1hprob}{Possibility of getting higher value of outcome(opt1hval) when choosing option 1.}
+#' @templateVar DETAILS_DATA_3 \item{opt2hprob}{Possibility of getting higher value of outcome(opt2hval) when choosing option 2.}
+#' @templateVar DETAILS_DATA_4 \item{opt1hval}{Possible (with opt1hprob probability) outcome of option 1.}
+#' @templateVar DETAILS_DATA_5 \item{opt1lval}{Possible (with (1 - opt1hprob) probability) outcome of option 1.}
+#' @templateVar DETAILS_DATA_6 \item{opt2hval}{Possible (with opt2hprob probability) outcome of option 2.}
+#' @templateVar DETAILS_DATA_7 \item{opt2lval}{Possible (with (1 - opt2hprob) probability) outcome of option 2.}
+#' @templateVar DETAILS_DATA_8 \item{choice}{If option 1 was selected, choice == 1;
else if option 2 was selected, choice == 2.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -48,4 +50,3 @@ dbdm_prob_weight <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dbdm_preprocess_func) - diff --git a/R/R/dd_cs.R b/R/R/dd_cs.R index a27fd669..5d2bb5c0 100644 --- a/R/R/dd_cs.R +++ b/R/R/dd_cs.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION dd_cs #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CODE dd #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Constant-Sensitivity (CS) Model +#' @templateVar MODEL_CODE cs #' @templateVar MODEL_CITE (Ebert et al., 2007) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" -#' @templateVar PARAMETERS "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{r} (exponential discounting rate), \code{s} (impatience), \code{beta} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} -#' @templateVar DETAILS_DATA_3 \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} -#' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} -#' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} +#' @templateVar DETAILS_DATA_3 \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} +#' @templateVar DETAILS_DATA_4 \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} +#' @templateVar DETAILS_DATA_5 \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 
10).} +#' @templateVar DETAILS_DATA_6 \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -41,4 +43,3 @@ dd_cs <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dd_preprocess_func) - diff --git a/R/R/dd_cs_single.R b/R/R/dd_cs_single.R index 74699e6c..5306406b 100644 --- a/R/R/dd_cs_single.R +++ b/R/R/dd_cs_single.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION dd_cs_single #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CODE dd #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Constant-Sensitivity (CS) Model +#' @templateVar MODEL_CODE cs #' @templateVar MODEL_CITE (Ebert et al., 2007) #' @templateVar MODEL_TYPE Individual #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" -#' @templateVar PARAMETERS "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{r} (exponential discounting rate), \code{s} (impatience), \code{beta} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} -#' @templateVar DETAILS_DATA_3 \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} -#' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} -#' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} +#' @templateVar DETAILS_DATA_3 \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} +#' @templateVar DETAILS_DATA_4 \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} +#' @templateVar DETAILS_DATA_5 \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 
10).} +#' @templateVar DETAILS_DATA_6 \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -41,4 +43,3 @@ dd_cs_single <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dd_single_preprocess_func) - diff --git a/R/R/dd_exp.R b/R/R/dd_exp.R index 311d4f0f..6c720882 100644 --- a/R/R/dd_exp.R +++ b/R/R/dd_exp.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION dd_exp #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CODE dd #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Exponential Model +#' @templateVar MODEL_CODE exp #' @templateVar MODEL_CITE (Samuelson, 1937) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" -#' @templateVar PARAMETERS "r" (exponential discounting rate), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{r} (exponential discounting rate), \code{beta} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} -#' @templateVar DETAILS_DATA_3 \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} -#' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} -#' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} +#' @templateVar DETAILS_DATA_3 \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} +#' @templateVar DETAILS_DATA_4 \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} +#' @templateVar DETAILS_DATA_5 \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 
10).} +#' @templateVar DETAILS_DATA_6 \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ dd_exp <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dd_preprocess_func) - diff --git a/R/R/dd_hyperbolic.R b/R/R/dd_hyperbolic.R index fa180494..24fad886 100644 --- a/R/R/dd_hyperbolic.R +++ b/R/R/dd_hyperbolic.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION dd_hyperbolic #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CODE dd #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Hyperbolic Model +#' @templateVar MODEL_CODE hyperbolic #' @templateVar MODEL_CITE (Mazur, 1987) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" -#' @templateVar PARAMETERS "k" (discounting rate), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{k} (discounting rate), \code{beta} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} -#' @templateVar DETAILS_DATA_3 \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} -#' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} -#' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} +#' @templateVar DETAILS_DATA_3 \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} +#' @templateVar DETAILS_DATA_4 \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} +#' @templateVar DETAILS_DATA_5 \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 
10).} +#' @templateVar DETAILS_DATA_6 \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ dd_hyperbolic <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dd_preprocess_func) - diff --git a/R/R/dd_hyperbolic_single.R b/R/R/dd_hyperbolic_single.R index ea6e623d..46497855 100644 --- a/R/R/dd_hyperbolic_single.R +++ b/R/R/dd_hyperbolic_single.R @@ -1,21 +1,23 @@ #' @templateVar MODEL_FUNCTION dd_hyperbolic_single #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Delay Discounting Task +#' @templateVar TASK_CODE dd #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Hyperbolic Model +#' @templateVar MODEL_CODE hyperbolic #' @templateVar MODEL_CITE (Mazur, 1987) #' @templateVar MODEL_TYPE Individual #' @templateVar DATA_COLUMNS "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice" -#' @templateVar PARAMETERS "k" (discounting rate), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{k} (discounting rate), \code{beta} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 6 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} -#' @templateVar DETAILS_DATA_3 \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} -#' @templateVar DETAILS_DATA_4 \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} -#' @templateVar DETAILS_DATA_5 \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} -#' @templateVar DETAILS_DATA_6 \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} +#' @templateVar DETAILS_DATA_3 \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} +#' @templateVar DETAILS_DATA_4 \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} +#' @templateVar DETAILS_DATA_5 \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 
10).} +#' @templateVar DETAILS_DATA_6 \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ dd_hyperbolic_single <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = dd_single_preprocess_func) - diff --git a/R/R/gng_m1.R b/R/R/gng_m1.R index f2bb49b4..4e98bc0b 100644 --- a/R/R/gng_m1.R +++ b/R/R/gng_m1.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION gng_m1 #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CODE gng #' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise +#' @templateVar MODEL_CODE m1 #' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" -#' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "rho" (effective size) +#' @templateVar PARAMETERS \code{xi} (noise), \code{ep} (learning rate), \code{rho} (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} -#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} +#' @templateVar DETAILS_DATA_4 \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -44,4 +46,3 @@ gng_m1 <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = gng_preprocess_func) - diff --git a/R/R/gng_m2.R b/R/R/gng_m2.R index 278d8c8d..106b7346 100644 --- a/R/R/gng_m2.R +++ b/R/R/gng_m2.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION gng_m2 #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CODE gng #' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise + bias +#' @templateVar MODEL_CODE m2 #' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" -#' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size) +#' @templateVar PARAMETERS \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{rho} (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' 
@templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} -#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} +#' @templateVar DETAILS_DATA_4 \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -45,4 +47,3 @@ gng_m2 <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = gng_preprocess_func) - diff --git a/R/R/gng_m3.R b/R/R/gng_m3.R index 443d8f47..7460a8df 100644 --- a/R/R/gng_m3.R +++ b/R/R/gng_m3.R @@ -1,19 +1,21 @@ #' @templateVar MODEL_FUNCTION gng_m3 #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task +#' @templateVar TASK_CODE gng #' @templateVar TASK_CITE #' @templateVar MODEL_NAME RW + noise + bias + pi +#' @templateVar MODEL_CODE m3 #' @templateVar MODEL_CITE (Guitart-Masip et al., 2012) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome" -#' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size) +#' @templateVar PARAMETERS \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{pi} (Pavlovian bias), \code{rho} (effective size) #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo", "SV" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} -#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} +#' @templateVar DETAILS_DATA_4 \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -47,4 +49,3 @@ gng_m3 <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = gng_preprocess_func) - diff --git a/R/R/gng_m4.R b/R/R/gng_m4.R index bd024c77..101a65fc 100644 --- a/R/R/gng_m4.R 
diff --git a/R/R/gng_m4.R b/R/R/gng_m4.R
index bd024c77..101a65fc 100644
--- a/R/R/gng_m4.R
+++ b/R/R/gng_m4.R
@@ -1,19 +1,21 @@
 #' @templateVar MODEL_FUNCTION gng_m4
 #' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Orthogonalized Go/Nogo Task
+#' @templateVar TASK_CODE gng
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME RW (rew/pun) + noise + bias + pi
+#' @templateVar MODEL_CODE m4
 #' @templateVar MODEL_CITE (Cavanagh et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "cue", "keyPressed", "outcome"
-#' @templateVar PARAMETERS "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity)
+#' @templateVar PARAMETERS \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{pi} (Pavlovian bias), \code{rhoRew} (reward sensitivity), \code{rhoPun} (punishment sensitivity)
 #' @templateVar REGRESSORS "Qgo", "Qnogo", "Wgo", "Wnogo", "SV"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.}
-#' @templateVar DETAILS_DATA_3 \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).}
-#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.}
+#' @templateVar DETAILS_DATA_3 \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).}
+#' @templateVar DETAILS_DATA_4 \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -48,4 +50,3 @@ gng_m4 <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = gng_preprocess_func)
-
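The Go/Nogo entries above all render into the same documented calling pattern. As a minimal sketch of that interface, assuming the bundled "example" dataset and hBayesDM's standard fitting arguments (the iteration counts here are illustrative, not recommendations):

    library(hBayesDM)
    # Data columns follow the DETAILS_DATA entries above:
    # subjID, cue, keyPressed, outcome.
    output <- gng_m4(data = "example", niter = 2000, nwarmup = 1000,
                     nchain = 4, ncore = 4, modelRegressor = TRUE)
    rhat(output)                   # convergence diagnostics
    head(output$allIndPars)        # per-subject xi, ep, b, pi, rhoRew, rhoPun
    names(output$modelRegressor)   # the REGRESSORS above: Qgo, Qnogo, Wgo, Wnogo, SV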
diff --git a/R/R/hBayesDM.R b/R/R/hBayesDM.R
index 7bf3f1b1..329604e3 100644
--- a/R/R/hBayesDM.R
+++ b/R/R/hBayesDM.R
@@ -1,4 +1,5 @@
-#' Hierarchical Bayesian Modeling of Decision-Making Tasks
+#' @title Hierarchical Bayesian Modeling of Decision-Making Tasks
+#'
 #' @docType package
 #' @name hBayesDM-package
 #' @aliases hBayesDM
@@ -6,6 +7,7 @@
 #'
 #' @import methods
 #' @import Rcpp
+#'
 #' @description
 #' Fit an array of decision-making tasks with computational models in a hierarchical Bayesian framework. Can perform hierarchical Bayesian analysis of various computational models with a single line of coding.
 #' Bolded tasks, followed by their respective models, are itemized below.
@@ -71,5 +73,4 @@
 #'
 #' Lei Zhang \email{bnuzhanglei2008@@gmail.com}
 #'
-#'
 NULL
diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R
index 64fd43b1..8f9d733e 100644
--- a/R/R/hBayesDM_model.R
+++ b/R/R/hBayesDM_model.R
@@ -5,7 +5,6 @@
 #'
 #' Contributor: \href{https://ccs-lab.github.io/team/jethro-lee/}{Jethro Lee}
 #'
-#' @export
 #' @keywords internal
 #'
 #' @include settings.R
diff --git a/R/R/igt_orl.R b/R/R/igt_orl.R
index 93003251..a4fceaea 100644
--- a/R/R/igt_orl.R
+++ b/R/R/igt_orl.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION igt_orl
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines} <\email{haines.175@@osu.edu}>
 #' @templateVar TASK_NAME Iowa Gambling Task
+#' @templateVar TASK_CODE igt
 #' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Outcome-Representation Learning Model
+#' @templateVar MODEL_CODE orl
 #' @templateVar MODEL_CITE (Haines et al., 2018)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
-#' @templateVar PARAMETERS "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight)
+#' @templateVar PARAMETERS \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{K} (perseverance decay), \code{betaF} (outcome frequency weight), \code{betaP} (perseverance weight)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
-#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
-#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar ADDITIONAL_ARGS_1 \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
 #'
 #' @template model-documentation
 #'
@@ -44,4 +46,3 @@ igt_orl <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = igt_preprocess_func)
-
diff --git a/R/R/igt_pvl_decay.R b/R/R/igt_pvl_decay.R
index 859d6a1d..686fd2b3 100644
--- a/R/R/igt_pvl_decay.R
+++ b/R/R/igt_pvl_decay.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION igt_pvl_decay
 #' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Iowa Gambling Task
+#' @templateVar TASK_CODE igt
 #' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Decay-RI
+#' @templateVar MODEL_CODE pvl_decay
 #' @templateVar MODEL_CITE (Ahn et al., 2014)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
-#' @templateVar PARAMETERS "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion)
+#' @templateVar PARAMETERS \code{A} (decay rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
-#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
-#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar ADDITIONAL_ARGS_1 \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
 #'
 #' @template model-documentation
 #'
@@ -43,4 +45,3 @@ igt_pvl_decay <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = igt_preprocess_func)
-
diff --git a/R/R/igt_pvl_delta.R b/R/R/igt_pvl_delta.R
index 16782342..3a0b2e1b 100644
--- a/R/R/igt_pvl_delta.R
+++ b/R/R/igt_pvl_delta.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION igt_pvl_delta
 #' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Iowa Gambling Task
+#' @templateVar TASK_CODE igt
 #' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Delta
+#' @templateVar MODEL_CODE pvl_delta
 #' @templateVar MODEL_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
-#' @templateVar PARAMETERS "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion)
+#' @templateVar PARAMETERS \code{A} (learning rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
-#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
-#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar ADDITIONAL_ARGS_1 \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
 #'
 #' @template model-documentation
 #'
@@ -43,4 +45,3 @@ igt_pvl_delta <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = igt_preprocess_func)
-
diff --git a/R/R/igt_vpp.R b/R/R/igt_vpp.R
index 94cf6d86..8e525384 100644
--- a/R/R/igt_vpp.R
+++ b/R/R/igt_vpp.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION igt_vpp
 #' @templateVar CONTRIBUTOR
 #' @templateVar TASK_NAME Iowa Gambling Task
+#' @templateVar TASK_CODE igt
 #' @templateVar TASK_CITE (Ahn et al., 2008)
 #' @templateVar MODEL_NAME Value-Plus-Perseverance
+#' @templateVar MODEL_CODE vpp
 #' @templateVar MODEL_CITE (Worthy et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss"
-#' @templateVar PARAMETERS "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight)
+#' @templateVar PARAMETERS \code{A} (learning rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion), \code{epP} (gain impact), \code{epN} (loss impact), \code{K} (decay rate), \code{w} (RL weight)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
-#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
-#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+#' @templateVar DETAILS_DATA_3 \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+#' @templateVar DETAILS_DATA_4 \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
+#' @templateVar ADDITIONAL_ARGS_1 \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
 #'
 #' @template model-documentation
 #'
@@ -47,4 +49,3 @@ igt_vpp <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = igt_preprocess_func)
-
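All four IGT models above take the same payscale additional argument that these hunks re-document with \item{} markup. A minimal sketch of how it enters a fit, assuming the bundled "example" data (settings illustrative):

    library(hBayesDM)
    # Raw gains/losses are divided by payscale before fitting, so payoffs of
    # 50, 100, -50 enter the likelihood as 0.5, 1.0, -0.5 under the default 100.
    output <- igt_pvl_delta(data = "example", payscale = 100,
                            niter = 2000, nwarmup = 1000, nchain = 4)
    printFit(output)  # information-criterion summary of model fit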
diff --git a/R/R/peer_ocu.R b/R/R/peer_ocu.R
index 03f41942..871c74f3 100644
--- a/R/R/peer_ocu.R
+++ b/R/R/peer_ocu.R
@@ -1,23 +1,25 @@
 #' @templateVar MODEL_FUNCTION peer_ocu
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Peer Influence Task
+#' @templateVar TASK_CODE peer
 #' @templateVar TASK_CITE (Chung et al., 2015)
 #' @templateVar MODEL_NAME Other-Conferred Utility (OCU) Model
+#' @templateVar MODEL_CODE ocu
 #' @templateVar MODEL_CITE
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice"
-#' @templateVar PARAMETERS "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility)
+#' @templateVar PARAMETERS \code{rho} (risk preference), \code{tau} (inverse temperature), \code{ocu} (other-conferred utility)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 8
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"condition"}{0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).}
-#' @templateVar DETAILS_DATA_3 \item{"p_gamble"}{Probability of receiving a high payoff (same for both options).}
-#' @templateVar DETAILS_DATA_4 \item{"safe_Hpayoff"}{High payoff of the safe option.}
-#' @templateVar DETAILS_DATA_5 \item{"safe_Lpayoff"}{Low payoff of the safe option.}
-#' @templateVar DETAILS_DATA_6 \item{"risky_Hpayoff"}{High payoff of the risky option.}
-#' @templateVar DETAILS_DATA_7 \item{"risky_Lpayoff"}{Low payoff of the risky option.}
-#' @templateVar DETAILS_DATA_8 \item{"choice"}{Which option was chosen? 0: safe, 1: risky.}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{condition}{0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).}
+#' @templateVar DETAILS_DATA_3 \item{p_gamble}{Probability of receiving a high payoff (same for both options).}
+#' @templateVar DETAILS_DATA_4 \item{safe_Hpayoff}{High payoff of the safe option.}
+#' @templateVar DETAILS_DATA_5 \item{safe_Lpayoff}{Low payoff of the safe option.}
+#' @templateVar DETAILS_DATA_6 \item{risky_Hpayoff}{High payoff of the risky option.}
+#' @templateVar DETAILS_DATA_7 \item{risky_Lpayoff}{Low payoff of the risky option.}
+#' @templateVar DETAILS_DATA_8 \item{choice}{Which option was chosen? 0: safe, 1: risky.}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -43,4 +45,3 @@ peer_ocu <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = peer_preprocess_func)
-
diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R
index ff02d14c..d71778c1 100644
--- a/R/R/preprocess_funcs.R
+++ b/R/R/preprocess_funcs.R
@@ -1,3 +1,6 @@
+#' @noRd
+#' @keywords internal
+
 bandit2arm_preprocess_func <- function(raw_data, general_info) {
   # Currently class(raw_data) == "data.table"
diff --git a/R/R/prl_ewa.R b/R/R/prl_ewa.R
index fb8b1e42..e2526856 100644
--- a/R/R/prl_ewa.R
+++ b/R/R/prl_ewa.R
@@ -1,18 +1,20 @@
 #' @templateVar MODEL_FUNCTION prl_ewa
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Experience-Weighted Attraction Model
+#' @templateVar MODEL_CODE ewa
 #' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
-#' @templateVar PARAMETERS "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{phi} (1 - learning rate), \code{rho} (experience decay factor), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "ew_c", "ew_nc"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -43,4 +45,3 @@ prl_ewa <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_preprocess_func)
-
diff --git a/R/R/prl_fictitious.R b/R/R/prl_fictitious.R
index a4d0d966..076b036f 100644
--- a/R/R/prl_fictitious.R
+++ b/R/R/prl_fictitious.R
@@ -1,18 +1,20 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model
+#' @templateVar MODEL_CODE fictitious
 #' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
-#' @templateVar PARAMETERS "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{eta} (learning rate), \code{alpha} (indecision point), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -44,4 +46,3 @@ prl_fictitious <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_preprocess_func)
-
diff --git a/R/R/prl_fictitious_multipleB.R b/R/R/prl_fictitious_multipleB.R
index 6d5c59f9..1ac8a46b 100644
--- a/R/R/prl_fictitious_multipleB.R
+++ b/R/R/prl_fictitious_multipleB.R
@@ -1,19 +1,21 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_multipleB
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model
+#' @templateVar MODEL_CODE fictitious
 #' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Multiple-Block Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "block", "choice", "outcome"
-#' @templateVar PARAMETERS "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{eta} (learning rate), \code{alpha} (indecision point), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"block"}{A unique identifier for each of the multiple blocks within each subject.}
-#' @templateVar DETAILS_DATA_3 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{block}{A unique identifier for each of the multiple blocks within each subject.}
+#' @templateVar DETAILS_DATA_3 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_4 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -45,4 +47,3 @@ prl_fictitious_multipleB <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_multipleB_preprocess_func)
-
diff --git a/R/R/prl_fictitious_rp.R b/R/R/prl_fictitious_rp.R
index 6ab1b766..6414998e 100644
--- a/R/R/prl_fictitious_rp.R
+++ b/R/R/prl_fictitious_rp.R
@@ -1,18 +1,20 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_rp
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)
+#' @templateVar MODEL_CODE fictitious_rp
 #' @templateVar MODEL_CITE (Glascher et al., 2009; Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
-#' @templateVar PARAMETERS "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{eta_pos} (learning rate, +PE), \code{eta_neg} (learning rate, -PE), \code{alpha} (indecision point), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -47,4 +49,3 @@ prl_fictitious_rp <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_preprocess_func)
-
"outcome" -#' @templateVar PARAMETERS "eta" (learning rate), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{eta} (learning rate), \code{beta} (inverse temperature) #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.} -#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.} +#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -43,4 +45,3 @@ prl_fictitious_woa <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = prl_preprocess_func) - diff --git a/R/R/prl_rp.R b/R/R/prl_rp.R index 9eec71b6..5aa87bc5 100644 --- a/R/R/prl_rp.R +++ b/R/R/prl_rp.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION prl_rp -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}> #' @templateVar TASK_NAME Probabilistic Reversal Learning Task +#' @templateVar TASK_CODE prl #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Reward-Punishment Model +#' @templateVar MODEL_CODE rp #' @templateVar MODEL_CITE (Ouden et al., 2013) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" -#' @templateVar PARAMETERS "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature) +#' @templateVar PARAMETERS \code{Apun} (punishment learning rate), \code{Arew} (reward learning rate), \code{beta} (inverse temperature) #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe" #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.} -#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.} +#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -42,4 +44,3 @@ prl_rp <- hBayesDM_model( ), postpreds = c("y_pred"), preprocess_func = prl_preprocess_func) - diff 
diff --git a/R/R/prl_fictitious_woa.R b/R/R/prl_fictitious_woa.R
index 2aabbb82..d23ae926 100644
--- a/R/R/prl_fictitious_woa.R
+++ b/R/R/prl_fictitious_woa.R
@@ -1,18 +1,20 @@
 #' @templateVar MODEL_FUNCTION prl_fictitious_woa
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Fictitious Update Model, without alpha (indecision point)
+#' @templateVar MODEL_CODE fictitious_woa
 #' @templateVar MODEL_CITE (Glascher et al., 2009)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
-#' @templateVar PARAMETERS "eta" (learning rate), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{eta} (learning rate), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe_c", "pe_nc", "dv"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -43,4 +45,3 @@ prl_fictitious_woa <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_preprocess_func)
-
diff --git a/R/R/prl_rp.R b/R/R/prl_rp.R
index 9eec71b6..5aa87bc5 100644
--- a/R/R/prl_rp.R
+++ b/R/R/prl_rp.R
@@ -1,18 +1,20 @@
 #' @templateVar MODEL_FUNCTION prl_rp
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Reward-Punishment Model
+#' @templateVar MODEL_CODE rp
 #' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome"
-#' @templateVar PARAMETERS "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{Apun} (punishment learning rate), \code{Arew} (reward learning rate), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 3
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_3 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_3 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -42,4 +44,3 @@ prl_rp <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_preprocess_func)
-
diff --git a/R/R/prl_rp_multipleB.R b/R/R/prl_rp_multipleB.R
index 22c2ac33..f2f96361 100644
--- a/R/R/prl_rp_multipleB.R
+++ b/R/R/prl_rp_multipleB.R
@@ -1,19 +1,21 @@
 #' @templateVar MODEL_FUNCTION prl_rp_multipleB
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} , \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Reversal Learning Task
+#' @templateVar TASK_CODE prl
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Reward-Punishment Model
+#' @templateVar MODEL_CODE rp
 #' @templateVar MODEL_CITE (Ouden et al., 2013)
 #' @templateVar MODEL_TYPE Multiple-Block Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "block", "choice", "outcome"
-#' @templateVar PARAMETERS "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{Apun} (punishment learning rate), \code{Arew} (reward learning rate), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS "ev_c", "ev_nc", "pe"
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"block"}{A unique identifier for each of the multiple blocks within each subject.}
-#' @templateVar DETAILS_DATA_3 \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-#' @templateVar DETAILS_DATA_4 \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{block}{A unique identifier for each of the multiple blocks within each subject.}
+#' @templateVar DETAILS_DATA_3 \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+#' @templateVar DETAILS_DATA_4 \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -43,4 +45,3 @@ prl_rp_multipleB <- hBayesDM_model(
   ),
   postpreds = c("y_pred"),
   preprocess_func = prl_multipleB_preprocess_func)
-
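Because every PRL variant above shares the same core data columns (subjID, choice, outcome, plus block for the multipleB versions), the usual workflow is to fit several variants and compare them; a minimal sketch, assuming the bundled "example" data (settings illustrative):

    library(hBayesDM)
    fit_ewa <- prl_ewa(data = "example", niter = 2000, nwarmup = 1000, nchain = 4)
    fit_rp  <- prl_rp(data = "example", niter = 2000, nwarmup = 1000, nchain = 4)
    # Side-by-side information criteria; lower LOOIC indicates better fit.
    printFit(fit_ewa, fit_rp)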
diff --git a/R/R/pst_gainloss_Q.R b/R/R/pst_gainloss_Q.R
index d58eb67e..3f931980 100644
--- a/R/R/pst_gainloss_Q.R
+++ b/R/R/pst_gainloss_Q.R
@@ -1,19 +1,21 @@
 #' @templateVar MODEL_FUNCTION pst_gainloss_Q
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@@gmail.com}>
 #' @templateVar TASK_NAME Probabilistic Selection Task
+#' @templateVar TASK_CODE pst
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Gain-Loss Q Learning Model
+#' @templateVar MODEL_CODE gainloss_Q
 #' @templateVar MODEL_CITE (Frank et al., 2007)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "type", "choice", "reward"
-#' @templateVar PARAMETERS "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature)
+#' @templateVar PARAMETERS \code{alpha_pos} (learning rate for positive feedbacks), \code{alpha_neg} (learning rate for negative feedbacks), \code{beta} (inverse temperature)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"type"}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below:\cr\cr ===== ======== ==================\cr Code Stimulus Probability to win\cr ===== ======== ==================\cr 1 A 80%\cr 2 B 20%\cr 3 C 70%\cr 4 D 30%\cr 5 E 60%\cr 6 F 40%\cr ===== ======== ==================\cr\cr The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
-#' @templateVar DETAILS_DATA_3 \item{"choice"}{Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).}
-#' @templateVar DETAILS_DATA_4 \item{"reward"}{Amount of reward earned as a result of the trial.}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{type}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as for 80\% (type 1), 20\% (type 2), 70\% (type 3), 30\% (type 4), 60\% (type 5), 40\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
+#' @templateVar DETAILS_DATA_3 \item{choice}{Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).}
+#' @templateVar DETAILS_DATA_4 \item{reward}{Amount of reward earned as a result of the trial.}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -39,4 +41,3 @@ pst_gainloss_Q <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = pst_preprocess_func)
-
"cert" is assumed to be zero or greater than zero.} +#' @templateVar DETAILS_DATA_5 \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -39,4 +41,3 @@ ra_noLA <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = ra_preprocess_func) - diff --git a/R/R/ra_noRA.R b/R/R/ra_noRA.R index 94b4a54e..c7221a45 100644 --- a/R/R/ra_noRA.R +++ b/R/R/ra_noRA.R @@ -1,20 +1,22 @@ #' @templateVar MODEL_FUNCTION ra_noRA #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Risk Aversion Task +#' @templateVar TASK_CODE ra #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Prospect Theory, without risk aversion (RA) parameter +#' @templateVar MODEL_CODE noRA #' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "gamble" -#' @templateVar PARAMETERS "lambda" (loss aversion), "tau" (inverse temperature) +#' @templateVar PARAMETERS \code{lambda} (loss aversion), \code{tau} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 5 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}} -#' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} -#' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.} -#' @templateVar DETAILS_DATA_5 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).} +#' @templateVar DETAILS_DATA_3 \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} +#' @templateVar DETAILS_DATA_4 \item{cert}{Guaranteed amount of a safe option. 
"cert" is assumed to be zero or greater than zero.} +#' @templateVar DETAILS_DATA_5 \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -39,4 +41,3 @@ ra_noRA <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = ra_preprocess_func) - diff --git a/R/R/ra_prospect.R b/R/R/ra_prospect.R index 58ffe947..e9c526e0 100644 --- a/R/R/ra_prospect.R +++ b/R/R/ra_prospect.R @@ -1,20 +1,22 @@ #' @templateVar MODEL_FUNCTION ra_prospect #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Risk Aversion Task +#' @templateVar TASK_CODE ra #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Prospect Theory +#' @templateVar MODEL_CODE prospect #' @templateVar MODEL_CITE (Sokol-Hessner et al., 2009) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "gamble" -#' @templateVar PARAMETERS "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature) +#' @templateVar PARAMETERS \code{rho} (risk aversion), \code{lambda} (loss aversion), \code{tau} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 5 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}} -#' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} -#' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.} -#' @templateVar DETAILS_DATA_5 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).} +#' @templateVar DETAILS_DATA_3 \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} +#' @templateVar DETAILS_DATA_4 \item{cert}{Guaranteed amount of a safe option. 
"cert" is assumed to be zero or greater than zero.} +#' @templateVar DETAILS_DATA_5 \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -40,4 +42,3 @@ ra_prospect <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = ra_preprocess_func) - diff --git a/R/R/rdt_happiness.R b/R/R/rdt_happiness.R index 7f4500cc..f7cacb54 100644 --- a/R/R/rdt_happiness.R +++ b/R/R/rdt_happiness.R @@ -1,24 +1,26 @@ #' @templateVar MODEL_FUNCTION rdt_happiness -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}> #' @templateVar TASK_NAME Risky Decision Task +#' @templateVar TASK_CODE rdt #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Happiness Computational Model +#' @templateVar MODEL_CODE happiness #' @templateVar MODEL_CITE (Rutledge et al., 2014) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy" -#' @templateVar PARAMETERS "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error) +#' @templateVar PARAMETERS \code{w0} (baseline), \code{w1} (weight of certain rewards), \code{w2} (weight of expected values), \code{w3} (weight of reward prediction errors), \code{gam} (forgetting factor), \code{sig} (standard deviation of error) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 9 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).} -#' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} -#' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option.} -#' @templateVar DETAILS_DATA_5 \item{"type"}{loss == -1, mixed == 0, gain == 1} -#' @templateVar DETAILS_DATA_6 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} -#' @templateVar DETAILS_DATA_7 \item{"outcome"}{Result of the trial.} -#' @templateVar DETAILS_DATA_8 \item{"happy"}{Happiness score.} -#' @templateVar DETAILS_DATA_9 \item{"RT_happy"}{Reaction time for answering the happiness score.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).} +#' @templateVar DETAILS_DATA_3 \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 
diff --git a/R/R/rdt_happiness.R b/R/R/rdt_happiness.R
index 7f4500cc..f7cacb54 100644
--- a/R/R/rdt_happiness.R
+++ b/R/R/rdt_happiness.R
@@ -1,24 +1,26 @@
 #' @templateVar MODEL_FUNCTION rdt_happiness
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Risky Decision Task
+#' @templateVar TASK_CODE rdt
 #' @templateVar TASK_CITE
 #' @templateVar MODEL_NAME Happiness Computational Model
+#' @templateVar MODEL_CODE happiness
 #' @templateVar MODEL_CITE (Rutledge et al., 2014)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy"
-#' @templateVar PARAMETERS "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error)
+#' @templateVar PARAMETERS \code{w0} (baseline), \code{w1} (weight of certain rewards), \code{w2} (weight of expected values), \code{w3} (weight of reward prediction errors), \code{gam} (forgetting factor), \code{sig} (standard deviation of error)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred"
 #' @templateVar LENGTH_DATA_COLUMNS 9
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
-#' @templateVar DETAILS_DATA_3 \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
-#' @templateVar DETAILS_DATA_4 \item{"cert"}{Guaranteed amount of a safe option.}
-#' @templateVar DETAILS_DATA_5 \item{"type"}{loss == -1, mixed == 0, gain == 1}
-#' @templateVar DETAILS_DATA_6 \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.}
-#' @templateVar DETAILS_DATA_7 \item{"outcome"}{Result of the trial.}
-#' @templateVar DETAILS_DATA_8 \item{"happy"}{Happiness score.}
-#' @templateVar DETAILS_DATA_9 \item{"RT_happy"}{Reaction time for answering the happiness score.}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+#' @templateVar DETAILS_DATA_3 \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
+#' @templateVar DETAILS_DATA_4 \item{cert}{Guaranteed amount of a safe option.}
+#' @templateVar DETAILS_DATA_5 \item{type}{loss == -1, mixed == 0, gain == 1}
+#' @templateVar DETAILS_DATA_6 \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.}
+#' @templateVar DETAILS_DATA_7 \item{outcome}{Result of the trial.}
+#' @templateVar DETAILS_DATA_8 \item{happy}{Happiness score.}
+#' @templateVar DETAILS_DATA_9 \item{RT_happy}{Reaction time for answering the happiness score.}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 0
 #'
 #' @template model-documentation
@@ -47,4 +49,3 @@ rdt_happiness <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred"),
   preprocess_func = rdt_preprocess_func)
-
diff --git a/R/R/ts_par4.R b/R/R/ts_par4.R
index cf7ed1df..28863e9a 100644
--- a/R/R/ts_par4.R
+++ b/R/R/ts_par4.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION ts_par4
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Two-Step Task
+#' @templateVar TASK_CODE ts
 #' @templateVar TASK_CITE (Daw et al., 2011)
 #' @templateVar MODEL_NAME Hybrid Model, with 4 parameters
+#' @templateVar MODEL_CODE par4
 #' @templateVar MODEL_CITE (Daw et al., 2011; Wunderlich et al., 2012)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "level1_choice", "level2_choice", "reward"
-#' @templateVar PARAMETERS "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight)
+#' @templateVar PARAMETERS \code{a} (learning rate for both stages 1 & 2), \code{beta} (inverse temperature for both stages 1 & 2), \code{pi} (perseverance), \code{w} (model-based weight)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred_step1", "y_pred_step2"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
-#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.}
-#' @templateVar DETAILS_DATA_4 \item{"reward"}{Reward after Level 2 (0 or 1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{level1_choice}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
+#' @templateVar DETAILS_DATA_3 \item{level2_choice}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.}
+#' @templateVar DETAILS_DATA_4 \item{reward}{Reward after Level 2 (0 or 1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
+#' @templateVar ADDITIONAL_ARGS_1 \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
 #'
 #' @template model-documentation
 #'
@@ -45,4 +47,3 @@ ts_par4 <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred_step1", "y_pred_step2"),
   preprocess_func = ts_preprocess_func)
-
diff --git a/R/R/ts_par6.R b/R/R/ts_par6.R
index 5bddb323..9a7fc19e 100644
--- a/R/R/ts_par6.R
+++ b/R/R/ts_par6.R
@@ -1,21 +1,23 @@
 #' @templateVar MODEL_FUNCTION ts_par6
-#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@@gmail.com}>
 #' @templateVar TASK_NAME Two-Step Task
+#' @templateVar TASK_CODE ts
 #' @templateVar TASK_CITE (Daw et al., 2011)
 #' @templateVar MODEL_NAME Hybrid Model, with 6 parameters
+#' @templateVar MODEL_CODE par6
 #' @templateVar MODEL_CITE (Daw et al., 2011)
 #' @templateVar MODEL_TYPE Hierarchical
 #' @templateVar DATA_COLUMNS "subjID", "level1_choice", "level2_choice", "reward"
-#' @templateVar PARAMETERS "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight)
+#' @templateVar PARAMETERS \code{a1} (learning rate in stage 1), \code{beta1} (inverse temperature in stage 1), \code{a2} (learning rate in stage 2), \code{beta2} (inverse temperature in stage 2), \code{pi} (perseverance), \code{w} (model-based weight)
 #' @templateVar REGRESSORS
 #' @templateVar POSTPREDS "y_pred_step1", "y_pred_step2"
 #' @templateVar LENGTH_DATA_COLUMNS 4
-#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
-#' @templateVar DETAILS_DATA_3 \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.}
-#' @templateVar DETAILS_DATA_4 \item{"reward"}{Reward after Level 2 (0 or 1).}
+#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
+#' @templateVar DETAILS_DATA_2 \item{level1_choice}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
+#' @templateVar DETAILS_DATA_3 \item{level2_choice}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.}
+#' @templateVar DETAILS_DATA_4 \item{reward}{Reward after Level 2 (0 or 1).}
 #' @templateVar LENGTH_ADDITIONAL_ARGS 1
-#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
+#' @templateVar ADDITIONAL_ARGS_1 \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
 #'
 #' @template model-documentation
 #'
@@ -45,4 +47,3 @@ ts_par6 <- hBayesDM_model(
   regressors = NULL,
   postpreds = c("y_pred_step1", "y_pred_step2"),
   preprocess_func = ts_preprocess_func)
-
Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.} +#' @templateVar DETAILS_DATA_4 \item{reward}{Reward after Level 2 (0 or 1).} #' @templateVar LENGTH_ADDITIONAL_ARGS 1 -#' @templateVar ADDITIONAL_ARGS_1 \strong{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7. +#' @templateVar ADDITIONAL_ARGS_1 \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.} #' #' @template model-documentation #' @@ -46,4 +48,3 @@ ts_par7 <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), preprocess_func = ts_preprocess_func) - diff --git a/R/R/ug_bayes.R b/R/R/ug_bayes.R index 7edc34a2..5948312f 100644 --- a/R/R/ug_bayes.R +++ b/R/R/ug_bayes.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION ug_bayes #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Norm-Training Ultimatum Game +#' @templateVar TASK_CODE ug #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Ideal Observer Model +#' @templateVar MODEL_CODE bayes #' @templateVar MODEL_CITE (Xiang et al., 2013) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "offer", "accept" -#' @templateVar PARAMETERS "alpha" (envy), "beta" (guilt), "tau" (inverse temperature) +#' @templateVar PARAMETERS \code{alpha} (envy), \code{beta} (guilt), \code{tau} (inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).} -#' @templateVar DETAILS_DATA_3 \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{offer}{Floating point value representing the offer made in that trial (e.g. 
4, 10, 11).} +#' @templateVar DETAILS_DATA_3 \item{accept}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -38,4 +40,3 @@ ug_bayes <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = ug_preprocess_func) - diff --git a/R/R/ug_delta.R b/R/R/ug_delta.R index ac127dff..7007049c 100644 --- a/R/R/ug_delta.R +++ b/R/R/ug_delta.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION ug_delta #' @templateVar CONTRIBUTOR #' @templateVar TASK_NAME Norm-Training Ultimatum Game +#' @templateVar TASK_CODE ug #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Rescorla-Wagner (Delta) Model +#' @templateVar MODEL_CODE delta #' @templateVar MODEL_CITE (Gu et al., 2015) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "offer", "accept" -#' @templateVar PARAMETERS "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate) +#' @templateVar PARAMETERS \code{alpha} (envy), \code{tau} (inverse temperature), \code{ep} (norm adaptation rate) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).} -#' @templateVar DETAILS_DATA_3 \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{offer}{Floating point value representing the offer made in that trial (e.g. 
4, 10, 11).} +#' @templateVar DETAILS_DATA_3 \item{accept}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -38,4 +40,3 @@ ug_delta <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = ug_preprocess_func) - diff --git a/R/R/wcs_sql.R b/R/R/wcs_sql.R index 502283af..570e7e6f 100644 --- a/R/R/wcs_sql.R +++ b/R/R/wcs_sql.R @@ -1,18 +1,20 @@ #' @templateVar MODEL_FUNCTION wcs_sql -#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} +#' @templateVar CONTRIBUTOR \href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} <\email{mindy2801@@snu.ac.kr}> #' @templateVar TASK_NAME Wisconsin Card Sorting Task +#' @templateVar TASK_CODE wcs #' @templateVar TASK_CITE #' @templateVar MODEL_NAME Sequential Learning Model +#' @templateVar MODEL_CODE sql #' @templateVar MODEL_CITE (Bishara et al., 2010) #' @templateVar MODEL_TYPE Hierarchical #' @templateVar DATA_COLUMNS "subjID", "choice", "outcome" -#' @templateVar PARAMETERS "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature) +#' @templateVar PARAMETERS \code{r} (reward sensitivity), \code{p} (punishment sensitivity), \code{d} (decision consistency or inverse temperature) #' @templateVar REGRESSORS #' @templateVar POSTPREDS "y_pred" #' @templateVar LENGTH_DATA_COLUMNS 3 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.} -#' @templateVar DETAILS_DATA_3 \item{"outcome"}{1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.} +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{choice}{Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.} +#' @templateVar DETAILS_DATA_3 \item{outcome}{1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -38,4 +40,3 @@ wcs_sql <- hBayesDM_model( regressors = NULL, postpreds = c("y_pred"), preprocess_func = wcs_preprocess_func) - diff --git a/R/R/zzz.R b/R/R/zzz.R index 65d3e4a0..b913cb25 100644 --- a/R/R/zzz.R +++ b/R/R/zzz.R @@ -5,7 +5,8 @@ packageStartupMessage("\n\nThis is hBayesDM version ", ver, "\n\n") } -.onLoad <- function(libname, pkgname) { # nocov start +.onLoad <- function(libname, pkgname) { + # nocov start if (FLAG_BUILD_ALL) { modules <- paste0("stan_fit4", names(stanmodels), "_mod") for (m in modules) loadModule(m, what = TRUE) From 4df07eb78184871ee033690650b5c6a872380c0d Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 02:30:01 +0900 Subject: [PATCH 046/163] Generate docs using pkgdown --- R/_pkgdown.yml | 5 - R/docs/authors.html | 14 +- R/docs/index.html | 92 +- R/docs/reference/HDIofMCMC.html | 14 +- R/docs/reference/bandit2arm.html | 333 ++++ R/docs/reference/bandit2arm_delta.html | 116 +- R/docs/reference/bandit4arm.html | 1029 +++++++++++ R/docs/reference/bandit4arm2.html | 334 ++++ .../reference/bandit4arm2_kalman_filter.html | 119 +- R/docs/reference/bandit4arm_2par_lapse.html | 113 +- R/docs/reference/bandit4arm_4par.html | 113 +- R/docs/reference/bandit4arm_lapse.html | 113 +- R/docs/reference/bandit4arm_lapse_decay.html | 113 
+- .../reference/bandit4arm_singleA_lapse.html | 117 +- R/docs/reference/bart.html | 334 ++++ R/docs/reference/bart_par4.html | 115 +- R/docs/reference/choiceRT.html | 518 ++++++ R/docs/reference/choiceRT_ddm.html | 132 +- R/docs/reference/choiceRT_ddm_single.html | 132 +- R/docs/reference/choiceRT_lba.html | 14 +- R/docs/reference/choiceRT_lba_single.html | 14 +- R/docs/reference/cra.html | 515 ++++++ R/docs/reference/cra_exp.html | 123 +- R/docs/reference/cra_linear.html | 120 +- R/docs/reference/dbdm.html | 341 ++++ R/docs/reference/dbdm_prob_weight.html | 132 +- R/docs/reference/dd.html | 1039 +++++++++++ R/docs/reference/dd_cs.html | 120 +- R/docs/reference/dd_cs_single.html | 117 +- R/docs/reference/dd_exp.html | 120 +- R/docs/reference/dd_hyperbolic.html | 114 +- R/docs/reference/dd_hyperbolic_single.html | 114 +- R/docs/reference/estimate_mode.html | 14 +- R/docs/reference/extract_ic.html | 14 +- R/docs/reference/gng.html | 855 +++++++++ R/docs/reference/gng_m1.html | 115 +- R/docs/reference/gng_m2.html | 115 +- R/docs/reference/gng_m3.html | 115 +- R/docs/reference/gng_m4.html | 115 +- R/docs/reference/hBayesDM-package.html | 14 +- R/docs/reference/hBayesDM_model.html | 14 +- R/docs/reference/igt.html | 861 +++++++++ R/docs/reference/igt_orl.html | 123 +- R/docs/reference/igt_pvl_decay.html | 117 +- R/docs/reference/igt_pvl_delta.html | 116 +- R/docs/reference/igt_vpp.html | 119 +- R/docs/reference/index.html | 119 +- R/docs/reference/multiplot.html | 14 +- R/docs/reference/peer.html | 339 ++++ R/docs/reference/peer_ocu.html | 129 +- R/docs/reference/plot.hBayesDM.html | 14 +- R/docs/reference/plotDist.html | 14 +- R/docs/reference/plotHDI.html | 14 +- R/docs/reference/plotInd.html | 14 +- R/docs/reference/printFit.html | 14 +- R/docs/reference/prl.html | 1563 +++++++++++++++++ R/docs/reference/prl_ewa.html | 117 +- R/docs/reference/prl_fictitious.html | 114 +- .../reference/prl_fictitious_multipleB.html | 120 +- R/docs/reference/prl_fictitious_rp.html | 118 +- R/docs/reference/prl_fictitious_rp_woa.html | 118 +- R/docs/reference/prl_fictitious_woa.html | 114 +- R/docs/reference/prl_rp.html | 117 +- R/docs/reference/prl_rp_multipleB.html | 116 +- R/docs/reference/pst.html | 334 ++++ R/docs/reference/pst_gainloss_Q.html | 131 +- R/docs/reference/ra.html | 684 ++++++++ R/docs/reference/ra_noLA.html | 125 +- R/docs/reference/ra_noRA.html | 125 +- R/docs/reference/ra_prospect.html | 122 +- R/docs/reference/rdt.html | 340 ++++ R/docs/reference/rdt_happiness.html | 128 +- R/docs/reference/rhat.html | 14 +- R/docs/reference/ts.html | 691 ++++++++ R/docs/reference/ts_par4.html | 126 +- R/docs/reference/ts_par6.html | 123 +- R/docs/reference/ts_par7.html | 123 +- R/docs/reference/ug.html | 505 ++++++ R/docs/reference/ug_bayes.html | 115 +- R/docs/reference/ug_delta.html | 116 +- R/docs/reference/wcs.html | 334 ++++ R/docs/reference/wcs_sql.html | 119 +- R/man-roxygen/README.md | 60 - R/man-roxygen/model-documentation.R | 111 +- R/man/bandit2arm_delta.Rd | 84 +- R/man/bandit4arm2_kalman_filter.Rd | 87 +- R/man/bandit4arm_2par_lapse.Rd | 81 +- R/man/bandit4arm_4par.Rd | 81 +- R/man/bandit4arm_lapse.Rd | 81 +- R/man/bandit4arm_lapse_decay.Rd | 81 +- R/man/bandit4arm_singleA_lapse.Rd | 85 +- R/man/bart_par4.Rd | 83 +- R/man/choiceRT_ddm.Rd | 93 +- R/man/choiceRT_ddm_single.Rd | 93 +- R/man/cra_exp.Rd | 91 +- R/man/cra_linear.Rd | 88 +- R/man/dbdm_prob_weight.Rd | 100 +- R/man/dd_cs.Rd | 88 +- R/man/dd_cs_single.Rd | 85 +- R/man/dd_exp.Rd | 88 +- R/man/dd_hyperbolic.Rd | 82 +- 
R/man/dd_hyperbolic_single.Rd | 82 +- R/man/gng_m1.Rd | 83 +- R/man/gng_m2.Rd | 83 +- R/man/gng_m3.Rd | 83 +- R/man/gng_m4.Rd | 83 +- R/man/igt_orl.Rd | 92 +- R/man/igt_pvl_decay.Rd | 86 +- R/man/igt_pvl_delta.Rd | 85 +- R/man/igt_vpp.Rd | 88 +- R/man/peer_ocu.Rd | 97 +- R/man/prl_ewa.Rd | 85 +- R/man/prl_fictitious.Rd | 82 +- R/man/prl_fictitious_multipleB.Rd | 88 +- R/man/prl_fictitious_rp.Rd | 86 +- R/man/prl_fictitious_rp_woa.Rd | 86 +- R/man/prl_fictitious_woa.Rd | 82 +- R/man/prl_rp.Rd | 85 +- R/man/prl_rp_multipleB.Rd | 84 +- R/man/pst_gainloss_Q.Rd | 86 +- R/man/ra_noLA.Rd | 94 +- R/man/ra_noRA.Rd | 94 +- R/man/ra_prospect.Rd | 91 +- R/man/rdt_happiness.Rd | 96 +- R/man/ts_par4.Rd | 95 +- R/man/ts_par6.Rd | 92 +- R/man/ts_par7.Rd | 92 +- R/man/ug_bayes.Rd | 83 +- R/man/ug_delta.Rd | 84 +- R/man/wcs_sql.Rd | 87 +- 130 files changed, 17173 insertions(+), 3872 deletions(-) create mode 100644 R/docs/reference/bandit2arm.html create mode 100644 R/docs/reference/bandit4arm.html create mode 100644 R/docs/reference/bandit4arm2.html create mode 100644 R/docs/reference/bart.html create mode 100644 R/docs/reference/choiceRT.html create mode 100644 R/docs/reference/cra.html create mode 100644 R/docs/reference/dbdm.html create mode 100644 R/docs/reference/dd.html create mode 100644 R/docs/reference/gng.html create mode 100644 R/docs/reference/igt.html create mode 100644 R/docs/reference/peer.html create mode 100644 R/docs/reference/prl.html create mode 100644 R/docs/reference/pst.html create mode 100644 R/docs/reference/ra.html create mode 100644 R/docs/reference/rdt.html create mode 100644 R/docs/reference/ts.html create mode 100644 R/docs/reference/ug.html create mode 100644 R/docs/reference/wcs.html delete mode 100644 R/man-roxygen/README.md diff --git a/R/_pkgdown.yml b/R/_pkgdown.yml index 60f4d379..3e7016f3 100644 --- a/R/_pkgdown.yml +++ b/R/_pkgdown.yml @@ -28,8 +28,3 @@ reference: - plotInd - printFit - rhat -- title: Internal functions - desc: > - These functions are for the developmental purpose. - contents: - - hBayesDM_model diff --git a/R/docs/authors.html b/R/docs/authors.html index 49dff5fc..9f30df5c 100644 --- a/R/docs/authors.html +++ b/R/docs/authors.html @@ -60,7 +60,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000 @@ -74,14 +74,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -95,7 +97,7 @@

    Ahn W, Haines N, Zhang L (2017). diff --git a/R/docs/index.html b/R/docs/index.html index b371fe03..17c562c5 100644 --- a/R/docs/index.html +++ b/R/docs/index.html @@ -22,7 +22,7 @@ -

    +
    @@ -63,48 +67,59 @@
    -
    +
    + + + +
    -

    hBayesDM (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly R package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses Stan for Bayesian inference.

    +

    hBayesDM (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses Stan for Bayesian inference.

    +

Please see the respective sections below for installing hBayesDM with R/Python.

    Getting Started

    Prerequisite

    -

    To install hBayesDM, RStan should be properly installed before you proceed. For detailed instructions, please go to this link: https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started

    +

    To install hBayesDM for R, RStan needs to be properly installed before you proceed. For detailed instructions on having RStan ready prior to installing hBayesDM, please go to this link: https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started
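To quickly verify that RStan itself works before installing hBayesDM, you can compile and run RStan's bundled toy example (the smoke test suggested on the RStan wiki; compilation takes a minute or two):

library(rstan)
example(stan_model, package = "rstan", run.dontrun = TRUE)  # compiles and samples a small toy model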

    Installation

    -

    hBayesDM can be installed from CRAN by running the following command in R:

    +

The latest stable version of hBayesDM can be installed from CRAN by running the following command in R:

    install.packages("hBayesDM")  # Install hBayesDM from CRAN
    -

    or you can also install via GitHub with:

    +

or you can install it from GitHub with:

    +devtools::install_github("CCS-Lab/hBayesDM/R")
    +

If you want to use the latest development version of hBayesDM, run the following in R:

    +

    Building at once

    -

    In default, you should build a Stan file into a binary for the first time to use the model, so it can be quite bothersome. In order to build all the models at once, you should set an environmental variable BUILD_ALL to true. We highly recommend you to use multiple cores for build, since it requires quite a long time to complete.

    -
    Sys.setenv(BUILD_ALL='true')  # Build all the models on installation
    -Sys.setenv(MAKEFLAGS='-j 4')  # Use 4 cores for compilation (or the number you want)
    -
    -install.packages("hBayesDM")  # Install from CRAN
    -# or
    -devtools::install_github("CCS-Lab/hBayesDM")  # Install from GitHub
    +

By default, you will have to wait for compilation when you run each model for the first time. If you plan on running several different models and want to pre-build all models at installation time, set the environment variable BUILD_ALL to true, as shown below. We highly recommend doing so only when you have multiple cores available, since building all models at once takes quite a long time to complete.

    +
    Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
    +Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 cores for build (or any other number you want)
    +
    +install.packages("hBayesDM")                    # Install from CRAN
    +# or
    +devtools::install_github("CCS-Lab/hBayesDM/R")  # Install from GitHub
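If you are unsure how many cores to pass to MAKEFLAGS, base R's parallel package can report the core count; leaving one core free, as below, is a rule of thumb rather than a package requirement:

n_cores <- parallel::detectCores()                           # logical cores on this machine
Sys.setenv(MAKEFLAGS = paste0("-j ", max(1, n_cores - 1)))   # e.g. "-j 3" on a 4-core machine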
    - + -
    @@ -143,6 +155,8 @@

    Links

    @@ -169,7 +183,7 @@

    Developers

    -
    +

    Dev status

    • Project Status: Active – The project has reached a stable, usable state and is being actively developed.
    • @@ -180,8 +194,10 @@

      Dev status

    +
    +
    diff --git a/R/docs/reference/HDIofMCMC.html b/R/docs/reference/HDIofMCMC.html index 5a706882..58a623a5 100644 --- a/R/docs/reference/HDIofMCMC.html +++ b/R/docs/reference/HDIofMCMC.html @@ -64,7 +64,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -78,14 +78,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,7 +101,7 @@
    diff --git a/R/docs/reference/bandit2arm.html b/R/docs/reference/bandit2arm.html new file mode 100644 index 00000000..4b832c75 --- /dev/null +++ b/R/docs/reference/bandit2arm.html @@ -0,0 +1,333 @@ + + + + + + + + +Rescorla-Wagner (Delta) Model — bandit2arm_delta • hBayesDM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the 2-Armed Bandit Task using the Rescorla-Wagner (Delta) Model. +It has the following parameters: "A" (learning rate), "tau" (inverse temperature).

    +
      +
    • Task: 2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004)

    • +
    • Model: Rescorla-Wagner (Delta) Model

    • +
    + +
    + +
    bandit2arm_delta(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit2arm_delta").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.
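As a sketch of how these components might be used after fitting (assuming output is the object returned by bandit2arm_delta, and that parVals is keyed by parameter name, as the parameter list above suggests):

output$model          # "bandit2arm_delta"
output$allIndPars     # per-subject parameter summaries, as specified by indPars
str(output$parVals)   # posterior samples, one element per parameter (e.g. A, tau)
class(output$fit)     # the underlying stanfit object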

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the 2-Armed Bandit Task, there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
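For illustration, a minimal tab-delimited file in this format could be created as follows (all subject IDs and trial values below are made up):

df <- data.frame(
  subjID  = rep(c("s01", "s02"), each = 3),  # one row per trial, per subject
  choice  = c(1, 2, 1, 2, 2, 1),             # option chosen: 1 or 2
  outcome = c(1, -1, 1, 1, -1, -1)           # reward == 1, loss == -1
)
write.table(df, "bandit2arm_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)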

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
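For instance, if Stan warns about divergent transitions after warmup, a common first step is to raise adapt_delta and, if needed, max_treedepth (the values below are illustrative; the defaults usually suffice):

output <- bandit2arm_delta(use_example = TRUE, adapt_delta = 0.99, max_treedepth = 12)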

    + +

    References

    + +

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683

    +

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit2arm_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
    + + + + + + diff --git a/R/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html index 41190190..ce5e3d51 100644 --- a/R/docs/reference/bandit2arm_delta.html +++ b/R/docs/reference/bandit2arm_delta.html @@ -6,7 +6,7 @@ -2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004) — bandit2arm_delta • hBayesDM +Rescorla-Wagner (Delta) Model — bandit2arm_delta • hBayesDM @@ -30,11 +30,14 @@ - + - + @@ -65,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 2-Armed Bandit Task with the following parameters: - "A" (learning rate), "tau" (inverse temperature).

    -

    MODEL: Rescorla-Wagner (Delta) Model

    +

Hierarchical Bayesian Modeling of the 2-Armed Bandit Task using the Rescorla-Wagner (Delta) Model. +It has the following parameters: A (learning rate), tau (inverse temperature).

    +
      +
    • Task: 2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004)

    • +
    • Model: Rescorla-Wagner (Delta) Model

    • +
    -
    bandit2arm_delta(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bandit2arm_delta(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit2arm_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit2arm_delta").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,9 +250,9 @@

    Details For the 2-Armed Bandit Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,12 +284,8 @@
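Since the function accepts either a data.frame (data) or a path to such a file (datafile), an existing tab-delimited file can equivalently be read in first and passed as a data.frame (the file name here is hypothetical):

df <- read.table("bandit2arm_data.txt", header = TRUE, sep = "\t")
output <- bandit2arm_delta(data = df)
# or, equivalently:
output <- bandit2arm_delta(datafile = "bandit2arm_data.txt")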

    Details

    References

    -

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice - prediction competition: Choices from experience and from description. Journal of Behavioral - Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683

    -

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the - Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. - http://doi.org/10.1111/j.0956-7976.2004.00715.x

    +

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683

    +

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x

    See also

    @@ -270,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit2arm_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit2arm_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit2arm_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm.html b/R/docs/reference/bandit4arm.html
    new file mode 100644
    index 00000000..109e6ae5
    --- /dev/null
    +++ b/R/docs/reference/bandit4arm.html
    @@ -0,0 +1,1029 @@
    +
    +
    +
    +  
    +  
    +
    +
    +
    +3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) — bandit4arm_2par_lapse • hBayesDM
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +  
    +
    +  
    +    
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity), but with xi (noise). +It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018)

    • +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 4 Parameter Model, without C (choice perseveration). +It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012)

    • +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 5 Parameter Model, without C (choice perseveration) but with xi (noise). +It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012)

    • +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 5 Parameter Model, without C (choice perseveration) but with xi (noise) and an added decay rate. +It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018)

    • +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 4 Parameter Model, without C (choice perseveration) but with xi (noise) and a single learning rate for both R and P. +It has the following parameters: "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018)

    • +
    + +
    + +
    bandit4arm_2par_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +bandit4arm_4par(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +bandit4arm_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +bandit4arm_lapse_decay(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +bandit4arm_singleA_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm_2par_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm_4par").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm_lapse_decay").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm_singleA_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the 4-Armed Bandit Task, there should be 4 columns of data with the + labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
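As an illustration of this 4-column layout (all values below are hypothetical):

df <- data.frame(
  subjID = rep("s01", 4),       # a single example subject
  choice = c(1, 3, 2, 4),       # one of the four arms per trial
  gain   = c(50, 100, 0, 50),   # currency won on the trial
  loss   = c(0, -50, 0, -50)    # currency lost on the trial (zero or negative)
)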

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the 4-Armed Bandit Task, there should be 4 columns of data with the + labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
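
    For illustration, a minimal sketch of preparing such a file from R; the values and the file name "bandit4arm_example.txt" are hypothetical:

    # Toy data with the four required columns; extra columns would simply be ignored
    df <- data.frame(subjID = rep(1:2, each = 3),
                     choice = c(1, 3, 2, 4, 1, 2),
                     gain   = c(50, 100, 0, 50, 0, 100),
                     loss   = c(0, -50, 0, 0, -50, 0))
    write.table(df, "bandit4arm_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)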

    +

nwarmup is a numerical value that specifies how many MCMC samples should not be stored + at the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.
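
    As a short sketch, thinning is typically paired with a larger number of iterations so that enough draws remain; the argument values here are illustrative only:

    # Keep every 2nd post-warmup draw to reduce autocorrelation in the stored samples
    output <- bandit4arm_4par("example", niter = 6000, nwarmup = 1000, nthin = 2)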

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
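
    A hedged sketch of overriding these controls (the values are illustrative; consult the references above before changing them):

    # Tighter target acceptance and deeper trees, e.g. after divergent-transition warnings
    output <- bandit4arm_4par("example", niter = 4000, nwarmup = 2000,
                              adapt_delta = 0.99, max_treedepth = 12)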

    +


    + +

    References

    + +

    Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m

    +

    Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm_2par_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm_4par("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm_lapse_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm_singleA_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
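+
+# A sketch beyond the examples above: printFit() accepts multiple hBayesDM objects,
+# so the bandit4arm variants can be compared side by side (assuming two of the
+# outputs above were kept as output1 and output2):
+printFit(output1, output2)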
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
diff --git a/R/docs/reference/bandit4arm2.html b/R/docs/reference/bandit4arm2.html
new file mode 100644
index 00000000..e90f7731
--- /dev/null
+++ b/R/docs/reference/bandit4arm2.html
@@ -0,0 +1,334 @@
+Kalman Filter — bandit4arm2_kalman_filter • hBayesDM
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) using Kalman Filter. +It has the following parameters: "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise).
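
    As a rough sketch of how these parameters enter the model (following Daw et al., 2006; the exact parameterization in the Stan code may differ slightly):

    # Between trials, each option's value estimate decays toward the center theta:
    #   mu[t+1]     = lambda * mu[t] + (1 - lambda) * theta
    #   sigma2[t+1] = lambda^2 * sigma2[t] + sigmaD^2
    # Choices follow a softmax over mu with inverse temperature beta, starting from
    # mu0 and sigma0 on the first trial.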

    + + +
    + +
    bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +
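
    A hedged sketch of the less common arguments above; the numeric inits vector is hypothetical (one starting value per free parameter, in the order listed in the description):

    # Quick approximate fit via variational inference on the bundled example data
    fit_vb <- bandit4arm2_kalman_filter(use_example = TRUE, vb = TRUE)

    # Full MCMC with user-supplied starting values and posterior predictive draws
    fit <- bandit4arm2_kalman_filter(use_example = TRUE, inc_postpred = TRUE,
                                     inits = c(0.9, 50, 0.1, 85, 4, 3))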

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bandit4arm2_kalman_filter").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +
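
    A brief sketch of accessing these components, assuming a fitted object named output:

    head(output$allIndPars)   # per-subject parameter summaries
    class(output$fit)         # the underlying stanfit object, usable with rstan tools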

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
+For the 4-Armed Bandit Task (modified), there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
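
    A minimal sketch of assembling data in this format from R; the values and the file name "bandit4arm2_example.txt" are hypothetical:

    df <- data.frame(subjID  = rep(1:2, each = 3),
                     choice  = c(2, 4, 1, 3, 3, 2),
                     outcome = c(1, -1, 1, -1, 1, 1))   # reward == 1, loss == -1
    write.table(df, "bandit4arm2_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)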

    +

nwarmup is a numerical value that specifies how many MCMC samples should not be stored + at the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    + +

    References

    + +

    Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- bandit4arm2_kalman_filter("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
diff --git a/R/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html
index 481be0d8..6e1f07d0 100644
--- a/R/docs/reference/bandit4arm2_kalman_filter.html
+++ b/R/docs/reference/bandit4arm2_kalman_filter.html
@@ -6,7 +6,7 @@
-4-Armed Bandit Task (modified) — bandit4arm2_kalman_filter • hBayesDM
+Kalman Filter — bandit4arm2_kalman_filter • hBayesDM
@@ -66,7 +68,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) with the following parameters: - "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise).

    -

    Contributor: Yoonseo Zoh, Lei Zhang

    -

    MODEL: Kalman Filter (Daw et al., 2006, Nature)

    +

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) using Kalman Filter. +It has the following parameters: lambda (decay factor), theta (decay center), beta (inverse softmax temperature), mu0 (anticipated initial mean of all 4 options), sigma0 (anticipated initial sd (uncertainty factor) of all 4 options), sigmaD (sd of diffusion noise).

    +
      +
    • Task: 4-Armed Bandit Task (modified)

    • +
    • Model: Kalman Filter (Daw et al., 2006)

    • +
    -
    bandit4arm2_kalman_filter(data = "choose", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1,
    -  inits = "random", indPars = "mean", modelRegressor = FALSE,
    -  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
    -  stepsize = 1, max_treedepth = 10, ...)
    +
    bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm2_kalman_filter").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm2_kalman_filter").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

    Details For the 4-Armed Bandit Task (modified), there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "outcome"

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -254,11 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Yoonseo Zoh <zohyos7@gmail.com>

    References

    -

    Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates - for exploratory decisions in humans. Nature, 441(7095), 876-879.

    +

    Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm2_kalman_filter("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm2_kalman_filter(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm2_kalman_filter(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html
    index 1474473b..8e980adf 100644
    --- a/R/docs/reference/bandit4arm_2par_lapse.html
    +++ b/R/docs/reference/bandit4arm_2par_lapse.html
    @@ -6,7 +6,7 @@
     
     
     
    -4-Armed Bandit Task — bandit4arm_2par_lapse • hBayesDM
    +3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) — bandit4arm_2par_lapse • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters: - "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise).

    -

    MODEL: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018, PsyArXiv)

    +

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise). +It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), xi (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018)

    • +
    -
    bandit4arm_2par_lapse(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bandit4arm_2par_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm_2par_lapse").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm_2par_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "gain"

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,8 +285,7 @@

    Details

    References

    -

    Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under - uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m

    +

    Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm_2par_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm_2par_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm_2par_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html
    index 74642fcc..51cfc179 100644
    --- a/R/docs/reference/bandit4arm_4par.html
    +++ b/R/docs/reference/bandit4arm_4par.html
    @@ -6,7 +6,7 @@
     
     
     
    -4-Armed Bandit Task — bandit4arm_4par • hBayesDM
    +4 Parameter Model, without C (choice perseveration) — bandit4arm_4par • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters: - "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity).

    -

    MODEL: 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012, J Neuro)

    +

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 4 Parameter Model, without C (choice perseveration). +It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012)

    • +
    -
    bandit4arm_4par(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bandit4arm_4par(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm_4par").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm_4par").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "gain"

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,8 +285,7 @@

    Details

    References

    -

    Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in - Human Decision-Making. J Neuro, 32(17), 5833-5842.

    +

    Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm_4par("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm_4par(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html
    index beda70e2..06784806 100644
    --- a/R/docs/reference/bandit4arm_lapse.html
    +++ b/R/docs/reference/bandit4arm_lapse.html
    @@ -6,7 +6,7 @@
     
     
     
    -4-Armed Bandit Task — bandit4arm_lapse • hBayesDM
    +5 Parameter Model, without C (choice perseveration) but with xi (noise) — bandit4arm_lapse • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters: - "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).

    -

    MODEL: 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012, J Neuro)

    +

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 5 Parameter Model, without C (choice perseveration) but with xi (noise). +It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012)

    • +
    -
    bandit4arm_lapse(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bandit4arm_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm_lapse").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "gain"

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,8 +285,7 @@

    Details

    References

    -

    Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in - Human Decision-Making. J Neuro, 32(17), 5833-5842.

    +

    Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html
    index cea16eaa..c93d7073 100644
    --- a/R/docs/reference/bandit4arm_lapse_decay.html
    +++ b/R/docs/reference/bandit4arm_lapse_decay.html
    @@ -6,7 +6,7 @@
     
     
     
    -4-Armed Bandit Task — bandit4arm_lapse_decay • hBayesDM
    +5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). — bandit4arm_lapse_decay • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters: - "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate).

    -

    MODEL: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018, PsyArXiv)

    +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 5 Parameter Model, without C (choice perseveration) but with xi (noise) and an added decay rate (Niv et al., 2015, J. Neuro). It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise), d (decay rate).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018)

    • +
    -
    bandit4arm_lapse_decay(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bandit4arm_lapse_decay(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +
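As a sketch of the two equivalent ways to supply data described above (the data.frame df and the file path are hypothetical):

# Pass an in-memory data.frame
output <- bandit4arm_lapse_decay(data = df, niter = 2000, nwarmup = 1000)

# Or write the same data to a tab-separated file and pass its path
write.table(df, "bandit_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)
output <- bandit4arm_lapse_decay(datafile = "bandit_data.txt", niter = 2000, nwarmup = 1000)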

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm_lapse_decay").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm_lapse_decay").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "gain"

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -256,8 +285,7 @@

    Details

    References

    -

    Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under - uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m

    +

Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. doi:10.31234/osf.io/k5b8m

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm_lapse_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm_lapse_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm_lapse_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html
    index 9ed34409..fcb66468 100644
    --- a/R/docs/reference/bandit4arm_singleA_lapse.html
    +++ b/R/docs/reference/bandit4arm_singleA_lapse.html
    @@ -6,7 +6,7 @@
     
     
     
    -4-Armed Bandit Task — bandit4arm_singleA_lapse • hBayesDM
    +4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. — bandit4arm_singleA_lapse • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters: - "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).

    -

    MODEL: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018, PsyArXiv)

    +

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 4 Parameter Model, without C (choice perseveration) but with xi (noise) and a single learning rate for both R and P. It has the following parameters: A (learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise).

    +
      +
    • Task: 4-Armed Bandit Task

    • +
    • Model: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018)

    • +
    -
    bandit4arm_singleA_lapse(data = "choose", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1,
    -  inits = "random", indPars = "mean", modelRegressor = FALSE,
    -  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
    -  stepsize = 1, max_treedepth = 10, ...)
    +
    bandit4arm_singleA_lapse(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +
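As a sketch of the inits options described above (the numeric values are illustrative; a user-supplied vector is assumed to give one initial value per model parameter, here A, R, P, xi):

out_vb    <- bandit4arm_singleA_lapse(use_example = TRUE)                             # default: inits = "vb"
out_fixed <- bandit4arm_singleA_lapse(use_example = TRUE, inits = "fixed")            # fixed starting values
out_own   <- bandit4arm_singleA_lapse(use_example = TRUE, inits = c(0.5, 1, 1, 0.1))  # own initial values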

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm_singleA_lapse").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bandit4arm_singleA_lapse").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    "gain"

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    +
    gain

    Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -256,8 +285,7 @@

    Details

    References

    -

    Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under - uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m

    +

Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. doi:10.31234/osf.io/k5b8m

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm_singleA_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bandit4arm_singleA_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bandit4arm_singleA_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bart.html b/R/docs/reference/bart.html
    new file mode 100644
    index 00000000..7d8385ff
    --- /dev/null
    +++ b/R/docs/reference/bart.html
    @@ -0,0 +1,334 @@
+Re-parameterized version of BART Model with 4 parameters (Ravenzwaaij et al., 2011) — bart_par4 • hBayesDM

Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task using the re-parameterized version of the BART model with 4 parameters (van Ravenzwaaij et al., 2011). It has the following parameters: "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature).

    + + +
    + +
    bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "pumps", "explosion". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("bart_par4").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Balloon Analogue Risk Task, there should be 3 columns of data with the + labels "subjID", "pumps", "explosion". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    pumps

    The number of pumps.

    +
    explosion

    0: intact, 1: burst

    +
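For illustration, a minimal data set following this coding might look as below (one row per balloon; all values are hypothetical):

bart_df <- data.frame(
  subjID    = rep(c("s01", "s02"), each = 3),  # unique subject identifiers
  pumps     = c(3, 7, 5, 2, 9, 4),             # number of pumps on each balloon
  explosion = c(0, 1, 0, 0, 1, 0)              # 0: intact, 1: burst
)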

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.
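For example, if the posteriors appear sensitive to the starting values, nwarmup can be raised while keeping it below niter (the numbers here are illustrative only):

output <- bart_par4(use_example = TRUE, niter = 6000, nwarmup = 3000)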

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
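A sketch of adjusting these control parameters through the function call (the values are illustrative, not recommendations):

output <- bart_par4(use_example = TRUE,
                    adapt_delta = 0.99,   # higher target acceptance probability
                    max_treedepth = 12)   # allow longer trajectories per iteration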

    + +

    References

    + +

    van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- bart_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

diff --git a/R/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html
index ebe14535..c62b271e 100644
--- a/R/docs/reference/bart_par4.html
+++ b/R/docs/reference/bart_par4.html
@@ -6,7 +6,7 @@
-Balloon Analogue Risk Task (Ravenzwaaij et al., 2011) — bart_par4 • hBayesDM
+Re-parameterized version of BART model with 4 parameters — bart_par4 • hBayesDM
@@ -66,7 +68,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task with the following parameters: - "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature).

    -

    Contributor: Harhim Park, Jaeyeong Yang, Ayoung Lee, Jeongbin Oh, Jiyoon Lee, Junha Jang

    -

    MODEL: Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters

    +

Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task using the re-parameterized version of the BART model with 4 parameters. It has the following parameters: phi (prior belief of balloon not bursting), eta (updating rate), gam (risk-taking parameter), tau (inverse temperature).

    +
      +
    • Task: Balloon Analogue Risk Task

    • +
    • Model: Re-parameterized version of BART model with 4 parameters (van Ravenzwaaij et al., 2011)

    • +
    -
    bart_par4(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "pumps", "explosion". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bart_par4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("bart_par4").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

    Details For the Balloon Analogue Risk Task, there should be 3 columns of data with the labels "subjID", "pumps", "explosion". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "pumps"

    The number of pumps.

    -
    "explosion"

    0: intact, 1: burst

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    pumps

    The number of pumps.

    +
    explosion

    0: intact, 1: burst

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -254,11 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>, Jaeyeong Yang <jaeyeong.yang1125@gmail.com>, Ayoung Lee <aylee2008@naver.com>, Jeongbin Oh <ows0104@gmail.com>, Jiyoon Lee <nicole.lee2001@gmail.com>, Junha Jang <andy627robo@naver.com>

    References

    -

    van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the - BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.

    +

    van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bart_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- bart_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- bart_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/choiceRT.html b/R/docs/reference/choiceRT.html
    new file mode 100644
    index 00000000..b6ff0eda
    --- /dev/null
    +++ b/R/docs/reference/choiceRT.html
    @@ -0,0 +1,518 @@
+Drift Diffusion Model — choiceRT_ddm • hBayesDM

Hierarchical Bayesian Modeling of the Choice Reaction Time Task using the Drift Diffusion Model (Ratcliff, 1978). It has the following parameters: "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    +
      +
    • Task: Choice Reaction Time Task

    • +
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • +
    + +

Individual Bayesian Modeling of the Choice Reaction Time Task using the Drift Diffusion Model (Ratcliff, 1978). It has the following parameters: "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    +
      +
    • Task: Choice Reaction Time Task

    • +
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • +
    + +
    + +
    choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    + +

    (Not available for this model)

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    + +

    (Not available for this model)

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    + + + + + + + + +
    + +
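As a sketch of passing the model-specific RTbound argument described above through '...' (0.15 seconds is an arbitrary illustrative bound, not a recommendation):

output <- choiceRT_ddm(use_example = TRUE, RTbound = 0.15)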

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("choiceRT_ddm").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("choiceRT_ddm_single").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Choice Reaction Time Task, there should be 3 columns of data with the + labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    +
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    +
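For illustration, data following this coding might be constructed as below (a single subject; all values hypothetical):

ddm_df <- data.frame(
  subjID = rep("s01", 6),                               # one subject
  choice = c(1, 1, 1, 2, 1, 2),                         # 1/2: lower/upper boundary
  RT     = c(0.435, 0.383, 0.314, 0.309, 0.412, 0.377)  # reaction times in seconds
)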

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Choice Reaction Time Task, there should be 3 columns of data with the + labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    +
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    + +

    Note

    + +

Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    +

Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    + +

    References

    + +

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    +

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- choiceRT_ddm("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
    +# Run the model and store results in "output"
    +output <- choiceRT_ddm_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

diff --git a/R/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html
index 7127c4ac..14914da6 100644
--- a/R/docs/reference/choiceRT_ddm.html
+++ b/R/docs/reference/choiceRT_ddm.html
@@ -6,7 +6,7 @@
-Choice Reaction Time Task — choiceRT_ddm • hBayesDM
+Drift Diffusion Model — choiceRT_ddm • hBayesDM
@@ -71,7 +68,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -105,37 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Choice Reaction Time Task with the following parameters: - "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    -

    MODEL: Drift Diffusion Model (Ratcliff, 1978, Psychological Review)
    *Note that this implementation is not the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.

    -

    Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing

    -

    Parameters of the DDM (parameter names in Ratcliff), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R -
    - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha -
    - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 -
    - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta -
    - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds)

    +

Hierarchical Bayesian Modeling of the Choice Reaction Time Task using the Drift Diffusion Model. It has the following parameters: alpha (boundary separation), beta (bias), delta (drift rate), tau (non-decision time).

    +
      +
    • Task: Choice Reaction Time Task

    • +
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • +
    -
    choiceRT_ddm(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -162,8 +159,8 @@

    Arg

    - + @@ -172,19 +169,19 @@

    Arg

    - + +to FALSE.

    - + @@ -200,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "RT". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    (Currently not available.) -Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE. +Not available for this model.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -RTbound: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("choiceRT_ddm").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -235,9 +250,9 @@

    Details For the Choice Reaction Time Task, there should be 3 columns of data with the labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    "RT"

    Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    +
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -267,6 +282,12 @@

    Details and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Note

    + +

Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    +

    References

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    @@ -279,8 +300,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_ddm("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- choiceRT_ddm(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- choiceRT_ddm(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    @@ -304,6 +328,8 @@ 

    Contents

  • Details
  • Note
  • References
  • See also
diff --git a/R/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html
index ce8d3169..55376920 100644
--- a/R/docs/reference/choiceRT_ddm_single.html
+++ b/R/docs/reference/choiceRT_ddm_single.html
@@ -6,7 +6,7 @@
-Choice Reaction Time Task — choiceRT_ddm_single • hBayesDM
+Drift Diffusion Model — choiceRT_ddm_single • hBayesDM
@@ -71,7 +68,7 @@
 hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -105,37 +104,35 @@
    -

    Individual Bayesian Modeling of the Choice Reaction Time Task with the following parameters: - "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    -

    MODEL: Drift Diffusion Model (Ratcliff, 1978, Psychological Review)
    *Note that this implementation is not the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.

    -

    Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing

    -

    Parameters of the DDM (parameter names in Ratcliff), from https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R -
    - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha -
    - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 -
    - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta -
    - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds)

    +

Individual Bayesian Modeling of the Choice Reaction Time Task using the Drift Diffusion Model. +It has the following parameters: alpha (boundary separation), beta (bias), delta (drift rate), tau (non-decision time).

    +
      +
    • Task: Choice Reaction Time Task

    • +
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • +
    -
    choiceRT_ddm_single(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -162,8 +159,8 @@

    Arg

    - + @@ -172,19 +169,19 @@

    Arg

    - + +to FALSE.

    - + @@ -200,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "RT". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    (Currently not available.) -Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE. +Not available for this model.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -RTbound: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).


    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm_single").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("choiceRT_ddm_single").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -235,9 +250,9 @@

    Details For the Choice Reaction Time Task, there should be 3 columns of data with the labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    "RT"

    Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    +
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -267,6 +282,12 @@
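For the single-subject variant, a hedged sketch (df_rt is the hypothetical data.frame sketched for choiceRT_ddm above; only one subject's rows are kept):

# Fit the DDM to a single subject's trials
df_one <- subset(df_rt, subjID == "s1")
output <- choiceRT_ddm_single(data = df_one, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)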

    Details and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Note

    + +

Notes: +Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. +Code for this model is based on code/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    +

    References

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    @@ -279,8 +300,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_ddm_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- choiceRT_ddm_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- choiceRT_ddm_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    @@ -304,6 +328,8 @@ 

    Contents

  • Details
  • Note
  • References
  • See also
diff --git a/R/docs/reference/choiceRT_lba.html b/R/docs/reference/choiceRT_lba.html
index 939e7edb..2c7c0e0a 100644
--- a/R/docs/reference/choiceRT_lba.html
+++ b/R/docs/reference/choiceRT_lba.html
@@ -69,7 +69,7 @@
 hBayesDM
-        0.7.2
+        0.7.2.9000
diff --git a/R/docs/reference/choiceRT_lba_single.html b/R/docs/reference/choiceRT_lba_single.html
index b0913c26..388dae56 100644
--- a/R/docs/reference/choiceRT_lba_single.html
+++ b/R/docs/reference/choiceRT_lba_single.html
@@ -69,7 +69,7 @@
 hBayesDM
-        0.7.2
+        0.7.2.9000
diff --git a/R/docs/reference/cra.html b/R/docs/reference/cra.html
new file mode 100644
index 00000000..420b62a0
--- /dev/null
+++ b/R/docs/reference/cra.html
@@ -0,0 +1,515 @@
+Exponential Subjective Value Model — cra_exp • hBayesDM

    Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using <$= MODEL_NAME +It has the following parameters: "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature).

    cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +cra_linear(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.


    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("cra_exp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("cra_linear").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the + labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    prob

    Objective probability of the variable lottery.

    +
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    +
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    +
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    +
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
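A minimal sketch of this six-column layout (the data.frame df_cra and its values are hypothetical; column names follow the Details above):

# Two trials of the Choice Under Risk and Ambiguity Task
df_cra <- data.frame(subjID     = c(1, 1),
                     prob       = c(0.5, 0.25),
                     ambig      = c(0, 0.5),
                     reward_var = c(20, 40),
                     reward_fix = c(5, 5),
                     choice     = c(1, 0))
output <- cra_exp(data = df_cra, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)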

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
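A hedged sketch of overriding these control parameters (the values shown are illustrative only; the defaults are usually appropriate):

# Tighten the sampler, e.g. when divergent transitions are reported
output <- cra_exp(use_example = TRUE, adapt_delta = 0.99, max_treedepth = 12)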


    References

    + +

    Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327

    +

    Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM


    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- cra_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- cra_linear("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
diff --git a/R/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html
index c4f7c7eb..b33544e5 100644
--- a/R/docs/reference/cra_exp.html
+++ b/R/docs/reference/cra_exp.html
@@ -6,7 +6,7 @@
-Choice Under Risk and Ambiguity Task — cra_exp • hBayesDM
+Exponential Subjective Value Model — cra_exp • hBayesDM
@@ -66,7 +68,7 @@
 hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task with the following parameters: - "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature).

    -

    Contributor: Jaeyeong Yang

    -

    MODEL: Exponential Subjective Value Model (Hsu et al., 2005, Science)

    +

Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using the Exponential Subjective Value Model. +It has the following parameters: alpha (risk attitude), beta (ambiguity attitude), gamma (inverse temperature).

    +
      +
    • Task: Choice Under Risk and Ambiguity Task

    • +
    • Model: Exponential Subjective Value Model (Hsu et al., 2005)

    • +
    -
    cra_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_exp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("cra_exp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.
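A short sketch of inspecting these components after a fit (output is assumed to come from a cra_exp call such as the one in the Examples below):

head(output$allIndPars)             # per-subject parameter summaries
str(output$parVals, max.level = 1)  # posterior samples, one element per parameter
class(output$fit)                   # the underlying stanfit object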

    @@ -222,12 +250,12 @@

    Details For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "prob"

    Objective probability of the variable lottery.

    -
    "ambig"

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    "reward_var"

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    "reward_fix"

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    "choice"

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    prob

    Objective probability of the variable lottery.

    +
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    +
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    +
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    +
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,12 +284,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

    References

    -

    Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding - to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. - https://doi.org/10.1126/science.1115327

    +

    Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327

    See also

    @@ -271,8 +299,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- cra_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- cra_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- cra_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html
    index 6c1b8e38..c1543d12 100644
    --- a/R/docs/reference/cra_linear.html
    +++ b/R/docs/reference/cra_linear.html
    @@ -6,7 +6,7 @@
     
     
     
    -Choice Under Risk and Ambiguity Task — cra_linear • hBayesDM
    +Linear Subjective Value Model — cra_linear • hBayesDM
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task with the following parameters: - "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature).

    -

    Contributor: Jaeyeong Yang

    -

    MODEL: Linear Subjective Value Model (Levy et al., 2010, J Neurophysiol)

    +

Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using the Linear Subjective Value Model. +It has the following parameters: alpha (risk attitude), beta (ambiguity attitude), gamma (inverse temperature).

    +
      +
    • Task: Choice Under Risk and Ambiguity Task

    • +
    • Model: Linear Subjective Value Model (Levy et al., 2010)

    • +
    -
    cra_linear(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    cra_linear(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_linear").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("cra_linear").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,12 +250,12 @@

    Details For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "prob"

    Objective probability of the variable lottery.

    -
    "ambig"

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    "reward_var"

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    "reward_fix"

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    "choice"

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    prob

    Objective probability of the variable lottery.

    +
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    +
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    +
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    +
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -257,12 +284,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

    References

    -

    Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural - representation of subjective value under risk and ambiguity. Journal of Neurophysiology, - 103(2), 1036-1047.

    +

    Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047.

    See also

    @@ -272,8 +299,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- cra_linear("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- cra_linear(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- cra_linear(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
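Because cra_exp and cra_linear fit the same task data, a hedged sketch of comparing them (assuming printFit accepts several fitted objects at once; runtimes are nontrivial):

# Fit both subjective value models on the example data and compare fit indices
out_exp <- cra_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
out_lin <- cra_linear(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
printFit(out_exp, out_lin)  # lower LOOIC/WAIC suggests the better-fitting model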
    diff --git a/R/docs/reference/dbdm.html b/R/docs/reference/dbdm.html
    new file mode 100644
    index 00000000..457a37ad
    --- /dev/null
    +++ b/R/docs/reference/dbdm.html
    @@ -0,0 +1,341 @@
    +Probability Weight Function — dbdm_prob_weight • hBayesDM

Hierarchical Bayesian Modeling of the Description Based Decision Making Task using <$= MODEL_NAME +It has the following parameters: "tau" (probability weight function), "rho" (subjective utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature).

    +
      +
• Task: Description Based Decision Making Task

    • +
    • Model: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)

    • +
    • Contributor: Yoonseo Zoh <zohyos7@gmail.com>

    • +
    + +
    + +
    dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dbdm_prob_weight").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
+For the Description Based Decision Making Task, there should be 8 columns of data with the + labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    opt1hprob

Possibility of getting the higher value of outcome (opt1hval) when choosing option 1.

    +
    opt2hprob

Possibility of getting the higher value of outcome (opt2hval) when choosing option 2.

    +
    opt1hval

    Possible (with opt1hprob probability) outcome of option 1.

    +
    opt1lval

    Possible (with (1 - opt1hprob) probability) outcome of option 1.

    +
    opt2hval

    Possible (with opt2hprob probability) outcome of option 2.

    +
    opt2lval

    Possible (with (1 - opt2hprob) probability) outcome of option 2.

    +
    choice

    If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
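A minimal sketch of this eight-column layout (the data.frame df_dbdm and its values are hypothetical; column names follow the Details above):

# One subject, two description-based gamble trials
df_dbdm <- data.frame(subjID    = c(1, 1),
                      opt1hprob = c(0.8, 0.6),
                      opt2hprob = c(0.2, 0.4),
                      opt1hval  = c(40, 20),
                      opt1lval  = c(0, 5),
                      opt2hval  = c(32, 25),
                      opt2lval  = c(2, 0),
                      choice    = c(1, 2))
output <- dbdm_prob_weight(data = df_dbdm, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)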

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    + +

    References

    + +

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.

    +

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539.

    +

    Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- dbdm_prob_weight("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
diff --git a/R/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html
index 906b5266..e38bca3f 100644
--- a/R/docs/reference/dbdm_prob_weight.html
+++ b/R/docs/reference/dbdm_prob_weight.html
@@ -6,7 +6,7 @@
-Description Based Decison Making Task — dbdm_prob_weight • hBayesDM
+Probability Weight Function — dbdm_prob_weight • hBayesDM
@@ -66,7 +68,7 @@
 hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Description Based Decison Making Task with the following parameters: - "tau" (probability weight function), "rho" (subject utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature).

    -

    Contributor: Yoonseo Zoh

    -

    MODEL: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)

    +

Hierarchical Bayesian Modeling of the Description Based Decision Making Task using the Probability Weight Function. +It has the following parameters: tau (probability weight function), rho (subjective utility function), lambda (loss aversion parameter), beta (inverse softmax temperature).

    +
      +
• Task: Description Based Decision Making Task

    • +
    • Model: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)

    • +
    -
    dbdm_prob_weight(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("dbdm_prob_weight").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("dbdm_prob_weight").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.
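The components above are ordinary list elements of the returned object, so they can be inspected directly after fitting; a brief sketch, using output as the hypothetical name of a fitted object:

# Sketch: inspecting the components of a fitted object named "output"
output$model                         # "dbdm_prob_weight"
head(output$allIndPars)              # per-subject parameter summaries
str(output$parVals, max.level = 1)   # posterior samples, one element per parameter
summary(output$fit)                  # the underlying stanfit object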

    @@ -223,14 +250,14 @@

Details For the Description Based Decision Making Task, there should be 8 columns of data with the
labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". It is not necessary for the columns to be in this particular order,
however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "opt1hprob"

    Possiblity of getting higher value of outcome(opt1hval) when choosing option 1.

    -
    "opt2hprob"

    Possiblity of getting higher value of outcome(opt2hval) when choosing option 2.

    -
    "opt1hval"

    Possible (with opt1hprob probability) outcome of option 1.

    -
    "opt1lval"

    Possible (with (1 - opt1hprob) probability) outcome of option 1.

    -
    "opt2hval"

    Possible (with opt2hprob probability) outcome of option 2.

    -
    "opt2lval"

    Possible (with (1 - opt2hprob) probability) outcome of option 2.

    -
    "choice"

    If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    opt1hprob

Probability of getting the higher outcome value (opt1hval) when choosing option 1.

    +
    opt2hprob

Probability of getting the higher outcome value (opt2hval) when choosing option 2.

    +
    opt1hval

    Possible (with opt1hprob probability) outcome of option 1.

    +
    opt1lval

    Possible (with (1 - opt1hprob) probability) outcome of option 1.

    +
    opt2hval

    Possible (with opt2hprob probability) outcome of option 2.

    +
    opt2lval

    Possible (with (1 - opt2hprob) probability) outcome of option 2.

    +
    choice

    If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -259,16 +286,14 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Yoonseo Zoh <zohyos7@gmail.com>

    References

    -

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A - choice prediction competition: Choices from experience and from description. Journal of - Behavioral Decision Making, 23(1), 15-47.

    -

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the - effect of rare events in risky choice. Psychological science, 15(8), 534-539.

    -

    Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from - prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.

    +

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.

    +

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539.

    +

    Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.

    See also

    @@ -278,8 +303,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dbdm_prob_weight("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dbdm_prob_weight(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dbdm_prob_weight(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
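As an orientation to the parameter list above, the weighting-utility-softmax pipeline can be sketched in a few lines of R. The one-parameter weighting form below (Tversky & Kahneman, 1992) is an illustrative assumption; the model's exact functional form is defined in the package's Stan file.

# Illustrative sketch only; functional forms are assumptions, not the package's Stan code
w  <- function(p, tau) p^tau / (p^tau + (1 - p)^tau)^(1 / tau)            # probability weighting
u  <- function(x, rho, lambda) ifelse(x >= 0, x^rho, -lambda * (-x)^rho)  # utility with loss aversion
eu <- function(ph, xh, xl, tau, rho, lambda)                              # weighted utility of a two-outcome option
  w(ph, tau) * u(xh, rho, lambda) + (1 - w(ph, tau)) * u(xl, rho, lambda)
p_opt1 <- function(eu1, eu2, beta) 1 / (1 + exp(-beta * (eu1 - eu2)))     # softmax choice of option 1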
    diff --git a/R/docs/reference/dd.html b/R/docs/reference/dd.html
    new file mode 100644
    index 00000000..eb41c0f0
    --- /dev/null
    +++ b/R/docs/reference/dd.html
    @@ -0,0 +1,1039 @@
    +
    +
    +
    +  
    +  
    +
    +
    +
    +Constant-Sensitivity (CS) Model — dd_cs • hBayesDM
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +  
    +
    +  
    +    
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model.
+It has the following parameters: "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • +
    + +

Individual Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model.
+It has the following parameters: "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • +
    + +

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Exponential Model.
+It has the following parameters: "r" (exponential discounting rate), "beta" (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Exponential Model (Samuelson, 1937)

    • +
    + +

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model.
+It has the following parameters: "k" (discounting rate), "beta" (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Hyperbolic Model (Mazur, 1987)

    • +
    + +

Individual Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model.
+It has the following parameters: "k" (discounting rate), "beta" (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Hyperbolic Model (Mazur, 1987)

    • +
    + +
    + +
    dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +dd_cs_single(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +dd_hyperbolic(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
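Before the argument table, it may help to see the standard textbook forms of the discount functions these models name; the parameterizations below are illustrative sketches and may differ in detail from the package's Stan code.

# Subjective value of a delayed amount under each model (illustrative forms)
sv_cs  <- function(amount, delay, r, s) amount * exp(-(r * delay)^s)  # constant-sensitivity (Ebert & Prelec, 2007)
sv_exp <- function(amount, delay, r) amount * exp(-r * delay)         # exponential (Samuelson, 1937)
sv_hyp <- function(amount, delay, k) amount / (1 + k * delay)         # hyperbolic (Mazur, 1987)
p_later <- function(sv_later, sv_sooner, beta)                        # softmax choice of the later option
  1 / (1 + exp(-beta * (sv_later - sv_sooner)))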
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
+"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dd_cs").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dd_cs_single").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dd_exp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dd_hyperbolic").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("dd_hyperbolic_single").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

datafile should be assigned a character value specifying the full path and name (including
+  extension information, e.g. ".txt") of the file that contains the behavioral data-set of all
+  subjects of interest for the current analysis. The file should be a tab-delimited text
+  file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Delay Discounting Task, there should be 6 columns of data with the + labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    +
    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    +
    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    +
    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    +
    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
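A minimal sketch of a conforming data set, with made-up values (shown as a data.frame; a tab-delimited file with the same column names works identically):

df <- data.frame(
  subjID        = c(1, 1, 2, 2),
  delay_later   = c(7, 28, 7, 28),        # days until the later reward
  amount_later  = c(10.5, 30.9, 13.4, 30.9),
  delay_sooner  = c(0, 0, 0, 0),          # sooner option is immediate here
  amount_sooner = c(10, 10, 10, 10),
  choice        = c(1, 0, 1, 1)           # 1 = later chosen, 0 = sooner chosen
)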

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
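Concretely, these control parameters are passed straight through the fitting call; the values below are illustrative, not recommendations:

output <- dd_cs(use_example = TRUE, niter = 4000, nwarmup = 2000,
                adapt_delta = 0.99, stepsize = 0.5, max_treedepth = 12)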

    +

    + +

    References

    + +

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    +

    +

    Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612

    +

    Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.

    +

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    +

    + + +

    Examples

    +
    # NOT RUN {
+# Run the model with example data and store results in "output"
+output <- dd_cs(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
+# Run the model with example data and store results in "output"
+output <- dd_cs_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
+# Run the model with example data and store results in "output"
+output <- dd_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
+# Run the model with example data and store results in "output"
+output <- dd_hyperbolic(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }# NOT RUN {
+# Run the model with example data and store results in "output"
+output <- dd_hyperbolic_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
    + + + + + + diff --git a/R/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html index 3eefb4aa..0fab6033 100644 --- a/R/docs/reference/dd_cs.html +++ b/R/docs/reference/dd_cs.html @@ -6,7 +6,7 @@ -Delay Discounting Task — dd_cs • hBayesDM +Constant-Sensitivity (CS) Model — dd_cs • hBayesDM @@ -30,11 +30,14 @@ - + - + @@ -65,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters: - "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    -

    MODEL: Constant-Sensitivity (CS) Model (Ebert & Prelec, 2007, Management Science)

    +

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model.
+It has the following parameters: r (exponential discounting rate), s (impatience), beta (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • +
    -
    dd_cs(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("dd_cs").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("dd_cs").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,12 +250,12 @@

    Details For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "delay_later"

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    -
    "amount_later"

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    -
    "delay_sooner"

    An integer representing the delayed days for the sooner option (e.g. 0).

    -
    "amount_sooner"

    A floating point number representing the amount for the sooner option (e.g. 10).

    -
    "choice"

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    +
    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    +
    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    +
    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    +
    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -257,8 +287,7 @@

    Details

    References

    -

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of - the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    +

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    See also

    @@ -268,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_cs("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dd_cs(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dd_cs(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
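When several discounting models have been fit to the same data, printFit can take multiple fitted objects at once to compare their LOOIC/WAIC values (object names here are hypothetical):

printFit(output_cs, output_exp, output_hyp)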
    diff --git a/R/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html
    index c398cd4e..eb09e91a 100644
    --- a/R/docs/reference/dd_cs_single.html
    +++ b/R/docs/reference/dd_cs_single.html
    @@ -6,7 +6,7 @@
     
     
     
    -Delay Discounting Task — dd_cs_single • hBayesDM
    +Constant-Sensitivity (CS) Model — dd_cs_single • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,31 +104,35 @@
    -

    Individual Bayesian Modeling of the Delay Discounting Task with the following parameters: - "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    -

    MODEL: Constant-Sensitivity (CS) Model (Ebert & Prelec, 2007, Management Science)

    +

Individual Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model.
+It has the following parameters: r (exponential discounting rate), s (impatience), beta (inverse temperature).

    +
      +
    • Task: Delay Discounting Task

    • +
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • +
    -
    dd_cs_single(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    dd_cs_single(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

@@ -150,8 +159,8 @@ Arguments
-data: A .txt file containing the data to be modeled. Data columns should be labeled as:
-"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
+data, datafile: A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
+"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
@@ -160,18 +169,19 @@ Arguments
-inits: Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.
+inits: Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.
-modelRegressor: Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.
+modelRegressor: Whether to export model-based regressors (TRUE or FALSE).
+Not available for this model.
 vb: Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.
 inc_postpred: Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.
+If set to TRUE, it includes: "y_pred"
@@ -187,27 +197,46 @@ Arguments
 max_treedepth: Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
+use_example: Whether to use example data. By default, set to FALSE.
+choose_data: Whether to choose data with an interactive window. By default, set to FALSE.
-...: Not used for this model.
+...: For this model, there is no model-specific argument.
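
As a hedged aside (not from the patch), user-supplied initial values might look as follows, assuming inits also accepts a numeric vector ordered as the model's parameters (r, s, beta):

# Hypothetical illustration of custom initial values for dd_cs_single;
# assumes a numeric vector in the model's parameter order (r, s, beta).
output <- dd_cs_single(data = df, inits = c(0.1, 0.5, 1.0),
                       niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)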

    Value

    A class "hBayesDM" object modelData with the following components:

model: Character value that is the name of the model ("dd_cs_single").

allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals: List object containing the posterior samples over different parameters.

fit: A class stanfit object that contains the fitted Stan model.

rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor: List object containing the extracted model-based regressors.
    @@ -221,12 +250,12 @@

Details

For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

-"subjID": A unique identifier for each subject in the data-set.
-"delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28).
-"amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
-"delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0).
-"amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10).
-"choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.
+subjID: A unique identifier for each subject in the data-set.
+delay_later: An integer representing the delayed days for the later option (e.g. 1, 6, 28).
+amount_later: A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
+delay_sooner: An integer representing the delayed days for the sooner option (e.g. 0).
+amount_sooner: A floating point number representing the amount for the sooner option (e.g. 10).
+choice: If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -258,8 +287,7 @@

    Details

    References

-Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of
-the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671
+Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    See also

@@ -269,8 +297,11 @@ See also

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_cs_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dd_cs_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dd_cs_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
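
A minimal sketch of a data.frame df with the six required columns; the rows are made-up values for illustration only:

# One subject, three trials; values are invented.
df <- data.frame(
  subjID        = c(1, 1, 1),
  delay_later   = c(1, 6, 28),
  amount_later  = c(10.5, 13.4, 30.9),
  delay_sooner  = c(0, 0, 0),
  amount_sooner = c(10, 10, 10),
  choice        = c(0, 1, 1)
)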
    diff --git a/R/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html
    index 39975fde..99ea87f9 100644
    --- a/R/docs/reference/dd_exp.html
    +++ b/R/docs/reference/dd_exp.html
    @@ -6,7 +6,7 @@
     
     
     
    -Delay Discounting Task — dd_exp • hBayesDM
    +Exponential Model — dd_exp • hBayesDM
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,30 +104,35 @@
-Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters:
-"r" (exponential discounting rate), "beta" (inverse temperature).
-
-MODEL: Exponential Model (Samuelson, 1937, The Review of Economic Studies)
+Hierarchical Bayesian Modeling of the Delay Discounting Task using the Exponential Model.
+It has the following parameters: r (exponential discounting rate), beta (inverse temperature).
+
+• Task: Delay Discounting Task
+• Model: Exponential Model (Samuelson, 1937)
-dd_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
-  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
-  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
-  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

@@ -149,8 +159,8 @@ Arguments
-data: A .txt file containing the data to be modeled. Data columns should be labeled as:
-"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
+data, datafile: A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
+"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
@@ -159,18 +169,19 @@ Arguments
-inits: Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.
+inits: Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.
-modelRegressor: Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.
+modelRegressor: Whether to export model-based regressors (TRUE or FALSE).
+Not available for this model.
 vb: Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.
 inc_postpred: Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.
+If set to TRUE, it includes: "y_pred"
@@ -186,27 +197,46 @@ Arguments
 max_treedepth: Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
+use_example: Whether to use example data. By default, set to FALSE.
+choose_data: Whether to choose data with an interactive window. By default, set to FALSE.
-...: Not used for this model.
+...: For this model, there is no model-specific argument.

    Value

    A class "hBayesDM" object modelData with the following components:

model: Character value that is the name of the model ("dd_exp").

allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals: List object containing the posterior samples over different parameters.

fit: A class stanfit object that contains the fitted Stan model.

rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor: List object containing the extracted model-based regressors.

    @@ -220,12 +250,12 @@

Details

For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

-"subjID": A unique identifier for each subject in the data-set.
-"delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28).
-"amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
-"delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0).
-"amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10).
-"choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.
+subjID: A unique identifier for each subject in the data-set.
+delay_later: An integer representing the delayed days for the later option (e.g. 1, 6, 28).
+amount_later: A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
+delay_sooner: An integer representing the delayed days for the sooner option (e.g. 0).
+amount_sooner: A floating point number representing the amount for the sooner option (e.g. 10).
+choice: If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -257,8 +287,7 @@

    Details

    References

-Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2),
-155. http://doi.org/10.2307/2967612
+Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612

    See also

@@ -268,8 +297,11 @@ See also

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dd_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dd_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
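
For reference, a sketch of the exponential discount function (Samuelson, 1937) behind dd_exp; illustrative only, with an arbitrary r:

# Exponential discounting: V = amount * exp(-r * delay)
exp_value <- function(amount, delay, r = 0.1) {
  amount * exp(-r * delay)
}
exp_value(13.4, 6)  # subjective value of 13.4 delayed by 6 days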
    diff --git a/R/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html
    index d5afd7e1..0ae39300 100644
    --- a/R/docs/reference/dd_hyperbolic.html
    +++ b/R/docs/reference/dd_hyperbolic.html
    @@ -6,7 +6,7 @@
     
     
     
    -Delay Discounting Task — dd_hyperbolic • hBayesDM
    +Hyperbolic Model — dd_hyperbolic • hBayesDM
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
-Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters:
-"k" (discounting rate), "beta" (inverse temperature).
-
-MODEL: Hyperbolic Model (Mazur, 1987)
+Hierarchical Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model.
+It has the following parameters: k (discounting rate), beta (inverse temperature).
+
+• Task: Delay Discounting Task
+• Model: Hyperbolic Model (Mazur, 1987)
-dd_hyperbolic(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+dd_hyperbolic(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

@@ -150,8 +159,8 @@ Arguments
-data: A .txt file containing the data to be modeled. Data columns should be labeled as:
-"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
+data, datafile: A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
+"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
@@ -160,18 +169,19 @@ Arguments
-inits: Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.
+inits: Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.
-modelRegressor: Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.
+modelRegressor: Whether to export model-based regressors (TRUE or FALSE).
+Not available for this model.
 vb: Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.
 inc_postpred: Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.
+If set to TRUE, it includes: "y_pred"
@@ -187,27 +197,46 @@ Arguments
 max_treedepth: Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
+use_example: Whether to use example data. By default, set to FALSE.
+choose_data: Whether to choose data with an interactive window. By default, set to FALSE.
-...: Not used for this model.
+...: For this model, there is no model-specific argument.

    Value

    A class "hBayesDM" object modelData with the following components:

model: Character value that is the name of the model ("dd_hyperbolic").

allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals: List object containing the posterior samples over different parameters.

fit: A class stanfit object that contains the fitted Stan model.

rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor: List object containing the extracted model-based regressors.

    @@ -221,12 +250,12 @@

Details

For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

-"subjID": A unique identifier for each subject in the data-set.
-"delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28).
-"amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
-"delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0).
-"amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10).
-"choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.
+subjID: A unique identifier for each subject in the data-set.
+delay_later: An integer representing the delayed days for the later option (e.g. 1, 6, 28).
+amount_later: A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
+delay_sooner: An integer representing the delayed days for the sooner option (e.g. 0).
+amount_sooner: A floating point number representing the amount for the sooner option (e.g. 10).
+choice: If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -268,8 +297,11 @@ See also

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_hyperbolic("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dd_hyperbolic(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dd_hyperbolic(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
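
Likewise, a sketch of the hyperbolic discount function (Mazur, 1987) behind dd_hyperbolic; illustrative only, with an arbitrary k:

# Hyperbolic discounting: V = amount / (1 + k * delay)
hyp_value <- function(amount, delay, k = 0.1) {
  amount / (1 + k * delay)
}
hyp_value(13.4, 6)  # subjective value of 13.4 delayed by 6 days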
    diff --git a/R/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html
    index ccc040ba..8eb82cd2 100644
    --- a/R/docs/reference/dd_hyperbolic_single.html
    +++ b/R/docs/reference/dd_hyperbolic_single.html
    @@ -6,7 +6,7 @@
     
     
     
    -Delay Discounting Task — dd_hyperbolic_single • hBayesDM
    +Hyperbolic Model — dd_hyperbolic_single • hBayesDM
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,31 +104,35 @@
-Individual Bayesian Modeling of the Delay Discounting Task with the following parameters:
-"k" (discounting rate), "beta" (inverse temperature).
-
-MODEL: Hyperbolic Model (Mazur, 1987)
+Individual Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model.
+It has the following parameters: k (discounting rate), beta (inverse temperature).
+
+• Task: Delay Discounting Task
+• Model: Hyperbolic Model (Mazur, 1987)
-dd_hyperbolic_single(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

@@ -150,8 +159,8 @@ Arguments
-data: A .txt file containing the data to be modeled. Data columns should be labeled as:
-"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
+data, datafile: A data.frame object (data) or a filepath for a tab-separated txt file
+containing the data (datafile) to be modeled. Data columns should be labeled as:
+"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.
@@ -160,18 +169,19 @@ Arguments
-inits: Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.
+inits: Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.
-modelRegressor: Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.
+modelRegressor: Whether to export model-based regressors (TRUE or FALSE).
+Not available for this model.
 vb: Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.
 inc_postpred: Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.
+If set to TRUE, it includes: "y_pred"
@@ -187,27 +197,46 @@ Arguments
 max_treedepth: Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
+use_example: Whether to use example data. By default, set to FALSE.
+choose_data: Whether to choose data with an interactive window. By default, set to FALSE.
-...: Not used for this model.
+...: For this model, there is no model-specific argument.

    Value

    A class "hBayesDM" object modelData with the following components:

model: Character value that is the name of the model ("dd_hyperbolic_single").

allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals: List object containing the posterior samples over different parameters.

fit: A class stanfit object that contains the fitted Stan model.

rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor: List object containing the extracted model-based regressors.

    @@ -221,12 +250,12 @@

Details

For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

-"subjID": A unique identifier for each subject in the data-set.
-"delay_later": An integer representing the delayed days for the later option (e.g. 1, 6, 28).
-"amount_later": A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
-"delay_sooner": An integer representing the delayed days for the sooner option (e.g. 0).
-"amount_sooner": A floating point number representing the amount for the sooner option (e.g. 10).
-"choice": If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.
+subjID: A unique identifier for each subject in the data-set.
+delay_later: An integer representing the delayed days for the later option (e.g. 1, 6, 28).
+amount_later: A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).
+delay_sooner: An integer representing the delayed days for the sooner option (e.g. 0).
+amount_sooner: A floating point number representing the amount for the sooner option (e.g. 10).
+choice: If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

@@ -268,8 +297,11 @@ See also

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_hyperbolic_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- dd_hyperbolic_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- dd_hyperbolic_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
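
Once several single-subject models have been fit to the same data, their fits can be put side by side; the sketch below assumes printFit accepts multiple hBayesDM objects:

# Compare candidate discounting models (illustrative).
out_cs  <- dd_cs_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
out_hyp <- dd_hyperbolic_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
printFit(out_cs, out_hyp)  # WAIC / LOOIC side by side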
    diff --git a/R/docs/reference/estimate_mode.html b/R/docs/reference/estimate_mode.html
    index 2d15bd14..57e3b6ee 100644
    --- a/R/docs/reference/estimate_mode.html
    +++ b/R/docs/reference/estimate_mode.html
    @@ -64,7 +64,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
@@ -99,7 +101,7 @@
diff --git a/R/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html
index 3fb18fc2..a7e00e9c 100644
--- a/R/docs/reference/extract_ic.html
+++ b/R/docs/reference/extract_ic.html
@@ -63,7 +63,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
@@ -98,7 +100,7 @@
diff --git a/R/docs/reference/gng.html b/R/docs/reference/gng.html
new file mode 100644
index 00000000..9a6fc51c
--- /dev/null
+++ b/R/docs/reference/gng.html
@@ -0,0 +1,855 @@
+RW + noise — gng_m1 • hBayesDM

+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise model.
+It has the following parameters: "xi" (noise), "ep" (learning rate), "rho" (effective size).
+
+• Task: Orthogonalized Go/Nogo Task
+• Model: RW + noise (Guitart-Masip et al., 2012)
+
+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise + bias model.
+It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size).
+
+• Task: Orthogonalized Go/Nogo Task
+• Model: RW + noise + bias (Guitart-Masip et al., 2012)
+
+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise + bias + pi model.
+It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size).
+
+• Task: Orthogonalized Go/Nogo Task
+• Model: RW + noise + bias + pi (Guitart-Masip et al., 2012)
+
+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW (rew/pun) + noise + bias + pi model.
+It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity).
+
+• Task: Orthogonalized Go/Nogo Task
+• Model: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013)
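
A sketch of the Rescorla-Wagner core these four models share, assuming the standard formulation with effective reward rho * outcome and noise xi mixed into a logistic choice rule; the exact Stan parameterization may differ in detail:

# Illustrative RW + noise building blocks (parameter values arbitrary).
update_q <- function(q, outcome, ep = 0.2, rho = 5) {
  q + ep * (rho * outcome - q)  # move Q toward the effective reward
}
p_go <- function(w_go, w_nogo, xi = 0.1) {
  xi / 2 + (1 - xi) * plogis(w_go - w_nogo)  # irreducible noise floor xi
}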
    gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.


    Value

+A class "hBayesDM" object modelData with the following components:
+
+model: Character value that is the name of the model ("gng_m1").
+allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.
+parVals: List object containing the posterior samples over different parameters.
+fit: A class stanfit object that contains the fitted Stan model.
+rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.
+modelRegressor: List object containing the extracted model-based regressors.
+
+A class "hBayesDM" object modelData with the following components:
+
+model: Character value that is the name of the model ("gng_m2").
+allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.
+parVals: List object containing the posterior samples over different parameters.
+fit: A class stanfit object that contains the fitted Stan model.
+rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.
+modelRegressor: List object containing the extracted model-based regressors.
+
+A class "hBayesDM" object modelData with the following components:
+
+model: Character value that is the name of the model ("gng_m3").
+allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.
+parVals: List object containing the posterior samples over different parameters.
+fit: A class stanfit object that contains the fitted Stan model.
+rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.
+modelRegressor: List object containing the extracted model-based regressors.
+
+A class "hBayesDM" object modelData with the following components:
+
+model: Character value that is the name of the model ("gng_m4").
+allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.
+parVals: List object containing the posterior samples over different parameters.
+fit: A class stanfit object that contains the fitted Stan model.
+rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.
+modelRegressor: List object containing the extracted model-based regressors.

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the + labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
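
A minimal sketch (made-up rows) of a data set with the four required columns:

# Illustrative go/nogo data; values are invented.
gng_df <- data.frame(
  subjID     = c(1, 1, 1),
  cue        = c(1, 3, 2),    # nominal cue: 1, 2, 3, or 4
  keyPressed = c(1, 0, 1),    # press == 1; no press == 0
  outcome    = c(1, 0, -1)    # positive == 1; neutral == 0; negative == -1
)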

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
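
If the sampler reports divergent transitions, a common adjustment is sketched below; the call assumes the data argument from the signatures above, and the values are illustrative rather than recommendations:

# Illustrative: tighten the sampler rather than accepting divergences.
output <- gng_m1(data = gng_df, niter = 4000, nwarmup = 1000,
                 adapt_delta = 0.99, max_treedepth = 12)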

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the + labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the + labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the + labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
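As a minimal sketch, these control parameters are passed directly to the model function; the values below are illustrative, not recommendations:

# Tighten the sampler, e.g. after warnings about divergent transitions
output <- gng_m1(use_example = TRUE, niter = 4000, nwarmup = 1000,
                 adapt_delta = 0.99, max_treedepth = 12)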

    + +

    References

    + +

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    +

    Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- gng_m1("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- gng_m2("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- gng_m3("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- gng_m4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +


    +
    +
    +
diff --git a/R/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html
index 4730ee62..c3656746 100644
--- a/R/docs/reference/gng_m1.html
+++ b/R/docs/reference/gng_m1.html
@@ -6,7 +6,7 @@
-Orthogonalized Go/Nogo Task — gng_m1 • hBayesDM
+RW + noise — gng_m1 • hBayesDM
@@ -30,11 +30,14 @@
@@ -65,7 +68,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "rho" (effective size).

    -

    MODEL: RW + noise (Guitart-Masip et al., 2012, Neuroimage)

    +

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise. +It has the following parameters: xi (noise), ep (learning rate), rho (effective size).

    +
      +
    • Task: Orthogonalized Go/Nogo Task

    • +
    • Model: RW + noise (Guitart-Masip et al., 2012)

    • +
    -
    gng_m1(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m1").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("gng_m1").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.
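As a sketch of working with the returned components listed above (assuming a fitted object named output):

# Per-subject parameter summaries, as specified by indPars
head(output$allIndPars)

# The fit component is a stanfit object, so rstan utilities apply
rstan::traceplot(output$fit)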

    @@ -220,10 +250,10 @@

    Details For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "cue"

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    "keyPressed"

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    "outcome"

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,9 +285,7 @@

    Details

    References

    -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    +

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- gng_m1("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- gng_m1(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- gng_m1(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html
    index e20981a4..e040c9cb 100644
    --- a/R/docs/reference/gng_m2.html
    +++ b/R/docs/reference/gng_m2.html
    @@ -6,7 +6,7 @@
     
     
     
    -Orthogonalized Go/Nogo Task — gng_m2 • hBayesDM
    +RW + noise + bias — gng_m2 • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size).

    -

    MODEL: RW + noise + bias (Guitart-Masip et al., 2012, Neuroimage)

    +

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias. +It has the following parameters: xi (noise), ep (learning rate), b (action bias), rho (effective size).

    +
      +
    • Task: Orthogonalized Go/Nogo Task

    • +
    • Model: RW + noise + bias (Guitart-Masip et al., 2012)

    • +
    -
    gng_m2(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m2").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("gng_m2").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,10 +250,10 @@

    Details For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "cue"

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    "keyPressed"

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    "outcome"

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,9 +285,7 @@

    Details

    References

    -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    +

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- gng_m2("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- gng_m2(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- gng_m2(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html
    index 595a883a..6f10acd3 100644
    --- a/R/docs/reference/gng_m3.html
    +++ b/R/docs/reference/gng_m3.html
    @@ -6,7 +6,7 @@
     
     
     
    -Orthogonalized Go/Nogo Task — gng_m3 • hBayesDM
    +RW + noise + bias + pi — gng_m3 • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size).

    -

    MODEL: RW + noise + bias + pi (Guitart-Masip et al., 2012, Neuroimage)

    +

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias + pi. +It has the following parameters: xi (noise), ep (learning rate), b (action bias), pi (Pavlovian bias), rho (effective size).

    +
      +
    • Task: Orthogonalized Go/Nogo Task

    • +
    • Model: RW + noise + bias + pi (Guitart-Masip et al., 2012)

    • +
    -
    gng_m3(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m3").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("gng_m3").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,10 +250,10 @@

    Details For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "cue"

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    "keyPressed"

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    "outcome"

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,9 +285,7 @@

    Details

    References

    -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    +

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- gng_m3("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- gng_m3(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- gng_m3(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html
    index 475469a4..e0c69d84 100644
    --- a/R/docs/reference/gng_m4.html
    +++ b/R/docs/reference/gng_m4.html
    @@ -6,7 +6,7 @@
     
     
     
    -Orthogonalized Go/Nogo Task — gng_m4 • hBayesDM
    +RW (rew/pun) + noise + bias + pi — gng_m4 • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity).

    -

    MODEL: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013, J Neuro)

    +

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW (rew/pun) + noise + bias + pi. +It has the following parameters: xi (noise), ep (learning rate), b (action bias), pi (Pavlovian bias), rhoRew (reward sensitivity), rhoPun (punishment sensitivity).

    +
      +
    • Task: Orthogonalized Go/Nogo Task

    • +
    • Model: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013)

    • +
    -
    gng_m4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("gng_m4").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,10 +250,10 @@

    Details For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "cue"

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    "keyPressed"

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    "outcome"

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    +
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    +
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,9 +285,7 @@

    Details

    References

    -

    Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta - Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. - http://doi.org/10.1523/JNEUROSCI.5754-12.2013

    +

    Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013

    See also

    @@ -267,8 +295,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- gng_m4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- gng_m4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- gng_m4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/hBayesDM-package.html b/R/docs/reference/hBayesDM-package.html
    index 8a9cb039..73ffedfe 100644
    --- a/R/docs/reference/hBayesDM-package.html
    +++ b/R/docs/reference/hBayesDM-package.html
    @@ -110,7 +110,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -124,14 +124,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -145,7 +147,7 @@
diff --git a/R/docs/reference/hBayesDM_model.html b/R/docs/reference/hBayesDM_model.html
index 43834a2b..5bf36545 100644
--- a/R/docs/reference/hBayesDM_model.html
+++ b/R/docs/reference/hBayesDM_model.html
@@ -64,7 +64,7 @@
         hBayesDM
-        0.7.2
+        0.7.2.9000
    @@ -78,14 +78,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,7 +101,7 @@
diff --git a/R/docs/reference/igt.html b/R/docs/reference/igt.html
new file mode 100644
index 00000000..0a1ed428
--- /dev/null
+++ b/R/docs/reference/igt.html
@@ -0,0 +1,861 @@
+Outcome-Representation Learning Model — igt_orl • hBayesDM
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Outcome-Representation Learning Model. +It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Outcome-Representation Learning Model (Haines et al., 2018)

    • +
    • Contributor: Nate Haines <haines.175@osu.edu>

    • +
    + +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Decay-RI model. +It has the following parameters: "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014)

    • +
    + +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Delta model. +It has the following parameters: "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008)

    • +
    + +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Value-Plus-Perseverance model. +It has the following parameters: "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Value-Plus-Perseverance (Worthy et al., 2013)

    • +
    + +
    + +
    igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +igt_pvl_decay(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    + + + + + + + + +
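For example, a hypothetical igt_orl call that leaves payscale at its default, so that raw payoffs such as +100 or -1250 are divided by 100 before fitting:

# Fit the ORL model to the bundled example data, scaling payoffs down by a factor of 100
output <- igt_orl(use_example = TRUE, niter = 4000, nwarmup = 1000, payscale = 100)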
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("igt_orl").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("igt_pvl_decay").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("igt_pvl_delta").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("igt_vpp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Iowa Gambling Task, there should be 4 columns of data with the + labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
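For instance, a minimal tab-delimited IGT data file might look like the following (rows and values are illustrative only; columns are separated by tab characters):

subjID	choice	gain	loss
1	1	100	0
1	3	50	-50
2	2	100	-1250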

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.


    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Iowa Gambling Task, there should be 4 columns of data with the + labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    + +

    References

    + +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    +

    Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688

    +

    Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849

    +

    Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- igt_orl("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
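+
+# A hedged sketch: printFit() can also be given several fitted hBayesDM objects
+# at once to compare their fit estimates (output2 here is a hypothetical second fit)
+printFit(output, output2)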
+# }

# NOT RUN {
    +# Run the model and store results in "output"
    +output <- igt_pvl_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }

# NOT RUN {
    +# Run the model and store results in "output"
    +output <- igt_pvl_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }

# NOT RUN {
    +# Run the model and store results in "output"
    +output <- igt_vpp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
    + + + + + + diff --git a/R/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html index 4341dfcd..73c4eee2 100644 --- a/R/docs/reference/igt_orl.html +++ b/R/docs/reference/igt_orl.html @@ -6,7 +6,7 @@ -Iowa Gambling Task — igt_orl • hBayesDM +Outcome-Representation Learning Model — igt_orl • hBayesDM @@ -30,12 +30,14 @@ - + - + @@ -66,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters: - "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight).

    -

    Contributor: Nate Haines

    -

    MODEL: Outcome-Representation Learning Model (Haines et al., 2018, Cognitive Science)

    +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Outcome-Representation Learning Model. +It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), K (perseverance decay), betaF (outcome frequency weight), betaP (perseverance weight).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Outcome-Representation Learning Model (Haines et al., 2018)

    • +
    -
    igt_orl(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -payscale: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_orl").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("igt_orl").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.
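
As a usage sketch, these components can be inspected as list elements of the returned object (component names as documented above):

output$model               # "igt_orl"
head(output$allIndPars)    # summarized per-subject parameter values
fit <- output$fit          # the underlying stanfit object, for further diagnostics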

    @@ -223,10 +250,10 @@

    Details For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    "gain"

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,12 +282,13 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Nate Haines <haines.175@osu.edu>

    References

    -

    Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A - Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. - https://doi.org/10.1111/cogs.12688

    +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    +

    Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688

    See also

    @@ -270,8 +298,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- igt_orl("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- igt_orl(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- igt_orl(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
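+
+# A hedged sketch: vb = TRUE (documented above) draws approximately from the
+# posterior via variational inference; faster, but less reliable than full MCMC
+output_vb <- igt_orl(use_example = TRUE, vb = TRUE)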
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html
    index fcfcb05a..8cdd00f3 100644
    --- a/R/docs/reference/igt_pvl_decay.html
    +++ b/R/docs/reference/igt_pvl_decay.html
    @@ -6,7 +6,7 @@
     
     
     
    -Iowa Gambling Task — igt_pvl_decay • hBayesDM
    +Prospect Valence Learning (PVL) Decay-RI — igt_pvl_decay • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters: - "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    -

    MODEL: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014, Frontiers in Psychology)

    +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Decay-RI model. +It has the following parameters: A (decay rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014)

    • +
    -
    igt_pvl_decay(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    igt_pvl_decay(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -payscale: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_pvl_decay").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("igt_pvl_decay").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,10 +250,10 @@

    Details For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    "gain"

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -257,10 +285,8 @@

    Details

    References

    -

    Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, - J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence - from computational modeling with pure users. Frontiers in Psychology, 5, 1376. - http://doi.org/10.3389/fpsyg.2014.00849

    +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    +

    Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849

    See also

    @@ -270,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- igt_pvl_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- igt_pvl_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- igt_pvl_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html
    index 37204c65..be2db542 100644
    --- a/R/docs/reference/igt_pvl_delta.html
    +++ b/R/docs/reference/igt_pvl_delta.html
    @@ -6,7 +6,7 @@
     
     
     
    -Iowa Gambling Task (Ahn et al., 2008) — igt_pvl_delta • hBayesDM
    +Prospect Valence Learning (PVL) Delta — igt_pvl_delta • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters: - "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    -

    MODEL: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008, Cognitive Science)

    +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Delta model. +It has the following parameters: A (learning rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008)

    • +
    -
    igt_pvl_delta(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -payscale: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_pvl_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("igt_pvl_delta").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,10 +250,10 @@

    Details For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    "gain"

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -257,9 +285,8 @@

    Details

    References

    -

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models - using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. - http://doi.org/10.1080/03640210802352992

    +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    See also

    @@ -269,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- igt_pvl_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- igt_pvl_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- igt_pvl_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html
    index 0347eb9b..f5e90177 100644
    --- a/R/docs/reference/igt_vpp.html
    +++ b/R/docs/reference/igt_vpp.html
    @@ -6,7 +6,7 @@
     
     
     
    -Iowa Gambling Task — igt_vpp • hBayesDM
    +Value-Plus-Perseverance — igt_vpp • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters: - "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight).

    -

    MODEL: Value-Plus-Perseverance (Worthy et al., 2013, Frontiers in Psychology)

    +

Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Value-Plus-Perseverance model. +It has the following parameters: A (learning rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion), epP (gain impact), epN (loss impact), K (decay rate), w (RL weight).

    +
      +
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • +
    • Model: Value-Plus-Perseverance (Worthy et al., 2013)

    • +
    -
    igt_vpp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "choice", "gain", "loss". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -payscale: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_vpp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("igt_vpp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,10 +250,10 @@

    Details For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    "gain"

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    "loss"

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    +
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    +
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,9 +285,8 @@

    Details

    References

    -

    Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and - win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical - Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001

    +

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    +

    Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- igt_vpp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- igt_vpp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- igt_vpp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/index.html b/R/docs/reference/index.html
    index dda5dfbe..4895f067 100644
    --- a/R/docs/reference/index.html
    +++ b/R/docs/reference/index.html
    @@ -60,7 +60,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -74,14 +74,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -117,61 +119,61 @@

    bandit2arm_delta()

    -

    2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004)

    +

    Rescorla-Wagner (Delta) Model

    bandit4arm2_kalman_filter()

    -

    4-Armed Bandit Task (modified)

    +

    Kalman Filter

    bandit4arm_2par_lapse()

    -

    4-Armed Bandit Task

    +

    3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)

    bandit4arm_4par()

    -

    4-Armed Bandit Task

    +

    4 Parameter Model, without C (choice perseveration)

    bandit4arm_lapse()

    -

    4-Armed Bandit Task

    +

    5 Parameter Model, without C (choice perseveration) but with xi (noise)

    bandit4arm_lapse_decay()

    -

    4-Armed Bandit Task

    +

    5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).

    bandit4arm_singleA_lapse()

    -

    4-Armed Bandit Task

    +

    4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.

    bart_par4()

    -

    Balloon Analogue Risk Task (Ravenzwaaij et al., 2011)

    +

    Re-parameterized version of BART model with 4 parameters

    choiceRT_ddm()

    -

    Choice Reaction Time Task

    +

    Drift Diffusion Model

    choiceRT_ddm_single()

    -

    Choice Reaction Time Task

    +

    Drift Diffusion Model

    @@ -189,217 +191,217 @@

    cra_exp()

    -

    Choice Under Risk and Ambiguity Task

    +

    Exponential Subjective Value Model

    cra_linear()

    -

    Choice Under Risk and Ambiguity Task

    +

    Linear Subjective Value Model

    dbdm_prob_weight()

    -

    Description Based Decison Making Task

    +

    Probability Weight Function

    dd_cs()

    -

    Delay Discounting Task

    +

    Constant-Sensitivity (CS) Model

    dd_cs_single()

    -

    Delay Discounting Task

    +

    Constant-Sensitivity (CS) Model

    dd_exp()

    -

    Delay Discounting Task

    +

    Exponential Model

    dd_hyperbolic()

    -

    Delay Discounting Task

    +

    Hyperbolic Model

    dd_hyperbolic_single()

    -

    Delay Discounting Task

    +

    Hyperbolic Model

    gng_m1()

    -

    Orthogonalized Go/Nogo Task

    +

    RW + noise

    gng_m2()

    -

    Orthogonalized Go/Nogo Task

    +

    RW + noise + bias

    gng_m3()

    -

    Orthogonalized Go/Nogo Task

    +

    RW + noise + bias + pi

    gng_m4()

    -

    Orthogonalized Go/Nogo Task

    +

    RW (rew/pun) + noise + bias + pi

    igt_orl()

    -

    Iowa Gambling Task

    +

    Outcome-Representation Learning Model

    igt_pvl_decay()

    -

    Iowa Gambling Task

    +

    Prospect Valence Learning (PVL) Decay-RI

    igt_pvl_delta()

    -

    Iowa Gambling Task (Ahn et al., 2008)

    +

    Prospect Valence Learning (PVL) Delta

    igt_vpp()

    -

    Iowa Gambling Task

    +

    Value-Plus-Perseverance

    peer_ocu()

    -

    Peer Influence Task (Chung et al., 2015)

    +

    Other-Conferred Utility (OCU) Model

    prl_ewa()

    -

    Probabilistic Reversal Learning Task

    +

    Experience-Weighted Attraction Model

    prl_fictitious()

    -

    Probabilistic Reversal Learning Task

    +

    Fictitious Update Model

    prl_fictitious_multipleB()

    -

    Probabilistic Reversal Learning Task

    +

    Fictitious Update Model

    prl_fictitious_rp()

    -

    Probabilistic Reversal Learning Task

    +

    Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)

    prl_fictitious_rp_woa()

    -

    Probabilistic Reversal Learning Task

    +

    Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)

    prl_fictitious_woa()

    -

    Probabilistic Reversal Learning Task

    +

    Fictitious Update Model, without alpha (indecision point)

    prl_rp()

    -

    Probabilistic Reversal Learning Task

    +

    Reward-Punishment Model

    prl_rp_multipleB()

    -

    Probabilistic Reversal Learning Task

    +

    Reward-Punishment Model

    pst_gainloss_Q()

    -

    Probabilistic Selection Task

    +

    Gain-Loss Q Learning Model

    ra_noLA()

    -

    Risk Aversion Task

    +

    Prospect Theory, without loss aversion (LA) parameter

    ra_noRA()

    -

    Risk Aversion Task

    +

    Prospect Theory, without risk aversion (RA) parameter

    ra_prospect()

    -

    Risk Aversion Task

    +

    Prospect Theory

    rdt_happiness()

    -

    Risky Decision Task

    +

    Happiness Computational Model

    ts_par4()

    -

    Two-Step Task (Daw et al., 2011)

    +

    Hybrid Model, with 4 parameters

    ts_par6()

    -

    Two-Step Task (Daw et al., 2011)

    +

    Hybrid Model, with 6 parameters

    ts_par7()

    -

    Two-Step Task (Daw et al., 2011)

    +

    Hybrid Model, with 7 parameters (original model)

    ug_bayes()

    -

    Norm-Training Ultimatum Game

    +

    Ideal Observer Model

    ug_delta()

    -

    Norm-Training Ultimatum Game

    +

    Rescorla-Wagner (Delta) Model

    wcs_sql()

    -

    Wisconsin Card Sorting Task

    +

    Sequential Learning Model

    @@ -463,20 +465,6 @@

    Function for extracting Rhat values from an hBayesDM object

    - - - -

    Internal functions

    -

    These functions are for the developmental purpose.

    - - - - - -

    hBayesDM_model()

    - -

    hBayesDM Model Base Function

    - @@ -486,7 +474,6 @@

    Contents

    diff --git a/R/docs/reference/multiplot.html b/R/docs/reference/multiplot.html index cb1fe81b..9ddaa923 100644 --- a/R/docs/reference/multiplot.html +++ b/R/docs/reference/multiplot.html @@ -64,7 +64,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000 @@ -78,14 +78,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,7 +101,7 @@
    diff --git a/R/docs/reference/peer.html b/R/docs/reference/peer.html new file mode 100644 index 00000000..a9be34d5 --- /dev/null +++ b/R/docs/reference/peer.html @@ -0,0 +1,339 @@ + + + + + + + + +Other-Conferred Utility (OCU) Model — peer_ocu • hBayesDM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the Peer Influence Task using the Other-Conferred Utility (OCU) Model. +It has the following parameters: "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility).

    + + +
    + +
    peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("peer_ocu").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables. For the Peer Influence Task, there should be 8 columns of data with the labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

condition

0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).

p_gamble

Probability of receiving a high payoff (same for both options).

safe_Hpayoff

High payoff of the safe option.

safe_Lpayoff

Low payoff of the safe option.

risky_Hpayoff

High payoff of the risky option.

risky_Lpayoff

Low payoff of the risky option.

choice

Which option was chosen? 0: safe, 1: risky.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
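A minimal sketch of checking such a file before fitting (the path "peer_data.txt" is hypothetical; the required column names are those listed above):

# Hypothetical file path; required columns taken from the table above
dat <- read.table("peer_data.txt", header = TRUE, sep = "\t")
required <- c("subjID", "condition", "p_gamble", "safe_Hpayoff",
              "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice")
missing <- setdiff(required, colnames(dat))
if (length(missing) > 0)
  stop("Missing required columns: ", paste(missing, collapse = ", "))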

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
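As a hedged illustration of these control parameters (the values below are arbitrary, not recommendations): if Stan warns about divergent transitions or saturated tree depth, one might re-fit with stricter settings.

# Hypothetical re-fit with a higher acceptance target and deeper trees
output <- peer_ocu("example", niter = 4000, nwarmup = 1000, nchain = 4,
                   ncore = 4, adapt_delta = 0.99, max_treedepth = 12)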

References

Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916.

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples
# NOT RUN {
# Run the model and store results in "output"
output <- peer_ocu("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }
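A brief follow-up sketch, assuming the fit above succeeded: summarized individual parameters live in output$allIndPars, and plotInd() from hBayesDM plots individual posteriors; "rho" is one of this model's parameters.

head(output$allIndPars)    # per-subject rho, tau, ocu summaries
plotInd(output, "rho")     # individual posteriors for risk preference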
diff --git a/R/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html
index 5de3ea79..17ca7132 100644
--- a/R/docs/reference/peer_ocu.html
+++ b/R/docs/reference/peer_ocu.html
@@ -6,7 +6,7 @@
-Peer Influence Task (Chung et al., 2015) — peer_ocu • hBayesDM
+Other-Conferred Utility (OCU) Model — peer_ocu • hBayesDM
@@ -66,7 +68,7 @@
   hBayesDM
-  0.7.2
+  0.7.2.9000

    Hierarchical Bayesian Modeling of the Peer Influence Task with the following parameters: - "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility).

    -

    Contributor: Harhim Park

    -

    MODEL: Other-Conferred Utility (OCU) Model

    +

Hierarchical Bayesian Modeling of the Peer Influence Task using the Other-Conferred Utility (OCU) Model. It has the following parameters: rho (risk preference), tau (inverse temperature), ocu (other-conferred utility).

• Task: Peer Influence Task (Chung et al., 2015)
• Model: Other-Conferred Utility (OCU) Model
    peer_ocu(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.


    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("peer_ocu").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("peer_ocu").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,14 +250,14 @@

    Details For the Peer Influence Task, there should be 8 columns of data with the labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "condition"

    0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).

    -
    "p_gamble"

    Probability of receiving a high payoff (same for both options).

    -
    "safe_Hpayoff"

    High payoff of the safe option.

    -
    "safe_Lpayoff"

    Low payoff of the safe option.

    -
    "risky_Hpayoff"

    High payoff of the risky option.

    -
    "risky_Lpayoff"

    Low payoff of the risky option.

    -
    "choice"

    Which option was chosen? 0: safe, 1: risky.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    condition

    0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).

    +
    p_gamble

    Probability of receiving a high payoff (same for both options).

    +
    safe_Hpayoff

    High payoff of the safe option.

    +
    safe_Lpayoff

    Low payoff of the safe option.

    +
    risky_Hpayoff

    High payoff of the risky option.

    +
    risky_Lpayoff

    Low payoff of the risky option.

    +
    choice

    Which option was chosen? 0: safe, 1: risky.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -258,12 +286,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>

    References

    -

    Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social - signals of safety and risk confer utility and have asymmetric effects on observers' choices. - Nature Neuroscience, 18(6), 912-916.

    +

    Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916.

    See also

    @@ -273,8 +301,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- peer_ocu("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- peer_ocu(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- peer_ocu(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/plot.hBayesDM.html b/R/docs/reference/plot.hBayesDM.html
    index 5e227d0b..fc061be4 100644
    --- a/R/docs/reference/plot.hBayesDM.html
    +++ b/R/docs/reference/plot.hBayesDM.html
@@ -63,7 +63,7 @@
   hBayesDM
-  0.7.2
+  0.7.2.9000
(navbar Changelog entry updated)
diff --git a/R/docs/reference/plotDist.html b/R/docs/reference/plotDist.html
index e4de0a1e..d80fef70 100644
(navbar version 0.7.2 -> 0.7.2.9000; Changelog nav entry updated)

diff --git a/R/docs/reference/plotHDI.html b/R/docs/reference/plotHDI.html
index 1610368a..eb73107e 100644
(navbar version 0.7.2 -> 0.7.2.9000; Changelog nav entry updated)

diff --git a/R/docs/reference/plotInd.html b/R/docs/reference/plotInd.html
index b693ac5c..d7e1c28b 100644
(navbar version 0.7.2 -> 0.7.2.9000; Changelog nav entry updated)

diff --git a/R/docs/reference/printFit.html b/R/docs/reference/printFit.html
index 9ccaf316..bf891801 100644
(navbar version 0.7.2 -> 0.7.2.9000; Changelog nav entry updated)
diff --git a/R/docs/reference/prl.html b/R/docs/reference/prl.html
new file mode 100644
index 00000000..259f8114
--- /dev/null
+++ b/R/docs/reference/prl.html
@@ -0,0 +1,1563 @@
+Experience-Weighted Attraction Model — prl_ewa • hBayesDM

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Experience-Weighted Attraction Model. It has the following parameters: "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature).

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model. It has the following parameters: "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model. It has the following parameters: "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model, with separate learning rates for +PE and -PE. It has the following parameters: "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature).

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model, with separate learning rates and without the indecision point. It has the following parameters: "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature).

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model without the indecision point. It has the following parameters: "eta" (learning rate), "beta" (inverse temperature).

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model. It has the following parameters: "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).

Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model. It has the following parameters: "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).
    prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_fictitious(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

For prl_ewa:
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

For prl_fictitious:
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

For prl_fictitious_multipleB:
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "block", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

The arguments of prl_fictitious_rp, prl_fictitious_rp_woa, and prl_fictitious_woa are identical to those of prl_fictitious above.
For prl_rp:
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

For prl_rp_multipleB:
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "block", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.


    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("prl_ewa").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

Each of the remaining seven functions returns a class "hBayesDM" object with the same components; its model component is the corresponding function name ("prl_fictitious", "prl_fictitious_multipleB", "prl_fictitious_rp", "prl_fictitious_rp_woa", "prl_fictitious_woa", "prl_rp", or "prl_rp_multipleB").

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
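A minimal sketch of a well-formed data set for these models (all values below are made up for illustration; the output file name is hypothetical):

dat <- data.frame(
  subjID  = rep(c("s01", "s02"), each = 3),
  choice  = c(1, 2, 1, 2, 2, 1),        # option chosen: 1 or 2
  outcome = c(1, -1, 1, -1, 1, 1)       # reward == 1, loss == -1
)
write.table(dat, "prl_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)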

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
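Since all eight functions share this interface, a common workflow is to fit several candidate models to the same data and compare them with printFit(); "prl_data.txt" is the hypothetical file from the sketch above.

out_ewa <- prl_ewa(datafile = "prl_data.txt", niter = 2000, nwarmup = 1000,
                   nchain = 4, ncore = 4)
out_rp  <- prl_rp(datafile = "prl_data.txt", niter = 2000, nwarmup = 1000,
                  nchain = 4, ncore = 4)
printFit(out_ewa, out_rp)   # LOOIC/WAIC model comparison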

    +


    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Probabilistic Reversal Learning Task, there should be 4 columns of data with the + labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    block

    A unique identifier for each of the multiple blocks within each subject.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
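A minimal sketch of the multiple-block layout (identical to the single-block layout plus a "block" column; values are made up):

dat_mb <- data.frame(
  subjID  = rep("s01", 6),
  block   = rep(1:2, each = 3),   # block identifier within each subject
  choice  = c(1, 2, 1, 2, 1, 1),
  outcome = c(1, -1, 1, 1, -1, 1)
)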

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
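A brief, hedged sketch of the nthin guidance above: when autocorrelation is high, one might draw more iterations and keep only every 5th sample (figures are illustrative only, not recommendations).

# Store every 5th sample while drawing more iterations overall
output <- prl_ewa(datafile = "prl_data.txt", niter = 20000, nwarmup = 5000,
                  nchain = 4, ncore = 4, nthin = 5)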

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler: only every nthin-th sample is kept when generating the posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +
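For example (a sketch using the package's example data-set via the interface shown in the Examples below), setting nthin = 2 keeps every other sample:

# With niter = 4000 and nwarmup = 1000, each chain stores
# (4000 - 1000) / 2 = 1500 posterior samples when nthin = 2
output <- prl_ewa("example", niter = 4000, nwarmup = 1000,
                  nchain = 4, nthin = 2)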

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    +
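If Stan does report sampling problems (e.g. divergent transitions), a cautious adjustment might look like the sketch below; the specific values are illustrative, not recommendations:

# Defaults are adapt_delta = 0.95, stepsize = 1, max_treedepth = 10
output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4,
                  adapt_delta = 0.99, stepsize = 0.5, max_treedepth = 12)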

*Note: For the multiple-block variants of these models, there should be 4 columns of data with the labels "subjID", "block", "choice", "outcome", where "block" is a unique identifier for each of the multiple blocks within each subject. All other arguments are as described above.
    + +

    References

    + +

Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

+

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM


    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_fictitious("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_fictitious_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_fictitious_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_fictitious_rp_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_fictitious_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- prl_rp_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
diff --git a/R/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html
index 7ae8f3b3..2a1b27b3 100644
--- a/R/docs/reference/prl_ewa.html
+++ b/R/docs/reference/prl_ewa.html
@@ -6,7 +6,7 @@
-Probabilistic Reversal Learning Task — prl_ewa • hBayesDM
+Experience-Weighted Attraction Model — prl_ewa • hBayesDM
@@ -66,7 +68,7 @@
       hBayesDM
-        0.7.2
+        0.7.2.9000
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Experience-Weighted Attraction Model (Ouden et al., 2013, Neuron)

    +

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Experience-Weighted Attraction Model. +It has the following parameters: phi (1 - learning rate), rho (experience decay factor), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Experience-Weighted Attraction Model (Ouden et al., 2013)

    • +
    -
    prl_ewa(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_ewa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_ewa").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,9 +250,9 @@

    Details For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -253,12 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    +

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_ewa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_ewa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
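
The signature above also changes the inits default to "vb" and still accepts user-supplied initial values. A sketch of both (assuming, as a hedge, that custom values are passed as one number per parameter, here phi, rho, beta):

# Variational-inference-based initial values (the new default)
output <- prl_ewa(use_example = TRUE, inits = "vb")

# User-supplied initial values; one value per parameter (phi, rho, beta).
# The numbers are purely illustrative.
output <- prl_ewa(use_example = TRUE, inits = c(0.2, 0.3, 1.0))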
    diff --git a/R/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html
    index 9778a6e0..7b4a3718 100644
    --- a/R/docs/reference/prl_fictitious.html
    +++ b/R/docs/reference/prl_fictitious.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_fictitious • hBayesDM
    +Fictitious Update Model — prl_fictitious • hBayesDM
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex)

    +

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model. +It has the following parameters: eta (learning rate), alpha (indecision point), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Fictitious Update Model (Glascher et al., 2009)

    • +
    -
    prl_fictitious(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    prl_fictitious(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_fictitious").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

    Details For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -254,12 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    +

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

    @@ -269,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_fictitious("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_fictitious(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_fictitious(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
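
The Value table above lists the components of the returned "hBayesDM" object; the following sketch simply accesses them by the names given there (using the output object from the example call):

# Per-subject posterior summaries, as specified by indPars
head(output$allIndPars)

# Posterior samples and the underlying stanfit object
draws <- output$parVals
fit   <- output$fit

# Raw data used for fitting
head(output$rawdata)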
    diff --git a/R/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html
    index 6ea37a4e..019b43b1 100644
    --- a/R/docs/reference/prl_fictitious_multipleB.html
    +++ b/R/docs/reference/prl_fictitious_multipleB.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_fictitious_multipleB • hBayesDM
    +Fictitious Update Model — prl_fictitious_multipleB • hBayesDM
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,32 +104,35 @@
    -

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex)

    +

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model. +It has the following parameters: eta (learning rate), alpha (indecision point), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Fictitious Update Model (Glascher et al., 2009)

    • +
    -
    prl_fictitious_multipleB(data = "choose", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1,
    -  inits = "random", indPars = "mean", modelRegressor = FALSE,
    -  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
    -  stepsize = 1, max_treedepth = 10, ...)
    +
    prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "block", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_multipleB").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_fictitious_multipleB").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,10 +250,10 @@

    Details For the Probabilistic Reversal Learning Task, there should be 4 columns of data with the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "block"

    A unique identifier for each of the multiple blocks within each subject.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    block

    A unique identifier for each of the multiple blocks within each subject.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,12 +282,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    +

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

    @@ -270,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_fictitious_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_fictitious_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_fictitious_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
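
As the Details above note, the multiple-block variant expects a fourth "block" column; a minimal sketch of such a data.frame (all values invented for illustration):

df <- data.frame(
  subjID  = rep("s01", 4),
  block   = c(1, 1, 2, 2),    # block identifier within each subject
  choice  = c(1, 2, 2, 1),    # option chosen: 1 or 2
  outcome = c(1, -1, 1, -1)   # reward == 1, loss == -1
)
output <- prl_fictitious_multipleB(data = df, niter = 2000, nwarmup = 1000,
                                   nchain = 4, ncore = 4)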
    diff --git a/R/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html
    index 8e2b1974..174f5e27 100644
    --- a/R/docs/reference/prl_fictitious_rp.html
    +++ b/R/docs/reference/prl_fictitious_rp.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_fictitious_rp • hBayesDM
    +Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) — prl_fictitious_rp • hBayesDM
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE)

    +

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE). +It has the following parameters: eta_pos (learning rate, +PE), eta_neg (learning rate, -PE), alpha (indecision point), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) (Glascher et al., 2009; Ouden et al., 2013)

    • +
    -
    prl_fictitious_rp(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_rp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_fictitious_rp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

    Details For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -254,15 +281,13 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    -

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    +

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    +

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

    @@ -272,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_fictitious_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_fictitious_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_fictitious_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
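
Since modelRegressor = TRUE exports the regressors listed in the Arguments table ("ev_c", "ev_nc", "pe_c", "pe_nc", "dv"), a hedged sketch of extracting them from the fitted object:

output <- prl_fictitious_rp(use_example = TRUE, modelRegressor = TRUE,
                            niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# modelRegressor is a list keyed by the regressor names above
str(output$modelRegressor)
pe_c <- output$modelRegressor$pe_c  # prediction error for the chosen option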
    diff --git a/R/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html
    index b9dbc0a6..0f2c7bd8 100644
    --- a/R/docs/reference/prl_fictitious_rp_woa.html
    +++ b/R/docs/reference/prl_fictitious_rp_woa.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_fictitious_rp_woa • hBayesDM
    +Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) — prl_fictitious_rp_woa • hBayesDM
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)

    +

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point). +It has the following parameters: eta_pos (learning rate, +PE), eta_neg (learning rate, -PE), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) (Glascher et al., 2009; Ouden et al., 2013)

    • +
    -
    prl_fictitious_rp_woa(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_rp_woa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_fictitious_rp_woa").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

Details

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. @@ -254,15 +281,13 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    -

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    +

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    +

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

    @@ -272,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_fictitious_rp_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_fictitious_rp_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_fictitious_rp_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
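A hedged sketch of pulling the model-based regressors listed under modelRegressor above out of the fitted object (df stands in for a user-supplied data.frame; component names follow the Value section):

# Fit with regressor export enabled
output <- prl_fictitious_rp_woa(data = df, modelRegressor = TRUE)

# The extracted regressors live in the modelRegressor list component
ev_c <- output$modelRegressor$ev_c  # expected value, chosen option (per the regressor list above)
pe_c <- output$modelRegressor$pe_c  # prediction error, chosen option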
    diff --git a/R/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html
    index 7de402c0..316c3c99 100644
    --- a/R/docs/reference/prl_fictitious_woa.html
    +++ b/R/docs/reference/prl_fictitious_woa.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_fictitious_woa • hBayesDM
    +Fictitious Update Model, without alpha (indecision point) — prl_fictitious_woa • hBayesDM
     
     
     
    @@ -30,12 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "eta" (learning rate), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), without alpha (indecision point)

    +

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model, without alpha (indecision point). +It has the following parameters: eta (learning rate), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Fictitious Update Model, without alpha (indecision point) (Glascher et al., 2009)

    • +
    -
    prl_fictitious_woa(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_woa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_fictitious_woa").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,9 +250,9 @@

Details

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. @@ -254,12 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    +

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

    @@ -269,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_fictitious_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_fictitious_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_fictitious_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
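The datafile argument added here is the file-based counterpart of data; a hedged sketch (the path below is hypothetical):

# Fit from a tab-separated text file instead of a data.frame
output <- prl_fictitious_woa(datafile = "/path/to/prl_data.txt",
                             niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)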
    diff --git a/R/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html
    index 176bd1b3..dc4805b6 100644
    --- a/R/docs/reference/prl_rp.html
    +++ b/R/docs/reference/prl_rp.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_rp • hBayesDM
    +Reward-Punishment Model — prl_rp • hBayesDM
     
     
     
    @@ -30,12 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
@@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Reward-Punishment Model (Ouden et al., 2013, Neuron)

    +

Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model. +It has the following parameters: Apun (punishment learning rate), Arew (reward learning rate), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Reward-Punishment Model (Ouden et al., 2013)

    • +
    -
    prl_rp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_rp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_rp").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,9 +250,9 @@

Details

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. @@ -253,12 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    +

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
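With inc_postpred = TRUE, the trial-level simulations ("y_pred") are returned with the posterior samples; a hedged sketch assuming they are stored in the parVals component described under Value:

output <- prl_rp(data = df, inc_postpred = TRUE)
y_pred <- output$parVals$y_pred  # simulated choices, one array slice per posterior draw
dim(y_pred)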
    diff --git a/R/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html
    index e1c3b2e2..5d5d5f56 100644
    --- a/R/docs/reference/prl_rp_multipleB.html
    +++ b/R/docs/reference/prl_rp_multipleB.html
    @@ -6,7 +6,7 @@
     
     
     
    -Probabilistic Reversal Learning Task — prl_rp_multipleB • hBayesDM
    +Reward-Punishment Model — prl_rp_multipleB • hBayesDM
     
     
     
    @@ -30,12 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
@@ -100,32 +104,35 @@
    -

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters: - "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).

    -

    Contributor: (for model-based regressors) Jaeyeong Yang and Harhim Park

    -

    MODEL: Reward-Punishment Model (Ouden et al., 2013, Neuron)

    +

Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model. +It has the following parameters: Apun (punishment learning rate), Arew (reward learning rate), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Reversal Learning Task

    • +
    • Model: Reward-Punishment Model (Ouden et al., 2013)

    • +
    -
    prl_rp_multipleB(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "block", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. +

    Whether to export model-based regressors (TRUE or FALSE). For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_rp_multipleB").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("prl_rp_multipleB").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,10 +250,10 @@

Details

For the Probabilistic Reversal Learning Task, there should be 4 columns of data with the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "block"

    A unique identifier for each of the multiple blocks within each subject.

    -
    "choice"

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    "outcome"

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    block

    A unique identifier for each of the multiple blocks within each subject.

    +
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    +
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. @@ -255,12 +282,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    -

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    +

    Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

    @@ -270,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- prl_rp_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- prl_rp_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- prl_rp_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
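A hedged sketch of the minimal four-column layout this multiple-block variant expects (values are illustrative; outcome uses the documented 1/-1 coding):

df <- data.frame(subjID  = rep("s01", 4),
                 block   = c(1, 1, 2, 2),    # block identifier within subject
                 choice  = c(1, 2, 1, 1),    # option chosen: 1 or 2
                 outcome = c(1, -1, -1, 1))  # reward == 1, loss == -1
output <- prl_rp_multipleB(data = df, niter = 2000, nwarmup = 1000)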
    diff --git a/R/docs/reference/pst.html b/R/docs/reference/pst.html
    new file mode 100644
    index 00000000..40aa8168
    --- /dev/null
    +++ b/R/docs/reference/pst.html
    @@ -0,0 +1,334 @@
+Gain-Loss Q Learning Model — pst_gainloss_Q • hBayesDM

Hierarchical Bayesian Modeling of the Probabilistic Selection Task using the Gain-Loss Q Learning Model. +It has the following parameters: "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature).

    + + +
    + +
    pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "type", "choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("pst_gainloss_Q").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
+For the Probabilistic Selection Task, there should be 4 columns of data with the + labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

+
type

Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2.

+
    choice

    Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).

    +
    reward

    Amount of reward earned as a result of the trial.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.

    +

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.
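If the sampler reports divergent transitions, a hedged first remedy is to raise adapt_delta toward 1 (and, if warnings persist, max_treedepth), accepting a slower run:

output <- pst_gainloss_Q(data = df, adapt_delta = 0.99, max_treedepth = 12)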

    + +

    References

    + +

    Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- pst_gainloss_Q("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
diff --git a/R/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html
index f412917a..2a8e6a9b 100644
--- a/R/docs/reference/pst_gainloss_Q.html
+++ b/R/docs/reference/pst_gainloss_Q.html
@@ -6,7 +6,7 @@
-Probabilistic Selection Task — pst_gainloss_Q • hBayesDM
+Gain-Loss Q Learning Model — pst_gainloss_Q • hBayesDM
@@ -30,12 +30,14 @@
@@ -66,7 +68,7 @@
        hBayesDM
-        0.7.2
+        0.7.2.9000
    @@ -80,14 +82,16 @@
@@ -100,32 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Probabilistic Selection Task with the following parameters: - "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature).

    -

    Contributor: Jaeyeong Yang

    -

    MODEL: Gain-Loss Q Learning Model (Frank et al., 2007, PNAS)

    +

Hierarchical Bayesian Modeling of the Probabilistic Selection Task using the Gain-Loss Q Learning Model. +It has the following parameters: alpha_pos (learning rate for positive feedbacks), alpha_neg (learning rate for negative feedbacks), beta (inverse temperature).

    +
      +
    • Task: Probabilistic Selection Task

    • +
    • Model: Gain-Loss Q Learning Model (Frank et al., 2007)

    • +
    -
    pst_gainloss_Q(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "type", "choice", "reward". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("pst_gainloss_Q").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("pst_gainloss_Q").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,23 +250,10 @@

Details

For the Probabilistic Selection Task, there should be 4 columns of data with the labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "type"

    Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2.
Code for each stimulus type (1~6) is defined as below:

Code  Stimulus  Probability to win
1     A         80%
2     B         20%
3     C         70%
4     D         30%
5     E         60%
6     F         40%

- The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.

    -
    "choice"

    Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).

    -
    "reward"

    Amount of reward earned as a result of the trial.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    type

Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2 (a decoding sketch is given below). Codes 1~6 correspond to win probabilities of 80% (type 1), 20% (type 2), 70% (type 3), 30% (type 4), 60% (type 5), 40% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.

    +
    choice

    Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).

    +
    reward

    Amount of reward earned as a result of the trial.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. @@ -268,12 +282,12 @@
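A hedged helper (decode_type is not part of hBayesDM) that makes the tens-digit/ones-digit encoding of "type" concrete:

decode_type <- function(type) {
  c(option1 = type %/% 10,  # tens digit: stimulus shown as option1
    option2 = type %% 10)   # ones digit: stimulus shown as option2
}
decode_type(12)  # option1 = 1 (80% stimulus), option2 = 2 (20% stimulus)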

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

    References

    -

    Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic - triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings - of the National Academy of Sciences, 104(41), 16311-16316.

    +

    Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.

    See also

    @@ -283,8 +297,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- pst_gainloss_Q("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- pst_gainloss_Q(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- pst_gainloss_Q(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
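Beyond the "vb", "fixed", and "random" keywords, inits also accepts user-supplied starting values; a hedged sketch assuming a numeric vector ordered as the parameters are listed (alpha_pos, alpha_neg, beta):

output <- pst_gainloss_Q(data = df, inits = c(0.2, 0.2, 1.0))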
    diff --git a/R/docs/reference/ra.html b/R/docs/reference/ra.html
    new file mode 100644
    index 00000000..22262559
    --- /dev/null
    +++ b/R/docs/reference/ra.html
    @@ -0,0 +1,684 @@
+Prospect Theory, without loss aversion (LA) parameter — ra_noLA • hBayesDM

Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without loss aversion (LA) parameter. +It has the following parameters: "rho" (risk aversion), "tau" (inverse temperature).

    +
      +
    • Task: Risk Aversion Task

    • +
    • Model: Prospect Theory, without loss aversion (LA) parameter (Sokol-Hessner et al., 2009)

    • +
    + +

Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without risk aversion (RA) parameter. +It has the following parameters: "lambda" (loss aversion), "tau" (inverse temperature).

    +
      +
    • Task: Risk Aversion Task

    • +
    • Model: Prospect Theory, without risk aversion (RA) parameter (Sokol-Hessner et al., 2009)

    • +
    + +

Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory. +It has the following parameters: "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature).

    +
      +
    • Task: Risk Aversion Task

    • +
    • Model: Prospect Theory (Sokol-Hessner et al., 2009)

    • +
    + +
    + +
    ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +ra_prospect(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.


    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ra_noLA").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ra_noRA").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ra_prospect").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
+For the Risk Aversion Task, there should be 5 columns of data with the + labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    +
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    +
    cert

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    +
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
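Concretely (all values and the file name are made up for illustration):

df <- data.frame(
  subjID = rep(1:2, each = 2),  # unique subject identifiers
  gain   = c(9, 12, 8, 10),     # possible (50%) gain of the risky option
  loss   = c(5, 6, 4, 5),       # possible (50%) loss (may also be coded negative)
  cert   = c(0, 2, 1, 0),       # guaranteed amount of the safe option (>= 0)
  gamble = c(1, 0, 1, 1)        # 1 if the gamble was taken, 0 otherwise
)
write.table(df, "ra_exampleData.txt", sep = "\t", row.names = FALSE, quote = FALSE)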

nwarmup is a numerical value that specifies how many MCMC samples should not be stored at the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan, for a less technical description of these arguments.
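As an illustration, a call that tightens the sampler could look like the sketch below; the file name and the specific values are illustrative only, not recommendations:

output <- ra_prospect("ra_exampleData.txt",
                      niter = 4000, nwarmup = 1000, nchain = 4, nthin = 2,
                      adapt_delta   = 0.99,  # higher target acceptance, smaller steps
                      stepsize      = 0.5,   # smaller initial leapfrog step
                      max_treedepth = 12)    # allow longer NUTS trajectories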


    References

Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    + +

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model and store results in "output"
output <- ra_noLA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

# NOT RUN {
# Run the model and store results in "output"
output <- ra_noRA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

# NOT RUN {
# Run the model and store results in "output"
output <- ra_prospect("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }
diff --git a/R/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html
index f109afc4..6ae49c92 100644
--- a/R/docs/reference/ra_noLA.html
+++ b/R/docs/reference/ra_noLA.html
@@ -6,7 +6,7 @@
-Risk Aversion Task — ra_noLA • hBayesDM
+Prospect Theory, without loss aversion (LA) parameter — ra_noLA • hBayesDM
@@ -65,7 +68,7 @@
   hBayesDM
-  0.7.2
+  0.7.2.9000

-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
-"rho" (risk aversion), "tau" (inverse temperature).
-
-MODEL: Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without loss aversion (LA) parameter
+
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without loss aversion (LA) parameter.
+It has the following parameters: rho (risk aversion), tau (inverse temperature).
+
+  • Task: Risk Aversion Task
+  • Model: Prospect Theory, without loss aversion (LA) parameter (Sokol-Hessner et al., 2009)
    -
    ra_noLA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

Arguments

-data
-  A .txt file containing the data to be modeled. Data columns should be labeled as:
-  "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.
+data, datafile
+  A data.frame object (data) or a filepath for a tab-separated txt file containing
+  the data (datafile) to be modeled. Data columns should be labeled as:
+  "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

inits
-  Character value specifying how the initial values should be generated. Options are
-  "fixed" or "random", or your own initial values.
+  Character value specifying how the initial values should be generated.
+  Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars
  Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor
-  Export model-based regressors? TRUE or FALSE. Currently not available for this model.
+  Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb
  Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred
  Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.
+  If set to TRUE, it includes: "y_pred"

adapt_delta
  Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize
  Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth
  Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

+use_example
+  Whether to use example data. By default, set to FALSE.

+choose_data
+  Whether to choose data with an interactive window. By default, set to FALSE.

...
-  Not used for this model.
+  For this model, there is no model-specific argument.

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ra_noLA").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ra_noLA").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,11 +250,11 @@

    Details For the Risk Aversion Task, there should be 5 columns of data with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "gain"

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    "loss"

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    "cert"

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    -
    "gamble"

    If gamble was taken, gamble == 1; else gamble == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    +
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    +
    cert

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    +
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,10 +286,7 @@

    Details

    References

    -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & - Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. - Proceedings of the National Academy of Sciences of the United States of America, 106(13), - 5035-5040. http://www.pnas.org/content/106/13/5035

    +

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    See also

    @@ -269,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_noLA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ra_noLA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ra_noLA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    @@ -283,11 +313,6 @@ 

 # Show the WAIC and LOOIC model fit estimates
 printFit(output)
-# }
-
-# NOT RUN {
-# Paths to data published in Sokol-Hessner et al. (2009)
-path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
 # }


-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
-"lambda" (loss aversion), "tau" (inverse temperature).
-
-MODEL: Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without risk aversion (RA) parameter
+
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without risk aversion (RA) parameter.
+It has the following parameters: lambda (loss aversion), tau (inverse temperature).
+
+  • Task: Risk Aversion Task
+  • Model: Prospect Theory, without risk aversion (RA) parameter (Sokol-Hessner et al., 2009)
    -
    ra_noRA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ra_noRA").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ra_noRA").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,11 +250,11 @@

    Details For the Risk Aversion Task, there should be 5 columns of data with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "gain"

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    "loss"

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    "cert"

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    -
    "gamble"

    If gamble was taken, gamble == 1; else gamble == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    +
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    +
    cert

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    +
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -256,10 +286,7 @@

    Details

    References

    -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & - Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. - Proceedings of the National Academy of Sciences of the United States of America, 106(13), - 5035-5040. http://www.pnas.org/content/106/13/5035

    +

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    See also

    @@ -269,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_noRA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ra_noRA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ra_noRA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    @@ -283,11 +313,6 @@ 

 # Show the WAIC and LOOIC model fit estimates
 printFit(output)
-# }
-
-# NOT RUN {
-# Paths to data published in Sokol-Hessner et al. (2009)
-path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
 # }


-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
-"rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature).
-
-MODEL: Prospect Theory (Sokol-Hessner et al., 2009, PNAS)
+
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory.
+It has the following parameters: rho (risk aversion), lambda (loss aversion), tau (inverse temperature).
+
+  • Task: Risk Aversion Task
+  • Model: Prospect Theory (Sokol-Hessner et al., 2009)
    -
    ra_prospect(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    ra_prospect(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -150,8 +159,8 @@

    Arg

    - + @@ -160,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -187,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ra_prospect").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ra_prospect").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -221,11 +250,11 @@

    Details For the Risk Aversion Task, there should be 5 columns of data with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "gain"

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    "loss"

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    "cert"

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    -
    "gamble"

    If gamble was taken, gamble == 1; else gamble == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    +
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    +
    cert

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.

    +
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -257,10 +286,7 @@

    Details

    References

    -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & - Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. - Proceedings of the National Academy of Sciences of the United States of America, 106(13), - 5035-5040. http://www.pnas.org/content/106/13/5035

    +

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    See also

    @@ -270,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_prospect("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ra_prospect(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ra_prospect(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    @@ -284,11 +313,6 @@ 

 # Show the WAIC and LOOIC model fit estimates
 printFit(output)
-# }
-
-# NOT RUN {
-# Paths to data published in Sokol-Hessner et al. (2009)
-path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
 # }


-Hierarchical Bayesian Modeling of the Risky Decision Task with the following parameters:
-"w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error).
-
-Contributor: Harhim Park
-
-MODEL: Happiness Computational Model (Rutledge et al., 2014, PNAS)
+
+Hierarchical Bayesian Modeling of the Risky Decision Task using the Happiness Computational Model.
+It has the following parameters: w0 (baseline), w1 (weight of certain rewards), w2 (weight of expected values), w3 (weight of reward prediction errors), gam (forgetting factor), sig (standard deviation of error).
+
+  • Task: Risky Decision Task
+  • Model: Happiness Computational Model (Rutledge et al., 2014)
    -
    rdt_happiness(data = "choose", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "random",
    +    
    rdt_happiness(data = NULL, datafile = "", niter = 4000,
    +  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -152,8 +159,8 @@

    Arg

    - + @@ -162,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -189,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("rdt_happiness").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("rdt_happiness").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,15 +250,15 @@

    Details For the Risky Decision Task, there should be 9 columns of data with the labels "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "gain"

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    "loss"

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    "cert"

    Guaranteed amount of a safe option.

    -
    "type"

    loss == -1, mixed == 0, gain == 1

    -
    "gamble"

    If gamble was taken, gamble == 1; else gamble == 0.

    -
    "outcome"

    Result of the trial.

    -
    "happy"

    Happiness score.

    -
    "RT_happy"

    Reaction time for answering the happiness score.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    +
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    +
    cert

    Guaranteed amount of a safe option.

    +
    type

    loss == -1, mixed == 0, gain == 1

    +
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    +
    outcome

    Result of the trial.

    +
    happy

    Happiness score.

    +
    RT_happy

    Reaction time for answering the happiness score.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns. A minimal sketch of a conforming data set follows below.
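A toy data set in this 9-column format might look like the sketch below (all values, including the happiness rating scale, are made up for illustration):

rdt_df <- data.frame(
  subjID   = c(1, 1),
  gain     = c(9, 6),     # possible (50%) gain of the risky option
  loss     = c(5, 4),     # possible (50%) loss
  cert     = c(0, 0),     # guaranteed amount of the safe option
  type     = c(0, 1),     # loss == -1, mixed == 0, gain == 1
  gamble   = c(1, 0),     # 1 if the gamble was taken, 0 otherwise
  outcome  = c(9, 0),     # result of the trial
  happy    = c(72, 65),   # happiness score
  RT_happy = c(1.8, 2.1)  # reaction time for the happiness rating
)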

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>

    References


    Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257.

    See also

    @@ -275,8 +302,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- rdt_happiness("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- rdt_happiness(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- rdt_happiness(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/rhat.html b/R/docs/reference/rhat.html
    index e6ec9a61..9ce1133a 100644
    --- a/R/docs/reference/rhat.html
    +++ b/R/docs/reference/rhat.html
    @@ -65,7 +65,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
diff --git a/R/docs/reference/ts.html b/R/docs/reference/ts.html
new file mode 100644
index 00000000..6c5eec19
--- /dev/null
+++ b/R/docs/reference/ts.html
@@ -0,0 +1,691 @@
+Hybrid Model, with 4 parameters — ts_par4 • hBayesDM

Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 4 parameters.
It has the following parameters: "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 4 parameters (Daw et al., 2011; Wunderlich et al., 2012)

    • +
    • Contributor: Harhim Park <hrpark12@gmail.com>

    • +
    + +

Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 6 parameters.
It has the following parameters: "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 6 parameters (Daw et al., 2011)

    • +
    • Contributor: Harhim Park <hrpark12@gmail.com>

    • +
    + +

Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 7 parameters (the original model).
It has the following parameters: "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 7 parameters (original model) (Daw et al., 2011)

    • +
    • Contributor: Harhim Park <hrpark12@gmail.com>

    • +
    + +
    + +
    ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

data

A .txt file containing the data to be modeled. Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Export model-based regressors? TRUE or FALSE. Currently not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.

If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, it's possible to set model-specific argument(s) as follows:

trans_prob

Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
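For example, overriding this default might look like the sketch below (the file name and the value 0.8 are illustrative only):

output <- ts_par4("ts_exampleData.txt", trans_prob = 0.8,
                  niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4)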


    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ts_par4").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ts_par6").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("ts_par7").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Two-Step Task, there should be 4 columns of data with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID: A unique identifier for each subject in the data-set.

level1_choice: Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

level2_choice: Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6). Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition, while choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

reward: Reward after Level 2 (0 or 1).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
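As a concrete illustration, here is a minimal sketch of building such a data-set in R; all values below are invented for illustration, not real data:

# Sketch: a minimally valid Two-Step Task data set (values illustrative)
dat <- data.frame(
  subjID        = c(1, 1, 2, 2),
  level1_choice = c(1, 2, 1, 1),  # 1: stimulus 1, 2: stimulus 2
  level2_choice = c(3, 5, 4, 3),  # 3-6, depending on the Level 1 choice
  reward        = c(1, 0, 0, 1)   # 0 or 1
)
# Saved as the tab-delimited text file that the data argument expects:
write.table(dat, "twostep_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)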

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".
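A minimal sketch of these convergence checks, using the same functions as the Examples section below:

# Sketch: convergence checks after sampling completes
plot(output, type = "trace")  # trace-plot should resemble a "furry caterpillar"
rhat(output)                  # Rhat values <= 1.1 suggest convergence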

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.
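For example, a hedged sketch of thinning; the value 2 is illustrative, and note that thinning proportionally reduces the number of retained samples:

# Sketch: keep only every 2nd sample when autocorrelation is high
output <- ts_par4("example", niter = 4000, nwarmup = 1000, nthin = 2)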

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan, for a less technical description of these arguments.
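A hedged sketch of adjusting these control parameters, e.g. after warnings about divergent transitions; the exact values are illustrative, not recommendations:

# Sketch: tightening the sampler (values illustrative only)
output <- ts_par4("example", niter = 4000, nwarmup = 1000, nchain = 4,
                  adapt_delta = 0.99,   # higher target acceptance probability
                  max_treedepth = 12)   # allow longer trajectories per iteration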


    References

Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424.

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the ts_par4 model and store results in "output"
output <- ts_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

# NOT RUN {
# Run the ts_par6 model and store results in "output"
output <- ts_par6("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

# NOT RUN {
# Run the ts_par7 model and store results in "output"
output <- ts_par7("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }
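A short usage note: the components listed under Value above can be inspected directly from the returned object. A minimal sketch, assuming list-style `$` access to the documented component names:

# Sketch: inspecting the fitted hBayesDM object (component names per Value section)
head(output$allIndPars)   # per-subject parameter summaries
output$model              # name of the fitted model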
    + + + + + + diff --git a/R/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html index 5839b5b6..d0c3ec95 100644 --- a/R/docs/reference/ts_par4.html +++ b/R/docs/reference/ts_par4.html @@ -6,7 +6,7 @@ -Two-Step Task (Daw et al., 2011) — ts_par4 • hBayesDM +Hybrid Model, with 4 parameters — ts_par4 • hBayesDM @@ -30,12 +30,14 @@ - + - + @@ -66,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters: - "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight).

    -

    Contributor: Harhim Park

    -

    MODEL: Hybrid Model (Daw et al., 2011; Wunderlich et al., 2012), with 4 parameters

    +

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 4 parameters. +It has the following parameters: a (learning rate for both stages 1 & 2), beta (inverse temperature for both stages 1 & 2), pi (perseverance), w (model-based weight).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 4 parameters (Daw et al., 2011; Wunderlich et al., 2012)

    • +
    -
    ts_par4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    @@ -188,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -trans_prob: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ts_par4").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,10 +250,10 @@

    Details For the Two-Step Task, there should be 4 columns of data with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "level1_choice"

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    "level2_choice"

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument trans_prob to your preferred value.

    -
    "reward"

    Reward after Level 2 (0 or 1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    +
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    +
    reward

    Reward after Level 2 (0 or 1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,14 +282,14 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>

    References

    -

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). - Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), - 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    -

    Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over - model-free choice behavior. Neuron, 75(3), 418-424.

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424.

    See also

    @@ -272,8 +299,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ts_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ts_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html
    index bf40ace3..e61bf458 100644
    --- a/R/docs/reference/ts_par6.html
    +++ b/R/docs/reference/ts_par6.html
    @@ -6,7 +6,7 @@
     
     
     
    -Two-Step Task (Daw et al., 2011) — ts_par6 • hBayesDM
    +Hybrid Model, with 6 parameters — ts_par6 • hBayesDM
     
     
     
    @@ -30,12 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters: - "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight).

    -

    Contributor: Harhim Park

    -

    MODEL: Hybrid Model (Daw et al., 2011, Neuron), with 6 parameters

    +

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 6 parameters. +It has the following parameters: a1 (learning rate in stage 1), beta1 (inverse temperature in stage 1), a2 (learning rate in stage 2), beta2 (inverse temperature in stage 2), pi (perseverance), w (model-based weight).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 6 parameters (Daw et al., 2011)

    • +
    -
    ts_par6(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    @@ -188,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -trans_prob: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par6").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ts_par6").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,10 +250,10 @@

    Details For the Two-Step Task, there should be 4 columns of data with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "level1_choice"

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    "level2_choice"

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument trans_prob to your preferred value.

    -
    "reward"

    Reward after Level 2 (0 or 1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    +
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    +
    reward

    Reward after Level 2 (0 or 1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,12 +282,13 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>

    References

    -

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). - Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), - 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    See also

    @@ -270,8 +298,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par6("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ts_par6(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ts_par6(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html
    index d5a8b7b6..d2375356 100644
    --- a/R/docs/reference/ts_par7.html
    +++ b/R/docs/reference/ts_par7.html
    @@ -6,7 +6,7 @@
     
     
     
    -Two-Step Task (Daw et al., 2011) — ts_par7 • hBayesDM
    +Hybrid Model, with 7 parameters (original model) — ts_par7 • hBayesDM
     
     
     
    @@ -30,12 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -66,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters: - "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace).

    -

    Contributor: Harhim Park

    -

    MODEL: Hybrid Model (Daw et al., 2011, Neuron), with 7 parameters (original model)

    +

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 7 parameters (original model). +It has the following parameters: a1 (learning rate in stage 1), beta1 (inverse temperature in stage 1), a2 (learning rate in stage 2), beta2 (inverse temperature in stage 2), pi (perseverance), w (model-based weight), lambda (eligibility trace).

    +
      +
    • Task: Two-Step Task (Daw et al., 2011)

    • +
    • Model: Hybrid Model, with 7 parameters (original model) (Daw et al., 2011)

    • +
    -
    ts_par7(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    @@ -188,28 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: +"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    For this model, it's possible to set the following model-specific argument to a value that you may prefer.
    -trans_prob: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    For this model, it's possible to set model-specific argument(s) as follows:

    +
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par7").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ts_par7").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -223,10 +250,10 @@

    Details For the Two-Step Task, there should be 4 columns of data with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "level1_choice"

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    "level2_choice"

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument trans_prob to your preferred value.

    -
    "reward"

    Reward after Level 2 (0 or 1).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    +
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    +
    reward

    Reward after Level 2 (0 or 1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -255,12 +282,13 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Harhim Park <hrpark12@gmail.com>

    References

    -

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). - Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), - 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    +

    Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

    See also

    @@ -270,8 +298,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par7("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ts_par7(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ts_par7(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ug.html b/R/docs/reference/ug.html
    new file mode 100644
    index 00000000..f65c64f3
    --- /dev/null
    +++ b/R/docs/reference/ug.html
    @@ -0,0 +1,505 @@
    +
    +
    +
    +  
    +  
    +
    +
    +
    +Ideal Observer Model — ug_bayes • hBayesDM

Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using the Ideal Observer Model. +It has the following parameters: "alpha" (envy), "beta" (guilt), "tau" (inverse temperature).

    +
      +
    • Task: Norm-Training Ultimatum Game

    • +
    • Model: Ideal Observer Model (Xiang et al., 2013)

    • +
    + +

Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using the Rescorla-Wagner (Delta) Model. +It has the following parameters: "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate).

    +
      +
    • Task: Norm-Training Ultimatum Game

    • +
    • Model: Rescorla-Wagner (Delta) Model (Gu et al., 2015)

    • +
    + +
    + +
    ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +
    +ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "offer", "accept". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.


    Value

A class "hBayesDM" object modelData with the following components:

model: Character value that is the name of the model ("ug_bayes" or "ug_delta", according to the function called).

allIndPars: Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals: List object containing the posterior samples over different parameters.

fit: A class stanfit object that contains the fitted Stan model.

rawdata: Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor: List object containing the extracted model-based regressors.

    Details

    + +

    This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.

For the Norm-Training Ultimatum Game, there should be 3 columns of data with the labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below (a minimal example follows the note):

subjID: A unique identifier for each subject in the data-set.

offer: Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

accept: 1 or 0, indicating whether the offer was accepted in that trial (accepted == 1, rejected == 0).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
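As a concrete illustration, a minimal sketch of building such a data-set in R; all values are invented for illustration:

# Sketch: a minimally valid Norm-Training Ultimatum Game data set (values illustrative)
dat <- data.frame(
  subjID = c(1, 1, 2, 2),
  offer  = c(4, 10, 11, 5),  # offer made in each trial
  accept = c(0, 1, 1, 0)     # 1 = accepted, 0 = rejected
)
write.table(dat, "ug_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)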

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan, for a less technical description of these arguments.

    + +

    References

    + +

    Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013

    +

    Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    +


    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- ug_bayes("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
+# }
+# NOT RUN {
    +# Run the model and store results in "output"
    +output <- ug_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
    + + + + + + diff --git a/R/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html index 45bce3ec..4b9e56c4 100644 --- a/R/docs/reference/ug_bayes.html +++ b/R/docs/reference/ug_bayes.html @@ -6,7 +6,7 @@ -Norm-Training Ultimatum Game — ug_bayes • hBayesDM +Ideal Observer Model — ug_bayes • hBayesDM @@ -30,11 +30,14 @@ - + - + @@ -65,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game with the following parameters: - "alpha" (envy), "beta" (guilt), "tau" (inverse temperature).

    -

    MODEL: Ideal Observer Model (Xiang et al., 2013, J Neuro)

    +

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using Ideal Observer Model. +It has the following parameters: alpha (envy), beta (guilt), tau (inverse temperature).

    +
      +
    • Task: Norm-Training Ultimatum Game

    • +
    • Model: Ideal Observer Model (Xiang et al., 2013)

    • +
    -
    ug_bayes(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "offer", "accept". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ug_bayes").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ug_bayes").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,9 +250,9 @@

    Details For the Norm-Training Ultimatum Game, there should be 3 columns of data with the labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "offer"

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    -
    "accept"

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    offer

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    +
    accept

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -254,9 +284,7 @@

    Details

    References

    -

    Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their - Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. - http://doi.org/10.1523/JNEUROSCI.1642-12.2013

    +

    Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013

    See also

    @@ -266,8 +294,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ug_bayes("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ug_bayes(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ug_bayes(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html
    index b0f430cf..81ae5bf4 100644
    --- a/R/docs/reference/ug_delta.html
    +++ b/R/docs/reference/ug_delta.html
    @@ -6,7 +6,7 @@
     
     
     
    -Norm-Training Ultimatum Game — ug_delta • hBayesDM
    +Rescorla-Wagner (Delta) Model — ug_delta • hBayesDM
     
     
     
    @@ -30,11 +30,14 @@
     
     
     
    -
    +
     
    -
    +
     
     
     
    @@ -65,7 +68,7 @@
           
           
             hBayesDM
    -        0.7.2
    +        0.7.2.9000
           
         
    @@ -79,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -99,30 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game with the following parameters: - "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate).

    -

    MODEL: Rescorla-Wagner (Delta) Model (Gu et al., 2015, J Neuro)

    +

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using Rescorla-Wagner (Delta) Model. +It has the following parameters: alpha (envy), tau (inverse temperature), ep (norm adaptation rate).

    +
      +
    • Task: Norm-Training Ultimatum Game

    • +
    • Model: Rescorla-Wagner (Delta) Model (Gu et al., 2015)

    • +
    -
    ug_delta(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -149,8 +159,8 @@

    Arg

    - + @@ -159,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -186,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "offer", "accept". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ug_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("ug_delta").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -220,9 +250,9 @@

    Details For the Norm-Training Ultimatum Game, there should be 3 columns of data with the labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "offer"

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    -
    "accept"

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    offer

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    +
    accept

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -254,10 +284,7 @@

    Details

    References

    -

    Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet - Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm - Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), - 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015

    +

    Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015

    See also

    @@ -267,8 +294,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ug_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- ug_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- ug_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/wcs.html b/R/docs/reference/wcs.html
    new file mode 100644
    index 00000000..6245caf2
    --- /dev/null
    +++ b/R/docs/reference/wcs.html
    @@ -0,0 +1,334 @@
    +
    +
    +
    +  
    +  
    +
    +
    +
    +Sequential Learning Model — wcs_sql • hBayesDM
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    +  
    +
    +  
    +    
    +
    + + + +
    + +
    +
    + + +
    + +

Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task using Sequential Learning Model. +It has the following parameters: r (reward sensitivity), p (punishment sensitivity), d (decision consistency or inverse temperature).

    + + +
    + +
    wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    + +

    Arguments

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. +Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is +high.

    inits

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options +are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. +Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults +to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file +size). Defaults to FALSE.

    +

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new +sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can +take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take +on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    + + + + + + + + + +
    + +

    Value

    + +

    A class "hBayesDM" object modelData with the following components:

    +
    model

    Character value that is the name of the model ("wcs_sql").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by + indPars) for each subject.

    +
    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan + model.

    +
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by + the user.

    + + +
    modelRegressor

    List object containing the extracted model-based regressors.

    +
    + + +

    Details

    + +

    This section describes some of the function arguments in greater detail.

    +

    data should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a tab-delimited text + file, whose rows represent trial-by-trial observations and columns represent variables.
    +For the Wisconsin Card Sorting Task, there should be 3 columns of data with the + labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below:

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.

    +
    outcome

    1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.

    +

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns.
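As with the other tasks, a minimal data set can be sketched directly in R (all values below are invented for illustration) and passed through the data argument:

```R
# Toy Wisconsin Card Sorting Task data; values are illustrative only
wcs_data <- data.frame(
  subjID  = c(1, 1, 1, 2, 2, 2),  # unique identifier for each subject
  choice  = c(1, 3, 4, 2, 1, 3),  # deck chosen on each trial (1, 2, 3, or 4)
  outcome = c(1, 0, 1, 1, 0, 0)   # 1 = correct, 0 = wrong
)
output <- wcs_sql(data = wcs_data, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
```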

    +

nwarmup is a numerical value that specifies how many MCMC samples should not be stored + at the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + nwarmup argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors.

    +

    nchain is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    +

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every i == nthin samples to generate posterior distributions. By default, + nthin is equal to 1, meaning that every sample is used to generate the posterior.

    +

    Control Parameters: adapt_delta, stepsize, and max_treedepth are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC + Algorithm Parameters' of the Stan User's Guide + and Reference Manual, or to the help page for stan for a less technical + description of these arguments.

    + +

    References

    + +

    Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.

    + +

    See also

    + +

    We refer users to our in-depth tutorial for an example of using hBayesDM: + https://rpubs.com/CCSL/hBayesDM

    + + +

    Examples

    +
    # NOT RUN {
    +# Run the model and store results in "output"
    +output <- wcs_sql("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    +plot(output, type = "trace")
    +
    +# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    +rhat(output)
    +
    +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    +plot(output)
    +
    +# Show the WAIC and LOOIC model fit estimates
    +printFit(output)
    +# }
    +
    + +
    + +
    + + +
    +

    Site built with pkgdown 1.3.0.

    +
    +
    +
    + + + + + + diff --git a/R/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html index 6453daf5..504a5026 100644 --- a/R/docs/reference/wcs_sql.html +++ b/R/docs/reference/wcs_sql.html @@ -6,7 +6,7 @@ -Wisconsin Card Sorting Task — wcs_sql • hBayesDM +Sequential Learning Model — wcs_sql • hBayesDM @@ -30,12 +30,14 @@ - + - + @@ -66,7 +68,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -80,14 +82,16 @@
  • Reference -
  • -
  • - Changelog
  • @@ -100,31 +104,35 @@
    -

    Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task with the following parameters: - "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature).

    -

    Contributor: Dayeong Min

    -

    MODEL: Sequential Learning Model (Bishara et al., 2010, Journal of Mathematical Psychology)

    +

    Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task using Sequential Learning Model. +It has the following parameters: r (reward sensitivity), p (punishment sensitivity), d (decision consistency or inverse temperature).

    +
      +
    • Task: Wisconsin Card Sorting Task

    • +
    • Model: Sequential Learning Model (Bishara et al., 2010)

    • +
    -
    wcs_sql(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    +
    wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    +  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    +  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    - - + @@ -151,8 +159,8 @@

    Arg

    - + @@ -161,18 +169,19 @@

    Arg

    - + +to FALSE.

    +size). Defaults to FALSE. +If set to TRUE, it includes: "y_pred"

    @@ -188,27 +197,46 @@

    Arg

    + + + + + + + + - +
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: +

    data, datafile

A data.frame object (data) or a filepath for a tab-separated txt file +containing the data (datafile) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See Details below for more information.

    inits

    Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.

    Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars
    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    Whether to export model-based regressors (TRUE or FALSE). +Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    adapt_deltamax_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    use_example

    Whether to use example data. By default, set to FALSE.

    choose_data

    Whether to choose data with an interactive window. +By default, set to FALSE.

    ...

    Not used for this model.

    For this model, there is no model-specific argument.

    + + + + + + + + + +

    Value

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("wcs_sql").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by +

    model

Character value that is the name of the model ("wcs_sql").

    +
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan +

    parVals

    List object containing the posterior samples over different parameters.

    +
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by +

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    - +
    modelRegressor

    List object containing the extracted model-based regressors.

    @@ -222,9 +250,9 @@

    Details For the Wisconsin Card Sorting Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject in the data-set.

    -
    "choice"

    Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.

    -
    "outcome"

    1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.

    +
    subjID

    A unique identifier for each subject in the data-set.

    +
    choice

    Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.

    +
    outcome

    1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no @@ -253,12 +281,12 @@

    Details Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    +

    Contributors

    +Dayeong Min <mindy2801@snu.ac.kr>

    References

    -

    Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. - (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in - substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.

    +

    Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.

    See also

    @@ -268,8 +296,11 @@

    See a

    Examples

    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- wcs_sql("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +# Run the model with a given data.frame as df
    +output <- wcs_sql(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +
    +# Run the model with example data
    +output <- wcs_sql(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man-roxygen/README.md b/R/man-roxygen/README.md
    deleted file mode 100644
    index 60887ede..00000000
    --- a/R/man-roxygen/README.md
    +++ /dev/null
    @@ -1,60 +0,0 @@
    -# How to document model functions (by Jethro Lee)
    -
    -Template Variable | Required? | Format
    --|-|-
    -`MODEL_FUNCTION` | Y |
    -`CONTRIBUTOR` | *optional* | \href{   }{   }, ...
    -`TASK_NAME` | Y |
    -`TASK_CITE` | *optional* | (   )
    -`MODEL_NAME` | Y |
    -`MODEL_CITE` | *optional* | (   )
-`MODEL_TYPE` | Y | `Hierarchical` *or* `Individual` *or*
    `Multiple-Block Hierarchical` -`DATA_COLUMNS` | Y | "   ", ... -`PARAMETERS` | Y | "   " (   ), ... -`REGRESSORS` | *optional* | "   ", ... -`IS_NULL_POSTPREDS` | *optional* | `TRUE` -`ADDITIONAL_ARG` | *optional* | \code{   }: *explanation here* -`LENGTH_DATA_COLUMNS` | Y | # -`DETAILS_DATA_1` | Y | `\item{"subjID"}{A unique identifier for each subject in the data-set.}` -`DETAILS_DATA_2` | *optional* | \item{"   "}{   } -`DETAILS_DATA_3` | *optional* | \item{"   "}{   } -`DETAILS_DATA_4` | *optional* | \item{"   "}{   } -`DETAILS_DATA_5` | *optional* | \item{"   "}{   } -`DETAILS_DATA_6` | *optional* | \item{"   "}{   } -`DETAILS_DATA_7` | *optional* | \item{"   "}{   } -`DETAILS_DATA_8` | *optional* | \item{"   "}{   } -`DETAILS_DATA_9` | *optional* | \item{"   "}{   } - -## Example: `igt_pvl_decay.R` -```R -#' @templateVar MODEL_FUNCTION igt_pvl_decay -#' @templateVar TASK_NAME Iowa Gambling Task -#' @templateVar MODEL_NAME Prospect Valence Learning (PVL) Decay-RI -#' @templateVar MODEL_CITE (Ahn et al., 2014, Frontiers in Psychology) -#' @templateVar MODEL_TYPE Hierarchical -#' @templateVar DATA_COLUMNS "subjID", "choice", "gain", "loss" -#' @templateVar PARAMETERS "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion) -#' @templateVar ADDITIONAL_ARG \code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100. -#' @templateVar LENGTH_DATA_COLUMNS 4 -#' @templateVar DETAILS_DATA_1 \item{"subjID"}{A unique identifier for each subject in the data-set.} -#' @templateVar DETAILS_DATA_2 \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} -#' @templateVar DETAILS_DATA_3 \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} -#' @templateVar DETAILS_DATA_4 \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} -#' -#' @template model-documentation -#' -#' @export -#' @include hBayesDM_model.R -#' -#' @references -#' Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, -#' J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence -#' from computational modeling with pure users. Frontiers in Psychology, 5, 1376. -#' http://doi.org/10.3389/fpsyg.2014.00849 -``` - -## How to work with the template: `model-documentation.R` -- R expressions between `<%` and `%>` are **executed** in-place. -- The value of the R expression between `<%=` and `%>` is **printed**. -- All text outside of that is printed *as-is*. -#### See more: roxygen2 uses [brew](https://www.rdocumentation.org/packages/brew/versions/1.0-6/topics/brew) to preprocess the template. diff --git a/R/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R index cbc6f134..90993a18 100644 --- a/R/man-roxygen/model-documentation.R +++ b/R/man-roxygen/model-documentation.R @@ -1,18 +1,20 @@ -#' @title <%= TASK_NAME %> - <%= MODEL_NAME %> +#' @title <%= MODEL_NAME %> #' #' @description -#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> <%= get0("TASK_CITE") %> -#' with the following parameters: -#' <%= PARAMETERS %>. +#' <%= MODEL_TYPE %> Bayesian Modeling of the <%= TASK_NAME %> using <%= MODEL_NAME %>. +#' It has the following parameters: <%= PARAMETERS %>. 
#' -#' @description -#' \strong{TASK:} <%= TASK_NAME %> <%= get0("TASK_CITE") %> -#' \strong{MODEL:} <%= MODEL_NAME %> <%= get0("MODEL_CITE") %> -#' -#' <%= ifelse(exists("CONTRIBUTOR"), paste0("@description Contributor: ", CONTRIBUTOR), "") %> +#' \itemize{ +#' \item \strong{Task}: <%= TASK_NAME %> <%= ifelse(!is.na(TASK_CITE), TASK_CITE, '') %> +#' \item \strong{Model}: <%= MODEL_NAME %> <%= ifelse(!is.na(MODEL_CITE), MODEL_CITE, '') %> +#' } #' -#' @param data A .txt file containing the data to be modeled. Data columns should be labeled as: +#' @param data,datafile A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +#' containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: #' <%= DATA_COLUMNS %>. See \bold{Details} below for more information. +#' @param use_example Whether to use example data. By default, set to \code{FALSE}. +#' @param choose_data Whether to choose data with an interactive window. +#' By default, set to \code{FALSE}. #' @param niter Number of iterations, including warm-up. Defaults to 4000. #' @param nwarmup Number of iterations used for warm-up only. Defaults to 1000. #' @param nchain Number of Markov chains to run. Defaults to 4. @@ -26,17 +28,18 @@ #' are: "mean", "median", or "mode". #' @param modelRegressor #' <% EXISTS_REGRESSORS <- paste0("For this model they are: ", get0("REGRESSORS"), ".") %> -#' <% NOT_EXISTS_REGRESSORS <- "Currently not available for this model." %> -#' Export model-based regressors? TRUE or FALSE. -#' <%= ifelse(exists("REGRESSORS"), EXISTS_REGRESSORS, NOT_EXISTS_REGRESSORS) %> +#' <% NOT_EXISTS_REGRESSORS <- "Not available for this model." %> +#' Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +#' <%= ifelse(!is.na(REGRESSORS), EXISTS_REGRESSORS, NOT_EXISTS_REGRESSORS) %> #' @param vb Use variational inference to approximately draw from a posterior distribution. Defaults -#' to FALSE. +#' to \code{FALSE}. #' @param inc_postpred -#' <% HAS_POSTPREDS <- exists("POSTPREDS") && (POSTPREDS != "TRUE") %> -#' <%= ifelse(HAS_POSTPREDS, "", "\\strong{(Currently not available.)}") %> +#' <% HAS_POSTPREDS <- !is.na(POSTPREDS) %> +#' <% PP_T <- paste0("If set to \\code{TRUE}, it includes: ", POSTPREDS) %> +#' <% PP_F <- "Not available for this model." %> #' Include trial-level posterior predictive simulations in model output (may greatly increase file -#' size). Defaults to FALSE. -#' <%= ifelse(HAS_POSTPREDS, paste0("If set to TRUE, it includes: ", POSTPREDS), "") %> +#' size). Defaults to \code{FALSE}. +#' <%= ifelse(HAS_POSTPREDS, PP_T, PP_F) %> #' @param adapt_delta Floating point value representing the target acceptance probability of a new #' sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below. #' @param stepsize Integer value specifying the size of each leapfrog step that the MCMC sampler can @@ -44,7 +47,37 @@ #' @param max_treedepth Integer value specifying how many leapfrog steps the MCMC sampler can take #' on each new iteration. See \bold{Details} below. #' @param ... -#' Additional arguments. See \bold{Details} below. +#' <% AA_T1 <- "For this model, it's possible to set \\strong{model-specific argument(s)} " %> +#' <% AA_T2 <- "as follows: " %> +#' <% AA_T <- paste0(AA_T1, AA_T2) %> +#' <% AA_F <- "For this model, there is no model-specific argument." 
%> +#' <%= ifelse(as.integer(LENGTH_ADDITIONAL_ARGS) > 0, AA_T, AA_F) %> +#' \describe{ +#' <%= get0("ADDITIONAL_ARGS_1") %> +#' <%= get0("ADDITIONAL_ARGS_2") %> +#' <%= get0("ADDITIONAL_ARGS_3") %> +#' <%= get0("ADDITIONAL_ARGS_4") %> +#' <%= get0("ADDITIONAL_ARGS_5") %> +#' <%= get0("ADDITIONAL_ARGS_6") %> +#' <%= get0("ADDITIONAL_ARGS_7") %> +#' <%= get0("ADDITIONAL_ARGS_8") %> +#' <%= get0("ADDITIONAL_ARGS_9") %> +#' } +#' +#' @return A class "hBayesDM" object \code{modelData} with the following components: +#' \describe{ +#' \item{model}{Character value that is the name of the model (\code{"<%= MODEL_FUNCTION %>"}).} +#' \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by +#' \code{indPars}) for each subject.} +#' \item{parVals}{List object containing the posterior samples over different parameters.} +#' \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan +#' model.} +#' \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by +#' the user.} +#' <% RETURN_REGRESSORS <- "\\item{modelRegressor}{List object containing the " %> +#' <% RETURN_REGRESSORS <- paste0(RETURN_REGRESSORS, "extracted model-based regressors.}") %> +#' <%= ifelse(!is.na(REGRESSORS), RETURN_REGRESSORS, "") %> +#' } #' #' @details #' This section describes some of the function arguments in greater detail. @@ -100,38 +133,7 @@ #' and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical #' description of these arguments. #' -#' \strong{...}: -#' <% AA_T1 <- "For this model, it's possible to set \\strong{model-specific argument(s)} " %> -#' <% AA_T2 <- "as follows: " %> -#' <% AA_T <- paste0(AA_T1, AA_T2) %> -#' <% AA_F <- "For this model, there is no model-specific argument."
%> -#' <%= ifelse(as.integer(ADDITIONAL_ARGS_LEN) > 0, AA_T, AA_F) %> -#' \describe{ -#' <%= get0("ADDITIONAL_ARGS_1") %> -#' <%= get0("ADDITIONAL_ARGS_2") %> -#' <%= get0("ADDITIONAL_ARGS_3") %> -#' <%= get0("ADDITIONAL_ARGS_4") %> -#' <%= get0("ADDITIONAL_ARGS_5") %> -#' <%= get0("ADDITIONAL_ARGS_6") %> -#' <%= get0("ADDITIONAL_ARGS_7") %> -#' <%= get0("ADDITIONAL_ARGS_8") %> -#' <%= get0("ADDITIONAL_ARGS_9") %> -#' } -#' -#' @return A class "hBayesDM" object \code{modelData} with the following components: -#' \describe{ -#' \item{\code{model}}{Character value that is the name of the model ("<%= MODEL_FUNCTION %>").} -#' \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by -#' \code{indPars}) for each subject.} -#' \item{\code{parVals}}{List object containing the posterior samples over different parameters.} -#' \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan -#' model.} -#' \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by -#' the user.} -#' <% RETURN_REGRESSORS <- "\\item{\\code{modelRegressor}}{List object containing the " %> -#' <% RETURN_REGRESSORS <- paste0(RETURN_REGRESSORS, "extracted model-based regressors.}") %> -#' <%= ifelse(exists("REGRESSORS"), RETURN_REGRESSORS, "") %> -#' } +#' <%= ifelse(!is.na(CONTRIBUTOR), paste0("\\subsection{Contributors}{", CONTRIBUTOR, "}"), "") %> #' #' @seealso #' We refer users to our in-depth tutorial for an example of using hBayesDM: @@ -139,8 +141,11 @@ #' #' @examples #' \dontrun{ -#' # Run the model and store results in "output" -#' output <- <%= MODEL_FUNCTION %>("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +#' # Run the model with a given data.frame as df +#' output <- <%= MODEL_FUNCTION %>(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +#' +#' # Run the model with example data +#' output <- <%= MODEL_FUNCTION %>(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) #' #' # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') #' plot(output, type = "trace") diff --git a/R/man/bandit2arm_delta.Rd b/R/man/bandit2arm_delta.Rd index a8a33501..79f40bd8 100644 --- a/R/man/bandit2arm_delta.Rd +++ b/R/man/bandit2arm_delta.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/bandit2arm_delta.R \name{bandit2arm_delta} \alias{bandit2arm_delta} -\title{2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004)} +\title{Rescorla-Wagner (Delta) Model} \usage{ -bandit2arm_delta(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +bandit2arm_delta(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. 
Defaults to 4000.} @@ -26,20 +27,21 @@ bandit2arm_delta(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("bandit2arm_delta").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"bandit2arm_delta"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the 2-Armed Bandit Task with the following parameters: - "A" (learning rate), "tau" (inverse temperature). +Hierarchical Bayesian Modeling of the 2-Armed Bandit Task using Rescorla-Wagner (Delta) Model. +It has the following parameters: \code{A} (learning rate), \code{tau} (inverse temperature). 
-\strong{MODEL:} Rescorla-Wagner (Delta) Model +\itemize{ + \item \strong{Task}: 2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004) + \item \strong{Model}: Rescorla-Wagner (Delta) Model +} } \details{ This section describes some of the function arguments in greater detail. @@ -85,9 +106,9 @@ For the 2-Armed Bandit Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer value representing the option chosen on the given trial: 1 or 2.} - \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer value representing the option chosen on the given trial: 1 or 2.} + \item{outcome}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} @@ -130,8 +151,11 @@ For the 2-Armed Bandit Task, there should be 3 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- bandit2arm_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- bandit2arm_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- bandit2arm_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -147,13 +171,9 @@ printFit(output) } } \references{ -Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice - prediction competition: Choices from experience and from description. Journal of Behavioral - Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683 +Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683 -Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the - Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. - http://doi.org/10.1111/j.0956-7976.2004.00715.x +Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. 
http://doi.org/10.1111/j.0956-7976.2004.00715.x } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/bandit4arm2_kalman_filter.Rd b/R/man/bandit4arm2_kalman_filter.Rd index 5cb6482d..a18e15ae 100644 --- a/R/man/bandit4arm2_kalman_filter.Rd +++ b/R/man/bandit4arm2_kalman_filter.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/bandit4arm2_kalman_filter.R \name{bandit4arm2_kalman_filter} \alias{bandit4arm2_kalman_filter} -\title{4-Armed Bandit Task (modified)} +\title{Kalman Filter} \usage{ -bandit4arm2_kalman_filter(data = "choose", niter = 4000, - nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, - inits = "random", indPars = "mean", modelRegressor = FALSE, - vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, - stepsize = 1, max_treedepth = 10, ...) +bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ bandit4arm2_kalman_filter(data = "choose", niter = 4000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. 
+\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("bandit4arm2_kalman_filter").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"bandit4arm2_kalman_filter"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) with the following parameters: - "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise). +Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) using Kalman Filter. +It has the following parameters: \code{lambda} (decay factor), \code{theta} (decay center), \code{beta} (inverse softmax temperature), \code{mu0} (anticipated initial mean of all 4 options), \code{sigma0} (anticipated initial sd (uncertainty factor) of all 4 options), \code{sigmaD} (sd of diffusion noise). -Contributor: \href{https://zohyos7.github.io}{Yoonseo Zoh}, \href{https://lei-zhang.net/}{Lei Zhang} - -\strong{MODEL:} Kalman Filter (Daw et al., 2006, Nature) +\itemize{ + \item \strong{Task}: 4-Armed Bandit Task (modified) + \item \strong{Model}: Kalman Filter (Daw et al., 2006) +} } \details{ This section describes some of the function arguments in greater detail. @@ -87,9 +106,9 @@ For the 4-Armed Bandit Task (modified), there should be 3 columns of data with t labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} - \item{"outcome"}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} + \item{outcome}{Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).} @@ -129,11 +148,16 @@ For the 4-Armed Bandit Task (modified), there should be 3 columns of data with t Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. 
+
+\subsection{Contributors}{\href{https://zohyos7.github.io}{Yoonseo Zoh} <\email{zohyos7@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bandit4arm2_kalman_filter("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bandit4arm2_kalman_filter(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bandit4arm2_kalman_filter(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -149,8 +173,7 @@ printFit(output)
 }
 }
 \references{
-Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates
-  for exploratory decisions in humans. Nature, 441(7095), 876-879.
+Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/bandit4arm_2par_lapse.Rd b/R/man/bandit4arm_2par_lapse.Rd
index 203107e3..48ee7239 100644
--- a/R/man/bandit4arm_2par_lapse.Rd
+++ b/R/man/bandit4arm_2par_lapse.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/bandit4arm_2par_lapse.R
 \name{bandit4arm_2par_lapse}
 \alias{bandit4arm_2par_lapse}
-\title{4-Armed Bandit Task}
+\title{3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)}
 \usage{
-bandit4arm_2par_lapse(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+bandit4arm_2par_lapse(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bandit4arm_2par_lapse(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution.
Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bandit4arm_2par_lapse").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bandit4arm_2par_lapse"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters:
-  "Arew" (reward learning rate), "Apun" (punishment learning rate), "xi" (noise).
+Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise).
+It has the following parameters: \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{xi} (noise).
 
-\strong{MODEL:} 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018, PsyArXiv)
+\itemize{
+  \item \strong{Task}: 4-Armed Bandit Task
+  \item \strong{Model}: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -85,10 +106,10 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 labels "subjID", "choice", "gain", "loss".
It is not necessary for the columns to be in this particular
order, however it is necessary that they be labeled correctly and contain the information below:
\describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
-  \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
-  \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
+  \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
+  \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
 
 
 
@@ -130,8 +151,11 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 }
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bandit4arm_2par_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bandit4arm_2par_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bandit4arm_2par_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -147,8 +171,7 @@ printFit(output)
 }
 }
\references{
-Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under
-  uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/bandit4arm_4par.Rd b/R/man/bandit4arm_4par.Rd
index 9119d44b..559f21e7 100644
--- a/R/man/bandit4arm_4par.Rd
+++ b/R/man/bandit4arm_4par.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/bandit4arm_4par.R
\name{bandit4arm_4par}
\alias{bandit4arm_4par}
-\title{4-Armed Bandit Task}
+\title{4 Parameter Model, without C (choice perseveration)}
\usage{
-bandit4arm_4par(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+bandit4arm_4par(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bandit4arm_4par(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1.
A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bandit4arm_4par").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bandit4arm_4par"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters:
-  "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity).
+Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 4 Parameter Model, without C (choice perseveration).
+It has the following parameters: \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity).
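Because the \value components listed above are identical across these model functions, a brief sketch of inspecting a fitted object may be useful; the object name output is hypothetical, and the $ accessors assume the list components named in the \value section:

# After e.g. output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000):
output$model             # "bandit4arm_4par"
head(output$allIndPars)  # per-subject parameter summaries (as specified by indPars)
str(output$parVals)      # posterior samples over different parameters
class(output$fit)        # the underlying rstan "stanfit" object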
-\strong{MODEL:} 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012, J Neuro) +\itemize{ + \item \strong{Task}: 4-Armed Bandit Task + \item \strong{Model}: 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012) +} } \details{ This section describes some of the function arguments in greater detail. @@ -85,10 +106,10 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} - \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} - \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.} + \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).} + \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).} @@ -130,8 +151,11 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- bandit4arm_4par("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- bandit4arm_4par(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -147,8 +171,7 @@ printFit(output) } } \references{ -Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in - Human Decision-Making. J Neuro, 32(17), 5833-5842. +Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842. } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/bandit4arm_lapse.Rd b/R/man/bandit4arm_lapse.Rd index 8733203d..91cf16bf 100644 --- a/R/man/bandit4arm_lapse.Rd +++ b/R/man/bandit4arm_lapse.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/bandit4arm_lapse.R \name{bandit4arm_lapse} \alias{bandit4arm_lapse} -\title{4-Armed Bandit Task} +\title{5 Parameter Model, without C (choice perseveration) but with xi (noise)} \usage{ -bandit4arm_lapse(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +bandit4arm_lapse(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. 
Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bandit4arm_lapse(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bandit4arm_lapse").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bandit4arm_lapse"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters:
-  "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).
+Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 5 Parameter Model, without C (choice perseveration) but with xi (noise).
+It has the following parameters: \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise).
 
-\strong{MODEL:} 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012, J Neuro)
+\itemize{
+  \item \strong{Task}: 4-Armed Bandit Task
+  \item \strong{Model}: 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -85,10 +106,10 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular
 order, however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
-  \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
-  \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
+  \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
+  \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g.
0, -50).}
 
 
 
@@ -130,8 +151,11 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 }
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bandit4arm_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bandit4arm_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bandit4arm_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -147,8 +171,7 @@ printFit(output)
 }
 }
\references{
-Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in
-  Human Decision-Making. J Neuro, 32(17), 5833-5842.
+Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/bandit4arm_lapse_decay.Rd b/R/man/bandit4arm_lapse_decay.Rd
index af07744d..7b916d58 100644
--- a/R/man/bandit4arm_lapse_decay.Rd
+++ b/R/man/bandit4arm_lapse_decay.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/bandit4arm_lapse_decay.R
\name{bandit4arm_lapse_decay}
\alias{bandit4arm_lapse_decay}
-\title{4-Armed Bandit Task}
+\title{5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).}
\usage{
-bandit4arm_lapse_decay(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+bandit4arm_lapse_decay(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bandit4arm_lapse_decay(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution.
Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bandit4arm_lapse_decay").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bandit4arm_lapse_decay"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters:
-  "Arew" (reward learning rate), "Apun" (punishment learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise), "d" (decay rate).
+Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).
+It has the following parameters: \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise), \code{d} (decay rate).
 
-\strong{MODEL:} 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018, PsyArXiv)
+\itemize{
+  \item \strong{Task}: 4-Armed Bandit Task
+  \item \strong{Model}: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -85,10 +106,10 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 labels "subjID", "choice", "gain", "loss".
It is not necessary for the columns to be in this particular
order, however it is necessary that they be labeled correctly and contain the information below:
\describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
-  \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
-  \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
+  \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
+  \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
 
 
 
@@ -130,8 +151,11 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 }
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bandit4arm_lapse_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bandit4arm_lapse_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bandit4arm_lapse_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -147,8 +171,7 @@ printFit(output)
 }
 }
\references{
-Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under
-  uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/bandit4arm_singleA_lapse.Rd b/R/man/bandit4arm_singleA_lapse.Rd
index ff7bf3c8..7441b8b5 100644
--- a/R/man/bandit4arm_singleA_lapse.Rd
+++ b/R/man/bandit4arm_singleA_lapse.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/bandit4arm_singleA_lapse.R
\name{bandit4arm_singleA_lapse}
\alias{bandit4arm_singleA_lapse}
-\title{4-Armed Bandit Task}
+\title{4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.}
\usage{
-bandit4arm_singleA_lapse(data = "choose", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1,
-  inits = "random", indPars = "mean", modelRegressor = FALSE,
-  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
-  stepsize = 1, max_treedepth = 10, ...)
+bandit4arm_singleA_lapse(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss".
See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bandit4arm_singleA_lapse(data = "choose", niter = 4000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bandit4arm_singleA_lapse").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bandit4arm_singleA_lapse"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the 4-Armed Bandit Task with the following parameters:
-  "A" (learning rate), "R" (reward sensitivity), "P" (punishment sensitivity), "xi" (noise).
+Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.
+It has the following parameters: \code{A} (learning rate), \code{R} (reward sensitivity), \code{P} (punishment sensitivity), \code{xi} (noise).
 
-\strong{MODEL:} 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018, PsyArXiv)
+\itemize{
+  \item \strong{Task}: 4-Armed Bandit Task
+  \item \strong{Model}: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -85,10 +106,10 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular
 order, however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
-  \item{"gain"}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
-  \item{"loss"}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.}
+  \item{gain}{Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).}
+  \item{loss}{Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).}
 
 
 
@@ -130,8 +151,11 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
 }
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bandit4arm_singleA_lapse("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bandit4arm_singleA_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bandit4arm_singleA_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -147,8 +171,7 @@ printFit(output)
 }
 }
\references{
-Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under
-  uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv.
10.31234/osf.io/k5b8m
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/bart_par4.Rd b/R/man/bart_par4.Rd
index 71911264..e4250a67 100644
--- a/R/man/bart_par4.Rd
+++ b/R/man/bart_par4.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/bart_par4.R
\name{bart_par4}
\alias{bart_par4}
-\title{Balloon Analogue Risk Task (Ravenzwaaij et al., 2011)}
+\title{Re-parameterized version of BART model with 4 parameters}
\usage{
-bart_par4(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "pumps", "explosion". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ bart_par4(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("bart_par4").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"bart_par4"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task with the following parameters:
-  "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature).
+Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task using Re-parameterized version of BART model with 4 parameters.
+It has the following parameters: \code{phi} (prior belief of balloon not bursting), \code{eta} (updating rate), \code{gam} (risk-taking parameter), \code{tau} (inverse temperature).
 
-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}, \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}, \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee}, \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh}, \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee}, \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang}
-
-\strong{MODEL:} Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters
+\itemize{
+  \item \strong{Task}: Balloon Analogue Risk Task
+  \item \strong{Model}: Re-parameterized version of BART model with 4 parameters (van Ravenzwaaij et al., 2011)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,9 +106,9 @@ For the Balloon Analogue Risk Task, there should be 3 columns of data with the
 labels "subjID", "pumps", "explosion". It is not necessary for the columns to be in this particular
 order, however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"pumps"}{The number of pumps.}
-  \item{"explosion"}{0: intact, 1: burst}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{pumps}{The number of pumps.}
+  \item{explosion}{0: intact, 1: burst}
 
 
 
@@ -129,11 +148,16 @@ For the Balloon Analogue Risk Task, there should be 3 columns of data with the
 Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
 and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
 description of these arguments.
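As with the bandit-task files above, here is a minimal sketch of a data.frame matching the three BART columns just described; all values are invented for illustration:

# Hypothetical BART input: one subject, four balloons.
df <- data.frame(
  subjID    = c(1, 1, 1, 1),
  pumps     = c(3, 7, 5, 2),  # number of pumps on each balloon
  explosion = c(0, 1, 0, 0)   # 0: intact, 1: burst
)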
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>, \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/ayoung-lee/}{Ayoung Lee} <\email{aylee2008@naver.com}>, \href{https://ccs-lab.github.io/team/jeongbin-oh/}{Jeongbin Oh} <\email{ows0104@gmail.com}>, \href{https://ccs-lab.github.io/team/jiyoon-lee/}{Jiyoon Lee} <\email{nicole.lee2001@gmail.com}>, \href{https://ccs-lab.github.io/team/junha-jang/}{Junha Jang} <\email{andy627robo@naver.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- bart_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- bart_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- bart_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -149,8 +173,7 @@ printFit(output)
 }
 }
\references{
-van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the
-  BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.
+van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/choiceRT_ddm.Rd b/R/man/choiceRT_ddm.Rd
index 4e6cceee..306578eb 100644
--- a/R/man/choiceRT_ddm.Rd
+++ b/R/man/choiceRT_ddm.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/choiceRT_ddm.R
\name{choiceRT_ddm}
\alias{choiceRT_ddm}
-\title{Choice Reaction Time Task}
+\title{Drift Diffusion Model}
\usage{
-choiceRT_ddm(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "RT". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,21 +27,21 @@ choiceRT_ddm(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters.
Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
-\item{inc_postpred}{\strong{(Currently not available.)}
-Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
+size). Defaults to \code{FALSE}.
+Not available for this model.}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -51,38 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
 
-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+  \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).}
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("choiceRT_ddm").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"choiceRT_ddm"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Choice Reaction Time Task with the following parameters:
-  "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).
-
-\strong{MODEL:} Drift Diffusion Model (Ratcliff, 1978, Psychological Review)\cr *Note that this implementation is \strong{not} the full Drift Diffusion Model as described in Ratcliff (1978).
This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.
+Hierarchical Bayesian Modeling of the Choice Reaction Time Task using Drift Diffusion Model.
+It has the following parameters: \code{alpha} (boundary separation), \code{beta} (bias), \code{delta} (drift rate), \code{tau} (non-decision time).
 
-Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing
-
-Parameters of the DDM (parameter names in Ratcliff), from \url{https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R}
-\cr - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha
-\cr - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1
-\cr - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 0 < delta
-\cr - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds)
+\itemize{
+  \item \strong{Task}: Choice Reaction Time Task
+  \item \strong{Model}: Drift Diffusion Model (Ratcliff, 1978)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -95,9 +106,9 @@ For the Choice Reaction Time Task, there should be 3 columns of data with the
 labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular
 order, however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Choice made for the current trial, coded as \code{1}/\code{2} to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).}
-  \item{"RT"}{Choice reaction time for the current trial, in \strong{seconds} (e.g., 0.435 0.383 0.314 0.309, etc.).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).}
+  \item{RT}{Choice reaction time for the current trial, in \strong{seconds} (e.g., 0.435 0.383 0.314 0.309, etc.).}
 
 
 
@@ -138,10 +149,18 @@ For the Choice Reaction Time Task, there should be 3 columns of data with the
 and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
 description of these arguments.
}
+\note{
+\strong{Notes:}
+Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.
+Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others on the Stan mailing list.
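Since RTbound is the one model-specific argument documented for this model, a hedged usage sketch may help; the data.frame df is hypothetical, and RTbound is passed through the ... argument like any other named argument:

# Raise the minimum allowed reaction time from the default 0.1 s to 150 ms,
# e.g. to screen out anticipatory responses:
output <- choiceRT_ddm(data = df, RTbound = 0.15,
                       niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)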
+}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- choiceRT_ddm("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- choiceRT_ddm(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- choiceRT_ddm(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
diff --git a/R/man/choiceRT_ddm_single.Rd b/R/man/choiceRT_ddm_single.Rd
index 48d6fd5e..a8f0e6d2 100644
--- a/R/man/choiceRT_ddm_single.Rd
+++ b/R/man/choiceRT_ddm_single.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/choiceRT_ddm_single.R
\name{choiceRT_ddm_single}
\alias{choiceRT_ddm_single}
-\title{Choice Reaction Time Task}
+\title{Drift Diffusion Model}
\usage{
-choiceRT_ddm_single(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "RT". See \bold{Details} below for more information.}
 
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,21 +27,21 @@ choiceRT_ddm_single(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
 
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
 
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
 
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
 
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
 
-\item{inc_postpred}{\strong{(Currently not available.)}
-Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
+size). Defaults to \code{FALSE}.
+Not available for this model.}
 
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -51,38 +52,48 @@ take on each new iteration.
See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr -\code{RTbound}: Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: +\describe{ + \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).} + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("choiceRT_ddm_single").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"choiceRT_ddm_single"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Individual Bayesian Modeling of the Choice Reaction Time Task with the following parameters: - "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time). - -\strong{MODEL:} Drift Diffusion Model (Ratcliff, 1978, Psychological Review)\cr *Note that this implementation is \strong{not} the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. +Individual Bayesian Modeling of the Choice Reaction Time Task using Drift Diffusion Model. +It has the following parameters: \code{alpha} (boundary separation), \code{beta} (bias), \code{delta} (drift rate), \code{tau} (non-decision time). -Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing - -Parameters of the DDM (parameter names in Ratcliff), from \url{https://github.com/gbiele/stan_wiener_test/blob/master/stan_wiener_test.R} -\cr - alpha (a): Boundary separation or Speed-accuracy trade-off (high alpha means high accuracy). 0 < alpha -\cr - beta (b): Initial bias, for either response (beta > 0.5 means bias towards "upper" response 'A'). 0 < beta < 1 -\cr - delta (v): Drift rate; Quality of the stimulus (delta close to 0 means ambiguous stimulus or weak ability). 
0 < delta -\cr - tau (ter): Non-decision time + Motor response time + encoding time (high means slow encoding, execution). 0 < tau (in seconds) +\itemize{ + \item \strong{Task}: Choice Reaction Time Task + \item \strong{Model}: Drift Diffusion Model (Ratcliff, 1978) +} } \details{ This section describes some of the function arguments in greater detail. @@ -95,9 +106,9 @@ For the Choice Reaction Time Task, there should be 3 columns of data with the labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Choice made for the current trial, coded as \code{1}/\code{2} to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} - \item{"RT"}{Choice reaction time for the current trial, in \strong{seconds} (e.g., 0.435 0.383 0.314 0.309, etc.).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).} + \item{RT}{Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).} @@ -138,10 +149,18 @@ For the Choice Reaction Time Task, there should be 3 columns of data with the and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. } +\note{ +\strong{Notes:} +Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters. +Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing. +} \examples{ \dontrun{ -# Run the model and store results in "output" -output <- choiceRT_ddm_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- choiceRT_ddm_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- choiceRT_ddm_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") diff --git a/R/man/cra_exp.Rd b/R/man/cra_exp.Rd index 7fc6786e..a6a0e6da 100644 --- a/R/man/cra_exp.Rd +++ b/R/man/cra_exp.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/cra_exp.R \name{cra_exp} \alias{cra_exp} -\title{Choice Under Risk and Ambiguity Task} +\title{Exponential Subjective Value Model} \usage{ -cra_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. 
Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ cra_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). For this model they are: "sv", "sv_fix", "sv_var", "p_var".} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,31 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. 
+\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("cra_exp").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"cra_exp"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.} + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task with the following parameters: - "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature). +Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using Exponential Subjective Value Model. +It has the following parameters: \code{alpha} (risk attitude), \code{beta} (ambiguity attitude), \code{gamma} (inverse temperature). -Contributor: \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} - -\strong{MODEL:} Exponential Subjective Value Model (Hsu et al., 2005, Science) +\itemize{ + \item \strong{Task}: Choice Under Risk and Ambiguity Task + \item \strong{Model}: Exponential Subjective Value Model (Hsu et al., 2005) +} } \details{ This section describes some of the function arguments in greater detail. @@ -86,12 +106,12 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"prob"}{Objective probability of the variable lottery.} - \item{"ambig"}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} - \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.} - \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.} - \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{prob}{Objective probability of the variable lottery.} + \item{ambig}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} + \item{reward_var}{Amount of reward in variable lottery. Assumed to be greater than zero.} + \item{reward_fix}{Amount of reward in fixed lottery. 
Assumed to be greater than zero.} + \item{choice}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} @@ -128,11 +148,16 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- cra_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- cra_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- cra_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -148,9 +173,7 @@ printFit(output) } } \references{ -Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding - to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. - https://doi.org/10.1126/science.1115327 +Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/cra_linear.Rd b/R/man/cra_linear.Rd index f0bbb5d0..ba904d41 100644 --- a/R/man/cra_linear.Rd +++ b/R/man/cra_linear.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/cra_linear.R \name{cra_linear} \alias{cra_linear} -\title{Choice Under Risk and Ambiguity Task} +\title{Linear Subjective Value Model} \usage{ -cra_linear(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +cra_linear(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ cra_linear(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. 
Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). For this model they are: "sv", "sv_fix", "sv_var", "p_var".} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("cra_linear").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"cra_linear"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.} + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task with the following parameters: - "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature). +Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using Linear Subjective Value Model. +It has the following parameters: \code{alpha} (risk attitude), \code{beta} (ambiguity attitude), \code{gamma} (inverse temperature). -Contributor: \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} - -\strong{MODEL:} Linear Subjective Value Model (Levy et al., 2010, J Neurophysiol) +\itemize{ + \item \strong{Task}: Choice Under Risk and Ambiguity Task + \item \strong{Model}: Linear Subjective Value Model (Levy et al., 2010) +} } \details{ This section describes some of the function arguments in greater detail. 
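For the two CRA models above, a minimal usage sketch of the regressor-export path these docs describe, assuming a hypothetical data.frame df with the six required columns; the regressor names are the ones listed under modelRegressor:

library(hBayesDM)

# Hypothetical input: a data.frame `df` with columns
# "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice".
output <- cra_linear(data = df, niter = 2000, nwarmup = 1000,
                     nchain = 4, ncore = 4, modelRegressor = TRUE)

# For this model, output$modelRegressor is documented to be a list
# holding the trial-level regressors "sv", "sv_fix", "sv_var", "p_var".
str(output$modelRegressor)
sv <- output$modelRegressor$sv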
@@ -87,12 +106,12 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"prob"}{Objective probability of the variable lottery.} - \item{"ambig"}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} - \item{"reward_var"}{Amount of reward in variable lottery. Assumed to be greater than zero.} - \item{"reward_fix"}{Amount of reward in fixed lottery. Assumed to be greater than zero.} - \item{"choice"}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{prob}{Objective probability of the variable lottery.} + \item{ambig}{Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).} + \item{reward_var}{Amount of reward in variable lottery. Assumed to be greater than zero.} + \item{reward_fix}{Amount of reward in fixed lottery. Assumed to be greater than zero.} + \item{choice}{If the variable lottery was selected, choice == 1; otherwise choice == 0.} @@ -129,11 +148,16 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- cra_linear("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- cra_linear(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- cra_linear(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -149,9 +173,7 @@ printFit(output) } } \references{ -Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural - representation of subjective value under risk and ambiguity. Journal of Neurophysiology, - 103(2), 1036-1047. +Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047. 
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/dbdm_prob_weight.Rd b/R/man/dbdm_prob_weight.Rd
index 485e3043..7707496c 100644
--- a/R/man/dbdm_prob_weight.Rd
+++ b/R/man/dbdm_prob_weight.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/dbdm_prob_weight.R
 \name{dbdm_prob_weight}
 \alias{dbdm_prob_weight}
-\title{Description Based Decison Making Task}
+\title{Probability Weight Function}
 \usage{
-dbdm_prob_weight(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ dbdm_prob_weight(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters. Current
 options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}

 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
-  \item{\code{model}}{Character value that is the name of the model ("dbdm_prob_weight").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"dbdm_prob_weight"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
 \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
 model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
 the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
}
\description{
-Hierarchical Bayesian Modeling of the Description Based Decison Making Task with the following parameters:
-  "tau" (probability weight function), "rho" (subject utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature).
+Hierarchical Bayesian Modeling of the Description Based Decision Making Task using the Probability Weight Function.
+It has the following parameters: \code{tau} (probability weight function), \code{rho} (subjective utility function), \code{lambda} (loss aversion parameter), \code{beta} (inverse softmax temperature).

-Contributor: \href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh}
-
-\strong{MODEL:} Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)
+\itemize{
+  \item \strong{Task}: Description Based Decision Making Task
+  \item \strong{Model}: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,14 +106,14 @@ For the Description Based Decison Making Task, there should be 8 columns of data
 labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice".
 It is not necessary for the columns to be in this particular
 order, however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"opt1hprob"}{Possiblity of getting higher value of outcome(opt1hval) when choosing option 1.}
-  \item{"opt2hprob"}{Possiblity of getting higher value of outcome(opt2hval) when choosing option 2.}
-  \item{"opt1hval"}{Possible (with opt1hprob probability) outcome of option 1.}
-  \item{"opt1lval"}{Possible (with (1 - opt1hprob) probability) outcome of option 1.}
-  \item{"opt2hval"}{Possible (with opt2hprob probability) outcome of option 2.}
-  \item{"opt2lval"}{Possible (with (1 - opt2hprob) probability) outcome of option 2.}
-  \item{"choice"}{If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{opt1hprob}{Probability of getting the higher outcome (opt1hval) when choosing option 1.}
+  \item{opt2hprob}{Probability of getting the higher outcome (opt2hval) when choosing option 2.}
+  \item{opt1hval}{Possible (with opt1hprob probability) outcome of option 1.}
+  \item{opt1lval}{Possible (with (1 - opt1hprob) probability) outcome of option 1.}
+  \item{opt2hval}{Possible (with opt2hprob probability) outcome of option 2.}
+  \item{opt2lval}{Possible (with (1 - opt2hprob) probability) outcome of option 2.}
+  \item{choice}{If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.}
 }
 \strong{*}Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number",
@@ -129,11 +148,16 @@ For the Description Based Decison Making Task, there should be 8 columns of data
 Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
 and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
 description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/yoonseo-zoh/}{Yoonseo Zoh} <\email{zohyos7@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- dbdm_prob_weight("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- dbdm_prob_weight(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- dbdm_prob_weight(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -149,15 +173,11 @@ printFit(output)
 }
}
\references{
-Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A
-  choice prediction competition: Choices from experience and from description. Journal of
-  Behavioral Decision Making, 23(1), 15-47.
+Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.

-Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the
-  effect of rare events in risky choice. Psychological science, 15(8), 534-539.
+Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.

-Jessup, R.
K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from - prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022. +Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022. } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/dd_cs.Rd b/R/man/dd_cs.Rd index 4a6b4392..f77c2f04 100644 --- a/R/man/dd_cs.Rd +++ b/R/man/dd_cs.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/dd_cs.R \name{dd_cs} \alias{dd_cs} -\title{Delay Discounting Task} +\title{Constant-Sensitivity (CS) Model} \usage{ -dd_cs(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ dd_cs(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. 
By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("dd_cs").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"dd_cs"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters: - "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature). +Hierarchical Bayesian Modeling of the Delay Discounting Task using Constant-Sensitivity (CS) Model. +It has the following parameters: \code{r} (exponential discounting rate), \code{s} (impatience), \code{beta} (inverse temperature). -\strong{MODEL:} Constant-Sensitivity (CS) Model (Ebert & Prelec, 2007, Management Science) +\itemize{ + \item \strong{Task}: Delay Discounting Task + \item \strong{Model}: Constant-Sensitivity (CS) Model (Ebert et al., 2007) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,12 +106,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} - \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} - \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} - \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} - \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} + \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} + \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 
0).} + \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 10).} + \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} @@ -129,8 +151,11 @@ For the Delay Discounting Task, there should be 6 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- dd_cs("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- dd_cs(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- dd_cs(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,8 +171,7 @@ printFit(output) } } \references{ -Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of - the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 +Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/dd_cs_single.Rd b/R/man/dd_cs_single.Rd index 36627ab0..caf7755d 100644 --- a/R/man/dd_cs_single.Rd +++ b/R/man/dd_cs_single.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/dd_cs_single.R \name{dd_cs_single} \alias{dd_cs_single} -\title{Delay Discounting Task} +\title{Constant-Sensitivity (CS) Model} \usage{ -dd_cs_single(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +dd_cs_single(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ dd_cs_single(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). 
+Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("dd_cs_single").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"dd_cs_single"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Individual Bayesian Modeling of the Delay Discounting Task with the following parameters: - "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature). +Individual Bayesian Modeling of the Delay Discounting Task using Constant-Sensitivity (CS) Model. +It has the following parameters: \code{r} (exponential discounting rate), \code{s} (impatience), \code{beta} (inverse temperature). -\strong{MODEL:} Constant-Sensitivity (CS) Model (Ebert & Prelec, 2007, Management Science) +\itemize{ + \item \strong{Task}: Delay Discounting Task + \item \strong{Model}: Constant-Sensitivity (CS) Model (Ebert et al., 2007) +} } \details{ This section describes some of the function arguments in greater detail. @@ -85,12 +106,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 
1, 6, 28).} - \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} - \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} - \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} - \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} + \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} + \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} + \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 10).} + \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} @@ -130,8 +151,11 @@ For the Delay Discounting Task, there should be 6 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- dd_cs_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- dd_cs_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- dd_cs_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -147,8 +171,7 @@ printFit(output) } } \references{ -Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of - the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 +Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/dd_exp.Rd b/R/man/dd_exp.Rd index 22ff861a..4faf252d 100644 --- a/R/man/dd_exp.Rd +++ b/R/man/dd_exp.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/dd_exp.R \name{dd_exp} \alias{dd_exp} -\title{Delay Discounting Task} +\title{Exponential Model} \usage{ -dd_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. 
Defaults to 4000.} @@ -25,20 +27,21 @@ dd_exp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("dd_exp").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"dd_exp"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters: - "r" (exponential discounting rate), "beta" (inverse temperature). +Hierarchical Bayesian Modeling of the Delay Discounting Task using Exponential Model. +It has the following parameters: \code{r} (exponential discounting rate), \code{beta} (inverse temperature). 
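As a sketch of the discounting rule this model names, under the conventional Samuelson (1937) parameterization (the package's Stan implementation may differ in detail), with r as the discounting rate and beta as the inverse temperature documented above:

# Exponential discounting: SV = amount * exp(-r * delay)
sv_exp <- function(amount, delay, r) amount * exp(-r * delay)

# Logistic choice rule: probability of taking the later option
p_later <- function(r, beta, amount_later, delay_later,
                    amount_sooner, delay_sooner) {
  1 / (1 + exp(-beta * (sv_exp(amount_later, delay_later, r) -
                          sv_exp(amount_sooner, delay_sooner, r))))
}

# Example values drawn from the column descriptions above
p_later(r = 0.05, beta = 2,
        amount_later = 13.4, delay_later = 6,
        amount_sooner = 10, delay_sooner = 0)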
-\strong{MODEL:} Exponential Model (Samuelson, 1937, The Review of Economic Studies) +\itemize{ + \item \strong{Task}: Delay Discounting Task + \item \strong{Model}: Exponential Model (Samuelson, 1937) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,12 +106,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} - \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} - \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} - \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} - \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} + \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} + \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).} + \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 10).} + \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} @@ -129,8 +151,11 @@ For the Delay Discounting Task, there should be 6 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- dd_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- dd_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- dd_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,8 +171,7 @@ printFit(output) } } \references{ -Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), - 155. http://doi.org/10.2307/2967612 +Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. 
http://doi.org/10.2307/2967612 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/dd_hyperbolic.Rd b/R/man/dd_hyperbolic.Rd index 8080878e..497b02c5 100644 --- a/R/man/dd_hyperbolic.Rd +++ b/R/man/dd_hyperbolic.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/dd_hyperbolic.R \name{dd_hyperbolic} \alias{dd_hyperbolic} -\title{Delay Discounting Task} +\title{Hyperbolic Model} \usage{ -dd_hyperbolic(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +dd_hyperbolic(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ dd_hyperbolic(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. 
+\describe{
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("dd_hyperbolic").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"dd_hyperbolic"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Delay Discounting Task with the following parameters:
- "k" (discounting rate), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Delay Discounting Task using Hyperbolic Model.
+It has the following parameters: \code{k} (discounting rate), \code{beta} (inverse temperature).
-\strong{MODEL:} Hyperbolic Model (Mazur, 1987)
+\itemize{
+ \item \strong{Task}: Delay Discounting Task
+ \item \strong{Model}: Hyperbolic Model (Mazur, 1987)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -85,12 +106,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).}
- \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).}
- \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).}
- \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).}
- \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).}
+ \item{amount_later}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).}
+ \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).}
+ \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g.
10).} + \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} @@ -130,8 +151,11 @@ For the Delay Discounting Task, there should be 6 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- dd_hyperbolic("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- dd_hyperbolic(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- dd_hyperbolic(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") diff --git a/R/man/dd_hyperbolic_single.Rd b/R/man/dd_hyperbolic_single.Rd index ae10b412..cf5bb7ff 100644 --- a/R/man/dd_hyperbolic_single.Rd +++ b/R/man/dd_hyperbolic_single.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/dd_hyperbolic_single.R \name{dd_hyperbolic_single} \alias{dd_hyperbolic_single} -\title{Delay Discounting Task} +\title{Hyperbolic Model} \usage{ -dd_hyperbolic_single(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ dd_hyperbolic_single(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -50,29 +52,48 @@ take on each new iteration. 
See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("dd_hyperbolic_single").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"dd_hyperbolic_single"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Individual Bayesian Modeling of the Delay Discounting Task with the following parameters: - "k" (discounting rate), "beta" (inverse temperature). +Individual Bayesian Modeling of the Delay Discounting Task using Hyperbolic Model. +It has the following parameters: \code{k} (discounting rate), \code{beta} (inverse temperature). -\strong{MODEL:} Hyperbolic Model (Mazur, 1987) +\itemize{ + \item \strong{Task}: Delay Discounting Task + \item \strong{Model}: Hyperbolic Model (Mazur, 1987) +} } \details{ This section describes some of the function arguments in greater detail. @@ -85,12 +106,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"delay_later"}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} - \item{"amount_later"}{A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).} - \item{"delay_sooner"}{An integer representing the delayed days for the sooner option (e.g. 0).} - \item{"amount_sooner"}{A floating point number representing the amount for the sooner option (e.g. 10).} - \item{"choice"}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{delay_later}{An integer representing the delayed days for the later option (e.g. 1, 6, 28).} + \item{amount_later}{A floating point number representing the amount for the later option (e.g. 
10.5, 13.4, 30.9).}
+ \item{delay_sooner}{An integer representing the delayed days for the sooner option (e.g. 0).}
+ \item{amount_sooner}{A floating point number representing the amount for the sooner option (e.g. 10).}
+ \item{choice}{If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.}
@@ -130,8 +151,11 @@ For the Delay Discounting Task, there should be 6 columns of data with the
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- dd_hyperbolic_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- dd_hyperbolic_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- dd_hyperbolic_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
diff --git a/R/man/gng_m1.Rd b/R/man/gng_m1.Rd
index c426389b..820b090d 100644
--- a/R/man/gng_m1.Rd
+++ b/R/man/gng_m1.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/gng_m1.R
\name{gng_m1}
\alias{gng_m1}
-\title{Orthogonalized Go/Nogo Task}
+\title{RW + noise}
\usage{
-gng_m1(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.}
\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -25,20 +27,21 @@ gng_m1(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}
-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".}
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("gng_m1").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"gng_m1"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
- \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters:
- "xi" (noise), "ep" (learning rate), "rho" (effective size).
+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise.
+It has the following parameters: \code{xi} (noise), \code{ep} (learning rate), \code{rho} (effective size).
-\strong{MODEL:} RW + noise (Guitart-Masip et al., 2012, Neuroimage)
+\itemize{
+ \item \strong{Task}: Orthogonalized Go/Nogo Task
+ \item \strong{Model}: RW + noise (Guitart-Masip et al., 2012)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -84,10 +106,10 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
labels "subjID", "cue", "keyPressed", "outcome".
It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} - \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} - \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} + \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} + \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} @@ -129,8 +151,11 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- gng_m1("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- gng_m1(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- gng_m1(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,9 +171,7 @@ printFit(output) } } \references{ -Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/gng_m2.Rd b/R/man/gng_m2.Rd index 847be5ea..67e8e7d7 100644 --- a/R/man/gng_m2.Rd +++ b/R/man/gng_m2.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/gng_m2.R \name{gng_m2} \alias{gng_m2} -\title{Orthogonalized Go/Nogo Task} +\title{RW + noise + bias} \usage{ -gng_m2(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". 
See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ gng_m2(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("gng_m2").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"gng_m2"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.} + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size). 
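As a reading aid for the gng_m2 description being rewritten in this hunk, here is a rough R sketch of the single-trial computation its parameter list implies (a lapse-mixed choice rule over Go/NoGo action weights with Rescorla-Wagner updates, after Guitart-Masip et al., 2012). The function and variable names are illustrative only, not hBayesDM internals:

# One trial of an RW + noise + bias scheme (illustrative sketch).
# xi: noise (lapse), ep: learning rate, b: action bias, rho: effective size.
gng_m2_trial <- function(q_go, q_nogo, pressed, outcome, xi, ep, b, rho) {
  w_go   <- q_go + b                                   # Go weight gets the static bias
  w_nogo <- q_nogo
  p_go   <- (1 - xi) * plogis(w_go - w_nogo) + xi / 2  # mix in random lapses
  if (pressed) q_go   <- q_go   + ep * (rho * outcome - q_go)    # RW update, Go
  else         q_nogo <- q_nogo + ep * (rho * outcome - q_nogo)  # RW update, NoGo
  list(q_go = q_go, q_nogo = q_nogo, p_go = p_go)
}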
+Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias. +It has the following parameters: \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{rho} (effective size). -\strong{MODEL:} RW + noise + bias (Guitart-Masip et al., 2012, Neuroimage) +\itemize{ + \item \strong{Task}: Orthogonalized Go/Nogo Task + \item \strong{Model}: RW + noise + bias (Guitart-Masip et al., 2012) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,10 +106,10 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} - \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} - \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} + \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} + \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} @@ -129,8 +151,11 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- gng_m2("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- gng_m2(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- gng_m2(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,9 +171,7 @@ printFit(output) } } \references{ -Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/gng_m3.Rd b/R/man/gng_m3.Rd index cc29ad42..024ee75b 100644 --- a/R/man/gng_m3.Rd +++ b/R/man/gng_m3.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/gng_m3.R \name{gng_m3} \alias{gng_m3} -\title{Orthogonalized Go/Nogo Task} +\title{RW + noise + bias + pi} \usage{ -gng_m3(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) 
+gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ gng_m3(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. 
+\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("gng_m3").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"gng_m3"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.} + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size). +Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias + pi. +It has the following parameters: \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{pi} (Pavlovian bias), \code{rho} (effective size). -\strong{MODEL:} RW + noise + bias + pi (Guitart-Masip et al., 2012, Neuroimage) +\itemize{ + \item \strong{Task}: Orthogonalized Go/Nogo Task + \item \strong{Model}: RW + noise + bias + pi (Guitart-Masip et al., 2012) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,10 +106,10 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". 
It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} - \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} - \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.} + \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).} + \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).} @@ -129,8 +151,11 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- gng_m3("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- gng_m3(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- gng_m3(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,9 +171,7 @@ printFit(output) } } \references{ -Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). - Go and no-go learning in reward and punishment: Interactions between affect and effect. - Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 +Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/gng_m4.Rd b/R/man/gng_m4.Rd index 0847073a..eb0821fc 100644 --- a/R/man/gng_m4.Rd +++ b/R/man/gng_m4.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/gng_m4.R \name{gng_m4} \alias{gng_m4} -\title{Orthogonalized Go/Nogo Task} +\title{RW (rew/pun) + noise + bias + pi} \usage{ -gng_m4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". 
See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ gng_m4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. 
+\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("gng_m4").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"gng_m4"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.} + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task with the following parameters: - "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity). +Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW (rew/pun) + noise + bias + pi. +It has the following parameters: \code{xi} (noise), \code{ep} (learning rate), \code{b} (action bias), \code{pi} (Pavlovian bias), \code{rhoRew} (reward sensitivity), \code{rhoPun} (punishment sensitivity). -\strong{MODEL:} RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013, J Neuro) +\itemize{ + \item \strong{Task}: Orthogonalized Go/Nogo Task + \item \strong{Model}: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,10 +106,10 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". 
It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"cue"}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.}
- \item{"keyPressed"}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).}
- \item{"outcome"}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{cue}{Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.}
+ \item{keyPressed}{Binary value representing the subject's response for that trial (where Press == 1; No press == 0).}
+ \item{outcome}{Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).}
@@ -129,8 +151,11 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- gng_m4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- gng_m4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- gng_m4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -146,9 +171,7 @@ printFit(output)
}
}
\references{
-Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta
- Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548.
- http://doi.org/10.1523/JNEUROSCI.5754-12.2013
+Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/igt_orl.Rd b/R/man/igt_orl.Rd
index f2e7cfc2..75f17365 100644
--- a/R/man/igt_orl.Rd
+++ b/R/man/igt_orl.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/igt_orl.R
\name{igt_orl}
\alias{igt_orl}
-\title{Iowa Gambling Task}
+\title{Outcome-Representation Learning Model}
\usage{
-igt_orl(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
\item{niter}{Number of iterations, including warm-up.
Defaults to 4000.} @@ -25,20 +27,21 @@ igt_orl(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,32 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr -\code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: +\describe{ + \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. 
Defaults to 100.}
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("igt_orl").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"igt_orl"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters:
- "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight).
-
-Contributor: \href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines}
+Hierarchical Bayesian Modeling of the Iowa Gambling Task using Outcome-Representation Learning Model.
+It has the following parameters: \code{Arew} (reward learning rate), \code{Apun} (punishment learning rate), \code{K} (perseverance decay), \code{betaF} (outcome frequency weight), \code{betaP} (perseverance weight).
-\strong{MODEL:} Outcome-Representation Learning Model (Haines et al., 2018, Cognitive Science)
+\itemize{
+ \item \strong{Task}: Iowa Gambling Task (Ahn et al., 2008)
+ \item \strong{Model}: Outcome-Representation Learning Model (Haines et al., 2018)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,10 +106,10 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
- \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
- \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+ \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+ \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g.
0, -50).} @@ -129,11 +148,16 @@ For the Iowa Gambling Task, there should be 4 columns of data with the Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/nate-haines/}{Nate Haines} <\email{haines.175@osu.edu}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- igt_orl("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- igt_orl(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- igt_orl(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -149,9 +173,9 @@ printFit(output) } } \references{ -Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A - Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. - https://doi.org/10.1111/cogs.12688 +Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + +Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/igt_pvl_decay.Rd b/R/man/igt_pvl_decay.Rd index 5b338806..d1f4d262 100644 --- a/R/man/igt_pvl_decay.Rd +++ b/R/man/igt_pvl_decay.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/igt_pvl_decay.R \name{igt_pvl_decay} \alias{igt_pvl_decay} -\title{Iowa Gambling Task} +\title{Prospect Valence Learning (PVL) Decay-RI} \usage{ -igt_pvl_decay(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +igt_pvl_decay(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ igt_pvl_decay(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. 
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}
-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}
\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}
\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}
\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,30 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}
-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+ \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("igt_pvl_decay").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"igt_pvl_decay"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters:
- "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).
+Hierarchical Bayesian Modeling of the Iowa Gambling Task using Prospect Valence Learning (PVL) Decay-RI.
+It has the following parameters: \code{A} (decay rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion).
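To make the roles of these four parameters concrete, a minimal R sketch of one PVL Decay-RI trial follows (prospect-valence utility of the net payoff, multiplicative decay of all deck expectancies, and a softmax whose sensitivity is derived from the consistency parameter). The names and the net-payoff convention are assumptions for illustration, not a transcription of the package's Stan code:

# One trial of a PVL Decay-RI scheme (illustrative sketch).
# A: decay rate, alpha: outcome sensitivity, cons: response consistency,
# lambda: loss aversion; ev is the vector of deck expectancies.
pvl_decay_trial <- function(ev, deck, gain, loss, A, alpha, cons, lambda) {
  net <- gain - abs(loss)                                     # net payoff this trial
  u   <- if (net >= 0) net^alpha else -lambda * (-net)^alpha  # prospect utility
  ev  <- ev * A                                               # decay all expectancies
  ev[deck] <- ev[deck] + u                                    # credit the chosen deck
  theta <- 3^cons - 1                                         # softmax sensitivity
  list(ev = ev, p_choice = exp(theta * ev) / sum(exp(theta * ev)))
}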
-\strong{MODEL:} Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014, Frontiers in Psychology) +\itemize{ + \item \strong{Task}: Iowa Gambling Task (Ahn et al., 2008) + \item \strong{Model}: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014) +} } \details{ This section describes some of the function arguments in greater detail. @@ -86,10 +106,10 @@ For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} - \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} - \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} + \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} + \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} @@ -131,8 +151,11 @@ For the Iowa Gambling Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- igt_pvl_decay("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- igt_pvl_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- igt_pvl_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -148,10 +171,9 @@ printFit(output) } } \references{ -Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, - J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence - from computational modeling with pure users. Frontiers in Psychology, 5, 1376. - http://doi.org/10.3389/fpsyg.2014.00849 +Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + +Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. 
http://doi.org/10.3389/fpsyg.2014.00849
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/igt_pvl_delta.Rd b/R/man/igt_pvl_delta.Rd
index 2d0a327f..081c7156 100644
--- a/R/man/igt_pvl_delta.Rd
+++ b/R/man/igt_pvl_delta.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/igt_pvl_delta.R
 \name{igt_pvl_delta}
 \alias{igt_pvl_delta}
-\title{Iowa Gambling Task (Ahn et al., 2008)}
+\title{Prospect Valence Learning (PVL) Delta}
 \usage{
-igt_pvl_delta(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -26,20 +27,21 @@ igt_pvl_delta(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
 Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,30 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+  \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("igt_pvl_delta").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"igt_pvl_delta"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters:
-  "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).
+Hierarchical Bayesian Modeling of the Iowa Gambling Task using Prospect Valence Learning (PVL) Delta.
+It has the following parameters: \code{A} (learning rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion).

-\strong{MODEL:} Prospect Valence Learning (PVL) Delta (Ahn et al., 2008, Cognitive Science)
+\itemize{
+  \item \strong{Task}: Iowa Gambling Task (Ahn et al., 2008)
+  \item \strong{Model}: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008)
+}
 }
 \details{
 This section describes some of the function arguments in greater detail.
@@ -86,10 +106,10 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
 labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order,
 however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
-  \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
-  \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).}
+  \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).}
+  \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g.
0, -50).}



@@ -131,8 +151,11 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
 }
 \examples{
 \dontrun{
-# Run the model and store results in "output"
-output <- igt_pvl_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- igt_pvl_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- igt_pvl_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -148,9 +171,9 @@ printFit(output)
 }
 }
 \references{
-Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models
-  using the generalization criterion method. Cognitive Science, 32(8), 1376-1402.
-  http://doi.org/10.1080/03640210802352992
+Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/igt_vpp.Rd b/R/man/igt_vpp.Rd
index dd513f7d..8123b8cf 100644
--- a/R/man/igt_vpp.Rd
+++ b/R/man/igt_vpp.Rd
@@ -2,15 +2,17 @@
 % Please edit documentation in R/igt_vpp.R
 \name{igt_vpp}
 \alias{igt_vpp}
-\title{Iowa Gambling Task}
+\title{Value-Plus-Perseverance}
 \usage{
-igt_vpp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
-  ncore = 1, nthin = 1, inits = "random", indPars = "mean",
-  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
-  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -25,20 +27,21 @@ igt_vpp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
 Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,30 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{payscale}: Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+  \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("igt_vpp").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"igt_vpp"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Iowa Gambling Task with the following parameters:
-  "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight).
+Hierarchical Bayesian Modeling of the Iowa Gambling Task using Value-Plus-Perseverance.
+It has the following parameters: \code{A} (learning rate), \code{alpha} (outcome sensitivity), \code{cons} (response consistency), \code{lambda} (loss aversion), \code{epP} (gain impact), \code{epN} (loss impact), \code{K} (decay rate), \code{w} (RL weight).

-\strong{MODEL:} Value-Plus-Perseverance (Worthy et al., 2013, Frontiers in Psychology) +\itemize{ + \item \strong{Task}: Iowa Gambling Task (Ahn et al., 2008) + \item \strong{Model}: Value-Plus-Perseverance (Worthy et al., 2013) +} } \details{ This section describes some of the function arguments in greater detail. @@ -85,10 +106,10 @@ For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} - \item{"gain"}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} - \item{"loss"}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).} + \item{gain}{Floating point value representing the amount of currency won on that trial (e.g. 50, 100).} + \item{loss}{Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).} @@ -130,8 +151,11 @@ For the Iowa Gambling Task, there should be 4 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- igt_vpp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- igt_vpp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- igt_vpp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -147,9 +171,9 @@ printFit(output) } } \references{ -Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and - win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical - Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001 +Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992 + +Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/peer_ocu.Rd b/R/man/peer_ocu.Rd index 10a3ac01..0c3d2c5f 100644 --- a/R/man/peer_ocu.Rd +++ b/R/man/peer_ocu.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/peer_ocu.R \name{peer_ocu} \alias{peer_ocu} -\title{Peer Influence Task (Chung et al., 2015)} +\title{Other-Conferred Utility (OCU) Model} \usage{ -peer_ocu(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) 
+peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -25,20 +27,21 @@ peer_ocu(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
 Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("peer_ocu").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"peer_ocu"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Peer Influence Task with the following parameters:
-  "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility).
+Hierarchical Bayesian Modeling of the Peer Influence Task using Other-Conferred Utility (OCU) Model.
+It has the following parameters: \code{rho} (risk preference), \code{tau} (inverse temperature), \code{ocu} (other-conferred utility).

-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Other-Conferred Utility (OCU) Model
+\itemize{
+  \item \strong{Task}: Peer Influence Task (Chung et al., 2015)
+  \item \strong{Model}: Other-Conferred Utility (OCU) Model
+}
 }
 \details{
 This section describes some of the function arguments in greater detail.
@@ -86,14 +106,14 @@ For the Peer Influence Task, there should be 8 columns of data with the
 labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be in this particular order,
 however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"condition"}{0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).}
-  \item{"p_gamble"}{Probability of receiving a high payoff (same for both options).}
-  \item{"safe_Hpayoff"}{High payoff of the safe option.}
-  \item{"safe_Lpayoff"}{Low payoff of the safe option.}
-  \item{"risky_Hpayoff"}{High payoff of the risky option.}
-  \item{"risky_Lpayoff"}{Low payoff of the risky option.}
-  \item{"choice"}{Which option was chosen? 0: safe, 1: risky.}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{condition}{0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).}
+  \item{p_gamble}{Probability of receiving a high payoff (same for both options).}
+  \item{safe_Hpayoff}{High payoff of the safe option.}
+  \item{safe_Lpayoff}{Low payoff of the safe option.}
+  \item{risky_Hpayoff}{High payoff of the risky option.}
+  \item{risky_Lpayoff}{Low payoff of the risky option.}
+  \item{choice}{Which option was chosen? 0: safe, 1: risky.}
 }

 \strong{*}Note: The file may contain other columns of data (e.g.
"ReactionTime", "trial_number", @@ -128,11 +148,16 @@ For the Peer Influence Task, there should be 8 columns of data with the Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- peer_ocu("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- peer_ocu(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- peer_ocu(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -148,9 +173,7 @@ printFit(output) } } \references{ -Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social - signals of safety and risk confer utility and have asymmetric effects on observers' choices. - Nature Neuroscience, 18(6), 912-916. +Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916. } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/prl_ewa.Rd b/R/man/prl_ewa.Rd index 36711a99..a0d525db 100644 --- a/R/man/prl_ewa.Rd +++ b/R/man/prl_ewa.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/prl_ewa.R \name{prl_ewa} \alias{prl_ewa} -\title{Probabilistic Reversal Learning Task} +\title{Experience-Weighted Attraction Model} \usage{ -prl_ewa(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ prl_ewa(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. 
Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
 For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("prl_ewa").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"prl_ewa"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-  \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
-  "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Experience-Weighted Attraction Model.
+It has the following parameters: \code{phi} (1 - learning rate), \code{rho} (experience decay factor), \code{beta} (inverse temperature).

-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} - -\strong{MODEL:} Experience-Weighted Attraction Model (Ouden et al., 2013, Neuron) +\itemize{ + \item \strong{Task}: Probabilistic Reversal Learning Task + \item \strong{Model}: Experience-Weighted Attraction Model (Ouden et al., 2013) +} } \details{ This section describes some of the function arguments in greater detail. @@ -86,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.} - \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.} + \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} @@ -128,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- prl_ewa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- prl_ewa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -148,9 +173,7 @@ printFit(output) } } \references{ -Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. - (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), - 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030 +Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. 
http://doi.org/10.1016/j.neuron.2013.08.030
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/prl_fictitious.Rd b/R/man/prl_fictitious.Rd
index 8bb8b288..43943daa 100644
--- a/R/man/prl_fictitious.Rd
+++ b/R/man/prl_fictitious.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/prl_fictitious.R
 \name{prl_fictitious}
 \alias{prl_fictitious}
-\title{Probabilistic Reversal Learning Task}
+\title{Fictitious Update Model}
 \usage{
-prl_fictitious(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+prl_fictitious(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "outcome". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -26,20 +27,21 @@ prl_fictitious(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
 Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
 For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("prl_fictitious").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"prl_fictitious"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-  \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
-  "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model.
+It has the following parameters: \code{eta} (learning rate), \code{alpha} (indecision point), \code{beta} (inverse temperature).

-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex)
+\itemize{
+  \item \strong{Task}: Probabilistic Reversal Learning Task
+  \item \strong{Model}: Fictitious Update Model (Glascher et al., 2009)
+}
 }
 \details{
 This section describes some of the function arguments in greater detail.
@@ -87,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
 labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order,
 however it is necessary that they be labeled correctly and contain the information below:
 \describe{
-  \item{"subjID"}{A unique identifier for each subject in the data-set.}
-  \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
-  \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+  \item{subjID}{A unique identifier for each subject in the data-set.}
+  \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+  \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}



@@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
 Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
 and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
 description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
 }
 \examples{
 \dontrun{
-# Run the model and store results in "output"
-output <- prl_fictitious("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_fictitious(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_fictitious(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
@@ -149,9 +173,7 @@ printFit(output)
 }
 }
 \references{
-Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
-  Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
-  Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/prl_fictitious_multipleB.Rd b/R/man/prl_fictitious_multipleB.Rd
index c7bb4c61..02110281 100644
--- a/R/man/prl_fictitious_multipleB.Rd
+++ b/R/man/prl_fictitious_multipleB.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/prl_fictitious_multipleB.R
 \name{prl_fictitious_multipleB}
 \alias{prl_fictitious_multipleB}
-\title{Probabilistic Reversal Learning Task}
+\title{Fictitious Update Model}
 \usage{
-prl_fictitious_multipleB(data = "choose", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1,
-  inits = "random", indPars = "mean", modelRegressor = FALSE,
-  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
-  stepsize = 1, max_treedepth = 10, ...)
+prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "block", "choice", "outcome". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -26,20 +27,21 @@ prl_fictitious_multipleB(data = "choose", niter = 4000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
 For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("prl_fictitious_multipleB").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"prl_fictitious_multipleB"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-  \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
-  "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).
+Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model.
+It has the following parameters: \code{eta} (learning rate), \code{alpha} (indecision point), \code{beta} (inverse temperature).

-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} - -\strong{MODEL:} Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex) +\itemize{ + \item \strong{Task}: Probabilistic Reversal Learning Task + \item \strong{Model}: Fictitious Update Model (Glascher et al., 2009) +} } \details{ This section describes some of the function arguments in greater detail. @@ -87,10 +106,10 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"block"}{A unique identifier for each of the multiple blocks within each subject.} - \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.} - \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{block}{A unique identifier for each of the multiple blocks within each subject.} + \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.} + \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).} @@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- prl_fictitious_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- prl_fictitious_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- prl_fictitious_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -149,9 +173,7 @@ printFit(output) } } \references{ -Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial - Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. - Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098 +Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. 
http://doi.org/10.1093/cercor/bhn098
 }
 \seealso{
 We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/prl_fictitious_rp.Rd b/R/man/prl_fictitious_rp.Rd
index 4a2b181e..808c7df4 100644
--- a/R/man/prl_fictitious_rp.Rd
+++ b/R/man/prl_fictitious_rp.Rd
@@ -2,16 +2,17 @@
 % Please edit documentation in R/prl_fictitious_rp.R
 \name{prl_fictitious_rp}
 \alias{prl_fictitious_rp}
-\title{Probabilistic Reversal Learning Task}
+\title{Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)}
 \usage{
-prl_fictitious_rp(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
 }
 \arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
 "subjID", "choice", "outcome". See \bold{Details} below for more information.}

 \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -26,20 +27,21 @@ prl_fictitious_rp(data = "choose", niter = 4000, nwarmup = 1000,
 Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
 high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

 \item{indPars}{Character value specifying how to summarize individual parameters.
 Current options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
 For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".}

 \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

 \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

 \item{adapt_delta}{Floating point value representing the target acceptance probability of a new
 sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
 \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
 take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+}}
 }
 \value{
 A class "hBayesDM" object \code{modelData} with the following components:
 \describe{
-  \item{\code{model}}{Character value that is the name of the model ("prl_fictitious_rp").}
-  \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+  \item{model}{Character value that is the name of the model (\code{"prl_fictitious_rp"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
    \code{indPars}) for each subject.}
-  \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
-  \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
    model.}
-  \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
    the user.}
-  \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
 }
 }
 \description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
-  "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE).
+It has the following parameters: \code{eta_pos} (learning rate, +PE), \code{eta_neg} (learning rate, -PE), \code{alpha} (indecision point), \code{beta} (inverse temperature).

-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE)
+\itemize{
+  \item \strong{Task}: Probabilistic Reversal Learning Task
+  \item \strong{Model}: Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) (Glascher et al., 2009; Ouden et al., 2013)
+}
 }
 \details{
 This section describes some of the function arguments in greater detail.
@@ -87,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
 labels "subjID", "choice", "outcome".
It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
- \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+ \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
@@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- prl_fictitious_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_fictitious_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_fictitious_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -149,13 +173,9 @@ printFit(output)
}
}
\references{
-Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
- Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
- Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
-Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
- (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
- 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/prl_fictitious_rp_woa.Rd b/R/man/prl_fictitious_rp_woa.Rd
index 6c9dc44f..949eab87 100644
--- a/R/man/prl_fictitious_rp_woa.Rd
+++ b/R/man/prl_fictitious_rp_woa.Rd
@@ -2,16 +2,17 @@
% Please edit documentation in R/prl_fictitious_rp_woa.R
\name{prl_fictitious_rp_woa}
\alias{prl_fictitious_rp_woa}
-\title{Probabilistic Reversal Learning Task}
+\title{Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)}
\usage{
-prl_fictitious_rp_woa(data = "choose", niter = 4000, nwarmup = 1000,
- nchain = 4, ncore = 1, nthin = 1, inits = "random",
+prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
+ nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
 indPars = "mean", modelRegressor = FALSE, vb = FALSE,
 inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
- max_treedepth = 10, ...)
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "choice", "outcome". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ prl_fictitious_rp_woa(data = "choose", niter = 4000, nwarmup = 1000,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
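The data/datafile pair documented above replaces the old single data = "choose" interface. A minimal usage sketch (the data.frame "df" and the file path are hypothetical placeholders, not shipped with the package):

# Fit from an in-memory data.frame with columns: subjID, choice, outcome
output <- prl_fictitious_rp_woa(data = df)

# Equivalently, fit from a tab-separated text file with the same columns
output <- prl_fictitious_rp_woa(datafile = "prl_data.txt")

Only one of the two inputs should be supplied in a given call.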
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("prl_fictitious_rp_woa").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"prl_fictitious_rp_woa"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
- \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
- "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point).
+It has the following parameters: \code{eta_pos} (learning rate, +PE), \code{eta_neg} (learning rate, -PE), \code{beta} (inverse temperature).
-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)
+\itemize{
+ \item \strong{Task}: Probabilistic Reversal Learning Task
+ \item \strong{Model}: Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) (Glascher et al., 2009; Ouden et al., 2013)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
- \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+ \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
@@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- prl_fictitious_rp_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_fictitious_rp_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_fictitious_rp_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -149,13 +173,9 @@ printFit(output)
}
}
\references{
-Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
- Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
- Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
-Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
- (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
- 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/prl_fictitious_woa.Rd b/R/man/prl_fictitious_woa.Rd
index 19e47571..dbdb5dc3 100644
--- a/R/man/prl_fictitious_woa.Rd
+++ b/R/man/prl_fictitious_woa.Rd
@@ -2,16 +2,17 @@
% Please edit documentation in R/prl_fictitious_woa.R
\name{prl_fictitious_woa}
\alias{prl_fictitious_woa}
-\title{Probabilistic Reversal Learning Task}
+\title{Fictitious Update Model, without alpha (indecision point)}
\usage{
-prl_fictitious_woa(data = "choose", niter = 4000, nwarmup = 1000,
- nchain = 4, ncore = 1, nthin = 1, inits = "random",
+prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
+ nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
 indPars = "mean", modelRegressor = FALSE, vb = FALSE,
 inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
- max_treedepth = 10, ...)
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "choice", "outcome". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ prl_fictitious_woa(data = "choose", niter = 4000, nwarmup = 1000,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
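A sketch of the inits options documented above (values are illustrative only; the vector form is assumed to supply one initial value per model parameter, in the order the parameters are listed for this model, i.e. eta then beta):

# Default: initial values derived from a quick variational-inference run
output <- prl_fictitious_woa(data = df, inits = "vb")

# Or supply your own initial values for eta and beta
output <- prl_fictitious_woa(data = df, inits = c(0.5, 1.0))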
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("prl_fictitious_woa").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"prl_fictitious_woa"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
- \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
- "eta" (learning rate), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Fictitious Update Model, without alpha (indecision point).
+It has the following parameters: \code{eta} (learning rate), \code{beta} (inverse temperature).
-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Fictitious Update Model (Glascher et al., 2009, Cerebral Cortex), without alpha (indecision point)
+\itemize{
+ \item \strong{Task}: Probabilistic Reversal Learning Task
+ \item \strong{Model}: Fictitious Update Model, without alpha (indecision point) (Glascher et al., 2009)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
- \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+ \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
@@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
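For concreteness, a toy data set in the format just described could be built as follows (two subjects with three trials each, far fewer than any real analysis would use):

# Minimal PRL-format data.frame: subjID, choice (1 or 2), outcome (1 or -1)
df <- data.frame(
  subjID  = rep(c("s01", "s02"), each = 3),
  choice  = c(1, 2, 1, 2, 2, 1),
  outcome = c(1, -1, 1, 1, -1, -1)
)

output <- prl_fictitious_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)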
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- prl_fictitious_woa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_fictitious_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_fictitious_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -149,9 +173,7 @@ printFit(output)
}
}
\references{
-Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial
- Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making.
- Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/prl_rp.Rd b/R/man/prl_rp.Rd
index 548f2c78..e0cc6ac4 100644
--- a/R/man/prl_rp.Rd
+++ b/R/man/prl_rp.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/prl_rp.R
\name{prl_rp}
\alias{prl_rp}
-\title{Probabilistic Reversal Learning Task}
+\title{Reward-Punishment Model}
\usage{
-prl_rp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "choice", "outcome". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -25,20 +27,21 @@ prl_rp(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
For this model they are: "ev_c", "ev_nc", "pe".}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -49,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("prl_rp").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"prl_rp"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
- \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
- "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model.
+It has the following parameters: \code{Apun} (punishment learning rate), \code{Arew} (reward learning rate), \code{beta} (inverse temperature).
-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Reward-Punishment Model (Ouden et al., 2013, Neuron)
+\itemize{
+ \item \strong{Task}: Probabilistic Reversal Learning Task
+ \item \strong{Model}: Reward-Punishment Model (Ouden et al., 2013)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -86,9 +106,9 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
- \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+ \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
@@ -128,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- prl_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -148,9 +173,7 @@ printFit(output)
}
}
\references{
-Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
- (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
- 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/prl_rp_multipleB.Rd b/R/man/prl_rp_multipleB.Rd
index a0d76318..cb884a10 100644
--- a/R/man/prl_rp_multipleB.Rd
+++ b/R/man/prl_rp_multipleB.Rd
@@ -2,16 +2,17 @@
% Please edit documentation in R/prl_rp_multipleB.R
\name{prl_rp_multipleB}
\alias{prl_rp_multipleB}
-\title{Probabilistic Reversal Learning Task}
+\title{Reward-Punishment Model}
\usage{
-prl_rp_multipleB(data = "choose", niter = 4000, nwarmup = 1000,
- nchain = 4, ncore = 1, nthin = 1, inits = "random",
+prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
+ nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
 indPars = "mean", modelRegressor = FALSE, vb = FALSE,
 inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
- max_treedepth = 10, ...)
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "block", "choice", "outcome". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ prl_rp_multipleB(data = "choose", niter = 4000, nwarmup = 1000,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
For this model they are: "ev_c", "ev_nc", "pe".}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
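Because full MCMC on multi-block data can be slow, the vb flag documented above offers a quick approximate fit for sanity checks before committing to sampling. A sketch ("df" being a hypothetical data.frame with subjID, block, choice, outcome columns):

# Fast approximate posterior via variational inference
fit_vb <- prl_rp_multipleB(data = df, vb = TRUE)

# Full MCMC once the model and data pass the quick check
fit_mcmc <- prl_rp_multipleB(data = df, niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4)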
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("prl_rp_multipleB").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"prl_rp_multipleB"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
- \item{\code{modelRegressor}}{List object containing the extracted model-based regressors.}
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task with the following parameters:
- "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).
+Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the Reward-Punishment Model.
+It has the following parameters: \code{Apun} (punishment learning rate), \code{Arew} (reward learning rate), \code{beta} (inverse temperature).
-Contributor: (for model-based regressors) \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} and \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
-
-\strong{MODEL:} Reward-Punishment Model (Ouden et al., 2013, Neuron)
+\itemize{
+ \item \strong{Task}: Probabilistic Reversal Learning Task
+ \item \strong{Model}: Reward-Punishment Model (Ouden et al., 2013)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,10 +106,10 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data
labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"block"}{A unique identifier for each of the multiple blocks within each subject.}
- \item{"choice"}{Integer value representing the option chosen on that trial: 1 or 2.}
- \item{"outcome"}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{block}{A unique identifier for each of the multiple blocks within each subject.}
+ \item{choice}{Integer value representing the option chosen on that trial: 1 or 2.}
+ \item{outcome}{Integer value representing the outcome of that trial (where reward == 1, and loss == -1).}
@@ -129,11 +148,16 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang (for model-based regressors)} <\email{jaeyeong.yang1125@gmail.com}>, \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park (for model-based regressors)} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- prl_rp_multipleB("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- prl_rp_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- prl_rp_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -149,9 +173,7 @@ printFit(output)
}
}
\references{
-Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al.
- (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4),
- 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/pst_gainloss_Q.Rd b/R/man/pst_gainloss_Q.Rd
index 6f0bb0bb..0872d260 100644
--- a/R/man/pst_gainloss_Q.Rd
+++ b/R/man/pst_gainloss_Q.Rd
@@ -2,16 +2,17 @@
% Please edit documentation in R/pst_gainloss_Q.R
\name{pst_gainloss_Q}
\alias{pst_gainloss_Q}
-\title{Probabilistic Selection Task}
+\title{Gain-Loss Q Learning Model}
\usage{
-pst_gainloss_Q(data = "choose", niter = 4000, nwarmup = 1000,
- nchain = 4, ncore = 1, nthin = 1, inits = "random",
+pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
+ nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
 indPars = "mean", modelRegressor = FALSE, vb = FALSE,
 inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
- max_treedepth = 10, ...)
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "type", "choice", "reward". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -26,20 +27,21 @@ pst_gainloss_Q(data = "choose", niter = 4000, nwarmup = 1000,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
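A sketch of retrieving the posterior predictive simulations that inc_postpred = TRUE adds to the output (assuming, per the return-value structure documented below, that they are stored under "y_pred" among the posterior samples in parVals; "df" is hypothetical):

output <- pst_gainloss_Q(data = df, inc_postpred = TRUE)

# Trial-level posterior predictive choices, one slice per posterior draw
y_pred <- output$parVals$y_pred
dim(y_pred)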
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("pst_gainloss_Q").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"pst_gainloss_Q"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Probabilistic Selection Task with the following parameters:
- "alpha_pos" (learning rate for positive feedbacks), "alpha_neg" (learning rate for negative feedbacks), "beta" (inverse temperature).
+Hierarchical Bayesian Modeling of the Probabilistic Selection Task using the Gain-Loss Q Learning Model.
+It has the following parameters: \code{alpha_pos} (learning rate for positive feedback), \code{alpha_neg} (learning rate for negative feedback), \code{beta} (inverse temperature).
-Contributor: \href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang}
-
-\strong{MODEL:} Gain-Loss Q Learning Model (Frank et al., 2007, PNAS)
+\itemize{
+ \item \strong{Task}: Probabilistic Selection Task
+ \item \strong{Model}: Gain-Loss Q Learning Model (Frank et al., 2007)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -87,10 +106,10 @@ For the Probabilistic Selection Task, there should be 4 columns of data with the
labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"type"}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. \code{12}, \code{34}, or \code{56}. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2.\cr Code for each stimulus type (1~6) is defined as below: \tabular{ccl}{Code \tab Stimulus \tab Probability to win \cr \code{1} \tab A \tab 80\% \cr \code{2} \tab B \tab 20\% \cr \code{3} \tab C \tab 70\% \cr \code{4} \tab D \tab 30\% \cr \code{5} \tab E \tab 60\% \cr \code{6} \tab F \tab 40\%} The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
- \item{"choice"}{Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).}
- \item{"reward"}{Amount of reward earned as a result of the trial.}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{type}{Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. The win probability for each stimulus type (1~6) is defined as: 80\% (type 1), 20\% (type 2), 70\% (type 3), 30\% (type 4), 60\% (type 5), 40\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.}
+ \item{choice}{Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).}
+ \item{reward}{Amount of reward earned as a result of the trial.}
@@ -129,11 +148,16 @@ For the Probabilistic Selection Task, there should be 4 columns of data with the
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/jaeyeong-yang/}{Jaeyeong Yang} <\email{jaeyeong.yang1125@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- pst_gainloss_Q("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- pst_gainloss_Q(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- pst_gainloss_Q(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -149,9 +173,7 @@ printFit(output)
}
}
\references{
-Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic
- triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings
- of the National Academy of Sciences, 104(41), 16311-16316.
+Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/ra_noLA.Rd b/R/man/ra_noLA.Rd
index aa936308..79fae7b0 100644
--- a/R/man/ra_noLA.Rd
+++ b/R/man/ra_noLA.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/ra_noLA.R
\name{ra_noLA}
\alias{ra_noLA}
-\title{Risk Aversion Task}
+\title{Prospect Theory, without loss aversion (LA) parameter}
\usage{
-ra_noLA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -25,20 +27,21 @@ ra_noLA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
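The indPars argument above controls how each subject's posterior is collapsed into the allIndPars component of the returned object. A sketch of inspecting those per-subject summaries ("df" hypothetical; the layout is assumed to be one row per subject with the model parameters, here rho and tau):

output <- ra_noLA(data = df, indPars = "mean")

# Posterior means of rho and tau, one row per subject
head(output$allIndPars)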
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ra_noLA").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ra_noLA"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
- "rho" (risk aversion), "tau" (inverse temperature).
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without loss aversion (LA) parameter.
+It has the following parameters: \code{rho} (risk aversion), \code{tau} (inverse temperature).
-\strong{MODEL:} Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without loss aversion (LA) parameter
+\itemize{
+ \item \strong{Task}: Risk Aversion Task
+ \item \strong{Model}: Prospect Theory, without loss aversion (LA) parameter (Sokol-Hessner et al., 2009)
+}
}
\details{
This section describes some of the function arguments in greater detail.
@@ -84,11 +106,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the
labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
- \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
- \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
- \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+ \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
+ \item{cert}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
+ \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.}
@@ -129,8 +151,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- ra_noLA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- ra_noLA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- ra_noLA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")
@@ -144,18 +169,9 @@ plot(output)
# Show the WAIC and LOOIC model fit estimates
printFit(output)
}
-
-\dontrun{
-# Paths to data published in Sokol-Hessner et al. (2009)
-path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM")
-path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM")
-}
}
\references{
-Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., &
- Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion.
- Proceedings of the National Academy of Sciences of the United States of America, 106(13),
- 5035-5040. http://www.pnas.org/content/106/13/5035
+Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:

diff --git a/R/man/ra_noRA.Rd b/R/man/ra_noRA.Rd
index 060a39d0..53d5bbf3 100644
--- a/R/man/ra_noRA.Rd
+++ b/R/man/ra_noRA.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/ra_noRA.R
\name{ra_noRA}
\alias{ra_noRA}
-\title{Risk Aversion Task}
+\title{Prospect Theory, without risk aversion (RA) parameter}
\usage{
-ra_noRA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
@@ -25,20 +27,21 @@ ra_noRA(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current
options are: "mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
@@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}
\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.
+\describe{
+
+
+
+
+
+
+
+
+
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ra_noRA").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ra_noRA"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
- "lambda" (loss aversion), "tau" (inverse temperature).
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without risk aversion (RA) parameter.
+It has the following parameters: \code{lambda} (loss aversion), \code{tau} (inverse temperature).
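As a reading aid for the two parameters just listed, a sketch of the trial-level choice rule this parameterization implies (illustrative only, based on the general prospect-theory setup of these ra models; the package's Stan code is authoritative): with risk aversion removed, the gamble's utility is the 50/50 mix of the gain and the lambda-weighted loss, compared against the certain amount through an inverse-logit rule with inverse temperature tau.

# Probability of taking the gamble under the no-RA prospect model (sketch)
p_gamble <- function(gain, loss, cert, lambda, tau) {
  u_gamble <- 0.5 * gain - 0.5 * lambda * abs(loss)  # abs() tolerates loss coded as 5 or -5
  u_cert   <- cert
  plogis(tau * (u_gamble - u_cert))                  # plogis() is the inverse logit
}

p_gamble(gain = 9, loss = -5, cert = 1, lambda = 2, tau = 1)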
-\strong{MODEL:} Prospect Theory (Sokol-Hessner et al., 2009, PNAS), without risk aversion (RA) parameter +\itemize{ + \item \strong{Task}: Risk Aversion Task + \item \strong{Model}: Prospect Theory, without risk aversion (RA) parameter (Sokol-Hessner et al., 2009) +} } \details{ This section describes some of the function arguments in greater detail. @@ -84,11 +106,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).} - \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} - \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.} - \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).} + \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} + \item{cert}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.} + \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} @@ -129,8 +151,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- ra_noRA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- ra_noRA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- ra_noRA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -144,18 +169,9 @@ plot(output) # Show the WAIC and LOOIC model fit estimates printFit(output) } - -\dontrun{ -# Paths to data published in Sokol-Hessner et al. (2009) -path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") -} } \references{ -Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & - Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. - Proceedings of the National Academy of Sciences of the United States of America, 106(13), - 5035-5040. http://www.pnas.org/content/106/13/5035 +Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. 
http://www.pnas.org/content/106/13/5035
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/ra_prospect.Rd b/R/man/ra_prospect.Rd
index dea44925..2930cb0c 100644
--- a/R/man/ra_prospect.Rd
+++ b/R/man/ra_prospect.Rd
@@ -2,16 +2,17 @@
% Please edit documentation in R/ra_prospect.R
\name{ra_prospect}
\alias{ra_prospect}
-\title{Risk Aversion Task}
+\title{Prospect Theory}
\usage{
-ra_prospect(data = "choose", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "random",
+ra_prospect(data = NULL, datafile = "", niter = 4000,
+  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, ...)
+  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -26,20 +27,21 @@ ra_prospect(data = "choose", niter = 4000, nwarmup = 1000,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}

\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ra_prospect").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ra_prospect"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Risk Aversion Task with the following parameters:
- "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature).
+Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory.
+It has the following parameters: \code{rho} (risk aversion), \code{lambda} (loss aversion), \code{tau} (inverse temperature).

-\strong{MODEL:} Prospect Theory (Sokol-Hessner et al., 2009, PNAS)
+\itemize{
+ \item \strong{Task}: Risk Aversion Task
+ \item \strong{Model}: Prospect Theory (Sokol-Hessner et al., 2009)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -85,11 +106,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the
labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be
in this particular order, however it is necessary that they be labeled
correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
- \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
- \item{"cert"}{Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero.}
- \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).}
+ \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).}
+ \item{cert}{Guaranteed amount of a safe option.
"cert" is assumed to be zero or greater than zero.} + \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} @@ -130,8 +151,11 @@ For the Risk Aversion Task, there should be 5 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- ra_prospect("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- ra_prospect(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- ra_prospect(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -145,18 +169,9 @@ plot(output) # Show the WAIC and LOOIC model fit estimates printFit(output) } - -\dontrun{ -# Paths to data published in Sokol-Hessner et al. (2009) -path_to_attend_data <- system.file("extdata", "ra_data_attend.txt", package = "hBayesDM") -path_to_regulate_data <- system.file("extdata", "ra_data_reappraisal.txt", package = "hBayesDM") -} } \references{ -Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & - Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. - Proceedings of the National Academy of Sciences of the United States of America, 106(13), - 5035-5040. http://www.pnas.org/content/106/13/5035 +Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/rdt_happiness.Rd b/R/man/rdt_happiness.Rd index 30067294..fd486c02 100644 --- a/R/man/rdt_happiness.Rd +++ b/R/man/rdt_happiness.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/rdt_happiness.R \name{rdt_happiness} \alias{rdt_happiness} -\title{Risky Decision Task} +\title{Happiness Computational Model} \usage{ -rdt_happiness(data = "choose", niter = 4000, nwarmup = 1000, - nchain = 4, ncore = 1, nthin = 1, inits = "random", +rdt_happiness(data = NULL, datafile = "", niter = 4000, + nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb", indPars = "mean", modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, - max_treedepth = 10, ...) + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -26,20 +27,21 @@ rdt_happiness(data = "choose", niter = 4000, nwarmup = 1000, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. 
Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -50,31 +52,48 @@ take on each new iteration. See \bold{Details} below.}

\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("rdt_happiness").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"rdt_happiness"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Risky Decision Task with the following parameters:
- "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error).
+Hierarchical Bayesian Modeling of the Risky Decision Task using the Happiness Computational Model.
+It has the following parameters: \code{w0} (baseline), \code{w1} (weight of certain rewards), \code{w2} (weight of expected values), \code{w3} (weight of reward prediction errors), \code{gam} (forgetting factor), \code{sig} (standard deviation of error).
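Several pages here note that inc_postpred = TRUE adds "y_pred" to the model output; a hedged sketch of pulling those draws out through the documented parVals component (the array layout stated in the comment is an assumption, not something the patch specifies):

# df: a data.frame in the documented format (see the sketch earlier).
output <- rdt_happiness(data = df, inc_postpred = TRUE,
                        niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
y_pred <- output$parVals$y_pred  # trial-level posterior predictive simulations
dim(y_pred)                      # assumed layout: samples x subjects x trials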
-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} - -\strong{MODEL:} Happiness Computational Model (Rutledge et al., 2014, PNAS) +\itemize{ + \item \strong{Task}: Risky Decision Task + \item \strong{Model}: Happiness Computational Model (Rutledge et al., 2014) +} } \details{ This section describes some of the function arguments in greater detail. @@ -87,15 +106,15 @@ For the Risky Decision Task, there should be 9 columns of data with the labels "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"gain"}{Possible (50\%) gain outcome of a risky option (e.g. 9).} - \item{"loss"}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} - \item{"cert"}{Guaranteed amount of a safe option.} - \item{"type"}{loss == -1, mixed == 0, gain == 1} - \item{"gamble"}{If gamble was taken, gamble == 1; else gamble == 0.} - \item{"outcome"}{Result of the trial.} - \item{"happy"}{Happiness score.} - \item{"RT_happy"}{Reaction time for answering the happiness score.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{gain}{Possible (50\%) gain outcome of a risky option (e.g. 9).} + \item{loss}{Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).} + \item{cert}{Guaranteed amount of a safe option.} + \item{type}{loss == -1, mixed == 0, gain == 1} + \item{gamble}{If gamble was taken, gamble == 1; else gamble == 0.} + \item{outcome}{Result of the trial.} + \item{happy}{Happiness score.} + \item{RT_happy}{Reaction time for answering the happiness score.} } \strong{*}Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. @@ -129,11 +148,16 @@ For the Risky Decision Task, there should be 9 columns of data with the Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- rdt_happiness("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- rdt_happiness(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- rdt_happiness(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -149,9 +173,7 @@ printFit(output) } } \references{ -Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model - of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), - 12252-12257. +Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257. 
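The new datafile argument only ever appears in the usage blocks; a hedged one-liner (the file name is hypothetical, and the file must be tab-separated with the column labels documented above):

output <- rdt_happiness(datafile = "rdt_data.txt",
                        niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)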
} \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/ts_par4.Rd b/R/man/ts_par4.Rd index 9a6f72cd..097b14fc 100644 --- a/R/man/ts_par4.Rd +++ b/R/man/ts_par4.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/ts_par4.R \name{ts_par4} \alias{ts_par4} -\title{Two-Step Task (Daw et al., 2011)} +\title{Hybrid Model, with 4 parameters} \usage{ -ts_par4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ ts_par4(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred_step1", "y_pred_step2"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,32 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr -\code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. 
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it is possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+ \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ts_par4").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ts_par4"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters:
- "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight).
-
-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 4 parameters.
+It has the following parameters: \code{a} (learning rate for both stages 1 & 2), \code{beta} (inverse temperature for both stages 1 & 2), \code{pi} (perseverance), \code{w} (model-based weight).

-\strong{MODEL:} Hybrid Model (Daw et al., 2011; Wunderlich et al., 2012), with 4 parameters
+\itemize{
+ \item \strong{Task}: Two-Step Task (Daw et al., 2011)
+ \item \strong{Model}: Hybrid Model, with 4 parameters (Daw et al., 2011; Wunderlich et al., 2012)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -87,10 +106,10 @@ For the Two-Step Task, there should be 4 columns of data with the
labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be
in this particular order, however it is necessary that they be labeled
correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
- \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition.
To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
- \item{"reward"}{Reward after Level 2 (0 or 1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{level1_choice}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
+ \item{level2_choice}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
+ \item{reward}{Reward after Level 2 (0 or 1).}

@@ -129,11 +148,16 @@ For the Two-Step Task, there should be 4 columns of data with the
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- ts_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- ts_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- ts_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

@@ -149,12 +173,11 @@ printFit(output)
}
}
\references{
-Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011).
- Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6),
- 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

-Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over
- model-free choice behavior. Neuron, 75(3), 418-424.
+Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424.
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/ts_par6.Rd b/R/man/ts_par6.Rd
index 8dc52e08..60fc2802 100644
--- a/R/man/ts_par6.Rd
+++ b/R/man/ts_par6.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/ts_par6.R
\name{ts_par6}
\alias{ts_par6}
-\title{Two-Step Task (Daw et al., 2011)}
+\title{Hybrid Model, with 6 parameters}
\usage{
-ts_par6(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -25,20 +27,21 @@ ts_par6(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred_step1", "y_pred_step2"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,32 +52,48 @@ take on each new iteration. See \bold{Details} below.}

\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it is possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+ \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2.
Defaults to 0.7.}
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ts_par6").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ts_par6"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters:
- "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight).
-
-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 6 parameters.
+It has the following parameters: \code{a1} (learning rate in stage 1), \code{beta1} (inverse temperature in stage 1), \code{a2} (learning rate in stage 2), \code{beta2} (inverse temperature in stage 2), \code{pi} (perseverance), \code{w} (model-based weight).

-\strong{MODEL:} Hybrid Model (Daw et al., 2011, Neuron), with 6 parameters
+\itemize{
+ \item \strong{Task}: Two-Step Task (Daw et al., 2011)
+ \item \strong{Model}: Hybrid Model, with 6 parameters (Daw et al., 2011)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -87,10 +106,10 @@ For the Two-Step Task, there should be 4 columns of data with the
labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be
in this particular order, however it is necessary that they be labeled
correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
- \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition.
To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
- \item{"reward"}{Reward after Level 2 (0 or 1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{level1_choice}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
+ \item{level2_choice}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
+ \item{reward}{Reward after Level 2 (0 or 1).}

@@ -129,11 +148,16 @@ For the Two-Step Task, there should be 4 columns of data with the
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- ts_par6("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- ts_par6(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- ts_par6(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

@@ -149,9 +173,9 @@ printFit(output)
}
}
\references{
-Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011).
- Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6),
- 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/ts_par7.Rd b/R/man/ts_par7.Rd
index f39721e1..625c3d30 100644
--- a/R/man/ts_par7.Rd
+++ b/R/man/ts_par7.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/ts_par7.R
\name{ts_par7}
\alias{ts_par7}
-\title{Two-Step Task (Daw et al., 2011)}
+\title{Hybrid Model, with 7 parameters (original model)}
\usage{
-ts_par7(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
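Since trans_prob is the only model-specific argument the ts_* pages document, a hedged sketch of passing it through ..., together with toy two-step data (object names and values invented for illustration; column coding follows the describe blocks above):

# Hypothetical toy two-step data; a real data set needs many more trials.
ts_df <- data.frame(
  subjID        = rep(1, 4),
  level1_choice = c(1, 2, 1, 1),  # 1: stimulus 1, 2: stimulus 2
  level2_choice = c(1, 3, 2, 1),  # 1: stimulus 3, ..., 4: stimulus 6
  reward        = c(1, 0, 0, 1)   # reward after Level 2 (0 or 1)
)
# Override the default common transition probability (0.7):
output <- ts_par7(data = ts_df, trans_prob = 0.8,
                  niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)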
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -25,20 +27,21 @@ ts_par7(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred_step1", "y_pred_step2"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,32 +52,48 @@ take on each new iteration. See \bold{Details} below.}

\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{For this model, it's possible to set the following \strong{model-specific argument} to a value that you may prefer. \cr
-\code{trans_prob}: Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, it is possible to set \strong{model-specific argument(s)} as follows:
+\describe{
+ \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2.
Defaults to 0.7.}
+}}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ts_par7").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ts_par7"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Two-Step Task with the following parameters:
- "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace).
-
-Contributor: \href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park}
+Hierarchical Bayesian Modeling of the Two-Step Task using the Hybrid Model, with 7 parameters (original model).
+It has the following parameters: \code{a1} (learning rate in stage 1), \code{beta1} (inverse temperature in stage 1), \code{a2} (learning rate in stage 2), \code{beta2} (inverse temperature in stage 2), \code{pi} (perseverance), \code{w} (model-based weight), \code{lambda} (eligibility trace).

-\strong{MODEL:} Hybrid Model (Daw et al., 2011, Neuron), with 7 parameters (original model)
+\itemize{
+ \item \strong{Task}: Two-Step Task (Daw et al., 2011)
+ \item \strong{Model}: Hybrid Model, with 7 parameters (original model; Daw et al., 2011)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -87,10 +106,10 @@ For the Two-Step Task, there should be 4 columns of data with the
labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be
in this particular order, however it is necessary that they be labeled
correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"level1_choice"}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
- \item{"level2_choice"}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr *Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition.
To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
- \item{"reward"}{Reward after Level 2 (0 or 1).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{level1_choice}{Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).}
+ \item{level2_choice}{Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\cr Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument \code{trans_prob} to your preferred value.}
+ \item{reward}{Reward after Level 2 (0 or 1).}

@@ -129,11 +148,16 @@ For the Two-Step Task, there should be 4 columns of data with the
Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide
and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical
description of these arguments.
+
+\subsection{Contributors}{\href{https://ccs-lab.github.io/team/harhim-park/}{Harhim Park} <\email{hrpark12@gmail.com}>}
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- ts_par7("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- ts_par7(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- ts_par7(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

@@ -149,9 +173,9 @@ printFit(output)
}
}
\references{
-Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011).
- Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6),
- 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
}
\seealso{
We refer users to our in-depth tutorial for an example of using hBayesDM:
diff --git a/R/man/ug_bayes.Rd b/R/man/ug_bayes.Rd
index 4bdb2538..8d01105a 100644
--- a/R/man/ug_bayes.Rd
+++ b/R/man/ug_bayes.Rd
@@ -2,15 +2,17 @@
% Please edit documentation in R/ug_bayes.R
\name{ug_bayes}
\alias{ug_bayes}
-\title{Norm-Training Ultimatum Game}
+\title{Ideal Observer Model}
\usage{
-ug_bayes(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
- ncore = 1, nthin = 1, inits = "random", indPars = "mean",
- modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
- adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
+ nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+ indPars = "mean", modelRegressor = FALSE, vb = FALSE,
+ inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
+ max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
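The three ultimatum-game columns documented below are equally easy to mock up; a hedged sketch with invented values (a real data set should cover many more trials and subjects):

# Hypothetical norm-training ultimatum-game data.
ug_df <- data.frame(
  subjID = rep(1, 4),
  offer  = c(4, 10, 11, 5),  # offer made in each trial
  accept = c(0, 1, 1, 0)     # accepted == 1, rejected == 0
)
output <- ug_bayes(data = ug_df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)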
}
\arguments{
-\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as:
+\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-separated txt file
+containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
"subjID", "offer", "accept". See \bold{Details} below for more information.}

\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}

@@ -25,20 +27,21 @@ ug_bayes(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4,
Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
high.}

-\item{inits}{Character value specifying how the initial values should be generated. Options are
-"fixed" or "random", or your own initial values.}
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}

\item{indPars}{Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".}

-\item{modelRegressor}{Export model-based regressors? TRUE or FALSE.
-Currently not available for this model.}
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+Not available for this model.}

\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
-to FALSE.}
+to \code{FALSE}.}

\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
-size). Defaults to FALSE.}
+size). Defaults to \code{FALSE}.
+If set to \code{TRUE}, it includes: "y_pred"}

\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}

@@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.}

\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can
take on each new iteration. See \bold{Details} below.}

-\item{...}{Not used for this model.}
+\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ug_bayes").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ug_bayes"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game with the following parameters:
- "alpha" (envy), "beta" (guilt), "tau" (inverse temperature).
+Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using the Ideal Observer Model.
+It has the following parameters: \code{alpha} (envy), \code{beta} (guilt), \code{tau} (inverse temperature).

-\strong{MODEL:} Ideal Observer Model (Xiang et al., 2013, J Neuro)
+\itemize{
+ \item \strong{Task}: Norm-Training Ultimatum Game
+ \item \strong{Model}: Ideal Observer Model (Xiang et al., 2013)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -84,9 +106,9 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the
labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order,
however it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).}
- \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{offer}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).}
+ \item{accept}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).}

@@ -129,8 +151,11 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the
}
\examples{
\dontrun{
-# Run the model and store results in "output"
-output <- ug_bayes("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+# Run the model with a given data.frame as df
+output <- ug_bayes(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+
+# Run the model with example data
+output <- ug_bayes(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

@@ -146,9 +171,7 @@ printFit(output)
}
}
\references{
-Xiang, T., Lohrenz, T., & Montague, P. R. (2013).
Computational Substrates of Norms and Their - Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. - http://doi.org/10.1523/JNEUROSCI.1642-12.2013 +Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/ug_delta.Rd b/R/man/ug_delta.Rd index 138b0ea4..830cf93c 100644 --- a/R/man/ug_delta.Rd +++ b/R/man/ug_delta.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/ug_delta.R \name{ug_delta} \alias{ug_delta} -\title{Norm-Training Ultimatum Game} +\title{Rescorla-Wagner (Delta) Model} \usage{ -ug_delta(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "offer", "accept". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ ug_delta(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,29 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. 
By default, set to \code{FALSE}.}
+
+\item{choose_data}{Whether to choose data with an interactive window.
+By default, set to \code{FALSE}.}
+
+\item{...}{For this model, there is no model-specific argument.}
}
\value{
A class "hBayesDM" object \code{modelData} with the following components:
\describe{
- \item{\code{model}}{Character value that is the name of the model ("ug_delta").}
- \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by
+ \item{model}{Character value that is the name of the model (\code{"ug_delta"}).}
+ \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
\code{indPars}) for each subject.}
- \item{\code{parVals}}{List object containing the posterior samples over different parameters.}
- \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+ \item{parVals}{List object containing the posterior samples over different parameters.}
+ \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
model.}
- \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by
+ \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
the user.}
-
+ \item{modelRegressor}{List object containing the extracted model-based regressors.}
}
}
\description{
-Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game with the following parameters:
- "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate).
+Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using the Rescorla-Wagner (Delta) Model.
+It has the following parameters: \code{alpha} (envy), \code{tau} (inverse temperature), \code{ep} (norm adaptation rate).

-\strong{MODEL:} Rescorla-Wagner (Delta) Model (Gu et al., 2015, J Neuro)
+\itemize{
+ \item \strong{Task}: Norm-Training Ultimatum Game
+ \item \strong{Model}: Rescorla-Wagner (Delta) Model (Gu et al., 2015)
+}
}
\details{
This section describes some of the function arguments in greater detail.

@@ -84,9 +106,9 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the
labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order,
however it is necessary that they be labeled correctly and contain the information below:
\describe{
- \item{"subjID"}{A unique identifier for each subject in the data-set.}
- \item{"offer"}{Floating point value representing the offer made in that trial (e.g. 4, 10, 11).}
- \item{"accept"}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).}
+ \item{subjID}{A unique identifier for each subject in the data-set.}
+ \item{offer}{Floating point value representing the offer made in that trial (e.g.
4, 10, 11).} + \item{accept}{1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).} @@ -129,8 +151,11 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- ug_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- ug_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- ug_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -146,10 +171,7 @@ printFit(output) } } \references{ -Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet - Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm - Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), - 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015 +Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015 } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: diff --git a/R/man/wcs_sql.Rd b/R/man/wcs_sql.Rd index 4e338be2..d9834975 100644 --- a/R/man/wcs_sql.Rd +++ b/R/man/wcs_sql.Rd @@ -2,15 +2,17 @@ % Please edit documentation in R/wcs_sql.R \name{wcs_sql} \alias{wcs_sql} -\title{Wisconsin Card Sorting Task} +\title{Sequential Learning Model} \usage{ -wcs_sql(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, - ncore = 1, nthin = 1, inits = "random", indPars = "mean", - modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE, - adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...) +wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000, + nchain = 4, ncore = 1, nthin = 1, inits = "vb", + indPars = "mean", modelRegressor = FALSE, vb = FALSE, + inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1, + max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...) } \arguments{ -\item{data}{A .txt file containing the data to be modeled. Data columns should be labeled as: +\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file +containing the data (\code{datafile}) to be modeled. Data columns should be labeled as: "subjID", "choice", "outcome". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -25,20 +27,21 @@ wcs_sql(data = "choose", niter = 4000, nwarmup = 1000, nchain = 4, Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.} -\item{inits}{Character value specifying how the initial values should be generated. Options are -"fixed" or "random", or your own initial values.} +\item{inits}{Character value specifying how the initial values should be generated. +Possible options are "vb" (default), "fixed", "random", or your own initial values.} \item{indPars}{Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".} -\item{modelRegressor}{Export model-based regressors? 
TRUE or FALSE. -Currently not available for this model.} +\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}). +Not available for this model.} \item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.} +to \code{FALSE}.} \item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.} +size). Defaults to \code{FALSE}. +If set to \code{TRUE}, it includes: "y_pred"} \item{adapt_delta}{Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.} @@ -49,31 +52,48 @@ take on each new iteration. See \bold{Details} below.} \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See \bold{Details} below.} -\item{...}{Not used for this model.} +\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.} + +\item{choose_data}{Whether to choose data with an interactive window. +By default, set to \code{FALSE}.} + +\item{...}{For this model, there is no model-specific argument. +\describe{ + + + + + + + + + +}} } \value{ A class "hBayesDM" object \code{modelData} with the following components: \describe{ - \item{\code{model}}{Character value that is the name of the model ("wcs_sql").} - \item{\code{allIndPars}}{Data.frame containing the summarized parameter values (as specified by + \item{model}{Character value that is the name of the model (\\code{"wcs_sql"}).} + \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by \code{indPars}) for each subject.} - \item{\code{parVals}}{List object containing the posterior samples over different parameters.} - \item{\code{fit}}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan + \item{parVals}{List object containing the posterior samples over different parameters.} + \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan model.} - \item{\code{rawdata}}{Data.frame containing the raw data used to fit the model, as specified by + \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by the user.} - + \item{modelRegressor}{List object containing the extracted model-based regressors.} } } \description{ -Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task with the following parameters: - "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature). +Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task using Sequential Learning Model. +It has the following parameters: \code{r} (reward sensitivity), \code{p} (punishment sensitivity), \code{d} (decision consistency or inverse temperature). -Contributor: \href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} - -\strong{MODEL:} Sequential Learning Model (Bishara et al., 2010, Journal of Mathematical Psychology) +\itemize{ + \item \strong{Task}: Wisconsin Card Sorting Task + \item \strong{Model}: Sequential Learning Model (Bishara et al., 2010) +} } \details{ This section describes some of the function arguments in greater detail. @@ -86,9 +106,9 @@ For the Wisconsin Card Sorting Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". 
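For concreteness, a toy file in that layout could be assembled as follows (the rows are invented; real data would contain one row per trial per subject):

import pandas as pd

# Invented example rows in the layout described above.
wcs_df = pd.DataFrame({
    'subjID':  [1, 1, 1, 2, 2, 2],   # unique subject identifier
    'choice':  [3, 1, 4, 2, 2, 1],   # deck chosen on each trial (1-4)
    'outcome': [1, 0, 0, 1, 1, 0],   # 1 = correct, 0 = wrong
})
wcs_df.to_csv('wcs_example.txt', sep='\t', index=False)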
It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ - \item{"subjID"}{A unique identifier for each subject in the data-set.} - \item{"choice"}{Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.} - \item{"outcome"}{1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.} + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{choice}{Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.} + \item{outcome}{1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.} @@ -128,11 +148,16 @@ For the Wisconsin Card Sorting Task, there should be 3 columns of data with the Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical description of these arguments. + +\subsection{Contributors}{\href{https://ccs-lab.github.io/team/dayeong-min/}{Dayeong Min} <\email{mindy2801@snu.ac.kr}>} } \examples{ \dontrun{ -# Run the model and store results in "output" -output <- wcs_sql("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) +# Run the model with a given data.frame as df +output <- wcs_sql(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- wcs_sql(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) # Visually check convergence of the sampling chains (should look like 'hairy caterpillars') plot(output, type = "trace") @@ -148,9 +173,7 @@ printFit(output) } } \references{ -Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. - (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in - substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13. +Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13. } \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: From d26a3689a41c646aea18ab589055e85669850e83 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 02:40:12 +0900 Subject: [PATCH 047/163] Re-add NEWS.md --- R/NEWS.md | 121 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 R/NEWS.md diff --git a/R/NEWS.md b/R/NEWS.md new file mode 100644 index 00000000..bcf080be --- /dev/null +++ b/R/NEWS.md @@ -0,0 +1,121 @@ +# hBayesDM 0.7.2 + +* Add three new models for the bandit4arm task: `bandit4arm_2par_lapse`, + `bandit4arm_lapse_decay` and `bandit4arm_singleA_lapse`. +* Fix various (minor) errors. + +# hBayesDM 0.7.1 + +* Make it usable without manually loading `rstan`. +* Remove an annoying warning about using `..insensitive_data_columns`. + +# hBayesDM 0.7.0 + +* Now, in default, you should build a Stan file into a binary for the first time to use it. To build all the models on installation, you should set an environmental variable `BUILD_ALL` to `true` before installation. +* Now all the implemented models are refactored using `hBayesDM_model` function. You don't have to change anything to use them, but developers can easily implement new models now! 
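The `BUILD_ALL` switch mentioned in the first 0.7.0 entry can be exercised along these lines; this is an illustration only (the actual build logic lives in the package's installation scripts), showing just the environment variable the changelog names:

import os
import subprocess

# Illustrative only: per the 0.7.0 notes above, exporting BUILD_ALL=true
# before installation should compile every Stan model at install time
# instead of on first use.
env = dict(os.environ, BUILD_ALL='true')
subprocess.run(['R', 'CMD', 'INSTALL', '.'], check=True, env=env)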
+* We added a Kalman filter model for the 4-armed bandit task (`bandit4arm2_kalman_filter`; Daw et al., 2006; sketched below) and a probability weighting function for general description-based tasks (`dbdm_prob_weight`; Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008).
+* Initial values of parameter estimation for some models are updated to plausible values, and the parameter boundaries of several models are fixed (see issues #63 and #64 on GitHub).
+* Exponential and linear models for the choice under risk and ambiguity task now have four model regressors: `sv`, `sv_fix`, `sv_var`, and `p_var`.
+* Fix the Travis CI settings and related code so that builds pass properly.
+
+# hBayesDM 0.6.3
+
+* Update the dependency on rstan (>= 2.18.1)
+* No changes to model files; same as version 0.6.2
+
+# hBayesDM 0.6.2
+
+* Fix an error on choiceRT_ddm (#44)
+
+# hBayesDM 0.6.1
+
+* Solve an issue with built binary files.
+* Fix an error on peer_ocu with misplaced parentheses.
+
+# hBayesDM 0.6.0
+
+* Add new tasks (Balloon Analogue Risk Task, Choice under Risk and Ambiguity Task, Probabilistic Selection Task, Risky Decision Task (a.k.a. Happiness task), Wisconsin Card Sorting Task)
+* Add a new model for the Iowa Gambling Task (igt_orl)
+* Change priors (Half-Cauchy(0, 5) --> Half-Cauchy(0, 1) or Half-Normal(0, 0.2))
+* printFit function now provides LOOIC weights and/or WAIC weights
+
+# hBayesDM 0.5.1
+
+* Add models for the Two Step task
+* Add models without the indecision point parameter (alpha) for the PRL task (prl_*_woa.stan)
+* Model-based regressors for the PRL task are now available
+* For the PRL task, prl_fictitious.stan, and prl_fictitious_rp.stan --> change the range of alpha (indecision point) from [0, 1] to [-Inf, Inf]
+
+# hBayesDM 0.5.0
+
+* Support variational Bayesian methods (vb=TRUE)
+* Allow posterior predictive checks, except for drift-diffusion models (inc_postpred=TRUE)
+* Add the peer influence task (Chung et al., 2015, USE WITH CAUTION for now and PLEASE GIVE US FEEDBACK!)
+* Add 'prl_fictitious_rp' model
+* Made changes to be compatible with the newest Stan version (e.g., // instead of # for commenting).
+* In 'prl_*' models, 'rewlos' is replaced by 'outcome' so that column names and labels are consistent across tasks as much as possible.
+* Email feature is disabled as the R mail package no longer allows users to send anonymous emails.
+* When outputs are saved as a file (*.RData), the file name now contains the name of the data file.
+
+# hBayesDM 0.4.0
+
+* Add a choice reaction time task and evidence accumulation models
+  - Drift diffusion model (both hierarchical and single-subject)
+  - Linear Ballistic Accumulator (LBA) model (both hierarchical and single-subject)
+* Add PRL models that can fit multiple blocks
+* Add single-subject versions for the delay discounting task (`dd_hyperbolic_single` and `dd_cs_single`).
+* Standardize variable names across all models (e.g., `rewlos` --> `outcome` for all models)
+* Separate versions for CRAN and GitHub. All models/features are identical, but the GitHub version contains precompiled models.
+
+# hBayesDM 0.3.1
+
+* Remove dependence on the modeest package. Now use a built-in function to estimate the mode of a posterior distribution.
+* Rewrite the "printFit" function.
+
+# hBayesDM 0.3.0
+
+* Made several changes following the guidelines for R packages providing interfaces to Stan.
+* Stan models are precompiled and models will run immediately when called.
+* The default number of chains is set to 4.
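As noted in the 0.7.0 entry above, here is a rough rendering of the Kalman-filter update behind `bandit4arm2_kalman_filter`, following the Daw et al. (2006) formulation; the parameter names, default values, and the ordering of the decay and update steps are illustrative assumptions, not the package's Stan code.

import numpy as np


def kalman_bandit_trial(mu, var, chosen, reward,
                        sigma_obs=4.0, sigma_diff=2.8,
                        decay=0.98, decay_center=50.0):
    """One trial of a Kalman-filter bandit (assumed parameterization).

    mu, var: per-arm posterior mean and variance (numpy arrays).
    """
    # Kalman gain for the chosen arm: how far the new reward moves it.
    gain = var[chosen] / (var[chosen] + sigma_obs ** 2)
    mu[chosen] += gain * (reward - mu[chosen])
    var[chosen] *= 1.0 - gain
    # Between trials every arm drifts: means decay toward a center and
    # uncertainty grows by the diffusion variance.
    mu[:] = decay_center + decay * (mu - decay_center)
    var[:] = decay ** 2 * var + sigma_diff ** 2
    return mu, var


# e.g. kalman_bandit_trial(np.full(4, 50.0), np.full(4, 16.0), chosen=2, reward=63.0)

Choice probabilities are then typically obtained from a softmax over the arm means.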
+* The default value of `adapt_delta` is set to 0.95 to reduce the potential for divergences. +* The “printFit” function uses LOOIC by default. Users can select WAIC or both (LOOIC & WAIC) if needed. + +# hBayesDM 0.2.3.3 + +* Add help files +* Add a function for checking Rhat values (rhat). +* Change a link to its tutorial website + +# hBayesDM 0.2.3.2 + +* Use wide normal distributions for unbounded parameters (gng_* models). +* Automatic removal of rows (trials) containing NAs. + +# hBayesDM 0.2.3.1 + +* Add a function for plotting individual parameters (plotInd) + +# hBayesDM 0.2.3 + +* Add a new task: the Ultimatum Game +* Add new models for the Probabilistic Reversal Learning and Risk Aversion tasks +* ‘bandit2arm’ -> change its name to ‘bandit2arm_delta’. Now all model names are in the same format (i.e., TASK_MODEL). +* Users can extract model-based regressors from gng_m* models +* Include the option of customizing control parameters (adapt_delta, max_treedepth, stepsize) +* ‘plotHDI’ function -> add ‘fontSize’ argument & change the color of histogram + +# hBayesDM 0.2.1 + +## Bug fixes + +* All models: Fix errors when indPars=“mode” +* ra_prospect model: Add description for column names of a data (*.txt) file + +## Change + +* Change standard deviations of ‘b’ and ‘pi’ priors in gng_* models + +# hBayesDM 0.2.0 + +Initially released. + From 05a1742ffc08204d1d803a9a3216bde0876e96d1 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 03:58:19 +0900 Subject: [PATCH 048/163] Generate Python test codes from JSON files --- commons/.gitignore | 1 + commons/generate-python-codes.py | 52 ++++++++++++++++++++------ commons/generate-r-codes.py | 9 +++-- commons/templates/PY_TEST_TEMPLATE.txt | 11 ++++++ 4 files changed, 57 insertions(+), 16 deletions(-) create mode 100644 commons/templates/PY_TEST_TEMPLATE.txt diff --git a/commons/.gitignore b/commons/.gitignore index 958aad28..4ac3f22f 100644 --- a/commons/.gitignore +++ b/commons/.gitignore @@ -1,2 +1,3 @@ R/ Python/ +Python-tests/ diff --git a/commons/generate-python-codes.py b/commons/generate-python-codes.py index a6f81070..af70fdd1 100644 --- a/commons/generate-python-codes.py +++ b/commons/generate-python-codes.py @@ -1,5 +1,7 @@ #!/usr/bin/env python3 """ +Generate Python codes for hBayesDM using model information defined in a JSON file. + Written by Jethro Lee. 
""" import sys @@ -7,13 +9,19 @@ import glob import json import re - from pathlib import Path from typing import List, Iterable, Callable from collections import OrderedDict -PATH_TEMPLATE = Path(__file__) / 'templates' -PATH_OUTPUT = Path(__file__) / 'Python' +PATH_ROOT = Path(__file__).absolute().parent +PATH_MODELS = PATH_ROOT / 'models' +PATH_TEMPLATE = PATH_ROOT / 'templates' +PATH_OUTPUT = PATH_ROOT / 'Python' +PATH_OUTPUT_TEST = PATH_ROOT / 'Python-tests' + +TEMPLATE_DOCS = PATH_TEMPLATE / 'PY_DOCS_TEMPLATE.txt' +TEMPLATE_CODE = PATH_TEMPLATE / 'PY_CODE_TEMPLATE.txt' +TEMPLATE_TEST = PATH_TEMPLATE / 'PY_TEST_TEMPLATE.txt' def main(json_file, verbose): @@ -27,10 +35,13 @@ def main(json_file, verbose): # Load json_file with open(path_fn, 'r') as f: - model_info = json.load(f, object_pairs_hook=OrderedDict) + info = model_info = json.load(f, object_pairs_hook=OrderedDict) # Model full name (Snake-case) - model_function = path_fn.name.replace('.json', '') + model_function = [info['task_name']['code'], info['model_name']['code']] + if info['model_type']['code'] != '': + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) # Model class name (Pascal-case) class_name = model_function.title().replace('_', '') @@ -52,7 +63,7 @@ def shortify(cite: str) -> str: (shortify(cite), cite) for cite in model_info['model_name']['cite']) # Read template for docstring - with open(PATH_TEMPLATE / 'PY_DOCSTRING_TEMPLATE.txt', 'r') as f: + with open(TEMPLATE_DOCS, 'r') as f: docstring_template = f.read().format( model_function=model_function, task_name=model_info['task_name']['desc'], @@ -108,7 +119,7 @@ def shortify(cite: str) -> str: ) # Read template for model python code - with open(PATH_TEMPLATE / 'PY_CODE_TEMPLATE.txt', 'r') as f: + with open(TEMPLATE_CODE, 'r') as f: code_template = f.read().format( docstring_template=docstring_template, model_function=model_function, @@ -146,16 +157,32 @@ def shortify(cite: str) -> str: sep='\n '), ) + with open(TEMPLATE_TEST, 'r') as f: + test_template = f.read() + + test = test_template.format(model_function=model_function) + if verbose: # Print code string to stdout print(code_template) else: + if not PATH_OUTPUT.exists(): + PATH_OUTPUT.mkdir(exist_ok=True) + + if not PATH_OUTPUT_TEST.exists(): + PATH_OUTPUT_TEST.mkdir(exist_ok=True) + # Write model python code code_fn = PATH_OUTPUT / ('_' + model_function + '.py') with open(code_fn, 'w') as f: f.write('"""\nGenerated by template. 
Do not edit by hand.\n"""\n') f.write(code_template) - print('Created file: ' + code_fn) + print('Created file: ', code_fn.name) + + test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.py') + with open(test_fn, 'w') as f: + f.write(test) + print('Created file: ', test_fn.name) def format_list(data: Iterable, @@ -229,14 +256,15 @@ def message_additional_args(additional_args: List) -> str: parser.add_argument( 'json_file', help='JSON file of the model to generate corresponding python code', - type=str) + type=str, nargs='*') args = parser.parse_args() if args.all: # `all` flag overrides `json_file` & `verbose` - all_json_files = glob.glob('[a-z]*.json') - for json_fn in all_json_files: + all_json_files = PATH_MODELS.glob('*.json') + for json_fn in sorted(all_json_files): main(json_fn, False) else: - main(args.json_file, args.verbose) + for fn in args.json_file: + main(fn, args.verbose) diff --git a/commons/generate-r-codes.py b/commons/generate-r-codes.py index a699a712..90e3dfc8 100644 --- a/commons/generate-r-codes.py +++ b/commons/generate-r-codes.py @@ -244,7 +244,7 @@ def main(json_fn, verbose): # Load json_file with open(p, 'r') as f: - info = model_info = json.load(f, object_pairs_hook=OrderedDict) + info = json.load(f, object_pairs_hook=OrderedDict) docs = generate_docstring(info) code = generate_code(info) @@ -255,8 +255,8 @@ def main(json_fn, verbose): print(output) else: # Model full name (Snake-case) - model_function = [info['task_name'] - ['code'], info['model_name']['code']] + model_function = [info['task_name']['code'], + info['model_name']['code']] if info['model_type']['code'] != '': model_function.append(info['model_type']['code']) model_function = '_'.join(model_function) @@ -294,4 +294,5 @@ def main(json_fn, verbose): for json_fn in all_json_files: main(json_fn, False) else: - main(args.json_file, args.verbose) + for fn in args.json_file: + main(fn, args.verbose) diff --git a/commons/templates/PY_TEST_TEMPLATE.txt b/commons/templates/PY_TEST_TEMPLATE.txt new file mode 100644 index 00000000..87de2417 --- /dev/null +++ b/commons/templates/PY_TEST_TEMPLATE.txt @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import {model_function} + + +def test_{model_function}(): + _ = {model_function}(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() From ece4f512fcce216dc65108405d94439fe418d78e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 03:58:26 +0900 Subject: [PATCH 049/163] Add test functions --- Python/tests/test_bandit2arm_delta.py | 11 ++++++++++ .../tests/test_bandit4arm2_kalman_filter.py | 11 ++++++++++ Python/tests/test_bandit4arm_2par_lapse.py | 12 +++++++++++ Python/tests/test_bandit4arm_4par.py | 11 ++++++++++ Python/tests/test_bandit4arm_lapse.py | 11 ++++++++++ Python/tests/test_bandit4arm_lapse_decay.py | 11 ++++++++++ Python/tests/test_bandit4arm_singleA_lapse.py | 11 ++++++++++ Python/tests/test_bart_par4.py | 11 ++++++++++ Python/tests/test_choiceRT_ddm.py | 11 ++++++++++ Python/tests/test_choiceRT_ddm_single.py | 11 ++++++++++ Python/tests/test_cra_exp.py | 11 ++++++++++ Python/tests/test_cra_linear.py | 11 ++++++++++ Python/tests/test_dbdm_prob_weight.py | 11 ++++++++++ Python/tests/test_dd_cs.py | 11 ++++++++++ Python/tests/test_dd_cs_single.py | 11 ++++++++++ Python/tests/test_dd_exp.py | 11 ++++++++++ Python/tests/test_dd_hyperbolic.py | 11 ++++++++++ Python/tests/test_dd_hyperbolic_single.py | 11 ++++++++++ Python/tests/test_gng_m1.py | 21 +++---------------- 
Python/tests/test_gng_m2.py | 11 ++++++++++ Python/tests/test_gng_m3.py | 11 ++++++++++ Python/tests/test_gng_m4.py | 11 ++++++++++ Python/tests/test_igt_orl.py | 11 ++++++++++ Python/tests/test_igt_pvl_decay.py | 11 ++++++++++ Python/tests/test_igt_pvl_delta.py | 11 ++++++++++ Python/tests/test_igt_vpp.py | 11 ++++++++++ Python/tests/test_peer_ocu.py | 11 ++++++++++ Python/tests/test_prl_ewa.py | 11 ++++++++++ Python/tests/test_prl_fictitious.py | 11 ++++++++++ Python/tests/test_prl_fictitious_multipleB.py | 11 ++++++++++ Python/tests/test_prl_fictitious_rp.py | 11 ++++++++++ Python/tests/test_prl_fictitious_rp_woa.py | 11 ++++++++++ Python/tests/test_prl_fictitious_woa.py | 11 ++++++++++ Python/tests/test_prl_rp.py | 11 ++++++++++ Python/tests/test_prl_rp_multipleB.py | 11 ++++++++++ Python/tests/test_pst_gainloss_Q.py | 11 ++++++++++ Python/tests/test_ra_noLA.py | 11 ++++++++++ Python/tests/test_ra_noRA.py | 11 ++++++++++ Python/tests/test_ra_prospect.py | 11 ++++++++++ Python/tests/test_rdt_happiness.py | 11 ++++++++++ Python/tests/test_ts_par4.py | 11 ++++++++++ Python/tests/test_ts_par6.py | 11 ++++++++++ Python/tests/test_ts_par7.py | 11 ++++++++++ Python/tests/test_ug_bayes.py | 11 ++++++++++ Python/tests/test_ug_delta.py | 11 ++++++++++ Python/tests/test_wcs_sql.py | 11 ++++++++++ 46 files changed, 499 insertions(+), 18 deletions(-) create mode 100644 Python/tests/test_bandit2arm_delta.py create mode 100644 Python/tests/test_bandit4arm2_kalman_filter.py create mode 100644 Python/tests/test_bandit4arm_2par_lapse.py create mode 100644 Python/tests/test_bandit4arm_4par.py create mode 100644 Python/tests/test_bandit4arm_lapse.py create mode 100644 Python/tests/test_bandit4arm_lapse_decay.py create mode 100644 Python/tests/test_bandit4arm_singleA_lapse.py create mode 100644 Python/tests/test_bart_par4.py create mode 100644 Python/tests/test_choiceRT_ddm.py create mode 100644 Python/tests/test_choiceRT_ddm_single.py create mode 100644 Python/tests/test_cra_exp.py create mode 100644 Python/tests/test_cra_linear.py create mode 100644 Python/tests/test_dbdm_prob_weight.py create mode 100644 Python/tests/test_dd_cs.py create mode 100644 Python/tests/test_dd_cs_single.py create mode 100644 Python/tests/test_dd_exp.py create mode 100644 Python/tests/test_dd_hyperbolic.py create mode 100644 Python/tests/test_dd_hyperbolic_single.py create mode 100644 Python/tests/test_gng_m2.py create mode 100644 Python/tests/test_gng_m3.py create mode 100644 Python/tests/test_gng_m4.py create mode 100644 Python/tests/test_igt_orl.py create mode 100644 Python/tests/test_igt_pvl_decay.py create mode 100644 Python/tests/test_igt_pvl_delta.py create mode 100644 Python/tests/test_igt_vpp.py create mode 100644 Python/tests/test_peer_ocu.py create mode 100644 Python/tests/test_prl_ewa.py create mode 100644 Python/tests/test_prl_fictitious.py create mode 100644 Python/tests/test_prl_fictitious_multipleB.py create mode 100644 Python/tests/test_prl_fictitious_rp.py create mode 100644 Python/tests/test_prl_fictitious_rp_woa.py create mode 100644 Python/tests/test_prl_fictitious_woa.py create mode 100644 Python/tests/test_prl_rp.py create mode 100644 Python/tests/test_prl_rp_multipleB.py create mode 100644 Python/tests/test_pst_gainloss_Q.py create mode 100644 Python/tests/test_ra_noLA.py create mode 100644 Python/tests/test_ra_noRA.py create mode 100644 Python/tests/test_ra_prospect.py create mode 100644 Python/tests/test_rdt_happiness.py create mode 100644 Python/tests/test_ts_par4.py create mode 100644 
Python/tests/test_ts_par6.py create mode 100644 Python/tests/test_ts_par7.py create mode 100644 Python/tests/test_ug_bayes.py create mode 100644 Python/tests/test_ug_delta.py create mode 100644 Python/tests/test_wcs_sql.py diff --git a/Python/tests/test_bandit2arm_delta.py b/Python/tests/test_bandit2arm_delta.py new file mode 100644 index 00000000..6762b96f --- /dev/null +++ b/Python/tests/test_bandit2arm_delta.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit2arm_delta + + +def test_bandit2arm_delta(): + _ = bandit2arm_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm2_kalman_filter.py b/Python/tests/test_bandit4arm2_kalman_filter.py new file mode 100644 index 00000000..5cab7c3b --- /dev/null +++ b/Python/tests/test_bandit4arm2_kalman_filter.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit4arm2_kalman_filter + + +def test_bandit4arm2_kalman_filter(): + _ = bandit4arm2_kalman_filter(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm_2par_lapse.py b/Python/tests/test_bandit4arm_2par_lapse.py new file mode 100644 index 00000000..8169d99d --- /dev/null +++ b/Python/tests/test_bandit4arm_2par_lapse.py @@ -0,0 +1,12 @@ +import pytest + +from hbayesdm.models import bandit4arm_2par_lapse + + +def test_bandit4arm_2par_lapse(): + _ = bandit4arm_2par_lapse(example=True, niter=2, + nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm_4par.py b/Python/tests/test_bandit4arm_4par.py new file mode 100644 index 00000000..30d4f50b --- /dev/null +++ b/Python/tests/test_bandit4arm_4par.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit4arm_4par + + +def test_bandit4arm_4par(): + _ = bandit4arm_4par(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm_lapse.py b/Python/tests/test_bandit4arm_lapse.py new file mode 100644 index 00000000..8b19d8cc --- /dev/null +++ b/Python/tests/test_bandit4arm_lapse.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit4arm_lapse + + +def test_bandit4arm_lapse(): + _ = bandit4arm_lapse(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm_lapse_decay.py b/Python/tests/test_bandit4arm_lapse_decay.py new file mode 100644 index 00000000..971a6fa9 --- /dev/null +++ b/Python/tests/test_bandit4arm_lapse_decay.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit4arm_lapse_decay + + +def test_bandit4arm_lapse_decay(): + _ = bandit4arm_lapse_decay(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bandit4arm_singleA_lapse.py b/Python/tests/test_bandit4arm_singleA_lapse.py new file mode 100644 index 00000000..d4a4cd05 --- /dev/null +++ b/Python/tests/test_bandit4arm_singleA_lapse.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bandit4arm_singleA_lapse + + +def test_bandit4arm_singleA_lapse(): + _ = bandit4arm_singleA_lapse(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_bart_par4.py b/Python/tests/test_bart_par4.py new file mode 100644 index 00000000..70270e7c --- /dev/null 
+++ b/Python/tests/test_bart_par4.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import bart_par4 + + +def test_bart_par4(): + _ = bart_par4(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_choiceRT_ddm.py b/Python/tests/test_choiceRT_ddm.py new file mode 100644 index 00000000..6dca70ac --- /dev/null +++ b/Python/tests/test_choiceRT_ddm.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import choiceRT_ddm + + +def test_choiceRT_ddm(): + _ = choiceRT_ddm(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_choiceRT_ddm_single.py b/Python/tests/test_choiceRT_ddm_single.py new file mode 100644 index 00000000..f0a24855 --- /dev/null +++ b/Python/tests/test_choiceRT_ddm_single.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import choiceRT_ddm_single + + +def test_choiceRT_ddm_single(): + _ = choiceRT_ddm_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_cra_exp.py b/Python/tests/test_cra_exp.py new file mode 100644 index 00000000..e9b66b50 --- /dev/null +++ b/Python/tests/test_cra_exp.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import cra_exp + + +def test_cra_exp(): + _ = cra_exp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_cra_linear.py b/Python/tests/test_cra_linear.py new file mode 100644 index 00000000..2054d802 --- /dev/null +++ b/Python/tests/test_cra_linear.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import cra_linear + + +def test_cra_linear(): + _ = cra_linear(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dbdm_prob_weight.py b/Python/tests/test_dbdm_prob_weight.py new file mode 100644 index 00000000..f4071d11 --- /dev/null +++ b/Python/tests/test_dbdm_prob_weight.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dbdm_prob_weight + + +def test_dbdm_prob_weight(): + _ = dbdm_prob_weight(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dd_cs.py b/Python/tests/test_dd_cs.py new file mode 100644 index 00000000..7afd92f2 --- /dev/null +++ b/Python/tests/test_dd_cs.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dd_cs + + +def test_dd_cs(): + _ = dd_cs(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dd_cs_single.py b/Python/tests/test_dd_cs_single.py new file mode 100644 index 00000000..9fcc4efc --- /dev/null +++ b/Python/tests/test_dd_cs_single.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dd_cs_single + + +def test_dd_cs_single(): + _ = dd_cs_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dd_exp.py b/Python/tests/test_dd_exp.py new file mode 100644 index 00000000..c1de2d5b --- /dev/null +++ b/Python/tests/test_dd_exp.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dd_exp + + +def test_dd_exp(): + _ = dd_exp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dd_hyperbolic.py 
b/Python/tests/test_dd_hyperbolic.py new file mode 100644 index 00000000..68f29116 --- /dev/null +++ b/Python/tests/test_dd_hyperbolic.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dd_hyperbolic + + +def test_dd_hyperbolic(): + _ = dd_hyperbolic(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_dd_hyperbolic_single.py b/Python/tests/test_dd_hyperbolic_single.py new file mode 100644 index 00000000..a295d943 --- /dev/null +++ b/Python/tests/test_dd_hyperbolic_single.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import dd_hyperbolic_single + + +def test_dd_hyperbolic_single(): + _ = dd_hyperbolic_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py index b463f8a7..2e38fd33 100644 --- a/Python/tests/test_gng_m1.py +++ b/Python/tests/test_gng_m1.py @@ -1,25 +1,10 @@ import pytest -import pystan -from hbayesdm.models import gng_m1, gng_m2, gng_m3, gng_m4 -from hbayesdm import rhat, print_fit +from hbayesdm.models import gng_m1 -def test_gng_models(): - print(pystan.__version__) - - fit = gng_m1(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) - - print(fit) - print(fit.all_ind_pars) - print(rhat(fit, less=1.1)) - - fit2 = gng_m2(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) - fit3 = gng_m3(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) - fit4 = gng_m4(example=True, niter=200, nwarmup=100, nchain=1, ncore=1) - - print_fit(fit, fit2, fit3, fit4) # ic='loo' - print_fit(fit, fit2, fit3, fit4, ic='waic') +def test_gng_m1(): + _ = gng_m1(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m2.py b/Python/tests/test_gng_m2.py new file mode 100644 index 00000000..b94c9a62 --- /dev/null +++ b/Python/tests/test_gng_m2.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import gng_m2 + + +def test_gng_m2(): + _ = gng_m2(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_gng_m3.py b/Python/tests/test_gng_m3.py new file mode 100644 index 00000000..8ccfe683 --- /dev/null +++ b/Python/tests/test_gng_m3.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import gng_m3 + + +def test_gng_m3(): + _ = gng_m3(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_gng_m4.py b/Python/tests/test_gng_m4.py new file mode 100644 index 00000000..0513e816 --- /dev/null +++ b/Python/tests/test_gng_m4.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import gng_m4 + + +def test_gng_m4(): + _ = gng_m4(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_igt_orl.py b/Python/tests/test_igt_orl.py new file mode 100644 index 00000000..1e7505a9 --- /dev/null +++ b/Python/tests/test_igt_orl.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import igt_orl + + +def test_igt_orl(): + _ = igt_orl(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_igt_pvl_decay.py b/Python/tests/test_igt_pvl_decay.py new file mode 100644 index 00000000..656f0558 --- /dev/null +++ b/Python/tests/test_igt_pvl_decay.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import 
igt_pvl_decay + + +def test_igt_pvl_decay(): + _ = igt_pvl_decay(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_igt_pvl_delta.py b/Python/tests/test_igt_pvl_delta.py new file mode 100644 index 00000000..804090d6 --- /dev/null +++ b/Python/tests/test_igt_pvl_delta.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import igt_pvl_delta + + +def test_igt_pvl_delta(): + _ = igt_pvl_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_igt_vpp.py b/Python/tests/test_igt_vpp.py new file mode 100644 index 00000000..f8235302 --- /dev/null +++ b/Python/tests/test_igt_vpp.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import igt_vpp + + +def test_igt_vpp(): + _ = igt_vpp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_peer_ocu.py b/Python/tests/test_peer_ocu.py new file mode 100644 index 00000000..c046ad0f --- /dev/null +++ b/Python/tests/test_peer_ocu.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import peer_ocu + + +def test_peer_ocu(): + _ = peer_ocu(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_ewa.py b/Python/tests/test_prl_ewa.py new file mode 100644 index 00000000..88c2135b --- /dev/null +++ b/Python/tests/test_prl_ewa.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_ewa + + +def test_prl_ewa(): + _ = prl_ewa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_fictitious.py b/Python/tests/test_prl_fictitious.py new file mode 100644 index 00000000..886cdca2 --- /dev/null +++ b/Python/tests/test_prl_fictitious.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_fictitious + + +def test_prl_fictitious(): + _ = prl_fictitious(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_fictitious_multipleB.py b/Python/tests/test_prl_fictitious_multipleB.py new file mode 100644 index 00000000..bd53f91d --- /dev/null +++ b/Python/tests/test_prl_fictitious_multipleB.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_fictitious_multipleB + + +def test_prl_fictitious_multipleB(): + _ = prl_fictitious_multipleB(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_fictitious_rp.py b/Python/tests/test_prl_fictitious_rp.py new file mode 100644 index 00000000..5c64afff --- /dev/null +++ b/Python/tests/test_prl_fictitious_rp.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_fictitious_rp + + +def test_prl_fictitious_rp(): + _ = prl_fictitious_rp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_fictitious_rp_woa.py b/Python/tests/test_prl_fictitious_rp_woa.py new file mode 100644 index 00000000..9091f8dd --- /dev/null +++ b/Python/tests/test_prl_fictitious_rp_woa.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_fictitious_rp_woa + + +def test_prl_fictitious_rp_woa(): + _ = prl_fictitious_rp_woa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git 
a/Python/tests/test_prl_fictitious_woa.py b/Python/tests/test_prl_fictitious_woa.py new file mode 100644 index 00000000..35825200 --- /dev/null +++ b/Python/tests/test_prl_fictitious_woa.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_fictitious_woa + + +def test_prl_fictitious_woa(): + _ = prl_fictitious_woa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_rp.py b/Python/tests/test_prl_rp.py new file mode 100644 index 00000000..5c8ee1c4 --- /dev/null +++ b/Python/tests/test_prl_rp.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_rp + + +def test_prl_rp(): + _ = prl_rp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_prl_rp_multipleB.py b/Python/tests/test_prl_rp_multipleB.py new file mode 100644 index 00000000..9bfe6c05 --- /dev/null +++ b/Python/tests/test_prl_rp_multipleB.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import prl_rp_multipleB + + +def test_prl_rp_multipleB(): + _ = prl_rp_multipleB(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_pst_gainloss_Q.py b/Python/tests/test_pst_gainloss_Q.py new file mode 100644 index 00000000..f36ef346 --- /dev/null +++ b/Python/tests/test_pst_gainloss_Q.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import pst_gainloss_Q + + +def test_pst_gainloss_Q(): + _ = pst_gainloss_Q(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ra_noLA.py b/Python/tests/test_ra_noLA.py new file mode 100644 index 00000000..da5c0780 --- /dev/null +++ b/Python/tests/test_ra_noLA.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ra_noLA + + +def test_ra_noLA(): + _ = ra_noLA(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ra_noRA.py b/Python/tests/test_ra_noRA.py new file mode 100644 index 00000000..342bd88c --- /dev/null +++ b/Python/tests/test_ra_noRA.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ra_noRA + + +def test_ra_noRA(): + _ = ra_noRA(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ra_prospect.py b/Python/tests/test_ra_prospect.py new file mode 100644 index 00000000..39f5a6d3 --- /dev/null +++ b/Python/tests/test_ra_prospect.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ra_prospect + + +def test_ra_prospect(): + _ = ra_prospect(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_rdt_happiness.py b/Python/tests/test_rdt_happiness.py new file mode 100644 index 00000000..726c7f21 --- /dev/null +++ b/Python/tests/test_rdt_happiness.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import rdt_happiness + + +def test_rdt_happiness(): + _ = rdt_happiness(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ts_par4.py b/Python/tests/test_ts_par4.py new file mode 100644 index 00000000..e354ad46 --- /dev/null +++ b/Python/tests/test_ts_par4.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ts_par4 + + +def test_ts_par4(): + _ = ts_par4(example=True, niter=2, nwarmup=1, 
nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ts_par6.py b/Python/tests/test_ts_par6.py new file mode 100644 index 00000000..83e34fd2 --- /dev/null +++ b/Python/tests/test_ts_par6.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ts_par6 + + +def test_ts_par6(): + _ = ts_par6(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ts_par7.py b/Python/tests/test_ts_par7.py new file mode 100644 index 00000000..30bc3e3c --- /dev/null +++ b/Python/tests/test_ts_par7.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ts_par7 + + +def test_ts_par7(): + _ = ts_par7(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ug_bayes.py b/Python/tests/test_ug_bayes.py new file mode 100644 index 00000000..8def3d6a --- /dev/null +++ b/Python/tests/test_ug_bayes.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ug_bayes + + +def test_ug_bayes(): + _ = ug_bayes(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_ug_delta.py b/Python/tests/test_ug_delta.py new file mode 100644 index 00000000..a7824dd0 --- /dev/null +++ b/Python/tests/test_ug_delta.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import ug_delta + + +def test_ug_delta(): + _ = ug_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/Python/tests/test_wcs_sql.py b/Python/tests/test_wcs_sql.py new file mode 100644 index 00000000..1141ba40 --- /dev/null +++ b/Python/tests/test_wcs_sql.py @@ -0,0 +1,11 @@ +import pytest + +from hbayesdm.models import wcs_sql + + +def test_wcs_sql(): + _ = wcs_sql(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() From 2001a5d0395638cd94956fc895bb77cbae11ead8 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 04:10:30 +0900 Subject: [PATCH 050/163] Fix R conf --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 5f8c9f9e..5a1c19e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,7 +41,7 @@ matrix: - name: 'Test R codes' env: TARGET='R' - name: 'Test R codes (BUILD_ALL)' - env: TARGET='R' && BUILD_ALL=1 + env: TARGET='R' && BUILD_ALL=true - name: 'Test Python codes (Python 3.5)' env: TARGET='Python' && PYTHON_VERSION=3.5 - name: 'Test Python codes (Python 3.6)' From c2f3f82e7cccc0a10bb7b5a7877667ede8101158 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 04:10:49 +0900 Subject: [PATCH 051/163] Fix settings for ReadTheDocs --- .readthedocs.yml | 23 +++++++++++++++++++++++ Python/requirements.txt | 1 - travis/setup.sh | 1 + 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 .readthedocs.yml diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..eaae4089 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,23 @@ +# .readthedocs.yml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: Python/docs/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +formats: + - htmlzip + - pdf + +# Optionally set the version of Python and 
requirements required to build your docs +python: + version: 3.7 + install: + - requirements: Python/requirements.txt + - method: pip + path: . diff --git a/Python/requirements.txt b/Python/requirements.txt index bfb2c0bb..e14c2660 100644 --- a/Python/requirements.txt +++ b/Python/requirements.txt @@ -1,4 +1,3 @@ --e . arviz flake8 matplotlib diff --git a/travis/setup.sh b/travis/setup.sh index 6f35c9eb..23d10fac 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -60,6 +60,7 @@ elif [ "$TARGET" = "Python" ]; then # Install dependencies pip install -r requirements.txt --upgrade + pip install . # Otherwise else From ea082afe8093a4f8bf28aabf172ab24852e4216f Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 04:14:15 +0900 Subject: [PATCH 052/163] Fix settings for ReadTheDocs --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index eaae4089..1fc9bdc8 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,4 +20,4 @@ python: install: - requirements: Python/requirements.txt - method: pip - path: . + path: ./Python From 8e9fa28ec3d7aba83a36bf8dea5ab9b46b5ebd54 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 11:34:58 +0900 Subject: [PATCH 053/163] Lengthen the timeout for Python tests to 59 mins --- travis/script.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/script.sh b/travis/script.sh index 62afb3ec..5b20ee41 100755 --- a/travis/script.sh +++ b/travis/script.sh @@ -7,7 +7,7 @@ if [ "$TARGET" = "R" ]; then # Scripts for Python elif [ "$TARGET" = "Python" ]; then - travis_wait 30 pytest tests + travis_wait 59 pytest tests # Check sync for models and data elif [ "$TARGET" = "Sync" ]; then From 193ada4041a90d8eb27464f40f0c337abbb52aee Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:06:32 +0900 Subject: [PATCH 054/163] Do not run R test with BUILD_ALL=true --- .travis.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5a1c19e7..b16d029c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ # Use cpp to enable both R & Python language: cpp -# NOTE: was `false` sudo: required os: linux @@ -40,8 +39,6 @@ matrix: env: TARGET='Sync' - name: 'Test R codes' env: TARGET='R' - - name: 'Test R codes (BUILD_ALL)' - env: TARGET='R' && BUILD_ALL=true - name: 'Test Python codes (Python 3.5)' env: TARGET='Python' && PYTHON_VERSION=3.5 - name: 'Test Python codes (Python 3.6)' From 6e0a9c008e690afaf99a1abe67e9e4d477b28ecb Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:06:43 +0900 Subject: [PATCH 055/163] Only test ra_prospect on Python tests --- travis/script.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/script.sh b/travis/script.sh index 5b20ee41..f77a10dd 100755 --- a/travis/script.sh +++ b/travis/script.sh @@ -7,7 +7,7 @@ if [ "$TARGET" = "R" ]; then # Scripts for Python elif [ "$TARGET" = "Python" ]; then - travis_wait 59 pytest tests + travis_wait 30 pytest tests/test_ra_prospect.py # Check sync for models and data elif [ "$TARGET" = "Sync" ]; then From f502853df1753685891109a8f670dbcff3cf4e39 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:15:16 +0900 Subject: [PATCH 056/163] Change destination folder names --- commons/.gitignore | 4 ++-- commons/generate-python-codes.py | 2 +- commons/generate-r-codes.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/commons/.gitignore b/commons/.gitignore index 
4ac3f22f..55d7377a 100644 --- a/commons/.gitignore +++ b/commons/.gitignore @@ -1,3 +1,3 @@ -R/ -Python/ +R-codes/ +Python-codes/ Python-tests/ diff --git a/commons/generate-python-codes.py b/commons/generate-python-codes.py index af70fdd1..f7110d89 100644 --- a/commons/generate-python-codes.py +++ b/commons/generate-python-codes.py @@ -16,7 +16,7 @@ PATH_ROOT = Path(__file__).absolute().parent PATH_MODELS = PATH_ROOT / 'models' PATH_TEMPLATE = PATH_ROOT / 'templates' -PATH_OUTPUT = PATH_ROOT / 'Python' +PATH_OUTPUT = PATH_ROOT / 'Python-codes' PATH_OUTPUT_TEST = PATH_ROOT / 'Python-tests' TEMPLATE_DOCS = PATH_TEMPLATE / 'PY_DOCS_TEMPLATE.txt' diff --git a/commons/generate-r-codes.py b/commons/generate-r-codes.py index 90e3dfc8..759efaf9 100644 --- a/commons/generate-r-codes.py +++ b/commons/generate-r-codes.py @@ -13,7 +13,7 @@ PATH_ROOT = Path(__file__).absolute().parent PATH_MODELS = PATH_ROOT / 'models' PATH_TEMPLATE = PATH_ROOT / 'templates' -PATH_OUTPUT = PATH_ROOT / 'R' +PATH_OUTPUT = PATH_ROOT / 'R-codes' TEMPLATE_DOCS = PATH_TEMPLATE / 'R_DOCS_TEMPLATE.txt' TEMPLATE_CODE = PATH_TEMPLATE / 'R_CODE_TEMPLATE.txt' From 1e5456c11396502861412e8d4770c0e91b581689 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:17:06 +0900 Subject: [PATCH 057/163] Use nwarmup=5 and niter=10 for tests --- Python/tests/test_bandit2arm_delta.py | 3 ++- Python/tests/test_bandit4arm2_kalman_filter.py | 3 ++- Python/tests/test_bandit4arm_2par_lapse.py | 4 ++-- Python/tests/test_bandit4arm_4par.py | 3 ++- Python/tests/test_bandit4arm_lapse.py | 3 ++- Python/tests/test_bandit4arm_lapse_decay.py | 3 ++- Python/tests/test_bandit4arm_singleA_lapse.py | 3 ++- Python/tests/test_bart_par4.py | 3 ++- Python/tests/test_choiceRT_ddm.py | 3 ++- Python/tests/test_choiceRT_ddm_single.py | 3 ++- Python/tests/test_cra_exp.py | 3 ++- Python/tests/test_cra_linear.py | 3 ++- Python/tests/test_dbdm_prob_weight.py | 3 ++- Python/tests/test_dd_cs.py | 3 ++- Python/tests/test_dd_cs_single.py | 3 ++- Python/tests/test_dd_exp.py | 3 ++- Python/tests/test_dd_hyperbolic.py | 3 ++- Python/tests/test_dd_hyperbolic_single.py | 3 ++- Python/tests/test_gng_m1.py | 3 ++- Python/tests/test_gng_m2.py | 3 ++- Python/tests/test_gng_m3.py | 3 ++- Python/tests/test_gng_m4.py | 3 ++- Python/tests/test_igt_orl.py | 3 ++- Python/tests/test_igt_pvl_decay.py | 3 ++- Python/tests/test_igt_pvl_delta.py | 3 ++- Python/tests/test_igt_vpp.py | 3 ++- Python/tests/test_peer_ocu.py | 3 ++- Python/tests/test_prl_ewa.py | 3 ++- Python/tests/test_prl_fictitious.py | 3 ++- Python/tests/test_prl_fictitious_multipleB.py | 3 ++- Python/tests/test_prl_fictitious_rp.py | 3 ++- Python/tests/test_prl_fictitious_rp_woa.py | 3 ++- Python/tests/test_prl_fictitious_woa.py | 3 ++- Python/tests/test_prl_rp.py | 3 ++- Python/tests/test_prl_rp_multipleB.py | 3 ++- Python/tests/test_pst_gainloss_Q.py | 3 ++- Python/tests/test_ra_noLA.py | 3 ++- Python/tests/test_ra_noRA.py | 3 ++- Python/tests/test_ra_prospect.py | 3 ++- Python/tests/test_rdt_happiness.py | 3 ++- Python/tests/test_ts_par4.py | 3 ++- Python/tests/test_ts_par6.py | 3 ++- Python/tests/test_ts_par7.py | 3 ++- Python/tests/test_ug_bayes.py | 3 ++- Python/tests/test_ug_delta.py | 3 ++- Python/tests/test_wcs_sql.py | 3 ++- commons/templates/PY_TEST_TEMPLATE.txt | 3 ++- 47 files changed, 94 insertions(+), 48 deletions(-) diff --git a/Python/tests/test_bandit2arm_delta.py b/Python/tests/test_bandit2arm_delta.py index 6762b96f..4be4dafa 100644 --- a/Python/tests/test_bandit2arm_delta.py +++ 
b/Python/tests/test_bandit2arm_delta.py @@ -4,7 +4,8 @@ def test_bandit2arm_delta(): - _ = bandit2arm_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit2arm_delta( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm2_kalman_filter.py b/Python/tests/test_bandit4arm2_kalman_filter.py index 5cab7c3b..f6ea30be 100644 --- a/Python/tests/test_bandit4arm2_kalman_filter.py +++ b/Python/tests/test_bandit4arm2_kalman_filter.py @@ -4,7 +4,8 @@ def test_bandit4arm2_kalman_filter(): - _ = bandit4arm2_kalman_filter(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm2_kalman_filter( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_2par_lapse.py b/Python/tests/test_bandit4arm_2par_lapse.py index 8169d99d..8a1a235e 100644 --- a/Python/tests/test_bandit4arm_2par_lapse.py +++ b/Python/tests/test_bandit4arm_2par_lapse.py @@ -4,8 +4,8 @@ def test_bandit4arm_2par_lapse(): - _ = bandit4arm_2par_lapse(example=True, niter=2, - nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm_2par_lapse( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_4par.py b/Python/tests/test_bandit4arm_4par.py index 30d4f50b..f8307fd8 100644 --- a/Python/tests/test_bandit4arm_4par.py +++ b/Python/tests/test_bandit4arm_4par.py @@ -4,7 +4,8 @@ def test_bandit4arm_4par(): - _ = bandit4arm_4par(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm_4par( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_lapse.py b/Python/tests/test_bandit4arm_lapse.py index 8b19d8cc..56fde745 100644 --- a/Python/tests/test_bandit4arm_lapse.py +++ b/Python/tests/test_bandit4arm_lapse.py @@ -4,7 +4,8 @@ def test_bandit4arm_lapse(): - _ = bandit4arm_lapse(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm_lapse( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_lapse_decay.py b/Python/tests/test_bandit4arm_lapse_decay.py index 971a6fa9..71b80d5d 100644 --- a/Python/tests/test_bandit4arm_lapse_decay.py +++ b/Python/tests/test_bandit4arm_lapse_decay.py @@ -4,7 +4,8 @@ def test_bandit4arm_lapse_decay(): - _ = bandit4arm_lapse_decay(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm_lapse_decay( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_singleA_lapse.py b/Python/tests/test_bandit4arm_singleA_lapse.py index d4a4cd05..0245d532 100644 --- a/Python/tests/test_bandit4arm_singleA_lapse.py +++ b/Python/tests/test_bandit4arm_singleA_lapse.py @@ -4,7 +4,8 @@ def test_bandit4arm_singleA_lapse(): - _ = bandit4arm_singleA_lapse(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bandit4arm_singleA_lapse( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bart_par4.py b/Python/tests/test_bart_par4.py index 70270e7c..a9aa1ea9 100644 --- a/Python/tests/test_bart_par4.py +++ b/Python/tests/test_bart_par4.py @@ -4,7 +4,8 @@ def test_bart_par4(): - _ = bart_par4(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = bart_par4( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_choiceRT_ddm.py 
b/Python/tests/test_choiceRT_ddm.py index 6dca70ac..40aa6376 100644 --- a/Python/tests/test_choiceRT_ddm.py +++ b/Python/tests/test_choiceRT_ddm.py @@ -4,7 +4,8 @@ def test_choiceRT_ddm(): - _ = choiceRT_ddm(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = choiceRT_ddm( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_choiceRT_ddm_single.py b/Python/tests/test_choiceRT_ddm_single.py index f0a24855..6330f26b 100644 --- a/Python/tests/test_choiceRT_ddm_single.py +++ b/Python/tests/test_choiceRT_ddm_single.py @@ -4,7 +4,8 @@ def test_choiceRT_ddm_single(): - _ = choiceRT_ddm_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = choiceRT_ddm_single( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_cra_exp.py b/Python/tests/test_cra_exp.py index e9b66b50..79557cb5 100644 --- a/Python/tests/test_cra_exp.py +++ b/Python/tests/test_cra_exp.py @@ -4,7 +4,8 @@ def test_cra_exp(): - _ = cra_exp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = cra_exp( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_cra_linear.py b/Python/tests/test_cra_linear.py index 2054d802..191cb199 100644 --- a/Python/tests/test_cra_linear.py +++ b/Python/tests/test_cra_linear.py @@ -4,7 +4,8 @@ def test_cra_linear(): - _ = cra_linear(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = cra_linear( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dbdm_prob_weight.py b/Python/tests/test_dbdm_prob_weight.py index f4071d11..084e159a 100644 --- a/Python/tests/test_dbdm_prob_weight.py +++ b/Python/tests/test_dbdm_prob_weight.py @@ -4,7 +4,8 @@ def test_dbdm_prob_weight(): - _ = dbdm_prob_weight(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dbdm_prob_weight( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_cs.py b/Python/tests/test_dd_cs.py index 7afd92f2..5d897973 100644 --- a/Python/tests/test_dd_cs.py +++ b/Python/tests/test_dd_cs.py @@ -4,7 +4,8 @@ def test_dd_cs(): - _ = dd_cs(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dd_cs( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_cs_single.py b/Python/tests/test_dd_cs_single.py index 9fcc4efc..0882f866 100644 --- a/Python/tests/test_dd_cs_single.py +++ b/Python/tests/test_dd_cs_single.py @@ -4,7 +4,8 @@ def test_dd_cs_single(): - _ = dd_cs_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dd_cs_single( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_exp.py b/Python/tests/test_dd_exp.py index c1de2d5b..4591b7ba 100644 --- a/Python/tests/test_dd_exp.py +++ b/Python/tests/test_dd_exp.py @@ -4,7 +4,8 @@ def test_dd_exp(): - _ = dd_exp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dd_exp( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_hyperbolic.py b/Python/tests/test_dd_hyperbolic.py index 68f29116..129a1310 100644 --- a/Python/tests/test_dd_hyperbolic.py +++ b/Python/tests/test_dd_hyperbolic.py @@ -4,7 +4,8 @@ def test_dd_hyperbolic(): - _ = dd_hyperbolic(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dd_hyperbolic( + example=True, niter=10, nwarmup=5, 
nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_hyperbolic_single.py b/Python/tests/test_dd_hyperbolic_single.py index a295d943..a22eb2fa 100644 --- a/Python/tests/test_dd_hyperbolic_single.py +++ b/Python/tests/test_dd_hyperbolic_single.py @@ -4,7 +4,8 @@ def test_dd_hyperbolic_single(): - _ = dd_hyperbolic_single(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = dd_hyperbolic_single( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py index 2e38fd33..239ade8f 100644 --- a/Python/tests/test_gng_m1.py +++ b/Python/tests/test_gng_m1.py @@ -4,7 +4,8 @@ def test_gng_m1(): - _ = gng_m1(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = gng_m1( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m2.py b/Python/tests/test_gng_m2.py index b94c9a62..82beb979 100644 --- a/Python/tests/test_gng_m2.py +++ b/Python/tests/test_gng_m2.py @@ -4,7 +4,8 @@ def test_gng_m2(): - _ = gng_m2(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = gng_m2( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m3.py b/Python/tests/test_gng_m3.py index 8ccfe683..24947f57 100644 --- a/Python/tests/test_gng_m3.py +++ b/Python/tests/test_gng_m3.py @@ -4,7 +4,8 @@ def test_gng_m3(): - _ = gng_m3(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = gng_m3( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m4.py b/Python/tests/test_gng_m4.py index 0513e816..5157cd91 100644 --- a/Python/tests/test_gng_m4.py +++ b/Python/tests/test_gng_m4.py @@ -4,7 +4,8 @@ def test_gng_m4(): - _ = gng_m4(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = gng_m4( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_orl.py b/Python/tests/test_igt_orl.py index 1e7505a9..fdbec617 100644 --- a/Python/tests/test_igt_orl.py +++ b/Python/tests/test_igt_orl.py @@ -4,7 +4,8 @@ def test_igt_orl(): - _ = igt_orl(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = igt_orl( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_pvl_decay.py b/Python/tests/test_igt_pvl_decay.py index 656f0558..dbd575f7 100644 --- a/Python/tests/test_igt_pvl_decay.py +++ b/Python/tests/test_igt_pvl_decay.py @@ -4,7 +4,8 @@ def test_igt_pvl_decay(): - _ = igt_pvl_decay(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = igt_pvl_decay( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_pvl_delta.py b/Python/tests/test_igt_pvl_delta.py index 804090d6..25785e8b 100644 --- a/Python/tests/test_igt_pvl_delta.py +++ b/Python/tests/test_igt_pvl_delta.py @@ -4,7 +4,8 @@ def test_igt_pvl_delta(): - _ = igt_pvl_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = igt_pvl_delta( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_vpp.py b/Python/tests/test_igt_vpp.py index f8235302..54e55b21 100644 --- a/Python/tests/test_igt_vpp.py +++ b/Python/tests/test_igt_vpp.py @@ -4,7 +4,8 @@ def test_igt_vpp(): - _ = igt_vpp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = igt_vpp( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if 
__name__ == '__main__': diff --git a/Python/tests/test_peer_ocu.py b/Python/tests/test_peer_ocu.py index c046ad0f..71fcede3 100644 --- a/Python/tests/test_peer_ocu.py +++ b/Python/tests/test_peer_ocu.py @@ -4,7 +4,8 @@ def test_peer_ocu(): - _ = peer_ocu(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = peer_ocu( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_ewa.py b/Python/tests/test_prl_ewa.py index 88c2135b..e75974dc 100644 --- a/Python/tests/test_prl_ewa.py +++ b/Python/tests/test_prl_ewa.py @@ -4,7 +4,8 @@ def test_prl_ewa(): - _ = prl_ewa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_ewa( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious.py b/Python/tests/test_prl_fictitious.py index 886cdca2..46d456e5 100644 --- a/Python/tests/test_prl_fictitious.py +++ b/Python/tests/test_prl_fictitious.py @@ -4,7 +4,8 @@ def test_prl_fictitious(): - _ = prl_fictitious(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_fictitious( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_multipleB.py b/Python/tests/test_prl_fictitious_multipleB.py index bd53f91d..b5f8eee6 100644 --- a/Python/tests/test_prl_fictitious_multipleB.py +++ b/Python/tests/test_prl_fictitious_multipleB.py @@ -4,7 +4,8 @@ def test_prl_fictitious_multipleB(): - _ = prl_fictitious_multipleB(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_fictitious_multipleB( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_rp.py b/Python/tests/test_prl_fictitious_rp.py index 5c64afff..e4d61e9c 100644 --- a/Python/tests/test_prl_fictitious_rp.py +++ b/Python/tests/test_prl_fictitious_rp.py @@ -4,7 +4,8 @@ def test_prl_fictitious_rp(): - _ = prl_fictitious_rp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_fictitious_rp( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_rp_woa.py b/Python/tests/test_prl_fictitious_rp_woa.py index 9091f8dd..72192f62 100644 --- a/Python/tests/test_prl_fictitious_rp_woa.py +++ b/Python/tests/test_prl_fictitious_rp_woa.py @@ -4,7 +4,8 @@ def test_prl_fictitious_rp_woa(): - _ = prl_fictitious_rp_woa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_fictitious_rp_woa( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_woa.py b/Python/tests/test_prl_fictitious_woa.py index 35825200..34c5605e 100644 --- a/Python/tests/test_prl_fictitious_woa.py +++ b/Python/tests/test_prl_fictitious_woa.py @@ -4,7 +4,8 @@ def test_prl_fictitious_woa(): - _ = prl_fictitious_woa(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_fictitious_woa( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_rp.py b/Python/tests/test_prl_rp.py index 5c8ee1c4..fdc80ca8 100644 --- a/Python/tests/test_prl_rp.py +++ b/Python/tests/test_prl_rp.py @@ -4,7 +4,8 @@ def test_prl_rp(): - _ = prl_rp(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_rp( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_rp_multipleB.py b/Python/tests/test_prl_rp_multipleB.py index 
9bfe6c05..880c554d 100644 --- a/Python/tests/test_prl_rp_multipleB.py +++ b/Python/tests/test_prl_rp_multipleB.py @@ -4,7 +4,8 @@ def test_prl_rp_multipleB(): - _ = prl_rp_multipleB(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = prl_rp_multipleB( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_pst_gainloss_Q.py b/Python/tests/test_pst_gainloss_Q.py index f36ef346..6be734d4 100644 --- a/Python/tests/test_pst_gainloss_Q.py +++ b/Python/tests/test_pst_gainloss_Q.py @@ -4,7 +4,8 @@ def test_pst_gainloss_Q(): - _ = pst_gainloss_Q(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = pst_gainloss_Q( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_noLA.py b/Python/tests/test_ra_noLA.py index da5c0780..776a0ff7 100644 --- a/Python/tests/test_ra_noLA.py +++ b/Python/tests/test_ra_noLA.py @@ -4,7 +4,8 @@ def test_ra_noLA(): - _ = ra_noLA(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ra_noLA( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_noRA.py b/Python/tests/test_ra_noRA.py index 342bd88c..14e43a94 100644 --- a/Python/tests/test_ra_noRA.py +++ b/Python/tests/test_ra_noRA.py @@ -4,7 +4,8 @@ def test_ra_noRA(): - _ = ra_noRA(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ra_noRA( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_prospect.py b/Python/tests/test_ra_prospect.py index 39f5a6d3..779386f0 100644 --- a/Python/tests/test_ra_prospect.py +++ b/Python/tests/test_ra_prospect.py @@ -4,7 +4,8 @@ def test_ra_prospect(): - _ = ra_prospect(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ra_prospect( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_rdt_happiness.py b/Python/tests/test_rdt_happiness.py index 726c7f21..9a0a1d4c 100644 --- a/Python/tests/test_rdt_happiness.py +++ b/Python/tests/test_rdt_happiness.py @@ -4,7 +4,8 @@ def test_rdt_happiness(): - _ = rdt_happiness(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = rdt_happiness( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par4.py b/Python/tests/test_ts_par4.py index e354ad46..f6e1b7c8 100644 --- a/Python/tests/test_ts_par4.py +++ b/Python/tests/test_ts_par4.py @@ -4,7 +4,8 @@ def test_ts_par4(): - _ = ts_par4(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ts_par4( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par6.py b/Python/tests/test_ts_par6.py index 83e34fd2..e293d92b 100644 --- a/Python/tests/test_ts_par6.py +++ b/Python/tests/test_ts_par6.py @@ -4,7 +4,8 @@ def test_ts_par6(): - _ = ts_par6(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ts_par6( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par7.py b/Python/tests/test_ts_par7.py index 30bc3e3c..69c0880c 100644 --- a/Python/tests/test_ts_par7.py +++ b/Python/tests/test_ts_par7.py @@ -4,7 +4,8 @@ def test_ts_par7(): - _ = ts_par7(example=True, niter=2, nwarmup=1, nchain=1, ncore=1) + _ = ts_par7( + example=True, niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ug_bayes.py b/Python/tests/test_ug_bayes.py index 
8def3d6a..bbffb4d3 100644
--- a/Python/tests/test_ug_bayes.py
+++ b/Python/tests/test_ug_bayes.py
@@ -4,7 +4,8 @@
 def test_ug_bayes():
-    _ = ug_bayes(example=True, niter=2, nwarmup=1, nchain=1, ncore=1)
+    _ = ug_bayes(
+        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)

 if __name__ == '__main__':
diff --git a/Python/tests/test_ug_delta.py b/Python/tests/test_ug_delta.py
index a7824dd0..fb2ed0c8 100644
--- a/Python/tests/test_ug_delta.py
+++ b/Python/tests/test_ug_delta.py
@@ -4,7 +4,8 @@
 def test_ug_delta():
-    _ = ug_delta(example=True, niter=2, nwarmup=1, nchain=1, ncore=1)
+    _ = ug_delta(
+        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)

 if __name__ == '__main__':
diff --git a/Python/tests/test_wcs_sql.py b/Python/tests/test_wcs_sql.py
index 1141ba40..6753eba3 100644
--- a/Python/tests/test_wcs_sql.py
+++ b/Python/tests/test_wcs_sql.py
@@ -4,7 +4,8 @@
 def test_wcs_sql():
-    _ = wcs_sql(example=True, niter=2, nwarmup=1, nchain=1, ncore=1)
+    _ = wcs_sql(
+        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)

 if __name__ == '__main__':
diff --git a/commons/templates/PY_TEST_TEMPLATE.txt b/commons/templates/PY_TEST_TEMPLATE.txt
index 87de2417..c1d89d9a 100644
--- a/commons/templates/PY_TEST_TEMPLATE.txt
+++ b/commons/templates/PY_TEST_TEMPLATE.txt
@@ -4,7 +4,8 @@
 from hbayesdm.models import {model_function}

 def test_{model_function}():
-    _ = {model_function}(example=True, niter=2, nwarmup=1, nchain=1, ncore=1)
+    _ = {model_function}(
+        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)

 if __name__ == '__main__':

From 1985fb2a39d649ae409f12d8147b2e7afa202224 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Wed, 21 Aug 2019 12:17:31 +0900
Subject: [PATCH 058/163] Remove unnecessary file

---
 NEWS.md | 120 --------------------------------------------------
 1 file changed, 120 deletions(-)
 delete mode 100644 NEWS.md

diff --git a/NEWS.md b/NEWS.md
deleted file mode 100644
index 8b0ebf97..00000000
--- a/NEWS.md
+++ /dev/null
@@ -1,120 +0,0 @@
-# hBayesDM 0.7.2
-
-* Add three new models for the bandit4arm task: `bandit4arm_2par_lapse`,
-  `bandit4arm_lapse_decay` and `bandit4arm_singleA_lapse`.
-* Fix various (minor) errors.
-
-# hBayesDM 0.7.1
-
-* Make it usable without manually loading `rstan`.
-* Remove an annoying warning about using `..insensitive_data_columns`.
-
-# hBayesDM 0.7.0
-
-* Now, by default, you should build a Stan file into a binary the first time you use it. To build all the models on installation, you should set the environment variable `BUILD_ALL` to `true` before installation.
-* Now all the implemented models are refactored using the `hBayesDM_model` function. You don't have to change anything to use them, but developers can easily implement new models now!
-* We added a Kalman filter model for the 4-armed bandit task (`bandit4arm2_kalman_filter`; Daw et al., 2006) and a probability weighting function for general description-based tasks (`dbdm_prob_weight`; Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008).
-* Initial values for parameter estimation in some models are updated to plausible values, and the parameter boundaries of several models are fixed (see more on issues #63 and #64 on GitHub).
-* Exponential and linear models for the choice under risk and ambiguity task now have four model regressors: `sv`, `sv_fix`, `sv_var`, and `p_var`.
-* Fix the Travis CI settings and related code so that CI checks pass properly.
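The test rewrites in PATCH 057 above all follow a single smoke-test pattern: run each model on its bundled example data with a deliberately tiny sampler budget and check only that the call completes. A standalone sketch of that pattern, assuming nothing beyond the keyword arguments used throughout the patch (example, niter, nwarmup, nchain, ncore):

    from hbayesdm.models import ra_prospect


    def test_ra_prospect_smoke():
        # 10 iterations with 5 warmup draws cannot yield usable estimates;
        # the point is only to catch compilation and data-handling errors
        # while keeping CI runtime short.
        _ = ra_prospect(example=True, niter=10, nwarmup=5, nchain=1, ncore=1)


    if __name__ == '__main__':
        test_ra_prospect_smoke()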
-
-# hBayesDM 0.6.3
-
-* Update the dependencies on rstan (>= 2.18.1)
-* No changes to model files; same as version 0.6.2
-
-# hBayesDM 0.6.2
-
-* Fix an error on choiceRT_ddm (#44)
-
-# hBayesDM 0.6.1
-
-* Solve an issue with built binary files.
-* Fix an error on peer_ocu with misplaced parentheses.
-
-# hBayesDM 0.6.0
-
-* Add new tasks (Balloon Analogue Risk Task, Choice under Risk and Ambiguity Task, Probabilistic Selection Task, Risky Decision Task (a.k.a. Happiness task), Wisconsin Card Sorting Task)
-* Add a new model for the Iowa Gambling Task (igt_orl)
-* Change priors (Half-Cauchy(0, 5) --> Half-Cauchy(0, 1) or Half-Normal(0, 0.2))
-* printFit function now provides LOOIC weights and/or WAIC weights
-
-# hBayesDM 0.5.1
-
-* Add models for the Two Step task
-* Add models without the indecision point parameter (alpha) for the PRL task (prl_*_woa.stan)
-* Model-based regressors for the PRL task are now available
-* For the PRL task & prl_fictitious.stan & prl_fictitious_rp.stan --> change the range of alpha (indecision point) from [0, 1] to [-Inf, Inf]
-
-# hBayesDM 0.5.0
-
-* Support variational Bayesian methods (vb=TRUE)
-* Allow posterior predictive checks, except for drift-diffusion models (inc_postpred=TRUE)
-* Add the peer influence task (Chung et al., 2015, USE WITH CAUTION for now and PLEASE GIVE US FEEDBACK!)
-* Add 'prl_fictitious_rp' model
-* Made changes to be compatible with the newest Stan version (e.g., // instead of # for commenting).
-* In 'prl_*' models, 'rewlos' is replaced by 'outcome' so that column names and labels are as consistent across tasks as possible.
-* The email feature is disabled, as the R mail package no longer allows users to send anonymous emails.
-* When outputs are saved as a file (*.RData), the file name now contains the name of the data file.
-
-# hBayesDM 0.4.0
-
-* Add a choice reaction time task and evidence accumulation models
-  - Drift diffusion model (both hierarchical and single-subject)
-  - Linear Ballistic Accumulator (LBA) model (both hierarchical and single-subject)
-* Add PRL models that can fit multiple blocks
-* Add single-subject versions for the delay discounting task (`dd_hyperbolic_single` and `dd_cs_single`).
-* Standardize variable names across all models (e.g., `rewlos` --> `outcome` for all models)
-* Separate versions for CRAN and GitHub. All models/features are identical, but the GitHub version contains precompiled models.
-
-# hBayesDM 0.3.1
-
-* Remove dependence on the modeest package. Now use a built-in function to estimate the mode of a posterior distribution.
-* Rewrite the "printFit" function.
-
-# hBayesDM 0.3.0
-
-* Made several changes following the guidelines for R packages providing interfaces to Stan.
-* Stan models are precompiled and models will run immediately when called.
-* The default number of chains is set to 4.
-* The default value of `adapt_delta` is set to 0.95 to reduce the potential for divergences.
-* The “printFit” function uses LOOIC by default. Users can select WAIC or both (LOOIC & WAIC) if needed.
-
-# hBayesDM 0.2.3.3
-
-* Add help files
-* Add a function for checking Rhat values (rhat).
-* Change a link to its tutorial website
-
-# hBayesDM 0.2.3.2
-
-* Use wide normal distributions for unbounded parameters (gng_* models).
-* Automatic removal of rows (trials) containing NAs.
- -# hBayesDM 0.2.3.1 - -* Add a function for plotting individual parameters (plotInd) - -# hBayesDM 0.2.3 - -* Add a new task: the Ultimatum Game -* Add new models for the Probabilistic Reversal Learning and Risk Aversion tasks -* ‘bandit2arm’ -> change its name to ‘bandit2arm_delta’. Now all model names are in the same format (i.e., TASK_MODEL). -* Users can extract model-based regressors from gng_m* models -* Include the option of customizing control parameters (adapt_delta, max_treedepth, stepsize) -* ‘plotHDI’ function -> add ‘fontSize’ argument & change the color of histogram - -# hBayesDM 0.2.1 - -## Bug fixes - -* All models: Fix errors when indPars=“mode” -* ra_prospect model: Add description for column names of a data (*.txt) file - -## Change - -* Change standard deviations of ‘b’ and ‘pi’ priors in gng_* models - -# hBayesDM 0.2.0 - -Initially released. From 104dadee8bf9b6dc5a642266811fad7c4f273029 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:33:13 +0900 Subject: [PATCH 059/163] Use random values if VB fails --- Python/hbayesdm/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 1bebf5d3..76cbdfcf 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -454,9 +454,10 @@ def _prepare_gen_init_vb(self, try: fit = sm.vb(data=data_dict) except Exception: - raise RuntimeError( + raise RuntimeWarning( 'Failed to get VB estimates for initial values. ' - 'Please re-run the code to try fitting model with VB.') + 'Use random values for initial values.') + return 'random' len_param = len(self.parameters) dict_vb = { From e3da6d0eacc869f51d6d493835eab9495e2f0e5b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 12:59:26 +0900 Subject: [PATCH 060/163] Fix extract_ic.R --- R/R/extract_ic.R | 48 ++++++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/R/R/extract_ic.R b/R/R/extract_ic.R index cf20d3b4..ddafa217 100644 --- a/R/R/extract_ic.R +++ b/R/R/extract_ic.R @@ -1,6 +1,6 @@ #' Extract Model Comparison Estimates #' -#' @param modelData Object returned by \code{'hBayesDM'} model function +#' @param model_data Object returned by \code{'hBayesDM'} model function #' @param ic Information Criterion. 
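One detail of the VB-fallback change in PATCH 059 above: as committed, raise RuntimeWarning(...) still raises an exception, so the return 'random' that follows it is unreachable. The warn-and-continue behaviour described in the commit message is usually written with warnings.warn instead. A minimal sketch of that intent, where run_vb is a hypothetical stand-in for the PyStan sm.vb(data=data_dict) call:

    import warnings


    def run_vb(data):
        # Hypothetical stand-in that always fails, to exercise the fallback.
        raise RuntimeError('ADVI did not converge')


    def prepare_gen_init_vb(data):
        try:
            fit = run_vb(data)
        except Exception:
            warnings.warn('Failed to get VB estimates for initial values. '
                          'Using random values for initial values.')
            return 'random'  # PyStan accepts init='random'
        return fit


    print(prepare_gen_init_vb({}))  # warns, then prints: random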
'looic', 'waic', or 'both'
#' @param ncore Number of cores to use when computing LOOIC
#'
@@ -19,30 +19,34 @@
#' extract_ic(output, ic = "waic")
#' }
#'
-extract_ic <- function(modelData = NULL,
-                       ic = "looic",
+extract_ic <- function(model_data = NULL,
+                       ic = "looic",
                        ncore = 2) {
-
-  # Access fit within modelData
-  stanFit <- modelData$fit
-  n_chains <- length(stanFit@stan_args)
-
+  if (!(ic %in% c("looic", "waic", "both")))
+    stop("Set 'ic' as 'looic', 'waic' or 'both' \n")
+
+  # Access fit within model_data
+  stan_fit <- model_data$fit
+  n_chains <- length(stan_fit@stan_args)
+
   # extract LOOIC and WAIC, from Stanfit
   IC <- list()
-
-  lik <- loo::extract_log_lik(stanfit = stanFit, parameter_name = 'log_lik')
-  rel_eff <- loo::relative_eff(exp(lik), chain_id = rep(1:n_chains, nrow(lik)/n_chains), cores = getOption("mc.cores", ncore))
-
-  if (ic == "looic") {
-    IC$LOOIC <- loo::loo(lik, r_eff = rel_eff, cores = getOption("mc.cores", ncore))
-  } else if (ic == "waic") {
+
+  lik <- loo::extract_log_lik(
+    stanfit = stan_fit,
+    parameter_name = "log_lik")
+
+  rel_eff <- loo::relative_eff(
+    exp(lik),
+    chain_id = rep(1:n_chains, each = nrow(lik) / n_chains),
+    cores = getOption("mc.cores", ncore))
+
+  if (ic %in% c("looic", "both"))
+    IC$LOOIC <- loo::loo(lik, r_eff = rel_eff,
+                         cores = getOption("mc.cores", ncore))
+
+  if (ic %in% c("waic", "both"))
    IC$WAIC <- loo::waic(lik)
-  } else if (ic == "both") {
-    IC$LOOIC <- loo::loo(lik, r_eff = rel_eff, cores = getOption("mc.cores", ncore))
-    IC$WAIC <- loo::waic(lik)
-  } else {
-    stop("Set 'ic' as 'looic', 'waic' or 'both' \n")
-  }
-
+
   return(IC)
 }

From 66654978b7f66a5bf772fdb9717e341c6a48e00b Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Wed, 21 Aug 2019 19:32:54 +0900
Subject: [PATCH 061/163] Fix VB estimates suitable for single type models

---
 Python/hbayesdm/base.py | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py
index 76cbdfcf..cd6a9ea7 100644
--- a/Python/hbayesdm/base.py
+++ b/Python/hbayesdm/base.py
@@ -460,20 +460,20 @@ def _prepare_gen_init_vb(self,
             return 'random'

         len_param = len(self.parameters)
-        dict_vb = {
-            k: v
-            for k, v in zip(fit['mean_par_names'], fit['mean_pars'])
-            if k.startswith('sigma[') or '_pr[' in k
-        }
+        dict_vb = dict(zip(fit['mean_par_names'], fit['mean_pars']))

         dict_init = {}
-        dict_init['mu_pr'] = \
-            [dict_vb['mu_pr[%d]' % (i + 1)] for i in range(len_param)]
-        dict_init['sigma'] = \
-            [dict_vb['sigma[%d]' % (i + 1)] for i in range(len_param)]
-        for p in self.parameters:
-            dict_init['%s_pr' % p] = \
-                [dict_vb['%s_pr[%d]' % (p, i + 1)] for i in range(n_subj)]
+        if self.model_type == 'single':
+            for p in self.parameters:
+                dict_init[p] = dict_vb[p]
+        else:
+            dict_init['mu_pr'] = \
+                [dict_vb['mu_pr[%d]' % (i + 1)] for i in range(len_param)]
+            dict_init['sigma'] = \
+                [dict_vb['sigma[%d]' % (i + 1)] for i in range(len_param)]
+            for p in self.parameters:
+                dict_init['%s_pr' % p] = \
+                    [dict_vb['%s_pr[%d]' % (p, i + 1)] for i in range(n_subj)]

         def gen_init():
             return dict_init

From 6e847d1b4c4a860bd3c0aec78c6134f4dd9acf73 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Wed, 21 Aug 2019 19:33:09 +0900
Subject: [PATCH 062/163] Use np.max instead of built-in max function

---
 Python/hbayesdm/preprocess_funcs.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py
index 72ed1305..ffe8e000 100644
---
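The substantive part of the extract_ic fix above is the chain_id argument. Posterior draws extracted with loo::extract_log_lik are stacked chain by chain, so the chain labels must repeat block-wise, rep(1:n_chains, each = ...), as in the loo package examples, rather than cycle the way the old rep(1:n_chains, nrow(lik)/n_chains) call did. The difference in NumPy terms (np.repeat and np.tile mirror R's each= and times=):

    import numpy as np

    n_chains, draws_per_chain = 2, 3
    chains = np.arange(1, n_chains + 1)

    # rep(1:n_chains, each = draws_per_chain) -> block-wise: [1 1 1 2 2 2]
    print(np.repeat(chains, draws_per_chain))

    # rep(1:n_chains, draws_per_chain) -> cyclic: [1 2 1 2 1 2] (the old bug)
    print(np.tile(chains, draws_per_chain))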
a/Python/hbayesdm/preprocess_funcs.py +++ b/Python/hbayesdm/preprocess_funcs.py @@ -136,7 +136,7 @@ def bart_preprocess_func(self, raw_data, general_info, additional_args): 'N': n_subj, 'T': t_max, 'Tsubj': t_subjs, - 'P': max(pumps) + 1, + 'P': np.max(pumps) + 1, 'pumps': pumps, 'explosion': explosion, } @@ -163,8 +163,8 @@ def choiceRT_preprocess_func(self, raw_data, general_info, additional_args): Nl[s] = value_counts.at[1] # Reaction-times for upper/lower boundary responses - RTu = np.full((n_subj, max(Nu)), -1, dtype=float) - RTl = np.full((n_subj, max(Nl)), -1, dtype=float) + RTu = np.full((n_subj, np.max(Nu)), -1, dtype=float) + RTl = np.full((n_subj, np.max(Nl)), -1, dtype=float) # Write RTu, RTl subj_group = iter(general_info['grouped_data']) @@ -188,8 +188,8 @@ def choiceRT_preprocess_func(self, raw_data, general_info, additional_args): # Wrap into a dict for pystan data_dict = { 'N': n_subj, - 'Nu_max': max(Nu), - 'Nl_max': max(Nl), + 'Nu_max': np.max(Nu), + 'Nl_max': np.max(Nl), 'Nu': Nu, 'Nl': Nl, 'RTu': RTu, From ce84b007c4f06bba96af7d7102a0965e88cb426e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 19:41:22 +0900 Subject: [PATCH 063/163] Follow R convention for dev versioning --- Python/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Python/setup.py b/Python/setup.py index 172abc81..eda026b0 100644 --- a/Python/setup.py +++ b/Python/setup.py @@ -61,7 +61,8 @@ def get_version_info(): GIT_REVISION = "Unknown" if not ISRELEASED: - FULLVERSION += '.dev0+' + GIT_REVISION[:7] + # Following the R versioning convention + FULLVERSION += '.9000' return FULLVERSION, GIT_REVISION From b868ab9e76345e233169ff86ca4efb111635387a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 22:06:21 +0900 Subject: [PATCH 064/163] Write codes to generate R test codes --- commons/.gitignore | 1 + commons/generate-r-codes.py | 39 ++++++++++++++++++++++----- commons/templates/R_TEST_TEMPLATE.txt | 11 ++++++++ 3 files changed, 44 insertions(+), 7 deletions(-) create mode 100644 commons/templates/R_TEST_TEMPLATE.txt diff --git a/commons/.gitignore b/commons/.gitignore index 55d7377a..de87f6d6 100644 --- a/commons/.gitignore +++ b/commons/.gitignore @@ -1,3 +1,4 @@ R-codes/ +R-tests/ Python-codes/ Python-tests/ diff --git a/commons/generate-r-codes.py b/commons/generate-r-codes.py index 759efaf9..3365bca1 100644 --- a/commons/generate-r-codes.py +++ b/commons/generate-r-codes.py @@ -14,9 +14,11 @@ PATH_MODELS = PATH_ROOT / 'models' PATH_TEMPLATE = PATH_ROOT / 'templates' PATH_OUTPUT = PATH_ROOT / 'R-codes' +PATH_OUTPUT_TEST = PATH_ROOT / 'R-tests' TEMPLATE_DOCS = PATH_TEMPLATE / 'R_DOCS_TEMPLATE.txt' TEMPLATE_CODE = PATH_TEMPLATE / 'R_CODE_TEMPLATE.txt' +TEMPLATE_TEST = PATH_TEMPLATE / 'R_TEST_TEMPLATE.txt' def parse_cite_string(cite): @@ -38,11 +40,8 @@ def parse_cite_string(cite): shortcite = '{}{}'.format(firstauthor, year) if len(authors) == 1: barecite = '{}, {}'.format(firstauthor, year) - textcite = '{} ({})'.format(firstauthor, year) else: barecite = '{} et al., {}'.format(firstauthor, year) - textcite = '{} et al. 
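Returning to PATCH 062 above: the swap from the built-in max to np.max matters because the preprocessor inputs are NumPy arrays. On a 2-D array such as pumps (rows are subjects, columns are trials), the built-in iterates over rows and tries to compare them, which raises an error, whereas np.max reduces over every element; for the 1-D counts Nu and Nl either call works, so there the change is mainly a matter of consistency. A small self-contained illustration:

    import numpy as np

    # A pumps-style 2-D array: rows are subjects, columns are trials.
    pumps = np.array([[3, 5, 2],
                      [4, 1, 6]])

    print(np.max(pumps) + 1)  # 7, a valid 'P' for the Stan data dict

    try:
        max(pumps)  # built-in max compares whole rows elementwise
    except ValueError as err:
        print('built-in max fails:', err)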
({})'.format(firstauthor, year) - parencite = '({})'.format(barecite) return { 'authors': authors, @@ -65,7 +64,7 @@ def format_fullcite(cites, sep='\n#\' '): return sep.join([c['fullcite'] for c in cites if c]) -def generate_docstring(info): +def generate_docs(info): # Model full name (Snake-case) model_function = [info['task_name']['code'], info['model_name']['code']] if info['model_type']['code'] != '': @@ -235,19 +234,37 @@ def generate_code(info): return code +def generate_test(info): + # Model full name (Snake-case) + model_function = [info['task_name']['code'], info['model_name']['code']] + if info['model_type']['code'] != '': + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) + + # Read template for model tests + with open(TEMPLATE_TEST, 'r') as f: + test_template = f.read() + + test = test_template % dict(model_function=model_function) + + return test + + def main(json_fn, verbose): with Path(json_fn) as p: # Check if file exists if not p.exists(): - print('FileNotFound: Please specify existing json_file as argument.') + print('FileNotFound: ' + 'Please specify existing json_file as argument.') sys.exit(1) # Load json_file with open(p, 'r') as f: info = json.load(f, object_pairs_hook=OrderedDict) - docs = generate_docstring(info) + docs = generate_docs(info) code = generate_code(info) + test = generate_test(info) output = docs + code if verbose: @@ -263,13 +280,21 @@ def main(json_fn, verbose): if not PATH_OUTPUT.exists(): PATH_OUTPUT.mkdir(exist_ok=True) + if not PATH_OUTPUT_TEST.exists(): + PATH_OUTPUT_TEST.mkdir(exist_ok=True) - # Write model python code + # Write model codes code_fn = PATH_OUTPUT / (model_function + '.R') with open(code_fn, 'w') as f: f.write(output) print('Created file:', code_fn.name) + # Write test codes + test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.R') + with open(test_fn, 'w') as f: + f.write(test) + print('Created file:', test_fn.name) + if __name__ == '__main__': parser = argparse.ArgumentParser() diff --git a/commons/templates/R_TEST_TEMPLATE.txt b/commons/templates/R_TEST_TEMPLATE.txt new file mode 100644 index 00000000..9dee0a58 --- /dev/null +++ b/commons/templates/R_TEST_TEMPLATE.txt @@ -0,0 +1,11 @@ +context("Test %(model_function)s") +library(hBayesDM) + +test_that("Test %(model_function)s", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(%(model_function)s( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) From 8aa92ab71e1cb9803b98b8d6fc18080e7228a67b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 22:22:56 +0900 Subject: [PATCH 065/163] Use testthat to test R codes --- R/DESCRIPTION | 2 ++ R/tests/testthat.R | 4 ++++ R/tests/testthat/test_bandit2arm_delta.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm2_kalman_filter.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm_2par_lapse.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm_4par.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm_lapse.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm_lapse_decay.R | 11 +++++++++++ R/tests/testthat/test_bandit4arm_singleA_lapse.R | 11 +++++++++++ R/tests/testthat/test_bart_par4.R | 11 +++++++++++ R/tests/testthat/test_choiceRT_ddm.R | 11 +++++++++++ R/tests/testthat/test_choiceRT_ddm_single.R | 11 +++++++++++ R/tests/testthat/test_cra_exp.R | 11 +++++++++++ R/tests/testthat/test_cra_linear.R | 11 +++++++++++ R/tests/testthat/test_dbdm_prob_weight.R | 11 +++++++++++ R/tests/testthat/test_dd_cs.R | 11 +++++++++++ 
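The R_TEST_TEMPLATE.txt added in PATCH 064 above uses Python's %-style formatting with named placeholders, %(model_function)s, rather than str.format, presumably because the generated R code is itself full of { and } braces that str.format would try to interpret (the Python template from PATCH 057 can safely use {model_function} with str.format). A condensed, runnable sketch of the expansion that generate_test performs, with the template text abbreviated from the patch:

    # %-formatting leaves R's braces alone; only %(name)s fields are filled.
    R_TEST_TEMPLATE = '''context("Test %(model_function)s")
    library(hBayesDM)

    test_that("Test %(model_function)s", {
      skip_on_cran()
      expect_output(%(model_function)s(
        use_example = TRUE,
        niter=10, nwarmup=5, nchain=1, ncore=1))
    })
    '''

    print(R_TEST_TEMPLATE % dict(model_function='ra_prospect'))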
R/tests/testthat/test_dd_cs_single.R | 11 +++++++++++ R/tests/testthat/test_dd_exp.R | 11 +++++++++++ R/tests/testthat/test_dd_hyperbolic.R | 11 +++++++++++ R/tests/testthat/test_dd_hyperbolic_single.R | 11 +++++++++++ R/tests/testthat/test_gng_m1.R | 11 +++++++++++ R/tests/testthat/test_gng_m2.R | 11 +++++++++++ R/tests/testthat/test_gng_m3.R | 11 +++++++++++ R/tests/testthat/test_gng_m4.R | 11 +++++++++++ R/tests/testthat/test_igt_orl.R | 11 +++++++++++ R/tests/testthat/test_igt_pvl_decay.R | 11 +++++++++++ R/tests/testthat/test_igt_pvl_delta.R | 11 +++++++++++ R/tests/testthat/test_igt_vpp.R | 11 +++++++++++ R/tests/testthat/test_peer_ocu.R | 11 +++++++++++ R/tests/testthat/test_prl_ewa.R | 11 +++++++++++ R/tests/testthat/test_prl_fictitious.R | 11 +++++++++++ R/tests/testthat/test_prl_fictitious_multipleB.R | 11 +++++++++++ R/tests/testthat/test_prl_fictitious_rp.R | 11 +++++++++++ R/tests/testthat/test_prl_fictitious_rp_woa.R | 11 +++++++++++ R/tests/testthat/test_prl_fictitious_woa.R | 11 +++++++++++ R/tests/testthat/test_prl_rp.R | 11 +++++++++++ R/tests/testthat/test_prl_rp_multipleB.R | 11 +++++++++++ R/tests/testthat/test_pst_gainloss_Q.R | 11 +++++++++++ R/tests/testthat/test_ra_noLA.R | 11 +++++++++++ R/tests/testthat/test_ra_noRA.R | 11 +++++++++++ R/tests/testthat/test_ra_prospect.R | 11 +++++++++++ R/tests/testthat/test_rdt_happiness.R | 11 +++++++++++ R/tests/testthat/test_ts_par4.R | 11 +++++++++++ R/tests/testthat/test_ts_par6.R | 11 +++++++++++ R/tests/testthat/test_ts_par7.R | 11 +++++++++++ R/tests/testthat/test_ug_bayes.R | 11 +++++++++++ R/tests/testthat/test_ug_delta.R | 11 +++++++++++ R/tests/testthat/test_wcs_sql.R | 11 +++++++++++ 48 files changed, 512 insertions(+) create mode 100644 R/tests/testthat.R create mode 100644 R/tests/testthat/test_bandit2arm_delta.R create mode 100644 R/tests/testthat/test_bandit4arm2_kalman_filter.R create mode 100644 R/tests/testthat/test_bandit4arm_2par_lapse.R create mode 100644 R/tests/testthat/test_bandit4arm_4par.R create mode 100644 R/tests/testthat/test_bandit4arm_lapse.R create mode 100644 R/tests/testthat/test_bandit4arm_lapse_decay.R create mode 100644 R/tests/testthat/test_bandit4arm_singleA_lapse.R create mode 100644 R/tests/testthat/test_bart_par4.R create mode 100644 R/tests/testthat/test_choiceRT_ddm.R create mode 100644 R/tests/testthat/test_choiceRT_ddm_single.R create mode 100644 R/tests/testthat/test_cra_exp.R create mode 100644 R/tests/testthat/test_cra_linear.R create mode 100644 R/tests/testthat/test_dbdm_prob_weight.R create mode 100644 R/tests/testthat/test_dd_cs.R create mode 100644 R/tests/testthat/test_dd_cs_single.R create mode 100644 R/tests/testthat/test_dd_exp.R create mode 100644 R/tests/testthat/test_dd_hyperbolic.R create mode 100644 R/tests/testthat/test_dd_hyperbolic_single.R create mode 100644 R/tests/testthat/test_gng_m1.R create mode 100644 R/tests/testthat/test_gng_m2.R create mode 100644 R/tests/testthat/test_gng_m3.R create mode 100644 R/tests/testthat/test_gng_m4.R create mode 100644 R/tests/testthat/test_igt_orl.R create mode 100644 R/tests/testthat/test_igt_pvl_decay.R create mode 100644 R/tests/testthat/test_igt_pvl_delta.R create mode 100644 R/tests/testthat/test_igt_vpp.R create mode 100644 R/tests/testthat/test_peer_ocu.R create mode 100644 R/tests/testthat/test_prl_ewa.R create mode 100644 R/tests/testthat/test_prl_fictitious.R create mode 100644 R/tests/testthat/test_prl_fictitious_multipleB.R create mode 100644 R/tests/testthat/test_prl_fictitious_rp.R create mode 100644 
R/tests/testthat/test_prl_fictitious_rp_woa.R create mode 100644 R/tests/testthat/test_prl_fictitious_woa.R create mode 100644 R/tests/testthat/test_prl_rp.R create mode 100644 R/tests/testthat/test_prl_rp_multipleB.R create mode 100644 R/tests/testthat/test_pst_gainloss_Q.R create mode 100644 R/tests/testthat/test_ra_noLA.R create mode 100644 R/tests/testthat/test_ra_noRA.R create mode 100644 R/tests/testthat/test_ra_prospect.R create mode 100644 R/tests/testthat/test_rdt_happiness.R create mode 100644 R/tests/testthat/test_ts_par4.R create mode 100644 R/tests/testthat/test_ts_par6.R create mode 100644 R/tests/testthat/test_ts_par7.R create mode 100644 R/tests/testthat/test_ug_bayes.R create mode 100644 R/tests/testthat/test_ug_delta.R create mode 100644 R/tests/testthat/test_wcs_sql.R diff --git a/R/DESCRIPTION b/R/DESCRIPTION index 4a5607e1..a54f1dfa 100644 --- a/R/DESCRIPTION +++ b/R/DESCRIPTION @@ -112,3 +112,5 @@ Collate: 'ug_delta.R' 'wcs_sql.R' 'zzz.R' +Suggests: + testthat diff --git a/R/tests/testthat.R b/R/tests/testthat.R new file mode 100644 index 00000000..5ee556ea --- /dev/null +++ b/R/tests/testthat.R @@ -0,0 +1,4 @@ +library(testthat) +library(hBayesDM) + +test_check("hBayesDM") diff --git a/R/tests/testthat/test_bandit2arm_delta.R b/R/tests/testthat/test_bandit2arm_delta.R new file mode 100644 index 00000000..88568325 --- /dev/null +++ b/R/tests/testthat/test_bandit2arm_delta.R @@ -0,0 +1,11 @@ +context("Test bandit2arm_delta") +library(hBayesDM) + +test_that("Test bandit2arm_delta", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit2arm_delta( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm2_kalman_filter.R b/R/tests/testthat/test_bandit4arm2_kalman_filter.R new file mode 100644 index 00000000..1bb865c6 --- /dev/null +++ b/R/tests/testthat/test_bandit4arm2_kalman_filter.R @@ -0,0 +1,11 @@ +context("Test bandit4arm2_kalman_filter") +library(hBayesDM) + +test_that("Test bandit4arm2_kalman_filter", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit4arm2_kalman_filter( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm_2par_lapse.R b/R/tests/testthat/test_bandit4arm_2par_lapse.R new file mode 100644 index 00000000..119cf6c7 --- /dev/null +++ b/R/tests/testthat/test_bandit4arm_2par_lapse.R @@ -0,0 +1,11 @@ +context("Test bandit4arm_2par_lapse") +library(hBayesDM) + +test_that("Test bandit4arm_2par_lapse", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit4arm_2par_lapse( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm_4par.R b/R/tests/testthat/test_bandit4arm_4par.R new file mode 100644 index 00000000..725876ba --- /dev/null +++ b/R/tests/testthat/test_bandit4arm_4par.R @@ -0,0 +1,11 @@ +context("Test bandit4arm_4par") +library(hBayesDM) + +test_that("Test bandit4arm_4par", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit4arm_4par( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm_lapse.R b/R/tests/testthat/test_bandit4arm_lapse.R new file mode 100644 index 00000000..a5baa890 --- /dev/null +++ b/R/tests/testthat/test_bandit4arm_lapse.R @@ -0,0 +1,11 @@ +context("Test bandit4arm_lapse") +library(hBayesDM) + +test_that("Test bandit4arm_lapse", { + # Do not run this test on CRAN + skip_on_cran() + + 
expect_output(bandit4arm_lapse( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm_lapse_decay.R b/R/tests/testthat/test_bandit4arm_lapse_decay.R new file mode 100644 index 00000000..ae2b2a0b --- /dev/null +++ b/R/tests/testthat/test_bandit4arm_lapse_decay.R @@ -0,0 +1,11 @@ +context("Test bandit4arm_lapse_decay") +library(hBayesDM) + +test_that("Test bandit4arm_lapse_decay", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit4arm_lapse_decay( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bandit4arm_singleA_lapse.R b/R/tests/testthat/test_bandit4arm_singleA_lapse.R new file mode 100644 index 00000000..30be92fa --- /dev/null +++ b/R/tests/testthat/test_bandit4arm_singleA_lapse.R @@ -0,0 +1,11 @@ +context("Test bandit4arm_singleA_lapse") +library(hBayesDM) + +test_that("Test bandit4arm_singleA_lapse", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bandit4arm_singleA_lapse( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_bart_par4.R b/R/tests/testthat/test_bart_par4.R new file mode 100644 index 00000000..87fd06bb --- /dev/null +++ b/R/tests/testthat/test_bart_par4.R @@ -0,0 +1,11 @@ +context("Test bart_par4") +library(hBayesDM) + +test_that("Test bart_par4", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(bart_par4( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_choiceRT_ddm.R b/R/tests/testthat/test_choiceRT_ddm.R new file mode 100644 index 00000000..1e6d85e2 --- /dev/null +++ b/R/tests/testthat/test_choiceRT_ddm.R @@ -0,0 +1,11 @@ +context("Test choiceRT_ddm") +library(hBayesDM) + +test_that("Test choiceRT_ddm", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(choiceRT_ddm( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_choiceRT_ddm_single.R b/R/tests/testthat/test_choiceRT_ddm_single.R new file mode 100644 index 00000000..8c1f90d4 --- /dev/null +++ b/R/tests/testthat/test_choiceRT_ddm_single.R @@ -0,0 +1,11 @@ +context("Test choiceRT_ddm_single") +library(hBayesDM) + +test_that("Test choiceRT_ddm_single", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(choiceRT_ddm_single( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_cra_exp.R b/R/tests/testthat/test_cra_exp.R new file mode 100644 index 00000000..ded8cb29 --- /dev/null +++ b/R/tests/testthat/test_cra_exp.R @@ -0,0 +1,11 @@ +context("Test cra_exp") +library(hBayesDM) + +test_that("Test cra_exp", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(cra_exp( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_cra_linear.R b/R/tests/testthat/test_cra_linear.R new file mode 100644 index 00000000..40ffee53 --- /dev/null +++ b/R/tests/testthat/test_cra_linear.R @@ -0,0 +1,11 @@ +context("Test cra_linear") +library(hBayesDM) + +test_that("Test cra_linear", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(cra_linear( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dbdm_prob_weight.R b/R/tests/testthat/test_dbdm_prob_weight.R new file mode 100644 index 00000000..0091edcc --- /dev/null +++ b/R/tests/testthat/test_dbdm_prob_weight.R @@ -0,0 +1,11 @@ 
+context("Test dbdm_prob_weight") +library(hBayesDM) + +test_that("Test dbdm_prob_weight", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dbdm_prob_weight( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dd_cs.R b/R/tests/testthat/test_dd_cs.R new file mode 100644 index 00000000..7b9a552f --- /dev/null +++ b/R/tests/testthat/test_dd_cs.R @@ -0,0 +1,11 @@ +context("Test dd_cs") +library(hBayesDM) + +test_that("Test dd_cs", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dd_cs( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dd_cs_single.R b/R/tests/testthat/test_dd_cs_single.R new file mode 100644 index 00000000..5b69b365 --- /dev/null +++ b/R/tests/testthat/test_dd_cs_single.R @@ -0,0 +1,11 @@ +context("Test dd_cs_single") +library(hBayesDM) + +test_that("Test dd_cs_single", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dd_cs_single( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dd_exp.R b/R/tests/testthat/test_dd_exp.R new file mode 100644 index 00000000..86e81f1a --- /dev/null +++ b/R/tests/testthat/test_dd_exp.R @@ -0,0 +1,11 @@ +context("Test dd_exp") +library(hBayesDM) + +test_that("Test dd_exp", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dd_exp( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dd_hyperbolic.R b/R/tests/testthat/test_dd_hyperbolic.R new file mode 100644 index 00000000..71246980 --- /dev/null +++ b/R/tests/testthat/test_dd_hyperbolic.R @@ -0,0 +1,11 @@ +context("Test dd_hyperbolic") +library(hBayesDM) + +test_that("Test dd_hyperbolic", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dd_hyperbolic( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_dd_hyperbolic_single.R b/R/tests/testthat/test_dd_hyperbolic_single.R new file mode 100644 index 00000000..de624a5c --- /dev/null +++ b/R/tests/testthat/test_dd_hyperbolic_single.R @@ -0,0 +1,11 @@ +context("Test dd_hyperbolic_single") +library(hBayesDM) + +test_that("Test dd_hyperbolic_single", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(dd_hyperbolic_single( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_gng_m1.R b/R/tests/testthat/test_gng_m1.R new file mode 100644 index 00000000..292b5093 --- /dev/null +++ b/R/tests/testthat/test_gng_m1.R @@ -0,0 +1,11 @@ +context("Test gng_m1") +library(hBayesDM) + +test_that("Test gng_m1", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(gng_m1( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_gng_m2.R b/R/tests/testthat/test_gng_m2.R new file mode 100644 index 00000000..a4e82763 --- /dev/null +++ b/R/tests/testthat/test_gng_m2.R @@ -0,0 +1,11 @@ +context("Test gng_m2") +library(hBayesDM) + +test_that("Test gng_m2", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(gng_m2( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_gng_m3.R b/R/tests/testthat/test_gng_m3.R new file mode 100644 index 00000000..7388aa78 --- /dev/null +++ b/R/tests/testthat/test_gng_m3.R @@ -0,0 +1,11 @@ +context("Test gng_m3") +library(hBayesDM) + +test_that("Test gng_m3", { + # Do not run this 
test on CRAN + skip_on_cran() + + expect_output(gng_m3( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_gng_m4.R b/R/tests/testthat/test_gng_m4.R new file mode 100644 index 00000000..b32bdafb --- /dev/null +++ b/R/tests/testthat/test_gng_m4.R @@ -0,0 +1,11 @@ +context("Test gng_m4") +library(hBayesDM) + +test_that("Test gng_m4", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(gng_m4( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_igt_orl.R b/R/tests/testthat/test_igt_orl.R new file mode 100644 index 00000000..f7cfbe7f --- /dev/null +++ b/R/tests/testthat/test_igt_orl.R @@ -0,0 +1,11 @@ +context("Test igt_orl") +library(hBayesDM) + +test_that("Test igt_orl", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(igt_orl( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_igt_pvl_decay.R b/R/tests/testthat/test_igt_pvl_decay.R new file mode 100644 index 00000000..e42dba06 --- /dev/null +++ b/R/tests/testthat/test_igt_pvl_decay.R @@ -0,0 +1,11 @@ +context("Test igt_pvl_decay") +library(hBayesDM) + +test_that("Test igt_pvl_decay", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(igt_pvl_decay( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_igt_pvl_delta.R b/R/tests/testthat/test_igt_pvl_delta.R new file mode 100644 index 00000000..7b7c48c9 --- /dev/null +++ b/R/tests/testthat/test_igt_pvl_delta.R @@ -0,0 +1,11 @@ +context("Test igt_pvl_delta") +library(hBayesDM) + +test_that("Test igt_pvl_delta", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(igt_pvl_delta( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_igt_vpp.R b/R/tests/testthat/test_igt_vpp.R new file mode 100644 index 00000000..384d1e7d --- /dev/null +++ b/R/tests/testthat/test_igt_vpp.R @@ -0,0 +1,11 @@ +context("Test igt_vpp") +library(hBayesDM) + +test_that("Test igt_vpp", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(igt_vpp( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_peer_ocu.R b/R/tests/testthat/test_peer_ocu.R new file mode 100644 index 00000000..09842e8b --- /dev/null +++ b/R/tests/testthat/test_peer_ocu.R @@ -0,0 +1,11 @@ +context("Test peer_ocu") +library(hBayesDM) + +test_that("Test peer_ocu", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(peer_ocu( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_ewa.R b/R/tests/testthat/test_prl_ewa.R new file mode 100644 index 00000000..bdf4d261 --- /dev/null +++ b/R/tests/testthat/test_prl_ewa.R @@ -0,0 +1,11 @@ +context("Test prl_ewa") +library(hBayesDM) + +test_that("Test prl_ewa", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_ewa( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_fictitious.R b/R/tests/testthat/test_prl_fictitious.R new file mode 100644 index 00000000..21ef0a0d --- /dev/null +++ b/R/tests/testthat/test_prl_fictitious.R @@ -0,0 +1,11 @@ +context("Test prl_fictitious") +library(hBayesDM) + +test_that("Test prl_fictitious", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_fictitious( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) 
+}) diff --git a/R/tests/testthat/test_prl_fictitious_multipleB.R b/R/tests/testthat/test_prl_fictitious_multipleB.R new file mode 100644 index 00000000..71eb8641 --- /dev/null +++ b/R/tests/testthat/test_prl_fictitious_multipleB.R @@ -0,0 +1,11 @@ +context("Test prl_fictitious_multipleB") +library(hBayesDM) + +test_that("Test prl_fictitious_multipleB", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_fictitious_multipleB( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_fictitious_rp.R b/R/tests/testthat/test_prl_fictitious_rp.R new file mode 100644 index 00000000..57600992 --- /dev/null +++ b/R/tests/testthat/test_prl_fictitious_rp.R @@ -0,0 +1,11 @@ +context("Test prl_fictitious_rp") +library(hBayesDM) + +test_that("Test prl_fictitious_rp", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_fictitious_rp( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_fictitious_rp_woa.R b/R/tests/testthat/test_prl_fictitious_rp_woa.R new file mode 100644 index 00000000..d0794e3a --- /dev/null +++ b/R/tests/testthat/test_prl_fictitious_rp_woa.R @@ -0,0 +1,11 @@ +context("Test prl_fictitious_rp_woa") +library(hBayesDM) + +test_that("Test prl_fictitious_rp_woa", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_fictitious_rp_woa( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_fictitious_woa.R b/R/tests/testthat/test_prl_fictitious_woa.R new file mode 100644 index 00000000..46e0c65f --- /dev/null +++ b/R/tests/testthat/test_prl_fictitious_woa.R @@ -0,0 +1,11 @@ +context("Test prl_fictitious_woa") +library(hBayesDM) + +test_that("Test prl_fictitious_woa", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_fictitious_woa( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_rp.R b/R/tests/testthat/test_prl_rp.R new file mode 100644 index 00000000..0a20c1ec --- /dev/null +++ b/R/tests/testthat/test_prl_rp.R @@ -0,0 +1,11 @@ +context("Test prl_rp") +library(hBayesDM) + +test_that("Test prl_rp", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_rp( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_prl_rp_multipleB.R b/R/tests/testthat/test_prl_rp_multipleB.R new file mode 100644 index 00000000..9e007d14 --- /dev/null +++ b/R/tests/testthat/test_prl_rp_multipleB.R @@ -0,0 +1,11 @@ +context("Test prl_rp_multipleB") +library(hBayesDM) + +test_that("Test prl_rp_multipleB", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(prl_rp_multipleB( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_pst_gainloss_Q.R b/R/tests/testthat/test_pst_gainloss_Q.R new file mode 100644 index 00000000..a26616ee --- /dev/null +++ b/R/tests/testthat/test_pst_gainloss_Q.R @@ -0,0 +1,11 @@ +context("Test pst_gainloss_Q") +library(hBayesDM) + +test_that("Test pst_gainloss_Q", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(pst_gainloss_Q( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ra_noLA.R b/R/tests/testthat/test_ra_noLA.R new file mode 100644 index 00000000..5dd55dc7 --- /dev/null +++ b/R/tests/testthat/test_ra_noLA.R @@ -0,0 +1,11 @@ +context("Test ra_noLA") +library(hBayesDM) + 
+test_that("Test ra_noLA", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ra_noLA( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ra_noRA.R b/R/tests/testthat/test_ra_noRA.R new file mode 100644 index 00000000..8a27453a --- /dev/null +++ b/R/tests/testthat/test_ra_noRA.R @@ -0,0 +1,11 @@ +context("Test ra_noRA") +library(hBayesDM) + +test_that("Test ra_noRA", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ra_noRA( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ra_prospect.R b/R/tests/testthat/test_ra_prospect.R new file mode 100644 index 00000000..ba57f2b9 --- /dev/null +++ b/R/tests/testthat/test_ra_prospect.R @@ -0,0 +1,11 @@ +context("Test ra_prospect") +library(hBayesDM) + +test_that("Test ra_prospect", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ra_prospect( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_rdt_happiness.R b/R/tests/testthat/test_rdt_happiness.R new file mode 100644 index 00000000..751bbf02 --- /dev/null +++ b/R/tests/testthat/test_rdt_happiness.R @@ -0,0 +1,11 @@ +context("Test rdt_happiness") +library(hBayesDM) + +test_that("Test rdt_happiness", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(rdt_happiness( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ts_par4.R b/R/tests/testthat/test_ts_par4.R new file mode 100644 index 00000000..b2ae4d7c --- /dev/null +++ b/R/tests/testthat/test_ts_par4.R @@ -0,0 +1,11 @@ +context("Test ts_par4") +library(hBayesDM) + +test_that("Test ts_par4", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ts_par4( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ts_par6.R b/R/tests/testthat/test_ts_par6.R new file mode 100644 index 00000000..7c220fb8 --- /dev/null +++ b/R/tests/testthat/test_ts_par6.R @@ -0,0 +1,11 @@ +context("Test ts_par6") +library(hBayesDM) + +test_that("Test ts_par6", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ts_par6( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ts_par7.R b/R/tests/testthat/test_ts_par7.R new file mode 100644 index 00000000..60b41b12 --- /dev/null +++ b/R/tests/testthat/test_ts_par7.R @@ -0,0 +1,11 @@ +context("Test ts_par7") +library(hBayesDM) + +test_that("Test ts_par7", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ts_par7( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ug_bayes.R b/R/tests/testthat/test_ug_bayes.R new file mode 100644 index 00000000..4834d264 --- /dev/null +++ b/R/tests/testthat/test_ug_bayes.R @@ -0,0 +1,11 @@ +context("Test ug_bayes") +library(hBayesDM) + +test_that("Test ug_bayes", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ug_bayes( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) diff --git a/R/tests/testthat/test_ug_delta.R b/R/tests/testthat/test_ug_delta.R new file mode 100644 index 00000000..eabbc43a --- /dev/null +++ b/R/tests/testthat/test_ug_delta.R @@ -0,0 +1,11 @@ +context("Test ug_delta") +library(hBayesDM) + +test_that("Test ug_delta", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(ug_delta( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, 
ncore=1)) +}) diff --git a/R/tests/testthat/test_wcs_sql.R b/R/tests/testthat/test_wcs_sql.R new file mode 100644 index 00000000..8bb7c3c8 --- /dev/null +++ b/R/tests/testthat/test_wcs_sql.R @@ -0,0 +1,11 @@ +context("Test wcs_sql") +library(hBayesDM) + +test_that("Test wcs_sql", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(wcs_sql( + use_example = TRUE, + niter=10, nwarmup=5, nchain=1, ncore=1)) +}) From f17e237082daba7948b3ff91056572fe4efe56b5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 22:23:00 +0900 Subject: [PATCH 066/163] Use random values if VB fails --- R/R/hBayesDM_model.R | 67 ++++++++++++++++++++++++++++++-------------- 1 file changed, 46 insertions(+), 21 deletions(-) diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R index 8f9d733e..7ea47691 100644 --- a/R/R/hBayesDM_model.R +++ b/R/R/hBayesDM_model.R @@ -361,35 +361,60 @@ hBayesDM_model <- function(task_name, } # Initial values for the parameters + gen_init <- NULL if (inits[1] == "vb") { - cat("\n") - cat("****************************************\n") - cat("** Use VB estimates as initial values **\n") - cat("****************************************\n") - - fit_vb <- rstan::vb(object = stanmodel_arg, data = data_list) - m_vb <- colMeans(as.data.frame(fit_vb)) - - gen_init <- function() { - ret <- list( - mu_pr = as.vector(m_vb[startsWith(names(m_vb), 'mu_pr')]), - sigma = as.vector(m_vb[startsWith(names(m_vb), 'sigma')]) - ) + if (vb) { + cat("\n") + cat("*****************************************\n") + cat("** Use random values as initial values **\n") + cat("*****************************************\n") + gen_init <- "random" - for (p in names(parameters)) { - ret[[p]] <- as.vector(m_vb[startsWith(names(m_vb), paste0(p, '_pr'))]) - } + } else { + cat("\n") + cat("****************************************\n") + cat("** Use VB estimates as initial values **\n") + cat("****************************************\n") + + tryCatch({ + fit_vb <- rstan::vb(object = stanmodel_arg, data = data_list) + m_vb <- colMeans(as.data.frame(fit_vb)) + + gen_init <<- function() { + ret <- list( + mu_pr = as.vector(m_vb[startsWith(names(m_vb), "mu_pr")]), + sigma = as.vector(m_vb[startsWith(names(m_vb), "sigma")]) + ) + + for (p in names(parameters)) { + ret[[p]] <- as.vector(m_vb[startsWith(names(m_vb), paste0(p, "_pr"))]) + } - return(ret) + return(ret) + } + }, error = function(e) { + cat("\n") + cat("******************************************\n") + cat("** Failed to obtain VB estimates. **\n") + cat("** Use random values as initial values. **\n") + cat("******************************************\n") + gen_init <<- "random" + }) } } else if (inits[1] == "random") { + cat("\n") + cat("*****************************************\n") + cat("** Use random values as initial values **\n") + cat("*****************************************\n") gen_init <- "random" } else { if (inits[1] == "fixed") { - inits <- unlist(lapply(parameters, "[", 2)) # plausible values of each parameter + # plausible values of each parameter + inits <- unlist(lapply(parameters, "[", 2)) } else if (length(inits) != length(parameters)) { - stop("** Length of 'inits' must be ", length(parameters), - " (= the number of parameters of this model). Please check again. **\n") + stop("** Length of 'inits' must be ", length(parameters), " ", + "(= the number of parameters of this model). ", + "Please check again. 
**\n") } if (model_type == "single") { gen_init <- function() { @@ -424,7 +449,7 @@ hBayesDM_model <- function(task_name, ############### Fit & extract ############### # Fit the Stan model - if (vb) { # if variational Bayesian + if (vb) { fit <- rstan::vb(object = stanmodel_arg, data = data_list, pars = pars, From d0a4683bb4848405898e1f33fa5a690d5382c005 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 23:24:34 +0900 Subject: [PATCH 067/163] Automate code generating with one shell script --- commons/generate-codes.sh | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100755 commons/generate-codes.sh diff --git a/commons/generate-codes.sh b/commons/generate-codes.sh new file mode 100755 index 00000000..e5c67891 --- /dev/null +++ b/commons/generate-codes.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +python3 generate-r-codes.py -a +cp R-codes/*.R ../R/R/ +cp R-tests/*.R ../R/tests/testthat/ + +python3 generate-python-codes.py -a +cp Python-codes/_*.py ../Python/hbayesdm/models/ +cp Python-tests/*.py ../Python/tests/ From 9dc15ef74a8339d077f33b0ed0d924af48fcd055 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 23:24:46 +0900 Subject: [PATCH 068/163] Update documentations --- Python/hbayesdm/models/_bart_par4.py | 8 ++++---- Python/hbayesdm/models/_pst_gainloss_Q.py | 15 +-------------- Python/hbayesdm/models/_ra_noLA.py | 2 +- Python/hbayesdm/models/_ra_noRA.py | 2 +- Python/hbayesdm/models/_ra_prospect.py | 2 +- 5 files changed, 8 insertions(+), 21 deletions(-) diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index 4917d06c..920d988e 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -67,16 +67,16 @@ def bart_par4( stepsize: float = 1, max_treedepth: int = 10, **additional_args: Any) -> TaskModel: - """Balloon Analogue Risk Task - Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters + """Balloon Analogue Risk Task - Re-parameterized version of BART model with 4 parameters - Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task [van_Ravenzwaaij2011]_ - using Re-parameterized version (by Harhim Park & Jaeyeong Yang) of BART Model (Ravenzwaaij et al., 2011) with 4 parameters with the following parameters: + Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task + using Re-parameterized version of BART model with 4 parameters [van_Ravenzwaaij2011]_ with the following parameters: "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature). - .. [van_Ravenzwaaij2011] van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105. + .. [van_Ravenzwaaij2011] van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105. .. codeauthor:: Harhim Park .. codeauthor:: Jaeyeong Yang diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index 78581bb1..c2ebfc4c 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -90,20 +90,7 @@ def pst_gainloss_Q( correctly and contain the information below: - "subjID": A unique identifier for each subject in the data-set. 
- - "type": Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as below: - - ===== ======== ================== - Code Stimulus Probability to win - ===== ======== ================== - 1 A 80% - 2 B 20% - 3 C 70% - 4 D 30% - 5 E 60% - 6 F 40% - ===== ======== ================== - - The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6. + - "type": Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as for 80\% (type 1), 20\% (type 2), 70\% (type 3), 30\% (type 4), 60\% (type 5), 40\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6. - "choice": Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0). - "reward": Amount of reward earned as a result of the trial. diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index 3cdc79d8..6e990c82 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -89,7 +89,7 @@ def ra_noLA( correctly and contain the information below: - "subjID": A unique identifier for each subject in the data-set. - - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9). - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). - "cert": Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero. - "gamble": If gamble was taken, gamble == 1; else gamble == 0. diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index 5e90e29a..34cac275 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -89,7 +89,7 @@ def ra_noRA( correctly and contain the information below: - "subjID": A unique identifier for each subject in the data-set. - - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9). - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). - "cert": Guaranteed amount of a safe option. "cert" is assumed to be zero or greater than zero. - "gamble": If gamble was taken, gamble == 1; else gamble == 0. diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index d4877260..4bcdc100 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -91,7 +91,7 @@ def ra_prospect( correctly and contain the information below: - "subjID": A unique identifier for each subject in the data-set. - - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9).} + - "gain": Possible (50\%) gain outcome of a risky option (e.g. 9). - "loss": Possible (50\%) loss outcome of a risky option (e.g. 5, or -5). - "cert": Guaranteed amount of a safe option. 
"cert" is assumed to be zero or greater than zero. - "gamble": If gamble was taken, gamble == 1; else gamble == 0. From 79f7dcb434d599fde7c4f0e0327b5e0e2754e039 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 21 Aug 2019 23:24:58 +0900 Subject: [PATCH 069/163] Update Rdocs --- R/.gitignore | 2 ++ R/man-roxygen/model-documentation.R | 4 ++-- R/man/bandit2arm_delta.Rd | 13 +------------ R/man/bandit4arm2_kalman_filter.Rd | 13 +------------ R/man/bandit4arm_2par_lapse.Rd | 13 +------------ R/man/bandit4arm_4par.Rd | 13 +------------ R/man/bandit4arm_lapse.Rd | 13 +------------ R/man/bandit4arm_lapse_decay.Rd | 13 +------------ R/man/bandit4arm_singleA_lapse.Rd | 13 +------------ R/man/bart_par4.Rd | 13 +------------ R/man/cra_exp.Rd | 13 +------------ R/man/cra_linear.Rd | 13 +------------ R/man/dbdm_prob_weight.Rd | 13 +------------ R/man/dd_cs.Rd | 13 +------------ R/man/dd_cs_single.Rd | 13 +------------ R/man/dd_exp.Rd | 13 +------------ R/man/dd_hyperbolic.Rd | 13 +------------ R/man/dd_hyperbolic_single.Rd | 13 +------------ R/man/gng_m1.Rd | 13 +------------ R/man/gng_m2.Rd | 13 +------------ R/man/gng_m3.Rd | 13 +------------ R/man/gng_m4.Rd | 13 +------------ R/man/peer_ocu.Rd | 13 +------------ R/man/prl_ewa.Rd | 13 +------------ R/man/prl_fictitious.Rd | 13 +------------ R/man/prl_fictitious_multipleB.Rd | 13 +------------ R/man/prl_fictitious_rp.Rd | 13 +------------ R/man/prl_fictitious_rp_woa.Rd | 13 +------------ R/man/prl_fictitious_woa.Rd | 13 +------------ R/man/prl_rp.Rd | 13 +------------ R/man/prl_rp_multipleB.Rd | 13 +------------ R/man/pst_gainloss_Q.Rd | 13 +------------ R/man/ra_noLA.Rd | 13 +------------ R/man/ra_noRA.Rd | 13 +------------ R/man/ra_prospect.Rd | 13 +------------ R/man/rdt_happiness.Rd | 13 +------------ R/man/ug_bayes.Rd | 13 +------------ R/man/ug_delta.Rd | 13 +------------ R/man/wcs_sql.Rd | 13 +------------ 39 files changed, 41 insertions(+), 446 deletions(-) create mode 100644 R/.gitignore diff --git a/R/.gitignore b/R/.gitignore new file mode 100644 index 00000000..01dcbd20 --- /dev/null +++ b/R/.gitignore @@ -0,0 +1,2 @@ +hBayesDM*.tar.gz +hBayesDM.Rcheck/ diff --git a/R/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R index 90993a18..8073fbd3 100644 --- a/R/man-roxygen/model-documentation.R +++ b/R/man-roxygen/model-documentation.R @@ -52,7 +52,7 @@ #' <% AA_T <- paste0(AA_T1, AA_T2) %> #' <% AA_F <- "For this model, there is no model-specific argument." %> #' <%= ifelse(as.integer(LENGTH_ADDITIONAL_ARGS) > 0, AA_T, AA_F) %> -#' \describe{ +#' <%= ifelse(as.integer(LENGTH_ADDITIONAL_ARGS) > 0, "\\describe{", "") %> #' <%= get0("ADDITIONAL_ARGS_1") %> #' <%= get0("ADDITIONAL_ARGS_2") %> #' <%= get0("ADDITIONAL_ARGS_3") %> @@ -62,7 +62,7 @@ #' <%= get0("ADDITIONAL_ARGS_7") %> #' <%= get0("ADDITIONAL_ARGS_8") %> #' <%= get0("ADDITIONAL_ARGS_9") %> -#' } +#' <%= ifelse(as.integer(LENGTH_ADDITIONAL_ARGS) > 0, "}", "") %> #' #' @return A class "hBayesDM" object \code{modelData} with the following components: #' \describe{ diff --git a/R/man/bandit2arm_delta.Rd b/R/man/bandit2arm_delta.Rd index 79f40bd8..54cd1cf6 100644 --- a/R/man/bandit2arm_delta.Rd +++ b/R/man/bandit2arm_delta.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm2_kalman_filter.Rd b/R/man/bandit4arm2_kalman_filter.Rd index a18e15ae..2d57e414 100644 --- a/R/man/bandit4arm2_kalman_filter.Rd +++ b/R/man/bandit4arm2_kalman_filter.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm_2par_lapse.Rd b/R/man/bandit4arm_2par_lapse.Rd index 48ee7239..9d414aa0 100644 --- a/R/man/bandit4arm_2par_lapse.Rd +++ b/R/man/bandit4arm_2par_lapse.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm_4par.Rd b/R/man/bandit4arm_4par.Rd index 559f21e7..1575f987 100644 --- a/R/man/bandit4arm_4par.Rd +++ b/R/man/bandit4arm_4par.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm_lapse.Rd b/R/man/bandit4arm_lapse.Rd index 91cf16bf..7cbbcea2 100644 --- a/R/man/bandit4arm_lapse.Rd +++ b/R/man/bandit4arm_lapse.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm_lapse_decay.Rd b/R/man/bandit4arm_lapse_decay.Rd index 7b916d58..47980565 100644 --- a/R/man/bandit4arm_lapse_decay.Rd +++ b/R/man/bandit4arm_lapse_decay.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bandit4arm_singleA_lapse.Rd b/R/man/bandit4arm_singleA_lapse.Rd index 7441b8b5..8aeac789 100644 --- a/R/man/bandit4arm_singleA_lapse.Rd +++ b/R/man/bandit4arm_singleA_lapse.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. 
By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/bart_par4.Rd b/R/man/bart_par4.Rd index e4250a67..086f3548 100644 --- a/R/man/bart_par4.Rd +++ b/R/man/bart_par4.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/cra_exp.Rd b/R/man/cra_exp.Rd index a6a0e6da..f4fe81c2 100644 --- a/R/man/cra_exp.Rd +++ b/R/man/cra_exp.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/cra_linear.Rd b/R/man/cra_linear.Rd index ba904d41..9f8dbc7c 100644 --- a/R/man/cra_linear.Rd +++ b/R/man/cra_linear.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dbdm_prob_weight.Rd b/R/man/dbdm_prob_weight.Rd index 7707496c..9a151a92 100644 --- a/R/man/dbdm_prob_weight.Rd +++ b/R/man/dbdm_prob_weight.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dd_cs.Rd b/R/man/dd_cs.Rd index f77c2f04..2048694c 100644 --- a/R/man/dd_cs.Rd +++ b/R/man/dd_cs.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dd_cs_single.Rd b/R/man/dd_cs_single.Rd index caf7755d..4726f175 100644 --- a/R/man/dd_cs_single.Rd +++ b/R/man/dd_cs_single.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dd_exp.Rd b/R/man/dd_exp.Rd index 4faf252d..740d963a 100644 --- a/R/man/dd_exp.Rd +++ b/R/man/dd_exp.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dd_hyperbolic.Rd b/R/man/dd_hyperbolic.Rd index 497b02c5..617187c4 100644 --- a/R/man/dd_hyperbolic.Rd +++ b/R/man/dd_hyperbolic.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/dd_hyperbolic_single.Rd b/R/man/dd_hyperbolic_single.Rd index cf5bb7ff..aef3c683 100644 --- a/R/man/dd_hyperbolic_single.Rd +++ b/R/man/dd_hyperbolic_single.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/gng_m1.Rd b/R/man/gng_m1.Rd index 820b090d..3ce5edfa 100644 --- a/R/man/gng_m1.Rd +++ b/R/man/gng_m1.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/gng_m2.Rd b/R/man/gng_m2.Rd index 67e8e7d7..6e61bcbe 100644 --- a/R/man/gng_m2.Rd +++ b/R/man/gng_m2.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/gng_m3.Rd b/R/man/gng_m3.Rd index 024ee75b..0d8b59ca 100644 --- a/R/man/gng_m3.Rd +++ b/R/man/gng_m3.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/gng_m4.Rd b/R/man/gng_m4.Rd index eb0821fc..3d35d4ea 100644 --- a/R/man/gng_m4.Rd +++ b/R/man/gng_m4.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/peer_ocu.Rd b/R/man/peer_ocu.Rd index 0c3d2c5f..2ff4a582 100644 --- a/R/man/peer_ocu.Rd +++ b/R/man/peer_ocu.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_ewa.Rd b/R/man/prl_ewa.Rd index a0d525db..be7d8222 100644 --- a/R/man/prl_ewa.Rd +++ b/R/man/prl_ewa.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_fictitious.Rd b/R/man/prl_fictitious.Rd index 43943daa..5b62e2a8 100644 --- a/R/man/prl_fictitious.Rd +++ b/R/man/prl_fictitious.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_fictitious_multipleB.Rd b/R/man/prl_fictitious_multipleB.Rd index 02110281..0d70365b 100644 --- a/R/man/prl_fictitious_multipleB.Rd +++ b/R/man/prl_fictitious_multipleB.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_fictitious_rp.Rd b/R/man/prl_fictitious_rp.Rd index 808c7df4..6942fbe4 100644 --- a/R/man/prl_fictitious_rp.Rd +++ b/R/man/prl_fictitious_rp.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_fictitious_rp_woa.Rd b/R/man/prl_fictitious_rp_woa.Rd index 949eab87..9a22fa46 100644 --- a/R/man/prl_fictitious_rp_woa.Rd +++ b/R/man/prl_fictitious_rp_woa.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_fictitious_woa.Rd b/R/man/prl_fictitious_woa.Rd index dbdb5dc3..3689e3cd 100644 --- a/R/man/prl_fictitious_woa.Rd +++ b/R/man/prl_fictitious_woa.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_rp.Rd b/R/man/prl_rp.Rd index e0cc6ac4..cd568712 100644 --- a/R/man/prl_rp.Rd +++ b/R/man/prl_rp.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/prl_rp_multipleB.Rd b/R/man/prl_rp_multipleB.Rd index cb884a10..3cedc4f9 100644 --- a/R/man/prl_rp_multipleB.Rd +++ b/R/man/prl_rp_multipleB.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/pst_gainloss_Q.Rd b/R/man/pst_gainloss_Q.Rd index 0872d260..d7d51e19 100644 --- a/R/man/pst_gainloss_Q.Rd +++ b/R/man/pst_gainloss_Q.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/ra_noLA.Rd b/R/man/ra_noLA.Rd index 79fae7b0..829f701d 100644 --- a/R/man/ra_noLA.Rd +++ b/R/man/ra_noLA.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/ra_noRA.Rd b/R/man/ra_noRA.Rd index 53d5bbf3..0c56c482 100644 --- a/R/man/ra_noRA.Rd +++ b/R/man/ra_noRA.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/ra_prospect.Rd b/R/man/ra_prospect.Rd index 2930cb0c..9e62c63d 100644 --- a/R/man/ra_prospect.Rd +++ b/R/man/ra_prospect.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/rdt_happiness.Rd b/R/man/rdt_happiness.Rd index fd486c02..f26afe6f 100644 --- a/R/man/rdt_happiness.Rd +++ b/R/man/rdt_happiness.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/ug_bayes.Rd b/R/man/ug_bayes.Rd index 8d01105a..72fd2768 100644 --- a/R/man/ug_bayes.Rd +++ b/R/man/ug_bayes.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/ug_delta.Rd b/R/man/ug_delta.Rd index 830cf93c..5e31111b 100644 --- a/R/man/ug_delta.Rd +++ b/R/man/ug_delta.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. -\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: diff --git a/R/man/wcs_sql.Rd b/R/man/wcs_sql.Rd index d9834975..0175135f 100644 --- a/R/man/wcs_sql.Rd +++ b/R/man/wcs_sql.Rd @@ -57,18 +57,7 @@ on each new iteration. See \bold{Details} below.} \item{choose_data}{Whether to choose data with an interactive window. By default, set to \code{FALSE}.} -\item{...}{For this model, there is no model-specific argument. 
-\describe{ - - - - - - - - - -}} +\item{...}{For this model, there is no model-specific argument.} } \value{ A class "hBayesDM" object \code{modelData} with the following components: From 3f166f3cbf96e261bf77e18c09b2599fc0bafcc5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 00:02:46 +0900 Subject: [PATCH 070/163] Install testthat --- travis/setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/setup.sh b/travis/setup.sh index 23d10fac..222a2d5a 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -26,7 +26,7 @@ if [ "$TARGET" = "R" ]; then # Install R packages Rscript \ - -e 'install.packages(c("devtools", "roxygen2", "covr"), quiet = T, repos = "https://cran.rstudio.com")' \ + -e 'install.packages(c("devtools", "roxygen2", "testthat", "covr"), quiet = T, repos = "https://cran.rstudio.com")' \ -e 'devtools::install_deps(dep = T, quiet = T)' # Setup codes for Python From 46a0582021d82d4e85422aae07a6f6d1da497626 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 01:25:19 +0900 Subject: [PATCH 071/163] Set as CRAN --- travis/setup.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/travis/setup.sh b/travis/setup.sh index 222a2d5a..9ba855c5 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -5,7 +5,6 @@ if [ "$TARGET" = "R" ]; then export R_LIBS_USER=~/R/Library export R_LIBS_SITE=/usr/local/lib/R/site-library:/usr/lib/R/site-library export _R_CHECK_CRAN_INCOMING_=false - export NOT_CRAN=true export R_PROFILE=~/.Rprofile.site # Add CRAN as an APT source From 516e8959a6d9c65abf54f1a86f8b97b853809643 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 02:49:11 +0900 Subject: [PATCH 072/163] Update documentations --- Python/docs/requirements.txt | 13 ++++++++++++ Python/docs/source/conf.py | 32 ++++++++++++++++++++++-------- Python/docs/source/diagnostics.rst | 4 ++++ Python/docs/source/index.rst | 8 ++------ Python/docs/source/models.rst | 4 ++++ 5 files changed, 47 insertions(+), 14 deletions(-) create mode 100644 Python/docs/requirements.txt create mode 100644 Python/docs/source/diagnostics.rst create mode 100644 Python/docs/source/models.rst diff --git a/Python/docs/requirements.txt b/Python/docs/requirements.txt new file mode 100644 index 00000000..75e8bda8 --- /dev/null +++ b/Python/docs/requirements.txt @@ -0,0 +1,13 @@ +./Python/. +arviz +flake8 +matplotlib +numpy +pandas +pylint +pystan +pytest +scipy +sphinx +sphinx-autodoc-typehints +sphinx-rtd-theme diff --git a/Python/docs/source/conf.py b/Python/docs/source/conf.py index deef2b62..154dd6ec 100644 --- a/Python/docs/source/conf.py +++ b/Python/docs/source/conf.py @@ -21,8 +21,8 @@ # -- Project information ----------------------------------------------------- project = 'hBayesDM' -copyright = '2019, hBayesDM Developers' -author = 'hBayesDM Developers' +copyright = '2019, hBayesDM developers' +author = 'hBayesDM developers' # The short X.Y version version = hbayesdm.version.version @@ -40,9 +40,8 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ + 'sphinx.ext.napoleon', 'sphinx.ext.autodoc', - 'sphinx.ext.doctest', - 'sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.githubpages', ] @@ -181,7 +180,24 @@ # -- Extension configuration ------------------------------------------------- -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. 
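The autodoc/napoleon settings that replace the todo options just below (`napoleon_numpy_docstring = True`, `autodoc_typehints = 'none'`, and so on) make Sphinx render NumPy-style docstrings for the package's functions. A docstring in that style — the name matches `hbayesdm.hdi` from the new docs pages, but the signature shown here is illustrative only:

```python
def hdi(x, credible_interval=0.94):
    """Highest density interval of posterior samples.

    Parameters
    ----------
    x : array_like
        Posterior samples of a single parameter.
    credible_interval : float
        Total probability mass the interval should contain.

    Returns
    -------
    tuple
        Lower and upper bounds of the interval.
    """
```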
-todo_include_todos = True +# Autodoc settings +autoclass_content = 'both' +autodoc_member_order = 'bysource' +autodoc_default_options = { + 'members': True, + 'undoc-members': True, + 'show-inheritance': False +} +autodoc_typehints = 'none' + +# Napoleon settings +napoleon_numpy_docstring = True +napoleon_include_init_with_doc = False +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = False +napoleon_use_rtype = False diff --git a/Python/docs/source/diagnostics.rst b/Python/docs/source/diagnostics.rst new file mode 100644 index 00000000..81843b06 --- /dev/null +++ b/Python/docs/source/diagnostics.rst @@ -0,0 +1,4 @@ +Diagnostics (:mod:`hbayesdm.diagnostics`) +========================================= + +.. automodule:: hbayesdm.diagnostics diff --git a/Python/docs/source/index.rst b/Python/docs/source/index.rst index 7330aea7..5077ba19 100644 --- a/Python/docs/source/index.rst +++ b/Python/docs/source/index.rst @@ -1,15 +1,11 @@ -.. hBayesDM documentation master file, created by - sphinx-quickstart on Thu Feb 14 10:06:38 2019. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - .. include:: ../../README.rst .. toctree:: :maxdepth: 2 :caption: Contents: - + models.rst + diagnostics.rst Indices and tables ------------------ diff --git a/Python/docs/source/models.rst b/Python/docs/source/models.rst new file mode 100644 index 00000000..12c21b8f --- /dev/null +++ b/Python/docs/source/models.rst @@ -0,0 +1,4 @@ +Models (:py:mod:`hbayesdm.models`) +================================== + +.. automodule:: hbayesdm.models From 7c6ccbce4e58413612b1130cda23586d5e957b70 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 02:51:38 +0900 Subject: [PATCH 073/163] Update READMEs --- Python/README.rst | 15 ------ R/README.Rmd | 2 +- R/README.md | 2 +- README.md | 133 ++-------------------------------------------- 4 files changed, 7 insertions(+), 145 deletions(-) diff --git a/Python/README.rst b/Python/README.rst index 6a749503..cf9264d2 100644 --- a/Python/README.rst +++ b/Python/README.rst @@ -38,18 +38,3 @@ If you want to install from source (by cloning from GitHub): cd Python python setup.py install # Install from source - -If you want to create a virtual environment using `pipenv`_: - -.. _pipenv: https://pipenv.readthedocs.io/en/latest/ - -.. 
code:: bash - - git clone https://github.com/CCS-Lab/hBayesDM.git - cd hBayesDM - cd Python - - pipenv install --skip-lock # Install using pipenv - # or - pipenv install --dev --skip-lock # For developmental purposes - diff --git a/R/README.Rmd b/R/README.Rmd index 2d71b920..59334f72 100644 --- a/R/README.Rmd +++ b/R/README.Rmd @@ -2,7 +2,7 @@ output: github_document --- -# hBayesDM +# hBayesDM-R [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) [![Build Status](https://travis-ci.org/CCS-Lab/hBayesDM.svg?branch=master)](https://travis-ci.org/CCS-Lab/hBayesDM) diff --git a/R/README.md b/R/README.md index 203a5bfd..159bfeee 100644 --- a/R/README.md +++ b/R/README.md @@ -1,5 +1,5 @@ -# hBayesDM +# hBayesDM-R [![Project Status: Active – The project has reached a stable, usable state and is being actively diff --git a/README.md b/README.md index 0842f705..ef092b56 100644 --- a/README.md +++ b/README.md @@ -7,147 +7,24 @@ [![Downloads](https://cranlogs.r-pkg.org/badges/grand-total/hBayesDM)](https://cran.r-project.org/web/packages/hBayesDM/index.html) [![DOI](https://zenodo.org/badge/doi/10.1162/CPSY_a_00002.svg)](https://doi.org/10.1162/CPSY_a_00002) -#### Now supporting *R* and *python*! - **hBayesDM** (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference. -Please see the respective sections below for installing hBayesDM with R/python. - -## Getting Started - R - -### Prerequisite - -To install hBayesDM for R, **[RStan][rstan] needs to be properly installed before you proceed**. -For detailed instructions on having RStan ready prior to installing hBayesDM, please go to this link: -https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started - -[rstan]: https://github.com/stan-dev/rstan - -### Installation - -The lastest **stable** version of hBayesDM can be installed from CRAN by running the following command in R: - -```r -install.packages("hBayesDM") # Install hBayesDM from CRAN -``` - -or you can also install from GitHub with: - -```r -# `devtools` is required to install hBayesDM from GitHub -if (!require(devtools)) install.packages("devtools") - -devtools::install_github("CCS-Lab/hBayesDM/R") -``` - -If you want to use the lastest *development* version of hBayesDM, run the following in R: - -```r -# `devtools` is required to install hBayesDM from GitHub -if (!require(devtools)) install.packages("devtools") - -devtools::install_github("CCS-Lab/hBayesDM/R@develop") -``` - -#### Building at once - -By default, you will have to wait for compilation when you run each model for the first time. -If you plan on runnning several different models and want to pre-build all models during installation time, -set an environment variable `BUILD_ALL` to `true`, like the following. -We highly recommend you only do so when you have multiple cores available, -since building all models at once takes quite a long time to complete. 
- -```r -Sys.setenv(BUILD_ALL = "true") # Build *all* models at installation time -Sys.setenv(MAKEFLAGS = "-j 4") # Use 4 cores for build (or any other number you want) - -install.packages("hBayesDM") # Install from CRAN -# or -devtools::install_github("CCS-Lab/hBayesDM/R") # Install from GitHub -``` - -## Getting Started - Python - -**hBayesDM-py** supports Python 3.5 or higher. It requires several packages including: -[NumPy][numpy], [SciPy][scipy], [Pandas][pandas], [PyStan][pystan], [Matplotlib][matplotlib], [ArviZ][arviz]. -*(But there's no need to pre-install anything as pip handles all the requirements for us.)* - -[numpy]: https://www.numpy.org/ -[scipy]: https://www.scipy.org/ -[pandas]: https://pandas.pydata.org/ -[pystan]: https://github.com/stan-dev/pystan -[matplotlib]: https://matplotlib.org/ -[arviz]: https://arviz-devs.github.io/arviz/ - -### Installation - -You can install the latest **stable** version of `hbayesdm` from PyPI, through the following command: - -```sh -pip install hbayesdm # Install from PyPI -``` - -or if you want to install from source, by cloning the repo from GitHub: - -```sh -git clone https://github.com/CCS-Lab/hBayesDM.git # Clone repo -cd hBayesDM # Move into repo -cd Python # Move into Python subdirectory - -python setup.py install # Install hbayesdm from source -``` - -or if you want to install the latest *development* version of `hbayesdm`: - -```sh -git clone https://github.com/CCS-Lab/hBayesDM.git # Clone repo -cd hBayesDM # Move into repo -git checkout develop # Checkout develop branch -cd Python # Move into Python subdirectory - -python setup.py install # Install hbayesdm *develop* version from source -``` - -If you want to create a virtual environment using [`pipenv`](https://pipenv.readthedocs.io/en/latest/) -while installing `hbayesdm`: - -```sh -git clone https://github.com/CCS-Lab/hBayesDM.git # Clone repo -cd hBayesDM # Move into repo -cd Python # Move into Python subdirectory - -pipenv install --skip-lock # Install hbayesdm inside pipenv -``` - -**[For contributors]** You can also install all dependencies (including dev) of `hbayesdm`: - -```sh -git clone https://github.com/CCS-Lab/hBayesDM.git # Clone repo -cd hBayesDM # Move into repo -cd Python # Move into Python subdirectory - -pipenv install --dev --skip-lock # For developmental purpose -``` - -BTW, we encourage you try out [`pipenv`](https://pipenv.readthedocs.io/en/latest/), a well-documented, rich, high-level virtual environment wrapper for python & pip. +Now, **hBayesDM** supports both [R](./R) and [Python](./Python)! ## Quick Links -- **Tutorial-R**: http://rpubs.com/CCSL/hBayesDM -- **Tutorial-py**: *...on its way...* +- **Tutorial**: http://rpubs.com/CCSL/hBayesDM (R) - **Mailing list**: https://groups.google.com/forum/#!forum/hbayesdm-users - **Bug reports**: https://github.com/CCS-Lab/hBayesDM/issues - **Contributing**: See the [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository. ## Citation -If you used hBayesDM or some of its codes for your research, please cite this paper: - -> Ahn, W.-Y., Haines, N., & Zhang, L. (2017). Revealing neuro-computational mechanisms of reinforcement learning and decision-making with the hBayesDM package. Computational Psychiatry, 1, 24-57. doi:10.1162/CPSY_a_00002. 
+If you used hBayesDM or some of its codes for your research, please cite [this paper][paper]: -or using BibTeX: +[paper]: https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002 -```bibtex +``` bibtex @article{hBayesDM, title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package}, author = {Ahn, Woo-Young and Haines, Nathaniel and Zhang, Lei}, From d3ef59553dedb797ced8a22986da5fe2d8a6cf2c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:04:56 +0900 Subject: [PATCH 074/163] Update documentation structure --- Python/docs/Makefile | 18 +-- Python/docs/conf.py | 88 ++++++++++++ Python/docs/diagnostics.rst | 8 ++ Python/docs/{source => }/index.rst | 3 +- Python/docs/make.bat | 35 +++++ Python/docs/{source => }/models.rst | 0 Python/docs/source/conf.py | 203 ---------------------------- Python/docs/source/diagnostics.rst | 4 - 8 files changed, 139 insertions(+), 220 deletions(-) create mode 100644 Python/docs/conf.py create mode 100644 Python/docs/diagnostics.rst rename Python/docs/{source => }/index.rst (74%) create mode 100644 Python/docs/make.bat rename Python/docs/{source => }/models.rst (100%) delete mode 100644 Python/docs/source/conf.py delete mode 100644 Python/docs/source/diagnostics.rst diff --git a/Python/docs/Makefile b/Python/docs/Makefile index 783edd8f..d4bb2cbb 100644 --- a/Python/docs/Makefile +++ b/Python/docs/Makefile @@ -1,22 +1,18 @@ # Minimal makefile for Sphinx documentation # -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXAUTOBUILD = sphinx-autobuild -SOURCEDIR = source -BUILDDIR = build -PACKAGEDIR = ../hbayesdm +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -livehtml: - @$(SPHINXAUTOBUILD) -b html -z "$(PACKAGEDIR)" "$(SOURCEDIR)" "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile livehtml +.PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). diff --git a/Python/docs/conf.py b/Python/docs/conf.py new file mode 100644 index 00000000..cff1e88a --- /dev/null +++ b/Python/docs/conf.py @@ -0,0 +1,88 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + +import hbayesdm # noqa: E402 + + +# -- Project information ----------------------------------------------------- + +project = 'hBayesDM' +copyright = '2019, hBayesDM developers' +author = 'hBayesDM developers' + +# The short X.Y version +version = hbayesdm.version.version +# The full version, including alpha/beta/rc tags +release = hbayesdm.version.release + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.napoleon', + 'sphinx.ext.autodoc', + 'sphinx.ext.mathjax', + 'sphinx.ext.githubpages', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +# -- Extension configuration ------------------------------------------------- + +# Autodoc settings +autoclass_content = 'both' +autodoc_member_order = 'bysource' +autodoc_default_options = { + 'members': True, + 'undoc-members': True, + 'show-inheritance': False +} +autodoc_typehints = 'none' + +# Napoleon settings +napoleon_numpy_docstring = True +napoleon_include_init_with_doc = False +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = False +napoleon_use_rtype = False diff --git a/Python/docs/diagnostics.rst b/Python/docs/diagnostics.rst new file mode 100644 index 00000000..75148c67 --- /dev/null +++ b/Python/docs/diagnostics.rst @@ -0,0 +1,8 @@ +Diagnostics (:mod:`hbayesdm.diagnostics`) +========================================= + +.. autofunction:: hbayesdm.rhat +.. autofunction:: hbayesdm.print_fit +.. autofunction:: hbayesdm.hdi +.. autofunction:: hbayesdm.plot_hdi + diff --git a/Python/docs/source/index.rst b/Python/docs/index.rst similarity index 74% rename from Python/docs/source/index.rst rename to Python/docs/index.rst index 5077ba19..7ea126a3 100644 --- a/Python/docs/source/index.rst +++ b/Python/docs/index.rst @@ -1,8 +1,7 @@ -.. include:: ../../README.rst +.. include:: ../README.rst .. toctree:: :maxdepth: 2 - :caption: Contents: models.rst diagnostics.rst diff --git a/Python/docs/make.bat b/Python/docs/make.bat new file mode 100644 index 00000000..2119f510 --- /dev/null +++ b/Python/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. 
+set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/Python/docs/source/models.rst b/Python/docs/models.rst similarity index 100% rename from Python/docs/source/models.rst rename to Python/docs/models.rst diff --git a/Python/docs/source/conf.py b/Python/docs/source/conf.py deleted file mode 100644 index 154dd6ec..00000000 --- a/Python/docs/source/conf.py +++ /dev/null @@ -1,203 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Configuration file for the Sphinx documentation builder. -# -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/master/config - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys -sys.path.insert(0, os.path.abspath('..')) - -import hbayesdm # noqa: E402 - -# -- Project information ----------------------------------------------------- - -project = 'hBayesDM' -copyright = '2019, hBayesDM developers' -author = 'hBayesDM developers' - -# The short X.Y version -version = hbayesdm.version.version -# The full version, including alpha/beta/rc tags -release = hbayesdm.version.release - - -# -- General configuration --------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.napoleon', - 'sphinx.ext.autodoc', - 'sphinx.ext.mathjax', - 'sphinx.ext.githubpages', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = None - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. 
See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {} - - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = 'hBayesDMdoc' - - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'hBayesDM.tex', 'hBayesDM Documentation', - 'hBayesDM Developers', 'manual'), -] - - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'hbayesdm', 'hBayesDM Documentation', - [author], 1) -] - - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'hBayesDM', 'hBayesDM Documentation', - author, 'hBayesDM', 'One line description of project.', - 'Miscellaneous'), -] - - -# -- Options for Epub output ------------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -# -# epub_identifier = '' - -# A unique identification for the text. -# -# epub_uid = '' - -# A list of files that should not be packed into the epub file. 
-epub_exclude_files = ['search.html'] - - -# -- Extension configuration ------------------------------------------------- - -# Autodoc settings -autoclass_content = 'both' -autodoc_member_order = 'bysource' -autodoc_default_options = { - 'members': True, - 'undoc-members': True, - 'show-inheritance': False -} -autodoc_typehints = 'none' - -# Napoleon settings -napoleon_numpy_docstring = True -napoleon_include_init_with_doc = False -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = False -napoleon_use_rtype = False diff --git a/Python/docs/source/diagnostics.rst b/Python/docs/source/diagnostics.rst deleted file mode 100644 index 81843b06..00000000 --- a/Python/docs/source/diagnostics.rst +++ /dev/null @@ -1,4 +0,0 @@ -Diagnostics (:mod:`hbayesdm.diagnostics`) -========================================= - -.. automodule:: hbayesdm.diagnostics From 6f6d91d9685435715506102802256e78940d2061 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:09:48 +0900 Subject: [PATCH 075/163] Update conf.py --- Python/docs/conf.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index cff1e88a..ff822572 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -23,11 +23,6 @@ copyright = '2019, hBayesDM developers' author = 'hBayesDM developers' -# The short X.Y version -version = hbayesdm.version.version -# The full version, including alpha/beta/rc tags -release = hbayesdm.version.release - # -- General configuration --------------------------------------------------- From 74d2048d0cdcb03f6648efcc56cc7e7c59587b8c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:12:57 +0900 Subject: [PATCH 076/163] Update docs/requirements.txt --- Python/docs/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/docs/requirements.txt b/Python/docs/requirements.txt index 75e8bda8..08eadf39 100644 --- a/Python/docs/requirements.txt +++ b/Python/docs/requirements.txt @@ -1,4 +1,4 @@ -./Python/. +-e ./Python/. 
arviz flake8 matplotlib From ec2d4afa9024d10bd6c183d35edc6179d5586177 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:23:02 +0900 Subject: [PATCH 077/163] Use Path on setup.py --- Python/setup.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Python/setup.py b/Python/setup.py index eda026b0..44c04ae0 100644 --- a/Python/setup.py +++ b/Python/setup.py @@ -1,13 +1,17 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 import os import sys import subprocess from setuptools import setup, find_packages +from pathlib import Path if sys.version_info[:2] < (3, 5): raise RuntimeError("Python version >= 3.5 required.") +PATH_ROOT = Path(__file__).absolute().parent + + MAJOR = 0 MINOR = 7 MICRO = 2 @@ -80,7 +84,7 @@ def write_version_py(filename='hbayesdm/version.py'): """ FULLVERSION, GIT_REVISION = get_version_info() - a = open(filename, 'w') + a = open(PATH_ROOT / filename, 'w') try: a.write(cnt % {'version': VERSION, 'full_version': FULLVERSION, From 6939d1f93fb42b12e47faea7ac5ded9ced03dc87 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:25:35 +0900 Subject: [PATCH 078/163] Update docs/requirements.txt --- Python/docs/requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Python/docs/requirements.txt b/Python/docs/requirements.txt index 08eadf39..f53a273e 100644 --- a/Python/docs/requirements.txt +++ b/Python/docs/requirements.txt @@ -1,4 +1,3 @@ --e ./Python/. arviz flake8 matplotlib @@ -10,4 +9,4 @@ pytest scipy sphinx sphinx-autodoc-typehints -sphinx-rtd-theme +./Python From 177044350dc2e3c318940bd8b5b1c50dab867634 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:36:22 +0900 Subject: [PATCH 079/163] Do not use a separate file for versioning --- Python/hbayesdm/__init__.py | 3 +- Python/setup.py | 79 +------------------------------------ 2 files changed, 2 insertions(+), 80 deletions(-) diff --git a/Python/hbayesdm/__init__.py b/Python/hbayesdm/__init__.py index d48d8c2c..1461b292 100644 --- a/Python/hbayesdm/__init__.py +++ b/Python/hbayesdm/__init__.py @@ -1,4 +1,3 @@ -from hbayesdm.version import version as __version__ from hbayesdm.diagnostics import rhat, print_fit, hdi, plot_hdi -__all__ = ['__version__', 'rhat', 'print_fit', 'hdi', 'plot_hdi'] +__all__ = ['rhat', 'print_fit', 'hdi', 'plot_hdi'] diff --git a/Python/setup.py b/Python/setup.py index 44c04ae0..3be7f4b0 100644 --- a/Python/setup.py +++ b/Python/setup.py @@ -17,84 +17,7 @@ MICRO = 2 ISRELEASED = False VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) - - -def git_version(): - """ - Return the git revision as a string - """ - def _minimal_ext_cmd(cmd): - # construct minimal environment - env = {} - for k in ['SYSTEMROOT', 'PATH', 'HOME']: - v = os.environ.get(k) - if v is not None: - env[k] = v - # LANGUAGE is used on win32 - env['LANGUAGE'] = 'C' - env['LANG'] = 'C' - env['LC_ALL'] = 'C' - out = subprocess.Popen( - cmd, stdout=subprocess.PIPE, env=env).communicate()[0] - return out - - try: - out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD']) - GIT_REVISION = out.strip().decode('ascii') - except OSError: - GIT_REVISION = "Unknown" - - return GIT_REVISION - - -def get_version_info(): - # Adding the git rev number needs to be done inside write_version_py(), - # otherwise the import of hbayesdm.version messes up the build under Python 3. 
- FULLVERSION = VERSION - if os.path.exists('.git'): - GIT_REVISION = git_version() - elif os.path.exists('hbayesdm/version.py'): - # must be a source distribution, use existing version file - try: - from hbayesdm.version import git_revision as GIT_REVISION - except ImportError: - raise ImportError("Unable to import git_revision. Try removing " - "hbayesdm/version.py and the build directory " - "before building.") - else: - GIT_REVISION = "Unknown" - - if not ISRELEASED: - # Following the R versioning convention - FULLVERSION += '.9000' - - return FULLVERSION, GIT_REVISION - - -def write_version_py(filename='hbayesdm/version.py'): - cnt = """ -# THIS FILE IS GENERATED FROM HBAYESDM SETUP.PY -short_version = '%(version)s' -version = '%(version)s' -full_version = '%(full_version)s' -git_revision = '%(git_revision)s' -release = %(isrelease)s -if not release: - version = full_version -""" - FULLVERSION, GIT_REVISION = get_version_info() - - a = open(PATH_ROOT / filename, 'w') - try: - a.write(cnt % {'version': VERSION, - 'full_version': FULLVERSION, - 'git_revision': GIT_REVISION, - 'isrelease': str(ISRELEASED)}) - finally: - a.close() - - -write_version_py() +VERSION += '' if ISRELEASED else '.9000' DESC = 'Python interface for hBayesDM, hierarchical Bayesian modeling of RL-DM tasks' From 491a0502704e52ed4caa1e18956fb682f789b2c7 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:40:37 +0900 Subject: [PATCH 080/163] Do not use _hbayesdm_version --- Python/hbayesdm/base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index cd6a9ea7..18e3a8b4 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -12,7 +12,6 @@ import matplotlib.pyplot as plt import arviz as az -from hbayesdm import __version__ as _hbayesdm_version from pystan import __version__ as _pystan_version __all__ = ['TaskModel'] @@ -655,8 +654,7 @@ def _designate_stan_model(self, model: str) -> StanModel: """ stan_files = os.path.join(_common, 'stan_files') model_path = os.path.join(stan_files, model + '.stan') - cache_file = 'cached-%s-hbayesdm=%s-pystan=%s.pkl' % \ - (model, _hbayesdm_version, _pystan_version) + cache_file = 'cached-%s-pystan_%s.pkl' % (model, _pystan_version) if os.path.exists(cache_file): try: From 6ca6e229f2202c64f92a31eb82a143965b1770d4 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:46:02 +0900 Subject: [PATCH 081/163] Update docs --- Python/docs/conf.py | 5 ----- Python/docs/models.rst | 2 ++ 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index ff822572..c2be5955 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -63,11 +63,6 @@ # Autodoc settings autoclass_content = 'both' autodoc_member_order = 'bysource' -autodoc_default_options = { - 'members': True, - 'undoc-members': True, - 'show-inheritance': False -} autodoc_typehints = 'none' # Napoleon settings diff --git a/Python/docs/models.rst b/Python/docs/models.rst index 12c21b8f..a5785dbc 100644 --- a/Python/docs/models.rst +++ b/Python/docs/models.rst @@ -2,3 +2,5 @@ Models (:py:mod:`hbayesdm.models`) ================================== .. 
automodule:: hbayesdm.models + :members: + :no-undoc-members: From 7f5a092f487e1c0bdcb8faf0213b27746bca6c69 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:49:54 +0900 Subject: [PATCH 082/163] Update docs --- Python/docs/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index c2be5955..effadc19 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -44,6 +44,8 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +master_doc = 'index' + # -- Options for HTML output ------------------------------------------------- From c599942183279ded6491543a62a8dafad337e3d7 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:51:57 +0900 Subject: [PATCH 083/163] Update docs --- Python/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index effadc19..bf64e942 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -44,7 +44,7 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -master_doc = 'index' +master_doc = 'index.rst' # -- Options for HTML output ------------------------------------------------- From 1e933766dee405d7f402130f0d90f4b5c7eecbfc Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 03:55:22 +0900 Subject: [PATCH 084/163] Update docs --- Python/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index bf64e942..effadc19 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -44,7 +44,7 @@ # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] -master_doc = 'index.rst' +master_doc = 'index' # -- Options for HTML output ------------------------------------------------- From 084b2b7769d7aa29bb657822093999f9167c758e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 04:02:34 +0900 Subject: [PATCH 085/163] Use ReadTheDocs theme --- Python/docs/conf.py | 5 ++++- Python/docs/requirements.txt | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Python/docs/conf.py b/Python/docs/conf.py index effadc19..7320dce2 100644 --- a/Python/docs/conf.py +++ b/Python/docs/conf.py @@ -10,6 +10,7 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # +import sphinx_rtd_theme import os import sys sys.path.insert(0, os.path.abspath('..')) @@ -52,7 +53,9 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' +html_theme = "sphinx_rtd_theme" + +html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. 
They are copied after the builtin static files,
diff --git a/Python/docs/requirements.txt b/Python/docs/requirements.txt
index f53a273e..291ce917 100644
--- a/Python/docs/requirements.txt
+++ b/Python/docs/requirements.txt
@@ -9,4 +9,5 @@ pytest
 scipy
 sphinx
 sphinx-autodoc-typehints
+sphinx_rtd_theme
 ./Python

From 9ec81f7ed887182e8beb89349e9860acc467b3fc Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Thu, 22 Aug 2019 04:17:54 +0900
Subject: [PATCH 086/163] Update docs for Python

---
 Python/README.rst           | 27 +++++++++++++++++++++++----
 Python/docs/diagnostics.rst |  9 +++------
 Python/docs/index.rst       |  1 -
 Python/docs/models.rst      |  4 ++--
 4 files changed, 28 insertions(+), 13 deletions(-)

diff --git a/Python/README.rst b/Python/README.rst
index cf9264d2..442b17ff 100644
--- a/Python/README.rst
+++ b/Python/README.rst
@@ -11,8 +11,7 @@ decision-making tasks. *hBayesDM* uses `PyStan`_ (Python interface for
 .. _Stan: http://mc-stan.org/
 
 hBayesDM-py supports Python 3.5 or higher. It requires several packages including:
-
-* `NumPy`_, `SciPy`_, `Pandas`_, `PyStan`_, `Matplotlib`_, `ArviZ`_
+`NumPy`_, `SciPy`_, `Pandas`_, `PyStan`_, `Matplotlib`_, and `ArviZ`_.
 
 .. _NumPy: https://www.numpy.org/
 .. _SciPy: https://www.scipy.org/
@@ -27,7 +26,7 @@ You can install hBayesDM-py from PyPI with the following line:
 
 .. code:: bash
 
-   pip install hbayesdm     # Install using pip
+   pip install hbayesdm  # Install using pip
 
 If you want to install from source (by cloning from GitHub):
 
@@ -37,4 +36,24 @@ If you want to install from source (by cloning from GitHub):
    cd hBayesDM
    cd Python
 
-   python setup.py install     # Install from source
+   python setup.py install  # Install from source
+
+Citation
+--------
+
+If you used hBayesDM or its code in your research, please cite `this paper`_:
+
+.. _this paper: https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002
+
+.. code:: bibtex
+
+   @article{hBayesDM,
+     title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package},
+     author = {Ahn, Woo-Young and Haines, Nathaniel and Zhang, Lei},
+     journal = {Computational Psychiatry},
+     year = {2017},
+     volume = {1},
+     pages = {24--57},
+     publisher = {MIT Press},
+     doi = {10.1162/CPSY_a_00002},
+   }
diff --git a/Python/docs/diagnostics.rst b/Python/docs/diagnostics.rst
index 75148c67..e2ce9e6e 100644
--- a/Python/docs/diagnostics.rst
+++ b/Python/docs/diagnostics.rst
@@ -1,8 +1,5 @@
-Diagnostics (:mod:`hbayesdm.diagnostics`)
-=========================================
+Diagnostics
+===========
 
-.. autofunction:: hbayesdm.rhat
-.. autofunction:: hbayesdm.print_fit
-.. autofunction:: hbayesdm.hdi
-.. autofunction:: hbayesdm.plot_hdi
+.. automodule:: hbayesdm.diagnostics
 
diff --git a/Python/docs/index.rst b/Python/docs/index.rst
index 7ea126a3..83528560 100644
--- a/Python/docs/index.rst
+++ b/Python/docs/index.rst
@@ -10,5 +10,4 @@ Indices and tables
 ------------------
 
 * :ref:`genindex`
-* :ref:`modindex`
 * :ref:`search`
diff --git a/Python/docs/models.rst b/Python/docs/models.rst
index a5785dbc..a4c10160 100644
--- a/Python/docs/models.rst
+++ b/Python/docs/models.rst
@@ -1,5 +1,5 @@
-Models (:py:mod:`hbayesdm.models`)
-==================================
+Models
+======
 
 ..
automodule:: hbayesdm.models :members: From e261ccfc8cdad00319934a5dbf069a6703de2199 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 04:31:28 +0900 Subject: [PATCH 087/163] Add .gitignore --- Python/.gitignore | 132 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 132 insertions(+) create mode 100644 Python/.gitignore diff --git a/Python/.gitignore b/Python/.gitignore new file mode 100644 index 00000000..d9545d63 --- /dev/null +++ b/Python/.gitignore @@ -0,0 +1,132 @@ + +# Created by https://www.gitignore.io/api/python +# Edit at https://www.gitignore.io/?templates=python + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# End of https://www.gitignore.io/api/python + From d797a85e1b06860f7733174610ac2144b26926b3 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 04:33:25 +0900 Subject: [PATCH 088/163] Update docs for Python --- Python/docs/diagnostics.rst | 2 ++ Python/docs/models.rst | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Python/docs/diagnostics.rst b/Python/docs/diagnostics.rst index e2ce9e6e..959482bc 100644 --- a/Python/docs/diagnostics.rst +++ b/Python/docs/diagnostics.rst @@ -2,4 +2,6 @@ Diagnostics =========== .. automodule:: hbayesdm.diagnostics + :members: + :undoc-members: diff --git a/Python/docs/models.rst b/Python/docs/models.rst index a4c10160..bfe002e0 100644 --- a/Python/docs/models.rst +++ b/Python/docs/models.rst @@ -3,4 +3,4 @@ Models .. 
automodule:: hbayesdm.models :members: - :no-undoc-members: + :undoc-members: From 360b8b8f12e3d54ec9b5b31d6f803564c6803f6e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 04:35:38 +0900 Subject: [PATCH 089/163] Do not generate PDF and ePub --- .readthedocs.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 1fc9bdc8..001c4a00 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -9,11 +9,6 @@ version: 2 sphinx: configuration: Python/docs/conf.py -# Optionally build your docs in additional formats such as PDF and ePub -formats: - - htmlzip - - pdf - # Optionally set the version of Python and requirements required to build your docs python: version: 3.7 From f4e210ab1f825be022280b3be6cdaac2648fa518 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 13:42:38 +0900 Subject: [PATCH 090/163] Convert JSON to YAML --- commons/convert-json-to-yaml.py | 14 ++++ commons/models-yaml/bandit2arm_delta.yml | 38 +++++++++ .../models-yaml/bandit4arm2_kalman_filter.yml | 65 +++++++++++++++ commons/models-yaml/bandit4arm_2par_lapse.yml | 44 +++++++++++ commons/models-yaml/bandit4arm_4par.yml | 50 ++++++++++++ commons/models-yaml/bandit4arm_lapse.yml | 56 +++++++++++++ .../models-yaml/bandit4arm_lapse_decay.yml | 62 +++++++++++++++ .../models-yaml/bandit4arm_singleA_lapse.yml | 50 ++++++++++++ commons/models-yaml/bart_par4.yml | 73 +++++++++++++++++ commons/models-yaml/choiceRT_ddm.yml | 54 +++++++++++++ commons/models-yaml/choiceRT_ddm_single.yml | 54 +++++++++++++ commons/models-yaml/cra_exp.yml | 54 +++++++++++++ commons/models-yaml/cra_linear.yml | 54 +++++++++++++ commons/models-yaml/dbdm_prob_weight.yml | 60 ++++++++++++++ commons/models-yaml/dd_cs.yml | 46 +++++++++++ commons/models-yaml/dd_cs_single.yml | 46 +++++++++++ commons/models-yaml/dd_exp.yml | 40 ++++++++++ commons/models-yaml/dd_hyperbolic.yml | 40 ++++++++++ commons/models-yaml/dd_hyperbolic_single.yml | 40 ++++++++++ commons/models-yaml/gng_m1.yml | 48 +++++++++++ commons/models-yaml/gng_m2.yml | 54 +++++++++++++ commons/models-yaml/gng_m3.yml | 61 ++++++++++++++ commons/models-yaml/gng_m4.yml | 67 ++++++++++++++++ commons/models-yaml/igt_orl.yml | 65 +++++++++++++++ commons/models-yaml/igt_pvl_decay.yml | 55 +++++++++++++ commons/models-yaml/igt_pvl_delta.yml | 55 +++++++++++++ commons/models-yaml/igt_vpp.yml | 79 +++++++++++++++++++ commons/models-yaml/peer_ocu.yml | 52 ++++++++++++ commons/models-yaml/prl_ewa.yml | 55 +++++++++++++ commons/models-yaml/prl_fictitious.yml | 56 +++++++++++++ .../models-yaml/prl_fictitious_multipleB.yml | 57 +++++++++++++ commons/models-yaml/prl_fictitious_rp.yml | 63 +++++++++++++++ commons/models-yaml/prl_fictitious_rp_woa.yml | 57 +++++++++++++ commons/models-yaml/prl_fictitious_woa.yml | 50 ++++++++++++ commons/models-yaml/prl_rp.yml | 54 +++++++++++++ commons/models-yaml/prl_rp_multipleB.yml | 55 +++++++++++++ commons/models-yaml/pst_gainloss_Q.yml | 48 +++++++++++ commons/models-yaml/ra_noLA.yml | 39 +++++++++ commons/models-yaml/ra_noRA.yml | 39 +++++++++ commons/models-yaml/ra_prospect.yml | 45 +++++++++++ commons/models-yaml/rdt_happiness.yml | 71 +++++++++++++++++ commons/models-yaml/ts_par4.yml | 61 ++++++++++++++ commons/models-yaml/ts_par6.yml | 72 +++++++++++++++++ commons/models-yaml/ts_par7.yml | 78 ++++++++++++++++++ commons/models-yaml/ug_bayes.yml | 43 ++++++++++ commons/models-yaml/ug_delta.yml | 43 ++++++++++ commons/models-yaml/wcs_sql.yml | 47 +++++++++++ 47 files changed, 2509 insertions(+) create 
mode 100644 commons/convert-json-to-yaml.py create mode 100644 commons/models-yaml/bandit2arm_delta.yml create mode 100644 commons/models-yaml/bandit4arm2_kalman_filter.yml create mode 100644 commons/models-yaml/bandit4arm_2par_lapse.yml create mode 100644 commons/models-yaml/bandit4arm_4par.yml create mode 100644 commons/models-yaml/bandit4arm_lapse.yml create mode 100644 commons/models-yaml/bandit4arm_lapse_decay.yml create mode 100644 commons/models-yaml/bandit4arm_singleA_lapse.yml create mode 100644 commons/models-yaml/bart_par4.yml create mode 100644 commons/models-yaml/choiceRT_ddm.yml create mode 100644 commons/models-yaml/choiceRT_ddm_single.yml create mode 100644 commons/models-yaml/cra_exp.yml create mode 100644 commons/models-yaml/cra_linear.yml create mode 100644 commons/models-yaml/dbdm_prob_weight.yml create mode 100644 commons/models-yaml/dd_cs.yml create mode 100644 commons/models-yaml/dd_cs_single.yml create mode 100644 commons/models-yaml/dd_exp.yml create mode 100644 commons/models-yaml/dd_hyperbolic.yml create mode 100644 commons/models-yaml/dd_hyperbolic_single.yml create mode 100644 commons/models-yaml/gng_m1.yml create mode 100644 commons/models-yaml/gng_m2.yml create mode 100644 commons/models-yaml/gng_m3.yml create mode 100644 commons/models-yaml/gng_m4.yml create mode 100644 commons/models-yaml/igt_orl.yml create mode 100644 commons/models-yaml/igt_pvl_decay.yml create mode 100644 commons/models-yaml/igt_pvl_delta.yml create mode 100644 commons/models-yaml/igt_vpp.yml create mode 100644 commons/models-yaml/peer_ocu.yml create mode 100644 commons/models-yaml/prl_ewa.yml create mode 100644 commons/models-yaml/prl_fictitious.yml create mode 100644 commons/models-yaml/prl_fictitious_multipleB.yml create mode 100644 commons/models-yaml/prl_fictitious_rp.yml create mode 100644 commons/models-yaml/prl_fictitious_rp_woa.yml create mode 100644 commons/models-yaml/prl_fictitious_woa.yml create mode 100644 commons/models-yaml/prl_rp.yml create mode 100644 commons/models-yaml/prl_rp_multipleB.yml create mode 100644 commons/models-yaml/pst_gainloss_Q.yml create mode 100644 commons/models-yaml/ra_noLA.yml create mode 100644 commons/models-yaml/ra_noRA.yml create mode 100644 commons/models-yaml/ra_prospect.yml create mode 100644 commons/models-yaml/rdt_happiness.yml create mode 100644 commons/models-yaml/ts_par4.yml create mode 100644 commons/models-yaml/ts_par6.yml create mode 100644 commons/models-yaml/ts_par7.yml create mode 100644 commons/models-yaml/ug_bayes.yml create mode 100644 commons/models-yaml/ug_delta.yml create mode 100644 commons/models-yaml/wcs_sql.yml diff --git a/commons/convert-json-to-yaml.py b/commons/convert-json-to-yaml.py new file mode 100644 index 00000000..6fce878f --- /dev/null +++ b/commons/convert-json-to-yaml.py @@ -0,0 +1,14 @@ +import os +from pathlib import Path + +PATH_ROOT = Path(__file__).absolute().parent +PATH_JSON = PATH_ROOT / 'models' +PATH_YAML = PATH_ROOT / 'models-yaml' + +if not PATH_YAML.exists(): + PATH_YAML.mkdir() + +for p_json in PATH_JSON.glob('*.json'): + p_yaml = PATH_YAML / p_json.name.replace('.json', '.yml') + os.system(f'json2yaml {str(p_json)} > {str(p_yaml)}') + print('Done:', p_yaml) diff --git a/commons/models-yaml/bandit2arm_delta.yml b/commons/models-yaml/bandit2arm_delta.yml new file mode 100644 index 00000000..13c97c3d --- /dev/null +++ b/commons/models-yaml/bandit2arm_delta.yml @@ -0,0 +1,38 @@ +--- + task_name: + code: "bandit2arm" + desc: "2-Armed Bandit Task" + cite: + - "Erev, I., Ert, E., Roth, A. 
E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683"
+      - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x"
+  model_name:
+    code: "delta"
+    desc: "Rescorla-Wagner (Delta) Model"
+    cite: ""
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes:
+  contributors:
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Integer value representing the option chosen on the given trial: 1 or 2."
+    outcome: "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)."
+  parameters:
+    A:
+      desc: "learning rate"
+      info:
+        - 0
+        - 0.5
+        - 1
+    tau:
+      desc: "inverse temperature"
+      info:
+        - 0
+        - 1
+        - 5
+  regressors:
+  postpreds:
+    - "y_pred"
+  additional_args:
+
diff --git a/commons/models-yaml/bandit4arm2_kalman_filter.yml b/commons/models-yaml/bandit4arm2_kalman_filter.yml
new file mode 100644
index 00000000..7060b923
--- /dev/null
+++ b/commons/models-yaml/bandit4arm2_kalman_filter.yml
@@ -0,0 +1,65 @@
+---
+  task_name:
+    code: "bandit4arm2"
+    desc: "4-Armed Bandit Task (modified)"
+    cite: []
+  model_name:
+    code: "kalman_filter"
+    desc: "Kalman Filter"
+    cite:
+      - "Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879."
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors:
+    -
+      name: "Yoonseo Zoh"
+      email: "zohyos7@gmail.com"
+      link: "https://zohyos7.github.io"
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4."
+    outcome: "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)."
+  parameters:
+    lambda:
+      desc: "decay factor"
+      info:
+        - 0
+        - 0.9
+        - 1
+    theta:
+      desc: "decay center"
+      info:
+        - 0
+        - 50
+        - 100
+    beta:
+      desc: "inverse softmax temperature"
+      info:
+        - 0
+        - 0.1
+        - 1
+    mu0:
+      desc: "anticipated initial mean of all 4 options"
+      info:
+        - 0
+        - 85
+        - 100
+    sigma0:
+      desc: "anticipated initial sd (uncertainty factor) of all 4 options"
+      info:
+        - 0
+        - 6
+        - 15
+    sigmaD:
+      desc: "sd of diffusion noise"
+      info:
+        - 0
+        - 3
+        - 15
+  regressors: {}
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/bandit4arm_2par_lapse.yml b/commons/models-yaml/bandit4arm_2par_lapse.yml
new file mode 100644
index 00000000..2e3b6f6d
--- /dev/null
+++ b/commons/models-yaml/bandit4arm_2par_lapse.yml
@@ -0,0 +1,44 @@
+---
+  task_name:
+    code: "bandit4arm"
+    desc: "4-Armed Bandit Task"
+    cite: []
+  model_name:
+    code: "2par_lapse"
+    desc: "3 Parameter Model, without C (choice perseveration), R (reward sensitivity), or P (punishment sensitivity), but with xi (noise)"
+    cite:
+      - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m"
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors: []
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+ choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." + gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." + parameters: + Arew: + desc: "reward learning rate" + info: + - 0 + - 0.1 + - 1 + Apun: + desc: "punishment learning rate" + info: + - 0 + - 0.1 + - 1 + xi: + desc: "noise" + info: + - 0 + - 0.1 + - 1 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/bandit4arm_4par.yml b/commons/models-yaml/bandit4arm_4par.yml new file mode 100644 index 00000000..aebcc42f --- /dev/null +++ b/commons/models-yaml/bandit4arm_4par.yml @@ -0,0 +1,50 @@ +--- + task_name: + code: "bandit4arm" + desc: "4-Armed Bandit Task" + cite: [] + model_name: + code: "4par" + desc: "4 Parameter Model, without C (choice perseveration)" + cite: + - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." + gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." + parameters: + Arew: + desc: "reward learning rate" + info: + - 0 + - 0.1 + - 1 + Apun: + desc: "punishment learning rate" + info: + - 0 + - 0.1 + - 1 + R: + desc: "reward sensitivity" + info: + - 0 + - 1 + - 30 + P: + desc: "punishment sensitivity" + info: + - 0 + - 1 + - 30 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/bandit4arm_lapse.yml b/commons/models-yaml/bandit4arm_lapse.yml new file mode 100644 index 00000000..fd672974 --- /dev/null +++ b/commons/models-yaml/bandit4arm_lapse.yml @@ -0,0 +1,56 @@ +--- + task_name: + code: "bandit4arm" + desc: "4-Armed Bandit Task" + cite: [] + model_name: + code: "lapse" + desc: "5 Parameter Model, without C (choice perseveration) but with xi (noise)" + cite: + - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." + gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." 
+  parameters:
+    Arew:
+      desc: "reward learning rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+    Apun:
+      desc: "punishment learning rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+    R:
+      desc: "reward sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    P:
+      desc: "punishment sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    xi:
+      desc: "noise"
+      info:
+        - 0
+        - 0.1
+        - 1
+  regressors: {}
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/bandit4arm_lapse_decay.yml b/commons/models-yaml/bandit4arm_lapse_decay.yml
new file mode 100644
index 00000000..224d3d64
--- /dev/null
+++ b/commons/models-yaml/bandit4arm_lapse_decay.yml
@@ -0,0 +1,62 @@
+---
+  task_name:
+    code: "bandit4arm"
+    desc: "4-Armed Bandit Task"
+    cite: []
+  model_name:
+    code: "lapse_decay"
+    desc: "5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro)."
+    cite:
+      - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m"
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors: []
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4."
+    gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)."
+    loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)."
+  parameters:
+    Arew:
+      desc: "reward learning rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+    Apun:
+      desc: "punishment learning rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+    R:
+      desc: "reward sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    P:
+      desc: "punishment sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    xi:
+      desc: "noise"
+      info:
+        - 0
+        - 0.1
+        - 1
+    d:
+      desc: "decay rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+  regressors: {}
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/bandit4arm_singleA_lapse.yml b/commons/models-yaml/bandit4arm_singleA_lapse.yml
new file mode 100644
index 00000000..eb88a2a9
--- /dev/null
+++ b/commons/models-yaml/bandit4arm_singleA_lapse.yml
@@ -0,0 +1,50 @@
+---
+  task_name:
+    code: "bandit4arm"
+    desc: "4-Armed Bandit Task"
+    cite: []
+  model_name:
+    code: "singleA_lapse"
+    desc: "4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate for both R and P."
+    cite:
+      - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m"
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors: []
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4."
+    gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)."
+    loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)."
+  parameters:
+    A:
+      desc: "learning rate"
+      info:
+        - 0
+        - 0.1
+        - 1
+    R:
+      desc: "reward sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    P:
+      desc: "punishment sensitivity"
+      info:
+        - 0
+        - 1
+        - 30
+    xi:
+      desc: "noise"
+      info:
+        - 0
+        - 0.1
+        - 1
+  regressors: {}
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/bart_par4.yml b/commons/models-yaml/bart_par4.yml
new file mode 100644
index 00000000..ca5e3328
--- /dev/null
+++ b/commons/models-yaml/bart_par4.yml
@@ -0,0 +1,73 @@
+---
+  task_name:
+    code: "bart"
+    desc: "Balloon Analogue Risk Task"
+    cite: []
+  model_name:
+    code: "par4"
+    desc: "Re-parameterized version of BART model with 4 parameters"
+    cite:
+      - "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105."
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors:
+    -
+      name: "Harhim Park"
+      email: "hrpark12@gmail.com"
+      link: "https://ccs-lab.github.io/team/harhim-park/"
+    -
+      name: "Jaeyeong Yang"
+      email: "jaeyeong.yang1125@gmail.com"
+      link: "https://ccs-lab.github.io/team/jaeyeong-yang/"
+    -
+      name: "Ayoung Lee"
+      email: "aylee2008@naver.com"
+      link: "https://ccs-lab.github.io/team/ayoung-lee/"
+    -
+      name: "Jeongbin Oh"
+      email: "ows0104@gmail.com"
+      link: "https://ccs-lab.github.io/team/jeongbin-oh/"
+    -
+      name: "Jiyoon Lee"
+      email: "nicole.lee2001@gmail.com"
+      link: "https://ccs-lab.github.io/team/jiyoon-lee/"
+    -
+      name: "Junha Jang"
+      email: "andy627robo@naver.com"
+      link: "https://ccs-lab.github.io/team/junha-jang/"
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    pumps: "The number of pumps."
+    explosion: "0: intact, 1: burst"
+  parameters:
+    phi:
+      desc: "prior belief of balloon not bursting"
+      info:
+        - 0
+        - 0.5
+        - 1
+    eta:
+      desc: "updating rate"
+      info:
+        - 0
+        - 1
+        - "Inf"
+    gam:
+      desc: "risk-taking parameter"
+      info:
+        - 0
+        - 1
+        - "Inf"
+    tau:
+      desc: "inverse temperature"
+      info:
+        - 0
+        - 1
+        - "Inf"
+  regressors: {}
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/choiceRT_ddm.yml b/commons/models-yaml/choiceRT_ddm.yml
new file mode 100644
index 00000000..ab50e0a4
--- /dev/null
+++ b/commons/models-yaml/choiceRT_ddm.yml
@@ -0,0 +1,54 @@
+---
+  task_name:
+    code: "choiceRT"
+    desc: "Choice Reaction Time Task"
+    cite: []
+  model_name:
+    code: "ddm"
+    desc: "Drift Diffusion Model"
+    cite:
+      - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59"
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes:
+    - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters."
+    - "Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list."
+  contributors: []
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2)."
+    RT: "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)."
+  parameters:
+    alpha:
+      desc: "boundary separation"
+      info:
+        - 0
+        - 0.5
+        - "Inf"
+    beta:
+      desc: "bias"
+      info:
+        - 0
+        - 0.5
+        - 1
+    delta:
+      desc: "drift rate"
+      info:
+        - 0
+        - 0.5
+        - "Inf"
+    tau:
+      desc: "non-decision time"
+      info:
+        - 0
+        - 0.15
+        - 1
+  regressors: {}
+  postpreds: []
+  additional_args:
+    -
+      code: "RTbound"
+      default: 0.1
+      desc: "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)."
+
diff --git a/commons/models-yaml/choiceRT_ddm_single.yml b/commons/models-yaml/choiceRT_ddm_single.yml
new file mode 100644
index 00000000..ec103589
--- /dev/null
+++ b/commons/models-yaml/choiceRT_ddm_single.yml
@@ -0,0 +1,54 @@
+---
+  task_name:
+    code: "choiceRT"
+    desc: "Choice Reaction Time Task"
+    cite: []
+  model_name:
+    code: "ddm"
+    desc: "Drift Diffusion Model"
+    cite:
+      - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59"
+  model_type:
+    code: "single"
+    desc: "Individual"
+  notes:
+    - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters."
+    - "Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list."
+  contributors: []
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    choice: "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2)."
+    RT: "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)."
+  parameters:
+    alpha:
+      desc: "boundary separation"
+      info:
+        - null
+        - 0.5
+        - null
+    beta:
+      desc: "bias"
+      info:
+        - null
+        - 0.5
+        - null
+    delta:
+      desc: "drift rate"
+      info:
+        - null
+        - 0.5
+        - null
+    tau:
+      desc: "non-decision time"
+      info:
+        - null
+        - 0.15
+        - null
+  regressors: {}
+  postpreds: []
+  additional_args:
+    -
+      code: "RTbound"
+      default: 0.1
+      desc: "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)."
+
diff --git a/commons/models-yaml/cra_exp.yml b/commons/models-yaml/cra_exp.yml
new file mode 100644
index 00000000..4ca83c27
--- /dev/null
+++ b/commons/models-yaml/cra_exp.yml
@@ -0,0 +1,54 @@
+---
+  task_name:
+    code: "cra"
+    desc: "Choice Under Risk and Ambiguity Task"
+    cite: []
+  model_name:
+    code: "exp"
+    desc: "Exponential Subjective Value Model"
+    cite:
+      - "Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327"
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors:
+    -
+      name: "Jaeyeong Yang"
+      email: "jaeyeong.yang1125@gmail.com"
+      link: "https://ccs-lab.github.io/team/jaeyeong-yang/"
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    prob: "Objective probability of the variable lottery."
+    ambig: "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery)."
+    reward_var: "Amount of reward in variable lottery. Assumed to be greater than zero."
+    reward_fix: "Amount of reward in fixed lottery. Assumed to be greater than zero."
+    choice: "If the variable lottery was selected, choice == 1; otherwise choice == 0."
+  parameters:
+    alpha:
+      desc: "risk attitude"
+      info:
+        - 0
+        - 1
+        - 2
+    beta:
+      desc: "ambiguity attitude"
+      info:
+        - "-Inf"
+        - 0
+        - "Inf"
+    gamma:
+      desc: "inverse temperature"
+      info:
+        - 0
+        - 1
+        - "Inf"
+  regressors:
+    sv: 2
+    sv_fix: 2
+    sv_var: 2
+    p_var: 2
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/cra_linear.yml b/commons/models-yaml/cra_linear.yml
new file mode 100644
index 00000000..4586c700
--- /dev/null
+++ b/commons/models-yaml/cra_linear.yml
@@ -0,0 +1,54 @@
+---
+  task_name:
+    code: "cra"
+    desc: "Choice Under Risk and Ambiguity Task"
+    cite: []
+  model_name:
+    code: "linear"
+    desc: "Linear Subjective Value Model"
+    cite:
+      - "Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047."
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors:
+    -
+      name: "Jaeyeong Yang"
+      email: "jaeyeong.yang1125@gmail.com"
+      link: "https://ccs-lab.github.io/team/jaeyeong-yang/"
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    prob: "Objective probability of the variable lottery."
+    ambig: "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery)."
+    reward_var: "Amount of reward in variable lottery. Assumed to be greater than zero."
+    reward_fix: "Amount of reward in fixed lottery. Assumed to be greater than zero."
+    choice: "If the variable lottery was selected, choice == 1; otherwise choice == 0."
+  parameters:
+    alpha:
+      desc: "risk attitude"
+      info:
+        - 0
+        - 1
+        - 2
+    beta:
+      desc: "ambiguity attitude"
+      info:
+        - "-Inf"
+        - 0
+        - "Inf"
+    gamma:
+      desc: "inverse temperature"
+      info:
+        - 0
+        - 1
+        - "Inf"
+  regressors:
+    sv: 2
+    sv_fix: 2
+    sv_var: 2
+    p_var: 2
+  postpreds:
+    - "y_pred"
+  additional_args: []
+
diff --git a/commons/models-yaml/dbdm_prob_weight.yml b/commons/models-yaml/dbdm_prob_weight.yml
new file mode 100644
index 00000000..a1b179a0
--- /dev/null
+++ b/commons/models-yaml/dbdm_prob_weight.yml
@@ -0,0 +1,60 @@
+---
+  task_name:
+    code: "dbdm"
+    desc: "Description Based Decision Making Task"
+    cite: []
+  model_name:
+    code: "prob_weight"
+    desc: "Probability Weight Function"
+    cite:
+      - "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47."
+      - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539."
+      - "Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022."
+  model_type:
+    code: ""
+    desc: "Hierarchical"
+  notes: []
+  contributors:
+    -
+      name: "Yoonseo Zoh"
+      email: "zohyos7@gmail.com"
+      link: "https://ccs-lab.github.io/team/yoonseo-zoh/"
+  data_columns:
+    subjID: "A unique identifier for each subject in the data-set."
+    opt1hprob: "Probability of getting the higher value of outcome (opt1hval) when choosing option 1."
+    opt2hprob: "Probability of getting the higher value of outcome (opt2hval) when choosing option 2."
+    opt1hval: "Possible (with opt1hprob probability) outcome of option 1."
+ opt1lval: "Possible (with (1 - opt1hprob) probability) outcome of option 1." + opt2hval: "Possible (with opt2hprob probability) outcome of option 2." + opt2lval: "Possible (with (1 - opt2hprob) probability) outcome of option 2." + choice: "If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2." + parameters: + tau: + desc: "probability weight function" + info: + - 0 + - 0.8 + - 1 + rho: + desc: "subject utility function" + info: + - 0 + - 0.7 + - 2 + lambda: + desc: "loss aversion parameter" + info: + - 0 + - 2.5 + - 5 + beta: + desc: "inverse softmax temperature" + info: + - 0 + - 0.2 + - 1 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/dd_cs.yml b/commons/models-yaml/dd_cs.yml new file mode 100644 index 00000000..c3f8c6a2 --- /dev/null +++ b/commons/models-yaml/dd_cs.yml @@ -0,0 +1,46 @@ +--- + task_name: + code: "dd" + desc: "Delay Discounting Task" + cite: [] + model_name: + code: "cs" + desc: "Constant-Sensitivity (CS) Model" + cite: + - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." + amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." + delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." + amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." + choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + parameters: + r: + desc: "exponential discounting rate" + info: + - 0 + - 0.1 + - 1 + s: + desc: "impatience" + info: + - 0 + - 1 + - 10 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 5 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/dd_cs_single.yml b/commons/models-yaml/dd_cs_single.yml new file mode 100644 index 00000000..43ddf754 --- /dev/null +++ b/commons/models-yaml/dd_cs_single.yml @@ -0,0 +1,46 @@ +--- + task_name: + code: "dd" + desc: "Delay Discounting Task" + cite: [] + model_name: + code: "cs" + desc: "Constant-Sensitivity (CS) Model" + cite: + - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671" + model_type: + code: "single" + desc: "Individual" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." + amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." + delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." + amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." + choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." 
+ parameters: + r: + desc: "exponential discounting rate" + info: + - null + - 0.1 + - null + s: + desc: "impatience" + info: + - null + - 1 + - null + beta: + desc: "inverse temperature" + info: + - null + - 1 + - null + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/dd_exp.yml b/commons/models-yaml/dd_exp.yml new file mode 100644 index 00000000..eadaad38 --- /dev/null +++ b/commons/models-yaml/dd_exp.yml @@ -0,0 +1,40 @@ +--- + task_name: + code: "dd" + desc: "Delay Discounting Task" + cite: [] + model_name: + code: "exp" + desc: "Exponential Model" + cite: + - "Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." + amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." + delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." + amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." + choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + parameters: + r: + desc: "exponential discounting rate" + info: + - 0 + - 0.1 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 5 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/dd_hyperbolic.yml b/commons/models-yaml/dd_hyperbolic.yml new file mode 100644 index 00000000..965f79ce --- /dev/null +++ b/commons/models-yaml/dd_hyperbolic.yml @@ -0,0 +1,40 @@ +--- + task_name: + code: "dd" + desc: "Delay Discounting Task" + cite: [] + model_name: + code: "hyperbolic" + desc: "Hyperbolic Model" + cite: + - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." + amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." + delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." + amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." + choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + parameters: + k: + desc: "discounting rate" + info: + - 0 + - 0.1 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 5 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/dd_hyperbolic_single.yml b/commons/models-yaml/dd_hyperbolic_single.yml new file mode 100644 index 00000000..e820f463 --- /dev/null +++ b/commons/models-yaml/dd_hyperbolic_single.yml @@ -0,0 +1,40 @@ +--- + task_name: + code: "dd" + desc: "Delay Discounting Task" + cite: [] + model_name: + code: "hyperbolic" + desc: "Hyperbolic Model" + cite: + - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." 
+ model_type: + code: "single" + desc: "Individual" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." + amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." + delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." + amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." + choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." + parameters: + k: + desc: "discounting rate" + info: + - null + - 0.1 + - null + beta: + desc: "inverse temperature" + info: + - null + - 1 + - null + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/gng_m1.yml b/commons/models-yaml/gng_m1.yml new file mode 100644 index 00000000..0037ea53 --- /dev/null +++ b/commons/models-yaml/gng_m1.yml @@ -0,0 +1,48 @@ +--- + task_name: + code: "gng" + desc: "Orthogonalized Go/Nogo Task" + cite: [] + model_name: + code: "m1" + desc: "RW + noise" + cite: + - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." + keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." + outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." + parameters: + xi: + desc: "noise" + info: + - 0 + - 0.1 + - 1 + ep: + desc: "learning rate" + info: + - 0 + - 0.2 + - 1 + rho: + desc: "effective size" + info: + - 0 + - "exp(2)" + - "Inf" + regressors: + Qgo: 2 + Qnogo: 2 + Wgo: 2 + Wnogo: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/gng_m2.yml b/commons/models-yaml/gng_m2.yml new file mode 100644 index 00000000..318d7ea6 --- /dev/null +++ b/commons/models-yaml/gng_m2.yml @@ -0,0 +1,54 @@ +--- + task_name: + code: "gng" + desc: "Orthogonalized Go/Nogo Task" + cite: [] + model_name: + code: "m2" + desc: "RW + noise + bias" + cite: + - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." + keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." + outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
+ parameters: + xi: + desc: "noise" + info: + - 0 + - 0.1 + - 1 + ep: + desc: "learning rate" + info: + - 0 + - 0.2 + - 1 + b: + desc: "action bias" + info: + - "-Inf" + - 0 + - "Inf" + rho: + desc: "effective size" + info: + - 0 + - "exp(2)" + - "Inf" + regressors: + Qgo: 2 + Qnogo: 2 + Wgo: 2 + Wnogo: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/gng_m3.yml b/commons/models-yaml/gng_m3.yml new file mode 100644 index 00000000..881b80a1 --- /dev/null +++ b/commons/models-yaml/gng_m3.yml @@ -0,0 +1,61 @@ +--- + task_name: + code: "gng" + desc: "Orthogonalized Go/Nogo Task" + cite: [] + model_name: + code: "m3" + desc: "RW + noise + bias + pi" + cite: + - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." + keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." + outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." + parameters: + xi: + desc: "noise" + info: + - 0 + - 0.1 + - 1 + ep: + desc: "learning rate" + info: + - 0 + - 0.2 + - 1 + b: + desc: "action bias" + info: + - "-Inf" + - 0 + - "Inf" + pi: + desc: "Pavlovian bias" + info: + - "-Inf" + - 0 + - "Inf" + rho: + desc: "effective size" + info: + - 0 + - "exp(2)" + - "Inf" + regressors: + Qgo: 2 + Qnogo: 2 + Wgo: 2 + Wnogo: 2 + SV: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/gng_m4.yml b/commons/models-yaml/gng_m4.yml new file mode 100644 index 00000000..4066f3e2 --- /dev/null +++ b/commons/models-yaml/gng_m4.yml @@ -0,0 +1,67 @@ +--- + task_name: + code: "gng" + desc: "Orthogonalized Go/Nogo Task" + cite: [] + model_name: + code: "m4" + desc: "RW (rew/pun) + noise + bias + pi" + cite: + - "Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." + keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." + outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
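+ # A rough sketch of what distinguishes m4 from m1-m3: outcomes are scaled
+ # by valence-specific sensitivities before the Rescorla-Wagner update,
+ # e.g. Q <- Q + ep * (rhoRew * outcome - Q) on reward trials and
+ # Q <- Q + ep * (rhoPun * outcome - Q) on punishment trials, rather than
+ # a single effective size rho.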
+ parameters: + xi: + desc: "noise" + info: + - 0 + - 0.1 + - 1 + ep: + desc: "learning rate" + info: + - 0 + - 0.2 + - 1 + b: + desc: "action bias" + info: + - "-Inf" + - 0 + - "Inf" + pi: + desc: "Pavlovian bias" + info: + - "-Inf" + - 0 + - "Inf" + rhoRew: + desc: "reward sensitivity" + info: + - 0 + - "exp(2)" + - "Inf" + rhoPun: + desc: "punishment sensitivity" + info: + - 0 + - "exp(2)" + - "Inf" + regressors: + Qgo: 2 + Qnogo: 2 + Wgo: 2 + Wnogo: 2 + SV: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/igt_orl.yml b/commons/models-yaml/igt_orl.yml new file mode 100644 index 00000000..592e8a29 --- /dev/null +++ b/commons/models-yaml/igt_orl.yml @@ -0,0 +1,65 @@ +--- + task_name: + code: "igt" + desc: "Iowa Gambling Task" + cite: + - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + model_name: + code: "orl" + desc: "Outcome-Representation Learning Model" + cite: + - "Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Nate Haines" + email: "haines.175@osu.edu" + link: "https://ccs-lab.github.io/team/nate-haines/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." + gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + parameters: + Arew: + desc: "reward learning rate" + info: + - 0 + - 0.1 + - 1 + Apun: + desc: "punishment learning rate" + info: + - 0 + - 0.1 + - 1 + K: + desc: "perseverance decay" + info: + - 0 + - 0.1 + - 5 + betaF: + desc: "outcome frequency weight" + info: + - "-Inf" + - 0.1 + - "Inf" + betaP: + desc: "perseverance weight" + info: + - "-Inf" + - 1 + - "Inf" + regressors: {} + postpreds: + - "y_pred" + additional_args: + - + code: "payscale" + default: 100 + desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + diff --git a/commons/models-yaml/igt_pvl_decay.yml b/commons/models-yaml/igt_pvl_decay.yml new file mode 100644 index 00000000..f10a3a33 --- /dev/null +++ b/commons/models-yaml/igt_pvl_decay.yml @@ -0,0 +1,55 @@ +--- + task_name: + code: "igt" + desc: "Iowa Gambling Task" + cite: + - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + model_name: + code: "pvl_decay" + desc: "Prospect Valence Learning (PVL) Decay-RI" + cite: + - "Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." 
+ choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." + gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + parameters: + A: + desc: "decay rate" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "outcome sensitivity" + info: + - 0 + - 0.5 + - 2 + cons: + desc: "response consistency" + info: + - 0 + - 1 + - 5 + lambda: + desc: "loss aversion" + info: + - 0 + - 1 + - 10 + regressors: {} + postpreds: + - "y_pred" + additional_args: + - + code: "payscale" + default: 100 + desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + diff --git a/commons/models-yaml/igt_pvl_delta.yml b/commons/models-yaml/igt_pvl_delta.yml new file mode 100644 index 00000000..8e396457 --- /dev/null +++ b/commons/models-yaml/igt_pvl_delta.yml @@ -0,0 +1,55 @@ +--- + task_name: + code: "igt" + desc: "Iowa Gambling Task" + cite: + - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + model_name: + code: "pvl_delta" + desc: "Prospect Valence Learning (PVL) Delta" + cite: + - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." + gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + parameters: + A: + desc: "learning rate" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "outcome sensitivity" + info: + - 0 + - 0.5 + - 2 + cons: + desc: "response consistency" + info: + - 0 + - 1 + - 5 + lambda: + desc: "loss aversion" + info: + - 0 + - 1 + - 10 + regressors: {} + postpreds: + - "y_pred" + additional_args: + - + code: "payscale" + default: 100 + desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + diff --git a/commons/models-yaml/igt_vpp.yml b/commons/models-yaml/igt_vpp.yml new file mode 100644 index 00000000..0798673b --- /dev/null +++ b/commons/models-yaml/igt_vpp.yml @@ -0,0 +1,79 @@ +--- + task_name: + code: "igt" + desc: "Iowa Gambling Task" + cite: + - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" + model_name: + code: "vpp" + desc: "Value-Plus-Perseverance" + cite: + - "Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." 
+ choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." + gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." + loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." + parameters: + A: + desc: "learning rate" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "outcome sensitivity" + info: + - 0 + - 0.5 + - 2 + cons: + desc: "response consistency" + info: + - 0 + - 1 + - 5 + lambda: + desc: "loss aversion" + info: + - 0 + - 1 + - 10 + epP: + desc: "gain impact" + info: + - "-Inf" + - 0 + - "Inf" + epN: + desc: "loss impact" + info: + - "-Inf" + - 0 + - "Inf" + K: + desc: "decay rate" + info: + - 0 + - 0.5 + - 1 + w: + desc: "RL weight" + info: + - 0 + - 0.5 + - 1 + regressors: {} + postpreds: + - "y_pred" + additional_args: + - + code: "payscale" + default: 100 + desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." + diff --git a/commons/models-yaml/peer_ocu.yml b/commons/models-yaml/peer_ocu.yml new file mode 100644 index 00000000..e56ddee1 --- /dev/null +++ b/commons/models-yaml/peer_ocu.yml @@ -0,0 +1,52 @@ +--- + task_name: + code: "peer" + desc: "Peer Influence Task" + cite: + - "Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916." + model_name: + code: "ocu" + desc: "Other-Conferred Utility (OCU) Model" + cite: [] + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Harhim Park" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + condition: "0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky)." + p_gamble: "Probability of receiving a high payoff (same for both options)." + safe_Hpayoff: "High payoff of the safe option." + safe_Lpayoff: "Low payoff of the safe option." + risky_Hpayoff: "High payoff of the risky option." + risky_Lpayoff: "Low payoff of the risky option." + choice: "Which option was chosen? 0: safe, 1: risky." + parameters: + rho: + desc: "risk preference" + info: + - 0 + - 1 + - 2 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - "Inf" + ocu: + desc: "other-conferred utility" + info: + - "-Inf" + - 0 + - "Inf" + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_ewa.yml b/commons/models-yaml/prl_ewa.yml new file mode 100644 index 00000000..90487c43 --- /dev/null +++ b/commons/models-yaml/prl_ewa.yml @@ -0,0 +1,55 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "ewa" + desc: "Experience-Weighted Attraction Model" + cite: + - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. 
http://doi.org/10.1016/j.neuron.2013.08.030" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + phi: + desc: "1 - learning rate" + info: + - 0 + - 0.5 + - 1 + rho: + desc: "experience decay factor" + info: + - 0 + - 0.1 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + ew_c: 2 + ew_nc: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_fictitious.yml b/commons/models-yaml/prl_fictitious.yml new file mode 100644 index 00000000..51f05afb --- /dev/null +++ b/commons/models-yaml/prl_fictitious.yml @@ -0,0 +1,56 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "fictitious" + desc: "Fictitious Update Model" + cite: + - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + eta: + desc: "learning rate" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "indecision point" + info: + - "-Inf" + - 0 + - "Inf" + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + pe_c: 2 + pe_nc: 2 + dv: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_fictitious_multipleB.yml b/commons/models-yaml/prl_fictitious_multipleB.yml new file mode 100644 index 00000000..261fb792 --- /dev/null +++ b/commons/models-yaml/prl_fictitious_multipleB.yml @@ -0,0 +1,57 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "fictitious" + desc: "Fictitious Update Model" + cite: + - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. 
http://doi.org/10.1093/cercor/bhn098" + model_type: + code: "multipleB" + desc: "Multiple-Block Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + block: "A unique identifier for each of the multiple blocks within each subject." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + eta: + desc: "learning rate" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "indecision point" + info: + - "-Inf" + - 0 + - "Inf" + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 3 + ev_nc: 3 + pe_c: 3 + pe_nc: 3 + dv: 3 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_fictitious_rp.yml b/commons/models-yaml/prl_fictitious_rp.yml new file mode 100644 index 00000000..d110785d --- /dev/null +++ b/commons/models-yaml/prl_fictitious_rp.yml @@ -0,0 +1,63 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "fictitious_rp" + desc: "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)" + cite: + - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
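+ # A rough sketch of the fictitious update assumed here: with
+ # PE = outcome - EV(chosen), the chosen option is updated at rate eta_pos
+ # when PE > 0 and eta_neg when PE < 0, while the unchosen option is
+ # updated with the sign-flipped outcome (-outcome - EV(unchosen));
+ # alpha shifts the indecision point of the softmax with temperature beta.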
+ parameters: + eta_pos: + desc: "learning rate, +PE" + info: + - 0 + - 0.5 + - 1 + eta_neg: + desc: "learning rate, -PE" + info: + - 0 + - 0.5 + - 1 + alpha: + desc: "indecision point" + info: + - "-Inf" + - 0 + - "Inf" + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + pe_c: 2 + pe_nc: 2 + dv: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_fictitious_rp_woa.yml b/commons/models-yaml/prl_fictitious_rp_woa.yml new file mode 100644 index 00000000..a435b0d1 --- /dev/null +++ b/commons/models-yaml/prl_fictitious_rp_woa.yml @@ -0,0 +1,57 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "fictitious_rp_woa" + desc: "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)" + cite: + - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + eta_pos: + desc: "learning rate, +PE" + info: + - 0 + - 0.5 + - 1 + eta_neg: + desc: "learning rate, -PE" + info: + - 0 + - 0.5 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + pe_c: 2 + pe_nc: 2 + dv: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_fictitious_woa.yml b/commons/models-yaml/prl_fictitious_woa.yml new file mode 100644 index 00000000..48b3a436 --- /dev/null +++ b/commons/models-yaml/prl_fictitious_woa.yml @@ -0,0 +1,50 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "fictitious_woa" + desc: "Fictitious Update Model, without alpha (indecision point)" + cite: + - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." 
+ choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + eta: + desc: "learning rate" + info: + - 0 + - 0.5 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + pe_c: 2 + pe_nc: 2 + dv: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_rp.yml b/commons/models-yaml/prl_rp.yml new file mode 100644 index 00000000..3db4e297 --- /dev/null +++ b/commons/models-yaml/prl_rp.yml @@ -0,0 +1,54 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "rp" + desc: "Reward-Punishment Model" + cite: + - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." + parameters: + Apun: + desc: "punishment learning rate" + info: + - 0 + - 0.1 + - 1 + Arew: + desc: "reward learning rate" + info: + - 0 + - 0.1 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 2 + ev_nc: 2 + pe: 2 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/prl_rp_multipleB.yml b/commons/models-yaml/prl_rp_multipleB.yml new file mode 100644 index 00000000..8f720519 --- /dev/null +++ b/commons/models-yaml/prl_rp_multipleB.yml @@ -0,0 +1,55 @@ +--- + task_name: + code: "prl" + desc: "Probabilistic Reversal Learning Task" + cite: [] + model_name: + code: "rp" + desc: "Reward-Punishment Model" + cite: + - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" + model_type: + code: "multipleB" + desc: "Multiple-Block Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang (for model-based regressors)" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + - + name: "Harhim Park (for model-based regressors)" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + block: "A unique identifier for each of the multiple blocks within each subject." + choice: "Integer value representing the option chosen on that trial: 1 or 2." + outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
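+ # A rough sketch of the reward-punishment rule, fit per block here:
+ # PE = outcome - EV(chosen); EV(chosen) <- EV(chosen) + Arew * PE on
+ # reward trials and EV(chosen) <- EV(chosen) + Apun * PE on loss trials,
+ # with choices drawn from a softmax over EVs at temperature beta.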
+ parameters: + Apun: + desc: "punishment learning rate" + info: + - 0 + - 0.1 + - 1 + Arew: + desc: "reward learning rate" + info: + - 0 + - 0.1 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: + ev_c: 3 + ev_nc: 3 + pe: 3 + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/pst_gainloss_Q.yml b/commons/models-yaml/pst_gainloss_Q.yml new file mode 100644 index 00000000..d674093f --- /dev/null +++ b/commons/models-yaml/pst_gainloss_Q.yml @@ -0,0 +1,48 @@ +--- + task_name: + code: "pst" + desc: "Probabilistic Selection Task" + cite: [] + model_name: + code: "gainloss_Q" + desc: "Gain-Loss Q Learning Model" + cite: + - "Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Jaeyeong Yang" + email: "jaeyeong.yang1125@gmail.com" + link: "https://ccs-lab.github.io/team/jaeyeong-yang/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + type: "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as for 80\\% (type 1), 20\\% (type 2), 70\\% (type 3), 30\\% (type 4), 60\\% (type 5), 40\\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6." + choice: "Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0)." + reward: "Amount of reward earned as a result of the trial." + parameters: + alpha_pos: + desc: "learning rate for positive feedbacks" + info: + - 0 + - 0.5 + - 1 + alpha_neg: + desc: "learning rate for negative feedbacks" + info: + - 0 + - 0.5 + - 1 + beta: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/ra_noLA.yml b/commons/models-yaml/ra_noLA.yml new file mode 100644 index 00000000..06b293f1 --- /dev/null +++ b/commons/models-yaml/ra_noLA.yml @@ -0,0 +1,39 @@ +--- + task_name: + code: "ra" + desc: "Risk Aversion Task" + cite: [] + model_name: + code: "noLA" + desc: "Prospect Theory, without loss aversion (LA) parameter" + cite: + - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." + loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." + cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." + gamble: "If gamble was taken, gamble == 1; else gamble == 0." 
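+ # A rough sketch of the utilities assumed once loss aversion is dropped:
+ # U(gamble) = 0.5 * gain^rho - 0.5 * |loss|^rho and U(cert) = cert^rho,
+ # with p(gamble) = logistic(tau * (U(gamble) - U(cert)));
+ # the upper bound of 30 on tau below reflects the widened range used
+ # across all three ra models.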
+ parameters: + rho: + desc: "risk aversion" + info: + - 0 + - 1 + - 2 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - 30 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/ra_noRA.yml b/commons/models-yaml/ra_noRA.yml new file mode 100644 index 00000000..64730ac9 --- /dev/null +++ b/commons/models-yaml/ra_noRA.yml @@ -0,0 +1,39 @@ +--- + task_name: + code: "ra" + desc: "Risk Aversion Task" + cite: [] + model_name: + code: "noRA" + desc: "Prospect Theory, without risk aversion (RA) parameter" + cite: + - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." + loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." + cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." + gamble: "If gamble was taken, gamble == 1; else gamble == 0." + parameters: + lambda: + desc: "loss aversion" + info: + - 0 + - 1 + - 5 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - 30 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/ra_prospect.yml b/commons/models-yaml/ra_prospect.yml new file mode 100644 index 00000000..58718dfb --- /dev/null +++ b/commons/models-yaml/ra_prospect.yml @@ -0,0 +1,45 @@ +--- + task_name: + code: "ra" + desc: "Risk Aversion Task" + cite: [] + model_name: + code: "prospect" + desc: "Prospect Theory" + cite: + - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." + loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." + cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." + gamble: "If gamble was taken, gamble == 1; else gamble == 0." + parameters: + rho: + desc: "risk aversion" + info: + - 0 + - 1 + - 2 + lambda: + desc: "loss aversion" + info: + - 0 + - 1 + - 5 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - 30 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/rdt_happiness.yml b/commons/models-yaml/rdt_happiness.yml new file mode 100644 index 00000000..43d5cb2b --- /dev/null +++ b/commons/models-yaml/rdt_happiness.yml @@ -0,0 +1,71 @@ +--- + task_name: + code: "rdt" + desc: "Risky Decision Task" + cite: [] + model_name: + code: "happiness" + desc: "Happiness Computational Model" + cite: + - "Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257." 
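+ # A rough sketch of the momentary-happiness equation assumed here (after
+ # Rutledge et al., 2014):
+ # happy(t) = w0 + w1 * sum_j gam^(t-j) * CR_j
+ #               + w2 * sum_j gam^(t-j) * EV_j
+ #               + w3 * sum_j gam^(t-j) * RPE_j,
+ # where CR, EV and RPE are the certain rewards, gamble expected values and
+ # reward prediction errors of past trials, and sig is the Gaussian error
+ # s.d. of the happiness ratings.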
+ model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Harhim Park" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." + loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." + cert: "Guaranteed amount of a safe option." + type: "loss == -1, mixed == 0, gain == 1" + gamble: "If gamble was taken, gamble == 1; else gamble == 0." + outcome: "Result of the trial." + happy: "Happiness score." + RT_happy: "Reaction time for answering the happiness score." + parameters: + w0: + desc: "baseline" + info: + - "-Inf" + - 1 + - "Inf" + w1: + desc: "weight of certain rewards" + info: + - "-Inf" + - 1 + - "Inf" + w2: + desc: "weight of expected values" + info: + - "-Inf" + - 1 + - "Inf" + w3: + desc: "weight of reward prediction errors" + info: + - "-Inf" + - 1 + - "Inf" + gam: + desc: "forgetting factor" + info: + - 0 + - 0.5 + - 1 + sig: + desc: "standard deviation of error" + info: + - 0 + - 1 + - "Inf" + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/ts_par4.yml b/commons/models-yaml/ts_par4.yml new file mode 100644 index 00000000..f62b1872 --- /dev/null +++ b/commons/models-yaml/ts_par4.yml @@ -0,0 +1,61 @@ +--- + task_name: + code: "ts" + desc: "Two-Step Task" + cite: + - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + model_name: + code: "par4" + desc: "Hybrid Model, with 4 parameters" + cite: + - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + - "Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Harhim Park" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." + level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." + reward: "Reward after Level 2 (0 or 1)." 
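+ # A rough sketch of the 4-parameter hybrid valuation: stage-1 action
+ # values mix model-based and model-free estimates as
+ # Q_net = w * Q_MB + (1 - w) * Q_MF, with a single learning rate a and
+ # inverse temperature beta shared across stages, and pi adding a
+ # perseverance bonus for repeating the previous stage-1 choice.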
+ parameters: + a: + desc: "learning rate for both stages 1 & 2" + info: + - 0 + - 0.5 + - 1 + beta: + desc: "inverse temperature for both stages 1 & 2" + info: + - 0 + - 1 + - "Inf" + pi: + desc: "perseverance" + info: + - 0 + - 1 + - 5 + w: + desc: "model-based weight" + info: + - 0 + - 0.5 + - 1 + regressors: {} + postpreds: + - "y_pred_step1" + - "y_pred_step2" + additional_args: + - + code: "trans_prob" + default: 0.7 + desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." + diff --git a/commons/models-yaml/ts_par6.yml b/commons/models-yaml/ts_par6.yml new file mode 100644 index 00000000..1a840cfa --- /dev/null +++ b/commons/models-yaml/ts_par6.yml @@ -0,0 +1,72 @@ +--- + task_name: + code: "ts" + desc: "Two-Step Task" + cite: + - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + model_name: + code: "par6" + desc: "Hybrid Model, with 6 parameters" + cite: + - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Harhim Park" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." + level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." + reward: "Reward after Level 2 (0 or 1)." + parameters: + a1: + desc: "learning rate in stage 1" + info: + - 0 + - 0.5 + - 1 + beta1: + desc: "inverse temperature in stage 1" + info: + - 0 + - 1 + - "Inf" + a2: + desc: "learning rate in stage 2" + info: + - 0 + - 0.5 + - 1 + beta2: + desc: "inverse temperature in stage 2" + info: + - 0 + - 1 + - "Inf" + pi: + desc: "perseverance" + info: + - 0 + - 1 + - 5 + w: + desc: "model-based weight" + info: + - 0 + - 0.5 + - 1 + regressors: {} + postpreds: + - "y_pred_step1" + - "y_pred_step2" + additional_args: + - + code: "trans_prob" + default: 0.7 + desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." + diff --git a/commons/models-yaml/ts_par7.yml b/commons/models-yaml/ts_par7.yml new file mode 100644 index 00000000..d82006f2 --- /dev/null +++ b/commons/models-yaml/ts_par7.yml @@ -0,0 +1,78 @@ +--- + task_name: + code: "ts" + desc: "Two-Step Task" + cite: + - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + model_name: + code: "par7" + desc: "Hybrid Model, with 7 parameters (original model)" + cite: + - "Daw, N. D., Gershman, S. 
J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Harhim Park" + email: "hrpark12@gmail.com" + link: "https://ccs-lab.github.io/team/harhim-park/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." + level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." + reward: "Reward after Level 2 (0 or 1)." + parameters: + a1: + desc: "learning rate in stage 1" + info: + - 0 + - 0.5 + - 1 + beta1: + desc: "inverse temperature in stage 1" + info: + - 0 + - 1 + - "Inf" + a2: + desc: "learning rate in stage 2" + info: + - 0 + - 0.5 + - 1 + beta2: + desc: "inverse temperature in stage 2" + info: + - 0 + - 1 + - "Inf" + pi: + desc: "perseverance" + info: + - 0 + - 1 + - 5 + w: + desc: "model-based weight" + info: + - 0 + - 0.5 + - 1 + lambda: + desc: "eligibility trace" + info: + - 0 + - 0.5 + - 1 + regressors: {} + postpreds: + - "y_pred_step1" + - "y_pred_step2" + additional_args: + - + code: "trans_prob" + default: 0.7 + desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." + diff --git a/commons/models-yaml/ug_bayes.yml b/commons/models-yaml/ug_bayes.yml new file mode 100644 index 00000000..3f1f9b93 --- /dev/null +++ b/commons/models-yaml/ug_bayes.yml @@ -0,0 +1,43 @@ +--- + task_name: + code: "ug" + desc: "Norm-Training Ultimatum Game" + cite: [] + model_name: + code: "bayes" + desc: "Ideal Observer Model" + cite: + - "Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + offer: "Floating point value representing the offer made in that trial (e.g. 4, 10, 11)." + accept: "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." + parameters: + alpha: + desc: "envy" + info: + - 0 + - 1 + - 20 + beta: + desc: "guilt" + info: + - 0 + - 0.5 + - 10 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/ug_delta.yml b/commons/models-yaml/ug_delta.yml new file mode 100644 index 00000000..fa23cf78 --- /dev/null +++ b/commons/models-yaml/ug_delta.yml @@ -0,0 +1,43 @@ +--- + task_name: + code: "ug" + desc: "Norm-Training Ultimatum Game" + cite: [] + model_name: + code: "delta" + desc: "Rescorla-Wagner (Delta) Model" + cite: + - "Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). 
Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015" + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: [] + data_columns: + subjID: "A unique identifier for each subject in the data-set." + offer: "Floating point value representing the offer made in that trial (e.g. 4, 10, 11)." + accept: "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." + parameters: + alpha: + desc: "envy" + info: + - 0 + - 1 + - 20 + tau: + desc: "inverse temperature" + info: + - 0 + - 1 + - 10 + ep: + desc: "norm adaptation rate" + info: + - 0 + - 0.5 + - 1 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + diff --git a/commons/models-yaml/wcs_sql.yml b/commons/models-yaml/wcs_sql.yml new file mode 100644 index 00000000..460f309d --- /dev/null +++ b/commons/models-yaml/wcs_sql.yml @@ -0,0 +1,47 @@ +--- + task_name: + code: "wcs" + desc: "Wisconsin Card Sorting Task" + cite: [] + model_name: + code: "sql" + desc: "Sequential Learning Model" + cite: + - "Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13." + model_type: + code: "" + desc: "Hierarchical" + notes: [] + contributors: + - + name: "Dayeong Min" + email: "mindy2801@snu.ac.kr" + link: "https://ccs-lab.github.io/team/dayeong-min/" + data_columns: + subjID: "A unique identifier for each subject in the data-set." + choice: "Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4." + outcome: "1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0." 
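+ # A rough sketch of the sequential learning rule assumed here: attention
+ # weights over the three sorting dimensions (color, form, number) are
+ # reallocated after each trial, at rate r following correct feedback and
+ # p following errors, and deck choice follows the attention-weighted
+ # match signals raised to the consistency power d.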
+ parameters: + r: + desc: "reward sensitivity" + info: + - 0 + - 0.1 + - 1 + p: + desc: "punishment sensitivity" + info: + - 0 + - 0.1 + - 1 + d: + desc: "decision consistency or inverse temperature" + info: + - 0 + - 1 + - 5 + regressors: {} + postpreds: + - "y_pred" + additional_args: [] + From 3d56c5fa51a3125d8b2b420505aa57c2d76ccb80 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 18:17:26 +0900 Subject: [PATCH 091/163] Replace JSON files with YAML --- .../bandit2arm_delta.json | 0 .../bandit4arm2_kalman_filter.json | 0 .../bandit4arm_2par_lapse.json | 0 .../bandit4arm_4par.json | 0 .../bandit4arm_lapse.json | 0 .../bandit4arm_lapse_decay.json | 0 .../bandit4arm_singleA_lapse.json | 0 .../{models => models-json}/bart_par4.json | 0 .../{models => models-json}/choiceRT_ddm.json | 0 .../choiceRT_ddm_single.json | 0 commons/{models => models-json}/cra_exp.json | 0 .../{models => models-json}/cra_linear.json | 0 .../dbdm_prob_weight.json | 0 commons/{models => models-json}/dd_cs.json | 0 .../{models => models-json}/dd_cs_single.json | 0 commons/{models => models-json}/dd_exp.json | 0 .../dd_hyperbolic.json | 0 .../dd_hyperbolic_single.json | 0 commons/{models => models-json}/gng_m1.json | 0 commons/{models => models-json}/gng_m2.json | 0 commons/{models => models-json}/gng_m3.json | 0 commons/{models => models-json}/gng_m4.json | 0 commons/{models => models-json}/igt_orl.json | 0 .../igt_pvl_decay.json | 0 .../igt_pvl_delta.json | 0 commons/{models => models-json}/igt_vpp.json | 0 commons/{models => models-json}/peer_ocu.json | 0 commons/{models => models-json}/prl_ewa.json | 0 .../prl_fictitious.json | 0 .../prl_fictitious_multipleB.json | 0 .../prl_fictitious_rp.json | 0 .../prl_fictitious_rp_woa.json | 0 .../prl_fictitious_woa.json | 0 commons/{models => models-json}/prl_rp.json | 0 .../prl_rp_multipleB.json | 0 .../pst_gainloss_Q.json | 0 commons/{models => models-json}/ra_noLA.json | 0 commons/{models => models-json}/ra_noRA.json | 0 .../{models => models-json}/ra_prospect.json | 0 .../rdt_happiness.json | 0 commons/{models => models-json}/ts_par4.json | 0 commons/{models => models-json}/ts_par6.json | 0 commons/{models => models-json}/ts_par7.json | 0 commons/{models => models-json}/ug_bayes.json | 0 commons/{models => models-json}/ug_delta.json | 0 commons/{models => models-json}/wcs_sql.json | 0 commons/models/bandit2arm_delta.yml | 35 +++++++++++ commons/models/bandit4arm2_kalman_filter.yml | 47 ++++++++++++++ commons/models/bandit4arm_2par_lapse.yml | 38 +++++++++++ commons/models/bandit4arm_4par.yml | 40 ++++++++++++ commons/models/bandit4arm_lapse.yml | 43 +++++++++++++ commons/models/bandit4arm_lapse_decay.yml | 47 ++++++++++++++ commons/models/bandit4arm_singleA_lapse.yml | 41 ++++++++++++ commons/models/bart_par4.yml | 55 ++++++++++++++++ commons/models/choiceRT_ddm.yml | 47 ++++++++++++++ commons/models/choiceRT_ddm_single.yml | 47 ++++++++++++++ commons/models/cra_exp.yml | 45 +++++++++++++ commons/models/cra_linear.yml | 45 +++++++++++++ commons/models/dbdm_prob_weight.yml | 52 +++++++++++++++ commons/models/dd_cs.yml | 41 ++++++++++++ commons/models/dd_cs_single.yml | 41 ++++++++++++ commons/models/dd_exp.yml | 38 +++++++++++ commons/models/dd_hyperbolic.yml | 37 +++++++++++ commons/models/dd_hyperbolic_single.yml | 37 +++++++++++ commons/models/gng_m1.yml | 41 ++++++++++++ commons/models/gng_m2.yml | 44 +++++++++++++ commons/models/gng_m3.yml | 48 ++++++++++++++ commons/models/gng_m4.yml | 51 +++++++++++++++ commons/models/igt_orl.yml | 54 
++++++++++++++++ commons/models/igt_pvl_decay.yml | 49 +++++++++++++++ commons/models/igt_pvl_delta.yml | 48 ++++++++++++++ commons/models/igt_vpp.yml | 60 ++++++++++++++++++ commons/models/peer_ocu.yml | 42 +++++++++++++ commons/models/prl_ewa.yml | 45 +++++++++++++ commons/models/prl_fictitious.yml | 46 ++++++++++++++ commons/models/prl_fictitious_multipleB.yml | 47 ++++++++++++++ commons/models/prl_fictitious_rp.yml | 53 ++++++++++++++++ commons/models/prl_fictitious_rp_woa.yml | 50 +++++++++++++++ commons/models/prl_fictitious_woa.yml | 43 +++++++++++++ commons/models/prl_rp.yml | 44 +++++++++++++ commons/models/prl_rp_multipleB.yml | 45 +++++++++++++ commons/models/pst_gainloss_Q.yml | 45 +++++++++++++ commons/models/ra_noLA.yml | 35 +++++++++++ commons/models/ra_noRA.yml | 35 +++++++++++ commons/models/ra_prospect.yml | 38 +++++++++++ commons/models/rdt_happiness.yml | 52 +++++++++++++++ commons/models/ts_par4.yml | 56 +++++++++++++++++ commons/models/ts_par6.yml | 60 ++++++++++++++++++ commons/models/ts_par7.yml | 63 +++++++++++++++++++ commons/models/ug_bayes.yml | 36 +++++++++++ commons/models/ug_delta.yml | 37 +++++++++++ commons/models/wcs_sql.yml | 39 ++++++++++++ 92 files changed, 2082 insertions(+) rename commons/{models => models-json}/bandit2arm_delta.json (100%) rename commons/{models => models-json}/bandit4arm2_kalman_filter.json (100%) rename commons/{models => models-json}/bandit4arm_2par_lapse.json (100%) rename commons/{models => models-json}/bandit4arm_4par.json (100%) rename commons/{models => models-json}/bandit4arm_lapse.json (100%) rename commons/{models => models-json}/bandit4arm_lapse_decay.json (100%) rename commons/{models => models-json}/bandit4arm_singleA_lapse.json (100%) rename commons/{models => models-json}/bart_par4.json (100%) rename commons/{models => models-json}/choiceRT_ddm.json (100%) rename commons/{models => models-json}/choiceRT_ddm_single.json (100%) rename commons/{models => models-json}/cra_exp.json (100%) rename commons/{models => models-json}/cra_linear.json (100%) rename commons/{models => models-json}/dbdm_prob_weight.json (100%) rename commons/{models => models-json}/dd_cs.json (100%) rename commons/{models => models-json}/dd_cs_single.json (100%) rename commons/{models => models-json}/dd_exp.json (100%) rename commons/{models => models-json}/dd_hyperbolic.json (100%) rename commons/{models => models-json}/dd_hyperbolic_single.json (100%) rename commons/{models => models-json}/gng_m1.json (100%) rename commons/{models => models-json}/gng_m2.json (100%) rename commons/{models => models-json}/gng_m3.json (100%) rename commons/{models => models-json}/gng_m4.json (100%) rename commons/{models => models-json}/igt_orl.json (100%) rename commons/{models => models-json}/igt_pvl_decay.json (100%) rename commons/{models => models-json}/igt_pvl_delta.json (100%) rename commons/{models => models-json}/igt_vpp.json (100%) rename commons/{models => models-json}/peer_ocu.json (100%) rename commons/{models => models-json}/prl_ewa.json (100%) rename commons/{models => models-json}/prl_fictitious.json (100%) rename commons/{models => models-json}/prl_fictitious_multipleB.json (100%) rename commons/{models => models-json}/prl_fictitious_rp.json (100%) rename commons/{models => models-json}/prl_fictitious_rp_woa.json (100%) rename commons/{models => models-json}/prl_fictitious_woa.json (100%) rename commons/{models => models-json}/prl_rp.json (100%) rename commons/{models => models-json}/prl_rp_multipleB.json (100%) rename commons/{models => 
models-json}/pst_gainloss_Q.json (100%) rename commons/{models => models-json}/ra_noLA.json (100%) rename commons/{models => models-json}/ra_noRA.json (100%) rename commons/{models => models-json}/ra_prospect.json (100%) rename commons/{models => models-json}/rdt_happiness.json (100%) rename commons/{models => models-json}/ts_par4.json (100%) rename commons/{models => models-json}/ts_par6.json (100%) rename commons/{models => models-json}/ts_par7.json (100%) rename commons/{models => models-json}/ug_bayes.json (100%) rename commons/{models => models-json}/ug_delta.json (100%) rename commons/{models => models-json}/wcs_sql.json (100%) create mode 100644 commons/models/bandit2arm_delta.yml create mode 100644 commons/models/bandit4arm2_kalman_filter.yml create mode 100644 commons/models/bandit4arm_2par_lapse.yml create mode 100644 commons/models/bandit4arm_4par.yml create mode 100644 commons/models/bandit4arm_lapse.yml create mode 100644 commons/models/bandit4arm_lapse_decay.yml create mode 100644 commons/models/bandit4arm_singleA_lapse.yml create mode 100644 commons/models/bart_par4.yml create mode 100644 commons/models/choiceRT_ddm.yml create mode 100644 commons/models/choiceRT_ddm_single.yml create mode 100644 commons/models/cra_exp.yml create mode 100644 commons/models/cra_linear.yml create mode 100644 commons/models/dbdm_prob_weight.yml create mode 100644 commons/models/dd_cs.yml create mode 100644 commons/models/dd_cs_single.yml create mode 100644 commons/models/dd_exp.yml create mode 100644 commons/models/dd_hyperbolic.yml create mode 100644 commons/models/dd_hyperbolic_single.yml create mode 100644 commons/models/gng_m1.yml create mode 100644 commons/models/gng_m2.yml create mode 100644 commons/models/gng_m3.yml create mode 100644 commons/models/gng_m4.yml create mode 100644 commons/models/igt_orl.yml create mode 100644 commons/models/igt_pvl_decay.yml create mode 100644 commons/models/igt_pvl_delta.yml create mode 100644 commons/models/igt_vpp.yml create mode 100644 commons/models/peer_ocu.yml create mode 100644 commons/models/prl_ewa.yml create mode 100644 commons/models/prl_fictitious.yml create mode 100644 commons/models/prl_fictitious_multipleB.yml create mode 100644 commons/models/prl_fictitious_rp.yml create mode 100644 commons/models/prl_fictitious_rp_woa.yml create mode 100644 commons/models/prl_fictitious_woa.yml create mode 100644 commons/models/prl_rp.yml create mode 100644 commons/models/prl_rp_multipleB.yml create mode 100644 commons/models/pst_gainloss_Q.yml create mode 100644 commons/models/ra_noLA.yml create mode 100644 commons/models/ra_noRA.yml create mode 100644 commons/models/ra_prospect.yml create mode 100644 commons/models/rdt_happiness.yml create mode 100644 commons/models/ts_par4.yml create mode 100644 commons/models/ts_par6.yml create mode 100644 commons/models/ts_par7.yml create mode 100644 commons/models/ug_bayes.yml create mode 100644 commons/models/ug_delta.yml create mode 100644 commons/models/wcs_sql.yml diff --git a/commons/models/bandit2arm_delta.json b/commons/models-json/bandit2arm_delta.json similarity index 100% rename from commons/models/bandit2arm_delta.json rename to commons/models-json/bandit2arm_delta.json diff --git a/commons/models/bandit4arm2_kalman_filter.json b/commons/models-json/bandit4arm2_kalman_filter.json similarity index 100% rename from commons/models/bandit4arm2_kalman_filter.json rename to commons/models-json/bandit4arm2_kalman_filter.json diff --git a/commons/models/bandit4arm_2par_lapse.json 
b/commons/models-json/bandit4arm_2par_lapse.json similarity index 100% rename from commons/models/bandit4arm_2par_lapse.json rename to commons/models-json/bandit4arm_2par_lapse.json diff --git a/commons/models/bandit4arm_4par.json b/commons/models-json/bandit4arm_4par.json similarity index 100% rename from commons/models/bandit4arm_4par.json rename to commons/models-json/bandit4arm_4par.json diff --git a/commons/models/bandit4arm_lapse.json b/commons/models-json/bandit4arm_lapse.json similarity index 100% rename from commons/models/bandit4arm_lapse.json rename to commons/models-json/bandit4arm_lapse.json diff --git a/commons/models/bandit4arm_lapse_decay.json b/commons/models-json/bandit4arm_lapse_decay.json similarity index 100% rename from commons/models/bandit4arm_lapse_decay.json rename to commons/models-json/bandit4arm_lapse_decay.json diff --git a/commons/models/bandit4arm_singleA_lapse.json b/commons/models-json/bandit4arm_singleA_lapse.json similarity index 100% rename from commons/models/bandit4arm_singleA_lapse.json rename to commons/models-json/bandit4arm_singleA_lapse.json diff --git a/commons/models/bart_par4.json b/commons/models-json/bart_par4.json similarity index 100% rename from commons/models/bart_par4.json rename to commons/models-json/bart_par4.json diff --git a/commons/models/choiceRT_ddm.json b/commons/models-json/choiceRT_ddm.json similarity index 100% rename from commons/models/choiceRT_ddm.json rename to commons/models-json/choiceRT_ddm.json diff --git a/commons/models/choiceRT_ddm_single.json b/commons/models-json/choiceRT_ddm_single.json similarity index 100% rename from commons/models/choiceRT_ddm_single.json rename to commons/models-json/choiceRT_ddm_single.json diff --git a/commons/models/cra_exp.json b/commons/models-json/cra_exp.json similarity index 100% rename from commons/models/cra_exp.json rename to commons/models-json/cra_exp.json diff --git a/commons/models/cra_linear.json b/commons/models-json/cra_linear.json similarity index 100% rename from commons/models/cra_linear.json rename to commons/models-json/cra_linear.json diff --git a/commons/models/dbdm_prob_weight.json b/commons/models-json/dbdm_prob_weight.json similarity index 100% rename from commons/models/dbdm_prob_weight.json rename to commons/models-json/dbdm_prob_weight.json diff --git a/commons/models/dd_cs.json b/commons/models-json/dd_cs.json similarity index 100% rename from commons/models/dd_cs.json rename to commons/models-json/dd_cs.json diff --git a/commons/models/dd_cs_single.json b/commons/models-json/dd_cs_single.json similarity index 100% rename from commons/models/dd_cs_single.json rename to commons/models-json/dd_cs_single.json diff --git a/commons/models/dd_exp.json b/commons/models-json/dd_exp.json similarity index 100% rename from commons/models/dd_exp.json rename to commons/models-json/dd_exp.json diff --git a/commons/models/dd_hyperbolic.json b/commons/models-json/dd_hyperbolic.json similarity index 100% rename from commons/models/dd_hyperbolic.json rename to commons/models-json/dd_hyperbolic.json diff --git a/commons/models/dd_hyperbolic_single.json b/commons/models-json/dd_hyperbolic_single.json similarity index 100% rename from commons/models/dd_hyperbolic_single.json rename to commons/models-json/dd_hyperbolic_single.json diff --git a/commons/models/gng_m1.json b/commons/models-json/gng_m1.json similarity index 100% rename from commons/models/gng_m1.json rename to commons/models-json/gng_m1.json diff --git a/commons/models/gng_m2.json b/commons/models-json/gng_m2.json 
similarity index 100%
rename from commons/models/gng_m2.json
rename to commons/models-json/gng_m2.json
diff --git a/commons/models/gng_m3.json b/commons/models-json/gng_m3.json
similarity index 100%
rename from commons/models/gng_m3.json
rename to commons/models-json/gng_m3.json
diff --git a/commons/models/gng_m4.json b/commons/models-json/gng_m4.json
similarity index 100%
rename from commons/models/gng_m4.json
rename to commons/models-json/gng_m4.json
diff --git a/commons/models/igt_orl.json b/commons/models-json/igt_orl.json
similarity index 100%
rename from commons/models/igt_orl.json
rename to commons/models-json/igt_orl.json
diff --git a/commons/models/igt_pvl_decay.json b/commons/models-json/igt_pvl_decay.json
similarity index 100%
rename from commons/models/igt_pvl_decay.json
rename to commons/models-json/igt_pvl_decay.json
diff --git a/commons/models/igt_pvl_delta.json b/commons/models-json/igt_pvl_delta.json
similarity index 100%
rename from commons/models/igt_pvl_delta.json
rename to commons/models-json/igt_pvl_delta.json
diff --git a/commons/models/igt_vpp.json b/commons/models-json/igt_vpp.json
similarity index 100%
rename from commons/models/igt_vpp.json
rename to commons/models-json/igt_vpp.json
diff --git a/commons/models/peer_ocu.json b/commons/models-json/peer_ocu.json
similarity index 100%
rename from commons/models/peer_ocu.json
rename to commons/models-json/peer_ocu.json
diff --git a/commons/models/prl_ewa.json b/commons/models-json/prl_ewa.json
similarity index 100%
rename from commons/models/prl_ewa.json
rename to commons/models-json/prl_ewa.json
diff --git a/commons/models/prl_fictitious.json b/commons/models-json/prl_fictitious.json
similarity index 100%
rename from commons/models/prl_fictitious.json
rename to commons/models-json/prl_fictitious.json
diff --git a/commons/models/prl_fictitious_multipleB.json b/commons/models-json/prl_fictitious_multipleB.json
similarity index 100%
rename from commons/models/prl_fictitious_multipleB.json
rename to commons/models-json/prl_fictitious_multipleB.json
diff --git a/commons/models/prl_fictitious_rp.json b/commons/models-json/prl_fictitious_rp.json
similarity index 100%
rename from commons/models/prl_fictitious_rp.json
rename to commons/models-json/prl_fictitious_rp.json
diff --git a/commons/models/prl_fictitious_rp_woa.json b/commons/models-json/prl_fictitious_rp_woa.json
similarity index 100%
rename from commons/models/prl_fictitious_rp_woa.json
rename to commons/models-json/prl_fictitious_rp_woa.json
diff --git a/commons/models/prl_fictitious_woa.json b/commons/models-json/prl_fictitious_woa.json
similarity index 100%
rename from commons/models/prl_fictitious_woa.json
rename to commons/models-json/prl_fictitious_woa.json
diff --git a/commons/models/prl_rp.json b/commons/models-json/prl_rp.json
similarity index 100%
rename from commons/models/prl_rp.json
rename to commons/models-json/prl_rp.json
diff --git a/commons/models/prl_rp_multipleB.json b/commons/models-json/prl_rp_multipleB.json
similarity index 100%
rename from commons/models/prl_rp_multipleB.json
rename to commons/models-json/prl_rp_multipleB.json
diff --git a/commons/models/pst_gainloss_Q.json b/commons/models-json/pst_gainloss_Q.json
similarity index 100%
rename from commons/models/pst_gainloss_Q.json
rename to commons/models-json/pst_gainloss_Q.json
diff --git a/commons/models/ra_noLA.json b/commons/models-json/ra_noLA.json
similarity index 100%
rename from commons/models/ra_noLA.json
rename to commons/models-json/ra_noLA.json
diff --git a/commons/models/ra_noRA.json b/commons/models-json/ra_noRA.json
similarity index 100%
rename from commons/models/ra_noRA.json
rename to commons/models-json/ra_noRA.json
diff --git a/commons/models/ra_prospect.json b/commons/models-json/ra_prospect.json
similarity index 100%
rename from commons/models/ra_prospect.json
rename to commons/models-json/ra_prospect.json
diff --git a/commons/models/rdt_happiness.json b/commons/models-json/rdt_happiness.json
similarity index 100%
rename from commons/models/rdt_happiness.json
rename to commons/models-json/rdt_happiness.json
diff --git a/commons/models/ts_par4.json b/commons/models-json/ts_par4.json
similarity index 100%
rename from commons/models/ts_par4.json
rename to commons/models-json/ts_par4.json
diff --git a/commons/models/ts_par6.json b/commons/models-json/ts_par6.json
similarity index 100%
rename from commons/models/ts_par6.json
rename to commons/models-json/ts_par6.json
diff --git a/commons/models/ts_par7.json b/commons/models-json/ts_par7.json
similarity index 100%
rename from commons/models/ts_par7.json
rename to commons/models-json/ts_par7.json
diff --git a/commons/models/ug_bayes.json b/commons/models-json/ug_bayes.json
similarity index 100%
rename from commons/models/ug_bayes.json
rename to commons/models-json/ug_bayes.json
diff --git a/commons/models/ug_delta.json b/commons/models-json/ug_delta.json
similarity index 100%
rename from commons/models/ug_delta.json
rename to commons/models-json/ug_delta.json
diff --git a/commons/models/wcs_sql.json b/commons/models-json/wcs_sql.json
similarity index 100%
rename from commons/models/wcs_sql.json
rename to commons/models-json/wcs_sql.json
diff --git a/commons/models/bandit2arm_delta.yml b/commons/models/bandit2arm_delta.yml
new file mode 100644
index 00000000..f6a07b27
--- /dev/null
+++ b/commons/models/bandit2arm_delta.yml
@@ -0,0 +1,35 @@
+task_name:
+  code: bandit2arm
+  desc: 2-Armed Bandit Task
+  cite:
+  - 'Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010).
+    A choice prediction competition: Choices from experience and from description.
+    Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683'
+  - Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience
+    and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539.
+    http://doi.org/10.1111/j.0956-7976.2004.00715.x
+model_name:
+  code: delta
+  desc: Rescorla-Wagner (Delta) Model
+  cite:
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1 or 2.'
+  outcome: Integer value representing the outcome of the given trial (where reward
+    == 1, and loss == -1).
+parameters:
+  A:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm2_kalman_filter.yml b/commons/models/bandit4arm2_kalman_filter.yml
new file mode 100644
index 00000000..cf52cbc6
--- /dev/null
+++ b/commons/models/bandit4arm2_kalman_filter.yml
@@ -0,0 +1,47 @@
+task_name:
+  code: bandit4arm2
+  desc: 4-Armed Bandit Task (modified)
+  cite:
+model_name:
+  code: kalman_filter
+  desc: Kalman Filter
+  cite:
+  - Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical
+    substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Yoonseo Zoh
+  email: zohyos7@gmail.com
+  link: https://zohyos7.github.io
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  outcome: Integer value representing the outcome of the given trial (where reward
+    == 1, and loss == -1).
+parameters:
+  lambda:
+    desc: decay factor
+    info: [0, 0.9, 1]
+  theta:
+    desc: decay center
+    info: [0, 50, 100]
+  beta:
+    desc: inverse softmax temperature
+    info: [0, 0.1, 1]
+  mu0:
+    desc: anticipated initial mean of all 4 options
+    info: [0, 85, 100]
+  sigma0:
+    desc: anticipated initial sd (uncertainty factor) of all 4 options
+    info: [0, 6, 15]
+  sigmaD:
+    desc: sd of diffusion noise
+    info: [0, 3, 15]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm_2par_lapse.yml b/commons/models/bandit4arm_2par_lapse.yml
new file mode 100644
index 00000000..7b540306
--- /dev/null
+++ b/commons/models/bandit4arm_2par_lapse.yml
@@ -0,0 +1,38 @@
+task_name:
+  code: bandit4arm
+  desc: 4-Armed Bandit Task
+  cite:
+model_name:
+  code: 2par_lapse
+  desc: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity),
+    and P (punishment sensitivity). But with xi (noise)
+  cite:
+  - Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making
+    under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  gain: Floating point value representing the amount of currency won on the given
+    trial (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on the given
+    trial (e.g. 0, -50).
+parameters:
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm_4par.yml b/commons/models/bandit4arm_4par.yml
new file mode 100644
index 00000000..edb934de
--- /dev/null
+++ b/commons/models/bandit4arm_4par.yml
@@ -0,0 +1,40 @@
+task_name:
+  code: bandit4arm
+  desc: 4-Armed Bandit Task
+  cite:
+model_name:
+  code: 4par
+  desc: 4 Parameter Model, without C (choice perseveration)
+  cite:
+  - Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward
+    Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  gain: Floating point value representing the amount of currency won on the given
+    trial (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on the given
+    trial (e.g. 0, -50).
+parameters:
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  R:
+    desc: reward sensitivity
+    info: [0, 1, 30]
+  P:
+    desc: punishment sensitivity
+    info: [0, 1, 30]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm_lapse.yml b/commons/models/bandit4arm_lapse.yml
new file mode 100644
index 00000000..7afc9696
--- /dev/null
+++ b/commons/models/bandit4arm_lapse.yml
@@ -0,0 +1,43 @@
+task_name:
+  code: bandit4arm
+  desc: 4-Armed Bandit Task
+  cite:
+model_name:
+  code: lapse
+  desc: 5 Parameter Model, without C (choice perseveration) but with xi (noise)
+  cite:
+  - Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward
+    Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  gain: Floating point value representing the amount of currency won on the given
+    trial (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on the given
+    trial (e.g. 0, -50).
+parameters:
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  R:
+    desc: reward sensitivity
+    info: [0, 1, 30]
+  P:
+    desc: punishment sensitivity
+    info: [0, 1, 30]
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm_lapse_decay.yml b/commons/models/bandit4arm_lapse_decay.yml
new file mode 100644
index 00000000..f8b0d55f
--- /dev/null
+++ b/commons/models/bandit4arm_lapse_decay.yml
@@ -0,0 +1,47 @@
+task_name:
+  code: bandit4arm
+  desc: 4-Armed Bandit Task
+  cite:
+model_name:
+  code: lapse_decay
+  desc: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added
+    decay rate (Niv et al., 2015, J. Neuro).
+  cite:
+  - Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making
+    under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  gain: Floating point value representing the amount of currency won on the given
+    trial (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on the given
+    trial (e.g. 0, -50).
+parameters:
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  R:
+    desc: reward sensitivity
+    info: [0, 1, 30]
+  P:
+    desc: punishment sensitivity
+    info: [0, 1, 30]
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+  d:
+    desc: decay rate
+    info: [0, 0.1, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bandit4arm_singleA_lapse.yml b/commons/models/bandit4arm_singleA_lapse.yml
new file mode 100644
index 00000000..11d4ac9a
--- /dev/null
+++ b/commons/models/bandit4arm_singleA_lapse.yml
@@ -0,0 +1,41 @@
+task_name:
+  code: bandit4arm
+  desc: 4-Armed Bandit Task
+  cite:
+model_name:
+  code: singleA_lapse
+  desc: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single
+    learning rate both for R and P.
+  cite:
+  - Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making
+    under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on the given trial: 1, 2,
+    3, or 4.'
+  gain: Floating point value representing the amount of currency won on the given
+    trial (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on the given
+    trial (e.g. 0, -50).
+parameters:
+  A:
+    desc: learning rate
+    info: [0, 0.1, 1]
+  R:
+    desc: reward sensitivity
+    info: [0, 1, 30]
+  P:
+    desc: punishment sensitivity
+    info: [0, 1, 30]
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/bart_par4.yml b/commons/models/bart_par4.yml
new file mode 100644
index 00000000..bf4e29e2
--- /dev/null
+++ b/commons/models/bart_par4.yml
@@ -0,0 +1,55 @@
+task_name:
+  code: bart
+  desc: Balloon Analogue Risk Task
+  cite:
+model_name:
+  code: par4
+  desc: Re-parameterized version of BART model with 4 parameters
+  cite:
+  - 'van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model
+    decomposition of the BART: Assessment and application. Journal of Mathematical
+    Psychology, 55(1), 94-105.'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+- name: Jaeyeong Yang
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Ayoung Lee
+  email: aylee2008@naver.com
+  link: https://ccs-lab.github.io/team/ayoung-lee/
+- name: Jeongbin Oh
+  email: ows0104@gmail.com
+  link: https://ccs-lab.github.io/team/jeongbin-oh/
+- name: Jiyoon Lee
+  email: nicole.lee2001@gmail.com
+  link: https://ccs-lab.github.io/team/jiyoon-lee/
+- name: Junha Jang
+  email: andy627robo@naver.com
+  link: https://ccs-lab.github.io/team/junha-jang/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  pumps: The number of pumps.
+  explosion: '0: intact, 1: burst'
+parameters:
+  phi:
+    desc: prior belief of balloon not bursting
+    info: [0, 0.5, 1]
+  eta:
+    desc: updating rate
+    info: [0, 1, Inf]
+  gam:
+    desc: risk-taking parameter
+    info: [0, 1, Inf]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, Inf]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/choiceRT_ddm.yml b/commons/models/choiceRT_ddm.yml
new file mode 100644
index 00000000..d4c848f1
--- /dev/null
+++ b/commons/models/choiceRT_ddm.yml
@@ -0,0 +1,47 @@
+task_name:
+  code: choiceRT
+  desc: Choice Reaction Time Task
+  cite:
+model_name:
+  code: ddm
+  desc: Drift Diffusion Model
+  cite:
+  - Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2),
+    59-108. http://doi.org/10.1037/0033-295X.85.2.59
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+- Note that this implementation is NOT the full Drift Diffusion Model as described
+  in Ratcliff (1978). This implementation estimates the drift rate, boundary separation,
+  starting point, and non-decision time; but not the between- and within-trial variances
+  in these parameters.
+- Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew
+  Ellis, and potentially others on the Stan mailing list.
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Choice made for the current trial, coded as 1/2 to indicate lower/upper
+    boundary or left/right choices (e.g., 1 1 1 2 1 2).
+  RT: Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383
+    0.314 0.309, etc.).
+parameters:
+  alpha:
+    desc: boundary separation
+    info: [0, 0.5, Inf]
+  beta:
+    desc: bias
+    info: [0, 0.5, 1]
+  delta:
+    desc: drift rate
+    info: [0, 0.5, Inf]
+  tau:
+    desc: non-decision time
+    info: [0, 0.15, 1]
+regressors:
+postpreds:
+additional_args:
+- code: RTbound
+  default: 0.1
+  desc: Floating point value representing the lower bound (i.e., minimum allowed)
+    reaction time. Defaults to 0.1 (100 milliseconds).
diff --git a/commons/models/choiceRT_ddm_single.yml b/commons/models/choiceRT_ddm_single.yml
new file mode 100644
index 00000000..b5ed8e76
--- /dev/null
+++ b/commons/models/choiceRT_ddm_single.yml
@@ -0,0 +1,47 @@
+task_name:
+  code: choiceRT
+  desc: Choice Reaction Time Task
+  cite:
+model_name:
+  code: ddm
+  desc: Drift Diffusion Model
+  cite:
+  - Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2),
+    59-108. http://doi.org/10.1037/0033-295X.85.2.59
+model_type:
+  code: single
+  desc: Individual
+notes:
+- Note that this implementation is NOT the full Drift Diffusion Model as described
+  in Ratcliff (1978). This implementation estimates the drift rate, boundary separation,
+  starting point, and non-decision time; but not the between- and within-trial variances
+  in these parameters.
+- Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew
+  Ellis, and potentially others on the Stan mailing list.
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Choice made for the current trial, coded as 1/2 to indicate lower/upper
+    boundary or left/right choices (e.g., 1 1 1 2 1 2).
+  RT: Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383
+    0.314 0.309, etc.).
+parameters:
+  alpha:
+    desc: boundary separation
+    info: [0, 0.5, Inf]
+  beta:
+    desc: bias
+    info: [0, 0.5, 1]
+  delta:
+    desc: drift rate
+    info: [0, 0.5, Inf]
+  tau:
+    desc: non-decision time
+    info: [0, 0.15, 1]
+regressors:
+postpreds:
+additional_args:
+- code: RTbound
+  default: 0.1
+  desc: Floating point value representing the lower bound (i.e., minimum allowed)
+    reaction time. Defaults to 0.1 (100 milliseconds).
diff --git a/commons/models/cra_exp.yml b/commons/models/cra_exp.yml
new file mode 100644
index 00000000..520b09e9
--- /dev/null
+++ b/commons/models/cra_exp.yml
@@ -0,0 +1,45 @@
+task_name:
+  code: cra
+  desc: Choice Under Risk and Ambiguity Task
+  cite:
+model_name:
+  code: exp
+  desc: Exponential Subjective Value Model
+  cite:
+  - Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems
+    responding to degrees of uncertainty in human decision-making. Science, 310(5754),
+    1680-1683. https://doi.org/10.1126/science.1115327
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  prob: Objective probability of the variable lottery.
+  ambig: Ambiguity level of the variable lottery (0 for risky lottery; greater than
+    0 for ambiguous lottery).
+  reward_var: Amount of reward in variable lottery. Assumed to be greater than zero.
+  reward_fix: Amount of reward in fixed lottery. Assumed to be greater than zero.
+  choice: If the variable lottery was selected, choice == 1; otherwise choice == 0.
+parameters:
+  alpha:
+    desc: risk attitude
+    info: [0, 1, 2]
+  beta:
+    desc: ambiguity attitude
+    info: [-Inf, 0, Inf]
+  gamma:
+    desc: inverse temperature
+    info: [0, 1, Inf]
+regressors:
+  sv: 2
+  sv_fix: 2
+  sv_var: 2
+  p_var: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/cra_linear.yml b/commons/models/cra_linear.yml
new file mode 100644
index 00000000..22d036fb
--- /dev/null
+++ b/commons/models/cra_linear.yml
@@ -0,0 +1,45 @@
+task_name:
+  code: cra
+  desc: Choice Under Risk and Ambiguity Task
+  cite:
+model_name:
+  code: linear
+  desc: Linear Subjective Value Model
+  cite:
+  - Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010).
+    Neural representation of subjective value under risk and ambiguity. Journal of
+    Neurophysiology, 103(2), 1036-1047.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  prob: Objective probability of the variable lottery.
+  ambig: Ambiguity level of the variable lottery (0 for risky lottery; greater than
+    0 for ambiguous lottery).
+  reward_var: Amount of reward in variable lottery. Assumed to be greater than zero.
+  reward_fix: Amount of reward in fixed lottery. Assumed to be greater than zero.
+  choice: If the variable lottery was selected, choice == 1; otherwise choice == 0.
+parameters:
+  alpha:
+    desc: risk attitude
+    info: [0, 1, 2]
+  beta:
+    desc: ambiguity attitude
+    info: [-Inf, 0, Inf]
+  gamma:
+    desc: inverse temperature
+    info: [0, 1, Inf]
+regressors:
+  sv: 2
+  sv_fix: 2
+  sv_var: 2
+  p_var: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dbdm_prob_weight.yml b/commons/models/dbdm_prob_weight.yml
new file mode 100644
index 00000000..6e5bf329
--- /dev/null
+++ b/commons/models/dbdm_prob_weight.yml
@@ -0,0 +1,52 @@
+task_name:
+  code: dbdm
+  desc: Description Based Decision Making Task
+  cite:
+model_name:
+  code: prob_weight
+  desc: Probability Weight Function
+  cite:
+  - 'Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere,
+    C. (2010). A choice prediction competition: Choices from experience and from description.
+    Journal of Behavioral Decision Making, 23(1), 15-47.'
+  - Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience
+    and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.
+  - Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence
+    from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Yoonseo Zoh
+  email: zohyos7@gmail.com
+  link: https://ccs-lab.github.io/team/yoonseo-zoh/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  opt1hprob: Possibility of getting higher value of outcome (opt1hval) when choosing
+    option 1.
+  opt2hprob: Possibility of getting higher value of outcome (opt2hval) when choosing
+    option 2.
+  opt1hval: Possible (with opt1hprob probability) outcome of option 1.
+  opt1lval: Possible (with (1 - opt1hprob) probability) outcome of option 1.
+  opt2hval: Possible (with opt2hprob probability) outcome of option 2.
+  opt2lval: Possible (with (1 - opt2hprob) probability) outcome of option 2.
+  choice: If option 1 was selected, choice == 1; else if option 2 was selected, choice
+    == 2.
+parameters:
+  tau:
+    desc: probability weight function
+    info: [0, 0.8, 1]
+  rho:
+    desc: subjective utility function
+    info: [0, 0.7, 2]
+  lambda:
+    desc: loss aversion parameter
+    info: [0, 2.5, 5]
+  beta:
+    desc: inverse softmax temperature
+    info: [0, 0.2, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dd_cs.yml b/commons/models/dd_cs.yml
new file mode 100644
index 00000000..5d3e0802
--- /dev/null
+++ b/commons/models/dd_cs.yml
@@ -0,0 +1,41 @@
+task_name:
+  code: dd
+  desc: Delay Discounting Task
+  cite:
+model_name:
+  code: cs
+  desc: Constant-Sensitivity (CS) Model
+  cite:
+  - 'Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity
+    and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  delay_later: An integer representing the delayed days for the later option (e.g.
+    1, 6, 28).
+  amount_later: A floating point number representing the amount for the later option
+    (e.g. 10.5, 13.4, 30.9).
+  delay_sooner: An integer representing the delayed days for the sooner option (e.g.
+    0).
+  amount_sooner: A floating point number representing the amount for the sooner option
+    (e.g. 10).
+  choice: If amount_later was selected, choice == 1; else if amount_sooner was selected,
+    choice == 0.
+parameters:
+  r:
+    desc: exponential discounting rate
+    info: [0, 0.1, 1]
+  s:
+    desc: impatience
+    info: [0, 1, 10]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dd_cs_single.yml b/commons/models/dd_cs_single.yml
new file mode 100644
index 00000000..b1799bf9
--- /dev/null
+++ b/commons/models/dd_cs_single.yml
@@ -0,0 +1,41 @@
+task_name:
+  code: dd
+  desc: Delay Discounting Task
+  cite:
+model_name:
+  code: cs
+  desc: Constant-Sensitivity (CS) Model
+  cite:
+  - 'Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity
+    and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671'
+model_type:
+  code: single
+  desc: Individual
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  delay_later: An integer representing the delayed days for the later option (e.g.
+    1, 6, 28).
+  amount_later: A floating point number representing the amount for the later option
+    (e.g. 10.5, 13.4, 30.9).
+  delay_sooner: An integer representing the delayed days for the sooner option (e.g.
+    0).
+  amount_sooner: A floating point number representing the amount for the sooner option
+    (e.g. 10).
+  choice: If amount_later was selected, choice == 1; else if amount_sooner was selected,
+    choice == 0.
+parameters:
+  r:
+    desc: exponential discounting rate
+    info: [0, 0.1, 1]
+  s:
+    desc: impatience
+    info: [0, 1, 10]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dd_exp.yml b/commons/models/dd_exp.yml
new file mode 100644
index 00000000..47b26407
--- /dev/null
+++ b/commons/models/dd_exp.yml
@@ -0,0 +1,38 @@
+task_name:
+  code: dd
+  desc: Delay Discounting Task
+  cite:
+model_name:
+  code: exp
+  desc: Exponential Model
+  cite:
+  - Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic
+    Studies, 4(2), 155. http://doi.org/10.2307/2967612
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  delay_later: An integer representing the delayed days for the later option (e.g.
+    1, 6, 28).
+  amount_later: A floating point number representing the amount for the later option
+    (e.g. 10.5, 13.4, 30.9).
+  delay_sooner: An integer representing the delayed days for the sooner option (e.g.
+    0).
+  amount_sooner: A floating point number representing the amount for the sooner option
+    (e.g. 10).
+  choice: If amount_later was selected, choice == 1; else if amount_sooner was selected,
+    choice == 0.
+parameters:
+  r:
+    desc: exponential discounting rate
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dd_hyperbolic.yml b/commons/models/dd_hyperbolic.yml
new file mode 100644
index 00000000..6425cd08
--- /dev/null
+++ b/commons/models/dd_hyperbolic.yml
@@ -0,0 +1,37 @@
+task_name:
+  code: dd
+  desc: Delay Discounting Task
+  cite:
+model_name:
+  code: hyperbolic
+  desc: Hyperbolic Model
+  cite:
+  - Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  delay_later: An integer representing the delayed days for the later option (e.g.
+    1, 6, 28).
+  amount_later: A floating point number representing the amount for the later option
+    (e.g. 10.5, 13.4, 30.9).
+  delay_sooner: An integer representing the delayed days for the sooner option (e.g.
+    0).
+  amount_sooner: A floating point number representing the amount for the sooner option
+    (e.g. 10).
+  choice: If amount_later was selected, choice == 1; else if amount_sooner was selected,
+    choice == 0.
+parameters:
+  k:
+    desc: discounting rate
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/dd_hyperbolic_single.yml b/commons/models/dd_hyperbolic_single.yml
new file mode 100644
index 00000000..c7555926
--- /dev/null
+++ b/commons/models/dd_hyperbolic_single.yml
@@ -0,0 +1,37 @@
+task_name:
+  code: dd
+  desc: Delay Discounting Task
+  cite:
+model_name:
+  code: hyperbolic
+  desc: Hyperbolic Model
+  cite:
+  - Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.
+model_type:
+  code: single
+  desc: Individual
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  delay_later: An integer representing the delayed days for the later option (e.g.
+    1, 6, 28).
+  amount_later: A floating point number representing the amount for the later option
+    (e.g. 10.5, 13.4, 30.9).
+  delay_sooner: An integer representing the delayed days for the sooner option (e.g.
+    0).
+  amount_sooner: A floating point number representing the amount for the sooner option
+    (e.g. 10).
+  choice: If amount_later was selected, choice == 1; else if amount_sooner was selected,
+    choice == 0.
+parameters:
+  k:
+    desc: discounting rate
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 5]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/gng_m1.yml b/commons/models/gng_m1.yml
new file mode 100644
index 00000000..97304b17
--- /dev/null
+++ b/commons/models/gng_m1.yml
@@ -0,0 +1,41 @@
+task_name:
+  code: gng
+  desc: Orthogonalized Go/Nogo Task
+  cite:
+model_name:
+  code: m1
+  desc: RW + noise
+  cite:
+  - 'Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan,
+    R. J. (2012). Go and no-go learning in reward and punishment: Interactions between
+    affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  cue: 'Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.'
+  keyPressed: Binary value representing the subject's response for that trial (where
+    Press == 1; No press == 0).
+  outcome: Ternary value representing the outcome of that trial (where Positive feedback
+    == 1; Neutral feedback == 0; Negative feedback == -1).
+parameters:
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+  ep:
+    desc: learning rate
+    info: [0, 0.2, 1]
+  rho:
+    desc: effective size
+    info: [0, exp(2), Inf]
+regressors:
+  Qgo: 2
+  Qnogo: 2
+  Wgo: 2
+  Wnogo: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/gng_m2.yml b/commons/models/gng_m2.yml
new file mode 100644
index 00000000..81907d02
--- /dev/null
+++ b/commons/models/gng_m2.yml
@@ -0,0 +1,44 @@
+task_name:
+  code: gng
+  desc: Orthogonalized Go/Nogo Task
+  cite:
+model_name:
+  code: m2
+  desc: RW + noise + bias
+  cite:
+  - 'Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan,
+    R. J. (2012). Go and no-go learning in reward and punishment: Interactions between
+    affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  cue: 'Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.'
+  keyPressed: Binary value representing the subject's response for that trial (where
+    Press == 1; No press == 0).
+  outcome: Ternary value representing the outcome of that trial (where Positive feedback
+    == 1; Neutral feedback == 0; Negative feedback == -1).
+parameters:
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+  ep:
+    desc: learning rate
+    info: [0, 0.2, 1]
+  b:
+    desc: action bias
+    info: [-Inf, 0, Inf]
+  rho:
+    desc: effective size
+    info: [0, exp(2), Inf]
+regressors:
+  Qgo: 2
+  Qnogo: 2
+  Wgo: 2
+  Wnogo: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/gng_m3.yml b/commons/models/gng_m3.yml
new file mode 100644
index 00000000..37e71625
--- /dev/null
+++ b/commons/models/gng_m3.yml
@@ -0,0 +1,48 @@
+task_name:
+  code: gng
+  desc: Orthogonalized Go/Nogo Task
+  cite:
+model_name:
+  code: m3
+  desc: RW + noise + bias + pi
+  cite:
+  - 'Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan,
+    R. J. (2012). Go and no-go learning in reward and punishment: Interactions between
+    affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  cue: 'Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.'
+  keyPressed: Binary value representing the subject's response for that trial (where
+    Press == 1; No press == 0).
+  outcome: Ternary value representing the outcome of that trial (where Positive feedback
+    == 1; Neutral feedback == 0; Negative feedback == -1).
+parameters:
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+  ep:
+    desc: learning rate
+    info: [0, 0.2, 1]
+  b:
+    desc: action bias
+    info: [-Inf, 0, Inf]
+  pi:
+    desc: Pavlovian bias
+    info: [-Inf, 0, Inf]
+  rho:
+    desc: effective size
+    info: [0, exp(2), Inf]
+regressors:
+  Qgo: 2
+  Qnogo: 2
+  Wgo: 2
+  Wnogo: 2
+  SV: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/gng_m4.yml b/commons/models/gng_m4.yml
new file mode 100644
index 00000000..621a0938
--- /dev/null
+++ b/commons/models/gng_m4.yml
@@ -0,0 +1,51 @@
+task_name:
+  code: gng
+  desc: Orthogonalized Go/Nogo Task
+  cite:
+model_name:
+  code: m4
+  desc: RW (rew/pun) + noise + bias + pi
+  cite:
+  - Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013).
+    Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19),
+    8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  cue: 'Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.'
+  keyPressed: Binary value representing the subject's response for that trial (where
+    Press == 1; No press == 0).
+  outcome: Ternary value representing the outcome of that trial (where Positive feedback
+    == 1; Neutral feedback == 0; Negative feedback == -1).
+parameters:
+  xi:
+    desc: noise
+    info: [0, 0.1, 1]
+  ep:
+    desc: learning rate
+    info: [0, 0.2, 1]
+  b:
+    desc: action bias
+    info: [-Inf, 0, Inf]
+  pi:
+    desc: Pavlovian bias
+    info: [-Inf, 0, Inf]
+  rhoRew:
+    desc: reward sensitivity
+    info: [0, exp(2), Inf]
+  rhoPun:
+    desc: punishment sensitivity
+    info: [0, exp(2), Inf]
+regressors:
+  Qgo: 2
+  Qnogo: 2
+  Wgo: 2
+  Wnogo: 2
+  SV: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/igt_orl.yml b/commons/models/igt_orl.yml
new file mode 100644
index 00000000..d80ac658
--- /dev/null
+++ b/commons/models/igt_orl.yml
@@ -0,0 +1,54 @@
+task_name:
+  code: igt
+  desc: Iowa Gambling Task
+  cite:
+  - Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision
+    learning models using the generalization criterion method. Cognitive Science,
+    32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+model_name:
+  code: orl
+  desc: Outcome-Representation Learning Model
+  cite:
+  - 'Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning
+    Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive
+    Science. https://doi.org/10.1111/cogs.12688'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Nate Haines
+  email: haines.175@osu.edu
+  link: https://ccs-lab.github.io/team/nate-haines/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Integer indicating which deck was chosen on that trial (where A==1, B==2,
+    C==3, and D==4).
+  gain: Floating point value representing the amount of currency won on that trial
+    (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on that trial
+    (e.g. 0, -50).
+parameters:
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  K:
+    desc: perseverance decay
+    info: [0, 0.1, 5]
+  betaF:
+    desc: outcome frequency weight
+    info: [-Inf, 0.1, Inf]
+  betaP:
+    desc: perseverance weight
+    info: [-Inf, 1, Inf]
+regressors:
+postpreds:
+- y_pred
+additional_args:
+- code: payscale
+  default: 100
+  desc: Raw payoffs within data are divided by this number. Used for scaling data.
+    Defaults to 100.
diff --git a/commons/models/igt_pvl_decay.yml b/commons/models/igt_pvl_decay.yml
new file mode 100644
index 00000000..7d2c172a
--- /dev/null
+++ b/commons/models/igt_pvl_decay.yml
@@ -0,0 +1,49 @@
+task_name:
+  code: igt
+  desc: Iowa Gambling Task
+  cite:
+  - Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision
+    learning models using the generalization criterion method. Cognitive Science,
+    32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+model_name:
+  code: pvl_decay
+  desc: Prospect Valence Learning (PVL) Decay-RI
+  cite:
+  - 'Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara,
+    A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in
+    protracted abstinence: evidence from computational modeling with pure users. Frontiers
+    in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Integer indicating which deck was chosen on that trial (where A==1, B==2,
+    C==3, and D==4).
+  gain: Floating point value representing the amount of currency won on that trial
+    (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on that trial
+    (e.g. 0, -50).
+parameters:
+  A:
+    desc: decay rate
+    info: [0, 0.5, 1]
+  alpha:
+    desc: outcome sensitivity
+    info: [0, 0.5, 2]
+  cons:
+    desc: response consistency
+    info: [0, 1, 5]
+  lambda:
+    desc: loss aversion
+    info: [0, 1, 10]
+regressors:
+postpreds:
+- y_pred
+additional_args:
+- code: payscale
+  default: 100
+  desc: Raw payoffs within data are divided by this number. Used for scaling data.
+    Defaults to 100.
diff --git a/commons/models/igt_pvl_delta.yml b/commons/models/igt_pvl_delta.yml
new file mode 100644
index 00000000..7f19f6bc
--- /dev/null
+++ b/commons/models/igt_pvl_delta.yml
@@ -0,0 +1,48 @@
+task_name:
+  code: igt
+  desc: Iowa Gambling Task
+  cite:
+  - Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision
+    learning models using the generalization criterion method. Cognitive Science,
+    32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+model_name:
+  code: pvl_delta
+  desc: Prospect Valence Learning (PVL) Delta
+  cite:
+  - Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision
+    learning models using the generalization criterion method. Cognitive Science,
+    32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Integer indicating which deck was chosen on that trial (where A==1, B==2,
+    C==3, and D==4).
+  gain: Floating point value representing the amount of currency won on that trial
+    (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on that trial
+    (e.g. 0, -50).
+parameters:
+  A:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  alpha:
+    desc: outcome sensitivity
+    info: [0, 0.5, 2]
+  cons:
+    desc: response consistency
+    info: [0, 1, 5]
+  lambda:
+    desc: loss aversion
+    info: [0, 1, 10]
+regressors:
+postpreds:
+- y_pred
+additional_args:
+- code: payscale
+  default: 100
+  desc: Raw payoffs within data are divided by this number. Used for scaling data.
+    Defaults to 100.
diff --git a/commons/models/igt_vpp.yml b/commons/models/igt_vpp.yml
new file mode 100644
index 00000000..2028b2e7
--- /dev/null
+++ b/commons/models/igt_vpp.yml
@@ -0,0 +1,60 @@
+task_name:
+  code: igt
+  desc: Iowa Gambling Task
+  cite:
+  - Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision
+    learning models using the generalization criterion method. Cognitive Science,
+    32(8), 1376-1402. http://doi.org/10.1080/03640210802352992
+model_name:
+  code: vpp
+  desc: Value-Plus-Perseverance
+  cite:
+  - 'Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning
+    and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal
+    of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: Integer indicating which deck was chosen on that trial (where A==1, B==2,
+    C==3, and D==4).
+  gain: Floating point value representing the amount of currency won on that trial
+    (e.g. 50, 100).
+  loss: Floating point value representing the amount of currency lost on that trial
+    (e.g. 0, -50).
+parameters:
+  A:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  alpha:
+    desc: outcome sensitivity
+    info: [0, 0.5, 2]
+  cons:
+    desc: response consistency
+    info: [0, 1, 5]
+  lambda:
+    desc: loss aversion
+    info: [0, 1, 10]
+  epP:
+    desc: gain impact
+    info: [-Inf, 0, Inf]
+  epN:
+    desc: loss impact
+    info: [-Inf, 0, Inf]
+  K:
+    desc: decay rate
+    info: [0, 0.5, 1]
+  w:
+    desc: RL weight
+    info: [0, 0.5, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
+- code: payscale
+  default: 100
+  desc: Raw payoffs within data are divided by this number. Used for scaling data.
+    Defaults to 100.
diff --git a/commons/models/peer_ocu.yml b/commons/models/peer_ocu.yml
new file mode 100644
index 00000000..85ca3daa
--- /dev/null
+++ b/commons/models/peer_ocu.yml
@@ -0,0 +1,42 @@
+task_name:
+  code: peer
+  desc: Peer Influence Task
+  cite:
+  - Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015).
+    Social signals of safety and risk confer utility and have asymmetric effects on
+    observers' choices. Nature Neuroscience, 18(6), 912-916.
+model_name:
+  code: ocu
+  desc: Other-Conferred Utility (OCU) Model
+  cite:
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  condition: '0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).'
+  p_gamble: Probability of receiving a high payoff (same for both options).
+  safe_Hpayoff: High payoff of the safe option.
+  safe_Lpayoff: Low payoff of the safe option.
+  risky_Hpayoff: High payoff of the risky option.
+  risky_Lpayoff: Low payoff of the risky option.
+  choice: 'Which option was chosen? 0: safe, 1: risky.'
+parameters:
+  rho:
+    desc: risk preference
+    info: [0, 1, 2]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, Inf]
+  ocu:
+    desc: other-conferred utility
+    info: [-Inf, 0, Inf]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_ewa.yml b/commons/models/prl_ewa.yml
new file mode 100644
index 00000000..6d8a10a3
--- /dev/null
+++ b/commons/models/prl_ewa.yml
@@ -0,0 +1,45 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: ewa
+  desc: Experience-Weighted Attraction Model
+  cite:
+  - Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M.,
+    Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal
+    Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  phi:
+    desc: 1 - learning rate
+    info: [0, 0.5, 1]
+  rho:
+    desc: experience decay factor
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  ew_c: 2
+  ew_nc: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_fictitious.yml b/commons/models/prl_fictitious.yml
new file mode 100644
index 00000000..df371570
--- /dev/null
+++ b/commons/models/prl_fictitious.yml
@@ -0,0 +1,46 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: fictitious
+  desc: Fictitious Update Model
+  cite:
+  - Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for
+    Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related
+    Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  eta:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  alpha:
+    desc: indecision point
+    info: [-Inf, 0, Inf]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  pe_c: 2
+  pe_nc: 2
+  dv: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_fictitious_multipleB.yml b/commons/models/prl_fictitious_multipleB.yml
new file mode 100644
index 00000000..b3e4ee17
--- /dev/null
+++ b/commons/models/prl_fictitious_multipleB.yml
@@ -0,0 +1,47 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: fictitious
+  desc: Fictitious Update Model
+  cite:
+  - Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for
+    Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related
+    Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+model_type:
+  code: multipleB
+  desc: Multiple-Block Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  block: A unique identifier for each of the multiple blocks within each subject.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  eta:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  alpha:
+    desc: indecision point
+    info: [-Inf, 0, Inf]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 3
+  ev_nc: 3
+  pe_c: 3
+  pe_nc: 3
+  dv: 3
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_fictitious_rp.yml b/commons/models/prl_fictitious_rp.yml
new file mode 100644
index 00000000..c98312cc
--- /dev/null
+++ b/commons/models/prl_fictitious_rp.yml
@@ -0,0 +1,53 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: fictitious_rp
+  desc: Fictitious Update Model, with separate learning rates for positive and negative
+    prediction error (PE)
+  cite:
+  - Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for
+    Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related
+    Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+  - Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M.,
+    Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal
+    Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  eta_pos:
+    desc: learning rate, +PE
+    info: [0, 0.5, 1]
+  eta_neg:
+    desc: learning rate, -PE
+    info: [0, 0.5, 1]
+  alpha:
+    desc: indecision point
+    info: [-Inf, 0, Inf]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  pe_c: 2
+  pe_nc: 2
+  dv: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_fictitious_rp_woa.yml b/commons/models/prl_fictitious_rp_woa.yml
new file mode 100644
index 00000000..feae5d13
--- /dev/null
+++ b/commons/models/prl_fictitious_rp_woa.yml
@@ -0,0 +1,50 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: fictitious_rp_woa
+  desc: Fictitious Update Model, with separate learning rates for positive and negative
+    prediction error (PE), without alpha (indecision point)
+  cite:
+  - Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for
+    Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related
+    Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+  - Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M.,
+    Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal
+    Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  eta_pos:
+    desc: learning rate, +PE
+    info: [0, 0.5, 1]
+  eta_neg:
+    desc: learning rate, -PE
+    info: [0, 0.5, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  pe_c: 2
+  pe_nc: 2
+  dv: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_fictitious_woa.yml b/commons/models/prl_fictitious_woa.yml
new file mode 100644
index 00000000..e7903202
--- /dev/null
+++ b/commons/models/prl_fictitious_woa.yml
@@ -0,0 +1,43 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: fictitious_woa
+  desc: Fictitious Update Model, without alpha (indecision point)
+  cite:
+  - Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for
+    Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related
+    Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  eta:
+    desc: learning rate
+    info: [0, 0.5, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  pe_c: 2
+  pe_nc: 2
+  dv: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_rp.yml b/commons/models/prl_rp.yml
new file mode 100644
index 00000000..262d6e3f
--- /dev/null
+++ b/commons/models/prl_rp.yml
@@ -0,0 +1,44 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: rp
+  desc: Reward-Punishment Model
+  cite:
+  - Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M.,
+    Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal
+    Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 2
+  ev_nc: 2
+  pe: 2
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/prl_rp_multipleB.yml b/commons/models/prl_rp_multipleB.yml
new file mode 100644
index 00000000..b551b0aa
--- /dev/null
+++ b/commons/models/prl_rp_multipleB.yml
@@ -0,0 +1,45 @@
+task_name:
+  code: prl
+  desc: Probabilistic Reversal Learning Task
+  cite:
+model_name:
+  code: rp
+  desc: Reward-Punishment Model
+  cite:
+  - Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M.,
+    Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal
+    Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030
+model_type:
+  code: multipleB
+  desc: Multiple-Block Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang (for model-based regressors)
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+- name: Harhim Park (for model-based regressors)
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  block: A unique identifier for each of the multiple blocks within each subject.
+  choice: 'Integer value representing the option chosen on that trial: 1 or 2.'
+  outcome: Integer value representing the outcome of that trial (where reward == 1,
+    and loss == -1).
+parameters:
+  Apun:
+    desc: punishment learning rate
+    info: [0, 0.1, 1]
+  Arew:
+    desc: reward learning rate
+    info: [0, 0.1, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+  ev_c: 3
+  ev_nc: 3
+  pe: 3
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/pst_gainloss_Q.yml b/commons/models/pst_gainloss_Q.yml
new file mode 100644
index 00000000..f5efccc9
--- /dev/null
+++ b/commons/models/pst_gainloss_Q.yml
@@ -0,0 +1,45 @@
+task_name:
+  code: pst
+  desc: Probabilistic Selection Task
+  cite:
+model_name:
+  code: gainloss_Q
+  desc: Gain-Loss Q Learning Model
+  cite:
+  - Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E.
+    (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement
+    learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Jaeyeong Yang
+  email: jaeyeong.yang1125@gmail.com
+  link: https://ccs-lab.github.io/team/jaeyeong-yang/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  type: Two-digit number indicating which pair of stimuli were presented for that
+    trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented
+    stimulus for option1, while the digit on the right (ones-digit) indicates that
+    for option2. The reward probability for each stimulus type (1~6) is defined as 80\%
+    (type 1), 20\% (type 2), 70\% (type 3), 30\% (type 4), 60\% (type 5), and 40\%
+    (type 6). The modeling will still work even if different probabilities are used
+    for the stimuli; however, the total number of stimuli should be less than or equal to 6.
+  choice: Whether the subject chose the left option (option1) out of the given two
+    options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).
+  reward: Amount of reward earned as a result of the trial.
+parameters:
+  alpha_pos:
+    desc: learning rate for positive feedback
+    info: [0, 0.5, 1]
+  alpha_neg:
+    desc: learning rate for negative feedback
+    info: [0, 0.5, 1]
+  beta:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/ra_noLA.yml b/commons/models/ra_noLA.yml
new file mode 100644
index 00000000..a7d001fe
--- /dev/null
+++ b/commons/models/ra_noLA.yml
@@ -0,0 +1,35 @@
+task_name:
+  code: ra
+  desc: Risk Aversion Task
+  cite:
+model_name:
+  code: noLA
+  desc: Prospect Theory, without loss aversion (LA) parameter
+  cite:
+  - Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps,
+    E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals'
+    Loss Aversion. Proceedings of the National Academy of Sciences of the United States
+    of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  gain: Possible (50\%) gain outcome of a risky option (e.g. 9).
+  loss: Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).
+  cert: Guaranteed amount of a safe option. "cert" is assumed to be zero or
+    greater.
+  gamble: If gamble was taken, gamble == 1; else gamble == 0.
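The three ra_* specs (ra_noLA above, ra_noRA and ra_prospect below) share one Sokol-Hessner et al. (2009) valuation rule, which the rho and tau entries that follow parameterize. A worked sketch, assuming the standard prospect-theory form (loss aversion lam is fixed at 1 in the noLA variant, and rho at 1 in the noRA variant); the exact Stan parameterization may differ.

import math

def p_gamble(gain, loss, cert, rho, tau, lam=1.0):
    """Probability of accepting the 50/50 gamble over the sure amount."""
    ev_safe = cert ** rho                                       # cert assumed >= 0
    ev_gamble = 0.5 * (gain ** rho - lam * abs(loss) ** rho)    # loss may be coded 5 or -5
    return 1.0 / (1.0 + math.exp(-tau * (ev_gamble - ev_safe)))

For instance, p_gamble(9, -5, 0, rho=1.0, tau=1.0) evaluates the example values given in the column descriptions above and returns about 0.88.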
+parameters:
+  rho:
+    desc: risk aversion
+    info: [0, 1, 2]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 30]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/ra_noRA.yml b/commons/models/ra_noRA.yml
new file mode 100644
index 00000000..2082b6d1
--- /dev/null
+++ b/commons/models/ra_noRA.yml
@@ -0,0 +1,35 @@
+task_name:
+  code: ra
+  desc: Risk Aversion Task
+  cite:
+model_name:
+  code: noRA
+  desc: Prospect Theory, without risk aversion (RA) parameter
+  cite:
+  - Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps,
+    E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals'
+    Loss Aversion. Proceedings of the National Academy of Sciences of the United States
+    of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  gain: Possible (50\%) gain outcome of a risky option (e.g. 9).
+  loss: Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).
+  cert: Guaranteed amount of a safe option. "cert" is assumed to be zero or
+    greater.
+  gamble: If gamble was taken, gamble == 1; else gamble == 0.
+parameters:
+  lambda:
+    desc: loss aversion
+    info: [0, 1, 5]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 30]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/ra_prospect.yml b/commons/models/ra_prospect.yml
new file mode 100644
index 00000000..ac346240
--- /dev/null
+++ b/commons/models/ra_prospect.yml
@@ -0,0 +1,38 @@
+task_name:
+  code: ra
+  desc: Risk Aversion Task
+  cite:
+model_name:
+  code: prospect
+  desc: Prospect Theory
+  cite:
+  - Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps,
+    E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals'
+    Loss Aversion. Proceedings of the National Academy of Sciences of the United States
+    of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  gain: Possible (50\%) gain outcome of a risky option (e.g. 9).
+  loss: Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).
+  cert: Guaranteed amount of a safe option. "cert" is assumed to be zero or
+    greater.
+  gamble: If gamble was taken, gamble == 1; else gamble == 0.
+parameters:
+  rho:
+    desc: risk aversion
+    info: [0, 1, 2]
+  lambda:
+    desc: loss aversion
+    info: [0, 1, 5]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 30]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/rdt_happiness.yml b/commons/models/rdt_happiness.yml
new file mode 100644
index 00000000..9f37f8a0
--- /dev/null
+++ b/commons/models/rdt_happiness.yml
@@ -0,0 +1,52 @@
+task_name:
+  code: rdt
+  desc: Risky Decision Task
+  cite:
+model_name:
+  code: happiness
+  desc: Happiness Computational Model
+  cite:
+  - Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational
+    and neural model of momentary subjective well-being. Proceedings of the National
+    Academy of Sciences, 111(33), 12252-12257.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  gain: Possible (50\%) gain outcome of a risky option (e.g. 9).
+  loss: Possible (50\%) loss outcome of a risky option (e.g. 5, or -5).
+  cert: Guaranteed amount of a safe option.
+  type: Trial type (loss == -1, mixed == 0, gain == 1).
+  gamble: If gamble was taken, gamble == 1; else gamble == 0.
+  outcome: Result of the trial.
+  happy: Happiness score.
+  RT_happy: Reaction time for answering the happiness score.
+parameters:
+  w0:
+    desc: baseline
+    info: [-Inf, 1, Inf]
+  w1:
+    desc: weight of certain rewards
+    info: [-Inf, 1, Inf]
+  w2:
+    desc: weight of expected values
+    info: [-Inf, 1, Inf]
+  w3:
+    desc: weight of reward prediction errors
+    info: [-Inf, 1, Inf]
+  gam:
+    desc: forgetting factor
+    info: [0, 0.5, 1]
+  sig:
+    desc: standard deviation of error
+    info: [0, 1, Inf]
+regressors:
+postpreds:
+- y_pred
+additional_args:
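The w0-w3 and gam parameters just listed map onto the momentary well-being equation of Rutledge et al. (2014), in which recent certain rewards, expected values, and reward prediction errors are exponentially discounted by the forgetting factor. The sketch below illustrates that published equation (sig would be the standard deviation of Gaussian noise around the prediction); it is an illustration, not the package's Stan code.

import numpy as np

def predicted_happiness(certs, evs, rpes, w0, w1, w2, w3, gam):
    """Predicted happiness after trial t.

    certs: certain amounts chosen so far (0 on gamble trials)
    evs:   expected values of chosen gambles (0 on sure-option trials)
    rpes:  reward prediction errors, outcome minus EV (0 on sure-option trials)
    """
    t = len(certs)
    decay = gam ** np.arange(t - 1, -1, -1)   # gam**(t - j): recent trials weigh most
    return (w0
            + w1 * float(np.dot(decay, certs))
            + w2 * float(np.dot(decay, evs))
            + w3 * float(np.dot(decay, rpes)))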
diff --git a/commons/models/ts_par4.yml b/commons/models/ts_par4.yml
new file mode 100644
index 00000000..1ba97f12
--- /dev/null
+++ b/commons/models/ts_par4.yml
@@ -0,0 +1,56 @@
+task_name:
+  code: ts
+  desc: Two-Step Task
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+model_name:
+  code: par4
+  desc: Hybrid Model, with 4 parameters
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+  - Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based
+    over model-free choice behavior. Neuron, 75(3), 418-424.
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  level1_choice: 'Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).'
+  level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3:
+    stimulus 5, 4: stimulus 6).\n    Note that, in our notation, choosing stimulus
+    1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default)
+    transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6
+    in Level 2 with a common (0.7 by default) transition. To change this default transition
+    probability, set the function argument `trans_prob` to your preferred value."
+  reward: Reward after Level 2 (0 or 1).
+parameters:
+  a:
+    desc: learning rate for both stages 1 & 2
+    info: [0, 0.5, 1]
+  beta:
+    desc: inverse temperature for both stages 1 & 2
+    info: [0, 1, Inf]
+  pi:
+    desc: perseverance
+    info: [0, 1, 5]
+  w:
+    desc: model-based weight
+    info: [0, 0.5, 1]
+regressors:
+postpreds:
+- y_pred_step1
+- y_pred_step2
+additional_args:
+- code: trans_prob
+  default: 0.7
+  desc: Common state transition probability from Stage (Level) 1 to Stage (Level)
+    2. Defaults to 0.7.
diff --git a/commons/models/ts_par6.yml b/commons/models/ts_par6.yml
new file mode 100644
index 00000000..55bfca19
--- /dev/null
+++ b/commons/models/ts_par6.yml
@@ -0,0 +1,60 @@
+task_name:
+  code: ts
+  desc: Two-Step Task
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+model_name:
+  code: par6
+  desc: Hybrid Model, with 6 parameters
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  level1_choice: 'Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).'
+  level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3:
+    stimulus 5, 4: stimulus 6).\n    Note that, in our notation, choosing stimulus
+    1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default)
+    transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6
+    in Level 2 with a common (0.7 by default) transition. To change this default transition
+    probability, set the function argument `trans_prob` to your preferred value."
+  reward: Reward after Level 2 (0 or 1).
+parameters:
+  a1:
+    desc: learning rate in stage 1
+    info: [0, 0.5, 1]
+  beta1:
+    desc: inverse temperature in stage 1
+    info: [0, 1, Inf]
+  a2:
+    desc: learning rate in stage 2
+    info: [0, 0.5, 1]
+  beta2:
+    desc: inverse temperature in stage 2
+    info: [0, 1, Inf]
+  pi:
+    desc: perseverance
+    info: [0, 1, 5]
+  w:
+    desc: model-based weight
+    info: [0, 0.5, 1]
+regressors:
+postpreds:
+- y_pred_step1
+- y_pred_step2
+additional_args:
+- code: trans_prob
+  default: 0.7
+  desc: Common state transition probability from Stage (Level) 1 to Stage (Level)
+    2. Defaults to 0.7.
diff --git a/commons/models/ts_par7.yml b/commons/models/ts_par7.yml
new file mode 100644
index 00000000..359d9aaf
--- /dev/null
+++ b/commons/models/ts_par7.yml
@@ -0,0 +1,63 @@
+task_name:
+  code: ts
+  desc: Two-Step Task
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+model_name:
+  code: par7
+  desc: Hybrid Model, with 7 parameters (original model)
+  cite:
+  - Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011).
+    Model-Based Influences on Humans' Choices and Striatal Prediction Errors.
+    Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+- name: Harhim Park
+  email: hrpark12@gmail.com
+  link: https://ccs-lab.github.io/team/harhim-park/
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  level1_choice: 'Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).'
+  level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3:
+    stimulus 5, 4: stimulus 6).\n    Note that, in our notation, choosing stimulus
+    1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default)
+    transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6
+    in Level 2 with a common (0.7 by default) transition. To change this default transition
+    probability, set the function argument `trans_prob` to your preferred value."
+  reward: Reward after Level 2 (0 or 1).
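The level2_choice notes above fully determine the task's transition structure, so simulating it is short. The sketch below only illustrates that coding and the trans_prob argument (default 0.7); it assumes Stage-2 state 1 offers stimuli 3 & 4 and state 2 offers stimuli 5 & 6, as described.

import random

def sample_stage2_state(level1_choice, trans_prob=0.7):
    """Return the Stage-2 state (1 -> stimuli 3 & 4, 2 -> stimuli 5 & 6)."""
    common = random.random() < trans_prob    # common vs. rare transition
    if level1_choice == 1:
        return 1 if common else 2
    return 2 if common else 1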
+parameters:
+  a1:
+    desc: learning rate in stage 1
+    info: [0, 0.5, 1]
+  beta1:
+    desc: inverse temperature in stage 1
+    info: [0, 1, Inf]
+  a2:
+    desc: learning rate in stage 2
+    info: [0, 0.5, 1]
+  beta2:
+    desc: inverse temperature in stage 2
+    info: [0, 1, Inf]
+  pi:
+    desc: perseverance
+    info: [0, 1, 5]
+  w:
+    desc: model-based weight
+    info: [0, 0.5, 1]
+  lambda:
+    desc: eligibility trace
+    info: [0, 0.5, 1]
+regressors:
+postpreds:
+- y_pred_step1
+- y_pred_step2
+additional_args:
+- code: trans_prob
+  default: 0.7
+  desc: Common state transition probability from Stage (Level) 1 to Stage (Level)
+    2. Defaults to 0.7.
diff --git a/commons/models/ug_bayes.yml b/commons/models/ug_bayes.yml
new file mode 100644
index 00000000..1c6747d0
--- /dev/null
+++ b/commons/models/ug_bayes.yml
@@ -0,0 +1,36 @@
+task_name:
+  code: ug
+  desc: Norm-Training Ultimatum Game
+  cite:
+model_name:
+  code: bayes
+  desc: Ideal Observer Model
+  cite:
+  - Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of
+    Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3),
+    1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  offer: Floating point value representing the offer made in that trial (e.g. 4, 10,
+    11).
+  accept: 1 or 0, indicating whether the offer was accepted in that trial (where accepted
+    == 1, rejected == 0).
+parameters:
+  alpha:
+    desc: envy
+    info: [0, 1, 20]
+  beta:
+    desc: guilt
+    info: [0, 0.5, 10]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 10]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/ug_delta.yml b/commons/models/ug_delta.yml
new file mode 100644
index 00000000..ca09da38
--- /dev/null
+++ b/commons/models/ug_delta.yml
@@ -0,0 +1,37 @@
+task_name:
+  code: ug
+  desc: Norm-Training Ultimatum Game
+  cite:
+model_name:
+  code: delta
+  desc: Rescorla-Wagner (Delta) Model
+  cite:
+  - 'Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015).
+    Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal
+    Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal
+    of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015'
+model_type:
+  code:
+  desc: Hierarchical
+notes:
+contributors:
+data_columns:
+  subjID: A unique identifier for each subject in the data-set.
+  offer: Floating point value representing the offer made in that trial (e.g. 4, 10,
+    11).
+  accept: 1 or 0, indicating whether the offer was accepted in that trial (where accepted
+    == 1, rejected == 0).
+parameters:
+  alpha:
+    desc: envy
+    info: [0, 1, 20]
+  tau:
+    desc: inverse temperature
+    info: [0, 1, 10]
+  ep:
+    desc: norm adaptation rate
+    info: [0, 0.5, 1]
+regressors:
+postpreds:
+- y_pred
+additional_args:
diff --git a/commons/models/wcs_sql.yml b/commons/models/wcs_sql.yml
new file mode 100644
index 00000000..2df863f0
--- /dev/null
+++ b/commons/models/wcs_sql.yml
@@ -0,0 +1,39 @@
+task_name:
+  code: wcs
+  desc: Wisconsin Card Sorting Task
+  cite:
+model_name:
+  code: sql
+  desc: Sequential Learning Model
+  cite:
+  - 'Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., &
+    Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort
+    task: Assessing processes in substance dependent individuals. Journal of Mathematical
+    Psychology, 54(1), 5-13.'
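To make the ug_delta parameters above concrete: alpha (envy), tau, and ep plausibly combine as in Gu et al. (2015), where an internal norm is nudged toward each observed offer at rate ep, and utility is reduced by alpha times the norm violation. The sketch below is written under that assumed formulation (the ideal-observer ug_bayes model instead maintains a full posterior over the norm and adds the guilt term beta).

import math

def ug_delta_trial(norm, offer, alpha, tau, ep):
    """One proposer offer under the Rescorla-Wagner norm-adaptation model."""
    utility = offer - alpha * max(norm - offer, 0.0)   # envy penalty for low offers
    p_accept = 1.0 / (1.0 + math.exp(-tau * utility))
    norm += ep * (offer - norm)                        # delta-rule norm update
    return norm, p_accept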
+model_type: + code: + desc: Hierarchical +notes: +contributors: +- name: Dayeong Min + email: mindy2801@snu.ac.kr + link: https://ccs-lab.github.io/team/dayeong-min/ +data_columns: + subjID: A unique identifier for each subject in the data-set. + choice: 'Integer value indicating which deck was chosen on that trial: 1, 2, 3, + or 4.' + outcome: '1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.' +parameters: + r: + desc: reward sensitivity + info: [0, 0.1, 1] + p: + desc: punishment sensitivity + info: [0, 0.1, 1] + d: + desc: decision consistency or inverse temperature + info: [0, 1, 5] +regressors: +postpreds: +- y_pred +additional_args: From b92257b27af170d918c287863db538f8b4ea2a68 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 20:38:07 +0900 Subject: [PATCH 092/163] Remove unnecessary files --- commons/convert-json-to-yaml.py | 14 ---- commons/models-json/bandit2arm_delta.json | 39 --------- .../bandit4arm2_kalman_filter.json | 60 -------------- .../models-json/bandit4arm_2par_lapse.json | 43 ---------- commons/models-json/bandit4arm_4par.json | 47 ----------- commons/models-json/bandit4arm_lapse.json | 51 ------------ .../models-json/bandit4arm_lapse_decay.json | 55 ------------- .../models-json/bandit4arm_singleA_lapse.json | 47 ----------- commons/models-json/bart_par4.json | 77 ------------------ commons/models-json/choiceRT_ddm.json | 55 ------------- commons/models-json/choiceRT_ddm_single.json | 55 ------------- commons/models-json/cra_exp.json | 56 ------------- commons/models-json/cra_linear.json | 56 ------------- commons/models-json/dbdm_prob_weight.json | 59 -------------- commons/models-json/dd_cs.json | 45 ----------- commons/models-json/dd_cs_single.json | 45 ----------- commons/models-json/dd_exp.json | 41 ---------- commons/models-json/dd_hyperbolic.json | 41 ---------- commons/models-json/dd_hyperbolic_single.json | 41 ---------- commons/models-json/gng_m1.json | 48 ----------- commons/models-json/gng_m2.json | 52 ------------ commons/models-json/gng_m3.json | 57 ------------- commons/models-json/gng_m4.json | 61 -------------- commons/models-json/igt_orl.json | 65 --------------- commons/models-json/igt_pvl_decay.json | 55 ------------- commons/models-json/igt_pvl_delta.json | 55 ------------- commons/models-json/igt_vpp.json | 71 ----------------- commons/models-json/peer_ocu.json | 53 ------------- commons/models-json/prl_ewa.json | 58 -------------- commons/models-json/prl_fictitious.json | 59 -------------- .../models-json/prl_fictitious_multipleB.json | 60 -------------- commons/models-json/prl_fictitious_rp.json | 64 --------------- .../models-json/prl_fictitious_rp_woa.json | 60 -------------- commons/models-json/prl_fictitious_woa.json | 55 ------------- commons/models-json/prl_rp.json | 57 ------------- commons/models-json/prl_rp_multipleB.json | 58 -------------- commons/models-json/pst_gainloss_Q.json | 49 ------------ commons/models-json/ra_noLA.json | 40 ---------- commons/models-json/ra_noRA.json | 40 ---------- commons/models-json/ra_prospect.json | 44 ----------- commons/models-json/rdt_happiness.json | 66 ---------------- commons/models-json/ts_par4.json | 62 --------------- commons/models-json/ts_par6.json | 69 ---------------- commons/models-json/ts_par7.json | 73 ----------------- commons/models-json/ug_bayes.json | 42 ---------- commons/models-json/ug_delta.json | 42 ---------- commons/models-json/wcs_sql.json | 48 ----------- commons/models-yaml/bandit2arm_delta.yml | 38 --------- 
.../models-yaml/bandit4arm2_kalman_filter.yml | 65 --------------- commons/models-yaml/bandit4arm_2par_lapse.yml | 44 ----------- commons/models-yaml/bandit4arm_4par.yml | 50 ------------ commons/models-yaml/bandit4arm_lapse.yml | 56 ------------- .../models-yaml/bandit4arm_lapse_decay.yml | 62 --------------- .../models-yaml/bandit4arm_singleA_lapse.yml | 50 ------------ commons/models-yaml/bart_par4.yml | 73 ----------------- commons/models-yaml/choiceRT_ddm.yml | 54 ------------- commons/models-yaml/choiceRT_ddm_single.yml | 54 ------------- commons/models-yaml/cra_exp.yml | 54 ------------- commons/models-yaml/cra_linear.yml | 54 ------------- commons/models-yaml/dbdm_prob_weight.yml | 60 -------------- commons/models-yaml/dd_cs.yml | 46 ----------- commons/models-yaml/dd_cs_single.yml | 46 ----------- commons/models-yaml/dd_exp.yml | 40 ---------- commons/models-yaml/dd_hyperbolic.yml | 40 ---------- commons/models-yaml/dd_hyperbolic_single.yml | 40 ---------- commons/models-yaml/gng_m1.yml | 48 ----------- commons/models-yaml/gng_m2.yml | 54 ------------- commons/models-yaml/gng_m3.yml | 61 -------------- commons/models-yaml/gng_m4.yml | 67 ---------------- commons/models-yaml/igt_orl.yml | 65 --------------- commons/models-yaml/igt_pvl_decay.yml | 55 ------------- commons/models-yaml/igt_pvl_delta.yml | 55 ------------- commons/models-yaml/igt_vpp.yml | 79 ------------------- commons/models-yaml/peer_ocu.yml | 52 ------------ commons/models-yaml/prl_ewa.yml | 55 ------------- commons/models-yaml/prl_fictitious.yml | 56 ------------- .../models-yaml/prl_fictitious_multipleB.yml | 57 ------------- commons/models-yaml/prl_fictitious_rp.yml | 63 --------------- commons/models-yaml/prl_fictitious_rp_woa.yml | 57 ------------- commons/models-yaml/prl_fictitious_woa.yml | 50 ------------ commons/models-yaml/prl_rp.yml | 54 ------------- commons/models-yaml/prl_rp_multipleB.yml | 55 ------------- commons/models-yaml/pst_gainloss_Q.yml | 48 ----------- commons/models-yaml/ra_noLA.yml | 39 --------- commons/models-yaml/ra_noRA.yml | 39 --------- commons/models-yaml/ra_prospect.yml | 45 ----------- commons/models-yaml/rdt_happiness.yml | 71 ----------------- commons/models-yaml/ts_par4.yml | 61 -------------- commons/models-yaml/ts_par6.yml | 72 ----------------- commons/models-yaml/ts_par7.yml | 78 ------------------ commons/models-yaml/ug_bayes.yml | 43 ---------- commons/models-yaml/ug_delta.yml | 43 ---------- commons/models-yaml/wcs_sql.yml | 47 ----------- 93 files changed, 4985 deletions(-) delete mode 100644 commons/convert-json-to-yaml.py delete mode 100644 commons/models-json/bandit2arm_delta.json delete mode 100644 commons/models-json/bandit4arm2_kalman_filter.json delete mode 100644 commons/models-json/bandit4arm_2par_lapse.json delete mode 100644 commons/models-json/bandit4arm_4par.json delete mode 100644 commons/models-json/bandit4arm_lapse.json delete mode 100644 commons/models-json/bandit4arm_lapse_decay.json delete mode 100644 commons/models-json/bandit4arm_singleA_lapse.json delete mode 100644 commons/models-json/bart_par4.json delete mode 100644 commons/models-json/choiceRT_ddm.json delete mode 100644 commons/models-json/choiceRT_ddm_single.json delete mode 100644 commons/models-json/cra_exp.json delete mode 100644 commons/models-json/cra_linear.json delete mode 100644 commons/models-json/dbdm_prob_weight.json delete mode 100644 commons/models-json/dd_cs.json delete mode 100644 commons/models-json/dd_cs_single.json delete mode 100644 commons/models-json/dd_exp.json delete 
mode 100644 commons/models-json/dd_hyperbolic.json delete mode 100644 commons/models-json/dd_hyperbolic_single.json delete mode 100644 commons/models-json/gng_m1.json delete mode 100644 commons/models-json/gng_m2.json delete mode 100644 commons/models-json/gng_m3.json delete mode 100644 commons/models-json/gng_m4.json delete mode 100644 commons/models-json/igt_orl.json delete mode 100644 commons/models-json/igt_pvl_decay.json delete mode 100644 commons/models-json/igt_pvl_delta.json delete mode 100644 commons/models-json/igt_vpp.json delete mode 100644 commons/models-json/peer_ocu.json delete mode 100644 commons/models-json/prl_ewa.json delete mode 100644 commons/models-json/prl_fictitious.json delete mode 100644 commons/models-json/prl_fictitious_multipleB.json delete mode 100644 commons/models-json/prl_fictitious_rp.json delete mode 100644 commons/models-json/prl_fictitious_rp_woa.json delete mode 100644 commons/models-json/prl_fictitious_woa.json delete mode 100644 commons/models-json/prl_rp.json delete mode 100644 commons/models-json/prl_rp_multipleB.json delete mode 100644 commons/models-json/pst_gainloss_Q.json delete mode 100644 commons/models-json/ra_noLA.json delete mode 100644 commons/models-json/ra_noRA.json delete mode 100644 commons/models-json/ra_prospect.json delete mode 100644 commons/models-json/rdt_happiness.json delete mode 100644 commons/models-json/ts_par4.json delete mode 100644 commons/models-json/ts_par6.json delete mode 100644 commons/models-json/ts_par7.json delete mode 100644 commons/models-json/ug_bayes.json delete mode 100644 commons/models-json/ug_delta.json delete mode 100644 commons/models-json/wcs_sql.json delete mode 100644 commons/models-yaml/bandit2arm_delta.yml delete mode 100644 commons/models-yaml/bandit4arm2_kalman_filter.yml delete mode 100644 commons/models-yaml/bandit4arm_2par_lapse.yml delete mode 100644 commons/models-yaml/bandit4arm_4par.yml delete mode 100644 commons/models-yaml/bandit4arm_lapse.yml delete mode 100644 commons/models-yaml/bandit4arm_lapse_decay.yml delete mode 100644 commons/models-yaml/bandit4arm_singleA_lapse.yml delete mode 100644 commons/models-yaml/bart_par4.yml delete mode 100644 commons/models-yaml/choiceRT_ddm.yml delete mode 100644 commons/models-yaml/choiceRT_ddm_single.yml delete mode 100644 commons/models-yaml/cra_exp.yml delete mode 100644 commons/models-yaml/cra_linear.yml delete mode 100644 commons/models-yaml/dbdm_prob_weight.yml delete mode 100644 commons/models-yaml/dd_cs.yml delete mode 100644 commons/models-yaml/dd_cs_single.yml delete mode 100644 commons/models-yaml/dd_exp.yml delete mode 100644 commons/models-yaml/dd_hyperbolic.yml delete mode 100644 commons/models-yaml/dd_hyperbolic_single.yml delete mode 100644 commons/models-yaml/gng_m1.yml delete mode 100644 commons/models-yaml/gng_m2.yml delete mode 100644 commons/models-yaml/gng_m3.yml delete mode 100644 commons/models-yaml/gng_m4.yml delete mode 100644 commons/models-yaml/igt_orl.yml delete mode 100644 commons/models-yaml/igt_pvl_decay.yml delete mode 100644 commons/models-yaml/igt_pvl_delta.yml delete mode 100644 commons/models-yaml/igt_vpp.yml delete mode 100644 commons/models-yaml/peer_ocu.yml delete mode 100644 commons/models-yaml/prl_ewa.yml delete mode 100644 commons/models-yaml/prl_fictitious.yml delete mode 100644 commons/models-yaml/prl_fictitious_multipleB.yml delete mode 100644 commons/models-yaml/prl_fictitious_rp.yml delete mode 100644 commons/models-yaml/prl_fictitious_rp_woa.yml delete mode 100644 
commons/models-yaml/prl_fictitious_woa.yml delete mode 100644 commons/models-yaml/prl_rp.yml delete mode 100644 commons/models-yaml/prl_rp_multipleB.yml delete mode 100644 commons/models-yaml/pst_gainloss_Q.yml delete mode 100644 commons/models-yaml/ra_noLA.yml delete mode 100644 commons/models-yaml/ra_noRA.yml delete mode 100644 commons/models-yaml/ra_prospect.yml delete mode 100644 commons/models-yaml/rdt_happiness.yml delete mode 100644 commons/models-yaml/ts_par4.yml delete mode 100644 commons/models-yaml/ts_par6.yml delete mode 100644 commons/models-yaml/ts_par7.yml delete mode 100644 commons/models-yaml/ug_bayes.yml delete mode 100644 commons/models-yaml/ug_delta.yml delete mode 100644 commons/models-yaml/wcs_sql.yml diff --git a/commons/convert-json-to-yaml.py b/commons/convert-json-to-yaml.py deleted file mode 100644 index 6fce878f..00000000 --- a/commons/convert-json-to-yaml.py +++ /dev/null @@ -1,14 +0,0 @@ -import os -from pathlib import Path - -PATH_ROOT = Path(__file__).absolute().parent -PATH_JSON = PATH_ROOT / 'models' -PATH_YAML = PATH_ROOT / 'models-yaml' - -if not PATH_YAML.exists(): - PATH_YAML.mkdir() - -for p_json in PATH_JSON.glob('*.json'): - p_yaml = PATH_YAML / p_json.name.replace('.json', '.yml') - os.system(f'json2yaml {str(p_json)} > {str(p_yaml)}') - print('Done:', p_yaml) diff --git a/commons/models-json/bandit2arm_delta.json b/commons/models-json/bandit2arm_delta.json deleted file mode 100644 index 371ba5cb..00000000 --- a/commons/models-json/bandit2arm_delta.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "task_name": { - "code": "bandit2arm", - "desc": "2-Armed Bandit Task", - "cite": [ - "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683", - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x" - ] - }, - "model_name": { - "code": "delta", - "desc": "Rescorla-Wagner (Delta) Model", - "cite": [] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1 or 2.", - "outcome": "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "A": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "tau": { - "desc": "inverse temperature", - "info": [0, 1, 5] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm2_kalman_filter.json b/commons/models-json/bandit4arm2_kalman_filter.json deleted file mode 100644 index 3b20d664..00000000 --- a/commons/models-json/bandit4arm2_kalman_filter.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm2", - "desc": "4-Armed Bandit Task (modified)", - "cite": [] - }, - "model_name": { - "code": "kalman_filter", - "desc": "Kalman Filter", - "cite": [ - "Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879." 
- ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Yoonseo Zoh", - "email": "zohyos7@gmail.com", - "link": "https://zohyos7.github.io" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "outcome": "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "lambda": { - "desc": "decay factor", - "info": [0, 0.9, 1] - }, - "theta": { - "desc": "decay center", - "info": [0, 50, 100] - }, - "beta": { - "desc": "inverse softmax temperature", - "info": [0, 0.1, 1] - }, - "mu0": { - "desc": "anticipated initial mean of all 4 options", - "info": [0, 85, 100] - }, - "sigma0": { - "desc": "anticipated initial sd (uncertainty factor) of all 4 options", - "info": [0, 6, 15] - }, - "sigmaD": { - "desc": "sd of diffusion noise", - "info": [0, 3, 15] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm_2par_lapse.json b/commons/models-json/bandit4arm_2par_lapse.json deleted file mode 100644 index 93d99289..00000000 --- a/commons/models-json/bandit4arm_2par_lapse.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm", - "desc": "4-Armed Bandit Task", - "cite": [] - }, - "model_name": { - "code": "2par_lapse", - "desc": "3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)", - "cite": [ - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - }, - "parameters": { - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm_4par.json b/commons/models-json/bandit4arm_4par.json deleted file mode 100644 index 81d7b8ee..00000000 --- a/commons/models-json/bandit4arm_4par.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm", - "desc": "4-Armed Bandit Task", - "cite": [] - }, - "model_name": { - "code": "4par", - "desc": "4 Parameter Model, without C (choice perseveration)", - "cite": [ - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." 
- ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - }, - "parameters": { - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "R": { - "desc": "reward sensitivity", - "info": [0, 1, 30] - }, - "P": { - "desc": "punishment sensitivity", - "info": [0, 1, 30] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm_lapse.json b/commons/models-json/bandit4arm_lapse.json deleted file mode 100644 index cd981d67..00000000 --- a/commons/models-json/bandit4arm_lapse.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm", - "desc": "4-Armed Bandit Task", - "cite": [] - }, - "model_name": { - "code": "lapse", - "desc": "5 Parameter Model, without C (choice perseveration) but with xi (noise)", - "cite": [ - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - }, - "parameters": { - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "R": { - "desc": "reward sensitivity", - "info": [0, 1, 30] - }, - "P": { - "desc": "punishment sensitivity", - "info": [0, 1, 30] - }, - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm_lapse_decay.json b/commons/models-json/bandit4arm_lapse_decay.json deleted file mode 100644 index 81145a39..00000000 --- a/commons/models-json/bandit4arm_lapse_decay.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm", - "desc": "4-Armed Bandit Task", - "cite": [] - }, - "model_name": { - "code": "lapse_decay", - "desc": "5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).", - "cite": [ - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 
10.31234/osf.io/k5b8m" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - }, - "parameters": { - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "R": { - "desc": "reward sensitivity", - "info": [0, 1, 30] - }, - "P": { - "desc": "punishment sensitivity", - "info": [0, 1, 30] - }, - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - }, - "d": { - "desc": "decay rate", - "info": [0, 0.1, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bandit4arm_singleA_lapse.json b/commons/models-json/bandit4arm_singleA_lapse.json deleted file mode 100644 index 07583331..00000000 --- a/commons/models-json/bandit4arm_singleA_lapse.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "task_name": { - "code": "bandit4arm", - "desc": "4-Armed Bandit Task", - "cite": [] - }, - "model_name": { - "code": "singleA_lapse", - "desc": "4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.", - "cite": [ - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.", - "gain": "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - }, - "parameters": { - "A": { - "desc": "learning rate", - "info": [0, 0.1, 1] - }, - "R": { - "desc": "reward sensitivity", - "info": [0, 1, 30] - }, - "P": { - "desc": "punishment sensitivity", - "info": [0, 1, 30] - }, - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/bart_par4.json b/commons/models-json/bart_par4.json deleted file mode 100644 index 844d82a2..00000000 --- a/commons/models-json/bart_par4.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "task_name": { - "code": "bart", - "desc": "Balloon Analogue Risk Task", - "cite": [] - }, - "model_name": { - "code": "par4", - "desc": "Re-parameterized version of BART model with 4 parameters", - "cite": [ - "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105." 
- ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Harhim Park", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - }, - { - "name": "Jaeyeong Yang", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Ayoung Lee", - "email": "aylee2008@naver.com", - "link": "https://ccs-lab.github.io/team/ayoung-lee/" - }, - { - "name": "Jeongbin Oh", - "email": "ows0104@gmail.com", - "link": "https://ccs-lab.github.io/team/jeongbin-oh/" - }, - { - "name": "Jiyoon Lee", - "email": "nicole.lee2001@gmail.com", - "link": "https://ccs-lab.github.io/team/jiyoon-lee/" - }, - { - "name": "Junha Jang", - "email": "andy627robo@naver.com", - "link": "https://ccs-lab.github.io/team/junha-jang/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "pumps": "The number of pumps.", - "explosion": "0: intact, 1: burst" - }, - "parameters": { - "phi": { - "desc": "prior belief of balloon not bursting", - "info": [0, 0.5, 1] - }, - "eta": { - "desc": "updating rate", - "info": [0, 1, "Inf"] - }, - "gam": { - "desc": "risk-taking parameter", - "info": [0, 1, "Inf"] - }, - "tau": { - "desc": "inverse temperature", - "info": [0, 1, "Inf"] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/choiceRT_ddm.json b/commons/models-json/choiceRT_ddm.json deleted file mode 100644 index 6050c43a..00000000 --- a/commons/models-json/choiceRT_ddm.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "choiceRT", - "desc": "Choice Reaction Time Task", - "cite": [] - }, - "model_name": { - "code": "ddm", - "desc": "Drift Diffusion Model", - "cite": [ - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [ - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.", - "Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing." - ], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).", - "RT": "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)." - }, - "parameters": { - "alpha": { - "desc": "boundary separation", - "info": [0, 0.5, "Inf"] - }, - "beta": { - "desc": "bias", - "info": [0, 0.5, 1] - }, - "delta": { - "desc": "drift rate", - "info": [0, 0.5, "Inf"] - }, - "tau": { - "desc": "non-decision time", - "info": [0, 0.15, 1] - } - }, - "regressors": {}, - "postpreds": [], - "additional_args": [ - { - "code": "RTbound", - "default": 0.1, - "desc": "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)." 
- } - ] -} diff --git a/commons/models-json/choiceRT_ddm_single.json b/commons/models-json/choiceRT_ddm_single.json deleted file mode 100644 index 70954155..00000000 --- a/commons/models-json/choiceRT_ddm_single.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "choiceRT", - "desc": "Choice Reaction Time Task", - "cite": [] - }, - "model_name": { - "code": "ddm", - "desc": "Drift Diffusion Model", - "cite": [ - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59" - ] - }, - "model_type": { - "code": "single", - "desc": "Individual" - }, - "notes": [ - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters.", - "Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing." - ], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).", - "RT": "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)." - }, - "parameters": { - "alpha": { - "desc": "boundary separation", - "info": [null, 0.5, null] - }, - "beta": { - "desc": "bias", - "info": [null, 0.5, null] - }, - "delta": { - "desc": "drift rate", - "info": [null, 0.5, null] - }, - "tau": { - "desc": "non-decision time", - "info": [null, 0.15, null] - } - }, - "regressors": {}, - "postpreds": [], - "additional_args": [ - { - "code": "RTbound", - "default": 0.1, - "desc": "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)." - } - ] -} diff --git a/commons/models-json/cra_exp.json b/commons/models-json/cra_exp.json deleted file mode 100644 index 9dacc77b..00000000 --- a/commons/models-json/cra_exp.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "task_name": { - "code": "cra", - "desc": "Choice Under Risk and Ambiguity Task", - "cite": [] - }, - "model_name": { - "code": "exp", - "desc": "Exponential Subjective Value Model", - "cite": [ - "Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "prob": "Objective probability of the variable lottery.", - "ambig": "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).", - "reward_var": "Amount of reward in variable lottery. Assumed to be greater than zero.", - "reward_fix": "Amount of reward in fixed lottery. Assumed to be greater than zero.", - "choice": "If the variable lottery was selected, choice == 1; otherwise choice == 0." 
- }, - "parameters": { - "alpha": { - "desc": "risk attitude", - "info": [0, 1, 2] - }, - "beta": { - "desc": "ambiguity attitude", - "info": ["-Inf", 0, "Inf"] - }, - "gamma": { - "desc": "inverse temperature", - "info": [0, 1, "Inf"] - } - }, - "regressors": { - "sv": 2, - "sv_fix": 2, - "sv_var": 2, - "p_var": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/cra_linear.json b/commons/models-json/cra_linear.json deleted file mode 100644 index d8ce1cfa..00000000 --- a/commons/models-json/cra_linear.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "task_name": { - "code": "cra", - "desc": "Choice Under Risk and Ambiguity Task", - "cite": [] - }, - "model_name": { - "code": "linear", - "desc": "Linear Subjective Value Model", - "cite": [ - "Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "prob": "Objective probability of the variable lottery.", - "ambig": "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).", - "reward_var": "Amount of reward in variable lottery. Assumed to be greater than zero.", - "reward_fix": "Amount of reward in fixed lottery. Assumed to be greater than zero.", - "choice": "If the variable lottery was selected, choice == 1; otherwise choice == 0." - }, - "parameters": { - "alpha": { - "desc": "risk attitude", - "info": [0, 1, 2] - }, - "beta": { - "desc": "ambiguity attitude", - "info": ["-Inf", 0, "Inf"] - }, - "gamma": { - "desc": "inverse temperature", - "info": [0, 1, "Inf"] - } - }, - "regressors": { - "sv": 2, - "sv_fix": 2, - "sv_var": 2, - "p_var": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dbdm_prob_weight.json b/commons/models-json/dbdm_prob_weight.json deleted file mode 100644 index d9650466..00000000 --- a/commons/models-json/dbdm_prob_weight.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "task_name": { - "code": "dbdm", - "desc": "Description Based Decison Making Task", - "cite": [] - }, - "model_name": { - "code": "prob_weight", - "desc": "Probability Weight Function", - "cite": [ - "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.", - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539.", - "Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022." 
- ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Yoonseo Zoh", - "email": "zohyos7@gmail.com", - "link": "https://ccs-lab.github.io/team/yoonseo-zoh/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "opt1hprob": "Possiblity of getting higher value of outcome(opt1hval) when choosing option 1.", - "opt2hprob": "Possiblity of getting higher value of outcome(opt2hval) when choosing option 2.", - "opt1hval": "Possible (with opt1hprob probability) outcome of option 1.", - "opt1lval": "Possible (with (1 - opt1hprob) probability) outcome of option 1.", - "opt2hval": "Possible (with opt2hprob probability) outcome of option 2.", - "opt2lval": "Possible (with (1 - opt2hprob) probability) outcome of option 2.", - "choice": "If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2." - }, - "parameters": { - "tau": { - "desc": "probability weight function", - "info": [0, 0.8, 1] - }, - "rho": { - "desc": "subject utility function", - "info": [0, 0.7, 2] - }, - "lambda": { - "desc": "loss aversion parameter", - "info": [0, 2.5, 5] - }, - "beta": { - "desc": "inverse softmax temperature", - "info": [0, 0.2, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dd_cs.json b/commons/models-json/dd_cs.json deleted file mode 100644 index 96ff42fb..00000000 --- a/commons/models-json/dd_cs.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "task_name": { - "code": "dd", - "desc": "Delay Discounting Task", - "cite": [] - }, - "model_name": { - "code": "cs", - "desc": "Constant-Sensitivity (CS) Model", - "cite": [ - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", - "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", - "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", - "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", - "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - }, - "parameters": { - "r": { - "desc": "exponential discounting rate", - "info": [0, 0.1, 1] - }, - "s": { - "desc": "impatience", - "info": [0, 1, 10] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 5] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dd_cs_single.json b/commons/models-json/dd_cs_single.json deleted file mode 100644 index 58d4ccb7..00000000 --- a/commons/models-json/dd_cs_single.json +++ /dev/null @@ -1,45 +0,0 @@ -{ - "task_name": { - "code": "dd", - "desc": "Delay Discounting Task", - "cite": [] - }, - "model_name": { - "code": "cs", - "desc": "Constant-Sensitivity (CS) Model", - "cite": [ - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. 
http://doi.org/10.1287/mnsc.1060.0671" - ] - }, - "model_type": { - "code": "single", - "desc": "Individual" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", - "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", - "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", - "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", - "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - }, - "parameters": { - "r": { - "desc": "exponential discounting rate", - "info": [null, 0.1, null] - }, - "s": { - "desc": "impatience", - "info": [null, 1, null] - }, - "beta": { - "desc": "inverse temperature", - "info": [null, 1, null] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dd_exp.json b/commons/models-json/dd_exp.json deleted file mode 100644 index 9f9794d5..00000000 --- a/commons/models-json/dd_exp.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "task_name": { - "code": "dd", - "desc": "Delay Discounting Task", - "cite": [] - }, - "model_name": { - "code": "exp", - "desc": "Exponential Model", - "cite": [ - "Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", - "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", - "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", - "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", - "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - }, - "parameters": { - "r": { - "desc": "exponential discounting rate", - "info": [0, 0.1, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 5] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dd_hyperbolic.json b/commons/models-json/dd_hyperbolic.json deleted file mode 100644 index 809e131f..00000000 --- a/commons/models-json/dd_hyperbolic.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "task_name": { - "code": "dd", - "desc": "Delay Discounting Task", - "cite": [] - }, - "model_name": { - "code": "hyperbolic", - "desc": "Hyperbolic Model", - "cite": [ - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", - "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", - "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 
0).", - "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", - "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - }, - "parameters": { - "k": { - "desc": "discounting rate", - "info": [0, 0.1, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 5] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/dd_hyperbolic_single.json b/commons/models-json/dd_hyperbolic_single.json deleted file mode 100644 index e89ba3f4..00000000 --- a/commons/models-json/dd_hyperbolic_single.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "task_name": { - "code": "dd", - "desc": "Delay Discounting Task", - "cite": [] - }, - "model_name": { - "code": "hyperbolic", - "desc": "Hyperbolic Model", - "cite": [ - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." - ] - }, - "model_type": { - "code": "single", - "desc": "Individual" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "delay_later": "An integer representing the delayed days for the later option (e.g. 1, 6, 28).", - "amount_later": "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).", - "delay_sooner": "An integer representing the delayed days for the sooner option (e.g. 0).", - "amount_sooner": "A floating point number representing the amount for the sooner option (e.g. 10).", - "choice": "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - }, - "parameters": { - "k": { - "desc": "discounting rate", - "info": [null, 0.1, null] - }, - "beta": { - "desc": "inverse temperature", - "info": [null, 1, null] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/gng_m1.json b/commons/models-json/gng_m1.json deleted file mode 100644 index 99b7ac01..00000000 --- a/commons/models-json/gng_m1.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "task_name": { - "code": "gng", - "desc": "Orthogonalized Go/Nogo Task", - "cite": [] - }, - "model_name": { - "code": "m1", - "desc": "RW + noise", - "cite": [ - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", - "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", - "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
- }, - "parameters": { - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - }, - "ep": { - "desc": "learning rate", - "info": [0, 0.2, 1] - }, - "rho": { - "desc": "effective size", - "info": [0, "exp(2)", "Inf"] - } - }, - "regressors": { - "Qgo": 2, - "Qnogo": 2, - "Wgo": 2, - "Wnogo": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/gng_m2.json b/commons/models-json/gng_m2.json deleted file mode 100644 index 3f97a1ed..00000000 --- a/commons/models-json/gng_m2.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "task_name": { - "code": "gng", - "desc": "Orthogonalized Go/Nogo Task", - "cite": [] - }, - "model_name": { - "code": "m2", - "desc": "RW + noise + bias", - "cite": [ - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", - "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", - "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." - }, - "parameters": { - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - }, - "ep": { - "desc": "learning rate", - "info": [0, 0.2, 1] - }, - "b": { - "desc": "action bias", - "info": ["-Inf", 0, "Inf"] - }, - "rho": { - "desc": "effective size", - "info": [0, "exp(2)", "Inf"] - } - }, - "regressors": { - "Qgo": 2, - "Qnogo": 2, - "Wgo": 2, - "Wnogo": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/gng_m3.json b/commons/models-json/gng_m3.json deleted file mode 100644 index b4fdcbba..00000000 --- a/commons/models-json/gng_m3.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "task_name": { - "code": "gng", - "desc": "Orthogonalized Go/Nogo Task", - "cite": [] - }, - "model_name": { - "code": "m3", - "desc": "RW + noise + bias + pi", - "cite": [ - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", - "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", - "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
- }, - "parameters": { - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - }, - "ep": { - "desc": "learning rate", - "info": [0, 0.2, 1] - }, - "b": { - "desc": "action bias", - "info": ["-Inf", 0, "Inf"] - }, - "pi": { - "desc": "Pavlovian bias", - "info": ["-Inf", 0, "Inf"] - }, - "rho": { - "desc": "effective size", - "info": [0, "exp(2)", "Inf"] - } - }, - "regressors": { - "Qgo": 2, - "Qnogo": 2, - "Wgo": 2, - "Wnogo": 2, - "SV": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/gng_m4.json b/commons/models-json/gng_m4.json deleted file mode 100644 index 8e3626ec..00000000 --- a/commons/models-json/gng_m4.json +++ /dev/null @@ -1,61 +0,0 @@ -{ - "task_name": { - "code": "gng", - "desc": "Orthogonalized Go/Nogo Task", - "cite": [] - }, - "model_name": { - "code": "m4", - "desc": "RW (rew/pun) + noise + bias + pi", - "cite": [ - "Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "cue": "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.", - "keyPressed": "Binary value representing the subject's response for that trial (where Press == 1; No press == 0).", - "outcome": "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." - }, - "parameters": { - "xi": { - "desc": "noise", - "info": [0, 0.1, 1] - }, - "ep": { - "desc": "learning rate", - "info": [0, 0.2, 1] - }, - "b": { - "desc": "action bias", - "info": ["-Inf", 0, "Inf"] - }, - "pi": { - "desc": "Pavlovian bias", - "info": ["-Inf", 0, "Inf"] - }, - "rhoRew": { - "desc": "reward sensitivity", - "info": [0, "exp(2)", "Inf"] - }, - "rhoPun": { - "desc": "punishment sensitivity", - "info": [0, "exp(2)", "Inf"] - } - }, - "regressors": { - "Qgo": 2, - "Qnogo": 2, - "Wgo": 2, - "Wnogo": 2, - "SV": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/igt_orl.json b/commons/models-json/igt_orl.json deleted file mode 100644 index 8f399b46..00000000 --- a/commons/models-json/igt_orl.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "task_name": { - "code": "igt", - "desc": "Iowa Gambling Task", - "cite": [ - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - ] - }, - "model_name": { - "code": "orl", - "desc": "Outcome-Representation Learning Model", - "cite": [ - "Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. 
https://doi.org/10.1111/cogs.12688" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Nate Haines", - "email": "haines.175@osu.edu", - "link": "https://ccs-lab.github.io/team/nate-haines/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", - "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - }, - "parameters": { - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "K": { - "desc": "perseverance decay", - "info": [0, 0.1, 5] - }, - "betaF": { - "desc": "outcome frequency weight", - "info": ["-Inf", 0.1, "Inf"] - }, - "betaP": { - "desc": "perseverance weight", - "info": ["-Inf", 1, "Inf"] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [ - { - "code": "payscale", - "default": 100, - "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - } - ] -} diff --git a/commons/models-json/igt_pvl_decay.json b/commons/models-json/igt_pvl_decay.json deleted file mode 100644 index 8886e5f5..00000000 --- a/commons/models-json/igt_pvl_decay.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "igt", - "desc": "Iowa Gambling Task", - "cite": [ - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - ] - }, - "model_name": { - "code": "pvl_decay", - "desc": "Prospect Valence Learning (PVL) Decay-RI", - "cite": [ - "Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", - "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - }, - "parameters": { - "A": { - "desc": "decay rate", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "outcome sensitivity", - "info": [0, 0.5, 2] - }, - "cons": { - "desc": "response consistency", - "info": [0, 1, 5] - }, - "lambda": { - "desc": "loss aversion", - "info": [0, 1, 10] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [ - { - "code": "payscale", - "default": 100, - "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." 
- } - ] -} diff --git a/commons/models-json/igt_pvl_delta.json b/commons/models-json/igt_pvl_delta.json deleted file mode 100644 index dade6353..00000000 --- a/commons/models-json/igt_pvl_delta.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "igt", - "desc": "Iowa Gambling Task", - "cite": [ - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - ] - }, - "model_name": { - "code": "pvl_delta", - "desc": "Prospect Valence Learning (PVL) Delta", - "cite": [ - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", - "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - }, - "parameters": { - "A": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "outcome sensitivity", - "info": [0, 0.5, 2] - }, - "cons": { - "desc": "response consistency", - "info": [0, 1, 5] - }, - "lambda": { - "desc": "loss aversion", - "info": [0, 1, 10] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [ - { - "code": "payscale", - "default": 100, - "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - } - ] -} diff --git a/commons/models-json/igt_vpp.json b/commons/models-json/igt_vpp.json deleted file mode 100644 index 70c18b34..00000000 --- a/commons/models-json/igt_vpp.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "task_name": { - "code": "igt", - "desc": "Iowa Gambling Task", - "cite": [ - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - ] - }, - "model_name": { - "code": "vpp", - "desc": "Value-Plus-Perseverance", - "cite": [ - "Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).", - "gain": "Floating point value representing the amount of currency won on that trial (e.g. 50, 100).", - "loss": "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." 
- }, - "parameters": { - "A": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "outcome sensitivity", - "info": [0, 0.5, 2] - }, - "cons": { - "desc": "response consistency", - "info": [0, 1, 5] - }, - "lambda": { - "desc": "loss aversion", - "info": [0, 1, 10] - }, - "epP": { - "desc": "gain impact", - "info": ["-Inf", 0, "Inf"] - }, - "epN": { - "desc": "loss impact", - "info": ["-Inf", 0, "Inf"] - }, - "K": { - "desc": "decay rate", - "info": [0, 0.5, 1] - }, - "w": { - "desc": "RL weight", - "info": [0, 0.5, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [ - { - "code": "payscale", - "default": 100, - "desc": "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - } - ] -} diff --git a/commons/models-json/peer_ocu.json b/commons/models-json/peer_ocu.json deleted file mode 100644 index ff82a77c..00000000 --- a/commons/models-json/peer_ocu.json +++ /dev/null @@ -1,53 +0,0 @@ -{ - "task_name": { - "code": "peer", - "desc": "Peer Influence Task", - "cite": [ - "Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916." - ] - }, - "model_name": { - "code": "ocu", - "desc": "Other-Conferred Utility (OCU) Model", - "cite": [] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Harhim Park", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "condition": "0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).", - "p_gamble": "Probability of receiving a high payoff (same for both options).", - "safe_Hpayoff": "High payoff of the safe option.", - "safe_Lpayoff": "Low payoff of the safe option.", - "risky_Hpayoff": "High payoff of the risky option.", - "risky_Lpayoff": "Low payoff of the risky option.", - "choice": "Which option was chosen? 0: safe, 1: risky." - }, - "parameters": { - "rho": { - "desc": "risk preference", - "info": [0, 1, 2] - }, - "tau": { - "desc": "inverse temperature", - "info": [0, 1, "Inf"] - }, - "ocu": { - "desc": "other-conferred utility", - "info": ["-Inf", 0, "Inf"] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_ewa.json b/commons/models-json/prl_ewa.json deleted file mode 100644 index 14f249e1..00000000 --- a/commons/models-json/prl_ewa.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "ewa", - "desc": "Experience-Weighted Attraction Model", - "cite": [ - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. 
http://doi.org/10.1016/j.neuron.2013.08.030" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "phi": { - "desc": "1 - learning rate", - "info": [0, 0.5, 1] - }, - "rho": { - "desc": "experience decay factor", - "info": [0, 0.1, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "ew_c": 2, - "ew_nc": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_fictitious.json b/commons/models-json/prl_fictitious.json deleted file mode 100644 index 102b32c2..00000000 --- a/commons/models-json/prl_fictitious.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "fictitious", - "desc": "Fictitious Update Model", - "cite": [ - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "eta": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "indecision point", - "info": ["-Inf", 0, "Inf"] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "pe_c": 2, - "pe_nc": 2, - "dv": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_fictitious_multipleB.json b/commons/models-json/prl_fictitious_multipleB.json deleted file mode 100644 index e4792f7a..00000000 --- a/commons/models-json/prl_fictitious_multipleB.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "fictitious", - "desc": "Fictitious Update Model", - "cite": [ - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. 
Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - ] - }, - "model_type": { - "code": "multipleB", - "desc": "Multiple-Block Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "block": "A unique identifier for each of the multiple blocks within each subject.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "eta": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "indecision point", - "info": ["-Inf", 0, "Inf"] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 3, - "ev_nc": 3, - "pe_c": 3, - "pe_nc": 3, - "dv": 3 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_fictitious_rp.json b/commons/models-json/prl_fictitious_rp.json deleted file mode 100644 index 0684fa12..00000000 --- a/commons/models-json/prl_fictitious_rp.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "fictitious_rp", - "desc": "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)", - "cite": [ - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098", - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
- }, - "parameters": { - "eta_pos": { - "desc": "learning rate, +PE", - "info": [0, 0.5, 1] - }, - "eta_neg": { - "desc": "learning rate, -PE", - "info": [0, 0.5, 1] - }, - "alpha": { - "desc": "indecision point", - "info": ["-Inf", 0, "Inf"] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "pe_c": 2, - "pe_nc": 2, - "dv": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_fictitious_rp_woa.json b/commons/models-json/prl_fictitious_rp_woa.json deleted file mode 100644 index 1a942c2e..00000000 --- a/commons/models-json/prl_fictitious_rp_woa.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "fictitious_rp_woa", - "desc": "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)", - "cite": [ - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098", - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "eta_pos": { - "desc": "learning rate, +PE", - "info": [0, 0.5, 1] - }, - "eta_neg": { - "desc": "learning rate, -PE", - "info": [0, 0.5, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "pe_c": 2, - "pe_nc": 2, - "dv": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_fictitious_woa.json b/commons/models-json/prl_fictitious_woa.json deleted file mode 100644 index 69b35d55..00000000 --- a/commons/models-json/prl_fictitious_woa.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "fictitious_woa", - "desc": "Fictitious Update Model, without alpha (indecision point)", - "cite": [ - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. 
http://doi.org/10.1093/cercor/bhn098" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "eta": { - "desc": "learning rate", - "info": [0, 0.5, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "pe_c": 2, - "pe_nc": 2, - "dv": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_rp.json b/commons/models-json/prl_rp.json deleted file mode 100644 index 6272765c..00000000 --- a/commons/models-json/prl_rp.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "rp", - "desc": "Reward-Punishment Model", - "cite": [ - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 2, - "ev_nc": 2, - "pe": 2 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/prl_rp_multipleB.json b/commons/models-json/prl_rp_multipleB.json deleted file mode 100644 index f300b69f..00000000 --- a/commons/models-json/prl_rp_multipleB.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "task_name": { - "code": "prl", - "desc": "Probabilistic Reversal Learning Task", - "cite": [] - }, - "model_name": { - "code": "rp", - "desc": "Reward-Punishment Model", - "cite": [ - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. 
http://doi.org/10.1016/j.neuron.2013.08.030" - ] - }, - "model_type": { - "code": "multipleB", - "desc": "Multiple-Block Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang (for model-based regressors)", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - }, - { - "name": "Harhim Park (for model-based regressors)", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "block": "A unique identifier for each of the multiple blocks within each subject.", - "choice": "Integer value representing the option chosen on that trial: 1 or 2.", - "outcome": "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - }, - "parameters": { - "Apun": { - "desc": "punishment learning rate", - "info": [0, 0.1, 1] - }, - "Arew": { - "desc": "reward learning rate", - "info": [0, 0.1, 1] - }, - "beta": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": { - "ev_c": 3, - "ev_nc": 3, - "pe": 3 - }, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/pst_gainloss_Q.json b/commons/models-json/pst_gainloss_Q.json deleted file mode 100644 index f2fec053..00000000 --- a/commons/models-json/pst_gainloss_Q.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "task_name": { - "code": "pst", - "desc": "Probabilistic Selection Task", - "cite": [] - }, - "model_name": { - "code": "gainloss_Q", - "desc": "Gain-Loss Q Learning Model", - "cite": [ - "Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Jaeyeong Yang", - "email": "jaeyeong.yang1125@gmail.com", - "link": "https://ccs-lab.github.io/team/jaeyeong-yang/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "type": "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as for 80\\% (type 1), 20\\% (type 2), 70\\% (type 3), 30\\% (type 4), 60\\% (type 5), 40\\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6.", - "choice": "Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).", - "reward": "Amount of reward earned as a result of the trial." 
- },
- "parameters": {
- "alpha_pos": {
- "desc": "learning rate for positive feedback",
- "info": [0, 0.5, 1]
- },
- "alpha_neg": {
- "desc": "learning rate for negative feedback",
- "info": [0, 0.5, 1]
- },
- "beta": {
- "desc": "inverse temperature",
- "info": [0, 1, 10]
- }
- },
- "regressors": {},
- "postpreds": ["y_pred"],
- "additional_args": []
-}
diff --git a/commons/models-json/ra_noLA.json b/commons/models-json/ra_noLA.json
deleted file mode 100644
index ea8ec291..00000000
--- a/commons/models-json/ra_noLA.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "task_name": {
- "code": "ra",
- "desc": "Risk Aversion Task",
- "cite": []
- },
- "model_name": {
- "code": "noLA",
- "desc": "Prospect Theory, without loss aversion (LA) parameter",
- "cite": [
- "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
- ]
- },
- "model_type": {
- "code": "",
- "desc": "Hierarchical"
- },
- "notes": [],
- "contributors": [],
- "data_columns": {
- "subjID": "A unique identifier for each subject in the data-set.",
- "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
- "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
- "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater.",
- "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
- },
- "parameters": {
- "rho": {
- "desc": "risk aversion",
- "info": [0, 1, 2]
- },
- "tau": {
- "desc": "inverse temperature",
- "info": [0, 1, 30]
- }
- },
- "regressors": {},
- "postpreds": ["y_pred"],
- "additional_args": []
-}
diff --git a/commons/models-json/ra_noRA.json b/commons/models-json/ra_noRA.json
deleted file mode 100644
index 0d0ad7c0..00000000
--- a/commons/models-json/ra_noRA.json
+++ /dev/null
@@ -1,40 +0,0 @@
-{
- "task_name": {
- "code": "ra",
- "desc": "Risk Aversion Task",
- "cite": []
- },
- "model_name": {
- "code": "noRA",
- "desc": "Prospect Theory, without risk aversion (RA) parameter",
- "cite": [
- "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
- ]
- },
- "model_type": {
- "code": "",
- "desc": "Hierarchical"
- },
- "notes": [],
- "contributors": [],
- "data_columns": {
- "subjID": "A unique identifier for each subject in the data-set.",
- "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
- "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
- "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater.",
- "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
- },
- "parameters": {
- "lambda": {
- "desc": "loss aversion",
- "info": [0, 1, 5]
- },
- "tau": {
- "desc": "inverse temperature",
- "info": [0, 1, 30]
- }
- },
- "regressors": {},
- "postpreds": ["y_pred"],
- "additional_args": []
-}
diff --git a/commons/models-json/ra_prospect.json b/commons/models-json/ra_prospect.json
deleted file mode 100644
index 86f43e01..00000000
--- a/commons/models-json/ra_prospect.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
- "task_name": {
- "code": "ra",
- "desc": "Risk Aversion Task",
- "cite": []
- },
- "model_name": {
- "code": "prospect",
- "desc": "Prospect Theory",
- "cite": [
- "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035"
- ]
- },
- "model_type": {
- "code": "",
- "desc": "Hierarchical"
- },
- "notes": [],
- "contributors": [],
- "data_columns": {
- "subjID": "A unique identifier for each subject in the data-set.",
- "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
- "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
- "cert": "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater.",
- "gamble": "If gamble was taken, gamble == 1; else gamble == 0."
- },
- "parameters": {
- "rho": {
- "desc": "risk aversion",
- "info": [0, 1, 2]
- },
- "lambda": {
- "desc": "loss aversion",
- "info": [0, 1, 5]
- },
- "tau": {
- "desc": "inverse temperature",
- "info": [0, 1, 30]
- }
- },
- "regressors": {},
- "postpreds": ["y_pred"],
- "additional_args": []
-}
diff --git a/commons/models-json/rdt_happiness.json b/commons/models-json/rdt_happiness.json
deleted file mode 100644
index d8d77706..00000000
--- a/commons/models-json/rdt_happiness.json
+++ /dev/null
@@ -1,66 +0,0 @@
-{
- "task_name": {
- "code": "rdt",
- "desc": "Risky Decision Task",
- "cite": []
- },
- "model_name": {
- "code": "happiness",
- "desc": "Happiness Computational Model",
- "cite": [
- "Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257."
- ]
- },
- "model_type": {
- "code": "",
- "desc": "Hierarchical"
- },
- "notes": [],
- "contributors": [
- {
- "name": "Harhim Park",
- "email": "hrpark12@gmail.com",
- "link": "https://ccs-lab.github.io/team/harhim-park/"
- }
- ],
- "data_columns": {
- "subjID": "A unique identifier for each subject in the data-set.",
- "gain": "Possible (50\\%) gain outcome of a risky option (e.g. 9).",
- "loss": "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5).",
- "cert": "Guaranteed amount of a safe option.",
- "type": "Trial type: loss == -1, mixed == 0, gain == 1.",
- "gamble": "If gamble was taken, gamble == 1; else gamble == 0.",
- "outcome": "Result of the trial.",
- "happy": "Happiness score.",
- "RT_happy": "Reaction time for answering the happiness score."
- }, - "parameters": { - "w0": { - "desc": "baseline", - "info": ["-Inf", 1, "Inf"] - }, - "w1": { - "desc": "weight of certain rewards", - "info": ["-Inf", 1, "Inf"] - }, - "w2": { - "desc": "weight of expected values", - "info": ["-Inf", 1, "Inf"] - }, - "w3": { - "desc": "weight of reward prediction errors", - "info": ["-Inf", 1, "Inf"] - }, - "gam": { - "desc": "forgetting factor", - "info": [0, 0.5, 1] - }, - "sig": { - "desc": "standard deviation of error", - "info": [0, 1, "Inf"] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/ts_par4.json b/commons/models-json/ts_par4.json deleted file mode 100644 index fd7d3512..00000000 --- a/commons/models-json/ts_par4.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "task_name": { - "code": "ts", - "desc": "Two-Step Task", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - ] - }, - "model_name": { - "code": "par4", - "desc": "Hybrid Model, with 4 parameters", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027", - "Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Harhim Park", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", - "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", - "reward": "Reward after Level 2 (0 or 1)." - }, - "parameters": { - "a": { - "desc": "learning rate for both stages 1 & 2", - "info": [0, 0.5, 1] - }, - "beta": { - "desc": "inverse temperature for both stages 1 & 2", - "info": [0, 1, "Inf"] - }, - "pi": { - "desc": "perseverance", - "info": [0, 1, 5] - }, - "w": { - "desc": "model-based weight", - "info": [0, 0.5, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred_step1", "y_pred_step2"], - "additional_args": [ - { - "code": "trans_prob", - "default": 0.7, - "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - } - ] -} diff --git a/commons/models-json/ts_par6.json b/commons/models-json/ts_par6.json deleted file mode 100644 index 02a5e3c6..00000000 --- a/commons/models-json/ts_par6.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "task_name": { - "code": "ts", - "desc": "Two-Step Task", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. 
Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - ] - }, - "model_name": { - "code": "par6", - "desc": "Hybrid Model, with 6 parameters", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Harhim Park", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", - "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", - "reward": "Reward after Level 2 (0 or 1)." - }, - "parameters": { - "a1": { - "desc": "learning rate in stage 1", - "info": [0, 0.5, 1] - }, - "beta1": { - "desc": "inverse temperature in stage 1", - "info": [0, 1, "Inf"] - }, - "a2": { - "desc": "learning rate in stage 2", - "info": [0, 0.5, 1] - }, - "beta2": { - "desc": "inverse temperature in stage 2", - "info": [0, 1, "Inf"] - }, - "pi": { - "desc": "perseverance", - "info": [0, 1, 5] - }, - "w": { - "desc": "model-based weight", - "info": [0, 0.5, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred_step1", "y_pred_step2"], - "additional_args": [ - { - "code": "trans_prob", - "default": 0.7, - "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - } - ] -} diff --git a/commons/models-json/ts_par7.json b/commons/models-json/ts_par7.json deleted file mode 100644 index 9e3b5e32..00000000 --- a/commons/models-json/ts_par7.json +++ /dev/null @@ -1,73 +0,0 @@ -{ - "task_name": { - "code": "ts", - "desc": "Two-Step Task", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - ] - }, - "model_name": { - "code": "par7", - "desc": "Hybrid Model, with 7 parameters (original model)", - "cite": [ - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. 
http://doi.org/10.1016/j.neuron.2011.02.027" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Harhim Park", - "email": "hrpark12@gmail.com", - "link": "https://ccs-lab.github.io/team/harhim-park/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "level1_choice": "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).", - "level2_choice": "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.", - "reward": "Reward after Level 2 (0 or 1)." - }, - "parameters": { - "a1": { - "desc": "learning rate in stage 1", - "info": [0, 0.5, 1] - }, - "beta1": { - "desc": "inverse temperature in stage 1", - "info": [0, 1, "Inf"] - }, - "a2": { - "desc": "learning rate in stage 2", - "info": [0, 0.5, 1] - }, - "beta2": { - "desc": "inverse temperature in stage 2", - "info": [0, 1, "Inf"] - }, - "pi": { - "desc": "perseverance", - "info": [0, 1, 5] - }, - "w": { - "desc": "model-based weight", - "info": [0, 0.5, 1] - }, - "lambda": { - "desc": "eligibility trace", - "info": [0, 0.5, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred_step1", "y_pred_step2"], - "additional_args": [ - { - "code": "trans_prob", - "default": 0.7, - "desc": "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - } - ] -} diff --git a/commons/models-json/ug_bayes.json b/commons/models-json/ug_bayes.json deleted file mode 100644 index d34f9078..00000000 --- a/commons/models-json/ug_bayes.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "task_name": { - "code": "ug", - "desc": "Norm-Training Ultimatum Game", - "cite": [] - }, - "model_name": { - "code": "bayes", - "desc": "Ideal Observer Model", - "cite": [ - "Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "offer": "Floating point value representing the offer made in that trial (e.g. 4, 10, 11).", - "accept": "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." - }, - "parameters": { - "alpha": { - "desc": "envy", - "info": [0, 1, 20] - }, - "beta": { - "desc": "guilt", - "info": [0, 0.5, 10] - }, - "tau": { - "desc": "inverse temperature", - "info": [0, 1, 10] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/ug_delta.json b/commons/models-json/ug_delta.json deleted file mode 100644 index 6c797f51..00000000 --- a/commons/models-json/ug_delta.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "task_name": { - "code": "ug", - "desc": "Norm-Training Ultimatum Game", - "cite": [] - }, - "model_name": { - "code": "delta", - "desc": "Rescorla-Wagner (Delta) Model", - "cite": [ - "Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. 
(2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015" - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "offer": "Floating point value representing the offer made in that trial (e.g. 4, 10, 11).", - "accept": "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." - }, - "parameters": { - "alpha": { - "desc": "envy", - "info": [0, 1, 20] - }, - "tau": { - "desc": "inverse temperature", - "info": [0, 1, 10] - }, - "ep": { - "desc": "norm adaptation rate", - "info": [0, 0.5, 1] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-json/wcs_sql.json b/commons/models-json/wcs_sql.json deleted file mode 100644 index 126ae5af..00000000 --- a/commons/models-json/wcs_sql.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "task_name": { - "code": "wcs", - "desc": "Wisconsin Card Sorting Task", - "cite": [] - }, - "model_name": { - "code": "sql", - "desc": "Sequential Learning Model", - "cite": [ - "Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13." - ] - }, - "model_type": { - "code": "", - "desc": "Hierarchical" - }, - "notes": [], - "contributors": [ - { - "name": "Dayeong Min", - "email": "mindy2801@snu.ac.kr", - "link": "https://ccs-lab.github.io/team/dayeong-min/" - } - ], - "data_columns": { - "subjID": "A unique identifier for each subject in the data-set.", - "choice": "Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.", - "outcome": "1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0." - }, - "parameters": { - "r": { - "desc": "reward sensitivity", - "info": [0, 0.1, 1] - }, - "p": { - "desc": "punishment sensitivity", - "info": [0, 0.1, 1] - }, - "d": { - "desc": "decision consistency or inverse temperature", - "info": [0, 1, 5] - } - }, - "regressors": {}, - "postpreds": ["y_pred"], - "additional_args": [] -} diff --git a/commons/models-yaml/bandit2arm_delta.yml b/commons/models-yaml/bandit2arm_delta.yml deleted file mode 100644 index 13c97c3d..00000000 --- a/commons/models-yaml/bandit2arm_delta.yml +++ /dev/null @@ -1,38 +0,0 @@ ---- - task_name: - code: "bandit2arm" - desc: "2-Armed Bandit Task" - cite: - - "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683" - - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x" - model_name: - code: "delta" - desc: "Rescorla-Wagner (Delta) Model" - cite: "" - model_type: - code: "" - desc: "Hierarchical" - notes: - contributors: - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1 or 2." 
- outcome: "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." - parameters: - A: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 5 - regressors: - postpreds: - - "y_pred" - additional_args: - diff --git a/commons/models-yaml/bandit4arm2_kalman_filter.yml b/commons/models-yaml/bandit4arm2_kalman_filter.yml deleted file mode 100644 index 7060b923..00000000 --- a/commons/models-yaml/bandit4arm2_kalman_filter.yml +++ /dev/null @@ -1,65 +0,0 @@ ---- - task_name: - code: "bandit4arm2" - desc: "4-Armed Bandit Task (modified)" - cite: [] - model_name: - code: "kalman_filter" - desc: "Kalman Filter" - cite: - - "Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Yoonseo Zoh" - email: "zohyos7@gmail.com" - link: "https://zohyos7.github.io" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - outcome: "Integer value representing the outcome of the given trial (where reward == 1, and loss == -1)." - parameters: - lambda: - desc: "decay factor" - info: - - 0 - - 0.9 - - 1 - theta: - desc: "decay center" - info: - - 0 - - 50 - - 100 - beta: - desc: "inverse softmax temperature" - info: - - 0 - - 0.1 - - 1 - mu0: - desc: "anticipated initial mean of all 4 options" - info: - - 0 - - 85 - - 100 - sigma0: - desc: "anticipated initial sd (uncertainty factor) of all 4 options" - info: - - 0 - - 6 - - 15 - sigmaD: - desc: "sd of diffusion noise" - info: - - 0 - - 3 - - 15 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bandit4arm_2par_lapse.yml b/commons/models-yaml/bandit4arm_2par_lapse.yml deleted file mode 100644 index 2e3b6f6d..00000000 --- a/commons/models-yaml/bandit4arm_2par_lapse.yml +++ /dev/null @@ -1,44 +0,0 @@ ---- - task_name: - code: "bandit4arm" - desc: "4-Armed Bandit Task" - cite: [] - model_name: - code: "2par_lapse" - desc: "3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)" - cite: - - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." 
- parameters: - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bandit4arm_4par.yml b/commons/models-yaml/bandit4arm_4par.yml deleted file mode 100644 index aebcc42f..00000000 --- a/commons/models-yaml/bandit4arm_4par.yml +++ /dev/null @@ -1,50 +0,0 @@ ---- - task_name: - code: "bandit4arm" - desc: "4-Armed Bandit Task" - cite: [] - model_name: - code: "4par" - desc: "4 Parameter Model, without C (choice perseveration)" - cite: - - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - parameters: - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - R: - desc: "reward sensitivity" - info: - - 0 - - 1 - - 30 - P: - desc: "punishment sensitivity" - info: - - 0 - - 1 - - 30 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bandit4arm_lapse.yml b/commons/models-yaml/bandit4arm_lapse.yml deleted file mode 100644 index fd672974..00000000 --- a/commons/models-yaml/bandit4arm_lapse.yml +++ /dev/null @@ -1,56 +0,0 @@ ---- - task_name: - code: "bandit4arm" - desc: "4-Armed Bandit Task" - cite: [] - model_name: - code: "lapse" - desc: "5 Parameter Model, without C (choice perseveration) but with xi (noise)" - cite: - - "Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - parameters: - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - R: - desc: "reward sensitivity" - info: - - 0 - - 1 - - 30 - P: - desc: "punishment sensitivity" - info: - - 0 - - 1 - - 30 - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bandit4arm_lapse_decay.yml b/commons/models-yaml/bandit4arm_lapse_decay.yml deleted file mode 100644 index 224d3d64..00000000 --- a/commons/models-yaml/bandit4arm_lapse_decay.yml +++ /dev/null @@ -1,62 +0,0 @@ ---- - task_name: - code: "bandit4arm" - desc: "4-Armed Bandit Task" - cite: [] - model_name: - code: "lapse_decay" - desc: "5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro)." 
- cite: - - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - parameters: - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - R: - desc: "reward sensitivity" - info: - - 0 - - 1 - - 30 - P: - desc: "punishment sensitivity" - info: - - 0 - - 1 - - 30 - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - d: - desc: "decay rate" - info: - - 0 - - 0.1 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bandit4arm_singleA_lapse.yml b/commons/models-yaml/bandit4arm_singleA_lapse.yml deleted file mode 100644 index eb88a2a9..00000000 --- a/commons/models-yaml/bandit4arm_singleA_lapse.yml +++ /dev/null @@ -1,50 +0,0 @@ ---- - task_name: - code: "bandit4arm" - desc: "4-Armed Bandit Task" - cite: [] - model_name: - code: "singleA_lapse" - desc: "4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P." - cite: - - "Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018) Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArxiv. 10.31234/osf.io/k5b8m" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on the given trial: 1, 2, 3, or 4." - gain: "Floating point value representing the amount of currency won on the given trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50)." - parameters: - A: - desc: "learning rate" - info: - - 0 - - 0.1 - - 1 - R: - desc: "reward sensitivity" - info: - - 0 - - 1 - - 30 - P: - desc: "punishment sensitivity" - info: - - 0 - - 1 - - 30 - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/bart_par4.yml b/commons/models-yaml/bart_par4.yml deleted file mode 100644 index ca5e3328..00000000 --- a/commons/models-yaml/bart_par4.yml +++ /dev/null @@ -1,73 +0,0 @@ ---- - task_name: - code: "bart" - desc: "Balloon Analogue Risk Task" - cite: [] - model_name: - code: "par4" - desc: "Re-parameterized version of BART model with 4 parameters" - cite: - - "van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105." 
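Every bandit4arm lapse variant above adds the same noise parameter xi: with probability xi the agent guesses uniformly across the four arms, and otherwise responds by softmax over the value signal. A sketch of that mixture choice rule, where q stands in for whatever R/P-weighted utilities the given variant computes:

# Lapse choice rule: softmax mixed with uniform guessing at rate xi (sketch)
lapse_softmax <- function(q, xi) {
  p <- exp(q) / sum(exp(q))        # softmax over (sensitivity-weighted) values
  (1 - xi) * p + xi / length(q)    # each of the 4 arms keeps at least xi/4 mass
}
lapse_softmax(q = c(0.5, 0.1, -0.2, 0.3), xi = 0.1)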
- model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - - - name: "Jaeyeong Yang" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Ayoung Lee" - email: "aylee2008@naver.com" - link: "https://ccs-lab.github.io/team/ayoung-lee/" - - - name: "Jeongbin Oh" - email: "ows0104@gmail.com" - link: "https://ccs-lab.github.io/team/jeongbin-oh/" - - - name: "Jiyoon Lee" - email: "nicole.lee2001@gmail.com" - link: "https://ccs-lab.github.io/team/jiyoon-lee/" - - - name: "Junha Jang" - email: "andy627robo@naver.com" - link: "https://ccs-lab.github.io/team/junha-jang/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - pumps: "The number of pumps." - explosion: "0: intact, 1: burst" - parameters: - phi: - desc: "prior belief of balloon not bursting" - info: - - 0 - - 0.5 - - 1 - eta: - desc: "updating rate" - info: - - 0 - - 1 - - "Inf" - gam: - desc: "risk-taking parameter" - info: - - 0 - - 1 - - "Inf" - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - "Inf" - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/choiceRT_ddm.yml b/commons/models-yaml/choiceRT_ddm.yml deleted file mode 100644 index ab50e0a4..00000000 --- a/commons/models-yaml/choiceRT_ddm.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "choiceRT" - desc: "Choice Reaction Time Task" - cite: [] - model_name: - code: "ddm" - desc: "Drift Diffusion Model" - cite: - - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59" - model_type: - code: "" - desc: "Hierarchical" - notes: - - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters." - - "Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing." - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2)." - RT: "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)." - parameters: - alpha: - desc: "boundary separation" - info: - - 0 - - 0.5 - - "Inf" - beta: - desc: "bias" - info: - - 0 - - 0.5 - - 1 - delta: - desc: "drift rate" - info: - - 0 - - 0.5 - - "Inf" - tau: - desc: "non-decision time" - info: - - 0 - - 0.15 - - 1 - regressors: {} - postpreds: [] - additional_args: - - - code: "RTbound" - default: 0.1 - desc: "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)." - diff --git a/commons/models-yaml/choiceRT_ddm_single.yml b/commons/models-yaml/choiceRT_ddm_single.yml deleted file mode 100644 index ec103589..00000000 --- a/commons/models-yaml/choiceRT_ddm_single.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "choiceRT" - desc: "Choice Reaction Time Task" - cite: [] - model_name: - code: "ddm" - desc: "Drift Diffusion Model" - cite: - - "Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. 
http://doi.org/10.1037/0033-295X.85.2.59" - model_type: - code: "single" - desc: "Individual" - notes: - - "Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978). This implementation estimates the drift rate, boundary separation, starting point, and non-decision time; but not the between- and within-trial variances in these parameters." - - "Code for this model is based on codes/comments by Guido Biele, Joseph Burling, Andrew Ellis, and potential others @ Stan mailing." - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2)." - RT: "Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.)." - parameters: - alpha: - desc: "boundary separation" - info: - - null - - 0.5 - - null - beta: - desc: "bias" - info: - - null - - 0.5 - - null - delta: - desc: "drift rate" - info: - - null - - 0.5 - - null - tau: - desc: "non-decision time" - info: - - null - - 0.15 - - null - regressors: {} - postpreds: [] - additional_args: - - - code: "RTbound" - default: 0.1 - desc: "Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds)." - diff --git a/commons/models-yaml/cra_exp.yml b/commons/models-yaml/cra_exp.yml deleted file mode 100644 index 4ca83c27..00000000 --- a/commons/models-yaml/cra_exp.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "cra" - desc: "Choice Under Risk and Ambiguity Task" - cite: [] - model_name: - code: "exp" - desc: "Exponential Subjective Value Model" - cite: - - "Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - prob: "Objective probability of the variable lottery." - ambig: "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery)." - reward_var: "Amount of reward in variable lottery. Assumed to be greater than zero." - reward_fix: "Amount of reward in fixed lottery. Assumed to be greater than zero." - choice: "If the variable lottery was selected, choice == 1; otherwise choice == 0." - parameters: - alpha: - desc: "risk attitude" - info: - - 0 - - 1 - - 2 - beta: - desc: "ambiguity attitude" - info: - - "-Inf" - - 0 - - "Inf" - gamma: - desc: "inverse temperature" - info: - - 0 - - 1 - - "Inf" - regressors: - sv: 2 - sv_fix: 2 - sv_var: 2 - p_var: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/cra_linear.yml b/commons/models-yaml/cra_linear.yml deleted file mode 100644 index 4586c700..00000000 --- a/commons/models-yaml/cra_linear.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "cra" - desc: "Choice Under Risk and Ambiguity Task" - cite: [] - model_name: - code: "linear" - desc: "Linear Subjective Value Model" - cite: - - "Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. 
Journal of Neurophysiology, 103(2), 1036-1047." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - prob: "Objective probability of the variable lottery." - ambig: "Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery)." - reward_var: "Amount of reward in variable lottery. Assumed to be greater than zero." - reward_fix: "Amount of reward in fixed lottery. Assumed to be greater than zero." - choice: "If the variable lottery was selected, choice == 1; otherwise choice == 0." - parameters: - alpha: - desc: "risk attitude" - info: - - 0 - - 1 - - 2 - beta: - desc: "ambiguity attitude" - info: - - "-Inf" - - 0 - - "Inf" - gamma: - desc: "inverse temperature" - info: - - 0 - - 1 - - "Inf" - regressors: - sv: 2 - sv_fix: 2 - sv_var: 2 - p_var: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dbdm_prob_weight.yml b/commons/models-yaml/dbdm_prob_weight.yml deleted file mode 100644 index a1b179a0..00000000 --- a/commons/models-yaml/dbdm_prob_weight.yml +++ /dev/null @@ -1,60 +0,0 @@ ---- - task_name: - code: "dbdm" - desc: "Description Based Decison Making Task" - cite: [] - model_name: - code: "prob_weight" - desc: "Probability Weight Function" - cite: - - "Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47." - - "Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological science, 15(8), 534-539." - - "Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Yoonseo Zoh" - email: "zohyos7@gmail.com" - link: "https://ccs-lab.github.io/team/yoonseo-zoh/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - opt1hprob: "Possiblity of getting higher value of outcome(opt1hval) when choosing option 1." - opt2hprob: "Possiblity of getting higher value of outcome(opt2hval) when choosing option 2." - opt1hval: "Possible (with opt1hprob probability) outcome of option 1." - opt1lval: "Possible (with (1 - opt1hprob) probability) outcome of option 1." - opt2hval: "Possible (with opt2hprob probability) outcome of option 2." - opt2lval: "Possible (with (1 - opt2hprob) probability) outcome of option 2." - choice: "If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2." 
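The two cra specs above differ only in how ambiguity enters the subjective value of the variable lottery: the linear model subtracts beta * ambig / 2 from the objective probability, while the exponential model raises the probability to the power 1 + beta * ambig; both curve reward by alpha and choose via a logistic rule with inverse temperature gamma. A sketch of these commonly cited forms (assumed from Levy et al., 2010 and Hsu et al., 2005; the Stan files remain authoritative for the exact parameterization):

# Subjective value under risk and ambiguity (assumed forms; see Stan files)
sv_linear <- function(p, ambig, v, alpha, beta) (p - beta * ambig / 2) * v^alpha
sv_exp    <- function(p, ambig, v, alpha, beta) p^(1 + beta * ambig) * v^alpha
# Probability of taking the variable lottery over the fixed one
p_var <- function(sv_var, sv_fix, gamma) 1 / (1 + exp(-gamma * (sv_var - sv_fix)))
p_var(sv_linear(0.5, 0.24, 20, alpha = 1, beta = 0.6), sv_fix = 5, gamma = 1)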
- parameters: - tau: - desc: "probability weight function" - info: - - 0 - - 0.8 - - 1 - rho: - desc: "subject utility function" - info: - - 0 - - 0.7 - - 2 - lambda: - desc: "loss aversion parameter" - info: - - 0 - - 2.5 - - 5 - beta: - desc: "inverse softmax temperature" - info: - - 0 - - 0.2 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dd_cs.yml b/commons/models-yaml/dd_cs.yml deleted file mode 100644 index c3f8c6a2..00000000 --- a/commons/models-yaml/dd_cs.yml +++ /dev/null @@ -1,46 +0,0 @@ ---- - task_name: - code: "dd" - desc: "Delay Discounting Task" - cite: [] - model_name: - code: "cs" - desc: "Constant-Sensitivity (CS) Model" - cite: - - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." - amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." - delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." - amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." - choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - parameters: - r: - desc: "exponential discounting rate" - info: - - 0 - - 0.1 - - 1 - s: - desc: "impatience" - info: - - 0 - - 1 - - 10 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 5 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dd_cs_single.yml b/commons/models-yaml/dd_cs_single.yml deleted file mode 100644 index 43ddf754..00000000 --- a/commons/models-yaml/dd_cs_single.yml +++ /dev/null @@ -1,46 +0,0 @@ ---- - task_name: - code: "dd" - desc: "Delay Discounting Task" - cite: [] - model_name: - code: "cs" - desc: "Constant-Sensitivity (CS) Model" - cite: - - "Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671" - model_type: - code: "single" - desc: "Individual" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." - amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." - delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." - amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." - choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." 
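The constant-sensitivity spec above discounts a delayed amount as A * exp(-(r * D)^s), so s = 1 reduces to exponential discounting and s < 1 yields present bias; beta is the usual inverse temperature in the logistic choice rule. A sketch:

# Constant-sensitivity discounting (Ebert & Prelec, 2007), illustrative sketch
cs_value <- function(amount, delay, r, s) amount * exp(-(r * delay)^s)
p_later  <- function(v_later, v_sooner, beta) 1 / (1 + exp(-beta * (v_later - v_sooner)))
cs_value(30.9, delay = 28, r = 0.1, s = 1)   # s = 1: plain exponential discounting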
- parameters: - r: - desc: "exponential discounting rate" - info: - - null - - 0.1 - - null - s: - desc: "impatience" - info: - - null - - 1 - - null - beta: - desc: "inverse temperature" - info: - - null - - 1 - - null - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dd_exp.yml b/commons/models-yaml/dd_exp.yml deleted file mode 100644 index eadaad38..00000000 --- a/commons/models-yaml/dd_exp.yml +++ /dev/null @@ -1,40 +0,0 @@ ---- - task_name: - code: "dd" - desc: "Delay Discounting Task" - cite: [] - model_name: - code: "exp" - desc: "Exponential Model" - cite: - - "Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." - amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." - delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." - amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." - choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - parameters: - r: - desc: "exponential discounting rate" - info: - - 0 - - 0.1 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 5 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dd_hyperbolic.yml b/commons/models-yaml/dd_hyperbolic.yml deleted file mode 100644 index 965f79ce..00000000 --- a/commons/models-yaml/dd_hyperbolic.yml +++ /dev/null @@ -1,40 +0,0 @@ ---- - task_name: - code: "dd" - desc: "Delay Discounting Task" - cite: [] - model_name: - code: "hyperbolic" - desc: "Hyperbolic Model" - cite: - - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." - amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." - delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." - amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." - choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - parameters: - k: - desc: "discounting rate" - info: - - 0 - - 0.1 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 5 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/dd_hyperbolic_single.yml b/commons/models-yaml/dd_hyperbolic_single.yml deleted file mode 100644 index e820f463..00000000 --- a/commons/models-yaml/dd_hyperbolic_single.yml +++ /dev/null @@ -1,40 +0,0 @@ ---- - task_name: - code: "dd" - desc: "Delay Discounting Task" - cite: [] - model_name: - code: "hyperbolic" - desc: "Hyperbolic Model" - cite: - - "Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement." 
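The exponential and hyperbolic specs above share the same two-parameter structure and differ only in the discount function, exp(-r * D) versus 1 / (1 + k * D):

# Exponential (Samuelson, 1937) vs. hyperbolic (Mazur, 1987) discount functions
v_exp <- function(amount, delay, r) amount * exp(-r * delay)
v_hyp <- function(amount, delay, k) amount / (1 + k * delay)
c(exponential = v_exp(10.5, 6, r = 0.1), hyperbolic = v_hyp(10.5, 6, k = 0.1))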
- model_type: - code: "single" - desc: "Individual" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - delay_later: "An integer representing the delayed days for the later option (e.g. 1, 6, 28)." - amount_later: "A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9)." - delay_sooner: "An integer representing the delayed days for the sooner option (e.g. 0)." - amount_sooner: "A floating point number representing the amount for the sooner option (e.g. 10)." - choice: "If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0." - parameters: - k: - desc: "discounting rate" - info: - - null - - 0.1 - - null - beta: - desc: "inverse temperature" - info: - - null - - 1 - - null - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/gng_m1.yml b/commons/models-yaml/gng_m1.yml deleted file mode 100644 index 0037ea53..00000000 --- a/commons/models-yaml/gng_m1.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- - task_name: - code: "gng" - desc: "Orthogonalized Go/Nogo Task" - cite: [] - model_name: - code: "m1" - desc: "RW + noise" - cite: - - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." - keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." - outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." - parameters: - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - ep: - desc: "learning rate" - info: - - 0 - - 0.2 - - 1 - rho: - desc: "effective size" - info: - - 0 - - "exp(2)" - - "Inf" - regressors: - Qgo: 2 - Qnogo: 2 - Wgo: 2 - Wnogo: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/gng_m2.yml b/commons/models-yaml/gng_m2.yml deleted file mode 100644 index 318d7ea6..00000000 --- a/commons/models-yaml/gng_m2.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "gng" - desc: "Orthogonalized Go/Nogo Task" - cite: [] - model_name: - code: "m2" - desc: "RW + noise + bias" - cite: - - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." - keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." - outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
- parameters: - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - ep: - desc: "learning rate" - info: - - 0 - - 0.2 - - 1 - b: - desc: "action bias" - info: - - "-Inf" - - 0 - - "Inf" - rho: - desc: "effective size" - info: - - 0 - - "exp(2)" - - "Inf" - regressors: - Qgo: 2 - Qnogo: 2 - Wgo: 2 - Wnogo: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/gng_m3.yml b/commons/models-yaml/gng_m3.yml deleted file mode 100644 index 881b80a1..00000000 --- a/commons/models-yaml/gng_m3.yml +++ /dev/null @@ -1,61 +0,0 @@ ---- - task_name: - code: "gng" - desc: "Orthogonalized Go/Nogo Task" - cite: [] - model_name: - code: "m3" - desc: "RW + noise + bias + pi" - cite: - - "Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." - keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." - outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." - parameters: - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - ep: - desc: "learning rate" - info: - - 0 - - 0.2 - - 1 - b: - desc: "action bias" - info: - - "-Inf" - - 0 - - "Inf" - pi: - desc: "Pavlovian bias" - info: - - "-Inf" - - 0 - - "Inf" - rho: - desc: "effective size" - info: - - 0 - - "exp(2)" - - "Inf" - regressors: - Qgo: 2 - Qnogo: 2 - Wgo: 2 - Wnogo: 2 - SV: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/gng_m4.yml b/commons/models-yaml/gng_m4.yml deleted file mode 100644 index 4066f3e2..00000000 --- a/commons/models-yaml/gng_m4.yml +++ /dev/null @@ -1,67 +0,0 @@ ---- - task_name: - code: "gng" - desc: "Orthogonalized Go/Nogo Task" - cite: [] - model_name: - code: "m4" - desc: "RW (rew/pun) + noise + bias + pi" - cite: - - "Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - cue: "Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4." - keyPressed: "Binary value representing the subject's response for that trial (where Press == 1; No press == 0)." - outcome: "Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1)." 
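In the gng specs above, the go action weight stacks the learned value, the static action bias b, and (from m3 onward) a Pavlovian term pi * V(s); the noise xi then mixes in uniform responding over go/no-go. A sketch of the resulting go probability, assuming the usual two-action softmax (set b = pi = 0 to recover m1):

# Go probability with action bias, Pavlovian bias, and lapse (sketch)
p_go <- function(q_go, q_nogo, b, pi, sv, xi) {
  w_go   <- q_go + b + pi * sv           # instrumental value + action bias + Pavlovian term
  w_nogo <- q_nogo
  p <- 1 / (1 + exp(-(w_go - w_nogo)))   # softmax over the two actions
  (1 - xi) * p + xi / 2                  # xi: uniform lapse over go/no-go
}
p_go(q_go = 0.4, q_nogo = 0.1, b = 0.2, pi = 0.3, sv = 0.5, xi = 0.1)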
- parameters: - xi: - desc: "noise" - info: - - 0 - - 0.1 - - 1 - ep: - desc: "learning rate" - info: - - 0 - - 0.2 - - 1 - b: - desc: "action bias" - info: - - "-Inf" - - 0 - - "Inf" - pi: - desc: "Pavlovian bias" - info: - - "-Inf" - - 0 - - "Inf" - rhoRew: - desc: "reward sensitivity" - info: - - 0 - - "exp(2)" - - "Inf" - rhoPun: - desc: "punishment sensitivity" - info: - - 0 - - "exp(2)" - - "Inf" - regressors: - Qgo: 2 - Qnogo: 2 - Wgo: 2 - Wnogo: 2 - SV: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/igt_orl.yml b/commons/models-yaml/igt_orl.yml deleted file mode 100644 index 592e8a29..00000000 --- a/commons/models-yaml/igt_orl.yml +++ /dev/null @@ -1,65 +0,0 @@ ---- - task_name: - code: "igt" - desc: "Iowa Gambling Task" - cite: - - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - model_name: - code: "orl" - desc: "Outcome-Representation Learning Model" - cite: - - "Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Nate Haines" - email: "haines.175@osu.edu" - link: "https://ccs-lab.github.io/team/nate-haines/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." - gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - parameters: - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - K: - desc: "perseverance decay" - info: - - 0 - - 0.1 - - 5 - betaF: - desc: "outcome frequency weight" - info: - - "-Inf" - - 0.1 - - "Inf" - betaP: - desc: "perseverance weight" - info: - - "-Inf" - - 1 - - "Inf" - regressors: {} - postpreds: - - "y_pred" - additional_args: - - - code: "payscale" - default: 100 - desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - diff --git a/commons/models-yaml/igt_pvl_decay.yml b/commons/models-yaml/igt_pvl_decay.yml deleted file mode 100644 index f10a3a33..00000000 --- a/commons/models-yaml/igt_pvl_decay.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- - task_name: - code: "igt" - desc: "Iowa Gambling Task" - cite: - - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - model_name: - code: "pvl_decay" - desc: "Prospect Valence Learning (PVL) Decay-RI" - cite: - - "Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." 
- choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." - gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - parameters: - A: - desc: "decay rate" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "outcome sensitivity" - info: - - 0 - - 0.5 - - 2 - cons: - desc: "response consistency" - info: - - 0 - - 1 - - 5 - lambda: - desc: "loss aversion" - info: - - 0 - - 1 - - 10 - regressors: {} - postpreds: - - "y_pred" - additional_args: - - - code: "payscale" - default: 100 - desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - diff --git a/commons/models-yaml/igt_pvl_delta.yml b/commons/models-yaml/igt_pvl_delta.yml deleted file mode 100644 index 8e396457..00000000 --- a/commons/models-yaml/igt_pvl_delta.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- - task_name: - code: "igt" - desc: "Iowa Gambling Task" - cite: - - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - model_name: - code: "pvl_delta" - desc: "Prospect Valence Learning (PVL) Delta" - cite: - - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." - gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - parameters: - A: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "outcome sensitivity" - info: - - 0 - - 0.5 - - 2 - cons: - desc: "response consistency" - info: - - 0 - - 1 - - 5 - lambda: - desc: "loss aversion" - info: - - 0 - - 1 - - 10 - regressors: {} - postpreds: - - "y_pred" - additional_args: - - - code: "payscale" - default: 100 - desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - diff --git a/commons/models-yaml/igt_vpp.yml b/commons/models-yaml/igt_vpp.yml deleted file mode 100644 index 0798673b..00000000 --- a/commons/models-yaml/igt_vpp.yml +++ /dev/null @@ -1,79 +0,0 @@ ---- - task_name: - code: "igt" - desc: "Iowa Gambling Task" - cite: - - "Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992" - model_name: - code: "vpp" - desc: "Value-Plus-Perseverance" - cite: - - "Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." 
- choice: "Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4)." - gain: "Floating point value representing the amount of currency won on that trial (e.g. 50, 100)." - loss: "Floating point value representing the amount of currency lost on that trial (e.g. 0, -50)." - parameters: - A: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "outcome sensitivity" - info: - - 0 - - 0.5 - - 2 - cons: - desc: "response consistency" - info: - - 0 - - 1 - - 5 - lambda: - desc: "loss aversion" - info: - - 0 - - 1 - - 10 - epP: - desc: "gain impact" - info: - - "-Inf" - - 0 - - "Inf" - epN: - desc: "loss impact" - info: - - "-Inf" - - 0 - - "Inf" - K: - desc: "decay rate" - info: - - 0 - - 0.5 - - 1 - w: - desc: "RL weight" - info: - - 0 - - 0.5 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: - - - code: "payscale" - default: 100 - desc: "Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100." - diff --git a/commons/models-yaml/peer_ocu.yml b/commons/models-yaml/peer_ocu.yml deleted file mode 100644 index e56ddee1..00000000 --- a/commons/models-yaml/peer_ocu.yml +++ /dev/null @@ -1,52 +0,0 @@ ---- - task_name: - code: "peer" - desc: "Peer Influence Task" - cite: - - "Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916." - model_name: - code: "ocu" - desc: "Other-Conferred Utility (OCU) Model" - cite: [] - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - condition: "0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky)." - p_gamble: "Probability of receiving a high payoff (same for both options)." - safe_Hpayoff: "High payoff of the safe option." - safe_Lpayoff: "Low payoff of the safe option." - risky_Hpayoff: "High payoff of the risky option." - risky_Lpayoff: "Low payoff of the risky option." - choice: "Which option was chosen? 0: safe, 1: risky." - parameters: - rho: - desc: "risk preference" - info: - - 0 - - 1 - - 2 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - "Inf" - ocu: - desc: "other-conferred utility" - info: - - "-Inf" - - 0 - - "Inf" - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_ewa.yml b/commons/models-yaml/prl_ewa.yml deleted file mode 100644 index 90487c43..00000000 --- a/commons/models-yaml/prl_ewa.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "ewa" - desc: "Experience-Weighted Attraction Model" - cite: - - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. 
http://doi.org/10.1016/j.neuron.2013.08.030" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - phi: - desc: "1 - learning rate" - info: - - 0 - - 0.5 - - 1 - rho: - desc: "experience decay factor" - info: - - 0 - - 0.1 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - ew_c: 2 - ew_nc: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_fictitious.yml b/commons/models-yaml/prl_fictitious.yml deleted file mode 100644 index 51f05afb..00000000 --- a/commons/models-yaml/prl_fictitious.yml +++ /dev/null @@ -1,56 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "fictitious" - desc: "Fictitious Update Model" - cite: - - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - eta: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "indecision point" - info: - - "-Inf" - - 0 - - "Inf" - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - pe_c: 2 - pe_nc: 2 - dv: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_fictitious_multipleB.yml b/commons/models-yaml/prl_fictitious_multipleB.yml deleted file mode 100644 index 261fb792..00000000 --- a/commons/models-yaml/prl_fictitious_multipleB.yml +++ /dev/null @@ -1,57 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "fictitious" - desc: "Fictitious Update Model" - cite: - - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. 
http://doi.org/10.1093/cercor/bhn098" - model_type: - code: "multipleB" - desc: "Multiple-Block Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - block: "A unique identifier for each of the multiple blocks within each subject." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - eta: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "indecision point" - info: - - "-Inf" - - 0 - - "Inf" - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 3 - ev_nc: 3 - pe_c: 3 - pe_nc: 3 - dv: 3 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_fictitious_rp.yml b/commons/models-yaml/prl_fictitious_rp.yml deleted file mode 100644 index d110785d..00000000 --- a/commons/models-yaml/prl_fictitious_rp.yml +++ /dev/null @@ -1,63 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "fictitious_rp" - desc: "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)" - cite: - - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
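The fictitious-update specs above learn about both options on every trial: the chosen option is updated toward the observed outcome and the unchosen option toward its mirror image, -outcome. A sketch, with the indecision point alpha shifting the logistic choice rule (alpha's sign convention here is an assumption; the rp and woa variants split eta by PE sign or drop alpha):

# Fictitious update: learn about chosen and unchosen options on every trial (sketch)
fictitious_step <- function(ev, choice, outcome, eta) {
  other <- 3 - choice                                       # the unchosen of two options
  ev[choice] <- ev[choice] + eta * ( outcome - ev[choice])  # real prediction error
  ev[other]  <- ev[other]  + eta * (-outcome - ev[other])   # fictitious prediction error
  ev
}
# Choice rule with indecision point alpha and inverse temperature beta
p_choose1 <- function(ev, alpha, beta) 1 / (1 + exp(-beta * (ev[1] - ev[2] - alpha)))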
- parameters: - eta_pos: - desc: "learning rate, +PE" - info: - - 0 - - 0.5 - - 1 - eta_neg: - desc: "learning rate, -PE" - info: - - 0 - - 0.5 - - 1 - alpha: - desc: "indecision point" - info: - - "-Inf" - - 0 - - "Inf" - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - pe_c: 2 - pe_nc: 2 - dv: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_fictitious_rp_woa.yml b/commons/models-yaml/prl_fictitious_rp_woa.yml deleted file mode 100644 index a435b0d1..00000000 --- a/commons/models-yaml/prl_fictitious_rp_woa.yml +++ /dev/null @@ -1,57 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "fictitious_rp_woa" - desc: "Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)" - cite: - - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - eta_pos: - desc: "learning rate, +PE" - info: - - 0 - - 0.5 - - 1 - eta_neg: - desc: "learning rate, -PE" - info: - - 0 - - 0.5 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - pe_c: 2 - pe_nc: 2 - dv: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_fictitious_woa.yml b/commons/models-yaml/prl_fictitious_woa.yml deleted file mode 100644 index 48b3a436..00000000 --- a/commons/models-yaml/prl_fictitious_woa.yml +++ /dev/null @@ -1,50 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "fictitious_woa" - desc: "Fictitious Update Model, without alpha (indecision point)" - cite: - - "Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." 
- choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - eta: - desc: "learning rate" - info: - - 0 - - 0.5 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - pe_c: 2 - pe_nc: 2 - dv: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_rp.yml b/commons/models-yaml/prl_rp.yml deleted file mode 100644 index 3db4e297..00000000 --- a/commons/models-yaml/prl_rp.yml +++ /dev/null @@ -1,54 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "rp" - desc: "Reward-Punishment Model" - cite: - - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." - parameters: - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 2 - ev_nc: 2 - pe: 2 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/prl_rp_multipleB.yml b/commons/models-yaml/prl_rp_multipleB.yml deleted file mode 100644 index 8f720519..00000000 --- a/commons/models-yaml/prl_rp_multipleB.yml +++ /dev/null @@ -1,55 +0,0 @@ ---- - task_name: - code: "prl" - desc: "Probabilistic Reversal Learning Task" - cite: [] - model_name: - code: "rp" - desc: "Reward-Punishment Model" - cite: - - "Ouden, den, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030" - model_type: - code: "multipleB" - desc: "Multiple-Block Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang (for model-based regressors)" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - - - name: "Harhim Park (for model-based regressors)" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - block: "A unique identifier for each of the multiple blocks within each subject." - choice: "Integer value representing the option chosen on that trial: 1 or 2." - outcome: "Integer value representing the outcome of that trial (where reward == 1, and loss == -1)." 
- parameters: - Apun: - desc: "punishment learning rate" - info: - - 0 - - 0.1 - - 1 - Arew: - desc: "reward learning rate" - info: - - 0 - - 0.1 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: - ev_c: 3 - ev_nc: 3 - pe: 3 - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/pst_gainloss_Q.yml b/commons/models-yaml/pst_gainloss_Q.yml deleted file mode 100644 index d674093f..00000000 --- a/commons/models-yaml/pst_gainloss_Q.yml +++ /dev/null @@ -1,48 +0,0 @@ ---- - task_name: - code: "pst" - desc: "Probabilistic Selection Task" - cite: [] - model_name: - code: "gainloss_Q" - desc: "Gain-Loss Q Learning Model" - cite: - - "Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Jaeyeong Yang" - email: "jaeyeong.yang1125@gmail.com" - link: "https://ccs-lab.github.io/team/jaeyeong-yang/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - type: "Two-digit number indicating which pair of stimuli were presented for that trial, e.g. 12, 34, or 56. The digit on the left (tens-digit) indicates the presented stimulus for option1, while the digit on the right (ones-digit) indicates that for option2. Code for each stimulus type (1~6) is defined as for 80\\% (type 1), 20\\% (type 2), 70\\% (type 3), 30\\% (type 4), 60\\% (type 5), 40\\% (type 6). The modeling will still work even if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6." - choice: "Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0)." - reward: "Amount of reward earned as a result of the trial." - parameters: - alpha_pos: - desc: "learning rate for positive feedbacks" - info: - - 0 - - 0.5 - - 1 - alpha_neg: - desc: "learning rate for negative feedbacks" - info: - - 0 - - 0.5 - - 1 - beta: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/ra_noLA.yml b/commons/models-yaml/ra_noLA.yml deleted file mode 100644 index 06b293f1..00000000 --- a/commons/models-yaml/ra_noLA.yml +++ /dev/null @@ -1,39 +0,0 @@ ---- - task_name: - code: "ra" - desc: "Risk Aversion Task" - cite: [] - model_name: - code: "noLA" - desc: "Prospect Theory, without loss aversion (LA) parameter" - cite: - - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." - loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." - cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." - gamble: "If gamble was taken, gamble == 1; else gamble == 0." 
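The ra specs above value each 50/50 gamble with a prospect-theory utility, gains curved by rho and losses scaled by lambda (lambda is fixed in the noLA variant, rho in noRA), and compare it against the certain amount through a softmax with inverse temperature tau. A sketch following the Sokol-Hessner et al. (2009) parameterization:

# Prospect-theory valuation for the ra task (illustrative sketch)
pt_util <- function(x, rho, lambda) {
  ifelse(x >= 0, x^rho, -lambda * (-x)^rho)  # gains curved by rho, losses scaled by lambda
}
p_gamble <- function(gain, loss, cert, rho, lambda, tau) {
  ev_gamble <- 0.5 * pt_util(gain, rho, lambda) + 0.5 * pt_util(-abs(loss), rho, lambda)
  1 / (1 + exp(-tau * (ev_gamble - pt_util(cert, rho, lambda))))  # softmax with inverse temp tau
}
p_gamble(gain = 9, loss = 5, cert = 0, rho = 1, lambda = 2, tau = 1)  # loss may be coded 5 or -5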
- parameters: - rho: - desc: "risk aversion" - info: - - 0 - - 1 - - 2 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 30 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/ra_noRA.yml b/commons/models-yaml/ra_noRA.yml deleted file mode 100644 index 64730ac9..00000000 --- a/commons/models-yaml/ra_noRA.yml +++ /dev/null @@ -1,39 +0,0 @@ ---- - task_name: - code: "ra" - desc: "Risk Aversion Task" - cite: [] - model_name: - code: "noRA" - desc: "Prospect Theory, without risk aversion (RA) parameter" - cite: - - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." - loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." - cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." - gamble: "If gamble was taken, gamble == 1; else gamble == 0." - parameters: - lambda: - desc: "loss aversion" - info: - - 0 - - 1 - - 5 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 30 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/ra_prospect.yml b/commons/models-yaml/ra_prospect.yml deleted file mode 100644 index 58718dfb..00000000 --- a/commons/models-yaml/ra_prospect.yml +++ /dev/null @@ -1,45 +0,0 @@ ---- - task_name: - code: "ra" - desc: "Risk Aversion Task" - cite: [] - model_name: - code: "prospect" - desc: "Prospect Theory" - cite: - - "Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." - loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." - cert: "Guaranteed amount of a safe option. \"cert\" is assumed to be zero or greater than zero." - gamble: "If gamble was taken, gamble == 1; else gamble == 0." - parameters: - rho: - desc: "risk aversion" - info: - - 0 - - 1 - - 2 - lambda: - desc: "loss aversion" - info: - - 0 - - 1 - - 5 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 30 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/rdt_happiness.yml b/commons/models-yaml/rdt_happiness.yml deleted file mode 100644 index 43d5cb2b..00000000 --- a/commons/models-yaml/rdt_happiness.yml +++ /dev/null @@ -1,71 +0,0 @@ ---- - task_name: - code: "rdt" - desc: "Risky Decision Task" - cite: [] - model_name: - code: "happiness" - desc: "Happiness Computational Model" - cite: - - "Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. 
Proceedings of the National Academy of Sciences, 111(33), 12252-12257." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - gain: "Possible (50\\%) gain outcome of a risky option (e.g. 9)." - loss: "Possible (50\\%) loss outcome of a risky option (e.g. 5, or -5)." - cert: "Guaranteed amount of a safe option." - type: "loss == -1, mixed == 0, gain == 1" - gamble: "If gamble was taken, gamble == 1; else gamble == 0." - outcome: "Result of the trial." - happy: "Happiness score." - RT_happy: "Reaction time for answering the happiness score." - parameters: - w0: - desc: "baseline" - info: - - "-Inf" - - 1 - - "Inf" - w1: - desc: "weight of certain rewards" - info: - - "-Inf" - - 1 - - "Inf" - w2: - desc: "weight of expected values" - info: - - "-Inf" - - 1 - - "Inf" - w3: - desc: "weight of reward prediction errors" - info: - - "-Inf" - - 1 - - "Inf" - gam: - desc: "forgetting factor" - info: - - 0 - - 0.5 - - 1 - sig: - desc: "standard deviation of error" - info: - - 0 - - 1 - - "Inf" - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/ts_par4.yml b/commons/models-yaml/ts_par4.yml deleted file mode 100644 index f62b1872..00000000 --- a/commons/models-yaml/ts_par4.yml +++ /dev/null @@ -1,61 +0,0 @@ ---- - task_name: - code: "ts" - desc: "Two-Step Task" - cite: - - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - model_name: - code: "par4" - desc: "Hybrid Model, with 4 parameters" - cite: - - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - - "Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." - level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." - reward: "Reward after Level 2 (0 or 1)." 
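A note on `additional_args`: the ts_par4 entry continuing below declares an optional `trans_prob` argument (default 0.7), and the converters surface such entries in the generated docstrings. A self-contained sketch of that rendering, mirroring the `message_additional_args` helper that PATCH 095 further down adds to convert-to-py.py (the `args` values are copied from the ts_par4 entry):

    # Render an `additional_args` list the way the Python docstring
    # generator does; this helper body mirrors convert-to-py.py below.
    def message_additional_args(additional_args):
        if additional_args:
            return ('For this model, it\'s possible to set the following '
                    'model-specific argument to a value that you may '
                    'prefer.\n\n  '
                    + '\n  '.join('- ``{}``: {}'.format(d['code'], d['desc'])
                                  for d in additional_args))
        return 'Not used for this model.'

    args = [{'code': 'trans_prob',
             'desc': 'Common state transition probability from Stage (Level) '
                     '1 to Stage (Level) 2. Defaults to 0.7.'}]
    print(message_additional_args(args))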
- parameters: - a: - desc: "learning rate for both stages 1 & 2" - info: - - 0 - - 0.5 - - 1 - beta: - desc: "inverse temperature for both stages 1 & 2" - info: - - 0 - - 1 - - "Inf" - pi: - desc: "perseverance" - info: - - 0 - - 1 - - 5 - w: - desc: "model-based weight" - info: - - 0 - - 0.5 - - 1 - regressors: {} - postpreds: - - "y_pred_step1" - - "y_pred_step2" - additional_args: - - - code: "trans_prob" - default: 0.7 - desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - diff --git a/commons/models-yaml/ts_par6.yml b/commons/models-yaml/ts_par6.yml deleted file mode 100644 index 1a840cfa..00000000 --- a/commons/models-yaml/ts_par6.yml +++ /dev/null @@ -1,72 +0,0 @@ ---- - task_name: - code: "ts" - desc: "Two-Step Task" - cite: - - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - model_name: - code: "par6" - desc: "Hybrid Model, with 6 parameters" - cite: - - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." - level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." - reward: "Reward after Level 2 (0 or 1)." - parameters: - a1: - desc: "learning rate in stage 1" - info: - - 0 - - 0.5 - - 1 - beta1: - desc: "inverse temperature in stage 1" - info: - - 0 - - 1 - - "Inf" - a2: - desc: "learning rate in stage 2" - info: - - 0 - - 0.5 - - 1 - beta2: - desc: "inverse temperature in stage 2" - info: - - 0 - - 1 - - "Inf" - pi: - desc: "perseverance" - info: - - 0 - - 1 - - 5 - w: - desc: "model-based weight" - info: - - 0 - - 0.5 - - 1 - regressors: {} - postpreds: - - "y_pred_step1" - - "y_pred_step2" - additional_args: - - - code: "trans_prob" - default: 0.7 - desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - diff --git a/commons/models-yaml/ts_par7.yml b/commons/models-yaml/ts_par7.yml deleted file mode 100644 index d82006f2..00000000 --- a/commons/models-yaml/ts_par7.yml +++ /dev/null @@ -1,78 +0,0 @@ ---- - task_name: - code: "ts" - desc: "Two-Step Task" - cite: - - "Daw, N. D., Gershman, S. J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - model_name: - code: "par7" - desc: "Hybrid Model, with 7 parameters (original model)" - cite: - - "Daw, N. D., Gershman, S. 
J., Seymour, B., Ben Seymour, Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Harhim Park" - email: "hrpark12@gmail.com" - link: "https://ccs-lab.github.io/team/harhim-park/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - level1_choice: "Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2)." - level2_choice: "Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).\n Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value." - reward: "Reward after Level 2 (0 or 1)." - parameters: - a1: - desc: "learning rate in stage 1" - info: - - 0 - - 0.5 - - 1 - beta1: - desc: "inverse temperature in stage 1" - info: - - 0 - - 1 - - "Inf" - a2: - desc: "learning rate in stage 2" - info: - - 0 - - 0.5 - - 1 - beta2: - desc: "inverse temperature in stage 2" - info: - - 0 - - 1 - - "Inf" - pi: - desc: "perseverance" - info: - - 0 - - 1 - - 5 - w: - desc: "model-based weight" - info: - - 0 - - 0.5 - - 1 - lambda: - desc: "eligibility trace" - info: - - 0 - - 0.5 - - 1 - regressors: {} - postpreds: - - "y_pred_step1" - - "y_pred_step2" - additional_args: - - - code: "trans_prob" - default: 0.7 - desc: "Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7." - diff --git a/commons/models-yaml/ug_bayes.yml b/commons/models-yaml/ug_bayes.yml deleted file mode 100644 index 3f1f9b93..00000000 --- a/commons/models-yaml/ug_bayes.yml +++ /dev/null @@ -1,43 +0,0 @@ ---- - task_name: - code: "ug" - desc: "Norm-Training Ultimatum Game" - cite: [] - model_name: - code: "bayes" - desc: "Ideal Observer Model" - cite: - - "Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - offer: "Floating point value representing the offer made in that trial (e.g. 4, 10, 11)." - accept: "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." - parameters: - alpha: - desc: "envy" - info: - - 0 - - 1 - - 20 - beta: - desc: "guilt" - info: - - 0 - - 0.5 - - 10 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/ug_delta.yml b/commons/models-yaml/ug_delta.yml deleted file mode 100644 index fa23cf78..00000000 --- a/commons/models-yaml/ug_delta.yml +++ /dev/null @@ -1,43 +0,0 @@ ---- - task_name: - code: "ug" - desc: "Norm-Training Ultimatum Game" - cite: [] - model_name: - code: "delta" - desc: "Rescorla-Wagner (Delta) Model" - cite: - - "Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). 
Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015" - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: [] - data_columns: - subjID: "A unique identifier for each subject in the data-set." - offer: "Floating point value representing the offer made in that trial (e.g. 4, 10, 11)." - accept: "1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0)." - parameters: - alpha: - desc: "envy" - info: - - 0 - - 1 - - 20 - tau: - desc: "inverse temperature" - info: - - 0 - - 1 - - 10 - ep: - desc: "norm adaptation rate" - info: - - 0 - - 0.5 - - 1 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - diff --git a/commons/models-yaml/wcs_sql.yml b/commons/models-yaml/wcs_sql.yml deleted file mode 100644 index 460f309d..00000000 --- a/commons/models-yaml/wcs_sql.yml +++ /dev/null @@ -1,47 +0,0 @@ ---- - task_name: - code: "wcs" - desc: "Wisconsin Card Sorting Task" - cite: [] - model_name: - code: "sql" - desc: "Sequential Learning Model" - cite: - - "Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13." - model_type: - code: "" - desc: "Hierarchical" - notes: [] - contributors: - - - name: "Dayeong Min" - email: "mindy2801@snu.ac.kr" - link: "https://ccs-lab.github.io/team/dayeong-min/" - data_columns: - subjID: "A unique identifier for each subject in the data-set." - choice: "Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4." - outcome: "1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0." - parameters: - r: - desc: "reward sensitivity" - info: - - 0 - - 0.1 - - 1 - p: - desc: "punishment sensitivity" - info: - - 0 - - 0.1 - - 1 - d: - desc: "decision consistency or inverse temperature" - info: - - 0 - - 1 - - 5 - regressors: {} - postpreds: - - "y_pred" - additional_args: [] - From 1b26c25cf4f6c38cfe3a9fde6972aef9dfac9a63 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 20:38:20 +0900 Subject: [PATCH 093/163] Modify it to use YAML files --- commons/.gitignore | 8 +- .../{generate-r-codes.py => convert-to-r.py} | 220 +++++++++++------- commons/generate-codes.sh | 6 +- 3 files changed, 140 insertions(+), 94 deletions(-) rename commons/{generate-r-codes.py => convert-to-r.py} (60%) diff --git a/commons/.gitignore b/commons/.gitignore index de87f6d6..99e78a23 100644 --- a/commons/.gitignore +++ b/commons/.gitignore @@ -1,4 +1,4 @@ -R-codes/ -R-tests/ -Python-codes/ -Python-tests/ +_r-codes/ +_r-tests/ +_py-codes/ +_py-tests/ diff --git a/commons/generate-r-codes.py b/commons/convert-to-r.py similarity index 60% rename from commons/generate-r-codes.py rename to commons/convert-to-r.py index 3365bca1..a5f5b54f 100644 --- a/commons/generate-r-codes.py +++ b/commons/convert-to-r.py @@ -1,29 +1,65 @@ #!/usr/bin/env python3 """ -Generate R codes for hBayesDM using model information defined in a JSON file. +Generate R codes for hBayesDM using model information defined in YAML files. 
""" import sys import argparse -import json import re from pathlib import Path -from typing import List, Iterable, Callable from collections import OrderedDict +import yaml +try: + from yaml import CLoader as Loader, CDumper as Dumper +except ImportError: + from yaml import Loader, Dumper + + +def represent_none(self, _): + return self.represent_scalar('tag:yaml.org,2002:null', '') + + +Dumper.add_representer(type(None), represent_none) + PATH_ROOT = Path(__file__).absolute().parent PATH_MODELS = PATH_ROOT / 'models' PATH_TEMPLATE = PATH_ROOT / 'templates' -PATH_OUTPUT = PATH_ROOT / 'R-codes' -PATH_OUTPUT_TEST = PATH_ROOT / 'R-tests' +PATH_OUTPUT = PATH_ROOT / '_r-codes' +PATH_OUTPUT_TEST = PATH_ROOT / '_r-tests' TEMPLATE_DOCS = PATH_TEMPLATE / 'R_DOCS_TEMPLATE.txt' TEMPLATE_CODE = PATH_TEMPLATE / 'R_CODE_TEMPLATE.txt' TEMPLATE_TEST = PATH_TEMPLATE / 'R_TEST_TEMPLATE.txt' +def ordered_load(stream, Loader=Loader, object_pairs_hook=OrderedDict): + class OrderedLoader(Loader): + pass + + def construct_mapping(loader, node): + loader.flatten_mapping(node) + return object_pairs_hook(loader.construct_pairs(node)) + OrderedLoader.add_constructor( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, + construct_mapping) + return yaml.load(stream, OrderedLoader) + + +def ordered_dump(data, stream=None, Dumper=Dumper, **kwds): + class OrderedDumper(Dumper): + pass + + def _dict_representer(dumper, data): + return dumper.represent_mapping( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, + data.items()) + OrderedDumper.add_representer(OrderedDict, _dict_representer) + return yaml.dump(data, stream, OrderedDumper, **kwds) + + def parse_cite_string(cite): """Parse given APA citation string into a dict object""" - if cite == '': + if not cite: return None regex_authoryear = r'(?P^.+?)\s\((?P\d+?)\)' @@ -67,13 +103,20 @@ def format_fullcite(cites, sep='\n#\' '): def generate_docs(info): # Model full name (Snake-case) model_function = [info['task_name']['code'], info['model_name']['code']] - if info['model_type']['code'] != '': + if info['model_type']['code']: model_function.append(info['model_type']['code']) model_function = '_'.join(model_function) # Citations - task_cite = [parse_cite_string(c) for c in info['task_name']['cite']] - model_cite = [parse_cite_string(c) for c in info['model_name']['cite']] + if info['task_name'].get('cite'): + task_cite = [parse_cite_string(c) for c in info['task_name']['cite']] + else: + task_cite = [] + + if info['model_name'].get('cite'): + model_cite = [parse_cite_string(c) for c in info['model_name']['cite']] + else: + model_cite = [] task_parencite = format_parencite(task_cite) model_parencite = format_parencite(model_cite) @@ -81,7 +124,7 @@ def generate_docs(info): references = format_fullcite(task_cite + model_cite, sep='\n#\'\n#\' ') # Notes - if len(info.get('notes', [])) > 0: + if info.get('notes'): notes = '@note\n#\' \\strong{Notes:}\n#\' ' + \ '\n#\' '.join(info['notes']) notes = '\n#\' ' + notes + '\n#\'' @@ -89,11 +132,14 @@ def generate_docs(info): notes = '' # Contributors - contributors = ', '.join([ - r'\href{%s}{%s} <\email{%s}>' - % (c['link'], c['name'], c['email'].replace('@', '@@')) - for c in info.get('contributors', []) - ]) + if info.get('contributors'): + contributors = ', '.join([ + r'\href{%s}{%s} <\email{%s}>' + % (c['link'], c['name'], c['email'].replace('@', '@@')) + for c in info['contributors'] + ]) + else: + contributors = '' # Data columns data_columns = ', '.join([ @@ -113,26 +159,32 @@ def generate_docs(info): ]) # Regressors - 
regressors = ', '.join([ - '"%s"' % k for k in info.get('regressors', {}).keys() - ]) + if info.get('regressors'): + regressors = ', '.join([ + '"%s"' % k for k in info['regressors'].keys() + ]) + else: + regressors = '' # Postpreds - postpreds = ', '.join([ - '"%s"' % v for v in info.get('postpreds', []) - ]) + if info.get('postpreds'): + postpreds = ', '.join(['"%s"' % v for v in info['postpreds']]) + else: + postpreds = '' # Additional arguments - additional_args = info.get('additional_args', {}) - additional_args_len = len(additional_args) - if additional_args_len > 0: - additional_args_details = '\n#\' '.join([ - r'@templateVar ADDITIONAL_ARGS_%d \item{%s}{%s}' - % (i + 1, v['code'], v['desc']) - for i, v in enumerate(additional_args) - ]) - additional_args_details += '\n#\'' + if info.get('additional_args'): + additional_args = info.get('additional_args', {}) + additional_args_len = len(additional_args) + if additional_args_len > 0: + additional_args_details = '\n#\' '.join([ + r'@templateVar ADDITIONAL_ARGS_%d \item{%s}{%s}' + % (i + 1, v['code'], v['desc']) + for i, v in enumerate(additional_args) + ]) + additional_args_details += '\n#\'' else: + additional_args_len = 0 additional_args_details = '' # Read template for docstring @@ -167,7 +219,7 @@ def generate_docs(info): def generate_code(info): # Model full name (Snake-case) model_function = [info['task_name']['code'], info['model_name']['code']] - if info['model_type']['code'] != '': + if info['model_type']['code']: model_function.append(info['model_type']['code']) model_function = '_'.join(model_function) @@ -184,7 +236,7 @@ def generate_code(info): # Parameters _params = info.get('parameters', {}) - if len(_params) > 0: + if _params and len(_params) > 0: parameters = ',\n '.join([ '"{}" = c({}, {}, {})' .format(k, @@ -199,7 +251,7 @@ def generate_code(info): # Regressors _regs = info.get('regressors', {}) - if len(_regs) > 0: + if _regs and len(_regs) > 0: regressors = ',\n '.join([ '"{}" = {}'.format(k, v) for k, v in _regs.items() ]) @@ -209,7 +261,7 @@ def generate_code(info): # Postpreds _postpreds = info.get('postpreds', []) - if len(_postpreds) > 0: + if _postpreds and len(_postpreds) > 0: postpreds = ', '.join(['"%s"' % v for v in _postpreds]) postpreds = 'c(' + postpreds + ')' else: @@ -237,7 +289,7 @@ def generate_code(info): def generate_test(info): # Model full name (Snake-case) model_function = [info['task_name']['code'], info['model_name']['code']] - if info['model_type']['code'] != '': + if info['model_type']['code']: model_function.append(info['model_type']['code']) model_function = '_'.join(model_function) @@ -250,74 +302,68 @@ def generate_test(info): return test -def main(json_fn, verbose): - with Path(json_fn) as p: - # Check if file exists - if not p.exists(): - print('FileNotFound: ' - 'Please specify existing json_file as argument.') - sys.exit(1) +def main(info_fn): + # Check if file exists + if not info_fn.exists(): + print('FileNotFound:', info_fn) + sys.exit(1) - # Load json_file - with open(p, 'r') as f: - info = json.load(f, object_pairs_hook=OrderedDict) + with open(info_fn, 'r') as f: + info = ordered_load(f, Loader=Loader) + # Generate codes docs = generate_docs(info) code = generate_code(info) test = generate_test(info) output = docs + code - if verbose: - # Print code string to stdout - print(output) - else: - # Model full name (Snake-case) - model_function = [info['task_name']['code'], - info['model_name']['code']] - if info['model_type']['code'] != '': - 
model_function.append(info['model_type']['code']) - model_function = '_'.join(model_function) - - if not PATH_OUTPUT.exists(): - PATH_OUTPUT.mkdir(exist_ok=True) - if not PATH_OUTPUT_TEST.exists(): - PATH_OUTPUT_TEST.mkdir(exist_ok=True) - - # Write model codes - code_fn = PATH_OUTPUT / (model_function + '.R') - with open(code_fn, 'w') as f: - f.write(output) - print('Created file:', code_fn.name) - - # Write test codes - test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.R') - with open(test_fn, 'w') as f: - f.write(test) - print('Created file:', test_fn.name) + # Model full name (Snake-case) + model_function = [info['task_name']['code'], + info['model_name']['code']] + if info['model_type']['code']: + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) + + # Make directories if not exist + if not PATH_OUTPUT.exists(): + PATH_OUTPUT.mkdir(exist_ok=True) + if not PATH_OUTPUT_TEST.exists(): + PATH_OUTPUT_TEST.mkdir(exist_ok=True) + + # Write model codes + code_fn = PATH_OUTPUT / (model_function + '.R') + with open(code_fn, 'w') as f: + f.write(output) + + # Write test codes + test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.R') + with open(test_fn, 'w') as f: + f.write(test) if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument( - '-a', '--all', - help='write for all json files in directory', - action='store_true') parser.add_argument( '-v', '--verbose', - help='print output to stdout instead of writing to file', + help='Whether to print its process.', action='store_true') parser.add_argument( - 'json_file', - help='JSON file of the model to generate corresponding python code', - type=str, nargs='*') + 'info_files', + help='YAML-formatted file(s) for model information.', + type=str, + nargs='*') args = parser.parse_args() - if args.all: - # `all` flag overrides `json_file` & `verbose` - all_json_files = PATH_MODELS.glob('*.json') - for json_fn in all_json_files: - main(json_fn, False) + if args.info_files: + info_fns = [PATH_MODELS / fn for fn in args.info_files] else: - for fn in args.json_file: - main(fn, args.verbose) + info_fns = sorted(PATH_MODELS.glob('*.yml')) + + num_models = len(info_fns) + + for i, info_fn in enumerate(info_fns): + main(info_fn) + if args.verbose: + print('[{:2d} / {:2d}] Done for {}' + .format(i + 1, num_models, info_fn)) diff --git a/commons/generate-codes.sh b/commons/generate-codes.sh index e5c67891..dbfe0d4f 100755 --- a/commons/generate-codes.sh +++ b/commons/generate-codes.sh @@ -1,8 +1,8 @@ #!/bin/bash -python3 generate-r-codes.py -a -cp R-codes/*.R ../R/R/ -cp R-tests/*.R ../R/tests/testthat/ +python3 convert-to-r.py +cp _r-codes/*.R ../R/R/ +cp _r-tests/*.R ../R/tests/testthat/ python3 generate-python-codes.py -a cp Python-codes/_*.py ../Python/hbayesdm/models/ From 7d612080d30474bf2eea9047a8d21dc1fd0ee2dd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 20:38:55 +0900 Subject: [PATCH 094/163] Rename the shell script for validation --- commons/{ValidateAll.sh => validate-models.sh} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename commons/{ValidateAll.sh => validate-models.sh} (100%) diff --git a/commons/ValidateAll.sh b/commons/validate-models.sh similarity index 100% rename from commons/ValidateAll.sh rename to commons/validate-models.sh From c838c0201363ad1e42ffa9a5a8404032abd8e41e Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 21:07:51 +0900 Subject: [PATCH 095/163] Update generator codes --- 
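Two YAML/Python edge cases motivate the guards added throughout this patch. First, an empty value in YAML parses to None while the key itself remains present, so `dict.get()` defaults never kick in; hence the `if info.get(...)` checks. Second, a numeric bound of 0 is falsy, so the old `v['info'][0] if v['info'][0] else 'NULL'` emitted NULL for zero lower bounds, which is why the regenerated R files in the next patch change entries like `c(NULL, 0.5, 1)` to `c(0, 0.5, 1)`; hence the `is not None` checks. An illustrative snippet (assumes PyYAML; not part of the patch):

    import yaml

    # Empty YAML value: key is present but maps to None, so the
    # .get() default is never applied.
    info = yaml.safe_load('regressors:\n')
    print(info['regressors'])            # None
    print(info.get('regressors', {}))    # None -- default NOT applied
    print(info.get('regressors') or {})  # {}   -- falsy guard works here

    # Zero bound: falsy, so a plain truthiness check drops it.
    lower = 0
    print(lower if lower else 'NULL')              # NULL (old behavior)
    print(lower if lower is not None else 'NULL')  # 0 (fixed behavior)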
...erate-python-codes.py => convert-to-py.py} | 308 ++++++++++-------- commons/convert-to-r.py | 15 +- commons/generate-codes.sh | 6 +- .../{ => misc}/ModelInformation.schema.json | 0 commons/{ => misc}/validate-models.sh | 0 5 files changed, 183 insertions(+), 146 deletions(-) rename commons/{generate-python-codes.py => convert-to-py.py} (53%) rename commons/{ => misc}/ModelInformation.schema.json (100%) rename commons/{ => misc}/validate-models.sh (100%) diff --git a/commons/generate-python-codes.py b/commons/convert-to-py.py similarity index 53% rename from commons/generate-python-codes.py rename to commons/convert-to-py.py index f7110d89..46ae65cd 100644 --- a/commons/generate-python-codes.py +++ b/commons/convert-to-py.py @@ -1,45 +1,126 @@ -#!/usr/bin/env python3 """ -Generate Python codes for hBayesDM using model information defined in a JSON file. - -Written by Jethro Lee. +Generate Python codes for hBayesDM using model information defined in YAML +files. """ import sys import argparse -import glob import json import re from pathlib import Path -from typing import List, Iterable, Callable from collections import OrderedDict +import yaml +try: + from yaml import CLoader as Loader, CDumper as Dumper +except ImportError: + from yaml import Loader, Dumper + + +def represent_none(self, _): + return self.represent_scalar('tag:yaml.org,2002:null', '') + + +Dumper.add_representer(type(None), represent_none) + PATH_ROOT = Path(__file__).absolute().parent PATH_MODELS = PATH_ROOT / 'models' PATH_TEMPLATE = PATH_ROOT / 'templates' -PATH_OUTPUT = PATH_ROOT / 'Python-codes' -PATH_OUTPUT_TEST = PATH_ROOT / 'Python-tests' +PATH_OUTPUT_CODE = PATH_ROOT / '_py-codes' +PATH_OUTPUT_TEST = PATH_ROOT / '_py-tests' TEMPLATE_DOCS = PATH_TEMPLATE / 'PY_DOCS_TEMPLATE.txt' TEMPLATE_CODE = PATH_TEMPLATE / 'PY_CODE_TEMPLATE.txt' TEMPLATE_TEST = PATH_TEMPLATE / 'PY_TEST_TEMPLATE.txt' -def main(json_file, verbose): - # Make Path object for given filename - path_fn = Path(json_file) +def ordered_load(stream, Loader=Loader, object_pairs_hook=OrderedDict): + class OrderedLoader(Loader): + pass + + def construct_mapping(loader, node): + loader.flatten_mapping(node) + return object_pairs_hook(loader.construct_pairs(node)) + OrderedLoader.add_constructor( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, + construct_mapping) + return yaml.load(stream, OrderedLoader) + + +def ordered_dump(data, stream=None, Dumper=Dumper, **kwds): + class OrderedDumper(Dumper): + pass + + def _dict_representer(dumper, data): + return dumper.represent_mapping( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, + data.items()) + OrderedDumper.add_representer(OrderedDict, _dict_representer) + return yaml.dump(data, stream, OrderedDumper, **kwds) + + +def format_list(data, fmt, sep): + return sep.join(map(fmt.format, data)) + + +def format_dict(data, fmt, sep, pre=lambda v: v): + return sep.join(fmt.format(k, pre(v)) for k, v in data.items()) + + +def format_list_of_dict(data, *keys, fmt, sep): + return sep.join(fmt.format(*(d[k] for k in keys)) for d in data) + + +def message_model_regressor_parameter(regressors): + if regressors: + return 'For this model they are: ' + format_list( + regressors, fmt='"{}"', sep=', ') + else: + return 'Currently not available for this model' + + +def message_model_regressor_return(regressors): + if regressors: + return ( + '- ``model_regressor``: ' + + 'Dict holding the extracted model-based regressors.') + else: + return '' + + +def message_postpreds(postpreds): + if not postpreds: + return '**(Currently 
not available.)** ' + else: + return '' + + +def message_additional_args(additional_args): + if additional_args: + return ( + 'For this model, it\'s possible to set the following model-' + + 'specific argument to a value that you may prefer.\n\n ' + + format_list_of_dict( + additional_args, + 'code', 'desc', + fmt='- ``{}``: {}', + sep='\n ')) + else: + return 'Not used for this model.' + +def main(info_fn): # Check if file exists - if not path_fn.exists(): - print('FileNotFound: Please specify existing json_file as argument.') + if not info_fn.exists(): + print('FileNotFound:', info_fn) sys.exit(1) - # Load json_file - with open(path_fn, 'r') as f: - info = model_info = json.load(f, object_pairs_hook=OrderedDict) + # Load model information + with open(info_fn, 'r') as f: + info = ordered_load(f, Loader=Loader) # Model full name (Snake-case) model_function = [info['task_name']['code'], info['model_name']['code']] - if info['model_type']['code'] != '': + if info['model_type']['code']: model_function.append(info['model_type']['code']) model_function = '_'.join(model_function) @@ -47,9 +128,14 @@ def main(json_file, verbose): class_name = model_function.title().replace('_', '') # Prefix to preprocess_func - prefix_preprocess_func = model_info['task_name']['code'] - if model_info['model_type']['code']: - prefix_preprocess_func += '_' + model_info['model_type']['code'] + prefix_preprocess_func = info['task_name']['code'] + if info['model_type']['code']: + prefix_preprocess_func += '_' + info['model_type']['code'] + + # Model type code + model_type_code = info['model_type'].get('code') + if model_type_code is None: + model_type_code = '' # Preprocess citations def shortify(cite: str) -> str: @@ -57,16 +143,24 @@ def shortify(cite: str) -> str: m = re.search('\\((\\d{4})\\)', cite) year = m.group(1) if m else '' return last_name + year - task_cite = OrderedDict( - (shortify(cite), cite) for cite in model_info['task_name']['cite']) - model_cite = OrderedDict( - (shortify(cite), cite) for cite in model_info['model_name']['cite']) + + if info['task_name'].get('cite'): + task_cite = OrderedDict( + (shortify(cite), cite) for cite in info['task_name']['cite']) + else: + task_cite = {} + + if info['model_name'].get('cite'): + model_cite = OrderedDict( + (shortify(cite), cite) for cite in info['model_name']['cite']) + else: + model_cite = {} # Read template for docstring with open(TEMPLATE_DOCS, 'r') as f: docstring_template = f.read().format( model_function=model_function, - task_name=model_info['task_name']['desc'], + task_name=info['task_name']['desc'], task_cite_short=format_list( task_cite, fmt='[{}]_', @@ -75,7 +169,7 @@ def shortify(cite: str) -> str: task_cite, fmt='.. [{}] {}', sep='\n '), - model_name=model_info['model_name']['desc'], + model_name=info['model_name']['desc'], model_cite_short=format_list( model_cite, fmt='[{}]_', @@ -85,37 +179,37 @@ def shortify(cite: str) -> str: if k not in task_cite), fmt='.. [{}] {}', sep='\n '), - model_type=model_info['model_type']['desc'], + model_type=info['model_type']['desc'], notes=format_list( - model_info['notes'], + info.get('notes') if info.get('notes') else [], fmt='.. note::\n {}', sep='\n\n '), contributors=format_list_of_dict( - model_info['contributors'], + info.get('contributors') if info.get('contributors') else [], 'name', 'email', fmt='.. 
codeauthor:: {} <{}>', sep='\n '), data_columns=format_list( - model_info['data_columns'], + info['data_columns'], fmt='"{}"', sep=', '), - data_columns_len=len(model_info['data_columns']), + data_columns_len=len(info['data_columns']), data_columns_details=format_dict( - model_info['data_columns'], + info['data_columns'], fmt='- "{}": {}', sep='\n '), parameters=format_dict( - model_info['parameters'], + info['parameters'], fmt='"{}" ({})', sep=', ', pre=lambda v: v['desc']), model_regressor_parameter=message_model_regressor_parameter( - model_info['regressors']), + info['regressors']), model_regressor_return=message_model_regressor_return( - model_info['regressors']), - postpreds=message_postpreds(model_info['postpreds']), + info['regressors']), + postpreds=message_postpreds(info['postpreds']), additional_args=message_additional_args( - model_info['additional_args']), + info['additional_args']), ) # Read template for model python code @@ -125,33 +219,34 @@ def shortify(cite: str) -> str: model_function=model_function, class_name=class_name, prefix_preprocess_func=prefix_preprocess_func, - task_name=model_info['task_name']['code'], - model_name=model_info['model_name']['code'], - model_type=model_info['model_type']['code'], + task_name=info['task_name']['code'], + model_name=info['model_name']['code'], + model_type=model_type_code, data_columns=format_list( - model_info['data_columns'], + info['data_columns'], fmt="'{}',", sep='\n '), parameters=format_dict( - model_info['parameters'], + info['parameters'], fmt="('{}', ({})),", sep='\n ', pre=lambda v: ', '.join(map(str, v['info']))), regressors=format_dict( - model_info['regressors'], + info.get('regressors') if info.get('regressors') else {}, fmt="('{}', {}),", sep='\n '), postpreds=format_list( - model_info['postpreds'], + info.get('postpreds') if info.get('postpreds') else [], fmt="'{}'", sep=', '), parameters_desc=format_dict( - model_info['parameters'], + info['parameters'], fmt="('{}', '{}'),", sep='\n ', pre=lambda v: v['desc']), additional_args_desc=format_list_of_dict( - model_info['additional_args'], + info.get('additional_args') if info.get('additional_args') else + [], 'code', 'default', fmt="('{}', {}),", sep='\n '), @@ -162,109 +257,46 @@ def shortify(cite: str) -> str: test = test_template.format(model_function=model_function) - if verbose: - # Print code string to stdout - print(code_template) - else: - if not PATH_OUTPUT.exists(): - PATH_OUTPUT.mkdir(exist_ok=True) - - if not PATH_OUTPUT_TEST.exists(): - PATH_OUTPUT_TEST.mkdir(exist_ok=True) - - # Write model python code - code_fn = PATH_OUTPUT / ('_' + model_function + '.py') - with open(code_fn, 'w') as f: - f.write('"""\nGenerated by template. 
Do not edit by hand.\n"""\n') - f.write(code_template) - print('Created file: ', code_fn.name) - - test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.py') - with open(test_fn, 'w') as f: - f.write(test) - print('Created file: ', test_fn.name) - - -def format_list(data: Iterable, - fmt: str, - sep: str) -> str: - return sep.join(map(fmt.format, data)) - - -def format_dict(data: OrderedDict, - fmt: str, - sep: str, - pre: Callable = lambda v: v) -> str: - return sep.join(fmt.format(k, pre(v)) for k, v in data.items()) - - -def format_list_of_dict(data: List[OrderedDict], - *keys: str, - fmt: str, - sep: str) -> str: - return sep.join(fmt.format(*(d[k] for k in keys)) for d in data) - - -def message_model_regressor_parameter(regressors: OrderedDict) -> str: - if regressors: - return 'For this model they are: ' + format_list( - regressors, fmt='"{}"', sep=', ') - else: - return 'Currently not available for this model' - - -def message_model_regressor_return(regressors: OrderedDict) -> str: - if regressors: - return ( - '- ``model_regressor``: ' - + 'Dict holding the extracted model-based regressors.') - else: - return '' + # Make directories if not exist + if not PATH_OUTPUT_CODE.exists(): + PATH_OUTPUT_CODE.mkdir(exist_ok=True) + if not PATH_OUTPUT_TEST.exists(): + PATH_OUTPUT_TEST.mkdir(exist_ok=True) + # Write model codes + code_fn = PATH_OUTPUT_CODE / ('_' + model_function + '.py') + with open(code_fn, 'w') as f: + f.write(code_template) -def message_postpreds(postpreds: List) -> str: - if not postpreds: - return '**(Currently not available.)** ' - else: - return '' - - -def message_additional_args(additional_args: List) -> str: - if additional_args: - return ( - 'For this model, it\'s possible to set the following model-' - + 'specific argument to a value that you may prefer.\n\n ' - + format_list_of_dict( - additional_args, - 'code', 'desc', - fmt='- ``{}``: {}', - sep='\n ')) - else: - return 'Not used for this model.' + # Write test codes + test_fn = PATH_OUTPUT_TEST / ('test_' + model_function + '.py') + with open(test_fn, 'w') as f: + f.write(test) if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument( - '-a', '--all', - help='write for all json files in directory', - action='store_true') parser.add_argument( '-v', '--verbose', - help='print output to stdout instead of writing to file', + help='Whether to print its process.', action='store_true') parser.add_argument( - 'json_file', - help='JSON file of the model to generate corresponding python code', - type=str, nargs='*') + 'info_files', + help='YAML-formatted file(s) for model information.', + type=str, + nargs='*') args = parser.parse_args() - if args.all: - # `all` flag overrides `json_file` & `verbose` - all_json_files = PATH_MODELS.glob('*.json') - for json_fn in sorted(all_json_files): - main(json_fn, False) + if args.info_files: + info_fns = [PATH_MODELS / fn for fn in args.info_files] else: - for fn in args.json_file: - main(fn, args.verbose) + info_fns = sorted(PATH_MODELS.glob('*.yml')) + + num_models = len(info_fns) + + for i, info_fn in enumerate(info_fns): + main(info_fn) + if args.verbose: + print('[{:2d} / {:2d}] Done for {}' + .format(i + 1, num_models, info_fn)) diff --git a/commons/convert-to-r.py b/commons/convert-to-r.py index a5f5b54f..fa2873c8 100644 --- a/commons/convert-to-r.py +++ b/commons/convert-to-r.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """ Generate R codes for hBayesDM using model information defined in YAML files. 
""" @@ -229,6 +228,11 @@ def generate_code(info): prefix_preprocess_func += '_' + info['model_type']['code'] preprocess_func = prefix_preprocess_func + '_preprocess_func' + # Model type code + model_type_code = info['model_type'].get('code') + if model_type_code is None: + model_type_code = '' + # Data columns data_columns = ', '.join([ r'"%s"' % k for k in info.get('data_columns', {}).keys() @@ -240,9 +244,9 @@ def generate_code(info): parameters = ',\n '.join([ '"{}" = c({}, {}, {})' .format(k, - v['info'][0] if v['info'][0] else 'NULL', - v['info'][1] if v['info'][1] else 'NULL', - v['info'][2] if v['info'][2] else 'NULL') + v['info'][0] if v['info'][0] is not None else 'NULL', + v['info'][1] if v['info'][1] is not None else 'NULL', + v['info'][2] if v['info'][2] is not None else 'NULL') for k, v in _params.items() ]) parameters = 'list(\n ' + parameters + '\n )' @@ -275,7 +279,7 @@ def generate_code(info): model_function=model_function, task_code=info['task_name']['code'], model_code=info['model_name']['code'], - model_type=info['model_type']['code'], + model_type=model_type_code, data_columns=data_columns, parameters=parameters, regressors=regressors, @@ -308,6 +312,7 @@ def main(info_fn): print('FileNotFound:', info_fn) sys.exit(1) + # Load model information with open(info_fn, 'r') as f: info = ordered_load(f, Loader=Loader) diff --git a/commons/generate-codes.sh b/commons/generate-codes.sh index dbfe0d4f..38fa16e1 100755 --- a/commons/generate-codes.sh +++ b/commons/generate-codes.sh @@ -4,6 +4,6 @@ python3 convert-to-r.py cp _r-codes/*.R ../R/R/ cp _r-tests/*.R ../R/tests/testthat/ -python3 generate-python-codes.py -a -cp Python-codes/_*.py ../Python/hbayesdm/models/ -cp Python-tests/*.py ../Python/tests/ +python3 convert-to-py.py +cp _py-codes/_*.py ../Python/hbayesdm/models/ +cp _py-tests/*.py ../Python/tests/ diff --git a/commons/ModelInformation.schema.json b/commons/misc/ModelInformation.schema.json similarity index 100% rename from commons/ModelInformation.schema.json rename to commons/misc/ModelInformation.schema.json diff --git a/commons/validate-models.sh b/commons/misc/validate-models.sh similarity index 100% rename from commons/validate-models.sh rename to commons/misc/validate-models.sh From f5693273f95de0489fb7f6083c27c2dbf168139d Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 21:07:56 +0900 Subject: [PATCH 096/163] Update R and Python codes using YAML files --- Python/hbayesdm/models/_bandit2arm_delta.py | 3 --- .../models/_bandit4arm2_kalman_filter.py | 3 --- Python/hbayesdm/models/_bandit4arm_2par_lapse.py | 3 --- Python/hbayesdm/models/_bandit4arm_4par.py | 3 --- Python/hbayesdm/models/_bandit4arm_lapse.py | 3 --- .../hbayesdm/models/_bandit4arm_lapse_decay.py | 3 --- .../hbayesdm/models/_bandit4arm_singleA_lapse.py | 3 --- Python/hbayesdm/models/_bart_par4.py | 3 --- Python/hbayesdm/models/_choiceRT_ddm.py | 3 --- Python/hbayesdm/models/_choiceRT_ddm_single.py | 11 ++++------- Python/hbayesdm/models/_cra_exp.py | 3 --- Python/hbayesdm/models/_cra_linear.py | 3 --- Python/hbayesdm/models/_dbdm_prob_weight.py | 3 --- Python/hbayesdm/models/_dd_cs.py | 3 --- Python/hbayesdm/models/_dd_cs_single.py | 9 +++------ Python/hbayesdm/models/_dd_exp.py | 3 --- Python/hbayesdm/models/_dd_hyperbolic.py | 3 --- Python/hbayesdm/models/_dd_hyperbolic_single.py | 7 ++----- Python/hbayesdm/models/_gng_m1.py | 3 --- Python/hbayesdm/models/_gng_m2.py | 3 --- Python/hbayesdm/models/_gng_m3.py | 3 --- Python/hbayesdm/models/_gng_m4.py | 3 --- 
Python/hbayesdm/models/_igt_orl.py | 3 --- Python/hbayesdm/models/_igt_pvl_decay.py | 3 --- Python/hbayesdm/models/_igt_pvl_delta.py | 3 --- Python/hbayesdm/models/_igt_vpp.py | 3 --- Python/hbayesdm/models/_peer_ocu.py | 3 --- Python/hbayesdm/models/_prl_ewa.py | 3 --- Python/hbayesdm/models/_prl_fictitious.py | 3 --- .../hbayesdm/models/_prl_fictitious_multipleB.py | 3 --- Python/hbayesdm/models/_prl_fictitious_rp.py | 3 --- Python/hbayesdm/models/_prl_fictitious_rp_woa.py | 3 --- Python/hbayesdm/models/_prl_fictitious_woa.py | 3 --- Python/hbayesdm/models/_prl_rp.py | 3 --- Python/hbayesdm/models/_prl_rp_multipleB.py | 3 --- Python/hbayesdm/models/_pst_gainloss_Q.py | 3 --- Python/hbayesdm/models/_ra_noLA.py | 3 --- Python/hbayesdm/models/_ra_noRA.py | 3 --- Python/hbayesdm/models/_ra_prospect.py | 3 --- Python/hbayesdm/models/_rdt_happiness.py | 3 --- Python/hbayesdm/models/_ts_par4.py | 3 --- Python/hbayesdm/models/_ts_par6.py | 3 --- Python/hbayesdm/models/_ts_par7.py | 3 --- Python/hbayesdm/models/_ug_bayes.py | 3 --- Python/hbayesdm/models/_ug_delta.py | 3 --- Python/hbayesdm/models/_wcs_sql.py | 3 --- R/R/bandit2arm_delta.R | 4 ++-- R/R/bandit4arm2_kalman_filter.R | 12 ++++++------ R/R/bandit4arm_2par_lapse.R | 6 +++--- R/R/bandit4arm_4par.R | 8 ++++---- R/R/bandit4arm_lapse.R | 10 +++++----- R/R/bandit4arm_lapse_decay.R | 12 ++++++------ R/R/bandit4arm_singleA_lapse.R | 8 ++++---- R/R/bart_par4.R | 8 ++++---- R/R/choiceRT_ddm.R | 8 ++++---- R/R/choiceRT_ddm_single.R | 8 ++++---- R/R/cra_exp.R | 6 +++--- R/R/cra_linear.R | 6 +++--- R/R/dbdm_prob_weight.R | 8 ++++---- R/R/dd_cs.R | 6 +++--- R/R/dd_cs_single.R | 6 +++--- R/R/dd_exp.R | 4 ++-- R/R/dd_hyperbolic.R | 4 ++-- R/R/dd_hyperbolic_single.R | 4 ++-- R/R/gng_m1.R | 6 +++--- R/R/gng_m2.R | 8 ++++---- R/R/gng_m3.R | 10 +++++----- R/R/gng_m4.R | 12 ++++++------ R/R/igt_orl.R | 6 +++--- R/R/igt_pvl_decay.R | 8 ++++---- R/R/igt_pvl_delta.R | 8 ++++---- R/R/igt_vpp.R | 16 ++++++++-------- R/R/peer_ocu.R | 6 +++--- R/R/prl_ewa.R | 6 +++--- R/R/prl_fictitious.R | 6 +++--- R/R/prl_fictitious_multipleB.R | 6 +++--- R/R/prl_fictitious_rp.R | 8 ++++---- R/R/prl_fictitious_rp_woa.R | 6 +++--- R/R/prl_fictitious_woa.R | 4 ++-- R/R/prl_rp.R | 6 +++--- R/R/prl_rp_multipleB.R | 6 +++--- R/R/pst_gainloss_Q.R | 6 +++--- R/R/ra_noLA.R | 4 ++-- R/R/ra_noRA.R | 4 ++-- R/R/ra_prospect.R | 6 +++--- R/R/rdt_happiness.R | 4 ++-- R/R/ts_par4.R | 8 ++++---- R/R/ts_par6.R | 12 ++++++------ R/R/ts_par7.R | 14 +++++++------- R/R/ug_bayes.R | 6 +++--- R/R/ug_delta.R | 6 +++--- R/R/wcs_sql.R | 6 +++--- 92 files changed, 175 insertions(+), 313 deletions(-) diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py index c2e87e3e..ee8f36b5 100644 --- a/Python/hbayesdm/models/_bandit2arm_delta.py +++ b/Python/hbayesdm/models/_bandit2arm_delta.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py index 74438f00..06b31c0d 100644 --- a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py +++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py index 8b732d48..e642cc9a 100644 --- a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py index 6ced4ae7..5ef259fb 100644 --- a/Python/hbayesdm/models/_bandit4arm_4par.py +++ b/Python/hbayesdm/models/_bandit4arm_4par.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py index 6827e204..f982d17a 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py index f7f86e93..907f7e6e 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py index 75163cf6..41bef90a 100644 --- a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index 920d988e..95b67396 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py index df4ffd1e..dedfc782 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm.py +++ b/Python/hbayesdm/models/_choiceRT_ddm.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py index 029e9407..5f453e28 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm_single.py +++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict @@ -25,10 +22,10 @@ def __init__(self, **kwargs): 'RT', ), parameters=OrderedDict([ - ('alpha', (None, 0.5, None)), - ('beta', (None, 0.5, None)), - ('delta', (None, 0.5, None)), - ('tau', (None, 0.15, None)), + ('alpha', (0, 0.5, Inf)), + ('beta', (0, 0.5, 1)), + ('delta', (0, 0.5, Inf)), + ('tau', (0, 0.15, 1)), ]), regressors=OrderedDict([ diff --git a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py index 6d864840..e59fce9e 100644 --- a/Python/hbayesdm/models/_cra_exp.py +++ b/Python/hbayesdm/models/_cra_exp.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py index 1762e569..8ff9c1de 100644 --- a/Python/hbayesdm/models/_cra_linear.py +++ b/Python/hbayesdm/models/_cra_linear.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py index dd34c3ec..7d1b8407 100644 --- a/Python/hbayesdm/models/_dbdm_prob_weight.py +++ b/Python/hbayesdm/models/_dbdm_prob_weight.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py index 21583cb2..6e30b128 100644 --- a/Python/hbayesdm/models/_dd_cs.py +++ b/Python/hbayesdm/models/_dd_cs.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py index b0ce5544..f2185110 100644 --- a/Python/hbayesdm/models/_dd_cs_single.py +++ b/Python/hbayesdm/models/_dd_cs_single.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict @@ -28,9 +25,9 @@ def __init__(self, **kwargs): 'choice', ), parameters=OrderedDict([ - ('r', (None, 0.1, None)), - ('s', (None, 1, None)), - ('beta', (None, 1, None)), + ('r', (0, 0.1, 1)), + ('s', (0, 1, 10)), + ('beta', (0, 1, 5)), ]), regressors=OrderedDict([ diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py index e17b457b..619fbca8 100644 --- a/Python/hbayesdm/models/_dd_exp.py +++ b/Python/hbayesdm/models/_dd_exp.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py index 7ba44b13..7d24d0c5 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic.py +++ b/Python/hbayesdm/models/_dd_hyperbolic.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py index 63b27290..87738db8 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic_single.py +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict @@ -28,8 +25,8 @@ def __init__(self, **kwargs): 'choice', ), parameters=OrderedDict([ - ('k', (None, 0.1, None)), - ('beta', (None, 1, None)), + ('k', (0, 0.1, 1)), + ('beta', (0, 1, 5)), ]), regressors=OrderedDict([ diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py index ea28bcd1..a61841f7 100644 --- a/Python/hbayesdm/models/_gng_m1.py +++ b/Python/hbayesdm/models/_gng_m1.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py index 9b291993..3a3a599f 100644 --- a/Python/hbayesdm/models/_gng_m2.py +++ b/Python/hbayesdm/models/_gng_m2.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py index 49f903ec..48cf2123 100644 --- a/Python/hbayesdm/models/_gng_m3.py +++ b/Python/hbayesdm/models/_gng_m3.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py index 2a8777bc..6de30765 100644 --- a/Python/hbayesdm/models/_gng_m4.py +++ b/Python/hbayesdm/models/_gng_m4.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py index 1c77446f..e7c5cc88 100644 --- a/Python/hbayesdm/models/_igt_orl.py +++ b/Python/hbayesdm/models/_igt_orl.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py index 57983a24..50a86d00 100644 --- a/Python/hbayesdm/models/_igt_pvl_decay.py +++ b/Python/hbayesdm/models/_igt_pvl_decay.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py index 5385ffe2..f41f3f12 100644 --- a/Python/hbayesdm/models/_igt_pvl_delta.py +++ b/Python/hbayesdm/models/_igt_pvl_delta.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py index 7b3db93f..77e324cf 100644 --- a/Python/hbayesdm/models/_igt_vpp.py +++ b/Python/hbayesdm/models/_igt_vpp.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py index d8d8aa03..72ae7235 100644 --- a/Python/hbayesdm/models/_peer_ocu.py +++ b/Python/hbayesdm/models/_peer_ocu.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py index 531bf392..6d96f561 100644 --- a/Python/hbayesdm/models/_prl_ewa.py +++ b/Python/hbayesdm/models/_prl_ewa.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py index bc284841..f64a5acb 100644 --- a/Python/hbayesdm/models/_prl_fictitious.py +++ b/Python/hbayesdm/models/_prl_fictitious.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py index 00c54cf6..ac1dec0d 100644 --- a/Python/hbayesdm/models/_prl_fictitious_multipleB.py +++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py index e7bcb925..99610f17 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py index e821a1cb..945fbce3 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py index cef8cd86..4150ae91 100644 --- a/Python/hbayesdm/models/_prl_fictitious_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_woa.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py index b0aadc9e..a6d16305 100644 --- a/Python/hbayesdm/models/_prl_rp.py +++ b/Python/hbayesdm/models/_prl_rp.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py index d6dbef32..987b2922 100644 --- a/Python/hbayesdm/models/_prl_rp_multipleB.py +++ b/Python/hbayesdm/models/_prl_rp_multipleB.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index c2ebfc4c..3af97a4d 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index 6e990c82..d7554bcf 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index 34cac275..8b611445 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index 4bcdc100..7bf1f93b 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py index 8e909a68..ad75ecb1 100644 --- a/Python/hbayesdm/models/_rdt_happiness.py +++ b/Python/hbayesdm/models/_rdt_happiness.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py index 931c4142..ea8c59f6 100644 --- a/Python/hbayesdm/models/_ts_par4.py +++ b/Python/hbayesdm/models/_ts_par4.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py index c13ef1dd..a2106171 100644 --- a/Python/hbayesdm/models/_ts_par6.py +++ b/Python/hbayesdm/models/_ts_par6.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py index 6ee9accd..88c91705 100644 --- a/Python/hbayesdm/models/_ts_par7.py +++ b/Python/hbayesdm/models/_ts_par7.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py index 8fda0893..d61250ad 100644 --- a/Python/hbayesdm/models/_ug_bayes.py +++ b/Python/hbayesdm/models/_ug_bayes.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py index 1276dad6..d2f7e697 100644 --- a/Python/hbayesdm/models/_ug_delta.py +++ b/Python/hbayesdm/models/_ug_delta.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. -""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py index 40ae7f15..fd662a48 100644 --- a/Python/hbayesdm/models/_wcs_sql.py +++ b/Python/hbayesdm/models/_wcs_sql.py @@ -1,6 +1,3 @@ -""" -Generated by template. Do not edit by hand. 
-""" from typing import Sequence, Union, Any from collections import OrderedDict diff --git a/R/R/bandit2arm_delta.R b/R/R/bandit2arm_delta.R index 7afd87ab..0cd38534 100644 --- a/R/R/bandit2arm_delta.R +++ b/R/R/bandit2arm_delta.R @@ -35,8 +35,8 @@ bandit2arm_delta <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "A" = c(NULL, 0.5, 1), - "tau" = c(NULL, 1, 5) + "A" = c(0, 0.5, 1), + "tau" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bandit4arm2_kalman_filter.R b/R/R/bandit4arm2_kalman_filter.R index 8181b67d..96d4b47a 100644 --- a/R/R/bandit4arm2_kalman_filter.R +++ b/R/R/bandit4arm2_kalman_filter.R @@ -33,12 +33,12 @@ bandit4arm2_kalman_filter <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "lambda" = c(NULL, 0.9, 1), - "theta" = c(NULL, 50, 100), - "beta" = c(NULL, 0.1, 1), - "mu0" = c(NULL, 85, 100), - "sigma0" = c(NULL, 6, 15), - "sigmaD" = c(NULL, 3, 15) + "lambda" = c(0, 0.9, 1), + "theta" = c(0, 50, 100), + "beta" = c(0, 0.1, 1), + "mu0" = c(0, 85, 100), + "sigma0" = c(0, 6, 15), + "sigmaD" = c(0, 3, 15) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bandit4arm_2par_lapse.R b/R/R/bandit4arm_2par_lapse.R index 0db8e65e..28337b76 100644 --- a/R/R/bandit4arm_2par_lapse.R +++ b/R/R/bandit4arm_2par_lapse.R @@ -34,9 +34,9 @@ bandit4arm_2par_lapse <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "Arew" = c(NULL, 0.1, 1), - "Apun" = c(NULL, 0.1, 1), - "xi" = c(NULL, 0.1, 1) + "Arew" = c(0, 0.1, 1), + "Apun" = c(0, 0.1, 1), + "xi" = c(0, 0.1, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bandit4arm_4par.R b/R/R/bandit4arm_4par.R index 7c61a6c1..8e6c57fa 100644 --- a/R/R/bandit4arm_4par.R +++ b/R/R/bandit4arm_4par.R @@ -34,10 +34,10 @@ bandit4arm_4par <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "Arew" = c(NULL, 0.1, 1), - "Apun" = c(NULL, 0.1, 1), - "R" = c(NULL, 1, 30), - "P" = c(NULL, 1, 30) + "Arew" = c(0, 0.1, 1), + "Apun" = c(0, 0.1, 1), + "R" = c(0, 1, 30), + "P" = c(0, 1, 30) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bandit4arm_lapse.R b/R/R/bandit4arm_lapse.R index e24c0baa..d70986f1 100644 --- a/R/R/bandit4arm_lapse.R +++ b/R/R/bandit4arm_lapse.R @@ -34,11 +34,11 @@ bandit4arm_lapse <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "Arew" = c(NULL, 0.1, 1), - "Apun" = c(NULL, 0.1, 1), - "R" = c(NULL, 1, 30), - "P" = c(NULL, 1, 30), - "xi" = c(NULL, 0.1, 1) + "Arew" = c(0, 0.1, 1), + "Apun" = c(0, 0.1, 1), + "R" = c(0, 1, 30), + "P" = c(0, 1, 30), + "xi" = c(0, 0.1, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bandit4arm_lapse_decay.R b/R/R/bandit4arm_lapse_decay.R index 4f632c2a..e1354c46 100644 --- a/R/R/bandit4arm_lapse_decay.R +++ b/R/R/bandit4arm_lapse_decay.R @@ -34,12 +34,12 @@ bandit4arm_lapse_decay <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "Arew" = c(NULL, 0.1, 1), - "Apun" = c(NULL, 0.1, 1), - "R" = c(NULL, 1, 30), - "P" = c(NULL, 1, 30), - "xi" = c(NULL, 0.1, 1), - "d" = c(NULL, 0.1, 1) + "Arew" = c(0, 0.1, 1), + "Apun" = c(0, 0.1, 1), + "R" = c(0, 1, 30), + "P" = c(0, 1, 30), + "xi" = c(0, 0.1, 1), + "d" = c(0, 0.1, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git 
a/R/R/bandit4arm_singleA_lapse.R b/R/R/bandit4arm_singleA_lapse.R index 4da4ddf2..f40e8ac9 100644 --- a/R/R/bandit4arm_singleA_lapse.R +++ b/R/R/bandit4arm_singleA_lapse.R @@ -34,10 +34,10 @@ bandit4arm_singleA_lapse <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "A" = c(NULL, 0.1, 1), - "R" = c(NULL, 1, 30), - "P" = c(NULL, 1, 30), - "xi" = c(NULL, 0.1, 1) + "A" = c(0, 0.1, 1), + "R" = c(0, 1, 30), + "P" = c(0, 1, 30), + "xi" = c(0, 0.1, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/bart_par4.R b/R/R/bart_par4.R index 7b152dea..bc471b32 100644 --- a/R/R/bart_par4.R +++ b/R/R/bart_par4.R @@ -33,10 +33,10 @@ bart_par4 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "pumps", "explosion"), parameters = list( - "phi" = c(NULL, 0.5, 1), - "eta" = c(NULL, 1, Inf), - "gam" = c(NULL, 1, Inf), - "tau" = c(NULL, 1, Inf) + "phi" = c(0, 0.5, 1), + "eta" = c(0, 1, Inf), + "gam" = c(0, 1, Inf), + "tau" = c(0, 1, Inf) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/choiceRT_ddm.R b/R/R/choiceRT_ddm.R index 2c7ca9c9..1c0383b0 100644 --- a/R/R/choiceRT_ddm.R +++ b/R/R/choiceRT_ddm.R @@ -39,10 +39,10 @@ choiceRT_ddm <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "RT"), parameters = list( - "alpha" = c(NULL, 0.5, Inf), - "beta" = c(NULL, 0.5, 1), - "delta" = c(NULL, 0.5, Inf), - "tau" = c(NULL, 0.15, 1) + "alpha" = c(0, 0.5, Inf), + "beta" = c(0, 0.5, 1), + "delta" = c(0, 0.5, Inf), + "tau" = c(0, 0.15, 1) ), regressors = NULL, postpreds = NULL, diff --git a/R/R/choiceRT_ddm_single.R b/R/R/choiceRT_ddm_single.R index d80f84c8..1437459d 100644 --- a/R/R/choiceRT_ddm_single.R +++ b/R/R/choiceRT_ddm_single.R @@ -39,10 +39,10 @@ choiceRT_ddm_single <- hBayesDM_model( model_type = "single", data_columns = c("subjID", "choice", "RT"), parameters = list( - "alpha" = c(NULL, 0.5, NULL), - "beta" = c(NULL, 0.5, NULL), - "delta" = c(NULL, 0.5, NULL), - "tau" = c(NULL, 0.15, NULL) + "alpha" = c(0, 0.5, Inf), + "beta" = c(0, 0.5, 1), + "delta" = c(0, 0.5, Inf), + "tau" = c(0, 0.15, 1) ), regressors = NULL, postpreds = NULL, diff --git a/R/R/cra_exp.R b/R/R/cra_exp.R index 55e62bb8..c8e95734 100644 --- a/R/R/cra_exp.R +++ b/R/R/cra_exp.R @@ -36,9 +36,9 @@ cra_exp <- hBayesDM_model( model_type = "", data_columns = c("subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"), parameters = list( - "alpha" = c(NULL, 1, 2), - "beta" = c(-Inf, NULL, Inf), - "gamma" = c(NULL, 1, Inf) + "alpha" = c(0, 1, 2), + "beta" = c(-Inf, 0, Inf), + "gamma" = c(0, 1, Inf) ), regressors = list( "sv" = 2, diff --git a/R/R/cra_linear.R b/R/R/cra_linear.R index f34d4c89..5e0d5d25 100644 --- a/R/R/cra_linear.R +++ b/R/R/cra_linear.R @@ -36,9 +36,9 @@ cra_linear <- hBayesDM_model( model_type = "", data_columns = c("subjID", "prob", "ambig", "reward_var", "reward_fix", "choice"), parameters = list( - "alpha" = c(NULL, 1, 2), - "beta" = c(-Inf, NULL, Inf), - "gamma" = c(NULL, 1, Inf) + "alpha" = c(0, 1, 2), + "beta" = c(-Inf, 0, Inf), + "gamma" = c(0, 1, Inf) ), regressors = list( "sv" = 2, diff --git a/R/R/dbdm_prob_weight.R b/R/R/dbdm_prob_weight.R index 2ec687cc..e1edbaee 100644 --- a/R/R/dbdm_prob_weight.R +++ b/R/R/dbdm_prob_weight.R @@ -42,10 +42,10 @@ dbdm_prob_weight <- hBayesDM_model( model_type = "", data_columns = c("subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice"), parameters = list( - "tau" = c(NULL, 0.8, 1), - "rho" = c(NULL, 0.7, 2), - "lambda" 
= c(NULL, 2.5, 5), - "beta" = c(NULL, 0.2, 1) + "tau" = c(0, 0.8, 1), + "rho" = c(0, 0.7, 2), + "lambda" = c(0, 2.5, 5), + "beta" = c(0, 0.2, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/dd_cs.R b/R/R/dd_cs.R index 5d2bb5c0..2552c384 100644 --- a/R/R/dd_cs.R +++ b/R/R/dd_cs.R @@ -36,9 +36,9 @@ dd_cs <- hBayesDM_model( model_type = "", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), parameters = list( - "r" = c(NULL, 0.1, 1), - "s" = c(NULL, 1, 10), - "beta" = c(NULL, 1, 5) + "r" = c(0, 0.1, 1), + "s" = c(0, 1, 10), + "beta" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/dd_cs_single.R b/R/R/dd_cs_single.R index 5306406b..2094267e 100644 --- a/R/R/dd_cs_single.R +++ b/R/R/dd_cs_single.R @@ -36,9 +36,9 @@ dd_cs_single <- hBayesDM_model( model_type = "single", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), parameters = list( - "r" = c(NULL, 0.1, NULL), - "s" = c(NULL, 1, NULL), - "beta" = c(NULL, 1, NULL) + "r" = c(0, 0.1, 1), + "s" = c(0, 1, 10), + "beta" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/dd_exp.R b/R/R/dd_exp.R index 6c720882..cef55f6d 100644 --- a/R/R/dd_exp.R +++ b/R/R/dd_exp.R @@ -36,8 +36,8 @@ dd_exp <- hBayesDM_model( model_type = "", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), parameters = list( - "r" = c(NULL, 0.1, 1), - "beta" = c(NULL, 1, 5) + "r" = c(0, 0.1, 1), + "beta" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/dd_hyperbolic.R b/R/R/dd_hyperbolic.R index 24fad886..2fdb4fd7 100644 --- a/R/R/dd_hyperbolic.R +++ b/R/R/dd_hyperbolic.R @@ -36,8 +36,8 @@ dd_hyperbolic <- hBayesDM_model( model_type = "", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), parameters = list( - "k" = c(NULL, 0.1, 1), - "beta" = c(NULL, 1, 5) + "k" = c(0, 0.1, 1), + "beta" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/dd_hyperbolic_single.R b/R/R/dd_hyperbolic_single.R index 46497855..be3744fc 100644 --- a/R/R/dd_hyperbolic_single.R +++ b/R/R/dd_hyperbolic_single.R @@ -36,8 +36,8 @@ dd_hyperbolic_single <- hBayesDM_model( model_type = "single", data_columns = c("subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice"), parameters = list( - "k" = c(NULL, 0.1, NULL), - "beta" = c(NULL, 1, NULL) + "k" = c(0, 0.1, 1), + "beta" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/gng_m1.R b/R/R/gng_m1.R index 4e98bc0b..49942ae2 100644 --- a/R/R/gng_m1.R +++ b/R/R/gng_m1.R @@ -34,9 +34,9 @@ gng_m1 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), parameters = list( - "xi" = c(NULL, 0.1, 1), - "ep" = c(NULL, 0.2, 1), - "rho" = c(NULL, exp(2), Inf) + "xi" = c(0, 0.1, 1), + "ep" = c(0, 0.2, 1), + "rho" = c(0, exp(2), Inf) ), regressors = list( "Qgo" = 2, diff --git a/R/R/gng_m2.R b/R/R/gng_m2.R index 106b7346..4aa66293 100644 --- a/R/R/gng_m2.R +++ b/R/R/gng_m2.R @@ -34,10 +34,10 @@ gng_m2 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), parameters = list( - "xi" = c(NULL, 0.1, 1), - "ep" = c(NULL, 0.2, 1), - "b" = c(-Inf, NULL, Inf), - "rho" = c(NULL, exp(2), Inf) + "xi" = c(0, 0.1, 1), + "ep" = c(0, 0.2, 1), + "b" = c(-Inf, 0, Inf), + "rho" = c(0, exp(2), Inf) ), regressors = list( "Qgo" = 2, diff 
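Note that the single-subject variants (choiceRT_ddm_single, dd_cs_single, dd_hyperbolic_single) previously used NULL for the upper bound as well; the fix gives them the same finite triplets as their hierarchical counterparts. A small, hypothetical sanity check over the new triplets (not part of the package) could look like this:

    # Hypothetical check: every (lower, plausible, upper) triplet is ordered.
    def check_bounds(parameters):
        for name, (lower, plausible, upper) in parameters.items():
            assert lower <= plausible <= upper, name

    # Values taken from the choiceRT_ddm hunk above.
    check_bounds({
        'alpha': (0, 0.5, float('inf')),
        'beta':  (0, 0.5, 1),
        'delta': (0, 0.5, float('inf')),
        'tau':   (0, 0.15, 1),
    })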
--git a/R/R/gng_m3.R b/R/R/gng_m3.R index 7460a8df..95245789 100644 --- a/R/R/gng_m3.R +++ b/R/R/gng_m3.R @@ -34,11 +34,11 @@ gng_m3 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), parameters = list( - "xi" = c(NULL, 0.1, 1), - "ep" = c(NULL, 0.2, 1), - "b" = c(-Inf, NULL, Inf), - "pi" = c(-Inf, NULL, Inf), - "rho" = c(NULL, exp(2), Inf) + "xi" = c(0, 0.1, 1), + "ep" = c(0, 0.2, 1), + "b" = c(-Inf, 0, Inf), + "pi" = c(-Inf, 0, Inf), + "rho" = c(0, exp(2), Inf) ), regressors = list( "Qgo" = 2, diff --git a/R/R/gng_m4.R b/R/R/gng_m4.R index 101a65fc..aa5cb6c9 100644 --- a/R/R/gng_m4.R +++ b/R/R/gng_m4.R @@ -34,12 +34,12 @@ gng_m4 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "cue", "keyPressed", "outcome"), parameters = list( - "xi" = c(NULL, 0.1, 1), - "ep" = c(NULL, 0.2, 1), - "b" = c(-Inf, NULL, Inf), - "pi" = c(-Inf, NULL, Inf), - "rhoRew" = c(NULL, exp(2), Inf), - "rhoPun" = c(NULL, exp(2), Inf) + "xi" = c(0, 0.1, 1), + "ep" = c(0, 0.2, 1), + "b" = c(-Inf, 0, Inf), + "pi" = c(-Inf, 0, Inf), + "rhoRew" = c(0, exp(2), Inf), + "rhoPun" = c(0, exp(2), Inf) ), regressors = list( "Qgo" = 2, diff --git a/R/R/igt_orl.R b/R/R/igt_orl.R index a4fceaea..143529b2 100644 --- a/R/R/igt_orl.R +++ b/R/R/igt_orl.R @@ -37,9 +37,9 @@ igt_orl <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "Arew" = c(NULL, 0.1, 1), - "Apun" = c(NULL, 0.1, 1), - "K" = c(NULL, 0.1, 5), + "Arew" = c(0, 0.1, 1), + "Apun" = c(0, 0.1, 1), + "K" = c(0, 0.1, 5), "betaF" = c(-Inf, 0.1, Inf), "betaP" = c(-Inf, 1, Inf) ), diff --git a/R/R/igt_pvl_decay.R b/R/R/igt_pvl_decay.R index 686fd2b3..d4eac4f5 100644 --- a/R/R/igt_pvl_decay.R +++ b/R/R/igt_pvl_decay.R @@ -37,10 +37,10 @@ igt_pvl_decay <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "A" = c(NULL, 0.5, 1), - "alpha" = c(NULL, 0.5, 2), - "cons" = c(NULL, 1, 5), - "lambda" = c(NULL, 1, 10) + "A" = c(0, 0.5, 1), + "alpha" = c(0, 0.5, 2), + "cons" = c(0, 1, 5), + "lambda" = c(0, 1, 10) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/igt_pvl_delta.R b/R/R/igt_pvl_delta.R index 3a0b2e1b..6d4b430b 100644 --- a/R/R/igt_pvl_delta.R +++ b/R/R/igt_pvl_delta.R @@ -37,10 +37,10 @@ igt_pvl_delta <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "A" = c(NULL, 0.5, 1), - "alpha" = c(NULL, 0.5, 2), - "cons" = c(NULL, 1, 5), - "lambda" = c(NULL, 1, 10) + "A" = c(0, 0.5, 1), + "alpha" = c(0, 0.5, 2), + "cons" = c(0, 1, 5), + "lambda" = c(0, 1, 10) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/igt_vpp.R b/R/R/igt_vpp.R index 8e525384..ea203ba7 100644 --- a/R/R/igt_vpp.R +++ b/R/R/igt_vpp.R @@ -37,14 +37,14 @@ igt_vpp <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "gain", "loss"), parameters = list( - "A" = c(NULL, 0.5, 1), - "alpha" = c(NULL, 0.5, 2), - "cons" = c(NULL, 1, 5), - "lambda" = c(NULL, 1, 10), - "epP" = c(-Inf, NULL, Inf), - "epN" = c(-Inf, NULL, Inf), - "K" = c(NULL, 0.5, 1), - "w" = c(NULL, 0.5, 1) + "A" = c(0, 0.5, 1), + "alpha" = c(0, 0.5, 2), + "cons" = c(0, 1, 5), + "lambda" = c(0, 1, 10), + "epP" = c(-Inf, 0, Inf), + "epN" = c(-Inf, 0, Inf), + "K" = c(0, 0.5, 1), + "w" = c(0, 0.5, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/peer_ocu.R b/R/R/peer_ocu.R index 871c74f3..4498dbe6 100644 --- a/R/R/peer_ocu.R +++ b/R/R/peer_ocu.R @@ -38,9 +38,9 @@ peer_ocu 
<- hBayesDM_model( model_type = "", data_columns = c("subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice"), parameters = list( - "rho" = c(NULL, 1, 2), - "tau" = c(NULL, 1, Inf), - "ocu" = c(-Inf, NULL, Inf) + "rho" = c(0, 1, 2), + "tau" = c(0, 1, Inf), + "ocu" = c(-Inf, 0, Inf) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/prl_ewa.R b/R/R/prl_ewa.R index e2526856..d8e07487 100644 --- a/R/R/prl_ewa.R +++ b/R/R/prl_ewa.R @@ -33,9 +33,9 @@ prl_ewa <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "phi" = c(NULL, 0.5, 1), - "rho" = c(NULL, 0.1, 1), - "beta" = c(NULL, 1, 10) + "phi" = c(0, 0.5, 1), + "rho" = c(0, 0.1, 1), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_fictitious.R b/R/R/prl_fictitious.R index 076b036f..6cfa7101 100644 --- a/R/R/prl_fictitious.R +++ b/R/R/prl_fictitious.R @@ -33,9 +33,9 @@ prl_fictitious <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "eta" = c(NULL, 0.5, 1), - "alpha" = c(-Inf, NULL, Inf), - "beta" = c(NULL, 1, 10) + "eta" = c(0, 0.5, 1), + "alpha" = c(-Inf, 0, Inf), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_fictitious_multipleB.R b/R/R/prl_fictitious_multipleB.R index 1ac8a46b..f3a3c886 100644 --- a/R/R/prl_fictitious_multipleB.R +++ b/R/R/prl_fictitious_multipleB.R @@ -34,9 +34,9 @@ prl_fictitious_multipleB <- hBayesDM_model( model_type = "multipleB", data_columns = c("subjID", "block", "choice", "outcome"), parameters = list( - "eta" = c(NULL, 0.5, 1), - "alpha" = c(-Inf, NULL, Inf), - "beta" = c(NULL, 1, 10) + "eta" = c(0, 0.5, 1), + "alpha" = c(-Inf, 0, Inf), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 3, diff --git a/R/R/prl_fictitious_rp.R b/R/R/prl_fictitious_rp.R index 6414998e..7aac73e4 100644 --- a/R/R/prl_fictitious_rp.R +++ b/R/R/prl_fictitious_rp.R @@ -35,10 +35,10 @@ prl_fictitious_rp <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "eta_pos" = c(NULL, 0.5, 1), - "eta_neg" = c(NULL, 0.5, 1), - "alpha" = c(-Inf, NULL, Inf), - "beta" = c(NULL, 1, 10) + "eta_pos" = c(0, 0.5, 1), + "eta_neg" = c(0, 0.5, 1), + "alpha" = c(-Inf, 0, Inf), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_fictitious_rp_woa.R b/R/R/prl_fictitious_rp_woa.R index 68681ecb..d7fe6c6b 100644 --- a/R/R/prl_fictitious_rp_woa.R +++ b/R/R/prl_fictitious_rp_woa.R @@ -35,9 +35,9 @@ prl_fictitious_rp_woa <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "eta_pos" = c(NULL, 0.5, 1), - "eta_neg" = c(NULL, 0.5, 1), - "beta" = c(NULL, 1, 10) + "eta_pos" = c(0, 0.5, 1), + "eta_neg" = c(0, 0.5, 1), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_fictitious_woa.R b/R/R/prl_fictitious_woa.R index d23ae926..5dccea1c 100644 --- a/R/R/prl_fictitious_woa.R +++ b/R/R/prl_fictitious_woa.R @@ -33,8 +33,8 @@ prl_fictitious_woa <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "eta" = c(NULL, 0.5, 1), - "beta" = c(NULL, 1, 10) + "eta" = c(0, 0.5, 1), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_rp.R b/R/R/prl_rp.R index 5aa87bc5..bd439509 100644 --- a/R/R/prl_rp.R +++ b/R/R/prl_rp.R @@ -33,9 +33,9 @@ prl_rp <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", 
"outcome"), parameters = list( - "Apun" = c(NULL, 0.1, 1), - "Arew" = c(NULL, 0.1, 1), - "beta" = c(NULL, 1, 10) + "Apun" = c(0, 0.1, 1), + "Arew" = c(0, 0.1, 1), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 2, diff --git a/R/R/prl_rp_multipleB.R b/R/R/prl_rp_multipleB.R index f2f96361..865346d1 100644 --- a/R/R/prl_rp_multipleB.R +++ b/R/R/prl_rp_multipleB.R @@ -34,9 +34,9 @@ prl_rp_multipleB <- hBayesDM_model( model_type = "multipleB", data_columns = c("subjID", "block", "choice", "outcome"), parameters = list( - "Apun" = c(NULL, 0.1, 1), - "Arew" = c(NULL, 0.1, 1), - "beta" = c(NULL, 1, 10) + "Apun" = c(0, 0.1, 1), + "Arew" = c(0, 0.1, 1), + "beta" = c(0, 1, 10) ), regressors = list( "ev_c" = 3, diff --git a/R/R/pst_gainloss_Q.R b/R/R/pst_gainloss_Q.R index 3f931980..0551a1b0 100644 --- a/R/R/pst_gainloss_Q.R +++ b/R/R/pst_gainloss_Q.R @@ -34,9 +34,9 @@ pst_gainloss_Q <- hBayesDM_model( model_type = "", data_columns = c("subjID", "type", "choice", "reward"), parameters = list( - "alpha_pos" = c(NULL, 0.5, 1), - "alpha_neg" = c(NULL, 0.5, 1), - "beta" = c(NULL, 1, 10) + "alpha_pos" = c(0, 0.5, 1), + "alpha_neg" = c(0, 0.5, 1), + "beta" = c(0, 1, 10) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/ra_noLA.R b/R/R/ra_noLA.R index 767c16b4..57fb35e7 100644 --- a/R/R/ra_noLA.R +++ b/R/R/ra_noLA.R @@ -35,8 +35,8 @@ ra_noLA <- hBayesDM_model( model_type = "", data_columns = c("subjID", "gain", "loss", "cert", "gamble"), parameters = list( - "rho" = c(NULL, 1, 2), - "tau" = c(NULL, 1, 30) + "rho" = c(0, 1, 2), + "tau" = c(0, 1, 30) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/ra_noRA.R b/R/R/ra_noRA.R index c7221a45..0fede4ff 100644 --- a/R/R/ra_noRA.R +++ b/R/R/ra_noRA.R @@ -35,8 +35,8 @@ ra_noRA <- hBayesDM_model( model_type = "", data_columns = c("subjID", "gain", "loss", "cert", "gamble"), parameters = list( - "lambda" = c(NULL, 1, 5), - "tau" = c(NULL, 1, 30) + "lambda" = c(0, 1, 5), + "tau" = c(0, 1, 30) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/ra_prospect.R b/R/R/ra_prospect.R index e9c526e0..b9d70dea 100644 --- a/R/R/ra_prospect.R +++ b/R/R/ra_prospect.R @@ -35,9 +35,9 @@ ra_prospect <- hBayesDM_model( model_type = "", data_columns = c("subjID", "gain", "loss", "cert", "gamble"), parameters = list( - "rho" = c(NULL, 1, 2), - "lambda" = c(NULL, 1, 5), - "tau" = c(NULL, 1, 30) + "rho" = c(0, 1, 2), + "lambda" = c(0, 1, 5), + "tau" = c(0, 1, 30) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/rdt_happiness.R b/R/R/rdt_happiness.R index f7cacb54..8db06323 100644 --- a/R/R/rdt_happiness.R +++ b/R/R/rdt_happiness.R @@ -43,8 +43,8 @@ rdt_happiness <- hBayesDM_model( "w1" = c(-Inf, 1, Inf), "w2" = c(-Inf, 1, Inf), "w3" = c(-Inf, 1, Inf), - "gam" = c(NULL, 0.5, 1), - "sig" = c(NULL, 1, Inf) + "gam" = c(0, 0.5, 1), + "sig" = c(0, 1, Inf) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/ts_par4.R b/R/R/ts_par4.R index 28863e9a..1e26b202 100644 --- a/R/R/ts_par4.R +++ b/R/R/ts_par4.R @@ -39,10 +39,10 @@ ts_par4 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), parameters = list( - "a" = c(NULL, 0.5, 1), - "beta" = c(NULL, 1, Inf), - "pi" = c(NULL, 1, 5), - "w" = c(NULL, 0.5, 1) + "a" = c(0, 0.5, 1), + "beta" = c(0, 1, Inf), + "pi" = c(0, 1, 5), + "w" = c(0, 0.5, 1) ), regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), diff --git a/R/R/ts_par6.R b/R/R/ts_par6.R index 9a7fc19e..b4d1d18c 100644 --- a/R/R/ts_par6.R +++ b/R/R/ts_par6.R 
@@ -37,12 +37,12 @@ ts_par6 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), parameters = list( - "a1" = c(NULL, 0.5, 1), - "beta1" = c(NULL, 1, Inf), - "a2" = c(NULL, 0.5, 1), - "beta2" = c(NULL, 1, Inf), - "pi" = c(NULL, 1, 5), - "w" = c(NULL, 0.5, 1) + "a1" = c(0, 0.5, 1), + "beta1" = c(0, 1, Inf), + "a2" = c(0, 0.5, 1), + "beta2" = c(0, 1, Inf), + "pi" = c(0, 1, 5), + "w" = c(0, 0.5, 1) ), regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), diff --git a/R/R/ts_par7.R b/R/R/ts_par7.R index 1f21cbb0..6db2d767 100644 --- a/R/R/ts_par7.R +++ b/R/R/ts_par7.R @@ -37,13 +37,13 @@ ts_par7 <- hBayesDM_model( model_type = "", data_columns = c("subjID", "level1_choice", "level2_choice", "reward"), parameters = list( - "a1" = c(NULL, 0.5, 1), - "beta1" = c(NULL, 1, Inf), - "a2" = c(NULL, 0.5, 1), - "beta2" = c(NULL, 1, Inf), - "pi" = c(NULL, 1, 5), - "w" = c(NULL, 0.5, 1), - "lambda" = c(NULL, 0.5, 1) + "a1" = c(0, 0.5, 1), + "beta1" = c(0, 1, Inf), + "a2" = c(0, 0.5, 1), + "beta2" = c(0, 1, Inf), + "pi" = c(0, 1, 5), + "w" = c(0, 0.5, 1), + "lambda" = c(0, 0.5, 1) ), regressors = NULL, postpreds = c("y_pred_step1", "y_pred_step2"), diff --git a/R/R/ug_bayes.R b/R/R/ug_bayes.R index 5948312f..def9fc5d 100644 --- a/R/R/ug_bayes.R +++ b/R/R/ug_bayes.R @@ -33,9 +33,9 @@ ug_bayes <- hBayesDM_model( model_type = "", data_columns = c("subjID", "offer", "accept"), parameters = list( - "alpha" = c(NULL, 1, 20), - "beta" = c(NULL, 0.5, 10), - "tau" = c(NULL, 1, 10) + "alpha" = c(0, 1, 20), + "beta" = c(0, 0.5, 10), + "tau" = c(0, 1, 10) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/ug_delta.R b/R/R/ug_delta.R index 7007049c..cf1df4ca 100644 --- a/R/R/ug_delta.R +++ b/R/R/ug_delta.R @@ -33,9 +33,9 @@ ug_delta <- hBayesDM_model( model_type = "", data_columns = c("subjID", "offer", "accept"), parameters = list( - "alpha" = c(NULL, 1, 20), - "tau" = c(NULL, 1, 10), - "ep" = c(NULL, 0.5, 1) + "alpha" = c(0, 1, 20), + "tau" = c(0, 1, 10), + "ep" = c(0, 0.5, 1) ), regressors = NULL, postpreds = c("y_pred"), diff --git a/R/R/wcs_sql.R b/R/R/wcs_sql.R index 570e7e6f..649ae970 100644 --- a/R/R/wcs_sql.R +++ b/R/R/wcs_sql.R @@ -33,9 +33,9 @@ wcs_sql <- hBayesDM_model( model_type = "", data_columns = c("subjID", "choice", "outcome"), parameters = list( - "r" = c(NULL, 0.1, 1), - "p" = c(NULL, 0.1, 1), - "d" = c(NULL, 1, 5) + "r" = c(0, 0.1, 1), + "p" = c(0, 0.1, 1), + "d" = c(0, 1, 5) ), regressors = NULL, postpreds = c("y_pred"), From 26682075c40227a55d96cd1944f8ee7d934e706c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 22:22:10 +0900 Subject: [PATCH 097/163] Generate __init__.py automatically --- Python/hbayesdm/models/__init__.py | 94 +++++++++++++++--------------- commons/convert-to-py.py | 30 ++++++++++ 2 files changed, 77 insertions(+), 47 deletions(-) diff --git a/Python/hbayesdm/models/__init__.py b/Python/hbayesdm/models/__init__.py index 6778b127..b0d89462 100644 --- a/Python/hbayesdm/models/__init__.py +++ b/Python/hbayesdm/models/__init__.py @@ -1,49 +1,49 @@ -from hbayesdm.models._bandit2arm_delta import bandit2arm_delta -from hbayesdm.models._bandit4arm2_kalman_filter import bandit4arm2_kalman_filter -from hbayesdm.models._bandit4arm_2par_lapse import bandit4arm_2par_lapse -from hbayesdm.models._bandit4arm_4par import bandit4arm_4par -from hbayesdm.models._bandit4arm_lapse import bandit4arm_lapse -from hbayesdm.models._bandit4arm_lapse_decay import bandit4arm_lapse_decay -from 
hbayesdm.models._bandit4arm_singleA_lapse import bandit4arm_singleA_lapse -from hbayesdm.models._bart_par4 import bart_par4 -from hbayesdm.models._choiceRT_ddm import choiceRT_ddm -from hbayesdm.models._choiceRT_ddm_single import choiceRT_ddm_single -from hbayesdm.models._cra_exp import cra_exp -from hbayesdm.models._cra_linear import cra_linear -from hbayesdm.models._dbdm_prob_weight import dbdm_prob_weight -from hbayesdm.models._dd_cs import dd_cs -from hbayesdm.models._dd_cs_single import dd_cs_single -from hbayesdm.models._dd_exp import dd_exp -from hbayesdm.models._dd_hyperbolic import dd_hyperbolic -from hbayesdm.models._dd_hyperbolic_single import dd_hyperbolic_single -from hbayesdm.models._gng_m1 import gng_m1 -from hbayesdm.models._gng_m2 import gng_m2 -from hbayesdm.models._gng_m3 import gng_m3 -from hbayesdm.models._gng_m4 import gng_m4 -from hbayesdm.models._igt_orl import igt_orl -from hbayesdm.models._igt_pvl_decay import igt_pvl_decay -from hbayesdm.models._igt_pvl_delta import igt_pvl_delta -from hbayesdm.models._igt_vpp import igt_vpp -from hbayesdm.models._peer_ocu import peer_ocu -from hbayesdm.models._prl_ewa import prl_ewa -from hbayesdm.models._prl_fictitious import prl_fictitious -from hbayesdm.models._prl_fictitious_multipleB import prl_fictitious_multipleB -from hbayesdm.models._prl_fictitious_rp import prl_fictitious_rp -from hbayesdm.models._prl_fictitious_rp_woa import prl_fictitious_rp_woa -from hbayesdm.models._prl_fictitious_woa import prl_fictitious_woa -from hbayesdm.models._prl_rp import prl_rp -from hbayesdm.models._prl_rp_multipleB import prl_rp_multipleB -from hbayesdm.models._pst_gainloss_Q import pst_gainloss_Q -from hbayesdm.models._ra_noLA import ra_noLA -from hbayesdm.models._ra_noRA import ra_noRA -from hbayesdm.models._ra_prospect import ra_prospect -from hbayesdm.models._rdt_happiness import rdt_happiness -from hbayesdm.models._ts_par4 import ts_par4 -from hbayesdm.models._ts_par6 import ts_par6 -from hbayesdm.models._ts_par7 import ts_par7 -from hbayesdm.models._ug_bayes import ug_bayes -from hbayesdm.models._ug_delta import ug_delta -from hbayesdm.models._wcs_sql import wcs_sql +from ._bandit2arm_delta import bandit2arm_delta +from ._bandit4arm2_kalman_filter import bandit4arm2_kalman_filter +from ._bandit4arm_2par_lapse import bandit4arm_2par_lapse +from ._bandit4arm_4par import bandit4arm_4par +from ._bandit4arm_lapse import bandit4arm_lapse +from ._bandit4arm_lapse_decay import bandit4arm_lapse_decay +from ._bandit4arm_singleA_lapse import bandit4arm_singleA_lapse +from ._bart_par4 import bart_par4 +from ._choiceRT_ddm import choiceRT_ddm +from ._choiceRT_ddm_single import choiceRT_ddm_single +from ._cra_exp import cra_exp +from ._cra_linear import cra_linear +from ._dbdm_prob_weight import dbdm_prob_weight +from ._dd_cs import dd_cs +from ._dd_cs_single import dd_cs_single +from ._dd_exp import dd_exp +from ._dd_hyperbolic import dd_hyperbolic +from ._dd_hyperbolic_single import dd_hyperbolic_single +from ._gng_m1 import gng_m1 +from ._gng_m2 import gng_m2 +from ._gng_m3 import gng_m3 +from ._gng_m4 import gng_m4 +from ._igt_orl import igt_orl +from ._igt_pvl_decay import igt_pvl_decay +from ._igt_pvl_delta import igt_pvl_delta +from ._igt_vpp import igt_vpp +from ._peer_ocu import peer_ocu +from ._prl_ewa import prl_ewa +from ._prl_fictitious import prl_fictitious +from ._prl_fictitious_multipleB import prl_fictitious_multipleB +from ._prl_fictitious_rp import prl_fictitious_rp +from ._prl_fictitious_rp_woa import prl_fictitious_rp_woa 
+from ._prl_fictitious_woa import prl_fictitious_woa +from ._prl_rp import prl_rp +from ._prl_rp_multipleB import prl_rp_multipleB +from ._pst_gainloss_Q import pst_gainloss_Q +from ._ra_noLA import ra_noLA +from ._ra_noRA import ra_noRA +from ._ra_prospect import ra_prospect +from ._rdt_happiness import rdt_happiness +from ._ts_par4 import ts_par4 +from ._ts_par6 import ts_par6 +from ._ts_par7 import ts_par7 +from ._ug_bayes import ug_bayes +from ._ug_delta import ug_delta +from ._wcs_sql import wcs_sql __all__ = [ 'bandit2arm_delta', @@ -92,4 +92,4 @@ 'ug_bayes', 'ug_delta', 'wcs_sql', -] +] \ No newline at end of file diff --git a/commons/convert-to-py.py b/commons/convert-to-py.py index 46ae65cd..74bb8611 100644 --- a/commons/convert-to-py.py +++ b/commons/convert-to-py.py @@ -274,6 +274,34 @@ def shortify(cite: str) -> str: f.write(test) +def generate_init(info_fns): + mfs = [] + + for info_fn in info_fns: + # Load model information + with open(info_fn, 'r') as f: + info = ordered_load(f, Loader=Loader) + + # Model full name (Snake-case) + model_function = [info['task_name']['code'], + info['model_name']['code']] + if info['model_type']['code']: + model_function.append(info['model_type']['code']) + model_function = '_'.join(model_function) + + mfs.append(model_function) + + lines = [] + lines += ['from ._{mf} import {mf}'.format(mf=mf) for mf in mfs] + lines += [''] + lines += ['__all__ = ['] + lines += [' \'{mf}\','.format(mf=mf) for mf in mfs] + lines += [']'] + + with open(PATH_OUTPUT_CODE / '__init__.py', 'w') as f: + f.write('\n'.join(lines)) + + if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument( @@ -300,3 +328,5 @@ def shortify(cite: str) -> str: if args.verbose: print('[{:2d} / {:2d}] Done for {}' .format(i + 1, num_models, info_fn)) + + generate_init(sorted(PATH_MODELS.glob('*.yml'))) From aff4d36155de62f20290ef850264e8080e6ea8ca Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 22:41:49 +0900 Subject: [PATCH 098/163] Use symbolic links for stan_files and extdata --- Python/hbayesdm/common/extdata | 1 + Python/hbayesdm/common/stan_files | 1 + R/inst/extdata | 1 + R/inst/extdata/bandit2arm_exampleData.txt | 2001 --- R/inst/extdata/bandit4arm2_exampleData.txt | 3001 ---- R/inst/extdata/bandit4arm_exampleData.txt | 2001 --- R/inst/extdata/bart_exampleData.txt | 91 - R/inst/extdata/choiceRT_exampleData.txt | 5001 ------ .../extdata/choiceRT_single_exampleData.txt | 1001 -- R/inst/extdata/cra_exampleData.txt | 541 - R/inst/extdata/dbdm_exampleData.txt | 15001 ---------------- R/inst/extdata/dd_exampleData.txt | 2161 --- R/inst/extdata/dd_single_exampleData.txt | 109 - R/inst/extdata/gng_exampleData.txt | 2401 --- R/inst/extdata/igt_exampleData.txt | 401 - R/inst/extdata/peer_exampleData.txt | 361 - R/inst/extdata/prl_exampleData.txt | 2001 --- R/inst/extdata/prl_multipleB_exampleData.txt | 1801 -- R/inst/extdata/pst_exampleData.txt | 1021 -- R/inst/extdata/ra_data_attend.txt | 4192 ----- R/inst/extdata/ra_data_reappraisal.txt | 4190 ----- R/inst/extdata/ra_exampleData.txt | 701 - R/inst/extdata/rdt_exampleData.txt | 901 - R/inst/extdata/ts_exampleData.txt | 2191 --- R/inst/extdata/ug_exampleData.txt | 1801 -- R/inst/extdata/wcs_answersheet.txt | 4 - R/inst/extdata/wcs_exampleData.txt | 1158 -- R/inst/stan_files | 1 + R/inst/stan_files/bandit2arm_delta.stan | 109 - .../stan_files/bandit4arm2_kalman_filter.stan | 163 - R/inst/stan_files/bandit4arm_2par_lapse.stan | 173 - R/inst/stan_files/bandit4arm_4par.stan | 176 - 
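Two details of the generated __init__.py are worth spelling out. First, the imports are now package-relative (from ._x import x rather than from hbayesdm.models._x import x), so the generated file no longer hard-codes the hbayesdm.models path. Second, generate_init joins its lines with '\n'.join(lines), which writes no trailing newline; that is exactly the "\ No newline at end of file" marker in the __init__.py hunk above. A self-contained sketch of the line-building logic, with sample model names standing in for the real YAML scan:

    # Sample names stand in for the task/model codes read from the YAML files.
    mfs = ['bandit2arm_delta', 'gng_m1', 'ra_prospect']

    lines = []
    lines += ['from ._{mf} import {mf}'.format(mf=mf) for mf in mfs]
    lines += ['']
    lines += ['__all__ = [']
    lines += ["    '{mf}',".format(mf=mf) for mf in mfs]
    lines += [']']

    # '\n'.join() leaves no trailing newline, matching the hunk above.
    print('\n'.join(lines))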
R/inst/stan_files/bandit4arm_lapse.stan | 182 - R/inst/stan_files/bandit4arm_lapse_decay.stan | 201 - .../stan_files/bandit4arm_singleA_lapse.stan | 177 - R/inst/stan_files/bart_par4.stan | 131 - R/inst/stan_files/choiceRT_ddm.stan | 98 - R/inst/stan_files/choiceRT_ddm_single.stan | 58 - R/inst/stan_files/choiceRT_lba.stan | 278 - R/inst/stan_files/choiceRT_lba_single.stan | 239 - R/inst/stan_files/cra_exp.stan | 134 - R/inst/stan_files/cra_linear.stan | 130 - R/inst/stan_files/dbdm_prob_weight.stan | 154 - R/inst/stan_files/dd_cs.stan | 107 - R/inst/stan_files/dd_cs_single.stan | 63 - R/inst/stan_files/dd_exp.stan | 101 - R/inst/stan_files/dd_hyperbolic.stan | 101 - R/inst/stan_files/dd_hyperbolic_single.stan | 57 - R/inst/stan_files/gng_m1.stan | 149 - R/inst/stan_files/gng_m2.stan | 160 - R/inst/stan_files/gng_m3.stan | 179 - R/inst/stan_files/gng_m4.stan | 210 - R/inst/stan_files/igt_orl.stan | 207 - R/inst/stan_files/igt_pvl_decay.stan | 134 - R/inst/stan_files/igt_pvl_delta.stan | 132 - R/inst/stan_files/igt_vpp.stan | 188 - R/inst/stan_files/peer_ocu.stan | 115 - R/inst/stan_files/pre/license.stan | 14 - R/inst/stan_files/prl_ewa.stan | 179 - R/inst/stan_files/prl_fictitious.stan | 173 - .../stan_files/prl_fictitious_multipleB.stan | 185 - R/inst/stan_files/prl_fictitious_rp.stan | 188 - R/inst/stan_files/prl_fictitious_rp_woa.stan | 180 - R/inst/stan_files/prl_fictitious_woa.stan | 165 - R/inst/stan_files/prl_rp.stan | 149 - R/inst/stan_files/prl_rp_multipleB.stan | 161 - R/inst/stan_files/pst_gainloss_Q.stan | 114 - R/inst/stan_files/ra_noLA.stan | 95 - R/inst/stan_files/ra_noRA.stan | 95 - R/inst/stan_files/ra_prospect.stan | 97 - R/inst/stan_files/rdt_happiness.stan | 146 - R/inst/stan_files/ts_par4.stan | 204 - R/inst/stan_files/ts_par6.stan | 213 - R/inst/stan_files/ts_par7.stan | 217 - R/inst/stan_files/ug_bayes.stan | 167 - R/inst/stan_files/ug_delta.stan | 129 - R/inst/stan_files/wcs_sql.stan | 176 - .../extdata/bandit2arm_exampleData.txt | 0 .../extdata/bandit4arm2_exampleData.txt | 0 .../extdata/bandit4arm_exampleData.txt | 0 .../extdata/bart_exampleData.txt | 0 .../extdata/choiceRT_exampleData.txt | 0 .../extdata/choiceRT_single_exampleData.txt | 0 .../extdata/cra_exampleData.txt | 0 .../extdata/dbdm_exampleData.txt | 0 .../extdata/dd_exampleData.txt | 0 .../extdata/dd_single_exampleData.txt | 0 .../extdata/gng_exampleData.txt | 0 .../extdata/igt_exampleData.txt | 0 .../extdata/peer_exampleData.txt | 0 .../extdata/prl_exampleData.txt | 0 .../extdata/prl_multipleB_exampleData.txt | 0 .../extdata/pst_exampleData.txt | 0 .../extdata/ra_data_attend.txt | 0 .../extdata/ra_data_reappraisal.txt | 0 .../extdata/ra_exampleData.txt | 0 .../extdata/rdt_exampleData.txt | 0 .../extdata/ts_exampleData.txt | 0 .../extdata/ug_exampleData.txt | 0 .../extdata/wcs_answersheet.txt | 0 .../extdata/wcs_exampleData.txt | 0 .../stan_files/bandit2arm_delta.stan | 0 .../stan_files/bandit4arm2_kalman_filter.stan | 0 .../stan_files/bandit4arm_2par_lapse.stan | 0 .../stan_files/bandit4arm_4par.stan | 0 .../stan_files/bandit4arm_lapse.stan | 0 .../stan_files/bandit4arm_lapse_decay.stan | 0 .../stan_files/bandit4arm_singleA_lapse.stan | 0 .../stan_files/bart_par4.stan | 0 .../stan_files/choiceRT_ddm.stan | 0 .../stan_files/choiceRT_ddm_single.stan | 0 .../stan_files/choiceRT_lba.stan | 0 .../stan_files/choiceRT_lba_single.stan | 0 .../stan_files/cra_exp.stan | 0 .../stan_files/cra_linear.stan | 0 .../stan_files/dbdm_prob_weight.stan | 0 .../common => commons}/stan_files/dd_cs.stan | 0 
.../stan_files/dd_cs_single.stan | 0 .../common => commons}/stan_files/dd_exp.stan | 0 .../stan_files/dd_hyperbolic.stan | 0 .../stan_files/dd_hyperbolic_single.stan | 0 .../common => commons}/stan_files/gng_m1.stan | 0 .../common => commons}/stan_files/gng_m2.stan | 0 .../common => commons}/stan_files/gng_m3.stan | 0 .../common => commons}/stan_files/gng_m4.stan | 0 .../stan_files/igt_orl.stan | 0 .../stan_files/igt_pvl_decay.stan | 0 .../stan_files/igt_pvl_delta.stan | 0 .../stan_files/igt_vpp.stan | 0 .../stan_files/peer_ocu.stan | 0 .../stan_files/pre/license.stan | 0 .../stan_files/prl_ewa.stan | 0 .../stan_files/prl_fictitious.stan | 0 .../stan_files/prl_fictitious_multipleB.stan | 0 .../stan_files/prl_fictitious_rp.stan | 0 .../stan_files/prl_fictitious_rp_woa.stan | 0 .../stan_files/prl_fictitious_woa.stan | 0 .../common => commons}/stan_files/prl_rp.stan | 0 .../stan_files/prl_rp_multipleB.stan | 0 .../stan_files/pst_gainloss_Q.stan | 0 .../stan_files/ra_noLA.stan | 0 .../stan_files/ra_noRA.stan | 0 .../stan_files/ra_prospect.stan | 0 .../stan_files/rdt_happiness.stan | 0 .../stan_files/ts_par4.stan | 0 .../stan_files/ts_par6.stan | 0 .../stan_files/ts_par7.stan | 0 .../stan_files/ug_bayes.stan | 0 .../stan_files/ug_delta.stan | 0 .../stan_files/wcs_sql.stan | 0 150 files changed, 4 insertions(+), 61385 deletions(-) create mode 120000 Python/hbayesdm/common/extdata create mode 120000 Python/hbayesdm/common/stan_files create mode 120000 R/inst/extdata delete mode 100644 R/inst/extdata/bandit2arm_exampleData.txt delete mode 100644 R/inst/extdata/bandit4arm2_exampleData.txt delete mode 100644 R/inst/extdata/bandit4arm_exampleData.txt delete mode 100644 R/inst/extdata/bart_exampleData.txt delete mode 100644 R/inst/extdata/choiceRT_exampleData.txt delete mode 100644 R/inst/extdata/choiceRT_single_exampleData.txt delete mode 100644 R/inst/extdata/cra_exampleData.txt delete mode 100644 R/inst/extdata/dbdm_exampleData.txt delete mode 100644 R/inst/extdata/dd_exampleData.txt delete mode 100644 R/inst/extdata/dd_single_exampleData.txt delete mode 100644 R/inst/extdata/gng_exampleData.txt delete mode 100644 R/inst/extdata/igt_exampleData.txt delete mode 100644 R/inst/extdata/peer_exampleData.txt delete mode 100644 R/inst/extdata/prl_exampleData.txt delete mode 100644 R/inst/extdata/prl_multipleB_exampleData.txt delete mode 100644 R/inst/extdata/pst_exampleData.txt delete mode 100644 R/inst/extdata/ra_data_attend.txt delete mode 100644 R/inst/extdata/ra_data_reappraisal.txt delete mode 100644 R/inst/extdata/ra_exampleData.txt delete mode 100644 R/inst/extdata/rdt_exampleData.txt delete mode 100644 R/inst/extdata/ts_exampleData.txt delete mode 100644 R/inst/extdata/ug_exampleData.txt delete mode 100644 R/inst/extdata/wcs_answersheet.txt delete mode 100644 R/inst/extdata/wcs_exampleData.txt create mode 120000 R/inst/stan_files delete mode 100644 R/inst/stan_files/bandit2arm_delta.stan delete mode 100644 R/inst/stan_files/bandit4arm2_kalman_filter.stan delete mode 100644 R/inst/stan_files/bandit4arm_2par_lapse.stan delete mode 100644 R/inst/stan_files/bandit4arm_4par.stan delete mode 100644 R/inst/stan_files/bandit4arm_lapse.stan delete mode 100644 R/inst/stan_files/bandit4arm_lapse_decay.stan delete mode 100644 R/inst/stan_files/bandit4arm_singleA_lapse.stan delete mode 100644 R/inst/stan_files/bart_par4.stan delete mode 100644 R/inst/stan_files/choiceRT_ddm.stan delete mode 100644 R/inst/stan_files/choiceRT_ddm_single.stan delete mode 100644 R/inst/stan_files/choiceRT_lba.stan delete mode 
100644 R/inst/stan_files/choiceRT_lba_single.stan delete mode 100644 R/inst/stan_files/cra_exp.stan delete mode 100644 R/inst/stan_files/cra_linear.stan delete mode 100644 R/inst/stan_files/dbdm_prob_weight.stan delete mode 100644 R/inst/stan_files/dd_cs.stan delete mode 100644 R/inst/stan_files/dd_cs_single.stan delete mode 100644 R/inst/stan_files/dd_exp.stan delete mode 100644 R/inst/stan_files/dd_hyperbolic.stan delete mode 100644 R/inst/stan_files/dd_hyperbolic_single.stan delete mode 100644 R/inst/stan_files/gng_m1.stan delete mode 100644 R/inst/stan_files/gng_m2.stan delete mode 100644 R/inst/stan_files/gng_m3.stan delete mode 100644 R/inst/stan_files/gng_m4.stan delete mode 100644 R/inst/stan_files/igt_orl.stan delete mode 100644 R/inst/stan_files/igt_pvl_decay.stan delete mode 100644 R/inst/stan_files/igt_pvl_delta.stan delete mode 100644 R/inst/stan_files/igt_vpp.stan delete mode 100644 R/inst/stan_files/peer_ocu.stan delete mode 100644 R/inst/stan_files/pre/license.stan delete mode 100644 R/inst/stan_files/prl_ewa.stan delete mode 100644 R/inst/stan_files/prl_fictitious.stan delete mode 100644 R/inst/stan_files/prl_fictitious_multipleB.stan delete mode 100644 R/inst/stan_files/prl_fictitious_rp.stan delete mode 100644 R/inst/stan_files/prl_fictitious_rp_woa.stan delete mode 100644 R/inst/stan_files/prl_fictitious_woa.stan delete mode 100644 R/inst/stan_files/prl_rp.stan delete mode 100644 R/inst/stan_files/prl_rp_multipleB.stan delete mode 100644 R/inst/stan_files/pst_gainloss_Q.stan delete mode 100644 R/inst/stan_files/ra_noLA.stan delete mode 100644 R/inst/stan_files/ra_noRA.stan delete mode 100644 R/inst/stan_files/ra_prospect.stan delete mode 100644 R/inst/stan_files/rdt_happiness.stan delete mode 100644 R/inst/stan_files/ts_par4.stan delete mode 100644 R/inst/stan_files/ts_par6.stan delete mode 100644 R/inst/stan_files/ts_par7.stan delete mode 100644 R/inst/stan_files/ug_bayes.stan delete mode 100644 R/inst/stan_files/ug_delta.stan delete mode 100644 R/inst/stan_files/wcs_sql.stan rename {Python/hbayesdm/common => commons}/extdata/bandit2arm_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/bandit4arm2_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/bandit4arm_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/bart_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/choiceRT_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/choiceRT_single_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/cra_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/dbdm_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/dd_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/dd_single_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/gng_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/igt_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/peer_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/prl_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/prl_multipleB_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/pst_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/ra_data_attend.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/ra_data_reappraisal.txt (100%) rename {Python/hbayesdm/common => 
commons}/extdata/ra_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/rdt_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/ts_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/ug_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/wcs_answersheet.txt (100%) rename {Python/hbayesdm/common => commons}/extdata/wcs_exampleData.txt (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit2arm_delta.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm2_kalman_filter.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm_2par_lapse.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm_4par.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm_lapse.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm_lapse_decay.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bandit4arm_singleA_lapse.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/bart_par4.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/choiceRT_ddm.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/choiceRT_ddm_single.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/choiceRT_lba.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/choiceRT_lba_single.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/cra_exp.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/cra_linear.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dbdm_prob_weight.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dd_cs.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dd_cs_single.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dd_exp.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dd_hyperbolic.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/dd_hyperbolic_single.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/gng_m1.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/gng_m2.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/gng_m3.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/gng_m4.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/igt_orl.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/igt_pvl_decay.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/igt_pvl_delta.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/igt_vpp.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/peer_ocu.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/pre/license.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_ewa.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_fictitious.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_fictitious_multipleB.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_fictitious_rp.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_fictitious_rp_woa.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_fictitious_woa.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_rp.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/prl_rp_multipleB.stan (100%) rename {Python/hbayesdm/common => 
commons}/stan_files/pst_gainloss_Q.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ra_noLA.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ra_noRA.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ra_prospect.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/rdt_happiness.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ts_par4.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ts_par6.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ts_par7.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ug_bayes.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/ug_delta.stan (100%) rename {Python/hbayesdm/common => commons}/stan_files/wcs_sql.stan (100%) diff --git a/Python/hbayesdm/common/extdata b/Python/hbayesdm/common/extdata new file mode 120000 index 00000000..ad0c739f --- /dev/null +++ b/Python/hbayesdm/common/extdata @@ -0,0 +1 @@ +../../../commons/extdata \ No newline at end of file diff --git a/Python/hbayesdm/common/stan_files b/Python/hbayesdm/common/stan_files new file mode 120000 index 00000000..7fa07d34 --- /dev/null +++ b/Python/hbayesdm/common/stan_files @@ -0,0 +1 @@ +../../../commons/stan_files \ No newline at end of file diff --git a/R/inst/extdata b/R/inst/extdata new file mode 120000 index 00000000..fcec3fef --- /dev/null +++ b/R/inst/extdata @@ -0,0 +1 @@ +../../commons/extdata \ No newline at end of file diff --git a/R/inst/extdata/bandit2arm_exampleData.txt b/R/inst/extdata/bandit2arm_exampleData.txt deleted file mode 100644 index d28e2ca2..00000000 --- a/R/inst/extdata/bandit2arm_exampleData.txt +++ /dev/null @@ -1,2001 +0,0 @@ -subjID trial choice outcome -1 1 1 1 -1 2 2 -1 -1 3 2 -1 -1 4 2 -1 -1 5 1 -1 -1 6 2 -1 -1 7 2 1 -1 8 1 1 -1 9 1 1 -1 10 1 -1 -1 11 2 1 -1 12 1 1 -1 13 2 -1 -1 14 1 -1 -1 15 1 1 -1 16 1 1 -1 17 2 -1 -1 18 2 -1 -1 19 2 -1 -1 20 2 1 -1 21 1 -1 -1 22 1 -1 -1 23 2 -1 -1 24 2 -1 -1 25 2 -1 -1 26 1 1 -1 27 1 -1 -1 28 2 -1 -1 29 1 1 -1 30 1 1 -1 31 1 1 -1 32 1 1 -1 33 2 -1 -1 34 1 1 -1 35 1 1 -1 36 1 -1 -1 37 2 -1 -1 38 1 1 -1 39 2 -1 -1 40 1 1 -1 41 1 1 -1 42 1 1 -1 43 2 -1 -1 44 1 1 -1 45 1 1 -1 46 1 1 -1 47 1 1 -1 48 2 1 -1 49 1 1 -1 50 1 1 -1 51 1 1 -1 52 2 -1 -1 53 1 1 -1 54 1 -1 -1 55 1 -1 -1 56 2 -1 -1 57 1 -1 -1 58 2 1 -1 59 2 1 -1 60 2 -1 -1 61 1 1 -1 62 1 -1 -1 63 2 1 -1 64 1 1 -1 65 1 1 -1 66 2 -1 -1 67 1 -1 -1 68 2 1 -1 69 2 -1 -1 70 2 -1 -1 71 2 -1 -1 72 1 1 -1 73 2 1 -1 74 1 1 -1 75 2 -1 -1 76 1 -1 -1 77 1 -1 -1 78 2 -1 -1 79 1 -1 -1 80 2 -1 -1 81 1 1 -1 82 2 -1 -1 83 2 -1 -1 84 1 -1 -1 85 1 1 -1 86 1 1 -1 87 1 1 -1 88 1 1 -1 89 1 1 -1 90 1 -1 -1 91 2 1 -1 92 2 1 -1 93 1 1 -1 94 2 1 -1 95 2 -1 -1 96 1 -1 -1 97 1 1 -1 98 1 -1 -1 99 1 -1 -1 100 1 -1 -2 1 2 1 -2 2 2 -1 -2 3 2 -1 -2 4 1 1 -2 5 2 1 -2 6 1 1 -2 7 1 1 -2 8 1 -1 -2 9 1 -1 -2 10 2 1 -2 11 1 1 -2 12 2 -1 -2 13 2 -1 -2 14 1 1 -2 15 2 1 -2 16 1 1 -2 17 1 1 -2 18 1 1 -2 19 2 1 -2 20 1 1 -2 21 2 1 -2 22 1 1 -2 23 2 1 -2 24 2 -1 -2 25 1 1 -2 26 2 1 -2 27 1 1 -2 28 2 1 -2 29 2 -1 -2 30 1 1 -2 31 2 1 -2 32 1 1 -2 33 2 1 -2 34 2 1 -2 35 2 1 -2 36 2 -1 -2 37 1 1 -2 38 1 -1 -2 39 2 1 -2 40 1 1 -2 41 2 1 -2 42 2 -1 -2 43 1 1 -2 44 2 -1 -2 45 2 -1 -2 46 1 1 -2 47 1 -1 -2 48 1 1 -2 49 2 -1 -2 50 1 1 -2 51 1 1 -2 52 1 1 -2 53 2 -1 -2 54 1 1 -2 55 1 -1 -2 56 1 -1 -2 57 2 1 -2 58 1 1 -2 59 2 -1 -2 60 1 1 -2 61 1 1 -2 62 1 1 -2 63 1 1 -2 64 1 1 -2 65 1 -1 -2 66 1 1 -2 67 2 -1 -2 68 1 -1 -2 69 2 1 -2 70 1 1 -2 71 2 -1 -2 72 2 1 -2 73 1 1 -2 74 2 -1 -2 75 1 -1 
-2 76 2 1 -2 77 1 1 -2 78 1 1 -2 79 1 1 -2 80 1 -1 -2 81 2 -1 -2 82 2 -1 -2 83 1 1 -2 84 2 1 -2 85 1 -1 -2 86 2 1 -2 87 1 1 -2 88 1 1 -2 89 1 -1 -2 90 1 -1 -2 91 1 1 -2 92 1 1 -2 93 2 1 -2 94 2 -1 -2 95 1 -1 -2 96 1 1 -2 97 2 1 -2 98 1 1 -2 99 1 -1 -2 100 2 -1 -3 1 1 1 -3 2 2 1 -3 3 1 1 -3 4 2 -1 -3 5 1 1 -3 6 1 1 -3 7 1 1 -3 8 2 -1 -3 9 1 1 -3 10 1 1 -3 11 1 1 -3 12 1 1 -3 13 2 1 -3 14 2 1 -3 15 1 1 -3 16 2 -1 -3 17 2 -1 -3 18 1 1 -3 19 2 1 -3 20 2 -1 -3 21 2 1 -3 22 2 -1 -3 23 1 1 -3 24 2 -1 -3 25 1 1 -3 26 2 -1 -3 27 1 -1 -3 28 1 1 -3 29 2 1 -3 30 1 -1 -3 31 2 -1 -3 32 1 1 -3 33 1 -1 -3 34 2 1 -3 35 2 1 -3 36 1 1 -3 37 2 1 -3 38 1 1 -3 39 2 1 -3 40 1 -1 -3 41 2 -1 -3 42 2 -1 -3 43 2 -1 -3 44 1 1 -3 45 1 -1 -3 46 1 1 -3 47 1 1 -3 48 1 1 -3 49 1 1 -3 50 1 1 -3 51 2 -1 -3 52 1 1 -3 53 2 -1 -3 54 1 -1 -3 55 1 -1 -3 56 1 1 -3 57 1 -1 -3 58 1 1 -3 59 1 1 -3 60 1 -1 -3 61 1 1 -3 62 2 -1 -3 63 1 1 -3 64 1 1 -3 65 1 1 -3 66 2 -1 -3 67 1 -1 -3 68 1 -1 -3 69 2 -1 -3 70 2 -1 -3 71 2 1 -3 72 2 -1 -3 73 1 1 -3 74 2 1 -3 75 2 -1 -3 76 1 -1 -3 77 1 1 -3 78 1 1 -3 79 2 -1 -3 80 1 1 -3 81 1 -1 -3 82 1 -1 -3 83 1 1 -3 84 1 1 -3 85 2 1 -3 86 1 1 -3 87 1 1 -3 88 1 1 -3 89 1 1 -3 90 2 -1 -3 91 1 -1 -3 92 2 -1 -3 93 2 -1 -3 94 2 -1 -3 95 2 -1 -3 96 1 1 -3 97 1 -1 -3 98 1 -1 -3 99 2 1 -3 100 1 1 -4 1 2 -1 -4 2 2 1 -4 3 2 1 -4 4 2 1 -4 5 1 1 -4 6 2 1 -4 7 1 1 -4 8 1 1 -4 9 1 1 -4 10 2 -1 -4 11 2 -1 -4 12 1 1 -4 13 1 -1 -4 14 2 -1 -4 15 1 1 -4 16 1 1 -4 17 1 -1 -4 18 2 1 -4 19 1 1 -4 20 2 -1 -4 21 2 1 -4 22 1 1 -4 23 1 -1 -4 24 2 -1 -4 25 1 1 -4 26 1 -1 -4 27 1 -1 -4 28 2 -1 -4 29 2 1 -4 30 2 -1 -4 31 2 1 -4 32 2 -1 -4 33 2 -1 -4 34 1 1 -4 35 1 -1 -4 36 2 -1 -4 37 1 -1 -4 38 2 1 -4 39 2 -1 -4 40 2 -1 -4 41 1 1 -4 42 2 1 -4 43 1 -1 -4 44 1 -1 -4 45 2 1 -4 46 1 -1 -4 47 2 1 -4 48 2 1 -4 49 2 -1 -4 50 2 -1 -4 51 1 1 -4 52 1 1 -4 53 1 1 -4 54 2 1 -4 55 1 1 -4 56 1 1 -4 57 1 1 -4 58 1 1 -4 59 2 1 -4 60 1 1 -4 61 2 1 -4 62 1 -1 -4 63 2 -1 -4 64 2 -1 -4 65 2 -1 -4 66 1 1 -4 67 2 -1 -4 68 1 -1 -4 69 1 -1 -4 70 1 1 -4 71 2 1 -4 72 2 -1 -4 73 2 1 -4 74 1 -1 -4 75 2 -1 -4 76 1 1 -4 77 1 1 -4 78 1 -1 -4 79 2 -1 -4 80 1 1 -4 81 2 -1 -4 82 1 1 -4 83 1 -1 -4 84 1 -1 -4 85 2 1 -4 86 1 1 -4 87 1 1 -4 88 2 1 -4 89 2 -1 -4 90 2 -1 -4 91 1 1 -4 92 1 1 -4 93 2 1 -4 94 1 1 -4 95 2 1 -4 96 2 -1 -4 97 2 1 -4 98 1 1 -4 99 2 -1 -4 100 2 1 -5 1 2 -1 -5 2 2 1 -5 3 1 -1 -5 4 2 1 -5 5 2 -1 -5 6 1 1 -5 7 1 -1 -5 8 1 -1 -5 9 2 1 -5 10 1 -1 -5 11 1 -1 -5 12 2 -1 -5 13 1 1 -5 14 1 -1 -5 15 1 1 -5 16 2 -1 -5 17 1 -1 -5 18 1 -1 -5 19 1 1 -5 20 1 1 -5 21 1 -1 -5 22 1 1 -5 23 2 -1 -5 24 2 1 -5 25 1 1 -5 26 1 1 -5 27 2 -1 -5 28 1 1 -5 29 1 1 -5 30 2 -1 -5 31 1 -1 -5 32 2 1 -5 33 1 -1 -5 34 2 -1 -5 35 2 -1 -5 36 1 1 -5 37 1 -1 -5 38 2 1 -5 39 1 1 -5 40 2 -1 -5 41 1 1 -5 42 1 1 -5 43 1 1 -5 44 1 -1 -5 45 1 1 -5 46 2 -1 -5 47 1 1 -5 48 2 1 -5 49 1 1 -5 50 1 1 -5 51 1 1 -5 52 2 -1 -5 53 1 1 -5 54 2 -1 -5 55 1 1 -5 56 1 -1 -5 57 1 1 -5 58 1 -1 -5 59 2 1 -5 60 2 1 -5 61 2 -1 -5 62 1 -1 -5 63 2 1 -5 64 1 1 -5 65 2 1 -5 66 2 1 -5 67 1 1 -5 68 1 -1 -5 69 2 -1 -5 70 1 -1 -5 71 2 1 -5 72 1 1 -5 73 2 -1 -5 74 2 -1 -5 75 2 -1 -5 76 2 -1 -5 77 1 -1 -5 78 1 1 -5 79 1 1 -5 80 2 -1 -5 81 1 1 -5 82 2 -1 -5 83 2 1 -5 84 2 1 -5 85 1 1 -5 86 1 1 -5 87 2 1 -5 88 1 1 -5 89 1 1 -5 90 2 -1 -5 91 1 1 -5 92 2 -1 -5 93 1 -1 -5 94 1 1 -5 95 1 1 -5 96 1 1 -5 97 1 -1 -5 98 1 1 -5 99 1 1 -5 100 1 -1 -6 1 1 -1 -6 2 2 -1 -6 3 2 -1 -6 4 1 -1 -6 5 1 1 -6 6 1 1 -6 7 2 1 -6 8 1 -1 -6 9 2 -1 -6 10 2 -1 -6 11 2 1 -6 12 1 1 -6 13 2 -1 -6 14 2 -1 -6 15 2 1 -6 16 1 1 -6 17 1 -1 -6 18 2 -1 -6 
[... remaining deleted rows of the preceding example-data file elided (subjects continue through 20, trials 1-100; four columns, apparently subject ID, trial, choice in {1,2}, outcome in {-1,1}) ...]
diff --git a/R/inst/extdata/bandit4arm2_exampleData.txt b/R/inst/extdata/bandit4arm2_exampleData.txt
deleted file mode 100644
index 68ac99e3..00000000
--- a/R/inst/extdata/bandit4arm2_exampleData.txt
+++ /dev/null
@@ -1,3001 +0,0 @@
-subjID choice outcome
[... 3,000 deleted data rows elided (subjects 1-10, 300 trials each; choice in {1,2,3,4}, outcome 20-97) ...]
diff --git a/R/inst/extdata/bandit4arm_exampleData.txt b/R/inst/extdata/bandit4arm_exampleData.txt
deleted file mode 100644
index 92ffad0d..00000000
--- a/R/inst/extdata/bandit4arm_exampleData.txt
+++ /dev/null
@@ -1,2001 +0,0 @@
-subjID gain loss choice
[... 2,000 deleted data rows elided (subjects 102 and 201-209, 200 trials each; gain in {0,1}, loss in {-1,0}, choice in {1,2,3,4}) ...]
\ No newline at end of file
diff --git a/R/inst/extdata/bart_exampleData.txt b/R/inst/extdata/bart_exampleData.txt
deleted file mode 100644
index 4890955f..00000000
--- a/R/inst/extdata/bart_exampleData.txt
+++ /dev/null
@@ -1,91 +0,0 @@
-subjID group trial reward pumps explosion
[... 90 data rows elided: 30 BART trials each for subjIDs 101, 103, and 104 ...]
diff --git a/R/inst/extdata/choiceRT_exampleData.txt b/R/inst/extdata/choiceRT_exampleData.txt
deleted file mode 100644
index bbe2b474..00000000
--- a/R/inst/extdata/choiceRT_exampleData.txt
+++ /dev/null
@@ -1,5001 +0,0 @@
-RT choice subjID condition
[... data rows elided: simulated response time (s), binary choice, subjID, and condition code for each of the file's 5000 trials ...]
2 3 1 -0.339028778633365 2 3 1 -0.717224138012012 2 3 1 -0.779291021701807 2 3 1 -0.561060495047965 2 3 1 -1.80155372469456 2 3 1 -0.350666104484292 2 3 1 -1.75384028882697 2 3 1 -0.34614655108637 2 3 1 -0.857888702654049 2 3 1 -0.538538273376274 1 3 1 -0.808165431176924 2 3 1 -1.12566956065676 2 3 1 -0.401747844392863 1 3 1 -0.420638560385403 1 3 1 -0.689567547922525 1 3 1 -0.687101320313498 2 3 1 -1.11450021231709 1 3 1 -1.05673194424108 2 3 1 -0.378493955443519 1 3 1 -0.374806303246874 2 3 1 -0.605170645685489 1 3 1 -0.568600361954804 1 3 1 -0.279564048875058 2 3 1 -0.737344841204778 1 3 1 -0.383626489427317 1 3 1 -0.236592626403799 2 3 1 -0.606395313320835 2 3 1 -0.258446694712414 1 3 1 -0.667654851854366 1 3 1 -0.709849063285861 2 3 1 -1.06652304155128 2 3 1 -0.375309922256882 1 3 1 -0.305631875482354 2 3 1 -1.00431113766878 2 3 1 -0.731812037602777 1 3 1 -0.414232775562026 2 3 1 -0.826500384443797 2 3 1 -0.719116830057354 1 3 1 -0.61855729451436 2 3 1 -0.399444901441129 1 3 1 -0.2295675381027 2 3 1 -0.361734141102366 2 3 1 -0.928229131678883 1 3 1 -0.767105903673052 2 3 1 -0.312954135574329 2 3 1 -0.316125600053679 2 3 1 -0.83356862967393 2 3 1 -0.807805288376209 2 3 1 -0.529747082048339 2 3 1 -0.579438577427002 2 3 1 -0.748413890954428 2 3 1 -0.282714299825272 2 3 1 -1.32349690412234 2 3 1 -0.265264643396819 2 3 1 -1.05199545660803 1 3 1 -0.354844761549886 2 3 1 -0.673382389127187 2 3 1 -0.353927127140679 2 3 1 -0.742708866815784 2 3 1 -0.32440131369228 1 3 1 -1.15424939546308 2 3 1 -0.341988033892061 2 3 1 -0.385901020315423 2 3 1 -0.286688838450355 2 3 1 -0.862986046754551 1 3 1 -0.362564960443358 2 3 1 -0.618539574023911 1 3 1 -0.618839114124994 1 3 1 -0.351742202769926 2 3 1 -1.04713435904685 2 3 1 -1.13219276272773 1 3 1 -0.431897141646835 2 3 1 -0.903368625643694 2 3 1 -0.845773237970464 1 3 1 -0.448267685056844 1 3 1 -0.227965869389189 2 3 1 -0.727189656817706 1 3 1 -0.920965873390772 2 3 1 -2.86369573335364 2 3 1 -1.79113426784109 2 3 1 -0.434371557096156 1 3 1 -0.297049698054049 2 3 1 -0.908700456343662 2 3 1 -0.840014957530216 1 3 1 -0.391451573590056 2 3 1 -0.964762751519269 2 3 1 -1.97499804009749 1 3 1 -1.58759823353077 2 3 1 -2.10229181480408 1 3 1 -0.573075746749156 2 3 1 -0.87229795094926 2 3 1 -0.764360676764928 1 3 1 -0.236433389533537 2 3 1 -0.500319361165157 2 3 1 -0.436148207342909 1 3 1 -1.17237545076077 2 3 1 -0.305589800257361 2 3 1 -0.269400199640921 2 3 1 -1.35212659081556 1 3 1 -0.377929769476066 2 3 1 -0.899332425662463 1 3 1 -1.00394626493931 2 3 1 -1.10094408622287 2 3 1 -1.18830151405781 2 3 1 -0.284872387153195 2 3 1 -0.375024301973256 1 3 1 -0.404469325185188 1 3 1 -0.727775813795801 2 3 1 -0.252419279794447 2 3 1 -0.375032470671666 2 3 1 -0.309265085564879 2 3 1 -0.462609251209814 2 3 1 -0.320190368690629 1 3 1 -0.477686210715907 1 3 1 -0.275346713714152 2 3 1 -0.438717701647585 2 3 1 -0.810110401646601 2 3 1 -1.07883036639961 2 3 1 -0.809608091311169 2 3 1 -0.418153700722572 1 3 1 -0.764475113911811 1 3 1 -0.412993008145338 2 3 1 -1.335962806722 1 3 1 -0.875235294298827 1 3 1 -0.326172123137794 2 3 1 -0.231731510095046 2 3 1 -0.914448514006612 1 3 1 -0.550916483189837 2 3 1 -0.880466208259979 2 3 1 -0.601176005143088 2 3 1 -1.43879123063957 2 3 1 -0.621745991817644 2 3 1 -0.3528215159095 2 3 1 -0.30994437648555 1 3 1 -0.90362627319135 2 3 1 -0.903886767560117 2 3 1 -0.835640862308006 2 3 1 -1.20726153384552 2 3 1 -0.653989199174602 2 3 1 -0.915035948130758 2 3 1 -0.726052417728461 2 3 1 -1.01911267691402 2 3 1 -0.352826644011026 2 3 1 
-0.440580677663477 1 3 1 -0.999490854549375 1 3 1 -0.514934236463869 1 3 2 -0.270420118537311 2 3 2 -0.646471943779092 2 3 2 -0.338830766035059 2 3 2 -0.491671962582901 2 3 2 -0.629886758393846 2 3 2 -0.51589107754444 2 3 2 -0.353946635128139 2 3 2 -0.331203176196343 1 3 2 -1.99524093298412 2 3 2 -1.02440243540096 2 3 2 -0.282898155808958 2 3 2 -0.599827056733371 2 3 2 -0.238817716879006 2 3 2 -0.206191544000187 1 3 2 -0.855183187835193 2 3 2 -0.722927152841454 2 3 2 -0.780157089830913 2 3 2 -0.509420971748398 2 3 2 -0.96103805001364 2 3 2 -0.493670434412268 1 3 2 -0.214424723176226 2 3 2 -0.392688836409781 1 3 2 -0.658620383209045 2 3 2 -0.216107419356536 2 3 2 -0.57723401544534 2 3 2 -0.527348367007325 1 3 2 -0.473776142345069 2 3 2 -0.424350872006699 2 3 2 -2.16246776879602 1 3 2 -0.579491048291868 2 3 2 -0.317300006903978 1 3 2 -1.25002685289334 1 3 2 -0.322077006208459 2 3 2 -0.65598919200563 1 3 2 -1.4631363964763 2 3 2 -1.68342497778485 1 3 2 -0.950063435678861 1 3 2 -0.205802941406673 2 3 2 -0.442509433023036 1 3 2 -0.818174077396608 2 3 2 -0.286175835912647 2 3 2 -0.33842480954584 2 3 2 -1.38048451044948 2 3 2 -0.39988067006981 1 3 2 -0.253643662104766 2 3 2 -0.560976394476593 2 3 2 -0.517941831953703 1 3 2 -0.487317274258782 1 3 2 -0.722089923618615 2 3 2 -0.468435888325438 2 3 2 -0.673049975899187 2 3 2 -0.404615085266369 2 3 2 -0.337034159076431 2 3 2 -0.435942135312604 2 3 2 -0.369014730704062 2 3 2 -0.567411102529725 1 3 2 -0.683010310238025 2 3 2 -0.290243271494673 2 3 2 -0.316097101922202 1 3 2 -1.26062843312539 2 3 2 -0.511300941444236 2 3 2 -2.2216549447733 2 3 2 -0.6322594359434 2 3 2 -1.05478582553533 2 3 2 -0.435637851164182 1 3 2 -0.468027792640505 2 3 2 -0.420219455300886 1 3 2 -0.273929740512875 1 3 2 -0.793839005062366 2 3 2 -0.700039266476368 2 3 2 -0.698393675491842 2 3 2 -0.320657243989843 1 3 2 -1.16100839379935 1 3 2 -0.64693140923479 1 3 2 -0.512516768633988 2 3 2 -0.742023676531162 2 3 2 -0.972058194099057 2 3 2 -0.398653703474479 1 3 2 -0.410331563672334 2 3 2 -0.222283100080092 1 3 2 -0.469355586816016 1 3 2 -1.19333537174541 2 3 2 -0.307250192296768 2 3 2 -0.818587555385279 1 3 2 -0.236745366451503 2 3 2 -0.881972502593121 2 3 2 -0.277399097478032 2 3 2 -0.522816700136251 1 3 2 -1.03885756789198 2 3 2 -0.423490173116203 2 3 2 -0.603714429771403 1 3 2 -0.343602016522983 2 3 2 -1.06315814327229 2 3 2 -1.64394374580309 2 3 2 -0.258950271624856 2 3 2 -0.839767784808717 1 3 2 -0.422102182085427 2 3 2 -1.19755245402792 2 3 2 -0.36311410858205 1 3 2 -1.32548841200299 2 3 2 -1.25935619927138 2 3 2 -1.94209928030842 2 3 2 -0.373060834025449 2 3 2 -0.3101485804146 2 3 2 -0.533892648348211 2 3 2 -0.744784639587278 2 3 2 -1.18381744027858 2 3 2 -0.240744324843731 2 3 2 -0.946050579944945 1 3 2 -0.627753953881302 1 3 2 -0.370445161933509 1 3 2 -0.300650302064735 1 3 2 -0.373459020239413 2 3 2 -0.430948400866446 1 3 2 -1.49574317491573 2 3 2 -1.3101827626935 2 3 2 -1.67262325791643 1 3 2 -0.241993699720061 2 3 2 -0.328265869000834 2 3 2 -0.384015136641366 1 3 2 -0.217469612226687 2 3 2 -0.353910918208547 1 3 2 -1.26306667028009 2 3 2 -0.283701100770668 2 3 2 -0.442696536960043 1 3 2 -1.23508427666644 2 3 2 -0.211641858949563 2 3 2 -1.31762978216566 2 3 2 -0.427189324868075 1 3 2 -0.427280548495511 2 3 2 -1.48538078369227 1 3 2 -0.628708315466251 2 3 2 -1.78361132590903 2 3 2 -0.239589904781277 1 3 2 -0.562274806702486 2 3 2 -0.721667014209347 1 3 2 -0.659149048175133 2 3 2 -0.829239328543952 2 3 2 -1.02272830279552 2 3 2 -0.671890699523076 2 3 2 -1.72636732645653 1 3 2 
-0.615737367953486 2 3 2 -1.07678479171103 1 3 2 -0.538623051607739 2 3 2 -1.39867639023568 1 3 2 -0.218925904964068 2 3 2 -0.240724605293016 2 3 2 -0.452439100915242 2 3 2 -0.694089462161358 2 3 2 -1.84709801261543 2 3 2 -0.564582100435542 2 3 2 -0.725448687770764 2 3 2 -0.30708178683197 1 3 2 -0.47535866323473 2 3 2 -0.829935945880499 2 3 2 -0.90098748577001 2 3 2 -0.321123642659435 2 3 2 -0.295296832786178 2 3 2 -0.312203918236697 2 3 2 -0.967729509776647 1 3 2 -1.5438301792962 2 3 2 -0.493390293618284 2 3 2 -0.799464309749491 2 3 2 -0.492248525373071 1 3 2 -0.876464392051748 1 3 2 -0.914622363311664 2 3 2 -0.939704807763569 1 3 2 -0.901114296634733 1 3 2 -2.83967944372257 1 3 2 -0.633623902249606 1 3 2 -0.590830718948575 2 3 2 -0.644050309060636 2 3 2 -0.618329617213195 1 3 2 -0.406935681035188 2 3 2 -0.680935599818192 2 3 2 -0.689990604040213 2 3 2 -0.234441659901205 1 3 2 -0.628614985132006 2 3 2 -0.603104314434233 1 3 2 -0.458166460126185 2 3 2 -0.5410221389939 2 3 2 -0.806727630952272 1 3 2 -0.74995864564573 2 3 2 -0.925796642818387 2 3 2 -1.48881282804597 2 3 2 -1.32567553513673 2 3 2 -0.720300933060513 1 3 2 -0.845676771367771 2 3 2 -1.03943195815275 2 3 2 -0.269814661026592 2 3 2 -0.303313872334609 1 3 2 -0.668337346160446 1 3 2 -0.802759823174443 1 3 2 -1.0891459476003 1 3 2 -0.524162399076158 2 3 2 -0.843176928462498 2 3 2 -0.378654464972701 2 3 2 -0.37200187900001 2 3 2 -1.24108370520966 2 3 2 -0.407967861247184 1 3 2 -0.385604160482279 2 3 2 -1.37807312575617 2 3 2 -1.6236739129127 2 3 2 -0.299498250287133 2 3 2 -1.50283781972101 2 3 2 -0.425002443191797 2 3 2 -0.793010798627147 1 3 2 -0.443607069543917 2 3 2 -0.553450577754003 2 3 2 -0.856593591339924 2 3 2 -0.525593955394394 2 3 2 -0.896348840912902 2 3 2 -2.32094781645657 2 3 2 -0.343409204205168 2 3 2 -2.55188788674935 1 3 2 -0.59347479355615 2 3 2 -1.14133837346965 1 3 2 -0.769316859974066 1 3 2 -0.553345822537459 1 3 2 -0.280403390069466 1 3 2 -0.351752315403098 2 3 2 -0.464937188483399 1 3 2 -0.498643366432877 2 3 2 -1.20768606813256 2 3 2 -0.810552918965932 2 3 2 -0.814572293196432 2 3 2 -1.12466835158388 1 3 2 -0.605892652281983 2 3 2 -0.718062408112775 1 3 2 -1.43756814834945 2 3 2 -0.434523586879683 2 3 2 -1.1331064813251 2 3 2 -0.347724946096093 2 3 2 -0.493607135881693 1 3 2 -0.67651193182361 1 3 2 -0.552022281319177 1 3 2 -0.321142527517636 2 3 2 -0.597883853359763 1 3 2 -0.895001662908153 1 3 2 -0.48578933838309 1 3 2 -1.68093599717435 1 3 2 -0.70886900345093 2 3 2 -0.366715507465807 2 3 2 -0.229193753495571 1 3 2 -0.456535243655345 2 3 2 -0.96352225520781 1 3 2 -0.744795713668557 2 3 2 -0.469577483093902 2 3 2 -0.32463756963168 1 3 2 -0.718346029022071 2 3 2 -1.29491659083823 2 3 2 -0.461155144420931 2 3 2 -0.5798492161351 2 3 2 -1.05582449803871 2 3 2 -0.28481585650096 2 3 2 -0.410730259856026 2 3 2 -1.60060704532405 2 3 2 -0.534513028770362 2 3 2 -0.290600969494565 1 3 2 -0.508134824209082 2 3 2 -0.733677137815329 1 3 2 -0.524546051284814 2 3 2 -0.626615353700544 2 3 2 -0.207338061930642 2 3 2 -2.49191138211878 1 3 2 -0.446516299324413 2 3 2 -0.568970465239259 2 3 2 -1.30104078909728 2 3 2 -0.573721490299906 2 3 2 -0.395039645429215 2 3 2 -0.868269050301906 2 3 2 -1.39110795542721 2 3 2 -0.732569267335244 2 3 2 -0.346330122949029 2 3 2 -0.28247517183272 2 3 2 -0.339777576105551 2 3 2 -0.709070836414342 2 3 2 -0.741775977804924 2 3 2 -1.3105501266272 2 3 2 -0.650809742033691 1 3 2 -1.21685252483908 2 3 2 -1.31226952331015 2 3 2 -0.671641212759192 2 3 2 -2.60476050728261 1 3 2 -0.423290539217436 1 3 2 
-0.747391695995648 2 3 2 -0.415214528560093 2 3 2 -0.732547499620541 2 3 2 -1.11203310275713 1 3 2 -0.310793178630083 1 3 2 -0.532873467984992 2 3 2 -0.24543893048753 2 3 2 -0.262637774460857 2 3 2 -0.638852263528672 2 3 2 -0.268251093022516 2 3 2 -0.754730587787048 1 3 2 -0.304771345055942 2 3 2 -0.600949799200535 2 3 2 -0.454921964270315 2 3 2 -0.291060243483869 2 3 2 -0.850896084981839 2 3 2 -1.02404745430124 2 3 2 -0.740373725034996 2 3 2 -0.697249433946795 2 3 2 -3.22861057623448 2 3 2 -0.614917615221698 2 3 2 -0.417940802999645 2 3 2 -0.428130364139945 1 3 2 -0.330671065628431 2 3 2 -0.62670607061658 2 3 2 -1.26038076404455 2 3 2 -0.322284823454811 2 3 2 -0.429191549101784 2 3 2 -0.455247274613782 2 3 2 -0.663368332878807 2 3 2 -0.305769717251401 2 3 2 -1.50140899013577 1 3 2 -1.45389192339163 2 3 2 -2.98105925544205 2 3 2 -1.04255718339312 2 3 2 -0.942508435934038 2 3 2 -2.787892838843 2 3 2 -0.840734403641314 2 3 2 -0.489925811963693 2 3 2 -1.8552326561657 2 3 2 -0.38632322022465 1 3 2 -0.256936541763573 2 3 2 -0.766126754945232 1 3 2 -0.451147102611256 2 3 2 -0.268349419782926 2 3 2 -0.284282929689453 2 3 2 -0.71369157188727 1 3 2 -0.984328464038398 1 3 2 -0.391254858951931 2 3 2 -0.473609040280498 1 3 2 -0.392371474332231 1 3 2 -0.512961553349468 2 3 2 -0.49196894137911 2 3 2 -1.98481178504207 2 3 2 -0.335597786997023 1 3 2 -0.951871051474144 2 3 2 -0.43773254271624 2 3 2 -0.759326775665626 1 3 2 -0.964163992380983 2 3 2 -0.243214557688043 2 3 2 -1.06020924390952 2 3 2 -0.624129170951697 2 3 2 -0.631213229469376 2 3 2 -1.22749512645753 1 3 2 -1.4544220968578 2 3 2 -0.704504240730269 2 3 2 -0.358484343002385 2 3 2 -0.550361017867011 2 3 2 -0.300375078256161 2 3 2 -0.739678180371973 1 3 2 -0.646734220557972 1 3 2 -0.777682254344784 2 3 2 -0.87982617759661 2 3 2 -0.418244912065538 2 3 2 -0.443192766363974 2 3 2 -0.61563753996371 1 3 2 -1.56614219537768 2 3 2 -2.65065478085341 2 3 2 -0.305863823661165 2 3 2 -0.954934661408583 2 3 2 -0.449191627811582 1 3 2 -0.793807602132907 2 3 2 -0.519871565006984 1 3 2 -0.293207709999379 1 3 2 -0.634867211065706 1 3 2 -0.469994277604704 2 3 2 -0.343616111556125 2 3 2 -0.23004178016569 1 3 2 -0.335931900338173 2 3 2 -0.743164799438406 2 3 2 -0.582587466556771 2 3 2 -0.412610130745763 2 3 2 -0.303143772129072 2 3 2 -1.28143560920008 2 3 2 -1.17554195841916 1 3 2 -0.656531128634536 2 3 2 -0.29995775158261 1 3 2 -0.89568315126836 2 3 2 -1.30618389742279 2 3 2 -3.0496460415615 2 3 2 -0.743633550842162 2 3 2 -0.976245680087169 2 3 2 -0.376124867059714 2 3 2 -0.537516104244315 2 3 2 -0.212538006220838 2 3 2 -0.571699948650591 2 3 2 -0.658537447407892 2 3 2 -0.409180121003016 2 3 2 -0.38453187413556 2 3 2 -1.39971354422791 2 3 2 -1.48526501316284 2 3 2 -0.765450963277084 2 3 2 -0.45683796523625 2 3 2 -0.49455927446116 2 3 2 -0.565813339958499 2 3 2 -0.51579075751888 2 3 2 -0.579482217533706 1 3 2 -0.464431057746426 2 3 2 -0.33385297359306 1 3 2 -0.237785587972524 2 3 2 -0.788856968485086 2 3 2 -0.247738584125967 2 3 2 -1.29521163578326 1 3 2 -0.51405310891982 2 3 2 -0.248969113363235 1 3 2 -0.8258802772869 2 3 2 -0.858915328254726 2 3 2 -1.04412031745921 1 3 2 -1.67795472999734 1 3 2 -0.560822550684719 2 3 2 -0.639828932713558 2 3 2 -0.606061526335406 1 3 2 -0.446197468121209 2 3 2 -0.889197611107733 2 3 2 -0.229591061070164 1 3 2 -0.598751053548388 1 3 2 -0.42084079282726 2 3 2 -0.280912227540918 2 3 2 -0.353535095083615 1 3 2 -0.533533031895995 2 3 2 -0.40991726170081 2 3 2 -0.386705907004533 2 3 2 -0.663325409649471 2 3 2 -1.05367422973975 1 3 2 
-1.41642911541684 2 3 2 -0.728074140704459 2 3 2 -0.448237114304907 2 3 2 -0.471483947133633 2 3 2 -1.26271559797945 1 3 2 -0.241118847003316 2 3 2 -0.551833217379812 1 3 2 -0.508606043806118 2 3 2 -0.364460896466132 2 3 2 -0.412399264984449 2 3 2 -0.305920977565598 2 3 2 -0.705398182563824 2 3 2 -0.204036911418345 1 3 2 -1.04820599938717 1 3 2 -0.643323321586422 2 3 2 -3.66898798188367 2 3 2 -1.08829013517781 1 3 2 -0.361592831884118 1 3 2 -0.979363639648445 2 3 2 -0.521111784853412 1 3 2 -0.343395604193243 1 3 2 -0.587048881881688 2 3 2 -1.41201467474607 2 3 2 -0.443024780470065 2 3 2 -2.95728532098558 2 3 2 -0.319216793259789 2 3 2 -0.221315652964487 2 3 2 -0.77093692467471 1 3 2 -0.73558455041612 1 3 2 -0.259119262605434 2 3 2 -0.48994285788748 1 3 2 -0.571960539121533 2 3 2 -0.30717971899547 2 3 2 -1.97586628351188 2 3 2 -0.375432444639877 2 3 2 -0.811045564934994 2 3 2 -0.911400482590164 2 3 2 -0.421553307064521 1 3 2 -0.836499690800059 1 3 2 -0.709656783694355 1 3 2 -0.738884945936119 1 3 2 -0.369565008846999 1 3 2 -0.379597876167422 1 3 2 -0.673815169801798 2 3 2 -1.61852146139474 1 3 2 -0.341581465482509 1 3 2 -0.263351833487348 2 3 2 -0.34610526127482 2 3 2 -0.840263767605542 2 3 2 -0.860111461225023 2 3 2 -0.500246193912611 2 3 2 -0.611622543641809 2 3 2 -0.304839820514316 1 3 2 -0.478872857619653 2 3 2 -0.966509448052867 2 3 2 -0.369970546426949 2 3 2 -0.424912789436735 2 3 2 -0.639361694609756 2 3 2 -0.638867619514155 2 3 2 -0.439145854141595 2 3 2 -1.2433130957394 2 3 2 -0.546013305487959 2 3 2 -0.621366845453756 1 3 2 -0.371921153976491 1 3 2 -1.72030292611725 2 3 2 -0.265728845949588 2 3 2 -0.250485215272467 2 3 2 -0.260055352791922 2 3 2 -1.04055348391978 2 3 2 -0.639817829535305 2 3 2 -0.436687399202203 2 4 1 -0.390002899730434 2 4 1 -0.689914798071677 2 4 1 -0.89709020993931 2 4 1 -0.427183990353492 1 4 1 -0.340777120972685 2 4 1 -0.251597448645233 1 4 1 -0.270029795224852 1 4 1 -0.36506923015438 2 4 1 -0.925483297260795 1 4 1 -0.491542011496093 1 4 1 -0.584011448243567 2 4 1 -1.27419822711881 2 4 1 -0.33102420792392 2 4 1 -0.351900683919713 1 4 1 -0.393343181453058 2 4 1 -1.11700088666809 2 4 1 -0.383684827552196 2 4 1 -0.319389244865323 2 4 1 -0.207961019321362 1 4 1 -0.247906583019937 2 4 1 -0.664874815584718 2 4 1 -0.633174401608791 2 4 1 -0.228811949915 2 4 1 -0.313807509549483 1 4 1 -0.258061143771553 1 4 1 -1.40526930242479 2 4 1 -0.617601017184864 2 4 1 -0.448539769566249 2 4 1 -0.301286656201828 2 4 1 -0.35035514703207 2 4 1 -1.08961020047286 1 4 1 -0.29793786190371 1 4 1 -0.384511098165857 2 4 1 -0.761604738984846 1 4 1 -0.341173784779225 2 4 1 -0.376431442957684 2 4 1 -0.484742402575381 2 4 1 -0.257151781895977 1 4 1 -0.597747950821735 2 4 1 -0.510086318540574 2 4 1 -0.414694239051273 2 4 1 -0.583680054953304 1 4 1 -0.260760440689632 2 4 1 -0.798233246796322 1 4 1 -0.299861199950565 1 4 1 -0.276645559734816 1 4 1 -0.754832912251529 2 4 1 -0.409850111348969 2 4 1 -0.357937922566155 1 4 1 -1.74732047917189 2 4 1 -0.234683752237039 1 4 1 -0.379935287004687 2 4 1 -0.400355036755306 2 4 1 -0.199836116985197 1 4 1 -0.29654658741011 1 4 1 -0.40070736644743 2 4 1 -0.810244937262253 1 4 1 -0.232476182488422 1 4 1 -0.441531921393063 1 4 1 -0.266981994270395 1 4 1 -0.594986042709096 2 4 1 -0.226949086978422 2 4 1 -0.521975525478104 2 4 1 -0.342357539413783 1 4 1 -0.317929036077879 1 4 1 -0.252848528033154 1 4 1 -0.323519822370531 1 4 1 -0.543317920252121 2 4 1 -0.24136946576349 2 4 1 -0.924045894157614 1 4 1 -0.206395302547672 1 4 1 -0.32962698245246 1 4 1 
-0.351093830002051 2 4 1 -0.393905213694999 1 4 1 -0.215647621306677 1 4 1 -1.07707197772823 2 4 1 -0.389509392394056 2 4 1 -0.223818285290267 1 4 1 -0.506337167510338 2 4 1 -0.471183820790944 2 4 1 -0.845694161269827 2 4 1 -0.709573151871471 1 4 1 -0.17746060467044 1 4 1 -0.31073859900678 2 4 1 -0.441048237227571 1 4 1 -0.46110944700935 1 4 1 -0.54945827722732 2 4 1 -0.784046498525351 2 4 1 -0.67011316512292 2 4 1 -0.835724249453141 2 4 1 -0.446765564545247 2 4 1 -0.672425348568567 2 4 1 -0.327795527459012 1 4 1 -0.638194736699407 1 4 1 -0.706923294313998 2 4 1 -0.599365457528674 1 4 1 -0.323701086823648 2 4 1 -0.219227233336576 1 4 1 -0.457730880006645 1 4 1 -0.315697871971176 2 4 1 -0.589925939568761 1 4 1 -0.247439085357404 1 4 1 -0.32425097397633 1 4 1 -0.597517113461805 2 4 1 -0.718798195346532 2 4 1 -0.638921440877047 1 4 1 -0.255295790134737 2 4 1 -0.327574345419546 2 4 1 -0.290528460395922 2 4 1 -0.812154747100783 2 4 1 -0.62206059702272 2 4 1 -0.692613337029294 2 4 1 -0.625883974901828 2 4 1 -0.441430346433252 2 4 1 -0.490081824112803 2 4 1 -0.244708505986005 2 4 1 -0.670449909527048 2 4 1 -0.548567982146401 2 4 1 -0.670609785865862 2 4 1 -0.232196621565708 1 4 1 -1.00556527970583 1 4 1 -0.362798089622382 2 4 1 -0.299887052564318 1 4 1 -0.29571247705273 2 4 1 -0.379234401357032 1 4 1 -0.370440077361359 1 4 1 -0.431164494502316 1 4 1 -0.420428289154959 2 4 1 -0.730550233135442 2 4 1 -0.196484263350659 1 4 1 -0.322244565661879 1 4 1 -0.419903128817093 2 4 1 -0.887401080614827 2 4 1 -0.347054627607897 1 4 1 -0.478136351931499 2 4 1 -0.22065845309499 1 4 1 -0.230551027213474 1 4 1 -0.774783709604525 2 4 1 -0.4017025367099 2 4 1 -0.473979554090044 2 4 1 -0.647672920929279 2 4 1 -0.498264780188815 2 4 1 -0.242543415916194 1 4 1 -1.50936872822537 2 4 1 -0.566867242699491 1 4 1 -1.47001919200243 2 4 1 -0.443050568657419 1 4 1 -0.617820295621617 2 4 1 -0.359479796449494 2 4 1 -0.604637805760835 2 4 1 -0.586057575592382 2 4 1 -0.440372792115214 1 4 1 -0.33669989715765 2 4 1 -0.46750885766708 2 4 1 -0.256884377023786 2 4 1 -0.404464415727674 1 4 1 -1.31921997047773 1 4 1 -0.430411845210472 2 4 1 -0.191395478638844 1 4 1 -2.65424233758138 2 4 1 -0.446895568163648 2 4 1 -1.16838122758826 1 4 1 -0.404164043001054 1 4 1 -0.453588390001177 1 4 1 -0.489843569086762 2 4 1 -0.511073192179454 1 4 1 -0.366748489459315 2 4 1 -1.08784444950982 1 4 1 -0.3127679937141 1 4 1 -0.374479796833337 2 4 1 -0.536583188537731 2 4 1 -0.468590946410184 1 4 1 -0.844010712248646 2 4 1 -0.557108724831039 1 4 1 -0.221613723664657 1 4 1 -0.485469547021943 2 4 1 -0.263615872608948 1 4 1 -0.441259334915482 2 4 1 -0.198072070553861 1 4 1 -1.50553220647041 2 4 1 -0.489507279251992 1 4 1 -0.864537266238903 2 4 1 -0.45282939606678 1 4 1 -0.746554343478926 2 4 1 -0.867773306761754 1 4 1 -0.767200054356521 1 4 1 -0.23660645439005 1 4 1 -1.39880762611901 2 4 1 -0.350271079317704 1 4 1 -1.40586843906866 1 4 1 -0.478210911942578 2 4 1 -0.338115182852339 2 4 1 -0.730320711764892 1 4 1 -0.956280538468675 1 4 1 -0.420525233125089 1 4 1 -0.347936329704388 1 4 1 -0.474630289993715 1 4 1 -0.819946152352902 2 4 1 -0.934441627702885 1 4 1 -0.730580755467428 1 4 1 -0.271300024998423 2 4 1 -0.304557666577329 1 4 1 -0.693593360198042 2 4 1 -0.275013203541771 2 4 1 -1.04949192092882 1 4 1 -0.912930864419114 2 4 1 -0.245497508402249 1 4 1 -0.340496536712849 1 4 1 -0.206302342316397 1 4 1 -0.349666013963138 1 4 1 -0.391997601731428 2 4 1 -0.31394682736151 2 4 1 -0.19581160832742 1 4 1 -1.06233406823564 1 4 1 -0.415830063959857 2 4 1 
-0.593378048695084 2 4 1 -0.684233440252769 2 4 1 -0.688883920330433 2 4 1 -1.6950993730873 2 4 1 -0.432896756723563 2 4 1 -0.431906140578051 2 4 1 -1.52282637445437 1 4 1 -0.405643762755849 2 4 1 -0.3906375449839 2 4 1 -0.493637490245746 2 4 1 -0.253401712848908 1 4 1 -0.651891849887022 1 4 1 -0.191738721475655 1 4 1 -0.274200778844831 1 4 1 -0.687221734267395 1 4 1 -0.401284964672383 2 4 1 -1.30759449667971 2 4 1 -0.45371033421329 1 4 1 -0.414999090718536 1 4 1 -0.339864441346799 2 4 1 -0.224062601508878 1 4 1 -0.267986100808596 2 4 1 -0.205728192609412 1 4 1 -0.294602964122564 1 4 1 -0.57048745621061 2 4 1 -0.312999536718822 2 4 1 -0.468539738808252 2 4 1 -0.433964076498741 2 4 1 -0.263357126730855 1 4 1 -0.403225684575735 2 4 1 -0.505097149684945 1 4 1 -0.352103525497929 1 4 1 -0.353799144457459 2 4 1 -1.93501413202878 2 4 1 -0.234778101685937 1 4 1 -0.915861513813591 1 4 1 -0.222561853971709 2 4 1 -0.704078227413159 1 4 1 -0.324489257545476 1 4 1 -0.360961619402915 1 4 1 -1.08221841300972 2 4 1 -0.255068442126581 1 4 1 -0.427031071589034 2 4 1 -0.268383547265101 2 4 1 -0.527836858914301 2 4 1 -0.436588005092254 2 4 1 -0.63511533446365 1 4 1 -0.234879561500897 2 4 1 -0.401628516319902 1 4 1 -0.244232194192342 2 4 1 -0.228576135937129 1 4 1 -0.479791504967864 1 4 1 -0.623390007344234 2 4 1 -0.464270391434225 1 4 1 -0.366609113909401 2 4 1 -0.96274637101792 1 4 1 -0.576483736943277 2 4 1 -0.403581781434509 2 4 1 -0.639484168435868 2 4 1 -0.62685113380266 2 4 1 -0.504458338039312 1 4 1 -0.402748748564798 2 4 1 -0.214342237683536 2 4 1 -0.349689666731501 1 4 1 -0.920646509992372 2 4 1 -0.27944693184416 1 4 1 -0.44815474516242 2 4 1 -0.205465436502175 1 4 1 -0.58051540184786 2 4 1 -1.39132555887266 2 4 1 -0.364514642632317 2 4 1 -0.70643155804624 1 4 1 -0.323310314942546 2 4 1 -0.175430213692877 1 4 1 -0.324665401127095 2 4 1 -0.339971125940635 1 4 1 -0.357027781829466 2 4 1 -0.197390554919544 1 4 1 -1.19812104028543 1 4 1 -0.362575817654 2 4 1 -0.38070286088775 2 4 1 -0.468393488473505 2 4 1 -0.69309042773624 2 4 1 -0.489049481806581 2 4 1 -0.550976348834216 2 4 1 -0.359974012139019 2 4 1 -0.430662557513296 2 4 1 -0.230762012931962 1 4 1 -0.464324012490502 2 4 1 -0.312270150962999 2 4 1 -0.407424579565668 1 4 1 -1.69173371199779 2 4 1 -0.279156190669336 1 4 1 -1.09358568468554 2 4 1 -0.799141912889128 2 4 1 -0.471229301469417 2 4 1 -0.86570523590372 2 4 1 -0.83707942075239 2 4 1 -0.301114204037757 2 4 1 -0.26254546100438 1 4 1 -0.480187644205008 2 4 1 -1.26489425453197 1 4 1 -0.348490685840493 1 4 1 -1.80827808496735 2 4 1 -0.688328204172357 1 4 1 -0.327390856146583 1 4 1 -0.296891297267103 2 4 1 -0.537842103983798 2 4 1 -0.61750762959656 1 4 1 -0.514317599884349 2 4 1 -0.484707489461247 2 4 1 -1.02339278515862 2 4 1 -0.261167936671032 1 4 1 -0.304205039689018 2 4 1 -0.663106162019803 2 4 1 -0.717654991060284 2 4 1 -0.303534711764191 1 4 1 -0.330517625218401 2 4 1 -0.421085131279027 1 4 1 -0.205818877232142 2 4 1 -0.316549542909507 2 4 1 -0.204577406444421 1 4 1 -0.23632265848357 1 4 1 -0.548244833679468 2 4 1 -0.388542899040365 1 4 1 -0.664718352371726 2 4 1 -0.675589276314572 2 4 1 -0.797202947732321 2 4 1 -0.210476690239724 1 4 1 -0.368876471933727 1 4 1 -3.36252865887517 2 4 1 -1.08817433313251 2 4 1 -0.302267582741235 2 4 1 -0.528662894227581 2 4 1 -0.405868957673161 2 4 1 -0.78835957170157 2 4 1 -0.348515180995261 1 4 1 -0.400786810234322 2 4 1 -0.723106946883847 1 4 1 -0.302360607513298 1 4 1 -0.398732629036892 2 4 1 -0.404922292377484 1 4 1 -0.317519731671834 2 4 1 
-0.34850072573164 1 4 1 -0.236746589712662 1 4 1 -0.343500348276907 2 4 1 -0.913186037917405 1 4 1 -0.399298167621525 1 4 1 -0.384145099266467 2 4 1 -0.202414771592286 1 4 1 -1.18165252887664 2 4 1 -0.330694054268851 1 4 1 -0.32152973903766 2 4 1 -0.491640817467057 2 4 1 -0.355523637611526 2 4 1 -0.652568975873668 1 4 1 -0.83445083655571 2 4 1 -1.2259345279659 2 4 1 -0.359422092118994 2 4 1 -0.706531400652435 2 4 1 -0.393761838705891 2 4 1 -0.637784009231585 2 4 1 -0.72390141241909 1 4 1 -0.250576856916352 1 4 1 -0.294901560775219 2 4 1 -0.360607616504287 2 4 1 -0.94020952091997 2 4 1 -0.512044648878922 2 4 1 -0.795154031544809 2 4 1 -0.483945751646636 2 4 1 -1.00668402779973 1 4 1 -0.354165221763607 2 4 1 -0.589695938125084 1 4 1 -0.317505434986294 1 4 1 -1.43373221832127 1 4 1 -0.288390917154746 2 4 1 -0.405502079908248 1 4 1 -0.446525389342848 2 4 1 -0.532552597088654 1 4 1 -0.313955963239798 2 4 1 -0.353727419903597 2 4 1 -0.370222838745741 1 4 1 -0.53179693872268 2 4 1 -0.367449479194125 1 4 1 -0.552379937342038 2 4 1 -0.990656154189996 2 4 1 -0.339737828162076 2 4 1 -0.637641033103 2 4 1 -0.42876311661298 2 4 1 -0.23749457142906 2 4 1 -0.319603538159136 2 4 1 -0.374605787276471 1 4 1 -0.849512129786118 2 4 1 -1.00104076228899 2 4 1 -0.198287811121547 1 4 1 -0.298821665244969 2 4 1 -0.291809945237506 1 4 1 -0.629558789118005 2 4 1 -0.387853650221803 2 4 1 -0.403253897590665 1 4 1 -0.467497496264389 2 4 1 -0.76163717207652 2 4 1 -0.509727112968688 2 4 1 -0.298777861255781 1 4 1 -0.511840234402182 2 4 1 -0.285361728745086 2 4 1 -0.597484562760948 2 4 1 -0.289800259644921 2 4 1 -0.583545618964623 2 4 1 -0.567804592187857 1 4 1 -0.507416826265034 1 4 1 -0.802494675316203 2 4 1 -0.977640762711063 1 4 1 -0.310065306648296 1 4 1 -0.345153633596149 2 4 1 -0.347175805412226 1 4 1 -0.412977137398828 2 4 1 -0.285153755999717 1 4 1 -0.239659186319816 2 4 1 -0.339455800385343 2 4 1 -0.268577501046517 2 4 1 -0.658394714841171 1 4 1 -0.778666141135073 2 4 1 -0.316640157058985 1 4 1 -0.223585419102432 1 4 1 -0.865653039717611 2 4 1 -0.366142103215405 1 4 1 -1.06763238053885 1 4 1 -0.649850247862938 1 4 1 -0.512047384065615 2 4 1 -0.416754656003573 2 4 1 -0.251040108432236 1 4 1 -0.360104447735964 2 4 1 -0.69217517705459 2 4 1 -1.20890660646077 2 4 1 -0.395473147659342 1 4 1 -2.01509671350874 2 4 1 -0.257146924592025 1 4 1 -0.56334930950831 2 4 1 -0.283190539517862 1 4 1 -0.926385510466009 2 4 1 -0.670839555017221 1 4 1 -0.209765778999125 1 4 1 -0.736401422311992 2 4 1 -0.652193628618288 1 4 1 -0.21004611504302 1 4 1 -0.427729746817706 2 4 1 -0.279727994007439 1 4 1 -0.973427619297358 1 4 1 -0.484447288964673 2 4 1 -0.773172203339819 1 4 1 -0.451517138859729 2 4 1 -0.366340544422608 1 4 1 -0.537698395391627 1 4 1 -0.502490066721104 2 4 1 -0.320507642052774 1 4 1 -0.521457049830957 2 4 1 -0.516601763002701 2 4 1 -0.335922415995894 1 4 1 -0.433364895325246 2 4 1 -0.680419249073426 2 4 1 -0.297293517357851 1 4 1 -0.442221587569742 1 4 1 -0.27166805138535 1 4 1 -0.61736599846165 2 4 1 -0.486875798898661 1 4 1 -0.221131066783682 1 4 1 -0.631893098333021 1 4 1 -0.680334804706098 2 4 1 -0.439239414100042 2 4 1 -0.844299183787246 1 4 1 -0.879748183908203 2 4 1 -0.881004157734928 2 4 1 -0.503475477012917 2 4 1 -0.345571841551121 2 4 1 -0.60106814148883 2 4 1 -0.380097253620339 2 4 1 -0.414803174135158 2 4 1 -0.205017263198972 1 4 1 -0.284670026282373 1 4 1 -0.362803142116261 1 4 1 -0.244497025354602 1 4 1 -0.227388249258125 2 4 1 -0.427473866189717 2 4 1 -0.753126345774075 2 4 2 -0.390711548236765 1 4 2 
-0.270643332729808 1 4 2 -0.226363776905272 2 4 2 -0.465280739944602 2 4 2 -0.436604485335886 2 4 2 -0.682522814767707 2 4 2 -0.346349842215826 1 4 2 -0.661058914850539 1 4 2 -0.283908385401013 1 4 2 -0.439045953436687 2 4 2 -0.357784731597969 1 4 2 -0.201869884529536 1 4 2 -0.360806495344085 1 4 2 -0.925367484247164 2 4 2 -0.415837898905798 1 4 2 -0.227962933710507 1 4 2 -0.312737890779197 2 4 2 -0.432575973780529 1 4 2 -0.253471376207378 2 4 2 -0.288980617785268 2 4 2 -0.205568280701361 1 4 2 -0.355465859040812 1 4 2 -0.533249899426881 1 4 2 -0.428341046561918 1 4 2 -0.468946411082872 2 4 2 -0.64190965481191 2 4 2 -0.208274022160394 1 4 2 -0.217167590394544 1 4 2 -0.348200391766719 1 4 2 -0.617665880709998 2 4 2 -1.2550193745344 2 4 2 -0.566722271395131 2 4 2 -0.9275505182546 1 4 2 -0.360441788744519 2 4 2 -0.334083907534406 1 4 2 -0.621024094129476 1 4 2 -0.21468225890971 1 4 2 -0.764606539690629 2 4 2 -0.513552230720492 2 4 2 -0.274575330019494 1 4 2 -0.442639118882473 2 4 2 -0.333565588104835 1 4 2 -0.32578303931817 1 4 2 -0.937568367512386 1 4 2 -0.267480287372774 2 4 2 -0.531291803062624 2 4 2 -0.356804813639121 2 4 2 -0.229458300439067 1 4 2 -0.491510334559396 1 4 2 -0.294506874060557 2 4 2 -0.450689439535521 2 4 2 -0.330518336785702 2 4 2 -0.629929903785771 2 4 2 -0.556005918181015 2 4 2 -0.298001295445146 2 4 2 -0.373025553749853 1 4 2 -0.470482102722226 1 4 2 -0.341137316160965 2 4 2 -0.395674428886868 1 4 2 -0.612038098963857 2 4 2 -0.357119967218812 1 4 2 -0.248258993043406 2 4 2 -0.43888644731273 2 4 2 -0.236904461349091 1 4 2 -0.175318379144127 1 4 2 -0.295979215918683 2 4 2 -0.830651376675517 2 4 2 -0.282437606937073 2 4 2 -0.398046893313355 1 4 2 -0.438749921630364 2 4 2 -0.910170394806412 1 4 2 -0.612914933569449 2 4 2 -0.321192322725182 1 4 2 -0.568814752481222 2 4 2 -0.897945007327145 2 4 2 -0.498239908323843 2 4 2 -1.13610274041636 1 4 2 -0.38015282393527 2 4 2 -1.88042120748211 1 4 2 -0.475694408506186 2 4 2 -0.783173222507789 1 4 2 -1.15066675545721 2 4 2 -0.557329951105813 2 4 2 -0.571462117657645 1 4 2 -0.67890197694106 1 4 2 -0.602173033246056 1 4 2 -0.394020910343337 2 4 2 -0.313420994224523 1 4 2 -0.286589909536176 1 4 2 -0.705950642195192 2 4 2 -0.229774735082982 1 4 2 -1.23278117367534 1 4 2 -0.22937452069431 1 4 2 -0.280442062594111 2 4 2 -0.327747771853932 2 4 2 -0.235540251439368 1 4 2 -0.482389789621482 2 4 2 -0.221470341978539 1 4 2 -1.543390472393 1 4 2 -0.407965459047609 2 4 2 -0.861415260414181 2 4 2 -0.257801138767577 1 4 2 -1.08565280721424 2 4 2 -0.581197798001077 1 4 2 -0.324088998372876 2 4 2 -0.357086553411739 1 4 2 -1.40348937798243 1 4 2 -0.193914649716736 1 4 2 -1.21046346658098 1 4 2 -0.67741215417897 1 4 2 -0.329541261493576 1 4 2 -0.708070291827501 2 4 2 -0.799545815866931 1 4 2 -0.284679156256938 2 4 2 -0.394752939066119 2 4 2 -1.28735898040161 2 4 2 -0.28801399136096 2 4 2 -0.648970026347328 1 4 2 -0.441656916239289 1 4 2 -0.255895819732594 1 4 2 -0.273287514635962 1 4 2 -0.355331159272485 2 4 2 -0.660329500255681 2 4 2 -1.60223591193477 2 4 2 -0.312706096050944 2 4 2 -0.370271886124509 1 4 2 -0.4718314797338 2 4 2 -0.27614855720277 1 4 2 -0.483448608439343 1 4 2 -0.234968080618399 1 4 2 -0.391112229320968 2 4 2 -0.462383731150063 1 4 2 -1.84554462107946 2 4 2 -0.368383992484067 2 4 2 -0.427732918131404 2 4 2 -0.261183473976277 1 4 2 -0.443004159664207 1 4 2 -0.319045852909917 1 4 2 -0.459946072664613 1 4 2 -0.308634108085083 2 4 2 -0.698726479780432 2 4 2 -0.236639844152539 1 4 2 -0.331687357050262 1 4 2 -0.478759287978135 2 4 2 
-0.368101563033333 2 4 2 -0.305576932610112 1 4 2 -0.429846006190982 2 4 2 -0.760633202811727 2 4 2 -0.413548649793985 2 4 2 -0.854550457981442 1 4 2 -0.793486300366621 2 4 2 -0.865787783075263 2 4 2 -0.29645719210445 2 4 2 -0.286957661251985 2 4 2 -0.799348039285717 2 4 2 -0.655555119608612 2 4 2 -0.742570590536339 2 4 2 -0.618028617950327 2 4 2 -0.287032292482981 1 4 2 -0.810817641683865 2 4 2 -0.542866362535735 2 4 2 -0.461735825500108 1 4 2 -0.360081585122882 2 4 2 -0.686393593652603 2 4 2 -0.406506979162325 1 4 2 -0.417722137978479 2 4 2 -0.199502298944795 1 4 2 -0.283998907427584 1 4 2 -0.641575484713698 1 4 2 -0.536268680798931 1 4 2 -1.30578408194073 2 4 2 -0.184223471103508 1 4 2 -0.69467941754697 2 4 2 -0.22448622806897 2 4 2 -0.286771373284366 1 4 2 -0.688354585035901 1 4 2 -0.967997645085735 1 4 2 -0.379271417939524 1 4 2 -0.657624551473419 2 4 2 -0.288156373424384 1 4 2 -0.798729464202731 2 4 2 -0.299408333666402 2 4 2 -0.569629360200182 2 4 2 -0.368672408571234 2 4 2 -0.689419089326237 2 4 2 -1.39439675244552 2 4 2 -0.349220352929323 1 4 2 -0.83628328787567 1 4 2 -0.954621512616944 1 4 2 -0.974085341355247 2 4 2 -0.232985801064047 1 4 2 -0.392841148562383 1 4 2 -0.215131830305328 1 4 2 -0.293782797485867 2 4 2 -0.271250958358236 1 4 2 -0.624040890183475 1 4 2 -0.805265827410823 2 4 2 -0.294091703458089 2 4 2 -0.710826711143953 2 4 2 -1.22718633292131 1 4 2 -0.634166705031742 2 4 2 -0.34847720815033 2 4 2 -0.401314348829917 1 4 2 -0.232628383153352 1 4 2 -0.732405673120817 1 4 2 -0.721193585825821 2 4 2 -0.409057462385994 2 4 2 -0.584778932939187 1 4 2 -0.232717575480756 2 4 2 -0.815862413646243 2 4 2 -0.384196628908383 1 4 2 -0.578948587380573 1 4 2 -1.09711900247466 1 4 2 -0.848385257280831 2 4 2 -0.569159709314235 2 4 2 -0.549417965516691 1 4 2 -0.732526266656702 2 4 2 -0.308790829645616 2 4 2 -0.99654060065045 1 4 2 -0.589889727186112 1 4 2 -0.5484884279984 2 4 2 -0.479934907640303 2 4 2 -0.270818907737512 1 4 2 -0.21367589349819 2 4 2 -0.400772720552252 1 4 2 -1.19500378366517 2 4 2 -0.247627583441975 2 4 2 -0.344233591133932 2 4 2 -0.226694192794682 2 4 2 -0.838668645891454 1 4 2 -0.706466288045509 1 4 2 -0.294054475401509 2 4 2 -0.686122317625166 1 4 2 -0.403497095541725 2 4 2 -0.243178882660292 2 4 2 -0.699616329368069 1 4 2 -0.307343994601662 1 4 2 -0.42437788244234 1 4 2 -0.502486986226067 2 4 2 -1.28866364433504 2 4 2 -0.305567946577639 2 4 2 -0.378703212426002 2 4 2 -0.792580021830969 1 4 2 -0.304089937760933 1 4 2 -0.311408526252319 1 4 2 -0.430396597223223 2 4 2 -0.297243961067233 2 4 2 -0.44197992729068 2 4 2 -0.316197941983761 2 4 2 -0.352238116060612 1 4 2 -0.537215363451521 1 4 2 -0.313054348616914 2 4 2 -2.41486023609302 2 4 2 -0.278189933299107 1 4 2 -0.608375967955867 2 4 2 -0.322433656896367 2 4 2 -0.510101175645582 2 4 2 -0.536008515884717 2 4 2 -0.411222727405007 1 4 2 -0.322077650775546 1 4 2 -0.376593462100595 1 4 2 -0.373346452642805 1 4 2 -0.204108290848997 1 4 2 -1.13673544543737 1 4 2 -0.608043497918468 2 4 2 -0.492504866468463 1 4 2 -2.64459046440055 2 4 2 -0.451189826082847 2 4 2 -0.576354637504905 2 4 2 -1.03341312876181 2 4 2 -1.06203790709498 2 4 2 -0.467283293752499 1 4 2 -0.59305758857692 1 4 2 -1.02820034315313 2 4 2 -0.243096585776871 2 4 2 -0.241979269170888 1 4 2 -2.12555571038889 2 4 2 -0.251788236043855 1 4 2 -0.723821381450342 1 4 2 -0.433526132962029 2 4 2 -0.21544327239061 1 4 2 -0.393688512078822 1 4 2 -0.284221169948672 2 4 2 -0.576093893566291 2 4 2 -1.15305709835803 1 4 2 -0.577913219145157 1 4 2 -0.715910910811142 1 4 2 
-0.75590490398109 1 4 2 -0.360319468198415 2 4 2 -0.632185262786175 1 4 2 -0.18238157652647 1 4 2 -0.562586077551566 2 4 2 -0.215854179362236 1 4 2 -0.312933166228936 2 4 2 -0.555832005681486 2 4 2 -0.318457487234359 1 4 2 -0.582899849996915 2 4 2 -0.245962703292602 1 4 2 -0.465283341657699 2 4 2 -0.414444633194467 2 4 2 -1.24551335125207 2 4 2 -0.545757922901803 1 4 2 -0.231000979626702 1 4 2 -0.99685036055635 2 4 2 -0.384519234744453 2 4 2 -0.313068825633183 2 4 2 -0.272366676123266 2 4 2 -0.369113998245052 1 4 2 -0.852502642553233 2 4 2 -0.500222326108646 2 4 2 -1.33492159400307 2 4 2 -0.334531945802007 2 4 2 -0.345532493468953 2 4 2 -0.411524900150396 1 4 2 -0.412179135130737 1 4 2 -0.181615360189367 1 4 2 -0.362837785162399 2 4 2 -0.357363228746261 1 4 2 -0.341279195040987 1 4 2 -0.509319762686041 2 4 2 -0.832335744829212 1 4 2 -0.626841628293068 2 4 2 -0.410567315100069 2 4 2 -0.323450520248485 1 4 2 -0.460402879304217 2 4 2 -0.978638366687773 2 4 2 -0.391658527387104 2 4 2 -0.451809465395574 2 4 2 -0.593742477301499 2 4 2 -0.662146826253225 2 4 2 -0.437989819033819 1 4 2 -0.269628201689827 2 4 2 -0.435351210611835 1 4 2 -0.873512993112175 2 4 2 -0.303862086205385 2 4 2 -0.273370196428826 2 4 2 -0.346348296895532 1 4 2 -0.998766995801073 1 4 2 -0.500807556509458 2 4 2 -0.377700114272957 1 4 2 -0.557960766756848 2 4 2 -0.445694136085316 1 4 2 -0.45906855277066 2 4 2 -0.754461289543913 1 4 2 -0.400053087442967 2 4 2 -0.551849440102895 2 4 2 -0.442947303118676 2 4 2 -0.333326868270498 2 4 2 -0.299790315442093 2 4 2 -0.553945654472965 2 4 2 -0.451050492701825 2 4 2 -0.324222556361926 1 4 2 -0.535724245715277 2 4 2 -0.238621696921591 2 4 2 -1.05972477089422 2 4 2 -0.381000045748109 2 4 2 -0.622451138992948 2 4 2 -0.373517923040452 1 4 2 -0.379879533262278 2 4 2 -0.61017258538097 1 4 2 -0.373931156084275 2 4 2 -0.271265445543895 2 4 2 -1.13340860093329 2 4 2 -0.48332741629014 2 4 2 -0.278034332012135 2 4 2 -0.617732524410426 2 4 2 -0.295315882068786 1 4 2 -0.399809513238085 2 4 2 -0.593796551838838 1 4 2 -0.448034890388041 1 4 2 -0.507568706345751 1 4 2 -0.361775441496837 2 4 2 -0.680971086222875 2 4 2 -0.760052527880231 1 4 2 -0.523382086841163 2 4 2 -0.782073278018563 2 4 2 -0.366504296431615 1 4 2 -0.546586584253047 1 4 2 -0.214235924471223 2 4 2 -0.366212595435754 1 4 2 -0.291167146710121 1 4 2 -0.409610194588736 2 4 2 -0.356114057741968 2 4 2 -0.219739077268034 1 4 2 -0.672870970697509 2 4 2 -0.459606203597495 1 4 2 -0.526098601526635 2 4 2 -0.372672585132755 2 4 2 -0.597289086386129 2 4 2 -0.626632354698616 1 4 2 -0.332982149438951 1 4 2 -0.278836235260797 1 4 2 -0.322540461337514 2 4 2 -0.78120963284111 1 4 2 -0.23866016678153 1 4 2 -0.50573581272823 1 4 2 -0.307268642300638 1 4 2 -0.780733176074318 2 4 2 -0.524376179559523 2 4 2 -0.189183803390314 1 4 2 -0.845904136307582 2 4 2 -0.278587013457745 1 4 2 -0.225849178715147 1 4 2 -0.505248402948615 2 4 2 -0.366510147437898 2 4 2 -0.786656646299225 1 4 2 -0.28859561129487 2 4 2 -1.0544558000918 2 4 2 -0.393129941472734 2 4 2 -0.209426913346118 1 4 2 -0.557848900331885 1 4 2 -0.428353664374262 1 4 2 -0.530074401536137 2 4 2 -0.364796976039387 1 4 2 -0.31697830785037 2 4 2 -0.257110187742915 2 4 2 -0.860619019042301 2 4 2 -0.275753333898939 2 4 2 -0.397675327020122 2 4 2 -0.400511098922939 2 4 2 -0.948090448726289 2 4 2 -0.792802580857493 2 4 2 -0.492364374799986 2 4 2 -0.535712706435388 2 4 2 -0.303750231704068 2 4 2 -0.418198300886687 1 4 2 -0.242831317224143 1 4 2 -0.366807263977893 1 4 2 -0.950283301933651 2 4 2 -0.482878099739341 1 
4 2 -0.97631347487202 1 4 2 -0.244665797059032 1 4 2 -0.383566210558107 2 4 2 -0.301865356343443 1 4 2 -0.649745063591824 2 4 2 -0.697050710541562 2 4 2 -0.809378147682444 2 4 2 -0.462123476270074 2 4 2 -0.245248883261683 1 4 2 -0.243212808416138 2 4 2 -0.314440748173076 2 4 2 -1.04698406308231 2 4 2 -0.544087182534972 2 4 2 -0.406124565426359 2 4 2 -0.718327290138134 2 4 2 -0.595497257669703 1 4 2 -0.666308342077789 1 4 2 -0.22438634675569 1 4 2 -0.365080530436751 1 4 2 -0.461875979293363 1 4 2 -0.815985780732118 2 4 2 -0.332674339664941 2 4 2 -0.567202795788581 2 4 2 -0.673655794950335 2 4 2 -0.477370669971454 2 4 2 -0.479764717684493 1 4 2 -0.68004172954729 2 4 2 -0.257963031440288 1 4 2 -0.321677163018359 2 4 2 -0.31627004676119 1 4 2 -0.529702779903144 2 4 2 -0.248585059238814 1 4 2 -0.282689321559121 1 4 2 -0.470660250683795 1 4 2 -0.250018593647379 1 4 2 -0.374469379392143 2 4 2 -0.275948837992336 1 4 2 -0.404124761645375 2 4 2 -0.461681809050967 2 4 2 -0.564107772448883 2 4 2 -0.384697573024493 1 4 2 -0.554591581848138 2 4 2 -0.584464813875503 2 4 2 -0.342621685779372 1 4 2 -1.16358906465507 1 4 2 -0.988193013514123 2 4 2 -0.332789439955037 2 4 2 -1.25271325447985 2 4 2 -0.177397789305329 1 4 2 -0.362176598671433 1 4 2 -0.419965871836781 1 4 2 -0.60515910819425 2 4 2 -0.31624308445277 2 4 2 -0.844077751163308 1 4 2 -0.3735652345134 1 4 2 -0.291452020039806 2 4 2 -0.232044027590617 2 4 2 -0.401670392843103 1 4 2 -0.453346479512868 1 4 2 -0.538601982527949 2 4 2 -0.830402429105011 2 4 2 -0.335367042770124 2 4 2 -1.69818743759647 2 4 2 -0.685148294374147 2 4 2 -0.563455535517706 1 4 2 -0.346418976965531 2 4 2 -0.662726370377329 2 4 2 -0.325948295612478 1 4 2 -0.790574826320759 2 4 2 -0.350496877596168 2 5 1 -0.874105967045303 2 5 1 -0.618625438899854 2 5 1 -0.418505998819245 2 5 1 -0.314600840328866 2 5 1 -0.381663329894006 2 5 1 -0.45975664723401 2 5 1 -0.629213989765841 2 5 1 -0.54170350431826 2 5 1 -0.765292807564366 2 5 1 -0.385154972025414 2 5 1 -1.0123064604624 1 5 1 -0.594468240472198 1 5 1 -0.432884690638713 1 5 1 -0.489852620287608 1 5 1 -0.439171145526351 2 5 1 -1.22541928040664 2 5 1 -0.489630344610841 1 5 1 -0.274646407464894 2 5 1 -0.321919494972732 2 5 1 -0.77321886884371 2 5 1 -0.618168337784024 1 5 1 -0.534313291237878 1 5 1 -0.248497494092332 2 5 1 -0.902277840078671 2 5 1 -0.362848845387321 1 5 1 -0.51771202639715 1 5 1 -0.354988230953876 1 5 1 -0.408263930712262 1 5 1 -0.515818454942625 2 5 1 -1.13442796786063 1 5 1 -0.922450951783412 2 5 1 -0.918236744271086 1 5 1 -0.615340787166512 2 5 1 -1.33086229422069 2 5 1 -0.773498580150888 1 5 1 -0.355111810070351 2 5 1 -0.463437947168048 1 5 1 -0.448309657003003 2 5 1 -0.6988174528086 1 5 1 -0.681543871813962 2 5 1 -0.388077823480071 1 5 1 -0.769550702497453 2 5 1 -0.276308219836844 2 5 1 -0.284812859728204 2 5 1 -0.362311034619243 2 5 1 -1.17071306250983 2 5 1 -0.866609929152046 2 5 1 -0.835449276907315 1 5 1 -0.398346916321057 2 5 1 -0.358189328108751 2 5 1 -0.554585348153169 1 5 1 -0.39760626253423 1 5 1 -0.392260790271942 1 5 1 -0.374565507395057 1 5 1 -0.490572674372733 1 5 1 -0.262430183982469 2 5 1 -0.406476847741613 2 5 1 -0.734398707641991 2 5 1 -0.808457812365959 2 5 1 -0.48642275256533 2 5 1 -0.652503153805679 2 5 1 -0.549320171737622 1 5 1 -0.363404443230036 1 5 1 -0.840787255553956 2 5 1 -0.659531467134479 2 5 1 -0.52542297118843 2 5 1 -1.641319692926 1 5 1 -0.349747975299305 2 5 1 -0.696664492661082 1 5 1 -0.999158454741339 2 5 1 -0.494799923420879 1 5 1 -0.696955053780108 2 5 1 -0.320987556616071 1 5 1 
-0.49483529420845 2 5 1 -0.327928353910945 2 5 1 -0.243634957449529 2 5 1 -0.382623755383196 2 5 1 -0.340391545486682 1 5 1 -0.370852704813291 1 5 1 -0.513953236176426 1 5 1 -0.98488957304638 1 5 1 -0.72920423163152 1 5 1 -0.417878765259703 2 5 1 -0.969267542459561 2 5 1 -0.588656109235448 1 5 1 -0.309310108339579 2 5 1 -0.359079679822589 2 5 1 -0.432293522236456 2 5 1 -0.365699146412261 2 5 1 -0.280030960748012 1 5 1 -0.267439129890066 1 5 1 -1.64146572476405 1 5 1 -0.383985418553408 2 5 1 -0.820780704522388 2 5 1 -0.287252707592816 2 5 1 -0.471656559488085 1 5 1 -0.688654531662119 1 5 1 -1.51116458250868 1 5 1 -0.256595632268106 1 5 1 -1.44192189522027 1 5 1 -0.423719184953423 2 5 1 -0.474641530750688 1 5 1 -0.95167387507805 1 5 1 -0.249940083181891 2 5 1 -0.948956034762691 1 5 1 -0.497334710175687 2 5 1 -0.401258517347727 1 5 1 -0.593937951684305 1 5 1 -0.399453837192165 1 5 1 -0.339706036542822 2 5 1 -0.358808063230196 2 5 1 -0.399870518236517 2 5 1 -0.402281253533687 1 5 1 -1.21105144216928 2 5 1 -0.276484649301743 2 5 1 -0.614037537089416 1 5 1 -0.468119101565478 2 5 1 -0.363072435808267 2 5 1 -0.424349274964132 2 5 1 -0.817780271882891 2 5 1 -0.348979602184705 2 5 1 -0.428954419119663 1 5 1 -0.377745649623147 2 5 1 -1.52858746330548 1 5 1 -0.38829844572601 2 5 1 -0.272639358091468 1 5 1 -0.410898025055377 2 5 1 -0.345789355606247 1 5 1 -0.29532986306681 1 5 1 -1.41353962836894 1 5 1 -0.405012581079275 2 5 1 -0.308523509241669 1 5 1 -0.277244934012402 1 5 1 -0.317403964863673 2 5 1 -0.590597620939944 2 5 1 -2.60905439597824 2 5 1 -0.312334336841847 1 5 1 -0.437294940463007 2 5 1 -0.749488878001604 1 5 1 -1.55915796602619 2 5 1 -0.672396814187568 1 5 1 -0.32268652713371 1 5 1 -2.33549213748734 2 5 1 -0.533821174405078 1 5 1 -0.29694295172823 2 5 1 -0.428694990290054 1 5 1 -0.744566758964104 2 5 1 -0.902120586395754 2 5 1 -0.373865886414247 2 5 1 -0.34709384467313 2 5 1 -0.579724512470357 2 5 1 -0.480526240272104 2 5 1 -0.280388545170291 2 5 1 -0.327331492746605 2 5 1 -0.307049013223802 1 5 1 -0.67645551514592 2 5 1 -1.45539495712329 1 5 1 -0.414781606222836 2 5 1 -0.464869946932275 2 5 1 -0.502525874440138 1 5 1 -0.395815499456874 2 5 1 -0.354740455281115 2 5 1 -0.387644457183764 1 5 1 -0.441119885285695 2 5 1 -0.541299937122488 1 5 1 -0.462769272386231 2 5 1 -0.568510300909836 1 5 1 -0.739186686655553 2 5 1 -0.590037876611731 1 5 1 -0.876929440339061 2 5 1 -0.38745072608018 1 5 1 -0.874268369237665 1 5 1 -0.595292592424216 1 5 1 -0.704226262163879 2 5 1 -0.297080465628835 1 5 1 -0.873924224577836 1 5 1 -0.466850429992947 2 5 1 -2.19102033049238 2 5 1 -0.707932653803704 2 5 1 -0.52289483412265 1 5 1 -0.291400893995193 2 5 1 -0.334262174169645 1 5 1 -0.413969229169715 2 5 1 -0.368758425061548 2 5 1 -0.480077442145665 1 5 1 -1.00325797217147 1 5 1 -0.34303135632964 1 5 1 -0.449371536501468 2 5 1 -0.626712575183433 2 5 1 -0.300330886841042 1 5 1 -0.601032456393769 1 5 1 -0.398517086169262 2 5 1 -0.492233676994753 1 5 1 -0.559665551547645 2 5 1 -0.322079290494793 1 5 1 -1.00119635579758 2 5 1 -0.273572712925721 2 5 1 -0.997674531871953 2 5 1 -0.660614796170524 2 5 1 -0.682509343887865 2 5 1 -0.709029245629886 1 5 1 -0.450254655958821 2 5 1 -0.489362486290041 2 5 1 -0.256831501902699 1 5 1 -0.277749492989249 2 5 1 -0.570615557380466 1 5 1 -0.660249945470972 1 5 1 -0.913042539886274 2 5 1 -0.276157449004863 2 5 1 -0.42554491044995 1 5 1 -0.504500759944897 1 5 1 -0.464430914488706 2 5 1 -0.619432554673588 2 5 1 -0.344307477421206 2 5 1 -0.353936127201951 2 5 1 -0.542570048569743 1 5 1 
-0.670056529092721 2 5 1 -1.69346839376907 1 5 1 -0.329122766203157 2 5 1 -0.580777607436693 2 5 1 -0.425192302825809 2 5 1 -1.01918716149399 1 5 1 -0.451237334938158 2 5 1 -0.365780961202646 1 5 1 -0.435969164806333 2 5 1 -0.446219693449255 2 5 1 -0.86148405175356 1 5 1 -0.378754506829674 2 5 1 -0.636129328599341 2 5 1 -0.484019299704474 1 5 1 -0.35361046366484 2 5 1 -0.778577922802192 2 5 1 -0.553712077213866 2 5 1 -0.576470926490595 1 5 1 -0.956990527979208 2 5 1 -0.676276969153139 2 5 1 -0.600336478541173 2 5 1 -1.1497148815834 2 5 1 -0.311559946333853 1 5 1 -0.32583211853549 2 5 1 -0.5129988563233 2 5 1 -0.706544486405484 2 5 1 -0.437094165223538 1 5 1 -0.824751569569288 1 5 1 -0.809546777157912 2 5 1 -0.537141407451724 2 5 1 -0.662502129558617 1 5 1 -0.501691224984125 2 5 1 -0.30901205010725 2 5 1 -0.47867098754842 1 5 1 -0.456986416516351 2 5 1 -0.275926112108479 1 5 1 -0.410807847551661 2 5 1 -0.828356765357645 1 5 1 -0.40635702823069 2 5 1 -0.986564014728032 2 5 1 -0.567057974036205 2 5 1 -0.322394251750056 2 5 1 -0.722132927785589 2 5 1 -0.409410905169481 2 5 1 -0.709898951889328 1 5 1 -0.541547402623561 1 5 1 -0.256670165225662 2 5 1 -0.49411541489472 1 5 1 -0.919104100863578 1 5 1 -0.549914334516905 2 5 1 -0.505235686112949 2 5 1 -0.282338529860808 2 5 1 -0.383976247257099 2 5 1 -0.680828005837556 1 5 1 -0.540327329724261 1 5 1 -1.05569191297992 2 5 1 -0.349514052734471 1 5 1 -0.496154085894348 2 5 1 -0.456989724057387 2 5 1 -0.730859840316074 1 5 1 -1.05327361608711 2 5 1 -0.614480004129318 2 5 1 -1.09479710995942 2 5 1 -0.456403655924994 2 5 1 -0.248929266015883 2 5 1 -0.345437497681469 2 5 1 -0.390161598039643 2 5 1 -1.59636606625796 1 5 1 -0.297653976651779 1 5 1 -0.53027456008198 2 5 1 -0.325786425396216 1 5 1 -0.412885828047688 1 5 1 -0.430942952085056 2 5 1 -0.628988308893905 2 5 1 -0.303432064418254 2 5 1 -0.502479650486677 2 5 1 -0.374956563195693 2 5 1 -0.317036830382213 1 5 1 -0.360257190747668 2 5 1 -0.392605112045462 2 5 1 -0.263946843562968 2 5 1 -0.543352649787032 2 5 1 -0.451523452374327 2 5 1 -0.588811763316336 2 5 1 -0.496429527557548 1 5 1 -0.363606147439157 1 5 1 -0.590754640562368 2 5 1 -0.399066967540575 2 5 1 -0.481461654111483 1 5 1 -0.44511198129432 2 5 1 -0.733957111974783 2 5 1 -0.233363499390124 2 5 1 -0.290775652443224 2 5 1 -1.0231168093639 2 5 1 -0.531611729352149 1 5 1 -0.825587045098161 2 5 1 -0.902074251648083 2 5 1 -0.323175839216028 2 5 1 -0.505991444493804 2 5 1 -0.261543288336422 1 5 1 -0.554486494360695 1 5 1 -0.351246068594391 2 5 1 -0.515328779203654 2 5 1 -0.639677471438791 2 5 1 -0.480386256226869 2 5 1 -0.712967189057983 2 5 1 -1.34226678264378 2 5 1 -0.522093483130097 2 5 1 -0.335177947867927 1 5 1 -0.427739523462974 2 5 1 -0.724445283326656 2 5 1 -0.520065928077709 2 5 1 -0.690085575348652 2 5 1 -0.395666881799587 2 5 1 -0.253416617473968 2 5 1 -0.292975240297012 2 5 1 -0.880042969703422 2 5 1 -1.18765090929155 2 5 1 -0.471824574051573 1 5 1 -0.3819371403618 1 5 1 -0.489731712873816 2 5 1 -0.517213617852645 2 5 1 -1.02045998930259 1 5 1 -0.353507003760541 2 5 1 -0.28738904500089 2 5 1 -0.761600567581336 1 5 1 -0.744138933405505 1 5 1 -0.628607646940531 1 5 1 -0.892684033273368 2 5 1 -0.415671512759732 2 5 1 -0.862657966498822 2 5 1 -0.44210809069166 2 5 1 -0.524200017230228 1 5 1 -0.324994378309518 2 5 1 -0.426992098360617 2 5 1 -0.421266101618318 1 5 1 -0.592739096667285 2 5 1 -0.555131716241223 2 5 1 -0.881544992259764 2 5 1 -0.712182588708822 2 5 1 -0.744752154767 2 5 1 -0.606487498310839 2 5 1 -0.461699520660315 2 5 1 
-0.745717699517249 1 5 1 -1.24267939276122 2 5 1 -1.06725166167186 2 5 1 -0.688125268149 2 5 1 -0.322706592623082 2 5 1 -0.625340088917108 1 5 1 -0.481462311469004 2 5 1 -1.11856169670045 2 5 1 -0.496653373252084 2 5 1 -0.306702987850203 1 5 1 -0.299778976884002 2 5 1 -0.255084547535916 2 5 1 -0.324409887328244 2 5 1 -0.28656822676276 2 5 1 -0.685862574486554 1 5 1 -0.25080565529039 2 5 1 -0.423005534054917 2 5 1 -0.567258936488528 2 5 1 -0.453882515712717 2 5 1 -0.889040190992262 1 5 1 -0.527722880894638 1 5 1 -0.25678851823785 1 5 1 -0.569171422366059 1 5 1 -0.438002657123947 1 5 1 -1.07105043333329 2 5 1 -0.371311478728212 2 5 1 -0.427478236470728 2 5 1 -0.690413385769963 2 5 1 -1.23990009741994 1 5 1 -0.35055316921775 2 5 1 -0.320095074553569 2 5 1 -0.360451278466255 2 5 1 -1.3783410124737 1 5 1 -0.342094531805991 2 5 1 -0.939591296738548 1 5 1 -0.377754647410711 2 5 1 -0.397257993518603 2 5 1 -0.556583275671363 2 5 1 -0.814148052150712 1 5 1 -0.445052953408459 2 5 1 -0.635785394694251 1 5 1 -1.45734899209363 2 5 1 -0.553038339349569 2 5 1 -0.487114354718737 2 5 1 -0.326029703773354 2 5 1 -0.378499631880882 1 5 1 -0.249998652473012 2 5 1 -0.777922576637263 1 5 1 -0.344758853651022 2 5 1 -0.469680901216615 1 5 1 -0.572885048100009 2 5 1 -0.813047280725817 2 5 1 -0.31824550606856 1 5 1 -1.52076632666272 2 5 1 -0.525665900915789 2 5 1 -0.835443800969408 2 5 1 -0.411960036228098 2 5 1 -0.366444785673607 2 5 1 -0.583008070195596 2 5 1 -0.306789626562622 2 5 1 -0.32265104149373 2 5 1 -0.60706446511574 1 5 1 -0.660415820740065 1 5 1 -0.41961055869239 2 5 1 -1.18355949324619 2 5 1 -1.7831326042876 1 5 1 -1.32079987606431 1 5 1 -0.31469679737234 2 5 1 -0.742130214925183 2 5 1 -0.278159578075745 2 5 1 -0.944262180658039 1 5 1 -1.00185185390192 2 5 1 -0.723912539579018 2 5 1 -0.730046670620961 2 5 1 -0.594612038608092 2 5 1 -0.609003026408566 1 5 1 -0.703005542893214 2 5 1 -0.371613566829059 2 5 1 -0.839961901535526 2 5 1 -0.444097755957375 2 5 1 -0.358077624513172 2 5 1 -0.284271913540958 2 5 1 -0.479974130859609 2 5 1 -0.545791838695864 1 5 1 -0.332965450514954 2 5 1 -0.591764667761429 2 5 1 -0.298607348317416 2 5 1 -0.247276738340079 2 5 1 -0.798461231259409 2 5 1 -0.441564925769082 2 5 1 -1.48128308355737 2 5 1 -0.31434796484399 1 5 1 -1.20347173161341 2 5 1 -0.739554951126451 1 5 1 -0.497097781531935 2 5 1 -0.484159304177581 1 5 1 -0.244849956861225 1 5 1 -0.504629073199244 1 5 1 -0.569133223143607 2 5 1 -1.12622703221084 2 5 1 -0.764250286320223 2 5 1 -0.484641004950062 2 5 1 -0.514942303502618 2 5 1 -0.605883128391681 2 5 1 -0.49588937623075 1 5 1 -0.396953900942646 2 5 1 -0.520906400655699 1 5 1 -0.745428404112575 2 5 1 -0.351085063768204 1 5 1 -0.996492367823443 2 5 1 -0.831728540518663 2 5 1 -0.277512426545143 2 5 1 -0.25988569649848 2 5 1 -0.309494901893108 2 5 1 -0.506418492133261 1 5 1 -1.0882032936743 2 5 1 -0.561957049307115 1 5 1 -0.818825222243945 2 5 1 -0.53741814818001 2 5 1 -0.398894432067162 1 5 1 -0.370547579851629 2 5 1 -0.441101002850091 1 5 1 -0.335051494643735 1 5 1 -0.359857472939855 1 5 1 -1.23898294637209 2 5 1 -0.303629801372895 2 5 1 -1.27191285267288 2 5 1 -0.255294810871718 2 5 1 -0.387744030748659 2 5 1 -0.950171765183971 2 5 1 -0.338933464143833 2 5 1 -0.374472149784474 1 5 1 -0.850121988549967 1 5 1 -0.368357738596241 2 5 1 -0.422292677162516 1 5 1 -0.670555601613663 2 5 1 -0.443253833059252 2 5 1 -0.445156135335308 1 5 1 -0.570124671736916 1 5 1 -0.715586041727328 1 5 1 -0.59944925951305 1 5 2 -0.571270118524135 1 5 2 -0.478721803809417 2 5 2 
-0.677464125838552 2 5 2 -0.663628439043173 2 5 2 -0.269338514686921 1 5 2 -0.602552243929772 1 5 2 -0.291299463050662 1 5 2 -0.44101267885359 1 5 2 -0.311931727273563 2 5 2 -0.352091009578926 1 5 2 -0.313916230690944 1 5 2 -0.374014526157238 1 5 2 -0.300236548555279 1 5 2 -0.361852622471219 1 5 2 -0.290120165572002 1 5 2 -1.3190023778617 1 5 2 -0.813585821304588 2 5 2 -0.563167577163749 2 5 2 -0.424847079638711 1 5 2 -0.431682396745519 1 5 2 -0.742666504831229 2 5 2 -1.3016483866513 1 5 2 -0.355061924288677 1 5 2 -0.645449822853174 2 5 2 -0.288378961868379 1 5 2 -0.374463137914422 2 5 2 -0.405984822829934 2 5 2 -0.460634380609883 2 5 2 -0.750352906162385 1 5 2 -0.644664165310704 1 5 2 -0.300369902496596 1 5 2 -0.443395281476769 1 5 2 -0.242967135467797 2 5 2 -0.286922136386556 2 5 2 -0.287290418411787 2 5 2 -0.521549068525531 2 5 2 -0.292427955172831 1 5 2 -0.689267068868006 2 5 2 -0.518413816193551 2 5 2 -0.318270239089209 1 5 2 -0.382457012636577 2 5 2 -1.01440803729853 1 5 2 -1.00158066089162 1 5 2 -0.568667032400329 2 5 2 -0.414610843415938 1 5 2 -0.400258366212628 2 5 2 -1.00809741464603 2 5 2 -1.27133812224764 2 5 2 -0.743864210325877 2 5 2 -0.775841632110899 2 5 2 -0.300321917136189 2 5 2 -0.3583802317039 2 5 2 -0.460038260487213 1 5 2 -0.364671001753739 2 5 2 -0.452730720520749 2 5 2 -0.650435164535667 1 5 2 -1.30257818709608 2 5 2 -0.467408775207611 2 5 2 -0.413873488779555 2 5 2 -0.362883875858316 1 5 2 -0.706627565831075 2 5 2 -0.965894478924112 1 5 2 -0.364415938903456 1 5 2 -0.302357207565824 2 5 2 -0.523675741606119 1 5 2 -0.246098564525208 1 5 2 -0.534354328367928 2 5 2 -0.305317088730255 2 5 2 -0.360877156880599 2 5 2 -0.269313918771234 1 5 2 -0.522107720783827 2 5 2 -0.798496955992481 1 5 2 -0.774270654545548 1 5 2 -0.689189662698451 2 5 2 -0.541317395948974 1 5 2 -0.758336347994286 2 5 2 -1.04260254410127 1 5 2 -0.298448066365288 2 5 2 -0.687244199835044 1 5 2 -0.321017344151699 2 5 2 -1.30053982037248 1 5 2 -0.871190362112565 2 5 2 -0.854567944471819 1 5 2 -0.406633311451941 2 5 2 -0.595529004717776 2 5 2 -0.932485076424955 2 5 2 -0.670527589251614 2 5 2 -0.349539633417549 2 5 2 -0.527533696703081 2 5 2 -0.604204077013109 1 5 2 -0.396372501238897 2 5 2 -0.54363245627552 2 5 2 -1.01374770072993 2 5 2 -0.442516401166061 1 5 2 -0.337787899261388 2 5 2 -0.611607046647056 2 5 2 -0.582231962637285 2 5 2 -0.531731222343829 2 5 2 -0.70961760628681 2 5 2 -0.983672547433239 2 5 2 -0.853752013607103 2 5 2 -0.299100847268621 2 5 2 -0.458144991493894 2 5 2 -0.646448464231831 2 5 2 -0.349113081280827 2 5 2 -0.371396695556209 2 5 2 -0.380762038115444 2 5 2 -0.626193352278793 2 5 2 -0.319957343473038 2 5 2 -0.894827474594344 1 5 2 -0.371085339061208 2 5 2 -0.274700685448708 2 5 2 -0.472512525590188 2 5 2 -0.428272329032601 1 5 2 -0.81108343332377 2 5 2 -0.29226124190246 1 5 2 -0.709302851449251 1 5 2 -0.441502263285431 2 5 2 -0.377865986040148 1 5 2 -0.299141714875424 2 5 2 -0.852037775518583 1 5 2 -0.367443619829809 2 5 2 -0.833467723342496 1 5 2 -0.518949570328891 2 5 2 -1.08071399168627 2 5 2 -1.45803253458175 1 5 2 -0.403737991042524 2 5 2 -0.430597937237068 1 5 2 -0.693327507928039 2 5 2 -0.409427255768774 2 5 2 -0.415266426974346 2 5 2 -0.399348890241505 1 5 2 -1.00807008741471 1 5 2 -0.600334249477256 2 5 2 -0.723090984967466 1 5 2 -0.30124593772841 2 5 2 -0.526226731553072 1 5 2 -0.909950421520731 2 5 2 -0.327267873435672 1 5 2 -1.07209919243311 2 5 2 -0.29726599356851 1 5 2 -0.552066919100173 2 5 2 -0.494894143356916 2 5 2 -0.32978714763287 2 5 2 -0.590111357521564 2 5 2 
-0.453342223889263 2 5 2 -0.402928162250864 2 5 2 -1.28649448740156 2 5 2 -0.307709380863996 1 5 2 -0.28828133716514 2 5 2 -0.293039690509469 2 5 2 -0.416060987991466 2 5 2 -0.475558200663385 2 5 2 -1.05487716699584 2 5 2 -0.338172024306927 1 5 2 -0.595539455240045 1 5 2 -0.579146103187199 2 5 2 -0.786799876060352 1 5 2 -0.241065859944711 2 5 2 -0.582321691985273 2 5 2 -0.460354760620268 1 5 2 -0.841722330305237 1 5 2 -1.01167714961156 2 5 2 -0.424713060969898 1 5 2 -0.394167127869212 2 5 2 -1.03416630772583 1 5 2 -1.32959777810628 1 5 2 -1.75972860107492 1 5 2 -0.615179768965791 2 5 2 -0.319854717980425 2 5 2 -0.489657096763377 1 5 2 -0.670661576197628 2 5 2 -0.472203770212263 2 5 2 -0.765998706988045 1 5 2 -0.333182293252276 1 5 2 -0.570043234180048 1 5 2 -0.30363661516842 1 5 2 -0.332466771451815 2 5 2 -0.997841282553196 2 5 2 -0.45787243741055 1 5 2 -0.334190036053108 2 5 2 -0.419647452096217 2 5 2 -0.477289639435407 2 5 2 -0.319493821935502 2 5 2 -0.602105491492108 2 5 2 -0.901453621588105 2 5 2 -0.302006597660369 2 5 2 -0.370541701419962 1 5 2 -0.632484359068564 2 5 2 -0.675493181471213 1 5 2 -0.702604527810739 2 5 2 -0.457494041206726 1 5 2 -1.02133348990189 1 5 2 -1.02366570258029 2 5 2 -0.6241274220945 2 5 2 -1.13319965338725 1 5 2 -0.608397741451449 2 5 2 -0.275232263227218 2 5 2 -0.646078828525116 1 5 2 -0.392384020972726 2 5 2 -0.611608179642105 1 5 2 -0.403732771272542 2 5 2 -0.475709943826484 1 5 2 -0.644107463291593 1 5 2 -0.529061673835631 2 5 2 -0.76205402727557 1 5 2 -0.478372023349786 1 5 2 -0.721916054782157 2 5 2 -0.298281970063148 2 5 2 -0.308419274846538 2 5 2 -1.13676249928442 2 5 2 -0.489633518133111 1 5 2 -0.81718265267318 2 5 2 -0.296510774521216 1 5 2 -0.41545087025183 2 5 2 -0.858883400769863 1 5 2 -0.588983529667847 2 5 2 -1.05271712018426 2 5 2 -0.305721646872819 1 5 2 -0.639265026387045 2 5 2 -0.640474642443045 1 5 2 -0.896096517036621 2 5 2 -0.42307255863364 2 5 2 -0.415674104307401 1 5 2 -0.27547910375578 1 5 2 -0.348853636625538 2 5 2 -0.283695041149401 2 5 2 -0.305964485294963 2 5 2 -0.993384639595699 2 5 2 -0.747661562638797 1 5 2 -0.374983766761583 2 5 2 -0.709902782274922 1 5 2 -0.39029996774982 2 5 2 -0.443342047659481 1 5 2 -0.25232121385486 2 5 2 -0.606258771370627 2 5 2 -0.56630014842487 2 5 2 -0.407573540497359 2 5 2 -0.488426329742728 2 5 2 -0.530548943298116 1 5 2 -0.836491434275815 2 5 2 -0.368718555457284 2 5 2 -0.627508832177755 1 5 2 -0.658507784089307 2 5 2 -0.494633132243137 2 5 2 -0.767210656356636 1 5 2 -0.279455460728518 2 5 2 -0.680268781163734 1 5 2 -0.434657646694503 1 5 2 -0.975121716315162 2 5 2 -0.454458052785016 2 5 2 -0.870719661036972 2 5 2 -0.601447265395704 2 5 2 -0.883151345159082 2 5 2 -0.79883814953365 2 5 2 -0.520533085849907 2 5 2 -0.740289828056042 1 5 2 -0.808434675051423 2 5 2 -0.323990809323793 1 5 2 -0.402569321518717 1 5 2 -0.520691303484606 1 5 2 -0.401064022355165 2 5 2 -0.402344022560083 1 5 2 -0.532289549565749 2 5 2 -0.706963165521545 2 5 2 -0.784955813139153 2 5 2 -0.360194468075243 2 5 2 -0.409815687475514 1 5 2 -0.82043050263301 2 5 2 -0.460274040204098 2 5 2 -0.419051670972866 2 5 2 -0.599443515950589 2 5 2 -0.966096764539077 2 5 2 -0.366186511338898 1 5 2 -1.4222044721659 2 5 2 -0.777184212128937 2 5 2 -0.591852836588032 2 5 2 -0.770749892926039 2 5 2 -0.70434735829414 1 5 2 -0.765666276417329 2 5 2 -0.40346241426283 1 5 2 -0.391165632121021 2 5 2 -0.370020173988749 2 5 2 -1.58683703850196 1 5 2 -1.57793133770567 2 5 2 -0.80075289464325 2 5 2 -0.256904564618549 1 5 2 -0.53622262912349 1 5 2 
-0.453580971173257 2 5 2 -0.410069535718748 1 5 2 -0.515797332567113 2 5 2 -0.952842898198181 1 5 2 -0.363748661621775 2 5 2 -0.428437274072119 2 5 2 -0.370569493908707 1 5 2 -1.55504308977282 1 5 2 -0.2910968027665 2 5 2 -0.889633822477091 2 5 2 -1.06768254922828 1 5 2 -0.737740843880572 2 5 2 -0.999020093280879 2 5 2 -0.332193354553405 2 5 2 -0.489468556048885 1 5 2 -0.276614446525954 2 5 2 -0.411812431184961 1 5 2 -0.645266731187802 2 5 2 -0.355603707761157 2 5 2 -0.390302407266954 1 5 2 -0.77292099479717 2 5 2 -0.252389739406451 2 5 2 -0.876313308318194 2 5 2 -0.58287646271364 1 5 2 -0.288751011208501 1 5 2 -0.431566553814174 2 5 2 -0.589008102697159 2 5 2 -0.829161903382978 2 5 2 -0.628538983915815 2 5 2 -0.621394296269468 2 5 2 -0.579181298723461 1 5 2 -0.727901955182036 2 5 2 -0.631355875603683 2 5 2 -0.860874327171326 2 5 2 -0.343433754720578 2 5 2 -0.5773516679935 2 5 2 -0.279980088015754 1 5 2 -0.297786857526651 2 5 2 -0.352028385521676 2 5 2 -0.718284977347952 1 5 2 -0.386491843574301 1 5 2 -0.733817922945248 2 5 2 -0.330039961374457 1 5 2 -0.562137897292054 2 5 2 -0.493718153186244 2 5 2 -0.384521948665274 2 5 2 -0.541882345607494 2 5 2 -0.899433484810609 1 5 2 -0.239671549366562 1 5 2 -0.903508962409293 2 5 2 -0.437062486670204 1 5 2 -0.47204968825503 2 5 2 -0.463054778904269 1 5 2 -0.337988558333662 1 5 2 -0.945352936382255 2 5 2 -0.319331081252348 1 5 2 -1.10841845342301 1 5 2 -0.727028251710372 1 5 2 -0.418174877683897 2 5 2 -0.83459503151359 2 5 2 -0.341783851166967 2 5 2 -0.409549042090065 2 5 2 -1.39638569008014 2 5 2 -0.282660568650718 2 5 2 -0.377102854745555 2 5 2 -0.559130208630371 1 5 2 -0.406889638299026 2 5 2 -0.874392505642916 2 5 2 -0.549370228408864 1 5 2 -0.702882081610178 1 5 2 -0.387196237366316 2 5 2 -0.254396302589893 2 5 2 -0.661141881945967 2 5 2 -0.353850139949898 2 5 2 -0.940367165724872 2 5 2 -0.257306998632217 2 5 2 -0.25948459184901 1 5 2 -0.282447060372156 2 5 2 -0.434471601810715 2 5 2 -0.531847599879585 2 5 2 -0.831639598690597 1 5 2 -0.476057177723281 1 5 2 -0.689960964595721 2 5 2 -0.762885904963058 2 5 2 -0.622547245815982 2 5 2 -0.467121910624321 2 5 2 -0.444998878563145 2 5 2 -0.610618477959826 2 5 2 -0.406946642496399 1 5 2 -1.0606594018364 2 5 2 -0.554673891728355 2 5 2 -0.249462989686065 2 5 2 -0.391072779178118 1 5 2 -0.502851072240924 2 5 2 -0.375337554826657 2 5 2 -0.459877784492948 1 5 2 -0.37669560180294 2 5 2 -0.35853976242436 2 5 2 -0.299654614882035 2 5 2 -0.722167944915479 2 5 2 -0.470173817048549 2 5 2 -0.434030303678653 1 5 2 -0.38807847057575 2 5 2 -0.461429537822728 1 5 2 -0.810700838760469 2 5 2 -0.249051828431355 1 5 2 -0.317554462771952 2 5 2 -0.262256455504448 1 5 2 -0.2953873757043 2 5 2 -0.696729236528574 2 5 2 -0.528266303919385 2 5 2 -0.55142571005823 2 5 2 -0.586822599864067 2 5 2 -0.418740386790856 2 5 2 -0.335528681201811 1 5 2 -0.842500045429954 2 5 2 -0.285946789650486 2 5 2 -0.408435800240321 2 5 2 -0.413125087979462 2 5 2 -0.393886958711384 1 5 2 -0.253356738206904 2 5 2 -0.312045370960966 1 5 2 -0.798452098494563 2 5 2 -0.492707665345048 2 5 2 -0.716083098282908 2 5 2 -0.234541570552336 2 5 2 -1.35239146034105 2 5 2 -0.718129009054262 1 5 2 -0.718707390761021 2 5 2 -0.491926442341928 2 5 2 -0.654723295742436 2 5 2 -2.2535330863484 2 5 2 -0.427000474398908 2 5 2 -0.709004302987488 1 5 2 -0.516478985375353 2 5 2 -0.399159476675353 2 5 2 -0.442334074498277 2 5 2 -0.305764408172937 1 5 2 -0.374730267131031 2 5 2 -0.258402933869162 2 5 2 -0.356729146842492 2 5 2 -0.249659208975827 2 5 2 -0.513939361328391 2 5 2 
-0.273198932158475 2 5 2 -0.299847483659362 1 5 2 -0.541237531522651 2 5 2 -0.392932766582102 1 5 2 -0.525638020825498 2 5 2 -0.333414128837149 1 5 2 -0.576494101054249 2 5 2 -0.511222521868291 1 5 2 -0.412380296323655 2 5 2 -0.399606860754613 2 5 2 -0.997015772263903 2 5 2 -0.443550015156711 2 5 2 -0.837716892291427 2 5 2 -0.390478993062678 1 5 2 -0.636766756207244 2 5 2 -0.737585807116948 2 5 2 -0.295250014308323 2 5 2 -0.608066528187843 2 5 2 -1.24729266077018 2 5 2 -0.41288102715652 1 5 2 -1.24953422410599 1 5 2 -0.351993007234192 2 5 2 -0.335694580769538 1 5 2 -0.423647638118759 1 5 2 -0.734734535205897 1 5 2 -0.260015569529333 1 5 2 -0.750757643884208 2 5 2 -0.595132916679284 2 5 2 -0.522805168311647 2 5 2 -0.843659849398215 2 5 2 -1.10699652185756 2 5 2 -0.391076744361603 2 5 2 -0.312026720740498 2 5 2 -0.607969730004942 2 5 2 -1.00812364162894 2 5 2 -0.43084197949303 2 5 2 -0.453046882496868 1 5 2 -0.370369606134876 1 5 2 -0.775483364298362 1 5 2 -0.396231522637068 2 5 2 -0.413528320853371 2 5 2 -1.46772948611848 2 5 2 -0.341708188807674 2 5 2 -0.419394806122751 1 5 2 -0.257895217630086 2 5 2 -0.415728977418159 2 5 2 -0.316442984226336 2 5 2 -0.968259065135459 2 5 2 -0.387870086772944 1 5 2 -0.716049265883702 2 5 2 -0.633645416807576 1 5 2 -0.953173953972706 2 5 2 -0.690728024005709 2 5 2 -1.10833203046202 2 5 2 -2.21338412745891 2 5 2 -0.324098654160468 2 5 2 -0.673932874285758 2 5 2 -0.800813516749607 2 5 2 -0.696084502169422 1 5 2 -0.355840028465312 1 5 2 -0.683207616367023 1 5 2 -0.320920277031855 2 5 2 -1.03878518793101 1 5 2 -0.609374353605396 2 5 2 -0.578037696967778 2 5 2 -0.322774773347465 2 5 2 -0.444321937393125 2 5 2 -0.706403834907649 2 5 2
diff --git a/R/inst/extdata/choiceRT_single_exampleData.txt b/R/inst/extdata/choiceRT_single_exampleData.txt
deleted file mode 100644
index c925a82a..00000000
--- a/R/inst/extdata/choiceRT_single_exampleData.txt
+++ /dev/null
@@ -1,1001 +0,0 @@
-RT choice subjID condition
-0.238126253704183 1 1 1 -0.788334139249308 2 1 1 -0.524351202388138 2 1 1 -1.30852451859186 1 1 1 -0.244177006142252 1 1 1 -0.512534281943979 1 1 1 -0.570872020376975 2 1 1 -0.552056452179357 1 1 1 -0.298121361381527 2 1 1 -0.323864684737407 1 1 1 -0.542476237007045 1 1 1 -0.457829931981559 2 1 1 -0.214443816443766 1 1 1 -0.282641758197282 2 1 1 -0.577296397953241 1 1 1 -0.80363268095685 2 1 1 -0.630866151842371 2 1 1 -0.561537877283935 2 1 1 -0.447864619700588 1 1 1 -0.271079966516117 2 1 1 -0.286558308483825 2 1 1 -0.402853789793329 2 1 1 -0.261247265870358 2 1 1 -0.954323974954787 1 1 1 -0.233982750292549 2 1 1 -0.534509968347321 2 1 1 -1.38489463892966 1 1 1 -0.51382752398596 2 1 1 -0.877226598584423 2 1 1 -0.59661096895894 2 1 1 -0.653486235884601 2 1 1 -0.499754559401486 2 1 1 -0.234607668817517 2 1 1 -0.531596228343812 2 1 1 -0.517067421390557 2 1 1 -0.286714432990514 2 1 1 -0.280389415416944 1 1 1 -0.770848791728697 2 1 1 -0.242534242474749 2 1 1 -1.21402951161598 1 1 1 -0.254230773115822 1 1 1 -0.235607609409862 1 1 1 -0.893859490775577 2 1 1 -0.4248828895841 2 1 1 -0.806633683066691 1 1 1 -0.52846751057204 2 1 1 -0.283404274358359 2 1 1 -0.38261147359119 1 1 1 -0.366467333270928 2 1 1 -0.89906087165271 2 1 1 -0.473523175525898 2 1 1 -0.61052334774835 2 1 1 -0.348877038822898 2 1 1 -0.509848343105319 2 1 1 -0.714362767211544 2 1 1 -0.366653361634071 1 1 1 -0.504639516528354 2 1 1 -0.789291266027802 1 1 1 -0.220496731951155 2 1 1 -0.225368494671686 2 1 1 -0.935425512110651 1 1 1 -0.596093103065834 2 1 1 -0.751187828634478 2 1 1 -0.398369973292919 2 1 1
-0.803192132747886 1 1 1 -0.653642313281921 2 1 1 -0.759465190620081 2 1 1 -0.301158475484036 2 1 1 -0.468546635484975 2 1 1 -0.45136376067397 2 1 1 -0.225730206953994 1 1 1 -0.871541732294617 2 1 1 -1.02231746439083 2 1 1 -0.453015412970327 1 1 1 -0.198424664401742 2 1 1 -0.300531454438104 2 1 1 -0.473723469079576 1 1 1 -0.397993417619097 2 1 1 -0.990744721453659 2 1 1 -0.576175729949669 2 1 1 -0.200104343196362 2 1 1 -0.397950225292451 2 1 1 -0.595871677587168 1 1 1 -0.631283245367399 2 1 1 -0.225640535433198 2 1 1 -0.671278939344137 2 1 1 -0.562888330598081 1 1 1 -0.713201556333214 1 1 1 -0.429372024083033 2 1 1 -0.559437949496943 1 1 1 -0.747758954300599 2 1 1 -0.668556572370471 2 1 1 -0.179933868089705 1 1 1 -0.557946405103375 2 1 1 -0.781925159045207 2 1 1 -0.629998909619026 2 1 1 -0.73419031432803 1 1 1 -0.917048954570217 2 1 1 -1.27326330493077 1 1 1 -0.715099066135782 1 1 1 -0.561629162179203 2 1 1 -0.439342876745989 1 1 1 -0.212480989248291 2 1 1 -0.431997523692581 2 1 1 -0.504823085985375 1 1 1 -0.209443682735351 2 1 1 -0.535478168252645 1 1 1 -0.523309589143815 2 1 1 -0.292526841667345 2 1 1 -0.292598915819633 1 1 1 -0.383304045988112 2 1 1 -0.220801631101784 2 1 1 -0.240025256471961 2 1 1 -0.374617088048471 2 1 1 -0.225139772246513 2 1 1 -0.504765209525881 1 1 1 -0.536719069622199 1 1 1 -0.265730079523484 2 1 1 -0.788176797412021 2 1 1 -0.317054055572024 1 1 1 -0.984288372815029 1 1 1 -0.242704368769227 1 1 1 -1.16310843477133 1 1 1 -0.671512143534472 2 1 1 -0.235624281398265 2 1 1 -0.585278561981407 1 1 1 -0.313710683818167 1 1 1 -0.512453071354528 2 1 1 -0.318816084203735 2 1 1 -0.492290766723273 2 1 1 -0.869267244819061 2 1 1 -0.416347372277426 2 1 1 -1.25171209855063 1 1 1 -0.421124063985099 2 1 1 -0.330265759909128 2 1 1 -0.32442759213596 2 1 1 -0.499405834143408 2 1 1 -0.605809814064198 2 1 1 -0.441813584555195 1 1 1 -0.355018580197292 2 1 1 -0.284917824978601 2 1 1 -0.712509291577718 2 1 1 -0.360087543146394 2 1 1 -0.593758109292972 2 1 1 -0.21793928877364 2 1 1 -0.511916501085172 1 1 1 -1.65665966055448 2 1 1 -0.462252095429733 1 1 1 -0.375694324193756 2 1 1 -0.573330510111478 1 1 1 -0.624806212238662 1 1 1 -0.3221554867038 1 1 1 -0.394184550616579 1 1 1 -0.233898257977356 2 1 1 -0.616451835954318 2 1 1 -0.435745652986984 2 1 1 -0.309831870195393 1 1 1 -0.303567774481905 1 1 1 -0.268141575894932 2 1 1 -0.685546680374616 2 1 1 -0.315857448984633 2 1 1 -0.358148021225324 1 1 1 -0.561597978729496 2 1 1 -0.575763837785002 2 1 1 -0.408409797786314 2 1 1 -0.456470478096314 2 1 1 -0.211008154807298 1 1 1 -0.537560426488747 1 1 1 -0.474119050536192 1 1 1 -0.349680702914349 2 1 1 -0.43874642118394 2 1 1 -0.741099937281951 2 1 1 -0.397490501092685 1 1 1 -0.455993632903328 2 1 1 -0.531917883353318 2 1 1 -0.544592749033783 2 1 1 -0.74575081631549 1 1 1 -0.482830763020483 2 1 1 -0.280104823458282 2 1 1 -0.674827163589054 2 1 1 -0.27232449929437 1 1 1 -0.33609945965603 2 1 1 -0.642687813456977 2 1 1 -0.45152584390343 2 1 1 -0.393612819207325 1 1 1 -0.403513480920972 2 1 1 -0.55270209232572 2 1 1 -0.282474350101989 1 1 1 -0.225686494015142 1 1 1 -0.3032960404285 1 1 1 -0.741695387202929 1 1 1 -0.23627922113503 1 1 1 -0.317661404771517 2 1 1 -0.365881950379812 1 1 1 -0.671407911504626 2 1 1 -0.6327672361385 2 1 1 -0.408730216599132 2 1 1 -1.05295329016947 2 1 1 -0.647929253014634 2 1 1 -0.272505386795946 1 1 1 -0.477000937785718 2 1 1 -0.593679670773664 2 1 1 -0.485804513765726 1 1 1 -0.685108031619407 2 1 1 -0.463863491717212 2 1 1 -0.280869562583906 2 1 1 -0.484442256816249 1 1 1 
-0.374203282894535 1 1 1 -0.205270568757322 2 1 1 -0.285730023779721 1 1 1 -0.420031671350127 2 1 1 -0.304140334800815 2 1 1 -0.455400240565684 1 1 1 -0.319295225911816 2 1 1 -0.853456173431349 2 1 1 -1.28296521539738 2 1 1 -0.402276812108308 2 1 1 -1.60365089898574 2 1 1 -0.513436951554669 2 1 1 -0.635287982445216 2 1 1 -0.756725913746622 1 1 1 -0.538411817875012 2 1 1 -0.252807751300543 1 1 1 -0.306493263230248 2 1 1 -0.328940637779731 1 1 1 -0.295149174376265 2 1 1 -0.428772773247104 2 1 1 -0.710257617392816 2 1 1 -2.1398843380733 2 1 1 -0.390964230021283 1 1 1 -0.30264732818644 1 1 1 -0.24604561365542 2 1 1 -0.326118394989355 2 1 1 -0.444302762917929 2 1 1 -0.994994120515054 2 1 1 -0.329747734506691 2 1 1 -0.244820417609073 2 1 1 -0.434344901812039 2 1 1 -0.245526039713125 2 1 1 -0.371387027622059 2 1 1 -0.396016682526436 2 1 1 -0.868293655068221 2 1 1 -0.339580118779972 2 1 1 -0.377321305638716 1 1 1 -0.352058350011174 2 1 1 -0.523222420484193 1 1 1 -1.63006360968846 2 1 1 -0.403780279358626 1 1 1 -0.33450821318739 2 1 1 -0.246049648436144 1 1 1 -0.73900563703035 2 1 1 -0.70659002598455 1 1 1 -0.251224036209508 1 1 1 -0.279682884105716 2 1 1 -0.446835970242547 2 1 1 -0.344773155307199 2 1 1 -1.18156313011751 2 1 1 -0.40159469187599 1 1 1 -0.662618250249293 2 1 1 -0.484088636200293 2 1 1 -0.249177412018199 1 1 1 -0.635565342005854 2 1 1 -0.237344366033974 2 1 1 -0.262837667936303 2 1 1 -0.223824529758815 2 1 1 -0.544851049052962 1 1 1 -1.22941904309934 1 1 1 -1.18790150456476 2 1 1 -0.397012831119724 2 1 1 -0.542500816372649 1 1 1 -0.215934076714995 2 1 1 -1.30455859046761 2 1 1 -0.536099297245265 2 1 1 -0.414264536316934 2 1 1 -0.777679859044325 2 1 1 -0.697575719549679 1 1 1 -0.704761484394448 2 1 1 -0.286893353427223 1 1 1 -0.681973124438239 2 1 1 -0.397462829482937 2 1 1 -0.257670640245336 2 1 1 -0.236649584180499 2 1 1 -0.436790435094707 1 1 1 -0.574656753851278 1 1 1 -0.253082319735779 1 1 1 -0.61806692862892 1 1 1 -0.46661603680114 2 1 1 -0.195332992231242 1 1 1 -0.229629897436442 1 1 1 -0.992646398039104 2 1 1 -0.94136783174252 2 1 1 -0.837333099387364 1 1 1 -0.430204780391451 2 1 1 -1.37515921760222 2 1 1 -0.89875556054097 2 1 1 -0.696864042518777 2 1 1 -0.332933586834615 2 1 1 -0.334070550417085 2 1 1 -0.38676132253602 2 1 1 -0.306404665389991 2 1 1 -0.478254432945422 2 1 1 -0.601997570889218 2 1 1 -0.373642558748753 2 1 1 -0.29388256861859 1 1 1 -0.403146732540824 2 1 1 -0.754379822737839 2 1 1 -0.20827688411218 2 1 1 -0.211975975201092 2 1 1 -0.591340246795799 1 1 1 -0.263322621163444 2 1 1 -0.525170614901281 1 1 1 -0.206823345071543 1 1 1 -0.935520204615524 1 1 1 -0.550910831841 2 1 1 -0.232504114652867 2 1 1 -0.391975720570035 1 1 1 -0.52537232580037 2 1 1 -0.604827669281913 2 1 1 -0.440173374557048 2 1 1 -0.294878838994327 2 1 1 -0.323868811622971 1 1 1 -0.240824506056104 2 1 1 -0.423271049333481 2 1 1 -0.849356591210965 2 1 1 -0.335818515496422 2 1 1 -0.538745656799135 2 1 1 -0.4208751745964 2 1 1 -0.55146359110108 2 1 1 -0.467882029849217 2 1 1 -0.567777388073783 2 1 1 -0.311394332684366 1 1 1 -1.33531192845093 1 1 1 -0.368867535882799 2 1 1 -0.340092989922591 2 1 1 -0.299811445088077 2 1 1 -0.198131285653104 2 1 1 -1.57688580580023 2 1 1 -0.671467937043381 2 1 1 -0.422481878776226 2 1 1 -0.48313672040092 1 1 1 -0.473697344635179 1 1 1 -0.68951966048344 2 1 1 -0.633967567703449 2 1 1 -0.382761102393661 1 1 1 -0.68736056335558 1 1 1 -0.677810537621417 2 1 1 -0.249614606331914 2 1 1 -0.755321813543998 2 1 1 -0.370198385669219 1 1 1 -0.697638915493631 1 1 1 
-0.684828719775979 2 1 1 -0.502349799392655 1 1 1 -0.583605969114717 1 1 1 -0.517041977935336 1 1 1 -0.411670106317747 1 1 1 -0.207610898625408 1 1 1 -0.414006631133478 2 1 1 -0.921999310392829 1 1 1 -0.409507167245215 2 1 1 -0.584716070617761 2 1 1 -0.666513112126972 2 1 1 -0.233005827550518 2 1 1 -0.60770657746225 1 1 1 -0.221784346267773 2 1 1 -0.771391695716424 2 1 1 -1.21988159355549 1 1 1 -0.408933678664394 2 1 1 -1.03374983542661 2 1 1 -0.939198644733114 2 1 1 -0.962067734082042 2 1 1 -0.473406448845882 2 1 1 -0.223644602219167 2 1 1 -1.12139515597077 1 1 1 -0.299025722625131 2 1 1 -0.55009896091157 1 1 1 -0.578799507502895 1 1 1 -0.962097234341087 1 1 1 -0.348861796367042 2 1 1 -0.35773121339554 2 1 1 -0.246740510307971 1 1 1 -0.407111627051893 1 1 1 -0.550930871978825 1 1 1 -0.422754497543909 2 1 1 -0.758063342099552 2 1 1 -0.380847347114823 2 1 1 -0.377055603284598 2 1 1 -0.65608839650545 1 1 1 -1.42357385911498 2 1 1 -0.232798506755752 2 1 1 -0.539867634108279 2 1 1 -0.392489725525737 2 1 1 -0.367110223983889 2 1 1 -0.839639947757427 2 1 1 -0.592327430792799 2 1 1 -0.239126262427817 2 1 1 -0.328625329636268 2 1 1 -0.284257957756146 1 1 1 -0.331590178883346 1 1 1 -0.620620411662111 2 1 1 -0.755967038010479 2 1 1 -0.331900743408574 2 1 1 -0.421146686045199 1 1 1 -1.07476503410067 2 1 1 -0.507112598176372 1 1 1 -0.311566718621004 2 1 1 -0.301319320070233 2 1 1 -0.393257034342845 2 1 1 -0.673521481008061 2 1 1 -0.291567562966672 2 1 1 -1.05249328382332 2 1 1 -0.694698152076518 2 1 1 -0.733055920143737 2 1 1 -0.334350894107303 2 1 1 -0.542807697456418 1 1 1 -0.579281169009386 1 1 1 -0.320837583848137 1 1 1 -0.488074071042795 1 1 1 -0.213060081069537 2 1 1 -0.237230647833275 2 1 1 -0.237572229668373 1 1 1 -0.241805498724672 1 1 1 -0.21505246069559 2 1 1 -0.625069689033177 2 1 1 -0.391789762960315 1 1 1 -0.360924641936915 2 1 1 -0.434831888026175 1 1 1 -1.53947356804897 2 1 1 -0.390459073072731 2 1 1 -0.327186719063663 1 1 1 -0.451681415339723 1 1 1 -0.551841771615269 2 1 1 -0.41039773179749 1 1 1 -0.926634118987433 2 1 1 -0.813362027443744 2 1 1 -0.632371052186083 2 1 1 -1.07271976627787 1 1 1 -0.347281073927582 1 1 1 -0.44423560152159 1 1 1 -0.576366534316911 2 1 1 -0.279713029952993 2 1 1 -0.881466843024701 2 1 1 -0.374654223890455 1 1 1 -0.246340230252564 1 1 1 -0.46051090791758 2 1 1 -0.610478508455545 1 1 1 -0.290070606427311 2 1 1 -0.544420557842503 1 1 1 -0.776693279362721 1 1 1 -0.235406028367375 2 1 1 -0.239531675743827 1 1 1 -0.44775078332261 2 1 1 -0.272084709816774 1 1 1 -0.490027056594032 2 1 1 -1.11466956380519 2 1 1 -0.270448404879725 2 1 1 -0.442949902437612 1 1 1 -0.570651632322539 1 1 1 -0.32265845661882 2 1 1 -0.407435441210764 2 1 1 -0.200085052390358 2 1 1 -0.358511835895485 2 1 1 -1.2431214333383 2 1 1 -0.696171754957839 1 1 1 -0.2777627469669 1 1 1 -0.429359856138122 2 1 1 -0.340524177360971 2 1 1 -0.199944337376957 2 1 1 -0.398334292684942 2 1 1 -0.388541579168816 2 1 1 -0.398547679838622 1 1 1 -0.839309822360769 2 1 1 -0.280253849702043 2 1 1 -0.547345720269382 2 1 1 -0.376647832731017 2 1 1 -0.455530332435412 2 1 1 -0.334196466045242 2 1 1 -0.759777271734527 2 1 1 -1.10869967729068 2 1 1 -0.222920909328599 2 1 1 -0.243727194101031 2 1 1 -0.331283374352904 2 1 1 -0.489803545251022 1 1 1 -0.2736011848833 2 1 1 -0.432409628386385 1 1 1 -0.447747022319498 2 1 1 -0.736283852147818 2 1 1 -0.461500847594122 1 1 1 -0.359367876631285 1 1 1 -0.418098062593873 2 1 1 -0.502693165924066 1 1 1 -0.260188072876792 1 1 1 -0.348437996297828 1 1 1 -1.57562306974174 2 1 1 
-0.316108820930013 2 1 1 -0.421685918698271 2 1 1 -0.578695918727619 2 1 1 -1.12879309366769 2 1 1 -1.03916993441652 2 1 1 -0.492207222672778 1 1 1 -0.33283217994747 2 1 1 -0.39422420306568 2 1 2 -0.362300838201913 1 1 2 -0.469662901313467 2 1 2 -0.820030023322582 1 1 2 -0.234551440695508 2 1 2 -0.331679248955791 1 1 2 -0.527229640837085 2 1 2 -0.91734807805308 2 1 2 -0.319175515877037 2 1 2 -0.651053459158852 1 1 2 -0.661459624685597 2 1 2 -0.281279784597852 2 1 2 -0.342078529279457 1 1 2 -0.3636800828231 2 1 2 -0.484151346003298 1 1 2 -0.658827635325395 1 1 2 -0.622208937699232 1 1 2 -0.580811030835409 2 1 2 -0.441808620117506 1 1 2 -0.36060243933493 2 1 2 -0.831194064165385 2 1 2 -0.361776006347027 2 1 2 -0.777351339265196 1 1 2 -0.278293909155803 2 1 2 -0.278507100800553 2 1 2 -0.884402648451047 1 1 2 -0.342560342613834 2 1 2 -0.809676649841315 2 1 2 -0.516858099569803 1 1 2 -0.634645370682583 2 1 2 -0.249686099229778 1 1 2 -1.33141985698474 1 1 2 -0.453726915386914 2 1 2 -0.290504549136735 2 1 2 -0.487095756746479 2 1 2 -0.346501172556082 2 1 2 -0.393430828426059 1 1 2 -0.504449494787339 2 1 2 -0.367999687491587 2 1 2 -0.352469038071531 1 1 2 -0.234560015153837 2 1 2 -0.940841504372444 1 1 2 -0.2046902513565 2 1 2 -0.461341997193658 1 1 2 -0.610339950737745 2 1 2 -0.446921029186028 1 1 2 -0.515591108864551 2 1 2 -1.58260395843454 2 1 2 -0.344764743329778 2 1 2 -0.427254054893139 2 1 2 -0.516158776880019 1 1 2 -1.2612303673015 2 1 2 -0.613528615965816 2 1 2 -0.267963577139406 2 1 2 -0.307594651280269 2 1 2 -0.24101706884499 1 1 2 -0.455753268732021 2 1 2 -0.405040912881131 2 1 2 -0.288094483330521 1 1 2 -0.545610622237084 2 1 2 -0.452142838999807 1 1 2 -0.594527943497764 1 1 2 -0.88116621589308 2 1 2 -0.277767297820233 2 1 2 -0.279551393619652 2 1 2 -0.365460511604365 2 1 2 -0.556212898406868 2 1 2 -0.328560209842821 1 1 2 -0.531013993625691 1 1 2 -0.231888430468412 1 1 2 -0.677110774143983 2 1 2 -0.453921989085917 2 1 2 -0.459571696136957 2 1 2 -0.393356837769246 1 1 2 -0.511202810478497 1 1 2 -0.693614307574487 1 1 2 -0.240863923388269 2 1 2 -0.321852817508144 1 1 2 -0.270908403919833 2 1 2 -0.820724000663825 1 1 2 -0.235189573689813 2 1 2 -0.326155088030317 2 1 2 -0.631590224724998 1 1 2 -0.441990726662034 1 1 2 -0.84336570752273 2 1 2 -0.359995826600722 2 1 2 -0.251400135935091 1 1 2 -0.412798716611553 1 1 2 -0.257997459005081 1 1 2 -0.324911808695266 2 1 2 -0.869954063020224 2 1 2 -0.316774804913553 1 1 2 -0.802438949561354 2 1 2 -0.753010120858102 2 1 2 -0.50447570028204 1 1 2 -0.472994968867572 2 1 2 -0.365558799398694 2 1 2 -0.355836646801112 1 1 2 -0.571157381310202 2 1 2 -0.634686215618027 2 1 2 -0.270208965991148 2 1 2 -0.328585338874615 1 1 2 -0.384434393299423 2 1 2 -0.316023575731398 1 1 2 -0.494817395995112 2 1 2 -0.300504460120145 2 1 2 -0.347783059904907 2 1 2 -1.02851702876777 2 1 2 -0.364863367923789 2 1 2 -0.460777943415657 2 1 2 -0.382793622325279 2 1 2 -0.273403607994913 2 1 2 -0.609426470046583 2 1 2 -0.297792901344866 1 1 2 -0.370479141756967 2 1 2 -0.882238434259769 1 1 2 -0.496857265474561 2 1 2 -0.277702369672893 2 1 2 -0.446926962878622 1 1 2 -0.36757607051588 1 1 2 -0.557136267106436 2 1 2 -1.00333007744122 2 1 2 -0.760219976689289 2 1 2 -0.332068843559009 2 1 2 -0.548961093445682 2 1 2 -0.313465233961872 2 1 2 -0.550216771807154 1 1 2 -0.29794278574353 1 1 2 -0.234198048951483 2 1 2 -0.273445183254746 2 1 2 -0.574886295740124 2 1 2 -0.258382409058055 1 1 2 -0.409845586460725 2 1 2 -0.326206723132256 2 1 2 -0.642595268751117 2 1 2 -0.232356531769144 2 1 2 
-1.70736951927255 1 1 2 -0.274687338325608 2 1 2 -0.40877430223826 2 1 2 -0.365729356985064 2 1 2 -0.6050000403314 2 1 2 -0.592011487134505 2 1 2 -0.557179211825432 1 1 2 -0.873296855773591 1 1 2 -0.216826762785491 1 1 2 -0.517886780128018 2 1 2 -0.398323720600925 1 1 2 -1.12139464302831 2 1 2 -0.249538486660475 2 1 2 -0.360304338880141 1 1 2 -0.627773044075362 2 1 2 -0.996274959906684 2 1 2 -0.202797819180771 1 1 2 -0.383153769101205 1 1 2 -0.324797856324902 1 1 2 -0.239421301531662 2 1 2 -0.24289898785908 1 1 2 -0.547746136913622 1 1 2 -0.386255965400912 2 1 2 -0.60223673049116 2 1 2 -0.549261776998216 2 1 2 -0.395992071688511 2 1 2 -0.217402932038072 2 1 2 -0.295305459515413 2 1 2 -0.447909826549637 2 1 2 -0.71950962867128 2 1 2 -0.794816583397332 1 1 2 -0.241318968932987 2 1 2 -0.556293493098233 2 1 2 -0.238208378562322 2 1 2 -0.499247181746743 1 1 2 -0.317050968536836 2 1 2 -0.322686857249444 2 1 2 -0.71276761076242 2 1 2 -0.301030966624334 2 1 2 -0.336641004565653 2 1 2 -0.812046026214206 2 1 2 -0.270220261704131 2 1 2 -0.701954145112022 2 1 2 -0.43964095073941 2 1 2 -0.384704421988213 2 1 2 -0.501487364681699 2 1 2 -0.455023781459671 2 1 2 -0.332474164305816 2 1 2 -0.567142874907982 2 1 2 -0.253324335182053 2 1 2 -0.444329558298367 2 1 2 -0.750457236950695 2 1 2 -0.292500297080332 2 1 2 -0.319745451630673 2 1 2 -0.286210384865368 2 1 2 -0.283637752128579 1 1 2 -0.236044970372654 2 1 2 -0.606532173767213 1 1 2 -1.32620595835061 2 1 2 -0.49881945892801 2 1 2 -1.00559201100603 2 1 2 -0.498129494834216 2 1 2 -0.682007132416635 2 1 2 -0.521249610973914 2 1 2 -0.229929750671033 2 1 2 -1.12814610238938 1 1 2 -0.74135939367203 2 1 2 -1.14362542630031 2 1 2 -0.261969169934014 1 1 2 -0.240668217312327 1 1 2 -1.59220860546119 2 1 2 -0.459005868330534 2 1 2 -0.290018768199601 2 1 2 -0.204589440835719 1 1 2 -0.619039312673667 2 1 2 -0.667083334382893 1 1 2 -0.359845320132008 1 1 2 -0.912937103767445 2 1 2 -0.522430834145349 2 1 2 -0.297762304149053 1 1 2 -0.276240304783596 2 1 2 -0.399051717562123 2 1 2 -0.404254481667734 2 1 2 -1.23765251352633 1 1 2 -1.2213528437925 2 1 2 -0.554106620313858 2 1 2 -0.513543854359058 2 1 2 -0.718560875752879 2 1 2 -0.299045404005468 1 1 2 -0.197161504481574 2 1 2 -0.355424533393654 2 1 2 -0.601322385280793 2 1 2 -0.31408110064814 1 1 2 -0.681928297252204 2 1 2 -0.257899160580357 1 1 2 -0.331853308281021 2 1 2 -0.932271244383807 2 1 2 -0.762290747363875 1 1 2 -0.610315223598599 2 1 2 -0.508310743979851 2 1 2 -0.293542339726516 1 1 2 -0.249532498898509 2 1 2 -0.240661946068682 1 1 2 -0.480573774515142 2 1 2 -0.26503112695042 1 1 2 -0.745033574361612 2 1 2 -0.313418912457887 2 1 2 -0.428468490020874 2 1 2 -0.619836697801129 2 1 2 -0.404856983338945 2 1 2 -0.225135719018744 2 1 2 -0.247203725168153 2 1 2 -0.473126435201081 2 1 2 -0.758881984366834 2 1 2 -0.530103620429835 2 1 2 -0.609787747426196 2 1 2 -0.42023331047044 1 1 2 -0.294545387085857 1 1 2 -0.311952071319945 2 1 2 -0.793299410776987 2 1 2 -0.376179978035794 2 1 2 -0.230418084856786 2 1 2 -0.25879024565358 2 1 2 -0.264796453159985 2 1 2 -0.745485785923675 2 1 2 -0.224026456721164 2 1 2 -0.6030135494348 1 1 2 -0.489733962171922 2 1 2 -0.39466687509252 1 1 2 -0.552205654391275 2 1 2 -0.575332864606377 2 1 2 -0.673079198373531 1 1 2 -0.346849143283538 1 1 2 -0.384205850032696 1 1 2 -0.382157410278578 2 1 2 -0.294710963958947 2 1 2 -0.487164402385991 1 1 2 -0.571768796864126 2 1 2 -0.243155946253846 1 1 2 -0.366816988109117 1 1 2 -0.556560232965345 2 1 2 -0.842238270178048 2 1 2 -0.630587019970835 1 1 2 
-0.849346128585895 2 1 2 -0.301910596058078 1 1 2 -0.494388435823995 1 1 2 -0.635279696032103 2 1 2 -0.244333041807845 1 1 2 -0.462722638825509 2 1 2 -0.355580520263025 1 1 2 -0.419159925222802 2 1 2 -0.229441499742296 2 1 2 -0.272011206196529 2 1 2 -0.457900548880182 1 1 2 -0.42581270796691 2 1 2 -0.192946477357373 2 1 2 -0.920226167527353 2 1 2 -0.870356567626495 2 1 2 -0.506429857300226 1 1 2 -1.35129991323996 2 1 2 -0.81782131154567 1 1 2 -0.312753351203148 2 1 2 -0.240147726849663 2 1 2 -0.331594506915926 2 1 2 -0.303605405427918 2 1 2 -1.3692312598303 2 1 2 -0.464969591870211 2 1 2 -0.365081121121442 2 1 2 -0.53678523283272 1 1 2 -0.362543809949933 2 1 2 -0.300077415892361 2 1 2 -0.565255726546058 2 1 2 -0.508969800017276 1 1 2 -0.197931347436034 2 1 2 -0.425448002968464 2 1 2 -0.917689004198981 2 1 2 -0.265209389680314 1 1 2 -0.399534037154238 1 1 2 -0.292118455947818 2 1 2 -0.352289208022807 2 1 2 -0.800608594982045 2 1 2 -0.251768687680971 1 1 2 -0.280448242529617 2 1 2 -0.243473452337431 1 1 2 -0.286916333216323 2 1 2 -0.838049565027792 1 1 2 -0.244529227888586 1 1 2 -0.71863102671265 2 1 2 -1.98916958946463 2 1 2 -0.238806320688673 2 1 2 -0.693785935221629 2 1 2 -0.495890282805749 1 1 2 -0.191174545766406 2 1 2 -0.836607694563896 2 1 2 -0.391165841939288 2 1 2 -0.566993167116615 2 1 2 -0.240715729525045 2 1 2 -0.354039896192607 1 1 2 -1.21434836670206 2 1 2 -0.48154154974369 2 1 2 -0.798332748413893 2 1 2 -0.650917466844914 2 1 2 -0.384224495536896 1 1 2 -0.945026137940947 2 1 2 -0.418508744931679 2 1 2 -0.659365265496408 1 1 2 -0.271823834279208 2 1 2 -0.43920360190222 2 1 2 -0.449532948575899 2 1 2 -1.02510373135742 2 1 2 -0.49889822568904 1 1 2 -1.23542122006285 1 1 2 -0.274287087904293 2 1 2 -0.673318487358746 2 1 2 -0.397619138281994 2 1 2 -1.0555886269523 2 1 2 -0.229089202292979 1 1 2 -0.697871360095817 2 1 2 -0.614287525537126 1 1 2 -0.322578991679628 1 1 2 -0.605688688250448 2 1 2 -0.534481750546624 2 1 2 -0.565101205666666 1 1 2 -0.378499737439249 1 1 2 -0.52995012536057 2 1 2 -2.45554336829165 1 1 2 -0.744067895318506 2 1 2 -0.673980171567151 2 1 2 -0.33496535179204 2 1 2 -0.703852317870538 2 1 2 -0.623851718541645 2 1 2 -0.275936871629696 2 1 2 -0.245843960416957 2 1 2 -0.220780887604494 2 1 2 -0.585098991357547 2 1 2 -0.343992796279959 1 1 2 -0.580010432096859 2 1 2 -0.377174286172397 2 1 2 -0.614794867960386 1 1 2 -0.235740390671863 1 1 2 -0.498093604359181 2 1 2 -0.422668225465882 2 1 2 -0.85458472173833 2 1 2 -0.318077105190021 2 1 2 -0.660599386236034 2 1 2 -0.44253879597235 2 1 2 -0.482452162905769 2 1 2 -0.569360166827625 2 1 2 -1.5195957937337 1 1 2 -0.335177741698269 2 1 2 -0.241392133198455 2 1 2 -0.503619286339201 2 1 2 -0.578489345701315 2 1 2 -0.327768204464024 2 1 2 -0.436095089114902 2 1 2 -0.424989568541 1 1 2 -0.214045608149353 2 1 2 -0.306116924393253 1 1 2 -0.476975246716608 2 1 2 -0.756750965776553 2 1 2 -0.312822136071239 2 1 2 -0.470827169455528 1 1 2 -0.37315029361616 2 1 2 -0.685155315108113 2 1 2 -0.959530818891534 2 1 2 -0.913595420136271 2 1 2 -0.595438752846243 2 1 2 -0.36802176344941 2 1 2 -0.418544504566566 2 1 2 -0.363048213885529 2 1 2 -0.254338756573215 1 1 2 -0.730957519992839 1 1 2 -0.263046554830887 1 1 2 -0.46094182659418 2 1 2 -0.531059000798822 2 1 2 -0.353280265477637 2 1 2 -0.464342980616116 2 1 2 -0.229724823533327 2 1 2 -0.504945673660676 2 1 2 -0.532493395334424 2 1 2 -0.423801763698387 1 1 2 -0.787113721614964 2 1 2 -0.223160559034952 2 1 2 -0.419595856308554 2 1 2 -0.396648316145306 2 1 2 -0.308908241587595 2 1 2 
-0.627802576140553 2 1 2 -0.64888860721256 2 1 2 -0.738730808101364 2 1 2 -0.658745774579089 1 1 2 -0.679191956616965 1 1 2 -0.278164538209912 1 1 2 -0.205911141408479 2 1 2 -0.241638399787725 2 1 2 -0.24858355547484 1 1 2 -0.73740496979995 2 1 2 -0.247935082999496 2 1 2 -0.826311099617232 2 1 2 -0.590607775557781 1 1 2 -0.351249908681046 1 1 2 -0.370792468725378 2 1 2 -0.389722068994738 1 1 2 -0.251157837165118 2 1 2 -0.663087218040623 2 1 2 -0.454359737429872 2 1 2 -0.435474095638232 2 1 2 -0.284410206592962 2 1 2 -0.344506290138683 2 1 2 -1.01768620078799 1 1 2 -0.331330031800195 1 1 2 -0.277021859762052 1 1 2 -0.347332671037543 1 1 2 -0.286836805838407 1 1 2 -0.340934631295205 2 1 2 -1.22270556676254 1 1 2 -0.360534849486478 1 1 2 -0.359892263518994 2 1 2 -0.552595743599511 2 1 2 -0.301744081404754 2 1 2 -0.416037514267758 2 1 2 -0.541344562283886 2 1 2 -0.579986637345764 2 1 2 -0.221912718773351 2 1 2 -0.465245817277264 2 1 2 -0.474738754014913 2 1 2 -1.70409538281312 2 1 2 -1.02235518855245 2 1 2 -0.301214497598036 1 1 2 -0.991176433131545 2 1 2 -0.401432084705109 2 1 2 -1.11715380433533 2 1 2 -0.81719064511715 2 1 2 -0.549517654685354 1 1 2 -0.251345033237621 2 1 2 -0.357859075575934 2 1 2 -0.90132423193762 2 1 2 -0.272936669704676 1 1 2 -0.455508577827349 2 1 2 -0.861185664428614 2 1 2 -0.266987292082781 2 1 2 -0.578879341650739 2 1 2 -0.649256823455797 2 1 2 -0.418711362089519 2 1 2 -0.433426379919396 2 1 2 -0.642462173639701 1 1 2 -0.406446379518523 2 1 2 -0.290863063788828 1 1 2 -0.395803052313048 2 1 2 -0.311087619708231 2 1 2 -0.279185686505835 1 1 2 -0.412823984876793 1 1 2 -0.314508721309633 2 1 2 -0.417280760034167 2 1 2 -0.357813047077128 2 1 2 -0.256161295149574 2 1 2 -0.240326641914136 2 1 2 -0.469105961018824 2 1 2 -0.23311026462364 2 1 2 -0.219699590325278 2 1 2 -0.267828103451759 2 1 2 -0.324090708482963 1 1 2 -0.882370084866449 2 1 2 -0.296556033418114 2 1 2 -0.535028311840886 1 1 2
diff --git a/R/inst/extdata/cra_exampleData.txt b/R/inst/extdata/cra_exampleData.txt
deleted file mode 100644
index a658ea69..00000000
--- a/R/inst/extdata/cra_exampleData.txt
+++ /dev/null
@@ -1,541 +0,0 @@
-subjID trial_number RT prob reward_var reward_fix outcome types ambig choice
-1 1 2579 0.5 342 50 0 ambiguous 0.75 0 -1 2 1736 0.375 91 50 0 low 0 1 -1 3 1006 0.5 342 50 342 ambiguous 0.5 1 -1 4 1374 0.375 183 50 0 low 0 1 -1 5 1119 0.25 648 50 648 low 0 1 -1 6 1147 0.375 648 50 0 low 0 1 -1 7 1034 0.375 99 50 99 low 0 1 -1 8 953 0.375 98 50 98 low 0 1 -1 9 1114 0.5 54 50 50 ambiguous 0.5 0 -1 10 3243 0.5 99 50 50 ambiguous 0.5 0 -1 11 2955 0.5 340 50 50 ambiguous 0.75 0 -1 12 1105 0.5 91 50 50 ambiguous 0.5 0 -1 13 920 0.375 342 50 0 low 0 1 -1 14 242 0.125 98 50 0 low 0 0 -1 15 1665 0.25 181 50 0 low 0 0 -1 16 801 0.5 183 50 183 ambiguous 0.25 1 -1 17 793 0.5 183 50 0 ambiguous 0.75 0 -1 18 816 0.5 46 50 0 ambiguous 0.5 0 -1 19 1009 0.375 340 50 0 low 0 1 -1 20 191 0.25 46 50 50 low 0 0 -1 21 64 0.25 342 50 0 low 0 0 -1 22 807 0.125 648 50 0 low 0 1 -1 23 1047 0.25 98 50 50 low 0 0 -1 24 401 0.125 91 50 0 low 0 0 -1 25 1009 0.125 342 50 50 low 0 0 -1 26 707 0.5 99 50 0 ambiguous 0.25 1 -1 27 516 0.5 181 50 181 ambiguous 0.25 1 -1 28 66 0.375 48 50 0 low 0 0 -1 29 2206 0.5 340 50 0 ambiguous 0.5 0 -1 30 826 0.5 343 50 343 ambiguous 0.5 1 -1 31 391 0.125 343 50 50 low 0 0 -1 32 293 0.25 54 50 0 low 0 0 -1 33 310 0.5 648 50 648 ambiguous 0.5 1 -1 34 923 0.375 648 50 0 low 0 1 -1 35 744 0.5 48 50 0 ambiguous 0.25 0 -1 36 278 0.5 48 50 0 ambiguous 0.75 0 -1 37 450 0.375 46 50 0 low 0 0
-1 38 267 0.5 654 50 0 ambiguous 0.5 1 -1 39 169 0.5 54 50 50 ambiguous 0.25 0 -1 40 179 0.5 46 50 0 ambiguous 0.75 0 -1 41 142 0.5 648 50 0 ambiguous 0.25 1 -1 42 863 0.5 648 50 0 ambiguous 0.75 0 -1 43 75 0.25 183 50 0 low 0 0 -1 44 183 0.25 91 50 0 low 0 0 -1 45 84 0.125 181 50 50 low 0 0 -1 46 2191 0.375 343 50 0 low 0 1 -1 47 269 0.125 648 50 0 low 0 0 -1 48 396 0.125 99 50 50 low 0 0 -1 49 137 0.5 654 50 0 ambiguous 0.25 1 -1 50 124 0.5 342 50 0 ambiguous 0.25 1 -1 51 1926 0.5 91 50 50 ambiguous 0.75 0 -1 52 96 0.125 183 50 50 low 0 0 -1 53 59 0.5 98 50 0 ambiguous 0.75 0 -1 54 342 0.125 340 50 0 low 0 0 -1 55 157 0.375 54 50 50 low 0 0 -1 56 122 0.5 183 50 0 ambiguous 0.75 0 -1 57 29 0.125 48 50 50 low 0 0 -1 58 985 0.375 183 50 0 low 0 1 -1 59 142 0.5 183 50 183 ambiguous 0.25 1 -1 60 246 0.125 54 50 50 low 0 0 -1 61 254 0.5 99 50 50 ambiguous 0.75 0 -1 62 107 0.5 648 50 648 ambiguous 0.25 1 -1 63 86 0.5 343 50 0 ambiguous 0.25 1 -1 64 50 0.25 48 50 50 low 0 0 -1 65 1507 0.125 183 50 50 low 0 0 -1 66 247 0.25 99 50 50 low 0 0 -1 67 21 0.5 98 50 98 ambiguous 0.25 1 -1 68 276 0.5 183 50 0 ambiguous 0.5 0 -1 69 1697 0.25 343 50 0 low 0 0 -1 70 208 0.5 648 50 648 ambiguous 0.5 1 -1 71 874 0.5 183 50 50 ambiguous 0.5 0 -1 72 4451 0.25 654 50 50 low 0 0 -1 73 255 0.5 181 50 0 ambiguous 0.75 0 -1 74 220 0.5 654 50 50 ambiguous 0.75 0 -1 75 2058 0.5 46 50 0 ambiguous 0.25 0 -1 76 198 0.125 46 50 50 low 0 0 -1 77 293 0.5 91 50 91 ambiguous 0.25 1 -1 78 133 0.5 54 50 50 ambiguous 0.75 0 -1 79 281 0.375 181 50 181 low 0 1 -1 80 63 0.5 48 50 0 ambiguous 0.5 0 -1 81 1945 0.5 181 50 181 ambiguous 0.5 1 -1 82 238 0.25 183 50 50 low 0 0 -1 83 210 0.25 340 50 0 low 0 0 -1 84 3110 0.5 648 50 648 ambiguous 0.75 1 -1 85 660 0.5 343 50 0 ambiguous 0.75 0 -1 86 13 0.5 98 50 0 ambiguous 0.5 0 -1 87 744 0.375 654 50 0 low 0 1 -1 88 3835 0.125 654 50 0 low 0 0 -1 89 72 0.25 648 50 0 low 0 1 -1 90 90 0.5 340 50 340 ambiguous 0.25 1 -2 1 857 0.375 647 50 0 low 0 1 -2 2 437 0.5 99 50 0 ambiguous 0.75 0 -2 3 289 0.5 96 50 0 ambiguous 0.25 1 -2 4 514 0.5 184 50 184 ambiguous 0.75 1 -2 5 233 0.5 336 50 336 ambiguous 0.25 1 -2 6 321 0.375 180 50 0 low 0 1 -2 7 266 0.5 47 50 50 ambiguous 0.75 0 -2 8 288 0.375 181 50 0 low 0 0 -2 9 480 0.25 647 50 50 low 0 0 -2 10 330 0.5 180 50 180 ambiguous 0.25 1 -2 11 421 0.25 180 50 0 low 0 1 -2 12 290 0.5 47 50 0 ambiguous 0.25 0 -2 13 540 0.125 91 50 0 low 0 0 -2 14 71 0.5 91 50 0 ambiguous 0.5 0 -2 15 184 0.5 647 50 0 ambiguous 0.25 0 -2 16 236 0.5 649 50 649 ambiguous 0.5 1 -2 17 364 0.375 336 50 0 low 0 1 -2 18 241 0.375 91 50 0 low 0 1 -2 19 62 0.375 96 50 50 low 0 0 -2 20 456 0.25 649 50 50 low 0 0 -2 21 653 0.5 91 50 0 ambiguous 0.75 1 -2 22 282 0.25 184 50 50 low 0 0 -2 23 42 0.125 49 50 0 low 0 0 -2 24 52 0.125 181 50 0 low 0 0 -2 25 443 0.125 180 50 0 low 0 0 -2 26 353 0.25 181 50 181 low 0 1 -2 27 265 0.375 48 50 0 low 0 0 -2 28 245 0.5 647 50 0 ambiguous 0.5 1 -2 29 286 0.125 647 50 0 low 0 0 -2 30 198 0.25 336 50 0 low 0 0 -2 31 76 0.5 49 50 0 ambiguous 0.5 0 -2 32 261 0.5 340 50 0 ambiguous 0.25 1 -2 33 166 0.5 99 50 50 ambiguous 0.5 0 -2 34 333 0.125 336 50 0 low 0 0 -2 35 99 0.125 340 50 0 low 0 0 -2 36 255 0.5 647 50 0 ambiguous 0.25 0 -2 37 257 0.5 647 50 0 ambiguous 0.75 1 -2 38 199 0.375 184 50 0 low 0 1 -2 39 118 0.375 49 50 0 low 0 0 -2 40 233 0.5 180 50 50 ambiguous 0.5 0 -2 41 49 0.5 49 50 49 ambiguous 0.25 1 -2 42 102 0.25 48 50 0 low 0 0 -2 43 512 0.5 181 50 181 ambiguous 0.75 1 -2 44 20 0.125 336 50 50 low 0 0 -2 45 198 0.5 48 50 50 ambiguous 0.5 0 
-2 46 201 0.25 340 50 50 low 0 0 -2 47 17 0.5 96 50 96 ambiguous 0.5 1 -2 48 74 0.25 47 50 50 low 0 0 -2 49 211 0.25 99 50 50 low 0 0 -2 50 109 0.5 49 50 0 ambiguous 0.75 0 -2 51 410 0.125 649 50 50 low 0 0 -2 52 304 0.5 649 50 50 ambiguous 0.25 0 -2 53 220 0.25 91 50 0 low 0 0 -2 54 21 0.5 336 50 50 ambiguous 0.75 0 -2 55 271 0.5 48 50 0 ambiguous 0.75 0 -2 56 1458 0.125 99 50 50 low 0 0 -2 57 254 0.25 49 50 50 low 0 0 -2 58 216 0.5 340 50 0 ambiguous 0.75 1 -2 59 241 0.375 647 50 0 low 0 1 -2 60 21 0.5 647 50 0 ambiguous 0.75 1 -2 61 8 0.5 340 50 340 ambiguous 0.5 1 -2 62 168 0.5 336 50 0 ambiguous 0.5 1 -2 63 387 0.5 184 50 50 ambiguous 0.5 0 -2 64 266 0.375 99 50 0 low 0 0 -2 65 277 0.5 91 50 0 ambiguous 0.25 0 -2 66 350 0.5 647 50 0 ambiguous 0.5 0 -2 67 358 0.5 47 50 0 ambiguous 0.5 0 -2 68 407 0.5 184 50 0 ambiguous 0.25 0 -2 69 5 0.125 647 50 50 low 0 0 -2 70 369 0.125 48 50 50 low 0 0 -2 71 175 0.375 649 50 649 low 0 1 -2 72 650 0.25 647 50 50 low 0 0 -2 73 459 0.5 336 50 0 ambiguous 0.5 1 -2 74 129 0.5 96 50 0 ambiguous 0.75 0 -2 75 443 0.125 96 50 50 low 0 0 -2 76 398 0.375 340 50 50 low 0 0 -2 77 105 0.5 99 50 0 ambiguous 0.25 1 -2 78 239 0.125 47 50 0 low 0 0 -2 79 76 0.5 48 50 50 ambiguous 0.25 0 -2 80 198 0.25 336 50 336 low 0 1 -2 81 186 0.5 649 50 649 ambiguous 0.75 1 -2 82 130 0.5 181 50 0 ambiguous 0.25 1 -2 83 211 0.5 336 50 336 ambiguous 0.75 1 -2 84 231 0.5 180 50 50 ambiguous 0.75 0 -2 85 75 0.5 181 50 0 ambiguous 0.5 1 -2 86 41 0.375 47 50 50 low 0 0 -2 87 406 0.125 184 50 0 low 0 1 -2 88 367 0.25 96 50 0 low 0 1 -2 89 100 0.5 336 50 336 ambiguous 0.25 1 -2 90 967 0.375 336 50 0 low 0 0 -3 1 2755 0.5 341 50 0 ambiguous 0.25 1 -3 2 1695 0.125 183 50 0 low 0 1 -3 3 1291 0.5 92 50 0 ambiguous 0.75 0 -3 4 940 0.25 341 50 0 low 0 1 -3 5 1716 0.25 342 50 0 low 0 1 -3 6 1165 0.375 653 50 0 low 0 1 -3 7 1306 0.5 343 50 343 ambiguous 0.75 1 -3 8 1815 0.5 182 50 0 ambiguous 0.25 1 -3 9 1467 0.125 653 50 0 low 0 1 -3 10 1420 0.5 343 50 0 ambiguous 0.25 1 -3 11 1625 0.25 653 50 0 low 0 1 -3 12 1157 0.5 646 50 646 ambiguous 0.5 1 -3 13 1225 0.5 183 50 0 ambiguous 0.25 1 -3 14 1438 0.25 183 50 183 low 0 1 -3 15 1683 0.5 653 50 0 ambiguous 0.75 1 -3 16 1838 0.5 50 50 0 ambiguous 0.5 0 -3 17 1618 0.25 50 50 0 low 0 0 -3 18 1708 0.5 183 50 183 ambiguous 0.75 1 -3 19 970 0.5 94 50 0 ambiguous 0.75 1 -3 20 1151 0.5 653 50 0 ambiguous 0.75 1 -3 21 1928 0.5 646 50 0 ambiguous 0.25 1 -3 22 1758 0.5 653 50 0 ambiguous 0.5 1 -3 23 2629 0.125 653 50 0 low 0 1 -3 24 1439 0.5 183 50 0 ambiguous 0.25 1 -3 25 1328 0.5 50 50 0 ambiguous 0.75 0 -3 26 1193 0.5 342 50 0 ambiguous 0.75 1 -3 27 1290 0.5 94 50 94 ambiguous 0.5 1 -3 28 1487 0.5 183 50 0 ambiguous 0.5 1 -3 29 1154 0.5 94 50 50 ambiguous 0.25 0 -3 30 1205 0.375 94 50 50 low 0 0 -3 31 1449 0.25 182 50 0 low 0 1 -3 32 1497 0.5 342 50 0 ambiguous 0.25 1 -3 33 1430 0.25 183 50 0 low 0 1 -3 34 1514 0.375 92 50 0 low 0 0 -3 35 992 0.5 653 50 0 ambiguous 0.25 1 -3 36 1920 0.5 343 50 0 ambiguous 0.5 1 -3 37 1612 0.5 653 50 653 ambiguous 0.5 1 -3 38 1224 0.5 341 50 0 ambiguous 0.5 1 -3 39 549 0.375 342 50 342 low 0 1 -3 40 617 0.5 94 50 0 ambiguous 0.5 0 -3 41 1139 0.125 341 50 341 low 0 1 -3 42 1991 0.375 50 50 50 low 0 1 -3 43 1678 0.125 94 50 0 low 0 0 -3 44 1776 0.375 94 50 50 low 0 0 -3 45 1733 0.125 183 50 0 low 0 1 -3 46 1216 0.25 343 50 0 low 0 1 -3 47 1125 0.375 182 50 0 low 0 1 -3 48 1618 0.125 342 50 0 low 0 1 -3 49 1828 0.375 51 50 50 low 0 0 -3 50 1781 0.25 646 50 0 low 0 1 -3 51 553 0.375 183 50 0 low 0 1 -3 52 899 0.5 183 50 183 
ambiguous 0.75 1 -3 53 388 0.125 47 50 50 low 0 0 -3 54 615 0.5 47 50 0 ambiguous 0.75 0 -3 55 594 0.375 343 50 0 low 0 1 -3 56 346 0.25 47 50 0 low 0 0 -3 57 1069 0.125 343 50 0 low 0 1 -3 58 894 0.5 51 50 50 ambiguous 0.25 0 -3 59 576 0.5 646 50 0 ambiguous 0.75 1 -3 60 592 0.125 182 50 0 low 0 1 -3 61 1508 0.5 92 50 0 ambiguous 0.5 0 -3 62 383 0.375 646 50 646 low 0 1 -3 63 428 0.5 51 50 50 ambiguous 0.5 0 -3 64 432 0.125 51 50 50 low 0 0 -3 65 454 0.375 47 50 0 low 0 0 -3 66 926 0.5 92 50 0 ambiguous 0.25 0 -3 67 346 0.375 341 50 341 low 0 1 -3 68 355 0.5 51 50 0 ambiguous 0.75 0 -3 69 879 0.25 94 50 50 low 0 0 -3 70 827 0.125 92 50 0 low 0 0 -3 71 437 0.5 182 50 182 ambiguous 0.5 1 -3 72 432 0.5 47 50 0 ambiguous 0.25 0 -3 73 411 0.5 341 50 0 ambiguous 0.75 0 -3 74 1125 0.375 183 50 183 low 0 1 -3 75 422 0.125 646 50 0 low 0 1 -3 76 290 0.5 47 50 50 ambiguous 0.5 0 -3 77 366 0.25 94 50 0 low 0 0 -3 78 360 0.25 653 50 0 low 0 1 -3 79 396 0.375 653 50 653 low 0 1 -3 80 408 0.125 94 50 0 low 0 0 -3 81 442 0.5 183 50 0 ambiguous 0.5 1 -3 82 419 0.25 92 50 50 low 0 0 -3 83 1415 0.5 50 50 0 ambiguous 0.25 0 -3 84 1163 0.5 182 50 50 ambiguous 0.75 0 -3 85 717 0.5 94 50 50 ambiguous 0.25 0 -3 86 537 0.5 342 50 0 ambiguous 0.5 1 -3 87 1530 0.5 94 50 50 ambiguous 0.75 0 -3 88 1024 0.25 51 50 0 low 0 0 -3 89 375 0.5 653 50 653 ambiguous 0.25 1 -3 90 777 0.125 50 50 0 low 0 0 -4 1 940 0.5 339 50 339 ambiguous 0.75 1 -4 2 3222 0.5 337 50 337 ambiguous 0.75 1 -4 3 1295 0.25 184 50 0 low 0 1 -4 4 1943 0.5 182 50 0 ambiguous 0.25 1 -4 5 1176 0.375 652 50 652 low 0 1 -4 6 918 0.5 337 50 0 ambiguous 0.25 1 -4 7 1404 0.25 99 50 0 low 0 1 -4 8 1259 0.125 52 50 0 low 0 1 -4 9 1847 0.125 337 50 0 low 0 1 -4 10 952 0.5 182 50 182 ambiguous 0.75 1 -4 11 1341 0.5 52 50 0 ambiguous 0.25 1 -4 12 2206 0.5 93 50 0 ambiguous 0.75 1 -4 13 4242 0.375 182 50 0 low 0 1 -4 14 13020 0.125 339 50 0 low 0 0 -4 15 1142 0.375 179 50 0 low 0 1 -4 16 1633 0.5 339 50 0 ambiguous 0.5 1 -4 17 1077 0.25 94 50 50 low 0 0 -4 18 2892 0.5 48 50 0 ambiguous 0.75 0 -4 19 524 0.5 652 50 652 ambiguous 0.5 1 -4 20 797 0.5 337 50 337 ambiguous 0.5 1 -4 21 1576 0.5 650 50 650 ambiguous 0.5 1 -4 22 1018 0.25 339 50 0 low 0 1 -4 23 1626 0.25 339 50 0 low 0 1 -4 24 766 0.5 94 50 0 ambiguous 0.25 1 -4 25 1089 0.5 94 50 94 ambiguous 0.75 1 -4 26 546 0.5 650 50 650 ambiguous 0.75 1 -4 27 982 0.125 93 50 0 low 0 0 -4 28 1950 0.125 650 50 650 low 0 1 -4 29 663 0.125 179 50 0 low 0 1 -4 30 482 0.375 650 50 650 low 0 1 -4 31 634 0.25 337 50 337 low 0 1 -4 32 466 0.5 94 50 94 ambiguous 0.5 1 -4 33 1844 0.25 182 50 0 low 0 1 -4 34 576 0.375 339 50 0 low 0 1 -4 35 618 0.125 182 50 50 low 0 0 -4 36 659 0.5 48 50 0 ambiguous 0.75 0 -4 37 389 0.125 652 50 0 low 0 1 -4 38 1116 0.375 99 50 0 low 0 1 -4 39 2504 0.25 93 50 0 low 0 1 -4 40 374 0.5 650 50 650 ambiguous 0.25 1 -4 41 342 0.5 179 50 179 ambiguous 0.5 1 -4 42 409 0.375 48 50 48 low 0 1 -4 43 2010 0.125 48 50 50 low 0 0 -4 44 445 0.5 179 50 0 ambiguous 0.25 1 -4 45 412 0.5 184 50 184 ambiguous 0.25 1 -4 46 404 0.375 650 50 0 low 0 1 -4 47 392 0.125 184 50 50 low 0 0 -4 48 1678 0.125 339 50 339 low 0 1 -4 49 428 0.5 339 50 0 ambiguous 0.75 1 -4 50 385 0.5 99 50 99 ambiguous 0.25 1 -4 51 370 0.5 93 50 93 ambiguous 0.25 1 -4 52 537 0.25 48 50 50 low 0 0 -4 53 1625 0.5 52 50 0 ambiguous 0.5 1 -4 54 355 0.25 650 50 0 low 0 1 -4 55 400 0.25 650 50 650 low 0 1 -4 56 381 0.5 48 50 50 ambiguous 0.5 0 -4 57 339 0.5 339 50 339 ambiguous 0.25 1 -4 58 320 0.375 339 50 0 low 0 1 -4 59 375 0.375 48 50 50 low 0 0 -4 
60 525 0.5 184 50 184 ambiguous 0.75 1 -4 61 1071 0.125 99 50 50 low 0 0 -4 62 1389 0.5 652 50 652 ambiguous 0.75 1 -4 63 359 0.5 652 50 0 ambiguous 0.25 1 -4 64 412 0.375 337 50 0 low 0 1 -4 65 434 0.5 650 50 0 ambiguous 0.75 1 -4 66 566 0.5 339 50 0 ambiguous 0.5 1 -4 67 875 0.375 94 50 0 low 0 1 -4 68 361 0.375 93 50 93 low 0 1 -4 69 381 0.125 48 50 0 low 0 0 -4 70 671 0.5 650 50 650 ambiguous 0.5 1 -4 71 900 0.25 48 50 0 low 0 0 -4 72 394 0.5 48 50 0 ambiguous 0.25 0 -4 73 294 0.5 179 50 0 ambiguous 0.75 1 -4 74 249 0.5 93 50 93 ambiguous 0.5 1 -4 75 341 0.375 184 50 0 low 0 1 -4 76 1096 0.5 182 50 0 ambiguous 0.5 1 -4 77 1049 0.25 52 50 0 low 0 0 -4 78 339 0.5 48 50 0 ambiguous 0.25 0 -4 79 418 0.5 650 50 0 ambiguous 0.25 1 -4 80 415 0.375 52 50 50 low 0 0 -4 81 354 0.5 339 50 0 ambiguous 0.25 1 -4 82 1097 0.25 652 50 0 low 0 1 -4 83 580 0.125 94 50 50 low 0 0 -4 84 360 0.5 99 50 0 ambiguous 0.5 1 -4 85 1281 0.25 179 50 0 low 0 1 -4 86 642 0.125 650 50 0 low 0 1 -4 87 279 0.5 99 50 0 ambiguous 0.75 1 -4 88 926 0.5 52 50 0 ambiguous 0.75 0 -4 89 906 0.5 48 50 0 ambiguous 0.5 0 -4 90 326 0.5 184 50 0 ambiguous 0.5 1 -5 1 459 0.5 340 50 340 ambiguous 0.5 1 -5 2 762 0.5 52 50 0 ambiguous 0.5 0 -5 3 623 0.5 97 50 97 ambiguous 0.75 1 -5 4 722 0.5 337 50 337 ambiguous 0.5 1 -5 5 1220 0.5 183 50 0 ambiguous 0.25 1 -5 6 983 0.25 52 50 0 low 0 0 -5 7 919 0.375 650 50 0 low 0 1 -5 8 802 0.375 183 50 183 low 0 1 -5 9 834 0.5 339 50 339 ambiguous 0.75 1 -5 10 810 0.5 52 50 0 ambiguous 0.75 0 -5 11 657 0.5 649 50 649 ambiguous 0.75 1 -5 12 801 0.25 650 50 0 low 0 1 -5 13 803 0.5 50 50 50 ambiguous 0.75 0 -5 14 839 0.125 50 50 0 low 0 0 -5 15 824 0.125 50 50 0 low 0 0 -5 16 950 0.5 50 50 50 ambiguous 0.25 0 -5 17 870 0.5 183 50 183 ambiguous 0.5 1 -5 18 776 0.375 92 50 92 low 0 1 -5 19 854 0.125 97 50 50 low 0 0 -5 20 760 0.5 92 50 92 ambiguous 0.75 1 -5 21 713 0.125 649 50 0 low 0 1 -5 22 821 0.5 337 50 337 ambiguous 0.25 1 -5 23 810 0.5 650 50 0 ambiguous 0.75 1 -5 24 1050 0.375 340 50 0 low 0 1 -5 25 928 0.375 654 50 0 low 0 1 -5 26 725 0.5 50 50 50 ambiguous 0.75 0 -5 27 728 0.25 337 50 0 low 0 1 -5 28 657 0.5 654 50 654 ambiguous 0.25 1 -5 29 703 0.5 92 50 92 ambiguous 0.5 1 -5 30 823 0.375 183 50 183 low 0 1 -5 31 852 0.5 99 50 0 ambiguous 0.5 1 -5 32 638 0.5 649 50 649 ambiguous 0.5 1 -5 33 861 0.5 339 50 0 ambiguous 0.5 1 -5 34 768 0.375 184 50 184 low 0 1 -5 35 641 0.25 340 50 0 low 0 0 -5 36 741 0.375 339 50 339 low 0 1 -5 37 829 0.5 183 50 183 ambiguous 0.5 1 -5 38 782 0.5 340 50 0 ambiguous 0.75 1 -5 39 909 0.5 97 50 97 ambiguous 0.5 1 -5 40 736 0.125 654 50 0 low 0 0 -5 41 883 0.5 649 50 649 ambiguous 0.25 1 -5 42 681 0.25 97 50 50 low 0 0 -5 43 893 0.25 92 50 0 low 0 1 -5 44 810 0.375 50 50 50 low 0 0 -5 45 1219 0.5 52 50 0 ambiguous 0.25 0 -5 46 911 0.25 649 50 50 low 0 0 -5 47 781 0.5 340 50 0 ambiguous 0.25 1 -5 48 763 0.375 337 50 0 low 0 1 -5 49 810 0.5 184 50 184 ambiguous 0.5 1 -5 50 756 0.25 654 50 50 low 0 0 -5 51 735 0.375 97 50 0 low 0 1 -5 52 728 0.375 649 50 0 low 0 1 -5 53 1035 0.5 337 50 0 ambiguous 0.75 1 -5 54 743 0.5 183 50 183 ambiguous 0.25 1 -5 55 857 0.5 99 50 50 ambiguous 0.75 0 -5 56 742 0.5 339 50 339 ambiguous 0.25 1 -5 57 652 0.5 650 50 0 ambiguous 0.5 1 -5 58 777 0.5 92 50 0 ambiguous 0.25 1 -5 59 837 0.5 50 50 50 ambiguous 0.25 0 -5 60 775 0.5 50 50 50 ambiguous 0.5 0 -5 61 872 0.25 183 50 50 low 0 0 -5 62 789 0.5 654 50 654 ambiguous 0.75 1 -5 63 793 0.375 99 50 0 low 0 1 -5 64 888 0.125 650 50 50 low 0 0 -5 65 851 0.5 99 50 50 ambiguous 0.25 0 -5 66 878 
0.25 50 50 0 low 0 0 -5 67 920 0.375 52 50 50 low 0 0 -5 68 772 0.25 183 50 0 low 0 1 -5 69 784 0.25 184 50 0 low 0 1 -5 70 957 0.5 650 50 650 ambiguous 0.25 1 -5 71 746 0.5 183 50 183 ambiguous 0.75 1 -5 72 784 0.5 184 50 0 ambiguous 0.25 1 -5 73 750 0.125 340 50 50 low 0 0 -5 74 746 0.5 50 50 0 ambiguous 0.5 0 -5 75 937 0.125 184 50 50 low 0 0 -5 76 836 0.125 339 50 0 low 0 1 -5 77 720 0.25 50 50 50 low 0 0 -5 78 729 0.25 99 50 0 low 0 1 -5 79 639 0.5 183 50 183 ambiguous 0.75 1 -5 80 784 0.125 99 50 0 low 0 0 -5 81 599 0.25 339 50 0 low 0 1 -5 82 705 0.375 50 50 0 low 0 0 -5 83 817 0.125 183 50 0 low 0 0 -5 84 785 0.5 97 50 0 ambiguous 0.25 1 -5 85 726 0.125 183 50 50 low 0 0 -5 86 1112 0.125 92 50 0 low 0 0 -5 87 799 0.125 52 50 0 low 0 0 -5 88 818 0.5 654 50 0 ambiguous 0.5 1 -5 89 847 0.5 184 50 0 ambiguous 0.75 1 -5 90 778 0.125 337 50 0 low 0 0 -6 1 7265 0.25 648 50 0 low 0 1 -6 2 8033 0.375 651 50 0 low 0 1 -6 3 5415 0.375 338 50 338 low 0 1 -6 4 5183 0.5 337 50 0 ambiguous 0.5 1 -6 5 1609 0.375 54 50 0 low 0 0 -6 6 3036 0.5 646 50 0 ambiguous 0.75 1 -6 7 10138 0.5 49 50 50 ambiguous 0.75 0 -6 8 3121 0.375 648 50 0 low 0 1 -6 9 2224 0.25 176 50 50 low 0 0 -6 10 3415 0.125 49 50 0 low 0 0 -6 11 3309 0.5 646 50 0 ambiguous 0.25 1 -6 12 5624 0.25 184 50 184 low 0 1 -6 13 5032 0.5 54 50 50 ambiguous 0.75 0 -6 14 5991 0.5 53 50 0 ambiguous 0.5 0 -6 15 2220 0.25 176 50 0 low 0 1 -6 16 665 0.25 49 50 0 low 0 0 -6 17 6233 0.125 651 50 651 low 0 1 -6 18 6381 0.125 91 50 0 low 0 0 -6 19 15254 0.5 338 50 0 ambiguous 0.25 1 -6 20 8786 0.375 337 50 0 low 0 1 -6 21 11423 0.5 91 50 91 ambiguous 0.25 1 -6 22 5114 0.125 99 50 50 low 0 0 -6 23 2545 0.125 53 50 50 low 0 0 -6 24 13957 0.5 341 50 50 ambiguous 0.5 0 -6 25 1837 0.5 648 50 0 ambiguous 0.25 1 -6 26 4679 0.375 91 50 0 low 0 1 -6 27 2697 0.125 91 50 50 low 0 0 -6 28 12661 0.5 651 50 0 ambiguous 0.75 1 -6 29 1942 0.5 99 50 99 ambiguous 0.5 1 -6 30 3170 0.5 99 50 99 ambiguous 0.25 1 -6 31 6455 0.375 99 50 99 low 0 1 -6 32 3171 0.25 651 50 0 low 0 1 -6 33 5667 0.375 176 50 0 low 0 1 -6 34 4606 0.5 91 50 0 ambiguous 0.75 1 -6 35 9317 0.125 646 50 0 low 0 0 -6 36 1734 0.5 651 50 651 ambiguous 0.5 1 -6 37 6134 0.5 91 50 0 ambiguous 0.5 1 -6 38 1547 0.375 91 50 0 low 0 1 -6 39 729 0.5 176 50 176 ambiguous 0.25 1 -6 40 4438 0.5 49 50 0 ambiguous 0.25 0 -6 41 4940 0.25 54 50 50 low 0 0 -6 42 1126 0.5 49 50 50 ambiguous 0.5 0 -6 43 1726 0.5 176 50 50 ambiguous 0.75 0 -6 44 611 0.5 341 50 341 ambiguous 0.25 1 -6 45 982 0.5 91 50 91 ambiguous 0.25 1 -6 46 3389 0.5 184 50 184 ambiguous 0.5 1 -6 47 372 0.375 184 50 0 low 0 1 -6 48 54 0.125 341 50 0 low 0 1 -6 49 5306 0.25 91 50 50 low 0 0 -6 50 806 0.25 91 50 50 low 0 0 -6 51 2225 0.25 341 50 0 low 0 0 -6 52 1382 0.5 651 50 0 ambiguous 0.25 1 -6 53 4960 0.5 176 50 0 ambiguous 0.25 1 -6 54 641 0.375 646 50 0 low 0 1 -6 55 1525 0.5 646 50 646 ambiguous 0.5 1 -6 56 1188 0.25 646 50 0 low 0 0 -6 57 2095 0.375 53 50 50 low 0 0 -6 58 346 0.125 54 50 50 low 0 0 -6 59 4855 0.25 338 50 50 low 0 0 -6 60 4182 0.25 337 50 50 low 0 0 -6 61 788 0.125 338 50 50 low 0 0 -6 62 2593 0.5 91 50 0 ambiguous 0.75 0 -6 63 163 0.125 184 50 50 low 0 0 -6 64 965 0.5 184 50 0 ambiguous 0.25 1 -6 65 927 0.5 176 50 0 ambiguous 0.5 1 -6 66 2085 0.125 176 50 50 low 0 0 -6 67 826 0.375 341 50 341 low 0 1 -6 68 5905 0.5 54 50 50 ambiguous 0.5 0 -6 69 27 0.375 49 50 0 low 0 0 -6 70 2324 0.25 53 50 0 low 0 0 -6 71 606 0.5 648 50 648 ambiguous 0.5 1 -6 72 8977 0.5 53 50 50 ambiguous 0.25 0 -6 73 3788 0.125 337 50 0 low 0 0 -6 74 3013 0.375 
176 50 176 low 0 1 -6 75 732 0.5 53 50 50 ambiguous 0.75 0 -6 76 2932 0.5 648 50 0 ambiguous 0.75 1 -6 77 520 0.5 337 50 0 ambiguous 0.25 1 -6 78 4407 0.25 99 50 0 low 0 0 -6 79 5193 0.5 54 50 50 ambiguous 0.25 0 -6 80 3191 0.5 91 50 0 ambiguous 0.5 1 -6 81 390 0.5 176 50 0 ambiguous 0.5 1 -6 82 7450 0.5 341 50 341 ambiguous 0.75 1 -6 83 2018 0.5 337 50 50 ambiguous 0.75 0 -6 84 1206 0.5 184 50 0 ambiguous 0.75 0 -6 85 1363 0.125 648 50 0 low 0 0 -6 86 3957 0.5 338 50 0 ambiguous 0.5 1 -6 87 6344 0.125 176 50 0 low 0 0 -6 88 5897 0.5 99 50 0 ambiguous 0.75 0 -6 89 1421 0.5 338 50 50 ambiguous 0.75 0 -6 90 885 0.5 176 50 0 ambiguous 0.75 0
\ No newline at end of file
diff --git a/R/inst/extdata/dbdm_exampleData.txt b/R/inst/extdata/dbdm_exampleData.txt
deleted file mode 100644
index 0bb2520d..00000000
--- a/R/inst/extdata/dbdm_exampleData.txt
+++ /dev/null
@@ -1,15001 +0,0 @@
-subjID opt1hprob opt2hprob opt1hval opt1lval opt2hval opt2lval choice
-1 0.9 0.5 -14 -30 30 -43 2 -1 0.3 0.3 18 -15 46 36 2 -1 0.5 0.5 -26 -44 10 -5 2 -1 0.2 0.4 -8 -43 26 17 2 -1 0.3 0.3 30 -37 44 24 2 -1 0.6 0.2 46 -26 10 -14 1 -1 0.8 0.9 48 -49 -12 -30 1 -1 0.8 0.1 -8 -16 48 0 2 -1 0.2 0.5 27 -30 28 27 2 -1 0.1 0.3 -3 -48 2 -34 2 -1 0.6 0.4 -30 -39 49 -31 2 -1 0.1 0.9 29 -4 8 7 2 -1 0.9 0.9 12 -21 27 -13 1 -1 0.9 0.1 -1 -39 43 11 2 -1 0.5 0.2 22 -18 22 -12 2 -1 0.5 0.2 -9 -50 -4 -12 2 -1 0.4 0.2 -22 -45 -12 -49 2 -1 0.2 0.7 39 -4 19 -36 2 -1 0.8 0.3 32 -24 3 -25 1 -1 0.7 0.6 41 0 38 31 2 -1 0.7 0.4 28 5 43 -4 2 -1 0.5 0.3 28 -24 33 -22 1 -1 0.6 0.8 23 -15 -7 -35 1 -1 0.1 0.2 25 -42 -31 -35 1 -1 0.1 0.5 49 -34 3 -9 2 -1 0.6 0.5 38 -16 -42 -49 1 -1 0.6 0.5 6 -46 21 -3 2 -1 0.5 0.9 -18 -50 32 -42 2 -1 0.8 0.3 9 4 42 13 2 -1 0.3 0.8 41 34 -23 -25 1 -1 0.1 0.6 38 30 21 -7 1 -1 0.4 0.3 21 -32 -3 -40 1 -1 0.8 0.6 43 4 33 -40 1 -1 0.2 0.4 2 -4 5 -30 1 -1 0.5 0.9 -11 -37 6 -28 2 -1 0.8 0.8 31 -4 31 9 2 -1 0.2 0.6 33 -26 -4 -44 2 -1 0.8 0.3 43 14 49 9 2 -1 0.8 0.4 33 20 30 18 2 -1 0.8 0.2 19 -35 -5 -41 1 -1 0.4 0.8 7 -9 16 -11 2 -1 0.1 0.3 -8 -41 34 -35 2 -1 0.8 0.5 47 15 8 7 1 -1 0.4 0.9 -1 -8 22 -6 2 -1 0.8 0.8 16 6 34 -43 1 -1 0.5 0.4 22 -44 1 -29 1 -1 0.5 0.4 17 -22 -15 -20 1 -1 0.6 0.7 14 -33 -3 -14 1 -1 0.3 0.6 19 10 -34 -45 1 -1 0.4 0.6 30 18 35 28 2 -1 0.3 0.4 -4 -5 43 -13 2 -1 0.7 0.4 -7 -43 11 0 2 -1 0.4 0.7 24 5 14 -47 1 -1 0.3 0.7 42 14 22 13 1 -1 0.7 0.9 44 8 32 -11 1 -1 0.9 0.4 47 36 -36 -41 1 -1 0.3 0.9 7 -47 15 -26 2 -1 0.9 0.8 4 -39 47 0 2 -1 0.6 0.8 48 -8 28 -46 1 -1 0.6 0.5 47 35 38 12 2 -1 0.9 0.9 35 9 11 -37 1 -1 0.4 0.1 30 -16 -29 -40 1 -1 0.6 0.7 17 -31 -32 -39 1 -1 0.6 0.1 40 32 31 11 1 -1 0.1 0.1 -20 -38 49 -17 2 -1 0.7 0.6 36 -2 -42 -48 1 -1 0.5 0.5 9 -39 -1 -18 2 -1 0.6 0.5 46 -48 49 33 2 -1 0.6 0.7 -34 -46 49 38 2 -1 0.8 0.5 47 39 -5 -44 1 -1 0.5 0.9 41 -32 44 -35 2 -1 0.8 0.4 50 -41 38 6 1 -1 0.8 0.5 14 -24 -30 -43 1 -1 0.7 0.7 27 -32 17 -3 2 -1 0.6 0.1 48 -4 8 4 1 -1 0.6 0.3 10 -10 -22 -30 1 -1 0.3 0.7 3 -45 0 -39 2 -1 0.5 0.4 41 33 45 12 1 -1 0.5 0.1 39 -32 -34 -41 1 -1 0.9 0.5 40 33 10 8 1 -1 0.2 0.3 -2 -17 -4 -35 1 -1 0.6 0.2 25 -13 45 5 1 -1 0.2 0.1 10 -7 19 -23 1 -1 0.9 0.1 49 -21 29 25 2 -1 0.8 0.1 45 19 39 -44 1 -1 0.7 0.3 48 40 48 1 1 -1 0.8 0.7 37 -37 41 28 2 -1 0.3 0.8 26 -20 35 30 2 -1 0.2 0.2 0 -17 14 -36 2 -1 0.8 0.2 20 -19 -4 -29 1 -1 0.5 0.7 -7 -11 -16 -29 2 -1 0.8 0.4 48 -27 -1 -39 1 -1 0.3 0.9 15 -33 18 -14 2 -1 0.6 0.2 -12 -21 -34 -44 1 -1 0.5 0.7 26 1 10 -6 1 -1 0.9 0.1 35 -48 35 -9 1 -1 0.5 0.6 32 1 -4 -5 1 -1 0.7 0.7 28 2 42 -19 2 -1 0.6 0.6 20 3 42 7 2 -1 0.2 0.4 36 -25 16
-10 0.2 0.2 10 -6 35 -15 2 -10 0.5 0.6 26 -40 -28 -48 1 -10 0.2 0.5 8 -4 -12 -46 1 -10 0.6 0.5 46 30 45 -18 1 -10 0.7 0.9 36 29 -12 -18 1 -10 0.1 0.9 40 4 -29 -36 1 -10 0.4 0.9 26 5 26 -43 1 -10 0.7 0.6 36 -46 49 -7 2 -10 0.2 0.1 49 1 45 -13 1 -10 0.3 0.8 -27 -48 0 -36 2 -10 0.5 0.9 -2 -9 40 -4 2 -10 0.7 0.4 1 -42 35 34 2 -10 0.7 0.5 -27 -42 -23 -25 2 -10 0.3 0.2 11 -23 19 -25 2 -10 0.7 0.6 50 -43 -25 -39 1 -10 0.7 0.4 46 35 48 5 1 -10 0.6 0.8 19 -18 30 13 2 -10 0.2 0.8 42 -48 40 0 2 -10 0.6 0.6 42 14 47 44 1 -10 0.9 0.7 47 -27 10 1 1 -10 0.4 0.6 -4 -34 34 16 2 -10 0.6 0.7 -26 -47 24 -3 2 -10 0.9 0.3 0 -41 17 -19 2 -10 0.8 0.5 8 -22 50 22 2 -10 0.7 0.4 29 -42 29 24 2 -10 0.5 0.2 -6 -42 14 -6 2 -10 0.4 0.7 49 -9 -7 -48 1 -10 0.4 0.9 16 8 26 3 2 -10 0.9 0.9 17 -13 -9 -49 1 -10 0.7 0.1 10 -40 12 -11 2 -10 0.1 0.1 -18 -30 -13 -45 1 -10 0.9 0.3 -22 -48 -6 -46 1 -10 0.1 0.3 49 3 -29 -43 1 -10 0.9 0.3 25 -45 32 21 2 -10 0.3 0.1 15 -48 14 -35 2 -10 0.7 0.9 44 42 43 -2 1 -10 0.9 0.7 33 -37 40 -37 2 -10 0.2 0.2 23 -44 49 34 2 -10 0.3 0.4 16 -34 17 -23 2 -10 0.9 0.3 46 11 48 -39 1 -10 0.4 0.9 29 5 9 5 2 -10 0.5 0.5 45 -31 48 22 2 -10 0.6 0.2 40 18 13 -15 1 -10 0.9 0.6 45 0 1 -24 1 -10 0.6 0.3 -14 -41 -21 -31 2 -10 0.3 0.3 33 -23 40 -33 1 -10 0.2 0.7 21 -1 44 16 2 -10 0.2 0.6 -25 -47 31 -17 2 -10 0.7 0.1 4 -38 33 18 2 -10 0.4 0.6 26 -38 31 -9 2 -10 0.8 0.1 27 -37 6 -38 1 -10 0.7 0.8 20 -48 12 -17 2 -10 0.5 0.4 32 -26 47 19 2 -10 0.2 0.2 42 4 0 -44 1 -10 0.1 0.5 6 -1 50 41 2 -10 0.3 0.7 39 11 50 -29 1 -10 0.4 0.1 -1 -50 50 -19 1 -10 0.9 0.3 -2 -24 22 -41 1 -10 0.3 0.1 -32 -33 36 24 2 -10 0.2 0.1 2 -45 -27 -32 2 -10 0.6 0.5 31 5 -5 -11 1 -10 0.9 0.9 32 -19 -7 -27 1 -10 0.4 0.8 10 -38 -17 -35 1 -10 0.9 0.5 25 -32 -37 -40 1 -10 0.1 0.9 23 13 -8 -23 1 -10 0.6 0.1 29 1 22 14 1 -10 0.4 0.1 30 -3 24 -35 1 -10 0.5 0.3 20 -48 40 6 2 -10 0.5 0.5 -33 -38 18 -15 2 -10 0.8 0.8 33 11 39 20 1 -10 0.8 0.7 29 -30 28 25 2 -10 0.2 0.2 -29 -30 -4 -43 2 -10 0.2 0.4 25 -13 35 -5 2 -10 0.5 0.2 41 22 13 -24 1 -10 0.1 0.6 -14 -47 40 2 2 -10 0.9 0.2 24 -43 13 -14 1 -10 0.8 0.8 3 -8 -3 -50 1 -10 0.5 0.8 -5 -16 38 -49 2 -10 0.3 0.4 23 12 -29 -41 1 -10 0.9 0.1 39 -46 27 -3 1 -10 0.2 0.8 43 17 7 -23 1 -10 0.5 0.8 17 13 -16 -30 1 -10 0.3 0.6 29 10 20 -3 1 -10 0.7 0.7 46 -11 27 -42 1 -10 0.8 0.5 32 -9 49 -4 1 -10 0.7 0.6 6 -28 45 -24 2 -10 0.1 0.1 38 -6 14 -15 1 -10 0.8 0.4 20 11 45 -18 1 -10 0.2 0.9 -6 -50 27 8 2 -10 0.4 0.3 30 -43 10 -9 2 -10 0.9 0.4 -17 -50 22 -19 2 -10 0.2 0.6 -31 -38 22 -32 2 -10 0.6 0.9 0 -48 -22 -43 1 -10 0.5 0.7 50 49 35 -5 1 -10 0.4 0.2 32 -48 50 1 2 -10 0.6 0.9 48 35 18 0 1 -10 0.8 0.8 41 -17 -16 -40 1 -10 0.9 0.5 12 -28 28 -17 2 -10 0.7 0.7 44 9 32 -28 1 -10 0.2 0.6 3 -9 46 27 2 -10 0.9 0.9 -23 -26 15 -23 2 -10 0.7 0.8 35 17 43 1 2 -10 0.8 0.2 50 -35 -18 -40 1 -10 0.2 0.1 23 -43 23 -11 2 -10 0.3 0.4 28 -17 43 -15 2 -10 0.1 0.4 -10 -14 30 10 2 -10 0.2 0.8 3 -32 38 25 2 -10 0.9 0.7 -8 -41 32 -12 2 -10 0.5 0.4 -5 -9 16 -24 1 -10 0.5 0.7 17 14 26 -32 1 -10 0.5 0.4 20 -25 11 -1 1 -10 0.4 0.7 -27 -41 -19 -49 2 -10 0.8 0.7 35 23 28 -20 1 -10 0.6 0.1 50 22 50 -18 1 -10 0.6 0.9 -14 -40 15 8 2 -10 0.2 0.2 37 -41 49 -6 2 -10 0.7 0.8 -4 -31 8 -34 2 -10 0.5 0.9 33 -8 18 -35 2 -10 0.6 0.4 25 -10 15 -34 1 -10 0.2 0.8 38 6 -48 -50 1 -10 0.6 0.1 49 0 -8 -18 1 -10 0.2 0.6 0 -33 -24 -27 1 -10 0.3 0.4 35 -7 39 -38 1 -10 0.2 0.5 10 7 25 0 2 -10 0.5 0.1 43 12 44 -13 1 -10 0.2 0.9 -19 -24 -14 -47 2 -10 0.5 0.5 27 14 -29 -47 1 -10 0.6 0.8 43 -35 -9 -38 1 -10 0.1 0.4 20 -21 32 -47 2 -10 0.7 0.7 0 -23 12 3 2 -10 0.3 0.1 6 -35 40 16 
2 -10 0.4 0.4 13 -44 46 27 2 -10 0.7 0.9 23 -12 38 8 2 -10 0.3 0.4 31 3 9 -27 1 -10 0.4 0.6 22 -40 42 32 2 -10 0.5 0.3 47 -21 -13 -34 1 -10 0.2 0.1 33 -33 -2 -23 1 -10 0.3 0.3 29 -10 29 -16 2 -10 0.4 0.2 4 -15 -23 -50 1 -10 0.9 0.9 -23 -27 40 -12 2 -10 0.7 0.4 39 -24 39 -33 1 -10 0.8 0.6 -9 -23 50 18 2 -10 0.1 0.8 31 21 13 -41 1 -10 0.3 0.6 43 -31 4 -31 2 -10 0.2 0.8 13 -14 42 -45 2 -10 0.3 0.3 45 14 -31 -38 1 -10 0.3 0.5 18 -38 3 -32 2 -10 0.4 0.7 -9 -14 7 -31 2 -10 0.8 0.4 46 -37 33 9 1 -10 0.6 0.7 17 -23 3 -38 1 -10 0.3 0.7 -4 -37 38 0 2 -10 0.7 0.6 14 -47 40 -26 2 -10 0.1 0.6 36 -16 38 19 2 -10 0.5 0.5 6 -11 47 7 2 -10 0.8 0.2 40 33 28 27 1 -10 0.6 0.3 -14 -37 30 0 2 -10 0.1 0.3 40 -5 -11 -47 1 -10 0.2 0.6 23 -14 5 -22 1 -10 0.3 0.5 29 -15 20 -20 1 -10 0.7 0.2 -30 -46 -2 -14 2 -10 0.5 0.4 -15 -16 21 -36 1 -10 0.8 0.4 29 10 -9 -18 1 -10 0.7 0.9 34 27 0 -7 1 -10 0.1 0.5 8 -11 24 -16 2 -10 0.1 0.1 11 -25 49 28 2 -10 0.3 0.2 45 -16 33 -5 2 -10 0.4 0.4 -4 -19 -10 -35 2 -10 0.6 0.1 48 -36 -19 -35 1 -10 0.8 0.1 -8 -20 41 -30 1 -10 0.6 0.5 -6 -26 16 11 2 -10 0.8 0.1 7 -10 27 -36 1 -10 0.6 0.5 -21 -36 48 -9 2 -10 0.6 0.1 39 -42 30 -25 1 -10 0.3 0.4 36 -29 43 -22 1 -10 0.3 0.4 45 12 -14 -19 1 -10 0.8 0.9 19 -23 6 -3 1 -10 0.7 0.6 45 -39 13 -50 1 -10 0.6 0.8 27 -33 28 22 2 -10 0.1 0.1 -17 -34 18 -7 2 -10 0.3 0.8 40 -4 33 31 2 -10 0.4 0.6 34 -19 0 -10 1 -10 0.8 0.5 0 -5 -7 -23 1 -10 0.6 0.2 18 7 45 -36 1 -10 0.5 0.6 20 -5 31 -3 2 -10 0.9 0.3 20 -19 35 -25 1 -10 0.6 0.7 39 -30 39 12 2 -10 0.2 0.6 25 23 35 13 1 -10 0.7 0.8 -7 -30 32 14 2 -10 0.7 0.1 20 -50 5 -39 1 -10 0.4 0.6 33 12 -21 -29 1 -10 0.6 0.9 44 37 30 -45 1 -10 0.2 0.9 50 -28 39 33 2 -10 0.8 0.2 35 27 5 -21 1 -10 0.7 0.9 1 -27 2 -5 2 -10 0.3 0.7 -37 -38 -1 -3 2 -11 0.6 0.8 -4 -11 18 1 2 -11 0.6 0.3 38 5 17 -11 1 -11 0.5 0.7 33 -9 -13 -17 1 -11 0.1 0.2 -26 -49 -15 -29 2 -11 0.6 0.6 -19 -24 11 -20 2 -11 0.5 0.8 25 14 11 -43 1 -11 0.7 0.5 22 -2 -24 -37 1 -11 0.4 0.8 -34 -35 23 -29 2 -11 0.8 0.3 -1 -6 14 1 2 -11 0.9 0.5 25 -32 15 -1 1 -11 0.8 0.1 16 -26 50 -35 1 -11 0.1 0.7 48 -4 42 -8 2 -11 0.1 0.1 8 -39 8 -34 1 -11 0.6 0.6 35 -11 45 -46 1 -11 0.7 0.8 49 -26 33 1 1 -11 0.6 0.3 50 12 49 -35 1 -11 0.5 0.8 37 -38 -11 -35 1 -11 0.5 0.6 30 -46 -14 -22 1 -11 0.6 0.5 48 -2 4 -26 1 -11 0.3 0.6 40 -4 48 10 2 -11 0.3 0.2 13 -8 13 4 2 -11 0.7 0.8 -19 -43 21 -5 2 -11 0.8 0.4 -31 -32 10 -20 2 -11 0.7 0.4 8 -33 -46 -49 1 -11 0.4 0.9 34 26 40 -33 1 -11 0.4 0.4 -12 -50 -4 -9 2 -11 0.1 0.5 50 -31 50 46 2 -11 0.9 0.8 -4 -34 4 -50 2 -11 0.3 0.5 23 19 32 -30 1 -11 0.6 0.2 -10 -31 0 -48 2 -11 0.7 0.8 30 -7 44 2 2 -11 0.4 0.1 49 27 7 -22 1 -11 0.4 0.9 18 -4 25 -16 2 -11 0.4 0.3 32 -33 46 -24 1 -11 0.9 0.8 24 -14 15 -39 1 -11 0.4 0.6 46 43 33 -36 1 -11 0.1 0.6 47 -32 40 35 2 -11 0.9 0.5 17 -31 -2 -12 1 -11 0.4 0.4 43 20 -13 -23 1 -11 0.6 0.2 17 -34 -24 -45 1 -11 0.6 0.1 9 8 47 19 1 -11 0.9 0.5 32 21 10 -17 1 -11 0.3 0.3 37 33 -22 -36 1 -11 0.8 0.3 4 -6 39 24 2 -11 0.1 0.9 14 -20 5 -15 2 -11 0.3 0.1 -4 -37 45 11 2 -11 0.2 0.5 -20 -43 44 -49 2 -11 0.6 0.5 47 -26 15 -39 1 -11 0.3 0.9 -17 -27 -11 -20 2 -11 0.1 0.9 35 -14 49 14 2 -11 0.1 0.1 13 -34 49 -13 2 -11 0.8 0.7 30 -43 21 -19 1 -11 0.3 0.4 -14 -16 -7 -47 2 -11 0.3 0.9 13 -36 37 -44 1 -11 0.5 0.8 -8 -27 20 -25 2 -11 0.8 0.2 22 5 1 -5 1 -11 0.7 0.1 27 -25 -13 -45 1 -11 0.4 0.5 50 18 -2 -7 1 -11 0.4 0.5 31 -9 17 8 2 -11 0.7 0.8 7 0 24 -38 2 -11 0.6 0.4 9 -39 -28 -34 1 -11 0.4 0.4 39 -42 -4 -13 1 -11 0.3 0.4 28 -43 16 -32 1 -11 0.7 0.5 50 -29 32 -40 1 -11 0.1 0.1 18 -33 40 26 2 -11 0.7 0.7 48 -27 10 -44 1 -11 0.9 
0.8 47 3 -1 -46 1 -11 0.3 0.7 2 -33 -28 -48 1 -11 0.6 0.5 16 2 -11 -27 1 -11 0.6 0.1 3 -20 11 -43 1 -11 0.8 0.6 7 -27 -13 -24 1 -11 0.5 0.9 22 -13 5 -33 1 -11 0.3 0.6 40 21 39 -25 1 -11 0.4 0.3 13 -12 13 10 2 -11 0.1 0.6 12 -37 5 -5 2 -11 0.8 0.7 44 4 12 -12 1 -11 0.1 0.7 37 14 39 -4 1 -11 0.7 0.2 22 4 20 19 1 -11 0.4 0.7 46 42 6 -26 1 -11 0.5 0.2 10 -3 13 -21 1 -11 0.3 0.3 16 6 34 -12 1 -11 0.6 0.2 17 11 36 -31 1 -11 0.2 0.3 5 1 15 -15 1 -11 0.4 0.7 43 -10 40 -40 2 -11 0.8 0.8 -12 -23 1 -26 2 -11 0.1 0.3 42 41 48 -14 1 -11 0.1 0.8 -12 -22 -17 -30 2 -11 0.5 0.2 18 -29 39 -7 1 -11 0.8 0.2 24 17 30 -30 1 -11 0.6 0.3 -2 -4 15 -4 2 -11 0.8 0.1 2 -46 5 -38 1 -11 0.5 0.7 -34 -44 32 31 2 -11 0.9 0.2 18 -24 11 -22 1 -11 0.3 0.8 15 -17 -22 -31 1 -11 0.6 0.3 39 12 9 -40 1 -11 0.9 0.1 36 32 35 32 1 -11 0.1 0.1 41 4 44 -47 1 -11 0.7 0.2 18 -13 33 27 2 -11 0.4 0.9 46 -41 26 -10 2 -11 0.3 0.2 38 -43 -23 -41 1 -11 0.1 0.6 -23 -24 37 0 2 -11 0.6 0.1 33 -45 35 18 2 -11 0.5 0.5 5 -47 -16 -45 1 -11 0.8 0.9 33 -38 42 -15 2 -11 0.4 0.1 -43 -47 46 25 2 -11 0.5 0.1 -18 -32 33 -21 2 -11 0.1 0.6 19 15 -23 -40 1 -11 0.2 0.6 31 -31 29 2 2 -11 0.5 0.1 8 -47 42 15 2 -11 0.2 0.7 39 -20 -1 -47 1 -11 0.1 0.3 9 -17 -35 -36 1 -11 0.6 0.3 37 35 31 -11 1 -11 0.6 0.8 49 -49 12 -31 1 -11 0.9 0.2 9 -33 25 -23 1 -11 0.6 0.5 25 4 9 -31 1 -11 0.5 0.8 20 -7 45 18 2 -11 0.7 0.6 48 -1 34 23 2 -11 0.7 0.3 23 15 -1 -16 1 -11 0.4 0.3 -6 -43 21 13 2 -11 0.9 0.3 -32 -38 20 6 2 -11 0.2 0.3 -27 -32 30 -48 1 -11 0.7 0.9 46 6 31 27 1 -11 0.8 0.5 -16 -39 41 3 2 -11 0.8 0.6 22 10 50 -26 1 -11 0.8 0.2 28 -23 8 -40 1 -11 0.3 0.3 33 -33 18 -38 2 -11 0.7 0.7 34 -8 23 -23 1 -11 0.6 0.5 29 -12 29 -10 2 -11 0.2 0.1 -23 -25 41 -28 2 -11 0.2 0.4 -14 -44 50 44 2 -11 0.9 0.4 4 -11 9 -19 1 -11 0.3 0.4 -6 -44 50 -35 2 -11 0.2 0.2 15 -12 24 9 2 -11 0.3 0.4 39 32 30 -40 1 -11 0.8 0.5 42 29 14 -28 1 -11 0.7 0.7 -11 -21 39 -31 2 -11 0.9 0.8 40 -13 27 -1 2 -11 0.9 0.9 -30 -41 -27 -50 1 -11 0.8 0.9 36 -30 -11 -36 1 -11 0.6 0.9 -31 -43 17 -43 2 -11 0.4 0.1 28 -39 44 -11 1 -11 0.8 0.5 47 15 47 18 2 -11 0.8 0.6 -11 -46 45 7 2 -11 0.9 0.3 16 -45 10 4 1 -11 0.5 0.6 0 -31 14 -8 2 -11 0.7 0.2 -7 -25 12 -31 2 -11 0.6 0.2 14 -41 -29 -44 1 -11 0.6 0.7 21 -8 35 18 2 -11 0.4 0.3 2 -31 11 2 2 -11 0.7 0.5 44 -43 -49 -50 1 -11 0.4 0.8 3 0 -3 -12 2 -11 0.4 0.4 31 11 -36 -37 1 -11 0.5 0.6 11 -50 -20 -50 1 -11 0.3 0.3 49 -15 49 -7 2 -11 0.1 0.3 28 -46 25 -46 2 -11 0.9 0.7 19 -6 41 -42 1 -11 0.1 0.8 2 -25 24 -1 2 -11 0.4 0.7 44 -40 7 -19 2 -11 0.7 0.1 4 -4 47 38 2 -11 0.4 0.8 10 -2 26 -47 2 -11 0.2 0.5 23 -12 -12 -49 1 -11 0.2 0.8 36 23 20 7 1 -11 0.4 0.5 46 24 28 -44 1 -11 0.3 0.6 29 -41 43 28 2 -11 0.8 0.2 5 -47 43 21 2 -11 0.5 0.4 13 -18 20 -33 1 -11 0.7 0.5 37 -27 33 18 2 -11 0.6 0.2 48 -44 17 14 2 -11 0.5 0.5 15 11 47 18 2 -11 0.1 0.5 35 -16 39 23 2 -11 0.4 0.4 3 2 38 3 2 -11 0.4 0.7 31 -13 7 -20 2 -11 0.5 0.4 -10 -25 15 8 2 -11 0.2 0.5 -22 -45 20 -21 2 -11 0.9 0.1 25 -5 -11 -24 1 -11 0.4 0.2 37 -30 5 -16 1 -11 0.5 0.6 24 -37 37 -49 2 -11 0.5 0.4 35 -15 -35 -49 1 -11 0.6 0.7 43 -22 30 3 2 -11 0.3 0.3 32 -32 48 -23 1 -11 0.2 0.3 3 -1 -40 -43 1 -11 0.7 0.6 28 8 0 -21 1 -11 0.2 0.8 38 -36 0 -27 2 -11 0.7 0.9 -17 -39 -2 -29 2 -11 0.3 0.7 -22 -40 -17 -33 1 -11 0.9 0.1 2 -14 -17 -28 1 -11 0.4 0.5 -9 -33 -27 -35 2 -11 0.4 0.1 33 -39 43 32 2 -11 0.6 0.7 36 -48 -4 -25 1 -11 0.3 0.7 37 27 13 -3 1 -11 0.1 0.1 -1 -9 20 -31 1 -11 0.4 0.4 -9 -42 -32 -45 2 -11 0.7 0.1 2 -23 9 0 2 -11 0.7 0.4 11 -2 -36 -45 1 -11 0.1 0.3 -13 -23 44 -37 2 -11 0.4 0.3 -40 -48 4 -45 2 -11 0.7 0.5 19 -46 21 -43 1 
-11 0.8 0.6 14 8 22 -24 2 -11 0.4 0.6 7 -1 41 -41 2 -11 0.3 0.2 46 43 38 -39 1 -11 0.4 0.9 -21 -40 49 -42 2 -11 0.3 0.6 7 -44 40 20 2 -11 0.1 0.3 50 -13 47 -30 1 -11 0.1 0.4 -7 -15 23 -30 2 -11 0.6 0.2 35 12 40 20 1 -11 0.5 0.3 -1 -17 -11 -32 2 -11 0.3 0.8 46 32 27 -1 2 -11 0.7 0.6 25 -36 -2 -16 1 -11 0.4 0.4 8 -20 40 37 2 -11 0.4 0.7 6 -14 -1 -40 2 -11 0.8 0.7 35 -45 37 -20 1 -11 0.5 0.9 -14 -23 -6 -19 2 -11 0.1 0.2 38 30 47 -25 1 -11 0.5 0.9 12 -10 25 -15 2 -11 0.8 0.3 -19 -24 31 9 2 -11 0.6 0.4 39 -8 -36 -44 1 -11 0.6 0.4 15 -33 23 21 2 -11 0.2 0.6 33 -1 -19 -30 2 -11 0.1 0.8 28 -9 2 -40 2 -11 0.3 0.9 37 -9 22 -34 1 -11 0.7 0.2 -2 -28 -4 -7 2 -11 0.4 0.6 43 -8 -31 -42 1 -11 0.1 0.6 42 -15 21 20 2 -11 0.3 0.4 46 38 44 21 2 -11 0.2 0.2 19 -6 29 -19 1 -11 0.2 0.6 20 -29 38 35 1 -11 0.3 0.3 -2 -16 41 36 2 -11 0.7 0.3 33 -5 -7 -41 1 -11 0.6 0.4 34 11 40 -12 1 -11 0.6 0.4 -32 -35 45 -36 2 -11 0.1 0.5 -9 -40 -29 -47 2 -11 0.2 0.3 -37 -39 -26 -40 1 -11 0.3 0.3 37 -15 6 -27 1 -11 0.5 0.8 49 46 -6 -17 1 -11 0.7 0.6 26 -48 26 2 2 -11 0.7 0.2 11 10 25 -48 1 -11 0.1 0.2 4 -43 -6 -28 1 -11 0.6 0.8 -9 -41 5 -36 2 -11 0.4 0.5 28 -27 27 -39 1 -11 0.3 0.2 -16 -24 -31 -41 1 -11 0.3 0.1 30 21 21 -4 1 -11 0.8 0.6 16 -15 17 11 2 -11 0.8 0.4 -3 -40 26 21 2 -11 0.1 0.7 8 -18 36 35 2 -11 0.4 0.2 22 -13 50 -13 1 -11 0.4 0.6 40 -36 39 -40 2 -11 0.8 0.5 -4 -30 -1 -32 2 -11 0.2 0.5 39 -19 -9 -25 1 -11 0.7 0.7 3 -46 40 9 2 -11 0.3 0.2 -36 -44 34 18 2 -11 0.4 0.2 -37 -41 31 2 2 -11 0.3 0.2 -37 -41 37 -37 2 -11 0.3 0.6 15 -33 31 30 2 -11 0.4 0.5 -19 -26 -28 -34 1 -11 0.2 0.9 33 -3 23 -41 2 -11 0.7 0.2 30 20 24 9 1 -11 0.4 0.6 0 -49 18 13 2 -11 0.7 0.3 45 -46 49 -9 1 -11 0.8 0.3 -9 -46 18 -37 2 -11 0.9 0.7 24 -23 20 -27 1 -11 0.2 0.2 39 0 21 9 2 -11 0.6 0.2 19 -45 45 10 2 -11 0.7 0.2 15 11 9 -8 1 -11 0.2 0.7 27 -15 19 -5 2 -11 0.6 0.5 18 -13 35 27 2 -11 0.2 0.2 35 -22 31 -45 1 -11 0.4 0.1 47 -1 18 -27 1 -11 0.7 0.3 3 -32 -23 -38 1 -11 0.7 0.2 8 -35 -25 -35 1 -11 0.3 0.8 33 29 5 -6 1 -11 0.2 0.8 -25 -32 24 2 2 -11 0.9 0.1 33 -4 25 21 1 -11 0.8 0.8 -7 -20 25 14 2 -11 0.4 0.5 -17 -26 28 26 2 -11 0.1 0.4 -1 -25 -6 -33 1 -11 0.5 0.5 34 -21 35 12 2 -11 0.3 0.1 35 25 6 -14 1 -11 0.8 0.3 31 -19 41 -7 2 -11 0.8 0.4 -38 -40 22 -11 2 -11 0.3 0.1 14 -5 -28 -49 1 -11 0.3 0.1 31 30 -16 -45 1 -11 0.5 0.2 38 -46 -23 -48 1 -11 0.7 0.6 45 -20 -8 -18 1 -11 0.7 0.3 49 25 -2 -33 1 -11 0.9 0.4 35 32 6 -25 1 -11 0.9 0.1 -1 -13 40 -6 2 -11 0.3 0.1 -7 -16 -18 -43 1 -11 0.6 0.6 -10 -49 34 9 2 -11 0.6 0.2 0 -41 -3 -49 1 -11 0.3 0.3 -11 -45 -29 -47 1 -11 0.1 0.3 -19 -35 9 3 2 -11 0.4 0.1 -23 -30 26 -38 2 -11 0.1 0.4 6 -6 28 -27 2 -11 0.8 0.3 -14 -36 39 9 2 -11 0.8 0.2 5 -23 45 -44 1 -11 0.6 0.2 36 -9 -15 -20 1 -11 0.9 0.2 1 -15 -4 -38 1 -11 0.1 0.6 19 4 39 -3 2 -11 0.7 0.6 36 28 46 -43 1 -11 0.4 0.1 19 -3 12 -33 1 -12 0.8 0.1 49 41 -1 -43 1 -12 0.5 0.1 32 -3 47 -25 1 -12 0.9 0.3 21 -26 33 21 2 -12 0.8 0.2 37 12 -20 -47 1 -12 0.4 0.4 47 -1 45 41 2 -12 0.7 0.9 0 -4 4 -42 1 -12 0.5 0.5 36 21 -26 -31 1 -12 0.4 0.9 23 -2 46 -14 2 -12 0.3 0.1 11 -14 26 24 2 -12 0.9 0.4 46 14 48 27 1 -12 0.6 0.6 28 27 0 -50 1 -12 0.7 0.8 46 -20 44 -33 2 -12 0.9 0.5 44 -3 28 -3 1 -12 0.9 0.7 -34 -43 -11 -16 2 -12 0.4 0.7 49 -30 1 -15 1 -12 0.5 0.7 11 7 31 -46 2 -12 0.5 0.2 21 9 -14 -24 1 -12 0.4 0.6 14 -17 46 -26 2 -12 0.7 0.3 -5 -40 18 3 2 -12 0.4 0.8 27 -1 50 46 2 -12 0.5 0.3 23 -4 46 -18 1 -12 0.1 0.1 25 -26 -9 -46 1 -12 0.8 0.4 23 -45 37 13 2 -12 0.2 0.5 37 12 46 -44 1 -12 0.5 0.9 2 -2 45 31 2 -12 0.5 0.4 43 -46 10 -21 1 -12 0.9 0.1 27 -4 39 -36 1 -12 0.5 0.2 32 -44 
-26 -29 1 -12 0.1 0.6 42 -48 27 -17 2 -12 0.5 0.7 36 -28 18 13 2 -12 0.3 0.8 32 -50 48 -10 2 -12 0.4 0.4 50 5 32 27 2 -12 0.6 0.4 -31 -47 12 -24 2 -12 0.2 0.9 38 7 8 0 1 -12 0.9 0.3 35 28 50 0 1 -12 0.3 0.4 15 2 31 -20 1 -12 0.8 0.5 -30 -50 23 7 2 -12 0.8 0.1 38 -19 48 39 2 -12 0.3 0.5 45 17 -2 -19 1 -12 0.1 0.3 11 -5 -26 -29 1 -12 0.8 0.7 25 -13 45 0 2 -12 0.7 0.8 29 -48 20 -26 2 -12 0.7 0.8 8 -24 6 -30 1 -12 0.1 0.9 -17 -25 -11 -45 2 -12 0.7 0.5 45 -8 -18 -50 1 -12 0.3 0.8 24 -1 45 -48 2 -12 0.6 0.1 32 28 -13 -37 1 -12 0.6 0.4 -41 -49 -22 -44 2 -12 0.7 0.3 12 -15 36 23 2 -12 0.4 0.1 -29 -44 50 -39 2 -12 0.9 0.2 48 -8 -17 -47 1 -12 0.4 0.1 41 5 36 -16 1 -12 0.3 0.1 28 -28 4 -3 2 -12 0.4 0.1 28 12 24 -36 1 -12 0.4 0.7 26 -18 15 10 2 -12 0.1 0.3 44 -17 33 -7 2 -12 0.2 0.6 -26 -49 20 -40 2 -12 0.6 0.1 21 -28 21 -6 1 -12 0.2 0.7 41 8 -8 -46 1 -12 0.2 0.7 48 -22 17 -10 2 -12 0.9 0.2 42 30 29 11 1 -12 0.3 0.3 -11 -28 36 -37 2 -12 0.8 0.9 -4 -39 20 7 2 -12 0.6 0.5 10 -22 15 -50 1 -12 0.5 0.2 49 -38 27 -16 1 -12 0.3 0.3 -30 -48 27 -1 2 -12 0.7 0.1 -1 -49 30 14 2 -12 0.8 0.9 -22 -37 -42 -46 1 -12 0.9 0.4 12 -45 48 -42 1 -12 0.2 0.2 39 -10 0 -24 1 -12 0.4 0.5 12 -45 0 -4 2 -12 0.6 0.2 -16 -31 41 -4 2 -12 0.1 0.7 2 -12 -13 -41 1 -12 0.1 0.9 10 -33 -11 -13 2 -12 0.2 0.4 1 -8 -5 -31 1 -12 0.5 0.3 32 -10 48 -38 1 -12 0.4 0.5 13 -36 36 -22 2 -12 0.2 0.9 41 16 49 43 2 -12 0.3 0.9 -24 -50 8 -28 2 -12 0.5 0.2 50 14 47 42 2 -12 0.2 0.8 -9 -17 -43 -45 1 -12 0.5 0.6 41 11 37 -10 1 -12 0.6 0.6 15 -9 37 -27 2 -12 0.2 0.4 -37 -45 36 -33 2 -12 0.2 0.1 45 -4 21 -6 1 -12 0.2 0.9 22 -33 35 31 2 -12 0.8 0.2 36 -10 -7 -20 1 -12 0.6 0.5 13 -22 40 23 2 -12 0.8 0.8 -9 -30 47 -38 2 -12 0.1 0.2 30 -15 35 -42 1 -12 0.6 0.9 9 -15 -13 -17 1 -12 0.9 0.4 22 -27 3 -29 1 -12 0.9 0.6 17 -12 24 -21 1 -12 0.6 0.8 41 5 15 1 1 -12 0.6 0.1 35 -37 11 -42 1 -12 0.1 0.3 45 -2 37 -46 1 -12 0.7 0.3 9 6 33 17 2 -12 0.4 0.9 32 -50 6 -23 2 -12 0.8 0.7 1 -30 4 -45 1 -12 0.7 0.9 -18 -24 -23 -30 1 -12 0.7 0.2 -35 -40 13 5 2 -12 0.8 0.4 34 -24 29 5 1 -12 0.5 0.3 32 -4 24 -44 1 -12 0.6 0.1 -21 -30 -4 -9 2 -12 0.7 0.9 13 -15 11 -28 2 -12 0.3 0.8 -8 -38 -14 -28 2 -12 0.2 0.3 -37 -43 -17 -18 2 -12 0.7 0.5 29 -5 -24 -32 1 -12 0.3 0.3 -35 -37 2 -9 2 -12 0.5 0.8 42 11 -9 -44 1 -12 0.5 0.4 42 -20 -1 -11 1 -12 0.1 0.6 17 -5 -17 -40 1 -12 0.8 0.2 26 0 9 -21 1 -12 0.2 0.8 -20 -34 47 -24 2 -12 0.4 0.9 -10 -23 22 -36 2 -12 0.3 0.7 20 -8 3 -12 1 -12 0.4 0.8 0 -5 29 -15 2 -12 0.2 0.4 21 13 -10 -24 1 -12 0.3 0.6 4 -16 37 -47 2 -12 0.4 0.4 43 4 22 -35 1 -12 0.1 0.7 48 -12 21 -3 2 -12 0.5 0.4 4 -22 -24 -28 1 -12 0.8 0.9 44 -49 46 14 2 -12 0.4 0.2 -1 -10 36 -17 2 -12 0.2 0.6 -28 -32 27 -2 2 -12 0.1 0.4 26 5 -13 -33 1 -12 0.5 0.2 2 -23 0 -5 2 -12 0.2 0.9 38 5 -2 -6 1 -12 0.9 0.5 38 -43 37 18 1 -12 0.6 0.3 17 -19 -35 -47 1 -12 0.7 0.1 6 -17 40 0 2 -12 0.2 0.1 -7 -38 35 20 2 -12 0.4 0.4 30 9 -8 -48 1 -12 0.6 0.5 24 -20 16 -32 1 -12 0.4 0.2 -7 -11 12 -19 1 -12 0.3 0.6 -18 -48 -17 -45 2 -12 0.2 0.5 -6 -37 13 -47 2 -12 0.1 0.1 40 -27 3 -17 1 -12 0.6 0.4 8 5 28 27 2 -12 0.3 0.2 -7 -40 -31 -43 1 -12 0.7 0.5 -34 -42 50 35 2 -12 0.6 0.2 15 4 -13 -26 1 -12 0.6 0.7 -19 -32 47 46 2 -12 0.5 0.7 2 -25 -4 -37 2 -12 0.2 0.6 22 -38 46 -46 2 -12 0.4 0.1 24 -32 -19 -43 1 -12 0.5 0.7 -10 -45 -17 -31 2 -12 0.5 0.2 22 -28 15 7 2 -12 0.5 0.4 18 -24 26 -9 2 -12 0.1 0.2 14 5 1 -17 1 -12 0.4 0.9 41 37 21 -22 1 -12 0.7 0.2 26 -11 -2 -43 1 -12 0.9 0.6 9 -20 31 -4 2 -12 0.3 0.9 11 -10 22 1 2 -12 0.3 0.2 -22 -27 -28 -48 1 -12 0.6 0.9 -35 -43 11 8 2 -12 0.2 0.3 -30 -32 43 28 2 -12 0.9 0.7 24 23 33 
2 1 -12 0.9 0.4 -28 -36 35 28 2 -12 0.2 0.3 25 -10 -7 -29 1 -12 0.7 0.1 46 -25 9 3 1 -12 0.5 0.7 26 -30 -11 -29 1 -12 0.7 0.6 18 -23 26 -43 1 -12 0.6 0.2 30 16 -8 -25 1 -12 0.7 0.3 44 -30 43 -15 1 -12 0.2 0.7 13 -32 35 -29 2 -12 0.8 0.1 -20 -32 14 -10 2 -12 0.3 0.9 37 -15 -20 -21 1 -12 0.7 0.3 -10 -23 40 -11 2 -12 0.4 0.5 8 -40 0 -11 2 -12 0.8 0.2 -48 -49 21 13 2 -12 0.2 0.5 42 -22 40 34 2 -12 0.3 0.2 -35 -42 43 -28 2 -12 0.3 0.1 50 3 49 -22 1 -12 0.2 0.2 16 -23 18 -47 1 -12 0.4 0.3 -1 -28 29 -13 2 -12 0.9 0.8 22 3 10 0 1 -12 0.8 0.8 46 -30 -5 -22 1 -12 0.9 0.8 10 -4 50 45 2 -12 0.2 0.2 12 -1 -30 -35 1 -12 0.2 0.4 3 -46 27 18 2 -12 0.4 0.7 1 -5 28 -14 2 -12 0.7 0.8 31 -48 12 7 2 -12 0.8 0.8 10 -12 19 -48 1 -12 0.1 0.3 29 -13 -1 -22 1 -12 0.3 0.7 50 -45 -11 -33 1 -12 0.2 0.3 23 -2 -15 -35 1 -12 0.6 0.4 -2 -34 -12 -28 1 -12 0.2 0.5 -39 -43 30 -45 2 -12 0.4 0.9 38 -18 29 1 2 -12 0.4 0.3 -25 -39 29 -30 2 -12 0.7 0.3 31 -10 16 -32 1 -12 0.3 0.5 20 -39 -10 -17 2 -12 0.4 0.3 2 -48 12 -16 2 -12 0.6 0.8 -17 -34 15 -33 2 -12 0.1 0.1 23 -8 20 6 2 -12 0.2 0.2 -29 -39 49 3 2 -12 0.2 0.3 33 9 30 -6 1 -12 0.5 0.3 36 6 45 37 2 -12 0.9 0.3 30 -24 2 -22 1 -12 0.8 0.6 39 14 28 23 1 -12 0.7 0.2 34 27 37 14 1 -12 0.6 0.7 -35 -39 29 -38 2 -12 0.5 0.5 50 -21 -26 -44 1 -12 0.4 0.9 37 -36 39 -35 2 -12 0.4 0.1 17 -39 -22 -25 1 -12 0.3 0.4 50 19 -5 -33 1 -12 0.1 0.7 13 -39 13 -6 2 -12 0.5 0.1 45 -28 -30 -33 1 -12 0.4 0.1 -45 -49 18 -25 2 -12 0.1 0.8 21 -7 25 -37 2 -12 0.9 0.1 -42 -43 48 -13 2 -12 0.3 0.4 46 -21 33 -13 2 -12 0.5 0.7 6 -11 40 -5 2 -12 0.3 0.5 31 -34 38 -17 2 -12 0.4 0.5 -6 -10 36 -30 2 -12 0.7 0.3 41 -36 -28 -31 1 -12 0.5 0.2 -9 -20 21 6 2 -12 0.9 0.3 -9 -24 45 19 2 -12 0.9 0.3 41 31 7 -5 1 -12 0.9 0.3 5 -37 19 -36 1 -12 0.7 0.6 47 -35 27 20 2 -12 0.3 0.8 36 8 49 -10 2 -12 0.6 0.8 21 2 48 2 2 -12 0.3 0.7 -17 -29 28 -36 2 -12 0.3 0.9 50 1 4 3 1 -12 0.5 0.8 -4 -31 28 -7 2 -12 0.9 0.8 48 16 45 -5 1 -12 0.2 0.3 -22 -33 36 -46 2 -12 0.7 0.2 49 12 -10 -46 1 -12 0.4 0.6 24 23 3 -1 1 -12 0.9 0.8 -3 -37 -8 -23 2 -12 0.5 0.6 32 6 27 -21 1 -12 0.1 0.2 49 27 10 -13 1 -12 0.1 0.9 3 -1 -5 -37 1 -12 0.6 0.1 37 -39 36 -48 1 -12 0.8 0.3 -7 -9 14 -6 2 -12 0.4 0.4 32 17 -25 -31 1 -12 0.8 0.7 39 -7 43 -1 1 -12 0.5 0.5 -12 -46 37 -18 2 -12 0.1 0.6 16 -32 30 -31 2 -12 0.8 0.2 4 2 46 -28 1 -12 0.1 0.8 27 -49 0 -14 2 -12 0.2 0.4 19 -39 5 -22 2 -12 0.9 0.2 9 -38 12 -46 1 -12 0.1 0.5 11 -41 13 -6 2 -12 0.5 0.2 41 40 -2 -18 1 -12 0.2 0.9 50 39 -8 -12 1 -12 0.3 0.8 3 -46 -23 -47 2 -12 0.1 0.8 1 -20 28 2 2 -12 0.7 0.8 -11 -18 43 41 2 -12 0.7 0.3 31 -38 -29 -37 1 -12 0.8 0.9 28 -34 38 29 2 -12 0.2 0.8 39 -38 29 -27 2 -12 0.1 0.7 4 2 26 -11 2 -12 0.3 0.1 48 -36 -18 -34 1 -12 0.5 0.7 -4 -13 -19 -37 1 -12 0.7 0.1 49 -39 39 1 1 -12 0.4 0.8 29 20 -24 -28 1 -12 0.7 0.8 16 -31 14 -12 2 -12 0.3 0.2 49 -8 29 -45 1 -12 0.1 0.3 40 -3 18 -9 1 -12 0.8 0.6 -7 -9 3 -27 2 -12 0.2 0.2 14 -20 24 -28 1 -12 0.8 0.5 9 -27 0 -39 1 -12 0.4 0.8 46 -4 47 -49 2 -12 0.8 0.1 -13 -24 32 -26 1 -12 0.8 0.8 -28 -36 22 12 2 -12 0.4 0.9 23 -47 7 -50 2 -12 0.1 0.7 0 -11 6 -2 2 -12 0.3 0.4 29 -9 20 -27 1 -12 0.8 0.3 -9 -33 12 8 2 -12 0.1 0.8 20 -30 29 -38 2 -12 0.3 0.6 16 -4 18 6 2 -12 0.1 0.6 -42 -44 8 -14 2 -12 0.2 0.8 38 -41 28 -39 2 -12 0.8 0.2 49 21 44 25 1 -12 0.1 0.1 -8 -23 45 -8 2 -12 0.5 0.4 12 -31 36 3 2 -12 0.4 0.9 -20 -27 18 -1 2 -12 0.5 0.2 -2 -22 38 -23 2 -12 0.9 0.4 49 -34 43 24 1 -12 0.8 0.6 41 -38 -20 -33 1 -12 0.1 0.3 35 -27 34 3 2 -12 0.2 0.3 -10 -50 44 -28 2 -12 0.1 0.2 33 27 -1 -25 1 -12 0.9 0.2 -14 -37 31 21 2 -12 0.2 0.3 -23 -32 7 -43 2 -12 
0.2 0.5 38 -7 0 -9 1 -12 0.7 0.8 10 -44 41 36 2 -12 0.9 0.3 19 -5 15 -31 1 -12 0.4 0.2 41 33 -32 -44 1 -12 0.6 0.7 18 -44 13 -44 2 -12 0.8 0.3 48 -44 -9 -33 1 -12 0.4 0.9 -11 -20 25 -36 2 -12 0.3 0.4 3 -47 36 29 2 -12 0.3 0.2 23 -49 -6 -49 1 -12 0.4 0.4 5 -37 -14 -20 1 -12 0.3 0.8 -11 -24 19 -36 2 -12 0.5 0.6 2 -34 41 -30 2 -13 0.9 0.1 35 -15 49 -44 1 -13 0.5 0.8 33 26 23 -21 1 -13 0.5 0.3 27 -26 44 3 2 -13 0.5 0.8 26 -15 29 -11 2 -13 0.7 0.1 -13 -24 42 15 2 -13 0.8 0.4 45 -22 44 -5 1 -13 0.2 0.6 50 -23 12 -35 2 -13 0.1 0.4 33 12 6 -32 1 -13 0.2 0.9 14 2 0 -6 1 -13 0.2 0.7 21 -8 15 -31 1 -13 0.8 0.8 43 23 6 -24 1 -13 0.2 0.9 -5 -38 26 5 2 -13 0.4 0.1 47 -41 15 10 2 -13 0.9 0.7 24 13 35 28 2 -13 0.4 0.3 22 11 43 -27 1 -13 0.5 0.5 45 33 14 -44 1 -13 0.8 0.9 -42 -47 27 -33 2 -13 0.8 0.7 0 -32 21 -1 2 -13 0.5 0.6 37 26 37 -4 1 -13 0.8 0.2 -12 -47 -37 -39 1 -13 0.9 0.1 49 -49 13 -21 1 -13 0.9 0.6 -3 -28 34 -16 2 -13 0.8 0.1 23 -9 29 8 1 -13 0.4 0.3 -29 -32 -3 -44 2 -13 0.8 0.4 -18 -22 38 -39 2 -13 0.5 0.6 35 -6 13 -15 1 -13 0.8 0.3 25 -8 46 36 2 -13 0.4 0.7 43 -49 20 17 2 -13 0.5 0.6 18 -13 45 -17 2 -13 0.8 0.1 40 -25 -3 -15 1 -13 0.3 0.1 49 23 46 11 1 -13 0.3 0.5 -23 -33 -37 -38 1 -13 0.1 0.5 45 17 4 -31 1 -13 0.5 0.7 16 15 18 1 1 -13 0.2 0.6 28 -10 25 18 2 -13 0.1 0.7 41 -12 -33 -37 1 -13 0.2 0.1 17 -7 42 -5 2 -13 0.6 0.8 41 -21 18 11 2 -13 0.3 0.1 17 -1 -28 -35 1 -13 0.7 0.3 6 -1 29 -26 1 -13 0.3 0.1 26 17 49 4 1 -13 0.1 0.5 28 27 -18 -42 1 -13 0.4 0.1 -20 -22 -8 -28 1 -13 0.3 0.8 46 27 20 -50 1 -13 0.8 0.1 -30 -34 36 -36 1 -13 0.8 0.7 18 17 47 39 2 -13 0.7 0.3 -39 -42 -15 -37 2 -13 0.6 0.3 42 8 20 6 1 -13 0.1 0.3 24 9 31 12 2 -13 0.5 0.7 -10 -48 22 3 2 -13 0.5 0.1 47 -11 34 -23 1 -13 0.2 0.1 6 4 1 -2 1 -13 0.8 0.3 31 -18 11 2 1 -13 0.5 0.2 35 -3 24 -12 1 -13 0.3 0.9 -17 -18 10 -24 2 -13 0.3 0.2 -20 -37 36 7 2 -13 0.4 0.3 19 16 41 9 2 -13 0.6 0.5 18 -10 14 -1 1 -13 0.3 0.1 -3 -11 36 -7 2 -13 0.1 0.6 48 -5 49 11 2 -13 0.2 0.1 30 -18 21 -12 1 -13 0.4 0.4 16 -12 19 -35 1 -13 0.8 0.8 46 21 38 -23 1 -13 0.4 0.4 -17 -47 19 6 2 -13 0.2 0.5 17 -4 -9 -15 1 -13 0.6 0.8 3 -36 39 -19 2 -13 0.1 0.3 45 -27 -42 -43 1 -13 0.1 0.3 42 -27 -6 -29 1 -13 0.7 0.8 10 -29 14 -34 2 -13 0.9 0.8 29 5 -28 -34 1 -13 0.8 0.7 -22 -45 39 -20 2 -13 0.1 0.3 2 1 4 -20 1 -13 0.4 0.6 12 -32 -14 -23 1 -13 0.9 0.8 43 -2 44 6 1 -13 0.1 0.8 -41 -49 39 -2 2 -13 0.4 0.6 37 6 47 24 2 -13 0.7 0.3 -1 -43 -26 -42 1 -13 0.8 0.8 48 34 -41 -49 1 -13 0.4 0.3 43 -21 -28 -35 1 -13 0.1 0.4 -31 -44 41 23 2 -13 0.2 0.4 37 -47 39 3 2 -13 0.4 0.4 -10 -17 -1 -27 2 -13 0.7 0.7 21 -25 29 0 2 -13 0.6 0.3 30 17 29 -37 1 -13 0.1 0.8 7 -31 2 -11 2 -13 0.3 0.8 -21 -36 -9 -47 2 -13 0.8 0.2 12 -38 13 -23 1 -13 0.4 0.1 42 -17 39 -9 1 -13 0.9 0.7 43 -49 13 -31 1 -13 0.9 0.9 12 -30 -36 -48 1 -13 0.9 0.7 24 -47 -30 -45 1 -13 0.5 0.2 -1 -4 44 -8 2 -13 0.8 0.3 -16 -39 39 -36 2 -13 0.7 0.8 35 2 26 -20 1 -13 0.1 0.5 48 32 27 -14 1 -13 0.5 0.6 -3 -17 49 1 2 -13 0.9 0.6 22 -18 28 -38 1 -13 0.4 0.6 32 -13 7 -7 1 -13 0.8 0.3 32 5 26 24 2 -13 0.2 0.8 4 -50 20 -8 2 -13 0.8 0.2 43 -34 2 0 1 -13 0.9 0.6 48 -26 22 -32 1 -13 0.4 0.4 0 -22 -5 -42 1 -13 0.8 0.9 17 -32 12 10 2 -13 0.1 0.3 42 34 43 24 1 -13 0.7 0.4 39 -43 26 -22 1 -13 0.6 0.2 47 3 -35 -37 1 -13 0.9 0.1 30 -16 49 22 2 -13 0.2 0.3 -7 -20 8 -9 2 -13 0.6 0.4 46 -50 29 25 2 -13 0.3 0.4 40 -10 44 -48 1 -13 0.1 0.1 -26 -39 7 -29 2 -13 0.5 0.3 48 -21 -14 -41 1 -13 0.5 0.4 -26 -40 13 -46 2 -13 0.7 0.5 4 -47 -9 -34 1 -13 0.4 0.7 41 -12 -9 -43 1 -13 0.6 0.4 38 3 24 20 2 -13 0.7 0.6 37 -40 30 -27 1 -13 0.4 0.9 30 -18 13 -41 
1 -13 0.2 0.1 43 27 20 -12 1 -13 0.8 0.5 11 -26 33 12 2 -13 0.7 0.1 42 -32 30 19 1 -13 0.8 0.5 49 -36 46 11 2 -13 0.8 0.7 4 -17 9 -11 2 -13 0.1 0.3 36 -16 34 -50 2 -13 0.1 0.5 40 -41 -48 -49 1 -13 0.2 0.9 2 1 40 -42 2 -13 0.8 0.7 15 -14 -25 -39 1 -13 0.7 0.5 28 -37 17 -48 1 -13 0.3 0.8 29 28 -22 -43 1 -13 0.7 0.5 36 -29 23 -13 1 -13 0.4 0.6 38 -22 20 -50 1 -13 0.5 0.4 -22 -47 14 -8 2 -13 0.6 0.4 5 -44 16 -24 2 -13 0.9 0.3 17 -23 24 -28 1 -13 0.5 0.5 36 27 -41 -49 1 -13 0.6 0.3 -37 -45 15 -16 2 -13 0.6 0.6 0 -46 33 -7 2 -13 0.8 0.9 14 -49 44 43 2 -13 0.5 0.2 45 -22 0 -34 1 -13 0.9 0.5 30 -45 -17 -42 1 -13 0.3 0.5 34 13 21 -8 1 -13 0.7 0.9 31 14 -4 -22 1 -13 0.6 0.7 9 -8 -27 -49 1 -13 0.7 0.1 50 39 -17 -48 1 -13 0.7 0.2 16 -24 20 -46 1 -13 0.4 0.7 50 -20 3 -8 1 -13 0.8 0.2 47 41 -10 -49 1 -13 0.6 0.6 42 -34 19 -6 1 -13 0.4 0.8 33 -23 28 19 2 -13 0.4 0.5 1 -48 32 17 2 -13 0.4 0.7 31 2 43 14 2 -13 0.5 0.8 -35 -46 -17 -48 2 -13 0.1 0.7 13 0 36 -7 2 -13 0.6 0.6 -22 -49 35 -22 2 -13 0.3 0.5 32 -22 46 -8 2 -13 0.1 0.8 24 -16 23 -7 2 -13 0.4 0.1 -25 -29 47 5 2 -13 0.7 0.8 38 -45 36 28 2 -13 0.5 0.7 37 -26 -8 -33 1 -13 0.2 0.6 34 -35 -6 -17 2 -13 0.8 0.9 21 -21 -19 -24 1 -13 0.7 0.1 37 -42 -32 -38 1 -13 0.3 0.7 14 -15 36 -12 2 -13 0.9 0.8 44 -19 4 -16 1 -13 0.2 0.2 34 -30 19 -36 1 -13 0.1 0.9 44 19 22 4 2 -13 0.3 0.1 -27 -28 -10 -20 2 -13 0.8 0.9 -2 -27 27 -47 2 -13 0.8 0.1 -9 -31 -4 -22 1 -13 0.3 0.3 16 -31 -6 -43 1 -13 0.5 0.4 46 12 -11 -43 1 -13 0.2 0.3 -12 -50 38 3 2 -13 0.6 0.5 17 -25 4 -27 1 -13 0.4 0.4 -8 -44 -2 -49 1 -13 0.4 0.9 29 4 8 -23 1 -13 0.6 0.3 10 -35 13 -26 1 -13 0.9 0.9 9 -35 -5 -16 1 -13 0.1 0.2 33 3 35 -32 1 -13 0.4 0.1 26 24 0 -7 1 -13 0.7 0.2 9 4 20 15 2 -13 0.2 0.2 35 -33 -6 -18 1 -13 0.2 0.5 41 -41 8 -10 2 -13 0.4 0.8 20 -40 23 -16 2 -13 0.3 0.3 48 9 16 -35 1 -13 0.7 0.9 50 -6 47 -17 2 -13 0.5 0.9 46 -11 -5 -9 1 -13 0.7 0.6 41 -12 0 -32 1 -13 0.9 0.1 -12 -32 -7 -15 1 -13 0.8 0.2 37 29 -5 -16 1 -13 0.4 0.8 36 7 -25 -44 1 -13 0.2 0.9 -4 -15 -22 -37 1 -13 0.8 0.1 -31 -34 33 -5 2 -13 0.1 0.3 9 -17 -33 -41 1 -13 0.4 0.1 8 -2 48 -8 1 -13 0.3 0.4 47 -26 48 -40 2 -13 0.5 0.8 23 -26 34 -19 2 -13 0.6 0.5 11 -47 47 43 2 -13 0.6 0.5 47 37 25 -2 1 -13 0.5 0.1 28 24 33 -47 1 -13 0.8 0.4 17 3 22 6 2 -13 0.1 0.8 -46 -50 -13 -33 2 -13 0.4 0.5 34 24 22 16 1 -13 0.6 0.9 18 -45 -4 -40 1 -13 0.7 0.8 16 11 4 -25 1 -13 0.3 0.2 -20 -37 4 -43 2 -13 0.5 0.9 -1 -10 44 32 2 -13 0.4 0.8 43 -36 27 -38 2 -13 0.3 0.3 4 -42 41 -1 2 -13 0.7 0.8 47 -24 -40 -41 1 -13 0.6 0.4 4 -2 48 25 2 -13 0.2 0.3 33 -11 35 26 2 -13 0.7 0.1 8 0 13 3 1 -13 0.4 0.5 -5 -26 43 34 2 -13 0.8 0.7 -18 -50 -3 -32 2 -13 0.2 0.9 34 -2 29 14 2 -13 0.3 0.6 1 -27 18 6 2 -13 0.8 0.4 40 2 12 -14 1 -13 0.5 0.9 -14 -37 -18 -34 1 -13 0.9 0.2 6 -11 42 -50 1 -13 0.5 0.4 47 38 34 3 1 -13 0.9 0.7 21 -31 5 3 1 -13 0.9 0.3 19 8 23 -1 1 -13 0.9 0.1 -1 -14 0 -23 1 -13 0.2 0.3 -6 -15 47 -40 2 -13 0.7 0.8 40 -26 44 -33 2 -13 0.6 0.8 36 -26 -4 -10 1 -13 0.7 0.3 25 -17 -8 -42 1 -13 0.3 0.6 31 -25 14 7 2 -13 0.6 0.7 17 -25 28 4 2 -13 0.3 0.9 34 -8 26 8 2 -13 0.7 0.5 16 -45 35 29 2 -13 0.2 0.7 28 -20 27 -29 2 -13 0.6 0.4 45 -31 -11 -33 1 -13 0.9 0.6 31 -18 46 24 2 -13 0.3 0.6 12 -42 3 -5 2 -13 0.5 0.6 6 -12 23 -26 2 -13 0.8 0.5 12 -21 50 -2 2 -13 0.9 0.8 45 23 -22 -29 1 -13 0.3 0.6 18 16 16 -37 1 -13 0.2 0.5 43 -21 29 -29 1 -13 0.5 0.2 38 27 -35 -41 1 -13 0.8 0.7 46 5 49 -42 1 -13 0.9 0.3 6 -46 7 -50 1 -13 0.2 0.8 -11 -12 -19 -40 1 -13 0.6 0.2 8 -44 -10 -43 1 -13 0.3 0.7 41 -6 40 23 2 -13 0.7 0.8 10 -25 -32 -50 1 -13 0.5 0.3 49 13 37 -31 1 -13 0.8 0.5 39 19 
50 -14 1 -13 0.4 0.6 29 24 44 13 2 -13 0.7 0.4 -1 -41 46 -23 2 -13 0.6 0.4 44 28 -3 -17 1 -13 0.3 0.8 49 -40 50 -41 2 -13 0.8 0.8 48 -37 26 21 1 -13 0.3 0.7 -17 -19 4 -30 2 -13 0.5 0.7 36 2 26 -2 2 -13 0.4 0.1 12 -42 -15 -25 1 -13 0.1 0.4 32 -29 45 -17 2 -13 0.4 0.2 40 4 33 -40 1 -13 0.5 0.1 33 -8 -32 -45 1 -13 0.7 0.6 8 -42 -3 -5 1 -13 0.4 0.6 -29 -45 46 15 2 -13 0.7 0.3 18 -14 40 -14 1 -13 0.1 0.6 -31 -44 -34 -36 2 -13 0.7 0.3 20 -14 7 1 1 -13 0.2 0.3 16 -38 50 2 2 -13 0.5 0.4 8 -39 -4 -49 1 -13 0.7 0.2 18 3 -6 -30 1 -13 0.3 0.8 43 -4 -2 -40 1 -13 0.6 0.6 16 -17 34 33 2 -13 0.7 0.6 -17 -32 17 10 2 -13 0.2 0.8 40 -40 48 18 2 -13 0.9 0.5 41 35 50 -31 1 -13 0.9 0.3 20 13 23 -50 1 -13 0.8 0.7 33 -2 47 -14 2 -13 0.2 0.7 32 -49 34 11 2 -13 0.6 0.9 40 8 44 5 2 -13 0.8 0.5 50 48 21 3 1 -13 0.3 0.3 -46 -49 28 -39 2 -13 0.7 0.6 -12 -21 33 3 2 -13 0.5 0.8 -29 -35 23 -34 2 -13 0.8 0.1 9 -34 36 34 2 -13 0.7 0.8 6 -21 40 36 2 -13 0.5 0.7 11 -35 19 10 2 -13 0.4 0.6 14 6 32 -3 1 -13 0.4 0.8 45 34 -30 -31 1 -13 0.5 0.6 20 5 43 -6 2 -13 0.4 0.7 -12 -38 22 15 2 -13 0.3 0.3 10 -17 16 -47 1 -13 0.6 0.5 44 26 35 29 1 -13 0.6 0.6 6 0 -2 -47 1 -13 0.5 0.8 38 33 15 -44 1 -13 0.5 0.1 7 -31 27 -6 2 -13 0.6 0.8 10 2 24 -18 2 -13 0.2 0.5 36 20 40 -35 1 -13 0.7 0.9 18 -14 -20 -24 1 -13 0.2 0.9 -36 -37 7 -50 2 -13 0.5 0.7 20 -25 15 -5 2 -13 0.9 0.7 47 40 -26 -29 1 -14 0.6 0.5 49 -49 -13 -37 1 -14 0.1 0.7 22 7 50 3 2 -14 0.3 0.9 -32 -39 21 13 2 -14 0.2 0.1 6 -44 -7 -33 1 -14 0.6 0.1 6 -34 2 -15 2 -14 0.6 0.9 43 -25 -35 -46 1 -14 0.2 0.6 33 -16 31 -28 2 -14 0.5 0.9 9 -35 31 -20 2 -14 0.3 0.5 34 -40 -20 -21 1 -14 0.4 0.1 49 -27 19 -41 1 -14 0.6 0.4 26 22 34 28 2 -14 0.6 0.2 18 -32 1 -27 1 -14 0.7 0.6 32 3 44 24 2 -14 0.2 0.9 44 37 42 13 2 -14 0.1 0.4 -35 -38 40 -29 2 -14 0.2 0.8 39 -44 33 11 2 -14 0.7 0.9 19 -2 21 -7 1 -14 0.1 0.3 -30 -32 23 -42 2 -14 0.8 0.7 3 -23 -5 -37 1 -14 0.2 0.5 24 -21 -15 -44 1 -14 0.4 0.2 4 -11 48 -29 2 -14 0.8 0.3 25 -30 17 -48 1 -14 0.4 0.6 22 18 14 -10 1 -14 0.4 0.2 39 8 48 -24 2 -14 0.3 0.6 8 -42 36 -35 2 -14 0.1 0.6 -3 -11 -6 -15 2 -14 0.5 0.1 5 -4 -2 -23 1 -14 0.3 0.7 27 -4 25 4 1 -14 0.8 0.8 -17 -29 -4 -5 1 -14 0.9 0.6 -2 -43 -22 -40 1 -14 0.9 0.5 23 -19 -5 -13 1 -14 0.5 0.7 -7 -8 30 -8 2 -14 0.8 0.7 37 35 27 -15 1 -14 0.1 0.1 50 22 29 9 2 -14 0.3 0.2 19 -11 0 -9 1 -14 0.1 0.5 36 -3 48 -16 2 -14 0.3 0.8 13 -39 43 12 2 -14 0.6 0.6 30 -14 11 -1 2 -14 0.3 0.8 28 -48 -4 -11 1 -14 0.2 0.7 33 -42 -18 -38 1 -14 0.2 0.9 25 -36 -15 -25 2 -14 0.5 0.6 -1 -30 41 -17 2 -14 0.7 0.5 -6 -40 13 -49 2 -14 0.8 0.3 21 4 -11 -47 1 -14 0.1 0.1 -10 -34 30 -28 2 -14 0.7 0.3 12 -27 15 -40 1 -14 0.5 0.2 -35 -42 13 -5 2 -14 0.8 0.8 18 5 21 -13 2 -14 0.1 0.5 22 6 41 -8 1 -14 0.4 0.8 45 -13 -16 -49 1 -14 0.5 0.1 9 2 -37 -49 1 -14 0.6 0.8 -4 -24 -14 -43 1 -14 0.4 0.9 31 -31 -7 -28 1 -14 0.3 0.6 38 -16 20 -27 1 -14 0.4 0.3 -11 -48 -1 -18 2 -14 0.5 0.5 -17 -38 38 -43 2 -14 0.7 0.8 20 -31 32 -21 2 -14 0.3 0.3 20 -8 35 -15 2 -14 0.8 0.3 -11 -34 17 -17 2 -14 0.6 0.4 -16 -20 26 -49 1 -14 0.8 0.1 30 -29 46 42 2 -14 0.3 0.8 40 14 24 14 2 -14 0.1 0.1 40 -37 -3 -44 1 -14 0.7 0.5 50 33 42 37 1 -14 0.7 0.5 16 6 20 -6 1 -14 0.5 0.4 13 -2 19 -37 2 -14 0.5 0.9 41 16 41 -1 2 -14 0.2 0.1 16 2 38 22 2 -14 0.5 0.7 29 -33 31 -4 2 -14 0.6 0.5 42 -30 12 -33 1 -14 0.5 0.4 31 -14 -3 -10 1 -14 0.3 0.4 5 -36 35 -24 1 -14 0.5 0.2 31 -18 50 -24 1 -14 0.5 0.7 2 -38 44 40 2 -14 0.3 0.7 27 2 -2 -17 1 -14 0.1 0.4 26 3 26 -19 1 -14 0.2 0.8 34 31 -7 -48 1 -14 0.2 0.4 35 -25 -21 -38 1 -14 0.6 0.8 -7 -49 -26 -35 1 -14 0.3 0.8 33 -45 21 9 2 -14 0.3 0.5 
-27 -43 36 -35 2 -14 0.1 0.3 27 6 -22 -44 1 -14 0.7 0.2 4 -35 21 -7 2 -14 0.2 0.3 40 -32 4 -14 1 -14 0.3 0.6 45 8 9 -7 1 -14 0.1 0.8 39 -13 37 30 2 -14 0.9 0.3 -44 -49 39 -43 2 -14 0.5 0.4 0 -9 34 1 2 -14 0.2 0.1 45 -26 48 -23 1 -14 0.2 0.2 20 -22 -18 -50 1 -14 0.6 0.3 19 -41 22 -47 1 -14 0.6 0.7 26 -10 27 1 2 -14 0.9 0.1 25 -17 32 21 2 -14 0.3 0.8 26 -41 4 -16 2 -14 0.7 0.2 27 -42 -16 -45 1 -14 0.4 0.5 -2 -3 19 -26 2 -14 0.7 0.6 33 21 49 31 1 -14 0.3 0.6 18 -25 -12 -13 1 -14 0.6 0.4 13 -10 28 8 2 -14 0.8 0.9 32 -27 1 -7 1 -14 0.8 0.2 32 -6 22 -37 1 -14 0.1 0.5 -15 -31 42 -35 2 -14 0.7 0.7 -20 -33 24 -14 2 -14 0.2 0.7 46 -18 30 -14 2 -14 0.1 0.5 46 -33 39 -23 2 -14 0.8 0.6 18 -28 -17 -35 1 -14 0.5 0.6 35 -26 48 8 2 -14 0.1 0.2 -22 -24 35 7 2 -14 0.6 0.6 -1 -21 27 -19 1 -14 0.3 0.6 33 -6 39 10 2 -14 0.3 0.9 -27 -32 20 -47 2 -14 0.4 0.7 -16 -43 -15 -45 1 -14 0.2 0.9 16 -40 12 10 2 -14 0.8 0.1 -13 -43 1 -47 1 -14 0.3 0.8 -23 -39 47 13 2 -14 0.2 0.2 37 -48 43 12 2 -14 0.7 0.9 -6 -48 -33 -45 1 -14 0.7 0.8 43 -19 -1 -38 1 -14 0.3 0.8 -5 -7 -5 -26 2 -14 0.7 0.7 31 -18 11 -49 1 -14 0.1 0.1 6 -13 21 -44 1 -14 0.5 0.4 40 9 31 5 1 -14 0.9 0.9 5 -46 -25 -44 1 -14 0.7 0.8 44 39 3 -31 1 -14 0.9 0.9 3 -36 24 -1 2 -14 0.7 0.3 -3 -45 23 -36 2 -14 0.4 0.7 21 -16 2 -10 1 -14 0.6 0.6 6 -3 42 19 2 -14 0.8 0.4 34 -28 35 33 1 -14 0.5 0.7 11 2 22 -42 2 -14 0.8 0.6 22 -23 43 -45 1 -14 0.4 0.9 3 -21 47 -1 2 -14 0.4 0.7 12 4 23 -39 1 -14 0.7 0.5 40 -5 24 -30 1 -14 0.1 0.9 20 9 -2 -42 1 -14 0.1 0.1 40 21 44 -45 1 -14 0.4 0.1 21 -31 5 -25 1 -14 0.2 0.2 -24 -34 25 23 2 -14 0.9 0.3 23 -21 1 -25 1 -14 0.1 0.3 -1 -47 8 7 2 -14 0.9 0.1 10 -30 43 -39 2 -14 0.1 0.8 13 0 21 -3 2 -14 0.5 0.3 37 17 17 -36 1 -14 0.4 0.1 10 -28 34 0 2 -14 0.2 0.1 -4 -31 37 -36 2 -14 0.4 0.4 38 33 22 -13 1 -14 0.6 0.5 36 -10 -27 -35 1 -14 0.1 0.7 -2 -42 22 -10 2 -14 0.7 0.9 40 25 32 -35 1 -14 0.8 0.9 6 -41 39 23 2 -14 0.8 0.7 49 14 -30 -32 1 -14 0.7 0.1 -15 -36 17 16 2 -14 0.8 0.1 -3 -42 34 -19 1 -14 0.2 0.8 48 43 14 -12 1 -14 0.4 0.5 24 23 30 27 2 -14 0.3 0.1 28 -38 34 -36 1 -14 0.8 0.5 23 -46 -20 -27 1 -14 0.4 0.5 50 -6 38 1 1 -14 0.9 0.4 11 -11 42 -16 1 -14 0.4 0.6 41 38 36 -13 1 -14 0.6 0.6 -8 -14 47 -9 2 -14 0.4 0.5 -5 -47 7 -26 2 -14 0.4 0.2 -2 -44 3 -28 1 -14 0.2 0.5 -4 -33 10 -14 1 -14 0.2 0.3 28 -7 35 -6 2 -14 0.3 0.3 34 31 40 -43 1 -14 0.3 0.5 -5 -45 20 -7 2 -14 0.9 0.2 -12 -40 45 -17 2 -14 0.3 0.3 40 15 24 -8 1 -14 0.7 0.4 4 1 40 -44 2 -14 0.9 0.3 47 -25 23 4 1 -14 0.6 0.1 31 -19 -3 -6 1 -14 0.4 0.1 -10 -24 48 -11 2 -14 0.1 0.7 41 -46 -24 -34 2 -14 0.3 0.2 5 1 26 -49 1 -14 0.5 0.2 40 9 15 8 1 -14 0.3 0.5 46 8 19 2 1 -14 0.3 0.2 38 -23 35 -19 2 -14 0.8 0.6 44 -24 49 -31 1 -14 0.1 0.1 48 -37 4 -11 1 -14 0.2 0.5 46 8 13 -31 1 -14 0.9 0.4 23 16 15 -3 1 -14 0.7 0.5 31 -44 44 -30 1 -14 0.9 0.8 37 -46 -27 -31 1 -14 0.1 0.1 -25 -27 46 20 2 -14 0.6 0.9 -2 -36 46 25 2 -14 0.2 0.3 -1 -8 -36 -47 1 -14 0.7 0.6 29 9 13 -26 2 -14 0.5 0.8 23 -16 32 -3 2 -14 0.9 0.9 10 -41 33 -12 2 -14 0.9 0.7 3 -27 46 36 2 -14 0.6 0.6 39 -24 21 11 2 -14 0.3 0.3 14 -27 10 -21 1 -14 0.6 0.7 -17 -44 19 -18 2 -14 0.9 0.4 39 -47 47 -12 1 -14 0.9 0.6 -38 -45 16 -35 2 -14 0.7 0.2 35 30 33 23 2 -14 0.8 0.6 -9 -42 19 -35 2 -14 0.2 0.6 -14 -25 18 -36 2 -14 0.5 0.9 -22 -35 35 32 2 -14 0.3 0.8 18 17 44 4 1 -14 0.6 0.5 -2 -33 29 14 2 -14 0.2 0.8 -3 -38 17 -47 2 -14 0.1 0.7 7 -31 -18 -26 1 -14 0.3 0.7 16 -33 -17 -29 1 -14 0.8 0.2 15 -16 -44 -49 1 -14 0.1 0.4 43 -5 21 -41 1 -14 0.3 0.6 10 -28 22 -4 2 -14 0.1 0.4 29 -8 8 6 2 -14 0.3 0.9 6 -12 20 5 2 -14 0.1 0.5 -25 -49 24 -16 2 -14 
0.8 0.1 40 -29 -24 -31 1 -14 0.4 0.1 22 -18 -22 -28 1 -14 0.6 0.4 5 -8 17 -41 1 -14 0.7 0.5 20 -1 24 -11 1 -14 0.2 0.2 40 -30 23 11 2 -14 0.8 0.2 29 -30 -12 -13 1 -14 0.1 0.5 30 -1 31 -18 1 -14 0.8 0.8 21 17 20 -9 1 -14 0.6 0.5 16 -10 -3 -14 1 -14 0.4 0.5 28 -49 36 27 2 -14 0.4 0.9 17 6 14 6 2 -14 0.4 0.1 1 -16 28 -8 1 -14 0.2 0.8 5 -14 -35 -48 1 -14 0.1 0.8 42 7 23 -14 2 -14 0.2 0.7 29 11 32 -11 1 -14 0.3 0.9 32 27 48 -4 2 -14 0.8 0.2 34 -17 12 -42 1 -14 0.1 0.7 15 -13 -9 -50 1 -14 0.2 0.4 40 33 5 -44 1 -14 0.5 0.3 46 -50 -16 -22 1 -14 0.5 0.7 21 -36 41 15 2 -14 0.8 0.3 -4 -5 35 16 2 -14 0.6 0.3 25 -31 35 21 2 -14 0.5 0.6 49 15 -40 -46 1 -14 0.7 0.9 -3 -21 41 -20 2 -14 0.4 0.8 37 27 -32 -50 1 -14 0.7 0.9 30 -50 28 -47 1 -14 0.6 0.3 46 -16 -17 -30 2 -14 0.9 0.8 6 -30 -6 -44 1 -14 0.2 0.4 23 12 9 -25 1 -14 0.3 0.6 18 -44 11 -41 2 -14 0.6 0.1 46 -46 -12 -47 1 -14 0.2 0.4 44 40 46 -23 1 -14 0.6 0.6 39 26 40 -47 1 -14 0.7 0.1 36 -14 -35 -49 1 -14 0.6 0.2 39 -21 -23 -28 1 -14 0.6 0.9 -1 -40 -3 -25 2 -14 0.5 0.9 18 -18 20 -6 2 -14 0.6 0.1 27 -5 46 18 2 -14 0.4 0.7 43 26 6 -45 1 -14 0.7 0.5 39 6 50 32 2 -14 0.9 0.5 29 -23 -7 -26 1 -14 0.8 0.7 -40 -45 -1 -30 2 -14 0.7 0.6 35 1 45 -7 2 -14 0.9 0.3 -13 -30 2 -20 2 -14 0.3 0.4 30 -18 -17 -50 1 -14 0.9 0.9 -25 -41 38 28 2 -14 0.8 0.1 -31 -36 39 -16 1 -14 0.3 0.1 -25 -34 9 -24 2 -14 0.7 0.5 14 3 3 -30 1 -14 0.3 0.8 -29 -40 -4 -31 2 -14 0.5 0.5 -18 -20 22 -25 2 -14 0.2 0.7 43 12 39 37 2 -14 0.9 0.4 15 -21 10 5 2 -14 0.5 0.4 45 -18 5 -16 1 -14 0.5 0.3 15 -1 -26 -31 1 -14 0.2 0.2 41 -13 47 -50 1 -14 0.8 0.2 31 -47 43 24 2 -14 0.6 0.2 44 -14 49 20 2 -14 0.8 0.6 22 -4 25 -6 1 -14 0.4 0.4 15 2 -4 -35 2 -14 0.2 0.6 -30 -36 17 -40 2 -14 0.4 0.9 -19 -22 32 24 2 -14 0.1 0.7 29 -38 30 -38 2 -14 0.1 0.3 19 -45 27 19 2 -14 0.8 0.7 16 -29 14 -11 2 -14 0.8 0.2 50 22 45 -33 1 -14 0.6 0.1 -22 -37 49 -33 2 -14 0.6 0.4 3 -43 26 9 2 -14 0.7 0.6 50 12 -13 -44 1 -14 0.4 0.4 5 -45 41 28 2 -14 0.4 0.2 1 -4 7 -37 2 -14 0.8 0.5 38 32 9 -21 1 -14 0.2 0.7 22 -37 46 -25 2 -14 0.8 0.8 -42 -44 22 13 2 -14 0.1 0.3 49 44 43 0 1 -14 0.2 0.1 16 -35 47 -39 2 -14 0.6 0.5 44 -35 -20 -37 1 -14 0.4 0.8 6 -18 24 -28 2 -14 0.9 0.4 8 -11 30 -42 2 -14 0.5 0.3 14 -25 44 -10 2 -14 0.9 0.2 49 22 46 -9 1 -14 0.6 0.6 29 2 21 -4 2 -14 0.4 0.2 0 -46 41 -29 2 -14 0.7 0.4 35 10 44 19 2 -14 0.8 0.8 50 35 15 -21 1 -14 0.1 0.1 45 2 40 -30 1 -14 0.7 0.6 -28 -30 5 -13 2 -14 0.7 0.8 -17 -29 48 29 2 -15 0.9 0.1 -19 -33 31 15 2 -15 0.4 0.5 36 5 -2 -8 1 -15 0.5 0.2 13 -23 39 -33 1 -15 0.8 0.3 36 -9 24 -35 1 -15 0.4 0.1 10 8 37 17 1 -15 0.3 0.8 5 -26 -9 -10 1 -15 0.4 0.3 14 -34 21 -45 1 -15 0.2 0.4 -31 -49 2 -24 2 -15 0.6 0.4 50 48 -41 -50 1 -15 0.2 0.8 -8 -34 45 15 2 -15 0.1 0.6 32 -27 14 0 2 -15 0.9 0.6 10 -49 -5 -43 1 -15 0.2 0.1 2 -6 14 -15 2 -15 0.1 0.7 -6 -34 18 -34 2 -15 0.2 0.6 13 -32 -36 -45 1 -15 0.1 0.7 0 -38 23 -22 2 -15 0.2 0.1 4 -27 -13 -38 1 -15 0.2 0.8 48 -35 -1 -46 2 -15 0.9 0.2 16 -40 10 -25 1 -15 0.7 0.8 13 -6 44 6 2 -15 0.8 0.7 35 31 -14 -47 1 -15 0.1 0.3 50 17 3 -12 1 -15 0.4 0.1 37 -42 18 -29 2 -15 0.8 0.6 -8 -22 49 6 2 -15 0.9 0.7 9 -17 27 -50 2 -15 0.2 0.7 0 -44 21 -16 2 -15 0.5 0.2 18 -8 35 -21 1 -15 0.1 0.1 37 27 46 18 1 -15 0.6 0.1 -32 -47 -6 -19 2 -15 0.4 0.6 6 -31 31 11 2 -15 0.5 0.4 34 26 50 49 2 -15 0.2 0.6 2 -10 36 21 2 -15 0.1 0.4 -42 -44 30 -43 2 -15 0.5 0.4 25 -23 29 -11 2 -15 0.9 0.5 46 0 39 -37 1 -15 0.6 0.5 41 -20 16 8 1 -15 0.1 0.9 46 23 -45 -50 1 -15 0.9 0.5 -3 -35 29 -50 2 -15 0.4 0.2 28 -49 3 -23 1 -15 0.7 0.1 30 -26 2 -35 1 -15 0.7 0.6 49 2 12 -28 1 -15 0.5 0.3 45 -2 17 
10 1 -15 0.7 0.1 42 -12 7 -35 1 -15 0.1 0.7 48 -8 45 -19 2 -15 0.3 0.5 -10 -13 49 -46 2 -15 0.2 0.5 24 4 11 5 1 -15 0.1 0.2 27 18 -7 -34 1 -15 0.2 0.9 28 18 42 33 2 -15 0.9 0.1 18 9 31 -33 1 -15 0.6 0.9 11 6 7 -30 1 -15 0.1 0.4 32 -42 35 -47 2 -15 0.9 0.3 15 -23 -28 -37 1 -15 0.6 0.8 -18 -39 28 18 2 -15 0.8 0.6 28 -30 45 11 2 -15 0.1 0.8 27 23 -3 -18 1 -15 0.5 0.2 -5 -27 6 -38 2 -15 0.8 0.1 39 23 50 -19 1 -15 0.4 0.6 30 13 49 -2 2 -15 0.9 0.1 46 14 0 -19 1 -15 0.6 0.7 37 5 -29 -30 1 -15 0.3 0.8 42 -45 22 -12 2 -15 0.2 0.7 18 -14 47 24 2 -15 0.5 0.5 30 -46 22 -18 2 -15 0.5 0.4 49 -7 -15 -41 1 -15 0.9 0.5 -35 -38 42 33 2 -15 0.9 0.1 39 -19 -29 -34 1 -15 0.9 0.2 -42 -43 -36 -41 2 -15 0.9 0.9 13 -31 24 3 2 -15 0.3 0.8 44 23 35 -13 1 -15 0.1 0.6 41 30 42 28 2 -15 0.2 0.4 17 -25 13 1 2 -15 0.5 0.2 -6 -9 22 -17 2 -15 0.3 0.3 17 -35 -44 -45 1 -15 0.1 0.4 32 -2 41 17 2 -15 0.9 0.8 25 -49 49 18 2 -15 0.1 0.8 38 34 31 9 1 -15 0.5 0.7 20 -36 41 12 2 -15 0.5 0.2 5 -11 -28 -36 1 -15 0.8 0.5 45 -6 14 5 1 -15 0.1 0.6 4 -13 10 -18 2 -15 0.8 0.8 -32 -39 14 -36 2 -15 0.5 0.3 9 -38 45 15 2 -15 0.5 0.3 -5 -20 35 -17 2 -15 0.6 0.5 16 -5 50 10 2 -15 0.5 0.3 42 -16 4 3 2 -15 0.6 0.7 -11 -36 -6 -49 2 -15 0.9 0.2 14 -12 15 12 2 -15 0.8 0.6 -25 -26 25 10 2 -15 0.2 0.4 -27 -42 49 -18 2 -15 0.7 0.5 1 -33 17 8 2 -15 0.2 0.6 32 -47 50 -30 2 -15 0.3 0.4 -11 -34 46 -44 2 -15 0.3 0.8 21 -5 39 -1 2 -15 0.2 0.6 19 10 13 -28 1 -15 0.1 0.8 10 -12 13 -35 2 -15 0.6 0.7 -18 -46 -25 -46 1 -15 0.1 0.5 15 2 21 0 2 -15 0.3 0.4 48 -30 33 -30 1 -15 0.3 0.6 46 32 -12 -29 1 -15 0.5 0.2 5 4 28 0 1 -15 0.8 0.8 46 9 28 12 2 -15 0.3 0.2 13 4 35 12 2 -15 0.8 0.9 21 -35 20 -37 2 -15 0.7 0.3 46 -18 10 -39 1 -15 0.1 0.6 28 -31 22 -12 1 -15 0.8 0.1 -18 -22 44 19 2 -15 0.4 0.3 49 -47 -9 -29 2 -15 0.3 0.8 42 40 23 -31 1 -15 0.4 0.3 44 -40 35 21 2 -15 0.7 0.3 8 -49 -3 -30 1 -15 0.3 0.3 31 -3 26 -31 1 -15 0.8 0.6 -2 -50 -3 -22 2 -15 0.5 0.7 12 -41 37 9 2 -15 0.7 0.7 41 -27 15 -33 1 -15 0.7 0.6 22 14 17 8 1 -15 0.5 0.6 47 -14 31 1 1 -15 0.9 0.9 50 -4 -6 -45 1 -15 0.6 0.9 -13 -30 -36 -40 1 -15 0.8 0.3 39 -38 14 1 1 -15 0.1 0.9 40 10 10 -23 1 -15 0.4 0.9 -45 -46 8 -8 2 -15 0.2 0.8 10 -12 -2 -41 2 -15 0.8 0.6 38 2 32 -17 1 -15 0.5 0.5 31 -23 49 -28 2 -15 0.8 0.5 25 -14 -20 -22 1 -15 0.7 0.6 -30 -41 45 33 2 -15 0.1 0.2 2 -17 41 -47 1 -15 0.3 0.3 10 -42 30 -4 2 -15 0.3 0.5 49 -35 33 26 2 -15 0.5 0.3 45 -18 -3 -30 1 -15 0.4 0.8 45 -22 21 -42 2 -15 0.7 0.7 44 -45 21 -21 1 -15 0.1 0.4 17 10 43 35 2 -15 0.4 0.3 42 -36 6 -9 1 -15 0.6 0.7 12 3 35 30 2 -15 0.2 0.7 -42 -46 5 -32 2 -15 0.7 0.1 49 15 -29 -38 1 -15 0.5 0.4 0 -43 28 -3 2 -15 0.1 0.4 19 -34 -30 -44 1 -15 0.5 0.5 28 1 -34 -39 1 -15 0.9 0.5 0 -1 16 5 2 -15 0.1 0.8 18 -31 40 -9 2 -15 0.7 0.1 -4 -28 29 -26 1 -15 0.4 0.4 23 19 32 -11 2 -15 0.7 0.8 14 -5 -14 -47 1 -15 0.6 0.7 -20 -25 13 -21 2 -15 0.4 0.3 27 -3 -12 -18 1 -15 0.4 0.8 19 6 4 -35 1 -15 0.1 0.4 32 -43 23 14 2 -15 0.7 0.4 -38 -44 12 -21 2 -15 0.2 0.3 40 4 28 -9 1 -15 0.8 0.9 41 -8 -11 -22 1 -15 0.1 0.8 4 -34 -7 -16 2 -15 0.3 0.9 49 8 44 22 2 -15 0.6 0.1 5 -20 28 -50 1 -15 0.1 0.1 -45 -48 42 10 2 -15 0.9 0.8 11 -12 47 -6 2 -15 0.5 0.1 35 -38 -25 -41 1 -15 0.5 0.1 -18 -41 29 -24 2 -15 0.9 0.1 7 -38 18 -45 1 -15 0.6 0.9 41 -18 48 -16 2 -15 0.7 0.8 44 -44 19 -37 1 -15 0.5 0.6 32 -13 1 -48 1 -15 0.1 0.6 38 -43 -7 -21 2 -15 0.6 0.3 -35 -41 -13 -22 2 -15 0.1 0.6 -33 -42 8 -17 2 -15 0.1 0.3 20 -43 -23 -33 1 -15 0.5 0.5 21 -35 11 5 2 -15 0.3 0.8 15 4 -6 -16 1 -15 0.2 0.2 -11 -32 7 -6 2 -15 0.4 0.9 39 3 12 -8 2 -15 0.4 0.5 23 20 49 25 2 -15 0.1 0.1 22 -23 
-5 -39 1 -15 0.7 0.7 6 -14 2 -49 1 -15 0.2 0.8 -41 -48 27 -12 2 -15 0.7 0.7 5 -22 25 -27 2 -15 0.3 0.3 -7 -43 26 13 2 -15 0.9 0.3 37 -42 22 -20 1 -15 0.9 0.6 21 -44 34 15 2 -15 0.1 0.8 -1 -14 14 -10 2 -15 0.6 0.6 50 37 42 28 1 -15 0.9 0.3 34 28 -24 -27 1 -15 0.2 0.5 21 16 -15 -40 1 -15 0.7 0.9 -38 -50 19 3 2 -15 0.5 0.8 -35 -50 26 -28 2 -15 0.3 0.2 37 -7 32 -17 1 -15 0.7 0.6 48 40 39 6 1 -15 0.1 0.3 49 29 40 22 1 -15 0.6 0.1 -13 -39 -24 -30 1 -15 0.5 0.6 8 -7 1 -3 1 -15 0.9 0.3 38 -31 36 4 1 -15 0.5 0.3 25 -5 3 -31 1 -15 0.2 0.8 4 -34 22 1 2 -15 0.9 0.2 10 8 0 -6 1 -15 0.8 0.9 -14 -43 28 -47 2 -15 0.1 0.8 43 6 -1 -37 1 -15 0.9 0.1 16 -45 40 -2 1 -15 0.8 0.8 -20 -26 29 -29 2 -15 0.9 0.5 41 34 19 -7 1 -15 0.9 0.4 9 -21 24 9 2 -15 0.5 0.7 13 -14 7 -26 2 -15 0.5 0.2 -26 -45 41 16 2 -15 0.9 0.6 -25 -37 34 -32 2 -15 0.2 0.4 17 -26 46 -31 2 -15 0.2 0.4 27 -9 34 -4 2 -15 0.1 0.9 16 -34 13 -14 2 -15 0.1 0.9 20 -40 34 -18 2 -15 0.6 0.9 30 -35 -25 -50 1 -15 0.4 0.5 14 -21 48 -24 2 -15 0.6 0.9 14 -12 29 -7 2 -15 0.9 0.2 23 -17 -6 -15 1 -15 0.3 0.7 11 3 1 -47 1 -15 0.7 0.2 21 4 44 19 2 -15 0.3 0.1 19 -23 25 2 2 -15 0.9 0.9 -14 -33 13 -27 2 -15 0.2 0.9 6 -33 12 -2 2 -15 0.8 0.2 -5 -25 29 -7 2 -15 0.7 0.8 22 -24 29 0 2 -15 0.4 0.7 44 -40 20 -27 2 -15 0.1 0.3 35 -17 29 23 2 -15 0.8 0.5 20 5 3 -25 1 -15 0.5 0.1 -17 -24 34 13 2 -15 0.9 0.1 40 16 42 -30 1 -15 0.7 0.9 -16 -27 40 -7 2 -15 0.2 0.3 33 31 12 -27 1 -15 0.3 0.4 5 -19 -35 -42 1 -15 0.6 0.7 2 -5 37 -1 2 -15 0.2 0.5 37 35 -6 -9 1 -15 0.4 0.9 27 15 38 -45 2 -15 0.2 0.3 14 -20 19 -43 2 -15 0.6 0.3 20 -33 25 -24 2 -15 0.8 0.8 19 5 20 -42 2 -15 0.2 0.8 5 -10 25 -16 2 -15 0.8 0.1 40 16 44 15 2 -15 0.5 0.3 48 -44 41 21 2 -15 0.6 0.5 36 30 35 28 1 -15 0.3 0.2 17 -18 45 29 2 -15 0.6 0.5 44 17 26 -28 1 -15 0.1 0.5 13 -42 50 -24 2 -15 0.2 0.2 39 5 48 5 2 -15 0.2 0.9 -7 -20 -1 -47 2 -15 0.7 0.5 38 27 50 -18 1 -15 0.9 0.1 18 -47 -10 -15 1 -15 0.3 0.5 31 -45 -14 -35 2 -15 0.7 0.2 -37 -38 0 -46 1 -15 0.5 0.7 28 -22 25 7 2 -15 0.3 0.1 3 -48 -13 -15 2 -15 0.5 0.3 -14 -15 49 17 2 -15 0.2 0.4 -17 -49 -34 -47 1 -15 0.8 0.7 -5 -48 13 -22 2 -15 0.1 0.8 12 -5 11 10 2 -15 0.9 0.2 -25 -40 -16 -42 1 -15 0.7 0.6 48 -14 33 -4 1 -15 0.5 0.9 12 -27 11 3 2 -15 0.5 0.1 39 -1 31 -21 1 -15 0.3 0.5 48 -29 21 -20 2 -15 0.6 0.9 40 -30 43 12 2 -15 0.6 0.5 28 -32 37 -19 1 -15 0.5 0.9 -9 -49 34 20 2 -15 0.5 0.2 46 -30 25 5 1 -15 0.1 0.4 -32 -34 -7 -35 1 -15 0.4 0.3 -7 -35 6 -41 1 -15 0.2 0.5 40 -30 -35 -49 1 -15 0.1 0.3 22 -3 38 -4 2 -15 0.3 0.6 -19 -43 47 4 2 -15 0.5 0.5 44 -32 -37 -45 1 -15 0.6 0.2 21 -18 -16 -27 1 -15 0.7 0.5 48 34 27 12 1 -15 0.9 0.2 40 -43 40 -6 1 -15 0.9 0.2 29 -4 8 7 1 -15 0.4 0.2 8 -50 44 13 2 -15 0.9 0.3 44 31 38 4 1 -15 0.3 0.2 20 -40 39 -14 2 -15 0.2 0.4 18 -36 44 40 2 -15 0.3 0.1 -6 -22 30 -22 1 -15 0.3 0.5 34 -21 48 -31 2 -15 0.4 0.1 5 -33 29 10 2 -15 0.4 0.2 48 -26 38 -26 1 -15 0.3 0.6 16 -33 21 -16 2 -15 0.1 0.2 -21 -45 36 13 2 -15 0.6 0.8 35 -14 5 -39 1 -15 0.7 0.7 39 -28 21 6 2 -15 0.8 0.6 -18 -25 35 -21 2 -15 0.2 0.9 30 -34 33 -36 2 -15 0.3 0.3 47 22 37 -47 1 -15 0.1 0.7 18 -47 -15 -28 1 -15 0.3 0.5 7 4 27 -40 1 -15 0.1 0.7 42 -35 -9 -50 1 -15 0.7 0.6 50 -12 23 14 1 -15 0.1 0.5 21 -31 16 -17 2 -15 0.8 0.4 -1 -10 24 11 2 -15 0.2 0.5 45 -37 -14 -28 1 -15 0.5 0.2 -24 -48 3 -21 1 -15 0.7 0.6 29 -1 40 10 2 -15 0.7 0.5 41 -20 38 -26 2 -15 0.1 0.2 33 1 41 -16 1 -15 0.2 0.2 32 21 42 -8 1 -15 0.9 0.3 40 29 7 -29 1 -15 0.3 0.3 10 -47 39 37 2 -15 0.7 0.8 46 22 17 -22 1 -16 0.1 0.3 -23 -45 -1 -32 2 -16 0.7 0.1 41 8 4 -2 1 -16 0.6 0.7 44 -23 -11 -17 1 -16 0.3 0.8 45 
17 27 25 1 -16 0.4 0.9 10 -7 29 16 2 -16 0.4 0.4 16 3 31 -30 1 -16 0.6 0.5 49 -12 40 29 2 -16 0.7 0.5 10 -9 -36 -43 1 -16 0.7 0.6 37 -47 29 -23 1 -16 0.8 0.6 32 -18 48 -40 1 -16 0.9 0.3 17 -26 45 31 2 -16 0.2 0.8 24 -5 -1 -19 1 -16 0.1 0.4 17 -18 -5 -19 2 -16 0.2 0.5 42 -31 23 -38 2 -16 0.5 0.7 27 -18 39 -8 2 -16 0.4 0.1 24 3 30 -30 1 -16 0.1 0.4 21 -10 29 5 2 -16 0.8 0.4 6 -42 50 22 2 -16 0.7 0.3 29 -32 14 -8 1 -16 0.8 0.3 38 36 26 -7 1 -16 0.2 0.2 12 -18 21 -8 2 -16 0.2 0.3 -33 -42 33 -4 2 -16 0.3 0.9 14 -33 36 11 2 -16 0.7 0.5 19 -15 -36 -44 1 -16 0.5 0.2 28 -47 28 -48 1 -16 0.5 0.8 24 -45 13 -8 2 -16 0.6 0.8 43 -24 35 -32 2 -16 0.9 0.7 12 -41 17 -14 1 -16 0.4 0.9 20 -8 4 -38 1 -16 0.9 0.4 44 10 28 -44 1 -16 0.2 0.4 2 -27 42 5 2 -16 0.9 0.5 2 -1 38 -30 1 -16 0.1 0.4 19 -3 -5 -23 1 -16 0.1 0.6 29 26 43 -7 1 -16 0.6 0.8 -25 -29 9 6 2 -16 0.2 0.6 26 -31 24 -22 2 -16 0.7 0.8 19 -37 32 -36 2 -16 0.7 0.7 19 -30 50 -34 2 -16 0.9 0.4 50 15 -26 -44 1 -16 0.9 0.7 1 -8 -24 -29 1 -16 0.8 0.7 4 -14 4 -12 1 -16 0.6 0.7 34 32 40 0 2 -16 0.5 0.2 -26 -50 -20 -46 1 -16 0.9 0.7 44 -47 3 -14 1 -16 0.9 0.9 33 12 32 -33 1 -16 0.4 0.3 50 -2 -17 -28 1 -16 0.9 0.1 22 -32 49 5 1 -16 0.9 0.4 29 18 -38 -39 1 -16 0.6 0.1 31 0 47 41 2 -16 0.5 0.9 23 -3 -4 -16 1 -16 0.4 0.2 34 17 35 -47 1 -16 0.3 0.3 35 -3 -7 -40 1 -16 0.5 0.6 4 -19 -28 -48 1 -16 0.6 0.7 11 10 6 -41 1 -16 0.8 0.3 14 -45 -19 -50 1 -16 0.1 0.9 27 -35 2 -43 2 -16 0.1 0.8 10 9 -13 -45 1 -16 0.5 0.4 -33 -45 45 -25 2 -16 0.1 0.5 -5 -31 -26 -35 2 -16 0.5 0.7 -1 -19 27 -7 2 -16 0.7 0.9 12 1 -37 -49 1 -16 0.7 0.2 38 34 4 -6 1 -16 0.6 0.1 22 -32 32 8 2 -16 0.4 0.4 31 -7 43 19 2 -16 0.3 0.5 25 -38 22 -26 2 -16 0.4 0.8 -3 -19 44 -2 2 -16 0.5 0.3 37 -23 18 4 2 -16 0.9 0.1 30 -12 9 -48 1 -16 0.6 0.9 4 -7 30 -25 2 -16 0.2 0.9 34 -46 9 -34 2 -16 0.5 0.2 20 -26 40 -12 2 -16 0.9 0.2 -29 -50 34 -33 2 -16 0.3 0.6 44 23 20 -30 1 -16 0.1 0.9 3 -15 20 -2 2 -16 0.4 0.4 -22 -42 -27 -38 2 -16 0.7 0.1 -39 -48 45 -2 2 -16 0.1 0.8 43 8 45 22 2 -16 0.5 0.4 -11 -43 -33 -48 1 -16 0.9 0.7 8 -8 24 -18 2 -16 0.1 0.4 0 -25 40 -21 2 -16 0.7 0.5 42 34 22 -45 1 -16 0.5 0.3 28 2 31 -16 1 -16 0.4 0.8 2 -4 36 24 2 -16 0.1 0.2 48 -38 47 27 2 -16 0.8 0.1 -1 -4 -32 -44 1 -16 0.4 0.8 -4 -44 39 -8 2 -16 0.2 0.9 28 -34 7 -43 2 -16 0.1 0.3 -13 -24 16 -34 2 -16 0.9 0.1 41 26 15 -31 1 -16 0.6 0.6 48 -33 -32 -33 1 -16 0.8 0.1 42 -40 22 -48 1 -16 0.4 0.9 -1 -19 49 46 2 -16 0.2 0.5 29 -49 3 -21 2 -16 0.8 0.8 43 11 -34 -35 1 -16 0.1 0.5 43 -6 44 42 2 -16 0.2 0.6 -28 -35 -38 -39 2 -16 0.4 0.1 -26 -38 21 -38 2 -16 0.6 0.3 14 2 32 -30 1 -16 0.1 0.1 25 7 -12 -32 1 -16 0.7 0.4 43 10 49 41 2 -16 0.1 0.8 42 -38 41 2 2 -16 0.2 0.8 -11 -23 -3 -8 2 -16 0.5 0.1 -15 -38 38 -45 2 -16 0.3 0.7 -27 -49 -8 -18 2 -16 0.2 0.4 18 -34 40 -37 2 -16 0.5 0.7 0 -18 41 16 2 -16 0.1 0.1 28 -50 40 24 2 -16 0.5 0.4 18 -4 -10 -16 1 -16 0.1 0.7 -1 -38 23 -17 2 -16 0.5 0.5 -10 -15 12 -31 2 -16 0.5 0.1 1 -11 50 -27 1 -16 0.2 0.6 48 25 41 8 1 -16 0.3 0.8 -35 -42 -2 -3 2 -16 0.4 0.4 42 18 42 37 2 -16 0.7 0.6 9 -19 -2 -20 1 -16 0.6 0.3 -29 -33 14 -8 2 -16 0.2 0.8 5 -2 44 17 2 -16 0.2 0.5 38 -41 -4 -19 2 -16 0.8 0.6 48 -14 -31 -41 1 -16 0.9 0.5 2 -7 46 19 2 -16 0.4 0.7 11 -14 37 -19 2 -16 0.3 0.8 45 -33 41 -28 2 -16 0.3 0.3 11 -39 6 -24 2 -16 0.9 0.8 47 -27 -5 -19 1 -16 0.7 0.8 30 -23 48 26 2 -16 0.4 0.3 46 -17 28 -19 1 -16 0.3 0.9 -22 -23 -14 -37 1 -16 0.7 0.9 50 -24 -1 -15 1 -16 0.4 0.9 -5 -50 50 -16 2 -16 0.6 0.7 26 -23 50 -48 2 -16 0.4 0.1 49 3 15 -39 1 -16 0.9 0.4 29 23 -13 -33 1 -16 0.2 0.6 33 -25 13 -44 2 -16 0.9 0.6 24 -25 27 -5 1 
[example gamble data, flattened in extraction: whitespace-delimited records of eight fields each — a subject index (-16 through -24 across this span), two probabilities (0.1–0.9), four integer payoffs (-50 to 50), and a choice coded 1 or 2]
0.7 0.2 -18 -43 2 -30 1 -24 0.1 0.4 19 -14 12 8 2 -24 0.2 0.8 18 6 13 -33 2 -24 0.6 0.6 49 -50 40 32 2 -24 0.9 0.8 -9 -11 15 -30 2 -24 0.4 0.9 -24 -41 -31 -46 1 -24 0.1 0.3 -5 -42 -25 -43 1 -24 0.8 0.9 -18 -36 -3 -32 2 -24 0.8 0.1 14 -14 26 -38 1 -24 0.5 0.9 -4 -20 -6 -22 1 -24 0.1 0.4 17 -14 -17 -22 1 -24 0.2 0.4 6 -30 19 -37 2 -24 0.7 0.9 34 -33 19 9 2 -24 0.8 0.5 31 -36 5 -34 1 -24 0.8 0.7 7 -14 -10 -43 2 -24 0.4 0.3 34 10 29 -1 1 -24 0.4 0.5 1 -33 -20 -22 1 -24 0.1 0.3 18 -12 -3 -8 2 -24 0.7 0.6 4 -36 15 -37 2 -24 0.6 0.8 48 -23 2 -6 1 -24 0.1 0.4 -4 -9 43 -1 1 -24 0.5 0.1 16 5 35 31 2 -24 0.1 0.5 -3 -40 -13 -29 2 -24 0.8 0.7 40 -17 4 -44 1 -24 0.3 0.3 45 -15 26 -17 1 -24 0.8 0.7 38 -31 -16 -44 1 -24 0.7 0.3 -34 -41 4 -35 2 -24 0.9 0.2 -17 -47 36 14 2 -24 0.9 0.6 23 -3 49 4 2 -24 0.4 0.2 42 6 36 16 1 -24 0.5 0.7 -10 -32 19 -22 2 -24 0.8 0.9 31 -17 7 5 2 -24 0.9 0.8 49 -30 5 -19 1 -24 0.1 0.9 37 -24 48 44 2 -24 0.8 0.7 8 -36 -26 -31 1 -24 0.7 0.9 45 37 46 -39 1 -24 0.6 0.1 38 19 17 -4 1 -24 0.9 0.6 37 -7 1 0 1 -24 0.1 0.7 44 -22 47 -49 2 -24 0.6 0.4 28 13 37 -1 1 -24 0.8 0.3 -17 -44 28 -22 2 -24 0.7 0.3 44 10 25 6 1 -24 0.8 0.2 -29 -33 10 8 2 -24 0.1 0.7 44 -36 5 -1 2 -24 0.8 0.7 21 10 13 -22 2 -24 0.1 0.6 30 22 45 11 1 -24 0.1 0.1 39 -18 46 40 2 -24 0.4 0.2 10 -3 31 26 2 -24 0.9 0.1 41 -22 -39 -44 1 -24 0.5 0.3 -10 -37 -12 -34 2 -24 0.9 0.5 0 -14 -5 -33 2 -24 0.7 0.7 22 -49 3 -24 1 -24 0.8 0.3 29 1 9 -43 1 -24 0.1 0.9 29 6 -38 -41 1 -24 0.5 0.7 -10 -27 39 -3 2 -24 0.3 0.8 -9 -10 34 -50 2 -24 0.4 0.1 43 -46 43 1 2 -24 0.7 0.7 15 -22 -2 -15 1 -24 0.6 0.5 39 -21 -24 -30 1 -24 0.3 0.4 39 -27 48 1 2 -24 0.3 0.7 20 -27 38 9 2 -24 0.6 0.9 43 36 24 -34 2 -24 0.3 0.8 -16 -33 29 -22 2 -24 0.3 0.9 1 -34 -11 -16 2 -24 0.4 0.4 15 -25 12 10 2 -24 0.5 0.2 23 -3 49 44 2 -24 0.7 0.6 -42 -44 23 -47 2 -24 0.5 0.1 46 35 27 19 1 -24 0.8 0.5 31 21 41 6 1 -24 0.3 0.8 2 -21 34 9 2 -24 0.7 0.7 22 -12 25 -15 2 -24 0.1 0.6 50 45 32 -13 1 -24 0.4 0.5 5 -47 41 -31 2 -24 0.2 0.8 17 -4 43 -48 2 -24 0.6 0.6 28 23 10 -7 1 -24 0.7 0.7 36 -35 23 -16 1 -24 0.7 0.8 33 -15 36 23 2 -24 0.5 0.3 -4 -16 -36 -39 1 -24 0.5 0.4 46 37 39 14 1 -24 0.1 0.4 50 3 -15 -29 1 -24 0.5 0.6 24 -36 2 -5 2 -24 0.3 0.8 25 -17 -15 -40 1 -24 0.1 0.4 33 -50 9 6 2 -24 0.2 0.9 -32 -48 38 -40 2 -24 0.3 0.4 39 13 31 3 1 -24 0.5 0.3 -7 -33 -11 -43 1 -24 0.5 0.6 12 6 -25 -39 1 -24 0.1 0.3 27 -32 49 -31 2 -24 0.1 0.6 -2 -34 5 -23 1 -24 0.8 0.3 22 -45 16 10 1 -24 0.5 0.5 -16 -25 -6 -13 1 -25 0.4 0.5 41 1 38 24 2 -25 0.8 0.3 49 -23 -7 -43 1 -25 0.5 0.2 10 5 20 -16 2 -25 0.2 0.5 3 -43 34 14 2 -25 0.2 0.9 46 -2 -10 -17 1 -25 0.3 0.9 19 18 19 -16 1 -25 0.7 0.7 -26 -45 -34 -48 1 -25 0.9 0.3 28 -6 17 -25 1 -25 0.2 0.4 45 1 44 -3 1 -25 0.1 0.9 37 29 10 -46 1 -25 0.1 0.7 33 -38 -2 -48 1 -25 0.3 0.8 3 -36 -10 -29 2 -25 0.4 0.3 41 -48 36 -35 1 -25 0.3 0.5 -19 -42 25 14 2 -25 0.7 0.4 41 -43 29 9 1 -25 0.1 0.7 17 -28 9 -18 2 -25 0.1 0.1 23 -25 46 25 2 -25 0.9 0.9 35 18 47 -4 2 -25 0.4 0.3 28 -26 -6 -44 2 -25 0.3 0.3 17 9 50 8 2 -25 0.4 0.1 -25 -37 36 -10 2 -25 0.1 0.9 7 -10 -5 -13 1 -25 0.2 0.5 -18 -49 28 -43 2 -25 0.4 0.2 12 -20 -30 -36 1 -25 0.8 0.5 47 5 3 -26 1 -25 0.9 0.9 35 -13 29 11 2 -25 0.6 0.6 44 -7 15 -23 1 -25 0.1 0.6 50 17 32 -31 1 -25 0.7 0.6 3 -3 -23 -24 1 -25 0.8 0.1 10 -24 50 -29 1 -25 0.1 0.8 47 9 44 18 1 -25 0.6 0.8 21 -1 40 -11 2 -25 0.5 0.1 35 14 9 -20 1 -25 0.7 0.7 19 16 38 -13 1 -25 0.3 0.6 -30 -36 -17 -50 2 -25 0.6 0.8 22 -49 24 12 2 -25 0.2 0.5 -2 -12 32 -13 2 -25 0.3 0.2 34 19 -8 -10 1 -25 0.1 0.8 21 -15 45 -18 2 -25 0.2 0.5 -10 -21 20 3 2 
-25 0.1 0.1 37 -28 -6 -27 1 -25 0.9 0.4 11 -33 37 2 2 -25 0.5 0.3 -15 -32 -19 -44 1 -25 0.2 0.7 -3 -48 -21 -48 2 -25 0.5 0.2 10 -38 -35 -42 1 -25 0.6 0.7 17 -28 2 -32 1 -25 0.3 0.5 -16 -21 3 -33 1 -25 0.2 0.2 46 23 8 -37 1 -25 0.6 0.8 44 -46 30 -34 2 -25 0.8 0.8 -36 -39 38 31 2 -25 0.7 0.7 31 3 -23 -30 1 -25 0.7 0.9 27 -12 30 -31 1 -25 0.5 0.3 38 -30 3 -16 1 -25 0.2 0.1 24 -26 27 4 2 -25 0.7 0.9 21 11 2 -4 1 -25 0.4 0.8 -49 -50 16 -29 2 -25 0.8 0.2 14 -43 46 11 2 -25 0.3 0.6 29 -36 18 -47 1 -25 0.7 0.1 30 -31 28 -7 1 -25 0.8 0.2 31 30 32 -21 1 -25 0.9 0.8 34 15 14 2 1 -25 0.6 0.7 46 -16 26 -1 2 -25 0.3 0.2 36 25 11 -49 1 -25 0.2 0.4 -5 -42 -9 -25 1 -25 0.8 0.4 0 -48 16 -21 2 -25 0.9 0.4 21 -17 16 11 2 -25 0.5 0.8 -27 -46 48 25 2 -25 0.1 0.9 20 -8 45 3 2 -25 0.9 0.4 -25 -26 6 -15 2 -25 0.8 0.8 -25 -33 10 -17 2 -25 0.9 0.3 38 -10 10 -26 1 -25 0.7 0.7 -30 -32 43 -14 2 -25 0.6 0.3 -11 -43 -39 -46 2 -25 0.4 0.2 7 -45 -36 -42 1 -25 0.9 0.9 -25 -43 37 -30 2 -25 0.8 0.4 -15 -35 17 -39 2 -25 0.7 0.6 -38 -39 43 -3 2 -25 0.4 0.2 -37 -44 -11 -44 2 -25 0.6 0.2 26 -44 11 -35 1 -25 0.9 0.5 22 -41 -11 -35 1 -25 0.6 0.5 24 15 -1 -19 1 -25 0.7 0.4 22 -10 13 -5 1 -25 0.7 0.5 38 26 -20 -35 1 -25 0.3 0.6 40 8 29 12 2 -25 0.8 0.4 12 -21 38 22 2 -25 0.7 0.7 25 -5 30 -43 1 -25 0.1 0.3 -20 -29 -17 -33 2 -25 0.6 0.7 -10 -44 0 -1 2 -25 0.5 0.7 50 8 12 -6 1 -25 0.7 0.5 30 -8 47 -4 2 -25 0.2 0.9 50 -36 30 -5 2 -25 0.3 0.9 12 -17 33 -17 2 -25 0.7 0.8 37 -24 24 18 2 -25 0.3 0.1 36 -26 -21 -36 1 -25 0.1 0.9 20 6 6 -7 2 -25 0.1 0.9 -40 -46 9 8 2 -25 0.2 0.6 48 29 0 -12 1 -25 0.4 0.8 9 -6 11 8 2 -25 0.7 0.6 12 -2 44 -22 2 -25 0.6 0.1 -14 -23 8 -11 2 -25 0.6 0.6 26 -19 -33 -44 1 -25 0.8 0.3 41 5 48 -47 1 -25 0.9 0.8 15 -33 42 35 2 -25 0.4 0.1 45 19 -4 -19 1 -25 0.3 0.4 -20 -28 -2 -11 1 -25 0.7 0.4 43 15 11 10 1 -25 0.9 0.3 -3 -41 46 32 2 -25 0.1 0.7 13 -11 -38 -45 1 -25 0.5 0.1 -15 -28 1 -32 2 -25 0.8 0.9 -22 -50 -24 -30 2 -25 0.5 0.5 -33 -50 -22 -44 2 -25 0.5 0.2 25 -32 25 0 2 -25 0.1 0.8 41 12 38 -2 1 -25 0.1 0.4 -29 -49 28 -47 2 -25 0.9 0.6 -37 -44 -10 -27 2 -25 0.7 0.4 37 -18 8 3 1 -25 0.7 0.3 45 43 24 18 1 -25 0.3 0.6 29 -3 -7 -14 2 -25 0.3 0.5 6 -35 24 -30 2 -25 0.3 0.7 -45 -47 18 -46 2 -25 0.5 0.4 7 -3 32 1 2 -25 0.9 0.5 26 14 -6 -8 1 -25 0.6 0.9 10 -47 48 46 2 -25 0.7 0.4 19 7 -13 -34 1 -25 0.9 0.8 8 -42 22 -14 2 -25 0.4 0.6 27 -11 -12 -42 1 -25 0.8 0.8 49 23 46 -35 1 -25 0.8 0.2 -23 -45 -4 -39 2 -25 0.4 0.9 25 -1 38 22 2 -25 0.3 0.9 21 -45 -2 -36 1 -25 0.1 0.4 24 -43 28 9 2 -25 0.1 0.7 12 -10 49 -33 2 -25 0.1 0.5 28 -2 35 -9 2 -25 0.7 0.3 8 -31 -8 -16 1 -25 0.3 0.5 50 11 44 -24 1 -25 0.1 0.6 49 17 -5 -26 1 -25 0.9 0.6 -5 -44 41 -47 2 -25 0.4 0.2 -20 -33 6 -34 2 -25 0.5 0.7 15 -47 24 2 2 -25 0.4 0.7 47 -7 28 -39 1 -25 0.5 0.9 -22 -29 28 -46 2 -25 0.9 0.7 24 13 15 -30 1 -25 0.2 0.2 12 -45 29 -37 2 -25 0.4 0.9 36 -11 34 -32 2 -25 0.7 0.7 -39 -42 -45 -46 2 -25 0.6 0.7 -20 -34 32 25 2 -25 0.5 0.9 30 -14 -10 -29 1 -25 0.7 0.8 -15 -17 45 -27 2 -25 0.8 0.6 15 -44 41 12 2 -25 0.4 0.7 36 15 20 -9 1 -25 0.4 0.6 30 -32 -11 -12 1 -25 0.7 0.5 29 -26 24 -33 2 -25 0.9 0.7 36 -6 38 -36 1 -25 0.5 0.4 14 -13 48 45 2 -25 0.1 0.3 -34 -39 41 22 2 -25 0.8 0.4 24 -16 7 -45 1 -25 0.2 0.5 48 37 32 -8 1 -25 0.8 0.9 36 33 19 -14 1 -25 0.4 0.2 -25 -31 20 11 2 -25 0.9 0.1 -16 -17 -15 -23 2 -25 0.3 0.6 -40 -47 40 -14 2 -25 0.5 0.7 13 2 37 -27 1 -25 0.4 0.3 11 -30 42 -47 1 -25 0.2 0.4 41 -5 29 7 2 -25 0.1 0.4 3 -1 -5 -48 1 -25 0.5 0.6 -33 -46 26 -38 2 -25 0.8 0.3 23 -38 10 -42 1 -25 0.3 0.3 37 4 41 -30 1 -25 0.3 0.1 13 2 37 4 2 -25 0.9 0.3 12 -15 
4 -17 1 -25 0.3 0.9 45 -31 36 -18 2 -25 0.4 0.3 25 -29 -12 -21 1 -25 0.6 0.9 35 -20 -23 -33 1 -25 0.8 0.2 4 -23 18 -22 1 -25 0.4 0.4 23 -12 32 -4 1 -25 0.1 0.1 50 -47 21 8 2 -25 0.1 0.4 18 -4 29 -22 2 -25 0.4 0.3 39 -34 -3 -27 1 -25 0.8 0.1 -21 -48 23 -12 2 -25 0.2 0.4 44 38 8 -23 1 -25 0.4 0.3 -7 -8 33 -22 2 -25 0.4 0.3 24 -30 33 -46 2 -25 0.1 0.6 16 -32 33 -5 2 -25 0.4 0.7 -30 -43 11 -47 1 -25 0.5 0.8 29 -38 9 -19 2 -25 0.3 0.6 -25 -45 41 -13 2 -25 0.6 0.2 9 -18 18 -6 2 -25 0.5 0.1 37 -38 27 -22 1 -25 0.7 0.1 -10 -17 27 -15 1 -25 0.8 0.2 41 -45 29 19 1 -25 0.5 0.1 46 -42 19 -29 1 -25 0.4 0.5 29 5 -18 -39 1 -25 0.6 0.3 30 -24 1 -18 1 -25 0.8 0.2 -10 -22 6 -42 1 -25 0.2 0.4 -1 -46 -11 -19 2 -25 0.4 0.4 18 -30 9 -22 2 -25 0.5 0.6 26 11 44 -8 2 -25 0.9 0.6 -31 -43 12 -18 2 -25 0.1 0.2 44 -45 36 29 2 -25 0.1 0.7 22 7 11 4 2 -25 0.2 0.3 38 -33 39 -39 1 -25 0.8 0.8 43 -38 42 -19 2 -25 0.9 0.9 -1 -45 -26 -27 1 -25 0.5 0.4 30 19 31 -34 1 -25 0.6 0.1 48 -29 43 14 1 -25 0.2 0.5 46 3 25 5 2 -25 0.7 0.5 -1 -4 34 -26 2 -25 0.1 0.8 -12 -33 26 -18 2 -25 0.7 0.6 50 6 -6 -48 1 -25 0.2 0.9 9 -50 -10 -29 2 -25 0.2 0.9 34 -50 15 -3 2 -25 0.2 0.7 10 -46 19 -29 2 -25 0.4 0.8 12 -9 -6 -17 1 -25 0.5 0.4 -19 -38 -6 -50 1 -25 0.3 0.7 33 -21 -15 -17 1 -25 0.1 0.4 7 -2 36 -35 2 -25 0.3 0.8 -13 -33 25 5 2 -25 0.8 0.4 40 20 49 1 1 -25 0.7 0.6 -4 -26 34 -5 2 -25 0.9 0.2 47 43 14 2 1 -25 0.6 0.4 30 14 17 -22 1 -25 0.3 0.4 7 -25 24 -32 2 -25 0.1 0.8 29 24 40 -29 1 -25 0.6 0.8 -18 -38 -36 -46 2 -25 0.6 0.8 -27 -48 49 6 2 -25 0.1 0.7 8 -48 20 -15 2 -25 0.9 0.8 28 4 32 28 1 -25 0.2 0.7 18 2 37 -10 2 -25 0.9 0.2 36 26 38 -19 1 -25 0.4 0.9 -23 -24 15 -14 2 -25 0.2 0.2 -22 -33 -7 -22 2 -25 0.8 0.1 -3 -42 43 -20 2 -25 0.9 0.1 -19 -39 2 0 2 -25 0.7 0.5 8 -24 21 -34 1 -25 0.1 0.4 -12 -13 40 -35 2 -25 0.3 0.9 41 29 13 -14 1 -25 0.4 0.1 -43 -44 -22 -50 1 -25 0.8 0.6 46 44 0 -14 1 -25 0.2 0.3 41 -35 37 15 2 -25 0.5 0.8 36 -9 12 -39 1 -25 0.7 0.3 38 11 22 -11 1 -25 0.8 0.1 44 -49 32 4 1 -25 0.1 0.3 -26 -38 34 8 2 -25 0.3 0.7 7 -15 29 9 2 -25 0.8 0.3 26 -18 9 -43 1 -25 0.5 0.9 10 -46 18 -49 2 -25 0.9 0.7 -14 -36 26 -16 2 -25 0.9 0.1 44 -15 5 -33 2 -25 0.7 0.6 40 -15 40 30 2 -25 0.2 0.6 22 2 33 -12 1 -25 0.7 0.8 33 -28 30 15 2 -25 0.7 0.8 17 -12 -14 -44 1 -25 0.2 0.2 11 9 40 -1 1 -25 0.5 0.1 23 -7 49 -7 1 -25 0.6 0.3 49 0 37 -14 1 -25 0.6 0.8 43 -44 27 -16 2 -25 0.5 0.8 -28 -48 45 32 2 -25 0.8 0.4 50 -22 17 7 1 -25 0.9 0.5 24 -40 13 -10 1 -25 0.7 0.4 34 25 42 -24 1 -25 0.3 0.9 41 -10 -5 -37 1 -25 0.7 0.1 20 -22 -26 -49 1 -25 0.7 0.1 42 32 40 0 1 -25 0.8 0.8 -9 -10 21 15 2 -25 0.3 0.4 29 -49 32 11 2 -25 0.6 0.6 16 -11 45 11 2 -25 0.3 0.9 32 -37 -9 -31 1 -25 0.4 0.7 -9 -28 47 -15 2 -25 0.5 0.4 12 -38 5 -17 1 -25 0.2 0.6 -11 -39 17 -22 2 -25 0.8 0.6 -1 -7 48 26 2 -25 0.1 0.3 37 -15 44 -3 2 -25 0.1 0.5 -12 -47 39 1 2 -25 0.1 0.1 30 7 40 -29 1 -25 0.7 0.1 -12 -41 7 -19 2 -25 0.6 0.8 4 -5 7 -14 1 -25 0.5 0.4 32 -12 26 -48 2 -25 0.2 0.9 22 -9 -13 -36 1 -25 0.4 0.2 4 -10 47 -26 1 -25 0.4 0.4 34 -21 2 -24 1 -25 0.7 0.3 38 -33 0 -50 1 -25 0.3 0.3 34 -19 27 18 2 -25 0.6 0.6 44 -10 2 -50 1 -25 0.6 0.4 -34 -38 39 20 2 -25 0.7 0.6 28 -44 33 -23 1 -25 0.8 0.7 -3 -8 29 -38 2 -25 0.3 0.8 31 -29 33 6 2 -25 0.6 0.9 45 -34 -2 -4 1 -25 0.6 0.2 19 -23 21 7 2 -25 0.8 0.6 -14 -33 1 -39 2 -25 0.3 0.5 22 3 -3 -39 1 -25 0.9 0.9 -26 -45 34 29 2 -25 0.7 0.3 25 17 33 -47 1 -25 0.4 0.2 -13 -34 6 -45 1 -25 0.7 0.5 29 -16 -4 -35 1 -25 0.8 0.9 26 -39 36 -38 2 -25 0.9 0.2 12 -4 -28 -46 1 -25 0.4 0.1 16 -17 22 -44 1 -25 0.2 0.7 49 10 -1 -16 1 -25 0.6 0.5 -13 -43 12 -47 1 
-26 0.7 0.7 28 -31 44 31 2 -26 0.8 0.4 40 -2 49 -21 1 -26 0.7 0.9 -40 -46 32 -50 2 -26 0.2 0.2 1 -47 30 -5 2 -26 0.7 0.2 25 -30 47 17 2 -26 0.4 0.2 50 39 -10 -18 1 -26 0.4 0.2 -31 -48 13 -50 2 -26 0.7 0.1 23 0 36 -24 1 -26 0.1 0.3 26 10 24 -32 1 -26 0.8 0.2 47 -8 44 -1 1 -26 0.2 0.4 43 3 23 12 2 -26 0.3 0.6 20 -18 22 -21 2 -26 0.9 0.5 -5 -38 33 -13 2 -26 0.9 0.8 -20 -40 48 12 2 -26 0.8 0.3 -2 -20 48 -27 2 -26 0.3 0.5 38 -29 -30 -43 1 -26 0.7 0.1 -15 -18 -8 -45 1 -26 0.8 0.6 10 -43 -30 -38 1 -26 0.4 0.5 22 -38 3 -26 1 -26 0.7 0.9 28 -9 -3 -21 1 -26 0.2 0.9 12 -31 21 -35 2 -26 0.5 0.4 38 -33 -2 -40 1 -26 0.9 0.1 16 -32 4 -50 1 -26 0.5 0.8 46 -12 41 -36 2 -26 0.1 0.2 21 -41 -10 -30 1 -26 0.9 0.9 29 -1 14 -36 1 -26 0.5 0.7 12 -3 -37 -39 1 -26 0.2 0.2 33 -29 50 27 2 -26 0.2 0.2 -12 -45 -26 -35 1 -26 0.7 0.1 9 -18 33 31 2 -26 0.7 0.8 24 -21 10 -27 1 -26 0.5 0.6 48 -41 26 -23 1 -26 0.2 0.8 35 -17 28 10 2 -26 0.7 0.7 -17 -28 -43 -49 1 -26 0.3 0.5 38 21 -20 -31 1 -26 0.9 0.1 -2 -23 -18 -35 1 -26 0.5 0.4 45 -23 24 -1 1 -26 0.5 0.1 -40 -43 8 2 2 -26 0.1 0.7 44 41 -7 -26 1 -26 0.4 0.7 30 -12 50 7 2 -26 0.7 0.6 18 -48 21 -19 1 -26 0.7 0.4 2 -40 -4 -32 1 -26 0.7 0.5 -27 -50 -38 -47 1 -26 0.5 0.1 45 -14 -1 -42 1 -26 0.4 0.5 7 -31 43 41 2 -26 0.5 0.1 -19 -26 32 13 2 -26 0.7 0.4 21 20 48 -42 1 -26 0.7 0.4 -20 -46 -14 -33 1 -26 0.3 0.7 -7 -15 2 -8 2 -26 0.1 0.7 49 -23 41 -16 2 -26 0.7 0.9 49 47 47 41 2 -26 0.7 0.9 48 -49 35 33 2 -26 0.8 0.9 30 -2 -20 -35 1 -26 0.7 0.1 38 29 -32 -47 1 -26 0.8 0.1 22 7 8 -15 1 -26 0.1 0.1 24 13 -15 -26 1 -26 0.1 0.2 -2 -44 37 34 2 -26 0.7 0.3 25 -17 14 -38 1 -26 0.7 0.3 41 28 12 11 1 -26 0.1 0.1 -36 -47 36 -17 2 -26 0.7 0.9 14 5 46 35 2 -26 0.6 0.2 36 32 29 1 1 -26 0.1 0.4 27 -44 22 -38 2 -26 0.8 0.7 47 6 39 -2 1 -26 0.2 0.4 49 -21 49 29 2 -26 0.7 0.7 4 -34 45 -47 2 -26 0.8 0.6 40 -12 -13 -19 1 -26 0.8 0.5 47 14 10 -43 1 -26 0.6 0.1 19 -39 32 -24 1 -26 0.1 0.5 -2 -18 -41 -46 1 -26 0.6 0.1 -14 -31 29 -20 2 -26 0.1 0.1 7 -10 31 -24 1 -26 0.1 0.8 40 39 12 -32 1 -26 0.7 0.4 43 -35 14 -33 1 -26 0.2 0.5 19 -31 42 9 2 -26 0.6 0.2 39 -4 41 -17 1 -26 0.2 0.2 1 -12 0 -42 1 -26 0.2 0.5 38 -15 -2 -21 1 -26 0.2 0.7 35 -27 32 2 2 -26 0.2 0.6 47 6 12 -23 1 -26 0.6 0.9 -12 -32 38 -6 2 -26 0.7 0.7 24 -29 5 4 2 -26 0.2 0.8 50 -44 25 -9 2 -26 0.5 0.7 -13 -22 23 -29 2 -26 0.6 0.3 3 -38 30 -5 2 -26 0.6 0.4 44 -44 -10 -18 1 -26 0.7 0.5 -36 -38 3 -23 2 -26 0.6 0.7 -9 -15 2 -32 2 -26 0.9 0.4 40 16 44 -32 1 -26 0.3 0.3 12 2 39 -29 1 -26 0.7 0.3 39 26 47 34 2 -26 0.4 0.4 5 -19 44 -6 2 -26 0.5 0.9 23 -35 -3 -22 1 -26 0.3 0.3 48 -15 10 -39 1 -26 0.6 0.2 9 -45 49 41 2 -26 0.9 0.6 40 -39 7 -8 1 -26 0.9 0.7 -8 -13 -41 -49 1 -26 0.6 0.6 29 -5 21 -21 1 -26 0.5 0.8 1 -45 48 37 2 -26 0.6 0.8 6 2 9 -2 2 -26 0.9 0.8 36 35 41 -13 1 -26 0.7 0.9 17 -5 -15 -43 1 -26 0.3 0.9 8 -27 10 5 2 -26 0.3 0.8 -22 -25 16 13 2 -26 0.8 0.3 -20 -44 -6 -11 2 -26 0.6 0.2 9 -9 46 -13 1 -26 0.7 0.2 -13 -42 30 10 2 -26 0.7 0.2 -11 -39 25 15 2 -26 0.5 0.7 -7 -9 41 25 2 -26 0.7 0.2 -5 -37 30 -31 2 -26 0.4 0.7 24 -12 -4 -41 1 -26 0.5 0.4 0 -31 -24 -26 1 -26 0.7 0.3 18 -20 7 -34 1 -26 0.9 0.1 -26 -40 22 12 2 -26 0.2 0.7 -22 -30 -16 -20 2 -26 0.7 0.3 39 -20 35 24 1 -26 0.2 0.1 36 -47 35 28 2 -26 0.4 0.6 18 -32 41 39 2 -26 0.7 0.7 50 42 24 -13 1 -26 0.4 0.6 -14 -34 18 -17 2 -26 0.3 0.1 47 -24 48 -18 1 -26 0.7 0.5 42 30 13 -11 1 -26 0.6 0.5 33 28 -40 -44 1 -26 0.4 0.4 -33 -40 16 -18 2 -26 0.8 0.8 49 22 -15 -49 1 -26 0.5 0.4 36 -19 -42 -45 1 -26 0.2 0.9 37 7 -10 -30 1 -26 0.6 0.9 -21 -46 26 -9 2 -26 0.1 0.9 -25 -41 25 -20 2 -26 0.5 0.3 -27 -44 
27 -23 2 -26 0.3 0.7 45 25 0 -9 1 -26 0.6 0.7 10 -2 -15 -17 1 -26 0.1 0.8 -27 -50 -6 -18 2 -26 0.4 0.3 8 -30 -16 -28 1 -26 0.9 0.2 -33 -46 -16 -22 2 -26 0.8 0.9 -29 -35 24 -30 2 -26 0.5 0.3 -5 -47 -15 -27 1 -26 0.1 0.6 5 -31 -1 -7 2 -26 0.7 0.1 47 -24 22 -39 1 -26 0.6 0.4 21 19 39 -48 1 -26 0.1 0.6 26 11 0 -40 1 -26 0.4 0.5 20 -40 20 18 2 -26 0.9 0.7 -25 -46 23 -31 2 -26 0.9 0.9 47 -15 -23 -26 1 -26 0.3 0.6 -13 -45 50 -13 2 -26 0.2 0.1 -32 -33 -31 -32 1 -26 0.7 0.5 9 -26 50 24 2 -26 0.5 0.6 -20 -49 39 6 2 -26 0.6 0.1 22 -15 28 -21 1 -26 0.6 0.1 30 -43 30 -30 1 -26 0.2 0.9 -28 -44 3 -32 2 -26 0.7 0.6 12 -30 -11 -17 1 -26 0.4 0.9 21 -44 38 8 2 -26 0.2 0.7 4 -14 -19 -34 1 -26 0.2 0.3 9 -25 -41 -46 1 -26 0.7 0.5 41 -6 12 -25 1 -26 0.4 0.7 35 31 37 10 1 -26 0.4 0.6 19 12 -16 -43 1 -26 0.5 0.9 36 -14 45 24 2 -26 0.7 0.3 -38 -48 23 -48 2 -26 0.5 0.8 25 -37 22 -28 2 -26 0.4 0.1 -15 -24 8 -42 2 -26 0.1 0.1 35 1 16 -34 1 -26 0.4 0.2 42 13 -3 -5 1 -26 0.1 0.8 0 -10 32 14 2 -26 0.7 0.1 35 -7 -5 -23 1 -26 0.7 0.8 -21 -30 32 0 2 -26 0.7 0.8 11 2 17 -10 1 -26 0.8 0.7 -18 -20 9 2 2 -26 0.7 0.7 46 3 50 -17 1 -26 0.2 0.8 5 -27 50 -26 2 -26 0.3 0.6 41 -4 -7 -50 1 -26 0.8 0.6 30 -3 11 -50 1 -26 0.9 0.8 35 -21 6 -33 1 -26 0.2 0.1 -26 -49 45 -10 2 -26 0.4 0.3 8 -20 -4 -42 1 -26 0.8 0.7 24 0 41 -25 2 -26 0.1 0.6 26 -49 11 -25 2 -26 0.6 0.4 42 -37 -7 -35 1 -26 0.1 0.2 48 30 -26 -40 1 -26 0.5 0.3 36 -46 27 -34 1 -26 0.2 0.1 47 26 18 -10 1 -26 0.7 0.4 41 -50 25 -48 1 -26 0.8 0.5 -8 -25 23 -46 2 -26 0.8 0.7 17 -49 17 -32 2 -26 0.6 0.2 -18 -49 -33 -50 1 -26 0.8 0.9 33 -44 8 -9 1 -26 0.4 0.4 30 -39 -31 -35 1 -26 0.2 0.2 -1 -48 14 -45 2 -26 0.4 0.8 -22 -34 49 -10 2 -26 0.8 0.6 14 -17 5 3 1 -26 0.2 0.5 13 -21 45 -6 2 -26 0.3 0.7 41 -37 50 -16 2 -26 0.8 0.2 6 4 28 -25 1 -26 0.4 0.7 -34 -35 -8 -20 2 -26 0.6 0.8 49 40 40 -8 1 -26 0.8 0.7 19 -33 41 1 2 -26 0.5 0.3 45 27 25 -1 1 -26 0.8 0.3 34 21 29 14 2 -26 0.5 0.9 42 -12 11 -43 2 -26 0.9 0.6 4 -45 1 -3 2 -26 0.2 0.1 43 32 29 -14 1 -26 0.9 0.6 -7 -24 15 -39 2 -26 0.3 0.7 45 -22 -28 -31 1 -26 0.3 0.7 37 -20 17 3 2 -26 0.6 0.3 4 -45 -9 -24 1 -26 0.6 0.7 -4 -30 47 34 2 -26 0.6 0.3 -38 -49 44 27 2 -26 0.7 0.9 4 -19 25 -9 2 -26 0.5 0.2 -16 -37 25 -2 2 -26 0.8 0.3 33 -34 8 -26 1 -26 0.9 0.1 20 -9 21 -30 1 -26 0.6 0.6 30 -8 21 1 1 -26 0.1 0.6 45 -7 36 -50 2 -26 0.4 0.9 37 -32 11 -20 1 -26 0.4 0.6 -17 -18 -18 -45 1 -26 0.1 0.3 16 -26 41 -15 2 -26 0.8 0.8 -1 -48 30 26 2 -26 0.5 0.7 31 -43 11 -22 1 -26 0.9 0.2 46 45 -24 -26 1 -26 0.2 0.1 5 -44 23 -43 1 -26 0.1 0.9 -16 -47 8 -48 2 -26 0.7 0.2 38 29 -10 -21 1 -26 0.9 0.5 13 -47 25 17 2 -26 0.2 0.1 42 27 25 -26 1 -26 0.6 0.1 45 11 19 -7 1 -26 0.7 0.4 2 -39 -34 -37 1 -26 0.6 0.8 16 -18 4 -50 1 -26 0.3 0.5 18 -40 22 -14 2 -26 0.8 0.6 16 -19 29 7 2 -26 0.6 0.9 46 -5 31 -40 1 -26 0.2 0.8 16 2 37 13 2 -26 0.6 0.1 -1 -4 16 9 1 -26 0.9 0.2 35 1 47 -6 1 -26 0.3 0.1 -6 -42 0 -6 2 -26 0.4 0.1 14 -17 5 -43 1 -26 0.6 0.1 -20 -43 2 -47 1 -26 0.6 0.2 -29 -36 42 25 2 -26 0.8 0.7 17 1 25 10 2 -26 0.8 0.8 -33 -43 40 -45 2 -26 0.4 0.9 39 36 16 1 1 -26 0.5 0.8 -1 -46 36 22 2 -26 0.3 0.6 -12 -21 29 -41 2 -26 0.9 0.9 40 -42 -28 -41 1 -26 0.9 0.9 25 -41 31 -14 1 -26 0.8 0.3 1 -29 33 -33 1 -26 0.5 0.1 -4 -14 15 -29 2 -26 0.8 0.9 5 -24 0 -2 1 -26 0.2 0.1 -23 -46 36 25 2 -26 0.3 0.1 33 -8 26 -43 1 -26 0.7 0.6 50 27 -14 -46 1 -26 0.8 0.9 24 -38 -8 -18 1 -26 0.8 0.1 42 -43 3 -18 1 -26 0.3 0.5 26 16 7 -20 1 -26 0.7 0.2 35 32 14 -18 1 -26 0.8 0.9 -18 -48 12 10 2 -26 0.4 0.8 -11 -49 33 22 2 -26 0.3 0.5 -18 -31 -32 -49 1 -26 0.8 0.1 37 22 39 -48 1 -26 0.3 0.9 39 -14 
12 2 2 -26 0.9 0.1 50 21 42 0 1 -26 0.1 0.5 42 -40 -3 -8 2 -26 0.1 0.7 41 -29 46 -14 2 -26 0.7 0.2 48 22 41 -26 1 -26 0.2 0.1 -37 -50 42 -28 2 -26 0.2 0.3 24 -32 7 -17 2 -26 0.3 0.9 35 25 20 19 2 -26 0.6 0.7 -23 -46 -38 -45 1 -26 0.7 0.9 16 8 -2 -11 1 -26 0.3 0.3 50 10 47 -5 1 -26 0.5 0.5 26 18 38 -34 1 -26 0.4 0.7 1 -11 41 -36 2 -26 0.4 0.2 -2 -11 49 16 2 -26 0.2 0.3 42 -25 50 -35 1 -26 0.2 0.4 8 -49 0 -26 2 -26 0.5 0.8 39 -29 -4 -30 1 -26 0.7 0.6 32 19 -39 -50 1 -26 0.3 0.8 10 -45 36 -13 2 -26 0.6 0.8 21 12 15 -2 1 -26 0.3 0.4 -15 -39 12 -16 2 -26 0.3 0.7 43 -18 -10 -41 1 -26 0.5 0.4 16 2 11 7 1 -26 0.4 0.2 16 -32 34 -12 2 -26 0.1 0.2 46 -15 -11 -40 1 -26 0.5 0.4 1 -28 21 -39 2 -26 0.1 0.9 6 -38 28 22 2 -26 0.5 0.3 43 9 1 -46 1 -26 0.5 0.4 0 -23 35 22 2 -26 0.3 0.2 -2 -9 27 -1 2 -26 0.9 0.9 -3 -44 25 -15 2 -26 0.1 0.8 -30 -47 -1 -13 2 -26 0.8 0.8 -13 -25 49 -30 2 -26 0.1 0.6 12 -14 46 7 2 -26 0.6 0.6 -35 -39 23 -4 2 -26 0.9 0.4 21 -4 -19 -21 1 -26 0.3 0.6 18 -11 -25 -36 1 -26 0.7 0.7 10 -50 7 -34 2 -26 0.1 0.9 17 -36 48 -18 2 -26 0.9 0.6 11 -18 -15 -34 1 -26 0.5 0.3 -26 -42 -10 -36 1 -27 0.1 0.6 -6 -24 -17 -23 2 -27 0.9 0.1 50 8 34 -19 1 -27 0.7 0.5 44 2 -36 -39 1 -27 0.9 0.4 19 -28 -35 -48 1 -27 0.5 0.8 50 -39 -27 -31 1 -27 0.2 0.9 -11 -23 12 9 2 -27 0.2 0.2 31 4 1 -43 1 -27 0.5 0.9 39 -17 18 -3 1 -27 0.5 0.1 45 -40 19 -25 1 -27 0.4 0.2 -19 -24 50 -12 2 -27 0.5 0.6 42 6 35 -5 1 -27 0.2 0.4 26 19 34 15 2 -27 0.7 0.4 44 43 4 -20 1 -27 0.8 0.6 48 -46 41 -18 1 -27 0.1 0.4 36 -22 21 -29 2 -27 0.1 0.5 -38 -48 45 -8 2 -27 0.7 0.4 5 -25 33 -6 2 -27 0.6 0.7 43 39 12 -28 1 -27 0.4 0.7 23 13 36 -47 1 -27 0.4 0.8 50 -35 22 -47 2 -27 0.3 0.3 -31 -45 42 8 2 -27 0.7 0.8 18 -28 8 -10 2 -27 0.9 0.6 16 -3 10 4 1 -27 0.4 0.4 40 -39 49 -15 2 -27 0.8 0.3 -20 -22 -13 -23 2 -27 0.2 0.8 3 -4 21 -34 2 -27 0.4 0.6 -33 -46 35 -19 2 -27 0.9 0.6 -40 -43 46 39 2 -27 0.7 0.4 26 -47 35 -40 1 -27 0.2 0.1 0 -27 5 -25 2 -27 0.1 0.8 50 -39 -8 -17 2 -27 0.6 0.7 18 1 -23 -28 1 -27 0.4 0.2 -12 -25 50 -21 2 -27 0.4 0.6 31 0 49 41 2 -27 0.5 0.2 41 -34 41 19 2 -27 0.7 0.6 47 17 43 34 2 -27 0.2 0.5 6 -49 6 -42 2 -27 0.1 0.8 -12 -19 31 -30 2 -27 0.3 0.4 46 -28 -18 -29 1 -27 0.7 0.8 10 -29 32 -38 2 -27 0.5 0.1 41 -34 21 5 2 -27 0.4 0.6 46 -22 46 19 2 -27 0.2 0.1 47 -8 46 -19 1 -27 0.2 0.8 -22 -48 41 39 2 -27 0.7 0.6 46 21 29 -21 1 -27 0.6 0.8 38 -1 49 43 2 -27 0.5 0.5 -9 -25 26 6 2 -27 0.6 0.1 -31 -50 27 20 2 -27 0.8 0.1 43 32 36 34 2 -27 0.5 0.8 24 -48 34 14 2 -27 0.7 0.4 49 36 37 -27 1 -27 0.2 0.4 -14 -38 -30 -44 1 -27 0.9 0.8 27 -11 17 -35 1 -27 0.1 0.9 -39 -47 -1 -50 2 -27 0.6 0.7 -20 -49 45 16 2 -27 0.3 0.6 9 2 -10 -35 1 -27 0.4 0.9 16 11 25 -16 2 -27 0.5 0.9 18 -34 -21 -24 1 -27 0.7 0.6 26 -49 42 -36 2 -27 0.8 0.2 41 -14 29 -50 1 -27 0.2 0.4 43 -2 42 21 2 -27 0.1 0.8 48 29 35 -14 1 -27 0.1 0.6 22 20 30 -18 1 -27 0.6 0.9 26 16 26 -33 1 -27 0.1 0.5 6 -7 48 -49 2 -27 0.6 0.1 3 -28 48 -2 2 -27 0.2 0.5 -21 -39 12 11 2 -27 0.5 0.3 49 -37 48 -41 1 -27 0.6 0.1 41 -2 5 -38 1 -27 0.8 0.4 25 11 29 -15 1 -27 0.3 0.1 36 -38 22 -32 1 -27 0.4 0.2 2 -28 -12 -34 1 -27 0.1 0.1 20 -32 23 -27 2 -27 0.8 0.9 13 7 15 -37 1 -27 0.2 0.1 -34 -38 2 -43 2 -27 0.8 0.4 -6 -12 32 -34 2 -27 0.2 0.8 6 -44 3 -47 2 -27 0.4 0.6 24 16 1 -23 1 -27 0.7 0.9 50 -50 46 39 2 -27 0.3 0.1 25 -32 49 -21 1 -27 0.5 0.6 7 3 28 -11 2 -27 0.1 0.7 26 -7 0 -13 1 -27 0.4 0.5 43 -11 -20 -48 1 -27 0.1 0.1 50 -22 16 -37 1 -27 0.3 0.8 31 -15 -22 -33 1 -27 0.4 0.2 31 14 29 -32 1 -27 0.9 0.1 48 -4 42 17 1 -27 0.4 0.4 15 -22 18 16 2 -27 0.5 0.6 23 -25 7 -44 1 -27 0.1 0.9 32 24 -23 -45 
1 -27 0.3 0.2 10 4 -33 -34 1 -27 0.4 0.1 -10 -31 16 -42 1 -27 0.2 0.4 -4 -37 47 6 2 -27 0.9 0.8 18 16 45 -41 2 -27 0.2 0.6 1 -45 -8 -24 2 -27 0.4 0.7 29 -16 -9 -38 1 -27 0.6 0.8 31 -2 -42 -46 1 -27 0.9 0.4 -26 -50 42 1 2 -27 0.4 0.9 18 14 16 -44 1 -27 0.3 0.8 -7 -33 49 -36 2 -27 0.6 0.9 49 -33 37 30 2 -27 0.5 0.1 42 -36 8 -37 1 -27 0.2 0.9 -2 -21 23 -48 2 -27 0.5 0.1 14 -43 -3 -36 1 -27 0.6 0.2 27 -36 30 17 2 -27 0.7 0.9 1 -39 40 2 2 -27 0.4 0.8 -3 -43 -20 -35 2 -27 0.7 0.3 28 0 -25 -30 1 -27 0.4 0.2 49 -42 44 -21 1 -27 0.7 0.4 31 -36 44 26 2 -27 0.5 0.1 -20 -33 5 -29 2 -27 0.4 0.8 38 -21 30 29 2 -27 0.4 0.7 47 -46 32 -33 2 -27 0.1 0.1 26 13 11 -28 1 -27 0.2 0.5 26 -26 22 -33 2 -27 0.7 0.5 -5 -10 44 33 2 -27 0.7 0.7 47 -21 10 2 1 -27 0.3 0.7 -37 -49 18 -24 2 -27 0.5 0.3 39 28 -9 -23 1 -27 0.6 0.4 40 15 50 -33 1 -27 0.3 0.4 12 -32 43 25 2 -27 0.5 0.2 11 -20 5 -23 1 -27 0.2 0.1 40 5 -11 -20 1 -27 0.5 0.9 40 -7 27 11 2 -27 0.6 0.3 -17 -40 -12 -20 2 -27 0.1 0.1 42 -14 -18 -35 1 -27 0.7 0.2 48 33 -12 -41 1 -27 0.7 0.1 11 -8 -1 -50 1 -27 0.4 0.5 45 14 12 7 1 -27 0.4 0.5 0 -23 14 -38 2 -27 0.6 0.7 46 30 32 -48 1 -27 0.4 0.3 4 -31 26 -29 2 -27 0.3 0.1 8 -13 39 -13 2 -27 0.1 0.3 3 -35 -5 -22 2 -27 0.7 0.3 22 -6 18 -49 1 -27 0.2 0.7 16 -36 15 -40 2 -27 0.1 0.5 7 -48 13 -33 2 -27 0.3 0.9 -7 -37 27 -18 2 -27 0.3 0.7 -7 -32 -36 -48 1 -27 0.9 0.4 35 7 -27 -45 1 -27 0.5 0.9 23 -22 16 1 2 -27 0.3 0.1 36 -1 48 44 2 -27 0.1 0.4 43 6 -14 -36 1 -27 0.5 0.8 -4 -22 47 2 2 -27 0.5 0.7 21 -33 10 -35 1 -27 0.3 0.2 -8 -40 -19 -50 1 -27 0.4 0.6 47 -7 -19 -44 1 -27 0.5 0.1 -20 -45 15 -9 2 -27 0.7 0.5 47 -17 -5 -39 1 -27 0.8 0.1 9 -29 40 -3 2 -27 0.6 0.8 11 -42 -35 -46 1 -27 0.5 0.5 0 -15 -24 -44 1 -27 0.5 0.7 -2 -45 48 -38 2 -27 0.4 0.8 -10 -29 39 -23 2 -27 0.5 0.3 43 -15 -11 -31 1 -27 0.7 0.9 41 -24 47 -12 2 -27 0.9 0.9 -14 -46 16 -10 2 -27 0.4 0.1 -2 -14 15 -1 2 -27 0.1 0.9 3 -11 -15 -31 1 -27 0.2 0.3 8 1 18 -30 1 -27 0.8 0.4 31 16 14 -50 1 -27 0.8 0.7 40 -27 2 -9 1 -27 0.7 0.1 -21 -40 -1 -30 1 -27 0.3 0.5 50 14 33 -25 1 -27 0.9 0.6 25 20 -2 -42 1 -27 0.9 0.1 6 -49 49 41 2 -27 0.9 0.5 47 31 -37 -38 1 -27 0.4 0.8 45 -12 -23 -25 1 -27 0.8 0.9 45 -36 33 -29 2 -27 0.1 0.5 20 -44 48 -24 2 -27 0.9 0.7 35 -1 -6 -7 1 -27 0.2 0.6 -13 -42 16 -12 2 -27 0.1 0.7 35 22 12 0 1 -27 0.4 0.2 -29 -41 23 -13 2 -27 0.9 0.3 6 -42 46 7 2 -27 0.3 0.6 11 -5 29 -47 1 -27 0.3 0.9 16 -49 -19 -42 1 -27 0.6 0.2 39 21 33 -32 1 -27 0.8 0.6 -21 -37 30 -42 2 -27 0.6 0.9 12 11 9 -24 1 -27 0.7 0.6 -13 -49 5 -2 2 -27 0.9 0.5 -1 -49 1 -34 1 -27 0.9 0.5 29 -19 42 32 2 -27 0.3 0.3 0 -1 -7 -46 1 -27 0.5 0.7 29 -50 34 -3 2 -27 0.3 0.8 38 22 36 -32 1 -27 0.7 0.8 19 -24 32 26 2 -27 0.1 0.5 -42 -49 12 -19 2 -27 0.4 0.1 43 -3 -27 -32 1 -27 0.8 0.2 50 -6 3 -23 1 -27 0.6 0.8 24 -11 43 20 2 -27 0.8 0.9 33 -15 -6 -12 1 -27 0.6 0.8 42 -39 47 -24 2 -27 0.3 0.4 32 7 7 -18 1 -27 0.1 0.4 -7 -36 30 -14 2 -27 0.3 0.9 -3 -36 12 -38 2 -27 0.9 0.1 -1 -11 31 18 2 -27 0.1 0.9 11 -37 -9 -44 2 -27 0.1 0.8 22 20 24 -14 1 -27 0.5 0.2 -29 -30 17 -5 2 -27 0.5 0.5 -1 -16 37 17 2 -27 0.3 0.1 4 -31 39 32 2 -27 0.1 0.7 15 -49 36 -6 2 -27 0.9 0.4 43 -47 17 -11 1 -27 0.5 0.3 39 -5 41 -31 1 -27 0.2 0.4 16 14 14 -16 1 -27 0.1 0.4 31 20 23 -45 1 -27 0.7 0.8 0 -8 29 -38 2 -27 0.9 0.7 -5 -49 -10 -44 1 -27 0.4 0.9 38 -35 28 2 2 -27 0.7 0.7 1 -15 5 -47 1 -27 0.1 0.3 33 -13 17 14 2 -27 0.7 0.7 49 -14 18 10 1 -27 0.4 0.3 -23 -32 10 -47 2 -27 0.5 0.7 30 -29 14 -13 2 -27 0.1 0.5 -27 -43 6 -24 2 -27 0.8 0.5 47 34 7 5 1 -27 0.5 0.1 8 -32 -14 -28 1 -27 0.6 0.1 45 -28 30 -46 1 -27 0.5 0.5 46 -10 20 -17 
1 -27 0.5 0.7 45 19 46 22 2 -27 0.1 0.2 26 21 34 -29 1 -27 0.2 0.3 -6 -20 3 -48 1 -27 0.8 0.2 39 -35 22 -49 1 -27 0.1 0.4 44 -25 7 -7 2 -27 0.4 0.9 25 6 9 -46 1 -27 0.8 0.4 -11 -13 36 -45 2 -27 0.1 0.7 -15 -19 21 20 2 -27 0.8 0.5 45 -34 7 -20 1 -27 0.1 0.9 -17 -43 21 -19 2 -27 0.2 0.4 -15 -41 46 32 2 -27 0.1 0.4 49 -37 -19 -46 1 -27 0.8 0.2 17 6 -10 -31 1 -27 0.6 0.8 40 -45 26 -49 2 -27 0.2 0.9 16 4 -11 -46 1 -27 0.7 0.1 -15 -47 19 -49 1 -27 0.2 0.7 35 -29 31 0 2 -27 0.7 0.6 3 -3 47 -14 2 -27 0.6 0.3 -4 -15 -40 -50 1 -27 0.1 0.7 48 14 35 -22 1 -27 0.3 0.8 22 -32 33 18 2 -27 0.2 0.5 29 -36 37 -45 2 -27 0.7 0.3 -25 -42 34 -39 2 -27 0.8 0.4 -20 -49 45 -15 2 -27 0.6 0.1 5 -27 38 -26 1 -27 0.2 0.6 30 26 38 -3 1 -27 0.9 0.8 18 9 -28 -40 1 -27 0.5 0.6 27 -25 12 11 2 -27 0.6 0.3 14 -42 46 -18 2 -27 0.2 0.2 25 13 30 -20 1 -27 0.8 0.2 -4 -25 -7 -47 1 -27 0.3 0.9 50 6 23 20 2 -27 0.3 0.2 7 -9 43 -28 1 -27 0.6 0.9 37 3 -18 -43 1 -27 0.6 0.1 42 -31 -17 -22 1 -27 0.6 0.2 46 20 21 -12 1 -27 0.6 0.6 38 -35 21 -25 1 -27 0.9 0.7 6 -9 43 27 2 -27 0.2 0.8 46 9 34 -41 2 -27 0.8 0.4 18 -42 19 -44 1 -27 0.7 0.9 13 -10 -4 -39 1 -27 0.9 0.7 28 4 31 -36 1 -27 0.4 0.1 27 -36 49 13 2 -27 0.1 0.2 18 8 -35 -45 1 -27 0.2 0.6 30 2 43 0 2 -27 0.5 0.3 -21 -50 -8 -40 2 -27 0.8 0.6 44 3 -36 -42 1 -27 0.2 0.6 2 -8 22 -47 2 -27 0.6 0.5 32 -44 4 -10 1 -27 0.7 0.2 17 -39 3 0 2 -27 0.7 0.3 11 -37 1 -31 1 -27 0.3 0.9 44 -6 39 14 2 -27 0.9 0.5 35 24 41 -4 1 -27 0.9 0.5 32 -41 35 -10 1 -27 0.6 0.1 31 10 28 -36 1 -27 0.2 0.5 -15 -43 -26 -29 2 -27 0.1 0.6 17 4 -30 -42 1 -27 0.1 0.2 -19 -45 -29 -30 2 -27 0.3 0.2 -31 -32 31 9 2 -27 0.2 0.2 5 4 6 -27 1 -27 0.8 0.5 -17 -21 50 -49 2 -27 0.4 0.7 24 -32 9 -41 2 -27 0.9 0.2 28 -10 20 -36 1 -27 0.9 0.7 -4 -10 37 29 2 -27 0.6 0.4 5 -30 -30 -32 1 -27 0.8 0.8 -7 -32 47 28 2 -27 0.2 0.7 23 4 47 -17 2 -27 0.2 0.7 -2 -33 7 -37 2 -27 0.4 0.7 43 -1 50 -13 2 -27 0.8 0.5 21 -8 0 -49 1 -27 0.6 0.6 11 -10 38 33 2 -27 0.1 0.9 18 10 5 -14 1 -27 0.5 0.2 33 5 40 -5 1 -27 0.8 0.7 21 -38 -24 -27 1 -27 0.9 0.5 33 -21 -21 -43 1 -27 0.6 0.1 39 -9 -28 -47 1 -27 0.3 0.7 28 -26 -26 -41 1 -27 0.9 0.8 -6 -9 37 -12 2 -27 0.6 0.5 36 -49 40 34 2 -27 0.5 0.8 -1 -2 -34 -46 1 -28 0.9 0.3 21 -45 41 32 2 -28 0.8 0.8 15 -46 50 -23 2 -28 0.2 0.6 25 -15 18 15 2 -28 0.3 0.3 -24 -42 16 -2 2 -28 0.5 0.9 31 -40 24 -3 2 -28 0.3 0.1 30 28 35 -37 1 -28 0.4 0.6 5 -34 48 -10 2 -28 0.1 0.3 -24 -34 27 11 2 -28 0.6 0.1 33 -32 17 -25 1 -28 0.9 0.8 41 -27 27 -45 1 -28 0.1 0.2 -29 -41 25 14 2 -28 0.1 0.3 34 -47 28 -15 2 -28 0.4 0.1 19 -39 -14 -34 1 -28 0.8 0.3 38 -9 45 -8 1 -28 0.4 0.6 30 -2 26 -49 1 -28 0.4 0.3 34 31 32 17 2 -28 0.2 0.4 47 -5 50 -25 2 -28 0.4 0.5 38 -34 42 -23 2 -28 0.1 0.4 26 -9 11 -31 1 -28 0.2 0.7 -17 -34 47 2 2 -28 0.4 0.4 -22 -26 33 -5 2 -28 0.2 0.8 25 -26 31 -30 2 -28 0.2 0.3 41 21 29 -47 1 -28 0.2 0.1 26 -32 7 1 2 -28 0.2 0.9 47 -6 11 -18 2 -28 0.1 0.1 39 10 4 -46 1 -28 0.2 0.6 -11 -38 -2 -48 2 -28 0.4 0.3 15 -2 34 12 2 -28 0.8 0.2 42 -27 33 29 2 -28 0.9 0.7 7 -27 50 10 2 -28 0.1 0.1 -26 -31 2 -38 2 -28 0.5 0.8 20 1 4 -29 1 -28 0.1 0.4 47 -18 16 -46 2 -28 0.9 0.8 13 -40 1 -34 1 -28 0.8 0.2 29 -15 -24 -39 1 -28 0.1 0.7 29 -35 29 19 2 -28 0.5 0.4 41 -39 34 -10 1 -28 0.1 0.5 44 -37 14 0 2 -28 0.7 0.6 38 25 29 -19 1 -28 0.3 0.1 -15 -38 46 9 2 -28 0.6 0.1 5 -29 44 -35 1 -28 0.3 0.5 46 -31 1 -24 1 -28 0.8 0.1 -42 -49 42 21 2 -28 0.9 0.8 7 -15 2 -32 1 -28 0.2 0.8 35 -35 7 -32 2 -28 0.3 0.9 7 -7 38 23 2 -28 0.1 0.2 -13 -14 33 -37 1 -28 0.6 0.7 23 -49 17 -48 1 -28 0.6 0.6 38 -4 48 -10 2 -28 0.5 0.3 16 -23 0 -26 1 -28 0.1 0.1 35 
20 41 -2 1 -28 0.7 0.4 -31 -44 21 16 2 -28 0.1 0.7 25 -4 21 5 2 -28 0.6 0.3 1 -40 -34 -49 1 -28 0.5 0.1 21 13 0 -10 1 -28 0.6 0.1 -15 -20 38 36 2 -28 0.2 0.8 45 -23 22 -28 2 -28 0.6 0.8 48 -31 21 -28 1 -28 0.8 0.3 -29 -32 31 -26 2 -28 0.5 0.7 44 1 43 -36 1 -28 0.3 0.9 -19 -50 45 9 2 -28 0.5 0.9 3 -28 48 37 2 -28 0.5 0.9 16 -8 24 -29 2 -28 0.8 0.6 43 1 -28 -41 1 -28 0.1 0.1 8 -26 16 -42 1 -28 0.3 0.5 -2 -18 50 -17 2 -28 0.7 0.1 -20 -38 18 -13 2 -28 0.8 0.2 16 0 17 -36 1 -28 0.3 0.1 22 13 32 12 1 -28 0.1 0.9 47 30 9 -3 1 -28 0.6 0.6 0 -19 23 -18 2 -28 0.8 0.6 43 -26 23 11 1 -28 0.2 0.5 13 -27 47 -36 2 -28 0.1 0.4 5 -33 29 -41 2 -28 0.6 0.4 -7 -42 32 -49 2 -28 0.2 0.5 12 -5 4 -7 1 -28 0.1 0.8 -11 -28 -14 -47 2 -28 0.9 0.8 41 23 20 9 1 -28 0.4 0.6 32 19 35 -41 1 -28 0.5 0.3 26 -40 -15 -16 1 -28 0.7 0.6 32 12 6 -3 1 -28 0.8 0.4 -22 -41 -16 -43 2 -28 0.8 0.8 39 1 4 0 1 -28 0.2 0.7 17 -45 37 -12 2 -28 0.8 0.4 35 -18 42 3 2 -28 0.7 0.2 7 -7 14 -3 1 -28 0.3 0.2 40 19 31 -40 1 -28 0.5 0.4 45 21 21 20 1 -28 0.6 0.6 22 21 3 -38 1 -28 0.7 0.7 34 -17 20 9 1 -28 0.2 0.6 -15 -37 37 -12 2 -28 0.9 0.5 38 -9 50 4 1 -28 0.7 0.1 16 -19 5 -10 1 -28 0.8 0.2 -21 -39 7 -38 1 -28 0.6 0.8 -26 -33 47 8 2 -28 0.2 0.7 20 -38 -7 -22 2 -28 0.6 0.5 43 33 28 17 1 -28 0.6 0.2 40 -2 37 -47 1 -28 0.8 0.8 25 -5 -12 -22 1 -28 0.5 0.2 22 9 -29 -43 1 -28 0.9 0.7 49 23 17 -35 1 -28 0.7 0.2 45 38 29 -39 1 -28 0.1 0.2 20 -9 35 33 2 -28 0.7 0.2 42 -41 38 -14 1 -28 0.6 0.9 26 -37 17 -36 2 -28 0.6 0.2 38 22 35 -7 1 -28 0.5 0.3 19 8 22 6 1 -28 0.4 0.7 -16 -39 16 4 2 -28 0.6 0.7 48 1 19 2 1 -28 0.1 0.5 47 -30 46 3 2 -28 0.1 0.1 5 -23 16 -47 1 -28 0.3 0.8 46 -50 47 -11 2 -28 0.1 0.7 -16 -46 -35 -47 1 -28 0.6 0.4 -2 -12 4 -39 1 -28 0.6 0.8 -5 -10 43 -19 2 -28 0.3 0.8 43 36 48 26 1 -28 0.1 0.8 21 -3 48 38 2 -28 0.8 0.8 2 -28 38 37 2 -28 0.9 0.8 -35 -47 -3 -50 2 -28 0.5 0.7 44 18 26 -8 1 -28 0.1 0.2 41 -36 1 -20 2 -28 0.3 0.1 15 -1 34 11 2 -28 0.3 0.8 44 -22 42 21 2 -28 0.4 0.4 -30 -46 46 27 2 -28 0.3 0.2 -15 -44 31 0 2 -28 0.8 0.9 -12 -35 48 -26 2 -28 0.3 0.6 27 -8 -26 -44 1 -28 0.4 0.1 -31 -49 22 -44 2 -28 0.3 0.1 -10 -18 35 3 2 -28 0.6 0.8 10 -20 -17 -36 1 -28 0.1 0.5 -17 -46 12 -32 2 -28 0.6 0.9 5 -38 47 29 2 -28 0.5 0.2 24 -30 36 16 2 -28 0.8 0.9 -12 -46 1 -9 2 -28 0.1 0.2 6 -19 -11 -17 2 -28 0.7 0.1 -12 -46 49 -48 1 -28 0.1 0.6 11 9 20 19 2 -28 0.3 0.1 16 6 32 -13 1 -28 0.7 0.1 4 -12 40 33 2 -28 0.2 0.9 49 33 37 -34 2 -28 0.2 0.4 25 -29 27 -17 2 -28 0.7 0.8 42 -24 23 -48 1 -28 0.1 0.3 44 -48 1 -46 1 -28 0.3 0.8 10 7 -5 -26 1 -28 0.6 0.7 7 -13 46 9 2 -28 0.5 0.6 36 8 20 -12 1 -28 0.9 0.7 37 -42 36 -4 1 -28 0.6 0.9 33 9 -18 -47 1 -28 0.9 0.8 41 28 47 -44 1 -28 0.1 0.7 -5 -8 -14 -47 1 -28 0.5 0.4 11 3 18 -9 1 -28 0.3 0.2 49 19 47 45 2 -28 0.6 0.7 49 34 13 -33 1 -28 0.3 0.8 -37 -44 48 -45 2 -28 0.6 0.9 -23 -33 12 -33 2 -28 0.2 0.4 9 -4 -15 -24 1 -28 0.2 0.1 48 10 50 22 2 -28 0.2 0.9 36 18 47 -8 2 -28 0.2 0.7 29 -40 -29 -47 2 -28 0.2 0.6 38 36 29 -2 1 -28 0.6 0.8 9 -46 16 -38 2 -28 0.1 0.6 39 6 -3 -11 1 -28 0.9 0.4 7 -35 35 -47 1 -28 0.8 0.5 41 27 29 -43 1 -28 0.6 0.7 32 -16 47 -24 2 -28 0.5 0.8 11 -31 6 -46 2 -28 0.3 0.9 34 3 32 27 2 -28 0.1 0.7 -3 -37 -36 -43 2 -28 0.4 0.3 4 -27 -11 -34 1 -28 0.5 0.7 33 -21 12 -20 1 -28 0.2 0.6 9 -44 35 -8 2 -28 0.8 0.1 44 -17 -45 -47 1 -28 0.6 0.6 9 -46 3 -11 1 -28 0.1 0.2 15 -18 17 -5 2 -28 0.1 0.9 35 -46 32 8 2 -28 0.4 0.9 23 22 31 2 2 -28 0.4 0.7 34 20 -16 -29 1 -28 0.7 0.9 40 -14 -28 -41 1 -28 0.3 0.7 31 -4 39 24 2 -28 0.7 0.6 50 37 19 -23 1 -28 0.1 0.1 33 -43 45 -36 1 -28 0.5 0.8 -14 -30 8 -9 2 -28 
0.3 0.2 3 -9 43 -28 1 -28 0.7 0.6 33 -16 36 -12 2 -28 0.1 0.7 -4 -35 1 -7 2 -28 0.4 0.5 -5 -48 -1 -2 2 -28 0.1 0.8 31 -8 42 -44 2 -28 0.9 0.6 44 -48 26 -10 1 -28 0.4 0.2 39 -9 47 46 2 -28 0.6 0.4 35 4 -3 -38 1 -28 0.1 0.1 -32 -42 -8 -44 2 -28 0.9 0.9 -2 -8 26 -9 2 -28 0.8 0.2 -43 -47 40 -6 2 -28 0.8 0.7 29 -4 -22 -35 1 -28 0.7 0.5 47 -46 28 -36 1 -28 0.8 0.5 -14 -35 17 -39 2 -28 0.4 0.9 30 20 41 31 2 -28 0.3 0.1 -30 -45 50 -32 2 -28 0.7 0.8 11 -40 -2 -8 1 -28 0.3 0.6 15 3 18 11 2 -28 0.3 0.8 -19 -34 29 -8 2 -28 0.2 0.5 43 8 38 5 2 -28 0.2 0.5 24 -8 3 -48 1 -28 0.4 0.2 5 -34 47 -43 2 -28 0.2 0.1 12 -17 9 -7 2 -28 0.6 0.9 6 -38 -4 -38 2 -28 0.9 0.2 28 -21 41 7 1 -28 0.3 0.4 -6 -20 -13 -34 1 -28 0.4 0.5 29 -14 19 -40 1 -28 0.8 0.7 23 19 17 15 1 -28 0.9 0.5 12 -36 49 23 2 -28 0.8 0.6 27 0 35 -46 1 -28 0.2 0.2 -8 -46 50 42 2 -28 0.4 0.8 47 27 -4 -16 1 -28 0.7 0.7 37 -10 32 30 2 -28 0.8 0.3 27 -11 2 -47 1 -28 0.5 0.6 -21 -35 29 4 2 -28 0.5 0.5 39 -11 34 -6 2 -28 0.3 0.9 5 -30 10 5 2 -28 0.1 0.9 20 -27 36 -29 2 -28 0.8 0.8 -38 -42 40 -41 2 -28 0.9 0.7 -31 -42 25 -48 2 -28 0.2 0.6 46 21 35 -48 1 -28 0.3 0.3 -8 -34 9 -7 2 -28 0.7 0.4 -15 -31 46 -19 2 -28 0.9 0.5 43 -6 32 15 1 -28 0.4 0.1 28 24 47 -33 1 -28 0.2 0.5 35 -37 -20 -23 1 -28 0.5 0.5 36 -44 1 -26 1 -28 0.7 0.1 13 -31 35 4 2 -28 0.8 0.1 -34 -40 18 -46 2 -28 0.9 0.3 15 -3 25 -36 1 -28 0.3 0.2 14 -31 29 -2 2 -28 0.2 0.1 49 45 -31 -50 1 -28 0.9 0.6 12 9 39 -20 1 -28 0.1 0.4 -18 -24 5 -25 2 -28 0.6 0.1 -36 -40 -2 -26 2 -28 0.1 0.4 18 -42 -4 -42 2 -28 0.9 0.9 24 -48 16 -13 1 -28 0.6 0.3 12 -46 24 -12 2 -28 0.5 0.2 7 -18 17 -13 1 -28 0.9 0.5 26 -27 2 -46 1 -28 0.6 0.4 37 -18 32 -31 1 -28 0.6 0.4 30 -17 12 -50 1 -28 0.9 0.9 7 -5 22 18 2 -28 0.7 0.2 25 -44 -34 -48 1 -28 0.5 0.4 40 -22 32 -39 1 -28 0.1 0.2 21 -13 45 -31 1 -28 0.2 0.7 -6 -34 7 -18 2 -28 0.2 0.8 35 -42 45 3 2 -28 0.6 0.4 8 -13 41 23 2 -28 0.7 0.1 17 -26 35 15 2 -28 0.8 0.8 -37 -45 1 0 2 -28 0.9 0.3 -18 -34 42 10 2 -28 0.4 0.1 49 -15 -12 -35 1 -28 0.4 0.4 44 -49 -31 -38 1 -28 0.8 0.9 -20 -37 43 42 2 -28 0.3 0.1 50 6 5 -14 1 -28 0.8 0.4 42 37 8 -23 1 -28 0.6 0.7 -18 -26 2 -26 2 -28 0.5 0.8 18 -26 31 -20 2 -28 0.3 0.4 -22 -50 28 -49 2 -28 0.2 0.8 36 -47 30 -2 2 -28 0.8 0.4 2 -9 11 -6 2 -28 0.4 0.5 9 -48 -5 -41 1 -28 0.8 0.8 -12 -18 8 -23 2 -28 0.8 0.7 0 -17 36 35 2 -28 0.3 0.6 25 21 49 -43 1 -28 0.3 0.3 24 -33 23 4 2 -28 0.6 0.3 30 -36 -25 -50 1 -28 0.6 0.8 -34 -43 -31 -32 2 -28 0.9 0.3 33 -25 46 11 1 -28 0.6 0.6 9 -2 39 29 2 -28 0.4 0.9 24 -41 -7 -20 1 -28 0.9 0.6 -13 -49 47 11 2 -28 0.6 0.3 -12 -35 25 -35 2 -28 0.4 0.4 -8 -47 26 -11 2 -28 0.2 0.6 28 15 12 -17 1 -28 0.4 0.9 -14 -50 -21 -28 1 -28 0.9 0.7 -9 -35 45 43 2 -28 0.1 0.6 -14 -16 44 2 2 -28 0.2 0.4 41 20 49 -25 1 -28 0.7 0.3 49 14 -37 -50 1 -28 0.3 0.4 -3 -4 45 -7 2 -28 0.5 0.6 24 20 -20 -21 1 -28 0.7 0.4 -14 -41 9 8 2 -28 0.7 0.3 16 -36 27 11 2 -28 0.1 0.5 -10 -36 40 -30 2 -28 0.9 0.6 32 15 -10 -13 1 -28 0.9 0.6 50 -40 -29 -32 1 -28 0.9 0.4 1 -50 42 34 2 -28 0.7 0.9 50 -16 -10 -50 1 -28 0.8 0.2 44 -27 33 7 1 -28 0.9 0.3 10 -19 34 24 2 -28 0.9 0.3 6 -15 -2 -30 1 -28 0.1 0.2 32 -20 28 19 2 -28 0.5 0.4 -14 -46 49 4 2 -28 0.1 0.9 24 13 -19 -35 1 -28 0.9 0.9 21 -41 39 -16 2 -28 0.6 0.5 42 21 40 -23 1 -29 0.1 0.8 20 -39 12 8 2 -29 0.8 0.9 35 28 -13 -31 1 -29 0.2 0.4 39 -19 18 14 2 -29 0.8 0.9 28 -22 -5 -40 1 -29 0.1 0.9 2 -5 36 27 2 -29 0.3 0.7 9 -26 45 4 2 -29 0.7 0.1 1 -44 49 -10 1 -29 0.5 0.6 16 -19 38 21 2 -29 0.2 0.8 0 -32 24 -45 2 -29 0.5 0.4 18 -1 43 15 2 -29 0.4 0.5 46 -27 38 29 2 -29 0.4 0.4 24 11 3 -48 1 -29 0.3 0.3 42 -16 
48 -6 2 -29 0.7 0.3 -9 -28 5 -19 1 -29 0.1 0.8 -14 -31 -21 -45 2 -29 0.9 0.4 -7 -42 50 48 2 -29 0.8 0.9 2 -44 -18 -25 1 -29 0.5 0.2 26 -45 40 11 2 -29 0.7 0.7 16 -23 -3 -12 1 -29 0.2 0.7 43 -17 23 -19 2 -29 0.3 0.2 -12 -35 33 -7 2 -29 0.2 0.6 13 -15 15 -34 1 -29 0.8 0.1 -37 -44 31 16 2 -29 0.5 0.6 16 -9 15 -22 2 -29 0.7 0.3 4 -18 -32 -44 1 -29 0.3 0.4 2 -23 18 -6 2 -29 0.8 0.1 46 -36 29 3 1 -29 0.6 0.5 43 21 39 -46 1 -29 0.6 0.2 -5 -45 -5 -25 1 -29 0.4 0.1 11 -19 23 -41 1 -29 0.9 0.4 47 -5 47 1 1 -29 0.2 0.9 29 -31 20 9 2 -29 0.1 0.9 48 22 18 -16 1 -29 0.2 0.3 30 -14 -3 -18 1 -29 0.2 0.9 20 -45 15 -25 2 -29 0.1 0.2 48 -37 10 -7 1 -29 0.3 0.5 25 23 18 -45 1 -29 0.1 0.3 25 -34 19 -27 2 -29 0.5 0.5 13 -19 48 -40 2 -29 0.5 0.9 -16 -38 8 -40 2 -29 0.6 0.8 20 -35 47 -41 2 -29 0.5 0.5 -7 -14 48 30 2 -29 0.3 0.2 23 -30 19 9 2 -29 0.9 0.8 38 18 36 24 2 -29 0.4 0.3 34 -40 39 -32 2 -29 0.4 0.7 24 23 24 3 1 -29 0.1 0.4 11 -31 44 4 2 -29 0.2 0.3 14 -48 -4 -8 2 -29 0.3 0.2 42 -36 -42 -49 1 -29 0.2 0.8 30 18 20 -49 1 -29 0.6 0.5 25 -32 -28 -47 1 -29 0.9 0.1 45 32 -5 -30 1 -29 0.8 0.6 26 -20 32 5 1 -29 0.4 0.7 22 -22 -16 -49 1 -29 0.4 0.1 20 8 43 33 2 -29 0.9 0.3 26 8 44 28 2 -29 0.7 0.2 22 -40 44 -44 2 -29 0.3 0.4 31 -29 -15 -26 2 -29 0.5 0.5 45 36 48 -24 1 -29 0.5 0.6 34 3 50 -22 1 -29 0.6 0.6 48 36 28 -40 1 -29 0.7 0.7 33 -11 11 -26 1 -29 0.1 0.9 30 0 3 -32 2 -29 0.8 0.4 16 -46 19 -7 1 -29 0.8 0.7 -24 -36 41 -12 2 -29 0.8 0.5 25 -9 -5 -39 1 -29 0.9 0.2 21 -34 9 -28 1 -29 0.3 0.4 49 -35 27 12 2 -29 0.3 0.6 -14 -24 20 -45 2 -29 0.9 0.5 -20 -25 7 -8 2 -29 0.8 0.1 38 -23 24 13 1 -29 0.4 0.4 30 0 44 39 2 -29 0.7 0.8 24 -2 -31 -32 1 -29 0.3 0.3 -34 -35 22 -35 2 -29 0.1 0.4 21 5 16 12 2 -29 0.2 0.1 -21 -26 -10 -13 2 -29 0.2 0.2 -18 -45 24 -37 2 -29 0.5 0.6 32 23 14 -20 1 -29 0.2 0.9 -9 -29 -13 -36 1 -29 0.4 0.8 -15 -44 41 38 2 -29 0.2 0.9 33 -36 -34 -38 1 -29 0.7 0.5 38 6 -31 -45 1 -29 0.8 0.7 30 -40 47 -46 1 -29 0.9 0.8 -33 -36 46 -3 2 -29 0.9 0.6 10 -29 45 39 2 -29 0.1 0.3 50 10 42 34 2 -29 0.4 0.3 -16 -29 9 5 2 -29 0.2 0.5 -5 -26 -35 -45 1 -29 0.8 0.7 -32 -35 30 -44 2 -29 0.8 0.5 15 -37 38 3 2 -29 0.3 0.3 -11 -45 24 -33 2 -29 0.3 0.3 -13 -16 -6 -13 2 -29 0.4 0.1 47 -7 29 25 2 -29 0.8 0.3 21 -45 -13 -50 1 -29 0.6 0.1 -10 -47 32 12 2 -29 0.3 0.1 -5 -42 47 26 2 -29 0.4 0.2 13 -11 15 -38 1 -29 0.3 0.7 -10 -13 31 4 2 -29 0.4 0.3 6 -38 40 25 2 -29 0.7 0.9 -27 -36 -13 -46 1 -29 0.4 0.9 33 -1 -11 -41 1 -29 0.5 0.6 12 2 8 -9 2 -29 0.4 0.8 30 0 29 -18 2 -29 0.3 0.2 33 -41 -19 -48 1 -29 0.1 0.5 30 -2 4 -2 1 -29 0.3 0.9 32 26 45 13 2 -29 0.9 0.8 2 -37 41 -32 2 -29 0.7 0.3 28 4 47 -22 1 -29 0.2 0.2 -1 -8 30 2 2 -29 0.3 0.7 21 -46 -4 -16 1 -29 0.5 0.4 2 -21 32 16 2 -29 0.7 0.8 47 -42 41 -19 1 -29 0.4 0.3 -1 -24 45 12 2 -29 0.6 0.1 47 43 48 24 2 -29 0.7 0.4 37 18 -1 -50 1 -29 0.5 0.2 48 12 34 -19 1 -29 0.3 0.3 24 6 35 29 2 -29 0.6 0.7 44 32 -2 -32 1 -29 0.3 0.2 21 17 -4 -22 1 -29 0.7 0.2 42 16 -24 -33 1 -29 0.1 0.9 18 7 44 -26 2 -29 0.8 0.8 39 -40 42 -19 2 -29 0.4 0.6 -1 -31 37 -2 2 -29 0.8 0.9 27 7 7 -35 1 -29 0.5 0.6 47 -14 -1 -29 1 -29 0.9 0.8 20 -45 46 29 2 -29 0.6 0.2 -17 -35 45 -15 2 -29 0.6 0.1 -17 -30 27 -2 2 -29 0.2 0.7 47 42 -26 -34 1 -29 0.8 0.2 -12 -49 22 13 2 -29 0.6 0.6 8 -40 -12 -46 1 -29 0.9 0.1 -28 -50 -12 -13 1 -29 0.6 0.2 15 -9 29 -35 1 -29 0.6 0.7 -6 -36 43 0 2 -29 0.4 0.3 41 -7 -33 -38 1 -29 0.7 0.1 25 -10 -25 -30 1 -29 0.9 0.1 34 -26 38 -35 1 -29 0.9 0.2 16 -15 3 -37 1 -29 0.7 0.9 -5 -9 37 -26 2 -29 0.8 0.2 35 -28 36 -40 1 -29 0.5 0.1 28 -34 -21 -46 1 -29 0.5 0.1 8 -24 28 -15 1 -29 0.9 0.6 14 -15 31 
12 2 -29 0.3 0.8 12 6 47 -20 2 -29 0.1 0.1 2 -19 32 -47 1 -29 0.5 0.5 30 -29 -33 -47 1 -29 0.1 0.9 48 -9 -17 -45 2 -29 0.4 0.5 21 -29 -31 -44 1 -29 0.2 0.5 1 -1 -38 -40 1 -29 0.8 0.4 11 -5 47 -12 1 -29 0.2 0.1 -14 -49 -3 -26 2 -29 0.3 0.8 29 -21 9 2 2 -29 0.9 0.8 34 -21 33 4 2 -29 0.3 0.1 28 16 17 -18 1 -29 0.5 0.7 37 -49 35 34 2 -29 0.7 0.7 23 -26 50 9 2 -29 0.5 0.2 34 32 10 -10 1 -29 0.2 0.3 37 -3 44 -43 2 -29 0.6 0.8 41 10 3 -2 1 -29 0.8 0.7 33 10 15 -19 1 -29 0.7 0.9 14 10 -18 -48 1 -29 0.4 0.3 49 18 -5 -24 1 -29 0.3 0.3 -47 -49 31 23 2 -29 0.3 0.3 40 -30 26 -7 2 -29 0.3 0.5 17 -36 38 36 2 -29 0.1 0.5 34 30 -14 -36 1 -29 0.4 0.2 35 -7 -14 -36 1 -29 0.1 0.4 -12 -23 4 -37 1 -29 0.7 0.3 0 -14 -11 -31 2 -29 0.8 0.8 23 -18 17 11 2 -29 0.1 0.5 27 -42 31 -11 2 -29 0.9 0.5 47 -8 -23 -32 1 -29 0.5 0.4 16 -31 25 -37 2 -29 0.3 0.4 29 -39 42 -25 2 -29 0.6 0.6 47 -1 32 -28 1 -29 0.3 0.8 48 40 24 -38 2 -29 0.7 0.7 -3 -27 48 28 2 -29 0.1 0.7 6 -43 -34 -44 2 -29 0.2 0.1 39 -50 48 -1 1 -29 0.9 0.5 48 -25 10 -49 1 -29 0.5 0.6 -6 -45 41 -20 2 -29 0.1 0.7 -19 -29 22 6 2 -29 0.4 0.6 17 -8 34 12 2 -29 0.1 0.3 -8 -15 -26 -34 1 -29 0.1 0.7 31 -4 -9 -16 1 -29 0.1 0.5 -3 -34 14 -35 2 -29 0.4 0.7 36 -21 -4 -30 1 -29 0.3 0.8 40 16 2 -5 2 -29 0.8 0.2 18 -27 12 4 1 -29 0.9 0.8 46 5 44 43 2 -29 0.6 0.5 39 -24 42 -15 2 -29 0.8 0.5 21 -47 -22 -29 1 -29 0.5 0.7 15 -31 -3 -31 1 -29 0.6 0.8 -20 -23 39 -35 2 -29 0.3 0.2 32 -11 26 24 2 -29 0.5 0.9 7 -17 17 2 2 -29 0.5 0.1 23 -30 -19 -41 1 -29 0.3 0.7 39 25 50 39 2 -29 0.2 0.5 12 -10 36 28 2 -29 0.6 0.9 30 -42 42 -14 2 -29 0.4 0.5 22 10 35 -27 1 -29 0.7 0.5 45 -2 -22 -43 1 -29 0.5 0.3 2 -48 26 10 2 -29 0.2 0.6 8 -1 44 -24 1 -29 0.7 0.5 -7 -36 32 -45 2 -29 0.3 0.2 -10 -44 42 -25 2 -29 0.2 0.3 46 -47 50 44 2 -29 0.4 0.3 31 -22 3 2 2 -29 0.7 0.5 -2 -50 5 -43 2 -29 0.6 0.4 29 22 41 38 1 -29 0.3 0.7 -11 -36 -21 -37 1 -29 0.3 0.2 42 0 23 -47 1 -29 0.5 0.6 6 -4 -21 -48 1 -29 0.9 0.7 29 -24 27 -1 1 -29 0.4 0.5 0 -26 45 -39 2 -29 0.7 0.2 29 -5 9 8 1 -29 0.9 0.4 -39 -44 12 -36 2 -29 0.2 0.5 -34 -39 20 -48 2 -29 0.8 0.3 29 -7 -7 -35 1 -29 0.9 0.6 18 1 39 27 2 -29 0.4 0.9 36 -16 -36 -43 1 -29 0.3 0.8 42 -34 16 -48 2 -29 0.4 0.8 -26 -44 -9 -31 2 -29 0.1 0.7 9 -43 -17 -18 2 -29 0.5 0.5 -25 -32 23 15 2 -29 0.1 0.6 46 -13 42 -46 2 -29 0.2 0.6 44 -6 26 8 2 -29 0.6 0.9 29 -39 15 -28 1 -29 0.8 0.6 4 -45 1 -36 2 -29 0.6 0.8 39 31 38 -32 1 -29 0.7 0.4 32 11 -20 -28 1 -29 0.8 0.8 -20 -37 9 -20 2 -29 0.4 0.7 -25 -26 40 10 2 -29 0.2 0.4 45 -15 1 -21 1 -29 0.4 0.8 39 -44 47 14 2 -29 0.6 0.6 49 -21 34 -34 1 -29 0.3 0.6 32 23 1 -5 1 -29 0.8 0.5 13 -1 5 -29 1 -29 0.5 0.7 24 -42 -9 -27 2 -29 0.4 0.9 16 -43 22 9 2 -29 0.9 0.9 49 4 50 -16 1 -29 0.5 0.9 -33 -39 -30 -47 2 -29 0.1 0.6 45 23 -4 -28 1 -29 0.6 0.7 27 -42 36 -39 2 -29 0.9 0.7 -4 -41 -13 -37 1 -29 0.8 0.9 19 -39 40 -3 2 -29 0.5 0.8 9 -29 50 9 2 -29 0.5 0.8 43 19 24 -40 1 -29 0.8 0.3 27 1 49 -17 1 -29 0.5 0.2 38 -2 41 -19 1 -29 0.7 0.1 23 5 48 40 2 -29 0.7 0.8 -15 -44 30 -24 2 -29 0.5 0.7 50 22 8 7 1 -29 0.5 0.5 43 25 -24 -50 1 -29 0.4 0.5 42 -15 20 -17 1 -29 0.9 0.1 41 30 -12 -28 1 -29 0.7 0.2 30 -6 30 -47 1 -29 0.3 0.6 -3 -14 -22 -34 2 -29 0.6 0.3 34 -47 19 -10 2 -29 0.5 0.5 44 -7 29 -17 1 -29 0.1 0.2 31 13 32 -6 1 -29 0.1 0.1 39 20 2 -34 1 -29 0.4 0.5 49 2 24 15 2 -29 0.2 0.8 -8 -39 37 5 2 -29 0.8 0.4 24 2 -28 -43 1 -29 0.8 0.5 41 6 -7 -50 1 -29 0.7 0.8 7 -44 40 -33 2 -29 0.9 0.1 28 3 9 5 1 -29 0.3 0.7 33 -7 27 -14 2 -29 0.5 0.8 2 -40 -4 -34 2 -29 0.4 0.9 41 29 -14 -23 1 -29 0.7 0.8 12 -38 -37 -43 1 -29 0.2 0.7 24 11 -27 -33 1 -29 0.8 0.6 40 
19 27 -50 1 -29 0.8 0.3 -43 -50 46 -30 2 -29 0.7 0.4 42 4 -27 -46 1 -29 0.4 0.9 23 -38 47 29 2 -29 0.9 0.9 34 18 48 -35 2 -29 0.6 0.4 0 -10 28 5 2 -29 0.6 0.9 20 -39 50 32 2 -29 0.5 0.4 43 10 4 -12 2 -29 0.4 0.3 0 -32 25 -24 2 -29 0.1 0.4 28 23 -9 -50 1 -29 0.7 0.2 31 -37 27 -8 1 -29 0.7 0.6 -4 -17 0 -16 2 -29 0.5 0.7 11 -43 -6 -38 1 -29 0.6 0.7 34 -12 -8 -38 1 -29 0.3 0.4 -7 -20 -3 -37 1 -29 0.5 0.5 5 -7 9 -46 1 -29 0.4 0.4 48 -26 39 -31 1 -29 0.6 0.4 31 -1 42 30 2 -29 0.4 0.4 -40 -47 33 -41 1 -29 0.9 0.1 40 36 25 -3 1 -29 0.2 0.9 -13 -14 35 10 2 -29 0.3 0.6 23 -26 -27 -47 1 -29 0.4 0.9 37 -32 36 -10 2 -29 0.2 0.8 -2 -25 33 32 2 -29 0.6 0.6 49 30 -10 -20 1 -29 0.1 0.4 -18 -43 -26 -31 1 -29 0.4 0.6 -22 -49 35 -17 2 -30 0.2 0.6 43 13 47 33 2 -30 0.1 0.2 10 -5 14 -25 1 -30 0.1 0.5 26 12 33 -30 1 -30 0.8 0.9 30 -40 43 -8 2 -30 0.2 0.7 -7 -39 -1 -38 2 -30 0.4 0.8 -1 -14 0 -29 2 -30 0.5 0.9 47 23 10 -20 1 -30 0.5 0.1 45 28 14 -19 1 -30 0.9 0.4 45 -25 9 -3 1 -30 0.1 0.7 34 -45 15 -19 2 -30 0.3 0.9 32 22 26 8 1 -30 0.2 0.5 50 -6 22 21 2 -30 0.5 0.7 46 -47 24 -47 1 -30 0.1 0.8 -38 -42 13 -2 2 -30 0.5 0.8 41 -38 50 12 2 -30 0.4 0.9 -25 -30 39 24 2 -30 0.6 0.5 42 -15 -8 -36 1 -30 0.4 0.9 2 -13 8 1 2 -30 0.6 0.4 47 -13 -19 -37 1 -30 0.3 0.7 50 -46 43 29 2 -30 0.9 0.4 24 -28 -17 -26 1 -30 0.9 0.1 41 30 27 -27 1 -30 0.8 0.9 25 -18 3 -14 1 -30 0.9 0.4 45 -15 49 -9 1 -30 0.3 0.4 12 -40 36 -22 2 -30 0.6 0.3 49 2 -7 -11 1 -30 0.6 0.9 -1 -47 28 11 2 -30 0.6 0.4 -12 -27 7 0 2 -30 0.7 0.3 49 -24 45 34 2 -30 0.4 0.8 33 19 -30 -43 1 -30 0.2 0.1 26 -36 11 -33 1 -30 0.1 0.2 24 -22 50 -2 2 -30 0.7 0.3 42 24 -38 -46 1 -30 0.2 0.2 -8 -32 26 -37 2 -30 0.5 0.3 12 -28 29 -2 2 -30 0.8 0.7 40 -10 26 -46 1 -30 0.1 0.4 41 16 -7 -32 1 -30 0.5 0.4 36 27 44 31 2 -30 0.8 0.2 1 -32 32 -7 2 -30 0.9 0.9 -1 -24 44 33 2 -30 0.6 0.5 19 7 32 -16 1 -30 0.5 0.2 42 18 4 -33 1 -30 0.2 0.2 31 -48 50 -17 2 -30 0.7 0.3 47 -29 -23 -25 1 -30 0.7 0.4 50 12 44 -43 1 -30 0.1 0.2 17 -50 32 31 2 -30 0.4 0.3 34 30 33 28 1 -30 0.1 0.4 41 36 27 12 1 -30 0.7 0.9 39 -4 -31 -47 1 -30 0.7 0.7 28 -31 40 -46 2 -30 0.9 0.5 3 -38 10 -19 1 -30 0.1 0.3 0 -48 -15 -47 2 -30 0.6 0.9 50 5 32 7 1 -30 0.7 0.9 -12 -43 49 -17 2 -30 0.5 0.6 43 -16 -46 -49 1 -30 0.5 0.1 39 -17 49 37 2 -30 0.7 0.3 -7 -37 -1 -42 1 -30 0.3 0.4 10 9 -27 -46 1 -30 0.8 0.3 -6 -14 3 -3 2 -30 0.1 0.4 36 -47 34 -25 2 -30 0.8 0.1 -33 -43 18 12 2 -30 0.1 0.8 36 -49 47 10 2 -30 0.8 0.2 -22 -50 21 -9 2 -30 0.7 0.7 41 37 30 -31 1 -30 0.2 0.1 3 -30 16 -47 1 -30 0.7 0.7 26 -5 50 -32 2 -30 0.5 0.1 8 -20 18 7 2 -30 0.3 0.8 24 -36 -25 -38 1 -30 0.1 0.4 37 -40 30 -19 2 -30 0.7 0.9 -8 -40 44 34 2 -30 0.5 0.2 -11 -16 8 3 2 -30 0.6 0.3 -34 -36 -20 -22 2 -30 0.9 0.7 -2 -38 17 -40 2 -30 0.6 0.3 43 21 -42 -50 1 -30 0.8 0.1 45 -7 -13 -22 1 -30 0.8 0.9 21 -9 34 -11 2 -30 0.3 0.2 -7 -32 1 -34 1 -30 0.8 0.2 -4 -48 49 -11 2 -30 0.4 0.1 6 -30 32 -9 2 -30 0.6 0.7 21 19 -11 -14 1 -30 0.1 0.2 -6 -39 -2 -16 2 -30 0.1 0.8 -5 -26 9 -5 2 -30 0.2 0.1 -10 -24 24 -38 1 -30 0.8 0.3 18 3 48 -50 1 -30 0.1 0.5 -6 -38 29 17 2 -30 0.7 0.6 22 17 47 17 2 -30 0.2 0.6 15 -42 34 25 2 -30 0.4 0.1 -11 -40 25 24 2 -30 0.4 0.5 -16 -47 -4 -9 2 -30 0.6 0.2 12 8 18 -50 1 -30 0.5 0.9 -26 -30 50 -42 2 -30 0.8 0.3 40 -6 -1 -42 1 -30 0.1 0.7 36 -34 -7 -46 2 -30 0.1 0.3 24 11 -6 -25 1 -30 0.9 0.2 38 16 -33 -49 1 -30 0.4 0.1 -19 -33 48 45 2 -30 0.8 0.8 3 0 43 13 2 -30 0.5 0.4 39 25 45 1 1 -30 0.1 0.2 4 -7 44 5 2 -30 0.9 0.8 36 23 44 -8 2 -30 0.5 0.6 43 30 -20 -44 1 -30 0.4 0.1 26 -2 38 12 2 -30 0.2 0.4 -12 -35 41 -47 2 -30 0.3 0.9 37 19 36 -36 2 -30 
0.7 0.4 41 -2 18 -20 1 -30 0.6 0.9 10 -28 -12 -50 1 -30 0.7 0.4 25 17 -16 -46 1 -30 0.9 0.6 -28 -37 2 -49 2 -30 0.3 0.9 22 7 38 -23 2 -30 0.8 0.9 39 28 48 -47 2 -30 0.7 0.9 26 0 -31 -50 1 -30 0.9 0.4 43 42 22 -36 1 -30 0.1 0.2 7 1 22 -25 1 -30 0.9 0.7 40 19 46 -28 1 -30 0.3 0.2 0 -4 -4 -34 1 -30 0.8 0.2 48 46 6 -12 1 -30 0.8 0.9 5 2 41 -27 2 -30 0.5 0.3 13 -5 25 -34 1 -30 0.4 0.8 26 7 47 -4 2 -30 0.9 0.4 -25 -33 39 -42 2 -30 0.7 0.3 15 -39 31 -28 1 -30 0.2 0.2 -33 -41 47 31 2 -30 0.7 0.4 46 -6 5 -1 1 -30 0.6 0.6 33 -44 -4 -31 1 -30 0.4 0.4 -35 -45 20 15 2 -30 0.1 0.4 40 2 -19 -25 1 -30 0.3 0.8 -2 -29 0 -40 2 -30 0.6 0.7 19 -17 -2 -20 1 -30 0.7 0.3 13 -31 -7 -42 1 -30 0.6 0.6 15 -24 18 -18 2 -30 0.9 0.4 23 -5 45 -7 1 -30 0.3 0.2 -40 -44 15 -34 2 -30 0.3 0.3 38 -41 26 -20 2 -30 0.4 0.3 26 -44 37 -2 2 -30 0.2 0.2 13 -41 37 -27 2 -30 0.1 0.1 -35 -40 50 -28 2 -30 0.4 0.5 -6 -25 26 -46 2 -30 0.1 0.6 20 -35 18 -10 2 -30 0.6 0.8 13 2 -7 -38 1 -30 0.1 0.4 26 24 5 -15 1 -30 0.5 0.2 -4 -7 30 -24 1 -30 0.6 0.2 18 -20 44 -7 2 -30 0.1 0.7 43 0 16 -17 1 -30 0.5 0.1 1 -38 26 -16 2 -30 0.8 0.7 -26 -45 43 19 2 -30 0.2 0.3 38 -42 25 8 2 -30 0.1 0.1 28 -46 44 -4 2 -30 0.2 0.3 -5 -48 18 -47 2 -30 0.4 0.8 4 -37 27 -22 2 -30 0.5 0.6 33 11 10 -18 1 -30 0.3 0.3 43 -44 0 -21 2 -30 0.9 0.9 36 26 30 -5 1 -30 0.4 0.2 48 -15 -10 -48 1 -30 0.3 0.7 18 14 33 -10 2 -30 0.5 0.4 34 20 1 -19 1 -30 0.1 0.4 48 46 41 14 1 -30 0.2 0.3 39 -22 31 -32 1 -30 0.6 0.5 33 -17 33 -14 1 -30 0.3 0.1 -4 -20 29 -16 2 -30 0.9 0.2 35 28 33 -6 1 -30 0.5 0.7 20 -43 31 -27 2 -30 0.4 0.1 -29 -47 -25 -30 2 -30 0.8 0.5 -9 -25 29 -33 2 -30 0.4 0.9 28 -42 43 6 2 -30 0.8 0.4 42 -6 8 -25 1 -30 0.3 0.4 28 -8 46 21 2 -30 0.5 0.7 40 -17 -28 -41 1 -30 0.9 0.9 39 -34 -6 -45 1 -30 0.5 0.6 19 10 41 35 2 -30 0.1 0.5 19 -23 48 19 2 -30 0.7 0.3 10 -38 -1 -9 1 -30 0.5 0.2 39 -40 2 -45 1 -30 0.4 0.3 42 41 46 34 1 -30 0.6 0.6 -9 -37 -32 -44 1 -30 0.5 0.8 10 -8 25 -44 2 -30 0.2 0.1 40 -24 31 15 2 -30 0.2 0.5 -17 -36 -3 -24 2 -30 0.4 0.3 -39 -50 30 16 2 -30 0.8 0.8 46 5 -3 -21 1 -30 0.5 0.3 -2 -4 29 -4 2 -30 0.6 0.6 23 -37 26 12 2 -30 0.3 0.8 -48 -49 25 -48 2 -30 0.5 0.7 4 -14 31 23 2 -30 0.7 0.3 9 -17 29 28 2 -30 0.4 0.5 44 -21 37 -20 2 -30 0.2 0.3 48 2 -33 -39 1 -30 0.4 0.9 34 24 5 -49 1 -30 0.5 0.7 22 12 8 -10 1 -30 0.8 0.5 21 -49 37 1 2 -30 0.4 0.5 11 -37 6 -31 1 -30 0.8 0.1 45 36 43 -36 1 -30 0.4 0.1 2 -28 30 29 2 -30 0.6 0.6 2 -33 32 -2 2 -30 0.8 0.1 -27 -41 8 -13 2 -30 0.5 0.6 36 -11 32 -41 1 -30 0.2 0.7 37 -48 41 38 2 -30 0.3 0.4 37 -31 -21 -50 1 -30 0.7 0.9 26 -2 12 -5 1 -30 0.7 0.6 25 -49 -17 -42 1 -30 0.8 0.4 13 -1 10 -35 1 -30 0.1 0.6 44 -36 -6 -31 2 -30 0.7 0.4 48 7 22 -40 1 -30 0.6 0.7 -7 -36 8 -29 2 -30 0.1 0.9 13 1 47 -41 2 -30 0.6 0.2 -40 -47 2 -9 2 -30 0.7 0.2 -27 -46 47 -31 2 -30 0.7 0.4 31 -38 45 39 2 -30 0.5 0.2 42 -38 28 19 2 -30 0.7 0.8 17 -2 50 13 2 -30 0.1 0.3 1 -36 46 44 2 -30 0.9 0.2 33 6 16 -22 1 -30 0.2 0.5 14 -3 6 -17 1 -30 0.4 0.3 -11 -43 40 16 2 -30 0.9 0.2 46 -1 -10 -45 1 -30 0.4 0.9 38 -40 7 -44 2 -30 0.8 0.4 35 -18 47 39 2 -30 0.6 0.4 -17 -26 26 10 2 -30 0.8 0.1 43 -24 -26 -34 1 -30 0.6 0.1 47 -50 -7 -16 1 -30 0.1 0.3 -13 -27 50 -37 2 -30 0.6 0.8 28 24 31 11 2 -30 0.2 0.5 30 28 -26 -43 1 -30 0.6 0.6 24 -20 -2 -42 1 -30 0.5 0.4 31 -38 6 -20 1 -30 0.4 0.1 22 -44 45 7 2 -30 0.9 0.9 43 -35 9 -35 1 -30 0.7 0.2 18 -9 19 8 2 -30 0.8 0.6 18 -6 -28 -36 1 -30 0.9 0.1 49 -4 41 15 1 -30 0.5 0.2 -16 -38 -17 -42 1 -30 0.9 0.7 46 -18 31 -46 1 -30 0.6 0.8 46 17 6 -15 1 -30 0.6 0.8 46 -21 -22 -50 1 -30 0.1 0.9 46 31 49 22 2 -30 0.7 0.5 4 -25 17 -30 1 
-30 0.3 0.1 -18 -24 -15 -27 1 -30 0.9 0.7 11 -10 -6 -47 1 -30 0.6 0.6 42 -8 4 -8 1 -30 0.8 0.3 13 -3 45 -6 1 -30 0.8 0.7 18 -1 49 -46 2 -30 0.2 0.8 16 -40 44 -12 2 -30 0.1 0.3 -31 -34 28 -22 2 -30 0.3 0.8 24 -23 41 -39 2 -30 0.3 0.5 24 16 46 -9 1 -30 0.1 0.9 6 -48 39 22 2 -30 0.9 0.5 47 -12 -7 -35 1 -30 0.9 0.9 -41 -46 50 17 2 -30 0.3 0.1 -22 -30 26 18 2 -30 0.2 0.9 12 -24 40 21 2 -30 0.9 0.7 29 -22 35 3 1 -30 0.8 0.4 27 -25 13 -23 1 -30 0.8 0.7 -3 -22 35 1 2 -30 0.7 0.7 23 9 21 -1 1 -30 0.3 0.7 -7 -11 43 4 2 -30 0.4 0.3 -18 -48 38 32 2 -30 0.2 0.3 19 2 38 27 2 -30 0.4 0.6 25 -27 45 2 2 -30 0.6 0.2 17 -32 -21 -42 1 -30 0.8 0.6 2 -13 9 -45 1 -30 0.5 0.9 -6 -26 40 -16 2 -30 0.2 0.3 23 -47 29 12 2 -30 0.2 0.2 33 20 14 11 1 -30 0.9 0.3 -8 -28 5 -17 2 -30 0.2 0.6 48 27 41 17 1 -30 0.6 0.2 15 -49 -14 -36 1 -30 0.1 0.2 32 18 16 -37 1 -30 0.4 0.5 22 -28 41 32 2 -30 0.4 0.4 -12 -33 -33 -38 1 -30 0.8 0.5 27 -28 -38 -40 1 -30 0.7 0.7 16 -17 45 -31 2 -30 0.2 0.8 42 25 41 1 1 -30 0.1 0.3 35 -22 46 -40 2 -30 0.7 0.5 7 -3 41 -35 1 -30 0.6 0.8 -19 -37 42 -50 2 -30 0.5 0.9 13 -20 31 -42 2 -30 0.4 0.9 42 -28 33 4 2 -30 0.8 0.7 2 -32 42 -4 2 -30 0.1 0.6 -1 -44 7 0 2 -30 0.4 0.4 31 -23 7 -10 1 -30 0.1 0.7 24 10 -11 -48 1 -30 0.7 0.6 11 -14 10 -42 1 -30 0.1 0.3 -29 -39 5 -44 2 -30 0.4 0.4 37 2 27 26 2 -30 0.4 0.7 -18 -36 -17 -46 2 -30 0.6 0.4 32 -24 -25 -36 1 -30 0.4 0.6 12 -19 -31 -46 1 -30 0.8 0.8 28 8 46 -50 2 -30 0.3 0.5 37 1 14 7 1 -30 0.4 0.2 45 13 24 17 1 -30 0.2 0.2 18 -49 48 -35 2 -30 0.9 0.1 23 10 13 -11 1 -30 0.2 0.3 43 37 36 22 1 -30 0.5 0.5 9 -28 17 -24 2 -30 0.8 0.7 32 15 12 -49 1 -30 0.4 0.6 -2 -33 44 -15 2 -30 0.1 0.2 42 -46 29 17 2 -30 0.4 0.3 -38 -40 39 6 2 -30 0.3 0.7 29 12 36 34 2 -30 0.8 0.9 -30 -43 43 -8 2 -30 0.5 0.1 -4 -39 -10 -17 2 -31 0.8 0.1 35 -25 14 -1 1 -31 0.7 0.9 -2 -7 47 -42 2 -31 0.8 0.6 -20 -24 48 -38 2 -31 0.7 0.4 49 36 35 -14 1 -31 0.9 0.3 48 47 32 13 1 -31 0.1 0.3 38 -25 -23 -47 1 -31 0.8 0.9 24 -21 13 -16 1 -31 0.8 0.1 37 21 -31 -41 1 -31 0.6 0.1 10 -43 -10 -36 1 -31 0.9 0.4 19 1 26 -15 1 -31 0.1 0.9 43 17 13 -8 1 -31 0.3 0.8 22 -22 2 -3 2 -31 0.8 0.2 -5 -37 25 -23 1 -31 0.5 0.9 22 7 49 -13 2 -31 0.8 0.5 -12 -17 17 -6 2 -31 0.7 0.7 7 -18 48 37 2 -31 0.8 0.5 -33 -34 31 -37 2 -31 0.3 0.3 -17 -21 49 -19 2 -31 0.7 0.4 12 8 -7 -21 1 -31 0.3 0.3 24 -30 21 -49 1 -31 0.4 0.6 4 -5 23 -35 1 -31 0.2 0.6 33 18 29 21 1 -31 0.3 0.9 -8 -39 -23 -45 2 -31 0.6 0.5 9 -2 42 33 2 -31 0.6 0.1 7 -10 21 -35 1 -31 0.1 0.1 50 12 32 -28 1 -31 0.2 0.5 50 -24 5 -19 2 -31 0.4 0.5 19 -7 17 -42 2 -31 0.3 0.4 9 -38 2 -46 1 -31 0.6 0.9 45 -18 42 25 2 -31 0.5 0.1 1 -41 27 -48 1 -31 0.7 0.5 42 -15 19 -13 1 -31 0.4 0.1 5 -19 1 0 1 -31 0.9 0.6 -17 -23 41 -50 2 -31 0.5 0.1 -9 -38 18 5 2 -31 0.2 0.2 39 37 12 2 1 -31 0.6 0.2 -26 -34 10 -40 2 -31 0.5 0.2 -17 -46 15 14 2 -31 0.8 0.5 22 -29 31 14 2 -31 0.8 0.4 16 6 29 -5 1 -31 0.8 0.3 -11 -26 22 -19 2 -31 0.6 0.2 20 -41 26 -45 1 -31 0.3 0.9 13 3 34 10 2 -31 0.6 0.9 20 -34 7 -39 1 -31 0.9 0.8 49 21 42 -10 1 -31 0.2 0.7 29 -11 21 0 2 -31 0.3 0.2 40 -9 34 -26 2 -31 0.2 0.8 25 -24 16 -26 2 -31 0.7 0.8 -26 -40 26 -22 2 -31 0.8 0.9 -21 -38 12 -36 2 -31 0.4 0.5 39 28 46 -10 1 -31 0.1 0.8 38 27 10 1 1 -31 0.6 0.5 41 -7 10 3 1 -31 0.4 0.3 -1 -44 -25 -35 1 -31 0.2 0.1 32 -7 50 -14 1 -31 0.4 0.5 47 -38 22 -13 2 -31 0.3 0.5 30 12 -3 -33 1 -31 0.6 0.5 29 11 -7 -50 1 -31 0.4 0.6 19 -19 6 -46 1 -31 0.2 0.4 39 29 20 -33 1 -31 0.7 0.5 10 -36 -29 -34 1 -31 0.5 0.7 48 -19 18 -12 2 -31 0.2 0.4 22 -3 19 -44 1 -31 0.3 0.3 -8 -13 27 15 2 -31 0.7 0.1 6 -4 29 -1 1 -31 0.6 0.3 1 -8 25 -20 1 
-31 0.2 0.2 -24 -27 15 -16 2 -31 0.5 0.8 27 10 34 16 2 -31 0.5 0.6 32 7 24 -13 2 -31 0.8 0.7 37 16 15 12 1 -31 0.4 0.4 33 -27 -11 -49 1 -31 0.3 0.9 43 3 -43 -50 1 -31 0.2 0.4 20 -34 46 -10 2 -31 0.5 0.3 22 -12 38 -39 1 -31 0.8 0.6 -18 -46 -22 -45 1 -31 0.1 0.8 46 -23 24 21 2 -31 0.3 0.4 9 -41 40 16 2 -31 0.2 0.6 16 -13 -13 -31 1 -31 0.9 0.8 12 -26 -13 -23 1 -31 0.6 0.9 31 -36 47 5 2 -31 0.4 0.5 14 -19 40 31 2 -31 0.4 0.9 7 -40 -1 -40 2 -31 0.8 0.7 38 17 -29 -34 1 -31 0.5 0.8 24 9 14 3 2 -31 0.2 0.3 43 2 41 -12 1 -31 0.5 0.2 43 12 50 -50 1 -31 0.4 0.2 46 -4 18 -47 1 -31 0.4 0.7 4 -38 -8 -38 2 -31 0.2 0.6 47 -24 38 7 2 -31 0.3 0.4 48 5 27 15 1 -31 0.3 0.8 41 -35 20 -41 2 -31 0.4 0.2 0 -44 5 -9 2 -31 0.9 0.1 37 -20 -1 -29 1 -31 0.3 0.9 48 -17 31 11 2 -31 0.8 0.8 47 42 31 -9 1 -31 0.1 0.6 50 49 -4 -33 1 -31 0.8 0.8 -36 -39 36 -2 2 -31 0.5 0.1 30 -16 -8 -19 1 -31 0.2 0.6 12 2 6 -38 1 -31 0.6 0.3 -13 -49 6 -47 1 -31 0.7 0.1 -20 -43 45 -28 2 -31 0.8 0.9 31 5 7 -31 1 -31 0.1 0.7 -1 -6 39 -26 2 -31 0.5 0.4 27 -5 35 -40 1 -31 0.7 0.1 36 28 49 -13 1 -31 0.9 0.1 24 -3 39 -23 1 -31 0.3 0.1 -2 -25 40 32 2 -31 0.9 0.7 47 25 34 -35 1 -31 0.7 0.2 6 1 21 -4 1 -31 0.2 0.6 -34 -41 12 4 2 -31 0.8 0.6 15 -38 19 -5 2 -31 0.5 0.9 17 -34 34 -8 2 -31 0.6 0.8 44 34 20 -30 1 -31 0.1 0.6 46 24 15 -13 1 -31 0.2 0.6 29 10 49 5 2 -31 0.1 0.7 18 -1 -5 -41 1 -31 0.1 0.9 48 42 -30 -36 1 -31 0.4 0.2 -1 -19 -37 -49 1 -31 0.9 0.4 48 -48 49 45 2 -31 0.6 0.6 -3 -47 0 -24 2 -31 0.7 0.2 -21 -47 23 9 2 -31 0.3 0.2 24 -7 44 -25 1 -31 0.3 0.6 -37 -39 33 -48 2 -31 0.8 0.8 -9 -36 -24 -46 1 -31 0.8 0.4 1 -47 -15 -49 1 -31 0.3 0.1 44 29 19 -23 1 -31 0.7 0.9 28 -21 22 0 2 -31 0.9 0.8 -31 -38 48 3 2 -31 0.6 0.3 47 -7 31 -42 1 -31 0.2 0.7 -4 -23 6 -46 2 -31 0.4 0.8 -4 -44 10 -4 2 -31 0.5 0.1 41 -41 42 -2 1 -31 0.8 0.3 -9 -48 23 -38 2 -31 0.5 0.1 44 -30 38 8 1 -31 0.2 0.8 23 -12 18 -12 2 -31 0.8 0.3 39 -37 16 -43 1 -31 0.7 0.9 41 -43 11 -23 1 -31 0.6 0.3 4 -11 -6 -50 1 -31 0.6 0.8 11 -10 9 -26 2 -31 0.9 0.8 9 0 14 -14 2 -31 0.3 0.6 2 -25 14 -36 2 -31 0.9 0.1 -38 -39 38 12 2 -31 0.7 0.1 35 -24 49 30 2 -31 0.4 0.2 18 -14 11 -27 1 -31 0.7 0.3 -24 -37 38 3 2 -31 0.9 0.1 3 -34 2 -18 1 -31 0.6 0.8 26 -26 23 -26 2 -31 0.6 0.1 8 -41 -5 -26 1 -31 0.1 0.9 39 -31 -20 -42 1 -31 0.9 0.3 40 10 -6 -40 1 -31 0.8 0.4 28 -21 49 -36 1 -31 0.2 0.2 24 -38 -6 -45 1 -31 0.5 0.9 43 -23 -7 -18 1 -31 0.8 0.8 -20 -25 38 -26 2 -31 0.8 0.1 10 -13 18 -50 1 -31 0.9 0.9 -42 -50 41 3 2 -31 0.6 0.8 -8 -44 16 -36 2 -31 0.5 0.2 43 40 16 11 1 -31 0.3 0.6 28 2 33 -47 1 -31 0.6 0.7 12 -34 50 -19 2 -31 0.8 0.5 23 -45 -1 -28 1 -31 0.5 0.2 17 3 0 -3 1 -31 0.2 0.4 15 -26 11 -13 2 -31 0.8 0.5 44 -24 28 -40 1 -31 0.8 0.4 16 -17 11 -26 1 -31 0.1 0.6 8 -21 35 2 2 -31 0.2 0.9 10 -38 49 -8 2 -31 0.2 0.8 14 -30 -17 -35 1 -31 0.6 0.8 41 -27 45 9 1 -31 0.7 0.8 0 -8 24 3 2 -31 0.1 0.8 11 9 -22 -49 1 -31 0.3 0.6 34 28 38 9 1 -31 0.5 0.9 -5 -50 8 -3 2 -31 0.3 0.7 -31 -37 42 -16 2 -31 0.8 0.7 8 -5 44 -24 2 -31 0.4 0.1 10 -31 23 2 2 -31 0.4 0.9 48 43 42 27 2 -31 0.6 0.9 38 -38 46 -23 2 -31 0.5 0.7 13 3 -32 -42 1 -31 0.5 0.4 45 -4 34 -37 1 -31 0.6 0.3 25 -11 -38 -45 1 -31 0.1 0.9 -16 -30 19 -9 2 -31 0.1 0.2 40 -1 36 0 2 -31 0.9 0.2 32 -8 47 27 2 -31 0.6 0.9 43 -22 35 -39 1 -31 0.6 0.5 25 17 -3 -41 1 -31 0.5 0.5 28 -24 50 -42 1 -31 0.1 0.7 12 -18 45 9 2 -31 0.4 0.8 -43 -48 44 -14 2 -31 0.6 0.6 29 -44 5 2 2 -31 0.8 0.9 -24 -48 36 -7 2 -31 0.8 0.3 3 -1 17 6 2 -31 0.5 0.2 -19 -49 42 -12 2 -31 0.5 0.3 48 -3 -33 -44 1 -31 0.3 0.4 20 -43 50 19 2 -31 0.4 0.5 13 5 35 -22 1 -31 0.9 0.4 33 6 -28 -46 1 -31 0.2 
0.8 -15 -25 43 -31 2 -31 0.6 0.1 27 14 23 -46 1 -31 0.9 0.8 18 -40 20 -46 1 -31 0.9 0.7 28 -47 -10 -19 1 -31 0.6 0.3 4 -17 -38 -44 1 -31 0.2 0.2 -11 -38 -19 -32 1 -31 0.7 0.1 11 -31 11 -42 1 -31 0.6 0.6 16 -6 0 -14 1 -31 0.9 0.6 30 12 19 -4 1 -31 0.9 0.3 29 13 36 -9 1 -31 0.4 0.6 43 32 31 -12 1 -31 0.3 0.7 24 -6 19 -46 1 -31 0.6 0.7 -1 -18 33 12 2 -31 0.2 0.7 33 -32 -11 -17 2 -31 0.5 0.1 19 15 12 -37 1 -31 0.8 0.3 -1 -49 10 -20 1 -31 0.5 0.5 -2 -47 15 10 2 -31 0.9 0.7 43 18 49 12 2 -31 0.8 0.4 -5 -46 19 -8 2 -31 0.4 0.6 15 12 20 -2 1 -31 0.5 0.5 -18 -33 25 -14 2 -31 0.1 0.2 23 -17 -4 -35 1 -31 0.1 0.3 42 -23 2 -6 1 -31 0.8 0.9 46 18 30 6 2 -31 0.8 0.8 40 8 27 -3 1 -31 0.6 0.7 31 24 35 28 2 -31 0.3 0.7 31 -27 -6 -35 1 -31 0.8 0.2 -33 -44 16 -41 2 -31 0.1 0.9 16 -13 33 2 2 -31 0.8 0.7 23 19 30 -47 1 -31 0.6 0.6 18 -38 -5 -8 1 -31 0.2 0.5 4 -44 39 -15 2 -31 0.3 0.1 41 -42 -5 -35 1 -31 0.7 0.5 47 -36 28 4 1 -31 0.6 0.5 14 3 -4 -37 1 -31 0.1 0.1 39 7 42 15 2 -31 0.6 0.3 46 17 14 2 1 -31 0.2 0.3 47 -11 38 1 2 -31 0.1 0.8 45 -37 34 -13 2 -31 0.3 0.2 -18 -21 -7 -15 1 -31 0.8 0.4 1 -45 -13 -19 2 -31 0.4 0.2 5 -7 32 12 2 -31 0.3 0.9 21 12 14 -34 1 -31 0.3 0.2 4 -11 25 -34 2 -31 0.8 0.2 50 -35 -16 -38 1 -31 0.4 0.8 44 -9 46 -1 2 -31 0.4 0.5 24 -10 9 -27 2 -31 0.6 0.7 -26 -49 34 -14 2 -31 0.6 0.8 49 7 49 41 1 -31 0.4 0.1 13 -43 3 -17 2 -31 0.1 0.3 29 17 4 -3 1 -31 0.2 0.3 40 -30 36 16 2 -31 0.3 0.8 26 -7 4 -20 2 -31 0.3 0.1 25 -21 -14 -39 1 -31 0.5 0.1 -19 -45 43 -26 2 -31 0.8 0.4 -19 -34 -7 -44 1 -31 0.8 0.1 -43 -48 -34 -39 1 -31 0.2 0.1 12 7 -9 -32 1 -31 0.7 0.5 42 33 27 -33 1 -31 0.8 0.9 -30 -38 -3 -19 2 -31 0.5 0.8 -6 -40 20 18 2 -31 0.8 0.9 28 10 -14 -28 1 -31 0.8 0.6 37 -31 34 -12 1 -31 0.4 0.4 43 -47 16 3 2 -31 0.4 0.1 27 -47 8 -43 1 -31 0.2 0.6 13 -27 -16 -48 2 -31 0.5 0.1 16 -15 32 -35 1 -31 0.8 0.5 -24 -41 40 35 2 -31 0.2 0.5 32 12 38 -1 2 -31 0.8 0.5 -16 -45 46 -20 2 -31 0.3 0.4 -22 -23 42 15 2 -31 0.5 0.6 -7 -29 41 15 2 -31 0.3 0.9 -29 -49 4 -36 2 -31 0.5 0.7 48 12 45 44 1 -31 0.1 0.7 10 -44 31 -37 2 -31 0.3 0.9 36 -31 38 -40 2 -31 0.8 0.3 34 -15 11 -19 1 -31 0.8 0.2 47 -33 2 -23 1 -31 0.3 0.2 -32 -35 23 -31 2 -31 0.5 0.9 10 7 28 -13 2 -31 0.3 0.8 -9 -32 2 -43 2 -31 0.2 0.6 25 -40 -2 -38 1 -31 0.5 0.8 31 -13 27 -28 2 -31 0.7 0.9 -1 -26 49 16 2 -31 0.1 0.5 -44 -45 16 11 2 -31 0.2 0.3 19 -33 43 41 2 -31 0.5 0.5 21 6 38 -10 2 -31 0.4 0.4 7 -5 -13 -45 1 -31 0.8 0.8 36 15 25 16 1 -31 0.1 0.4 -32 -45 -13 -42 2 -31 0.2 0.1 21 17 37 36 2 -31 0.2 0.5 6 -37 47 34 2 -31 0.8 0.6 12 -16 36 -7 2 -31 0.4 0.5 -18 -27 -36 -46 1 -31 0.9 0.9 35 32 48 33 2 -31 0.1 0.7 27 -50 44 25 2 -31 0.9 0.2 -41 -49 29 5 2 -31 0.8 0.5 41 19 17 -18 1 -31 0.8 0.6 19 -40 -9 -18 1 -31 0.7 0.2 46 -49 32 -43 1 -31 0.1 0.2 11 -36 47 43 2 -31 0.2 0.6 -32 -47 13 -32 2 -31 0.4 0.2 43 -42 24 2 1 -32 0.4 0.6 -30 -39 32 -41 2 -32 0.3 0.8 12 8 45 -35 2 -32 0.8 0.1 13 -12 9 -48 1 -32 0.3 0.4 18 -18 8 -33 1 -32 0.2 0.7 46 8 -29 -35 1 -32 0.4 0.9 24 -18 7 -5 2 -32 0.8 0.2 -37 -39 4 -25 2 -32 0.7 0.6 -19 -48 29 22 2 -32 0.1 0.6 15 -27 46 -45 2 -32 0.8 0.1 26 -4 9 -22 1 -32 0.1 0.1 32 11 17 -20 1 -32 0.4 0.2 48 -47 12 -48 1 -32 0.1 0.6 43 -16 20 -30 2 -32 0.1 0.5 23 -5 7 3 2 -32 0.5 0.4 -12 -20 42 -15 2 -32 0.1 0.4 48 -30 35 15 2 -32 0.4 0.9 -2 -5 43 -25 2 -32 0.5 0.1 -15 -21 49 24 2 -32 0.7 0.5 21 -17 14 -4 1 -32 0.1 0.9 48 14 30 -31 1 -32 0.7 0.2 26 -31 4 -11 1 -32 0.5 0.6 -1 -20 31 14 2 -32 0.7 0.3 10 -9 23 22 2 -32 0.7 0.7 19 -45 -31 -48 1 -32 0.9 0.8 48 -27 49 30 2 -32 0.2 0.4 19 -2 17 -6 2 -32 0.1 0.1 25 18 28 17 1 -32 0.5 0.6 44 -12 
47 -28 2 -32 0.7 0.2 50 -27 9 -49 1 -32 0.3 0.3 26 -15 -7 -16 1 -32 0.1 0.6 23 -45 -38 -47 1 -32 0.7 0.4 23 -35 44 -43 1 -32 0.1 0.2 -19 -40 34 17 2 -32 0.4 0.9 45 -30 42 -32 2 -32 0.9 0.8 30 -15 36 -2 2 -32 0.1 0.5 35 31 47 -50 1 -32 0.8 0.8 -13 -21 -17 -45 1 -32 0.3 0.3 23 -23 33 19 2 -32 0.9 0.7 47 45 15 -14 1 -32 0.6 0.6 -13 -25 -5 -16 2 -32 0.1 0.9 12 -41 23 -43 2 -32 0.4 0.7 25 -30 14 5 2 -32 0.2 0.7 35 16 8 -43 1 -32 0.6 0.8 38 -27 -5 -46 1 -32 0.8 0.6 37 -47 23 2 2 -32 0.5 0.2 18 -27 43 22 2 -32 0.7 0.5 29 18 30 18 2 -32 0.9 0.7 50 26 -27 -44 1 -32 0.3 0.6 40 29 44 2 1 -32 0.1 0.1 44 -39 26 8 2 -32 0.2 0.1 48 9 48 19 2 -32 0.1 0.5 -5 -37 39 36 2 -32 0.2 0.9 41 30 16 4 1 -32 0.1 0.9 -11 -17 32 -37 2 -32 0.9 0.8 -28 -29 49 14 2 -32 0.5 0.4 25 14 0 -16 1 -32 0.2 0.7 4 -30 -22 -24 2 -32 0.8 0.7 -14 -23 49 -33 2 -32 0.2 0.4 39 22 49 -49 1 -32 0.9 0.6 25 14 -19 -46 1 -32 0.4 0.1 -19 -33 47 42 2 -32 0.3 0.4 19 -17 33 32 2 -32 0.2 0.5 31 13 1 -33 1 -32 0.2 0.4 -31 -46 3 -34 2 -32 0.2 0.4 -4 -20 -20 -48 1 -32 0.8 0.4 30 8 14 -39 1 -32 0.9 0.2 37 22 7 4 1 -32 0.2 0.6 -26 -29 45 -40 2 -32 0.7 0.8 23 -23 46 -21 2 -32 0.2 0.1 0 -27 -24 -50 1 -32 0.9 0.8 41 -18 4 -50 1 -32 0.9 0.9 29 14 46 -42 2 -32 0.4 0.6 35 15 25 -39 1 -32 0.9 0.7 40 -4 -9 -46 1 -32 0.1 0.5 -30 -41 42 -29 2 -32 0.3 0.6 46 15 45 19 2 -32 0.6 0.1 -13 -34 13 -13 2 -32 0.3 0.4 34 -25 33 -31 2 -32 0.9 0.8 9 -16 36 30 2 -32 0.1 0.9 27 -23 7 -29 2 -32 0.4 0.3 50 47 34 23 2 -32 0.1 0.6 -18 -22 1 -47 2 -32 0.5 0.8 13 -35 -1 -15 2 -32 0.9 0.6 39 -33 -6 -44 1 -32 0.3 0.3 39 -36 42 5 2 -32 0.5 0.6 39 -48 45 -2 2 -32 0.3 0.1 -33 -48 45 -40 1 -32 0.6 0.7 23 -14 33 -36 2 -32 0.1 0.2 48 -41 31 14 2 -32 0.8 0.2 31 -21 50 -12 1 -32 0.3 0.4 23 -42 12 -14 2 -32 0.1 0.1 24 -47 13 -35 2 -32 0.4 0.6 48 -5 26 -39 1 -32 0.4 0.7 4 -6 -40 -42 1 -32 0.5 0.4 37 18 -1 -18 1 -32 0.6 0.1 28 -38 42 6 2 -32 0.8 0.2 33 -27 40 1 1 -32 0.9 0.8 37 -1 9 -42 1 -32 0.7 0.3 27 -34 31 -31 1 -32 0.1 0.7 20 -9 6 -48 2 -32 0.4 0.6 24 -27 -26 -42 1 -32 0.6 0.1 21 -19 45 36 2 -32 0.3 0.7 48 26 -27 -43 1 -32 0.4 0.7 16 13 4 -37 1 -32 0.5 0.1 50 40 27 -45 1 -32 0.8 0.7 42 -38 14 -20 1 -32 0.6 0.8 1 -44 -1 -42 2 -32 0.8 0.4 -16 -26 27 -35 1 -32 0.3 0.1 46 45 10 -11 1 -32 0.6 0.5 33 -50 39 -10 1 -32 0.7 0.8 29 -43 46 -3 2 -32 0.2 0.9 40 -18 -12 -48 1 -32 0.7 0.9 13 -46 49 20 2 -32 0.6 0.8 -25 -47 38 -24 2 -32 0.7 0.5 35 -10 22 -3 1 -32 0.7 0.1 33 6 -19 -23 1 -32 0.9 0.9 -12 -20 19 -17 2 -32 0.8 0.2 30 -32 21 -37 1 -32 0.7 0.6 12 -36 33 -39 2 -32 0.1 0.2 22 -26 -24 -32 1 -32 0.9 0.9 34 -28 -17 -45 1 -32 0.6 0.3 21 -20 23 -39 1 -32 0.6 0.8 6 -9 5 -37 2 -32 0.5 0.4 0 -2 -4 -6 1 -32 0.2 0.7 26 -31 28 10 2 -32 0.1 0.3 27 6 24 -32 1 -32 0.1 0.1 48 -42 -11 -46 1 -32 0.8 0.8 48 -37 -11 -13 1 -32 0.8 0.7 -19 -20 1 -21 1 -32 0.9 0.4 14 -11 36 25 2 -32 0.2 0.5 34 4 8 -4 1 -32 0.5 0.8 32 -1 6 -38 1 -32 0.3 0.9 31 25 41 -20 2 -32 0.1 0.1 4 -34 46 -50 2 -32 0.3 0.5 14 -11 8 -24 2 -32 0.5 0.7 10 4 48 -38 2 -32 0.8 0.7 35 -13 21 8 1 -32 0.1 0.2 11 -22 37 -27 2 -32 0.6 0.1 8 -40 6 -32 1 -32 0.6 0.9 21 -24 39 -19 2 -32 0.3 0.5 22 -33 41 -19 2 -32 0.8 0.7 21 -40 50 -9 2 -32 0.5 0.3 -7 -41 14 -4 2 -32 0.4 0.7 20 -42 -32 -39 1 -32 0.2 0.3 41 29 -12 -26 1 -32 0.6 0.6 30 -25 47 -17 2 -32 0.3 0.6 41 39 15 -15 1 -32 0.9 0.8 -1 -23 30 10 2 -32 0.2 0.7 29 -40 34 -12 2 -32 0.1 0.1 7 -33 34 -16 2 -32 0.1 0.3 15 -22 7 -14 2 -32 0.9 0.2 38 -16 -17 -31 1 -32 0.8 0.7 29 -7 30 -6 1 -32 0.8 0.4 19 6 18 4 1 -32 0.7 0.3 -4 -38 22 -28 2 -32 0.1 0.3 49 7 23 -39 1 -32 0.2 0.7 31 1 -21 -44 1 -32 0.4 0.9 48 10 38 19 2 
-32 0.3 0.7 33 30 -4 -50 1 -32 0.3 0.5 42 -5 -22 -31 1 -32 0.6 0.7 50 -17 -38 -45 1 -32 0.7 0.3 27 -47 40 36 2 -32 0.4 0.4 28 -15 30 -41 1 -32 0.7 0.9 3 2 12 -15 2 -32 0.3 0.8 17 -35 -2 -40 2 -32 0.8 0.3 44 -23 45 3 1 -32 0.6 0.9 40 11 44 43 2 -32 0.9 0.9 31 28 45 3 2 -32 0.5 0.5 -14 -48 12 -16 2 -32 0.6 0.7 18 4 13 5 1 -32 0.8 0.7 41 18 28 -32 1 -32 0.3 0.6 -8 -28 0 -17 2 -32 0.9 0.6 48 -26 20 -26 1 -32 0.6 0.9 21 -16 16 -27 1 -32 0.5 0.9 26 -29 40 39 2 -32 0.3 0.9 36 -44 12 -12 2 -32 0.2 0.4 40 -1 19 10 2 -32 0.5 0.7 45 -38 44 -21 2 -32 0.5 0.4 39 -10 -3 -38 1 -32 0.5 0.6 -16 -29 29 -27 2 -32 0.4 0.3 47 -11 19 -8 1 -32 0.6 0.2 18 -29 7 -26 1 -32 0.5 0.3 36 -19 7 -17 1 -32 0.3 0.5 34 26 -28 -29 1 -32 0.6 0.8 20 -36 40 25 2 -32 0.8 0.8 -27 -37 24 17 2 -32 0.2 0.5 40 3 50 22 2 -32 0.1 0.2 24 -39 -39 -41 1 -32 0.8 0.6 -6 -40 14 3 2 -32 0.1 0.8 32 11 40 8 2 -32 0.6 0.5 9 -20 47 -4 2 -32 0.5 0.6 44 -47 -30 -42 1 -32 0.8 0.5 -21 -35 1 -23 2 -32 0.4 0.9 1 -44 3 -15 2 -32 0.5 0.5 -7 -30 10 -42 2 -32 0.6 0.6 -2 -30 32 -29 2 -32 0.3 0.1 -8 -40 17 -9 2 -32 0.4 0.2 13 -21 22 -10 1 -32 0.7 0.7 -1 -48 -39 -42 2 -32 0.3 0.5 8 -14 35 -4 2 -32 0.2 0.2 25 17 12 -17 1 -32 0.8 0.2 41 31 -4 -49 1 -32 0.7 0.1 3 -3 31 -2 2 -32 0.1 0.1 49 -19 45 39 2 -32 0.2 0.4 -1 -45 -15 -38 2 -32 0.9 0.6 -27 -45 -27 -38 2 -32 0.7 0.4 20 -45 -17 -29 1 -32 0.7 0.5 49 -36 39 18 2 -32 0.9 0.5 45 16 33 -36 1 -32 0.3 0.7 47 41 25 -30 1 -32 0.5 0.8 -13 -43 4 -29 2 -32 0.1 0.3 -8 -11 -25 -42 1 -32 0.4 0.2 36 -32 -16 -20 1 -32 0.7 0.5 -31 -40 40 1 2 -32 0.8 0.8 26 -9 -12 -22 1 -32 0.2 0.6 28 -46 -25 -29 1 -32 0.8 0.2 36 1 -6 -34 1 -32 0.6 0.9 42 -39 48 -19 2 -32 0.8 0.3 -10 -49 2 -43 2 -32 0.8 0.2 16 -30 12 -24 1 -32 0.9 0.8 0 -2 -16 -43 1 -32 0.1 0.5 3 -21 -43 -49 1 -32 0.7 0.5 36 26 47 -50 1 -32 0.1 0.1 26 -29 -20 -24 1 -32 0.7 0.3 27 -44 12 1 1 -32 0.7 0.9 -8 -45 36 -43 2 -32 0.1 0.6 25 2 -7 -26 1 -32 0.8 0.1 46 -9 27 -35 1 -32 0.9 0.4 -1 -4 -15 -37 2 -32 0.2 0.6 22 -31 -13 -49 1 -32 0.5 0.1 42 -38 -26 -30 1 -32 0.7 0.3 48 9 1 -25 1 -32 0.6 0.4 22 4 28 -4 1 -32 0.6 0.2 23 15 2 -28 1 -32 0.5 0.1 19 9 46 16 1 -32 0.1 0.1 7 1 37 -37 1 -32 0.3 0.4 39 -13 25 4 2 -32 0.2 0.3 35 26 6 -21 1 -32 0.8 0.8 38 -32 42 -30 2 -32 0.9 0.3 26 -16 -28 -43 1 -32 0.1 0.3 22 -10 -39 -43 1 -32 0.9 0.4 -4 -26 39 34 2 -32 0.4 0.8 18 -41 25 -46 2 -32 0.7 0.2 -4 -15 50 -46 1 -32 0.6 0.1 -34 -36 30 -23 2 -32 0.5 0.1 32 -1 7 -35 1 -32 0.7 0.7 8 -37 42 -2 2 -32 0.9 0.7 47 5 19 12 1 -32 0.7 0.8 42 7 10 -34 1 -32 0.9 0.1 34 23 -6 -26 1 -32 0.2 0.5 28 -48 8 -42 2 -32 0.9 0.3 7 -6 33 -33 1 -32 0.3 0.3 -16 -48 42 -17 2 -32 0.3 0.1 37 25 5 -42 1 -32 0.8 0.1 46 39 39 13 1 -32 0.4 0.5 -7 -48 36 -7 2 -32 0.8 0.1 -8 -39 37 13 2 -32 0.1 0.7 -17 -40 45 -23 2 -32 0.1 0.4 37 -8 21 -35 2 -32 0.9 0.3 13 -10 34 -14 2 -32 0.1 0.9 25 10 34 11 1 -32 0.3 0.8 -21 -33 31 -11 2 -32 0.5 0.7 23 4 49 37 2 -32 0.8 0.2 1 -9 25 12 2 -32 0.1 0.1 -1 -50 -26 -37 2 -32 0.2 0.7 5 -21 -4 -34 2 -32 0.6 0.8 -19 -35 23 -6 2 -32 0.1 0.4 45 40 18 -30 1 -32 0.3 0.2 18 -39 4 -1 2 -32 0.9 0.9 23 -11 44 1 2 -32 0.5 0.9 21 -17 10 -41 2 -32 0.3 0.6 34 -47 44 -26 2 -32 0.4 0.1 13 -24 38 29 2 -32 0.1 0.2 42 -1 -37 -49 1 -32 0.9 0.4 27 -8 39 -23 1 -32 0.4 0.5 22 18 13 -12 1 -32 0.5 0.9 37 5 2 -32 1 -32 0.2 0.7 40 19 4 -42 1 -32 0.4 0.9 34 -21 -22 -33 1 -32 0.4 0.3 11 -12 46 38 2 -32 0.3 0.7 39 -11 23 -49 2 -32 0.1 0.2 29 1 14 5 1 -32 0.5 0.9 18 8 27 -48 2 -32 0.9 0.8 25 8 27 4 1 -32 0.2 0.5 28 -4 37 8 2 -32 0.7 0.8 36 10 16 -28 1 -32 0.6 0.1 13 -44 46 38 2 -32 0.8 0.6 -11 -39 12 -24 2 -32 0.7 0.5 10 7 26 18 2 
-32 0.7 0.9 -8 -13 23 -23 2 -32 0.1 0.8 23 -2 2 -33 1 -32 0.6 0.4 38 -13 -4 -11 1 -32 0.3 0.4 43 -34 25 -49 2 -32 0.8 0.2 9 -17 -1 -46 1 -32 0.6 0.4 34 -47 12 -15 2 -32 0.1 0.8 -33 -46 -1 -13 2 -32 0.6 0.8 -9 -29 45 -7 2 -32 0.9 0.5 37 -49 42 -18 1 -32 0.9 0.2 40 -32 33 3 1 -32 0.2 0.9 13 -43 5 -35 2 -33 0.8 0.3 3 -33 0 -34 1 -33 0.6 0.4 -12 -42 -8 -18 2 -33 0.6 0.5 13 -40 13 -26 1 -33 0.2 0.4 -20 -35 -7 -14 2 -33 0.7 0.9 32 -11 42 38 2 -33 0.2 0.1 -5 -33 13 -45 1 -33 0.6 0.3 28 -48 -46 -50 1 -33 0.1 0.5 26 -2 48 41 2 -33 0.2 0.9 33 -43 32 -34 2 -33 0.3 0.9 50 -4 41 -7 2 -33 0.7 0.4 -12 -29 0 -22 2 -33 0.7 0.7 38 34 1 -47 1 -33 0.4 0.5 27 -15 21 -11 2 -33 0.5 0.6 12 -39 -3 -11 2 -33 0.3 0.9 36 -34 41 24 2 -33 0.1 0.6 31 -2 21 9 2 -33 0.9 0.3 39 16 -12 -28 1 -33 0.6 0.4 40 -34 8 -16 1 -33 0.5 0.8 16 -37 3 0 2 -33 0.6 0.9 -16 -24 11 -20 2 -33 0.7 0.2 20 -14 21 -31 1 -33 0.2 0.6 -2 -49 -22 -49 2 -33 0.5 0.8 34 31 -2 -22 1 -33 0.4 0.9 32 25 15 -34 1 -33 0.9 0.7 14 -10 37 -23 2 -33 0.1 0.7 14 -24 -31 -43 1 -33 0.9 0.1 -5 -40 39 18 2 -33 0.8 0.4 31 2 -20 -45 1 -33 0.2 0.1 36 -6 -5 -26 1 -33 0.4 0.9 16 -48 -14 -36 1 -33 0.5 0.7 47 -17 -5 -10 1 -33 0.7 0.7 49 -40 -31 -44 1 -33 0.1 0.9 20 -28 22 -15 2 -33 0.3 0.2 -15 -42 33 -17 2 -33 0.3 0.1 22 -26 -8 -24 1 -33 0.4 0.3 42 -19 45 44 2 -33 0.9 0.7 42 -21 40 2 1 -33 0.8 0.1 9 -3 -6 -8 1 -33 0.9 0.2 38 10 -9 -18 1 -33 0.8 0.3 42 27 -13 -49 1 -33 0.6 0.2 43 -28 23 3 1 -33 0.9 0.2 -24 -31 35 -35 2 -33 0.9 0.6 -12 -49 4 -48 2 -33 0.6 0.1 28 1 19 -48 1 -33 0.8 0.2 6 -24 50 -11 2 -33 0.1 0.9 36 28 49 23 2 -33 0.4 0.2 49 -2 2 -44 1 -33 0.4 0.1 40 -14 45 28 2 -33 0.7 0.9 32 -2 45 2 2 -33 0.6 0.1 34 -39 49 32 2 -33 0.5 0.5 -29 -42 -23 -46 1 -33 0.5 0.9 7 -8 3 -3 2 -33 0.4 0.9 29 -33 43 -27 2 -33 0.5 0.7 17 -46 27 -1 2 -33 0.6 0.6 47 -17 -25 -34 1 -33 0.4 0.6 41 1 46 -42 1 -33 0.4 0.3 17 -23 27 -49 1 -33 0.3 0.8 11 -21 29 -10 2 -33 0.9 0.9 43 -48 1 -4 1 -33 0.5 0.6 -27 -41 48 43 2 -33 0.6 0.4 26 -37 -23 -31 1 -33 0.7 0.6 38 0 -16 -31 1 -33 0.9 0.8 32 -48 20 -46 1 -33 0.3 0.2 40 -48 6 -6 2 -33 0.3 0.7 -5 -34 42 31 2 -33 0.7 0.4 25 -21 19 11 2 -33 0.9 0.9 38 32 21 -3 1 -33 0.8 0.6 40 -27 29 13 2 -33 0.8 0.2 43 -19 44 -32 1 -33 0.6 0.8 5 -23 18 7 2 -33 0.4 0.5 -25 -32 33 -38 2 -33 0.7 0.4 25 -31 20 -36 1 -33 0.4 0.9 29 -25 41 3 2 -33 0.9 0.7 -20 -34 46 29 2 -33 0.4 0.4 44 -30 34 22 2 -33 0.8 0.2 32 -49 19 14 2 -33 0.9 0.5 -2 -32 -4 -44 1 -33 0.6 0.8 19 -40 34 0 2 -33 0.4 0.3 -4 -10 -5 -36 1 -33 0.2 0.2 43 5 18 12 1 -33 0.8 0.3 17 -8 13 -1 1 -33 0.9 0.4 12 -22 34 -29 1 -33 0.5 0.4 5 -13 37 -36 2 -33 0.3 0.3 44 34 32 -49 1 -33 0.7 0.9 9 7 30 -42 2 -33 0.7 0.6 37 -42 37 -38 1 -33 0.3 0.4 35 -15 41 -41 1 -33 0.7 0.8 50 -24 39 -23 1 -33 0.6 0.6 38 -23 -1 -3 1 -33 0.8 0.2 -27 -30 -6 -25 2 -33 0.7 0.2 18 11 0 -11 1 -33 0.7 0.1 20 -11 50 24 2 -33 0.1 0.1 38 -47 26 -41 2 -33 0.5 0.5 -32 -44 22 20 2 -33 0.1 0.4 -49 -50 -39 -47 2 -33 0.3 0.4 10 -47 48 -18 2 -33 0.1 0.5 -2 -16 41 2 2 -33 0.9 0.2 39 36 32 -22 1 -33 0.7 0.9 40 -6 46 -33 2 -33 0.2 0.2 46 -20 43 35 2 -33 0.6 0.4 48 6 47 14 1 -33 0.4 0.2 50 -29 6 -27 1 -33 0.1 0.1 40 -32 31 -20 2 -33 0.5 0.9 21 13 -28 -43 1 -33 0.7 0.1 34 -19 46 -11 1 -33 0.8 0.5 47 38 -14 -32 1 -33 0.7 0.3 34 32 29 -46 1 -33 0.3 0.6 22 -7 -1 -41 1 -33 0.8 0.6 45 40 -23 -42 1 -33 0.8 0.9 45 34 48 1 1 -33 0.9 0.4 13 -35 44 7 2 -33 0.7 0.5 49 10 10 5 1 -33 0.4 0.3 41 -45 21 -38 1 -33 0.5 0.9 38 22 -17 -35 1 -33 0.6 0.1 -22 -33 -7 -22 2 -33 0.4 0.8 -27 -37 40 -8 2 -33 0.2 0.3 -4 -16 11 -25 1 -33 0.7 0.2 29 8 19 -31 1 -33 0.7 0.9 50 -6 13 -48 1 -33 
0.2 0.6 37 -4 27 -21 2 -33 0.8 0.2 -26 -28 36 18 2 -33 0.7 0.8 -4 -17 13 -24 2 -33 0.2 0.6 24 -2 36 5 2 -33 0.1 0.1 37 -11 47 -22 1 -33 0.7 0.9 -23 -48 47 31 2 -33 0.4 0.7 -20 -21 27 -42 2 -33 0.8 0.4 45 -26 37 -38 1 -33 0.8 0.2 -1 -25 34 31 2 -33 0.1 0.1 24 -24 -27 -44 1 -33 0.7 0.8 -11 -27 44 41 2 -33 0.4 0.3 39 -1 43 23 2 -33 0.1 0.1 24 -8 -18 -29 1 -33 0.6 0.6 5 -41 -28 -49 1 -33 0.1 0.6 6 -42 46 22 2 -33 0.5 0.5 -32 -43 5 -1 2 -33 0.4 0.3 1 -33 -14 -31 1 -33 0.1 0.4 -21 -45 35 -13 2 -33 0.3 0.3 -30 -39 48 41 2 -33 0.7 0.2 34 -16 48 2 1 -33 0.4 0.1 37 -11 42 -37 1 -33 0.3 0.7 29 -9 -30 -31 1 -33 0.7 0.1 17 6 31 12 1 -33 0.7 0.2 44 -14 -43 -50 1 -33 0.9 0.7 -15 -25 36 -4 2 -33 0.3 0.6 33 22 18 16 1 -33 0.1 0.9 -12 -35 32 9 2 -33 0.6 0.1 13 -25 43 -2 2 -33 0.6 0.8 48 -40 8 -17 1 -33 0.1 0.7 14 1 35 18 2 -33 0.2 0.3 -30 -45 9 5 2 -33 0.7 0.5 8 -24 48 15 2 -33 0.4 0.1 -10 -13 17 -49 1 -33 0.7 0.4 11 -7 21 -34 1 -33 0.2 0.9 16 -33 39 8 2 -33 0.1 0.6 20 -27 43 -48 2 -33 0.5 0.5 34 11 -18 -30 1 -33 0.9 0.6 9 -24 49 -15 2 -33 0.1 0.7 28 -9 45 -11 2 -33 0.8 0.5 43 -41 -26 -40 1 -33 0.1 0.7 25 -25 42 14 2 -33 0.8 0.9 10 5 36 31 2 -33 0.5 0.9 24 -29 -44 -50 1 -33 0.5 0.2 30 -19 16 -19 1 -33 0.6 0.6 32 10 -2 -25 1 -33 0.6 0.5 6 -28 32 -20 2 -33 0.6 0.9 -5 -15 34 23 2 -33 0.6 0.9 21 -40 2 -30 1 -33 0.2 0.2 12 -31 -1 -5 2 -33 0.4 0.3 42 -4 -5 -30 1 -33 0.5 0.2 20 -23 -6 -13 1 -33 0.7 0.2 46 40 -4 -5 1 -33 0.7 0.8 23 15 18 11 1 -33 0.8 0.7 11 -50 7 -14 2 -33 0.6 0.5 -20 -39 32 1 2 -33 0.2 0.7 43 -35 14 -6 2 -33 0.4 0.2 28 1 20 -50 1 -33 0.8 0.9 38 -20 42 4 2 -33 0.9 0.3 41 -30 27 14 1 -33 0.7 0.5 -22 -34 -45 -46 1 -33 0.4 0.8 44 -24 11 -41 1 -33 0.9 0.9 37 1 9 -16 1 -33 0.8 0.9 -5 -16 1 -44 2 -33 0.2 0.4 30 -3 37 -31 1 -33 0.2 0.3 14 -43 6 -28 2 -33 0.8 0.8 7 -43 27 0 2 -33 0.5 0.9 5 -6 45 30 2 -33 0.8 0.5 -10 -45 15 3 2 -33 0.5 0.6 30 -2 34 3 2 -33 0.5 0.9 37 -44 21 19 2 -33 0.2 0.9 30 -45 34 -6 2 -33 0.7 0.6 32 -38 -10 -37 1 -33 0.5 0.4 -11 -26 -19 -49 1 -33 0.1 0.1 20 -40 34 -30 2 -33 0.7 0.1 35 11 6 -35 1 -33 0.9 0.7 18 -36 -7 -23 1 -33 0.6 0.2 30 18 25 -28 1 -33 0.8 0.3 -28 -32 45 -44 2 -33 0.6 0.9 -32 -39 40 -39 2 -33 0.6 0.1 43 5 -38 -43 1 -33 0.5 0.4 42 33 -38 -40 1 -33 0.7 0.9 -5 -10 38 -39 2 -33 0.5 0.6 22 -7 32 16 2 -33 0.1 0.4 41 34 9 -15 1 -33 0.1 0.7 29 -28 31 -48 2 -33 0.8 0.7 27 -26 31 -19 1 -33 0.5 0.4 19 15 50 -31 1 -33 0.5 0.4 -14 -35 9 -31 2 -33 0.3 0.7 41 -47 -26 -34 1 -33 0.5 0.3 48 29 39 -24 1 -33 0.9 0.1 34 26 19 -30 1 -33 0.8 0.5 49 -31 43 25 2 -33 0.8 0.1 34 33 -17 -21 1 -33 0.9 0.9 22 -48 48 -22 2 -33 0.3 0.1 21 14 15 -20 1 -33 0.6 0.5 -16 -46 17 -12 2 -33 0.3 0.2 -41 -49 -7 -40 2 -33 0.9 0.8 24 -10 17 -50 1 -33 0.9 0.5 50 -37 -27 -31 1 -33 0.3 0.8 -26 -29 -22 -44 2 -33 0.3 0.2 41 -19 33 -16 1 -33 0.6 0.1 42 -5 -23 -38 1 -33 0.2 0.9 -28 -50 28 2 2 -33 0.2 0.6 25 -8 -9 -32 1 -33 0.9 0.6 16 -43 14 -29 1 -33 0.1 0.7 28 2 46 39 2 -33 0.3 0.9 22 10 -9 -43 1 -33 0.5 0.1 35 15 23 12 1 -33 0.6 0.5 48 -44 -15 -43 1 -33 0.9 0.6 44 10 -30 -38 1 -33 0.1 0.6 39 9 48 -8 2 -33 0.9 0.1 -27 -30 32 11 2 -33 0.9 0.5 -6 -11 40 -29 2 -33 0.6 0.6 -23 -42 41 16 2 -33 0.3 0.7 -19 -38 -9 -25 2 -33 0.8 0.7 -4 -24 14 0 2 -33 0.1 0.1 12 1 11 -4 1 -33 0.8 0.4 -18 -30 29 -50 1 -33 0.7 0.5 -4 -50 32 -8 2 -33 0.8 0.5 21 5 27 2 1 -33 0.2 0.7 33 2 28 -39 2 -33 0.7 0.8 10 -25 12 -49 2 -33 0.3 0.6 36 -38 22 3 2 -33 0.8 0.2 48 28 39 9 1 -33 0.9 0.4 19 -11 34 -45 1 -33 0.1 0.6 -19 -48 9 -22 2 -33 0.3 0.2 13 -13 44 -30 1 -33 0.9 0.3 20 4 -15 -50 1 -33 0.2 0.1 10 9 24 8 1 -33 0.7 0.5 -17 -34 33 20 2 -33 0.4 0.2 
-29 -32 -16 -45 1 -33 0.6 0.9 -11 -32 25 -43 2 -33 0.9 0.8 5 -28 33 -22 2 -33 0.7 0.9 50 -37 45 -24 2 -33 0.3 0.1 -10 -38 6 -11 2 -33 0.8 0.6 10 -34 50 -18 2 -33 0.6 0.7 24 -47 -16 -17 1 -33 0.8 0.4 36 24 5 -38 1 -33 0.3 0.3 -23 -31 -29 -34 1 -33 0.6 0.9 10 -10 22 9 2 -33 0.3 0.6 32 16 48 23 2 -33 0.5 0.6 42 -50 40 -18 2 -33 0.5 0.3 47 30 46 -9 1 -33 0.3 0.3 -40 -45 40 -21 2 -33 0.8 0.2 29 -43 39 37 2 -33 0.1 0.9 1 -33 46 -35 2 -33 0.2 0.9 -9 -28 -20 -35 1 -33 0.1 0.5 -2 -3 28 -41 2 -33 0.5 0.9 -3 -14 28 18 2 -33 0.5 0.3 -4 -37 26 -37 2 -33 0.3 0.5 -26 -29 -3 -31 2 -33 0.7 0.1 1 -31 19 -33 1 -33 0.9 0.2 0 -24 10 -27 1 -33 0.4 0.2 2 -26 10 -43 1 -33 0.1 0.5 37 -33 -6 -19 2 -33 0.2 0.7 37 -50 7 -26 2 -33 0.7 0.6 36 33 13 -7 1 -33 0.7 0.3 6 -10 -6 -29 1 -33 0.6 0.2 36 -39 0 -20 1 -33 0.6 0.4 -27 -37 -20 -49 1 -33 0.4 0.9 48 -5 -45 -46 1 -33 0.5 0.4 48 21 35 -33 1 -33 0.8 0.3 -4 -18 13 -16 2 -33 0.9 0.8 42 19 40 -37 1 -33 0.1 0.3 11 -2 5 -37 1 -33 0.2 0.8 -21 -38 45 39 2 -33 0.8 0.6 -19 -36 21 -10 2 -33 0.5 0.4 41 -32 -23 -40 1 -33 0.2 0.2 25 -46 28 -12 2 -33 0.4 0.2 -12 -48 6 -40 2 -33 0.3 0.1 -26 -38 13 -34 2 -33 0.6 0.4 47 2 -23 -45 1 -33 0.8 0.2 30 2 -7 -12 1 -33 0.1 0.1 2 -49 -35 -45 1 -33 0.7 0.2 40 9 -29 -32 1 -33 0.4 0.8 24 -47 13 -26 2 -33 0.3 0.4 48 1 17 -22 1 -33 0.1 0.9 -11 -24 29 24 2 -33 0.8 0.8 21 -35 -26 -46 1 -33 0.9 0.7 38 28 -9 -28 1 -33 0.7 0.3 -1 -13 -3 -41 1 -34 0.5 0.4 30 -27 14 -1 2 -34 0.8 0.3 -3 -41 21 10 2 -34 0.6 0.7 35 -36 19 -11 1 -34 0.3 0.6 -14 -50 34 -21 2 -34 0.3 0.8 -12 -38 47 -10 2 -34 0.7 0.2 40 -9 34 -44 1 -34 0.4 0.1 -17 -38 28 26 2 -34 0.1 0.9 -16 -39 11 4 2 -34 0.5 0.3 -45 -49 -26 -29 2 -34 0.1 0.3 1 -35 21 -6 2 -34 0.5 0.3 35 18 30 -34 1 -34 0.7 0.5 -5 -30 29 25 2 -34 0.7 0.6 23 -2 16 -28 1 -34 0.3 0.1 -8 -25 24 20 2 -34 0.7 0.6 36 -33 -5 -14 1 -34 0.6 0.6 21 -48 -1 -41 1 -34 0.6 0.2 -7 -44 11 -29 1 -34 0.4 0.6 49 46 23 -5 1 -34 0.2 0.2 34 26 26 21 1 -34 0.3 0.6 41 39 16 10 1 -34 0.5 0.5 18 -30 -16 -35 1 -34 0.8 0.8 49 -48 19 -1 1 -34 0.8 0.7 19 -9 46 -43 2 -34 0.5 0.4 -14 -20 -28 -36 1 -34 0.5 0.8 -10 -49 26 -39 2 -34 0.2 0.6 18 -36 -19 -46 1 -34 0.6 0.2 -7 -38 10 -21 2 -34 0.9 0.4 24 -13 42 40 2 -34 0.8 0.2 24 22 28 -31 1 -34 0.9 0.3 -8 -19 22 -43 1 -34 0.4 0.9 -10 -45 47 23 2 -34 0.9 0.1 37 -4 1 -29 1 -34 0.7 0.8 36 16 44 -23 1 -34 0.4 0.8 19 18 -32 -50 1 -34 0.7 0.1 14 9 29 -1 1 -34 0.8 0.4 -10 -22 14 -22 2 -34 0.3 0.5 -2 -5 -15 -48 1 -34 0.1 0.7 45 15 18 1 1 -34 0.1 0.1 10 -24 45 -43 1 -34 0.9 0.8 19 -44 17 14 2 -34 0.8 0.1 43 -45 48 25 2 -34 0.3 0.9 31 13 45 40 2 -34 0.8 0.4 24 -29 -24 -37 1 -34 0.4 0.8 5 -42 34 10 2 -34 0.3 0.1 31 26 22 -32 1 -34 0.5 0.3 22 -47 -9 -13 2 -34 0.5 0.9 35 -41 3 -16 1 -34 0.7 0.6 20 -4 46 41 2 -34 0.4 0.4 41 -34 27 13 2 -34 0.8 0.9 20 17 28 15 2 -34 0.3 0.3 33 -50 39 -25 2 -34 0.3 0.5 28 -31 48 42 2 -34 0.4 0.5 44 6 -4 -50 1 -34 0.2 0.4 44 -2 16 11 2 -34 0.8 0.1 18 -12 -5 -43 1 -34 0.9 0.1 41 -40 25 -13 1 -34 0.1 0.3 7 -29 32 15 2 -34 0.4 0.6 -1 -33 17 16 2 -34 0.7 0.1 30 -19 27 18 2 -34 0.5 0.4 44 18 26 14 1 -34 0.3 0.5 29 -42 30 -47 2 -34 0.4 0.3 27 24 4 -40 1 -34 0.9 0.3 26 20 38 28 2 -34 0.3 0.7 31 6 38 35 2 -34 0.4 0.8 35 -37 29 6 2 -34 0.4 0.1 20 5 5 -18 1 -34 0.2 0.7 45 31 -30 -45 1 -34 0.4 0.3 47 -10 -15 -50 1 -34 0.4 0.3 -38 -48 5 -20 2 -34 0.3 0.9 34 -37 31 -46 2 -34 0.2 0.1 -34 -42 6 -8 2 -34 0.9 0.2 37 -43 -21 -48 1 -34 0.6 0.7 6 0 -6 -19 1 -34 0.1 0.9 7 5 19 -18 2 -34 0.2 0.8 -22 -23 37 36 2 -34 0.7 0.5 10 -12 14 -49 1 -34 0.4 0.8 28 12 9 4 1 -34 0.2 0.6 13 4 18 -43 1 -34 0.1 0.5 -8 -38 30 -39 2 -34 0.8 
0.4 -1 -16 23 10 2 -34 0.6 0.7 46 -33 15 -2 1 -34 0.6 0.5 50 33 -12 -14 1 -34 0.1 0.5 34 -25 2 -50 1 -34 0.7 0.6 33 14 8 7 1 -34 0.3 0.7 39 -21 29 -36 2 -34 0.3 0.6 41 18 20 -29 1 -34 0.2 0.1 28 -2 5 -40 1 -34 0.6 0.9 43 10 12 -32 1 -34 0.2 0.7 43 -46 -7 -45 2 -34 0.6 0.5 26 -38 23 -42 1 -34 0.8 0.7 -13 -31 40 15 2 -34 0.4 0.3 -32 -35 50 -6 2 -34 0.4 0.2 40 -2 -40 -42 1 -34 0.4 0.7 27 -2 12 10 2 -34 0.7 0.1 -11 -25 37 -40 1 -34 0.5 0.9 49 -47 -43 -46 1 -34 0.7 0.3 11 -44 44 2 2 -34 0.7 0.9 -2 -23 42 -21 2 -34 0.8 0.6 36 -43 -14 -22 1 -34 0.4 0.2 28 27 25 -8 1 -34 0.7 0.9 -14 -34 31 -22 2 -34 0.3 0.6 5 -15 -20 -50 1 -34 0.7 0.4 26 0 50 7 1 -34 0.2 0.7 2 -27 46 -20 2 -34 0.6 0.7 1 -28 14 12 2 -34 0.4 0.3 -8 -9 34 -20 1 -34 0.3 0.4 49 48 27 -21 1 -34 0.4 0.5 48 -35 36 24 2 -34 0.8 0.7 9 -24 26 22 2 -34 0.2 0.5 38 -20 -11 -41 1 -34 0.5 0.1 16 -15 10 -8 1 -34 0.7 0.1 10 -5 50 -32 1 -34 0.8 0.4 -26 -44 29 -47 2 -34 0.7 0.8 42 0 48 9 2 -34 0.2 0.3 -21 -33 46 -38 2 -34 0.8 0.7 39 18 -4 -48 1 -34 0.9 0.4 -14 -41 -32 -48 1 -34 0.1 0.5 36 22 -34 -39 1 -34 0.9 0.9 50 -3 -16 -39 1 -34 0.1 0.1 -39 -49 -15 -32 2 -34 0.1 0.2 -17 -28 -28 -30 1 -34 0.8 0.6 22 -48 13 5 2 -34 0.6 0.9 8 -6 47 11 2 -34 0.4 0.9 -7 -10 -5 -17 1 -34 0.7 0.8 -10 -19 5 -4 2 -34 0.9 0.4 26 -17 -34 -42 1 -34 0.4 0.7 36 14 39 -49 1 -34 0.2 0.3 19 11 32 22 2 -34 0.7 0.8 34 -20 48 -3 2 -34 0.5 0.2 22 -17 -8 -49 1 -34 0.3 0.5 45 -25 26 -2 2 -34 0.4 0.4 -28 -46 35 -50 2 -34 0.7 0.1 -11 -29 22 -44 1 -34 0.4 0.9 10 -15 32 -22 2 -34 0.3 0.6 -3 -20 -23 -31 1 -34 0.5 0.6 36 15 22 6 1 -34 0.5 0.7 -12 -23 -9 -25 2 -34 0.2 0.4 42 25 -14 -37 1 -34 0.2 0.6 22 3 36 -29 2 -34 0.3 0.8 2 -15 21 19 2 -34 0.6 0.5 13 -13 35 -28 2 -34 0.7 0.9 32 28 -3 -10 1 -34 0.5 0.4 -36 -42 44 32 2 -34 0.6 0.4 -20 -31 39 -34 2 -34 0.4 0.2 15 -45 32 16 2 -34 0.4 0.2 46 43 40 20 1 -34 0.5 0.8 34 -9 -32 -37 1 -34 0.4 0.1 -19 -33 -12 -36 1 -34 0.7 0.1 12 -44 18 12 2 -34 0.6 0.3 -20 -45 -11 -19 2 -34 0.5 0.4 43 -35 31 -10 1 -34 0.5 0.1 -6 -35 -12 -24 1 -34 0.6 0.1 44 -18 31 5 2 -34 0.3 0.2 -4 -44 -8 -31 2 -34 0.5 0.3 27 -37 10 8 2 -34 0.5 0.8 21 -19 -1 -12 1 -34 0.7 0.3 8 5 48 12 2 -34 0.6 0.9 46 44 32 15 1 -34 0.1 0.2 -41 -44 41 37 2 -34 0.7 0.4 13 -18 17 -41 1 -34 0.3 0.9 -9 -44 23 7 2 -34 0.6 0.9 -23 -34 26 12 2 -34 0.9 0.1 5 -50 4 -17 1 -34 0.1 0.2 48 37 3 -5 1 -34 0.9 0.8 37 5 -1 -50 1 -34 0.4 0.3 48 -48 -26 -38 1 -34 0.5 0.8 43 -21 -20 -25 1 -34 0.3 0.5 45 40 35 4 1 -34 0.1 0.1 20 -15 -18 -32 1 -34 0.3 0.3 32 -43 12 -14 2 -34 0.6 0.8 26 -9 36 -34 2 -34 0.8 0.6 3 -14 9 -5 2 -34 0.8 0.3 42 -21 39 -5 1 -34 0.6 0.8 30 29 -2 -13 1 -34 0.3 0.3 38 30 14 -21 1 -34 0.5 0.4 6 -31 23 0 2 -34 0.7 0.6 18 -32 36 17 2 -34 0.3 0.7 19 14 47 6 2 -34 0.5 0.6 1 -15 39 -44 2 -34 0.8 0.1 -28 -40 48 38 2 -34 0.8 0.7 5 -24 -17 -35 1 -34 0.7 0.5 15 -33 3 -22 1 -34 0.8 0.5 -6 -48 -20 -36 1 -34 0.2 0.4 44 -7 48 -47 1 -34 0.2 0.6 -13 -44 40 25 2 -34 0.5 0.7 -16 -39 40 -27 2 -34 0.4 0.2 -13 -30 -24 -44 1 -34 0.8 0.9 15 -19 39 -20 2 -34 0.2 0.5 0 -9 33 -7 2 -34 0.4 0.7 21 -40 37 -23 2 -34 0.9 0.4 31 -1 17 -7 1 -34 0.6 0.6 -15 -20 -26 -32 1 -34 0.1 0.1 -18 -49 30 0 2 -34 0.1 0.6 -35 -48 35 -42 2 -34 0.3 0.2 38 -46 18 -3 2 -34 0.5 0.2 -34 -46 37 -2 2 -34 0.8 0.8 25 -3 13 -8 1 -34 0.2 0.1 13 -16 23 -8 2 -34 0.8 0.9 38 -32 -6 -31 1 -34 0.5 0.8 22 -23 32 -2 2 -34 0.5 0.1 41 35 -18 -34 1 -34 0.1 0.4 37 -33 24 7 2 -34 0.4 0.1 33 -36 18 -22 1 -34 0.3 0.1 -21 -29 -35 -49 1 -34 0.1 0.5 13 -40 8 -27 2 -34 0.1 0.7 -14 -41 -10 -16 2 -34 0.3 0.9 31 -7 30 10 2 -34 0.3 0.6 -15 -31 48 -12 2 -34 0.6 0.2 30 17 43 -13 
1 -34 0.7 0.4 50 -31 -3 -25 1 -34 0.3 0.4 28 12 27 -12 1 -34 0.2 0.4 26 -24 6 -40 1 -34 0.6 0.4 47 -23 49 20 2 -34 0.2 0.5 2 -37 45 -29 2 -34 0.8 0.3 17 -15 -8 -46 1 -34 0.4 0.3 41 -38 43 -43 1 -34 0.7 0.9 36 6 -15 -17 1 -34 0.9 0.2 8 -29 18 -22 1 -34 0.9 0.7 46 8 6 -49 1 -34 0.2 0.2 4 3 45 31 2 -34 0.9 0.4 -36 -46 48 -48 2 -34 0.2 0.2 -1 -3 18 -41 1 -34 0.8 0.5 39 -33 0 -38 1 -34 0.8 0.8 33 20 27 26 1 -34 0.3 0.6 24 14 8 1 1 -34 0.5 0.3 -7 -48 42 -3 2 -34 0.9 0.4 37 34 36 30 1 -34 0.5 0.7 -19 -25 3 -32 2 -34 0.6 0.3 26 17 32 -38 1 -34 0.1 0.4 48 15 47 -6 1 -34 0.9 0.2 32 -28 8 -46 1 -34 0.6 0.6 -19 -28 38 -15 2 -34 0.4 0.5 1 -31 -4 -19 2 -34 0.4 0.3 18 -49 50 -44 2 -34 0.5 0.5 1 -39 -21 -32 1 -34 0.2 0.5 48 32 9 -1 1 -34 0.3 0.9 20 -47 -25 -29 1 -34 0.1 0.5 43 -26 -38 -41 1 -34 0.9 0.3 31 -17 24 -11 1 -34 0.1 0.3 -23 -27 44 -18 2 -34 0.1 0.5 46 -5 25 5 2 -34 0.5 0.3 49 28 -10 -38 1 -34 0.8 0.3 -5 -32 32 -50 2 -34 0.9 0.4 22 19 37 11 1 -34 0.2 0.6 6 -9 42 40 2 -34 0.4 0.5 8 -16 -8 -11 1 -34 0.3 0.4 47 -32 3 -26 1 -34 0.4 0.3 46 -2 -4 -24 1 -34 0.4 0.6 43 35 7 -5 1 -34 0.5 0.3 8 -6 39 -5 2 -34 0.2 0.3 33 19 46 -48 1 -34 0.7 0.1 36 28 12 8 1 -34 0.8 0.6 45 -32 -31 -50 1 -34 0.5 0.1 6 -43 -17 -41 1 -34 0.5 0.8 24 -7 47 -22 2 -34 0.9 0.1 -16 -19 -16 -50 1 -34 0.2 0.8 -11 -34 15 -14 2 -34 0.7 0.2 28 13 42 6 1 -34 0.3 0.5 17 -26 24 6 2 -34 0.5 0.5 -26 -37 -8 -37 2 -34 0.7 0.7 -3 -9 1 -43 2 -34 0.5 0.1 31 -21 -17 -39 1 -34 0.6 0.2 42 -44 23 -28 1 -34 0.5 0.2 3 -25 9 -11 2 -34 0.7 0.8 28 -5 19 -6 2 -34 0.4 0.6 22 -47 26 6 2 -34 0.7 0.2 42 -7 24 23 1 -34 0.9 0.6 42 36 -25 -27 1 -34 0.9 0.4 28 -19 41 -14 1 -34 0.1 0.5 31 -33 2 -23 2 -34 0.3 0.6 18 -45 44 8 2 -34 0.4 0.7 37 22 33 5 1 -34 0.1 0.8 7 -47 -11 -42 2 -34 0.9 0.1 17 -45 33 18 2 -34 0.2 0.1 19 -16 33 -47 1 -34 0.7 0.8 1 -42 17 10 2 -34 0.4 0.4 -15 -26 8 -14 2 -34 0.7 0.3 36 -29 13 -43 1 -34 0.2 0.5 38 19 34 -5 2 -34 0.9 0.8 48 -4 11 -17 1 -34 0.8 0.3 -1 -36 43 31 2 -34 0.8 0.3 9 -2 8 -50 1 -34 0.5 0.5 50 -29 39 5 2 -34 0.8 0.9 25 15 5 -35 1 -34 0.7 0.9 35 -20 -2 -24 1 -34 0.7 0.4 30 -40 48 -15 1 -34 0.6 0.4 8 -44 36 -10 2 -34 0.4 0.3 17 -9 26 1 2 -34 0.5 0.3 -22 -25 48 30 2 -34 0.6 0.7 1 -39 28 -43 2 -34 0.6 0.2 36 -11 31 -15 1 -34 0.4 0.5 38 0 -29 -31 1 -34 0.6 0.3 9 -14 11 4 2 -34 0.2 0.5 -34 -50 41 -27 2 -34 0.9 0.7 28 -4 33 11 2 -34 0.8 0.5 -13 -31 49 -26 2 -34 0.1 0.5 45 16 8 -40 1 -34 0.3 0.3 15 -46 9 0 2 -34 0.4 0.7 35 -48 34 2 2 -34 0.8 0.3 33 -42 10 4 1 -35 0.9 0.1 49 -42 26 -3 1 -35 0.3 0.3 9 0 25 -42 1 -35 0.4 0.9 23 -48 37 13 2 -35 0.3 0.4 8 -7 -12 -17 1 -35 0.9 0.7 6 -14 6 -35 2 -35 0.9 0.6 18 -7 11 -14 1 -35 0.8 0.9 -19 -37 28 -34 2 -35 0.1 0.1 5 -11 41 33 2 -35 0.6 0.2 45 26 -17 -28 1 -35 0.8 0.1 4 -10 43 8 2 -35 0.6 0.8 18 -45 -15 -34 1 -35 0.3 0.2 33 -32 15 -19 1 -35 0.4 0.1 47 28 5 -36 1 -35 0.7 0.1 31 -35 41 31 2 -35 0.2 0.2 6 -4 -28 -49 1 -35 0.1 0.1 -11 -40 31 -9 2 -35 0.3 0.9 26 -49 9 -25 2 -35 0.2 0.3 6 -30 46 13 2 -35 0.9 0.9 13 -2 48 -16 2 -35 0.2 0.8 50 49 36 -13 1 -35 0.1 0.8 11 -45 43 -33 2 -35 0.2 0.7 39 -23 -36 -49 1 -35 0.9 0.6 -20 -21 16 -23 2 -35 0.7 0.2 -8 -18 40 -30 2 -35 0.2 0.5 33 24 23 -31 1 -35 0.7 0.6 13 -3 42 -50 1 -35 0.7 0.6 38 -12 -6 -15 1 -35 0.3 0.5 25 -38 32 5 2 -35 0.2 0.8 35 -44 15 -31 2 -35 0.4 0.1 37 -45 5 -28 1 -35 0.5 0.4 38 -37 -19 -49 1 -35 0.5 0.1 27 18 27 -16 1 -35 0.4 0.1 -27 -33 49 26 2 -35 0.5 0.7 9 -10 50 9 2 -35 0.8 0.8 49 -16 38 19 2 -35 0.4 0.3 -17 -42 2 -43 1 -35 0.4 0.9 -33 -35 18 -40 2 -35 0.5 0.8 -6 -11 39 15 2 -35 0.1 0.5 45 -13 -14 -19 1 -35 0.4 0.7 31 -15 45 -24 2 -35 0.2 
0.5 4 -21 50 9 2 -35 0.1 0.9 45 -8 -14 -33 1 -35 0.6 0.9 21 -25 26 -45 2 -35 0.2 0.6 -25 -26 -6 -9 2 -35 0.6 0.6 46 27 12 -8 1 -35 0.9 0.8 40 36 7 5 1 -35 0.9 0.7 23 -10 36 -16 1 -35 0.5 0.4 18 -25 -17 -29 1 -35 0.8 0.5 6 -24 26 -21 2 -35 0.4 0.9 48 -39 9 -48 2 -35 0.1 0.9 -27 -46 40 -34 2 -35 0.6 0.1 32 -39 -39 -42 1 -35 0.2 0.1 36 -46 -10 -26 1 -35 0.6 0.7 -35 -42 23 -34 2 -35 0.1 0.3 33 11 2 -30 1 -35 0.2 0.6 46 -23 21 14 2 -35 0.9 0.1 35 -25 -29 -50 1 -35 0.2 0.4 -6 -11 -16 -44 1 -35 0.4 0.8 -19 -24 36 5 2 -35 0.6 0.4 32 23 19 0 1 -35 0.3 0.3 -3 -24 1 -50 1 -35 0.6 0.1 24 16 2 -43 1 -35 0.3 0.8 25 -16 20 -44 2 -35 0.3 0.9 46 -8 45 -24 2 -35 0.1 0.7 38 -26 -16 -21 1 -35 0.7 0.6 9 -9 19 1 2 -35 0.1 0.1 43 -44 1 -23 2 -35 0.3 0.7 15 -26 30 -2 2 -35 0.2 0.9 -19 -50 19 -16 2 -35 0.2 0.2 28 -4 -22 -23 1 -35 0.4 0.8 48 0 38 30 2 -35 0.5 0.7 17 9 23 -11 1 -35 0.9 0.2 36 -6 45 7 1 -35 0.5 0.7 -12 -45 -21 -39 2 -35 0.2 0.7 29 2 43 -26 2 -35 0.8 0.6 17 14 36 4 1 -35 0.7 0.3 9 -3 23 15 2 -35 0.7 0.7 29 -31 11 -28 1 -35 0.8 0.6 10 -38 28 17 2 -35 0.4 0.2 -6 -13 24 -25 1 -35 0.2 0.3 23 -24 21 19 2 -35 0.5 0.1 40 10 28 -18 1 -35 0.5 0.6 -27 -32 13 -40 2 -35 0.3 0.2 39 16 24 17 1 -35 0.6 0.5 49 -35 25 -5 1 -35 0.5 0.5 50 17 33 -48 1 -35 0.6 0.8 49 -11 15 -31 1 -35 0.7 0.2 -4 -16 -3 -48 1 -35 0.4 0.7 38 -23 15 11 2 -35 0.7 0.7 14 -19 20 -45 1 -35 0.4 0.5 43 13 28 -25 1 -35 0.7 0.8 17 -45 42 7 2 -35 0.6 0.3 -11 -37 -3 -40 1 -35 0.2 0.6 38 -29 33 4 2 -35 0.7 0.3 48 -21 49 6 1 -35 0.3 0.6 46 -45 35 -25 2 -35 0.7 0.6 -20 -42 3 -16 2 -35 0.7 0.2 40 -39 36 32 2 -35 0.5 0.2 13 -47 22 9 2 -35 0.2 0.9 25 19 -27 -49 1 -35 0.6 0.6 -9 -49 43 -29 2 -35 0.4 0.3 27 16 21 -7 1 -35 0.2 0.1 20 1 22 -27 1 -35 0.9 0.1 -40 -41 37 26 2 -35 0.3 0.6 -24 -48 9 -19 2 -35 0.9 0.5 35 20 28 -13 1 -35 0.5 0.1 -21 -43 32 10 2 -35 0.9 0.1 46 -30 37 7 1 -35 0.2 0.4 30 -15 -4 -29 1 -35 0.2 0.9 -15 -35 6 -12 2 -35 0.6 0.2 32 -48 -4 -9 1 -35 0.3 0.1 -18 -30 49 31 2 -35 0.2 0.6 9 5 -3 -36 1 -35 0.9 0.1 46 -7 28 -39 1 -35 0.1 0.7 40 -44 11 -29 2 -35 0.8 0.8 15 0 43 -22 2 -35 0.4 0.5 46 -38 45 -31 1 -35 0.3 0.1 24 -17 28 -27 1 -35 0.8 0.2 -11 -35 50 37 2 -35 0.7 0.1 -5 -48 -13 -37 1 -35 0.9 0.8 30 -11 -13 -17 1 -35 0.9 0.6 22 2 37 -8 2 -35 0.2 0.3 20 -7 34 -32 2 -35 0.3 0.5 36 -12 -27 -43 1 -35 0.3 0.3 48 30 -2 -28 1 -35 0.1 0.9 -14 -45 8 -14 2 -35 0.9 0.5 6 -41 13 -40 1 -35 0.7 0.1 36 -43 -27 -38 1 -35 0.5 0.6 9 8 12 -37 1 -35 0.3 0.3 47 -7 -4 -25 1 -35 0.7 0.7 4 -31 46 -49 2 -35 0.8 0.5 43 40 44 -47 1 -35 0.4 0.3 -13 -37 37 -37 2 -35 0.3 0.2 34 -10 -1 -32 1 -35 0.6 0.9 30 -13 49 -15 2 -35 0.5 0.6 -31 -46 -6 -28 2 -35 0.4 0.3 -32 -43 -43 -48 2 -35 0.8 0.8 14 -42 17 2 2 -35 0.1 0.8 -18 -38 -17 -48 2 -35 0.9 0.1 24 -26 0 -22 1 -35 0.5 0.5 41 -28 -1 -31 1 -35 0.1 0.5 33 -22 50 -28 2 -35 0.2 0.3 23 20 7 -46 1 -35 0.4 0.6 -17 -33 48 -5 2 -35 0.3 0.1 -17 -44 47 46 2 -35 0.4 0.8 2 -15 47 15 2 -35 0.4 0.1 41 0 35 -40 1 -35 0.9 0.1 44 23 47 -17 1 -35 0.5 0.9 -6 -36 15 -16 2 -35 0.9 0.8 43 -11 -5 -20 1 -35 0.2 0.6 7 3 -7 -18 1 -35 0.9 0.6 31 -31 34 1 1 -35 0.1 0.7 40 17 -16 -33 1 -35 0.1 0.1 8 -30 44 -36 2 -35 0.9 0.9 12 -48 13 8 2 -35 0.8 0.5 -16 -38 46 -5 2 -35 0.9 0.7 17 5 29 -26 1 -35 0.5 0.7 50 39 33 -44 1 -35 0.7 0.7 8 -23 9 -50 1 -35 0.9 0.9 48 2 33 12 1 -35 0.5 0.9 38 -32 14 -46 2 -35 0.7 0.9 20 -40 47 8 2 -35 0.1 0.2 37 -9 46 -16 2 -35 0.4 0.8 -3 -16 47 35 2 -35 0.2 0.3 21 -50 23 1 2 -35 0.7 0.8 0 -5 30 20 2 -35 0.7 0.5 39 27 -7 -21 1 -35 0.4 0.8 -8 -49 39 3 2 -35 0.6 0.3 44 8 -1 -12 1 -35 0.2 0.7 12 -4 -14 -38 1 -35 0.7 0.4 0 -26 4 -17 2 
-35 0.9 0.8 33 -15 6 -10 1 -35 0.9 0.3 38 -1 31 -24 1 -35 0.3 0.4 -2 -6 21 -1 2 -35 0.6 0.8 -11 -14 19 -10 2 -35 0.5 0.4 -19 -32 17 13 2 -35 0.7 0.8 21 -37 9 -44 1 -35 0.1 0.3 -26 -50 14 11 2 -35 0.4 0.8 24 -6 1 -18 1 -35 0.5 0.2 -26 -29 12 -36 2 -35 0.4 0.4 41 -47 39 -5 2 -35 0.9 0.1 -45 -48 27 -3 2 -35 0.4 0.8 30 -7 50 20 2 -35 0.4 0.3 12 -50 27 -44 2 -35 0.6 0.6 24 -7 32 -7 2 -35 0.9 0.5 46 34 -15 -24 1 -35 0.2 0.7 20 5 7 -23 1 -35 0.4 0.5 29 18 -42 -46 1 -35 0.3 0.6 28 -6 -19 -21 1 -35 0.8 0.6 6 2 -13 -16 1 -35 0.2 0.3 38 -47 -28 -33 1 -35 0.5 0.1 -7 -12 -8 -34 1 -35 0.4 0.2 -30 -45 15 -50 2 -35 0.8 0.4 19 -49 -3 -35 1 -35 0.5 0.4 9 -42 32 -12 2 -35 0.1 0.8 38 -40 46 -49 2 -35 0.7 0.5 4 -17 2 -40 1 -35 0.1 0.2 26 12 1 -14 1 -35 0.6 0.3 -19 -34 -20 -25 1 -35 0.7 0.3 23 -10 -5 -50 1 -35 0.1 0.5 -9 -25 -6 -48 1 -35 0.2 0.6 12 -29 28 21 2 -35 0.6 0.6 -1 -9 -15 -36 1 -35 0.7 0.9 48 29 34 -8 1 -35 0.3 0.6 22 -43 -3 -20 2 -35 0.9 0.2 35 24 43 2 1 -35 0.1 0.3 35 26 45 -43 1 -35 0.5 0.1 46 -40 3 2 1 -35 0.3 0.8 -1 -13 34 -14 2 -35 0.3 0.1 35 -9 45 22 2 -35 0.1 0.5 -35 -41 32 -33 2 -35 0.6 0.4 2 -44 -2 -23 1 -35 0.9 0.6 -44 -50 12 -28 2 -35 0.8 0.6 29 -5 5 -13 1 -35 0.9 0.7 35 25 44 7 2 -35 0.5 0.1 20 -34 33 24 2 -35 0.6 0.5 41 -7 37 26 2 -35 0.2 0.2 34 4 13 -32 1 -35 0.4 0.6 -22 -32 32 11 2 -35 0.9 0.8 8 2 48 24 2 -35 0.8 0.7 -25 -29 -2 -25 2 -35 0.6 0.2 39 -15 48 47 2 -35 0.7 0.5 10 -24 40 34 2 -35 0.3 0.6 -2 -37 42 -38 2 -35 0.7 0.1 37 -38 50 -46 1 -35 0.5 0.2 -7 -44 9 -30 2 -35 0.1 0.8 26 -35 37 11 2 -35 0.5 0.3 38 -11 49 9 2 -35 0.2 0.5 8 -27 31 -27 2 -35 0.7 0.2 36 -29 47 15 2 -35 0.5 0.9 35 26 38 -10 1 -35 0.5 0.8 7 -13 47 -44 2 -35 0.5 0.5 7 -24 -2 -7 2 -35 0.9 0.1 0 -50 26 14 2 -35 0.5 0.7 31 12 18 6 1 -35 0.7 0.7 12 2 17 -21 2 -35 0.3 0.8 -4 -47 14 5 2 -35 0.5 0.6 4 -2 8 -41 1 -35 0.5 0.4 -15 -38 39 -19 2 -35 0.5 0.4 37 0 28 -37 1 -35 0.1 0.9 47 8 -7 -46 1 -35 0.6 0.7 -11 -35 20 -33 2 -35 0.6 0.6 44 -31 34 8 2 -35 0.4 0.9 -30 -32 11 -49 2 -35 0.8 0.9 12 -25 31 -3 2 -35 0.4 0.3 -17 -32 30 -23 2 -35 0.8 0.5 15 6 24 20 2 -35 0.1 0.9 26 -48 33 3 2 -35 0.6 0.9 1 -32 30 -20 2 -35 0.8 0.8 -2 -21 15 -21 2 -35 0.7 0.9 31 -35 -6 -24 1 -35 0.3 0.7 6 -11 39 -1 2 -35 0.7 0.1 0 -47 47 -8 2 -35 0.3 0.7 41 -49 42 -48 2 -35 0.6 0.4 12 -12 -48 -49 1 -35 0.5 0.9 22 -29 38 -1 2 -35 0.9 0.2 -20 -38 23 -44 2 -35 0.8 0.8 10 -15 -20 -31 1 -35 0.6 0.5 18 -37 15 -10 2 -35 0.9 0.2 41 5 16 -19 1 -35 0.3 0.8 25 -14 10 -18 2 -35 0.9 0.7 8 -41 27 4 2 -35 0.8 0.1 46 -49 30 23 1 -35 0.4 0.3 17 -44 44 37 2 -35 0.2 0.8 29 -19 -2 -27 1 -35 0.1 0.9 11 -45 8 -46 2 -35 0.3 0.1 20 -13 5 -44 1 -35 0.1 0.1 44 -6 5 -23 1 -35 0.5 0.9 20 -43 43 -19 2 -35 0.1 0.8 45 -33 15 -43 2 -35 0.7 0.2 30 18 38 -9 1 -35 0.4 0.2 28 24 5 -10 1 -35 0.8 0.2 -2 -23 42 32 2 -35 0.9 0.9 42 -15 -27 -47 1 -35 0.9 0.9 25 -16 27 -17 2 -35 0.2 0.4 42 23 -2 -40 1 -35 0.8 0.5 43 -48 -34 -40 1 -35 0.4 0.5 6 -17 6 -27 1 -35 0.4 0.7 14 -36 -19 -40 1 -35 0.8 0.9 34 -36 30 12 2 -35 0.7 0.5 32 -37 34 -42 1 -35 0.7 0.1 37 0 -18 -45 1 -35 0.3 0.7 -15 -38 6 -6 2 -35 0.2 0.4 31 -5 -5 -8 1 -35 0.9 0.2 13 12 -30 -47 1 -35 0.6 0.8 44 41 17 -47 1 -35 0.4 0.6 11 -45 -26 -45 1 -35 0.5 0.1 10 6 16 -22 1 -35 0.8 0.5 -9 -45 37 4 2 -35 0.1 0.4 36 29 7 -9 1 -35 0.2 0.2 33 -4 28 -45 1 -35 0.7 0.6 48 7 22 -24 1 -35 0.4 0.2 37 -39 27 -13 2 -35 0.4 0.5 17 -16 16 1 2 -35 0.2 0.4 24 -8 42 -49 1 -35 0.5 0.8 40 21 1 -5 1 -35 0.8 0.3 36 19 -21 -38 1 -35 0.8 0.5 -6 -22 -30 -44 1 -35 0.3 0.2 31 15 -12 -44 1 -35 0.5 0.6 -5 -13 49 -34 2 -36 0.4 0.6 21 -7 -19 -46 1 -36 0.2 0.2 -10 -46 37 
-31 2 -36 0.5 0.6 -21 -25 10 -33 2 -36 0.6 0.4 -1 -47 20 -36 2 -36 0.7 0.4 45 -44 1 -20 1 -36 0.6 0.5 32 -14 -21 -45 1 -36 0.9 0.6 20 -45 -6 -15 1 -36 0.5 0.7 -9 -34 11 -9 2 -36 0.3 0.6 32 16 -13 -23 1 -36 0.6 0.2 47 -14 13 -10 1 -36 0.4 0.2 47 1 2 -28 1 -36 0.5 0.8 34 -23 5 -10 1 -36 0.4 0.8 36 -24 1 -42 1 -36 0.1 0.5 14 -31 25 21 2 -36 0.4 0.5 48 18 10 -5 1 -36 0.2 0.7 34 -12 -6 -34 1 -36 0.8 0.5 19 -42 -38 -39 1 -36 0.2 0.5 4 -32 22 -42 2 -36 0.8 0.3 26 -30 -4 -6 1 -36 0.3 0.3 4 -41 39 2 2 -36 0.9 0.3 13 -35 7 -31 1 -36 0.7 0.8 50 -20 -25 -48 1 -36 0.2 0.2 -14 -18 -29 -37 1 -36 0.6 0.3 46 10 12 -37 1 -36 0.6 0.2 49 4 -17 -47 1 -36 0.1 0.6 5 2 23 -36 2 -36 0.3 0.3 26 21 -22 -32 1 -36 0.4 0.4 14 -20 40 -35 2 -36 0.3 0.2 -35 -38 47 31 2 -36 0.2 0.8 6 -24 -3 -35 2 -36 0.9 0.7 40 34 28 -44 1 -36 0.2 0.6 -15 -16 11 -14 2 -36 0.2 0.9 -11 -30 -5 -13 2 -36 0.4 0.8 43 20 42 -48 1 -36 0.7 0.5 28 19 30 -17 1 -36 0.9 0.6 27 -42 42 7 2 -36 0.8 0.7 -9 -36 23 -26 2 -36 0.5 0.9 35 9 30 21 2 -36 0.6 0.6 11 8 2 -12 1 -36 0.3 0.5 33 -30 48 -16 2 -36 0.4 0.6 -29 -49 43 -40 2 -36 0.9 0.1 49 5 -38 -41 1 -36 0.8 0.5 31 -13 49 -14 1 -36 0.8 0.7 10 -42 31 29 2 -36 0.7 0.1 35 -4 29 4 1 -36 0.9 0.8 48 -37 -16 -24 1 -36 0.8 0.6 23 -19 44 31 2 -36 0.8 0.3 7 -9 8 -31 1 -36 0.1 0.4 -20 -49 30 -35 2 -36 0.2 0.9 -25 -27 30 -17 2 -36 0.1 0.3 42 -14 44 43 2 -36 0.5 0.6 46 -26 -11 -31 1 -36 0.8 0.1 -24 -39 16 -24 2 -36 0.3 0.8 23 -14 -4 -27 1 -36 0.7 0.1 15 9 50 -9 1 -36 0.9 0.5 40 11 21 -29 1 -36 0.4 0.7 24 -50 41 -18 2 -36 0.5 0.5 14 -14 -18 -48 1 -36 0.8 0.1 21 -25 -1 -22 1 -36 0.1 0.7 3 -17 46 -22 2 -36 0.4 0.5 30 9 45 -1 1 -36 0.5 0.5 5 -32 26 -39 2 -36 0.6 0.3 43 40 38 -8 1 -36 0.7 0.2 -24 -27 23 -4 2 -36 0.1 0.5 12 -32 20 -38 2 -36 0.8 0.8 4 -50 36 -12 2 -36 0.2 0.2 40 26 47 -43 1 -36 0.4 0.8 -25 -44 32 -39 2 -36 0.4 0.9 24 8 23 -15 2 -36 0.4 0.7 23 0 -14 -44 1 -36 0.7 0.5 2 -43 -43 -47 1 -36 0.8 0.2 36 -14 5 -17 1 -36 0.7 0.9 29 15 -27 -40 1 -36 0.6 0.7 13 -49 0 -17 2 -36 0.9 0.3 3 1 50 41 2 -36 0.2 0.2 -18 -29 40 -17 2 -36 0.7 0.9 24 -29 17 -1 2 -36 0.7 0.9 -33 -41 -27 -34 2 -36 0.9 0.9 9 8 7 2 1 -36 0.9 0.3 39 26 -32 -46 1 -36 0.1 0.9 -11 -33 32 23 2 -36 0.4 0.1 28 -38 -22 -31 1 -36 0.1 0.3 36 -6 32 -23 2 -36 0.4 0.3 20 14 -12 -50 1 -36 0.8 0.3 41 7 -6 -20 1 -36 0.9 0.9 -6 -11 -2 -6 2 -36 0.3 0.7 50 -18 45 -20 2 -36 0.9 0.4 48 38 -1 -24 1 -36 0.4 0.1 48 43 26 11 1 -36 0.2 0.3 50 -9 -41 -43 1 -36 0.5 0.7 16 -13 3 -46 1 -36 0.9 0.1 21 -22 25 23 2 -36 0.5 0.8 -23 -49 40 -18 2 -36 0.5 0.9 4 -12 2 -7 2 -36 0.7 0.8 -25 -44 38 -23 2 -36 0.5 0.3 -3 -22 45 -17 2 -36 0.3 0.7 49 -46 9 -6 2 -36 0.8 0.5 32 -17 40 -7 1 -36 0.6 0.8 8 -30 -33 -47 1 -36 0.5 0.5 45 -30 36 -19 2 -36 0.3 0.3 26 -3 11 -1 2 -36 0.7 0.5 -19 -24 3 -47 2 -36 0.9 0.6 26 -22 41 -18 1 -36 0.1 0.7 22 -33 48 23 2 -36 0.3 0.8 28 -18 37 -28 2 -36 0.8 0.3 27 16 -13 -32 1 -36 0.3 0.9 6 3 26 -26 2 -36 0.9 0.7 41 -22 -4 -21 1 -36 0.9 0.7 39 -3 30 21 1 -36 0.7 0.8 48 44 23 -7 1 -36 0.1 0.8 -2 -9 -23 -27 1 -36 0.3 0.6 45 1 -21 -41 1 -36 0.8 0.5 23 -10 -6 -22 1 -36 0.5 0.9 40 6 0 -27 1 -36 0.1 0.9 33 19 33 22 2 -36 0.9 0.2 28 -7 31 -43 1 -36 0.3 0.9 32 -46 -41 -50 1 -36 0.7 0.8 -3 -50 -2 -38 2 -36 0.3 0.6 46 7 30 -36 1 -36 0.8 0.9 -8 -50 49 -12 2 -36 0.1 0.8 20 9 46 5 2 -36 0.7 0.7 -12 -19 25 3 2 -36 0.5 0.6 35 2 -3 -32 1 -36 0.2 0.8 -12 -44 -9 -34 2 -36 0.4 0.2 -5 -7 48 -45 1 -36 0.7 0.2 42 -43 32 25 2 -36 0.6 0.1 36 20 50 -50 1 -36 0.2 0.3 38 3 3 -27 1 -36 0.7 0.3 15 -13 37 -37 1 -36 0.2 0.2 40 30 16 2 1 -36 0.1 0.8 46 40 30 -20 1 -36 0.2 0.5 22 1 34 7 2 -36 0.1 0.2 
33 3 13 -45 1 -36 0.7 0.8 34 18 -39 -40 1 -36 0.7 0.9 40 -15 49 -29 2 -36 0.4 0.9 -2 -11 31 27 2 -36 0.7 0.6 -6 -33 36 2 2 -36 0.5 0.7 20 -7 46 -15 2 -36 0.9 0.4 25 -9 41 14 2 -36 0.9 0.9 37 -14 0 -6 1 -36 0.9 0.6 10 -22 21 -42 1 -36 0.2 0.4 31 7 -10 -15 1 -36 0.5 0.3 11 5 50 -18 1 -36 0.1 0.1 34 -20 22 -43 1 -36 0.8 0.4 22 -26 -36 -48 1 -36 0.4 0.4 13 9 42 -41 1 -36 0.5 0.2 -24 -38 49 -27 2 -36 0.1 0.5 -21 -41 34 -33 2 -36 0.4 0.6 29 -20 10 3 2 -36 0.7 0.2 3 -30 29 -14 1 -36 0.7 0.3 -28 -37 26 -33 2 -36 0.4 0.4 -10 -20 24 7 2 -36 0.6 0.1 1 -29 48 4 2 -36 0.5 0.2 -27 -40 19 -18 2 -36 0.9 0.2 -2 -11 39 3 2 -36 0.9 0.1 36 -47 -12 -39 1 -36 0.9 0.4 19 11 41 1 1 -36 0.6 0.1 36 -43 38 25 2 -36 0.1 0.5 -13 -25 -21 -23 2 -36 0.1 0.3 -4 -30 49 28 2 -36 0.2 0.1 -26 -50 -8 -24 2 -36 0.2 0.1 13 -36 49 22 2 -36 0.1 0.5 21 -17 36 19 2 -36 0.2 0.3 3 -26 40 13 2 -36 0.7 0.9 19 -21 29 16 2 -36 0.5 0.8 22 -46 29 -41 2 -36 0.1 0.7 16 14 -34 -43 1 -36 0.4 0.5 -36 -50 17 -3 2 -36 0.3 0.2 36 5 24 -23 1 -36 0.6 0.6 -8 -44 50 -24 2 -36 0.5 0.2 31 -40 -5 -33 1 -36 0.5 0.4 39 5 -3 -36 1 -36 0.8 0.5 4 -47 31 18 2 -36 0.4 0.5 21 -37 43 32 2 -36 0.5 0.2 -16 -36 40 -28 2 -36 0.4 0.4 38 37 26 -45 1 -36 0.2 0.6 16 -30 -3 -7 2 -36 0.3 0.5 -12 -41 48 8 2 -36 0.6 0.6 17 7 42 -6 2 -36 0.1 0.1 38 21 47 15 2 -36 0.3 0.5 20 -10 28 -15 2 -36 0.1 0.9 7 -11 18 -35 2 -36 0.4 0.8 45 -25 30 -41 2 -36 0.7 0.6 -32 -40 34 -4 2 -36 0.9 0.3 33 -2 26 -29 1 -36 0.3 0.5 40 -32 38 -24 2 -36 0.1 0.8 16 -10 -44 -50 1 -36 0.3 0.8 50 -40 17 14 2 -36 0.9 0.8 19 11 13 -28 1 -36 0.7 0.5 6 -14 40 -28 2 -36 0.8 0.5 -47 -48 0 -37 2 -36 0.6 0.3 25 18 -36 -43 1 -36 0.8 0.7 48 -30 46 17 2 -36 0.8 0.9 19 -19 -37 -46 1 -36 0.5 0.9 -21 -29 41 -16 2 -36 0.3 0.9 48 4 35 0 2 -36 0.6 0.3 39 -42 28 -30 1 -36 0.9 0.2 36 -15 9 -24 1 -36 0.7 0.2 8 -46 -24 -41 1 -36 0.8 0.6 16 -25 25 -8 2 -36 0.8 0.4 38 -7 25 -4 1 -36 0.5 0.3 46 -23 -1 -8 1 -36 0.5 0.5 43 -32 0 -50 1 -36 0.2 0.1 10 -46 16 -25 2 -36 0.1 0.1 -38 -48 -38 -44 2 -36 0.7 0.2 8 -2 32 -34 1 -36 0.2 0.2 31 -41 33 12 2 -36 0.8 0.1 19 -21 45 34 2 -36 0.4 0.2 -36 -43 49 -13 2 -36 0.7 0.9 12 -40 15 3 2 -36 0.9 0.5 15 12 20 19 2 -36 0.6 0.1 38 27 33 -44 1 -36 0.5 0.9 38 26 44 40 2 -36 0.5 0.5 -6 -34 34 -21 2 -36 0.8 0.5 22 -19 38 0 2 -36 0.7 0.1 -28 -42 46 17 2 -36 0.2 0.3 40 20 13 -38 1 -36 0.8 0.5 48 -27 -35 -44 1 -36 0.9 0.8 -27 -50 -19 -28 2 -36 0.6 0.9 43 15 35 7 2 -36 0.9 0.4 41 -19 -26 -39 1 -36 0.1 0.5 4 -33 44 -28 2 -36 0.2 0.5 8 -26 45 -39 2 -36 0.9 0.7 42 34 14 -14 1 -36 0.3 0.8 32 -32 -32 -38 1 -36 0.5 0.1 8 -36 38 -3 2 -36 0.7 0.8 13 -19 28 6 2 -36 0.3 0.8 -20 -40 -16 -41 2 -36 0.1 0.5 14 -8 34 -37 2 -36 0.3 0.8 7 -11 -24 -41 1 -36 0.3 0.4 19 -11 -25 -26 1 -36 0.4 0.3 -1 -16 47 46 2 -36 0.8 0.8 38 7 -13 -19 1 -36 0.3 0.2 20 4 27 -45 1 -36 0.8 0.3 0 -2 35 10 2 -36 0.4 0.5 -13 -15 27 -45 2 -36 0.4 0.6 33 -2 11 -42 1 -36 0.8 0.7 46 24 28 -45 1 -36 0.4 0.1 46 36 12 7 1 -36 0.6 0.4 -11 -29 14 -29 2 -36 0.5 0.7 29 2 34 -14 2 -36 0.3 0.7 17 -43 25 -41 2 -36 0.4 0.2 45 9 49 1 1 -36 0.2 0.3 27 23 -10 -26 1 -36 0.6 0.4 26 -26 3 -17 1 -36 0.2 0.2 33 28 20 -25 1 -36 0.2 0.4 47 -2 -7 -18 1 -36 0.2 0.6 5 -39 -11 -16 2 -36 0.2 0.7 2 -45 35 -36 2 -36 0.9 0.4 16 5 -5 -19 1 -36 0.7 0.5 7 4 16 -40 1 -36 0.9 0.7 -12 -30 23 -34 2 -36 0.2 0.3 46 37 29 -20 1 -36 0.2 0.5 40 -12 26 -24 2 -36 0.1 0.3 20 -4 30 -42 1 -36 0.7 0.1 16 -28 -5 -49 1 -36 0.8 0.6 46 9 22 -34 1 -36 0.9 0.7 47 20 -27 -28 1 -36 0.8 0.2 47 -41 27 21 1 -36 0.7 0.5 26 4 -25 -39 1 -36 0.4 0.8 -22 -39 49 30 2 -36 0.7 0.4 2 -5 27 -6 2 -36 0.6 0.1 46 -18 25 13 
2 -36 0.6 0.3 8 -31 -4 -9 2 -36 0.3 0.4 48 -40 28 13 2 -36 0.3 0.4 19 -17 0 -8 2 -36 0.1 0.1 -44 -48 19 -16 2 -36 0.4 0.8 49 -37 -30 -38 1 -36 0.2 0.5 -24 -49 14 -14 2 -36 0.5 0.2 48 35 29 15 1 -36 0.3 0.9 -19 -38 13 -33 2 -36 0.7 0.2 7 -34 44 10 2 -36 0.7 0.7 44 -47 -31 -39 1 -36 0.5 0.4 40 -44 -25 -50 1 -36 0.2 0.3 50 -3 -10 -31 1 -36 0.8 0.9 -40 -49 35 -25 2 -36 0.2 0.5 39 9 30 -20 1 -36 0.5 0.7 24 3 21 -26 1 -36 0.6 0.1 5 -20 -6 -7 2 -36 0.8 0.5 23 -10 15 -16 1 -36 0.6 0.5 28 -8 -31 -40 1 -36 0.5 0.1 12 -34 4 -24 1 -36 0.7 0.1 -15 -37 -7 -36 1 -36 0.7 0.8 24 -33 29 -40 2 -36 0.1 0.3 43 25 28 7 1 -36 0.8 0.9 41 -17 23 -26 1 -36 0.5 0.8 -11 -46 15 -39 2 -36 0.9 0.5 -21 -42 34 -21 2 -36 0.3 0.1 -2 -34 2 -36 1 -36 0.2 0.2 29 -40 -10 -33 1 -36 0.5 0.2 36 23 5 -29 1 -36 0.8 0.1 18 -4 -3 -16 1 -36 0.9 0.2 43 -16 47 38 1 -36 0.1 0.3 28 -16 28 -6 2 -36 0.7 0.3 25 -35 49 4 2 -36 0.9 0.3 18 -20 30 6 2 -36 0.2 0.7 42 -13 21 17 2 -36 0.8 0.2 37 -37 29 27 2 -36 0.9 0.7 48 14 45 -41 1 -36 0.5 0.9 35 -48 20 -46 2 -37 0.5 0.5 -12 -16 45 43 2 -37 0.3 0.5 -13 -17 3 -45 2 -37 0.7 0.6 29 -7 16 -25 1 -37 0.2 0.7 43 33 26 -14 1 -37 0.1 0.7 24 -30 46 -21 2 -37 0.7 0.4 25 -28 33 -45 1 -37 0.3 0.5 36 5 43 22 2 -37 0.5 0.9 10 -14 7 -38 1 -37 0.5 0.6 16 -24 46 -4 2 -37 0.4 0.3 16 -9 22 -2 1 -37 0.5 0.2 20 -7 22 16 2 -37 0.4 0.7 -26 -32 8 -10 2 -37 0.4 0.6 -40 -47 9 -4 2 -37 0.9 0.9 -1 -4 -20 -24 1 -37 0.6 0.5 -10 -45 -20 -49 1 -37 0.4 0.3 39 28 49 -4 1 -37 0.5 0.8 47 -37 44 -23 2 -37 0.1 0.2 19 -16 25 -15 1 -37 0.1 0.2 35 8 35 9 1 -37 0.3 0.1 25 0 35 -27 1 -37 0.3 0.3 -12 -21 5 -44 2 -37 0.1 0.2 -27 -34 15 -13 2 -37 0.5 0.7 42 31 4 -17 1 -37 0.7 0.7 -10 -16 45 41 2 -37 0.5 0.7 29 20 -3 -36 1 -37 0.3 0.8 1 -18 -16 -39 1 -37 0.5 0.2 17 -40 -40 -43 1 -37 0.2 0.6 29 -22 -1 -35 1 -37 0.7 0.3 10 -28 20 -4 1 -37 0.4 0.7 23 -48 -8 -26 1 -37 0.2 0.5 13 -32 1 -46 1 -37 0.9 0.9 -26 -42 48 26 2 -37 0.6 0.3 15 -38 25 -4 2 -37 0.6 0.3 -22 -30 32 -28 2 -37 0.1 0.6 37 -16 39 -10 2 -37 0.7 0.9 22 -19 48 -11 2 -37 0.4 0.2 43 -35 29 -35 1 -37 0.4 0.4 11 -37 4 -2 2 -37 0.4 0.7 -3 -39 31 -5 2 -37 0.6 0.3 22 10 49 31 1 -37 0.5 0.6 19 -40 48 -3 2 -37 0.5 0.1 -4 -19 38 -6 2 -37 0.6 0.9 12 -7 7 -31 1 -37 0.7 0.2 -39 -48 44 -30 2 -37 0.8 0.3 49 -11 19 -30 1 -37 0.6 0.7 -11 -36 25 18 2 -37 0.8 0.1 22 -17 -8 -24 1 -37 0.3 0.5 -15 -26 22 -44 2 -37 0.7 0.2 20 -23 36 11 2 -37 0.4 0.4 14 -10 -16 -42 1 -37 0.9 0.4 28 -1 -25 -46 1 -37 0.3 0.8 9 8 27 7 2 -37 0.3 0.5 25 -38 45 -8 2 -37 0.8 0.9 10 1 4 -15 1 -37 0.8 0.6 -43 -47 -3 -42 2 -37 0.6 0.1 49 29 43 15 1 -37 0.5 0.4 42 6 42 5 1 -37 0.3 0.8 44 -40 15 13 2 -37 0.1 0.5 21 -33 36 -22 2 -37 0.4 0.6 30 6 -4 -13 1 -37 0.2 0.3 -33 -37 41 10 2 -37 0.5 0.7 41 24 16 -15 1 -37 0.5 0.3 44 2 49 -47 1 -37 0.6 0.5 34 -13 15 -48 1 -37 0.1 0.6 13 3 27 -40 1 -37 0.4 0.1 1 -45 -26 -37 1 -37 0.8 0.4 25 -27 -39 -50 1 -37 0.2 0.1 24 13 36 -39 1 -37 0.4 0.5 33 -47 9 -14 2 -37 0.9 0.2 -39 -46 42 14 2 -37 0.9 0.8 31 -5 28 20 2 -37 0.3 0.6 42 31 -37 -44 1 -37 0.7 0.8 26 -12 0 -8 1 -37 0.2 0.7 34 -12 26 -28 2 -37 0.2 0.7 -20 -25 31 14 2 -37 0.2 0.3 27 -9 19 -1 2 -37 0.4 0.2 -28 -45 -2 -48 1 -37 0.1 0.4 13 5 41 30 2 -37 0.6 0.9 46 0 19 -36 1 -37 0.8 0.1 -28 -38 21 -50 1 -37 0.2 0.6 -1 -35 -7 -35 2 -37 0.5 0.8 28 -21 21 -24 2 -37 0.5 0.7 15 -35 -45 -49 1 -37 0.5 0.2 43 -21 47 31 2 -37 0.7 0.1 10 4 -11 -39 1 -37 0.7 0.6 40 8 0 -13 1 -37 0.7 0.5 -10 -33 25 0 2 -37 0.5 0.5 -16 -45 14 -22 2 -37 0.1 0.4 25 -46 15 -37 2 -37 0.6 0.1 24 -36 3 -26 1 -37 0.1 0.8 29 15 50 -17 2 -37 0.3 0.1 3 -31 5 3 2 -37 0.3 0.6 37 -11 18 -32 1 -37 
0.5 0.1 1 -11 40 -31 1 -37 0.2 0.3 33 10 19 -38 1 -37 0.6 0.2 10 -3 49 39 2 -37 0.7 0.3 -11 -34 35 -26 2 -37 0.3 0.7 34 -16 13 -23 1 -37 0.5 0.9 6 -41 29 27 2 -37 0.7 0.1 16 6 27 -30 1 -37 0.4 0.2 37 -30 34 6 2 -37 0.4 0.3 30 -12 49 -9 1 -37 0.1 0.5 41 -38 27 17 2 -37 0.2 0.2 38 -10 18 4 2 -37 0.8 0.4 37 7 -30 -44 1 -37 0.5 0.5 -6 -44 39 10 2 -37 0.8 0.4 1 -41 26 -10 2 -37 0.5 0.5 47 44 -18 -22 1 -37 0.4 0.2 0 -11 36 -46 2 -37 0.2 0.7 -3 -27 -2 -17 2 -37 0.7 0.7 -12 -13 36 -20 2 -37 0.3 0.3 49 -31 17 -21 2 -37 0.4 0.9 4 -8 -16 -20 1 -37 0.5 0.5 -6 -39 37 -4 2 -37 0.5 0.6 35 -28 -25 -40 1 -37 0.2 0.7 23 -49 16 -6 2 -37 0.8 0.5 44 -12 12 -19 1 -37 0.9 0.5 26 -9 23 15 2 -37 0.2 0.8 27 -38 50 -1 2 -37 0.6 0.2 13 -17 43 16 2 -37 0.2 0.8 29 12 -27 -39 1 -37 0.4 0.4 1 -16 45 -38 2 -37 0.6 0.6 31 -47 26 -12 1 -37 0.9 0.2 41 -12 25 6 1 -37 0.3 0.2 15 10 47 -11 1 -37 0.8 0.3 19 3 47 -1 1 -37 0.4 0.1 29 -46 33 -22 1 -37 0.2 0.4 45 -17 47 -47 1 -37 0.4 0.2 49 15 39 -7 1 -37 0.2 0.9 -2 -42 26 9 2 -37 0.8 0.6 38 16 32 26 1 -37 0.1 0.5 15 -20 47 -42 2 -37 0.1 0.7 19 -40 18 -30 2 -37 0.7 0.2 32 -20 -15 -24 1 -37 0.5 0.5 -11 -28 39 -26 2 -37 0.4 0.2 28 11 34 -26 1 -37 0.8 0.4 39 -34 9 6 2 -37 0.7 0.5 -17 -28 -10 -24 1 -37 0.4 0.5 29 -24 45 -38 2 -37 0.9 0.6 27 20 -10 -28 1 -37 0.9 0.7 -11 -44 7 -4 2 -37 0.2 0.6 41 1 22 -17 2 -37 0.2 0.2 -20 -31 -43 -48 1 -37 0.2 0.1 1 -50 5 -41 1 -37 0.2 0.7 23 -29 18 9 2 -37 0.7 0.6 43 -15 44 -14 2 -37 0.1 0.3 22 12 19 14 2 -37 0.9 0.2 50 -19 9 8 1 -37 0.7 0.1 49 5 21 3 1 -37 0.7 0.9 18 -20 2 -48 1 -37 0.9 0.2 37 -24 20 -33 1 -37 0.4 0.3 14 -25 -31 -43 1 -37 0.5 0.9 16 2 33 9 2 -37 0.9 0.1 50 -38 6 -26 1 -37 0.7 0.4 21 -19 -38 -41 1 -37 0.7 0.7 13 -27 -7 -37 1 -37 0.1 0.6 7 4 27 -28 1 -37 0.4 0.2 30 3 -10 -32 1 -37 0.3 0.6 49 -50 40 -31 2 -37 0.6 0.4 47 33 33 -47 1 -37 0.3 0.6 -10 -25 14 5 2 -37 0.6 0.2 36 -47 31 -34 1 -37 0.6 0.7 38 31 -14 -22 1 -37 0.9 0.6 7 -15 19 -9 1 -37 0.4 0.5 48 -7 -4 -44 1 -37 0.6 0.7 -2 -44 45 11 2 -37 0.9 0.4 29 -38 31 -49 1 -37 0.3 0.7 42 18 -9 -23 1 -37 0.5 0.3 42 -1 41 -19 1 -37 0.8 0.1 26 -45 30 25 2 -37 0.8 0.1 -11 -21 37 -20 2 -37 0.8 0.3 29 -48 50 18 2 -37 0.6 0.6 4 -6 17 -24 1 -37 0.6 0.9 -23 -32 -7 -12 1 -37 0.9 0.1 -4 -36 -11 -40 1 -37 0.9 0.6 32 -40 23 -11 1 -37 0.5 0.7 39 -7 -3 -36 1 -37 0.3 0.7 48 17 43 -7 2 -37 0.9 0.2 33 -36 16 -24 1 -37 0.9 0.1 8 -17 44 4 2 -37 0.2 0.8 29 -4 8 -3 2 -37 0.7 0.1 37 28 23 -27 1 -37 0.8 0.8 24 20 -6 -23 1 -37 0.9 0.5 3 -45 46 -29 2 -37 0.3 0.1 9 -29 -1 -26 2 -37 0.7 0.2 49 47 29 10 1 -37 0.4 0.7 49 -41 8 -45 1 -37 0.1 0.8 -22 -34 38 29 2 -37 0.7 0.9 -16 -37 38 -4 2 -37 0.8 0.3 18 1 40 -37 1 -37 0.8 0.8 21 16 39 -27 1 -37 0.1 0.1 30 9 48 47 2 -37 0.2 0.3 26 1 28 19 2 -37 0.4 0.2 -15 -38 -30 -45 1 -37 0.4 0.4 35 -40 -39 -42 1 -37 0.1 0.2 36 8 -10 -46 1 -37 0.8 0.8 10 3 32 -25 1 -37 0.8 0.2 25 12 38 -18 1 -37 0.6 0.4 42 7 37 -15 1 -37 0.2 0.5 50 31 44 -23 1 -37 0.9 0.3 42 36 38 -20 1 -37 0.5 0.4 -20 -46 -18 -23 2 -37 0.9 0.9 8 2 29 -28 2 -37 0.5 0.5 29 -25 7 -9 1 -37 0.8 0.1 48 4 50 5 1 -37 0.7 0.1 15 -43 -35 -36 1 -37 0.7 0.5 28 7 39 17 1 -37 0.3 0.2 32 -47 46 -7 2 -37 0.8 0.7 47 -45 14 -47 1 -37 0.5 0.8 -40 -46 -33 -46 1 -37 0.3 0.5 -12 -21 2 -50 2 -37 0.7 0.1 30 -47 17 -48 1 -37 0.3 0.1 -46 -47 25 20 2 -37 0.9 0.8 47 -15 -10 -49 1 -37 0.1 0.8 48 -46 50 46 2 -37 0.4 0.6 -11 -21 -9 -30 2 -37 0.3 0.9 -35 -43 -6 -17 2 -37 0.9 0.5 -45 -47 -4 -11 2 -37 0.9 0.7 -11 -44 7 -3 1 -37 0.4 0.7 20 4 26 9 2 -37 0.3 0.8 -4 -45 17 -49 2 -37 0.4 0.8 27 -21 -20 -30 1 -37 0.9 0.2 37 -32 7 -3 1 -37 0.3 0.2 45 32 38 -23 
1 -37 0.2 0.2 -2 -31 -10 -17 1 -37 0.9 0.6 16 -30 36 3 2 -37 0.4 0.7 -12 -42 25 -36 2 -37 0.4 0.1 -27 -32 26 -37 2 -37 0.6 0.2 17 12 41 -45 1 -37 0.4 0.9 48 -25 8 -40 2 -37 0.7 0.3 12 -11 15 -2 2 -37 0.3 0.9 21 -23 24 -43 2 -37 0.2 0.2 50 24 34 -38 1 -37 0.2 0.9 8 -6 5 -21 2 -37 0.2 0.4 1 -26 46 -47 2 -37 0.9 0.5 -25 -47 37 -6 2 -37 0.8 0.9 13 -8 -1 -9 1 -37 0.4 0.2 42 10 50 21 1 -37 0.8 0.3 -7 -15 -1 -21 2 -37 0.8 0.5 32 7 48 39 2 -37 0.8 0.9 23 20 27 10 2 -37 0.1 0.3 28 -1 48 -2 2 -37 0.5 0.1 49 19 11 -33 1 -37 0.9 0.6 32 -2 28 -33 1 -37 0.5 0.5 30 -7 -30 -41 1 -37 0.1 0.2 36 21 44 15 1 -37 0.7 0.4 23 2 5 -13 1 -37 0.7 0.4 5 -5 35 32 2 -37 0.4 0.6 -26 -31 -8 -50 2 -37 0.4 0.2 34 28 -17 -36 1 -37 0.8 0.9 44 -17 50 -30 2 -37 0.6 0.8 31 -33 -3 -14 1 -37 0.9 0.1 -10 -48 12 -27 1 -37 0.8 0.3 8 -4 32 -14 1 -37 0.7 0.5 -2 -33 6 -22 2 -37 0.4 0.5 46 45 7 -50 1 -37 0.5 0.8 9 -37 25 -25 2 -37 0.5 0.3 32 8 32 19 2 -37 0.5 0.8 43 41 48 21 2 -37 0.3 0.9 30 -34 40 -23 2 -37 0.4 0.5 24 -32 -40 -45 1 -37 0.6 0.4 -17 -33 -22 -49 1 -37 0.6 0.8 46 26 8 -28 1 -37 0.9 0.3 -10 -30 12 9 2 -37 0.2 0.4 -1 -21 26 -29 1 -37 0.9 0.7 27 4 5 -12 1 -37 0.3 0.4 -21 -35 9 -47 2 -37 0.2 0.3 -5 -9 22 -1 2 -37 0.6 0.9 50 -47 -25 -37 1 -37 0.2 0.9 -2 -4 28 24 2 -37 0.9 0.9 7 -34 5 -15 1 -37 0.1 0.7 35 15 50 12 2 -37 0.5 0.1 49 -35 -15 -27 1 -37 0.2 0.2 27 -24 0 -40 1 -37 0.7 0.4 26 -33 -8 -49 1 -37 0.3 0.3 9 7 7 -27 1 -37 0.6 0.9 17 -28 32 22 2 -37 0.6 0.7 16 -26 -23 -35 1 -37 0.7 0.1 13 -18 45 -38 2 -37 0.5 0.8 26 8 18 -33 1 -37 0.3 0.7 -35 -40 -1 -11 2 -37 0.6 0.3 -35 -39 22 -20 2 -37 0.7 0.6 -26 -28 24 -43 2 -37 0.7 0.2 23 -50 21 -14 1 -37 0.6 0.3 5 -37 50 -15 2 -37 0.3 0.5 -10 -48 44 26 2 -37 0.9 0.5 7 -34 3 -43 1 -37 0.5 0.5 26 -13 27 9 2 -37 0.4 0.4 -40 -46 31 0 2 -37 0.7 0.2 24 15 11 -30 1 -37 0.3 0.9 23 -46 19 -17 2 -37 0.5 0.4 24 3 21 -43 1 -37 0.5 0.2 33 30 40 29 2 -37 0.9 0.4 -2 -9 24 -1 2 -37 0.9 0.2 41 30 41 -13 1 -37 0.2 0.8 20 -23 22 -25 2 -37 0.3 0.7 27 3 28 9 2 -37 0.4 0.1 50 3 39 5 1 -37 0.3 0.1 22 -29 20 6 2 -37 0.9 0.3 46 26 -31 -39 1 -38 0.2 0.1 -9 -30 -14 -40 1 -38 0.5 0.7 38 -12 11 -31 1 -38 0.5 0.2 42 -20 14 -6 1 -38 0.5 0.9 46 16 23 -10 1 -38 0.1 0.7 35 -25 48 -49 2 -38 0.6 0.2 29 -29 30 -20 1 -38 0.3 0.2 18 16 -10 -41 1 -38 0.1 0.3 26 22 -17 -46 1 -38 0.2 0.7 43 -12 49 38 2 -38 0.8 0.6 49 -27 12 -14 1 -38 0.2 0.2 41 -41 45 -13 2 -38 0.4 0.1 29 8 -41 -49 1 -38 0.1 0.5 8 -6 -2 -4 2 -38 0.5 0.4 49 42 -10 -36 1 -38 0.6 0.5 9 -10 -11 -37 1 -38 0.2 0.8 26 24 16 -46 1 -38 0.6 0.4 49 -20 27 -41 1 -38 0.6 0.3 -1 -31 -20 -33 1 -38 0.8 0.2 -24 -34 39 -23 2 -38 0.5 0.3 4 -1 -11 -47 1 -38 0.7 0.9 5 -49 50 29 2 -38 0.3 0.6 47 -4 27 5 2 -38 0.8 0.3 33 -50 9 -38 1 -38 0.4 0.5 45 -23 -18 -41 1 -38 0.3 0.2 -17 -18 23 -8 1 -38 0.6 0.9 46 -28 26 -41 2 -38 0.5 0.6 31 -36 25 -9 2 -38 0.8 0.2 26 3 31 -32 1 -38 0.2 0.9 1 -48 38 10 2 -38 0.5 0.5 -6 -11 16 8 2 -38 0.3 0.1 -22 -34 30 -27 2 -38 0.6 0.1 35 1 41 -12 1 -38 0.4 0.9 9 -14 -13 -49 1 -38 0.8 0.1 30 0 46 -4 1 -38 0.3 0.3 -4 -26 35 13 2 -38 0.6 0.7 26 -16 44 36 2 -38 0.5 0.3 12 -50 -4 -27 1 -38 0.3 0.2 5 -6 48 -12 2 -38 0.7 0.5 -10 -40 36 -32 2 -38 0.3 0.7 48 27 43 -5 1 -38 0.1 0.1 9 -45 24 -24 2 -38 0.6 0.8 -20 -27 -26 -39 1 -38 0.1 0.1 -11 -23 2 -13 1 -38 0.7 0.8 -13 -39 45 36 2 -38 0.1 0.2 50 -15 -19 -33 1 -38 0.8 0.1 41 -47 -13 -39 1 -38 0.9 0.6 22 -23 13 -12 1 -38 0.9 0.1 -24 -42 40 -33 2 -38 0.6 0.9 19 -43 25 13 2 -38 0.7 0.8 41 -46 10 4 1 -38 0.9 0.1 18 -1 0 -26 1 -38 0.2 0.7 49 35 -9 -22 1 -38 0.8 0.6 15 3 42 -28 2 -38 0.5 0.1 -25 -36 18 -35 2 -38 0.9 0.7 17 
-33 -17 -43 1 -38 0.5 0.4 -2 -17 39 -24 2 -38 0.3 0.4 -7 -26 -17 -24 1 -38 0.5 0.5 50 19 50 49 2 -38 0.2 0.5 -16 -23 34 -23 2 -38 0.3 0.1 23 -32 26 2 2 -38 0.3 0.5 35 17 37 -16 1 -38 0.7 0.8 26 21 3 -11 1 -38 0.4 0.7 -20 -39 42 31 2 -38 0.5 0.4 -5 -45 34 -44 2 -38 0.1 0.3 23 -16 47 17 2 -38 0.6 0.6 16 -34 17 -3 2 -38 0.2 0.2 23 -42 49 36 2 -38 0.6 0.8 -14 -19 21 -48 2 -38 0.7 0.3 39 31 -19 -37 1 -38 0.9 0.8 30 19 8 -26 1 -38 0.3 0.5 -24 -42 41 -33 2 -38 0.9 0.4 -37 -39 46 -15 2 -38 0.9 0.3 -13 -30 -7 -21 1 -38 0.3 0.3 50 1 23 -43 1 -38 0.1 0.8 11 -17 41 -2 2 -38 0.1 0.5 -10 -18 23 -43 2 -38 0.5 0.1 -45 -46 42 -20 1 -38 0.5 0.6 30 -33 11 -3 2 -38 0.1 0.2 35 26 -27 -35 1 -38 0.3 0.2 41 -8 -5 -50 1 -38 0.3 0.9 -34 -35 22 -12 2 -38 0.3 0.6 -12 -29 -26 -36 2 -38 0.8 0.3 -34 -37 8 -48 1 -38 0.4 0.8 -1 -38 45 43 2 -38 0.7 0.7 36 19 -25 -45 1 -38 0.5 0.8 33 -39 35 20 2 -38 0.1 0.5 -18 -39 29 10 2 -38 0.6 0.6 -20 -50 42 -39 2 -38 0.7 0.7 -4 -37 43 -45 2 -38 0.5 0.7 8 -35 -22 -43 1 -38 0.1 0.6 -8 -48 0 -8 2 -38 0.9 0.6 12 -21 50 -47 2 -38 0.9 0.7 38 0 40 18 1 -38 0.7 0.8 30 17 19 -1 1 -38 0.3 0.6 -32 -50 -24 -32 2 -38 0.1 0.9 22 -35 14 -9 2 -38 0.7 0.3 2 -23 29 -8 2 -38 0.3 0.3 -13 -41 43 30 2 -38 0.5 0.2 31 -44 3 -4 1 -38 0.9 0.8 36 -5 -12 -35 1 -38 0.4 0.7 19 -14 -25 -38 1 -38 0.5 0.2 40 -38 50 28 2 -38 0.5 0.8 16 -19 21 -9 2 -38 0.4 0.6 18 -45 -8 -37 1 -38 0.4 0.5 10 -15 33 -25 2 -38 0.2 0.2 21 14 6 -36 1 -38 0.5 0.1 35 12 26 -1 1 -38 0.3 0.1 34 -6 30 3 2 -38 0.8 0.5 39 3 17 -21 1 -38 0.1 0.8 31 -25 48 35 2 -38 0.4 0.4 -9 -15 -8 -49 1 -38 0.9 0.2 40 -4 -1 -23 1 -38 0.7 0.7 27 -21 34 -5 2 -38 0.1 0.3 40 35 4 -35 1 -38 0.7 0.3 24 -22 48 30 2 -38 0.8 0.8 -24 -34 -4 -38 1 -38 0.8 0.7 16 -18 -21 -45 1 -38 0.7 0.3 40 -48 45 -19 1 -38 0.6 0.4 -12 -13 -35 -36 1 -38 0.3 0.4 39 -42 4 0 1 -38 0.9 0.5 -1 -49 -18 -26 1 -38 0.7 0.7 48 -47 36 27 2 -38 0.6 0.9 6 -47 34 -35 2 -38 0.3 0.9 41 18 1 -43 1 -38 0.6 0.1 16 -38 -34 -42 1 -38 0.5 0.9 47 -6 18 -32 2 -38 0.4 0.7 9 1 -12 -38 1 -38 0.9 0.1 40 26 -1 -4 1 -38 0.5 0.1 44 -23 16 -7 1 -38 0.9 0.7 32 -2 -39 -41 1 -38 0.3 0.4 32 22 41 -29 1 -38 0.1 0.4 42 -33 21 -8 2 -38 0.4 0.8 46 -16 27 -22 1 -38 0.8 0.7 -2 -29 21 16 2 -38 0.7 0.2 0 -46 13 7 2 -38 0.9 0.2 50 20 11 -7 1 -38 0.6 0.6 43 -21 41 30 2 -38 0.8 0.1 5 -13 6 -41 1 -38 0.5 0.1 42 -17 -5 -10 1 -38 0.2 0.8 50 13 34 -50 1 -38 0.7 0.1 -7 -47 46 22 2 -38 0.8 0.6 -21 -47 19 5 2 -38 0.4 0.8 13 -14 6 0 2 -38 0.2 0.6 44 18 11 -27 1 -38 0.2 0.7 -28 -41 24 13 2 -38 0.6 0.9 12 -4 -8 -25 1 -38 0.9 0.6 46 -30 5 -4 1 -38 0.2 0.7 6 -2 10 -27 2 -38 0.9 0.5 -8 -35 50 44 2 -38 0.2 0.4 1 -34 22 11 2 -38 0.4 0.7 27 -49 -13 -26 1 -38 0.5 0.5 29 -4 31 -32 1 -38 0.6 0.1 36 25 2 -23 1 -38 0.6 0.7 48 42 36 5 1 -38 0.9 0.8 12 7 -26 -35 1 -38 0.3 0.7 38 10 -5 -43 1 -38 0.3 0.7 -11 -41 41 26 2 -38 0.4 0.6 49 47 30 -40 1 -38 0.6 0.2 -31 -47 42 -48 2 -38 0.2 0.8 42 24 41 4 2 -38 0.2 0.7 43 -4 -10 -40 1 -38 0.6 0.4 -2 -29 5 -40 1 -38 0.6 0.9 11 7 47 -18 2 -38 0.9 0.9 50 -10 24 -34 1 -38 0.7 0.3 49 -7 35 -14 1 -38 0.4 0.3 50 38 7 -16 1 -38 0.2 0.2 -3 -37 39 17 2 -38 0.7 0.2 -24 -41 33 -11 2 -38 0.7 0.6 -12 -23 -18 -46 2 -38 0.9 0.4 45 10 35 -41 1 -38 0.8 0.9 -14 -37 44 -50 2 -38 0.6 0.9 46 -42 16 -1 1 -38 0.4 0.1 6 -28 2 -13 1 -38 0.6 0.3 7 -48 19 -25 2 -38 0.3 0.3 41 18 -14 -25 1 -38 0.5 0.7 -16 -35 41 -30 2 -38 0.8 0.7 27 11 9 7 1 -38 0.7 0.9 24 -19 29 -46 2 -38 0.1 0.1 18 8 0 -28 1 -38 0.7 0.9 -19 -42 -19 -38 2 -38 0.8 0.3 22 -2 -5 -48 1 -38 0.9 0.1 -30 -45 47 -6 2 -38 0.7 0.4 28 24 -3 -12 1 -38 0.9 0.8 26 -2 -28 -29 1 -38 0.3 0.6 41 24 
17 -2 1 -38 0.5 0.9 36 -46 17 -46 2 -38 0.3 0.2 12 -47 23 19 2 -38 0.4 0.2 37 -50 49 19 2 -38 0.6 0.7 -6 -9 24 6 2 -38 0.9 0.1 32 -15 -17 -20 1 -38 0.6 0.7 49 16 -9 -37 1 -38 0.7 0.5 -4 -31 50 -25 2 -38 0.4 0.6 43 10 42 40 2 -38 0.9 0.2 -1 -25 44 -36 2 -38 0.1 0.5 -12 -37 0 -8 2 -38 0.8 0.1 41 -48 23 7 1 -38 0.5 0.2 37 -7 -32 -34 1 -38 0.9 0.8 -12 -41 25 12 2 -38 0.4 0.2 21 0 33 -24 1 -38 0.1 0.4 -33 -46 24 -41 2 -38 0.1 0.5 27 26 13 -36 1 -38 0.6 0.5 32 -15 46 -22 1 -38 0.3 0.9 36 -40 26 6 2 -38 0.6 0.5 0 -20 27 -39 2 -38 0.4 0.6 -8 -37 -5 -21 1 -38 0.9 0.8 -3 -47 47 -41 2 -38 0.8 0.9 43 10 29 -19 1 -38 0.7 0.1 40 -8 7 -47 1 -38 0.4 0.3 18 7 44 27 2 -38 0.6 0.2 50 -40 50 34 2 -38 0.4 0.9 22 -8 40 39 2 -38 0.4 0.7 24 4 21 -43 1 -38 0.9 0.3 28 -3 -12 -30 1 -38 0.6 0.1 45 -7 43 -5 1 -38 0.3 0.7 36 -21 -29 -41 1 -38 0.6 0.2 -15 -44 7 -5 2 -38 0.6 0.8 -11 -13 20 -17 2 -38 0.9 0.8 28 -16 -12 -27 1 -38 0.3 0.5 31 26 15 -48 1 -38 0.1 0.1 35 -47 46 -48 1 -38 0.7 0.9 7 -26 2 -43 2 -38 0.2 0.1 4 -45 -20 -37 1 -38 0.3 0.6 -35 -46 32 5 2 -38 0.5 0.1 -37 -41 -20 -40 2 -38 0.2 0.1 26 -36 3 -6 2 -38 0.8 0.6 -13 -19 -15 -34 2 -38 0.7 0.2 -7 -36 15 -38 1 -38 0.2 0.8 -4 -29 41 -13 2 -38 0.8 0.1 16 -15 44 29 2 -38 0.5 0.7 46 7 -15 -19 1 -38 0.5 0.1 30 29 29 9 1 -38 0.2 0.5 44 28 -24 -36 1 -38 0.1 0.5 38 10 18 -49 1 -38 0.9 0.2 -44 -50 41 12 2 -38 0.9 0.7 0 -34 -9 -49 1 -38 0.5 0.7 3 -31 31 -5 2 -38 0.2 0.5 0 -35 37 -12 2 -38 0.4 0.4 49 -20 -20 -46 1 -38 0.1 0.6 -9 -27 -43 -50 2 -38 0.7 0.8 27 -35 35 2 2 -38 0.6 0.4 -48 -49 -11 -14 2 -38 0.2 0.2 33 -9 19 6 2 -38 0.6 0.3 48 33 29 8 1 -38 0.9 0.5 -34 -39 39 3 2 -38 0.6 0.1 45 -7 0 -43 1 -38 0.3 0.2 24 16 -10 -48 1 -38 0.1 0.3 -21 -38 19 -35 2 -38 0.4 0.1 37 -23 23 -29 1 -38 0.8 0.2 -16 -39 49 8 2 -38 0.9 0.2 47 -31 47 40 2 -38 0.1 0.5 -8 -46 41 -45 2 -38 0.1 0.8 46 -4 47 10 2 -38 0.3 0.3 -13 -30 -37 -44 1 -38 0.1 0.8 49 35 -25 -39 1 -38 0.5 0.9 32 31 11 -12 1 -38 0.6 0.4 23 -3 -5 -17 1 -38 0.7 0.6 35 18 1 -29 1 -38 0.9 0.8 6 -22 12 -40 2 -38 0.8 0.3 18 -38 33 -23 1 -38 0.6 0.4 48 -43 0 -2 1 -38 0.6 0.3 -10 -22 40 25 2 -38 0.2 0.5 -3 -48 50 -3 2 -38 0.4 0.3 27 2 26 6 2 -38 0.6 0.4 10 -26 45 39 2 -38 0.1 0.3 46 -29 34 8 2 -38 0.8 0.2 42 19 13 -50 1 -38 0.5 0.3 25 -17 -22 -40 1 -38 0.6 0.1 24 -17 15 -18 1 -38 0.9 0.6 19 -11 26 -27 1 -38 0.5 0.9 -2 -25 22 8 2 -38 0.6 0.9 35 16 20 -42 1 -38 0.9 0.1 20 -12 -9 -28 1 -38 0.8 0.1 -25 -31 29 -31 1 -38 0.4 0.2 15 -42 2 -19 1 -38 0.3 0.5 -31 -48 -2 -30 2 -38 0.2 0.7 -30 -44 19 18 2 -38 0.9 0.2 3 -17 33 -42 1 -38 0.7 0.8 37 -43 -26 -28 1 -38 0.6 0.3 23 -44 8 -6 1 -38 0.4 0.4 -11 -36 42 -42 2 -38 0.6 0.5 36 24 20 -38 1 -38 0.2 0.4 4 -5 17 -45 1 -38 0.2 0.6 29 -33 48 -12 2 -38 0.3 0.3 -21 -34 -10 -13 2 -38 0.1 0.4 48 -27 31 -30 2 -38 0.9 0.7 21 10 -8 -16 1 -38 0.4 0.8 23 11 1 -11 1 -38 0.6 0.7 -10 -23 28 -1 2 -38 0.8 0.2 -8 -9 -6 -25 1 -38 0.8 0.1 6 -21 -10 -21 1 -38 0.6 0.1 9 -35 7 -36 1 -38 0.4 0.7 12 -19 32 18 2 -38 0.4 0.6 -9 -36 0 -32 2 -38 0.6 0.6 12 -45 7 -31 2 -38 0.8 0.5 -17 -20 -5 -7 1 -38 0.3 0.3 11 -32 29 -49 1 -38 0.1 0.2 -44 -47 11 -39 2 -38 0.4 0.5 45 39 44 -16 1 -38 0.3 0.8 32 -50 48 13 2 -38 0.6 0.8 7 -24 48 -3 2 -39 0.2 0.1 48 24 7 -47 1 -39 0.7 0.8 44 -29 17 -50 1 -39 0.4 0.6 14 6 13 -42 1 -39 0.3 0.7 38 -27 -7 -39 2 -39 0.5 0.8 48 20 46 33 1 -39 0.4 0.3 32 -36 3 -39 1 -39 0.7 0.7 -22 -44 50 -26 2 -39 0.6 0.5 42 3 43 15 2 -39 0.4 0.1 13 -33 41 -47 2 -39 0.6 0.1 27 -23 28 11 2 -39 0.6 0.1 -13 -48 13 -8 1 -39 0.2 0.2 36 -33 3 -33 2 -39 0.4 0.4 21 -13 16 8 2 -39 0.3 0.2 1 -23 -27 -37 1 -39 0.4 0.7 -18 -28 -5 
-23 1 -39 0.5 0.5 47 -20 -9 -22 1 -39 0.6 0.2 12 0 30 -47 1 -39 0.5 0.1 21 -5 40 -2 1 -39 0.6 0.8 13 -2 -23 -42 1 -39 0.3 0.6 42 31 12 -38 1 -39 0.6 0.8 -11 -29 19 -41 2 -39 0.3 0.7 6 -21 13 -31 2 -39 0.4 0.6 13 -30 29 17 2 -39 0.1 0.4 -44 -45 44 20 2 -39 0.6 0.7 24 -14 36 31 2 -39 0.5 0.7 -10 -18 25 -42 2 -39 0.2 0.9 23 -15 12 -7 2 -39 0.5 0.4 -5 -43 -16 -26 2 -39 0.9 0.4 19 9 24 -19 1 -39 0.5 0.7 -3 -46 48 16 2 -39 0.9 0.1 31 22 25 -1 1 -39 0.9 0.7 -39 -49 32 4 2 -39 0.3 0.2 25 13 -3 -14 1 -39 0.1 0.5 2 0 41 -27 2 -39 0.3 0.9 38 32 -6 -39 1 -39 0.1 0.3 -17 -19 30 -7 1 -39 0.4 0.1 -41 -46 -1 -31 2 -39 0.5 0.8 17 -30 3 -5 2 -39 0.7 0.2 -9 -25 49 15 2 -39 0.7 0.3 6 -9 47 -36 1 -39 0.7 0.2 45 -45 6 -21 1 -39 0.9 0.7 30 8 47 -43 1 -39 0.7 0.5 45 9 13 12 1 -39 0.8 0.5 29 4 50 -39 1 -39 0.4 0.8 18 -14 17 5 1 -39 0.1 0.3 -27 -45 18 -34 1 -39 0.7 0.1 6 2 17 -22 1 -39 0.2 0.5 31 28 48 9 1 -39 0.7 0.6 32 -22 15 -25 2 -39 0.3 0.9 -15 -37 27 19 2 -39 0.2 0.9 -17 -29 48 -11 2 -39 0.7 0.1 27 -26 28 -48 2 -39 0.8 0.1 -22 -23 16 -45 2 -39 0.5 0.8 50 38 49 -34 1 -39 0.1 0.8 -12 -24 33 -21 2 -39 0.6 0.8 15 -12 15 -45 1 -39 0.7 0.4 -4 -42 -23 -25 2 -39 0.5 0.4 23 -37 39 -4 2 -39 0.1 0.6 17 14 38 -7 2 -39 0.8 0.1 0 -4 29 11 2 -39 0.3 0.8 43 -38 -31 -42 1 -39 0.3 0.7 3 -22 46 10 2 -39 0.8 0.7 15 -8 28 -43 1 -39 0.7 0.1 48 -48 50 -11 1 -39 0.2 0.6 20 -15 12 -40 2 -39 0.8 0.1 42 41 -13 -41 1 -39 0.9 0.2 16 -15 -17 -24 2 -39 0.7 0.5 41 -19 44 -20 1 -39 0.7 0.1 -5 -49 40 24 2 -39 0.1 0.9 28 27 0 -2 1 -39 0.3 0.4 2 -47 35 -39 1 -39 0.3 0.7 -27 -50 -20 -22 1 -39 0.3 0.3 20 14 49 12 2 -39 0.1 0.8 24 -19 25 -17 2 -39 0.7 0.9 38 -30 28 -14 2 -39 0.9 0.5 14 -11 11 -1 1 -39 0.7 0.1 -25 -43 -23 -37 2 -39 0.8 0.3 13 10 -14 -18 1 -39 0.7 0.8 26 15 -33 -42 1 -39 0.4 0.9 46 41 45 -22 1 -39 0.8 0.1 25 -49 36 -42 1 -39 0.9 0.3 -4 -44 -3 -48 2 -39 0.9 0.1 40 -29 10 3 1 -39 0.4 0.1 20 -49 4 -16 1 -39 0.9 0.9 1 -36 29 -6 2 -39 0.1 0.6 22 -14 44 -7 2 -39 0.6 0.4 4 -21 11 -37 2 -39 0.9 0.2 36 -12 -16 -21 1 -39 0.6 0.4 -36 -50 11 -17 2 -39 0.5 0.7 24 -7 25 -24 2 -39 0.7 0.3 -12 -37 39 -17 2 -39 0.2 0.9 2 -7 45 -13 2 -39 0.8 0.8 38 -18 33 -32 2 -39 0.7 0.4 -8 -48 -2 -11 2 -39 0.8 0.5 17 3 25 5 1 -39 0.2 0.8 33 5 37 -41 1 -39 0.7 0.6 19 5 14 -45 1 -39 0.4 0.3 50 -42 -19 -26 1 -39 0.9 0.4 -25 -28 30 -16 2 -39 0.3 0.1 4 -8 46 42 2 -39 0.8 0.9 46 44 38 36 2 -39 0.4 0.1 28 -19 25 -35 1 -39 0.5 0.6 50 30 -42 -48 1 -39 0.1 0.6 15 -3 2 -37 1 -39 0.5 0.9 43 -6 -24 -42 2 -39 0.2 0.5 29 -44 19 -6 2 -39 0.4 0.5 43 -35 -18 -34 1 -39 0.4 0.6 12 -18 -23 -42 1 -39 0.9 0.7 38 -11 -11 -47 1 -39 0.8 0.2 47 36 42 -34 1 -39 0.4 0.7 31 -50 9 6 2 -39 0.5 0.4 14 1 19 16 2 -39 0.2 0.8 -15 -37 39 -24 2 -39 0.9 0.4 1 -3 42 -3 1 -39 0.8 0.2 15 -20 -6 -19 1 -39 0.2 0.5 22 4 24 -17 2 -39 0.5 0.1 35 -45 -24 -33 1 -39 0.2 0.4 31 -27 41 -5 2 -39 0.2 0.8 50 -18 0 -11 2 -39 0.8 0.3 37 26 27 -15 1 -39 0.8 0.7 35 -15 50 42 2 -39 0.6 0.9 47 -29 42 -16 2 -39 0.1 0.8 -30 -38 -8 -32 1 -39 0.6 0.4 -1 -45 -1 -22 1 -39 0.9 0.5 31 -13 45 -8 1 -39 0.3 0.3 -3 -27 -12 -40 2 -39 0.9 0.4 -18 -31 35 -11 2 -39 0.8 0.3 36 6 -36 -49 1 -39 0.2 0.1 5 -40 -16 -29 1 -39 0.5 0.3 26 -9 37 -10 2 -39 0.9 0.2 9 -29 22 18 2 -39 0.5 0.8 -27 -49 6 -22 1 -39 0.6 0.2 -8 -50 -16 -50 1 -39 0.9 0.8 -19 -33 -1 -4 2 -39 0.2 0.1 3 -4 48 -38 2 -39 0.5 0.1 42 36 40 -43 1 -39 0.4 0.7 -13 -48 16 11 2 -39 0.9 0.9 11 -21 42 -4 2 -39 0.7 0.2 9 -39 33 -33 1 -39 0.7 0.9 21 4 8 0 2 -39 0.4 0.8 -32 -42 45 -17 2 -39 0.4 0.6 34 -20 30 -44 2 -39 0.5 0.8 29 -24 48 23 2 -39 0.3 0.8 27 6 -8 -12 1 -39 0.7 0.7 -16 -29 -34 
-49 1 -39 0.3 0.8 38 -34 -17 -24 1 -39 0.5 0.1 -39 -45 48 -23 2 -39 0.5 0.7 -13 -24 -21 -34 2 -39 0.6 0.3 3 1 21 -34 2 -39 0.8 0.2 4 -8 -3 -16 1 -39 0.8 0.1 48 -12 21 -50 2 -39 0.5 0.2 -9 -25 50 -6 1 -39 0.9 0.8 -24 -36 21 -28 2 -39 0.5 0.9 3 -50 47 -1 2 -39 0.8 0.9 50 -31 14 -33 1 -39 0.6 0.4 -17 -27 49 -21 2 -39 0.7 0.9 -33 -48 -6 -43 2 -39 0.8 0.9 46 -37 -28 -50 1 -39 0.2 0.1 9 0 30 3 2 -39 0.2 0.7 3 -50 50 37 2 -39 0.6 0.9 43 10 1 -21 1 -39 0.2 0.7 -11 -14 35 -45 2 -39 0.3 0.7 -7 -34 36 23 2 -39 0.3 0.8 13 -14 42 -39 2 -39 0.4 0.1 -24 -37 29 -28 1 -39 0.8 0.8 -29 -30 22 11 2 -39 0.8 0.4 31 -36 49 25 2 -39 0.8 0.6 35 22 -1 -12 1 -39 0.3 0.9 33 -23 -25 -37 2 -39 0.9 0.8 37 -14 6 -15 1 -39 0.9 0.2 -12 -47 33 -17 2 -39 0.4 0.2 35 -25 47 2 1 -39 0.4 0.4 -19 -36 17 -18 2 -39 0.1 0.2 30 -45 30 17 2 -39 0.1 0.5 45 29 18 -48 2 -39 0.6 0.1 31 -23 39 -40 1 -39 0.2 0.1 30 27 11 -22 1 -39 0.8 0.1 -1 -33 32 20 2 -39 0.6 0.7 2 -39 38 -5 2 -39 0.3 0.9 -1 -32 10 8 2 -39 0.5 0.2 30 -47 43 -40 1 -39 0.8 0.9 -26 -44 -3 -41 2 -39 0.1 0.3 49 1 41 -27 1 -39 0.1 0.8 1 -41 40 22 2 -39 0.5 0.1 16 -8 19 -11 1 -39 0.1 0.2 4 -12 -21 -24 2 -39 0.4 0.4 50 -21 -3 -42 1 -39 0.6 0.7 6 -16 32 -38 2 -39 0.3 0.3 29 -3 44 1 2 -39 0.5 0.6 32 -30 37 -16 2 -39 0.5 0.9 28 12 14 -20 2 -39 0.3 0.6 27 13 50 -40 1 -39 0.6 0.1 22 6 40 -15 1 -39 0.2 0.8 0 -49 45 -18 2 -39 0.9 0.2 50 -24 48 20 2 -39 0.9 0.5 35 -6 48 -44 1 -39 0.6 0.3 48 22 40 -4 1 -39 0.9 0.7 -40 -42 -22 -31 2 -39 0.3 0.7 45 35 7 -27 2 -39 0.8 0.5 14 -6 12 -20 1 -39 0.5 0.3 5 -26 50 25 2 -39 0.2 0.8 -19 -40 29 9 2 -39 0.4 0.3 40 8 -16 -42 1 -39 0.8 0.9 -25 -40 -19 -38 2 -39 0.3 0.7 50 19 29 -17 1 -39 0.1 0.7 22 8 6 -13 1 -39 0.9 0.4 -21 -33 42 3 2 -39 0.1 0.3 26 -33 13 -31 2 -39 0.4 0.4 42 -47 14 -9 1 -39 0.3 0.2 0 -12 2 -7 2 -39 0.3 0.9 -3 -26 41 11 2 -39 0.1 0.6 23 1 31 22 2 -39 0.9 0.1 -11 -33 44 20 2 -39 0.4 0.9 47 32 -2 -8 1 -39 0.2 0.8 44 37 26 -30 2 -39 0.7 0.3 37 -13 30 11 2 -39 0.8 0.8 25 -22 27 -9 1 -39 0.5 0.4 48 -10 24 17 1 -39 0.8 0.8 31 -23 -8 -26 1 -39 0.7 0.4 24 -47 36 35 2 -39 0.3 0.6 39 5 1 -27 1 -39 0.8 0.9 27 4 45 29 2 -39 0.5 0.8 31 6 33 -18 1 -39 0.6 0.4 40 -16 23 -25 1 -39 0.1 0.2 22 -40 27 -34 2 -39 0.4 0.8 49 9 -6 -20 1 -39 0.2 0.7 -10 -20 -7 -38 2 -39 0.8 0.3 -9 -13 50 -4 2 -39 0.6 0.7 -25 -28 -6 -45 1 -39 0.5 0.3 -20 -26 -5 -31 2 -39 0.8 0.4 25 8 -30 -33 1 -39 0.7 0.7 30 -2 26 -18 2 -39 0.7 0.3 27 21 -15 -37 1 -39 0.6 0.1 40 -2 -10 -15 1 -39 0.6 0.4 7 -34 43 -10 2 -39 0.5 0.1 47 -21 44 18 1 -39 0.2 0.9 20 0 18 11 2 -39 0.2 0.9 9 -48 48 30 2 -39 0.7 0.9 -13 -15 40 27 2 -39 0.5 0.7 -18 -45 -11 -26 2 -39 0.6 0.4 -14 -49 -8 -11 2 -39 0.5 0.2 49 47 48 -10 1 -39 0.9 0.8 20 -46 3 -43 1 -39 0.1 0.7 -17 -48 50 17 2 -39 0.4 0.8 -35 -41 39 -43 2 -39 0.6 0.5 43 17 -42 -50 1 -39 0.1 0.1 14 -39 48 -22 1 -39 0.5 0.1 40 -4 -7 -12 1 -39 0.6 0.7 48 -37 15 -24 1 -39 0.6 0.6 23 -42 14 -32 1 -39 0.4 0.4 8 -40 37 25 2 -39 0.6 0.9 31 -1 33 -9 2 -39 0.7 0.2 -29 -49 -14 -38 1 -39 0.9 0.6 38 -36 12 -11 1 -39 0.4 0.4 -4 -49 30 -32 2 -39 0.7 0.3 34 -8 43 0 2 -39 0.7 0.8 25 15 -28 -48 1 -39 0.7 0.4 -33 -50 12 -47 1 -39 0.8 0.2 41 1 -9 -10 1 -39 0.8 0.8 48 35 49 -46 1 -39 0.5 0.6 24 23 24 22 2 -39 0.7 0.6 1 -6 5 -14 1 -39 0.1 0.9 30 26 -19 -50 1 -39 0.1 0.1 6 5 2 -29 1 -39 0.6 0.8 30 -17 38 12 2 -39 0.8 0.7 18 -30 4 -24 1 -39 0.4 0.8 48 30 20 -43 2 -39 0.5 0.3 39 -41 2 -32 1 -39 0.7 0.5 6 -42 7 -20 2 -39 0.6 0.5 1 -28 37 28 2 -39 0.1 0.9 9 -13 44 38 2 -39 0.6 0.7 41 11 13 -27 1 -39 0.7 0.2 -24 -47 11 1 2 -39 0.8 0.6 0 -12 15 -6 2 -39 0.3 0.7 18 -44 35 33 2 -39 0.6 0.7 0 
-35 -24 -42 1 -39 0.7 0.9 13 -4 13 -9 2 -39 0.6 0.9 42 -34 -18 -21 1 -39 0.8 0.2 43 -25 -22 -27 1 -39 0.3 0.3 -3 -15 -25 -49 1 -39 0.5 0.2 39 -40 -24 -47 1 -39 0.2 0.2 -13 -19 10 -36 1 -39 0.3 0.5 23 2 -25 -26 1 -39 0.4 0.6 46 44 38 -14 1 -39 0.7 0.8 19 6 39 37 2 -39 0.3 0.6 -13 -41 24 -35 2 -39 0.6 0.9 2 -43 5 4 1 -39 0.1 0.6 -9 -28 -33 -50 2 -39 0.8 0.1 -34 -49 13 -46 1 -39 0.9 0.8 -6 -14 41 3 2 -39 0.7 0.5 -7 -33 20 -7 2 -39 0.4 0.8 24 -31 24 -4 2 -39 0.1 0.3 42 -28 25 12 2 -39 0.9 0.4 44 2 -44 -46 1 -39 0.9 0.4 23 2 18 -8 2 -39 0.1 0.8 -14 -23 -12 -50 1 -39 0.8 0.9 25 17 0 -4 1 -39 0.6 0.7 42 -35 30 -24 2 -39 0.7 0.1 39 9 48 20 2 -39 0.3 0.9 13 -32 23 -23 2 -40 0.3 0.1 -1 -9 -1 -8 2 -40 0.7 0.5 40 1 30 -15 1 -40 0.7 0.7 27 16 47 8 2 -40 0.1 0.1 28 13 40 -37 1 -40 0.8 0.2 31 -5 10 -13 1 -40 0.8 0.7 -34 -42 50 -23 2 -40 0.1 0.6 -12 -48 20 11 2 -40 0.8 0.2 36 -14 -18 -37 1 -40 0.4 0.7 2 -9 -32 -47 1 -40 0.7 0.3 42 -5 18 -3 1 -40 0.8 0.4 17 6 3 -50 1 -40 0.7 0.4 38 20 42 1 1 -40 0.7 0.5 20 -11 46 25 2 -40 0.8 0.3 -2 -5 49 -39 2 -40 0.1 0.5 19 -13 -20 -32 1 -40 0.9 0.8 11 -12 4 -5 1 -40 0.9 0.3 6 -1 50 16 2 -40 0.5 0.1 9 -24 26 -19 1 -40 0.7 0.1 40 -29 -9 -15 1 -40 0.8 0.9 -34 -35 29 -43 2 -40 0.1 0.9 3 -10 37 -47 2 -40 0.6 0.8 5 -49 17 -27 2 -40 0.1 0.5 25 -42 -3 -28 1 -40 0.2 0.4 4 -49 16 -50 2 -40 0.5 0.7 -5 -19 37 -48 2 -40 0.7 0.4 5 -18 37 8 2 -40 0.3 0.5 16 -48 -40 -42 1 -40 0.1 0.1 49 -50 2 -45 1 -40 0.8 0.7 11 -23 4 -17 1 -40 0.4 0.5 -33 -44 -9 -17 2 -40 0.1 0.7 -3 -22 -26 -44 1 -40 0.6 0.1 50 41 -6 -36 1 -40 0.2 0.5 29 -25 34 -31 2 -40 0.1 0.6 22 -41 35 -46 2 -40 0.1 0.8 -3 -4 -22 -49 1 -40 0.3 0.5 7 -12 -33 -37 1 -40 0.3 0.1 24 17 46 40 2 -40 0.3 0.9 33 -7 6 -39 1 -40 0.5 0.2 37 -14 44 15 2 -40 0.6 0.4 38 -6 49 -21 1 -40 0.2 0.4 -18 -22 -13 -29 2 -40 0.3 0.7 42 -24 -6 -24 1 -40 0.9 0.1 -9 -44 44 13 2 -40 0.9 0.3 21 -47 21 -10 1 -40 0.6 0.3 46 -6 14 6 2 -40 0.1 0.5 12 6 16 -1 2 -40 0.5 0.2 2 -34 36 13 2 -40 0.5 0.7 21 -31 -14 -22 1 -40 0.5 0.9 10 3 32 -37 2 -40 0.5 0.6 44 -22 -10 -38 1 -40 0.5 0.6 37 -26 29 -20 2 -40 0.7 0.6 27 -28 -17 -29 1 -40 0.6 0.5 4 -20 30 0 2 -40 0.6 0.2 39 1 38 12 2 -40 0.6 0.2 20 -27 -29 -37 1 -40 0.6 0.2 24 -5 -21 -39 1 -40 0.7 0.6 0 -25 10 3 2 -40 0.3 0.3 -18 -46 42 -10 2 -40 0.6 0.2 31 19 -34 -42 1 -40 0.3 0.2 2 -38 45 12 2 -40 0.1 0.3 44 -6 -20 -23 1 -40 0.1 0.2 23 -23 -30 -33 1 -40 0.1 0.1 32 -42 32 10 2 -40 0.9 0.7 46 22 22 18 1 -40 0.8 0.4 44 -6 1 -31 1 -40 0.5 0.2 -19 -21 -15 -29 1 -40 0.8 0.6 11 -38 -23 -36 1 -40 0.3 0.7 13 -31 28 -14 2 -40 0.2 0.7 49 17 30 -25 1 -40 0.3 0.9 30 -38 -3 -37 2 -40 0.9 0.5 -24 -30 32 -18 2 -40 0.6 0.1 -16 -21 -13 -31 1 -40 0.2 0.6 21 -38 14 -19 2 -40 0.9 0.2 -10 -21 45 -2 2 -40 0.5 0.2 47 6 -9 -34 1 -40 0.4 0.7 38 18 19 -3 1 -40 0.2 0.9 2 -47 12 -18 2 -40 0.5 0.2 50 16 -33 -38 1 -40 0.5 0.1 10 -8 6 -10 1 -40 0.5 0.5 27 11 6 -49 1 -40 0.3 0.3 38 -20 49 3 2 -40 0.1 0.4 13 -1 20 -33 1 -40 0.8 0.1 47 -13 43 39 2 -40 0.7 0.1 -12 -41 -19 -42 1 -40 0.4 0.3 -6 -48 42 -36 2 -40 0.9 0.5 19 -36 43 -9 1 -40 0.1 0.6 23 -2 -22 -33 1 -40 0.1 0.8 30 -14 5 -1 2 -40 0.1 0.5 7 -10 24 3 2 -40 0.6 0.6 48 -22 43 -23 2 -40 0.7 0.9 16 -39 33 5 2 -40 0.1 0.2 5 -5 -14 -24 1 -40 0.7 0.7 -31 -39 2 -47 2 -40 0.1 0.7 -33 -44 38 4 2 -40 0.3 0.8 -1 -15 24 -7 2 -40 0.8 0.9 6 -44 38 -36 2 -40 0.5 0.4 -6 -26 -34 -38 2 -40 0.3 0.7 18 9 40 2 2 -40 0.4 0.9 -46 -47 47 -24 2 -40 0.5 0.4 47 -18 17 -1 1 -40 0.3 0.3 -7 -32 -8 -39 1 -40 0.4 0.5 42 -3 -12 -37 1 -40 0.5 0.3 34 23 -18 -35 1 -40 0.2 0.9 -17 -41 17 0 2 -40 0.3 0.5 34 -27 32 -36 2 -40 0.9 0.4 48 12 
41 37 1 -40 0.8 0.8 30 19 27 -41 1 -40 0.5 0.6 -24 -38 38 34 2 -40 0.8 0.6 48 -38 5 -48 1 -40 0.9 0.9 -15 -23 32 23 2 -40 0.3 0.3 -9 -43 39 -27 2 -40 0.6 0.4 15 -23 24 -50 1 -40 0.9 0.1 14 -28 21 -22 1 -40 0.5 0.7 5 -43 -9 -26 1 -40 0.8 0.7 36 -5 -17 -36 1 -40 0.2 0.1 37 -20 -21 -49 1 -40 0.5 0.9 6 -24 14 -40 2 -40 0.6 0.3 9 5 36 -26 1 -40 0.7 0.3 45 -34 -25 -37 1 -40 0.9 0.6 -33 -34 49 17 2 -40 0.1 0.4 -21 -42 47 -36 2 -40 0.9 0.2 4 -42 -25 -35 1 -40 0.6 0.5 32 -4 47 3 2 -40 0.3 0.7 10 -46 14 -22 2 -40 0.2 0.9 49 11 30 -37 1 -40 0.8 0.8 27 12 42 -2 1 -40 0.1 0.2 27 -12 15 -39 1 -40 0.3 0.7 30 6 50 14 2 -40 0.3 0.3 -4 -46 17 10 2 -40 0.9 0.3 31 -30 35 -21 1 -40 0.8 0.4 45 -48 -14 -37 1 -40 0.8 0.2 5 -13 -28 -29 1 -40 0.3 0.8 22 -19 -4 -26 1 -40 0.9 0.7 40 -43 -2 -5 1 -40 0.5 0.6 49 -34 -24 -26 1 -40 0.8 0.6 -35 -39 29 -36 2 -40 0.1 0.1 50 -50 46 -9 2 -40 0.3 0.8 13 -10 -10 -33 1 -40 0.5 0.6 33 -34 31 19 2 -40 0.1 0.6 35 -13 -41 -49 1 -40 0.8 0.6 33 1 41 18 2 -40 0.2 0.2 49 31 13 -16 1 -40 0.5 0.8 0 -43 10 3 2 -40 0.6 0.5 35 -19 7 -22 1 -40 0.4 0.2 3 -20 41 31 2 -40 0.1 0.3 -3 -10 34 -35 2 -40 0.7 0.5 15 -35 47 -48 2 -40 0.6 0.6 19 -21 21 0 2 -40 0.5 0.4 15 -20 13 -42 1 -40 0.3 0.6 26 17 -11 -37 1 -40 0.8 0.3 -1 -46 9 -26 1 -40 0.1 0.1 46 -2 16 -30 1 -40 0.4 0.7 10 -28 46 3 2 -40 0.3 0.4 -38 -39 10 3 2 -40 0.6 0.9 10 -33 -42 -44 1 -40 0.3 0.9 3 -46 -40 -42 1 -40 0.8 0.2 2 -33 -7 -44 1 -40 0.2 0.8 15 -39 4 -22 2 -40 0.3 0.1 23 3 -10 -34 1 -40 0.1 0.6 49 -14 24 5 2 -40 0.9 0.5 46 29 -7 -30 1 -40 0.8 0.6 -30 -39 -5 -19 2 -40 0.8 0.5 41 -5 -9 -17 1 -40 0.1 0.9 17 -16 24 16 2 -40 0.5 0.7 -38 -39 11 -24 2 -40 0.6 0.9 33 -43 42 -42 2 -40 0.1 0.4 -28 -42 44 -6 2 -40 0.2 0.4 -5 -22 -27 -43 1 -40 0.7 0.8 39 -4 -9 -20 1 -40 0.4 0.7 33 -21 25 21 2 -40 0.8 0.9 30 15 44 -15 2 -40 0.5 0.1 13 7 16 -15 1 -40 0.8 0.8 8 -40 32 5 2 -40 0.2 0.3 -40 -47 37 22 2 -40 0.4 0.2 -11 -34 1 -12 2 -40 0.3 0.6 42 15 44 7 2 -40 0.3 0.5 -15 -34 32 -43 2 -40 0.6 0.7 -16 -29 23 18 2 -40 0.6 0.9 -6 -31 -16 -29 2 -40 0.6 0.5 10 7 19 -13 1 -40 0.1 0.4 31 -30 -22 -28 1 -40 0.2 0.1 28 10 -9 -41 1 -40 0.6 0.4 -24 -33 50 10 2 -40 0.2 0.1 37 15 38 -50 1 -40 0.3 0.1 -8 -29 50 -41 1 -40 0.1 0.9 41 5 -35 -49 1 -40 0.6 0.9 -2 -27 42 -19 2 -40 0.4 0.4 38 -38 -26 -43 1 -40 0.5 0.2 9 -12 47 -46 1 -40 0.7 0.9 46 -45 -6 -16 1 -40 0.8 0.8 33 16 20 -5 1 -40 0.1 0.8 46 1 -9 -50 1 -40 0.3 0.1 49 -4 -19 -38 1 -40 0.4 0.3 7 -20 38 -45 1 -40 0.2 0.4 34 24 17 -47 1 -40 0.9 0.3 16 -8 -24 -49 1 -40 0.3 0.9 40 32 -2 -9 1 -40 0.4 0.1 -24 -37 43 -48 2 -40 0.3 0.5 5 -1 -32 -44 1 -40 0.3 0.8 35 33 19 12 1 -40 0.4 0.4 46 -20 25 -44 1 -40 0.2 0.2 -31 -37 15 6 2 -40 0.4 0.6 4 -15 -30 -42 1 -40 0.9 0.6 -34 -44 -34 -50 2 -40 0.2 0.2 49 31 -21 -27 1 -40 0.4 0.4 2 -24 48 -30 2 -40 0.9 0.5 33 32 49 11 1 -40 0.5 0.9 19 -19 46 31 2 -40 0.4 0.2 33 30 -26 -27 1 -40 0.2 0.2 10 -39 47 5 2 -40 0.6 0.2 50 -9 49 2 1 -40 0.8 0.9 26 0 11 -17 1 -40 0.4 0.2 25 -46 34 33 2 -40 0.6 0.7 -21 -23 43 30 2 -40 0.3 0.5 -22 -36 27 7 2 -40 0.2 0.9 -9 -19 1 -41 2 -40 0.9 0.4 21 10 -35 -44 1 -40 0.3 0.3 19 0 25 -37 1 -40 0.7 0.6 21 1 10 -21 1 -40 0.7 0.2 11 1 -10 -41 1 -40 0.7 0.4 22 -1 36 -39 1 -40 0.6 0.5 -2 -19 37 -40 2 -40 0.9 0.6 11 -20 42 -3 2 -40 0.7 0.5 42 -45 28 -35 1 -40 0.3 0.6 8 -12 37 -47 2 -40 0.5 0.7 43 23 -2 -18 1 -40 0.5 0.3 -34 -50 -42 -44 2 -40 0.2 0.6 -3 -26 -15 -22 1 -40 0.3 0.8 31 -28 35 -46 2 -40 0.5 0.5 32 -11 -8 -25 1 -40 0.5 0.4 44 -3 23 -21 1 -40 0.3 0.9 -23 -40 28 7 2 -40 0.9 0.6 23 -48 16 -9 1 -40 0.2 0.8 36 0 28 3 2 -40 0.6 0.1 -6 -39 49 30 2 -40 0.7 0.9 32 9 
44 -3 2 -40 0.6 0.6 49 -13 -17 -21 1 -40 0.7 0.5 16 -32 11 0 1 -40 0.6 0.6 21 -26 44 24 2 -40 0.2 0.2 34 -47 19 11 2 -40 0.4 0.5 48 -25 -17 -42 1 -40 0.2 0.8 29 -17 44 -20 2 -40 0.7 0.8 11 -5 19 14 2 -40 0.2 0.7 -20 -44 6 -8 2 -40 0.4 0.2 24 22 -25 -41 1 -40 0.6 0.6 -15 -37 1 -1 2 -40 0.2 0.2 14 -1 22 -29 1 -40 0.1 0.7 32 5 28 -19 2 -40 0.8 0.9 -36 -37 -7 -31 2 -40 0.9 0.5 12 -26 19 -12 2 -40 0.2 0.9 4 -49 49 -15 2 -40 0.8 0.8 24 -34 22 18 2 -40 0.6 0.7 31 7 -20 -29 1 -40 0.5 0.8 -10 -40 -17 -45 2 -40 0.8 0.4 34 -39 -18 -21 1 -40 0.5 0.3 30 19 6 -13 1 -40 0.4 0.4 18 8 -7 -48 1 -40 0.2 0.9 -30 -49 21 16 2 -40 0.8 0.6 -12 -21 12 -42 2 -40 0.5 0.3 -22 -41 -8 -35 2 -40 0.2 0.6 -7 -35 3 -11 2 -40 0.7 0.4 46 -25 0 -43 1 -40 0.9 0.3 1 -1 48 -49 1 -40 0.4 0.9 43 -10 -23 -49 1 -40 0.3 0.2 2 -26 47 -11 2 -40 0.4 0.9 -19 -38 30 -21 2 -40 0.3 0.1 37 6 47 18 2 -40 0.9 0.2 3 -12 -23 -24 1 -40 0.8 0.3 50 45 46 16 1 -40 0.1 0.2 47 35 10 -19 1 -40 0.9 0.5 48 -28 49 27 2 -40 0.3 0.4 28 -28 48 28 2 -40 0.9 0.3 32 20 37 -37 1 -40 0.4 0.1 -16 -18 31 -6 2 -40 0.8 0.9 -8 -50 14 -20 2 -40 0.2 0.3 49 -2 38 -45 1 -40 0.4 0.7 36 33 -30 -31 1 -40 0.7 0.1 20 -7 -15 -41 1 -40 0.1 0.4 9 -43 10 -24 2 -40 0.3 0.2 31 -6 36 -22 1 -40 0.9 0.8 -18 -25 37 -37 2 -40 0.9 0.3 47 3 -4 -6 1 -40 0.4 0.6 38 9 29 10 1 -40 0.5 0.4 43 -24 39 -35 1 -40 0.6 0.2 29 -43 26 -44 1 -40 0.7 0.4 1 -34 37 22 2 -40 0.5 0.7 38 26 32 9 1 -40 0.2 0.9 22 -48 36 -48 2 -40 0.2 0.1 12 6 50 42 2 -40 0.2 0.9 29 -24 45 16 2 -40 0.5 0.9 -26 -39 -10 -23 2 -40 0.6 0.7 8 -37 46 13 2 -40 0.8 0.5 36 -25 32 -36 2 -40 0.2 0.7 29 12 -3 -41 1 -40 0.4 0.6 50 -47 34 25 2 -40 0.4 0.3 0 -39 24 15 2 -40 0.8 0.3 15 -46 27 -4 1 -40 0.1 0.9 46 25 39 -25 1 -40 0.6 0.2 20 4 30 23 2 -40 0.9 0.8 39 33 49 -1 1 -41 0.8 0.3 49 18 49 -9 1 -41 0.2 0.1 -49 -50 -2 -24 2 -41 0.8 0.7 3 -20 18 4 2 -41 0.8 0.5 31 -7 -18 -43 1 -41 0.7 0.6 35 12 32 1 1 -41 0.1 0.9 44 -41 33 -11 2 -41 0.2 0.2 -6 -24 26 -45 2 -41 0.9 0.3 11 -40 16 -15 1 -41 0.9 0.8 41 -46 9 2 1 -41 0.1 0.7 44 -41 -23 -49 1 -41 0.6 0.1 50 11 44 40 2 -41 0.9 0.6 1 -38 -24 -36 1 -41 0.8 0.8 -20 -45 50 19 2 -41 0.5 0.4 21 17 33 18 2 -41 0.7 0.5 -15 -24 48 0 2 -41 0.9 0.5 -6 -34 8 -20 2 -41 0.3 0.5 38 -24 26 6 2 -41 0.9 0.2 1 -29 -21 -22 1 -41 0.2 0.4 42 16 14 1 1 -41 0.8 0.8 -9 -26 43 24 2 -41 0.9 0.1 49 -8 29 6 1 -41 0.5 0.7 27 -6 7 -41 2 -41 0.9 0.6 -40 -43 22 -2 2 -41 0.1 0.6 44 6 26 -36 1 -41 0.6 0.8 -4 -14 34 -26 2 -41 0.7 0.9 17 14 -3 -10 1 -41 0.5 0.9 46 -25 35 -9 2 -41 0.1 0.8 -27 -43 48 -47 2 -41 0.7 0.7 23 -28 0 -18 1 -41 0.2 0.7 44 -5 16 -22 2 -41 0.1 0.4 -12 -27 37 -23 2 -41 0.5 0.5 16 -49 28 -48 1 -41 0.5 0.2 41 25 21 -6 1 -41 0.3 0.8 50 41 -17 -41 1 -41 0.3 0.4 27 -3 43 38 2 -41 0.8 0.4 -25 -43 23 -35 2 -41 0.4 0.1 25 13 45 -29 1 -41 0.6 0.5 44 21 30 10 1 -41 0.7 0.7 0 -37 48 43 2 -41 0.3 0.9 -14 -22 36 -12 2 -41 0.5 0.6 45 10 -33 -47 1 -41 0.3 0.5 39 -22 -14 -17 1 -41 0.6 0.1 19 13 -15 -25 1 -41 0.6 0.3 7 -20 12 -25 1 -41 0.9 0.9 17 -26 -7 -27 1 -41 0.8 0.4 17 -30 48 44 2 -41 0.6 0.7 -22 -46 26 -25 2 -41 0.8 0.1 -2 -14 15 -26 1 -41 0.2 0.8 34 16 42 14 2 -41 0.8 0.8 -17 -32 50 15 2 -41 0.4 0.5 44 -4 13 -41 1 -41 0.5 0.4 41 -33 34 -5 1 -41 0.2 0.8 22 -39 49 -4 2 -41 0.9 0.4 -13 -46 34 -36 2 -41 0.4 0.1 29 -20 26 5 2 -41 0.5 0.8 10 -41 29 7 2 -41 0.7 0.5 10 -12 43 22 2 -41 0.1 0.7 36 14 39 2 2 -41 0.8 0.6 33 -12 6 -47 1 -41 0.8 0.5 23 -22 17 8 2 -41 0.7 0.9 16 -22 13 -10 2 -41 0.5 0.6 -12 -24 -8 -32 2 -41 0.5 0.2 24 5 2 -1 1 -41 0.1 0.6 26 -36 -39 -50 2 -41 0.6 0.8 18 -43 49 34 2 -41 0.4 0.9 8 7 49 0 2 -41 0.4 0.9 -2 
-16 39 16 2 -41 0.5 0.8 48 -43 -2 -24 1 -41 0.9 0.2 0 -35 34 17 2 -41 0.4 0.9 19 -48 -6 -23 2 -41 0.9 0.3 -18 -45 27 19 2 -41 0.8 0.7 8 -6 22 -49 2 -41 0.9 0.9 43 -25 38 1 2 -41 0.7 0.9 12 -8 20 14 2 -41 0.2 0.8 48 29 18 -11 1 -41 0.8 0.3 29 -3 30 5 1 -41 0.9 0.4 45 42 -18 -33 1 -41 0.5 0.6 46 -30 -7 -40 1 -41 0.4 0.4 36 32 -16 -43 1 -41 0.7 0.2 31 27 42 20 1 -41 0.9 0.3 42 35 26 14 1 -41 0.8 0.3 -1 -19 -9 -38 1 -41 0.6 0.2 23 -22 48 -44 1 -41 0.9 0.6 -13 -48 -16 -28 2 -41 0.6 0.7 0 -41 35 21 2 -41 0.7 0.6 34 -41 17 -20 1 -41 0.7 0.8 23 -5 44 13 2 -41 0.7 0.3 -15 -21 42 27 2 -41 0.3 0.4 35 -7 -2 -42 1 -41 0.3 0.4 41 -37 47 -50 2 -41 0.6 0.9 -29 -40 42 36 2 -41 0.4 0.9 26 -5 47 13 2 -41 0.6 0.1 50 -27 35 -8 1 -41 0.4 0.9 -26 -38 -42 -47 1 -41 0.8 0.8 -7 -49 8 -36 2 -41 0.1 0.1 35 -25 5 -47 1 -41 0.1 0.9 -2 -7 0 -48 2 -41 0.9 0.8 47 3 13 0 1 -41 0.8 0.5 -27 -37 -23 -26 2 -41 0.3 0.5 37 9 36 -18 1 -41 0.8 0.9 18 -20 31 -17 2 -41 0.7 0.1 47 -41 20 17 2 -41 0.4 0.9 42 -39 21 -7 2 -41 0.6 0.1 46 21 44 20 1 -41 0.1 0.7 26 -23 12 -38 2 -41 0.7 0.8 40 30 -13 -37 1 -41 0.5 0.4 36 -39 -4 -8 1 -41 0.4 0.5 44 21 18 -5 1 -41 0.9 0.4 24 -39 23 -50 1 -41 0.4 0.9 40 -21 -36 -47 1 -41 0.3 0.9 11 -29 7 -5 2 -41 0.9 0.6 38 -3 18 8 1 -41 0.2 0.9 41 16 -13 -37 1 -41 0.8 0.2 14 -47 24 14 2 -41 0.7 0.2 25 -29 12 -13 1 -41 0.8 0.1 11 -19 8 -40 1 -41 0.2 0.2 -11 -15 24 -45 1 -41 0.6 0.7 -27 -39 -3 -9 2 -41 0.7 0.8 31 -7 36 -15 2 -41 0.7 0.1 25 -6 22 -23 1 -41 0.2 0.4 7 -3 48 -27 2 -41 0.6 0.4 -4 -11 36 -28 2 -41 0.7 0.8 47 -35 27 -19 2 -41 0.4 0.8 -14 -48 -34 -36 1 -41 0.9 0.9 32 -4 23 -22 1 -41 0.1 0.9 8 -18 -26 -31 1 -41 0.3 0.4 3 -35 43 -4 2 -41 0.7 0.4 45 18 -27 -42 1 -41 0.6 0.3 40 -7 48 0 1 -41 0.4 0.9 9 -1 13 -6 2 -41 0.6 0.6 1 -34 18 -5 2 -41 0.3 0.3 10 -4 48 42 2 -41 0.3 0.1 44 1 5 -48 1 -41 0.5 0.1 -45 -50 1 -12 2 -41 0.8 0.8 47 10 43 -2 1 -41 0.3 0.3 11 2 23 8 2 -41 0.1 0.9 -3 -23 0 -45 2 -41 0.2 0.1 28 -22 14 -31 1 -41 0.1 0.9 16 -49 -28 -42 2 -41 0.9 0.2 15 -3 43 -19 1 -41 0.7 0.8 36 -16 33 -36 2 -41 0.2 0.8 -27 -32 35 8 2 -41 0.4 0.2 -21 -23 46 -14 2 -41 0.3 0.6 24 -33 8 -48 2 -41 0.8 0.9 -11 -40 -34 -45 1 -41 0.3 0.8 19 -43 5 -14 2 -41 0.2 0.1 44 -17 24 6 2 -41 0.3 0.2 3 -46 21 -24 2 -41 0.5 0.8 -46 -50 1 -6 2 -41 0.5 0.1 49 -33 36 30 2 -41 0.8 0.7 14 8 1 -11 1 -41 0.3 0.1 49 -27 -29 -34 1 -41 0.5 0.9 -8 -19 4 -47 2 -41 0.7 0.1 15 -36 -26 -44 1 -41 0.7 0.8 -16 -31 35 -3 2 -41 0.7 0.8 42 11 3 -9 1 -41 0.5 0.2 -13 -19 -4 -36 1 -41 0.7 0.3 -16 -24 11 -46 2 -41 0.9 0.3 42 32 48 -3 1 -41 0.3 0.7 49 -41 -22 -47 1 -41 0.3 0.8 11 -9 -32 -39 1 -41 0.5 0.4 -4 -17 -7 -10 2 -41 0.1 0.9 13 -1 20 13 2 -41 0.6 0.5 16 13 20 11 2 -41 0.7 0.5 8 -40 -6 -13 1 -41 0.2 0.8 -22 -43 26 -35 2 -41 0.4 0.1 -8 -33 47 36 2 -41 0.5 0.4 21 -32 30 -48 1 -41 0.7 0.9 0 -39 -8 -27 2 -41 0.6 0.8 8 -30 23 -31 2 -41 0.2 0.1 -8 -48 -22 -30 2 -41 0.6 0.3 36 19 26 4 1 -41 0.1 0.1 30 -22 20 -43 1 -41 0.8 0.1 21 -35 -36 -49 1 -41 0.2 0.5 27 22 34 -11 1 -41 0.7 0.5 48 -23 13 11 1 -41 0.9 0.7 48 30 -48 -50 1 -41 0.1 0.7 16 -5 50 -14 2 -41 0.5 0.8 43 -37 18 -4 2 -41 0.1 0.2 -1 -4 4 -13 1 -41 0.8 0.8 -16 -42 25 -4 2 -41 0.3 0.1 49 37 42 20 1 -41 0.9 0.9 49 31 50 -48 1 -41 0.8 0.5 20 -50 -35 -40 1 -41 0.6 0.9 45 -9 -4 -41 1 -41 0.9 0.6 -28 -46 9 -32 2 -41 0.1 0.6 33 -45 50 -43 2 -41 0.4 0.6 1 -20 16 -31 2 -41 0.4 0.4 47 -23 44 -39 1 -41 0.5 0.1 16 -25 42 -21 2 -41 0.8 0.9 12 -33 19 -35 2 -41 0.5 0.6 -5 -36 14 -19 2 -41 0.4 0.6 -22 -39 32 -42 2 -41 0.7 0.4 45 -29 -20 -35 1 -41 0.7 0.9 -35 -47 35 12 2 -41 0.7 0.7 14 9 -19 -45 1 -41 0.5 0.4 5 -37 4 3 
2 -41 0.3 0.3 -7 -34 31 -13 2 -41 0.6 0.5 -9 -45 -1 -27 2 -41 0.8 0.3 -10 -25 -21 -25 1 -41 0.3 0.6 2 -47 -18 -45 1 -41 0.4 0.9 25 -1 40 -50 2 -41 0.2 0.2 -14 -30 10 -2 2 -41 0.3 0.9 12 -13 13 -16 2 -41 0.1 0.1 36 -29 46 -39 1 -41 0.9 0.2 34 -3 -21 -32 1 -41 0.8 0.8 -7 -19 8 -22 2 -41 0.4 0.4 36 -22 -22 -49 1 -41 0.3 0.9 40 -13 32 -39 2 -41 0.9 0.3 43 25 48 -41 1 -41 0.8 0.3 16 -30 15 -47 1 -41 0.7 0.8 29 -49 38 -22 2 -41 0.9 0.3 30 -6 43 37 2 -41 0.8 0.8 32 -39 34 -14 2 -41 0.3 0.5 28 -24 14 -21 1 -41 0.9 0.8 19 -25 45 -18 2 -41 0.9 0.8 45 -22 -12 -28 1 -41 0.2 0.6 39 -21 19 10 2 -41 0.4 0.6 37 36 -7 -12 1 -41 0.6 0.7 -3 -4 -2 -25 1 -41 0.6 0.3 -44 -50 19 18 2 -41 0.1 0.1 39 36 -3 -13 1 -41 0.8 0.7 17 -50 16 6 1 -41 0.4 0.3 41 2 -27 -41 1 -41 0.9 0.8 47 5 -11 -32 1 -41 0.7 0.7 37 34 41 -30 1 -41 0.8 0.1 34 33 19 -35 1 -41 0.5 0.5 19 -45 33 -40 2 -41 0.8 0.7 45 -8 -4 -13 1 -41 0.9 0.2 4 -44 39 17 2 -41 0.2 0.3 12 -20 -6 -46 2 -41 0.6 0.5 -8 -10 -26 -42 1 -41 0.3 0.6 -3 -40 44 -47 2 -41 0.9 0.7 18 -23 17 -10 1 -41 0.9 0.6 37 -2 35 4 1 -41 0.7 0.3 0 -5 8 -16 1 -41 0.2 0.4 -29 -35 22 -50 2 -41 0.7 0.9 47 -22 32 -20 2 -41 0.5 0.9 25 -9 6 -45 1 -41 0.5 0.9 34 27 -4 -45 1 -41 0.4 0.6 23 -48 23 2 2 -41 0.5 0.6 21 -5 -2 -33 1 -41 0.6 0.8 36 -37 14 -42 1 -41 0.2 0.4 23 16 -7 -14 1 -41 0.8 0.1 -11 -48 49 -7 2 -41 0.3 0.9 29 -26 36 -21 2 -41 0.1 0.1 38 -32 4 -42 1 -41 0.6 0.1 37 -24 -41 -48 1 -41 0.9 0.4 37 -47 22 -23 1 -41 0.7 0.9 6 -25 35 -6 2 -41 0.6 0.8 44 -32 45 -1 2 -41 0.7 0.6 42 -10 -12 -38 1 -41 0.9 0.2 -23 -30 4 -35 2 -41 0.6 0.3 35 -22 -30 -43 1 -41 0.2 0.3 6 0 3 -25 1 -41 0.8 0.8 22 -32 18 -41 1 -41 0.4 0.2 -14 -21 9 -33 2 -41 0.2 0.4 42 -15 -5 -12 1 -41 0.5 0.4 44 -42 -19 -40 1 -41 0.5 0.2 28 3 20 -2 1 -41 0.1 0.2 8 -9 -28 -35 1 -41 0.9 0.4 -5 -15 25 19 2 -41 0.9 0.5 14 -29 -3 -22 1 -41 0.3 0.3 -16 -33 41 32 2 -41 0.8 0.3 -10 -45 10 -35 2 -41 0.8 0.3 42 -45 22 -5 1 -41 0.8 0.9 47 20 25 -4 1 -41 0.1 0.8 -20 -38 7 -30 2 -41 0.8 0.6 37 10 46 16 2 -41 0.2 0.3 37 -35 41 -49 2 -41 0.3 0.6 -14 -17 -1 -13 2 -41 0.6 0.5 34 -33 -34 -46 1 -41 0.7 0.5 26 -1 41 -48 1 -41 0.5 0.5 47 -26 12 -46 1 -41 0.5 0.1 38 -42 10 0 1 -41 0.4 0.4 16 -48 4 -19 2 -41 0.3 0.2 -24 -26 35 22 2 -41 0.8 0.2 45 42 12 -44 1 -41 0.1 0.7 23 -8 30 -34 2 -41 0.3 0.3 30 21 43 -43 1 -41 0.7 0.7 19 -42 8 -44 1 -41 0.9 0.3 48 17 20 -30 1 -41 0.3 0.3 50 21 -20 -40 1 -41 0.6 0.4 49 -4 17 2 1 -41 0.1 0.4 23 -22 36 15 2 -41 0.9 0.4 47 40 38 13 1 -41 0.8 0.2 32 -50 -27 -48 1 -41 0.8 0.1 23 -26 42 -18 1 -41 0.8 0.4 46 -5 6 -30 1 -41 0.9 0.5 39 27 -20 -45 1 -41 0.1 0.9 29 10 38 -37 2 -41 0.2 0.4 7 -35 50 8 2 -41 0.9 0.1 -3 -41 42 -21 1 -41 0.9 0.8 37 29 46 32 2 -41 0.3 0.9 44 27 17 -13 1 -41 0.3 0.8 10 -17 27 -9 2 -41 0.7 0.6 46 26 28 -50 1 -41 0.3 0.2 20 -4 32 -30 1 -41 0.3 0.8 42 -21 44 -19 2 -41 0.6 0.5 30 -10 36 -36 1 -42 0.8 0.2 10 -15 44 -20 1 -42 0.4 0.8 25 13 38 -29 1 -42 0.5 0.3 42 37 36 5 1 -42 0.9 0.7 -34 -46 49 19 2 -42 0.3 0.6 -6 -24 4 -1 2 -42 0.7 0.6 20 13 35 -28 1 -42 0.6 0.4 47 -31 42 3 2 -42 0.6 0.7 -25 -28 -13 -14 2 -42 0.5 0.9 -37 -42 34 -24 2 -42 0.6 0.6 35 -26 -34 -45 1 -42 0.3 0.7 16 9 26 15 2 -42 0.7 0.8 49 -31 24 -4 1 -42 0.8 0.7 22 13 -28 -33 1 -42 0.3 0.8 10 -42 50 29 2 -42 0.7 0.2 37 -20 -6 -14 1 -42 0.1 0.6 35 -16 -24 -25 2 -42 0.3 0.9 4 -26 -29 -38 1 -42 0.3 0.1 -26 -50 -44 -46 2 -42 0.4 0.5 -15 -22 43 -37 2 -42 0.2 0.2 18 -25 13 -17 2 -42 0.1 0.3 45 -34 29 -2 2 -42 0.5 0.1 -28 -49 -9 -45 2 -42 0.3 0.3 44 -40 2 -22 1 -42 0.5 0.5 34 -13 49 21 2 -42 0.4 0.3 -14 -23 17 -22 2 -42 0.2 0.4 31 -4 -11 -27 1 -42 0.9 
0.2 14 -49 10 -43 1 -42 0.9 0.2 1 -50 24 11 2 -42 0.6 0.7 26 -31 19 -46 1 -42 0.5 0.1 5 -30 30 4 2 -42 0.9 0.3 -26 -36 15 -14 2 -42 0.9 0.7 48 -28 37 -12 1 -42 0.1 0.3 23 -35 13 -1 2 -42 0.7 0.4 15 -8 40 -23 1 -42 0.7 0.5 -10 -36 3 -6 2 -42 0.5 0.9 41 -30 22 -15 2 -42 0.7 0.5 30 -9 17 13 2 -42 0.4 0.2 23 -1 -38 -50 1 -42 0.1 0.2 -22 -36 50 -34 2 -42 0.7 0.1 22 15 36 2 1 -42 0.9 0.8 49 -24 12 7 1 -42 0.1 0.3 6 -20 7 -41 1 -42 0.3 0.8 21 -48 49 44 2 -42 0.7 0.3 -46 -47 11 -48 2 -42 0.4 0.4 7 -19 23 9 2 -42 0.5 0.3 39 -45 -26 -48 1 -42 0.9 0.1 -20 -49 17 -48 1 -42 0.6 0.3 38 -13 0 -31 1 -42 0.7 0.5 -6 -48 46 -32 2 -42 0.2 0.3 46 33 42 -23 1 -42 0.5 0.1 36 22 -12 -28 1 -42 0.7 0.3 -14 -18 44 34 2 -42 0.4 0.3 31 13 -17 -35 1 -42 0.1 0.1 38 -42 -18 -46 1 -42 0.6 0.3 39 28 -15 -31 1 -42 0.6 0.3 4 -10 34 9 2 -42 0.5 0.8 28 26 39 -46 1 -42 0.6 0.4 16 4 12 -2 1 -42 0.7 0.8 31 -22 10 -36 1 -42 0.7 0.5 27 -15 47 -50 1 -42 0.4 0.3 -20 -27 -2 -37 2 -42 0.8 0.7 8 -24 -32 -49 1 -42 0.8 0.2 5 -7 34 -23 1 -42 0.4 0.8 39 19 -7 -40 1 -42 0.4 0.8 29 -50 35 -24 2 -42 0.8 0.6 41 8 -1 -29 1 -42 0.7 0.5 42 33 -31 -36 1 -42 0.1 0.1 -1 -34 46 -6 2 -42 0.3 0.2 7 -42 48 -18 2 -42 0.4 0.1 1 -8 -4 -23 1 -42 0.8 0.1 37 -24 43 -18 1 -42 0.7 0.9 12 -26 37 27 2 -42 0.5 0.9 48 -2 31 -41 2 -42 0.9 0.8 -24 -39 2 -39 2 -42 0.9 0.1 -5 -8 43 -39 1 -42 0.4 0.9 15 -40 27 0 2 -42 0.4 0.7 47 -7 40 -47 2 -42 0.8 0.2 0 -13 -24 -33 1 -42 0.4 0.4 46 39 -27 -40 1 -42 0.6 0.2 45 33 39 -47 1 -42 0.2 0.2 -46 -47 24 23 2 -42 0.7 0.6 46 -1 -20 -26 1 -42 0.8 0.2 -22 -29 13 -25 2 -42 0.5 0.1 50 -22 32 -35 1 -42 0.4 0.3 30 -1 7 3 1 -42 0.4 0.2 19 -34 -21 -28 1 -42 0.4 0.5 32 17 34 -36 1 -42 0.8 0.7 43 3 -9 -21 1 -42 0.4 0.2 44 -4 8 -40 1 -42 0.6 0.4 38 22 26 7 1 -42 0.1 0.7 16 -5 -7 -43 1 -42 0.6 0.7 14 -34 7 2 2 -42 0.7 0.2 35 -34 45 -42 1 -42 0.3 0.6 35 22 49 -36 1 -42 0.5 0.1 16 -35 -30 -45 1 -42 0.4 0.7 1 -45 -25 -27 2 -42 0.3 0.4 0 -23 -24 -46 1 -42 0.1 0.3 45 -26 -6 -38 1 -42 0.6 0.7 14 -48 49 7 2 -42 0.8 0.6 -17 -19 47 19 2 -42 0.3 0.8 -2 -50 37 -29 2 -42 0.8 0.7 49 -35 39 19 2 -42 0.4 0.6 7 -3 9 -21 1 -42 0.7 0.5 33 28 42 7 1 -42 0.7 0.9 40 16 -17 -20 1 -42 0.3 0.3 -10 -12 33 -19 2 -42 0.3 0.7 49 22 -26 -35 1 -42 0.5 0.8 23 -12 -39 -48 1 -42 0.8 0.6 -10 -37 12 -49 2 -42 0.3 0.2 -22 -33 46 28 2 -42 0.6 0.2 -5 -29 9 6 2 -42 0.1 0.3 24 -47 21 -45 2 -42 0.9 0.8 34 -24 24 13 1 -42 0.9 0.8 -13 -41 -11 -20 1 -42 0.7 0.2 35 -18 -28 -44 1 -42 0.9 0.5 43 30 -11 -19 1 -42 0.7 0.5 17 11 15 -23 1 -42 0.8 0.8 16 9 50 21 2 -42 0.8 0.1 41 -49 21 -24 1 -42 0.1 0.4 -37 -49 13 -18 2 -42 0.9 0.3 41 12 10 7 1 -42 0.3 0.5 38 -18 48 -45 1 -42 0.4 0.4 45 -29 -4 -20 1 -42 0.3 0.2 44 26 -32 -45 1 -42 0.2 0.4 16 4 47 20 2 -42 0.2 0.5 -21 -48 33 -20 2 -42 0.2 0.5 42 -13 24 -17 2 -42 0.6 0.8 7 -36 48 -1 2 -42 0.7 0.5 -3 -18 19 15 2 -42 0.3 0.3 -13 -49 23 -36 2 -42 0.2 0.4 0 -23 3 -12 2 -42 0.4 0.2 -11 -31 -16 -29 2 -42 0.7 0.6 49 -28 15 -48 1 -42 0.1 0.5 4 -4 -6 -43 1 -42 0.5 0.1 14 -30 24 -50 1 -42 0.4 0.6 -11 -30 47 -22 2 -42 0.1 0.4 39 33 -3 -44 1 -42 0.9 0.6 49 -35 -18 -29 1 -42 0.5 0.5 48 -20 35 -31 1 -42 0.5 0.5 7 1 33 6 2 -42 0.4 0.3 -34 -46 44 -33 2 -42 0.8 0.7 30 26 -19 -29 1 -42 0.6 0.3 42 36 36 22 1 -42 0.8 0.7 30 24 21 -33 1 -42 0.7 0.5 30 3 32 -12 1 -42 0.1 0.1 -9 -34 -8 -22 2 -42 0.4 0.8 41 -1 50 -9 2 -42 0.3 0.8 -23 -49 32 -1 2 -42 0.8 0.3 14 -8 11 -7 1 -42 0.7 0.4 30 -27 31 -19 1 -42 0.7 0.8 -21 -31 -11 -23 2 -42 0.3 0.8 41 -21 20 -43 2 -42 0.5 0.4 46 -5 24 -39 1 -42 0.9 0.1 25 12 44 41 2 -42 0.3 0.4 15 -32 38 -39 2 -42 0.8 0.9 32 -38 -8 -12 1 -42 0.5 
0.6 21 -6 -32 -41 1 -42 0.5 0.4 19 12 -3 -32 1 -42 0.8 0.8 16 -25 31 -37 2 -42 0.9 0.8 -26 -38 36 7 2 -42 0.7 0.4 2 -19 6 -30 1 -42 0.3 0.7 37 -47 29 -41 2 -42 0.4 0.3 -5 -14 13 -16 2 -42 0.7 0.7 -17 -39 24 11 2 -42 0.2 0.1 30 -33 -25 -50 1 -42 0.6 0.1 29 13 24 -19 1 -42 0.2 0.4 -22 -35 -19 -21 1 -42 0.1 0.8 3 -34 30 -21 2 -42 0.6 0.4 19 9 19 4 1 -42 0.1 0.5 25 -8 38 -15 2 -42 0.9 0.1 6 -41 16 -7 1 -42 0.8 0.6 33 -33 31 -19 1 -42 0.7 0.5 33 0 34 -19 1 -42 0.2 0.3 -13 -39 -22 -40 2 -42 0.8 0.5 28 -11 -30 -50 1 -42 0.3 0.1 33 -31 8 0 2 -42 0.3 0.3 5 -48 -4 -36 2 -42 0.2 0.6 27 -15 50 2 2 -42 0.5 0.5 50 -10 -13 -14 1 -42 0.7 0.2 -31 -38 -43 -47 1 -42 0.4 0.8 11 -5 -15 -37 1 -42 0.8 0.9 11 -47 40 9 2 -42 0.9 0.3 21 -6 17 -13 1 -42 0.9 0.4 1 -17 11 7 2 -42 0.4 0.6 10 -44 40 -39 2 -42 0.6 0.2 48 11 22 -37 1 -42 0.8 0.7 22 -18 20 8 2 -42 0.9 0.5 11 -26 40 -13 2 -42 0.3 0.9 9 -49 33 15 2 -42 0.9 0.9 10 -36 22 -35 2 -42 0.6 0.8 8 -25 -11 -38 1 -42 0.9 0.8 -41 -46 38 7 2 -42 0.5 0.8 50 -39 -30 -48 1 -42 0.1 0.4 16 -15 31 -35 2 -42 0.1 0.7 46 -27 9 -6 2 -42 0.5 0.9 15 -26 28 -37 2 -42 0.3 0.2 36 -35 4 -34 1 -42 0.8 0.8 -24 -27 34 4 2 -42 0.2 0.8 39 -29 28 0 2 -42 0.6 0.2 47 -32 33 12 2 -42 0.7 0.5 18 -40 5 -34 1 -42 0.8 0.6 40 -37 -19 -34 1 -42 0.7 0.9 15 -14 -23 -29 1 -42 0.2 0.8 16 8 23 -2 1 -42 0.4 0.4 34 23 43 -5 1 -42 0.1 0.8 4 -12 8 -34 2 -42 0.5 0.9 -43 -49 43 -4 2 -42 0.1 0.4 7 -11 18 8 2 -42 0.5 0.1 11 -49 -25 -49 1 -42 0.5 0.7 41 32 48 -36 1 -42 0.2 0.2 -6 -32 8 -33 2 -42 0.4 0.5 -2 -28 -6 -26 1 -42 0.9 0.7 37 -17 38 -24 1 -42 0.5 0.6 27 -5 13 -3 1 -42 0.3 0.3 6 -7 15 -39 1 -42 0.8 0.2 46 18 38 -43 1 -42 0.9 0.5 46 39 1 -8 1 -42 0.5 0.9 48 20 -13 -30 1 -42 0.3 0.3 42 35 30 -35 1 -42 0.2 0.3 14 -17 49 -32 1 -42 0.6 0.7 42 -27 50 -1 2 -42 0.9 0.7 40 -40 29 -20 1 -42 0.7 0.8 -32 -38 -13 -27 2 -42 0.8 0.5 11 -13 20 8 2 -42 0.7 0.6 30 10 25 -50 1 -42 0.5 0.2 16 -32 -42 -46 1 -42 0.5 0.4 21 -27 8 -48 1 -42 0.6 0.4 34 -5 -9 -14 1 -42 0.8 0.6 32 4 17 -50 1 -42 0.4 0.8 2 -21 -24 -33 1 -42 0.6 0.1 22 13 5 -48 1 -42 0.2 0.8 9 -24 45 -48 2 -42 0.1 0.3 39 31 10 -31 1 -42 0.7 0.2 11 -6 37 -46 1 -42 0.7 0.8 50 22 -13 -46 1 -42 0.9 0.3 -10 -35 25 22 2 -42 0.8 0.2 -7 -46 11 10 2 -42 0.6 0.9 -15 -36 31 10 2 -42 0.6 0.2 -3 -48 31 -36 2 -42 0.8 0.8 -16 -48 18 11 2 -42 0.5 0.1 31 0 -27 -38 1 -42 0.4 0.8 -18 -39 48 30 2 -42 0.2 0.2 29 -35 8 -24 1 -42 0.4 0.6 26 21 29 -26 1 -42 0.2 0.5 -25 -42 -10 -28 2 -42 0.3 0.3 -27 -40 18 0 2 -42 0.4 0.3 9 -48 46 38 2 -42 0.1 0.4 -15 -39 18 -11 2 -42 0.6 0.6 -45 -47 36 -14 2 -42 0.1 0.5 15 0 10 9 2 -42 0.8 0.8 12 -50 50 -46 2 -42 0.9 0.5 44 13 7 -11 1 -42 0.8 0.5 49 -45 43 32 2 -42 0.5 0.5 32 -4 36 -32 1 -42 0.4 0.9 -12 -15 -14 -32 1 -42 0.1 0.6 21 -4 9 -8 2 -42 0.2 0.4 -28 -43 39 34 2 -42 0.6 0.3 14 -22 18 11 2 -42 0.1 0.3 -28 -44 -16 -24 2 -42 0.7 0.8 -24 -50 29 -21 2 -42 0.4 0.1 -38 -41 26 -30 2 -42 0.3 0.8 48 25 34 -7 1 -42 0.3 0.1 -4 -23 36 28 2 -42 0.8 0.8 -9 -32 43 -27 2 -42 0.1 0.2 30 20 38 -30 1 -42 0.5 0.2 36 -28 41 -46 1 -42 0.8 0.2 46 -38 49 35 2 -42 0.6 0.1 15 11 4 -7 1 -42 0.1 0.2 43 23 18 -1 1 -42 0.4 0.6 33 -22 3 -18 2 -42 0.1 0.9 12 4 2 -5 1 -42 0.3 0.3 -9 -23 -27 -29 1 -42 0.3 0.4 50 17 41 19 2 -42 0.5 0.3 -6 -42 -20 -38 1 -42 0.5 0.2 24 -47 17 -49 1 -42 0.9 0.7 -12 -32 25 -25 2 -42 0.8 0.1 48 14 22 13 1 -42 0.9 0.8 38 -27 -7 -45 1 -42 0.3 0.2 -21 -43 -21 -27 2 -42 0.4 0.1 23 -32 37 17 2 -42 0.7 0.4 24 7 44 -6 1 -42 0.8 0.4 16 9 44 -43 1 -42 0.1 0.4 17 -40 17 -16 2 -42 0.9 0.6 -24 -26 19 -15 2 -42 0.4 0.1 20 -35 22 8 2 -42 0.5 0.4 41 14 8 -19 1 -42 0.3 0.3 
49 5 24 11 1 -42 0.5 0.8 39 21 -6 -33 1 -42 0.7 0.6 46 -7 -31 -36 1 -42 0.5 0.6 2 -9 1 -4 1 -42 0.1 0.7 12 -21 45 8 2 -42 0.6 0.8 -13 -21 36 -7 2 -42 0.8 0.8 41 12 -33 -39 1 -42 0.6 0.9 -4 -15 9 -46 2 -42 0.6 0.1 21 -35 35 -19 1 -42 0.5 0.7 -13 -36 43 -18 2 -42 0.5 0.1 0 -34 49 3 2 -42 0.5 0.2 50 6 24 -24 1 -42 0.2 0.5 -17 -19 -33 -48 1 -42 0.6 0.7 -2 -27 23 -26 2 -43 0.8 0.1 43 42 31 -33 1 -43 0.7 0.7 -43 -50 9 -3 2 -43 0.4 0.4 -15 -34 -14 -29 2 -43 0.6 0.6 48 25 27 25 1 -43 0.4 0.8 49 6 21 -39 1 -43 0.4 0.6 14 -22 45 -46 2 -43 0.5 0.6 21 -10 37 28 2 -43 0.9 0.2 35 20 43 22 1 -43 0.8 0.8 -3 -15 40 17 2 -43 0.9 0.8 -18 -22 50 14 2 -43 0.3 0.9 -3 -50 20 -38 2 -43 0.6 0.5 47 -15 35 13 1 -43 0.2 0.2 45 28 34 -20 1 -43 0.8 0.4 42 -27 15 -50 1 -43 0.4 0.1 17 -44 42 33 2 -43 0.6 0.8 14 -32 7 -7 1 -43 0.9 0.1 19 -50 48 -42 1 -43 0.1 0.5 16 -11 6 -42 1 -43 0.9 0.4 32 -8 45 -32 1 -43 0.2 0.5 4 -17 -8 -23 2 -43 0.5 0.9 35 8 10 -37 1 -43 0.3 0.2 18 -41 20 -16 1 -43 0.6 0.9 -13 -47 27 -16 2 -43 0.3 0.1 33 -3 -15 -34 1 -43 0.9 0.4 45 4 46 31 1 -43 0.3 0.7 22 -47 32 19 2 -43 0.5 0.1 23 14 40 5 2 -43 0.6 0.6 10 8 -28 -46 1 -43 0.9 0.4 5 -38 -29 -49 1 -43 0.8 0.4 -41 -47 40 -31 2 -43 0.4 0.2 21 -50 48 17 2 -43 0.3 0.1 34 -43 6 -40 1 -43 0.3 0.1 -27 -50 48 -31 2 -43 0.8 0.9 9 -29 27 -34 2 -43 0.4 0.1 42 -40 -24 -30 1 -43 0.6 0.5 28 -8 34 -25 1 -43 0.3 0.2 8 -10 11 -18 1 -43 0.7 0.4 20 13 12 8 2 -43 0.6 0.2 1 -9 12 -17 1 -43 0.6 0.7 32 -15 -19 -38 1 -43 0.3 0.6 21 -3 -15 -46 1 -43 0.4 0.1 48 37 38 -45 1 -43 0.3 0.6 12 -36 -12 -45 1 -43 0.8 0.1 0 -40 -12 -39 1 -43 0.3 0.7 -15 -49 -38 -42 2 -43 0.4 0.8 -12 -31 45 41 2 -43 0.2 0.2 -19 -49 -32 -49 1 -43 0.7 0.6 -30 -42 -25 -27 1 -43 0.9 0.9 50 10 33 -32 1 -43 0.8 0.6 44 -21 10 -6 1 -43 0.1 0.4 28 -5 -9 -43 1 -43 0.1 0.2 7 4 50 -40 1 -43 0.4 0.3 -36 -49 27 -41 1 -43 0.8 0.3 -1 -18 24 -11 1 -43 0.3 0.8 -12 -14 34 24 2 -43 0.6 0.4 42 -3 38 35 2 -43 0.4 0.1 38 -25 41 21 2 -43 0.9 0.2 11 -3 1 -6 1 -43 0.5 0.9 -3 -14 10 -13 2 -43 0.5 0.2 26 -20 38 -40 1 -43 0.1 0.2 1 -35 15 -50 1 -43 0.3 0.5 -6 -47 27 -30 2 -43 0.8 0.5 2 -37 26 16 2 -43 0.7 0.5 38 -5 1 -47 1 -43 0.3 0.5 -2 -34 27 19 2 -43 0.5 0.7 43 -32 -39 -43 1 -43 0.2 0.2 49 -15 36 -11 1 -43 0.8 0.1 40 -15 -2 -19 1 -43 0.7 0.6 49 42 35 -35 1 -43 0.9 0.9 -11 -28 -5 -30 1 -43 0.1 0.6 45 11 50 22 2 -43 0.5 0.4 6 -40 39 -41 2 -43 0.4 0.1 -5 -14 45 31 2 -43 0.7 0.1 45 -39 -23 -50 1 -43 0.2 0.7 49 12 11 -32 1 -43 0.2 0.9 45 -10 3 -2 2 -43 0.7 0.5 22 -45 41 8 2 -43 0.4 0.2 36 5 7 -37 1 -43 0.9 0.6 39 -45 43 -46 1 -43 0.6 0.3 33 -22 1 -5 2 -43 0.2 0.7 50 -18 31 14 2 -43 0.8 0.6 27 17 26 -10 1 -43 0.5 0.7 50 -30 50 -10 2 -43 0.1 0.2 44 -31 -25 -34 2 -43 0.3 0.8 -12 -48 46 -29 2 -43 0.6 0.8 36 28 16 -25 1 -43 0.2 0.5 46 -43 -17 -28 1 -43 0.3 0.6 4 -25 18 -36 1 -43 0.2 0.3 -3 -50 50 -16 2 -43 0.6 0.2 -31 -42 30 18 2 -43 0.5 0.2 18 -5 -4 -20 1 -43 0.7 0.9 48 -38 -39 -42 1 -43 0.6 0.7 9 -42 -27 -35 1 -43 0.3 0.6 46 13 -24 -36 1 -43 0.4 0.6 29 7 -14 -48 1 -43 0.4 0.3 11 -17 48 -20 1 -43 0.8 0.2 37 -47 -16 -19 1 -43 0.5 0.1 36 -35 0 -3 1 -43 0.3 0.3 -1 -30 40 23 2 -43 0.4 0.9 25 -25 33 1 2 -43 0.4 0.8 16 -16 1 -15 2 -43 0.8 0.4 -14 -37 19 11 2 -43 0.9 0.4 25 17 27 -38 1 -43 0.6 0.4 35 34 47 -36 1 -43 0.9 0.8 37 -18 27 -23 1 -43 0.5 0.2 -7 -35 45 37 2 -43 0.2 0.2 33 -13 3 -6 2 -43 0.6 0.7 44 -45 23 -26 2 -43 0.2 0.4 1 -10 39 26 2 -43 0.5 0.2 2 -28 46 -33 2 -43 0.4 0.8 16 5 36 35 2 -43 0.3 0.1 35 -2 0 -47 1 -43 0.8 0.5 5 -18 -6 -35 2 -43 0.1 0.9 3 -18 8 -46 1 -43 0.1 0.1 45 31 -35 -46 1 -43 0.3 0.1 44 -41 47 -7 1 -43 0.8 0.2 -5 
-34 17 -45 2 -43 0.1 0.3 -40 -45 30 -7 2 -43 0.2 0.1 47 -9 15 14 2 -43 0.9 0.3 -10 -22 27 0 2 -43 0.7 0.8 43 16 6 -24 1 -43 0.7 0.9 -40 -46 21 -38 2 -43 0.7 0.5 48 -8 36 -8 1 -43 0.3 0.4 12 -17 50 -8 2 -43 0.2 0.7 11 -40 13 -19 2 -43 0.8 0.8 40 39 47 -28 1 -43 0.2 0.2 39 13 -3 -42 1 -43 0.6 0.6 33 14 -43 -44 1 -43 0.1 0.4 42 27 38 15 2 -43 0.7 0.8 36 -20 42 -1 2 -43 0.6 0.3 21 -22 -15 -48 1 -43 0.9 0.2 7 -43 24 6 2 -43 0.9 0.2 7 -29 17 -10 1 -43 0.2 0.3 19 -29 30 -14 2 -43 0.4 0.4 27 -31 2 -18 1 -43 0.5 0.9 -18 -46 -23 -36 2 -43 0.4 0.5 6 -50 36 4 2 -43 0.5 0.9 29 -11 -19 -42 1 -43 0.1 0.1 -7 -17 7 -28 1 -43 0.5 0.2 -15 -27 47 -21 2 -43 0.1 0.8 -1 -26 24 -30 2 -43 0.5 0.1 -6 -31 33 14 2 -43 0.1 0.5 21 -4 21 -4 2 -43 0.9 0.3 40 -47 12 -34 1 -43 0.3 0.4 45 44 16 -16 1 -43 0.3 0.8 39 26 1 -17 1 -43 0.2 0.3 9 -3 24 -26 2 -43 0.2 0.3 -4 -9 21 -34 2 -43 0.7 0.3 29 -5 36 19 2 -43 0.5 0.4 47 31 37 -17 1 -43 0.1 0.3 20 -30 -26 -29 1 -43 0.9 0.2 42 16 43 -28 1 -43 0.4 0.9 4 -47 1 -43 2 -43 0.6 0.2 50 -2 26 10 1 -43 0.2 0.3 -13 -26 -15 -20 1 -43 0.7 0.4 50 35 17 12 1 -43 0.1 0.8 8 -19 -6 -12 1 -43 0.9 0.1 25 4 35 -12 1 -43 0.8 0.1 26 23 21 2 1 -43 0.3 0.4 -13 -19 50 -5 2 -43 0.9 0.8 11 -19 49 -34 2 -43 0.8 0.5 41 -41 45 39 2 -43 0.9 0.8 40 1 32 17 1 -43 0.7 0.7 49 -50 -2 -10 1 -43 0.6 0.7 -6 -50 0 -1 1 -43 0.3 0.6 26 -47 11 6 1 -43 0.1 0.6 38 0 15 10 1 -43 0.2 0.9 27 -10 46 6 2 -43 0.8 0.6 -5 -21 0 -25 2 -43 0.8 0.3 31 -19 -10 -40 1 -43 0.1 0.2 15 -40 -25 -26 2 -43 0.2 0.6 20 16 40 -45 1 -43 0.2 0.4 43 -9 -17 -32 1 -43 0.2 0.9 47 46 26 20 1 -43 0.5 0.2 38 13 21 -14 1 -43 0.2 0.2 29 -8 9 -12 2 -43 0.5 0.1 -14 -40 -6 -33 1 -43 0.5 0.5 3 -4 17 -29 2 -43 0.1 0.1 42 10 39 16 2 -43 0.5 0.3 45 -34 -25 -46 1 -43 0.3 0.3 23 -48 37 2 2 -43 0.1 0.8 28 -23 9 8 2 -43 0.9 0.7 43 12 35 24 1 -43 0.7 0.4 16 -44 -5 -24 2 -43 0.5 0.2 34 -5 18 3 2 -43 0.6 0.2 50 27 -29 -38 1 -43 0.4 0.2 24 -5 -2 -39 1 -43 0.9 0.4 19 -36 39 -18 1 -43 0.6 0.6 49 39 -3 -42 1 -43 0.1 0.3 32 -32 33 -6 2 -43 0.5 0.9 47 29 15 -16 1 -43 0.8 0.3 32 -14 38 -50 1 -43 0.9 0.9 -5 -22 48 20 2 -43 0.8 0.4 37 -22 47 2 2 -43 0.8 0.5 45 29 32 -3 1 -43 0.2 0.5 -10 -42 -48 -49 1 -43 0.5 0.2 22 -40 45 -22 1 -43 0.7 0.5 23 -15 -1 -9 1 -43 0.6 0.9 25 -48 -24 -37 1 -43 0.6 0.1 -22 -44 47 -18 2 -43 0.5 0.8 32 -37 46 -16 2 -43 0.7 0.1 10 -17 49 -49 1 -43 0.6 0.1 28 -50 38 -43 1 -43 0.1 0.9 44 -20 3 -32 2 -43 0.6 0.6 47 19 50 5 1 -43 0.8 0.9 42 -20 6 -34 1 -43 0.6 0.6 29 12 27 20 2 -43 0.2 0.4 33 -7 32 -18 1 -43 0.1 0.3 8 -44 40 -24 2 -43 0.2 0.3 -20 -41 1 -21 2 -43 0.7 0.2 15 -31 48 28 2 -43 0.4 0.5 16 12 10 -3 1 -43 0.7 0.5 39 -5 2 -29 1 -43 0.8 0.2 40 25 11 -29 1 -43 0.3 0.5 14 -40 16 -27 2 -43 0.2 0.5 -23 -34 5 0 2 -43 0.1 0.1 4 -28 -10 -18 2 -43 0.9 0.4 27 -29 -24 -28 1 -43 0.7 0.6 46 38 44 -47 1 -43 0.3 0.4 -36 -40 7 -12 2 -43 0.5 0.2 11 -41 -1 -29 1 -43 0.2 0.6 -1 -18 31 -50 2 -43 0.9 0.9 39 -27 15 -5 1 -43 0.6 0.9 40 4 32 29 2 -43 0.7 0.7 8 -8 44 9 2 -43 0.9 0.9 28 -27 26 -32 2 -43 0.3 0.6 14 -35 38 -42 2 -43 0.2 0.1 42 23 -4 -48 1 -43 0.6 0.6 0 -1 5 -16 2 -43 0.6 0.1 -4 -38 23 12 2 -43 0.9 0.9 -9 -32 28 -28 2 -43 0.4 0.1 -4 -13 -4 -39 1 -43 0.7 0.3 42 -33 20 -42 1 -43 0.6 0.5 20 -30 -1 -38 1 -43 0.1 0.7 -19 -42 -27 -29 2 -43 0.5 0.7 -7 -27 19 4 1 -43 0.3 0.7 26 21 10 -11 1 -43 0.3 0.4 40 -4 15 -10 1 -43 0.5 0.8 18 -17 -12 -33 1 -43 0.5 0.7 -9 -30 28 7 2 -43 0.3 0.8 40 -29 50 48 2 -43 0.8 0.1 -11 -14 31 22 2 -43 0.8 0.2 38 -30 2 -25 1 -43 0.5 0.1 18 -3 -12 -34 1 -43 0.7 0.5 0 -35 -8 -24 1 -43 0.7 0.7 46 35 14 10 1 -43 0.9 0.7 -3 -21 47 6 2 -43 0.7 0.1 21 
11 35 -2 1 -43 0.4 0.9 -24 -48 31 20 2 -43 0.2 0.4 43 -12 25 -39 1 -43 0.5 0.6 -24 -34 42 8 2 -43 0.6 0.5 -39 -46 19 -3 2 -43 0.7 0.6 -20 -37 17 9 2 -43 0.2 0.8 23 -17 -3 -27 1 -43 0.8 0.6 16 -40 14 5 2 -43 0.2 0.1 -13 -36 40 -13 2 -43 0.4 0.7 7 -5 25 -32 2 -43 0.8 0.7 41 28 43 28 2 -43 0.9 0.4 -13 -22 11 -34 2 -43 0.4 0.9 18 -31 36 10 2 -43 0.3 0.1 -33 -47 48 -37 2 -43 0.7 0.2 -20 -44 -34 -44 2 -43 0.5 0.3 21 -46 34 -30 1 -43 0.7 0.2 -7 -50 -13 -27 1 -43 0.3 0.4 4 -21 17 -44 2 -43 0.7 0.7 -23 -26 9 -33 2 -43 0.1 0.8 14 -13 23 -9 2 -43 0.4 0.1 10 -33 2 -33 2 -43 0.6 0.6 35 16 42 -4 1 -43 0.1 0.3 46 27 41 -42 1 -43 0.7 0.6 6 -40 27 -50 2 -43 0.1 0.4 45 -38 25 -18 1 -43 0.1 0.4 -25 -50 -22 -36 2 -43 0.5 0.7 15 -28 -31 -39 1 -43 0.5 0.7 20 -25 32 -12 2 -43 0.3 0.7 50 1 20 -35 1 -43 0.8 0.4 14 -14 45 -31 2 -43 0.1 0.2 42 -49 -33 -34 2 -43 0.4 0.9 7 4 24 -33 2 -43 0.5 0.9 8 -12 39 12 2 -43 0.5 0.7 -15 -35 24 -43 2 -43 0.4 0.4 -10 -23 -31 -32 1 -43 0.7 0.8 50 20 34 16 2 -43 0.2 0.4 -9 -46 -18 -22 2 -43 0.3 0.1 26 -28 39 -25 2 -43 0.3 0.1 48 -10 4 -50 1 -43 0.6 0.2 -14 -24 -9 -24 1 -43 0.4 0.3 38 4 4 -42 1 -43 0.3 0.9 -13 -27 30 5 2 -43 0.8 0.9 -16 -31 -40 -47 2 -43 0.7 0.7 17 5 -6 -48 1 -43 0.6 0.7 22 -34 8 -49 2 -43 0.5 0.8 30 7 -23 -43 1 -43 0.4 0.6 -21 -34 14 -27 2 -43 0.8 0.4 -13 -50 15 -7 2 -43 0.6 0.7 10 -10 -39 -50 1 -43 0.7 0.1 24 -20 3 -33 1 -43 0.7 0.1 14 -35 17 -3 1 -43 0.9 0.3 -12 -50 -8 -34 1 -43 0.1 0.9 31 27 37 1 2 -44 0.7 0.1 48 -40 32 -37 1 -44 0.8 0.5 -21 -40 39 3 2 -44 0.6 0.8 -29 -40 14 6 2 -44 0.9 0.3 37 -23 47 -34 1 -44 0.4 0.5 49 -18 -37 -50 1 -44 0.2 0.5 -24 -46 32 18 2 -44 0.8 0.4 41 27 27 -2 1 -44 0.6 0.6 29 -45 7 -28 1 -44 0.7 0.5 -33 -48 27 19 2 -44 0.1 0.8 -39 -42 -34 -49 2 -44 0.8 0.6 -36 -39 31 -44 2 -44 0.9 0.2 26 -1 -20 -21 1 -44 0.8 0.2 -5 -15 24 -32 1 -44 0.8 0.9 -21 -35 -6 -17 2 -44 0.7 0.3 27 -31 25 -41 1 -44 0.6 0.5 -8 -17 9 -28 2 -44 0.9 0.1 -15 -34 -29 -40 1 -44 0.7 0.6 28 -5 0 -4 1 -44 0.2 0.8 8 -33 8 -39 2 -44 0.3 0.4 28 10 3 -35 1 -44 0.6 0.3 -4 -12 32 28 2 -44 0.9 0.2 -40 -49 40 -44 1 -44 0.8 0.3 28 11 41 -48 1 -44 0.7 0.2 -5 -26 46 2 2 -44 0.6 0.9 12 8 -26 -45 1 -44 0.4 0.4 35 -28 5 -36 1 -44 0.4 0.2 -28 -46 39 -44 2 -44 0.8 0.7 14 -34 29 -13 2 -44 0.6 0.8 21 -7 43 37 2 -44 0.9 0.3 -4 -30 8 -38 1 -44 0.6 0.1 -1 -30 29 -38 1 -44 0.2 0.5 -4 -22 14 -32 2 -44 0.5 0.5 -38 -43 39 14 2 -44 0.2 0.5 2 -23 8 -6 2 -44 0.8 0.6 -6 -19 35 -2 2 -44 0.4 0.5 -31 -33 45 -4 2 -44 0.2 0.9 -21 -37 37 -8 2 -44 0.9 0.4 41 -34 29 -49 1 -44 0.2 0.8 -24 -28 43 -15 2 -44 0.5 0.9 -40 -50 11 -17 2 -44 0.2 0.5 -10 -38 0 -19 2 -44 0.9 0.1 40 11 34 -31 1 -44 0.3 0.7 40 14 19 -4 1 -44 0.7 0.6 -8 -36 22 11 2 -44 0.3 0.1 10 -9 8 -31 1 -44 0.3 0.8 3 -27 47 -2 2 -44 0.3 0.4 50 49 -19 -39 1 -44 0.4 0.6 38 -6 48 17 2 -44 0.6 0.4 4 -24 22 -19 2 -44 0.8 0.9 -8 -37 -11 -36 1 -44 0.9 0.1 32 -33 40 -35 1 -44 0.4 0.5 34 -41 -21 -44 1 -44 0.7 0.2 -2 -5 46 -20 2 -44 0.9 0.1 42 11 4 -9 1 -44 0.3 0.1 -12 -37 29 -39 2 -44 0.7 0.6 39 10 5 -35 1 -44 0.8 0.8 -34 -42 -14 -39 2 -44 0.3 0.4 50 25 44 18 1 -44 0.5 0.2 -25 -30 22 0 2 -44 0.8 0.2 34 -39 41 -17 1 -44 0.3 0.3 -27 -32 -33 -37 1 -44 0.5 0.7 15 -26 50 -23 2 -44 0.4 0.1 -13 -21 24 2 2 -44 0.9 0.7 4 -37 34 -16 2 -44 0.4 0.9 -14 -23 27 12 2 -44 0.9 0.9 -8 -46 5 -21 2 -44 0.4 0.3 38 14 29 -28 1 -44 0.1 0.8 -28 -39 5 -29 2 -44 0.2 0.1 25 5 -4 -26 1 -44 0.1 0.6 47 18 41 -49 1 -44 0.8 0.7 4 -29 46 40 2 -44 0.4 0.7 45 42 -27 -35 1 -44 0.9 0.5 37 -16 32 28 1 -44 0.8 0.1 17 -8 32 7 1 -44 0.7 0.8 46 14 9 7 1 -44 0.9 0.3 -3 -34 2 -8 2 -44 0.9 0.5 44 -1 45 -11 
1 -44 0.5 0.1 44 26 -9 -20 1 -44 0.6 0.4 33 -40 29 7 2 -44 0.5 0.3 2 -48 31 -42 2 -44 0.7 0.3 -23 -43 1 -47 2 -44 0.5 0.9 31 -40 12 -46 2 -44 0.3 0.1 49 -4 16 -14 1 -44 0.2 0.7 20 -8 31 -3 2 -44 0.2 0.8 -26 -31 -45 -47 1 -44 0.5 0.1 35 14 40 28 2 -44 0.9 0.8 -12 -37 24 -43 2 -44 0.5 0.5 43 -46 -33 -49 1 -44 0.1 0.3 -34 -43 46 -34 2 -44 0.1 0.8 5 3 12 -5 2 -44 0.5 0.6 14 -15 -17 -43 1 -44 0.9 0.2 -27 -38 -30 -41 2 -44 0.2 0.8 6 -36 -9 -44 2 -44 0.7 0.5 48 10 32 -14 1 -44 0.1 0.2 17 -37 41 39 2 -44 0.8 0.2 15 4 24 -45 1 -44 0.4 0.9 10 7 23 -43 2 -44 0.1 0.1 34 -38 24 17 2 -44 0.3 0.4 -20 -43 26 -33 2 -44 0.1 0.4 8 -8 2 -31 2 -44 0.4 0.1 -12 -13 23 -14 2 -44 0.5 0.4 -15 -27 10 -40 2 -44 0.3 0.5 42 -31 38 30 2 -44 0.7 0.5 32 -11 -22 -31 1 -44 0.8 0.8 12 -19 -13 -21 1 -44 0.7 0.8 11 -43 44 -7 2 -44 0.7 0.3 45 7 -33 -40 1 -44 0.8 0.1 -9 -32 36 -2 2 -44 0.7 0.8 44 -11 40 5 2 -44 0.7 0.1 47 45 15 -46 1 -44 0.7 0.3 3 -9 28 -32 1 -44 0.9 0.2 32 -8 29 9 1 -44 0.9 0.7 9 -50 7 -34 1 -44 0.9 0.4 12 -28 42 -1 2 -44 0.9 0.3 6 -29 7 -16 1 -44 0.7 0.1 5 -15 -4 -14 1 -44 0.2 0.1 45 -37 -10 -33 1 -44 0.2 0.4 24 -27 -9 -44 1 -44 0.9 0.1 20 -10 40 25 2 -44 0.6 0.5 -27 -28 19 15 2 -44 0.7 0.8 0 -1 11 -33 2 -44 0.1 0.9 34 6 28 2 2 -44 0.5 0.8 8 -36 -20 -40 1 -44 0.2 0.5 38 34 -6 -9 1 -44 0.6 0.1 46 -25 50 12 1 -44 0.3 0.8 20 -3 47 7 2 -44 0.3 0.8 -11 -28 -20 -50 2 -44 0.4 0.5 43 -15 7 -20 1 -44 0.6 0.3 -11 -30 4 -45 1 -44 0.4 0.6 2 0 40 36 2 -44 0.8 0.1 -3 -41 29 18 2 -44 0.5 0.3 36 23 -10 -50 1 -44 0.4 0.3 6 -14 27 -41 1 -44 0.4 0.5 1 -20 39 -13 2 -44 0.6 0.4 28 -33 45 -16 2 -44 0.1 0.5 25 -44 11 4 2 -44 0.7 0.2 46 2 39 34 2 -44 0.9 0.8 24 -1 3 -14 1 -44 0.4 0.2 36 -11 44 -17 1 -44 0.5 0.9 -45 -50 21 -46 2 -44 0.2 0.8 32 30 26 9 1 -44 0.6 0.2 23 19 -29 -45 1 -44 0.9 0.8 20 -22 43 26 2 -44 0.2 0.9 44 37 26 -24 1 -44 0.7 0.8 27 2 22 -28 1 -44 0.1 0.9 14 -50 2 -35 2 -44 0.3 0.7 46 4 37 15 2 -44 0.9 0.2 50 49 12 -26 1 -44 0.4 0.4 36 19 29 21 2 -44 0.2 0.5 29 8 41 37 2 -44 0.2 0.4 23 11 -3 -32 1 -44 0.7 0.9 47 -45 26 -26 2 -44 0.4 0.6 25 21 -14 -43 1 -44 0.5 0.8 50 18 37 -13 1 -44 0.5 0.7 30 25 10 -27 1 -44 0.3 0.4 22 6 0 -29 1 -44 0.4 0.3 20 -35 17 5 2 -44 0.1 0.5 17 -20 11 -49 2 -44 0.3 0.5 48 -21 8 4 2 -44 0.1 0.9 37 -38 29 19 2 -44 0.6 0.9 -21 -23 -10 -19 2 -44 0.9 0.4 -19 -50 4 -14 2 -44 0.7 0.2 21 -40 -44 -49 1 -44 0.1 0.5 -10 -29 45 -46 2 -44 0.4 0.6 33 -22 39 -41 2 -44 0.7 0.8 12 -21 21 4 2 -44 0.9 0.9 45 35 8 -38 1 -44 0.8 0.1 18 -24 -27 -30 1 -44 0.9 0.9 50 23 29 -40 1 -44 0.5 0.7 24 -30 5 3 2 -44 0.2 0.1 5 -30 -3 -31 1 -44 0.7 0.1 0 -46 50 6 2 -44 0.5 0.6 43 42 -33 -39 1 -44 0.1 0.8 37 13 21 -11 1 -44 0.1 0.5 39 -17 47 32 2 -44 0.2 0.9 34 -14 35 -19 2 -44 0.7 0.8 -28 -50 20 17 2 -44 0.3 0.6 23 -7 1 -9 2 -44 0.2 0.2 46 33 36 -49 1 -44 0.9 0.2 -13 -26 12 -32 2 -44 0.1 0.3 -10 -24 26 -13 2 -44 0.7 0.9 36 -45 10 -15 1 -44 0.6 0.6 50 -25 21 -41 1 -44 0.2 0.9 25 11 -39 -42 1 -44 0.1 0.2 9 7 34 -29 1 -44 0.8 0.1 20 -21 -42 -45 1 -44 0.4 0.2 40 2 16 -22 1 -44 0.7 0.6 17 -14 46 37 2 -44 0.2 0.5 -16 -25 43 -2 2 -44 0.2 0.3 45 -19 -10 -22 1 -44 0.4 0.7 -11 -16 10 -22 2 -44 0.8 0.3 40 -49 -14 -37 1 -44 0.5 0.3 -35 -43 46 -38 2 -44 0.6 0.8 3 -22 -14 -15 1 -44 0.9 0.2 -29 -40 21 2 2 -44 0.4 0.9 38 21 42 -47 1 -44 0.3 0.2 35 27 6 -9 1 -44 0.6 0.1 47 -21 49 7 1 -44 0.1 0.2 19 2 -9 -19 1 -44 0.1 0.9 21 -10 29 -12 2 -44 0.8 0.7 22 -49 4 -6 1 -44 0.9 0.7 24 -18 13 6 1 -44 0.2 0.5 39 -10 43 -7 2 -44 0.8 0.1 24 -25 -5 -13 1 -44 0.9 0.2 26 -12 6 -22 1 -44 0.4 0.7 19 -1 3 -18 1 -44 0.8 0.5 35 -31 3 -10 1 -44 0.6 0.2 32 7 -8 -21 
1 -44 0.9 0.2 8 -11 42 23 2 -44 0.8 0.5 14 -26 11 -4 1 -44 0.6 0.7 13 -27 48 -10 2 -44 0.7 0.6 39 -7 5 -13 1 -44 0.2 0.8 20 8 -3 -17 1 -44 0.4 0.3 10 5 46 21 2 -44 0.2 0.8 36 -7 22 15 2 -44 0.5 0.8 36 -11 19 -20 2 -44 0.9 0.4 35 -14 -41 -50 1 -44 0.8 0.4 42 -11 -34 -44 1 -44 0.9 0.6 -9 -28 29 26 2 -44 0.9 0.1 16 -3 -24 -49 1 -44 0.2 0.7 25 -26 3 -48 2 -44 0.7 0.6 18 -49 49 -9 2 -44 0.9 0.7 50 -30 6 -12 1 -44 0.3 0.9 45 -26 27 -29 2 -44 0.5 0.1 -1 -20 25 2 2 -44 0.2 0.1 28 -28 41 29 2 -44 0.3 0.1 45 -25 -2 -24 1 -44 0.9 0.5 -21 -30 37 -45 2 -44 0.5 0.5 41 30 -18 -40 1 -44 0.1 0.4 18 -18 47 8 2 -44 0.7 0.7 42 -41 33 -12 2 -44 0.7 0.4 45 -1 24 -38 1 -44 0.3 0.3 -42 -49 50 -1 2 -44 0.3 0.4 35 27 0 -18 1 -44 0.5 0.9 47 32 47 27 2 -44 0.4 0.2 12 -46 22 -49 1 -44 0.5 0.8 10 -17 -31 -40 1 -44 0.8 0.1 18 16 -32 -50 1 -44 0.1 0.4 -18 -41 -30 -32 2 -44 0.7 0.8 -17 -50 48 -30 2 -44 0.9 0.9 22 -48 -11 -34 1 -44 0.5 0.1 12 -18 3 -15 1 -44 0.4 0.7 25 22 4 -41 1 -44 0.8 0.3 33 -6 34 19 2 -44 0.6 0.7 -1 -12 39 -28 2 -44 0.9 0.9 33 -31 23 19 2 -44 0.2 0.4 19 -25 43 -19 2 -44 0.2 0.2 4 -29 -23 -27 1 -44 0.4 0.1 4 -16 -15 -42 1 -44 0.6 0.7 22 -33 -6 -13 1 -44 0.8 0.2 4 -2 -11 -27 1 -44 0.6 0.7 -2 -14 32 -42 2 -44 0.3 0.7 28 -19 18 7 2 -44 0.3 0.4 21 14 39 26 2 -44 0.9 0.9 -9 -38 11 -47 2 -44 0.6 0.7 32 -33 47 -42 2 -44 0.8 0.2 2 -4 50 -5 1 -44 0.2 0.5 22 -3 28 2 2 -44 0.2 0.1 38 30 -4 -12 1 -44 0.2 0.2 32 -43 16 15 2 -44 0.3 0.3 22 -3 27 -27 1 -44 0.1 0.5 -3 -19 -32 -49 1 -44 0.5 0.7 -21 -29 21 -31 2 -44 0.6 0.4 -6 -35 -21 -23 1 -44 0.4 0.7 -6 -32 10 -48 2 -44 0.1 0.9 30 5 -30 -35 1 -44 0.7 0.7 9 -1 -6 -32 1 -44 0.3 0.2 -9 -24 8 -7 2 -44 0.5 0.4 21 -11 47 -19 2 -44 0.9 0.4 47 19 25 10 1 -44 0.6 0.3 -6 -31 11 -15 2 -44 0.4 0.1 47 -31 -7 -24 1 -44 0.1 0.9 19 6 50 9 2 -44 0.4 0.5 49 22 30 -16 1 -44 0.1 0.5 7 -41 45 -21 2 -44 0.1 0.3 36 -14 21 4 2 -44 0.2 0.9 34 16 31 19 2 -44 0.4 0.5 46 -40 4 -25 1 -44 0.2 0.5 28 -35 16 -13 2 -44 0.9 0.5 32 -24 2 -16 1 -44 0.7 0.1 45 12 35 -38 1 -44 0.8 0.2 29 4 7 -17 1 -44 0.8 0.8 -27 -50 9 -23 2 -44 0.2 0.9 23 -41 2 -1 2 -44 0.9 0.2 35 -38 36 -25 1 -44 0.2 0.2 47 45 26 -11 1 -44 0.4 0.5 41 34 -12 -19 1 -44 0.4 0.6 1 -43 28 -48 2 -44 0.9 0.3 50 -24 47 -39 1 -44 0.5 0.3 38 12 11 -50 1 -44 0.1 0.5 40 15 42 34 2 -44 0.7 0.9 -13 -44 47 39 2 -44 0.2 0.8 50 -43 -4 -29 2 -44 0.2 0.4 47 -37 33 -2 2 -44 0.2 0.1 48 -23 33 -8 2 -44 0.4 0.1 22 -18 14 -48 1 -44 0.6 0.1 49 -22 35 3 1 -44 0.6 0.6 40 39 44 -31 1 -44 0.6 0.5 11 -1 30 20 1 -44 0.7 0.2 30 -44 3 -29 1 -45 0.4 0.1 25 3 22 -1 1 -45 0.9 0.9 35 2 -22 -29 1 -45 0.8 0.7 3 -7 11 -49 1 -45 0.7 0.3 -18 -36 -43 -49 1 -45 0.5 0.5 11 -2 -1 -4 1 -45 0.5 0.2 36 -6 0 -17 1 -45 0.4 0.7 -29 -38 15 -11 2 -45 0.8 0.1 -6 -27 48 -12 2 -45 0.3 0.6 31 -15 21 -12 2 -45 0.9 0.3 14 3 45 -45 1 -45 0.4 0.5 35 8 10 -2 1 -45 0.3 0.2 3 -48 36 -45 2 -45 0.4 0.4 39 0 -13 -38 1 -45 0.8 0.3 17 16 -45 -48 1 -45 0.9 0.6 10 -20 25 -16 1 -45 0.9 0.7 -20 -38 34 -18 2 -45 0.7 0.1 -13 -20 34 17 2 -45 0.2 0.9 40 14 31 25 2 -45 0.2 0.9 -13 -39 8 -4 2 -45 0.4 0.2 23 22 39 -38 1 -45 0.8 0.8 -14 -33 39 -6 2 -45 0.4 0.8 -21 -50 -2 -50 2 -45 0.4 0.8 18 -46 24 2 2 -45 0.3 0.8 32 -42 -27 -50 1 -45 0.7 0.7 28 25 46 -48 1 -45 0.2 0.1 -16 -39 9 -46 1 -45 0.1 0.4 -6 -15 5 -49 1 -45 0.3 0.1 1 -26 -19 -43 1 -45 0.4 0.3 35 30 17 -3 1 -45 0.4 0.4 30 -44 5 3 2 -45 0.4 0.8 -39 -43 46 28 2 -45 0.2 0.1 -1 -7 34 28 2 -45 0.3 0.9 8 -41 50 10 2 -45 0.8 0.7 23 -24 18 8 2 -45 0.3 0.1 -17 -43 45 19 2 -45 0.1 0.6 -37 -45 -14 -21 2 -45 0.4 0.2 -23 -41 14 -20 2 -45 0.4 0.3 -35 -44 -14 -25 2 -45 0.6 
0.9 34 -47 14 -31 2 -45 0.9 0.9 5 -31 37 -29 2 -45 0.7 0.3 -3 -47 -20 -49 1 -45 0.4 0.3 49 37 43 -9 1 -45 0.6 0.3 1 -40 27 -30 1 -45 0.9 0.4 9 -31 30 -43 1 -45 0.2 0.9 -42 -49 13 8 2 -45 0.9 0.3 43 2 21 16 1 -45 0.9 0.2 -5 -21 49 15 2 -45 0.6 0.9 41 -8 17 -9 1 -45 0.6 0.3 -46 -48 -39 -46 2 -45 0.4 0.5 -1 -40 13 -19 2 -45 0.9 0.9 50 1 -4 -30 1 -45 0.4 0.8 -2 -4 35 -13 2 -45 0.8 0.8 13 12 28 -14 2 -45 0.5 0.1 34 -16 -13 -17 1 -45 0.1 0.4 8 -23 35 -38 2 -45 0.4 0.1 8 -20 19 -23 1 -45 0.6 0.8 40 -9 -4 -44 1 -45 0.6 0.6 -9 -20 47 -18 2 -45 0.2 0.5 -6 -46 2 -18 2 -45 0.1 0.7 5 -5 -3 -7 1 -45 0.3 0.9 39 -35 -4 -46 2 -45 0.7 0.4 31 26 -29 -50 1 -45 0.9 0.7 27 -16 46 27 2 -45 0.6 0.1 -10 -36 39 31 2 -45 0.3 0.4 28 -32 -27 -48 1 -45 0.4 0.8 -7 -13 -35 -48 1 -45 0.6 0.4 49 32 41 27 1 -45 0.2 0.7 14 -22 34 -47 2 -45 0.5 0.4 17 -14 27 -15 2 -45 0.8 0.5 11 10 -38 -49 1 -45 0.7 0.1 20 -18 37 7 2 -45 0.6 0.7 -45 -49 -23 -39 2 -45 0.8 0.2 -6 -39 32 -10 2 -45 0.9 0.5 47 -5 47 40 2 -45 0.4 0.8 22 -39 20 -8 2 -45 0.3 0.7 -20 -21 -8 -32 2 -45 0.2 0.2 29 -17 40 16 2 -45 0.3 0.7 0 -23 37 4 2 -45 0.7 0.9 41 -10 18 -35 1 -45 0.8 0.8 -32 -47 47 -20 2 -45 0.1 0.5 16 -18 22 -17 2 -45 0.9 0.1 28 -49 28 -1 1 -45 0.9 0.3 -40 -41 13 -5 2 -45 0.1 0.3 2 -5 43 31 2 -45 0.1 0.2 49 44 47 4 1 -45 0.1 0.6 48 34 49 11 1 -45 0.6 0.1 -28 -30 39 -1 2 -45 0.2 0.3 2 -36 14 10 2 -45 0.3 0.1 15 4 1 -20 1 -45 0.7 0.8 25 -24 17 -27 1 -45 0.1 0.7 28 -48 24 -7 2 -45 0.4 0.1 -11 -24 38 -2 2 -45 0.1 0.7 46 18 41 -1 2 -45 0.6 0.1 12 -36 34 -7 1 -45 0.4 0.1 -29 -34 2 -40 2 -45 0.7 0.8 47 9 15 -8 1 -45 0.4 0.8 10 -3 -5 -10 1 -45 0.9 0.9 13 -44 6 -43 1 -45 0.6 0.4 41 -7 -41 -43 1 -45 0.3 0.6 42 -22 21 -30 2 -45 0.7 0.2 -26 -44 45 16 2 -45 0.8 0.2 18 -10 -7 -49 1 -45 0.4 0.6 -6 -21 50 -41 2 -45 0.4 0.2 39 -9 9 -43 1 -45 0.7 0.5 -27 -40 42 -44 2 -45 0.5 0.1 36 30 0 -17 1 -45 0.7 0.4 35 25 30 -15 1 -45 0.4 0.5 -3 -26 39 24 2 -45 0.7 0.7 6 -24 -29 -49 1 -45 0.1 0.1 29 23 21 -2 1 -45 0.9 0.3 45 -8 24 -1 1 -45 0.9 0.2 29 -50 7 -16 1 -45 0.6 0.6 6 -15 19 -16 2 -45 0.1 0.1 48 -18 -25 -32 1 -45 0.7 0.4 -8 -32 29 -22 2 -45 0.7 0.7 48 -50 50 -44 2 -45 0.9 0.1 40 -30 20 -45 1 -45 0.5 0.4 42 -24 15 8 2 -45 0.1 0.7 12 3 -43 -45 1 -45 0.6 0.5 -4 -20 21 -28 2 -45 0.6 0.8 10 -39 25 -39 2 -45 0.2 0.2 36 -16 -24 -26 1 -45 0.6 0.6 37 5 -11 -22 1 -45 0.1 0.1 -22 -45 36 -6 2 -45 0.6 0.8 -5 -42 -14 -47 1 -45 0.6 0.7 11 9 39 32 2 -45 0.4 0.8 50 -13 40 -23 2 -45 0.3 0.3 10 -27 21 8 2 -45 0.3 0.1 35 -45 50 -15 2 -45 0.1 0.8 11 2 27 -15 2 -45 0.5 0.1 25 -21 -38 -44 1 -45 0.2 0.6 -5 -36 12 1 2 -45 0.6 0.8 1 -45 -15 -28 2 -45 0.2 0.3 49 -21 36 18 2 -45 0.1 0.2 24 -47 -19 -32 2 -45 0.7 0.5 39 0 -11 -47 1 -45 0.1 0.4 28 11 19 -47 1 -45 0.1 0.8 4 -5 -8 -42 1 -45 0.6 0.1 -1 -26 24 -34 1 -45 0.7 0.7 7 -24 -19 -30 1 -45 0.1 0.7 11 4 6 -17 1 -45 0.4 0.7 27 -36 26 19 2 -45 0.9 0.4 -16 -27 45 -50 2 -45 0.1 0.6 16 14 40 19 2 -45 0.8 0.6 42 19 17 -50 1 -45 0.2 0.2 36 -32 34 -33 2 -45 0.3 0.3 -21 -48 -11 -17 2 -45 0.3 0.6 -12 -44 8 2 2 -45 0.4 0.1 17 -27 -38 -47 1 -45 0.4 0.5 41 39 -2 -24 1 -45 0.4 0.9 35 0 -32 -38 1 -45 0.5 0.5 33 -9 40 -15 1 -45 0.4 0.8 33 -29 -4 -34 1 -45 0.6 0.9 -10 -42 23 -38 2 -45 0.4 0.2 34 -29 28 -8 2 -45 0.4 0.2 50 8 -30 -37 1 -45 0.4 0.6 -27 -49 49 -19 2 -45 0.2 0.9 12 2 19 10 2 -45 0.9 0.5 6 -36 -8 -27 1 -45 0.5 0.4 21 -3 40 -41 1 -45 0.7 0.7 37 -32 -9 -31 1 -45 0.3 0.5 43 -49 41 20 2 -45 0.4 0.2 26 3 39 19 2 -45 0.4 0.5 -22 -23 -37 -49 1 -45 0.6 0.8 6 -7 49 -2 2 -45 0.7 0.6 46 -33 33 -27 1 -45 0.2 0.4 -28 -32 26 6 2 -45 0.7 0.3 50 46 48 -24 1 -45 0.4 0.7 9 
-37 33 1 2 -45 0.5 0.3 1 -30 -34 -36 1 -45 0.6 0.6 7 -48 26 -32 2 -45 0.6 0.1 -8 -39 37 -19 2 -45 0.5 0.5 -9 -35 -15 -48 1 -45 0.2 0.1 3 -3 29 8 2 -45 0.6 0.5 26 11 48 -7 1 -45 0.8 0.4 26 -37 2 -36 1 -45 0.8 0.9 -18 -38 1 -5 2 -45 0.8 0.1 -38 -45 34 4 2 -45 0.4 0.6 16 2 -29 -36 1 -45 0.7 0.5 -6 -36 37 3 2 -45 0.5 0.4 -1 -50 23 12 2 -45 0.7 0.9 42 -49 46 -4 2 -45 0.6 0.2 20 -48 4 -44 1 -45 0.4 0.9 30 -28 29 -4 2 -45 0.9 0.8 47 26 43 37 1 -45 0.6 0.8 19 -7 43 -49 2 -45 0.7 0.6 -43 -48 30 -20 2 -45 0.7 0.6 -4 -5 -4 -36 1 -45 0.5 0.8 13 -31 -11 -39 1 -45 0.5 0.5 8 -47 40 -32 2 -45 0.8 0.2 -45 -49 5 -24 2 -45 0.8 0.7 27 22 14 -29 1 -45 0.5 0.3 22 -33 34 -25 1 -45 0.9 0.3 -13 -16 39 -26 2 -45 0.1 0.3 43 19 17 9 1 -45 0.2 0.1 -5 -34 40 -10 2 -45 0.3 0.4 8 -47 -5 -11 2 -45 0.5 0.4 45 -37 -34 -35 1 -45 0.6 0.3 48 -8 28 -44 1 -45 0.5 0.8 49 -39 -7 -18 1 -45 0.5 0.8 35 -3 21 -11 1 -45 0.6 0.2 -11 -38 49 30 2 -45 0.8 0.7 -20 -22 8 -29 2 -45 0.9 0.4 22 -5 -6 -26 1 -45 0.5 0.2 -11 -16 3 -9 2 -45 0.5 0.2 23 19 50 12 2 -45 0.2 0.1 29 -44 48 12 2 -45 0.8 0.5 3 -23 4 -33 1 -45 0.9 0.9 44 -39 -31 -40 1 -45 0.4 0.4 39 -27 -9 -44 1 -45 0.4 0.9 -29 -30 5 -7 2 -45 0.1 0.2 50 -18 23 21 2 -45 0.6 0.6 45 -28 -5 -43 1 -45 0.7 0.2 50 -9 13 -30 1 -45 0.8 0.1 -30 -36 -15 -44 1 -45 0.3 0.4 -34 -36 18 8 2 -45 0.2 0.2 -1 -2 7 1 2 -45 0.4 0.8 6 -41 21 20 2 -45 0.9 0.9 30 1 22 3 1 -45 0.4 0.6 -33 -35 34 1 2 -45 0.4 0.2 16 -34 48 -13 2 -45 0.9 0.5 47 -40 43 -11 1 -45 0.8 0.4 16 -1 41 -4 1 -45 0.1 0.1 -17 -25 -14 -31 1 -45 0.4 0.3 38 -19 23 -18 1 -45 0.5 0.1 0 -40 21 -37 1 -45 0.4 0.4 0 -37 29 20 2 -45 0.4 0.7 47 44 -10 -38 1 -45 0.5 0.6 -2 -34 12 -12 2 -45 0.3 0.8 13 -14 30 13 2 -45 0.5 0.9 40 35 39 -14 1 -45 0.2 0.5 44 -22 43 -1 2 -45 0.6 0.3 -5 -37 -40 -41 1 -45 0.6 0.2 -10 -36 19 4 2 -45 0.4 0.9 20 -27 5 4 2 -45 0.7 0.3 31 -18 33 -22 1 -45 0.9 0.2 49 3 33 -35 1 -45 0.2 0.6 -15 -25 -3 -35 2 -45 0.9 0.5 16 -20 12 -11 1 -45 0.8 0.4 48 -8 -8 -35 1 -45 0.6 0.5 23 -5 33 14 2 -45 0.5 0.6 40 28 18 -11 1 -45 0.3 0.7 26 -25 5 -3 2 -45 0.7 0.7 -18 -28 -31 -36 1 -45 0.4 0.6 38 -13 38 34 2 -45 0.7 0.7 38 -27 14 1 1 -45 0.3 0.6 22 -4 28 18 2 -45 0.8 0.6 -15 -16 -19 -34 1 -45 0.1 0.1 41 -25 -17 -40 1 -45 0.7 0.3 12 -42 -23 -25 1 -45 0.1 0.6 -2 -5 23 9 2 -45 0.2 0.7 29 -47 -2 -43 2 -45 0.1 0.1 36 -43 31 -12 2 -45 0.7 0.8 17 -44 -18 -27 1 -45 0.5 0.6 49 19 17 -8 1 -45 0.3 0.1 18 -29 48 -48 1 -45 0.6 0.3 49 -23 32 -18 1 -45 0.2 0.4 31 -5 14 -15 2 -45 0.4 0.6 4 -10 0 -7 2 -45 0.5 0.8 28 -21 27 12 2 -45 0.2 0.8 5 -7 -32 -42 1 -45 0.4 0.4 44 42 40 33 1 -45 0.9 0.5 4 -36 19 -48 1 -45 0.7 0.4 -18 -19 22 -35 2 -45 0.7 0.8 25 -38 30 4 2 -45 0.6 0.8 18 -40 -14 -44 1 -45 0.5 0.2 -39 -48 38 -24 2 -45 0.1 0.9 18 11 -22 -38 1 -45 0.6 0.2 8 -50 50 11 2 -45 0.8 0.5 23 -4 -20 -26 1 -45 0.1 0.1 -6 -22 26 -48 1 -45 0.6 0.2 17 -46 35 -20 1 -45 0.6 0.7 15 -18 45 -49 2 -45 0.5 0.2 -34 -39 8 -23 2 -45 0.2 0.3 4 -23 -7 -45 1 -45 0.4 0.4 17 -3 39 -11 2 -45 0.5 0.2 44 11 40 -15 1 -45 0.4 0.1 6 -45 39 -27 2 -45 0.3 0.8 20 -7 18 -50 2 -45 0.2 0.5 25 -2 31 -46 1 -45 0.3 0.4 42 -9 46 -4 2 -45 0.6 0.6 -16 -44 -17 -43 1 -45 0.3 0.4 36 -49 25 -45 2 -45 0.7 0.6 1 -8 6 -31 1 -45 0.8 0.1 11 -39 22 11 2 -45 0.2 0.6 -27 -45 19 18 2 -45 0.1 0.1 31 -41 -16 -32 1 -45 0.6 0.1 33 -29 12 8 1 -45 0.5 0.8 44 2 22 -7 1 -45 0.7 0.6 50 -30 40 -31 1 -45 0.1 0.9 14 -30 44 36 2 -45 0.9 0.5 -38 -45 35 -40 2 -45 0.3 0.6 47 8 46 -3 2 -45 0.7 0.3 24 -21 10 -49 1 -45 0.7 0.1 19 -1 -23 -37 1 -45 0.9 0.2 -14 -35 28 -48 1 -45 0.7 0.2 -16 -21 45 -13 2 -45 0.8 0.6 1 -49 31 -23 2 -45 0.9 0.9 27 -32 
21 11 2 -45 0.8 0.2 34 20 28 -28 1 -46 0.9 0.7 3 -50 22 1 2 -46 0.8 0.7 34 15 -6 -37 1 -46 0.3 0.7 30 -32 23 -49 2 -46 0.4 0.7 -13 -47 13 -29 2 -46 0.3 0.5 20 -30 49 -12 2 -46 0.5 0.9 45 42 20 -17 1 -46 0.3 0.8 45 -2 -22 -50 1 -46 0.3 0.2 19 0 19 5 2 -46 0.1 0.8 5 -35 -18 -43 2 -46 0.1 0.7 45 -40 -24 -31 1 -46 0.4 0.3 37 16 41 27 1 -46 0.6 0.1 14 -15 -34 -47 1 -46 0.2 0.8 32 31 30 -30 1 -46 0.4 0.3 48 -9 -17 -47 1 -46 0.6 0.6 49 7 23 -24 2 -46 0.2 0.6 19 -39 25 -34 2 -46 0.3 0.3 15 -9 13 -35 1 -46 0.6 0.7 -3 -48 -5 -43 1 -46 0.8 0.8 35 -25 30 -49 1 -46 0.2 0.3 40 13 18 -21 1 -46 0.3 0.6 40 20 11 -27 1 -46 0.6 0.1 8 5 27 -29 1 -46 0.9 0.5 15 -12 40 10 2 -46 0.4 0.5 -1 -25 -13 -31 2 -46 0.9 0.1 -45 -47 19 -41 1 -46 0.9 0.2 6 -49 9 1 1 -46 0.2 0.7 44 -40 48 19 2 -46 0.5 0.5 50 -29 19 -13 1 -46 0.8 0.8 50 -41 -27 -39 1 -46 0.2 0.1 43 -29 -20 -26 2 -46 0.4 0.3 34 20 -47 -49 1 -46 0.7 0.2 -2 -47 20 -9 2 -46 0.1 0.5 44 6 44 33 2 -46 0.8 0.5 34 -43 15 -21 1 -46 0.8 0.1 50 -22 13 -24 1 -46 0.1 0.8 10 2 48 37 2 -46 0.2 0.8 27 -1 50 -45 2 -46 0.2 0.8 -28 -50 -29 -47 2 -46 0.1 0.3 12 -29 -16 -28 1 -46 0.5 0.7 39 -3 32 -14 2 -46 0.9 0.3 19 -7 48 39 2 -46 0.5 0.5 43 -18 40 -11 2 -46 0.2 0.1 -7 -17 -22 -37 1 -46 0.4 0.5 -32 -38 38 -18 2 -46 0.3 0.5 -2 -3 -18 -28 2 -46 0.2 0.8 24 23 21 15 2 -46 0.2 0.8 25 -41 -24 -27 1 -46 0.3 0.6 44 -21 17 11 2 -46 0.4 0.4 44 5 -11 -39 2 -46 0.9 0.4 25 17 28 -15 1 -46 0.8 0.5 31 -38 49 -37 1 -46 0.7 0.4 6 -7 21 20 2 -46 0.4 0.3 -7 -43 18 4 2 -46 0.4 0.2 49 -2 30 -21 1 -46 0.6 0.1 7 -3 43 -26 2 -46 0.2 0.7 48 8 25 -47 1 -46 0.9 0.9 34 -19 -31 -44 1 -46 0.8 0.1 -1 -47 41 31 2 -46 0.7 0.8 44 19 38 -44 1 -46 0.3 0.1 11 -34 33 -35 1 -46 0.7 0.2 43 -31 7 -18 1 -46 0.6 0.1 -8 -36 28 22 2 -46 0.8 0.2 48 -12 -20 -39 1 -46 0.3 0.9 11 -30 39 -14 2 -46 0.1 0.3 41 -46 1 -38 2 -46 0.9 0.4 46 -7 1 -42 1 -46 0.1 0.3 1 -17 28 -9 2 -46 0.1 0.3 4 -35 39 -48 2 -46 0.3 0.5 7 -35 -17 -33 1 -46 0.5 0.7 15 -48 42 -9 2 -46 0.2 0.5 28 13 -14 -27 1 -46 0.2 0.1 24 -13 50 46 2 -46 0.5 0.2 -9 -20 18 -26 1 -46 0.5 0.4 46 18 45 20 1 -46 0.5 0.1 27 25 -27 -29 1 -46 0.8 0.2 14 -48 37 -50 1 -46 0.5 0.8 42 -31 26 14 2 -46 0.6 0.8 46 -9 45 -14 2 -46 0.8 0.3 -10 -25 48 -28 1 -46 0.1 0.8 17 -1 43 18 2 -46 0.9 0.4 44 -47 4 -47 1 -46 0.8 0.3 29 -35 35 -44 1 -46 0.6 0.4 6 -8 39 -31 1 -46 0.7 0.2 -4 -23 30 -17 2 -46 0.5 0.6 36 -31 23 -37 1 -46 0.7 0.5 46 18 29 20 2 -46 0.8 0.5 11 -37 -18 -28 1 -46 0.1 0.5 30 -20 25 -34 2 -46 0.2 0.6 -37 -43 5 -23 2 -46 0.6 0.9 50 30 49 47 1 -46 0.8 0.7 27 -49 24 -36 1 -46 0.5 0.5 25 11 27 19 2 -46 0.6 0.1 30 20 -10 -25 1 -46 0.3 0.7 33 3 33 -22 1 -46 0.1 0.6 8 -21 37 -37 2 -46 0.5 0.3 -4 -24 -25 -27 2 -46 0.5 0.3 34 -23 31 -13 1 -46 0.8 0.8 -21 -36 49 -14 2 -46 0.9 0.6 17 -4 -18 -37 1 -46 0.7 0.4 -4 -15 27 -11 2 -46 0.4 0.7 33 -49 27 0 2 -46 0.4 0.3 27 -22 37 -24 1 -46 0.9 0.6 -6 -41 28 -42 2 -46 0.9 0.6 20 -15 41 8 1 -46 0.4 0.4 -1 -18 15 10 2 -46 0.8 0.3 47 -28 -7 -39 1 -46 0.6 0.3 11 8 -18 -40 1 -46 0.6 0.4 27 -37 29 3 2 -46 0.4 0.5 41 17 12 -45 1 -46 0.4 0.1 40 13 -7 -45 1 -46 0.7 0.5 47 16 19 -13 1 -46 0.2 0.3 39 -30 36 12 2 -46 0.2 0.9 49 -36 22 1 2 -46 0.9 0.1 10 -35 -8 -24 1 -46 0.7 0.1 16 -49 36 -6 1 -46 0.1 0.4 43 -41 0 -44 2 -46 0.2 0.2 44 13 20 -34 1 -46 0.2 0.3 -4 -24 37 -5 1 -46 0.6 0.2 -24 -39 33 11 2 -46 0.7 0.4 50 10 -12 -39 1 -46 0.1 0.9 26 -39 -41 -43 2 -46 0.8 0.2 17 -13 -11 -26 1 -46 0.5 0.3 41 -50 41 -17 2 -46 0.3 0.6 -4 -41 11 -36 2 -46 0.8 0.6 20 -20 25 15 2 -46 0.2 0.2 18 -33 45 -41 1 -46 0.9 0.3 29 6 43 23 2 -46 0.6 0.7 -3 -21 -10 -50 1 -46 0.5 0.3 9 -28 
0 -15 2 -46 0.6 0.1 -4 -17 18 -10 1 -46 0.8 0.9 12 -43 -17 -36 1 -46 0.4 0.4 49 -41 28 21 2 -46 0.1 0.9 2 -20 32 -39 2 -46 0.6 0.4 21 10 46 30 2 -46 0.5 0.7 37 -21 40 -4 2 -46 0.5 0.6 44 -9 -37 -41 1 -46 0.2 0.9 29 -14 47 -6 2 -46 0.5 0.4 26 -16 -15 -39 1 -46 0.3 0.5 46 21 -3 -29 1 -46 0.7 0.1 7 -43 23 -19 1 -46 0.5 0.9 46 -24 39 -29 2 -46 0.4 0.8 32 21 49 13 2 -46 0.3 0.1 32 8 7 -48 1 -46 0.4 0.1 0 -24 -7 -23 2 -46 0.8 0.8 50 3 -5 -20 1 -46 0.7 0.7 -10 -39 45 -29 2 -46 0.9 0.3 24 19 -37 -49 1 -46 0.9 0.6 43 36 38 -43 1 -46 0.9 0.9 34 -23 42 6 2 -46 0.4 0.2 -17 -18 47 3 2 -46 0.6 0.2 50 22 22 -5 1 -46 0.6 0.7 -6 -20 30 -46 2 -46 0.3 0.2 48 19 49 -45 1 -46 0.5 0.8 -15 -26 10 -20 2 -46 0.8 0.6 49 -24 43 9 1 -46 0.1 0.6 32 -46 33 -2 2 -46 0.9 0.8 12 -11 37 -1 2 -46 0.1 0.1 37 10 24 22 2 -46 0.6 0.1 10 -48 39 32 2 -46 0.3 0.6 30 -41 5 1 2 -46 0.7 0.4 41 18 2 -31 1 -46 0.5 0.8 44 5 23 7 1 -46 0.4 0.6 -2 -10 -3 -6 2 -46 0.5 0.8 2 -27 -16 -20 1 -46 0.4 0.9 -7 -45 7 -42 2 -46 0.9 0.4 -2 -23 45 33 2 -46 0.8 0.4 34 25 1 -11 1 -46 0.7 0.7 22 -30 18 9 2 -46 0.6 0.5 38 0 44 -48 1 -46 0.6 0.7 -12 -31 24 -5 2 -46 0.6 0.8 30 -44 47 -19 2 -46 0.4 0.6 7 -28 -24 -48 1 -46 0.2 0.7 8 -4 29 -21 2 -46 0.4 0.1 16 -8 39 -19 1 -46 0.3 0.3 40 2 4 -11 1 -46 0.5 0.7 37 3 -45 -50 1 -46 0.3 0.9 -26 -28 35 -36 2 -46 0.2 0.4 26 -30 31 -35 2 -46 0.7 0.2 3 -23 25 -35 2 -46 0.6 0.6 0 -28 0 -10 1 -46 0.1 0.5 41 16 41 23 2 -46 0.3 0.2 -8 -23 -29 -38 1 -46 0.6 0.8 -33 -45 28 -30 2 -46 0.5 0.1 14 -22 39 -32 2 -46 0.5 0.7 45 -32 -27 -46 2 -46 0.6 0.1 22 -15 19 7 1 -46 0.9 0.5 45 24 45 14 1 -46 0.8 0.2 47 -2 20 -6 1 -46 0.7 0.4 18 -45 29 -6 2 -46 0.7 0.7 -9 -39 -18 -32 2 -46 0.7 0.6 20 -14 2 -46 1 -46 0.7 0.3 25 -18 37 6 2 -46 0.1 0.1 -9 -15 -30 -47 2 -46 0.7 0.7 -22 -30 -21 -47 2 -46 0.3 0.2 5 -38 12 -2 2 -46 0.7 0.8 34 -17 41 27 2 -46 0.3 0.7 45 -45 42 11 2 -46 0.4 0.2 29 24 43 25 1 -46 0.3 0.6 24 -34 21 -16 2 -46 0.4 0.2 41 26 44 15 2 -46 0.9 0.5 21 6 23 20 1 -46 0.9 0.7 -6 -29 21 -24 2 -46 0.2 0.2 19 13 47 22 2 -46 0.7 0.3 45 22 25 -7 1 -46 0.9 0.7 -13 -44 48 10 2 -46 0.4 0.6 42 0 33 -15 1 -46 0.9 0.2 25 -21 9 -4 1 -46 0.6 0.3 42 32 29 25 1 -46 0.1 0.7 33 -46 46 37 2 -46 0.2 0.9 16 0 -24 -48 1 -46 0.6 0.5 21 -1 21 -37 1 -46 0.2 0.1 36 19 3 -49 1 -46 0.1 0.9 -39 -40 29 2 2 -46 0.5 0.8 6 -24 20 9 2 -46 0.3 0.4 2 -4 34 11 2 -46 0.6 0.4 49 -18 -15 -18 1 -46 0.6 0.2 19 -35 42 -18 1 -46 0.6 0.8 4 -38 23 18 2 -46 0.5 0.9 5 -5 -25 -33 2 -46 0.2 0.7 14 13 33 -32 2 -46 0.5 0.7 5 -33 50 25 2 -46 0.8 0.7 45 -31 43 26 2 -46 0.3 0.5 28 -31 48 -7 2 -46 0.8 0.9 20 -38 22 -23 1 -46 0.6 0.8 6 -45 -7 -15 2 -46 0.7 0.5 22 -24 -5 -41 1 -46 0.8 0.7 43 35 -26 -36 1 -46 0.6 0.6 -32 -48 -20 -31 1 -46 0.6 0.7 48 1 -9 -31 1 -46 0.4 0.2 47 -31 37 -8 2 -46 0.2 0.4 19 7 25 19 2 -46 0.5 0.7 35 -31 0 -18 2 -46 0.1 0.9 9 -17 26 0 2 -46 0.8 0.5 40 12 48 -46 1 -46 0.9 0.9 38 26 12 -31 1 -46 0.9 0.8 -24 -36 26 5 2 -46 0.4 0.2 -47 -49 -36 -37 1 -46 0.7 0.6 2 -26 5 -49 1 -46 0.8 0.2 43 -47 -28 -40 1 -46 0.4 0.9 6 -31 7 -35 2 -46 0.6 0.4 47 -18 48 -26 2 -46 0.2 0.4 49 33 35 -28 1 -46 0.3 0.9 28 19 16 -8 2 -46 0.3 0.2 35 -31 1 -22 1 -46 0.7 0.8 16 -1 33 -36 2 -46 0.7 0.2 25 -15 37 21 2 -46 0.9 0.1 42 -27 49 -48 1 -46 0.8 0.3 44 -45 -18 -19 2 -46 0.3 0.9 -45 -47 -26 -44 1 -46 0.1 0.5 30 4 31 12 2 -46 0.3 0.5 7 -19 32 -17 1 -46 0.9 0.4 41 3 32 -12 1 -46 0.1 0.6 38 19 -16 -19 1 -46 0.1 0.1 11 -39 50 -37 2 -46 0.7 0.7 36 -19 18 -34 1 -46 0.1 0.9 33 -38 12 -23 2 -46 0.1 0.3 28 -6 -1 -10 2 -46 0.8 0.7 40 24 -10 -33 1 -46 0.2 0.4 4 -1 11 3 2 -46 0.6 0.8 48 37 29 -4 2 -46 
0.3 0.1 -9 -23 43 -25 2 -46 0.6 0.4 8 -18 20 -36 1 -46 0.1 0.8 22 0 50 13 2 -46 0.4 0.9 23 -45 -19 -43 1 -46 0.8 0.4 38 32 -41 -47 1 -46 0.8 0.6 -29 -38 40 -12 2 -46 0.9 0.4 10 -27 36 9 2 -46 0.5 0.4 -28 -29 16 15 2 -46 0.5 0.2 46 2 37 -27 1 -46 0.8 0.8 30 16 37 -45 1 -46 0.3 0.6 33 -28 38 36 2 -46 0.4 0.6 24 -26 23 -5 1 -46 0.2 0.7 -40 -49 -6 -33 2 -46 0.4 0.9 -29 -30 1 -20 2 -46 0.6 0.5 40 -19 41 7 1 -46 0.9 0.2 48 31 -22 -44 1 -46 0.1 0.7 -6 -34 22 -41 2 -46 0.9 0.7 10 -30 9 -24 1 -46 0.4 0.1 -2 -3 9 -30 1 -46 0.6 0.8 -40 -42 18 0 2 -46 0.1 0.8 -6 -37 30 -5 2 -46 0.9 0.9 24 13 23 2 1 -46 0.2 0.5 -18 -38 35 27 2 -46 0.5 0.1 28 -50 -40 -49 1 -46 0.5 0.8 -36 -46 -26 -27 2 -46 0.7 0.3 -44 -47 10 -29 2 -46 0.4 0.9 35 -50 -10 -17 1 -46 0.2 0.6 44 20 45 -50 1 -46 0.7 0.9 50 -45 46 39 2 -46 0.1 0.3 48 -34 48 -33 1 -46 0.5 0.6 34 -37 -6 -40 1 -46 0.4 0.5 50 2 7 -36 1 -46 0.2 0.7 -3 -24 36 9 2 -46 0.8 0.6 27 -37 -10 -33 1 -46 0.4 0.8 36 -12 30 -27 1 -46 0.9 0.7 41 -5 46 43 1 -46 0.3 0.6 -3 -6 -6 -20 2 -46 0.9 0.7 40 8 7 -35 1 -46 0.9 0.5 13 -20 -18 -38 1 -46 0.9 0.5 38 20 34 -26 1 -47 0.8 0.7 -25 -26 44 6 2 -47 0.9 0.2 25 11 -31 -39 1 -47 0.6 0.7 7 -23 47 -19 2 -47 0.4 0.6 49 40 18 -45 2 -47 0.1 0.1 -45 -50 45 1 2 -47 0.6 0.4 34 7 17 -46 1 -47 0.7 0.3 -15 -35 30 -22 2 -47 0.1 0.2 4 3 -3 -8 1 -47 0.2 0.5 48 -40 -7 -37 1 -47 0.6 0.3 16 -44 -15 -18 1 -47 0.4 0.6 34 -25 36 -9 2 -47 0.5 0.1 49 27 37 36 1 -47 0.2 0.7 35 -37 11 5 2 -47 0.3 0.5 -19 -35 -18 -36 2 -47 0.8 0.1 43 35 12 -5 1 -47 0.9 0.5 36 -32 12 -35 1 -47 0.1 0.5 -11 -43 25 -23 2 -47 0.8 0.7 27 -49 8 -46 2 -47 0.8 0.4 -10 -17 -1 -45 1 -47 0.6 0.6 -1 -10 7 -47 2 -47 0.8 0.9 -17 -23 -7 -39 2 -47 0.7 0.7 -3 -14 48 -49 1 -47 0.5 0.8 -27 -34 26 14 2 -47 0.1 0.4 31 30 28 -43 1 -47 0.9 0.6 42 11 19 -6 1 -47 0.1 0.5 45 -27 -12 -24 1 -47 0.2 0.7 25 -18 -39 -47 1 -47 0.4 0.9 14 -21 -6 -41 1 -47 0.1 0.4 44 11 48 22 1 -47 0.6 0.8 36 -32 -3 -39 1 -47 0.2 0.3 44 -30 39 23 2 -47 0.6 0.2 -29 -32 12 -31 1 -47 0.9 0.2 43 -25 26 16 1 -47 0.8 0.9 14 13 -2 -44 1 -47 0.5 0.9 36 -13 21 -38 1 -47 0.4 0.1 41 35 22 7 1 -47 0.1 0.1 14 -1 -3 -11 2 -47 0.9 0.9 27 -14 36 -30 1 -47 0.1 0.7 34 -19 15 1 1 -47 0.7 0.5 37 -26 39 -2 2 -47 0.4 0.5 43 12 45 28 1 -47 0.1 0.5 -18 -26 17 -4 2 -47 0.8 0.2 8 4 17 -16 1 -47 0.2 0.9 4 -12 -7 -21 1 -47 0.8 0.3 -5 -17 38 -3 1 -47 0.9 0.2 50 5 9 -30 1 -47 0.9 0.1 26 -35 18 -28 1 -47 0.5 0.4 42 38 -10 -22 1 -47 0.9 0.3 14 -21 29 -50 1 -47 0.3 0.4 -30 -45 25 -14 1 -47 0.7 0.6 45 4 34 7 1 -47 0.3 0.7 26 5 -25 -47 1 -47 0.9 0.2 6 -20 49 15 2 -47 0.2 0.6 35 33 32 -28 1 -47 0.8 0.8 38 27 4 -14 1 -47 0.6 0.2 -2 -27 -4 -10 1 -47 0.7 0.1 -28 -29 -14 -35 2 -47 0.9 0.6 49 8 11 5 1 -47 0.9 0.6 35 -22 28 17 2 -47 0.3 0.7 34 26 49 48 2 -47 0.6 0.3 -28 -35 -1 -21 1 -47 0.9 0.5 11 -21 41 30 2 -47 0.4 0.6 48 4 0 -48 1 -47 0.6 0.7 -13 -18 -34 -39 1 -47 0.1 0.8 50 13 34 -34 1 -47 0.7 0.6 -31 -33 47 -47 2 -47 0.9 0.3 -10 -45 17 -38 2 -47 0.9 0.1 20 -8 -9 -17 1 -47 0.8 0.3 -34 -41 19 -48 2 -47 0.8 0.7 30 -2 38 -25 1 -47 0.7 0.7 24 22 -29 -32 1 -47 0.5 0.6 39 20 36 12 2 -47 0.8 0.4 14 7 45 -6 1 -47 0.4 0.8 32 12 49 -24 2 -47 0.5 0.6 40 -17 32 -17 1 -47 0.8 0.4 38 -28 1 -32 1 -47 0.9 0.3 32 -9 -20 -31 1 -47 0.4 0.8 46 33 15 10 2 -47 0.9 0.1 20 -31 48 -50 1 -47 0.7 0.5 35 -4 46 37 2 -47 0.4 0.4 11 -28 -9 -21 2 -47 0.5 0.9 35 24 8 -11 2 -47 0.1 0.6 24 2 32 -15 2 -47 0.5 0.2 8 -22 37 15 2 -47 0.6 0.9 20 -44 42 -11 2 -47 0.5 0.6 5 -19 34 -17 2 -47 0.8 0.1 43 -8 26 -40 2 -47 0.4 0.9 6 -45 30 2 2 -47 0.1 0.4 40 -17 29 4 2 -47 0.3 0.2 -14 -18 5 -31 2 -47 0.9 0.5 -13 
-15 -14 -35 1 -47 0.6 0.1 -22 -29 12 1 2 -47 0.8 0.5 6 -44 25 -49 1 -47 0.3 0.1 5 -33 41 -16 2 -47 0.2 0.2 35 25 -30 -37 1 -47 0.4 0.1 -4 -16 38 -8 2 -47 0.5 0.8 50 22 28 -25 1 -47 0.9 0.4 -14 -17 -23 -35 1 -47 0.7 0.7 45 5 45 -48 2 -47 0.7 0.5 42 -42 24 -15 2 -47 0.8 0.1 12 -19 12 -30 1 -47 0.3 0.4 27 -44 -9 -36 2 -47 0.8 0.5 21 -7 -5 -11 1 -47 0.1 0.4 49 -18 15 6 2 -47 0.9 0.9 33 0 -8 -16 1 -47 0.7 0.3 5 -41 36 -26 1 -47 0.7 0.2 37 -8 12 6 1 -47 0.4 0.1 37 -18 13 -1 1 -47 0.9 0.5 39 -29 20 12 1 -47 0.2 0.2 -37 -45 49 -17 2 -47 0.1 0.5 12 -49 -36 -48 2 -47 0.9 0.2 9 -9 10 -34 2 -47 0.7 0.8 37 6 -15 -28 1 -47 0.4 0.4 11 -16 18 -35 1 -47 0.2 0.8 37 -2 28 -49 1 -47 0.4 0.4 23 -47 22 -24 1 -47 0.4 0.5 29 3 45 -8 1 -47 0.8 0.6 31 -4 31 -29 1 -47 0.2 0.9 29 -49 -10 -48 2 -47 0.7 0.4 2 -15 32 -25 2 -47 0.7 0.7 48 41 19 -39 1 -47 0.1 0.1 -8 -37 21 -17 2 -47 0.8 0.7 28 16 46 28 2 -47 0.9 0.1 50 7 45 -24 1 -47 0.9 0.5 -7 -24 6 -1 2 -47 0.4 0.8 35 6 -30 -33 1 -47 0.4 0.8 -27 -44 -14 -37 2 -47 0.4 0.2 -10 -28 8 -8 2 -47 0.5 0.3 42 -10 6 -42 1 -47 0.9 0.9 15 -49 40 -22 2 -47 0.3 0.7 21 -36 41 11 2 -47 0.6 0.7 44 -45 46 44 2 -47 0.5 0.5 21 18 41 40 2 -47 0.4 0.9 33 16 -4 -41 2 -47 0.3 0.7 49 47 33 -30 1 -47 0.9 0.9 36 -1 -4 -40 1 -47 0.2 0.9 1 -45 19 -40 2 -47 0.8 0.3 22 -37 50 0 1 -47 0.1 0.3 32 -9 49 -38 1 -47 0.5 0.3 36 -33 44 6 2 -47 0.1 0.6 -20 -26 -33 -39 1 -47 0.1 0.6 19 -19 5 -32 2 -47 0.6 0.1 0 -32 -23 -49 2 -47 0.7 0.5 8 -38 31 27 2 -47 0.9 0.6 11 -46 30 -41 1 -47 0.5 0.5 27 -31 20 -41 1 -47 0.6 0.9 4 -20 39 3 2 -47 0.1 0.9 43 40 30 -21 1 -47 0.8 0.8 24 -11 -24 -32 1 -47 0.3 0.4 22 -36 47 -30 2 -47 0.2 0.9 44 -24 42 -31 2 -47 0.9 0.1 18 -4 -30 -32 1 -47 0.6 0.4 11 -5 47 -15 2 -47 0.5 0.2 -1 -28 9 -21 1 -47 0.4 0.7 9 3 34 19 2 -47 0.3 0.4 45 -9 45 0 2 -47 0.9 0.8 25 9 36 -1 2 -47 0.8 0.5 41 19 16 0 1 -47 0.7 0.3 2 -14 4 -44 2 -47 0.4 0.6 21 -10 21 -12 2 -47 0.3 0.5 36 25 4 -32 1 -47 0.5 0.1 35 -7 15 -34 1 -47 0.4 0.7 22 9 -5 -41 1 -47 0.8 0.8 20 -35 6 -12 2 -47 0.6 0.4 -29 -48 12 -17 2 -47 0.6 0.8 -15 -32 -30 -48 1 -47 0.9 0.8 49 41 47 8 1 -47 0.6 0.4 11 -27 25 -34 2 -47 0.9 0.3 14 7 -15 -22 1 -47 0.3 0.4 41 35 24 -10 1 -47 0.5 0.1 -5 -17 -1 -25 1 -47 0.8 0.2 -41 -44 18 -20 2 -47 0.7 0.6 2 -9 15 -27 1 -47 0.7 0.3 -21 -30 34 -31 1 -47 0.9 0.5 -2 -17 10 -23 2 -47 0.8 0.7 49 -36 17 -4 2 -47 0.7 0.2 32 4 1 -9 1 -47 0.1 0.4 -30 -44 -2 -15 2 -47 0.4 0.2 46 -30 28 22 2 -47 0.8 0.3 38 8 39 32 2 -47 0.1 0.7 3 -18 -16 -49 2 -47 0.7 0.1 -29 -46 4 -4 2 -47 0.1 0.9 30 19 -39 -40 1 -47 0.8 0.4 26 -43 29 -8 1 -47 0.9 0.9 29 18 50 7 2 -47 0.3 0.2 9 -38 -30 -44 1 -47 0.8 0.2 43 -25 -28 -43 1 -47 0.1 0.3 25 -13 45 -24 2 -47 0.3 0.6 31 -18 -11 -18 1 -47 0.3 0.9 -36 -40 13 -30 2 -47 0.1 0.6 1 -49 17 12 2 -47 0.3 0.2 -22 -48 41 -9 2 -47 0.1 0.9 24 -43 24 -42 2 -47 0.3 0.1 34 -28 12 -45 1 -47 0.9 0.7 32 -22 36 -50 1 -47 0.3 0.4 -15 -34 39 34 2 -47 0.4 0.9 49 -18 27 -43 2 -47 0.6 0.6 48 12 46 -15 1 -47 0.6 0.8 47 7 -19 -25 1 -47 0.8 0.1 45 41 9 -36 1 -47 0.5 0.1 33 -6 4 -25 1 -47 0.1 0.1 47 -45 43 20 2 -47 0.7 0.5 21 -17 19 -16 1 -47 0.6 0.8 41 -10 -1 -21 1 -47 0.4 0.2 45 -22 12 -34 1 -47 0.5 0.4 47 -43 15 -23 2 -47 0.5 0.8 18 -40 38 -47 2 -47 0.6 0.5 38 -11 -14 -37 1 -47 0.4 0.4 -11 -33 45 -5 2 -47 0.5 0.3 -17 -42 22 -6 2 -47 0.2 0.4 24 -46 35 6 2 -47 0.3 0.5 28 7 45 -30 2 -47 0.8 0.2 12 -13 -23 -44 1 -47 0.6 0.4 26 19 38 -9 2 -47 0.7 0.8 -10 -45 21 -7 2 -47 0.3 0.9 39 18 50 -36 2 -47 0.2 0.5 20 -18 35 -5 2 -47 0.2 0.5 50 5 3 -1 1 -47 0.5 0.6 50 -29 25 -27 1 -47 0.7 0.5 38 11 47 -40 1 -47 0.6 0.4 50 -48 41 16 2 -47 
0.7 0.1 20 7 11 -48 1 -47 0.4 0.5 24 -23 10 -16 2 -47 0.1 0.7 37 -17 37 -16 2 -47 0.8 0.3 7 -10 -2 -10 1 -47 0.2 0.8 -15 -18 -25 -29 1 -47 0.6 0.6 44 2 9 -34 1 -47 0.2 0.3 41 -13 30 -43 1 -47 0.1 0.4 25 -14 -12 -46 1 -47 0.6 0.7 40 -11 -13 -16 1 -47 0.8 0.2 41 6 0 -12 1 -47 0.9 0.8 -14 -43 34 -11 2 -47 0.4 0.1 -38 -48 27 -4 2 -47 0.5 0.1 20 -46 32 -40 1 -47 0.1 0.9 27 -37 30 6 2 -47 0.2 0.5 -2 -33 -18 -29 2 -47 0.7 0.4 36 35 36 -48 1 -47 0.7 0.8 35 28 -13 -36 1 -47 0.8 0.3 34 10 -9 -16 1 -47 0.7 0.6 44 -25 9 -4 1 -47 0.5 0.4 22 -11 45 38 1 -47 0.9 0.7 -14 -36 24 10 2 -47 0.5 0.5 43 -29 50 46 2 -47 0.1 0.1 26 -44 26 15 2 -47 0.4 0.7 2 -50 6 -42 1 -47 0.7 0.4 35 -49 37 -11 1 -47 0.1 0.7 -27 -49 25 -3 2 -47 0.4 0.5 -14 -16 43 2 2 -47 0.9 0.8 38 -38 43 -14 1 -47 0.1 0.4 -3 -7 -1 -23 2 -47 0.5 0.2 18 -11 14 13 2 -47 0.3 0.1 8 -39 38 -34 1 -47 0.2 0.7 46 -10 24 -17 1 -47 0.6 0.1 24 4 32 -10 1 -47 0.6 0.6 41 -2 10 -25 1 -47 0.3 0.2 7 -46 49 12 2 -47 0.4 0.1 37 30 -21 -32 1 -47 0.3 0.6 50 21 -31 -35 1 -47 0.9 0.8 -28 -42 15 -15 2 -47 0.1 0.5 -6 -45 5 -23 2 -47 0.5 0.3 -12 -23 -2 -24 1 -47 0.9 0.6 38 5 34 25 1 -47 0.9 0.1 14 -12 6 -34 1 -47 0.4 0.9 45 10 8 -43 1 -47 0.2 0.6 8 -36 40 -49 2 -47 0.4 0.8 33 28 37 -5 2 -47 0.5 0.3 -10 -44 50 -9 1 -47 0.2 0.3 -4 -49 45 -4 2 -47 0.8 0.6 39 14 32 2 1 -47 0.4 0.7 -2 -16 9 6 2 -47 0.4 0.3 -16 -30 7 -8 2 -47 0.7 0.9 19 -50 17 9 2 -47 0.2 0.7 28 -38 4 -49 1 -47 0.4 0.1 48 33 1 -47 1 -47 0.3 0.9 -3 -16 35 -30 2 -47 0.6 0.2 11 1 44 -13 1 -47 0.2 0.9 7 -29 -1 -44 1 -47 0.5 0.9 43 8 -26 -42 1 -47 0.1 0.1 50 -25 2 -39 1 -47 0.8 0.1 47 44 46 39 2 -47 0.1 0.7 25 -46 32 -48 2 -47 0.1 0.4 50 -40 7 -33 2 -47 0.9 0.4 28 -22 8 6 2 -47 0.7 0.2 32 -14 41 24 2 -47 0.5 0.1 43 0 -25 -31 1 -47 0.9 0.4 20 -19 42 23 2 -47 0.9 0.7 40 7 20 -15 1 -47 0.9 0.4 -17 -26 34 -31 2 -47 0.7 0.6 -10 -50 -3 -35 2 -47 0.4 0.8 33 25 12 -11 2 -47 0.3 0.8 22 -36 43 -40 2 -47 0.5 0.9 28 14 -1 -20 1 -47 0.5 0.5 -30 -46 20 -20 1 -47 0.7 0.7 -21 -35 9 -24 1 -47 0.1 0.5 18 11 27 -41 1 -47 0.6 0.4 10 -4 32 -30 2 -47 0.4 0.2 8 -39 40 7 2 -47 0.9 0.4 49 1 11 -31 1 -47 0.1 0.1 -41 -45 -13 -25 1 -47 0.5 0.4 29 -33 -19 -38 1 -48 0.5 0.9 -1 -43 -35 -41 1 -48 0.9 0.7 -19 -31 43 27 2 -48 0.6 0.7 -41 -48 6 -40 2 -48 0.9 0.6 27 -38 -5 -24 1 -48 0.4 0.5 28 5 -47 -50 1 -48 0.8 0.6 -8 -33 33 2 2 -48 0.8 0.1 38 10 48 17 1 -48 0.4 0.4 50 15 -34 -48 1 -48 0.5 0.8 -47 -48 13 -38 2 -48 0.3 0.6 -33 -47 38 15 2 -48 0.9 0.6 3 -14 43 12 2 -48 0.5 0.5 -26 -28 41 -41 2 -48 0.5 0.5 37 -39 41 -6 2 -48 0.7 0.1 18 -3 43 -24 1 -48 0.5 0.9 -15 -21 19 0 2 -48 0.3 0.6 -17 -42 -11 -13 2 -48 0.7 0.8 29 23 23 16 1 -48 0.3 0.1 23 -30 42 -4 2 -48 0.2 0.9 33 -44 36 23 2 -48 0.8 0.1 -37 -41 27 -35 2 -48 0.6 0.4 -28 -48 50 -18 2 -48 0.3 0.5 -7 -18 35 28 2 -48 0.5 0.9 19 -28 33 -11 2 -48 0.8 0.7 39 -13 39 3 1 -48 0.6 0.2 36 8 -15 -17 1 -48 0.4 0.4 41 3 9 -22 1 -48 0.4 0.6 -16 -21 -24 -35 1 -48 0.2 0.9 35 11 26 -8 2 -48 0.2 0.5 -34 -42 6 -2 2 -48 0.5 0.8 9 -12 -5 -21 1 -48 0.8 0.8 10 -2 42 3 2 -48 0.2 0.1 -15 -43 46 -32 2 -48 0.3 0.1 -18 -37 35 23 2 -48 0.8 0.6 38 -48 50 -14 2 -48 0.3 0.5 43 -49 40 8 2 -48 0.2 0.3 -12 -40 -37 -48 1 -48 0.9 0.7 50 -14 45 25 1 -48 0.5 0.4 -40 -44 17 -28 2 -48 0.3 0.4 4 -27 14 13 2 -48 0.5 0.6 18 -7 26 4 2 -48 0.2 0.9 47 -28 16 -41 2 -48 0.3 0.7 8 -21 26 15 2 -48 0.2 0.6 -20 -27 -17 -26 1 -48 0.4 0.6 47 -13 8 -45 1 -48 0.3 0.9 19 1 -1 -39 1 -48 0.6 0.6 6 -21 41 -8 2 -48 0.9 0.9 26 -2 36 -33 1 -48 0.4 0.5 -24 -32 8 -43 2 -48 0.7 0.4 45 21 49 29 1 -48 0.9 0.8 40 -44 18 -11 1 -48 0.3 0.5 45 35 11 -30 1 -48 0.8 0.5 8 
-37 -16 -31 1 -48 0.1 0.1 46 -37 35 -38 2 -48 0.3 0.7 30 29 18 -30 1 -48 0.6 0.3 0 -40 26 -33 1 -48 0.3 0.6 49 10 30 24 2 -48 0.1 0.1 27 26 41 -37 1 -48 0.1 0.8 38 33 17 -9 1 -48 0.7 0.8 48 -12 12 -36 1 -48 0.2 0.7 -7 -22 30 8 2 -48 0.5 0.4 5 -44 -19 -48 1 -48 0.8 0.5 19 11 26 17 2 -48 0.4 0.4 1 0 25 -22 1 -48 0.5 0.4 39 -33 -15 -35 1 -48 0.4 0.5 -6 -49 -26 -34 2 -48 0.7 0.8 7 -36 -14 -37 1 -48 0.3 0.7 37 7 36 35 2 -48 0.8 0.5 26 -11 20 -34 1 -48 0.9 0.2 -1 -20 12 -50 1 -48 0.1 0.5 -11 -41 24 -29 2 -48 0.1 0.8 38 33 39 -41 1 -48 0.5 0.6 34 -18 -7 -9 1 -48 0.7 0.1 40 -39 5 -26 1 -48 0.8 0.9 12 -34 -32 -50 1 -48 0.8 0.2 37 21 18 3 1 -48 0.4 0.3 17 -22 -21 -37 1 -48 0.7 0.4 13 -1 38 -50 1 -48 0.6 0.1 -44 -48 38 30 2 -48 0.7 0.9 33 7 -25 -30 1 -48 0.2 0.4 44 -14 49 24 2 -48 0.1 0.8 -29 -50 10 3 2 -48 0.2 0.4 -15 -20 29 -42 2 -48 0.5 0.3 -23 -29 -3 -31 2 -48 0.1 0.5 26 -38 13 -25 2 -48 0.8 0.9 6 -44 41 -6 2 -48 0.6 0.1 46 42 -15 -33 1 -48 0.4 0.2 -9 -21 32 -13 2 -48 0.2 0.2 31 1 31 18 2 -48 0.4 0.8 12 -19 11 -42 2 -48 0.2 0.4 -9 -21 42 12 2 -48 0.2 0.5 8 -18 27 -48 2 -48 0.2 0.6 -16 -29 45 11 2 -48 0.8 0.9 24 -25 -24 -38 1 -48 0.2 0.5 22 -3 -35 -36 1 -48 0.5 0.2 -29 -38 44 -16 2 -48 0.1 0.7 38 13 16 -21 1 -48 0.1 0.9 -9 -45 -13 -18 2 -48 0.9 0.4 22 -41 38 10 2 -48 0.3 0.7 21 -41 23 -37 2 -48 0.5 0.4 26 4 45 39 2 -48 0.1 0.4 34 -6 19 -11 2 -48 0.1 0.4 -2 -44 -11 -40 2 -48 0.7 0.3 11 -6 43 23 2 -48 0.5 0.3 16 15 -33 -42 1 -48 0.3 0.2 -32 -44 -33 -50 1 -48 0.6 0.3 26 -32 10 -46 1 -48 0.4 0.2 35 22 11 -19 1 -48 0.2 0.3 18 -28 24 -39 2 -48 0.8 0.3 17 -8 -14 -41 1 -48 0.1 0.4 44 -29 34 11 2 -48 0.5 0.7 39 11 43 -17 2 -48 0.4 0.4 -27 -38 46 -19 2 -48 0.3 0.8 0 -4 40 -22 2 -48 0.9 0.2 34 -21 6 -50 1 -48 0.4 0.6 -21 -22 -7 -32 2 -48 0.1 0.4 26 18 -26 -44 1 -48 0.3 0.9 49 15 22 -16 1 -48 0.1 0.3 41 20 1 -34 1 -48 0.7 0.8 25 18 -4 -29 1 -48 0.3 0.9 -4 -35 24 -10 2 -48 0.5 0.2 11 -17 10 -32 1 -48 0.2 0.2 -27 -32 36 -23 2 -48 0.5 0.3 -28 -50 5 -37 2 -48 0.6 0.1 8 -24 -29 -47 1 -48 0.8 0.8 14 -1 -23 -32 1 -48 0.1 0.5 -11 -12 40 27 2 -48 0.6 0.5 49 -34 2 -24 1 -48 0.6 0.3 -7 -22 50 -36 2 -48 0.8 0.5 37 1 42 10 1 -48 0.5 0.9 43 -26 14 4 1 -48 0.4 0.2 50 -31 19 11 2 -48 0.9 0.5 29 -31 7 -4 1 -48 0.7 0.2 29 8 -39 -44 1 -48 0.3 0.7 49 42 -32 -50 1 -48 0.1 0.6 6 -37 -15 -28 2 -48 0.5 0.1 37 -1 15 14 2 -48 0.8 0.8 -5 -25 9 -43 2 -48 0.3 0.1 17 5 10 -13 1 -48 0.8 0.2 -4 -41 4 -33 1 -48 0.1 0.4 -3 -49 -4 -33 2 -48 0.6 0.2 35 -6 49 -39 1 -48 0.7 0.9 -2 -6 -22 -44 1 -48 0.4 0.5 -17 -48 47 -11 2 -48 0.9 0.9 47 -33 -10 -31 1 -48 0.9 0.7 10 -16 -21 -42 1 -48 0.7 0.5 48 -39 42 -38 1 -48 0.1 0.8 -12 -47 27 16 2 -48 0.6 0.5 1 -30 3 -42 1 -48 0.1 0.5 45 39 29 16 1 -48 0.7 0.7 45 39 12 -6 1 -48 0.8 0.7 37 -31 -37 -40 1 -48 0.9 0.5 46 -8 39 -35 1 -48 0.7 0.2 20 2 22 2 1 -48 0.2 0.8 22 -49 28 -41 2 -48 0.3 0.4 -12 -25 27 -6 2 -48 0.6 0.3 50 -12 -21 -30 1 -48 0.3 0.5 15 -26 4 -1 2 -48 0.5 0.7 16 13 14 -21 1 -48 0.8 0.5 42 -41 48 34 2 -48 0.1 0.5 20 -35 42 -26 2 -48 0.4 0.2 18 -30 20 -41 1 -48 0.5 0.5 33 -35 18 -48 1 -48 0.4 0.2 -17 -39 43 11 2 -48 0.3 0.7 27 -30 3 -21 2 -48 0.4 0.1 39 20 34 18 1 -48 0.7 0.6 48 24 6 0 1 -48 0.4 0.1 39 -23 -6 -28 1 -48 0.6 0.8 -4 -29 3 2 2 -48 0.1 0.4 3 -35 14 -19 2 -48 0.5 0.5 -23 -32 11 -44 2 -48 0.6 0.5 -5 -10 26 -1 2 -48 0.7 0.4 23 -21 46 -34 1 -48 0.3 0.1 -2 -43 -15 -21 2 -48 0.9 0.4 49 39 49 26 1 -48 0.7 0.9 30 10 8 -20 1 -48 0.2 0.9 47 -13 17 -37 2 -48 0.5 0.9 4 2 -25 -32 1 -48 0.4 0.5 24 -8 46 0 2 -48 0.9 0.1 33 29 42 -47 1 -48 0.2 0.1 4 -18 -12 -17 1 -48 0.6 0.8 47 -40 10 -9 1 -48 0.9 0.8 
-18 -41 23 -14 2 -48 0.5 0.4 21 15 11 -13 1 -48 0.8 0.9 39 17 46 -27 1 -48 0.7 0.6 40 -18 45 -34 1 -48 0.3 0.7 50 18 12 8 1 -48 0.9 0.5 45 41 -2 -23 1 -48 0.7 0.2 49 -47 25 -14 1 -48 0.4 0.3 35 16 28 20 1 -48 0.5 0.8 16 6 17 -10 1 -48 0.3 0.7 33 -8 5 -9 1 -48 0.6 0.8 35 -8 42 21 2 -48 0.4 0.3 37 -17 39 24 2 -48 0.3 0.9 -6 -45 31 7 2 -48 0.7 0.1 33 13 -3 -6 1 -48 0.2 0.1 32 1 3 -18 1 -48 0.6 0.2 -4 -39 45 23 2 -48 0.4 0.1 25 -13 -9 -19 1 -48 0.4 0.3 37 -25 39 -14 1 -48 0.8 0.7 35 -25 12 5 1 -48 0.3 0.1 29 22 -11 -27 1 -48 0.5 0.9 36 22 42 -15 1 -48 0.6 0.1 39 -37 -2 -5 1 -48 0.4 0.2 23 -18 -26 -45 1 -48 0.4 0.8 26 -28 -24 -28 1 -48 0.4 0.4 23 -1 1 -17 1 -48 0.9 0.7 6 -27 -5 -6 1 -48 0.7 0.9 -21 -29 29 -3 2 -48 0.7 0.4 9 -2 11 -46 1 -48 0.9 0.3 31 -13 43 21 1 -48 0.5 0.9 27 -35 2 -11 1 -48 0.1 0.8 10 -4 16 -41 1 -48 0.7 0.6 -19 -45 48 22 2 -48 0.2 0.9 16 -50 45 -3 2 -48 0.6 0.5 28 -2 50 -22 1 -48 0.1 0.2 40 -40 -36 -48 1 -48 0.8 0.4 48 -32 15 -32 1 -48 0.8 0.9 47 -45 -43 -45 1 -48 0.8 0.8 -16 -49 11 5 2 -48 0.7 0.4 13 -22 16 12 2 -48 0.9 0.8 -25 -45 -4 -50 2 -48 0.4 0.5 39 32 -17 -43 1 -48 0.7 0.2 -29 -30 15 14 2 -48 0.7 0.3 -2 -26 1 -5 2 -48 0.5 0.9 -11 -43 44 37 2 -48 0.4 0.1 35 -19 47 -41 1 -48 0.3 0.1 8 -44 10 6 2 -48 0.6 0.7 21 -46 27 18 2 -48 0.5 0.6 39 -39 15 -7 2 -48 0.3 0.5 2 -18 50 23 2 -48 0.7 0.5 19 -9 -18 -45 1 -48 0.5 0.9 38 -6 13 -5 1 -48 0.3 0.5 19 -23 -42 -49 1 -48 0.2 0.4 13 8 45 -1 2 -48 0.6 0.7 -6 -47 -2 -21 2 -48 0.2 0.4 -24 -32 25 19 2 -48 0.5 0.9 7 -41 -14 -30 1 -48 0.2 0.9 17 2 21 6 2 -48 0.1 0.3 -27 -43 24 -33 2 -48 0.6 0.8 -28 -34 18 -28 2 -48 0.4 0.3 -3 -46 10 -43 2 -48 0.2 0.7 16 -5 -27 -37 1 -48 0.9 0.3 -13 -24 -5 -41 1 -48 0.2 0.3 -13 -41 23 10 2 -48 0.5 0.7 37 -17 -19 -49 1 -48 0.9 0.9 -11 -18 -19 -47 1 -48 0.7 0.4 -3 -20 48 37 2 -48 0.6 0.7 17 -47 38 -9 2 -48 0.8 0.2 38 -21 19 6 1 -48 0.6 0.6 -3 -50 35 -3 2 -48 0.4 0.4 16 8 30 29 2 -48 0.5 0.2 38 -47 38 36 2 -48 0.8 0.4 13 -10 -24 -50 1 -48 0.9 0.6 -10 -40 4 -36 2 -48 0.2 0.1 38 -32 7 -33 1 -48 0.6 0.6 24 5 11 -23 1 -48 0.9 0.8 -5 -22 42 39 2 -48 0.5 0.1 12 -48 38 21 2 -48 0.7 0.7 45 13 32 -4 1 -48 0.5 0.6 41 3 20 9 1 -48 0.9 0.7 -5 -10 22 19 2 -48 0.1 0.8 -6 -24 0 -34 2 -48 0.5 0.7 -27 -33 44 -11 2 -48 0.8 0.8 8 -6 37 -11 2 -48 0.3 0.1 36 -23 -35 -50 1 -48 0.6 0.1 21 -1 11 -17 1 -48 0.6 0.6 -7 -26 41 10 2 -48 0.8 0.3 18 -7 42 -32 1 -48 0.3 0.9 -17 -45 18 -7 2 -48 0.1 0.9 17 -14 -39 -41 1 -48 0.4 0.8 -9 -15 14 -39 2 -48 0.8 0.4 -28 -32 -39 -41 1 -48 0.9 0.6 40 24 29 28 1 -48 0.8 0.7 48 18 15 -40 1 -48 0.4 0.7 -3 -23 -12 -15 2 -48 0.4 0.2 46 -9 15 -25 1 -48 0.8 0.9 4 -12 20 -16 2 -48 0.7 0.1 47 42 11 -6 1 -48 0.1 0.4 6 2 44 -19 2 -48 0.8 0.6 -15 -33 36 -35 2 -48 0.1 0.6 40 -24 30 -34 2 -48 0.5 0.2 48 29 48 12 1 -48 0.5 0.8 34 -14 -1 -45 1 -48 0.8 0.3 49 -18 8 -35 1 -48 0.5 0.2 -18 -48 -1 -26 1 -48 0.9 0.2 13 -35 47 -38 1 -48 0.5 0.3 33 -28 35 -12 1 -48 0.1 0.8 35 -31 9 -11 2 -48 0.6 0.4 7 -50 37 -31 2 -48 0.1 0.4 0 -39 -9 -44 1 -48 0.7 0.8 -4 -11 -1 -13 1 -48 0.4 0.6 -25 -31 -3 -14 2 -48 0.7 0.1 39 8 37 16 1 -48 0.4 0.1 6 -31 9 -31 1 -48 0.1 0.9 8 -49 -1 -39 2 -48 0.5 0.2 -2 -36 -12 -33 1 -48 0.3 0.2 12 -45 48 -18 2 -48 0.5 0.5 49 -36 -10 -14 1 -48 0.1 0.7 49 -39 46 8 2 -48 0.1 0.5 -12 -46 32 -22 2 -49 0.9 0.7 22 -29 20 -32 1 -49 0.1 0.2 37 17 25 4 1 -49 0.7 0.1 22 7 19 13 2 -49 0.4 0.6 -16 -40 -2 -8 2 -49 0.8 0.1 6 -20 -22 -36 1 -49 0.5 0.9 42 12 46 -14 1 -49 0.5 0.5 9 -11 26 3 2 -49 0.2 0.6 -25 -37 48 -12 2 -49 0.9 0.4 2 -22 8 -37 1 -49 0.4 0.2 -8 -21 9 -49 1 -49 0.1 0.7 36 -15 29 -14 2 -49 0.3 0.6 36 -27 30 
-33 2 -49 0.7 0.8 37 -8 32 -35 2 -49 0.3 0.2 21 -42 31 17 2 -49 0.4 0.2 -22 -38 29 -44 1 -49 0.1 0.1 15 -18 44 33 2 -49 0.6 0.5 0 -4 19 -17 2 -49 0.3 0.3 40 35 -13 -44 1 -49 0.5 0.4 23 -32 -5 -48 1 -49 0.1 0.9 1 -29 30 -32 2 -49 0.7 0.9 50 -42 39 -18 1 -49 0.9 0.6 -7 -14 23 -12 2 -49 0.1 0.6 50 -2 -13 -33 1 -49 0.5 0.8 -20 -36 50 10 2 -49 0.9 0.9 43 28 -12 -35 1 -49 0.9 0.4 -29 -32 5 -1 2 -49 0.5 0.6 30 -35 15 -15 2 -49 0.4 0.6 7 6 39 -27 1 -49 0.5 0.9 -23 -34 27 -48 2 -49 0.6 0.7 41 -49 -11 -48 1 -49 0.6 0.6 30 26 34 16 1 -49 0.5 0.5 29 13 -24 -45 1 -49 0.6 0.8 46 22 47 12 1 -49 0.6 0.6 26 3 15 -37 1 -49 0.8 0.7 34 -27 -13 -34 1 -49 0.7 0.8 -35 -41 49 47 2 -49 0.3 0.1 26 10 33 30 2 -49 0.8 0.9 37 24 17 -30 1 -49 0.6 0.5 -25 -41 -34 -49 2 -49 0.4 0.6 22 -28 16 -37 1 -49 0.9 0.9 20 -32 31 25 2 -49 0.6 0.9 6 -13 -44 -45 1 -49 0.5 0.6 -7 -34 -33 -39 1 -49 0.7 0.5 35 -4 -14 -40 1 -49 0.3 0.4 43 -22 8 -36 1 -49 0.8 0.8 41 -9 26 20 2 -49 0.4 0.3 -8 -25 -32 -46 1 -49 0.6 0.9 45 -37 19 -45 1 -49 0.3 0.9 39 11 45 36 2 -49 0.3 0.9 25 -45 20 9 2 -49 0.9 0.5 29 -37 14 -47 1 -49 0.9 0.4 -4 -50 26 22 2 -49 0.8 0.5 -39 -43 35 -19 2 -49 0.8 0.2 2 -34 32 24 2 -49 0.5 0.3 -1 -32 17 -18 2 -49 0.8 0.2 -45 -50 10 -29 2 -49 0.9 0.6 41 12 7 -17 1 -49 0.8 0.2 -10 -39 34 -24 1 -49 0.2 0.1 49 1 -12 -29 1 -49 0.8 0.1 19 -25 25 -44 1 -49 0.4 0.5 21 -4 41 -26 2 -49 0.1 0.5 -4 -36 44 37 2 -49 0.7 0.2 30 23 -39 -50 1 -49 0.1 0.9 28 -42 -14 -46 1 -49 0.6 0.2 0 -17 48 21 2 -49 0.4 0.7 45 -13 38 -36 2 -49 0.3 0.6 0 -44 -8 -30 1 -49 0.1 0.3 -23 -47 24 -27 2 -49 0.8 0.3 0 -6 -5 -30 1 -49 0.8 0.9 46 -3 32 -35 2 -49 0.7 0.9 45 41 10 -12 1 -49 0.8 0.9 30 -22 35 34 2 -49 0.6 0.6 35 -6 18 -32 1 -49 0.2 0.2 47 35 9 -45 1 -49 0.8 0.3 32 -34 15 -24 1 -49 0.6 0.3 39 23 46 -31 1 -49 0.7 0.8 18 -4 39 35 2 -49 0.7 0.9 48 -36 17 -7 1 -49 0.7 0.7 21 2 50 17 2 -49 0.7 0.3 45 -33 17 -28 1 -49 0.3 0.2 -37 -49 39 6 2 -49 0.1 0.2 38 26 37 -21 1 -49 0.7 0.3 34 -46 44 -29 1 -49 0.2 0.9 46 -16 -6 -34 1 -49 0.4 0.8 2 -5 40 -13 2 -49 0.5 0.7 -4 -42 18 16 2 -49 0.5 0.7 21 -7 -29 -47 1 -49 0.6 0.4 48 23 18 -5 1 -49 0.5 0.5 16 -19 -30 -40 1 -49 0.6 0.8 27 26 30 -30 1 -49 0.8 0.8 17 16 30 -8 2 -49 0.3 0.6 37 4 31 23 2 -49 0.3 0.8 17 -18 31 2 2 -49 0.7 0.2 -32 -50 48 -29 2 -49 0.1 0.5 22 -16 -4 -21 2 -49 0.2 0.4 -14 -36 -18 -23 2 -49 0.9 0.1 20 -47 37 -32 1 -49 0.8 0.5 12 -6 33 22 2 -49 0.3 0.1 37 -47 -9 -38 1 -49 0.4 0.6 32 -14 -15 -32 1 -49 0.4 0.3 12 -25 38 -34 1 -49 0.5 0.3 -5 -27 10 -4 2 -49 0.8 0.8 9 -7 43 -27 2 -49 0.7 0.6 45 23 27 -41 1 -49 0.9 0.5 8 4 29 -41 1 -49 0.7 0.6 7 -21 -1 -41 1 -49 0.1 0.2 42 9 40 -27 1 -49 0.6 0.7 40 34 47 -33 2 -49 0.9 0.4 34 -35 -1 -24 1 -49 0.5 0.4 38 28 33 -50 1 -49 0.9 0.3 -2 -11 45 -28 2 -49 0.6 0.3 40 -28 5 -36 1 -49 0.3 0.6 37 36 30 11 1 -49 0.7 0.7 17 12 1 -15 1 -49 0.1 0.2 15 -14 17 -26 1 -49 0.6 0.2 43 -21 -21 -27 1 -49 0.4 0.6 10 0 48 9 2 -49 0.7 0.3 34 -43 36 35 2 -49 0.1 0.3 29 4 32 7 1 -49 0.7 0.8 -31 -45 10 -44 2 -49 0.4 0.5 36 31 2 -1 1 -49 0.6 0.8 39 28 -28 -48 1 -49 0.7 0.9 -21 -25 23 2 2 -49 0.4 0.4 24 -12 30 -24 2 -49 0.7 0.6 30 -40 -3 -21 1 -49 0.6 0.1 -28 -30 24 -42 1 -49 0.8 0.8 49 31 6 -7 1 -49 0.7 0.5 47 20 20 12 1 -49 0.3 0.8 42 -36 23 -43 2 -49 0.7 0.6 49 -8 -26 -39 1 -49 0.9 0.3 44 -34 5 -47 1 -49 0.9 0.1 39 5 44 28 2 -49 0.6 0.1 24 -38 18 2 2 -49 0.2 0.7 40 37 1 -29 1 -49 0.5 0.8 -2 -44 -9 -34 1 -49 0.9 0.3 49 -21 -24 -39 1 -49 0.1 0.2 30 -50 24 -27 2 -49 0.4 0.2 -4 -10 -5 -45 1 -49 0.6 0.5 8 -48 7 -25 2 -49 0.5 0.5 40 15 8 7 1 -49 0.2 0.7 40 4 10 -26 1 -49 0.1 0.5 -44 -46 46 25 2 -49 0.8 
0.2 33 5 11 -35 1 -49 0.8 0.3 -2 -26 -13 -20 1 -49 0.9 0.5 29 -34 14 -12 1 -49 0.1 0.7 37 -16 20 -32 2 -49 0.6 0.9 21 3 14 -25 1 -49 0.2 0.9 39 -1 7 3 2 -49 0.9 0.9 7 -23 36 14 2 -49 0.7 0.5 30 26 41 -39 1 -49 0.8 0.1 5 -26 -5 -42 1 -49 0.2 0.5 1 -17 38 30 2 -49 0.3 0.4 -38 -46 30 -22 2 -49 0.6 0.4 36 -13 -7 -15 1 -49 0.8 0.7 17 -27 42 -48 2 -49 0.2 0.3 38 -34 34 9 2 -49 0.8 0.3 34 -5 -18 -44 1 -49 0.9 0.1 42 -34 41 10 1 -49 0.5 0.6 -6 -29 4 -5 2 -49 0.2 0.4 16 -3 5 -32 1 -49 0.9 0.7 45 4 26 -27 1 -49 0.8 0.6 40 3 15 -14 1 -49 0.6 0.2 7 -3 4 -13 1 -49 0.1 0.6 40 -48 -28 -30 1 -49 0.6 0.4 8 -49 35 -12 2 -49 0.2 0.4 47 -11 38 -10 2 -49 0.2 0.5 14 -47 21 -23 2 -49 0.9 0.5 -2 -50 5 -41 1 -49 0.7 0.5 5 -6 30 -47 2 -49 0.6 0.3 46 -6 14 -35 1 -49 0.8 0.5 41 -10 -9 -39 1 -49 0.8 0.5 27 2 27 -32 1 -49 0.4 0.9 -11 -47 50 -37 2 -49 0.2 0.8 24 21 -33 -43 1 -49 0.4 0.3 -41 -42 -15 -47 2 -49 0.2 0.5 -2 -18 -25 -29 1 -49 0.6 0.3 -2 -32 30 11 2 -49 0.7 0.3 15 -14 -18 -42 1 -49 0.2 0.6 33 -10 26 2 2 -49 0.2 0.4 26 -29 15 -19 2 -49 0.6 0.6 23 -14 32 -41 2 -49 0.2 0.2 37 -28 36 18 2 -49 0.5 0.3 -3 -11 -9 -37 2 -49 0.8 0.2 25 -38 37 22 2 -49 0.1 0.2 15 -13 -1 -30 1 -49 0.8 0.2 7 6 -25 -49 1 -49 0.3 0.9 23 6 -5 -9 1 -49 0.3 0.3 49 -19 42 31 2 -49 0.4 0.3 8 -46 -15 -16 1 -49 0.3 0.6 16 10 -14 -36 1 -49 0.1 0.8 40 -46 30 -47 2 -49 0.5 0.2 9 -50 -8 -14 2 -49 0.3 0.6 -23 -46 -22 -29 2 -49 0.2 0.5 43 34 -21 -24 1 -49 0.8 0.7 -20 -33 13 -30 2 -49 0.2 0.2 23 9 34 -47 1 -49 0.5 0.9 -22 -49 42 -1 2 -49 0.4 0.5 -24 -42 8 -1 2 -49 0.1 0.7 30 29 14 -41 1 -49 0.4 0.5 4 -8 -4 -27 1 -49 0.4 0.2 11 9 -5 -37 1 -49 0.2 0.8 4 -37 -7 -32 2 -49 0.8 0.7 27 -26 15 -25 1 -49 0.1 0.1 -40 -45 34 -42 2 -49 0.8 0.3 -8 -28 -38 -45 1 -49 0.8 0.1 28 -6 50 20 2 -49 0.9 0.8 -24 -44 -19 -27 1 -49 0.8 0.8 -27 -37 -3 -33 2 -49 0.1 0.3 -1 -31 -12 -21 2 -49 0.1 0.5 46 -35 23 8 2 -49 0.3 0.4 -3 -44 31 16 2 -49 0.4 0.9 8 -2 -15 -21 1 -49 0.1 0.7 47 -3 -25 -30 1 -49 0.9 0.6 24 -9 27 16 2 -49 0.3 0.2 0 -28 41 -32 2 -49 0.4 0.1 -6 -11 3 -48 1 -49 0.4 0.9 43 -47 48 -46 2 -49 0.5 0.8 9 -50 13 -7 2 -49 0.2 0.4 -11 -15 29 -31 2 -49 0.2 0.5 10 -5 37 12 2 -49 0.1 0.8 33 -44 -3 -13 2 -49 0.6 0.8 35 -20 2 -49 1 -49 0.5 0.1 -8 -46 47 -44 1 -49 0.7 0.6 -11 -44 29 -22 2 -49 0.6 0.2 -31 -47 37 -11 2 -49 0.1 0.9 -26 -28 35 21 2 -49 0.3 0.3 26 -44 39 -40 1 -49 0.6 0.4 38 -23 -17 -30 1 -49 0.8 0.8 38 14 39 31 2 -49 0.8 0.7 1 -40 48 -34 2 -49 0.1 0.7 -12 -34 45 -45 2 -49 0.1 0.4 9 -29 7 6 2 -49 0.2 0.4 21 14 0 -11 1 -49 0.8 0.6 -13 -37 36 -42 2 -49 0.4 0.1 38 5 17 -45 1 -49 0.6 0.7 47 37 -34 -44 1 -49 0.7 0.7 -13 -41 48 39 2 -49 0.6 0.2 32 -20 -9 -18 1 -49 0.8 0.6 43 -24 5 -5 1 -49 0.2 0.2 8 -20 12 -27 1 -49 0.3 0.8 13 -42 20 15 2 -49 0.7 0.9 -4 -13 24 -50 2 -49 0.5 0.9 40 31 50 34 2 -49 0.5 0.3 -39 -42 -35 -39 1 -49 0.6 0.7 -12 -31 25 11 2 -49 0.2 0.5 -6 -29 9 -36 2 -49 0.6 0.8 7 -43 -7 -49 2 -49 0.4 0.3 50 -28 26 16 2 -49 0.5 0.6 37 -2 43 8 2 -49 0.3 0.7 35 9 -26 -38 1 -49 0.5 0.5 47 38 -8 -24 1 -49 0.8 0.1 30 23 30 -47 1 -49 0.1 0.2 24 12 38 -14 1 -49 0.6 0.6 -10 -20 -35 -47 1 -49 0.7 0.4 1 -35 -6 -44 1 -49 0.7 0.3 36 -19 -38 -44 1 -49 0.6 0.5 8 -22 4 -46 1 -49 0.9 0.3 35 27 1 -3 1 -49 0.9 0.2 1 -10 47 35 2 -49 0.9 0.1 13 5 43 11 2 -49 0.3 0.6 10 -18 -22 -40 1 -49 0.3 0.2 -14 -40 29 9 2 -49 0.4 0.3 30 21 48 16 1 -49 0.9 0.2 42 -50 13 3 1 -49 0.3 0.9 38 19 20 1 1 -49 0.6 0.9 -10 -27 48 -45 2 -49 0.2 0.1 22 1 -7 -24 1 -49 0.9 0.8 50 -42 38 12 1 -49 0.2 0.9 -27 -41 0 -50 2 -49 0.2 0.7 -19 -21 -29 -43 2 -49 0.6 0.7 -15 -43 -9 -48 1 -49 0.3 0.2 36 11 -38 -40 1 -49 0.8 
0.5 49 20 -18 -22 1 -49 0.1 0.5 22 -5 -5 -9 1 -49 0.2 0.4 10 -25 7 -42 1 -49 0.3 0.2 27 -20 48 -36 1 -49 0.8 0.5 24 -1 42 -31 2 -49 0.7 0.9 12 -4 20 14 2 -49 0.4 0.3 22 -14 -3 -28 1 -49 0.8 0.5 30 -23 1 -12 1 -49 0.4 0.4 -11 -44 23 -15 2 -49 0.5 0.8 13 -42 21 -12 2 -49 0.5 0.5 27 -36 -5 -22 1 -49 0.6 0.4 48 19 5 -39 1 -49 0.6 0.7 25 -14 -35 -48 1 -49 0.1 0.3 48 -22 -3 -6 2 -49 0.4 0.2 -11 -39 -34 -43 1 -49 0.2 0.3 -1 -24 -1 -49 1 -49 0.3 0.8 -29 -43 7 -9 2 -49 0.7 0.5 33 18 -15 -34 1 -49 0.2 0.5 30 -33 26 20 2 -49 0.2 0.7 29 -41 44 3 2 -49 0.5 0.5 43 37 46 20 1 -49 0.9 0.6 40 -40 -32 -46 1 -49 0.7 0.7 26 0 12 -40 1 -49 0.7 0.9 30 13 10 -24 1 -49 0.4 0.5 -19 -29 9 -9 2 -49 0.2 0.5 -26 -29 10 3 2 -49 0.2 0.4 -12 -39 16 -4 2 -49 0.9 0.8 -4 -38 -23 -41 1 -50 0.7 0.4 12 4 39 -2 1 -50 0.4 0.4 18 -27 17 -10 2 -50 0.3 0.4 -6 -21 -15 -38 1 -50 0.6 0.4 32 1 -37 -50 1 -50 0.1 0.6 45 31 22 -39 1 -50 0.3 0.7 -45 -49 8 3 2 -50 0.2 0.1 12 -4 -37 -49 1 -50 0.3 0.3 -9 -46 -6 -44 1 -50 0.1 0.6 33 -43 42 5 2 -50 0.9 0.1 45 -24 49 -12 1 -50 0.9 0.8 23 -16 -18 -25 1 -50 0.9 0.5 42 -35 26 -13 1 -50 0.4 0.4 19 -41 37 -39 2 -50 0.1 0.8 -29 -37 -37 -40 1 -50 0.2 0.4 44 22 33 10 1 -50 0.6 0.6 -36 -50 37 22 2 -50 0.2 0.1 9 -42 41 -22 2 -50 0.7 0.4 43 -31 -33 -46 1 -50 0.5 0.4 -20 -32 36 -39 2 -50 0.5 0.2 32 -17 37 13 2 -50 0.9 0.2 7 -8 48 7 2 -50 0.1 0.5 35 -6 32 -7 2 -50 0.8 0.5 36 -27 -24 -32 1 -50 0.4 0.9 17 -47 -34 -39 1 -50 0.3 0.4 11 -45 -38 -49 1 -50 0.6 0.3 -7 -8 49 10 2 -50 0.8 0.5 5 -18 35 6 2 -50 0.6 0.8 17 -11 25 -30 2 -50 0.1 0.8 48 -29 47 40 2 -50 0.6 0.4 44 -2 48 -15 1 -50 0.1 0.7 30 -1 -3 -25 1 -50 0.7 0.8 44 -10 -4 -26 1 -50 0.8 0.2 17 4 -13 -21 1 -50 0.9 0.3 11 -33 22 -15 1 -50 0.7 0.2 -33 -43 39 0 2 -50 0.7 0.6 25 -25 38 -32 1 -50 0.1 0.4 -2 -29 12 -40 2 -50 0.5 0.2 -28 -39 31 -28 2 -50 0.5 0.5 46 -1 13 11 1 -50 0.8 0.9 18 2 26 -41 2 -50 0.3 0.4 21 12 49 -13 1 -50 0.5 0.3 42 -33 31 -24 1 -50 0.1 0.9 -37 -48 43 22 2 -50 0.7 0.1 47 30 21 -3 1 -50 0.8 0.8 19 -50 39 -42 2 -50 0.4 0.7 16 1 -7 -35 1 -50 0.6 0.7 34 -13 22 12 2 -50 0.2 0.8 -11 -32 20 15 2 -50 0.9 0.4 -17 -25 35 -47 2 -50 0.4 0.5 14 -27 -22 -36 1 -50 0.6 0.8 6 -39 -29 -45 1 -50 0.6 0.1 -12 -20 21 10 2 -50 0.8 0.5 35 -35 45 28 2 -50 0.1 0.7 22 -47 26 -12 2 -50 0.3 0.8 1 -15 -10 -36 2 -50 0.1 0.4 15 -38 40 18 2 -50 0.6 0.4 -37 -44 36 -39 2 -50 0.2 0.5 46 39 29 -9 1 -50 0.5 0.2 37 -33 20 -9 1 -50 0.9 0.2 -9 -35 26 -35 1 -50 0.6 0.4 19 -22 -23 -32 1 -50 0.1 0.8 28 -5 47 46 2 -50 0.5 0.2 45 9 -5 -14 1 -50 0.3 0.3 37 -50 32 -50 1 -50 0.3 0.1 41 9 -22 -47 1 -50 0.9 0.3 2 -23 48 -8 1 -50 0.9 0.6 34 7 23 -49 1 -50 0.7 0.7 38 -34 15 -28 1 -50 0.7 0.6 10 -14 30 -45 2 -50 0.9 0.7 14 -4 -15 -33 1 -50 0.6 0.3 -23 -35 46 36 2 -50 0.6 0.2 35 -38 8 -11 1 -50 0.4 0.4 31 -33 27 25 2 -50 0.3 0.8 0 -28 31 4 2 -50 0.3 0.3 22 -6 23 -35 1 -50 0.1 0.3 32 6 34 33 2 -50 0.6 0.9 27 -40 -1 -33 1 -50 0.2 0.9 28 -28 27 6 2 -50 0.7 0.7 45 27 -10 -47 1 -50 0.8 0.6 -1 -5 -21 -26 1 -50 0.6 0.9 44 18 33 -43 1 -50 0.8 0.5 27 -24 29 -8 1 -50 0.8 0.3 -22 -39 47 29 2 -50 0.1 0.9 -3 -14 27 -19 2 -50 0.5 0.2 -37 -44 -6 -11 2 -50 0.1 0.2 8 -41 11 -40 1 -50 0.8 0.4 46 42 21 -12 1 -50 0.1 0.6 25 -38 48 15 2 -50 0.5 0.8 36 -44 37 -41 2 -50 0.9 0.2 34 1 15 -5 1 -50 0.2 0.9 -12 -19 4 -17 2 -50 0.5 0.8 20 -26 30 -1 2 -50 0.8 0.7 19 -33 -2 -3 1 -50 0.3 0.1 -19 -24 13 -4 2 -50 0.6 0.4 48 -33 -35 -42 1 -50 0.9 0.7 31 5 45 22 2 -50 0.1 0.5 7 6 12 -41 1 -50 0.5 0.4 3 -46 -5 -6 2 -50 0.2 0.1 18 6 10 -42 1 -50 0.9 0.2 -3 -50 -15 -23 1 -50 0.2 0.6 41 30 -1 -7 1 -50 0.3 0.6 41 22 28 -26 1 -50 0.6 
0.8 37 -29 38 -5 2 -50 0.8 0.3 14 1 31 30 2 -50 0.9 0.4 -4 -15 15 -8 2 -50 0.9 0.8 41 -17 10 3 1 -50 0.2 0.9 36 32 20 -11 1 -50 0.7 0.3 43 -37 26 24 1 -50 0.1 0.7 7 -25 35 -49 2 -50 0.9 0.5 23 1 2 -7 1 -50 0.1 0.2 -22 -38 48 20 2 -50 0.3 0.9 -12 -50 20 13 2 -50 0.1 0.1 39 -35 -43 -44 1 -50 0.1 0.5 17 -40 16 -2 2 -50 0.3 0.3 32 -10 26 -14 2 -50 0.4 0.3 24 19 25 19 1 -50 0.8 0.8 40 -5 10 8 1 -50 0.6 0.7 14 -23 21 10 2 -50 0.3 0.4 39 -20 44 -3 2 -50 0.1 0.4 27 -9 42 17 2 -50 0.4 0.9 22 -36 20 -2 2 -50 0.6 0.3 22 -12 32 -39 1 -50 0.1 0.3 32 -17 26 -15 2 -50 0.5 0.2 49 36 28 -50 1 -50 0.8 0.9 26 -20 43 40 2 -50 0.5 0.5 43 29 24 1 1 -50 0.5 0.8 -22 -27 50 29 2 -50 0.2 0.2 20 -50 28 -11 2 -50 0.5 0.3 21 -30 37 24 2 -50 0.9 0.3 15 13 6 -39 1 -50 0.9 0.7 -25 -26 26 -8 2 -50 0.7 0.7 25 7 -11 -19 1 -50 0.6 0.7 -5 -38 29 25 2 -50 0.3 0.3 40 13 25 -1 1 -50 0.9 0.8 8 -46 -18 -32 1 -50 0.8 0.7 -15 -38 27 5 2 -50 0.2 0.2 49 -33 -1 -6 2 -50 0.6 0.2 39 -3 -14 -25 1 -50 0.3 0.8 3 -39 4 -37 2 -50 0.3 0.9 39 37 48 -8 1 -50 0.2 0.6 43 -47 50 4 2 -50 0.6 0.7 -25 -30 -8 -47 2 -50 0.3 0.1 -8 -29 43 -7 2 -50 0.7 0.1 29 -18 -6 -26 1 -50 0.7 0.2 15 -19 24 -5 1 -50 0.1 0.6 -3 -17 9 -36 2 -50 0.4 0.4 -12 -30 21 -35 2 -50 0.6 0.7 0 -50 9 -45 2 -50 0.1 0.8 44 6 23 -25 2 -50 0.1 0.1 25 -11 40 -13 1 -50 0.8 0.4 15 -13 0 -43 1 -50 0.3 0.1 -28 -40 18 -36 1 -50 0.6 0.6 38 -5 -6 -16 1 -50 0.9 0.4 17 12 -7 -28 1 -50 0.9 0.9 48 -23 49 44 2 -50 0.6 0.7 -20 -29 32 -3 2 -50 0.7 0.6 -3 -33 1 -21 2 -50 0.8 0.8 6 -36 37 32 2 -50 0.3 0.9 18 -42 47 31 2 -50 0.7 0.4 28 -49 34 -18 1 -50 0.1 0.6 -7 -43 41 15 2 -50 0.7 0.1 14 -28 -1 -16 1 -50 0.7 0.3 44 0 12 -21 1 -50 0.9 0.8 6 -41 20 -37 2 -50 0.6 0.2 31 -31 42 27 2 -50 0.2 0.2 35 -2 27 8 2 -50 0.2 0.5 -31 -32 44 5 2 -50 0.1 0.6 49 -24 40 -6 2 -50 0.3 0.8 7 -45 40 -31 2 -50 0.4 0.3 43 13 35 13 1 -50 0.9 0.8 23 -9 -5 -39 1 -50 0.8 0.4 42 -37 -8 -28 1 -50 0.4 0.9 -16 -19 30 24 2 -50 0.3 0.6 35 33 39 -12 1 -50 0.2 0.6 28 -36 5 -25 2 -50 0.3 0.3 50 20 9 -38 1 -50 0.4 0.1 2 -14 -2 -42 1 -50 0.2 0.3 -32 -40 9 -11 2 -50 0.7 0.7 39 33 31 2 1 -50 0.3 0.8 23 -50 -21 -49 1 -50 0.6 0.3 22 -33 0 -8 1 -50 0.9 0.1 -5 -48 -17 -26 1 -50 0.7 0.7 46 -30 -30 -40 1 -50 0.8 0.9 12 -9 12 -9 1 -50 0.9 0.4 37 -27 -1 -22 1 -50 0.9 0.2 -16 -38 36 -37 2 -50 0.3 0.2 -14 -25 8 -31 2 -50 0.5 0.9 2 -14 43 20 2 -50 0.8 0.6 0 -3 28 -19 2 -50 0.6 0.5 16 11 44 -2 1 -50 0.8 0.2 6 -39 43 40 2 -50 0.5 0.7 50 3 21 -14 1 -50 0.9 0.3 42 -14 38 -45 1 -50 0.8 0.7 19 -11 18 -25 1 -50 0.7 0.6 22 -13 39 31 2 -50 0.5 0.7 -30 -42 40 -22 2 -50 0.3 0.1 49 9 34 29 2 -50 0.6 0.3 -17 -36 35 0 2 -50 0.7 0.6 11 -43 46 -2 2 -50 0.2 0.1 -30 -49 40 3 2 -50 0.9 0.3 25 24 45 14 1 -50 0.2 0.4 47 29 -2 -7 1 -50 0.2 0.4 34 33 39 -14 1 -50 0.4 0.5 1 -33 18 -34 2 -50 0.3 0.6 49 -31 49 35 2 -50 0.5 0.2 -6 -37 36 30 2 -50 0.3 0.6 9 3 4 3 2 -50 0.3 0.6 -11 -19 3 -33 2 -50 0.6 0.2 43 17 17 -31 1 -50 0.3 0.8 -32 -36 -10 -14 2 -50 0.1 0.8 15 -2 0 -30 1 -50 0.7 0.4 13 -26 32 15 2 -50 0.2 0.8 -9 -18 43 -3 2 -50 0.3 0.4 17 -48 46 13 2 -50 0.9 0.5 46 -7 44 -26 1 -50 0.1 0.7 47 17 26 -27 1 -50 0.9 0.4 -13 -50 41 -17 2 -50 0.5 0.5 28 14 1 -6 1 -50 0.4 0.6 26 -16 37 25 2 -50 0.1 0.5 41 14 -6 -32 1 -50 0.8 0.4 28 -31 45 -18 2 -50 0.8 0.1 33 -6 20 19 1 -50 0.4 0.4 -2 -47 20 3 2 -50 0.8 0.9 32 -9 6 -47 1 -50 0.3 0.2 -11 -30 42 16 2 -50 0.2 0.4 15 2 9 4 1 -50 0.5 0.9 7 -15 38 -45 2 -50 0.4 0.3 31 26 49 -41 1 -50 0.9 0.2 -17 -23 33 -37 2 -50 0.9 0.3 -21 -45 -15 -38 1 -50 0.3 0.1 35 -13 46 24 2 -50 0.8 0.9 38 -46 -12 -17 1 -50 0.5 0.5 22 -22 -3 -33 1 -50 0.6 0.6 -6 -27 20 
12 2 -50 0.1 0.7 2 -33 29 -31 2 -50 0.5 0.3 36 -47 2 -29 1 -50 0.2 0.2 -2 -24 -32 -49 1 -50 0.7 0.6 28 26 2 -17 1 -50 0.6 0.9 49 42 31 -50 1 -50 0.9 0.2 -7 -48 42 7 2 -50 0.9 0.6 -1 -12 13 -1 2 -50 0.9 0.1 22 -39 17 -34 1 -50 0.7 0.9 1 -25 25 20 2 -50 0.1 0.1 -20 -48 39 -32 2 -50 0.3 0.8 18 -2 8 -4 2 -50 0.5 0.7 49 -27 -12 -33 1 -50 0.9 0.7 -11 -32 8 -1 2 -50 0.1 0.5 -4 -27 -1 -44 1 -50 0.3 0.5 37 35 17 -4 1 -50 0.8 0.5 23 4 -9 -50 1 -50 0.6 0.2 25 -41 50 41 2 -50 0.8 0.4 40 1 -1 -36 1 -50 0.7 0.5 -16 -48 27 -46 2 -50 0.6 0.8 -29 -42 4 3 2 -50 0.3 0.8 -8 -13 -19 -22 1 -50 0.1 0.8 18 -5 29 -1 2 -50 0.5 0.2 18 14 42 -39 1 -50 0.1 0.1 -1 -37 13 0 2 -50 0.2 0.7 -28 -30 5 -35 2 -50 0.6 0.1 -29 -33 47 -31 2 -50 0.8 0.3 0 -29 50 32 2 -50 0.8 0.4 -18 -26 27 -26 2 -50 0.8 0.2 10 -17 6 -20 1 -50 0.7 0.8 24 -39 27 17 2 -50 0.6 0.3 -3 -43 -26 -43 1 -50 0.2 0.8 38 -11 -1 -38 1 -50 0.5 0.7 5 -1 16 15 2 -50 0.7 0.4 36 23 26 -44 1 -50 0.7 0.7 37 -48 -7 -35 1 -50 0.7 0.5 -25 -44 38 0 2 -50 0.8 0.7 -26 -35 -38 -41 1 -50 0.9 0.4 -9 -43 46 -19 2 -50 0.2 0.8 31 -19 33 -23 2 -50 0.6 0.3 30 23 46 -24 1 -50 0.2 0.9 48 -10 32 -31 2 -50 0.1 0.8 25 7 27 -47 2 -50 0.3 0.4 2 -4 9 -38 1 -50 0.4 0.9 -4 -48 27 -27 2 -50 0.1 0.1 1 -47 11 3 2 -50 0.3 0.7 21 13 21 -18 2 -50 0.9 0.6 28 5 26 10 1 -50 0.2 0.2 35 29 49 -14 1 -50 0.3 0.6 39 12 50 17 2 -50 0.4 0.2 -18 -33 0 -26 2 -50 0.7 0.1 34 32 -18 -32 1 -50 0.9 0.7 21 -28 17 -7 1 -50 0.9 0.5 -7 -25 10 -48 1 -50 0.4 0.8 -11 -28 6 -14 2 -50 0.9 0.4 33 -16 38 -44 1 -50 0.1 0.9 13 11 31 -9 2 -50 0.1 0.1 -3 -44 39 -23 2 -50 0.9 0.2 15 -23 34 -38 1 -50 0.1 0.3 43 -3 21 -19 1 -50 0.2 0.5 -13 -34 33 -23 2 -50 0.5 0.3 28 25 43 21 2 -50 0.2 0.6 32 20 25 -2 1 -50 0.1 0.1 22 7 40 -32 1 -50 0.6 0.7 29 -21 -34 -46 1 -50 0.9 0.3 -23 -46 -4 -49 1 -50 0.9 0.8 42 -26 13 -38 1
diff --git a/R/inst/extdata/dd_exampleData.txt b/R/inst/extdata/dd_exampleData.txt
deleted file mode 100644
index d90c64c1..00000000
--- a/R/inst/extdata/dd_exampleData.txt
+++ /dev/null
@@ -1,2161 +0,0 @@
-subjID trial delay_later amount_later delay_sooner amount_sooner choice -1 1 6 10.5 0 10 1 -1 2 170 38.3 0 10 1 -1 3 28 13.4 0 10 1 -1 4 28 31.4 0 10 1 -1 5 85 30.9 0 10 1 -1 6 28 21.1 0 10 1 -1 7 28 13 0 10 1 -1 8 1 21.3 0 10 1 -1 9 28 21.1 0 10 1 -1 10 15 30.1 0 10 1 -1 11 1 10.7 0 10 1 -1 12 85 36.1 0 10 1 -1 13 15 10.5 0 10 1 -1 14 6 16.7 0 10 1 -1 15 1 11 0 10 1 -1 16 15 14.2 0 10 1 -1 17 15 12.5 0 10 1 -1 18 15 20.7 0 10 1 -1 19 6 11 0 10 0 -1 20 28 16.9 0 10 1 -1 21 15 30.1 0 10 1 -1 22 85 24.4 0 10 1 -1 23 170 41.3 0 10 1 -1 24 15 14.2 0 10 1 -1 25 6 10.5 0 10 1 -1 26 170 24.4 0 10 1 -1 27 15 49 0 10 1 -1 28 170 29.7 0 10 1 -1 29 1 11.8 0 10 0 -1 30 6 13.2 0 10 0 -1 31 85 30.9 0 10 1 -1 32 6 44 0 10 1 -1 33 6 35.1 0 10 1 -1 34 28 15.5 0 10 1 -1 35 170 43.3 0 10 1 -1 36 170 33.9 0 10 1 -1 37 1 11 0 10 1 -1 38 1 21.3 0 10 1 -1 39 85 45 0 10 1 -1 40 15 39.6 0 10 1 -1 41 85 10.5 0 10 0 -1 42 170 15 0 10 1 -1 43 170 49.8 0 10 1 -1 44 170 24.4 0 10 1 -1 45 28 13.4 0 10 1 -1 46 1 31.6 0 10 1 -1 47 170 35.6 0 10 1 -1 48 1 41.9 0 10 1 -1 49 6 17.4 0 10 1 -1 50 85 18.4 0 10 1 -1 51 85 27.3 0 10 1 -1 52 85 26 0 10 1 -1 53 170 38.3 0 10 1 -1 54 28 21.7 0 10 1 -1 55 1 10.7 0 10 1 -1 56 170 49.8 0 10 1 -1 57 1 11.2 0 10 1 -1 58 15 20.7 0 10 1 -1 59 6 44 0 10 1 -1 60 28 41.1 0 10 1 -1 61 28 16.9 0 10 1 -1 62 6 14 0 10 1 -1 63 1 31.6 0 10 1 -1 64 15 18.6 0 10 1 -1 65 28 12 0 10 1 -1 66 6 13.2 0 10 1 -1 67 170 43.3 0 10 1 -1 68 28 31.4 0 10 1 -1 69 85 19.5 0 10 1 -1 70 170 35.6 0 10 1 -1 71 85 18.4 0 10 1 -1 72 1 12.5 0 10 1 -1
73 170 41.3 0 10 1 -1 74 170 15 0 10 0 -1 75 28 12 0 10 0 -1 76 85 36.1 0 10 1 -1 77 1 18 0 10 1 -1 78 85 10.5 0 10 0 -1 79 170 33.9 0 10 1 -1 80 6 26.3 0 10 1 -1 81 85 45 0 10 1 -1 82 28 21.7 0 10 1 -1 83 28 13 0 10 0 -1 84 85 27.3 0 10 1 -1 85 15 18.6 0 10 1 -1 86 15 12.5 0 10 1 -1 87 6 26.3 0 10 1 -1 88 6 11 0 10 1 -1 89 15 10.7 0 10 0 -1 90 6 16.7 0 10 1 -1 91 28 41.1 0 10 1 -1 92 85 26 0 10 1 -1 93 85 24.4 0 10 1 -1 94 1 12.5 0 10 1 -1 95 6 17.4 0 10 1 -1 96 6 35.1 0 10 1 -1 97 6 14 0 10 1 -1 98 15 10.5 0 10 0 -1 99 1 11.8 0 10 1 -1 100 15 10.7 0 10 1 -1 101 15 39.6 0 10 1 -1 102 85 19.5 0 10 1 -1 103 1 11.2 0 10 1 -1 104 170 29.7 0 10 1 -1 105 15 49 0 10 1 -1 106 1 41.9 0 10 1 -1 107 1 18 0 10 1 -1 108 28 15.5 0 10 1 -2 1 1 11.8 0 10 0 -2 2 170 35.6 0 10 0 -2 3 85 10.5 0 10 0 -2 4 28 21.1 0 10 1 -2 5 28 13 0 10 0 -2 6 6 10.5 0 10 0 -2 7 15 10.5 0 10 0 -2 8 6 17.4 0 10 1 -2 9 85 26 0 10 1 -2 10 6 35.1 0 10 1 -2 11 28 21.7 0 10 1 -2 12 6 14 0 10 1 -2 13 15 14.2 0 10 0 -2 14 1 12.5 0 10 1 -2 15 170 38.3 0 10 0 -2 16 1 18 0 10 1 -2 17 15 39.6 0 10 1 -2 18 85 18.4 0 10 0 -2 19 28 21.1 0 10 0 -2 20 85 19.5 0 10 0 -2 21 6 11 0 10 1 -2 22 85 30.9 0 10 1 -2 23 1 10.7 0 10 1 -2 24 28 16.9 0 10 0 -2 25 170 29.7 0 10 1 -2 26 170 43.3 0 10 0 -2 27 6 14 0 10 1 -2 28 6 11 0 10 1 -2 29 28 41.1 0 10 1 -2 30 1 31.6 0 10 1 -2 31 15 18.6 0 10 1 -2 32 15 14.2 0 10 1 -2 33 28 12 0 10 0 -2 34 1 21.3 0 10 1 -2 35 85 36.1 0 10 1 -2 36 85 26 0 10 1 -2 37 15 49 0 10 1 -2 38 1 41.9 0 10 1 -2 39 1 21.3 0 10 1 -2 40 170 41.3 0 10 0 -2 41 170 43.3 0 10 0 -2 42 15 18.6 0 10 1 -2 43 15 49 0 10 1 -2 44 170 15 0 10 0 -2 45 85 24.4 0 10 1 -2 46 15 30.1 0 10 1 -2 47 85 18.4 0 10 0 -2 48 170 41.3 0 10 0 -2 49 28 41.1 0 10 1 -2 50 28 31.4 0 10 1 -2 51 6 35.1 0 10 1 -2 52 1 11.2 0 10 0 -2 53 170 33.9 0 10 0 -2 54 28 15.5 0 10 0 -2 55 1 18 0 10 1 -2 56 15 10.7 0 10 0 -2 57 85 45 0 10 1 -2 58 85 19.5 0 10 0 -2 59 6 44 0 10 1 -2 60 85 30.9 0 10 0 -2 61 1 11 0 10 0 -2 62 170 35.6 0 10 1 -2 63 170 29.7 0 10 0 -2 64 6 16.7 0 10 1 -2 65 28 15.5 0 10 0 -2 66 6 44 0 10 1 -2 67 85 10.5 0 10 0 -2 68 85 45 0 10 1 -2 69 1 11 0 10 0 -2 70 15 10.5 0 10 0 -2 71 170 49.8 0 10 0 -2 72 15 20.7 0 10 0 -2 73 6 13.2 0 10 1 -2 74 15 12.5 0 10 1 -2 75 28 13 0 10 1 -2 76 1 10.7 0 10 1 -2 77 28 13.4 0 10 1 -2 78 15 39.6 0 10 1 -2 79 15 20.7 0 10 1 -2 80 1 11.2 0 10 1 -2 81 85 24.4 0 10 1 -2 82 1 12.5 0 10 0 -2 83 170 49.8 0 10 1 -2 84 170 33.9 0 10 1 -2 85 85 27.3 0 10 0 -2 86 170 24.4 0 10 0 -2 87 15 10.7 0 10 0 -2 88 6 16.7 0 10 1 -2 89 1 11.8 0 10 0 -2 90 6 10.5 0 10 0 -2 91 28 12 0 10 0 -2 92 6 17.4 0 10 1 -2 93 28 16.9 0 10 0 -2 94 28 13.4 0 10 1 -2 95 1 31.6 0 10 1 -2 96 85 36.1 0 10 1 -2 97 15 30.1 0 10 0 -2 98 170 15 0 10 0 -2 99 85 27.3 0 10 0 -2 100 170 38.3 0 10 0 -2 101 15 12.5 0 10 0 -2 102 6 26.3 0 10 1 -2 103 1 41.9 0 10 1 -2 104 6 13.2 0 10 1 -2 105 28 21.7 0 10 0 -2 106 170 24.4 0 10 0 -2 107 28 31.4 0 10 0 -2 108 6 26.3 0 10 1 -3 1 28 16.9 0 10 0 -3 2 1 21.3 0 10 1 -3 3 6 44 0 10 1 -3 4 170 49.8 0 10 1 -3 5 28 13.4 0 10 1 -3 6 28 21.1 0 10 1 -3 7 15 14.2 0 10 1 -3 8 6 26.3 0 10 1 -3 9 85 24.4 0 10 0 -3 10 170 41.3 0 10 1 -3 11 28 12 0 10 0 -3 12 15 39.6 0 10 1 -3 13 85 30.9 0 10 1 -3 14 28 31.4 0 10 1 -3 15 85 10.5 0 10 1 -3 16 1 10.7 0 10 1 -3 17 28 31.4 0 10 1 -3 18 6 26.3 0 10 1 -3 19 1 41.9 0 10 1 -3 20 6 13.2 0 10 0 -3 21 28 41.1 0 10 1 -3 22 15 12.5 0 10 1 -3 23 15 39.6 0 10 1 -3 24 85 10.5 0 10 0 -3 25 28 12 0 10 1 -3 26 170 38.3 0 10 1 -3 27 85 36.1 0 10 1 -3 28 1 41.9 0 10 1 -3 29 15 10.5 0 10 0 -3 30 85 19.5 0 10 0 -3 31 
85 26 0 10 1 -3 32 85 45 0 10 1 -3 33 1 12.5 0 10 1 -3 34 6 13.2 0 10 1 -3 35 15 10.7 0 10 1 -3 36 1 11 0 10 1 -3 37 15 30.1 0 10 0 -3 38 15 20.7 0 10 1 -3 39 6 17.4 0 10 1 -3 40 6 10.5 0 10 0 -3 41 170 15 0 10 1 -3 42 15 12.5 0 10 1 -3 43 1 31.6 0 10 1 -3 44 15 10.5 0 10 0 -3 45 170 41.3 0 10 1 -3 46 170 15 0 10 0 -3 47 15 18.6 0 10 1 -3 48 6 17.4 0 10 1 -3 49 85 18.4 0 10 0 -3 50 170 43.3 0 10 1 -3 51 28 21.7 0 10 0 -3 52 6 16.7 0 10 0 -3 53 170 33.9 0 10 0 -3 54 1 18 0 10 1 -3 55 1 18 0 10 1 -3 56 15 30.1 0 10 1 -3 57 1 10.7 0 10 0 -3 58 85 27.3 0 10 1 -3 59 6 35.1 0 10 1 -3 60 85 30.9 0 10 1 -3 61 85 24.4 0 10 1 -3 62 85 19.5 0 10 0 -3 63 170 33.9 0 10 1 -3 64 6 10.5 0 10 0 -3 65 85 27.3 0 10 1 -3 66 28 16.9 0 10 0 -3 67 6 35.1 0 10 1 -3 68 15 49 0 10 1 -3 69 85 26 0 10 1 -3 70 85 45 0 10 1 -3 71 1 11.8 0 10 1 -3 72 170 35.6 0 10 1 -3 73 1 31.6 0 10 1 -3 74 28 13 0 10 0 -3 75 28 21.1 0 10 1 -3 76 15 20.7 0 10 1 -3 77 15 10.7 0 10 0 -3 78 28 15.5 0 10 0 -3 79 1 21.3 0 10 1 -3 80 6 14 0 10 1 -3 81 170 49.8 0 10 1 -3 82 85 36.1 0 10 1 -3 83 1 11.2 0 10 0 -3 84 28 15.5 0 10 0 -3 85 170 29.7 0 10 1 -3 86 170 24.4 0 10 1 -3 87 170 24.4 0 10 1 -3 88 28 13.4 0 10 0 -3 89 15 18.6 0 10 1 -3 90 28 21.7 0 10 1 -3 91 85 18.4 0 10 1 -3 92 6 16.7 0 10 1 -3 93 6 11 0 10 1 -3 94 28 41.1 0 10 1 -3 95 170 43.3 0 10 1 -3 96 6 44 0 10 1 -3 97 1 11.2 0 10 1 -3 98 6 11 0 10 1 -3 99 170 35.6 0 10 1 -3 100 15 49 0 10 1 -3 101 170 38.3 0 10 1 -3 102 28 13 0 10 0 -3 103 170 29.7 0 10 1 -3 104 1 12.5 0 10 1 -3 105 1 11 0 10 1 -3 106 1 11.8 0 10 1 -3 107 6 14 0 10 0 -3 108 15 14.2 0 10 1 -4 1 170 41.3 0 10 1 -4 2 170 38.3 0 10 1 -4 3 28 21.1 0 10 1 -4 4 15 20.7 0 10 1 -4 5 85 45 0 10 1 -4 6 85 45 0 10 1 -4 7 28 21.7 0 10 1 -4 8 1 11.2 0 10 1 -4 9 170 49.8 0 10 1 -4 10 6 14 0 10 0 -4 11 28 21.7 0 10 1 -4 12 1 11.2 0 10 1 -4 13 1 31.6 0 10 1 -4 14 6 10.5 0 10 1 -4 15 1 21.3 0 10 1 -4 16 170 43.3 0 10 1 -4 17 1 18 0 10 1 -4 18 15 10.5 0 10 0 -4 19 15 20.7 0 10 1 -4 20 15 39.6 0 10 1 -4 21 170 33.9 0 10 1 -4 22 1 21.3 0 10 1 -4 23 85 30.9 0 10 1 -4 24 15 18.6 0 10 1 -4 25 28 13.4 0 10 1 -4 26 170 15 0 10 1 -4 27 170 41.3 0 10 1 -4 28 85 27.3 0 10 0 -4 29 1 11.8 0 10 0 -4 30 85 24.4 0 10 1 -4 31 15 49 0 10 1 -4 32 6 17.4 0 10 1 -4 33 6 35.1 0 10 1 -4 34 170 15 0 10 0 -4 35 6 26.3 0 10 1 -4 36 170 35.6 0 10 0 -4 37 6 13.2 0 10 1 -4 38 28 15.5 0 10 1 -4 39 1 11 0 10 1 -4 40 15 12.5 0 10 1 -4 41 6 13.2 0 10 0 -4 42 1 10.7 0 10 1 -4 43 6 17.4 0 10 1 -4 44 85 10.5 0 10 0 -4 45 28 13.4 0 10 1 -4 46 1 41.9 0 10 1 -4 47 28 13 0 10 1 -4 48 28 16.9 0 10 0 -4 49 85 36.1 0 10 1 -4 50 15 18.6 0 10 1 -4 51 85 27.3 0 10 1 -4 52 15 49 0 10 1 -4 53 15 30.1 0 10 1 -4 54 170 29.7 0 10 1 -4 55 6 14 0 10 1 -4 56 28 41.1 0 10 1 -4 57 15 30.1 0 10 1 -4 58 15 12.5 0 10 1 -4 59 85 30.9 0 10 1 -4 60 28 21.1 0 10 1 -4 61 6 44 0 10 1 -4 62 28 16.9 0 10 1 -4 63 6 11 0 10 0 -4 64 170 38.3 0 10 1 -4 65 85 18.4 0 10 1 -4 66 85 19.5 0 10 1 -4 67 170 33.9 0 10 0 -4 68 170 35.6 0 10 1 -4 69 15 14.2 0 10 1 -4 70 28 13 0 10 0 -4 71 28 31.4 0 10 1 -4 72 1 11.8 0 10 0 -4 73 1 12.5 0 10 0 -4 74 28 31.4 0 10 1 -4 75 1 12.5 0 10 1 -4 76 28 41.1 0 10 1 -4 77 1 10.7 0 10 1 -4 78 170 24.4 0 10 1 -4 79 6 16.7 0 10 1 -4 80 170 24.4 0 10 1 -4 81 6 35.1 0 10 1 -4 82 1 11 0 10 0 -4 83 28 12 0 10 0 -4 84 15 10.5 0 10 0 -4 85 15 10.7 0 10 0 -4 86 28 12 0 10 1 -4 87 85 19.5 0 10 1 -4 88 6 16.7 0 10 1 -4 89 6 11 0 10 0 -4 90 15 39.6 0 10 1 -4 91 85 24.4 0 10 0 -4 92 6 26.3 0 10 1 -4 93 85 18.4 0 10 1 -4 94 15 14.2 0 10 0 -4 95 6 10.5 0 10 0 -4 96 1 41.9 0 10 1 -4 97 85 
36.1 0 10 1 -4 98 85 26 0 10 1 -4 99 28 15.5 0 10 0 -4 100 1 31.6 0 10 1 -4 101 6 44 0 10 1 -4 102 85 26 0 10 1 -4 103 170 29.7 0 10 1 -4 104 170 43.3 0 10 1 -4 105 170 49.8 0 10 1 -4 106 85 10.5 0 10 0 -4 107 1 18 0 10 1 -4 108 15 10.7 0 10 1 -5 1 170 41.3 0 10 1 -5 2 85 18.4 0 10 1 -5 3 28 21.7 0 10 1 -5 4 85 10.5 0 10 0 -5 5 15 14.2 0 10 1 -5 6 28 21.7 0 10 1 -5 7 85 30.9 0 10 1 -5 8 85 26 0 10 1 -5 9 1 10.7 0 10 1 -5 10 28 13 0 10 0 -5 11 170 33.9 0 10 1 -5 12 85 36.1 0 10 0 -5 13 15 30.1 0 10 1 -5 14 1 31.6 0 10 1 -5 15 6 13.2 0 10 1 -5 16 1 11 0 10 1 -5 17 85 24.4 0 10 1 -5 18 1 41.9 0 10 1 -5 19 15 14.2 0 10 0 -5 20 15 20.7 0 10 1 -5 21 15 10.5 0 10 0 -5 22 6 10.5 0 10 1 -5 23 85 45 0 10 1 -5 24 28 16.9 0 10 1 -5 25 1 21.3 0 10 1 -5 26 6 14 0 10 1 -5 27 28 13.4 0 10 0 -5 28 6 17.4 0 10 1 -5 29 170 33.9 0 10 0 -5 30 15 18.6 0 10 1 -5 31 85 45 0 10 1 -5 32 28 13.4 0 10 0 -5 33 15 10.5 0 10 0 -5 34 15 49 0 10 1 -5 35 170 43.3 0 10 1 -5 36 15 39.6 0 10 1 -5 37 85 18.4 0 10 0 -5 38 170 49.8 0 10 1 -5 39 15 10.7 0 10 0 -5 40 170 24.4 0 10 0 -5 41 15 39.6 0 10 1 -5 42 28 41.1 0 10 1 -5 43 85 27.3 0 10 1 -5 44 1 18 0 10 1 -5 45 1 12.5 0 10 1 -5 46 1 11.8 0 10 0 -5 47 28 15.5 0 10 0 -5 48 170 15 0 10 0 -5 49 28 21.1 0 10 1 -5 50 6 11 0 10 0 -5 51 28 31.4 0 10 1 -5 52 1 31.6 0 10 1 -5 53 15 20.7 0 10 1 -5 54 28 31.4 0 10 1 -5 55 1 11.2 0 10 1 -5 56 6 11 0 10 1 -5 57 6 10.5 0 10 1 -5 58 15 10.7 0 10 1 -5 59 28 13 0 10 0 -5 60 85 26 0 10 1 -5 61 6 35.1 0 10 1 -5 62 170 35.6 0 10 1 -5 63 85 27.3 0 10 1 -5 64 85 30.9 0 10 1 -5 65 1 41.9 0 10 1 -5 66 170 35.6 0 10 1 -5 67 28 15.5 0 10 1 -5 68 1 11.2 0 10 1 -5 69 170 49.8 0 10 1 -5 70 15 12.5 0 10 0 -5 71 85 19.5 0 10 1 -5 72 6 16.7 0 10 1 -5 73 1 10.7 0 10 1 -5 74 6 44 0 10 1 -5 75 170 29.7 0 10 1 -5 76 6 17.4 0 10 1 -5 77 1 21.3 0 10 1 -5 78 170 38.3 0 10 0 -5 79 170 24.4 0 10 0 -5 80 6 35.1 0 10 1 -5 81 1 12.5 0 10 1 -5 82 1 11.8 0 10 1 -5 83 28 12 0 10 1 -5 84 28 12 0 10 1 -5 85 85 36.1 0 10 1 -5 86 170 29.7 0 10 1 -5 87 170 43.3 0 10 1 -5 88 1 11 0 10 0 -5 89 85 24.4 0 10 0 -5 90 15 30.1 0 10 1 -5 91 6 14 0 10 0 -5 92 170 38.3 0 10 1 -5 93 6 44 0 10 1 -5 94 6 16.7 0 10 1 -5 95 6 26.3 0 10 1 -5 96 28 16.9 0 10 0 -5 97 85 10.5 0 10 0 -5 98 15 18.6 0 10 1 -5 99 28 21.1 0 10 1 -5 100 170 15 0 10 0 -5 101 15 49 0 10 1 -5 102 170 41.3 0 10 1 -5 103 6 13.2 0 10 1 -5 104 85 19.5 0 10 1 -5 105 6 26.3 0 10 1 -5 106 28 41.1 0 10 1 -5 107 1 18 0 10 1 -5 108 15 12.5 0 10 0 -6 1 15 18.6 0 10 1 -6 2 1 10.7 0 10 1 -6 3 1 11.2 0 10 1 -6 4 15 18.6 0 10 1 -6 5 28 16.9 0 10 1 -6 6 85 27.3 0 10 1 -6 7 28 13 0 10 1 -6 8 15 10.7 0 10 0 -6 9 170 33.9 0 10 1 -6 10 15 14.2 0 10 1 -6 11 15 10.5 0 10 1 -6 12 170 33.9 0 10 1 -6 13 15 39.6 0 10 1 -6 14 1 11.8 0 10 1 -6 15 15 10.7 0 10 0 -6 16 28 21.1 0 10 1 -6 17 85 18.4 0 10 0 -6 18 1 18 0 10 1 -6 19 1 11 0 10 1 -6 20 15 12.5 0 10 1 -6 21 170 38.3 0 10 1 -6 22 1 11 0 10 0 -6 23 6 16.7 0 10 0 -6 24 28 16.9 0 10 1 -6 25 6 17.4 0 10 1 -6 26 1 12.5 0 10 1 -6 27 85 18.4 0 10 0 -6 28 28 31.4 0 10 1 -6 29 6 26.3 0 10 1 -6 30 85 45 0 10 1 -6 31 85 24.4 0 10 1 -6 32 6 16.7 0 10 1 -6 33 85 10.5 0 10 0 -6 34 6 44 0 10 1 -6 35 1 12.5 0 10 1 -6 36 170 15 0 10 0 -6 37 170 15 0 10 0 -6 38 15 39.6 0 10 1 -6 39 85 19.5 0 10 1 -6 40 15 10.5 0 10 1 -6 41 85 27.3 0 10 1 -6 42 170 29.7 0 10 1 -6 43 170 24.4 0 10 1 -6 44 15 14.2 0 10 0 -6 45 6 11 0 10 1 -6 46 1 41.9 0 10 1 -6 47 1 31.6 0 10 1 -6 48 28 13.4 0 10 1 -6 49 15 30.1 0 10 1 -6 50 28 41.1 0 10 1 -6 51 28 13 0 10 1 -6 52 85 19.5 0 10 1 -6 53 170 43.3 0 10 1 -6 54 28 41.1 0 10 1 -6 55 6 
17.4 0 10 1 -6 56 15 20.7 0 10 1 -6 57 15 30.1 0 10 1 -6 58 170 49.8 0 10 1 -6 59 85 36.1 0 10 1 -6 60 85 30.9 0 10 1 -6 61 170 35.6 0 10 1 -6 62 15 20.7 0 10 1 -6 63 1 11.2 0 10 0 -6 64 170 24.4 0 10 1 -6 65 28 21.7 0 10 1 -6 66 1 10.7 0 10 1 -6 67 85 45 0 10 1 -6 68 6 10.5 0 10 1 -6 69 15 12.5 0 10 1 -6 70 28 31.4 0 10 1 -6 71 170 38.3 0 10 1 -6 72 1 18 0 10 1 -6 73 1 21.3 0 10 1 -6 74 6 35.1 0 10 1 -6 75 28 13.4 0 10 0 -6 76 85 10.5 0 10 0 -6 77 28 12 0 10 1 -6 78 6 10.5 0 10 1 -6 79 1 11.8 0 10 1 -6 80 6 13.2 0 10 1 -6 81 1 41.9 0 10 1 -6 82 85 36.1 0 10 1 -6 83 28 15.5 0 10 1 -6 84 85 30.9 0 10 1 -6 85 170 43.3 0 10 1 -6 86 85 26 0 10 1 -6 87 28 21.1 0 10 1 -6 88 28 15.5 0 10 0 -6 89 6 11 0 10 1 -6 90 1 31.6 0 10 1 -6 91 170 49.8 0 10 1 -6 92 1 21.3 0 10 1 -6 93 28 21.7 0 10 1 -6 94 170 41.3 0 10 1 -6 95 15 49 0 10 1 -6 96 6 35.1 0 10 1 -6 97 15 49 0 10 1 -6 98 6 26.3 0 10 1 -6 99 28 12 0 10 1 -6 100 6 14 0 10 1 -6 101 6 44 0 10 1 -6 102 170 29.7 0 10 1 -6 103 6 14 0 10 1 -6 104 170 35.6 0 10 1 -6 105 85 26 0 10 1 -6 106 6 13.2 0 10 1 -6 107 170 41.3 0 10 1 -6 108 85 24.4 0 10 1 -7 1 28 13 0 10 1 -7 2 28 41.1 0 10 1 -7 3 170 29.7 0 10 0 -7 4 1 10.7 0 10 1 -7 5 6 17.4 0 10 1 -7 6 15 12.5 0 10 1 -7 7 15 18.6 0 10 1 -7 8 170 24.4 0 10 0 -7 9 1 11 0 10 1 -7 10 28 16.9 0 10 1 -7 11 170 41.3 0 10 1 -7 12 15 10.5 0 10 0 -7 13 6 10.5 0 10 1 -7 14 28 12 0 10 1 -7 15 170 24.4 0 10 1 -7 16 1 10.7 0 10 0 -7 17 6 35.1 0 10 1 -7 18 85 19.5 0 10 0 -7 19 6 26.3 0 10 1 -7 20 85 26 0 10 1 -7 21 1 11.2 0 10 1 -7 22 6 16.7 0 10 1 -7 23 28 31.4 0 10 1 -7 24 170 35.6 0 10 0 -7 25 1 21.3 0 10 1 -7 26 15 20.7 0 10 1 -7 27 15 14.2 0 10 1 -7 28 85 24.4 0 10 1 -7 29 1 11 0 10 1 -7 30 85 27.3 0 10 1 -7 31 15 18.6 0 10 1 -7 32 6 16.7 0 10 1 -7 33 28 21.1 0 10 1 -7 34 15 39.6 0 10 1 -7 35 28 31.4 0 10 1 -7 36 1 11.8 0 10 1 -7 37 170 38.3 0 10 1 -7 38 1 12.5 0 10 1 -7 39 1 11.8 0 10 1 -7 40 28 21.7 0 10 1 -7 41 28 21.1 0 10 1 -7 42 170 33.9 0 10 0 -7 43 6 14 0 10 1 -7 44 15 12.5 0 10 1 -7 45 15 10.7 0 10 1 -7 46 1 41.9 0 10 1 -7 47 1 18 0 10 1 -7 48 15 14.2 0 10 1 -7 49 6 11 0 10 0 -7 50 85 30.9 0 10 1 -7 51 170 49.8 0 10 1 -7 52 6 44 0 10 1 -7 53 85 45 0 10 1 -7 54 170 49.8 0 10 1 -7 55 85 10.5 0 10 0 -7 56 15 49 0 10 1 -7 57 170 15 0 10 0 -7 58 6 13.2 0 10 1 -7 59 170 35.6 0 10 1 -7 60 170 29.7 0 10 0 -7 61 170 15 0 10 0 -7 62 28 15.5 0 10 1 -7 63 28 21.7 0 10 1 -7 64 85 45 0 10 1 -7 65 28 13.4 0 10 0 -7 66 6 44 0 10 1 -7 67 6 10.5 0 10 1 -7 68 85 36.1 0 10 1 -7 69 6 14 0 10 1 -7 70 170 43.3 0 10 1 -7 71 28 12 0 10 0 -7 72 85 24.4 0 10 1 -7 73 85 18.4 0 10 0 -7 74 15 10.7 0 10 0 -7 75 6 35.1 0 10 1 -7 76 15 49 0 10 1 -7 77 85 19.5 0 10 0 -7 78 1 12.5 0 10 1 -7 79 1 18 0 10 1 -7 80 28 13 0 10 0 -7 81 6 17.4 0 10 1 -7 82 1 21.3 0 10 1 -7 83 15 30.1 0 10 1 -7 84 85 26 0 10 0 -7 85 85 30.9 0 10 1 -7 86 170 33.9 0 10 0 -7 87 15 39.6 0 10 1 -7 88 1 41.9 0 10 1 -7 89 170 43.3 0 10 1 -7 90 28 16.9 0 10 0 -7 91 85 10.5 0 10 0 -7 92 1 31.6 0 10 1 -7 93 6 26.3 0 10 1 -7 94 15 30.1 0 10 1 -7 95 1 31.6 0 10 1 -7 96 6 13.2 0 10 1 -7 97 170 38.3 0 10 1 -7 98 85 36.1 0 10 1 -7 99 170 41.3 0 10 1 -7 100 28 13.4 0 10 1 -7 101 28 15.5 0 10 0 -7 102 15 10.5 0 10 0 -7 103 6 11 0 10 0 -7 104 15 20.7 0 10 1 -7 105 85 27.3 0 10 0 -7 106 28 41.1 0 10 1 -7 107 85 18.4 0 10 1 -7 108 1 11.2 0 10 0 -8 1 85 19.5 0 10 0 -8 2 85 19.5 0 10 0 -8 3 28 21.1 0 10 0 -8 4 1 11.2 0 10 0 -8 5 170 33.9 0 10 0 -8 6 85 18.4 0 10 1 -8 7 15 20.7 0 10 1 -8 8 1 21.3 0 10 1 -8 9 15 14.2 0 10 0 -8 10 85 30.9 0 10 0 -8 11 1 11 0 10 1 -8 12 170 49.8 0 10 1 -8 13 1 
41.9 0 10 1 -8 14 6 44 0 10 1 -8 15 170 38.3 0 10 1 -8 16 28 12 0 10 0 -8 17 6 10.5 0 10 0 -8 18 28 13 0 10 0 -8 19 6 14 0 10 1 -8 20 170 43.3 0 10 0 -8 21 6 17.4 0 10 1 -8 22 1 18 0 10 1 -8 23 85 36.1 0 10 0 -8 24 15 10.5 0 10 0 -8 25 85 24.4 0 10 1 -8 26 170 29.7 0 10 0 -8 27 6 14 0 10 1 -8 28 15 12.5 0 10 0 -8 29 28 15.5 0 10 0 -8 30 85 45 0 10 1 -8 31 28 13.4 0 10 0 -8 32 6 16.7 0 10 1 -8 33 170 49.8 0 10 0 -8 34 6 17.4 0 10 1 -8 35 85 26 0 10 1 -8 36 1 10.7 0 10 0 -8 37 6 11 0 10 1 -8 38 1 11.8 0 10 1 -8 39 1 12.5 0 10 0 -8 40 85 26 0 10 0 -8 41 15 10.7 0 10 0 -8 42 170 35.6 0 10 1 -8 43 85 27.3 0 10 1 -8 44 170 43.3 0 10 0 -8 45 28 13.4 0 10 0 -8 46 28 12 0 10 0 -8 47 1 31.6 0 10 1 -8 48 6 13.2 0 10 1 -8 49 85 36.1 0 10 1 -8 50 28 21.7 0 10 1 -8 51 15 18.6 0 10 0 -8 52 85 27.3 0 10 0 -8 53 6 26.3 0 10 1 -8 54 1 41.9 0 10 1 -8 55 15 30.1 0 10 1 -8 56 1 10.7 0 10 0 -8 57 170 15 0 10 0 -8 58 6 10.5 0 10 0 -8 59 28 31.4 0 10 1 -8 60 28 41.1 0 10 1 -8 61 170 29.7 0 10 0 -8 62 1 11.8 0 10 0 -8 63 15 18.6 0 10 0 -8 64 1 11 0 10 0 -8 65 170 41.3 0 10 1 -8 66 15 39.6 0 10 1 -8 67 28 31.4 0 10 0 -8 68 6 16.7 0 10 1 -8 69 15 49 0 10 1 -8 70 85 45 0 10 1 -8 71 170 24.4 0 10 1 -8 72 85 24.4 0 10 1 -8 73 1 18 0 10 1 -8 74 85 10.5 0 10 0 -8 75 28 21.7 0 10 1 -8 76 28 16.9 0 10 0 -8 77 6 44 0 10 1 -8 78 170 33.9 0 10 1 -8 79 6 11 0 10 1 -8 80 28 13 0 10 1 -8 81 28 41.1 0 10 1 -8 82 6 13.2 0 10 1 -8 83 28 15.5 0 10 0 -8 84 15 49 0 10 1 -8 85 15 14.2 0 10 1 -8 86 170 41.3 0 10 1 -8 87 15 12.5 0 10 0 -8 88 85 18.4 0 10 1 -8 89 1 12.5 0 10 1 -8 90 15 20.7 0 10 0 -8 91 6 26.3 0 10 1 -8 92 170 24.4 0 10 0 -8 93 28 21.1 0 10 1 -8 94 15 10.5 0 10 0 -8 95 6 35.1 0 10 1 -8 96 85 30.9 0 10 1 -8 97 1 21.3 0 10 1 -8 98 15 39.6 0 10 1 -8 99 170 35.6 0 10 1 -8 100 15 10.7 0 10 1 -8 101 85 10.5 0 10 0 -8 102 28 16.9 0 10 0 -8 103 170 15 0 10 0 -8 104 170 38.3 0 10 0 -8 105 6 35.1 0 10 1 -8 106 1 31.6 0 10 1 -8 107 15 30.1 0 10 1 -8 108 1 11.2 0 10 1 -9 1 1 11.2 0 10 1 -9 2 6 10.5 0 10 0 -9 3 28 31.4 0 10 1 -9 4 15 49 0 10 1 -9 5 15 12.5 0 10 1 -9 6 170 33.9 0 10 1 -9 7 170 35.6 0 10 0 -9 8 6 17.4 0 10 1 -9 9 1 21.3 0 10 1 -9 10 1 10.7 0 10 0 -9 11 1 11.8 0 10 1 -9 12 1 31.6 0 10 1 -9 13 6 16.7 0 10 0 -9 14 1 10.7 0 10 1 -9 15 170 15 0 10 0 -9 16 170 43.3 0 10 1 -9 17 85 27.3 0 10 0 -9 18 28 21.7 0 10 1 -9 19 1 11 0 10 0 -9 20 1 11.8 0 10 1 -9 21 1 12.5 0 10 1 -9 22 6 16.7 0 10 1 -9 23 170 35.6 0 10 1 -9 24 6 11 0 10 1 -9 25 85 30.9 0 10 0 -9 26 28 13 0 10 0 -9 27 28 41.1 0 10 1 -9 28 85 10.5 0 10 0 -9 29 1 11.2 0 10 1 -9 30 85 36.1 0 10 1 -9 31 1 12.5 0 10 1 -9 32 6 26.3 0 10 1 -9 33 170 33.9 0 10 1 -9 34 170 43.3 0 10 0 -9 35 85 10.5 0 10 0 -9 36 170 49.8 0 10 0 -9 37 15 18.6 0 10 1 -9 38 6 14 0 10 1 -9 39 6 11 0 10 0 -9 40 15 39.6 0 10 1 -9 41 85 19.5 0 10 0 -9 42 15 10.7 0 10 0 -9 43 85 36.1 0 10 1 -9 44 1 18 0 10 0 -9 45 170 49.8 0 10 1 -9 46 15 20.7 0 10 1 -9 47 1 11 0 10 1 -9 48 28 13.4 0 10 1 -9 49 15 20.7 0 10 1 -9 50 1 18 0 10 1 -9 51 85 18.4 0 10 1 -9 52 85 18.4 0 10 0 -9 53 85 26 0 10 1 -9 54 28 31.4 0 10 1 -9 55 6 44 0 10 1 -9 56 6 13.2 0 10 0 -9 57 6 10.5 0 10 0 -9 58 28 12 0 10 0 -9 59 15 10.5 0 10 0 -9 60 6 17.4 0 10 1 -9 61 170 24.4 0 10 0 -9 62 15 30.1 0 10 1 -9 63 6 35.1 0 10 1 -9 64 15 10.7 0 10 1 -9 65 15 14.2 0 10 1 -9 66 170 41.3 0 10 1 -9 67 28 21.1 0 10 1 -9 68 6 26.3 0 10 1 -9 69 15 14.2 0 10 1 -9 70 85 24.4 0 10 0 -9 71 85 27.3 0 10 0 -9 72 28 13.4 0 10 1 -9 73 170 29.7 0 10 0 -9 74 28 15.5 0 10 0 -9 75 85 45 0 10 1 -9 76 170 38.3 0 10 0 -9 77 28 16.9 0 10 1 -9 78 6 35.1 0 10 1 -9 79 85 
[... remaining deleted data rows elided (subjects 9-20, 108 trials each; columns as in dd_single_exampleData.txt below) ...]
\ No newline at end of file
diff --git a/R/inst/extdata/dd_single_exampleData.txt b/R/inst/extdata/dd_single_exampleData.txt
deleted file mode 100644
index a729477e..00000000
--- a/R/inst/extdata/dd_single_exampleData.txt
+++ /dev/null
@@ -1,109 +0,0 @@
-subjID trial delay_later amount_later delay_sooner amount_sooner choice
[... 108 deleted data rows (subject 1, trials 1-108) elided ...]
\ No newline at end of file
diff --git a/R/inst/extdata/gng_exampleData.txt b/R/inst/extdata/gng_exampleData.txt
deleted file mode 100644
index 40e0982a..00000000
--- a/R/inst/extdata/gng_exampleData.txt
+++ /dev/null
@@ -1,2401 +0,0 @@
-trialNum cue keyPressed success congruentOutcome outcome subjID
[... 2400 deleted data rows (240 trials for each of 10 subjects) elided ...]
2 0 1 1 1 10 -145 3 1 1 1 0 10 -146 4 0 1 1 0 10 -147 1 1 1 1 1 10 -148 2 0 1 1 1 10 -149 1 1 1 1 1 10 -150 3 0 0 1 -1 10 -151 2 0 1 1 1 10 -152 2 0 1 1 1 10 -153 3 1 1 1 0 10 -154 3 0 0 2 0 10 -155 2 0 1 2 0 10 -156 2 0 1 1 1 10 -157 4 0 1 1 0 10 -158 3 1 1 1 0 10 -159 4 1 0 2 0 10 -160 3 1 1 1 0 10 -161 1 1 1 1 1 10 -162 2 0 1 1 1 10 -163 1 1 1 1 1 10 -164 2 0 1 1 1 10 -165 1 1 1 1 1 10 -166 4 0 1 1 0 10 -167 3 1 1 1 0 10 -168 3 1 1 1 0 10 -169 1 0 0 1 0 10 -170 3 1 1 1 0 10 -171 3 1 1 1 0 10 -172 2 1 0 1 0 10 -173 4 0 1 1 0 10 -174 1 1 1 1 1 10 -175 2 0 1 1 1 10 -176 4 0 1 2 -1 10 -177 3 1 1 1 0 10 -178 1 1 1 1 1 10 -179 1 1 1 1 1 10 -180 1 1 1 2 0 10 -181 1 1 1 1 1 10 -182 1 1 1 1 1 10 -183 4 0 1 2 -1 10 -184 4 1 0 1 -1 10 -185 2 0 1 1 1 10 -186 1 0 0 1 0 10 -187 2 0 1 2 0 10 -188 2 0 1 1 1 10 -189 3 1 1 1 0 10 -190 4 1 0 1 -1 10 -191 2 0 1 1 1 10 -192 4 0 1 1 0 10 -193 1 1 1 2 0 10 -194 2 0 1 1 1 10 -195 3 1 1 1 0 10 -196 2 0 1 1 1 10 -197 3 1 1 1 0 10 -198 4 0 1 1 0 10 -199 3 1 1 1 0 10 -200 2 0 1 1 1 10 -201 1 1 1 1 1 10 -202 3 1 1 1 0 10 -203 1 1 1 2 0 10 -204 3 1 1 1 0 10 -205 4 0 1 1 0 10 -206 3 1 1 2 -1 10 -207 1 1 1 2 0 10 -208 1 1 1 1 1 10 -209 1 1 1 1 1 10 -210 4 0 1 1 0 10 -211 1 1 1 1 1 10 -212 3 1 1 1 0 10 -213 2 0 1 2 0 10 -214 2 0 1 2 0 10 -215 4 0 1 1 0 10 -216 3 1 1 1 0 10 -217 4 0 1 1 0 10 -218 1 1 1 1 1 10 -219 4 0 1 1 0 10 -220 1 1 1 2 0 10 -221 3 0 0 1 -1 10 -222 2 0 1 2 0 10 -223 2 0 1 1 1 10 -224 1 1 1 1 1 10 -225 4 0 1 1 0 10 -226 1 1 1 2 0 10 -227 2 0 1 1 1 10 -228 4 0 1 1 0 10 -229 2 0 1 1 1 10 -230 3 1 1 1 0 10 -231 3 1 1 1 0 10 -232 3 1 1 1 0 10 -233 3 1 1 1 0 10 -234 2 1 0 1 0 10 -235 3 1 1 2 -1 10 -236 1 1 1 1 1 10 -237 3 0 0 1 -1 10 -238 4 0 1 1 0 10 -239 4 0 1 1 0 10 -240 2 0 1 1 1 10 diff --git a/R/inst/extdata/igt_exampleData.txt b/R/inst/extdata/igt_exampleData.txt deleted file mode 100644 index 3a6252af..00000000 --- a/R/inst/extdata/igt_exampleData.txt +++ /dev/null @@ -1,401 +0,0 @@ -trial choice gain loss subjID -1 3 50 0 1001 -2 2 100 0 1001 -3 3 50 0 1001 -4 4 50 0 1001 -5 4 50 0 1001 -6 4 50 0 1001 -7 4 50 0 1001 -8 3 50 -50 1001 -9 4 50 0 1001 -10 4 50 0 1001 -11 3 50 0 1001 -12 4 50 0 1001 -13 4 50 0 1001 -14 4 50 0 1001 -15 4 50 -250 1001 -16 4 50 0 1001 -17 2 100 0 1001 -18 4 50 0 1001 -19 1 100 0 1001 -20 2 100 0 1001 -21 2 100 0 1001 -22 2 100 0 1001 -23 3 50 -50 1001 -24 2 100 0 1001 -25 4 50 0 1001 -26 1 100 0 1001 -27 1 100 -150 1001 -28 2 100 0 1001 -29 2 100 0 1001 -30 2 100 -1250 1001 -31 1 100 0 1001 -32 4 50 0 1001 -33 1 100 -300 1001 -34 4 50 0 1001 -35 1 100 0 1001 -36 4 50 0 1001 -37 1 100 -200 1001 -38 2 100 0 1001 -39 1 100 0 1001 -40 4 50 0 1001 -41 4 50 0 1001 -42 2 100 0 1001 -43 4 50 0 1001 -44 4 50 -250 1001 -45 4 50 0 1001 -46 2 100 0 1001 -47 4 50 0 1001 -48 1 100 -250 1001 -49 4 50 0 1001 -50 4 50 0 1001 -51 4 50 0 1001 -52 3 50 0 1001 -53 3 50 -50 1001 -54 3 50 0 1001 -55 3 50 -50 1001 -56 3 50 -50 1001 -57 2 100 0 1001 -58 2 100 -1250 1001 -59 4 50 0 1001 -60 4 50 0 1001 -61 4 50 0 1001 -62 4 50 -250 1001 -63 4 50 0 1001 -64 4 50 0 1001 -65 3 50 0 1001 -66 3 50 -25 1001 -67 3 50 -75 1001 -68 4 50 0 1001 -69 4 50 0 1001 -70 4 50 0 1001 -71 4 50 -250 1001 -72 4 50 0 1001 -73 4 50 0 1001 -74 4 50 0 1001 -75 4 50 0 1001 -76 4 50 0 1001 -77 4 50 0 1001 -78 3 50 0 1001 -79 4 50 0 1001 -80 4 50 0 1001 -81 4 50 0 1001 -82 4 50 0 1001 -83 4 50 0 1001 -84 4 50 0 1001 -85 4 50 0 1001 -86 4 50 0 1001 -87 4 50 -250 1001 -88 4 50 0 1001 -89 4 50 0 1001 -90 4 50 0 1001 -91 4 50 0 1001 -92 4 50 0 1001 -93 4 50 0 1001 -94 4 50 0 
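For orientation, the deleted file is a whitespace-delimited table with the header row shown above. A minimal sketch of loading data in that layout, assuming a local copy of the file (the path and the derived net column are illustrative, not part of the package):

# Read the whitespace-delimited IGT example data
# (columns per the deleted header: trial, choice, gain, loss, subjID)
igt <- read.table("igt_exampleData.txt", header = TRUE)

# Losses are stored as non-positive values, so net payoff is a plain sum
igt$net <- igt$gain + igt$loss

# Each of the four subjects (1001-1004) contributes 100 trials
table(igt$subjID)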
diff --git a/R/inst/extdata/peer_exampleData.txt b/R/inst/extdata/peer_exampleData.txt
deleted file mode 100644
index d4e222eb..00000000
--- a/R/inst/extdata/peer_exampleData.txt
+++ /dev/null
@@ -1,361 +0,0 @@
-trial condition p_gamble risky_Lpayoff risky_Hpayoff safe_Lpayoff safe_Hpayoff risky_color total_presses choice bonus subjID
[... 360 removed data rows: subjIDs 1-5, 72 trials per subject ...]
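A hedged sketch of a derived quantity for this layout. The expected-value line assumes p_gamble is the probability of the risky option's high payoff, and that choice == 1 codes a risky choice; both are readings of the column names, not documented semantics:

peer <- read.table("peer_exampleData.txt", header = TRUE)

# Assumed: p_gamble = probability of the high risky payoff
peer$ev_risky <- peer$p_gamble * peer$risky_Hpayoff +
  (1 - peer$p_gamble) * peer$risky_Lpayoff

# Assumed: choice == 1 codes the risky option; the mean is the risky-choice rate
aggregate(choice ~ condition, data = peer, FUN = mean)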
diff --git a/R/inst/extdata/prl_exampleData.txt b/R/inst/extdata/prl_exampleData.txt
deleted file mode 100644
index 21abeae7..00000000
--- a/R/inst/extdata/prl_exampleData.txt
+++ /dev/null
@@ -1,2001 +0,0 @@
-subjID trial choice outcome
[... 2000 removed data rows: subjIDs 1-20, 100 trials per subject ...]
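The deleted PRL file has only four columns (subjID, trial, choice in {1, 2}, outcome in {-1, 1}), so a common first summary is the win-stay rate. A minimal sketch, assuming rows are ordered by trial within subject as in the file above and a hypothetical local path:

prl <- read.table("prl_exampleData.txt", header = TRUE)

# Win-stay rate: how often a subject repeats a choice after a rewarded trial
win_stay <- by(prl, prl$subjID, function(d) {
  rewarded <- d$outcome[-nrow(d)] == 1            # trial t was rewarded
  stayed   <- d$choice[-1] == d$choice[-nrow(d)]  # same choice on trial t+1
  mean(stayed[rewarded])
})
round(unlist(win_stay), 2)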
diff --git a/R/inst/extdata/prl_multipleB_exampleData.txt b/R/inst/extdata/prl_multipleB_exampleData.txt
deleted file mode 100644
index 16725497..00000000
--- a/R/inst/extdata/prl_multipleB_exampleData.txt
+++ /dev/null
@@ -1,1801 +0,0 @@
-ExperimentName subjID block trial choice outcome choiceSwitch choice.ACC choice.RT Subject_Block
[... 1800 removed data rows: PRL_Young_Final sessions keyed by Subject_Block (e.g. 5038_1, 5038_2); block 1 of subject 5038 spans 200 trials ...]
2 129 2 25 0 1 516 5038_2 -PRL_Young_Final 5038 2 130 2 -25 0 1 392 5038_2 -PRL_Young_Final 5038 2 131 2 -25 0 1 294 5038_2 -PRL_Young_Final 5038 2 132 1 -25 1 0 299 5038_2 -PRL_Young_Final 5038 2 133 2 25 1 1 506 5038_2 -PRL_Young_Final 5038 2 134 2 25 0 1 329 5038_2 -PRL_Young_Final 5038 2 135 2 25 0 1 379 5038_2 -PRL_Young_Final 5038 2 136 2 25 0 1 314 5038_2 -PRL_Young_Final 5038 2 137 2 25 0 1 398 5038_2 -PRL_Young_Final 5038 2 138 2 25 0 1 425 5038_2 -PRL_Young_Final 5038 2 139 2 -25 0 1 351 5038_2 -PRL_Young_Final 5038 2 140 2 25 0 1 342 5038_2 -PRL_Young_Final 5038 2 141 2 -25 0 0 335 5038_2 -PRL_Young_Final 5038 2 142 2 -25 0 0 392 5038_2 -PRL_Young_Final 5038 2 143 2 -25 0 0 776 5038_2 -PRL_Young_Final 5038 2 144 1 25 1 1 310 5038_2 -PRL_Young_Final 5038 2 145 1 25 0 1 304 5038_2 -PRL_Young_Final 5038 2 146 1 25 0 1 329 5038_2 -PRL_Young_Final 5038 2 147 1 25 0 1 448 5038_2 -PRL_Young_Final 5038 2 148 1 25 0 1 943 5038_2 -PRL_Young_Final 5038 2 149 1 25 0 1 370 5038_2 -PRL_Young_Final 5038 2 150 1 -25 0 1 356 5038_2 -PRL_Young_Final 5038 2 151 1 -25 0 0 431 5038_2 -PRL_Young_Final 5038 2 152 2 25 1 1 275 5038_2 -PRL_Young_Final 5038 2 153 2 25 0 1 345 5038_2 -PRL_Young_Final 5038 2 154 2 -25 0 1 506 5038_2 -PRL_Young_Final 5038 2 155 2 25 0 1 376 5038_2 -PRL_Young_Final 5038 2 156 2 25 0 1 346 5038_2 -PRL_Young_Final 5038 2 157 2 25 0 1 227 5038_2 -PRL_Young_Final 5038 2 158 2 25 0 0 512 5038_2 -PRL_Young_Final 5038 2 159 2 -25 0 0 383 5038_2 -PRL_Young_Final 5038 2 160 1 25 1 1 1125 5038_2 -PRL_Young_Final 5038 2 161 1 25 0 1 393 5038_2 -PRL_Young_Final 5038 2 162 1 -25 0 1 497 5038_2 -PRL_Young_Final 5038 2 163 1 -25 0 1 375 5038_2 -PRL_Young_Final 5038 2 164 2 25 1 0 291 5038_2 -PRL_Young_Final 5038 2 165 2 -25 0 0 390 5038_2 -PRL_Young_Final 5038 2 166 2 -25 0 0 389 5038_2 -PRL_Young_Final 5038 2 167 1 25 1 1 333 5038_2 -PRL_Young_Final 5038 2 168 1 25 0 1 383 5038_2 -PRL_Young_Final 5038 2 169 1 25 0 1 1005 5038_2 -PRL_Young_Final 5038 2 170 1 25 0 1 618 5038_2 -PRL_Young_Final 5038 2 171 1 25 0 1 448 5038_2 -PRL_Young_Final 5038 2 172 1 25 0 1 391 5038_2 -PRL_Young_Final 5038 2 173 1 -25 0 1 448 5038_2 -PRL_Young_Final 5038 2 174 1 -25 0 0 131 5038_2 -PRL_Young_Final 5038 2 175 2 25 1 1 364 5038_2 -PRL_Young_Final 5038 2 176 2 25 0 1 335 5038_2 -PRL_Young_Final 5038 2 177 2 25 0 1 473 5038_2 -PRL_Young_Final 5038 2 178 2 25 0 1 376 5038_2 -PRL_Young_Final 5038 2 179 2 25 0 1 423 5038_2 -PRL_Young_Final 5038 2 180 2 25 0 1 509 5038_2 -PRL_Young_Final 5038 2 181 2 25 0 1 659 5038_2 -PRL_Young_Final 5038 2 182 2 -25 0 1 450 5038_2 -PRL_Young_Final 5038 2 183 2 -25 0 0 415 5038_2 -PRL_Young_Final 5038 2 184 2 -25 0 0 610 5038_2 -PRL_Young_Final 5038 2 185 1 25 1 1 328 5038_2 -PRL_Young_Final 5038 2 186 1 25 0 1 313 5038_2 -PRL_Young_Final 5038 2 187 1 -25 0 1 344 5038_2 -PRL_Young_Final 5038 2 188 1 25 0 1 399 5038_2 -PRL_Young_Final 5038 2 189 1 25 0 1 436 5038_2 -PRL_Young_Final 5038 2 190 1 25 0 1 588 5038_2 -PRL_Young_Final 5038 2 191 1 25 0 1 523 5038_2 -PRL_Young_Final 5038 2 192 1 25 0 1 735 5038_2 -PRL_Young_Final 5038 2 193 1 -25 0 0 2022 5038_2 -PRL_Young_Final 5038 2 194 1 25 0 0 338 5038_2 -PRL_Young_Final 5038 2 195 1 25 0 0 458 5038_2 -PRL_Young_Final 5038 2 196 1 -25 0 0 535 5038_2 -PRL_Young_Final 5038 2 197 1 -25 0 0 325 5038_2 -PRL_Young_Final 5038 2 198 2 -25 1 1 286 5038_2 -PRL_Young_Final 5038 2 199 2 -25 0 1 355 5038_2 -PRL_Young_Final 5038 2 200 1 -25 1 0 360 5038_2 -PRL_Young_Final 5038 3 1 2 25 1 1 486 5038_3 -PRL_Young_Final 5038 3 2 2 25 0 1 366 
5038_3 -PRL_Young_Final 5038 3 3 2 25 0 1 364 5038_3 -PRL_Young_Final 5038 3 4 2 25 0 1 396 5038_3 -PRL_Young_Final 5038 3 5 2 25 0 1 324 5038_3 -PRL_Young_Final 5038 3 6 2 25 0 1 460 5038_3 -PRL_Young_Final 5038 3 7 2 -25 0 1 320 5038_3 -PRL_Young_Final 5038 3 8 2 25 0 1 377 5038_3 -PRL_Young_Final 5038 3 9 2 -25 0 0 370 5038_3 -PRL_Young_Final 5038 3 10 2 -25 0 0 1010 5038_3 -PRL_Young_Final 5038 3 11 1 25 1 1 369 5038_3 -PRL_Young_Final 5038 3 12 1 25 0 1 358 5038_3 -PRL_Young_Final 5038 3 13 1 25 0 1 373 5038_3 -PRL_Young_Final 5038 3 14 1 25 0 1 56 5038_3 -PRL_Young_Final 5038 3 15 1 25 0 1 285 5038_3 -PRL_Young_Final 5038 3 16 1 25 0 1 320 5038_3 -PRL_Young_Final 5038 3 17 1 -25 0 1 319 5038_3 -PRL_Young_Final 5038 3 18 1 -25 0 0 306 5038_3 -PRL_Young_Final 5038 3 19 2 25 1 1 321 5038_3 -PRL_Young_Final 5038 3 20 2 25 0 1 351 5038_3 -PRL_Young_Final 5038 3 21 2 -25 0 1 375 5038_3 -PRL_Young_Final 5038 3 22 2 25 0 1 360 5038_3 -PRL_Young_Final 5038 3 23 2 25 0 1 387 5038_3 -PRL_Young_Final 5038 3 24 2 25 0 1 321 5038_3 -PRL_Young_Final 5038 3 25 2 25 0 1 339 5038_3 -PRL_Young_Final 5038 3 26 2 25 0 1 299 5038_3 -PRL_Young_Final 5038 3 27 2 25 0 0 214 5038_3 -PRL_Young_Final 5038 3 28 2 -25 0 0 441 5038_3 -PRL_Young_Final 5038 3 29 2 -25 0 0 476 5038_3 -PRL_Young_Final 5038 3 30 1 -25 1 1 307 5038_3 -PRL_Young_Final 5038 3 31 2 -25 1 0 597 5038_3 -PRL_Young_Final 5038 3 32 1 -25 1 1 631 5038_3 -PRL_Young_Final 5038 3 33 1 25 0 1 419 5038_3 -PRL_Young_Final 5038 3 34 1 25 0 1 289 5038_3 -PRL_Young_Final 5038 3 35 1 25 0 1 279 5038_3 -PRL_Young_Final 5038 3 36 1 25 0 1 424 5038_3 -PRL_Young_Final 5038 3 37 1 25 0 1 335 5038_3 -PRL_Young_Final 5038 3 38 1 25 0 1 522 5038_3 -PRL_Young_Final 5038 3 39 1 -25 0 0 485 5038_3 -PRL_Young_Final 5038 3 40 1 -25 0 0 401 5038_3 -PRL_Young_Final 5038 3 41 2 -25 1 1 377 5038_3 -PRL_Young_Final 5038 3 42 2 25 0 1 305 5038_3 -PRL_Young_Final 5038 3 43 2 25 0 1 19 5038_3 -PRL_Young_Final 5038 3 44 2 25 0 1 296 5038_3 -PRL_Young_Final 5038 3 45 2 25 0 1 254 5038_3 -PRL_Young_Final 5038 3 46 2 25 0 1 212 5038_3 -PRL_Young_Final 5038 3 47 2 -25 0 0 201 5038_3 -PRL_Young_Final 5038 3 48 2 -25 0 0 164 5038_3 -PRL_Young_Final 5038 3 49 1 25 1 1 727 5038_3 -PRL_Young_Final 5038 3 50 1 25 0 1 323 5038_3 -PRL_Young_Final 5038 3 51 1 -25 0 1 440 5038_3 -PRL_Young_Final 5038 3 52 2 25 1 0 705 5038_3 -PRL_Young_Final 5038 3 53 2 -25 0 0 320 5038_3 -PRL_Young_Final 5038 3 54 2 25 0 0 329 5038_3 -PRL_Young_Final 5038 3 55 2 -25 0 0 349 5038_3 -PRL_Young_Final 5038 3 56 2 -25 0 0 528 5038_3 -PRL_Young_Final 5038 3 57 1 25 1 1 338 5038_3 -PRL_Young_Final 5038 3 58 1 25 0 1 380 5038_3 -PRL_Young_Final 5038 3 59 1 -25 0 1 406 5038_3 -PRL_Young_Final 5038 3 60 1 25 0 1 419 5038_3 -PRL_Young_Final 5038 3 61 1 25 0 1 381 5038_3 -PRL_Young_Final 5038 3 62 1 25 0 1 432 5038_3 -PRL_Young_Final 5038 3 63 1 25 0 1 443 5038_3 -PRL_Young_Final 5038 3 64 1 -25 0 0 273 5038_3 -PRL_Young_Final 5038 3 65 1 -25 0 0 246 5038_3 -PRL_Young_Final 5038 3 66 2 25 1 1 321 5038_3 -PRL_Young_Final 5038 3 67 2 -25 0 1 317 5038_3 -PRL_Young_Final 5038 3 68 2 -25 0 1 409 5038_3 -PRL_Young_Final 5038 3 69 1 -25 1 0 293 5038_3 -PRL_Young_Final 5038 3 70 2 25 1 1 963 5038_3 -PRL_Young_Final 5038 3 71 2 25 0 1 398 5038_3 -PRL_Young_Final 5038 3 72 2 25 0 1 395 5038_3 -PRL_Young_Final 5038 3 73 2 25 0 1 355 5038_3 -PRL_Young_Final 5038 3 74 2 25 0 1 315 5038_3 -PRL_Young_Final 5038 3 75 2 25 0 1 467 5038_3 -PRL_Young_Final 5038 3 76 2 -25 0 0 758 5038_3 -PRL_Young_Final 5038 3 77 2 25 0 0 547 5038_3 
-PRL_Young_Final 5038 3 78 2 25 0 0 339 5038_3 -PRL_Young_Final 5038 3 79 2 -25 0 0 442 5038_3 -PRL_Young_Final 5038 3 80 2 -25 0 0 471 5038_3 -PRL_Young_Final 5038 3 81 1 -25 1 1 497 5038_3 -PRL_Young_Final 5038 3 82 2 -25 1 0 2279 5038_3 -PRL_Young_Final 5038 3 83 1 25 1 1 328 5038_3 -PRL_Young_Final 5038 3 84 1 25 0 1 397 5038_3 -PRL_Young_Final 5038 3 85 1 25 0 1 531 5038_3 -PRL_Young_Final 5038 3 86 1 25 0 1 343 5038_3 -PRL_Young_Final 5038 3 87 1 25 0 1 472 5038_3 -PRL_Young_Final 5038 3 88 1 25 0 1 543 5038_3 -PRL_Young_Final 5038 3 89 1 -25 0 0 574 5038_3 -PRL_Young_Final 5038 3 90 1 -25 0 0 975 5038_3 -PRL_Young_Final 5038 3 91 2 25 1 1 1035 5038_3 -PRL_Young_Final 5038 3 92 2 -25 0 1 454 5038_3 -PRL_Young_Final 5038 3 93 2 25 0 1 370 5038_3 -PRL_Young_Final 5038 3 94 2 25 0 1 583 5038_3 -PRL_Young_Final 5038 3 95 2 -25 0 1 333 5038_3 -PRL_Young_Final 5038 3 96 2 25 0 1 508 5038_3 -PRL_Young_Final 5038 3 97 2 25 0 1 262 5038_3 -PRL_Young_Final 5038 3 98 2 -25 0 0 645 5038_3 -PRL_Young_Final 5038 3 99 2 25 0 0 1085 5038_3 -PRL_Young_Final 5038 3 100 2 -25 0 0 423 5038_3 -PRL_Young_Final 5038 3 101 2 -25 0 0 1003 5038_3 -PRL_Young_Final 5038 3 102 1 25 1 1 530 5038_3 -PRL_Young_Final 5038 3 103 1 25 0 1 388 5038_3 -PRL_Young_Final 5038 3 104 1 25 0 1 424 5038_3 -PRL_Young_Final 5038 3 105 1 -25 0 1 536 5038_3 -PRL_Young_Final 5038 3 106 1 -25 0 1 748 5038_3 -PRL_Young_Final 5038 3 107 2 -25 1 0 1117 5038_3 -PRL_Young_Final 5038 3 108 1 25 1 1 1623 5038_3 -PRL_Young_Final 5038 3 109 1 25 0 1 553 5038_3 -PRL_Young_Final 5038 3 110 1 25 0 1 348 5038_3 -PRL_Young_Final 5038 3 111 1 25 0 1 325 5038_3 -PRL_Young_Final 5038 3 112 1 25 0 1 388 5038_3 -PRL_Young_Final 5038 3 113 1 25 0 1 349 5038_3 -PRL_Young_Final 5038 3 114 1 -25 0 0 406 5038_3 -PRL_Young_Final 5038 3 115 1 -25 0 0 1710 5038_3 -PRL_Young_Final 5038 3 116 2 -25 1 1 553 5038_3 -PRL_Young_Final 5038 3 117 1 -25 1 0 356 5038_3 -PRL_Young_Final 5038 3 118 2 25 1 1 290 5038_3 -PRL_Young_Final 5038 3 119 2 25 0 1 167 5038_3 -PRL_Young_Final 5038 3 120 2 25 0 1 250 5038_3 -PRL_Young_Final 5038 3 121 2 25 0 1 278 5038_3 -PRL_Young_Final 5038 3 122 2 25 0 1 344 5038_3 -PRL_Young_Final 5038 3 123 2 -25 0 0 348 5038_3 -PRL_Young_Final 5038 3 124 2 25 0 0 511 5038_3 -PRL_Young_Final 5038 3 125 2 -25 0 0 660 5038_3 -PRL_Young_Final 5038 3 126 2 25 0 0 509 5038_3 -PRL_Young_Final 5038 3 127 2 -25 0 0 293 5038_3 -PRL_Young_Final 5038 3 128 1 25 1 1 492 5038_3 -PRL_Young_Final 5038 3 129 1 25 0 1 353 5038_3 -PRL_Young_Final 5038 3 130 1 -25 0 1 412 5038_3 -PRL_Young_Final 5038 3 131 1 25 0 1 683 5038_3 -PRL_Young_Final 5038 3 132 1 25 0 1 1084 5038_3 -PRL_Young_Final 5038 3 133 1 -25 0 1 1205 5038_3 -PRL_Young_Final 5038 3 134 1 25 0 1 292 5038_3 -PRL_Young_Final 5038 3 135 1 -25 0 0 496 5038_3 -PRL_Young_Final 5038 3 136 2 25 1 1 882 5038_3 -PRL_Young_Final 5038 3 137 2 25 0 1 419 5038_3 -PRL_Young_Final 5038 3 138 2 25 0 1 425 5038_3 -PRL_Young_Final 5038 3 139 2 25 0 1 488 5038_3 -PRL_Young_Final 5038 3 140 2 -25 0 1 625 5038_3 -PRL_Young_Final 5038 3 141 2 -25 0 1 149 5038_3 -PRL_Young_Final 5038 3 142 1 25 1 1 726 5038_3 -PRL_Young_Final 5038 3 143 1 25 0 1 479 5038_3 -PRL_Young_Final 5038 3 144 1 25 0 1 640 5038_3 -PRL_Young_Final 5038 3 145 1 25 0 1 547 5038_3 -PRL_Young_Final 5038 3 146 1 25 0 1 1157 5038_3 -PRL_Young_Final 5038 3 147 1 25 0 1 610 5038_3 -PRL_Young_Final 5038 3 148 1 -25 0 0 398 5038_3 -PRL_Young_Final 5038 3 149 1 -25 0 0 443 5038_3 -PRL_Young_Final 5038 3 150 2 -25 1 1 341 5038_3 -PRL_Young_Final 5038 3 151 2 25 
0 1 453 5038_3 -PRL_Young_Final 5038 3 152 2 25 0 1 847 5038_3 -PRL_Young_Final 5038 3 153 2 25 0 1 394 5038_3 -PRL_Young_Final 5038 3 154 2 25 0 1 323 5038_3 -PRL_Young_Final 5038 3 155 2 -25 0 0 465 5038_3 -PRL_Young_Final 5038 3 156 2 -25 0 0 528 5038_3 -PRL_Young_Final 5038 3 157 1 25 1 1 628 5038_3 -PRL_Young_Final 5038 3 158 1 25 0 1 369 5038_3 -PRL_Young_Final 5038 3 159 1 25 0 1 366 5038_3 -PRL_Young_Final 5038 3 160 1 -25 0 1 420 5038_3 -PRL_Young_Final 5038 3 161 1 25 0 1 497 5038_3 -PRL_Young_Final 5038 3 162 1 25 0 0 1019 5038_3 -PRL_Young_Final 5038 3 163 1 25 0 0 468 5038_3 -PRL_Young_Final 5038 3 164 1 -25 0 0 319 5038_3 -PRL_Young_Final 5038 3 165 1 -25 0 0 819 5038_3 -PRL_Young_Final 5038 3 166 2 25 1 1 683 5038_3 -PRL_Young_Final 5038 3 167 2 -25 0 1 434 5038_3 -PRL_Young_Final 5038 3 168 2 25 0 1 417 5038_3 -PRL_Young_Final 5038 3 169 2 25 0 1 564 5038_3 -PRL_Young_Final 5038 3 170 2 25 0 1 431 5038_3 -PRL_Young_Final 5038 3 171 2 25 0 1 391 5038_3 -PRL_Young_Final 5038 3 172 2 25 0 1 331 5038_3 -PRL_Young_Final 5038 3 173 2 -25 0 1 332 5038_3 -PRL_Young_Final 5038 3 174 2 -25 0 0 561 5038_3 -PRL_Young_Final 5038 3 175 1 -25 1 1 345 5038_3 -PRL_Young_Final 5038 3 176 1 25 0 1 290 5038_3 -PRL_Young_Final 5038 3 177 1 25 0 1 514 5038_3 -PRL_Young_Final 5038 3 178 1 25 0 1 451 5038_3 -PRL_Young_Final 5038 3 179 1 25 0 1 459 5038_3 -PRL_Young_Final 5038 3 180 1 25 0 1 90 5038_3 -PRL_Young_Final 5038 3 181 1 25 0 1 449 5038_3 -PRL_Young_Final 5038 3 182 1 -25 0 1 452 5038_3 -PRL_Young_Final 5038 3 183 1 -25 0 0 161 5038_3 -PRL_Young_Final 5038 3 184 2 25 1 1 1073 5038_3 -PRL_Young_Final 5038 3 185 2 25 0 1 702 5038_3 -PRL_Young_Final 5038 3 186 2 25 0 1 1401 5038_3 -PRL_Young_Final 5038 3 187 2 25 0 1 567 5038_3 -PRL_Young_Final 5038 3 188 2 25 0 1 1081 5038_3 -PRL_Young_Final 5038 3 189 2 -25 0 0 659 5038_3 -PRL_Young_Final 5038 3 190 2 -25 0 0 977 5038_3 -PRL_Young_Final 5038 3 191 1 25 1 1 361 5038_3 -PRL_Young_Final 5038 3 192 1 25 0 1 625 5038_3 -PRL_Young_Final 5038 3 193 1 -25 0 1 355 5038_3 -PRL_Young_Final 5038 3 194 2 25 1 0 519 5038_3 -PRL_Young_Final 5038 3 195 2 -25 0 0 348 5038_3 -PRL_Young_Final 5038 3 196 1 25 1 1 616 5038_3 -PRL_Young_Final 5038 3 197 1 25 0 1 322 5038_3 -PRL_Young_Final 5038 3 198 1 -25 0 1 652 5038_3 -PRL_Young_Final 5038 3 199 2 -25 1 0 321 5038_3 -PRL_Young_Final 5038 3 200 1 25 1 1 863 5038_3 -PRL_Young_Final 5036 1 1 1 25 1 1 1282 5036_1 -PRL_Young_Final 5036 1 2 1 25 0 1 1282 5036_1 -PRL_Young_Final 5036 1 3 1 25 0 1 628 5036_1 -PRL_Young_Final 5036 1 4 1 -25 0 1 595 5036_1 -PRL_Young_Final 5036 1 5 1 25 0 1 817 5036_1 -PRL_Young_Final 5036 1 6 1 25 0 1 437 5036_1 -PRL_Young_Final 5036 1 7 1 -25 0 1 472 5036_1 -PRL_Young_Final 5036 1 8 1 25 0 0 459 5036_1 -PRL_Young_Final 5036 1 9 1 -25 0 0 739 5036_1 -PRL_Young_Final 5036 1 10 1 -25 0 0 541 5036_1 -PRL_Young_Final 5036 1 11 1 -25 0 0 538 5036_1 -PRL_Young_Final 5036 1 12 2 25 1 1 1258 5036_1 -PRL_Young_Final 5036 1 13 2 25 0 1 441 5036_1 -PRL_Young_Final 5036 1 14 2 25 0 1 485 5036_1 -PRL_Young_Final 5036 1 15 2 25 0 1 463 5036_1 -PRL_Young_Final 5036 1 16 2 25 0 1 466 5036_1 -PRL_Young_Final 5036 1 17 2 -25 0 1 610 5036_1 -PRL_Young_Final 5036 1 18 2 -25 0 0 421 5036_1 -PRL_Young_Final 5036 1 19 2 -25 0 0 455 5036_1 -PRL_Young_Final 5036 1 20 2 -25 0 0 1076 5036_1 -PRL_Young_Final 5036 1 21 1 -25 1 1 653 5036_1 -PRL_Young_Final 5036 1 22 2 25 1 0 433 5036_1 -PRL_Young_Final 5036 1 23 2 25 0 0 406 5036_1 -PRL_Young_Final 5036 1 24 2 -25 0 0 468 5036_1 -PRL_Young_Final 5036 1 25 2 -25 
0 0 422 5036_1 -PRL_Young_Final 5036 1 26 2 -25 0 0 352 5036_1 -PRL_Young_Final 5036 1 27 2 -25 0 0 265 5036_1 -PRL_Young_Final 5036 1 28 2 -25 0 0 475 5036_1 -PRL_Young_Final 5036 1 29 1 25 1 1 454 5036_1 -PRL_Young_Final 5036 1 30 1 25 0 1 310 5036_1 -PRL_Young_Final 5036 1 31 1 25 0 1 289 5036_1 -PRL_Young_Final 5036 1 32 1 25 0 1 330 5036_1 -PRL_Young_Final 5036 1 33 1 25 0 1 494 5036_1 -PRL_Young_Final 5036 1 34 1 25 0 1 305 5036_1 -PRL_Young_Final 5036 1 35 1 -25 0 1 478 5036_1 -PRL_Young_Final 5036 1 36 1 25 0 1 433 5036_1 -PRL_Young_Final 5036 1 37 1 -25 0 0 172 5036_1 -PRL_Young_Final 5036 1 38 1 25 0 0 400 5036_1 -PRL_Young_Final 5036 1 39 1 -25 0 0 402 5036_1 -PRL_Young_Final 5036 1 40 1 -25 0 0 195 5036_1 -PRL_Young_Final 5036 1 41 1 -25 0 0 333 5036_1 -PRL_Young_Final 5036 1 42 1 -25 0 0 197 5036_1 -PRL_Young_Final 5036 1 43 1 -25 0 0 281 5036_1 -PRL_Young_Final 5036 1 44 1 -25 0 0 85 5036_1 -PRL_Young_Final 5036 1 45 1 -25 0 0 160 5036_1 -PRL_Young_Final 5036 1 46 2 25 1 1 857 5036_1 -PRL_Young_Final 5036 1 47 2 25 0 1 598 5036_1 -PRL_Young_Final 5036 1 48 2 25 0 1 217 5036_1 -PRL_Young_Final 5036 1 49 2 25 0 1 93 5036_1 -PRL_Young_Final 5036 1 50 2 25 0 1 450 5036_1 -PRL_Young_Final 5036 1 51 2 25 0 0 459 5036_1 -PRL_Young_Final 5036 1 52 2 -25 0 0 514 5036_1 -PRL_Young_Final 5036 1 53 2 25 0 0 1401 5036_1 -PRL_Young_Final 5036 1 54 2 -25 0 0 503 5036_1 -PRL_Young_Final 5036 1 55 2 -25 0 0 116 5036_1 -PRL_Young_Final 5036 1 56 1 25 1 1 463 5036_1 -PRL_Young_Final 5036 1 57 1 -25 0 1 377 5036_1 -PRL_Young_Final 5036 1 58 1 25 0 1 447 5036_1 -PRL_Young_Final 5036 1 59 1 25 0 1 274 5036_1 -PRL_Young_Final 5036 1 60 1 -25 0 1 434 5036_1 -PRL_Young_Final 5036 1 61 1 25 0 1 251 5036_1 -PRL_Young_Final 5036 1 62 1 25 0 1 301 5036_1 -PRL_Young_Final 5036 1 63 1 25 0 1 319 5036_1 -PRL_Young_Final 5036 1 64 1 -25 0 0 24 5036_1 -PRL_Young_Final 5036 1 65 1 -25 0 0 219 5036_1 -PRL_Young_Final 5036 1 66 1 -25 0 0 463 5036_1 -PRL_Young_Final 5036 1 67 2 25 1 1 541 5036_1 -PRL_Young_Final 5036 1 68 2 25 0 1 243 5036_1 -PRL_Young_Final 5036 1 69 2 -25 0 1 109 5036_1 -PRL_Young_Final 5036 1 70 2 -25 0 1 415 5036_1 -PRL_Young_Final 5036 1 71 1 -25 1 0 557 5036_1 -PRL_Young_Final 5036 1 72 1 25 0 0 331 5036_1 -PRL_Young_Final 5036 1 73 1 25 0 0 495 5036_1 -PRL_Young_Final 5036 1 74 1 -25 0 0 216 5036_1 -PRL_Young_Final 5036 1 75 1 -25 0 0 356 5036_1 -PRL_Young_Final 5036 1 76 1 -25 0 0 417 5036_1 -PRL_Young_Final 5036 1 77 2 25 1 1 457 5036_1 -PRL_Young_Final 5036 1 78 2 25 0 1 490 5036_1 -PRL_Young_Final 5036 1 79 2 25 0 1 196 5036_1 -PRL_Young_Final 5036 1 80 2 25 0 1 452 5036_1 -PRL_Young_Final 5036 1 81 2 25 0 1 224 5036_1 -PRL_Young_Final 5036 1 82 2 25 0 1 583 5036_1 -PRL_Young_Final 5036 1 83 2 -25 0 1 500 5036_1 -PRL_Young_Final 5036 1 84 1 -25 1 0 1289 5036_1 -PRL_Young_Final 5036 1 85 2 25 1 1 604 5036_1 -PRL_Young_Final 5036 1 86 2 25 0 1 485 5036_1 -PRL_Young_Final 5036 1 87 2 25 0 1 513 5036_1 -PRL_Young_Final 5036 1 88 2 25 0 1 1284 5036_1 -PRL_Young_Final 5036 1 89 2 25 0 1 801 5036_1 -PRL_Young_Final 5036 1 90 2 -25 0 0 686 5036_1 -PRL_Young_Final 5036 1 91 1 25 1 1 1769 5036_1 -PRL_Young_Final 5036 1 92 1 25 0 1 301 5036_1 -PRL_Young_Final 5036 1 93 1 -25 0 1 402 5036_1 -PRL_Young_Final 5036 1 94 2 -25 1 0 1137 5036_1 -PRL_Young_Final 5036 1 95 1 25 1 1 591 5036_1 -PRL_Young_Final 5036 1 96 1 25 0 1 199 5036_1 -PRL_Young_Final 5036 1 97 1 -25 0 1 263 5036_1 -PRL_Young_Final 5036 1 98 2 25 1 0 678 5036_1 -PRL_Young_Final 5036 1 99 2 -25 0 0 434 5036_1 -PRL_Young_Final 5036 1 
100 2 -25 0 0 1157 5036_1 -PRL_Young_Final 5036 1 101 2 -25 0 0 1457 5036_1 -PRL_Young_Final 5036 1 102 1 25 1 1 492 5036_1 -PRL_Young_Final 5036 1 103 1 25 0 1 1344 5036_1 -PRL_Young_Final 5036 1 104 1 25 0 1 586 5036_1 -PRL_Young_Final 5036 1 105 1 25 0 1 666 5036_1 -PRL_Young_Final 5036 1 106 1 25 0 1 710 5036_1 -PRL_Young_Final 5036 1 107 1 -25 0 1 449 5036_1 -PRL_Young_Final 5036 1 108 2 -25 1 1 1025 5036_1 -PRL_Young_Final 5036 1 109 1 -25 1 0 484 5036_1 -PRL_Young_Final 5036 1 110 1 -25 0 0 427 5036_1 -PRL_Young_Final 5036 1 111 1 -25 0 0 9 5036_1 -PRL_Young_Final 5036 1 112 2 25 1 1 225 5036_1 -PRL_Young_Final 5036 1 113 2 25 0 1 519 5036_1 -PRL_Young_Final 5036 1 114 2 25 0 1 457 5036_1 -PRL_Young_Final 5036 1 115 2 25 0 1 91 5036_1 -PRL_Young_Final 5036 1 116 2 25 0 1 268 5036_1 -PRL_Young_Final 5036 1 117 2 25 0 1 535 5036_1 -PRL_Young_Final 5036 1 118 2 -25 0 1 590 5036_1 -PRL_Young_Final 5036 1 119 1 -25 1 0 727 5036_1 -PRL_Young_Final 5036 1 120 2 25 1 1 980 5036_1 -PRL_Young_Final 5036 1 121 2 25 0 1 399 5036_1 -PRL_Young_Final 5036 1 122 2 25 0 1 386 5036_1 -PRL_Young_Final 5036 1 123 2 25 0 1 294 5036_1 -PRL_Young_Final 5036 1 124 2 25 0 1 1345 5036_1 -PRL_Young_Final 5036 1 125 2 25 0 1 555 5036_1 -PRL_Young_Final 5036 1 126 2 25 0 1 516 5036_1 -PRL_Young_Final 5036 1 127 2 -25 0 1 707 5036_1 -PRL_Young_Final 5036 1 128 2 25 0 0 496 5036_1 -PRL_Young_Final 5036 1 129 2 -25 0 0 487 5036_1 -PRL_Young_Final 5036 1 130 2 25 0 0 237 5036_1 -PRL_Young_Final 5036 1 131 2 -25 0 0 455 5036_1 -PRL_Young_Final 5036 1 132 2 -25 0 0 537 5036_1 -PRL_Young_Final 5036 1 133 2 -25 0 0 514 5036_1 -PRL_Young_Final 5036 1 134 2 -25 0 0 1835 5036_1 -PRL_Young_Final 5036 1 135 2 -25 0 0 456 5036_1 -PRL_Young_Final 5036 1 136 2 -25 0 0 534 5036_1 -PRL_Young_Final 5036 1 137 1 25 1 1 1129 5036_1 -PRL_Young_Final 5036 1 138 1 25 0 1 140 5036_1 -PRL_Young_Final 5036 1 139 1 -25 0 1 409 5036_1 -PRL_Young_Final 5036 1 140 1 25 0 1 210 5036_1 -PRL_Young_Final 5036 1 141 1 25 0 1 242 5036_1 -PRL_Young_Final 5036 1 142 1 25 0 0 57 5036_1 -PRL_Young_Final 5036 1 143 1 25 0 0 49 5036_1 -PRL_Young_Final 5036 1 144 1 -25 0 0 167 5036_1 -PRL_Young_Final 5036 1 145 1 -25 0 0 1150 5036_1 -PRL_Young_Final 5036 1 146 1 -25 0 0 272 5036_1 -PRL_Young_Final 5036 1 147 1 -25 0 0 448 5036_1 -PRL_Young_Final 5036 1 148 1 -25 0 0 112 5036_1 -PRL_Young_Final 5036 1 149 1 -25 0 0 697 5036_1 -PRL_Young_Final 5036 1 150 1 25 0 0 566 5036_1 -PRL_Young_Final 5036 1 151 1 -25 0 0 570 5036_1 -PRL_Young_Final 5036 1 152 1 -25 0 0 425 5036_1 -PRL_Young_Final 5036 1 153 1 -25 0 0 551 5036_1 -PRL_Young_Final 5036 1 154 2 25 1 1 382 5036_1 -PRL_Young_Final 5036 1 155 2 25 0 1 1614 5036_1 -PRL_Young_Final 5036 1 156 2 25 0 1 297 5036_1 -PRL_Young_Final 5036 1 157 2 -25 0 1 118 5036_1 -PRL_Young_Final 5036 1 158 2 -25 0 1 445 5036_1 -PRL_Young_Final 5036 1 159 2 25 0 1 145 5036_1 -PRL_Young_Final 5036 1 160 2 25 0 1 400 5036_1 -PRL_Young_Final 5036 1 161 2 -25 0 0 112 5036_1 -PRL_Young_Final 5036 1 162 1 25 1 1 491 5036_1 -PRL_Young_Final 5036 1 163 1 25 0 1 157 5036_1 -PRL_Young_Final 5036 1 164 1 25 0 1 433 5036_1 -PRL_Young_Final 5036 1 165 1 25 0 1 401 5036_1 -PRL_Young_Final 5036 1 166 1 -25 0 1 433 5036_1 -PRL_Young_Final 5036 1 167 2 25 1 1 484 5036_1 -PRL_Young_Final 5036 1 168 2 25 0 1 595 5036_1 -PRL_Young_Final 5036 1 169 2 25 0 1 422 5036_1 -PRL_Young_Final 5036 1 170 2 25 0 1 369 5036_1 -PRL_Young_Final 5036 1 171 2 25 0 1 411 5036_1 -PRL_Young_Final 5036 1 172 2 25 0 1 450 5036_1 -PRL_Young_Final 5036 1 173 2 25 0 1 
161 5036_1 -PRL_Young_Final 5036 1 174 2 -25 0 1 1909 5036_1 -PRL_Young_Final 5036 1 175 1 25 1 1 1234 5036_1 -PRL_Young_Final 5036 1 176 1 25 0 1 477 5036_1 -PRL_Young_Final 5036 1 177 1 -25 0 1 406 5036_1 -PRL_Young_Final 5036 1 178 2 -25 1 0 495 5036_1 -PRL_Young_Final 5036 1 179 2 -25 0 0 475 5036_1 -PRL_Young_Final 5036 1 180 2 -25 0 0 776 5036_1 -PRL_Young_Final 5036 1 181 2 25 0 0 410 5036_1 -PRL_Young_Final 5036 1 182 2 -25 0 0 626 5036_1 -PRL_Young_Final 5036 1 183 2 25 0 0 2067 5036_1 -PRL_Young_Final 5036 1 184 2 -25 0 0 160 5036_1 -PRL_Young_Final 5036 1 185 2 -25 0 0 633 5036_1 -PRL_Young_Final 5036 1 186 2 -25 0 0 1419 5036_1 -PRL_Young_Final 5036 1 187 1 25 1 1 1555 5036_1 -PRL_Young_Final 5036 1 188 1 25 0 1 410 5036_1 -PRL_Young_Final 5036 1 189 1 25 0 1 542 5036_1 -PRL_Young_Final 5036 1 190 1 25 0 1 441 5036_1 -PRL_Young_Final 5036 1 191 1 25 0 1 189 5036_1 -PRL_Young_Final 5036 1 192 1 -25 0 0 395 5036_1 -PRL_Young_Final 5036 1 193 1 -25 0 0 130 5036_1 -PRL_Young_Final 5036 1 194 2 -25 1 1 67 5036_1 -PRL_Young_Final 5036 1 195 2 -25 0 1 55 5036_1 -PRL_Young_Final 5036 1 196 2 25 0 1 1132 5036_1 -PRL_Young_Final 5036 1 197 2 25 0 1 2044 5036_1 -PRL_Young_Final 5036 1 198 2 25 0 1 481 5036_1 -PRL_Young_Final 5036 1 199 2 -25 0 0 482 5036_1 -PRL_Young_Final 5036 1 200 2 25 0 0 950 5036_1 -PRL_Young_Final 5036 2 1 1 25 1 1 3389 5036_2 -PRL_Young_Final 5036 2 2 1 25 0 1 450 5036_2 -PRL_Young_Final 5036 2 3 1 25 0 1 452 5036_2 -PRL_Young_Final 5036 2 4 1 -25 0 1 400 5036_2 -PRL_Young_Final 5036 2 5 1 25 0 1 391 5036_2 -PRL_Young_Final 5036 2 6 1 25 0 1 2144 5036_2 -PRL_Young_Final 5036 2 7 1 25 0 1 429 5036_2 -PRL_Young_Final 5036 2 8 1 25 0 1 502 5036_2 -PRL_Young_Final 5036 2 9 1 25 0 0 372 5036_2 -PRL_Young_Final 5036 2 10 1 -25 0 0 107 5036_2 -PRL_Young_Final 5036 2 11 1 -25 0 0 486 5036_2 -PRL_Young_Final 5036 2 12 2 25 1 1 674 5036_2 -PRL_Young_Final 5036 2 13 2 25 0 1 478 5036_2 -PRL_Young_Final 5036 2 14 2 25 0 1 506 5036_2 -PRL_Young_Final 5036 2 15 2 -25 0 1 505 5036_2 -PRL_Young_Final 5036 2 16 2 25 0 1 285 5036_2 -PRL_Young_Final 5036 2 17 2 -25 0 0 485 5036_2 -PRL_Young_Final 5036 2 18 2 -25 0 0 497 5036_2 -PRL_Young_Final 5036 2 19 1 25 1 1 392 5036_2 -PRL_Young_Final 5036 2 20 1 -25 0 1 546 5036_2 -PRL_Young_Final 5036 2 21 1 25 0 1 107 5036_2 -PRL_Young_Final 5036 2 22 1 25 0 1 1539 5036_2 -PRL_Young_Final 5036 2 23 1 25 0 1 485 5036_2 -PRL_Young_Final 5036 2 24 1 25 0 1 490 5036_2 -PRL_Young_Final 5036 2 25 1 25 0 1 272 5036_2 -PRL_Young_Final 5036 2 26 1 -25 0 1 321 5036_2 -PRL_Young_Final 5036 2 27 2 -25 1 1 1991 5036_2 -PRL_Young_Final 5036 2 28 1 -25 1 0 1638 5036_2 -PRL_Young_Final 5036 2 29 2 25 1 1 968 5036_2 -PRL_Young_Final 5036 2 30 2 25 0 1 514 5036_2 -PRL_Young_Final 5036 2 31 2 25 0 1 508 5036_2 -PRL_Young_Final 5036 2 32 2 25 0 1 308 5036_2 -PRL_Young_Final 5036 2 33 2 25 0 1 311 5036_2 -PRL_Young_Final 5036 2 34 2 25 0 1 240 5036_2 -PRL_Young_Final 5036 2 35 2 -25 0 0 499 5036_2 -PRL_Young_Final 5036 2 36 2 25 0 0 880 5036_2 -PRL_Young_Final 5036 2 37 2 -25 0 0 42 5036_2 -PRL_Young_Final 5036 2 38 1 -25 1 1 1118 5036_2 -PRL_Young_Final 5036 2 39 1 25 0 1 2073 5036_2 -PRL_Young_Final 5036 2 40 1 25 0 1 1534 5036_2 -PRL_Young_Final 5036 2 41 1 25 0 1 1537 5036_2 -PRL_Young_Final 5036 2 42 1 25 0 1 1447 5036_2 -PRL_Young_Final 5036 2 43 1 25 0 1 392 5036_2 -PRL_Young_Final 5036 2 44 1 25 0 1 447 5036_2 -PRL_Young_Final 5036 2 45 1 25 0 1 1778 5036_2 -PRL_Young_Final 5036 2 46 1 -25 0 0 1085 5036_2 -PRL_Young_Final 5036 2 47 1 -25 0 0 1708 5036_2 
-PRL_Young_Final 5036 2 48 2 -25 1 1 469 5036_2 -PRL_Young_Final 5036 2 49 2 25 0 1 1336 5036_2 -PRL_Young_Final 5036 2 50 2 25 0 1 723 5036_2 -PRL_Young_Final 5036 2 51 2 -25 0 1 507 5036_2 -PRL_Young_Final 5036 2 52 2 25 0 1 261 5036_2 -PRL_Young_Final 5036 2 53 2 25 0 1 506 5036_2 -PRL_Young_Final 5036 2 54 2 25 0 1 437 5036_2 -PRL_Young_Final 5036 2 55 2 -25 0 0 178 5036_2 -PRL_Young_Final 5036 2 56 2 -25 0 0 1540 5036_2 -PRL_Young_Final 5036 2 57 1 25 1 1 458 5036_2 -PRL_Young_Final 5036 2 58 1 25 0 1 445 5036_2 -PRL_Young_Final 5036 2 59 1 -25 0 1 506 5036_2 -PRL_Young_Final 5036 2 60 1 -25 0 1 300 5036_2 -PRL_Young_Final 5036 2 61 2 -25 1 0 1634 5036_2 -PRL_Young_Final 5036 2 62 2 -25 0 0 268 5036_2 -PRL_Young_Final 5036 2 63 2 25 0 0 408 5036_2 -PRL_Young_Final 5036 2 64 2 -25 0 0 525 5036_2 -PRL_Young_Final 5036 2 65 2 25 0 0 88 5036_2 -PRL_Young_Final 5036 2 66 2 -25 0 0 1491 5036_2 -PRL_Young_Final 5036 2 67 2 -25 0 0 815 5036_2 -PRL_Young_Final 5036 2 68 1 25 1 1 829 5036_2 -PRL_Young_Final 5036 2 69 1 25 0 1 459 5036_2 -PRL_Young_Final 5036 2 70 1 25 0 1 808 5036_2 -PRL_Young_Final 5036 2 71 1 25 0 1 798 5036_2 -PRL_Young_Final 5036 2 72 1 25 0 1 541 5036_2 -PRL_Young_Final 5036 2 73 1 25 0 1 710 5036_2 -PRL_Young_Final 5036 2 74 1 -25 0 1 629 5036_2 -PRL_Young_Final 5036 2 75 1 -25 0 0 547 5036_2 -PRL_Young_Final 5036 2 76 2 25 1 1 2264 5036_2 -PRL_Young_Final 5036 2 77 2 25 0 1 443 5036_2 -PRL_Young_Final 5036 2 78 2 25 0 1 569 5036_2 -PRL_Young_Final 5036 2 79 2 25 0 1 371 5036_2 -PRL_Young_Final 5036 2 80 2 25 0 1 495 5036_2 -PRL_Young_Final 5036 2 81 2 25 0 1 464 5036_2 -PRL_Young_Final 5036 2 82 2 25 0 1 24 5036_2 -PRL_Young_Final 5036 2 83 2 -25 0 1 517 5036_2 -PRL_Young_Final 5036 2 84 2 -25 0 0 562 5036_2 -PRL_Young_Final 5036 2 85 1 25 1 1 1933 5036_2 -PRL_Young_Final 5036 2 86 1 25 0 1 485 5036_2 -PRL_Young_Final 5036 2 87 1 -25 0 1 79 5036_2 -PRL_Young_Final 5036 2 88 1 25 0 1 874 5036_2 -PRL_Young_Final 5036 2 89 1 25 0 1 125 5036_2 -PRL_Young_Final 5036 2 90 1 25 0 1 602 5036_2 -PRL_Young_Final 5036 2 91 1 25 0 1 622 5036_2 -PRL_Young_Final 5036 2 92 1 25 0 1 425 5036_2 -PRL_Young_Final 5036 2 93 1 -25 0 0 512 5036_2 -PRL_Young_Final 5036 2 94 1 -25 0 0 318 5036_2 -PRL_Young_Final 5036 2 95 2 -25 1 1 654 5036_2 -PRL_Young_Final 5036 2 96 2 -25 0 1 83 5036_2 -PRL_Young_Final 5036 2 97 2 25 0 1 195 5036_2 -PRL_Young_Final 5036 2 98 2 25 0 1 301 5036_2 -PRL_Young_Final 5036 2 99 2 25 0 1 201 5036_2 -PRL_Young_Final 5036 2 100 2 25 0 0 498 5036_2 -PRL_Young_Final 5036 2 101 2 25 0 0 467 5036_2 -PRL_Young_Final 5036 2 102 2 -25 0 0 521 5036_2 -PRL_Young_Final 5036 2 103 2 -25 0 0 529 5036_2 -PRL_Young_Final 5036 2 104 2 -25 0 0 252 5036_2 -PRL_Young_Final 5036 2 105 1 25 1 1 424 5036_2 -PRL_Young_Final 5036 2 106 1 25 0 1 448 5036_2 -PRL_Young_Final 5036 2 107 1 25 0 1 403 5036_2 -PRL_Young_Final 5036 2 108 1 -25 0 1 130 5036_2 -PRL_Young_Final 5036 2 109 1 25 0 1 200 5036_2 -PRL_Young_Final 5036 2 110 1 -25 0 0 308 5036_2 -PRL_Young_Final 5036 2 111 1 -25 0 0 566 5036_2 -PRL_Young_Final 5036 2 112 2 25 1 1 196 5036_2 -PRL_Young_Final 5036 2 113 2 25 0 1 387 5036_2 -PRL_Young_Final 5036 2 114 2 25 0 1 1008 5036_2 -PRL_Young_Final 5036 2 115 2 25 0 1 1355 5036_2 -PRL_Young_Final 5036 2 116 2 25 0 1 153 5036_2 -PRL_Young_Final 5036 2 117 2 25 0 1 319 5036_2 -PRL_Young_Final 5036 2 118 2 -25 0 1 453 5036_2 -PRL_Young_Final 5036 2 119 2 25 0 1 228 5036_2 -PRL_Young_Final 5036 2 120 2 -25 0 0 1982 5036_2 -PRL_Young_Final 5036 2 121 2 25 0 0 247 5036_2 -PRL_Young_Final 5036 
2 122 2 -25 0 0 1437 5036_2 -PRL_Young_Final 5036 2 123 2 -25 0 0 287 5036_2 -PRL_Young_Final 5036 2 124 2 -25 0 0 898 5036_2 -PRL_Young_Final 5036 2 125 2 -25 0 0 451 5036_2 -PRL_Young_Final 5036 2 126 1 25 1 1 416 5036_2 -PRL_Young_Final 5036 2 127 1 -25 0 1 1363 5036_2 -PRL_Young_Final 5036 2 128 1 25 0 1 383 5036_2 -PRL_Young_Final 5036 2 129 1 25 0 1 508 5036_2 -PRL_Young_Final 5036 2 130 1 25 0 1 562 5036_2 -PRL_Young_Final 5036 2 131 1 25 0 1 546 5036_2 -PRL_Young_Final 5036 2 132 1 -25 0 0 162 5036_2 -PRL_Young_Final 5036 2 133 1 -25 0 0 88 5036_2 -PRL_Young_Final 5036 2 134 2 25 1 1 560 5036_2 -PRL_Young_Final 5036 2 135 2 -25 0 1 730 5036_2 -PRL_Young_Final 5036 2 136 2 -25 0 1 163 5036_2 -PRL_Young_Final 5036 2 137 1 -25 1 0 1807 5036_2 -PRL_Young_Final 5036 2 138 1 25 0 0 506 5036_2 -PRL_Young_Final 5036 2 139 1 -25 0 0 521 5036_2 -PRL_Young_Final 5036 2 140 1 25 0 0 350 5036_2 -PRL_Young_Final 5036 2 141 1 -25 0 0 73 5036_2 -PRL_Young_Final 5036 2 142 2 25 1 1 600 5036_2 -PRL_Young_Final 5036 2 143 2 25 0 1 441 5036_2 -PRL_Young_Final 5036 2 144 2 25 0 1 131 5036_2 -PRL_Young_Final 5036 2 145 2 25 0 1 360 5036_2 -PRL_Young_Final 5036 2 146 2 25 0 1 553 5036_2 -PRL_Young_Final 5036 2 147 2 25 0 1 36 5036_2 -PRL_Young_Final 5036 2 148 2 -25 0 1 460 5036_2 -PRL_Young_Final 5036 2 149 1 -25 1 0 1844 5036_2 -PRL_Young_Final 5036 2 150 2 25 1 1 761 5036_2 -PRL_Young_Final 5036 2 151 2 25 0 1 334 5036_2 -PRL_Young_Final 5036 2 152 2 25 0 1 407 5036_2 -PRL_Young_Final 5036 2 153 2 25 0 1 255 5036_2 -PRL_Young_Final 5036 2 154 2 25 0 1 1566 5036_2 -PRL_Young_Final 5036 2 155 2 25 0 1 656 5036_2 -PRL_Young_Final 5036 2 156 2 25 0 1 514 5036_2 -PRL_Young_Final 5036 2 157 2 -25 0 1 111 5036_2 -PRL_Young_Final 5036 2 158 1 25 1 1 1447 5036_2 -PRL_Young_Final 5036 2 159 1 25 0 1 494 5036_2 -PRL_Young_Final 5036 2 160 1 -25 0 1 629 5036_2 -PRL_Young_Final 5036 2 161 1 25 0 1 973 5036_2 -PRL_Young_Final 5036 2 162 1 25 0 1 183 5036_2 -PRL_Young_Final 5036 2 163 1 -25 0 0 75 5036_2 -PRL_Young_Final 5036 2 164 2 25 1 1 1017 5036_2 -PRL_Young_Final 5036 2 165 2 25 0 1 513 5036_2 -PRL_Young_Final 5036 2 166 2 25 0 1 1553 5036_2 -PRL_Young_Final 5036 2 167 2 -25 0 1 920 5036_2 -PRL_Young_Final 5036 2 168 2 -25 0 1 509 5036_2 -PRL_Young_Final 5036 2 169 1 -25 1 0 1115 5036_2 -PRL_Young_Final 5036 2 170 1 -25 0 0 409 5036_2 -PRL_Young_Final 5036 2 171 2 25 1 1 737 5036_2 -PRL_Young_Final 5036 2 172 2 25 0 1 19 5036_2 -PRL_Young_Final 5036 2 173 2 25 0 1 556 5036_2 -PRL_Young_Final 5036 2 174 2 25 0 1 461 5036_2 -PRL_Young_Final 5036 2 175 2 25 0 1 740 5036_2 -PRL_Young_Final 5036 2 176 2 25 0 1 483 5036_2 -PRL_Young_Final 5036 2 177 2 -25 0 1 488 5036_2 -PRL_Young_Final 5036 2 178 2 25 0 1 143 5036_2 -PRL_Young_Final 5036 2 179 2 -25 0 0 701 5036_2 -PRL_Young_Final 5036 2 180 1 25 1 1 1436 5036_2 -PRL_Young_Final 5036 2 181 1 25 0 1 471 5036_2 -PRL_Young_Final 5036 2 182 1 25 0 1 213 5036_2 -PRL_Young_Final 5036 2 183 1 25 0 1 377 5036_2 -PRL_Young_Final 5036 2 184 1 25 0 1 490 5036_2 -PRL_Young_Final 5036 2 185 1 25 0 1 631 5036_2 -PRL_Young_Final 5036 2 186 1 25 0 0 245 5036_2 -PRL_Young_Final 5036 2 187 1 25 0 0 995 5036_2 -PRL_Young_Final 5036 2 188 1 -25 0 0 974 5036_2 -PRL_Young_Final 5036 2 189 2 -25 1 1 1840 5036_2 -PRL_Young_Final 5036 2 190 1 -25 1 0 1510 5036_2 -PRL_Young_Final 5036 2 191 1 -25 0 0 412 5036_2 -PRL_Young_Final 5036 2 192 2 25 1 1 377 5036_2 -PRL_Young_Final 5036 2 193 2 25 0 1 160 5036_2 -PRL_Young_Final 5036 2 194 2 -25 0 1 410 5036_2 -PRL_Young_Final 5036 2 195 2 25 0 1 
82 5036_2 -PRL_Young_Final 5036 2 196 2 25 0 1 509 5036_2 -PRL_Young_Final 5036 2 197 2 25 0 1 430 5036_2 -PRL_Young_Final 5036 2 198 2 -25 0 0 414 5036_2 -PRL_Young_Final 5036 2 199 1 25 1 1 282 5036_2 -PRL_Young_Final 5036 2 200 1 25 0 1 400 5036_2 -PRL_Young_Final 5036 3 1 1 -25 0 1 2267 5036_3 -PRL_Young_Final 5036 3 2 1 -25 0 1 628 5036_3 -PRL_Young_Final 5036 3 3 2 -25 1 0 1419 5036_3 -PRL_Young_Final 5036 3 4 2 -25 0 0 940 5036_3 -PRL_Young_Final 5036 3 5 2 25 0 0 556 5036_3 -PRL_Young_Final 5036 3 6 2 -25 0 0 378 5036_3 -PRL_Young_Final 5036 3 7 2 -25 0 0 304 5036_3 -PRL_Young_Final 5036 3 8 2 -25 0 0 819 5036_3 -PRL_Young_Final 5036 3 9 1 25 1 1 770 5036_3 -PRL_Young_Final 5036 3 10 1 25 0 1 1243 5036_3 -PRL_Young_Final 5036 3 11 1 25 0 1 587 5036_3 -PRL_Young_Final 5036 3 12 1 25 0 1 109 5036_3 -PRL_Young_Final 5036 3 13 1 25 0 1 710 5036_3 -PRL_Young_Final 5036 3 14 1 -25 0 0 446 5036_3 -PRL_Young_Final 5036 3 15 1 -25 0 0 174 5036_3 -PRL_Young_Final 5036 3 16 1 -25 0 0 946 5036_3 -PRL_Young_Final 5036 3 17 2 25 1 1 453 5036_3 -PRL_Young_Final 5036 3 18 2 -25 0 1 496 5036_3 -PRL_Young_Final 5036 3 19 2 25 0 1 447 5036_3 -PRL_Young_Final 5036 3 20 2 25 0 1 464 5036_3 -PRL_Young_Final 5036 3 21 2 25 0 1 263 5036_3 -PRL_Young_Final 5036 3 22 2 25 0 1 321 5036_3 -PRL_Young_Final 5036 3 23 2 25 0 1 326 5036_3 -PRL_Young_Final 5036 3 24 2 -25 0 0 421 5036_3 -PRL_Young_Final 5036 3 25 1 25 1 1 801 5036_3 -PRL_Young_Final 5036 3 26 1 25 0 1 452 5036_3 -PRL_Young_Final 5036 3 27 1 -25 0 1 510 5036_3 -PRL_Young_Final 5036 3 28 1 25 0 1 490 5036_3 -PRL_Young_Final 5036 3 29 1 25 0 1 464 5036_3 -PRL_Young_Final 5036 3 30 1 25 0 0 476 5036_3 -PRL_Young_Final 5036 3 31 1 -25 0 0 610 5036_3 -PRL_Young_Final 5036 3 32 2 -25 1 1 877 5036_3 -PRL_Young_Final 5036 3 33 2 25 0 1 769 5036_3 -PRL_Young_Final 5036 3 34 2 25 0 1 1131 5036_3 -PRL_Young_Final 5036 3 35 2 25 0 1 512 5036_3 -PRL_Young_Final 5036 3 36 2 25 0 1 465 5036_3 -PRL_Young_Final 5036 3 37 2 25 0 1 486 5036_3 -PRL_Young_Final 5036 3 38 2 -25 0 1 669 5036_3 -PRL_Young_Final 5036 3 39 2 -25 0 1 949 5036_3 -PRL_Young_Final 5036 3 40 1 25 1 1 830 5036_3 -PRL_Young_Final 5036 3 41 1 25 0 1 490 5036_3 -PRL_Young_Final 5036 3 42 1 25 0 1 229 5036_3 -PRL_Young_Final 5036 3 43 1 25 0 1 331 5036_3 -PRL_Young_Final 5036 3 44 1 25 0 1 462 5036_3 -PRL_Young_Final 5036 3 45 1 25 0 1 272 5036_3 -PRL_Young_Final 5036 3 46 1 25 0 0 1480 5036_3 -PRL_Young_Final 5036 3 47 1 -25 0 0 562 5036_3 -PRL_Young_Final 5036 3 48 1 -25 0 0 908 5036_3 -PRL_Young_Final 5036 3 49 2 -25 1 1 467 5036_3 -PRL_Young_Final 5036 3 50 2 25 0 1 392 5036_3 -PRL_Young_Final 5036 3 51 2 25 0 1 457 5036_3 -PRL_Young_Final 5036 3 52 2 25 0 1 667 5036_3 -PRL_Young_Final 5036 3 53 2 25 0 1 576 5036_3 -PRL_Young_Final 5036 3 54 2 -25 0 0 196 5036_3 -PRL_Young_Final 5036 3 55 2 -25 0 0 213 5036_3 -PRL_Young_Final 5036 3 56 1 25 1 1 867 5036_3 -PRL_Young_Final 5036 3 57 1 25 0 1 183 5036_3 -PRL_Young_Final 5036 3 58 1 25 0 1 403 5036_3 -PRL_Young_Final 5036 3 59 1 -25 0 1 460 5036_3 -PRL_Young_Final 5036 3 60 1 25 0 1 747 5036_3 -PRL_Young_Final 5036 3 61 1 25 0 1 758 5036_3 -PRL_Young_Final 5036 3 62 1 -25 0 0 62 5036_3 -PRL_Young_Final 5036 3 63 1 -25 0 0 715 5036_3 -PRL_Young_Final 5036 3 64 1 25 0 0 231 5036_3 -PRL_Young_Final 5036 3 65 1 25 0 0 458 5036_3 -PRL_Young_Final 5036 3 66 1 -25 0 0 487 5036_3 -PRL_Young_Final 5036 3 67 1 -25 0 0 59 5036_3 -PRL_Young_Final 5036 3 68 2 -25 1 1 52 5036_3 -PRL_Young_Final 5036 3 69 2 25 0 1 789 5036_3 -PRL_Young_Final 5036 3 70 2 25 0 1 432 
5036_3 -PRL_Young_Final 5036 3 71 2 25 0 1 759 5036_3 -PRL_Young_Final 5036 3 72 2 25 0 1 3790 5036_3 -PRL_Young_Final 5036 3 73 2 -25 0 0 638 5036_3 -PRL_Young_Final 5036 3 74 2 -25 0 0 1516 5036_3 -PRL_Young_Final 5036 3 75 1 25 1 1 759 5036_3 -PRL_Young_Final 5036 3 76 1 -25 0 1 455 5036_3 -PRL_Young_Final 5036 3 77 1 -25 0 1 582 5036_3 -PRL_Young_Final 5036 3 78 1 25 0 1 133 5036_3 -PRL_Young_Final 5036 3 79 1 25 0 1 456 5036_3 -PRL_Young_Final 5036 3 80 1 25 0 1 619 5036_3 -PRL_Young_Final 5036 3 81 1 25 0 1 513 5036_3 -PRL_Young_Final 5036 3 82 1 25 0 1 985 5036_3 -PRL_Young_Final 5036 3 83 1 -25 0 0 134 5036_3 -PRL_Young_Final 5036 3 84 1 -25 0 0 569 5036_3 -PRL_Young_Final 5036 3 85 2 25 1 1 473 5036_3 -PRL_Young_Final 5036 3 86 2 -25 0 1 891 5036_3 -PRL_Young_Final 5036 3 87 2 25 0 1 487 5036_3 -PRL_Young_Final 5036 3 88 2 25 0 1 333 5036_3 -PRL_Young_Final 5036 3 89 2 25 0 1 424 5036_3 -PRL_Young_Final 5036 3 90 2 25 0 1 474 5036_3 -PRL_Young_Final 5036 3 91 2 25 0 1 407 5036_3 -PRL_Young_Final 5036 3 92 2 25 0 1 236 5036_3 -PRL_Young_Final 5036 3 93 2 25 0 0 57 5036_3 -PRL_Young_Final 5036 3 94 2 -25 0 0 785 5036_3 -PRL_Young_Final 5036 3 95 1 25 1 1 933 5036_3 -PRL_Young_Final 5036 3 96 1 -25 0 1 522 5036_3 -PRL_Young_Final 5036 3 97 1 25 0 1 243 5036_3 -PRL_Young_Final 5036 3 98 1 25 0 1 58 5036_3 -PRL_Young_Final 5036 3 99 1 -25 0 1 498 5036_3 -PRL_Young_Final 5036 3 100 2 -25 1 0 587 5036_3 -PRL_Young_Final 5036 3 101 2 -25 0 0 89 5036_3 -PRL_Young_Final 5036 3 102 2 -25 0 0 201 5036_3 -PRL_Young_Final 5036 3 103 1 25 1 1 446 5036_3 -PRL_Young_Final 5036 3 104 1 25 0 1 540 5036_3 -PRL_Young_Final 5036 3 105 1 25 0 1 507 5036_3 -PRL_Young_Final 5036 3 106 1 25 0 1 432 5036_3 -PRL_Young_Final 5036 3 107 1 25 0 1 525 5036_3 -PRL_Young_Final 5036 3 108 1 -25 0 1 47 5036_3 -PRL_Young_Final 5036 3 109 1 -25 0 1 210 5036_3 -PRL_Young_Final 5036 3 110 2 25 1 1 661 5036_3 -PRL_Young_Final 5036 3 111 2 25 0 1 225 5036_3 -PRL_Young_Final 5036 3 112 2 25 0 1 443 5036_3 -PRL_Young_Final 5036 3 113 2 25 0 1 289 5036_3 -PRL_Young_Final 5036 3 114 2 25 0 1 596 5036_3 -PRL_Young_Final 5036 3 115 2 -25 0 0 505 5036_3 -PRL_Young_Final 5036 3 116 2 -25 0 0 649 5036_3 -PRL_Young_Final 5036 3 117 1 25 1 1 1022 5036_3 -PRL_Young_Final 5036 3 118 1 -25 0 1 1342 5036_3 -PRL_Young_Final 5036 3 119 1 25 0 1 354 5036_3 -PRL_Young_Final 5036 3 120 1 25 0 1 1119 5036_3 -PRL_Young_Final 5036 3 121 1 25 0 1 911 5036_3 -PRL_Young_Final 5036 3 122 1 25 0 1 2367 5036_3 -PRL_Young_Final 5036 3 123 1 25 0 1 834 5036_3 -PRL_Young_Final 5036 3 124 1 25 0 1 1194 5036_3 -PRL_Young_Final 5036 3 125 1 -25 0 0 2371 5036_3 -PRL_Young_Final 5036 3 126 1 25 0 0 2397 5036_3 -PRL_Young_Final 5036 3 127 1 -25 0 0 313 5036_3 -PRL_Young_Final 5036 3 128 1 25 0 0 547 5036_3 -PRL_Young_Final 5036 3 129 1 -25 0 0 1113 5036_3 -PRL_Young_Final 5036 3 130 1 -25 0 0 283 5036_3 -PRL_Young_Final 5036 3 131 2 25 1 1 73 5036_3 -PRL_Young_Final 5036 3 132 2 -25 0 1 2660 5036_3 -PRL_Young_Final 5036 3 133 2 25 0 1 426 5036_3 -PRL_Young_Final 5036 3 134 2 25 0 1 950 5036_3 -PRL_Young_Final 5036 3 135 2 -25 0 1 744 5036_3 -PRL_Young_Final 5036 3 136 2 25 0 1 637 5036_3 -PRL_Young_Final 5036 3 137 2 -25 0 0 1133 5036_3 -PRL_Young_Final 5036 3 138 1 25 1 1 1031 5036_3 -PRL_Young_Final 5036 3 139 1 25 0 1 540 5036_3 -PRL_Young_Final 5036 3 140 1 25 0 1 447 5036_3 -PRL_Young_Final 5036 3 141 1 25 0 1 594 5036_3 -PRL_Young_Final 5036 3 142 1 -25 0 1 400 5036_3 -PRL_Young_Final 5036 3 143 1 -25 0 1 509 5036_3 -PRL_Young_Final 5036 3 144 2 -25 1 0 
1377 5036_3 -PRL_Young_Final 5036 3 145 2 -25 0 0 503 5036_3 -PRL_Young_Final 5036 3 146 1 25 1 1 861 5036_3 -PRL_Young_Final 5036 3 147 1 25 0 1 457 5036_3 -PRL_Young_Final 5036 3 148 1 25 0 1 486 5036_3 -PRL_Young_Final 5036 3 149 1 25 0 1 470 5036_3 -PRL_Young_Final 5036 3 150 1 25 0 1 438 5036_3 -PRL_Young_Final 5036 3 151 1 25 0 1 977 5036_3 -PRL_Young_Final 5036 3 152 1 -25 0 1 762 5036_3 -PRL_Young_Final 5036 3 153 1 25 0 1 1138 5036_3 -PRL_Young_Final 5036 3 154 1 -25 0 0 578 5036_3 -PRL_Young_Final 5036 3 155 2 25 1 1 754 5036_3 -PRL_Young_Final 5036 3 156 2 25 0 1 482 5036_3 -PRL_Young_Final 5036 3 157 2 25 0 1 447 5036_3 -PRL_Young_Final 5036 3 158 2 25 0 1 952 5036_3 -PRL_Young_Final 5036 3 159 2 25 0 1 1078 5036_3 -PRL_Young_Final 5036 3 160 2 25 0 1 934 5036_3 -PRL_Young_Final 5036 3 161 2 -25 0 1 481 5036_3 -PRL_Young_Final 5036 3 162 2 25 0 0 563 5036_3 -PRL_Young_Final 5036 3 163 2 25 0 0 699 5036_3 -PRL_Young_Final 5036 3 164 2 -25 0 0 509 5036_3 -PRL_Young_Final 5036 3 165 1 25 1 1 1000 5036_3 -PRL_Young_Final 5036 3 166 1 25 0 1 488 5036_3 -PRL_Young_Final 5036 3 167 1 -25 0 1 1019 5036_3 -PRL_Young_Final 5036 3 168 1 25 0 1 613 5036_3 -PRL_Young_Final 5036 3 169 1 25 0 1 470 5036_3 -PRL_Young_Final 5036 3 170 1 25 0 1 436 5036_3 -PRL_Young_Final 5036 3 171 1 -25 0 0 269 5036_3 -PRL_Young_Final 5036 3 172 2 25 1 1 1473 5036_3 -PRL_Young_Final 5036 3 173 2 25 0 1 458 5036_3 -PRL_Young_Final 5036 3 174 2 -25 0 1 433 5036_3 -PRL_Young_Final 5036 3 175 2 -25 0 1 556 5036_3 -PRL_Young_Final 5036 3 176 1 -25 1 0 532 5036_3 -PRL_Young_Final 5036 3 177 1 -25 0 0 395 5036_3 -PRL_Young_Final 5036 3 178 1 -25 0 0 393 5036_3 -PRL_Young_Final 5036 3 179 2 25 1 1 421 5036_3 -PRL_Young_Final 5036 3 180 2 25 0 1 342 5036_3 -PRL_Young_Final 5036 3 181 2 25 0 1 436 5036_3 -PRL_Young_Final 5036 3 182 2 25 0 1 126 5036_3 -PRL_Young_Final 5036 3 183 2 25 0 1 533 5036_3 -PRL_Young_Final 5036 3 184 2 -25 0 0 474 5036_3 -PRL_Young_Final 5036 3 185 2 25 0 0 524 5036_3 -PRL_Young_Final 5036 3 186 2 -25 0 0 805 5036_3 -PRL_Young_Final 5036 3 187 1 25 1 1 1009 5036_3 -PRL_Young_Final 5036 3 188 1 -25 0 1 490 5036_3 -PRL_Young_Final 5036 3 189 1 25 0 1 795 5036_3 -PRL_Young_Final 5036 3 190 1 25 0 1 487 5036_3 -PRL_Young_Final 5036 3 191 1 25 0 1 946 5036_3 -PRL_Young_Final 5036 3 192 1 25 0 1 1127 5036_3 -PRL_Young_Final 5036 3 193 1 -25 0 0 677 5036_3 -PRL_Young_Final 5036 3 194 1 -25 0 0 782 5036_3 -PRL_Young_Final 5036 3 195 2 25 1 1 521 5036_3 -PRL_Young_Final 5036 3 196 2 25 0 1 480 5036_3 -PRL_Young_Final 5036 3 197 2 25 0 1 450 5036_3 -PRL_Young_Final 5036 3 198 2 -25 0 1 429 5036_3 -PRL_Young_Final 5036 3 199 2 25 0 1 585 5036_3 -PRL_Young_Final 5036 3 200 2 -25 0 0 102 5036_3 -PRL_Young_Final 5035 1 1 1 25 0 0 753 5035_1 -PRL_Young_Final 5035 1 2 1 -25 0 0 321 5035_1 -PRL_Young_Final 5035 1 3 1 -25 0 0 283 5035_1 -PRL_Young_Final 5035 1 4 2 25 1 1 300 5035_1 -PRL_Young_Final 5035 1 5 2 25 0 1 337 5035_1 -PRL_Young_Final 5035 1 6 2 -25 0 1 285 5035_1 -PRL_Young_Final 5035 1 7 2 25 0 1 363 5035_1 -PRL_Young_Final 5035 1 8 2 25 0 1 281 5035_1 -PRL_Young_Final 5035 1 9 2 -25 0 0 287 5035_1 -PRL_Young_Final 5035 1 10 1 -25 1 1 310 5035_1 -PRL_Young_Final 5035 1 11 2 -25 1 0 906 5035_1 -PRL_Young_Final 5035 1 12 2 -25 0 0 584 5035_1 -PRL_Young_Final 5035 1 13 1 25 1 1 239 5035_1 -PRL_Young_Final 5035 1 14 1 25 0 1 273 5035_1 -PRL_Young_Final 5035 1 15 1 25 0 1 698 5035_1 -PRL_Young_Final 5035 1 16 1 25 0 1 365 5035_1 -PRL_Young_Final 5035 1 17 1 25 0 1 295 5035_1 -PRL_Young_Final 5035 1 18 1 
-25 0 0 305 5035_1 -PRL_Young_Final 5035 1 19 1 25 0 0 284 5035_1 -PRL_Young_Final 5035 1 20 1 25 0 0 278 5035_1 -PRL_Young_Final 5035 1 21 1 -25 0 0 276 5035_1 -PRL_Young_Final 5035 1 22 1 -25 0 0 239 5035_1 -PRL_Young_Final 5035 1 23 2 -25 1 1 342 5035_1 -PRL_Young_Final 5035 1 24 2 -25 0 1 536 5035_1 -PRL_Young_Final 5035 1 25 1 -25 1 0 464 5035_1 -PRL_Young_Final 5035 1 26 1 -25 0 0 277 5035_1 -PRL_Young_Final 5035 1 27 1 -25 0 0 412 5035_1 -PRL_Young_Final 5035 1 28 2 25 1 1 371 5035_1 -PRL_Young_Final 5035 1 29 2 25 0 1 311 5035_1 -PRL_Young_Final 5035 1 30 2 25 0 1 303 5035_1 -PRL_Young_Final 5035 1 31 2 25 0 1 410 5035_1 -PRL_Young_Final 5035 1 32 2 25 0 1 293 5035_1 -PRL_Young_Final 5035 1 33 2 25 0 1 706 5035_1 -PRL_Young_Final 5035 1 34 2 -25 0 1 484 5035_1 -PRL_Young_Final 5035 1 35 2 25 0 1 349 5035_1 -PRL_Young_Final 5035 1 36 2 -25 0 0 482 5035_1 -PRL_Young_Final 5035 1 37 2 25 0 0 649 5035_1 -PRL_Young_Final 5035 1 38 2 -25 0 0 543 5035_1 -PRL_Young_Final 5035 1 39 2 -25 0 0 419 5035_1 -PRL_Young_Final 5035 1 40 1 25 1 1 337 5035_1 -PRL_Young_Final 5035 1 41 1 25 0 1 331 5035_1 -PRL_Young_Final 5035 1 42 1 25 0 1 654 5035_1 -PRL_Young_Final 5035 1 43 1 25 0 1 301 5035_1 -PRL_Young_Final 5035 1 44 1 25 0 1 278 5035_1 -PRL_Young_Final 5035 1 45 1 -25 0 0 610 5035_1 -PRL_Young_Final 5035 1 46 1 -25 0 0 427 5035_1 -PRL_Young_Final 5035 1 47 2 25 1 1 336 5035_1 -PRL_Young_Final 5035 1 48 2 -25 0 1 271 5035_1 -PRL_Young_Final 5035 1 49 2 25 0 1 244 5035_1 -PRL_Young_Final 5035 1 50 2 25 0 1 577 5035_1 -PRL_Young_Final 5035 1 51 2 -25 0 1 291 5035_1 -PRL_Young_Final 5035 1 52 2 25 0 1 653 5035_1 -PRL_Young_Final 5035 1 53 2 25 0 1 327 5035_1 -PRL_Young_Final 5035 1 54 2 -25 0 0 306 5035_1 -PRL_Young_Final 5035 1 55 2 -25 0 0 295 5035_1 -PRL_Young_Final 5035 1 56 1 25 1 1 294 5035_1 -PRL_Young_Final 5035 1 57 1 25 0 1 350 5035_1 -PRL_Young_Final 5035 1 58 1 25 0 1 737 5035_1 -PRL_Young_Final 5035 1 59 1 -25 0 1 577 5035_1 -PRL_Young_Final 5035 1 60 1 -25 0 1 331 5035_1 -PRL_Young_Final 5035 1 61 2 -25 1 0 271 5035_1 -PRL_Young_Final 5035 1 62 2 25 0 0 381 5035_1 -PRL_Young_Final 5035 1 63 2 -25 0 0 332 5035_1 -PRL_Young_Final 5035 1 64 2 25 0 0 425 5035_1 -PRL_Young_Final 5035 1 65 2 -25 0 0 290 5035_1 -PRL_Young_Final 5035 1 66 2 -25 0 0 300 5035_1 -PRL_Young_Final 5035 1 67 1 25 1 1 625 5035_1 -PRL_Young_Final 5035 1 68 1 25 0 1 834 5035_1 -PRL_Young_Final 5035 1 69 1 25 0 1 448 5035_1 -PRL_Young_Final 5035 1 70 1 25 0 1 348 5035_1 -PRL_Young_Final 5035 1 71 1 25 0 1 367 5035_1 -PRL_Young_Final 5035 1 72 1 -25 0 0 296 5035_1 -PRL_Young_Final 5035 1 73 1 -25 0 0 624 5035_1 -PRL_Young_Final 5035 1 74 2 25 1 1 371 5035_1 -PRL_Young_Final 5035 1 75 2 -25 0 1 282 5035_1 -PRL_Young_Final 5035 1 76 2 25 0 1 302 5035_1 -PRL_Young_Final 5035 1 77 2 25 0 1 647 5035_1 -PRL_Young_Final 5035 1 78 2 25 0 1 374 5035_1 -PRL_Young_Final 5035 1 79 2 25 0 1 461 5035_1 -PRL_Young_Final 5035 1 80 2 25 0 1 260 5035_1 -PRL_Young_Final 5035 1 81 2 -25 0 0 342 5035_1 -PRL_Young_Final 5035 1 82 2 -25 0 0 438 5035_1 -PRL_Young_Final 5035 1 83 1 25 1 1 290 5035_1 -PRL_Young_Final 5035 1 84 1 25 0 1 350 5035_1 -PRL_Young_Final 5035 1 85 1 -25 0 1 202 5035_1 -PRL_Young_Final 5035 1 86 1 25 0 1 333 5035_1 -PRL_Young_Final 5035 1 87 1 25 0 1 687 5035_1 -PRL_Young_Final 5035 1 88 1 25 0 0 280 5035_1 -PRL_Young_Final 5035 1 89 1 25 0 0 358 5035_1 -PRL_Young_Final 5035 1 90 1 -25 0 0 570 5035_1 -PRL_Young_Final 5035 1 91 1 -25 0 0 632 5035_1 -PRL_Young_Final 5035 1 92 2 -25 1 1 334 5035_1 -PRL_Young_Final 5035 1 
93 2 25 0 1 546 5035_1
-PRL_Young_Final 5035 1 94 2 25 0 1 649 5035_1
[... deleted data rows for subject 5035, blocks 1-3 (200 trials per block) ...]
-PRL_Young_Final 5035 3 200 1 25 0 1 272 5035_3
diff --git a/R/inst/extdata/pst_exampleData.txt b/R/inst/extdata/pst_exampleData.txt
deleted file mode 100644
index 76f91700..00000000
--- a/R/inst/extdata/pst_exampleData.txt
+++ /dev/null
@@ -1,1021 +0,0 @@
-subjID type choice reward
-1 12 0 0
-1 56 1 0
-1 34 0 0
[... deleted data rows for subjects 1-5 (1,021 deleted lines in total) ...]
diff --git a/R/inst/extdata/ra_data_attend.txt b/R/inst/extdata/ra_data_attend.txt
deleted file mode 100644
index 131612dd..00000000
--- a/R/inst/extdata/ra_data_attend.txt
+++ /dev/null
@@ -1,4192 +0,0 @@
-gain loss cert gamble outcome cond subjID
-2 -1 0 1 2 0 1
-9 -13.5 0 0 0 0 1
-5 -6.88 0 0 0 0 1
[... deleted data rows for subjects 1-15 (4,192 deleted lines in total) ...]
-2 -2.5 0 1 2 0 16
-5 -5.63 0 0 0 0 16
-6 -7.5 0 0 0 0 16
-26 0 10 1 0 0 16
-9 -4.5 0 1 -4.5 0 16
-2 -1.25 0 1 2 0 16
-8 -3 0 1 8 0 16
-25 0 9 1 25 0 16 -4 -4.5 0 0 0 0 16 -5 -10 0 0 0 0 16 -6 -9 0 0 0 0 16 -10 -6.25 0 1 10 0 16 -4 -4 0 0 0 0 16 -12 -3 0 1 -3 0 16 -5 -5 0 0 0 0 16 -12 0 5 1 12 0 16 -6 -9.75 0 0 0 0 16 -19 0 8 1 19 0 16 -4 -7.5 0 0 0 0 16 -12 -9 0 0 0 0 16 -4 -6.5 0 0 0 0 16 -9 -5.63 0 1 -5.63 0 16 -9 -18 0 1 -18 0 16 -10 -11.25 0 1 -11.25 0 16 -10 -13.75 0 1 -13.75 0 16 -6 -12 0 0 0 0 16 -10 -12.5 0 0 0 0 16 -4 -7 0 0 0 0 16 -10 -7.5 0 1 -7.5 0 16 -4 -8 0 0 0 0 16 -8 -11 0 1 -11 0 16 -12 0 4 1 12 0 16 -9 -3.38 0 1 -3.38 0 16 -10 -18.75 0 0 0 0 16 -2 -3.5 0 0 0 0 16 -2 -1 0 1 2 0 16 -2 -3.25 0 1 -3.25 0 16 -2 0 1 1 0 0 16 -7 0 3 1 7 0 16 -8 0 3 0 3 0 16 -12 -6 0 1 -6 0 16 -2 -0.5 0 1 2 0 16 -9 -7.88 0 0 0 0 16 -8 -15 0 0 0 0 16 -2 -1.5 0 1 2 0 16 -12 -22.5 0 0 0 0 16 -8 -7 0 1 -7 0 16 -4 -5.5 0 1 -5.5 0 16 -10 -8.75 0 1 -8.75 0 16 -8 -9 0 0 0 0 16 -2 -4 0 0 0 0 16 -4 0 2 1 4 0 16 -8 -8 0 0 0 0 16 -9 -13.5 0 0 0 0 16 -9 -9 0 0 0 0 16 -6 -3.75 0 1 -3.75 0 16 -13 0 6 1 13 0 16 -5 -1.88 0 1 5 0 16 -6 -6 0 0 0 0 16 -5 -6.88 0 0 0 0 16 -8 -16 0 0 0 0 16 -12 -7.5 0 1 -7.5 0 16 -5 -1.25 0 1 -1.25 0 16 -9 -14.63 0 0 0 0 16 -8 -4 0 1 8 0 16 -10 -17.5 0 0 0 0 16 -5 -3.75 0 1 5 0 16 -6 -10.5 0 0 0 0 16 -13 0 5 1 13 0 16 -10 -16.25 0 0 0 0 16 -5 -7.5 0 1 -7.5 0 16 -2 -1.75 0 1 2 0 16 -5 -9.38 0 0 0 0 16 -2 -2.75 0 1 2 0 16 -2 -0.75 0 1 -0.75 0 16 -5 -8.13 0 1 5 0 16 -9 -11.25 0 1 9 0 16 -8 -13 0 0 0 0 16 -9 -16.88 0 1 9 0 16 -2 -2 0 1 2 0 16 -12 -18 0 1 -18 0 16 -8 -2 0 1 -2 0 16 -2 -3 0 0 0 0 16 -6 -4.5 0 1 6 0 16 -5 0 2 1 5 0 16 -12 -19.5 0 1 12 0 16 -9 -15.75 0 1 -15.75 0 16 -8 -6 0 1 -6 0 16 -10 -2.5 0 1 -2.5 0 16 -9 -6.75 0 1 -6.75 0 16 -6 -6.75 0 0 0 0 16 -2 -3.75 0 1 -3.75 0 16 -10 -5 0 1 -5 0 16 -2 -2.25 0 1 2 0 16 -26 0 12 1 26 0 16 -12 -13.5 0 1 12 0 16 -8 -5 0 1 8 0 16 -6 -3 0 1 6 0 16 -10 -3.75 0 1 10 0 16 -12 -10.5 0 1 12 0 16 -4 -5 0 1 4 0 16 -9 -2.25 0 1 -2.25 0 16 -4 -3 0 1 4 0 16 -9 -10.13 0 1 -10.13 0 16 -28 0 13 1 28 0 16 -22 0 10 0 10 0 16 -10 -10 0 0 0 0 16 -4 -1 0 1 -1 0 16 -4 -2.5 0 1 -2.5 0 16 -12 -24 0 0 0 0 16 -8 -12 0 0 0 0 16 -3 0 1 1 0 0 16 -9 -12.38 0 0 0 0 16 -23 0 10 1 0 0 16 -4 -3.5 0 1 4 0 16 -4 -1.5 0 1 4 0 16 -8 -10 0 1 -10 0 16 -8 -14 0 0 0 0 16 -4 -6 0 1 4 0 16 -25 0 10 1 0 0 16 -12 -16.5 0 0 0 0 16 -12 -12 0 1 12 0 16 -5 -2.5 0 1 5 0 16 -5 -8.75 0 1 5 0 16 -12 -4.5 0 1 -4.5 0 16 -12 -15 0 1 -15 0 16 -5 -3.13 0 1 5 0 16 -12 -21 0 0 0 0 16 -5 -4.38 0 1 -4.38 0 16 -6 -11.25 0 0 0 0 16 -30 0 12 1 0 0 16 -6 -1.5 0 1 6 0 16 -12 0 6 1 12 0 16 -4 -2 0 1 4 0 16 -10 -15 0 0 0 0 16 -6 -2.25 0 1 6 0 16 -10 -20 0 0 0 0 16 -6 -5.25 0 1 6 0 16 -5 -6.25 0 1 5 0 16 -6 -8.25 0 1 -8.25 0 16 -4 -4.5 0 0 0 0 17 -10 -12.5 0 0 0 0 17 -26 0 12 0 12 0 17 -6 -7.5 0 0 0 0 17 -4 -6.5 0 0 0 0 17 -12 -4.5 0 0 0 0 17 -5 -2.5 0 0 0 0 17 -6 -12 0 0 0 0 17 -9 -14.63 0 0 0 0 17 -6 -6 0 0 0 0 17 -22 0 10 1 22 0 17 -2 -1 0 0 0 0 17 -8 -3 0 0 0 0 17 -12 -9 0 0 0 0 17 -5 -3.75 0 0 0 0 17 -6 -3 0 0 0 0 17 -4 0 2 1 4 0 17 -28 0 13 1 28 0 17 -12 -15 0 0 0 0 17 -9 -11.25 0 0 0 0 17 -12 -10.5 0 0 0 0 17 -5 -1.88 0 0 0 0 17 -2 -2.75 0 0 0 0 17 -4 -7 0 0 0 0 17 -8 -4 0 0 0 0 17 -2 0 1 1 0 0 17 -2 -3.5 0 0 0 0 17 -2 -1.75 0 0 0 0 17 -5 -5 0 0 0 0 17 -12 -12 0 0 0 0 17 -12 0 6 1 12 0 17 -6 -4.5 0 0 0 0 17 -30 0 12 1 0 0 17 -12 -16.5 0 0 0 0 17 -6 -9.75 0 0 0 0 17 -12 -22.5 0 0 0 0 17 -6 -9 0 0 0 0 17 -5 -3.13 0 0 0 0 17 -5 -9.38 0 0 0 0 17 -12 -7.5 0 1 -7.5 0 17 -5 0 2 1 5 0 17 -10 -15 0 0 0 0 17 -12 -3 0 0 0 0 17 -13 0 6 1 13 0 17 -9 -16.88 0 0 0 0 17 -6 -11.25 0 0 0 0 17 -8 -5 0 0 0 0 17 -8 -14 0 0 0 0 17 -12 -24 0 0 0 0 17 
-12 0 5 1 12 0 17 -9 -13.5 0 0 0 0 17 -6 -1.5 0 1 6 0 17 -2 -3 0 0 0 0 17 -10 -2.5 0 1 -2.5 0 17 -2 -0.75 0 0 0 0 17 -6 -10.5 0 0 0 0 17 -2 -0.5 0 0 0 0 17 -10 -10 0 0 0 0 17 -8 -10 0 0 0 0 17 -9 -12.38 0 0 0 0 17 -4 -6 0 0 0 0 17 -6 -2.25 0 0 0 0 17 -9 -15.75 0 0 0 0 17 -12 -13.5 0 0 0 0 17 -8 -6 0 0 0 0 17 -10 -18.75 0 0 0 0 17 -4 -2 0 0 0 0 17 -5 -1.25 0 1 -1.25 0 17 -6 -5.25 0 0 0 0 17 -4 -8 0 0 0 0 17 -25 0 9 1 25 0 17 -2 -3.25 0 0 0 0 17 -10 -11.25 0 0 0 0 17 -4 -7.5 0 0 0 0 17 -9 -5.63 0 0 0 0 17 -6 -6.75 0 0 0 0 17 -8 -2 0 0 0 0 17 -5 -6.25 0 0 0 0 17 -23 0 10 1 0 0 17 -8 -13 0 0 0 0 17 -10 -13.75 0 0 0 0 17 -5 -10 0 0 0 0 17 -12 0 4 1 12 0 17 -2 -2.5 0 0 0 0 17 -19 0 8 1 19 0 17 -4 -4 0 0 0 0 17 -4 -1 0 1 -1 0 17 -4 -2.5 0 0 0 0 17 -5 -8.13 0 0 0 0 17 -10 -3.75 0 0 0 0 17 -5 -8.75 0 0 0 0 17 -10 -7.5 0 0 0 0 17 -10 -5 0 0 0 0 17 -10 -20 0 0 0 0 17 -13 0 5 1 13 0 17 -8 -9 0 0 0 0 17 -8 -12 0 0 0 0 17 -10 -16.25 0 0 0 0 17 -5 -6.88 0 0 0 0 17 -4 -5.5 0 0 0 0 17 -5 -7.5 0 0 0 0 17 -9 -10.13 0 0 0 0 17 -6 -8.25 0 0 0 0 17 -26 0 10 1 0 0 17 -4 -5 0 0 0 0 17 -2 -2.25 0 0 0 0 17 -6 -3.75 0 0 0 0 17 -8 -8 0 0 0 0 17 -9 -6.75 0 0 0 0 17 -8 -15 0 0 0 0 17 -12 -6 0 0 0 0 17 -25 0 10 1 0 0 17 -12 -19.5 0 0 0 0 17 -9 -7.88 0 0 0 0 17 -4 -1.5 0 0 0 0 17 -8 -7 0 0 0 0 17 -12 -18 0 0 0 0 17 -2 -2 0 0 0 0 17 -9 -18 0 0 0 0 17 -2 -1.25 0 0 0 0 17 -8 -16 0 0 0 0 17 -5 -4.38 0 0 0 0 17 -2 -4 0 0 0 0 17 -5 -5.63 0 0 0 0 17 -8 0 3 1 0 0 17 -10 -17.5 0 0 0 0 17 -8 -11 0 0 0 0 17 -2 -1.5 0 0 0 0 17 -4 -3.5 0 0 0 0 17 -2 -3.75 0 0 0 0 17 -3 0 1 1 0 0 17 -12 -21 0 0 0 0 17 -10 -8.75 0 0 0 0 17 -9 -9 0 0 0 0 17 -4 -3 0 0 0 0 17 -7 0 3 1 7 0 17 -9 -3.38 0 0 0 0 17 -9 -2.25 0 0 0 0 17 -10 -6.25 0 0 0 0 17 -9 -4.5 0 0 0 0 17 -2 -1 0 1 2 0 18 -9 -13.5 0 0 0 0 18 -5 -6.88 0 1 5 0 18 -10 -10 0 0 0 0 18 -6 -2.25 0 1 6 0 18 -6 -6.75 0 0 0 0 18 -9 -4.5 0 1 -4.5 0 18 -10 -13.75 0 1 -13.75 0 18 -6 -8.25 0 1 -8.25 0 18 -5 -10 0 0 0 0 18 -10 -6.25 0 1 10 0 18 -12 -3 0 1 -3 0 18 -12 -9 0 1 12 0 18 -8 -7 0 0 0 0 18 -6 -12 0 0 0 0 18 -8 -2 0 1 -2 0 18 -12 -6 0 1 -6 0 18 -3 0 1 1 0 0 18 -10 -20 0 0 0 0 18 -5 -3.75 0 1 5 0 18 -2 -1.75 0 1 2 0 18 -6 -3.75 0 1 -3.75 0 18 -9 -12.38 0 0 0 0 18 -5 -6.25 0 0 0 0 18 -12 0 4 0 4 0 18 -2 -1.5 0 1 2 0 18 -6 -5.25 0 1 6 0 18 -10 -18.75 0 0 0 0 18 -6 -6 0 0 0 0 18 -12 0 5 0 5 0 18 -4 -2 0 1 4 0 18 -2 -4 0 0 0 0 18 -5 -2.5 0 1 5 0 18 -2 -3.75 0 0 0 0 18 -9 -15.75 0 0 0 0 18 -8 -4 0 1 8 0 18 -26 0 12 0 12 0 18 -6 -1.5 0 1 6 0 18 -4 -6 0 0 0 0 18 -10 -2.5 0 1 -2.5 0 18 -8 -12 0 0 0 0 18 -2 -3.5 0 1 -3.5 0 18 -5 -5.63 0 0 0 0 18 -12 -24 0 0 0 0 18 -25 0 10 0 10 0 18 -4 -6.5 0 0 0 0 18 -5 -9.38 0 0 0 0 18 -5 -7.5 0 0 0 0 18 -4 -4 0 0 0 0 18 -6 -10.5 0 0 0 0 18 -13 0 6 0 6 0 18 -12 -22.5 0 0 0 0 18 -4 -7.5 0 0 0 0 18 -5 0 2 1 5 0 18 -10 -15 0 0 0 0 18 -9 -16.88 0 0 0 0 18 -2 -2.5 0 1 2 0 18 -10 -16.25 0 0 0 0 18 -6 -11.25 0 0 0 0 18 -4 -1.5 0 1 4 0 18 -5 -3.13 0 1 5 0 18 -6 -9 0 0 0 0 18 -12 -19.5 0 0 0 0 18 -10 -12.5 0 0 0 0 18 -8 -16 0 0 0 0 18 -4 0 2 1 4 0 18 -12 -7.5 0 1 -7.5 0 18 -12 -13.5 0 0 0 0 18 -22 0 10 0 10 0 18 -12 -21 0 0 0 0 18 -7 0 3 1 7 0 18 -10 -8.75 0 1 -8.75 0 18 -2 -1.25 0 1 2 0 18 -9 -6.75 0 1 -6.75 0 18 -12 0 6 0 6 0 18 -28 0 13 0 13 0 18 -9 -10.13 0 0 0 0 18 -2 -0.5 0 1 2 0 18 -25 0 9 1 25 0 18 -6 -7.5 0 0 0 0 18 -4 -3 0 1 4 0 18 -10 -3.75 0 1 10 0 18 -12 -4.5 0 1 -4.5 0 18 -12 -15 0 0 0 0 18 -6 -3 0 1 6 0 18 -9 -14.63 0 0 0 0 18 -5 -1.25 0 1 -1.25 0 18 -8 -11 0 0 0 0 18 -10 -17.5 0 0 0 0 18 -8 -10 0 0 0 0 18 -9 -9 0 1 -9 0 18 -10 -11.25 0 0 0 0 18 -12 -12 0 1 12 0 18 
-8 -14 0 0 0 0 18 -12 -16.5 0 0 0 0 18 -4 -7 0 0 0 0 18 -4 -1 0 1 -1 0 18 -5 -1.88 0 1 5 0 18 -8 0 3 1 0 0 18 -2 -3.25 0 0 0 0 18 -5 -5 0 0 0 0 18 -26 0 10 0 10 0 18 -12 -10.5 0 1 12 0 18 -2 0 1 1 0 0 18 -6 -9.75 0 0 0 0 18 -8 -3 0 1 8 0 18 -13 0 5 0 5 0 18 -10 -7.5 0 0 0 0 18 -8 -13 0 0 0 0 18 -9 -3.38 0 1 -3.38 0 18 -8 -15 0 0 0 0 18 -30 0 12 1 0 0 18 -8 -8 0 0 0 0 18 -8 -5 0 1 8 0 18 -12 -18 0 0 0 0 18 -10 -5 0 1 -5 0 18 -9 -11.25 0 0 0 0 18 -9 -7.88 0 1 -7.88 0 18 -8 -6 0 1 -6 0 18 -6 -4.5 0 1 6 0 18 -8 -9 0 0 0 0 18 -4 -5.5 0 0 0 0 18 -4 -5 0 0 0 0 18 -9 -2.25 0 1 -2.25 0 18 -23 0 10 0 10 0 18 -9 -5.63 0 1 -5.63 0 18 -4 -4.5 0 0 0 0 18 -4 -8 0 0 0 0 18 -19 0 8 0 8 0 18 -2 -2 0 0 0 0 18 -5 -8.13 0 0 0 0 18 -5 -4.38 0 1 -4.38 0 18 -2 -2.25 0 1 2 0 18 -2 -0.75 0 1 -0.75 0 18 -2 -2.75 0 0 0 0 18 -5 -8.75 0 0 0 0 18 -9 -18 0 0 0 0 18 -4 -3.5 0 0 0 0 18 -4 -2.5 0 1 -2.5 0 18 -9 -6.75 0 1 -6.75 0 19 -6 -6.75 0 0 0 0 19 -6 -3 0 1 6 0 19 -2 -1.5 0 0 0 0 19 -4 -3 0 0 0 0 19 -5 -6.88 0 0 0 0 19 -12 -9 0 1 12 0 19 -4 -5 0 0 0 0 19 -5 -7.5 0 0 0 0 19 -4 -4 0 1 -4 0 19 -9 -5.63 0 1 -5.63 0 19 -9 -14.63 0 0 0 0 19 -5 -9.38 0 0 0 0 19 -6 -4.5 0 1 6 0 19 -8 -7 0 1 -7 0 19 -10 -16.25 0 0 0 0 19 -10 -17.5 0 0 0 0 19 -9 -16.88 0 0 0 0 19 -8 -5 0 1 8 0 19 -6 -1.5 0 1 6 0 19 -12 -18 0 0 0 0 19 -5 -6.25 0 0 0 0 19 -8 -4 0 1 8 0 19 -9 -15.75 0 0 0 0 19 -9 -13.5 0 0 0 0 19 -5 -8.13 0 0 0 0 19 -2 0 1 0 1 0 19 -2 -3.75 0 0 0 0 19 -4 -6.5 0 0 0 0 19 -10 -5 0 1 -5 0 19 -12 -22.5 0 0 0 0 19 -2 -1 0 1 2 0 19 -13 0 6 1 13 0 19 -5 -2.5 0 1 5 0 19 -2 -0.5 0 1 2 0 19 -2 -3.25 0 0 0 0 19 -30 0 12 1 0 0 19 -8 -8 0 0 0 0 19 -4 -5.5 0 0 0 0 19 -23 0 10 1 0 0 19 -4 -3.5 0 0 0 0 19 -5 0 2 1 5 0 19 -8 0 3 1 0 0 19 -9 -10.13 0 0 0 0 19 -8 -16 0 0 0 0 19 -12 -24 0 0 0 0 19 -9 -3.38 0 1 -3.38 0 19 -6 -5.25 0 0 0 0 19 -2 -4 0 0 0 0 19 -4 -1 0 1 -1 0 19 -6 -11.25 0 0 0 0 19 -5 -4.38 0 0 0 0 19 -6 -2.25 0 1 6 0 19 -12 -10.5 0 1 12 0 19 -9 -18 0 0 0 0 19 -10 -20 0 0 0 0 19 -4 -4.5 0 0 0 0 19 -9 -2.25 0 1 -2.25 0 19 -4 -6 0 0 0 0 19 -8 -10 0 1 -10 0 19 -5 -5 0 0 0 0 19 -5 -8.75 0 0 0 0 19 -8 -6 0 1 -6 0 19 -10 -13.75 0 0 0 0 19 -2 -2.5 0 0 0 0 19 -8 -11 0 0 0 0 19 -4 -2 0 1 4 0 19 -10 -7.5 0 1 -7.5 0 19 -22 0 10 1 22 0 19 -25 0 10 1 0 0 19 -6 -9.75 0 0 0 0 19 -12 0 5 1 12 0 19 -4 -2.5 0 0 0 0 19 -8 -3 0 1 8 0 19 -10 -11.25 0 0 0 0 19 -5 -10 0 0 0 0 19 -10 -15 0 0 0 0 19 -2 -3.5 0 0 0 0 19 -12 0 4 1 12 0 19 -13 0 5 1 13 0 19 -5 -3.75 0 0 0 0 19 -26 0 12 1 26 0 19 -5 -5.63 0 0 0 0 19 -8 -2 0 1 -2 0 19 -2 -3 0 0 0 0 19 -6 -9 0 0 0 0 19 -9 -7.88 0 0 0 0 19 -8 -14 0 0 0 0 19 -28 0 13 1 28 0 19 -9 -12.38 0 0 0 0 19 -8 -15 0 0 0 0 19 -10 -2.5 0 1 -2.5 0 19 -4 0 2 1 4 0 19 -12 -6 0 1 -6 0 19 -12 -16.5 0 0 0 0 19 -4 -7.5 0 0 0 0 19 -10 -8.75 0 0 0 0 19 -10 -18.75 0 0 0 0 19 -26 0 10 1 0 0 19 -12 -21 0 0 0 0 19 -2 -0.75 0 1 -0.75 0 19 -9 -9 0 0 0 0 19 -10 -6.25 0 1 10 0 19 -8 -12 0 0 0 0 19 -3 0 1 1 0 0 19 -5 -1.88 0 1 5 0 19 -6 -7.5 0 0 0 0 19 -12 -13.5 0 0 0 0 19 -4 -7 0 0 0 0 19 -6 -8.25 0 0 0 0 19 -6 -12 0 0 0 0 19 -6 -10.5 0 0 0 0 19 -4 -8 0 0 0 0 19 -6 -6 0 0 0 0 19 -12 0 6 0 6 0 19 -12 -19.5 0 0 0 0 19 -19 0 8 1 19 0 19 -12 -15 0 0 0 0 19 -2 -1.75 0 1 2 0 19 -6 -3.75 0 0 0 0 19 -2 -1.25 0 0 0 0 19 -5 -1.25 0 1 -1.25 0 19 -4 -1.5 0 1 4 0 19 -8 -13 0 0 0 0 19 -12 -7.5 0 1 -7.5 0 19 -12 -3 0 1 -3 0 19 -2 -2.75 0 0 0 0 19 -7 0 3 1 7 0 19 -25 0 9 1 25 0 19 -2 -2 0 0 0 0 19 -12 -4.5 0 1 -4.5 0 19 -12 -12 0 0 0 0 19 -5 -3.13 0 1 5 0 19 -9 -11.25 0 0 0 0 19 -8 -9 0 0 0 0 19 -2 -2.25 0 0 0 0 19 -9 -4.5 0 1 -4.5 0 19 -10 -3.75 0 1 10 0 19 -10 -10 
0 0 0 0 19 -10 -12.5 0 0 0 0 19 -2 -2.5 0 1 2 0 20 -5 -5.63 0 1 -5.63 0 20 -6 -7.5 0 0 0 0 20 -26 0 10 0 10 0 20 -9 -4.5 0 1 -4.5 0 20 -2 -1.25 0 1 2 0 20 -8 -3 0 1 8 0 20 -25 0 9 0 9 0 20 -4 -4.5 0 1 -4.5 0 20 -5 -10 0 0 0 0 20 -6 -9 0 1 -9 0 20 -10 -6.25 0 1 10 0 20 -4 -4 0 1 -4 0 20 -12 -3 0 1 -3 0 20 -5 -5 0 0 0 0 20 -12 0 5 1 12 0 20 -6 -9.75 0 0 0 0 20 -19 0 8 0 8 0 20 -4 -7.5 0 0 0 0 20 -12 -9 0 1 12 0 20 -4 -6.5 0 0 0 0 20 -9 -5.63 0 1 -5.63 0 20 -9 -18 0 1 -18 0 20 -10 -11.25 0 1 -11.25 0 20 -10 -13.75 0 0 0 0 20 -6 -12 0 0 0 0 20 -10 -12.5 0 1 -12.5 0 20 -4 -7 0 0 0 0 20 -10 -7.5 0 1 -7.5 0 20 -4 -8 0 0 0 0 20 -8 -11 0 1 -11 0 20 -12 0 4 0 4 0 20 -9 -3.38 0 1 -3.38 0 20 -10 -18.75 0 0 0 0 20 -2 -3.5 0 1 -3.5 0 20 -2 -1 0 1 2 0 20 -2 -3.25 0 0 0 0 20 -2 0 1 0 1 0 20 -7 0 3 1 7 0 20 -8 0 3 0 3 0 20 -12 -6 0 1 -6 0 20 -2 -0.5 0 1 2 0 20 -9 -7.88 0 0 0 0 20 -8 -15 0 0 0 0 20 -2 -1.5 0 0 0 0 20 -12 -22.5 0 1 -22.5 0 20 -8 -7 0 1 -7 0 20 -4 -5.5 0 0 0 0 20 -10 -8.75 0 0 0 0 20 -8 -9 0 1 -9 0 20 -2 -4 0 0 0 0 20 -4 0 2 0 2 0 20 -8 -8 0 1 8 0 20 -9 -13.5 0 0 0 0 20 -9 -9 0 0 0 0 20 -6 -3.75 0 1 -3.75 0 20 -13 0 6 0 6 0 20 -5 -1.88 0 1 5 0 20 -6 -6 0 1 -6 0 20 -5 -6.88 0 0 0 0 20 -8 -16 0 0 0 0 20 -12 -7.5 0 1 -7.5 0 20 -5 -1.25 0 1 -1.25 0 20 -9 -14.63 0 0 0 0 20 -8 -4 0 1 8 0 20 -10 -17.5 0 0 0 0 20 -5 -3.75 0 0 0 0 20 -6 -10.5 0 0 0 0 20 -13 0 5 0 5 0 20 -10 -16.25 0 0 0 0 20 -5 -7.5 0 0 0 0 20 -2 -1.75 0 0 0 0 20 -5 -9.38 0 0 0 0 20 -2 -2.75 0 1 2 0 20 -2 -0.75 0 1 -0.75 0 20 -5 -8.13 0 0 0 0 20 -9 -11.25 0 0 0 0 20 -8 -13 0 0 0 0 20 -9 -16.88 0 0 0 0 20 -2 -2 0 1 2 0 20 -12 -18 0 0 0 0 20 -8 -2 0 1 -2 0 20 -2 -3 0 1 -3 0 20 -6 -4.5 0 1 6 0 20 -5 0 2 0 2 0 20 -12 -19.5 0 0 0 0 20 -9 -15.75 0 0 0 0 20 -8 -6 0 1 -6 0 20 -10 -2.5 0 1 -2.5 0 20 -9 -6.75 0 0 0 0 20 -6 -6.75 0 0 0 0 20 -2 -3.75 0 0 0 0 20 -10 -5 0 1 -5 0 20 -2 -2.25 0 1 2 0 20 -26 0 12 0 12 0 20 -12 -13.5 0 0 0 0 20 -8 -5 0 0 0 0 20 -6 -3 0 1 6 0 20 -10 -3.75 0 1 10 0 20 -12 -10.5 0 0 0 0 20 -4 -5 0 0 0 0 20 -9 -2.25 0 1 -2.25 0 20 -4 -3 0 0 0 0 20 -9 -10.13 0 0 0 0 20 -28 0 13 0 13 0 20 -22 0 10 1 22 0 20 -10 -10 0 0 0 0 20 -4 -1 0 1 -1 0 20 -4 -2.5 0 1 -2.5 0 20 -12 -24 0 1 -24 0 20 -8 -12 0 1 -12 0 20 -3 0 1 0 1 0 20 -9 -12.38 0 0 0 0 20 -23 0 10 0 10 0 20 -4 -3.5 0 0 0 0 20 -4 -1.5 0 1 4 0 20 -8 -10 0 0 0 0 20 -8 -14 0 0 0 0 20 -4 -6 0 0 0 0 20 -25 0 10 0 10 0 20 -12 -16.5 0 0 0 0 20 -12 -12 0 1 12 0 20 -5 -2.5 0 0 0 0 20 -5 -8.75 0 0 0 0 20 -12 -4.5 0 1 -4.5 0 20 -12 -15 0 0 0 0 20 -5 -3.13 0 0 0 0 20 -12 -21 0 0 0 0 20 -5 -4.38 0 0 0 0 20 -6 -11.25 0 0 0 0 20 -30 0 12 0 12 0 20 -6 -1.5 0 1 6 0 20 -12 0 6 0 6 0 20 -4 -2 0 1 4 0 20 -10 -15 0 0 0 0 20 -6 -2.25 0 1 6 0 20 -10 -20 0 0 0 0 20 -6 -5.25 0 0 0 0 20 -5 -6.25 0 0 0 0 20 -6 -8.25 0 0 0 0 20 -4 -4.5 0 0 0 0 21 -10 -12.5 0 0 0 0 21 -26 0 12 1 26 0 21 -6 -7.5 0 0 0 0 21 -4 -6.5 0 0 0 0 21 -12 -4.5 0 1 -4.5 0 21 -5 -2.5 0 1 5 0 21 -6 -12 0 0 0 0 21 -9 -14.63 0 0 0 0 21 -6 -6 0 0 0 0 21 -22 0 10 1 22 0 21 -2 -1 0 1 2 0 21 -8 -3 0 1 8 0 21 -12 -9 0 0 0 0 21 -5 -3.75 0 0 0 0 21 -6 -3 0 1 6 0 21 -4 0 2 1 4 0 21 -28 0 13 1 28 0 21 -12 -15 0 0 0 0 21 -9 -11.25 0 0 0 0 21 -12 -10.5 0 0 0 0 21 -5 -1.88 0 1 5 0 21 -2 -2.75 0 0 0 0 21 -4 -7 0 0 0 0 21 -8 -4 0 1 8 0 21 -2 0 1 0 1 0 21 -2 -3.5 0 0 0 0 21 -2 -1.75 0 0 0 0 21 -5 -5 0 0 0 0 21 -12 -12 0 0 0 0 21 -12 0 6 1 12 0 21 -6 -4.5 0 0 0 0 21 -30 0 12 1 0 0 21 -12 -16.5 0 0 0 0 21 -6 -9.75 0 0 0 0 21 -12 -22.5 0 0 0 0 21 -6 -9 0 0 0 0 21 -5 -3.13 0 0 0 0 21 -5 -9.38 0 0 0 0 21 -12 -7.5 0 0 0 0 21 -5 0 2 1 5 0 21 -10 -15 0 0 0 
0 21 -12 -3 0 1 -3 0 21 -13 0 6 1 13 0 21 -9 -16.88 0 0 0 0 21 -6 -11.25 0 0 0 0 21 -8 -5 0 0 0 0 21 -8 -14 0 0 0 0 21 -12 -24 0 0 0 0 21 -12 0 5 1 12 0 21 -9 -13.5 0 0 0 0 21 -6 -1.5 0 1 6 0 21 -2 -3 0 0 0 0 21 -10 -2.5 0 1 -2.5 0 21 -2 -0.75 0 1 -0.75 0 21 -6 -10.5 0 0 0 0 21 -2 -0.5 0 1 2 0 21 -10 -10 0 0 0 0 21 -8 -10 0 0 0 0 21 -9 -12.38 0 0 0 0 21 -4 -6 0 0 0 0 21 -6 -2.25 0 1 6 0 21 -9 -15.75 0 0 0 0 21 -12 -13.5 0 0 0 0 21 -8 -6 0 0 0 0 21 -10 -18.75 0 0 0 0 21 -4 -2 0 1 4 0 21 -5 -1.25 0 1 -1.25 0 21 -6 -5.25 0 0 0 0 21 -4 -8 0 0 0 0 21 -25 0 9 1 25 0 21 -2 -3.25 0 0 0 0 21 -10 -11.25 0 0 0 0 21 -4 -7.5 0 0 0 0 21 -9 -5.63 0 0 0 0 21 -6 -6.75 0 0 0 0 21 -8 -2 0 1 -2 0 21 -5 -6.25 0 0 0 0 21 -23 0 10 1 0 0 21 -8 -13 0 0 0 0 21 -10 -13.75 0 0 0 0 21 -5 -10 0 0 0 0 21 -12 0 4 1 12 0 21 -2 -2.5 0 0 0 0 21 -19 0 8 1 19 0 21 -4 -4 0 0 0 0 21 -4 -1 0 1 -1 0 21 -4 -2.5 0 1 -2.5 0 21 -5 -8.13 0 0 0 0 21 -10 -3.75 0 1 10 0 21 -5 -8.75 0 0 0 0 21 -10 -7.5 0 0 0 0 21 -10 -5 0 0 0 0 21 -10 -20 0 0 0 0 21 -13 0 5 0 5 0 21 -8 -9 0 0 0 0 21 -8 -12 0 0 0 0 21 -10 -16.25 0 0 0 0 21 -5 -6.88 0 0 0 0 21 -4 -5.5 0 0 0 0 21 -5 -7.5 0 0 0 0 21 -9 -10.13 0 0 0 0 21 -6 -8.25 0 0 0 0 21 -26 0 10 0 10 0 21 -4 -5 0 0 0 0 21 -2 -2.25 0 0 0 0 21 -6 -3.75 0 1 -3.75 0 21 -8 -8 0 0 0 0 21 -9 -6.75 0 0 0 0 21 -8 -15 0 0 0 0 21 -12 -6 0 1 -6 0 21 -25 0 10 0 10 0 21 -12 -19.5 0 0 0 0 21 -9 -7.88 0 0 0 0 21 -4 -1.5 0 1 4 0 21 -8 -7 0 0 0 0 21 -12 -18 0 0 0 0 21 -2 -2 0 0 0 0 21 -9 -18 0 0 0 0 21 -2 -1.25 0 0 0 0 21 -8 -16 0 0 0 0 21 -5 -4.38 0 0 0 0 21 -2 -4 0 0 0 0 21 -5 -5.63 0 0 0 0 21 -8 0 3 1 0 0 21 -10 -17.5 0 0 0 0 21 -8 -11 0 0 0 0 21 -2 -1.5 0 0 0 0 21 -4 -3.5 0 0 0 0 21 -2 -3.75 0 0 0 0 21 -3 0 1 1 0 0 21 -12 -21 0 0 0 0 21 -10 -8.75 0 0 0 0 21 -9 -9 0 0 0 0 21 -4 -3 0 0 0 0 21 -7 0 3 1 7 0 21 -9 -3.38 0 1 -3.38 0 21 -9 -2.25 0 1 -2.25 0 21 -10 -6.25 0 0 0 0 21 -9 -4.5 0 0 0 0 21 -2 -1 0 0 0 0 22 -9 -13.5 0 0 0 0 22 -5 -6.88 0 0 0 0 22 -10 -10 0 0 0 0 22 -6 -2.25 0 1 6 0 22 -6 -6.75 0 0 0 0 22 -9 -4.5 0 1 -4.5 0 22 -10 -13.75 0 0 0 0 22 -6 -8.25 0 0 0 0 22 -5 -10 0 0 0 0 22 -10 -6.25 0 1 10 0 22 -12 -3 0 1 -3 0 22 -12 -9 0 0 0 0 22 -8 -7 0 0 0 0 22 -6 -12 0 0 0 0 22 -8 -2 0 1 -2 0 22 -12 -6 0 1 -6 0 22 -3 0 1 1 0 0 22 -10 -20 0 0 0 0 22 -5 -3.75 0 0 0 0 22 -2 -1.75 0 0 0 0 22 -6 -3.75 0 0 0 0 22 -9 -12.38 0 0 0 0 22 -5 -6.25 0 0 0 0 22 -12 0 4 1 12 0 22 -2 -1.5 0 0 0 0 22 -6 -5.25 0 0 0 0 22 -10 -18.75 0 0 0 0 22 -6 -6 0 0 0 0 22 -12 0 5 1 12 0 22 -4 -2 0 0 0 0 22 -2 -4 0 0 0 0 22 -5 -2.5 0 0 0 0 22 -2 -3.75 0 0 0 0 22 -9 -15.75 0 0 0 0 22 -8 -4 0 0 0 0 22 -26 0 12 1 26 0 22 -6 -1.5 0 0 0 0 22 -4 -6 0 0 0 0 22 -10 -2.5 0 1 -2.5 0 22 -8 -12 0 0 0 0 22 -2 -3.5 0 0 0 0 22 -5 -5.63 0 1 -5.63 0 22 -12 -24 0 0 0 0 22 -25 0 10 1 0 0 22 -4 -6.5 0 0 0 0 22 -5 -9.38 0 0 0 0 22 -5 -7.5 0 0 0 0 22 -4 -4 0 1 -4 0 22 -6 -10.5 0 0 0 0 22 -13 0 6 1 13 0 22 -12 -22.5 0 0 0 0 22 -4 -7.5 0 0 0 0 22 -5 0 2 1 5 0 22 -10 -15 0 0 0 0 22 -9 -16.88 0 0 0 0 22 -2 -2.5 0 0 0 0 22 -10 -16.25 0 0 0 0 22 -6 -11.25 0 0 0 0 22 -4 -1.5 0 0 0 0 22 -5 -3.13 0 0 0 0 22 -6 -9 0 0 0 0 22 -12 -19.5 0 0 0 0 22 -10 -12.5 0 0 0 0 22 -2 -3 0 0 0 0 22 -8 -16 0 0 0 0 22 -4 0 2 1 4 0 22 -12 -7.5 0 1 -7.5 0 22 -12 -13.5 0 0 0 0 22 -22 0 10 1 22 0 22 -12 -21 0 0 0 0 22 -7 0 3 1 7 0 22 -10 -8.75 0 0 0 0 22 -2 -1.25 0 0 0 0 22 -9 -6.75 0 0 0 0 22 -12 0 6 1 12 0 22 -28 0 13 0 13 0 22 -9 -10.13 0 0 0 0 22 -2 -0.5 0 1 2 0 22 -25 0 9 1 25 0 22 -6 -7.5 0 0 0 0 22 -4 -3 0 0 0 0 22 -10 -3.75 0 1 10 0 22 -12 -4.5 0 0 0 0 22 -12 -15 0 1 -15 0 22 -6 -3 0 0 0 0 22 -9 
-14.63 0 0 0 0 22 -5 -1.25 0 1 -1.25 0 22 -8 -11 0 0 0 0 22 -10 -17.5 0 0 0 0 22 -8 -10 0 0 0 0 22 -9 -9 0 0 0 0 22 -10 -11.25 0 0 0 0 22 -12 -12 0 1 12 0 22 -8 -14 0 0 0 0 22 -12 -16.5 0 0 0 0 22 -4 -7 0 0 0 0 22 -4 -1 0 1 -1 0 22 -5 -1.88 0 1 5 0 22 -8 0 3 1 0 0 22 -2 -3.25 0 0 0 0 22 -5 -5 0 0 0 0 22 -26 0 10 1 0 0 22 -12 -10.5 0 1 12 0 22 -2 0 1 0 1 0 22 -6 -9.75 0 0 0 0 22 -8 -3 0 1 8 0 22 -13 0 5 1 13 0 22 -10 -7.5 0 0 0 0 22 -8 -13 0 0 0 0 22 -9 -3.38 0 1 -3.38 0 22 -8 -15 0 0 0 0 22 -30 0 12 1 0 0 22 -8 -8 0 0 0 0 22 -8 -5 0 0 0 0 22 -12 -18 0 0 0 0 22 -10 -5 0 1 -5 0 22 -9 -11.25 0 0 0 0 22 -9 -7.88 0 0 0 0 22 -8 -6 0 0 0 0 22 -6 -4.5 0 0 0 0 22 -8 -9 0 0 0 0 22 -4 -5.5 0 0 0 0 22 -4 -5 0 0 0 0 22 -9 -2.25 0 1 -2.25 0 22 -23 0 10 1 0 0 22 -9 -5.63 0 0 0 0 22 -4 -4.5 0 0 0 0 22 -4 -8 0 0 0 0 22 -19 0 8 1 19 0 22 -2 -2 0 0 0 0 22 -5 -8.13 0 0 0 0 22 -5 -4.38 0 0 0 0 22 -2 -2.25 0 0 0 0 22 -2 -0.75 0 0 0 0 22 -2 -2.75 0 0 0 0 22 -5 -8.75 0 0 0 0 22 -9 -18 0 0 0 0 22 -4 -3.5 0 1 4 0 22 -4 -2.5 0 0 0 0 22 -9 -6.75 0 1 -6.75 0 23 -6 -6.75 0 0 0 0 23 -6 -3 0 1 6 0 23 -2 -1.5 0 1 2 0 23 -4 -3 0 1 4 0 23 -5 -6.88 0 0 0 0 23 -12 -9 0 1 12 0 23 -4 -5 0 0 0 0 23 -5 -7.5 0 0 0 0 23 -4 -4 0 1 -4 0 23 -9 -5.63 0 1 -5.63 0 23 -9 -14.63 0 0 0 0 23 -5 -9.38 0 0 0 0 23 -6 -4.5 0 1 6 0 23 -8 -7 0 0 0 0 23 -10 -16.25 0 1 10 0 23 -10 -17.5 0 1 -17.5 0 23 -9 -16.88 0 0 0 0 23 -8 -5 0 1 8 0 23 -6 -1.5 0 1 6 0 23 -12 -18 0 0 0 0 23 -5 -6.25 0 0 0 0 23 -8 -4 0 1 8 0 23 -9 -15.75 0 0 0 0 23 -9 -13.5 0 0 0 0 23 -5 -8.13 0 0 0 0 23 -2 0 1 1 0 0 23 -2 -3.75 0 1 -3.75 0 23 -4 -6.5 0 0 0 0 23 -10 -5 0 1 -5 0 23 -12 -22.5 0 0 0 0 23 -2 -1 0 1 2 0 23 -13 0 6 1 13 0 23 -5 -2.5 0 1 5 0 23 -2 -0.5 0 1 2 0 23 -2 -3.25 0 1 -3.25 0 23 -30 0 12 1 0 0 23 -8 -8 0 0 0 0 23 -4 -5.5 0 0 0 0 23 -23 0 10 1 0 0 23 -4 -3.5 0 1 4 0 23 -5 0 2 1 5 0 23 -8 0 3 1 0 0 23 -9 -10.13 0 0 0 0 23 -8 -16 0 0 0 0 23 -12 -24 0 0 0 0 23 -9 -3.38 0 1 -3.38 0 23 -6 -5.25 0 0 0 0 23 -2 -4 0 0 0 0 23 -4 -1 0 1 -1 0 23 -6 -11.25 0 0 0 0 23 -5 -4.38 0 1 -4.38 0 23 -6 -2.25 0 1 6 0 23 -12 -10.5 0 1 12 0 23 -9 -18 0 0 0 0 23 -10 -20 0 0 0 0 23 -4 -4.5 0 1 -4.5 0 23 -9 -2.25 0 1 -2.25 0 23 -4 -6 0 0 0 0 23 -8 -10 0 0 0 0 23 -5 -5 0 1 -5 0 23 -5 -8.75 0 0 0 0 23 -8 -6 0 0 0 0 23 -10 -13.75 0 0 0 0 23 -2 -2.5 0 1 2 0 23 -8 -11 0 0 0 0 23 -4 -2 0 1 4 0 23 -10 -7.5 0 0 0 0 23 -22 0 10 1 22 0 23 -25 0 10 1 0 0 23 -6 -9.75 0 0 0 0 23 -12 0 5 1 12 0 23 -4 -2.5 0 1 -2.5 0 23 -8 -3 0 1 8 0 23 -10 -11.25 0 0 0 0 23 -5 -10 0 0 0 0 23 -10 -15 0 0 0 0 23 -2 -3.5 0 1 -3.5 0 23 -12 0 4 1 12 0 23 -13 0 5 0 5 0 23 -5 -3.75 0 1 5 0 23 -26 0 12 0 12 0 23 -5 -5.63 0 0 0 0 23 -8 -2 0 1 -2 0 23 -2 -3 0 1 -3 0 23 -6 -9 0 0 0 0 23 -9 -7.88 0 0 0 0 23 -8 -14 0 0 0 0 23 -28 0 13 0 13 0 23 -9 -12.38 0 0 0 0 23 -8 -15 0 0 0 0 23 -10 -2.5 0 1 -2.5 0 23 -4 0 2 0 2 0 23 -12 -6 0 1 -6 0 23 -12 -16.5 0 0 0 0 23 -4 -7.5 0 0 0 0 23 -10 -8.75 0 0 0 0 23 -10 -18.75 0 0 0 0 23 -26 0 10 0 10 0 23 -12 -21 0 0 0 0 23 -2 -0.75 0 1 -0.75 0 23 -9 -9 0 0 0 0 23 -10 -6.25 0 0 0 0 23 -8 -12 0 0 0 0 23 -3 0 1 1 0 0 23 -5 -1.88 0 1 5 0 23 -6 -7.5 0 0 0 0 23 -12 -13.5 0 0 0 0 23 -4 -7 0 0 0 0 23 -6 -8.25 0 0 0 0 23 -6 -12 0 0 0 0 23 -6 -10.5 0 0 0 0 23 -4 -8 0 0 0 0 23 -6 -6 0 0 0 0 23 -12 0 6 0 6 0 23 -12 -19.5 0 0 0 0 23 -19 0 8 1 19 0 23 -12 -15 0 0 0 0 23 -2 -1.75 0 1 2 0 23 -6 -3.75 0 1 -3.75 0 23 -2 -1.25 0 1 2 0 23 -5 -1.25 0 1 -1.25 0 23 -4 -1.5 0 1 4 0 23 -8 -13 0 0 0 0 23 -12 -7.5 0 0 0 0 23 -12 -3 0 1 -3 0 23 -2 -2.75 0 1 2 0 23 -7 0 3 0 3 0 23 -25 0 9 0 9 0 23 -2 -2 0 1 2 0 23 -12 -4.5 0 1 
-4.5 0 23 -12 -12 0 0 0 0 23 -5 -3.13 0 1 5 0 23 -9 -11.25 0 0 0 0 23 -8 -9 0 0 0 0 23 -2 -2.25 0 1 2 0 23 -9 -4.5 0 1 -4.5 0 23 -10 -3.75 0 1 10 0 23 -10 -10 0 0 0 0 23 -10 -12.5 0 0 0 0 23 -2 -2.5 0 0 0 0 24 -5 -5.63 0 1 -5.63 0 24 -6 -7.5 0 1 -7.5 0 24 -26 0 10 1 0 0 24 -9 -4.5 0 1 -4.5 0 24 -2 -1.25 0 1 2 0 24 -8 -3 0 1 8 0 24 -25 0 9 1 25 0 24 -4 -4.5 0 1 -4.5 0 24 -5 -10 0 0 0 0 24 -6 -9 0 0 0 0 24 -10 -6.25 0 1 10 0 24 -4 -4 0 0 0 0 24 -12 -3 0 1 -3 0 24 -5 -5 0 1 -5 0 24 -12 0 5 1 12 0 24 -6 -9.75 0 0 0 0 24 -19 0 8 1 19 0 24 -4 -7.5 0 0 0 0 24 -12 -9 0 1 12 0 24 -4 -6.5 0 0 0 0 24 -9 -5.63 0 1 -5.63 0 24 -9 -18 0 0 0 0 24 -10 -11.25 0 1 -11.25 0 24 -10 -13.75 0 0 0 0 24 -6 -12 0 0 0 0 24 -10 -12.5 0 1 -12.5 0 24 -4 -7 0 0 0 0 24 -10 -7.5 0 1 -7.5 0 24 -4 -8 0 0 0 0 24 -8 -11 0 0 0 0 24 -12 0 4 1 12 0 24 -9 -3.38 0 1 -3.38 0 24 -10 -18.75 0 0 0 0 24 -2 -3.5 0 0 0 0 24 -2 -1 0 1 2 0 24 -2 -3.25 0 0 0 0 24 -2 0 1 1 0 0 24 -7 0 3 1 7 0 24 -8 0 3 1 0 0 24 -12 -6 0 1 -6 0 24 -2 -0.5 0 1 2 0 24 -9 -7.88 0 1 -7.88 0 24 -8 -15 0 0 0 0 24 -2 -1.5 0 1 2 0 24 -12 -22.5 0 0 0 0 24 -8 -7 0 1 -7 0 24 -4 -5.5 0 0 0 0 24 -10 -8.75 0 1 -8.75 0 24 -8 -9 0 0 0 0 24 -2 -4 0 0 0 0 24 -4 0 2 1 4 0 24 -8 -8 0 1 8 0 24 -9 -13.5 0 0 0 0 24 -9 -9 0 1 -9 0 24 -6 -3.75 0 1 -3.75 0 24 -13 0 6 1 13 0 24 -5 -1.88 0 1 5 0 24 -6 -6 0 1 -6 0 24 -5 -6.88 0 0 0 0 24 -8 -16 0 0 0 0 24 -12 -7.5 0 1 -7.5 0 24 -5 -1.25 0 1 -1.25 0 24 -9 -14.63 0 0 0 0 24 -8 -4 0 1 8 0 24 -10 -17.5 0 0 0 0 24 -5 -3.75 0 1 5 0 24 -6 -10.5 0 0 0 0 24 -13 0 5 1 13 0 24 -10 -16.25 0 0 0 0 24 -5 -7.5 0 0 0 0 24 -2 -1.75 0 1 2 0 24 -5 -9.38 0 0 0 0 24 -2 -2.75 0 0 0 0 24 -2 -0.75 0 1 -0.75 0 24 -5 -8.13 0 0 0 0 24 -9 -11.25 0 0 0 0 24 -8 -13 0 0 0 0 24 -9 -16.88 0 0 0 0 24 -2 -2 0 1 2 0 24 -12 -18 0 0 0 0 24 -8 -2 0 1 -2 0 24 -2 -3 0 0 0 0 24 -6 -4.5 0 1 6 0 24 -5 0 2 1 5 0 24 -12 -19.5 0 0 0 0 24 -9 -15.75 0 0 0 0 24 -8 -6 0 1 -6 0 24 -10 -2.5 0 1 -2.5 0 24 -9 -6.75 0 1 -6.75 0 24 -6 -6.75 0 0 0 0 24 -2 -3.75 0 0 0 0 24 -10 -5 0 1 -5 0 24 -2 -2.25 0 0 0 0 24 -26 0 12 1 26 0 24 -12 -13.5 0 0 0 0 24 -8 -5 0 1 8 0 24 -6 -3 0 1 6 0 24 -10 -3.75 0 1 10 0 24 -12 -10.5 0 1 12 0 24 -4 -5 0 1 4 0 24 -9 -2.25 0 1 -2.25 0 24 -4 -3 0 1 4 0 24 -9 -10.13 0 0 0 0 24 -28 0 13 1 28 0 24 -22 0 10 1 22 0 24 -10 -10 0 1 10 0 24 -4 -1 0 1 -1 0 24 -4 -2.5 0 1 -2.5 0 24 -12 -24 0 0 0 0 24 -8 -12 0 0 0 0 24 -3 0 1 1 0 0 24 -9 -12.38 0 0 0 0 24 -23 0 10 1 0 0 24 -4 -3.5 0 1 4 0 24 -4 -1.5 0 1 4 0 24 -8 -10 0 0 0 0 24 -8 -14 0 0 0 0 24 -4 -6 0 0 0 0 24 -25 0 10 1 0 0 24 -12 -16.5 0 0 0 0 24 -12 -12 0 1 12 0 24 -5 -2.5 0 1 5 0 24 -5 -8.75 0 0 0 0 24 -12 -4.5 0 1 -4.5 0 24 -12 -15 0 0 0 0 24 -5 -3.13 0 1 5 0 24 -12 -21 0 0 0 0 24 -5 -4.38 0 1 -4.38 0 24 -6 -11.25 0 0 0 0 24 -30 0 12 1 0 0 24 -6 -1.5 0 1 6 0 24 -12 0 6 1 12 0 24 -4 -2 0 1 4 0 24 -10 -15 0 0 0 0 24 -6 -2.25 0 1 6 0 24 -10 -20 0 0 0 0 24 -6 -5.25 0 1 6 0 24 -5 -6.25 0 1 5 0 24 -6 -8.25 0 0 0 0 24 -4 -4.5 0 0 0 0 25 -10 -12.5 0 1 -12.5 0 25 -26 0 12 1 26 0 25 -6 -7.5 0 0 0 0 25 -4 -6.5 0 0 0 0 25 -12 -4.5 0 1 -4.5 0 25 -5 -2.5 0 1 5 0 25 -6 -12 0 0 0 0 25 -9 -14.63 0 0 0 0 25 -6 -6 0 1 -6 0 25 -22 0 10 1 22 0 25 -2 -1 0 1 2 0 25 -8 -3 0 1 8 0 25 -12 -9 0 0 0 0 25 -5 -3.75 0 1 5 0 25 -6 -3 0 1 6 0 25 -4 0 2 1 4 0 25 -28 0 13 1 28 0 25 -12 -15 0 0 0 0 25 -9 -11.25 0 0 0 0 25 -12 -10.5 0 0 0 0 25 -5 -1.88 0 1 5 0 25 -2 -2.75 0 1 2 0 25 -4 -7 0 0 0 0 25 -8 -4 0 1 8 0 25 -2 0 1 1 0 0 25 -2 -3.5 0 1 -3.5 0 25 -2 -1.75 0 1 2 0 25 -5 -5 0 1 -5 0 25 -12 -12 0 0 0 0 25 -12 0 6 1 12 0 25 -6 -4.5 0 1 6 0 25 -30 0 12 1 0 
0 25 -12 -16.5 0 0 0 0 25 -6 -9.75 0 0 0 0 25 -12 -22.5 0 0 0 0 25 -6 -9 0 0 0 0 25 -5 -3.13 0 1 5 0 25 -5 -9.38 0 1 5 0 25 -12 -7.5 0 1 -7.5 0 25 -5 0 2 1 5 0 25 -10 -15 0 0 0 0 25 -12 -3 0 1 -3 0 25 -13 0 6 1 13 0 25 -9 -16.88 0 0 0 0 25 -6 -11.25 0 0 0 0 25 -8 -5 0 1 8 0 25 -8 -14 0 0 0 0 25 -12 -24 0 0 0 0 25 -12 0 5 1 12 0 25 -9 -13.5 0 0 0 0 25 -6 -1.5 0 1 6 0 25 -2 -3 0 1 -3 0 25 -10 -2.5 0 1 -2.5 0 25 -2 -0.75 0 1 -0.75 0 25 -6 -10.5 0 0 0 0 25 -2 -0.5 0 1 2 0 25 -10 -10 0 1 10 0 25 -8 -10 0 0 0 0 25 -9 -12.38 0 0 0 0 25 -4 -6 0 0 0 0 25 -6 -2.25 0 1 6 0 25 -9 -15.75 0 0 0 0 25 -12 -13.5 0 0 0 0 25 -8 -6 0 1 -6 0 25 -10 -18.75 0 0 0 0 25 -4 -2 0 1 4 0 25 -5 -1.25 0 1 -1.25 0 25 -6 -5.25 0 1 6 0 25 -4 -8 0 0 0 0 25 -25 0 9 1 25 0 25 -2 -3.25 0 1 -3.25 0 25 -10 -11.25 0 1 -11.25 0 25 -4 -7.5 0 1 -7.5 0 25 -9 -5.63 0 1 -5.63 0 25 -6 -6.75 0 1 -6.75 0 25 -8 -2 0 1 -2 0 25 -5 -6.25 0 1 5 0 25 -23 0 10 1 0 0 25 -8 -13 0 0 0 0 25 -10 -13.75 0 0 0 0 25 -5 -10 0 0 0 0 25 -12 0 4 1 12 0 25 -2 -2.5 0 1 2 0 25 -19 0 8 1 19 0 25 -4 -4 0 1 -4 0 25 -4 -1 0 1 -1 0 25 -4 -2.5 0 1 -2.5 0 25 -5 -8.13 0 1 5 0 25 -10 -3.75 0 1 10 0 25 -5 -8.75 0 0 0 0 25 -10 -7.5 0 1 -7.5 0 25 -10 -5 0 1 -5 0 25 -10 -20 0 0 0 0 25 -13 0 5 1 13 0 25 -8 -9 0 0 0 0 25 -8 -12 0 0 0 0 25 -10 -16.25 0 0 0 0 25 -5 -6.88 0 0 0 0 25 -4 -5.5 0 1 -5.5 0 25 -5 -7.5 0 0 0 0 25 -9 -10.13 0 0 0 0 25 -6 -8.25 0 0 0 0 25 -26 0 10 1 0 0 25 -4 -5 0 1 4 0 25 -2 -2.25 0 1 2 0 25 -6 -3.75 0 1 -3.75 0 25 -8 -8 0 1 8 0 25 -9 -6.75 0 1 -6.75 0 25 -8 -15 0 0 0 0 25 -12 -6 0 1 -6 0 25 -25 0 10 0 10 0 25 -12 -19.5 0 0 0 0 25 -9 -7.88 0 1 -7.88 0 25 -4 -1.5 0 1 4 0 25 -8 -7 0 1 -7 0 25 -12 -18 0 0 0 0 25 -2 -2 0 1 2 0 25 -9 -18 0 0 0 0 25 -2 -1.25 0 1 2 0 25 -8 -16 0 0 0 0 25 -5 -4.38 0 1 -4.38 0 25 -2 -4 0 0 0 0 25 -5 -5.63 0 1 -5.63 0 25 -8 0 3 0 3 0 25 -10 -17.5 0 0 0 0 25 -8 -11 0 0 0 0 25 -2 -1.5 0 1 2 0 25 -4 -3.5 0 1 4 0 25 -2 -3.75 0 1 -3.75 0 25 -3 0 1 0 1 0 25 -12 -21 0 0 0 0 25 -10 -8.75 0 1 -8.75 0 25 -9 -9 0 1 -9 0 25 -4 -3 0 1 4 0 25 -7 0 3 1 7 0 25 -9 -3.38 0 1 -3.38 0 25 -9 -2.25 0 1 -2.25 0 25 -10 -6.25 0 1 10 0 25 -9 -4.5 0 1 -4.5 0 25 -2 -1 0 1 2 0 26 -9 -13.5 0 0 0 0 26 -5 -6.88 0 0 0 0 26 -10 -10 0 1 10 0 26 -6 -2.25 0 1 6 0 26 -6 -6.75 0 0 0 0 26 -9 -4.5 0 1 -4.5 0 26 -10 -13.75 0 0 0 0 26 -6 -8.25 0 0 0 0 26 -5 -10 0 1 5 0 26 -10 -6.25 0 1 10 0 26 -12 -3 0 1 -3 0 26 -12 -9 0 0 0 0 26 -8 -7 0 0 0 0 26 -6 -12 0 0 0 0 26 -8 -2 0 1 -2 0 26 -12 -6 0 1 -6 0 26 -3 0 1 0 1 0 26 -10 -20 0 0 0 0 26 -5 -3.75 0 1 5 0 26 -2 -1.75 0 1 2 0 26 -6 -3.75 0 0 0 0 26 -9 -12.38 0 0 0 0 26 -5 -6.25 0 0 0 0 26 -12 0 4 0 4 0 26 -2 -1.5 0 1 2 0 26 -6 -5.25 0 0 0 0 26 -10 -18.75 0 0 0 0 26 -6 -6 0 0 0 0 26 -12 0 5 1 12 0 26 -4 -2 0 1 4 0 26 -2 -4 0 0 0 0 26 -5 -2.5 0 1 5 0 26 -2 -3.75 0 0 0 0 26 -9 -15.75 0 0 0 0 26 -8 -4 0 1 8 0 26 -26 0 12 0 12 0 26 -6 -1.5 0 1 6 0 26 -4 -6 0 0 0 0 26 -10 -2.5 0 1 -2.5 0 26 -8 -12 0 0 0 0 26 -2 -3.5 0 0 0 0 26 -5 -5.63 0 1 -5.63 0 26 -12 -24 0 0 0 0 26 -25 0 10 1 0 0 26 -4 -6.5 0 0 0 0 26 -5 -9.38 0 0 0 0 26 -5 -7.5 0 1 -7.5 0 26 -4 -4 0 1 -4 0 26 -6 -10.5 0 0 0 0 26 -13 0 6 0 6 0 26 -12 -22.5 0 0 0 0 26 -4 -7.5 0 0 0 0 26 -5 0 2 1 5 0 26 -10 -15 0 0 0 0 26 -9 -16.88 0 0 0 0 26 -2 -2.5 0 0 0 0 26 -10 -16.25 0 0 0 0 26 -6 -11.25 0 0 0 0 26 -4 -1.5 0 1 4 0 26 -5 -3.13 0 1 5 0 26 -6 -9 0 0 0 0 26 -12 -19.5 0 0 0 0 26 -10 -12.5 0 0 0 0 26 -2 -3 0 0 0 0 26 -8 -16 0 0 0 0 26 -4 0 2 1 4 0 26 -12 -7.5 0 1 -7.5 0 26 -12 -13.5 0 0 0 0 26 -22 0 10 1 22 0 26 -12 -21 0 0 0 0 26 -7 0 3 0 3 0 26 -10 -8.75 0 0 0 0 26 -2 -1.25 0 1 
2 0 26 -9 -6.75 0 0 0 0 26 -12 0 6 0 6 0 26 -28 0 13 1 28 0 26 -9 -10.13 0 0 0 0 26 -2 -0.5 0 1 2 0 26 -25 0 9 0 9 0 26 -6 -7.5 0 0 0 0 26 -4 -3 0 1 4 0 26 -10 -3.75 0 0 0 0 26 -12 -4.5 0 1 -4.5 0 26 -12 -15 0 0 0 0 26 -6 -3 0 1 6 0 26 -9 -14.63 0 0 0 0 26 -5 -1.25 0 1 -1.25 0 26 -8 -11 0 0 0 0 26 -10 -17.5 0 0 0 0 26 -8 -10 0 0 0 0 26 -9 -9 0 0 0 0 26 -10 -11.25 0 0 0 0 26 -12 -12 0 0 0 0 26 -8 -14 0 0 0 0 26 -12 -16.5 0 0 0 0 26 -4 -7 0 0 0 0 26 -4 -1 0 1 -1 0 26 -5 -1.88 0 1 5 0 26 -8 0 3 1 0 0 26 -2 -3.25 0 0 0 0 26 -5 -5 0 0 0 0 26 -26 0 10 0 10 0 26 -12 -10.5 0 0 0 0 26 -2 0 1 0 1 0 26 -6 -9.75 0 0 0 0 26 -8 -3 0 0 0 0 26 -13 0 5 0 5 0 26 -10 -7.5 0 0 0 0 26 -8 -13 0 0 0 0 26 -9 -3.38 0 1 -3.38 0 26 -8 -15 0 0 0 0 26 -30 0 12 1 0 0 26 -8 -8 0 0 0 0 26 -8 -5 0 1 8 0 26 -12 -18 0 0 0 0 26 -10 -5 0 1 -5 0 26 -9 -11.25 0 0 0 0 26 -9 -7.88 0 0 0 0 26 -8 -6 0 0 0 0 26 -6 -4.5 0 1 6 0 26 -8 -9 0 0 0 0 26 -4 -5.5 0 0 0 0 26 -4 -5 0 0 0 0 26 -9 -2.25 0 1 -2.25 0 26 -23 0 10 1 0 0 26 -9 -5.63 0 1 -5.63 0 26 -4 -4.5 0 0 0 0 26 -4 -8 0 1 4 0 26 -19 0 8 1 19 0 26 -2 -2 0 0 0 0 26 -5 -8.13 0 0 0 0 26 -5 -4.38 0 0 0 0 26 -2 -2.25 0 0 0 0 26 -2 -0.75 0 1 -0.75 0 26 -2 -2.75 0 0 0 0 26 -5 -8.75 0 0 0 0 26 -9 -18 0 0 0 0 26 -4 -3.5 0 0 0 0 26 -4 -2.5 0 1 -2.5 0 26 -9 -6.75 0 1 -6.75 0 27 -6 -6.75 0 1 -6.75 0 27 -6 -3 0 1 6 0 27 -2 -1.5 0 1 2 0 27 -4 -3 0 0 0 0 27 -5 -6.88 0 1 5 0 27 -12 -9 0 1 12 0 27 -4 -5 0 0 0 0 27 -5 -7.5 0 1 -7.5 0 27 -4 -4 0 1 -4 0 27 -9 -5.63 0 1 -5.63 0 27 -9 -14.63 0 1 9 0 27 -5 -9.38 0 0 0 0 27 -6 -4.5 0 1 6 0 27 -8 -7 0 1 -7 0 27 -10 -16.25 0 0 0 0 27 -10 -17.5 0 1 -17.5 0 27 -9 -16.88 0 0 0 0 27 -8 -5 0 1 8 0 27 -6 -1.5 0 1 6 0 27 -12 -18 0 1 -18 0 27 -5 -6.25 0 1 5 0 27 -8 -4 0 1 8 0 27 -9 -15.75 0 1 -15.75 0 27 -9 -13.5 0 0 0 0 27 -5 -8.13 0 1 5 0 27 -2 0 1 1 0 0 27 -2 -3.75 0 0 0 0 27 -4 -6.5 0 1 4 0 27 -10 -5 0 1 -5 0 27 -12 -22.5 0 0 0 0 27 -2 -1 0 1 2 0 27 -13 0 6 1 13 0 27 -5 -2.5 0 1 5 0 27 -2 -0.5 0 1 2 0 27 -2 -3.25 0 1 -3.25 0 27 -30 0 12 1 0 0 27 -8 -8 0 1 8 0 27 -4 -5.5 0 0 0 0 27 -23 0 10 1 0 0 27 -4 -3.5 0 0 0 0 27 -5 0 2 1 5 0 27 -8 0 3 0 3 0 27 -9 -10.13 0 1 -10.13 0 27 -8 -16 0 1 -16 0 27 -12 -24 0 1 -24 0 27 -9 -3.38 0 1 -3.38 0 27 -6 -5.25 0 1 6 0 27 -2 -4 0 1 2 0 27 -4 -1 0 1 -1 0 27 -6 -11.25 0 0 0 0 27 -5 -4.38 0 1 -4.38 0 27 -6 -2.25 0 1 6 0 27 -12 -10.5 0 1 12 0 27 -9 -18 0 1 -18 0 27 -10 -20 0 0 0 0 27 -4 -4.5 0 1 -4.5 0 27 -9 -2.25 0 1 -2.25 0 27 -4 -6 0 1 4 0 27 -8 -10 0 1 -10 0 27 -5 -5 0 1 -5 0 27 -5 -8.75 0 0 0 0 27 -8 -6 0 1 -6 0 27 -10 -13.75 0 0 0 0 27 -2 -2.5 0 1 2 0 27 -8 -11 0 1 -11 0 27 -4 -2 0 1 4 0 27 -10 -7.5 0 1 -7.5 0 27 -22 0 10 1 22 0 27 -25 0 10 1 0 0 27 -6 -9.75 0 1 6 0 27 -12 0 5 1 12 0 27 -4 -2.5 0 1 -2.5 0 27 -8 -3 0 1 8 0 27 -10 -11.25 0 1 -11.25 0 27 -5 -10 0 1 5 0 27 -10 -15 0 0 0 0 27 -2 -3.5 0 0 0 0 27 -12 0 4 0 4 0 27 -13 0 5 1 13 0 27 -5 -3.75 0 1 5 0 27 -26 0 12 1 26 0 27 -5 -5.63 0 1 -5.63 0 27 -8 -2 0 1 -2 0 27 -2 -3 0 1 -3 0 27 -6 -9 0 1 -9 0 27 -9 -7.88 0 1 -7.88 0 27 -8 -14 0 0 0 0 27 -28 0 13 1 28 0 27 -9 -12.38 0 0 0 0 27 -8 -15 0 0 0 0 27 -10 -2.5 0 1 -2.5 0 27 -4 0 2 0 2 0 27 -12 -6 0 1 -6 0 27 -12 -16.5 0 1 -16.5 0 27 -4 -7.5 0 1 -7.5 0 27 -10 -8.75 0 1 -8.75 0 27 -10 -18.75 0 0 0 0 27 -26 0 10 1 0 0 27 -12 -21 0 1 12 0 27 -2 -0.75 0 1 -0.75 0 27 -9 -9 0 1 -9 0 27 -10 -6.25 0 1 10 0 27 -8 -12 0 1 -12 0 27 -3 0 1 1 0 0 27 -5 -1.88 0 1 5 0 27 -6 -7.5 0 1 -7.5 0 27 -12 -13.5 0 1 12 0 27 -4 -7 0 0 0 0 27 -6 -8.25 0 1 -8.25 0 27 -6 -12 0 0 0 0 27 -6 -10.5 0 0 0 0 27 -4 -8 0 1 4 0 27 -6 -6 0 1 -6 0 27 -12 0 6 0 
6 0 27 -12 -19.5 0 0 0 0 27 -19 0 8 1 19 0 27 -12 -15 0 0 0 0 27 -2 -1.75 0 1 2 0 27 -6 -3.75 0 1 -3.75 0 27 -2 -1.25 0 1 2 0 27 -5 -1.25 0 1 -1.25 0 27 -4 -1.5 0 1 4 0 27 -8 -13 0 0 0 0 27 -12 -7.5 0 1 -7.5 0 27 -12 -3 0 1 -3 0 27 -2 -2.75 0 0 0 0 27 -7 0 3 1 7 0 27 -25 0 9 1 25 0 27 -2 -2 0 0 0 0 27 -12 -4.5 0 1 -4.5 0 27 -12 -12 0 1 12 0 27 -5 -3.13 0 1 5 0 27 -9 -11.25 0 1 9 0 27 -8 -9 0 0 0 0 27 -2 -2.25 0 1 2 0 27 -9 -4.5 0 1 -4.5 0 27 -10 -3.75 0 1 10 0 27 -10 -10 0 1 10 0 27 -10 -12.5 0 0 0 0 27 -2 -2.5 0 0 0 0 28 -5 -5.63 0 0 0 0 28 -6 -7.5 0 0 0 0 28 -26 0 10 1 0 0 28 -9 -4.5 0 0 0 0 28 -2 -1.25 0 0 0 0 28 -8 -3 0 1 8 0 28 -25 0 9 0 9 0 28 -4 -4.5 0 0 0 0 28 -5 -10 0 0 0 0 28 -6 -9 0 0 0 0 28 -10 -6.25 0 1 10 0 28 -4 -4 0 1 -4 0 28 -12 -3 0 1 -3 0 28 -5 -5 0 0 0 0 28 -12 0 5 0 5 0 28 -6 -9.75 0 0 0 0 28 -19 0 8 0 8 0 28 -4 -7.5 0 0 0 0 28 -12 -9 0 0 0 0 28 -4 -6.5 0 0 0 0 28 -9 -5.63 0 0 0 0 28 -9 -18 0 0 0 0 28 -10 -11.25 0 0 0 0 28 -10 -13.75 0 0 0 0 28 -6 -12 0 0 0 0 28 -10 -12.5 0 0 0 0 28 -4 -7 0 0 0 0 28 -10 -7.5 0 1 -7.5 0 28 -4 -8 0 0 0 0 28 -8 -11 0 0 0 0 28 -12 0 4 0 4 0 28 -9 -3.38 0 1 -3.38 0 28 -10 -18.75 0 0 0 0 28 -2 -3.5 0 0 0 0 28 -2 -1 0 0 0 0 28 -2 -3.25 0 0 0 0 28 -2 0 1 0 1 0 28 -7 0 3 0 3 0 28 -8 0 3 0 3 0 28 -12 -6 0 1 -6 0 28 -2 -0.5 0 1 2 0 28 -9 -7.88 0 0 0 0 28 -8 -15 0 0 0 0 28 -2 -1.5 0 0 0 0 28 -12 -22.5 0 0 0 0 28 -8 -7 0 0 0 0 28 -4 -5.5 0 0 0 0 28 -10 -8.75 0 0 0 0 28 -8 -9 0 0 0 0 28 -2 -4 0 0 0 0 28 -4 0 2 1 4 0 28 -8 -8 0 0 0 0 28 -9 -13.5 0 0 0 0 28 -9 -9 0 0 0 0 28 -6 -3.75 0 0 0 0 28 -13 0 6 0 6 0 28 -5 -1.88 0 1 5 0 28 -6 -6 0 0 0 0 28 -5 -6.88 0 0 0 0 28 -8 -16 0 0 0 0 28 -12 -7.5 0 0 0 0 28 -5 -1.25 0 0 0 0 28 -9 -14.63 0 0 0 0 28 -8 -4 0 0 0 0 28 -10 -17.5 0 0 0 0 28 -5 -3.75 0 0 0 0 28 -6 -10.5 0 0 0 0 28 -13 0 5 0 5 0 28 -10 -16.25 0 0 0 0 28 -5 -7.5 0 0 0 0 28 -2 -1.75 0 0 0 0 28 -5 -9.38 0 0 0 0 28 -2 -2.75 0 0 0 0 28 -2 -0.75 0 1 -0.75 0 28 -5 -8.13 0 0 0 0 28 -9 -11.25 0 0 0 0 28 -8 -13 0 0 0 0 28 -9 -16.88 0 0 0 0 28 -2 -2 0 0 0 0 28 -12 -18 0 0 0 0 28 -8 -2 0 1 -2 0 28 -2 -3 0 0 0 0 28 -6 -4.5 0 0 0 0 28 -5 0 2 0 2 0 28 -12 -19.5 0 0 0 0 28 -9 -15.75 0 0 0 0 28 -8 -6 0 0 0 0 28 -10 -2.5 0 1 -2.5 0 28 -9 -6.75 0 0 0 0 28 -6 -6.75 0 0 0 0 28 -2 -3.75 0 0 0 0 28 -10 -5 0 0 0 0 28 -2 -2.25 0 0 0 0 28 -26 0 12 0 12 0 28 -12 -13.5 0 0 0 0 28 -8 -5 0 0 0 0 28 -6 -3 0 0 0 0 28 -10 -3.75 0 1 10 0 28 -12 -10.5 0 0 0 0 28 -4 -5 0 0 0 0 28 -9 -2.25 0 1 -2.25 0 28 -4 -3 0 0 0 0 28 -9 -10.13 0 0 0 0 28 -28 0 13 0 13 0 28 -22 0 10 0 10 0 28 -10 -10 0 0 0 0 28 -4 -1 0 1 -1 0 28 -4 -2.5 0 0 0 0 28 -12 -24 0 0 0 0 28 -8 -12 0 0 0 0 28 -3 0 1 1 0 0 28 -9 -12.38 0 0 0 0 28 -23 0 10 0 10 0 28 -4 -3.5 0 0 0 0 28 -4 -1.5 0 1 4 0 28 -8 -10 0 0 0 0 28 -8 -14 0 0 0 0 28 -4 -6 0 0 0 0 28 -25 0 10 1 0 0 28 -12 -16.5 0 0 0 0 28 -12 -12 0 0 0 0 28 -5 -2.5 0 0 0 0 28 -5 -8.75 0 0 0 0 28 -12 -4.5 0 0 0 0 28 -12 -15 0 0 0 0 28 -5 -3.13 0 0 0 0 28 -12 -21 0 0 0 0 28 -5 -4.38 0 0 0 0 28 -6 -11.25 0 0 0 0 28 -30 0 12 0 12 0 28 -6 -1.5 0 1 6 0 28 -12 0 6 0 6 0 28 -4 -2 0 0 0 0 28 -10 -15 0 0 0 0 28 -6 -2.25 0 0 0 0 28 -10 -20 0 0 0 0 28 -6 -5.25 0 0 0 0 28 -5 -6.25 0 0 0 0 28 -6 -8.25 0 0 0 0 28 -4 -4.5 0 1 -4.5 0 29 -10 -12.5 0 1 -12.5 0 29 -26 0 12 1 26 0 29 -6 -7.5 0 0 0 0 29 -4 -6.5 0 0 0 0 29 -12 -4.5 0 1 -4.5 0 29 -5 -2.5 0 1 5 0 29 -6 -12 0 0 0 0 29 -9 -14.63 0 1 9 0 29 -6 -6 0 0 0 0 29 -22 0 10 1 22 0 29 -2 -1 0 1 2 0 29 -8 -3 0 0 0 0 29 -12 -9 0 0 0 0 29 -5 -3.75 0 1 5 0 29 -6 -3 0 0 0 0 29 -4 0 2 0 2 0 29 -28 0 13 0 13 0 29 -12 -15 0 0 0 0 29 -9 -11.25 0 0 
0 0 29 -12 -10.5 0 0 0 0 29 -5 -1.88 0 1 5 0 29 -2 -2.75 0 0 0 0 29 -4 -7 0 0 0 0 29 -8 -4 0 1 8 0 29 -2 0 1 0 1 0 29 -2 -3.5 0 0 0 0 29 -2 -1.75 0 1 2 0 29 -5 -5 0 0 0 0 29 -12 -12 0 0 0 0 29 -12 0 6 1 12 0 29 -6 -4.5 0 0 0 0 29 -30 0 12 1 0 0 29 -12 -16.5 0 0 0 0 29 -6 -9.75 0 0 0 0 29 -12 -22.5 0 0 0 0 29 -6 -9 0 0 0 0 29 -5 -3.13 0 1 5 0 29 -5 -9.38 0 0 0 0 29 -12 -7.5 0 1 -7.5 0 29 -5 0 2 0 2 0 29 -10 -15 0 0 0 0 29 -12 -3 0 1 -3 0 29 -13 0 6 1 13 0 29 -9 -16.88 0 0 0 0 29 -6 -11.25 0 0 0 0 29 -8 -5 0 0 0 0 29 -8 -14 0 0 0 0 29 -12 -24 0 0 0 0 29 -12 0 5 0 5 0 29 -9 -13.5 0 0 0 0 29 -6 -1.5 0 1 6 0 29 -2 -3 0 0 0 0 29 -10 -2.5 0 1 -2.5 0 29 -2 -0.75 0 1 -0.75 0 29 -6 -10.5 0 0 0 0 29 -2 -0.5 0 1 2 0 29 -10 -10 0 0 0 0 29 -8 -10 0 1 -10 0 29 -9 -12.38 0 0 0 0 29 -4 -6 0 0 0 0 29 -6 -2.25 0 1 6 0 29 -9 -15.75 0 0 0 0 29 -12 -13.5 0 0 0 0 29 -8 -6 0 0 0 0 29 -10 -18.75 0 0 0 0 29 -4 -2 0 1 4 0 29 -5 -1.25 0 1 -1.25 0 29 -6 -5.25 0 1 6 0 29 -4 -8 0 0 0 0 29 -25 0 9 0 9 0 29 -2 -3.25 0 1 -3.25 0 29 -10 -11.25 0 1 -11.25 0 29 -4 -7.5 0 1 -7.5 0 29 -9 -5.63 0 1 -5.63 0 29 -6 -6.75 0 1 -6.75 0 29 -8 -2 0 1 -2 0 29 -5 -6.25 0 0 0 0 29 -23 0 10 1 0 0 29 -8 -13 0 0 0 0 29 -10 -13.75 0 0 0 0 29 -5 -10 0 0 0 0 29 -12 0 4 0 4 0 29 -2 -2.5 0 0 0 0 29 -19 0 8 0 8 0 29 -4 -4 0 0 0 0 29 -4 -1 0 1 -1 0 29 -4 -2.5 0 1 -2.5 0 29 -5 -8.13 0 0 0 0 29 -10 -3.75 0 1 10 0 29 -5 -8.75 0 0 0 0 29 -10 -7.5 0 0 0 0 29 -10 -5 0 1 -5 0 29 -10 -20 0 0 0 0 29 -13 0 5 0 5 0 29 -8 -9 0 0 0 0 29 -8 -12 0 0 0 0 29 -10 -16.25 0 0 0 0 29 -5 -6.88 0 1 5 0 29 -4 -5.5 0 0 0 0 29 -5 -7.5 0 0 0 0 29 -9 -10.13 0 0 0 0 29 -6 -8.25 0 0 0 0 29 -26 0 10 0 10 0 29 -4 -5 0 1 4 0 29 -2 -2.25 0 0 0 0 29 -6 -3.75 0 0 0 0 29 -8 -8 0 0 0 0 29 -9 -6.75 0 0 0 0 29 -8 -15 0 0 0 0 29 -12 -6 0 1 -6 0 29 -25 0 10 0 10 0 29 -12 -19.5 0 0 0 0 29 -9 -7.88 0 0 0 0 29 -4 -1.5 0 1 4 0 29 -8 -7 0 0 0 0 29 -12 -18 0 0 0 0 29 -2 -2 0 0 0 0 29 -9 -18 0 0 0 0 29 -2 -1.25 0 1 2 0 29 -8 -16 0 0 0 0 29 -5 -4.38 0 1 -4.38 0 29 -2 -4 0 0 0 0 29 -5 -5.63 0 0 0 0 29 -8 0 3 0 3 0 29 -10 -17.5 0 0 0 0 29 -8 -11 0 0 0 0 29 -2 -1.5 0 0 0 0 29 -4 -3.5 0 0 0 0 29 -2 -3.75 0 0 0 0 29 -3 0 1 0 1 0 29 -12 -21 0 0 0 0 29 -10 -8.75 0 0 0 0 29 -9 -9 0 0 0 0 29 -4 -3 0 0 0 0 29 -7 0 3 0 3 0 29 -9 -3.38 0 0 0 0 29 -9 -2.25 0 1 -2.25 0 29 -10 -6.25 0 1 10 0 29 -9 -4.5 0 0 0 0 29 -2 -1 0 0 0 0 30 -9 -13.5 0 0 0 0 30 -5 -6.88 0 1 5 0 30 -10 -10 0 0 0 0 30 -6 -2.25 0 1 6 0 30 -6 -6.75 0 0 0 0 30 -9 -4.5 0 1 -4.5 0 30 -10 -13.75 0 0 0 0 30 -6 -8.25 0 0 0 0 30 -5 -10 0 0 0 0 30 -10 -6.25 0 1 10 0 30 -12 -3 0 1 -3 0 30 -12 -9 0 0 0 0 30 -8 -7 0 1 -7 0 30 -6 -12 0 0 0 0 30 -8 -2 0 1 -2 0 30 -12 -6 0 0 0 0 30 -3 0 1 0 1 0 30 -10 -20 0 0 0 0 30 -5 -3.75 0 1 5 0 30 -2 -1.75 0 0 0 0 30 -6 -3.75 0 1 -3.75 0 30 -9 -12.38 0 0 0 0 30 -5 -6.25 0 0 0 0 30 -12 0 4 0 4 0 30 -2 -1.5 0 0 0 0 30 -6 -5.25 0 0 0 0 30 -10 -18.75 0 0 0 0 30 -6 -6 0 0 0 0 30 -12 0 5 0 5 0 30 -4 -2 0 0 0 0 30 -2 -4 0 0 0 0 30 -5 -2.5 0 1 5 0 30 -2 -3.75 0 0 0 0 30 -9 -15.75 0 0 0 0 30 -8 -4 0 1 8 0 30 -26 0 12 1 26 0 30 -6 -1.5 0 0 0 0 30 -4 -6 0 0 0 0 30 -10 -2.5 0 1 -2.5 0 30 -8 -12 0 0 0 0 30 -2 -3.5 0 0 0 0 30 -5 -5.63 0 0 0 0 30 -12 -24 0 0 0 0 30 -25 0 10 1 0 0 30 -4 -6.5 0 0 0 0 30 -5 -9.38 0 0 0 0 30 -5 -7.5 0 0 0 0 30 -4 -4 0 0 0 0 30 -6 -10.5 0 0 0 0 30 -13 0 6 1 13 0 30 -12 -22.5 0 0 0 0 30 -4 -7.5 0 0 0 0 30 -5 0 2 0 2 0 30 -10 -15 0 0 0 0 30 -9 -16.88 0 0 0 0 30 -2 -2.5 0 0 0 0 30 -10 -16.25 0 0 0 0 30 -6 -11.25 0 0 0 0 30 -4 -1.5 0 1 4 0 30 -5 -3.13 0 0 0 0 30 -6 -9 0 0 0 0 30 -12 -19.5 0 0 0 0 30 -10 -12.5 0 
0 0 0 30 -2 -3 0 0 0 0 30 -8 -16 0 0 0 0 30 -4 0 2 0 2 0 30 -12 -7.5 0 0 0 0 30 -12 -13.5 0 0 0 0 30 -22 0 10 0 10 0 30 -12 -21 0 0 0 0 30 -7 0 3 0 3 0 30 -10 -8.75 0 0 0 0 30 -2 -1.25 0 0 0 0 30 -9 -6.75 0 0 0 0 30 -12 0 6 1 12 0 30 -28 0 13 0 13 0 30 -9 -10.13 0 0 0 0 30 -2 -0.5 0 1 2 0 30 -25 0 9 0 9 0 30 -6 -7.5 0 0 0 0 30 -4 -3 0 0 0 0 30 -10 -3.75 0 1 10 0 30 -12 -4.5 0 1 -4.5 0 30 -12 -15 0 0 0 0 30 -6 -3 0 0 0 0 30 -9 -14.63 0 0 0 0 30 -5 -1.25 0 0 0 0 30 -8 -11 0 0 0 0 30 -10 -17.5 0 0 0 0 30 -8 -10 0 0 0 0 30 -9 -9 0 0 0 0 30 -10 -11.25 0 0 0 0 30 -12 -12 0 0 0 0 30 -8 -14 0 0 0 0 30 -12 -16.5 0 0 0 0 30 -4 -7 0 0 0 0 30 -4 -1 0 0 0 0 30 -5 -1.88 0 0 0 0 30 -8 0 3 0 3 0 30 -2 -3.25 0 0 0 0 30 -5 -5 0 0 0 0 30 -26 0 10 0 10 0 30 -12 -10.5 0 0 0 0 30 -2 0 1 0 1 0 30 -6 -9.75 0 0 0 0 30 -8 -3 0 0 0 0 30 -13 0 5 0 5 0 30 -10 -7.5 0 0 0 0 30 -8 -13 0 0 0 0 30 -9 -3.38 0 0 0 0 30 -8 -15 0 0 0 0 30 -30 0 12 0 12 0 30 -8 -8 0 0 0 0 30 -8 -5 0 0 0 0 30 -12 -18 0 0 0 0 30 -10 -5 0 0 0 0 30 -9 -11.25 0 0 0 0 30 -9 -7.88 0 0 0 0 30 -8 -6 0 0 0 0 30 -6 -4.5 0 0 0 0 30 -8 -9 0 0 0 0 30 -4 -5.5 0 0 0 0 30 -4 -5 0 0 0 0 30 -9 -2.25 0 1 -2.25 0 30 -23 0 10 0 10 0 30 -9 -5.63 0 0 0 0 30 -4 -4.5 0 0 0 0 30 -4 -8 0 0 0 0 30 -19 0 8 0 8 0 30 -2 -2 0 0 0 0 30 -5 -8.13 0 0 0 0 30 -5 -4.38 0 0 0 0 30 -2 -2.25 0 0 0 0 30 -2 -0.75 0 0 0 0 30 -2 -2.75 0 0 0 0 30 -5 -8.75 0 0 0 0 30 -9 -18 0 0 0 0 30 -4 -3.5 0 0 0 0 30 -4 -2.5 0 1 -2.5 0 30 \ No newline at end of file diff --git a/R/inst/extdata/ra_data_reappraisal.txt b/R/inst/extdata/ra_data_reappraisal.txt deleted file mode 100644 index b67f642b..00000000 --- a/R/inst/extdata/ra_data_reappraisal.txt +++ /dev/null @@ -1,4190 +0,0 @@ -gain loss cert gamble outcome cond subjID -9 -11.25 0 1 9 1 1 -8 -16 0 0 0 1 1 -9 -5.63 0 1 -5.63 1 1 -9 -4.5 0 1 9 1 1 -2 -2 0 1 2 1 1 -12 -19.5 0 0 0 1 1 -4 -4.5 0 1 4 1 1 -2 -3.75 0 1 -3.75 1 1 -2 -2.25 0 0 0 1 1 -12 -4.5 0 1 -4.5 1 1 -9 -10.13 0 0 0 1 1 -12 -3 0 1 12 1 1 -10 -17.5 0 0 0 1 1 -5 -4.38 0 1 -4.38 1 1 -5 -7.5 0 0 0 1 1 -6 -11.25 0 0 0 1 1 -2 -1.5 0 1 -1.5 1 1 -9 -6.75 0 1 -6.75 1 1 -4 -7 0 0 0 1 1 -8 -7 0 1 8 1 1 -2 -1.75 0 1 2 1 1 -2 -1 0 1 2 1 1 -10 -6.25 0 1 -6.25 1 1 -6 -6.75 0 0 0 1 1 -9 -2.25 0 1 -2.25 1 1 -2 -0.75 0 1 2 1 1 -12 0 4 1 12 1 1 -6 -3 0 1 -3 1 1 -3 0 1 1 3 1 1 -2 -3 0 0 0 1 1 -10 -13.75 0 1 -13.75 1 1 -6 -2.25 0 1 6 1 1 -5 -1.88 0 1 -1.88 1 1 -12 -13.5 0 1 12 1 1 -22 0 10 1 22 1 1 -9 -12.38 0 0 0 1 1 -26 0 10 1 26 1 1 -12 -10.5 0 1 -10.5 1 1 -10 -2.5 0 1 -2.5 1 1 -25 0 10 1 25 1 1 -9 -15.75 0 1 9 1 1 -7 0 3 1 0 1 1 -10 -10 0 1 10 1 1 -12 -15 0 0 0 1 1 -12 0 6 1 0 1 1 -6 -4.5 0 1 -4.5 1 1 -8 -13 0 0 0 1 1 -10 -16.25 0 0 0 1 1 -5 -1.25 0 1 5 1 1 -4 -4 0 1 4 1 1 -5 -3.75 0 1 5 1 1 -6 -8.25 0 0 0 1 1 -8 -15 0 0 0 1 1 -8 -8 0 1 -8 1 1 -2 -2.75 0 1 -2.75 1 1 -6 -12 0 0 0 1 1 -2 0 1 1 2 1 1 -2 -1.25 0 1 -1.25 1 1 -9 -18 0 0 0 1 1 -6 -9 0 1 -9 1 1 -10 -8.75 0 1 -8.75 1 1 -4 -7.5 0 0 0 1 1 -13 0 6 1 0 1 1 -10 -11.25 0 0 0 1 1 -4 -3 0 1 4 1 1 -10 -5 0 1 10 1 1 -8 -2 0 1 -2 1 1 -4 -2.5 0 0 0 1 1 -2 -3.5 0 0 0 1 1 -2 -2.5 0 1 2 1 1 -6 -3.75 0 0 0 1 1 -8 -3 0 1 8 1 1 -2 -3.25 0 0 0 1 1 -8 -9 0 0 0 1 1 -6 -6 0 0 0 1 1 -8 -11 0 0 0 1 1 -5 -8.75 0 0 0 1 1 -6 -9.75 0 0 0 1 1 -12 -24 0 0 0 1 1 -4 -6.5 0 0 0 1 1 -5 -10 0 0 0 1 1 -30 0 12 1 0 1 1 -12 -18 0 0 0 1 1 -9 -9 0 0 0 1 1 -5 -5 0 1 -5 1 1 -5 -9.38 0 0 0 1 1 -10 -12.5 0 0 0 1 1 -10 -18.75 0 0 0 1 1 -5 -2.5 0 1 -2.5 1 1 -9 -14.63 0 0 0 1 1 -28 0 13 1 0 1 1 -5 -6.88 0 0 0 1 1 -4 -3.5 0 0 0 1 1 -12 -16.5 0 0 0 1 1 -5 -8.13 0 0 0 1 1 -9 -16.88 0 0 0 1 1 -9 
-3.38 0 1 -3.38 1 1 -12 0 5 1 0 1 1 -4 -8 0 0 0 1 1 -8 -12 0 0 0 1 1 -8 -4 0 0 0 1 1 -2 -4 0 0 0 1 1 -12 -9 0 1 -9 1 1 -4 -1.5 0 1 4 1 1 -6 -10.5 0 0 0 1 1 -5 -3.13 0 1 5 1 1 -10 -15 0 0 0 1 1 -23 0 10 0 10 1 1 -12 -7.5 0 1 -7.5 1 1 -2 -0.5 0 1 -0.5 1 1 -4 0 2 0 2 1 1 -6 -1.5 0 1 -1.5 1 1 -4 -1 0 1 4 1 1 -10 -20 0 0 0 1 1 -12 -22.5 0 0 0 1 1 -25 0 9 1 0 1 1 -13 0 5 0 5 1 1 -6 -5.25 0 0 0 1 1 -9 -13.5 0 0 0 1 1 -5 0 2 0 2 1 1 -12 -6 0 1 -6 1 1 -5 -6.25 0 0 0 1 1 -10 -3.75 0 1 10 1 1 -9 -7.88 0 0 0 1 1 -8 -6 0 0 0 1 1 -4 -5.5 0 0 0 1 1 -19 0 8 0 8 1 1 -10 -7.5 0 0 0 1 1 -4 -6 0 0 0 1 1 -8 0 3 0 3 1 1 -12 -21 0 0 0 1 1 -4 -2 0 0 0 1 1 -4 -5 0 0 0 1 1 -12 -12 0 0 0 1 1 -8 -5 0 1 -5 1 1 -26 0 12 1 0 1 1 -8 -10 0 0 0 1 1 -5 -5.63 0 0 0 1 1 -2 -1 0 1 2 1 2 -9 -6.75 0 1 -6.75 1 2 -2 -4 0 0 0 1 2 -2 -3.25 0 0 0 1 2 -4 -6.5 0 1 -6.5 1 2 -5 -5.63 0 0 0 1 2 -8 -8 0 1 -8 1 2 -12 -18 0 1 12 1 2 -2 -2.5 0 0 0 1 2 -3 0 1 1 3 1 2 -12 -16.5 0 1 12 1 2 -10 -12.5 0 1 -12.5 1 2 -5 -1.25 0 1 5 1 2 -19 0 8 1 19 1 2 -8 -9 0 0 0 1 2 -5 -10 0 0 0 1 2 -25 0 10 1 25 1 2 -7 0 3 0 3 1 2 -6 -11.25 0 0 0 1 2 -6 -1.5 0 1 -1.5 1 2 -4 -1.5 0 1 4 1 2 -10 -5 0 1 10 1 2 -10 -3.75 0 1 10 1 2 -6 -4.5 0 0 0 1 2 -12 -19.5 0 0 0 1 2 -5 -4.38 0 0 0 1 2 -8 -11 0 0 0 1 2 -2 -0.75 0 1 2 1 2 -2 -1.5 0 0 0 1 2 -6 -6.75 0 0 0 1 2 -4 -6 0 0 0 1 2 -10 -16.25 0 1 -16.25 1 2 -12 -15 0 1 -15 1 2 -6 -5.25 0 1 6 1 2 -12 -21 0 1 12 1 2 -4 -3 0 1 4 1 2 -12 -22.5 0 1 12 1 2 -2 -3.75 0 0 0 1 2 -6 -12 0 1 -12 1 2 -5 -8.13 0 1 5 1 2 -10 -8.75 0 1 -8.75 1 2 -12 -6 0 1 -6 1 2 -5 -5 0 1 -5 1 2 -22 0 10 1 22 1 2 -12 -13.5 0 1 12 1 2 -8 -7 0 1 8 1 2 -4 -3.5 0 0 0 1 2 -9 -12.38 0 1 9 1 2 -10 -7.5 0 1 -7.5 1 2 -26 0 10 1 26 1 2 -12 -4.5 0 1 -4.5 1 2 -8 -15 0 0 0 1 2 -2 -1.75 0 0 0 1 2 -12 0 6 1 0 1 2 -9 -3.38 0 1 -3.38 1 2 -2 -3 0 0 0 1 2 -9 -5.63 0 0 0 1 2 -2 -3.5 0 0 0 1 2 -8 -12 0 0 0 1 2 -10 -18.75 0 1 10 1 2 -4 0 2 1 4 1 2 -2 -2.25 0 0 0 1 2 -9 -2.25 0 1 -2.25 1 2 -10 -13.75 0 1 -13.75 1 2 -28 0 13 1 0 1 2 -4 -2.5 0 1 4 1 2 -9 -15.75 0 1 9 1 2 -10 -15 0 0 0 1 2 -10 -10 0 1 10 1 2 -9 -18 0 0 0 1 2 -12 -24 0 1 -24 1 2 -13 0 5 1 13 1 2 -5 -1.88 0 1 -1.88 1 2 -4 -4.5 0 1 4 1 2 -9 -7.88 0 1 9 1 2 -9 -9 0 1 9 1 2 -25 0 9 1 0 1 2 -12 -12 0 1 -12 1 2 -6 -2.25 0 1 6 1 2 -8 -5 0 1 -5 1 2 -4 -5.5 0 1 -5.5 1 2 -2 -1.25 0 1 -1.25 1 2 -9 -13.5 0 1 -13.5 1 2 -9 -4.5 0 1 9 1 2 -10 -11.25 0 1 10 1 2 -6 -3 0 1 -3 1 2 -10 -2.5 0 1 -2.5 1 2 -12 0 4 1 12 1 2 -10 -20 0 1 10 1 2 -5 -3.75 0 1 5 1 2 -9 -10.13 0 1 -10.13 1 2 -4 -7 0 1 -7 1 2 -12 -10.5 0 1 -10.5 1 2 -8 -16 0 1 8 1 2 -4 -7.5 0 0 0 1 2 -8 0 3 1 8 1 2 -6 -10.5 0 0 0 1 2 -6 -9.75 0 0 0 1 2 -5 -8.75 0 0 0 1 2 -5 -2.5 0 1 -2.5 1 2 -13 0 6 1 0 1 2 -23 0 10 1 0 1 2 -8 -4 0 1 -4 1 2 -9 -11.25 0 1 9 1 2 -5 -6.88 0 0 0 1 2 -4 -4 0 1 4 1 2 -10 -17.5 0 0 0 1 2 -8 -13 0 0 0 1 2 -26 0 12 1 0 1 2 -6 -8.25 0 1 -8.25 1 2 -9 -14.63 0 1 9 1 2 -8 -2 0 1 -2 1 2 -10 -6.25 0 1 -6.25 1 2 -8 -14 0 0 0 1 2 -12 0 5 1 0 1 2 -8 -10 0 0 0 1 2 -30 0 12 1 0 1 2 -5 -7.5 0 0 0 1 2 -5 0 2 1 0 1 2 -6 -3.75 0 1 6 1 2 -6 -6 0 1 -6 1 2 -4 -2 0 1 -2 1 2 -12 -7.5 0 1 -7.5 1 2 -5 -6.25 0 1 5 1 2 -4 -5 0 1 4 1 2 -2 -2.75 0 1 -2.75 1 2 -2 -2 0 1 2 1 2 -6 -9 0 1 -9 1 2 -5 -3.13 0 1 5 1 2 -12 -9 0 1 -9 1 2 -4 -8 0 1 -8 1 2 -4 -1 0 1 4 1 2 -2 0 1 1 2 1 2 -9 -16.88 0 1 9 1 2 -8 -6 0 1 -6 1 2 -2 -0.5 0 1 -0.5 1 2 -6 -7.5 0 1 -7.5 1 2 -8 -3 0 1 8 1 2 -12 -3 0 1 12 1 2 -5 -9.38 0 1 -9.38 1 2 -6 -9.75 0 0 0 1 3 -12 -13.5 0 0 0 1 3 -8 -7 0 1 8 1 3 -10 -7.5 0 0 0 1 3 -2 -2.25 0 0 0 1 3 -6 -8.25 0 0 0 1 3 -10 -16.25 0 0 0 1 3 -3 0 1 1 3 1 3 -4 -3 0 0 0 1 3 -8 -2 0 1 
-2 1 3 -4 -2.5 0 0 0 1 3 -5 -5.63 0 0 0 1 3 -5 0 2 1 0 1 3 -30 0 12 1 0 1 3 -9 -4.5 0 0 0 1 3 -4 -7.5 0 0 0 1 3 -26 0 10 0 10 1 3 -10 -6.25 0 0 0 1 3 -2 -4 0 0 0 1 3 -4 -5 0 0 0 1 3 -5 -1.88 0 1 -1.88 1 3 -23 0 10 1 0 1 3 -8 -3 0 0 0 1 3 -8 -12 0 0 0 1 3 -10 -2.5 0 0 0 1 3 -5 -8.13 0 0 0 1 3 -8 -9 0 0 0 1 3 -2 -3 0 0 0 1 3 -9 -11.25 0 0 0 1 3 -9 -12.38 0 0 0 1 3 -12 -15 0 0 0 1 3 -8 -10 0 0 0 1 3 -4 -1 0 1 4 1 3 -8 0 3 1 8 1 3 -4 -3.5 0 0 0 1 3 -8 -8 0 0 0 1 3 -10 -11.25 0 0 0 1 3 -10 -5 0 1 10 1 3 -9 -13.5 0 0 0 1 3 -2 -0.75 0 1 2 1 3 -5 -4.38 0 0 0 1 3 -2 -1.5 0 0 0 1 3 -2 -3.75 0 0 0 1 3 -5 -3.75 0 0 0 1 3 -9 -16.88 0 0 0 1 3 -9 -3.38 0 1 -3.38 1 3 -5 -10 0 0 0 1 3 -26 0 12 0 12 1 3 -5 -9.38 0 0 0 1 3 -6 -1.5 0 1 -1.5 1 3 -10 -10 0 0 0 1 3 -2 -1.25 0 1 -1.25 1 3 -9 -14.63 0 0 0 1 3 -6 -4.5 0 0 0 1 3 -5 -5 0 0 0 1 3 -5 -7.5 0 0 0 1 3 -8 -13 0 0 0 1 3 -5 -3.13 0 0 0 1 3 -8 -5 0 0 0 1 3 -8 -11 0 0 0 1 3 -6 -6.75 0 0 0 1 3 -5 -8.75 0 0 0 1 3 -2 0 1 1 2 1 3 -9 -5.63 0 0 0 1 3 -6 -6 0 0 0 1 3 -4 -5.5 0 0 0 1 3 -6 -3 0 0 0 1 3 -12 -19.5 0 0 0 1 3 -10 -13.75 0 0 0 1 3 -10 -8.75 0 0 0 1 3 -5 -6.88 0 0 0 1 3 -6 -7.5 0 0 0 1 3 -10 -12.5 0 0 0 1 3 -9 -6.75 0 0 0 1 3 -4 -6 0 0 0 1 3 -8 -4 0 1 -4 1 3 -2 -1 0 1 2 1 3 -12 -24 0 0 0 1 3 -12 -6 0 0 0 1 3 -2 -2 0 0 0 1 3 -4 -7 0 0 0 1 3 -12 -9 0 0 0 1 3 -6 -11.25 0 0 0 1 3 -25 0 10 0 10 1 3 -28 0 13 0 13 1 3 -2 -2.75 0 0 0 1 3 -12 -10.5 0 0 0 1 3 -8 -14 0 0 0 1 3 -4 -6.5 0 0 0 1 3 -4 0 2 1 4 1 3 -10 -15 0 0 0 1 3 -12 0 5 1 0 1 3 -10 -18.75 0 0 0 1 3 -12 -3 0 1 12 1 3 -4 -4 0 0 0 1 3 -9 -7.88 0 0 0 1 3 -9 -2.25 0 1 -2.25 1 3 -2 -1.75 0 0 0 1 3 -12 0 6 1 0 1 3 -5 -2.5 0 0 0 1 3 -4 -4.5 0 0 0 1 3 -8 -6 0 0 0 1 3 -12 -18 0 0 0 1 3 -12 -16.5 0 0 0 1 3 -22 0 10 0 10 1 3 -12 -21 0 0 0 1 3 -12 -4.5 0 0 0 1 3 -12 -12 0 0 0 1 3 -19 0 8 0 8 1 3 -2 -2.5 0 0 0 1 3 -12 0 4 1 12 1 3 -4 -2 0 0 0 1 3 -9 -9 0 0 0 1 3 -9 -10.13 0 0 0 1 3 -6 -2.25 0 1 6 1 3 -2 -0.5 0 1 -0.5 1 3 -10 -3.75 0 1 10 1 3 -13 0 5 1 13 1 3 -4 -1.5 0 1 4 1 3 -5 -1.25 0 1 5 1 3 -6 -9 0 0 0 1 3 -10 -17.5 0 0 0 1 3 -6 -12 0 0 0 1 3 -6 -5.25 0 0 0 1 3 -12 -22.5 0 0 0 1 3 -8 -16 0 0 0 1 3 -9 -15.75 0 0 0 1 3 -10 -20 0 0 0 1 3 -13 0 6 1 0 1 3 -4 -8 0 0 0 1 3 -12 -7.5 0 0 0 1 3 -9 -18 0 0 0 1 3 -2 -3.25 0 0 0 1 3 -7 0 3 0 3 1 3 -6 -3.75 0 0 0 1 3 -5 -6.25 0 0 0 1 3 -8 -15 0 0 0 1 3 -25 0 9 0 9 1 3 -2 -3.5 0 0 0 1 3 -6 -10.5 0 0 0 1 3 -9 -10.13 0 1 -10.13 1 4 -12 -10.5 0 0 0 1 4 -25 0 10 1 25 1 4 -4 -7 0 1 -7 1 4 -9 -7.88 0 0 0 1 4 -5 -3.13 0 1 5 1 4 -5 -8.13 0 1 5 1 4 -8 -7 0 0 0 1 4 -12 -6 0 1 -6 1 4 -12 -24 0 0 0 1 4 -12 -21 0 0 0 1 4 -4 -2.5 0 1 4 1 4 -6 -9 0 0 0 1 4 -10 -15 0 1 10 1 4 -8 -6 0 1 -6 1 4 -13 0 6 1 0 1 4 -6 -12 0 1 -12 1 4 -6 -4.5 0 0 0 1 4 -9 -16.88 0 0 0 1 4 -10 -18.75 0 1 10 1 4 -9 -3.38 0 1 -3.38 1 4 -6 -9.75 0 1 -9.75 1 4 -2 -1.75 0 0 0 1 4 -5 0 2 0 2 1 4 -8 -5 0 1 -5 1 4 -8 -9 0 0 0 1 4 -12 0 6 1 0 1 4 -12 0 4 1 12 1 4 -2 -2.5 0 0 0 1 4 -6 -3 0 1 -3 1 4 -10 -7.5 0 1 -7.5 1 4 -5 -2.5 0 1 -2.5 1 4 -5 -3.75 0 1 5 1 4 -10 -3.75 0 1 10 1 4 -2 -3 0 0 0 1 4 -10 -6.25 0 1 -6.25 1 4 -4 -7.5 0 0 0 1 4 -8 -16 0 0 0 1 4 -5 -6.25 0 1 5 1 4 -4 0 2 1 4 1 4 -10 -11.25 0 1 10 1 4 -5 -6.88 0 0 0 1 4 -5 -7.5 0 1 5 1 4 -26 0 12 0 12 1 4 -8 -13 0 1 8 1 4 -4 -4.5 0 0 0 1 4 -8 -10 0 1 -10 1 4 -6 -3.75 0 1 6 1 4 -5 -5.63 0 0 0 1 4 -9 -18 0 0 0 1 4 -12 -13.5 0 1 12 1 4 -7 0 3 0 3 1 4 -8 -14 0 1 8 1 4 -2 -1.5 0 0 0 1 4 -10 -2.5 0 1 -2.5 1 4 -13 0 5 1 13 1 4 -9 -15.75 0 0 0 1 4 -8 -12 0 0 0 1 4 -28 0 13 1 0 1 4 -6 -7.5 0 0 0 1 4 -10 -16.25 0 0 0 1 4 -12 -7.5 0 1 -7.5 1 4 -5 -5 0 0 0 1 4 -2 -2 0 1 2 1 4 -22 0 10 
[Example trial data for the ra models, subjects 4-29: whitespace-delimited rows of the subjID, gain, loss, cert, and gamble fields plus the trial outcome; original per-row line breaks not recoverable.]
29 -10 -16.25 0 0 0 1 29 -12 -7.5 0 0 0 1 29 -5 -5 0 1 -5 1 29 -2 -2 0 1 2 1 29 -22 0 10 0 10 1 29 -2 -1 0 1 2 1 29 -3 0 1 0 1 1 29 -4 -5.5 0 0 0 1 29 -2 -2.25 0 0 0 1 29 -6 -2.25 0 1 6 1 29 -4 -6.5 0 0 0 1 29 -9 -12.38 0 0 0 1 29 -10 -13.75 0 0 0 1 29 -10 -17.5 0 0 0 1 29 -4 -5 0 1 4 1 29 -9 -11.25 0 0 0 1 29 -10 -10 0 0 0 1 29 -2 -3.25 0 1 2 1 29 -5 -8.75 0 0 0 1 29 -5 -10 0 0 0 1 29 -9 -2.25 0 1 -2.25 1 29 -6 -6.75 0 0 0 1 29 -12 -16.5 0 0 0 1 29 -9 -14.63 0 0 0 1 29 -4 -8 0 0 0 1 29 -6 -5.25 0 0 0 1 29 -9 -6.75 0 1 -6.75 1 29 -12 -12 0 1 -12 1 29 -4 -1 0 1 4 1 29 -12 -15 0 0 0 1 29 -4 -3.5 0 0 0 1 29 -2 -1.25 0 0 0 1 29 -30 0 12 0 12 1 29 -12 -19.5 0 0 0 1 29 -12 -3 0 0 0 1 29 -5 -1.25 0 0 0 1 29 -5 -1.88 0 1 -1.88 1 29 -2 -3.5 0 1 -3.5 1 29 -12 -9 0 0 0 1 29 -10 -20 0 0 0 1 29 -8 -4 0 1 -4 1 29 -12 0 5 0 5 1 29 -2 0 1 0 1 1 29 -4 -1.5 0 1 4 1 29 -2 -3.75 0 0 0 1 29 -6 -10.5 0 0 0 1 29 -4 -2 0 0 0 1 29 -23 0 10 0 10 1 29 -12 -18 0 0 0 1 29 -6 -8.25 0 0 0 1 29 -26 0 10 0 10 1 29 -10 -8.75 0 0 0 1 29 -2 -0.75 0 1 2 1 29 -5 -9.38 0 0 0 1 29 -25 0 9 0 9 1 29 -9 -4.5 0 0 0 1 29 -10 -5 0 1 10 1 29 -2 -4 0 0 0 1 29 -2 -2.75 0 1 -2.75 1 29 -4 -6 0 0 0 1 29 -10 -12.5 0 0 0 1 29 -12 -22.5 0 0 0 1 29 -4 -4 0 1 4 1 29 -2 -0.5 0 1 -0.5 1 29 -8 -2 0 1 -2 1 29 -4 -3 0 1 4 1 29 -6 -11.25 0 0 0 1 29 -8 -15 0 0 0 1 29 -8 -11 0 0 0 1 29 -12 -4.5 0 1 -4.5 1 29 -19 0 8 0 8 1 29 -6 -6 0 0 0 1 29 -5 -4.38 0 0 0 1 29 -9 -9 0 0 0 1 29 -6 -1.5 0 1 -1.5 1 29 -9 -13.5 0 1 -13.5 1 29 -9 -5.63 0 0 0 1 29 -8 -8 0 0 0 1 29 -8 0 3 0 3 1 29 -8 -3 0 1 8 1 29 -9 -11.25 0 0 0 1 30 -8 -16 0 0 0 1 30 -9 -5.63 0 1 -5.63 1 30 -9 -4.5 0 1 9 1 30 -2 -2 0 0 0 1 30 -12 -19.5 0 0 0 1 30 -4 -4.5 0 0 0 1 30 -2 -3.75 0 1 -3.75 1 30 -2 -2.25 0 0 0 1 30 -12 -4.5 0 1 -4.5 1 30 -9 -10.13 0 0 0 1 30 -12 -3 0 1 12 1 30 -10 -17.5 0 0 0 1 30 -5 -4.38 0 0 0 1 30 -5 -7.5 0 0 0 1 30 -6 -11.25 0 0 0 1 30 -2 -1.5 0 0 0 1 30 -9 -6.75 0 1 -6.75 1 30 -4 -7 0 0 0 1 30 -8 -7 0 0 0 1 30 -2 -1.75 0 0 0 1 30 -2 -1 0 0 0 1 30 -10 -6.25 0 1 -6.25 1 30 -6 -6.75 0 0 0 1 30 -9 -2.25 0 1 -2.25 1 30 -2 -0.75 0 0 0 1 30 -12 0 4 0 4 1 30 -6 -3 0 1 -3 1 30 -3 0 1 0 1 1 30 -2 -3 0 0 0 1 30 -10 -13.75 0 0 0 1 30 -6 -2.25 0 1 6 1 30 -5 -1.88 0 1 -1.88 1 30 -12 -13.5 0 0 0 1 30 -22 0 10 0 10 1 30 -9 -12.38 0 0 0 1 30 -26 0 10 0 10 1 30 -12 -10.5 0 0 0 1 30 -10 -2.5 0 1 -2.5 1 30 -25 0 10 0 10 1 30 -9 -15.75 0 0 0 1 30 -7 0 3 0 3 1 30 -10 -10 0 0 0 1 30 -12 -15 0 0 0 1 30 -12 0 6 0 6 1 30 -6 -4.5 0 0 0 1 30 -8 -13 0 0 0 1 30 -10 -16.25 0 0 0 1 30 -5 -1.25 0 1 5 1 30 -4 -4 0 0 0 1 30 -5 -3.75 0 0 0 1 30 -6 -8.25 0 0 0 1 30 -8 -15 0 0 0 1 30 -8 -8 0 0 0 1 30 -2 -2.75 0 0 0 1 30 -6 -12 0 0 0 1 30 -2 0 1 0 1 1 30 -2 -1.25 0 0 0 1 30 -9 -18 0 0 0 1 30 -6 -9 0 0 0 1 30 -10 -8.75 0 0 0 1 30 -4 -7.5 0 0 0 1 30 -13 0 6 0 6 1 30 -10 -11.25 0 0 0 1 30 -4 -3 0 0 0 1 30 -10 -5 0 0 0 1 30 -8 -2 0 0 0 1 30 -4 -2.5 0 0 0 1 30 -2 -3.5 0 0 0 1 30 -2 -2.5 0 0 0 1 30 -6 -3.75 0 0 0 1 30 -8 -3 0 0 0 1 30 -2 -3.25 0 0 0 1 30 -8 -9 0 0 0 1 30 -6 -6 0 0 0 1 30 -8 -11 0 0 0 1 30 -5 -8.75 0 0 0 1 30 -6 -9.75 0 0 0 1 30 -12 -24 0 0 0 1 30 -4 -6.5 0 0 0 1 30 -5 -10 0 0 0 1 30 -30 0 12 1 0 1 30 -12 -18 0 0 0 1 30 -9 -9 0 0 0 1 30 -5 -5 0 0 0 1 30 -5 -9.38 0 0 0 1 30 -10 -12.5 0 0 0 1 30 -10 -18.75 0 0 0 1 30 -5 -2.5 0 1 -2.5 1 30 -9 -14.63 0 0 0 1 30 -28 0 13 1 0 1 30 -5 -6.88 0 1 -6.88 1 30 -4 -3.5 0 0 0 1 30 -12 -16.5 0 0 0 1 30 -5 -8.13 0 0 0 1 30 -9 -16.88 0 0 0 1 30 -9 -3.38 0 0 0 1 30 -12 0 5 1 0 1 30 -4 -8 0 0 0 1 30 -8 -12 0 0 0 1 30 -8 -4 0 0 0 1 30 -2 -4 0 0 0 1 30 -12 -9 0 1 -9 1 30 -4 
-1.5 0 0 0 1 30 -6 -10.5 0 0 0 1 30 -5 -3.13 0 0 0 1 30 -10 -15 0 0 0 1 30 -23 0 10 0 10 1 30 -12 -7.5 0 0 0 1 30 -2 -0.5 0 0 0 1 30 -4 0 2 0 2 1 30 -6 -1.5 0 0 0 1 30 -4 -1 0 0 0 1 30 -10 -20 0 0 0 1 30 -12 -22.5 0 0 0 1 30 -25 0 9 0 9 1 30 -13 0 5 0 5 1 30 -6 -5.25 0 0 0 1 30 -9 -13.5 0 0 0 1 30 -5 0 2 0 2 1 30 -12 -6 0 0 0 1 30 -5 -6.25 0 0 0 1 30 -10 -3.75 0 0 0 1 30 -9 -7.88 0 0 0 1 30 -8 -6 0 0 0 1 30 -4 -5.5 0 0 0 1 30 -19 0 8 0 8 1 30 -10 -7.5 0 0 0 1 30 -4 -6 0 0 0 1 30 -8 -14 0 0 0 1 30 -8 0 3 0 3 1 30 -12 -21 0 0 0 1 30 -4 -2 0 0 0 1 30 -4 -5 0 0 0 1 30 -6 -7.5 0 0 0 1 30 -12 -12 0 0 0 1 30 -8 -5 0 0 0 1 30 -26 0 12 1 0 1 30 -8 -10 0 0 0 1 30 -5 -5.63 0 0 0 1 30 \ No newline at end of file diff --git a/R/inst/extdata/ra_exampleData.txt b/R/inst/extdata/ra_exampleData.txt deleted file mode 100644 index dd6e3536..00000000 --- a/R/inst/extdata/ra_exampleData.txt +++ /dev/null @@ -1,701 +0,0 @@ -gain loss cert gamble outcome cond subjID -9 -6.75 0 1 -6.75 0 2 -6 -6.75 0 0 0 0 2 -6 -3 0 1 6 0 2 -2 -1.5 0 0 0 0 2 -4 -3 0 0 0 0 2 -5 -6.88 0 0 0 0 2 -12 -9 0 1 12 0 2 -4 -5 0 0 0 0 2 -5 -7.5 0 0 0 0 2 -4 -4 0 1 -4 0 2 -9 -5.63 0 1 -5.63 0 2 -9 -14.63 0 0 0 0 2 -5 -9.38 0 0 0 0 2 -6 -4.5 0 1 6 0 2 -8 -7 0 0 0 0 2 -10 -16.25 0 0 0 0 2 -10 -17.5 0 0 0 0 2 -9 -16.88 0 0 0 0 2 -8 -5 0 1 8 0 2 -6 -1.5 0 1 6 0 2 -12 -18 0 0 0 0 2 -5 -6.25 0 0 0 0 2 -8 -4 0 1 8 0 2 -9 -15.75 0 0 0 0 2 -9 -13.5 0 0 0 0 2 -5 -8.13 0 0 0 0 2 -2 0 1 1 0 0 2 -2 -3.75 0 0 0 0 2 -4 -6.5 0 0 0 0 2 -10 -5 0 1 -5 0 2 -12 -22.5 0 0 0 0 2 -2 -1 0 1 2 0 2 -13 0 6 1 13 0 2 -5 -2.5 0 0 0 0 2 -2 -0.5 0 1 2 0 2 -2 -3.25 0 1 -3.25 0 2 -30 0 12 1 0 0 2 -8 -8 0 1 8 0 2 -4 -5.5 0 0 0 0 2 -23 0 10 1 0 0 2 -4 -3.5 0 0 0 0 2 -5 0 2 1 5 0 2 -8 0 3 1 0 0 2 -9 -10.13 0 0 0 0 2 -8 -16 0 0 0 0 2 -12 -24 0 0 0 0 2 -9 -3.38 0 1 -3.38 0 2 -6 -5.25 0 1 6 0 2 -2 -4 0 0 0 0 2 -4 -1 0 1 -1 0 2 -6 -11.25 0 0 0 0 2 -5 -4.38 0 1 -4.38 0 2 -6 -2.25 0 1 6 0 2 -12 -10.5 0 1 12 0 2 -9 -18 0 0 0 0 2 -10 -20 0 0 0 0 2 -4 -4.5 0 0 0 0 2 -9 -2.25 0 1 -2.25 0 2 -4 -6 0 0 0 0 2 -8 -10 0 1 -10 0 2 -5 -5 0 1 -5 0 2 -5 -8.75 0 0 0 0 2 -8 -6 0 1 -6 0 2 -10 -13.75 0 0 0 0 2 -2 -2.5 0 0 0 0 2 -8 -11 0 1 -11 0 2 -4 -2 0 1 4 0 2 -10 -7.5 0 1 -7.5 0 2 -22 0 10 1 22 0 2 -25 0 10 1 0 0 2 -6 -9.75 0 0 0 0 2 -12 0 5 1 12 0 2 -4 -2.5 0 1 -2.5 0 2 -8 -3 0 1 8 0 2 -10 -11.25 0 1 -11.25 0 2 -5 -10 0 1 5 0 2 -10 -15 0 0 0 0 2 -2 -3.5 0 0 0 0 2 -12 0 4 1 12 0 2 -13 0 5 0 5 0 2 -5 -3.75 0 1 5 0 2 -26 0 12 0 12 0 2 -5 -5.63 0 0 0 0 2 -8 -2 0 1 -2 0 2 -2 -3 0 0 0 0 2 -6 -9 0 0 0 0 2 -9 -7.88 0 0 0 0 2 -8 -14 0 0 0 0 2 -28 0 13 1 28 0 2 -9 -12.38 0 0 0 0 2 -8 -15 0 1 -15 0 2 -10 -2.5 0 1 -2.5 0 2 -4 0 2 1 4 0 2 -12 -6 0 1 -6 0 2 -12 -16.5 0 1 -16.5 0 2 -4 -7.5 0 0 0 0 2 -10 -8.75 0 1 -8.75 0 2 -10 -18.75 0 1 10 0 2 -26 0 10 1 0 0 2 -12 -21 0 1 12 0 2 -2 -0.75 0 1 -0.75 0 2 -9 -9 0 1 -9 0 2 -10 -6.25 0 1 10 0 2 -8 -12 0 1 -12 0 2 -3 0 1 1 0 0 2 -5 -1.88 0 1 5 0 2 -6 -7.5 0 1 -7.5 0 2 -12 -13.5 0 1 12 0 2 -4 -7 0 0 0 0 2 -6 -8.25 0 1 -8.25 0 2 -6 -12 0 0 0 0 2 -6 -10.5 0 0 0 0 2 -4 -8 0 0 0 0 2 -6 -6 0 1 -6 0 2 -12 0 6 1 12 0 2 -12 -19.5 0 1 12 0 2 -19 0 8 1 19 0 2 -12 -15 0 0 0 0 2 -2 -1.75 0 0 0 0 2 -6 -3.75 0 0 0 0 2 -2 -1.25 0 0 0 0 2 -5 -1.25 0 1 -1.25 0 2 -4 -1.5 0 1 4 0 2 -8 -13 0 0 0 0 2 -12 -7.5 0 1 -7.5 0 2 -12 -3 0 1 -3 0 2 -2 -2.75 0 0 0 0 2 -7 0 3 1 7 0 2 -25 0 9 1 25 0 2 -2 -2 0 0 0 0 2 -12 -4.5 0 1 -4.5 0 2 -12 -12 0 1 12 0 2 -5 -3.13 0 1 5 0 2 -9 -11.25 0 0 0 0 2 -8 -9 0 1 -9 0 2 -2 -2.25 0 0 0 0 2 -9 -4.5 0 1 -4.5 0 2 -10 -3.75 0 1 10 0 2 -10 -10 0 1 10 0 2 -10 -12.5 
\ No newline at end of file
diff --git a/R/inst/extdata/rdt_exampleData.txt b/R/inst/extdata/rdt_exampleData.txt
deleted file mode 100644
index 79d99830..00000000
--- a/R/inst/extdata/rdt_exampleData.txt
+++ /dev/null
@@ -1,901 +0,0 @@
-subjID trial_number gamble_cha RT cert gain loss type_cha trial_payoff outcome happy RT_happy gamble type
[ ... 900 deleted data rows elided (150 risky-decision trials each for subjIDs 1-6) ... ]
diff --git a/R/inst/extdata/ts_exampleData.txt b/R/inst/extdata/ts_exampleData.txt
deleted file mode 100644
index 648f94b9..00000000
--- a/R/inst/extdata/ts_exampleData.txt
+++ /dev/null
@@ -1,2191 +0,0 @@
-subjID trial level1_choice level2_choice reward A1prob A2prob B1prob B2prob
[ ... 2190 deleted data rows elided (two-step task trials) ... ]
0.63189 0.30771 -1 83 2 3 1 0.74526 0.67142 0.6594 0.30594 -1 84 2 1 1 0.76226 0.68819 0.6318 0.27628 -1 85 1 1 1 0.7758 0.73023 0.58491 0.29002 -1 86 1 1 1 0.77074 0.74821 0.58291 0.28925 -1 87 1 1 1 0.77089 0.79434 0.57504 0.32894 -1 88 1 1 1 0.74567 0.8 0.55285 0.30923 -1 89 1 1 1 0.7727 0.8 0.59163 0.31176 -1 90 1 1 1 0.79157 0.8 0.5741 0.33049 -1 91 1 1 1 0.8 0.8 0.56745 0.33548 -1 92 1 3 0 0.8 0.77512 0.59173 0.36604 -1 93 1 1 0 0.77964 0.77689 0.65552 0.29529 -1 94 1 4 0 0.72323 0.77346 0.68053 0.28964 -1 95 1 4 0 0.7587 0.79182 0.68303 0.28661 -1 96 1 2 1 0.76904 0.78153 0.69918 0.25219 -1 97 1 2 1 0.77612 0.8 0.7122 0.27558 -1 98 1 4 1 0.79077 0.79734 0.71788 0.28339 -1 99 2 4 1 0.76885 0.778 0.73227 0.29194 -1 100 2 2 0 0.72235 0.76099 0.72207 0.28469 -1 101 2 1 1 0.75343 0.75863 0.68128 0.29834 -1 102 1 1 1 0.77836 0.75896 0.6992 0.29074 -1 103 1 1 0 0.76782 0.74809 0.67502 0.27929 -1 104 2 4 0 0.76299 0.79317 0.66158 0.31297 -1 105 2 3 1 0.76924 0.8 0.64813 0.30434 -1 106 2 1 0 0.79236 0.76987 0.63234 0.29248 -1 107 2 3 0 0.76225 0.74234 0.62737 0.34844 -1 108 1 2 1 0.75963 0.71965 0.63631 0.31392 -1 109 1 2 0 0.78157 0.65906 0.63594 0.29344 -1 110 1 4 0 0.8 0.6691 0.63189 0.33999 -1 111 2 3 1 0.76426 0.64471 0.60207 0.27577 -1 112 2 2 1 0.74667 0.66462 0.62046 0.26335 -1 113 1 3 1 0.78458 0.63884 0.64195 0.27218 -1 114 2 3 0 0.79243 0.63824 0.63688 0.27592 -1 115 1 2 1 0.79322 0.65028 0.62034 0.25584 -1 116 1 4 1 0.79914 0.66745 0.60886 0.25548 -1 117 2 3 1 0.79739 0.61932 0.61802 0.28086 -1 118 2 4 0 0.79022 0.61075 0.61969 0.26407 -1 119 2 3 1 0.8 0.62074 0.62673 0.27659 -1 120 2 1 1 0.8 0.62032 0.57944 0.28841 -1 121 1 4 1 0.79253 0.61165 0.555 0.26186 -1 122 2 4 0 0.8 0.62946 0.54182 0.25526 -1 123 1 3 1 0.79597 0.60834 0.5357 0.2 -1 124 2 3 0 0.78078 0.60309 0.55323 0.22367 -1 125 1 1 1 0.78059 0.59006 0.5389 0.20545 -1 126 1 1 1 0.7415 0.5477 0.53843 0.2 -1 127 1 1 1 0.72498 0.55081 0.54774 0.2 -1 128 1 3 0 0.7273 0.53482 0.54397 0.23411 -1 129 1 1 1 0.6983 0.53396 0.57112 0.26527 -1 130 1 3 0 0.67184 0.55217 0.54923 0.26093 -1 131 1 1 1 0.64299 0.4833 0.56131 0.27607 -1 132 1 4 0 0.64678 0.48409 0.55659 0.26744 -1 133 1 1 1 0.66958 0.48672 0.55672 0.28704 -1 134 1 4 0 0.71353 0.43812 0.54296 0.26765 -1 135 1 1 1 0.72913 0.45831 0.55595 0.26157 -1 136 1 3 1 0.71214 0.40894 0.57912 0.27759 -1 137 2 3 1 0.72246 0.3716 0.5666 0.25731 -1 138 2 3 0 0.70016 0.33562 0.53811 0.26686 -1 139 1 1 0 0.68348 0.29021 0.5032 0.2907 -1 141 1 2 1 0.70413 0.24533 0.53268 0.31855 -1 142 1 2 0 0.74585 0.23758 0.54789 0.32516 -1 143 2 3 0 0.75878 0.20683 0.54172 0.32643 -1 144 1 1 1 0.75508 0.2 0.54123 0.33066 -1 145 1 1 1 0.75405 0.2 0.50283 0.33762 -1 146 1 4 0 0.72616 0.21818 0.51489 0.34734 -1 147 1 1 1 0.72165 0.2146 0.52902 0.33863 -1 148 1 1 1 0.76338 0.22901 0.53995 0.32508 -1 149 1 3 0 0.8 0.24977 0.55147 0.34688 -1 150 1 4 1 0.8 0.22491 0.55515 0.38301 -1 151 2 4 0 0.76821 0.26234 0.54065 0.37305 -1 152 2 3 0 0.77307 0.22488 0.58349 0.37869 -1 153 1 1 1 0.77173 0.21431 0.53551 0.42413 -1 154 1 1 0 0.75927 0.20014 0.50704 0.42257 -1 155 1 3 0 0.75921 0.21264 0.50199 0.38167 -1 156 1 2 0 0.74445 0.22054 0.51196 0.33042 -1 157 2 1 0 0.72395 0.21222 0.48676 0.33988 -1 158 2 1 0 0.71999 0.2298 0.51039 0.3507 -1 159 2 4 1 0.72939 0.2308 0.54111 0.32357 -1 160 2 2 0 0.69386 0.21052 0.54663 0.27117 -1 161 2 4 1 0.69174 0.2 0.53472 0.28176 -1 162 2 4 0 0.71402 0.2 0.59491 0.26687 -1 163 2 3 0 0.71077 0.2 0.5787 0.29751 -1 164 1 4 0 0.70963 0.2 0.60455 0.28655 -1 165 1 1 1 0.73785 0.2 
0.60482 0.27845 -1 166 1 1 1 0.75026 0.2 0.60278 0.29223 -1 167 1 1 1 0.78057 0.2 0.59516 0.29242 -1 168 1 1 1 0.7938 0.20923 0.53569 0.27625 -1 169 1 4 0 0.77124 0.25164 0.47943 0.29059 -1 170 1 3 0 0.77023 0.2788 0.50377 0.25799 -1 171 1 1 1 0.76646 0.27905 0.51914 0.26122 -1 172 1 1 1 0.74042 0.24415 0.5069 0.27107 -1 173 1 1 1 0.73021 0.27041 0.4785 0.26917 -1 174 1 4 0 0.71286 0.28303 0.4701 0.29255 -1 175 1 1 0 0.67608 0.30914 0.48553 0.27482 -1 176 1 2 0 0.72568 0.28528 0.46698 0.28983 -1 177 2 3 0 0.75068 0.32288 0.51553 0.32661 -1 178 1 3 1 0.68976 0.33437 0.57487 0.30929 -1 179 2 3 1 0.63552 0.32788 0.56683 0.28999 -1 180 2 3 0 0.65651 0.29706 0.64643 0.32216 -1 181 1 1 0 0.63992 0.28636 0.65593 0.30065 -1 182 1 1 1 0.63118 0.29203 0.61181 0.24868 -1 183 1 1 0 0.61433 0.30691 0.58943 0.26967 -1 184 1 1 1 0.64362 0.28234 0.59775 0.25273 -1 185 2 4 0 0.65589 0.2 0.63046 0.22552 -1 186 2 4 0 0.64753 0.21033 0.62343 0.23167 -1 187 2 3 1 0.6708 0.23303 0.58866 0.24963 -1 188 2 3 1 0.68793 0.2 0.59113 0.30878 -1 189 2 3 1 0.70132 0.2 0.57037 0.30299 -1 191 1 4 1 0.70615 0.23807 0.57935 0.30751 -1 192 2 4 0 0.69038 0.24958 0.56007 0.27807 -1 193 2 4 0 0.72402 0.24868 0.58419 0.29444 -1 194 1 1 1 0.74722 0.22597 0.57091 0.27845 -1 195 1 1 1 0.77007 0.25026 0.59727 0.26951 -1 196 1 3 0 0.75861 0.24017 0.58072 0.24954 -1 197 2 4 0 0.74568 0.2 0.58408 0.24979 -1 198 1 1 1 0.78681 0.21341 0.56264 0.20372 -1 199 1 1 1 0.7694 0.24506 0.54298 0.2 -1 200 1 1 1 0.8 0.22759 0.49432 0.2 -1 201 1 1 0 0.8 0.22705 0.48005 0.2179 -2 1 2 1 1 0.24366 0.21338 0.7897 0.36247 -2 3 2 1 0 0.24195 0.22465 0.7635 0.37649 -2 4 2 2 1 0.24137 0.22427 0.79877 0.3744 -2 5 2 2 0 0.24103 0.2 0.8 0.38687 -2 6 2 3 1 0.2 0.2 0.79295 0.35462 -2 7 2 3 1 0.21009 0.22935 0.79064 0.34995 -2 8 2 2 0 0.2 0.25825 0.79677 0.32497 -2 9 2 1 0 0.2 0.27439 0.77263 0.31415 -2 10 2 2 0 0.25693 0.28699 0.8 0.35165 -2 11 2 3 1 0.23686 0.27897 0.8 0.33176 -2 12 2 3 0 0.2 0.29644 0.78883 0.34925 -2 13 2 2 0 0.21085 0.29313 0.78698 0.38282 -2 14 2 4 0 0.20371 0.30914 0.78273 0.39991 -2 15 2 1 0 0.2 0.27436 0.79031 0.37668 -2 16 2 1 0 0.2 0.31162 0.783 0.38107 -2 17 2 4 1 0.2 0.33142 0.78508 0.39967 -2 18 2 3 1 0.20132 0.3441 0.79349 0.41119 -2 19 2 3 1 0.2 0.2921 0.7947 0.39435 -2 20 2 3 0 0.2 0.28001 0.8 0.38265 -2 21 2 3 0 0.23446 0.29161 0.7848 0.40374 -2 22 2 1 1 0.24324 0.30684 0.78655 0.36654 -2 23 1 3 1 0.25357 0.28896 0.8 0.36812 -2 24 1 2 0 0.247 0.31968 0.778 0.39979 -2 25 2 1 0 0.26191 0.29039 0.78188 0.42514 -2 26 2 4 0 0.24009 0.26705 0.77572 0.43339 -2 27 1 2 0 0.23637 0.27463 0.8 0.44448 -2 28 2 3 1 0.2 0.26527 0.79768 0.43536 -2 29 2 3 0 0.2 0.2249 0.8 0.45377 -2 30 2 1 0 0.27119 0.24548 0.77507 0.47467 -2 31 1 2 0 0.25741 0.25583 0.8 0.43019 -2 32 1 1 0 0.25833 0.25345 0.7833 0.45546 -2 33 1 2 1 0.29274 0.2548 0.75592 0.48444 -2 34 1 2 0 0.24411 0.2674 0.69707 0.50089 -2 35 2 1 0 0.25087 0.29031 0.69606 0.51711 -2 36 1 1 1 0.29422 0.24655 0.7281 0.55837 -2 37 2 2 0 0.28983 0.24619 0.73075 0.64885 -2 38 1 1 0 0.28961 0.22933 0.76907 0.64365 -2 39 2 3 1 0.32305 0.2115 0.72785 0.66863 -2 40 2 3 1 0.32795 0.21391 0.75703 0.68245 -2 41 2 3 1 0.33668 0.2 0.8 0.69042 -2 42 1 4 1 0.32341 0.2 0.7744 0.76419 -2 43 2 3 1 0.2924 0.2 0.77229 0.77877 -2 44 2 2 0 0.29488 0.21148 0.8 0.77328 -2 45 2 3 1 0.32204 0.25048 0.77766 0.8 -2 46 2 2 0 0.29959 0.27915 0.78361 0.8 -2 47 2 2 1 0.30354 0.35484 0.77031 0.77172 -2 48 2 4 1 0.32089 0.33943 0.76879 0.8 -2 49 2 4 1 0.31639 0.30386 0.71735 0.8 -2 50 2 4 0 0.32926 0.34595 0.68555 0.7724 -2 51 
1 1 0 0.29947 0.30318 0.6959 0.78212 -2 52 2 1 0 0.29323 0.29421 0.69798 0.8 -2 53 2 4 1 0.31145 0.28711 0.67731 0.8 -2 54 2 4 1 0.35715 0.26453 0.66623 0.8 -2 55 2 4 1 0.34242 0.25018 0.65922 0.76883 -2 56 2 4 1 0.34459 0.25371 0.68819 0.76716 -2 57 2 2 0 0.39018 0.26396 0.63748 0.78614 -2 58 1 1 1 0.3358 0.23748 0.60919 0.8 -2 59 1 1 0 0.31958 0.21064 0.63817 0.8 -2 60 1 2 0 0.29338 0.2 0.68027 0.79001 -2 61 2 1 1 0.27116 0.2 0.709 0.8 -2 62 2 3 0 0.25717 0.2 0.69624 0.77628 -2 63 2 4 1 0.27483 0.2 0.66719 0.75931 -2 64 2 4 1 0.23855 0.2 0.61004 0.74309 -2 65 2 2 1 0.21736 0.2 0.65247 0.77225 -2 66 1 1 0 0.25099 0.2 0.70211 0.74655 -2 67 2 4 1 0.2702 0.2 0.71121 0.7433 -2 68 2 2 0 0.27338 0.2358 0.65203 0.71806 -2 69 2 4 1 0.2925 0.2 0.65285 0.72883 -2 70 2 2 0 0.31246 0.22217 0.65929 0.75781 -2 71 2 1 0 0.32305 0.2 0.66168 0.75266 -2 72 2 1 0 0.28378 0.2 0.65774 0.78056 -2 73 2 2 0 0.26524 0.20141 0.59448 0.77223 -2 74 1 4 0 0.27387 0.2 0.57972 0.76982 -2 75 1 1 1 0.33482 0.2 0.5624 0.8 -2 76 1 3 1 0.30843 0.22087 0.52495 0.77129 -2 77 2 4 1 0.29104 0.24487 0.53711 0.7695 -2 78 1 2 0 0.26102 0.24152 0.50456 0.77789 -2 79 2 4 1 0.2445 0.24204 0.50356 0.75557 -2 80 2 1 1 0.26642 0.23341 0.50453 0.72099 -2 81 2 4 1 0.27563 0.23117 0.51365 0.73239 -2 82 2 2 0 0.24556 0.23887 0.49212 0.76062 -2 83 1 2 0 0.21118 0.22106 0.54552 0.79201 -2 84 1 2 1 0.2 0.26054 0.52037 0.79404 -2 85 2 4 1 0.23536 0.24661 0.57319 0.8 -2 86 2 2 0 0.23971 0.21726 0.60673 0.7575 -2 87 2 4 1 0.27447 0.21378 0.58475 0.7807 -2 88 2 1 0 0.23447 0.22887 0.53945 0.8 -2 89 2 4 1 0.23122 0.2 0.56969 0.8 -2 90 2 4 1 0.21434 0.2 0.58063 0.8 -2 91 2 4 1 0.20412 0.2 0.5776 0.77905 -2 92 2 1 0 0.23715 0.20107 0.59502 0.78801 -2 93 1 2 0 0.2 0.20172 0.56694 0.8 -2 94 2 4 1 0.2 0.23888 0.56918 0.8 -2 95 2 1 0 0.2 0.22836 0.54608 0.79578 -2 96 2 4 1 0.21792 0.22493 0.55862 0.8 -2 97 2 4 0 0.25765 0.26661 0.57298 0.76303 -2 98 2 1 1 0.25462 0.26054 0.58158 0.76424 -2 99 2 4 1 0.25058 0.2355 0.56115 0.77487 -2 100 2 4 1 0.2352 0.2 0.57613 0.77472 -2 101 2 4 0 0.24936 0.20905 0.55364 0.75352 -2 102 2 1 0 0.2433 0.2 0.55993 0.78065 -2 103 2 2 0 0.25461 0.23537 0.58316 0.7884 -2 104 2 2 0 0.25684 0.24005 0.54965 0.7952 -2 105 1 1 0 0.29907 0.2506 0.55251 0.8 -2 106 1 2 1 0.26851 0.2435 0.54227 0.8 -2 107 1 4 1 0.24851 0.22888 0.55616 0.79765 -2 108 2 4 1 0.26537 0.25165 0.56028 0.77126 -2 109 2 1 0 0.26116 0.25402 0.55846 0.73255 -2 110 2 4 0 0.2603 0.24673 0.58361 0.7276 -2 111 2 2 0 0.28591 0.22322 0.64084 0.7201 -2 112 1 4 0 0.26526 0.20484 0.6863 0.712 -2 113 1 4 1 0.26692 0.2 0.70522 0.72084 -2 114 2 4 1 0.27249 0.21392 0.68892 0.72746 -2 115 2 1 0 0.22902 0.20045 0.74818 0.71253 -2 116 2 4 1 0.2353 0.2 0.77855 0.69805 -2 117 2 4 1 0.20838 0.2 0.78606 0.68928 -2 118 2 1 0 0.20182 0.20659 0.79165 0.67785 -2 119 2 4 1 0.21032 0.247 0.77601 0.74302 -2 120 2 4 1 0.20034 0.25251 0.8 0.70396 -2 121 2 4 1 0.2 0.24629 0.79537 0.68448 -2 122 2 4 1 0.21398 0.29466 0.75251 0.66879 -2 123 2 1 0 0.2 0.31706 0.76204 0.6732 -2 124 2 4 1 0.2 0.30489 0.7534 0.71219 -2 125 2 4 1 0.2 0.32492 0.76137 0.71172 -2 126 2 1 0 0.2 0.35076 0.7997 0.71048 -2 127 2 4 0 0.20503 0.31678 0.79524 0.70346 -2 128 2 4 1 0.20516 0.29861 0.76553 0.69496 -2 129 2 4 1 0.22588 0.30163 0.7683 0.72198 -2 130 2 2 0 0.21011 0.32075 0.77334 0.72815 -2 131 2 4 1 0.21068 0.30684 0.76088 0.73397 -2 132 2 4 1 0.2087 0.30048 0.79883 0.74999 -2 133 2 4 1 0.22202 0.30679 0.8 0.7297 -2 134 2 4 0 0.20441 0.28039 0.77104 0.6871 -2 135 2 4 0 0.2029 0.26801 0.75639 0.66139 -2 136 2 4 0 
0.20636 0.2252 0.741 0.63109 -2 137 2 1 0 0.24226 0.2 0.78649 0.65203 -2 138 2 4 1 0.25766 0.2 0.7582 0.643 -2 139 1 4 0 0.29617 0.2 0.7412 0.59132 -2 140 2 4 1 0.30146 0.2 0.76005 0.61217 -2 141 2 4 0 0.27104 0.2159 0.75701 0.60006 -2 142 2 4 0 0.26798 0.24948 0.7371 0.61118 -2 143 2 4 0 0.25651 0.23851 0.73358 0.60815 -2 144 2 1 0 0.26757 0.27016 0.72062 0.64522 -2 145 2 4 1 0.28294 0.2391 0.75141 0.62282 -2 146 2 1 0 0.28259 0.23563 0.69756 0.61478 -2 147 2 4 1 0.2582 0.24803 0.70625 0.58711 -2 148 2 2 1 0.28571 0.26536 0.70991 0.60658 -2 149 2 2 0 0.29377 0.23557 0.72483 0.59885 -2 150 2 4 0 0.3194 0.25725 0.74524 0.59905 -2 151 2 4 0 0.30979 0.2444 0.74963 0.58005 -2 152 2 4 1 0.35056 0.22948 0.73684 0.58931 -2 153 2 4 0 0.34977 0.22911 0.72578 0.58484 -2 154 2 4 1 0.34519 0.21168 0.71921 0.60472 -2 155 2 4 1 0.36661 0.23326 0.72028 0.57828 -2 156 2 3 0 0.40117 0.25436 0.71302 0.56412 -2 157 2 4 1 0.40102 0.27823 0.66922 0.56995 -2 158 2 3 1 0.35642 0.26836 0.67426 0.55094 -2 159 1 1 1 0.37148 0.29016 0.67501 0.51965 -2 160 2 1 1 0.3358 0.24635 0.66468 0.50215 -2 161 1 4 0 0.35501 0.24552 0.69507 0.50197 -2 162 2 3 0 0.31346 0.23161 0.66735 0.51181 -2 163 1 2 1 0.30964 0.232 0.6475 0.53865 -2 164 2 3 0 0.30373 0.22914 0.62935 0.55306 -2 165 2 3 1 0.31736 0.22369 0.62071 0.54398 -2 166 2 4 0 0.30014 0.25322 0.61517 0.55492 -2 167 2 4 1 0.34385 0.2456 0.58311 0.5534 -2 168 2 4 0 0.3473 0.2477 0.58684 0.57142 -2 169 2 4 0 0.34401 0.27733 0.59587 0.55711 -2 170 1 1 1 0.33799 0.29646 0.62267 0.58141 -2 171 2 2 0 0.36342 0.31122 0.63888 0.60783 -2 172 2 2 0 0.34621 0.32128 0.63943 0.54333 -2 173 2 4 0 0.32895 0.34686 0.68134 0.49852 -2 174 1 1 1 0.37522 0.31644 0.61196 0.4386 -2 175 2 4 0 0.39076 0.33159 0.65 0.44614 -2 176 2 2 0 0.4096 0.34605 0.68745 0.44148 -2 177 2 2 0 0.46425 0.33531 0.66985 0.44431 -2 178 2 4 0 0.48127 0.34427 0.65921 0.43196 -2 179 2 4 0 0.46951 0.32875 0.66862 0.42214 -2 180 2 4 0 0.45978 0.3009 0.65382 0.42035 -2 181 2 1 1 0.46639 0.31441 0.66291 0.41407 -2 182 2 4 0 0.49453 0.3332 0.6395 0.40546 -2 183 2 2 0 0.48048 0.32783 0.637 0.39346 -2 184 1 1 0 0.50093 0.33951 0.60778 0.42871 -2 185 1 4 1 0.47675 0.33238 0.61487 0.43485 -2 186 2 4 0 0.46652 0.35543 0.62031 0.40333 -2 187 1 4 1 0.50299 0.34544 0.60978 0.38389 -2 188 1 1 1 0.51908 0.35843 0.61294 0.38385 -2 189 2 4 1 0.56691 0.37283 0.60469 0.39722 -2 190 2 4 0 0.57641 0.40698 0.65272 0.40517 -2 191 1 1 0 0.61806 0.40434 0.62457 0.38315 -2 192 2 4 1 0.6387 0.43436 0.59972 0.37162 -2 193 2 4 0 0.6537 0.47132 0.56371 0.36873 -2 194 1 4 1 0.64354 0.44272 0.53871 0.37205 -2 195 1 1 0 0.68281 0.4423 0.53232 0.37961 -2 196 1 1 0 0.68423 0.48885 0.52515 0.38681 -2 197 2 4 0 0.69172 0.49761 0.51816 0.37109 -2 198 1 4 0 0.68823 0.49309 0.51419 0.36965 -2 199 1 2 1 0.68377 0.4935 0.50005 0.35935 -2 200 2 4 0 0.67325 0.48124 0.48284 0.34656 -2 201 2 4 1 0.68844 0.47268 0.52266 0.36539 -3 1 1 4 1 0.66883 0.37325 0.76919 0.69293 -3 3 1 2 0 0.67015 0.3856 0.76941 0.72175 -3 4 2 4 1 0.65867 0.38996 0.73512 0.76353 -3 5 2 4 1 0.61271 0.4136 0.70859 0.77052 -3 6 2 4 0 0.61433 0.42465 0.70933 0.8 -3 7 2 4 1 0.5804 0.39622 0.69341 0.8 -3 8 2 4 0 0.51841 0.38227 0.73289 0.8 -3 9 1 2 0 0.53659 0.3558 0.74592 0.8 -3 10 1 4 1 0.52065 0.38466 0.78221 0.8 -3 11 1 1 0 0.5127 0.37854 0.7661 0.78401 -3 12 2 4 1 0.49501 0.43971 0.7905 0.7796 -3 13 2 4 1 0.49142 0.46183 0.74579 0.78366 -3 14 2 4 1 0.49081 0.46637 0.74794 0.77315 -3 15 2 2 1 0.50132 0.47586 0.74207 0.8 -3 16 2 4 0 0.56473 0.46072 0.79825 0.79796 -3 17 1 4 0 0.54207 0.46664 
0.8 0.74878 -3 18 1 2 0 0.58164 0.44106 0.79297 0.72317 -3 19 2 4 1 0.59149 0.45774 0.79293 0.76953 -3 20 2 4 1 0.61672 0.45676 0.77379 0.79815 -3 21 2 2 1 0.62121 0.44059 0.76258 0.8 -3 22 2 2 1 0.63551 0.4599 0.75005 0.76542 -3 23 2 4 1 0.63114 0.46266 0.75579 0.8 -3 24 2 4 0 0.61963 0.49526 0.7527 0.79561 -3 25 2 2 0 0.57841 0.49419 0.72627 0.8 -3 26 2 2 1 0.56152 0.52013 0.78467 0.8 -3 27 2 3 1 0.57869 0.51671 0.78265 0.8 -3 28 2 3 1 0.56639 0.50541 0.75377 0.76181 -3 29 2 2 1 0.53607 0.53711 0.73607 0.77493 -3 30 2 3 1 0.50742 0.57529 0.74619 0.74033 -3 31 2 3 0 0.49056 0.52378 0.73618 0.71541 -3 32 1 3 1 0.4976 0.52854 0.72432 0.70405 -3 33 1 2 1 0.53769 0.5559 0.68652 0.70031 -3 34 1 2 0 0.53378 0.54603 0.67969 0.70818 -3 35 2 3 1 0.52224 0.53683 0.70012 0.73016 -3 36 2 3 0 0.54336 0.51652 0.69302 0.7253 -3 37 1 1 0 0.50921 0.56155 0.67768 0.72735 -3 38 1 2 0 0.52346 0.5659 0.67873 0.73461 -3 39 2 2 1 0.56296 0.54234 0.64272 0.72261 -3 40 2 4 1 0.57085 0.5206 0.67906 0.72352 -3 41 2 4 1 0.58499 0.53196 0.69191 0.72011 -3 42 2 2 0 0.57616 0.51196 0.674 0.74266 -3 43 2 2 0 0.576 0.53392 0.65332 0.75823 -3 44 1 1 1 0.57044 0.52995 0.61126 0.7968 -3 45 1 1 1 0.60101 0.54231 0.60942 0.78605 -3 46 1 1 0 0.57728 0.55258 0.59843 0.8 -3 47 2 1 0 0.55056 0.54806 0.56974 0.8 -3 48 2 4 1 0.55445 0.59867 0.58828 0.7958 -3 49 2 4 0 0.56397 0.57727 0.55507 0.7543 -3 50 2 3 1 0.57406 0.59639 0.54868 0.76199 -3 51 2 3 1 0.5561 0.59867 0.58165 0.75913 -3 52 2 1 1 0.48821 0.63845 0.58467 0.79374 -3 53 1 2 0 0.47204 0.62393 0.60018 0.75774 -3 54 2 4 0 0.48959 0.6457 0.62181 0.73965 -3 55 1 3 1 0.52759 0.60195 0.61241 0.70988 -3 56 1 3 1 0.52772 0.62054 0.57173 0.7234 -3 57 1 1 0 0.50986 0.59709 0.54509 0.73144 -3 58 1 3 0 0.46038 0.60037 0.52496 0.75924 -3 59 2 3 0 0.4769 0.6381 0.50502 0.73557 -3 60 1 1 1 0.4638 0.63734 0.53088 0.73204 -3 61 1 1 0 0.44397 0.62479 0.55098 0.7317 -3 62 1 1 1 0.45771 0.64205 0.56085 0.7122 -3 63 1 3 1 0.41829 0.61723 0.53791 0.65224 -3 64 1 1 0 0.44906 0.58146 0.55191 0.66344 -3 65 2 3 1 0.47217 0.57877 0.525 0.6597 -3 66 2 1 1 0.48396 0.57911 0.48678 0.66715 -3 67 2 3 0 0.48087 0.55254 0.46851 0.68836 -3 68 2 1 0 0.40167 0.54104 0.40646 0.67455 -3 69 1 2 1 0.41253 0.55343 0.41672 0.65517 -3 70 1 2 1 0.42959 0.59563 0.41995 0.68402 -3 71 1 2 1 0.43857 0.59709 0.42729 0.70901 -3 72 1 2 1 0.44418 0.62363 0.4246 0.71959 -3 73 2 4 1 0.4767 0.64183 0.38548 0.78097 -3 74 1 2 1 0.5276 0.62719 0.3852 0.8 -3 75 1 2 1 0.49319 0.624 0.35591 0.8 -3 76 2 4 1 0.54732 0.5904 0.29778 0.8 -3 77 2 4 1 0.54944 0.58123 0.32742 0.77967 -3 78 1 2 1 0.56733 0.53663 0.30483 0.8 -3 79 1 2 0 0.57654 0.53186 0.30929 0.76943 -3 80 2 2 1 0.59232 0.54615 0.32875 0.77195 -3 81 2 4 1 0.6407 0.52331 0.29697 0.8 -3 82 2 4 1 0.63453 0.50234 0.2913 0.76079 -3 83 1 2 1 0.63164 0.53699 0.30748 0.77895 -3 84 1 2 0 0.6282 0.5123 0.30934 0.77445 -3 85 2 4 1 0.60935 0.49884 0.33065 0.74279 -3 86 2 2 0 0.61729 0.54562 0.34929 0.74988 -3 87 1 4 1 0.63495 0.52927 0.31141 0.73159 -3 88 1 1 1 0.6246 0.52432 0.34703 0.73015 -3 89 1 1 1 0.64368 0.48815 0.27377 0.73239 -3 90 2 1 0 0.59542 0.45566 0.26969 0.72239 -3 91 2 1 0 0.59224 0.4519 0.27504 0.69281 -3 92 2 4 1 0.59509 0.5055 0.24022 0.66945 -3 93 2 4 1 0.64672 0.53689 0.22287 0.66914 -3 94 2 4 1 0.63177 0.54698 0.21258 0.68408 -3 95 1 2 0 0.67391 0.57384 0.2072 0.68711 -3 96 2 4 1 0.66292 0.52497 0.2 0.70323 -3 97 2 2 1 0.6416 0.53087 0.20378 0.6965 -3 98 2 4 1 0.60641 0.50909 0.26903 0.6806 -3 99 1 2 1 0.6134 0.48996 0.27622 0.70435 -3 100 2 2 1 0.62637 0.48483 
0.31202 0.73029 -3 101 2 4 1 0.58895 0.44496 0.3198 0.72504 -3 102 1 4 1 0.59891 0.50268 0.29841 0.72913 -3 103 1 2 0 0.63238 0.5181 0.2929 0.73254 -3 104 2 2 1 0.64532 0.51598 0.29077 0.79193 -3 105 2 2 0 0.64278 0.47902 0.28531 0.79905 -3 106 2 4 1 0.62308 0.49617 0.30022 0.8 -3 107 2 1 0 0.66055 0.47591 0.30855 0.78427 -3 108 2 4 1 0.66069 0.49633 0.31414 0.8 -3 109 2 4 1 0.66532 0.51261 0.33326 0.8 -3 110 2 1 1 0.66496 0.51259 0.30694 0.79976 -3 111 2 4 1 0.63477 0.50855 0.35965 0.8 -3 112 2 4 0 0.64212 0.47413 0.32055 0.7694 -3 113 2 4 1 0.60057 0.42494 0.35101 0.77125 -3 114 1 1 1 0.56903 0.38249 0.35041 0.76236 -3 115 1 1 1 0.6015 0.39316 0.36371 0.77496 -3 116 1 1 1 0.60273 0.42415 0.42261 0.77538 -3 117 1 4 1 0.64753 0.36608 0.46082 0.74709 -3 118 2 4 1 0.64442 0.40509 0.48388 0.71915 -3 119 1 4 1 0.65391 0.42951 0.48458 0.7488 -3 120 1 1 1 0.68116 0.4308 0.49861 0.71676 -3 121 1 1 0 0.65563 0.46113 0.47371 0.72506 -3 122 2 1 1 0.69349 0.49043 0.47868 0.73556 -3 123 2 4 1 0.66198 0.48623 0.51209 0.74302 -3 124 2 1 0 0.62501 0.50053 0.52244 0.73455 -3 125 2 4 1 0.65673 0.44638 0.51138 0.75814 -3 126 2 2 1 0.64113 0.45613 0.4999 0.7822 -3 127 2 2 1 0.61183 0.47796 0.47914 0.78129 -3 128 2 4 0 0.62885 0.48371 0.46325 0.76828 -3 129 1 2 0 0.65825 0.46961 0.48531 0.7496 -3 130 1 3 0 0.6435 0.48994 0.53024 0.72654 -3 131 1 2 1 0.66244 0.51286 0.52535 0.7488 -3 132 1 2 0 0.68476 0.54099 0.51799 0.7379 -3 133 2 4 1 0.68301 0.55496 0.51328 0.74206 -3 134 2 2 0 0.67316 0.55361 0.48301 0.75786 -3 135 2 2 1 0.67376 0.53684 0.49156 0.76391 -3 136 2 4 0 0.70431 0.5375 0.49248 0.72144 -3 137 2 2 0 0.73911 0.51031 0.50981 0.69143 -3 138 2 3 0 0.73501 0.54236 0.48455 0.65323 -3 139 1 2 1 0.70711 0.53633 0.51912 0.68392 -3 140 1 2 0 0.68128 0.55276 0.48967 0.66202 -3 141 1 2 1 0.66796 0.51312 0.48063 0.67974 -3 142 1 2 0 0.68706 0.52262 0.45528 0.69269 -3 143 2 4 1 0.67081 0.50414 0.40634 0.68221 -3 144 2 2 1 0.60688 0.4994 0.38689 0.68965 -3 145 2 4 1 0.64122 0.47853 0.39266 0.71406 -3 146 2 4 1 0.66933 0.47368 0.37491 0.69829 -3 147 2 2 0 0.6751 0.52406 0.38091 0.70497 -3 148 2 4 1 0.66144 0.49961 0.37475 0.69052 -3 149 2 4 0 0.63735 0.5103 0.36973 0.69204 -3 150 1 2 1 0.63358 0.48455 0.37815 0.68046 -3 151 1 4 1 0.65883 0.47061 0.3947 0.65703 -3 152 1 2 1 0.6302 0.50495 0.39799 0.65565 -3 153 2 4 1 0.62789 0.48344 0.39312 0.63916 -3 154 1 2 1 0.6335 0.46165 0.41299 0.64529 -3 155 1 2 1 0.64593 0.46122 0.38794 0.66622 -3 156 1 2 0 0.646 0.44097 0.3853 0.6999 -3 157 1 2 0 0.63902 0.45708 0.35352 0.70509 -3 158 2 4 1 0.66877 0.4357 0.31695 0.71684 -3 159 2 4 1 0.66383 0.44026 0.28375 0.73352 -3 160 2 4 1 0.6475 0.43008 0.26323 0.68252 -3 161 2 4 1 0.62258 0.43133 0.24392 0.69062 -3 162 2 1 1 0.65065 0.46271 0.22707 0.71892 -3 163 2 4 1 0.60723 0.44933 0.2092 0.71241 -3 164 1 4 1 0.59875 0.43997 0.21956 0.6914 -3 165 1 1 0 0.55818 0.40711 0.2 0.72182 -3 166 1 1 1 0.60092 0.38929 0.25299 0.74315 -3 167 1 1 1 0.6077 0.36729 0.2275 0.74274 -3 168 1 1 1 0.58144 0.36602 0.24947 0.70624 -3 169 1 1 1 0.58884 0.34827 0.2796 0.71898 -3 170 1 4 1 0.61215 0.37417 0.2637 0.74439 -3 171 1 1 1 0.63596 0.36185 0.26624 0.73248 -3 172 1 1 1 0.61559 0.37883 0.22076 0.73546 -3 173 1 1 1 0.58784 0.39491 0.20025 0.73755 -3 174 1 2 1 0.6596 0.38477 0.24322 0.77936 -3 175 1 1 1 0.64983 0.43784 0.27238 0.77963 -3 176 1 1 0 0.63608 0.43822 0.26457 0.78278 -3 177 2 4 1 0.61948 0.43996 0.23311 0.75512 -3 178 2 4 1 0.61418 0.38824 0.23349 0.75632 -3 179 2 1 0 0.6146 0.37627 0.25115 0.8 -3 180 2 2 0 0.62001 0.34678 0.2671 
0.79487 -3 181 1 2 1 0.615 0.33971 0.28171 0.79225 -3 182 2 2 0 0.61682 0.32518 0.34198 0.79845 -3 183 2 4 1 0.63967 0.31349 0.3434 0.78929 -3 184 2 4 1 0.62524 0.30235 0.32921 0.78309 -3 185 2 4 1 0.65432 0.28414 0.3005 0.77878 -3 186 2 4 1 0.6499 0.28287 0.3494 0.7755 -3 187 2 4 1 0.6312 0.29965 0.3589 0.739 -3 188 2 4 1 0.60689 0.31089 0.35521 0.74163 -3 189 2 4 0 0.62744 0.29311 0.34019 0.75455 -3 190 2 2 0 0.62018 0.30403 0.37572 0.75018 -3 191 1 1 1 0.59118 0.32691 0.3682 0.74053 -3 192 1 3 0 0.62218 0.31464 0.37339 0.72332 -3 193 1 1 1 0.60768 0.30155 0.3907 0.73393 -3 194 1 1 1 0.62445 0.25367 0.40889 0.7381 -3 195 1 4 1 0.58264 0.27604 0.38269 0.73848 -3 196 1 4 1 0.5586 0.23074 0.38086 0.77833 -3 197 1 4 0 0.54563 0.22598 0.36843 0.73306 -3 198 1 1 0 0.5992 0.24965 0.35665 0.72907 -3 199 2 4 0 0.63541 0.24274 0.35439 0.68775 -3 200 1 2 0 0.64018 0.24858 0.36565 0.6627 -3 201 2 3 1 0.65081 0.25388 0.39391 0.67241 -4 1 2 4 0 0.21199 0.54628 0.68794 0.47466 -4 2 2 4 0 0.2 0.534 0.65541 0.47102 -4 3 2 3 1 0.2 0.57876 0.65958 0.47067 -4 4 2 3 1 0.2 0.56797 0.63188 0.42063 -4 6 2 1 1 0.2 0.54476 0.60146 0.47798 -4 7 2 1 0 0.2 0.52605 0.60722 0.47527 -4 8 2 3 0 0.2 0.52271 0.63572 0.47881 -4 9 2 1 0 0.21722 0.55743 0.64484 0.49461 -4 10 2 3 0 0.22466 0.5777 0.67382 0.49864 -4 11 1 4 0 0.22561 0.58815 0.68153 0.5709 -4 12 1 4 0 0.21568 0.56781 0.67591 0.5935 -4 13 1 2 1 0.22308 0.52535 0.71702 0.60735 -4 14 1 2 1 0.20689 0.53131 0.70323 0.60971 -4 15 1 2 1 0.21517 0.57119 0.70512 0.60726 -4 16 1 2 1 0.2067 0.56055 0.72736 0.59942 -4 17 1 2 0 0.2 0.55337 0.74002 0.60865 -4 18 2 3 1 0.2 0.57323 0.78656 0.53756 -4 19 2 3 0 0.24501 0.57013 0.8 0.55964 -4 20 1 1 0 0.22025 0.59802 0.78074 0.58471 -4 21 1 2 1 0.24619 0.58994 0.78157 0.62366 -4 22 1 2 1 0.2425 0.60365 0.77134 0.67168 -4 23 2 4 1 0.26134 0.62914 0.73476 0.66238 -4 24 1 2 1 0.2523 0.67328 0.76341 0.6803 -4 25 2 4 1 0.21905 0.66907 0.74752 0.67259 -4 26 2 1 0 0.2 0.63148 0.74129 0.64534 -4 27 1 2 1 0.22706 0.64474 0.7103 0.64379 -4 28 2 3 1 0.24795 0.71816 0.73498 0.65137 -4 29 2 3 0 0.25824 0.72237 0.71915 0.64535 -4 30 2 4 1 0.2023 0.73493 0.72564 0.63744 -4 31 1 4 1 0.21389 0.75741 0.72671 0.64195 -4 32 2 2 1 0.2 0.77333 0.72097 0.6328 -4 33 2 2 1 0.2 0.77558 0.69994 0.6418 -4 34 2 2 1 0.2 0.78144 0.67943 0.63996 -4 35 1 3 0 0.2 0.8 0.67301 0.61008 -4 36 1 2 0 0.2 0.8 0.68831 0.63528 -4 37 1 2 1 0.2 0.8 0.69131 0.6034 -4 38 2 3 1 0.20971 0.8 0.70393 0.57568 -4 39 2 3 1 0.21621 0.76178 0.71917 0.55994 -4 40 2 1 0 0.22745 0.74196 0.70603 0.55248 -4 41 2 4 1 0.23812 0.75206 0.68627 0.56138 -4 42 1 2 1 0.26515 0.73895 0.69746 0.57138 -4 43 2 4 0 0.28398 0.76918 0.73134 0.59407 -4 44 2 1 1 0.30592 0.75416 0.70629 0.5629 -4 45 1 4 1 0.31918 0.76789 0.70929 0.56458 -4 46 2 3 1 0.29707 0.71374 0.71305 0.56137 -4 47 1 4 0 0.30046 0.66943 0.72947 0.55543 -4 48 1 2 1 0.27898 0.69381 0.70074 0.5995 -4 49 2 4 1 0.26535 0.69816 0.68161 0.58912 -4 50 1 2 1 0.30351 0.72021 0.67091 0.56377 -4 51 2 4 1 0.33934 0.65248 0.66959 0.56201 -4 52 2 2 1 0.3872 0.63757 0.68541 0.58033 -4 53 1 2 0 0.3944 0.6497 0.67534 0.57672 -4 54 2 3 0 0.41049 0.65797 0.6736 0.57501 -4 55 1 2 1 0.36902 0.61196 0.70441 0.62472 -4 56 1 2 1 0.39708 0.658 0.73746 0.67287 -4 57 1 3 0 0.33752 0.69203 0.75871 0.63325 -4 58 1 4 1 0.33235 0.68372 0.79774 0.65964 -4 59 2 1 0 0.29321 0.66003 0.8 0.68977 -4 60 2 2 1 0.2689 0.68852 0.79386 0.6942 -4 61 1 4 0 0.27333 0.70509 0.78656 0.68412 -4 62 1 1 1 0.29007 0.6768 0.8 0.72379 -4 63 1 1 1 0.27771 0.69015 0.79024 0.74027 -4 64 1 1 0 
0.27473 0.72906 0.76889 0.73509 -4 65 1 1 1 0.27454 0.7235 0.75293 0.74844 -4 66 2 3 1 0.27372 0.71364 0.77559 0.75522 -4 67 2 4 1 0.29453 0.64955 0.77966 0.74915 -4 68 2 3 1 0.28646 0.64986 0.79155 0.78968 -4 69 2 4 1 0.26537 0.63016 0.77741 0.78975 -4 70 2 3 1 0.28141 0.66991 0.74299 0.79249 -4 71 1 1 0 0.29099 0.66493 0.79439 0.79014 -4 72 1 4 1 0.31207 0.64723 0.79159 0.78607 -4 73 2 3 1 0.26992 0.64794 0.762 0.79788 -4 74 1 1 0 0.28006 0.57867 0.78492 0.78075 -4 75 2 3 1 0.25879 0.61897 0.77092 0.7282 -4 76 2 1 0 0.21374 0.6422 0.77857 0.7214 -4 77 1 2 1 0.2 0.66219 0.76089 0.71271 -4 78 1 4 1 0.23095 0.63052 0.78842 0.74988 -4 79 2 1 0 0.21211 0.67373 0.75575 0.77481 -4 80 2 4 1 0.22245 0.67839 0.71743 0.78994 -4 81 1 3 1 0.22854 0.67643 0.72384 0.76479 -4 82 2 2 1 0.2 0.65877 0.69777 0.8 -4 83 2 3 1 0.2 0.66073 0.69603 0.8 -4 84 1 1 0 0.2 0.68394 0.70717 0.8 -4 85 2 2 1 0.20384 0.66684 0.75085 0.8 -4 86 1 3 1 0.21624 0.64553 0.77035 0.76305 -4 87 2 4 1 0.22371 0.66605 0.71853 0.79022 -4 88 2 3 1 0.22751 0.70415 0.75329 0.76656 -4 89 2 2 1 0.226 0.71427 0.73792 0.75358 -4 90 2 4 1 0.25551 0.73673 0.75205 0.73508 -4 91 2 1 0 0.24871 0.75519 0.77856 0.70971 -4 92 2 4 1 0.22538 0.69685 0.77893 0.72328 -4 93 2 1 0 0.24222 0.68194 0.77438 0.65775 -4 94 2 4 1 0.25815 0.70205 0.8 0.63861 -4 95 1 1 0 0.27333 0.6861 0.8 0.65481 -4 96 1 4 0 0.27917 0.73356 0.7416 0.67907 -4 97 1 3 1 0.28182 0.71244 0.72781 0.65051 -4 98 1 2 1 0.29413 0.72278 0.7606 0.68453 -4 99 1 2 1 0.2932 0.73863 0.75846 0.68132 -4 100 1 3 1 0.31532 0.69763 0.75898 0.69651 -4 101 1 2 1 0.31612 0.70769 0.74336 0.70307 -4 102 1 2 1 0.3108 0.75304 0.76022 0.6906 -4 103 1 1 0 0.33191 0.79851 0.7261 0.709 -4 104 1 2 1 0.34414 0.79383 0.74593 0.71874 -4 105 1 2 1 0.34368 0.8 0.77512 0.71896 -4 106 1 2 1 0.34419 0.77415 0.78079 0.71189 -4 107 1 2 1 0.37746 0.79259 0.78847 0.70569 -4 108 1 2 1 0.37835 0.79968 0.77385 0.69216 -4 109 1 4 1 0.38553 0.8 0.70916 0.66968 -4 110 1 2 1 0.38058 0.8 0.69244 0.67389 -4 111 1 2 1 0.41382 0.79577 0.70813 0.67588 -4 112 1 2 0 0.36934 0.8 0.66458 0.68569 -4 113 2 3 0 0.35152 0.79807 0.65552 0.63742 -4 114 1 2 0 0.34184 0.8 0.66402 0.60133 -4 115 2 4 1 0.32713 0.8 0.70044 0.5724 -4 116 2 4 1 0.34862 0.8 0.76034 0.54769 -4 117 2 2 1 0.38828 0.8 0.79676 0.5328 -4 118 2 3 1 0.39307 0.8 0.8 0.53451 -4 119 2 4 1 0.39582 0.79676 0.79137 0.50423 -4 120 2 4 0 0.40118 0.8 0.75272 0.46582 -4 121 2 4 0 0.43031 0.8 0.74693 0.48711 -4 122 2 3 1 0.4908 0.8 0.7287 0.48293 -4 123 2 3 0 0.46163 0.8 0.68921 0.46915 -4 124 1 2 1 0.46082 0.76616 0.6904 0.44279 -4 125 1 2 1 0.46621 0.77326 0.68577 0.45188 -4 126 1 2 1 0.41896 0.77596 0.6704 0.42 -4 127 1 3 1 0.40602 0.76215 0.63875 0.37658 -4 128 1 2 1 0.42846 0.78743 0.63211 0.36063 -4 129 2 4 1 0.41213 0.75659 0.6251 0.33481 -4 130 2 4 0 0.41481 0.77493 0.59454 0.28814 -4 131 2 3 1 0.41472 0.79236 0.61594 0.2509 -4 132 2 3 1 0.39245 0.8 0.56165 0.30671 -4 133 1 1 0 0.40761 0.8 0.60698 0.33748 -4 134 1 3 1 0.42713 0.79083 0.64562 0.28783 -4 135 1 4 0 0.42478 0.7864 0.64 0.3166 -4 136 2 2 1 0.41485 0.76076 0.63457 0.29308 -4 137 2 3 1 0.4325 0.79865 0.6661 0.27684 -4 138 1 4 1 0.4363 0.75789 0.66885 0.25926 -4 139 1 2 1 0.4181 0.77731 0.68794 0.28972 -4 140 2 4 0 0.40627 0.76115 0.7093 0.31961 -4 141 2 2 1 0.37519 0.76241 0.69879 0.28667 -4 142 1 3 0 0.3901 0.7591 0.69174 0.30751 -4 143 2 4 1 0.36338 0.71133 0.69568 0.33997 -4 144 1 1 1 0.39841 0.73696 0.72225 0.34169 -4 145 1 1 1 0.41371 0.69938 0.72395 0.36836 -4 146 1 1 0 0.37714 0.71863 0.68051 0.39311 -4 147 1 1 
0 0.32263 0.78138 0.72232 0.35715 -4 148 2 2 1 0.3333 0.76573 0.69665 0.40039 -4 149 1 4 0 0.3169 0.77223 0.65767 0.42938 -4 150 1 3 1 0.27789 0.78937 0.68047 0.46507 -4 151 1 2 1 0.23163 0.77209 0.72142 0.47408 -4 152 2 3 1 0.23568 0.76247 0.73256 0.46965 -4 153 1 2 1 0.26304 0.7484 0.73707 0.48612 -4 154 1 4 0 0.21324 0.72897 0.73612 0.50978 -4 155 1 2 1 0.21614 0.74213 0.72873 0.47975 -4 156 1 2 1 0.22546 0.75149 0.73982 0.51567 -4 157 2 4 0 0.2 0.76702 0.73213 0.50302 -4 158 1 2 0 0.2 0.75791 0.76103 0.49764 -4 159 1 2 0 0.24648 0.74262 0.75323 0.48225 -4 160 2 1 0 0.29166 0.7449 0.75737 0.49812 -4 161 1 2 1 0.29722 0.74881 0.78415 0.49579 -4 162 1 2 1 0.29739 0.748 0.75971 0.49409 -4 163 1 2 1 0.29061 0.74109 0.75713 0.47148 -4 164 1 2 1 0.3241 0.8 0.7562 0.49 -4 165 1 1 0 0.30304 0.8 0.74852 0.43331 -4 166 1 3 1 0.30686 0.8 0.79876 0.4492 -4 167 1 2 1 0.27929 0.79193 0.8 0.45587 -4 168 1 2 1 0.27502 0.8 0.8 0.47165 -4 169 1 2 1 0.27626 0.76784 0.8 0.45688 -4 170 1 2 1 0.26103 0.8 0.79842 0.50098 -4 171 2 1 0 0.25009 0.8 0.8 0.52076 -4 172 2 4 0 0.27084 0.76792 0.79399 0.53205 -4 173 1 4 0 0.25985 0.8 0.8 0.52452 -4 174 1 2 1 0.3034 0.8 0.76969 0.53788 -4 175 1 3 1 0.31203 0.8 0.76893 0.55553 -4 176 1 4 1 0.29759 0.79857 0.8 0.54718 -4 177 1 3 1 0.30694 0.8 0.77632 0.50738 -4 178 1 2 1 0.32205 0.79845 0.75379 0.51164 -4 179 1 2 1 0.3711 0.79228 0.75654 0.49837 -4 180 1 2 0 0.36351 0.75311 0.76007 0.52005 -4 181 1 2 1 0.43037 0.78817 0.7648 0.52297 -4 182 1 2 1 0.38317 0.8 0.76207 0.53649 -4 183 1 2 1 0.40583 0.76667 0.7691 0.51703 -4 184 1 2 1 0.37856 0.74345 0.78541 0.54304 -4 185 1 2 0 0.35465 0.75525 0.76958 0.50629 -4 186 1 1 0 0.34375 0.75051 0.8 0.51524 -4 187 1 2 1 0.32132 0.75855 0.79423 0.53117 -4 188 1 2 1 0.3636 0.77127 0.78654 0.58878 -4 189 1 2 0 0.3275 0.78351 0.77677 0.58923 -4 190 1 2 0 0.27943 0.77737 0.76301 0.61983 -4 191 1 2 1 0.27087 0.77048 0.76726 0.63355 -4 192 2 2 1 0.2608 0.7859 0.79498 0.67274 -4 193 1 2 0 0.24295 0.77068 0.8 0.6974 -4 194 1 1 1 0.21104 0.76327 0.74363 0.68911 -4 195 1 2 1 0.2056 0.77968 0.75447 0.67363 -4 196 1 2 1 0.2 0.78194 0.71332 0.67214 -4 197 1 3 1 0.2 0.79051 0.73342 0.72048 -4 198 1 3 1 0.2 0.8 0.75775 0.73538 -4 199 1 3 1 0.2 0.8 0.71951 0.74666 -4 200 1 1 0 0.2 0.79957 0.72178 0.77312 -4 201 1 3 0 0.2 0.77904 0.76431 0.79704 -5 1 2 3 0 0.52965 0.6281 0.39177 0.2627 -5 2 2 1 1 0.50844 0.64534 0.43629 0.25243 -5 3 2 3 1 0.49916 0.65298 0.43716 0.25631 -5 4 1 1 1 0.46066 0.65858 0.45227 0.24514 -5 5 1 4 0 0.46583 0.67651 0.42093 0.28374 -5 6 1 3 1 0.48888 0.66179 0.441 0.31545 -5 7 1 1 1 0.46957 0.67537 0.39708 0.31115 -5 8 1 1 0 0.47773 0.63281 0.41152 0.30077 -5 9 1 1 0 0.42138 0.60612 0.41382 0.35085 -5 10 2 3 0 0.39058 0.65772 0.43496 0.34415 -5 11 2 4 0 0.36632 0.65864 0.38564 0.32864 -5 12 1 2 1 0.3353 0.67799 0.36702 0.30257 -5 13 1 3 1 0.33005 0.67692 0.37394 0.31 -5 14 1 3 0 0.30704 0.70034 0.38948 0.29147 -5 15 1 2 0 0.30188 0.67684 0.37848 0.30749 -5 16 2 3 0 0.31777 0.70184 0.37308 0.33226 -5 17 2 4 0 0.30146 0.68226 0.36574 0.35462 -5 18 1 2 0 0.30953 0.70445 0.36379 0.35352 -5 19 1 1 0 0.29945 0.71922 0.36604 0.36233 -5 21 1 1 0 0.26136 0.76773 0.33342 0.40977 -5 22 1 2 1 0.25017 0.79726 0.26784 0.44439 -5 23 1 4 1 0.25924 0.8 0.24602 0.39868 -5 24 1 2 1 0.25417 0.8 0.26601 0.39396 -5 25 1 2 1 0.24727 0.8 0.28345 0.40097 -5 26 1 2 1 0.24463 0.8 0.27493 0.41779 -5 27 1 2 1 0.22767 0.75664 0.25281 0.37704 -5 28 1 4 1 0.24347 0.75487 0.25652 0.36365 -5 29 1 2 1 0.25231 0.72268 0.27731 0.35213 -5 30 1 4 0 0.25335 0.6809 
0.32021 0.34899 -5 31 1 4 1 0.26974 0.64092 0.25591 0.36438 -5 32 1 2 1 0.26745 0.66799 0.2717 0.34281 -5 33 1 2 1 0.28884 0.69135 0.26879 0.34217 -5 34 1 4 0 0.29497 0.69864 0.25664 0.33734 -5 35 1 2 1 0.30562 0.64968 0.24518 0.32997 -5 36 1 2 1 0.28868 0.66533 0.30171 0.30097 -5 37 1 2 0 0.28809 0.7375 0.3034 0.30363 -5 38 1 4 0 0.31865 0.71161 0.30639 0.28397 -5 39 1 1 0 0.31807 0.64858 0.29697 0.30764 -5 40 1 1 0 0.31593 0.65613 0.33298 0.31225 -5 41 1 3 1 0.33026 0.61258 0.33138 0.32014 -5 42 1 2 1 0.32697 0.61912 0.34696 0.33464 -5 43 1 3 0 0.27858 0.63891 0.35506 0.31018 -5 44 1 4 1 0.32086 0.66656 0.41123 0.28709 -5 45 1 2 1 0.34782 0.66129 0.41197 0.2836 -5 46 1 2 1 0.3288 0.70515 0.42619 0.30467 -5 47 1 2 1 0.31461 0.7261 0.46665 0.28781 -5 48 1 2 1 0.29798 0.75841 0.45923 0.24544 -5 49 1 1 0 0.32415 0.74721 0.45376 0.23062 -5 50 1 4 1 0.30859 0.73631 0.42276 0.25451 -5 51 1 4 1 0.30114 0.70529 0.43194 0.24206 -5 52 1 4 0 0.29249 0.67129 0.43607 0.20447 -5 53 1 2 1 0.28941 0.65402 0.47464 0.20202 -5 54 1 4 0 0.28255 0.65782 0.44258 0.24802 -5 55 1 2 0 0.29205 0.65442 0.42603 0.2763 -5 56 1 2 1 0.28681 0.68052 0.43304 0.25667 -5 57 1 2 1 0.28534 0.69036 0.43969 0.30449 -5 58 1 4 1 0.28727 0.72614 0.40972 0.28317 -5 59 1 2 0 0.29809 0.73427 0.40003 0.25991 -5 60 1 2 1 0.32128 0.72385 0.38134 0.25928 -5 61 1 2 1 0.2904 0.77418 0.40214 0.22237 -5 62 1 2 1 0.31291 0.76574 0.39228 0.23189 -5 63 1 2 0 0.31813 0.74611 0.37152 0.21661 -5 64 1 4 1 0.34169 0.72641 0.37578 0.23515 -5 65 1 1 0 0.39352 0.70822 0.32018 0.23678 -5 66 1 4 0 0.35239 0.70569 0.33043 0.25038 -5 67 1 1 0 0.31002 0.73202 0.30254 0.22323 -5 68 1 2 1 0.32702 0.73928 0.32406 0.22419 -5 69 1 2 1 0.32569 0.74191 0.3323 0.2288 -5 70 1 2 0 0.31631 0.75926 0.35622 0.20484 -5 71 1 2 1 0.34697 0.7608 0.3981 0.2 -5 72 1 4 0 0.36965 0.74103 0.41356 0.20749 -5 73 1 4 1 0.33203 0.75547 0.40478 0.24049 -5 74 1 2 0 0.3359 0.8 0.41224 0.23604 -5 75 1 2 1 0.38071 0.77505 0.40267 0.23514 -5 76 1 2 0 0.35913 0.7656 0.44632 0.22138 -5 77 1 2 1 0.32985 0.79312 0.47177 0.24763 -5 78 1 2 1 0.34528 0.72516 0.45731 0.25059 -5 79 1 2 1 0.42887 0.70956 0.52762 0.22566 -5 80 1 2 1 0.50375 0.70408 0.55354 0.24068 -5 81 1 2 1 0.49584 0.69185 0.52126 0.21029 -5 82 1 2 1 0.49765 0.68081 0.51965 0.21723 -5 83 1 4 0 0.47827 0.70016 0.5245 0.22204 -5 84 1 2 0 0.49644 0.72369 0.54001 0.22711 -5 85 1 2 1 0.50782 0.73512 0.5403 0.24375 -5 86 1 2 1 0.48393 0.6719 0.54166 0.22529 -5 87 1 2 0 0.48789 0.6832 0.54899 0.23012 -5 88 1 2 1 0.45357 0.68183 0.54698 0.23454 -5 89 1 2 1 0.43108 0.6934 0.50771 0.28144 -5 90 1 2 1 0.41876 0.69745 0.50987 0.29576 -5 91 1 3 0 0.38172 0.67906 0.49969 0.29294 -5 92 1 2 1 0.37691 0.68526 0.47025 0.2863 -5 93 1 2 1 0.3854 0.66665 0.42952 0.27794 -5 94 1 2 1 0.4366 0.66658 0.43534 0.29518 -5 95 1 2 0 0.42289 0.69998 0.41894 0.35091 -5 96 1 4 0 0.40661 0.68082 0.40679 0.35538 -5 97 1 4 0 0.40432 0.69434 0.43249 0.38228 -5 98 1 4 0 0.39867 0.7264 0.39862 0.32107 -5 99 1 2 1 0.41113 0.6887 0.459 0.32047 -5 100 2 3 0 0.37729 0.72178 0.47418 0.33235 -5 101 1 2 1 0.3844 0.77754 0.48317 0.28709 -5 102 1 2 1 0.36407 0.79627 0.47854 0.29967 -5 103 1 2 1 0.37211 0.8 0.49278 0.26266 -5 104 1 4 0 0.36476 0.8 0.51316 0.2918 -5 105 1 2 1 0.37656 0.7832 0.52443 0.31781 -5 106 1 4 0 0.361 0.75417 0.51713 0.3391 -5 107 1 2 1 0.34127 0.69674 0.51345 0.33678 -5 108 1 3 1 0.36536 0.69128 0.54557 0.37853 -5 109 1 2 0 0.36782 0.68772 0.50025 0.3886 -5 110 1 3 1 0.37694 0.66622 0.52168 0.3531 -5 111 2 3 0 0.40396 0.67503 0.51225 0.35866 -5 112 1 3 0 
0.39044 0.77402 0.48213 0.36963 -5 113 1 2 1 0.41819 0.76111 0.42435 0.36787 -5 114 1 3 0 0.43218 0.74342 0.45394 0.37659 -5 115 1 2 1 0.41543 0.7167 0.43029 0.37865 -5 116 1 2 1 0.43204 0.74695 0.47116 0.35511 -5 117 1 2 0 0.42545 0.73504 0.48081 0.38071 -5 118 1 4 1 0.40956 0.76826 0.48392 0.37526 -5 119 1 2 1 0.44331 0.7724 0.493 0.35941 -5 120 1 2 1 0.42941 0.74261 0.48721 0.32865 -5 121 1 2 1 0.46223 0.7079 0.495 0.34236 -5 122 1 4 1 0.45196 0.74791 0.51239 0.30726 -5 123 1 2 1 0.46976 0.7212 0.50553 0.29633 -5 124 1 2 1 0.49744 0.72772 0.47922 0.32832 -5 125 1 4 1 0.48511 0.70999 0.44181 0.35508 -5 126 1 2 1 0.49698 0.72154 0.4094 0.33259 -5 127 1 2 0 0.55174 0.72168 0.34913 0.29959 -5 128 1 2 1 0.56839 0.74423 0.36314 0.29836 -5 129 1 2 1 0.56329 0.74977 0.30709 0.29901 -5 130 1 4 0 0.53117 0.71506 0.30289 0.29889 -5 131 1 2 1 0.53059 0.72266 0.29907 0.27074 -5 132 2 4 0 0.52097 0.73037 0.31229 0.26118 -5 133 1 2 1 0.52505 0.73778 0.30595 0.26641 -5 134 1 2 1 0.51804 0.74373 0.29208 0.22722 -5 135 1 4 0 0.50817 0.69914 0.29086 0.2444 -5 136 1 2 0 0.46426 0.64347 0.29607 0.24786 -5 137 1 2 1 0.45112 0.65173 0.28418 0.2684 -5 138 1 2 1 0.46582 0.63202 0.27425 0.2506 -5 139 1 2 1 0.52614 0.64221 0.30124 0.26622 -5 140 1 2 1 0.48691 0.62286 0.24835 0.26166 -5 141 1 2 1 0.49674 0.65637 0.26224 0.26184 -5 142 1 2 1 0.50251 0.66832 0.27825 0.22906 -5 143 1 2 0 0.52561 0.63081 0.2888 0.25572 -5 144 1 2 0 0.5582 0.63455 0.31322 0.2142 -5 145 1 1 0 0.56369 0.67699 0.28798 0.25044 -5 146 1 2 0 0.59611 0.68733 0.30053 0.26272 -5 147 2 4 0 0.62406 0.68451 0.27671 0.28084 -5 148 2 3 0 0.62594 0.66005 0.2575 0.23694 -5 149 1 2 1 0.60473 0.64258 0.26584 0.2 -5 150 1 2 0 0.55418 0.64354 0.25955 0.2 -5 151 1 2 1 0.49225 0.65877 0.22367 0.2 -5 152 1 4 0 0.48977 0.67444 0.22502 0.2 -5 153 1 2 1 0.49144 0.68793 0.23937 0.2251 -5 154 1 2 1 0.51661 0.67634 0.25181 0.23167 -5 155 1 3 0 0.49595 0.61461 0.27478 0.25776 -5 156 1 2 0 0.51275 0.58055 0.29726 0.29554 -5 157 1 2 1 0.54337 0.57698 0.34097 0.29997 -5 158 1 2 0 0.54004 0.56746 0.33991 0.34567 -5 159 1 2 1 0.5505 0.58749 0.37857 0.33782 -5 160 1 2 0 0.55734 0.58047 0.31952 0.32997 -5 161 1 2 1 0.57778 0.58586 0.32935 0.29741 -5 162 1 3 0 0.61467 0.56721 0.34121 0.29956 -5 163 1 2 0 0.61045 0.60386 0.33623 0.31601 -5 164 1 2 1 0.61667 0.65245 0.37916 0.34916 -5 165 1 4 0 0.60631 0.64049 0.37032 0.32187 -5 166 1 4 1 0.56741 0.6463 0.39292 0.26194 -5 167 1 4 1 0.55779 0.66226 0.35642 0.30488 -5 168 1 2 1 0.60508 0.65471 0.33749 0.31078 -5 169 2 4 0 0.58856 0.68126 0.3558 0.29629 -5 170 1 4 0 0.6058 0.67863 0.36828 0.29849 -5 171 1 4 0 0.63924 0.72809 0.37309 0.27935 -5 172 1 2 0 0.61086 0.76142 0.32803 0.31131 -5 173 1 4 0 0.56857 0.72348 0.4022 0.2991 -5 174 1 2 1 0.57425 0.75776 0.38847 0.31192 -5 175 1 2 1 0.58034 0.73465 0.38833 0.33734 -5 176 1 3 1 0.54472 0.70354 0.39372 0.35991 -5 177 1 3 0 0.58202 0.70963 0.34559 0.35314 -5 178 1 2 1 0.55976 0.69322 0.34919 0.33378 -5 179 1 2 0 0.58258 0.70533 0.3781 0.31117 -5 180 1 2 1 0.55612 0.70875 0.43954 0.32156 -5 181 1 3 1 0.56855 0.67537 0.44562 0.32888 -5 182 1 4 1 0.59863 0.68329 0.42186 0.35163 -5 183 1 3 0 0.60997 0.68519 0.4303 0.37683 -5 184 1 2 0 0.60248 0.71019 0.41902 0.38277 -5 185 1 2 1 0.59809 0.76062 0.43002 0.38323 -5 186 1 4 1 0.6081 0.77644 0.47993 0.38275 -5 187 1 2 1 0.61408 0.79223 0.45948 0.39387 -5 188 1 2 0 0.60928 0.8 0.45733 0.43246 -5 189 1 3 1 0.60294 0.78355 0.42614 0.43557 -5 190 1 3 0 0.5558 0.78433 0.36131 0.4455 -5 191 2 4 0 0.60722 0.77838 0.36265 0.45639 -5 192 1 4 1 
0.56958 0.74974 0.39301 0.46816 -5 193 1 2 1 0.57706 0.73897 0.38343 0.45477 -5 194 1 2 1 0.58157 0.70094 0.39122 0.42132 -5 195 1 2 1 0.60293 0.70089 0.38323 0.39178 -5 196 1 2 1 0.57593 0.66786 0.43664 0.4349 -5 197 1 2 0 0.60465 0.64527 0.44414 0.40635 -5 198 1 4 0 0.5616 0.64003 0.46539 0.42425 -5 199 1 2 1 0.58718 0.58884 0.45605 0.43693 -5 200 1 2 0 0.58107 0.59477 0.40883 0.42763 -5 201 1 2 0 0.60801 0.56536 0.38925 0.43773 -6 2 2 4 0 0.73844 0.64629 0.26467 0.27395 -6 3 2 3 0 0.69228 0.64001 0.24449 0.2622 -6 4 1 2 0 0.68433 0.59579 0.25212 0.29312 -6 5 1 1 1 0.69212 0.58937 0.24595 0.3045 -6 6 1 1 1 0.6607 0.53947 0.24873 0.32325 -6 7 1 1 1 0.66944 0.47849 0.24066 0.2849 -6 8 1 1 1 0.63452 0.50521 0.25215 0.28294 -6 9 1 3 0 0.65345 0.53719 0.26617 0.28657 -6 10 2 4 0 0.64699 0.52901 0.22662 0.27622 -6 11 1 1 1 0.61869 0.5252 0.2135 0.26357 -6 12 1 1 0 0.60212 0.53859 0.24008 0.24272 -6 13 1 2 1 0.65511 0.58001 0.26076 0.25046 -6 14 1 2 0 0.63622 0.5112 0.20641 0.28391 -6 15 1 2 1 0.61028 0.53644 0.2 0.26336 -6 16 1 2 1 0.60128 0.53024 0.22805 0.27728 -6 17 1 2 0 0.65173 0.56066 0.2 0.25872 -6 18 1 2 1 0.67288 0.5652 0.2 0.21137 -6 19 1 2 1 0.66971 0.49274 0.2218 0.22404 -6 20 1 2 0 0.68494 0.53305 0.22901 0.22827 -6 21 1 2 0 0.70233 0.55296 0.2461 0.22885 -6 22 2 2 1 0.70664 0.58177 0.2493 0.27281 -6 23 2 4 0 0.68894 0.55935 0.20401 0.2927 -6 24 1 2 0 0.63312 0.52932 0.2 0.25501 -6 26 2 3 1 0.67007 0.49809 0.2 0.29855 -6 27 2 1 1 0.70116 0.47996 0.20695 0.28267 -6 28 2 1 1 0.71325 0.49454 0.2 0.26681 -6 29 1 1 1 0.72645 0.43744 0.2 0.3196 -6 30 1 1 1 0.74659 0.44562 0.2 0.31482 -6 31 1 1 1 0.76222 0.42447 0.2 0.32081 -6 32 1 1 1 0.73448 0.43376 0.2 0.34296 -6 33 1 1 1 0.72797 0.43597 0.2 0.35101 -6 34 1 1 1 0.75074 0.46387 0.2 0.37183 -6 36 1 3 0 0.7328 0.45374 0.20442 0.42417 -6 37 2 4 0 0.7252 0.48332 0.2 0.40393 -6 38 1 1 1 0.73625 0.48365 0.2 0.41363 -6 39 1 4 1 0.70231 0.49133 0.21054 0.38507 -6 40 1 1 1 0.72107 0.49519 0.2 0.39185 -6 41 1 4 0 0.71986 0.48078 0.24709 0.37263 -6 42 1 1 1 0.7031 0.49023 0.2 0.3371 -6 43 1 1 1 0.72264 0.50759 0.2 0.37898 -6 44 1 1 1 0.71507 0.49493 0.20433 0.39462 -6 45 1 1 1 0.7487 0.48231 0.21516 0.36209 -6 46 1 4 1 0.77837 0.4936 0.2 0.37251 -6 47 1 1 0 0.8 0.4782 0.21425 0.36112 -6 48 1 4 0 0.79292 0.46445 0.21974 0.38153 -6 49 2 4 0 0.79228 0.43 0.21181 0.33966 -6 50 1 1 1 0.8 0.42849 0.21877 0.30777 -6 51 1 1 1 0.8 0.41144 0.21003 0.32592 -6 52 1 1 1 0.74326 0.40335 0.2 0.33955 -6 53 1 1 0 0.75236 0.42683 0.2552 0.33861 -6 54 1 1 0 0.77285 0.43779 0.28617 0.32257 -6 55 1 2 1 0.78413 0.42864 0.31439 0.26735 -6 56 1 2 1 0.75446 0.43688 0.29642 0.21333 -6 57 1 4 0 0.75827 0.46732 0.29081 0.2 -6 58 1 2 0 0.76712 0.44268 0.29863 0.20607 -6 59 1 1 1 0.76093 0.46029 0.29629 0.22507 -6 60 1 1 0 0.75172 0.4466 0.27871 0.20603 -6 61 1 4 0 0.76716 0.45762 0.29448 0.20525 -6 62 1 2 1 0.77346 0.48289 0.27256 0.2 -6 63 1 2 0 0.78575 0.49385 0.28216 0.20283 -6 64 1 3 0 0.8 0.53941 0.2776 0.2 -6 65 1 3 0 0.79417 0.55878 0.26483 0.20866 -6 66 1 2 1 0.8 0.55414 0.30446 0.21184 -6 67 1 2 0 0.76477 0.53706 0.30028 0.21075 -6 68 2 2 0 0.79557 0.50808 0.32894 0.24184 -6 69 1 1 1 0.79729 0.50847 0.34599 0.2038 -6 70 1 1 1 0.77915 0.52111 0.36398 0.2 -6 71 1 1 1 0.75315 0.4976 0.37342 0.2 -6 72 1 4 0 0.79673 0.52013 0.36636 0.2 -6 73 1 1 1 0.79215 0.53026 0.36133 0.20624 -6 74 1 1 1 0.79701 0.4543 0.3809 0.2 -6 75 1 1 1 0.78056 0.44464 0.38713 0.2141 -6 76 1 1 1 0.76446 0.46656 0.34142 0.2 -6 77 1 3 1 0.74452 0.47535 0.3358 0.2 -6 78 2 3 0 0.76853 0.48442 0.32546 
0.21197 -6 79 1 1 1 0.8 0.43554 0.32101 0.22591 -6 80 1 1 1 0.8 0.41821 0.30145 0.23128 -6 81 1 1 1 0.7939 0.41732 0.32749 0.23821 -6 82 1 3 0 0.8 0.42489 0.40595 0.23603 -6 83 1 1 1 0.772 0.37394 0.36878 0.2 -6 84 1 1 1 0.8 0.38051 0.35345 0.20285 -6 85 1 1 1 0.76923 0.37481 0.35245 0.21921 -6 86 1 4 0 0.78747 0.3609 0.3549 0.21975 -6 87 1 1 1 0.7787 0.36849 0.38117 0.21094 -6 88 1 1 1 0.77307 0.36926 0.38108 0.22898 -6 89 1 3 1 0.75185 0.354 0.37251 0.21602 -6 90 1 3 1 0.73574 0.34884 0.37385 0.23082 -6 91 1 1 1 0.75935 0.35762 0.36155 0.22633 -6 92 1 3 0 0.7433 0.35534 0.38664 0.2 -6 93 1 1 1 0.73787 0.38545 0.38875 0.24564 -6 94 1 1 1 0.75313 0.388 0.36282 0.23557 -6 95 1 1 1 0.76958 0.40559 0.35864 0.20505 -6 96 1 1 0 0.74335 0.37624 0.33644 0.21493 -6 97 1 3 1 0.76881 0.38935 0.31529 0.23183 -6 98 1 3 1 0.8 0.44251 0.3035 0.29254 -6 99 1 1 1 0.79365 0.46767 0.2849 0.29204 -6 100 1 1 1 0.76134 0.45193 0.25515 0.333 -6 101 1 1 1 0.75759 0.47852 0.28939 0.3302 -6 102 1 1 1 0.74885 0.45144 0.32843 0.29179 -6 103 1 3 0 0.746 0.47158 0.35982 0.25733 -6 104 1 3 1 0.69118 0.51695 0.38424 0.2 -6 105 1 1 1 0.70272 0.4799 0.41213 0.2 -6 106 1 1 1 0.74316 0.49883 0.42373 0.22409 -6 107 1 1 1 0.76885 0.49184 0.44116 0.2596 -6 108 1 1 1 0.72596 0.45403 0.42147 0.29699 -6 109 1 1 1 0.73449 0.46472 0.43617 0.25409 -6 110 1 3 1 0.74143 0.43229 0.45642 0.26774 -6 111 1 1 1 0.73912 0.41536 0.47221 0.28046 -6 112 1 1 1 0.76965 0.4156 0.48204 0.28448 -6 113 1 1 0 0.8 0.43366 0.4671 0.25635 -6 114 1 3 0 0.8 0.44463 0.45515 0.23894 -6 115 1 1 1 0.76283 0.42759 0.44516 0.26106 -6 116 1 3 1 0.76396 0.39602 0.43198 0.26465 -6 117 1 1 1 0.72628 0.40751 0.44361 0.23025 -6 118 1 1 1 0.71607 0.41871 0.44249 0.22063 -6 119 1 3 1 0.70541 0.43852 0.43407 0.20998 -6 120 1 3 0 0.70211 0.43268 0.39145 0.21564 -6 121 1 1 1 0.70631 0.43664 0.41911 0.21776 -6 122 1 1 0 0.68467 0.50343 0.397 0.24363 -6 123 2 1 0 0.66341 0.43698 0.40924 0.22622 -6 124 2 3 0 0.66634 0.40102 0.37127 0.24413 -6 125 1 1 1 0.67439 0.3999 0.35503 0.21975 -6 126 2 3 0 0.64118 0.41188 0.36435 0.22777 -6 127 1 4 0 0.61951 0.41806 0.29839 0.2 -6 128 1 1 1 0.61455 0.40946 0.28627 0.20417 -6 129 1 1 0 0.63085 0.36319 0.3193 0.23203 -6 130 1 1 0 0.65582 0.38843 0.35002 0.22153 -6 131 1 2 0 0.66217 0.33886 0.35366 0.2 -6 132 1 1 1 0.68178 0.35875 0.3396 0.21985 -6 133 1 1 1 0.65448 0.34927 0.361 0.21653 -6 134 1 1 0 0.64916 0.3723 0.35015 0.2 -6 135 1 4 0 0.62925 0.37902 0.38279 0.20361 -6 136 1 1 0 0.59687 0.40627 0.42257 0.2 -6 137 1 1 1 0.60549 0.42107 0.39855 0.2 -6 138 1 1 0 0.62875 0.43311 0.41428 0.20039 -6 139 1 4 0 0.59721 0.3936 0.43017 0.2 -6 140 1 4 0 0.59071 0.34912 0.48425 0.20947 -6 141 2 3 0 0.60092 0.34219 0.47539 0.2 -6 142 1 3 0 0.59808 0.34948 0.43629 0.2 -6 143 1 1 0 0.62534 0.41234 0.3883 0.20448 -6 144 1 2 1 0.53923 0.44827 0.40399 0.20877 -6 145 1 2 1 0.54857 0.46654 0.40463 0.24354 -6 146 1 4 0 0.52858 0.4551 0.3712 0.30885 -6 147 1 4 1 0.52064 0.43257 0.35697 0.30294 -6 148 1 2 0 0.54765 0.41182 0.37944 0.29994 -6 149 1 1 0 0.54236 0.37342 0.38255 0.29 -6 150 1 4 0 0.54737 0.41134 0.43858 0.30156 -6 151 2 3 1 0.54518 0.44957 0.48541 0.29498 -6 152 2 3 0 0.54049 0.45317 0.52725 0.27868 -6 153 1 2 0 0.53167 0.48579 0.53491 0.32645 -6 154 1 3 0 0.56726 0.53268 0.52742 0.3149 -6 155 2 3 1 0.52608 0.52041 0.53536 0.36929 -6 156 2 2 1 0.5284 0.49763 0.55458 0.3797 -6 157 1 3 0 0.50124 0.48963 0.54688 0.38372 -6 158 1 2 0 0.49942 0.5344 0.5345 0.33314 -6 159 1 3 1 0.53981 0.51409 0.52742 0.3483 -6 160 2 3 0 0.53189 0.45857 0.54323 
0.35816 -6 161 1 3 0 0.5403 0.41014 0.56219 0.37759 -6 162 2 3 0 0.48437 0.37569 0.56655 0.38003 -6 163 2 3 1 0.48562 0.37159 0.52849 0.40983 -6 164 2 2 1 0.51432 0.34938 0.5522 0.36539 -6 165 1 2 0 0.50373 0.31786 0.55346 0.37918 -6 166 1 3 1 0.50697 0.32369 0.53195 0.36926 -6 167 1 3 0 0.50027 0.31776 0.55123 0.40146 -6 168 2 3 1 0.49311 0.36849 0.54299 0.4182 -6 169 2 3 0 0.46946 0.39455 0.55376 0.39565 -6 170 1 2 1 0.45765 0.41354 0.53639 0.36544 -6 171 1 2 0 0.48573 0.43632 0.57664 0.31938 -6 172 2 3 0 0.49735 0.42353 0.52452 0.29386 -6 173 1 2 1 0.48042 0.42558 0.49732 0.29756 -6 174 1 4 0 0.48046 0.36183 0.5306 0.3231 -6 175 1 3 0 0.42026 0.35278 0.49506 0.30487 -6 176 2 3 0 0.35582 0.392 0.51456 0.30989 -6 177 2 3 0 0.40432 0.4218 0.49302 0.3005 -6 178 1 3 0 0.41173 0.45867 0.51064 0.34797 -6 179 1 2 1 0.41262 0.47837 0.52873 0.34487 -6 180 1 2 0 0.45323 0.49058 0.51499 0.33723 -6 181 1 2 1 0.45695 0.48239 0.52702 0.32631 -6 182 1 3 1 0.4087 0.48923 0.52477 0.29357 -6 183 1 3 1 0.37603 0.45321 0.51015 0.29681 -6 184 1 3 0 0.34802 0.45232 0.52372 0.29781 -6 185 1 4 1 0.33154 0.43127 0.55039 0.28834 -6 186 1 2 0 0.30693 0.43095 0.56428 0.31368 -6 187 2 2 0 0.32836 0.44521 0.56101 0.324 -6 188 1 1 0 0.33058 0.44933 0.59314 0.34103 -6 189 2 4 0 0.37315 0.48423 0.55358 0.32311 -6 190 2 3 1 0.38488 0.4916 0.53274 0.35724 -6 191 2 3 0 0.31661 0.52983 0.50062 0.36964 -6 192 1 1 1 0.30432 0.53627 0.48407 0.3694 -6 193 1 1 0 0.31336 0.60341 0.45643 0.38103 -6 194 2 4 0 0.2759 0.58892 0.51222 0.35864 -6 195 1 3 0 0.29853 0.59536 0.49481 0.29334 -6 196 1 1 0 0.29928 0.61646 0.47757 0.26909 -6 197 1 2 0 0.32504 0.56206 0.48146 0.26988 -6 198 1 1 1 0.31099 0.53143 0.4545 0.26122 -6 199 1 1 0 0.31326 0.55127 0.45337 0.26451 -6 200 2 4 0 0.35566 0.53366 0.46122 0.22951 -6 201 2 1 0 0.31696 0.49235 0.45925 0.21454 -7 2 1 2 1 0.73652 0.77382 0.28119 0.41581 -7 3 1 2 1 0.73617 0.72485 0.33184 0.39733 -7 4 1 2 1 0.73024 0.74529 0.34602 0.40709 -7 5 1 2 0 0.75685 0.75935 0.35679 0.43301 -7 6 1 2 1 0.72626 0.7789 0.3369 0.40037 -7 7 1 4 0 0.74553 0.76397 0.32571 0.4309 -7 8 1 3 1 0.77443 0.72212 0.35922 0.41842 -7 9 1 1 1 0.77449 0.75619 0.34035 0.4397 -7 10 1 3 0 0.8 0.7567 0.35428 0.46373 -7 11 2 3 1 0.77508 0.69086 0.37354 0.47258 -7 12 2 3 0 0.76829 0.67967 0.40001 0.50458 -7 13 1 1 0 0.76672 0.69934 0.41569 0.5421 -7 14 2 4 0 0.77127 0.71321 0.37702 0.5448 -7 15 1 2 0 0.77979 0.68525 0.36089 0.55744 -7 16 1 1 0 0.76202 0.71828 0.41109 0.5703 -7 17 1 2 0 0.78828 0.71386 0.44286 0.60376 -7 18 2 1 1 0.78199 0.74743 0.44158 0.59931 -7 19 2 1 1 0.8 0.7754 0.47841 0.56621 -7 20 2 1 1 0.77974 0.78389 0.42621 0.54334 -7 21 2 3 0 0.77194 0.8 0.42647 0.55248 -7 22 2 1 1 0.77586 0.78019 0.42254 0.56588 -7 23 2 3 0 0.76585 0.77611 0.42221 0.54927 -7 24 2 1 1 0.76275 0.78298 0.39476 0.53483 -7 25 2 4 1 0.7644 0.8 0.36015 0.50291 -7 26 2 4 1 0.6958 0.77858 0.37691 0.47691 -7 27 2 4 1 0.68262 0.8 0.36378 0.46858 -7 28 2 4 0 0.6817 0.78741 0.34606 0.45713 -7 29 2 4 1 0.63409 0.7914 0.34804 0.42898 -7 30 2 4 0 0.62849 0.72416 0.34811 0.38981 -7 31 2 4 1 0.62123 0.72053 0.3406 0.42393 -7 32 2 4 0 0.60729 0.72962 0.3686 0.43583 -7 33 1 1 0 0.63289 0.73121 0.36294 0.44422 -7 34 1 2 1 0.66452 0.70818 0.38126 0.50299 -7 35 1 4 1 0.65276 0.69137 0.34886 0.4978 -7 36 1 4 0 0.67635 0.67695 0.36245 0.48723 -7 37 1 2 1 0.6364 0.72169 0.37522 0.46262 -7 38 1 2 0 0.62136 0.68738 0.37165 0.50667 -7 39 1 2 0 0.62684 0.66038 0.3311 0.4098 -7 40 2 4 1 0.62214 0.66569 0.30715 0.39592 -7 41 2 1 1 0.64773 0.65643 0.32032 0.41724 -7 
42 2 4 0 0.62275 0.63917 0.30505 0.42642 -7 43 2 2 0 0.62059 0.60141 0.28548 0.37703 -7 44 2 4 0 0.61911 0.58772 0.32664 0.37639 -7 45 1 1 1 0.59969 0.56746 0.32019 0.41392 -7 46 1 3 0 0.60794 0.56318 0.33423 0.41813 -7 47 1 1 1 0.58478 0.55692 0.30522 0.42107 -7 48 1 1 1 0.59251 0.52936 0.31046 0.42059 -7 49 1 1 0 0.62059 0.52717 0.28554 0.43034 -7 50 1 3 0 0.6537 0.58194 0.31337 0.4118 -7 51 1 1 1 0.67234 0.59347 0.33659 0.42892 -7 52 1 4 0 0.68842 0.60563 0.31471 0.44322 -7 53 1 3 0 0.64103 0.6188 0.3388 0.46047 -7 54 1 1 0 0.64114 0.62285 0.37748 0.42213 -7 55 1 3 0 0.59619 0.62455 0.40678 0.41438 -7 56 2 4 0 0.61538 0.57985 0.42666 0.43971 -7 57 2 3 0 0.60028 0.56839 0.42793 0.45859 -7 58 1 2 1 0.60161 0.55339 0.43555 0.46169 -7 59 1 4 0 0.56162 0.53272 0.42529 0.4359 -7 60 1 2 0 0.55868 0.52705 0.4066 0.45736 -7 61 1 4 1 0.58758 0.56185 0.43095 0.48555 -7 62 1 2 1 0.63743 0.55867 0.43084 0.49401 -7 63 1 2 0 0.63078 0.52857 0.47546 0.4925 -7 64 2 1 1 0.66642 0.52672 0.47409 0.51444 -7 65 2 4 1 0.63518 0.50386 0.52161 0.46751 -7 66 2 4 1 0.64562 0.46239 0.51872 0.47957 -7 67 2 4 0 0.64799 0.47066 0.52357 0.45758 -7 68 2 4 0 0.667 0.46017 0.53543 0.48102 -7 69 1 3 0 0.63757 0.46475 0.51101 0.47566 -7 70 1 1 0 0.64153 0.46559 0.5019 0.49674 -7 71 1 2 1 0.65082 0.46107 0.50177 0.46355 -7 72 1 2 0 0.69254 0.45669 0.48715 0.4624 -7 73 1 2 1 0.70784 0.45809 0.45837 0.45079 -7 74 2 4 1 0.68494 0.50599 0.4562 0.46241 -7 75 1 2 1 0.665 0.48582 0.41982 0.45064 -7 76 1 2 0 0.62657 0.47462 0.40381 0.49046 -7 77 1 2 1 0.63548 0.45809 0.43495 0.51459 -7 78 1 2 1 0.62649 0.45812 0.44179 0.52481 -7 79 1 2 0 0.63757 0.44622 0.43598 0.49922 -7 80 1 4 1 0.67383 0.43832 0.4147 0.49838 -7 81 2 4 0 0.67993 0.40577 0.46088 0.45564 -7 82 1 2 0 0.67973 0.40914 0.49205 0.4278 -7 83 1 1 0 0.70219 0.4152 0.51664 0.38622 -7 84 2 3 0 0.67299 0.4017 0.52281 0.38776 -7 85 2 4 0 0.68044 0.43412 0.51532 0.37247 -7 86 1 2 0 0.68635 0.37774 0.51596 0.37094 -7 87 1 1 0 0.70487 0.42532 0.50949 0.33338 -7 88 1 1 1 0.71384 0.40387 0.57109 0.33162 -7 89 1 3 1 0.71681 0.41858 0.56217 0.35235 -7 90 1 1 1 0.71665 0.43533 0.56571 0.3589 -7 91 1 1 0 0.69869 0.4295 0.56662 0.34501 -7 92 1 1 1 0.6688 0.42876 0.56326 0.33795 -7 93 1 1 1 0.65901 0.40379 0.51785 0.31292 -7 94 1 3 0 0.69655 0.47237 0.52794 0.28493 -7 95 1 4 0 0.70413 0.48688 0.5342 0.28189 -7 96 1 1 1 0.72048 0.47089 0.60726 0.28497 -7 97 1 1 1 0.72608 0.48122 0.62543 0.32596 -7 98 1 1 1 0.719 0.52294 0.63479 0.31364 -7 99 1 1 0 0.75845 0.54401 0.6308 0.30227 -7 100 1 1 1 0.77583 0.50055 0.58914 0.31341 -7 101 1 1 1 0.7629 0.5101 0.60475 0.30806 -7 102 1 1 1 0.75136 0.53288 0.60111 0.28629 -7 103 1 4 0 0.7449 0.53476 0.63764 0.26279 -7 104 1 1 1 0.75735 0.5178 0.60022 0.26465 -7 105 1 1 1 0.74006 0.5208 0.6246 0.28593 -7 106 1 4 0 0.74655 0.50826 0.62744 0.27347 -7 107 2 1 0 0.73213 0.49834 0.6107 0.26635 -7 108 1 1 0 0.74252 0.49545 0.58829 0.29655 -7 109 2 3 1 0.75246 0.50399 0.57983 0.30671 -7 110 2 3 1 0.75356 0.53978 0.58352 0.3279 -7 111 2 3 1 0.74623 0.54892 0.61842 0.38178 -7 112 2 1 1 0.73508 0.53497 0.62784 0.38015 -7 113 2 3 1 0.67071 0.53694 0.60079 0.38059 -7 114 2 3 1 0.65294 0.56661 0.61227 0.41993 -7 115 2 3 0 0.6813 0.53256 0.57688 0.42939 -7 116 2 3 0 0.65403 0.48876 0.56384 0.42955 -7 117 1 2 0 0.65483 0.45313 0.58652 0.44558 -7 118 2 3 0 0.62252 0.40745 0.56623 0.45398 -7 119 2 4 0 0.61672 0.43266 0.54611 0.40321 -7 120 1 1 0 0.62597 0.47728 0.55853 0.42995 -7 121 1 1 1 0.62411 0.52183 0.51648 0.40554 -7 122 1 1 0 0.61963 0.46424 0.51519 0.40227 -7 
123 1 4 1 0.64348 0.4852 0.5114 0.42042 -7 124 1 1 1 0.64939 0.459 0.51596 0.44302 -7 125 1 1 1 0.62903 0.46474 0.5682 0.42705 -7 126 1 1 1 0.64059 0.44304 0.54478 0.43085 -7 127 1 1 1 0.64861 0.45858 0.59115 0.45264 -7 128 1 1 1 0.67555 0.44122 0.57081 0.45509 -7 129 1 1 1 0.69338 0.48416 0.57055 0.41559 -7 130 1 3 1 0.706 0.52255 0.58431 0.44688 -7 131 1 1 1 0.68307 0.53763 0.58693 0.44044 -7 132 1 1 1 0.74351 0.54766 0.56273 0.43455 -7 133 1 1 1 0.79692 0.52775 0.58414 0.41248 -7 134 1 3 1 0.8 0.54525 0.59477 0.39839 -7 135 1 1 1 0.78379 0.51418 0.54199 0.39777 -7 136 1 1 1 0.76691 0.50612 0.56284 0.41139 -7 137 1 1 1 0.77781 0.51617 0.5476 0.36561 -7 138 1 1 0 0.8 0.52935 0.55226 0.38513 -7 139 1 1 1 0.77705 0.5329 0.50855 0.39285 -7 140 1 1 1 0.8 0.53265 0.55625 0.42593 -7 141 1 1 1 0.78731 0.54815 0.55623 0.4615 -7 142 1 3 0 0.8 0.5041 0.54073 0.42958 -7 143 1 1 1 0.79062 0.47687 0.57157 0.44331 -7 144 1 1 1 0.79089 0.51494 0.59257 0.40092 -7 145 1 4 0 0.8 0.54651 0.57069 0.40706 -7 146 1 3 1 0.8 0.51781 0.5596 0.4409 -7 147 1 1 1 0.8 0.51138 0.56482 0.42916 -7 148 1 1 1 0.79077 0.51298 0.5507 0.4566 -7 149 1 1 1 0.8 0.50618 0.5763 0.46124 -7 150 1 1 1 0.78315 0.5093 0.54557 0.41467 -7 151 1 1 1 0.76184 0.50378 0.51372 0.4155 -7 152 1 1 1 0.8 0.51599 0.54876 0.4234 -7 153 1 4 1 0.77191 0.50295 0.54092 0.41225 -7 154 1 1 0 0.73744 0.53739 0.56046 0.41314 -7 155 1 1 1 0.73295 0.52323 0.60109 0.39622 -7 156 1 1 1 0.71701 0.56102 0.57213 0.37529 -7 157 1 1 1 0.68905 0.56982 0.58759 0.36813 -7 158 1 1 1 0.66678 0.5476 0.54347 0.39532 -7 159 1 1 1 0.64494 0.49831 0.56258 0.41057 -7 160 1 4 1 0.67528 0.52884 0.5608 0.42352 -7 161 1 1 1 0.65777 0.52094 0.52869 0.40421 -7 162 1 4 1 0.66188 0.5178 0.51787 0.40349 -7 163 1 1 1 0.6381 0.5756 0.48972 0.38619 -7 164 1 4 0 0.6234 0.58792 0.50411 0.40694 -7 165 1 1 0 0.59122 0.56722 0.51712 0.40979 -7 166 1 1 1 0.56838 0.58774 0.50826 0.41299 -7 167 1 1 1 0.5784 0.59008 0.54381 0.40637 -7 168 1 1 0 0.60385 0.60007 0.53611 0.4377 -7 169 1 1 0 0.58775 0.58172 0.5346 0.43027 -7 170 2 2 1 0.62321 0.55708 0.52841 0.46651 -7 171 1 3 0 0.63522 0.53954 0.49567 0.44104 -7 172 1 2 1 0.61149 0.53715 0.49165 0.39184 -7 173 1 2 1 0.59542 0.5716 0.50583 0.36408 -7 174 1 2 1 0.59577 0.55592 0.49924 0.35093 -7 175 1 2 0 0.57984 0.54968 0.50682 0.35664 -7 176 1 3 0 0.55362 0.52279 0.50539 0.38891 -7 177 1 2 0 0.53257 0.51765 0.49096 0.37394 -7 178 2 3 1 0.51265 0.48652 0.47513 0.33797 -7 179 1 1 1 0.54152 0.48191 0.49638 0.28258 -7 180 2 2 0 0.51037 0.44645 0.50537 0.29273 -7 181 2 4 0 0.51899 0.47024 0.49558 0.30212 -7 182 2 3 0 0.55107 0.44902 0.49999 0.31915 -7 184 2 3 0 0.53488 0.41045 0.50085 0.24677 -7 185 1 2 1 0.51874 0.40118 0.4794 0.23449 -7 186 1 4 0 0.50501 0.35356 0.48161 0.21752 -7 187 1 2 0 0.51149 0.30729 0.4853 0.24047 -7 188 1 3 1 0.51032 0.32082 0.51876 0.24294 -7 189 1 3 1 0.49661 0.33353 0.54357 0.22793 -7 190 2 2 0 0.507 0.32073 0.57431 0.22494 -7 191 2 3 0 0.52917 0.31669 0.53771 0.22422 -7 192 2 4 0 0.4745 0.28164 0.58907 0.23844 -7 193 1 3 0 0.46175 0.31539 0.56068 0.25404 -7 194 1 1 1 0.46292 0.35015 0.49346 0.26066 -7 195 1 1 1 0.47478 0.30222 0.49708 0.24947 -7 196 1 1 1 0.45377 0.31007 0.50207 0.23864 -7 197 1 1 1 0.43098 0.27325 0.5128 0.2 -7 198 1 3 1 0.44254 0.29017 0.50301 0.2 -7 199 1 1 0 0.4086 0.28842 0.50102 0.20853 -7 200 2 3 1 0.40769 0.24911 0.51045 0.20608 -7 201 2 3 1 0.41026 0.23245 0.5347 0.2 -8 2 2 1 1 0.72272 0.36159 0.66489 0.27837 -8 3 1 2 1 0.71968 0.36712 0.68895 0.30518 -8 4 2 4 0 0.72708 0.34154 0.69245 0.3467 -8 5 
2 3 0 0.75828 0.38545 0.69597 0.34304 -8 6 1 2 1 0.76986 0.38524 0.72058 0.32163 -8 7 1 2 1 0.76545 0.40963 0.73387 0.34507 -8 8 1 2 1 0.7734 0.39018 0.76278 0.31589 -8 9 1 2 0 0.78821 0.40092 0.72354 0.33535 -8 10 2 4 0 0.74599 0.38238 0.71034 0.33525 -8 11 1 2 0 0.73145 0.3622 0.71513 0.34081 -8 12 1 1 1 0.76426 0.39595 0.77204 0.32657 -8 13 2 2 1 0.76519 0.34091 0.77712 0.31212 -8 14 2 2 0 0.8 0.35002 0.71746 0.31764 -8 15 2 4 0 0.8 0.39403 0.75616 0.2627 -8 16 1 2 1 0.7785 0.35638 0.72783 0.28618 -8 17 1 4 0 0.75761 0.35088 0.73161 0.26425 -8 18 1 2 0 0.71263 0.33098 0.77363 0.21803 -8 19 1 1 1 0.71657 0.35143 0.77443 0.20496 -8 20 1 1 1 0.72809 0.39404 0.75392 0.22908 -8 21 1 4 0 0.71511 0.39616 0.74218 0.2512 -8 22 1 4 0 0.71898 0.42119 0.73278 0.20837 -8 23 1 1 1 0.69772 0.43582 0.74177 0.2 -8 24 1 1 0 0.68991 0.46405 0.70513 0.20777 -8 25 1 1 0 0.68773 0.42401 0.68911 0.2 -8 26 1 1 1 0.72672 0.40715 0.70028 0.2 -8 27 1 1 1 0.73579 0.46605 0.64975 0.22909 -8 28 1 1 1 0.78535 0.49908 0.62345 0.29034 -8 29 1 1 1 0.78645 0.53396 0.63025 0.26708 -8 30 1 1 0 0.8 0.57474 0.62491 0.27928 -8 31 1 2 0 0.8 0.55664 0.59219 0.25888 -8 32 1 1 1 0.77736 0.51533 0.56281 0.28132 -8 33 1 3 1 0.78572 0.49382 0.58043 0.28552 -8 34 1 2 1 0.8 0.48829 0.60231 0.26588 -8 35 1 2 0 0.7994 0.51443 0.58259 0.22521 -8 36 1 2 1 0.8 0.51975 0.57607 0.27626 -8 37 1 1 1 0.73863 0.45143 0.56481 0.30938 -8 38 1 3 0 0.74349 0.46602 0.53322 0.32414 -8 39 2 1 1 0.78366 0.45325 0.49959 0.3419 -8 40 2 3 0 0.75988 0.47175 0.49019 0.37422 -8 41 2 4 0 0.75219 0.46767 0.48061 0.37156 -8 42 1 1 1 0.79631 0.46215 0.50231 0.3728 -8 43 1 1 1 0.79028 0.4427 0.51929 0.40784 -8 44 1 4 1 0.77799 0.41938 0.46929 0.44665 -8 45 1 1 1 0.8 0.46747 0.44668 0.44329 -8 46 1 1 1 0.7941 0.42631 0.45533 0.45917 -8 47 1 1 1 0.7867 0.45833 0.48318 0.44614 -8 48 1 2 1 0.78057 0.47779 0.49572 0.46559 -8 49 1 2 1 0.75523 0.4499 0.52687 0.44407 -8 50 1 1 1 0.76751 0.45794 0.52563 0.46388 -8 51 1 4 0 0.73705 0.40908 0.55823 0.47995 -8 52 1 1 1 0.74353 0.3851 0.57003 0.45614 -8 53 1 1 1 0.74428 0.35783 0.5685 0.50084 -8 54 1 1 1 0.73822 0.34854 0.55082 0.48659 -8 55 1 1 0 0.70796 0.35846 0.56603 0.50308 -8 56 1 1 1 0.70021 0.33682 0.55176 0.50902 -8 57 2 4 0 0.68065 0.38184 0.54209 0.51248 -8 58 1 1 0 0.70286 0.38027 0.55712 0.5348 -8 59 1 3 1 0.73473 0.36571 0.56786 0.53329 -8 60 1 3 0 0.73215 0.39102 0.57982 0.55559 -8 61 2 1 0 0.75962 0.3911 0.54845 0.54965 -8 62 2 4 0 0.8 0.44048 0.5297 0.53859 -8 63 1 1 1 0.8 0.40466 0.52765 0.54916 -8 64 1 1 0 0.77938 0.41566 0.5246 0.59296 -8 65 1 3 0 0.75575 0.37063 0.51426 0.60145 -8 66 1 4 0 0.74651 0.40092 0.5261 0.58375 -8 67 1 1 0 0.72621 0.37283 0.53563 0.56422 -8 68 1 1 1 0.75576 0.38204 0.53792 0.55336 -8 69 1 1 0 0.75841 0.41184 0.53299 0.53414 -8 70 1 4 1 0.77846 0.39225 0.51454 0.58763 -8 71 1 2 1 0.74423 0.38544 0.5424 0.58411 -8 72 1 1 1 0.73713 0.42122 0.5801 0.57288 -8 73 1 4 1 0.74995 0.41 0.55732 0.61644 -8 74 1 1 1 0.71944 0.44072 0.59852 0.63972 -8 75 1 1 1 0.70632 0.43435 0.62256 0.67195 -8 76 1 1 0 0.7103 0.46444 0.61044 0.66563 -8 77 1 1 1 0.7446 0.4446 0.62344 0.68054 -8 78 1 1 1 0.75614 0.41919 0.6287 0.68762 -8 79 1 1 1 0.75506 0.38343 0.62226 0.65367 -8 80 1 1 1 0.75653 0.38598 0.60912 0.66087 -8 81 1 4 0 0.73538 0.42836 0.62309 0.66879 -8 82 1 1 1 0.75823 0.44314 0.6169 0.62751 -8 83 2 4 0 0.79074 0.456 0.63763 0.61644 -8 84 1 1 1 0.78747 0.44861 0.65821 0.61868 -8 85 1 1 0 0.79994 0.44257 0.63397 0.61364 -8 86 1 4 0 0.79493 0.44211 0.60813 0.59908 -8 87 1 1 0 0.79839 0.40118 0.54373 
0.57622 -8 88 1 3 1 0.78054 0.38787 0.53546 0.57043 -8 89 1 2 0 0.8 0.40677 0.51008 0.55589 -8 90 1 4 0 0.8 0.39744 0.49598 0.54377 -8 91 1 1 1 0.76859 0.38977 0.49369 0.54716 -8 92 1 1 1 0.8 0.41353 0.49306 0.53437 -8 93 1 1 1 0.74998 0.4016 0.48395 0.56719 -8 94 1 1 1 0.71606 0.42625 0.42105 0.54828 -8 95 1 1 0 0.76205 0.4315 0.36728 0.54724 -8 96 1 1 1 0.76019 0.45926 0.40119 0.499 -8 97 1 1 1 0.72664 0.45703 0.39698 0.49327 -8 98 2 3 0 0.72215 0.47076 0.34439 0.49322 -8 99 1 3 1 0.69887 0.45542 0.28961 0.49125 -8 100 1 3 0 0.7013 0.44834 0.24887 0.485 -8 101 1 3 0 0.69343 0.45486 0.20955 0.4761 -8 102 1 1 1 0.65582 0.46996 0.2 0.46113 -8 103 1 1 0 0.63285 0.47212 0.2 0.48287 -8 104 1 1 1 0.62979 0.47559 0.2 0.45542 -8 105 1 1 1 0.63382 0.46579 0.2 0.461 -8 106 2 4 1 0.61825 0.46631 0.2 0.50011 -8 107 1 1 0 0.58195 0.48072 0.21164 0.50888 -8 108 1 2 1 0.58192 0.47029 0.26596 0.54347 -8 109 1 4 0 0.54768 0.47038 0.2649 0.52854 -8 110 1 1 0 0.60211 0.48349 0.22934 0.53953 -8 111 1 1 1 0.59398 0.44655 0.23532 0.53892 -8 112 2 3 0 0.57221 0.49309 0.24537 0.52478 -8 113 2 3 0 0.59708 0.47542 0.27066 0.53401 -8 114 1 1 1 0.5744 0.44636 0.29851 0.51063 -8 115 1 4 1 0.58123 0.42844 0.28975 0.5042 -8 116 1 1 0 0.55918 0.40677 0.30295 0.48247 -8 117 1 4 0 0.60221 0.39264 0.25755 0.48985 -8 118 1 1 1 0.62322 0.38096 0.25997 0.47628 -8 119 1 1 0 0.60216 0.40376 0.31645 0.47469 -8 120 1 1 1 0.63668 0.38762 0.32407 0.50604 -8 121 1 4 0 0.64281 0.37437 0.29351 0.49155 -8 122 1 1 1 0.64757 0.40178 0.30047 0.53058 -8 123 1 1 1 0.64206 0.48089 0.34117 0.51659 -8 124 1 1 0 0.61338 0.48418 0.34044 0.52231 -8 125 1 3 0 0.63249 0.48818 0.33356 0.54288 -8 126 1 1 0 0.64622 0.50893 0.3428 0.56352 -8 127 2 4 0 0.65173 0.5071 0.34521 0.56125 -8 128 1 1 1 0.6364 0.49866 0.31433 0.49489 -8 129 1 1 1 0.6426 0.51138 0.30761 0.49526 -8 130 1 3 0 0.65601 0.51237 0.33386 0.49921 -8 132 1 1 1 0.70711 0.53832 0.30634 0.51059 -8 133 1 1 0 0.69874 0.56144 0.3293 0.51964 -8 134 1 4 1 0.67403 0.56679 0.34472 0.55133 -8 135 1 4 0 0.6696 0.58442 0.34797 0.54135 -8 136 1 1 1 0.63877 0.59711 0.34372 0.51172 -8 137 1 1 1 0.61784 0.63393 0.36052 0.52338 -8 138 2 4 1 0.68505 0.66969 0.36838 0.51195 -8 139 1 1 1 0.66854 0.66013 0.37246 0.52443 -8 140 1 1 0 0.67228 0.64484 0.38002 0.51714 -8 141 1 1 0 0.65464 0.58997 0.39632 0.56173 -8 142 1 1 0 0.61896 0.5957 0.37331 0.58496 -8 143 1 1 1 0.64018 0.59859 0.35183 0.58706 -8 144 1 4 0 0.62543 0.63388 0.33652 0.561 -8 145 1 3 1 0.61792 0.61865 0.30557 0.56444 -8 146 1 1 1 0.64088 0.65857 0.36269 0.55997 -8 147 1 3 1 0.66423 0.67844 0.31305 0.55433 -8 148 1 1 1 0.6649 0.75876 0.30007 0.56184 -8 149 1 1 0 0.64727 0.70167 0.27646 0.51868 -8 150 2 4 1 0.66209 0.68147 0.25647 0.51973 -8 151 1 1 1 0.64634 0.65699 0.26901 0.49803 -8 152 1 4 1 0.65724 0.65722 0.25335 0.4673 -8 153 1 2 1 0.65488 0.6575 0.26338 0.47573 -8 154 1 3 0 0.65342 0.68414 0.25197 0.49104 -8 155 2 1 1 0.64961 0.69631 0.27227 0.50288 -8 156 1 3 0 0.64466 0.64614 0.29655 0.50133 -8 157 1 1 1 0.63516 0.66127 0.26392 0.45544 -8 158 1 1 1 0.65012 0.69597 0.26444 0.44891 -8 159 1 1 1 0.63283 0.72946 0.25341 0.44042 -8 160 1 1 0 0.62373 0.74285 0.26646 0.4657 -8 161 1 2 0 0.64012 0.6942 0.25127 0.42213 -8 162 1 1 1 0.62625 0.72194 0.27347 0.38954 -8 163 1 1 1 0.65363 0.73949 0.25458 0.42168 -8 164 1 1 1 0.62054 0.70957 0.25526 0.39074 -8 165 2 4 1 0.61797 0.72743 0.24834 0.39418 -8 166 1 1 1 0.62556 0.73738 0.22064 0.40057 -8 167 1 1 0 0.60339 0.71756 0.2 0.39676 -8 168 1 1 1 0.58168 0.71372 0.21836 0.38182 -8 169 1 4 0 0.62985 
0.75977 0.2 0.38573 -8 170 1 1 1 0.65444 0.72687 0.2 0.44398 -8 171 1 1 0 0.67974 0.71431 0.20057 0.4605 -8 172 1 1 1 0.72836 0.69795 0.2 0.42323 -8 173 1 1 1 0.69442 0.67182 0.2 0.41072 -8 174 1 1 0 0.68041 0.71198 0.2 0.42484 -8 175 1 1 1 0.68333 0.72709 0.2 0.39269 -8 176 1 1 1 0.67668 0.71537 0.2 0.38646 -8 177 1 1 0 0.65651 0.72791 0.2 0.39812 -8 178 1 4 1 0.711 0.75634 0.2 0.38376 -8 179 1 1 1 0.71544 0.72615 0.2 0.34603 -8 180 1 1 0 0.72974 0.74606 0.22999 0.31491 -8 181 2 2 1 0.70108 0.7698 0.25176 0.30004 -8 182 1 1 1 0.67697 0.74005 0.24558 0.29188 -8 183 1 1 1 0.76022 0.73588 0.27982 0.29037 -8 184 2 2 1 0.72328 0.73178 0.29401 0.27617 -8 185 2 3 0 0.73633 0.7141 0.3185 0.32137 -8 186 1 1 1 0.71002 0.7047 0.3316 0.28957 -8 187 1 1 0 0.75527 0.70136 0.34886 0.30559 -8 188 1 1 1 0.78373 0.68291 0.3439 0.28045 -8 189 1 1 0 0.7765 0.64914 0.36649 0.25248 -8 190 1 1 1 0.78414 0.6469 0.3685 0.25279 -8 191 1 3 1 0.78078 0.61753 0.33387 0.26636 -8 192 1 1 1 0.77767 0.63941 0.36395 0.27625 -8 193 1 3 0 0.76366 0.6321 0.35046 0.24559 -8 194 1 1 1 0.7597 0.64028 0.36988 0.21377 -8 195 1 4 0 0.73823 0.66169 0.32258 0.2 -8 196 1 1 1 0.73748 0.64822 0.31795 0.2 -8 197 1 1 1 0.75941 0.62476 0.2908 0.2 -8 198 1 1 1 0.77609 0.66111 0.32098 0.2278 -8 199 1 1 0 0.77967 0.62898 0.34737 0.22289 -8 200 1 4 0 0.73928 0.63717 0.33916 0.22653 -8 201 1 1 1 0.78518 0.6633 0.30215 0.23086 -9 1 1 3 0 0.75589 0.74958 0.4954 0.50785 -9 2 1 4 0 0.77074 0.74753 0.49091 0.51417 -9 3 2 3 0 0.75312 0.7488 0.51292 0.51973 -9 4 2 4 1 0.75142 0.73553 0.49914 0.51276 -9 5 2 2 1 0.75532 0.74618 0.51337 0.54812 -9 6 2 4 0 0.7778 0.75498 0.50815 0.57844 -9 7 2 2 1 0.8 0.79755 0.54204 0.61636 -9 8 2 3 1 0.8 0.8 0.5334 0.58682 -9 9 1 2 1 0.79834 0.8 0.49191 0.61194 -9 10 1 2 1 0.8 0.8 0.49474 0.59715 -9 11 1 3 0 0.79249 0.8 0.51127 0.59645 -9 12 1 2 1 0.75369 0.8 0.50994 0.59665 -9 13 2 4 0 0.76151 0.79994 0.53482 0.58578 -9 14 1 2 1 0.74082 0.8 0.53074 0.54457 -9 15 1 3 0 0.72549 0.79943 0.52863 0.5383 -9 16 1 2 1 0.70023 0.8 0.53768 0.52082 -9 17 1 3 1 0.73274 0.8 0.53512 0.50637 -9 18 1 2 1 0.72325 0.8 0.51422 0.47233 -9 19 1 2 1 0.6987 0.7603 0.52669 0.57786 -9 20 1 2 1 0.70606 0.7665 0.50784 0.54725 -9 21 1 3 0 0.74568 0.8 0.47528 0.48847 -9 22 1 2 1 0.73602 0.71019 0.47623 0.46379 -9 23 1 2 1 0.6995 0.78311 0.50349 0.45863 -9 24 2 3 0 0.73815 0.78553 0.48827 0.51755 -9 25 1 2 0 0.73645 0.8 0.46058 0.51965 -9 26 1 2 1 0.70921 0.8 0.43961 0.46746 -9 27 1 2 1 0.70937 0.79914 0.40737 0.52818 -9 28 1 2 1 0.72713 0.79376 0.409 0.51479 -9 29 1 2 1 0.74288 0.8 0.41613 0.5544 -9 30 1 2 1 0.7556 0.78558 0.39214 0.61406 -9 31 1 2 1 0.76292 0.8 0.40288 0.60137 -9 32 1 2 1 0.78922 0.8 0.38361 0.56391 -9 33 2 4 1 0.8 0.75956 0.33854 0.54354 -9 34 2 1 1 0.8 0.74209 0.34263 0.53183 -9 35 2 1 1 0.76504 0.6834 0.30965 0.49896 -9 36 2 4 0 0.77918 0.67152 0.31268 0.50109 -9 37 1 4 0 0.79817 0.67787 0.33283 0.51364 -9 38 1 3 0 0.8 0.65923 0.32099 0.51672 -9 39 1 2 1 0.79847 0.66575 0.28866 0.49839 -9 40 1 2 1 0.8 0.62933 0.24114 0.46222 -9 41 1 3 1 0.8 0.58321 0.25503 0.45842 -9 42 1 3 0 0.8 0.55711 0.26149 0.43178 -9 43 1 2 0 0.76933 0.54428 0.26785 0.45191 -9 44 1 2 0 0.76406 0.56701 0.25289 0.4396 -9 45 2 1 1 0.74421 0.53342 0.26598 0.44684 -9 46 2 3 1 0.8 0.52758 0.2609 0.45764 -9 47 2 3 1 0.77372 0.53752 0.28265 0.40529 -9 48 2 3 1 0.7658 0.52954 0.28731 0.40651 -9 49 2 3 1 0.78144 0.52903 0.29352 0.37285 -9 50 2 3 0 0.76062 0.50885 0.27632 0.38626 -9 51 1 2 1 0.77829 0.50797 0.25896 0.43098 -9 52 1 3 0 0.79212 0.48195 0.2946 
0.42884 -9 53 2 3 1 0.7989 0.48888 0.27181 0.44709 -9 54 1 1 0 0.8 0.47165 0.28961 0.47398 -9 55 2 3 0 0.76304 0.48877 0.28486 0.45241 -9 56 2 3 1 0.76383 0.48391 0.2639 0.48111 -9 57 2 3 0 0.77142 0.42317 0.21136 0.47006 -9 58 1 3 0 0.78932 0.43569 0.20504 0.4815 -9 59 1 4 1 0.8 0.44924 0.21098 0.46179 -9 60 1 2 0 0.8 0.42929 0.2 0.49275 -9 61 1 2 0 0.8 0.4061 0.20125 0.50836 -9 62 2 2 1 0.7939 0.43189 0.2 0.45065 -9 63 2 2 0 0.77599 0.42752 0.2 0.43872 -9 64 1 2 1 0.7732 0.41677 0.22647 0.40868 -9 65 2 4 0 0.78238 0.43176 0.22137 0.41566 -9 66 2 3 0 0.76354 0.45853 0.24257 0.41647 -9 67 1 4 0 0.74726 0.45721 0.2302 0.42024 -9 68 1 3 1 0.71946 0.39431 0.25842 0.3981 -9 69 1 1 1 0.7615 0.3867 0.23508 0.41493 -9 70 1 3 0 0.769 0.40698 0.21787 0.4461 -9 71 1 1 1 0.7758 0.38461 0.27463 0.50449 -9 72 2 4 1 0.75578 0.3849 0.28129 0.51699 -9 73 1 4 0 0.75883 0.37006 0.27127 0.51168 -9 74 1 1 0 0.77596 0.34578 0.20299 0.4647 -9 75 2 3 0 0.79787 0.36426 0.2 0.45844 -9 76 1 1 1 0.79981 0.34511 0.20752 0.45614 -9 77 2 4 1 0.8 0.27499 0.2198 0.4812 -9 78 1 1 1 0.8 0.27516 0.20862 0.49358 -9 79 2 1 1 0.8 0.24672 0.2 0.47946 -9 80 2 1 1 0.76502 0.24829 0.21118 0.4635 -9 81 2 4 0 0.77632 0.27321 0.20222 0.45215 -9 82 2 3 1 0.77724 0.24133 0.22068 0.46303 -9 83 2 3 0 0.75954 0.25195 0.2 0.46168 -9 84 2 4 1 0.75151 0.23705 0.22881 0.49423 -9 85 1 1 1 0.75631 0.2 0.2 0.44735 -9 86 1 1 1 0.7591 0.2 0.20426 0.48861 -9 87 1 1 1 0.69645 0.2 0.21528 0.50524 -9 88 1 4 1 0.64578 0.2 0.2 0.51599 -9 89 1 1 0 0.61847 0.24198 0.2 0.51325 -9 90 1 4 1 0.60891 0.22231 0.20642 0.52709 -9 91 2 1 1 0.56935 0.2 0.2 0.53635 -9 92 2 4 0 0.59362 0.22588 0.20084 0.50491 -9 93 1 4 0 0.58413 0.22833 0.20064 0.52309 -9 94 1 1 0 0.59485 0.28355 0.2 0.53893 -9 95 1 3 0 0.63423 0.26507 0.2 0.52302 -9 96 2 3 0 0.60383 0.2807 0.2 0.51331 -9 97 1 2 0 0.58153 0.28407 0.2 0.50771 -9 98 1 3 0 0.58763 0.30448 0.20177 0.51951 -9 99 1 1 0 0.60168 0.29137 0.22474 0.5328 -9 100 2 4 0 0.60534 0.30633 0.22142 0.5267 -9 101 2 4 0 0.61744 0.32021 0.2 0.51496 -9 102 1 2 0 0.60371 0.33654 0.2 0.48967 -9 103 1 2 1 0.61568 0.31611 0.24811 0.48848 -9 104 2 2 1 0.64658 0.29741 0.23902 0.49939 -9 105 2 4 0 0.63724 0.34588 0.24255 0.54398 -9 106 1 3 1 0.68848 0.34246 0.21635 0.59986 -9 107 1 2 0 0.70055 0.35615 0.22945 0.65007 -9 108 1 2 1 0.72668 0.4019 0.22144 0.67431 -9 109 1 2 0 0.73614 0.37601 0.22303 0.65517 -9 110 2 4 0 0.70871 0.38318 0.21633 0.64814 -9 111 2 3 0 0.72082 0.4259 0.22911 0.64106 -9 112 1 1 1 0.74365 0.39062 0.2159 0.67835 -9 113 2 4 0 0.74488 0.42965 0.21564 0.64292 -9 114 1 1 0 0.76232 0.43526 0.23636 0.66444 -9 115 2 1 1 0.77108 0.43833 0.21982 0.72201 -9 116 1 4 1 0.7764 0.38469 0.2197 0.68629 -9 117 2 4 1 0.76204 0.37327 0.22346 0.73047 -9 118 2 4 1 0.73289 0.37751 0.24291 0.76032 -9 119 2 4 1 0.68179 0.37169 0.26213 0.7443 -9 120 2 4 1 0.66835 0.38497 0.29941 0.77099 -9 121 2 1 0 0.69142 0.35369 0.32598 0.75279 -9 122 2 4 1 0.67488 0.37777 0.34756 0.7462 -9 123 2 2 0 0.68119 0.34416 0.31133 0.73815 -9 124 2 4 1 0.66546 0.35682 0.29042 0.77882 -9 125 2 4 0 0.67253 0.39233 0.31288 0.8 -9 127 1 1 1 0.66914 0.36629 0.3453 0.73957 -9 128 2 4 1 0.61275 0.37034 0.35161 0.75057 -9 129 2 4 1 0.62213 0.36436 0.33584 0.72704 -9 130 2 4 1 0.62269 0.34953 0.30498 0.76824 -9 131 1 2 0 0.60851 0.32652 0.30339 0.72964 -9 132 2 4 1 0.60786 0.32857 0.29987 0.72062 -9 133 2 4 1 0.58617 0.33267 0.3049 0.69372 -9 134 2 1 1 0.56321 0.33668 0.30934 0.63765 -9 135 2 1 1 0.56034 0.34 0.32951 0.62249 -9 136 2 4 1 0.59695 0.35349 0.33372 0.64054 -9 137 2 
4 0 0.5942 0.33099 0.2794 0.68589 -9 138 2 4 1 0.55607 0.27542 0.26988 0.68944 -9 139 2 1 0 0.55672 0.2687 0.26835 0.71414 -9 140 2 4 1 0.561 0.27377 0.26538 0.71706 -9 141 2 4 1 0.57517 0.25666 0.28595 0.71656 -9 142 2 1 1 0.61529 0.2424 0.26603 0.70067 -9 143 2 4 1 0.64528 0.27415 0.22106 0.73397 -9 144 2 1 0 0.61906 0.28121 0.23817 0.75352 -9 145 2 4 1 0.66645 0.25274 0.27652 0.78179 -9 146 2 2 1 0.63504 0.22433 0.27043 0.78352 -9 147 2 4 1 0.65923 0.22031 0.24342 0.8 -9 148 2 2 0 0.66101 0.24213 0.2582 0.79169 -9 149 2 4 0 0.68021 0.24563 0.27224 0.74907 -9 150 2 2 0 0.64328 0.24075 0.23307 0.70195 -9 151 2 2 0 0.65067 0.22962 0.23184 0.70443 -9 152 1 3 0 0.67629 0.2 0.26824 0.6873 -9 153 1 2 0 0.70799 0.20293 0.26597 0.68027 -9 154 1 1 1 0.72808 0.2 0.2558 0.69619 -9 155 1 3 0 0.67266 0.2 0.26307 0.75591 -9 156 1 4 1 0.66292 0.2 0.23587 0.79649 -9 157 1 1 0 0.66732 0.20346 0.22775 0.79178 -9 158 1 1 1 0.68327 0.20605 0.22413 0.78059 -9 159 1 4 1 0.69984 0.2 0.20639 0.79022 -9 160 1 1 1 0.67476 0.2 0.21937 0.76964 -9 161 2 4 1 0.68327 0.21611 0.20076 0.7332 -9 162 2 4 1 0.67228 0.20292 0.20451 0.72464 -9 163 2 4 1 0.67515 0.25669 0.2 0.76848 -9 164 2 4 1 0.63301 0.25602 0.2 0.77518 -9 165 2 4 1 0.6279 0.28699 0.25009 0.76979 -9 166 2 1 0 0.65378 0.28915 0.21989 0.76559 -9 167 2 4 1 0.6211 0.31583 0.24256 0.79443 -9 168 2 4 1 0.61409 0.30287 0.28766 0.8 -9 169 2 4 1 0.65255 0.35784 0.29354 0.8 -9 170 1 1 0 0.6344 0.31885 0.31769 0.79089 -9 171 2 4 1 0.65216 0.33764 0.36155 0.76434 -9 172 2 4 1 0.67819 0.39544 0.3474 0.74471 -9 173 2 4 1 0.68263 0.40543 0.33819 0.74302 -9 174 2 1 1 0.66787 0.37755 0.3207 0.77943 -9 175 2 1 1 0.65956 0.36316 0.29664 0.76993 -9 176 2 4 1 0.65754 0.35879 0.28283 0.7549 -9 177 2 1 0 0.63822 0.38018 0.27172 0.8 -9 178 1 4 0 0.63985 0.42095 0.2254 0.8 -9 179 1 2 0 0.64619 0.42085 0.21304 0.75356 -9 180 1 1 1 0.59991 0.42416 0.2385 0.75872 -9 181 2 3 0 0.59123 0.44978 0.2418 0.7741 -9 182 1 1 0 0.58685 0.47005 0.26321 0.76732 -9 183 2 1 0 0.59901 0.50363 0.23276 0.75541 -9 184 2 1 0 0.61453 0.50371 0.28101 0.75982 -9 185 1 2 1 0.6131 0.53879 0.26269 0.74025 -9 186 1 2 1 0.62768 0.50382 0.26046 0.71628 -9 187 1 3 0 0.66219 0.52766 0.26739 0.70902 -9 188 1 2 0 0.63736 0.56407 0.29745 0.72158 -9 189 1 4 0 0.62284 0.55832 0.31837 0.72099 -9 190 1 1 1 0.65609 0.53865 0.2909 0.69863 -9 191 1 4 1 0.63598 0.56449 0.29165 0.74445 -9 192 1 4 1 0.65425 0.55375 0.28118 0.76522 -9 193 2 4 1 0.68603 0.57074 0.27263 0.76083 -9 194 2 2 1 0.64706 0.56614 0.2661 0.76867 -9 195 2 4 1 0.68318 0.51971 0.26064 0.74966 -9 196 2 4 0 0.68059 0.497 0.25007 0.75177 -9 197 2 2 1 0.68456 0.51478 0.30823 0.74594 -9 198 1 2 0 0.71844 0.52649 0.33788 0.74639 -9 199 2 4 1 0.71123 0.551 0.35148 0.74464 -9 200 2 3 0 0.72001 0.52342 0.33865 0.72155 -9 201 2 2 1 0.74859 0.49433 0.34542 0.70368 -10 2 1 2 0 0.79533 0.61486 0.33702 0.64806 -10 3 1 3 0 0.77911 0.60512 0.33215 0.66138 -10 4 1 1 1 0.78337 0.61632 0.35911 0.63626 -10 5 1 4 1 0.7892 0.61803 0.31638 0.62637 -10 6 1 4 0 0.78802 0.6319 0.33504 0.61932 -10 7 1 1 1 0.8 0.62275 0.35163 0.64329 -10 8 1 1 1 0.8 0.67669 0.35841 0.6561 -10 9 1 1 0 0.79212 0.63642 0.32298 0.65235 -10 10 2 1 1 0.79185 0.65624 0.33595 0.61702 -10 11 1 1 0 0.74926 0.72931 0.34746 0.58345 -10 12 1 2 0 0.75632 0.76559 0.34628 0.58341 -10 13 1 2 1 0.7639 0.74425 0.36349 0.58836 -10 14 1 4 1 0.76054 0.77998 0.3378 0.60271 -10 15 1 1 1 0.8 0.78498 0.35739 0.61118 -10 16 1 1 1 0.8 0.8 0.35731 0.59052 -10 17 1 1 1 0.8 0.78324 0.35449 0.58059 -10 18 1 1 1 0.8 0.8 0.37265 
0.5926 -10 19 1 1 1 0.8 0.79256 0.37965 0.59253 -10 20 1 2 1 0.8 0.8 0.36541 0.59428 -10 21 1 3 1 0.8 0.77415 0.41601 0.60457 -10 22 1 1 1 0.77449 0.77168 0.42524 0.55168 -10 23 1 1 0 0.8 0.75067 0.38646 0.55036 -10 24 2 3 1 0.8 0.7915 0.40166 0.48547 -10 25 1 2 1 0.76914 0.8 0.42448 0.49218 -10 26 1 4 0 0.77758 0.79562 0.44998 0.51029 -10 27 1 1 1 0.8 0.8 0.42856 0.52042 -10 28 1 4 0 0.8 0.8 0.4345 0.50575 -10 29 1 1 1 0.8 0.75882 0.4448 0.48349 -10 30 1 3 0 0.78175 0.8 0.40258 0.44428 -10 31 1 1 1 0.76652 0.8 0.39384 0.39106 -10 32 1 1 0 0.79291 0.78602 0.37873 0.40843 -10 33 1 4 1 0.8 0.76245 0.39005 0.41561 -10 34 1 1 1 0.76776 0.8 0.35845 0.37649 -10 35 1 1 0 0.76479 0.8 0.38451 0.39538 -10 36 1 1 1 0.78675 0.8 0.43522 0.37399 -10 37 1 2 1 0.76881 0.8 0.42214 0.3937 -10 38 1 1 1 0.76719 0.77319 0.46031 0.41895 -10 39 1 1 1 0.76835 0.76873 0.48054 0.4191 -10 40 1 3 1 0.73738 0.8 0.48358 0.43343 -10 41 1 1 1 0.72322 0.8 0.47422 0.44369 -10 42 1 2 1 0.71153 0.8 0.46335 0.42345 -10 43 2 2 1 0.68255 0.8 0.45305 0.44001 -10 44 1 1 1 0.69089 0.78104 0.47197 0.42788 -10 45 1 4 0 0.66732 0.79469 0.44878 0.44591 -10 46 1 3 1 0.65766 0.79189 0.46314 0.44544 -10 47 1 1 0 0.5976 0.7684 0.47052 0.42774 -10 48 1 1 0 0.55005 0.74119 0.42341 0.48673 -10 49 1 1 0 0.53023 0.72798 0.41138 0.52417 -10 50 2 4 1 0.52611 0.77921 0.39979 0.53658 -10 51 1 3 0 0.49055 0.8 0.42019 0.55675 -10 52 1 2 0 0.52201 0.8 0.38315 0.53647 -10 53 1 4 1 0.48311 0.79404 0.38362 0.52962 -10 54 1 1 1 0.47851 0.8 0.37604 0.53622 -10 55 2 3 0 0.47114 0.78186 0.32555 0.54864 -10 56 1 3 0 0.52498 0.8 0.34821 0.50809 -10 57 2 3 1 0.5568 0.73854 0.32226 0.50379 -10 58 1 1 1 0.59107 0.77508 0.35782 0.48549 -10 59 1 2 1 0.57859 0.75152 0.36376 0.45497 -10 60 1 2 1 0.58538 0.78769 0.35096 0.45682 -10 61 1 2 1 0.5831 0.77991 0.36414 0.42087 -10 62 1 2 0 0.62814 0.73505 0.34981 0.38763 -10 63 2 2 1 0.6263 0.70483 0.30452 0.36641 -10 64 2 4 1 0.59054 0.67293 0.31781 0.38955 -10 65 1 1 1 0.59062 0.68057 0.37094 0.38191 -10 66 2 2 0 0.6 0.69509 0.37937 0.3568 -10 67 2 2 1 0.56786 0.66458 0.3822 0.36675 -10 68 1 1 1 0.5604 0.70035 0.38482 0.35478 -10 69 2 2 1 0.57104 0.71245 0.39957 0.35664 -10 70 2 1 0 0.54359 0.71196 0.42378 0.3571 -10 72 2 4 0 0.52826 0.71692 0.47957 0.39249 -10 73 1 1 0 0.52679 0.7008 0.51601 0.41836 -10 74 1 4 0 0.51832 0.69455 0.52682 0.45528 -10 75 1 1 1 0.51664 0.69787 0.52532 0.41025 -10 76 1 3 1 0.47841 0.70222 0.54519 0.40828 -10 77 1 4 1 0.52181 0.7075 0.58058 0.4413 -10 78 1 1 0 0.53371 0.70709 0.61293 0.43702 -10 79 1 3 0 0.51187 0.69805 0.59148 0.41795 -10 80 1 1 0 0.54662 0.71333 0.60554 0.38272 -10 81 1 1 1 0.56164 0.69111 0.62202 0.35641 -10 82 1 3 1 0.58091 0.68219 0.60688 0.36051 -10 83 2 1 1 0.57962 0.68056 0.65021 0.36674 -10 84 1 3 1 0.55223 0.72185 0.6166 0.38495 -10 85 1 3 1 0.53855 0.73672 0.61654 0.40548 -10 86 1 3 0 0.57337 0.74868 0.60743 0.41265 -10 87 2 1 0 0.58888 0.77335 0.60741 0.42571 -10 88 1 2 1 0.57303 0.74299 0.64668 0.39972 -10 89 1 4 0 0.59801 0.76723 0.67844 0.37176 -10 90 1 1 0 0.59176 0.76852 0.67096 0.35297 -10 91 1 4 0 0.57924 0.77291 0.6587 0.31125 -10 92 1 1 1 0.51333 0.72898 0.67478 0.32844 -10 93 1 1 1 0.48641 0.7256 0.70118 0.32468 -10 94 1 1 0 0.5281 0.75115 0.7171 0.31097 -10 95 1 1 0 0.54681 0.72528 0.66602 0.3031 -10 96 1 3 1 0.55095 0.75749 0.68133 0.27906 -10 97 1 1 1 0.51773 0.75759 0.70518 0.28213 -10 98 1 1 0 0.55385 0.74856 0.71976 0.32485 -10 99 1 1 0 0.52698 0.73911 0.72153 0.34305 -10 100 1 3 0 0.52457 0.68834 0.6949 0.33497 -10 101 1 1 1 0.50922 0.70878 0.68 
0.32982 -10 102 2 1 1 0.48483 0.64737 0.69028 0.34193 -10 103 2 1 0 0.45722 0.6843 0.7382 0.38723 -10 104 1 1 0 0.40999 0.68738 0.71106 0.40523 -10 105 2 4 1 0.40716 0.6782 0.72897 0.428 -10 106 1 1 1 0.37528 0.64565 0.72724 0.41672 -10 107 1 3 1 0.34385 0.62057 0.75223 0.40683 -10 108 1 2 1 0.34604 0.63244 0.75108 0.39212 -10 109 1 2 0 0.33945 0.59741 0.76341 0.40183 -10 110 2 4 1 0.33353 0.64323 0.77437 0.41816 -10 111 1 1 1 0.31872 0.64159 0.78477 0.39964 -10 112 1 4 0 0.33592 0.63011 0.75495 0.40405 -10 113 1 1 1 0.31717 0.66784 0.76968 0.42643 -10 114 2 1 0 0.29305 0.64843 0.77733 0.43549 -10 115 1 3 1 0.31068 0.65657 0.75338 0.4475 -10 116 1 4 0 0.30067 0.63712 0.75722 0.44616 -10 117 2 4 0 0.29281 0.63177 0.77055 0.45544 -10 118 2 4 1 0.27562 0.63623 0.74456 0.49864 -10 119 2 4 1 0.28123 0.6148 0.7298 0.50748 -10 120 1 1 0 0.29372 0.60176 0.74845 0.48459 -10 121 1 4 1 0.31879 0.61007 0.75153 0.51382 -10 122 1 1 0 0.34325 0.6292 0.78053 0.49692 -10 123 1 3 0 0.33735 0.59963 0.78052 0.45578 -10 124 2 4 0 0.3061 0.6266 0.78756 0.43671 -10 125 1 2 0 0.2692 0.61377 0.78148 0.46674 -10 126 2 4 0 0.28086 0.60337 0.74509 0.49568 -10 127 1 2 1 0.25623 0.61038 0.72673 0.50664 -10 128 1 2 1 0.22991 0.60778 0.69596 0.50548 -10 129 1 2 0 0.21403 0.62861 0.74918 0.56429 -10 130 2 3 1 0.2 0.61951 0.74461 0.5241 -10 131 1 2 0 0.2016 0.57322 0.73803 0.53287 -10 132 2 2 1 0.2 0.57338 0.76688 0.57414 -10 133 2 3 1 0.24192 0.54513 0.78845 0.5805 -10 134 2 3 1 0.22449 0.53931 0.76575 0.54908 -10 135 1 1 0 0.23756 0.49219 0.79371 0.58829 -10 136 2 4 0 0.2 0.51495 0.8 0.59757 -10 137 1 4 1 0.25076 0.50844 0.79613 0.56615 -10 138 1 1 0 0.21233 0.51514 0.79756 0.56617 -10 139 2 3 1 0.21396 0.53692 0.8 0.5165 -10 140 1 2 1 0.24112 0.5471 0.79205 0.53283 -10 141 1 1 0 0.2593 0.56942 0.79459 0.5476 -10 142 1 2 1 0.26206 0.52408 0.8 0.54105 -10 143 1 2 0 0.27497 0.5153 0.78909 0.49856 -10 144 2 4 1 0.27226 0.48823 0.8 0.51845 -10 145 1 3 1 0.25567 0.45553 0.79816 0.52488 -10 146 1 1 1 0.25052 0.42584 0.8 0.5299 -10 147 1 1 0 0.26973 0.42681 0.75384 0.53273 -10 148 2 2 0 0.2731 0.42783 0.77287 0.53278 -10 149 1 1 0 0.27367 0.44849 0.79407 0.55838 -10 150 2 1 0 0.2931 0.49084 0.8 0.5316 -10 151 1 3 0 0.28059 0.52164 0.74324 0.53733 -10 152 1 1 0 0.28957 0.53893 0.75354 0.54824 -10 153 2 3 1 0.28571 0.5029 0.75907 0.54966 -10 154 1 1 0 0.31502 0.49518 0.76867 0.53628 -10 155 1 2 1 0.32785 0.49781 0.7489 0.53514 -10 156 1 1 1 0.40513 0.47091 0.77671 0.51453 -10 157 1 1 1 0.43223 0.45748 0.77637 0.51789 -10 158 1 3 1 0.46163 0.47294 0.74615 0.50768 -10 159 2 4 0 0.46601 0.48429 0.74986 0.49598 -10 160 2 2 0 0.47188 0.49195 0.71257 0.45374 -10 161 2 1 1 0.44031 0.49121 0.68641 0.47482 -10 162 2 3 0 0.41901 0.49526 0.70796 0.44658 -10 163 2 4 1 0.43251 0.46366 0.70508 0.42232 -10 164 1 2 1 0.41364 0.48043 0.70468 0.42756 -10 165 1 2 1 0.40867 0.48836 0.7352 0.39318 -10 166 1 2 1 0.38058 0.5313 0.7467 0.39111 -10 167 2 2 1 0.36313 0.57037 0.70606 0.40742 -10 168 2 1 1 0.35806 0.52469 0.72541 0.39842 -10 169 2 1 1 0.3965 0.55552 0.70164 0.39636 -10 170 1 4 1 0.39881 0.58211 0.7039 0.39538 -10 171 1 2 0 0.40161 0.64315 0.66595 0.43253 -10 172 1 2 1 0.40527 0.63142 0.68344 0.47814 -10 173 1 2 1 0.41818 0.64688 0.68375 0.49872 -10 174 2 4 1 0.43695 0.66292 0.67543 0.48242 -10 175 2 3 1 0.39064 0.70592 0.72162 0.50853 -10 176 2 3 1 0.41418 0.72227 0.7404 0.50848 -10 177 1 4 0 0.42828 0.73365 0.74399 0.50729 -10 178 1 1 0 0.4385 0.714 0.75182 0.52906 -10 179 1 1 1 0.38462 0.71399 0.78125 0.51557 -10 180 1 1 0 0.38243 0.6897 
0.8 0.56269 -10 181 1 2 1 0.41956 0.6537 0.78119 0.53999 -10 182 1 3 1 0.46125 0.60926 0.78334 0.5355 -10 183 1 4 1 0.5079 0.58234 0.79041 0.53978 -10 185 2 3 1 0.58773 0.61974 0.77567 0.58005 -10 186 2 1 1 0.56994 0.61598 0.77697 0.5845 -10 187 1 4 1 0.58265 0.65108 0.75528 0.55322 -10 188 1 1 1 0.61641 0.63525 0.73075 0.53583 -10 189 2 3 0 0.61089 0.68513 0.74215 0.55119 -10 190 1 4 0 0.67254 0.67502 0.74304 0.52344 -10 191 2 3 1 0.71849 0.66315 0.75994 0.54305 -10 192 1 2 1 0.73545 0.6294 0.79004 0.53427 -10 194 2 1 0 0.76957 0.61338 0.74831 0.52514 -10 195 1 3 0 0.73544 0.61721 0.75629 0.4949 -10 196 1 2 1 0.7462 0.65698 0.72142 0.46658 -10 197 1 2 0 0.74487 0.67712 0.70299 0.4334 -10 198 1 4 0 0.75813 0.63902 0.7132 0.38684 -10 199 1 3 1 0.77195 0.65874 0.73815 0.4148 -10 200 2 4 0 0.75457 0.63628 0.72188 0.43059 -10 201 1 2 1 0.72843 0.646 0.71037 0.46274 -11 1 2 3 0 0.55043 0.5779 0.35359 0.35152 -11 3 2 3 1 0.56558 0.52928 0.27501 0.34064 -11 4 2 1 0 0.53392 0.562 0.31016 0.31646 -11 5 2 3 1 0.53131 0.54329 0.26991 0.33176 -11 6 2 2 0 0.51301 0.54949 0.31655 0.3123 -11 7 2 2 1 0.49465 0.56101 0.30935 0.29295 -11 8 2 3 1 0.49977 0.59981 0.32269 0.29142 -11 9 2 2 1 0.5337 0.64168 0.27847 0.23984 -11 10 2 2 1 0.54034 0.62302 0.28238 0.25339 -11 11 2 3 0 0.54457 0.62412 0.2874 0.25673 -11 12 2 3 0 0.56074 0.61232 0.31517 0.23104 -11 13 2 1 0 0.52356 0.59777 0.32553 0.237 -11 14 2 3 1 0.51113 0.58218 0.2904 0.24704 -11 15 2 4 0 0.49545 0.58171 0.29121 0.22882 -11 16 2 3 1 0.4855 0.57166 0.29957 0.24484 -11 17 2 3 0 0.46832 0.58533 0.29036 0.26997 -11 18 2 3 0 0.48141 0.5609 0.29432 0.20644 -11 19 2 2 0 0.48431 0.57657 0.27478 0.22073 -11 20 2 4 0 0.5241 0.57124 0.29602 0.24342 -11 21 1 1 0 0.52299 0.60777 0.32382 0.27311 -11 22 1 2 1 0.55617 0.6093 0.31869 0.25054 -11 23 2 3 0 0.53125 0.56702 0.37266 0.23961 -11 24 2 3 1 0.54199 0.54908 0.40232 0.24116 -11 25 1 3 0 0.57052 0.54133 0.35003 0.22522 -11 26 2 2 0 0.55783 0.57273 0.32456 0.2 -11 27 1 1 1 0.58077 0.60062 0.32838 0.2236 -11 28 1 1 1 0.55735 0.57087 0.29145 0.22587 -11 29 1 1 1 0.57661 0.59907 0.24968 0.25374 -11 30 1 4 0 0.54437 0.63611 0.24601 0.24774 -11 31 1 1 0 0.52996 0.60176 0.21448 0.27302 -11 32 2 3 0 0.50178 0.63469 0.2 0.24459 -11 33 1 1 1 0.52212 0.64377 0.20389 0.2 -11 34 1 4 0 0.49338 0.67082 0.2 0.2053 -11 35 1 3 0 0.47172 0.66299 0.2 0.25588 -11 36 1 1 1 0.48227 0.66312 0.21198 0.24343 -11 37 1 1 1 0.4579 0.66234 0.2 0.2323 -11 38 1 1 0 0.45205 0.60848 0.24113 0.28058 -11 39 1 1 0 0.46492 0.65123 0.21024 0.2792 -11 40 1 2 0 0.46757 0.63588 0.2 0.27274 -11 41 2 2 0 0.46378 0.61741 0.2 0.20137 -11 42 2 1 1 0.43549 0.60662 0.2191 0.25492 -11 43 1 1 0 0.44882 0.58923 0.20251 0.23947 -11 44 2 4 0 0.44739 0.61681 0.2209 0.26471 -11 45 2 4 1 0.46384 0.59305 0.25319 0.24613 -11 46 2 2 1 0.51119 0.59576 0.24912 0.24026 -11 47 2 2 1 0.51119 0.61554 0.27732 0.25702 -11 48 2 4 0 0.46991 0.64183 0.23582 0.22444 -11 49 1 2 1 0.48296 0.63369 0.20535 0.27083 -11 50 1 2 0 0.5426 0.70366 0.2 0.32619 -11 51 2 4 0 0.50892 0.69992 0.2 0.32438 -11 52 1 4 1 0.4954 0.71494 0.23481 0.31973 -11 53 1 4 0 0.45152 0.75401 0.22295 0.31486 -11 54 1 4 1 0.44303 0.76902 0.2 0.33248 -11 55 1 2 1 0.44439 0.76276 0.2 0.35535 -11 56 1 2 1 0.47164 0.77372 0.20991 0.30229 -11 57 1 4 1 0.42252 0.76981 0.20823 0.27657 -11 58 1 2 0 0.4626 0.77064 0.22131 0.23807 -11 59 2 4 0 0.47876 0.74553 0.21937 0.2 -11 60 1 2 0 0.47492 0.77258 0.21553 0.20372 -11 61 2 1 1 0.50097 0.797 0.22088 0.2 -11 62 2 1 0 0.54239 0.79098 0.2 0.21974 -11 63 2 3 0 0.52331 0.79352 
0.23114 0.23316 -11 64 1 1 0 0.49528 0.76334 0.22954 0.25331 -11 65 1 4 0 0.49876 0.73095 0.26264 0.27285 -11 66 1 1 1 0.53242 0.72036 0.23656 0.25391 -11 67 1 1 1 0.52189 0.67882 0.27964 0.2393 -11 68 1 1 0 0.53176 0.68164 0.25737 0.27294 -11 69 2 3 0 0.54291 0.73061 0.2078 0.27341 -11 70 2 3 1 0.54529 0.72522 0.24896 0.28794 -11 71 2 3 0 0.55668 0.75084 0.26641 0.26938 -11 72 1 1 0 0.53982 0.71284 0.25302 0.27195 -11 73 2 4 0 0.57059 0.72955 0.21981 0.28124 -11 74 2 3 0 0.60017 0.75338 0.2 0.26189 -11 75 2 3 0 0.56987 0.79254 0.23086 0.26063 -11 76 2 4 1 0.53471 0.77639 0.25951 0.27258 -11 77 1 2 1 0.577 0.75346 0.31801 0.28561 -11 78 1 2 1 0.54832 0.71056 0.29922 0.29029 -11 79 1 1 0 0.53751 0.7274 0.31287 0.29789 -11 80 1 2 1 0.54485 0.71156 0.31588 0.2826 -11 81 1 2 0 0.54433 0.69123 0.36089 0.26533 -11 82 2 4 1 0.56687 0.76312 0.36796 0.23672 -11 83 2 4 0 0.60786 0.7831 0.38363 0.23398 -11 84 2 2 1 0.60936 0.78368 0.36804 0.23087 -11 85 1 2 1 0.6211 0.74615 0.36371 0.20084 -11 86 2 4 0 0.5845 0.77236 0.3 0.2 -11 87 1 2 1 0.60376 0.8 0.32359 0.2 -11 88 1 2 1 0.62269 0.77985 0.32173 0.2031 -11 89 1 2 1 0.67831 0.79703 0.35091 0.20138 -11 90 1 2 1 0.69167 0.8 0.37196 0.2 -11 91 1 2 1 0.69856 0.7645 0.36825 0.21451 -11 92 1 2 1 0.69443 0.73286 0.37839 0.24046 -11 93 1 2 1 0.69619 0.72385 0.376 0.28831 -11 94 1 2 1 0.69996 0.7384 0.34066 0.30731 -11 95 1 2 1 0.69694 0.77351 0.34487 0.33476 -11 96 1 4 0 0.66827 0.79268 0.35615 0.27297 -11 97 1 2 0 0.67878 0.8 0.33973 0.27352 -11 98 1 1 0 0.6799 0.8 0.34456 0.25026 -11 99 2 3 0 0.62187 0.8 0.28067 0.22623 -11 100 2 2 1 0.6054 0.77345 0.25924 0.2 -11 101 1 4 0 0.65383 0.77114 0.27204 0.2008 -11 102 1 3 0 0.64727 0.76172 0.25281 0.21862 -11 103 2 4 0 0.6635 0.77711 0.305 0.2 -11 104 2 3 0 0.67197 0.79161 0.28616 0.23714 -11 105 1 2 1 0.64376 0.8 0.30672 0.23704 -11 106 1 2 0 0.63178 0.8 0.29251 0.23955 -11 107 1 2 1 0.6538 0.79894 0.29693 0.24725 -11 108 1 3 0 0.64423 0.7928 0.28867 0.21739 -11 109 1 2 0 0.61983 0.76918 0.29843 0.27407 -11 111 1 4 0 0.54281 0.76111 0.30887 0.26801 -11 112 1 2 1 0.52811 0.74021 0.30316 0.29007 -11 113 1 3 0 0.53029 0.77255 0.2792 0.28303 -11 114 1 2 1 0.53651 0.79728 0.29597 0.30773 -11 115 1 2 0 0.55624 0.78973 0.25582 0.31867 -11 116 1 2 1 0.53997 0.8 0.22837 0.31713 -11 117 1 2 1 0.52671 0.8 0.24036 0.38512 -11 118 1 2 1 0.51438 0.8 0.23973 0.37043 -11 119 1 3 1 0.53182 0.79605 0.27918 0.36907 -11 120 1 2 1 0.50286 0.8 0.28253 0.35504 -11 121 1 2 1 0.50235 0.8 0.29495 0.29598 -11 122 1 3 0 0.48894 0.79331 0.30969 0.31204 -11 123 1 2 1 0.47871 0.79948 0.3263 0.31954 -11 124 1 2 1 0.45365 0.8 0.29099 0.33413 -11 125 1 2 1 0.46635 0.8 0.26215 0.31968 -11 126 1 2 1 0.45381 0.74542 0.23663 0.31977 -11 127 1 2 1 0.44701 0.77734 0.26106 0.32745 -11 128 1 3 0 0.42234 0.8 0.25779 0.32023 -11 129 1 2 1 0.40726 0.79306 0.29614 0.30255 -11 130 1 2 1 0.36714 0.79442 0.30191 0.2763 -11 131 1 4 0 0.36376 0.79681 0.32273 0.27065 -11 132 1 2 1 0.3524 0.79521 0.28949 0.2766 -11 133 1 2 1 0.32952 0.78195 0.24297 0.25312 -11 134 1 2 1 0.28499 0.8 0.24966 0.28736 -11 135 1 2 0 0.26727 0.79224 0.22348 0.3366 -11 136 1 2 1 0.25006 0.79156 0.26094 0.27667 -11 137 1 2 1 0.21646 0.76325 0.24907 0.25544 -11 138 1 2 1 0.2 0.78589 0.26922 0.27831 -11 139 1 2 0 0.2 0.76936 0.2926 0.24553 -11 140 1 2 1 0.23753 0.79722 0.33088 0.24868 -11 141 1 2 0 0.22556 0.8 0.31513 0.27674 -11 142 1 2 0 0.21033 0.8 0.30497 0.28001 -11 143 2 3 1 0.22537 0.8 0.27327 0.29592 -11 144 2 3 0 0.24274 0.79907 0.27427 0.28701 -11 145 2 3 0 0.21851 0.8 0.26859 
0.30582 -11 146 2 1 0 0.2178 0.8 0.22666 0.31128 -11 147 2 1 1 0.24484 0.8 0.20365 0.30276 -11 148 2 3 0 0.22017 0.8 0.21582 0.29858 -11 149 2 4 1 0.20838 0.8 0.20549 0.3077 -11 150 1 1 1 0.26729 0.8 0.24835 0.33951 -11 151 2 4 1 0.25459 0.78232 0.22098 0.37519 -11 152 1 1 0 0.30189 0.77343 0.22356 0.39847 -11 153 2 2 0 0.277 0.77971 0.21763 0.41577 -11 154 2 1 0 0.33926 0.74634 0.2 0.37846 -11 155 2 4 1 0.33888 0.74843 0.2 0.38359 -11 156 2 4 1 0.36489 0.74266 0.2 0.33555 -11 157 2 4 0 0.36922 0.72864 0.21039 0.36898 -11 158 2 4 0 0.37651 0.71557 0.25706 0.3556 -11 159 2 1 0 0.3562 0.70798 0.27144 0.39188 -11 160 2 4 0 0.37324 0.73197 0.24524 0.39553 -11 161 2 4 1 0.33247 0.71528 0.25213 0.40268 -11 162 2 4 0 0.32664 0.75746 0.24262 0.39697 -11 163 2 4 0 0.32737 0.75497 0.20079 0.37946 -11 164 2 1 1 0.32241 0.8 0.2 0.40464 -11 165 1 1 1 0.33485 0.8 0.20678 0.40402 -11 166 1 1 1 0.35907 0.8 0.22123 0.40926 -11 167 2 4 1 0.34949 0.8 0.27264 0.41539 -11 168 2 4 0 0.37267 0.8 0.30797 0.436 -11 169 1 4 0 0.39837 0.76644 0.29919 0.41091 -11 170 1 3 0 0.38419 0.74126 0.2761 0.42169 -11 171 1 4 0 0.38649 0.73896 0.24769 0.45184 -11 172 1 1 1 0.3735 0.79988 0.21732 0.43546 -11 173 1 3 0 0.36009 0.8 0.22321 0.44074 -11 174 1 4 0 0.38648 0.8 0.21086 0.45846 -11 175 1 1 0 0.37753 0.7835 0.20936 0.47693 -11 176 1 1 0 0.32905 0.77017 0.20358 0.42754 -11 177 1 1 0 0.3851 0.78473 0.20506 0.44052 -11 178 2 3 1 0.36796 0.78051 0.2 0.3813 -11 179 2 2 1 0.35199 0.79315 0.22019 0.36803 -11 180 2 2 1 0.31801 0.8 0.24509 0.37891 -11 181 2 3 1 0.30674 0.8 0.23432 0.38882 -11 182 2 3 0 0.33107 0.75848 0.2 0.38555 -11 183 2 3 0 0.33389 0.73813 0.2101 0.38101 -11 184 2 3 0 0.3633 0.73729 0.2 0.40103 -11 185 1 2 1 0.35394 0.73968 0.2 0.36388 -11 186 1 2 1 0.3625 0.78628 0.2 0.38816 -11 187 1 2 1 0.37426 0.8 0.2 0.46508 -11 188 1 3 1 0.34876 0.8 0.22985 0.47632 -11 189 1 3 0 0.33813 0.78029 0.2056 0.47281 -11 190 1 4 0 0.37915 0.79114 0.2 0.45265 -11 191 1 2 1 0.40233 0.8 0.24541 0.49344 -11 192 1 2 1 0.42529 0.78379 0.25813 0.51788 -11 193 1 3 0 0.44473 0.8 0.26037 0.55478 -11 194 1 2 1 0.42335 0.77949 0.25251 0.56739 -11 195 1 2 0 0.41433 0.74312 0.25601 0.56701 -11 196 1 2 0 0.3768 0.76441 0.25915 0.56346 -11 197 1 2 1 0.36343 0.78095 0.20219 0.55189 -11 198 1 2 1 0.32245 0.76431 0.2 0.52809 -11 199 1 3 0 0.28258 0.8 0.23392 0.52097 -11 200 1 4 0 0.29243 0.79748 0.22832 0.51401 -11 201 1 2 1 0.30384 0.79552 0.23835 0.52741 \ No newline at end of file diff --git a/R/inst/extdata/ug_exampleData.txt b/R/inst/extdata/ug_exampleData.txt deleted file mode 100644 index 257795cc..00000000 --- a/R/inst/extdata/ug_exampleData.txt +++ /dev/null @@ -1,1801 +0,0 @@ -trial offer accept subjID group -1 3 0 1 LM -2 3 0 1 LM -3 5 0 1 LM -4 4 0 1 LM -5 2 0 1 LM -6 4 0 1 LM -7 3 0 1 LM -8 4 0 1 LM -9 3 0 1 LM -10 4 0 1 LM -11 5 1 1 LM -12 3 0 1 LM -13 5 1 1 LM -14 3 0 1 LM -15 1 0 1 LM -16 2 0 1 LM -17 3 0 1 LM -18 6 1 1 LM -19 2 0 1 LM -20 2 0 1 LM -21 4 1 1 LM -22 3 0 1 LM -23 5 1 1 LM -24 2 0 1 LM -25 4 1 1 LM -26 4 1 1 LM -27 2 0 1 LM -28 6 1 1 LM -29 4 1 1 LM -30 7 1 1 LM -31 9 1 1 LM -32 7 1 1 LM -33 10 1 1 LM -34 7 1 1 LM -35 8 1 1 LM -36 8 1 1 LM -37 11 1 1 LM -38 7 1 1 LM -39 6 1 1 LM -40 6 1 1 LM -41 12 1 1 LM -42 9 1 1 LM -43 5 1 1 LM -44 8 1 1 LM -45 6 1 1 LM -46 7 1 1 LM -47 8 1 1 LM -48 7 1 1 LM -49 8 1 1 LM -50 6 1 1 LM -51 8 1 1 LM -52 7 1 1 LM -53 9 1 1 LM -54 9 1 1 LM -55 8 1 1 LM -56 10 1 1 LM -57 6 1 1 LM -58 10 1 1 LM -59 10 1 1 LM -60 8 1 1 LM -1 3 0 2 LM -2 3 0 2 LM -3 5 0 2 LM -4 4 0 2 LM -5 2 0 2 LM -6 4 0 2 
[... 1,800 deleted data records elided (subjects 1 through 30, trials 1 through 60 each, all in group LM) ...]
0 28 LM -10 4 0 28 LM -11 5 0 28 LM -12 3 0 28 LM -13 5 1 28 LM -14 3 0 28 LM -15 1 0 28 LM -16 2 0 28 LM -17 3 0 28 LM -18 6 1 28 LM -19 2 0 28 LM -20 2 0 28 LM -21 4 0 28 LM -22 3 0 28 LM -23 5 1 28 LM -24 2 0 28 LM -25 4 0 28 LM -26 4 0 28 LM -27 2 0 28 LM -28 6 1 28 LM -29 4 0 28 LM -30 7 1 28 LM -31 9 1 28 LM -32 7 1 28 LM -33 10 1 28 LM -34 7 1 28 LM -35 8 1 28 LM -36 8 1 28 LM -37 11 1 28 LM -38 7 1 28 LM -39 6 1 28 LM -40 6 1 28 LM -41 12 1 28 LM -42 9 1 28 LM -43 5 0 28 LM -44 8 1 28 LM -45 6 1 28 LM -46 7 1 28 LM -47 8 1 28 LM -48 7 1 28 LM -49 8 1 28 LM -50 6 1 28 LM -51 8 1 28 LM -52 7 1 28 LM -53 9 1 28 LM -54 9 1 28 LM -55 8 1 28 LM -56 10 1 28 LM -57 6 1 28 LM -58 10 1 28 LM -59 10 1 28 LM -60 8 1 28 LM -1 3 0 29 LM -2 3 0 29 LM -3 5 1 29 LM -4 4 0 29 LM -5 2 0 29 LM -6 4 0 29 LM -7 3 0 29 LM -8 4 0 29 LM -9 3 0 29 LM -10 4 0 29 LM -11 5 1 29 LM -12 3 0 29 LM -13 5 1 29 LM -14 3 0 29 LM -15 1 0 29 LM -16 2 0 29 LM -17 3 0 29 LM -18 6 1 29 LM -19 2 0 29 LM -20 2 0 29 LM -21 4 0 29 LM -22 3 0 29 LM -23 5 0 29 LM -24 2 0 29 LM -25 4 0 29 LM -26 4 0 29 LM -27 2 0 29 LM -28 6 1 29 LM -29 4 0 29 LM -30 7 1 29 LM -31 9 1 29 LM -32 7 1 29 LM -33 10 1 29 LM -34 7 1 29 LM -35 8 1 29 LM -36 8 1 29 LM -37 11 1 29 LM -38 7 1 29 LM -39 6 1 29 LM -40 6 1 29 LM -41 12 1 29 LM -42 9 1 29 LM -43 5 0 29 LM -44 8 1 29 LM -45 6 1 29 LM -46 7 1 29 LM -47 8 1 29 LM -48 7 1 29 LM -49 8 1 29 LM -50 6 1 29 LM -51 8 1 29 LM -52 7 1 29 LM -53 9 1 29 LM -54 9 1 29 LM -55 8 1 29 LM -56 10 1 29 LM -57 6 1 29 LM -58 10 1 29 LM -59 10 1 29 LM -60 8 1 29 LM -1 3 0 30 LM -2 3 0 30 LM -3 5 1 30 LM -4 4 0 30 LM -5 2 0 30 LM -6 4 0 30 LM -7 3 0 30 LM -8 4 0 30 LM -9 3 0 30 LM -10 4 0 30 LM -11 5 1 30 LM -12 3 0 30 LM -13 5 1 30 LM -14 3 0 30 LM -15 1 0 30 LM -16 2 0 30 LM -17 3 0 30 LM -18 6 1 30 LM -19 2 0 30 LM -20 2 0 30 LM -21 4 0 30 LM -22 3 0 30 LM -23 5 1 30 LM -24 2 0 30 LM -25 4 0 30 LM -26 4 0 30 LM -27 2 0 30 LM -28 6 1 30 LM -29 4 0 30 LM -30 7 1 30 LM -31 9 1 30 LM -32 7 1 30 LM -33 10 1 30 LM -34 7 1 30 LM -35 8 1 30 LM -36 8 1 30 LM -37 11 1 30 LM -38 7 1 30 LM -39 6 0 30 LM -40 6 1 30 LM -41 12 1 30 LM -42 9 1 30 LM -43 5 1 30 LM -44 8 1 30 LM -45 6 1 30 LM -46 7 1 30 LM -47 8 1 30 LM -48 7 1 30 LM -49 8 1 30 LM -50 6 1 30 LM -51 8 1 30 LM -52 7 1 30 LM -53 9 1 30 LM -54 9 1 30 LM -55 8 1 30 LM -56 10 1 30 LM -57 6 1 30 LM -58 10 1 30 LM -59 10 1 30 LM -60 8 1 30 LM diff --git a/R/inst/extdata/wcs_answersheet.txt b/R/inst/extdata/wcs_answersheet.txt deleted file mode 100644 index 207ac3a1..00000000 --- a/R/inst/extdata/wcs_answersheet.txt +++ /dev/null @@ -1,4 +0,0 @@ - 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 -Color 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2 2 1 4 1 2 3 4 1 2 3 4 1 4 3 2 4 1 3 1 3 4 2 3 4 1 3 4 1 2 1 2 1 4 3 2 3 4 3 2 4 1 4 3 2 1 3 4 3 2 3 4 2 1 2 4 2 1 3 1 2 4 3 1 2 -Form 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3 1 3 1 4 2 3 1 4 3 4 2 1 3 2 1 4 2 4 1 4 2 3 2 1 2 1 3 4 2 3 4 2 4 3 2 1 4 2 4 3 1 2 4 1 2 3 4 1 3 2 1 2 4 1 3 4 3 1 3 4 2 3 1 3 -Number 1 4 2 1 
4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1 1 4 2 1 4 1 4 3 4 2 1 3 2 1 3 4 2 3 4 1 2 3 2 3 4 2 3 4 2 1 4 1 3 4 1 4 2 3 1 4 1 3 4 2 3 2 1 3 2 4 1 3 2 4 1 3 2 1 3 2 4 3 2 1 diff --git a/R/inst/extdata/wcs_exampleData.txt b/R/inst/extdata/wcs_exampleData.txt deleted file mode 100644 index 80cf86db..00000000 --- a/R/inst/extdata/wcs_exampleData.txt +++ /dev/null @@ -1,1158 +0,0 @@ -choice outcome subjID trial -1 0 1 1 -1 1 1 2 -4 1 1 3 -1 1 1 4 -2 1 1 5 -3 1 1 6 -4 1 1 7 -1 1 1 8 -2 1 1 9 -3 1 1 10 -4 1 1 11 -1 1 1 12 -4 0 1 13 -3 0 1 14 -1 1 1 15 -4 1 1 16 -2 1 1 17 -4 1 1 18 -1 1 1 19 -4 1 1 20 -2 1 1 21 -3 1 1 22 -2 1 1 23 -1 1 1 24 -2 0 1 25 -2 1 1 26 -3 1 1 27 -4 1 1 28 -2 1 1 29 -1 1 1 30 -4 1 1 31 -1 1 1 32 -3 1 1 33 -4 1 1 34 -1 1 1 35 -4 0 1 36 -2 0 1 37 -3 1 1 38 -1 0 1 39 -4 1 1 40 -1 1 1 41 -3 0 1 42 -4 0 1 43 -2 1 1 44 -1 1 1 45 -3 1 1 46 -4 1 1 47 -3 1 1 48 -2 1 1 49 -3 1 1 50 -4 1 1 51 -2 1 1 52 -1 1 1 53 -2 0 1 54 -4 0 1 55 -2 0 1 56 -1 0 1 57 -3 0 1 58 -3 1 1 59 -2 0 1 60 -4 0 1 61 -3 1 1 62 -1 1 1 63 -3 1 1 64 -1 1 1 65 -3 1 1 66 -1 1 1 67 -4 1 1 68 -2 1 1 69 -3 1 1 70 -1 1 1 71 -3 1 1 72 -3 0 1 73 -2 1 1 74 -1 1 1 75 -3 1 1 76 -2 1 1 77 -1 1 1 78 -3 1 1 79 -4 1 1 80 -2 1 1 81 -3 1 1 82 -4 1 1 83 -1 0 2 1 -1 1 2 2 -1 0 2 3 -1 1 2 4 -2 1 2 5 -3 1 2 6 -4 1 2 7 -1 1 2 8 -2 1 2 9 -3 1 2 10 -4 1 2 11 -1 1 2 12 -3 0 2 13 -3 1 2 14 -2 1 2 15 -4 1 2 16 -1 1 2 17 -3 1 2 18 -1 1 2 19 -3 1 2 20 -4 1 2 21 -2 1 2 22 -3 1 2 23 -4 0 2 24 -1 0 2 25 -3 0 2 26 -4 0 2 27 -1 0 2 28 -2 1 2 29 -3 1 2 30 -4 1 2 31 -1 0 2 32 -3 0 2 33 -3 1 2 34 -2 1 2 35 -4 0 2 36 -4 1 2 37 -3 0 2 38 -4 1 2 39 -3 1 2 40 -1 1 2 41 -4 0 2 42 -3 0 2 43 -1 1 2 44 -2 1 2 45 -3 1 2 46 -4 1 2 47 -1 1 2 48 -2 0 2 49 -2 1 2 50 -1 1 2 51 -2 1 2 52 -4 1 2 53 -2 0 2 54 -3 1 2 55 -4 1 2 56 -3 1 2 57 -1 1 2 58 -3 1 2 59 -4 1 2 60 -2 1 2 61 -3 1 2 62 -1 1 2 63 -3 1 2 64 -2 0 2 65 -3 0 2 66 -1 0 2 67 -4 0 2 68 -2 0 2 69 -3 0 2 70 -4 1 2 71 -3 1 2 72 -3 0 2 73 -4 0 2 74 -2 0 2 75 -3 1 2 76 -4 0 2 77 -1 1 2 78 -1 0 2 79 -4 1 2 80 -2 1 2 81 -3 1 2 82 -4 1 2 83 -1 1 2 84 -4 0 2 85 -3 1 2 86 -2 1 2 87 -4 0 2 88 -1 0 2 89 -3 0 2 90 -4 0 2 91 -1 0 2 92 -2 1 2 93 -1 1 2 94 -4 1 2 95 -1 1 2 96 -4 0 2 97 -3 0 2 98 -2 0 2 99 -3 0 2 100 -4 0 2 101 -3 1 2 102 -2 0 2 103 -3 0 2 104 -1 1 2 105 -4 0 2 106 -3 0 2 107 -1 0 2 108 -1 0 2 109 -3 0 2 110 -4 0 2 111 -3 1 2 112 -3 0 2 113 -2 0 2 114 -1 1 2 115 -2 0 2 116 -1 0 2 117 -1 0 2 118 -3 0 2 119 -4 0 2 120 -3 0 2 121 -3 0 2 122 -3 1 2 123 -4 0 2 124 -2 0 2 125 -3 1 2 126 -1 0 2 127 -3 0 2 128 -1 0 3 1 -1 1 3 2 -4 1 3 3 -1 1 3 4 -2 1 3 5 -3 1 3 6 -4 1 3 7 -1 1 3 8 -2 1 3 9 -3 1 3 10 -4 1 3 11 -1 1 3 12 -4 0 3 13 -3 0 3 14 -2 0 3 15 -4 1 3 16 -1 0 3 17 -3 0 3 18 -1 1 3 19 -3 0 3 20 -4 0 3 21 -3 1 3 22 -2 1 3 23 -1 1 3 24 -4 0 3 25 -1 1 3 26 -3 1 3 27 -4 1 3 28 -2 1 3 29 -3 1 3 30 -4 1 3 31 -2 1 3 32 -4 1 3 33 -3 1 3 34 -1 0 3 35 -4 0 3 36 -2 0 3 37 -3 0 3 38 -1 0 3 39 -4 0 3 40 -1 1 3 41 -3 0 3 42 -4 1 3 43 -2 0 3 44 -3 0 3 45 -3 1 3 46 -4 1 3 47 -3 0 3 48 -2 0 3 49 -3 0 3 50 -4 0 3 51 -2 1 3 52 -1 0 3 53 -2 0 3 54 -4 0 3 55 -2 0 3 56 -1 0 3 57 -1 1 3 58 -3 1 3 59 -4 1 3 60 -2 1 3 61 -3 1 3 62 -1 1 3 63 -3 1 3 64 -1 1 3 65 -3 1 3 66 -1 1 3 67 -1 1 3 68 -2 0 3 69 -1 1 3 70 -4 1 3 71 -1 0 3 72 -4 1 3 73 -2 1 3 74 -1 1 3 75 -3 1 3 76 -2 1 3 77 -1 1 3 78 -3 1 3 79 -4 1 3 80 -2 1 3 81 -3 1 3 82 -4 0 3 83 -1 0 3 84 -4 1 3 85 -2 1 3 86 -3 1 3 87 -4 1 3 88 -1 1 3 89 -3 1 3 90 -4 1 3 91 -1 1 3 92 -2 1 3 93 -1 1 3 94 -2 0 3 95 -1 0 3 
96 -4 1 3 97 -3 1 3 98 -2 1 3 99 -1 1 3 100 -4 1 3 101 -2 1 3 102 -4 1 3 103 -3 1 3 104 -1 1 3 105 -2 1 3 106 -3 0 3 107 -2 1 3 108 -2 0 3 109 -2 1 3 110 -1 1 3 111 -3 1 3 112 -2 1 3 113 -4 1 3 114 -1 1 3 115 -3 1 3 116 -2 1 3 117 -4 1 3 118 -1 1 3 119 -1 0 4 1 -1 1 4 2 -4 1 4 3 -1 1 4 4 -2 1 4 5 -3 1 4 6 -4 1 4 7 -1 1 4 8 -2 1 4 9 -3 1 4 10 -4 1 4 11 -1 1 4 12 -3 1 4 13 -2 1 4 14 -1 1 4 15 -4 1 4 16 -2 1 4 17 -4 1 4 18 -1 1 4 19 -4 1 4 20 -2 1 4 21 -3 1 4 22 -2 1 4 23 -1 0 4 24 -4 1 4 25 -3 0 4 26 -3 1 4 27 -4 1 4 28 -2 1 4 29 -1 1 4 30 -4 1 4 31 -1 1 4 32 -3 1 4 33 -4 1 4 34 -1 1 4 35 -4 1 4 36 -2 0 4 37 -3 1 4 38 -1 0 4 39 -4 1 4 40 -1 1 4 41 -3 0 4 42 -4 0 4 43 -2 1 4 44 -3 0 4 45 -2 0 4 46 -4 1 4 47 -3 1 4 48 -2 1 4 49 -3 1 4 50 -4 1 4 51 -2 1 4 52 -1 1 4 53 -2 1 4 54 -4 1 4 55 -2 1 4 56 -1 0 4 57 -1 1 4 58 -3 1 4 59 -2 0 4 60 -2 1 4 61 -3 1 4 62 -1 1 4 63 -3 1 4 64 -1 1 4 65 -3 1 4 66 -1 1 4 67 -4 1 4 68 -2 1 4 69 -1 0 4 70 -4 0 4 71 -3 0 4 72 -4 0 4 73 -2 0 4 74 -1 0 4 75 -3 0 4 76 -2 0 4 77 -1 0 4 78 -3 0 4 79 -4 1 4 80 -2 1 4 81 -3 0 4 82 -1 1 4 83 -3 0 4 84 -4 0 4 85 -2 0 4 86 -3 0 4 87 -4 0 4 88 -1 0 4 89 -3 0 4 90 -4 0 4 91 -1 0 4 92 -2 1 4 93 -1 0 4 94 -2 0 4 95 -2 1 4 96 -4 1 4 97 -3 1 4 98 -2 1 4 99 -1 1 4 100 -4 1 4 101 -2 1 4 102 -4 1 4 103 -3 1 4 104 -1 1 4 105 -2 0 4 106 -3 0 4 107 -1 0 4 108 -3 1 4 109 -2 1 4 110 -1 1 4 111 -3 1 4 112 -2 1 4 113 -4 1 4 114 -1 1 4 115 -3 1 4 116 -2 1 4 117 -1 0 4 118 -4 0 4 119 -2 0 4 120 -1 0 4 121 -3 0 4 122 -1 0 4 123 -2 1 4 124 -4 1 4 125 -3 1 4 126 -1 0 4 127 -2 0 4 128 -1 0 5 1 -1 1 5 2 -4 1 5 3 -1 1 5 4 -2 1 5 5 -3 1 5 6 -4 1 5 7 -1 1 5 8 -2 1 5 9 -3 1 5 10 -4 1 5 11 -1 1 5 12 -3 1 5 13 -2 1 5 14 -1 1 5 15 -4 1 5 16 -2 1 5 17 -4 1 5 18 -1 1 5 19 -4 1 5 20 -2 1 5 21 -3 1 5 22 -2 1 5 23 -1 0 5 24 -2 0 5 25 -2 1 5 26 -4 0 5 27 -1 0 5 28 -2 1 5 29 -1 1 5 30 -4 1 5 31 -1 1 5 32 -3 1 5 33 -4 1 5 34 -1 1 5 35 -4 1 5 36 -2 1 5 37 -3 1 5 38 -1 0 5 39 -4 1 5 40 -1 1 5 41 -3 0 5 42 -4 0 5 43 -2 1 5 44 -3 0 5 45 -2 0 5 46 -4 1 5 47 -1 0 5 48 -2 1 5 49 -2 0 5 50 -4 1 5 51 -2 1 5 52 -1 1 5 53 -2 1 5 54 -4 1 5 55 -2 1 5 56 -1 1 5 57 -3 1 5 58 -1 1 5 59 -2 1 5 60 -4 0 5 61 -3 1 5 62 -1 1 5 63 -3 1 5 64 -1 1 5 65 -4 0 5 66 -1 1 5 67 -4 1 5 68 -2 1 5 69 -3 1 5 70 -1 1 5 71 -4 1 5 72 -3 1 5 73 -4 1 5 74 -2 1 5 75 -1 1 5 76 -3 0 5 77 -2 0 5 78 -1 0 5 79 -4 1 5 80 -2 1 5 81 -4 0 5 82 -1 0 5 83 -4 0 5 84 -2 1 5 85 -3 1 5 86 -2 1 5 87 -1 0 5 88 -2 0 5 89 -2 1 5 90 -3 1 5 91 -4 1 5 92 -2 1 5 93 -1 1 5 94 -4 1 5 95 -1 1 5 96 -3 1 5 97 -4 1 5 98 -1 1 5 99 -1 0 6 1 -4 0 6 2 -2 0 6 3 -1 1 6 4 -2 1 6 5 -3 1 6 6 -4 1 6 7 -1 1 6 8 -2 1 6 9 -3 1 6 10 -4 1 6 11 -1 1 6 12 -4 1 6 13 -3 0 6 14 -2 0 6 15 -4 1 6 16 -1 0 6 17 -3 0 6 18 -1 1 6 19 -4 1 6 20 -2 1 6 21 -3 1 6 22 -2 1 6 23 -1 1 6 24 -2 1 6 25 -1 1 6 26 -3 1 6 27 -4 1 6 28 -2 1 6 29 -3 0 6 30 -4 1 6 31 -2 0 6 32 -4 0 6 33 -3 0 6 34 -1 1 6 35 -4 1 6 36 -2 1 6 37 -3 1 6 38 -1 1 6 39 -4 1 6 40 -1 1 6 41 -3 1 6 42 -4 1 6 43 -2 1 6 44 -3 0 6 45 -2 0 6 46 -1 0 6 47 -3 1 6 48 -2 1 6 49 -4 0 6 50 -1 0 6 51 -2 1 6 52 -2 0 6 53 -2 1 6 54 -4 1 6 55 -2 1 6 56 -1 1 6 57 -3 1 6 58 -1 1 6 59 -2 1 6 60 -4 1 6 61 -3 1 6 62 -1 1 6 63 -2 0 6 64 -1 1 6 65 -3 1 6 66 -4 0 6 67 -4 1 6 68 -2 1 6 69 -3 1 6 70 -1 1 6 71 -4 1 6 72 -3 1 6 73 -4 1 6 74 -2 1 6 75 -1 1 6 76 -3 1 6 77 -2 0 6 78 -2 0 6 79 -4 1 6 80 -2 1 6 81 -3 1 6 82 -4 1 6 83 -1 1 6 84 -2 1 6 85 -3 1 6 86 -2 1 6 87 -3 1 6 88 -4 1 6 89 -2 1 7 1 -3 0 7 2 -4 1 7 3 -4 0 7 4 -2 1 7 5 -3 1 7 6 -4 1 7 7 -1 1 7 8 -2 1 7 9 -3 1 7 10 -4 1 7 11 -1 1 7 12 -4 1 7 13 -3 1 7 14 -2 0 7 
15 -4 1 7 16 -1 0 7 17 -3 0 7 18 -1 1 7 19 -4 1 7 20 -2 1 7 21 -2 0 7 22 -2 1 7 23 -1 1 7 24 -2 1 7 25 -1 1 7 26 -3 1 7 27 -4 1 7 28 -2 1 7 29 -1 0 7 30 -4 1 7 31 -2 1 7 32 -4 1 7 33 -3 1 7 34 -1 0 7 35 -4 0 7 36 -2 0 7 37 -3 0 7 38 -1 0 7 39 -4 0 7 40 -1 1 7 41 -4 0 7 42 -4 1 7 43 -2 0 7 44 -3 0 7 45 -3 1 7 46 -4 1 7 47 -3 0 7 48 -3 1 7 49 -2 1 7 50 -1 1 7 51 -2 1 7 52 -4 1 7 53 -1 1 7 54 -4 0 7 55 -2 0 7 56 -1 0 7 57 -3 0 7 58 -1 0 7 59 -2 0 7 60 -4 0 7 61 -3 1 7 62 -1 1 7 63 -2 0 7 64 -2 0 7 65 -1 0 7 66 -4 0 7 67 -4 1 7 68 -2 1 7 69 -3 1 7 70 -1 1 7 71 -1 0 7 72 -2 0 7 73 -4 1 7 74 -2 1 7 75 -1 1 7 76 -3 1 7 77 -2 1 7 78 -1 1 7 79 -4 1 7 80 -2 1 7 81 -3 0 7 82 -1 1 7 83 -1 0 7 84 -2 1 7 85 -3 1 7 86 -2 1 7 87 -3 0 7 88 -4 0 7 89 -2 0 7 90 -3 1 7 91 -4 1 7 92 -2 1 7 93 -1 0 7 94 -4 1 7 95 -1 0 7 96 -3 0 7 97 -4 0 7 98 -1 0 7 99 -4 0 7 100 -2 0 7 101 -3 0 7 102 -4 1 7 103 -4 0 7 104 -1 1 7 105 -4 0 7 106 -3 0 7 107 -2 0 7 108 -1 0 7 109 -2 0 7 110 -4 1 7 111 -3 0 7 112 -2 0 7 113 -3 0 7 114 -1 1 7 115 -2 1 7 116 -1 0 7 117 -1 1 7 118 -3 1 7 119 -4 1 7 120 -3 1 7 121 -1 1 7 122 -3 1 7 123 -4 1 7 124 -2 1 7 125 -3 1 7 126 -2 0 7 127 -2 0 7 128 -1 0 8 1 -3 0 8 2 -4 1 8 3 -1 1 8 4 -2 1 8 5 -3 1 8 6 -4 1 8 7 -1 1 8 8 -2 1 8 9 -3 1 8 10 -4 1 8 11 -1 1 8 12 -3 1 8 13 -3 0 8 14 -1 1 8 15 -4 1 8 16 -1 0 8 17 -4 1 8 18 -1 1 8 19 -4 1 8 20 -2 1 8 21 -3 1 8 22 -2 1 8 23 -1 1 8 24 -1 0 8 25 -1 1 8 26 -4 0 8 27 -4 1 8 28 -2 1 8 29 -1 0 8 30 -4 1 8 31 -1 0 8 32 -3 0 8 33 -4 0 8 34 -1 0 8 35 -4 0 8 36 -2 0 8 37 -3 0 8 38 -1 0 8 39 -4 0 8 40 -1 1 8 41 -3 0 8 42 -4 1 8 43 -2 0 8 44 -1 0 8 45 -2 0 8 46 -4 1 8 47 -3 0 8 48 -2 0 8 49 -3 0 8 50 -4 0 8 51 -2 1 8 52 -1 0 8 53 -2 0 8 54 -4 0 8 55 -2 0 8 56 -3 1 8 57 -1 1 8 58 -3 1 8 59 -4 1 8 60 -2 1 8 61 -3 1 8 62 -1 1 8 63 -3 1 8 64 -1 1 8 65 -3 1 8 66 -1 0 8 67 -1 1 8 68 -4 1 8 69 -1 1 8 70 -4 1 8 71 -3 1 8 72 -4 1 8 73 -2 1 8 74 -1 1 8 75 -3 1 8 76 -2 1 8 77 -1 0 8 78 -3 0 8 79 -4 1 8 80 -2 0 8 81 -3 1 8 82 -4 0 8 83 -1 0 8 84 -4 1 8 85 -2 1 8 86 -3 1 8 87 -4 1 8 88 -1 1 8 89 -3 1 8 90 -4 1 8 91 -1 1 8 92 -2 1 8 93 -1 1 8 94 -2 0 8 95 -1 0 8 96 -4 1 8 97 -3 1 8 98 -2 1 8 99 -1 1 8 100 -4 1 8 101 -2 1 8 102 -4 1 8 103 -3 1 8 104 -1 1 8 105 -4 0 8 106 -4 1 8 107 -1 1 8 108 -2 1 8 109 -3 1 8 110 -4 1 8 111 -1 1 8 112 -3 1 8 113 -2 1 8 114 -1 1 8 115 -2 1 8 116 -4 0 8 117 -4 1 8 118 -1 1 8 119 -3 1 8 120 -2 1 8 121 -1 1 8 122 -3 1 8 123 -2 1 8 124 -4 1 8 125 -3 1 8 126 -2 1 8 127 -2 1 9 1 -1 1 9 2 -4 1 9 3 -1 1 9 4 -2 1 9 5 -3 1 9 6 -4 1 9 7 -1 1 9 8 -2 1 9 9 -3 1 9 10 -4 0 9 11 -1 1 9 12 -4 0 9 13 -3 0 9 14 -1 1 9 15 -4 1 9 16 -1 0 9 17 -4 1 9 18 -1 1 9 19 -4 1 9 20 -2 1 9 21 -3 1 9 22 -2 1 9 23 -1 1 9 24 -2 1 9 25 -1 1 9 26 -3 1 9 27 -4 1 9 28 -2 1 9 29 -3 0 9 30 -4 1 9 31 -1 1 9 32 -3 1 9 33 -4 1 9 34 -1 1 9 35 -4 1 9 36 -4 0 9 37 -3 1 9 38 -1 1 9 39 -4 1 9 40 -1 1 9 41 -3 1 9 42 -4 1 9 43 -2 1 9 44 -1 0 9 45 -2 1 9 46 -4 0 9 47 -3 1 9 48 -2 1 9 49 -3 0 9 50 -4 0 9 51 -2 0 9 52 -1 0 9 53 -1 0 9 54 -4 0 9 55 -2 0 9 56 -1 0 9 57 -3 0 9 58 -1 0 9 59 -2 1 9 60 -4 1 9 61 -3 1 9 62 -1 0 9 63 -2 0 9 64 -2 0 9 65 -1 0 9 66 -4 0 9 67 -4 0 9 68 -4 1 9 69 -1 1 9 70 -1 0 9 71 -4 0 9 72 -3 0 9 73 -4 0 9 74 -2 0 9 75 -1 0 9 76 -3 0 9 77 -2 0 9 78 -1 0 9 79 -4 1 9 80 -2 1 9 81 -4 0 9 82 -1 0 9 83 -4 0 9 84 -2 1 9 85 -3 1 9 86 -2 1 9 87 -1 0 9 88 -4 1 9 89 -2 1 9 90 -3 1 9 91 -4 1 9 92 -2 1 9 93 -1 1 9 94 -4 1 9 95 -1 1 9 96 -3 1 9 97 -4 1 9 98 -1 0 9 99 -4 0 9 100 -2 0 9 101 -3 1 9 102 -4 0 9 103 -4 1 9 104 -1 1 9 105 -4 1 9 106 -3 1 9 107 -2 1 9 108 -1 1 9 109 -3 1 9 110 
-4 1 9 111 -3 1 9 112 -2 1 9 113 -3 0 9 114 -1 1 9 115 -3 0 9 116 -2 0 9 117 -1 1 9 118 -4 0 9 119 -4 1 9 120 -3 1 9 121 -1 1 9 122 -3 1 9 123 -4 1 9 124 -2 1 9 125 -3 1 9 126 -1 1 9 127 -1 0 9 128 -1 0 10 1 -1 1 10 2 -2 0 10 3 -1 1 10 4 -4 0 10 5 -3 1 10 6 -4 1 10 7 -1 1 10 8 -2 1 10 9 -3 1 10 10 -2 0 10 11 -1 1 10 12 -4 1 10 13 -3 1 10 14 -2 1 10 15 -4 1 10 16 -1 1 10 17 -3 1 10 18 -1 1 10 19 -4 0 10 20 -2 0 10 21 -2 1 10 22 -2 0 10 23 -1 0 10 24 -4 0 10 25 -1 0 10 26 -3 0 10 27 -4 0 10 28 -2 1 10 29 -3 0 10 30 -4 0 10 31 -2 0 10 32 -4 1 10 33 -3 1 10 34 -2 1 10 35 -1 0 10 36 -4 1 10 37 -2 0 10 38 -4 0 10 39 -3 0 10 40 -1 1 10 41 -2 0 10 42 -3 1 10 43 -1 0 10 44 -2 0 10 45 -3 1 10 46 -1 0 10 47 -3 1 10 48 -2 1 10 49 -2 0 10 50 -4 1 10 51 -3 0 10 52 -2 0 10 53 -1 0 10 54 -1 0 10 55 -4 0 10 56 -2 0 10 57 -1 0 10 58 -3 0 10 59 -2 1 10 60 -4 1 10 61 -3 1 10 62 -2 0 10 63 -1 0 10 64 -1 0 10 65 -4 0 10 66 -2 0 10 67 -1 1 10 68 -4 0 10 69 -3 1 10 70 -4 1 10 71 -1 1 10 72 -2 1 10 73 -3 1 10 74 -4 1 10 75 -1 1 10 76 -4 1 10 77 -3 1 10 78 -2 1 10 79 -4 1 10 80 -1 0 10 81 -3 0 10 82 -1 1 10 83 -3 0 10 84 -4 0 10 85 -3 1 10 86 -3 0 10 87 -4 0 10 88 -4 0 10 89 -3 0 10 90 -4 0 10 91 -1 0 10 92 -2 1 10 93 -1 0 10 94 -4 1 10 95 -1 0 10 96 -4 1 10 97 -3 1 10 98 -2 1 10 99 -3 0 10 100 -4 1 10 101 -3 0 10 102 -2 0 10 103 -4 0 10 104 -1 1 10 105 -3 0 10 106 -4 1 10 107 -2 0 10 108 -2 1 10 109 -3 1 10 110 -4 1 10 111 -1 1 10 112 -3 1 10 113 -2 1 10 114 -1 1 10 115 -2 1 10 116 -4 1 10 117 -1 1 10 118 -3 0 10 119 -4 0 10 120 -3 0 10 121 -1 1 10 122 -3 1 10 123 -4 0 10 124 -4 1 10 125 -3 1 10 126 -2 1 10 127 -2 0 10 128 diff --git a/R/inst/stan_files b/R/inst/stan_files new file mode 120000 index 00000000..1183b17a --- /dev/null +++ b/R/inst/stan_files @@ -0,0 +1 @@ +../../commons/stan_files \ No newline at end of file diff --git a/R/inst/stan_files/bandit2arm_delta.stan b/R/inst/stan_files/bandit2arm_delta.stan deleted file mode 100644 index 3c44ddde..00000000 --- a/R/inst/stan_files/bandit2arm_delta.stan +++ /dev/null @@ -1,109 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; // no lower and upper bounds -} -transformed data { - vector[2] initV; // initial values for EV - initV = rep_vector(0.0, 2); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; // learning rate - vector[N] tau_pr; // inverse temperature -} -transformed parameters { - // subject-level parameters - vector[N] A; - vector[N] tau; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 5; - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1); - tau_pr ~ normal(0, 1); - - // subject loop and trial loop - for (i in 1:N) { - vector[2] ev; // expected value - real PE; // prediction error - - ev = initV; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - choice[i, t] ~ categorical_logit(tau[i] * ev); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - - // value updating (learning) - ev[choice[i, t]] += A[i] * PE; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_tau; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all 
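For reference, the core computation of the deleted delta model, as a minimal R sketch: a Rescorla-Wagner update with softmax action selection over two arms. The function names and the A and tau values here are illustrative, not part of the package.

    softmax <- function(x) exp(x - max(x)) / sum(exp(x - max(x)))

    # Log likelihood of one subject's choices under the delta rule,
    # mirroring the trial loop of the deleted Stan file.
    delta_loglik <- function(choice, outcome, A = 0.3, tau = 2) {
      ev <- c(0, 0)                       # initial expected values (initV)
      ll <- 0
      for (t in seq_along(choice)) {
        p  <- softmax(tau * ev)           # action probabilities
        ll <- ll + log(p[choice[t]])
        pe <- outcome[t] - ev[choice[t]]  # prediction error
        ev[choice[t]] <- ev[choice[t]] + A * pe  # value updating (learning)
      }
      ll
    }
    delta_loglik(choice = c(1, 2, 1), outcome = c(1, 0, 1))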
diff --git a/R/inst/stan_files/bandit4arm2_kalman_filter.stan b/R/inst/stan_files/bandit4arm2_kalman_filter.stan
deleted file mode 100644
index 15d36c63..00000000
--- a/R/inst/stan_files/bandit4arm2_kalman_filter.stan
+++ /dev/null
@@ -1,163 +0,0 @@
[163 deleted lines: hierarchical Kalman-filter model for the 4-armed bandit task with six per-subject parameters (decay factor lambda, decay center theta, inverse softmax temperature beta, anticipated initial mean mu0, initial uncertainty sigma0, diffusion noise sigmaD); observation noise sigma_O is fixed at 4, and choices follow a categorical_logit over the filtered means]
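A rough R transcription of one filtering step from the deleted Kalman-filter model. sigma_O = 4 matches the Stan code; the other default values are illustrative only.

    # One trial's update for the chosen arm, then the diffusion step for all arms.
    kalman_step <- function(mu, v, choice, outcome,
                            lambda = 0.9, theta = 50, sigma_O = 4, sigma_D = 3) {
      k  <- v[choice] / (v[choice] + sigma_O^2)  # learning rate (Kalman gain)
      pe <- outcome - mu[choice]                 # prediction error
      mu[choice] <- mu[choice] + k * pe          # posterior mean of chosen arm
      v[choice]  <- v[choice] * (1 - k)          # posterior variance shrinks
      mu <- lambda * mu + (1 - lambda) * theta   # diffusion toward decay center
      v  <- lambda^2 * v + sigma_D^2             # diffusion inflates variance
      list(mu = mu, v = v)
    }
    kalman_step(mu = rep(50, 4), v = rep(15^2, 4), choice = 2, outcome = 80)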
diff --git a/R/inst/stan_files/bandit4arm_2par_lapse.stan b/R/inst/stan_files/bandit4arm_2par_lapse.stan
deleted file mode 100644
index b95da5ce..00000000
--- a/R/inst/stan_files/bandit4arm_2par_lapse.stan
+++ /dev/null
@@ -1,173 +0,0 @@
[173 deleted lines: Seymour et al. (2012) 4-armed bandit model without choice perseveration or reward/punishment sensitivity (Aylward et al., 2018), with three parameters (reward learning rate Arew, punishment learning rate Apun, lapse rate xi), fictive updating of unchosen decks, and a softmax choice rule mixed with a uniform lapse]
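The choice rule shared by all of the deleted *_lapse variants mixes a softmax policy with a uniform lapse over the four decks. A small R illustration (the xi value is illustrative):

    lapse_choice_prob <- function(Qsum, xi = 0.05) {
      p <- exp(Qsum - max(Qsum))
      p <- p / sum(p)          # softmax over deck values
      p * (1 - xi) + xi / 4    # mix with a uniform lapse
    }
    lapse_choice_prob(c(0.2, 0.5, -0.1, 0))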
diff --git a/R/inst/stan_files/bandit4arm_4par.stan b/R/inst/stan_files/bandit4arm_4par.stan
deleted file mode 100644
index 18d6acf9..00000000
--- a/R/inst/stan_files/bandit4arm_4par.stan
+++ /dev/null
@@ -1,176 +0,0 @@
[176 deleted lines: Seymour et al. (2012) model without choice perseveration, with four parameters (Arew, Apun, reward sensitivity R, punishment sensitivity P; R and P scaled to 0-30), fictive updating, and a plain categorical_logit choice rule]
diff --git a/R/inst/stan_files/bandit4arm_lapse.stan b/R/inst/stan_files/bandit4arm_lapse.stan
deleted file mode 100644
index 161ce311..00000000
--- a/R/inst/stan_files/bandit4arm_lapse.stan
+++ /dev/null
@@ -1,182 +0,0 @@
[182 deleted lines: the same model plus a lapse rate xi, i.e. five parameters (Arew, Apun, R, P, xi) with the softmax-plus-uniform-lapse choice rule]
diff --git a/R/inst/stan_files/bandit4arm_lapse_decay.stan b/R/inst/stan_files/bandit4arm_lapse_decay.stan
deleted file mode 100644
index b089ee21..00000000
--- a/R/inst/stan_files/bandit4arm_lapse_decay.stan
+++ /dev/null
@@ -1,201 +0,0 @@
[201 deleted lines: the lapse model plus a decay rate d (Niv et al., 2015): instead of fictive updating, every deck's Q values decay toward zero each trial before the chosen deck is updated; six parameters (Arew, Apun, R, P, xi, d)]
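Sketched in R, the decay variant's update stores the chosen deck's value, decays all decks, then overwrites the chosen deck (parameter values illustrative):

    decay_update <- function(Q, choice, pe, A = 0.2, d = 0.1) {
      Q_chosen <- Q[choice]            # store the chosen deck's value
      Q <- (1 - d) * Q                 # decay all decks toward zero
      Q[choice] <- Q_chosen + A * pe   # update only the chosen deck
      Q
    }
    decay_update(Q = c(1, 0.5, 0, -0.2), choice = 1, pe = 0.8)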
diff --git a/R/inst/stan_files/bandit4arm_singleA_lapse.stan b/R/inst/stan_files/bandit4arm_singleA_lapse.stan
deleted file mode 100644
index b383f389..00000000
--- a/R/inst/stan_files/bandit4arm_singleA_lapse.stan
+++ /dev/null
@@ -1,177 +0,0 @@
[177 deleted lines: the lapse model with a single learning rate A shared by reward and punishment (Aylward et al., 2018); four parameters (A, R, P, xi)]
diff --git a/R/inst/stan_files/bart_par4.stan b/R/inst/stan_files/bart_par4.stan
deleted file mode 100644
index 2049a200..00000000
--- a/R/inst/stan_files/bart_par4.stan
+++ /dev/null
@@ -1,131 +0,0 @@
[131 deleted lines: 4-parameter model of the Balloon Analogue Risk Task: prior success belief phi, belief-updating exponent eta, risk preference gam, and inverse temperature tau; each pump opportunity is a Bernoulli-logit decision around the optimal pump count omega, with the burst belief updated from the running pump history]
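In the deleted BART model, the burst belief and the implied optimal pump count omega follow directly from the pump history; an R transcription (parameter values illustrative):

    bart_omega <- function(n_succ, n_pump, phi = 0.9, eta = 0.01, gam = 0.6) {
      p_burst <- 1 - (phi + eta * n_succ) / (1 + eta * n_pump)  # burst belief
      -gam / log1p(-p_burst)   # omega = -gam / log(1 - p_burst)
    }
    bart_omega(n_succ = 10, n_pump = 12)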
diff --git a/R/inst/stan_files/choiceRT_ddm.stan b/R/inst/stan_files/choiceRT_ddm.stan
deleted file mode 100644
index 58baaec6..00000000
--- a/R/inst/stan_files/choiceRT_ddm.stan
+++ /dev/null
@@ -1,98 +0,0 @@
[98 deleted lines: hierarchical drift-diffusion model (based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and others on the Stan mailing lists) with boundary separation alpha, initial bias beta, drift rate delta, and non-decision time tau, where tau is bounded between RTbound and each subject's minimum RT so that fast responses keep a positive likelihood; upper- and lower-boundary response times get a wiener likelihood]
diff --git a/R/inst/stan_files/choiceRT_ddm_single.stan b/R/inst/stan_files/choiceRT_ddm_single.stan
deleted file mode 100644
index 6bacd18a..00000000
--- a/R/inst/stan_files/choiceRT_ddm_single.stan
+++ /dev/null
@@ -1,58 +0,0 @@
[58 deleted lines: single-subject version of the same DDM, with uniform priors on alpha, beta, and tau and a normal prior on delta]
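The hierarchical DDM's bound on the non-decision time is worth spelling out, since it is what guarantees a positive likelihood for fast trials; in R, with pnorm standing in for Stan's Phi_approx (which is a logistic approximation to the normal CDF):

    # Maps a raw parameter into (RTbound, minRT), as in the deleted Stan file.
    tau_transform <- function(tau_raw, minRT, RTbound = 0.1) {
      pnorm(tau_raw) * (minRT - RTbound) + RTbound
    }
    tau_transform(tau_raw = 0.5, minRT = 0.45)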
diff --git a/R/inst/stan_files/choiceRT_lba.stan b/R/inst/stan_files/choiceRT_lba.stan
deleted file mode 100644
index 222e5a27..00000000
--- a/R/inst/stan_files/choiceRT_lba.stan
+++ /dev/null
@@ -1,278 +0,0 @@
[278 deleted lines: hierarchical linear ballistic accumulator model from Annis, Miller, & Palmeri (2016), with custom lba_lpdf and lba_rng functions built from the LBA PDF and CDF, truncated-normal priors on d, A, tau, and the condition-wise drift rates v, gamma priors on their scales, and s fixed to 1 for identifiability]
diff --git a/R/inst/stan_files/choiceRT_lba_single.stan b/R/inst/stan_files/choiceRT_lba_single.stan
deleted file mode 100644
index 1d5fd992..00000000
--- a/R/inst/stan_files/choiceRT_lba_single.stan
+++ /dev/null
@@ -1,239 +0,0 @@
[239 deleted lines: single-subject version of the same LBA model, built on the same custom LBA density and RNG functions]
N_choice; - int N_cond; - int tr_cond[N_cond]; - int max_tr; - matrix[2, max_tr] RT[N_cond]; -} - -parameters { - real d; - real A; - real tau; - vector[N_choice] v[N_cond]; -} -transformed parameters { - real s; - s = 1; -} -model { - // Declare variables - int n_trials; - - // Individual parameters - d ~ normal(.5, 1)T[0,]; - A ~ normal(.5, 1)T[0,]; - tau ~ normal(.5, .5)T[0,]; - - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = tr_cond[j]; - - for (n in 1:N_choice) { - // Drift rate is normally distributed - v[j, n] ~ normal(2, 1)T[0,]; - } - // Likelihood of RT x Choice - RT[j, , 1:n_trials] ~ lba(d, A, v[j,], s, tau); - } -} - -generated quantities { - // Declare variables - int n_trials; - - // For log likelihood calculation - real log_lik; - - // For posterior predictive check - matrix[2, max_tr] y_pred[N_cond]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (j in 1:N_cond) { - for (t in 1:max_tr) { - y_pred[j, , t] = rep_vector(-1, 2); - } - } - - // initialize log_lik - log_lik = 0; - - { // local section, this saves time and space - for (j in 1:N_cond) { - // Store number of trials for subject/condition pair - n_trials = tr_cond[j]; - - // Sum likelihood over conditions within subjects - log_lik += lba_lpdf(RT[j, , 1:n_trials] | d, A, v[j,], s, tau); - - for (t in 1:n_trials) { - // generate posterior predictions - y_pred[j, , t] = lba_rng(d, A, v[j,], s, tau); - } - } - } -} - diff --git a/R/inst/stan_files/cra_exp.stan b/R/inst/stan_files/cra_exp.stan deleted file mode 100644 index 86a44a0e..00000000 --- a/R/inst/stan_files/cra_exp.stan +++ /dev/null @@ -1,134 +0,0 @@ -#include /pre/license.stan - -/** - * Choice under Risk and Ambiguity Task - * - * Exponential model in Hsu et al. 
(2005) Science - */ - -functions { - /** - * Subjective value function with the exponential equation form - */ - real subjective_value(real alpha, real beta, real p, real a, real v) { - return pow(p, 1 + beta * a) * pow(v, alpha); - } -} - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/block for each subject - - int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) - real prob[N, T]; // The objective probability of the variable lottery - real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) - real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) - real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // risk attitude parameter - vector[N] beta_pr; // ambiguity attitude parameter - vector[N] gamma_pr; // inverse temperature parameter -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] alpha; - vector[N] beta; - vector[N] gamma; - - alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; - beta = mu_pr[2] + sigma[2] * beta_pr; - gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); -} - -model { - // hyper parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 5); - - // individual parameters w/ Matt trick - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - gamma_pr ~ normal(0, 1); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - real p_var; // probability of choosing the variable option - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var = inv_logit(gamma[i] * (u_var - u_fix)); - - target += bernoulli_lpmf(choice[i, t] | p_var); - } - } -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_gamma; - - // For log likelihood calculation for each subject - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Model regressors - real sv[N, T]; - real sv_fix[N, T]; - real sv_var[N, T]; - real p_var[N, T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - sv[i, t] = 0; - sv_fix[i, t] = 0; - sv_var[i, t] = 0; - p_var[i, t] = 0; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 2; - mu_beta = mu_pr[2]; - mu_gamma = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Initialize the log likelihood variable to 0. - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); - - sv_fix[i, t] = u_fix; - sv_var[i, t] = u_var; - sv[i, t] = (choice[i, t] == 1) ? 
u_var : u_fix; - - log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); - y_pred[i, t] = bernoulli_rng(p_var[i, t]); - } - } - } -} - diff --git a/R/inst/stan_files/cra_linear.stan b/R/inst/stan_files/cra_linear.stan deleted file mode 100644 index b8653c85..00000000 --- a/R/inst/stan_files/cra_linear.stan +++ /dev/null @@ -1,130 +0,0 @@ -#include /pre/license.stan - -/** - * Choice under Risk and Ambiguity Task - * - * Linear model in Levy et al. (2010) J Neurophysiol - */ - -functions { - /** - * Subjective value function with the linear equation form - */ - real subjective_value(real alpha, real beta, real p, real a, real v) { - return (p - beta * a / 2) * pow(v, alpha); - } -} - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/block for each subject - - int choice[N, T]; // The options subjects choose (0: fixed / 1: variable) - real prob[N, T]; // The objective probability of the variable lottery - real ambig[N, T]; // The ambiguity level of the variable lottery (0 for risky lottery) - real reward_var[N, T]; // The amount of reward values on variable lotteries (risky and ambiguity conditions) - real reward_fix[N, T]; // The amount of reward values on fixed lotteries (reference) -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // risk attitude parameter - vector[N] beta_pr; // ambiguity attitude parameter - vector[N] gamma_pr; // inverse temperature parameter -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] alpha; - vector[N] beta; - vector[N] gamma; - - alpha = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr) * 2; - beta = mu_pr[2] + sigma[2] * beta_pr; - gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); -} - -model { - // hyper parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 5); - - // individual parameters w/ Matt trick - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - gamma_pr ~ normal(0, 1); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - real p_var; // probability of choosing the variable option - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var = inv_logit(gamma[i] * (u_var - u_fix)); - - target += bernoulli_lpmf(choice[i, t] | p_var); - } - } -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_gamma; - - // For log likelihood calculation for each subject - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Model regressors - real sv[N, T]; - real sv_fix[N, T]; - real sv_var[N, T]; - real p_var[N, T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 2; - mu_beta = mu_pr[2]; - mu_gamma = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Initialize the log likelihood variable to 0. 
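- // Worked illustration of the linear subjective value used below, with
- // hypothetical numbers: alpha = 0.8, beta = 0.6. A purely risky trial
- // (a = 0, p = 0.38, v = 20) gives u_var = 0.38 * 20^0.8 ~= 4.18, while an
- // ambiguous trial (a = 0.5, p = 0.5, v = 20) gives
- // u_var = (0.5 - 0.6 * 0.5/2) * 20^0.8 ~= 3.85: positive beta shrinks the
- // value of ambiguous lotteries.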
- log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - real u_fix; // subjective value of the fixed lottery - real u_var; // subjective value of the variable lottery - - u_fix = subjective_value(alpha[i], beta[i], 0.5, 0, reward_fix[i, t]); - u_var = subjective_value(alpha[i], beta[i], prob[i, t], ambig[i, t], reward_var[i, t]); - p_var[i, t] = inv_logit(gamma[i] * (u_var - u_fix)); - - sv_fix[i, t] = u_fix; - sv_var[i, t] = u_var; - sv[i, t] = (choice[i, t] == 1) ? u_var : u_fix; - - log_lik[i] += bernoulli_lpmf(choice[i, t] | p_var[i, t]); - y_pred[i, t] = bernoulli_rng(p_var[i, t]); - } - } - } -} - diff --git a/R/inst/stan_files/dbdm_prob_weight.stan b/R/inst/stan_files/dbdm_prob_weight.stan deleted file mode 100644 index ee248835..00000000 --- a/R/inst/stan_files/dbdm_prob_weight.stan +++ /dev/null @@ -1,154 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real opt1hprob[N, T]; - real opt2hprob[N, T]; - real opt1hval[N, T]; - real opt1lval[N, T]; - real opt2hval[N, T]; - real opt2lval[N, T]; -} -transformed data { -} -parameters{ - //group-level parameters - vector[4] mu_pr; - vector[4] sigma; - - //subject-level raw parameters, follows norm(0,1), for later Matt Trick - vector[N] tau_pr; //probability weight parameter - vector[N] rho_pr; //subject utility parameter - vector[N] lambda_pr; //loss aversion parameter - vector[N] beta_pr; //inverse softmax temperature -} - -transformed parameters { - //subject-level parameters - vector[N] tau; - vector[N] rho; - vector[N] lambda; - vector[N] beta; - - //Matt Trick - for (i in 1:N) { - tau[i] = Phi_approx( mu_pr[1] + sigma[1] * tau_pr[i] ); - rho[i] = Phi_approx( mu_pr[2] + sigma[2] * rho_pr[i] )*2; - lambda[i] = Phi_approx( mu_pr[3] + sigma[3] * lambda_pr[i] )*5; - beta[i] = Phi_approx( mu_pr[4] + sigma[4] * beta_pr[i] ); - } -} - -model { - //prior : hyperparameters - mu_pr ~ normal(0,1); - sigma ~ cauchy(0,5); - - //prior : individual parameters - tau_pr ~ normal(0,1); - rho_pr ~ normal(0,1); - lambda_pr ~ normal(0,1); - beta_pr ~ normal(0,1); - - //subject loop and trial loop - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - vector[4] w_prob; - vector[2] U_opt; - - //probability weight function - w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); - w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); - w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); - w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); - - if (opt1hval[i,t]>0) { - if (opt1lval[i,t]>= 0) { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); - } else { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - - if (opt2hval[i,t] > 0) { - if (opt2lval[i,t] >= 0) { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); - } else { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - // compute action probabilities - choice[i, t] ~ categorical_logit(U_opt*beta[i]); - } - } -} - -generated quantities { - real mu_tau; - real mu_rho; - real mu_lambda; - real mu_beta; - real log_lik[N]; - // For posterior predictive check - real y_pred[N,T]; - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) 
{ - y_pred[i, t] = -1; - } - } - - mu_tau = Phi_approx(mu_pr[1]); - mu_rho = Phi_approx(mu_pr[2])*2; - mu_lambda = Phi_approx(mu_pr[3])*5; - mu_beta = Phi_approx(mu_pr[4]); - - { // local section, this saves time and space - for (i in 1:N) { - log_lik[i] = 0; - for (t in 1:Tsubj[i]) { - vector[4] w_prob; - vector[2] U_opt; - - //probability weight function - w_prob[1] = exp(-(-log(opt1hprob[i,t]))^tau[i]); - w_prob[2] = exp(-(-log(1-opt1hprob[i,t]))^tau[i]); - w_prob[3] = exp(-(-log(opt2hprob[i,t]))^tau[i]); - w_prob[4] = exp(-(-log(1-opt2hprob[i,t]))^tau[i]); - - if (opt1hval[i,t]>0) { - if (opt1lval[i,t]>= 0) { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) + w_prob[2]*(opt1lval[i,t]^rho[i]); - } else { - U_opt[1] = w_prob[1]*(opt1hval[i,t]^rho[i]) - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[1] = -w_prob[1]*(fabs(opt1hval[i,t])^rho[i])*lambda[i] - w_prob[2]*(fabs(opt1lval[i,t])^rho[i])*lambda[i]; - } - - if (opt2hval[i,t] > 0) { - if (opt2lval[i,t] >= 0) { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) + w_prob[4]*(opt2lval[i,t]^rho[i]); - } else { - U_opt[2] = w_prob[3]*(opt2hval[i,t]^rho[i]) - w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - } else { - U_opt[2] = -w_prob[3]*(fabs(opt2hval[i,t])^rho[i])*lambda[i] -w_prob[4]*(fabs(opt2lval[i,t])^rho[i])*lambda[i]; - } - - // compute action probabilities - log_lik[i] += categorical_logit_lpmf(choice[i,t] | U_opt*beta[i]); - y_pred[i, t] = categorical_rng(softmax(U_opt*beta[i])); - - } - } - } -} - diff --git a/R/inst/stan_files/dd_cs.stan b/R/inst/stan_files/dd_cs.stan deleted file mode 100644 index d221d34a..00000000 --- a/R/inst/stan_files/dd_cs.stan +++ /dev/null @@ -1,107 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] r_pr; // (exponential) discounting rate (Impatience) - vector[N] s_pr; // time-sensitivity - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] r; - vector[N] s; - vector[N] beta; - - for (i in 1:N) { - r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); - s[i] = Phi_approx(mu_pr[2] + sigma[2] * s_pr[i]) * 10; - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 5; - } -} - -model { -// Constant-sensitivity model (Ebert & Prelec, 2007) - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - s_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); - ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_r; - real mu_s; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = 
-1; - } - } - - mu_r = Phi_approx(mu_pr[1]); - mu_s = Phi_approx(mu_pr[2]) * 10; - mu_beta = Phi_approx(mu_pr[3]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1* (pow(r[i] * delay_later[i, t], s[i]))); - ev_sooner = amount_sooner[i, t] * exp(-1* (pow(r[i] * delay_sooner[i, t], s[i]))); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git a/R/inst/stan_files/dd_cs_single.stan b/R/inst/stan_files/dd_cs_single.stan deleted file mode 100644 index 2436b8b1..00000000 --- a/R/inst/stan_files/dd_cs_single.stan +++ /dev/null @@ -1,63 +0,0 @@ -#include /pre/license.stan - -data { - int Tsubj; - real delay_later[Tsubj]; - real amount_later[Tsubj]; - real delay_sooner[Tsubj]; - real amount_sooner[Tsubj]; - int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { - real r; // (exponential) discounting rate - real s; // impatience - real beta; // inverse temperature -} - -transformed parameters { - real ev_later[Tsubj]; - real ev_sooner[Tsubj]; - - for (t in 1:Tsubj) { - ev_later[t] = amount_later[t] * exp(-1* (pow(r * delay_later[t], s))); - ev_sooner[t] = amount_sooner[t] * exp(-1* (pow(r * delay_sooner[t], s))); - } -} - -model { - // constant-sensitivity model (Ebert & Prelec, 2007) - // hyperparameters - r ~ uniform(0, 1); - s ~ uniform(0, 10); - beta ~ uniform(0, 5); - - for (t in 1:Tsubj) { - choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); - } -} - -generated quantities { - real logR; - real log_lik; - - // For posterior predictive check - real y_pred[Tsubj]; - - logR = log(r); - - { // local section, this saves time and space - log_lik = 0; - - for (t in 1:Tsubj) { - log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); - - // generate posterior prediction for current trial - y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); - } - } -} - diff --git a/R/inst/stan_files/dd_exp.stan b/R/inst/stan_files/dd_exp.stan deleted file mode 100644 index 3d772a5a..00000000 --- a/R/inst/stan_files/dd_exp.stan +++ /dev/null @@ -1,101 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] r_pr; - vector[N] beta_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] r; - vector[N] beta; - - for (i in 1:N) { - r[i] = Phi_approx(mu_pr[1] + sigma[1] * r_pr[i]); - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; - } -} - -model { -// Exponential function - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, 
t]); - ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_r; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_r = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] * exp(-1 * r[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] * exp(-1 * r[i] * delay_sooner[i, t]); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git a/R/inst/stan_files/dd_hyperbolic.stan b/R/inst/stan_files/dd_hyperbolic.stan deleted file mode 100644 index 1551304a..00000000 --- a/R/inst/stan_files/dd_hyperbolic.stan +++ /dev/null @@ -1,101 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real delay_later[N, T]; - real amount_later[N, T]; - real delay_sooner[N, T]; - real amount_sooner[N, T]; - int choice[N, T]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] k_pr; - vector[N] beta_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] k; - vector[N] beta; - - for (i in 1:N) { - k[i] = Phi_approx(mu_pr[1] + sigma[1] * k_pr[i]); - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 5; - } -} - -model { -// Hyperbolic function - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - k_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); - choice[i, t] ~ bernoulli_logit(beta[i] * (ev_later - ev_sooner)); - } - } -} -generated quantities { - // For group level parameters - real mu_k; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_k = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real ev_later; - real ev_sooner; - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - ev_later = amount_later[i, t] / (1 + k[i] * delay_later[i, t]); - ev_sooner = amount_sooner[i, t] / (1 + k[i] * delay_sooner[i, t]); - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * (ev_later - ev_sooner)); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(beta[i] * (ev_later - ev_sooner))); - } - } - } -} - diff --git 
a/R/inst/stan_files/dd_hyperbolic_single.stan b/R/inst/stan_files/dd_hyperbolic_single.stan deleted file mode 100644 index be3011f0..00000000 --- a/R/inst/stan_files/dd_hyperbolic_single.stan +++ /dev/null @@ -1,57 +0,0 @@ -#include /pre/license.stan - -data { - int Tsubj; - real delay_later[Tsubj]; - real amount_later[Tsubj]; - real delay_sooner[Tsubj]; - real amount_sooner[Tsubj]; - int choice[Tsubj]; // 0 for instant reward, 1 for delayed reward -} - -transformed data { -} - -parameters { - real k; // discounting rate - real beta; // inverse temperature -} - -transformed parameters { - real ev_later[Tsubj]; - real ev_sooner[Tsubj]; - - for (t in 1:Tsubj) { - ev_later[t] = amount_later[t] / (1 + k * delay_later[t]); - ev_sooner[t] = amount_sooner[t] / (1 + k * delay_sooner[t]); - } -} - -model { - k ~ uniform(0, 1); - beta ~ uniform(0, 5); - - for (t in 1:Tsubj) { - choice[t] ~ bernoulli_logit(beta * (ev_later[t] - ev_sooner[t])); - } -} -generated quantities { - real logK; - real log_lik; - - // For posterior predictive check - real y_pred[Tsubj]; - - logK = log(k); - - { // local section, this saves time and space - log_lik = 0; - for (t in 1:Tsubj) { - log_lik += bernoulli_logit_lpmf(choice[t] | beta * (ev_later[t] - ev_sooner[t])); - - // generate posterior prediction for current trial - y_pred[t] = bernoulli_rng(inv_logit(beta * (ev_later[t] - ev_sooner[t]))); - } - } -} - diff --git a/R/inst/stan_files/gng_m1.stan b/R/inst/stan_files/gng_m1.stan deleted file mode 100644 index 5ac8abd0..00000000 --- a/R/inst/stan_files/gng_m1.stan +++ /dev/null @@ -1,149 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[3] mu_pr; - vector[3] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - rho = exp(mu_pr[3] + sigma[3] * rho_pr); -} - -model { -// gng_m1: RW + noise model in Guitart-Masip et al 2012 - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_rho; - real log_lik[N]; - 
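- // Model regressors: Qgo/Qnogo are the go and no-go action values and
- // Wgo/Wnogo the corresponding action weights, stored in the local section
- // below before each trial's update, so they reflect the quantities the
- // choice was actually based on. The xi parameter mixes the logistic choice
- // rule with a uniform lapse; with illustrative numbers, xi = 0.2 turns a
- // raw p(go) of 0.90 into 0.90 * (1 - 0.2) + 0.2/2 = 0.82.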
real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_rho = exp(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/R/inst/stan_files/gng_m2.stan b/R/inst/stan_files/gng_m2.stan deleted file mode 100644 index c9a8ced8..00000000 --- a/R/inst/stan_files/gng_m2.stan +++ /dev/null @@ -1,160 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[4] mu_pr; - vector[4] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - rho = exp(mu_pr[4] + sigma[4] * rho_pr); -} - -model { -// gng_m2: RW + noise + bias model in Guitart-Masip et al 2012 - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ normal(0, 1.0); - mu_pr[3] ~ normal(0, 10.0); - mu_pr[4] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - sigma[4] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng 
(regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_rho; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_b = mu_pr[3]; - mu_rho = exp(mu_pr[4]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/R/inst/stan_files/gng_m3.stan b/R/inst/stan_files/gng_m3.stan deleted file mode 100644 index 2368ea1a..00000000 --- a/R/inst/stan_files/gng_m3.stan +++ /dev/null @@ -1,179 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[5] mu_pr; - vector[5] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] pi_pr; // pavlovian bias - vector[N] rho_pr; // rho, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] pi; - vector[N] rho; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - pi = mu_pr[4] + sigma[4] * pi_pr; - rho = exp(mu_pr[5] + sigma[5] * rho_pr); -} - -model { -// gng_m4: RW(rew/pun) + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ 
normal(0, 1.0); - mu_pr[3] ~ normal(0, 10.0); - mu_pr[4] ~ normal(0, 10.0); - mu_pr[5] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3:4] ~ cauchy(0, 1.0); - sigma[5] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - pi_pr ~ normal(0, 1.0); - rho_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // after receiving feedback, update sv[t + 1] - sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_pi; - real mu_rho; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - real SV[N, T]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_xi = Phi_approx(mu_pr[1]); - mu_ep = Phi_approx(mu_pr[2]); - mu_b = mu_pr[3]; - mu_pi = mu_pr[4]; - mu_rho = exp(mu_pr[5]); - - { // local section, this saves time and space - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]); - - // Model regressors --> store values before being updated - Qgo[i, t] = qv_g[cue[i, t]]; - Qnogo[i, t] = qv_ng[cue[i, t]]; - Wgo[i, t] = wv_g[cue[i, t]]; - Wnogo[i, t] = wv_ng[cue[i, t]]; - SV[i, t] = sv[cue[i, t]]; - - // after receiving feedback, update sv[t + 1] - sv[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - sv[cue[i, t]]); - - // update action values - if (pressed[i, t]) { // update go value - qv_g[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { // update no-go value - qv_ng[cue[i, t]] += ep[i] * (rho[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } // end of 
t loop - } // end of i loop - } // end of local section -} - diff --git a/R/inst/stan_files/gng_m4.stan b/R/inst/stan_files/gng_m4.stan deleted file mode 100644 index 73e30cb1..00000000 --- a/R/inst/stan_files/gng_m4.stan +++ /dev/null @@ -1,210 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int cue[N, T]; - int pressed[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { - // declare as vectors for vectorizing - vector[6] mu_pr; - vector[6] sigma; - vector[N] xi_pr; // noise - vector[N] ep_pr; // learning rate - vector[N] b_pr; // go bias - vector[N] pi_pr; // pavlovian bias - vector[N] rhoRew_pr; // rho reward, inv temp - vector[N] rhoPun_pr; // rho punishment, inv temp -} - -transformed parameters { - vector[N] xi; - vector[N] ep; - vector[N] b; - vector[N] pi; - vector[N] rhoRew; - vector[N] rhoPun; - - for (i in 1:N) { - xi[i] = Phi_approx(mu_pr[1] + sigma[1] * xi_pr[i]); - ep[i] = Phi_approx(mu_pr[2] + sigma[2] * ep_pr[i]); - } - b = mu_pr[3] + sigma[3] * b_pr; // vectorization - pi = mu_pr[4] + sigma[4] * pi_pr; - rhoRew = exp(mu_pr[5] + sigma[5] * rhoRew_pr); - rhoPun = exp(mu_pr[6] + sigma[6] * rhoPun_pr); -} - -model { -// gng_m4: RW(rew/pun) + noise + bias + pi model (M5 in Cavanagh et al 2013 J Neuro) - // hyper parameters - mu_pr[1] ~ normal(0, 1.0); - mu_pr[2] ~ normal(0, 1.0); - mu_pr[3] ~ normal(0, 10.0); - mu_pr[4] ~ normal(0, 10.0); - mu_pr[5] ~ normal(0, 1.0); - mu_pr[6] ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3:4] ~ cauchy(0, 1.0); - sigma[5:6] ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - xi_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - b_pr ~ normal(0, 1.0); - pi_pr ~ normal(0, 1.0); - rhoRew_pr ~ normal(0, 1.0); - rhoPun_pr ~ normal(0, 1.0); - - for (i in 1:N) { - vector[4] wv_g; // action weight for go - vector[4] wv_ng; // action weight for nogo - vector[4] qv_g; // Q value for go - vector[4] qv_ng; // Q value for nogo - vector[4] sv; // stimulus value - vector[4] pGo; // prob of go (press) - - wv_g = initV; - wv_ng = initV; - qv_g = initV; - qv_ng = initV; - sv = initV; - - for (t in 1:Tsubj[i]) { - wv_g[cue[i, t]] = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]]; - wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action) - pGo[cue[i, t]] = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]); - { // noise - pGo[cue[i, t]] *= (1 - xi[i]); - pGo[cue[i, t]] += xi[i]/2; - } - pressed[i, t] ~ bernoulli(pGo[cue[i, t]]); - - // after receiving feedback, update sv[t + 1] - if (outcome[i, t] >= 0) { - sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]); - } else { - sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]); - } - - // update action values - if (pressed[i, t]) { // update go value - if (outcome[i, t] >=0) { - qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]); - } else { - qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]); - } - } else { // update no-go value - if (outcome[i, t] >=0) { - qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } else { - qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]); - } - } - } // end of t loop - } // end of i loop -} - -generated quantities { - real mu_xi; - real mu_ep; - real mu_b; - real mu_pi; - real mu_rhoRew; - real mu_rhoPun; - real log_lik[N]; - real Qgo[N, T]; - real Qnogo[N, T]; - real Wgo[N, T]; - real Wnogo[N, T]; - real 
SV[N, T];
-
- // For posterior predictive check
- real y_pred[N, T];
-
- // Set all posterior predictions to -1 (avoids NULL values)
- for (i in 1:N) {
-   for (t in 1:T) {
-     y_pred[i, t] = -1;
-   }
- }
-
- mu_xi     = Phi_approx(mu_pr[1]);
- mu_ep     = Phi_approx(mu_pr[2]);
- mu_b      = mu_pr[3];
- mu_pi     = mu_pr[4];
- mu_rhoRew = exp(mu_pr[5]);
- mu_rhoPun = exp(mu_pr[6]);
-
- { // local section, this saves time and space
-   for (i in 1:N) {
-     vector[4] wv_g;  // action weight for go
-     vector[4] wv_ng; // action weight for nogo
-     vector[4] qv_g;  // Q value for go
-     vector[4] qv_ng; // Q value for nogo
-     vector[4] sv;    // stimulus value
-     vector[4] pGo;   // prob of go (press)
-
-     wv_g  = initV;
-     wv_ng = initV;
-     qv_g  = initV;
-     qv_ng = initV;
-     sv    = initV;
-
-     log_lik[i] = 0;
-
-     for (t in 1:Tsubj[i]) {
-       wv_g[cue[i, t]]  = qv_g[cue[i, t]] + b[i] + pi[i] * sv[cue[i, t]];
-       wv_ng[cue[i, t]] = qv_ng[cue[i, t]]; // qv_ng is always equal to wv_ng (regardless of action)
-       pGo[cue[i, t]]   = inv_logit(wv_g[cue[i, t]] - wv_ng[cue[i, t]]);
-       { // noise
-         pGo[cue[i, t]] *= (1 - xi[i]);
-         pGo[cue[i, t]] += xi[i]/2;
-       }
-       log_lik[i] += bernoulli_lpmf(pressed[i, t] | pGo[cue[i, t]]);
-
-       // generate posterior prediction for current trial
-       y_pred[i, t] = bernoulli_rng(pGo[cue[i, t]]);
-
-       // Model regressors --> store values before being updated
-       Qgo[i, t]   = qv_g[cue[i, t]];
-       Qnogo[i, t] = qv_ng[cue[i, t]];
-       Wgo[i, t]   = wv_g[cue[i, t]];
-       Wnogo[i, t] = wv_ng[cue[i, t]];
-       SV[i, t]    = sv[cue[i, t]];
-
-       // after receiving feedback, update sv[t + 1]
-       if (outcome[i, t] >= 0) {
-         sv[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - sv[cue[i, t]]);
-       } else {
-         sv[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - sv[cue[i, t]]);
-       }
-
-       // update action values
-       if (pressed[i, t]) { // update go value
-         if (outcome[i, t] >= 0) {
-           qv_g[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_g[cue[i, t]]);
-         } else {
-           qv_g[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_g[cue[i, t]]);
-         }
-       } else { // update no-go value
-         if (outcome[i, t] >= 0) {
-           qv_ng[cue[i, t]] += ep[i] * (rhoRew[i] * outcome[i, t] - qv_ng[cue[i, t]]);
-         } else {
-           qv_ng[cue[i, t]] += ep[i] * (rhoPun[i] * outcome[i, t] - qv_ng[cue[i, t]]);
-         }
-       }
-     } // end of t loop
-   } // end of i loop
- } // end of local section
-}
-
diff --git a/R/inst/stan_files/igt_orl.stan b/R/inst/stan_files/igt_orl.stan
deleted file mode 100644
index a560de27..00000000
--- a/R/inst/stan_files/igt_orl.stan
+++ /dev/null
@@ -1,207 +0,0 @@
-#include /pre/license.stan
-
-data {
- int N;
- int T;
- int Tsubj[N];
- int choice[N, T];
- real outcome[N, T];
- real sign_out[N, T];
-}
-transformed data {
- vector[4] initV;
- initV = rep_vector(0.0, 4);
-}
-parameters {
-// Declare all parameters as vectors for vectorizing
- // Hyper(group)-parameters
- vector[5] mu_pr;
- vector[5] sigma;
-
- // Subject-level raw parameters (for Matt trick)
- vector[N] Arew_pr;
- vector[N] Apun_pr;
- vector[N] K_pr;
- vector[N] betaF_pr;
- vector[N] betaP_pr;
-}
-transformed parameters {
- // Transform subject-level raw parameters
- vector[N] Arew;
- vector[N] Apun;
- vector[N] K;
- vector[N] betaF;
- vector[N] betaP;
-
- for (i in 1:N) {
-   Arew[i] = Phi_approx( mu_pr[1] + sigma[1] * Arew_pr[i] );
-   Apun[i] = Phi_approx( mu_pr[2] + sigma[2] * Apun_pr[i] );
-   K[i]    = Phi_approx( mu_pr[3] + sigma[3] * K_pr[i] ) * 5;
- }
- betaF = mu_pr[4] + sigma[4] * betaF_pr;
- betaP = mu_pr[5] + sigma[5] * betaP_pr;
-}
-model {
- // Hyperparameters
- mu_pr ~ normal(0, 1);
- sigma[1:3] ~ normal(0, 0.2);
- sigma[4:5] ~
cauchy(0, 1.0); - - // individual parameters - Arew_pr ~ normal(0, 1.0); - Apun_pr ~ normal(0, 1.0); - K_pr ~ normal(0, 1.0); - betaF_pr ~ normal(0, 1.0); - betaP_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] ef; - vector[4] ev; - vector[4] PEfreq_fic; - vector[4] PEval_fic; - vector[4] pers; // perseverance - vector[4] util; - - real PEval; - real PEfreq; - real efChosen; - real evChosen; - real K_tr; - - // Initialize values - ef = initV; - ev = initV; - pers = initV; // initial pers values - util = initV; - K_tr = pow(3, K[i]) - 1; - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit( util ); - - // Prediction error - PEval = outcome[i,t] - ev[ choice[i,t]]; - PEfreq = sign_out[i,t] - ef[ choice[i,t]]; - PEfreq_fic = -sign_out[i,t]/3 - ef; - - // store chosen deck ev - efChosen = ef[ choice[i,t]]; - evChosen = ev[ choice[i,t]]; - - if (outcome[i,t] >= 0) { - // Update ev for all decks - ef += Apun[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Arew[i] * PEval; - } else { - // Update ev for all decks - ef += Arew[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Apun[i] * PEval; - } - - // Perseverance updating - pers[ choice[i,t] ] = 1; // perseverance term - pers /= (1 + K_tr); // decay - - // Utility of expected value and perseverance - util = ev + ef * betaF[i] + pers * betaP[i]; - } - } -} - -generated quantities { - // For group level parameters - real mu_Arew; - real mu_Apun; - real mu_K; - real mu_betaF; - real mu_betaP; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N,T]; - - // Set all posterior predictions to -1 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i,t] = -1; - } - } - - mu_Arew = Phi_approx(mu_pr[1]); - mu_Apun = Phi_approx(mu_pr[2]); - mu_K = Phi_approx(mu_pr[3]) * 5; - mu_betaF = mu_pr[4]; - mu_betaP = mu_pr[5]; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ef; - vector[4] ev; - vector[4] PEfreq_fic; - vector[4] PEval_fic; - vector[4] pers; // perseverance - vector[4] util; - - real PEval; - real PEfreq; - real efChosen; - real evChosen; - real K_tr; - - // Initialize values - log_lik[i] = 0; - ef = initV; - ev = initV; - pers = initV; // initial pers values - util = initV; - K_tr = pow(3, K[i]) - 1; - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf( choice[i, t] | util ); - - // generate posterior prediction for current trial - y_pred[i,t] = categorical_rng(softmax(util)); - - // Prediction error - PEval = outcome[i,t] - ev[ choice[i,t]]; - PEfreq = sign_out[i,t] - ef[ choice[i,t]]; - PEfreq_fic = -sign_out[i,t]/3 - ef; - - // store chosen deck ev - efChosen = ef[ choice[i,t]]; - evChosen = ev[ choice[i,t]]; - - if (outcome[i,t] >= 0) { - // Update ev for all decks - ef += Apun[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Arew[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Arew[i] * PEval; - } else { - // Update ev for all decks - ef += Arew[i] * PEfreq_fic; - // Update chosendeck with stored value - ef[ choice[i,t]] = efChosen + Apun[i] * PEfreq; - ev[ choice[i,t]] = evChosen + Apun[i] * PEval; - } - - // Perseverance updating - pers[ choice[i,t] ] = 1; // perseverance term - pers /= (1 + K_tr); // decay - - // 
Utility of expected value and perseverance - util = ev + ef * betaF[i] + pers * betaP[i]; - } - } - } -} - diff --git a/R/inst/stan_files/igt_pvl_decay.stan b/R/inst/stan_files/igt_pvl_decay.stan deleted file mode 100644 index 2d908a19..00000000 --- a/R/inst/stan_files/igt_pvl_decay.stan +++ /dev/null @@ -1,134 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; -} -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - cons_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * ev); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // decay-RI - ev *= A[i]; - ev[choice[i, t]] += curUtil; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * ev)); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // decay-RI - ev *= A[i]; - ev[choice[i, t]] += curUtil; - } - } - } -} - diff --git a/R/inst/stan_files/igt_pvl_delta.stan b/R/inst/stan_files/igt_pvl_delta.stan deleted file mode 100644 index 05c6e870..00000000 --- a/R/inst/stan_files/igt_pvl_delta.stan +++ /dev/null @@ -1,132 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; 
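- // choice[i, t] is the index (1-4) of the deck picked by subject i on trial t;
- // reading 1-4 as decks A-D is an assumption here - the actual coding is set
- // by the package's R preprocessing, not by this file.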
- real outcome[N, T]; -} -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - } -} -model { -// Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - cons_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * ev); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // delta - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * ev); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * ev)); - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - } - - // delta - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - } - } - } -} - diff --git a/R/inst/stan_files/igt_vpp.stan b/R/inst/stan_files/igt_vpp.stan deleted file mode 100644 index 61c2b831..00000000 --- a/R/inst/stan_files/igt_vpp.stan +++ /dev/null @@ -1,188 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int choice[N, T]; - real outcome[N, T]; -} - -transformed data { - vector[4] initV; - initV = rep_vector(0.0, 4); -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[8] mu_pr; - vector[8] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] A_pr; - vector[N] alpha_pr; - vector[N] cons_pr; - vector[N] lambda_pr; - vector[N] epP_pr; - vector[N] 
epN_pr; - vector[N] K_pr; - vector[N] w_pr; -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] A; - vector[N] alpha; - vector[N] cons; - vector[N] lambda; - vector[N] epP; - vector[N] epN; - vector[N] K; - vector[N] w; - - for (i in 1:N) { - A[i] = Phi_approx(mu_pr[1] + sigma[1] * A_pr[i]); - alpha[i] = Phi_approx(mu_pr[2] + sigma[2] * alpha_pr[i]) * 2; - cons[i] = Phi_approx(mu_pr[3] + sigma[3] * cons_pr[i]) * 5; - lambda[i] = Phi_approx(mu_pr[4] + sigma[4] * lambda_pr[i]) * 10; - K[i] = Phi_approx(mu_pr[7] + sigma[7] * K_pr[i]); - w[i] = Phi_approx(mu_pr[8] + sigma[8] * w_pr[i]); - } - epP = mu_pr[5] + sigma[5] * epP_pr; - epN = mu_pr[6] + sigma[6] * epN_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1.0); - sigma[1:4] ~ normal(0, 0.2); - sigma[5:6] ~ cauchy(0, 1.0); - sigma[7:8] ~ normal(0, 0.2); - - // individual parameters - A_pr ~ normal(0, 1.0); - alpha_pr ~ normal(0, 1.0); - cons_pr ~ normal(0, 1.0); - lambda_pr ~ normal(0, 1.0); - epP_pr ~ normal(0, 1.0); - epN_pr ~ normal(0, 1.0); - K_pr ~ normal(0, 1.0); - w_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - vector[4] ev; - vector[4] p_next; - vector[4] str; - vector[4] pers; // perseverance - vector[4] V; // weighted sum of ev and pers - - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - pers = initV; // initial pers values - V = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice - choice[i, t] ~ categorical_logit(theta * V); - - // perseverance decay - pers *= K[i]; // decay - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - pers[choice[i, t]] += epP[i]; // perseverance term - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - pers[choice[i, t]] += epN[i]; // perseverance term - } - - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - // calculate V - V = w[i] * ev + (1-w[i]) * pers; - } - } -} -generated quantities { - // For group level parameters - real mu_A; - real mu_alpha; - real mu_cons; - real mu_lambda; - real mu_epP; - real mu_epN; - real mu_K; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_A = Phi_approx(mu_pr[1]); - mu_alpha = Phi_approx(mu_pr[2]) * 2; - mu_cons = Phi_approx(mu_pr[3]) * 5; - mu_lambda = Phi_approx(mu_pr[4]) * 10; - mu_epP = mu_pr[5]; - mu_epN = mu_pr[6]; - mu_K = Phi_approx(mu_pr[7]); - mu_w = Phi_approx(mu_pr[8]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[4] ev; - vector[4] p_next; - vector[4] str; - vector[4] pers; // perseverance - vector[4] V; // weighted sum of ev and pers - - real curUtil; // utility of curFb - real theta; // theta = 3^c - 1 - - // Initialize values - log_lik[i] = 0; - theta = pow(3, cons[i]) -1; - ev = initV; // initial ev values - pers = initV; // initial pers values - V = initV; - - for (t in 1:Tsubj[i]) { - // softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | theta * V); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(theta * V)); - - // perseverance decay - pers *= K[i]; // decay - - if (outcome[i, t] >= 0) { // x(t) >= 0 - curUtil = pow(outcome[i, t], alpha[i]); - pers[choice[i, t]] += 
epP[i]; // perseverance term - } else { // x(t) < 0 - curUtil = -1 * lambda[i] * pow(-1 * outcome[i, t], alpha[i]); - pers[choice[i, t]] += epN[i]; // perseverance term - } - - ev[choice[i, t]] += A[i] * (curUtil - ev[choice[i, t]]); - // calculate V - V = w[i] * ev + (1-w[i]) * pers; - } - } - } -} - diff --git a/R/inst/stan_files/peer_ocu.stan b/R/inst/stan_files/peer_ocu.stan deleted file mode 100644 index cd0c52d5..00000000 --- a/R/inst/stan_files/peer_ocu.stan +++ /dev/null @@ -1,115 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int condition[N, T]; // 0: solo, 1: ss, 2: mix, 3: rr - real p_gamble[N, T]; - real safe_Hpayoff[N, T]; - real safe_Lpayoff[N, T]; - real risky_Hpayoff[N, T]; - real risky_Lpayoff[N, T]; - int choice[N, T]; -} - -transformed data { -} - -parameters { - vector[3] mu_pr; - vector[3] sigma; - vector[N] rho_pr; - vector[N] tau_pr; - vector[N] ocu_pr; -} - -transformed parameters { - vector[N] rho; - vector[N] tau; - vector[N] ocu; - - for (i in 1:N) { - rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; - } - tau = exp(mu_pr[2] + sigma[2] * tau_pr); - ocu = mu_pr[3] + sigma[3] * ocu_pr; -} - -model { - // peer_ocu - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - - // individual parameters w/ Matt trick - rho_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - ocu_pr ~ normal(0, 1.0); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real U_safe; - real U_risky; - - U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); - U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); - if (condition[i, t] == 1) { // safe-safe - U_safe += ocu[i]; - } - if (condition[i, t] == 3) { // risky-risky - U_risky += ocu[i]; - } - choice[i, t] ~ bernoulli_logit(tau[i] * (U_risky - U_safe)); - } - } -} -generated quantities { - real mu_rho; - real mu_tau; - real mu_ocu; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_rho = Phi_approx(mu_pr[1]) * 2; - mu_tau = exp(mu_pr[2]); - mu_ocu = mu_pr[3]; - - { // local section, this saves time and space - for (i in 1:N) { - - // Initialize values - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - real U_safe; - real U_risky; - - U_safe = p_gamble[i, t] * pow(safe_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(safe_Lpayoff[i, t], rho[i]); - U_risky = p_gamble[i, t] * pow(risky_Hpayoff[i, t], rho[i]) + (1-p_gamble[i, t]) * pow(risky_Lpayoff[i, t], rho[i]); - if (condition[i, t] == 1) { // safe-safe - U_safe += ocu[i]; - } - if (condition[i, t] == 3) { // risky-risky - U_risky += ocu[i]; - } - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | tau[i] * (U_risky - U_safe)); - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(tau[i] * (U_risky - U_safe))); - } - } - } -} - diff --git a/R/inst/stan_files/pre/license.stan b/R/inst/stan_files/pre/license.stan deleted file mode 100644 index dec428a6..00000000 --- a/R/inst/stan_files/pre/license.stan +++ /dev/null @@ -1,14 +0,0 @@ -/* - hBayesDM is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at 
your option) any later version. - - hBayesDM is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with hBayesDM. If not, see . -*/ diff --git a/R/inst/stan_files/prl_ewa.stan b/R/inst/stan_files/prl_ewa.stan deleted file mode 100644 index 234cf467..00000000 --- a/R/inst/stan_files/prl_ewa.stan +++ /dev/null @@ -1,179 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Experience-Weighted Attraction model by Ouden et al. (2013) Neuron - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] phi_pr; // 1-learning rate - vector[N] rho_pr; // experience decay factor - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] phi; - vector[N] rho; - vector[N] beta; - - for (i in 1:N) { - phi[i] = Phi_approx(mu_pr[1] + sigma[1] * phi_pr[i]); - rho[i] = Phi_approx(mu_pr[2] + sigma[2] * rho_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Individual parameters - phi_pr ~ normal(0, 1); - rho_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - vector[2] ew; // Experience weight - - real ewt1; // Experience weight of trial (t - 1) - - // Initialize values - ev = initV; // initial ev values - ew = initV; // initial ew values - - for (t in 1:Tsubj[i]) { - // Softmax choice - choice[i, t] ~ categorical_logit(ev * beta[i]); - - // Store previous experience weight value - ewt1 = ew[choice[i, t]]; - - // Update experience weight for chosen stimulus - { - ew[choice[i, t]] *= rho[i]; - ew[choice[i, t]] += 1; - } - - // Update expected value of chosen stimulus - { - ev[choice[i, t]] *= phi[i] * ewt1; - ev[choice[i, t]] += outcome[i, t]; - ev[choice[i, t]] /= ew[choice[i, t]]; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_phi; - real mu_rho; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - //real mr_ev[N, T, 2]; // Expected value - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - //real mr_ew[N, T, 2]; // Experience weight - real ew_c[N, T]; // Experience weight of the chosen option - real ew_nc[N, T]; // Experience weight of the non-chosen option - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - ew_c[i, t] = 0; - ew_nc[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_phi = Phi_approx(mu_pr[1]); - mu_rho = 
Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - vector[2] ew; // Experience weight - - real ewt1; // Experience weight of trial (t-1) - - // Initialize values - ev = initV; // initial ev values - ew = initV; // initial ew values - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); - - // Store values for model regressors - //mr_ev[i, t] = ev; - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - //mr_ew[i, t] = ew; - ew_c[i, t] = ew[choice[i, t]]; - ew_nc[i, t] = ew[3 - choice[i, t]]; - - // Store previous experience weight value - ewt1 = ew[choice[i, t]]; - - // Update experience weight for chosen stimulus - { - ew[choice[i, t]] *= rho[i]; - ew[choice[i, t]] += 1; - } - - // Update expected value of chosen stimulus - { - ev[choice[i, t]] *= phi[i] * ewt1; - ev[choice[i, t]] += outcome[i, t]; - ev[choice[i, t]] /= ew[choice[i, t]]; - } - } - } - } -} - diff --git a/R/inst/stan_files/prl_fictitious.stan b/R/inst/stan_files/prl_fictitious.stan deleted file mode 100644 index 0fb8d486..00000000 --- a/R/inst/stan_files/prl_fictitious.stan +++ /dev/null @@ -1,173 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } - alpha = mu_pr[2] + sigma[2] * alpha_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1] ~ normal(0, 0.2); - sigma[2] ~ cauchy(0, 1.0); - sigma[3] ~ normal(0, 0.2); - - // Individual parameters - eta_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // Compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // Prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * 
PEnc; - } - } -} - -generated quantities { - // For group level parameters - real mu_eta; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; //Prediction error of the chosen option - real pe_nc[N, T]; //Prediction error of the non-chosen option - real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_alpha = mu_pr[2]; - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } - } -} - diff --git a/R/inst/stan_files/prl_fictitious_multipleB.stan b/R/inst/stan_files/prl_fictitious_multipleB.stan deleted file mode 100644 index 264d6c8f..00000000 --- a/R/inst/stan_files/prl_fictitious_multipleB.stan +++ /dev/null @@ -1,185 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - - int B; // Max number of blocks across subjects - int Bsubj[N]; // Number of blocks for each subject - - int T; // Max number of trials across subjects - int Tsubj[N, B]; // Number of trials/block for each subject - - int choice[N, B, T]; // Choice for each subject-block-trial - real outcome[N, B, T]; // Outcome (reward/loss) for each subject-block-trial -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } - alpha = mu_pr[2] + 
sigma[2] * alpha_pr; -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1] ~ normal(0, 0.2); - sigma[2] ~ cauchy(0, 1.0); - sigma[3] ~ normal(0, 0.2); - - // individual parameters - eta_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - for (bIdx in 1:Bsubj[i]) { // new - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i, bIdx])) { // new - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, bIdx, t] ~ categorical(prob); - //choice[i, t] ~ bernoulli(prob); - - // prediction error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new - PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new - - // value updating (learning) - ev[choice[i, bIdx, t]] += eta[i] * PE; //new - ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new - } // end of t loop - } // end of bIdx loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_eta; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, B, T]; // Expected value of the chosen option - real ev_nc[N, B, T]; // Expected value of the non-chosen option - - real pe_c[N, B, T]; //Prediction error of the chosen option - real pe_nc[N, B, T]; //Prediction error of the non-chosen option - real dv[N, B, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, B, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (b in 1:B) { - for (t in 1:T) { - ev_c[i, b, t] = 0; - ev_nc[i, b, t] = 0; - - pe_c[i, b, t] = 0; - pe_nc[i, b, t] = 0; - dv[i, b, t] = 0; - - y_pred[i, b, t] = -1; - } - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_alpha = mu_pr[2]; - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - - log_lik[i] = 0; - - for (bIdx in 1:Bsubj[i]) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i, bIdx])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, bIdx, t] | prob); //new - - // generate posterior prediction for current trial - y_pred[i, bIdx, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; //new - PEnc = -outcome[i, bIdx, t] - ev[3-choice[i, bIdx, t]]; //new - - // Store values for model regressors - ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; - ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; - - pe_c[i, bIdx, t] = PE; - pe_nc[i, bIdx, t] = PEnc; - dv[i, bIdx, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, bIdx, t]] += eta[i] * PE; //new - ev[3-choice[i, bIdx, t]] += eta[i] * PEnc; //new - } // end of t loop - } // end of bIdx loop - } - } -} - diff --git a/R/inst/stan_files/prl_fictitious_rp.stan b/R/inst/stan_files/prl_fictitious_rp.stan deleted file mode 
100644 index daa0779c..00000000 --- a/R/inst/stan_files/prl_fictitious_rp.stan +++ /dev/null @@ -1,188 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) - */ - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pos_pr; // learning rate, positive PE - vector[N] eta_neg_pr; // learning rate, negative PE - vector[N] alpha_pr; // indecision point - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta_pos; - vector[N] eta_neg; - vector[N] alpha; - vector[N] beta; - - for (i in 1:N) { - eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); - eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); - beta[i] = Phi_approx(mu_pr[4] + sigma[4] * beta_pr[i]) * 10; - } - alpha = mu_pr[3] + sigma[3] * alpha_pr; -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma[1:2] ~ normal(0, 0.2); - sigma[3] ~ cauchy(0, 1.0); - sigma[4] ~ normal(0, 0.2); - - // individual parameters - eta_pos_pr ~ normal(0, 1); - eta_neg_pr ~ normal(0, 1); - alpha_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_eta_pos; - real mu_eta_neg; - real mu_alpha; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; // Prediction error of the chosen option - real pe_nc[N, T]; // Prediction error of the non-chosen option - - real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta_pos = Phi_approx(mu_pr[1]); - mu_eta_neg = Phi_approx(mu_pr[2]); - mu_alpha = mu_pr[3]; - mu_beta = Phi_approx(mu_pr[4]) * 10; - - { // local section, this saves time 
and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (alpha[i] - (ev[1] - ev[2])))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // Value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } - } -} - diff --git a/R/inst/stan_files/prl_fictitious_rp_woa.stan b/R/inst/stan_files/prl_fictitious_rp_woa.stan deleted file mode 100644 index 48f78a42..00000000 --- a/R/inst/stan_files/prl_fictitious_rp_woa.stan +++ /dev/null @@ -1,180 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) with separate learning rates for +PE and -PE & without alpha (indecision point) - */ - -data { - int N; // Number of subjects - int T; // Max number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pos_pr; // learning rate, positive PE - vector[N] eta_neg_pr; // learning rate, negative PE - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta_pos; - vector[N] eta_neg; - vector[N] beta; - - for (i in 1:N) { - eta_pos[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pos_pr[i]); - eta_neg[i] = Phi_approx(mu_pr[2] + sigma[2] * eta_neg_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - eta_pos_pr ~ normal(0, 1); - eta_neg_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // value updating (learning) - if (PE >= 0) { - 
ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_eta_pos; - real mu_eta_neg; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; // Prediction error of the chosen option - real pe_nc[N, T]; // Prediction error of the non-chosen option - - real dv[N, T]; // Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_eta_pos = Phi_approx(mu_pr[1]); - mu_eta_neg = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3 - choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // Value updating (learning) - if (PE >= 0) { - ev[choice[i, t]] += eta_pos[i] * PE; - ev[3 - choice[i, t]] += eta_pos[i] * PEnc; - } else { - ev[choice[i, t]] += eta_neg[i] * PE; - ev[3 - choice[i, t]] += eta_neg[i] * PEnc; - } - } - } - } -} - diff --git a/R/inst/stan_files/prl_fictitious_woa.stan b/R/inst/stan_files/prl_fictitious_woa.stan deleted file mode 100644 index 58a4053f..00000000 --- a/R/inst/stan_files/prl_fictitious_woa.stan +++ /dev/null @@ -1,165 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Fictitious Update Model (Glascher et al., 2008, Cerebral Cortex) without alpha (indecision point) - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[2] mu_pr; - vector[2] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] eta_pr; // learning rate - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] eta; - vector[N] beta; - - for (i in 1:N) { - eta[i] = Phi_approx(mu_pr[1] + sigma[1] * eta_pr[i]); - 
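// All of the prl_fictitious* variants share one learning rule: on every
// trial BOTH options are updated. The chosen option moves toward the
// observed outcome, and the unchosen option moves toward the sign-flipped
// outcome, as if it had been chosen and paid -outcome. A compact sketch of
// that double update, assuming a two-option task (function name
// illustrative, not part of hBayesDM):
functions {
  vector fictitious_update(vector ev, int choice, real outcome, real eta) {
    vector[2] ev_new = ev;                  // assumes exactly two options
    real pe   = outcome  - ev[choice];      // prediction error, chosen
    real penc = -outcome - ev[3 - choice];  // fictitious PE, unchosen
    ev_new[choice]     += eta * pe;
    ev_new[3 - choice] += eta * penc;
    return ev_new;
  }
}
// The _rp variants apply eta_pos when PE >= 0 and eta_neg otherwise; the
// _woa variants drop the indecision point alpha from the choice rule.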
beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Individual parameters - eta_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:(Tsubj[i])) { - // Compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - choice[i, t] ~ categorical(prob); - - // Prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } -} - -generated quantities { - // For group level parameters - real mu_eta; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - - real pe_c[N, T]; //Prediction error of the chosen option - real pe_nc[N, T]; //Prediction error of the non-chosen option - real dv[N, T]; //Decision value = PE_chosen - PE_non-chosen - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions, model regressors to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - - pe_c[i, t] = 0; - pe_nc[i, t] = 0; - dv[i, t] =0; - - y_pred[i, t] = -1; - } - } - - mu_eta = Phi_approx(mu_pr[1]); - mu_beta = Phi_approx(mu_pr[2]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // expected value - vector[2] prob; // probability - real prob_1_; - - real PE; // prediction error - real PEnc; // fictitious prediction error (PE-non-chosen) - - // Initialize values - ev = initV; // initial ev values - - log_lik[i] = 0; - - for (t in 1:(Tsubj[i])) { - // compute action probabilities - prob[1] = 1 / (1 + exp(beta[i] * (ev[2] - ev[1]))); - prob_1_ = prob[1]; - prob[2] = 1 - prob_1_; - - log_lik[i] += categorical_lpmf(choice[i, t] | prob); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(prob); - - // prediction error - PE = outcome[i, t] - ev[choice[i, t]]; - PEnc = -outcome[i, t] - ev[3-choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - - pe_c[i, t] = PE; - pe_nc[i, t] = PEnc; - dv[i, t] = PE - PEnc; - - // value updating (learning) - ev[choice[i, t]] += eta[i] * PE; - ev[3-choice[i, t]] += eta[i] * PEnc; - } - } - } -} - diff --git a/R/inst/stan_files/prl_rp.stan b/R/inst/stan_files/prl_rp.stan deleted file mode 100644 index a7303744..00000000 --- a/R/inst/stan_files/prl_rp.stan +++ /dev/null @@ -1,149 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal Learning (PRL) Task - * - * Reward-Punishment Model by Ouden et al. 
(2013) Neuron - */ - -data { - int N; // Number of subjects - int T; // Maximum number of trials across subjects - int Tsubj[N]; // Number of trials/blocks for each subject - - int choice[N, T]; // The choices subjects made - real outcome[N, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Apun_pr; // learning rate (punishment) - vector[N] Arew_pr; // learning rate (reward) - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] Apun; - vector[N] Arew; - vector[N] beta; - - for (i in 1:N) { - Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); - Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - Apun_pr ~ normal(0, 1); - Arew_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define Values - vector[2] ev; // Expected value - real PE; // prediction error - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:Tsubj[i]) { - // Softmax choice - choice[i, t] ~ categorical_logit(ev * beta[i]); - - // Prediction Error - PE = outcome[i, t] - ev[choice[i, t]]; - - // Update expected value of chosen stimulus - if (outcome[i, t] > 0) - ev[choice[i, t]] += Arew[i] * PE; - else - ev[choice[i, t]] += Apun[i] * PE; - } - } -} - -generated quantities { - // For group level parameters - real mu_Apun; - real mu_Arew; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, T]; // Expected value of the chosen option - real ev_nc[N, T]; // Expected value of the non-chosen option - real pe[N, T]; // Prediction error - - // For posterior predictive check - real y_pred[N, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - ev_c[i, t] = 0; - ev_nc[i, t] = 0; - pe[i, t] = 0; - - y_pred[i, t] = -1; - } - } - - mu_Apun = Phi_approx(mu_pr[1]); - mu_Arew = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] ev; // Expected value - real PE; // Prediction error - - // Initialize values - ev = initV; // initial ev values - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = categorical_rng(softmax(ev * beta[i])); - - // Prediction Error - PE = outcome[i, t] - ev[choice[i, t]]; - - // Store values for model regressors - ev_c[i, t] = ev[choice[i, t]]; - ev_nc[i, t] = ev[3 - choice[i, t]]; - pe[i, t] = PE; - - // Update expected value of chosen stimulus - if (outcome[i, t] > 0) - ev[choice[i, t]] += Arew[i] * PE; - else - ev[choice[i, t]] += Apun[i] * PE; - } - } - } -} - diff --git a/R/inst/stan_files/prl_rp_multipleB.stan b/R/inst/stan_files/prl_rp_multipleB.stan deleted file mode 100644 index 8cd77c43..00000000 --- a/R/inst/stan_files/prl_rp_multipleB.stan +++ /dev/null @@ -1,161 +0,0 @@ -#include /pre/license.stan - -/** - * Probabilistic Reversal 
Learning (PRL) Task - * - * Reward-Punishment Model with multiple blocks per subject by Ouden et al. (2013) Neuron - */ - -data { - int N; // Number of subjects - - int B; // Maximum number of blocks across subjects - int Bsubj[N]; // Number of blocks for each subject - - int T; // Maximum number of trials across subjects - int Tsubj[N, B]; // Number of trials/blocks for each subject - - int choice[N, B, T]; // The choices subjects made - real outcome[N, B, T]; // The outcome -} - -transformed data { - // Default value for (re-)initializing parameter vectors - vector[2] initV; - initV = rep_vector(0.0, 2); -} - -// Declare all parameters as vectors for vectorizing -parameters { - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] Apun_pr; // learning rate (punishment) - vector[N] Arew_pr; // learning rate (reward) - vector[N] beta_pr; // inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - vector[N] Apun; - vector[N] Arew; - vector[N] beta; - - for (i in 1:N) { - Apun[i] = Phi_approx(mu_pr[1] + sigma[1] * Apun_pr[i]); - Arew[i] = Phi_approx(mu_pr[2] + sigma[2] * Arew_pr[i]); - beta[i] = Phi_approx(mu_pr[3] + sigma[3] * beta_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - Apun_pr ~ normal(0, 1); - Arew_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - for (bIdx in 1:Bsubj[i]) { // new - // Define Values - vector[2] ev; // Expected value - real PE; // Prediction error - - // Initialize values - ev = initV; // Initial ev values - - for (t in 1:Tsubj[i, bIdx]) { - // Softmax choice - choice[i, bIdx, t] ~ categorical_logit(ev * beta[i]); - - // Prediction Error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; - - // Update expected value of chosen stimulus - if (outcome[i, bIdx, t] > 0) - ev[choice[i, bIdx, t]] += Arew[i] * PE; - else - ev[choice[i, bIdx, t]] += Apun[i] * PE; - } - } - } -} - -generated quantities { - // For group level parameters - real mu_Apun; - real mu_Arew; - real mu_beta; - - // For log likelihood calculation - real log_lik[N]; - - // For model regressors - real ev_c[N, B, T]; // Expected value of the chosen option - real ev_nc[N, B, T]; // Expected value of the non-chosen option - real pe[N, B, T]; // Prediction error - - // For posterior predictive check - real y_pred[N, B, T]; - - // Initialize all the variables to avoid NULL values - for (i in 1:N) { - for (b in 1:B) { - for (t in 1:T) { - ev_c[i, b, t] = 0; - ev_nc[i, b, t] = 0; - pe[i, b, t] = 0; - - y_pred[i, b, t] = -1; - } - } - } - - mu_Apun = Phi_approx(mu_pr[1]); - mu_Arew = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - - log_lik[i] = 0; - - for (bIdx in 1:Bsubj[i]) { // new - // Define values - vector[2] ev; // Expected value - real PE; // prediction error - - // Initialize values - ev = initV; // initial ev values - - for (t in 1:Tsubj[i, bIdx]) { - // Softmax choice - log_lik[i] += categorical_logit_lpmf(choice[i, bIdx, t] | ev * beta[i]); - - // generate posterior prediction for current trial - y_pred[i, bIdx, t] = categorical_rng(softmax(ev * beta[i])); - - // Prediction Error - PE = outcome[i, bIdx, t] - ev[choice[i, bIdx, t]]; - - // Store values for model regressors - ev_c[i, bIdx, t] = ev[choice[i, bIdx, t]]; - ev_nc[i, bIdx, t] = ev[3 - choice[i, bIdx, t]]; - pe[i, bIdx, t] = PE; - - // 
Update expected value of chosen stimulus - if (outcome[i, bIdx, t] > 0) - ev[choice[i, bIdx, t]] += Arew[i] * PE; - else - ev[choice[i, bIdx, t]] += Apun[i] * PE; - } - } - } - } -} - diff --git a/R/inst/stan_files/pst_gainloss_Q.stan b/R/inst/stan_files/pst_gainloss_Q.stan deleted file mode 100644 index 788b9a4e..00000000 --- a/R/inst/stan_files/pst_gainloss_Q.stan +++ /dev/null @@ -1,114 +0,0 @@ -#include /pre/license.stan - -data { - int N; // Number of subjects - int T; // Maximum # of trials - int Tsubj[N]; // # of trials for acquisition phase - - int option1[N, T]; - int option2[N, T]; - int choice[N, T]; - real reward[N, T]; -} - -transformed data { - // Default values to initialize the vector of expected values - vector[6] initial_values; - initial_values = rep_vector(0, 6); -} - -parameters { - // Group-level parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level parameters for Matt trick - vector[N] alpha_pos_pr; - vector[N] alpha_neg_pr; - vector[N] beta_pr; -} - -transformed parameters { - vector[N] alpha_pos; - vector[N] alpha_neg; - vector[N] beta; - - alpha_pos = Phi_approx(mu_pr[1] + sigma[1] * alpha_pos_pr); - alpha_neg = Phi_approx(mu_pr[2] + sigma[2] * alpha_neg_pr); - beta = Phi_approx(mu_pr[3] + sigma[3] * beta_pr) * 10; -} - -model { - // Priors for group-level parameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // Priors for subject-level parameters - alpha_pos_pr ~ normal(0, 1); - alpha_neg_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - - for (i in 1:N) { - int co; // Chosen option - real delta; // Difference between two options - real pe; // Prediction error - real alpha; - vector[6] ev; // Expected values - - ev = initial_values; - - // Acquisition Phase - for (t in 1:Tsubj[i]) { - co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; - - // Luce choice rule - delta = ev[option1[i, t]] - ev[option2[i, t]]; - target += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); - - pe = reward[i, t] - ev[co]; - alpha = (pe >= 0) ? alpha_pos[i] : alpha_neg[i]; - ev[co] += alpha * pe; - } - } -} - -generated quantities { - // For group-level parameters - real mu_alpha_pos; - real mu_alpha_neg; - real mu_beta; - - // For log-likelihood calculation - real log_lik[N]; - - mu_alpha_pos = Phi_approx(mu_pr[1]); - mu_alpha_neg = Phi_approx(mu_pr[2]); - mu_beta = Phi_approx(mu_pr[3]) * 10; - - { - for (i in 1:N) { - int co; // Chosen option - real delta; // Difference between two options - real pe; // Prediction error - real alpha; - vector[6] ev; // Expected values - - ev = initial_values; - log_lik[i] = 0; - - // Acquisition Phase - for (t in 1:Tsubj[i]) { - co = (choice[i, t] > 0) ? option1[i, t] : option2[i, t]; - - // Luce choice rule - delta = ev[option1[i, t]] - ev[option2[i, t]]; - log_lik[i] += bernoulli_logit_lpmf(choice[i, t] | beta[i] * delta); - - pe = reward[i, t] - ev[co]; - alpha = (pe >= 0) ? 
alpha_pos[i] : alpha_neg[i]; - ev[co] += alpha * pe; - } - } - } -} - diff --git a/R/inst/stan_files/ra_noLA.stan b/R/inst/stan_files/ra_noLA.stan deleted file mode 100644 index c5c599c4..00000000 --- a/R/inst/stan_files/ra_noLA.stan +++ /dev/null @@ -1,95 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real gain[N, T]; - real loss[N, T]; // absolute loss amount - real cert[N, T]; - int gamble[N, T]; -} - -transformed data { -} - -parameters { - vector[2] mu_pr; - vector[2] sigma; - vector[N] rho_pr; - vector[N] tau_pr; -} - -transformed parameters { - vector[N] rho; - vector[N] tau; - - for (i in 1:N) { - rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; - tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30; - } -} - -model { - // ra_prospect: Original model in Soko-Hessner et al 2009 PNAS - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - rho_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. - real pGamble; - - evSafe = pow(cert[i, t], rho[i]); - evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i])); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - gamble[i, t] ~ bernoulli(pGamble); - } - } -} -generated quantities { - real mu_rho; - real mu_tau; - - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_rho = Phi_approx(mu_pr[1]) * 2; - mu_tau = Phi_approx(mu_pr[2]) * 30; - - { // local section, this saves time and space - for (i in 1:N) { - log_lik[i] = 0; - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. 
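// The choice rule used throughout the ra_* models reduces to one scalar
// computation per trial: a curved utility for the sure amount, the expected
// utility of a 50/50 gamble, and a logistic choice probability scaled by
// the inverse temperature tau (bounded to (0, 30) in this revision). A
// self-contained sketch for the noLA case, where loss aversion is fixed at
// lambda = 1 (function name illustrative, not part of hBayesDM):
functions {
  real p_gamble_noLA(real gain, real loss, real cert, real rho, real tau) {
    real ev_safe   = pow(cert, rho);                          // sure option
    real ev_gamble = 0.5 * (pow(gain, rho) - pow(loss, rho)); // loss is the absolute amount
    return inv_logit(tau * (ev_gamble - ev_safe));            // P(choose gamble)
  }
}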
- real pGamble; - - // loss[i, t]=absolute amount of loss (pre-converted in R) - evSafe = pow(cert[i, t], rho[i]); - evGamble = 0.5 * (pow(gain[i, t], rho[i]) - pow(loss[i, t], rho[i])); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGamble); - } - } - } -} - diff --git a/R/inst/stan_files/ra_noRA.stan b/R/inst/stan_files/ra_noRA.stan deleted file mode 100644 index 0f36c3be..00000000 --- a/R/inst/stan_files/ra_noRA.stan +++ /dev/null @@ -1,95 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real gain[N, T]; - real loss[N, T]; // absolute loss amount - real cert[N, T]; - int gamble[N, T]; -} - -transformed data { -} - -parameters { - vector[2] mu_pr; - vector[2] sigma; - vector[N] lambda_pr; - vector[N] tau_pr; -} - -transformed parameters { - vector[N] lambda; - vector[N] tau; - - for (i in 1:N) { - lambda[i] = Phi_approx(mu_pr[1] + sigma[1] * lambda_pr[i]) * 5; - tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 30; - } -} - -model { - // ra_prospect: Original model in Soko-Hessner et al 2009 PNAS - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - lambda_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. - real pGamble; - - // loss[i, t]=absolute amount of loss (pre-converted in R) - evSafe = cert[i, t]; - evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - gamble[i, t] ~ bernoulli(pGamble); - } - } -} -generated quantities { - real mu_lambda; - real mu_tau; - - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_lambda = Phi_approx(mu_pr[1]) * 5; - mu_tau = Phi_approx(mu_pr[2]) * 30; - - { // local section, this saves time and space - for (i in 1:N) { - log_lik[i] = 0; - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. 
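// ra_noRA is the complementary restriction: utility is linear in money
// (rho fixed at 1), but losses are still weighted by lambda. Together with
// ra_noLA above and the full ra_prospect below, the three models are nested
// special cases of one prospect-theory utility. Sketch of the general
// gamble value, assuming a 50/50 gamble and absolute loss amounts
// (function name illustrative):
functions {
  real ev_gamble_prospect(real gain, real loss, real rho, real lambda) {
    // lambda = 1 recovers ra_noLA; rho = 1 recovers ra_noRA
    return 0.5 * (pow(gain, rho) - lambda * pow(loss, rho));
  }
}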
- real pGamble; - - evSafe = cert[i, t]; - evGamble = 0.5 * (gain[i, t] - lambda[i] * loss[i, t]); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGamble); - } - } - } -} - diff --git a/R/inst/stan_files/ra_prospect.stan b/R/inst/stan_files/ra_prospect.stan deleted file mode 100644 index 542ea460..00000000 --- a/R/inst/stan_files/ra_prospect.stan +++ /dev/null @@ -1,97 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real gain[N, T]; - real loss[N, T]; // absolute loss amount - real cert[N, T]; - int gamble[N, T]; -} -transformed data { -} -parameters { - vector[3] mu_pr; - vector[3] sigma; - vector[N] rho_pr; - vector[N] lambda_pr; - vector[N] tau_pr; -} -transformed parameters { - vector[N] rho; - vector[N] lambda; - vector[N] tau; - - for (i in 1:N) { - rho[i] = Phi_approx(mu_pr[1] + sigma[1] * rho_pr[i]) * 2; - lambda[i] = Phi_approx(mu_pr[2] + sigma[2] * lambda_pr[i]) * 5; - tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 30; - } -} -model { - // ra_prospect: Original model in Soko-Hessner et al 2009 PNAS - // hyper parameters - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - rho_pr ~ normal(0, 1.0); - lambda_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - - for (i in 1:N) { - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. - real pGamble; - - // loss[i, t]=absolute amount of loss (pre-converted in R) - evSafe = pow(cert[i, t], rho[i]); - evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(loss[i, t], rho[i])); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - gamble[i, t] ~ bernoulli(pGamble); - } - } -} -generated quantities { - real mu_rho; - real mu_lambda; - real mu_tau; - - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_rho = Phi_approx(mu_pr[1]) * 2; - mu_lambda = Phi_approx(mu_pr[2]) * 5; - mu_tau = Phi_approx(mu_pr[3]) * 30; - - { // local section, this saves time and space - for (i in 1:N) { - log_lik[i] = 0; - for (t in 1:Tsubj[i]) { - real evSafe; // evSafe, evGamble, pGamble can be a scalar to save memory and increase speed. - real evGamble; // they are left as arrays as an example for RL models. 
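// Every subject-level parameter in these files follows the same
// non-centered ("Matt trick") construction seen in the transformed
// parameters blocks: a standard-normal raw parameter is shifted and scaled
// by the group-level mu_pr and sigma, squashed into (0, 1) with Phi_approx
// (a fast approximation to the normal CDF), and multiplied by an upper
// bound (rho in (0, 2), lambda in (0, 5), tau in (0, 30) here). A one-line
// sketch of the idiom (function name illustrative, not part of hBayesDM):
functions {
  real bounded_param(real mu, real sigma, real raw, real upper) {
    // raw ~ normal(0, 1) in the model block; the result lies in (0, upper)
    return Phi_approx(mu + sigma * raw) * upper;
  }
}
// Sampling the raw parameter and transforming it, rather than sampling the
// bounded parameter directly, removes the funnel-shaped correlation between
// sigma and the subject-level values and makes HMC mixing much easier.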
- real pGamble; - - evSafe = pow(cert[i, t], rho[i]); - evGamble = 0.5 * (pow(gain[i, t], rho[i]) - lambda[i] * pow(fabs(loss[i, t]), rho[i])); - pGamble = inv_logit(tau[i] * (evGamble - evSafe)); - log_lik[i] += bernoulli_lpmf(gamble[i, t] | pGamble); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(pGamble); - } - } - } -} - diff --git a/R/inst/stan_files/rdt_happiness.stan b/R/inst/stan_files/rdt_happiness.stan deleted file mode 100644 index 3abb9e18..00000000 --- a/R/inst/stan_files/rdt_happiness.stan +++ /dev/null @@ -1,146 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real gain[N, T]; - real loss[N, T]; // absolute loss amount - real cert[N, T]; - int type[N, T]; - int gamble[N, T]; - real outcome[N, T]; - real happy[N, T]; - real RT_happy[N, T]; -} -transformed data { -} -parameters { - vector[6] mu_pr; - vector[6] sigma; - vector[N] w0_pr; - vector[N] w1_pr; - vector[N] w2_pr; - vector[N] w3_pr; - vector[N] gam_pr; - vector[N] sig_pr; -} -transformed parameters { - vector[N] w0; - vector[N] w1; - vector[N] w2; - vector[N] w3; - vector[N] gam; - vector[N] sig; - - w0 = mu_pr[1] + sigma[1] * w0_pr; - w1 = mu_pr[2] + sigma[2] * w1_pr; - w2 = mu_pr[3] + sigma[3] * w2_pr; - w3 = mu_pr[4] + sigma[4] * w3_pr; - - for (i in 1:N) { - gam[i] = Phi_approx(mu_pr[5] + sigma[5] * gam_pr[i]); - } - sig = exp(mu_pr[6] + sigma[6] * sig_pr); -} -model { - mu_pr ~ normal(0, 1.0); - sigma ~ normal(0, 0.2); - - // individual parameters w/ Matt trick - w0_pr ~ normal(0, 1.0); - w1_pr ~ normal(0, 1.0); - w2_pr ~ normal(0, 1.0); - w3_pr ~ normal(0, 1.0); - gam_pr ~ normal(0, 1.0); - sig_pr ~ normal(0, 1.0); - - for (i in 1:N) { - real cert_sum; - real ev_sum; - real rpe_sum; - - - cert_sum = 0; - ev_sum = 0; - rpe_sum = 0; - - for (t in 1:Tsubj[i]) { - if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ - happy[i,t] ~ normal(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - } - - if(gamble[i,t] == 0){ - cert_sum += type[i,t] * cert[i,t]; - } else { - ev_sum += 0.5 * (gain[i,t] - loss[i,t]); - rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); - } - - cert_sum *= gam[i]; - ev_sum *= gam[i]; - rpe_sum *= gam[i]; - } - } -} -generated quantities { - real mu_w0; - real mu_w1; - real mu_w2; - real mu_w3; - real mu_gam; - real mu_sig; - - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_w0 = mu_pr[1]; - mu_w1 = mu_pr[2]; - mu_w2 = mu_pr[3]; - mu_w3 = mu_pr[4]; - mu_gam = Phi_approx(mu_pr[5]); - mu_sig = exp(mu_pr[6]); - - - { // local section, this saves time and space - for (i in 1:N) { - real cert_sum; - real ev_sum; - real rpe_sum; - - log_lik[i] = 0; - - cert_sum = 0; - ev_sum = 0; - rpe_sum = 0; - - for (t in 1:Tsubj[i]) { - if(t == 1 || t > 1 && RT_happy[i,t] != RT_happy[i,t-1]){ - log_lik[i] += normal_lpdf(happy[i, t] | w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - y_pred[i, t] = normal_rng(w0[i] + w1[i] * cert_sum + w2[i] * ev_sum + w3[i] * rpe_sum, sig[i]); - } - - if(gamble[i,t] == 0){ - cert_sum += type[i,t] * cert[i,t]; - } else { - ev_sum += 0.5 * (gain[i,t] - loss[i,t]); - rpe_sum += outcome[i,t] - 0.5 * (gain[i,t] - loss[i,t]); - } - - cert_sum *= gam[i]; - ev_sum *= gam[i]; - rpe_sum *= gam[i]; - } - } - } -} - diff --git a/R/inst/stan_files/ts_par4.stan b/R/inst/stan_files/ts_par4.stan deleted file mode 
100644 index c615f6d0..00000000 --- a/R/inst/stan_files/ts_par4.stan +++ /dev/null @@ -1,204 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[4] mu_pr; - vector[4] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a_pr; - vector[N] beta_pr; - vector[N] pi_pr; - vector[N] w_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a; - vector[N] beta; - vector[N] pi; - vector[N] w; - - for (i in 1:N) { - a[i] = Phi_approx( mu_pr[1] + sigma[1] * a_pr[i] ); - beta[i] = exp( mu_pr[2] + sigma[2] * beta_pr[i] ); - pi[i] = Phi_approx( mu_pr[3] + sigma[3] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[4] + sigma[4] * w_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a_pr ~ normal(0, 1); - beta_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. 
of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a; - real mu_beta; - real mu_pi; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a = Phi_approx( mu_pr[1] ); - mu_beta = exp( mu_pr[2] ); - mu_pi = Phi_approx( mu_pr[3] ) * 5; - mu_w = Phi_approx( mu_pr[4] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - 
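The core of the ts_par4 model being deleted here (shared by ts_par6 and ts_par7 below) is the hybrid valuation for the two-step task: model-based values come from the known transition probability over the better level-2 option, are mixed with model-free values by weight w, and the first-stage choice rule adds a perseveration term pi for repeating the previous response. A sketch of one trial's level-1 choice probability; names and the 0.7 transition probability are illustrative:

```python
import numpy as np

def inv_logit(x):
    return 1.0 / (1.0 + np.exp(-x))

def level1_p_choose2(v_mf, w, beta, pi, prev_choice=None, trans_prob=0.7):
    # v_mf: six model-free values; v_mf[0:2] level-1 stimuli, v_mf[2:6] level-2
    v_mb = np.array([
        trans_prob * max(v_mf[2], v_mf[3]) + (1 - trans_prob) * max(v_mf[4], v_mf[5]),
        (1 - trans_prob) * max(v_mf[2], v_mf[3]) + trans_prob * max(v_mf[4], v_mf[5]),
    ])
    v_hybrid = w * v_mb + (1 - w) * v_mf[:2]   # weighted MB/MF mixture
    # previous choice in {1, 2} maps to a -1/+1 stickiness term, as in the Stan code
    persev = 0.0 if prev_choice is None else pi * (2 * prev_choice - 3)
    return inv_logit(beta * (v_hybrid[1] - v_hybrid[0]) + persev)

p = level1_p_choose2(np.zeros(6), w=0.5, beta=1.2, pi=0.8, prev_choice=2)
```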
level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop - } -} - diff --git a/R/inst/stan_files/ts_par6.stan b/R/inst/stan_files/ts_par6.stan deleted file mode 100644 index b472afa0..00000000 --- a/R/inst/stan_files/ts_par6.stan +++ /dev/null @@ -1,213 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[6] mu_pr; - vector[6] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a1_pr; - vector[N] beta1_pr; - vector[N] a2_pr; - vector[N] beta2_pr; - vector[N] pi_pr; - vector[N] w_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a1; - vector[N] beta1; - vector[N] a2; - vector[N] beta2; - vector[N] pi; - vector[N] w; - - for (i in 1:N) { - a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); - beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); - a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); - beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); - pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a1_pr ~ normal(0, 1); - beta1_pr ~ normal(0, 1); - a2_pr ~ normal(0, 1); - beta2_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. 
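A pattern worth flagging across all of these deleted models: subject-level parameters are declared as standard-normal "raw" deviates and transformed afterwards (the non-centered, or "Matt trick", parameterization), which decouples individual parameters from the group-level scale and tends to give HMC better geometry. Bounded parameters such as pi are then produced by a probit squash times the upper bound. A sketch using scipy's normal CDF as a stand-in for Stan's faster Phi_approx; the numbers are illustrative:

```python
import numpy as np
from scipy.stats import norm

rng = np.random.default_rng(0)
mu, sigma, upper = 0.2, 0.5, 5.0           # group mean/scale, upper bound
raw = rng.standard_normal(10)              # subject-level raw parameters
pi = norm.cdf(mu + sigma * raw) * upper    # each pi[i] now lies in (0, 5)
print(pi.round(2))
```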
Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a1; - real mu_beta1; - real mu_a2; - real mu_beta2; - real mu_pi; - real mu_w; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a1 = Phi_approx( mu_pr[1] ); - mu_beta1 = exp( mu_pr[2] ); - mu_a2 = Phi_approx( mu_pr[3] ); - mu_beta2 = exp( mu_pr[4] ); - mu_pi = Phi_approx( mu_pr[5] ) * 5; - mu_w = Phi_approx( mu_pr[6] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // After observing the reward at Level 2... 
- // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - - } // end of t loop - } // end of i loop - } -} - diff --git a/R/inst/stan_files/ts_par7.stan b/R/inst/stan_files/ts_par7.stan deleted file mode 100644 index 089042c2..00000000 --- a/R/inst/stan_files/ts_par7.stan +++ /dev/null @@ -1,217 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - int level1_choice[N,T]; // 1: left, 2: right - int level2_choice[N,T]; // 1-4: 1/2: commonly associated with level1=1, 3/4: commonly associated with level1=2 - int reward[N,T]; - real trans_prob; -} -transformed data { -} -parameters { - // Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[7] mu_pr; - vector[7] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] a1_pr; - vector[N] beta1_pr; - vector[N] a2_pr; - vector[N] beta2_pr; - vector[N] pi_pr; - vector[N] w_pr; - vector[N] lambda_pr; -} -transformed parameters { - // Transform subject-level raw parameters - vector[N] a1; - vector[N] beta1; - vector[N] a2; - vector[N] beta2; - vector[N] pi; - vector[N] w; - vector[N] lambda; - - for (i in 1:N) { - a1[i] = Phi_approx( mu_pr[1] + sigma[1] * a1_pr[i] ); - beta1[i] = exp( mu_pr[2] + sigma[2] * beta1_pr[i] ); - a2[i] = Phi_approx( mu_pr[3] + sigma[3] * a2_pr[i] ); - beta2[i] = exp( mu_pr[4] + sigma[4] * beta2_pr[i] ); - pi[i] = Phi_approx( mu_pr[5] + sigma[5] * pi_pr[i] ) * 5; - w[i] = Phi_approx( mu_pr[6] + sigma[6] * w_pr[i] ); - lambda[i] = Phi_approx( mu_pr[7] + sigma[7] * lambda_pr[i] ); - } -} -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - a1_pr ~ normal(0, 1); - beta1_pr ~ normal(0, 1); - a2_pr ~ normal(0, 1); - beta2_pr ~ normal(0, 1); - pi_pr ~ normal(0, 1); - w_pr ~ normal(0, 1); - lambda_pr ~ normal(0, 1); - - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // Initialize prob. of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // Initialize prob. 
of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - level1_choice_01 ~ bernoulli( level1_prob_choice2 ); // level 1, prob. of choosing 2 in level 1 - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 --> 1 - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - level2_choice_01 ~ bernoulli( level2_prob_choice2 ); // level 2, prob of choosing right option in level 2 - - // After observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. 
Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_a1; - real mu_beta1; - real mu_a2; - real mu_beta2; - real mu_pi; - real mu_w; - real mu_lambda; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred_step1[N,T]; - real y_pred_step2[N,T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred_step1[i,t] = -1; - y_pred_step2[i,t] = -1; - } - } - - // Generate group level parameter values - mu_a1 = Phi_approx( mu_pr[1] ); - mu_beta1 = exp( mu_pr[2] ); - mu_a2 = Phi_approx( mu_pr[3] ); - mu_beta2 = exp( mu_pr[4] ); - mu_pi = Phi_approx( mu_pr[5] ) * 5; - mu_w = Phi_approx( mu_pr[6] ); - mu_lambda = Phi_approx( mu_pr[7] ); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - vector[2] v_mb; // model-based stimulus values for level 1 (2 stimuli) - vector[6] v_mf; // model-free stimulus values for level 1&2 (1,2--> level 1, 3-6--> level 2) - vector[2] v_hybrid; // hybrid stimulus values for level 1 (2 stimuli) - real level1_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 1 - real level2_prob_choice2; // prob of choosing stim 2 (0 or 1) in level 2 - int level1_choice_01; - int level2_choice_01; - - // Initialize values - v_mb = rep_vector(0.0, 2); - v_mf = rep_vector(0.0, 6); - v_hybrid = rep_vector(0.0, 2); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - // compute v_mb - v_mb[1] = trans_prob * fmax(v_mf[3], v_mf[4]) + (1 - trans_prob) * fmax(v_mf[5], v_mf[6]); // for level1, stim 1 - v_mb[2] = (1 - trans_prob) * fmax(v_mf[3], v_mf[4]) + trans_prob * fmax(v_mf[5], v_mf[6]); // for level1, stim 2 - - // compute v_hybrid - v_hybrid[1] = w[i] * v_mb[1] + (1-w[i]) * v_mf[1]; // hybrid stim 1= weighted sum - v_hybrid[2] = w[i] * v_mb[2] + (1-w[i]) * v_mf[2]; // hybrid stim 2= weighted sum - - // Prob of choosing stimulus 2 in ** Level 1 ** --> to be used on the next trial - // level1_choice=1 --> -1, level1_choice=2 --> 1 - level1_choice_01 = level1_choice[i,t] - 1; // convert 1,2 --> 0,1 - if(t == 1){ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1])); - } else{ - level1_prob_choice2 = inv_logit( beta1[i]*(v_hybrid[2]-v_hybrid[1]) + pi[i]*(2*level1_choice[i,t-1] -3) ); - } - log_lik[i] += bernoulli_lpmf( level1_choice_01 | level1_prob_choice2 ); - - // Observe Level2 and update Level1 of the chosen option - v_mf[level1_choice[i,t]] += a1[i]*(v_mf[2+ level2_choice[i,t]] - v_mf[ level1_choice[i,t]]); - - // Prob of choosing stim 2 (2 from [1,2] OR 4 from [3,4]) in ** Level (step) 2 ** - level2_choice_01 = 1 - modulus(level2_choice[i,t], 2); // 1,3 --> 0; 2,4 - // Level 2 --> choose one of two level 2 options - if (level2_choice[i,t] > 2) { // level2_choice = 3 or 4 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[6] - v_mf[5] ) ); - } else { // level2_choice = 1 or 2 - level2_prob_choice2 = inv_logit( beta2[i]*( v_mf[4] - v_mf[3] ) ); - } - log_lik[i] += bernoulli_lpmf( level2_choice_01 | level2_prob_choice2 ); - - // generate posterior prediction for current trial - y_pred_step1[i,t] = bernoulli_rng(level1_prob_choice2); - y_pred_step2[i,t] = bernoulli_rng(level2_prob_choice2); - - // After 
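Relative to ts_par4 and ts_par6, the ts_par7 model shown here adds one parameter, lambda, which scales how much of the level-2 reward prediction error is passed back to the chosen level-1 value (an eligibility-trace-style weighting; lambda = 1 recovers the update used by the other two models). A sketch of that final update step, with 0-based indices and illustrative names:

```python
def update_after_reward(v_mf, c1, c2, reward, a1, a2, lam):
    # c1 in {0, 1}: level-1 choice; c2 in {0, ..., 3}: level-2 choice
    v_mf[2 + c2] += a2 * (reward - v_mf[2 + c2])    # level-2 update first
    v_mf[c1] += lam * a1 * (reward - v_mf[2 + c2])  # traced level-1 update,
    return v_mf                                     # using the updated value
```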
observing the reward at Level 2... - // Update Level 2 v_mf of the chosen option. Level 2--> choose one of level 2 options and observe reward - v_mf[2+ level2_choice[i,t]] += a2[i]*(reward[i,t] - v_mf[2+ level2_choice[i,t] ] ); - - // Update Level 1 v_mf - v_mf[level1_choice[i,t]] += lambda[i] * a1[i] * (reward[i,t] - v_mf[2+level2_choice[i,t]]); - } // end of t loop - } // end of i loop - } -} - diff --git a/R/inst/stan_files/ug_bayes.stan b/R/inst/stan_files/ug_bayes.stan deleted file mode 100644 index 6136e708..00000000 --- a/R/inst/stan_files/ug_bayes.stan +++ /dev/null @@ -1,167 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real offer[N, T]; - int accept[N, T]; -} - -transformed data { - real initV; - real mu0; - real k0; - real sig20; - real nu0; - - initV = 0.0; - mu0 = 10.0; // initial expectation - k0 = 4.0; - sig20 = 4.0; - nu0 = 10.0; -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // alpha: envy - vector[N] beta_pr; // beta: guilt - vector[N] tau_pr; // tau: inverse temperature -} - -transformed parameters { - // Transform subject-level raw parameters - real alpha[N]; - real beta[N]; - real tau[N]; - - for (i in 1:N) { - alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; - beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]) * 10; - tau[i] = Phi_approx(mu_pr[3] + sigma[3] * tau_pr[i]) * 10; - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - alpha_pr ~ normal(0, 1.0); - beta_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - real util; - real mu_old; - real mu_new; - real k_old; - real k_new; - real sig2_old; - real sig2_new; - real nu_old; - real nu_new; - real PE; // not required for computation - - // Initialize values - mu_old = mu0; - k_old = k0; - sig2_old = sig20; - nu_old = nu0; - - for (t in 1:Tsubj[i]) { - k_new = k_old + 1; - nu_new = nu_old + 1; - mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; - sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); - - PE = offer[i, t] - mu_old; - util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); - - accept[i, t] ~ bernoulli_logit(util * tau[i]); - - // replace old ones with new ones - mu_old = mu_new; - sig2_old = sig2_new; - k_old = k_new; - nu_old = nu_new; - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_beta; - real mu_tau; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 20; - mu_beta = Phi_approx(mu_pr[2]) * 10; - mu_tau = Phi_approx(mu_pr[3]) * 10; - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real util; - real mu_old; - real mu_new; - real k_old; - real k_new; - real sig2_old; - real sig2_new; - real nu_old; - real nu_new; - real PE; // not required for computation - - // Initialize values - mu_old = mu0; - k_old = k0; - sig2_old = sig20; - nu_old = nu0; - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - k_new = k_old + 1; - nu_new = 
nu_old + 1; - mu_new = (k_old/k_new) * mu_old + (1/k_new) * offer[i, t]; - sig2_new = (nu_old/nu_new) * sig2_old + (1/nu_new) * (k_old/k_new) * pow((offer[i, t] - mu_old), 2); - - PE = offer[i, t] - mu_old; - util = offer[i, t] - alpha[i] * fmax(mu_new - offer[i, t], 0.0) - beta[i] * fmax(offer[i, t] - mu_new, 0.0); - - log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); - - // replace old ones with new ones - mu_old = mu_new; - sig2_old = sig2_new; - k_old = k_new; - nu_old = nu_new; - } // end of t loop - } // end of i loop - } // end of local section -} - diff --git a/R/inst/stan_files/ug_delta.stan b/R/inst/stan_files/ug_delta.stan deleted file mode 100644 index 9bb70e0a..00000000 --- a/R/inst/stan_files/ug_delta.stan +++ /dev/null @@ -1,129 +0,0 @@ -#include /pre/license.stan - -data { - int N; - int T; - int Tsubj[N]; - real offer[N, T]; - int accept[N, T]; -} - -transformed data { -} - -parameters { -// Declare all parameters as vectors for vectorizing - // Hyper(group)-parameters - vector[3] mu_pr; - vector[3] sigma; - - // Subject-level raw parameters (for Matt trick) - vector[N] alpha_pr; // alpha: Envy (sensitivity to norm prediction error) - vector[N] tau_pr; // tau: Inverse temperature - vector[N] ep_pr; // ep: Norm adaptation rate -} - -transformed parameters { - // Transform subject-level raw parameters - real alpha[N]; - real tau[N]; - real ep[N]; - - for (i in 1:N) { - alpha[i] = Phi_approx(mu_pr[1] + sigma[1] * alpha_pr[i]) * 20; - tau[i] = Phi_approx(mu_pr[2] + sigma[2] * tau_pr[i]) * 10; - ep[i] = Phi_approx(mu_pr[3] + sigma[3] * ep_pr[i]); - } -} - -model { - // Hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - alpha_pr ~ normal(0, 1.0); - tau_pr ~ normal(0, 1.0); - ep_pr ~ normal(0, 1.0); - - for (i in 1:N) { - // Define values - real f; // Internal norm - real PE; // Prediction error - real util; // Utility of offer - - // Initialize values - f = 10.0; - - for (t in 1:Tsubj[i]) { - // calculate prediction error - PE = offer[i, t] - f; - - // Update utility - util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); - - // Sampling statement - accept[i, t] ~ bernoulli_logit(util * tau[i]); - - // Update internal norm - f += ep[i] * PE; - - } // end of t loop - } // end of i loop -} - -generated quantities { - // For group level parameters - real mu_alpha; - real mu_tau; - real mu_ep; - - // For log likelihood calculation - real log_lik[N]; - - // For posterior predictive check - real y_pred[N, T]; - - // Set all posterior predictions to 0 (avoids NULL values) - for (i in 1:N) { - for (t in 1:T) { - y_pred[i, t] = -1; - } - } - - mu_alpha = Phi_approx(mu_pr[1]) * 20; - mu_tau = Phi_approx(mu_pr[2]) * 10; - mu_ep = Phi_approx(mu_pr[3]); - - { // local section, this saves time and space - for (i in 1:N) { - // Define values - real f; // Internal norm - real PE; // prediction error - real util; // Utility of offer - - // Initialize values - f = 10.0; - log_lik[i] = 0.0; - - for (t in 1:Tsubj[i]) { - // calculate prediction error - PE = offer[i, t] - f; - - // Update utility - util = offer[i, t] - alpha[i] * fmax(f - offer[i, t], 0.0); - - // Calculate log likelihood - log_lik[i] += bernoulli_logit_lpmf(accept[i, t] | util * tau[i]); - - // generate posterior prediction for current trial - y_pred[i, t] = bernoulli_rng(inv_logit(util * tau[i])); - - // Update internal norm - f += ep[i] * PE; - - } // 
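The two ultimatum-game models deleted in this stretch differ in how the responder's expectation about offers evolves. ug_bayes runs a conjugate-style normal update of the offer mean mu (pseudo-count k) and variance sig2 (pseudo-count nu), starting from the mu0 = 10, k0 = 4, sig20 = 4, nu0 = 10 constants above; utility then penalizes deviations from the expectation via envy (alpha) and guilt (beta), and acceptance is Bernoulli-logit in tau * util. ug_delta instead keeps a single internal norm f updated by a delta rule, f += ep * (offer - f). A worked sketch of the ug_bayes update with illustrative offers:

```python
# Belief update from ug_bayes.stan; initial values mirror the Stan file,
# the offer sequence is illustrative.
mu, k, sig2, nu = 10.0, 4.0, 4.0, 10.0
for offer in [8.0, 3.0, 5.0]:
    k_new, nu_new = k + 1, nu + 1
    mu_new = (k / k_new) * mu + (1 / k_new) * offer
    sig2_new = (nu / nu_new) * sig2 + (1 / nu_new) * (k / k_new) * (offer - mu) ** 2
    mu, k, sig2, nu = mu_new, k_new, sig2_new, nu_new
    print(f"offer={offer:4.1f}  mu={mu:5.2f}  sig2={sig2:5.2f}")
```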
end of t loop - } // end of i loop - } // end of local section -} - diff --git a/R/inst/stan_files/wcs_sql.stan b/R/inst/stan_files/wcs_sql.stan deleted file mode 100644 index 81b8ce17..00000000 --- a/R/inst/stan_files/wcs_sql.stan +++ /dev/null @@ -1,176 +0,0 @@ -#include /pre/license.stan - -data { - int N; // number of subjects - int T; // max trial - int Tsubj[N]; // number of max trials per subject - - int choice[N, 4, T]; // subject's deck choice within a trial (1, 2, 3 and 4) - int outcome[N, T]; // whether subject's choice is correct or not within a trial (1 and 0) - matrix[1, 3] choice_match_att[N, T]; // indicates which dimension the chosen card matches to within a trial - matrix[3, 4] deck_match_rule[T]; // indicates which dimension(color, form, number) each of the 4 decks matches to within a trial -} - -transformed data { - matrix[1, 3] initAtt; // each subject start with an even attention to each dimension - matrix[1, 3] unit; // used to flip attention after punishing feedback inside the model - - initAtt = rep_matrix(1.0/3.0, 1, 3); - unit = rep_matrix(1.0, 1, 3); -} - -parameters { - // hyper parameters - vector[3] mu_pr; - vector[3] sigma; - - // subject-level raw parameters (for Matt trick) - vector[N] r_pr; // sensitivity to rewarding feedback (reward learning rate) - vector[N] p_pr; // sensitivity to punishing feedback (punishment learning rate) - vector[N] d_pr; // decision consistency (inverse temperature) -} - -transformed parameters { - // transform subject-level raw parameters - vector[N] r; - vector[N] p; - vector[N] d; - - for (i in 1:N) { - r[i] = Phi_approx( mu_pr[1] + sigma[1] * r_pr[i] ); - p[i] = Phi_approx( mu_pr[2] + sigma[2] * p_pr[i] ); - d[i] = Phi_approx( mu_pr[3] + sigma[3] * d_pr[i] ) * 5; - } -} - -model { - // hyperparameters - mu_pr ~ normal(0, 1); - sigma ~ normal(0, 0.2); - - // individual parameters - r_pr ~ normal(0, 1); - p_pr ~ normal(0, 1); - d_pr ~ normal(0, 1); - - for (i in 1:N) { - // define values - vector[4] pred_prob_mat; // predicted probability of choosing a deck in each trial based on attention - matrix[1, 3] subj_att; // subject's attention to each dimension - matrix[1, 3] att_signal; // signal where a subject has to pay attention after reward/punishment - real sum_att_signal; // temporary variable to calculate sum(att_signal) - matrix[1, 3] tmpatt; // temporary variable to calculate subj_att - vector[4] tmpp; // temporary variable to calculate pred_prob_mat - - // initiate values - subj_att = initAtt; - pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); - - for (t in 1:Tsubj[i]) { - // multinomial choice - choice[i,,t] ~ multinomial(pred_prob_mat); - - // re-distribute attention after getting a feedback - if (outcome[i,t] == 1) { - att_signal = subj_att .* choice_match_att[i,t]; - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; - } else { - att_signal = subj_att .* (unit - choice_match_att[i,t]); - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; - } - - // scaling to avoid log(0) - subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; - - tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); - tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); - tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); - - // repeat until the final trial - if (t < Tsubj[i]) { - tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; - pred_prob_mat = tmpp/sum(tmpp); - } - - } // end of trial loop - } // end of subject loop -} -generated 
quantities { - // for group level parameters - real mu_r; - real mu_p; - real mu_d; - - // for log-likelihood calculation - real log_lik[N]; - - // for posterior predictive check - int y_pred[N, 4, T]; - - // initiate the variable to avoid NULL values - for (i in 1:N) { - for (t in 1:T) { - for (deck in 1:4) { - y_pred[i,deck,t] = -1; - } - } - } - - mu_r = Phi_approx(mu_pr[1]); - mu_p = Phi_approx(mu_pr[2]); - mu_d = Phi_approx(mu_pr[3]) * 5; - - { // local section, this saves time and space - for (i in 1:N) { - matrix[1, 3] subj_att; - matrix[1, 3] att_signal; - vector[4] pred_prob_mat; - - matrix[1, 3] tmpatt; - vector[4] tmpp; - - real sum_att_signal; - - subj_att = initAtt; - pred_prob_mat = to_vector(subj_att*deck_match_rule[1,,]); - - log_lik[i] = 0; - - for (t in 1:Tsubj[i]) { - - log_lik[i] += multinomial_lpmf(choice[i,,t] | pred_prob_mat); - - y_pred[i,,t] = multinomial_rng(pred_prob_mat, 1); - - if(outcome[i,t] == 1) { - att_signal = subj_att .* choice_match_att[i,t]; - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - r[i])*subj_att + r[i]*att_signal; - } else { - att_signal = subj_att .* (unit - choice_match_att[i,t]); - sum_att_signal = sum(att_signal); - att_signal /= sum_att_signal; - tmpatt = (1.0 - p[i])*subj_att + p[i]*att_signal; - } - - subj_att = (tmpatt/sum(tmpatt))*.9998+.0001; - - tmpatt[1, 1] = pow(subj_att[1, 1],d[i]); - tmpatt[1, 2] = pow(subj_att[1, 2],d[i]); - tmpatt[1, 3] = pow(subj_att[1, 3],d[i]); - - if(t < Tsubj[i]) { - tmpp = to_vector(tmpatt*deck_match_rule[t+1,,])*.9998+.0001; - pred_prob_mat = tmpp/sum(tmpp); - } - - } // end of trial loop - } // end of subject loop - } // end of local section -} - diff --git a/Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt b/commons/extdata/bandit2arm_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/bandit2arm_exampleData.txt rename to commons/extdata/bandit2arm_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt b/commons/extdata/bandit4arm2_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/bandit4arm2_exampleData.txt rename to commons/extdata/bandit4arm2_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt b/commons/extdata/bandit4arm_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/bandit4arm_exampleData.txt rename to commons/extdata/bandit4arm_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/bart_exampleData.txt b/commons/extdata/bart_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/bart_exampleData.txt rename to commons/extdata/bart_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/choiceRT_exampleData.txt b/commons/extdata/choiceRT_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/choiceRT_exampleData.txt rename to commons/extdata/choiceRT_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt b/commons/extdata/choiceRT_single_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/choiceRT_single_exampleData.txt rename to commons/extdata/choiceRT_single_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/cra_exampleData.txt b/commons/extdata/cra_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/cra_exampleData.txt rename to commons/extdata/cra_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/dbdm_exampleData.txt 
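The wcs_sql model just deleted is a sequential-learning rule over attention to the three card dimensions: after rewarding feedback, attention shifts toward the dimensions the chosen card matched (rate r); after punishment it shifts toward the unmatched ones (rate p); the carried-forward weights are renormalized with a small floor to avoid log(0); and choice probabilities sharpen the weights by the consistency parameter d before projecting onto the four decks. A sketch with illustrative values:

```python
import numpy as np

def update_attention(att, match, rewarded, r, p):
    # att: attention over (color, form, number); match: 1 where the card matched
    signal = (att * match) if rewarded else (att * (1 - match))
    signal = signal / signal.sum()
    rate = r if rewarded else p
    att = (1 - rate) * att + rate * signal
    return att / att.sum() * 0.9998 + 0.0001   # floored, carried to next trial

def deck_probs(att, deck_match_rule, d):
    sharpened = att ** d                       # consistency / decisiveness
    probs = sharpened @ deck_match_rule        # (3,) x (3, 4) -> (4,)
    probs = probs * 0.9998 + 0.0001
    return probs / probs.sum()

att = update_attention(np.full(3, 1 / 3), np.array([1, 0, 0]), True, r=0.3, p=0.2)
```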
b/commons/extdata/dbdm_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/dbdm_exampleData.txt rename to commons/extdata/dbdm_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/dd_exampleData.txt b/commons/extdata/dd_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/dd_exampleData.txt rename to commons/extdata/dd_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/dd_single_exampleData.txt b/commons/extdata/dd_single_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/dd_single_exampleData.txt rename to commons/extdata/dd_single_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/gng_exampleData.txt b/commons/extdata/gng_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/gng_exampleData.txt rename to commons/extdata/gng_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/igt_exampleData.txt b/commons/extdata/igt_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/igt_exampleData.txt rename to commons/extdata/igt_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/peer_exampleData.txt b/commons/extdata/peer_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/peer_exampleData.txt rename to commons/extdata/peer_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/prl_exampleData.txt b/commons/extdata/prl_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/prl_exampleData.txt rename to commons/extdata/prl_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt b/commons/extdata/prl_multipleB_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/prl_multipleB_exampleData.txt rename to commons/extdata/prl_multipleB_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/pst_exampleData.txt b/commons/extdata/pst_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/pst_exampleData.txt rename to commons/extdata/pst_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/ra_data_attend.txt b/commons/extdata/ra_data_attend.txt similarity index 100% rename from Python/hbayesdm/common/extdata/ra_data_attend.txt rename to commons/extdata/ra_data_attend.txt diff --git a/Python/hbayesdm/common/extdata/ra_data_reappraisal.txt b/commons/extdata/ra_data_reappraisal.txt similarity index 100% rename from Python/hbayesdm/common/extdata/ra_data_reappraisal.txt rename to commons/extdata/ra_data_reappraisal.txt diff --git a/Python/hbayesdm/common/extdata/ra_exampleData.txt b/commons/extdata/ra_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/ra_exampleData.txt rename to commons/extdata/ra_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/rdt_exampleData.txt b/commons/extdata/rdt_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/rdt_exampleData.txt rename to commons/extdata/rdt_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/ts_exampleData.txt b/commons/extdata/ts_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/ts_exampleData.txt rename to commons/extdata/ts_exampleData.txt diff --git a/Python/hbayesdm/common/extdata/ug_exampleData.txt b/commons/extdata/ug_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/ug_exampleData.txt rename to commons/extdata/ug_exampleData.txt diff --git 
a/Python/hbayesdm/common/extdata/wcs_answersheet.txt b/commons/extdata/wcs_answersheet.txt similarity index 100% rename from Python/hbayesdm/common/extdata/wcs_answersheet.txt rename to commons/extdata/wcs_answersheet.txt diff --git a/Python/hbayesdm/common/extdata/wcs_exampleData.txt b/commons/extdata/wcs_exampleData.txt similarity index 100% rename from Python/hbayesdm/common/extdata/wcs_exampleData.txt rename to commons/extdata/wcs_exampleData.txt diff --git a/Python/hbayesdm/common/stan_files/bandit2arm_delta.stan b/commons/stan_files/bandit2arm_delta.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit2arm_delta.stan rename to commons/stan_files/bandit2arm_delta.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan b/commons/stan_files/bandit4arm2_kalman_filter.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm2_kalman_filter.stan rename to commons/stan_files/bandit4arm2_kalman_filter.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan b/commons/stan_files/bandit4arm_2par_lapse.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm_2par_lapse.stan rename to commons/stan_files/bandit4arm_2par_lapse.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_4par.stan b/commons/stan_files/bandit4arm_4par.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm_4par.stan rename to commons/stan_files/bandit4arm_4par.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan b/commons/stan_files/bandit4arm_lapse.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm_lapse.stan rename to commons/stan_files/bandit4arm_lapse.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan b/commons/stan_files/bandit4arm_lapse_decay.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm_lapse_decay.stan rename to commons/stan_files/bandit4arm_lapse_decay.stan diff --git a/Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan b/commons/stan_files/bandit4arm_singleA_lapse.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bandit4arm_singleA_lapse.stan rename to commons/stan_files/bandit4arm_singleA_lapse.stan diff --git a/Python/hbayesdm/common/stan_files/bart_par4.stan b/commons/stan_files/bart_par4.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/bart_par4.stan rename to commons/stan_files/bart_par4.stan diff --git a/Python/hbayesdm/common/stan_files/choiceRT_ddm.stan b/commons/stan_files/choiceRT_ddm.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/choiceRT_ddm.stan rename to commons/stan_files/choiceRT_ddm.stan diff --git a/Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan b/commons/stan_files/choiceRT_ddm_single.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/choiceRT_ddm_single.stan rename to commons/stan_files/choiceRT_ddm_single.stan diff --git a/Python/hbayesdm/common/stan_files/choiceRT_lba.stan b/commons/stan_files/choiceRT_lba.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/choiceRT_lba.stan rename to commons/stan_files/choiceRT_lba.stan diff --git a/Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan b/commons/stan_files/choiceRT_lba_single.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/choiceRT_lba_single.stan rename to 
commons/stan_files/choiceRT_lba_single.stan diff --git a/Python/hbayesdm/common/stan_files/cra_exp.stan b/commons/stan_files/cra_exp.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/cra_exp.stan rename to commons/stan_files/cra_exp.stan diff --git a/Python/hbayesdm/common/stan_files/cra_linear.stan b/commons/stan_files/cra_linear.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/cra_linear.stan rename to commons/stan_files/cra_linear.stan diff --git a/Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan b/commons/stan_files/dbdm_prob_weight.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dbdm_prob_weight.stan rename to commons/stan_files/dbdm_prob_weight.stan diff --git a/Python/hbayesdm/common/stan_files/dd_cs.stan b/commons/stan_files/dd_cs.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dd_cs.stan rename to commons/stan_files/dd_cs.stan diff --git a/Python/hbayesdm/common/stan_files/dd_cs_single.stan b/commons/stan_files/dd_cs_single.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dd_cs_single.stan rename to commons/stan_files/dd_cs_single.stan diff --git a/Python/hbayesdm/common/stan_files/dd_exp.stan b/commons/stan_files/dd_exp.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dd_exp.stan rename to commons/stan_files/dd_exp.stan diff --git a/Python/hbayesdm/common/stan_files/dd_hyperbolic.stan b/commons/stan_files/dd_hyperbolic.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dd_hyperbolic.stan rename to commons/stan_files/dd_hyperbolic.stan diff --git a/Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan b/commons/stan_files/dd_hyperbolic_single.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/dd_hyperbolic_single.stan rename to commons/stan_files/dd_hyperbolic_single.stan diff --git a/Python/hbayesdm/common/stan_files/gng_m1.stan b/commons/stan_files/gng_m1.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/gng_m1.stan rename to commons/stan_files/gng_m1.stan diff --git a/Python/hbayesdm/common/stan_files/gng_m2.stan b/commons/stan_files/gng_m2.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/gng_m2.stan rename to commons/stan_files/gng_m2.stan diff --git a/Python/hbayesdm/common/stan_files/gng_m3.stan b/commons/stan_files/gng_m3.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/gng_m3.stan rename to commons/stan_files/gng_m3.stan diff --git a/Python/hbayesdm/common/stan_files/gng_m4.stan b/commons/stan_files/gng_m4.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/gng_m4.stan rename to commons/stan_files/gng_m4.stan diff --git a/Python/hbayesdm/common/stan_files/igt_orl.stan b/commons/stan_files/igt_orl.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/igt_orl.stan rename to commons/stan_files/igt_orl.stan diff --git a/Python/hbayesdm/common/stan_files/igt_pvl_decay.stan b/commons/stan_files/igt_pvl_decay.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/igt_pvl_decay.stan rename to commons/stan_files/igt_pvl_decay.stan diff --git a/Python/hbayesdm/common/stan_files/igt_pvl_delta.stan b/commons/stan_files/igt_pvl_delta.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/igt_pvl_delta.stan rename to commons/stan_files/igt_pvl_delta.stan diff --git a/Python/hbayesdm/common/stan_files/igt_vpp.stan 
b/commons/stan_files/igt_vpp.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/igt_vpp.stan rename to commons/stan_files/igt_vpp.stan diff --git a/Python/hbayesdm/common/stan_files/peer_ocu.stan b/commons/stan_files/peer_ocu.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/peer_ocu.stan rename to commons/stan_files/peer_ocu.stan diff --git a/Python/hbayesdm/common/stan_files/pre/license.stan b/commons/stan_files/pre/license.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/pre/license.stan rename to commons/stan_files/pre/license.stan diff --git a/Python/hbayesdm/common/stan_files/prl_ewa.stan b/commons/stan_files/prl_ewa.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_ewa.stan rename to commons/stan_files/prl_ewa.stan diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious.stan b/commons/stan_files/prl_fictitious.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_fictitious.stan rename to commons/stan_files/prl_fictitious.stan diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan b/commons/stan_files/prl_fictitious_multipleB.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_fictitious_multipleB.stan rename to commons/stan_files/prl_fictitious_multipleB.stan diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan b/commons/stan_files/prl_fictitious_rp.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_fictitious_rp.stan rename to commons/stan_files/prl_fictitious_rp.stan diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan b/commons/stan_files/prl_fictitious_rp_woa.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_fictitious_rp_woa.stan rename to commons/stan_files/prl_fictitious_rp_woa.stan diff --git a/Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan b/commons/stan_files/prl_fictitious_woa.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_fictitious_woa.stan rename to commons/stan_files/prl_fictitious_woa.stan diff --git a/Python/hbayesdm/common/stan_files/prl_rp.stan b/commons/stan_files/prl_rp.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_rp.stan rename to commons/stan_files/prl_rp.stan diff --git a/Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan b/commons/stan_files/prl_rp_multipleB.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/prl_rp_multipleB.stan rename to commons/stan_files/prl_rp_multipleB.stan diff --git a/Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan b/commons/stan_files/pst_gainloss_Q.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/pst_gainloss_Q.stan rename to commons/stan_files/pst_gainloss_Q.stan diff --git a/Python/hbayesdm/common/stan_files/ra_noLA.stan b/commons/stan_files/ra_noLA.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ra_noLA.stan rename to commons/stan_files/ra_noLA.stan diff --git a/Python/hbayesdm/common/stan_files/ra_noRA.stan b/commons/stan_files/ra_noRA.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ra_noRA.stan rename to commons/stan_files/ra_noRA.stan diff --git a/Python/hbayesdm/common/stan_files/ra_prospect.stan b/commons/stan_files/ra_prospect.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ra_prospect.stan rename to commons/stan_files/ra_prospect.stan diff --git 
a/Python/hbayesdm/common/stan_files/rdt_happiness.stan b/commons/stan_files/rdt_happiness.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/rdt_happiness.stan rename to commons/stan_files/rdt_happiness.stan diff --git a/Python/hbayesdm/common/stan_files/ts_par4.stan b/commons/stan_files/ts_par4.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ts_par4.stan rename to commons/stan_files/ts_par4.stan diff --git a/Python/hbayesdm/common/stan_files/ts_par6.stan b/commons/stan_files/ts_par6.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ts_par6.stan rename to commons/stan_files/ts_par6.stan diff --git a/Python/hbayesdm/common/stan_files/ts_par7.stan b/commons/stan_files/ts_par7.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ts_par7.stan rename to commons/stan_files/ts_par7.stan diff --git a/Python/hbayesdm/common/stan_files/ug_bayes.stan b/commons/stan_files/ug_bayes.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ug_bayes.stan rename to commons/stan_files/ug_bayes.stan diff --git a/Python/hbayesdm/common/stan_files/ug_delta.stan b/commons/stan_files/ug_delta.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/ug_delta.stan rename to commons/stan_files/ug_delta.stan diff --git a/Python/hbayesdm/common/stan_files/wcs_sql.stan b/commons/stan_files/wcs_sql.stan similarity index 100% rename from Python/hbayesdm/common/stan_files/wcs_sql.stan rename to commons/stan_files/wcs_sql.stan From 403426dd01ab31bb35f5a20464f645258be4d81b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 23:15:08 +0900 Subject: [PATCH 099/163] Update Rd --- R/man/extract_ic.Rd | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/R/man/extract_ic.Rd b/R/man/extract_ic.Rd index db2f5d80..2ae1361a 100644 --- a/R/man/extract_ic.Rd +++ b/R/man/extract_ic.Rd @@ -4,10 +4,10 @@ \alias{extract_ic} \title{Extract Model Comparison Estimates} \usage{ -extract_ic(modelData = NULL, ic = "looic", ncore = 2) +extract_ic(model_data = NULL, ic = "looic", ncore = 2) } \arguments{ -\item{modelData}{Object returned by \code{'hBayesDM'} model function} +\item{model_data}{Object returned by \code{'hBayesDM'} model function} \item{ic}{Information Criterion. 
'looic', 'waic', or 'both'} From bd89ab23246bc3b2dae2528b1cbd8376e7d80655 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 23:17:36 +0900 Subject: [PATCH 100/163] No need to check sync now --- .travis.yml | 2 -- travis/script.sh | 5 ----- 2 files changed, 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index b16d029c..7a4321d2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -35,8 +35,6 @@ addons: matrix: include: - - name: 'Test sync on models and data' - env: TARGET='Sync' - name: 'Test R codes' env: TARGET='R' - name: 'Test Python codes (Python 3.5)' diff --git a/travis/script.sh b/travis/script.sh index f77a10dd..485906c2 100755 --- a/travis/script.sh +++ b/travis/script.sh @@ -9,11 +9,6 @@ if [ "$TARGET" = "R" ]; then elif [ "$TARGET" = "Python" ]; then travis_wait 30 pytest tests/test_ra_prospect.py -# Check sync for models and data -elif [ "$TARGET" = "Sync" ]; then - diff -r Python/hbayesdm/common/extdata R/inst/extdata - diff -r Python/hbayesdm/common/stan_files R/inst/stan_files - # Otherwise else echo 'No script required' From 695d09230247a57fbea21e486f1db8ad5a3c6940 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 22 Aug 2019 23:35:04 +0900 Subject: [PATCH 101/163] Replace os.path with pathlib --- Python/hbayesdm/base.py | 12 +++++++----- Python/hbayesdm/preprocess_funcs.py | 4 ++-- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 18e3a8b4..1cb068bf 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import pickle import multiprocessing from abc import ABCMeta, abstractmethod @@ -16,7 +17,8 @@ __all__ = ['TaskModel'] -_common = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'common') +PATH_ROOT = Path(__file__).absolute().parent +PATH_COMMON = PATH_ROOT / 'common' class TaskModel(metaclass=ABCMeta): @@ -267,7 +269,7 @@ def _handle_data_args(self, else: filename = '%s_%s_exampleData.txt' % ( self.task_name, self.model_type) - example_data = os.path.join(_common, 'extdata', filename) + example_data = PATH_COMMON / 'extdata' / filename raw_data = pd.read_csv(example_data, sep='\t') # Save initial column names of raw data for later @@ -652,8 +654,8 @@ def _designate_stan_model(self, model: str) -> StanModel: sm Compiled StanModel obj to use for sampling & fitting. 
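Patch 101 above, besides moving to pathlib, preserves base.py's compile-once caching of Stan models: a compiled StanModel is pickled under a name keyed by model and PyStan version, so later fits skip C++ compilation. A simplified sketch of that pattern (the original's handling of a failed cache load is omitted):

```python
import os
import pickle
from pystan import StanModel

def cached_stan_model(model, model_path, include_paths, pystan_version):
    cache_file = 'cached-%s-pystan_%s.pkl' % (model, pystan_version)
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as f:
            sm = pickle.load(f)        # reuse the compiled model
    else:
        sm = StanModel(file=model_path, model_name=model,
                       include_paths=include_paths)
        with open(cache_file, 'wb') as f:
            pickle.dump(sm, f)         # cache for subsequent runs
    return sm
```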
""" - stan_files = os.path.join(_common, 'stan_files') - model_path = os.path.join(stan_files, model + '.stan') + PATH_STAN = PATH_COMMON / 'stan_files' + model_path = PATH_STAN / (model + '.stan') cache_file = 'cached-%s-pystan_%s.pkl' % (model, _pystan_version) if os.path.exists(cache_file): @@ -675,7 +677,7 @@ def _designate_stan_model(self, model: str) -> StanModel: print('Using cached StanModel:', cache_file) else: sm = StanModel(file=model_path, model_name=model, - include_paths=[stan_files]) + include_paths=[str(PATH_STAN)]) with open(cache_file, 'wb') as f: pickle.dump(sm, f) diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py index ffe8e000..c3ec34c1 100644 --- a/Python/hbayesdm/preprocess_funcs.py +++ b/Python/hbayesdm/preprocess_funcs.py @@ -2,7 +2,7 @@ import numpy as np import pandas as pd -from hbayesdm.base import _common +from hbayesdm.base import PATH_COMMON def bandit2arm_preprocess_func(self, raw_data, general_info, additional_args): @@ -811,7 +811,7 @@ def wcs_preprocess_func(self, raw_data, general_info, additional_args): t_max = 128 # Read from predefined answer sheet - answersheet = os.path.join(_common, 'extdata', 'wcs_answersheet.txt') + answersheet = PATH_COMMON / 'extdata' / 'wcs_answersheet.txt' answer = pd.read_csv( answersheet, sep='\t', header=0, index_col=0).to_numpy() - 1 From 75f4996144907ca9140529b6980cdb13e5830446 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 00:09:23 +0900 Subject: [PATCH 102/163] Fix codes for symbolic links --- Python/hbayesdm/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 1cb068bf..276184f2 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -19,6 +19,8 @@ PATH_ROOT = Path(__file__).absolute().parent PATH_COMMON = PATH_ROOT / 'common' +PATH_STAN = (PATH_COMMON / 'stan_files').absolute() +PATH_EXTDATA = (PATH_COMMON / 'extdata').absolute() class TaskModel(metaclass=ABCMeta): @@ -269,7 +271,7 @@ def _handle_data_args(self, else: filename = '%s_%s_exampleData.txt' % ( self.task_name, self.model_type) - example_data = PATH_COMMON / 'extdata' / filename + example_data = PATH_EXTDATA / filename raw_data = pd.read_csv(example_data, sep='\t') # Save initial column names of raw data for later @@ -654,7 +656,6 @@ def _designate_stan_model(self, model: str) -> StanModel: sm Compiled StanModel obj to use for sampling & fitting. 
""" - PATH_STAN = PATH_COMMON / 'stan_files' model_path = PATH_STAN / (model + '.stan') cache_file = 'cached-%s-pystan_%s.pkl' % (model, _pystan_version) From c013554f0e40175e2a95800aa696b72ba56a5ecd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 00:24:48 +0900 Subject: [PATCH 103/163] Use resolved path --- Python/hbayesdm/base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 276184f2..2cd3ffe1 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -19,8 +19,8 @@ PATH_ROOT = Path(__file__).absolute().parent PATH_COMMON = PATH_ROOT / 'common' -PATH_STAN = (PATH_COMMON / 'stan_files').absolute() -PATH_EXTDATA = (PATH_COMMON / 'extdata').absolute() +PATH_STAN = (PATH_COMMON / 'stan_files').resolve() +PATH_EXTDATA = (PATH_COMMON / 'extdata').resolve() class TaskModel(metaclass=ABCMeta): From d30e9ec99b9ababe423f9a75992d3771340f186b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 01:01:36 +0900 Subject: [PATCH 104/163] Modify MANIFEST to recursively include *.stan and *.txt files --- Python/MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/Python/MANIFEST.in b/Python/MANIFEST.in index 2d9dd84b..d5ba5547 100644 --- a/Python/MANIFEST.in +++ b/Python/MANIFEST.in @@ -1,4 +1,5 @@ include README.rst include LICENSE +recursive-include hbayesdm/common *.stan *.txt graft hbayesdm/common exclude hbayesdm/version.py From 3255e1f5c448b9dcd21e69734e24f5ded1a02e14 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 01:06:44 +0900 Subject: [PATCH 105/163] Use to install hbayesdm-py --- travis/setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/travis/setup.sh b/travis/setup.sh index 9ba855c5..5144a74d 100755 --- a/travis/setup.sh +++ b/travis/setup.sh @@ -59,7 +59,7 @@ elif [ "$TARGET" = "Python" ]; then # Install dependencies pip install -r requirements.txt --upgrade - pip install . + python setup.py install # Otherwise else From 6c00c07069608bcc9f8470f6222bb1cce5b4dcaa Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 01:11:10 +0900 Subject: [PATCH 106/163] Convert the path for a stan file as a string --- Python/hbayesdm/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 2cd3ffe1..54da6b49 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -656,7 +656,7 @@ def _designate_stan_model(self, model: str) -> StanModel: sm Compiled StanModel obj to use for sampling & fitting. 
""" - model_path = PATH_STAN / (model + '.stan') + model_path = str(PATH_STAN / (model + '.stan')) cache_file = 'cached-%s-pystan_%s.pkl' % (model, _pystan_version) if os.path.exists(cache_file): From 9fab693dca6d36b4cbab28ed62667ea87e7ff1f5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 01:30:02 +0900 Subject: [PATCH 107/163] Re-run pytest up to 5 times --- Python/docs/requirements.txt | 13 +++++++------ Python/requirements.txt | 11 ++++++----- travis/script.sh | 2 +- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/Python/docs/requirements.txt b/Python/docs/requirements.txt index 291ce917..a3443e0c 100644 --- a/Python/docs/requirements.txt +++ b/Python/docs/requirements.txt @@ -1,13 +1,14 @@ -arviz -flake8 -matplotlib numpy +scipy pandas -pylint +matplotlib pystan +arviz +pylint +flake8 pytest -scipy +pytest-rerunfailures sphinx sphinx-autodoc-typehints -sphinx_rtd_theme +sphinx-rtd-theme ./Python diff --git a/Python/requirements.txt b/Python/requirements.txt index e14c2660..1d533008 100644 --- a/Python/requirements.txt +++ b/Python/requirements.txt @@ -1,12 +1,13 @@ -arviz -flake8 -matplotlib numpy +scipy pandas -pylint +matplotlib pystan +arviz +pylint +flake8 pytest -scipy +pytest-rerunfailures sphinx sphinx-autodoc-typehints sphinx-rtd-theme diff --git a/travis/script.sh b/travis/script.sh index 485906c2..ba59fe38 100755 --- a/travis/script.sh +++ b/travis/script.sh @@ -7,7 +7,7 @@ if [ "$TARGET" = "R" ]; then # Scripts for Python elif [ "$TARGET" = "Python" ]; then - travis_wait 30 pytest tests/test_ra_prospect.py + travis_wait 30 pytest tests/test_ra_prospect.py --reruns 5 # Otherwise else From f08e1d379c27ac1f88039d7bdb8c914fc66f849c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 10:45:03 +0900 Subject: [PATCH 108/163] Update README for R --- R/README.Rmd | 6 +++--- R/README.md | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/R/README.Rmd b/R/README.Rmd index 59334f72..459f7472 100644 --- a/R/README.Rmd +++ b/R/README.Rmd @@ -46,16 +46,16 @@ or you can also install from GitHub with: # `devtools` is required to install hBayesDM from GitHub if (!require(devtools)) install.packages("devtools") -devtools::install_github("CCS-Lab/hBayesDM/R") +devtools::install_github("CCS-Lab/hBayesDM", subdir="R") ``` -If you want to use the lastest *development* version of hBayesDM, run the following in R: +If you want to use the latest *development* version of hBayesDM, run the following in R: ```r # `devtools` is required to install hBayesDM from GitHub if (!require(devtools)) install.packages("devtools") -devtools::install_github("CCS-Lab/hBayesDM/R@develop") +devtools::install_github("CCS-Lab/hBayesDM", ref="develop", subdir="R") ``` ### Building at once diff --git a/R/README.md b/R/README.md index 159bfeee..7fc55125 100644 --- a/R/README.md +++ b/R/README.md @@ -50,17 +50,17 @@ or you can also install from GitHub with: # `devtools` is required to install hBayesDM from GitHub if (!require(devtools)) install.packages("devtools") -devtools::install_github("CCS-Lab/hBayesDM/R") +devtools::install_github("CCS-Lab/hBayesDM", subdir="R") ``` -If you want to use the lastest *development* version of hBayesDM, run -the following in R: +If you want to use the latest *development* version of hBayesDM, run the +following in R: ``` r # `devtools` is required to install hBayesDM from GitHub if (!require(devtools)) install.packages("devtools") -devtools::install_github("CCS-Lab/hBayesDM/R@develop") 
+devtools::install_github("CCS-Lab/hBayesDM", ref="develop", subdir="R") ``` ### Building at once From 53c19a94d4a68cd6134b386f6fe7d431192d4f64 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 10:50:50 +0900 Subject: [PATCH 109/163] Do not export the current version of choiceRT_lba models --- R/NAMESPACE | 2 -- R/R/choiceRT_lba.R | 2 -- R/R/choiceRT_lba_single.R | 2 -- 3 files changed, 6 deletions(-) diff --git a/R/NAMESPACE b/R/NAMESPACE index 983d7a4f..a1066198 100644 --- a/R/NAMESPACE +++ b/R/NAMESPACE @@ -12,8 +12,6 @@ export(bandit4arm_singleA_lapse) export(bart_par4) export(choiceRT_ddm) export(choiceRT_ddm_single) -export(choiceRT_lba) -export(choiceRT_lba_single) export(cra_exp) export(cra_linear) export(dbdm_prob_weight) diff --git a/R/R/choiceRT_lba.R b/R/R/choiceRT_lba.R index 8e69d54e..dbec2044 100644 --- a/R/R/choiceRT_lba.R +++ b/R/R/choiceRT_lba.R @@ -84,8 +84,6 @@ #' more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the #' \href{http://mc-stan.org/documentation/}{Stan User's Manual} for a less technical description of these arguments. #' -#' @export -#' #' @references #' Brown, S. D., & Heathcote, A. (2008). The simplest complete model of choice response time: Linear ballistic accumulation. #' Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002 diff --git a/R/R/choiceRT_lba_single.R b/R/R/choiceRT_lba_single.R index 1800fd8f..37b63b96 100644 --- a/R/R/choiceRT_lba_single.R +++ b/R/R/choiceRT_lba_single.R @@ -84,8 +84,6 @@ #' more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the #' \href{http://mc-stan.org/documentation/}{Stan User's Manual} for a less technical description of these arguments. #' -#' @export -#' #' @references #' Brown, S. D., & Heathcote, A. (2008). The simplest complete model of choice response time: Linear ballistic accumulation. #' Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002 From 23cf973cb15d0f8eb442609b4c8cb16faa73dbfd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 10:55:22 +0900 Subject: [PATCH 110/163] Add a keyword internal not to generate docs --- R/R/choiceRT_lba.R | 2 ++ R/R/choiceRT_lba_single.R | 2 ++ 2 files changed, 4 insertions(+) diff --git a/R/R/choiceRT_lba.R b/R/R/choiceRT_lba.R index dbec2044..d92f3acd 100644 --- a/R/R/choiceRT_lba.R +++ b/R/R/choiceRT_lba.R @@ -84,6 +84,8 @@ #' more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the #' \href{http://mc-stan.org/documentation/}{Stan User's Manual} for a less technical description of these arguments. #' +#' @keywords internal +#' #' @references #' Brown, S. D., & Heathcote, A. (2008). The simplest complete model of choice response time: Linear ballistic accumulation. #' Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002 diff --git a/R/R/choiceRT_lba_single.R b/R/R/choiceRT_lba_single.R index 37b63b96..51a81193 100644 --- a/R/R/choiceRT_lba_single.R +++ b/R/R/choiceRT_lba_single.R @@ -84,6 +84,8 @@ #' more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the #' \href{http://mc-stan.org/documentation/}{Stan User's Manual} for a less technical description of these arguments. #' +#' @keywords internal +#' #' @references #' Brown, S. D., & Heathcote, A. (2008). 
The simplest complete model of choice response time: Linear ballistic accumulation. #' Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002 From 120ad3106dd2ed75ac373199ef0b6c68f42445f8 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 23 Aug 2019 11:13:06 +0900 Subject: [PATCH 111/163] Re-generate docs with pkgdown --- R/docs/LICENSE-text.html | 796 --------- R/docs/authors.html | 3 + R/docs/index.html | 55 +- R/docs/news/index.html | 15 +- R/docs/pkgdown.yml | 2 +- R/docs/reference/HDIofMCMC.html | 3 + R/docs/reference/bandit2arm.html | 333 ---- R/docs/reference/bandit2arm_delta.html | 15 +- R/docs/reference/bandit4arm.html | 1029 ----------- R/docs/reference/bandit4arm2.html | 334 ---- .../reference/bandit4arm2_kalman_filter.html | 15 +- R/docs/reference/bandit4arm_2par_lapse.html | 15 +- R/docs/reference/bandit4arm_4par.html | 15 +- R/docs/reference/bandit4arm_lapse.html | 15 +- R/docs/reference/bandit4arm_lapse_decay.html | 15 +- .../reference/bandit4arm_singleA_lapse.html | 15 +- R/docs/reference/bart.html | 334 ---- R/docs/reference/bart_par4.html | 15 +- R/docs/reference/choiceRT.html | 518 ------ R/docs/reference/choiceRT_ddm.html | 3 + R/docs/reference/choiceRT_ddm_single.html | 3 + R/docs/reference/choiceRT_lba.html | 3 + R/docs/reference/choiceRT_lba_single.html | 3 + R/docs/reference/cra.html | 515 ------ R/docs/reference/cra_exp.html | 15 +- R/docs/reference/cra_linear.html | 15 +- R/docs/reference/dbdm.html | 341 ---- R/docs/reference/dbdm_prob_weight.html | 15 +- R/docs/reference/dd.html | 1039 ----------- R/docs/reference/dd_cs.html | 15 +- R/docs/reference/dd_cs_single.html | 15 +- R/docs/reference/dd_exp.html | 15 +- R/docs/reference/dd_hyperbolic.html | 15 +- R/docs/reference/dd_hyperbolic_single.html | 15 +- R/docs/reference/estimate_mode.html | 3 + R/docs/reference/extract_ic.html | 7 +- R/docs/reference/gng.html | 855 --------- R/docs/reference/gng_m1.html | 15 +- R/docs/reference/gng_m2.html | 15 +- R/docs/reference/gng_m3.html | 15 +- R/docs/reference/gng_m4.html | 15 +- R/docs/reference/hBayesDM-package.html | 3 + R/docs/reference/hBayesDM_model.html | 3 + R/docs/reference/igt.html | 861 --------- R/docs/reference/igt_orl.html | 3 + R/docs/reference/igt_pvl_decay.html | 3 + R/docs/reference/igt_pvl_delta.html | 3 + R/docs/reference/igt_vpp.html | 3 + R/docs/reference/index.html | 15 +- R/docs/reference/multiplot.html | 3 + R/docs/reference/peer.html | 339 ---- R/docs/reference/peer_ocu.html | 15 +- R/docs/reference/plot.hBayesDM.html | 3 + R/docs/reference/plotDist.html | 3 + R/docs/reference/plotHDI.html | 3 + R/docs/reference/plotInd.html | 3 + R/docs/reference/printFit.html | 3 + R/docs/reference/prl.html | 1563 ----------------- R/docs/reference/prl_ewa.html | 15 +- R/docs/reference/prl_fictitious.html | 15 +- .../reference/prl_fictitious_multipleB.html | 15 +- R/docs/reference/prl_fictitious_rp.html | 15 +- R/docs/reference/prl_fictitious_rp_woa.html | 15 +- R/docs/reference/prl_fictitious_woa.html | 15 +- R/docs/reference/prl_rp.html | 15 +- R/docs/reference/prl_rp_multipleB.html | 15 +- R/docs/reference/pst.html | 334 ---- R/docs/reference/pst_gainloss_Q.html | 15 +- R/docs/reference/ra.html | 684 -------- R/docs/reference/ra_noLA.html | 15 +- R/docs/reference/ra_noRA.html | 15 +- R/docs/reference/ra_prospect.html | 15 +- R/docs/reference/rdt.html | 340 ---- R/docs/reference/rdt_happiness.html | 15 +- R/docs/reference/rhat.html | 3 + R/docs/reference/ts.html | 691 -------- R/docs/reference/ts_par4.html | 3 + 
R/docs/reference/ts_par6.html | 3 + R/docs/reference/ts_par7.html | 3 + R/docs/reference/ug.html | 505 ------ R/docs/reference/ug_bayes.html | 15 +- R/docs/reference/ug_delta.html | 15 +- R/docs/reference/wcs.html | 334 ---- R/docs/reference/wcs_sql.html | 15 +- R/man/choiceRT_lba.Rd | 1 + R/man/choiceRT_lba_single.Rd | 1 + 86 files changed, 266 insertions(+), 12199 deletions(-) delete mode 100644 R/docs/LICENSE-text.html delete mode 100644 R/docs/reference/bandit2arm.html delete mode 100644 R/docs/reference/bandit4arm.html delete mode 100644 R/docs/reference/bandit4arm2.html delete mode 100644 R/docs/reference/bart.html delete mode 100644 R/docs/reference/choiceRT.html delete mode 100644 R/docs/reference/cra.html delete mode 100644 R/docs/reference/dbdm.html delete mode 100644 R/docs/reference/dd.html delete mode 100644 R/docs/reference/gng.html delete mode 100644 R/docs/reference/igt.html delete mode 100644 R/docs/reference/peer.html delete mode 100644 R/docs/reference/prl.html delete mode 100644 R/docs/reference/pst.html delete mode 100644 R/docs/reference/ra.html delete mode 100644 R/docs/reference/rdt.html delete mode 100644 R/docs/reference/ts.html delete mode 100644 R/docs/reference/ug.html delete mode 100644 R/docs/reference/wcs.html diff --git a/R/docs/LICENSE-text.html b/R/docs/LICENSE-text.html deleted file mode 100644 index 768354e0..00000000 --- a/R/docs/LICENSE-text.html +++ /dev/null @@ -1,796 +0,0 @@ - - - - - - - - -License • hBayesDM - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
[Deleted page contents omitted: the pkgdown-rendered license page, i.e. the full text of the GNU General Public License v3 as applied to hBayesDM (Copyright (C) 2018 CCS-Lab), plus page navigation and the "Site built with pkgdown 1.3.0" footer.]
diff --git a/R/docs/authors.html b/R/docs/authors.html index 9f30df5c..e8f4aec2 100644 --- a/R/docs/authors.html +++ b/R/docs/authors.html @@ -74,6 +74,9 @@
  • Reference +
  • +
  • + Changelog
  • diff --git a/R/docs/index.html b/R/docs/index.html index 17c562c5..ef1226c3 100644 --- a/R/docs/index.html +++ b/R/docs/index.html @@ -48,6 +48,9 @@
  • Reference +
  • +
  • + Changelog
  • +

    If you want to use the latest development version of hBayesDM, run the following in R:

    -
    -

    -Building at once

    +devtools::install_github("CCS-Lab/hBayesDM", ref="develop", subdir="R")
    +
    +
    +

    +Building at once

By default, you will have to wait for compilation when you run each model for the first time. If you plan on running several different models and want to pre-build all models during installation time, set an environment variable BUILD_ALL to true, like the following. We highly recommend you only do so when you have multiple cores available, since building all models at once takes quite a long time to complete.

    Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
     Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 cores for build (or any other number you want)
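For orientation, a complete pre-built installation following this README might look like the sketch below. It is illustrative only, not part of the diff: it simply combines the two environment variables above with the GitHub install command from the installation section (using the subdir argument introduced in PATCH 108):

``` r
# Sketch: pre-build all Stan models while installing from GitHub
# `devtools` is required to install hBayesDM from GitHub
if (!require(devtools)) install.packages("devtools")

Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 parallel jobs for compilation

devtools::install_github("CCS-Lab/hBayesDM", subdir = "R")
```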
    @@ -113,24 +132,6 @@ 

    devtools::install_github("CCS-Lab/hBayesDM/R") # Install from GitHub

    - -

    Citation

    diff --git a/R/docs/news/index.html b/R/docs/news/index.html index fd154add..372017fa 100644 --- a/R/docs/news/index.html +++ b/R/docs/news/index.html @@ -60,7 +60,7 @@ hBayesDM - 0.7.2 + 0.7.2.9000
    @@ -81,7 +81,12 @@ @@ -95,7 +100,7 @@
    @@ -124,7 +129,7 @@

  • Now, by default, a Stan file is built into a binary the first time you use it. To build all the models on installation, set the environment variable BUILD_ALL to true before installation.
  • Now all the implemented models are refactored using hBayesDM_model function. You don’t have to change anything to use them, but developers can easily implement new models now!
  • We added a Kalman filter model for 4-armed bandit task (bandit4arm2_kalman_filter; Daw et al., 2006) and a probability weighting function for general description-based tasks (dbdm_prob_weight; Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008).
  • -
  • Initial values of parameter estimation for some models are updated as plausible values, and the parameter boundaries of several models are fixed (see more on issue #63 and #64 in Github).
  • +
  • Initial values of parameter estimation for some models are updated as plausible values, and the parameter boundaries of several models are fixed (see more on issue #63 and #64 in Github).
  • Exponential and linear models for choice under risk and ambiguity task now have four model regressors: sv, sv_fix, sv_var, and p_var.
  • Fix the Travix CI settings and related codes to be properly passed.
  • @@ -143,7 +148,7 @@

    hBayesDM 0.6.2 Unreleased

      -
    • Fix an error on choiceRT_ddm (#44)
    • +
    • Fix an error on choiceRT_ddm (#44)
    diff --git a/R/docs/pkgdown.yml b/R/docs/pkgdown.yml index 254fe914..1996019a 100644 --- a/R/docs/pkgdown.yml +++ b/R/docs/pkgdown.yml @@ -1,4 +1,4 @@ -pandoc: 2.3.1 +pandoc: 2.2.3.2 pkgdown: 1.3.0 pkgdown_sha: ~ articles: [] diff --git a/R/docs/reference/HDIofMCMC.html b/R/docs/reference/HDIofMCMC.html index 58a623a5..f74c1071 100644 --- a/R/docs/reference/HDIofMCMC.html +++ b/R/docs/reference/HDIofMCMC.html @@ -78,6 +78,9 @@
  • Reference +
  • +
  • + Changelog
diff --git a/R/docs/reference/bandit2arm.html b/R/docs/reference/bandit2arm.html
deleted file mode 100644
index 4b832c75..00000000
--- a/R/docs/reference/bandit2arm.html
+++ /dev/null
@@ -1,333 +0,0 @@
 [deleted: pkgdown reference page "Rescorla-Wagner (Delta) Model — bandit2arm_delta" for the 2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004), with parameters "A" (learning rate) and "tau" (inverse temperature). Its content duplicated bandit2arm_delta.html; the 333 lines of generated HTML are omitted.]
diff --git a/R/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html
index ce5e3d51..da32d282 100644
--- a/R/docs/reference/bandit2arm_delta.html
+++ b/R/docs/reference/bandit2arm_delta.html
@@ -82,6 +82,9 @@
 [navbar: a "Changelog" link is added after "Reference"]
@@ -209,17 +212,7 @@
 [Arguments table: a duplicated closing block after the "..." entry ("For this model, there is no model-specific argument.") is collapsed into a single table footer]

diff --git a/R/docs/reference/bandit4arm.html b/R/docs/reference/bandit4arm.html
deleted file mode 100644
index 109e6ae5..00000000
--- a/R/docs/reference/bandit4arm.html
+++ /dev/null
@@ -1,1029 +0,0 @@
 [deleted: combined pkgdown reference page for the five 4-Armed Bandit Task models:
  bandit4arm_2par_lapse — "Arew", "Apun", "xi" (Aylward et al., 2018);
  bandit4arm_4par — "Arew", "Apun", "R", "P" (Seymour et al., 2012);
  bandit4arm_lapse — "Arew", "Apun", "R", "P", "xi" (Seymour et al., 2012);
  bandit4arm_lapse_decay — adds decay rate "d" (Niv et al., 2015; Aylward et al., 2018);
  bandit4arm_singleA_lapse — single learning rate "A", plus "R", "P", "xi" (Aylward et al., 2018).
  Its content duplicated the per-model reference pages; the 1029 lines of generated HTML are omitted.]
    - - - - - - diff --git a/R/docs/reference/bandit4arm2.html b/R/docs/reference/bandit4arm2.html deleted file mode 100644 index e90f7731..00000000 --- a/R/docs/reference/bandit4arm2.html +++ /dev/null @@ -1,334 +0,0 @@ - - - - - - - - -Kalman Filter — bandit4arm2_kalman_filter • hBayesDM - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) using <$= MODEL_NAME -It has the following parameters: "lambda" (decay factor), "theta" (decay center), "beta" (inverse softmax temperature), "mu0" (anticipated initial mean of all 4 options), "sigma0" (anticipated initial sd (uncertainty factor) of all 4 options), "sigmaD" (sd of diffusion noise).

    - - -
    - -
    bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bandit4arm2_kalman_filter").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the 4-Armed Bandit Task (modified), there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bandit4arm2_kalman_filter("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html index 6e1f07d0..fe42cab6 100644 --- a/R/docs/reference/bandit4arm2_kalman_filter.html +++ b/R/docs/reference/bandit4arm2_kalman_filter.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html index 8e980adf..ede180aa 100644 --- a/R/docs/reference/bandit4arm_2par_lapse.html +++ b/R/docs/reference/bandit4arm_2par_lapse.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html index 51cfc179..98c401a8 100644 --- a/R/docs/reference/bandit4arm_4par.html +++ b/R/docs/reference/bandit4arm_4par.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html index 06784806..e25fc19f 100644 --- a/R/docs/reference/bandit4arm_lapse.html +++ b/R/docs/reference/bandit4arm_lapse.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html index c93d7073..3a06139a 100644 --- a/R/docs/reference/bandit4arm_lapse_decay.html +++ b/R/docs/reference/bandit4arm_lapse_decay.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html index fcb66468..11394abb 100644 --- a/R/docs/reference/bandit4arm_singleA_lapse.html +++ b/R/docs/reference/bandit4arm_singleA_lapse.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/bart.html b/R/docs/reference/bart.html deleted file mode 100644 index 7d8385ff..00000000 --- a/R/docs/reference/bart.html +++ /dev/null @@ -1,334 +0,0 @@
-Re-parameterized version of BART Model with 4 parameters (Ravenzwaaij et al., 2011) — bart_par4 • hBayesDM

Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task using bart_par4. It has the following parameters: "phi" (prior belief of balloon not bursting), "eta" (updating rate), "gam" (risk-taking parameter), "tau" (inverse temperature).

    - - -
    - -
    bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "pumps", "explosion". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("bart_par4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Balloon Analogue Risk Task, there should be 3 columns of data with the - labels "subjID", "pumps", "explosion". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    pumps

    The number of pumps.

    -
    explosion

    0: intact, 1: burst

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
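For illustration, a minimal sketch of a valid data set written from R (subject IDs, pump counts, and burst indicators are hypothetical; the file name is arbitrary):

    dat <- data.frame(subjID    = c(101, 101, 102),
                      pumps     = c(3, 7, 5),     # number of pumps on each trial
                      explosion = c(0, 1, 0))     # 0: intact, 1: burst
    write.table(dat, "bart_data.txt", sep = "\t", row.names = FALSE, quote = FALSE)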

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- bart_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html index c62b271e..f9756dfc 100644 --- a/R/docs/reference/bart_par4.html +++ b/R/docs/reference/bart_par4.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/choiceRT.html b/R/docs/reference/choiceRT.html deleted file mode 100644 index b6ff0eda..00000000 --- a/R/docs/reference/choiceRT.html +++ /dev/null @@ -1,518 +0,0 @@
-Drift Diffusion Model — choiceRT_ddm • hBayesDM

Hierarchical Bayesian Modeling of the Choice Reaction Time Task using choiceRT_ddm. It has the following parameters: "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    -
      -
    • Task: Choice Reaction Time Task

    • -
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • -
    - -

Individual Bayesian Modeling of the Choice Reaction Time Task using choiceRT_ddm_single. It has the following parameters: "alpha" (boundary separation), "beta" (bias), "delta" (drift rate), "tau" (non-decision time).

    -
      -
    • Task: Choice Reaction Time Task

    • -
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • -
    - -
    - -
    choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    - -

    (Not available for this model)

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    - -

    (Not available for this model)

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm_single").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Reaction Time Task, there should be 3 columns of data with the - labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
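Because RT must be in seconds, data recorded in milliseconds should be rescaled before fitting. A sketch (both file names here are hypothetical):

    dat <- read.table("choiceRT_data_ms.txt", header = TRUE, sep = "\t")
    dat$RT <- dat$RT / 1000  # convert milliseconds to seconds
    write.table(dat, "choiceRT_data_sec.txt", sep = "\t", row.names = FALSE, quote = FALSE)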

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Reaction Time Task, there should be 3 columns of data with the - labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    RT

Choice reaction time for the current trial, in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
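As a sketch of passing the model-specific RTbound argument described above (the value 0.2 is an arbitrary illustration; "example" is the bundled example keyword):

    output <- choiceRT_ddm("example", niter = 2000, nwarmup = 1000, RTbound = 0.2)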

    - -

    Note

    - -

Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    -

Note that this implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and potentially others on the Stan mailing list.

    - -

    References

    - -

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    -

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_ddm("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }# NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_ddm_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html index 14914da6..57b1b766 100644 --- a/R/docs/reference/choiceRT_ddm.html +++ b/R/docs/reference/choiceRT_ddm.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • diff --git a/R/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html index 55376920..cc5de20c 100644 --- a/R/docs/reference/choiceRT_ddm_single.html +++ b/R/docs/reference/choiceRT_ddm_single.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • diff --git a/R/docs/reference/choiceRT_lba.html b/R/docs/reference/choiceRT_lba.html index 2c7c0e0a..f1a6b2d1 100644 --- a/R/docs/reference/choiceRT_lba.html +++ b/R/docs/reference/choiceRT_lba.html @@ -83,6 +83,9 @@
  • Reference +
  • +
  • + Changelog
  • diff --git a/R/docs/reference/choiceRT_lba_single.html b/R/docs/reference/choiceRT_lba_single.html index 388dae56..f4bb4b10 100644 --- a/R/docs/reference/choiceRT_lba_single.html +++ b/R/docs/reference/choiceRT_lba_single.html @@ -83,6 +83,9 @@
  • Reference +
  • +
  • + Changelog
diff --git a/R/docs/reference/cra.html b/R/docs/reference/cra.html deleted file mode 100644 index 420b62a0..00000000 --- a/R/docs/reference/cra.html +++ /dev/null @@ -1,515 +0,0 @@
-Exponential Subjective Value Model — cra_exp • hBayesDM

Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using cra_exp. It has the following parameters: "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature).

    - - -

Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using cra_linear. It has the following parameters: "alpha" (risk attitude), "beta" (ambiguity attitude), "gamma" (inverse temperature).

    - - -
    - -
    cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -cra_linear(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_exp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_linear").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the - labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    prob

    Objective probability of the variable lottery.

    -
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
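A sketch of exporting and inspecting the model-based regressors listed above ("sv", "sv_fix", "sv_var", "p_var"); that they can be accessed by name from the returned modelRegressor list is an assumption based on the Value section:

    output <- cra_exp("example", modelRegressor = TRUE)
    str(output$modelRegressor$sv)  # trial-by-trial subjective values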

    -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the - labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    prob

    Objective probability of the variable lottery.

    -
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327

    -

    Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- cra_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }# NOT RUN {
    -# Run the model and store results in "output"
    -output <- cra_linear("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html index b33544e5..3b6fbba1 100644 --- a/R/docs/reference/cra_exp.html +++ b/R/docs/reference/cra_exp.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

    diff --git a/R/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html index c1543d12..3eae6ec4 100644 --- a/R/docs/reference/cra_linear.html +++ b/R/docs/reference/cra_linear.html @@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dbdm.html b/R/docs/reference/dbdm.html deleted file mode 100644 index 457a37ad..00000000 --- a/R/docs/reference/dbdm.html +++ /dev/null @@ -1,341 +0,0 @@
-Probability Weight Function — dbdm_prob_weight • hBayesDM

Hierarchical Bayesian Modeling of the Description Based Decision Making Task using dbdm_prob_weight. It has the following parameters: "tau" (probability weight function), "rho" (subjective utility function), "lambda" (loss aversion parameter), "beta" (inverse softmax temperature).

    -
      -
• Task: Description Based Decision Making Task

    • -
    • Model: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)

    • -
    • Contributor: Yoonseo Zoh <zohyos7@gmail.com>

    • -
    - -
    - -
    dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("dbdm_prob_weight").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
-For the Description Based Decision Making Task, there should be 8 columns of data with the labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    opt1hprob

Probability of obtaining the higher outcome (opt1hval) when choosing option 1.

    -
    opt2hprob

Probability of obtaining the higher outcome (opt2hval) when choosing option 2.

    -
    opt1hval

    Possible (with opt1hprob probability) outcome of option 1.

    -
    opt1lval

    Possible (with (1 - opt1hprob) probability) outcome of option 1.

    -
    opt2hval

    Possible (with opt2hprob probability) outcome of option 2.

    -
    opt2lval

    Possible (with (1 - opt2hprob) probability) outcome of option 2.

    -
    choice

    If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
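For illustration, a single hypothetical trial satisfying the column requirements above (all values are made up):

    dat <- data.frame(subjID = 1,
                      opt1hprob = 0.8, opt2hprob = 0.2,   # chance of the higher outcome
                      opt1hval = 10, opt1lval = 0,        # outcomes of option 1
                      opt2hval = 40, opt2lval = -5,       # outcomes of option 2
                      choice = 1)                         # option 1 chosen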

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
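As a sketch, variational inference (vb = TRUE) can serve as a fast approximate check before committing to a full MCMC run; conclusions should still be confirmed with MCMC:

    output_vb <- dbdm_prob_weight("example", vb = TRUE)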

    - -

    References

    - -

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.

    -

Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.

    -

    Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dbdm_prob_weight("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
diff --git a/R/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html
index e38bca3f..ba73d260 100644
--- a/R/docs/reference/dbdm_prob_weight.html
+++ b/R/docs/reference/dbdm_prob_weight.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dd.html b/R/docs/reference/dd.html
deleted file mode 100644
index eb41c0f0..00000000
--- a/R/docs/reference/dd.html
+++ /dev/null
@@ -1,1039 +0,0 @@
-Constant-Sensitivity (CS) Model — dd_cs • hBayesDM

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model. It has the following parameters: "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    -
      -
    • Task: Delay Discounting Task

    • -
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • -
    - -

Individual Bayesian Modeling of the Delay Discounting Task using the Constant-Sensitivity (CS) Model. It has the following parameters: "r" (exponential discounting rate), "s" (impatience), "beta" (inverse temperature).

    -
      -
    • Task: Delay Discounting Task

    • -
    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    • -
    - -

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Exponential Model. It has the following parameters: "r" (exponential discounting rate), "beta" (inverse temperature).

    -
      -
    • Task: Delay Discounting Task

    • -
    • Model: Exponential Model (Samuelson, 1937)

    • -
    - -

Hierarchical Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model. It has the following parameters: "k" (discounting rate), "beta" (inverse temperature).

    -
      -
    • Task: Delay Discounting Task

    • -
    • Model: Hyperbolic Model (Mazur, 1987)

    • -
    - -

Individual Bayesian Modeling of the Delay Discounting Task using the Hyperbolic Model. It has the following parameters: "k" (discounting rate), "beta" (inverse temperature).

    -
      -
    • Task: Delay Discounting Task

    • -
    • Model: Hyperbolic Model (Mazur, 1987)

    • -
    - -
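    For reference, the discount functions behind the models listed above can be sketched as plain R functions. These are the standard forms from the cited papers; the function and argument names here are purely illustrative and are not hBayesDM internals:

    # Subjective value of amount A delayed by D days
    sv_cs  <- function(A, D, r, s) A * exp(-(r * D)^s)  # Constant-Sensitivity (Ebert & Prelec, 2007)
    sv_exp <- function(A, D, r)    A * exp(-r * D)      # Exponential (Samuelson, 1937)
    sv_hyp <- function(A, D, k)    A / (1 + k * D)      # Hyperbolic (Mazur, 1987)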
    - -
    dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -dd_cs_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -dd_hyperbolic(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

dd_cs_single, dd_exp, dd_hyperbolic, and dd_hyperbolic_single accept exactly the same arguments as listed above.
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("dd_cs").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -
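    A short sketch of inspecting these components after a fit; the call reuses the bundled "example" data shown in the Examples section:

    fit1 <- dd_cs("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    head(fit1$allIndPars)  # per-subject parameter summaries (as specified by indPars)
    names(fit1$parVals)    # posterior samples, listed by parameter
    class(fit1$fit)        # "stanfit", usable with rstan's diagnostic tools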

The same components are returned by dd_cs_single, dd_exp, dd_hyperbolic, and dd_hyperbolic_single; only the model element differs, taking the name of the function that was called.
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Delay Discounting Task, there should be 6 columns of data with the - labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    -
    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    -
    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    -
    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    -
    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
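    As above, a minimal sketch in R of a compatible file; all values are hypothetical:

    # Two toy trials with the required delay-discounting columns
    df <- data.frame(
      subjID        = c(1, 1),
      delay_later   = c(7, 28),
      amount_later  = c(13.4, 30.9),
      delay_sooner  = c(0, 0),
      amount_sooner = c(10, 10),
      choice        = c(1, 0)  # 1 = later option chosen, 0 = sooner option chosen
    )
    write.table(df, "dd_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)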

    -

nwarmup is a numerical value that specifies how many MCMC samples should be discarded - at the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    - -

    References

    - -

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    -

    Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612

    -

Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement. In M. L. Commons, J. E. Mazur, J. A. Nevin, & H. Rachlin (Eds.), Quantitative analyses of behavior: Vol. 5. The effect of delay and of intervening events on reinforcement value (pp. 55-73). Hillsdale, NJ: Erlbaum.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_cs("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
-# NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_cs_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
-# NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_exp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
-# NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_hyperbolic("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
-# NOT RUN {
    -# Run the model and store results in "output"
    -output <- dd_hyperbolic_single("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
diff --git a/R/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html
index 0fab6033..059668b8 100644
--- a/R/docs/reference/dd_cs.html
+++ b/R/docs/reference/dd_cs.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html
index eb09e91a..6bbe5686 100644
--- a/R/docs/reference/dd_cs_single.html
+++ b/R/docs/reference/dd_cs_single.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html
index 99ea87f9..bdf625a1 100644
--- a/R/docs/reference/dd_exp.html
+++ b/R/docs/reference/dd_exp.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html
index 0ae39300..873acce7 100644
--- a/R/docs/reference/dd_hyperbolic.html
+++ b/R/docs/reference/dd_hyperbolic.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html
index 8eb82cd2..2f2a4453 100644
--- a/R/docs/reference/dd_hyperbolic_single.html
+++ b/R/docs/reference/dd_hyperbolic_single.html
@@ -82,6 +82,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -209,17 +212,7 @@

    Arg ... -

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    +

    For this model, there is no model-specific argument.

diff --git a/R/docs/reference/estimate_mode.html b/R/docs/reference/estimate_mode.html
index 57e3b6ee..9c4ac7cd 100644
--- a/R/docs/reference/estimate_mode.html
+++ b/R/docs/reference/estimate_mode.html
@@ -78,6 +78,9 @@
  • Reference +
  • +
  • + Changelog
diff --git a/R/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html
index a7e00e9c..733edc1a 100644
--- a/R/docs/reference/extract_ic.html
+++ b/R/docs/reference/extract_ic.html
@@ -77,6 +77,9 @@
  • Reference +
  • +
  • + Changelog
  • @@ -110,13 +113,13 @@

    Extract Model Comparison Estimates

    -
    extract_ic(modelData = NULL, ic = "looic", ncore = 2)
    +
    extract_ic(model_data = NULL, ic = "looic", ncore = 2)
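    A brief usage sketch under the renamed argument; output stands for any fitted hBayesDM model object:

    # Leave-one-out information criterion for a fitted model
    extract_ic(model_data = output, ic = "looic", ncore = 2)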

    Arguments

diff --git a/R/docs/reference/gng.html b/R/docs/reference/gng.html
deleted file mode 100644
index 9a6fc51c..00000000
--- a/R/docs/reference/gng.html
+++ /dev/null
@@ -1,855 +0,0 @@
-RW + noise — gng_m1 • hBayesDM

Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise model. It has the following parameters: "xi" (noise), "ep" (learning rate), "rho" (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise (Guitart-Masip et al., 2012)

    • -
    - -

Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise + bias model. It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "rho" (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise + bias (Guitart-Masip et al., 2012)

    • -
    - -

Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW + noise + bias + pi model. It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rho" (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise + bias + pi (Guitart-Masip et al., 2012)

    • -
    - -

Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using the RW (rew/pun) + noise + bias + pi model. It has the following parameters: "xi" (noise), "ep" (learning rate), "b" (action bias), "pi" (Pavlovian bias), "rhoRew" (reward sensitivity), "rhoPun" (punishment sensitivity).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013)

    • -
    - -
    - -
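    To make the parameter lists above concrete, here is a hedged sketch of how these quantities typically combine in the Guitart-Masip et al. (2012) family of models. It illustrates the published form only and is not hBayesDM's Stan code:

    # Probability of a "go" response on one trial
    p_go <- function(Qgo, Qnogo, b, piPav, V, xi) {
      Wgo   <- Qgo + b + piPav * V             # action weight: value + action bias + Pavlovian bias
      Wnogo <- Qnogo
      p     <- 1 / (1 + exp(-(Wgo - Wnogo)))   # softmax over go vs. nogo
      (1 - xi) * p + xi / 2                    # noise xi mixes in random responding
    }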
    gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    -
modelData
+
model_data

    Object returned by 'hBayesDM' model function

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.
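    A minimal sketch of a compatible input file for these functions; the specific cue, keyPressed, and outcome codings below are assumptions for illustration only:

    df <- data.frame(
      subjID     = c(1, 1),
      cue        = c(1, 3),   # assumed: integer cue/condition index
      keyPressed = c(1, 0),   # assumed: 1 = go response, 0 = no response
      outcome    = c(1, -1)   # assumed: signed trial outcome (reward/punishment)
    )
    write.table(df, "gng_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)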

gng_m2 accepts exactly the same arguments as listed above. gng_m3 and gng_m4 also accept the same arguments, except that their model-based regressors additionally include "SV" alongside "Qgo", "Qnogo", "Wgo", and "Wnogo".
    - -

    Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("gng_m1", "gng_m2", "gng_m3", or "gng_m4", respectively).

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.
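    As a quick, hypothetical illustration (component names taken from the list above), the returned object can be inspected like any R list:

    head(output$allIndPars)   # summarized per-subject parameters
    names(output$parVals)     # parameters with posterior samples
    class(output$fit)         # a stanfit object, usable with rstan tools
    head(output$rawdata)      # the raw data used for fitting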

    Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
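    For concreteness, the first rows of such a tab-delimited file might look like this (made-up values; column order is free and extra columns are ignored):

    subjID    cue    keyPressed    outcome
    1         1      1             1
    1         3      0             0
    1         2      1             -1
    2         4      0             -1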

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
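    A hedged sketch of adjusting these controls, for example after divergent-transition warnings (the values are illustrative, not recommendations from the original text):

    # Sketch: stricter sampler controls; smaller steps and deeper trees cost time
    output <- gng_m2("example", niter = 4000, nwarmup = 2000, nchain = 4, ncore = 4,
                     adapt_delta   = 0.99,   # closer to 1 gives smaller, more careful steps
                     stepsize      = 0.5,    # initial leapfrog step size
                     max_treedepth = 12)     # allow longer trajectories per iteration
    plot(output, type = "trace")             # re-check that the chains mix well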

    References

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

    # NOT RUN {
    # Run the model and store results in "output"
    # (the same call works for gng_m2, gng_m3, and gng_m4)
    output <- gng_m1("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }
    diff --git a/R/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html index c3656746..1ffa9903 100644 --- a/R/docs/reference/gng_m1.html +++ b/R/docs/reference/gng_m1.html
    @@ -82,6 +82,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")
    @@ -209,17 +212,7 @@ (stray table markup after the "..." argument row ("For this model, there is no model-specific argument.") is removed)

    diff --git a/R/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html index e040c9cb..28679698 100644 --- a/R/docs/reference/gng_m2.html +++ b/R/docs/reference/gng_m2.html
    @@ -82,6 +82,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")
    @@ -209,17 +212,7 @@ (stray table markup after the "..." argument row ("For this model, there is no model-specific argument.") is removed)

    diff --git a/R/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html index 6f10acd3..28ecb899 100644 --- a/R/docs/reference/gng_m3.html +++ b/R/docs/reference/gng_m3.html
    @@ -82,6 +82,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")
    @@ -209,17 +212,7 @@ (stray table markup after the "..." argument row ("For this model, there is no model-specific argument.") is removed)

    diff --git a/R/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html index e0c69d84..00475968 100644 --- a/R/docs/reference/gng_m4.html +++ b/R/docs/reference/gng_m4.html
    @@ -82,6 +82,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")
    @@ -209,17 +212,7 @@ (stray table markup after the "..." argument row ("For this model, there is no model-specific argument.") is removed)

    diff --git a/R/docs/reference/hBayesDM-package.html b/R/docs/reference/hBayesDM-package.html index 73ffedfe..722fabb9 100644 --- a/R/docs/reference/hBayesDM-package.html +++ b/R/docs/reference/hBayesDM-package.html
    @@ -124,6 +124,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")

    diff --git a/R/docs/reference/hBayesDM_model.html b/R/docs/reference/hBayesDM_model.html index 5bf36545..c8b83478 100644 --- a/R/docs/reference/hBayesDM_model.html +++ b/R/docs/reference/hBayesDM_model.html
    @@ -78,6 +78,9 @@ (navigation sidebar: a "Changelog" entry is added after "Reference")
    diff --git a/R/docs/reference/igt.html b/R/docs/reference/igt.html deleted file mode 100644 index 0a1ed428..00000000 --- a/R/docs/reference/igt.html +++ /dev/null @@ -1,861 +0,0 @@
    (deleted page; its title was "Outcome-Representation Learning Model — igt_orl • hBayesDM")
    Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Outcome-Representation Learning Model.
    It has the following parameters: "Arew" (reward learning rate), "Apun" (punishment learning rate), "K" (perseverance decay), "betaF" (outcome frequency weight), "betaP" (perseverance weight).

    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • Model: Outcome-Representation Learning Model (Haines et al., 2018)

    • Contributor: Nate Haines <haines.175@osu.edu>

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Decay-RI model.
    It has the following parameters: "A" (decay rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • Model: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014)

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Prospect Valence Learning (PVL) Delta model.
    It has the following parameters: "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion).

    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • Model: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008)

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using the Value-Plus-Perseverance model.
    It has the following parameters: "A" (learning rate), "alpha" (outcome sensitivity), "cons" (response consistency), "lambda" (loss aversion), "epP" (gain impact), "epN" (loss impact), "K" (decay rate), "w" (RL weight).

    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • Model: Value-Plus-Perseverance (Worthy et al., 2013)
    igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    igt_pvl_decay(data = NULL, datafile = "", niter = 4000,
      nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
      nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: "subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
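    Since payscale is passed through '...', a minimal illustrative call (using the packaged "example" data, with made-up settings) might look like:

    # Sketch: raw gains/losses are divided by payscale before fitting (default 100)
    output <- igt_orl("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4,
                      payscale = 100)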

    Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("igt_orl", "igt_pvl_decay", "igt_pvl_delta", or "igt_vpp", respectively).

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

    Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Iowa Gambling Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
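    For concreteness, the first rows of such a tab-delimited file might look like this (made-up values; decks coded A==1 through D==4, losses entered as 0 or negative as described above):

    subjID    choice    gain    loss
    1         1         100     0
    1         2         100     -1250
    1         3         50      0
    2         4         50      -250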

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
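    If the trace-plots show strong auto-correlation, one option, sketched below with illustrative values only, is to re-run with more iterations and thinning:

    # Sketch: thinning heavily auto-correlated chains on a longer run
    output <- igt_pvl_delta("example", niter = 8000, nwarmup = 2000,
                            nchain = 4, ncore = 4, nthin = 2)  # keep every 2nd sample
    plot(output, type = "trace")   # chains should settle into a "furry caterpillar"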

    References

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688

    Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849

    Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

    # NOT RUN {
    # Run the model and store results in "output"
    # (the same call works for igt_pvl_decay, igt_pvl_delta, and igt_vpp)
    output <- igt_orl("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }
    - -
    - -
    - - -
    -
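
    As a hedged variant of the runs above (not in the original examples), the vb flag documented earlier can produce a fast variational approximation before committing to full MCMC:

    # Illustrative only: quick approximate fit via variational inference.
    output_vb <- igt_orl("example", vb = TRUE)
    plot(output_vb)   # posterior approximations of the hyper-parameters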

diff --git a/R/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html
index 73c4eee2..f8acacad 100644
--- a/R/docs/reference/igt_orl.html
+++ b/R/docs/reference/igt_orl.html
@@ -82,6 +82,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html
index 8cdd00f3..2a0b7572 100644
--- a/R/docs/reference/igt_pvl_decay.html
+++ b/R/docs/reference/igt_pvl_decay.html
@@ -82,6 +82,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html
index be2db542..b2feba4e 100644
--- a/R/docs/reference/igt_pvl_delta.html
+++ b/R/docs/reference/igt_pvl_delta.html
@@ -82,6 +82,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html
index f5e90177..f6f66f90 100644
--- a/R/docs/reference/igt_vpp.html
+++ b/R/docs/reference/igt_vpp.html
@@ -82,6 +82,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/index.html b/R/docs/reference/index.html
index 4895f067..4a4ac4df 100644
--- a/R/docs/reference/index.html
+++ b/R/docs/reference/index.html
@@ -74,6 +74,9 @@
    (navbar hunk: adds a "Changelog" entry)
@@ -176,18 +179,6 @@

     Drift Diffusion Model
    -choiceRT_lba()           Choice Reaction Time task, linear ballistic accumulator modeling
    -choiceRT_lba_single()    Choice Reaction Time task, linear ballistic accumulator modeling
     cra_exp()
diff --git a/R/docs/reference/multiplot.html b/R/docs/reference/multiplot.html
index 9ddaa923..f579d563 100644
--- a/R/docs/reference/multiplot.html
+++ b/R/docs/reference/multiplot.html
@@ -78,6 +78,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/peer.html b/R/docs/reference/peer.html
deleted file mode 100644
index a9be34d5..00000000
--- a/R/docs/reference/peer.html
+++ /dev/null
@@ -1,339 +0,0 @@
    (deleted page: "Other-Conferred Utility (OCU) Model — peer_ocu • hBayesDM"; HTML boilerplate omitted)

    Hierarchical Bayesian Modeling of the Peer Influence Task using the 'peer_ocu' (Other-Conferred Utility) model. It has the following parameters: "rho" (risk preference), "tau" (inverse temperature), "ocu" (other-conferred utility).

    - - -
    - -
    peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("peer_ocu").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -
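
    As a hedged sketch (not from the page itself) of how the components listed above might be inspected after a fit:

    # Assumes a completed fit; "example" is the bundled sample data set.
    output <- peer_ocu("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    output$model               # "peer_ocu"
    head(output$allIndPars)    # per-subject summaries of rho, tau, ocu
    names(output$parVals)      # posterior samples, one element per parameter
    class(output$fit)          # "stanfit", so rstan tooling applies, e.g.:
    rstan::summary(output$fit)$summary[1:3, c("mean", "Rhat")]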

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Peer Influence Task, there should be 8 columns of data with the - labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    condition

    0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).

    -
    p_gamble

    Probability of receiving a high payoff (same for both options).

    -
    safe_Hpayoff

    High payoff of the safe option.

    -
    safe_Lpayoff

    Low payoff of the safe option.

    -
    risky_Hpayoff

    High payoff of the risky option.

    -
    risky_Lpayoff

    Low payoff of the risky option.

    -
    choice

    Which option was chosen? 0: safe, 1: risky.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
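
    As an illustrative sketch (not from the package), a tab-delimited file with the required columns could be assembled like this; the extra ReactionTime column demonstrates the note above about ignored columns:

    # Two fake trials for one subject, written in the format peer_ocu() expects.
    dat <- data.frame(subjID = c(101, 101),
                      condition = c(0, 2),            # 0: solo, 2: info (mix)
                      p_gamble = c(0.5, 0.5),
                      safe_Hpayoff = c(10, 10),
                      safe_Lpayoff = c(5, 5),
                      risky_Hpayoff = c(30, 30),
                      risky_Lpayoff = c(0, 0),
                      choice = c(0, 1),               # 0: safe, 1: risky
                      ReactionTime = c(1.2, 0.9))     # extra column, ignored by the model
    write.table(dat, "peer_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)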

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.
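
    For instance (illustrative values only), thinning can be paired with a longer run so that roughly the same number of less-correlated samples is retained:

    # Keeps every 2nd post-warmup sample: (6000 - 1000) / 2 = 2500 per chain.
    output <- peer_ocu("example", niter = 6000, nwarmup = 1000,
                       nchain = 4, ncore = 4, nthin = 2)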

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- peer_ocu("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -

diff --git a/R/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html
index 17ca7132..72deef93 100644
--- a/R/docs/reference/peer_ocu.html
+++ b/R/docs/reference/peer_ocu.html
@@ -82,6 +82,9 @@
    (navbar hunk: adds a "Changelog" entry)
@@ -209,17 +212,7 @@

    (hunk: tidies the "..." argument row of the Arguments table; the text "For this model, there is no model-specific argument." is unchanged, only the empty table markup around it is removed)

diff --git a/R/docs/reference/plot.hBayesDM.html b/R/docs/reference/plot.hBayesDM.html
index fc061be4..10e25214 100644
--- a/R/docs/reference/plot.hBayesDM.html
+++ b/R/docs/reference/plot.hBayesDM.html
@@ -77,6 +77,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/plotDist.html b/R/docs/reference/plotDist.html
index d80fef70..3683cb9e 100644
--- a/R/docs/reference/plotDist.html
+++ b/R/docs/reference/plotDist.html
@@ -77,6 +77,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/plotHDI.html b/R/docs/reference/plotHDI.html
index eb73107e..b320a415 100644
--- a/R/docs/reference/plotHDI.html
+++ b/R/docs/reference/plotHDI.html
@@ -77,6 +77,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/plotInd.html b/R/docs/reference/plotInd.html
index d7e1c28b..af1239a0 100644
--- a/R/docs/reference/plotInd.html
+++ b/R/docs/reference/plotInd.html
@@ -77,6 +77,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/printFit.html b/R/docs/reference/printFit.html
index bf891801..55f858f7 100644
--- a/R/docs/reference/printFit.html
+++ b/R/docs/reference/printFit.html
@@ -77,6 +77,9 @@
    (navbar hunk: adds a "Changelog" entry)
diff --git a/R/docs/reference/prl.html b/R/docs/reference/prl.html
deleted file mode 100644
index 259f8114..00000000
--- a/R/docs/reference/prl.html
+++ /dev/null
@@ -1,1563 +0,0 @@
    (deleted page: "Experience-Weighted Attraction Model — prl_ewa • hBayesDM"; HTML boilerplate omitted)

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_ewa' (Experience-Weighted Attraction) model. It has the following parameters: "phi" (1 - learning rate), "rho" (experience decay factor), "beta" (inverse temperature).

    - - -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_fictitious' model. It has the following parameters: "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

    - - -

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_fictitious_multipleB' model. It has the following parameters: "eta" (learning rate), "alpha" (indecision point), "beta" (inverse temperature).

    - - -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_fictitious_rp' model. It has the following parameters: "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "alpha" (indecision point), "beta" (inverse temperature).

    - - -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_fictitious_rp_woa' model. It has the following parameters: "eta_pos" (learning rate, +PE), "eta_neg" (learning rate, -PE), "beta" (inverse temperature).

    - - -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_fictitious_woa' model. It has the following parameters: "eta" (learning rate), "beta" (inverse temperature).

    - - -

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_rp' model. It has the following parameters: "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).

    - - -

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the 'prl_rp_multipleB' model. It has the following parameters: "Apun" (punishment learning rate), "Arew" (reward learning rate), "beta" (inverse temperature).

    - - -
    - -
    prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_fictitious(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -
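
    Because this page documents eight competing PRL models, one natural workflow (a hedged sketch, not a prescription from the page) is to fit several of them and compare fit indices; printFit() accepts multiple fitted objects:

    # Illustrative: fit two of the models above on the bundled example data,
    # then compare their LOOIC/WAIC estimates (lower indicates better fit).
    fit_ewa <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    fit_rp  <- prl_rp("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    printFit(fit_ewa, fit_rp)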

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
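
    Before the remaining argument tables, a hedged sketch of the modelRegressor option just described for prl_ewa (the regressor names are the ones listed above; the array shape is an assumption based on the package's usual subjects-by-trials layout):

    # Illustrative: export trial-level model-based regressors from prl_ewa.
    output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4,
                      ncore = 4, modelRegressor = TRUE)
    names(output$modelRegressor)      # "ev_c", "ev_nc", "ew_c", "ew_nc"
    dim(output$modelRegressor$ev_c)   # assumed: subjects x trials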
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "block", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "block", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - - - - - - - - - -
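
    Similarly, a minimal sketch (values illustrative; the y_pred dimensions are an assumption) of the inc_postpred flag documented in each table above:

    # Illustrative: keep trial-level posterior predictive simulations.
    output <- prl_rp("example", niter = 2000, nwarmup = 1000, nchain = 4,
                     ncore = 4, inc_postpred = TRUE)
    dim(output$parVals$y_pred)   # assumed: samples x subjects x trials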
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_ewa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_multipleB").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_rp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_rp_woa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_fictitious_woa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_rp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_rp_multipleB").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -


    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Reversal Learning Task, there should be 4 columns of data with the - labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    block

    A unique identifier for each of the multiple blocks within each subject.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Reversal Learning Task, there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler: only every nthin-th sample is used to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.
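
    As a concrete illustration of how these settings interact (plain arithmetic, not package code):

    niter <- 4000; nwarmup <- 1000; nthin <- 2   # illustrative values
    (niter - nwarmup) / nthin                    # 1500 retained draws per chain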

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
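
    For example, if Stan reports divergent transitions, a common first step is to raise adapt_delta above its default (a sketch; the call mirrors the Examples below):

    output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4,
                      adapt_delta = 0.99, max_treedepth = 12)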

    -

    For the multiple-block models (prl_fictitious_multipleB and prl_rp_multipleB), the data file should instead contain 4 columns of data with the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below (a quick structural check follows the list):

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    block

    A unique identifier for each of the multiple blocks within each subject.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).
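
    A quick structural check on the block layout (a sketch; dat is assumed to be loaded as in the earlier read.table() example, now with a "block" column):

    with(dat, table(subjID, block))   # number of trials per block, per subject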

    - -

    References

    - -

    den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    -

    Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098


    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM


    - - -

    Examples

    # NOT RUN {
    # Run the model and store results in "output"
    output <- prl_ewa("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)

    # The same workflow applies to the other PRL models; replace the first call with
    # prl_fictitious(), prl_fictitious_multipleB(), prl_fictitious_rp(),
    # prl_fictitious_rp_woa(), prl_fictitious_woa(), prl_rp(), or prl_rp_multipleB().
    # }
diff --git a/R/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html
index 2a1b27b3..2649eb21 100644
--- a/R/docs/reference/prl_ewa.html
+++ b/R/docs/reference/prl_ewa.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html
index 7b4a3718..b84cba8f 100644
--- a/R/docs/reference/prl_fictitious.html
+++ b/R/docs/reference/prl_fictitious.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html
index 019b43b1..cf536c6d 100644
--- a/R/docs/reference/prl_fictitious_multipleB.html
+++ b/R/docs/reference/prl_fictitious_multipleB.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html
index 174f5e27..541130d6 100644
--- a/R/docs/reference/prl_fictitious_rp.html
+++ b/R/docs/reference/prl_fictitious_rp.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html
index 0f2c7bd8..e085521e 100644
--- a/R/docs/reference/prl_fictitious_rp_woa.html
+++ b/R/docs/reference/prl_fictitious_rp_woa.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html
index 316c3c99..55e8e93f 100644
--- a/R/docs/reference/prl_fictitious_woa.html
+++ b/R/docs/reference/prl_fictitious_woa.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html
index dc4805b6..e8cbd299 100644
--- a/R/docs/reference/prl_rp.html
+++ b/R/docs/reference/prl_rp.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html
index 5d5d5f56..2917599a 100644
--- a/R/docs/reference/prl_rp_multipleB.html
+++ b/R/docs/reference/prl_rp_multipleB.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/pst.html b/R/docs/reference/pst.html
deleted file mode 100644
index 40aa8168..00000000
--- a/R/docs/reference/pst.html
+++ /dev/null
@@ -1,334 +0,0 @@
-Gain-Loss Q Learning Model — pst_gainloss_Q • hBayesDM

    Hierarchical Bayesian Modeling of the Probabilistic Selection Task using the Gain-Loss Q Learning Model. It has the following parameters: "alpha_pos" (learning rate for positive feedback), "alpha_neg" (learning rate for negative feedback), "beta" (inverse temperature).
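
    Schematically, these parameters act as in the following update rule (illustrative only; the actual likelihood is implemented in the package's Stan code):

    update_Q <- function(Q, choice, reward, alpha_pos, alpha_neg) {
      pe <- reward - Q[choice]                      # reward prediction error
      lr <- if (pe >= 0) alpha_pos else alpha_neg   # separate learning rates for gains and losses
      Q[choice] <- Q[choice] + lr * pe
      Q   # "beta" then maps Q-value differences to choice probabilities via softmax
    }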

    pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
      nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: "subjID", "type", "choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every nthin-th sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.
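
    For instance, when inc_postpred = TRUE, the simulated choices can be pulled from the fitted object (a sketch; output is assumed to be the value returned by pst_gainloss_Q()):

    y_pred <- rstan::extract(output$fit)$y_pred   # trial-level posterior predictive draws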
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("pst_gainloss_Q").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.
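
    These components are plain list elements and can be accessed directly (a minimal sketch; output is assumed to come from a pst_gainloss_Q() call):

    output$model             # "pst_gainloss_Q"
    head(output$allIndPars)  # per-subject parameter summaries
    class(output$fit)        # "stanfit"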

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Probabilistic Selection Task, there should be 4 columns of data with the labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).

    -
    reward

    Amount of reward earned as a result of the trial.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should be discarded from the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler: only every nthin-th sample is used to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    - -

    References

    - -

    Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    # NOT RUN {
    # Run the model and store results in "output"
    output <- pst_gainloss_Q("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }
diff --git a/R/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html
index 2a8e6a9b..841f2aa3 100644
--- a/R/docs/reference/pst_gainloss_Q.html
+++ b/R/docs/reference/pst_gainloss_Q.html
@@ -82,6 +82,9 @@
  • Reference
+ • Changelog
  •
@@ -209,17 +212,7 @@
  Arg ...
-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/ra.html b/R/docs/reference/ra.html
deleted file mode 100644
index 22262559..00000000
--- a/R/docs/reference/ra.html
+++ /dev/null
@@ -1,684 +0,0 @@
-Prospect Theory, without loss aversion (LA) parameter — ra_noLA • hBayesDM

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without the loss aversion (LA) parameter. It has the following parameters: "rho" (risk aversion), "tau" (inverse temperature).

    • Task: Risk Aversion Task
    • Model: Prospect Theory, without loss aversion (LA) parameter (Sokol-Hessner et al., 2009)

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without the risk aversion (RA) parameter. It has the following parameters: "lambda" (loss aversion), "tau" (inverse temperature).

    • Task: Risk Aversion Task
    • Model: Prospect Theory, without risk aversion (RA) parameter (Sokol-Hessner et al., 2009)

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory. It has the following parameters: "rho" (risk aversion), "lambda" (loss aversion), "tau" (inverse temperature).

    • Task: Risk Aversion Task
    • Model: Prospect Theory (Sokol-Hessner et al., 2009)
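
    Schematically, the three parameters combine as follows for a single trial (illustrative only; the exact models are implemented in the package's Stan code; in this scheme, ra_noLA corresponds to fixing lambda at 1 and ra_noRA to fixing rho at 1):

    p_gamble <- function(gain, loss, cert, rho, lambda, tau) {
      u_gamble <- 0.5 * gain^rho - 0.5 * lambda * abs(loss)^rho  # 50/50 gamble utility
      u_cert   <- cert^rho                                       # safe option ("cert" >= 0)
      1 / (1 + exp(-tau * (u_gamble - u_cert)))                  # logistic choice rule
    }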
    ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    ra_prospect(data = NULL, datafile = "", niter = 4000,
      nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

    ra_noLA(), ra_noRA(), and ra_prospect() share the same arguments:

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every nthin-th sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.
    - -

    Value

    Each function returns a class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model: "ra_noLA", "ra_noRA", or "ra_prospect", depending on the function called.

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    -
    modelRegressor

    List object containing the extracted model-based regressors.

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Risk Aversion Task, there should be 5 columns of data with the labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

    Guaranteed amount of a safe option. "cert" is assumed to be zero or greater.

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
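
    A toy data set in this five-column layout, written out as a tab-delimited file (a sketch; all values and the file name are hypothetical):

    ra_dat <- data.frame(subjID = rep(1:2, each = 2),
                         gain   = c(9, 12, 8, 10),
                         loss   = c(-5, -6, -4, -5),
                         cert   = c(0, 1, 0, 2),
                         gamble = c(1, 0, 1, 1))
    write.table(ra_dat, "ra_example.txt", sep = "\t", row.names = FALSE, quote = FALSE)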

    -

    nwarmup is a numerical value that specifies how many MCMC samples should be discarded from the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler: only every nthin-th sample is used to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.


    - -

    References

    - -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035


    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM


    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_noLA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }# NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_noRA("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
# NOT RUN {
    -# Run the model and store results in "output"
    -output <- ra_prospect("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -

diff --git a/R/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html
index 6ae49c92..99fbef5a 100644
--- a/R/docs/reference/ra_noLA.html
+++ b/R/docs/reference/ra_noLA.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
@@ -209,17 +212,7 @@

Arg ...

-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/ra_noRA.html b/R/docs/reference/ra_noRA.html
index 1b502f6b..2ddb3c72 100644
--- a/R/docs/reference/ra_noRA.html
+++ b/R/docs/reference/ra_noRA.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
@@ -209,17 +212,7 @@

Arg ...

-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/ra_prospect.html b/R/docs/reference/ra_prospect.html
index 05cfc5b9..c29b35e0 100644
--- a/R/docs/reference/ra_prospect.html
+++ b/R/docs/reference/ra_prospect.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
@@ -209,17 +212,7 @@

Arg ...

-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/rdt.html b/R/docs/reference/rdt.html
deleted file mode 100644
index e33d0ba4..00000000
--- a/R/docs/reference/rdt.html
+++ /dev/null
@@ -1,340 +0,0 @@
-Happiness Computational Model — rdt_happiness • hBayesDM

Hierarchical Bayesian Modeling of the Risky Decision Task using rdt_happiness. It has the following parameters: "w0" (baseline), "w1" (weight of certain rewards), "w2" (weight of expected values), "w3" (weight of reward prediction errors), "gam" (forgetting factor), "sig" (standard deviation of error).

    - -
    rdt_happiness(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("rdt_happiness").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.
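In practice, these components are accessed as ordinary list elements of the returned object; a minimal sketch, assuming a fitted object named output:

output$model              # "rdt_happiness"
head(output$allIndPars)   # per-subject parameter summaries, as set by indPars
fit <- output$fit         # the underlying stanfit object, for further diagnostics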

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Risky Decision Task, there should be 9 columns of data with the labels "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

    Guaranteed amount of a safe option.

    -
    type

    loss == -1, mixed == 0, gain == 1

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -
    outcome

    Result of the trial.

    -
    happy

    Happiness score.

    -
    RT_happy

    Reaction time for answering the happiness score.

    -

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
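As a quick sanity check before fitting, one might confirm that a data file carries all nine required columns; a minimal sketch (the file name is hypothetical):

dat <- read.table("rdt_data.txt", header = TRUE, sep = "\t")
required <- c("subjID", "gain", "loss", "cert", "type",
              "gamble", "outcome", "happy", "RT_happy")
stopifnot(all(required %in% colnames(dat)))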

    -

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    -

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    - -

    References

    - -

    Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257.

    - -

    See also

    - -

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- rdt_happiness("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -

diff --git a/R/docs/reference/rdt_happiness.html b/R/docs/reference/rdt_happiness.html
index a3895350..69d0ef5a 100644
--- a/R/docs/reference/rdt_happiness.html
+++ b/R/docs/reference/rdt_happiness.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
@@ -209,17 +212,7 @@

Arg ...

-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/rhat.html b/R/docs/reference/rhat.html
index 9ce1133a..5e703778 100644
--- a/R/docs/reference/rhat.html
+++ b/R/docs/reference/rhat.html
@@ -79,6 +79,9 @@
  • Reference
+ •
+ • Changelog
  •
diff --git a/R/docs/reference/ts.html b/R/docs/reference/ts.html
deleted file mode 100644
index 6c5eec19..00000000
--- a/R/docs/reference/ts.html
+++ /dev/null
@@ -1,691 +0,0 @@
-Hybrid Model, with 4 parameters — ts_par4 • hBayesDM

Hierarchical Bayesian Modeling of the Two-Step Task using ts_par4. It has the following parameters: "a" (learning rate for both stages 1 & 2), "beta" (inverse temperature for both stages 1 & 2), "pi" (perseverance), "w" (model-based weight).

    -
  • Task: Two-Step Task (Daw et al., 2011)
  • Model: Hybrid Model, with 4 parameters (Daw et al., 2011; Wunderlich et al., 2012)
  • Contributor: Harhim Park <hrpark12@gmail.com>
    - -

Hierarchical Bayesian Modeling of the Two-Step Task using ts_par6. It has the following parameters: "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight).

    -
  • Task: Two-Step Task (Daw et al., 2011)
  • Model: Hybrid Model, with 6 parameters (Daw et al., 2011)
  • Contributor: Harhim Park <hrpark12@gmail.com>
    - -

Hierarchical Bayesian Modeling of the Two-Step Task using ts_par7. It has the following parameters: "a1" (learning rate in stage 1), "beta1" (inverse temperature in stage 1), "a2" (learning rate in stage 2), "beta2" (inverse temperature in stage 2), "pi" (perseverance), "w" (model-based weight), "lambda" (eligibility trace).

    -
  • Task: Two-Step Task (Daw et al., 2011)
  • Model: Hybrid Model, with 7 parameters (original model) (Daw et al., 2011)
  • Contributor: Harhim Park <hrpark12@gmail.com>
    - -
    - -
    ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.
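Because trans_prob is the only model-specific argument, it is simply appended to the usual call; a sketch in which the value 0.8 is illustrative only (the default is 0.7):

# Fit the 4-parameter hybrid model under a hypothetical 0.8 common-transition design.
output <- ts_par4("example", niter = 2000, nwarmup = 1000,
                  nchain = 4, ncore = 4, trans_prob = 0.8)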

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par6").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ts_par7").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Two-Step Task, there should be 4 columns of data with the labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    -
    reward

    Reward after Level 2 (0 or 1).

    -

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
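To make the coding above concrete, one can check the required labels and cross-tabulate first- and second-stage choices; a minimal sketch (the file name is hypothetical):

dat <- read.table("ts_data.txt", header = TRUE, sep = "\t")
stopifnot(all(c("subjID", "level1_choice", "level2_choice", "reward") %in% names(dat)))

# Under the default trans_prob = 0.7, level1_choice == 1 should mostly lead to
# level-2 stimuli 3/4, and level1_choice == 2 to stimuli 5/6.
with(dat, table(level1_choice, level2_choice))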

    -

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    -

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.


    - -

    References

    - -

Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027

-

Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424.

    - -

    See also

    - -

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par4("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
# NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par6("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
# NOT RUN {
    -# Run the model and store results in "output"
    -output <- ts_par7("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -

diff --git a/R/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html
index d0c3ec95..23737565 100644
--- a/R/docs/reference/ts_par4.html
+++ b/R/docs/reference/ts_par4.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
diff --git a/R/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html
index e61bf458..00eb5314 100644
--- a/R/docs/reference/ts_par6.html
+++ b/R/docs/reference/ts_par6.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
diff --git a/R/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html
index d2375356..7c3331ed 100644
--- a/R/docs/reference/ts_par7.html
+++ b/R/docs/reference/ts_par7.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
diff --git a/R/docs/reference/ug.html b/R/docs/reference/ug.html
deleted file mode 100644
index f65c64f3..00000000
--- a/R/docs/reference/ug.html
+++ /dev/null
@@ -1,505 +0,0 @@
-Ideal Observer Model — ug_bayes • hBayesDM

Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using ug_bayes. It has the following parameters: "alpha" (envy), "beta" (guilt), "tau" (inverse temperature).

    -
  • Task: Norm-Training Ultimatum Game
  • Model: Ideal Observer Model (Xiang et al., 2013)
    - -

Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using ug_delta. It has the following parameters: "alpha" (envy), "tau" (inverse temperature), "ep" (norm adaptation rate).

    -
  • Task: Norm-Training Ultimatum Game
  • Model: Rescorla-Wagner (Delta) Model (Gu et al., 2015)
    - -
    - -
    ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    -
    -ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labeled as: -"subjID", "offer", "accept". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Export model-based regressors? TRUE or FALSE. -Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE.

    -

    If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ug_bayes").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("ug_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Norm-Training Ultimatum Game, there should be 3 columns of data with the labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    offer

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    -
    accept

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    -

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
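For a quick descriptive look at such data before modeling, one might compute the acceptance rate at each offer level; a minimal sketch (the file name is hypothetical):

dat <- read.table("ug_data.txt", header = TRUE, sep = "\t")
# Mean of the 0/1 accept column per offer value, per the coding above.
tapply(dat$accept, dat$offer, mean)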

    -

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    -

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.


    - -

    References

    - -

    Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013

    -

    Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015

    - -

    See also

    - -

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- ug_bayes("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
-# }
# NOT RUN {
    -# Run the model and store results in "output"
    -output <- ug_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -

diff --git a/R/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html
index 4b9e56c4..93c83662 100644
--- a/R/docs/reference/ug_bayes.html
+++ b/R/docs/reference/ug_bayes.html
@@ -82,6 +82,9 @@
  • Reference
+ •
+ • Changelog
  •
@@ -209,17 +212,7 @@

Arg ...

-For this model, there is no model-specific argument.
+For this model, there is no model-specific argument.

diff --git a/R/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html
index 81ae5bf4..e6c41212 100644
--- a/R/docs/reference/ug_delta.html
+++ b/R/docs/reference/ug_delta.html
@@ -82,6 +82,9 @@
   • Reference
+  •
+  • Changelog
   •
@@ -209,17 +212,7 @@

Arg ...

    For this model, there is no model-specific argument.



diff --git a/R/docs/reference/wcs.html b/R/docs/reference/wcs.html
deleted file mode 100644
index 6245caf2..00000000
--- a/R/docs/reference/wcs.html
+++ /dev/null
@@ -1,334 +0,0 @@
-Sequential Learning Model — wcs_sql • hBayesDM

Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task using the Sequential Learning Model. It has the following parameters: "r" (reward sensitivity), "p" (punishment sensitivity), "d" (decision consistency or inverse temperature).

wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)

    Arguments

data
    A .txt file containing the data to be modeled. Data columns should be labeled as:
    "subjID", "choice", "outcome". See Details below for more information.

niter
    Number of iterations, including warm-up. Defaults to 4000.

nwarmup
    Number of iterations used for warm-up only. Defaults to 1000.

nchain
    Number of Markov chains to run. Defaults to 4.

ncore
    Number of CPUs to be used for running. Defaults to 1.

nthin
    Every i == nthin sample will be used to generate the posterior distribution.
    Defaults to 1. A higher number can be used when auto-correlation within the MCMC
    sampling is high.

inits
    Character value specifying how the initial values should be generated.
    Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars
    Character value specifying how to summarize individual parameters. Current options
    are: "mean", "median", or "mode".

modelRegressor
    Export model-based regressors? TRUE or FALSE. Currently not available for this model.

vb
    Use variational inference to approximately draw from a posterior distribution.
    Defaults to FALSE.

inc_postpred
    Include trial-level posterior predictive simulations in model output (may greatly
    increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred"

adapt_delta
    Floating point value representing the target acceptance probability of a new
    sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize
    Integer value specifying the size of each leapfrog step that the MCMC sampler can
    take on each new iteration. See Details below.

max_treedepth
    Integer value specifying how many leapfrog steps the MCMC sampler can take
    on each new iteration. See Details below.

...
    For this model, there is no model-specific argument.

    Value


    A class "hBayesDM" object modelData with the following components:

model
    Character value that is the name of the model ("wcs_sql").

allIndPars
    Data.frame containing the summarized parameter values (as specified by
    indPars) for each subject.

parVals
    List object containing the posterior samples over different parameters.

fit
    A class stanfit object that contains the fitted Stan model.

rawdata
    Data.frame containing the raw data used to fit the model, as specified by
    the user.

modelRegressor
    List object containing the extracted model-based regressors.
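To make these components concrete, here is a short sketch of inspecting a fitted object. It assumes output holds the result of a wcs_sql() call, and the group-level parameter name shown (mu_r) is assumed for illustration, following this model's "r", "p", "d" parameters:

    # Summarized per-subject parameters, as specified by indPars
    head(output$allIndPars)

    # The underlying stanfit object; any rstan utility can be applied to it
    print(output$fit)

    # Posterior draws for one group-level parameter (name assumed for illustration)
    hist(output$parVals$mu_r)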

    Details


    This section describes some of the function arguments in greater detail.


data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.

For the Wisconsin Card Sorting Task, there should be 3 columns of data with the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID
    A unique identifier for each subject in the data-set.

choice
    Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.

outcome
    1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
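Before fitting, it can be worth confirming that a data file parses as expected. A minimal sketch, with a hypothetical file path:

    # Hypothetical path; the file must be tab-delimited with a header row
    dat <- read.table("wcs_data.txt", header = TRUE, sep = "\t")

    # The three required columns; extra columns are simply ignored by wcs_sql
    stopifnot(all(c("subjID", "choice", "outcome") %in% colnames(dat)))
    table(dat$choice)   # should contain only decks 1-4
    table(dat$outcome)  # should contain only 0 and 1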


nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.


nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".


nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.


Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.


    References


    Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.


    See also


We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM


    Examples

# NOT RUN {
# Run the model and store results in "output"
output <- wcs_sql("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }


diff --git a/R/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html
index 504a5026..48c1a951 100644
@@ -82,6 +82,9 @@
   • Reference
+  •
+  • Changelog
   •
@@ -209,17 +212,7 @@

Arg ...

    For this model, there is no model-specific argument.



    diff --git a/R/man/choiceRT_lba.Rd b/R/man/choiceRT_lba.Rd index 88663f5e..49f1e94a 100644 --- a/R/man/choiceRT_lba.Rd +++ b/R/man/choiceRT_lba.Rd @@ -135,3 +135,4 @@ Journal of Machine Learning Research, 15(1), 1593-1623. \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: \url{https://rpubs.com/CCSL/hBayesDM} } +\keyword{internal} diff --git a/R/man/choiceRT_lba_single.Rd b/R/man/choiceRT_lba_single.Rd index 592505e0..9d23f467 100644 --- a/R/man/choiceRT_lba_single.Rd +++ b/R/man/choiceRT_lba_single.Rd @@ -135,3 +135,4 @@ Journal of Machine Learning Research, 15(1), 1593-1623. \seealso{ We refer users to our in-depth tutorial for an example of using hBayesDM: \url{https://rpubs.com/CCSL/hBayesDM} } +\keyword{internal} From 5115a22cbae16a61923a566c36df25cb3c473cf9 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 01:40:20 +0900 Subject: [PATCH 112/163] Add an example YAML file --- commons/example.yml | 116 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 116 insertions(+) create mode 100644 commons/example.yml diff --git a/commons/example.yml b/commons/example.yml new file mode 100644 index 00000000..54e8b154 --- /dev/null +++ b/commons/example.yml @@ -0,0 +1,116 @@ +################################################################################ +# +# hBayesDM model definition +# +# Model information for hBayesDM should be defined as a YAML file. If you are +# not familiar to YAML, see more on the following links: +# - https://en.wikipedia.org/wiki/YAML +# - https://yaml-multiline.info +# - https://www.tutorialspoint.com/yaml/index.htm +# +# In short, using a YAML-formatted model information, the function is defined as +# {['task_name']['code']}_{['model_name']['code']} +# or if model_type is specified other than 'Hierarchical', +# {['task_name']['code']}_{['model_name']['code']}_{['model_type']['code']} +# +################################################################################ + +# Task information. +task_name: + code: task_a # code for the task + desc: 2-Armed Bandit Task # description (title-case) + cite: # A list of citations. They should be APA-formatted. + - Doe, J., & Doe, J. (2019). A great paper. Good journal, 1(1), 1-2. + +# Model information. +model_name: + code: delta # code for the model + desc: Rescorla-Wagner (Delta) Model # description (title-case) + cite: # A list of citations. They should be APA-formatted. + +# Model type. +# For now, it should be one among three types: +# 1) Hierarchical +# code: '' +# desc: 'Hierarchical' +# 2) Individual +# code: 'single' +# desc: 'Individual' +# 3) Multiple-block Hierarchical +# code: 'multipleB' +# desc: 'Multiple-block Hierarchical' +model_type: + code: # code for the model type + desc: Hierarchical # description + +# Data columns that must be included in a input data. +# For each column, it should be defined as: +# {column_name}: {one-line description} +# +# Note: `subjID` must always be included. +# Also, if `model_type` is "multipleB", `block` must be included, too. +data_columns: + subjID: A unique identifier for each subject in the data-set. # Required +# block: A unique identifier for each of the multiple blocks within each subject. # Required for multipleB type + choice: Integer value representing the option chosen on the given trial (1 or 2). + outcome: Integer value representing the outcome of the given trial (where reward + == 1, and loss == -1). + +# Model parameters. 
+# For each parameter, it should be defined as: +# {parameter_name}: +# desc: {description} +# info: [{lower_bound}, {plausible_value}, {upper_bound}] +# +# `info` is defined for a fixed initial value of the parameter. +# `lower_bound`, `plausible_value`, `upper_bound` can be numbers, strings +# (e.g., 'Inf', '-Inf', 'exp([0-9.]+)'), where plausible_value should be +# neither 'Inf' nor '-Inf'. +parameters: + alpha: + desc: learning rate + info: [0, 0.5, 1] + tau: + desc: inverse temperature + info: [0, 1, 5] + +# (optional) Model regressors. +# If exists, for each regressor, it should be defined as: +# {regressor}: {number_of_dimension} +regressors: + ev_risk: 2 # shape: [N, T] + ev_safe: 2 # shape: [N, T] + +# (optional) response variables for posterior predictive checks (PPC). +# Should be defined in a generated quantity block. +postpreds: +- y_pred + +# (optional) a list of additional arguments. +# For each additional argument, it should be defined as: +# - code: {code} +# default: {default_value} +# desc: {description} +additional_args: +- code: RTbound + default: 0.1 + desc: Floating point value representing the lower bound (i.e., minimum allowed) + reaction time. Defaults to 0.1 (100 milliseconds). + +# (optional) notes on the model. Should be given as a list of notes. +notes: +- > + This is a note for this model. +- > + If you want to write long notes for the model, + you'd be better to check out how to write multiline strings in YAML + (https://yaml-multiline.info) + +# (optional) a list of contributors. To specify who wrote this model codes for hBayesDM. +contributors: +- name: Jane Doe + email: jane-doe@gmail.com + link: https://jane-doe.com/ +- name: John Doe + email: john-doe@gmail.com + link: https://john-doe.com/ From 1903ea0be494c9f5942c112392d9b607ab5cf609 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 01:44:03 +0900 Subject: [PATCH 113/163] Remove generated files after copying to the package directories --- commons/generate-codes.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/commons/generate-codes.sh b/commons/generate-codes.sh index 38fa16e1..bd8d3b38 100755 --- a/commons/generate-codes.sh +++ b/commons/generate-codes.sh @@ -7,3 +7,6 @@ cp _r-tests/*.R ../R/tests/testthat/ python3 convert-to-py.py cp _py-codes/_*.py ../Python/hbayesdm/models/ cp _py-tests/*.py ../Python/tests/ + +rm -rf _r-codes _r-tests _py-codes _py-tests + From a9bc419c843e19a69b4774220e365ada42372ac5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 02:15:24 +0900 Subject: [PATCH 114/163] Make new README --- commons/README-json.md | 154 +++++++++++++++++++++++++++++++++++++ commons/README.md | 168 ++++------------------------------------- 2 files changed, 169 insertions(+), 153 deletions(-) create mode 100644 commons/README-json.md diff --git a/commons/README-json.md b/commons/README-json.md new file mode 100644 index 00000000..e400583d --- /dev/null +++ b/commons/README-json.md @@ -0,0 +1,154 @@ +# Model Information JSON Files + +## JSON Schema + +Schema for the Model Information JSON files is stored in `ModelInformation.schema.json` as a JSON Schema format. + +| Property | Type | Description +|-------------------|---------------------|----------------------------------| +| `task_name` | Object | Informations regarding the task. *See below for **Keys** and **Values**.* +| `model_name` | Object | Informations regarding the model. *See below for **Keys** and **Values**.* +| `model_type` | Object | Modeling-type information. 
Should be one of the following three:
    - `{"code": "", "desc": "Hierarchical"}`
    - `{"code": "single", "desc": "Individual"}`
    - `{"code": "multipleB", "desc": "Multiple-Block Hierarchical"}` +| `notes` | Array of Strings | Optional notes about the task/model. Give empty array `[]` if unused. +| `contributors` | Array of Objects | Optional specifying of contributors. Give empty array `[]` if unused. +| `data_columns` | Object | **Keys**: names of the necessary data columns for user data.
    - `"subjID"` must always be included.
    - Also include `"block"`, if modeling-type is "multipleB".
    **Values**: one-line descriptions about each data column. +| `parameters` | Object (of Objects) | **Keys**: names of the parameters of this model.
    **Values**: inner-level Object specifying desc and info for each parameter. +| `regressors` | Object | *(Give empty object `{}` if not supported.)*
    **Keys**: names of the regressors of this model.
    **Values**: extracted dimension-size for each regressor. +| `postpreds` | Array of Strings | Name(s) of posterior predictions. Give empty array `[]` if not supported. +| `additional_args` | Array of Objects | Specifying of additional arguments, if any. Give empty array `[]` if unused. + +*\* Note that all outermost-level properties are required properties. Assign empty values (`[]` or `{}`) to them if unused.* +*\* Refer below for inner-level Object specifications.* + +
    task_name & model_name Object

    + +| Keys | Values +|----------|-------------------------------------| +| `"code"` | *(String)* Code for the task/model. +| `"desc"` | *(String)* Name of the task/model in title-case. +| `"cite"` | *(Array of Strings)* Citation(s) for the task/model. + +

    + +
    model_type Object

    + +One of the following three: + +```json +{ + "code": "", + "desc": "Hierarchical" +} +``` +```json +{ + "code": "single", + "desc": "Individual" +} +``` +```json +{ + "code": "multipleB", + "desc": "Multiple-Block Hierarchical" +} +``` + +

    + +
    (Inner-level) Contributor Object

    + +| Keys | Values +|-----------|-------------------------------------| +| `"name"` | *(String)* Name of the contributor. +| `"email"` | *(String)* Email address of the contributor. +| `"link"` | *(String)* Link to the contributor's page. + +

    + +
    (Inner-level) Parameter Object

    + +| Keys | Values +|----------|---------------------------------------------------------| +| `"desc"` | *(String)* Description of the parameter in a few words. +| `"info"` | *(Length-3-Array)* **Lower bound**, **plausible value**, and **upper bound** of the parameter.
    *\* See right below for allowed values.* + +*\* Allowed values (lower bound, plausible value, upper bound):* +- Numbers +- Strings: `"Inf"`, `"-Inf"`, `"exp([0-9.]+)"` +- `null` + +

    + +
    (Inner-level) Additional_arg Object

    + +| Keys | Values +|-------------|----------------------------------------------| +| `"code"` | *(String)* Code for the additional argument. +| `"default"` | *(Number)* Default value of the additional argument. +| `"desc"` | *(String)* One-line description about the additional argument. + +

    + +## JSON Examples + +These are some good examples to start with, if you are completely new. + +| [`gng_m1.json`](./gng_m1.json) | [`choiceRT_ddm_single.json`](./choiceRT_ddm_single.json) | [`prl_fictitious_multipleB.json`](./prl_fictitious_multipleB.json) | [`ts_par4.json`](./ts_par4.json) +|-|-|-|-| +|`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    ~~`contributors`~~
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    `notes`
    ~~`contributors`~~
    `data_columns`
    `parameters`
    ~~`regressors`~~
    ~~`postpreds`~~
    `additional_args` |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    ~~`regressors`~~
    `postpreds`
    `additional_args` + +## JSON Validation + +Validating against the current Schema file is a good basis to see if you've written the model JSON file correctly. +To validate JSON files, you need to have [`jsonschema`][jsonschema] installed; you can install it with `pip install jsonschema`. + +[jsonschema]: https://github.com/Julian/jsonschema + +To validate a single JSON file (e.g. `models/gng_m1.json`): +``` +$ jsonschema -i models/gng_m1.json ModelInformation.schema.json +``` + +To validate all JSON files in directory, use following shell script: +``` +$ ./ValidateAll.sh +``` + +## Automated Python Code Generation + +Once you've (correctly) written the JSON file for a new model, +it's possible to automatically generate the corresponding python code for the new model, +using the python script `WritePython.py`: + +``` +$ ./WritePython.py -h +usage: WritePython.py [-h] [-a] [-v] json_file + +positional arguments: + json_file JSON file of the model to generate corresponding python code + +optional arguments: + -h, --help show this help message and exit + -a, --all write for all json files in directory + -v, --verbose print output to stdout instead of writing to file +``` + +E.g. (to generate `_gng_m1.py` from `gng_m1.json`): +``` +$ ./WritePython.py gng_m1.json +Created file: _gng_m1.py +``` + +To generate python codes for all json files in directory: +``` +$ ./WritePython.py --all . +Created file: _bandit2arm_delta.py +... +Created file: _wcs_sql.py +``` + +## Contribution + +Largely contributed by [Jethro Lee][jethro-lee]. + +[jethro-lee]: https://github.com/dlemfh diff --git a/commons/README.md b/commons/README.md index e400583d..a3ac2fda 100644 --- a/commons/README.md +++ b/commons/README.md @@ -1,154 +1,16 @@ -# Model Information JSON Files +# hBayesDM core files + +- `extdata/`: example data for each task + - `{task_name}_exampleData.txt` +- `models/`: YAML files for model information + - `{task_name}_{model_name}[_{model_type}].yml` +- `stan_files/`: Stan files corresponding to YAML files + - `{task_name}_{model_name}[_{model_type}].stan` +- `templates/`: code templates for R and Python package + - `PY_CODE_TEMPLATE.txt` + - `PY_DOCS_TEMPLATE.txt` + - `PY_TEST_TEMPLATE.txt` + - `R_CODE_TEMPLATE.txt` + - `R_DOCS_TEMPLATE.txt` + - `R_TEST_TEMPLATE.txt` -## JSON Schema - -Schema for the Model Information JSON files is stored in `ModelInformation.schema.json` as a JSON Schema format. - -| Property | Type | Description -|-------------------|---------------------|----------------------------------| -| `task_name` | Object | Informations regarding the task. *See below for **Keys** and **Values**.* -| `model_name` | Object | Informations regarding the model. *See below for **Keys** and **Values**.* -| `model_type` | Object | Modeling-type information. Should be one of the following three:
    - `{"code": "", "desc": "Hierarchical"}`
    - `{"code": "single", "desc": "Individual"}`
    - `{"code": "multipleB", "desc": "Multiple-Block Hierarchical"}` -| `notes` | Array of Strings | Optional notes about the task/model. Give empty array `[]` if unused. -| `contributors` | Array of Objects | Optional specifying of contributors. Give empty array `[]` if unused. -| `data_columns` | Object | **Keys**: names of the necessary data columns for user data.
    - `"subjID"` must always be included.
    - Also include `"block"`, if modeling-type is "multipleB".
    **Values**: one-line descriptions about each data column. -| `parameters` | Object (of Objects) | **Keys**: names of the parameters of this model.
    **Values**: inner-level Object specifying desc and info for each parameter. -| `regressors` | Object | *(Give empty object `{}` if not supported.)*
    **Keys**: names of the regressors of this model.
    **Values**: extracted dimension-size for each regressor. -| `postpreds` | Array of Strings | Name(s) of posterior predictions. Give empty array `[]` if not supported. -| `additional_args` | Array of Objects | Specifying of additional arguments, if any. Give empty array `[]` if unused. - -*\* Note that all outermost-level properties are required properties. Assign empty values (`[]` or `{}`) to them if unused.* -*\* Refer below for inner-level Object specifications.* - -
    task_name & model_name Object

    - -| Keys | Values -|----------|-------------------------------------| -| `"code"` | *(String)* Code for the task/model. -| `"desc"` | *(String)* Name of the task/model in title-case. -| `"cite"` | *(Array of Strings)* Citation(s) for the task/model. - -

    - -
    model_type Object

    - -One of the following three: - -```json -{ - "code": "", - "desc": "Hierarchical" -} -``` -```json -{ - "code": "single", - "desc": "Individual" -} -``` -```json -{ - "code": "multipleB", - "desc": "Multiple-Block Hierarchical" -} -``` - -

    - -
    (Inner-level) Contributor Object

    - -| Keys | Values -|-----------|-------------------------------------| -| `"name"` | *(String)* Name of the contributor. -| `"email"` | *(String)* Email address of the contributor. -| `"link"` | *(String)* Link to the contributor's page. - -

    - -
    (Inner-level) Parameter Object

    - -| Keys | Values -|----------|---------------------------------------------------------| -| `"desc"` | *(String)* Description of the parameter in a few words. -| `"info"` | *(Length-3-Array)* **Lower bound**, **plausible value**, and **upper bound** of the parameter.
    *\* See right below for allowed values.* - -*\* Allowed values (lower bound, plausible value, upper bound):* -- Numbers -- Strings: `"Inf"`, `"-Inf"`, `"exp([0-9.]+)"` -- `null` - -

    - -
    (Inner-level) Additional_arg Object

    - -| Keys | Values -|-------------|----------------------------------------------| -| `"code"` | *(String)* Code for the additional argument. -| `"default"` | *(Number)* Default value of the additional argument. -| `"desc"` | *(String)* One-line description about the additional argument. - -

    - -## JSON Examples - -These are some good examples to start with, if you are completely new. - -| [`gng_m1.json`](./gng_m1.json) | [`choiceRT_ddm_single.json`](./choiceRT_ddm_single.json) | [`prl_fictitious_multipleB.json`](./prl_fictitious_multipleB.json) | [`ts_par4.json`](./ts_par4.json) -|-|-|-|-| -|`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    ~~`contributors`~~
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    `notes`
    ~~`contributors`~~
    `data_columns`
    `parameters`
    ~~`regressors`~~
    ~~`postpreds`~~
    `additional_args` |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    ~~`regressors`~~
    `postpreds`
    `additional_args` - -## JSON Validation - -Validating against the current Schema file is a good basis to see if you've written the model JSON file correctly. -To validate JSON files, you need to have [`jsonschema`][jsonschema] installed; you can install it with `pip install jsonschema`. - -[jsonschema]: https://github.com/Julian/jsonschema - -To validate a single JSON file (e.g. `models/gng_m1.json`): -``` -$ jsonschema -i models/gng_m1.json ModelInformation.schema.json -``` - -To validate all JSON files in directory, use following shell script: -``` -$ ./ValidateAll.sh -``` - -## Automated Python Code Generation - -Once you've (correctly) written the JSON file for a new model, -it's possible to automatically generate the corresponding python code for the new model, -using the python script `WritePython.py`: - -``` -$ ./WritePython.py -h -usage: WritePython.py [-h] [-a] [-v] json_file - -positional arguments: - json_file JSON file of the model to generate corresponding python code - -optional arguments: - -h, --help show this help message and exit - -a, --all write for all json files in directory - -v, --verbose print output to stdout instead of writing to file -``` - -E.g. (to generate `_gng_m1.py` from `gng_m1.json`): -``` -$ ./WritePython.py gng_m1.json -Created file: _gng_m1.py -``` - -To generate python codes for all json files in directory: -``` -$ ./WritePython.py --all . -Created file: _bandit2arm_delta.py -... -Created file: _wcs_sql.py -``` - -## Contribution - -Largely contributed by [Jethro Lee][jethro-lee]. - -[jethro-lee]: https://github.com/dlemfh From 594fcd721b3609a5b891b058166edb6dbaee29e5 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 21:55:14 +0900 Subject: [PATCH 115/163] Update README --- commons/README.md | 43 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/commons/README.md b/commons/README.md index a3ac2fda..16edce6b 100644 --- a/commons/README.md +++ b/commons/README.md @@ -1,7 +1,7 @@ # hBayesDM core files - `extdata/`: example data for each task - - `{task_name}_exampleData.txt` + - `{task_name}[_{model_type}]_exampleData.txt` - `models/`: YAML files for model information - `{task_name}_{model_name}[_{model_type}].yml` - `stan_files/`: Stan files corresponding to YAML files @@ -14,3 +14,44 @@ - `R_DOCS_TEMPLATE.txt` - `R_TEST_TEMPLATE.txt` +## How to add a model + +1. Clone the repository and make new branch from `develop`. +```bash +# Clone the repository +git clone https://github.com/CCS-Lab/hBayesDM +cd hbayesdm + +git checkout develop # Check out the develop branch +git checkout -b feature/{branch_name} # Make new branch from develop +``` +2. Write a Stan code and a YAML file for model information, and append its example data. + - `/commons/stan_files/{task_name}_{model_name}[_{model_type}].stan` + - `/commons/models/{task_name}_{model_name}[_{model_type}].yml` + - `/commons/extdata/{task_name}[_{model_type}]_exampleData.txt` +3. Run `/commons/generate-codes.sh` to generate R and Python codes. Note that your Python +version should be above 3.5, and [`PyYAML`][pyyaml] should be pre-installed. +```bash +cd commons +./generate-codes.sh +``` +4. Implement a function to preprocess data for the model. + - R: `/R/R/preprocess_funcs.R` + - Python: `/Python/hbayesdm/preprocess_funcs.R` +5. (For R) Run `devtools::document()` to apply the new function. +```bash +cd ../R +Rscript -e 'devtools::document()' +``` +6. Install R and Python packages. 
+```bash +# For R +cd ../R +Rscript -e 'devtools::install()' + +# For Python +cd ../Python +python setup.py install +``` + +[pyyaml]: https://pyyaml.org/wiki/PyYAMLDocumentation From cf2910565edf9b84f4948d59825a43b7c86049a3 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 21:58:10 +0900 Subject: [PATCH 116/163] Update README --- commons/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/commons/README.md b/commons/README.md index 16edce6b..9504e7ae 100644 --- a/commons/README.md +++ b/commons/README.md @@ -26,6 +26,7 @@ git checkout develop # Check out the develop branch git checkout -b feature/{branch_name} # Make new branch from develop ``` 2. Write a Stan code and a YAML file for model information, and append its example data. +You can check out [an example YAML file](./example.yml) for model information. - `/commons/stan_files/{task_name}_{model_name}[_{model_type}].stan` - `/commons/models/{task_name}_{model_name}[_{model_type}].yml` - `/commons/extdata/{task_name}[_{model_type}]_exampleData.txt` From d02d4eb9e8b919f8e665072f58086dce2f56597b Mon Sep 17 00:00:00 2001 From: Jaeyeong Jayce Yang Date: Sat, 24 Aug 2019 21:59:56 +0900 Subject: [PATCH 117/163] Update README.md --- commons/README.md | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/commons/README.md b/commons/README.md index 9504e7ae..058de6aa 100644 --- a/commons/README.md +++ b/commons/README.md @@ -1,12 +1,12 @@ # hBayesDM core files -- `extdata/`: example data for each task +- **`extdata/`: example data for each task** - `{task_name}[_{model_type}]_exampleData.txt` -- `models/`: YAML files for model information +- **`models/`: YAML files for model information** - `{task_name}_{model_name}[_{model_type}].yml` -- `stan_files/`: Stan files corresponding to YAML files +- **`stan_files/`: Stan files corresponding to YAML files** - `{task_name}_{model_name}[_{model_type}].stan` -- `templates/`: code templates for R and Python package +- **`templates/`: code templates for R and Python package** - `PY_CODE_TEMPLATE.txt` - `PY_DOCS_TEMPLATE.txt` - `PY_TEST_TEMPLATE.txt` @@ -16,7 +16,7 @@ ## How to add a model -1. Clone the repository and make new branch from `develop`. +1. **Clone the repository and make new branch from `develop`.** ```bash # Clone the repository git clone https://github.com/CCS-Lab/hBayesDM @@ -25,26 +25,26 @@ cd hbayesdm git checkout develop # Check out the develop branch git checkout -b feature/{branch_name} # Make new branch from develop ``` -2. Write a Stan code and a YAML file for model information, and append its example data. -You can check out [an example YAML file](./example.yml) for model information. +2. **Write a Stan code and a YAML file for model information, and append its example data. +You can check out [an example YAML file](./example.yml) for model information.** - `/commons/stan_files/{task_name}_{model_name}[_{model_type}].stan` - `/commons/models/{task_name}_{model_name}[_{model_type}].yml` - `/commons/extdata/{task_name}[_{model_type}]_exampleData.txt` -3. Run `/commons/generate-codes.sh` to generate R and Python codes. Note that your Python -version should be above 3.5, and [`PyYAML`][pyyaml] should be pre-installed. +3. **Run `/commons/generate-codes.sh` to generate R and Python codes. Note that your Python +version should be above 3.5, and [`PyYAML`][pyyaml] should be pre-installed.** ```bash cd commons ./generate-codes.sh ``` -4. Implement a function to preprocess data for the model. +4. 
**Implement a function to preprocess data for the model.** - R: `/R/R/preprocess_funcs.R` - Python: `/Python/hbayesdm/preprocess_funcs.R` -5. (For R) Run `devtools::document()` to apply the new function. +5. **(For R) Run `devtools::document()` to apply the new function.** ```bash cd ../R Rscript -e 'devtools::document()' ``` -6. Install R and Python packages. +6. **Install R and Python packages.** ```bash # For R cd ../R From 9be4c8b3b168772892587ab3d45f9d1e9019fddd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 22:00:59 +0900 Subject: [PATCH 118/163] Update example --- commons/example.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/commons/example.yml b/commons/example.yml index 54e8b154..c63b1353 100644 --- a/commons/example.yml +++ b/commons/example.yml @@ -21,12 +21,15 @@ task_name: desc: 2-Armed Bandit Task # description (title-case) cite: # A list of citations. They should be APA-formatted. - Doe, J., & Doe, J. (2019). A great paper. Good journal, 1(1), 1-2. + - Doe, J., & Doe, J. (2019). A great paper. Good journal, 1(1), 1-2. # Model information. model_name: code: delta # code for the model desc: Rescorla-Wagner (Delta) Model # description (title-case) cite: # A list of citations. They should be APA-formatted. + - Doe, J., & Doe, J. (2019). A great paper. Good journal, 1(1), 1-2. + - Doe, J., & Doe, J. (2019). A great paper. Good journal, 1(1), 1-2. # Model type. # For now, it should be one among three types: From 4802c23dc553b49054f4121fd36c42609eba36e3 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Sat, 24 Aug 2019 22:01:45 +0900 Subject: [PATCH 119/163] Remove old files --- commons/README-json.md | 154 ------------------- commons/misc/ModelInformation.schema.json | 171 ---------------------- commons/misc/validate-models.sh | 7 - 3 files changed, 332 deletions(-) delete mode 100644 commons/README-json.md delete mode 100644 commons/misc/ModelInformation.schema.json delete mode 100755 commons/misc/validate-models.sh diff --git a/commons/README-json.md b/commons/README-json.md deleted file mode 100644 index e400583d..00000000 --- a/commons/README-json.md +++ /dev/null @@ -1,154 +0,0 @@ -# Model Information JSON Files - -## JSON Schema - -Schema for the Model Information JSON files is stored in `ModelInformation.schema.json` as a JSON Schema format. - -| Property | Type | Description -|-------------------|---------------------|----------------------------------| -| `task_name` | Object | Informations regarding the task. *See below for **Keys** and **Values**.* -| `model_name` | Object | Informations regarding the model. *See below for **Keys** and **Values**.* -| `model_type` | Object | Modeling-type information. Should be one of the following three:
    - `{"code": "", "desc": "Hierarchical"}`
    - `{"code": "single", "desc": "Individual"}`
    - `{"code": "multipleB", "desc": "Multiple-Block Hierarchical"}` -| `notes` | Array of Strings | Optional notes about the task/model. Give empty array `[]` if unused. -| `contributors` | Array of Objects | Optional specifying of contributors. Give empty array `[]` if unused. -| `data_columns` | Object | **Keys**: names of the necessary data columns for user data.
    - `"subjID"` must always be included.
    - Also include `"block"`, if modeling-type is "multipleB".
    **Values**: one-line descriptions about each data column. -| `parameters` | Object (of Objects) | **Keys**: names of the parameters of this model.
    **Values**: inner-level Object specifying desc and info for each parameter. -| `regressors` | Object | *(Give empty object `{}` if not supported.)*
    **Keys**: names of the regressors of this model.
    **Values**: extracted dimension-size for each regressor. -| `postpreds` | Array of Strings | Name(s) of posterior predictions. Give empty array `[]` if not supported. -| `additional_args` | Array of Objects | Specifying of additional arguments, if any. Give empty array `[]` if unused. - -*\* Note that all outermost-level properties are required properties. Assign empty values (`[]` or `{}`) to them if unused.* -*\* Refer below for inner-level Object specifications.* - -
    task_name & model_name Object

    - -| Keys | Values -|----------|-------------------------------------| -| `"code"` | *(String)* Code for the task/model. -| `"desc"` | *(String)* Name of the task/model in title-case. -| `"cite"` | *(Array of Strings)* Citation(s) for the task/model. - -

    - -
    model_type Object

    - -One of the following three: - -```json -{ - "code": "", - "desc": "Hierarchical" -} -``` -```json -{ - "code": "single", - "desc": "Individual" -} -``` -```json -{ - "code": "multipleB", - "desc": "Multiple-Block Hierarchical" -} -``` - -

    - -
    (Inner-level) Contributor Object

    - -| Keys | Values -|-----------|-------------------------------------| -| `"name"` | *(String)* Name of the contributor. -| `"email"` | *(String)* Email address of the contributor. -| `"link"` | *(String)* Link to the contributor's page. - -

    - -
    (Inner-level) Parameter Object

    - -| Keys | Values -|----------|---------------------------------------------------------| -| `"desc"` | *(String)* Description of the parameter in a few words. -| `"info"` | *(Length-3-Array)* **Lower bound**, **plausible value**, and **upper bound** of the parameter.
    *\* See right below for allowed values.* - -*\* Allowed values (lower bound, plausible value, upper bound):* -- Numbers -- Strings: `"Inf"`, `"-Inf"`, `"exp([0-9.]+)"` -- `null` - -

    - -
    (Inner-level) Additional_arg Object

    - -| Keys | Values -|-------------|----------------------------------------------| -| `"code"` | *(String)* Code for the additional argument. -| `"default"` | *(Number)* Default value of the additional argument. -| `"desc"` | *(String)* One-line description about the additional argument. - -

    - -## JSON Examples - -These are some good examples to start with, if you are completely new. - -| [`gng_m1.json`](./gng_m1.json) | [`choiceRT_ddm_single.json`](./choiceRT_ddm_single.json) | [`prl_fictitious_multipleB.json`](./prl_fictitious_multipleB.json) | [`ts_par4.json`](./ts_par4.json) -|-|-|-|-| -|`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    ~~`contributors`~~
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    `notes`
    ~~`contributors`~~
    `data_columns`
    `parameters`
    ~~`regressors`~~
    ~~`postpreds`~~
    `additional_args` |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    `regressors`
    `postpreds`
    ~~`additional_args`~~ |`task_name`
    `model_name`
    `model_type`
    ~~`notes`~~
    `contributors`
    `data_columns`
    `parameters`
    ~~`regressors`~~
    `postpreds`
    `additional_args` - -## JSON Validation - -Validating against the current Schema file is a good basis to see if you've written the model JSON file correctly. -To validate JSON files, you need to have [`jsonschema`][jsonschema] installed; you can install it with `pip install jsonschema`. - -[jsonschema]: https://github.com/Julian/jsonschema - -To validate a single JSON file (e.g. `models/gng_m1.json`): -``` -$ jsonschema -i models/gng_m1.json ModelInformation.schema.json -``` - -To validate all JSON files in directory, use following shell script: -``` -$ ./ValidateAll.sh -``` - -## Automated Python Code Generation - -Once you've (correctly) written the JSON file for a new model, -it's possible to automatically generate the corresponding python code for the new model, -using the python script `WritePython.py`: - -``` -$ ./WritePython.py -h -usage: WritePython.py [-h] [-a] [-v] json_file - -positional arguments: - json_file JSON file of the model to generate corresponding python code - -optional arguments: - -h, --help show this help message and exit - -a, --all write for all json files in directory - -v, --verbose print output to stdout instead of writing to file -``` - -E.g. (to generate `_gng_m1.py` from `gng_m1.json`): -``` -$ ./WritePython.py gng_m1.json -Created file: _gng_m1.py -``` - -To generate python codes for all json files in directory: -``` -$ ./WritePython.py --all . -Created file: _bandit2arm_delta.py -... -Created file: _wcs_sql.py -``` - -## Contribution - -Largely contributed by [Jethro Lee][jethro-lee]. - -[jethro-lee]: https://github.com/dlemfh diff --git a/commons/misc/ModelInformation.schema.json b/commons/misc/ModelInformation.schema.json deleted file mode 100644 index dd760b3f..00000000 --- a/commons/misc/ModelInformation.schema.json +++ /dev/null @@ -1,171 +0,0 @@ -{ - "title": "Model Information Schema", - "description": "Written by Jethro Lee", - "type": "object", - "required": ["task_name", "model_name", "model_type", "notes", "contributors", "data_columns", "parameters", "regressors", "postpreds", "additional_args"], - "properties": { - "task_name": { - "$ref": "#/definitions/_name" - }, - "model_name": { - "$ref": "#/definitions/_name" - }, - "model_type": { - "type": "object", - "enum": [ - { - "code": "", - "desc": "Hierarchical" - }, - { - "code": "single", - "desc": "Individual" - }, - { - "code": "multipleB", - "desc": "Multiple-Block Hierarchical" - } - ] - }, - "notes": { - "type": "array", - "items": { - "type": "string", - "minLength": 1 - } - }, - "contributors": { - "type": "array", - "items": { - "type": "object", - "required": ["name", "email", "link"], - "properties": { - "name": { - "type": "string", - "minLength": 1 - }, - "email": { - "type": "string", - "minLength": 1, - "format": "email" - }, - "link": { - "type": "string", - "minLength": 1, - "format": "uri" - } - }, - "additionalProperties": false - } - }, - "data_columns": { - "type": "object", - "required": ["subjID"], - "patternProperties": { - "^[a-zA-Z0-9_]+$": { - "type": "string", - "minLength": 1 - } - }, - "additionalProperties": false - }, - "parameters": { - "type": "object", - "patternProperties": { - "^[a-zA-Z0-9_]+$": { - "type": "object", - "required": ["desc", "info"], - "properties": { - "desc": { - "type": "string", - "minLength": 1 - }, - "info": { - "type": "array", - "minItems": 3, - "maxItems": 3, - "items": { - "type": ["number", "string", "null"], - "description": "**Edit below to add more allowed patterns**", - "pattern": "^(-?Inf|exp\\([0-9.]+\\))$" - } - } - }, 
- "additionalProperties": false - } - }, - "additionalProperties": false - }, - "regressors": { - "type": "object", - "patternProperties": { - "^[a-zA-Z0-9_]+$": { - "type": "integer", - "description": "**Edit below to add more allowed values**", - "enum": [2, 3] - } - }, - "additionalProperties": false - }, - "postpreds": { - "type": "array", - "items": { - "type": "string", - "description": "**Edit below to add more allowed values**", - "enum": ["y_pred", "y_pred_step1", "y_pred_step2"] - } - }, - "additional_args": { - "type": "array", - "items": { - "type": "object", - "required": ["code", "default", "desc"], - "properties": { - "code": { - "type": "string", - "pattern": "^[a-zA-Z0-9_]+$" - }, - "default": { - "type": "number" - }, - "desc": { - "type": "string", - "minLength": 1 - } - }, - "additionalProperties": false - } - } - }, - "additionalProperties": false, - "if": { - "properties": {"model_type": {"properties": {"code": {"const": "multipleB"}}}} - }, - "then": { - "properties": {"data_columns": {"required": ["block"]}} - }, - "definitions": { - "_name": { - "type": "object", - "required": ["code", "desc", "cite"], - "properties": { - "code": { - "type": "string", - "pattern": "^[a-zA-Z0-9_]+$" - }, - "desc": { - "type": "string", - "minLength": 1 - }, - "cite": { - "type": "array", - "items": { - "type": "string", - "minLength": 1 - } - } - }, - "additionalProperties": false - } - } -} diff --git a/commons/misc/validate-models.sh b/commons/misc/validate-models.sh deleted file mode 100755 index 3dbbc108..00000000 --- a/commons/misc/validate-models.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash -# Written by Jetho Lee - -for i in `ls models/[a-z]*.json`; do - echo "========== $i ==========" - jsonschema -i "$i" ModelInformation.schema.json -done From ac3731b619ad7e2ab9a5f0d5045b963329f0b1f6 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 01:38:35 +0900 Subject: [PATCH 120/163] Combine data, datafile, use_example, choose_data into data --- Python/hbayesdm/base.py | 103 +++++++----------- Python/hbayesdm/models/_bandit2arm_delta.py | 6 +- .../models/_bandit4arm2_kalman_filter.py | 6 +- .../hbayesdm/models/_bandit4arm_2par_lapse.py | 6 +- Python/hbayesdm/models/_bandit4arm_4par.py | 6 +- Python/hbayesdm/models/_bandit4arm_lapse.py | 6 +- .../models/_bandit4arm_lapse_decay.py | 6 +- .../models/_bandit4arm_singleA_lapse.py | 6 +- Python/hbayesdm/models/_bart_par4.py | 6 +- Python/hbayesdm/models/_choiceRT_ddm.py | 6 +- .../hbayesdm/models/_choiceRT_ddm_single.py | 6 +- Python/hbayesdm/models/_cra_exp.py | 6 +- Python/hbayesdm/models/_cra_linear.py | 6 +- Python/hbayesdm/models/_dbdm_prob_weight.py | 6 +- Python/hbayesdm/models/_dd_cs.py | 6 +- Python/hbayesdm/models/_dd_cs_single.py | 6 +- Python/hbayesdm/models/_dd_exp.py | 6 +- Python/hbayesdm/models/_dd_hyperbolic.py | 6 +- .../hbayesdm/models/_dd_hyperbolic_single.py | 6 +- Python/hbayesdm/models/_gng_m1.py | 6 +- Python/hbayesdm/models/_gng_m2.py | 6 +- Python/hbayesdm/models/_gng_m3.py | 6 +- Python/hbayesdm/models/_gng_m4.py | 6 +- Python/hbayesdm/models/_igt_orl.py | 6 +- Python/hbayesdm/models/_igt_pvl_decay.py | 6 +- Python/hbayesdm/models/_igt_pvl_delta.py | 6 +- Python/hbayesdm/models/_igt_vpp.py | 6 +- Python/hbayesdm/models/_peer_ocu.py | 6 +- Python/hbayesdm/models/_prl_ewa.py | 6 +- Python/hbayesdm/models/_prl_fictitious.py | 6 +- .../models/_prl_fictitious_multipleB.py | 6 +- Python/hbayesdm/models/_prl_fictitious_rp.py | 6 +- .../hbayesdm/models/_prl_fictitious_rp_woa.py | 6 +- 
Python/hbayesdm/models/_prl_fictitious_woa.py | 6 +- Python/hbayesdm/models/_prl_rp.py | 6 +- Python/hbayesdm/models/_prl_rp_multipleB.py | 6 +- Python/hbayesdm/models/_pst_gainloss_Q.py | 6 +- Python/hbayesdm/models/_ra_noLA.py | 6 +- Python/hbayesdm/models/_ra_noRA.py | 6 +- Python/hbayesdm/models/_ra_prospect.py | 6 +- Python/hbayesdm/models/_rdt_happiness.py | 6 +- Python/hbayesdm/models/_ts_par4.py | 6 +- Python/hbayesdm/models/_ts_par6.py | 6 +- Python/hbayesdm/models/_ts_par7.py | 6 +- Python/hbayesdm/models/_ug_bayes.py | 6 +- Python/hbayesdm/models/_ug_delta.py | 6 +- Python/hbayesdm/models/_wcs_sql.py | 6 +- Python/tests/test_bandit2arm_delta.py | 2 +- .../tests/test_bandit4arm2_kalman_filter.py | 2 +- Python/tests/test_bandit4arm_2par_lapse.py | 2 +- Python/tests/test_bandit4arm_4par.py | 2 +- Python/tests/test_bandit4arm_lapse.py | 2 +- Python/tests/test_bandit4arm_lapse_decay.py | 2 +- Python/tests/test_bandit4arm_singleA_lapse.py | 2 +- Python/tests/test_bart_par4.py | 2 +- Python/tests/test_choiceRT_ddm.py | 2 +- Python/tests/test_choiceRT_ddm_single.py | 2 +- Python/tests/test_cra_exp.py | 2 +- Python/tests/test_cra_linear.py | 2 +- Python/tests/test_dbdm_prob_weight.py | 2 +- Python/tests/test_dd_cs.py | 2 +- Python/tests/test_dd_cs_single.py | 2 +- Python/tests/test_dd_exp.py | 2 +- Python/tests/test_dd_hyperbolic.py | 2 +- Python/tests/test_dd_hyperbolic_single.py | 2 +- Python/tests/test_gng_m1.py | 2 +- Python/tests/test_gng_m2.py | 2 +- Python/tests/test_gng_m3.py | 2 +- Python/tests/test_gng_m4.py | 2 +- Python/tests/test_igt_orl.py | 2 +- Python/tests/test_igt_pvl_decay.py | 2 +- Python/tests/test_igt_pvl_delta.py | 2 +- Python/tests/test_igt_vpp.py | 2 +- Python/tests/test_peer_ocu.py | 2 +- Python/tests/test_prl_ewa.py | 2 +- Python/tests/test_prl_fictitious.py | 2 +- Python/tests/test_prl_fictitious_multipleB.py | 2 +- Python/tests/test_prl_fictitious_rp.py | 2 +- Python/tests/test_prl_fictitious_rp_woa.py | 2 +- Python/tests/test_prl_fictitious_woa.py | 2 +- Python/tests/test_prl_rp.py | 2 +- Python/tests/test_prl_rp_multipleB.py | 2 +- Python/tests/test_pst_gainloss_Q.py | 2 +- Python/tests/test_ra_noLA.py | 2 +- Python/tests/test_ra_noRA.py | 2 +- Python/tests/test_ra_prospect.py | 2 +- Python/tests/test_rdt_happiness.py | 2 +- Python/tests/test_ts_par4.py | 2 +- Python/tests/test_ts_par6.py | 2 +- Python/tests/test_ts_par7.py | 2 +- Python/tests/test_ug_bayes.py | 2 +- Python/tests/test_ug_delta.py | 2 +- Python/tests/test_wcs_sql.py | 2 +- R/R/hBayesDM_model.R | 40 ++++--- R/docs/reference/bandit2arm_delta.html | 29 ++--- .../reference/bandit4arm2_kalman_filter.html | 29 ++--- R/docs/reference/bandit4arm_2par_lapse.html | 29 ++--- R/docs/reference/bandit4arm_4par.html | 29 ++--- R/docs/reference/bandit4arm_lapse.html | 29 ++--- R/docs/reference/bandit4arm_lapse_decay.html | 29 ++--- .../reference/bandit4arm_singleA_lapse.html | 29 ++--- R/docs/reference/bart_par4.html | 32 +++--- R/docs/reference/choiceRT_ddm.html | 32 +++--- R/docs/reference/choiceRT_ddm_single.html | 29 ++--- R/docs/reference/cra_exp.html | 32 +++--- R/docs/reference/cra_linear.html | 32 +++--- R/docs/reference/dbdm_prob_weight.html | 29 ++--- R/docs/reference/dd_cs.html | 32 +++--- R/docs/reference/dd_cs_single.html | 32 +++--- R/docs/reference/dd_exp.html | 32 +++--- R/docs/reference/dd_hyperbolic.html | 29 ++--- R/docs/reference/dd_hyperbolic_single.html | 29 ++--- R/docs/reference/gng_m1.html | 32 +++--- R/docs/reference/gng_m2.html | 32 +++--- R/docs/reference/gng_m3.html | 32 +++--- 
R/docs/reference/gng_m4.html | 32 +++--- R/docs/reference/igt_orl.html | 32 +++--- R/docs/reference/igt_pvl_decay.html | 29 ++--- R/docs/reference/igt_pvl_delta.html | 29 ++--- R/docs/reference/igt_vpp.html | 32 +++--- R/docs/reference/peer_ocu.html | 32 +++--- R/docs/reference/prl_ewa.html | 32 +++--- R/docs/reference/prl_fictitious.html | 29 ++--- .../reference/prl_fictitious_multipleB.html | 29 ++--- R/docs/reference/prl_fictitious_rp.html | 29 ++--- R/docs/reference/prl_fictitious_rp_woa.html | 29 ++--- R/docs/reference/prl_fictitious_woa.html | 29 ++--- R/docs/reference/prl_rp.html | 32 +++--- R/docs/reference/prl_rp_multipleB.html | 29 ++--- R/docs/reference/pst_gainloss_Q.html | 29 ++--- R/docs/reference/ra_noLA.html | 32 +++--- R/docs/reference/ra_noRA.html | 32 +++--- R/docs/reference/ra_prospect.html | 32 +++--- R/docs/reference/rdt_happiness.html | 29 ++--- R/docs/reference/ts_par4.html | 32 +++--- R/docs/reference/ts_par6.html | 32 +++--- R/docs/reference/ts_par7.html | 32 +++--- R/docs/reference/ug_bayes.html | 32 +++--- R/docs/reference/ug_delta.html | 32 +++--- R/docs/reference/wcs_sql.html | 32 +++--- R/man-roxygen/model-documentation.R | 15 +-- R/man/bandit2arm_delta.Rd | 23 ++-- R/man/bandit4arm2_kalman_filter.Rd | 23 ++-- R/man/bandit4arm_2par_lapse.Rd | 23 ++-- R/man/bandit4arm_4par.Rd | 23 ++-- R/man/bandit4arm_lapse.Rd | 23 ++-- R/man/bandit4arm_lapse_decay.Rd | 23 ++-- R/man/bandit4arm_singleA_lapse.Rd | 23 ++-- R/man/bart_par4.Rd | 26 ++--- R/man/choiceRT_ddm.Rd | 26 ++--- R/man/choiceRT_ddm_single.Rd | 23 ++-- R/man/cra_exp.Rd | 26 ++--- R/man/cra_linear.Rd | 26 ++--- R/man/dbdm_prob_weight.Rd | 23 ++-- R/man/dd_cs.Rd | 26 ++--- R/man/dd_cs_single.Rd | 26 ++--- R/man/dd_exp.Rd | 26 ++--- R/man/dd_hyperbolic.Rd | 23 ++-- R/man/dd_hyperbolic_single.Rd | 23 ++-- R/man/gng_m1.Rd | 26 ++--- R/man/gng_m2.Rd | 26 ++--- R/man/gng_m3.Rd | 26 ++--- R/man/gng_m4.Rd | 26 ++--- R/man/igt_orl.Rd | 26 ++--- R/man/igt_pvl_decay.Rd | 23 ++-- R/man/igt_pvl_delta.Rd | 23 ++-- R/man/igt_vpp.Rd | 26 ++--- R/man/peer_ocu.Rd | 26 ++--- R/man/prl_ewa.Rd | 26 ++--- R/man/prl_fictitious.Rd | 23 ++-- R/man/prl_fictitious_multipleB.Rd | 23 ++-- R/man/prl_fictitious_rp.Rd | 23 ++-- R/man/prl_fictitious_rp_woa.Rd | 23 ++-- R/man/prl_fictitious_woa.Rd | 23 ++-- R/man/prl_rp.Rd | 26 ++--- R/man/prl_rp_multipleB.Rd | 23 ++-- R/man/pst_gainloss_Q.Rd | 23 ++-- R/man/ra_noLA.Rd | 26 ++--- R/man/ra_noRA.Rd | 26 ++--- R/man/ra_prospect.Rd | 26 ++--- R/man/rdt_happiness.Rd | 23 ++-- R/man/ts_par4.Rd | 26 ++--- R/man/ts_par6.Rd | 26 ++--- R/man/ts_par7.Rd | 26 ++--- R/man/ug_bayes.Rd | 26 ++--- R/man/ug_delta.Rd | 26 ++--- R/man/wcs_sql.Rd | 26 ++--- R/tests/testthat/test_bandit2arm_delta.R | 3 +- .../testthat/test_bandit4arm2_kalman_filter.R | 3 +- R/tests/testthat/test_bandit4arm_2par_lapse.R | 3 +- R/tests/testthat/test_bandit4arm_4par.R | 3 +- R/tests/testthat/test_bandit4arm_lapse.R | 3 +- .../testthat/test_bandit4arm_lapse_decay.R | 3 +- .../testthat/test_bandit4arm_singleA_lapse.R | 3 +- R/tests/testthat/test_bart_par4.R | 3 +- R/tests/testthat/test_choiceRT_ddm.R | 3 +- R/tests/testthat/test_choiceRT_ddm_single.R | 3 +- R/tests/testthat/test_cra_exp.R | 3 +- R/tests/testthat/test_cra_linear.R | 3 +- R/tests/testthat/test_dbdm_prob_weight.R | 3 +- R/tests/testthat/test_dd_cs.R | 3 +- R/tests/testthat/test_dd_cs_single.R | 3 +- R/tests/testthat/test_dd_exp.R | 3 +- R/tests/testthat/test_dd_hyperbolic.R | 3 +- R/tests/testthat/test_dd_hyperbolic_single.R | 3 +- R/tests/testthat/test_gng_m1.R | 3 +- 
R/tests/testthat/test_gng_m2.R | 3 +- R/tests/testthat/test_gng_m3.R | 3 +- R/tests/testthat/test_gng_m4.R | 3 +- R/tests/testthat/test_igt_orl.R | 3 +- R/tests/testthat/test_igt_pvl_decay.R | 3 +- R/tests/testthat/test_igt_pvl_delta.R | 3 +- R/tests/testthat/test_igt_vpp.R | 3 +- R/tests/testthat/test_peer_ocu.R | 3 +- R/tests/testthat/test_prl_ewa.R | 3 +- R/tests/testthat/test_prl_fictitious.R | 3 +- .../testthat/test_prl_fictitious_multipleB.R | 3 +- R/tests/testthat/test_prl_fictitious_rp.R | 3 +- R/tests/testthat/test_prl_fictitious_rp_woa.R | 3 +- R/tests/testthat/test_prl_fictitious_woa.R | 3 +- R/tests/testthat/test_prl_rp.R | 3 +- R/tests/testthat/test_prl_rp_multipleB.R | 3 +- R/tests/testthat/test_pst_gainloss_Q.R | 3 +- R/tests/testthat/test_ra_noLA.R | 3 +- R/tests/testthat/test_ra_noRA.R | 3 +- R/tests/testthat/test_ra_prospect.R | 3 +- R/tests/testthat/test_rdt_happiness.R | 3 +- R/tests/testthat/test_ts_par4.R | 3 +- R/tests/testthat/test_ts_par6.R | 3 +- R/tests/testthat/test_ts_par7.R | 3 +- R/tests/testthat/test_ug_bayes.R | 3 +- R/tests/testthat/test_ug_delta.R | 3 +- R/tests/testthat/test_wcs_sql.R | 3 +- commons/templates/PY_CODE_TEMPLATE.txt | 6 +- commons/templates/PY_TEST_TEMPLATE.txt | 2 +- commons/templates/R_TEST_TEMPLATE.txt | 3 +- 236 files changed, 1322 insertions(+), 1895 deletions(-) diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 54da6b49..54dc4164 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -61,14 +61,6 @@ def __init__(self, model, all_ind_pars, par_vals, fit, raw_data, model_regressor \ = self._run(**kwargs) - # Assign results as attributes - self.__model = model - self.__all_ind_pars = all_ind_pars - self.__par_vals = par_vals - self.__fit = fit - self.__raw_data = raw_data - self.__model_regressor = model_regressor - @property def task_name(self) -> str: return self.__task_name @@ -130,8 +122,6 @@ def model_regressor(self) -> Dict: return self.__model_regressor def _run(self, - example: bool = False, - datafile: str = None, data: pd.DataFrame = None, niter: int = 4000, nwarmup: int = 1000, @@ -146,18 +136,13 @@ def _run(self, adapt_delta: float = 0.95, stepsize: float = 1, max_treedepth: int = 10, - **additional_args: Any) -> Tuple[str, - pd.DataFrame, - OrderedDict, - Any, - pd.DataFrame, - Dict]: + **additional_args: Any) \ + -> Tuple[str, pd.DataFrame, OrderedDict, Any, Dict]: """Run the hbayesdm modeling function.""" self._check_regressor(model_regressor) self._check_postpred(inc_postpred) - raw_data, initial_columns = self._handle_data_args( - example, datafile, data) + raw_data, initial_columns = self._handle_data_args(data) insensitive_data_columns = self._get_insensitive_data_columns() self._check_data_columns(raw_data, insensitive_data_columns) @@ -179,7 +164,7 @@ def _run(self, ncore = self._set_number_of_cores(ncore) self._print_for_user( - model, example, datafile, data, vb, nchain, ncore, niter, nwarmup, + model, data, vb, nchain, ncore, niter, nwarmup, general_info, additional_args, model_regressor) sm = self._designate_stan_model(model) @@ -197,6 +182,14 @@ def _run(self, self._revert_initial_columns(raw_data, initial_columns) self._inform_completion() + # Assign results as attributes + self.__model = model + self.__all_ind_pars = all_ind_pars + self.__par_vals = par_vals + self.__fit = fit + self.__raw_data = raw_data + self.__model_regressor = model_regressor + return model, all_ind_pars, par_vals, fit, raw_data, model_regressor def _check_regressor(self, requested_by_user: bool): @@ 
@@ -223,20 +216,14 @@ def _check_postpred(self, requested_by_user: bool):
             raise RuntimeError(
                 'Posterior predictions are not yet available for this model.')
 
-    def _handle_data_args(self,
-                          example: bool,
-                          datafile: str,
-                          data: pd.DataFrame) -> Tuple[pd.DataFrame, List]:
+    def _handle_data_args(self, data) -> Tuple[pd.DataFrame, List]:
         """Handle user data arguments and return raw_data.
 
         Parameters
         ----------
-        example : bool
-            Whether to use example data.
-        datafile : str
-            String of filepath for the data file.
-        data : pandas.DataFrame
+        data : Union[pandas.DataFrame, str]
             Pandas DataFrame object that holds the data.
+            String of filepath for the data file.
 
         Returns
         -------
@@ -245,34 +232,30 @@ def _handle_data_args(self,
         initial_columns : List
             Initial column names of raw data, as given by the user.
         """
-        # Check the number of valid arguments (which should be 1)
-        if int(example) \
-                + int(datafile is not None) \
-                + int(data is not None) != 1:
-            raise RuntimeError(
-                'Please give one of these arguments: '
-                'example, datafile, or data.')
-
-        if data is not None:  # Use given data as raw_data
+        if isinstance(data, pd.DataFrame):
             if not isinstance(data, pd.DataFrame):
                 raise RuntimeError(
                     'Please provide `data` argument as a pandas.DataFrame.')
             raw_data = data
 
-        elif datafile is not None:  # Load data from given filepath
-            if datafile.endswith('.csv'):
-                raw_data = pd.read_csv(datafile)
-            else:  # Read the file as a tsv format
-                raw_data = pd.read_csv(datafile, sep='\t')
-
-        else:  # (example == True) Load example data
-            if self.model_type == '':
-                filename = '%s_exampleData.txt' % self.task_name
+        elif isinstance(data, str):
+            if data == "example":
+                if self.model_type == '':
+                    filename = '%s_exampleData.txt' % self.task_name
+                else:
+                    filename = '%s_%s_exampleData.txt' % (
+                        self.task_name, self.model_type)
+                example_data = PATH_EXTDATA / filename
+                raw_data = pd.read_csv(example_data, sep='\t')
             else:
-                filename = '%s_%s_exampleData.txt' % (
-                    self.task_name, self.model_type)
-            example_data = PATH_EXTDATA / filename
-            raw_data = pd.read_csv(example_data, sep='\t')
+                if data.endswith('.csv'):
+                    raw_data = pd.read_csv(data)
+                else:  # Read the file as a tsv format
+                    raw_data = pd.read_csv(data, sep='\t')
+
+        else:
+            raise RuntimeError(
+                'Invalid `data` argument given: ' + str(data))
 
         # Save initial column names of raw data for later
         initial_columns = list(raw_data.columns)
@@ -566,20 +549,16 @@ def _set_number_of_cores(self, ncore: int) -> int:
                 return local_cores
         return ncore
 
-    def _print_for_user(self, model: str, example: bool, datafile: str,
-                        data: pd.DataFrame, vb: bool, nchain: int, ncore: int,
-                        niter: int, nwarmup: int, general_info: Dict,
-                        additional_args: Dict, model_regressor: bool):
+    def _print_for_user(self, model: str, data: pd.DataFrame, vb: bool,
+                        nchain: int, ncore: int, niter: int, nwarmup: int,
+                        general_info: Dict, additional_args: Dict,
+                        model_regressor: bool):
         """Print information for user.
 
         Parameters
         ----------
         model
             Full name of model.
-        example
-            Whether to use example data.
-        datafile
-            String of filepath for data file.
         data
             Pandas DataFrame object holding user data.
vb @@ -601,12 +580,10 @@ def _print_for_user(self, model: str, example: bool, datafile: str, """ print() print('Model =', model) - if example: - print('Data = example') - elif datafile: - print('Data =', datafile) + if isinstance(data, pd.DataFrame): + print('Data = ') else: - print('Data =', object.__repr__(data)) + print('Data =', str(data)) print() print('Details:') if vb: diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py index ee8f36b5..5d1eec0d 100644 --- a/Python/hbayesdm/models/_bandit2arm_delta.py +++ b/Python/hbayesdm/models/_bandit2arm_delta.py @@ -43,9 +43,7 @@ def __init__(self, **kwargs): def bandit2arm_delta( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -220,8 +218,6 @@ def bandit2arm_delta( print_fit(output) """ return Bandit2ArmDelta( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py index 06b31c0d..5b664a14 100644 --- a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py +++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py @@ -51,9 +51,7 @@ def __init__(self, **kwargs): def bandit4arm2_kalman_filter( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def bandit4arm2_kalman_filter( print_fit(output) """ return Bandit4Arm2KalmanFilter( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py index e642cc9a..afb0c61b 100644 --- a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py @@ -46,9 +46,7 @@ def __init__(self, **kwargs): def bandit4arm_2par_lapse( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -223,8 +221,6 @@ def bandit4arm_2par_lapse( print_fit(output) """ return Bandit4Arm2ParLapse( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py index 5ef259fb..9a5556e3 100644 --- a/Python/hbayesdm/models/_bandit4arm_4par.py +++ b/Python/hbayesdm/models/_bandit4arm_4par.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def bandit4arm_4par( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def bandit4arm_4par( print_fit(output) """ return Bandit4Arm4Par( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py index f982d17a..63b80a7c 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse.py @@ -50,9 +50,7 @@ def __init__(self, **kwargs): def bandit4arm_lapse( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int 
= 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def bandit4arm_lapse( print_fit(output) """ return Bandit4ArmLapse( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py index 907f7e6e..9c916ec6 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py @@ -52,9 +52,7 @@ def __init__(self, **kwargs): def bandit4arm_lapse_decay( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -229,8 +227,6 @@ def bandit4arm_lapse_decay( print_fit(output) """ return Bandit4ArmLapseDecay( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py index 41bef90a..d25435ea 100644 --- a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def bandit4arm_singleA_lapse( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def bandit4arm_singleA_lapse( print_fit(output) """ return Bandit4ArmSingleaLapse( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index 95b67396..3397e91b 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def bart_par4( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -228,8 +226,6 @@ def bart_par4( print_fit(output) """ return BartPar4( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py index dedfc782..85275f8a 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm.py +++ b/Python/hbayesdm/models/_choiceRT_ddm.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def choiceRT_ddm( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -229,8 +227,6 @@ def choiceRT_ddm( print_fit(output) """ return ChoicertDdm( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py index 5f453e28..a45e472b 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm_single.py +++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def choiceRT_ddm_single( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -229,8 +227,6 @@ def choiceRT_ddm_single( print_fit(output) """ return ChoicertDdmSingle( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git 
a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py index e59fce9e..54133f11 100644 --- a/Python/hbayesdm/models/_cra_exp.py +++ b/Python/hbayesdm/models/_cra_exp.py @@ -51,9 +51,7 @@ def __init__(self, **kwargs): def cra_exp( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -230,8 +228,6 @@ def cra_exp( print_fit(output) """ return CraExp( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py index 8ff9c1de..709080cd 100644 --- a/Python/hbayesdm/models/_cra_linear.py +++ b/Python/hbayesdm/models/_cra_linear.py @@ -51,9 +51,7 @@ def __init__(self, **kwargs): def cra_linear( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -230,8 +228,6 @@ def cra_linear( print_fit(output) """ return CraLinear( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py index 7d1b8407..e8e8f514 100644 --- a/Python/hbayesdm/models/_dbdm_prob_weight.py +++ b/Python/hbayesdm/models/_dbdm_prob_weight.py @@ -52,9 +52,7 @@ def __init__(self, **kwargs): def dbdm_prob_weight( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -235,8 +233,6 @@ def dbdm_prob_weight( print_fit(output) """ return DbdmProbWeight( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py index 6e30b128..7ad63c94 100644 --- a/Python/hbayesdm/models/_dd_cs.py +++ b/Python/hbayesdm/models/_dd_cs.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def dd_cs( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def dd_cs( print_fit(output) """ return DdCs( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py index f2185110..158401dc 100644 --- a/Python/hbayesdm/models/_dd_cs_single.py +++ b/Python/hbayesdm/models/_dd_cs_single.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def dd_cs_single( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def dd_cs_single( print_fit(output) """ return DdCsSingle( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py index 619fbca8..da015bc4 100644 --- a/Python/hbayesdm/models/_dd_exp.py +++ b/Python/hbayesdm/models/_dd_exp.py @@ -46,9 +46,7 @@ def __init__(self, **kwargs): def dd_exp( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def dd_exp( print_fit(output) """ return 
DdExp( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py index 7d24d0c5..f76e735f 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic.py +++ b/Python/hbayesdm/models/_dd_hyperbolic.py @@ -46,9 +46,7 @@ def __init__(self, **kwargs): def dd_hyperbolic( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def dd_hyperbolic( print_fit(output) """ return DdHyperbolic( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py index 87738db8..cefaf5bf 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic_single.py +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -46,9 +46,7 @@ def __init__(self, **kwargs): def dd_hyperbolic_single( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def dd_hyperbolic_single( print_fit(output) """ return DdHyperbolicSingle( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py index a61841f7..47cf7597 100644 --- a/Python/hbayesdm/models/_gng_m1.py +++ b/Python/hbayesdm/models/_gng_m1.py @@ -49,9 +49,7 @@ def __init__(self, **kwargs): def gng_m1( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -226,8 +224,6 @@ def gng_m1( print_fit(output) """ return GngM1( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py index 3a3a599f..fc2aff4e 100644 --- a/Python/hbayesdm/models/_gng_m2.py +++ b/Python/hbayesdm/models/_gng_m2.py @@ -51,9 +51,7 @@ def __init__(self, **kwargs): def gng_m2( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -228,8 +226,6 @@ def gng_m2( print_fit(output) """ return GngM2( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py index 48cf2123..b68af254 100644 --- a/Python/hbayesdm/models/_gng_m3.py +++ b/Python/hbayesdm/models/_gng_m3.py @@ -54,9 +54,7 @@ def __init__(self, **kwargs): def gng_m3( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -231,8 +229,6 @@ def gng_m3( print_fit(output) """ return GngM3( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py index 6de30765..f23e8980 100644 --- a/Python/hbayesdm/models/_gng_m4.py +++ b/Python/hbayesdm/models/_gng_m4.py @@ -56,9 +56,7 @@ def __init__(self, **kwargs): def gng_m4( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int 
= 4000, nwarmup: int = 1000, nchain: int = 4, @@ -233,8 +231,6 @@ def gng_m4( print_fit(output) """ return GngM4( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py index e7c5cc88..ba5b5df7 100644 --- a/Python/hbayesdm/models/_igt_orl.py +++ b/Python/hbayesdm/models/_igt_orl.py @@ -50,9 +50,7 @@ def __init__(self, **kwargs): def igt_orl( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -229,8 +227,6 @@ def igt_orl( print_fit(output) """ return IgtOrl( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py index 50a86d00..7226598a 100644 --- a/Python/hbayesdm/models/_igt_pvl_decay.py +++ b/Python/hbayesdm/models/_igt_pvl_decay.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def igt_pvl_decay( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def igt_pvl_decay( print_fit(output) """ return IgtPvlDecay( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py index f41f3f12..4c1589d8 100644 --- a/Python/hbayesdm/models/_igt_pvl_delta.py +++ b/Python/hbayesdm/models/_igt_pvl_delta.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def igt_pvl_delta( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def igt_pvl_delta( print_fit(output) """ return IgtPvlDelta( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py index 77e324cf..46656f7b 100644 --- a/Python/hbayesdm/models/_igt_vpp.py +++ b/Python/hbayesdm/models/_igt_vpp.py @@ -56,9 +56,7 @@ def __init__(self, **kwargs): def igt_vpp( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -235,8 +233,6 @@ def igt_vpp( print_fit(output) """ return IgtVpp( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py index 72ae7235..67e093fb 100644 --- a/Python/hbayesdm/models/_peer_ocu.py +++ b/Python/hbayesdm/models/_peer_ocu.py @@ -50,9 +50,7 @@ def __init__(self, **kwargs): def peer_ocu( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -231,8 +229,6 @@ def peer_ocu( print_fit(output) """ return PeerOcu( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py index 6d96f561..a7137045 100644 --- a/Python/hbayesdm/models/_prl_ewa.py +++ b/Python/hbayesdm/models/_prl_ewa.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def prl_ewa( - example: bool = False, - datafile: str = 
None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def prl_ewa( print_fit(output) """ return PrlEwa( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py index f64a5acb..1ce3214d 100644 --- a/Python/hbayesdm/models/_prl_fictitious.py +++ b/Python/hbayesdm/models/_prl_fictitious.py @@ -49,9 +49,7 @@ def __init__(self, **kwargs): def prl_fictitious( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -226,8 +224,6 @@ def prl_fictitious( print_fit(output) """ return PrlFictitious( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py index ac1dec0d..5f80390f 100644 --- a/Python/hbayesdm/models/_prl_fictitious_multipleB.py +++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py @@ -50,9 +50,7 @@ def __init__(self, **kwargs): def prl_fictitious_multipleB( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -228,8 +226,6 @@ def prl_fictitious_multipleB( print_fit(output) """ return PrlFictitiousMultipleb( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py index 99610f17..06524a50 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp.py @@ -51,9 +51,7 @@ def __init__(self, **kwargs): def prl_fictitious_rp( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -229,8 +227,6 @@ def prl_fictitious_rp( print_fit(output) """ return PrlFictitiousRp( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py index 945fbce3..32934584 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py @@ -49,9 +49,7 @@ def __init__(self, **kwargs): def prl_fictitious_rp_woa( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -227,8 +225,6 @@ def prl_fictitious_rp_woa( print_fit(output) """ return PrlFictitiousRpWoa( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py index 4150ae91..432ead92 100644 --- a/Python/hbayesdm/models/_prl_fictitious_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_woa.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def prl_fictitious_woa( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -224,8 +222,6 @@ def prl_fictitious_woa( print_fit(output) 
""" return PrlFictitiousWoa( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py index a6d16305..52bdff84 100644 --- a/Python/hbayesdm/models/_prl_rp.py +++ b/Python/hbayesdm/models/_prl_rp.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def prl_rp( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -224,8 +222,6 @@ def prl_rp( print_fit(output) """ return PrlRp( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py index 987b2922..11b0a9fe 100644 --- a/Python/hbayesdm/models/_prl_rp_multipleB.py +++ b/Python/hbayesdm/models/_prl_rp_multipleB.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def prl_rp_multipleB( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -226,8 +224,6 @@ def prl_rp_multipleB( print_fit(output) """ return PrlRpMultipleb( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index 3af97a4d..fd18b510 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -46,9 +46,7 @@ def __init__(self, **kwargs): def pst_gainloss_Q( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -223,8 +221,6 @@ def pst_gainloss_Q( print_fit(output) """ return PstGainlossQ( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index d7554bcf..f1b6599b 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -45,9 +45,7 @@ def __init__(self, **kwargs): def ra_noLA( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -223,8 +221,6 @@ def ra_noLA( print_fit(output) """ return RaNola( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index 8b611445..d286bf1c 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -45,9 +45,7 @@ def __init__(self, **kwargs): def ra_noRA( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -223,8 +221,6 @@ def ra_noRA( print_fit(output) """ return RaNora( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index 7bf1f93b..80a6f155 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -47,9 +47,7 @@ def __init__(self, **kwargs): def ra_prospect( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: 
Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -225,8 +223,6 @@ def ra_prospect( print_fit(output) """ return RaProspect( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py index ad75ecb1..c25d71f9 100644 --- a/Python/hbayesdm/models/_rdt_happiness.py +++ b/Python/hbayesdm/models/_rdt_happiness.py @@ -57,9 +57,7 @@ def __init__(self, **kwargs): def rdt_happiness( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -239,8 +237,6 @@ def rdt_happiness( print_fit(output) """ return RdtHappiness( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py index ea8c59f6..2edf465b 100644 --- a/Python/hbayesdm/models/_ts_par4.py +++ b/Python/hbayesdm/models/_ts_par4.py @@ -48,9 +48,7 @@ def __init__(self, **kwargs): def ts_par4( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -228,8 +226,6 @@ def ts_par4( print_fit(output) """ return TsPar4( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py index a2106171..8b2fdf48 100644 --- a/Python/hbayesdm/models/_ts_par6.py +++ b/Python/hbayesdm/models/_ts_par6.py @@ -52,9 +52,7 @@ def __init__(self, **kwargs): def ts_par6( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -232,8 +230,6 @@ def ts_par6( print_fit(output) """ return TsPar6( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py index 88c91705..eaa60394 100644 --- a/Python/hbayesdm/models/_ts_par7.py +++ b/Python/hbayesdm/models/_ts_par7.py @@ -54,9 +54,7 @@ def __init__(self, **kwargs): def ts_par7( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -234,8 +232,6 @@ def ts_par7( print_fit(output) """ return TsPar7( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py index d61250ad..e821a68c 100644 --- a/Python/hbayesdm/models/_ug_bayes.py +++ b/Python/hbayesdm/models/_ug_bayes.py @@ -45,9 +45,7 @@ def __init__(self, **kwargs): def ug_bayes( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -221,8 +219,6 @@ def ug_bayes( print_fit(output) """ return UgBayes( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py index d2f7e697..20a16504 100644 --- a/Python/hbayesdm/models/_ug_delta.py +++ b/Python/hbayesdm/models/_ug_delta.py @@ -45,9 +45,7 @@ def __init__(self, **kwargs): def ug_delta( - example: bool = 
False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -221,8 +219,6 @@ def ug_delta( print_fit(output) """ return UgDelta( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py index fd662a48..f462148b 100644 --- a/Python/hbayesdm/models/_wcs_sql.py +++ b/Python/hbayesdm/models/_wcs_sql.py @@ -45,9 +45,7 @@ def __init__(self, **kwargs): def wcs_sql( - example: bool = False, - datafile: str = None, - data: pd.DataFrame = None, + data: Union[pd.DataFrame, str, None] = None, niter: int = 4000, nwarmup: int = 1000, nchain: int = 4, @@ -221,8 +219,6 @@ def wcs_sql( print_fit(output) """ return WcsSql( - example=example, - datafile=datafile, data=data, niter=niter, nwarmup=nwarmup, diff --git a/Python/tests/test_bandit2arm_delta.py b/Python/tests/test_bandit2arm_delta.py index 4be4dafa..bd2ab625 100644 --- a/Python/tests/test_bandit2arm_delta.py +++ b/Python/tests/test_bandit2arm_delta.py @@ -5,7 +5,7 @@ def test_bandit2arm_delta(): _ = bandit2arm_delta( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm2_kalman_filter.py b/Python/tests/test_bandit4arm2_kalman_filter.py index f6ea30be..7f9b550b 100644 --- a/Python/tests/test_bandit4arm2_kalman_filter.py +++ b/Python/tests/test_bandit4arm2_kalman_filter.py @@ -5,7 +5,7 @@ def test_bandit4arm2_kalman_filter(): _ = bandit4arm2_kalman_filter( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_2par_lapse.py b/Python/tests/test_bandit4arm_2par_lapse.py index 8a1a235e..d801d436 100644 --- a/Python/tests/test_bandit4arm_2par_lapse.py +++ b/Python/tests/test_bandit4arm_2par_lapse.py @@ -5,7 +5,7 @@ def test_bandit4arm_2par_lapse(): _ = bandit4arm_2par_lapse( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_4par.py b/Python/tests/test_bandit4arm_4par.py index f8307fd8..53e75bb1 100644 --- a/Python/tests/test_bandit4arm_4par.py +++ b/Python/tests/test_bandit4arm_4par.py @@ -5,7 +5,7 @@ def test_bandit4arm_4par(): _ = bandit4arm_4par( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_lapse.py b/Python/tests/test_bandit4arm_lapse.py index 56fde745..c9ae7b0c 100644 --- a/Python/tests/test_bandit4arm_lapse.py +++ b/Python/tests/test_bandit4arm_lapse.py @@ -5,7 +5,7 @@ def test_bandit4arm_lapse(): _ = bandit4arm_lapse( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bandit4arm_lapse_decay.py b/Python/tests/test_bandit4arm_lapse_decay.py index 71b80d5d..8dbe349c 100644 --- a/Python/tests/test_bandit4arm_lapse_decay.py +++ b/Python/tests/test_bandit4arm_lapse_decay.py @@ -5,7 +5,7 @@ def test_bandit4arm_lapse_decay(): _ = bandit4arm_lapse_decay( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git 
a/Python/tests/test_bandit4arm_singleA_lapse.py b/Python/tests/test_bandit4arm_singleA_lapse.py index 0245d532..37f161c5 100644 --- a/Python/tests/test_bandit4arm_singleA_lapse.py +++ b/Python/tests/test_bandit4arm_singleA_lapse.py @@ -5,7 +5,7 @@ def test_bandit4arm_singleA_lapse(): _ = bandit4arm_singleA_lapse( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_bart_par4.py b/Python/tests/test_bart_par4.py index a9aa1ea9..8f3b4990 100644 --- a/Python/tests/test_bart_par4.py +++ b/Python/tests/test_bart_par4.py @@ -5,7 +5,7 @@ def test_bart_par4(): _ = bart_par4( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_choiceRT_ddm.py b/Python/tests/test_choiceRT_ddm.py index 40aa6376..30d71351 100644 --- a/Python/tests/test_choiceRT_ddm.py +++ b/Python/tests/test_choiceRT_ddm.py @@ -5,7 +5,7 @@ def test_choiceRT_ddm(): _ = choiceRT_ddm( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_choiceRT_ddm_single.py b/Python/tests/test_choiceRT_ddm_single.py index 6330f26b..68cd4a3e 100644 --- a/Python/tests/test_choiceRT_ddm_single.py +++ b/Python/tests/test_choiceRT_ddm_single.py @@ -5,7 +5,7 @@ def test_choiceRT_ddm_single(): _ = choiceRT_ddm_single( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_cra_exp.py b/Python/tests/test_cra_exp.py index 79557cb5..a743e03a 100644 --- a/Python/tests/test_cra_exp.py +++ b/Python/tests/test_cra_exp.py @@ -5,7 +5,7 @@ def test_cra_exp(): _ = cra_exp( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_cra_linear.py b/Python/tests/test_cra_linear.py index 191cb199..3d94816b 100644 --- a/Python/tests/test_cra_linear.py +++ b/Python/tests/test_cra_linear.py @@ -5,7 +5,7 @@ def test_cra_linear(): _ = cra_linear( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dbdm_prob_weight.py b/Python/tests/test_dbdm_prob_weight.py index 084e159a..24e16270 100644 --- a/Python/tests/test_dbdm_prob_weight.py +++ b/Python/tests/test_dbdm_prob_weight.py @@ -5,7 +5,7 @@ def test_dbdm_prob_weight(): _ = dbdm_prob_weight( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_cs.py b/Python/tests/test_dd_cs.py index 5d897973..117a6000 100644 --- a/Python/tests/test_dd_cs.py +++ b/Python/tests/test_dd_cs.py @@ -5,7 +5,7 @@ def test_dd_cs(): _ = dd_cs( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_cs_single.py b/Python/tests/test_dd_cs_single.py index 0882f866..d5a824d2 100644 --- a/Python/tests/test_dd_cs_single.py +++ b/Python/tests/test_dd_cs_single.py @@ -5,7 +5,7 @@ def test_dd_cs_single(): _ = dd_cs_single( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) 
if __name__ == '__main__': diff --git a/Python/tests/test_dd_exp.py b/Python/tests/test_dd_exp.py index 4591b7ba..a573f127 100644 --- a/Python/tests/test_dd_exp.py +++ b/Python/tests/test_dd_exp.py @@ -5,7 +5,7 @@ def test_dd_exp(): _ = dd_exp( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_hyperbolic.py b/Python/tests/test_dd_hyperbolic.py index 129a1310..1df3f739 100644 --- a/Python/tests/test_dd_hyperbolic.py +++ b/Python/tests/test_dd_hyperbolic.py @@ -5,7 +5,7 @@ def test_dd_hyperbolic(): _ = dd_hyperbolic( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_dd_hyperbolic_single.py b/Python/tests/test_dd_hyperbolic_single.py index a22eb2fa..5f2fd6ad 100644 --- a/Python/tests/test_dd_hyperbolic_single.py +++ b/Python/tests/test_dd_hyperbolic_single.py @@ -5,7 +5,7 @@ def test_dd_hyperbolic_single(): _ = dd_hyperbolic_single( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m1.py b/Python/tests/test_gng_m1.py index 239ade8f..002c806e 100644 --- a/Python/tests/test_gng_m1.py +++ b/Python/tests/test_gng_m1.py @@ -5,7 +5,7 @@ def test_gng_m1(): _ = gng_m1( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m2.py b/Python/tests/test_gng_m2.py index 82beb979..930574de 100644 --- a/Python/tests/test_gng_m2.py +++ b/Python/tests/test_gng_m2.py @@ -5,7 +5,7 @@ def test_gng_m2(): _ = gng_m2( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m3.py b/Python/tests/test_gng_m3.py index 24947f57..3f839fad 100644 --- a/Python/tests/test_gng_m3.py +++ b/Python/tests/test_gng_m3.py @@ -5,7 +5,7 @@ def test_gng_m3(): _ = gng_m3( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_gng_m4.py b/Python/tests/test_gng_m4.py index 5157cd91..101a4797 100644 --- a/Python/tests/test_gng_m4.py +++ b/Python/tests/test_gng_m4.py @@ -5,7 +5,7 @@ def test_gng_m4(): _ = gng_m4( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_orl.py b/Python/tests/test_igt_orl.py index fdbec617..b3519454 100644 --- a/Python/tests/test_igt_orl.py +++ b/Python/tests/test_igt_orl.py @@ -5,7 +5,7 @@ def test_igt_orl(): _ = igt_orl( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_pvl_decay.py b/Python/tests/test_igt_pvl_decay.py index dbd575f7..b3c4cfea 100644 --- a/Python/tests/test_igt_pvl_decay.py +++ b/Python/tests/test_igt_pvl_decay.py @@ -5,7 +5,7 @@ def test_igt_pvl_decay(): _ = igt_pvl_decay( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_pvl_delta.py b/Python/tests/test_igt_pvl_delta.py index 25785e8b..a3e27d81 100644 --- 
a/Python/tests/test_igt_pvl_delta.py +++ b/Python/tests/test_igt_pvl_delta.py @@ -5,7 +5,7 @@ def test_igt_pvl_delta(): _ = igt_pvl_delta( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_igt_vpp.py b/Python/tests/test_igt_vpp.py index 54e55b21..98e7b879 100644 --- a/Python/tests/test_igt_vpp.py +++ b/Python/tests/test_igt_vpp.py @@ -5,7 +5,7 @@ def test_igt_vpp(): _ = igt_vpp( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_peer_ocu.py b/Python/tests/test_peer_ocu.py index 71fcede3..2f24fc1e 100644 --- a/Python/tests/test_peer_ocu.py +++ b/Python/tests/test_peer_ocu.py @@ -5,7 +5,7 @@ def test_peer_ocu(): _ = peer_ocu( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_ewa.py b/Python/tests/test_prl_ewa.py index e75974dc..bea31edd 100644 --- a/Python/tests/test_prl_ewa.py +++ b/Python/tests/test_prl_ewa.py @@ -5,7 +5,7 @@ def test_prl_ewa(): _ = prl_ewa( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious.py b/Python/tests/test_prl_fictitious.py index 46d456e5..fa0c5c4d 100644 --- a/Python/tests/test_prl_fictitious.py +++ b/Python/tests/test_prl_fictitious.py @@ -5,7 +5,7 @@ def test_prl_fictitious(): _ = prl_fictitious( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_multipleB.py b/Python/tests/test_prl_fictitious_multipleB.py index b5f8eee6..5aa4f21e 100644 --- a/Python/tests/test_prl_fictitious_multipleB.py +++ b/Python/tests/test_prl_fictitious_multipleB.py @@ -5,7 +5,7 @@ def test_prl_fictitious_multipleB(): _ = prl_fictitious_multipleB( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_rp.py b/Python/tests/test_prl_fictitious_rp.py index e4d61e9c..81a007bd 100644 --- a/Python/tests/test_prl_fictitious_rp.py +++ b/Python/tests/test_prl_fictitious_rp.py @@ -5,7 +5,7 @@ def test_prl_fictitious_rp(): _ = prl_fictitious_rp( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_rp_woa.py b/Python/tests/test_prl_fictitious_rp_woa.py index 72192f62..36a48147 100644 --- a/Python/tests/test_prl_fictitious_rp_woa.py +++ b/Python/tests/test_prl_fictitious_rp_woa.py @@ -5,7 +5,7 @@ def test_prl_fictitious_rp_woa(): _ = prl_fictitious_rp_woa( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_fictitious_woa.py b/Python/tests/test_prl_fictitious_woa.py index 34c5605e..e356f054 100644 --- a/Python/tests/test_prl_fictitious_woa.py +++ b/Python/tests/test_prl_fictitious_woa.py @@ -5,7 +5,7 @@ def test_prl_fictitious_woa(): _ = prl_fictitious_woa( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == 
'__main__': diff --git a/Python/tests/test_prl_rp.py b/Python/tests/test_prl_rp.py index fdc80ca8..28a80745 100644 --- a/Python/tests/test_prl_rp.py +++ b/Python/tests/test_prl_rp.py @@ -5,7 +5,7 @@ def test_prl_rp(): _ = prl_rp( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_prl_rp_multipleB.py b/Python/tests/test_prl_rp_multipleB.py index 880c554d..8304d50b 100644 --- a/Python/tests/test_prl_rp_multipleB.py +++ b/Python/tests/test_prl_rp_multipleB.py @@ -5,7 +5,7 @@ def test_prl_rp_multipleB(): _ = prl_rp_multipleB( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_pst_gainloss_Q.py b/Python/tests/test_pst_gainloss_Q.py index 6be734d4..26a771d9 100644 --- a/Python/tests/test_pst_gainloss_Q.py +++ b/Python/tests/test_pst_gainloss_Q.py @@ -5,7 +5,7 @@ def test_pst_gainloss_Q(): _ = pst_gainloss_Q( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_noLA.py b/Python/tests/test_ra_noLA.py index 776a0ff7..67c61282 100644 --- a/Python/tests/test_ra_noLA.py +++ b/Python/tests/test_ra_noLA.py @@ -5,7 +5,7 @@ def test_ra_noLA(): _ = ra_noLA( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_noRA.py b/Python/tests/test_ra_noRA.py index 14e43a94..2544ad28 100644 --- a/Python/tests/test_ra_noRA.py +++ b/Python/tests/test_ra_noRA.py @@ -5,7 +5,7 @@ def test_ra_noRA(): _ = ra_noRA( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ra_prospect.py b/Python/tests/test_ra_prospect.py index 779386f0..a88c5205 100644 --- a/Python/tests/test_ra_prospect.py +++ b/Python/tests/test_ra_prospect.py @@ -5,7 +5,7 @@ def test_ra_prospect(): _ = ra_prospect( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_rdt_happiness.py b/Python/tests/test_rdt_happiness.py index 9a0a1d4c..034b2eb3 100644 --- a/Python/tests/test_rdt_happiness.py +++ b/Python/tests/test_rdt_happiness.py @@ -5,7 +5,7 @@ def test_rdt_happiness(): _ = rdt_happiness( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par4.py b/Python/tests/test_ts_par4.py index f6e1b7c8..ecf03723 100644 --- a/Python/tests/test_ts_par4.py +++ b/Python/tests/test_ts_par4.py @@ -5,7 +5,7 @@ def test_ts_par4(): _ = ts_par4( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par6.py b/Python/tests/test_ts_par6.py index e293d92b..617e454a 100644 --- a/Python/tests/test_ts_par6.py +++ b/Python/tests/test_ts_par6.py @@ -5,7 +5,7 @@ def test_ts_par6(): _ = ts_par6( - example=True, niter=10, nwarmup=5, nchain=1, ncore=1) + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) if __name__ == '__main__': diff --git a/Python/tests/test_ts_par7.py b/Python/tests/test_ts_par7.py index 69c0880c..e884a1d2 100644 
--- a/Python/tests/test_ts_par7.py
+++ b/Python/tests/test_ts_par7.py
@@ -5,7 +5,7 @@
 
 def test_ts_par7():
     _ = ts_par7(
-        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)
+        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
 
 
 if __name__ == '__main__':
diff --git a/Python/tests/test_ug_bayes.py b/Python/tests/test_ug_bayes.py
index bbffb4d3..cd4119c2 100644
--- a/Python/tests/test_ug_bayes.py
+++ b/Python/tests/test_ug_bayes.py
@@ -5,7 +5,7 @@
 
 def test_ug_bayes():
     _ = ug_bayes(
-        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)
+        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
 
 
 if __name__ == '__main__':
diff --git a/Python/tests/test_ug_delta.py b/Python/tests/test_ug_delta.py
index fb2ed0c8..5cf348f2 100644
--- a/Python/tests/test_ug_delta.py
+++ b/Python/tests/test_ug_delta.py
@@ -5,7 +5,7 @@
 
 def test_ug_delta():
     _ = ug_delta(
-        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)
+        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
 
 
 if __name__ == '__main__':
diff --git a/Python/tests/test_wcs_sql.py b/Python/tests/test_wcs_sql.py
index 6753eba3..ee5417c5 100644
--- a/Python/tests/test_wcs_sql.py
+++ b/Python/tests/test_wcs_sql.py
@@ -5,7 +5,7 @@
 
 def test_wcs_sql():
     _ = wcs_sql(
-        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)
+        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
 
 
 if __name__ == '__main__':
diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R
index 7ea47691..669ed66e 100644
--- a/R/R/hBayesDM_model.R
+++ b/R/R/hBayesDM_model.R
@@ -114,7 +114,6 @@ hBayesDM_model <- function(task_name,
   # The resulting hBayesDM model function to be returned
   function(data           = NULL,
-           datafile       = "",
            niter          = 4000,
            nwarmup        = 1000,
            nchain         = 4,
            ncore          = 1,
            nthin          = 1,
            inits          = "vb",
@@ -128,8 +127,6 @@ hBayesDM_model <- function(task_name,
            adapt_delta    = 0.95,
            stepsize       = 1,
            max_treedepth  = 10,
-           use_example    = FALSE,
-           choose_data    = FALSE,
            ...) {
 
     ############### Stop checks ###############
@@ -144,21 +141,28 @@ hBayesDM_model <- function(task_name,
       stop("** Posterior predictions are not yet available for this model. **\n")
     }
 
-    if (!is.null(data)) {
+    if (is.null(data) || (length(data) == 1 && is.na(data)) || identical(data, "")) {
+      stop("Invalid input for the 'data' value. ",
+           "You should pass a data.frame, or a filepath for a data file, ",
+           "\"example\" for an example dataset, ",
+           "or \"choose\" to choose it in a prompt.")
+
+    } else if ("data.frame" %in% class(data)) {
       # Use the given data object
       raw_data <- data.table::as.data.table(data)
-    } else {
-      if (!is.na(datafile) && datafile != '') {
-        # Use the datafile to read data
-      } else if (use_example) {
-        if (model_type == "") {
-          exampleData <- paste0(task_name, "_", "exampleData.txt")
-        } else {
-          exampleData <- paste0(task_name, "_", model_type, "_", "exampleData.txt")
-        }
-        datafile <- system.file("extdata", exampleData, package = "hBayesDM")
-      } else if (choose_data) {
+
+    } else if ("character" %in% class(data)) {
+      # Interpret the string as "example", "choose", or a filepath
+      if (data == "example") {
+        example_data <-
+          ifelse(model_type == "",
+                 paste0(task_name, "_", "exampleData.txt"),
+                 paste0(task_name, "_", model_type, "_", "exampleData.txt"))
+        datafile <- system.file("extdata", example_data, package = "hBayesDM")
+      } else if (data == "choose") {
         datafile <- file.choose()
+      } else {
+        datafile <- data
       }
 
       # Check if data file exists
@@ -172,6 +176,12 @@ hBayesDM_model <- function(task_name,
                                  fill = TRUE, stringsAsFactors = TRUE, logical01 = FALSE)
       # NOTE: Separator is fixed to "\t" because fread() has trouble reading space delimited files
       # that have missing values.
+
+    } else {
+      stop("Invalid input for the 'data' value. ",
+           "You should pass a data.frame, or a filepath for a data file, ",
+           "\"example\" for an example dataset, ",
+           "or \"choose\" to choose it in a prompt.")
     }
 
     # Save initial colnames of raw_data for later
diff --git a/R/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html
index da32d282..149413a4 100644
--- a/R/docs/reference/bandit2arm_delta.html
+++ b/R/docs/reference/bandit2arm_delta.html
@@ -123,19 +123,21 @@

    Rescorla-Wagner (Delta) Model

-bandit2arm_delta(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit2arm_delta(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-separated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -289,10 +282,12 @@

    See also

    Examples

# NOT RUN {
 # Run the model with a given data.frame as df
-output <- bandit2arm_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+output <- bandit2arm_delta(
+  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Run the model with example data
-output <- bandit2arm_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+output <- bandit2arm_delta(
+  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
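[Editor's illustration] The wrapper and documentation diffs above all reduce to one change: the three
mutually exclusive arguments (example, datafile, data) become a single `data` argument that is
dispatched by type. A minimal, self-contained sketch of that dispatch, mirroring the patched
_handle_data_args; the PATH_EXTDATA constant and the free-standing function name are illustrative
stand-ins, not the package's actual API:

from pathlib import Path
from typing import Union

import pandas as pd

# Stand-in for the package's bundled example-data directory (assumption).
PATH_EXTDATA = Path('common') / 'extdata'


def load_raw_data(data: Union[pd.DataFrame, str, None],
                  task_name: str,
                  model_type: str = '') -> pd.DataFrame:
    """Resolve the unified `data` argument to a raw-data DataFrame."""
    if isinstance(data, pd.DataFrame):
        # A DataFrame is used as-is.
        return data
    elif isinstance(data, str):
        if data == 'example':
            # Load the bundled example dataset for this task/model.
            if model_type == '':
                filename = '%s_exampleData.txt' % task_name
            else:
                filename = '%s_%s_exampleData.txt' % (task_name, model_type)
            return pd.read_csv(PATH_EXTDATA / filename, sep='\t')
        elif data.endswith('.csv'):
            # A .csv path is read as comma-separated.
            return pd.read_csv(data)
        else:
            # Any other string is treated as a tab-separated data file.
            return pd.read_csv(data, sep='\t')
    else:
        raise RuntimeError('Invalid `data` argument given: ' + str(data))

Note that "choose" is handled only on the R side (via file.choose()); the Python side accepts a
DataFrame, "example", or a filepath.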
    diff --git a/R/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html
    index fe42cab6..3f394817 100644
    --- a/R/docs/reference/bandit4arm2_kalman_filter.html
    +++ b/R/docs/reference/bandit4arm2_kalman_filter.html
@@ -123,19 +123,21 @@

    Kalman Filter

-bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm2_kalman_filter(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-separated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +283,12 @@

    See also

    Examples

# NOT RUN {
 # Run the model with a given data.frame as df
-output <- bandit4arm2_kalman_filter(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+output <- bandit4arm2_kalman_filter(
+  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Run the model with example data
-output <- bandit4arm2_kalman_filter(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
+output <- bandit4arm2_kalman_filter(
+  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
 
 # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
 plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html
    index ede180aa..d6b730e9 100644
    --- a/R/docs/reference/bandit4arm_2par_lapse.html
    +++ b/R/docs/reference/bandit4arm_2par_lapse.html
    @@ -123,19 +123,21 @@ 

3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)

-bandit4arm_2par_lapse(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm_2par_lapse(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bandit4arm_2par_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_2par_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_2par_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_2par_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html
    index 98c401a8..8284c8a5 100644
    --- a/R/docs/reference/bandit4arm_4par.html
    +++ b/R/docs/reference/bandit4arm_4par.html
    @@ -123,19 +123,21 @@ 

    4 Parameter Model, without C (choice perseveration)

-bandit4arm_4par(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm_4par(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_4par(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_4par(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_4par(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html
    index e25fc19f..fb52bd32 100644
    --- a/R/docs/reference/bandit4arm_lapse.html
    +++ b/R/docs/reference/bandit4arm_lapse.html
    @@ -123,19 +123,21 @@ 

    5 Parameter Model, without C (choice perseveration) but with xi (noise)

-bandit4arm_lapse(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm_lapse(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bandit4arm_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html
    index 3a06139a..be2afec1 100644
    --- a/R/docs/reference/bandit4arm_lapse_decay.html
    +++ b/R/docs/reference/bandit4arm_lapse_decay.html
    @@ -123,19 +123,21 @@ 

5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (d).

-bandit4arm_lapse_decay(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm_lapse_decay(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bandit4arm_lapse_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse_decay(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_lapse_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse_decay(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html
    index 11394abb..aa92fa48 100644
    --- a/R/docs/reference/bandit4arm_singleA_lapse.html
    +++ b/R/docs/reference/bandit4arm_singleA_lapse.html
    @@ -123,19 +123,21 @@ 

4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for gain and loss.

-bandit4arm_singleA_lapse(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+bandit4arm_singleA_lapse(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bandit4arm_singleA_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_singleA_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_singleA_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_singleA_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html
    index f9756dfc..399edfeb 100644
    --- a/R/docs/reference/bart_par4.html
    +++ b/R/docs/reference/bart_par4.html
    @@ -123,19 +123,20 @@ 

    Re-parameterized version of BART model with 4 parameters

-bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+bart_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "pumps", "explosion". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -290,10 +282,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- bart_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bart_par4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bart_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bart_par4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html
    index 57b1b766..43e2383e 100644
    --- a/R/docs/reference/choiceRT_ddm.html
    +++ b/R/docs/reference/choiceRT_ddm.html
    @@ -123,19 +123,20 @@ 

    Drift Diffusion Model

-choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+choiceRT_ddm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "RT". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -304,10 +296,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- choiceRT_ddm(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- choiceRT_ddm(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html
    index cc5de20c..c26bb481 100644
    --- a/R/docs/reference/choiceRT_ddm_single.html
    +++ b/R/docs/reference/choiceRT_ddm_single.html
    @@ -123,19 +123,21 @@ 

    Drift Diffusion Model

-choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+choiceRT_ddm_single(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "RT". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -304,10 +297,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- choiceRT_ddm_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- choiceRT_ddm_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
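
The same signature rewrite applies to every model function in this patch, so old calls map onto the new data argument one-for-one. A sketch of the migration, using choiceRT_ddm_single as the example (the .txt path is a hypothetical placeholder):

 # before this patch                       # after this patch
 # use_example = TRUE                ->    data = "example"
 # choose_data = TRUE                ->    data = "choose"
 # datafile = "rt_data.txt"          ->    data = "rt_data.txt"
 output <- choiceRT_ddm_single(data = "example", niter = 2000, nwarmup = 1000)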
    diff --git a/R/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html
    index 3b6fbba1..7ce45d56 100644
    --- a/R/docs/reference/cra_exp.html
    +++ b/R/docs/reference/cra_exp.html
    @@ -123,19 +123,20 @@ 

    Exponential Subjective Value Model

-cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+cra_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -293,10 +285,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- cra_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_exp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- cra_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_exp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html
    index 3eae6ec4..c491ea34 100644
    --- a/R/docs/reference/cra_linear.html
    +++ b/R/docs/reference/cra_linear.html
    @@ -123,19 +123,20 @@ 

    Linear Subjective Value Model

-cra_linear(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+cra_linear(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -293,10 +285,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- cra_linear(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_linear(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- cra_linear(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_linear(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html
    index ba73d260..9666bf72 100644
    --- a/R/docs/reference/dbdm_prob_weight.html
    +++ b/R/docs/reference/dbdm_prob_weight.html
    @@ -123,19 +123,21 @@ 

    Probability Weight Function

-dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+dbdm_prob_weight(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -297,10 +290,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dbdm_prob_weight(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dbdm_prob_weight(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dbdm_prob_weight(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dbdm_prob_weight(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html
    index 059668b8..cf8e80fd 100644
    --- a/R/docs/reference/dd_cs.html
    +++ b/R/docs/reference/dd_cs.html
    @@ -123,19 +123,20 @@ 

    Constant-Sensitivity (CS) Model

-dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+dd_cs(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -291,10 +283,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dd_cs(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_cs(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html
    index 6bbe5686..285dec0b 100644
    --- a/R/docs/reference/dd_cs_single.html
    +++ b/R/docs/reference/dd_cs_single.html
    @@ -123,19 +123,20 @@ 

    Constant-Sensitivity (CS) Model

-dd_cs_single(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+dd_cs_single(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -291,10 +283,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dd_cs_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_cs_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html
    index bdf625a1..3d357bee 100644
    --- a/R/docs/reference/dd_exp.html
    +++ b/R/docs/reference/dd_exp.html
    @@ -123,19 +123,20 @@ 

    Exponential Model

-dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+dd_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -291,10 +283,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dd_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_exp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_exp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html
    index 873acce7..52627905 100644
    --- a/R/docs/reference/dd_hyperbolic.html
    +++ b/R/docs/reference/dd_hyperbolic.html
    @@ -123,19 +123,21 @@ 

    Hyperbolic Model

-dd_hyperbolic(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+dd_hyperbolic(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -291,10 +284,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dd_hyperbolic(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_hyperbolic(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html
    index 2f2a4453..8bb9db43 100644
    --- a/R/docs/reference/dd_hyperbolic_single.html
    +++ b/R/docs/reference/dd_hyperbolic_single.html
    @@ -123,19 +123,21 @@ 

    Hyperbolic Model

-dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+dd_hyperbolic_single(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -291,10 +284,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- dd_hyperbolic_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_hyperbolic_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html
    index 1ffa9903..e6e0264e 100644
    --- a/R/docs/reference/gng_m1.html
    +++ b/R/docs/reference/gng_m1.html
    @@ -123,19 +123,20 @@ 

    RW + noise

-gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+gng_m1(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +281,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- gng_m1(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m1(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m1(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m1(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html
    index 28679698..1066b42b 100644
    --- a/R/docs/reference/gng_m2.html
    +++ b/R/docs/reference/gng_m2.html
    @@ -123,19 +123,20 @@ 

    RW + noise + bias

-gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+gng_m2(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +281,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- gng_m2(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m2(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m2(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m2(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html
    index 28ecb899..a269287b 100644
    --- a/R/docs/reference/gng_m3.html
    +++ b/R/docs/reference/gng_m3.html
    @@ -123,19 +123,20 @@ 

    RW + noise + bias + pi

-gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+gng_m3(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +281,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- gng_m3(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m3(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m3(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m3(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html
    index 00475968..3ef5da22 100644
    --- a/R/docs/reference/gng_m4.html
    +++ b/R/docs/reference/gng_m4.html
    @@ -123,19 +123,20 @@ 

    RW (rew/pun) + noise + bias + pi

-gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+gng_m4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -289,10 +281,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- gng_m4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html
    index f8acacad..bf19b132 100644
    --- a/R/docs/reference/igt_orl.html
    +++ b/R/docs/reference/igt_orl.html
    @@ -123,19 +123,20 @@ 

    Outcome-Representation Learning Model

-igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+igt_orl(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +201,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -302,10 +294,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- igt_orl(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_orl(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_orl(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_orl(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html
    index 2a0b7572..a0ea74c6 100644
    --- a/R/docs/reference/igt_pvl_decay.html
    +++ b/R/docs/reference/igt_pvl_decay.html
    @@ -123,19 +123,21 @@ 

    Prospect Valence Learning (PVL) Decay-RI

-igt_pvl_decay(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+igt_pvl_decay(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -300,10 +293,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- igt_pvl_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_decay(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_pvl_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_decay(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html
    index b2feba4e..3f674958 100644
    --- a/R/docs/reference/igt_pvl_delta.html
    +++ b/R/docs/reference/igt_pvl_delta.html
    @@ -123,19 +123,21 @@ 

    Prospect Valence Learning (PVL) Delta

-igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+igt_pvl_delta(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

    Arguments

-data, datafile

-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:

+data

+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +202,6 @@

 max_treedepth

 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

-use_example

-Whether to use example data. By default, set to FALSE.

-choose_data

-Whether to choose data with an interactive window.
-By default, set to FALSE.

 ...

@@ -300,10 +293,12 @@

 See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- igt_pvl_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_delta(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_pvl_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_delta(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html
    index f6f66f90..b576e9c3 100644
    --- a/R/docs/reference/igt_vpp.html
    +++ b/R/docs/reference/igt_vpp.html
@@ -123,19 +123,20 @@ Value-Plus-Perseverance

-igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+igt_vpp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "gain", "loss". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -300,10 +292,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- igt_vpp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_vpp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_vpp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_vpp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html
    index 72deef93..a2aa70fc 100644
    --- a/R/docs/reference/peer_ocu.html
    +++ b/R/docs/reference/peer_ocu.html
@@ -123,19 +123,20 @@ Other-Conferred Utility (OCU) Model

-peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+peer_ocu(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -295,10 +287,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- peer_ocu(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- peer_ocu(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- peer_ocu(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- peer_ocu(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html
    index 2649eb21..c69228aa 100644
    --- a/R/docs/reference/prl_ewa.html
    +++ b/R/docs/reference/prl_ewa.html
@@ -123,19 +123,20 @@ Experience-Weighted Attraction Model

-prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+prl_ewa(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_ewa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_ewa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_ewa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_ewa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html
    index b84cba8f..5b6b966e 100644
    --- a/R/docs/reference/prl_fictitious.html
    +++ b/R/docs/reference/prl_fictitious.html
@@ -123,19 +123,21 @@ Fictitious Update Model

-prl_fictitious(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_fictitious(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +283,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_fictitious(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html
    index cf536c6d..9c66060a 100644
    --- a/R/docs/reference/prl_fictitious_multipleB.html
    +++ b/R/docs/reference/prl_fictitious_multipleB.html
@@ -123,19 +123,21 @@ Fictitious Update Model

-prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_fictitious_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "block", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -291,10 +284,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_multipleB(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_multipleB(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html
    index 541130d6..05ac0746 100644
    --- a/R/docs/reference/prl_fictitious_rp.html
    +++ b/R/docs/reference/prl_fictitious_rp.html
@@ -123,19 +123,21 @@ Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)

-prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_fictitious_rp(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -291,10 +284,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html
    index e085521e..9d60f809 100644
    --- a/R/docs/reference/prl_fictitious_rp_woa.html
    +++ b/R/docs/reference/prl_fictitious_rp_woa.html
@@ -123,19 +123,21 @@ Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)

-prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_fictitious_rp_woa(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -291,10 +284,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_rp_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp_woa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_rp_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp_woa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html
    index 55e8e93f..0a99b75d 100644
    --- a/R/docs/reference/prl_fictitious_woa.html
    +++ b/R/docs/reference/prl_fictitious_woa.html
@@ -123,19 +123,21 @@ Fictitious Update Model, without alpha (indecision point)

-prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_fictitious_woa(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +283,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_woa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_woa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html
    index e8cbd299..f17779dc 100644
    --- a/R/docs/reference/prl_rp.html
    +++ b/R/docs/reference/prl_rp.html
@@ -123,19 +123,20 @@ Reward-Punishment Model

-prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+prl_rp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html
    index 2917599a..51a48789 100644
    --- a/R/docs/reference/prl_rp_multipleB.html
    +++ b/R/docs/reference/prl_rp_multipleB.html
@@ -123,19 +123,21 @@ Reward-Punishment Model

-prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+prl_rp_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "block", "choice", "outcome". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -291,10 +284,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- prl_rp_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp_multipleB(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_rp_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp_multipleB(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html
    index 841f2aa3..909aad90 100644
    --- a/R/docs/reference/pst_gainloss_Q.html
    +++ b/R/docs/reference/pst_gainloss_Q.html
@@ -123,19 +123,21 @@ Gain-Loss Q Learning Model

-pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+pst_gainloss_Q(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "type", "choice", "reward". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -291,10 +284,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- pst_gainloss_Q(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- pst_gainloss_Q(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- pst_gainloss_Q(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- pst_gainloss_Q(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html
    index 99fbef5a..bd24774a 100644
    --- a/R/docs/reference/ra_noLA.html
    +++ b/R/docs/reference/ra_noLA.html
@@ -123,19 +123,20 @@ Prospect Theory, without loss aversion (LA) parameter

-ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ra_noLA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ra_noLA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noLA(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_noLA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noLA(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ra_noRA.html b/R/docs/reference/ra_noRA.html
    index 2ddb3c72..fe13541f 100644
    --- a/R/docs/reference/ra_noRA.html
    +++ b/R/docs/reference/ra_noRA.html
@@ -123,19 +123,20 @@ Prospect Theory, without risk aversion (RA) parameter

-ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ra_noRA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ra_noRA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noRA(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_noRA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noRA(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ra_prospect.html b/R/docs/reference/ra_prospect.html
    index c29b35e0..c6c9427c 100644
    --- a/R/docs/reference/ra_prospect.html
    +++ b/R/docs/reference/ra_prospect.html
@@ -123,19 +123,20 @@ Prospect Theory

-ra_prospect(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ra_prospect(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ra_prospect(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_prospect(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_prospect(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_prospect(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/rdt_happiness.html b/R/docs/reference/rdt_happiness.html
    index 69d0ef5a..0be764b8 100644
    --- a/R/docs/reference/rdt_happiness.html
    +++ b/R/docs/reference/rdt_happiness.html
@@ -123,19 +123,21 @@ Happiness Computational Model

-rdt_happiness(data = NULL, datafile = "", niter = 4000,
-  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
+rdt_happiness(data = NULL, niter = 4000, nwarmup = 1000,
+  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
   indPars = "mean", modelRegressor = FALSE, vb = FALSE,
   inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+  max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See Details below for more information.

@@ -200,15 +202,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -296,10 +289,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- rdt_happiness(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- rdt_happiness(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- rdt_happiness(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- rdt_happiness(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html
    index 23737565..fabfe5b7 100644
    --- a/R/docs/reference/ts_par4.html
    +++ b/R/docs/reference/ts_par4.html
@@ -123,19 +123,20 @@ Hybrid Model, with 4 parameters

-ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ts_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -303,10 +295,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ts_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html
    index 00eb5314..dbe9e3a3 100644
    --- a/R/docs/reference/ts_par6.html
    +++ b/R/docs/reference/ts_par6.html
@@ -123,19 +123,20 @@ Hybrid Model, with 6 parameters

-ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ts_par6(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -302,10 +294,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ts_par6(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par6(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par6(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par6(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html
    index 7c3331ed..19e8c5fc 100644
    --- a/R/docs/reference/ts_par7.html
    +++ b/R/docs/reference/ts_par7.html
@@ -123,19 +123,20 @@ Hybrid Model, with 7 parameters (original model)

-ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ts_par7(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -302,10 +294,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ts_par7(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par7(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par7(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par7(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html
    index 93c83662..43aaa99a 100644
    --- a/R/docs/reference/ug_bayes.html
    +++ b/R/docs/reference/ug_bayes.html
@@ -123,19 +123,20 @@ Ideal Observer Model

-ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ug_bayes(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "offer", "accept". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -288,10 +280,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ug_bayes(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_bayes(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ug_bayes(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_bayes(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html
    index e6c41212..5d7dbd26 100644
    --- a/R/docs/reference/ug_delta.html
    +++ b/R/docs/reference/ug_delta.html
@@ -123,19 +123,20 @@ Rescorla-Wagner (Delta) Model

-ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+ug_delta(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "offer", "accept". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -288,10 +280,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- ug_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_delta(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ug_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_delta(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html
    index 48c1a951..01e902f1 100644
    --- a/R/docs/reference/wcs_sql.html
    +++ b/R/docs/reference/wcs_sql.html
@@ -123,19 +123,20 @@ Sequential Learning Model

-wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
-  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
-  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
-  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
-  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
+wcs_sql(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

Arguments

-data, datafile
-A data.frame object (data) or a filepath for a tab-seperated txt file
-containing the data (datafile) to be modeled. Data columns should be labeled as:
+data
+Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-seperated txt file, "example" to use example data, or
+"choose" to choose data with an interactive window.
+Columns in the dataset must include:
 "subjID", "choice", "outcome". See Details below for more information.

@@ -200,15 +201,6 @@ Arguments

 max_treedepth
 Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.
-use_example
-Whether to use example data. By default, set to FALSE.
-choose_data
-Whether to choose data with an interactive window.
-By default, set to FALSE.
 ...

@@ -290,10 +282,12 @@ See also

    Examples

    # NOT RUN {
     # Run the model with a given data.frame as df
    -output <- wcs_sql(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- wcs_sql(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- wcs_sql(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- wcs_sql(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man-roxygen/model-documentation.R b/R/man-roxygen/model-documentation.R
    index 8073fbd3..037da518 100644
    --- a/R/man-roxygen/model-documentation.R
    +++ b/R/man-roxygen/model-documentation.R
    @@ -9,12 +9,11 @@
     #'   \item \strong{Model}: <%= MODEL_NAME %> <%= ifelse(!is.na(MODEL_CITE), MODEL_CITE, '') %>
     #' }
     #'
    -#' @param data,datafile A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -#'   containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +#' @param data Data to be modeled. It should be given as a data.frame object,
    +#'   a filepath for a tab-seperated txt file, \code{"example"} to use example data, or
    +#'   \code{"choose"} to choose data with an interactive window.
    +#'   Columns in the dataset must include:
     #'   <%= DATA_COLUMNS %>. See \bold{Details} below for more information.
    -#' @param use_example Whether to use example data. By default, set to \code{FALSE}.
    -#' @param choose_data Whether to choose data with an interactive window.
    -#'   By default, set to \code{FALSE}.
     #' @param niter Number of iterations, including warm-up. Defaults to 4000.
     #' @param nwarmup Number of iterations used for warm-up only. Defaults to 1000.
     #' @param nchain Number of Markov chains to run. Defaults to 4.
    @@ -142,10 +141,12 @@
     #' @examples
     #' \dontrun{
     #' # Run the model with a given data.frame as df
    -#' output <- <%= MODEL_FUNCTION %>(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +#' output <- <%= MODEL_FUNCTION %>(
    +#'   data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     #'
     #' # Run the model with example data
    -#' output <- <%= MODEL_FUNCTION %>(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +#' output <- <%= MODEL_FUNCTION %>(
    +#'   data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     #'
     #' # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     #' plot(output, type = "trace")
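
For reference, a minimal sketch of how the unified `data` argument documented in the
template above is meant to be called after this change. This is illustrative only:
ra_prospect is just one of the models touched by this series, `df` stands for any
data.frame with the required columns, and the filename is a hypothetical placeholder.

    library(hBayesDM)

    # 1. A data.frame object with the required columns
    output <- ra_prospect(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # 2. A filepath for a tab-separated txt file (hypothetical filename)
    output <- ra_prospect(
      data = "ra_data.txt", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # 3. The special strings that replace the removed flags:
    #    data = "example" stands in for use_example = TRUE, and
    #    data = "choose" stands in for choose_data = TRUE.
    output <- ra_prospect(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
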
    diff --git a/R/man/bandit2arm_delta.Rd b/R/man/bandit2arm_delta.Rd
    index 54cd1cf6..06668db0 100644
    --- a/R/man/bandit2arm_delta.Rd
    +++ b/R/man/bandit2arm_delta.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit2arm_delta}
     \title{Rescorla-Wagner (Delta) Model}
     \usage{
    -bandit2arm_delta(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit2arm_delta(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-seperated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 2-Armed Bandit Task, there should be 3 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit2arm_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit2arm_delta(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit2arm_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit2arm_delta(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm2_kalman_filter.Rd b/R/man/bandit4arm2_kalman_filter.Rd
    index 2d57e414..f20850fa 100644
    --- a/R/man/bandit4arm2_kalman_filter.Rd
    +++ b/R/man/bandit4arm2_kalman_filter.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm2_kalman_filter}
     \title{Kalman Filter}
     \usage{
    -bandit4arm2_kalman_filter(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm2_kalman_filter(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the 4-Armed Bandit Task (modified), there should be 3 columns of data with t
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm2_kalman_filter(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm2_kalman_filter(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm2_kalman_filter(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm2_kalman_filter(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm_2par_lapse.Rd b/R/man/bandit4arm_2par_lapse.Rd
    index 9d414aa0..3e31f967 100644
    --- a/R/man/bandit4arm_2par_lapse.Rd
    +++ b/R/man/bandit4arm_2par_lapse.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm_2par_lapse}
     \title{3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)}
     \usage{
    -bandit4arm_2par_lapse(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm_2par_lapse(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm_2par_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_2par_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_2par_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_2par_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm_4par.Rd b/R/man/bandit4arm_4par.Rd
    index 1575f987..701c616b 100644
    --- a/R/man/bandit4arm_4par.Rd
    +++ b/R/man/bandit4arm_4par.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm_4par}
     \title{4 Parameter Model, without C (choice perseveration)}
     \usage{
    -bandit4arm_4par(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm_4par(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_4par(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_4par(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_4par(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm_lapse.Rd b/R/man/bandit4arm_lapse.Rd
    index 7cbbcea2..b9afac79 100644
    --- a/R/man/bandit4arm_lapse.Rd
    +++ b/R/man/bandit4arm_lapse.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm_lapse}
     \title{5 Parameter Model, without C (choice perseveration) but with xi (noise)}
     \usage{
    -bandit4arm_lapse(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm_lapse(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm_lapse_decay.Rd b/R/man/bandit4arm_lapse_decay.Rd
    index 47980565..6115602d 100644
    --- a/R/man/bandit4arm_lapse_decay.Rd
    +++ b/R/man/bandit4arm_lapse_decay.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm_lapse_decay}
     \title{5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).}
     \usage{
    -bandit4arm_lapse_decay(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm_lapse_decay(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm_lapse_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse_decay(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_lapse_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_lapse_decay(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bandit4arm_singleA_lapse.Rd b/R/man/bandit4arm_singleA_lapse.Rd
    index 8aeac789..6fbe8a94 100644
    --- a/R/man/bandit4arm_singleA_lapse.Rd
    +++ b/R/man/bandit4arm_singleA_lapse.Rd
    @@ -4,15 +4,17 @@
     \alias{bandit4arm_singleA_lapse}
     \title{4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.}
     \usage{
    -bandit4arm_singleA_lapse(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +bandit4arm_singleA_lapse(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the 4-Armed Bandit Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bandit4arm_singleA_lapse(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_singleA_lapse(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bandit4arm_singleA_lapse(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bandit4arm_singleA_lapse(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/bart_par4.Rd b/R/man/bart_par4.Rd
    index 086f3548..8be17cb4 100644
    --- a/R/man/bart_par4.Rd
    +++ b/R/man/bart_par4.Rd
    @@ -4,15 +4,16 @@
     \alias{bart_par4}
     \title{Re-parameterized version of BART model with 4 parameters}
     \usage{
    -bart_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +bart_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "pumps", "explosion". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Balloon Analogue Risk Task, there should be 3 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- bart_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bart_par4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- bart_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- bart_par4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/choiceRT_ddm.Rd b/R/man/choiceRT_ddm.Rd
    index 306578eb..614fe42d 100644
    --- a/R/man/choiceRT_ddm.Rd
    +++ b/R/man/choiceRT_ddm.Rd
    @@ -4,15 +4,16 @@
     \alias{choiceRT_ddm}
     \title{Drift Diffusion Model}
     \usage{
    -choiceRT_ddm(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +choiceRT_ddm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "RT". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).}
    @@ -157,10 +153,12 @@ Code for this model is based on codes/comments by Guido Biele, Joseph Burling, A
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- choiceRT_ddm(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- choiceRT_ddm(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
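    
    The choiceRT_ddm page above is one of the few where \code{...} is not inert:
    the RTbound argument documented in its arguments section passes through
    \code{...}. A minimal sketch of supplying it alongside the new data interface
    (the 0.15 value is illustrative only; the documented default is 0.1, i.e.
    100 ms):
    
    # R -- sketch only, not part of the patch
    library(hBayesDM)
    output <- choiceRT_ddm(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4,
      RTbound = 0.15)  # model-specific argument, passed via ...
    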
    diff --git a/R/man/choiceRT_ddm_single.Rd b/R/man/choiceRT_ddm_single.Rd
    index a8f0e6d2..12610b97 100644
    --- a/R/man/choiceRT_ddm_single.Rd
    +++ b/R/man/choiceRT_ddm_single.Rd
    @@ -4,15 +4,17 @@
     \alias{choiceRT_ddm_single}
     \title{Drift Diffusion Model}
     \usage{
    -choiceRT_ddm_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +choiceRT_ddm_single(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "RT". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{RTbound}{Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).}
    @@ -157,10 +154,12 @@ Code for this model is based on codes/comments by Guido Biele, Joseph Burling, A
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- choiceRT_ddm_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- choiceRT_ddm_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- choiceRT_ddm_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/cra_exp.Rd b/R/man/cra_exp.Rd
    index f4fe81c2..b8e10534 100644
    --- a/R/man/cra_exp.Rd
    +++ b/R/man/cra_exp.Rd
    @@ -4,15 +4,16 @@
     \alias{cra_exp}
     \title{Exponential Subjective Value Model}
     \usage{
    -cra_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +cra_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- cra_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_exp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- cra_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_exp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/cra_linear.Rd b/R/man/cra_linear.Rd
    index 9f8dbc7c..7fb9a904 100644
    --- a/R/man/cra_linear.Rd
    +++ b/R/man/cra_linear.Rd
    @@ -4,15 +4,16 @@
     \alias{cra_linear}
     \title{Linear Subjective Value Model}
     \usage{
    -cra_linear(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +cra_linear(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- cra_linear(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_linear(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- cra_linear(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- cra_linear(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dbdm_prob_weight.Rd b/R/man/dbdm_prob_weight.Rd
    index 9a151a92..4c30128e 100644
    --- a/R/man/dbdm_prob_weight.Rd
    +++ b/R/man/dbdm_prob_weight.Rd
    @@ -4,15 +4,17 @@
     \alias{dbdm_prob_weight}
     \title{Probability Weight Function}
     \usage{
    -dbdm_prob_weight(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +dbdm_prob_weight(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Description Based Decision Making Task, there should be 8 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dbdm_prob_weight(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dbdm_prob_weight(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dbdm_prob_weight(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dbdm_prob_weight(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dd_cs.Rd b/R/man/dd_cs.Rd
    index 2048694c..eb36f904 100644
    --- a/R/man/dd_cs.Rd
    +++ b/R/man/dd_cs.Rd
    @@ -4,15 +4,16 @@
     \alias{dd_cs}
     \title{Constant-Sensitivity (CS) Model}
     \usage{
    -dd_cs(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +dd_cs(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dd_cs(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_cs(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dd_cs_single.Rd b/R/man/dd_cs_single.Rd
    index 4726f175..cacb8e42 100644
    --- a/R/man/dd_cs_single.Rd
    +++ b/R/man/dd_cs_single.Rd
    @@ -4,15 +4,16 @@
     \alias{dd_cs_single}
     \title{Constant-Sensitivity (CS) Model}
     \usage{
    -dd_cs_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +dd_cs_single(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dd_cs_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_cs_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_cs_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dd_exp.Rd b/R/man/dd_exp.Rd
    index 740d963a..0ea44271 100644
    --- a/R/man/dd_exp.Rd
    +++ b/R/man/dd_exp.Rd
    @@ -4,15 +4,16 @@
     \alias{dd_exp}
     \title{Exponential Model}
     \usage{
    -dd_exp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +dd_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dd_exp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_exp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_exp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_exp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dd_hyperbolic.Rd b/R/man/dd_hyperbolic.Rd
    index 617187c4..c4b74707 100644
    --- a/R/man/dd_hyperbolic.Rd
    +++ b/R/man/dd_hyperbolic.Rd
    @@ -4,15 +4,17 @@
     \alias{dd_hyperbolic}
     \title{Hyperbolic Model}
     \usage{
    -dd_hyperbolic(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +dd_hyperbolic(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dd_hyperbolic(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_hyperbolic(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/dd_hyperbolic_single.Rd b/R/man/dd_hyperbolic_single.Rd
    index aef3c683..d51fb26b 100644
    --- a/R/man/dd_hyperbolic_single.Rd
    +++ b/R/man/dd_hyperbolic_single.Rd
    @@ -4,15 +4,17 @@
     \alias{dd_hyperbolic_single}
     \title{Hyperbolic Model}
     \usage{
    -dd_hyperbolic_single(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +dd_hyperbolic_single(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +138,12 @@ For the Delay Discounting Task, there should be 6 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- dd_hyperbolic_single(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic_single(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- dd_hyperbolic_single(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- dd_hyperbolic_single(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/gng_m1.Rd b/R/man/gng_m1.Rd
    index 3ce5edfa..5732762f 100644
    --- a/R/man/gng_m1.Rd
    +++ b/R/man/gng_m1.Rd
    @@ -4,15 +4,16 @@
     \alias{gng_m1}
     \title{RW + noise}
     \usage{
    -gng_m1(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +gng_m1(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- gng_m1(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m1(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m1(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m1(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/gng_m2.Rd b/R/man/gng_m2.Rd
    index 6e61bcbe..bb1640d3 100644
    --- a/R/man/gng_m2.Rd
    +++ b/R/man/gng_m2.Rd
    @@ -4,15 +4,16 @@
     \alias{gng_m2}
     \title{RW + noise + bias}
     \usage{
    -gng_m2(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +gng_m2(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- gng_m2(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m2(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m2(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m2(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/gng_m3.Rd b/R/man/gng_m3.Rd
    index 0d8b59ca..81f52d88 100644
    --- a/R/man/gng_m3.Rd
    +++ b/R/man/gng_m3.Rd
    @@ -4,15 +4,16 @@
     \alias{gng_m3}
     \title{RW + noise + bias + pi}
     \usage{
    -gng_m3(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +gng_m3(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
    +a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- gng_m3(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m3(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m3(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m3(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/gng_m4.Rd b/R/man/gng_m4.Rd
    index 3d35d4ea..f4765ec5 100644
    --- a/R/man/gng_m4.Rd
    +++ b/R/man/gng_m4.Rd
    @@ -4,15 +4,16 @@
     \alias{gng_m4}
     \title{RW (rew/pun) + noise + bias + pi}
     \usage{
    -gng_m4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +gng_m4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "cue", "keyPressed", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- gng_m4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- gng_m4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- gng_m4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/igt_orl.Rd b/R/man/igt_orl.Rd
    index 75f17365..79c303d8 100644
    --- a/R/man/igt_orl.Rd
    +++ b/R/man/igt_orl.Rd
    @@ -4,15 +4,16 @@
     \alias{igt_orl}
     \title{Outcome-Representation Learning Model}
     \usage{
    -igt_orl(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +igt_orl(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
    @@ -154,10 +150,12 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- igt_orl(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_orl(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_orl(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_orl(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/igt_pvl_decay.Rd b/R/man/igt_pvl_decay.Rd
    index d1f4d262..cd96665c 100644
    --- a/R/man/igt_pvl_decay.Rd
    +++ b/R/man/igt_pvl_decay.Rd
    @@ -4,15 +4,17 @@
     \alias{igt_pvl_decay}
     \title{Prospect Valence Learning (PVL) Decay-RI}
     \usage{
    -igt_pvl_decay(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +igt_pvl_decay(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
    @@ -152,10 +149,12 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- igt_pvl_decay(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_decay(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_pvl_decay(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_decay(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/igt_pvl_delta.Rd b/R/man/igt_pvl_delta.Rd
    index 081c7156..838f08b8 100644
    --- a/R/man/igt_pvl_delta.Rd
    +++ b/R/man/igt_pvl_delta.Rd
    @@ -4,15 +4,17 @@
     \alias{igt_pvl_delta}
     \title{Prospect Valence Learning (PVL) Delta}
     \usage{
    -igt_pvl_delta(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +igt_pvl_delta(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
    @@ -152,10 +149,12 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- igt_pvl_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_delta(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_pvl_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_pvl_delta(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/igt_vpp.Rd b/R/man/igt_vpp.Rd
    index 8123b8cf..b754047d 100644
    --- a/R/man/igt_vpp.Rd
    +++ b/R/man/igt_vpp.Rd
    @@ -4,15 +4,16 @@
     \alias{igt_vpp}
     \title{Value-Plus-Perseverance}
     \usage{
    -igt_vpp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +igt_vpp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "gain", "loss". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{payscale}{Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.}
    @@ -152,10 +148,12 @@ For the Iowa Gambling Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- igt_vpp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_vpp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- igt_vpp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- igt_vpp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/peer_ocu.Rd b/R/man/peer_ocu.Rd
    index 2ff4a582..9e19c9f9 100644
    --- a/R/man/peer_ocu.Rd
    +++ b/R/man/peer_ocu.Rd
    @@ -4,15 +4,16 @@
     \alias{peer_ocu}
     \title{Other-Conferred Utility (OCU) Model}
     \usage{
    -peer_ocu(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +peer_ocu(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Peer Influence Task, there should be 8 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- peer_ocu(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- peer_ocu(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- peer_ocu(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- peer_ocu(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_ewa.Rd b/R/man/prl_ewa.Rd
    index be7d8222..a2b6a6ee 100644
    --- a/R/man/prl_ewa.Rd
    +++ b/R/man/prl_ewa.Rd
    @@ -4,15 +4,16 @@
     \alias{prl_ewa}
     \title{Experience-Weighted Attraction Model}
     \usage{
    -prl_ewa(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +prl_ewa(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_ewa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_ewa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_ewa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_ewa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_fictitious.Rd b/R/man/prl_fictitious.Rd
    index 5b62e2a8..d6a13bfd 100644
    --- a/R/man/prl_fictitious.Rd
    +++ b/R/man/prl_fictitious.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_fictitious}
     \title{Fictitious Update Model}
     \usage{
    -prl_fictitious(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_fictitious(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_fictitious(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_fictitious_multipleB.Rd b/R/man/prl_fictitious_multipleB.Rd
    index 0d70365b..2f752fb7 100644
    --- a/R/man/prl_fictitious_multipleB.Rd
    +++ b/R/man/prl_fictitious_multipleB.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_fictitious_multipleB}
     \title{Fictitious Update Model}
     \usage{
    -prl_fictitious_multipleB(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_fictitious_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "block", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_multipleB(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_multipleB(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_fictitious_rp.Rd b/R/man/prl_fictitious_rp.Rd
    index 6942fbe4..3642ccb2 100644
    --- a/R/man/prl_fictitious_rp.Rd
    +++ b/R/man/prl_fictitious_rp.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_fictitious_rp}
     \title{Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)}
     \usage{
    -prl_fictitious_rp(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_fictitious_rp(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_fictitious_rp_woa.Rd b/R/man/prl_fictitious_rp_woa.Rd
    index 9a22fa46..d79e960e 100644
    --- a/R/man/prl_fictitious_rp_woa.Rd
    +++ b/R/man/prl_fictitious_rp_woa.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_fictitious_rp_woa}
     \title{Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)}
     \usage{
    -prl_fictitious_rp_woa(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_fictitious_rp_woa(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_rp_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp_woa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_rp_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_rp_woa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_fictitious_woa.Rd b/R/man/prl_fictitious_woa.Rd
    index 3689e3cd..67943d0a 100644
    --- a/R/man/prl_fictitious_woa.Rd
    +++ b/R/man/prl_fictitious_woa.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_fictitious_woa}
     \title{Fictitious Update Model, without alpha (indecision point)}
     \usage{
    -prl_fictitious_woa(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_fictitious_woa(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_fictitious_woa(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_woa(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_fictitious_woa(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_fictitious_woa(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_rp.Rd b/R/man/prl_rp.Rd
    index cd568712..68f7724e 100644
    --- a/R/man/prl_rp.Rd
    +++ b/R/man/prl_rp.Rd
    @@ -4,15 +4,16 @@
     \alias{prl_rp}
     \title{Reward-Punishment Model}
     \usage{
    -prl_rp(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +prl_rp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Probabilistic Reversal Learning Task, there should be 3 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_rp(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_rp(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/prl_rp_multipleB.Rd b/R/man/prl_rp_multipleB.Rd
    index 3cedc4f9..96fc79ed 100644
    --- a/R/man/prl_rp_multipleB.Rd
    +++ b/R/man/prl_rp_multipleB.Rd
    @@ -4,15 +4,17 @@
     \alias{prl_rp_multipleB}
     \title{Reward-Punishment Model}
     \usage{
    -prl_rp_multipleB(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +prl_rp_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "block", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Reversal Learning Task, there should be 4 columns of data
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- prl_rp_multipleB(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp_multipleB(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- prl_rp_multipleB(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- prl_rp_multipleB(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/pst_gainloss_Q.Rd b/R/man/pst_gainloss_Q.Rd
    index d7d51e19..2b81a26f 100644
    --- a/R/man/pst_gainloss_Q.Rd
    +++ b/R/man/pst_gainloss_Q.Rd
    @@ -4,15 +4,17 @@
     \alias{pst_gainloss_Q}
     \title{Gain-Loss Q Learning Model}
     \usage{
    -pst_gainloss_Q(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +pst_gainloss_Q(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "type", "choice", "reward". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Probabilistic Selection Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- pst_gainloss_Q(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- pst_gainloss_Q(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- pst_gainloss_Q(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- pst_gainloss_Q(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ra_noLA.Rd b/R/man/ra_noLA.Rd
    index 829f701d..25d1938e 100644
    --- a/R/man/ra_noLA.Rd
    +++ b/R/man/ra_noLA.Rd
    @@ -4,15 +4,16 @@
     \alias{ra_noLA}
     \title{Prospect Theory, without loss aversion (LA) parameter}
     \usage{
    -ra_noLA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ra_noLA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Risk Aversion Task, there should be 5 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ra_noLA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noLA(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_noLA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noLA(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ra_noRA.Rd b/R/man/ra_noRA.Rd
    index 0c56c482..de827092 100644
    --- a/R/man/ra_noRA.Rd
    +++ b/R/man/ra_noRA.Rd
    @@ -4,15 +4,16 @@
     \alias{ra_noRA}
     \title{Prospect Theory, without risk aversion (RA) parameter}
     \usage{
    -ra_noRA(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ra_noRA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Risk Aversion Task, there should be 5 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ra_noRA(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noRA(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_noRA(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_noRA(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ra_prospect.Rd b/R/man/ra_prospect.Rd
    index 9e62c63d..f2755477 100644
    --- a/R/man/ra_prospect.Rd
    +++ b/R/man/ra_prospect.Rd
    @@ -4,15 +4,16 @@
     \alias{ra_prospect}
     \title{Prospect Theory}
     \usage{
    -ra_prospect(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ra_prospect(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "gain", "loss", "cert", "gamble". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Risk Aversion Task, there should be 5 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ra_prospect(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_prospect(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ra_prospect(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ra_prospect(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/rdt_happiness.Rd b/R/man/rdt_happiness.Rd
    index f26afe6f..73325834 100644
    --- a/R/man/rdt_happiness.Rd
    +++ b/R/man/rdt_happiness.Rd
    @@ -4,15 +4,17 @@
     \alias{rdt_happiness}
     \title{Happiness Computational Model}
     \usage{
    -rdt_happiness(data = NULL, datafile = "", niter = 4000,
    -  nwarmup = 1000, nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    +rdt_happiness(data = NULL, niter = 4000, nwarmup = 1000,
    +  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
       indPars = "mean", modelRegressor = FALSE, vb = FALSE,
       inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +  max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +54,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +140,12 @@ For the Risky Decision Task, there should be 9 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- rdt_happiness(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- rdt_happiness(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- rdt_happiness(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- rdt_happiness(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ts_par4.Rd b/R/man/ts_par4.Rd
    index 097b14fc..8f377736 100644
    --- a/R/man/ts_par4.Rd
    +++ b/R/man/ts_par4.Rd
    @@ -4,15 +4,16 @@
     \alias{ts_par4}
     \title{Hybrid Model, with 4 parameters}
     \usage{
    -ts_par4(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ts_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
    @@ -154,10 +150,12 @@ For the Two-Step Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ts_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par4(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par4(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par4(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ts_par6.Rd b/R/man/ts_par6.Rd
    index 60fc2802..e799fcb0 100644
    --- a/R/man/ts_par6.Rd
    +++ b/R/man/ts_par6.Rd
    @@ -4,15 +4,16 @@
     \alias{ts_par6}
     \title{Hybrid Model, with 6 parameters}
     \usage{
    -ts_par6(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ts_par6(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
    @@ -154,10 +150,12 @@ For the Two-Step Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ts_par6(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par6(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par6(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par6(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ts_par7.Rd b/R/man/ts_par7.Rd
    index 625c3d30..3f4126d7 100644
    --- a/R/man/ts_par7.Rd
    +++ b/R/man/ts_par7.Rd
    @@ -4,15 +4,16 @@
     \alias{ts_par7}
     \title{Hybrid Model, with 7 parameters (original model)}
     \usage{
    -ts_par7(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ts_par7(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "level1_choice", "level2_choice", "reward". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, it's possible to set \strong{model-specific argument(s)} as follows: 
     \describe{
       \item{trans_prob}{Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.}
    @@ -154,10 +150,12 @@ For the Two-Step Task, there should be 4 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ts_par7(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par7(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ts_par7(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ts_par7(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ug_bayes.Rd b/R/man/ug_bayes.Rd
    index 72fd2768..e8bba7a5 100644
    --- a/R/man/ug_bayes.Rd
    +++ b/R/man/ug_bayes.Rd
    @@ -4,15 +4,16 @@
     \alias{ug_bayes}
     \title{Ideal Observer Model}
     \usage{
    -ug_bayes(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ug_bayes(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "offer", "accept". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ug_bayes(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_bayes(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ug_bayes(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_bayes(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/ug_delta.Rd b/R/man/ug_delta.Rd
    index 5e31111b..f9b03a7b 100644
    --- a/R/man/ug_delta.Rd
    +++ b/R/man/ug_delta.Rd
    @@ -4,15 +4,16 @@
     \alias{ug_delta}
     \title{Rescorla-Wagner (Delta) Model}
     \usage{
    -ug_delta(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +ug_delta(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "offer", "accept". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -141,10 +137,12 @@ For the Norm-Training Ultimatum Game, there should be 3 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- ug_delta(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_delta(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- ug_delta(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- ug_delta(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/man/wcs_sql.Rd b/R/man/wcs_sql.Rd
    index 0175135f..cdc53d80 100644
    --- a/R/man/wcs_sql.Rd
    +++ b/R/man/wcs_sql.Rd
    @@ -4,15 +4,16 @@
     \alias{wcs_sql}
     \title{Sequential Learning Model}
     \usage{
    -wcs_sql(data = NULL, datafile = "", niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, use_example = FALSE, choose_data = FALSE, ...)
    +wcs_sql(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    +  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    +  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    +  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
     }
     \arguments{
    -\item{data, datafile}{A data.frame object (\code{data}) or a filepath for a tab-seperated txt file
    -containing the data (\code{datafile}) to be modeled. Data columns should be labeled as:
    +\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
    +\code{"choose"} to choose data with an interactive window.
    +Columns in the dataset must include:
     "subjID", "choice", "outcome". See \bold{Details} below for more information.}
     
     \item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
    @@ -52,11 +53,6 @@ take on each new iteration. See \bold{Details} below.}
     \item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
     on each new iteration. See \bold{Details} below.}
     
    -\item{use_example}{Whether to use example data. By default, set to \code{FALSE}.}
    -
    -\item{choose_data}{Whether to choose data with an interactive window.
    -By default, set to \code{FALSE}.}
    -
     \item{...}{For this model, there is no model-specific argument.}
     }
     \value{
    @@ -143,10 +139,12 @@ For the Wisconsin Card Sorting Task, there should be 3 columns of data with the
     \examples{
     \dontrun{
     # Run the model with a given data.frame as df
    -output <- wcs_sql(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- wcs_sql(
    +  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Run the model with example data
    -output <- wcs_sql(use_example = TRUE, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    +output <- wcs_sql(
    +  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
     
     # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
     plot(output, type = "trace")
    diff --git a/R/tests/testthat/test_bandit2arm_delta.R b/R/tests/testthat/test_bandit2arm_delta.R
    index 88568325..a6950cbe 100644
    --- a/R/tests/testthat/test_bandit2arm_delta.R
    +++ b/R/tests/testthat/test_bandit2arm_delta.R
    @@ -6,6 +6,5 @@ test_that("Test bandit2arm_delta", {
       skip_on_cran()
     
       expect_output(bandit2arm_delta(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm2_kalman_filter.R b/R/tests/testthat/test_bandit4arm2_kalman_filter.R
    index 1bb865c6..7e31e77e 100644
    --- a/R/tests/testthat/test_bandit4arm2_kalman_filter.R
    +++ b/R/tests/testthat/test_bandit4arm2_kalman_filter.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm2_kalman_filter", {
       skip_on_cran()
     
       expect_output(bandit4arm2_kalman_filter(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm_2par_lapse.R b/R/tests/testthat/test_bandit4arm_2par_lapse.R
    index 119cf6c7..91428a9e 100644
    --- a/R/tests/testthat/test_bandit4arm_2par_lapse.R
    +++ b/R/tests/testthat/test_bandit4arm_2par_lapse.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm_2par_lapse", {
       skip_on_cran()
     
       expect_output(bandit4arm_2par_lapse(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm_4par.R b/R/tests/testthat/test_bandit4arm_4par.R
    index 725876ba..6fa55e6a 100644
    --- a/R/tests/testthat/test_bandit4arm_4par.R
    +++ b/R/tests/testthat/test_bandit4arm_4par.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm_4par", {
       skip_on_cran()
     
       expect_output(bandit4arm_4par(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm_lapse.R b/R/tests/testthat/test_bandit4arm_lapse.R
    index a5baa890..6cf104db 100644
    --- a/R/tests/testthat/test_bandit4arm_lapse.R
    +++ b/R/tests/testthat/test_bandit4arm_lapse.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm_lapse", {
       skip_on_cran()
     
       expect_output(bandit4arm_lapse(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm_lapse_decay.R b/R/tests/testthat/test_bandit4arm_lapse_decay.R
    index ae2b2a0b..971253d3 100644
    --- a/R/tests/testthat/test_bandit4arm_lapse_decay.R
    +++ b/R/tests/testthat/test_bandit4arm_lapse_decay.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm_lapse_decay", {
       skip_on_cran()
     
       expect_output(bandit4arm_lapse_decay(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bandit4arm_singleA_lapse.R b/R/tests/testthat/test_bandit4arm_singleA_lapse.R
    index 30be92fa..8a2cb2f1 100644
    --- a/R/tests/testthat/test_bandit4arm_singleA_lapse.R
    +++ b/R/tests/testthat/test_bandit4arm_singleA_lapse.R
    @@ -6,6 +6,5 @@ test_that("Test bandit4arm_singleA_lapse", {
       skip_on_cran()
     
       expect_output(bandit4arm_singleA_lapse(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_bart_par4.R b/R/tests/testthat/test_bart_par4.R
    index 87fd06bb..df1e8749 100644
    --- a/R/tests/testthat/test_bart_par4.R
    +++ b/R/tests/testthat/test_bart_par4.R
    @@ -6,6 +6,5 @@ test_that("Test bart_par4", {
       skip_on_cran()
     
       expect_output(bart_par4(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_choiceRT_ddm.R b/R/tests/testthat/test_choiceRT_ddm.R
    index 1e6d85e2..c9b490d3 100644
    --- a/R/tests/testthat/test_choiceRT_ddm.R
    +++ b/R/tests/testthat/test_choiceRT_ddm.R
    @@ -6,6 +6,5 @@ test_that("Test choiceRT_ddm", {
       skip_on_cran()
     
       expect_output(choiceRT_ddm(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_choiceRT_ddm_single.R b/R/tests/testthat/test_choiceRT_ddm_single.R
    index 8c1f90d4..4b93e693 100644
    --- a/R/tests/testthat/test_choiceRT_ddm_single.R
    +++ b/R/tests/testthat/test_choiceRT_ddm_single.R
    @@ -6,6 +6,5 @@ test_that("Test choiceRT_ddm_single", {
       skip_on_cran()
     
       expect_output(choiceRT_ddm_single(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_cra_exp.R b/R/tests/testthat/test_cra_exp.R
    index ded8cb29..c1e4127d 100644
    --- a/R/tests/testthat/test_cra_exp.R
    +++ b/R/tests/testthat/test_cra_exp.R
    @@ -6,6 +6,5 @@ test_that("Test cra_exp", {
       skip_on_cran()
     
       expect_output(cra_exp(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_cra_linear.R b/R/tests/testthat/test_cra_linear.R
    index 40ffee53..462ecb30 100644
    --- a/R/tests/testthat/test_cra_linear.R
    +++ b/R/tests/testthat/test_cra_linear.R
    @@ -6,6 +6,5 @@ test_that("Test cra_linear", {
       skip_on_cran()
     
       expect_output(cra_linear(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dbdm_prob_weight.R b/R/tests/testthat/test_dbdm_prob_weight.R
    index 0091edcc..85300a27 100644
    --- a/R/tests/testthat/test_dbdm_prob_weight.R
    +++ b/R/tests/testthat/test_dbdm_prob_weight.R
    @@ -6,6 +6,5 @@ test_that("Test dbdm_prob_weight", {
       skip_on_cran()
     
       expect_output(dbdm_prob_weight(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dd_cs.R b/R/tests/testthat/test_dd_cs.R
    index 7b9a552f..81d8ab69 100644
    --- a/R/tests/testthat/test_dd_cs.R
    +++ b/R/tests/testthat/test_dd_cs.R
    @@ -6,6 +6,5 @@ test_that("Test dd_cs", {
       skip_on_cran()
     
       expect_output(dd_cs(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dd_cs_single.R b/R/tests/testthat/test_dd_cs_single.R
    index 5b69b365..6b65c708 100644
    --- a/R/tests/testthat/test_dd_cs_single.R
    +++ b/R/tests/testthat/test_dd_cs_single.R
    @@ -6,6 +6,5 @@ test_that("Test dd_cs_single", {
       skip_on_cran()
     
       expect_output(dd_cs_single(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dd_exp.R b/R/tests/testthat/test_dd_exp.R
    index 86e81f1a..d4fb35e1 100644
    --- a/R/tests/testthat/test_dd_exp.R
    +++ b/R/tests/testthat/test_dd_exp.R
    @@ -6,6 +6,5 @@ test_that("Test dd_exp", {
       skip_on_cran()
     
       expect_output(dd_exp(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dd_hyperbolic.R b/R/tests/testthat/test_dd_hyperbolic.R
    index 71246980..31c20d43 100644
    --- a/R/tests/testthat/test_dd_hyperbolic.R
    +++ b/R/tests/testthat/test_dd_hyperbolic.R
    @@ -6,6 +6,5 @@ test_that("Test dd_hyperbolic", {
       skip_on_cran()
     
       expect_output(dd_hyperbolic(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_dd_hyperbolic_single.R b/R/tests/testthat/test_dd_hyperbolic_single.R
    index de624a5c..d0dd53a6 100644
    --- a/R/tests/testthat/test_dd_hyperbolic_single.R
    +++ b/R/tests/testthat/test_dd_hyperbolic_single.R
    @@ -6,6 +6,5 @@ test_that("Test dd_hyperbolic_single", {
       skip_on_cran()
     
       expect_output(dd_hyperbolic_single(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_gng_m1.R b/R/tests/testthat/test_gng_m1.R
    index 292b5093..ffbea4de 100644
    --- a/R/tests/testthat/test_gng_m1.R
    +++ b/R/tests/testthat/test_gng_m1.R
    @@ -6,6 +6,5 @@ test_that("Test gng_m1", {
       skip_on_cran()
     
       expect_output(gng_m1(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_gng_m2.R b/R/tests/testthat/test_gng_m2.R
    index a4e82763..0d84ff23 100644
    --- a/R/tests/testthat/test_gng_m2.R
    +++ b/R/tests/testthat/test_gng_m2.R
    @@ -6,6 +6,5 @@ test_that("Test gng_m2", {
       skip_on_cran()
     
       expect_output(gng_m2(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_gng_m3.R b/R/tests/testthat/test_gng_m3.R
    index 7388aa78..c4da3591 100644
    --- a/R/tests/testthat/test_gng_m3.R
    +++ b/R/tests/testthat/test_gng_m3.R
    @@ -6,6 +6,5 @@ test_that("Test gng_m3", {
       skip_on_cran()
     
       expect_output(gng_m3(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_gng_m4.R b/R/tests/testthat/test_gng_m4.R
    index b32bdafb..1c252b51 100644
    --- a/R/tests/testthat/test_gng_m4.R
    +++ b/R/tests/testthat/test_gng_m4.R
    @@ -6,6 +6,5 @@ test_that("Test gng_m4", {
       skip_on_cran()
     
       expect_output(gng_m4(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_igt_orl.R b/R/tests/testthat/test_igt_orl.R
    index f7cfbe7f..15b1dee8 100644
    --- a/R/tests/testthat/test_igt_orl.R
    +++ b/R/tests/testthat/test_igt_orl.R
    @@ -6,6 +6,5 @@ test_that("Test igt_orl", {
       skip_on_cran()
     
       expect_output(igt_orl(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_igt_pvl_decay.R b/R/tests/testthat/test_igt_pvl_decay.R
    index e42dba06..93af7088 100644
    --- a/R/tests/testthat/test_igt_pvl_decay.R
    +++ b/R/tests/testthat/test_igt_pvl_decay.R
    @@ -6,6 +6,5 @@ test_that("Test igt_pvl_decay", {
       skip_on_cran()
     
       expect_output(igt_pvl_decay(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_igt_pvl_delta.R b/R/tests/testthat/test_igt_pvl_delta.R
    index 7b7c48c9..ca498a71 100644
    --- a/R/tests/testthat/test_igt_pvl_delta.R
    +++ b/R/tests/testthat/test_igt_pvl_delta.R
    @@ -6,6 +6,5 @@ test_that("Test igt_pvl_delta", {
       skip_on_cran()
     
       expect_output(igt_pvl_delta(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_igt_vpp.R b/R/tests/testthat/test_igt_vpp.R
    index 384d1e7d..b5ab9d41 100644
    --- a/R/tests/testthat/test_igt_vpp.R
    +++ b/R/tests/testthat/test_igt_vpp.R
    @@ -6,6 +6,5 @@ test_that("Test igt_vpp", {
       skip_on_cran()
     
       expect_output(igt_vpp(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_peer_ocu.R b/R/tests/testthat/test_peer_ocu.R
    index 09842e8b..8f80c272 100644
    --- a/R/tests/testthat/test_peer_ocu.R
    +++ b/R/tests/testthat/test_peer_ocu.R
    @@ -6,6 +6,5 @@ test_that("Test peer_ocu", {
       skip_on_cran()
     
       expect_output(peer_ocu(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_ewa.R b/R/tests/testthat/test_prl_ewa.R
    index bdf4d261..b1cde38c 100644
    --- a/R/tests/testthat/test_prl_ewa.R
    +++ b/R/tests/testthat/test_prl_ewa.R
    @@ -6,6 +6,5 @@ test_that("Test prl_ewa", {
       skip_on_cran()
     
       expect_output(prl_ewa(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_fictitious.R b/R/tests/testthat/test_prl_fictitious.R
    index 21ef0a0d..0202f394 100644
    --- a/R/tests/testthat/test_prl_fictitious.R
    +++ b/R/tests/testthat/test_prl_fictitious.R
    @@ -6,6 +6,5 @@ test_that("Test prl_fictitious", {
       skip_on_cran()
     
       expect_output(prl_fictitious(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_fictitious_multipleB.R b/R/tests/testthat/test_prl_fictitious_multipleB.R
    index 71eb8641..861cd04d 100644
    --- a/R/tests/testthat/test_prl_fictitious_multipleB.R
    +++ b/R/tests/testthat/test_prl_fictitious_multipleB.R
    @@ -6,6 +6,5 @@ test_that("Test prl_fictitious_multipleB", {
       skip_on_cran()
     
       expect_output(prl_fictitious_multipleB(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_fictitious_rp.R b/R/tests/testthat/test_prl_fictitious_rp.R
    index 57600992..76dd6289 100644
    --- a/R/tests/testthat/test_prl_fictitious_rp.R
    +++ b/R/tests/testthat/test_prl_fictitious_rp.R
    @@ -6,6 +6,5 @@ test_that("Test prl_fictitious_rp", {
       skip_on_cran()
     
       expect_output(prl_fictitious_rp(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_fictitious_rp_woa.R b/R/tests/testthat/test_prl_fictitious_rp_woa.R
    index d0794e3a..90aa2342 100644
    --- a/R/tests/testthat/test_prl_fictitious_rp_woa.R
    +++ b/R/tests/testthat/test_prl_fictitious_rp_woa.R
    @@ -6,6 +6,5 @@ test_that("Test prl_fictitious_rp_woa", {
       skip_on_cran()
     
       expect_output(prl_fictitious_rp_woa(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_fictitious_woa.R b/R/tests/testthat/test_prl_fictitious_woa.R
    index 46e0c65f..b52c15b4 100644
    --- a/R/tests/testthat/test_prl_fictitious_woa.R
    +++ b/R/tests/testthat/test_prl_fictitious_woa.R
    @@ -6,6 +6,5 @@ test_that("Test prl_fictitious_woa", {
       skip_on_cran()
     
       expect_output(prl_fictitious_woa(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_rp.R b/R/tests/testthat/test_prl_rp.R
    index 0a20c1ec..79f1f2e7 100644
    --- a/R/tests/testthat/test_prl_rp.R
    +++ b/R/tests/testthat/test_prl_rp.R
    @@ -6,6 +6,5 @@ test_that("Test prl_rp", {
       skip_on_cran()
     
       expect_output(prl_rp(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_prl_rp_multipleB.R b/R/tests/testthat/test_prl_rp_multipleB.R
    index 9e007d14..c4d0a00d 100644
    --- a/R/tests/testthat/test_prl_rp_multipleB.R
    +++ b/R/tests/testthat/test_prl_rp_multipleB.R
    @@ -6,6 +6,5 @@ test_that("Test prl_rp_multipleB", {
       skip_on_cran()
     
       expect_output(prl_rp_multipleB(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_pst_gainloss_Q.R b/R/tests/testthat/test_pst_gainloss_Q.R
    index a26616ee..9d6f644a 100644
    --- a/R/tests/testthat/test_pst_gainloss_Q.R
    +++ b/R/tests/testthat/test_pst_gainloss_Q.R
    @@ -6,6 +6,5 @@ test_that("Test pst_gainloss_Q", {
       skip_on_cran()
     
       expect_output(pst_gainloss_Q(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ra_noLA.R b/R/tests/testthat/test_ra_noLA.R
    index 5dd55dc7..287029e2 100644
    --- a/R/tests/testthat/test_ra_noLA.R
    +++ b/R/tests/testthat/test_ra_noLA.R
    @@ -6,6 +6,5 @@ test_that("Test ra_noLA", {
       skip_on_cran()
     
       expect_output(ra_noLA(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ra_noRA.R b/R/tests/testthat/test_ra_noRA.R
    index 8a27453a..a40a5bde 100644
    --- a/R/tests/testthat/test_ra_noRA.R
    +++ b/R/tests/testthat/test_ra_noRA.R
    @@ -6,6 +6,5 @@ test_that("Test ra_noRA", {
       skip_on_cran()
     
       expect_output(ra_noRA(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ra_prospect.R b/R/tests/testthat/test_ra_prospect.R
    index ba57f2b9..efebea75 100644
    --- a/R/tests/testthat/test_ra_prospect.R
    +++ b/R/tests/testthat/test_ra_prospect.R
    @@ -6,6 +6,5 @@ test_that("Test ra_prospect", {
       skip_on_cran()
     
       expect_output(ra_prospect(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_rdt_happiness.R b/R/tests/testthat/test_rdt_happiness.R
    index 751bbf02..d338a124 100644
    --- a/R/tests/testthat/test_rdt_happiness.R
    +++ b/R/tests/testthat/test_rdt_happiness.R
    @@ -6,6 +6,5 @@ test_that("Test rdt_happiness", {
       skip_on_cran()
     
       expect_output(rdt_happiness(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ts_par4.R b/R/tests/testthat/test_ts_par4.R
    index b2ae4d7c..5d5bc257 100644
    --- a/R/tests/testthat/test_ts_par4.R
    +++ b/R/tests/testthat/test_ts_par4.R
    @@ -6,6 +6,5 @@ test_that("Test ts_par4", {
       skip_on_cran()
     
       expect_output(ts_par4(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ts_par6.R b/R/tests/testthat/test_ts_par6.R
    index 7c220fb8..ad7046af 100644
    --- a/R/tests/testthat/test_ts_par6.R
    +++ b/R/tests/testthat/test_ts_par6.R
    @@ -6,6 +6,5 @@ test_that("Test ts_par6", {
       skip_on_cran()
     
       expect_output(ts_par6(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ts_par7.R b/R/tests/testthat/test_ts_par7.R
    index 60b41b12..501eeff6 100644
    --- a/R/tests/testthat/test_ts_par7.R
    +++ b/R/tests/testthat/test_ts_par7.R
    @@ -6,6 +6,5 @@ test_that("Test ts_par7", {
       skip_on_cran()
     
       expect_output(ts_par7(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ug_bayes.R b/R/tests/testthat/test_ug_bayes.R
    index 4834d264..0ead0080 100644
    --- a/R/tests/testthat/test_ug_bayes.R
    +++ b/R/tests/testthat/test_ug_bayes.R
    @@ -6,6 +6,5 @@ test_that("Test ug_bayes", {
       skip_on_cran()
     
       expect_output(ug_bayes(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_ug_delta.R b/R/tests/testthat/test_ug_delta.R
    index eabbc43a..7014320b 100644
    --- a/R/tests/testthat/test_ug_delta.R
    +++ b/R/tests/testthat/test_ug_delta.R
    @@ -6,6 +6,5 @@ test_that("Test ug_delta", {
       skip_on_cran()
     
       expect_output(ug_delta(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/R/tests/testthat/test_wcs_sql.R b/R/tests/testthat/test_wcs_sql.R
    index 8bb7c3c8..2e48d7d5 100644
    --- a/R/tests/testthat/test_wcs_sql.R
    +++ b/R/tests/testthat/test_wcs_sql.R
    @@ -6,6 +6,5 @@ test_that("Test wcs_sql", {
       skip_on_cran()
     
       expect_output(wcs_sql(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
    diff --git a/commons/templates/PY_CODE_TEMPLATE.txt b/commons/templates/PY_CODE_TEMPLATE.txt
    index de2b12f8..4521019a 100644
    --- a/commons/templates/PY_CODE_TEMPLATE.txt
    +++ b/commons/templates/PY_CODE_TEMPLATE.txt
    @@ -39,9 +39,7 @@ class {class_name}(TaskModel):
     
     
     def {model_function}(
    -        example: bool = False,
    -        datafile: str = None,
    -        data: pd.DataFrame = None,
    +        data: Union[pd.DataFrame, str, None] = None,
             niter: int = 4000,
             nwarmup: int = 1000,
             nchain: int = 4,
    @@ -58,8 +56,6 @@ def {model_function}(
             **additional_args: Any) -> TaskModel:
         """{docstring_template}    """
         return {class_name}(
    -        example=example,
    -        datafile=datafile,
             data=data,
             niter=niter,
             nwarmup=nwarmup,
    diff --git a/commons/templates/PY_TEST_TEMPLATE.txt b/commons/templates/PY_TEST_TEMPLATE.txt
    index c1d89d9a..418bd68a 100644
    --- a/commons/templates/PY_TEST_TEMPLATE.txt
    +++ b/commons/templates/PY_TEST_TEMPLATE.txt
    @@ -5,7 +5,7 @@ from hbayesdm.models import {model_function}
     
     def test_{model_function}():
         _ = {model_function}(
    -        example=True, niter=10, nwarmup=5, nchain=1, ncore=1)
    +        data="example", niter=10, nwarmup=5, nchain=1, ncore=1)
     
     
     if __name__ == '__main__':
    diff --git a/commons/templates/R_TEST_TEMPLATE.txt b/commons/templates/R_TEST_TEMPLATE.txt
    index 9dee0a58..e7870c5f 100644
    --- a/commons/templates/R_TEST_TEMPLATE.txt
    +++ b/commons/templates/R_TEST_TEMPLATE.txt
    @@ -6,6 +6,5 @@ test_that("Test %(model_function)s", {
       skip_on_cran()
     
       expect_output(%(model_function)s(
    -      use_example = TRUE,
    -      niter=10, nwarmup=5, nchain=1, ncore=1))
    +      data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1))
     })
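
A quick illustration of the unified interface introduced by this patch — a minimal
Python sketch, assuming the regenerated hbayesdm package; the file name
'gng_data.txt' and the choice of gng_m1 are placeholders, not part of the patch:

    import pandas as pd
    from hbayesdm.models import gng_m1

    # Every input form now goes through the single `data` argument.
    df = pd.read_csv('gng_data.txt', sep='\t')  # hypothetical tab-separated file
    output_df = gng_m1(data=df, niter=2000, nwarmup=1000, nchain=4, ncore=4)
    output_ex = gng_m1(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4)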
    
    From 537cb9b72ebbe89c6dbee07056297129c0b69907 Mon Sep 17 00:00:00 2001
    From: Jaeyeong Yang 
    Date: Mon, 26 Aug 2019 03:17:41 +0900
    Subject: [PATCH 121/163] Add extract_id for Python package
    
    ---
     Python/hbayesdm/__init__.py    |  6 +++--
     Python/hbayesdm/diagnostics.py | 48 +++++++++++++++++++++++++++++++---
     2 files changed, 48 insertions(+), 6 deletions(-)
    
    diff --git a/Python/hbayesdm/__init__.py b/Python/hbayesdm/__init__.py
    index 1461b292..99a99cfa 100644
    --- a/Python/hbayesdm/__init__.py
    +++ b/Python/hbayesdm/__init__.py
    @@ -1,3 +1,5 @@
    -from hbayesdm.diagnostics import rhat, print_fit, hdi, plot_hdi
    +import hbayesdm
    +from hbayesdm.diagnostics import *
     
    -__all__ = ['rhat', 'print_fit', 'hdi', 'plot_hdi']
    +__all__ = []
    +__all__ += hbayesdm.diagnostics.__all__
    diff --git a/Python/hbayesdm/diagnostics.py b/Python/hbayesdm/diagnostics.py
    index 4897a9a6..6f25cfb9 100644
    --- a/Python/hbayesdm/diagnostics.py
    +++ b/Python/hbayesdm/diagnostics.py
    @@ -7,7 +7,7 @@
     
     from hbayesdm.base import TaskModel
     
    -__all__ = ['rhat', 'print_fit', 'hdi', 'plot_hdi']
    +__all__ = ['rhat', 'print_fit', 'hdi', 'plot_hdi', 'extract_ic']
     
     
     def rhat(model_data: TaskModel,
    @@ -40,7 +40,7 @@ def rhat(model_data: TaskModel,
                     for v in (rhat_data.max() <= less).data_vars.values()}
     
     
    -def print_fit(*args: TaskModel, ic: str = 'loo') -> pd.DataFrame:
    +def print_fit(*args: TaskModel, ic: str = 'looic') -> pd.DataFrame:
         """Print model-fits (mean LOOIC or WAIC values) of hbayesdm models.
     
         Parameters
    @@ -48,14 +48,14 @@ def print_fit(*args: TaskModel, ic: str = 'loo') -> pd.DataFrame:
         args
             Output instances of running hbayesdm model functions.
         ic
    -        Information criterion (defaults to 'loo').
    +        Information criterion (defaults to 'looic').
     
         Returns
         -------
         pd.DataFrame
             Model-fit info per each hbayesdm output given as argument(s).
         """
    -    ic_options = ('loo', 'waic')
    +    ic_options = ('looic', 'waic')
         if ic not in ic_options:
             raise RuntimeError(
                 'Information Criterion (ic) must be one of ' + repr(ic_options))
    @@ -64,6 +64,8 @@ def print_fit(*args: TaskModel, ic: str = 'loo') -> pd.DataFrame:
                 az.from_pystan(model_data.fit, log_likelihood='log_lik')
             for model_data in args
         }
    +
    +    ic = 'loo' if ic == 'looic' else 'waic'
         return az.compare(dataset_dict=dataset_dict, ic=ic)
     
     
    @@ -134,3 +136,41 @@ def plot_hdi(x: np.ndarray,
         ax.set_xlabel(xlabel)
         ax.set_ylabel(ylabel)
         plt.show()
    +
    +
    +def extract_ic(model_data: TaskModel,
    +               ic: str = 'both',
    +               ncore: int = 2) \
    +        -> Dict:
    +    """Extract model comparison estimates.
    +
    +    Parameters
    +    ----------
    +    model_data
    +        hBayesDM output objects from running model functions.
    +    ic
    +        Information criterion. 'looic', 'waic', or 'both'. Defaults to 'both'.
    +    ncore
    +        Number of cores to use when computing LOOIC. Defaults to 2.
    +
    +    Returns
    +    -------
    +    Dict
    +        Leave-One-Out and/or Watanabe-Akaike information criterion estimates.
    +    """
    +    ic_options = ('looic', 'waic', 'both')
    +    if ic not in ic_options:
    +        raise RuntimeError(
    +            'Information Criterion (ic) must be one of ' + repr(ic_options))
    +
    +    dat = az.from_pystan(model_data.fit, log_likelihood='log_lik')
    +
    +    ret = {}
    +
    +    if ic in ['looic', 'both']:
    +        ret['looic'] = az.loo(dat)['loo']
    +
    +    if ic in ['waic', 'both']:
    +        ret['waic'] = az.waic(dat)['waic']
    +
    +    return ret
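
A minimal usage sketch for the new function and the renamed `ic` options; the two
gng fits here are only an assumed example of fitted hbayesdm outputs, not part of
the patch:

    from hbayesdm import extract_ic, print_fit
    from hbayesdm.models import gng_m1, gng_m2

    output1 = gng_m1(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4)
    output2 = gng_m2(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4)

    # LOOIC and WAIC estimates for one fitted model
    ics = extract_ic(output1, ic='both')   # {'looic': ..., 'waic': ...}

    # Model comparison now takes ic='looic' (default) or ic='waic', not 'loo'
    print_fit(output1, output2, ic='looic')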
    
    From 873281cec4b2469b29e2c4d71c376fb1ffacdbaf Mon Sep 17 00:00:00 2001
    From: Jaeyeong Yang 
    Date: Mon, 26 Aug 2019 03:18:01 +0900
    Subject: [PATCH 122/163] Fix typos in the documentation of extract_ic
    
    ---
     R/R/extract_ic.R                 | 2 +-
     R/docs/reference/extract_ic.html | 2 +-
     R/man/extract_ic.Rd              | 2 +-
     3 files changed, 3 insertions(+), 3 deletions(-)
    
    diff --git a/R/R/extract_ic.R b/R/R/extract_ic.R
    index ddafa217..23dcd55e 100644
    --- a/R/R/extract_ic.R
    +++ b/R/R/extract_ic.R
    @@ -2,7 +2,7 @@
     #'
     #' @param model_data Object returned by \code{'hBayesDM'} model function
     #' @param ic Information Criterion. 'looic', 'waic', or 'both'
    -#' @param ncore Number of corse to use when computing LOOIC
    +#' @param ncore Number of cores to use when computing LOOIC
     #'
     #' @importFrom loo extract_log_lik relative_eff loo waic
     #'
    diff --git a/R/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html
    index 733edc1a..1abf870e 100644
    --- a/R/docs/reference/extract_ic.html
    +++ b/R/docs/reference/extract_ic.html
    @@ -128,7 +128,7 @@ 

 ncore
-Number of corse to use when computing LOOIC
+Number of cores to use when computing LOOIC

diff --git a/R/man/extract_ic.Rd b/R/man/extract_ic.Rd
index 2ae1361a..e0483aad 100644
--- a/R/man/extract_ic.Rd
+++ b/R/man/extract_ic.Rd
@@ -11,7 +11,7 @@ extract_ic(model_data = NULL, ic = "looic", ncore = 2)
 
 \item{ic}{Information Criterion. 'looic', 'waic', or 'both'}
 
-\item{ncore}{Number of corse to use when computing LOOIC}
+\item{ncore}{Number of cores to use when computing LOOIC}
 }
 \value{
 IC Leave-One-Out and/or Watanabe-Akaike information criterion estimates.

From e30e6a712246ecc81d3c6e724b527070dcb0a87d Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 03:43:13 +0900
Subject: [PATCH 123/163] Use [-Inf, Inf] for a drift rate

---
 commons/models/choiceRT_ddm.yml             |  2 +-
 commons/models/choiceRT_ddm_single.yml      |  2 +-
 commons/stan_files/choiceRT_ddm.stan        | 16 ++++++++--------
 commons/stan_files/choiceRT_ddm_single.stan |  2 +-
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/commons/models/choiceRT_ddm.yml b/commons/models/choiceRT_ddm.yml
index d4c848f1..b4af5151 100644
--- a/commons/models/choiceRT_ddm.yml
+++ b/commons/models/choiceRT_ddm.yml
@@ -34,7 +34,7 @@ parameters:
     info: [0, 0.5, 1]
   delta:
     desc: drift rate
-    info: [0, 0.5, Inf]
+    info: [-Inf, 0, Inf]
   tau:
     desc: non-decision time
     info: [0, 0.15, 1]
diff --git a/commons/models/choiceRT_ddm_single.yml b/commons/models/choiceRT_ddm_single.yml
index b5ed8e76..aaf93807 100644
--- a/commons/models/choiceRT_ddm_single.yml
+++ b/commons/models/choiceRT_ddm_single.yml
@@ -34,7 +34,7 @@ parameters:
     info: [0, 0.5, 1]
   delta:
     desc: drift rate
-    info: [0, 0.5, Inf]
+    info: [-Inf, 0, Inf]
   tau:
     desc: non-decision time
     info: [0, 0.15, 1]
diff --git a/commons/stan_files/choiceRT_ddm.stan b/commons/stan_files/choiceRT_ddm.stan
index 58baaec6..dde21623 100644
--- a/commons/stan_files/choiceRT_ddm.stan
+++ b/commons/stan_files/choiceRT_ddm.stan
@@ -38,9 +38,9 @@ parameters {
 
 transformed parameters {
   // Transform subject-level raw parameters
-  vector[N] alpha; // boundary separation
-  vector[N] beta;  // initial bias
-  vector[N] delta; // drift rate
+  vector[N] alpha; // boundary separation
+  vector[N] beta;  // initial bias
+  vector[N] delta; // drift rate
   vector[N] tau; // nondecision time
 
   for (i in 1:N) {
     beta[i] = Phi_approx(mu_pr[2] + sigma[2] * beta_pr[i]);
     tau[i]  = Phi_approx(mu_pr[4] + sigma[4] * tau_pr[i]) * (minRT[i] - RTbound) + RTbound;
   }
   alpha = exp(mu_pr[1] + sigma[1] * alpha_pr);
-  delta = exp(mu_pr[3] + sigma[3] * delta_pr);
+  delta = mu_pr[3] + sigma[3] * delta_pr;
 }
 
 model {
@@ -73,9 +73,9 @@
 generated quantities {
   // For group level parameters
-  real mu_alpha; // boundary separation
-  real mu_beta;  // initial bias
-  real mu_delta; // drift rate
+  real mu_alpha; // boundary separation
+  real mu_beta;  // initial bias
+  real mu_delta; // drift rate
   real mu_tau; // nondecision time
 
   // For log likelihood calculation
   real log_lik[N];
 
   // Assign group level parameter values
   mu_alpha = exp(mu_pr[1]);
   mu_beta  = Phi_approx(mu_pr[2]);
-  mu_delta = exp(mu_pr[3]);
+  mu_delta = mu_pr[3];
   mu_tau   = Phi_approx(mu_pr[4]) * (mean(minRT)-RTbound) + RTbound;
 
   { // local section, this saves time and space
diff --git a/commons/stan_files/choiceRT_ddm_single.stan b/commons/stan_files/choiceRT_ddm_single.stan
index 6bacd18a..95e0bc1b 100644
--- a/commons/stan_files/choiceRT_ddm_single.stan
+++ b/commons/stan_files/choiceRT_ddm_single.stan
@@ -23,7 +23,7 @@ parameters {
 
   real alpha; // boundary separation
   real beta;  // initial bias
-  real delta; // drift rate
+  real delta; // drift rate
   real tau;   // nondecision time
 }
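
The substance of this change: the subject-level drift rate previously went
through an exponential link, which forced it to be positive; with the identity
link it can now take any real value, matching the new [-Inf, Inf] range. A toy
Python sketch of the two transforms (mu_pr, sigma, and delta_pr mirror the
group-level and raw parameters in the Stan code; the numbers are made up):

    import numpy as np

    mu_pr, sigma = 0.2, 0.5
    delta_pr = np.array([-2.0, 0.0, 2.0])  # standard-normal raw parameters

    delta_old = np.exp(mu_pr + sigma * delta_pr)  # old link: always positive
    delta_new = mu_pr + sigma * delta_pr          # new link: unbounded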

From 21e8379a0e5b3b590599df606cd116c5c1863ae1 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 03:43:47 +0900
Subject: [PATCH 124/163] Re-generate codes

---
 Python/hbayesdm/models/_choiceRT_ddm.py        | 2 +-
 Python/hbayesdm/models/_choiceRT_ddm_single.py | 2 +-
 R/R/choiceRT_ddm.R                             | 2 +-
 R/R/choiceRT_ddm_single.R                      | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py
index 85275f8a..2b6e8ef8 100644
--- a/Python/hbayesdm/models/_choiceRT_ddm.py
+++ b/Python/hbayesdm/models/_choiceRT_ddm.py
@@ -24,7 +24,7 @@ def __init__(self, **kwargs):
             parameters=OrderedDict([
                 ('alpha', (0, 0.5, Inf)),
                 ('beta', (0, 0.5, 1)),
-                ('delta', (0, 0.5, Inf)),
+                ('delta', (-Inf, 0, Inf)),
                 ('tau', (0, 0.15, 1)),
             ]),
             regressors=OrderedDict([
diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py
index a45e472b..a179e5d5 100644
--- a/Python/hbayesdm/models/_choiceRT_ddm_single.py
+++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py
@@ -24,7 +24,7 @@ def __init__(self, **kwargs):
             parameters=OrderedDict([
                 ('alpha', (0, 0.5, Inf)),
                 ('beta', (0, 0.5, 1)),
-                ('delta', (0, 0.5, Inf)),
+                ('delta', (-Inf, 0, Inf)),
                 ('tau', (0, 0.15, 1)),
             ]),
             regressors=OrderedDict([
diff --git a/R/R/choiceRT_ddm.R b/R/R/choiceRT_ddm.R
index 1c0383b0..73a88edc 100644
--- a/R/R/choiceRT_ddm.R
+++ b/R/R/choiceRT_ddm.R
@@ -41,7 +41,7 @@ choiceRT_ddm <- hBayesDM_model(
   parameters = list(
     "alpha" = c(0, 0.5, Inf),
     "beta" = c(0, 0.5, 1),
-    "delta" = c(0, 0.5, Inf),
+    "delta" = c(-Inf, 0, Inf),
     "tau" = c(0, 0.15, 1)
   ),
   regressors = NULL,
diff --git a/R/R/choiceRT_ddm_single.R b/R/R/choiceRT_ddm_single.R
index 1437459d..1d70b978 100644
--- a/R/R/choiceRT_ddm_single.R
+++ b/R/R/choiceRT_ddm_single.R
@@ -41,7 +41,7 @@ choiceRT_ddm_single <- hBayesDM_model(
   parameters = list(
     "alpha" = c(0, 0.5, Inf),
     "beta" = c(0, 0.5, 1),
-    "delta" = c(0, 0.5, Inf),
+    "delta" = c(-Inf, 0, Inf),
     "tau" = c(0, 0.15, 1)
   ),
   regressors = NULL,

From 73174007e52325af6c7c03d28bd82cd1c8251f9a Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 04:01:26 +0900
Subject: [PATCH 125/163] Update version to v1.0.0

---
 Python/setup.py | 8 ++++----
 R/DESCRIPTION   | 4 ++--
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/Python/setup.py b/Python/setup.py
index 3be7f4b0..4c2a79bf 100644
--- a/Python/setup.py
+++ b/Python/setup.py
@@ -12,10 +12,10 @@
 
 PATH_ROOT = Path(__file__).absolute().parent
 
-MAJOR = 0
-MINOR = 7
-MICRO = 2
-ISRELEASED = False
+MAJOR = 1
+MINOR = 0
+MICRO = 0
+ISRELEASED = True
 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
 VERSION += '' if ISRELEASED else '.9000'
 
diff --git a/R/DESCRIPTION b/R/DESCRIPTION
index a54f1dfa..e11c6e9a 100644
--- a/R/DESCRIPTION
+++ b/R/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: hBayesDM
 Title: Hierarchical Bayesian Modeling of Decision-Making Tasks
-Version: 0.7.2.9000
+Version: 1.0.0
 Date: 2019-02-11
 Author: Woo-Young Ahn [aut, cre],
@@ -112,5 +112,5 @@ Collate:
     'ug_delta.R'
     'wcs_sql.R'
     'zzz.R'
-Suggests:
+Suggests:
     testthat

From 39670c1ff760eca746838bd0a4d627b9c27acf64 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 04:04:07 +0900
Subject: [PATCH 126/163] Update docs using pkgdown

---
 R/docs/authors.html                             | 2 +-
 R/docs/index.html                               | 2 +-
 R/docs/news/index.html                          | 2 +-
 R/docs/reference/HDIofMCMC.html                 | 2 +-
 R/docs/reference/bandit2arm_delta.html          | 2 +-
 R/docs/reference/bandit4arm2_kalman_filter.html | 2 +-
 R/docs/reference/bandit4arm_2par_lapse.html     | 2 +-
 R/docs/reference/bandit4arm_4par.html           | 2 +-
 R/docs/reference/bandit4arm_lapse.html          | 2 +-
 R/docs/reference/bandit4arm_lapse_decay.html    | 2 +-
 R/docs/reference/bandit4arm_singleA_lapse.html  | 2 +-
 R/docs/reference/bart_par4.html                 | 2 +-
 R/docs/reference/choiceRT_ddm.html              | 2 +-
 R/docs/reference/choiceRT_ddm_single.html       | 2 +-
 R/docs/reference/choiceRT_lba.html              | 2 +-
 R/docs/reference/choiceRT_lba_single.html       | 2 +-
 R/docs/reference/cra_exp.html                   | 2 +-
 R/docs/reference/cra_linear.html                | 2 +-
 R/docs/reference/dbdm_prob_weight.html          | 2 +-
 R/docs/reference/dd_cs.html                     | 2 +-
 R/docs/reference/dd_cs_single.html              | 2 +-
 R/docs/reference/dd_exp.html                    | 2 +-
 R/docs/reference/dd_hyperbolic.html             | 2 +-
 R/docs/reference/dd_hyperbolic_single.html      | 2 +-
 R/docs/reference/estimate_mode.html             | 2 +-
 R/docs/reference/extract_ic.html                | 2 +-
 R/docs/reference/gng_m1.html                    | 2 +-
 R/docs/reference/gng_m2.html                    | 2 +-
 R/docs/reference/gng_m3.html                    | 2 +-
 R/docs/reference/gng_m4.html                    | 2 +-
 R/docs/reference/hBayesDM-package.html          | 2 +-
 R/docs/reference/hBayesDM_model.html            | 2 +-
 R/docs/reference/igt_orl.html                   | 2 +-
 R/docs/reference/igt_pvl_decay.html             | 2 +-
 R/docs/reference/igt_pvl_delta.html             | 2 +-
 R/docs/reference/igt_vpp.html                   | 2 +-
 R/docs/reference/index.html                     | 2 +-
 R/docs/reference/multiplot.html                 | 2 +-
 R/docs/reference/peer_ocu.html                  | 2 +-
 R/docs/reference/plot.hBayesDM.html             | 2 +-
 R/docs/reference/plotDist.html                  | 2 +-
 R/docs/reference/plotHDI.html                   | 2 +-
 R/docs/reference/plotInd.html                   | 2 +-
 R/docs/reference/printFit.html                  | 2 +-
 R/docs/reference/prl_ewa.html                   | 2 +-
 R/docs/reference/prl_fictitious.html            | 2 +-
 R/docs/reference/prl_fictitious_multipleB.html  | 2 +-
 R/docs/reference/prl_fictitious_rp.html         | 2 +-
 R/docs/reference/prl_fictitious_rp_woa.html     | 2 +-
 R/docs/reference/prl_fictitious_woa.html        | 2 +-
 R/docs/reference/prl_rp.html                    | 2 +-
 R/docs/reference/prl_rp_multipleB.html          | 2 +-
 R/docs/reference/pst_gainloss_Q.html            | 2 +-
 R/docs/reference/ra_noLA.html                   | 2 +-
 R/docs/reference/ra_noRA.html                   | 2 +-
 R/docs/reference/ra_prospect.html               | 2 +-
 R/docs/reference/rdt_happiness.html             | 2 +-
 R/docs/reference/rhat.html                      | 2 +-
 R/docs/reference/ts_par4.html                   | 2 +-
 R/docs/reference/ts_par6.html                   | 2 +-
 R/docs/reference/ts_par7.html                   | 2 +-
 R/docs/reference/ug_bayes.html                  | 2 +-
 R/docs/reference/ug_delta.html                  | 2 +-
 R/docs/reference/wcs_sql.html                   | 2 +-
 64 files changed, 64 insertions(+), 64 deletions(-)

diff --git a/R/docs/authors.html b/R/docs/authors.html
index e8f4aec2..3174b5fa 100644
--- a/R/docs/authors.html
+++ b/R/docs/authors.html
@@ -60,7 +60,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/index.html b/R/docs/index.html
index ef1226c3..386e1f71 100644
--- a/R/docs/index.html
+++ b/R/docs/index.html
@@ -34,7 +34,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/news/index.html b/R/docs/news/index.html
index 372017fa..feb5d59f 100644
--- a/R/docs/news/index.html
+++ b/R/docs/news/index.html
@@ -60,7 +60,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/HDIofMCMC.html b/R/docs/reference/HDIofMCMC.html
index f74c1071..f4f9a3ec 100644
--- a/R/docs/reference/HDIofMCMC.html
+++ b/R/docs/reference/HDIofMCMC.html
@@ -64,7 +64,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html
index 149413a4..a1b01a4d 100644
--- a/R/docs/reference/bandit2arm_delta.html
+++ b/R/docs/reference/bandit2arm_delta.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html
index 3f394817..f85e16a7 100644
--- a/R/docs/reference/bandit4arm2_kalman_filter.html
+++ b/R/docs/reference/bandit4arm2_kalman_filter.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html
index d6b730e9..2af77a80 100644
--- a/R/docs/reference/bandit4arm_2par_lapse.html
+++ b/R/docs/reference/bandit4arm_2par_lapse.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html
index 8284c8a5..be3717d1 100644
--- a/R/docs/reference/bandit4arm_4par.html
+++ b/R/docs/reference/bandit4arm_4par.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html
index fb52bd32..4fce923e 100644
--- a/R/docs/reference/bandit4arm_lapse.html
+++ b/R/docs/reference/bandit4arm_lapse.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html
index be2afec1..c4d61b73 100644
--- a/R/docs/reference/bandit4arm_lapse_decay.html
+++ b/R/docs/reference/bandit4arm_lapse_decay.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html
index aa92fa48..ae2fa35d 100644
--- a/R/docs/reference/bandit4arm_singleA_lapse.html
+++ b/R/docs/reference/bandit4arm_singleA_lapse.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html
index 399edfeb..1babdb6a 100644
--- a/R/docs/reference/bart_par4.html
+++ b/R/docs/reference/bart_par4.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html
index 43e2383e..760bbbb7 100644
--- a/R/docs/reference/choiceRT_ddm.html
+++ b/R/docs/reference/choiceRT_ddm.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html
index c26bb481..d78001c8 100644
--- a/R/docs/reference/choiceRT_ddm_single.html
+++ b/R/docs/reference/choiceRT_ddm_single.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/choiceRT_lba.html b/R/docs/reference/choiceRT_lba.html
index f1a6b2d1..05802d60 100644
--- a/R/docs/reference/choiceRT_lba.html
+++ b/R/docs/reference/choiceRT_lba.html
@@ -69,7 +69,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/choiceRT_lba_single.html b/R/docs/reference/choiceRT_lba_single.html
index f4bb4b10..bfa95fbb 100644
--- a/R/docs/reference/choiceRT_lba_single.html
+++ b/R/docs/reference/choiceRT_lba_single.html
@@ -69,7 +69,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html
index 7ce45d56..c8293eae 100644
--- a/R/docs/reference/cra_exp.html
+++ b/R/docs/reference/cra_exp.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html
index c491ea34..1fc9bf13 100644
--- a/R/docs/reference/cra_linear.html
+++ b/R/docs/reference/cra_linear.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html
index 9666bf72..db16b04a 100644
--- a/R/docs/reference/dbdm_prob_weight.html
+++ b/R/docs/reference/dbdm_prob_weight.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html
index cf8e80fd..3c87802a 100644
--- a/R/docs/reference/dd_cs.html
+++ b/R/docs/reference/dd_cs.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html
index 285dec0b..518bdc76 100644
--- a/R/docs/reference/dd_cs_single.html
+++ b/R/docs/reference/dd_cs_single.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html
index 3d357bee..ea56ddb0 100644
--- a/R/docs/reference/dd_exp.html
+++ b/R/docs/reference/dd_exp.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html
index 52627905..7e98ab21 100644
--- a/R/docs/reference/dd_hyperbolic.html
+++ b/R/docs/reference/dd_hyperbolic.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html
index 8bb9db43..6b365334 100644
--- a/R/docs/reference/dd_hyperbolic_single.html
+++ b/R/docs/reference/dd_hyperbolic_single.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/estimate_mode.html b/R/docs/reference/estimate_mode.html
index 9c4ac7cd..0a08a4fc 100644
--- a/R/docs/reference/estimate_mode.html
+++ b/R/docs/reference/estimate_mode.html
@@ -64,7 +64,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html
index 1abf870e..32229540 100644
--- a/R/docs/reference/extract_ic.html
+++ b/R/docs/reference/extract_ic.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html
index e6e0264e..a8b823cc 100644
--- a/R/docs/reference/gng_m1.html
+++ b/R/docs/reference/gng_m1.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html
index 1066b42b..acd414ab 100644
--- a/R/docs/reference/gng_m2.html
+++ b/R/docs/reference/gng_m2.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html
index a269287b..15f42206 100644
--- a/R/docs/reference/gng_m3.html
+++ b/R/docs/reference/gng_m3.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html
index 3ef5da22..4d7a8a61 100644
--- a/R/docs/reference/gng_m4.html
+++ b/R/docs/reference/gng_m4.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/hBayesDM-package.html b/R/docs/reference/hBayesDM-package.html
index 722fabb9..4560a2d8 100644
--- a/R/docs/reference/hBayesDM-package.html
+++ b/R/docs/reference/hBayesDM-package.html
@@ -110,7 +110,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/hBayesDM_model.html b/R/docs/reference/hBayesDM_model.html
index c8b83478..3d17ca81 100644
--- a/R/docs/reference/hBayesDM_model.html
+++ b/R/docs/reference/hBayesDM_model.html
@@ -64,7 +64,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html
index bf19b132..b4f4f365 100644
--- a/R/docs/reference/igt_orl.html
+++ b/R/docs/reference/igt_orl.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html
index a0ea74c6..12c9a964 100644
--- a/R/docs/reference/igt_pvl_decay.html
+++ b/R/docs/reference/igt_pvl_decay.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html
index 3f674958..21942462 100644
--- a/R/docs/reference/igt_pvl_delta.html
+++ b/R/docs/reference/igt_pvl_delta.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html
index b576e9c3..9a538fca 100644
--- a/R/docs/reference/igt_vpp.html
+++ b/R/docs/reference/igt_vpp.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/index.html b/R/docs/reference/index.html
index 4a4ac4df..181147b8 100644
--- a/R/docs/reference/index.html
+++ b/R/docs/reference/index.html
@@ -60,7 +60,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/multiplot.html b/R/docs/reference/multiplot.html
index f579d563..5e4b9563 100644
--- a/R/docs/reference/multiplot.html
+++ b/R/docs/reference/multiplot.html
@@ -64,7 +64,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html
index a2aa70fc..7b559757 100644
--- a/R/docs/reference/peer_ocu.html
+++ b/R/docs/reference/peer_ocu.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/plot.hBayesDM.html b/R/docs/reference/plot.hBayesDM.html
index 10e25214..68d4705a 100644
--- a/R/docs/reference/plot.hBayesDM.html
+++ b/R/docs/reference/plot.hBayesDM.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/plotDist.html b/R/docs/reference/plotDist.html
index 3683cb9e..34a3318a 100644
--- a/R/docs/reference/plotDist.html
+++ b/R/docs/reference/plotDist.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/plotHDI.html b/R/docs/reference/plotHDI.html
index b320a415..aa397b75 100644
--- a/R/docs/reference/plotHDI.html
+++ b/R/docs/reference/plotHDI.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/plotInd.html b/R/docs/reference/plotInd.html
index af1239a0..99c5918c 100644
--- a/R/docs/reference/plotInd.html
+++ b/R/docs/reference/plotInd.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/printFit.html b/R/docs/reference/printFit.html
index 55f858f7..08c2bfd5 100644
--- a/R/docs/reference/printFit.html
+++ b/R/docs/reference/printFit.html
@@ -63,7 +63,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html
index c69228aa..32fc41c6 100644
--- a/R/docs/reference/prl_ewa.html
+++ b/R/docs/reference/prl_ewa.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html
index 5b6b966e..7f56cbab 100644
--- a/R/docs/reference/prl_fictitious.html
+++ b/R/docs/reference/prl_fictitious.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html
index 9c66060a..82f53316 100644
--- a/R/docs/reference/prl_fictitious_multipleB.html
+++ b/R/docs/reference/prl_fictitious_multipleB.html
@@ -68,7 +68,7 @@
       hBayesDM
-      0.7.2.9000
+      1.0.0
diff --git a/R/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html
index 05ac0746..87733062 100644
--- a/R/docs/reference/prl_fictitious_rp.html
+++ b/R/docs/reference/prl_fictitious_rp.html
@@ -68,7 +68,7 @@
       hBayesDM
- 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html index 9d60f809..3f3ec12a 100644 --- a/R/docs/reference/prl_fictitious_rp_woa.html +++ b/R/docs/reference/prl_fictitious_rp_woa.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html index 0a99b75d..05d0119b 100644 --- a/R/docs/reference/prl_fictitious_woa.html +++ b/R/docs/reference/prl_fictitious_woa.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html index f17779dc..6dcf65af 100644 --- a/R/docs/reference/prl_rp.html +++ b/R/docs/reference/prl_rp.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html index 51a48789..769ff020 100644 --- a/R/docs/reference/prl_rp_multipleB.html +++ b/R/docs/reference/prl_rp_multipleB.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html index 909aad90..dd7e057c 100644 --- a/R/docs/reference/pst_gainloss_Q.html +++ b/R/docs/reference/pst_gainloss_Q.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html index bd24774a..c9f08f8b 100644 --- a/R/docs/reference/ra_noLA.html +++ b/R/docs/reference/ra_noLA.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ra_noRA.html b/R/docs/reference/ra_noRA.html index fe13541f..277d55eb 100644 --- a/R/docs/reference/ra_noRA.html +++ b/R/docs/reference/ra_noRA.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ra_prospect.html b/R/docs/reference/ra_prospect.html index c6c9427c..7c7fa27a 100644 --- a/R/docs/reference/ra_prospect.html +++ b/R/docs/reference/ra_prospect.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/rdt_happiness.html b/R/docs/reference/rdt_happiness.html index 0be764b8..03105a5a 100644 --- a/R/docs/reference/rdt_happiness.html +++ b/R/docs/reference/rdt_happiness.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/rhat.html b/R/docs/reference/rhat.html index 5e703778..8d44cc95 100644 --- a/R/docs/reference/rhat.html +++ b/R/docs/reference/rhat.html @@ -65,7 +65,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html index fabfe5b7..a322c627 100644 --- a/R/docs/reference/ts_par4.html +++ b/R/docs/reference/ts_par4.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html index dbe9e3a3..640fd994 100644 --- a/R/docs/reference/ts_par6.html +++ b/R/docs/reference/ts_par6.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html index 19e8c5fc..4ffc3b28 100644 --- a/R/docs/reference/ts_par7.html +++ b/R/docs/reference/ts_par7.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html index 43aaa99a..2ce7c902 100644 --- a/R/docs/reference/ug_bayes.html +++ b/R/docs/reference/ug_bayes.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html index 5d7dbd26..80c69926 100644 --- a/R/docs/reference/ug_delta.html +++ 
b/R/docs/reference/ug_delta.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 diff --git a/R/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html index 01e902f1..dc613dc5 100644 --- a/R/docs/reference/wcs_sql.html +++ b/R/docs/reference/wcs_sql.html @@ -68,7 +68,7 @@ hBayesDM - 0.7.2.9000 + 1.0.0 From d3438e2f79d425bc4af8187e3b4460be3297760a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 17:45:22 +0900 Subject: [PATCH 127/163] Add model, a Stan file, example data for CGT --- commons/extdata/cgt_exampleData.txt | 7750 +++++++++++++++++++++++++++ commons/models/cgt_cm.yml | 105 + commons/stan_files/cgt_cm.stan | 170 + 3 files changed, 8025 insertions(+) create mode 100644 commons/extdata/cgt_exampleData.txt create mode 100644 commons/models/cgt_cm.yml create mode 100644 commons/stan_files/cgt_cm.stan diff --git a/commons/extdata/cgt_exampleData.txt b/commons/extdata/cgt_exampleData.txt new file mode 100644 index 00000000..53b4b05d --- /dev/null +++ b/commons/extdata/cgt_exampleData.txt @@ -0,0 +1,7750 @@ +"groupID" "subjID" "stage" "assessment.stage" "includes.gamble" "gamble.type" "block" "completed" "block.initial.points" "block.final.points" "trial.initial.points" "trial.final.points" "choice.latency" "n.left.colour.boxes" "token.box" "left.colour.chosen" "response.latency" "stake.index" "percentage.staked" "points.staked" "left.won" "subject.won" +"24100" 4 102 2 0 1 1 1 1 100 140 100 150 16114 8 3 1 3602 3 50 50 1 1 +"24101" 4 102 2 0 1 1 1 1 100 140 150 225 7371 3 7 0 1634 3 50 75 0 1 +"24102" 4 102 2 0 1 1 1 1 100 140 225 281 2698 8 2 1 1575 2 25 56 1 1 +"24103" 4 102 2 0 1 1 1 1 100 140 281 140 1585 2 1 0 892 3 50 141 1 0 +"24104" 4 102 3 1 1 1 1 1 100 485 100 195 2346 9 7 1 1113 5 95 95 1 1 +"24105" 4 102 3 1 1 1 1 1 100 485 195 244 2225 4 8 0 1348 2 25 49 0 1 +"24106" 4 102 3 1 1 1 1 1 100 485 244 305 1817 8 2 1 1793 2 25 61 1 1 +"24107" 4 102 3 1 1 1 1 1 100 485 305 229 1248 2 1 0 754 2 25 76 1 0 +"24108" 4 102 3 1 1 1 1 1 100 485 229 286 1571 7 6 1 1246 2 25 57 1 1 +"24109" 4 102 3 1 1 1 1 1 100 485 286 272 2672 5 3 0 654 1 5 14 1 0 +"24110" 4 102 3 1 1 1 1 1 100 485 272 340 1284 3 6 0 460 2 25 68 0 1 +"24111" 4 102 3 1 1 1 1 1 100 485 340 323 1843 6 9 1 689 1 5 17 0 0 +"24112" 4 102 3 1 1 1 1 1 100 485 323 485 1125 1 4 0 654 3 50 162 0 1 +"24113" 4 102 3 1 1 1 2 1 100 535 100 150 3689 2 9 0 735 3 50 50 0 1 +"24114" 4 102 3 1 1 1 2 1 100 535 150 158 2506 4 10 0 824 1 5 8 0 1 +"24115" 4 102 3 1 1 1 2 1 100 535 158 118 1414 3 1 0 481 2 25 40 1 0 +"24116" 4 102 3 1 1 1 2 1 100 535 118 207 1271 8 6 1 616 4 75 89 1 1 +"24117" 4 102 3 1 1 1 2 1 100 535 207 217 2024 5 7 0 765 1 5 10 0 1 +"24118" 4 102 3 1 1 1 2 1 100 535 217 163 1214 6 8 1 594 2 25 54 0 0 +"24119" 4 102 3 1 1 1 2 1 100 535 163 285 1363 7 5 1 729 4 75 122 1 1 +"24120" 4 102 3 1 1 1 2 1 100 535 285 428 1315 1 10 0 555 3 50 143 0 1 +"24121" 4 102 3 1 1 1 2 1 100 535 428 535 1589 9 3 1 631 2 25 107 1 1 +"24122" 4 102 3 1 1 1 3 1 100 499 100 150 1871 3 5 0 1060 3 50 50 0 1 +"24123" 4 102 3 1 1 1 3 1 100 499 150 75 1374 8 9 1 739 3 50 75 0 0 +"24124" 4 102 3 1 1 1 3 1 100 499 75 131 1628 2 4 0 837 4 75 56 0 1 +"24125" 4 102 3 1 1 1 3 1 100 499 131 164 1755 6 3 1 1996 2 25 33 1 1 +"24126" 4 102 3 1 1 1 3 1 100 499 164 205 2266 7 6 1 652 2 25 41 1 1 +"24127" 4 102 3 1 1 1 3 1 100 499 205 154 1371 4 2 0 639 2 25 51 1 0 +"24128" 4 102 3 1 1 1 3 1 100 499 154 300 1357 1 8 0 867 5 95 146 0 1 +"24129" 4 102 3 1 1 1 3 1 100 499 300 285 3127 5 7 1 2096 1 5 15 0 0 +"24130" 4 102 3 1 1 1 3 1 100 499 285 
499 1314 9 5 1 881 4 75 214 1 1 +"24131" 4 102 3 1 1 1 4 1 100 588 100 150 1821 8 7 1 852 3 50 50 1 1 +"24132" 4 102 3 1 1 1 4 1 100 588 150 188 1473 3 10 0 785 2 25 38 0 1 +"24133" 4 102 3 1 1 1 4 1 100 588 188 141 1889 7 9 1 1108 2 25 47 0 0 +"24134" 4 102 3 1 1 1 4 1 100 588 141 275 1412 9 1 1 1063 5 95 134 1 1 +"24135" 4 102 3 1 1 1 4 1 100 588 275 413 1562 2 3 0 1094 3 50 138 0 1 +"24136" 4 102 3 1 1 1 4 1 100 588 413 620 1718 1 8 0 1427 3 50 207 0 1 +"24137" 4 102 3 1 1 1 4 1 100 588 620 589 1844 5 4 0 841 1 5 31 1 0 +"24138" 4 102 3 1 1 1 4 1 100 588 589 560 1526 4 2 0 843 1 5 29 1 0 +"24139" 4 102 3 1 1 1 4 1 100 588 560 588 1520 6 3 1 1141 1 5 28 1 1 +"24140" 4 102 4 0 1 0 1 1 100 26 100 175 11028 2 7 0 1452 2 75 75 0 1 +"24141" 4 102 4 0 1 0 1 1 100 26 175 341 1732 3 7 0 1944 1 95 166 0 1 +"24142" 4 102 4 0 1 0 1 1 100 26 341 512 1425 2 8 0 827 3 50 171 0 1 +"24143" 4 102 4 0 1 0 1 1 100 26 512 26 1544 8 9 1 780 1 95 486 0 0 +"24144" 4 102 5 1 1 0 1 1 100 1429 100 195 1486 1 3 0 1197 1 95 95 0 1 +"24145" 4 102 5 1 1 0 1 1 100 1429 195 244 1729 6 5 1 581 4 25 49 1 1 +"24146" 4 102 5 1 1 0 1 1 100 1429 244 476 1302 2 8 0 894 1 95 232 0 1 +"24147" 4 102 5 1 1 0 1 1 100 1429 476 238 2128 8 9 1 729 3 50 238 0 0 +"24148" 4 102 5 1 1 0 1 1 100 1429 238 464 1283 3 4 0 732 1 95 226 0 1 +"24149" 4 102 5 1 1 0 1 1 100 1429 464 441 1662 5 7 1 942 5 5 23 0 0 +"24150" 4 102 5 1 1 0 1 1 100 1429 441 772 1742 7 4 1 776 2 75 331 1 1 +"24151" 4 102 5 1 1 0 1 1 100 1429 772 733 1544 4 1 0 949 5 5 39 1 0 +"24152" 4 102 5 1 1 0 1 1 100 1429 733 1429 1517 9 6 1 819 1 95 696 1 1 +"24153" 4 102 5 1 1 0 2 1 100 1654 100 195 1549 8 1 1 914 1 95 95 1 1 +"24154" 4 102 5 1 1 0 2 1 100 1654 195 244 1667 6 2 1 466 4 25 49 1 1 +"24155" 4 102 5 1 1 0 2 1 100 1654 244 122 2442 7 9 1 900 3 50 122 0 0 +"24156" 4 102 5 1 1 0 2 1 100 1654 122 238 1214 2 10 0 882 1 95 116 0 1 +"24157" 4 102 5 1 1 0 2 1 100 1654 238 298 2709 5 3 1 867 4 25 60 1 1 +"24158" 4 102 5 1 1 0 2 1 100 1654 298 223 1631 4 2 0 880 4 25 75 1 0 +"24159" 4 102 5 1 1 0 2 1 100 1654 223 435 1371 3 5 0 1359 1 95 212 0 1 +"24160" 4 102 5 1 1 0 2 1 100 1654 435 848 1356 9 4 1 801 1 95 413 1 1 +"24161" 4 102 5 1 1 0 2 1 100 1654 848 1654 1168 1 7 0 1006 1 95 806 0 1 +"24162" 4 102 5 1 1 0 3 1 100 31 100 195 1908 7 5 1 1295 1 95 95 1 1 +"24163" 4 102 5 1 1 0 3 1 100 31 195 10 1310 2 1 0 693 1 95 185 1 0 +"24164" 4 102 5 1 1 0 3 1 100 31 10 20 1337 8 6 1 735 1 95 10 1 1 +"24165" 4 102 5 1 1 0 3 1 100 31 20 39 1346 4 7 0 919 1 95 19 0 1 +"24166" 4 102 5 1 1 0 3 1 100 31 39 76 1105 3 10 0 666 1 95 37 0 1 +"24167" 4 102 5 1 1 0 3 1 100 31 76 4 1312 6 8 1 655 1 95 72 0 0 +"24168" 4 102 5 1 1 0 3 1 100 31 4 8 1261 9 2 1 958 1 95 4 1 1 +"24169" 4 102 5 1 1 0 3 1 100 31 8 16 1374 5 3 1 466 1 95 8 1 1 +"24170" 4 102 5 1 1 0 3 1 100 31 16 31 1137 1 10 0 576 1 95 15 0 1 +"24171" 4 102 5 1 1 0 4 1 100 749 100 195 1788 2 3 0 669 1 95 95 0 1 +"24172" 4 102 5 1 1 0 4 1 100 749 195 341 1114 7 4 1 787 2 75 146 1 1 +"24173" 4 102 5 1 1 0 4 1 100 749 341 170 1211 3 1 0 1672 3 50 171 1 0 +"24174" 4 102 5 1 1 0 4 1 100 749 170 332 1189 1 9 0 530 1 95 162 0 1 +"24175" 4 102 5 1 1 0 4 1 100 749 332 647 1022 8 7 1 691 1 95 315 1 1 +"24176" 4 102 5 1 1 0 4 1 100 749 647 1262 1135 9 2 1 647 1 95 615 1 1 +"24177" 4 102 5 1 1 0 4 1 100 749 1262 1199 1525 5 6 1 747 5 5 63 0 0 +"24178" 4 102 5 1 1 0 4 1 100 749 1199 599 1212 6 8 1 857 3 50 600 0 0 +"24179" 4 102 5 1 1 0 4 1 100 749 599 749 1069 4 7 0 864 4 25 150 0 1 +"24180" 4 110 2 0 1 1 1 1 100 268 100 150 12113 8 3 1 838 3 50 50 1 
1 +"24181" 4 110 2 0 1 1 1 1 100 268 150 188 10723 3 7 0 795 2 25 38 0 1 +"24182" 4 110 2 0 1 1 1 1 100 268 188 282 2554 8 2 1 743 3 50 94 1 1 +"24183" 4 110 2 0 1 1 1 1 100 268 282 268 3184 2 1 0 1438 1 5 14 1 0 +"24184" 4 110 3 1 1 1 1 1 100 480 100 150 3670 9 7 1 2124 3 50 50 1 1 +"24185" 4 110 3 1 1 1 1 1 100 480 150 188 3301 4 8 0 2021 2 25 38 0 1 +"24186" 4 110 3 1 1 1 1 1 100 480 188 235 1802 8 2 1 2546 2 25 47 1 1 +"24187" 4 110 3 1 1 1 1 1 100 480 235 223 1350 2 1 0 1028 1 5 12 1 0 +"24188" 4 110 3 1 1 1 1 1 100 480 223 279 1512 7 6 1 558 2 25 56 1 1 +"24189" 4 110 3 1 1 1 1 1 100 480 279 293 2832 5 3 1 535 1 5 14 1 1 +"24190" 4 110 3 1 1 1 1 1 100 480 293 366 1930 3 6 0 364 2 25 73 0 1 +"24191" 4 110 3 1 1 1 1 1 100 480 366 384 2136 6 9 0 536 1 5 18 0 1 +"24192" 4 110 3 1 1 1 1 1 100 480 384 480 1795 1 4 0 364 2 25 96 0 1 +"24193" 4 110 3 1 1 1 2 1 100 383 100 175 1671 2 9 0 637 4 75 75 0 1 +"24194" 4 110 3 1 1 1 2 1 100 383 175 184 1316 4 10 0 523 1 5 9 0 1 +"24195" 4 110 3 1 1 1 2 1 100 383 184 138 1583 3 1 0 730 2 25 46 1 0 +"24196" 4 110 3 1 1 1 2 1 100 383 138 207 1076 8 6 1 391 3 50 69 1 1 +"24197" 4 110 3 1 1 1 2 1 100 383 207 217 2922 5 7 0 484 1 5 10 0 1 +"24198" 4 110 3 1 1 1 2 1 100 383 217 163 1747 6 8 1 389 2 25 54 0 0 +"24199" 4 110 3 1 1 1 2 1 100 383 163 245 1496 7 5 1 391 3 50 82 1 1 +"24200" 4 110 3 1 1 1 2 1 100 383 245 306 1433 1 10 0 1211 2 25 61 0 1 +"24201" 4 110 3 1 1 1 2 1 100 383 306 383 1294 9 3 1 574 2 25 77 1 1 +"24202" 4 110 3 1 1 1 3 1 100 385 100 150 1910 3 5 0 593 3 50 50 0 1 +"24203" 4 110 3 1 1 1 3 1 100 385 150 75 1249 8 9 1 468 3 50 75 0 0 +"24204" 4 110 3 1 1 1 3 1 100 385 75 131 908 2 4 0 622 4 75 56 0 1 +"24205" 4 110 3 1 1 1 3 1 100 385 131 164 1742 6 3 1 568 2 25 33 1 1 +"24206" 4 110 3 1 1 1 3 1 100 385 164 205 1075 7 6 1 1686 2 25 41 1 1 +"24207" 4 110 3 1 1 1 3 1 100 385 205 195 1858 4 2 0 887 1 5 10 1 0 +"24208" 4 110 3 1 1 1 3 1 100 385 195 293 1018 1 8 0 607 3 50 98 0 1 +"24209" 4 110 3 1 1 1 3 1 100 385 293 308 2439 5 7 0 689 1 5 15 0 1 +"24210" 4 110 3 1 1 1 3 1 100 385 308 385 1061 9 5 1 1944 2 25 77 1 1 +"24211" 4 110 3 1 1 1 4 1 100 575 100 175 1386 8 7 1 406 4 75 75 1 1 +"24212" 4 110 3 1 1 1 4 1 100 575 175 219 1336 3 10 0 365 2 25 44 0 1 +"24213" 4 110 3 1 1 1 4 1 100 575 219 164 658 7 9 1 288 2 25 55 0 0 +"24214" 4 110 3 1 1 1 4 1 100 575 164 246 871 9 1 1 1252 3 50 82 1 1 +"24215" 4 110 3 1 1 1 4 1 100 575 246 369 3382 2 3 0 427 3 50 123 0 1 +"24216" 4 110 3 1 1 1 4 1 100 575 369 461 1324 1 8 0 762 2 25 92 0 1 +"24217" 4 110 3 1 1 1 4 1 100 575 461 484 2075 5 4 1 254 1 5 23 1 1 +"24218" 4 110 3 1 1 1 4 1 100 575 484 460 2631 4 2 0 271 1 5 24 1 0 +"24219" 4 110 3 1 1 1 4 1 100 575 460 575 1031 6 3 1 308 2 25 115 1 1 +"24220" 4 110 4 0 1 0 1 1 100 166 100 175 4491 2 7 0 1068 2 75 75 0 1 +"24221" 4 110 4 0 1 0 1 1 100 166 175 341 3062 3 7 0 1338 1 95 166 0 1 +"24222" 4 110 4 0 1 0 1 1 100 166 341 665 3504 2 8 0 879 1 95 324 0 1 +"24223" 4 110 4 0 1 0 1 1 100 166 665 166 1290 8 9 1 1234 2 75 499 0 0 +"24224" 4 110 5 1 1 0 1 1 100 989 100 195 1309 1 3 0 444 1 95 95 0 1 +"24225" 4 110 5 1 1 0 1 1 100 989 195 293 1116 6 5 1 596 3 50 98 1 1 +"24226" 4 110 5 1 1 0 1 1 100 989 293 440 837 2 8 0 3659 3 50 147 0 1 +"24227" 4 110 5 1 1 0 1 1 100 989 440 330 1051 8 9 1 406 4 25 110 0 0 +"24228" 4 110 5 1 1 0 1 1 100 989 330 644 1208 3 4 0 2476 1 95 314 0 1 +"24229" 4 110 5 1 1 0 1 1 100 989 644 676 1796 5 7 0 380 5 5 32 0 1 +"24230" 4 110 5 1 1 0 1 1 100 989 676 1014 1168 7 4 1 606 3 50 338 1 1 +"24231" 4 110 5 1 1 0 1 1 100 989 1014 
507 910 4 1 0 464 3 50 507 1 0 +"24232" 4 110 5 1 1 0 1 1 100 989 507 989 1000 9 6 1 333 1 95 482 1 1 +"24233" 4 110 5 1 1 0 2 0 100 1 100 195 1302 8 1 1 366 1 95 95 1 1 +"24234" 4 110 5 1 1 0 2 0 100 1 195 146 914 6 2 0 341 4 25 49 1 0 +"24235" 4 110 5 1 1 0 2 0 100 1 146 7 1610 7 9 1 1042 1 95 139 0 0 +"24236" 4 110 5 1 1 0 2 0 100 1 7 14 910 2 10 0 328 1 95 7 0 1 +"24237" 4 110 5 1 1 0 2 0 100 1 14 1 5318 5 3 0 598 1 95 13 1 0 +"24238" 4 110 5 1 1 0 3 1 100 8 100 195 1722 7 5 1 365 1 95 95 1 1 +"24239" 4 110 5 1 1 0 3 1 100 8 195 10 1121 2 1 0 401 1 95 185 1 0 +"24240" 4 110 5 1 1 0 3 1 100 8 10 20 1035 8 6 1 391 1 95 10 1 1 +"24241" 4 110 5 1 1 0 3 1 100 8 20 39 1129 4 7 0 529 1 95 19 0 1 +"24242" 4 110 5 1 1 0 3 1 100 8 39 76 885 3 10 0 481 1 95 37 0 1 +"24243" 4 110 5 1 1 0 3 1 100 8 76 38 1039 6 8 1 436 3 50 38 0 0 +"24244" 4 110 5 1 1 0 3 1 100 8 38 74 1260 9 2 1 269 1 95 36 1 1 +"24245" 4 110 5 1 1 0 3 1 100 8 74 4 1509 5 3 0 275 1 95 70 1 0 +"24246" 4 110 5 1 1 0 3 1 100 8 4 8 1107 1 10 0 300 1 95 4 0 1 +"24247" 4 110 5 1 1 0 4 1 100 2313 100 195 1153 2 3 0 272 1 95 95 0 1 +"24248" 4 110 5 1 1 0 4 1 100 2313 195 380 968 7 4 1 351 1 95 185 1 1 +"24249" 4 110 5 1 1 0 4 1 100 2313 380 190 924 3 1 0 374 3 50 190 1 0 +"24250" 4 110 5 1 1 0 4 1 100 2313 190 371 681 1 9 0 206 1 95 181 0 1 +"24251" 4 110 5 1 1 0 4 1 100 2313 371 723 905 8 7 1 268 1 95 352 1 1 +"24252" 4 110 5 1 1 0 4 1 100 2313 723 1265 964 9 2 1 2300 2 75 542 1 1 +"24253" 4 110 5 1 1 0 4 1 100 2313 1265 1581 1092 5 6 0 1766 4 25 316 0 1 +"24254" 4 110 5 1 1 0 4 1 100 2313 1581 1186 1415 6 8 1 962 4 25 395 0 0 +"24255" 4 110 5 1 1 0 4 1 100 2313 1186 2313 921 4 7 0 251 1 95 1127 0 1 +"24256" 4 111 2 0 1 1 1 1 100 351 100 150 17156 8 3 1 3106 3 50 50 1 1 +"24257" 4 111 2 0 1 1 1 1 100 351 150 225 4203 3 7 0 1585 3 50 75 0 1 +"24258" 4 111 2 0 1 1 1 1 100 351 225 281 2675 8 2 1 1897 2 25 56 1 1 +"24259" 4 111 2 0 1 1 1 1 100 351 281 351 4468 2 1 1 1157 2 25 70 1 1 +"24260" 4 111 3 1 1 1 1 0 100 1 100 125 8822 9 7 1 1000 2 25 25 1 1 +"24261" 4 111 3 1 1 1 1 0 100 1 125 156 8869 4 8 0 2541 2 25 31 0 1 +"24262" 4 111 3 1 1 1 1 0 100 1 156 78 4537 8 2 0 343 3 50 78 1 0 +"24263" 4 111 3 1 1 1 1 0 100 1 78 39 5764 2 1 0 1128 3 50 39 1 0 +"24264" 4 111 3 1 1 1 1 0 100 1 39 68 4031 7 6 1 1413 4 75 29 1 1 +"24265" 4 111 3 1 1 1 1 0 100 1 68 17 13159 5 3 0 779 4 75 51 1 0 +"24266" 4 111 3 1 1 1 1 0 100 1 17 1 4643 3 6 1 796 5 95 16 0 0 +"24267" 4 111 3 1 1 1 2 1 100 266 100 150 4567 2 9 0 275 3 50 50 0 1 +"24268" 4 111 3 1 1 1 2 1 100 266 150 225 6587 4 10 0 1144 3 50 75 0 1 +"24269" 4 111 3 1 1 1 2 1 100 266 225 338 3134 3 1 1 983 3 50 113 1 1 +"24270" 4 111 3 1 1 1 2 1 100 266 338 253 20437 8 6 0 1422 2 25 85 1 0 +"24271" 4 111 3 1 1 1 2 1 100 266 253 126 2088 5 7 1 947 3 50 127 0 0 +"24272" 4 111 3 1 1 1 2 1 100 266 126 189 5307 6 8 0 300 3 50 63 0 1 +"24273" 4 111 3 1 1 1 2 1 100 266 189 284 6507 7 5 1 979 3 50 95 1 1 +"24274" 4 111 3 1 1 1 2 1 100 266 284 355 9474 1 10 0 2205 2 25 71 0 1 +"24275" 4 111 3 1 1 1 2 1 100 266 355 266 3185 9 3 0 1791 2 25 89 1 0 +"24276" 4 111 3 1 1 1 3 1 100 215 100 50 12524 3 5 1 309 3 50 50 0 0 +"24277" 4 111 3 1 1 1 3 1 100 215 50 75 9436 8 9 0 296 3 50 25 0 1 +"24278" 4 111 3 1 1 1 3 1 100 215 75 113 4360 2 4 0 1166 3 50 38 0 1 +"24279" 4 111 3 1 1 1 3 1 100 215 113 170 2811 6 3 1 724 3 50 57 1 1 +"24280" 4 111 3 1 1 1 3 1 100 215 170 255 5406 7 6 1 1011 3 50 85 1 1 +"24281" 4 111 3 1 1 1 3 1 100 215 255 383 8424 4 2 1 756 3 50 128 1 1 +"24282" 4 111 3 1 1 1 3 1 100 215 383 287 13461 1 8 1 1219 
2 25 96 0 0 +"24283" 4 111 3 1 1 1 3 1 100 215 287 431 5326 5 7 0 967 3 50 144 0 1 +"24284" 4 111 3 1 1 1 3 1 100 215 431 215 6574 9 5 0 1403 3 50 216 1 0 +"24285" 4 111 3 1 1 1 4 1 100 39 100 75 2903 8 7 0 1994 2 25 25 1 0 +"24286" 4 111 3 1 1 1 4 1 100 39 75 37 3921 3 10 1 356 3 50 38 0 0 +"24287" 4 111 3 1 1 1 4 1 100 39 37 18 6136 7 9 1 1571 3 50 19 0 0 +"24288" 4 111 3 1 1 1 4 1 100 39 18 9 9245 9 1 0 411 3 50 9 1 0 +"24289" 4 111 3 1 1 1 4 1 100 39 9 18 4917 2 3 0 1697 5 95 9 0 1 +"24290" 4 111 3 1 1 1 4 1 100 39 18 35 6213 1 8 0 0 5 95 17 0 1 +"24291" 4 111 3 1 1 1 4 1 100 39 35 17 1995 5 4 0 1242 3 50 18 1 0 +"24292" 4 111 3 1 1 1 4 1 100 39 17 26 5740 4 2 1 881 3 50 9 1 1 +"24293" 4 111 3 1 1 1 4 1 100 39 26 39 5700 6 3 1 1251 3 50 13 1 1 +"24294" 4 111 4 0 1 0 1 1 100 1 100 175 6444 2 7 0 807 2 75 75 0 1 +"24295" 4 111 4 0 1 0 1 1 100 1 175 341 7150 3 7 0 623 1 95 166 0 1 +"24296" 4 111 4 0 1 0 1 1 100 1 341 17 5598 2 8 1 545 1 95 324 0 0 +"24297" 4 111 4 0 1 0 1 1 100 1 17 1 10381 8 9 1 684 1 95 16 0 0 +"24298" 4 111 5 1 1 0 1 1 100 139 100 175 2361 1 3 0 694 2 75 75 0 1 +"24299" 4 111 5 1 1 0 1 1 100 139 175 263 2330 6 5 1 1113 3 50 88 1 1 +"24300" 4 111 5 1 1 0 1 1 100 139 263 131 7617 2 8 1 724 3 50 132 0 0 +"24301" 4 111 5 1 1 0 1 1 100 139 131 65 3775 8 9 1 1201 3 50 66 0 0 +"24302" 4 111 5 1 1 0 1 1 100 139 65 127 9253 3 4 0 586 1 95 62 0 1 +"24303" 4 111 5 1 1 0 1 1 100 139 127 248 1572 5 7 0 590 1 95 121 0 1 +"24304" 4 111 5 1 1 0 1 1 100 139 248 186 5383 7 4 0 949 4 25 62 1 0 +"24305" 4 111 5 1 1 0 1 1 100 139 186 279 2669 4 1 1 492 3 50 93 1 1 +"24306" 4 111 5 1 1 0 1 1 100 139 279 139 5569 9 6 0 966 3 50 140 1 0 +"24307" 4 111 5 1 1 0 2 0 100 0 100 50 6810 8 1 0 708 3 50 50 1 0 +"24308" 4 111 5 1 1 0 2 0 100 0 50 98 1968 6 2 1 760 1 95 48 1 1 +"24309" 4 111 5 1 1 0 2 0 100 0 98 5 2137 7 9 1 777 1 95 93 0 0 +"24310" 4 111 5 1 1 0 2 0 100 0 5 10 3001 2 10 0 663 1 95 5 0 1 +"24311" 4 111 5 1 1 0 2 0 100 0 10 0 2757 5 3 0 648 1 95 10 1 0 +"24312" 4 111 5 1 1 0 3 0 100 1 100 175 3103 7 5 1 1353 2 75 75 1 1 +"24313" 4 111 5 1 1 0 3 0 100 1 175 87 1838 2 1 0 804 3 50 88 1 0 +"24314" 4 111 5 1 1 0 3 0 100 1 87 4 5524 8 6 0 570 1 95 83 1 0 +"24315" 4 111 5 1 1 0 3 0 100 1 4 8 4836 4 7 0 499 1 95 4 0 1 +"24316" 4 111 5 1 1 0 3 0 100 1 8 16 1281 3 10 0 530 1 95 8 0 1 +"24317" 4 111 5 1 1 0 3 0 100 1 16 1 2230 6 8 1 569 1 95 15 0 0 +"24318" 4 111 5 1 1 0 4 1 100 782 100 175 3179 2 3 0 2282 2 75 75 0 1 +"24319" 4 111 5 1 1 0 4 1 100 782 175 263 2322 7 4 1 1058 3 50 88 1 1 +"24320" 4 111 5 1 1 0 4 1 100 782 263 329 5100 3 1 1 823 4 25 66 1 1 +"24321" 4 111 5 1 1 0 4 1 100 782 329 494 2333 1 9 0 756 3 50 165 0 1 +"24322" 4 111 5 1 1 0 4 1 100 782 494 618 2389 8 7 1 986 4 25 124 1 1 +"24323" 4 111 5 1 1 0 4 1 100 782 618 463 10736 9 2 0 1192 4 25 155 1 0 +"24324" 4 111 5 1 1 0 4 1 100 782 463 695 1932 5 6 0 997 3 50 232 0 1 +"24325" 4 111 5 1 1 0 4 1 100 782 695 521 2331 6 8 1 1090 4 25 174 0 0 +"24326" 4 111 5 1 1 0 4 1 100 782 521 782 10133 4 7 0 1608 3 50 261 0 1 +"24327" 4 134 2 0 1 1 1 1 100 107 100 150 5401 8 3 1 933 3 50 50 1 1 +"24328" 4 134 2 0 1 1 1 1 100 107 150 225 7726 3 7 0 1356 3 50 75 0 1 +"24329" 4 134 2 0 1 1 1 1 100 107 225 214 2257 8 2 0 604 1 5 11 1 0 +"24330" 4 134 2 0 1 1 1 1 100 107 214 107 2260 2 1 0 2302 3 50 107 1 0 +"24331" 4 134 3 1 1 1 1 1 100 422 100 150 9045 9 7 1 1636 3 50 50 1 1 +"24332" 4 134 3 1 1 1 1 1 100 422 150 225 915 4 8 0 517 3 50 75 0 1 +"24333" 4 134 3 1 1 1 1 1 100 422 225 338 2154 8 2 1 586 3 50 113 1 1 +"24334" 4 134 3 1 1 1 1 1 100 422 
338 253 2787 2 1 0 812 2 25 85 1 0 +"24335" 4 134 3 1 1 1 1 1 100 422 253 240 1906 7 6 0 341 1 5 13 1 0 +"24336" 4 134 3 1 1 1 1 1 100 422 240 300 2458 5 3 1 1603 2 25 60 1 1 +"24337" 4 134 3 1 1 1 1 1 100 422 300 375 1876 3 6 0 904 2 25 75 0 1 +"24338" 4 134 3 1 1 1 1 1 100 422 375 281 1954 6 9 1 787 2 25 94 0 0 +"24339" 4 134 3 1 1 1 1 1 100 422 281 422 1699 1 4 0 2536 3 50 141 0 1 +"24340" 4 134 3 1 1 1 2 1 100 709 100 195 5216 2 9 0 613 5 95 95 0 1 +"24341" 4 134 3 1 1 1 2 1 100 709 195 205 1677 4 10 0 866 1 5 10 0 1 +"24342" 4 134 3 1 1 1 2 1 100 709 205 102 1550 3 1 0 1459 3 50 103 1 0 +"24343" 4 134 3 1 1 1 2 1 100 709 102 199 1283 8 6 1 0 5 95 97 1 1 +"24344" 4 134 3 1 1 1 2 1 100 709 199 299 3716 5 7 0 1201 3 50 100 0 1 +"24345" 4 134 3 1 1 1 2 1 100 709 299 149 2229 6 8 1 2081 3 50 150 0 0 +"24346" 4 134 3 1 1 1 2 1 100 709 149 291 1267 7 5 1 0 5 95 142 1 1 +"24347" 4 134 3 1 1 1 2 1 100 709 291 567 1988 1 10 0 0 5 95 276 0 1 +"24348" 4 134 3 1 1 1 2 1 100 709 567 709 2300 9 3 1 4604 2 25 142 1 1 +"24349" 4 134 3 1 1 1 3 0 100 0 100 175 1899 3 5 0 903 4 75 75 0 1 +"24350" 4 134 3 1 1 1 3 0 100 0 175 9 1410 8 9 1 0 5 95 166 0 0 +"24351" 4 134 3 1 1 1 3 0 100 0 9 18 2260 2 4 0 0 5 95 9 0 1 +"24352" 4 134 3 1 1 1 3 0 100 0 18 35 2846 6 3 1 0 5 95 17 1 1 +"24353" 4 134 3 1 1 1 3 0 100 0 35 68 2948 7 6 1 0 5 95 33 1 1 +"24354" 4 134 3 1 1 1 3 0 100 0 68 3 2677 4 2 0 0 5 95 65 1 0 +"24355" 4 134 3 1 1 1 3 0 100 0 3 6 2291 1 8 0 0 5 95 3 0 1 +"24356" 4 134 3 1 1 1 3 0 100 0 6 0 2332 5 7 1 0 5 95 6 0 0 +"24357" 4 134 3 1 1 1 4 1 100 284 100 195 2809 8 7 1 0 5 95 95 1 1 +"24358" 4 134 3 1 1 1 4 1 100 284 195 293 1809 3 10 0 1817 3 50 98 0 1 +"24359" 4 134 3 1 1 1 4 1 100 284 293 15 1296 7 9 1 1888 5 95 278 0 0 +"24360" 4 134 3 1 1 1 4 1 100 284 15 29 1700 9 1 1 0 5 95 14 1 1 +"24361" 4 134 3 1 1 1 4 1 100 284 29 57 1945 2 3 0 0 5 95 28 0 1 +"24362" 4 134 3 1 1 1 4 1 100 284 57 111 1682 1 8 0 0 5 95 54 0 1 +"24363" 4 134 3 1 1 1 4 1 100 284 111 216 2679 5 4 1 0 5 95 105 1 1 +"24364" 4 134 3 1 1 1 4 1 100 284 216 162 2917 4 2 0 3024 2 25 54 1 0 +"24365" 4 134 3 1 1 1 4 1 100 284 162 284 3925 6 3 1 960 4 75 122 1 1 +"24366" 4 134 4 0 1 0 1 1 100 29 100 150 4987 2 7 0 628 3 50 50 0 1 +"24367" 4 134 4 0 1 0 1 1 100 29 150 293 4291 3 7 0 804 1 95 143 0 1 +"24368" 4 134 4 0 1 0 1 1 100 29 293 571 1683 2 8 0 433 1 95 278 0 1 +"24369" 4 134 4 0 1 0 1 1 100 29 571 29 1482 8 9 1 409 1 95 542 0 0 +"24370" 4 134 5 1 1 0 1 1 100 18 100 195 2061 1 3 0 305 1 95 95 0 1 +"24371" 4 134 5 1 1 0 1 1 100 18 195 244 2719 6 5 1 524 4 25 49 1 1 +"24372" 4 134 5 1 1 0 1 1 100 18 244 476 1032 2 8 0 300 1 95 232 0 1 +"24373" 4 134 5 1 1 0 1 1 100 18 476 24 1595 8 9 1 405 1 95 452 0 0 +"24374" 4 134 5 1 1 0 1 1 100 18 24 47 1982 3 4 0 2654 1 95 23 0 1 +"24375" 4 134 5 1 1 0 1 1 100 18 47 92 1092 5 7 0 326 1 95 45 0 1 +"24376" 4 134 5 1 1 0 1 1 100 18 92 179 1860 7 4 1 321 1 95 87 1 1 +"24377" 4 134 5 1 1 0 1 1 100 18 179 9 1550 4 1 0 490 1 95 170 1 0 +"24378" 4 134 5 1 1 0 1 1 100 18 9 18 1528 9 6 1 461 1 95 9 1 1 +"24379" 4 134 5 1 1 0 2 0 100 0 100 195 3008 8 1 1 406 1 95 95 1 1 +"24380" 4 134 5 1 1 0 2 0 100 0 195 380 1416 6 2 1 458 1 95 185 1 1 +"24381" 4 134 5 1 1 0 2 0 100 0 380 19 1394 7 9 1 469 1 95 361 0 0 +"24382" 4 134 5 1 1 0 2 0 100 0 19 37 1323 2 10 0 562 1 95 18 0 1 +"24383" 4 134 5 1 1 0 2 0 100 0 37 2 1452 5 3 0 381 1 95 35 1 0 +"24384" 4 134 5 1 1 0 2 0 100 0 2 0 1464 4 2 0 384 1 95 2 1 0 +"24385" 4 134 5 1 1 0 3 0 100 0 100 195 3370 7 5 1 631 1 95 95 1 1 +"24386" 4 134 5 1 1 0 3 0 100 0 195 10 1326 
2 1 0 380 1 95 185 1 0 +"24387" 4 134 5 1 1 0 3 0 100 0 10 20 1336 8 6 1 429 1 95 10 1 1 +"24388" 4 134 5 1 1 0 3 0 100 0 20 39 2679 4 7 0 556 1 95 19 0 1 +"24389" 4 134 5 1 1 0 3 0 100 0 39 76 1279 3 10 0 543 1 95 37 0 1 +"24390" 4 134 5 1 1 0 3 0 100 0 76 4 1339 6 8 1 392 1 95 72 0 0 +"24391" 4 134 5 1 1 0 3 0 100 0 4 8 1587 9 2 1 581 1 95 4 1 1 +"24392" 4 134 5 1 1 0 3 0 100 0 8 0 1350 5 3 0 572 1 95 8 1 0 +"24393" 4 134 5 1 1 0 4 1 100 20 100 195 1320 2 3 0 359 1 95 95 0 1 +"24394" 4 134 5 1 1 0 4 1 100 20 195 380 1243 7 4 1 405 1 95 185 1 1 +"24395" 4 134 5 1 1 0 4 1 100 20 380 19 1087 3 1 0 305 1 95 361 1 0 +"24396" 4 134 5 1 1 0 4 1 100 20 19 37 1647 1 9 0 350 1 95 18 0 1 +"24397" 4 134 5 1 1 0 4 1 100 20 37 72 1627 8 7 1 499 1 95 35 1 1 +"24398" 4 134 5 1 1 0 4 1 100 20 72 140 1114 9 2 1 394 1 95 68 1 1 +"24399" 4 134 5 1 1 0 4 1 100 20 140 210 2380 5 6 0 1033 3 50 70 0 1 +"24400" 4 134 5 1 1 0 4 1 100 20 210 10 1517 6 8 1 395 1 95 200 0 0 +"24401" 4 134 5 1 1 0 4 1 100 20 10 20 1078 4 7 0 520 1 95 10 0 1 +"24402" 4 140 2 0 1 1 1 1 100 247 100 125 6991 8 3 1 2160 2 25 25 1 1 +"24403" 4 140 2 0 1 1 1 1 100 247 125 219 9271 3 7 0 1324 4 75 94 0 1 +"24404" 4 140 2 0 1 1 1 1 100 247 219 329 8530 8 2 1 706 3 50 110 1 1 +"24405" 4 140 2 0 1 1 1 1 100 247 329 247 2221 2 1 0 1594 2 25 82 1 0 +"24406" 4 140 3 1 1 1 1 1 100 525 100 175 2567 9 7 1 1139 4 75 75 1 1 +"24407" 4 140 3 1 1 1 1 1 100 525 175 219 3090 4 8 0 885 2 25 44 0 1 +"24408" 4 140 3 1 1 1 1 1 100 525 219 274 1296 8 2 1 1816 2 25 55 1 1 +"24409" 4 140 3 1 1 1 1 1 100 525 274 205 1687 2 1 0 341 2 25 69 1 0 +"24410" 4 140 3 1 1 1 1 1 100 525 205 256 1722 7 6 1 1093 2 25 51 1 1 +"24411" 4 140 3 1 1 1 1 1 100 525 256 320 3094 5 3 1 737 2 25 64 1 1 +"24412" 4 140 3 1 1 1 1 1 100 525 320 400 2027 3 6 0 704 2 25 80 0 1 +"24413" 4 140 3 1 1 1 1 1 100 525 400 420 3395 6 9 0 872 1 5 20 0 1 +"24414" 4 140 3 1 1 1 1 1 100 525 420 525 1171 1 4 0 1081 2 25 105 0 1 +"24415" 4 140 3 1 1 1 2 1 100 570 100 175 2476 2 9 0 548 4 75 75 0 1 +"24416" 4 140 3 1 1 1 2 1 100 570 175 219 3391 4 10 0 412 2 25 44 0 1 +"24417" 4 140 3 1 1 1 2 1 100 570 219 164 1909 3 1 0 392 2 25 55 1 0 +"24418" 4 140 3 1 1 1 2 1 100 570 164 205 1459 8 6 1 1395 2 25 41 1 1 +"24419" 4 140 3 1 1 1 2 1 100 570 205 256 2525 5 7 0 595 2 25 51 0 1 +"24420" 4 140 3 1 1 1 2 1 100 570 256 243 1332 6 8 1 1419 1 5 13 0 0 +"24421" 4 140 3 1 1 1 2 1 100 570 243 304 1276 7 5 1 1029 2 25 61 1 1 +"24422" 4 140 3 1 1 1 2 1 100 570 304 456 1001 1 10 0 535 3 50 152 0 1 +"24423" 4 140 3 1 1 1 2 1 100 570 456 570 1115 9 3 1 629 2 25 114 1 1 +"24424" 4 140 3 1 1 1 3 1 100 554 100 150 2729 3 5 0 692 3 50 50 0 1 +"24425" 4 140 3 1 1 1 3 1 100 554 150 75 1177 8 9 1 783 3 50 75 0 0 +"24426" 4 140 3 1 1 1 3 1 100 554 75 131 1167 2 4 0 1357 4 75 56 0 1 +"24427" 4 140 3 1 1 1 3 1 100 554 131 197 1522 6 3 1 1239 3 50 66 1 1 +"24428" 4 140 3 1 1 1 3 1 100 554 197 296 1417 7 6 1 476 3 50 99 1 1 +"24429" 4 140 3 1 1 1 3 1 100 554 296 281 1083 4 2 0 772 1 5 15 1 0 +"24430" 4 140 3 1 1 1 3 1 100 554 281 492 892 1 8 0 504 4 75 211 0 1 +"24431" 4 140 3 1 1 1 3 1 100 554 492 369 3149 5 7 1 293 2 25 123 0 0 +"24432" 4 140 3 1 1 1 3 1 100 554 369 554 1372 9 5 1 488 3 50 185 1 1 +"24433" 4 140 3 1 1 1 4 1 100 844 100 175 3846 8 7 1 701 4 75 75 1 1 +"24434" 4 140 3 1 1 1 4 1 100 844 175 219 1305 3 10 0 848 2 25 44 0 1 +"24435" 4 140 3 1 1 1 4 1 100 844 219 208 1381 7 9 1 809 1 5 11 0 0 +"24436" 4 140 3 1 1 1 4 1 100 844 208 364 960 9 1 1 936 4 75 156 1 1 +"24437" 4 140 3 1 1 1 4 1 100 844 364 455 1441 2 3 0 1498 2 
25 91 0 1 +"24438" 4 140 3 1 1 1 4 1 100 844 455 569 1176 1 8 0 1846 2 25 114 0 1 +"24439" 4 140 3 1 1 1 4 1 100 844 569 711 1955 5 4 1 748 2 25 142 1 1 +"24440" 4 140 3 1 1 1 4 1 100 844 711 675 1277 4 2 0 982 1 5 36 1 0 +"24441" 4 140 3 1 1 1 4 1 100 844 675 844 1085 6 3 1 383 2 25 169 1 1 +"24442" 4 140 4 0 1 0 1 1 100 14 100 175 5978 2 7 0 675 2 75 75 0 1 +"24443" 4 140 4 0 1 0 1 1 100 14 175 219 1432 3 7 0 914 4 25 44 0 1 +"24444" 4 140 4 0 1 0 1 1 100 14 219 274 1059 2 8 0 863 4 25 55 0 1 +"24445" 4 140 4 0 1 0 1 1 100 14 274 14 1387 8 9 1 1087 1 95 260 0 0 +"24446" 4 140 5 1 1 0 1 1 100 1596 100 195 1287 1 3 0 2119 1 95 95 0 1 +"24447" 4 140 5 1 1 0 1 1 100 1596 195 293 1271 6 5 1 1345 3 50 98 1 1 +"24448" 4 140 5 1 1 0 1 1 100 1596 293 513 1109 2 8 0 671 2 75 220 0 1 +"24449" 4 140 5 1 1 0 1 1 100 1596 513 385 1171 8 9 1 1906 4 25 128 0 0 +"24450" 4 140 5 1 1 0 1 1 100 1596 385 674 1028 3 4 0 1305 2 75 289 0 1 +"24451" 4 140 5 1 1 0 1 1 100 1596 674 640 2848 5 7 1 647 5 5 34 0 0 +"24452" 4 140 5 1 1 0 1 1 100 1596 640 1120 1021 7 4 1 370 2 75 480 1 1 +"24453" 4 140 5 1 1 0 1 1 100 1596 1120 1064 1878 4 1 0 458 5 5 56 1 0 +"24454" 4 140 5 1 1 0 1 1 100 1596 1064 1596 1114 9 6 1 443 3 50 532 1 1 +"24455" 4 140 5 1 1 0 2 1 100 228 100 175 1770 8 1 1 444 2 75 75 1 1 +"24456" 4 140 5 1 1 0 2 1 100 228 175 131 2420 6 2 0 751 4 25 44 1 0 +"24457" 4 140 5 1 1 0 2 1 100 228 131 33 1033 7 9 1 525 2 75 98 0 0 +"24458" 4 140 5 1 1 0 2 1 100 228 33 64 1017 2 10 0 366 1 95 31 0 1 +"24459" 4 140 5 1 1 0 2 1 100 228 64 125 1954 5 3 1 611 1 95 61 1 1 +"24460" 4 140 5 1 1 0 2 1 100 228 125 31 1235 4 2 0 422 2 75 94 1 0 +"24461" 4 140 5 1 1 0 2 1 100 228 31 60 1079 3 5 0 401 1 95 29 0 1 +"24462" 4 140 5 1 1 0 2 1 100 228 60 117 1100 9 4 1 532 1 95 57 1 1 +"24463" 4 140 5 1 1 0 2 1 100 228 117 228 1105 1 7 0 561 1 95 111 0 1 +"24464" 4 140 5 1 1 0 3 1 100 322 100 175 1248 7 5 1 651 2 75 75 1 1 +"24465" 4 140 5 1 1 0 3 1 100 322 175 44 897 2 1 0 397 2 75 131 1 0 +"24466" 4 140 5 1 1 0 3 1 100 322 44 86 911 8 6 1 442 1 95 42 1 1 +"24467" 4 140 5 1 1 0 3 1 100 322 86 151 1242 4 7 0 716 2 75 65 0 1 +"24468" 4 140 5 1 1 0 3 1 100 322 151 227 952 3 10 0 968 3 50 76 0 1 +"24469" 4 140 5 1 1 0 3 1 100 322 227 113 1127 6 8 1 1408 3 50 114 0 0 +"24470" 4 140 5 1 1 0 3 1 100 322 113 220 1066 9 2 1 741 1 95 107 1 1 +"24471" 4 140 5 1 1 0 3 1 100 322 220 165 1438 5 3 0 504 4 25 55 1 0 +"24472" 4 140 5 1 1 0 3 1 100 322 165 322 1405 1 10 0 615 1 95 157 0 1 +"24473" 4 140 5 1 1 0 4 1 100 818 100 195 68835 2 3 0 788 1 95 95 0 1 +"24474" 4 140 5 1 1 0 4 1 100 818 195 205 2172 7 4 1 339 5 5 10 1 1 +"24475" 4 140 5 1 1 0 4 1 100 818 205 154 1200 3 1 0 413 4 25 51 1 0 +"24476" 4 140 5 1 1 0 4 1 100 818 154 300 1398 1 9 0 905 1 95 146 0 1 +"24477" 4 140 5 1 1 0 4 1 100 818 300 375 1179 8 7 1 785 4 25 75 1 1 +"24478" 4 140 5 1 1 0 4 1 100 818 375 656 2761 9 2 1 492 2 75 281 1 1 +"24479" 4 140 5 1 1 0 4 1 100 818 656 820 1306 5 6 0 1754 4 25 164 0 1 +"24480" 4 140 5 1 1 0 4 1 100 818 820 779 1731 6 8 1 1223 5 5 41 0 0 +"24481" 4 140 5 1 1 0 4 1 100 818 779 818 1386 4 7 0 806 5 5 39 0 1 +"24482" 4 140 2 0 1 1 1 1 100 414 100 150 8510 8 3 1 1175 3 50 50 1 1 +"24483" 4 140 2 0 1 1 1 1 100 414 150 225 9676 3 7 0 1165 3 50 75 0 1 +"24484" 4 140 2 0 1 1 1 1 100 414 225 394 2123 8 2 1 676 4 75 169 1 1 +"24485" 4 140 2 0 1 1 1 1 100 414 394 414 1825 2 1 1 871 1 5 20 1 1 +"24486" 4 140 3 1 1 1 1 1 100 790 100 195 9955 9 7 1 0 5 95 95 1 1 +"24487" 4 140 3 1 1 1 1 1 100 790 195 244 3575 4 8 0 1178 2 25 49 0 1 +"24488" 4 140 3 1 1 1 1 
1 100 790 244 366 1423 8 2 1 921 3 50 122 1 1 +"24489" 4 140 3 1 1 1 1 1 100 790 366 274 1522 2 1 0 911 2 25 92 1 0 +"24490" 4 140 3 1 1 1 1 1 100 790 274 411 2113 7 6 1 930 3 50 137 1 1 +"24491" 4 140 3 1 1 1 1 1 100 790 411 432 2355 5 3 1 1449 1 5 21 1 1 +"24492" 4 140 3 1 1 1 1 1 100 790 432 540 1585 3 6 0 995 2 25 108 0 1 +"24493" 4 140 3 1 1 1 1 1 100 790 540 405 1425 6 9 1 995 2 25 135 0 0 +"24494" 4 140 3 1 1 1 1 1 100 790 405 790 1425 1 4 0 0 5 95 385 0 1 +"24495" 4 140 3 1 1 1 2 0 100 1 100 195 3531 2 9 0 2082 5 95 95 0 1 +"24496" 4 140 3 1 1 1 2 0 100 1 195 97 3211 4 10 1 617 3 50 98 0 0 +"24497" 4 140 3 1 1 1 2 0 100 1 97 5 1513 3 1 0 1711 5 95 92 1 0 +"24498" 4 140 3 1 1 1 2 0 100 1 5 10 1323 8 6 1 0 5 95 5 1 1 +"24499" 4 140 3 1 1 1 2 0 100 1 10 20 2474 5 7 0 0 5 95 10 0 1 +"24500" 4 140 3 1 1 1 2 0 100 1 20 1 1906 6 8 1 0 5 95 19 0 0 +"24501" 4 140 3 1 1 1 3 1 100 250 100 150 2122 3 5 0 1643 3 50 50 0 1 +"24502" 4 140 3 1 1 1 3 1 100 250 150 75 2211 8 9 1 1503 3 50 75 0 0 +"24503" 4 140 3 1 1 1 3 1 100 250 75 146 1405 2 4 0 968 5 95 71 0 1 +"24504" 4 140 3 1 1 1 3 1 100 250 146 219 1591 6 3 1 3265 3 50 73 1 1 +"24505" 4 140 3 1 1 1 3 1 100 250 219 329 1547 7 6 1 727 3 50 110 1 1 +"24506" 4 140 3 1 1 1 3 1 100 250 329 164 2034 4 2 0 903 3 50 165 1 0 +"24507" 4 140 3 1 1 1 3 1 100 250 164 287 1858 1 8 0 815 4 75 123 0 1 +"24508" 4 140 3 1 1 1 3 1 100 250 287 143 1869 5 7 1 2131 3 50 144 0 0 +"24509" 4 140 3 1 1 1 3 1 100 250 143 250 1587 9 5 1 1309 4 75 107 1 1 +"24510" 4 140 3 1 1 1 4 1 100 70 100 195 3423 8 7 1 1090 5 95 95 1 1 +"24511" 4 140 3 1 1 1 4 1 100 70 195 341 2340 3 10 0 1171 4 75 146 0 1 +"24512" 4 140 3 1 1 1 4 1 100 70 341 512 2608 7 9 0 914 3 50 171 0 1 +"24513" 4 140 3 1 1 1 4 1 100 70 512 768 2148 9 1 1 1508 3 50 256 1 1 +"24514" 4 140 3 1 1 1 4 1 100 70 768 960 2529 2 3 0 1598 2 25 192 0 1 +"24515" 4 140 3 1 1 1 4 1 100 70 960 1440 2495 1 8 0 734 3 50 480 0 1 +"24516" 4 140 3 1 1 1 4 1 100 70 1440 720 1865 5 4 0 750 3 50 720 1 0 +"24517" 4 140 3 1 1 1 4 1 100 70 720 36 4168 4 2 0 608 5 95 684 1 0 +"24518" 4 140 3 1 1 1 4 1 100 70 36 70 1680 6 3 1 0 5 95 34 1 1 +"24519" 4 140 4 0 1 0 1 1 100 26 100 150 5009 2 7 0 649 3 50 50 0 1 +"24520" 4 140 4 0 1 0 1 1 100 26 150 263 2415 3 7 0 890 2 75 113 0 1 +"24521" 4 140 4 0 1 0 1 1 100 26 263 513 1724 2 8 0 1560 1 95 250 0 1 +"24522" 4 140 4 0 1 0 1 1 100 26 513 26 1747 8 9 1 1389 1 95 487 0 0 +"24523" 4 140 5 1 1 0 1 0 100 1 100 195 1720 1 3 0 2020 1 95 95 0 1 +"24524" 4 140 5 1 1 0 1 0 100 1 195 97 1401 6 5 0 1973 3 50 98 1 0 +"24525" 4 140 5 1 1 0 1 0 100 1 97 189 1284 2 8 0 1260 1 95 92 0 1 +"24526" 4 140 5 1 1 0 1 0 100 1 189 9 1109 8 9 1 590 1 95 180 0 0 +"24527" 4 140 5 1 1 0 1 0 100 1 9 18 1279 3 4 0 1344 1 95 9 0 1 +"24528" 4 140 5 1 1 0 1 0 100 1 18 1 1333 5 7 1 643 1 95 17 0 0 +"24529" 4 140 5 1 1 0 2 0 100 0 100 195 1423 8 1 1 825 1 95 95 1 1 +"24530" 4 140 5 1 1 0 2 0 100 0 195 380 1175 6 2 1 444 1 95 185 1 1 +"24531" 4 140 5 1 1 0 2 0 100 0 380 19 1148 7 9 1 612 1 95 361 0 0 +"24532" 4 140 5 1 1 0 2 0 100 0 19 37 1116 2 10 0 716 1 95 18 0 1 +"24533" 4 140 5 1 1 0 2 0 100 0 37 2 1247 5 3 0 722 1 95 35 1 0 +"24534" 4 140 5 1 1 0 2 0 100 0 2 0 1268 4 2 0 829 1 95 2 1 0 +"24535" 4 140 5 1 1 0 3 0 100 0 100 195 1303 7 5 1 614 1 95 95 1 1 +"24536" 4 140 5 1 1 0 3 0 100 0 195 10 950 2 1 0 550 1 95 185 1 0 +"24537" 4 140 5 1 1 0 3 0 100 0 10 20 1458 8 6 1 698 1 95 10 1 1 +"24538" 4 140 5 1 1 0 3 0 100 0 20 39 1030 4 7 0 603 1 95 19 0 1 +"24539" 4 140 5 1 1 0 3 0 100 0 39 76 1124 3 10 0 524 1 95 37 0 1 +"24540" 4 
140 5 1 1 0 3 0 100 0 76 4 1360 6 8 1 509 1 95 72 0 0 +"24541" 4 140 5 1 1 0 3 0 100 0 4 8 1361 9 2 1 1022 1 95 4 1 1 +"24542" 4 140 5 1 1 0 3 0 100 0 8 0 1507 5 3 0 577 1 95 8 1 0 +"24543" 4 140 5 1 1 0 4 1 100 351 100 195 1805 2 3 0 1020 1 95 95 0 1 +"24544" 4 140 5 1 1 0 4 1 100 351 195 293 1073 7 4 1 804 3 50 98 1 1 +"24545" 4 140 5 1 1 0 4 1 100 351 293 571 1531 3 1 1 1280 1 95 278 1 1 +"24546" 4 140 5 1 1 0 4 1 100 351 571 1113 1576 1 9 0 675 1 95 542 0 1 +"24547" 4 140 5 1 1 0 4 1 100 351 1113 1057 2016 8 7 0 1486 5 5 56 1 0 +"24548" 4 140 5 1 1 0 4 1 100 351 1057 2061 1397 9 2 1 864 1 95 1004 1 1 +"24549" 4 140 5 1 1 0 4 1 100 351 2061 3607 1388 5 6 0 800 2 75 1546 0 1 +"24550" 4 140 5 1 1 0 4 1 100 351 3607 180 1895 6 8 1 1154 1 95 3427 0 0 +"24551" 4 140 5 1 1 0 4 1 100 351 180 351 1649 4 7 0 808 1 95 171 0 1 +"24552" 4 143 2 0 1 1 1 1 100 267 100 150 6524 8 3 1 915 3 50 50 1 1 +"24553" 4 143 2 0 1 1 1 1 100 267 150 225 8805 3 7 0 944 3 50 75 0 1 +"24554" 4 143 2 0 1 1 1 1 100 267 225 281 3232 8 2 1 1029 2 25 56 1 1 +"24555" 4 143 2 0 1 1 1 1 100 267 281 267 1777 2 1 0 1746 1 5 14 1 0 +"24556" 4 143 3 1 1 1 1 1 100 605 100 150 10559 9 7 1 807 3 50 50 1 1 +"24557" 4 143 3 1 1 1 1 1 100 605 150 112 1709 4 8 1 1449 2 25 38 0 0 +"24558" 4 143 3 1 1 1 1 1 100 605 112 168 1233 8 2 1 1436 3 50 56 1 1 +"24559" 4 143 3 1 1 1 1 1 100 605 168 84 1621 2 1 0 902 3 50 84 1 0 +"24560" 4 143 3 1 1 1 1 1 100 605 84 147 2336 7 6 1 1343 4 75 63 1 1 +"24561" 4 143 3 1 1 1 1 1 100 605 147 221 1084 5 3 1 670 3 50 74 1 1 +"24562" 4 143 3 1 1 1 1 1 100 605 221 387 1420 3 6 0 2046 4 75 166 0 1 +"24563" 4 143 3 1 1 1 1 1 100 605 387 484 2322 6 9 0 1057 2 25 97 0 1 +"24564" 4 143 3 1 1 1 1 1 100 605 484 605 3332 1 4 0 1803 2 25 121 0 1 +"24565" 4 143 3 1 1 1 2 1 100 788 100 150 2087 2 9 0 1450 3 50 50 0 1 +"24566" 4 143 3 1 1 1 2 1 100 788 150 188 3498 4 10 0 1073 2 25 38 0 1 +"24567" 4 143 3 1 1 1 2 1 100 788 188 235 3379 3 1 1 928 2 25 47 1 1 +"24568" 4 143 3 1 1 1 2 1 100 788 235 353 1073 8 6 1 652 3 50 118 1 1 +"24569" 4 143 3 1 1 1 2 1 100 788 353 265 1474 5 7 1 610 2 25 88 0 0 +"24570" 4 143 3 1 1 1 2 1 100 788 265 132 1706 6 8 1 494 3 50 133 0 0 +"24571" 4 143 3 1 1 1 2 1 100 788 132 257 1340 7 5 1 0 5 95 125 1 1 +"24572" 4 143 3 1 1 1 2 1 100 788 257 450 1546 1 10 0 910 4 75 193 0 1 +"24573" 4 143 3 1 1 1 2 1 100 788 450 788 1376 9 3 1 661 4 75 338 1 1 +"24574" 4 143 3 1 1 1 3 1 100 357 100 150 4268 3 5 0 655 3 50 50 0 1 +"24575" 4 143 3 1 1 1 3 1 100 357 150 75 1053 8 9 1 480 3 50 75 0 0 +"24576" 4 143 3 1 1 1 3 1 100 357 75 146 1423 2 4 0 0 5 95 71 0 1 +"24577" 4 143 3 1 1 1 3 1 100 357 146 256 2992 6 3 1 680 4 75 110 1 1 +"24578" 4 143 3 1 1 1 3 1 100 357 256 192 2893 7 6 0 1081 2 25 64 1 0 +"24579" 4 143 3 1 1 1 3 1 100 357 192 48 1011 4 2 0 1513 4 75 144 1 0 +"24580" 4 143 3 1 1 1 3 1 100 357 48 94 1094 1 8 0 1208 5 95 46 0 1 +"24581" 4 143 3 1 1 1 3 1 100 357 94 183 3788 5 7 0 613 5 95 89 0 1 +"24582" 4 143 3 1 1 1 3 1 100 357 183 357 1173 9 5 1 2436 5 95 174 1 1 +"24583" 4 143 3 1 1 1 4 1 100 44 100 150 5764 8 7 1 968 3 50 50 1 1 +"24584" 4 143 3 1 1 1 4 1 100 44 150 263 1215 3 10 0 588 4 75 113 0 1 +"24585" 4 143 3 1 1 1 4 1 100 44 263 131 924 7 9 1 374 3 50 132 0 0 +"24586" 4 143 3 1 1 1 4 1 100 44 131 255 986 9 1 1 809 5 95 124 1 1 +"24587" 4 143 3 1 1 1 4 1 100 44 255 242 2943 2 3 1 1841 1 5 13 0 0 +"24588" 4 143 3 1 1 1 4 1 100 44 242 472 735 1 8 0 1626 5 95 230 0 1 +"24589" 4 143 3 1 1 1 4 1 100 44 472 354 2178 5 4 0 640 2 25 118 1 0 +"24590" 4 143 3 1 1 1 4 1 100 44 354 177 1366 4 2 0 
604 3 50 177 1 0 +"24591" 4 143 3 1 1 1 4 1 100 44 177 44 3692 6 3 0 378 4 75 133 1 0 +"24592" 4 143 4 0 1 0 1 1 100 197 100 150 5081 2 7 0 1588 3 50 50 0 1 +"24593" 4 143 4 0 1 0 1 1 100 197 150 225 2718 3 7 0 530 3 50 75 0 1 +"24594" 4 143 4 0 1 0 1 1 100 197 225 394 1438 2 8 0 904 2 75 169 0 1 +"24595" 4 143 4 0 1 0 1 1 100 197 394 197 1466 8 9 1 1314 3 50 197 0 0 +"24596" 4 143 5 1 1 0 1 1 100 2118 100 195 4495 1 3 0 567 1 95 95 0 1 +"24597" 4 143 5 1 1 0 1 1 100 2118 195 293 1217 6 5 1 1349 3 50 98 1 1 +"24598" 4 143 5 1 1 0 1 1 100 2118 293 571 946 2 8 0 403 1 95 278 0 1 +"24599" 4 143 5 1 1 0 1 1 100 2118 571 285 884 8 9 1 2384 3 50 286 0 0 +"24600" 4 143 5 1 1 0 1 1 100 2118 285 556 876 3 4 0 440 1 95 271 0 1 +"24601" 4 143 5 1 1 0 1 1 100 2118 556 695 1376 5 7 0 941 4 25 139 0 1 +"24602" 4 143 5 1 1 0 1 1 100 2118 695 869 1173 7 4 1 619 4 25 174 1 1 +"24603" 4 143 5 1 1 0 1 1 100 2118 869 1086 1291 4 1 1 732 4 25 217 1 1 +"24604" 4 143 5 1 1 0 1 1 100 2118 1086 2118 1070 9 6 1 401 1 95 1032 1 1 +"24605" 4 143 5 1 1 0 2 0 100 1 100 195 1307 8 1 1 658 1 95 95 1 1 +"24606" 4 143 5 1 1 0 2 0 100 1 195 380 920 6 2 1 561 1 95 185 1 1 +"24607" 4 143 5 1 1 0 2 0 100 1 380 190 1348 7 9 1 410 3 50 190 0 0 +"24608" 4 143 5 1 1 0 2 0 100 1 190 371 699 2 10 0 351 1 95 181 0 1 +"24609" 4 143 5 1 1 0 2 0 100 1 371 19 1028 5 3 0 448 1 95 352 1 0 +"24610" 4 143 5 1 1 0 2 0 100 1 19 1 1344 4 2 0 374 1 95 18 1 0 +"24611" 4 143 5 1 1 0 3 1 100 31 100 195 1117 7 5 1 453 1 95 95 1 1 +"24612" 4 143 5 1 1 0 3 1 100 31 195 10 908 2 1 0 255 1 95 185 1 0 +"24613" 4 143 5 1 1 0 3 1 100 31 10 20 855 8 6 1 364 1 95 10 1 1 +"24614" 4 143 5 1 1 0 3 1 100 31 20 39 1408 4 7 0 435 1 95 19 0 1 +"24615" 4 143 5 1 1 0 3 1 100 31 39 76 748 3 10 0 309 1 95 37 0 1 +"24616" 4 143 5 1 1 0 3 1 100 31 76 4 1100 6 8 1 301 1 95 72 0 0 +"24617" 4 143 5 1 1 0 3 1 100 31 4 8 860 9 2 1 377 1 95 4 1 1 +"24618" 4 143 5 1 1 0 3 1 100 31 8 16 908 5 3 1 375 1 95 8 1 1 +"24619" 4 143 5 1 1 0 3 1 100 31 16 31 562 1 10 0 293 1 95 15 0 1 +"24620" 4 143 5 1 1 0 4 1 100 650 100 195 1424 2 3 0 370 1 95 95 0 1 +"24621" 4 143 5 1 1 0 4 1 100 650 195 341 2517 7 4 1 1459 2 75 146 1 1 +"24622" 4 143 5 1 1 0 4 1 100 650 341 256 1450 3 1 0 1011 4 25 85 1 0 +"24623" 4 143 5 1 1 0 4 1 100 650 256 499 1336 1 9 0 502 1 95 243 0 1 +"24624" 4 143 5 1 1 0 4 1 100 650 499 624 1068 8 7 1 399 4 25 125 1 1 +"24625" 4 143 5 1 1 0 4 1 100 650 624 1217 1561 9 2 1 340 1 95 593 1 1 +"24626" 4 143 5 1 1 0 4 1 100 650 1217 1156 5073 5 6 1 630 5 5 61 0 0 +"24627" 4 143 5 1 1 0 4 1 100 650 1156 867 1125 6 8 1 638 4 25 289 0 0 +"24628" 4 143 5 1 1 0 4 1 100 650 867 650 5252 4 7 1 864 4 25 217 0 0 +"24629" 4 145 2 0 1 1 1 1 100 140 100 150 5930 8 3 1 1246 3 50 50 1 1 +"24630" 4 145 2 0 1 1 1 1 100 140 150 225 7072 3 7 0 1197 3 50 75 0 1 +"24631" 4 145 2 0 1 1 1 1 100 140 225 281 2313 8 2 1 1857 2 25 56 1 1 +"24632" 4 145 2 0 1 1 1 1 100 140 281 140 1942 2 1 0 815 3 50 141 1 0 +"24633" 4 145 3 1 1 1 1 1 100 330 100 125 14744 9 7 1 1096 2 25 25 1 1 +"24634" 4 145 3 1 1 1 1 1 100 330 125 119 5177 4 8 1 776 1 5 6 0 0 +"24635" 4 145 3 1 1 1 1 1 100 330 119 149 2392 8 2 1 1268 2 25 30 1 1 +"24636" 4 145 3 1 1 1 1 1 100 330 149 142 2709 2 1 0 1426 1 5 7 1 0 +"24637" 4 145 3 1 1 1 1 1 100 330 142 178 2651 7 6 1 387 2 25 36 1 1 +"24638" 4 145 3 1 1 1 1 1 100 330 178 169 3573 5 3 0 1057 1 5 9 1 0 +"24639" 4 145 3 1 1 1 1 1 100 330 169 211 4691 3 6 0 794 2 25 42 0 1 +"24640" 4 145 3 1 1 1 1 1 100 330 211 264 5441 6 9 0 524 2 25 53 0 1 +"24641" 4 145 3 1 1 1 1 1 100 330 264 330 1608 
1 4 0 601 2 25 66 0 1 +"24642" 4 145 3 1 1 1 2 1 100 441 100 125 4791 2 9 0 737 2 25 25 0 1 +"24643" 4 145 3 1 1 1 2 1 100 441 125 131 2547 4 10 0 1568 1 5 6 0 1 +"24644" 4 145 3 1 1 1 2 1 100 441 131 164 2922 3 1 1 621 2 25 33 1 1 +"24645" 4 145 3 1 1 1 2 1 100 441 164 205 2988 8 6 1 580 2 25 41 1 1 +"24646" 4 145 3 1 1 1 2 1 100 441 205 215 3013 5 7 0 1220 1 5 10 0 1 +"24647" 4 145 3 1 1 1 2 1 100 441 215 269 2287 6 8 0 552 2 25 54 0 1 +"24648" 4 145 3 1 1 1 2 1 100 441 269 336 3307 7 5 1 548 2 25 67 1 1 +"24649" 4 145 3 1 1 1 2 1 100 441 336 420 1119 1 10 0 599 2 25 84 0 1 +"24650" 4 145 3 1 1 1 2 1 100 441 420 441 2456 9 3 1 1593 1 5 21 1 1 +"24651" 4 145 3 1 1 1 3 1 100 189 100 125 3734 3 5 0 466 2 25 25 0 1 +"24652" 4 145 3 1 1 1 3 1 100 189 125 94 2818 8 9 1 929 2 25 31 0 0 +"24653" 4 145 3 1 1 1 3 1 100 189 94 118 1634 2 4 0 554 2 25 24 0 1 +"24654" 4 145 3 1 1 1 3 1 100 189 118 88 9469 6 3 0 296 2 25 30 1 0 +"24655" 4 145 3 1 1 1 3 1 100 189 88 110 4903 7 6 1 331 2 25 22 1 1 +"24656" 4 145 3 1 1 1 3 1 100 189 110 82 2609 4 2 0 419 2 25 28 1 0 +"24657" 4 145 3 1 1 1 3 1 100 189 82 144 3284 1 8 0 1941 4 75 62 0 1 +"24658" 4 145 3 1 1 1 3 1 100 189 144 108 2806 5 7 1 286 2 25 36 0 0 +"24659" 4 145 3 1 1 1 3 1 100 189 108 189 2822 9 5 1 2254 4 75 81 1 1 +"24660" 4 145 3 1 1 1 4 1 100 307 100 125 1584 8 7 1 563 2 25 25 1 1 +"24661" 4 145 3 1 1 1 4 1 100 307 125 131 2253 3 10 0 1760 1 5 6 0 1 +"24662" 4 145 3 1 1 1 4 1 100 307 131 98 6562 7 9 1 583 2 25 33 0 0 +"24663" 4 145 3 1 1 1 4 1 100 307 98 172 1633 9 1 1 665 4 75 74 1 1 +"24664" 4 145 3 1 1 1 4 1 100 307 172 215 1447 2 3 0 1400 2 25 43 0 1 +"24665" 4 145 3 1 1 1 4 1 100 307 215 323 1138 1 8 0 1602 3 50 108 0 1 +"24666" 4 145 3 1 1 1 4 1 100 307 323 307 5763 5 4 0 846 1 5 16 1 0 +"24667" 4 145 3 1 1 1 4 1 100 307 307 292 1899 4 2 0 871 1 5 15 1 0 +"24668" 4 145 3 1 1 1 4 1 100 307 292 307 1539 6 3 1 663 1 5 15 1 1 +"24669" 4 145 4 0 1 0 1 1 100 29 100 150 4605 2 7 0 1183 3 50 50 0 1 +"24670" 4 145 4 0 1 0 1 1 100 29 150 293 4060 3 7 0 921 1 95 143 0 1 +"24671" 4 145 4 0 1 0 1 1 100 29 293 571 1882 2 8 0 1918 1 95 278 0 1 +"24672" 4 145 4 0 1 0 1 1 100 29 571 29 1888 8 9 1 1228 1 95 542 0 0 +"24673" 4 145 5 1 1 0 1 1 100 31 100 195 8200 1 3 0 968 1 95 95 0 1 +"24674" 4 145 5 1 1 0 1 1 100 31 195 380 1991 6 5 1 870 1 95 185 1 1 +"24675" 4 145 5 1 1 0 1 1 100 31 380 741 1138 2 8 0 953 1 95 361 0 1 +"24676" 4 145 5 1 1 0 1 1 100 31 741 37 1993 8 9 1 864 1 95 704 0 0 +"24677" 4 145 5 1 1 0 1 1 100 31 37 72 1660 3 4 0 650 1 95 35 0 1 +"24678" 4 145 5 1 1 0 1 1 100 31 72 4 2217 5 7 1 541 1 95 68 0 0 +"24679" 4 145 5 1 1 0 1 1 100 31 4 8 1320 7 4 1 567 1 95 4 1 1 +"24680" 4 145 5 1 1 0 1 1 100 31 8 16 2353 4 1 1 1127 1 95 8 1 1 +"24681" 4 145 5 1 1 0 1 1 100 31 16 31 1326 9 6 1 941 1 95 15 1 1 +"24682" 4 145 5 1 1 0 2 1 100 31 100 195 1572 8 1 1 655 1 95 95 1 1 +"24683" 4 145 5 1 1 0 2 1 100 31 195 380 1172 6 2 1 906 1 95 185 1 1 +"24684" 4 145 5 1 1 0 2 1 100 31 380 19 1276 7 9 1 1501 1 95 361 0 0 +"24685" 4 145 5 1 1 0 2 1 100 31 19 37 1474 2 10 0 810 1 95 18 0 1 +"24686" 4 145 5 1 1 0 2 1 100 31 37 2 1880 5 3 0 956 1 95 35 1 0 +"24687" 4 145 5 1 1 0 2 1 100 31 2 4 2540 4 2 1 593 1 95 2 1 1 +"24688" 4 145 5 1 1 0 2 1 100 31 4 8 1722 3 5 0 645 1 95 4 0 1 +"24689" 4 145 5 1 1 0 2 1 100 31 8 16 1068 9 4 1 958 1 95 8 1 1 +"24690" 4 145 5 1 1 0 2 1 100 31 16 31 1174 1 7 0 867 1 95 15 0 1 +"24691" 4 145 5 1 1 0 3 0 100 1 100 195 1924 7 5 1 638 1 95 95 1 1 +"24692" 4 145 5 1 1 0 3 0 100 1 195 10 949 2 1 0 621 1 95 185 1 0 +"24693" 4 145 5 1 1 
0 3 0 100 1 10 20 1436 8 6 1 541 1 95 10 1 1 +"24694" 4 145 5 1 1 0 3 0 100 1 20 1 1760 4 7 1 442 1 95 19 0 0 +"24695" 4 145 5 1 1 0 4 1 100 1 100 195 1301 2 3 0 628 1 95 95 0 1 +"24696" 4 145 5 1 1 0 4 1 100 1 195 380 2291 7 4 1 460 1 95 185 1 1 +"24697" 4 145 5 1 1 0 4 1 100 1 380 19 1363 3 1 0 504 1 95 361 1 0 +"24698" 4 145 5 1 1 0 4 1 100 1 19 37 1333 1 9 0 564 1 95 18 0 1 +"24699" 4 145 5 1 1 0 4 1 100 1 37 72 1174 8 7 1 316 1 95 35 1 1 +"24700" 4 145 5 1 1 0 4 1 100 1 72 140 730 9 2 1 613 1 95 68 1 1 +"24701" 4 145 5 1 1 0 4 1 100 1 140 273 2125 5 6 0 461 1 95 133 0 1 +"24702" 4 145 5 1 1 0 4 1 100 1 273 14 1180 6 8 1 314 1 95 259 0 0 +"24703" 4 145 5 1 1 0 4 1 100 1 14 1 2208 4 7 1 357 1 95 13 0 0 +"24704" 4 146 2 0 1 1 1 1 100 156 100 125 2631 8 3 1 4424 2 25 25 1 1 +"24705" 4 146 2 0 1 1 1 1 100 156 125 131 3594 3 7 0 1405 1 5 6 0 1 +"24706" 4 146 2 0 1 1 1 1 100 156 131 164 2607 8 2 1 1009 2 25 33 1 1 +"24707" 4 146 2 0 1 1 1 1 100 156 164 156 2950 2 1 0 1359 1 5 8 1 0 +"24708" 4 146 3 1 1 1 1 1 100 213 100 150 2764 9 7 1 990 3 50 50 1 1 +"24709" 4 146 3 1 1 1 1 1 100 213 150 158 5092 4 8 0 871 1 5 8 0 1 +"24710" 4 146 3 1 1 1 1 1 100 213 158 198 2403 8 2 1 990 2 25 40 1 1 +"24711" 4 146 3 1 1 1 1 1 100 213 198 188 2124 2 1 0 1068 1 5 10 1 0 +"24712" 4 146 3 1 1 1 1 1 100 213 188 179 2982 7 6 0 694 1 5 9 1 0 +"24713" 4 146 3 1 1 1 1 1 100 213 179 170 2594 5 3 0 908 1 5 9 1 0 +"24714" 4 146 3 1 1 1 1 1 100 213 170 179 1646 3 6 0 1189 1 5 9 0 1 +"24715" 4 146 3 1 1 1 1 1 100 213 179 170 2234 6 9 1 806 1 5 9 0 0 +"24716" 4 146 3 1 1 1 1 1 100 213 170 213 1956 1 4 0 1220 2 25 43 0 1 +"24717" 4 146 3 1 1 1 2 1 100 255 100 125 3119 2 9 0 1040 2 25 25 0 1 +"24718" 4 146 3 1 1 1 2 1 100 255 125 131 2371 4 10 0 1088 1 5 6 0 1 +"24719" 4 146 3 1 1 1 2 1 100 255 131 124 1689 3 1 0 1742 1 5 7 1 0 +"24720" 4 146 3 1 1 1 2 1 100 255 124 186 2177 8 6 1 903 3 50 62 1 1 +"24721" 4 146 3 1 1 1 2 1 100 255 186 195 3969 5 7 0 474 1 5 9 0 1 +"24722" 4 146 3 1 1 1 2 1 100 255 195 185 2057 6 8 1 724 1 5 10 0 0 +"24723" 4 146 3 1 1 1 2 1 100 255 185 194 1782 7 5 1 823 1 5 9 1 1 +"24724" 4 146 3 1 1 1 2 1 100 255 194 204 1549 1 10 0 1297 1 5 10 0 1 +"24725" 4 146 3 1 1 1 2 1 100 255 204 255 1640 9 3 1 840 2 25 51 1 1 +"24726" 4 146 3 1 1 1 3 1 100 171 100 105 1919 3 5 0 833 1 5 5 0 1 +"24727" 4 146 3 1 1 1 3 1 100 171 105 79 2975 8 9 1 815 2 25 26 0 0 +"24728" 4 146 3 1 1 1 3 1 100 171 79 99 2294 2 4 0 638 2 25 20 0 1 +"24729" 4 146 3 1 1 1 3 1 100 171 99 104 2460 6 3 1 1148 1 5 5 1 1 +"24730" 4 146 3 1 1 1 3 1 100 171 104 109 2587 7 6 1 837 1 5 5 1 1 +"24731" 4 146 3 1 1 1 3 1 100 171 109 104 2267 4 2 0 845 1 5 5 1 0 +"24732" 4 146 3 1 1 1 3 1 100 171 104 130 1402 1 8 0 1020 2 25 26 0 1 +"24733" 4 146 3 1 1 1 3 1 100 171 130 137 2985 5 7 0 905 1 5 7 0 1 +"24734" 4 146 3 1 1 1 3 1 100 171 137 171 1595 9 5 1 1573 2 25 34 1 1 +"24735" 4 146 3 1 1 1 4 1 100 208 100 125 1790 8 7 1 470 2 25 25 1 1 +"24736" 4 146 3 1 1 1 4 1 100 208 125 131 1461 3 10 0 1181 1 5 6 0 1 +"24737" 4 146 3 1 1 1 4 1 100 208 131 124 1434 7 9 1 1028 1 5 7 0 0 +"24738" 4 146 3 1 1 1 4 1 100 208 124 155 1224 9 1 1 723 2 25 31 1 1 +"24739" 4 146 3 1 1 1 4 1 100 208 155 194 1598 2 3 0 735 2 25 39 0 1 +"24740" 4 146 3 1 1 1 4 1 100 208 194 243 2026 1 8 0 1263 2 25 49 0 1 +"24741" 4 146 3 1 1 1 4 1 100 208 243 231 2296 5 4 0 392 1 5 12 1 0 +"24742" 4 146 3 1 1 1 4 1 100 208 231 219 2251 4 2 0 605 1 5 12 1 0 +"24743" 4 146 3 1 1 1 4 1 100 208 219 208 2549 6 3 0 473 1 5 11 1 0 +"24744" 4 146 4 0 1 0 1 1 100 148 100 150 1950 2 7 0 2479 3 50 
50 0 1 +"24745" 4 146 4 0 1 0 1 1 100 148 150 188 1422 3 7 0 1568 4 25 38 0 1 +"24746" 4 146 4 0 1 0 1 1 100 148 188 197 1267 2 8 0 1636 5 5 9 0 1 +"24747" 4 146 4 0 1 0 1 1 100 148 197 148 1692 8 9 1 1577 4 25 49 0 0 +"24748" 4 146 5 1 1 0 1 1 100 254 100 175 1611 1 3 0 1724 2 75 75 0 1 +"24749" 4 146 5 1 1 0 1 1 100 254 175 219 1903 6 5 1 1539 4 25 44 1 1 +"24750" 4 146 5 1 1 0 1 1 100 254 219 274 1487 2 8 0 1270 4 25 55 0 1 +"24751" 4 146 5 1 1 0 1 1 100 254 274 205 1535 8 9 1 1253 4 25 69 0 0 +"24752" 4 146 5 1 1 0 1 1 100 254 205 215 1926 3 4 0 1121 5 5 10 0 1 +"24753" 4 146 5 1 1 0 1 1 100 254 215 204 3601 5 7 1 1484 5 5 11 0 0 +"24754" 4 146 5 1 1 0 1 1 100 254 204 214 1890 7 4 1 2046 5 5 10 1 1 +"24755" 4 146 5 1 1 0 1 1 100 254 214 203 1730 4 1 0 766 5 5 11 1 0 +"24756" 4 146 5 1 1 0 1 1 100 254 203 254 1599 9 6 1 521 4 25 51 1 1 +"24757" 4 146 5 1 1 0 2 1 100 329 100 125 2504 8 1 1 447 4 25 25 1 1 +"24758" 4 146 5 1 1 0 2 1 100 329 125 131 1551 6 2 1 644 5 5 6 1 1 +"24759" 4 146 5 1 1 0 2 1 100 329 131 124 1466 7 9 1 547 5 5 7 0 0 +"24760" 4 146 5 1 1 0 2 1 100 329 124 155 1214 2 10 0 1339 4 25 31 0 1 +"24761" 4 146 5 1 1 0 2 1 100 329 155 147 2649 5 3 0 472 5 5 8 1 0 +"24762" 4 146 5 1 1 0 2 1 100 329 147 140 1351 4 2 0 1324 5 5 7 1 0 +"24763" 4 146 5 1 1 0 2 1 100 329 140 175 1867 3 5 0 1432 4 25 35 0 1 +"24764" 4 146 5 1 1 0 2 1 100 329 175 263 1433 9 4 1 1083 3 50 88 1 1 +"24765" 4 146 5 1 1 0 2 1 100 329 263 329 1493 1 7 0 828 4 25 66 0 1 +"24766" 4 146 5 1 1 0 3 1 100 255 100 125 4404 7 5 1 1137 4 25 25 1 1 +"24767" 4 146 5 1 1 0 3 1 100 255 125 94 1360 2 1 0 605 4 25 31 1 0 +"24768" 4 146 5 1 1 0 3 1 100 255 94 165 1589 8 6 1 479 2 75 71 1 1 +"24769" 4 146 5 1 1 0 3 1 100 255 165 173 2214 4 7 0 347 5 5 8 0 1 +"24770" 4 146 5 1 1 0 3 1 100 255 173 216 1351 3 10 0 1389 4 25 43 0 1 +"24771" 4 146 5 1 1 0 3 1 100 255 216 205 1832 6 8 1 1487 5 5 11 0 0 +"24772" 4 146 5 1 1 0 3 1 100 255 205 256 1134 9 2 1 1085 4 25 51 1 1 +"24773" 4 146 5 1 1 0 3 1 100 255 256 243 3103 5 3 0 1090 5 5 13 1 0 +"24774" 4 146 5 1 1 0 3 1 100 255 243 255 1227 1 10 0 1055 5 5 12 0 1 +"24775" 4 146 5 1 1 0 4 1 100 306 100 125 2736 2 3 0 868 4 25 25 0 1 +"24776" 4 146 5 1 1 0 4 1 100 306 125 131 1416 7 4 1 443 5 5 6 1 1 +"24777" 4 146 5 1 1 0 4 1 100 306 131 124 1491 3 1 0 618 5 5 7 1 0 +"24778" 4 146 5 1 1 0 4 1 100 306 124 186 1250 1 9 0 1020 3 50 62 0 1 +"24779" 4 146 5 1 1 0 4 1 100 306 186 233 2025 8 7 1 1247 4 25 47 1 1 +"24780" 4 146 5 1 1 0 4 1 100 306 233 291 1376 9 2 1 2557 4 25 58 1 1 +"24781" 4 146 5 1 1 0 4 1 100 306 291 306 2080 5 6 0 2339 5 5 15 0 1 +"24782" 4 146 5 1 1 0 4 1 100 306 306 291 1465 6 8 1 1650 5 5 15 0 0 +"24783" 4 146 5 1 1 0 4 1 100 306 291 306 1320 4 7 0 1496 5 5 15 0 1 +"24784" 4 151 2 0 1 1 1 1 100 164 100 150 12340 8 3 1 1662 3 50 50 1 1 +"24785" 4 151 2 0 1 1 1 1 100 164 150 188 3820 3 7 0 4580 2 25 38 0 1 +"24786" 4 151 2 0 1 1 1 1 100 164 188 329 2415 8 2 1 1274 4 75 141 1 1 +"24787" 4 151 2 0 1 1 1 1 100 164 329 164 3763 2 1 0 1926 3 50 165 1 0 +"24788" 4 151 3 1 1 1 1 1 100 138 100 195 4008 9 7 1 1327 5 95 95 1 1 +"24789" 4 151 3 1 1 1 1 1 100 138 195 341 2501 4 8 0 620 4 75 146 0 1 +"24790" 4 151 3 1 1 1 1 1 100 138 341 597 1884 8 2 1 1376 4 75 256 1 1 +"24791" 4 151 3 1 1 1 1 1 100 138 597 149 2156 2 1 0 1716 4 75 448 1 0 +"24792" 4 151 3 1 1 1 1 1 100 138 149 291 2655 7 6 1 0 5 95 142 1 1 +"24793" 4 151 3 1 1 1 1 1 100 138 291 145 1878 5 3 0 2443 3 50 146 1 0 +"24794" 4 151 3 1 1 1 1 1 100 138 145 283 2252 3 6 0 0 5 95 138 0 1 +"24795" 4 151 3 1 1 1 1 1 100 
138 283 71 2414 6 9 1 817 4 75 212 0 0 +"24796" 4 151 3 1 1 1 1 1 100 138 71 138 2304 1 4 0 0 5 95 67 0 1 +"24797" 4 151 3 1 1 1 2 1 100 3840 100 195 2962 2 9 0 0 5 95 95 0 1 +"24798" 4 151 3 1 1 1 2 1 100 3840 195 293 2291 4 10 0 1988 3 50 98 0 1 +"24799" 4 151 3 1 1 1 2 1 100 3840 293 440 5579 3 1 1 671 3 50 147 1 1 +"24800" 4 151 3 1 1 1 2 1 100 3840 440 858 2900 8 6 1 3864 5 95 418 1 1 +"24801" 4 151 3 1 1 1 2 1 100 3840 858 1287 4813 5 7 0 1070 3 50 429 0 1 +"24802" 4 151 3 1 1 1 2 1 100 3840 1287 643 5472 6 8 1 1857 3 50 644 0 0 +"24803" 4 151 3 1 1 1 2 1 100 3840 643 1125 2838 7 5 1 1405 4 75 482 1 1 +"24804" 4 151 3 1 1 1 2 1 100 3840 1125 2194 2256 1 10 0 0 5 95 1069 0 1 +"24805" 4 151 3 1 1 1 2 1 100 3840 2194 3840 3114 9 3 1 2061 4 75 1646 1 1 +"24806" 4 151 3 1 1 1 3 0 100 0 100 195 2594 3 5 0 0 5 95 95 0 1 +"24807" 4 151 3 1 1 1 3 0 100 0 195 10 1780 8 9 1 0 5 95 185 0 0 +"24808" 4 151 3 1 1 1 3 0 100 0 10 20 2277 2 4 0 0 5 95 10 0 1 +"24809" 4 151 3 1 1 1 3 0 100 0 20 39 2606 6 3 1 0 5 95 19 1 1 +"24810" 4 151 3 1 1 1 3 0 100 0 39 10 3656 7 6 0 2984 4 75 29 1 0 +"24811" 4 151 3 1 1 1 3 0 100 0 10 0 5471 4 2 0 0 5 95 10 1 0 +"24812" 4 151 3 1 1 1 4 1 100 2608 100 195 2352 8 7 1 0 5 95 95 1 1 +"24813" 4 151 3 1 1 1 4 1 100 2608 195 380 1718 3 10 0 0 5 95 185 0 1 +"24814" 4 151 3 1 1 1 4 1 100 2608 380 570 2850 7 9 0 1224 3 50 190 0 1 +"24815" 4 151 3 1 1 1 4 1 100 2608 570 1112 2899 9 1 1 0 5 95 542 1 1 +"24816" 4 151 3 1 1 1 4 1 100 2608 1112 1946 5518 2 3 0 2413 4 75 834 0 1 +"24817" 4 151 3 1 1 1 4 1 100 2608 1946 3406 4016 1 8 0 1607 4 75 1460 0 1 +"24818" 4 151 3 1 1 1 4 1 100 2608 3406 5961 3007 5 4 1 1118 4 75 2555 1 1 +"24819" 4 151 3 1 1 1 4 1 100 2608 5961 1490 3383 4 2 0 1596 4 75 4471 1 0 +"24820" 4 151 3 1 1 1 4 1 100 2608 1490 2608 4029 6 3 1 1971 4 75 1118 1 1 +"24821" 4 151 4 0 1 0 1 1 100 114 100 175 7363 2 7 0 266 2 75 75 0 1 +"24822" 4 151 4 0 1 0 1 1 100 114 175 306 3715 3 7 0 832 2 75 131 0 1 +"24823" 4 151 4 0 1 0 1 1 100 114 306 229 2646 2 8 1 710 4 25 77 0 0 +"24824" 4 151 4 0 1 0 1 1 100 114 229 114 4049 8 9 1 1054 3 50 115 0 0 +"24825" 4 151 5 1 1 0 1 1 100 60 100 195 3414 1 3 0 993 1 95 95 0 1 +"24826" 4 151 5 1 1 0 1 1 100 60 195 341 2911 6 5 1 522 2 75 146 1 1 +"24827" 4 151 5 1 1 0 1 1 100 60 341 665 2414 2 8 0 3321 1 95 324 0 1 +"24828" 4 151 5 1 1 0 1 1 100 60 665 332 7856 8 9 1 2424 3 50 333 0 0 +"24829" 4 151 5 1 1 0 1 1 100 60 332 647 1922 3 4 0 1132 1 95 315 0 1 +"24830" 4 151 5 1 1 0 1 1 100 60 647 323 2331 5 7 1 714 3 50 324 0 0 +"24831" 4 151 5 1 1 0 1 1 100 60 323 630 2133 7 4 1 1557 1 95 307 1 1 +"24832" 4 151 5 1 1 0 1 1 100 60 630 31 1984 4 1 0 2336 1 95 599 1 0 +"24833" 4 151 5 1 1 0 1 1 100 60 31 60 1549 9 6 1 826 1 95 29 1 1 +"24834" 4 151 5 1 1 0 2 1 100 7213 100 195 1840 8 1 1 710 1 95 95 1 1 +"24835" 4 151 5 1 1 0 2 1 100 7213 195 380 2497 6 2 1 2824 1 95 185 1 1 +"24836" 4 151 5 1 1 0 2 1 100 7213 380 665 2374 7 9 0 1939 2 75 285 0 1 +"24837" 4 151 5 1 1 0 2 1 100 7213 665 1297 1852 2 10 0 1010 1 95 632 0 1 +"24838" 4 151 5 1 1 0 2 1 100 7213 1297 1946 2375 5 3 1 1324 3 50 649 1 1 +"24839" 4 151 5 1 1 0 2 1 100 7213 1946 973 2475 4 2 0 1459 3 50 973 1 0 +"24840" 4 151 5 1 1 0 2 1 100 7213 973 1897 1898 3 5 0 2446 1 95 924 0 1 +"24841" 4 151 5 1 1 0 2 1 100 7213 1897 3699 2143 9 4 1 2542 1 95 1802 1 1 +"24842" 4 151 5 1 1 0 2 1 100 7213 3699 7213 2338 1 7 0 2114 1 95 3514 0 1 +"24843" 4 151 5 1 1 0 3 1 100 4 100 195 2019 7 5 1 1567 1 95 95 1 1 +"24844" 4 151 5 1 1 0 3 1 100 4 195 10 2202 2 1 0 1895 1 95 185 1 0 +"24845" 4 151 5 1 
1 0 3 1 100 4 10 20 1882 8 6 1 1910 1 95 10 1 1 +"24846" 4 151 5 1 1 0 3 1 100 4 20 39 2265 4 7 0 1746 1 95 19 0 1 +"24847" 4 151 5 1 1 0 3 1 100 4 39 10 2501 3 10 1 1419 2 75 29 0 0 +"24848" 4 151 5 1 1 0 3 1 100 4 10 5 6557 6 8 1 1828 3 50 5 0 0 +"24849" 4 151 5 1 1 0 3 1 100 4 5 10 2217 9 2 1 2053 1 95 5 1 1 +"24850" 4 151 5 1 1 0 3 1 100 4 10 2 2898 5 3 0 1557 2 75 8 1 0 +"24851" 4 151 5 1 1 0 3 1 100 4 2 4 2097 1 10 0 1513 1 95 2 0 1 +"24852" 4 151 5 1 1 0 4 1 100 601 100 195 2258 2 3 0 2019 1 95 95 0 1 +"24853" 4 151 5 1 1 0 4 1 100 601 195 380 2073 7 4 1 1387 1 95 185 1 1 +"24854" 4 151 5 1 1 0 4 1 100 601 380 95 3104 3 1 0 1961 2 75 285 1 0 +"24855" 4 151 5 1 1 0 4 1 100 601 95 185 1577 1 9 0 1595 1 95 90 0 1 +"24856" 4 151 5 1 1 0 4 1 100 601 185 361 1925 8 7 1 1750 1 95 176 1 1 +"24857" 4 151 5 1 1 0 4 1 100 601 361 704 2233 9 2 1 1755 1 95 343 1 1 +"24858" 4 151 5 1 1 0 4 1 100 601 704 1232 2133 5 6 0 1677 2 75 528 0 1 +"24859" 4 151 5 1 1 0 4 1 100 601 1232 308 4436 6 8 1 2118 2 75 924 0 0 +"24860" 4 151 5 1 1 0 4 1 100 601 308 601 1855 4 7 0 1620 1 95 293 0 1 +"24861" 4 154 2 0 1 1 1 1 100 134 100 150 18855 8 3 1 2068 3 50 50 1 1 +"24862" 4 154 2 0 1 1 1 1 100 134 150 188 5910 3 7 0 3531 2 25 38 0 1 +"24863" 4 154 2 0 1 1 1 1 100 134 188 179 3968 8 2 0 993 1 5 9 1 0 +"24864" 4 154 2 0 1 1 1 1 100 134 179 134 2326 2 1 0 1061 2 25 45 1 0 +"24865" 4 154 3 1 1 1 1 1 100 215 100 125 11980 9 7 1 4311 2 25 25 1 1 +"24866" 4 154 3 1 1 1 1 1 100 215 125 119 2712 4 8 1 685 1 5 6 0 0 +"24867" 4 154 3 1 1 1 1 1 100 215 119 149 1905 8 2 1 860 2 25 30 1 1 +"24868" 4 154 3 1 1 1 1 1 100 215 149 156 2096 2 1 1 434 1 5 7 1 1 +"24869" 4 154 3 1 1 1 1 1 100 215 156 164 1803 7 6 1 2499 1 5 8 1 1 +"24870" 4 154 3 1 1 1 1 1 100 215 164 156 2345 5 3 0 782 1 5 8 1 0 +"24871" 4 154 3 1 1 1 1 1 100 215 156 195 1782 3 6 0 448 2 25 39 0 1 +"24872" 4 154 3 1 1 1 1 1 100 215 195 205 1240 6 9 0 1026 1 5 10 0 1 +"24873" 4 154 3 1 1 1 1 1 100 215 205 215 1094 1 4 0 592 1 5 10 0 1 +"24874" 4 154 3 1 1 1 2 1 100 322 100 95 2434 2 9 1 436 1 5 5 0 0 +"24875" 4 154 3 1 1 1 2 1 100 322 95 100 1180 4 10 0 938 1 5 5 0 1 +"24876" 4 154 3 1 1 1 2 1 100 322 100 95 1371 3 1 0 815 1 5 5 1 0 +"24877" 4 154 3 1 1 1 2 1 100 322 95 143 1229 8 6 1 1489 3 50 48 1 1 +"24878" 4 154 3 1 1 1 2 1 100 322 143 136 1880 5 7 1 1116 1 5 7 0 0 +"24879" 4 154 3 1 1 1 2 1 100 322 136 129 1596 6 8 1 2789 1 5 7 0 0 +"24880" 4 154 3 1 1 1 2 1 100 322 129 194 1318 7 5 1 820 3 50 65 1 1 +"24881" 4 154 3 1 1 1 2 1 100 322 194 184 3683 1 10 1 1298 1 5 10 0 0 +"24882" 4 154 3 1 1 1 2 1 100 322 184 322 1721 9 3 1 629 4 75 138 1 1 +"24883" 4 154 3 1 1 1 3 1 100 446 100 105 2149 3 5 0 820 1 5 5 0 1 +"24884" 4 154 3 1 1 1 3 1 100 446 105 100 1219 8 9 1 2222 1 5 5 0 0 +"24885" 4 154 3 1 1 1 3 1 100 446 100 125 2345 2 4 0 727 2 25 25 0 1 +"24886" 4 154 3 1 1 1 3 1 100 446 125 131 1714 6 3 1 1566 1 5 6 1 1 +"24887" 4 154 3 1 1 1 3 1 100 446 131 138 2024 7 6 1 3709 1 5 7 1 1 +"24888" 4 154 3 1 1 1 3 1 100 446 138 145 2978 4 2 1 610 1 5 7 1 1 +"24889" 4 154 3 1 1 1 3 1 100 446 145 283 1330 1 8 0 0 5 95 138 0 1 +"24890" 4 154 3 1 1 1 3 1 100 446 283 297 2521 5 7 0 1343 1 5 14 0 1 +"24891" 4 154 3 1 1 1 3 1 100 446 297 446 1460 9 5 1 869 3 50 149 1 1 +"24892" 4 154 3 1 1 1 4 1 100 239 100 105 2190 8 7 1 2804 1 5 5 1 1 +"24893" 4 154 3 1 1 1 4 1 100 239 105 110 1212 3 10 0 1808 1 5 5 0 1 +"24894" 4 154 3 1 1 1 4 1 100 239 110 116 1744 7 9 0 690 1 5 6 0 1 +"24895" 4 154 3 1 1 1 4 1 100 239 116 174 1246 9 1 1 1137 3 50 58 1 1 +"24896" 4 154 3 1 1 1 4 1 100 239 
174 183 2296 2 3 0 1361 1 5 9 0 1 +"24897" 4 154 3 1 1 1 4 1 100 239 183 229 1549 1 8 0 665 2 25 46 0 1 +"24898" 4 154 3 1 1 1 4 1 100 239 229 240 1215 5 4 1 2019 1 5 11 1 1 +"24899" 4 154 3 1 1 1 4 1 100 239 240 252 1424 4 2 1 561 1 5 12 1 1 +"24900" 4 154 3 1 1 1 4 1 100 239 252 239 1260 6 3 0 637 1 5 13 1 0 +"24901" 4 154 4 0 1 0 1 1 100 1 100 175 7159 2 7 0 897 2 75 75 0 1 +"24902" 4 154 4 0 1 0 1 1 100 1 175 306 2163 3 7 0 1002 2 75 131 0 1 +"24903" 4 154 4 0 1 0 1 1 100 1 306 15 2591 2 8 1 3794 1 95 291 0 0 +"24904" 4 154 4 0 1 0 1 1 100 1 15 1 1899 8 9 1 1486 1 95 14 0 0 +"24905" 4 154 5 1 1 0 1 1 100 265 100 195 3855 1 3 0 597 1 95 95 0 1 +"24906" 4 154 5 1 1 0 1 1 100 265 195 380 1830 6 5 1 532 1 95 185 1 1 +"24907" 4 154 5 1 1 0 1 1 100 265 380 741 3465 2 8 0 1945 1 95 361 0 1 +"24908" 4 154 5 1 1 0 1 1 100 265 741 37 1450 8 9 1 1555 1 95 704 0 0 +"24909" 4 154 5 1 1 0 1 1 100 265 37 72 2342 3 4 0 428 1 95 35 0 1 +"24910" 4 154 5 1 1 0 1 1 100 265 72 140 1917 5 7 0 600 1 95 68 0 1 +"24911" 4 154 5 1 1 0 1 1 100 265 140 273 825 7 4 1 2239 1 95 133 1 1 +"24912" 4 154 5 1 1 0 1 1 100 265 273 136 1514 4 1 0 728 3 50 137 1 0 +"24913" 4 154 5 1 1 0 1 1 100 265 136 265 994 9 6 1 433 1 95 129 1 1 +"24914" 4 154 5 1 1 0 2 1 100 267 100 195 1229 8 1 1 552 1 95 95 1 1 +"24915" 4 154 5 1 1 0 2 1 100 267 195 380 787 6 2 1 555 1 95 185 1 1 +"24916" 4 154 5 1 1 0 2 1 100 267 380 741 1438 7 9 0 1119 1 95 361 0 1 +"24917" 4 154 5 1 1 0 2 1 100 267 741 1445 911 2 10 0 1441 1 95 704 0 1 +"24918" 4 154 5 1 1 0 2 1 100 267 1445 722 1178 5 3 0 1032 3 50 723 1 0 +"24919" 4 154 5 1 1 0 2 1 100 267 722 36 1772 4 2 0 477 1 95 686 1 0 +"24920" 4 154 5 1 1 0 2 1 100 267 36 70 484 3 5 0 514 1 95 34 0 1 +"24921" 4 154 5 1 1 0 2 1 100 267 70 137 759 9 4 1 841 1 95 67 1 1 +"24922" 4 154 5 1 1 0 2 1 100 267 137 267 678 1 7 0 331 1 95 130 0 1 +"24923" 4 154 5 1 1 0 3 0 100 1 100 195 2132 7 5 1 484 1 95 95 1 1 +"24924" 4 154 5 1 1 0 3 0 100 1 195 10 1507 2 1 0 353 1 95 185 1 0 +"24925" 4 154 5 1 1 0 3 0 100 1 10 20 1253 8 6 1 342 1 95 10 1 1 +"24926" 4 154 5 1 1 0 3 0 100 1 20 1 547 4 7 1 423 1 95 19 0 0 +"24927" 4 154 5 1 1 0 4 1 100 0 100 195 1840 2 3 0 1006 1 95 95 0 1 +"24928" 4 154 5 1 1 0 4 1 100 0 195 380 748 7 4 1 3629 1 95 185 1 1 +"24929" 4 154 5 1 1 0 4 1 100 0 380 19 1478 3 1 0 4133 1 95 361 1 0 +"24930" 4 154 5 1 1 0 4 1 100 0 19 37 1341 1 9 0 427 1 95 18 0 1 +"24931" 4 154 5 1 1 0 4 1 100 0 37 72 1420 8 7 1 405 1 95 35 1 1 +"24932" 4 154 5 1 1 0 4 1 100 0 72 140 1231 9 2 1 370 1 95 68 1 1 +"24933" 4 154 5 1 1 0 4 1 100 0 140 35 5134 5 6 1 1503 2 75 105 0 0 +"24934" 4 154 5 1 1 0 4 1 100 0 35 2 1797 6 8 1 477 1 95 33 0 0 +"24935" 4 154 5 1 1 0 4 1 100 0 2 0 1195 4 7 1 312 1 95 2 0 0 +"24936" 4 155 2 0 1 1 1 1 100 203 100 150 6429 8 3 1 848 3 50 50 1 1 +"24937" 4 155 2 0 1 1 1 1 100 203 150 225 16045 3 7 0 1280 3 50 75 0 1 +"24938" 4 155 2 0 1 1 1 1 100 203 225 214 9328 8 2 0 938 1 5 11 1 0 +"24939" 4 155 2 0 1 1 1 1 100 203 214 203 4170 2 1 0 873 1 5 11 1 0 +"24940" 4 155 3 1 1 1 1 1 100 89 100 105 9161 9 7 1 1027 1 5 5 1 1 +"24941" 4 155 3 1 1 1 1 1 100 89 105 110 5087 4 8 0 874 1 5 5 0 1 +"24942" 4 155 3 1 1 1 1 1 100 89 110 104 3245 8 2 0 1617 1 5 6 1 0 +"24943" 4 155 3 1 1 1 1 1 100 89 104 109 2985 2 1 1 1208 1 5 5 1 1 +"24944" 4 155 3 1 1 1 1 1 100 89 109 82 4132 7 6 0 605 2 25 27 1 0 +"24945" 4 155 3 1 1 1 1 1 100 89 82 86 2853 5 3 1 819 1 5 4 1 1 +"24946" 4 155 3 1 1 1 1 1 100 89 86 90 2089 3 6 0 716 1 5 4 0 1 +"24947" 4 155 3 1 1 1 1 1 100 89 90 85 1815 6 9 1 1174 1 5 5 0 0 +"24948" 4 155 3 1 
1 1 1 1 100 89 85 89 3451 1 4 0 1084 1 5 4 0 1 +"24949" 4 155 3 1 1 1 2 1 100 116 100 105 3405 2 9 0 777 1 5 5 0 1 +"24950" 4 155 3 1 1 1 2 1 100 116 105 100 2129 4 10 1 1017 1 5 5 0 0 +"24951" 4 155 3 1 1 1 2 1 100 116 100 95 2182 3 1 0 1627 1 5 5 1 0 +"24952" 4 155 3 1 1 1 2 1 100 116 95 100 1963 8 6 1 652 1 5 5 1 1 +"24953" 4 155 3 1 1 1 2 1 100 116 100 105 1578 5 7 0 1117 1 5 5 0 1 +"24954" 4 155 3 1 1 1 2 1 100 116 105 100 2461 6 8 1 749 1 5 5 0 0 +"24955" 4 155 3 1 1 1 2 1 100 116 100 105 2143 7 5 1 1470 1 5 5 1 1 +"24956" 4 155 3 1 1 1 2 1 100 116 105 110 1661 1 10 0 572 1 5 5 0 1 +"24957" 4 155 3 1 1 1 2 1 100 116 110 116 3309 9 3 1 725 1 5 6 1 1 +"24958" 4 155 3 1 1 1 3 1 100 116 100 105 2015 3 5 0 1172 1 5 5 0 1 +"24959" 4 155 3 1 1 1 3 1 100 116 105 100 2705 8 9 1 774 1 5 5 0 0 +"24960" 4 155 3 1 1 1 3 1 100 116 100 105 1878 2 4 0 642 1 5 5 0 1 +"24961" 4 155 3 1 1 1 3 1 100 116 105 100 2337 6 3 0 631 1 5 5 1 0 +"24962" 4 155 3 1 1 1 3 1 100 116 100 105 1167 7 6 1 644 1 5 5 1 1 +"24963" 4 155 3 1 1 1 3 1 100 116 105 110 2691 4 2 1 665 1 5 5 1 1 +"24964" 4 155 3 1 1 1 3 1 100 116 110 116 2054 1 8 0 562 1 5 6 0 1 +"24965" 4 155 3 1 1 1 3 1 100 116 116 110 3885 5 7 1 594 1 5 6 0 0 +"24966" 4 155 3 1 1 1 3 1 100 116 110 116 2115 9 5 1 516 1 5 6 1 1 +"24967" 4 155 3 1 1 1 4 1 100 126 100 105 2515 8 7 1 625 1 5 5 1 1 +"24968" 4 155 3 1 1 1 4 1 100 126 105 110 1976 3 10 0 551 1 5 5 0 1 +"24969" 4 155 3 1 1 1 4 1 100 126 110 104 2699 7 9 1 616 1 5 6 0 0 +"24970" 4 155 3 1 1 1 4 1 100 126 104 109 2072 9 1 1 1123 1 5 5 1 1 +"24971" 4 155 3 1 1 1 4 1 100 126 109 114 3499 2 3 0 858 1 5 5 0 1 +"24972" 4 155 3 1 1 1 4 1 100 126 114 120 2374 1 8 0 1195 1 5 6 0 1 +"24973" 4 155 3 1 1 1 4 1 100 126 120 126 1984 5 4 1 863 1 5 6 1 1 +"24974" 4 155 3 1 1 1 4 1 100 126 126 120 2923 4 2 0 845 1 5 6 1 0 +"24975" 4 155 3 1 1 1 4 1 100 126 120 126 1506 6 3 1 2376 1 5 6 1 1 +"24976" 4 155 4 0 1 0 1 1 100 69 100 150 5654 2 7 0 2142 3 50 50 0 1 +"24977" 4 155 4 0 1 0 1 1 100 69 150 263 4686 3 7 0 2477 2 75 113 0 1 +"24978" 4 155 4 0 1 0 1 1 100 69 263 276 2012 2 8 0 1322 5 5 13 0 1 +"24979" 4 155 4 0 1 0 1 1 100 69 276 69 4201 8 9 1 2143 2 75 207 0 0 +"24980" 4 155 5 1 1 0 1 1 100 330 100 150 14557 1 3 0 3230 3 50 50 0 1 +"24981" 4 155 5 1 1 0 1 1 100 330 150 188 2353 6 5 1 1096 4 25 38 1 1 +"24982" 4 155 5 1 1 0 1 1 100 330 188 235 2305 2 8 0 1561 4 25 47 0 1 +"24983" 4 155 5 1 1 0 1 1 100 330 235 223 2516 8 9 1 1114 5 5 12 0 0 +"24984" 4 155 5 1 1 0 1 1 100 330 223 279 1706 3 4 0 2527 4 25 56 0 1 +"24985" 4 155 5 1 1 0 1 1 100 330 279 265 2604 5 7 1 1270 5 5 14 0 0 +"24986" 4 155 5 1 1 0 1 1 100 330 265 331 1679 7 4 1 2045 4 25 66 1 1 +"24987" 4 155 5 1 1 0 1 1 100 330 331 314 2289 4 1 0 1315 5 5 17 1 0 +"24988" 4 155 5 1 1 0 1 1 100 330 314 330 2097 9 6 1 1268 5 5 16 1 1 +"24989" 4 155 5 1 1 0 2 1 100 74 100 150 5246 8 1 1 715 3 50 50 1 1 +"24990" 4 155 5 1 1 0 2 1 100 74 150 112 2262 6 2 0 3104 4 25 38 1 0 +"24991" 4 155 5 1 1 0 2 1 100 74 112 106 3754 7 9 1 597 5 5 6 0 0 +"24992" 4 155 5 1 1 0 2 1 100 74 106 133 1446 2 10 0 2583 4 25 27 0 1 +"24993" 4 155 5 1 1 0 2 1 100 74 133 100 2234 5 3 0 2246 4 25 33 1 0 +"24994" 4 155 5 1 1 0 2 1 100 74 100 75 2488 4 2 0 1328 4 25 25 1 0 +"24995" 4 155 5 1 1 0 2 1 100 74 75 56 3446 3 5 1 1016 4 25 19 0 0 +"24996" 4 155 5 1 1 0 2 1 100 74 56 59 1349 9 4 1 835 5 5 3 1 1 +"24997" 4 155 5 1 1 0 2 1 100 74 59 74 1705 1 7 0 1486 4 25 15 0 1 +"24998" 4 155 5 1 1 0 3 1 100 255 100 125 3562 7 5 1 1191 4 25 25 1 1 +"24999" 4 155 5 1 1 0 3 1 100 255 125 131 3604 2 1 1 902 
5 5 6 1 1 +"25000" 4 155 5 1 1 0 3 1 100 255 131 164 2137 8 6 1 2217 4 25 33 1 1 +"25001" 4 155 5 1 1 0 3 1 100 255 164 205 2757 4 7 0 2392 4 25 41 0 1 +"25002" 4 155 5 1 1 0 3 1 100 255 205 195 2171 3 10 1 1338 5 5 10 0 0 +"25003" 4 155 5 1 1 0 3 1 100 255 195 205 3464 6 8 0 1882 5 5 10 0 1 +"25004" 4 155 5 1 1 0 3 1 100 255 205 215 1893 9 2 1 1041 5 5 10 1 1 +"25005" 4 155 5 1 1 0 3 1 100 255 215 204 1997 5 3 0 1147 5 5 11 1 0 +"25006" 4 155 5 1 1 0 3 1 100 255 204 255 1745 1 10 0 1556 4 25 51 0 1 +"25007" 4 155 5 1 1 0 4 1 100 281 100 125 5954 2 3 0 1524 4 25 25 0 1 +"25008" 4 155 5 1 1 0 4 1 100 281 125 156 1552 7 4 1 3468 4 25 31 1 1 +"25009" 4 155 5 1 1 0 4 1 100 281 156 148 2534 3 1 0 835 5 5 8 1 0 +"25010" 4 155 5 1 1 0 4 1 100 281 148 259 2041 1 9 0 3565 2 75 111 0 1 +"25011" 4 155 5 1 1 0 4 1 100 281 259 272 2008 8 7 1 1063 5 5 13 1 1 +"25012" 4 155 5 1 1 0 4 1 100 281 272 340 6649 9 2 1 2579 4 25 68 1 1 +"25013" 4 155 5 1 1 0 4 1 100 281 340 357 2037 5 6 0 1715 5 5 17 0 1 +"25014" 4 155 5 1 1 0 4 1 100 281 357 268 2134 6 8 1 2439 4 25 89 0 0 +"25015" 4 155 5 1 1 0 4 1 100 281 268 281 2664 4 7 0 807 5 5 13 0 1 +"25016" 4 157 2 0 1 1 1 1 100 243 100 195 25472 8 3 1 2594 5 95 95 1 1 +"25017" 4 157 2 0 1 1 1 1 100 243 195 244 8428 3 7 0 713 2 25 49 0 1 +"25018" 4 157 2 0 1 1 1 1 100 243 244 256 3428 8 2 1 1366 1 5 12 1 1 +"25019" 4 157 2 0 1 1 1 1 100 243 256 243 1283 2 1 0 883 1 5 13 1 0 +"25020" 4 157 3 1 1 1 1 1 100 58 100 75 1673 9 7 0 1183 2 25 25 1 0 +"25021" 4 157 3 1 1 1 1 1 100 58 75 71 1412 4 8 1 718 1 5 4 0 0 +"25022" 4 157 3 1 1 1 1 1 100 58 71 67 3121 8 2 0 634 1 5 4 1 0 +"25023" 4 157 3 1 1 1 1 1 100 58 67 70 1225 2 1 1 828 1 5 3 1 1 +"25024" 4 157 3 1 1 1 1 1 100 58 70 52 2674 7 6 0 632 2 25 18 1 0 +"25025" 4 157 3 1 1 1 1 1 100 58 52 55 632 5 3 1 1511 1 5 3 1 1 +"25026" 4 157 3 1 1 1 1 1 100 58 55 52 2324 3 6 1 627 1 5 3 0 0 +"25027" 4 157 3 1 1 1 1 1 100 58 52 55 1395 6 9 0 590 1 5 3 0 1 +"25028" 4 157 3 1 1 1 1 1 100 58 55 58 3071 1 4 0 1234 1 5 3 0 1 +"25029" 4 157 3 1 1 1 2 1 100 300 100 105 2860 2 9 0 1778 1 5 5 0 1 +"25030" 4 157 3 1 1 1 2 1 100 300 105 100 2018 4 10 1 648 1 5 5 0 0 +"25031" 4 157 3 1 1 1 2 1 100 300 100 95 1134 3 1 0 709 1 5 5 1 0 +"25032" 4 157 3 1 1 1 2 1 100 300 95 166 1809 8 6 1 640 4 75 71 1 1 +"25033" 4 157 3 1 1 1 2 1 100 300 166 208 3031 5 7 0 716 2 25 42 0 1 +"25034" 4 157 3 1 1 1 2 1 100 300 208 218 4479 6 8 0 1079 1 5 10 0 1 +"25035" 4 157 3 1 1 1 2 1 100 300 218 229 2737 7 5 1 2045 1 5 11 1 1 +"25036" 4 157 3 1 1 1 2 1 100 300 229 286 2777 1 10 0 1147 2 25 57 0 1 +"25037" 4 157 3 1 1 1 2 1 100 300 286 300 1337 9 3 1 591 1 5 14 1 1 +"25038" 4 157 3 1 1 1 3 1 100 191 100 105 1681 3 5 0 1302 1 5 5 0 1 +"25039" 4 157 3 1 1 1 3 1 100 191 105 79 2614 8 9 1 2175 2 25 26 0 0 +"25040" 4 157 3 1 1 1 3 1 100 191 79 83 1588 2 4 0 2870 1 5 4 0 1 +"25041" 4 157 3 1 1 1 3 1 100 191 83 87 2051 6 3 1 663 1 5 4 1 1 +"25042" 4 157 3 1 1 1 3 1 100 191 87 83 5205 7 6 0 579 1 5 4 1 0 +"25043" 4 157 3 1 1 1 3 1 100 191 83 87 1028 4 2 1 793 1 5 4 1 1 +"25044" 4 157 3 1 1 1 3 1 100 191 87 170 1825 1 8 0 0 5 95 83 0 1 +"25045" 4 157 3 1 1 1 3 1 100 191 170 127 2687 5 7 1 965 2 25 43 0 0 +"25046" 4 157 3 1 1 1 3 1 100 191 127 191 1996 9 5 1 763 3 50 64 1 1 +"25047" 4 157 3 1 1 1 4 1 100 192 100 150 1719 8 7 1 636 3 50 50 1 1 +"25048" 4 157 3 1 1 1 4 1 100 192 150 158 1491 3 10 0 1025 1 5 8 0 1 +"25049" 4 157 3 1 1 1 4 1 100 192 158 150 1230 7 9 1 713 1 5 8 0 0 +"25050" 4 157 3 1 1 1 4 1 100 192 150 142 732 9 1 0 495 1 5 8 1 0 +"25051" 4 157 3 1 1 1 4 1 100 
192 142 135 492 2 3 1 1675 1 5 7 0 0 +"25052" 4 157 3 1 1 1 4 1 100 192 135 203 1105 1 8 0 1642 3 50 68 0 1 +"25053" 4 157 3 1 1 1 4 1 100 192 203 193 2049 5 4 0 725 1 5 10 1 0 +"25054" 4 157 3 1 1 1 4 1 100 192 193 183 1304 4 2 0 823 1 5 10 1 0 +"25055" 4 157 3 1 1 1 4 1 100 192 183 192 973 6 3 1 908 1 5 9 1 1 +"25056" 4 157 4 0 1 0 1 1 100 285 100 195 2871 2 7 0 1100 1 95 95 0 1 +"25057" 4 157 4 0 1 0 1 1 100 285 195 293 2853 3 7 0 1283 3 50 98 0 1 +"25058" 4 157 4 0 1 0 1 1 100 285 293 571 1395 2 8 0 855 1 95 278 0 1 +"25059" 4 157 4 0 1 0 1 1 100 285 571 285 3412 8 9 1 1135 3 50 286 0 0 +"25060" 4 157 5 1 1 0 1 1 100 27 100 195 1349 1 3 0 795 1 95 95 0 1 +"25061" 4 157 5 1 1 0 1 1 100 27 195 380 1138 6 5 1 714 1 95 185 1 1 +"25062" 4 157 5 1 1 0 1 1 100 27 380 741 1237 2 8 0 695 1 95 361 0 1 +"25063" 4 157 5 1 1 0 1 1 100 27 741 37 899 8 9 1 638 1 95 704 0 0 +"25064" 4 157 5 1 1 0 1 1 100 27 37 72 3148 3 4 0 1236 1 95 35 0 1 +"25065" 4 157 5 1 1 0 1 1 100 27 72 140 1678 5 7 0 820 1 95 68 0 1 +"25066" 4 157 5 1 1 0 1 1 100 27 140 273 2443 7 4 1 644 1 95 133 1 1 +"25067" 4 157 5 1 1 0 1 1 100 27 273 14 1483 4 1 0 634 1 95 259 1 0 +"25068" 4 157 5 1 1 0 1 1 100 27 14 27 3025 9 6 1 463 1 95 13 1 1 +"25069" 4 157 5 1 1 0 2 0 100 0 100 195 1995 8 1 1 642 1 95 95 1 1 +"25070" 4 157 5 1 1 0 2 0 100 0 195 10 2432 6 2 0 466 1 95 185 1 0 +"25071" 4 157 5 1 1 0 2 0 100 0 10 0 745 7 9 1 772 1 95 10 0 0 +"25072" 4 157 5 1 1 0 3 1 100 4 100 195 5013 7 5 1 675 1 95 95 1 1 +"25073" 4 157 5 1 1 0 3 1 100 4 195 10 1080 2 1 0 635 1 95 185 1 0 +"25074" 4 157 5 1 1 0 3 1 100 4 10 20 1746 8 6 1 932 1 95 10 1 1 +"25075" 4 157 5 1 1 0 3 1 100 4 20 39 1197 4 7 0 762 1 95 19 0 1 +"25076" 4 157 5 1 1 0 3 1 100 4 39 76 1163 3 10 0 1530 1 95 37 0 1 +"25077" 4 157 5 1 1 0 3 1 100 4 76 4 2686 6 8 1 1148 1 95 72 0 0 +"25078" 4 157 5 1 1 0 3 1 100 4 4 8 1338 9 2 1 664 1 95 4 1 1 +"25079" 4 157 5 1 1 0 3 1 100 4 8 2 2069 5 3 0 1433 2 75 6 1 0 +"25080" 4 157 5 1 1 0 3 1 100 4 2 4 1818 1 10 0 705 1 95 2 0 1 +"25081" 4 157 5 1 1 0 4 0 100 0 100 105 1966 2 3 0 799 5 5 5 0 1 +"25082" 4 157 5 1 1 0 4 0 100 0 105 205 3214 7 4 1 771 1 95 100 1 1 +"25083" 4 157 5 1 1 0 4 0 100 0 205 10 1572 3 1 0 2318 1 95 195 1 0 +"25084" 4 157 5 1 1 0 4 0 100 0 10 0 973 1 9 1 724 1 95 10 0 0 +"25085" 4 158 2 0 1 1 1 1 100 134 100 150 29172 8 3 1 1662 3 50 50 1 1 +"25086" 4 158 2 0 1 1 1 1 100 134 150 188 6052 3 7 0 2138 2 25 38 0 1 +"25087" 4 158 2 0 1 1 1 1 100 134 188 179 2178 8 2 0 1423 1 5 9 1 0 +"25088" 4 158 2 0 1 1 1 1 100 134 179 134 2553 2 1 0 1453 2 25 45 1 0 +"25089" 4 158 3 1 1 1 1 1 100 432 100 175 1912 9 7 1 835 4 75 75 1 1 +"25090" 4 158 3 1 1 1 1 1 100 432 175 131 5137 4 8 1 642 2 25 44 0 0 +"25091" 4 158 3 1 1 1 1 1 100 432 131 197 2568 8 2 1 907 3 50 66 1 1 +"25092" 4 158 3 1 1 1 1 1 100 432 197 98 2821 2 1 0 901 3 50 99 1 0 +"25093" 4 158 3 1 1 1 1 1 100 432 98 147 3047 7 6 1 581 3 50 49 1 1 +"25094" 4 158 3 1 1 1 1 1 100 432 147 184 4222 5 3 1 915 2 25 37 1 1 +"25095" 4 158 3 1 1 1 1 1 100 432 184 230 2157 3 6 0 1013 2 25 46 0 1 +"25096" 4 158 3 1 1 1 1 1 100 432 230 288 2625 6 9 0 3723 2 25 58 0 1 +"25097" 4 158 3 1 1 1 1 1 100 432 288 432 2455 1 4 0 944 3 50 144 0 1 +"25098" 4 158 3 1 1 1 2 1 100 905 100 150 2783 2 9 0 2241 3 50 50 0 1 +"25099" 4 158 3 1 1 1 2 1 100 905 150 188 2691 4 10 0 2225 2 25 38 0 1 +"25100" 4 158 3 1 1 1 2 1 100 905 188 235 3580 3 1 1 1448 2 25 47 1 1 +"25101" 4 158 3 1 1 1 2 1 100 905 235 353 1544 8 6 1 916 3 50 118 1 1 +"25102" 4 158 3 1 1 1 2 1 100 905 353 441 3734 5 7 0 754 2 25 88 0 1 
+"25103" 4 158 3 1 1 1 2 1 100 905 441 463 3308 6 8 0 1124 1 5 22 0 1 +"25104" 4 158 3 1 1 1 2 1 100 905 463 579 2716 7 5 1 1056 2 25 116 1 1 +"25105" 4 158 3 1 1 1 2 1 100 905 579 724 2691 1 10 0 1593 2 25 145 0 1 +"25106" 4 158 3 1 1 1 2 1 100 905 724 905 2323 9 3 1 2034 2 25 181 1 1 +"25107" 4 158 3 1 1 1 3 1 100 260 100 75 2672 3 5 1 526 2 25 25 0 0 +"25108" 4 158 3 1 1 1 3 1 100 260 75 37 1618 8 9 1 1100 3 50 38 0 0 +"25109" 4 158 3 1 1 1 3 1 100 260 37 56 1532 2 4 0 1026 3 50 19 0 1 +"25110" 4 158 3 1 1 1 3 1 100 260 56 42 2958 6 3 0 4999 2 25 14 1 0 +"25111" 4 158 3 1 1 1 3 1 100 260 42 63 1886 7 6 1 1387 3 50 21 1 1 +"25112" 4 158 3 1 1 1 3 1 100 260 63 79 5238 4 2 1 2882 2 25 16 1 1 +"25113" 4 158 3 1 1 1 3 1 100 260 79 138 1447 1 8 0 1152 4 75 59 0 1 +"25114" 4 158 3 1 1 1 3 1 100 260 138 173 1982 5 7 0 2214 2 25 35 0 1 +"25115" 4 158 3 1 1 1 3 1 100 260 173 260 1538 9 5 1 3734 3 50 87 1 1 +"25116" 4 158 3 1 1 1 4 1 100 388 100 150 1865 8 7 1 750 3 50 50 1 1 +"25117" 4 158 3 1 1 1 4 1 100 388 150 188 1943 3 10 0 1473 2 25 38 0 1 +"25118" 4 158 3 1 1 1 4 1 100 388 188 235 1672 7 9 0 1030 2 25 47 0 1 +"25119" 4 158 3 1 1 1 4 1 100 388 235 353 1522 9 1 1 991 3 50 118 1 1 +"25120" 4 158 3 1 1 1 4 1 100 388 353 441 1837 2 3 0 1483 2 25 88 0 1 +"25121" 4 158 3 1 1 1 4 1 100 388 441 551 2322 1 8 0 1385 2 25 110 0 1 +"25122" 4 158 3 1 1 1 4 1 100 388 551 689 2301 5 4 1 1357 2 25 138 1 1 +"25123" 4 158 3 1 1 1 4 1 100 388 689 517 2474 4 2 0 1410 2 25 172 1 0 +"25124" 4 158 3 1 1 1 4 1 100 388 517 388 3476 6 3 0 1004 2 25 129 1 0 +"25125" 4 158 4 0 1 0 1 1 100 164 100 175 7022 2 7 0 2630 2 75 75 0 1 +"25126" 4 158 4 0 1 0 1 1 100 164 175 263 2134 3 7 0 1726 3 50 88 0 1 +"25127" 4 158 4 0 1 0 1 1 100 164 263 329 2346 2 8 0 1336 4 25 66 0 1 +"25128" 4 158 4 0 1 0 1 1 100 164 329 164 1829 8 9 1 4476 3 50 165 0 0 +"25129" 4 158 5 1 1 0 1 1 100 432 100 175 1806 1 3 0 3901 2 75 75 0 1 +"25130" 4 158 5 1 1 0 1 1 100 432 175 263 1953 6 5 1 3562 3 50 88 1 1 +"25131" 4 158 5 1 1 0 1 1 100 432 263 329 1565 2 8 0 745 4 25 66 0 1 +"25132" 4 158 5 1 1 0 1 1 100 432 329 164 1675 8 9 1 3121 3 50 165 0 0 +"25133" 4 158 5 1 1 0 1 1 100 432 164 205 1490 3 4 0 881 4 25 41 0 1 +"25134" 4 158 5 1 1 0 1 1 100 432 205 256 2188 5 7 0 938 4 25 51 0 1 +"25135" 4 158 5 1 1 0 1 1 100 432 256 384 1539 7 4 1 1439 3 50 128 1 1 +"25136" 4 158 5 1 1 0 1 1 100 432 384 288 2016 4 1 0 1198 4 25 96 1 0 +"25137" 4 158 5 1 1 0 1 1 100 432 288 432 1251 9 6 1 1437 3 50 144 1 1 +"25138" 4 158 5 1 1 0 2 1 100 323 100 150 1688 8 1 1 909 3 50 50 1 1 +"25139" 4 158 5 1 1 0 2 1 100 323 150 225 1703 6 2 1 3668 3 50 75 1 1 +"25140" 4 158 5 1 1 0 2 1 100 323 225 169 2851 7 9 1 993 4 25 56 0 0 +"25141" 4 158 5 1 1 0 2 1 100 323 169 254 1262 2 10 0 920 3 50 85 0 1 +"25142" 4 158 5 1 1 0 2 1 100 323 254 190 3010 5 3 0 721 4 25 64 1 0 +"25143" 4 158 5 1 1 0 2 1 100 323 190 95 2124 4 2 0 1205 3 50 95 1 0 +"25144" 4 158 5 1 1 0 2 1 100 323 95 143 5041 3 5 0 1919 3 50 48 0 1 +"25145" 4 158 5 1 1 0 2 1 100 323 143 215 1473 9 4 1 1104 3 50 72 1 1 +"25146" 4 158 5 1 1 0 2 1 100 323 215 323 1385 1 7 0 2645 3 50 108 0 1 +"25147" 4 158 5 1 1 0 3 1 100 564 100 150 1768 7 5 1 1034 3 50 50 1 1 +"25148" 4 158 5 1 1 0 3 1 100 564 150 75 1214 2 1 0 795 3 50 75 1 0 +"25149" 4 158 5 1 1 0 3 1 100 564 75 146 1442 8 6 1 1059 1 95 71 1 1 +"25150" 4 158 5 1 1 0 3 1 100 564 146 183 3286 4 7 0 735 4 25 37 0 1 +"25151" 4 158 5 1 1 0 3 1 100 564 183 229 1137 3 10 0 481 4 25 46 0 1 +"25152" 4 158 5 1 1 0 3 1 100 564 229 172 3456 6 8 1 2313 4 25 57 0 0 +"25153" 4 158 5 
1 1 0 3 1 100 564 172 301 1544 9 2 1 1621 2 75 129 1 1 +"25154" 4 158 5 1 1 0 3 1 100 564 301 376 1765 5 3 1 1391 4 25 75 1 1 +"25155" 4 158 5 1 1 0 3 1 100 564 376 564 1362 1 10 0 992 3 50 188 0 1 +"25156" 4 158 5 1 1 0 4 1 100 89 100 150 1575 2 3 0 1818 3 50 50 0 1 +"25157" 4 158 5 1 1 0 4 1 100 89 150 225 2336 7 4 1 1191 3 50 75 1 1 +"25158" 4 158 5 1 1 0 4 1 100 89 225 112 1572 3 1 0 1839 3 50 113 1 0 +"25159" 4 158 5 1 1 0 4 1 100 89 112 168 1356 1 9 0 730 3 50 56 0 1 +"25160" 4 158 5 1 1 0 4 1 100 89 168 252 1460 8 7 1 1557 3 50 84 1 1 +"25161" 4 158 5 1 1 0 4 1 100 89 252 378 1600 9 2 1 786 3 50 126 1 1 +"25162" 4 158 5 1 1 0 4 1 100 89 378 359 1522 5 6 1 799 5 5 19 0 0 +"25163" 4 158 5 1 1 0 4 1 100 89 359 179 1447 6 8 1 2864 3 50 180 0 0 +"25164" 4 158 5 1 1 0 4 1 100 89 179 89 5229 4 7 1 1370 3 50 90 0 0 +"25165" 4 160 2 0 1 1 1 1 100 187 100 150 4693 8 3 1 1796 3 50 50 1 1 +"25166" 4 160 2 0 1 1 1 1 100 187 150 263 8043 3 7 0 1154 4 75 113 0 1 +"25167" 4 160 2 0 1 1 1 1 100 187 263 250 2721 8 2 0 1141 1 5 13 1 0 +"25168" 4 160 2 0 1 1 1 1 100 187 250 187 2963 2 1 0 484 2 25 63 1 0 +"25169" 4 160 3 1 1 1 1 1 100 432 100 195 3363 9 7 1 1250 5 95 95 1 1 +"25170" 4 160 3 1 1 1 1 1 100 432 195 293 2643 4 8 0 1276 3 50 98 0 1 +"25171" 4 160 3 1 1 1 1 1 100 432 293 278 28108 8 2 0 1488 1 5 15 1 0 +"25172" 4 160 3 1 1 1 1 1 100 432 278 208 70046 2 1 0 1798 2 25 70 1 0 +"25173" 4 160 3 1 1 1 1 1 100 432 208 364 2414 7 6 1 777 4 75 156 1 1 +"25174" 4 160 3 1 1 1 1 1 100 432 364 346 1521 5 3 0 1038 1 5 18 1 0 +"25175" 4 160 3 1 1 1 1 1 100 432 346 433 2818 3 6 0 977 2 25 87 0 1 +"25176" 4 160 3 1 1 1 1 1 100 432 433 455 2264 6 9 0 593 1 5 22 0 1 +"25177" 4 160 3 1 1 1 1 1 100 432 455 432 1610 1 4 1 531 1 5 23 0 0 +"25178" 4 160 3 1 1 1 2 1 100 770 100 195 3679 2 9 0 838 5 95 95 0 1 +"25179" 4 160 3 1 1 1 2 1 100 770 195 244 3035 4 10 0 880 2 25 49 0 1 +"25180" 4 160 3 1 1 1 2 1 100 770 244 232 6731 3 1 0 4480 1 5 12 1 0 +"25181" 4 160 3 1 1 1 2 1 100 770 232 348 4384 8 6 1 1846 3 50 116 1 1 +"25182" 4 160 3 1 1 1 2 1 100 770 348 522 1859 5 7 0 248 3 50 174 0 1 +"25183" 4 160 3 1 1 1 2 1 100 770 522 391 4522 6 8 1 3352 2 25 131 0 0 +"25184" 4 160 3 1 1 1 2 1 100 770 391 587 1918 7 5 1 2009 3 50 196 1 1 +"25185" 4 160 3 1 1 1 2 1 100 770 587 616 2807 1 10 0 2663 1 5 29 0 1 +"25186" 4 160 3 1 1 1 2 1 100 770 616 770 2165 9 3 1 908 2 25 154 1 1 +"25187" 4 160 3 1 1 1 3 1 100 64 100 195 1120 3 5 0 593 5 95 95 0 1 +"25188" 4 160 3 1 1 1 3 1 100 64 195 10 1258 8 9 1 700 5 95 185 0 0 +"25189" 4 160 3 1 1 1 3 1 100 64 10 20 1160 2 4 0 1047 5 95 10 0 1 +"25190" 4 160 3 1 1 1 3 1 100 64 20 39 1395 6 3 1 654 5 95 19 1 1 +"25191" 4 160 3 1 1 1 3 1 100 64 39 68 2188 7 6 1 1914 4 75 29 1 1 +"25192" 4 160 3 1 1 1 3 1 100 64 68 34 1308 4 2 0 988 3 50 34 1 0 +"25193" 4 160 3 1 1 1 3 1 100 64 34 66 1418 1 8 0 1652 5 95 32 0 1 +"25194" 4 160 3 1 1 1 3 1 100 64 66 33 5019 5 7 1 2162 3 50 33 0 0 +"25195" 4 160 3 1 1 1 3 1 100 64 33 64 1552 9 5 1 0 5 95 31 1 1 +"25196" 4 160 3 1 1 1 4 1 100 1390 100 150 1272 8 7 1 801 3 50 50 1 1 +"25197" 4 160 3 1 1 1 4 1 100 1390 150 188 712 3 10 0 333 2 25 38 0 1 +"25198" 4 160 3 1 1 1 4 1 100 1390 188 179 1162 7 9 1 1049 1 5 9 0 0 +"25199" 4 160 3 1 1 1 4 1 100 1390 179 349 1564 9 1 1 0 5 95 170 1 1 +"25200" 4 160 3 1 1 1 4 1 100 1390 349 681 1399 2 3 0 1659 5 95 332 0 1 +"25201" 4 160 3 1 1 1 4 1 100 1390 681 1328 3746 1 8 0 0 5 95 647 0 1 +"25202" 4 160 3 1 1 1 4 1 100 1390 1328 1394 3661 5 4 1 373 1 5 66 1 1 +"25203" 4 160 3 1 1 1 4 1 100 1390 1394 1324 2256 4 2 0 290 1 5 
70 1 0 +"25204" 4 160 3 1 1 1 4 1 100 1390 1324 1390 1050 6 3 1 349 1 5 66 1 1 +"25205" 4 160 4 0 1 0 1 1 100 29 100 150 8619 2 7 0 503 3 50 50 0 1 +"25206" 4 160 4 0 1 0 1 1 100 29 150 293 2116 3 7 0 621 1 95 143 0 1 +"25207" 4 160 4 0 1 0 1 1 100 29 293 571 1556 2 8 0 328 1 95 278 0 1 +"25208" 4 160 4 0 1 0 1 1 100 29 571 29 1157 8 9 1 636 1 95 542 0 0 +"25209" 4 160 5 1 1 0 1 0 100 0 100 195 1861 1 3 0 362 1 95 95 0 1 +"25210" 4 160 5 1 1 0 1 0 100 0 195 380 926 6 5 1 287 1 95 185 1 1 +"25211" 4 160 5 1 1 0 1 0 100 0 380 570 919 2 8 0 1033 3 50 190 0 1 +"25212" 4 160 5 1 1 0 1 0 100 0 570 28 805 8 9 1 229 1 95 542 0 0 +"25213" 4 160 5 1 1 0 1 0 100 0 28 55 919 3 4 0 328 1 95 27 0 1 +"25214" 4 160 5 1 1 0 1 0 100 0 55 3 1134 5 7 1 253 1 95 52 0 0 +"25215" 4 160 5 1 1 0 1 0 100 0 3 6 1110 7 4 1 415 1 95 3 1 1 +"25216" 4 160 5 1 1 0 1 0 100 0 6 0 1161 4 1 0 799 1 95 6 1 0 +"25217" 4 160 5 1 1 0 2 0 100 0 100 195 6044 8 1 1 354 1 95 95 1 1 +"25218" 4 160 5 1 1 0 2 0 100 0 195 380 2118 6 2 1 223 1 95 185 1 1 +"25219" 4 160 5 1 1 0 2 0 100 0 380 19 793 7 9 1 866 1 95 361 0 0 +"25220" 4 160 5 1 1 0 2 0 100 0 19 37 1966 2 10 0 374 1 95 18 0 1 +"25221" 4 160 5 1 1 0 2 0 100 0 37 2 2465 5 3 0 265 1 95 35 1 0 +"25222" 4 160 5 1 1 0 2 0 100 0 2 0 1778 4 2 0 427 1 95 2 1 0 +"25223" 4 160 5 1 1 0 3 0 100 1 100 195 1354 7 5 1 1070 1 95 95 1 1 +"25224" 4 160 5 1 1 0 3 0 100 1 195 10 1425 2 1 0 1995 1 95 185 1 0 +"25225" 4 160 5 1 1 0 3 0 100 1 10 20 3225 8 6 1 1061 1 95 10 1 1 +"25226" 4 160 5 1 1 0 3 0 100 1 20 39 7080 4 7 0 1212 1 95 19 0 1 +"25227" 4 160 5 1 1 0 3 0 100 1 39 76 3951 3 10 0 353 1 95 37 0 1 +"25228" 4 160 5 1 1 0 3 0 100 1 76 4 933 6 8 1 254 1 95 72 0 0 +"25229" 4 160 5 1 1 0 3 0 100 1 4 1 1183 9 2 0 1668 2 75 3 1 0 +"25230" 4 160 5 1 1 0 4 1 100 971 100 195 1297 2 3 0 2169 1 95 95 0 1 +"25231" 4 160 5 1 1 0 4 1 100 971 195 341 2031 7 4 1 3108 2 75 146 1 1 +"25232" 4 160 5 1 1 0 4 1 100 971 341 85 2773 3 1 0 623 2 75 256 1 0 +"25233" 4 160 5 1 1 0 4 1 100 971 85 166 3142 1 9 0 982 1 95 81 0 1 +"25234" 4 160 5 1 1 0 4 1 100 971 166 324 1045 8 7 1 449 1 95 158 1 1 +"25235" 4 160 5 1 1 0 4 1 100 971 324 632 1153 9 2 1 311 1 95 308 1 1 +"25236" 4 160 5 1 1 0 4 1 100 971 632 664 2590 5 6 0 1137 5 5 32 0 1 +"25237" 4 160 5 1 1 0 4 1 100 971 664 498 21480 6 8 1 1968 4 25 166 0 0 +"25238" 4 160 5 1 1 0 4 1 100 971 498 971 3719 4 7 0 1308 1 95 473 0 1 +"25239" 4 161 2 0 1 1 1 1 100 4 100 175 6144 8 3 1 2826 4 75 75 1 1 +"25240" 4 161 2 0 1 1 1 1 100 4 175 87 7200 3 7 1 3559 3 50 88 0 0 +"25241" 4 161 2 0 1 1 1 1 100 4 87 83 20490 8 2 0 4073 1 5 4 1 0 +"25242" 4 161 2 0 1 1 1 1 100 4 83 4 2156 2 1 0 2828 5 95 79 1 0 +"25243" 4 161 3 1 1 1 1 1 100 117 100 95 6346 9 7 0 1226 1 5 5 1 0 +"25244" 4 161 3 1 1 1 1 1 100 117 95 119 1420 4 8 0 950 2 25 24 0 1 +"25245" 4 161 3 1 1 1 1 1 100 117 119 113 2758 8 2 0 1049 1 5 6 1 0 +"25246" 4 161 3 1 1 1 1 1 100 117 113 107 852 2 1 0 824 1 5 6 1 0 +"25247" 4 161 3 1 1 1 1 1 100 117 107 80 1382 7 6 0 1186 2 25 27 1 0 +"25248" 4 161 3 1 1 1 1 1 100 117 80 60 1183 5 3 0 1885 2 25 20 1 0 +"25249" 4 161 3 1 1 1 1 1 100 117 60 90 776 3 6 0 2385 3 50 30 0 1 +"25250" 4 161 3 1 1 1 1 1 100 117 90 67 2205 6 9 1 194 2 25 23 0 0 +"25251" 4 161 3 1 1 1 1 1 100 117 67 117 1694 1 4 0 891 4 75 50 0 1 +"25252" 4 161 3 1 1 1 2 1 100 255 100 95 4471 2 9 1 1256 1 5 5 0 0 +"25253" 4 161 3 1 1 1 2 1 100 255 95 100 761 4 10 0 931 1 5 5 0 1 +"25254" 4 161 3 1 1 1 2 1 100 255 100 105 900 3 1 1 358 1 5 5 1 1 +"25255" 4 161 3 1 1 1 2 1 100 255 105 205 546 8 6 1 637 5 95 100 1 1 
+"25256" 4 161 3 1 1 1 2 1 100 255 205 215 1708 5 7 0 808 1 5 10 0 1 +"25257" 4 161 3 1 1 1 2 1 100 255 215 226 1231 6 8 0 338 1 5 11 0 1 +"25258" 4 161 3 1 1 1 2 1 100 255 226 215 847 7 5 0 404 1 5 11 1 0 +"25259" 4 161 3 1 1 1 2 1 100 255 215 204 2366 1 10 1 291 1 5 11 0 0 +"25260" 4 161 3 1 1 1 2 1 100 255 204 255 586 9 3 1 284 2 25 51 1 1 +"25261" 4 161 3 1 1 1 3 1 100 256 100 95 2249 3 5 1 580 1 5 5 0 0 +"25262" 4 161 3 1 1 1 3 1 100 256 95 119 1269 8 9 0 206 2 25 24 0 1 +"25263" 4 161 3 1 1 1 3 1 100 256 119 232 1418 2 4 0 1741 5 95 113 0 1 +"25264" 4 161 3 1 1 1 3 1 100 256 232 244 3487 6 3 1 799 1 5 12 1 1 +"25265" 4 161 3 1 1 1 3 1 100 256 244 256 470 7 6 1 405 1 5 12 1 1 +"25266" 4 161 3 1 1 1 3 1 100 256 256 269 327 4 2 1 437 1 5 13 1 1 +"25267" 4 161 3 1 1 1 3 1 100 256 269 256 261 1 8 1 350 1 5 13 0 0 +"25268" 4 161 3 1 1 1 3 1 100 256 256 269 698 5 7 0 935 1 5 13 0 1 +"25269" 4 161 3 1 1 1 3 1 100 256 269 256 719 9 5 0 291 1 5 13 1 0 +"25270" 4 161 3 1 1 1 4 1 100 397 100 95 16937 8 7 0 2105 1 5 5 1 0 +"25271" 4 161 3 1 1 1 4 1 100 397 95 90 818 3 10 1 465 1 5 5 0 0 +"25272" 4 161 3 1 1 1 4 1 100 397 90 95 440 7 9 0 409 1 5 5 0 1 +"25273" 4 161 3 1 1 1 4 1 100 397 95 90 858 9 1 0 527 1 5 5 1 0 +"25274" 4 161 3 1 1 1 4 1 100 397 90 176 1715 2 3 0 2456 5 95 86 0 1 +"25275" 4 161 3 1 1 1 4 1 100 397 176 343 1336 1 8 0 215 5 95 167 0 1 +"25276" 4 161 3 1 1 1 4 1 100 397 343 360 1674 5 4 1 656 1 5 17 1 1 +"25277" 4 161 3 1 1 1 4 1 100 397 360 378 1085 4 2 1 255 1 5 18 1 1 +"25278" 4 161 3 1 1 1 4 1 100 397 378 397 264 6 3 1 577 1 5 19 1 1 +"25279" 4 161 4 0 1 0 1 1 100 27 100 150 13988 2 7 0 600 3 50 50 0 1 +"25280" 4 161 4 0 1 0 1 1 100 27 150 7 3353 3 7 1 2455 1 95 143 0 0 +"25281" 4 161 4 0 1 0 1 1 100 27 7 14 1055 2 8 0 546 1 95 7 0 1 +"25282" 4 161 4 0 1 0 1 1 100 27 14 27 622 8 9 0 3062 1 95 13 0 1 +"25283" 4 161 5 1 1 0 1 0 100 0 100 195 1430 1 3 0 488 1 95 95 0 1 +"25284" 4 161 5 1 1 0 1 0 100 0 195 380 790 6 5 1 406 1 95 185 1 1 +"25285" 4 161 5 1 1 0 1 0 100 0 380 741 1423 2 8 0 329 1 95 361 0 1 +"25286" 4 161 5 1 1 0 1 0 100 0 741 37 783 8 9 1 447 1 95 704 0 0 +"25287" 4 161 5 1 1 0 1 0 100 0 37 2 4031 3 4 1 421 1 95 35 0 0 +"25288" 4 161 5 1 1 0 1 0 100 0 2 4 629 5 7 0 349 1 95 2 0 1 +"25289" 4 161 5 1 1 0 1 0 100 0 4 0 330 7 4 0 303 1 95 4 1 0 +"25290" 4 161 5 1 1 0 2 1 100 1100 100 195 2139 8 1 1 291 1 95 95 1 1 +"25291" 4 161 5 1 1 0 2 1 100 1100 195 10 898 6 2 0 373 1 95 185 1 0 +"25292" 4 161 5 1 1 0 2 1 100 1100 10 20 96 7 9 0 521 1 95 10 0 1 +"25293" 4 161 5 1 1 0 2 1 100 1100 20 39 413 2 10 0 311 1 95 19 0 1 +"25294" 4 161 5 1 1 0 2 1 100 1100 39 76 717 5 3 1 304 1 95 37 1 1 +"25295" 4 161 5 1 1 0 2 1 100 1100 76 148 1671 4 2 1 364 1 95 72 1 1 +"25296" 4 161 5 1 1 0 2 1 100 1100 148 289 712 3 5 0 292 1 95 141 0 1 +"25297" 4 161 5 1 1 0 2 1 100 1100 289 564 1811 9 4 1 754 1 95 275 1 1 +"25298" 4 161 5 1 1 0 2 1 100 1100 564 1100 859 1 7 0 648 1 95 536 0 1 +"25299" 4 161 5 1 1 0 3 0 100 0 100 50 2089 7 5 0 324 3 50 50 1 0 +"25300" 4 161 5 1 1 0 3 0 100 0 50 2 722 2 1 0 2349 1 95 48 1 0 +"25301" 4 161 5 1 1 0 3 0 100 0 2 0 499 8 6 0 264 1 95 2 1 0 +"25302" 4 161 5 1 1 0 4 0 100 0 100 5 2841 2 3 1 543 1 95 95 0 0 +"25303" 4 161 5 1 1 0 4 0 100 0 5 0 466 7 4 0 434 1 95 5 1 0 +"25304" 4 166 2 0 1 1 1 1 100 176 100 150 7578 8 3 1 784 3 50 50 1 1 +"25305" 4 166 2 0 1 1 1 1 100 176 150 112 7583 3 7 1 1236 2 25 38 0 0 +"25306" 4 166 2 0 1 1 1 1 100 176 112 168 3575 8 2 1 650 3 50 56 1 1 +"25307" 4 166 2 0 1 1 1 1 100 176 168 176 3585 2 1 1 1300 1 5 8 1 1 +"25308" 4 166 3 
1 1 1 1 0 100 0 100 5 4077 9 7 0 0 5 95 95 1 0 +"25309" 4 166 3 1 1 1 1 0 100 0 5 4 3416 4 8 1 1892 2 25 1 0 0 +"25310" 4 166 3 1 1 1 1 0 100 0 4 3 2511 8 2 0 1605 2 25 1 1 0 +"25311" 4 166 3 1 1 1 1 0 100 0 3 0 1678 2 1 0 2156 5 95 3 1 0 +"25312" 4 166 3 1 1 1 2 1 100 440 100 125 3395 2 9 0 1279 2 25 25 0 1 +"25313" 4 166 3 1 1 1 2 1 100 440 125 131 2839 4 10 0 942 1 5 6 0 1 +"25314" 4 166 3 1 1 1 2 1 100 440 131 164 3070 3 1 1 1521 2 25 33 1 1 +"25315" 4 166 3 1 1 1 2 1 100 440 164 246 5030 8 6 1 452 3 50 82 1 1 +"25316" 4 166 3 1 1 1 2 1 100 440 246 234 2372 5 7 1 857 1 5 12 0 0 +"25317" 4 166 3 1 1 1 2 1 100 440 234 246 2349 6 8 0 355 1 5 12 0 1 +"25318" 4 166 3 1 1 1 2 1 100 440 246 308 3303 7 5 1 287 2 25 62 1 1 +"25319" 4 166 3 1 1 1 2 1 100 440 308 293 1614 1 10 1 567 1 5 15 0 0 +"25320" 4 166 3 1 1 1 2 1 100 440 293 440 1842 9 3 1 1024 3 50 147 1 1 +"25321" 4 166 3 1 1 1 3 1 100 26 100 95 2617 3 5 1 386 1 5 5 0 0 +"25322" 4 166 3 1 1 1 3 1 100 26 95 71 3988 8 9 1 420 2 25 24 0 0 +"25323" 4 166 3 1 1 1 3 1 100 26 71 67 1949 2 4 1 1630 1 5 4 0 0 +"25324" 4 166 3 1 1 1 3 1 100 26 67 64 2213 6 3 0 938 1 5 3 1 0 +"25325" 4 166 3 1 1 1 3 1 100 26 64 61 1147 7 6 0 362 1 5 3 1 0 +"25326" 4 166 3 1 1 1 3 1 100 26 61 15 918 4 2 0 680 4 75 46 1 0 +"25327" 4 166 3 1 1 1 3 1 100 26 15 23 1949 1 8 0 214 3 50 8 0 1 +"25328" 4 166 3 1 1 1 3 1 100 26 23 17 2161 5 7 1 248 2 25 6 0 0 +"25329" 4 166 3 1 1 1 3 1 100 26 17 26 1722 9 5 1 293 3 50 9 1 1 +"25330" 4 166 3 1 1 1 4 1 100 504 100 175 2828 8 7 1 596 4 75 75 1 1 +"25331" 4 166 3 1 1 1 4 1 100 504 175 184 3021 3 10 0 834 1 5 9 0 1 +"25332" 4 166 3 1 1 1 4 1 100 504 184 193 2480 7 9 0 351 1 5 9 0 1 +"25333" 4 166 3 1 1 1 4 1 100 504 193 338 1660 9 1 1 698 4 75 145 1 1 +"25334" 4 166 3 1 1 1 4 1 100 504 338 321 1895 2 3 1 418 1 5 17 0 0 +"25335" 4 166 3 1 1 1 4 1 100 504 321 337 2595 1 8 0 1884 1 5 16 0 1 +"25336" 4 166 3 1 1 1 4 1 100 504 337 354 3384 5 4 1 333 1 5 17 1 1 +"25337" 4 166 3 1 1 1 4 1 100 504 354 336 3786 4 2 0 356 1 5 18 1 0 +"25338" 4 166 3 1 1 1 4 1 100 504 336 504 3296 6 3 1 378 3 50 168 1 1 +"25339" 4 166 4 0 1 0 1 1 100 14 100 195 7122 2 7 0 4021 1 95 95 0 1 +"25340" 4 166 4 0 1 0 1 1 100 14 195 146 7202 3 7 1 978 4 25 49 0 0 +"25341" 4 166 4 0 1 0 1 1 100 14 146 285 1632 2 8 0 352 1 95 139 0 1 +"25342" 4 166 4 0 1 0 1 1 100 14 285 14 1356 8 9 1 329 1 95 271 0 0 +"25343" 4 166 5 1 1 0 1 0 100 1 100 195 2769 1 3 0 515 1 95 95 0 1 +"25344" 4 166 5 1 1 0 1 0 100 1 195 205 2936 6 5 1 1666 5 5 10 1 1 +"25345" 4 166 5 1 1 0 1 0 100 1 205 400 1632 2 8 0 384 1 95 195 0 1 +"25346" 4 166 5 1 1 0 1 0 100 1 400 20 2707 8 9 1 376 1 95 380 0 0 +"25347" 4 166 5 1 1 0 1 0 100 1 20 1 5260 3 4 1 375 1 95 19 0 0 +"25348" 4 166 5 1 1 0 2 1 100 4114 100 195 2069 8 1 1 288 1 95 95 1 1 +"25349" 4 166 5 1 1 0 2 1 100 4114 195 380 1399 6 2 1 259 1 95 185 1 1 +"25350" 4 166 5 1 1 0 2 1 100 4114 380 285 2084 7 9 1 1311 4 25 95 0 0 +"25351" 4 166 5 1 1 0 2 1 100 4114 285 556 2413 2 10 0 440 1 95 271 0 1 +"25352" 4 166 5 1 1 0 2 1 100 4114 556 1084 2392 5 3 1 348 1 95 528 1 1 +"25353" 4 166 5 1 1 0 2 1 100 4114 1084 1030 3913 4 2 0 853 5 5 54 1 0 +"25354" 4 166 5 1 1 0 2 1 100 4114 1030 2009 1738 3 5 0 293 1 95 979 0 1 +"25355" 4 166 5 1 1 0 2 1 100 4114 2009 3918 5444 9 4 1 495 1 95 1909 1 1 +"25356" 4 166 5 1 1 0 2 1 100 4114 3918 4114 5589 1 7 0 1569 5 5 196 0 1 +"25357" 4 166 5 1 1 0 3 0 100 1 100 195 4376 7 5 1 677 1 95 95 1 1 +"25358" 4 166 5 1 1 0 3 0 100 1 195 10 6034 2 1 0 505 1 95 185 1 0 +"25359" 4 166 5 1 1 0 3 0 100 1 10 20 2837 8 6 1 372 1 
95 10 1 1 +"25360" 4 166 5 1 1 0 3 0 100 1 20 1 1679 4 7 1 205 1 95 19 0 0 +"25361" 4 166 5 1 1 0 4 1 100 515 100 195 2032 2 3 0 236 1 95 95 0 1 +"25362" 4 166 5 1 1 0 4 1 100 515 195 380 2462 7 4 1 238 1 95 185 1 1 +"25363" 4 166 5 1 1 0 4 1 100 515 380 570 3200 3 1 1 965 3 50 190 1 1 +"25364" 4 166 5 1 1 0 4 1 100 515 570 1112 1932 1 9 0 310 1 95 542 0 1 +"25365" 4 166 5 1 1 0 4 1 100 515 1112 2168 1493 8 7 1 779 1 95 1056 1 1 +"25366" 4 166 5 1 1 0 4 1 100 515 2168 4228 4075 9 2 1 751 1 95 2060 1 1 +"25367" 4 166 5 1 1 0 4 1 100 515 4228 5285 3200 5 6 0 2174 4 25 1057 0 1 +"25368" 4 166 5 1 1 0 4 1 100 515 5285 264 2299 6 8 1 396 1 95 5021 0 0 +"25369" 4 166 5 1 1 0 4 1 100 515 264 515 3558 4 7 0 298 1 95 251 0 1 +"25370" 4 168 2 0 1 1 1 1 100 42 100 150 5594 8 3 1 1318 3 50 50 1 1 +"25371" 4 168 2 0 1 1 1 1 100 42 150 225 6653 3 7 0 830 3 50 75 0 1 +"25372" 4 168 2 0 1 1 1 1 100 42 225 169 2796 8 2 0 693 2 25 56 1 0 +"25373" 4 168 2 0 1 1 1 1 100 42 169 42 3097 2 1 0 980 4 75 127 1 0 +"25374" 4 168 3 1 1 1 1 1 100 211 100 50 9966 9 7 0 3354 3 50 50 1 0 +"25375" 4 168 3 1 1 1 1 1 100 211 50 98 4392 4 8 0 997 5 95 48 0 1 +"25376" 4 168 3 1 1 1 1 1 100 211 98 191 1883 8 2 1 1311 5 95 93 1 1 +"25377" 4 168 3 1 1 1 1 1 100 211 191 95 3630 2 1 0 1055 3 50 96 1 0 +"25378" 4 168 3 1 1 1 1 1 100 211 95 143 2186 7 6 1 1928 3 50 48 1 1 +"25379" 4 168 3 1 1 1 1 1 100 211 143 215 1306 5 3 1 1418 3 50 72 1 1 +"25380" 4 168 3 1 1 1 1 1 100 211 215 161 2667 3 6 1 1426 2 25 54 0 0 +"25381" 4 168 3 1 1 1 1 1 100 211 161 282 1363 6 9 0 544 4 75 121 0 1 +"25382" 4 168 3 1 1 1 1 1 100 211 282 211 2708 1 4 1 1081 2 25 71 0 0 +"25383" 4 168 3 1 1 1 2 1 100 8 100 75 3748 2 9 1 1840 2 25 25 0 0 +"25384" 4 168 3 1 1 1 2 1 100 8 75 113 1725 4 10 0 905 3 50 38 0 1 +"25385" 4 168 3 1 1 1 2 1 100 8 113 85 1132 3 1 0 1692 2 25 28 1 0 +"25386" 4 168 3 1 1 1 2 1 100 8 85 21 1137 8 6 0 1585 4 75 64 1 0 +"25387" 4 168 3 1 1 1 2 1 100 8 21 37 3162 5 7 0 1418 4 75 16 0 1 +"25388" 4 168 3 1 1 1 2 1 100 8 37 18 1956 6 8 1 1253 3 50 19 0 0 +"25389" 4 168 3 1 1 1 2 1 100 8 18 35 1127 7 5 1 735 5 95 17 1 1 +"25390" 4 168 3 1 1 1 2 1 100 8 35 17 1242 1 10 1 1229 3 50 18 0 0 +"25391" 4 168 3 1 1 1 2 1 100 8 17 8 1095 9 3 0 1465 3 50 9 1 0 +"25392" 4 168 3 1 1 1 3 1 100 215 100 150 3091 3 5 0 710 3 50 50 0 1 +"25393" 4 168 3 1 1 1 3 1 100 215 150 112 1721 8 9 1 525 2 25 38 0 0 +"25394" 4 168 3 1 1 1 3 1 100 215 112 56 1209 2 4 1 1245 3 50 56 0 0 +"25395" 4 168 3 1 1 1 3 1 100 215 56 84 1415 6 3 1 522 3 50 28 1 1 +"25396" 4 168 3 1 1 1 3 1 100 215 84 105 1462 7 6 1 1102 2 25 21 1 1 +"25397" 4 168 3 1 1 1 3 1 100 215 105 131 2007 4 2 1 989 2 25 26 1 1 +"25398" 4 168 3 1 1 1 3 1 100 215 131 164 2267 1 8 0 1213 2 25 33 0 1 +"25399" 4 168 3 1 1 1 3 1 100 215 164 205 2744 5 7 0 852 2 25 41 0 1 +"25400" 4 168 3 1 1 1 3 1 100 215 205 215 3249 9 5 1 1748 1 5 10 1 1 +"25401" 4 168 3 1 1 1 4 1 100 81 100 125 2212 8 7 1 1033 2 25 25 1 1 +"25402" 4 168 3 1 1 1 4 1 100 81 125 94 1477 3 10 1 1020 2 25 31 0 0 +"25403" 4 168 3 1 1 1 4 1 100 81 94 118 825 7 9 0 872 2 25 24 0 1 +"25404" 4 168 3 1 1 1 4 1 100 81 118 112 1020 9 1 0 825 1 5 6 1 0 +"25405" 4 168 3 1 1 1 4 1 100 81 112 196 686 2 3 0 564 4 75 84 0 1 +"25406" 4 168 3 1 1 1 4 1 100 81 196 206 974 1 8 0 1718 1 5 10 0 1 +"25407" 4 168 3 1 1 1 4 1 100 81 206 309 1313 5 4 1 1688 3 50 103 1 1 +"25408" 4 168 3 1 1 1 4 1 100 81 309 324 2013 4 2 1 1283 1 5 15 1 1 +"25409" 4 168 3 1 1 1 4 1 100 81 324 81 1083 6 3 0 621 4 75 243 1 0 +"25410" 4 168 4 0 1 0 1 1 100 143 100 150 6658 2 7 0 531 3 50 50 0 
1 +"25411" 4 168 4 0 1 0 1 1 100 143 150 293 1887 3 7 0 1187 1 95 143 0 1 +"25412" 4 168 4 0 1 0 1 1 100 143 293 571 1216 2 8 0 1044 1 95 278 0 1 +"25413" 4 168 4 0 1 0 1 1 100 143 571 143 1327 8 9 1 1150 2 75 428 0 0 +"25414" 4 168 5 1 1 0 1 0 100 0 100 195 1225 1 3 0 1372 1 95 95 0 1 +"25415" 4 168 5 1 1 0 1 0 100 0 195 10 848 6 5 0 1421 1 95 185 1 0 +"25416" 4 168 5 1 1 0 1 0 100 0 10 0 919 2 8 1 720 1 95 10 0 0 +"25417" 4 168 5 1 1 0 2 0 100 0 100 5 3210 8 1 0 1575 1 95 95 1 0 +"25418" 4 168 5 1 1 0 2 0 100 0 5 10 1650 6 2 1 889 1 95 5 1 1 +"25419" 4 168 5 1 1 0 2 0 100 0 10 0 1195 7 9 1 2584 1 95 10 0 0 +"25420" 4 168 5 1 1 0 3 0 100 0 100 5 2241 7 5 0 2293 1 95 95 1 0 +"25421" 4 168 5 1 1 0 3 0 100 0 5 0 1453 2 1 0 605 1 95 5 1 0 +"25422" 4 168 5 1 1 0 4 1 100 16 100 25 1303 2 3 1 1231 2 75 75 0 0 +"25423" 4 168 5 1 1 0 4 1 100 16 25 6 1409 7 4 0 884 2 75 19 1 0 +"25424" 4 168 5 1 1 0 4 1 100 16 6 12 2095 3 1 1 1297 1 95 6 1 1 +"25425" 4 168 5 1 1 0 4 1 100 16 12 21 1936 1 9 0 818 2 75 9 0 1 +"25426" 4 168 5 1 1 0 4 1 100 16 21 41 1489 8 7 1 1599 1 95 20 1 1 +"25427" 4 168 5 1 1 0 4 1 100 16 41 80 1648 9 2 1 1352 1 95 39 1 1 +"25428" 4 168 5 1 1 0 4 1 100 16 80 4 932 5 6 1 1253 1 95 76 0 0 +"25429" 4 168 5 1 1 0 4 1 100 16 4 8 1023 6 8 0 1020 1 95 4 0 1 +"25430" 4 168 5 1 1 0 4 1 100 16 8 16 761 4 7 0 778 1 95 8 0 1 +"25431" 4 170 2 0 1 1 1 1 100 117 100 150 3907 8 3 1 791 3 50 50 1 1 +"25432" 4 170 2 0 1 1 1 1 100 117 150 188 17070 3 7 0 4175 2 25 38 0 1 +"25433" 4 170 2 0 1 1 1 1 100 117 188 235 2174 8 2 1 149 2 25 47 1 1 +"25434" 4 170 2 0 1 1 1 1 100 117 235 117 2587 2 1 0 566 3 50 118 1 0 +"25435" 4 170 3 1 1 1 1 1 100 99 100 150 9071 9 7 1 1741 3 50 50 1 1 +"25436" 4 170 3 1 1 1 1 1 100 99 150 225 1912 4 8 0 923 3 50 75 0 1 +"25437" 4 170 3 1 1 1 1 1 100 99 225 338 3291 8 2 1 1109 3 50 113 1 1 +"25438" 4 170 3 1 1 1 1 1 100 99 338 169 1997 2 1 0 1084 3 50 169 1 0 +"25439" 4 170 3 1 1 1 1 1 100 99 169 211 1381 7 6 1 1858 2 25 42 1 1 +"25440" 4 170 3 1 1 1 1 1 100 99 211 105 1918 5 3 0 553 3 50 106 1 0 +"25441" 4 170 3 1 1 1 1 1 100 99 105 158 1060 3 6 0 919 3 50 53 0 1 +"25442" 4 170 3 1 1 1 1 1 100 99 158 79 1753 6 9 1 419 3 50 79 0 0 +"25443" 4 170 3 1 1 1 1 1 100 99 79 99 1212 1 4 0 2952 2 25 20 0 1 +"25444" 4 170 3 1 1 1 2 1 100 124 100 150 2255 2 9 0 482 3 50 50 0 1 +"25445" 4 170 3 1 1 1 2 1 100 124 150 225 1560 4 10 0 516 3 50 75 0 1 +"25446" 4 170 3 1 1 1 2 1 100 124 225 112 1729 3 1 0 524 3 50 113 1 0 +"25447" 4 170 3 1 1 1 2 1 100 124 112 168 1728 8 6 1 548 3 50 56 1 1 +"25448" 4 170 3 1 1 1 2 1 100 124 168 126 1103 5 7 1 1128 2 25 42 0 0 +"25449" 4 170 3 1 1 1 2 1 100 124 126 63 1712 6 8 1 556 3 50 63 0 0 +"25450" 4 170 3 1 1 1 2 1 100 124 63 79 1463 7 5 1 847 2 25 16 1 1 +"25451" 4 170 3 1 1 1 2 1 100 124 79 99 1108 1 10 0 725 2 25 20 0 1 +"25452" 4 170 3 1 1 1 2 1 100 124 99 124 2015 9 3 1 621 2 25 25 1 1 +"25453" 4 170 3 1 1 1 3 1 100 163 100 125 1392 3 5 0 1292 2 25 25 0 1 +"25454" 4 170 3 1 1 1 3 1 100 163 125 94 1579 8 9 1 1574 2 25 31 0 0 +"25455" 4 170 3 1 1 1 3 1 100 163 94 118 1400 2 4 0 924 2 25 24 0 1 +"25456" 4 170 3 1 1 1 3 1 100 163 118 148 1405 6 3 1 1127 2 25 30 1 1 +"25457" 4 170 3 1 1 1 3 1 100 163 148 185 1287 7 6 1 935 2 25 37 1 1 +"25458" 4 170 3 1 1 1 3 1 100 163 185 139 1744 4 2 0 682 2 25 46 1 0 +"25459" 4 170 3 1 1 1 3 1 100 163 139 174 1415 1 8 0 514 2 25 35 0 1 +"25460" 4 170 3 1 1 1 3 1 100 163 174 130 1519 5 7 1 741 2 25 44 0 0 +"25461" 4 170 3 1 1 1 3 1 100 163 130 163 1678 9 5 1 372 2 25 33 1 1 +"25462" 4 170 3 1 1 1 4 1 100 142 100 105 
1742 8 7 1 753 1 5 5 1 1 +"25463" 4 170 3 1 1 1 4 1 100 142 105 131 1202 3 10 0 678 2 25 26 0 1 +"25464" 4 170 3 1 1 1 4 1 100 142 131 98 883 7 9 1 559 2 25 33 0 0 +"25465" 4 170 3 1 1 1 4 1 100 142 98 103 2017 9 1 1 535 1 5 5 1 1 +"25466" 4 170 3 1 1 1 4 1 100 142 103 129 1723 2 3 0 383 2 25 26 0 1 +"25467" 4 170 3 1 1 1 4 1 100 142 129 135 1114 1 8 0 939 1 5 6 0 1 +"25468" 4 170 3 1 1 1 4 1 100 142 135 142 1572 5 4 1 1110 1 5 7 1 1 +"25469" 4 170 3 1 1 1 4 1 100 142 142 135 1886 4 2 0 658 1 5 7 1 0 +"25470" 4 170 3 1 1 1 4 1 100 142 135 142 1720 6 3 1 1020 1 5 7 1 1 +"25471" 4 170 4 0 1 0 1 1 100 29 100 150 5209 2 7 0 711 3 50 50 0 1 +"25472" 4 170 4 0 1 0 1 1 100 29 150 293 3667 3 7 0 3314 1 95 143 0 1 +"25473" 4 170 4 0 1 0 1 1 100 29 293 571 1329 2 8 0 1392 1 95 278 0 1 +"25474" 4 170 4 0 1 0 1 1 100 29 571 29 1443 8 9 1 876 1 95 542 0 0 +"25475" 4 170 5 1 1 0 1 0 100 0 100 195 1639 1 3 0 895 1 95 95 0 1 +"25476" 4 170 5 1 1 0 1 0 100 0 195 380 2369 6 5 1 446 1 95 185 1 1 +"25477" 4 170 5 1 1 0 1 0 100 0 380 741 1310 2 8 0 293 1 95 361 0 1 +"25478" 4 170 5 1 1 0 1 0 100 0 741 37 1109 8 9 1 350 1 95 704 0 0 +"25479" 4 170 5 1 1 0 1 0 100 0 37 72 1411 3 4 0 501 1 95 35 0 1 +"25480" 4 170 5 1 1 0 1 0 100 0 72 4 1819 5 7 1 282 1 95 68 0 0 +"25481" 4 170 5 1 1 0 1 0 100 0 4 8 1266 7 4 1 1188 1 95 4 1 1 +"25482" 4 170 5 1 1 0 1 0 100 0 8 0 1241 4 1 0 483 1 95 8 1 0 +"25483" 4 170 5 1 1 0 2 0 100 0 100 195 7615 8 1 1 1082 1 95 95 1 1 +"25484" 4 170 5 1 1 0 2 0 100 0 195 380 1196 6 2 1 625 1 95 185 1 1 +"25485" 4 170 5 1 1 0 2 0 100 0 380 19 936 7 9 1 4329 1 95 361 0 0 +"25486" 4 170 5 1 1 0 2 0 100 0 19 37 1160 2 10 0 463 1 95 18 0 1 +"25487" 4 170 5 1 1 0 2 0 100 0 37 2 1182 5 3 0 1564 1 95 35 1 0 +"25488" 4 170 5 1 1 0 2 0 100 0 2 0 1454 4 2 0 734 1 95 2 1 0 +"25489" 4 170 5 1 1 0 3 1 100 31 100 195 1149 7 5 1 476 1 95 95 1 1 +"25490" 4 170 5 1 1 0 3 1 100 31 195 10 1403 2 1 0 541 1 95 185 1 0 +"25491" 4 170 5 1 1 0 3 1 100 31 10 20 1008 8 6 1 526 1 95 10 1 1 +"25492" 4 170 5 1 1 0 3 1 100 31 20 39 682 4 7 0 495 1 95 19 0 1 +"25493" 4 170 5 1 1 0 3 1 100 31 39 76 1125 3 10 0 381 1 95 37 0 1 +"25494" 4 170 5 1 1 0 3 1 100 31 76 4 912 6 8 1 411 1 95 72 0 0 +"25495" 4 170 5 1 1 0 3 1 100 31 4 8 980 9 2 1 420 1 95 4 1 1 +"25496" 4 170 5 1 1 0 3 1 100 31 8 16 1093 5 3 1 454 1 95 8 1 1 +"25497" 4 170 5 1 1 0 3 1 100 31 16 31 1237 1 10 0 504 1 95 15 0 1 +"25498" 4 170 5 1 1 0 4 1 100 119 100 195 2614 2 3 0 1359 1 95 95 0 1 +"25499" 4 170 5 1 1 0 4 1 100 119 195 380 884 7 4 1 383 1 95 185 1 1 +"25500" 4 170 5 1 1 0 4 1 100 119 380 19 818 3 1 0 465 1 95 361 1 0 +"25501" 4 170 5 1 1 0 4 1 100 119 19 37 840 1 9 0 326 1 95 18 0 1 +"25502" 4 170 5 1 1 0 4 1 100 119 37 72 899 8 7 1 342 1 95 35 1 1 +"25503" 4 170 5 1 1 0 4 1 100 119 72 140 1283 9 2 1 356 1 95 68 1 1 +"25504" 4 170 5 1 1 0 4 1 100 119 140 245 2856 5 6 0 558 2 75 105 0 1 +"25505" 4 170 5 1 1 0 4 1 100 119 245 61 1264 6 8 1 530 2 75 184 0 0 +"25506" 4 170 5 1 1 0 4 1 100 119 61 119 1494 4 7 0 508 1 95 58 0 1 +"25507" 4 173 2 0 1 1 1 1 100 211 100 150 10245 8 3 1 3755 3 50 50 1 1 +"25508" 4 173 2 0 1 1 1 1 100 211 150 188 2950 3 7 0 1863 2 25 38 0 1 +"25509" 4 173 2 0 1 1 1 1 100 211 188 282 1932 8 2 1 1222 3 50 94 1 1 +"25510" 4 173 2 0 1 1 1 1 100 211 282 211 5353 2 1 0 2320 2 25 71 1 0 +"25511" 4 173 3 1 1 1 1 1 100 422 100 195 2059 9 7 1 1671 5 95 95 1 1 +"25512" 4 173 3 1 1 1 1 1 100 422 195 244 3540 4 8 0 1305 2 25 49 0 1 +"25513" 4 173 3 1 1 1 1 1 100 422 244 366 4649 8 2 1 1065 3 50 122 1 1 +"25514" 4 173 3 1 1 1 1 1 100 422 366 274 
2116 2 1 0 868 2 25 92 1 0 +"25515" 4 173 3 1 1 1 1 1 100 422 274 343 2560 7 6 1 1500 2 25 69 1 1 +"25516" 4 173 3 1 1 1 1 1 100 422 343 257 2698 5 3 0 1607 2 25 86 1 0 +"25517" 4 173 3 1 1 1 1 1 100 422 257 321 2272 3 6 0 1490 2 25 64 0 1 +"25518" 4 173 3 1 1 1 1 1 100 422 321 241 2505 6 9 1 1983 2 25 80 0 0 +"25519" 4 173 3 1 1 1 1 1 100 422 241 422 2214 1 4 0 1482 4 75 181 0 1 +"25520" 4 173 3 1 1 1 2 1 100 755 100 175 4245 2 9 0 833 4 75 75 0 1 +"25521" 4 173 3 1 1 1 2 1 100 755 175 184 3823 4 10 0 878 1 5 9 0 1 +"25522" 4 173 3 1 1 1 2 1 100 755 184 138 2569 3 1 0 701 2 25 46 1 0 +"25523" 4 173 3 1 1 1 2 1 100 755 138 242 1913 8 6 1 1484 4 75 104 1 1 +"25524" 4 173 3 1 1 1 2 1 100 755 242 230 3314 5 7 1 2499 1 5 12 0 0 +"25525" 4 173 3 1 1 1 2 1 100 755 230 172 2953 6 8 1 1162 2 25 58 0 0 +"25526" 4 173 3 1 1 1 2 1 100 755 172 258 1712 7 5 1 1644 3 50 86 1 1 +"25527" 4 173 3 1 1 1 2 1 100 755 258 503 1988 1 10 0 1188 5 95 245 0 1 +"25528" 4 173 3 1 1 1 2 1 100 755 503 755 1808 9 3 1 1048 3 50 252 1 1 +"25529" 4 173 3 1 1 1 3 1 100 425 100 125 4057 3 5 0 574 2 25 25 0 1 +"25530" 4 173 3 1 1 1 3 1 100 425 125 31 2007 8 9 1 797 4 75 94 0 0 +"25531" 4 173 3 1 1 1 3 1 100 425 31 60 2317 2 4 0 0 5 95 29 0 1 +"25532" 4 173 3 1 1 1 3 1 100 425 60 105 2532 6 3 1 1154 4 75 45 1 1 +"25533" 4 173 3 1 1 1 3 1 100 425 105 158 2240 7 6 1 1516 3 50 53 1 1 +"25534" 4 173 3 1 1 1 3 1 100 425 158 118 2793 4 2 0 1154 2 25 40 1 0 +"25535" 4 173 3 1 1 1 3 1 100 425 118 230 1776 1 8 0 1954 5 95 112 0 1 +"25536" 4 173 3 1 1 1 3 1 100 425 230 218 2514 5 7 1 944 1 5 12 0 0 +"25537" 4 173 3 1 1 1 3 1 100 425 218 425 1584 9 5 1 1939 5 95 207 1 1 +"25538" 4 173 3 1 1 1 4 1 100 1373 100 175 2501 8 7 1 1380 4 75 75 1 1 +"25539" 4 173 3 1 1 1 4 1 100 1373 175 263 1911 3 10 0 826 3 50 88 0 1 +"25540" 4 173 3 1 1 1 4 1 100 1373 263 197 1759 7 9 1 2057 2 25 66 0 0 +"25541" 4 173 3 1 1 1 4 1 100 1373 197 384 1485 9 1 1 1941 5 95 187 1 1 +"25542" 4 173 3 1 1 1 4 1 100 1373 384 749 1838 2 3 0 1013 5 95 365 0 1 +"25543" 4 173 3 1 1 1 4 1 100 1373 749 1311 1706 1 8 0 1510 4 75 562 0 1 +"25544" 4 173 3 1 1 1 4 1 100 1373 1311 1377 2340 5 4 1 586 1 5 66 1 1 +"25545" 4 173 3 1 1 1 4 1 100 1373 1377 1308 3060 4 2 0 984 1 5 69 1 0 +"25546" 4 173 3 1 1 1 4 1 100 1373 1308 1373 1767 6 3 1 616 1 5 65 1 1 +"25547" 4 173 4 0 1 0 1 1 100 29 100 195 2620 2 7 0 1221 1 95 95 0 1 +"25548" 4 173 4 0 1 0 1 1 100 29 195 293 2096 3 7 0 1290 3 50 98 0 1 +"25549" 4 173 4 0 1 0 1 1 100 29 293 571 2786 2 8 0 787 1 95 278 0 1 +"25550" 4 173 4 0 1 0 1 1 100 29 571 29 1395 8 9 1 975 1 95 542 0 0 +"25551" 4 173 5 1 1 0 1 1 100 527 100 195 1700 1 3 0 633 1 95 95 0 1 +"25552" 4 173 5 1 1 0 1 1 100 527 195 293 1440 6 5 1 1049 3 50 98 1 1 +"25553" 4 173 5 1 1 0 1 1 100 527 293 571 1476 2 8 0 860 1 95 278 0 1 +"25554" 4 173 5 1 1 0 1 1 100 527 571 29 1456 8 9 1 616 1 95 542 0 0 +"25555" 4 173 5 1 1 0 1 1 100 527 29 57 1320 3 4 0 519 1 95 28 0 1 +"25556" 4 173 5 1 1 0 1 1 100 527 57 111 1963 5 7 0 500 1 95 54 0 1 +"25557" 4 173 5 1 1 0 1 1 100 527 111 216 1457 7 4 1 497 1 95 105 1 1 +"25558" 4 173 5 1 1 0 1 1 100 527 216 270 2951 4 1 1 1616 4 25 54 1 1 +"25559" 4 173 5 1 1 0 1 1 100 527 270 527 1545 9 6 1 477 1 95 257 1 1 +"25560" 4 173 5 1 1 0 2 0 100 1 100 195 1550 8 1 1 515 1 95 95 1 1 +"25561" 4 173 5 1 1 0 2 0 100 1 195 244 2167 6 2 1 1030 4 25 49 1 1 +"25562" 4 173 5 1 1 0 2 0 100 1 244 12 1451 7 9 1 475 1 95 232 0 0 +"25563" 4 173 5 1 1 0 2 0 100 1 12 23 2626 2 10 0 523 1 95 11 0 1 +"25564" 4 173 5 1 1 0 2 0 100 1 23 1 2063 5 3 0 530 1 95 22 1 0 
+"25565" 4 173 5 1 1 0 3 0 100 0 100 195 1631 7 5 1 1327 1 95 95 1 1 +"25566" 4 173 5 1 1 0 3 0 100 0 195 10 1445 2 1 0 590 1 95 185 1 0 +"25567" 4 173 5 1 1 0 3 0 100 0 10 20 1303 8 6 1 446 1 95 10 1 1 +"25568" 4 173 5 1 1 0 3 0 100 0 20 39 1390 4 7 0 778 1 95 19 0 1 +"25569" 4 173 5 1 1 0 3 0 100 0 39 76 1490 3 10 0 856 1 95 37 0 1 +"25570" 4 173 5 1 1 0 3 0 100 0 76 4 1636 6 8 1 700 1 95 72 0 0 +"25571" 4 173 5 1 1 0 3 0 100 0 4 8 1816 9 2 1 783 1 95 4 1 1 +"25572" 4 173 5 1 1 0 3 0 100 0 8 0 1727 5 3 0 498 1 95 8 1 0 +"25573" 4 173 5 1 1 0 4 1 100 205 100 195 1925 2 3 0 631 1 95 95 0 1 +"25574" 4 173 5 1 1 0 4 1 100 205 195 380 1726 7 4 1 945 1 95 185 1 1 +"25575" 4 173 5 1 1 0 4 1 100 205 380 19 1649 3 1 0 1081 1 95 361 1 0 +"25576" 4 173 5 1 1 0 4 1 100 205 19 37 1818 1 9 0 618 1 95 18 0 1 +"25577" 4 173 5 1 1 0 4 1 100 205 37 72 2066 8 7 1 1053 1 95 35 1 1 +"25578" 4 173 5 1 1 0 4 1 100 205 72 140 2144 9 2 1 456 1 95 68 1 1 +"25579" 4 173 5 1 1 0 4 1 100 205 140 210 2047 5 6 0 2787 3 50 70 0 1 +"25580" 4 173 5 1 1 0 4 1 100 205 210 105 2228 6 8 1 1815 3 50 105 0 0 +"25581" 4 173 5 1 1 0 4 1 100 205 105 205 2038 4 7 0 1307 1 95 100 0 1 +"25582" 4 178 2 0 1 1 1 1 100 146 100 125 8390 8 3 1 1940 2 25 25 1 1 +"25583" 4 178 2 0 1 1 1 1 100 146 125 156 16957 3 7 0 1124 2 25 31 0 1 +"25584" 4 178 2 0 1 1 1 1 100 146 156 195 1332 8 2 1 1178 2 25 39 1 1 +"25585" 4 178 2 0 1 1 1 1 100 146 195 146 1415 2 1 0 1418 2 25 49 1 0 +"25586" 4 178 3 1 1 1 1 1 100 150 100 195 4083 9 7 1 1055 5 95 95 1 1 +"25587" 4 178 3 1 1 1 1 1 100 150 195 146 4403 4 8 1 1135 2 25 49 0 0 +"25588" 4 178 3 1 1 1 1 1 100 150 146 219 2302 8 2 1 815 3 50 73 1 1 +"25589" 4 178 3 1 1 1 1 1 100 150 219 109 1701 2 1 0 786 3 50 110 1 0 +"25590" 4 178 3 1 1 1 1 1 100 150 109 164 2626 7 6 1 636 3 50 55 1 1 +"25591" 4 178 3 1 1 1 1 1 100 150 164 123 4603 5 3 0 1726 2 25 41 1 0 +"25592" 4 178 3 1 1 1 1 1 100 150 123 154 1553 3 6 0 1732 2 25 31 0 1 +"25593" 4 178 3 1 1 1 1 1 100 150 154 77 2655 6 9 1 551 3 50 77 0 0 +"25594" 4 178 3 1 1 1 1 1 100 150 77 150 3258 1 4 0 0 5 95 73 0 1 +"25595" 4 178 3 1 1 1 2 1 100 402 100 150 2576 2 9 0 972 3 50 50 0 1 +"25596" 4 178 3 1 1 1 2 1 100 402 150 112 2475 4 10 1 803 2 25 38 0 0 +"25597" 4 178 3 1 1 1 2 1 100 402 112 56 2994 3 1 0 514 3 50 56 1 0 +"25598" 4 178 3 1 1 1 2 1 100 402 56 109 1679 8 6 1 765 5 95 53 1 1 +"25599" 4 178 3 1 1 1 2 1 100 402 109 136 5942 5 7 0 697 2 25 27 0 1 +"25600" 4 178 3 1 1 1 2 1 100 402 136 102 4525 6 8 1 756 2 25 34 0 0 +"25601" 4 178 3 1 1 1 2 1 100 402 102 153 2013 7 5 1 571 3 50 51 1 1 +"25602" 4 178 3 1 1 1 2 1 100 402 153 268 1588 1 10 0 623 4 75 115 0 1 +"25603" 4 178 3 1 1 1 2 1 100 402 268 402 1838 9 3 1 765 3 50 134 1 1 +"25604" 4 178 3 1 1 1 3 1 100 465 100 125 5228 3 5 0 665 2 25 25 0 1 +"25605" 4 178 3 1 1 1 3 1 100 465 125 94 2039 8 9 1 1436 2 25 31 0 0 +"25606" 4 178 3 1 1 1 3 1 100 465 94 141 1501 2 4 0 692 3 50 47 0 1 +"25607" 4 178 3 1 1 1 3 1 100 465 141 176 3576 6 3 1 755 2 25 35 1 1 +"25608" 4 178 3 1 1 1 3 1 100 465 176 220 2892 7 6 1 1064 2 25 44 1 1 +"25609" 4 178 3 1 1 1 3 1 100 465 220 165 2219 4 2 0 687 2 25 55 1 0 +"25610" 4 178 3 1 1 1 3 1 100 465 165 248 2035 1 8 0 1697 3 50 83 0 1 +"25611" 4 178 3 1 1 1 3 1 100 465 248 310 4757 5 7 0 408 2 25 62 0 1 +"25612" 4 178 3 1 1 1 3 1 100 465 310 465 1458 9 5 1 562 3 50 155 1 1 +"25613" 4 178 3 1 1 1 4 1 100 464 100 150 1517 8 7 1 659 3 50 50 1 1 +"25614" 4 178 3 1 1 1 4 1 100 464 150 188 1465 3 10 0 649 2 25 38 0 1 +"25615" 4 178 3 1 1 1 4 1 100 464 188 179 1351 7 9 1 908 1 5 9 0 0 
+"25616" 4 178 3 1 1 1 4 1 100 464 179 313 1342 9 1 1 684 4 75 134 1 1 +"25617" 4 178 3 1 1 1 4 1 100 464 313 391 1761 2 3 0 1867 2 25 78 0 1 +"25618" 4 178 3 1 1 1 4 1 100 464 391 489 1895 1 8 0 1738 2 25 98 0 1 +"25619" 4 178 3 1 1 1 4 1 100 464 489 465 3157 5 4 0 716 1 5 24 1 0 +"25620" 4 178 3 1 1 1 4 1 100 464 465 442 1912 4 2 0 618 1 5 23 1 0 +"25621" 4 178 3 1 1 1 4 1 100 464 442 464 1146 6 3 1 706 1 5 22 1 1 +"25622" 4 178 4 0 1 0 1 1 100 176 100 150 5078 2 7 0 515 3 50 50 0 1 +"25623" 4 178 4 0 1 0 1 1 100 176 150 188 1360 3 7 0 725 4 25 38 0 1 +"25624" 4 178 4 0 1 0 1 1 100 176 188 235 1207 2 8 0 972 4 25 47 0 1 +"25625" 4 178 4 0 1 0 1 1 100 176 235 176 1147 8 9 1 643 4 25 59 0 0 +"25626" 4 178 5 1 1 0 1 1 100 447 100 195 2430 1 3 0 1992 1 95 95 0 1 +"25627" 4 178 5 1 1 0 1 1 100 447 195 205 1694 6 5 1 580 5 5 10 1 1 +"25628" 4 178 5 1 1 0 1 1 100 447 205 256 1141 2 8 0 661 4 25 51 0 1 +"25629" 4 178 5 1 1 0 1 1 100 447 256 64 1363 8 9 1 2728 2 75 192 0 0 +"25630" 4 178 5 1 1 0 1 1 100 447 64 125 1795 3 4 0 835 1 95 61 0 1 +"25631" 4 178 5 1 1 0 1 1 100 447 125 244 2209 5 7 0 773 1 95 119 0 1 +"25632" 4 178 5 1 1 0 1 1 100 447 244 305 1189 7 4 1 643 4 25 61 1 1 +"25633" 4 178 5 1 1 0 1 1 100 447 305 229 1281 4 1 0 1506 4 25 76 1 0 +"25634" 4 178 5 1 1 0 1 1 100 447 229 447 1184 9 6 1 557 1 95 218 1 1 +"25635" 4 178 5 1 1 0 2 1 100 681 100 125 1841 8 1 1 655 4 25 25 1 1 +"25636" 4 178 5 1 1 0 2 1 100 681 125 156 1773 6 2 1 758 4 25 31 1 1 +"25637" 4 178 5 1 1 0 2 1 100 681 156 78 911 7 9 1 2154 3 50 78 0 0 +"25638" 4 178 5 1 1 0 2 1 100 681 78 98 1665 2 10 0 328 4 25 20 0 1 +"25639" 4 178 5 1 1 0 2 1 100 681 98 123 2245 5 3 1 520 4 25 25 1 1 +"25640" 4 178 5 1 1 0 2 1 100 681 123 92 1324 4 2 0 1467 4 25 31 1 0 +"25641" 4 178 5 1 1 0 2 1 100 681 92 179 891 3 5 0 526 1 95 87 0 1 +"25642" 4 178 5 1 1 0 2 1 100 681 179 349 1282 9 4 1 483 1 95 170 1 1 +"25643" 4 178 5 1 1 0 2 1 100 681 349 681 945 1 7 0 498 1 95 332 0 1 +"25644" 4 178 5 1 1 0 3 1 100 952 100 125 2049 7 5 1 2197 4 25 25 1 1 +"25645" 4 178 5 1 1 0 3 1 100 952 125 94 1418 2 1 0 533 4 25 31 1 0 +"25646" 4 178 5 1 1 0 3 1 100 952 94 183 1329 8 6 1 472 1 95 89 1 1 +"25647" 4 178 5 1 1 0 3 1 100 952 183 357 1142 4 7 0 478 1 95 174 0 1 +"25648" 4 178 5 1 1 0 3 1 100 952 357 446 924 3 10 0 1374 4 25 89 0 1 +"25649" 4 178 5 1 1 0 3 1 100 952 446 334 1890 6 8 1 1638 4 25 112 0 0 +"25650" 4 178 5 1 1 0 3 1 100 952 334 651 1869 9 2 1 538 1 95 317 1 1 +"25651" 4 178 5 1 1 0 3 1 100 952 651 488 1534 5 3 0 4371 4 25 163 1 0 +"25652" 4 178 5 1 1 0 3 1 100 952 488 952 1440 1 10 0 833 1 95 464 0 1 +"25653" 4 178 5 1 1 0 4 1 100 4637 100 195 1240 2 3 0 835 1 95 95 0 1 +"25654" 4 178 5 1 1 0 4 1 100 4637 195 380 1289 7 4 1 737 1 95 185 1 1 +"25655" 4 178 5 1 1 0 4 1 100 4637 380 285 1683 3 1 0 2121 4 25 95 1 0 +"25656" 4 178 5 1 1 0 4 1 100 4637 285 556 1273 1 9 0 938 1 95 271 0 1 +"25657" 4 178 5 1 1 0 4 1 100 4637 556 1084 1603 8 7 1 637 1 95 528 1 1 +"25658" 4 178 5 1 1 0 4 1 100 4637 1084 2114 6284 9 2 1 559 1 95 1030 1 1 +"25659" 4 178 5 1 1 0 4 1 100 4637 2114 3171 2001 5 6 0 744 3 50 1057 0 1 +"25660" 4 178 5 1 1 0 4 1 100 4637 3171 2378 1882 6 8 1 1198 4 25 793 0 0 +"25661" 4 178 5 1 1 0 4 1 100 4637 2378 4637 1115 4 7 0 1083 1 95 2259 0 1 +"25662" 4 180 2 0 1 1 1 1 100 355 100 150 22293 8 3 1 1110 3 50 50 1 1 +"25663" 4 180 2 0 1 1 1 1 100 355 150 225 5230 3 7 0 1195 3 50 75 0 1 +"25664" 4 180 2 0 1 1 1 1 100 355 225 338 2887 8 2 1 884 3 50 113 1 1 +"25665" 4 180 2 0 1 1 1 1 100 355 338 355 3887 2 1 1 748 1 5 17 1 1 +"25666" 4 
180 3 1 1 1 1 1 100 79 100 150 2599 9 7 1 1337 3 50 50 1 1 +"25667" 4 180 3 1 1 1 1 1 100 79 150 112 4871 4 8 1 845 2 25 38 0 0 +"25668" 4 180 3 1 1 1 1 1 100 79 112 168 2546 8 2 1 870 3 50 56 1 1 +"25669" 4 180 3 1 1 1 1 1 100 79 168 84 1770 2 1 0 1372 3 50 84 1 0 +"25670" 4 180 3 1 1 1 1 1 100 79 84 80 3048 7 6 0 1212 1 5 4 1 0 +"25671" 4 180 3 1 1 1 1 1 100 79 80 60 3215 5 3 0 1076 2 25 20 1 0 +"25672" 4 180 3 1 1 1 1 1 100 79 60 90 1694 3 6 0 699 3 50 30 0 1 +"25673" 4 180 3 1 1 1 1 1 100 79 90 45 1639 6 9 1 607 3 50 45 0 0 +"25674" 4 180 3 1 1 1 1 1 100 79 45 79 1327 1 4 0 1702 4 75 34 0 1 +"25675" 4 180 3 1 1 1 2 1 100 300 100 150 3153 2 9 0 721 3 50 50 0 1 +"25676" 4 180 3 1 1 1 2 1 100 300 150 112 6043 4 10 1 1793 2 25 38 0 0 +"25677" 4 180 3 1 1 1 2 1 100 300 112 56 1849 3 1 0 927 3 50 56 1 0 +"25678" 4 180 3 1 1 1 2 1 100 300 56 84 2208 8 6 1 1029 3 50 28 1 1 +"25679" 4 180 3 1 1 1 2 1 100 300 84 80 4377 5 7 1 862 1 5 4 0 0 +"25680" 4 180 3 1 1 1 2 1 100 300 80 76 2554 6 8 1 646 1 5 4 0 0 +"25681" 4 180 3 1 1 1 2 1 100 300 76 114 1452 7 5 1 501 3 50 38 1 1 +"25682" 4 180 3 1 1 1 2 1 100 300 114 200 2483 1 10 0 1002 4 75 86 0 1 +"25683" 4 180 3 1 1 1 2 1 100 300 200 300 2280 9 3 1 897 3 50 100 1 1 +"25684" 4 180 3 1 1 1 3 1 100 414 100 125 2623 3 5 0 840 2 25 25 0 1 +"25685" 4 180 3 1 1 1 3 1 100 414 125 94 1573 8 9 1 1120 2 25 31 0 0 +"25686" 4 180 3 1 1 1 3 1 100 414 94 118 1456 2 4 0 840 2 25 24 0 1 +"25687" 4 180 3 1 1 1 3 1 100 414 118 112 1669 6 3 0 1323 1 5 6 1 0 +"25688" 4 180 3 1 1 1 3 1 100 414 112 140 1914 7 6 1 1293 2 25 28 1 1 +"25689" 4 180 3 1 1 1 3 1 100 414 140 147 2628 4 2 1 852 1 5 7 1 1 +"25690" 4 180 3 1 1 1 3 1 100 414 147 221 1345 1 8 0 999 3 50 74 0 1 +"25691" 4 180 3 1 1 1 3 1 100 414 221 276 1549 5 7 0 1377 2 25 55 0 1 +"25692" 4 180 3 1 1 1 3 1 100 414 276 414 1548 9 5 1 1713 3 50 138 1 1 +"25693" 4 180 3 1 1 1 4 1 100 1054 100 150 2683 8 7 1 1185 3 50 50 1 1 +"25694" 4 180 3 1 1 1 4 1 100 1054 150 225 1574 3 10 0 705 3 50 75 0 1 +"25695" 4 180 3 1 1 1 4 1 100 1054 225 214 2228 7 9 1 968 1 5 11 0 0 +"25696" 4 180 3 1 1 1 4 1 100 1054 214 375 1719 9 1 1 2091 4 75 161 1 1 +"25697" 4 180 3 1 1 1 4 1 100 1054 375 563 1605 2 3 0 1566 3 50 188 0 1 +"25698" 4 180 3 1 1 1 4 1 100 1054 563 845 1905 1 8 0 1340 3 50 282 0 1 +"25699" 4 180 3 1 1 1 4 1 100 1054 845 803 1953 5 4 0 802 1 5 42 1 0 +"25700" 4 180 3 1 1 1 4 1 100 1054 803 843 2458 4 2 1 715 1 5 40 1 1 +"25701" 4 180 3 1 1 1 4 1 100 1054 843 1054 1662 6 3 1 788 2 25 211 1 1 +"25702" 4 180 4 0 1 0 1 1 100 8 100 175 7493 2 7 0 1406 2 75 75 0 1 +"25703" 4 180 4 0 1 0 1 1 100 8 175 87 2361 3 7 1 846 3 50 88 0 0 +"25704" 4 180 4 0 1 0 1 1 100 8 87 170 2395 2 8 0 1302 1 95 83 0 1 +"25705" 4 180 4 0 1 0 1 1 100 8 170 8 1949 8 9 1 726 1 95 162 0 0 +"25706" 4 180 5 1 1 0 1 1 100 620 100 195 3178 1 3 0 1096 1 95 95 0 1 +"25707" 4 180 5 1 1 0 1 1 100 620 195 244 2457 6 5 1 867 4 25 49 1 1 +"25708" 4 180 5 1 1 0 1 1 100 620 244 427 3119 2 8 0 936 2 75 183 0 1 +"25709" 4 180 5 1 1 0 1 1 100 620 427 107 1441 8 9 1 890 2 75 320 0 0 +"25710" 4 180 5 1 1 0 1 1 100 620 107 161 1396 3 4 0 1072 3 50 54 0 1 +"25711" 4 180 5 1 1 0 1 1 100 620 161 169 1866 5 7 0 1128 5 5 8 0 1 +"25712" 4 180 5 1 1 0 1 1 100 620 169 254 1759 7 4 1 897 3 50 85 1 1 +"25713" 4 180 5 1 1 0 1 1 100 620 254 318 2596 4 1 1 1182 4 25 64 1 1 +"25714" 4 180 5 1 1 0 1 1 100 620 318 620 2443 9 6 1 4892 1 95 302 1 1 +"25715" 4 180 5 1 1 0 2 1 100 912 100 195 2186 8 1 1 632 1 95 95 1 1 +"25716" 4 180 5 1 1 0 2 1 100 912 195 293 2319 6 2 1 1959 3 50 98 1 1 
+"25717" 4 180 5 1 1 0 2 1 100 912 293 146 3023 7 9 1 901 3 50 147 0 0 +"25718" 4 180 5 1 1 0 2 1 100 912 146 285 1888 2 10 0 2020 1 95 139 0 1 +"25719" 4 180 5 1 1 0 2 1 100 912 285 214 4473 5 3 0 4250 4 25 71 1 0 +"25720" 4 180 5 1 1 0 2 1 100 912 214 160 2677 4 2 0 1980 4 25 54 1 0 +"25721" 4 180 5 1 1 0 2 1 100 912 160 312 1451 3 5 0 2004 1 95 152 0 1 +"25722" 4 180 5 1 1 0 2 1 100 912 312 608 1682 9 4 1 975 1 95 296 1 1 +"25723" 4 180 5 1 1 0 2 1 100 912 608 912 1774 1 7 0 1383 3 50 304 0 1 +"25724" 4 180 5 1 1 0 3 1 100 4 100 150 1671 7 5 1 845 3 50 50 1 1 +"25725" 4 180 5 1 1 0 3 1 100 4 150 7 1353 2 1 0 790 1 95 143 1 0 +"25726" 4 180 5 1 1 0 3 1 100 4 7 14 1310 8 6 1 3381 1 95 7 1 1 +"25727" 4 180 5 1 1 0 3 1 100 4 14 21 1633 4 7 0 1234 3 50 7 0 1 +"25728" 4 180 5 1 1 0 3 1 100 4 21 37 1828 3 10 0 1011 2 75 16 0 1 +"25729" 4 180 5 1 1 0 3 1 100 4 37 18 2072 6 8 1 1315 3 50 19 0 0 +"25730" 4 180 5 1 1 0 3 1 100 4 18 35 1378 9 2 1 974 1 95 17 1 1 +"25731" 4 180 5 1 1 0 3 1 100 4 35 2 2444 5 3 0 725 1 95 33 1 0 +"25732" 4 180 5 1 1 0 3 1 100 4 2 4 1664 1 10 0 1014 1 95 2 0 1 +"25733" 4 180 5 1 1 0 4 1 100 1205 100 195 2733 2 3 0 1480 1 95 95 0 1 +"25734" 4 180 5 1 1 0 4 1 100 1205 195 293 1703 7 4 1 1852 3 50 98 1 1 +"25735" 4 180 5 1 1 0 4 1 100 1205 293 220 1785 3 1 0 1274 4 25 73 1 0 +"25736" 4 180 5 1 1 0 4 1 100 1205 220 385 1419 1 9 0 1173 2 75 165 0 1 +"25737" 4 180 5 1 1 0 4 1 100 1205 385 578 1971 8 7 1 1035 3 50 193 1 1 +"25738" 4 180 5 1 1 0 4 1 100 1205 578 1127 1687 9 2 1 1204 1 95 549 1 1 +"25739" 4 180 5 1 1 0 4 1 100 1205 1127 1071 1928 5 6 1 2843 5 5 56 0 0 +"25740" 4 180 5 1 1 0 4 1 100 1205 1071 803 2696 6 8 1 3046 4 25 268 0 0 +"25741" 4 180 5 1 1 0 4 1 100 1205 803 1205 2794 4 7 0 1283 3 50 402 0 1 +"25742" 4 183 2 0 1 1 1 1 100 285 100 150 7188 8 3 1 972 3 50 50 1 1 +"25743" 4 183 2 0 1 1 1 1 100 285 150 293 8683 3 7 0 0 5 95 143 0 1 +"25744" 4 183 2 0 1 1 1 1 100 285 293 571 3674 8 2 1 0 5 95 278 1 1 +"25745" 4 183 2 0 1 1 1 1 100 285 571 285 2993 2 1 0 2113 3 50 286 1 0 +"25746" 4 183 3 1 1 1 1 1 100 579 100 195 7246 9 7 1 0 5 95 95 1 1 +"25747" 4 183 3 1 1 1 1 1 100 579 195 185 6652 4 8 1 4884 1 5 10 0 0 +"25748" 4 183 3 1 1 1 1 1 100 579 185 278 2754 8 2 1 2044 3 50 93 1 1 +"25749" 4 183 3 1 1 1 1 1 100 579 278 139 3840 2 1 0 1535 3 50 139 1 0 +"25750" 4 183 3 1 1 1 1 1 100 579 139 271 3700 7 6 1 0 5 95 132 1 1 +"25751" 4 183 3 1 1 1 1 1 100 579 271 528 4034 5 3 1 0 5 95 257 1 1 +"25752" 4 183 3 1 1 1 1 1 100 579 528 396 4551 3 6 1 4437 2 25 132 0 0 +"25753" 4 183 3 1 1 1 1 1 100 579 396 297 3886 6 9 1 3283 2 25 99 0 0 +"25754" 4 183 3 1 1 1 1 1 100 579 297 579 2348 1 4 0 0 5 95 282 0 1 +"25755" 4 183 3 1 1 1 2 0 100 0 100 195 2041 2 9 0 0 5 95 95 0 1 +"25756" 4 183 3 1 1 1 2 0 100 0 195 97 6549 4 10 1 1290 3 50 98 0 0 +"25757" 4 183 3 1 1 1 2 0 100 0 97 5 2987 3 1 0 0 5 95 92 1 0 +"25758" 4 183 3 1 1 1 2 0 100 0 5 10 5882 8 6 1 0 5 95 5 1 1 +"25759" 4 183 3 1 1 1 2 0 100 0 10 0 3784 5 7 1 0 5 95 10 0 0 +"25760" 4 183 3 1 1 1 3 1 100 64 100 125 3215 3 5 0 1724 2 25 25 0 1 +"25761" 4 183 3 1 1 1 3 1 100 64 125 31 1759 8 9 1 2186 4 75 94 0 0 +"25762" 4 183 3 1 1 1 3 1 100 64 31 60 2383 2 4 0 0 5 95 29 0 1 +"25763" 4 183 3 1 1 1 3 1 100 64 60 117 3539 6 3 1 0 5 95 57 1 1 +"25764" 4 183 3 1 1 1 3 1 100 64 117 228 5080 7 6 1 0 5 95 111 1 1 +"25765" 4 183 3 1 1 1 3 1 100 64 228 342 3308 4 2 1 2444 3 50 114 1 1 +"25766" 4 183 3 1 1 1 3 1 100 64 342 667 2093 1 8 0 0 5 95 325 0 1 +"25767" 4 183 3 1 1 1 3 1 100 64 667 33 2735 5 7 1 0 5 95 634 0 0 +"25768" 4 183 3 1 1 
1 3 1 100 64 33 64 3250 9 5 1 0 5 95 31 1 1 +"25769" 4 183 3 1 1 1 4 1 100 1795 100 175 2344 8 7 1 3334 4 75 75 1 1 +"25770" 4 183 3 1 1 1 4 1 100 1795 175 263 2549 3 10 0 1321 3 50 88 0 1 +"25771" 4 183 3 1 1 1 4 1 100 1795 263 66 1920 7 9 1 1147 4 75 197 0 0 +"25772" 4 183 3 1 1 1 4 1 100 1795 66 129 3761 9 1 1 0 5 95 63 1 1 +"25773" 4 183 3 1 1 1 4 1 100 1795 129 252 2204 2 3 0 0 5 95 123 0 1 +"25774" 4 183 3 1 1 1 4 1 100 1795 252 491 1881 1 8 0 0 5 95 239 0 1 +"25775" 4 183 3 1 1 1 4 1 100 1795 491 957 4525 5 4 1 0 5 95 466 1 1 +"25776" 4 183 3 1 1 1 4 1 100 1795 957 1436 4116 4 2 1 2870 3 50 479 1 1 +"25777" 4 183 3 1 1 1 4 1 100 1795 1436 1795 2802 6 3 1 1714 2 25 359 1 1 +"25778" 4 183 4 0 1 0 1 1 100 1 100 150 7035 2 7 0 569 3 50 50 0 1 +"25779" 4 183 4 0 1 0 1 1 100 1 150 293 2739 3 7 0 735 1 95 143 0 1 +"25780" 4 183 4 0 1 0 1 1 100 1 293 15 3489 2 8 1 568 1 95 278 0 0 +"25781" 4 183 4 0 1 0 1 1 100 1 15 1 1307 8 9 1 586 1 95 14 0 0 +"25782" 4 183 5 1 1 0 1 0 100 0 100 195 1281 1 3 0 474 1 95 95 0 1 +"25783" 4 183 5 1 1 0 1 0 100 0 195 97 1832 6 5 0 1527 3 50 98 1 0 +"25784" 4 183 5 1 1 0 1 0 100 0 97 189 1508 2 8 0 426 1 95 92 0 1 +"25785" 4 183 5 1 1 0 1 0 100 0 189 9 1506 8 9 1 384 1 95 180 0 0 +"25786" 4 183 5 1 1 0 1 0 100 0 9 0 1899 3 4 1 615 1 95 9 0 0 +"25787" 4 183 5 1 1 0 2 1 100 1037 100 195 2474 8 1 1 674 1 95 95 1 1 +"25788" 4 183 5 1 1 0 2 1 100 1037 195 380 1911 6 2 1 642 1 95 185 1 1 +"25789" 4 183 5 1 1 0 2 1 100 1037 380 19 3208 7 9 1 1093 1 95 361 0 0 +"25790" 4 183 5 1 1 0 2 1 100 1037 19 37 2318 2 10 0 1421 1 95 18 0 1 +"25791" 4 183 5 1 1 0 2 1 100 1037 37 72 2839 5 3 1 534 1 95 35 1 1 +"25792" 4 183 5 1 1 0 2 1 100 1037 72 140 3026 4 2 1 727 1 95 68 1 1 +"25793" 4 183 5 1 1 0 2 1 100 1037 140 273 3754 3 5 0 564 1 95 133 0 1 +"25794" 4 183 5 1 1 0 2 1 100 1037 273 532 3119 9 4 1 320 1 95 259 1 1 +"25795" 4 183 5 1 1 0 2 1 100 1037 532 1037 1478 1 7 0 477 1 95 505 0 1 +"25796" 4 183 5 1 1 0 3 0 100 1 100 195 1699 7 5 1 400 1 95 95 1 1 +"25797" 4 183 5 1 1 0 3 0 100 1 195 10 2387 2 1 0 791 1 95 185 1 0 +"25798" 4 183 5 1 1 0 3 0 100 1 10 20 2726 8 6 1 658 1 95 10 1 1 +"25799" 4 183 5 1 1 0 3 0 100 1 20 1 1336 4 7 1 1606 1 95 19 0 0 +"25800" 4 183 5 1 1 0 4 0 100 0 100 195 1516 2 3 0 658 1 95 95 0 1 +"25801" 4 183 5 1 1 0 4 0 100 0 195 380 4760 7 4 1 355 1 95 185 1 1 +"25802" 4 183 5 1 1 0 4 0 100 0 380 19 2735 3 1 0 571 1 95 361 1 0 +"25803" 4 183 5 1 1 0 4 0 100 0 19 37 2508 1 9 0 707 1 95 18 0 1 +"25804" 4 183 5 1 1 0 4 0 100 0 37 72 4230 8 7 1 530 1 95 35 1 1 +"25805" 4 183 5 1 1 0 4 0 100 0 72 140 2398 9 2 1 402 1 95 68 1 1 +"25806" 4 183 5 1 1 0 4 0 100 0 140 7 2092 5 6 1 390 1 95 133 0 0 +"25807" 4 183 5 1 1 0 4 0 100 0 7 0 2424 6 8 1 987 1 95 7 0 0 +"25808" 4 187 2 0 1 1 1 1 100 267 100 150 4404 8 3 1 1793 3 50 50 1 1 +"25809" 4 187 2 0 1 1 1 1 100 267 150 225 6697 3 7 0 1408 3 50 75 0 1 +"25810" 4 187 2 0 1 1 1 1 100 267 225 281 1754 8 2 1 3300 2 25 56 1 1 +"25811" 4 187 2 0 1 1 1 1 100 267 281 267 2180 2 1 0 1730 1 5 14 1 0 +"25812" 4 187 3 1 1 1 1 1 100 599 100 150 16493 9 7 1 4921 3 50 50 1 1 +"25813" 4 187 3 1 1 1 1 1 100 599 150 188 1708 4 8 0 3624 2 25 38 0 1 +"25814" 4 187 3 1 1 1 1 1 100 599 188 282 1641 8 2 1 1240 3 50 94 1 1 +"25815" 4 187 3 1 1 1 1 1 100 599 282 70 1483 2 1 0 634 4 75 212 1 0 +"25816" 4 187 3 1 1 1 1 1 100 599 70 137 9466 7 6 1 2533 5 95 67 1 1 +"25817" 4 187 3 1 1 1 1 1 100 599 137 240 3906 5 3 1 1558 4 75 103 1 1 +"25818" 4 187 3 1 1 1 1 1 100 599 240 420 1809 3 6 0 969 4 75 180 0 1 +"25819" 4 187 3 1 1 1 1 1 100 599 420 
399 2584 6 9 1 3779 1 5 21 0 0 +"25820" 4 187 3 1 1 1 1 1 100 599 399 599 5836 1 4 0 2228 3 50 200 0 1 +"25821" 4 187 3 1 1 1 2 1 100 174 100 50 4143 2 9 1 3575 3 50 50 0 0 +"25822" 4 187 3 1 1 1 2 1 100 174 50 88 2803 4 10 0 1012 4 75 38 0 1 +"25823" 4 187 3 1 1 1 2 1 100 174 88 66 2208 3 1 0 3452 2 25 22 1 0 +"25824" 4 187 3 1 1 1 2 1 100 174 66 116 2429 8 6 1 1088 4 75 50 1 1 +"25825" 4 187 3 1 1 1 2 1 100 174 116 29 4577 5 7 1 2053 4 75 87 0 0 +"25826" 4 187 3 1 1 1 2 1 100 174 29 44 2905 6 8 0 4532 3 50 15 0 1 +"25827" 4 187 3 1 1 1 2 1 100 174 44 66 2442 7 5 1 2508 3 50 22 1 1 +"25828" 4 187 3 1 1 1 2 1 100 174 66 116 2930 1 10 0 612 4 75 50 0 1 +"25829" 4 187 3 1 1 1 2 1 100 174 116 174 4884 9 3 1 1929 3 50 58 1 1 +"25830" 4 187 3 1 1 1 3 1 100 4 100 150 2574 3 5 0 1769 3 50 50 0 1 +"25831" 4 187 3 1 1 1 3 1 100 4 150 75 3722 8 9 1 2671 3 50 75 0 0 +"25832" 4 187 3 1 1 1 3 1 100 4 75 113 2420 2 4 0 3663 3 50 38 0 1 +"25833" 4 187 3 1 1 1 3 1 100 4 113 56 2606 6 3 0 1544 3 50 57 1 0 +"25834" 4 187 3 1 1 1 3 1 100 4 56 84 3275 7 6 1 1045 3 50 28 1 1 +"25835" 4 187 3 1 1 1 3 1 100 4 84 4 1848 4 2 0 1751 5 95 80 1 0 +"25836" 4 187 3 1 1 1 3 1 100 4 4 8 3361 1 8 0 1863 5 95 4 0 1 +"25837" 4 187 3 1 1 1 3 1 100 4 8 2 6804 5 7 1 1814 4 75 6 0 0 +"25838" 4 187 3 1 1 1 3 1 100 4 2 4 2062 9 5 1 2280 4 75 2 1 1 +"25839" 4 187 3 1 1 1 4 1 100 651 100 175 5816 8 7 1 745 4 75 75 1 1 +"25840" 4 187 3 1 1 1 4 1 100 651 175 263 1618 3 10 0 1699 3 50 88 0 1 +"25841" 4 187 3 1 1 1 4 1 100 651 263 197 2528 7 9 1 1269 2 25 66 0 0 +"25842" 4 187 3 1 1 1 4 1 100 651 197 296 2190 9 1 1 1957 3 50 99 1 1 +"25843" 4 187 3 1 1 1 4 1 100 651 296 370 1917 2 3 0 2646 2 25 74 0 1 +"25844" 4 187 3 1 1 1 4 1 100 651 370 463 2502 1 8 0 2804 2 25 93 0 1 +"25845" 4 187 3 1 1 1 4 1 100 651 463 579 1637 5 4 1 1772 2 25 116 1 1 +"25846" 4 187 3 1 1 1 4 1 100 651 579 434 1806 4 2 0 1405 2 25 145 1 0 +"25847" 4 187 3 1 1 1 4 1 100 651 434 651 1632 6 3 1 1535 3 50 217 1 1 +"25848" 4 187 4 0 1 0 1 1 100 16 100 150 5665 2 7 0 858 3 50 50 0 1 +"25849" 4 187 4 0 1 0 1 1 100 16 150 263 3886 3 7 0 994 2 75 113 0 1 +"25850" 4 187 4 0 1 0 1 1 100 16 263 329 2185 2 8 0 1103 4 25 66 0 1 +"25851" 4 187 4 0 1 0 1 1 100 16 329 16 1984 8 9 1 1070 1 95 313 0 0 +"25852" 4 187 5 1 1 0 1 1 100 320 100 195 9139 1 3 0 4261 1 95 95 0 1 +"25853" 4 187 5 1 1 0 1 1 100 320 195 341 1249 6 5 1 451 2 75 146 1 1 +"25854" 4 187 5 1 1 0 1 1 100 320 341 512 1515 2 8 0 866 3 50 171 0 1 +"25855" 4 187 5 1 1 0 1 1 100 320 512 256 1494 8 9 1 980 3 50 256 0 0 +"25856" 4 187 5 1 1 0 1 1 100 320 256 499 2643 3 4 0 850 1 95 243 0 1 +"25857" 4 187 5 1 1 0 1 1 100 320 499 374 1592 5 7 1 682 4 25 125 0 0 +"25858" 4 187 5 1 1 0 1 1 100 320 374 655 1831 7 4 1 342 2 75 281 1 1 +"25859" 4 187 5 1 1 0 1 1 100 320 655 164 1531 4 1 0 3336 2 75 491 1 0 +"25860" 4 187 5 1 1 0 1 1 100 320 164 320 1582 9 6 1 656 1 95 156 1 1 +"25861" 4 187 5 1 1 0 2 0 100 0 100 195 1525 8 1 1 626 1 95 95 1 1 +"25862" 4 187 5 1 1 0 2 0 100 0 195 49 2177 6 2 0 527 2 75 146 1 0 +"25863" 4 187 5 1 1 0 2 0 100 0 49 2 1235 7 9 1 869 1 95 47 0 0 +"25864" 4 187 5 1 1 0 2 0 100 0 2 4 1474 2 10 0 561 1 95 2 0 1 +"25865" 4 187 5 1 1 0 2 0 100 0 4 8 1139 5 3 1 675 1 95 4 1 1 +"25866" 4 187 5 1 1 0 2 0 100 0 8 0 888 4 2 0 987 1 95 8 1 0 +"25867" 4 187 5 1 1 0 3 1 100 76 100 195 1393 7 5 1 1013 1 95 95 1 1 +"25868" 4 187 5 1 1 0 3 1 100 76 195 10 1426 2 1 0 835 1 95 185 1 0 +"25869" 4 187 5 1 1 0 3 1 100 76 10 20 1249 8 6 1 939 1 95 10 1 1 +"25870" 4 187 5 1 1 0 3 1 100 76 20 35 1276 4 7 0 469 2 75 15 0 1 
+"25871" 4 187 5 1 1 0 3 1 100 76 35 61 1267 3 10 0 477 2 75 26 0 1 +"25872" 4 187 5 1 1 0 3 1 100 76 61 15 1171 6 8 1 569 2 75 46 0 0 +"25873" 4 187 5 1 1 0 3 1 100 76 15 26 1652 9 2 1 1387 2 75 11 1 1 +"25874" 4 187 5 1 1 0 3 1 100 76 26 39 1654 5 3 1 781 3 50 13 1 1 +"25875" 4 187 5 1 1 0 3 1 100 76 39 76 981 1 10 0 493 1 95 37 0 1 +"25876" 4 187 5 1 1 0 4 1 100 199 100 195 1184 2 3 0 605 1 95 95 0 1 +"25877" 4 187 5 1 1 0 4 1 100 199 195 380 989 7 4 1 627 1 95 185 1 1 +"25878" 4 187 5 1 1 0 4 1 100 199 380 285 995 3 1 0 970 4 25 95 1 0 +"25879" 4 187 5 1 1 0 4 1 100 199 285 556 1201 1 9 0 661 1 95 271 0 1 +"25880" 4 187 5 1 1 0 4 1 100 199 556 695 1443 8 7 1 2100 4 25 139 1 1 +"25881" 4 187 5 1 1 0 4 1 100 199 695 1355 1685 9 2 1 939 1 95 660 1 1 +"25882" 4 187 5 1 1 0 4 1 100 199 1355 2033 1244 5 6 0 1358 3 50 678 0 1 +"25883" 4 187 5 1 1 0 4 1 100 199 2033 102 1371 6 8 1 3023 1 95 1931 0 0 +"25884" 4 187 5 1 1 0 4 1 100 199 102 199 1044 4 7 0 680 1 95 97 0 1 +"25885" 4 189 2 0 1 1 1 1 100 770 100 150 10234 8 3 1 2441 3 50 50 1 1 +"25886" 4 189 2 0 1 1 1 1 100 770 150 293 14882 3 7 0 0 5 95 143 0 1 +"25887" 4 189 2 0 1 1 1 1 100 770 293 440 3051 8 2 1 2247 3 50 147 1 1 +"25888" 4 189 2 0 1 1 1 1 100 770 440 770 2962 2 1 1 899 4 75 330 1 1 +"25889" 4 189 3 1 1 1 1 1 100 248 100 150 21924 9 7 1 662 3 50 50 1 1 +"25890" 4 189 3 1 1 1 1 1 100 248 150 112 4068 4 8 1 1896 2 25 38 0 0 +"25891" 4 189 3 1 1 1 1 1 100 248 112 168 4812 8 2 1 1300 3 50 56 1 1 +"25892" 4 189 3 1 1 1 1 1 100 248 168 252 2347 2 1 1 1073 3 50 84 1 1 +"25893" 4 189 3 1 1 1 1 1 100 248 252 378 1779 7 6 1 1204 3 50 126 1 1 +"25894" 4 189 3 1 1 1 1 1 100 248 378 189 1928 5 3 0 795 3 50 189 1 0 +"25895" 4 189 3 1 1 1 1 1 100 248 189 331 1986 3 6 0 2978 4 75 142 0 1 +"25896" 4 189 3 1 1 1 1 1 100 248 331 497 2113 6 9 0 1498 3 50 166 0 1 +"25897" 4 189 3 1 1 1 1 1 100 248 497 248 1624 1 4 1 656 3 50 249 0 0 +"25898" 4 189 3 1 1 1 2 0 100 0 100 25 2111 2 9 1 1272 4 75 75 0 0 +"25899" 4 189 3 1 1 1 2 0 100 0 25 12 1825 4 10 1 2320 3 50 13 0 0 +"25900" 4 189 3 1 1 1 2 0 100 0 12 23 1446 3 1 1 0 5 95 11 1 1 +"25901" 4 189 3 1 1 1 2 0 100 0 23 45 2815 8 6 1 0 5 95 22 1 1 +"25902" 4 189 3 1 1 1 2 0 100 0 45 2 1903 5 7 1 0 5 95 43 0 0 +"25903" 4 189 3 1 1 1 2 0 100 0 2 0 1954 6 8 1 0 5 95 2 0 0 +"25904" 4 189 3 1 1 1 3 1 100 65 100 150 1892 3 5 0 792 3 50 50 0 1 +"25905" 4 189 3 1 1 1 3 1 100 65 150 225 2287 8 9 0 1368 3 50 75 0 1 +"25906" 4 189 3 1 1 1 3 1 100 65 225 439 1988 2 4 0 3260 5 95 214 0 1 +"25907" 4 189 3 1 1 1 3 1 100 65 439 659 2030 6 3 1 1019 3 50 220 1 1 +"25908" 4 189 3 1 1 1 3 1 100 65 659 989 2176 7 6 1 1958 3 50 330 1 1 +"25909" 4 189 3 1 1 1 3 1 100 65 989 1731 2294 4 2 1 679 4 75 742 1 1 +"25910" 4 189 3 1 1 1 3 1 100 65 1731 87 2996 1 8 1 0 5 95 1644 0 0 +"25911" 4 189 3 1 1 1 3 1 100 65 87 131 1499 5 7 0 594 3 50 44 0 1 +"25912" 4 189 3 1 1 1 3 1 100 65 131 65 2075 9 5 0 294 3 50 66 1 0 +"25913" 4 189 3 1 1 1 4 1 100 425 100 150 2375 8 7 1 245 3 50 50 1 1 +"25914" 4 189 3 1 1 1 4 1 100 425 150 225 1430 3 10 0 872 3 50 75 0 1 +"25915" 4 189 3 1 1 1 4 1 100 425 225 112 1036 7 9 1 336 3 50 113 0 0 +"25916" 4 189 3 1 1 1 4 1 100 425 112 168 1093 9 1 1 983 3 50 56 1 1 +"25917" 4 189 3 1 1 1 4 1 100 425 168 252 1469 2 3 0 997 3 50 84 0 1 +"25918" 4 189 3 1 1 1 4 1 100 425 252 378 1491 1 8 0 329 3 50 126 0 1 +"25919" 4 189 3 1 1 1 4 1 100 425 378 567 1735 5 4 1 731 3 50 189 1 1 +"25920" 4 189 3 1 1 1 4 1 100 425 567 283 1541 4 2 0 1354 3 50 284 1 0 +"25921" 4 189 3 1 1 1 4 1 100 425 283 425 1374 6 3 1 467 3 50 142 
1 1 +"25922" 4 189 4 0 1 0 1 1 100 1 100 150 6586 2 7 0 319 3 50 50 0 1 +"25923" 4 189 4 0 1 0 1 1 100 1 150 7 2589 3 7 1 799 1 95 143 0 0 +"25924" 4 189 4 0 1 0 1 1 100 1 7 14 1814 2 8 0 1398 1 95 7 0 1 +"25925" 4 189 4 0 1 0 1 1 100 1 14 1 1416 8 9 1 635 1 95 13 0 0 +"25926" 4 189 5 1 1 0 1 0 100 0 100 195 2760 1 3 0 723 1 95 95 0 1 +"25927" 4 189 5 1 1 0 1 0 100 0 195 293 1845 6 5 1 988 3 50 98 1 1 +"25928" 4 189 5 1 1 0 1 0 100 0 293 571 1362 2 8 0 623 1 95 278 0 1 +"25929" 4 189 5 1 1 0 1 0 100 0 571 29 1198 8 9 1 339 1 95 542 0 0 +"25930" 4 189 5 1 1 0 1 0 100 0 29 57 1088 3 4 0 410 1 95 28 0 1 +"25931" 4 189 5 1 1 0 1 0 100 0 57 3 1059 5 7 1 663 1 95 54 0 0 +"25932" 4 189 5 1 1 0 1 0 100 0 3 6 988 7 4 1 715 1 95 3 1 1 +"25933" 4 189 5 1 1 0 1 0 100 0 6 0 942 4 1 0 1063 1 95 6 1 0 +"25934" 4 189 5 1 1 0 2 0 100 1 100 195 1474 8 1 1 537 1 95 95 1 1 +"25935" 4 189 5 1 1 0 2 0 100 1 195 97 1332 6 2 0 531 3 50 98 1 0 +"25936" 4 189 5 1 1 0 2 0 100 1 97 5 1071 7 9 1 457 1 95 92 0 0 +"25937" 4 189 5 1 1 0 2 0 100 1 5 10 1065 2 10 0 660 1 95 5 0 1 +"25938" 4 189 5 1 1 0 2 0 100 1 10 20 874 5 3 1 686 1 95 10 1 1 +"25939" 4 189 5 1 1 0 2 0 100 1 20 1 1027 4 2 0 272 1 95 19 1 0 +"25940" 4 189 5 1 1 0 3 0 100 1 100 195 1437 7 5 1 254 1 95 95 1 1 +"25941" 4 189 5 1 1 0 3 0 100 1 195 10 2127 2 1 0 255 1 95 185 1 0 +"25942" 4 189 5 1 1 0 3 0 100 1 10 20 1056 8 6 1 763 1 95 10 1 1 +"25943" 4 189 5 1 1 0 3 0 100 1 20 1 1436 4 7 1 1597 1 95 19 0 0 +"25944" 4 189 5 1 1 0 4 1 100 938 100 195 2407 2 3 0 1034 1 95 95 0 1 +"25945" 4 189 5 1 1 0 4 1 100 938 195 293 2030 7 4 1 1084 3 50 98 1 1 +"25946" 4 189 5 1 1 0 4 1 100 938 293 146 1381 3 1 0 1010 3 50 147 1 0 +"25947" 4 189 5 1 1 0 4 1 100 938 146 219 1271 1 9 0 344 3 50 73 0 1 +"25948" 4 189 5 1 1 0 4 1 100 938 219 427 1365 8 7 1 642 1 95 208 1 1 +"25949" 4 189 5 1 1 0 4 1 100 938 427 641 1744 9 2 1 275 3 50 214 1 1 +"25950" 4 189 5 1 1 0 4 1 100 938 641 962 1499 5 6 0 1026 3 50 321 0 1 +"25951" 4 189 5 1 1 0 4 1 100 938 962 481 2382 6 8 1 522 3 50 481 0 0 +"25952" 4 189 5 1 1 0 4 1 100 938 481 938 1046 4 7 0 1066 1 95 457 0 1 +"25953" 4 196 2 0 1 0 1 1 100 33 100 175 13459 2 7 0 1392 2 75 75 0 1 +"25954" 4 196 2 0 1 0 1 1 100 33 175 341 3699 3 7 0 2657 1 95 166 0 1 +"25955" 4 196 2 0 1 0 1 1 100 33 341 665 3223 2 8 0 2619 1 95 324 0 1 +"25956" 4 196 2 0 1 0 1 1 100 33 665 33 1659 8 9 1 1210 1 95 632 0 0 +"25957" 4 196 3 1 1 0 1 1 100 1037 100 195 3490 1 3 0 849 1 95 95 0 1 +"25958" 4 196 3 1 1 0 1 1 100 1037 195 380 2646 6 5 1 981 1 95 185 1 1 +"25959" 4 196 3 1 1 0 1 1 100 1037 380 741 1588 2 8 0 997 1 95 361 0 1 +"25960" 4 196 3 1 1 0 1 1 100 1037 741 37 1755 8 9 1 1237 1 95 704 0 0 +"25961" 4 196 3 1 1 0 1 1 100 1037 37 72 1671 3 4 0 1357 1 95 35 0 1 +"25962" 4 196 3 1 1 0 1 1 100 1037 72 140 2378 5 7 0 1174 1 95 68 0 1 +"25963" 4 196 3 1 1 0 1 1 100 1037 140 273 1940 7 4 1 2245 1 95 133 1 1 +"25964" 4 196 3 1 1 0 1 1 100 1037 273 532 2519 4 1 1 1125 1 95 259 1 1 +"25965" 4 196 3 1 1 0 1 1 100 1037 532 1037 1964 9 6 1 1444 1 95 505 1 1 +"25966" 4 196 3 1 1 0 2 1 100 31 100 195 2915 8 1 1 1565 1 95 95 1 1 +"25967" 4 196 3 1 1 0 2 1 100 31 195 380 2688 6 2 1 870 1 95 185 1 1 +"25968" 4 196 3 1 1 0 2 1 100 31 380 741 1881 7 9 0 941 1 95 361 0 1 +"25969" 4 196 3 1 1 0 2 1 100 31 741 1445 1829 2 10 0 1012 1 95 704 0 1 +"25970" 4 196 3 1 1 0 2 1 100 31 1445 72 3217 5 3 0 1877 1 95 1373 1 0 +"25971" 4 196 3 1 1 0 2 1 100 31 72 4 2057 4 2 0 1268 1 95 68 1 0 +"25972" 4 196 3 1 1 0 2 1 100 31 4 8 1101 3 5 0 926 1 95 4 0 1 +"25973" 4 196 3 1 1 0 2 1 100 31 8 
16 1631 9 4 1 1031 1 95 8 1 1 +"25974" 4 196 3 1 1 0 2 1 100 31 16 31 1715 1 7 0 2863 1 95 15 0 1 +"25975" 4 196 3 1 1 0 3 0 100 0 100 195 1893 7 5 1 2816 1 95 95 1 1 +"25976" 4 196 3 1 1 0 3 0 100 0 195 10 1352 2 1 0 2133 1 95 185 1 0 +"25977" 4 196 3 1 1 0 3 0 100 0 10 20 1532 8 6 1 1049 1 95 10 1 1 +"25978" 4 196 3 1 1 0 3 0 100 0 20 39 1668 4 7 0 1131 1 95 19 0 1 +"25979" 4 196 3 1 1 0 3 0 100 0 39 76 1754 3 10 0 1124 1 95 37 0 1 +"25980" 4 196 3 1 1 0 3 0 100 0 76 4 2172 6 8 1 1986 1 95 72 0 0 +"25981" 4 196 3 1 1 0 3 0 100 0 4 8 1209 9 2 1 989 1 95 4 1 1 +"25982" 4 196 3 1 1 0 3 0 100 0 8 0 4371 5 3 0 1097 1 95 8 1 0 +"25983" 4 196 3 1 1 0 4 0 100 0 100 195 3581 2 3 0 849 1 95 95 0 1 +"25984" 4 196 3 1 1 0 4 0 100 0 195 380 1358 7 4 1 1130 1 95 185 1 1 +"25985" 4 196 3 1 1 0 4 0 100 0 380 19 2274 3 1 0 1195 1 95 361 1 0 +"25986" 4 196 3 1 1 0 4 0 100 0 19 37 2288 1 9 0 1865 1 95 18 0 1 +"25987" 4 196 3 1 1 0 4 0 100 0 37 72 1317 8 7 1 1252 1 95 35 1 1 +"25988" 4 196 3 1 1 0 4 0 100 0 72 140 1710 9 2 1 1162 1 95 68 1 1 +"25989" 4 196 3 1 1 0 4 0 100 0 140 7 2631 5 6 1 928 1 95 133 0 0 +"25990" 4 196 3 1 1 0 4 0 100 0 7 0 1872 6 8 1 1197 1 95 7 0 0 +"25991" 4 196 4 0 1 1 1 1 100 169 100 150 3349 8 3 1 420 3 50 50 1 1 +"25992" 4 196 4 0 1 1 1 1 100 169 150 225 2141 3 7 0 1589 3 50 75 0 1 +"25993" 4 196 4 0 1 1 1 1 100 169 225 338 1655 8 2 1 1045 3 50 113 1 1 +"25994" 4 196 4 0 1 1 1 1 100 169 338 169 2841 2 1 0 1192 3 50 169 1 0 +"25995" 4 196 5 1 1 1 1 1 100 233 100 150 2434 9 7 1 1604 3 50 50 1 1 +"25996" 4 196 5 1 1 1 1 1 100 233 150 142 2461 4 8 1 1069 1 5 8 0 0 +"25997" 4 196 5 1 1 1 1 1 100 233 142 249 1736 8 2 1 780 4 75 107 1 1 +"25998" 4 196 5 1 1 1 1 1 100 233 249 124 1796 2 1 0 452 3 50 125 1 0 +"25999" 4 196 5 1 1 1 1 1 100 233 124 155 1860 7 6 1 1433 2 25 31 1 1 +"26000" 4 196 5 1 1 1 1 1 100 233 155 147 1994 5 3 0 457 1 5 8 1 0 +"26001" 4 196 5 1 1 1 1 1 100 233 147 140 2480 3 6 1 1127 1 5 7 0 0 +"26002" 4 196 5 1 1 1 1 1 100 233 140 133 1351 6 9 1 988 1 5 7 0 0 +"26003" 4 196 5 1 1 1 1 1 100 233 133 233 1283 1 4 0 1148 4 75 100 0 1 +"26004" 4 196 5 1 1 1 2 1 100 435 100 175 1844 2 9 0 1979 4 75 75 0 1 +"26005" 4 196 5 1 1 1 2 1 100 435 175 219 2202 4 10 0 1191 2 25 44 0 1 +"26006" 4 196 5 1 1 1 2 1 100 435 219 164 2732 3 1 0 455 2 25 55 1 0 +"26007" 4 196 5 1 1 1 2 1 100 435 164 246 1311 8 6 1 1758 3 50 82 1 1 +"26008" 4 196 5 1 1 1 2 1 100 435 246 234 5496 5 7 1 896 1 5 12 0 0 +"26009" 4 196 5 1 1 1 2 1 100 435 234 222 1443 6 8 1 1044 1 5 12 0 0 +"26010" 4 196 5 1 1 1 2 1 100 435 222 278 1663 7 5 1 1070 2 25 56 1 1 +"26011" 4 196 5 1 1 1 2 1 100 435 278 348 1768 1 10 0 1586 2 25 70 0 1 +"26012" 4 196 5 1 1 1 2 1 100 435 348 435 1340 9 3 1 1313 2 25 87 1 1 +"26013" 4 196 5 1 1 1 3 1 100 114 100 125 2699 3 5 0 2232 2 25 25 0 1 +"26014" 4 196 5 1 1 1 3 1 100 114 125 31 1532 8 9 1 681 4 75 94 0 0 +"26015" 4 196 5 1 1 1 3 1 100 114 31 54 1718 2 4 0 410 4 75 23 0 1 +"26016" 4 196 5 1 1 1 3 1 100 114 54 57 2444 6 3 1 805 1 5 3 1 1 +"26017" 4 196 5 1 1 1 3 1 100 114 57 71 1580 7 6 1 2398 2 25 14 1 1 +"26018" 4 196 5 1 1 1 3 1 100 114 71 67 7482 4 2 0 1787 1 5 4 1 0 +"26019" 4 196 5 1 1 1 3 1 100 114 67 101 1932 1 8 0 1110 3 50 34 0 1 +"26020" 4 196 5 1 1 1 3 1 100 114 101 76 2846 5 7 1 1044 2 25 25 0 0 +"26021" 4 196 5 1 1 1 3 1 100 114 76 114 4592 9 5 1 2051 3 50 38 1 1 +"26022" 4 196 5 1 1 1 4 1 100 377 100 150 2607 8 7 1 1929 3 50 50 1 1 +"26023" 4 196 5 1 1 1 4 1 100 377 150 188 1779 3 10 0 1771 2 25 38 0 1 +"26024" 4 196 5 1 1 1 4 1 100 377 188 141 2916 7 9 1 1263 2 25 47 0 
0 +"26025" 4 196 5 1 1 1 4 1 100 377 141 212 1280 9 1 1 433 3 50 71 1 1 +"26026" 4 196 5 1 1 1 4 1 100 377 212 265 2778 2 3 0 1956 2 25 53 0 1 +"26027" 4 196 5 1 1 1 4 1 100 377 265 398 1652 1 8 0 1068 3 50 133 0 1 +"26028" 4 196 5 1 1 1 4 1 100 377 398 378 3998 5 4 0 1293 1 5 20 1 0 +"26029" 4 196 5 1 1 1 4 1 100 377 378 359 2349 4 2 0 1059 1 5 19 1 0 +"26030" 4 196 5 1 1 1 4 1 100 377 359 377 1501 6 3 1 1060 1 5 18 1 1 +"26031" 4 205 2 0 1 0 1 1 100 22 100 150 20268 2 7 0 2445 3 50 50 0 1 +"26032" 4 205 2 0 1 0 1 1 100 22 150 225 3638 3 7 0 1350 3 50 75 0 1 +"26033" 4 205 2 0 1 0 1 1 100 22 225 439 4659 2 8 0 2555 1 95 214 0 1 +"26034" 4 205 2 0 1 0 1 1 100 22 439 22 1854 8 9 1 1091 1 95 417 0 0 +"26035" 4 205 3 1 1 0 1 0 100 1 100 195 2730 1 3 0 1341 1 95 95 0 1 +"26036" 4 205 3 1 1 0 1 0 100 1 195 10 2490 6 5 0 1209 1 95 185 1 0 +"26037" 4 205 3 1 1 0 1 0 100 1 10 20 2101 2 8 0 1286 1 95 10 0 1 +"26038" 4 205 3 1 1 0 1 0 100 1 20 1 1630 8 9 1 983 1 95 19 0 0 +"26039" 4 205 3 1 1 0 2 0 100 0 100 195 2753 8 1 1 899 1 95 95 1 1 +"26040" 4 205 3 1 1 0 2 0 100 0 195 10 3726 6 2 0 1290 1 95 185 1 0 +"26041" 4 205 3 1 1 0 2 0 100 0 10 0 2130 7 9 1 1442 1 95 10 0 0 +"26042" 4 205 3 1 1 0 3 0 100 0 100 195 4575 7 5 1 1659 1 95 95 1 1 +"26043" 4 205 3 1 1 0 3 0 100 0 195 10 1794 2 1 0 2258 1 95 185 1 0 +"26044" 4 205 3 1 1 0 3 0 100 0 10 0 2932 8 6 0 1269 1 95 10 1 0 +"26045" 4 205 3 1 1 0 4 1 100 1 100 195 2729 2 3 0 1029 1 95 95 0 1 +"26046" 4 205 3 1 1 0 4 1 100 1 195 380 1785 7 4 1 1034 1 95 185 1 1 +"26047" 4 205 3 1 1 0 4 1 100 1 380 19 1950 3 1 0 1239 1 95 361 1 0 +"26048" 4 205 3 1 1 0 4 1 100 1 19 37 2497 1 9 0 1304 1 95 18 0 1 +"26049" 4 205 3 1 1 0 4 1 100 1 37 72 1631 8 7 1 2615 1 95 35 1 1 +"26050" 4 205 3 1 1 0 4 1 100 1 72 140 2912 9 2 1 1348 1 95 68 1 1 +"26051" 4 205 3 1 1 0 4 1 100 1 140 7 3817 5 6 1 1601 1 95 133 0 0 +"26052" 4 205 3 1 1 0 4 1 100 1 7 14 2570 6 8 0 1269 1 95 7 0 1 +"26053" 4 205 3 1 1 0 4 1 100 1 14 1 3734 4 7 1 1226 1 95 13 0 0 +"26054" 4 205 4 0 1 1 1 1 100 230 100 175 29900 8 3 1 1139 4 75 75 1 1 +"26055" 4 205 4 0 1 1 1 1 100 230 175 263 3090 3 7 0 1620 3 50 88 0 1 +"26056" 4 205 4 0 1 1 1 1 100 230 263 460 1805 8 2 1 1476 4 75 197 1 1 +"26057" 4 205 4 0 1 1 1 1 100 230 460 230 2427 2 1 0 1333 3 50 230 1 0 +"26058" 4 205 5 1 1 1 1 0 100 0 100 195 2131 9 7 1 0 5 95 95 1 1 +"26059" 4 205 5 1 1 1 1 0 100 0 195 293 2467 4 8 0 1435 3 50 98 0 1 +"26060" 4 205 5 1 1 1 1 0 100 0 293 571 2332 8 2 1 0 5 95 278 1 1 +"26061" 4 205 5 1 1 1 1 0 100 0 571 29 1931 2 1 0 1493 5 95 542 1 0 +"26062" 4 205 5 1 1 1 1 0 100 0 29 57 2844 7 6 1 0 5 95 28 1 1 +"26063" 4 205 5 1 1 1 1 0 100 0 57 3 4138 5 3 0 0 5 95 54 1 0 +"26064" 4 205 5 1 1 1 1 0 100 0 3 6 3715 3 6 0 0 5 95 3 0 1 +"26065" 4 205 5 1 1 1 1 0 100 0 6 0 4048 6 9 1 0 5 95 6 0 0 +"26066" 4 205 5 1 1 1 2 0 100 0 100 195 2739 2 9 0 0 5 95 95 0 1 +"26067" 4 205 5 1 1 1 2 0 100 0 195 10 2638 4 10 1 0 5 95 185 0 0 +"26068" 4 205 5 1 1 1 2 0 100 0 10 0 2182 3 1 0 0 5 95 10 1 0 +"26069" 4 205 5 1 1 1 3 1 100 31 100 195 1988 3 5 0 0 5 95 95 0 1 +"26070" 4 205 5 1 1 1 3 1 100 31 195 10 2044 8 9 1 0 5 95 185 0 0 +"26071" 4 205 5 1 1 1 3 1 100 31 10 20 2448 2 4 0 0 5 95 10 0 1 +"26072" 4 205 5 1 1 1 3 1 100 31 20 39 1497 6 3 1 0 5 95 19 1 1 +"26073" 4 205 5 1 1 1 3 1 100 31 39 76 2039 7 6 1 0 5 95 37 1 1 +"26074" 4 205 5 1 1 1 3 1 100 31 76 4 1828 4 2 0 0 5 95 72 1 0 +"26075" 4 205 5 1 1 1 3 1 100 31 4 8 2083 1 8 0 0 5 95 4 0 1 +"26076" 4 205 5 1 1 1 3 1 100 31 8 16 4157 5 7 0 0 5 95 8 0 1 +"26077" 4 205 5 1 1 1 3 1 100 31 16 31 
3349 9 5 1 0 5 95 15 1 1 +"26078" 4 205 5 1 1 1 4 1 100 1037 100 195 3038 8 7 1 0 5 95 95 1 1 +"26079" 4 205 5 1 1 1 4 1 100 1037 195 380 1651 3 10 0 0 5 95 185 0 1 +"26080" 4 205 5 1 1 1 4 1 100 1037 380 19 2106 7 9 1 0 5 95 361 0 0 +"26081" 4 205 5 1 1 1 4 1 100 1037 19 37 2124 9 1 1 0 5 95 18 1 1 +"26082" 4 205 5 1 1 1 4 1 100 1037 37 72 3316 2 3 0 0 5 95 35 0 1 +"26083" 4 205 5 1 1 1 4 1 100 1037 72 140 2100 1 8 0 0 5 95 68 0 1 +"26084" 4 205 5 1 1 1 4 1 100 1037 140 273 5262 5 4 1 0 5 95 133 1 1 +"26085" 4 205 5 1 1 1 4 1 100 1037 273 532 5417 4 2 1 0 5 95 259 1 1 +"26086" 4 205 5 1 1 1 4 1 100 1037 532 1037 4229 6 3 1 0 5 95 505 1 1 +"26087" 4 211 2 0 1 0 1 1 100 0 100 150 6734 2 7 0 627 3 50 50 0 1 +"26088" 4 211 2 0 1 0 1 1 100 0 150 112 4106 3 7 1 2029 4 25 38 0 0 +"26089" 4 211 2 0 1 0 1 1 100 0 112 6 3602 2 8 1 2236 1 95 106 0 0 +"26090" 4 211 2 0 1 0 1 1 100 0 6 0 1879 8 9 1 1619 1 95 6 0 0 +"26091" 4 211 3 1 1 0 1 0 100 0 100 5 1906 1 3 1 1429 1 95 95 0 0 +"26092" 4 211 3 1 1 0 1 0 100 0 5 10 5830 6 5 1 942 1 95 5 1 1 +"26093" 4 211 3 1 1 0 1 0 100 0 10 0 1286 2 8 1 1086 1 95 10 0 0 +"26094" 4 211 3 1 1 0 2 0 100 0 100 75 2040 8 1 0 1021 4 25 25 1 0 +"26095" 4 211 3 1 1 0 2 0 100 0 75 146 1265 6 2 1 1474 1 95 71 1 1 +"26096" 4 211 3 1 1 0 2 0 100 0 146 7 2114 7 9 1 1051 1 95 139 0 0 +"26097" 4 211 3 1 1 0 2 0 100 0 7 0 3566 2 10 1 1002 1 95 7 0 0 +"26098" 4 211 3 1 1 0 3 0 100 1 100 5 1236 7 5 0 1046 1 95 95 1 0 +"26099" 4 211 3 1 1 0 3 0 100 1 5 10 3074 2 1 1 301 1 95 5 1 1 +"26100" 4 211 3 1 1 0 3 0 100 1 10 20 3067 8 6 1 1415 1 95 10 1 1 +"26101" 4 211 3 1 1 0 3 0 100 1 20 1 2609 4 7 1 402 1 95 19 0 0 +"26102" 4 211 3 1 1 0 4 0 100 1 100 195 1447 2 3 0 456 1 95 95 0 1 +"26103" 4 211 3 1 1 0 4 0 100 1 195 10 2167 7 4 0 774 1 95 185 1 0 +"26104" 4 211 3 1 1 0 4 0 100 1 10 20 2398 3 1 1 753 1 95 10 1 1 +"26105" 4 211 3 1 1 0 4 0 100 1 20 1 1643 1 9 1 914 1 95 19 0 0 +"26106" 4 211 4 0 1 1 1 1 100 11 100 105 1770 8 3 1 2284 1 5 5 1 1 +"26107" 4 211 4 0 1 1 1 1 100 11 105 110 3212 3 7 0 1657 1 5 5 0 1 +"26108" 4 211 4 0 1 1 1 1 100 11 110 215 3255 8 2 1 0 5 95 105 1 1 +"26109" 4 211 4 0 1 1 1 1 100 11 215 11 2834 2 1 0 2673 5 95 204 1 0 +"26110" 4 211 5 1 1 1 1 1 100 336 100 195 1422 9 7 1 0 5 95 95 1 1 +"26111" 4 211 5 1 1 1 1 1 100 336 195 205 4424 4 8 0 2403 1 5 10 0 1 +"26112" 4 211 5 1 1 1 1 1 100 336 205 400 2022 8 2 1 0 5 95 195 1 1 +"26113" 4 211 5 1 1 1 1 1 100 336 400 200 2588 2 1 0 2246 3 50 200 1 0 +"26114" 4 211 5 1 1 1 1 1 100 336 200 300 6097 7 6 1 3741 3 50 100 1 1 +"26115" 4 211 5 1 1 1 1 1 100 336 300 315 5272 5 3 1 1459 1 5 15 1 1 +"26116" 4 211 5 1 1 1 1 1 100 336 315 299 8067 3 6 1 1169 1 5 16 0 0 +"26117" 4 211 5 1 1 1 1 1 100 336 299 224 2942 6 9 1 1040 2 25 75 0 0 +"26118" 4 211 5 1 1 1 1 1 100 336 224 336 1716 1 4 0 2200 3 50 112 0 1 +"26119" 4 211 5 1 1 1 2 1 100 819 100 195 1512 2 9 0 0 5 95 95 0 1 +"26120" 4 211 5 1 1 1 2 1 100 819 195 293 3246 4 10 0 2690 3 50 98 0 1 +"26121" 4 211 5 1 1 1 2 1 100 819 293 278 5279 3 1 0 337 1 5 15 1 0 +"26122" 4 211 5 1 1 1 2 1 100 819 278 417 3638 8 6 1 1263 3 50 139 1 1 +"26123" 4 211 5 1 1 1 2 1 100 819 417 438 5479 5 7 0 400 1 5 21 0 1 +"26124" 4 211 5 1 1 1 2 1 100 819 438 416 4635 6 8 1 380 1 5 22 0 0 +"26125" 4 211 5 1 1 1 2 1 100 819 416 437 2263 7 5 1 465 1 5 21 1 1 +"26126" 4 211 5 1 1 1 2 1 100 819 437 546 1712 1 10 0 957 2 25 109 0 1 +"26127" 4 211 5 1 1 1 2 1 100 819 546 819 2276 9 3 1 1149 3 50 273 1 1 +"26128" 4 211 5 1 1 1 3 1 100 603 100 195 2368 3 5 0 0 5 95 95 0 1 +"26129" 4 211 5 1 1 1 3 1 100 603 
195 97 3653 8 9 1 1629 3 50 98 0 0 +"26130" 4 211 5 1 1 1 3 1 100 603 97 189 2668 2 4 0 0 5 95 92 0 1 +"26131" 4 211 5 1 1 1 3 1 100 603 189 198 4923 6 3 1 1710 1 5 9 1 1 +"26132" 4 211 5 1 1 1 3 1 100 603 198 248 2202 7 6 1 1360 2 25 50 1 1 +"26133" 4 211 5 1 1 1 3 1 100 603 248 260 6356 4 2 1 235 1 5 12 1 1 +"26134" 4 211 5 1 1 1 3 1 100 603 260 507 2021 1 8 0 0 5 95 247 0 1 +"26135" 4 211 5 1 1 1 3 1 100 603 507 482 5803 5 7 1 1764 1 5 25 0 0 +"26136" 4 211 5 1 1 1 3 1 100 603 482 603 1805 9 5 1 1461 2 25 121 1 1 +"26137" 4 211 5 1 1 1 4 1 100 1519 100 195 1998 8 7 1 0 5 95 95 1 1 +"26138" 4 211 5 1 1 1 4 1 100 1519 195 244 2596 3 10 0 1246 2 25 49 0 1 +"26139" 4 211 5 1 1 1 4 1 100 1519 244 232 3152 7 9 1 1463 1 5 12 0 0 +"26140" 4 211 5 1 1 1 4 1 100 1519 232 452 2047 9 1 1 0 5 95 220 1 1 +"26141" 4 211 5 1 1 1 4 1 100 1519 452 565 3307 2 3 0 339 2 25 113 0 1 +"26142" 4 211 5 1 1 1 4 1 100 1519 565 1102 3200 1 8 0 0 5 95 537 0 1 +"26143" 4 211 5 1 1 1 4 1 100 1519 1102 1157 2861 5 4 1 667 1 5 55 1 1 +"26144" 4 211 5 1 1 1 4 1 100 1519 1157 1215 2728 4 2 1 180 1 5 58 1 1 +"26145" 4 211 5 1 1 1 4 1 100 1519 1215 1519 1665 6 3 1 504 2 25 304 1 1 +"26146" 4 213 2 0 1 0 1 1 100 30 100 175 11561 2 7 0 1585 2 75 75 0 1 +"26147" 4 213 2 0 1 0 1 1 100 30 175 306 4402 3 7 0 839 2 75 131 0 1 +"26148" 4 213 2 0 1 0 1 1 100 30 306 597 2596 2 8 0 1885 1 95 291 0 1 +"26149" 4 213 2 0 1 0 1 1 100 30 597 30 2657 8 9 1 2960 1 95 567 0 0 +"26150" 4 213 3 1 1 0 1 0 100 0 100 195 1949 1 3 0 1918 1 95 95 0 1 +"26151" 4 213 3 1 1 0 1 0 100 0 195 380 2848 6 5 1 2110 1 95 185 1 1 +"26152" 4 213 3 1 1 0 1 0 100 0 380 741 2407 2 8 0 2610 1 95 361 0 1 +"26153" 4 213 3 1 1 0 1 0 100 0 741 37 2027 8 9 1 3192 1 95 704 0 0 +"26154" 4 213 3 1 1 0 1 0 100 0 37 72 2189 3 4 0 4442 1 95 35 0 1 +"26155" 4 213 3 1 1 0 1 0 100 0 72 4 4548 5 7 1 1121 1 95 68 0 0 +"26156" 4 213 3 1 1 0 1 0 100 0 4 8 2537 7 4 1 2974 1 95 4 1 1 +"26157" 4 213 3 1 1 0 1 0 100 0 8 0 2365 4 1 0 3026 1 95 8 1 0 +"26158" 4 213 3 1 1 0 2 0 100 0 100 195 2757 8 1 1 2576 1 95 95 1 1 +"26159" 4 213 3 1 1 0 2 0 100 0 195 10 2162 6 2 0 1009 1 95 185 1 0 +"26160" 4 213 3 1 1 0 2 0 100 0 10 0 1758 7 9 1 1080 1 95 10 0 0 +"26161" 4 213 3 1 1 0 3 0 100 0 100 195 1929 7 5 1 1283 1 95 95 1 1 +"26162" 4 213 3 1 1 0 3 0 100 0 195 10 2315 2 1 0 1266 1 95 185 1 0 +"26163" 4 213 3 1 1 0 3 0 100 0 10 20 1755 8 6 1 1165 1 95 10 1 1 +"26164" 4 213 3 1 1 0 3 0 100 0 20 39 2123 4 7 0 1044 1 95 19 0 1 +"26165" 4 213 3 1 1 0 3 0 100 0 39 76 2007 3 10 0 1101 1 95 37 0 1 +"26166" 4 213 3 1 1 0 3 0 100 0 76 4 1798 6 8 1 1086 1 95 72 0 0 +"26167" 4 213 3 1 1 0 3 0 100 0 4 8 1463 9 2 1 1567 1 95 4 1 1 +"26168" 4 213 3 1 1 0 3 0 100 0 8 0 1794 5 3 0 1074 1 95 8 1 0 +"26169" 4 213 3 1 1 0 4 1 100 1 100 195 2317 2 3 0 1188 1 95 95 0 1 +"26170" 4 213 3 1 1 0 4 1 100 1 195 380 3817 7 4 1 1137 1 95 185 1 1 +"26171" 4 213 3 1 1 0 4 1 100 1 380 741 1832 3 1 1 751 1 95 361 1 1 +"26172" 4 213 3 1 1 0 4 1 100 1 741 1445 2689 1 9 0 726 1 95 704 0 1 +"26173" 4 213 3 1 1 0 4 1 100 1 1445 2818 1529 8 7 1 806 1 95 1373 1 1 +"26174" 4 213 3 1 1 0 4 1 100 1 2818 141 2612 9 2 0 717 1 95 2677 1 0 +"26175" 4 213 3 1 1 0 4 1 100 1 141 7 1256 5 6 1 1026 1 95 134 0 0 +"26176" 4 213 3 1 1 0 4 1 100 1 7 14 1462 6 8 0 854 1 95 7 0 1 +"26177" 4 213 3 1 1 0 4 1 100 1 14 1 1451 4 7 1 1075 1 95 13 0 0 +"26178" 4 213 4 0 1 1 1 1 100 131 100 125 4799 8 3 1 1072 2 25 25 1 1 +"26179" 4 213 4 0 1 1 1 1 100 131 125 131 3066 3 7 0 4661 1 5 6 0 1 +"26180" 4 213 4 0 1 1 1 1 100 131 131 138 1996 8 2 1 3435 1 5 7 1 1 
+"26181" 4 213 4 0 1 1 1 1 100 131 138 131 1765 2 1 0 1750 1 5 7 1 0 +"26182" 4 213 5 1 1 1 1 1 100 104 100 105 2319 9 7 1 1367 1 5 5 1 1 +"26183" 4 213 5 1 1 1 1 1 100 104 105 110 1603 4 8 0 1508 1 5 5 0 1 +"26184" 4 213 5 1 1 1 1 1 100 104 110 116 1969 8 2 1 986 1 5 6 1 1 +"26185" 4 213 5 1 1 1 1 1 100 104 116 110 1136 2 1 0 1303 1 5 6 1 0 +"26186" 4 213 5 1 1 1 1 1 100 104 110 116 1231 7 6 1 874 1 5 6 1 1 +"26187" 4 213 5 1 1 1 1 1 100 104 116 110 1549 5 3 0 945 1 5 6 1 0 +"26188" 4 213 5 1 1 1 1 1 100 104 110 104 1417 3 6 1 1156 1 5 6 0 0 +"26189" 4 213 5 1 1 1 1 1 100 104 104 99 1722 6 9 1 1382 1 5 5 0 0 +"26190" 4 213 5 1 1 1 1 1 100 104 99 104 1777 1 4 0 770 1 5 5 0 1 +"26191" 4 213 5 1 1 1 2 1 100 105 100 105 1996 2 9 0 1027 1 5 5 0 1 +"26192" 4 213 5 1 1 1 2 1 100 105 105 100 1087 4 10 1 729 1 5 5 0 0 +"26193" 4 213 5 1 1 1 2 1 100 105 100 95 1179 3 1 0 609 1 5 5 1 0 +"26194" 4 213 5 1 1 1 2 1 100 105 95 100 809 8 6 1 776 1 5 5 1 1 +"26195" 4 213 5 1 1 1 2 1 100 105 100 95 2848 5 7 1 1054 1 5 5 0 0 +"26196" 4 213 5 1 1 1 2 1 100 105 95 90 1753 6 8 1 1052 1 5 5 0 0 +"26197" 4 213 5 1 1 1 2 1 100 105 90 95 767 7 5 1 752 1 5 5 1 1 +"26198" 4 213 5 1 1 1 2 1 100 105 95 100 1602 1 10 0 1620 1 5 5 0 1 +"26199" 4 213 5 1 1 1 2 1 100 105 100 105 1492 9 3 1 786 1 5 5 1 1 +"26200" 4 213 5 1 1 1 3 1 100 116 100 105 2102 3 5 0 1495 1 5 5 0 1 +"26201" 4 213 5 1 1 1 3 1 100 116 105 100 1077 8 9 1 776 1 5 5 0 0 +"26202" 4 213 5 1 1 1 3 1 100 116 100 105 720 2 4 0 731 1 5 5 0 1 +"26203" 4 213 5 1 1 1 3 1 100 116 105 110 1461 6 3 1 973 1 5 5 1 1 +"26204" 4 213 5 1 1 1 3 1 100 116 110 116 2964 7 6 1 1124 1 5 6 1 1 +"26205" 4 213 5 1 1 1 3 1 100 116 116 110 2099 4 2 0 1373 1 5 6 1 0 +"26206" 4 213 5 1 1 1 3 1 100 116 110 116 1961 1 8 0 601 1 5 6 0 1 +"26207" 4 213 5 1 1 1 3 1 100 116 116 110 1298 5 7 1 706 1 5 6 0 0 +"26208" 4 213 5 1 1 1 3 1 100 116 110 116 2246 9 5 1 632 1 5 6 1 1 +"26209" 4 213 5 1 1 1 4 1 100 113 100 105 1658 8 7 1 1072 1 5 5 1 1 +"26210" 4 213 5 1 1 1 4 1 100 113 105 110 1043 3 10 0 935 1 5 5 0 1 +"26211" 4 213 5 1 1 1 4 1 100 113 110 104 1920 7 9 1 538 1 5 6 0 0 +"26212" 4 213 5 1 1 1 4 1 100 113 104 109 734 9 1 1 2280 1 5 5 1 1 +"26213" 4 213 5 1 1 1 4 1 100 113 109 114 1193 2 3 0 1334 1 5 5 0 1 +"26214" 4 213 5 1 1 1 4 1 100 113 114 120 1564 1 8 0 975 1 5 6 0 1 +"26215" 4 213 5 1 1 1 4 1 100 113 120 114 1470 5 4 0 1349 1 5 6 1 0 +"26216" 4 213 5 1 1 1 4 1 100 113 114 108 1514 4 2 0 655 1 5 6 1 0 +"26217" 4 213 5 1 1 1 4 1 100 113 108 113 1045 6 3 1 793 1 5 5 1 1 +"26218" 4 217 2 0 1 0 1 1 100 4 100 50 7501 2 7 1 1054 3 50 50 0 0 +"26219" 4 217 2 0 1 0 1 1 100 4 50 53 2273 3 7 0 0 5 5 3 0 1 +"26220" 4 217 2 0 1 0 1 1 100 4 53 80 3499 2 8 0 1202 3 50 27 0 1 +"26221" 4 217 2 0 1 0 1 1 100 4 80 4 2655 8 9 1 3414 1 95 76 0 0 +"26222" 4 217 3 1 1 0 1 0 100 1 100 195 2128 1 3 0 1336 1 95 95 0 1 +"26223" 4 217 3 1 1 0 1 0 100 1 195 293 2878 6 5 1 971 3 50 98 1 1 +"26224" 4 217 3 1 1 0 1 0 100 1 293 15 1242 2 8 1 3108 1 95 278 0 0 +"26225" 4 217 3 1 1 0 1 0 100 1 15 1 2549 8 9 1 4633 1 95 14 0 0 +"26226" 4 217 3 1 1 0 2 0 100 0 100 195 3700 8 1 1 3514 1 95 95 1 1 +"26227" 4 217 3 1 1 0 2 0 100 0 195 10 3368 6 2 0 2749 1 95 185 1 0 +"26228" 4 217 3 1 1 0 2 0 100 0 10 0 655 7 9 1 2381 1 95 10 0 0 +"26229" 4 217 3 1 1 0 3 0 100 0 100 195 2185 7 5 1 2750 1 95 95 1 1 +"26230" 4 217 3 1 1 0 3 0 100 0 195 10 3529 2 1 0 3327 1 95 185 1 0 +"26231" 4 217 3 1 1 0 3 0 100 0 10 9 3301 8 6 0 3270 5 5 1 1 0 +"26232" 4 217 3 1 1 0 3 0 100 0 9 18 2080 4 7 0 4018 1 95 9 0 1 +"26233" 4 217 3 1 1 0 3 0 
100 0 18 35 2704 3 10 0 1381 1 95 17 0 1 +"26234" 4 217 3 1 1 0 3 0 100 0 35 2 4440 6 8 1 3195 1 95 33 0 0 +"26235" 4 217 3 1 1 0 3 0 100 0 2 0 2496 9 2 0 567 1 95 2 1 0 +"26236" 4 217 3 1 1 0 4 0 100 0 100 195 2524 2 3 0 2361 1 95 95 0 1 +"26237" 4 217 3 1 1 0 4 0 100 0 195 341 3550 7 4 1 4771 2 75 146 1 1 +"26238" 4 217 3 1 1 0 4 0 100 0 341 17 6197 3 1 0 636 1 95 324 1 0 +"26239" 4 217 3 1 1 0 4 0 100 0 17 33 2042 1 9 0 702 1 95 16 0 1 +"26240" 4 217 3 1 1 0 4 0 100 0 33 64 2419 8 7 1 557 1 95 31 1 1 +"26241" 4 217 3 1 1 0 4 0 100 0 64 3 2080 9 2 0 690 1 95 61 1 0 +"26242" 4 217 3 1 1 0 4 0 100 0 3 0 1598 5 6 1 1320 1 95 3 0 0 +"26243" 4 217 4 0 1 1 1 1 100 100 100 95 1048 8 3 0 756 1 5 5 1 0 +"26244" 4 217 4 0 1 1 1 1 100 100 95 100 1317 3 7 0 453 1 5 5 0 1 +"26245" 4 217 4 0 1 1 1 1 100 100 100 105 2388 8 2 1 2560 1 5 5 1 1 +"26246" 4 217 4 0 1 1 1 1 100 100 105 100 3315 2 1 0 3844 1 5 5 1 0 +"26247" 4 217 5 1 1 1 1 1 100 152 100 105 5781 9 7 1 1351 1 5 5 1 1 +"26248" 4 217 5 1 1 1 1 1 100 152 105 110 2116 4 8 0 2421 1 5 5 0 1 +"26249" 4 217 5 1 1 1 1 1 100 152 110 138 1343 8 2 1 3205 2 25 28 1 1 +"26250" 4 217 5 1 1 1 1 1 100 152 138 131 2842 2 1 0 744 1 5 7 1 0 +"26251" 4 217 5 1 1 1 1 1 100 152 131 138 5338 7 6 1 3921 1 5 7 1 1 +"26252" 4 217 5 1 1 1 1 1 100 152 138 131 5900 5 3 0 617 1 5 7 1 0 +"26253" 4 217 5 1 1 1 1 1 100 152 131 138 1129 3 6 0 985 1 5 7 0 1 +"26254" 4 217 5 1 1 1 1 1 100 152 138 145 1288 6 9 0 1760 1 5 7 0 1 +"26255" 4 217 5 1 1 1 1 1 100 152 145 152 2641 1 4 0 618 1 5 7 0 1 +"26256" 4 217 5 1 1 1 2 1 100 93 100 105 1738 2 9 0 596 1 5 5 0 1 +"26257" 4 217 5 1 1 1 2 1 100 93 105 110 2589 4 10 0 592 1 5 5 0 1 +"26258" 4 217 5 1 1 1 2 1 100 93 110 104 1369 3 1 0 701 1 5 6 1 0 +"26259" 4 217 5 1 1 1 2 1 100 93 104 109 1273 8 6 1 482 1 5 5 1 1 +"26260" 4 217 5 1 1 1 2 1 100 93 109 104 3096 5 7 1 499 1 5 5 0 0 +"26261" 4 217 5 1 1 1 2 1 100 93 104 99 1920 6 8 1 587 1 5 5 0 0 +"26262" 4 217 5 1 1 1 2 1 100 93 99 94 1508 7 5 0 593 1 5 5 1 0 +"26263" 4 217 5 1 1 1 2 1 100 93 94 89 2138 1 10 1 554 1 5 5 0 0 +"26264" 4 217 5 1 1 1 2 1 100 93 89 93 2413 9 3 1 503 1 5 4 1 1 +"26265" 4 217 5 1 1 1 3 1 100 141 100 105 1930 3 5 0 436 1 5 5 0 1 +"26266" 4 217 5 1 1 1 3 1 100 141 105 100 3548 8 9 1 681 1 5 5 0 0 +"26267" 4 217 5 1 1 1 3 1 100 141 100 105 886 2 4 0 705 1 5 5 0 1 +"26268" 4 217 5 1 1 1 3 1 100 141 105 110 2188 6 3 1 2018 1 5 5 1 1 +"26269" 4 217 5 1 1 1 3 1 100 141 110 116 1175 7 6 1 1173 1 5 6 1 1 +"26270" 4 217 5 1 1 1 3 1 100 141 116 122 1018 4 2 1 967 1 5 6 1 1 +"26271" 4 217 5 1 1 1 3 1 100 141 122 128 1174 1 8 0 1548 1 5 6 0 1 +"26272" 4 217 5 1 1 1 3 1 100 141 128 134 6847 5 7 0 663 1 5 6 0 1 +"26273" 4 217 5 1 1 1 3 1 100 141 134 141 2037 9 5 1 626 1 5 7 1 1 +"26274" 4 217 5 1 1 1 4 1 100 126 100 105 3348 8 7 1 651 1 5 5 1 1 +"26275" 4 217 5 1 1 1 4 1 100 126 105 110 1649 3 10 0 1120 1 5 5 0 1 +"26276" 4 217 5 1 1 1 4 1 100 126 110 104 4809 7 9 1 290 1 5 6 0 0 +"26277" 4 217 5 1 1 1 4 1 100 126 104 99 1425 9 1 0 555 1 5 5 1 0 +"26278" 4 217 5 1 1 1 4 1 100 126 99 104 7317 2 3 0 3166 1 5 5 0 1 +"26279" 4 217 5 1 1 1 4 1 100 126 104 109 1341 1 8 0 842 1 5 5 0 1 +"26280" 4 217 5 1 1 1 4 1 100 126 109 114 2959 5 4 1 976 1 5 5 1 1 +"26281" 4 217 5 1 1 1 4 1 100 126 114 120 1431 4 2 1 840 1 5 6 1 1 +"26282" 4 217 5 1 1 1 4 1 100 126 120 126 4037 6 3 1 1955 1 5 6 1 1 +"26283" 4 219 2 0 1 0 1 1 100 96 100 175 13789 2 7 0 866 2 75 75 0 1 +"26284" 4 219 2 0 1 0 1 1 100 96 175 219 7822 3 7 0 367 4 25 44 0 1 +"26285" 4 219 2 0 1 0 1 1 100 96 219 383 4755 2 8 0 70 2 75 
164 0 1 +"26286" 4 219 2 0 1 0 1 1 100 96 383 96 2904 8 9 1 867 2 75 287 0 0 +"26287" 4 219 3 1 1 0 1 1 100 427 100 175 3604 1 3 0 773 2 75 75 0 1 +"26288" 4 219 3 1 1 0 1 1 100 427 175 263 4054 6 5 1 771 3 50 88 1 1 +"26289" 4 219 3 1 1 0 1 1 100 427 263 197 5092 2 8 1 1000 4 25 66 0 0 +"26290" 4 219 3 1 1 0 1 1 100 427 197 246 4669 8 9 0 1493 4 25 49 0 1 +"26291" 4 219 3 1 1 0 1 1 100 427 246 308 2803 3 4 0 547 4 25 62 0 1 +"26292" 4 219 3 1 1 0 1 1 100 427 308 154 3495 5 7 1 1094 3 50 154 0 0 +"26293" 4 219 3 1 1 0 1 1 100 427 154 231 4511 7 4 1 534 3 50 77 1 1 +"26294" 4 219 3 1 1 0 1 1 100 427 231 219 5034 4 1 0 593 5 5 12 1 0 +"26295" 4 219 3 1 1 0 1 1 100 427 219 427 2554 9 6 1 1086 1 95 208 1 1 +"26296" 4 219 3 1 1 0 2 1 100 216 100 150 2280 8 1 1 2593 3 50 50 1 1 +"26297" 4 219 3 1 1 0 2 1 100 216 150 112 4547 6 2 0 641 4 25 38 1 0 +"26298" 4 219 3 1 1 0 2 1 100 216 112 84 4207 7 9 1 1261 4 25 28 0 0 +"26299" 4 219 3 1 1 0 2 1 100 216 84 126 2662 2 10 0 820 3 50 42 0 1 +"26300" 4 219 3 1 1 0 2 1 100 216 126 94 5931 5 3 0 1207 4 25 32 1 0 +"26301" 4 219 3 1 1 0 2 1 100 216 94 47 3595 4 2 0 502 3 50 47 1 0 +"26302" 4 219 3 1 1 0 2 1 100 216 47 82 2179 3 5 0 544 2 75 35 0 1 +"26303" 4 219 3 1 1 0 2 1 100 216 82 144 1876 9 4 1 560 2 75 62 1 1 +"26304" 4 219 3 1 1 0 2 1 100 216 144 216 1747 1 7 0 997 3 50 72 0 1 +"26305" 4 219 3 1 1 0 3 1 100 64 100 150 2750 7 5 1 2280 3 50 50 1 1 +"26306" 4 219 3 1 1 0 3 1 100 64 150 75 2561 2 1 0 2013 3 50 75 1 0 +"26307" 4 219 3 1 1 0 3 1 100 64 75 146 2779 8 6 1 1148 1 95 71 1 1 +"26308" 4 219 3 1 1 0 3 1 100 64 146 183 3963 4 7 0 564 4 25 37 0 1 +"26309" 4 219 3 1 1 0 3 1 100 64 183 137 4119 3 10 1 1785 4 25 46 0 0 +"26310" 4 219 3 1 1 0 3 1 100 64 137 34 2385 6 8 1 1965 2 75 103 0 0 +"26311" 4 219 3 1 1 0 3 1 100 64 34 66 2405 9 2 1 496 1 95 32 1 1 +"26312" 4 219 3 1 1 0 3 1 100 64 66 33 4710 5 3 0 628 3 50 33 1 0 +"26313" 4 219 3 1 1 0 3 1 100 64 33 64 2427 1 10 0 581 1 95 31 0 1 +"26314" 4 219 3 1 1 0 4 1 100 471 100 150 1886 2 3 0 1202 3 50 50 0 1 +"26315" 4 219 3 1 1 0 4 1 100 471 150 188 1534 7 4 1 620 4 25 38 1 1 +"26316" 4 219 3 1 1 0 4 1 100 471 188 141 4108 3 1 0 1718 4 25 47 1 0 +"26317" 4 219 3 1 1 0 4 1 100 471 141 275 2329 1 9 0 584 1 95 134 0 1 +"26318" 4 219 3 1 1 0 4 1 100 471 275 536 2201 8 7 1 878 1 95 261 1 1 +"26319" 4 219 3 1 1 0 4 1 100 471 536 670 1801 9 2 1 530 4 25 134 1 1 +"26320" 4 219 3 1 1 0 4 1 100 471 670 838 4630 5 6 0 1807 4 25 168 0 1 +"26321" 4 219 3 1 1 0 4 1 100 471 838 628 2806 6 8 1 975 4 25 210 0 0 +"26322" 4 219 3 1 1 0 4 1 100 471 628 471 3438 4 7 1 1712 4 25 157 0 0 +"26323" 4 219 4 0 1 1 1 1 100 82 100 105 4588 8 3 1 3415 1 5 5 1 1 +"26324" 4 219 4 0 1 1 1 1 100 82 105 131 4778 3 7 0 4052 2 25 26 0 1 +"26325" 4 219 4 0 1 1 1 1 100 82 131 164 1744 8 2 1 3211 2 25 33 1 1 +"26326" 4 219 4 0 1 1 1 1 100 82 164 82 1735 2 1 0 1035 3 50 82 1 0 +"26327" 4 219 5 1 1 1 1 1 100 506 100 175 2779 9 7 1 564 4 75 75 1 1 +"26328" 4 219 5 1 1 1 1 1 100 506 175 219 3199 4 8 0 1429 2 25 44 0 1 +"26329" 4 219 5 1 1 1 1 1 100 506 219 274 1974 8 2 1 1141 2 25 55 1 1 +"26330" 4 219 5 1 1 1 1 1 100 506 274 205 1740 2 1 0 559 2 25 69 1 0 +"26331" 4 219 5 1 1 1 1 1 100 506 205 256 4033 7 6 1 423 2 25 51 1 1 +"26332" 4 219 5 1 1 1 1 1 100 506 256 243 2360 5 3 0 765 1 5 13 1 0 +"26333" 4 219 5 1 1 1 1 1 100 506 243 304 1430 3 6 0 1096 2 25 61 0 1 +"26334" 4 219 5 1 1 1 1 1 100 506 304 289 3777 6 9 1 667 1 5 15 0 0 +"26335" 4 219 5 1 1 1 1 1 100 506 289 506 1112 1 4 0 922 4 75 217 0 1 +"26336" 4 219 5 1 1 1 2 1 100 563 100 175 2832 
2 9 0 1183 4 75 75 0 1 +"26337" 4 219 5 1 1 1 2 1 100 563 175 184 3548 4 10 0 545 1 5 9 0 1 +"26338" 4 219 5 1 1 1 2 1 100 563 184 193 2868 3 1 1 521 1 5 9 1 1 +"26339" 4 219 5 1 1 1 2 1 100 563 193 241 1485 8 6 1 716 2 25 48 1 1 +"26340" 4 219 5 1 1 1 2 1 100 563 241 253 2799 5 7 0 656 1 5 12 0 1 +"26341" 4 219 5 1 1 1 2 1 100 563 253 240 2429 6 8 1 863 1 5 13 0 0 +"26342" 4 219 5 1 1 1 2 1 100 563 240 300 2579 7 5 1 1518 2 25 60 1 1 +"26343" 4 219 5 1 1 1 2 1 100 563 300 450 1611 1 10 0 540 3 50 150 0 1 +"26344" 4 219 5 1 1 1 2 1 100 563 450 563 1415 9 3 1 2458 2 25 113 1 1 +"26345" 4 219 5 1 1 1 3 1 100 310 100 95 3044 3 5 1 817 1 5 5 0 0 +"26346" 4 219 5 1 1 1 3 1 100 310 95 71 1600 8 9 1 674 2 25 24 0 0 +"26347" 4 219 5 1 1 1 3 1 100 310 71 107 1351 2 4 0 893 3 50 36 0 1 +"26348" 4 219 5 1 1 1 3 1 100 310 107 134 1719 6 3 1 531 2 25 27 1 1 +"26349" 4 219 5 1 1 1 3 1 100 310 134 141 1856 7 6 1 1652 1 5 7 1 1 +"26350" 4 219 5 1 1 1 3 1 100 310 141 106 1827 4 2 0 794 2 25 35 1 0 +"26351" 4 219 5 1 1 1 3 1 100 310 106 186 1521 1 8 0 686 4 75 80 0 1 +"26352" 4 219 5 1 1 1 3 1 100 310 186 177 7426 5 7 1 2447 1 5 9 0 0 +"26353" 4 219 5 1 1 1 3 1 100 310 177 310 1392 9 5 1 643 4 75 133 1 1 +"26354" 4 219 5 1 1 1 4 1 100 525 100 150 1436 8 7 1 1245 3 50 50 1 1 +"26355" 4 219 5 1 1 1 4 1 100 525 150 225 1542 3 10 0 563 3 50 75 0 1 +"26356" 4 219 5 1 1 1 4 1 100 525 225 236 4485 7 9 0 894 1 5 11 0 1 +"26357" 4 219 5 1 1 1 4 1 100 525 236 354 1240 9 1 1 1291 3 50 118 1 1 +"26358" 4 219 5 1 1 1 4 1 100 525 354 443 1243 2 3 0 950 2 25 89 0 1 +"26359" 4 219 5 1 1 1 4 1 100 525 443 554 1110 1 8 0 647 2 25 111 0 1 +"26360" 4 219 5 1 1 1 4 1 100 525 554 582 3693 5 4 1 747 1 5 28 1 1 +"26361" 4 219 5 1 1 1 4 1 100 525 582 553 1765 4 2 0 614 1 5 29 1 0 +"26362" 4 219 5 1 1 1 4 1 100 525 553 525 1614 6 3 0 615 1 5 28 1 0 +"26363" 4 225 2 0 1 0 1 1 100 7 100 150 5971 2 7 0 1244 3 50 50 0 1 +"26364" 4 225 2 0 1 0 1 1 100 7 150 75 8774 3 7 1 690 3 50 75 0 0 +"26365" 4 225 2 0 1 0 1 1 100 7 75 146 1894 2 8 0 2090 1 95 71 0 1 +"26366" 4 225 2 0 1 0 1 1 100 7 146 7 2259 8 9 1 1725 1 95 139 0 0 +"26367" 4 225 3 1 1 0 1 0 100 0 100 175 10025 1 3 0 1778 2 75 75 0 1 +"26368" 4 225 3 1 1 0 1 0 100 0 175 9 1858 6 5 0 1735 1 95 166 1 0 +"26369" 4 225 3 1 1 0 1 0 100 0 9 0 2337 2 8 1 4288 1 95 9 0 0 +"26370" 4 225 3 1 1 0 2 0 100 0 100 5 6570 8 1 0 2007 1 95 95 1 0 +"26371" 4 225 3 1 1 0 2 0 100 0 5 8 3629 6 2 1 438 3 50 3 1 1 +"26372" 4 225 3 1 1 0 2 0 100 0 8 2 5166 7 9 1 526 2 75 6 0 0 +"26373" 4 225 3 1 1 0 2 0 100 0 2 4 2930 2 10 0 2372 1 95 2 0 1 +"26374" 4 225 3 1 1 0 2 0 100 0 4 0 2184 5 3 0 2350 1 95 4 1 0 +"26375" 4 225 3 1 1 0 3 0 100 1 100 195 4621 7 5 1 1935 1 95 95 1 1 +"26376" 4 225 3 1 1 0 3 0 100 1 195 10 2368 2 1 0 933 1 95 185 1 0 +"26377" 4 225 3 1 1 0 3 0 100 1 10 20 2686 8 6 1 1948 1 95 10 1 1 +"26378" 4 225 3 1 1 0 3 0 100 1 20 1 3652 4 7 1 1108 1 95 19 0 0 +"26379" 4 225 3 1 1 0 4 1 100 1 100 195 2401 2 3 0 2164 1 95 95 0 1 +"26380" 4 225 3 1 1 0 4 1 100 1 195 380 2386 7 4 1 886 1 95 185 1 1 +"26381" 4 225 3 1 1 0 4 1 100 1 380 19 2226 3 1 0 709 1 95 361 1 0 +"26382" 4 225 3 1 1 0 4 1 100 1 19 37 5136 1 9 0 593 1 95 18 0 1 +"26383" 4 225 3 1 1 0 4 1 100 1 37 72 3114 8 7 1 780 1 95 35 1 1 +"26384" 4 225 3 1 1 0 4 1 100 1 72 140 3980 9 2 1 692 1 95 68 1 1 +"26385" 4 225 3 1 1 0 4 1 100 1 140 273 3720 5 6 0 652 1 95 133 0 1 +"26386" 4 225 3 1 1 0 4 1 100 1 273 14 5074 6 8 1 558 1 95 259 0 0 +"26387" 4 225 3 1 1 0 4 1 100 1 14 1 3601 4 7 1 731 1 95 13 0 0 +"26388" 4 225 4 0 1 1 1 1 100 118 100 150 
19845 8 3 1 1019 3 50 50 1 1 +"26389" 4 225 4 0 1 1 1 1 100 118 150 158 13464 3 7 0 1260 1 5 8 0 1 +"26390" 4 225 4 0 1 1 1 1 100 118 158 237 1989 8 2 1 1364 3 50 79 1 1 +"26391" 4 225 4 0 1 1 1 1 100 118 237 118 2360 2 1 0 813 3 50 119 1 0 +"26392" 4 225 5 1 1 1 1 1 100 907 100 195 2726 9 7 1 1171 5 95 95 1 1 +"26393" 4 225 5 1 1 1 1 1 100 907 195 293 1945 4 8 0 882 3 50 98 0 1 +"26394" 4 225 5 1 1 1 1 1 100 907 293 440 5461 8 2 1 631 3 50 147 1 1 +"26395" 4 225 5 1 1 1 1 1 100 907 440 220 2551 2 1 0 467 3 50 220 1 0 +"26396" 4 225 5 1 1 1 1 1 100 907 220 330 3018 7 6 1 4033 3 50 110 1 1 +"26397" 4 225 5 1 1 1 1 1 100 907 330 413 3262 5 3 1 872 2 25 83 1 1 +"26398" 4 225 5 1 1 1 1 1 100 907 413 620 2332 3 6 0 923 3 50 207 0 1 +"26399" 4 225 5 1 1 1 1 1 100 907 620 465 4012 6 9 1 1155 2 25 155 0 0 +"26400" 4 225 5 1 1 1 1 1 100 907 465 907 2113 1 4 0 1235 5 95 442 0 1 +"26401" 4 225 5 1 1 1 2 1 100 240 100 150 2718 2 9 0 483 3 50 50 0 1 +"26402" 4 225 5 1 1 1 2 1 100 240 150 225 3425 4 10 0 1060 3 50 75 0 1 +"26403" 4 225 5 1 1 1 2 1 100 240 225 112 3255 3 1 0 337 3 50 113 1 0 +"26404" 4 225 5 1 1 1 2 1 100 240 112 168 1606 8 6 1 983 3 50 56 1 1 +"26405" 4 225 5 1 1 1 2 1 100 240 168 84 1622 5 7 1 614 3 50 84 0 0 +"26406" 4 225 5 1 1 1 2 1 100 240 84 42 3078 6 8 1 394 3 50 42 0 0 +"26407" 4 225 5 1 1 1 2 1 100 240 42 63 2141 7 5 1 2876 3 50 21 1 1 +"26408" 4 225 5 1 1 1 2 1 100 240 63 123 2024 1 10 0 54 5 95 60 0 1 +"26409" 4 225 5 1 1 1 2 1 100 240 123 240 1552 9 3 1 644 5 95 117 1 1 +"26410" 4 225 5 1 1 1 3 1 100 115 100 150 2022 3 5 0 3324 3 50 50 0 1 +"26411" 4 225 5 1 1 1 3 1 100 115 150 37 20755 8 9 1 2749 4 75 113 0 0 +"26412" 4 225 5 1 1 1 3 1 100 115 37 56 5712 2 4 0 3798 3 50 19 0 1 +"26413" 4 225 5 1 1 1 3 1 100 115 56 84 1510 6 3 1 1604 3 50 28 1 1 +"26414" 4 225 5 1 1 1 3 1 100 115 84 126 1189 7 6 1 722 3 50 42 1 1 +"26415" 4 225 5 1 1 1 3 1 100 115 126 31 1277 4 2 0 470 4 75 95 1 0 +"26416" 4 225 5 1 1 1 3 1 100 115 31 47 3261 1 8 0 1392 3 50 16 0 1 +"26417" 4 225 5 1 1 1 3 1 100 115 47 59 3699 5 7 0 1681 2 25 12 0 1 +"26418" 4 225 5 1 1 1 3 1 100 115 59 115 1630 9 5 1 606 5 95 56 1 1 +"26419" 4 225 5 1 1 1 4 1 100 455 100 195 1905 8 7 1 612 5 95 95 1 1 +"26420" 4 225 5 1 1 1 4 1 100 455 195 380 1988 3 10 0 772 5 95 185 0 1 +"26421" 4 225 5 1 1 1 4 1 100 455 380 95 11404 7 9 1 2372 4 75 285 0 0 +"26422" 4 225 5 1 1 1 4 1 100 455 95 185 22680 9 1 1 2087 5 95 90 1 1 +"26423" 4 225 5 1 1 1 4 1 100 455 185 324 9816 2 3 0 1836 4 75 139 0 1 +"26424" 4 225 5 1 1 1 4 1 100 455 324 486 13753 1 8 0 1512 3 50 162 0 1 +"26425" 4 225 5 1 1 1 4 1 100 455 486 729 1997 5 4 1 860 3 50 243 1 1 +"26426" 4 225 5 1 1 1 4 1 100 455 729 364 1760 4 2 0 882 3 50 365 1 0 +"26427" 4 225 5 1 1 1 4 1 100 455 364 455 2294 6 3 1 1518 2 25 91 1 1 +"26428" 4 230 2 0 1 0 1 1 100 29 100 150 5547 2 7 0 705 3 50 50 0 1 +"26429" 4 230 2 0 1 0 1 1 100 29 150 293 10133 3 7 0 3040 1 95 143 0 1 +"26430" 4 230 2 0 1 0 1 1 100 29 293 571 3428 2 8 0 1183 1 95 278 0 1 +"26431" 4 230 2 0 1 0 1 1 100 29 571 29 2556 8 9 1 3952 1 95 542 0 0 +"26432" 4 230 3 1 1 0 1 1 100 133 100 195 3628 1 3 0 1045 1 95 95 0 1 +"26433" 4 230 3 1 1 0 1 1 100 133 195 380 5112 6 5 1 1201 1 95 185 1 1 +"26434" 4 230 3 1 1 0 1 1 100 133 380 741 2125 2 8 0 1405 1 95 361 0 1 +"26435" 4 230 3 1 1 0 1 1 100 133 741 37 3416 8 9 1 1058 1 95 704 0 0 +"26436" 4 230 3 1 1 0 1 1 100 133 37 72 3053 3 4 0 673 1 95 35 0 1 +"26437" 4 230 3 1 1 0 1 1 100 133 72 140 3958 5 7 0 767 1 95 68 0 1 +"26438" 4 230 3 1 1 0 1 1 100 133 140 273 6616 7 4 1 730 1 95 133 1 1 
+"26439" 4 230 3 1 1 0 1 1 100 133 273 68 4949 4 1 0 1847 2 75 205 1 0 +"26440" 4 230 3 1 1 0 1 1 100 133 68 133 3076 9 6 1 583 1 95 65 1 1 +"26441" 4 230 3 1 1 0 2 0 100 0 100 195 2643 8 1 1 371 1 95 95 1 1 +"26442" 4 230 3 1 1 0 2 0 100 0 195 380 2542 6 2 1 1371 1 95 185 1 1 +"26443" 4 230 3 1 1 0 2 0 100 0 380 19 1968 7 9 1 592 1 95 361 0 0 +"26444" 4 230 3 1 1 0 2 0 100 0 19 37 1945 2 10 0 529 1 95 18 0 1 +"26445" 4 230 3 1 1 0 2 0 100 0 37 2 4502 5 3 0 685 1 95 35 1 0 +"26446" 4 230 3 1 1 0 2 0 100 0 2 0 3450 4 2 0 827 1 95 2 1 0 +"26447" 4 230 3 1 1 0 3 0 100 0 100 195 1723 7 5 1 462 1 95 95 1 1 +"26448" 4 230 3 1 1 0 3 0 100 0 195 10 1902 2 1 0 881 1 95 185 1 0 +"26449" 4 230 3 1 1 0 3 0 100 0 10 20 2428 8 6 1 286 1 95 10 1 1 +"26450" 4 230 3 1 1 0 3 0 100 0 20 39 2141 4 7 0 412 1 95 19 0 1 +"26451" 4 230 3 1 1 0 3 0 100 0 39 76 3014 3 10 0 740 1 95 37 0 1 +"26452" 4 230 3 1 1 0 3 0 100 0 76 4 2526 6 8 1 1227 1 95 72 0 0 +"26453" 4 230 3 1 1 0 3 0 100 0 4 8 4430 9 2 1 312 1 95 4 1 1 +"26454" 4 230 3 1 1 0 3 0 100 0 8 0 2887 5 3 0 1107 1 95 8 1 0 +"26455" 4 230 3 1 1 0 4 0 100 0 100 195 8647 2 3 0 937 1 95 95 0 1 +"26456" 4 230 3 1 1 0 4 0 100 0 195 380 1690 7 4 1 759 1 95 185 1 1 +"26457" 4 230 3 1 1 0 4 0 100 0 380 19 4288 3 1 0 442 1 95 361 1 0 +"26458" 4 230 3 1 1 0 4 0 100 0 19 37 2394 1 9 0 869 1 95 18 0 1 +"26459" 4 230 3 1 1 0 4 0 100 0 37 72 2001 8 7 1 848 1 95 35 1 1 +"26460" 4 230 3 1 1 0 4 0 100 0 72 140 1596 9 2 1 462 1 95 68 1 1 +"26461" 4 230 3 1 1 0 4 0 100 0 140 7 2477 5 6 1 541 1 95 133 0 0 +"26462" 4 230 3 1 1 0 4 0 100 0 7 0 2768 6 8 1 450 1 95 7 0 0 +"26463" 4 230 4 0 1 1 1 1 100 29 100 150 3840 8 3 1 990 3 50 50 1 1 +"26464" 4 230 4 0 1 1 1 1 100 29 150 293 3604 3 7 0 0 5 95 143 0 1 +"26465" 4 230 4 0 1 1 1 1 100 29 293 571 1830 8 2 1 1821 5 95 278 1 1 +"26466" 4 230 4 0 1 1 1 1 100 29 571 29 2036 2 1 0 2001 5 95 542 1 0 +"26467" 4 230 5 1 1 1 1 1 100 474 100 195 4721 9 7 1 1319 5 95 95 1 1 +"26468" 4 230 5 1 1 1 1 1 100 474 195 293 2558 4 8 0 1650 3 50 98 0 1 +"26469" 4 230 5 1 1 1 1 1 100 474 293 571 2576 8 2 1 1076 5 95 278 1 1 +"26470" 4 230 5 1 1 1 1 1 100 474 571 285 1686 2 1 0 2020 3 50 286 1 0 +"26471" 4 230 5 1 1 1 1 1 100 474 285 556 4497 7 6 1 881 5 95 271 1 1 +"26472" 4 230 5 1 1 1 1 1 100 474 556 278 3732 5 3 0 1969 3 50 278 1 0 +"26473" 4 230 5 1 1 1 1 1 100 474 278 487 4568 3 6 0 1365 4 75 209 0 1 +"26474" 4 230 5 1 1 1 1 1 100 474 487 243 3401 6 9 1 3693 3 50 244 0 0 +"26475" 4 230 5 1 1 1 1 1 100 474 243 474 1994 1 4 0 2521 5 95 231 0 1 +"26476" 4 230 5 1 1 1 2 0 100 1 100 175 3076 2 9 0 1095 4 75 75 0 1 +"26477" 4 230 5 1 1 1 2 0 100 1 175 263 1592 4 10 0 3530 3 50 88 0 1 +"26478" 4 230 5 1 1 1 2 0 100 1 263 513 4671 3 1 1 1335 5 95 250 1 1 +"26479" 4 230 5 1 1 1 2 0 100 1 513 26 5173 8 6 0 333 5 95 487 1 0 +"26480" 4 230 5 1 1 1 2 0 100 1 26 6 7099 5 7 1 839 4 75 20 0 0 +"26481" 4 230 5 1 1 1 2 0 100 1 6 1 1940 6 8 1 405 4 75 5 0 0 +"26482" 4 230 5 1 1 1 3 0 100 1 100 195 1666 3 5 0 1473 5 95 95 0 1 +"26483" 4 230 5 1 1 1 3 0 100 1 195 49 6032 8 9 1 1167 4 75 146 0 0 +"26484" 4 230 5 1 1 1 3 0 100 1 49 2 3565 2 4 1 874 5 95 47 0 0 +"26485" 4 230 5 1 1 1 3 0 100 1 2 1 5029 6 3 0 2700 3 50 1 1 0 +"26486" 4 230 5 1 1 1 4 0 100 1 100 5 4025 8 7 0 1152 5 95 95 1 0 +"26487" 4 230 5 1 1 1 4 0 100 1 5 1 3238 3 10 1 1178 4 75 4 0 0 +"26488" 4 256 2 0 1 0 1 1 100 29 100 150 7489 2 7 0 907 3 50 50 0 1 +"26489" 4 256 2 0 1 0 1 1 100 29 150 293 8676 3 7 0 3725 1 95 143 0 1 +"26490" 4 256 2 0 1 0 1 1 100 29 293 571 2641 2 8 0 1441 1 95 278 0 1 +"26491" 4 
256 2 0 1 0 1 1 100 29 571 29 7122 8 9 1 2013 1 95 542 0 0 +"26492" 4 256 3 1 1 0 1 1 100 1845 100 195 6880 1 3 0 2278 1 95 95 0 1 +"26493" 4 256 3 1 1 0 1 1 100 1845 195 380 2383 6 5 1 1371 1 95 185 1 1 +"26494" 4 256 3 1 1 0 1 1 100 1845 380 665 2271 2 8 0 2657 2 75 285 0 1 +"26495" 4 256 3 1 1 0 1 1 100 1845 665 332 1927 8 9 1 529 3 50 333 0 0 +"26496" 4 256 3 1 1 0 1 1 100 1845 332 647 2069 3 4 0 937 1 95 315 0 1 +"26497" 4 256 3 1 1 0 1 1 100 1845 647 1262 5898 5 7 0 703 1 95 615 0 1 +"26498" 4 256 3 1 1 0 1 1 100 1845 1262 1893 1603 7 4 1 901 3 50 631 1 1 +"26499" 4 256 3 1 1 0 1 1 100 1845 1893 946 3999 4 1 0 2287 3 50 947 1 0 +"26500" 4 256 3 1 1 0 1 1 100 1845 946 1845 1431 9 6 1 257 1 95 899 1 1 +"26501" 4 256 3 1 1 0 2 1 100 31 100 195 2532 8 1 1 241 1 95 95 1 1 +"26502" 4 256 3 1 1 0 2 1 100 31 195 380 1611 6 2 1 212 1 95 185 1 1 +"26503" 4 256 3 1 1 0 2 1 100 31 380 19 1143 7 9 1 222 1 95 361 0 0 +"26504" 4 256 3 1 1 0 2 1 100 31 19 37 1185 2 10 0 206 1 95 18 0 1 +"26505" 4 256 3 1 1 0 2 1 100 31 37 72 1185 5 3 1 214 1 95 35 1 1 +"26506" 4 256 3 1 1 0 2 1 100 31 72 4 889 4 2 0 196 1 95 68 1 0 +"26507" 4 256 3 1 1 0 2 1 100 31 4 8 1112 3 5 0 769 1 95 4 0 1 +"26508" 4 256 3 1 1 0 2 1 100 31 8 16 1058 9 4 1 238 1 95 8 1 1 +"26509" 4 256 3 1 1 0 2 1 100 31 16 31 703 1 7 0 299 1 95 15 0 1 +"26510" 4 256 3 1 1 0 3 1 100 6 100 195 2546 7 5 1 318 1 95 95 1 1 +"26511" 4 256 3 1 1 0 3 1 100 6 195 10 908 2 1 0 198 1 95 185 1 0 +"26512" 4 256 3 1 1 0 3 1 100 6 10 20 1434 8 6 1 212 1 95 10 1 1 +"26513" 4 256 3 1 1 0 3 1 100 6 20 39 959 4 7 0 384 1 95 19 0 1 +"26514" 4 256 3 1 1 0 3 1 100 6 39 68 1275 3 10 0 708 2 75 29 0 1 +"26515" 4 256 3 1 1 0 3 1 100 6 68 34 1727 6 8 1 59 3 50 34 0 0 +"26516" 4 256 3 1 1 0 3 1 100 6 34 66 1221 9 2 1 241 1 95 32 1 1 +"26517" 4 256 3 1 1 0 3 1 100 6 66 3 3162 5 3 0 3290 1 95 63 1 0 +"26518" 4 256 3 1 1 0 3 1 100 6 3 6 1363 1 10 0 351 1 95 3 0 1 +"26519" 4 256 3 1 1 0 4 1 100 712 100 195 1443 2 3 0 380 1 95 95 0 1 +"26520" 4 256 3 1 1 0 4 1 100 712 195 293 1223 7 4 1 563 3 50 98 1 1 +"26521" 4 256 3 1 1 0 4 1 100 712 293 146 1005 3 1 0 339 3 50 147 1 0 +"26522" 4 256 3 1 1 0 4 1 100 712 146 285 976 1 9 0 255 1 95 139 0 1 +"26523" 4 256 3 1 1 0 4 1 100 712 285 499 1232 8 7 1 1644 2 75 214 1 1 +"26524" 4 256 3 1 1 0 4 1 100 712 499 973 1043 9 2 1 439 1 95 474 1 1 +"26525" 4 256 3 1 1 0 4 1 100 712 973 730 3055 5 6 1 1660 4 25 243 0 0 +"26526" 4 256 3 1 1 0 4 1 100 712 730 365 2194 6 8 1 631 3 50 365 0 0 +"26527" 4 256 3 1 1 0 4 1 100 712 365 712 1431 4 7 0 240 1 95 347 0 1 +"26528" 4 256 4 0 1 1 1 1 100 164 100 150 3521 8 3 1 1343 3 50 50 1 1 +"26529" 4 256 4 0 1 1 1 1 100 164 150 188 3203 3 7 0 3689 2 25 38 0 1 +"26530" 4 256 4 0 1 1 1 1 100 164 188 329 2014 8 2 1 2749 4 75 141 1 1 +"26531" 4 256 4 0 1 1 1 1 100 164 329 164 1175 2 1 0 3261 3 50 165 1 0 +"26532" 4 256 5 1 1 1 1 1 100 831 100 195 5686 9 7 1 1501 5 95 95 1 1 +"26533" 4 256 5 1 1 1 1 1 100 831 195 244 987 4 8 0 826 2 25 49 0 1 +"26534" 4 256 5 1 1 1 1 1 100 831 244 366 1023 8 2 1 1677 3 50 122 1 1 +"26535" 4 256 5 1 1 1 1 1 100 831 366 183 1354 2 1 0 2440 3 50 183 1 0 +"26536" 4 256 5 1 1 1 1 1 100 831 183 320 1113 7 6 1 42 4 75 137 1 1 +"26537" 4 256 5 1 1 1 1 1 100 831 320 400 4261 5 3 1 272 2 25 80 1 1 +"26538" 4 256 5 1 1 1 1 1 100 831 400 500 856 3 6 0 1061 2 25 100 0 1 +"26539" 4 256 5 1 1 1 1 1 100 831 500 475 1055 6 9 1 307 1 5 25 0 0 +"26540" 4 256 5 1 1 1 1 1 100 831 475 831 961 1 4 0 1218 4 75 356 0 1 +"26541" 4 256 5 1 1 1 2 1 100 1001 100 175 1371 2 9 0 799 4 75 75 0 1 +"26542" 4 
256 5 1 1 1 2 1 100 1001 175 184 1079 4 10 0 265 1 5 9 0 1 +"26543" 4 256 5 1 1 1 2 1 100 1001 184 138 514 3 1 0 2759 2 25 46 1 0 +"26544" 4 256 5 1 1 1 2 1 100 1001 138 242 1137 8 6 1 1425 4 75 104 1 1 +"26545" 4 256 5 1 1 1 2 1 100 1001 242 230 1754 5 7 1 221 1 5 12 0 0 +"26546" 4 256 5 1 1 1 2 1 100 1001 230 218 983 6 8 1 294 1 5 12 0 0 +"26547" 4 256 5 1 1 1 2 1 100 1001 218 327 1029 7 5 1 554 3 50 109 1 1 +"26548" 4 256 5 1 1 1 2 1 100 1001 327 572 925 1 10 0 1047 4 75 245 0 1 +"26549" 4 256 5 1 1 1 2 1 100 1001 572 1001 1522 9 3 1 1479 4 75 429 1 1 +"26550" 4 256 5 1 1 1 3 1 100 217 100 125 1197 3 5 0 1162 2 25 25 0 1 +"26551" 4 256 5 1 1 1 3 1 100 217 125 31 1128 8 9 1 993 4 75 94 0 0 +"26552" 4 256 5 1 1 1 3 1 100 217 31 54 1086 2 4 0 376 4 75 23 0 1 +"26553" 4 256 5 1 1 1 3 1 100 217 54 57 1220 6 3 1 271 1 5 3 1 1 +"26554" 4 256 5 1 1 1 3 1 100 217 57 71 951 7 6 1 1151 2 25 14 1 1 +"26555" 4 256 5 1 1 1 3 1 100 217 71 67 1246 4 2 0 224 1 5 4 1 0 +"26556" 4 256 5 1 1 1 3 1 100 217 67 131 875 1 8 0 2992 5 95 64 0 1 +"26557" 4 256 5 1 1 1 3 1 100 217 131 124 1342 5 7 1 252 1 5 7 0 0 +"26558" 4 256 5 1 1 1 3 1 100 217 124 217 917 9 5 1 1335 4 75 93 1 1 +"26559" 4 256 5 1 1 1 4 1 100 550 100 150 1297 8 7 1 1025 3 50 50 1 1 +"26560" 4 256 5 1 1 1 4 1 100 550 150 225 1165 3 10 0 596 3 50 75 0 1 +"26561" 4 256 5 1 1 1 4 1 100 550 225 112 949 7 9 1 245 3 50 113 0 0 +"26562" 4 256 5 1 1 1 4 1 100 550 112 196 1007 9 1 1 2557 4 75 84 1 1 +"26563" 4 256 5 1 1 1 4 1 100 550 196 294 1360 2 3 0 758 3 50 98 0 1 +"26564" 4 256 5 1 1 1 4 1 100 550 294 441 886 1 8 0 2002 3 50 147 0 1 +"26565" 4 256 5 1 1 1 4 1 100 550 441 463 1800 5 4 1 536 1 5 22 1 1 +"26566" 4 256 5 1 1 1 4 1 100 550 463 440 859 4 2 0 272 1 5 23 1 0 +"26567" 4 256 5 1 1 1 4 1 100 550 440 550 1000 6 3 1 246 2 25 110 1 1 +"26568" 4 257 2 0 1 0 1 1 100 22 100 150 5364 2 7 0 819 3 50 50 0 1 +"26569" 4 257 2 0 1 0 1 1 100 22 150 225 10417 3 7 0 431 3 50 75 0 1 +"26570" 4 257 2 0 1 0 1 1 100 22 225 439 1568 2 8 0 1547 1 95 214 0 1 +"26571" 4 257 2 0 1 0 1 1 100 22 439 22 2007 8 9 1 1516 1 95 417 0 0 +"26572" 4 257 3 1 1 0 1 0 100 1 100 5 6362 1 3 1 1313 1 95 95 0 0 +"26573" 4 257 3 1 1 0 1 0 100 1 5 10 2455 6 5 1 776 1 95 5 1 1 +"26574" 4 257 3 1 1 0 1 0 100 1 10 20 1880 2 8 0 1125 1 95 10 0 1 +"26575" 4 257 3 1 1 0 1 0 100 1 20 1 1516 8 9 1 682 1 95 19 0 0 +"26576" 4 257 3 1 1 0 2 0 100 0 100 195 3147 8 1 1 699 1 95 95 1 1 +"26577" 4 257 3 1 1 0 2 0 100 0 195 49 2256 6 2 0 794 2 75 146 1 0 +"26578" 4 257 3 1 1 0 2 0 100 0 49 2 1225 7 9 1 837 1 95 47 0 0 +"26579" 4 257 3 1 1 0 2 0 100 0 2 4 1266 2 10 0 1036 1 95 2 0 1 +"26580" 4 257 3 1 1 0 2 0 100 0 4 0 1824 5 3 0 991 1 95 4 1 0 +"26581" 4 257 3 1 1 0 3 1 100 28 100 195 2079 7 5 1 1249 1 95 95 1 1 +"26582" 4 257 3 1 1 0 3 1 100 28 195 10 1059 2 1 0 758 1 95 185 1 0 +"26583" 4 257 3 1 1 0 3 1 100 28 10 20 1407 8 6 1 810 1 95 10 1 1 +"26584" 4 257 3 1 1 0 3 1 100 28 20 39 1376 4 7 0 724 1 95 19 0 1 +"26585" 4 257 3 1 1 0 3 1 100 28 39 76 1855 3 10 0 732 1 95 37 0 1 +"26586" 4 257 3 1 1 0 3 1 100 28 76 4 1334 6 8 1 1284 1 95 72 0 0 +"26587" 4 257 3 1 1 0 3 1 100 28 4 8 1388 9 2 1 1103 1 95 4 1 1 +"26588" 4 257 3 1 1 0 3 1 100 28 8 16 2680 5 3 1 1294 1 95 8 1 1 +"26589" 4 257 3 1 1 0 3 1 100 28 16 28 1133 1 10 0 605 2 75 12 0 1 +"26590" 4 257 3 1 1 0 4 1 100 27 100 195 1998 2 3 0 828 1 95 95 0 1 +"26591" 4 257 3 1 1 0 4 1 100 27 195 380 1466 7 4 1 690 1 95 185 1 1 +"26592" 4 257 3 1 1 0 4 1 100 27 380 19 1014 3 1 0 492 1 95 361 1 0 +"26593" 4 257 3 1 1 0 4 1 100 27 19 37 1769 1 9 0 659 1 95 
18 0 1 +"26594" 4 257 3 1 1 0 4 1 100 27 37 72 2361 8 7 1 1094 1 95 35 1 1 +"26595" 4 257 3 1 1 0 4 1 100 27 72 140 1623 9 2 1 695 1 95 68 1 1 +"26596" 4 257 3 1 1 0 4 1 100 27 140 273 2715 5 6 0 1431 1 95 133 0 1 +"26597" 4 257 3 1 1 0 4 1 100 27 273 14 2400 6 8 1 1403 1 95 259 0 0 +"26598" 4 257 3 1 1 0 4 1 100 27 14 27 1246 4 7 0 1576 1 95 13 0 1 +"26599" 4 257 4 0 1 1 1 1 100 211 100 150 2762 8 3 1 1228 3 50 50 1 1 +"26600" 4 257 4 0 1 1 1 1 100 211 150 188 2127 3 7 0 1039 2 25 38 0 1 +"26601" 4 257 4 0 1 1 1 1 100 211 188 282 1803 8 2 1 563 3 50 94 1 1 +"26602" 4 257 4 0 1 1 1 1 100 211 282 211 1246 2 1 0 1033 2 25 71 1 0 +"26603" 4 257 5 1 1 1 1 1 100 334 100 150 3710 9 7 1 699 3 50 50 1 1 +"26604" 4 257 5 1 1 1 1 1 100 334 150 225 1583 4 8 0 1170 3 50 75 0 1 +"26605" 4 257 5 1 1 1 1 1 100 334 225 338 1055 8 2 1 691 3 50 113 1 1 +"26606" 4 257 5 1 1 1 1 1 100 334 338 253 1311 2 1 0 1311 2 25 85 1 0 +"26607" 4 257 5 1 1 1 1 1 100 334 253 316 1341 7 6 1 1387 2 25 63 1 1 +"26608" 4 257 5 1 1 1 1 1 100 334 316 237 2590 5 3 0 1241 2 25 79 1 0 +"26609" 4 257 5 1 1 1 1 1 100 334 237 356 3755 3 6 0 853 3 50 119 0 1 +"26610" 4 257 5 1 1 1 1 1 100 334 356 267 1826 6 9 1 1088 2 25 89 0 0 +"26611" 4 257 5 1 1 1 1 1 100 334 267 334 2098 1 4 0 1836 2 25 67 0 1 +"26612" 4 257 5 1 1 1 2 1 100 558 100 150 1733 2 9 0 1824 3 50 50 0 1 +"26613" 4 257 5 1 1 1 2 1 100 558 150 225 919 4 10 0 905 3 50 75 0 1 +"26614" 4 257 5 1 1 1 2 1 100 558 225 169 1699 3 1 0 992 2 25 56 1 0 +"26615" 4 257 5 1 1 1 2 1 100 558 169 254 1318 8 6 1 701 3 50 85 1 1 +"26616" 4 257 5 1 1 1 2 1 100 558 254 318 4873 5 7 0 1047 2 25 64 0 1 +"26617" 4 257 5 1 1 1 2 1 100 558 318 238 1690 6 8 1 1039 2 25 80 0 0 +"26618" 4 257 5 1 1 1 2 1 100 558 238 357 1635 7 5 1 804 3 50 119 1 1 +"26619" 4 257 5 1 1 1 2 1 100 558 357 446 1499 1 10 0 1074 2 25 89 0 1 +"26620" 4 257 5 1 1 1 2 1 100 558 446 558 908 9 3 1 471 2 25 112 1 1 +"26621" 4 257 5 1 1 1 3 1 100 224 100 150 1664 3 5 0 991 3 50 50 0 1 +"26622" 4 257 5 1 1 1 3 1 100 224 150 75 1956 8 9 1 197 3 50 75 0 0 +"26623" 4 257 5 1 1 1 3 1 100 224 75 113 1465 2 4 0 1073 3 50 38 0 1 +"26624" 4 257 5 1 1 1 3 1 100 224 113 170 4124 6 3 1 1317 3 50 57 1 1 +"26625" 4 257 5 1 1 1 3 1 100 224 170 255 1420 7 6 1 214 3 50 85 1 1 +"26626" 4 257 5 1 1 1 3 1 100 224 255 191 1169 4 2 0 3239 2 25 64 1 0 +"26627" 4 257 5 1 1 1 3 1 100 224 191 239 2458 1 8 0 1667 2 25 48 0 1 +"26628" 4 257 5 1 1 1 3 1 100 224 239 179 1777 5 7 1 1058 2 25 60 0 0 +"26629" 4 257 5 1 1 1 3 1 100 224 179 224 4588 9 5 1 3760 2 25 45 1 1 +"26630" 4 257 5 1 1 1 4 1 100 328 100 150 2166 8 7 1 1253 3 50 50 1 1 +"26631" 4 257 5 1 1 1 4 1 100 328 150 225 1006 3 10 0 1156 3 50 75 0 1 +"26632" 4 257 5 1 1 1 4 1 100 328 225 112 1419 7 9 1 917 3 50 113 0 0 +"26633" 4 257 5 1 1 1 4 1 100 328 112 196 1727 9 1 1 754 4 75 84 1 1 +"26634" 4 257 5 1 1 1 4 1 100 328 196 294 1596 2 3 0 1262 3 50 98 0 1 +"26635" 4 257 5 1 1 1 4 1 100 328 294 279 2511 1 8 1 1581 1 5 15 0 0 +"26636" 4 257 5 1 1 1 4 1 100 328 279 349 1510 5 4 1 1324 2 25 70 1 1 +"26637" 4 257 5 1 1 1 4 1 100 328 349 262 1771 4 2 0 813 2 25 87 1 0 +"26638" 4 257 5 1 1 1 4 1 100 328 262 328 1155 6 3 1 954 2 25 66 1 1 +"26639" 4 259 2 0 1 1 1 1 100 117 100 125 12706 8 3 1 4260 2 25 25 1 1 +"26640" 4 259 2 0 1 1 1 1 100 117 125 156 2918 3 7 0 1433 2 25 31 0 1 +"26641" 4 259 2 0 1 1 1 1 100 117 156 234 2226 8 2 1 789 3 50 78 1 1 +"26642" 4 259 2 0 1 1 1 1 100 117 234 117 1933 2 1 0 727 3 50 117 1 0 +"26643" 4 259 3 1 1 1 1 1 100 392 100 175 9878 9 7 1 566 4 75 75 1 1 +"26644" 4 259 3 1 1 
1 1 1 100 392 175 219 1904 4 8 0 1092 2 25 44 0 1 +"26645" 4 259 3 1 1 1 1 1 100 392 219 329 1468 8 2 1 436 3 50 110 1 1 +"26646" 4 259 3 1 1 1 1 1 100 392 329 247 1508 2 1 0 1450 2 25 82 1 0 +"26647" 4 259 3 1 1 1 1 1 100 392 247 371 1930 7 6 1 900 3 50 124 1 1 +"26648" 4 259 3 1 1 1 1 1 100 392 371 278 2079 5 3 0 897 2 25 93 1 0 +"26649" 4 259 3 1 1 1 1 1 100 392 278 348 1980 3 6 0 747 2 25 70 0 1 +"26650" 4 259 3 1 1 1 1 1 100 392 348 261 1477 6 9 1 1437 2 25 87 0 0 +"26651" 4 259 3 1 1 1 1 1 100 392 261 392 4685 1 4 0 573 3 50 131 0 1 +"26652" 4 259 3 1 1 1 2 1 100 462 100 150 1630 2 9 0 494 3 50 50 0 1 +"26653" 4 259 3 1 1 1 2 1 100 462 150 188 1452 4 10 0 1045 2 25 38 0 1 +"26654" 4 259 3 1 1 1 2 1 100 462 188 94 1048 3 1 0 572 3 50 94 1 0 +"26655" 4 259 3 1 1 1 2 1 100 462 94 165 1406 8 6 1 681 4 75 71 1 1 +"26656" 4 259 3 1 1 1 2 1 100 462 165 173 1590 5 7 0 751 1 5 8 0 1 +"26657" 4 259 3 1 1 1 2 1 100 462 173 164 1400 6 8 1 628 1 5 9 0 0 +"26658" 4 259 3 1 1 1 2 1 100 462 164 205 1307 7 5 1 578 2 25 41 1 1 +"26659" 4 259 3 1 1 1 2 1 100 462 205 308 1389 1 10 0 1040 3 50 103 0 1 +"26660" 4 259 3 1 1 1 2 1 100 462 308 462 1204 9 3 1 768 3 50 154 1 1 +"26661" 4 259 3 1 1 1 3 1 100 392 100 125 1456 3 5 0 1183 2 25 25 0 1 +"26662" 4 259 3 1 1 1 3 1 100 392 125 62 1325 8 9 1 467 3 50 63 0 0 +"26663" 4 259 3 1 1 1 3 1 100 392 62 93 1162 2 4 0 474 3 50 31 0 1 +"26664" 4 259 3 1 1 1 3 1 100 392 93 98 1200 6 3 1 468 1 5 5 1 1 +"26665" 4 259 3 1 1 1 3 1 100 392 98 123 932 7 6 1 556 2 25 25 1 1 +"26666" 4 259 3 1 1 1 3 1 100 392 123 92 1072 4 2 0 963 2 25 31 1 0 +"26667" 4 259 3 1 1 1 3 1 100 392 92 179 1041 1 8 0 1055 5 95 87 0 1 +"26668" 4 259 3 1 1 1 3 1 100 392 179 224 1346 5 7 0 416 2 25 45 0 1 +"26669" 4 259 3 1 1 1 3 1 100 392 224 392 1169 9 5 1 575 4 75 168 1 1 +"26670" 4 259 3 1 1 1 4 1 100 530 100 175 1313 8 7 1 721 4 75 75 1 1 +"26671" 4 259 3 1 1 1 4 1 100 530 175 219 1164 3 10 0 547 2 25 44 0 1 +"26672" 4 259 3 1 1 1 4 1 100 530 219 164 822 7 9 1 805 2 25 55 0 0 +"26673" 4 259 3 1 1 1 4 1 100 530 164 287 996 9 1 1 547 4 75 123 1 1 +"26674" 4 259 3 1 1 1 4 1 100 530 287 431 1238 2 3 0 1241 3 50 144 0 1 +"26675" 4 259 3 1 1 1 4 1 100 530 431 754 1028 1 8 0 457 4 75 323 0 1 +"26676" 4 259 3 1 1 1 4 1 100 530 754 565 1184 5 4 0 1093 2 25 189 1 0 +"26677" 4 259 3 1 1 1 4 1 100 530 565 424 1038 4 2 0 538 2 25 141 1 0 +"26678" 4 259 3 1 1 1 4 1 100 530 424 530 1382 6 3 1 1810 2 25 106 1 1 +"26679" 4 259 4 0 1 0 1 1 100 99 100 175 1571 2 7 0 566 2 75 75 0 1 +"26680" 4 259 4 0 1 0 1 1 100 99 175 263 1240 3 7 0 1155 3 50 88 0 1 +"26681" 4 259 4 0 1 0 1 1 100 99 263 395 1282 2 8 0 747 3 50 132 0 1 +"26682" 4 259 4 0 1 0 1 1 100 99 395 99 1197 8 9 1 754 2 75 296 0 0 +"26683" 4 259 5 1 1 0 1 1 100 452 100 195 2623 1 3 0 1528 1 95 95 0 1 +"26684" 4 259 5 1 1 0 1 1 100 452 195 244 1116 6 5 1 565 4 25 49 1 1 +"26685" 4 259 5 1 1 0 1 1 100 452 244 366 1392 2 8 0 499 3 50 122 0 1 +"26686" 4 259 5 1 1 0 1 1 100 452 366 183 1233 8 9 1 620 3 50 183 0 0 +"26687" 4 259 5 1 1 0 1 1 100 452 183 275 1533 3 4 0 1258 3 50 92 0 1 +"26688" 4 259 5 1 1 0 1 1 100 452 275 206 1123 5 7 1 1500 4 25 69 0 0 +"26689" 4 259 5 1 1 0 1 1 100 452 206 309 1386 7 4 1 2093 3 50 103 1 1 +"26690" 4 259 5 1 1 0 1 1 100 452 309 232 1575 4 1 0 674 4 25 77 1 0 +"26691" 4 259 5 1 1 0 1 1 100 452 232 452 1231 9 6 1 1338 1 95 220 1 1 +"26692" 4 259 5 1 1 0 2 1 100 856 100 175 6874 8 1 1 744 2 75 75 1 1 +"26693" 4 259 5 1 1 0 2 1 100 856 175 219 1066 6 2 1 1261 4 25 44 1 1 +"26694" 4 259 5 1 1 0 2 1 100 856 219 164 1406 7 9 1 345 4 
25 55 0 0 +"26695" 4 259 5 1 1 0 2 1 100 856 164 320 1190 2 10 0 427 1 95 156 0 1 +"26696" 4 259 5 1 1 0 2 1 100 856 320 240 1212 5 3 0 744 4 25 80 1 0 +"26697" 4 259 5 1 1 0 2 1 100 856 240 180 1098 4 2 0 1332 4 25 60 1 0 +"26698" 4 259 5 1 1 0 2 1 100 856 180 225 1242 3 5 0 629 4 25 45 0 1 +"26699" 4 259 5 1 1 0 2 1 100 856 225 439 980 9 4 1 883 1 95 214 1 1 +"26700" 4 259 5 1 1 0 2 1 100 856 439 856 1343 1 7 0 1076 1 95 417 0 1 +"26701" 4 259 5 1 1 0 3 1 100 31 100 195 1557 7 5 1 1089 1 95 95 1 1 +"26702" 4 259 5 1 1 0 3 1 100 31 195 10 1154 2 1 0 1129 1 95 185 1 0 +"26703" 4 259 5 1 1 0 3 1 100 31 10 20 1112 8 6 1 497 1 95 10 1 1 +"26704" 4 259 5 1 1 0 3 1 100 31 20 39 1095 4 7 0 712 1 95 19 0 1 +"26705" 4 259 5 1 1 0 3 1 100 31 39 76 780 3 10 0 532 1 95 37 0 1 +"26706" 4 259 5 1 1 0 3 1 100 31 76 4 977 6 8 1 832 1 95 72 0 0 +"26707" 4 259 5 1 1 0 3 1 100 31 4 8 982 9 2 1 349 1 95 4 1 1 +"26708" 4 259 5 1 1 0 3 1 100 31 8 16 1082 5 3 1 444 1 95 8 1 1 +"26709" 4 259 5 1 1 0 3 1 100 31 16 31 820 1 10 0 483 1 95 15 0 1 +"26710" 4 259 5 1 1 0 4 0 100 0 100 195 1500 2 3 0 346 1 95 95 0 1 +"26711" 4 259 5 1 1 0 4 0 100 0 195 380 1055 7 4 1 770 1 95 185 1 1 +"26712" 4 259 5 1 1 0 4 0 100 0 380 19 1019 3 1 0 864 1 95 361 1 0 +"26713" 4 259 5 1 1 0 4 0 100 0 19 37 1084 1 9 0 694 1 95 18 0 1 +"26714" 4 259 5 1 1 0 4 0 100 0 37 72 1050 8 7 1 437 1 95 35 1 1 +"26715" 4 259 5 1 1 0 4 0 100 0 72 140 685 9 2 1 242 1 95 68 1 1 +"26716" 4 259 5 1 1 0 4 0 100 0 140 7 876 5 6 1 664 1 95 133 0 0 +"26717" 4 259 5 1 1 0 4 0 100 0 7 0 1190 6 8 1 379 1 95 7 0 0 +"26718" 4 260 2 0 1 0 1 1 100 33 100 175 11140 2 7 0 1543 2 75 75 0 1 +"26719" 4 260 2 0 1 0 1 1 100 33 175 341 5507 3 7 0 747 1 95 166 0 1 +"26720" 4 260 2 0 1 0 1 1 100 33 341 665 2188 2 8 0 1562 1 95 324 0 1 +"26721" 4 260 2 0 1 0 1 1 100 33 665 33 1356 8 9 1 3513 1 95 632 0 0 +"26722" 4 260 3 1 1 0 1 1 100 1716 100 195 1980 1 3 0 515 1 95 95 0 1 +"26723" 4 260 3 1 1 0 1 1 100 1716 195 293 2656 6 5 1 615 3 50 98 1 1 +"26724" 4 260 3 1 1 0 1 1 100 1716 293 513 1983 2 8 0 4103 2 75 220 0 1 +"26725" 4 260 3 1 1 0 1 1 100 1716 513 385 1761 8 9 1 1379 4 25 128 0 0 +"26726" 4 260 3 1 1 0 1 1 100 1716 385 751 2230 3 4 0 874 1 95 366 0 1 +"26727" 4 260 3 1 1 0 1 1 100 1716 751 939 4621 5 7 0 1287 4 25 188 0 1 +"26728" 4 260 3 1 1 0 1 1 100 1716 939 1174 2671 7 4 1 653 4 25 235 1 1 +"26729" 4 260 3 1 1 0 1 1 100 1716 1174 880 2332 4 1 0 459 4 25 294 1 0 +"26730" 4 260 3 1 1 0 1 1 100 1716 880 1716 2338 9 6 1 897 1 95 836 1 1 +"26731" 4 260 3 1 1 0 2 0 100 1 100 195 2327 8 1 1 1526 1 95 95 1 1 +"26732" 4 260 3 1 1 0 2 0 100 1 195 293 3059 6 2 1 711 3 50 98 1 1 +"26733" 4 260 3 1 1 0 2 0 100 1 293 15 2122 7 9 1 971 1 95 278 0 0 +"26734" 4 260 3 1 1 0 2 0 100 1 15 29 1955 2 10 0 2596 1 95 14 0 1 +"26735" 4 260 3 1 1 0 2 0 100 1 29 1 5757 5 3 0 745 1 95 28 1 0 +"26736" 4 260 3 1 1 0 3 0 100 0 100 195 1847 7 5 1 2013 1 95 95 1 1 +"26737" 4 260 3 1 1 0 3 0 100 0 195 10 1675 2 1 0 793 1 95 185 1 0 +"26738" 4 260 3 1 1 0 3 0 100 0 10 20 1632 8 6 1 1184 1 95 10 1 1 +"26739" 4 260 3 1 1 0 3 0 100 0 20 39 1605 4 7 0 854 1 95 19 0 1 +"26740" 4 260 3 1 1 0 3 0 100 0 39 76 2322 3 10 0 681 1 95 37 0 1 +"26741" 4 260 3 1 1 0 3 0 100 0 76 4 1998 6 8 1 1989 1 95 72 0 0 +"26742" 4 260 3 1 1 0 3 0 100 0 4 8 1691 9 2 1 568 1 95 4 1 1 +"26743" 4 260 3 1 1 0 3 0 100 0 8 0 3731 5 3 0 2207 1 95 8 1 0 +"26744" 4 260 3 1 1 0 4 1 100 1653 100 195 3624 2 3 0 609 1 95 95 0 1 +"26745" 4 260 3 1 1 0 4 1 100 1653 195 380 1242 7 4 1 948 1 95 185 1 1 +"26746" 4 260 3 1 1 0 4 1 100 1653 380 
190 1340 3 1 0 1106 3 50 190 1 0 +"26747" 4 260 3 1 1 0 4 1 100 1653 190 371 1299 1 9 0 659 1 95 181 0 1 +"26748" 4 260 3 1 1 0 4 1 100 1653 371 723 1873 8 7 1 2293 1 95 352 1 1 +"26749" 4 260 3 1 1 0 4 1 100 1653 723 1410 1493 9 2 1 1036 1 95 687 1 1 +"26750" 4 260 3 1 1 0 4 1 100 1653 1410 1763 2744 5 6 0 963 4 25 353 0 1 +"26751" 4 260 3 1 1 0 4 1 100 1653 1763 1322 2859 6 8 1 689 4 25 441 0 0 +"26752" 4 260 3 1 1 0 4 1 100 1653 1322 1653 1621 4 7 0 502 4 25 331 0 1 +"26753" 4 260 4 0 1 1 1 1 100 191 100 125 4399 8 3 1 2043 2 25 25 1 1 +"26754" 4 260 4 0 1 1 1 1 100 191 125 219 3959 3 7 0 745 4 75 94 0 1 +"26755" 4 260 4 0 1 1 1 1 100 191 219 383 1634 8 2 1 995 4 75 164 1 1 +"26756" 4 260 4 0 1 1 1 1 100 191 383 191 1377 2 1 0 2301 3 50 192 1 0 +"26757" 4 260 5 1 1 1 1 1 100 814 100 195 1889 9 7 1 1309 5 95 95 1 1 +"26758" 4 260 5 1 1 1 1 1 100 814 195 244 1588 4 8 0 3398 2 25 49 0 1 +"26759" 4 260 5 1 1 1 1 1 100 814 244 366 1617 8 2 1 1266 3 50 122 1 1 +"26760" 4 260 5 1 1 1 1 1 100 814 366 183 1307 2 1 0 683 3 50 183 1 0 +"26761" 4 260 5 1 1 1 1 1 100 814 183 275 1720 7 6 1 1708 3 50 92 1 1 +"26762" 4 260 5 1 1 1 1 1 100 814 275 413 2024 5 3 1 565 3 50 138 1 1 +"26763" 4 260 5 1 1 1 1 1 100 814 413 620 1360 3 6 0 1413 3 50 207 0 1 +"26764" 4 260 5 1 1 1 1 1 100 814 620 465 2511 6 9 1 1605 2 25 155 0 0 +"26765" 4 260 5 1 1 1 1 1 100 814 465 814 1518 1 4 0 820 4 75 349 0 1 +"26766" 4 260 5 1 1 1 2 1 100 755 100 195 1641 2 9 0 905 5 95 95 0 1 +"26767" 4 260 5 1 1 1 2 1 100 755 195 244 1524 4 10 0 1089 2 25 49 0 1 +"26768" 4 260 5 1 1 1 2 1 100 755 244 122 1218 3 1 0 462 3 50 122 1 0 +"26769" 4 260 5 1 1 1 2 1 100 755 122 238 1549 8 6 1 442 5 95 116 1 1 +"26770" 4 260 5 1 1 1 2 1 100 755 238 298 1906 5 7 0 861 2 25 60 0 1 +"26771" 4 260 5 1 1 1 2 1 100 755 298 223 1385 6 8 1 1133 2 25 75 0 0 +"26772" 4 260 5 1 1 1 2 1 100 755 223 335 1732 7 5 1 1960 3 50 112 1 1 +"26773" 4 260 5 1 1 1 2 1 100 755 335 503 1280 1 10 0 1377 3 50 168 0 1 +"26774" 4 260 5 1 1 1 2 1 100 755 503 755 1381 9 3 1 521 3 50 252 1 1 +"26775" 4 260 5 1 1 1 3 1 100 705 100 175 1759 3 5 0 665 4 75 75 0 1 +"26776" 4 260 5 1 1 1 3 1 100 705 175 44 1847 8 9 1 1845 4 75 131 0 0 +"26777" 4 260 5 1 1 1 3 1 100 705 44 86 1483 2 4 0 891 5 95 42 0 1 +"26778" 4 260 5 1 1 1 3 1 100 705 86 129 1701 6 3 1 1946 3 50 43 1 1 +"26779" 4 260 5 1 1 1 3 1 100 705 129 194 1403 7 6 1 2822 3 50 65 1 1 +"26780" 4 260 5 1 1 1 3 1 100 705 194 184 1333 4 2 0 1099 1 5 10 1 0 +"26781" 4 260 5 1 1 1 3 1 100 705 184 322 1284 1 8 0 989 4 75 138 0 1 +"26782" 4 260 5 1 1 1 3 1 100 705 322 403 2107 5 7 0 1021 2 25 81 0 1 +"26783" 4 260 5 1 1 1 3 1 100 705 403 705 1282 9 5 1 707 4 75 302 1 1 +"26784" 4 260 5 1 1 1 4 1 100 818 100 175 1735 8 7 1 1035 4 75 75 1 1 +"26785" 4 260 5 1 1 1 4 1 100 818 175 263 1543 3 10 0 772 3 50 88 0 1 +"26786" 4 260 5 1 1 1 4 1 100 818 263 131 1316 7 9 1 659 3 50 132 0 0 +"26787" 4 260 5 1 1 1 4 1 100 818 131 255 1417 9 1 1 2484 5 95 124 1 1 +"26788" 4 260 5 1 1 1 4 1 100 818 255 446 1532 2 3 0 625 4 75 191 0 1 +"26789" 4 260 5 1 1 1 4 1 100 818 446 781 1512 1 8 0 2094 4 75 335 0 1 +"26790" 4 260 5 1 1 1 4 1 100 818 781 820 2416 5 4 1 1363 1 5 39 1 1 +"26791" 4 260 5 1 1 1 4 1 100 818 820 779 1551 4 2 0 2067 1 5 41 1 0 +"26792" 4 260 5 1 1 1 4 1 100 818 779 818 1461 6 3 1 1381 1 5 39 1 1 +"26793" 4 261 2 0 1 0 1 1 100 3 100 150 6054 2 7 0 1460 3 50 50 0 1 +"26794" 4 261 2 0 1 0 1 1 100 3 150 225 7331 3 7 0 580 3 50 75 0 1 +"26795" 4 261 2 0 1 0 1 1 100 3 225 56 2510 2 8 1 876 2 75 169 0 0 +"26796" 4 261 2 0 1 0 1 1 100 3 56 3 
2959 8 9 1 1424 1 95 53 0 0 +"26797" 4 261 3 1 1 0 1 1 100 209 100 195 5252 1 3 0 1344 1 95 95 0 1 +"26798" 4 261 3 1 1 0 1 1 100 209 195 293 1813 6 5 1 1716 3 50 98 1 1 +"26799" 4 261 3 1 1 0 1 1 100 209 293 440 2088 2 8 0 2419 3 50 147 0 1 +"26800" 4 261 3 1 1 0 1 1 100 209 440 330 2532 8 9 1 1218 4 25 110 0 0 +"26801" 4 261 3 1 1 0 1 1 100 209 330 247 4885 3 4 1 -22 4 25 83 0 0 +"26802" 4 261 3 1 1 0 1 1 100 209 247 185 2481 5 7 1 809 4 25 62 0 0 +"26803" 4 261 3 1 1 0 1 1 100 209 185 278 2479 7 4 1 887 3 50 93 1 1 +"26804" 4 261 3 1 1 0 1 1 100 209 278 139 2705 4 1 0 1575 3 50 139 1 0 +"26805" 4 261 3 1 1 0 1 1 100 209 139 209 2905 9 6 1 1957 3 50 70 1 1 +"26806" 4 261 3 1 1 0 2 1 100 841 100 195 3395 8 1 1 2183 1 95 95 1 1 +"26807" 4 261 3 1 1 0 2 1 100 841 195 293 2316 6 2 1 1716 3 50 98 1 1 +"26808" 4 261 3 1 1 0 2 1 100 841 293 366 2663 7 9 0 910 4 25 73 0 1 +"26809" 4 261 3 1 1 0 2 1 100 841 366 458 2256 2 10 0 787 4 25 92 0 1 +"26810" 4 261 3 1 1 0 2 1 100 841 458 573 1850 5 3 1 713 4 25 115 1 1 +"26811" 4 261 3 1 1 0 2 1 100 841 573 430 2356 4 2 0 930 4 25 143 1 0 +"26812" 4 261 3 1 1 0 2 1 100 841 430 538 5491 3 5 0 1020 4 25 108 0 1 +"26813" 4 261 3 1 1 0 2 1 100 841 538 673 2232 9 4 1 1364 4 25 135 1 1 +"26814" 4 261 3 1 1 0 2 1 100 841 673 841 3544 1 7 0 1038 4 25 168 0 1 +"26815" 4 261 3 1 1 0 3 1 100 39 100 150 2393 7 5 1 1157 3 50 50 1 1 +"26816" 4 261 3 1 1 0 3 1 100 39 150 75 1787 2 1 0 1413 3 50 75 1 0 +"26817" 4 261 3 1 1 0 3 1 100 39 75 113 1870 8 6 1 1260 3 50 38 1 1 +"26818" 4 261 3 1 1 0 3 1 100 39 113 56 2836 4 7 1 875 3 50 57 0 0 +"26819" 4 261 3 1 1 0 3 1 100 39 56 84 1935 3 10 0 841 3 50 28 0 1 +"26820" 4 261 3 1 1 0 3 1 100 39 84 42 1917 6 8 1 712 3 50 42 0 0 +"26821" 4 261 3 1 1 0 3 1 100 39 42 82 1492 9 2 1 1298 1 95 40 1 1 +"26822" 4 261 3 1 1 0 3 1 100 39 82 20 3089 5 3 0 1788 2 75 62 1 0 +"26823" 4 261 3 1 1 0 3 1 100 39 20 39 2084 1 10 0 1223 1 95 19 0 1 +"26824" 4 261 3 1 1 0 4 1 100 2379 100 195 2195 2 3 0 1504 1 95 95 0 1 +"26825" 4 261 3 1 1 0 4 1 100 2379 195 380 2078 7 4 1 992 1 95 185 1 1 +"26826" 4 261 3 1 1 0 4 1 100 2379 380 190 2478 3 1 0 853 3 50 190 1 0 +"26827" 4 261 3 1 1 0 4 1 100 2379 190 371 1738 1 9 0 885 1 95 181 0 1 +"26828" 4 261 3 1 1 0 4 1 100 2379 371 723 1672 8 7 1 1630 1 95 352 1 1 +"26829" 4 261 3 1 1 0 4 1 100 2379 723 1410 2493 9 2 1 884 1 95 687 1 1 +"26830" 4 261 3 1 1 0 4 1 100 2379 1410 2115 1815 5 6 0 1655 3 50 705 0 1 +"26831" 4 261 3 1 1 0 4 1 100 2379 2115 1586 1988 6 8 1 1060 4 25 529 0 0 +"26832" 4 261 3 1 1 0 4 1 100 2379 1586 2379 2367 4 7 0 1052 3 50 793 0 1 +"26833" 4 261 4 0 1 1 1 1 100 253 100 150 3863 8 3 1 1149 3 50 50 1 1 +"26834" 4 261 4 0 1 1 1 1 100 253 150 225 12284 3 7 0 1737 3 50 75 0 1 +"26835" 4 261 4 0 1 1 1 1 100 253 225 338 1340 8 2 1 770 3 50 113 1 1 +"26836" 4 261 4 0 1 1 1 1 100 253 338 253 2131 2 1 0 3827 2 25 85 1 0 +"26837" 4 261 5 1 1 1 1 1 100 216 100 150 2038 9 7 1 3396 3 50 50 1 1 +"26838" 4 261 5 1 1 1 1 1 100 216 150 225 4111 4 8 0 793 3 50 75 0 1 +"26839" 4 261 5 1 1 1 1 1 100 216 225 338 1624 8 2 1 969 3 50 113 1 1 +"26840" 4 261 5 1 1 1 1 1 100 216 338 169 1864 2 1 0 4261 3 50 169 1 0 +"26841" 4 261 5 1 1 1 1 1 100 216 169 296 1664 7 6 1 743 4 75 127 1 1 +"26842" 4 261 5 1 1 1 1 1 100 216 296 148 4625 5 3 0 1235 3 50 148 1 0 +"26843" 4 261 5 1 1 1 1 1 100 216 148 222 2157 3 6 0 1308 3 50 74 0 1 +"26844" 4 261 5 1 1 1 1 1 100 216 222 111 1443 6 9 1 900 3 50 111 0 0 +"26845" 4 261 5 1 1 1 1 1 100 216 111 216 1798 1 4 0 1999 5 95 105 0 1 +"26846" 4 261 5 1 1 1 2 1 100 684 100 150 
1898 2 9 0 1158 3 50 50 0 1 +"26847" 4 261 5 1 1 1 2 1 100 684 150 225 2884 4 10 0 1568 3 50 75 0 1 +"26848" 4 261 5 1 1 1 2 1 100 684 225 112 2323 3 1 0 1618 3 50 113 1 0 +"26849" 4 261 5 1 1 1 2 1 100 684 112 168 4346 8 6 1 1193 3 50 56 1 1 +"26850" 4 261 5 1 1 1 2 1 100 684 168 160 2229 5 7 1 1345 1 5 8 0 0 +"26851" 4 261 5 1 1 1 2 1 100 684 160 120 1678 6 8 1 571 2 25 40 0 0 +"26852" 4 261 5 1 1 1 2 1 100 684 120 180 1405 7 5 1 1136 3 50 60 1 1 +"26853" 4 261 5 1 1 1 2 1 100 684 180 351 2047 1 10 0 1612 5 95 171 0 1 +"26854" 4 261 5 1 1 1 2 1 100 684 351 684 2812 9 3 1 1317 5 95 333 1 1 +"26855" 4 261 5 1 1 1 3 1 100 217 100 150 1855 3 5 0 1893 3 50 50 0 1 +"26856" 4 261 5 1 1 1 3 1 100 217 150 75 1840 8 9 1 1598 3 50 75 0 0 +"26857" 4 261 5 1 1 1 3 1 100 217 75 113 1623 2 4 0 2991 3 50 38 0 1 +"26858" 4 261 5 1 1 1 3 1 100 217 113 170 1550 6 3 1 1146 3 50 57 1 1 +"26859" 4 261 5 1 1 1 3 1 100 217 170 255 2153 7 6 1 944 3 50 85 1 1 +"26860" 4 261 5 1 1 1 3 1 100 217 255 127 3497 4 2 0 1104 3 50 128 1 0 +"26861" 4 261 5 1 1 1 3 1 100 217 127 248 1489 1 8 0 960 5 95 121 0 1 +"26862" 4 261 5 1 1 1 3 1 100 217 248 124 3040 5 7 1 1597 3 50 124 0 0 +"26863" 4 261 5 1 1 1 3 1 100 217 124 217 1278 9 5 1 1231 4 75 93 1 1 +"26864" 4 261 5 1 1 1 4 1 100 943 100 175 1499 8 7 1 1442 4 75 75 1 1 +"26865" 4 261 5 1 1 1 4 1 100 943 175 263 1866 3 10 0 3831 3 50 88 0 1 +"26866" 4 261 5 1 1 1 4 1 100 943 263 131 1222 7 9 1 1202 3 50 132 0 0 +"26867" 4 261 5 1 1 1 4 1 100 943 131 255 1366 9 1 1 1011 5 95 124 1 1 +"26868" 4 261 5 1 1 1 4 1 100 943 255 383 2080 2 3 0 1347 3 50 128 0 1 +"26869" 4 261 5 1 1 1 4 1 100 943 383 575 2544 1 8 0 2220 3 50 192 0 1 +"26870" 4 261 5 1 1 1 4 1 100 943 575 1006 4336 5 4 1 1064 4 75 431 1 1 +"26871" 4 261 5 1 1 1 4 1 100 943 1006 754 4290 4 2 0 4692 2 25 252 1 0 +"26872" 4 261 5 1 1 1 4 1 100 943 754 943 7626 6 3 1 2813 2 25 189 1 1 +"26873" 4 263 2 0 1 0 1 1 100 26 100 150 6466 2 7 0 1308 3 50 50 0 1 +"26874" 4 263 2 0 1 0 1 1 100 26 150 263 13426 3 7 0 681 2 75 113 0 1 +"26875" 4 263 2 0 1 0 1 1 100 26 263 513 3390 2 8 0 1722 1 95 250 0 1 +"26876" 4 263 2 0 1 0 1 1 100 26 513 26 2662 8 9 1 1837 1 95 487 0 0 +"26877" 4 263 3 1 1 0 1 1 100 27 100 195 5486 1 3 0 3054 1 95 95 0 1 +"26878" 4 263 3 1 1 0 1 1 100 27 195 380 3098 6 5 1 1412 1 95 185 1 1 +"26879" 4 263 3 1 1 0 1 1 100 27 380 741 3406 2 8 0 1329 1 95 361 0 1 +"26880" 4 263 3 1 1 0 1 1 100 27 741 37 2095 8 9 1 1257 1 95 704 0 0 +"26881" 4 263 3 1 1 0 1 1 100 27 37 72 2082 3 4 0 1421 1 95 35 0 1 +"26882" 4 263 3 1 1 0 1 1 100 27 72 140 3239 5 7 0 922 1 95 68 0 1 +"26883" 4 263 3 1 1 0 1 1 100 27 140 273 1828 7 4 1 754 1 95 133 1 1 +"26884" 4 263 3 1 1 0 1 1 100 27 273 14 2123 4 1 0 716 1 95 259 1 0 +"26885" 4 263 3 1 1 0 1 1 100 27 14 27 1415 9 6 1 1178 1 95 13 1 1 +"26886" 4 263 3 1 1 0 2 1 100 31 100 195 3108 8 1 1 750 1 95 95 1 1 +"26887" 4 263 3 1 1 0 2 1 100 31 195 380 2912 6 2 1 779 1 95 185 1 1 +"26888" 4 263 3 1 1 0 2 1 100 31 380 19 2219 7 9 1 439 1 95 361 0 0 +"26889" 4 263 3 1 1 0 2 1 100 31 19 37 1359 2 10 0 492 1 95 18 0 1 +"26890" 4 263 3 1 1 0 2 1 100 31 37 72 2458 5 3 1 451 1 95 35 1 1 +"26891" 4 263 3 1 1 0 2 1 100 31 72 4 2034 4 2 0 767 1 95 68 1 0 +"26892" 4 263 3 1 1 0 2 1 100 31 4 8 1224 3 5 0 571 1 95 4 0 1 +"26893" 4 263 3 1 1 0 2 1 100 31 8 16 1213 9 4 1 1577 1 95 8 1 1 +"26894" 4 263 3 1 1 0 2 1 100 31 16 31 1516 1 7 0 1121 1 95 15 0 1 +"26895" 4 263 3 1 1 0 3 0 100 1 100 195 2455 7 5 1 512 1 95 95 1 1 +"26896" 4 263 3 1 1 0 3 0 100 1 195 10 1773 2 1 0 4742 1 95 185 1 0 +"26897" 4 263 3 1 
1 0 3 0 100 1 10 20 1745 8 6 1 524 1 95 10 1 1 +"26898" 4 263 3 1 1 0 3 0 100 1 20 1 2720 4 7 1 900 1 95 19 0 0 +"26899" 4 263 3 1 1 0 4 1 100 27 100 195 4154 2 3 0 578 1 95 95 0 1 +"26900" 4 263 3 1 1 0 4 1 100 27 195 380 1500 7 4 1 746 1 95 185 1 1 +"26901" 4 263 3 1 1 0 4 1 100 27 380 19 1996 3 1 0 1562 1 95 361 1 0 +"26902" 4 263 3 1 1 0 4 1 100 27 19 37 1365 1 9 0 378 1 95 18 0 1 +"26903" 4 263 3 1 1 0 4 1 100 27 37 72 1125 8 7 1 733 1 95 35 1 1 +"26904" 4 263 3 1 1 0 4 1 100 27 72 140 1488 9 2 1 646 1 95 68 1 1 +"26905" 4 263 3 1 1 0 4 1 100 27 140 273 1989 5 6 0 420 1 95 133 0 1 +"26906" 4 263 3 1 1 0 4 1 100 27 273 14 1453 6 8 1 1045 1 95 259 0 0 +"26907" 4 263 3 1 1 0 4 1 100 27 14 27 1184 4 7 0 549 1 95 13 0 1 +"26908" 4 263 4 0 1 1 1 1 100 169 100 150 5631 8 3 1 1329 3 50 50 1 1 +"26909" 4 263 4 0 1 1 1 1 100 169 150 225 3519 3 7 0 1829 3 50 75 0 1 +"26910" 4 263 4 0 1 1 1 1 100 169 225 338 2891 8 2 1 680 3 50 113 1 1 +"26911" 4 263 4 0 1 1 1 1 100 169 338 169 1661 2 1 0 939 3 50 169 1 0 +"26912" 4 263 5 1 1 1 1 1 100 627 100 150 2955 9 7 1 716 3 50 50 1 1 +"26913" 4 263 5 1 1 1 1 1 100 627 150 188 1615 4 8 0 1080 2 25 38 0 1 +"26914" 4 263 5 1 1 1 1 1 100 627 188 282 1106 8 2 1 539 3 50 94 1 1 +"26915" 4 263 5 1 1 1 1 1 100 627 282 268 1303 2 1 0 1238 1 5 14 1 0 +"26916" 4 263 5 1 1 1 1 1 100 627 268 335 1880 7 6 1 701 2 25 67 1 1 +"26917" 4 263 5 1 1 1 1 1 100 627 335 318 1385 5 3 0 485 1 5 17 1 0 +"26918" 4 263 5 1 1 1 1 1 100 627 318 477 1355 3 6 0 712 3 50 159 0 1 +"26919" 4 263 5 1 1 1 1 1 100 627 477 358 2252 6 9 1 1478 2 25 119 0 0 +"26920" 4 263 5 1 1 1 1 1 100 627 358 627 1333 1 4 0 852 4 75 269 0 1 +"26921" 4 263 5 1 1 1 2 1 100 975 100 175 2293 2 9 0 1654 4 75 75 0 1 +"26922" 4 263 5 1 1 1 2 1 100 975 175 184 3252 4 10 0 487 1 5 9 0 1 +"26923" 4 263 5 1 1 1 2 1 100 975 184 138 2612 3 1 0 894 2 25 46 1 0 +"26924" 4 263 5 1 1 1 2 1 100 975 138 207 1264 8 6 1 586 3 50 69 1 1 +"26925" 4 263 5 1 1 1 2 1 100 975 207 217 2956 5 7 0 605 1 5 10 0 1 +"26926" 4 263 5 1 1 1 2 1 100 975 217 163 2061 6 8 1 1008 2 25 54 0 0 +"26927" 4 263 5 1 1 1 2 1 100 975 163 318 1405 7 5 1 904 5 95 155 1 1 +"26928" 4 263 5 1 1 1 2 1 100 975 318 557 2227 1 10 0 711 4 75 239 0 1 +"26929" 4 263 5 1 1 1 2 1 100 975 557 975 1690 9 3 1 1615 4 75 418 1 1 +"26930" 4 263 5 1 1 1 3 1 100 565 100 150 2178 3 5 0 776 3 50 50 0 1 +"26931" 4 263 5 1 1 1 3 1 100 565 150 112 1528 8 9 1 3181 2 25 38 0 0 +"26932" 4 263 5 1 1 1 3 1 100 565 112 168 1363 2 4 0 560 3 50 56 0 1 +"26933" 4 263 5 1 1 1 3 1 100 565 168 176 1810 6 3 1 858 1 5 8 1 1 +"26934" 4 263 5 1 1 1 3 1 100 565 176 185 1366 7 6 1 571 1 5 9 1 1 +"26935" 4 263 5 1 1 1 3 1 100 565 185 176 1676 4 2 0 458 1 5 9 1 0 +"26936" 4 263 5 1 1 1 3 1 100 565 176 308 1153 1 8 0 632 4 75 132 0 1 +"26937" 4 263 5 1 1 1 3 1 100 565 308 323 2176 5 7 0 533 1 5 15 0 1 +"26938" 4 263 5 1 1 1 3 1 100 565 323 565 1178 9 5 1 528 4 75 242 1 1 +"26939" 4 263 5 1 1 1 4 1 100 231 100 175 1248 8 7 1 515 4 75 75 1 1 +"26940" 4 263 5 1 1 1 4 1 100 231 175 219 1472 3 10 0 903 2 25 44 0 1 +"26941" 4 263 5 1 1 1 4 1 100 231 219 109 1946 7 9 1 763 3 50 110 0 0 +"26942" 4 263 5 1 1 1 4 1 100 231 109 164 1500 9 1 1 443 3 50 55 1 1 +"26943" 4 263 5 1 1 1 4 1 100 231 164 246 1139 2 3 0 1306 3 50 82 0 1 +"26944" 4 263 5 1 1 1 4 1 100 231 246 308 1650 1 8 0 857 2 25 62 0 1 +"26945" 4 263 5 1 1 1 4 1 100 231 308 293 1549 5 4 0 641 1 5 15 1 0 +"26946" 4 263 5 1 1 1 4 1 100 231 293 220 1744 4 2 0 810 2 25 73 1 0 +"26947" 4 263 5 1 1 1 4 1 100 231 220 231 1225 6 3 1 516 1 5 11 1 1 +"26948" 4 273 
2 0 1 0 1 1 100 1 100 175 10860 2 7 0 1645 2 75 75 0 1 +"26949" 4 273 2 0 1 0 1 1 100 1 175 9 4355 3 7 1 1826 1 95 166 0 0 +"26950" 4 273 2 0 1 0 1 1 100 1 9 18 2224 2 8 0 1876 1 95 9 0 1 +"26951" 4 273 2 0 1 0 1 1 100 1 18 1 2648 8 9 1 823 1 95 17 0 0 +"26952" 4 273 3 1 1 0 1 0 100 1 100 195 2571 1 3 0 1335 1 95 95 0 1 +"26953" 4 273 3 1 1 0 1 0 100 1 195 10 3044 6 5 0 1001 1 95 185 1 0 +"26954" 4 273 3 1 1 0 1 0 100 1 10 20 3087 2 8 0 810 1 95 10 0 1 +"26955" 4 273 3 1 1 0 1 0 100 1 20 1 2592 8 9 1 755 1 95 19 0 0 +"26956" 4 273 3 1 1 0 2 1 100 39 100 195 2116 8 1 1 1061 1 95 95 1 1 +"26957" 4 273 3 1 1 0 2 1 100 39 195 293 2256 6 2 1 1243 3 50 98 1 1 +"26958" 4 273 3 1 1 0 2 1 100 39 293 440 2596 7 9 0 966 3 50 147 0 1 +"26959" 4 273 3 1 1 0 2 1 100 39 440 220 2622 2 10 1 831 3 50 220 0 0 +"26960" 4 273 3 1 1 0 2 1 100 39 220 110 3493 5 3 0 860 3 50 110 1 0 +"26961" 4 273 3 1 1 0 2 1 100 39 110 5 2672 4 2 0 2079 1 95 105 1 0 +"26962" 4 273 3 1 1 0 2 1 100 39 5 10 2098 3 5 0 955 1 95 5 0 1 +"26963" 4 273 3 1 1 0 2 1 100 39 10 20 2139 9 4 1 643 1 95 10 1 1 +"26964" 4 273 3 1 1 0 2 1 100 39 20 39 1753 1 7 0 956 1 95 19 0 1 +"26965" 4 273 3 1 1 0 3 0 100 0 100 50 2797 7 5 0 1669 3 50 50 1 0 +"26966" 4 273 3 1 1 0 3 0 100 0 50 2 1571 2 1 0 805 1 95 48 1 0 +"26967" 4 273 3 1 1 0 3 0 100 0 2 4 1948 8 6 1 821 1 95 2 1 1 +"26968" 4 273 3 1 1 0 3 0 100 0 4 0 2435 4 7 1 597 1 95 4 0 0 +"26969" 4 273 3 1 1 0 4 1 100 405 100 175 1893 2 3 0 1041 2 75 75 0 1 +"26970" 4 273 3 1 1 0 4 1 100 405 175 306 1644 7 4 1 963 2 75 131 1 1 +"26971" 4 273 3 1 1 0 4 1 100 405 306 15 1675 3 1 0 2568 1 95 291 1 0 +"26972" 4 273 3 1 1 0 4 1 100 405 15 29 2116 1 9 0 749 1 95 14 0 1 +"26973" 4 273 3 1 1 0 4 1 100 405 29 57 1694 8 7 1 683 1 95 28 1 1 +"26974" 4 273 3 1 1 0 4 1 100 405 57 111 1896 9 2 1 771 1 95 54 1 1 +"26975" 4 273 3 1 1 0 4 1 100 405 111 216 2179 5 6 0 1385 1 95 105 0 1 +"26976" 4 273 3 1 1 0 4 1 100 405 216 270 3044 6 8 0 658 4 25 54 0 1 +"26977" 4 273 3 1 1 0 4 1 100 405 270 405 2928 4 7 0 898 3 50 135 0 1 +"26978" 4 273 4 0 1 1 1 1 100 205 100 125 3520 8 3 1 654 2 25 25 1 1 +"26979" 4 273 4 0 1 1 1 1 100 205 125 156 1516 3 7 0 1722 2 25 31 0 1 +"26980" 4 273 4 0 1 1 1 1 100 205 156 195 4101 8 2 1 1408 2 25 39 1 1 +"26981" 4 273 4 0 1 1 1 1 100 205 195 205 3114 2 1 1 612 1 5 10 1 1 +"26982" 4 273 5 1 1 1 1 1 100 532 100 175 2145 9 7 1 585 4 75 75 1 1 +"26983" 4 273 5 1 1 1 1 1 100 532 175 219 1538 4 8 0 1398 2 25 44 0 1 +"26984" 4 273 5 1 1 1 1 1 100 532 219 329 1528 8 2 1 734 3 50 110 1 1 +"26985" 4 273 5 1 1 1 1 1 100 532 329 411 1248 2 1 1 651 2 25 82 1 1 +"26986" 4 273 5 1 1 1 1 1 100 532 411 432 2025 7 6 1 1022 1 5 21 1 1 +"26987" 4 273 5 1 1 1 1 1 100 532 432 324 1298 5 3 0 571 2 25 108 1 0 +"26988" 4 273 5 1 1 1 1 1 100 532 324 405 1188 3 6 0 569 2 25 81 0 1 +"26989" 4 273 5 1 1 1 1 1 100 532 405 304 1956 6 9 1 492 2 25 101 0 0 +"26990" 4 273 5 1 1 1 1 1 100 532 304 532 1106 1 4 0 947 4 75 228 0 1 +"26991" 4 273 5 1 1 1 2 1 100 806 100 175 1795 2 9 0 692 4 75 75 0 1 +"26992" 4 273 5 1 1 1 2 1 100 806 175 219 1112 4 10 0 745 2 25 44 0 1 +"26993" 4 273 5 1 1 1 2 1 100 806 219 230 1155 3 1 1 601 1 5 11 1 1 +"26994" 4 273 5 1 1 1 2 1 100 806 230 345 1282 8 6 1 595 3 50 115 1 1 +"26995" 4 273 5 1 1 1 2 1 100 806 345 328 1420 5 7 1 686 1 5 17 0 0 +"26996" 4 273 5 1 1 1 2 1 100 806 328 344 1147 6 8 0 602 1 5 16 0 1 +"26997" 4 273 5 1 1 1 2 1 100 806 344 430 1379 7 5 1 983 2 25 86 1 1 +"26998" 4 273 5 1 1 1 2 1 100 806 430 645 1054 1 10 0 721 3 50 215 0 1 +"26999" 4 273 5 1 1 1 2 1 100 806 645 806 
1388 9 3 1 567 2 25 161 1 1 +"27000" 4 273 5 1 1 1 3 1 100 242 100 150 1731 3 5 0 702 3 50 50 0 1 +"27001" 4 273 5 1 1 1 3 1 100 242 150 75 1058 8 9 1 1790 3 50 75 0 0 +"27002" 4 273 5 1 1 1 3 1 100 242 75 131 1046 2 4 0 650 4 75 56 0 1 +"27003" 4 273 5 1 1 1 3 1 100 242 131 98 1281 6 3 0 1642 2 25 33 1 0 +"27004" 4 273 5 1 1 1 3 1 100 242 98 172 1193 7 6 1 991 4 75 74 1 1 +"27005" 4 273 5 1 1 1 3 1 100 242 172 86 948 4 2 0 997 3 50 86 1 0 +"27006" 4 273 5 1 1 1 3 1 100 242 86 129 914 1 8 0 612 3 50 43 0 1 +"27007" 4 273 5 1 1 1 3 1 100 242 129 161 2853 5 7 0 874 2 25 32 0 1 +"27008" 4 273 5 1 1 1 3 1 100 242 161 242 1278 9 5 1 639 3 50 81 1 1 +"27009" 4 273 5 1 1 1 4 1 100 298 100 150 1943 8 7 1 416 3 50 50 1 1 +"27010" 4 273 5 1 1 1 4 1 100 298 150 225 930 3 10 0 572 3 50 75 0 1 +"27011" 4 273 5 1 1 1 4 1 100 298 225 236 2838 7 9 0 540 1 5 11 0 1 +"27012" 4 273 5 1 1 1 4 1 100 298 236 354 1079 9 1 1 586 3 50 118 1 1 +"27013" 4 273 5 1 1 1 4 1 100 298 354 531 1156 2 3 0 707 3 50 177 0 1 +"27014" 4 273 5 1 1 1 4 1 100 298 531 504 4118 1 8 1 448 1 5 27 0 0 +"27015" 4 273 5 1 1 1 4 1 100 298 504 529 1904 5 4 1 571 1 5 25 1 1 +"27016" 4 273 5 1 1 1 4 1 100 298 529 397 989 4 2 0 676 2 25 132 1 0 +"27017" 4 273 5 1 1 1 4 1 100 298 397 298 1376 6 3 0 525 2 25 99 1 0 +"27018" 4 277 2 0 1 0 1 1 100 98 100 150 7347 2 7 0 892 3 50 50 0 1 +"27019" 4 277 2 0 1 0 1 1 100 98 150 225 14070 3 7 0 3494 3 50 75 0 1 +"27020" 4 277 2 0 1 0 1 1 100 98 225 394 4549 2 8 0 970 2 75 169 0 1 +"27021" 4 277 2 0 1 0 1 1 100 98 394 98 3769 8 9 1 516 2 75 296 0 0 +"27022" 4 277 3 1 1 0 1 1 100 3450 100 195 10696 1 3 0 992 1 95 95 0 1 +"27023" 4 277 3 1 1 0 1 1 100 3450 195 293 2125 6 5 1 1142 3 50 98 1 1 +"27024" 4 277 3 1 1 0 1 1 100 3450 293 513 1540 2 8 0 720 2 75 220 0 1 +"27025" 4 277 3 1 1 0 1 1 100 3450 513 770 2851 8 9 0 1225 3 50 257 0 1 +"27026" 4 277 3 1 1 0 1 1 100 3450 770 1348 1948 3 4 0 885 2 75 578 0 1 +"27027" 4 277 3 1 1 0 1 1 100 3450 1348 2022 2759 5 7 0 938 3 50 674 0 1 +"27028" 4 277 3 1 1 0 1 1 100 3450 2022 3539 1683 7 4 1 441 2 75 1517 1 1 +"27029" 4 277 3 1 1 0 1 1 100 3450 3539 1769 1817 4 1 0 1152 3 50 1770 1 0 +"27030" 4 277 3 1 1 0 1 1 100 3450 1769 3450 2007 9 6 1 1045 1 95 1681 1 1 +"27031" 4 277 3 1 1 0 2 1 100 16 100 195 2858 8 1 1 2503 1 95 95 1 1 +"27032" 4 277 3 1 1 0 2 1 100 16 195 341 1677 6 2 1 579 2 75 146 1 1 +"27033" 4 277 3 1 1 0 2 1 100 16 341 85 1677 7 9 1 1090 2 75 256 0 0 +"27034" 4 277 3 1 1 0 2 1 100 16 85 166 1644 2 10 0 995 1 95 81 0 1 +"27035" 4 277 3 1 1 0 2 1 100 16 166 41 2209 5 3 0 1309 2 75 125 1 0 +"27036" 4 277 3 1 1 0 2 1 100 16 41 2 1860 4 2 0 961 1 95 39 1 0 +"27037" 4 277 3 1 1 0 2 1 100 16 2 4 2224 3 5 0 628 1 95 2 0 1 +"27038" 4 277 3 1 1 0 2 1 100 16 4 8 1676 9 4 1 529 1 95 4 1 1 +"27039" 4 277 3 1 1 0 2 1 100 16 8 16 1349 1 7 0 947 1 95 8 0 1 +"27040" 4 277 3 1 1 0 3 1 100 14 100 195 2266 7 5 1 955 1 95 95 1 1 +"27041" 4 277 3 1 1 0 3 1 100 14 195 10 1251 2 1 0 2164 1 95 185 1 0 +"27042" 4 277 3 1 1 0 3 1 100 14 10 20 1427 8 6 1 1101 1 95 10 1 1 +"27043" 4 277 3 1 1 0 3 1 100 14 20 35 1494 4 7 0 900 2 75 15 0 1 +"27044" 4 277 3 1 1 0 3 1 100 14 35 61 1559 3 10 0 1762 2 75 26 0 1 +"27045" 4 277 3 1 1 0 3 1 100 14 61 15 1860 6 8 1 761 2 75 46 0 0 +"27046" 4 277 3 1 1 0 3 1 100 14 15 29 1514 9 2 1 1048 1 95 14 1 1 +"27047" 4 277 3 1 1 0 3 1 100 14 29 7 1451 5 3 0 916 2 75 22 1 0 +"27048" 4 277 3 1 1 0 3 1 100 14 7 14 1118 1 10 0 1422 1 95 7 0 1 +"27049" 4 277 3 1 1 0 4 1 100 107 100 195 1487 2 3 0 1142 1 95 95 0 1 +"27050" 4 277 3 1 1 0 4 1 100 107 195 
341 2236 7 4 1 744 2 75 146 1 1 +"27051" 4 277 3 1 1 0 4 1 100 107 341 17 1274 3 1 0 1178 1 95 324 1 0 +"27052" 4 277 3 1 1 0 4 1 100 107 17 33 2767 1 9 0 881 1 95 16 0 1 +"27053" 4 277 3 1 1 0 4 1 100 107 33 64 1308 8 7 1 540 1 95 31 1 1 +"27054" 4 277 3 1 1 0 4 1 100 107 64 125 3063 9 2 1 707 1 95 61 1 1 +"27055" 4 277 3 1 1 0 4 1 100 107 125 244 1406 5 6 0 633 1 95 119 0 1 +"27056" 4 277 3 1 1 0 4 1 100 107 244 61 5360 6 8 1 1087 2 75 183 0 0 +"27057" 4 277 3 1 1 0 4 1 100 107 61 107 1481 4 7 0 528 2 75 46 0 1 +"27058" 4 277 4 0 1 1 1 1 100 115 100 150 7750 8 3 1 483 3 50 50 1 1 +"27059" 4 277 4 0 1 1 1 1 100 115 150 263 5618 3 7 0 1013 4 75 113 0 1 +"27060" 4 277 4 0 1 1 1 1 100 115 263 460 1274 8 2 1 1275 4 75 197 1 1 +"27061" 4 277 4 0 1 1 1 1 100 115 460 115 1340 2 1 0 1290 4 75 345 1 0 +"27062" 4 277 5 1 1 1 1 1 100 131 100 195 5538 9 7 1 649 5 95 95 1 1 +"27063" 4 277 5 1 1 1 1 1 100 131 195 244 1543 4 8 0 1068 2 25 49 0 1 +"27064" 4 277 5 1 1 1 1 1 100 131 244 427 1414 8 2 1 2193 4 75 183 1 1 +"27065" 4 277 5 1 1 1 1 1 100 131 427 107 1518 2 1 0 1164 4 75 320 1 0 +"27066" 4 277 5 1 1 1 1 1 100 131 107 80 2202 7 6 0 1991 2 25 27 1 0 +"27067" 4 277 5 1 1 1 1 1 100 131 80 100 2477 5 3 1 2821 2 25 20 1 1 +"27068" 4 277 5 1 1 1 1 1 100 131 100 175 1708 3 6 0 1097 4 75 75 0 1 +"27069" 4 277 5 1 1 1 1 1 100 131 175 87 2010 6 9 1 1064 3 50 88 0 0 +"27070" 4 277 5 1 1 1 1 1 100 131 87 131 1514 1 4 0 440 3 50 44 0 1 +"27071" 4 277 5 1 1 1 2 1 100 1646 100 195 1472 2 9 0 536 5 95 95 0 1 +"27072" 4 277 5 1 1 1 2 1 100 1646 195 146 1964 4 10 1 1780 2 25 49 0 0 +"27073" 4 277 5 1 1 1 2 1 100 1646 146 219 3012 3 1 1 371 3 50 73 1 1 +"27074" 4 277 5 1 1 1 2 1 100 1646 219 329 1574 8 6 1 465 3 50 110 1 1 +"27075" 4 277 5 1 1 1 2 1 100 1646 329 494 2048 5 7 0 501 3 50 165 0 1 +"27076" 4 277 5 1 1 1 2 1 100 1646 494 247 2138 6 8 1 979 3 50 247 0 0 +"27077" 4 277 5 1 1 1 2 1 100 1646 247 482 2281 7 5 1 1179 5 95 235 1 1 +"27078" 4 277 5 1 1 1 2 1 100 1646 482 844 1536 1 10 0 1764 4 75 362 0 1 +"27079" 4 277 5 1 1 1 2 1 100 1646 844 1646 1885 9 3 1 1498 5 95 802 1 1 +"27080" 4 277 5 1 1 1 3 1 100 20 100 150 1520 3 5 0 2494 3 50 50 0 1 +"27081" 4 277 5 1 1 1 3 1 100 20 150 37 1796 8 9 1 765 4 75 113 0 0 +"27082" 4 277 5 1 1 1 3 1 100 20 37 56 1718 2 4 0 1082 3 50 19 0 1 +"27083" 4 277 5 1 1 1 3 1 100 20 56 28 3372 6 3 0 481 3 50 28 1 0 +"27084" 4 277 5 1 1 1 3 1 100 20 28 42 1808 7 6 1 375 3 50 14 1 1 +"27085" 4 277 5 1 1 1 3 1 100 20 42 10 1546 4 2 0 1711 4 75 32 1 0 +"27086" 4 277 5 1 1 1 3 1 100 20 10 20 1627 1 8 0 892 5 95 10 0 1 +"27087" 4 277 5 1 1 1 3 1 100 20 20 10 2062 5 7 1 1282 3 50 10 0 0 +"27088" 4 277 5 1 1 1 3 1 100 20 10 20 1738 9 5 1 693 5 95 10 1 1 +"27089" 4 277 5 1 1 1 4 1 100 256 100 175 2163 8 7 1 770 4 75 75 1 1 +"27090" 4 277 5 1 1 1 4 1 100 256 175 219 1931 3 10 0 1573 2 25 44 0 1 +"27091" 4 277 5 1 1 1 4 1 100 256 219 55 1663 7 9 1 862 4 75 164 0 0 +"27092" 4 277 5 1 1 1 4 1 100 256 55 96 1498 9 1 1 929 4 75 41 1 1 +"27093" 4 277 5 1 1 1 4 1 100 256 96 187 1938 2 3 0 876 5 95 91 0 1 +"27094" 4 277 5 1 1 1 4 1 100 256 187 365 1672 1 8 0 1990 5 95 178 0 1 +"27095" 4 277 5 1 1 1 4 1 100 256 365 456 2229 5 4 1 920 2 25 91 1 1 +"27096" 4 277 5 1 1 1 4 1 100 256 456 342 2426 4 2 0 1356 2 25 114 1 0 +"27097" 4 277 5 1 1 1 4 1 100 256 342 256 2233 6 3 0 1573 2 25 86 1 0 +"27098" 4 281 2 0 1 1 1 1 100 179 100 75 9889 8 3 0 1376 2 25 25 1 0 +"27099" 4 281 2 0 1 1 1 1 100 179 75 113 5531 3 7 0 794 3 50 38 0 1 +"27100" 4 281 2 0 1 1 1 1 100 179 113 119 2914 8 2 1 1148 1 5 6 1 1 +"27101" 
4 281 2 0 1 1 1 1 100 179 119 179 2035 2 1 1 1848 3 50 60 1 1 +"27102" 4 281 3 1 1 1 1 0 100 1 100 175 4458 9 7 1 1379 4 75 75 1 1 +"27103" 4 281 3 1 1 1 1 0 100 1 175 131 2943 4 8 1 1747 2 25 44 0 0 +"27104" 4 281 3 1 1 1 1 0 100 1 131 98 8466 8 2 0 421 2 25 33 1 0 +"27105" 4 281 3 1 1 1 1 0 100 1 98 123 2982 2 1 1 4141 2 25 25 1 1 +"27106" 4 281 3 1 1 1 1 0 100 1 123 61 3246 7 6 0 1441 3 50 62 1 0 +"27107" 4 281 3 1 1 1 1 0 100 1 61 3 1234 5 3 0 1037 5 95 58 1 0 +"27108" 4 281 3 1 1 1 1 0 100 1 3 5 3049 3 6 0 2502 4 75 2 0 1 +"27109" 4 281 3 1 1 1 1 0 100 1 5 1 2276 6 9 1 2053 4 75 4 0 0 +"27110" 4 281 3 1 1 1 2 1 100 1063 100 195 6773 2 9 0 817 5 95 95 0 1 +"27111" 4 281 3 1 1 1 2 1 100 1063 195 293 2343 4 10 0 389 3 50 98 0 1 +"27112" 4 281 3 1 1 1 2 1 100 1063 293 366 2944 3 1 1 1658 2 25 73 1 1 +"27113" 4 281 3 1 1 1 2 1 100 1063 366 458 3264 8 6 1 4483 2 25 92 1 1 +"27114" 4 281 3 1 1 1 2 1 100 1063 458 573 2771 5 7 0 837 2 25 115 0 1 +"27115" 4 281 3 1 1 1 2 1 100 1063 573 716 1795 6 8 0 387 2 25 143 0 1 +"27116" 4 281 3 1 1 1 2 1 100 1063 716 895 1357 7 5 1 403 2 25 179 1 1 +"27117" 4 281 3 1 1 1 2 1 100 1063 895 850 3956 1 10 1 701 1 5 45 0 0 +"27118" 4 281 3 1 1 1 2 1 100 1063 850 1063 1953 9 3 1 4978 2 25 213 1 1 +"27119" 4 281 3 1 1 1 3 1 100 1002 100 175 2667 3 5 0 1498 4 75 75 0 1 +"27120" 4 281 3 1 1 1 3 1 100 1002 175 219 3887 8 9 0 421 2 25 44 0 1 +"27121" 4 281 3 1 1 1 3 1 100 1002 219 329 1141 2 4 0 293 3 50 110 0 1 +"27122" 4 281 3 1 1 1 3 1 100 1002 329 494 1812 6 3 1 308 3 50 165 1 1 +"27123" 4 281 3 1 1 1 3 1 100 1002 494 469 2298 7 6 0 731 1 5 25 1 0 +"27124" 4 281 3 1 1 1 3 1 100 1002 469 352 1269 4 2 0 974 2 25 117 1 0 +"27125" 4 281 3 1 1 1 3 1 100 1002 352 686 1469 1 8 0 1125 5 95 334 0 1 +"27126" 4 281 3 1 1 1 3 1 100 1002 686 514 3052 5 7 1 296 2 25 172 0 0 +"27127" 4 281 3 1 1 1 3 1 100 1002 514 1002 1679 9 5 1 0 5 95 488 1 1 +"27128" 4 281 3 1 1 1 4 1 100 437 100 150 2418 8 7 1 781 3 50 50 1 1 +"27129" 4 281 3 1 1 1 4 1 100 437 150 263 941 3 10 0 430 4 75 113 0 1 +"27130" 4 281 3 1 1 1 4 1 100 437 263 66 1007 7 9 1 1637 4 75 197 0 0 +"27131" 4 281 3 1 1 1 4 1 100 437 66 129 1973 9 1 1 1125 5 95 63 1 1 +"27132" 4 281 3 1 1 1 4 1 100 437 129 252 1068 2 3 0 1895 5 95 123 0 1 +"27133" 4 281 3 1 1 1 4 1 100 437 252 239 3290 1 8 1 659 1 5 13 0 0 +"27134" 4 281 3 1 1 1 4 1 100 437 239 466 1649 5 4 1 1449 5 95 227 1 1 +"27135" 4 281 3 1 1 1 4 1 100 437 466 583 2449 4 2 1 426 2 25 117 1 1 +"27136" 4 281 3 1 1 1 4 1 100 437 583 437 2307 6 3 0 2747 2 25 146 1 0 +"27137" 4 281 4 0 1 0 1 1 100 18 100 195 5891 2 7 0 3928 1 95 95 0 1 +"27138" 4 281 4 0 1 0 1 1 100 18 195 380 2387 3 7 0 791 1 95 185 0 1 +"27139" 4 281 4 0 1 0 1 1 100 18 380 361 2966 2 8 1 1687 5 5 19 0 0 +"27140" 4 281 4 0 1 0 1 1 100 18 361 18 1124 8 9 1 324 1 95 343 0 0 +"27141" 4 281 5 1 1 0 1 0 100 0 100 195 2438 1 3 0 352 1 95 95 0 1 +"27142" 4 281 5 1 1 0 1 0 100 0 195 380 1220 6 5 1 287 1 95 185 1 1 +"27143" 4 281 5 1 1 0 1 0 100 0 380 741 1182 2 8 0 476 1 95 361 0 1 +"27144" 4 281 5 1 1 0 1 0 100 0 741 37 994 8 9 1 1137 1 95 704 0 0 +"27145" 4 281 5 1 1 0 1 0 100 0 37 72 3096 3 4 0 275 1 95 35 0 1 +"27146" 4 281 5 1 1 0 1 0 100 0 72 4 3123 5 7 1 356 1 95 68 0 0 +"27147" 4 281 5 1 1 0 1 0 100 0 4 8 1928 7 4 1 218 1 95 4 1 1 +"27148" 4 281 5 1 1 0 1 0 100 0 8 0 932 4 1 0 317 1 95 8 1 0 +"27149" 4 281 5 1 1 0 2 1 100 7729 100 195 2420 8 1 1 350 1 95 95 1 1 +"27150" 4 281 5 1 1 0 2 1 100 7729 195 380 1499 6 2 1 753 1 95 185 1 1 +"27151" 4 281 5 1 1 0 2 1 100 7729 380 741 2187 7 9 0 225 1 95 361 0 1 
+"27152" 4 281 5 1 1 0 2 1 100 7729 741 1112 2683 2 10 0 905 3 50 371 0 1 +"27153" 4 281 5 1 1 0 2 1 100 7729 1112 2168 2852 5 3 1 482 1 95 1056 1 1 +"27154" 4 281 5 1 1 0 2 1 100 7729 2168 1626 3424 4 2 0 2731 4 25 542 1 0 +"27155" 4 281 5 1 1 0 2 1 100 7729 1626 3171 2262 3 5 0 528 1 95 1545 0 1 +"27156" 4 281 5 1 1 0 2 1 100 7729 3171 6183 1604 9 4 1 461 1 95 3012 1 1 +"27157" 4 281 5 1 1 0 2 1 100 7729 6183 7729 2148 1 7 0 4196 4 25 1546 0 1 +"27158" 4 281 5 1 1 0 3 0 100 0 100 195 1479 7 5 1 382 1 95 95 1 1 +"27159" 4 281 5 1 1 0 3 0 100 0 195 10 1018 2 1 0 302 1 95 185 1 0 +"27160" 4 281 5 1 1 0 3 0 100 0 10 0 1085 8 6 0 887 1 95 10 1 0 +"27161" 4 281 5 1 1 0 4 1 100 2030 100 195 2196 2 3 0 408 1 95 95 0 1 +"27162" 4 281 5 1 1 0 4 1 100 2030 195 293 1178 7 4 1 208 3 50 98 1 1 +"27163" 4 281 5 1 1 0 4 1 100 2030 293 366 2835 3 1 1 3834 4 25 73 1 1 +"27164" 4 281 5 1 1 0 4 1 100 2030 366 714 1273 1 9 0 354 1 95 348 0 1 +"27165" 4 281 5 1 1 0 4 1 100 2030 714 1392 1254 8 7 1 304 1 95 678 1 1 +"27166" 4 281 5 1 1 0 4 1 100 2030 1392 1322 2894 9 2 0 971 5 5 70 1 0 +"27167" 4 281 5 1 1 0 4 1 100 2030 1322 1388 1354 5 6 0 416 5 5 66 0 1 +"27168" 4 281 5 1 1 0 4 1 100 2030 1388 1041 2801 6 8 1 1978 4 25 347 0 0 +"27169" 4 281 5 1 1 0 4 1 100 2030 1041 2030 1905 4 7 0 276 1 95 989 0 1 +"27170" 4 283 2 0 1 1 1 1 100 69 100 125 8221 8 3 1 2035 2 25 25 1 1 +"27171" 4 283 2 0 1 1 1 1 100 69 125 131 3864 3 7 0 4621 1 5 6 0 1 +"27172" 4 283 2 0 1 1 1 1 100 69 131 138 1206 8 2 1 3202 1 5 7 1 1 +"27173" 4 283 2 0 1 1 1 1 100 69 138 69 1231 2 1 0 1147 3 50 69 1 0 +"27174" 4 283 3 1 1 1 1 1 100 324 100 125 2799 9 7 1 1792 2 25 25 1 1 +"27175" 4 283 3 1 1 1 1 1 100 324 125 156 9040 4 8 0 2351 2 25 31 0 1 +"27176" 4 283 3 1 1 1 1 1 100 324 156 234 1892 8 2 1 24 3 50 78 1 1 +"27177" 4 283 3 1 1 1 1 1 100 324 234 175 1228 2 1 0 1647 2 25 59 1 0 +"27178" 4 283 3 1 1 1 1 1 100 324 175 219 1198 7 6 1 1203 2 25 44 1 1 +"27179" 4 283 3 1 1 1 1 1 100 324 219 230 1463 5 3 1 2695 1 5 11 1 1 +"27180" 4 283 3 1 1 1 1 1 100 324 230 288 932 3 6 0 3429 2 25 58 0 1 +"27181" 4 283 3 1 1 1 1 1 100 324 288 216 1111 6 9 1 792 2 25 72 0 0 +"27182" 4 283 3 1 1 1 1 1 100 324 216 324 1191 1 4 0 1621 3 50 108 0 1 +"27183" 4 283 3 1 1 1 2 1 100 656 100 150 1830 2 9 0 3357 3 50 50 0 1 +"27184" 4 283 3 1 1 1 2 1 100 656 150 263 1290 4 10 0 1059 4 75 113 0 1 +"27185" 4 283 3 1 1 1 2 1 100 656 263 197 1477 3 1 0 1565 2 25 66 1 0 +"27186" 4 283 3 1 1 1 2 1 100 656 197 296 1031 8 6 1 1557 3 50 99 1 1 +"27187" 4 283 3 1 1 1 2 1 100 656 296 311 2696 5 7 0 1349 1 5 15 0 1 +"27188" 4 283 3 1 1 1 2 1 100 656 311 233 1386 6 8 1 2877 2 25 78 0 0 +"27189" 4 283 3 1 1 1 2 1 100 656 233 291 1177 7 5 1 2727 2 25 58 1 1 +"27190" 4 283 3 1 1 1 2 1 100 656 291 437 1690 1 10 0 880 3 50 146 0 1 +"27191" 4 283 3 1 1 1 2 1 100 656 437 656 1315 9 3 1 1067 3 50 219 1 1 +"27192" 4 283 3 1 1 1 3 1 100 164 100 175 1696 3 5 0 2933 4 75 75 0 1 +"27193" 4 283 3 1 1 1 3 1 100 164 175 87 951 8 9 1 3175 3 50 88 0 0 +"27194" 4 283 3 1 1 1 3 1 100 164 87 152 1053 2 4 0 1961 4 75 65 0 1 +"27195" 4 283 3 1 1 1 3 1 100 164 152 190 809 6 3 1 4904 2 25 38 1 1 +"27196" 4 283 3 1 1 1 3 1 100 164 190 200 1100 7 6 1 2604 1 5 10 1 1 +"27197" 4 283 3 1 1 1 3 1 100 164 200 100 1085 4 2 0 1055 3 50 100 1 0 +"27198" 4 283 3 1 1 1 3 1 100 164 100 175 1755 1 8 0 2621 4 75 75 0 1 +"27199" 4 283 3 1 1 1 3 1 100 164 175 131 3813 5 7 1 661 2 25 44 0 0 +"27200" 4 283 3 1 1 1 3 1 100 164 131 164 1115 9 5 1 1988 2 25 33 1 1 +"27201" 4 283 3 1 1 1 4 1 100 996 100 175 1370 8 7 1 862 4 75 75 
1 1 +"27202" 4 283 3 1 1 1 4 1 100 996 175 263 1218 3 10 0 694 3 50 88 0 1 +"27203" 4 283 3 1 1 1 4 1 100 996 263 197 914 7 9 1 3909 2 25 66 0 0 +"27204" 4 283 3 1 1 1 4 1 100 996 197 296 1328 9 1 1 463 3 50 99 1 1 +"27205" 4 283 3 1 1 1 4 1 100 996 296 444 854 2 3 0 1595 3 50 148 0 1 +"27206" 4 283 3 1 1 1 4 1 100 996 444 666 876 1 8 0 2241 3 50 222 0 1 +"27207" 4 283 3 1 1 1 4 1 100 996 666 999 2891 5 4 1 1008 3 50 333 1 1 +"27208" 4 283 3 1 1 1 4 1 100 996 999 949 1061 4 2 0 1040 1 5 50 1 0 +"27209" 4 283 3 1 1 1 4 1 100 996 949 996 915 6 3 1 1509 1 5 47 1 1 +"27210" 4 283 4 0 1 0 1 1 100 37 100 195 1114 2 7 0 915 1 95 95 0 1 +"27211" 4 283 4 0 1 0 1 1 100 37 195 380 845 3 7 0 1186 1 95 185 0 1 +"27212" 4 283 4 0 1 0 1 1 100 37 380 741 911 2 8 0 2146 1 95 361 0 1 +"27213" 4 283 4 0 1 0 1 1 100 37 741 37 1065 8 9 1 1180 1 95 704 0 0 +"27214" 4 283 5 1 1 0 1 0 100 0 100 175 3946 1 3 0 812 2 75 75 0 1 +"27215" 4 283 5 1 1 0 1 0 100 0 175 341 1578 6 5 1 1861 1 95 166 1 1 +"27216" 4 283 5 1 1 0 1 0 100 0 341 665 794 2 8 0 722 1 95 324 0 1 +"27217" 4 283 5 1 1 0 1 0 100 0 665 33 1100 8 9 1 4085 1 95 632 0 0 +"27218" 4 283 5 1 1 0 1 0 100 0 33 64 744 3 4 0 1418 1 95 31 0 1 +"27219" 4 283 5 1 1 0 1 0 100 0 64 3 948 5 7 1 3404 1 95 61 0 0 +"27220" 4 283 5 1 1 0 1 0 100 0 3 6 896 7 4 1 1123 1 95 3 1 1 +"27221" 4 283 5 1 1 0 1 0 100 0 6 0 1091 4 1 0 4237 1 95 6 1 0 +"27222" 4 283 5 1 1 0 2 0 100 0 100 195 3026 8 1 1 1336 1 95 95 1 1 +"27223" 4 283 5 1 1 0 2 0 100 0 195 380 901 6 2 1 1464 1 95 185 1 1 +"27224" 4 283 5 1 1 0 2 0 100 0 380 19 871 7 9 1 4955 1 95 361 0 0 +"27225" 4 283 5 1 1 0 2 0 100 0 19 37 988 2 10 0 1203 1 95 18 0 1 +"27226" 4 283 5 1 1 0 2 0 100 0 37 2 2239 5 3 0 1682 1 95 35 1 0 +"27227" 4 283 5 1 1 0 2 0 100 0 2 0 1175 4 2 0 1333 1 95 2 1 0 +"27228" 4 283 5 1 1 0 3 1 100 31 100 195 1208 7 5 1 628 1 95 95 1 1 +"27229" 4 283 5 1 1 0 3 1 100 31 195 10 947 2 1 0 3356 1 95 185 1 0 +"27230" 4 283 5 1 1 0 3 1 100 31 10 20 1242 8 6 1 3269 1 95 10 1 1 +"27231" 4 283 5 1 1 0 3 1 100 31 20 39 3342 4 7 0 2117 1 95 19 0 1 +"27232" 4 283 5 1 1 0 3 1 100 31 39 76 1373 3 10 0 1329 1 95 37 0 1 +"27233" 4 283 5 1 1 0 3 1 100 31 76 4 951 6 8 1 1351 1 95 72 0 0 +"27234" 4 283 5 1 1 0 3 1 100 31 4 8 948 9 2 1 2604 1 95 4 1 1 +"27235" 4 283 5 1 1 0 3 1 100 31 8 16 1517 5 3 1 1659 1 95 8 1 1 +"27236" 4 283 5 1 1 0 3 1 100 31 16 31 1201 1 10 0 2189 1 95 15 0 1 +"27237" 4 283 5 1 1 0 4 0 100 0 100 195 1165 2 3 0 1291 1 95 95 0 1 +"27238" 4 283 5 1 1 0 4 0 100 0 195 380 1691 7 4 1 2864 1 95 185 1 1 +"27239" 4 283 5 1 1 0 4 0 100 0 380 19 1589 3 1 0 2295 1 95 361 1 0 +"27240" 4 283 5 1 1 0 4 0 100 0 19 37 799 1 9 0 1910 1 95 18 0 1 +"27241" 4 283 5 1 1 0 4 0 100 0 37 72 865 8 7 1 2501 1 95 35 1 1 +"27242" 4 283 5 1 1 0 4 0 100 0 72 140 1187 9 2 1 1328 1 95 68 1 1 +"27243" 4 283 5 1 1 0 4 0 100 0 140 7 1246 5 6 1 728 1 95 133 0 0 +"27244" 4 283 5 1 1 0 4 0 100 0 7 0 1331 6 8 1 1129 1 95 7 0 0 +"27245" 4 285 2 0 1 0 1 1 100 230 100 150 7729 2 7 0 807 3 50 50 0 1 +"27246" 4 285 2 0 1 0 1 1 100 230 150 263 10565 3 7 0 3981 2 75 113 0 1 +"27247" 4 285 2 0 1 0 1 1 100 230 263 460 3028 2 8 0 746 2 75 197 0 1 +"27248" 4 285 2 0 1 0 1 1 100 230 460 230 2284 8 9 1 765 3 50 230 0 0 +"27249" 4 285 3 1 1 0 1 1 100 1887 100 150 4158 1 3 0 822 3 50 50 0 1 +"27250" 4 285 3 1 1 0 1 1 100 1887 150 263 4692 6 5 1 907 2 75 113 1 1 +"27251" 4 285 3 1 1 0 1 1 100 1887 263 460 2069 2 8 0 1184 2 75 197 0 1 +"27252" 4 285 3 1 1 0 1 1 100 1887 460 115 3197 8 9 1 771 2 75 345 0 0 +"27253" 4 285 3 1 1 0 1 1 100 1887 115 201 3531 3 
4 0 369 2 75 86 0 1 +"27254" 4 285 3 1 1 0 1 1 100 1887 201 352 5004 5 7 0 228 2 75 151 0 1 +"27255" 4 285 3 1 1 0 1 1 100 1887 352 616 3566 7 4 1 635 2 75 264 1 1 +"27256" 4 285 3 1 1 0 1 1 100 1887 616 1078 4170 4 1 1 777 2 75 462 1 1 +"27257" 4 285 3 1 1 0 1 1 100 1887 1078 1887 5129 9 6 1 387 2 75 809 1 1 +"27258" 4 285 3 1 1 0 2 1 100 76 100 175 3594 8 1 1 757 2 75 75 1 1 +"27259" 4 285 3 1 1 0 2 1 100 76 175 306 3599 6 2 1 609 2 75 131 1 1 +"27260" 4 285 3 1 1 0 2 1 100 76 306 536 4529 7 9 0 447 2 75 230 0 1 +"27261" 4 285 3 1 1 0 2 1 100 76 536 804 2655 2 10 0 1039 3 50 268 0 1 +"27262" 4 285 3 1 1 0 2 1 100 76 804 40 5739 5 3 0 2207 1 95 764 1 0 +"27263" 4 285 3 1 1 0 2 1 100 76 40 10 2531 4 2 0 289 2 75 30 1 0 +"27264" 4 285 3 1 1 0 2 1 100 76 10 20 2152 3 5 0 3519 1 95 10 0 1 +"27265" 4 285 3 1 1 0 2 1 100 76 20 39 1781 9 4 1 2710 1 95 19 1 1 +"27266" 4 285 3 1 1 0 2 1 100 76 39 76 1906 1 7 0 605 1 95 37 0 1 +"27267" 4 285 3 1 1 0 3 1 100 31 100 195 1865 7 5 1 1166 1 95 95 1 1 +"27268" 4 285 3 1 1 0 3 1 100 31 195 10 1955 2 1 0 492 1 95 185 1 0 +"27269" 4 285 3 1 1 0 3 1 100 31 10 20 2346 8 6 1 616 1 95 10 1 1 +"27270" 4 285 3 1 1 0 3 1 100 31 20 39 1640 4 7 0 631 1 95 19 0 1 +"27271" 4 285 3 1 1 0 3 1 100 31 39 76 2502 3 10 0 581 1 95 37 0 1 +"27272" 4 285 3 1 1 0 3 1 100 31 76 4 1783 6 8 1 815 1 95 72 0 0 +"27273" 4 285 3 1 1 0 3 1 100 31 4 8 1830 9 2 1 892 1 95 4 1 1 +"27274" 4 285 3 1 1 0 3 1 100 31 8 16 1879 5 3 1 747 1 95 8 1 1 +"27275" 4 285 3 1 1 0 3 1 100 31 16 31 1661 1 10 0 2003 1 95 15 0 1 +"27276" 4 285 3 1 1 0 4 1 100 2332 100 195 1935 2 3 0 800 1 95 95 0 1 +"27277" 4 285 3 1 1 0 4 1 100 2332 195 49 3048 7 4 0 661 2 75 146 1 0 +"27278" 4 285 3 1 1 0 4 1 100 2332 49 96 2290 3 1 1 1579 1 95 47 1 1 +"27279" 4 285 3 1 1 0 4 1 100 2332 96 187 2205 1 9 0 1266 1 95 91 0 1 +"27280" 4 285 3 1 1 0 4 1 100 2332 187 327 2635 8 7 1 787 2 75 140 1 1 +"27281" 4 285 3 1 1 0 4 1 100 2332 327 638 4027 9 2 1 833 1 95 311 1 1 +"27282" 4 285 3 1 1 0 4 1 100 2332 638 957 2703 5 6 0 572 3 50 319 0 1 +"27283" 4 285 3 1 1 0 4 1 100 2332 957 1196 5550 6 8 0 948 4 25 239 0 1 +"27284" 4 285 3 1 1 0 4 1 100 2332 1196 2332 4060 4 7 0 1000 1 95 1136 0 1 +"27285" 4 285 4 0 1 1 1 1 100 296 100 150 4677 8 3 1 3581 3 50 50 1 1 +"27286" 4 285 4 0 1 1 1 1 100 296 150 263 4648 3 7 0 352 4 75 113 0 1 +"27287" 4 285 4 0 1 1 1 1 100 296 263 395 2740 8 2 1 888 3 50 132 1 1 +"27288" 4 285 4 0 1 1 1 1 100 296 395 296 2195 2 1 0 2352 2 25 99 1 0 +"27289" 4 285 5 1 1 1 1 1 100 98 100 195 3839 9 7 1 270 5 95 95 1 1 +"27290" 4 285 5 1 1 1 1 1 100 98 195 293 2646 4 8 0 638 3 50 98 0 1 +"27291" 4 285 5 1 1 1 1 1 100 98 293 513 1876 8 2 1 1625 4 75 220 1 1 +"27292" 4 285 5 1 1 1 1 1 100 98 513 26 2017 2 1 0 431 5 95 487 1 0 +"27293" 4 285 5 1 1 1 1 1 100 98 26 39 1696 7 6 1 1029 3 50 13 1 1 +"27294" 4 285 5 1 1 1 1 1 100 98 39 19 1658 5 3 0 280 3 50 20 1 0 +"27295" 4 285 5 1 1 1 1 1 100 98 19 33 1368 3 6 0 309 4 75 14 0 1 +"27296" 4 285 5 1 1 1 1 1 100 98 33 50 2251 6 9 0 1358 3 50 17 0 1 +"27297" 4 285 5 1 1 1 1 1 100 98 50 98 2163 1 4 0 690 5 95 48 0 1 +"27298" 4 285 5 1 1 1 2 1 100 837 100 195 1868 2 9 0 506 5 95 95 0 1 +"27299" 4 285 5 1 1 1 2 1 100 837 195 244 2310 4 10 0 414 2 25 49 0 1 +"27300" 4 285 5 1 1 1 2 1 100 837 244 256 2144 3 1 1 959 1 5 12 1 1 +"27301" 4 285 5 1 1 1 2 1 100 837 256 384 2062 8 6 1 834 3 50 128 1 1 +"27302" 4 285 5 1 1 1 2 1 100 837 384 365 2240 5 7 1 1219 1 5 19 0 0 +"27303" 4 285 5 1 1 1 2 1 100 837 365 182 1558 6 8 1 266 3 50 183 0 0 +"27304" 4 285 5 1 1 1 2 1 100 837 182 319 1571 
7 5 1 454 4 75 137 1 1 +"27305" 4 285 5 1 1 1 2 1 100 837 319 558 2301 1 10 0 638 4 75 239 0 1 +"27306" 4 285 5 1 1 1 2 1 100 837 558 837 2060 9 3 1 320 3 50 279 1 1 +"27307" 4 285 5 1 1 1 3 1 100 32 100 175 1903 3 5 0 907 4 75 75 0 1 +"27308" 4 285 5 1 1 1 3 1 100 32 175 87 2308 8 9 1 1125 3 50 88 0 0 +"27309" 4 285 5 1 1 1 3 1 100 32 87 152 1476 2 4 0 212 4 75 65 0 1 +"27310" 4 285 5 1 1 1 3 1 100 32 152 38 2687 6 3 0 413 4 75 114 1 0 +"27311" 4 285 5 1 1 1 3 1 100 32 38 28 1487 7 6 0 326 2 25 10 1 0 +"27312" 4 285 5 1 1 1 3 1 100 32 28 21 1971 4 2 0 996 2 25 7 1 0 +"27313" 4 285 5 1 1 1 3 1 100 32 21 32 1729 1 8 0 661 3 50 11 0 1 +"27314" 4 285 5 1 1 1 3 1 100 32 32 30 2795 5 7 1 3092 1 5 2 0 0 +"27315" 4 285 5 1 1 1 3 1 100 32 30 32 1173 9 5 1 1411 1 5 2 1 1 +"27316" 4 285 5 1 1 1 4 1 100 848 100 175 8501 8 7 1 1694 4 75 75 1 1 +"27317" 4 285 5 1 1 1 4 1 100 848 175 263 2203 3 10 0 769 3 50 88 0 1 +"27318" 4 285 5 1 1 1 4 1 100 848 263 276 2873 7 9 0 4110 1 5 13 0 1 +"27319" 4 285 5 1 1 1 4 1 100 848 276 483 1840 9 1 1 846 4 75 207 1 1 +"27320" 4 285 5 1 1 1 4 1 100 848 483 459 3491 2 3 1 499 1 5 24 0 0 +"27321" 4 285 5 1 1 1 4 1 100 848 459 895 1368 1 8 0 461 5 95 436 0 1 +"27322" 4 285 5 1 1 1 4 1 100 848 895 940 3252 5 4 1 1863 1 5 45 1 1 +"27323" 4 285 5 1 1 1 4 1 100 848 940 893 2228 4 2 0 1015 1 5 47 1 0 +"27324" 4 285 5 1 1 1 4 1 100 848 893 848 2172 6 3 0 459 1 5 45 1 0 +"27325" 4 287 2 0 1 0 1 1 100 115 100 150 6128 2 7 0 1554 3 50 50 0 1 +"27326" 4 287 2 0 1 0 1 1 100 115 150 263 16557 3 7 0 3775 2 75 113 0 1 +"27327" 4 287 2 0 1 0 1 1 100 115 263 460 2353 2 8 0 1583 2 75 197 0 1 +"27328" 4 287 2 0 1 0 1 1 100 115 460 115 1640 8 9 1 1520 2 75 345 0 0 +"27329" 4 287 3 1 1 0 1 1 100 16 100 150 6485 1 3 0 854 3 50 50 0 1 +"27330" 4 287 3 1 1 0 1 1 100 16 150 263 1963 6 5 1 1891 2 75 113 1 1 +"27331" 4 287 3 1 1 0 1 1 100 16 263 460 2407 2 8 0 674 2 75 197 0 1 +"27332" 4 287 3 1 1 0 1 1 100 16 460 23 1863 8 9 1 1228 1 95 437 0 0 +"27333" 4 287 3 1 1 0 1 1 100 16 23 45 2038 3 4 0 2443 1 95 22 0 1 +"27334" 4 287 3 1 1 0 1 1 100 16 45 79 5340 5 7 0 612 2 75 34 0 1 +"27335" 4 287 3 1 1 0 1 1 100 16 79 154 1828 7 4 1 1250 1 95 75 1 1 +"27336" 4 287 3 1 1 0 1 1 100 16 154 8 1668 4 1 0 1347 1 95 146 1 0 +"27337" 4 287 3 1 1 0 1 1 100 16 8 16 1319 9 6 1 989 1 95 8 1 1 +"27338" 4 287 3 1 1 0 2 0 100 1 100 195 1646 8 1 1 1269 1 95 95 1 1 +"27339" 4 287 3 1 1 0 2 0 100 1 195 293 2542 6 2 1 1380 3 50 98 1 1 +"27340" 4 287 3 1 1 0 2 0 100 1 293 15 3135 7 9 1 1495 1 95 278 0 0 +"27341" 4 287 3 1 1 0 2 0 100 1 15 29 1269 2 10 0 334 1 95 14 0 1 +"27342" 4 287 3 1 1 0 2 0 100 1 29 1 1659 5 3 0 458 1 95 28 1 0 +"27343" 4 287 3 1 1 0 3 0 100 0 100 195 1689 7 5 1 494 1 95 95 1 1 +"27344" 4 287 3 1 1 0 3 0 100 0 195 10 1460 2 1 0 1172 1 95 185 1 0 +"27345" 4 287 3 1 1 0 3 0 100 0 10 20 2658 8 6 1 329 1 95 10 1 1 +"27346" 4 287 3 1 1 0 3 0 100 0 20 39 1207 4 7 0 715 1 95 19 0 1 +"27347" 4 287 3 1 1 0 3 0 100 0 39 76 4054 3 10 0 298 1 95 37 0 1 +"27348" 4 287 3 1 1 0 3 0 100 0 76 4 1705 6 8 1 670 1 95 72 0 0 +"27349" 4 287 3 1 1 0 3 0 100 0 4 0 1834 9 2 0 657 1 95 4 1 0 +"27350" 4 287 3 1 1 0 4 1 100 27 100 195 1330 2 3 0 904 1 95 95 0 1 +"27351" 4 287 3 1 1 0 4 1 100 27 195 380 1547 7 4 1 431 1 95 185 1 1 +"27352" 4 287 3 1 1 0 4 1 100 27 380 19 1536 3 1 0 380 1 95 361 1 0 +"27353" 4 287 3 1 1 0 4 1 100 27 19 37 1007 1 9 0 597 1 95 18 0 1 +"27354" 4 287 3 1 1 0 4 1 100 27 37 72 1190 8 7 1 307 1 95 35 1 1 +"27355" 4 287 3 1 1 0 4 1 100 27 72 140 2561 9 2 1 397 1 95 68 1 1 +"27356" 4 287 3 1 1 0 4 1 100 
27 140 273 1991 5 6 0 638 1 95 133 0 1 +"27357" 4 287 3 1 1 0 4 1 100 27 273 14 1628 6 8 1 700 1 95 259 0 0 +"27358" 4 287 3 1 1 0 4 1 100 27 14 27 1088 4 7 0 370 1 95 13 0 1 +"27359" 4 287 4 0 1 1 1 1 100 115 100 150 4293 8 3 1 326 3 50 50 1 1 +"27360" 4 287 4 0 1 1 1 1 100 115 150 263 5515 3 7 0 2355 4 75 113 0 1 +"27361" 4 287 4 0 1 1 1 1 100 115 263 460 1672 8 2 1 1134 4 75 197 1 1 +"27362" 4 287 4 0 1 1 1 1 100 115 460 115 2166 2 1 0 1309 4 75 345 1 0 +"27363" 4 287 5 1 1 1 1 1 100 6 100 195 2919 9 7 1 658 5 95 95 1 1 +"27364" 4 287 5 1 1 1 1 1 100 6 195 293 1415 4 8 0 3186 3 50 98 0 1 +"27365" 4 287 5 1 1 1 1 1 100 6 293 513 1976 8 2 1 1096 4 75 220 1 1 +"27366" 4 287 5 1 1 1 1 1 100 6 513 26 1745 2 1 0 1732 5 95 487 1 0 +"27367" 4 287 5 1 1 1 1 1 100 6 26 27 1950 7 6 1 343 1 5 1 1 1 +"27368" 4 287 5 1 1 1 1 1 100 6 27 26 1652 5 3 0 699 1 5 1 1 0 +"27369" 4 287 5 1 1 1 1 1 100 6 26 27 1463 3 6 0 2360 1 5 1 0 1 +"27370" 4 287 5 1 1 1 1 1 100 6 27 26 1445 6 9 1 541 1 5 1 0 0 +"27371" 4 287 5 1 1 1 1 1 100 6 26 6 1313 1 4 1 1048 4 75 20 0 0 +"27372" 4 287 5 1 1 1 2 1 100 2414 100 175 6118 2 9 0 1562 4 75 75 0 1 +"27373" 4 287 5 1 1 1 2 1 100 2414 175 341 5180 4 10 0 908 5 95 166 0 1 +"27374" 4 287 5 1 1 1 2 1 100 2414 341 426 1710 3 1 1 786 2 25 85 1 1 +"27375" 4 287 5 1 1 1 2 1 100 2414 426 746 1970 8 6 1 2275 4 75 320 1 1 +"27376" 4 287 5 1 1 1 2 1 100 2414 746 1306 2916 5 7 0 2759 4 75 560 0 1 +"27377" 4 287 5 1 1 1 2 1 100 2414 1306 1241 2160 6 8 1 1087 1 5 65 0 0 +"27378" 4 287 5 1 1 1 2 1 100 2414 1241 1179 1401 7 5 0 828 1 5 62 1 0 +"27379" 4 287 5 1 1 1 2 1 100 2414 1179 1238 970 1 10 0 417 1 5 59 0 1 +"27380" 4 287 5 1 1 1 2 1 100 2414 1238 2414 1691 9 3 1 1336 5 95 1176 1 1 +"27381" 4 287 5 1 1 1 3 1 100 1 100 105 1592 3 5 0 1570 1 5 5 0 1 +"27382" 4 287 5 1 1 1 3 1 100 1 105 100 1117 8 9 1 517 1 5 5 0 0 +"27383" 4 287 5 1 1 1 3 1 100 1 100 175 938 2 4 0 1403 4 75 75 0 1 +"27384" 4 287 5 1 1 1 3 1 100 1 175 131 1374 6 3 0 1554 2 25 44 1 0 +"27385" 4 287 5 1 1 1 3 1 100 1 131 255 3549 7 6 1 1635 5 95 124 1 1 +"27386" 4 287 5 1 1 1 3 1 100 1 255 13 930 4 2 0 1284 5 95 242 1 0 +"27387" 4 287 5 1 1 1 3 1 100 1 13 3 1444 1 8 1 974 4 75 10 0 0 +"27388" 4 287 5 1 1 1 3 1 100 1 3 5 1700 5 7 0 681 3 50 2 0 1 +"27389" 4 287 5 1 1 1 3 1 100 1 5 1 1462 9 5 0 1351 4 75 4 1 0 +"27390" 4 287 5 1 1 1 4 0 100 1 100 5 3769 8 7 0 2332 5 95 95 1 0 +"27391" 4 287 5 1 1 1 4 0 100 1 5 1 6771 3 10 1 718 4 75 4 0 0 +"27392" 4 288 2 0 1 0 1 1 100 140 100 150 8844 2 7 0 1165 3 50 50 0 1 +"27393" 4 288 2 0 1 0 1 1 100 140 150 225 14515 3 7 0 1662 3 50 75 0 1 +"27394" 4 288 2 0 1 0 1 1 100 140 225 281 2020 2 8 0 626 4 25 56 0 1 +"27395" 4 288 2 0 1 0 1 1 100 140 281 140 1513 8 9 1 1055 3 50 141 0 0 +"27396" 4 288 3 1 1 0 1 1 100 209 100 150 7978 1 3 0 972 3 50 50 0 1 +"27397" 4 288 3 1 1 0 1 1 100 209 150 75 1971 6 5 0 1196 3 50 75 1 0 +"27398" 4 288 3 1 1 0 1 1 100 209 75 113 1616 2 8 0 1457 3 50 38 0 1 +"27399" 4 288 3 1 1 0 1 1 100 209 113 56 1929 8 9 1 1219 3 50 57 0 0 +"27400" 4 288 3 1 1 0 1 1 100 209 56 98 1422 3 4 0 1104 2 75 42 0 1 +"27401" 4 288 3 1 1 0 1 1 100 209 98 123 2889 5 7 0 806 4 25 25 0 1 +"27402" 4 288 3 1 1 0 1 1 100 209 123 215 1596 7 4 1 1022 2 75 92 1 1 +"27403" 4 288 3 1 1 0 1 1 100 209 215 107 3387 4 1 0 1231 3 50 108 1 0 +"27404" 4 288 3 1 1 0 1 1 100 209 107 209 1698 9 6 1 1623 1 95 102 1 1 +"27405" 4 288 3 1 1 0 2 1 100 515 100 195 2558 8 1 1 1202 1 95 95 1 1 +"27406" 4 288 3 1 1 0 2 1 100 515 195 293 1630 6 2 1 1247 3 50 98 1 1 +"27407" 4 288 3 1 1 0 2 1 100 515 293 146 2073 
7 9 1 1436 3 50 147 0 0 +"27408" 4 288 3 1 1 0 2 1 100 515 146 256 1477 2 10 0 1205 2 75 110 0 1 +"27409" 4 288 3 1 1 0 2 1 100 515 256 192 1921 5 3 0 1052 4 25 64 1 0 +"27410" 4 288 3 1 1 0 2 1 100 515 192 96 1631 4 2 0 1087 3 50 96 1 0 +"27411" 4 288 3 1 1 0 2 1 100 515 96 168 1281 3 5 0 938 2 75 72 0 1 +"27412" 4 288 3 1 1 0 2 1 100 515 168 294 1474 9 4 1 1002 2 75 126 1 1 +"27413" 4 288 3 1 1 0 2 1 100 515 294 515 1183 1 7 0 1173 2 75 221 0 1 +"27414" 4 288 3 1 1 0 3 1 100 481 100 175 1598 7 5 1 1216 2 75 75 1 1 +"27415" 4 288 3 1 1 0 3 1 100 481 175 44 1323 2 1 0 1122 2 75 131 1 0 +"27416" 4 288 3 1 1 0 3 1 100 481 44 86 1218 8 6 1 1653 1 95 42 1 1 +"27417" 4 288 3 1 1 0 3 1 100 481 86 151 1788 4 7 0 1453 2 75 65 0 1 +"27418" 4 288 3 1 1 0 3 1 100 481 151 227 1373 3 10 0 1649 3 50 76 0 1 +"27419" 4 288 3 1 1 0 3 1 100 481 227 113 1472 6 8 1 1264 3 50 114 0 0 +"27420" 4 288 3 1 1 0 3 1 100 481 113 220 1752 9 2 1 1363 1 95 107 1 1 +"27421" 4 288 3 1 1 0 3 1 100 481 220 275 1815 5 3 1 1151 4 25 55 1 1 +"27422" 4 288 3 1 1 0 3 1 100 481 275 481 1237 1 10 0 1304 2 75 206 0 1 +"27423" 4 288 3 1 1 0 4 1 100 951 100 175 1485 2 3 0 1108 2 75 75 0 1 +"27424" 4 288 3 1 1 0 4 1 100 951 175 306 1133 7 4 1 1709 2 75 131 1 1 +"27425" 4 288 3 1 1 0 4 1 100 951 306 76 1369 3 1 0 1269 2 75 230 1 0 +"27426" 4 288 3 1 1 0 4 1 100 951 76 148 1374 1 9 0 1306 1 95 72 0 1 +"27427" 4 288 3 1 1 0 4 1 100 951 148 289 1385 8 7 1 1933 1 95 141 1 1 +"27428" 4 288 3 1 1 0 4 1 100 951 289 564 1222 9 2 1 1081 1 95 275 1 1 +"27429" 4 288 3 1 1 0 4 1 100 951 564 846 1971 5 6 0 1115 3 50 282 0 1 +"27430" 4 288 3 1 1 0 4 1 100 951 846 634 1411 6 8 1 918 4 25 212 0 0 +"27431" 4 288 3 1 1 0 4 1 100 951 634 951 1312 4 7 0 815 3 50 317 0 1 +"27432" 4 288 4 0 1 1 1 1 100 223 100 150 4923 8 3 1 1487 3 50 50 1 1 +"27433" 4 288 4 0 1 1 1 1 100 223 150 188 3997 3 7 0 1789 2 25 38 0 1 +"27434" 4 288 4 0 1 1 1 1 100 223 188 235 1509 8 2 1 851 2 25 47 1 1 +"27435" 4 288 4 0 1 1 1 1 100 223 235 223 951 2 1 0 1568 1 5 12 1 0 +"27436" 4 288 5 1 1 1 1 1 100 112 100 150 1757 9 7 1 1079 3 50 50 1 1 +"27437" 4 288 5 1 1 1 1 1 100 112 150 158 1233 4 8 0 932 1 5 8 0 1 +"27438" 4 288 5 1 1 1 1 1 100 112 158 198 989 8 2 1 1644 2 25 40 1 1 +"27439" 4 288 5 1 1 1 1 1 100 112 198 99 1211 2 1 0 848 3 50 99 1 0 +"27440" 4 288 5 1 1 1 1 1 100 112 99 173 1285 7 6 1 469 4 75 74 1 1 +"27441" 4 288 5 1 1 1 1 1 100 112 173 86 1460 5 3 0 645 3 50 87 1 0 +"27442" 4 288 5 1 1 1 1 1 100 112 86 129 1243 3 6 0 1398 3 50 43 0 1 +"27443" 4 288 5 1 1 1 1 1 100 112 129 64 1217 6 9 1 853 3 50 65 0 0 +"27444" 4 288 5 1 1 1 1 1 100 112 64 112 914 1 4 0 1429 4 75 48 0 1 +"27445" 4 288 5 1 1 1 2 1 100 494 100 175 1580 2 9 0 991 4 75 75 0 1 +"27446" 4 288 5 1 1 1 2 1 100 494 175 219 1221 4 10 0 1496 2 25 44 0 1 +"27447" 4 288 5 1 1 1 2 1 100 494 219 164 1454 3 1 0 4694 2 25 55 1 0 +"27448" 4 288 5 1 1 1 2 1 100 494 164 246 1380 8 6 1 992 3 50 82 1 1 +"27449" 4 288 5 1 1 1 2 1 100 494 246 234 1710 5 7 1 850 1 5 12 0 0 +"27450" 4 288 5 1 1 1 2 1 100 494 234 175 1135 6 8 1 1759 2 25 59 0 0 +"27451" 4 288 5 1 1 1 2 1 100 494 175 219 1208 7 5 1 1485 2 25 44 1 1 +"27452" 4 288 5 1 1 1 2 1 100 494 219 329 1139 1 10 0 747 3 50 110 0 1 +"27453" 4 288 5 1 1 1 2 1 100 494 329 494 1076 9 3 1 967 3 50 165 1 1 +"27454" 4 288 5 1 1 1 3 1 100 328 100 125 1169 3 5 0 983 2 25 25 0 1 +"27455" 4 288 5 1 1 1 3 1 100 328 125 62 1125 8 9 1 1830 3 50 63 0 0 +"27456" 4 288 5 1 1 1 3 1 100 328 62 109 1107 2 4 0 541 4 75 47 0 1 +"27457" 4 288 5 1 1 1 3 1 100 328 109 164 1184 6 3 1 845 3 50 55 1 1 
+"27458" 4 288 5 1 1 1 3 1 100 328 164 246 1106 7 6 1 1051 3 50 82 1 1 +"27459" 4 288 5 1 1 1 3 1 100 328 246 184 1287 4 2 0 1023 2 25 62 1 0 +"27460" 4 288 5 1 1 1 3 1 100 328 184 276 1139 1 8 0 563 3 50 92 0 1 +"27461" 4 288 5 1 1 1 3 1 100 328 276 262 1965 5 7 1 1445 1 5 14 0 0 +"27462" 4 288 5 1 1 1 3 1 100 328 262 328 1045 9 5 1 1417 2 25 66 1 1 +"27463" 4 288 5 1 1 1 4 1 100 339 100 150 1408 8 7 1 1115 3 50 50 1 1 +"27464" 4 288 5 1 1 1 4 1 100 339 150 225 3395 3 10 0 999 3 50 75 0 1 +"27465" 4 288 5 1 1 1 4 1 100 339 225 169 1069 7 9 1 1624 2 25 56 0 0 +"27466" 4 288 5 1 1 1 4 1 100 339 169 254 1128 9 1 1 1232 3 50 85 1 1 +"27467" 4 288 5 1 1 1 4 1 100 339 254 381 1355 2 3 0 1024 3 50 127 0 1 +"27468" 4 288 5 1 1 1 4 1 100 339 381 572 1340 1 8 0 1338 3 50 191 0 1 +"27469" 4 288 5 1 1 1 4 1 100 339 572 543 1552 5 4 0 1908 1 5 29 1 0 +"27470" 4 288 5 1 1 1 4 1 100 339 543 271 1406 4 2 0 1193 3 50 272 1 0 +"27471" 4 288 5 1 1 1 4 1 100 339 271 339 1157 6 3 1 1508 2 25 68 1 1 +"27472" 4 292 2 0 1 1 1 1 100 146 100 125 10871 8 3 1 2367 2 25 25 1 1 +"27473" 4 292 2 0 1 1 1 1 100 146 125 156 6667 3 7 0 4444 2 25 31 0 1 +"27474" 4 292 2 0 1 1 1 1 100 146 156 195 4926 8 2 1 1297 2 25 39 1 1 +"27475" 4 292 2 0 1 1 1 1 100 146 195 146 2088 2 1 0 2491 2 25 49 1 0 +"27476" 4 292 3 1 1 1 1 1 100 129 100 125 2019 9 7 1 1009 2 25 25 1 1 +"27477" 4 292 3 1 1 1 1 1 100 129 125 156 1622 4 8 0 2975 2 25 31 0 1 +"27478" 4 292 3 1 1 1 1 1 100 129 156 164 7062 8 2 1 3275 1 5 8 1 1 +"27479" 4 292 3 1 1 1 1 1 100 129 164 123 2396 2 1 0 1071 2 25 41 1 0 +"27480" 4 292 3 1 1 1 1 1 100 129 123 129 2786 7 6 1 4499 1 5 6 1 1 +"27481" 4 292 3 1 1 1 1 1 100 129 129 123 3370 5 3 0 2223 1 5 6 1 0 +"27482" 4 292 3 1 1 1 1 1 100 129 123 129 2987 3 6 0 2714 1 5 6 0 1 +"27483" 4 292 3 1 1 1 1 1 100 129 129 123 1830 6 9 1 1757 1 5 6 0 0 +"27484" 4 292 3 1 1 1 1 1 100 129 123 129 2582 1 4 0 2281 1 5 6 0 1 +"27485" 4 292 3 1 1 1 2 1 100 165 100 105 2030 2 9 0 2819 1 5 5 0 1 +"27486" 4 292 3 1 1 1 2 1 100 165 105 158 4650 4 10 0 1675 3 50 53 0 1 +"27487" 4 292 3 1 1 1 2 1 100 165 158 150 3416 3 1 0 2188 1 5 8 1 0 +"27488" 4 292 3 1 1 1 2 1 100 165 150 158 2745 8 6 1 2724 1 5 8 1 1 +"27489" 4 292 3 1 1 1 2 1 100 165 158 166 1135 5 7 0 3703 1 5 8 0 1 +"27490" 4 292 3 1 1 1 2 1 100 165 166 158 4988 6 8 1 2796 1 5 8 0 0 +"27491" 4 292 3 1 1 1 2 1 100 165 158 166 3148 7 5 1 1860 1 5 8 1 1 +"27492" 4 292 3 1 1 1 2 1 100 165 166 174 2001 1 10 0 1056 1 5 8 0 1 +"27493" 4 292 3 1 1 1 2 1 100 165 174 165 4179 9 3 0 1289 1 5 9 1 0 +"27494" 4 292 3 1 1 1 3 1 100 105 100 95 1378 3 5 1 1223 1 5 5 0 0 +"27495" 4 292 3 1 1 1 3 1 100 105 95 100 1906 8 9 0 748 1 5 5 0 1 +"27496" 4 292 3 1 1 1 3 1 100 105 100 95 1174 2 4 1 644 1 5 5 0 0 +"27497" 4 292 3 1 1 1 3 1 100 105 95 100 1642 6 3 1 594 1 5 5 1 1 +"27498" 4 292 3 1 1 1 3 1 100 105 100 105 1378 7 6 1 607 1 5 5 1 1 +"27499" 4 292 3 1 1 1 3 1 100 105 105 100 1063 4 2 0 682 1 5 5 1 0 +"27500" 4 292 3 1 1 1 3 1 100 105 100 95 823 1 8 1 677 1 5 5 0 0 +"27501" 4 292 3 1 1 1 3 1 100 105 95 100 1250 5 7 0 676 1 5 5 0 1 +"27502" 4 292 3 1 1 1 3 1 100 105 100 105 8153 9 5 1 571 1 5 5 1 1 +"27503" 4 292 3 1 1 1 4 1 100 93 100 105 1167 8 7 1 757 1 5 5 1 1 +"27504" 4 292 3 1 1 1 4 1 100 93 105 110 1405 3 10 0 728 1 5 5 0 1 +"27505" 4 292 3 1 1 1 4 1 100 93 110 104 1138 7 9 1 682 1 5 6 0 0 +"27506" 4 292 3 1 1 1 4 1 100 93 104 99 775 9 1 0 759 1 5 5 1 0 +"27507" 4 292 3 1 1 1 4 1 100 93 99 104 1240 2 3 0 603 1 5 5 0 1 +"27508" 4 292 3 1 1 1 4 1 100 93 104 99 1092 1 8 1 813 1 5 5 0 0 +"27509" 4 292 3 1 
1 1 4 1 100 93 99 94 1036 5 4 0 840 1 5 5 1 0 +"27510" 4 292 3 1 1 1 4 1 100 93 94 89 1533 4 2 0 618 1 5 5 1 0 +"27511" 4 292 3 1 1 1 4 1 100 93 89 93 602 6 3 1 619 1 5 4 1 1 +"27512" 4 292 4 0 1 0 1 0 100 0 100 5 1005 2 7 1 674 1 95 95 0 0 +"27513" 4 292 4 0 1 0 1 0 100 0 5 10 1999 3 7 0 942 1 95 5 0 1 +"27514" 4 292 4 0 1 0 1 0 100 0 10 0 3062 2 8 1 2986 1 95 10 0 0 +"27515" 4 292 5 1 1 0 1 0 100 0 100 50 3822 1 3 1 4717 3 50 50 0 0 +"27516" 4 292 5 1 1 0 1 0 100 0 50 98 2530 6 5 1 2567 1 95 48 1 1 +"27517" 4 292 5 1 1 0 1 0 100 0 98 191 4115 2 8 0 692 1 95 93 0 1 +"27518" 4 292 5 1 1 0 1 0 100 0 191 10 1225 8 9 1 732 1 95 181 0 0 +"27519" 4 292 5 1 1 0 1 0 100 0 10 20 1454 3 4 0 830 1 95 10 0 1 +"27520" 4 292 5 1 1 0 1 0 100 0 20 5 2172 5 7 1 2931 2 75 15 0 0 +"27521" 4 292 5 1 1 0 1 0 100 0 5 10 2181 7 4 1 666 1 95 5 1 1 +"27522" 4 292 5 1 1 0 1 0 100 0 10 0 2048 4 1 0 772 1 95 10 1 0 +"27523" 4 292 5 1 1 0 2 1 100 4 100 150 1141 8 1 1 1485 3 50 50 1 1 +"27524" 4 292 5 1 1 0 2 1 100 4 150 225 1235 6 2 1 2886 3 50 75 1 1 +"27525" 4 292 5 1 1 0 2 1 100 4 225 11 35218 7 9 1 916 1 95 214 0 0 +"27526" 4 292 5 1 1 0 2 1 100 4 11 21 2358 2 10 0 1219 1 95 10 0 1 +"27527" 4 292 5 1 1 0 2 1 100 4 21 26 5567 5 3 1 2052 4 25 5 1 1 +"27528" 4 292 5 1 1 0 2 1 100 4 26 6 9000 4 2 0 1801 2 75 20 1 0 +"27529" 4 292 5 1 1 0 2 1 100 4 6 9 1008 3 5 0 1366 3 50 3 0 1 +"27530" 4 292 5 1 1 0 2 1 100 4 9 2 1554 9 4 0 1134 2 75 7 1 0 +"27531" 4 292 5 1 1 0 2 1 100 4 2 4 1909 1 7 0 592 2 75 2 0 1 +"27532" 4 292 5 1 1 0 3 0 100 0 100 105 2003 7 5 1 1243 5 5 5 1 1 +"27533" 4 292 5 1 1 0 3 0 100 0 105 5 2196 2 1 0 4529 1 95 100 1 0 +"27534" 4 292 5 1 1 0 3 0 100 0 5 10 2751 8 6 1 801 1 95 5 1 1 +"27535" 4 292 5 1 1 0 3 0 100 0 10 0 1433 4 7 1 626 1 95 10 0 0 +"27536" 4 292 5 1 1 0 4 0 100 0 100 5 1546 2 3 1 542 1 95 95 0 0 +"27537" 4 292 5 1 1 0 4 0 100 0 5 0 1689 7 4 0 942 1 95 5 1 0 +"27538" 4 298 2 0 1 1 1 1 100 93 100 75 17488 8 3 0 3822 2 25 25 1 0 +"27539" 4 298 2 0 1 1 1 1 100 93 75 94 7549 3 7 0 2732 2 25 19 0 1 +"27540" 4 298 2 0 1 1 1 1 100 93 94 89 4095 8 2 0 3306 1 5 5 1 0 +"27541" 4 298 2 0 1 1 1 1 100 93 89 93 4370 2 1 1 2176 1 5 4 1 1 +"27542" 4 298 3 1 1 1 1 1 100 181 100 125 5924 9 7 1 1143 2 25 25 1 1 +"27543" 4 298 3 1 1 1 1 1 100 181 125 119 5334 4 8 1 1485 1 5 6 0 0 +"27544" 4 298 3 1 1 1 1 1 100 181 119 149 4828 8 2 1 1052 2 25 30 1 1 +"27545" 4 298 3 1 1 1 1 1 100 181 149 156 6343 2 1 1 1607 1 5 7 1 1 +"27546" 4 298 3 1 1 1 1 1 100 181 156 164 4496 7 6 1 903 1 5 8 1 1 +"27547" 4 298 3 1 1 1 1 1 100 181 164 156 6922 5 3 0 1542 1 5 8 1 0 +"27548" 4 298 3 1 1 1 1 1 100 181 156 164 4066 3 6 0 1198 1 5 8 0 1 +"27549" 4 298 3 1 1 1 1 1 100 181 164 172 4602 6 9 0 757 1 5 8 0 1 +"27550" 4 298 3 1 1 1 1 1 100 181 172 181 14486 1 4 0 578 1 5 9 0 1 +"27551" 4 298 3 1 1 1 2 1 100 221 100 125 5104 2 9 0 1195 2 25 25 0 1 +"27552" 4 298 3 1 1 1 2 1 100 221 125 131 3095 4 10 0 1945 1 5 6 0 1 +"27553" 4 298 3 1 1 1 2 1 100 221 131 164 3050 3 1 1 974 2 25 33 1 1 +"27554" 4 298 3 1 1 1 2 1 100 221 164 172 4172 8 6 1 1332 1 5 8 1 1 +"27555" 4 298 3 1 1 1 2 1 100 221 172 181 3657 5 7 0 993 1 5 9 0 1 +"27556" 4 298 3 1 1 1 2 1 100 221 181 190 4966 6 8 0 1399 1 5 9 0 1 +"27557" 4 298 3 1 1 1 2 1 100 221 190 200 4090 7 5 1 789 1 5 10 1 1 +"27558" 4 298 3 1 1 1 2 1 100 221 200 210 5724 1 10 0 1001 1 5 10 0 1 +"27559" 4 298 3 1 1 1 2 1 100 221 210 221 4143 9 3 1 2247 1 5 11 1 1 +"27560" 4 298 3 1 1 1 3 1 100 365 100 125 4813 3 5 0 1749 2 25 25 0 1 +"27561" 4 298 3 1 1 1 3 1 100 365 125 119 1863 8 9 1 2092 1 5 6 0 0 
+"27562" 4 298 3 1 1 1 3 1 100 365 119 149 3517 2 4 0 1316 2 25 30 0 1 +"27563" 4 298 3 1 1 1 3 1 100 365 149 186 3099 6 3 1 1261 2 25 37 1 1 +"27564" 4 298 3 1 1 1 3 1 100 365 186 195 2693 7 6 1 867 1 5 9 1 1 +"27565" 4 298 3 1 1 1 3 1 100 365 195 185 2567 4 2 0 1403 1 5 10 1 0 +"27566" 4 298 3 1 1 1 3 1 100 365 185 231 2085 1 8 0 976 2 25 46 0 1 +"27567" 4 298 3 1 1 1 3 1 100 365 231 243 3302 5 7 0 1102 1 5 12 0 1 +"27568" 4 298 3 1 1 1 3 1 100 365 243 365 2543 9 5 1 789 3 50 122 1 1 +"27569" 4 298 3 1 1 1 4 1 100 313 100 125 3382 8 7 1 435 2 25 25 1 1 +"27570" 4 298 3 1 1 1 4 1 100 313 125 156 1916 3 10 0 990 2 25 31 0 1 +"27571" 4 298 3 1 1 1 4 1 100 313 156 117 3892 7 9 1 1176 2 25 39 0 0 +"27572" 4 298 3 1 1 1 4 1 100 313 117 205 2821 9 1 1 2237 4 75 88 1 1 +"27573" 4 298 3 1 1 1 4 1 100 313 205 215 1783 2 3 0 936 1 5 10 0 1 +"27574" 4 298 3 1 1 1 4 1 100 313 215 419 1689 1 8 0 3183 5 95 204 0 1 +"27575" 4 298 3 1 1 1 4 1 100 313 419 398 2957 5 4 0 1877 1 5 21 1 0 +"27576" 4 298 3 1 1 1 4 1 100 313 398 298 2042 4 2 0 966 2 25 100 1 0 +"27577" 4 298 3 1 1 1 4 1 100 313 298 313 4805 6 3 1 4286 1 5 15 1 1 +"27578" 4 298 4 0 1 0 1 1 100 1 100 195 2071 2 7 0 2531 1 95 95 0 1 +"27579" 4 298 4 0 1 0 1 1 100 1 195 380 2392 3 7 0 1652 1 95 185 0 1 +"27580" 4 298 4 0 1 0 1 1 100 1 380 19 2050 2 8 1 1254 1 95 361 0 0 +"27581" 4 298 4 0 1 0 1 1 100 1 19 1 1984 8 9 1 1007 1 95 18 0 0 +"27582" 4 298 5 1 1 0 1 1 100 1201 100 195 1785 1 3 0 1530 1 95 95 0 1 +"27583" 4 298 5 1 1 0 1 1 100 1201 195 380 7302 6 5 1 920 1 95 185 1 1 +"27584" 4 298 5 1 1 0 1 1 100 1201 380 665 2450 2 8 0 1459 2 75 285 0 1 +"27585" 4 298 5 1 1 0 1 1 100 1201 665 166 2467 8 9 1 1235 2 75 499 0 0 +"27586" 4 298 5 1 1 0 1 1 100 1201 166 324 3542 3 4 0 2619 1 95 158 0 1 +"27587" 4 298 5 1 1 0 1 1 100 1201 324 632 1668 5 7 0 986 1 95 308 0 1 +"27588" 4 298 5 1 1 0 1 1 100 1201 632 1232 4097 7 4 1 1659 1 95 600 1 1 +"27589" 4 298 5 1 1 0 1 1 100 1201 1232 616 3394 4 1 0 1686 3 50 616 1 0 +"27590" 4 298 5 1 1 0 1 1 100 1201 616 1201 1851 9 6 1 1177 1 95 585 1 1 +"27591" 4 298 5 1 1 0 2 0 100 0 100 195 2085 8 1 1 1287 1 95 95 1 1 +"27592" 4 298 5 1 1 0 2 0 100 0 195 10 2160 6 2 0 1030 1 95 185 1 0 +"27593" 4 298 5 1 1 0 2 0 100 0 10 20 1682 7 9 0 2448 1 95 10 0 1 +"27594" 4 298 5 1 1 0 2 0 100 0 20 39 2417 2 10 0 1387 1 95 19 0 1 +"27595" 4 298 5 1 1 0 2 0 100 0 39 76 1887 5 3 1 1411 1 95 37 1 1 +"27596" 4 298 5 1 1 0 2 0 100 0 76 4 2941 4 2 0 1158 1 95 72 1 0 +"27597" 4 298 5 1 1 0 2 0 100 0 4 0 2068 3 5 1 983 1 95 4 0 0 +"27598" 4 298 5 1 1 0 3 0 100 0 100 195 2418 7 5 1 2836 1 95 95 1 1 +"27599" 4 298 5 1 1 0 3 0 100 0 195 10 2203 2 1 0 1040 1 95 185 1 0 +"27600" 4 298 5 1 1 0 3 0 100 0 10 0 2107 8 6 0 2788 1 95 10 1 0 +"27601" 4 298 5 1 1 0 4 1 100 3851 100 195 2023 2 3 0 1148 1 95 95 0 1 +"27602" 4 298 5 1 1 0 4 1 100 3851 195 380 4547 7 4 1 1240 1 95 185 1 1 +"27603" 4 298 5 1 1 0 4 1 100 3851 380 665 3010 3 1 1 1097 2 75 285 1 1 +"27604" 4 298 5 1 1 0 4 1 100 3851 665 1297 1586 1 9 0 924 1 95 632 0 1 +"27605" 4 298 5 1 1 0 4 1 100 3851 1297 1621 2413 8 7 1 1464 4 25 324 1 1 +"27606" 4 298 5 1 1 0 4 1 100 3851 1621 3161 1776 9 2 1 1017 1 95 1540 1 1 +"27607" 4 298 5 1 1 0 4 1 100 3851 3161 3951 3005 5 6 0 740 4 25 790 0 1 +"27608" 4 298 5 1 1 0 4 1 100 3851 3951 1975 2094 6 8 1 1288 3 50 1976 0 0 +"27609" 4 298 5 1 1 0 4 1 100 3851 1975 3851 1456 4 7 0 2440 1 95 1876 0 1 +"27610" 4 310 2 0 1 0 1 1 100 23 100 150 6150 2 7 0 1223 3 50 50 0 1 +"27611" 4 310 2 0 1 0 1 1 100 23 150 263 15075 3 7 0 4324 2 75 113 0 1 +"27612" 4 310 
2 0 1 0 1 1 100 23 263 460 3314 2 8 0 762 2 75 197 0 1 +"27613" 4 310 2 0 1 0 1 1 100 23 460 23 3459 8 9 1 2260 1 95 437 0 0 +"27614" 4 310 3 1 1 0 1 1 100 339 100 195 11653 1 3 0 2567 1 95 95 0 1 +"27615" 4 310 3 1 1 0 1 1 100 339 195 293 2883 6 5 1 2375 3 50 98 1 1 +"27616" 4 310 3 1 1 0 1 1 100 339 293 308 2075 2 8 0 830 5 5 15 0 1 +"27617" 4 310 3 1 1 0 1 1 100 339 308 154 1525 8 9 1 1993 3 50 154 0 0 +"27618" 4 310 3 1 1 0 1 1 100 339 154 193 1200 3 4 0 420 4 25 39 0 1 +"27619" 4 310 3 1 1 0 1 1 100 339 193 241 2765 5 7 0 487 4 25 48 0 1 +"27620" 4 310 3 1 1 0 1 1 100 339 241 301 2813 7 4 1 493 4 25 60 1 1 +"27621" 4 310 3 1 1 0 1 1 100 339 301 226 3108 4 1 0 549 4 25 75 1 0 +"27622" 4 310 3 1 1 0 1 1 100 339 226 339 2193 9 6 1 1918 3 50 113 1 1 +"27623" 4 310 3 1 1 0 2 1 100 312 100 150 4103 8 1 1 602 3 50 50 1 1 +"27624" 4 310 3 1 1 0 2 1 100 312 150 225 1451 6 2 1 812 3 50 75 1 1 +"27625" 4 310 3 1 1 0 2 1 100 312 225 169 2451 7 9 1 796 4 25 56 0 0 +"27626" 4 310 3 1 1 0 2 1 100 312 169 211 1828 2 10 0 450 4 25 42 0 1 +"27627" 4 310 3 1 1 0 2 1 100 312 211 158 1579 5 3 0 558 4 25 53 1 0 +"27628" 4 310 3 1 1 0 2 1 100 312 158 79 1720 4 2 0 2364 3 50 79 1 0 +"27629" 4 310 3 1 1 0 2 1 100 312 79 119 1921 3 5 0 2228 3 50 40 0 1 +"27630" 4 310 3 1 1 0 2 1 100 312 119 208 1624 9 4 1 640 2 75 89 1 1 +"27631" 4 310 3 1 1 0 2 1 100 312 208 312 2662 1 7 0 1693 3 50 104 0 1 +"27632" 4 310 3 1 1 0 3 1 100 375 100 150 1764 7 5 1 655 3 50 50 1 1 +"27633" 4 310 3 1 1 0 3 1 100 375 150 75 1092 2 1 0 2142 3 50 75 1 0 +"27634" 4 310 3 1 1 0 3 1 100 375 75 113 1174 8 6 1 1172 3 50 38 1 1 +"27635" 4 310 3 1 1 0 3 1 100 375 113 170 1185 4 7 0 662 3 50 57 0 1 +"27636" 4 310 3 1 1 0 3 1 100 375 170 255 1180 3 10 0 2530 3 50 85 0 1 +"27637" 4 310 3 1 1 0 3 1 100 375 255 191 3831 6 8 1 1511 4 25 64 0 0 +"27638" 4 310 3 1 1 0 3 1 100 375 191 334 1228 9 2 1 559 2 75 143 1 1 +"27639" 4 310 3 1 1 0 3 1 100 375 334 250 1458 5 3 0 1170 4 25 84 1 0 +"27640" 4 310 3 1 1 0 3 1 100 375 250 375 1696 1 10 0 789 3 50 125 0 1 +"27641" 4 310 3 1 1 0 4 1 100 291 100 175 1571 2 3 0 558 2 75 75 0 1 +"27642" 4 310 3 1 1 0 4 1 100 291 175 263 1128 7 4 1 1704 3 50 88 1 1 +"27643" 4 310 3 1 1 0 4 1 100 291 263 131 1269 3 1 0 1422 3 50 132 1 0 +"27644" 4 310 3 1 1 0 4 1 100 291 131 255 1944 1 9 0 792 1 95 124 0 1 +"27645" 4 310 3 1 1 0 4 1 100 291 255 319 1528 8 7 1 629 4 25 64 1 1 +"27646" 4 310 3 1 1 0 4 1 100 291 319 399 2212 9 2 1 593 4 25 80 1 1 +"27647" 4 310 3 1 1 0 4 1 100 291 399 299 2130 5 6 1 1482 4 25 100 0 0 +"27648" 4 310 3 1 1 0 4 1 100 291 299 149 1873 6 8 1 2493 3 50 150 0 0 +"27649" 4 310 3 1 1 0 4 1 100 291 149 291 2980 4 7 0 660 1 95 142 0 1 +"27650" 4 310 4 0 1 1 1 1 100 178 100 150 4870 8 3 1 924 3 50 50 1 1 +"27651" 4 310 4 0 1 1 1 1 100 178 150 158 3205 3 7 0 2386 1 5 8 0 1 +"27652" 4 310 4 0 1 1 1 1 100 178 158 237 3605 8 2 1 375 3 50 79 1 1 +"27653" 4 310 4 0 1 1 1 1 100 178 237 178 1578 2 1 0 925 2 25 59 1 0 +"27654" 4 310 5 1 1 1 1 1 100 333 100 150 4166 9 7 1 502 3 50 50 1 1 +"27655" 4 310 5 1 1 1 1 1 100 333 150 225 1371 4 8 0 560 3 50 75 0 1 +"27656" 4 310 5 1 1 1 1 1 100 333 225 338 1410 8 2 1 1117 3 50 113 1 1 +"27657" 4 310 5 1 1 1 1 1 100 333 338 253 965 2 1 0 733 2 25 85 1 0 +"27658" 4 310 5 1 1 1 1 1 100 333 253 316 1623 7 6 1 663 2 25 63 1 1 +"27659" 4 310 5 1 1 1 1 1 100 333 316 237 2253 5 3 0 672 2 25 79 1 0 +"27660" 4 310 5 1 1 1 1 1 100 333 237 296 1237 3 6 0 631 2 25 59 0 1 +"27661" 4 310 5 1 1 1 1 1 100 333 296 222 1727 6 9 1 746 2 25 74 0 0 +"27662" 4 310 5 1 1 1 1 1 100 333 222 333 
1132 1 4 0 612 3 50 111 0 1 +"27663" 4 310 5 1 1 1 2 1 100 559 100 150 1301 2 9 0 453 3 50 50 0 1 +"27664" 4 310 5 1 1 1 2 1 100 559 150 225 1972 4 10 0 404 3 50 75 0 1 +"27665" 4 310 5 1 1 1 2 1 100 559 225 169 2062 3 1 0 554 2 25 56 1 0 +"27666" 4 310 5 1 1 1 2 1 100 559 169 254 1300 8 6 1 1220 3 50 85 1 1 +"27667" 4 310 5 1 1 1 2 1 100 559 254 318 1488 5 7 0 1694 2 25 64 0 1 +"27668" 4 310 5 1 1 1 2 1 100 559 318 238 1747 6 8 1 598 2 25 80 0 0 +"27669" 4 310 5 1 1 1 2 1 100 559 238 298 975 7 5 1 702 2 25 60 1 1 +"27670" 4 310 5 1 1 1 2 1 100 559 298 447 906 1 10 0 573 3 50 149 0 1 +"27671" 4 310 5 1 1 1 2 1 100 559 447 559 1159 9 3 1 598 2 25 112 1 1 +"27672" 4 310 5 1 1 1 3 1 100 451 100 125 940 3 5 0 573 2 25 25 0 1 +"27673" 4 310 5 1 1 1 3 1 100 451 125 94 1391 8 9 1 1250 2 25 31 0 0 +"27674" 4 310 5 1 1 1 3 1 100 451 94 141 1028 2 4 0 649 3 50 47 0 1 +"27675" 4 310 5 1 1 1 3 1 100 451 141 176 1256 6 3 1 1465 2 25 35 1 1 +"27676" 4 310 5 1 1 1 3 1 100 451 176 220 1217 7 6 1 1189 2 25 44 1 1 +"27677" 4 310 5 1 1 1 3 1 100 451 220 165 1074 4 2 0 509 2 25 55 1 0 +"27678" 4 310 5 1 1 1 3 1 100 451 165 289 1225 1 8 0 453 4 75 124 0 1 +"27679" 4 310 5 1 1 1 3 1 100 451 289 361 1372 5 7 0 513 2 25 72 0 1 +"27680" 4 310 5 1 1 1 3 1 100 451 361 451 1910 9 5 1 1304 2 25 90 1 1 +"27681" 4 310 5 1 1 1 4 1 100 528 100 150 1633 8 7 1 330 3 50 50 1 1 +"27682" 4 310 5 1 1 1 4 1 100 528 150 188 911 3 10 0 482 2 25 38 0 1 +"27683" 4 310 5 1 1 1 4 1 100 528 188 141 1296 7 9 1 609 2 25 47 0 0 +"27684" 4 310 5 1 1 1 4 1 100 528 141 247 1297 9 1 1 681 4 75 106 1 1 +"27685" 4 310 5 1 1 1 4 1 100 528 247 371 1182 2 3 0 564 3 50 124 0 1 +"27686" 4 310 5 1 1 1 4 1 100 528 371 557 1431 1 8 0 569 3 50 186 0 1 +"27687" 4 310 5 1 1 1 4 1 100 528 557 529 1443 5 4 0 2956 1 5 28 1 0 +"27688" 4 310 5 1 1 1 4 1 100 528 529 503 1348 4 2 0 1218 1 5 26 1 0 +"27689" 4 310 5 1 1 1 4 1 100 528 503 528 1413 6 3 1 912 1 5 25 1 1 +"27690" 4 311 2 0 1 0 1 1 100 115 100 150 5540 2 7 0 1122 3 50 50 0 1 +"27691" 4 311 2 0 1 0 1 1 100 115 150 263 13299 3 7 0 2319 2 75 113 0 1 +"27692" 4 311 2 0 1 0 1 1 100 115 263 460 2590 2 8 0 1976 2 75 197 0 1 +"27693" 4 311 2 0 1 0 1 1 100 115 460 115 2535 8 9 1 1868 2 75 345 0 0 +"27694" 4 311 3 1 1 0 1 1 100 351 100 175 26903 1 3 0 780 2 75 75 0 1 +"27695" 4 311 3 1 1 0 1 1 100 351 175 306 2901 6 5 1 2416 2 75 131 1 1 +"27696" 4 311 3 1 1 0 1 1 100 351 306 536 3131 2 8 0 940 2 75 230 0 1 +"27697" 4 311 3 1 1 0 1 1 100 351 536 134 3724 8 9 1 1336 2 75 402 0 0 +"27698" 4 311 3 1 1 0 1 1 100 351 134 235 4854 3 4 0 2213 2 75 101 0 1 +"27699" 4 311 3 1 1 0 1 1 100 351 235 59 2313 5 7 1 1544 2 75 176 0 0 +"27700" 4 311 3 1 1 0 1 1 100 351 59 103 4747 7 4 1 399 2 75 44 1 1 +"27701" 4 311 3 1 1 0 1 1 100 351 103 180 2514 4 1 1 679 2 75 77 1 1 +"27702" 4 311 3 1 1 0 1 1 100 351 180 351 4700 9 6 1 658 1 95 171 1 1 +"27703" 4 311 3 1 1 0 2 1 100 1790 100 175 5839 8 1 1 527 2 75 75 1 1 +"27704" 4 311 3 1 1 0 2 1 100 1790 175 87 3987 6 2 0 747 3 50 88 1 0 +"27705" 4 311 3 1 1 0 2 1 100 1790 87 152 3429 7 9 0 784 2 75 65 0 1 +"27706" 4 311 3 1 1 0 2 1 100 1790 152 266 1680 2 10 0 622 2 75 114 0 1 +"27707" 4 311 3 1 1 0 2 1 100 1790 266 466 3453 5 3 1 742 2 75 200 1 1 +"27708" 4 311 3 1 1 0 2 1 100 1790 466 816 3744 4 2 1 1027 2 75 350 1 1 +"27709" 4 311 3 1 1 0 2 1 100 1790 816 1224 2815 3 5 0 647 3 50 408 0 1 +"27710" 4 311 3 1 1 0 2 1 100 1790 1224 918 7005 9 4 0 790 4 25 306 1 0 +"27711" 4 311 3 1 1 0 2 1 100 1790 918 1790 2878 1 7 0 930 1 95 872 0 1 +"27712" 4 311 3 1 1 0 3 0 100 1 100 25 2198 7 5 0 
643 2 75 75 1 0 +"27713" 4 311 3 1 1 0 3 0 100 1 25 1 2049 2 1 0 3942 1 95 24 1 0 +"27714" 4 311 3 1 1 0 4 1 100 49 100 175 2604 2 3 0 630 2 75 75 0 1 +"27715" 4 311 3 1 1 0 4 1 100 49 175 44 2309 7 4 0 917 2 75 131 1 0 +"27716" 4 311 3 1 1 0 4 1 100 49 44 11 1911 3 1 0 1402 2 75 33 1 0 +"27717" 4 311 3 1 1 0 4 1 100 49 11 21 5012 1 9 0 555 1 95 10 0 1 +"27718" 4 311 3 1 1 0 4 1 100 49 21 37 4558 8 7 1 771 2 75 16 1 1 +"27719" 4 311 3 1 1 0 4 1 100 49 37 65 3924 9 2 1 799 2 75 28 1 1 +"27720" 4 311 3 1 1 0 4 1 100 49 65 114 3286 5 6 0 575 2 75 49 0 1 +"27721" 4 311 3 1 1 0 4 1 100 49 114 28 5609 6 8 1 660 2 75 86 0 0 +"27722" 4 311 3 1 1 0 4 1 100 49 28 49 2537 4 7 0 726 2 75 21 0 1 +"27723" 4 311 4 0 1 1 1 1 100 141 100 150 4113 8 3 1 1171 3 50 50 1 1 +"27724" 4 311 4 0 1 1 1 1 100 141 150 188 3953 3 7 0 875 2 25 38 0 1 +"27725" 4 311 4 0 1 1 1 1 100 141 188 282 1926 8 2 1 888 3 50 94 1 1 +"27726" 4 311 4 0 1 1 1 1 100 141 282 141 2475 2 1 0 940 3 50 141 1 0 +"27727" 4 311 5 1 1 1 1 1 100 37 100 150 5421 9 7 1 649 3 50 50 1 1 +"27728" 4 311 5 1 1 1 1 1 100 37 150 225 3992 4 8 0 591 3 50 75 0 1 +"27729" 4 311 5 1 1 1 1 1 100 37 225 281 1808 8 2 1 1256 2 25 56 1 1 +"27730" 4 311 5 1 1 1 1 1 100 37 281 211 3025 2 1 0 2119 2 25 70 1 0 +"27731" 4 311 5 1 1 1 1 1 100 37 211 158 3383 7 6 0 1029 2 25 53 1 0 +"27732" 4 311 5 1 1 1 1 1 100 37 158 79 3353 5 3 0 735 3 50 79 1 0 +"27733" 4 311 5 1 1 1 1 1 100 37 79 59 2945 3 6 1 1067 2 25 20 0 0 +"27734" 4 311 5 1 1 1 1 1 100 37 59 74 2118 6 9 0 855 2 25 15 0 1 +"27735" 4 311 5 1 1 1 1 1 100 37 74 37 2964 1 4 1 693 3 50 37 0 0 +"27736" 4 311 5 1 1 1 2 1 100 50 100 75 2983 2 9 1 2190 2 25 25 0 0 +"27737" 4 311 5 1 1 1 2 1 100 50 75 94 3179 4 10 0 2730 2 25 19 0 1 +"27738" 4 311 5 1 1 1 2 1 100 50 94 47 1373 3 1 0 529 3 50 47 1 0 +"27739" 4 311 5 1 1 1 2 1 100 50 47 71 3086 8 6 1 632 3 50 24 1 1 +"27740" 4 311 5 1 1 1 2 1 100 50 71 35 2888 5 7 1 876 3 50 36 0 0 +"27741" 4 311 5 1 1 1 2 1 100 50 35 44 1967 6 8 0 688 2 25 9 0 1 +"27742" 4 311 5 1 1 1 2 1 100 50 44 22 2533 7 5 0 626 3 50 22 1 0 +"27743" 4 311 5 1 1 1 2 1 100 50 22 33 2633 1 10 0 922 3 50 11 0 1 +"27744" 4 311 5 1 1 1 2 1 100 50 33 50 1795 9 3 1 1020 3 50 17 1 1 +"27745" 4 311 5 1 1 1 3 1 100 35 100 150 2827 3 5 0 835 3 50 50 0 1 +"27746" 4 311 5 1 1 1 3 1 100 35 150 112 1576 8 9 1 1971 2 25 38 0 0 +"27747" 4 311 5 1 1 1 3 1 100 35 112 168 1426 2 4 0 804 3 50 56 0 1 +"27748" 4 311 5 1 1 1 3 1 100 35 168 126 4404 6 3 0 1728 2 25 42 1 0 +"27749" 4 311 5 1 1 1 3 1 100 35 126 63 2163 7 6 0 834 3 50 63 1 0 +"27750" 4 311 5 1 1 1 3 1 100 35 63 95 3768 4 2 1 539 3 50 32 1 1 +"27751" 4 311 5 1 1 1 3 1 100 35 95 47 3286 1 8 1 1119 3 50 48 0 0 +"27752" 4 311 5 1 1 1 3 1 100 35 47 23 3231 5 7 1 964 3 50 24 0 0 +"27753" 4 311 5 1 1 1 3 1 100 35 23 35 1907 9 5 1 966 3 50 12 1 1 +"27754" 4 311 5 1 1 1 4 1 100 373 100 150 2681 8 7 1 1279 3 50 50 1 1 +"27755" 4 311 5 1 1 1 4 1 100 373 150 188 2024 3 10 0 1397 2 25 38 0 1 +"27756" 4 311 5 1 1 1 4 1 100 373 188 94 2471 7 9 1 760 3 50 94 0 0 +"27757" 4 311 5 1 1 1 4 1 100 373 94 141 2256 9 1 1 803 3 50 47 1 1 +"27758" 4 311 5 1 1 1 4 1 100 373 141 212 2434 2 3 0 676 3 50 71 0 1 +"27759" 4 311 5 1 1 1 4 1 100 373 212 318 2241 1 8 0 932 3 50 106 0 1 +"27760" 4 311 5 1 1 1 4 1 100 373 318 398 2525 5 4 1 1435 2 25 80 1 1 +"27761" 4 311 5 1 1 1 4 1 100 373 398 498 3676 4 2 1 1443 2 25 100 1 1 +"27762" 4 311 5 1 1 1 4 1 100 373 498 373 2371 6 3 0 648 2 25 125 1 0 +"27763" 4 312 2 0 1 0 1 1 100 29 100 150 6651 2 7 0 892 3 50 50 0 1 +"27764" 4 312 2 0 1 0 1 1 100 29 
150 293 13358 3 7 0 4584 1 95 143 0 1 +"27765" 4 312 2 0 1 0 1 1 100 29 293 571 4236 2 8 0 2640 1 95 278 0 1 +"27766" 4 312 2 0 1 0 1 1 100 29 571 29 2639 8 9 1 4088 1 95 542 0 0 +"27767" 4 312 3 1 1 0 1 0 100 1 100 195 7082 1 3 0 3871 1 95 95 0 1 +"27768" 4 312 3 1 1 0 1 0 100 1 195 380 3107 6 5 1 3037 1 95 185 1 1 +"27769" 4 312 3 1 1 0 1 0 100 1 380 19 3649 2 8 1 3529 1 95 361 0 0 +"27770" 4 312 3 1 1 0 1 0 100 1 19 1 4286 8 9 1 2216 1 95 18 0 0 +"27771" 4 312 3 1 1 0 2 0 100 0 100 5 3262 8 1 0 3273 1 95 95 1 0 +"27772" 4 312 3 1 1 0 2 0 100 0 5 0 4130 6 2 0 2123 1 95 5 1 0 +"27773" 4 312 3 1 1 0 3 0 100 0 100 195 3316 7 5 1 1765 1 95 95 1 1 +"27774" 4 312 3 1 1 0 3 0 100 0 195 380 2894 2 1 1 2850 1 95 185 1 1 +"27775" 4 312 3 1 1 0 3 0 100 0 380 741 3487 8 6 1 1639 1 95 361 1 1 +"27776" 4 312 3 1 1 0 3 0 100 0 741 1445 3190 4 7 0 2300 1 95 704 0 1 +"27777" 4 312 3 1 1 0 3 0 100 0 1445 72 2683 3 10 1 1361 1 95 1373 0 0 +"27778" 4 312 3 1 1 0 3 0 100 0 72 4 3285 6 8 1 1742 1 95 68 0 0 +"27779" 4 312 3 1 1 0 3 0 100 0 4 8 2929 9 2 1 1067 1 95 4 1 1 +"27780" 4 312 3 1 1 0 3 0 100 0 8 0 2830 5 3 0 1701 1 95 8 1 0 +"27781" 4 312 3 1 1 0 4 1 100 1037 100 195 2513 2 3 0 2359 1 95 95 0 1 +"27782" 4 312 3 1 1 0 4 1 100 1037 195 380 2884 7 4 1 1608 1 95 185 1 1 +"27783" 4 312 3 1 1 0 4 1 100 1037 380 19 3825 3 1 0 1936 1 95 361 1 0 +"27784" 4 312 3 1 1 0 4 1 100 1037 19 37 2585 1 9 0 1020 1 95 18 0 1 +"27785" 4 312 3 1 1 0 4 1 100 1037 37 72 2577 8 7 1 1516 1 95 35 1 1 +"27786" 4 312 3 1 1 0 4 1 100 1037 72 140 3553 9 2 1 2509 1 95 68 1 1 +"27787" 4 312 3 1 1 0 4 1 100 1037 140 273 2475 5 6 0 1339 1 95 133 0 1 +"27788" 4 312 3 1 1 0 4 1 100 1037 273 532 3545 6 8 0 1366 1 95 259 0 1 +"27789" 4 312 3 1 1 0 4 1 100 1037 532 1037 2674 4 7 0 1720 1 95 505 0 1 +"27790" 4 312 4 0 1 1 1 1 100 140 100 150 4877 8 3 1 1179 3 50 50 1 1 +"27791" 4 312 4 0 1 1 1 1 100 140 150 225 3832 3 7 0 608 3 50 75 0 1 +"27792" 4 312 4 0 1 1 1 1 100 140 225 281 3040 8 2 1 1493 2 25 56 1 1 +"27793" 4 312 4 0 1 1 1 1 100 140 281 140 3188 2 1 0 554 3 50 141 1 0 +"27794" 4 312 5 1 1 1 1 1 100 312 100 175 3490 9 7 1 554 4 75 75 1 1 +"27795" 4 312 5 1 1 1 1 1 100 312 175 263 2060 4 8 0 792 3 50 88 0 1 +"27796" 4 312 5 1 1 1 1 1 100 312 263 395 2119 8 2 1 904 3 50 132 1 1 +"27797" 4 312 5 1 1 1 1 1 100 312 395 197 2107 2 1 0 701 3 50 198 1 0 +"27798" 4 312 5 1 1 1 1 1 100 312 197 296 2329 7 6 1 368 3 50 99 1 1 +"27799" 4 312 5 1 1 1 1 1 100 312 296 370 2516 5 3 1 2290 2 25 74 1 1 +"27800" 4 312 5 1 1 1 1 1 100 312 370 277 2090 3 6 1 717 2 25 93 0 0 +"27801" 4 312 5 1 1 1 1 1 100 312 277 208 2288 6 9 1 1393 2 25 69 0 0 +"27802" 4 312 5 1 1 1 1 1 100 312 208 312 1964 1 4 0 821 3 50 104 0 1 +"27803" 4 312 5 1 1 1 2 1 100 1211 100 195 3060 2 9 0 385 5 95 95 0 1 +"27804" 4 312 5 1 1 1 2 1 100 1211 195 293 2444 4 10 0 885 3 50 98 0 1 +"27805" 4 312 5 1 1 1 2 1 100 1211 293 366 2753 3 1 1 1147 2 25 73 1 1 +"27806" 4 312 5 1 1 1 2 1 100 1211 366 458 2042 8 6 1 1822 2 25 92 1 1 +"27807" 4 312 5 1 1 1 2 1 100 1211 458 573 3525 5 7 0 1747 2 25 115 0 1 +"27808" 4 312 5 1 1 1 2 1 100 1211 573 430 2372 6 8 1 1017 2 25 143 0 0 +"27809" 4 312 5 1 1 1 2 1 100 1211 430 538 1982 7 5 1 678 2 25 108 1 1 +"27810" 4 312 5 1 1 1 2 1 100 1211 538 807 2283 1 10 0 548 3 50 269 0 1 +"27811" 4 312 5 1 1 1 2 1 100 1211 807 1211 2535 9 3 1 604 3 50 404 1 1 +"27812" 4 312 5 1 1 1 3 1 100 933 100 125 3019 3 5 0 633 2 25 25 0 1 +"27813" 4 312 5 1 1 1 3 1 100 933 125 94 1944 8 9 1 2241 2 25 31 0 0 +"27814" 4 312 5 1 1 1 3 1 100 933 94 141 2004 2 4 0 595 3 50 47 0 
1 +"27815" 4 312 5 1 1 1 3 1 100 933 141 212 1907 6 3 1 774 3 50 71 1 1 +"27816" 4 312 5 1 1 1 3 1 100 933 212 318 2422 7 6 1 393 3 50 106 1 1 +"27817" 4 312 5 1 1 1 3 1 100 933 318 398 2460 4 2 1 2269 2 25 80 1 1 +"27818" 4 312 5 1 1 1 3 1 100 933 398 597 1734 1 8 0 641 3 50 199 0 1 +"27819" 4 312 5 1 1 1 3 1 100 933 597 746 2832 5 7 0 582 2 25 149 0 1 +"27820" 4 312 5 1 1 1 3 1 100 933 746 933 1816 9 5 1 1640 2 25 187 1 1 +"27821" 4 312 5 1 1 1 4 1 100 624 100 175 2337 8 7 1 624 4 75 75 1 1 +"27822" 4 312 5 1 1 1 4 1 100 624 175 263 2143 3 10 0 479 3 50 88 0 1 +"27823" 4 312 5 1 1 1 4 1 100 624 263 131 1693 7 9 1 658 3 50 132 0 0 +"27824" 4 312 5 1 1 1 4 1 100 624 131 197 1744 9 1 1 1355 3 50 66 1 1 +"27825" 4 312 5 1 1 1 4 1 100 624 197 296 1928 2 3 0 762 3 50 99 0 1 +"27826" 4 312 5 1 1 1 4 1 100 624 296 444 2107 1 8 0 877 3 50 148 0 1 +"27827" 4 312 5 1 1 1 4 1 100 624 444 666 2368 5 4 1 341 3 50 222 1 1 +"27828" 4 312 5 1 1 1 4 1 100 624 666 499 3066 4 2 0 2221 2 25 167 1 0 +"27829" 4 312 5 1 1 1 4 1 100 624 499 624 2177 6 3 1 1663 2 25 125 1 1 +"27830" 4 317 2 0 1 0 1 1 100 285 100 150 4437 2 7 0 1016 3 50 50 0 1 +"27831" 4 317 2 0 1 0 1 1 100 285 150 293 8813 3 7 0 831 1 95 143 0 1 +"27832" 4 317 2 0 1 0 1 1 100 285 293 571 8989 2 8 0 1254 1 95 278 0 1 +"27833" 4 317 2 0 1 0 1 1 100 285 571 285 3432 8 9 1 603 3 50 286 0 0 +"27834" 4 317 3 1 1 0 1 1 100 133 100 195 3718 1 3 0 635 1 95 95 0 1 +"27835" 4 317 3 1 1 0 1 1 100 133 195 380 2835 6 5 1 376 1 95 185 1 1 +"27836" 4 317 3 1 1 0 1 1 100 133 380 741 1572 2 8 0 310 1 95 361 0 1 +"27837" 4 317 3 1 1 0 1 1 100 133 741 926 3397 8 9 0 825 4 25 185 0 1 +"27838" 4 317 3 1 1 0 1 1 100 133 926 1389 1391 3 4 0 1011 3 50 463 0 1 +"27839" 4 317 3 1 1 0 1 1 100 133 1389 694 2576 5 7 1 635 3 50 695 0 0 +"27840" 4 317 3 1 1 0 1 1 100 133 694 1353 1522 7 4 1 431 1 95 659 1 1 +"27841" 4 317 3 1 1 0 1 1 100 133 1353 68 1312 4 1 0 368 1 95 1285 1 0 +"27842" 4 317 3 1 1 0 1 1 100 133 68 133 2468 9 6 1 2091 1 95 65 1 1 +"27843" 4 317 3 1 1 0 2 0 100 0 100 195 3123 8 1 1 1084 1 95 95 1 1 +"27844" 4 317 3 1 1 0 2 0 100 0 195 380 2494 6 2 1 853 1 95 185 1 1 +"27845" 4 317 3 1 1 0 2 0 100 0 380 19 1521 7 9 1 630 1 95 361 0 0 +"27846" 4 317 3 1 1 0 2 0 100 0 19 37 1238 2 10 0 384 1 95 18 0 1 +"27847" 4 317 3 1 1 0 2 0 100 0 37 2 1531 5 3 0 375 1 95 35 1 0 +"27848" 4 317 3 1 1 0 2 0 100 0 2 0 1043 4 2 0 919 1 95 2 1 0 +"27849" 4 317 3 1 1 0 3 0 100 0 100 195 1812 7 5 1 467 1 95 95 1 1 +"27850" 4 317 3 1 1 0 3 0 100 0 195 10 995 2 1 0 345 1 95 185 1 0 +"27851" 4 317 3 1 1 0 3 0 100 0 10 20 1014 8 6 1 388 1 95 10 1 1 +"27852" 4 317 3 1 1 0 3 0 100 0 20 39 998 4 7 0 356 1 95 19 0 1 +"27853" 4 317 3 1 1 0 3 0 100 0 39 76 736 3 10 0 378 1 95 37 0 1 +"27854" 4 317 3 1 1 0 3 0 100 0 76 4 1854 6 8 1 395 1 95 72 0 0 +"27855" 4 317 3 1 1 0 3 0 100 0 4 8 1054 9 2 1 377 1 95 4 1 1 +"27856" 4 317 3 1 1 0 3 0 100 0 8 0 1532 5 3 0 385 1 95 8 1 0 +"27857" 4 317 3 1 1 0 4 1 100 211 100 195 1581 2 3 0 328 1 95 95 0 1 +"27858" 4 317 3 1 1 0 4 1 100 211 195 293 1046 7 4 1 434 3 50 98 1 1 +"27859" 4 317 3 1 1 0 4 1 100 211 293 15 835 3 1 0 305 1 95 278 1 0 +"27860" 4 317 3 1 1 0 4 1 100 211 15 29 2047 1 9 0 1083 1 95 14 0 1 +"27861" 4 317 3 1 1 0 4 1 100 211 29 57 981 8 7 1 468 1 95 28 1 1 +"27862" 4 317 3 1 1 0 4 1 100 211 57 111 819 9 2 1 826 1 95 54 1 1 +"27863" 4 317 3 1 1 0 4 1 100 211 111 216 2728 5 6 0 452 1 95 105 0 1 +"27864" 4 317 3 1 1 0 4 1 100 211 216 108 2412 6 8 1 1063 3 50 108 0 0 +"27865" 4 317 3 1 1 0 4 1 100 211 108 211 761 4 7 0 747 1 95 103 0 1 +"27866" 4 
317 4 0 1 1 1 1 100 84 100 150 4149 8 3 1 1462 3 50 50 1 1 +"27867" 4 317 4 0 1 1 1 1 100 84 150 225 2085 3 7 0 672 3 50 75 0 1 +"27868" 4 317 4 0 1 1 1 1 100 84 225 338 1301 8 2 1 404 3 50 113 1 1 +"27869" 4 317 4 0 1 1 1 1 100 84 338 84 1366 2 1 0 612 4 75 254 1 0 +"27870" 4 317 5 1 1 1 1 1 100 1351 100 195 3704 9 7 1 918 5 95 95 1 1 +"27871" 4 317 5 1 1 1 1 1 100 1351 195 205 1512 4 8 0 797 1 5 10 0 1 +"27872" 4 317 5 1 1 1 1 1 100 1351 205 359 1021 8 2 1 1338 4 75 154 1 1 +"27873" 4 317 5 1 1 1 1 1 100 1351 359 341 1184 2 1 0 373 1 5 18 1 0 +"27874" 4 317 5 1 1 1 1 1 100 1351 341 512 1160 7 6 1 871 3 50 171 1 1 +"27875" 4 317 5 1 1 1 1 1 100 1351 512 486 1771 5 3 0 340 1 5 26 1 0 +"27876" 4 317 5 1 1 1 1 1 100 1351 486 729 1345 3 6 0 439 3 50 243 0 1 +"27877" 4 317 5 1 1 1 1 1 100 1351 729 693 1255 6 9 1 1016 1 5 36 0 0 +"27878" 4 317 5 1 1 1 1 1 100 1351 693 1351 1173 1 4 0 1268 5 95 658 0 1 +"27879" 4 317 5 1 1 1 2 1 100 1383 100 195 2011 2 9 0 513 5 95 95 0 1 +"27880" 4 317 5 1 1 1 2 1 100 1383 195 205 1398 4 10 0 897 1 5 10 0 1 +"27881" 4 317 5 1 1 1 2 1 100 1383 205 154 1092 3 1 0 1406 2 25 51 1 0 +"27882" 4 317 5 1 1 1 2 1 100 1383 154 270 1240 8 6 1 883 4 75 116 1 1 +"27883" 4 317 5 1 1 1 2 1 100 1383 270 284 1223 5 7 0 506 1 5 14 0 1 +"27884" 4 317 5 1 1 1 2 1 100 1383 284 270 967 6 8 1 902 1 5 14 0 0 +"27885" 4 317 5 1 1 1 2 1 100 1383 270 405 993 7 5 1 535 3 50 135 1 1 +"27886" 4 317 5 1 1 1 2 1 100 1383 405 709 915 1 10 0 1742 4 75 304 0 1 +"27887" 4 317 5 1 1 1 2 1 100 1383 709 1383 1232 9 3 1 636 5 95 674 1 1 +"27888" 4 317 5 1 1 1 3 1 100 226 100 125 1317 3 5 0 236 2 25 25 0 1 +"27889" 4 317 5 1 1 1 3 1 100 226 125 31 965 8 9 1 1093 4 75 94 0 0 +"27890" 4 317 5 1 1 1 3 1 100 226 31 47 1126 2 4 0 478 3 50 16 0 1 +"27891" 4 317 5 1 1 1 3 1 100 226 47 49 1461 6 3 1 381 1 5 2 1 1 +"27892" 4 317 5 1 1 1 3 1 100 226 49 74 968 7 6 1 832 3 50 25 1 1 +"27893" 4 317 5 1 1 1 3 1 100 226 74 70 2077 4 2 0 448 1 5 4 1 0 +"27894" 4 317 5 1 1 1 3 1 100 226 70 123 720 1 8 0 1116 4 75 53 0 1 +"27895" 4 317 5 1 1 1 3 1 100 226 123 129 1420 5 7 0 726 1 5 6 0 1 +"27896" 4 317 5 1 1 1 3 1 100 226 129 226 1008 9 5 1 710 4 75 97 1 1 +"27897" 4 317 5 1 1 1 4 1 100 888 100 175 1455 8 7 1 800 4 75 75 1 1 +"27898" 4 317 5 1 1 1 4 1 100 888 175 184 935 3 10 0 416 1 5 9 0 1 +"27899" 4 317 5 1 1 1 4 1 100 888 184 175 1122 7 9 1 322 1 5 9 0 0 +"27900" 4 317 5 1 1 1 4 1 100 888 175 306 943 9 1 1 785 4 75 131 1 1 +"27901" 4 317 5 1 1 1 4 1 100 888 306 536 1081 2 3 0 556 4 75 230 0 1 +"27902" 4 317 5 1 1 1 4 1 100 888 536 938 1502 1 8 0 1548 4 75 402 0 1 +"27903" 4 317 5 1 1 1 4 1 100 888 938 891 1088 5 4 0 521 1 5 47 1 0 +"27904" 4 317 5 1 1 1 4 1 100 888 891 846 877 4 2 0 450 1 5 45 1 0 +"27905" 4 317 5 1 1 1 4 1 100 888 846 888 1218 6 3 1 621 1 5 42 1 1 +"27906" 4 324 2 0 1 0 1 1 100 18 100 150 10055 2 7 0 1685 3 50 50 0 1 +"27907" 4 324 2 0 1 0 1 1 100 18 150 188 11794 3 7 0 1463 4 25 38 0 1 +"27908" 4 324 2 0 1 0 1 1 100 18 188 367 3646 2 8 0 3494 1 95 179 0 1 +"27909" 4 324 2 0 1 0 1 1 100 18 367 18 2373 8 9 1 2906 1 95 349 0 0 +"27910" 4 324 3 1 1 0 1 1 100 1628 100 195 3139 1 3 0 2651 1 95 95 0 1 +"27911" 4 324 3 1 1 0 1 1 100 1628 195 293 3163 6 5 1 778 3 50 98 1 1 +"27912" 4 324 3 1 1 0 1 1 100 1628 293 440 2034 2 8 0 576 3 50 147 0 1 +"27913" 4 324 3 1 1 0 1 1 100 1628 440 220 2497 8 9 1 504 3 50 220 0 0 +"27914" 4 324 3 1 1 0 1 1 100 1628 220 330 1933 3 4 0 431 3 50 110 0 1 +"27915" 4 324 3 1 1 0 1 1 100 1628 330 413 5888 5 7 0 981 4 25 83 0 1 +"27916" 4 324 3 1 1 0 1 1 100 1628 413 620 3652 7 
4 1 772 3 50 207 1 1 +"27917" 4 324 3 1 1 0 1 1 100 1628 620 930 2428 4 1 1 1838 3 50 310 1 1 +"27918" 4 324 3 1 1 0 1 1 100 1628 930 1628 3180 9 6 1 571 2 75 698 1 1 +"27919" 4 324 3 1 1 0 2 1 100 76 100 195 3393 8 1 1 2492 1 95 95 1 1 +"27920" 4 324 3 1 1 0 2 1 100 76 195 97 4219 6 2 0 592 3 50 98 1 0 +"27921" 4 324 3 1 1 0 2 1 100 76 97 48 1761 7 9 1 611 3 50 49 0 0 +"27922" 4 324 3 1 1 0 2 1 100 76 48 84 1747 2 10 0 4445 2 75 36 0 1 +"27923" 4 324 3 1 1 0 2 1 100 76 84 42 3367 5 3 0 965 3 50 42 1 0 +"27924" 4 324 3 1 1 0 2 1 100 76 42 10 2348 4 2 0 418 2 75 32 1 0 +"27925" 4 324 3 1 1 0 2 1 100 76 10 20 1955 3 5 0 1188 1 95 10 0 1 +"27926" 4 324 3 1 1 0 2 1 100 76 20 39 1297 9 4 1 881 1 95 19 1 1 +"27927" 4 324 3 1 1 0 2 1 100 76 39 76 1237 1 7 0 766 1 95 37 0 1 +"27928" 4 324 3 1 1 0 3 1 100 411 100 175 1949 7 5 1 1278 2 75 75 1 1 +"27929" 4 324 3 1 1 0 3 1 100 411 175 44 1502 2 1 0 932 2 75 131 1 0 +"27930" 4 324 3 1 1 0 3 1 100 411 44 77 1581 8 6 1 296 2 75 33 1 1 +"27931" 4 324 3 1 1 0 3 1 100 411 77 38 2136 4 7 1 439 3 50 39 0 0 +"27932" 4 324 3 1 1 0 3 1 100 411 38 74 2119 3 10 0 3264 1 95 36 0 1 +"27933" 4 324 3 1 1 0 3 1 100 411 74 144 2434 6 8 0 716 1 95 70 0 1 +"27934" 4 324 3 1 1 0 3 1 100 411 144 281 1495 9 2 1 710 1 95 137 1 1 +"27935" 4 324 3 1 1 0 3 1 100 411 281 211 2282 5 3 0 994 4 25 70 1 0 +"27936" 4 324 3 1 1 0 3 1 100 411 211 411 1425 1 10 0 2714 1 95 200 0 1 +"27937" 4 324 3 1 1 0 4 1 100 1020 100 175 1977 2 3 0 473 2 75 75 0 1 +"27938" 4 324 3 1 1 0 4 1 100 1020 175 263 4638 7 4 1 999 3 50 88 1 1 +"27939" 4 324 3 1 1 0 4 1 100 1020 263 131 2374 3 1 0 1050 3 50 132 1 0 +"27940" 4 324 3 1 1 0 4 1 100 1020 131 255 1255 1 9 0 656 1 95 124 0 1 +"27941" 4 324 3 1 1 0 4 1 100 1020 255 446 1782 8 7 1 718 2 75 191 1 1 +"27942" 4 324 3 1 1 0 4 1 100 1020 446 870 2810 9 2 1 3330 1 95 424 1 1 +"27943" 4 324 3 1 1 0 4 1 100 1020 870 1088 3129 5 6 0 2163 4 25 218 0 1 +"27944" 4 324 3 1 1 0 4 1 100 1020 1088 1360 5361 6 8 0 664 4 25 272 0 1 +"27945" 4 324 3 1 1 0 4 1 100 1020 1360 1020 2550 4 7 1 1194 4 25 340 0 0 +"27946" 4 324 4 0 1 1 1 1 100 156 100 125 4168 8 3 1 615 2 25 25 1 1 +"27947" 4 324 4 0 1 1 1 1 100 156 125 131 5151 3 7 0 3159 1 5 6 0 1 +"27948" 4 324 4 0 1 1 1 1 100 156 131 164 2558 8 2 1 1706 2 25 33 1 1 +"27949" 4 324 4 0 1 1 1 1 100 156 164 156 3486 2 1 0 3518 1 5 8 1 0 +"27950" 4 324 5 1 1 1 1 1 100 419 100 150 2624 9 7 1 529 3 50 50 1 1 +"27951" 4 324 5 1 1 1 1 1 100 419 150 158 2349 4 8 0 1587 1 5 8 0 1 +"27952" 4 324 5 1 1 1 1 1 100 419 158 237 1668 8 2 1 475 3 50 79 1 1 +"27953" 4 324 5 1 1 1 1 1 100 419 237 178 1338 2 1 0 728 2 25 59 1 0 +"27954" 4 324 5 1 1 1 1 1 100 419 178 187 1970 7 6 1 754 1 5 9 1 1 +"27955" 4 324 5 1 1 1 1 1 100 419 187 196 3372 5 3 1 3105 1 5 9 1 1 +"27956" 4 324 5 1 1 1 1 1 100 419 196 294 1755 3 6 0 381 3 50 98 0 1 +"27957" 4 324 5 1 1 1 1 1 100 419 294 279 3169 6 9 1 655 1 5 15 0 0 +"27958" 4 324 5 1 1 1 1 1 100 419 279 419 1237 1 4 0 888 3 50 140 0 1 +"27959" 4 324 5 1 1 1 2 1 100 248 100 125 1678 2 9 0 1451 2 25 25 0 1 +"27960" 4 324 5 1 1 1 2 1 100 248 125 94 2875 4 10 1 449 2 25 31 0 0 +"27961" 4 324 5 1 1 1 2 1 100 248 94 70 1321 3 1 0 743 2 25 24 1 0 +"27962" 4 324 5 1 1 1 2 1 100 248 70 105 1417 8 6 1 532 3 50 35 1 1 +"27963" 4 324 5 1 1 1 2 1 100 248 105 100 2632 5 7 1 1120 1 5 5 0 0 +"27964" 4 324 5 1 1 1 2 1 100 248 100 75 1360 6 8 1 869 2 25 25 0 0 +"27965" 4 324 5 1 1 1 2 1 100 248 75 94 1209 7 5 1 894 2 25 19 1 1 +"27966" 4 324 5 1 1 1 2 1 100 248 94 165 1360 1 10 0 493 4 75 71 0 1 +"27967" 4 324 5 1 1 1 2 1 100 
248 165 248 1123 9 3 1 739 3 50 83 1 1 +"27968" 4 324 5 1 1 1 3 1 100 329 100 125 1651 3 5 0 810 2 25 25 0 1 +"27969" 4 324 5 1 1 1 3 1 100 329 125 62 1337 8 9 1 543 3 50 63 0 0 +"27970" 4 324 5 1 1 1 3 1 100 329 62 93 1314 2 4 0 914 3 50 31 0 1 +"27971" 4 324 5 1 1 1 3 1 100 329 93 88 3011 6 3 0 706 1 5 5 1 0 +"27972" 4 324 5 1 1 1 3 1 100 329 88 132 2717 7 6 1 410 3 50 44 1 1 +"27973" 4 324 5 1 1 1 3 1 100 329 132 139 2443 4 2 1 757 1 5 7 1 1 +"27974" 4 324 5 1 1 1 3 1 100 329 139 209 1214 1 8 0 1650 3 50 70 0 1 +"27975" 4 324 5 1 1 1 3 1 100 329 209 219 1809 5 7 0 680 1 5 10 0 1 +"27976" 4 324 5 1 1 1 3 1 100 329 219 329 1277 9 5 1 886 3 50 110 1 1 +"27977" 4 324 5 1 1 1 4 1 100 377 100 150 1226 8 7 1 478 3 50 50 1 1 +"27978" 4 324 5 1 1 1 4 1 100 377 150 188 1508 3 10 0 562 2 25 38 0 1 +"27979" 4 324 5 1 1 1 4 1 100 377 188 141 2581 7 9 1 656 2 25 47 0 0 +"27980" 4 324 5 1 1 1 4 1 100 377 141 212 1149 9 1 1 501 3 50 71 1 1 +"27981" 4 324 5 1 1 1 4 1 100 377 212 265 1210 2 3 0 1119 2 25 53 0 1 +"27982" 4 324 5 1 1 1 4 1 100 377 265 398 1591 1 8 0 985 3 50 133 0 1 +"27983" 4 324 5 1 1 1 4 1 100 377 398 378 1583 5 4 0 586 1 5 20 1 0 +"27984" 4 324 5 1 1 1 4 1 100 377 378 359 1500 4 2 0 658 1 5 19 1 0 +"27985" 4 324 5 1 1 1 4 1 100 377 359 377 1392 6 3 1 715 1 5 18 1 1 +"27986" 4 330 2 0 1 0 1 1 100 141 100 150 6481 2 7 0 1395 3 50 50 0 1 +"27987" 4 330 2 0 1 0 1 1 100 141 150 188 7613 3 7 0 897 4 25 38 0 1 +"27988" 4 330 2 0 1 0 1 1 100 141 188 282 2468 2 8 0 1020 3 50 94 0 1 +"27989" 4 330 2 0 1 0 1 1 100 141 282 141 1713 8 9 1 1702 3 50 141 0 0 +"27990" 4 330 3 1 1 0 1 1 100 632 100 195 29748 1 3 0 1168 1 95 95 0 1 +"27991" 4 330 3 1 1 0 1 1 100 632 195 293 1951 6 5 1 3288 3 50 98 1 1 +"27992" 4 330 3 1 1 0 1 1 100 632 293 440 2097 2 8 0 779 3 50 147 0 1 +"27993" 4 330 3 1 1 0 1 1 100 632 440 330 1810 8 9 1 810 4 25 110 0 0 +"27994" 4 330 3 1 1 0 1 1 100 632 330 495 1402 3 4 0 1437 3 50 165 0 1 +"27995" 4 330 3 1 1 0 1 1 100 632 495 371 3757 5 7 1 784 4 25 124 0 0 +"27996" 4 330 3 1 1 0 1 1 100 632 371 649 1616 7 4 1 993 2 75 278 1 1 +"27997" 4 330 3 1 1 0 1 1 100 632 649 324 2783 4 1 0 680 3 50 325 1 0 +"27998" 4 330 3 1 1 0 1 1 100 632 324 632 1277 9 6 1 1366 1 95 308 1 1 +"27999" 4 330 3 1 1 0 2 1 100 544 100 175 4815 8 1 1 2000 2 75 75 1 1 +"28000" 4 330 3 1 1 0 2 1 100 544 175 263 2885 6 2 1 1362 3 50 88 1 1 +"28001" 4 330 3 1 1 0 2 1 100 544 263 131 4278 7 9 1 2656 3 50 132 0 0 +"28002" 4 330 3 1 1 0 2 1 100 544 131 255 1735 2 10 0 874 1 95 124 0 1 +"28003" 4 330 3 1 1 0 2 1 100 544 255 191 2621 5 3 0 1087 4 25 64 1 0 +"28004" 4 330 3 1 1 0 2 1 100 544 191 95 2184 4 2 0 742 3 50 96 1 0 +"28005" 4 330 3 1 1 0 2 1 100 544 95 143 2784 3 5 0 1689 3 50 48 0 1 +"28006" 4 330 3 1 1 0 2 1 100 544 143 279 1686 9 4 1 700 1 95 136 1 1 +"28007" 4 330 3 1 1 0 2 1 100 544 279 544 1704 1 7 0 1229 1 95 265 0 1 +"28008" 4 330 3 1 1 0 3 1 100 64 100 175 3277 7 5 1 1823 2 75 75 1 1 +"28009" 4 330 3 1 1 0 3 1 100 64 175 9 1390 2 1 0 1849 1 95 166 1 0 +"28010" 4 330 3 1 1 0 3 1 100 64 9 18 1694 8 6 1 787 1 95 9 1 1 +"28011" 4 330 3 1 1 0 3 1 100 64 18 35 2594 4 7 0 1005 1 95 17 0 1 +"28012" 4 330 3 1 1 0 3 1 100 64 35 68 2706 3 10 0 1047 1 95 33 0 1 +"28013" 4 330 3 1 1 0 3 1 100 64 68 34 1346 6 8 1 656 3 50 34 0 0 +"28014" 4 330 3 1 1 0 3 1 100 64 34 66 2502 9 2 1 548 1 95 32 1 1 +"28015" 4 330 3 1 1 0 3 1 100 64 66 33 2696 5 3 0 1269 3 50 33 1 0 +"28016" 4 330 3 1 1 0 3 1 100 64 33 64 2438 1 10 0 2664 1 95 31 0 1 +"28017" 4 330 3 1 1 0 4 1 100 1213 100 195 2826 2 3 0 681 1 95 95 0 1 +"28018" 4 330 3 
1 1 0 4 1 100 1213 195 293 1718 7 4 1 955 3 50 98 1 1 +"28019" 4 330 3 1 1 0 4 1 100 1213 293 146 4739 3 1 0 471 3 50 147 1 0 +"28020" 4 330 3 1 1 0 4 1 100 1213 146 285 1654 1 9 0 741 1 95 139 0 1 +"28021" 4 330 3 1 1 0 4 1 100 1213 285 499 1879 8 7 1 699 2 75 214 1 1 +"28022" 4 330 3 1 1 0 4 1 100 1213 499 973 1727 9 2 1 434 1 95 474 1 1 +"28023" 4 330 3 1 1 0 4 1 100 1213 973 1216 3171 5 6 0 2434 4 25 243 0 1 +"28024" 4 330 3 1 1 0 4 1 100 1213 1216 1155 2307 6 8 1 764 5 5 61 0 0 +"28025" 4 330 3 1 1 0 4 1 100 1213 1155 1213 1813 4 7 0 653 5 5 58 0 1 +"28026" 4 330 4 0 1 1 1 1 100 169 100 150 4687 8 3 1 1718 3 50 50 1 1 +"28027" 4 330 4 0 1 1 1 1 100 169 150 225 2881 3 7 0 701 3 50 75 0 1 +"28028" 4 330 4 0 1 1 1 1 100 169 225 338 2346 8 2 1 1272 3 50 113 1 1 +"28029" 4 330 4 0 1 1 1 1 100 169 338 169 1568 2 1 0 1116 3 50 169 1 0 +"28030" 4 330 5 1 1 1 1 1 100 322 100 175 2044 9 7 1 680 4 75 75 1 1 +"28031" 4 330 5 1 1 1 1 1 100 322 175 219 1533 4 8 0 706 2 25 44 0 1 +"28032" 4 330 5 1 1 1 1 1 100 322 219 274 1333 8 2 1 1124 2 25 55 1 1 +"28033" 4 330 5 1 1 1 1 1 100 322 274 137 2251 2 1 0 590 3 50 137 1 0 +"28034" 4 330 5 1 1 1 1 1 100 322 137 206 1991 7 6 1 614 3 50 69 1 1 +"28035" 4 330 5 1 1 1 1 1 100 322 206 196 2270 5 3 0 920 1 5 10 1 0 +"28036" 4 330 5 1 1 1 1 1 100 322 196 245 1769 3 6 0 731 2 25 49 0 1 +"28037" 4 330 5 1 1 1 1 1 100 322 245 184 1470 6 9 1 1061 2 25 61 0 0 +"28038" 4 330 5 1 1 1 1 1 100 322 184 322 1424 1 4 0 2258 4 75 138 0 1 +"28039" 4 330 5 1 1 1 2 1 100 392 100 125 1866 2 9 0 1399 2 25 25 0 1 +"28040" 4 330 5 1 1 1 2 1 100 392 125 156 1314 4 10 0 3457 2 25 31 0 1 +"28041" 4 330 5 1 1 1 2 1 100 392 156 78 890 3 1 0 1190 3 50 78 1 0 +"28042" 4 330 5 1 1 1 2 1 100 392 78 117 2098 8 6 1 821 3 50 39 1 1 +"28043" 4 330 5 1 1 1 2 1 100 392 117 123 1171 5 7 0 505 1 5 6 0 1 +"28044" 4 330 5 1 1 1 2 1 100 392 123 92 2238 6 8 1 673 2 25 31 0 0 +"28045" 4 330 5 1 1 1 2 1 100 392 92 115 1118 7 5 1 686 2 25 23 1 1 +"28046" 4 330 5 1 1 1 2 1 100 392 115 224 947 1 10 0 1005 5 95 109 0 1 +"28047" 4 330 5 1 1 1 2 1 100 392 224 392 1146 9 3 1 794 4 75 168 1 1 +"28048" 4 330 5 1 1 1 3 1 100 270 100 150 1209 3 5 0 598 3 50 50 0 1 +"28049" 4 330 5 1 1 1 3 1 100 270 150 75 1159 8 9 1 1342 3 50 75 0 0 +"28050" 4 330 5 1 1 1 3 1 100 270 75 113 1067 2 4 0 598 3 50 38 0 1 +"28051" 4 330 5 1 1 1 3 1 100 270 113 170 1407 6 3 1 2106 3 50 57 1 1 +"28052" 4 330 5 1 1 1 3 1 100 270 170 213 3627 7 6 1 2564 2 25 43 1 1 +"28053" 4 330 5 1 1 1 3 1 100 270 213 160 1087 4 2 0 833 2 25 53 1 0 +"28054" 4 330 5 1 1 1 3 1 100 270 160 240 1543 1 8 0 739 3 50 80 0 1 +"28055" 4 330 5 1 1 1 3 1 100 270 240 180 8277 5 7 1 784 2 25 60 0 0 +"28056" 4 330 5 1 1 1 3 1 100 270 180 270 1913 9 5 1 659 3 50 90 1 1 +"28057" 4 330 5 1 1 1 4 1 100 508 100 150 1544 8 7 1 588 3 50 50 1 1 +"28058" 4 330 5 1 1 1 4 1 100 508 150 225 1541 3 10 0 841 3 50 75 0 1 +"28059" 4 330 5 1 1 1 4 1 100 508 225 112 1710 7 9 1 643 3 50 113 0 0 +"28060" 4 330 5 1 1 1 4 1 100 508 112 196 1257 9 1 1 1131 4 75 84 1 1 +"28061" 4 330 5 1 1 1 4 1 100 508 196 343 1307 2 3 0 626 4 75 147 0 1 +"28062" 4 330 5 1 1 1 4 1 100 508 343 515 1354 1 8 0 1759 3 50 172 0 1 +"28063" 4 330 5 1 1 1 4 1 100 508 515 541 2193 5 4 1 1774 1 5 26 1 1 +"28064" 4 330 5 1 1 1 4 1 100 508 541 406 1766 4 2 0 1080 2 25 135 1 0 +"28065" 4 330 5 1 1 1 4 1 100 508 406 508 1874 6 3 1 830 2 25 102 1 1 +"28066" 4 341 2 0 1 1 1 1 100 296 100 150 5331 8 3 1 1172 3 50 50 1 1 +"28067" 4 341 2 0 1 1 1 1 100 296 150 188 10634 3 7 0 1205 2 25 38 0 1 +"28068" 4 341 2 0 1 1 1 1 100 
296 188 282 4857 8 2 1 864 3 50 94 1 1 +"28069" 4 341 2 0 1 1 1 1 100 296 282 296 2716 2 1 1 936 1 5 14 1 1 +"28070" 4 341 3 1 1 1 1 1 100 353 100 150 4362 9 7 1 3248 3 50 50 1 1 +"28071" 4 341 3 1 1 1 1 1 100 353 150 188 4506 4 8 0 417 2 25 38 0 1 +"28072" 4 341 3 1 1 1 1 1 100 353 188 235 2155 8 2 1 1498 2 25 47 1 1 +"28073" 4 341 3 1 1 1 1 1 100 353 235 223 4480 2 1 0 1008 1 5 12 1 0 +"28074" 4 341 3 1 1 1 1 1 100 353 223 335 2173 7 6 1 812 3 50 112 1 1 +"28075" 4 341 3 1 1 1 1 1 100 353 335 251 3346 5 3 0 289 2 25 84 1 0 +"28076" 4 341 3 1 1 1 1 1 100 353 251 314 1312 3 6 0 880 2 25 63 0 1 +"28077" 4 341 3 1 1 1 1 1 100 353 314 235 2339 6 9 1 1010 2 25 79 0 0 +"28078" 4 341 3 1 1 1 1 1 100 353 235 353 5327 1 4 0 455 3 50 118 0 1 +"28079" 4 341 3 1 1 1 2 1 100 115 100 150 3019 2 9 0 777 3 50 50 0 1 +"28080" 4 341 3 1 1 1 2 1 100 115 150 112 6514 4 10 1 289 2 25 38 0 0 +"28081" 4 341 3 1 1 1 2 1 100 115 112 140 1584 3 1 1 339 2 25 28 1 1 +"28082" 4 341 3 1 1 1 2 1 100 115 140 245 2656 8 6 1 832 4 75 105 1 1 +"28083" 4 341 3 1 1 1 2 1 100 115 245 184 3341 5 7 1 964 2 25 61 0 0 +"28084" 4 341 3 1 1 1 2 1 100 115 184 46 1403 6 8 1 710 4 75 138 0 0 +"28085" 4 341 3 1 1 1 2 1 100 115 46 69 1234 7 5 1 517 3 50 23 1 1 +"28086" 4 341 3 1 1 1 2 1 100 115 69 121 3602 1 10 0 1789 4 75 52 0 1 +"28087" 4 341 3 1 1 1 2 1 100 115 121 115 3357 9 3 0 515 1 5 6 1 0 +"28088" 4 341 3 1 1 1 3 1 100 129 100 125 1598 3 5 0 888 2 25 25 0 1 +"28089" 4 341 3 1 1 1 3 1 100 129 125 62 1939 8 9 1 357 3 50 63 0 0 +"28090" 4 341 3 1 1 1 3 1 100 129 62 93 1313 2 4 0 1153 3 50 31 0 1 +"28091" 4 341 3 1 1 1 3 1 100 129 93 116 1199 6 3 1 332 2 25 23 1 1 +"28092" 4 341 3 1 1 1 3 1 100 129 116 145 1202 7 6 1 702 2 25 29 1 1 +"28093" 4 341 3 1 1 1 3 1 100 129 145 109 3730 4 2 0 646 2 25 36 1 0 +"28094" 4 341 3 1 1 1 3 1 100 129 109 164 1341 1 8 0 291 3 50 55 0 1 +"28095" 4 341 3 1 1 1 3 1 100 129 164 123 1602 5 7 1 276 2 25 41 0 0 +"28096" 4 341 3 1 1 1 3 1 100 129 123 129 1524 9 5 1 593 1 5 6 1 1 +"28097" 4 341 3 1 1 1 4 1 100 510 100 195 2525 8 7 1 1307 5 95 95 1 1 +"28098" 4 341 3 1 1 1 4 1 100 510 195 293 3403 3 10 0 326 3 50 98 0 1 +"28099" 4 341 3 1 1 1 4 1 100 510 293 308 1908 7 9 0 727 1 5 15 0 1 +"28100" 4 341 3 1 1 1 4 1 100 510 308 539 1329 9 1 1 400 4 75 231 1 1 +"28101" 4 341 3 1 1 1 4 1 100 510 539 566 1515 2 3 0 1000 1 5 27 0 1 +"28102" 4 341 3 1 1 1 4 1 100 510 566 538 1902 1 8 1 265 1 5 28 0 0 +"28103" 4 341 3 1 1 1 4 1 100 510 538 565 1738 5 4 1 2757 1 5 27 1 1 +"28104" 4 341 3 1 1 1 4 1 100 510 565 537 1526 4 2 0 1491 1 5 28 1 0 +"28105" 4 341 3 1 1 1 4 1 100 510 537 510 1954 6 3 0 811 1 5 27 1 0 +"28106" 4 341 4 0 1 0 1 1 100 28 100 195 9060 2 7 0 4555 1 95 95 0 1 +"28107" 4 341 4 0 1 0 1 1 100 28 195 380 11495 3 7 0 1130 1 95 185 0 1 +"28108" 4 341 4 0 1 0 1 1 100 28 380 570 4657 2 8 0 2554 3 50 190 0 1 +"28109" 4 341 4 0 1 0 1 1 100 28 570 28 2757 8 9 1 319 1 95 542 0 0 +"28110" 4 341 5 1 1 0 1 0 100 0 100 195 1498 1 3 0 280 1 95 95 0 1 +"28111" 4 341 5 1 1 0 1 0 100 0 195 380 1304 6 5 1 1264 1 95 185 1 1 +"28112" 4 341 5 1 1 0 1 0 100 0 380 665 2225 2 8 0 805 2 75 285 0 1 +"28113" 4 341 5 1 1 0 1 0 100 0 665 33 2267 8 9 1 309 1 95 632 0 0 +"28114" 4 341 5 1 1 0 1 0 100 0 33 64 2260 3 4 0 1301 1 95 31 0 1 +"28115" 4 341 5 1 1 0 1 0 100 0 64 3 2349 5 7 1 266 1 95 61 0 0 +"28116" 4 341 5 1 1 0 1 0 100 0 3 6 2299 7 4 1 363 1 95 3 1 1 +"28117" 4 341 5 1 1 0 1 0 100 0 6 0 1239 4 1 0 1010 1 95 6 1 0 +"28118" 4 341 5 1 1 0 2 1 100 53 100 195 1376 8 1 1 260 1 95 95 1 1 +"28119" 4 341 5 1 1 0 2 1 100 53 195 293 
1085 6 2 1 903 3 50 98 1 1 +"28120" 4 341 5 1 1 0 2 1 100 53 293 146 977 7 9 1 1657 3 50 147 0 0 +"28121" 4 341 5 1 1 0 2 1 100 53 146 285 983 2 10 0 230 1 95 139 0 1 +"28122" 4 341 5 1 1 0 2 1 100 53 285 142 1120 5 3 0 441 3 50 143 1 0 +"28123" 4 341 5 1 1 0 2 1 100 53 142 7 2305 4 2 0 345 1 95 135 1 0 +"28124" 4 341 5 1 1 0 2 1 100 53 7 14 1622 3 5 0 574 1 95 7 0 1 +"28125" 4 341 5 1 1 0 2 1 100 53 14 27 951 9 4 1 248 1 95 13 1 1 +"28126" 4 341 5 1 1 0 2 1 100 53 27 53 1601 1 7 0 275 1 95 26 0 1 +"28127" 4 341 5 1 1 0 3 1 100 31 100 195 1577 7 5 1 203 1 95 95 1 1 +"28128" 4 341 5 1 1 0 3 1 100 31 195 10 1238 2 1 0 177 1 95 185 1 0 +"28129" 4 341 5 1 1 0 3 1 100 31 10 20 934 8 6 1 167 1 95 10 1 1 +"28130" 4 341 5 1 1 0 3 1 100 31 20 39 900 4 7 0 182 1 95 19 0 1 +"28131" 4 341 5 1 1 0 3 1 100 31 39 76 715 3 10 0 718 1 95 37 0 1 +"28132" 4 341 5 1 1 0 3 1 100 31 76 4 2193 6 8 1 454 1 95 72 0 0 +"28133" 4 341 5 1 1 0 3 1 100 31 4 8 822 9 2 1 252 1 95 4 1 1 +"28134" 4 341 5 1 1 0 3 1 100 31 8 16 717 5 3 1 200 1 95 8 1 1 +"28135" 4 341 5 1 1 0 3 1 100 31 16 31 612 1 10 0 204 1 95 15 0 1 +"28136" 4 341 5 1 1 0 4 1 100 125 100 195 1361 2 3 0 277 1 95 95 0 1 +"28137" 4 341 5 1 1 0 4 1 100 125 195 380 1910 7 4 1 696 1 95 185 1 1 +"28138" 4 341 5 1 1 0 4 1 100 125 380 285 780 3 1 0 853 4 25 95 1 0 +"28139" 4 341 5 1 1 0 4 1 100 125 285 556 1258 1 9 0 179 1 95 271 0 1 +"28140" 4 341 5 1 1 0 4 1 100 125 556 1084 912 8 7 1 211 1 95 528 1 1 +"28141" 4 341 5 1 1 0 4 1 100 125 1084 1355 5198 9 2 1 709 4 25 271 1 1 +"28142" 4 341 5 1 1 0 4 1 100 125 1355 2642 2480 5 6 0 192 1 95 1287 0 1 +"28143" 4 341 5 1 1 0 4 1 100 125 2642 2510 3404 6 8 1 312 5 5 132 0 0 +"28144" 4 341 5 1 1 0 4 1 100 125 2510 125 2230 4 7 1 249 1 95 2385 0 0 +"28145" 4 343 2 0 1 0 1 1 100 1 100 175 18556 2 7 0 1343 2 75 75 0 1 +"28146" 4 343 2 0 1 0 1 1 100 1 175 341 8020 3 7 0 3676 1 95 166 0 1 +"28147" 4 343 2 0 1 0 1 1 100 1 341 17 8306 2 8 1 2924 1 95 324 0 0 +"28148" 4 343 2 0 1 0 1 1 100 1 17 1 5166 8 9 1 1149 1 95 16 0 0 +"28149" 4 343 3 1 1 0 1 0 100 1 100 195 8129 1 3 0 1251 1 95 95 0 1 +"28150" 4 343 3 1 1 0 1 0 100 1 195 10 15117 6 5 0 1003 1 95 185 1 0 +"28151" 4 343 3 1 1 0 1 0 100 1 10 20 3105 2 8 0 1018 1 95 10 0 1 +"28152" 4 343 3 1 1 0 1 0 100 1 20 1 2175 8 9 1 1045 1 95 19 0 0 +"28153" 4 343 3 1 1 0 2 1 100 1 100 195 3908 8 1 1 776 1 95 95 1 1 +"28154" 4 343 3 1 1 0 2 1 100 1 195 10 9752 6 2 0 1189 1 95 185 1 0 +"28155" 4 343 3 1 1 0 2 1 100 1 10 20 3458 7 9 0 923 1 95 10 0 1 +"28156" 4 343 3 1 1 0 2 1 100 1 20 39 2316 2 10 0 1083 1 95 19 0 1 +"28157" 4 343 3 1 1 0 2 1 100 1 39 76 8381 5 3 1 1067 1 95 37 1 1 +"28158" 4 343 3 1 1 0 2 1 100 1 76 4 6110 4 2 0 1048 1 95 72 1 0 +"28159" 4 343 3 1 1 0 2 1 100 1 4 8 5500 3 5 0 1052 1 95 4 0 1 +"28160" 4 343 3 1 1 0 2 1 100 1 8 16 6223 9 4 1 1244 1 95 8 1 1 +"28161" 4 343 3 1 1 0 2 1 100 1 16 1 3510 1 7 1 1093 1 95 15 0 0 +"28162" 4 343 3 1 1 0 3 0 100 0 100 195 6092 7 5 1 1375 1 95 95 1 1 +"28163" 4 343 3 1 1 0 3 0 100 0 195 10 1939 2 1 0 1028 1 95 185 1 0 +"28164" 4 343 3 1 1 0 3 0 100 0 10 0 2344 8 6 0 989 1 95 10 1 0 +"28165" 4 343 3 1 1 0 4 1 100 27 100 195 2927 2 3 0 1474 1 95 95 0 1 +"28166" 4 343 3 1 1 0 4 1 100 27 195 380 1745 7 4 1 1164 1 95 185 1 1 +"28167" 4 343 3 1 1 0 4 1 100 27 380 741 3265 3 1 1 972 1 95 361 1 1 +"28168" 4 343 3 1 1 0 4 1 100 27 741 1445 3635 1 9 0 1180 1 95 704 0 1 +"28169" 4 343 3 1 1 0 4 1 100 27 1445 72 7153 8 7 0 866 1 95 1373 1 0 +"28170" 4 343 3 1 1 0 4 1 100 27 72 140 1445 9 2 1 942 1 95 68 1 1 +"28171" 4 343 3 1 1 0 4 1 100 27 140 
273 5086 5 6 0 1658 1 95 133 0 1 +"28172" 4 343 3 1 1 0 4 1 100 27 273 532 6064 6 8 0 1453 1 95 259 0 1 +"28173" 4 343 3 1 1 0 4 1 100 27 532 27 2286 4 7 1 991 1 95 505 0 0 +"28174" 4 343 4 0 1 1 1 1 100 130 100 125 4263 8 3 1 4506 2 25 25 1 1 +"28175" 4 343 4 0 1 1 1 1 100 130 125 131 2296 3 7 0 1440 1 5 6 0 1 +"28176" 4 343 4 0 1 1 1 1 100 130 131 124 2529 8 2 0 918 1 5 7 1 0 +"28177" 4 343 4 0 1 1 1 1 100 130 124 130 6272 2 1 1 1243 1 5 6 1 1 +"28178" 4 343 5 1 1 1 1 1 100 116 100 105 4036 9 7 1 1441 1 5 5 1 1 +"28179" 4 343 5 1 1 1 1 1 100 116 105 110 8354 4 8 0 1027 1 5 5 0 1 +"28180" 4 343 5 1 1 1 1 1 100 116 110 116 3911 8 2 1 1274 1 5 6 1 1 +"28181" 4 343 5 1 1 1 1 1 100 116 116 122 5874 2 1 1 1552 1 5 6 1 1 +"28182" 4 343 5 1 1 1 1 1 100 116 122 128 4123 7 6 1 1212 1 5 6 1 1 +"28183" 4 343 5 1 1 1 1 1 100 116 128 122 4616 5 3 0 1297 1 5 6 1 0 +"28184" 4 343 5 1 1 1 1 1 100 116 122 116 4225 3 6 1 1157 1 5 6 0 0 +"28185" 4 343 5 1 1 1 1 1 100 116 116 122 2643 6 9 0 1276 1 5 6 0 1 +"28186" 4 343 5 1 1 1 1 1 100 116 122 116 4972 1 4 1 1501 1 5 6 0 0 +"28187" 4 343 5 1 1 1 2 1 100 141 100 105 5388 2 9 0 1273 1 5 5 0 1 +"28188" 4 343 5 1 1 1 2 1 100 141 105 100 3626 4 10 1 1115 1 5 5 0 0 +"28189" 4 343 5 1 1 1 2 1 100 141 100 105 3483 3 1 1 1115 1 5 5 1 1 +"28190" 4 343 5 1 1 1 2 1 100 141 105 110 3339 8 6 1 1018 1 5 5 1 1 +"28191" 4 343 5 1 1 1 2 1 100 141 110 116 4702 5 7 0 1348 1 5 6 0 1 +"28192" 4 343 5 1 1 1 2 1 100 141 116 122 11937 6 8 0 1350 1 5 6 0 1 +"28193" 4 343 5 1 1 1 2 1 100 141 122 128 4899 7 5 1 1207 1 5 6 1 1 +"28194" 4 343 5 1 1 1 2 1 100 141 128 134 6932 1 10 0 1183 1 5 6 0 1 +"28195" 4 343 5 1 1 1 2 1 100 141 134 141 7116 9 3 1 1279 1 5 7 1 1 +"28196" 4 343 5 1 1 1 3 1 100 116 100 105 4738 3 5 0 1254 1 5 5 0 1 +"28197" 4 343 5 1 1 1 3 1 100 116 105 100 3216 8 9 1 1044 1 5 5 0 0 +"28198" 4 343 5 1 1 1 3 1 100 116 100 105 2663 2 4 0 1174 1 5 5 0 1 +"28199" 4 343 5 1 1 1 3 1 100 116 105 100 3044 6 3 0 1137 1 5 5 1 0 +"28200" 4 343 5 1 1 1 3 1 100 116 100 95 3872 7 6 0 1285 1 5 5 1 0 +"28201" 4 343 5 1 1 1 3 1 100 116 95 100 5261 4 2 1 1216 1 5 5 1 1 +"28202" 4 343 5 1 1 1 3 1 100 116 100 105 3524 1 8 0 1426 1 5 5 0 1 +"28203" 4 343 5 1 1 1 3 1 100 116 105 110 7650 5 7 0 1060 1 5 5 0 1 +"28204" 4 343 5 1 1 1 3 1 100 116 110 116 3279 9 5 1 1337 1 5 6 1 1 +"28205" 4 343 5 1 1 1 4 1 100 125 100 105 3541 8 7 1 1234 1 5 5 1 1 +"28206" 4 343 5 1 1 1 4 1 100 125 105 110 3916 3 10 0 1264 1 5 5 0 1 +"28207" 4 343 5 1 1 1 4 1 100 125 110 104 3345 7 9 1 1191 1 5 6 0 0 +"28208" 4 343 5 1 1 1 4 1 100 125 104 109 4283 9 1 1 653 1 5 5 1 1 +"28209" 4 343 5 1 1 1 4 1 100 125 109 114 7939 2 3 0 1110 1 5 5 0 1 +"28210" 4 343 5 1 1 1 4 1 100 125 114 108 6310 1 8 1 1039 1 5 6 0 0 +"28211" 4 343 5 1 1 1 4 1 100 125 108 113 3443 5 4 1 981 1 5 5 1 1 +"28212" 4 343 5 1 1 1 4 1 100 125 113 119 15084 4 2 1 1035 1 5 6 1 1 +"28213" 4 343 5 1 1 1 4 1 100 125 119 125 5232 6 3 1 1036 1 5 6 1 1 +"28214" 4 350 2 0 1 1 1 1 100 205 100 125 9031 8 3 1 2414 2 25 25 1 1 +"28215" 4 350 2 0 1 1 1 1 100 205 125 156 4145 3 7 0 2953 2 25 31 0 1 +"28216" 4 350 2 0 1 1 1 1 100 205 156 164 2882 8 2 1 4998 1 5 8 1 1 +"28217" 4 350 2 0 1 1 1 1 100 205 164 205 1958 2 1 1 302 2 25 41 1 1 +"28218" 4 350 3 1 1 1 1 1 100 72 100 150 2574 9 7 1 498 3 50 50 1 1 +"28219" 4 350 3 1 1 1 1 1 100 72 150 188 1312 4 8 0 767 2 25 38 0 1 +"28220" 4 350 3 1 1 1 1 1 100 72 188 141 1416 8 2 0 1075 2 25 47 1 0 +"28221" 4 350 3 1 1 1 1 1 100 72 141 70 1740 2 1 0 407 3 50 71 1 0 +"28222" 4 350 3 1 1 1 1 1 100 72 70 88 1633 7 6 1 465 2 25 18 
1 1 +"28223" 4 350 3 1 1 1 1 1 100 72 88 66 2560 5 3 0 900 2 25 22 1 0 +"28224" 4 350 3 1 1 1 1 1 100 72 66 49 1614 3 6 1 671 2 25 17 0 0 +"28225" 4 350 3 1 1 1 1 1 100 72 49 37 1156 6 9 1 667 2 25 12 0 0 +"28226" 4 350 3 1 1 1 1 1 100 72 37 72 1321 1 4 0 1120 5 95 35 0 1 +"28227" 4 350 3 1 1 1 2 1 100 686 100 125 5297 2 9 0 795 2 25 25 0 1 +"28228" 4 350 3 1 1 1 2 1 100 686 125 156 1578 4 10 0 564 2 25 31 0 1 +"28229" 4 350 3 1 1 1 2 1 100 686 156 234 1351 3 1 1 540 3 50 78 1 1 +"28230" 4 350 3 1 1 1 2 1 100 686 234 293 2703 8 6 1 533 2 25 59 1 1 +"28231" 4 350 3 1 1 1 2 1 100 686 293 366 2393 5 7 0 852 2 25 73 0 1 +"28232" 4 350 3 1 1 1 2 1 100 686 366 458 1761 6 8 0 419 2 25 92 0 1 +"28233" 4 350 3 1 1 1 2 1 100 686 458 481 1348 7 5 1 1025 1 5 23 1 1 +"28234" 4 350 3 1 1 1 2 1 100 686 481 722 2110 1 10 0 415 3 50 241 0 1 +"28235" 4 350 3 1 1 1 2 1 100 686 722 686 2496 9 3 0 1031 1 5 36 1 0 +"28236" 4 350 3 1 1 1 3 1 100 63 100 150 2622 3 5 0 1192 3 50 50 0 1 +"28237" 4 350 3 1 1 1 3 1 100 63 150 37 1260 8 9 1 502 4 75 113 0 0 +"28238" 4 350 3 1 1 1 3 1 100 63 37 65 1283 2 4 0 380 4 75 28 0 1 +"28239" 4 350 3 1 1 1 3 1 100 63 65 49 1607 6 3 0 593 2 25 16 1 0 +"28240" 4 350 3 1 1 1 3 1 100 63 49 74 5113 7 6 1 492 3 50 25 1 1 +"28241" 4 350 3 1 1 1 3 1 100 63 74 37 1658 4 2 0 481 3 50 37 1 0 +"28242" 4 350 3 1 1 1 3 1 100 63 37 72 1599 1 8 0 1378 5 95 35 0 1 +"28243" 4 350 3 1 1 1 3 1 100 63 72 36 1573 5 7 1 320 3 50 36 0 0 +"28244" 4 350 3 1 1 1 3 1 100 63 36 63 1637 9 5 1 725 4 75 27 1 1 +"28245" 4 350 3 1 1 1 4 1 100 791 100 195 2970 8 7 1 691 5 95 95 1 1 +"28246" 4 350 3 1 1 1 4 1 100 791 195 244 2063 3 10 0 747 2 25 49 0 1 +"28247" 4 350 3 1 1 1 4 1 100 791 244 183 1828 7 9 1 907 2 25 61 0 0 +"28248" 4 350 3 1 1 1 4 1 100 791 183 357 933 9 1 1 462 5 95 174 1 1 +"28249" 4 350 3 1 1 1 4 1 100 791 357 536 2437 2 3 0 775 3 50 179 0 1 +"28250" 4 350 3 1 1 1 4 1 100 791 536 804 4096 1 8 0 450 3 50 268 0 1 +"28251" 4 350 3 1 1 1 4 1 100 791 804 1005 1899 5 4 1 390 2 25 201 1 1 +"28252" 4 350 3 1 1 1 4 1 100 791 1005 1055 2072 4 2 1 556 1 5 50 1 1 +"28253" 4 350 3 1 1 1 4 1 100 791 1055 791 1630 6 3 0 503 2 25 264 1 0 +"28254" 4 350 4 0 1 0 1 1 100 5 100 195 5444 2 7 0 3431 1 95 95 0 1 +"28255" 4 350 4 0 1 0 1 1 100 5 195 380 7289 3 7 0 1147 1 95 185 0 1 +"28256" 4 350 4 0 1 0 1 1 100 5 380 95 2402 2 8 1 453 2 75 285 0 0 +"28257" 4 350 4 0 1 0 1 1 100 5 95 5 1469 8 9 1 1508 1 95 90 0 0 +"28258" 4 350 5 1 1 0 1 1 100 6 100 195 1917 1 3 0 776 1 95 95 0 1 +"28259" 4 350 5 1 1 0 1 1 100 6 195 380 1528 6 5 1 453 1 95 185 1 1 +"28260" 4 350 5 1 1 0 1 1 100 6 380 665 2812 2 8 0 484 2 75 285 0 1 +"28261" 4 350 5 1 1 0 1 1 100 6 665 33 1971 8 9 1 299 1 95 632 0 0 +"28262" 4 350 5 1 1 0 1 1 100 6 33 64 1178 3 4 0 608 1 95 31 0 1 +"28263" 4 350 5 1 1 0 1 1 100 6 64 32 1912 5 7 1 387 3 50 32 0 0 +"28264" 4 350 5 1 1 0 1 1 100 6 32 62 1117 7 4 1 293 1 95 30 1 1 +"28265" 4 350 5 1 1 0 1 1 100 6 62 3 1324 4 1 0 315 1 95 59 1 0 +"28266" 4 350 5 1 1 0 1 1 100 6 3 6 1137 9 6 1 377 1 95 3 1 1 +"28267" 4 350 5 1 1 0 2 1 100 14 100 195 1877 8 1 1 529 1 95 95 1 1 +"28268" 4 350 5 1 1 0 2 1 100 14 195 380 1133 6 2 1 420 1 95 185 1 1 +"28269" 4 350 5 1 1 0 2 1 100 14 380 570 1165 7 9 0 1116 3 50 190 0 1 +"28270" 4 350 5 1 1 0 2 1 100 14 570 1112 1540 2 10 0 288 1 95 542 0 1 +"28271" 4 350 5 1 1 0 2 1 100 14 1112 1390 1168 5 3 1 438 4 25 278 1 1 +"28272" 4 350 5 1 1 0 2 1 100 14 1390 69 1446 4 2 0 391 1 95 1321 1 0 +"28273" 4 350 5 1 1 0 2 1 100 14 69 135 1001 3 5 0 303 1 95 66 0 1 +"28274" 4 350 5 1 1 0 2 1 100 14 
135 7 2549 9 4 0 265 1 95 128 1 0 +"28275" 4 350 5 1 1 0 2 1 100 14 7 14 986 1 7 0 233 1 95 7 0 1 +"28276" 4 350 5 1 1 0 3 0 100 0 100 195 1567 7 5 1 962 1 95 95 1 1 +"28277" 4 350 5 1 1 0 3 0 100 0 195 10 937 2 1 0 242 1 95 185 1 0 +"28278" 4 350 5 1 1 0 3 0 100 0 10 20 1452 8 6 1 356 1 95 10 1 1 +"28279" 4 350 5 1 1 0 3 0 100 0 20 39 1472 4 7 0 455 1 95 19 0 1 +"28280" 4 350 5 1 1 0 3 0 100 0 39 76 838 3 10 0 316 1 95 37 0 1 +"28281" 4 350 5 1 1 0 3 0 100 0 76 4 1171 6 8 1 256 1 95 72 0 0 +"28282" 4 350 5 1 1 0 3 0 100 0 4 8 993 9 2 1 304 1 95 4 1 1 +"28283" 4 350 5 1 1 0 3 0 100 0 8 0 870 5 3 0 362 1 95 8 1 0 +"28284" 4 350 5 1 1 0 4 1 100 3568 100 195 1558 2 3 0 353 1 95 95 0 1 +"28285" 4 350 5 1 1 0 4 1 100 3568 195 380 1149 7 4 1 244 1 95 185 1 1 +"28286" 4 350 5 1 1 0 4 1 100 3568 380 570 1358 3 1 1 1115 3 50 190 1 1 +"28287" 4 350 5 1 1 0 4 1 100 3568 570 1112 1699 1 9 0 216 1 95 542 0 1 +"28288" 4 350 5 1 1 0 4 1 100 3568 1112 834 1523 8 7 0 329 4 25 278 1 0 +"28289" 4 350 5 1 1 0 4 1 100 3568 834 1626 1558 9 2 1 202 1 95 792 1 1 +"28290" 4 350 5 1 1 0 4 1 100 3568 1626 3171 1349 5 6 0 259 1 95 1545 0 1 +"28291" 4 350 5 1 1 0 4 1 100 3568 3171 4757 1804 6 8 0 1132 3 50 1586 0 1 +"28292" 4 350 5 1 1 0 4 1 100 3568 4757 3568 1163 4 7 1 690 4 25 1189 0 0 +"28293" 4 352 2 0 1 1 1 1 100 98 100 150 4724 8 3 1 1033 3 50 50 1 1 +"28294" 4 352 2 0 1 1 1 1 100 98 150 225 6101 3 7 0 1721 3 50 75 0 1 +"28295" 4 352 2 0 1 1 1 1 100 98 225 394 3420 8 2 1 807 4 75 169 1 1 +"28296" 4 352 2 0 1 1 1 1 100 98 394 98 2318 2 1 0 2797 4 75 296 1 0 +"28297" 4 352 3 1 1 1 1 1 100 10 100 175 16609 9 7 1 2880 4 75 75 1 1 +"28298" 4 352 3 1 1 1 1 1 100 10 175 306 3620 4 8 0 734 4 75 131 0 1 +"28299" 4 352 3 1 1 1 1 1 100 10 306 597 1832 8 2 1 2719 5 95 291 1 1 +"28300" 4 352 3 1 1 1 1 1 100 10 597 30 3756 2 1 0 959 5 95 567 1 0 +"28301" 4 352 3 1 1 1 1 1 100 10 30 53 3846 7 6 1 2245 4 75 23 1 1 +"28302" 4 352 3 1 1 1 1 1 100 10 53 13 2328 5 3 0 463 4 75 40 1 0 +"28303" 4 352 3 1 1 1 1 1 100 10 13 20 2502 3 6 0 4325 3 50 7 0 1 +"28304" 4 352 3 1 1 1 1 1 100 10 20 5 2324 6 9 1 1140 4 75 15 0 0 +"28305" 4 352 3 1 1 1 1 1 100 10 5 10 1635 1 4 0 1139 5 95 5 0 1 +"28306" 4 352 3 1 1 1 2 1 100 313 100 175 4107 2 9 0 541 4 75 75 0 1 +"28307" 4 352 3 1 1 1 2 1 100 313 175 306 4352 4 10 0 1303 4 75 131 0 1 +"28308" 4 352 3 1 1 1 2 1 100 313 306 76 1759 3 1 0 1777 4 75 230 1 0 +"28309" 4 352 3 1 1 1 2 1 100 313 76 133 3386 8 6 1 1038 4 75 57 1 1 +"28310" 4 352 3 1 1 1 2 1 100 313 133 233 1979 5 7 0 1634 4 75 100 0 1 +"28311" 4 352 3 1 1 1 2 1 100 313 233 58 3402 6 8 1 693 4 75 175 0 0 +"28312" 4 352 3 1 1 1 2 1 100 313 58 102 2586 7 5 1 661 4 75 44 1 1 +"28313" 4 352 3 1 1 1 2 1 100 313 102 179 1602 1 10 0 1684 4 75 77 0 1 +"28314" 4 352 3 1 1 1 2 1 100 313 179 313 2569 9 3 1 1053 4 75 134 1 1 +"28315" 4 352 3 1 1 1 3 1 100 337 100 175 1619 3 5 0 748 4 75 75 0 1 +"28316" 4 352 3 1 1 1 3 1 100 337 175 44 1814 8 9 1 2214 4 75 131 0 0 +"28317" 4 352 3 1 1 1 3 1 100 337 44 77 1506 2 4 0 1960 4 75 33 0 1 +"28318" 4 352 3 1 1 1 3 1 100 337 77 135 1952 6 3 1 2233 4 75 58 1 1 +"28319" 4 352 3 1 1 1 3 1 100 337 135 236 3044 7 6 1 1321 4 75 101 1 1 +"28320" 4 352 3 1 1 1 3 1 100 337 236 59 3712 4 2 0 2605 4 75 177 1 0 +"28321" 4 352 3 1 1 1 3 1 100 337 59 115 782 1 8 0 1071 5 95 56 0 1 +"28322" 4 352 3 1 1 1 3 1 100 337 115 173 2817 5 7 0 729 3 50 58 0 1 +"28323" 4 352 3 1 1 1 3 1 100 337 173 337 1244 9 5 1 1047 5 95 164 1 1 +"28324" 4 352 3 1 1 1 4 1 100 49 100 175 1625 8 7 1 897 4 75 75 1 1 +"28325" 4 352 3 1 1 1 4 1 100 49 
175 306 1556 3 10 0 919 4 75 131 0 1 +"28326" 4 352 3 1 1 1 4 1 100 49 306 76 1793 7 9 1 2202 4 75 230 0 0 +"28327" 4 352 3 1 1 1 4 1 100 49 76 148 1534 9 1 1 1027 5 95 72 1 1 +"28328" 4 352 3 1 1 1 4 1 100 49 148 259 2215 2 3 0 1692 4 75 111 0 1 +"28329" 4 352 3 1 1 1 4 1 100 49 259 453 1273 1 8 0 914 4 75 194 0 1 +"28330" 4 352 3 1 1 1 4 1 100 49 453 113 1964 5 4 0 820 4 75 340 1 0 +"28331" 4 352 3 1 1 1 4 1 100 49 113 28 1627 4 2 0 740 4 75 85 1 0 +"28332" 4 352 3 1 1 1 4 1 100 49 28 49 1381 6 3 1 1834 4 75 21 1 1 +"28333" 4 352 4 0 1 0 1 1 100 26 100 150 3708 2 7 0 1082 3 50 50 0 1 +"28334" 4 352 4 0 1 0 1 1 100 26 150 263 4699 3 7 0 1625 2 75 113 0 1 +"28335" 4 352 4 0 1 0 1 1 100 26 263 513 1705 2 8 0 1630 1 95 250 0 1 +"28336" 4 352 4 0 1 0 1 1 100 26 513 26 1501 8 9 1 1411 1 95 487 0 0 +"28337" 4 352 5 1 1 0 1 1 100 4 100 195 2409 1 3 0 1868 1 95 95 0 1 +"28338" 4 352 5 1 1 0 1 1 100 4 195 341 1560 6 5 1 1451 2 75 146 1 1 +"28339" 4 352 5 1 1 0 1 1 100 4 341 665 1147 2 8 0 971 1 95 324 0 1 +"28340" 4 352 5 1 1 0 1 1 100 4 665 166 1874 8 9 1 806 2 75 499 0 0 +"28341" 4 352 5 1 1 0 1 1 100 4 166 324 3733 3 4 0 3049 1 95 158 0 1 +"28342" 4 352 5 1 1 0 1 1 100 4 324 632 3813 5 7 0 909 1 95 308 0 1 +"28343" 4 352 5 1 1 0 1 1 100 4 632 32 3631 7 4 0 696 1 95 600 1 0 +"28344" 4 352 5 1 1 0 1 1 100 4 32 2 2548 4 1 0 1872 1 95 30 1 0 +"28345" 4 352 5 1 1 0 1 1 100 4 2 4 1724 9 6 1 816 1 95 2 1 1 +"28346" 4 352 5 1 1 0 2 0 100 0 100 195 1545 8 1 1 685 1 95 95 1 1 +"28347" 4 352 5 1 1 0 2 0 100 0 195 10 3794 6 2 0 579 1 95 185 1 0 +"28348" 4 352 5 1 1 0 2 0 100 0 10 0 981 7 9 1 948 1 95 10 0 0 +"28349" 4 352 5 1 1 0 3 0 100 0 100 195 1425 7 5 1 755 1 95 95 1 1 +"28350" 4 352 5 1 1 0 3 0 100 0 195 10 1076 2 1 0 590 1 95 185 1 0 +"28351" 4 352 5 1 1 0 3 0 100 0 10 20 1247 8 6 1 498 1 95 10 1 1 +"28352" 4 352 5 1 1 0 3 0 100 0 20 39 1319 4 7 0 494 1 95 19 0 1 +"28353" 4 352 5 1 1 0 3 0 100 0 39 76 876 3 10 0 776 1 95 37 0 1 +"28354" 4 352 5 1 1 0 3 0 100 0 76 4 1912 6 8 1 1426 1 95 72 0 0 +"28355" 4 352 5 1 1 0 3 0 100 0 4 8 1067 9 2 1 422 1 95 4 1 1 +"28356" 4 352 5 1 1 0 3 0 100 0 8 0 1271 5 3 0 463 1 95 8 1 0 +"28357" 4 352 5 1 1 0 4 1 100 27 100 195 1349 2 3 0 443 1 95 95 0 1 +"28358" 4 352 5 1 1 0 4 1 100 27 195 380 1019 7 4 1 424 1 95 185 1 1 +"28359" 4 352 5 1 1 0 4 1 100 27 380 19 817 3 1 0 335 1 95 361 1 0 +"28360" 4 352 5 1 1 0 4 1 100 27 19 37 1052 1 9 0 407 1 95 18 0 1 +"28361" 4 352 5 1 1 0 4 1 100 27 37 72 1217 8 7 1 556 1 95 35 1 1 +"28362" 4 352 5 1 1 0 4 1 100 27 72 140 879 9 2 1 563 1 95 68 1 1 +"28363" 4 352 5 1 1 0 4 1 100 27 140 273 1349 5 6 0 963 1 95 133 0 1 +"28364" 4 352 5 1 1 0 4 1 100 27 273 14 1426 6 8 1 476 1 95 259 0 0 +"28365" 4 352 5 1 1 0 4 1 100 27 14 27 883 4 7 0 1022 1 95 13 0 1 +"28366" 4 355 2 0 1 0 1 1 100 8 100 175 12851 2 7 0 3949 2 75 75 0 1 +"28367" 4 355 2 0 1 0 1 1 100 8 175 87 4694 3 7 1 1709 3 50 88 0 0 +"28368" 4 355 2 0 1 0 1 1 100 8 87 170 3177 2 8 0 1391 1 95 83 0 1 +"28369" 4 355 2 0 1 0 1 1 100 8 170 8 3707 8 9 1 944 1 95 162 0 0 +"28370" 4 355 3 1 1 0 1 1 100 6 100 195 2277 1 3 0 1062 1 95 95 0 1 +"28371" 4 355 3 1 1 0 1 1 100 6 195 380 2040 6 5 1 860 1 95 185 1 1 +"28372" 4 355 3 1 1 0 1 1 100 6 380 399 3488 2 8 0 1280 5 5 19 0 1 +"28373" 4 355 3 1 1 0 1 1 100 6 399 299 1872 8 9 1 1189 4 25 100 0 0 +"28374" 4 355 3 1 1 0 1 1 100 6 299 15 2650 3 4 1 1129 1 95 284 0 0 +"28375" 4 355 3 1 1 0 1 1 100 6 15 29 2920 5 7 0 755 1 95 14 0 1 +"28376" 4 355 3 1 1 0 1 1 100 6 29 57 1948 7 4 1 685 1 95 28 1 1 +"28377" 4 355 3 1 1 0 1 1 100 6 57 111 2242 4 
1 1 388 1 95 54 1 1 +"28378" 4 355 3 1 1 0 1 1 100 6 111 6 3543 9 6 0 1068 1 95 105 1 0 +"28379" 4 355 3 1 1 0 2 0 100 0 100 195 6633 8 1 1 563 1 95 95 1 1 +"28380" 4 355 3 1 1 0 2 0 100 0 195 10 2373 6 2 0 674 1 95 185 1 0 +"28381" 4 355 3 1 1 0 2 0 100 0 10 0 1367 7 9 1 500 1 95 10 0 0 +"28382" 4 355 3 1 1 0 3 1 100 720 100 195 2811 7 5 1 1411 1 95 95 1 1 +"28383" 4 355 3 1 1 0 3 1 100 720 195 146 2396 2 1 0 1032 4 25 49 1 0 +"28384" 4 355 3 1 1 0 3 1 100 720 146 285 1252 8 6 1 701 1 95 139 1 1 +"28385" 4 355 3 1 1 0 3 1 100 720 285 214 4233 4 7 1 478 4 25 71 0 0 +"28386" 4 355 3 1 1 0 3 1 100 720 214 375 1236 3 10 0 1615 2 75 161 0 1 +"28387" 4 355 3 1 1 0 3 1 100 720 375 469 2655 6 8 0 935 4 25 94 0 1 +"28388" 4 355 3 1 1 0 3 1 100 720 469 492 3240 9 2 1 628 5 5 23 1 1 +"28389" 4 355 3 1 1 0 3 1 100 720 492 369 3683 5 3 0 522 4 25 123 1 0 +"28390" 4 355 3 1 1 0 3 1 100 720 369 720 2198 1 10 0 603 1 95 351 0 1 +"28391" 4 355 3 1 1 0 4 1 100 3311 100 195 1898 2 3 0 389 1 95 95 0 1 +"28392" 4 355 3 1 1 0 4 1 100 3311 195 380 1088 7 4 1 409 1 95 185 1 1 +"28393" 4 355 3 1 1 0 4 1 100 3311 380 665 2059 3 1 1 1358 2 75 285 1 1 +"28394" 4 355 3 1 1 0 4 1 100 3311 665 698 1555 1 9 0 487 5 5 33 0 1 +"28395" 4 355 3 1 1 0 4 1 100 3311 698 873 2981 8 7 1 693 4 25 175 1 1 +"28396" 4 355 3 1 1 0 4 1 100 3311 873 1702 5329 9 2 1 1865 1 95 829 1 1 +"28397" 4 355 3 1 1 0 4 1 100 3311 1702 1787 4309 5 6 0 670 5 5 85 0 1 +"28398" 4 355 3 1 1 0 4 1 100 3311 1787 1698 1460 6 8 1 572 5 5 89 0 0 +"28399" 4 355 3 1 1 0 4 1 100 3311 1698 3311 3070 4 7 0 633 1 95 1613 0 1 +"28400" 4 355 4 0 1 1 1 1 100 313 100 125 8175 8 3 1 2023 2 25 25 1 1 +"28401" 4 355 4 0 1 1 1 1 100 313 125 188 5812 3 7 0 429 3 50 63 0 1 +"28402" 4 355 4 0 1 1 1 1 100 313 188 329 1814 8 2 1 622 4 75 141 1 1 +"28403" 4 355 4 0 1 1 1 1 100 313 329 313 2421 2 1 0 1336 1 5 16 1 0 +"28404" 4 355 5 1 1 1 1 1 100 786 100 195 1596 9 7 1 1366 5 95 95 1 1 +"28405" 4 355 5 1 1 1 1 1 100 786 195 341 1354 4 8 0 552 4 75 146 0 1 +"28406" 4 355 5 1 1 1 1 1 100 786 341 358 1947 8 2 1 492 1 5 17 1 1 +"28407" 4 355 5 1 1 1 1 1 100 786 358 340 2074 2 1 0 372 1 5 18 1 0 +"28408" 4 355 5 1 1 1 1 1 100 786 340 357 1104 7 6 1 1104 1 5 17 1 1 +"28409" 4 355 5 1 1 1 1 1 100 786 357 339 1689 5 3 0 566 1 5 18 1 0 +"28410" 4 355 5 1 1 1 1 1 100 786 339 424 2617 3 6 0 794 2 25 85 0 1 +"28411" 4 355 5 1 1 1 1 1 100 786 424 403 3297 6 9 1 1034 1 5 21 0 0 +"28412" 4 355 5 1 1 1 1 1 100 786 403 786 1262 1 4 0 0 5 95 383 0 1 +"28413" 4 355 5 1 1 1 2 1 100 1037 100 175 1478 2 9 0 325 4 75 75 0 1 +"28414" 4 355 5 1 1 1 2 1 100 1037 175 131 2203 4 10 1 536 2 25 44 0 0 +"28415" 4 355 5 1 1 1 2 1 100 1037 131 124 1116 3 1 0 628 1 5 7 1 0 +"28416" 4 355 5 1 1 1 2 1 100 1037 124 242 1092 8 6 1 2086 5 95 118 1 1 +"28417" 4 355 5 1 1 1 2 1 100 1037 242 230 2400 5 7 1 563 1 5 12 0 0 +"28418" 4 355 5 1 1 1 2 1 100 1037 230 218 1301 6 8 1 637 1 5 12 0 0 +"28419" 4 355 5 1 1 1 2 1 100 1037 218 273 4996 7 5 1 1700 2 25 55 1 1 +"28420" 4 355 5 1 1 1 2 1 100 1037 273 532 2360 1 10 0 0 5 95 259 0 1 +"28421" 4 355 5 1 1 1 2 1 100 1037 532 1037 1741 9 3 1 0 5 95 505 1 1 +"28422" 4 355 5 1 1 1 3 1 100 53 100 195 3014 3 5 0 861 5 95 95 0 1 +"28423" 4 355 5 1 1 1 3 1 100 53 195 10 889 8 9 1 0 5 95 185 0 0 +"28424" 4 355 5 1 1 1 3 1 100 53 10 18 1572 2 4 0 879 4 75 8 0 1 +"28425" 4 355 5 1 1 1 3 1 100 53 18 19 1490 6 3 1 585 1 5 1 1 1 +"28426" 4 355 5 1 1 1 3 1 100 53 19 20 1155 7 6 1 1801 1 5 1 1 1 +"28427" 4 355 5 1 1 1 3 1 100 53 20 15 1592 4 2 0 802 2 25 5 1 0 +"28428" 4 355 5 1 1 1 3 1 
100 53 15 29 1386 1 8 0 781 5 95 14 0 1 +"28429" 4 355 5 1 1 1 3 1 100 53 29 30 1550 5 7 0 762 1 5 1 0 1 +"28430" 4 355 5 1 1 1 3 1 100 53 30 53 2395 9 5 1 1142 4 75 23 1 1 +"28431" 4 355 5 1 1 1 4 1 100 2448 100 195 2040 8 7 1 589 5 95 95 1 1 +"28432" 4 355 5 1 1 1 4 1 100 2448 195 380 1310 3 10 0 558 5 95 185 0 1 +"28433" 4 355 5 1 1 1 4 1 100 2448 380 285 1536 7 9 1 860 2 25 95 0 0 +"28434" 4 355 5 1 1 1 4 1 100 2448 285 556 1488 9 1 1 0 5 95 271 1 1 +"28435" 4 355 5 1 1 1 4 1 100 2448 556 1084 1831 2 3 0 0 5 95 528 0 1 +"28436" 4 355 5 1 1 1 4 1 100 2448 1084 2114 1653 1 8 0 0 5 95 1030 0 1 +"28437" 4 355 5 1 1 1 4 1 100 2448 2114 2220 4413 5 4 1 616 1 5 106 1 1 +"28438" 4 355 5 1 1 1 4 1 100 2448 2220 2331 3824 4 2 1 544 1 5 111 1 1 +"28439" 4 355 5 1 1 1 4 1 100 2448 2331 2448 1395 6 3 1 846 1 5 117 1 1 +"28440" 4 358 2 0 1 1 1 1 100 224 100 150 14386 8 3 1 323 3 50 50 1 1 +"28441" 4 358 2 0 1 1 1 1 100 224 150 225 6529 3 7 0 1824 3 50 75 0 1 +"28442" 4 358 2 0 1 1 1 1 100 224 225 236 2207 8 2 1 1148 1 5 11 1 1 +"28443" 4 358 2 0 1 1 1 1 100 224 236 224 1450 2 1 0 2435 1 5 12 1 0 +"28444" 4 358 3 1 1 1 1 1 100 242 100 175 2001 9 7 1 1736 4 75 75 1 1 +"28445" 4 358 3 1 1 1 1 1 100 242 175 166 4386 4 8 1 815 1 5 9 0 0 +"28446" 4 358 3 1 1 1 1 1 100 242 166 249 1846 8 2 1 2084 3 50 83 1 1 +"28447" 4 358 3 1 1 1 1 1 100 242 249 62 2750 2 1 0 1129 4 75 187 1 0 +"28448" 4 358 3 1 1 1 1 1 100 242 62 93 2024 7 6 1 733 3 50 31 1 1 +"28449" 4 358 3 1 1 1 1 1 100 242 93 88 4976 5 3 0 1585 1 5 5 1 0 +"28450" 4 358 3 1 1 1 1 1 100 242 88 110 1818 3 6 0 2266 2 25 22 0 1 +"28451" 4 358 3 1 1 1 1 1 100 242 110 138 2131 6 9 0 675 2 25 28 0 1 +"28452" 4 358 3 1 1 1 1 1 100 242 138 242 3520 1 4 0 902 4 75 104 0 1 +"28453" 4 358 3 1 1 1 2 1 100 388 100 195 3042 2 9 0 439 5 95 95 0 1 +"28454" 4 358 3 1 1 1 2 1 100 388 195 185 3327 4 10 1 743 1 5 10 0 0 +"28455" 4 358 3 1 1 1 2 1 100 388 185 46 2888 3 1 0 629 4 75 139 1 0 +"28456" 4 358 3 1 1 1 2 1 100 388 46 81 1428 8 6 1 1438 4 75 35 1 1 +"28457" 4 358 3 1 1 1 2 1 100 388 81 85 2599 5 7 0 465 1 5 4 0 1 +"28458" 4 358 3 1 1 1 2 1 100 388 85 106 3608 6 8 0 1114 2 25 21 0 1 +"28459" 4 358 3 1 1 1 2 1 100 388 106 159 2114 7 5 1 1012 3 50 53 1 1 +"28460" 4 358 3 1 1 1 2 1 100 388 159 310 2424 1 10 0 660 5 95 151 0 1 +"28461" 4 358 3 1 1 1 2 1 100 388 310 388 1577 9 3 1 836 2 25 78 1 1 +"28462" 4 358 3 1 1 1 3 1 100 115 100 150 2414 3 5 0 2409 3 50 50 0 1 +"28463" 4 358 3 1 1 1 3 1 100 115 150 7 1451 8 9 1 1325 5 95 143 0 0 +"28464" 4 358 3 1 1 1 3 1 100 115 7 11 1379 2 4 0 1528 3 50 4 0 1 +"28465" 4 358 3 1 1 1 3 1 100 115 11 14 2244 6 3 1 4502 2 25 3 1 1 +"28466" 4 358 3 1 1 1 3 1 100 115 14 21 1301 7 6 1 773 3 50 7 1 1 +"28467" 4 358 3 1 1 1 3 1 100 115 21 32 2669 4 2 1 2260 3 50 11 1 1 +"28468" 4 358 3 1 1 1 3 1 100 115 32 56 1918 1 8 0 897 4 75 24 0 1 +"28469" 4 358 3 1 1 1 3 1 100 115 56 59 4339 5 7 0 435 1 5 3 0 1 +"28470" 4 358 3 1 1 1 3 1 100 115 59 115 1684 9 5 1 882 5 95 56 1 1 +"28471" 4 358 3 1 1 1 4 1 100 798 100 150 1659 8 7 1 594 3 50 50 1 1 +"28472" 4 358 3 1 1 1 4 1 100 798 150 225 2323 3 10 0 690 3 50 75 0 1 +"28473" 4 358 3 1 1 1 4 1 100 798 225 169 2423 7 9 1 1617 2 25 56 0 0 +"28474" 4 358 3 1 1 1 4 1 100 798 169 330 2545 9 1 1 0 5 95 161 1 1 +"28475" 4 358 3 1 1 1 4 1 100 798 330 495 1581 2 3 0 888 3 50 165 0 1 +"28476" 4 358 3 1 1 1 4 1 100 798 495 965 1683 1 8 0 0 5 95 470 0 1 +"28477" 4 358 3 1 1 1 4 1 100 798 965 1013 2849 5 4 1 878 1 5 48 1 1 +"28478" 4 358 3 1 1 1 4 1 100 798 1013 760 2810 4 2 0 658 2 25 253 1 0 +"28479" 4 358 3 1 
1 1 4 1 100 798 760 798 1592 6 3 1 401 1 5 38 1 1 +"28480" 4 358 4 0 1 0 1 1 100 30 100 195 4730 2 7 0 3182 1 95 95 0 1 +"28481" 4 358 4 0 1 0 1 1 100 30 195 341 6140 3 7 0 2725 2 75 146 0 1 +"28482" 4 358 4 0 1 0 1 1 100 30 341 597 2232 2 8 0 2372 2 75 256 0 1 +"28483" 4 358 4 0 1 0 1 1 100 30 597 30 1664 8 9 1 1334 1 95 567 0 0 +"28484" 4 358 5 1 1 0 1 0 100 1 100 195 3578 1 3 0 1430 1 95 95 0 1 +"28485" 4 358 5 1 1 0 1 0 100 1 195 146 3407 6 5 0 883 4 25 49 1 0 +"28486" 4 358 5 1 1 0 1 0 100 1 146 285 2966 2 8 0 374 1 95 139 0 1 +"28487" 4 358 5 1 1 0 1 0 100 1 285 14 2105 8 9 1 299 1 95 271 0 0 +"28488" 4 358 5 1 1 0 1 0 100 1 14 27 1354 3 4 0 367 1 95 13 0 1 +"28489" 4 358 5 1 1 0 1 0 100 1 27 1 2363 5 7 1 2992 1 95 26 0 0 +"28490" 4 358 5 1 1 0 2 1 100 296 100 195 1501 8 1 1 1812 1 95 95 1 1 +"28491" 4 358 5 1 1 0 2 1 100 296 195 244 3171 6 2 1 898 4 25 49 1 1 +"28492" 4 358 5 1 1 0 2 1 100 296 244 427 3637 7 9 0 1645 2 75 183 0 1 +"28493" 4 358 5 1 1 0 2 1 100 296 427 833 1603 2 10 0 1220 1 95 406 0 1 +"28494" 4 358 5 1 1 0 2 1 100 296 833 791 2952 5 3 0 905 5 5 42 1 0 +"28495" 4 358 5 1 1 0 2 1 100 296 791 40 1964 4 2 0 740 1 95 751 1 0 +"28496" 4 358 5 1 1 0 2 1 100 296 40 78 2413 3 5 0 1088 1 95 38 0 1 +"28497" 4 358 5 1 1 0 2 1 100 296 78 152 1482 9 4 1 305 1 95 74 1 1 +"28498" 4 358 5 1 1 0 2 1 100 296 152 296 1543 1 7 0 268 1 95 144 0 1 +"28499" 4 358 5 1 1 0 3 1 100 25 100 195 1438 7 5 1 727 1 95 95 1 1 +"28500" 4 358 5 1 1 0 3 1 100 25 195 10 1386 2 1 0 381 1 95 185 1 0 +"28501" 4 358 5 1 1 0 3 1 100 25 10 20 2336 8 6 1 263 1 95 10 1 1 +"28502" 4 358 5 1 1 0 3 1 100 25 20 15 3560 4 7 1 548 4 25 5 0 0 +"28503" 4 358 5 1 1 0 3 1 100 25 15 29 1135 3 10 0 183 1 95 14 0 1 +"28504" 4 358 5 1 1 0 3 1 100 25 29 14 1540 6 8 1 2192 3 50 15 0 0 +"28505" 4 358 5 1 1 0 3 1 100 25 14 27 1389 9 2 1 456 1 95 13 1 1 +"28506" 4 358 5 1 1 0 3 1 100 25 27 13 1735 5 3 0 1067 3 50 14 1 0 +"28507" 4 358 5 1 1 0 3 1 100 25 13 25 1174 1 10 0 251 1 95 12 0 1 +"28508" 4 358 5 1 1 0 4 1 100 6096 100 195 1707 2 3 0 959 1 95 95 0 1 +"28509" 4 358 5 1 1 0 4 1 100 6096 195 380 3702 7 4 1 1524 1 95 185 1 1 +"28510" 4 358 5 1 1 0 4 1 100 6096 380 475 2505 3 1 1 1648 4 25 95 1 1 +"28511" 4 358 5 1 1 0 4 1 100 6096 475 831 2062 1 9 0 722 2 75 356 0 1 +"28512" 4 358 5 1 1 0 4 1 100 6096 831 1454 1292 8 7 1 1381 2 75 623 1 1 +"28513" 4 358 5 1 1 0 4 1 100 6096 1454 2835 1216 9 2 1 588 1 95 1381 1 1 +"28514" 4 358 5 1 1 0 4 1 100 6096 2835 2977 3493 5 6 0 933 5 5 142 0 1 +"28515" 4 358 5 1 1 0 4 1 100 6096 2977 3126 2454 6 8 0 743 5 5 149 0 1 +"28516" 4 358 5 1 1 0 4 1 100 6096 3126 6096 1361 4 7 0 790 1 95 2970 0 1 +"28517" 4 359 2 0 1 0 1 1 100 27 100 150 6161 2 7 0 1138 3 50 50 0 1 +"28518" 4 359 2 0 1 0 1 1 100 27 150 7 10625 3 7 1 3589 1 95 143 0 0 +"28519" 4 359 2 0 1 0 1 1 100 27 7 14 2255 2 8 0 2778 1 95 7 0 1 +"28520" 4 359 2 0 1 0 1 1 100 27 14 27 3563 8 9 0 1759 1 95 13 0 1 +"28521" 4 359 3 1 1 0 1 0 100 0 100 195 3909 1 3 0 1424 1 95 95 0 1 +"28522" 4 359 3 1 1 0 1 0 100 0 195 10 2242 6 5 0 868 1 95 185 1 0 +"28523" 4 359 3 1 1 0 1 0 100 0 10 0 2632 2 8 1 992 1 95 10 0 0 +"28524" 4 359 3 1 1 0 2 0 100 1 100 195 3394 8 1 1 974 1 95 95 1 1 +"28525" 4 359 3 1 1 0 2 0 100 1 195 10 3181 6 2 0 4579 1 95 185 1 0 +"28526" 4 359 3 1 1 0 2 0 100 1 10 20 1766 7 9 0 1056 1 95 10 0 1 +"28527" 4 359 3 1 1 0 2 0 100 1 20 1 2691 2 10 1 817 1 95 19 0 0 +"28528" 4 359 3 1 1 0 3 0 100 0 100 5 3015 7 5 0 820 1 95 95 1 0 +"28529" 4 359 3 1 1 0 3 0 100 0 5 10 2603 2 1 1 3333 1 95 5 1 1 +"28530" 4 359 3 1 1 0 3 0 100 0 10 0 
4203 8 6 0 1097 1 95 10 1 0 +"28531" 4 359 3 1 1 0 4 0 100 0 100 195 1946 2 3 0 819 1 95 95 0 1 +"28532" 4 359 3 1 1 0 4 0 100 0 195 10 2257 7 4 0 612 1 95 185 1 0 +"28533" 4 359 3 1 1 0 4 0 100 0 10 20 924 3 1 1 822 1 95 10 1 1 +"28534" 4 359 3 1 1 0 4 0 100 0 20 39 1260 1 9 0 814 1 95 19 0 1 +"28535" 4 359 3 1 1 0 4 0 100 0 39 76 917 8 7 1 550 1 95 37 1 1 +"28536" 4 359 3 1 1 0 4 0 100 0 76 4 713 9 2 0 1541 1 95 72 1 0 +"28537" 4 359 3 1 1 0 4 0 100 0 4 8 1665 5 6 0 1431 1 95 4 0 1 +"28538" 4 359 3 1 1 0 4 0 100 0 8 0 1816 6 8 1 927 1 95 8 0 0 +"28539" 4 359 4 0 1 1 1 1 100 124 100 150 3107 8 3 1 1165 3 50 50 1 1 +"28540" 4 359 4 0 1 1 1 1 100 124 150 158 3571 3 7 0 3639 1 5 8 0 1 +"28541" 4 359 4 0 1 1 1 1 100 124 158 166 4270 8 2 1 3697 1 5 8 1 1 +"28542" 4 359 4 0 1 1 1 1 100 124 166 124 5222 2 1 0 4107 2 25 42 1 0 +"28543" 4 359 5 1 1 1 1 1 100 359 100 105 1471 9 7 1 3494 1 5 5 1 1 +"28544" 4 359 5 1 1 1 1 1 100 359 105 184 2340 4 8 0 3187 4 75 79 0 1 +"28545" 4 359 5 1 1 1 1 1 100 359 184 175 3064 8 2 0 596 1 5 9 1 0 +"28546" 4 359 5 1 1 1 1 1 100 359 175 219 2317 2 1 1 1332 2 25 44 1 1 +"28547" 4 359 5 1 1 1 1 1 100 359 219 164 2874 7 6 0 831 2 25 55 1 0 +"28548" 4 359 5 1 1 1 1 1 100 359 164 287 3365 5 3 1 643 4 75 123 1 1 +"28549" 4 359 5 1 1 1 1 1 100 359 287 273 2089 3 6 1 556 1 5 14 0 0 +"28550" 4 359 5 1 1 1 1 1 100 359 273 205 942 6 9 1 1063 2 25 68 0 0 +"28551" 4 359 5 1 1 1 1 1 100 359 205 359 2874 1 4 0 1671 4 75 154 0 1 +"28552" 4 359 5 1 1 1 2 1 100 20 100 25 3075 2 9 1 2664 4 75 75 0 0 +"28553" 4 359 5 1 1 1 2 1 100 20 25 49 6965 4 10 0 1352 5 95 24 0 1 +"28554" 4 359 5 1 1 1 2 1 100 20 49 51 2380 3 1 1 1273 1 5 2 1 1 +"28555" 4 359 5 1 1 1 2 1 100 20 51 13 985 8 6 0 3144 4 75 38 1 0 +"28556" 4 359 5 1 1 1 2 1 100 20 13 16 3713 5 7 0 245 2 25 3 0 1 +"28557" 4 359 5 1 1 1 2 1 100 20 16 12 1344 6 8 1 1188 2 25 4 0 0 +"28558" 4 359 5 1 1 1 2 1 100 20 12 11 1210 7 5 0 4365 1 5 1 1 0 +"28559" 4 359 5 1 1 1 2 1 100 20 11 19 1091 1 10 0 1685 4 75 8 0 1 +"28560" 4 359 5 1 1 1 2 1 100 20 19 20 3538 9 3 1 4359 1 5 1 1 1 +"28561" 4 359 5 1 1 1 3 0 100 0 100 50 1573 3 5 1 3303 3 50 50 0 0 +"28562" 4 359 5 1 1 1 3 0 100 0 50 53 704 8 9 0 3532 1 5 3 0 1 +"28563" 4 359 5 1 1 1 3 0 100 0 53 50 1745 2 4 1 3162 1 5 3 0 0 +"28564" 4 359 5 1 1 1 3 0 100 0 50 37 986 6 3 0 327 2 25 13 1 0 +"28565" 4 359 5 1 1 1 3 0 100 0 37 2 1821 7 6 0 1270 5 95 35 1 0 +"28566" 4 359 5 1 1 1 3 0 100 0 2 0 2119 4 2 0 736 4 75 2 1 0 +"28567" 4 359 5 1 1 1 4 0 100 0 100 5 5488 8 7 0 0 5 95 95 1 0 +"28568" 4 359 5 1 1 1 4 0 100 0 5 10 1537 3 10 0 0 5 95 5 0 1 +"28569" 4 359 5 1 1 1 4 0 100 0 10 9 1781 7 9 1 2118 1 5 1 0 0 +"28570" 4 359 5 1 1 1 4 0 100 0 9 2 918 9 1 0 496 4 75 7 1 0 +"28571" 4 359 5 1 1 1 4 0 100 0 2 0 2310 2 3 1 581 4 75 2 0 0 +"28572" 4 360 2 0 1 1 1 1 100 211 100 150 11652 8 3 1 391 3 50 50 1 1 +"28573" 4 360 2 0 1 1 1 1 100 211 150 188 4986 3 7 0 2314 2 25 38 0 1 +"28574" 4 360 2 0 1 1 1 1 100 211 188 282 2425 8 2 1 292 3 50 94 1 1 +"28575" 4 360 2 0 1 1 1 1 100 211 282 211 2764 2 1 0 853 2 25 71 1 0 +"28576" 4 360 3 1 1 1 1 1 100 437 100 150 3825 9 7 1 1249 3 50 50 1 1 +"28577" 4 360 3 1 1 1 1 1 100 437 150 158 2746 4 8 0 2149 1 5 8 0 1 +"28578" 4 360 3 1 1 1 1 1 100 437 158 237 2291 8 2 1 765 3 50 79 1 1 +"28579" 4 360 3 1 1 1 1 1 100 437 237 118 1733 2 1 0 992 3 50 119 1 0 +"28580" 4 360 3 1 1 1 1 1 100 437 118 207 2030 7 6 1 715 4 75 89 1 1 +"28581" 4 360 3 1 1 1 1 1 100 437 207 155 4295 5 3 0 648 2 25 52 1 0 +"28582" 4 360 3 1 1 1 1 1 100 437 155 233 1430 3 6 0 320 3 50 78 0 1 
+"28583" 4 360 3 1 1 1 1 1 100 437 233 291 1934 6 9 0 624 2 25 58 0 1 +"28584" 4 360 3 1 1 1 1 1 100 437 291 437 2119 1 4 0 600 3 50 146 0 1 +"28585" 4 360 3 1 1 1 2 1 100 435 100 150 2421 2 9 0 287 3 50 50 0 1 +"28586" 4 360 3 1 1 1 2 1 100 435 150 112 2167 4 10 1 309 2 25 38 0 0 +"28587" 4 360 3 1 1 1 2 1 100 435 112 84 2012 3 1 0 1090 2 25 28 1 0 +"28588" 4 360 3 1 1 1 2 1 100 435 84 164 1867 8 6 1 1097 5 95 80 1 1 +"28589" 4 360 3 1 1 1 2 1 100 435 164 123 2218 5 7 1 1766 2 25 41 0 0 +"28590" 4 360 3 1 1 1 2 1 100 435 123 154 1471 6 8 0 639 2 25 31 0 1 +"28591" 4 360 3 1 1 1 2 1 100 435 154 193 1629 7 5 1 661 2 25 39 1 1 +"28592" 4 360 3 1 1 1 2 1 100 435 193 290 1763 1 10 0 717 3 50 97 0 1 +"28593" 4 360 3 1 1 1 2 1 100 435 290 435 1920 9 3 1 531 3 50 145 1 1 +"28594" 4 360 3 1 1 1 3 1 100 49 100 75 3701 3 5 1 323 2 25 25 0 0 +"28595" 4 360 3 1 1 1 3 1 100 49 75 4 1736 8 9 1 0 5 95 71 0 0 +"28596" 4 360 3 1 1 1 3 1 100 49 4 8 1508 2 4 0 0 5 95 4 0 1 +"28597" 4 360 3 1 1 1 3 1 100 49 8 12 2858 6 3 1 600 3 50 4 1 1 +"28598" 4 360 3 1 1 1 3 1 100 49 12 21 1504 7 6 1 735 4 75 9 1 1 +"28599" 4 360 3 1 1 1 3 1 100 49 21 10 2553 4 2 0 470 3 50 11 1 0 +"28600" 4 360 3 1 1 1 3 1 100 49 10 20 1412 1 8 0 0 5 95 10 0 1 +"28601" 4 360 3 1 1 1 3 1 100 49 20 25 2023 5 7 0 1012 2 25 5 0 1 +"28602" 4 360 3 1 1 1 3 1 100 49 25 49 1553 9 5 1 1693 5 95 24 1 1 +"28603" 4 360 3 1 1 1 4 1 100 301 100 150 3132 8 7 1 292 3 50 50 1 1 +"28604" 4 360 3 1 1 1 4 1 100 301 150 112 3060 3 10 1 264 2 25 38 0 0 +"28605" 4 360 3 1 1 1 4 1 100 301 112 56 1193 7 9 1 558 3 50 56 0 0 +"28606" 4 360 3 1 1 1 4 1 100 301 56 98 1185 9 1 1 446 4 75 42 1 1 +"28607" 4 360 3 1 1 1 4 1 100 301 98 147 1318 2 3 0 341 3 50 49 0 1 +"28608" 4 360 3 1 1 1 4 1 100 301 147 257 2466 1 8 0 826 4 75 110 0 1 +"28609" 4 360 3 1 1 1 4 1 100 301 257 321 2984 5 4 1 367 2 25 64 1 1 +"28610" 4 360 3 1 1 1 4 1 100 301 321 241 1779 4 2 0 333 2 25 80 1 0 +"28611" 4 360 3 1 1 1 4 1 100 301 241 301 1426 6 3 1 468 2 25 60 1 1 +"28612" 4 360 4 0 1 0 1 1 100 33 100 175 9873 2 7 0 609 2 75 75 0 1 +"28613" 4 360 4 0 1 0 1 1 100 33 175 341 2577 3 7 0 4900 1 95 166 0 1 +"28614" 4 360 4 0 1 0 1 1 100 33 341 665 1177 2 8 0 838 1 95 324 0 1 +"28615" 4 360 4 0 1 0 1 1 100 33 665 33 1148 8 9 1 2555 1 95 632 0 0 +"28616" 4 360 5 1 1 0 1 1 100 144 100 195 3066 1 3 0 1102 1 95 95 0 1 +"28617" 4 360 5 1 1 0 1 1 100 144 195 244 1310 6 5 1 966 4 25 49 1 1 +"28618" 4 360 5 1 1 0 1 1 100 144 244 427 1190 2 8 0 1107 2 75 183 0 1 +"28619" 4 360 5 1 1 0 1 1 100 144 427 21 1155 8 9 1 578 1 95 406 0 0 +"28620" 4 360 5 1 1 0 1 1 100 144 21 41 1524 3 4 0 3033 1 95 20 0 1 +"28621" 4 360 5 1 1 0 1 1 100 144 41 51 2846 5 7 0 397 4 25 10 0 1 +"28622" 4 360 5 1 1 0 1 1 100 144 51 99 1263 7 4 1 1169 1 95 48 1 1 +"28623" 4 360 5 1 1 0 1 1 100 144 99 74 1686 4 1 0 997 4 25 25 1 0 +"28624" 4 360 5 1 1 0 1 1 100 144 74 144 1130 9 6 1 546 1 95 70 1 1 +"28625" 4 360 5 1 1 0 2 1 100 164 100 195 1648 8 1 1 871 1 95 95 1 1 +"28626" 4 360 5 1 1 0 2 1 100 164 195 244 1219 6 2 1 214 4 25 49 1 1 +"28627" 4 360 5 1 1 0 2 1 100 164 244 61 1466 7 9 1 1050 2 75 183 0 0 +"28628" 4 360 5 1 1 0 2 1 100 164 61 119 1821 2 10 0 476 1 95 58 0 1 +"28629" 4 360 5 1 1 0 2 1 100 164 119 89 1218 5 3 0 390 4 25 30 1 0 +"28630" 4 360 5 1 1 0 2 1 100 164 89 22 1888 4 2 0 892 2 75 67 1 0 +"28631" 4 360 5 1 1 0 2 1 100 164 22 43 1318 3 5 0 285 1 95 21 0 1 +"28632" 4 360 5 1 1 0 2 1 100 164 43 84 1396 9 4 1 304 1 95 41 1 1 +"28633" 4 360 5 1 1 0 2 1 100 164 84 164 830 1 7 0 292 1 95 80 0 1 +"28634" 4 360 5 1 1 0 3 1 100 
127 100 195 1098 7 5 1 252 1 95 95 1 1 +"28635" 4 360 5 1 1 0 3 1 100 127 195 10 1061 2 1 0 584 1 95 185 1 0 +"28636" 4 360 5 1 1 0 3 1 100 127 10 20 914 8 6 1 236 1 95 10 1 1 +"28637" 4 360 5 1 1 0 3 1 100 127 20 39 1190 4 7 0 315 1 95 19 0 1 +"28638" 4 360 5 1 1 0 3 1 100 127 39 76 921 3 10 0 307 1 95 37 0 1 +"28639" 4 360 5 1 1 0 3 1 100 127 76 19 881 6 8 1 325 2 75 57 0 0 +"28640" 4 360 5 1 1 0 3 1 100 127 19 37 1107 9 2 1 747 1 95 18 1 1 +"28641" 4 360 5 1 1 0 3 1 100 127 37 65 1725 5 3 1 1073 2 75 28 1 1 +"28642" 4 360 5 1 1 0 3 1 100 127 65 127 1115 1 10 0 884 1 95 62 0 1 +"28643" 4 360 5 1 1 0 4 1 100 64 100 195 1041 2 3 0 685 1 95 95 0 1 +"28644" 4 360 5 1 1 0 4 1 100 64 195 380 890 7 4 1 669 1 95 185 1 1 +"28645" 4 360 5 1 1 0 4 1 100 64 380 95 1011 3 1 0 1025 2 75 285 1 0 +"28646" 4 360 5 1 1 0 4 1 100 64 95 185 677 1 9 0 306 1 95 90 0 1 +"28647" 4 360 5 1 1 0 4 1 100 64 185 361 865 8 7 1 333 1 95 176 1 1 +"28648" 4 360 5 1 1 0 4 1 100 64 361 704 803 9 2 1 386 1 95 343 1 1 +"28649" 4 360 5 1 1 0 4 1 100 64 704 669 1292 5 6 1 333 5 5 35 0 0 +"28650" 4 360 5 1 1 0 4 1 100 64 669 33 971 6 8 1 1354 1 95 636 0 0 +"28651" 4 360 5 1 1 0 4 1 100 64 33 64 847 4 7 0 3108 1 95 31 0 1 +"28652" 4 361 2 0 1 1 1 1 100 246 100 150 4747 8 3 1 1735 3 50 50 1 1 +"28653" 4 361 2 0 1 1 1 1 100 246 150 188 13778 3 7 0 56 2 25 38 0 1 +"28654" 4 361 2 0 1 1 1 1 100 246 188 197 2674 8 2 1 3600 1 5 9 1 1 +"28655" 4 361 2 0 1 1 1 1 100 246 197 246 4690 2 1 1 3480 2 25 49 1 1 +"28656" 4 361 3 1 1 1 1 1 100 306 100 105 9803 9 7 1 1150 1 5 5 1 1 +"28657" 4 361 3 1 1 1 1 1 100 306 105 131 1636 4 8 0 780 2 25 26 0 1 +"28658" 4 361 3 1 1 1 1 1 100 306 131 164 4092 8 2 1 663 2 25 33 1 1 +"28659" 4 361 3 1 1 1 1 1 100 306 164 156 4036 2 1 0 1018 1 5 8 1 0 +"28660" 4 361 3 1 1 1 1 1 100 306 156 234 2303 7 6 1 647 3 50 78 1 1 +"28661" 4 361 3 1 1 1 1 1 100 306 234 246 3046 5 3 1 715 1 5 12 1 1 +"28662" 4 361 3 1 1 1 1 1 100 306 246 234 2910 3 6 1 514 1 5 12 0 0 +"28663" 4 361 3 1 1 1 1 1 100 306 234 175 1758 6 9 1 544 2 25 59 0 0 +"28664" 4 361 3 1 1 1 1 1 100 306 175 306 1396 1 4 0 847 4 75 131 0 1 +"28665" 4 361 3 1 1 1 2 1 100 425 100 175 13340 2 9 0 535 4 75 75 0 1 +"28666" 4 361 3 1 1 1 2 1 100 425 175 219 2018 4 10 0 528 2 25 44 0 1 +"28667" 4 361 3 1 1 1 2 1 100 425 219 208 3620 3 1 0 645 1 5 11 1 0 +"28668" 4 361 3 1 1 1 2 1 100 425 208 260 1372 8 6 1 448 2 25 52 1 1 +"28669" 4 361 3 1 1 1 2 1 100 425 260 273 2720 5 7 0 468 1 5 13 0 1 +"28670" 4 361 3 1 1 1 2 1 100 425 273 259 2699 6 8 1 448 1 5 14 0 0 +"28671" 4 361 3 1 1 1 2 1 100 425 259 324 1776 7 5 1 1682 2 25 65 1 1 +"28672" 4 361 3 1 1 1 2 1 100 425 324 405 1718 1 10 0 667 2 25 81 0 1 +"28673" 4 361 3 1 1 1 2 1 100 425 405 425 1784 9 3 1 665 1 5 20 1 1 +"28674" 4 361 3 1 1 1 3 1 100 4 100 125 1462 3 5 0 1558 2 25 25 0 1 +"28675" 4 361 3 1 1 1 3 1 100 4 125 94 1769 8 9 1 1755 2 25 31 0 0 +"28676" 4 361 3 1 1 1 3 1 100 4 94 183 1549 2 4 0 0 5 95 89 0 1 +"28677" 4 361 3 1 1 1 3 1 100 4 183 174 4409 6 3 0 356 1 5 9 1 0 +"28678" 4 361 3 1 1 1 3 1 100 4 174 305 2496 7 6 1 559 4 75 131 1 1 +"28679" 4 361 3 1 1 1 3 1 100 4 305 152 1954 4 2 0 659 3 50 153 1 0 +"28680" 4 361 3 1 1 1 3 1 100 4 152 296 4401 1 8 0 0 5 95 144 0 1 +"28681" 4 361 3 1 1 1 3 1 100 4 296 74 2405 5 7 1 1017 4 75 222 0 0 +"28682" 4 361 3 1 1 1 3 1 100 4 74 4 3342 9 5 0 0 5 95 70 1 0 +"28683" 4 361 3 1 1 1 4 1 100 403 100 175 1581 8 7 1 928 4 75 75 1 1 +"28684" 4 361 3 1 1 1 4 1 100 403 175 219 1697 3 10 0 638 2 25 44 0 1 +"28685" 4 361 3 1 1 1 4 1 100 403 219 164 2300 7 9 1 925 2 25 55 0 
0 +"28686" 4 361 3 1 1 1 4 1 100 403 164 205 1518 9 1 1 811 2 25 41 1 1 +"28687" 4 361 3 1 1 1 4 1 100 403 205 308 3402 2 3 0 438 3 50 103 0 1 +"28688" 4 361 3 1 1 1 4 1 100 403 308 385 1910 1 8 0 560 2 25 77 0 1 +"28689" 4 361 3 1 1 1 4 1 100 403 385 404 2587 5 4 1 525 1 5 19 1 1 +"28690" 4 361 3 1 1 1 4 1 100 403 404 384 1785 4 2 0 450 1 5 20 1 0 +"28691" 4 361 3 1 1 1 4 1 100 403 384 403 1978 6 3 1 498 1 5 19 1 1 +"28692" 4 361 4 0 1 0 1 1 100 11 100 150 4653 2 7 0 1238 3 50 50 0 1 +"28693" 4 361 4 0 1 0 1 1 100 11 150 112 5424 3 7 1 628 4 25 38 0 0 +"28694" 4 361 4 0 1 0 1 1 100 11 112 218 1708 2 8 0 1152 1 95 106 0 1 +"28695" 4 361 4 0 1 0 1 1 100 11 218 11 3286 8 9 1 1031 1 95 207 0 0 +"28696" 4 361 5 1 1 0 1 1 100 332 100 195 3648 1 3 0 496 1 95 95 0 1 +"28697" 4 361 5 1 1 0 1 1 100 332 195 244 1606 6 5 1 1399 4 25 49 1 1 +"28698" 4 361 5 1 1 0 1 1 100 332 244 476 1346 2 8 0 408 1 95 232 0 1 +"28699" 4 361 5 1 1 0 1 1 100 332 476 24 2129 8 9 1 505 1 95 452 0 0 +"28700" 4 361 5 1 1 0 1 1 100 332 24 47 5814 3 4 0 423 1 95 23 0 1 +"28701" 4 361 5 1 1 0 1 1 100 332 47 92 1416 5 7 0 563 1 95 45 0 1 +"28702" 4 361 5 1 1 0 1 1 100 332 92 179 1933 7 4 1 487 1 95 87 1 1 +"28703" 4 361 5 1 1 0 1 1 100 332 179 170 1253 4 1 0 0 5 5 9 1 0 +"28704" 4 361 5 1 1 0 1 1 100 332 170 332 1974 9 6 1 550 1 95 162 1 1 +"28705" 4 361 5 1 1 0 2 1 100 1070 100 195 1459 8 1 1 611 1 95 95 1 1 +"28706" 4 361 5 1 1 0 2 1 100 1070 195 380 2859 6 2 1 513 1 95 185 1 1 +"28707" 4 361 5 1 1 0 2 1 100 1070 380 399 3326 7 9 0 0 5 5 19 0 1 +"28708" 4 361 5 1 1 0 2 1 100 1070 399 419 2677 2 10 0 0 5 5 20 0 1 +"28709" 4 361 5 1 1 0 2 1 100 1070 419 440 2283 5 3 1 0 5 5 21 1 1 +"28710" 4 361 5 1 1 0 2 1 100 1070 440 418 3772 4 2 0 0 5 5 22 1 0 +"28711" 4 361 5 1 1 0 2 1 100 1070 418 815 1726 3 5 0 546 1 95 397 0 1 +"28712" 4 361 5 1 1 0 2 1 100 1070 815 1019 2773 9 4 1 2119 4 25 204 1 1 +"28713" 4 361 5 1 1 0 2 1 100 1070 1019 1070 3247 1 7 0 0 5 5 51 0 1 +"28714" 4 361 5 1 1 0 3 0 100 0 100 195 2105 7 5 1 475 1 95 95 1 1 +"28715" 4 361 5 1 1 0 3 0 100 0 195 10 1553 2 1 0 501 1 95 185 1 0 +"28716" 4 361 5 1 1 0 3 0 100 0 10 20 1778 8 6 1 611 1 95 10 1 1 +"28717" 4 361 5 1 1 0 3 0 100 0 20 39 1531 4 7 0 528 1 95 19 0 1 +"28718" 4 361 5 1 1 0 3 0 100 0 39 76 2668 3 10 0 779 1 95 37 0 1 +"28719" 4 361 5 1 1 0 3 0 100 0 76 4 1787 6 8 1 876 1 95 72 0 0 +"28720" 4 361 5 1 1 0 3 0 100 0 4 0 3103 9 2 0 497 1 95 4 1 0 +"28721" 4 361 5 1 1 0 4 1 100 1709 100 195 1939 2 3 0 1268 1 95 95 0 1 +"28722" 4 361 5 1 1 0 4 1 100 1709 195 293 2861 7 4 1 1843 3 50 98 1 1 +"28723" 4 361 5 1 1 0 4 1 100 1709 293 220 1585 3 1 0 1836 4 25 73 1 0 +"28724" 4 361 5 1 1 0 4 1 100 1709 220 429 2066 1 9 0 565 1 95 209 0 1 +"28725" 4 361 5 1 1 0 4 1 100 1709 429 837 1503 8 7 1 637 1 95 408 1 1 +"28726" 4 361 5 1 1 0 4 1 100 1709 837 1632 2052 9 2 1 553 1 95 795 1 1 +"28727" 4 361 5 1 1 0 4 1 100 1709 1632 1714 3539 5 6 0 0 5 5 82 0 1 +"28728" 4 361 5 1 1 0 4 1 100 1709 1714 1628 2280 6 8 1 0 5 5 86 0 0 +"28729" 4 361 5 1 1 0 4 1 100 1709 1628 1709 1897 4 7 0 0 5 5 81 0 1 +"28730" 4 366 2 0 1 1 1 1 100 223 100 150 7778 8 3 1 1246 3 50 50 1 1 +"28731" 4 366 2 0 1 1 1 1 100 223 150 188 12024 3 7 0 1225 2 25 38 0 1 +"28732" 4 366 2 0 1 1 1 1 100 223 188 235 3948 8 2 1 973 2 25 47 1 1 +"28733" 4 366 2 0 1 1 1 1 100 223 235 223 2324 2 1 0 1395 1 5 12 1 0 +"28734" 4 366 3 1 1 1 1 1 100 414 100 150 4358 9 7 1 764 3 50 50 1 1 +"28735" 4 366 3 1 1 1 1 1 100 414 150 225 4037 4 8 0 763 3 50 75 0 1 +"28736" 4 366 3 1 1 1 1 1 100 414 225 281 3340 8 2 1 1627 2 25 56 1 
1 +"28737" 4 366 3 1 1 1 1 1 100 414 281 295 4011 2 1 1 1241 1 5 14 1 1 +"28738" 4 366 3 1 1 1 1 1 100 414 295 310 1811 7 6 1 578 1 5 15 1 1 +"28739" 4 366 3 1 1 1 1 1 100 414 310 388 4132 5 3 1 989 2 25 78 1 1 +"28740" 4 366 3 1 1 1 1 1 100 414 388 582 2275 3 6 0 1108 3 50 194 0 1 +"28741" 4 366 3 1 1 1 1 1 100 414 582 436 1836 6 9 1 1579 2 25 146 0 0 +"28742" 4 366 3 1 1 1 1 1 100 414 436 414 3141 1 4 1 1309 1 5 22 0 0 +"28743" 4 366 3 1 1 1 2 1 100 109 100 125 2238 2 9 0 1393 2 25 25 0 1 +"28744" 4 366 3 1 1 1 2 1 100 109 125 94 2860 4 10 1 1226 2 25 31 0 0 +"28745" 4 366 3 1 1 1 2 1 100 109 94 70 2788 3 1 0 3093 2 25 24 1 0 +"28746" 4 366 3 1 1 1 2 1 100 109 70 88 1671 8 6 1 869 2 25 18 1 1 +"28747" 4 366 3 1 1 1 2 1 100 109 88 66 4817 5 7 1 953 2 25 22 0 0 +"28748" 4 366 3 1 1 1 2 1 100 109 66 33 3537 6 8 1 503 3 50 33 0 0 +"28749" 4 366 3 1 1 1 2 1 100 109 33 41 2676 7 5 1 1178 2 25 8 1 1 +"28750" 4 366 3 1 1 1 2 1 100 109 41 62 4479 1 10 0 624 3 50 21 0 1 +"28751" 4 366 3 1 1 1 2 1 100 109 62 109 1439 9 3 1 576 4 75 47 1 1 +"28752" 4 366 3 1 1 1 3 1 100 164 100 75 3570 3 5 1 454 2 25 25 0 0 +"28753" 4 366 3 1 1 1 3 1 100 164 75 94 2141 8 9 0 588 2 25 19 0 1 +"28754" 4 366 3 1 1 1 3 1 100 164 94 118 1589 2 4 0 503 2 25 24 0 1 +"28755" 4 366 3 1 1 1 3 1 100 164 118 177 1756 6 3 1 598 3 50 59 1 1 +"28756" 4 366 3 1 1 1 3 1 100 164 177 221 1719 7 6 1 443 2 25 44 1 1 +"28757" 4 366 3 1 1 1 3 1 100 164 221 210 3283 4 2 0 588 1 5 11 1 0 +"28758" 4 366 3 1 1 1 3 1 100 164 210 263 2253 1 8 0 696 2 25 53 0 1 +"28759" 4 366 3 1 1 1 3 1 100 164 263 131 1636 5 7 1 388 3 50 132 0 0 +"28760" 4 366 3 1 1 1 3 1 100 164 131 164 1783 9 5 1 432 2 25 33 1 1 +"28761" 4 366 3 1 1 1 4 1 100 93 100 125 1106 8 7 1 579 2 25 25 1 1 +"28762" 4 366 3 1 1 1 4 1 100 93 125 156 2700 3 10 0 906 2 25 31 0 1 +"28763" 4 366 3 1 1 1 4 1 100 93 156 117 2292 7 9 1 404 2 25 39 0 0 +"28764" 4 366 3 1 1 1 4 1 100 93 117 146 1542 9 1 1 850 2 25 29 1 1 +"28765" 4 366 3 1 1 1 4 1 100 93 146 139 5759 2 3 1 995 1 5 7 0 0 +"28766" 4 366 3 1 1 1 4 1 100 93 139 132 3568 1 8 1 396 1 5 7 0 0 +"28767" 4 366 3 1 1 1 4 1 100 93 132 165 2359 5 4 1 508 2 25 33 1 1 +"28768" 4 366 3 1 1 1 4 1 100 93 165 124 2713 4 2 0 509 2 25 41 1 0 +"28769" 4 366 3 1 1 1 4 1 100 93 124 93 2514 6 3 0 720 2 25 31 1 0 +"28770" 4 366 4 0 1 0 1 1 100 16 100 150 4039 2 7 0 2084 3 50 50 0 1 +"28771" 4 366 4 0 1 0 1 1 100 16 150 263 2406 3 7 0 1401 2 75 113 0 1 +"28772" 4 366 4 0 1 0 1 1 100 16 263 66 3004 2 8 1 738 2 75 197 0 0 +"28773" 4 366 4 0 1 0 1 1 100 16 66 16 1772 8 9 1 861 2 75 50 0 0 +"28774" 4 366 5 1 1 0 1 1 100 14 100 50 3580 1 3 1 631 3 50 50 0 0 +"28775" 4 366 5 1 1 0 1 1 100 14 50 75 1479 6 5 1 1278 3 50 25 1 1 +"28776" 4 366 5 1 1 0 1 1 100 14 75 37 3774 2 8 1 728 3 50 38 0 0 +"28777" 4 366 5 1 1 0 1 1 100 14 37 9 1019 8 9 1 408 2 75 28 0 0 +"28778" 4 366 5 1 1 0 1 1 100 14 9 16 1946 3 4 0 811 2 75 7 0 1 +"28779" 4 366 5 1 1 0 1 1 100 14 16 8 1569 5 7 1 1117 3 50 8 0 0 +"28780" 4 366 5 1 1 0 1 1 100 14 8 16 1429 7 4 1 1527 1 95 8 1 1 +"28781" 4 366 5 1 1 0 1 1 100 14 16 8 1419 4 1 0 661 3 50 8 1 0 +"28782" 4 366 5 1 1 0 1 1 100 14 8 14 963 9 6 1 787 2 75 6 1 1 +"28783" 4 366 5 1 1 0 2 1 100 1694 100 175 2277 8 1 1 606 2 75 75 1 1 +"28784" 4 366 5 1 1 0 2 1 100 1694 175 306 2726 6 2 1 723 2 75 131 1 1 +"28785" 4 366 5 1 1 0 2 1 100 1694 306 459 3589 7 9 0 672 3 50 153 0 1 +"28786" 4 366 5 1 1 0 2 1 100 1694 459 803 2125 2 10 0 1093 2 75 344 0 1 +"28787" 4 366 5 1 1 0 2 1 100 1694 803 1205 2166 5 3 1 1029 3 50 402 1 1 +"28788" 4 366 5 1 1 0 2 1 100 
1694 1205 602 2001 4 2 0 688 3 50 603 1 0 +"28789" 4 366 5 1 1 0 2 1 100 1694 602 903 1612 3 5 0 525 3 50 301 0 1 +"28790" 4 366 5 1 1 0 2 1 100 1694 903 1129 1609 9 4 1 522 4 25 226 1 1 +"28791" 4 366 5 1 1 0 2 1 100 1694 1129 1694 3343 1 7 0 669 3 50 565 0 1 +"28792" 4 366 5 1 1 0 3 1 100 19 100 175 1775 7 5 1 888 2 75 75 1 1 +"28793" 4 366 5 1 1 0 3 1 100 19 175 44 1179 2 1 0 921 2 75 131 1 0 +"28794" 4 366 5 1 1 0 3 1 100 19 44 66 1474 8 6 1 794 3 50 22 1 1 +"28795" 4 366 5 1 1 0 3 1 100 19 66 116 2281 4 7 0 834 2 75 50 0 1 +"28796" 4 366 5 1 1 0 3 1 100 19 116 203 3528 3 10 0 798 2 75 87 0 1 +"28797" 4 366 5 1 1 0 3 1 100 19 203 10 1293 6 8 1 1373 1 95 193 0 0 +"28798" 4 366 5 1 1 0 3 1 100 19 10 20 2954 9 2 1 595 1 95 10 1 1 +"28799" 4 366 5 1 1 0 3 1 100 19 20 39 1419 5 3 1 741 1 95 19 1 1 +"28800" 4 366 5 1 1 0 3 1 100 19 39 19 1679 1 10 1 829 3 50 20 0 0 +"28801" 4 366 5 1 1 0 4 1 100 0 100 50 1479 2 3 1 838 3 50 50 0 0 +"28802" 4 366 5 1 1 0 4 1 100 0 50 88 1219 7 4 1 639 2 75 38 1 1 +"28803" 4 366 5 1 1 0 4 1 100 0 88 4 1117 3 1 0 1661 1 95 84 1 0 +"28804" 4 366 5 1 1 0 4 1 100 0 4 8 1756 1 9 0 613 1 95 4 0 1 +"28805" 4 366 5 1 1 0 4 1 100 0 8 16 1469 8 7 1 1061 1 95 8 1 1 +"28806" 4 366 5 1 1 0 4 1 100 0 16 31 6719 9 2 1 693 1 95 15 1 1 +"28807" 4 366 5 1 1 0 4 1 100 0 31 60 1377 5 6 0 845 1 95 29 0 1 +"28808" 4 366 5 1 1 0 4 1 100 0 60 3 1250 6 8 1 908 1 95 57 0 0 +"28809" 4 366 5 1 1 0 4 1 100 0 3 0 2358 4 7 1 1083 1 95 3 0 0 +"28810" 4 368 2 0 1 1 1 1 100 211 100 150 4648 8 3 1 1576 3 50 50 1 1 +"28811" 4 368 2 0 1 1 1 1 100 211 150 225 10107 3 7 0 1262 3 50 75 0 1 +"28812" 4 368 2 0 1 1 1 1 100 211 225 281 4616 8 2 1 1137 2 25 56 1 1 +"28813" 4 368 2 0 1 1 1 1 100 211 281 211 3705 2 1 0 775 2 25 70 1 0 +"28814" 4 368 3 1 1 1 1 0 100 1 100 175 11041 9 7 1 3330 4 75 75 1 1 +"28815" 4 368 3 1 1 1 1 0 100 1 175 87 4769 4 8 1 1084 3 50 88 0 0 +"28816" 4 368 3 1 1 1 1 0 100 1 87 109 3157 8 2 1 1444 2 25 22 1 1 +"28817" 4 368 3 1 1 1 1 0 100 1 109 27 3045 2 1 0 698 4 75 82 1 0 +"28818" 4 368 3 1 1 1 1 0 100 1 27 47 5366 7 6 1 541 4 75 20 1 1 +"28819" 4 368 3 1 1 1 1 0 100 1 47 12 2401 5 3 0 1708 4 75 35 1 0 +"28820" 4 368 3 1 1 1 1 0 100 1 12 21 8985 3 6 0 1115 4 75 9 0 1 +"28821" 4 368 3 1 1 1 1 0 100 1 21 1 2683 6 9 1 2185 5 95 20 0 0 +"28822" 4 368 3 1 1 1 2 0 100 1 100 195 3838 2 9 0 654 5 95 95 0 1 +"28823" 4 368 3 1 1 1 2 0 100 1 195 341 3164 4 10 0 1596 4 75 146 0 1 +"28824" 4 368 3 1 1 1 2 0 100 1 341 85 3661 3 1 0 1101 4 75 256 1 0 +"28825" 4 368 3 1 1 1 2 0 100 1 85 149 4166 8 6 1 1221 4 75 64 1 1 +"28826" 4 368 3 1 1 1 2 0 100 1 149 37 3457 5 7 1 954 4 75 112 0 0 +"28827" 4 368 3 1 1 1 2 0 100 1 37 2 3042 6 8 1 1192 5 95 35 0 0 +"28828" 4 368 3 1 1 1 2 0 100 1 2 1 9732 7 5 0 1155 3 50 1 1 0 +"28829" 4 368 3 1 1 1 3 0 100 1 100 195 2969 3 5 0 458 5 95 95 0 1 +"28830" 4 368 3 1 1 1 3 0 100 1 195 10 2865 8 9 1 1503 5 95 185 0 0 +"28831" 4 368 3 1 1 1 3 0 100 1 10 2 5773 2 4 1 1552 4 75 8 0 0 +"28832" 4 368 3 1 1 1 3 0 100 1 2 1 1779 6 3 0 396 2 25 1 1 0 +"28833" 4 368 3 1 1 1 4 1 100 1 100 175 7746 8 7 1 1480 4 75 75 1 1 +"28834" 4 368 3 1 1 1 4 1 100 1 175 131 2790 3 10 1 815 2 25 44 0 0 +"28835" 4 368 3 1 1 1 4 1 100 1 131 33 2174 7 9 1 1157 4 75 98 0 0 +"28836" 4 368 3 1 1 1 4 1 100 1 33 50 1963 9 1 1 1692 3 50 17 1 1 +"28837" 4 368 3 1 1 1 4 1 100 1 50 98 3313 2 3 0 1017 5 95 48 0 1 +"28838" 4 368 3 1 1 1 4 1 100 1 98 191 1806 1 8 0 1222 5 95 93 0 1 +"28839" 4 368 3 1 1 1 4 1 100 1 191 48 2553 5 4 0 1302 4 75 143 1 0 +"28840" 4 368 3 1 1 1 4 1 100 1 48 2 2533 4 2 0 0 5 
95 46 1 0 +"28841" 4 368 3 1 1 1 4 1 100 1 2 1 2178 6 3 0 648 2 25 1 1 0 +"28842" 4 368 4 0 1 0 1 1 100 7 100 150 2826 2 7 0 370 3 50 50 0 1 +"28843" 4 368 4 0 1 0 1 1 100 7 150 75 2525 3 7 1 1035 3 50 75 0 0 +"28844" 4 368 4 0 1 0 1 1 100 7 75 131 5183 2 8 0 1334 2 75 56 0 1 +"28845" 4 368 4 0 1 0 1 1 100 7 131 7 3307 8 9 1 772 1 95 124 0 0 +"28846" 4 368 5 1 1 0 1 1 100 4329 100 195 8504 1 3 0 2129 1 95 95 0 1 +"28847" 4 368 5 1 1 0 1 1 100 4329 195 380 2625 6 5 1 1546 1 95 185 1 1 +"28848" 4 368 5 1 1 0 1 1 100 4329 380 570 1429 2 8 0 559 3 50 190 0 1 +"28849" 4 368 5 1 1 0 1 1 100 4329 570 285 2290 8 9 1 635 3 50 285 0 0 +"28850" 4 368 5 1 1 0 1 1 100 4329 285 556 1210 3 4 0 2047 1 95 271 0 1 +"28851" 4 368 5 1 1 0 1 1 100 4329 556 1084 2067 5 7 0 934 1 95 528 0 1 +"28852" 4 368 5 1 1 0 1 1 100 4329 1084 2114 2463 7 4 1 1407 1 95 1030 1 1 +"28853" 4 368 5 1 1 0 1 1 100 4329 2114 2220 2282 4 1 1 1275 5 5 106 1 1 +"28854" 4 368 5 1 1 0 1 1 100 4329 2220 4329 2412 9 6 1 879 1 95 2109 1 1 +"28855" 4 368 5 1 1 0 2 0 100 0 100 195 2045 8 1 1 1545 1 95 95 1 1 +"28856" 4 368 5 1 1 0 2 0 100 0 195 380 3813 6 2 1 965 1 95 185 1 1 +"28857" 4 368 5 1 1 0 2 0 100 0 380 19 2876 7 9 1 2881 1 95 361 0 0 +"28858" 4 368 5 1 1 0 2 0 100 0 19 37 2405 2 10 0 1161 1 95 18 0 1 +"28859" 4 368 5 1 1 0 2 0 100 0 37 9 1430 5 3 0 1056 2 75 28 1 0 +"28860" 4 368 5 1 1 0 2 0 100 0 9 0 1563 4 2 0 1111 1 95 9 1 0 +"28861" 4 368 5 1 1 0 3 1 100 1100 100 195 2897 7 5 1 1405 1 95 95 1 1 +"28862" 4 368 5 1 1 0 3 1 100 1100 195 10 1647 2 1 0 1142 1 95 185 1 0 +"28863" 4 368 5 1 1 0 3 1 100 1100 10 20 1893 8 6 1 1968 1 95 10 1 1 +"28864" 4 368 5 1 1 0 3 1 100 1100 20 39 2096 4 7 0 857 1 95 19 0 1 +"28865" 4 368 5 1 1 0 3 1 100 1100 39 76 1813 3 10 0 805 1 95 37 0 1 +"28866" 4 368 5 1 1 0 3 1 100 1100 76 148 2723 6 8 0 423 1 95 72 0 1 +"28867" 4 368 5 1 1 0 3 1 100 1100 148 289 1144 9 2 1 560 1 95 141 1 1 +"28868" 4 368 5 1 1 0 3 1 100 1100 289 564 1210 5 3 1 782 1 95 275 1 1 +"28869" 4 368 5 1 1 0 3 1 100 1100 564 1100 1230 1 10 0 866 1 95 536 0 1 +"28870" 4 368 5 1 1 0 4 0 100 0 100 195 1873 2 3 0 1325 1 95 95 0 1 +"28871" 4 368 5 1 1 0 4 0 100 0 195 97 1619 7 4 0 796 3 50 98 1 0 +"28872" 4 368 5 1 1 0 4 0 100 0 97 5 1995 3 1 0 570 1 95 92 1 0 +"28873" 4 368 5 1 1 0 4 0 100 0 5 0 1876 1 9 1 336 1 95 5 0 0 +"28874" 4 374 2 0 1 1 1 1 100 123 100 125 8561 8 3 1 2615 2 25 25 1 1 +"28875" 4 374 2 0 1 1 1 1 100 123 125 156 8872 3 7 0 1249 2 25 31 0 1 +"28876" 4 374 2 0 1 1 1 1 100 123 156 164 2761 8 2 1 1284 1 5 8 1 1 +"28877" 4 374 2 0 1 1 1 1 100 123 164 123 4948 2 1 0 462 2 25 41 1 0 +"28878" 4 374 3 1 1 1 1 1 100 420 100 175 3855 9 7 1 973 4 75 75 1 1 +"28879" 4 374 3 1 1 1 1 1 100 420 175 166 3387 4 8 1 959 1 5 9 0 0 +"28880" 4 374 3 1 1 1 1 1 100 420 166 208 6727 8 2 1 1804 2 25 42 1 1 +"28881" 4 374 3 1 1 1 1 1 100 420 208 198 2499 2 1 0 978 1 5 10 1 0 +"28882" 4 374 3 1 1 1 1 1 100 420 198 248 2120 7 6 1 329 2 25 50 1 1 +"28883" 4 374 3 1 1 1 1 1 100 420 248 236 2895 5 3 0 363 1 5 12 1 0 +"28884" 4 374 3 1 1 1 1 1 100 420 236 295 2007 3 6 0 1189 2 25 59 0 1 +"28885" 4 374 3 1 1 1 1 1 100 420 295 280 4234 6 9 1 598 1 5 15 0 0 +"28886" 4 374 3 1 1 1 1 1 100 420 280 420 1831 1 4 0 1014 3 50 140 0 1 +"28887" 4 374 3 1 1 1 2 1 100 396 100 150 3904 2 9 0 300 3 50 50 0 1 +"28888" 4 374 3 1 1 1 2 1 100 396 150 142 2011 4 10 1 593 1 5 8 0 0 +"28889" 4 374 3 1 1 1 2 1 100 396 142 135 1977 3 1 0 761 1 5 7 1 0 +"28890" 4 374 3 1 1 1 2 1 100 396 135 169 1226 8 6 1 453 2 25 34 1 1 +"28891" 4 374 3 1 1 1 2 1 100 396 169 177 1844 5 7 0 
290 1 5 8 0 1 +"28892" 4 374 3 1 1 1 2 1 100 396 177 168 2057 6 8 1 940 1 5 9 0 0 +"28893" 4 374 3 1 1 1 2 1 100 396 168 176 3188 7 5 1 394 1 5 8 1 1 +"28894" 4 374 3 1 1 1 2 1 100 396 176 264 1340 1 10 0 346 3 50 88 0 1 +"28895" 4 374 3 1 1 1 2 1 100 396 264 396 1518 9 3 1 284 3 50 132 1 1 +"28896" 4 374 3 1 1 1 3 1 100 306 100 95 2237 3 5 1 976 1 5 5 0 0 +"28897" 4 374 3 1 1 1 3 1 100 306 95 47 1398 8 9 1 298 3 50 48 0 0 +"28898" 4 374 3 1 1 1 3 1 100 306 47 59 3449 2 4 0 763 2 25 12 0 1 +"28899" 4 374 3 1 1 1 3 1 100 306 59 89 2645 6 3 1 396 3 50 30 1 1 +"28900" 4 374 3 1 1 1 3 1 100 306 89 111 1479 7 6 1 2767 2 25 22 1 1 +"28901" 4 374 3 1 1 1 3 1 100 306 111 105 5385 4 2 0 325 1 5 6 1 0 +"28902" 4 374 3 1 1 1 3 1 100 306 105 184 1419 1 8 0 485 4 75 79 0 1 +"28903" 4 374 3 1 1 1 3 1 100 306 184 175 2340 5 7 1 641 1 5 9 0 0 +"28904" 4 374 3 1 1 1 3 1 100 306 175 306 1408 9 5 1 979 4 75 131 1 1 +"28905" 4 374 3 1 1 1 4 1 100 125 100 105 1930 8 7 1 292 1 5 5 1 1 +"28906" 4 374 3 1 1 1 4 1 100 125 105 110 1187 3 10 0 320 1 5 5 0 1 +"28907" 4 374 3 1 1 1 4 1 100 125 110 104 1766 7 9 1 567 1 5 6 0 0 +"28908" 4 374 3 1 1 1 4 1 100 125 104 109 2042 9 1 1 918 1 5 5 1 1 +"28909" 4 374 3 1 1 1 4 1 100 125 109 114 3464 2 3 0 314 1 5 5 0 1 +"28910" 4 374 3 1 1 1 4 1 100 125 114 120 1247 1 8 0 378 1 5 6 0 1 +"28911" 4 374 3 1 1 1 4 1 100 125 120 126 1629 5 4 1 397 1 5 6 1 1 +"28912" 4 374 3 1 1 1 4 1 100 125 126 132 1383 4 2 1 364 1 5 6 1 1 +"28913" 4 374 3 1 1 1 4 1 100 125 132 125 1384 6 3 0 460 1 5 7 1 0 +"28914" 4 374 4 0 1 0 1 1 100 128 100 175 9354 2 7 0 297 2 75 75 0 1 +"28915" 4 374 4 0 1 0 1 1 100 128 175 263 2857 3 7 0 1079 3 50 88 0 1 +"28916" 4 374 4 0 1 0 1 1 100 128 263 513 1592 2 8 0 1810 1 95 250 0 1 +"28917" 4 374 4 0 1 0 1 1 100 128 513 128 2985 8 9 1 537 2 75 385 0 0 +"28918" 4 374 5 1 1 0 1 1 100 107 100 195 3089 1 3 0 2587 1 95 95 0 1 +"28919" 4 374 5 1 1 0 1 1 100 107 195 244 4405 6 5 1 353 4 25 49 1 1 +"28920" 4 374 5 1 1 0 1 1 100 107 244 305 2189 2 8 0 468 4 25 61 0 1 +"28921" 4 374 5 1 1 0 1 1 100 107 305 152 1787 8 9 1 572 3 50 153 0 0 +"28922" 4 374 5 1 1 0 1 1 100 107 152 228 1574 3 4 0 352 3 50 76 0 1 +"28923" 4 374 5 1 1 0 1 1 100 107 228 57 1509 5 7 1 682 2 75 171 0 0 +"28924" 4 374 5 1 1 0 1 1 100 107 57 111 1360 7 4 1 2780 1 95 54 1 1 +"28925" 4 374 5 1 1 0 1 1 100 107 111 55 1469 4 1 0 686 3 50 56 1 0 +"28926" 4 374 5 1 1 0 1 1 100 107 55 107 1875 9 6 1 2278 1 95 52 1 1 +"28927" 4 374 5 1 1 0 2 1 100 365 100 195 3114 8 1 1 1819 1 95 95 1 1 +"28928" 4 374 5 1 1 0 2 1 100 365 195 380 1681 6 2 1 303 1 95 185 1 1 +"28929" 4 374 5 1 1 0 2 1 100 365 380 19 1142 7 9 1 334 1 95 361 0 0 +"28930" 4 374 5 1 1 0 2 1 100 365 19 37 1147 2 10 0 1298 1 95 18 0 1 +"28931" 4 374 5 1 1 0 2 1 100 365 37 65 1217 5 3 1 2622 2 75 28 1 1 +"28932" 4 374 5 1 1 0 2 1 100 365 65 49 1126 4 2 0 262 4 25 16 1 0 +"28933" 4 374 5 1 1 0 2 1 100 365 49 96 1990 3 5 0 333 1 95 47 0 1 +"28934" 4 374 5 1 1 0 2 1 100 365 96 187 1075 9 4 1 285 1 95 91 1 1 +"28935" 4 374 5 1 1 0 2 1 100 365 187 365 1277 1 7 0 2045 1 95 178 0 1 +"28936" 4 374 5 1 1 0 3 1 100 4 100 175 1213 7 5 1 1673 2 75 75 1 1 +"28937" 4 374 5 1 1 0 3 1 100 4 175 9 1399 2 1 0 329 1 95 166 1 0 +"28938" 4 374 5 1 1 0 3 1 100 4 9 16 1400 8 6 1 419 2 75 7 1 1 +"28939" 4 374 5 1 1 0 3 1 100 4 16 24 2107 4 7 0 1724 3 50 8 0 1 +"28940" 4 374 5 1 1 0 3 1 100 4 24 42 1304 3 10 0 416 2 75 18 0 1 +"28941" 4 374 5 1 1 0 3 1 100 4 42 2 1697 6 8 1 590 1 95 40 0 0 +"28942" 4 374 5 1 1 0 3 1 100 4 2 4 1799 9 2 1 992 1 95 2 1 1 +"28943" 4 374 5 1 1 0 3 1 
100 4 4 2 3893 5 3 0 386 3 50 2 1 0 +"28944" 4 374 5 1 1 0 3 1 100 4 2 4 1436 1 10 0 284 1 95 2 0 1 +"28945" 4 374 5 1 1 0 4 1 100 2308 100 175 1552 2 3 0 4595 2 75 75 0 1 +"28946" 4 374 5 1 1 0 4 1 100 2308 175 341 1286 7 4 1 375 1 95 166 1 1 +"28947" 4 374 5 1 1 0 4 1 100 2308 341 170 1498 3 1 0 2041 3 50 171 1 0 +"28948" 4 374 5 1 1 0 4 1 100 2308 170 332 1333 1 9 0 275 1 95 162 0 1 +"28949" 4 374 5 1 1 0 4 1 100 2308 332 647 1211 8 7 1 375 1 95 315 1 1 +"28950" 4 374 5 1 1 0 4 1 100 2308 647 1262 1443 9 2 1 331 1 95 615 1 1 +"28951" 4 374 5 1 1 0 4 1 100 2308 1262 2461 2561 5 6 0 1197 1 95 1199 0 1 +"28952" 4 374 5 1 1 0 4 1 100 2308 2461 1846 2113 6 8 1 914 4 25 615 0 0 +"28953" 4 374 5 1 1 0 4 1 100 2308 1846 2308 1116 4 7 0 2509 4 25 462 0 1 +"28954" 4 379 2 0 1 0 1 1 100 33 100 175 10901 2 7 0 4076 2 75 75 0 1 +"28955" 4 379 2 0 1 0 1 1 100 33 175 341 6527 3 7 0 1726 1 95 166 0 1 +"28956" 4 379 2 0 1 0 1 1 100 33 341 665 2657 2 8 0 624 1 95 324 0 1 +"28957" 4 379 2 0 1 0 1 1 100 33 665 33 1303 8 9 1 574 1 95 632 0 0 +"28958" 4 379 3 1 1 0 1 1 100 31 100 195 5623 1 3 0 1145 1 95 95 0 1 +"28959" 4 379 3 1 1 0 1 1 100 31 195 380 2248 6 5 1 466 1 95 185 1 1 +"28960" 4 379 3 1 1 0 1 1 100 31 380 19 2185 2 8 1 471 1 95 361 0 0 +"28961" 4 379 3 1 1 0 1 1 100 31 19 37 1710 8 9 0 1954 1 95 18 0 1 +"28962" 4 379 3 1 1 0 1 1 100 31 37 72 4818 3 4 0 745 1 95 35 0 1 +"28963" 4 379 3 1 1 0 1 1 100 31 72 4 2979 5 7 1 707 1 95 68 0 0 +"28964" 4 379 3 1 1 0 1 1 100 31 4 8 1894 7 4 1 810 1 95 4 1 1 +"28965" 4 379 3 1 1 0 1 1 100 31 8 16 3552 4 1 1 1036 1 95 8 1 1 +"28966" 4 379 3 1 1 0 1 1 100 31 16 31 1545 9 6 1 869 1 95 15 1 1 +"28967" 4 379 3 1 1 0 2 0 100 0 100 195 3685 8 1 1 683 1 95 95 1 1 +"28968" 4 379 3 1 1 0 2 0 100 0 195 10 2512 6 2 0 470 1 95 185 1 0 +"28969" 4 379 3 1 1 0 2 0 100 0 10 0 1661 7 9 1 627 1 95 10 0 0 +"28970" 4 379 3 1 1 0 3 1 100 1100 100 195 2106 7 5 1 699 1 95 95 1 1 +"28971" 4 379 3 1 1 0 3 1 100 1100 195 10 1879 2 1 0 547 1 95 185 1 0 +"28972" 4 379 3 1 1 0 3 1 100 1100 10 20 1182 8 6 1 475 1 95 10 1 1 +"28973" 4 379 3 1 1 0 3 1 100 1100 20 39 2057 4 7 0 405 1 95 19 0 1 +"28974" 4 379 3 1 1 0 3 1 100 1100 39 76 1382 3 10 0 443 1 95 37 0 1 +"28975" 4 379 3 1 1 0 3 1 100 1100 76 148 2827 6 8 0 530 1 95 72 0 1 +"28976" 4 379 3 1 1 0 3 1 100 1100 148 289 1734 9 2 1 665 1 95 141 1 1 +"28977" 4 379 3 1 1 0 3 1 100 1100 289 564 1615 5 3 1 616 1 95 275 1 1 +"28978" 4 379 3 1 1 0 3 1 100 1100 564 1100 1472 1 10 0 639 1 95 536 0 1 +"28979" 4 379 3 1 1 0 4 1 100 1 100 195 3390 2 3 0 514 1 95 95 0 1 +"28980" 4 379 3 1 1 0 4 1 100 1 195 380 2319 7 4 1 526 1 95 185 1 1 +"28981" 4 379 3 1 1 0 4 1 100 1 380 19 1258 3 1 0 444 1 95 361 1 0 +"28982" 4 379 3 1 1 0 4 1 100 1 19 37 1604 1 9 0 517 1 95 18 0 1 +"28983" 4 379 3 1 1 0 4 1 100 1 37 72 1018 8 7 1 503 1 95 35 1 1 +"28984" 4 379 3 1 1 0 4 1 100 1 72 140 1076 9 2 1 446 1 95 68 1 1 +"28985" 4 379 3 1 1 0 4 1 100 1 140 273 3238 5 6 0 538 1 95 133 0 1 +"28986" 4 379 3 1 1 0 4 1 100 1 273 14 949 6 8 1 813 1 95 259 0 0 +"28987" 4 379 3 1 1 0 4 1 100 1 14 1 2108 4 7 1 557 1 95 13 0 0 +"28988" 4 379 4 0 1 1 1 1 100 175 100 125 4618 8 3 1 1068 2 25 25 1 1 +"28989" 4 379 4 0 1 1 1 1 100 175 125 156 2471 3 7 0 1855 2 25 31 0 1 +"28990" 4 379 4 0 1 1 1 1 100 175 156 234 1044 8 2 1 624 3 50 78 1 1 +"28991" 4 379 4 0 1 1 1 1 100 175 234 175 904 2 1 0 3898 2 25 59 1 0 +"28992" 4 379 5 1 1 1 1 1 100 320 100 125 2662 9 7 1 605 2 25 25 1 1 +"28993" 4 379 5 1 1 1 1 1 100 320 125 188 1365 4 8 0 570 3 50 63 0 1 +"28994" 4 379 5 1 1 1 1 1 100 320 188 
235 1964 8 2 1 493 2 25 47 1 1 +"28995" 4 379 5 1 1 1 1 1 100 320 235 176 946 2 1 0 483 2 25 59 1 0 +"28996" 4 379 5 1 1 1 1 1 100 320 176 167 1898 7 6 0 533 1 5 9 1 0 +"28997" 4 379 5 1 1 1 1 1 100 320 167 175 3433 5 3 1 2140 1 5 8 1 1 +"28998" 4 379 5 1 1 1 1 1 100 320 175 219 1275 3 6 0 844 2 25 44 0 1 +"28999" 4 379 5 1 1 1 1 1 100 320 219 164 2497 6 9 1 2360 2 25 55 0 0 +"29000" 4 379 5 1 1 1 1 1 100 320 164 320 1141 1 4 0 0 5 95 156 0 1 +"29001" 4 379 5 1 1 1 2 1 100 1351 100 175 3277 2 9 0 439 4 75 75 0 1 +"29002" 4 379 5 1 1 1 2 1 100 1351 175 219 1743 4 10 0 474 2 25 44 0 1 +"29003" 4 379 5 1 1 1 2 1 100 1351 219 164 2016 3 1 0 838 2 25 55 1 0 +"29004" 4 379 5 1 1 1 2 1 100 1351 164 287 1155 8 6 1 474 4 75 123 1 1 +"29005" 4 379 5 1 1 1 2 1 100 1351 287 301 4369 5 7 0 1414 1 5 14 0 1 +"29006" 4 379 5 1 1 1 2 1 100 1351 301 226 2376 6 8 1 1524 2 25 75 0 0 +"29007" 4 379 5 1 1 1 2 1 100 1351 226 396 1089 7 5 1 547 4 75 170 1 1 +"29008" 4 379 5 1 1 1 2 1 100 1351 396 772 1479 1 10 0 711 5 95 376 0 1 +"29009" 4 379 5 1 1 1 2 1 100 1351 772 1351 1338 9 3 1 680 4 75 579 1 1 +"29010" 4 379 5 1 1 1 3 1 100 364 100 150 6481 3 5 0 1251 3 50 50 0 1 +"29011" 4 379 5 1 1 1 3 1 100 364 150 37 1108 8 9 1 1566 4 75 113 0 0 +"29012" 4 379 5 1 1 1 3 1 100 364 37 72 1520 2 4 0 926 5 95 35 0 1 +"29013" 4 379 5 1 1 1 3 1 100 364 72 108 2422 6 3 1 809 3 50 36 1 1 +"29014" 4 379 5 1 1 1 3 1 100 364 108 189 1280 7 6 1 1833 4 75 81 1 1 +"29015" 4 379 5 1 1 1 3 1 100 364 189 142 1919 4 2 0 2098 2 25 47 1 0 +"29016" 4 379 5 1 1 1 3 1 100 364 142 277 1059 1 8 0 719 5 95 135 0 1 +"29017" 4 379 5 1 1 1 3 1 100 364 277 208 1442 5 7 1 588 2 25 69 0 0 +"29018" 4 379 5 1 1 1 3 1 100 364 208 364 916 9 5 1 2823 4 75 156 1 1 +"29019" 4 379 5 1 1 1 4 1 100 1661 100 150 1750 8 7 1 1905 3 50 50 1 1 +"29020" 4 379 5 1 1 1 4 1 100 1661 150 225 1379 3 10 0 592 3 50 75 0 1 +"29021" 4 379 5 1 1 1 4 1 100 1661 225 169 1121 7 9 1 2961 2 25 56 0 0 +"29022" 4 379 5 1 1 1 4 1 100 1661 169 330 1105 9 1 1 643 5 95 161 1 1 +"29023" 4 379 5 1 1 1 4 1 100 1661 330 578 1321 2 3 0 784 4 75 248 0 1 +"29024" 4 379 5 1 1 1 4 1 100 1661 578 1012 1616 1 8 0 2753 4 75 434 0 1 +"29025" 4 379 5 1 1 1 4 1 100 1661 1012 1063 2636 5 4 1 2916 1 5 51 1 1 +"29026" 4 379 5 1 1 1 4 1 100 1661 1063 1329 2271 4 2 1 505 2 25 266 1 1 +"29027" 4 379 5 1 1 1 4 1 100 1661 1329 1661 1621 6 3 1 662 2 25 332 1 1 +"29028" 4 383 2 0 1 1 1 1 100 115 100 150 4644 8 3 1 1437 3 50 50 1 1 +"29029" 4 383 2 0 1 1 1 1 100 115 150 263 13086 3 7 0 1045 4 75 113 0 1 +"29030" 4 383 2 0 1 1 1 1 100 115 263 460 13376 8 2 1 874 4 75 197 1 1 +"29031" 4 383 2 0 1 1 1 1 100 115 460 115 2725 2 1 0 1634 4 75 345 1 0 +"29032" 4 383 3 1 1 1 1 0 100 0 100 150 6437 9 7 1 3992 3 50 50 1 1 +"29033" 4 383 3 1 1 1 1 0 100 0 150 263 5592 4 8 0 3468 4 75 113 0 1 +"29034" 4 383 3 1 1 1 1 0 100 0 263 460 7620 8 2 1 1839 4 75 197 1 1 +"29035" 4 383 3 1 1 1 1 0 100 0 460 23 4205 2 1 0 2027 5 95 437 1 0 +"29036" 4 383 3 1 1 1 1 0 100 0 23 45 6336 7 6 1 0 5 95 22 1 1 +"29037" 4 383 3 1 1 1 1 0 100 0 45 2 6968 5 3 0 0 5 95 43 1 0 +"29038" 4 383 3 1 1 1 1 0 100 0 2 4 8346 3 6 0 0 5 95 2 0 1 +"29039" 4 383 3 1 1 1 1 0 100 0 4 0 11379 6 9 1 0 5 95 4 0 0 +"29040" 4 383 3 1 1 1 2 1 100 540 100 195 7158 2 9 0 0 5 95 95 0 1 +"29041" 4 383 3 1 1 1 2 1 100 540 195 341 10599 4 10 0 1564 4 75 146 0 1 +"29042" 4 383 3 1 1 1 2 1 100 540 341 85 3391 3 1 0 3451 4 75 256 1 0 +"29043" 4 383 3 1 1 1 2 1 100 540 85 166 6908 8 6 1 0 5 95 81 1 1 +"29044" 4 383 3 1 1 1 2 1 100 540 166 291 18113 5 7 0 2890 4 75 125 0 1 
+"29045" 4 383 3 1 1 1 2 1 100 540 291 73 12541 6 8 1 982 4 75 218 0 0 +"29046" 4 383 3 1 1 1 2 1 100 540 73 142 3012 7 5 1 0 5 95 69 1 1 +"29047" 4 383 3 1 1 1 2 1 100 540 142 277 5099 1 10 0 0 5 95 135 0 1 +"29048" 4 383 3 1 1 1 2 1 100 540 277 540 2966 9 3 1 0 5 95 263 1 1 +"29049" 4 383 3 1 1 1 3 1 100 31 100 195 4812 3 5 0 0 5 95 95 0 1 +"29050" 4 383 3 1 1 1 3 1 100 31 195 10 7797 8 9 1 0 5 95 185 0 0 +"29051" 4 383 3 1 1 1 3 1 100 31 10 20 1927 2 4 0 0 5 95 10 0 1 +"29052" 4 383 3 1 1 1 3 1 100 31 20 39 5777 6 3 1 0 5 95 19 1 1 +"29053" 4 383 3 1 1 1 3 1 100 31 39 76 3068 7 6 1 0 5 95 37 1 1 +"29054" 4 383 3 1 1 1 3 1 100 31 76 4 6303 4 2 0 0 5 95 72 1 0 +"29055" 4 383 3 1 1 1 3 1 100 31 4 8 5961 1 8 0 0 5 95 4 0 1 +"29056" 4 383 3 1 1 1 3 1 100 31 8 16 2235 5 7 0 0 5 95 8 0 1 +"29057" 4 383 3 1 1 1 3 1 100 31 16 31 2614 9 5 1 0 5 95 15 1 1 +"29058" 4 383 3 1 1 1 4 1 100 27 100 195 7437 8 7 1 0 5 95 95 1 1 +"29059" 4 383 3 1 1 1 4 1 100 27 195 380 2024 3 10 0 0 5 95 185 0 1 +"29060" 4 383 3 1 1 1 4 1 100 27 380 19 2013 7 9 1 0 5 95 361 0 0 +"29061" 4 383 3 1 1 1 4 1 100 27 19 37 2059 9 1 1 0 5 95 18 1 1 +"29062" 4 383 3 1 1 1 4 1 100 27 37 72 2422 2 3 0 0 5 95 35 0 1 +"29063" 4 383 3 1 1 1 4 1 100 27 72 140 7041 1 8 0 0 5 95 68 0 1 +"29064" 4 383 3 1 1 1 4 1 100 27 140 273 5888 5 4 1 0 5 95 133 1 1 +"29065" 4 383 3 1 1 1 4 1 100 27 273 14 3341 4 2 0 0 5 95 259 1 0 +"29066" 4 383 3 1 1 1 4 1 100 27 14 27 2019 6 3 1 0 5 95 13 1 1 +"29067" 4 383 4 0 1 0 1 1 100 29 100 150 6083 2 7 0 1463 3 50 50 0 1 +"29068" 4 383 4 0 1 0 1 1 100 29 150 293 7207 3 7 0 3813 1 95 143 0 1 +"29069" 4 383 4 0 1 0 1 1 100 29 293 571 1725 2 8 0 1824 1 95 278 0 1 +"29070" 4 383 4 0 1 0 1 1 100 29 571 29 4383 8 9 1 956 1 95 542 0 0 +"29071" 4 383 5 1 1 0 1 0 100 0 100 195 9621 1 3 0 1265 1 95 95 0 1 +"29072" 4 383 5 1 1 0 1 0 100 0 195 380 4186 6 5 1 1336 1 95 185 1 1 +"29073" 4 383 5 1 1 0 1 0 100 0 380 741 1806 2 8 0 645 1 95 361 0 1 +"29074" 4 383 5 1 1 0 1 0 100 0 741 37 2660 8 9 1 1403 1 95 704 0 0 +"29075" 4 383 5 1 1 0 1 0 100 0 37 72 2838 3 4 0 372 1 95 35 0 1 +"29076" 4 383 5 1 1 0 1 0 100 0 72 4 3952 5 7 1 465 1 95 68 0 0 +"29077" 4 383 5 1 1 0 1 0 100 0 4 8 1513 7 4 1 342 1 95 4 1 1 +"29078" 4 383 5 1 1 0 1 0 100 0 8 0 3099 4 1 0 501 1 95 8 1 0 +"29079" 4 383 5 1 1 0 2 0 100 0 100 195 5059 8 1 1 656 1 95 95 1 1 +"29080" 4 383 5 1 1 0 2 0 100 0 195 380 2445 6 2 1 374 1 95 185 1 1 +"29081" 4 383 5 1 1 0 2 0 100 0 380 19 1634 7 9 1 378 1 95 361 0 0 +"29082" 4 383 5 1 1 0 2 0 100 0 19 37 1405 2 10 0 312 1 95 18 0 1 +"29083" 4 383 5 1 1 0 2 0 100 0 37 2 2166 5 3 0 333 1 95 35 1 0 +"29084" 4 383 5 1 1 0 2 0 100 0 2 0 1866 4 2 0 239 1 95 2 1 0 +"29085" 4 383 5 1 1 0 3 0 100 0 100 195 2702 7 5 1 267 1 95 95 1 1 +"29086" 4 383 5 1 1 0 3 0 100 0 195 10 1596 2 1 0 613 1 95 185 1 0 +"29087" 4 383 5 1 1 0 3 0 100 0 10 0 2021 8 6 0 371 1 95 10 1 0 +"29088" 4 383 5 1 1 0 4 1 100 1037 100 195 1374 2 3 0 459 1 95 95 0 1 +"29089" 4 383 5 1 1 0 4 1 100 1037 195 380 4386 7 4 1 383 1 95 185 1 1 +"29090" 4 383 5 1 1 0 4 1 100 1037 380 19 1066 3 1 0 1013 1 95 361 1 0 +"29091" 4 383 5 1 1 0 4 1 100 1037 19 37 3682 1 9 0 1024 1 95 18 0 1 +"29092" 4 383 5 1 1 0 4 1 100 1037 37 72 3060 8 7 1 984 1 95 35 1 1 +"29093" 4 383 5 1 1 0 4 1 100 1037 72 140 1615 9 2 1 384 1 95 68 1 1 +"29094" 4 383 5 1 1 0 4 1 100 1037 140 273 4034 5 6 0 336 1 95 133 0 1 +"29095" 4 383 5 1 1 0 4 1 100 1037 273 532 3341 6 8 0 243 1 95 259 0 1 +"29096" 4 383 5 1 1 0 4 1 100 1037 532 1037 1229 4 7 0 328 1 95 505 0 1 +"29097" 4 384 2 0 1 1 1 1 100 197 100 
150 6181 8 3 1 1757 3 50 50 1 1 +"29098" 4 384 2 0 1 1 1 1 100 197 150 263 8878 3 7 0 797 4 75 113 0 1 +"29099" 4 384 2 0 1 1 1 1 100 197 263 395 3749 8 2 1 1152 3 50 132 1 1 +"29100" 4 384 2 0 1 1 1 1 100 197 395 197 2831 2 1 0 750 3 50 198 1 0 +"29101" 4 384 3 1 1 1 1 1 100 242 100 175 8749 9 7 1 747 4 75 75 1 1 +"29102" 4 384 3 1 1 1 1 1 100 242 175 263 5932 4 8 0 1568 3 50 88 0 1 +"29103" 4 384 3 1 1 1 1 1 100 242 263 395 4523 8 2 1 746 3 50 132 1 1 +"29104" 4 384 3 1 1 1 1 1 100 242 395 296 3363 2 1 0 2273 2 25 99 1 0 +"29105" 4 384 3 1 1 1 1 1 100 242 296 222 6714 7 6 0 1912 2 25 74 1 0 +"29106" 4 384 3 1 1 1 1 1 100 242 222 166 5195 5 3 0 1273 2 25 56 1 0 +"29107" 4 384 3 1 1 1 1 1 100 242 166 249 2250 3 6 0 740 3 50 83 0 1 +"29108" 4 384 3 1 1 1 1 1 100 242 249 124 3366 6 9 1 1732 3 50 125 0 0 +"29109" 4 384 3 1 1 1 1 1 100 242 124 242 2831 1 4 0 1326 5 95 118 0 1 +"29110" 4 384 3 1 1 1 2 1 100 507 100 195 4724 2 9 0 1242 5 95 95 0 1 +"29111" 4 384 3 1 1 1 2 1 100 507 195 244 5236 4 10 0 568 2 25 49 0 1 +"29112" 4 384 3 1 1 1 2 1 100 507 244 183 5643 3 1 0 1332 2 25 61 1 0 +"29113" 4 384 3 1 1 1 2 1 100 507 183 229 2001 8 6 1 3544 2 25 46 1 1 +"29114" 4 384 3 1 1 1 2 1 100 507 229 240 5892 5 7 0 681 1 5 11 0 1 +"29115" 4 384 3 1 1 1 2 1 100 507 240 180 4382 6 8 1 435 2 25 60 0 0 +"29116" 4 384 3 1 1 1 2 1 100 507 180 225 2483 7 5 1 1083 2 25 45 1 1 +"29117" 4 384 3 1 1 1 2 1 100 507 225 338 3413 1 10 0 1528 3 50 113 0 1 +"29118" 4 384 3 1 1 1 2 1 100 507 338 507 2300 9 3 1 2165 3 50 169 1 1 +"29119" 4 384 3 1 1 1 3 1 100 72 100 95 3536 3 5 1 821 1 5 5 0 0 +"29120" 4 384 3 1 1 1 3 1 100 72 95 5 4716 8 9 1 1073 5 95 90 0 0 +"29121" 4 384 3 1 1 1 3 1 100 72 5 9 3684 2 4 0 2091 4 75 4 0 1 +"29122" 4 384 3 1 1 1 3 1 100 72 9 14 4277 6 3 1 1679 3 50 5 1 1 +"29123" 4 384 3 1 1 1 3 1 100 72 14 27 4196 7 6 1 1138 5 95 13 1 1 +"29124" 4 384 3 1 1 1 3 1 100 72 27 20 2913 4 2 0 1628 2 25 7 1 0 +"29125" 4 384 3 1 1 1 3 1 100 72 20 39 4379 1 8 0 1265 5 95 19 0 1 +"29126" 4 384 3 1 1 1 3 1 100 72 39 37 3218 5 7 1 1014 1 5 2 0 0 +"29127" 4 384 3 1 1 1 3 1 100 72 37 72 2923 9 5 1 1286 5 95 35 1 1 +"29128" 4 384 3 1 1 1 4 1 100 384 100 195 3643 8 7 1 780 5 95 95 1 1 +"29129" 4 384 3 1 1 1 4 1 100 384 195 341 3612 3 10 0 1182 4 75 146 0 1 +"29130" 4 384 3 1 1 1 4 1 100 384 341 85 3355 7 9 1 1209 4 75 256 0 0 +"29131" 4 384 3 1 1 1 4 1 100 384 85 166 3557 9 1 1 0 5 95 81 1 1 +"29132" 4 384 3 1 1 1 4 1 100 384 166 291 2505 2 3 0 2208 4 75 125 0 1 +"29133" 4 384 3 1 1 1 4 1 100 384 291 567 3548 1 8 0 1249 5 95 276 0 1 +"29134" 4 384 3 1 1 1 4 1 100 384 567 539 3819 5 4 0 771 1 5 28 1 0 +"29135" 4 384 3 1 1 1 4 1 100 384 539 404 3371 4 2 0 913 2 25 135 1 0 +"29136" 4 384 3 1 1 1 4 1 100 384 404 384 3434 6 3 0 668 1 5 20 1 0 +"29137" 4 384 4 0 1 0 1 1 100 128 100 150 5356 2 7 0 1731 3 50 50 0 1 +"29138" 4 384 4 0 1 0 1 1 100 128 150 263 5734 3 7 0 1379 2 75 113 0 1 +"29139" 4 384 4 0 1 0 1 1 100 128 263 513 7670 2 8 0 3038 1 95 250 0 1 +"29140" 4 384 4 0 1 0 1 1 100 128 513 128 2595 8 9 1 885 2 75 385 0 0 +"29141" 4 384 5 1 1 0 1 1 100 747 100 195 12923 1 3 0 1074 1 95 95 0 1 +"29142" 4 384 5 1 1 0 1 1 100 747 195 380 2612 6 5 1 2021 1 95 185 1 1 +"29143" 4 384 5 1 1 0 1 1 100 747 380 741 1794 2 8 0 1018 1 95 361 0 1 +"29144" 4 384 5 1 1 0 1 1 100 747 741 185 3911 8 9 1 1507 2 75 556 0 0 +"29145" 4 384 5 1 1 0 1 1 100 747 185 361 2833 3 4 0 1223 1 95 176 0 1 +"29146" 4 384 5 1 1 0 1 1 100 747 361 379 5042 5 7 0 2865 5 5 18 0 1 +"29147" 4 384 5 1 1 0 1 1 100 747 379 569 2494 7 4 1 1693 3 50 190 1 1 
+"29148" 4 384 5 1 1 0 1 1 100 747 569 427 2458 4 1 0 739 4 25 142 1 0 +"29149" 4 384 5 1 1 0 1 1 100 747 427 747 3725 9 6 1 1017 2 75 320 1 1 +"29150" 4 384 5 1 1 0 2 1 100 497 100 195 2738 8 1 1 1429 1 95 95 1 1 +"29151" 4 384 5 1 1 0 2 1 100 497 195 293 3750 6 2 1 1290 3 50 98 1 1 +"29152" 4 384 5 1 1 0 2 1 100 497 293 73 4378 7 9 1 2807 2 75 220 0 0 +"29153" 4 384 5 1 1 0 2 1 100 497 73 142 2518 2 10 0 1204 1 95 69 0 1 +"29154" 4 384 5 1 1 0 2 1 100 497 142 135 4640 5 3 0 986 5 5 7 1 0 +"29155" 4 384 5 1 1 0 2 1 100 497 135 67 4184 4 2 0 3007 3 50 68 1 0 +"29156" 4 384 5 1 1 0 2 1 100 497 67 131 3213 3 5 0 1258 1 95 64 0 1 +"29157" 4 384 5 1 1 0 2 1 100 497 131 255 2067 9 4 1 862 1 95 124 1 1 +"29158" 4 384 5 1 1 0 2 1 100 497 255 497 2258 1 7 0 676 1 95 242 0 1 +"29159" 4 384 5 1 1 0 3 1 100 6 100 195 3953 7 5 1 1219 1 95 95 1 1 +"29160" 4 384 5 1 1 0 3 1 100 6 195 10 2502 2 1 0 469 1 95 185 1 0 +"29161" 4 384 5 1 1 0 3 1 100 6 10 20 1500 8 6 1 594 1 95 10 1 1 +"29162" 4 384 5 1 1 0 3 1 100 6 20 35 2235 4 7 0 391 2 75 15 0 1 +"29163" 4 384 5 1 1 0 3 1 100 6 35 68 2745 3 10 0 3044 1 95 33 0 1 +"29164" 4 384 5 1 1 0 3 1 100 6 68 3 2200 6 8 1 1677 1 95 65 0 0 +"29165" 4 384 5 1 1 0 3 1 100 6 3 6 5411 9 2 1 3503 1 95 3 1 1 +"29166" 4 384 5 1 1 0 3 1 100 6 6 3 12605 5 3 0 860 3 50 3 1 0 +"29167" 4 384 5 1 1 0 3 1 100 6 3 6 6144 1 10 0 3281 1 95 3 0 1 +"29168" 4 384 5 1 1 0 4 1 100 1135 100 195 8000 2 3 0 1858 1 95 95 0 1 +"29169" 4 384 5 1 1 0 4 1 100 1135 195 293 3318 7 4 1 998 3 50 98 1 1 +"29170" 4 384 5 1 1 0 4 1 100 1135 293 146 4636 3 1 0 2957 3 50 147 1 0 +"29171" 4 384 5 1 1 0 4 1 100 1135 146 285 2261 1 9 0 1455 1 95 139 0 1 +"29172" 4 384 5 1 1 0 4 1 100 1135 285 556 3127 8 7 1 1192 1 95 271 1 1 +"29173" 4 384 5 1 1 0 4 1 100 1135 556 1084 2172 9 2 1 1347 1 95 528 1 1 +"29174" 4 384 5 1 1 0 4 1 100 1135 1084 1138 3784 5 6 0 0 5 5 54 0 1 +"29175" 4 384 5 1 1 0 4 1 100 1135 1138 1081 6673 6 8 1 0 5 5 57 0 0 +"29176" 4 384 5 1 1 0 4 1 100 1135 1081 1135 5058 4 7 0 0 5 5 54 0 1 +"29177" 4 385 2 0 1 0 1 1 100 65 100 175 7690 2 7 0 1273 2 75 75 0 1 +"29178" 4 385 2 0 1 0 1 1 100 65 175 263 6394 3 7 0 1816 3 50 88 0 1 +"29179" 4 385 2 0 1 0 1 1 100 65 263 131 4670 2 8 1 982 3 50 132 0 0 +"29180" 4 385 2 0 1 0 1 1 100 65 131 65 6260 8 9 1 992 3 50 66 0 0 +"29181" 4 385 3 1 1 0 1 1 100 418 100 150 5760 1 3 0 1548 3 50 50 0 1 +"29182" 4 385 3 1 1 0 1 1 100 418 150 112 2111 6 5 0 1057 4 25 38 1 0 +"29183" 4 385 3 1 1 0 1 1 100 418 112 196 3219 2 8 0 840 2 75 84 0 1 +"29184" 4 385 3 1 1 0 1 1 100 418 196 98 5861 8 9 1 1123 3 50 98 0 0 +"29185" 4 385 3 1 1 0 1 1 100 418 98 191 4259 3 4 0 917 1 95 93 0 1 +"29186" 4 385 3 1 1 0 1 1 100 418 191 372 3159 5 7 0 742 1 95 181 0 1 +"29187" 4 385 3 1 1 0 1 1 100 418 372 558 3224 7 4 1 869 3 50 186 1 1 +"29188" 4 385 3 1 1 0 1 1 100 418 558 837 1817 4 1 1 2352 3 50 279 1 1 +"29189" 4 385 3 1 1 0 1 1 100 418 837 418 2365 9 6 0 914 3 50 419 1 0 +"29190" 4 385 3 1 1 0 2 1 100 820 100 175 2257 8 1 1 2164 2 75 75 1 1 +"29191" 4 385 3 1 1 0 2 1 100 820 175 341 3050 6 2 1 1310 1 95 166 1 1 +"29192" 4 385 3 1 1 0 2 1 100 820 341 665 2237 7 9 0 696 1 95 324 0 1 +"29193" 4 385 3 1 1 0 2 1 100 820 665 998 3436 2 10 0 2750 3 50 333 0 1 +"29194" 4 385 3 1 1 0 2 1 100 820 998 748 3017 5 3 0 1135 4 25 250 1 0 +"29195" 4 385 3 1 1 0 2 1 100 820 748 1122 3942 4 2 1 1025 3 50 374 1 1 +"29196" 4 385 3 1 1 0 2 1 100 820 1122 561 3037 3 5 1 2642 3 50 561 0 0 +"29197" 4 385 3 1 1 0 2 1 100 820 561 1094 2729 9 4 1 701 1 95 533 1 1 +"29198" 4 385 3 1 1 0 2 1 100 820 1094 820 
3793 1 7 1 890 4 25 274 0 0 +"29199" 4 385 3 1 1 0 3 0 100 0 100 5 2872 7 5 0 1501 1 95 95 1 0 +"29200" 4 385 3 1 1 0 3 0 100 0 5 0 1931 2 1 0 692 1 95 5 1 0 +"29201" 4 385 3 1 1 0 4 1 100 936 100 150 2925 2 3 0 1519 3 50 50 0 1 +"29202" 4 385 3 1 1 0 4 1 100 936 150 225 5116 7 4 1 2742 3 50 75 1 1 +"29203" 4 385 3 1 1 0 4 1 100 936 225 338 1761 3 1 1 3251 3 50 113 1 1 +"29204" 4 385 3 1 1 0 4 1 100 936 338 507 2734 1 9 0 1425 3 50 169 0 1 +"29205" 4 385 3 1 1 0 4 1 100 936 507 253 2371 8 7 0 2736 3 50 254 1 0 +"29206" 4 385 3 1 1 0 4 1 100 936 253 493 4635 9 2 1 852 1 95 240 1 1 +"29207" 4 385 3 1 1 0 4 1 100 936 493 246 5193 5 6 1 2704 3 50 247 0 0 +"29208" 4 385 3 1 1 0 4 1 100 936 246 480 3284 6 8 0 1133 1 95 234 0 1 +"29209" 4 385 3 1 1 0 4 1 100 936 480 936 1717 4 7 0 841 1 95 456 0 1 +"29210" 4 385 4 0 1 1 1 1 100 148 100 125 4216 8 3 1 1301 2 25 25 1 1 +"29211" 4 385 4 0 1 1 1 1 100 148 125 188 3197 3 7 0 1243 3 50 63 0 1 +"29212" 4 385 4 0 1 1 1 1 100 148 188 141 1549 8 2 0 3913 2 25 47 1 0 +"29213" 4 385 4 0 1 1 1 1 100 148 141 148 2740 2 1 1 824 1 5 7 1 1 +"29214" 4 385 5 1 1 1 1 1 100 361 100 125 2148 9 7 1 859 2 25 25 1 1 +"29215" 4 385 5 1 1 1 1 1 100 361 125 188 1496 4 8 0 1230 3 50 63 0 1 +"29216" 4 385 5 1 1 1 1 1 100 361 188 141 4873 8 2 0 1447 2 25 47 1 0 +"29217" 4 385 5 1 1 1 1 1 100 361 141 70 1031 2 1 0 1372 3 50 71 1 0 +"29218" 4 385 5 1 1 1 1 1 100 361 70 123 2008 7 6 1 822 4 75 53 1 1 +"29219" 4 385 5 1 1 1 1 1 100 361 123 185 8060 5 3 1 930 3 50 62 1 1 +"29220" 4 385 5 1 1 1 1 1 100 361 185 231 3195 3 6 0 761 2 25 46 0 1 +"29221" 4 385 5 1 1 1 1 1 100 361 231 289 1698 6 9 0 1527 2 25 58 0 1 +"29222" 4 385 5 1 1 1 1 1 100 361 289 361 2900 1 4 0 695 2 25 72 0 1 +"29223" 4 385 5 1 1 1 2 1 100 247 100 75 3601 2 9 1 716 2 25 25 0 0 +"29224" 4 385 5 1 1 1 2 1 100 247 75 71 3015 4 10 1 861 1 5 4 0 0 +"29225" 4 385 5 1 1 1 2 1 100 247 71 53 681 3 1 0 758 2 25 18 1 0 +"29226" 4 385 5 1 1 1 2 1 100 247 53 93 1655 8 6 1 1254 4 75 40 1 1 +"29227" 4 385 5 1 1 1 2 1 100 247 93 140 1664 5 7 0 951 3 50 47 0 1 +"29228" 4 385 5 1 1 1 2 1 100 247 140 175 2543 6 8 0 724 2 25 35 0 1 +"29229" 4 385 5 1 1 1 2 1 100 247 175 219 1622 7 5 1 1398 2 25 44 1 1 +"29230" 4 385 5 1 1 1 2 1 100 247 219 329 5281 1 10 0 1516 3 50 110 0 1 +"29231" 4 385 5 1 1 1 2 1 100 247 329 247 2017 9 3 0 1031 2 25 82 1 0 +"29232" 4 385 5 1 1 1 3 1 100 130 100 125 2120 3 5 0 969 2 25 25 0 1 +"29233" 4 385 5 1 1 1 3 1 100 130 125 62 1561 8 9 1 836 3 50 63 0 0 +"29234" 4 385 5 1 1 1 3 1 100 130 62 78 1762 2 4 0 2548 2 25 16 0 1 +"29235" 4 385 5 1 1 1 3 1 100 130 78 39 4656 6 3 0 849 3 50 39 1 0 +"29236" 4 385 5 1 1 1 3 1 100 130 39 37 1589 7 6 0 831 1 5 2 1 0 +"29237" 4 385 5 1 1 1 3 1 100 130 37 39 1381 4 2 1 905 1 5 2 1 1 +"29238" 4 385 5 1 1 1 3 1 100 130 39 59 899 1 8 0 1701 3 50 20 0 1 +"29239" 4 385 5 1 1 1 3 1 100 130 59 74 1602 5 7 0 755 2 25 15 0 1 +"29240" 4 385 5 1 1 1 3 1 100 130 74 130 2460 9 5 1 869 4 75 56 1 1 +"29241" 4 385 5 1 1 1 4 1 100 536 100 175 2618 8 7 1 963 4 75 75 1 1 +"29242" 4 385 5 1 1 1 4 1 100 536 175 131 2987 3 10 1 2434 2 25 44 0 0 +"29243" 4 385 5 1 1 1 4 1 100 536 131 98 1708 7 9 1 880 2 25 33 0 0 +"29244" 4 385 5 1 1 1 4 1 100 536 98 147 1531 9 1 1 1649 3 50 49 1 1 +"29245" 4 385 5 1 1 1 4 1 100 536 147 257 1863 2 3 0 1412 4 75 110 0 1 +"29246" 4 385 5 1 1 1 4 1 100 536 257 386 2214 1 8 0 1835 3 50 129 0 1 +"29247" 4 385 5 1 1 1 4 1 100 536 386 289 1752 5 4 0 1579 2 25 97 1 0 +"29248" 4 385 5 1 1 1 4 1 100 536 289 275 1109 4 2 0 1006 1 5 14 1 0 +"29249" 4 385 5 1 1 1 4 1 100 536 
275 536 4218 6 3 1 2992 5 95 261 1 1 +"29250" 4 386 2 0 1 1 1 1 100 94 100 125 21936 8 3 1 1434 2 25 25 1 1 +"29251" 4 386 2 0 1 1 1 1 100 94 125 119 3291 3 7 1 4956 1 5 6 0 0 +"29252" 4 386 2 0 1 1 1 1 100 94 119 125 2365 8 2 1 1274 1 5 6 1 1 +"29253" 4 386 2 0 1 1 1 1 100 94 125 94 1874 2 1 0 927 2 25 31 1 0 +"29254" 4 386 3 1 1 1 1 1 100 155 100 105 3269 9 7 1 1350 1 5 5 1 1 +"29255" 4 386 3 1 1 1 1 1 100 155 105 110 1299 4 8 0 1155 1 5 5 0 1 +"29256" 4 386 3 1 1 1 1 1 100 155 110 116 1730 8 2 1 893 1 5 6 1 1 +"29257" 4 386 3 1 1 1 1 1 100 155 116 110 1624 2 1 0 760 1 5 6 1 0 +"29258" 4 386 3 1 1 1 1 1 100 155 110 138 2065 7 6 1 620 2 25 28 1 1 +"29259" 4 386 3 1 1 1 1 1 100 155 138 131 1827 5 3 0 482 1 5 7 1 0 +"29260" 4 386 3 1 1 1 1 1 100 155 131 138 4128 3 6 0 905 1 5 7 0 1 +"29261" 4 386 3 1 1 1 1 1 100 155 138 103 2027 6 9 1 560 2 25 35 0 0 +"29262" 4 386 3 1 1 1 1 1 100 155 103 155 1843 1 4 0 753 3 50 52 0 1 +"29263" 4 386 3 1 1 1 2 1 100 186 100 75 2834 2 9 1 593 2 25 25 0 0 +"29264" 4 386 3 1 1 1 2 1 100 186 75 94 1202 4 10 0 446 2 25 19 0 1 +"29265" 4 386 3 1 1 1 2 1 100 186 94 99 1614 3 1 1 447 1 5 5 1 1 +"29266" 4 386 3 1 1 1 2 1 100 186 99 94 1663 8 6 0 454 1 5 5 1 0 +"29267" 4 386 3 1 1 1 2 1 100 186 94 99 1741 5 7 0 448 1 5 5 0 1 +"29268" 4 386 3 1 1 1 2 1 100 186 99 104 3392 6 8 0 460 1 5 5 0 1 +"29269" 4 386 3 1 1 1 2 1 100 186 104 99 1579 7 5 0 1142 1 5 5 1 0 +"29270" 4 386 3 1 1 1 2 1 100 186 99 149 2308 1 10 0 2045 3 50 50 0 1 +"29271" 4 386 3 1 1 1 2 1 100 186 149 186 2177 9 3 1 903 2 25 37 1 1 +"29272" 4 386 3 1 1 1 3 1 100 142 100 95 2064 3 5 1 739 1 5 5 0 0 +"29273" 4 386 3 1 1 1 3 1 100 142 95 100 1661 8 9 0 739 1 5 5 0 1 +"29274" 4 386 3 1 1 1 3 1 100 142 100 95 1589 2 4 1 718 1 5 5 0 0 +"29275" 4 386 3 1 1 1 3 1 100 142 95 71 1114 6 3 0 451 2 25 24 1 0 +"29276" 4 386 3 1 1 1 3 1 100 142 71 89 1387 7 6 1 670 2 25 18 1 1 +"29277" 4 386 3 1 1 1 3 1 100 142 89 44 1700 4 2 0 1337 3 50 45 1 0 +"29278" 4 386 3 1 1 1 3 1 100 142 44 77 3191 1 8 0 1157 4 75 33 0 1 +"29279" 4 386 3 1 1 1 3 1 100 142 77 81 1788 5 7 0 565 1 5 4 0 1 +"29280" 4 386 3 1 1 1 3 1 100 142 81 142 1679 9 5 1 1189 4 75 61 1 1 +"29281" 4 386 3 1 1 1 4 1 100 335 100 105 2113 8 7 1 833 1 5 5 1 1 +"29282" 4 386 3 1 1 1 4 1 100 335 105 100 2063 3 10 1 394 1 5 5 0 0 +"29283" 4 386 3 1 1 1 4 1 100 335 100 75 1317 7 9 1 1036 2 25 25 0 0 +"29284" 4 386 3 1 1 1 4 1 100 335 75 146 1696 9 1 1 1414 5 95 71 1 1 +"29285" 4 386 3 1 1 1 4 1 100 335 146 183 5632 2 3 0 658 2 25 37 0 1 +"29286" 4 386 3 1 1 1 4 1 100 335 183 320 2095 1 8 0 1170 4 75 137 0 1 +"29287" 4 386 3 1 1 1 4 1 100 335 320 336 2279 5 4 1 728 1 5 16 1 1 +"29288" 4 386 3 1 1 1 4 1 100 335 336 353 2083 4 2 1 821 1 5 17 1 1 +"29289" 4 386 3 1 1 1 4 1 100 335 353 335 2220 6 3 0 488 1 5 18 1 0 +"29290" 4 386 4 0 1 0 1 1 100 37 100 195 3334 2 7 0 1517 1 95 95 0 1 +"29291" 4 386 4 0 1 0 1 1 100 37 195 380 6394 3 7 0 1207 1 95 185 0 1 +"29292" 4 386 4 0 1 0 1 1 100 37 380 741 2066 2 8 0 717 1 95 361 0 1 +"29293" 4 386 4 0 1 0 1 1 100 37 741 37 1736 8 9 1 1284 1 95 704 0 0 +"29294" 4 386 5 1 1 0 1 0 100 0 100 195 1890 1 3 0 757 1 95 95 0 1 +"29295" 4 386 5 1 1 0 1 0 100 0 195 380 1398 6 5 1 473 1 95 185 1 1 +"29296" 4 386 5 1 1 0 1 0 100 0 380 741 1784 2 8 0 704 1 95 361 0 1 +"29297" 4 386 5 1 1 0 1 0 100 0 741 37 1356 8 9 1 577 1 95 704 0 0 +"29298" 4 386 5 1 1 0 1 0 100 0 37 72 3018 3 4 0 856 1 95 35 0 1 +"29299" 4 386 5 1 1 0 1 0 100 0 72 4 2100 5 7 1 1550 1 95 68 0 0 +"29300" 4 386 5 1 1 0 1 0 100 0 4 8 1497 7 4 1 1176 1 95 4 1 1 +"29301" 4 386 5 1 1 0 
1 0 100 0 8 0 1685 4 1 0 1498 1 95 8 1 0 +"29302" 4 386 5 1 1 0 2 0 100 0 100 195 2830 8 1 1 988 1 95 95 1 1 +"29303" 4 386 5 1 1 0 2 0 100 0 195 380 1595 6 2 1 428 1 95 185 1 1 +"29304" 4 386 5 1 1 0 2 0 100 0 380 19 1757 7 9 1 903 1 95 361 0 0 +"29305" 4 386 5 1 1 0 2 0 100 0 19 37 1714 2 10 0 686 1 95 18 0 1 +"29306" 4 386 5 1 1 0 2 0 100 0 37 2 1059 5 3 0 422 1 95 35 1 0 +"29307" 4 386 5 1 1 0 2 0 100 0 2 0 863 4 2 0 916 1 95 2 1 0 +"29308" 4 386 5 1 1 0 3 1 100 31 100 195 1572 7 5 1 358 1 95 95 1 1 +"29309" 4 386 5 1 1 0 3 1 100 31 195 10 1274 2 1 0 427 1 95 185 1 0 +"29310" 4 386 5 1 1 0 3 1 100 31 10 20 1188 8 6 1 305 1 95 10 1 1 +"29311" 4 386 5 1 1 0 3 1 100 31 20 39 1000 4 7 0 317 1 95 19 0 1 +"29312" 4 386 5 1 1 0 3 1 100 31 39 76 1572 3 10 0 446 1 95 37 0 1 +"29313" 4 386 5 1 1 0 3 1 100 31 76 4 1550 6 8 1 314 1 95 72 0 0 +"29314" 4 386 5 1 1 0 3 1 100 31 4 8 1313 9 2 1 360 1 95 4 1 1 +"29315" 4 386 5 1 1 0 3 1 100 31 8 16 1148 5 3 1 532 1 95 8 1 1 +"29316" 4 386 5 1 1 0 3 1 100 31 16 31 544 1 10 0 292 1 95 15 0 1 +"29317" 4 386 5 1 1 0 4 0 100 0 100 195 2231 2 3 0 786 1 95 95 0 1 +"29318" 4 386 5 1 1 0 4 0 100 0 195 49 1954 7 4 0 1380 2 75 146 1 0 +"29319" 4 386 5 1 1 0 4 0 100 0 49 2 1935 3 1 0 505 1 95 47 1 0 +"29320" 4 386 5 1 1 0 4 0 100 0 2 4 1245 1 9 0 776 1 95 2 0 1 +"29321" 4 386 5 1 1 0 4 0 100 0 4 0 1344 8 7 0 630 1 95 4 1 0 +"29322" 4 389 2 0 1 1 1 1 100 542 100 150 8000 8 3 1 986 3 50 50 1 1 +"29323" 4 389 2 0 1 1 1 1 100 542 150 293 7880 3 7 0 0 5 95 143 0 1 +"29324" 4 389 2 0 1 1 1 1 100 542 293 571 1943 8 2 1 0 5 95 278 1 1 +"29325" 4 389 2 0 1 1 1 1 100 542 571 542 3152 2 1 0 2247 1 5 29 1 0 +"29326" 4 389 3 1 1 1 1 1 100 437 100 195 9345 9 7 1 0 5 95 95 1 1 +"29327" 4 389 3 1 1 1 1 1 100 437 195 205 2418 4 8 0 1949 1 5 10 0 1 +"29328" 4 389 3 1 1 1 1 1 100 437 205 215 2253 8 2 1 1079 1 5 10 1 1 +"29329" 4 389 3 1 1 1 1 1 100 437 215 226 5121 2 1 1 451 1 5 11 1 1 +"29330" 4 389 3 1 1 1 1 1 100 437 226 237 1947 7 6 1 836 1 5 11 1 1 +"29331" 4 389 3 1 1 1 1 1 100 437 237 225 2050 5 3 0 390 1 5 12 1 0 +"29332" 4 389 3 1 1 1 1 1 100 437 225 236 1280 3 6 0 985 1 5 11 0 1 +"29333" 4 389 3 1 1 1 1 1 100 437 236 224 3904 6 9 1 500 1 5 12 0 0 +"29334" 4 389 3 1 1 1 1 1 100 437 224 437 3210 1 4 0 0 5 95 213 0 1 +"29335" 4 389 3 1 1 1 2 1 100 256 100 105 1686 2 9 0 476 1 5 5 0 1 +"29336" 4 389 3 1 1 1 2 1 100 256 105 100 1866 4 10 1 391 1 5 5 0 0 +"29337" 4 389 3 1 1 1 2 1 100 256 100 95 833 3 1 0 319 1 5 5 1 0 +"29338" 4 389 3 1 1 1 2 1 100 256 95 119 1685 8 6 1 1411 2 25 24 1 1 +"29339" 4 389 3 1 1 1 2 1 100 256 119 125 908 5 7 0 343 1 5 6 0 1 +"29340" 4 389 3 1 1 1 2 1 100 256 125 119 1196 6 8 1 457 1 5 6 0 0 +"29341" 4 389 3 1 1 1 2 1 100 256 119 125 1480 7 5 1 351 1 5 6 1 1 +"29342" 4 389 3 1 1 1 2 1 100 256 125 244 2027 1 10 0 0 5 95 119 0 1 +"29343" 4 389 3 1 1 1 2 1 100 256 244 256 2344 9 3 1 382 1 5 12 1 1 +"29344" 4 389 3 1 1 1 3 1 100 116 100 105 1768 3 5 0 378 1 5 5 0 1 +"29345" 4 389 3 1 1 1 3 1 100 116 105 100 1101 8 9 1 294 1 5 5 0 0 +"29346" 4 389 3 1 1 1 3 1 100 116 100 105 965 2 4 0 460 1 5 5 0 1 +"29347" 4 389 3 1 1 1 3 1 100 116 105 110 1076 6 3 1 262 1 5 5 1 1 +"29348" 4 389 3 1 1 1 3 1 100 116 110 116 871 7 6 1 883 1 5 6 1 1 +"29349" 4 389 3 1 1 1 3 1 100 116 116 110 1118 4 2 0 310 1 5 6 1 0 +"29350" 4 389 3 1 1 1 3 1 100 116 110 116 1493 1 8 0 403 1 5 6 0 1 +"29351" 4 389 3 1 1 1 3 1 100 116 116 110 2021 5 7 1 307 1 5 6 0 0 +"29352" 4 389 3 1 1 1 3 1 100 116 110 116 1745 9 5 1 804 1 5 6 1 1 +"29353" 4 389 3 1 1 1 4 1 100 125 100 105 1702 8 7 1 1259 
1 5 5 1 1 +"29354" 4 389 3 1 1 1 4 1 100 125 105 110 1196 3 10 0 433 1 5 5 0 1 +"29355" 4 389 3 1 1 1 4 1 100 125 110 104 890 7 9 1 405 1 5 6 0 0 +"29356" 4 389 3 1 1 1 4 1 100 125 104 109 2983 9 1 1 366 1 5 5 1 1 +"29357" 4 389 3 1 1 1 4 1 100 125 109 114 898 2 3 0 437 1 5 5 0 1 +"29358" 4 389 3 1 1 1 4 1 100 125 114 120 837 1 8 0 843 1 5 6 0 1 +"29359" 4 389 3 1 1 1 4 1 100 125 120 126 1599 5 4 1 1249 1 5 6 1 1 +"29360" 4 389 3 1 1 1 4 1 100 125 126 132 3131 4 2 1 348 1 5 6 1 1 +"29361" 4 389 3 1 1 1 4 1 100 125 132 125 1841 6 3 0 390 1 5 7 1 0 +"29362" 4 389 4 0 1 0 1 1 100 33 100 175 2787 2 7 0 3404 2 75 75 0 1 +"29363" 4 389 4 0 1 0 1 1 100 33 175 341 4079 3 7 0 714 1 95 166 0 1 +"29364" 4 389 4 0 1 0 1 1 100 33 341 665 1278 2 8 0 1015 1 95 324 0 1 +"29365" 4 389 4 0 1 0 1 1 100 33 665 33 1524 8 9 1 528 1 95 632 0 0 +"29366" 4 389 5 1 1 0 1 0 100 0 100 195 1292 1 3 0 388 1 95 95 0 1 +"29367" 4 389 5 1 1 0 1 0 100 0 195 380 942 6 5 1 375 1 95 185 1 1 +"29368" 4 389 5 1 1 0 1 0 100 0 380 741 947 2 8 0 403 1 95 361 0 1 +"29369" 4 389 5 1 1 0 1 0 100 0 741 37 1053 8 9 1 464 1 95 704 0 0 +"29370" 4 389 5 1 1 0 1 0 100 0 37 72 1088 3 4 0 457 1 95 35 0 1 +"29371" 4 389 5 1 1 0 1 0 100 0 72 4 1105 5 7 1 242 1 95 68 0 0 +"29372" 4 389 5 1 1 0 1 0 100 0 4 0 771 7 4 0 284 1 95 4 1 0 +"29373" 4 389 5 1 1 0 2 1 100 31 100 195 1046 8 1 1 299 1 95 95 1 1 +"29374" 4 389 5 1 1 0 2 1 100 31 195 380 1009 6 2 1 287 1 95 185 1 1 +"29375" 4 389 5 1 1 0 2 1 100 31 380 19 1014 7 9 1 892 1 95 361 0 0 +"29376" 4 389 5 1 1 0 2 1 100 31 19 37 886 2 10 0 842 1 95 18 0 1 +"29377" 4 389 5 1 1 0 2 1 100 31 37 72 1823 5 3 1 325 1 95 35 1 1 +"29378" 4 389 5 1 1 0 2 1 100 31 72 4 1123 4 2 0 403 1 95 68 1 0 +"29379" 4 389 5 1 1 0 2 1 100 31 4 8 1125 3 5 0 269 1 95 4 0 1 +"29380" 4 389 5 1 1 0 2 1 100 31 8 16 877 9 4 1 313 1 95 8 1 1 +"29381" 4 389 5 1 1 0 2 1 100 31 16 31 831 1 7 0 245 1 95 15 0 1 +"29382" 4 389 5 1 1 0 3 0 100 0 100 195 1815 7 5 1 215 1 95 95 1 1 +"29383" 4 389 5 1 1 0 3 0 100 0 195 10 690 2 1 0 991 1 95 185 1 0 +"29384" 4 389 5 1 1 0 3 0 100 0 10 20 917 8 6 1 414 1 95 10 1 1 +"29385" 4 389 5 1 1 0 3 0 100 0 20 39 1248 4 7 0 354 1 95 19 0 1 +"29386" 4 389 5 1 1 0 3 0 100 0 39 76 759 3 10 0 320 1 95 37 0 1 +"29387" 4 389 5 1 1 0 3 0 100 0 76 4 830 6 8 1 284 1 95 72 0 0 +"29388" 4 389 5 1 1 0 3 0 100 0 4 8 804 9 2 1 307 1 95 4 1 1 +"29389" 4 389 5 1 1 0 3 0 100 0 8 0 1217 5 3 0 405 1 95 8 1 0 +"29390" 4 389 5 1 1 0 4 1 100 27 100 195 1821 2 3 0 435 1 95 95 0 1 +"29391" 4 389 5 1 1 0 4 1 100 27 195 380 2044 7 4 1 552 1 95 185 1 1 +"29392" 4 389 5 1 1 0 4 1 100 27 380 19 1123 3 1 0 291 1 95 361 1 0 +"29393" 4 389 5 1 1 0 4 1 100 27 19 37 826 1 9 0 417 1 95 18 0 1 +"29394" 4 389 5 1 1 0 4 1 100 27 37 72 762 8 7 1 364 1 95 35 1 1 +"29395" 4 389 5 1 1 0 4 1 100 27 72 140 816 9 2 1 295 1 95 68 1 1 +"29396" 4 389 5 1 1 0 4 1 100 27 140 273 1118 5 6 0 334 1 95 133 0 1 +"29397" 4 389 5 1 1 0 4 1 100 27 273 14 1531 6 8 1 336 1 95 259 0 0 +"29398" 4 389 5 1 1 0 4 1 100 27 14 27 1325 4 7 0 299 1 95 13 0 1 +"29399" 4 393 2 0 1 1 1 1 100 176 100 150 4732 8 3 1 2809 3 50 50 1 1 +"29400" 4 393 2 0 1 1 1 1 100 176 150 188 9672 3 7 0 1186 2 25 38 0 1 +"29401" 4 393 2 0 1 1 1 1 100 176 188 235 2799 8 2 1 679 2 25 47 1 1 +"29402" 4 393 2 0 1 1 1 1 100 176 235 176 2116 2 1 0 1621 2 25 59 1 0 +"29403" 4 393 3 1 1 1 1 1 100 381 100 150 10631 9 7 1 1059 3 50 50 1 1 +"29404" 4 393 3 1 1 1 1 1 100 381 150 188 2614 4 8 0 1975 2 25 38 0 1 +"29405" 4 393 3 1 1 1 1 1 100 381 188 235 3238 8 2 1 666 2 25 47 1 1 +"29406" 4 393 3 1 1 1 1 
1 100 381 235 294 2311 2 1 1 727 2 25 59 1 1 +"29407" 4 393 3 1 1 1 1 1 100 381 294 309 4809 7 6 1 1250 1 5 15 1 1 +"29408" 4 393 3 1 1 1 1 1 100 381 309 232 2308 5 3 0 764 2 25 77 1 0 +"29409" 4 393 3 1 1 1 1 1 100 381 232 290 10485 3 6 0 1200 2 25 58 0 1 +"29410" 4 393 3 1 1 1 1 1 100 381 290 305 6348 6 9 0 1260 1 5 15 0 1 +"29411" 4 393 3 1 1 1 1 1 100 381 305 381 2293 1 4 0 1021 2 25 76 0 1 +"29412" 4 393 3 1 1 1 2 1 100 804 100 150 5529 2 9 0 648 3 50 50 0 1 +"29413" 4 393 3 1 1 1 2 1 100 804 150 188 3319 4 10 0 2230 2 25 38 0 1 +"29414" 4 393 3 1 1 1 2 1 100 804 188 282 4250 3 1 1 781 3 50 94 1 1 +"29415" 4 393 3 1 1 1 2 1 100 804 282 211 3877 8 6 0 522 2 25 71 1 0 +"29416" 4 393 3 1 1 1 2 1 100 804 211 317 2375 5 7 0 796 3 50 106 0 1 +"29417" 4 393 3 1 1 1 2 1 100 804 317 238 3147 6 8 1 1147 2 25 79 0 0 +"29418" 4 393 3 1 1 1 2 1 100 804 238 357 1938 7 5 1 732 3 50 119 1 1 +"29419" 4 393 3 1 1 1 2 1 100 804 357 536 2278 1 10 0 1112 3 50 179 0 1 +"29420" 4 393 3 1 1 1 2 1 100 804 536 804 1976 9 3 1 903 3 50 268 1 1 +"29421" 4 393 3 1 1 1 3 1 100 42 100 195 3069 3 5 0 929 5 95 95 0 1 +"29422" 4 393 3 1 1 1 3 1 100 42 195 49 1650 8 9 1 634 4 75 146 0 0 +"29423" 4 393 3 1 1 1 3 1 100 42 49 96 2737 2 4 0 709 5 95 47 0 1 +"29424" 4 393 3 1 1 1 3 1 100 42 96 144 2184 6 3 1 734 3 50 48 1 1 +"29425" 4 393 3 1 1 1 3 1 100 42 144 216 2825 7 6 1 560 3 50 72 1 1 +"29426" 4 393 3 1 1 1 3 1 100 42 216 54 1523 4 2 0 720 4 75 162 1 0 +"29427" 4 393 3 1 1 1 3 1 100 42 54 95 1972 1 8 0 1356 4 75 41 0 1 +"29428" 4 393 3 1 1 1 3 1 100 42 95 24 3833 5 7 1 689 4 75 71 0 0 +"29429" 4 393 3 1 1 1 3 1 100 42 24 42 1837 9 5 1 1108 4 75 18 1 1 +"29430" 4 393 3 1 1 1 4 1 100 725 100 150 3238 8 7 1 481 3 50 50 1 1 +"29431" 4 393 3 1 1 1 4 1 100 725 150 225 1720 3 10 0 700 3 50 75 0 1 +"29432" 4 393 3 1 1 1 4 1 100 725 225 169 1920 7 9 1 1275 2 25 56 0 0 +"29433" 4 393 3 1 1 1 4 1 100 725 169 330 1707 9 1 1 616 5 95 161 1 1 +"29434" 4 393 3 1 1 1 4 1 100 725 330 495 1664 2 3 0 599 3 50 165 0 1 +"29435" 4 393 3 1 1 1 4 1 100 725 495 619 2304 1 8 0 560 2 25 124 0 1 +"29436" 4 393 3 1 1 1 4 1 100 725 619 774 1618 5 4 1 707 2 25 155 1 1 +"29437" 4 393 3 1 1 1 4 1 100 725 774 580 2311 4 2 0 910 2 25 194 1 0 +"29438" 4 393 3 1 1 1 4 1 100 725 580 725 1944 6 3 1 497 2 25 145 1 1 +"29439" 4 393 4 0 1 0 1 1 100 220 100 150 2281 2 7 0 1123 3 50 50 0 1 +"29440" 4 393 4 0 1 0 1 1 100 220 150 293 8707 3 7 0 2410 1 95 143 0 1 +"29441" 4 393 4 0 1 0 1 1 100 220 293 440 1765 2 8 0 1127 3 50 147 0 1 +"29442" 4 393 4 0 1 0 1 1 100 220 440 220 2430 8 9 1 2680 3 50 220 0 0 +"29443" 4 393 5 1 1 0 1 1 100 55 100 195 1890 1 3 0 837 1 95 95 0 1 +"29444" 4 393 5 1 1 0 1 1 100 55 195 341 1371 6 5 1 543 2 75 146 1 1 +"29445" 4 393 5 1 1 0 1 1 100 55 341 597 1475 2 8 0 898 2 75 256 0 1 +"29446" 4 393 5 1 1 0 1 1 100 55 597 298 1341 8 9 1 435 3 50 299 0 0 +"29447" 4 393 5 1 1 0 1 1 100 55 298 581 1507 3 4 0 2752 1 95 283 0 1 +"29448" 4 393 5 1 1 0 1 1 100 55 581 290 2475 5 7 1 443 3 50 291 0 0 +"29449" 4 393 5 1 1 0 1 1 100 55 290 566 1955 7 4 1 1676 1 95 276 1 1 +"29450" 4 393 5 1 1 0 1 1 100 55 566 28 1946 4 1 0 675 1 95 538 1 0 +"29451" 4 393 5 1 1 0 1 1 100 55 28 55 1712 9 6 1 444 1 95 27 1 1 +"29452" 4 393 5 1 1 0 2 0 100 0 100 195 1702 8 1 1 795 1 95 95 1 1 +"29453" 4 393 5 1 1 0 2 0 100 0 195 97 1314 6 2 0 482 3 50 98 1 0 +"29454" 4 393 5 1 1 0 2 0 100 0 97 5 1208 7 9 1 556 1 95 92 0 0 +"29455" 4 393 5 1 1 0 2 0 100 0 5 10 1767 2 10 0 728 1 95 5 0 1 +"29456" 4 393 5 1 1 0 2 0 100 0 10 0 3515 5 3 0 449 1 95 10 1 0 +"29457" 4 393 5 1 1 
0 3 0 100 0 100 195 2129 7 5 1 1001 1 95 95 1 1 +"29458" 4 393 5 1 1 0 3 0 100 0 195 10 1335 2 1 0 444 1 95 185 1 0 +"29459" 4 393 5 1 1 0 3 0 100 0 10 20 1220 8 6 1 352 1 95 10 1 1 +"29460" 4 393 5 1 1 0 3 0 100 0 20 39 1196 4 7 0 625 1 95 19 0 1 +"29461" 4 393 5 1 1 0 3 0 100 0 39 76 904 3 10 0 530 1 95 37 0 1 +"29462" 4 393 5 1 1 0 3 0 100 0 76 4 967 6 8 1 646 1 95 72 0 0 +"29463" 4 393 5 1 1 0 3 0 100 0 4 8 1172 9 2 1 362 1 95 4 1 1 +"29464" 4 393 5 1 1 0 3 0 100 0 8 0 1230 5 3 0 417 1 95 8 1 0 +"29465" 4 393 5 1 1 0 4 1 100 2681 100 195 12804 2 3 0 2121 1 95 95 0 1 +"29466" 4 393 5 1 1 0 4 1 100 2681 195 380 1017 7 4 1 742 1 95 185 1 1 +"29467" 4 393 5 1 1 0 4 1 100 2681 380 190 1015 3 1 0 469 3 50 190 1 0 +"29468" 4 393 5 1 1 0 4 1 100 2681 190 371 1123 1 9 0 659 1 95 181 0 1 +"29469" 4 393 5 1 1 0 4 1 100 2681 371 723 1545 8 7 1 1437 1 95 352 1 1 +"29470" 4 393 5 1 1 0 4 1 100 2681 723 1410 1134 9 2 1 445 1 95 687 1 1 +"29471" 4 393 5 1 1 0 4 1 100 2681 1410 2750 1285 5 6 0 613 1 95 1340 0 1 +"29472" 4 393 5 1 1 0 4 1 100 2681 2750 1375 4174 6 8 1 469 3 50 1375 0 0 +"29473" 4 393 5 1 1 0 4 1 100 2681 1375 2681 1604 4 7 0 1365 1 95 1306 0 1 +"29474" 4 401 2 0 1 1 1 1 100 185 100 125 12727 8 3 1 1182 2 25 25 1 1 +"29475" 4 401 2 0 1 1 1 1 100 185 125 156 4339 3 7 0 1571 2 25 31 0 1 +"29476" 4 401 2 0 1 1 1 1 100 185 156 195 4925 8 2 1 1065 2 25 39 1 1 +"29477" 4 401 2 0 1 1 1 1 100 185 195 185 3795 2 1 0 3220 1 5 10 1 0 +"29478" 4 401 3 1 1 1 1 1 100 275 100 105 3165 9 7 1 1651 1 5 5 1 1 +"29479" 4 401 3 1 1 1 1 1 100 275 105 110 5266 4 8 0 1284 1 5 5 0 1 +"29480" 4 401 3 1 1 1 1 1 100 275 110 138 5413 8 2 1 1313 2 25 28 1 1 +"29481" 4 401 3 1 1 1 1 1 100 275 138 103 2231 2 1 0 2099 2 25 35 1 0 +"29482" 4 401 3 1 1 1 1 1 100 275 103 129 4340 7 6 1 1403 2 25 26 1 1 +"29483" 4 401 3 1 1 1 1 1 100 275 129 97 2318 5 3 0 1405 2 25 32 1 0 +"29484" 4 401 3 1 1 1 1 1 100 275 97 146 5440 3 6 0 1235 3 50 49 0 1 +"29485" 4 401 3 1 1 1 1 1 100 275 146 183 2661 6 9 0 1204 2 25 37 0 1 +"29486" 4 401 3 1 1 1 1 1 100 275 183 275 4648 1 4 0 969 3 50 92 0 1 +"29487" 4 401 3 1 1 1 2 1 100 506 100 150 3529 2 9 0 2215 3 50 50 0 1 +"29488" 4 401 3 1 1 1 2 1 100 506 150 225 2155 4 10 0 867 3 50 75 0 1 +"29489" 4 401 3 1 1 1 2 1 100 506 225 112 3143 3 1 0 1111 3 50 113 1 0 +"29490" 4 401 3 1 1 1 2 1 100 506 112 168 2951 8 6 1 2768 3 50 56 1 1 +"29491" 4 401 3 1 1 1 2 1 100 506 168 126 2146 5 7 1 1641 2 25 42 0 0 +"29492" 4 401 3 1 1 1 2 1 100 506 126 94 4240 6 8 1 2548 2 25 32 0 0 +"29493" 4 401 3 1 1 1 2 1 100 506 94 165 3840 7 5 1 1527 4 75 71 1 1 +"29494" 4 401 3 1 1 1 2 1 100 506 165 289 1723 1 10 0 863 4 75 124 0 1 +"29495" 4 401 3 1 1 1 2 1 100 506 289 506 1955 9 3 1 1556 4 75 217 1 1 +"29496" 4 401 3 1 1 1 3 1 100 62 100 195 3253 3 5 0 1648 5 95 95 0 1 +"29497" 4 401 3 1 1 1 3 1 100 62 195 49 1670 8 9 1 1013 4 75 146 0 0 +"29498" 4 401 3 1 1 1 3 1 100 62 49 96 2770 2 4 0 0 5 95 47 0 1 +"29499" 4 401 3 1 1 1 3 1 100 62 96 187 3667 6 3 1 0 5 95 91 1 1 +"29500" 4 401 3 1 1 1 3 1 100 62 187 365 3131 7 6 1 0 5 95 178 1 1 +"29501" 4 401 3 1 1 1 3 1 100 62 365 18 3812 4 2 0 2380 5 95 347 1 0 +"29502" 4 401 3 1 1 1 3 1 100 62 18 27 1937 1 8 0 1907 3 50 9 0 1 +"29503" 4 401 3 1 1 1 3 1 100 62 27 41 3509 5 7 0 3660 3 50 14 0 1 +"29504" 4 401 3 1 1 1 3 1 100 62 41 62 1507 9 5 1 1169 3 50 21 1 1 +"29505" 4 401 3 1 1 1 4 1 100 402 100 150 3734 8 7 1 2642 3 50 50 1 1 +"29506" 4 401 3 1 1 1 4 1 100 402 150 225 2073 3 10 0 1500 3 50 75 0 1 +"29507" 4 401 3 1 1 1 4 1 100 402 225 169 2178 7 9 1 3096 2 25 56 0 0 
+"29508" 4 401 3 1 1 1 4 1 100 402 169 254 1618 9 1 1 1043 3 50 85 1 1 +"29509" 4 401 3 1 1 1 4 1 100 402 254 381 1366 2 3 0 635 3 50 127 0 1 +"29510" 4 401 3 1 1 1 4 1 100 402 381 572 3251 1 8 0 918 3 50 191 0 1 +"29511" 4 401 3 1 1 1 4 1 100 402 572 715 1839 5 4 1 813 2 25 143 1 1 +"29512" 4 401 3 1 1 1 4 1 100 402 715 536 3380 4 2 0 969 2 25 179 1 0 +"29513" 4 401 3 1 1 1 4 1 100 402 536 402 2696 6 3 0 813 2 25 134 1 0 +"29514" 4 401 4 0 1 0 1 1 100 37 100 195 2606 2 7 0 2184 1 95 95 0 1 +"29515" 4 401 4 0 1 0 1 1 100 37 195 380 1621 3 7 0 973 1 95 185 0 1 +"29516" 4 401 4 0 1 0 1 1 100 37 380 741 1491 2 8 0 810 1 95 361 0 1 +"29517" 4 401 4 0 1 0 1 1 100 37 741 37 1216 8 9 1 828 1 95 704 0 0 +"29518" 4 401 5 1 1 0 1 1 100 18 100 195 1842 1 3 0 808 1 95 95 0 1 +"29519" 4 401 5 1 1 0 1 1 100 18 195 380 2127 6 5 1 759 1 95 185 1 1 +"29520" 4 401 5 1 1 0 1 1 100 18 380 741 1136 2 8 0 767 1 95 361 0 1 +"29521" 4 401 5 1 1 0 1 1 100 18 741 185 1184 8 9 1 2981 2 75 556 0 0 +"29522" 4 401 5 1 1 0 1 1 100 18 185 361 1316 3 4 0 2451 1 95 176 0 1 +"29523" 4 401 5 1 1 0 1 1 100 18 361 90 1404 5 7 1 623 2 75 271 0 0 +"29524" 4 401 5 1 1 0 1 1 100 18 90 176 1422 7 4 1 1352 1 95 86 1 1 +"29525" 4 401 5 1 1 0 1 1 100 18 176 9 1197 4 1 0 722 1 95 167 1 0 +"29526" 4 401 5 1 1 0 1 1 100 18 9 18 1268 9 6 1 1421 1 95 9 1 1 +"29527" 4 401 5 1 1 0 2 1 100 31 100 195 1533 8 1 1 2817 1 95 95 1 1 +"29528" 4 401 5 1 1 0 2 1 100 31 195 380 1113 6 2 1 874 1 95 185 1 1 +"29529" 4 401 5 1 1 0 2 1 100 31 380 19 1413 7 9 1 719 1 95 361 0 0 +"29530" 4 401 5 1 1 0 2 1 100 31 19 37 1899 2 10 0 694 1 95 18 0 1 +"29531" 4 401 5 1 1 0 2 1 100 31 37 2 1058 5 3 0 685 1 95 35 1 0 +"29532" 4 401 5 1 1 0 2 1 100 31 2 4 1226 4 2 1 590 1 95 2 1 1 +"29533" 4 401 5 1 1 0 2 1 100 31 4 8 1167 3 5 0 660 1 95 4 0 1 +"29534" 4 401 5 1 1 0 2 1 100 31 8 16 1474 9 4 1 681 1 95 8 1 1 +"29535" 4 401 5 1 1 0 2 1 100 31 16 31 1158 1 7 0 1053 1 95 15 0 1 +"29536" 4 401 5 1 1 0 3 0 100 1 100 195 2748 7 5 1 766 1 95 95 1 1 +"29537" 4 401 5 1 1 0 3 0 100 1 195 10 1527 2 1 0 3676 1 95 185 1 0 +"29538" 4 401 5 1 1 0 3 0 100 1 10 18 2139 8 6 1 827 2 75 8 1 1 +"29539" 4 401 5 1 1 0 3 0 100 1 18 1 2809 4 7 1 721 1 95 17 0 0 +"29540" 4 401 5 1 1 0 4 1 100 14 100 195 1458 2 3 0 882 1 95 95 0 1 +"29541" 4 401 5 1 1 0 4 1 100 14 195 380 1373 7 4 1 2257 1 95 185 1 1 +"29542" 4 401 5 1 1 0 4 1 100 14 380 95 1131 3 1 0 1179 2 75 285 1 0 +"29543" 4 401 5 1 1 0 4 1 100 14 95 185 1296 1 9 0 2304 1 95 90 0 1 +"29544" 4 401 5 1 1 0 4 1 100 14 185 324 2408 8 7 1 739 2 75 139 1 1 +"29545" 4 401 5 1 1 0 4 1 100 14 324 567 1325 9 2 1 780 2 75 243 1 1 +"29546" 4 401 5 1 1 0 4 1 100 14 567 142 1492 5 6 1 754 2 75 425 0 0 +"29547" 4 401 5 1 1 0 4 1 100 14 142 7 1604 6 8 1 877 1 95 135 0 0 +"29548" 4 401 5 1 1 0 4 1 100 14 7 14 1056 4 7 0 739 1 95 7 0 1 +"29549" 4 413 2 0 1 1 1 1 100 176 100 150 6138 8 3 1 2524 3 50 50 1 1 +"29550" 4 413 2 0 1 1 1 1 100 176 150 188 12633 3 7 0 2494 2 25 38 0 1 +"29551" 4 413 2 0 1 1 1 1 100 176 188 235 2004 8 2 1 1411 2 25 47 1 1 +"29552" 4 413 2 0 1 1 1 1 100 176 235 176 1701 2 1 0 1346 2 25 59 1 0 +"29553" 4 413 3 1 1 1 1 1 100 524 100 195 7287 9 7 1 1075 5 95 95 1 1 +"29554" 4 413 3 1 1 1 1 1 100 524 195 205 3060 4 8 0 617 1 5 10 0 1 +"29555" 4 413 3 1 1 1 1 1 100 524 205 256 1737 8 2 1 748 2 25 51 1 1 +"29556" 4 413 3 1 1 1 1 1 100 524 256 269 1310 2 1 1 557 1 5 13 1 1 +"29557" 4 413 3 1 1 1 1 1 100 524 269 336 1512 7 6 1 669 2 25 67 1 1 +"29558" 4 413 3 1 1 1 1 1 100 524 336 353 2107 5 3 1 1100 1 5 17 1 1 +"29559" 4 413 3 1 1 1 1 1 
100 524 353 441 1485 3 6 0 788 2 25 88 0 1 +"29560" 4 413 3 1 1 1 1 1 100 524 441 419 1700 6 9 1 1081 1 5 22 0 0 +"29561" 4 413 3 1 1 1 1 1 100 524 419 524 1258 1 4 0 796 2 25 105 0 1 +"29562" 4 413 3 1 1 1 2 1 100 240 100 125 2917 2 9 0 1416 2 25 25 0 1 +"29563" 4 413 3 1 1 1 2 1 100 240 125 119 1146 4 10 1 1785 1 5 6 0 0 +"29564" 4 413 3 1 1 1 2 1 100 240 119 89 1289 3 1 0 705 2 25 30 1 0 +"29565" 4 413 3 1 1 1 2 1 100 240 89 174 1733 8 6 1 0 5 95 85 1 1 +"29566" 4 413 3 1 1 1 2 1 100 240 174 183 2128 5 7 0 928 1 5 9 0 1 +"29567" 4 413 3 1 1 1 2 1 100 240 183 174 1689 6 8 1 1974 1 5 9 0 0 +"29568" 4 413 3 1 1 1 2 1 100 240 174 183 1117 7 5 1 1973 1 5 9 1 1 +"29569" 4 413 3 1 1 1 2 1 100 240 183 192 1277 1 10 0 614 1 5 9 0 1 +"29570" 4 413 3 1 1 1 2 1 100 240 192 240 1798 9 3 1 854 2 25 48 1 1 +"29571" 4 413 3 1 1 1 3 1 100 116 100 105 1992 3 5 0 502 1 5 5 0 1 +"29572" 4 413 3 1 1 1 3 1 100 116 105 100 1012 8 9 1 1418 1 5 5 0 0 +"29573" 4 413 3 1 1 1 3 1 100 116 100 105 1283 2 4 0 1228 1 5 5 0 1 +"29574" 4 413 3 1 1 1 3 1 100 116 105 110 1007 6 3 1 1220 1 5 5 1 1 +"29575" 4 413 3 1 1 1 3 1 100 116 110 116 1496 7 6 1 582 1 5 6 1 1 +"29576" 4 413 3 1 1 1 3 1 100 116 116 110 1088 4 2 0 562 1 5 6 1 0 +"29577" 4 413 3 1 1 1 3 1 100 116 110 116 788 1 8 0 572 1 5 6 0 1 +"29578" 4 413 3 1 1 1 3 1 100 116 116 110 918 5 7 1 912 1 5 6 0 0 +"29579" 4 413 3 1 1 1 3 1 100 116 110 116 849 9 5 1 670 1 5 6 1 1 +"29580" 4 413 3 1 1 1 4 1 100 125 100 105 1721 8 7 1 781 1 5 5 1 1 +"29581" 4 413 3 1 1 1 4 1 100 125 105 110 717 3 10 0 800 1 5 5 0 1 +"29582" 4 413 3 1 1 1 4 1 100 125 110 104 1215 7 9 1 948 1 5 6 0 0 +"29583" 4 413 3 1 1 1 4 1 100 125 104 109 780 9 1 1 444 1 5 5 1 1 +"29584" 4 413 3 1 1 1 4 1 100 125 109 114 700 2 3 0 840 1 5 5 0 1 +"29585" 4 413 3 1 1 1 4 1 100 125 114 108 877 1 8 1 337 1 5 6 0 0 +"29586" 4 413 3 1 1 1 4 1 100 125 108 113 1134 5 4 1 1061 1 5 5 1 1 +"29587" 4 413 3 1 1 1 4 1 100 125 113 119 1064 4 2 1 435 1 5 6 1 1 +"29588" 4 413 3 1 1 1 4 1 100 125 119 125 812 6 3 1 939 1 5 6 1 1 +"29589" 4 413 4 0 1 0 1 1 100 23 100 150 4573 2 7 0 489 3 50 50 0 1 +"29590" 4 413 4 0 1 0 1 1 100 23 150 263 2951 3 7 0 920 2 75 113 0 1 +"29591" 4 413 4 0 1 0 1 1 100 23 263 460 1298 2 8 0 554 2 75 197 0 1 +"29592" 4 413 4 0 1 0 1 1 100 23 460 23 1269 8 9 1 1062 1 95 437 0 0 +"29593" 4 413 5 1 1 0 1 0 100 0 100 195 4121 1 3 0 403 1 95 95 0 1 +"29594" 4 413 5 1 1 0 1 0 100 0 195 380 908 6 5 1 943 1 95 185 1 1 +"29595" 4 413 5 1 1 0 1 0 100 0 380 741 863 2 8 0 484 1 95 361 0 1 +"29596" 4 413 5 1 1 0 1 0 100 0 741 37 858 8 9 1 599 1 95 704 0 0 +"29597" 4 413 5 1 1 0 1 0 100 0 37 72 2136 3 4 0 520 1 95 35 0 1 +"29598" 4 413 5 1 1 0 1 0 100 0 72 4 1036 5 7 1 856 1 95 68 0 0 +"29599" 4 413 5 1 1 0 1 0 100 0 4 8 860 7 4 1 489 1 95 4 1 1 +"29600" 4 413 5 1 1 0 1 0 100 0 8 0 1239 4 1 0 458 1 95 8 1 0 +"29601" 4 413 5 1 1 0 2 1 100 31 100 195 2238 8 1 1 2329 1 95 95 1 1 +"29602" 4 413 5 1 1 0 2 1 100 31 195 380 1192 6 2 1 532 1 95 185 1 1 +"29603" 4 413 5 1 1 0 2 1 100 31 380 19 849 7 9 1 349 1 95 361 0 0 +"29604" 4 413 5 1 1 0 2 1 100 31 19 37 802 2 10 0 778 1 95 18 0 1 +"29605" 4 413 5 1 1 0 2 1 100 31 37 72 986 5 3 1 960 1 95 35 1 1 +"29606" 4 413 5 1 1 0 2 1 100 31 72 4 847 4 2 0 372 1 95 68 1 0 +"29607" 4 413 5 1 1 0 2 1 100 31 4 8 984 3 5 0 366 1 95 4 0 1 +"29608" 4 413 5 1 1 0 2 1 100 31 8 16 802 9 4 1 369 1 95 8 1 1 +"29609" 4 413 5 1 1 0 2 1 100 31 16 31 669 1 7 0 1010 1 95 15 0 1 +"29610" 4 413 5 1 1 0 3 0 100 0 100 195 1347 7 5 1 385 1 95 95 1 1 +"29611" 4 413 5 1 1 0 3 0 100 0 195 10 756 2 1 0 422 
1 95 185 1 0 +"29612" 4 413 5 1 1 0 3 0 100 0 10 20 807 8 6 1 734 1 95 10 1 1 +"29613" 4 413 5 1 1 0 3 0 100 0 20 39 601 4 7 0 434 1 95 19 0 1 +"29614" 4 413 5 1 1 0 3 0 100 0 39 76 1082 3 10 0 764 1 95 37 0 1 +"29615" 4 413 5 1 1 0 3 0 100 0 76 4 789 6 8 1 749 1 95 72 0 0 +"29616" 4 413 5 1 1 0 3 0 100 0 4 8 1343 9 2 1 740 1 95 4 1 1 +"29617" 4 413 5 1 1 0 3 0 100 0 8 0 1262 5 3 0 576 1 95 8 1 0 +"29618" 4 413 5 1 1 0 4 1 100 20 100 195 1306 2 3 0 538 1 95 95 0 1 +"29619" 4 413 5 1 1 0 4 1 100 20 195 380 805 7 4 1 404 1 95 185 1 1 +"29620" 4 413 5 1 1 0 4 1 100 20 380 19 614 3 1 0 334 1 95 361 1 0 +"29621" 4 413 5 1 1 0 4 1 100 20 19 37 612 1 9 0 429 1 95 18 0 1 +"29622" 4 413 5 1 1 0 4 1 100 20 37 72 781 8 7 1 846 1 95 35 1 1 +"29623" 4 413 5 1 1 0 4 1 100 20 72 140 954 9 2 1 386 1 95 68 1 1 +"29624" 4 413 5 1 1 0 4 1 100 20 140 210 1117 5 6 0 685 3 50 70 0 1 +"29625" 4 413 5 1 1 0 4 1 100 20 210 10 786 6 8 1 282 1 95 200 0 0 +"29626" 4 413 5 1 1 0 4 1 100 20 10 20 978 4 7 0 386 1 95 10 0 1 +"29627" 4 414 2 0 1 1 1 1 100 295 100 150 12353 8 3 1 59 3 50 50 1 1 +"29628" 4 414 2 0 1 1 1 1 100 295 150 225 4118 3 7 0 2192 3 50 75 0 1 +"29629" 4 414 2 0 1 1 1 1 100 295 225 281 2861 8 2 1 410 2 25 56 1 1 +"29630" 4 414 2 0 1 1 1 1 100 295 281 295 3020 2 1 1 1218 1 5 14 1 1 +"29631" 4 414 3 1 1 1 1 1 100 1047 100 195 3010 9 7 1 0 5 95 95 1 1 +"29632" 4 414 3 1 1 1 1 1 100 1047 195 244 1448 4 8 0 1465 2 25 49 0 1 +"29633" 4 414 3 1 1 1 1 1 100 1047 244 366 2017 8 2 1 687 3 50 122 1 1 +"29634" 4 414 3 1 1 1 1 1 100 1047 366 384 1923 2 1 1 2109 1 5 18 1 1 +"29635" 4 414 3 1 1 1 1 1 100 1047 384 480 4198 7 6 1 1516 2 25 96 1 1 +"29636" 4 414 3 1 1 1 1 1 100 1047 480 504 3353 5 3 1 2599 1 5 24 1 1 +"29637" 4 414 3 1 1 1 1 1 100 1047 504 630 1376 3 6 0 909 2 25 126 0 1 +"29638" 4 414 3 1 1 1 1 1 100 1047 630 598 3438 6 9 1 1056 1 5 32 0 0 +"29639" 4 414 3 1 1 1 1 1 100 1047 598 1047 1859 1 4 0 1224 4 75 449 0 1 +"29640" 4 414 3 1 1 1 2 1 100 364 100 150 3260 2 9 0 614 3 50 50 0 1 +"29641" 4 414 3 1 1 1 2 1 100 364 150 225 1455 4 10 0 447 3 50 75 0 1 +"29642" 4 414 3 1 1 1 2 1 100 364 225 169 3335 3 1 0 4214 2 25 56 1 0 +"29643" 4 414 3 1 1 1 2 1 100 364 169 254 1643 8 6 1 1234 3 50 85 1 1 +"29644" 4 414 3 1 1 1 2 1 100 364 254 190 3525 5 7 1 2803 2 25 64 0 0 +"29645" 4 414 3 1 1 1 2 1 100 364 190 95 1547 6 8 1 687 3 50 95 0 0 +"29646" 4 414 3 1 1 1 2 1 100 364 95 166 1307 7 5 1 1611 4 75 71 1 1 +"29647" 4 414 3 1 1 1 2 1 100 364 166 291 1635 1 10 0 1036 4 75 125 0 1 +"29648" 4 414 3 1 1 1 2 1 100 364 291 364 1520 9 3 1 1637 2 25 73 1 1 +"29649" 4 414 3 1 1 1 3 1 100 368 100 175 2813 3 5 0 1756 4 75 75 0 1 +"29650" 4 414 3 1 1 1 3 1 100 368 175 87 1462 8 9 1 1046 3 50 88 0 0 +"29651" 4 414 3 1 1 1 3 1 100 368 87 91 2874 2 4 0 567 1 5 4 0 1 +"29652" 4 414 3 1 1 1 3 1 100 368 91 96 3465 6 3 1 442 1 5 5 1 1 +"29653" 4 414 3 1 1 1 3 1 100 368 96 168 2250 7 6 1 813 4 75 72 1 1 +"29654" 4 414 3 1 1 1 3 1 100 368 168 126 2020 4 2 0 626 2 25 42 1 0 +"29655" 4 414 3 1 1 1 3 1 100 368 126 221 2169 1 8 0 752 4 75 95 0 1 +"29656" 4 414 3 1 1 1 3 1 100 368 221 210 3658 5 7 1 561 1 5 11 0 0 +"29657" 4 414 3 1 1 1 3 1 100 368 210 368 1411 9 5 1 526 4 75 158 1 1 +"29658" 4 414 3 1 1 1 4 1 100 1927 100 175 3500 8 7 1 628 4 75 75 1 1 +"29659" 4 414 3 1 1 1 4 1 100 1927 175 263 4243 3 10 0 1455 3 50 88 0 1 +"29660" 4 414 3 1 1 1 4 1 100 1927 263 250 2575 7 9 1 498 1 5 13 0 0 +"29661" 4 414 3 1 1 1 4 1 100 1927 250 488 984 9 1 1 1463 5 95 238 1 1 +"29662" 4 414 3 1 1 1 4 1 100 1927 488 854 2190 2 3 0 1053 4 75 366 0 1 
+"29663" 4 414 3 1 1 1 4 1 100 1927 854 1665 2032 1 8 0 2566 5 95 811 0 1 +"29664" 4 414 3 1 1 1 4 1 100 1927 1665 1748 1801 5 4 1 921 1 5 83 1 1 +"29665" 4 414 3 1 1 1 4 1 100 1927 1748 1835 1587 4 2 1 452 1 5 87 1 1 +"29666" 4 414 3 1 1 1 4 1 100 1927 1835 1927 1436 6 3 1 1613 1 5 92 1 1 +"29667" 4 414 4 0 1 0 1 1 100 37 100 195 8025 2 7 0 4200 1 95 95 0 1 +"29668" 4 414 4 0 1 0 1 1 100 37 195 380 4781 3 7 0 703 1 95 185 0 1 +"29669" 4 414 4 0 1 0 1 1 100 37 380 741 2315 2 8 0 1939 1 95 361 0 1 +"29670" 4 414 4 0 1 0 1 1 100 37 741 37 1195 8 9 1 336 1 95 704 0 0 +"29671" 4 414 5 1 1 0 1 1 100 68 100 195 1844 1 3 0 394 1 95 95 0 1 +"29672" 4 414 5 1 1 0 1 1 100 68 195 380 1980 6 5 1 1028 1 95 185 1 1 +"29673" 4 414 5 1 1 0 1 1 100 68 380 741 1712 2 8 0 356 1 95 361 0 1 +"29674" 4 414 5 1 1 0 1 1 100 68 741 370 2952 8 9 1 1313 3 50 371 0 0 +"29675" 4 414 5 1 1 0 1 1 100 68 370 722 2112 3 4 0 1081 1 95 352 0 1 +"29676" 4 414 5 1 1 0 1 1 100 68 722 361 2114 5 7 1 444 3 50 361 0 0 +"29677" 4 414 5 1 1 0 1 1 100 68 361 704 2076 7 4 1 262 1 95 343 1 1 +"29678" 4 414 5 1 1 0 1 1 100 68 704 35 1311 4 1 0 286 1 95 669 1 0 +"29679" 4 414 5 1 1 0 1 1 100 68 35 68 1326 9 6 1 372 1 95 33 1 1 +"29680" 4 414 5 1 1 0 2 0 100 0 100 195 1596 8 1 1 605 1 95 95 1 1 +"29681" 4 414 5 1 1 0 2 0 100 0 195 380 1173 6 2 1 187 1 95 185 1 1 +"29682" 4 414 5 1 1 0 2 0 100 0 380 19 2623 7 9 1 240 1 95 361 0 0 +"29683" 4 414 5 1 1 0 2 0 100 0 19 37 1474 2 10 0 209 1 95 18 0 1 +"29684" 4 414 5 1 1 0 2 0 100 0 37 9 2484 5 3 0 1220 2 75 28 1 0 +"29685" 4 414 5 1 1 0 2 0 100 0 9 0 1167 4 2 0 245 1 95 9 1 0 +"29686" 4 414 5 1 1 0 3 0 100 1 100 195 2273 7 5 1 283 1 95 95 1 1 +"29687" 4 414 5 1 1 0 3 0 100 1 195 10 1203 2 1 0 247 1 95 185 1 0 +"29688" 4 414 5 1 1 0 3 0 100 1 10 20 1312 8 6 1 1288 1 95 10 1 1 +"29689" 4 414 5 1 1 0 3 0 100 1 20 1 2472 4 7 1 240 1 95 19 0 0 +"29690" 4 414 5 1 1 0 4 1 100 1544 100 195 1611 2 3 0 948 1 95 95 0 1 +"29691" 4 414 5 1 1 0 4 1 100 1544 195 380 1324 7 4 1 611 1 95 185 1 1 +"29692" 4 414 5 1 1 0 4 1 100 1544 380 285 2131 3 1 0 644 4 25 95 1 0 +"29693" 4 414 5 1 1 0 4 1 100 1544 285 556 1386 1 9 0 725 1 95 271 0 1 +"29694" 4 414 5 1 1 0 4 1 100 1544 556 1084 1232 8 7 1 267 1 95 528 1 1 +"29695" 4 414 5 1 1 0 4 1 100 1544 1084 2114 987 9 2 1 274 1 95 1030 1 1 +"29696" 4 414 5 1 1 0 4 1 100 1544 2114 1585 2618 5 6 1 3131 4 25 529 0 0 +"29697" 4 414 5 1 1 0 4 1 100 1544 1585 792 1675 6 8 1 841 3 50 793 0 0 +"29698" 4 414 5 1 1 0 4 1 100 1544 792 1544 1814 4 7 0 422 1 95 752 0 1 +"29699" 4 416 2 0 1 1 1 1 100 247 100 150 3968 8 3 1 946 3 50 50 1 1 +"29700" 4 416 2 0 1 1 1 1 100 247 150 263 7850 3 7 0 619 4 75 113 0 1 +"29701" 4 416 2 0 1 1 1 1 100 247 263 329 1514 8 2 1 880 2 25 66 1 1 +"29702" 4 416 2 0 1 1 1 1 100 247 329 247 1283 2 1 0 772 2 25 82 1 0 +"29703" 4 416 3 1 1 1 1 1 100 173 100 150 6927 9 7 1 2035 3 50 50 1 1 +"29704" 4 416 3 1 1 1 1 1 100 173 150 225 1535 4 8 0 741 3 50 75 0 1 +"29705" 4 416 3 1 1 1 1 1 100 173 225 236 1750 8 2 1 1273 1 5 11 1 1 +"29706" 4 416 3 1 1 1 1 1 100 173 236 177 682 2 1 0 543 2 25 59 1 0 +"29707" 4 416 3 1 1 1 1 1 100 173 177 168 866 7 6 0 950 1 5 9 1 0 +"29708" 4 416 3 1 1 1 1 1 100 173 168 210 46 5 3 1 914 2 25 42 1 1 +"29709" 4 416 3 1 1 1 1 1 100 173 210 199 2931 3 6 1 861 1 5 11 0 0 +"29710" 4 416 3 1 1 1 1 1 100 173 199 99 2529 6 9 1 726 3 50 100 0 0 +"29711" 4 416 3 1 1 1 1 1 100 173 99 173 513 1 4 0 654 4 75 74 0 1 +"29712" 4 416 3 1 1 1 2 1 100 497 100 195 4322 2 9 0 859 5 95 95 0 1 +"29713" 4 416 3 1 1 1 2 1 100 497 195 244 3167 4 10 0 1141 
2 25 49 0 1 +"29714" 4 416 3 1 1 1 2 1 100 497 244 256 2292 3 1 1 573 1 5 12 1 1 +"29715" 4 416 3 1 1 1 2 1 100 497 256 320 2015 8 6 1 1332 2 25 64 1 1 +"29716" 4 416 3 1 1 1 2 1 100 497 320 336 2898 5 7 0 677 1 5 16 0 1 +"29717" 4 416 3 1 1 1 2 1 100 497 336 252 2306 6 8 1 491 2 25 84 0 0 +"29718" 4 416 3 1 1 1 2 1 100 497 252 378 1784 7 5 1 842 3 50 126 1 1 +"29719" 4 416 3 1 1 1 2 1 100 497 378 473 2302 1 10 0 767 2 25 95 0 1 +"29720" 4 416 3 1 1 1 2 1 100 497 473 497 2456 9 3 1 1217 1 5 24 1 1 +"29721" 4 416 3 1 1 1 3 1 100 174 100 195 3258 3 5 0 910 5 95 95 0 1 +"29722" 4 416 3 1 1 1 3 1 100 174 195 49 1647 8 9 1 695 4 75 146 0 0 +"29723" 4 416 3 1 1 1 3 1 100 174 49 74 1099 2 4 0 2149 3 50 25 0 1 +"29724" 4 416 3 1 1 1 3 1 100 174 74 130 2500 6 3 1 2003 4 75 56 1 1 +"29725" 4 416 3 1 1 1 3 1 100 174 130 123 3433 7 6 0 428 1 5 7 1 0 +"29726" 4 416 3 1 1 1 3 1 100 174 123 92 1669 4 2 0 667 2 25 31 1 0 +"29727" 4 416 3 1 1 1 3 1 100 174 92 179 1337 1 8 0 1567 5 95 87 0 1 +"29728" 4 416 3 1 1 1 3 1 100 174 179 89 2578 5 7 1 1005 3 50 90 0 0 +"29729" 4 416 3 1 1 1 3 1 100 174 89 174 1618 9 5 1 1060 5 95 85 1 1 +"29730" 4 416 3 1 1 1 4 1 100 648 100 195 2898 8 7 1 1158 5 95 95 1 1 +"29731" 4 416 3 1 1 1 4 1 100 648 195 341 1716 3 10 0 1785 4 75 146 0 1 +"29732" 4 416 3 1 1 1 4 1 100 648 341 170 1433 7 9 1 851 3 50 171 0 0 +"29733" 4 416 3 1 1 1 4 1 100 648 170 332 1808 9 1 1 752 5 95 162 1 1 +"29734" 4 416 3 1 1 1 4 1 100 648 332 415 1657 2 3 0 1056 2 25 83 0 1 +"29735" 4 416 3 1 1 1 4 1 100 648 415 519 2626 1 8 0 1059 2 25 104 0 1 +"29736" 4 416 3 1 1 1 4 1 100 648 519 545 2022 5 4 1 436 1 5 26 1 1 +"29737" 4 416 3 1 1 1 4 1 100 648 545 518 1964 4 2 0 932 1 5 27 1 0 +"29738" 4 416 3 1 1 1 4 1 100 648 518 648 1253 6 3 1 575 2 25 130 1 1 +"29739" 4 416 4 0 1 0 1 1 100 285 100 150 3470 2 7 0 590 3 50 50 0 1 +"29740" 4 416 4 0 1 0 1 1 100 285 150 293 8943 3 7 0 1364 1 95 143 0 1 +"29741" 4 416 4 0 1 0 1 1 100 285 293 571 2496 2 8 0 1270 1 95 278 0 1 +"29742" 4 416 4 0 1 0 1 1 100 285 571 285 1447 8 9 1 1032 3 50 286 0 0 +"29743" 4 416 5 1 1 0 1 1 100 2634 100 195 2453 1 3 0 502 1 95 95 0 1 +"29744" 4 416 5 1 1 0 1 1 100 2634 195 341 2356 6 5 1 674 2 75 146 1 1 +"29745" 4 416 5 1 1 0 1 1 100 2634 341 665 1559 2 8 0 1234 1 95 324 0 1 +"29746" 4 416 5 1 1 0 1 1 100 2634 665 499 1340 8 9 1 899 4 25 166 0 0 +"29747" 4 416 5 1 1 0 1 1 100 2634 499 973 1133 3 4 0 492 1 95 474 0 1 +"29748" 4 416 5 1 1 0 1 1 100 2634 973 924 3362 5 7 1 1064 5 5 49 0 0 +"29749" 4 416 5 1 1 0 1 1 100 2634 924 1802 1755 7 4 1 1109 1 95 878 1 1 +"29750" 4 416 5 1 1 0 1 1 100 2634 1802 1351 1485 4 1 0 1316 4 25 451 1 0 +"29751" 4 416 5 1 1 0 1 1 100 2634 1351 2634 1595 9 6 1 421 1 95 1283 1 1 +"29752" 4 416 5 1 1 0 2 0 100 0 100 195 2920 8 1 1 619 1 95 95 1 1 +"29753" 4 416 5 1 1 0 2 0 100 0 195 341 4363 6 2 1 1041 2 75 146 1 1 +"29754" 4 416 5 1 1 0 2 0 100 0 341 17 1516 7 9 1 514 1 95 324 0 0 +"29755" 4 416 5 1 1 0 2 0 100 0 17 33 1022 2 10 0 995 1 95 16 0 1 +"29756" 4 416 5 1 1 0 2 0 100 0 33 2 1273 5 3 0 538 1 95 31 1 0 +"29757" 4 416 5 1 1 0 2 0 100 0 2 0 1221 4 2 0 617 1 95 2 1 0 +"29758" 4 416 5 1 1 0 3 0 100 0 100 195 3078 7 5 1 442 1 95 95 1 1 +"29759" 4 416 5 1 1 0 3 0 100 0 195 10 1162 2 1 0 944 1 95 185 1 0 +"29760" 4 416 5 1 1 0 3 0 100 0 10 20 1568 8 6 1 476 1 95 10 1 1 +"29761" 4 416 5 1 1 0 3 0 100 0 20 39 1714 4 7 0 1006 1 95 19 0 1 +"29762" 4 416 5 1 1 0 3 0 100 0 39 76 1332 3 10 0 650 1 95 37 0 1 +"29763" 4 416 5 1 1 0 3 0 100 0 76 4 1793 6 8 1 785 1 95 72 0 0 +"29764" 4 416 5 1 1 0 3 0 100 0 4 8 1033 9 
2 1 1373 1 95 4 1 1 +"29765" 4 416 5 1 1 0 3 0 100 0 8 0 1529 5 3 0 598 1 95 8 1 0 +"29766" 4 416 5 1 1 0 4 1 100 4 100 25 1492 2 3 1 3747 2 75 75 0 0 +"29767" 4 416 5 1 1 0 4 1 100 4 25 49 1685 7 4 1 563 1 95 24 1 1 +"29768" 4 416 5 1 1 0 4 1 100 4 49 2 1149 3 1 0 549 1 95 47 1 0 +"29769" 4 416 5 1 1 0 4 1 100 4 2 4 1504 1 9 0 1382 1 95 2 0 1 +"29770" 4 416 5 1 1 0 4 1 100 4 4 8 1297 8 7 1 793 1 95 4 1 1 +"29771" 4 416 5 1 1 0 4 1 100 4 8 16 2000 9 2 1 583 1 95 8 1 1 +"29772" 4 416 5 1 1 0 4 1 100 4 16 31 1528 5 6 0 1133 1 95 15 0 1 +"29773" 4 416 5 1 1 0 4 1 100 4 31 2 1493 6 8 1 659 1 95 29 0 0 +"29774" 4 416 5 1 1 0 4 1 100 4 2 4 1129 4 7 0 721 1 95 2 0 1 +"29775" 4 420 2 0 1 1 1 1 100 185 100 125 12750 8 3 1 2021 2 25 25 1 1 +"29776" 4 420 2 0 1 1 1 1 100 185 125 156 12185 3 7 0 892 2 25 31 0 1 +"29777" 4 420 2 0 1 1 1 1 100 185 156 148 2510 8 2 0 1437 1 5 8 1 0 +"29778" 4 420 2 0 1 1 1 1 100 185 148 185 1750 2 1 1 760 2 25 37 1 1 +"29779" 4 420 3 1 1 1 1 1 100 33 100 75 6372 9 7 0 1130 2 25 25 1 0 +"29780" 4 420 3 1 1 1 1 1 100 33 75 113 3290 4 8 0 1683 3 50 38 0 1 +"29781" 4 420 3 1 1 1 1 1 100 33 113 220 2504 8 2 1 0 5 95 107 1 1 +"29782" 4 420 3 1 1 1 1 1 100 33 220 11 10678 2 1 0 0 5 95 209 1 0 +"29783" 4 420 3 1 1 1 1 1 100 33 11 17 6599 7 6 1 3027 3 50 6 1 1 +"29784" 4 420 3 1 1 1 1 1 100 33 17 26 2880 5 3 1 1861 3 50 9 1 1 +"29785" 4 420 3 1 1 1 1 1 100 33 26 39 2954 3 6 0 2099 3 50 13 0 1 +"29786" 4 420 3 1 1 1 1 1 100 33 39 19 1580 6 9 1 1351 3 50 20 0 0 +"29787" 4 420 3 1 1 1 1 1 100 33 19 33 2588 1 4 0 1519 4 75 14 0 1 +"29788" 4 420 3 1 1 1 2 1 100 153 100 150 3405 2 9 0 602 3 50 50 0 1 +"29789" 4 420 3 1 1 1 2 1 100 153 150 112 10731 4 10 1 1394 2 25 38 0 0 +"29790" 4 420 3 1 1 1 2 1 100 153 112 106 4020 3 1 0 1483 1 5 6 1 0 +"29791" 4 420 3 1 1 1 2 1 100 153 106 159 2633 8 6 1 1850 3 50 53 1 1 +"29792" 4 420 3 1 1 1 2 1 100 153 159 79 4959 5 7 1 1465 3 50 80 0 0 +"29793" 4 420 3 1 1 1 2 1 100 153 79 39 3048 6 8 1 1445 3 50 40 0 0 +"29794" 4 420 3 1 1 1 2 1 100 153 39 68 2949 7 5 1 1559 4 75 29 1 1 +"29795" 4 420 3 1 1 1 2 1 100 153 68 102 2314 1 10 0 778 3 50 34 0 1 +"29796" 4 420 3 1 1 1 2 1 100 153 102 153 3490 9 3 1 762 3 50 51 1 1 +"29797" 4 420 3 1 1 1 3 1 100 259 100 50 2505 3 5 1 1576 3 50 50 0 0 +"29798" 4 420 3 1 1 1 3 1 100 259 50 75 2194 8 9 0 3320 3 50 25 0 1 +"29799" 4 420 3 1 1 1 3 1 100 259 75 146 2919 2 4 0 2082 5 95 71 0 1 +"29800" 4 420 3 1 1 1 3 1 100 259 146 73 3268 6 3 0 2254 3 50 73 1 0 +"29801" 4 420 3 1 1 1 3 1 100 259 73 18 2520 7 6 0 2158 4 75 55 1 0 +"29802" 4 420 3 1 1 1 3 1 100 259 18 35 4004 4 2 1 0 5 95 17 1 1 +"29803" 4 420 3 1 1 1 3 1 100 259 35 68 5004 1 8 0 0 5 95 33 0 1 +"29804" 4 420 3 1 1 1 3 1 100 259 68 133 4755 5 7 0 2231 5 95 65 0 1 +"29805" 4 420 3 1 1 1 3 1 100 259 133 259 7715 9 5 1 0 5 95 126 1 1 +"29806" 4 420 3 1 1 1 4 0 100 0 100 5 2005 8 7 0 0 5 95 95 1 0 +"29807" 4 420 3 1 1 1 4 0 100 0 5 0 4139 3 10 1 0 5 95 5 0 0 +"29808" 4 420 4 0 1 0 1 0 100 1 100 25 1996 2 7 1 3995 2 75 75 0 0 +"29809" 4 420 4 0 1 0 1 0 100 1 25 1 2090 3 7 1 1732 1 95 24 0 0 +"29810" 4 420 5 1 1 0 1 0 100 0 100 5 1524 1 3 1 1225 1 95 95 0 0 +"29811" 4 420 5 1 1 0 1 0 100 0 5 0 2331 6 5 0 786 1 95 5 1 0 +"29812" 4 420 5 1 1 0 2 0 100 1 100 5 2283 8 1 0 620 1 95 95 1 0 +"29813" 4 420 5 1 1 0 2 0 100 1 5 10 1660 6 2 1 538 1 95 5 1 1 +"29814" 4 420 5 1 1 0 2 0 100 1 10 20 3686 7 9 0 1020 1 95 10 0 1 +"29815" 4 420 5 1 1 0 2 0 100 1 20 1 2553 2 10 1 1541 1 95 19 0 0 +"29816" 4 420 5 1 1 0 3 0 100 0 100 5 1613 7 5 0 1334 1 95 95 1 0 +"29817" 4 420 5 1 1 
0 3 0 100 0 5 10 2043 2 1 1 968 1 95 5 1 1 +"29818" 4 420 5 1 1 0 3 0 100 0 10 0 2175 8 6 0 1077 1 95 10 1 0 +"29819" 4 420 5 1 1 0 4 1 100 40743 100 195 2740 2 3 0 727 1 95 95 0 1 +"29820" 4 420 5 1 1 0 4 1 100 40743 195 380 1602 7 4 1 819 1 95 185 1 1 +"29821" 4 420 5 1 1 0 4 1 100 40743 380 741 2442 3 1 1 1204 1 95 361 1 1 +"29822" 4 420 5 1 1 0 4 1 100 40743 741 1445 2024 1 9 0 749 1 95 704 0 1 +"29823" 4 420 5 1 1 0 4 1 100 40743 1445 2818 1574 8 7 1 1113 1 95 1373 1 1 +"29824" 4 420 5 1 1 0 4 1 100 40743 2818 5495 4321 9 2 1 1153 1 95 2677 1 1 +"29825" 4 420 5 1 1 0 4 1 100 40743 5495 10715 4888 5 6 0 1141 1 95 5220 0 1 +"29826" 4 420 5 1 1 0 4 1 100 40743 10715 20894 3092 6 8 0 967 1 95 10179 0 1 +"29827" 4 420 5 1 1 0 4 1 100 40743 20894 40743 4326 4 7 0 797 1 95 19849 0 1 +"29828" 4 424 2 0 1 1 1 1 100 140 100 150 3754 8 3 1 1501 3 50 50 1 1 +"29829" 4 424 2 0 1 1 1 1 100 140 150 225 10594 3 7 0 984 3 50 75 0 1 +"29830" 4 424 2 0 1 1 1 1 100 140 225 281 1742 8 2 1 993 2 25 56 1 1 +"29831" 4 424 2 0 1 1 1 1 100 140 281 140 3783 2 1 0 1633 3 50 141 1 0 +"29832" 4 424 3 1 1 1 1 1 100 495 100 150 4654 9 7 1 1134 3 50 50 1 1 +"29833" 4 424 3 1 1 1 1 1 100 495 150 188 1262 4 8 0 2223 2 25 38 0 1 +"29834" 4 424 3 1 1 1 1 1 100 495 188 282 1075 8 2 1 790 3 50 94 1 1 +"29835" 4 424 3 1 1 1 1 1 100 495 282 211 933 2 1 0 1638 2 25 71 1 0 +"29836" 4 424 3 1 1 1 1 1 100 495 211 264 2125 7 6 1 1389 2 25 53 1 1 +"29837" 4 424 3 1 1 1 1 1 100 495 264 251 1841 5 3 0 638 1 5 13 1 0 +"29838" 4 424 3 1 1 1 1 1 100 495 251 377 1525 3 6 0 850 3 50 126 0 1 +"29839" 4 424 3 1 1 1 1 1 100 495 377 283 1543 6 9 1 4491 2 25 94 0 0 +"29840" 4 424 3 1 1 1 1 1 100 495 283 495 1323 1 4 0 2410 4 75 212 0 1 +"29841" 4 424 3 1 1 1 2 1 100 709 100 150 3047 2 9 0 2260 3 50 50 0 1 +"29842" 4 424 3 1 1 1 2 1 100 709 150 158 1230 4 10 0 1289 1 5 8 0 1 +"29843" 4 424 3 1 1 1 2 1 100 709 158 118 1321 3 1 0 1672 2 25 40 1 0 +"29844" 4 424 3 1 1 1 2 1 100 709 118 207 1237 8 6 1 1224 4 75 89 1 1 +"29845" 4 424 3 1 1 1 2 1 100 709 207 155 1308 5 7 1 1267 2 25 52 0 0 +"29846" 4 424 3 1 1 1 2 1 100 709 155 194 2132 6 8 0 980 2 25 39 0 1 +"29847" 4 424 3 1 1 1 2 1 100 709 194 291 2227 7 5 1 1942 3 50 97 1 1 +"29848" 4 424 3 1 1 1 2 1 100 709 291 567 1375 1 10 0 1428 5 95 276 0 1 +"29849" 4 424 3 1 1 1 2 1 100 709 567 709 2229 9 3 1 1560 2 25 142 1 1 +"29850" 4 424 3 1 1 1 3 1 100 536 100 125 1410 3 5 0 1453 2 25 25 0 1 +"29851" 4 424 3 1 1 1 3 1 100 536 125 62 997 8 9 1 868 3 50 63 0 0 +"29852" 4 424 3 1 1 1 3 1 100 536 62 93 1824 2 4 0 1015 3 50 31 0 1 +"29853" 4 424 3 1 1 1 3 1 100 536 93 163 1076 6 3 1 895 4 75 70 1 1 +"29854" 4 424 3 1 1 1 3 1 100 536 163 245 1001 7 6 1 863 3 50 82 1 1 +"29855" 4 424 3 1 1 1 3 1 100 536 245 233 859 4 2 0 1320 1 5 12 1 0 +"29856" 4 424 3 1 1 1 3 1 100 536 233 408 991 1 8 0 1205 4 75 175 0 1 +"29857" 4 424 3 1 1 1 3 1 100 536 408 306 1794 5 7 1 2730 2 25 102 0 0 +"29858" 4 424 3 1 1 1 3 1 100 536 306 536 1205 9 5 1 986 4 75 230 1 1 +"29859" 4 424 3 1 1 1 4 1 100 966 100 150 1453 8 7 1 1901 3 50 50 1 1 +"29860" 4 424 3 1 1 1 4 1 100 966 150 225 856 3 10 0 1301 3 50 75 0 1 +"29861" 4 424 3 1 1 1 4 1 100 966 225 169 1156 7 9 1 1091 2 25 56 0 0 +"29862" 4 424 3 1 1 1 4 1 100 966 169 330 1532 9 1 1 1401 5 95 161 1 1 +"29863" 4 424 3 1 1 1 4 1 100 966 330 578 980 2 3 0 883 4 75 248 0 1 +"29864" 4 424 3 1 1 1 4 1 100 966 578 1127 863 1 8 0 1980 5 95 549 0 1 +"29865" 4 424 3 1 1 1 4 1 100 966 1127 1071 1443 5 4 0 1628 1 5 56 1 0 +"29866" 4 424 3 1 1 1 4 1 100 966 1071 1017 828 4 2 0 1498 1 5 54 1 0 
+"29867" 4 424 3 1 1 1 4 1 100 966 1017 966 1472 6 3 0 1727 1 5 51 1 0 +"29868" 4 424 4 0 1 0 1 1 100 110 100 150 2404 2 7 0 728 3 50 50 0 1 +"29869" 4 424 4 0 1 0 1 1 100 110 150 225 2770 3 7 0 1916 3 50 75 0 1 +"29870" 4 424 4 0 1 0 1 1 100 110 225 439 1086 2 8 0 1656 1 95 214 0 1 +"29871" 4 424 4 0 1 0 1 1 100 110 439 110 1950 8 9 1 1367 2 75 329 0 0 +"29872" 4 424 5 1 1 0 1 0 100 0 100 195 1567 1 3 0 1018 1 95 95 0 1 +"29873" 4 424 5 1 1 0 1 0 100 0 195 293 1119 6 5 1 1695 3 50 98 1 1 +"29874" 4 424 5 1 1 0 1 0 100 0 293 571 756 2 8 0 613 1 95 278 0 1 +"29875" 4 424 5 1 1 0 1 0 100 0 571 29 965 8 9 1 469 1 95 542 0 0 +"29876" 4 424 5 1 1 0 1 0 100 0 29 57 1004 3 4 0 658 1 95 28 0 1 +"29877" 4 424 5 1 1 0 1 0 100 0 57 3 1007 5 7 1 493 1 95 54 0 0 +"29878" 4 424 5 1 1 0 1 0 100 0 3 6 963 7 4 1 731 1 95 3 1 1 +"29879" 4 424 5 1 1 0 1 0 100 0 6 0 717 4 1 0 609 1 95 6 1 0 +"29880" 4 424 5 1 1 0 2 0 100 0 100 195 1394 8 1 1 734 1 95 95 1 1 +"29881" 4 424 5 1 1 0 2 0 100 0 195 380 1008 6 2 1 614 1 95 185 1 1 +"29882" 4 424 5 1 1 0 2 0 100 0 380 19 1046 7 9 1 482 1 95 361 0 0 +"29883" 4 424 5 1 1 0 2 0 100 0 19 37 736 2 10 0 699 1 95 18 0 1 +"29884" 4 424 5 1 1 0 2 0 100 0 37 9 1463 5 3 0 815 2 75 28 1 0 +"29885" 4 424 5 1 1 0 2 0 100 0 9 0 830 4 2 0 988 1 95 9 1 0 +"29886" 4 424 5 1 1 0 3 0 100 0 100 195 1233 7 5 1 1348 1 95 95 1 1 +"29887" 4 424 5 1 1 0 3 0 100 0 195 10 984 2 1 0 487 1 95 185 1 0 +"29888" 4 424 5 1 1 0 3 0 100 0 10 20 973 8 6 1 468 1 95 10 1 1 +"29889" 4 424 5 1 1 0 3 0 100 0 20 39 771 4 7 0 460 1 95 19 0 1 +"29890" 4 424 5 1 1 0 3 0 100 0 39 76 1427 3 10 0 525 1 95 37 0 1 +"29891" 4 424 5 1 1 0 3 0 100 0 76 4 993 6 8 1 488 1 95 72 0 0 +"29892" 4 424 5 1 1 0 3 0 100 0 4 8 1821 9 2 1 3940 1 95 4 1 1 +"29893" 4 424 5 1 1 0 3 0 100 0 8 0 2454 5 3 0 2647 1 95 8 1 0 +"29894" 4 424 5 1 1 0 4 1 100 27 100 195 1231 2 3 0 578 1 95 95 0 1 +"29895" 4 424 5 1 1 0 4 1 100 27 195 380 1164 7 4 1 762 1 95 185 1 1 +"29896" 4 424 5 1 1 0 4 1 100 27 380 19 939 3 1 0 2252 1 95 361 1 0 +"29897" 4 424 5 1 1 0 4 1 100 27 19 37 798 1 9 0 381 1 95 18 0 1 +"29898" 4 424 5 1 1 0 4 1 100 27 37 72 1264 8 7 1 442 1 95 35 1 1 +"29899" 4 424 5 1 1 0 4 1 100 27 72 140 882 9 2 1 413 1 95 68 1 1 +"29900" 4 424 5 1 1 0 4 1 100 27 140 273 943 5 6 0 464 1 95 133 0 1 +"29901" 4 424 5 1 1 0 4 1 100 27 273 14 1623 6 8 1 2223 1 95 259 0 0 +"29902" 4 424 5 1 1 0 4 1 100 27 14 27 691 4 7 0 504 1 95 13 0 1 +"29903" 4 434 2 0 1 1 1 1 100 140 100 150 3776 8 3 1 1538 3 50 50 1 1 +"29904" 4 434 2 0 1 1 1 1 100 140 150 225 11302 3 7 0 2074 3 50 75 0 1 +"29905" 4 434 2 0 1 1 1 1 100 140 225 281 1925 8 2 1 2831 2 25 56 1 1 +"29906" 4 434 2 0 1 1 1 1 100 140 281 140 2789 2 1 0 727 3 50 141 1 0 +"29907" 4 434 3 1 1 1 1 1 100 651 100 150 6866 9 7 1 4089 3 50 50 1 1 +"29908" 4 434 3 1 1 1 1 1 100 651 150 188 5699 4 8 0 1458 2 25 38 0 1 +"29909" 4 434 3 1 1 1 1 1 100 651 188 282 2938 8 2 1 620 3 50 94 1 1 +"29910" 4 434 3 1 1 1 1 1 100 651 282 211 2604 2 1 0 326 2 25 71 1 0 +"29911" 4 434 3 1 1 1 1 1 100 651 211 264 4603 7 6 1 590 2 25 53 1 1 +"29912" 4 434 3 1 1 1 1 1 100 651 264 330 1927 5 3 1 1185 2 25 66 1 1 +"29913" 4 434 3 1 1 1 1 1 100 651 330 413 1422 3 6 0 616 2 25 83 0 1 +"29914" 4 434 3 1 1 1 1 1 100 651 413 434 2496 6 9 0 462 1 5 21 0 1 +"29915" 4 434 3 1 1 1 1 1 100 651 434 651 2381 1 4 0 682 3 50 217 0 1 +"29916" 4 434 3 1 1 1 2 1 100 602 100 150 9773 2 9 0 1146 3 50 50 0 1 +"29917" 4 434 3 1 1 1 2 1 100 602 150 158 5729 4 10 0 1118 1 5 8 0 1 +"29918" 4 434 3 1 1 1 2 1 100 602 158 166 4574 3 1 1 530 1 5 8 1 1 
+"29919" 4 434 3 1 1 1 2 1 100 602 166 249 2206 8 6 1 483 3 50 83 1 1 +"29920" 4 434 3 1 1 1 2 1 100 602 249 187 1690 5 7 1 447 2 25 62 0 0 +"29921" 4 434 3 1 1 1 2 1 100 602 187 178 3663 6 8 1 750 1 5 9 0 0 +"29922" 4 434 3 1 1 1 2 1 100 602 178 267 1640 7 5 1 1832 3 50 89 1 1 +"29923" 4 434 3 1 1 1 2 1 100 602 267 401 1533 1 10 0 1394 3 50 134 0 1 +"29924" 4 434 3 1 1 1 2 1 100 602 401 602 2541 9 3 1 970 3 50 201 1 1 +"29925" 4 434 3 1 1 1 3 1 100 284 100 150 8040 3 5 0 551 3 50 50 0 1 +"29926" 4 434 3 1 1 1 3 1 100 284 150 75 1802 8 9 1 858 3 50 75 0 0 +"29927" 4 434 3 1 1 1 3 1 100 284 75 113 1782 2 4 0 839 3 50 38 0 1 +"29928" 4 434 3 1 1 1 3 1 100 284 113 107 2748 6 3 0 463 1 5 6 1 0 +"29929" 4 434 3 1 1 1 3 1 100 284 107 112 2142 7 6 1 613 1 5 5 1 1 +"29930" 4 434 3 1 1 1 3 1 100 284 112 168 3461 4 2 1 887 3 50 56 1 1 +"29931" 4 434 3 1 1 1 3 1 100 284 168 252 1442 1 8 0 2499 3 50 84 0 1 +"29932" 4 434 3 1 1 1 3 1 100 284 252 189 3178 5 7 1 416 2 25 63 0 0 +"29933" 4 434 3 1 1 1 3 1 100 284 189 284 1714 9 5 1 1519 3 50 95 1 1 +"29934" 4 434 3 1 1 1 4 1 100 749 100 150 2869 8 7 1 1083 3 50 50 1 1 +"29935" 4 434 3 1 1 1 4 1 100 749 150 225 1860 3 10 0 266 3 50 75 0 1 +"29936" 4 434 3 1 1 1 4 1 100 749 225 112 2146 7 9 1 298 3 50 113 0 0 +"29937" 4 434 3 1 1 1 4 1 100 749 112 196 1673 9 1 1 391 4 75 84 1 1 +"29938" 4 434 3 1 1 1 4 1 100 749 196 343 1826 2 3 0 449 4 75 147 0 1 +"29939" 4 434 3 1 1 1 4 1 100 749 343 600 2513 1 8 0 479 4 75 257 0 1 +"29940" 4 434 3 1 1 1 4 1 100 749 600 630 3526 5 4 1 1027 1 5 30 1 1 +"29941" 4 434 3 1 1 1 4 1 100 749 630 788 2488 4 2 1 391 2 25 158 1 1 +"29942" 4 434 3 1 1 1 4 1 100 749 788 749 3906 6 3 0 453 1 5 39 1 0 +"29943" 4 434 4 0 1 0 1 1 100 26 100 150 3389 2 7 0 522 3 50 50 0 1 +"29944" 4 434 4 0 1 0 1 1 100 26 150 293 4582 3 7 0 4681 1 95 143 0 1 +"29945" 4 434 4 0 1 0 1 1 100 26 293 513 5000 2 8 0 1495 2 75 220 0 1 +"29946" 4 434 4 0 1 0 1 1 100 26 513 26 2917 8 9 1 472 1 95 487 0 0 +"29947" 4 434 5 1 1 0 1 1 100 10 100 195 5131 1 3 0 516 1 95 95 0 1 +"29948" 4 434 5 1 1 0 1 1 100 10 195 146 4825 6 5 0 1250 4 25 49 1 0 +"29949" 4 434 5 1 1 0 1 1 100 10 146 285 1563 2 8 0 279 1 95 139 0 1 +"29950" 4 434 5 1 1 0 1 1 100 10 285 14 1263 8 9 1 312 1 95 271 0 0 +"29951" 4 434 5 1 1 0 1 1 100 10 14 27 3415 3 4 0 354 1 95 13 0 1 +"29952" 4 434 5 1 1 0 1 1 100 10 27 53 1662 5 7 0 365 1 95 26 0 1 +"29953" 4 434 5 1 1 0 1 1 100 10 53 103 1417 7 4 1 323 1 95 50 1 1 +"29954" 4 434 5 1 1 0 1 1 100 10 103 5 2553 4 1 0 469 1 95 98 1 0 +"29955" 4 434 5 1 1 0 1 1 100 10 5 10 3644 9 6 1 420 1 95 5 1 1 +"29956" 4 434 5 1 1 0 2 1 100 9305 100 195 2456 8 1 1 481 1 95 95 1 1 +"29957" 4 434 5 1 1 0 2 1 100 9305 195 293 2679 6 2 1 3189 3 50 98 1 1 +"29958" 4 434 5 1 1 0 2 1 100 9305 293 440 2258 7 9 0 420 3 50 147 0 1 +"29959" 4 434 5 1 1 0 2 1 100 9305 440 858 1517 2 10 0 347 1 95 418 0 1 +"29960" 4 434 5 1 1 0 2 1 100 9305 858 1673 2288 5 3 1 825 1 95 815 1 1 +"29961" 4 434 5 1 1 0 2 1 100 9305 1673 1255 3543 4 2 0 820 4 25 418 1 0 +"29962" 4 434 5 1 1 0 2 1 100 9305 1255 2447 3290 3 5 0 403 1 95 1192 0 1 +"29963" 4 434 5 1 1 0 2 1 100 9305 2447 4772 2085 9 4 1 432 1 95 2325 1 1 +"29964" 4 434 5 1 1 0 2 1 100 9305 4772 9305 2112 1 7 0 425 1 95 4533 0 1 +"29965" 4 434 5 1 1 0 3 1 100 844 100 195 2809 7 5 1 433 1 95 95 1 1 +"29966" 4 434 5 1 1 0 3 1 100 844 195 10 1370 2 1 0 390 1 95 185 1 0 +"29967" 4 434 5 1 1 0 3 1 100 844 10 20 2080 8 6 1 1643 1 95 10 1 1 +"29968" 4 434 5 1 1 0 3 1 100 844 20 39 2826 4 7 0 906 1 95 19 0 1 +"29969" 4 434 5 1 1 0 3 1 100 844 39 76 
3460 3 10 0 853 1 95 37 0 1 +"29970" 4 434 5 1 1 0 3 1 100 844 76 114 1698 6 8 0 1631 3 50 38 0 1 +"29971" 4 434 5 1 1 0 3 1 100 844 114 222 1638 9 2 1 326 1 95 108 1 1 +"29972" 4 434 5 1 1 0 3 1 100 844 222 433 2187 5 3 1 409 1 95 211 1 1 +"29973" 4 434 5 1 1 0 3 1 100 844 433 844 1075 1 10 0 333 1 95 411 0 1 +"29974" 4 434 5 1 1 0 4 1 100 6851 100 195 2390 2 3 0 706 1 95 95 0 1 +"29975" 4 434 5 1 1 0 4 1 100 6851 195 380 1461 7 4 1 797 1 95 185 1 1 +"29976" 4 434 5 1 1 0 4 1 100 6851 380 741 1708 3 1 1 856 1 95 361 1 1 +"29977" 4 434 5 1 1 0 4 1 100 6851 741 1445 1189 1 9 0 425 1 95 704 0 1 +"29978" 4 434 5 1 1 0 4 1 100 6851 1445 2818 1266 8 7 1 852 1 95 1373 1 1 +"29979" 4 434 5 1 1 0 4 1 100 6851 2818 2677 3777 9 2 0 0 5 5 141 1 0 +"29980" 4 434 5 1 1 0 4 1 100 6851 2677 5220 3075 5 6 0 406 1 95 2543 0 1 +"29981" 4 434 5 1 1 0 4 1 100 6851 5220 5481 3576 6 8 0 1356 5 5 261 0 1 +"29982" 4 434 5 1 1 0 4 1 100 6851 5481 6851 2585 4 7 0 1540 4 25 1370 0 1 +"29983" 4 505 2 0 1 0 1 1 100 99 100 150 23020 2 7 0 2011 3 50 50 0 1 +"29984" 4 505 2 0 1 0 1 1 100 99 150 263 25486 3 7 0 4106 2 75 113 0 1 +"29985" 4 505 2 0 1 0 1 1 100 99 263 395 2104 2 8 0 517 3 50 132 0 1 +"29986" 4 505 2 0 1 0 1 1 100 99 395 99 2491 8 9 1 1225 2 75 296 0 0 +"29987" 4 505 3 1 1 0 1 1 100 47 100 195 2511 1 3 0 3590 1 95 95 0 1 +"29988" 4 505 3 1 1 0 1 1 100 47 195 97 4134 6 5 0 621 3 50 98 1 0 +"29989" 4 505 3 1 1 0 1 1 100 47 97 146 2274 2 8 0 529 3 50 49 0 1 +"29990" 4 505 3 1 1 0 1 1 100 47 146 73 3957 8 9 1 1247 3 50 73 0 0 +"29991" 4 505 3 1 1 0 1 1 100 47 73 36 3056 3 4 1 1218 3 50 37 0 0 +"29992" 4 505 3 1 1 0 1 1 100 47 36 54 3023 5 7 0 455 3 50 18 0 1 +"29993" 4 505 3 1 1 0 1 1 100 47 54 95 2258 7 4 1 952 2 75 41 1 1 +"29994" 4 505 3 1 1 0 1 1 100 47 95 24 2658 4 1 0 2069 2 75 71 1 0 +"29995" 4 505 3 1 1 0 1 1 100 47 24 47 2374 9 6 1 1851 1 95 23 1 1 +"29996" 4 505 3 1 1 0 2 1 100 1337 100 195 3901 8 1 1 2423 1 95 95 1 1 +"29997" 4 505 3 1 1 0 2 1 100 1337 195 293 2521 6 2 1 424 3 50 98 1 1 +"29998" 4 505 3 1 1 0 2 1 100 1337 293 146 2264 7 9 1 444 3 50 147 0 0 +"29999" 4 505 3 1 1 0 2 1 100 1337 146 285 2026 2 10 0 3459 1 95 139 0 1 +"30000" 4 505 3 1 1 0 2 1 100 1337 285 499 3399 5 3 1 1733 2 75 214 1 1 +"30001" 4 505 3 1 1 0 2 1 100 1337 499 873 2709 4 2 1 2642 2 75 374 1 1 +"30002" 4 505 3 1 1 0 2 1 100 1337 873 1528 2027 3 5 0 2055 2 75 655 0 1 +"30003" 4 505 3 1 1 0 2 1 100 1337 1528 764 2877 9 4 0 829 3 50 764 1 0 +"30004" 4 505 3 1 1 0 2 1 100 1337 764 1337 2439 1 7 0 1404 2 75 573 0 1 +"30005" 4 505 3 1 1 0 3 1 100 2244 100 195 2094 7 5 1 2246 1 95 95 1 1 +"30006" 4 505 3 1 1 0 3 1 100 2244 195 293 2660 2 1 1 1911 3 50 98 1 1 +"30007" 4 505 3 1 1 0 3 1 100 2244 293 513 2329 8 6 1 2225 2 75 220 1 1 +"30008" 4 505 3 1 1 0 3 1 100 2244 513 128 2528 4 7 1 2841 2 75 385 0 0 +"30009" 4 505 3 1 1 0 3 1 100 2244 128 250 2678 3 10 0 1493 1 95 122 0 1 +"30010" 4 505 3 1 1 0 3 1 100 2244 250 438 2336 6 8 0 486 2 75 188 0 1 +"30011" 4 505 3 1 1 0 3 1 100 2244 438 767 1981 9 2 1 538 2 75 329 1 1 +"30012" 4 505 3 1 1 0 3 1 100 2244 767 1151 2063 5 3 1 637 3 50 384 1 1 +"30013" 4 505 3 1 1 0 3 1 100 2244 1151 2244 1831 1 10 0 2806 1 95 1093 0 1 +"30014" 4 505 3 1 1 0 4 1 100 30 100 50 2590 2 3 1 1055 3 50 50 0 0 +"30015" 4 505 3 1 1 0 4 1 100 30 50 98 1544 7 4 1 2026 1 95 48 1 1 +"30016" 4 505 3 1 1 0 4 1 100 30 98 5 3247 3 1 0 2287 1 95 93 1 0 +"30017" 4 505 3 1 1 0 4 1 100 30 5 10 2362 1 9 0 624 1 95 5 0 1 +"30018" 4 505 3 1 1 0 4 1 100 30 10 20 2124 8 7 1 1209 1 95 10 1 1 +"30019" 4 505 3 1 1 0 4 1 100 30 20 
39 2190 9 2 1 1761 1 95 19 1 1 +"30020" 4 505 3 1 1 0 4 1 100 30 39 68 2002 5 6 0 2438 2 75 29 0 1 +"30021" 4 505 3 1 1 0 4 1 100 30 68 17 2115 6 8 1 446 2 75 51 0 0 +"30022" 4 505 3 1 1 0 4 1 100 30 17 30 1943 4 7 0 1036 2 75 13 0 1 +"30023" 4 505 4 0 1 1 1 1 100 169 100 150 16536 8 3 1 1411 3 50 50 1 1 +"30024" 4 505 4 0 1 1 1 1 100 169 150 225 30023 3 7 0 1067 3 50 75 0 1 +"30025" 4 505 4 0 1 1 1 1 100 169 225 338 1992 8 2 1 1361 3 50 113 1 1 +"30026" 4 505 4 0 1 1 1 1 100 169 338 169 1661 2 1 0 444 3 50 169 1 0 +"30027" 4 505 5 1 1 1 1 1 100 414 100 150 1824 9 7 1 3185 3 50 50 1 1 +"30028" 4 505 5 1 1 1 1 1 100 414 150 75 2438 4 8 1 500 3 50 75 0 0 +"30029" 4 505 5 1 1 1 1 1 100 414 75 131 1666 8 2 1 1111 4 75 56 1 1 +"30030" 4 505 5 1 1 1 1 1 100 414 131 65 1729 2 1 0 3561 3 50 66 1 0 +"30031" 4 505 5 1 1 1 1 1 100 414 65 98 2122 7 6 1 1132 3 50 33 1 1 +"30032" 4 505 5 1 1 1 1 1 100 414 98 147 1825 5 3 1 465 3 50 49 1 1 +"30033" 4 505 5 1 1 1 1 1 100 414 147 221 1783 3 6 0 2392 3 50 74 0 1 +"30034" 4 505 5 1 1 1 1 1 100 414 221 276 1896 6 9 0 1619 2 25 55 0 1 +"30035" 4 505 5 1 1 1 1 1 100 414 276 414 2264 1 4 0 433 3 50 138 0 1 +"30036" 4 505 5 1 1 1 2 1 100 1234 100 175 2221 2 9 0 1914 4 75 75 0 1 +"30037" 4 505 5 1 1 1 2 1 100 1234 175 219 1668 4 10 0 3388 2 25 44 0 1 +"30038" 4 505 5 1 1 1 2 1 100 1234 219 274 1794 3 1 1 1394 2 25 55 1 1 +"30039" 4 505 5 1 1 1 2 1 100 1234 274 343 1579 8 6 1 2649 2 25 69 1 1 +"30040" 4 505 5 1 1 1 2 1 100 1234 343 429 1684 5 7 0 1057 2 25 86 0 1 +"30041" 4 505 5 1 1 1 2 1 100 1234 429 322 1751 6 8 1 1600 2 25 107 0 0 +"30042" 4 505 5 1 1 1 2 1 100 1234 322 403 1761 7 5 1 2145 2 25 81 1 1 +"30043" 4 505 5 1 1 1 2 1 100 1234 403 705 2528 1 10 0 2179 4 75 302 0 1 +"30044" 4 505 5 1 1 1 2 1 100 1234 705 1234 1673 9 3 1 964 4 75 529 1 1 +"30045" 4 505 5 1 1 1 3 1 100 343 100 150 1990 3 5 0 904 3 50 50 0 1 +"30046" 4 505 5 1 1 1 3 1 100 343 150 75 1783 8 9 1 827 3 50 75 0 0 +"30047" 4 505 5 1 1 1 3 1 100 343 75 113 1785 2 4 0 427 3 50 38 0 1 +"30048" 4 505 5 1 1 1 3 1 100 343 113 170 1669 6 3 1 539 3 50 57 1 1 +"30049" 4 505 5 1 1 1 3 1 100 343 170 298 2350 7 6 1 488 4 75 128 1 1 +"30050" 4 505 5 1 1 1 3 1 100 343 298 149 1893 4 2 0 1928 3 50 149 1 0 +"30051" 4 505 5 1 1 1 3 1 100 343 149 261 2214 1 8 0 1431 4 75 112 0 1 +"30052" 4 505 5 1 1 1 3 1 100 343 261 196 2017 5 7 1 2468 2 25 65 0 0 +"30053" 4 505 5 1 1 1 3 1 100 343 196 343 1649 9 5 1 1820 4 75 147 1 1 +"30054" 4 505 5 1 1 1 4 1 100 225 100 175 1933 8 7 1 621 4 75 75 1 1 +"30055" 4 505 5 1 1 1 4 1 100 225 175 263 1664 3 10 0 1403 3 50 88 0 1 +"30056" 4 505 5 1 1 1 4 1 100 225 263 131 1557 7 9 1 1788 3 50 132 0 0 +"30057" 4 505 5 1 1 1 4 1 100 225 131 229 1637 9 1 1 1314 4 75 98 1 1 +"30058" 4 505 5 1 1 1 4 1 100 225 229 344 2133 2 3 0 977 3 50 115 0 1 +"30059" 4 505 5 1 1 1 4 1 100 225 344 602 1965 1 8 0 821 4 75 258 0 1 +"30060" 4 505 5 1 1 1 4 1 100 225 602 301 2068 5 4 0 2094 3 50 301 1 0 +"30061" 4 505 5 1 1 1 4 1 100 225 301 150 2408 4 2 0 588 3 50 151 1 0 +"30062" 4 505 5 1 1 1 4 1 100 225 150 225 1781 6 3 1 1744 3 50 75 1 1 +"30063" 4 508 2 0 1 0 1 1 100 219 100 150 14504 2 7 0 2193 3 50 50 0 1 +"30064" 4 508 2 0 1 0 1 1 100 219 150 225 14080 3 7 0 854 3 50 75 0 1 +"30065" 4 508 2 0 1 0 1 1 100 219 225 439 5026 2 8 0 2650 1 95 214 0 1 +"30066" 4 508 2 0 1 0 1 1 100 219 439 219 4146 8 9 1 708 3 50 220 0 0 +"30067" 4 508 3 1 1 0 1 1 100 161 100 195 2550 1 3 0 2036 1 95 95 0 1 +"30068" 4 508 3 1 1 0 1 1 100 161 195 244 2273 6 5 1 568 4 25 49 1 1 +"30069" 4 508 3 1 1 0 1 1 100 161 244 366 
2105 2 8 0 666 3 50 122 0 1 +"30070" 4 508 3 1 1 0 1 1 100 161 366 183 2174 8 9 1 548 3 50 183 0 0 +"30071" 4 508 3 1 1 0 1 1 100 161 183 275 3104 3 4 0 1778 3 50 92 0 1 +"30072" 4 508 3 1 1 0 1 1 100 161 275 206 4466 5 7 1 467 4 25 69 0 0 +"30073" 4 508 3 1 1 0 1 1 100 161 206 258 2627 7 4 1 715 4 25 52 1 1 +"30074" 4 508 3 1 1 0 1 1 100 161 258 129 3694 4 1 0 1305 3 50 129 1 0 +"30075" 4 508 3 1 1 0 1 1 100 161 129 161 2058 9 6 1 470 4 25 32 1 1 +"30076" 4 508 3 1 1 0 2 1 100 944 100 150 3898 8 1 1 464 3 50 50 1 1 +"30077" 4 508 3 1 1 0 2 1 100 944 150 188 2258 6 2 1 1327 4 25 38 1 1 +"30078" 4 508 3 1 1 0 2 1 100 944 188 141 4199 7 9 1 801 4 25 47 0 0 +"30079" 4 508 3 1 1 0 2 1 100 944 141 275 1781 2 10 0 2655 1 95 134 0 1 +"30080" 4 508 3 1 1 0 2 1 100 944 275 344 1608 5 3 1 1611 4 25 69 1 1 +"30081" 4 508 3 1 1 0 2 1 100 944 344 258 4073 4 2 0 715 4 25 86 1 0 +"30082" 4 508 3 1 1 0 2 1 100 944 258 503 1188 3 5 0 3033 1 95 245 0 1 +"30083" 4 508 3 1 1 0 2 1 100 944 503 629 2691 9 4 1 1171 4 25 126 1 1 +"30084" 4 508 3 1 1 0 2 1 100 944 629 944 2408 1 7 0 853 3 50 315 0 1 +"30085" 4 508 3 1 1 0 3 1 100 21 100 150 2688 7 5 1 408 3 50 50 1 1 +"30086" 4 508 3 1 1 0 3 1 100 21 150 75 2566 2 1 0 441 3 50 75 1 0 +"30087" 4 508 3 1 1 0 3 1 100 21 75 131 2601 8 6 1 1554 2 75 56 1 1 +"30088" 4 508 3 1 1 0 3 1 100 21 131 255 2120 4 7 0 1355 1 95 124 0 1 +"30089" 4 508 3 1 1 0 3 1 100 21 255 497 2199 3 10 0 1709 1 95 242 0 1 +"30090" 4 508 3 1 1 0 3 1 100 21 497 124 2078 6 8 1 2913 2 75 373 0 0 +"30091" 4 508 3 1 1 0 3 1 100 21 124 242 3977 9 2 1 1521 1 95 118 1 1 +"30092" 4 508 3 1 1 0 3 1 100 21 242 12 2985 5 3 0 2004 1 95 230 1 0 +"30093" 4 508 3 1 1 0 3 1 100 21 12 21 3220 1 10 0 1022 2 75 9 0 1 +"30094" 4 508 3 1 1 0 4 1 100 1443 100 195 2950 2 3 0 987 1 95 95 0 1 +"30095" 4 508 3 1 1 0 4 1 100 1443 195 380 915 7 4 1 818 1 95 185 1 1 +"30096" 4 508 3 1 1 0 4 1 100 1443 380 190 1117 3 1 0 617 3 50 190 1 0 +"30097" 4 508 3 1 1 0 4 1 100 1443 190 371 992 1 9 0 1059 1 95 181 0 1 +"30098" 4 508 3 1 1 0 4 1 100 1443 371 723 870 8 7 1 1413 1 95 352 1 1 +"30099" 4 508 3 1 1 0 4 1 100 1443 723 1410 1548 9 2 1 1610 1 95 687 1 1 +"30100" 4 508 3 1 1 0 4 1 100 1443 1410 1481 3665 5 6 0 733 5 5 71 0 1 +"30101" 4 508 3 1 1 0 4 1 100 1443 1481 740 1527 6 8 1 1549 3 50 741 0 0 +"30102" 4 508 3 1 1 0 4 1 100 1443 740 1443 3543 4 7 0 1145 1 95 703 0 1 +"30103" 4 508 4 0 1 1 1 1 100 177 100 150 13234 8 3 1 836 3 50 50 1 1 +"30104" 4 508 4 0 1 1 1 1 100 177 150 225 4354 3 7 0 2382 3 50 75 0 1 +"30105" 4 508 4 0 1 1 1 1 100 177 225 236 1911 8 2 1 1000 1 5 11 1 1 +"30106" 4 508 4 0 1 1 1 1 100 177 236 177 1121 2 1 0 2142 2 25 59 1 0 +"30107" 4 508 5 1 1 1 1 1 100 858 100 150 2292 9 7 1 853 3 50 50 1 1 +"30108" 4 508 5 1 1 1 1 1 100 858 150 188 1910 4 8 0 927 2 25 38 0 1 +"30109" 4 508 5 1 1 1 1 1 100 858 188 282 1149 8 2 1 616 3 50 94 1 1 +"30110" 4 508 5 1 1 1 1 1 100 858 282 141 929 2 1 0 402 3 50 141 1 0 +"30111" 4 508 5 1 1 1 1 1 100 858 141 275 854 7 6 1 1101 5 95 134 1 1 +"30112" 4 508 5 1 1 1 1 1 100 858 275 344 1704 5 3 1 903 2 25 69 1 1 +"30113" 4 508 5 1 1 1 1 1 100 858 344 516 1157 3 6 0 1116 3 50 172 0 1 +"30114" 4 508 5 1 1 1 1 1 100 858 516 490 1241 6 9 1 1536 1 5 26 0 0 +"30115" 4 508 5 1 1 1 1 1 100 858 490 858 1048 1 4 0 1009 4 75 368 0 1 +"30116" 4 508 5 1 1 1 2 1 100 868 100 150 2679 2 9 0 729 3 50 50 0 1 +"30117" 4 508 5 1 1 1 2 1 100 868 150 188 1099 4 10 0 996 2 25 38 0 1 +"30118" 4 508 5 1 1 1 2 1 100 868 188 94 1102 3 1 0 822 3 50 94 1 0 +"30119" 4 508 5 1 1 1 2 1 100 868 94 183 1037 8 6 1 
0 5 95 89 1 1 +"30120" 4 508 5 1 1 1 2 1 100 868 183 192 4488 5 7 0 4426 1 5 9 0 1 +"30121" 4 508 5 1 1 1 2 1 100 868 192 182 998 6 8 1 1420 1 5 10 0 0 +"30122" 4 508 5 1 1 1 2 1 100 868 182 228 2635 7 5 1 988 2 25 46 1 1 +"30123" 4 508 5 1 1 1 2 1 100 868 228 445 980 1 10 0 0 5 95 217 0 1 +"30124" 4 508 5 1 1 1 2 1 100 868 445 868 1787 9 3 1 0 5 95 423 1 1 +"30125" 4 508 5 1 1 1 3 1 100 1237 100 125 1605 3 5 0 1530 2 25 25 0 1 +"30126" 4 508 5 1 1 1 3 1 100 1237 125 62 1144 8 9 1 432 3 50 63 0 0 +"30127" 4 508 5 1 1 1 3 1 100 1237 62 121 976 2 4 0 0 5 95 59 0 1 +"30128" 4 508 5 1 1 1 3 1 100 1237 121 236 1145 6 3 1 1667 5 95 115 1 1 +"30129" 4 508 5 1 1 1 3 1 100 1237 236 460 1475 7 6 1 0 5 95 224 1 1 +"30130" 4 508 5 1 1 1 3 1 100 1237 460 345 1468 4 2 0 1110 2 25 115 1 0 +"30131" 4 508 5 1 1 1 3 1 100 1237 345 673 1038 1 8 0 0 5 95 328 0 1 +"30132" 4 508 5 1 1 1 3 1 100 1237 673 707 1847 5 7 0 1850 1 5 34 0 1 +"30133" 4 508 5 1 1 1 3 1 100 1237 707 1237 1004 9 5 1 812 4 75 530 1 1 +"30134" 4 508 5 1 1 1 4 1 100 587 100 150 1517 8 7 1 451 3 50 50 1 1 +"30135" 4 508 5 1 1 1 4 1 100 587 150 188 826 3 10 0 834 2 25 38 0 1 +"30136" 4 508 5 1 1 1 4 1 100 587 188 94 756 7 9 1 421 3 50 94 0 0 +"30137" 4 508 5 1 1 1 4 1 100 587 94 183 1078 9 1 1 0 5 95 89 1 1 +"30138" 4 508 5 1 1 1 4 1 100 587 183 357 1247 2 3 0 1404 5 95 174 0 1 +"30139" 4 508 5 1 1 1 4 1 100 587 357 696 1468 1 8 0 0 5 95 339 0 1 +"30140" 4 508 5 1 1 1 4 1 100 587 696 522 1225 5 4 0 1237 2 25 174 1 0 +"30141" 4 508 5 1 1 1 4 1 100 587 522 391 800 4 2 0 888 2 25 131 1 0 +"30142" 4 508 5 1 1 1 4 1 100 587 391 587 827 6 3 1 416 3 50 196 1 1 +"30143" 4 527 2 0 1 0 1 1 100 256 100 150 11943 2 7 0 1600 3 50 50 0 1 +"30144" 4 527 2 0 1 0 1 1 100 256 150 263 18400 3 7 0 974 2 75 113 0 1 +"30145" 4 527 2 0 1 0 1 1 100 256 263 513 3529 2 8 0 659 1 95 250 0 1 +"30146" 4 527 2 0 1 0 1 1 100 256 513 256 4869 8 9 1 1506 3 50 257 0 0 +"30147" 4 527 3 1 1 0 1 0 100 1 100 195 1728 1 3 0 497 1 95 95 0 1 +"30148" 4 527 3 1 1 0 1 0 100 1 195 380 3246 6 5 1 373 1 95 185 1 1 +"30149" 4 527 3 1 1 0 1 0 100 1 380 190 5065 2 8 1 849 3 50 190 0 0 +"30150" 4 527 3 1 1 0 1 0 100 1 190 95 1801 8 9 1 1315 3 50 95 0 0 +"30151" 4 527 3 1 1 0 1 0 100 1 95 185 2030 3 4 0 394 1 95 90 0 1 +"30152" 4 527 3 1 1 0 1 0 100 1 185 9 5380 5 7 1 852 1 95 176 0 0 +"30153" 4 527 3 1 1 0 1 0 100 1 9 18 2634 7 4 1 383 1 95 9 1 1 +"30154" 4 527 3 1 1 0 1 0 100 1 18 1 3677 4 1 0 330 1 95 17 1 0 +"30155" 4 527 3 1 1 0 2 1 100 1386 100 195 2043 8 1 1 487 1 95 95 1 1 +"30156" 4 527 3 1 1 0 2 1 100 1386 195 380 2340 6 2 1 1539 1 95 185 1 1 +"30157" 4 527 3 1 1 0 2 1 100 1386 380 285 2518 7 9 1 505 4 25 95 0 0 +"30158" 4 527 3 1 1 0 2 1 100 1386 285 556 1731 2 10 0 2021 1 95 271 0 1 +"30159" 4 527 3 1 1 0 2 1 100 1386 556 278 2452 5 3 0 1499 3 50 278 1 0 +"30160" 4 527 3 1 1 0 2 1 100 1386 278 208 3036 4 2 0 803 4 25 70 1 0 +"30161" 4 527 3 1 1 0 2 1 100 1386 208 406 1574 3 5 0 430 1 95 198 0 1 +"30162" 4 527 3 1 1 0 2 1 100 1386 406 792 2273 9 4 1 488 1 95 386 1 1 +"30163" 4 527 3 1 1 0 2 1 100 1386 792 1386 1224 1 7 0 1187 2 75 594 0 1 +"30164" 4 527 3 1 1 0 3 1 100 31 100 195 3363 7 5 1 933 1 95 95 1 1 +"30165" 4 527 3 1 1 0 3 1 100 31 195 10 1235 2 1 0 951 1 95 185 1 0 +"30166" 4 527 3 1 1 0 3 1 100 31 10 20 6321 8 6 1 891 1 95 10 1 1 +"30167" 4 527 3 1 1 0 3 1 100 31 20 39 2644 4 7 0 424 1 95 19 0 1 +"30168" 4 527 3 1 1 0 3 1 100 31 39 68 1399 3 10 0 1682 2 75 29 0 1 +"30169" 4 527 3 1 1 0 3 1 100 31 68 34 2488 6 8 1 523 3 50 34 0 0 +"30170" 4 527 3 1 1 0 3 1 100 31 34 66 2099 
9 2 1 391 1 95 32 1 1 +"30171" 4 527 3 1 1 0 3 1 100 31 66 16 3000 5 3 0 1335 2 75 50 1 0 +"30172" 4 527 3 1 1 0 3 1 100 31 16 31 1623 1 10 0 521 1 95 15 0 1 +"30173" 4 527 3 1 1 0 4 1 100 1586 100 195 3353 2 3 0 448 1 95 95 0 1 +"30174" 4 527 3 1 1 0 4 1 100 1586 195 380 1658 7 4 1 356 1 95 185 1 1 +"30175" 4 527 3 1 1 0 4 1 100 1586 380 190 1528 3 1 0 1273 3 50 190 1 0 +"30176" 4 527 3 1 1 0 4 1 100 1586 190 371 1487 1 9 0 447 1 95 181 0 1 +"30177" 4 527 3 1 1 0 4 1 100 1586 371 723 1799 8 7 1 387 1 95 352 1 1 +"30178" 4 527 3 1 1 0 4 1 100 1586 723 1410 1523 9 2 1 387 1 95 687 1 1 +"30179" 4 527 3 1 1 0 4 1 100 1586 1410 2115 3223 5 6 0 1106 3 50 705 0 1 +"30180" 4 527 3 1 1 0 4 1 100 1586 2115 1057 5834 6 8 1 768 3 50 1058 0 0 +"30181" 4 527 3 1 1 0 4 1 100 1586 1057 1586 2537 4 7 0 2524 3 50 529 0 1 +"30182" 4 527 4 0 1 1 1 1 100 17 100 150 9554 8 3 1 1515 3 50 50 1 1 +"30183" 4 527 4 0 1 1 1 1 100 17 150 225 3280 3 7 0 1511 3 50 75 0 1 +"30184" 4 527 4 0 1 1 1 1 100 17 225 338 1917 8 2 1 827 3 50 113 1 1 +"30185" 4 527 4 0 1 1 1 1 100 17 338 17 3144 2 1 0 0 5 95 321 1 0 +"30186" 4 527 5 1 1 1 1 1 100 542 100 195 2093 9 7 1 0 5 95 95 1 1 +"30187" 4 527 5 1 1 1 1 1 100 542 195 293 2648 4 8 0 702 3 50 98 0 1 +"30188" 4 527 5 1 1 1 1 1 100 542 293 440 1891 8 2 1 1068 3 50 147 1 1 +"30189" 4 527 5 1 1 1 1 1 100 542 440 220 1427 2 1 0 1012 3 50 220 1 0 +"30190" 4 527 5 1 1 1 1 1 100 542 220 330 1881 7 6 1 1220 3 50 110 1 1 +"30191" 4 527 5 1 1 1 1 1 100 542 330 247 2822 5 3 0 1117 2 25 83 1 0 +"30192" 4 527 5 1 1 1 1 1 100 542 247 371 1922 3 6 0 555 3 50 124 0 1 +"30193" 4 527 5 1 1 1 1 1 100 542 371 278 1811 6 9 1 1392 2 25 93 0 0 +"30194" 4 527 5 1 1 1 1 1 100 542 278 542 1528 1 4 0 1298 5 95 264 0 1 +"30195" 4 527 5 1 1 1 2 1 100 913 100 195 2746 2 9 0 380 5 95 95 0 1 +"30196" 4 527 5 1 1 1 2 1 100 913 195 293 2623 4 10 0 608 3 50 98 0 1 +"30197" 4 527 5 1 1 1 2 1 100 913 293 220 1660 3 1 0 1149 2 25 73 1 0 +"30198" 4 527 5 1 1 1 2 1 100 913 220 429 1847 8 6 1 1175 5 95 209 1 1 +"30199" 4 527 5 1 1 1 2 1 100 913 429 214 2749 5 7 1 673 3 50 215 0 0 +"30200" 4 527 5 1 1 1 2 1 100 913 214 160 1751 6 8 1 1803 2 25 54 0 0 +"30201" 4 527 5 1 1 1 2 1 100 913 160 240 1615 7 5 1 481 3 50 80 1 1 +"30202" 4 527 5 1 1 1 2 1 100 913 240 468 1467 1 10 0 712 5 95 228 0 1 +"30203" 4 527 5 1 1 1 2 1 100 913 468 913 1556 9 3 1 1136 5 95 445 1 1 +"30204" 4 527 5 1 1 1 3 1 100 23 100 150 2892 3 5 0 560 3 50 50 0 1 +"30205" 4 527 5 1 1 1 3 1 100 23 150 7 1608 8 9 1 0 5 95 143 0 0 +"30206" 4 527 5 1 1 1 3 1 100 23 7 12 2918 2 4 0 1122 4 75 5 0 1 +"30207" 4 527 5 1 1 1 3 1 100 23 12 18 1692 6 3 1 461 3 50 6 1 1 +"30208" 4 527 5 1 1 1 3 1 100 23 18 27 1287 7 6 1 482 3 50 9 1 1 +"30209" 4 527 5 1 1 1 3 1 100 23 27 13 1917 4 2 0 731 3 50 14 1 0 +"30210" 4 527 5 1 1 1 3 1 100 23 13 25 1486 1 8 0 0 5 95 12 0 1 +"30211" 4 527 5 1 1 1 3 1 100 23 25 12 2777 5 7 1 520 3 50 13 0 0 +"30212" 4 527 5 1 1 1 3 1 100 23 12 23 1758 9 5 1 0 5 95 11 1 1 +"30213" 4 527 5 1 1 1 4 1 100 1133 100 195 2643 8 7 1 0 5 95 95 1 1 +"30214" 4 527 5 1 1 1 4 1 100 1133 195 293 2176 3 10 0 1691 3 50 98 0 1 +"30215" 4 527 5 1 1 1 4 1 100 1133 293 220 1779 7 9 1 405 2 25 73 0 0 +"30216" 4 527 5 1 1 1 4 1 100 1133 220 429 1639 9 1 1 814 5 95 209 1 1 +"30217" 4 527 5 1 1 1 4 1 100 1133 429 644 1642 2 3 0 957 3 50 215 0 1 +"30218" 4 527 5 1 1 1 4 1 100 1133 644 966 1477 1 8 0 623 3 50 322 0 1 +"30219" 4 527 5 1 1 1 4 1 100 1133 966 1208 4235 5 4 1 440 2 25 242 1 1 +"30220" 4 527 5 1 1 1 4 1 100 1133 1208 906 1744 4 2 0 361 2 25 302 1 0 +"30221" 4 
527 5 1 1 1 4 1 100 1133 906 1133 1425 6 3 1 363 2 25 227 1 1 +"30222" 4 533 2 0 1 0 1 1 100 128 100 150 18787 2 7 0 2383 3 50 50 0 1 +"30223" 4 533 2 0 1 0 1 1 100 128 150 293 18142 3 7 0 1050 1 95 143 0 1 +"30224" 4 533 2 0 1 0 1 1 100 128 293 513 5852 2 8 0 1299 2 75 220 0 1 +"30225" 4 533 2 0 1 0 1 1 100 128 513 128 5532 8 9 1 704 2 75 385 0 0 +"30226" 4 533 3 1 1 0 1 1 100 158 100 175 7947 1 3 0 1180 2 75 75 0 1 +"30227" 4 533 3 1 1 0 1 1 100 158 175 263 12419 6 5 1 1528 3 50 88 1 1 +"30228" 4 533 3 1 1 0 1 1 100 158 263 460 7679 2 8 0 2397 2 75 197 0 1 +"30229" 4 533 3 1 1 0 1 1 100 158 460 690 5472 8 9 0 731 3 50 230 0 1 +"30230" 4 533 3 1 1 0 1 1 100 158 690 345 6619 3 4 1 392 3 50 345 0 0 +"30231" 4 533 3 1 1 0 1 1 100 158 345 431 2742 5 7 0 1250 4 25 86 0 1 +"30232" 4 533 3 1 1 0 1 1 100 158 431 323 4135 7 4 0 617 4 25 108 1 0 +"30233" 4 533 3 1 1 0 1 1 100 158 323 81 4431 4 1 0 927 2 75 242 1 0 +"30234" 4 533 3 1 1 0 1 1 100 158 81 158 4757 9 6 1 1822 1 95 77 1 1 +"30235" 4 533 3 1 1 0 2 0 100 0 100 195 6859 8 1 1 1248 1 95 95 1 1 +"30236" 4 533 3 1 1 0 2 0 100 0 195 97 5375 6 2 0 904 3 50 98 1 0 +"30237" 4 533 3 1 1 0 2 0 100 0 97 189 2679 7 9 0 1640 1 95 92 0 1 +"30238" 4 533 3 1 1 0 2 0 100 0 189 9 6249 2 10 1 1252 1 95 180 0 0 +"30239" 4 533 3 1 1 0 2 0 100 0 9 0 5288 5 3 0 933 1 95 9 1 0 +"30240" 4 533 3 1 1 0 3 0 100 1 100 175 4086 7 5 1 514 2 75 75 1 1 +"30241" 4 533 3 1 1 0 3 0 100 1 175 87 2758 2 1 0 646 3 50 88 1 0 +"30242" 4 533 3 1 1 0 3 0 100 1 87 170 7227 8 6 1 1186 1 95 83 1 1 +"30243" 4 533 3 1 1 0 3 0 100 1 170 332 3335 4 7 0 697 1 95 162 0 1 +"30244" 4 533 3 1 1 0 3 0 100 1 332 249 6588 3 10 1 565 4 25 83 0 0 +"30245" 4 533 3 1 1 0 3 0 100 1 249 12 4985 6 8 1 1179 1 95 237 0 0 +"30246" 4 533 3 1 1 0 3 0 100 1 12 1 1366 9 2 0 646 1 95 11 1 0 +"30247" 4 533 3 1 1 0 4 0 100 0 100 5 3418 2 3 1 1688 1 95 95 0 0 +"30248" 4 533 3 1 1 0 4 0 100 0 5 0 5107 7 4 0 972 1 95 5 1 0 +"30249" 4 533 4 0 1 1 1 1 100 641 100 150 9038 8 3 1 817 3 50 50 1 1 +"30250" 4 533 4 0 1 1 1 1 100 641 150 263 7718 3 7 0 1220 4 75 113 0 1 +"30251" 4 533 4 0 1 1 1 1 100 641 263 513 4797 8 2 1 629 5 95 250 1 1 +"30252" 4 533 4 0 1 1 1 1 100 641 513 641 9963 2 1 1 626 2 25 128 1 1 +"30253" 4 533 5 1 1 1 1 1 100 1248 100 195 3304 9 7 1 871 5 95 95 1 1 +"30254" 4 533 5 1 1 1 1 1 100 1248 195 341 5058 4 8 0 1166 4 75 146 0 1 +"30255" 4 533 5 1 1 1 1 1 100 1248 341 512 6052 8 2 1 1016 3 50 171 1 1 +"30256" 4 533 5 1 1 1 1 1 100 1248 512 640 9128 2 1 1 423 2 25 128 1 1 +"30257" 4 533 5 1 1 1 1 1 100 1248 640 800 5670 7 6 1 2216 2 25 160 1 1 +"30258" 4 533 5 1 1 1 1 1 100 1248 800 1000 11787 5 3 1 1176 2 25 200 1 1 +"30259" 4 533 5 1 1 1 1 1 100 1248 1000 950 17636 3 6 1 1278 1 5 50 0 0 +"30260" 4 533 5 1 1 1 1 1 100 1248 950 998 3643 6 9 0 844 1 5 48 0 1 +"30261" 4 533 5 1 1 1 1 1 100 1248 998 1248 4258 1 4 0 987 2 25 250 0 1 +"30262" 4 533 5 1 1 1 2 0 100 1 100 95 4070 2 9 1 922 1 5 5 0 0 +"30263" 4 533 5 1 1 1 2 0 100 1 95 100 3041 4 10 0 849 1 5 5 0 1 +"30264" 4 533 5 1 1 1 2 0 100 1 100 50 5999 3 1 0 539 3 50 50 1 0 +"30265" 4 533 5 1 1 1 2 0 100 1 50 63 3251 8 6 1 2229 2 25 13 1 1 +"30266" 4 533 5 1 1 1 2 0 100 1 63 95 5407 5 7 0 680 3 50 32 0 1 +"30267" 4 533 5 1 1 1 2 0 100 1 95 24 4640 6 8 1 1535 4 75 71 0 0 +"30268" 4 533 5 1 1 1 2 0 100 1 24 1 4924 7 5 0 1187 5 95 23 1 0 +"30269" 4 533 5 1 1 1 3 0 100 0 100 95 2527 3 5 1 1070 1 5 5 0 0 +"30270" 4 533 5 1 1 1 3 0 100 0 95 5 2101 8 9 1 1142 5 95 90 0 0 +"30271" 4 533 5 1 1 1 3 0 100 0 5 10 3415 2 4 0 664 5 95 5 0 1 +"30272" 4 533 5 1 1 1 3 0 
100 0 10 5 4620 6 3 0 1743 3 50 5 1 0 +"30273" 4 533 5 1 1 1 3 0 100 0 5 10 2885 7 6 1 516 5 95 5 1 1 +"30274" 4 533 5 1 1 1 3 0 100 0 10 0 5338 4 2 0 831 5 95 10 1 0 +"30275" 4 533 5 1 1 1 4 1 100 457 100 195 3912 8 7 1 0 5 95 95 1 1 +"30276" 4 533 5 1 1 1 4 1 100 457 195 341 18198 3 10 0 2043 4 75 146 0 1 +"30277" 4 533 5 1 1 1 4 1 100 457 341 426 14967 7 9 0 2340 2 25 85 0 1 +"30278" 4 533 5 1 1 1 4 1 100 457 426 405 5709 9 1 0 1811 1 5 21 1 0 +"30279" 4 533 5 1 1 1 4 1 100 457 405 385 4938 2 3 1 1283 1 5 20 0 0 +"30280" 4 533 5 1 1 1 4 1 100 457 385 366 2460 1 8 1 1191 1 5 19 0 0 +"30281" 4 533 5 1 1 1 4 1 100 457 366 458 3951 5 4 1 1402 2 25 92 1 1 +"30282" 4 533 5 1 1 1 4 1 100 457 458 481 4516 4 2 1 1097 1 5 23 1 1 +"30283" 4 533 5 1 1 1 4 1 100 457 481 457 2074 6 3 0 983 1 5 24 1 0 +"30284" 4 551 2 0 1 1 1 1 100 253 100 150 13444 8 3 1 1647 3 50 50 1 1 +"30285" 4 551 2 0 1 1 1 1 100 253 150 225 3200 3 7 0 2418 3 50 75 0 1 +"30286" 4 551 2 0 1 1 1 1 100 253 225 338 1118 8 2 1 763 3 50 113 1 1 +"30287" 4 551 2 0 1 1 1 1 100 253 338 253 1530 2 1 0 2527 2 25 85 1 0 +"30288" 4 551 3 1 1 1 1 1 100 401 100 150 1487 9 7 1 1348 3 50 50 1 1 +"30289" 4 551 3 1 1 1 1 1 100 401 150 225 3165 4 8 0 801 3 50 75 0 1 +"30290" 4 551 3 1 1 1 1 1 100 401 225 338 1536 8 2 1 542 3 50 113 1 1 +"30291" 4 551 3 1 1 1 1 1 100 401 338 253 1401 2 1 0 2429 2 25 85 1 0 +"30292" 4 551 3 1 1 1 1 1 100 401 253 380 2146 7 6 1 559 3 50 127 1 1 +"30293" 4 551 3 1 1 1 1 1 100 401 380 285 2083 5 3 0 1582 2 25 95 1 0 +"30294" 4 551 3 1 1 1 1 1 100 401 285 356 1410 3 6 0 1340 2 25 71 0 1 +"30295" 4 551 3 1 1 1 1 1 100 401 356 267 1537 6 9 1 1254 2 25 89 0 0 +"30296" 4 551 3 1 1 1 1 1 100 401 267 401 1330 1 4 0 877 3 50 134 0 1 +"30297" 4 551 3 1 1 1 2 1 100 498 100 150 1736 2 9 0 1597 3 50 50 0 1 +"30298" 4 551 3 1 1 1 2 1 100 498 150 225 1292 4 10 0 841 3 50 75 0 1 +"30299" 4 551 3 1 1 1 2 1 100 498 225 112 2380 3 1 0 783 3 50 113 1 0 +"30300" 4 551 3 1 1 1 2 1 100 498 112 196 1275 8 6 1 736 4 75 84 1 1 +"30301" 4 551 3 1 1 1 2 1 100 498 196 294 1557 5 7 0 1210 3 50 98 0 1 +"30302" 4 551 3 1 1 1 2 1 100 498 294 147 1548 6 8 1 936 3 50 147 0 0 +"30303" 4 551 3 1 1 1 2 1 100 498 147 221 1606 7 5 1 1295 3 50 74 1 1 +"30304" 4 551 3 1 1 1 2 1 100 498 221 332 1363 1 10 0 1087 3 50 111 0 1 +"30305" 4 551 3 1 1 1 2 1 100 498 332 498 1265 9 3 1 969 3 50 166 1 1 +"30306" 4 551 3 1 1 1 3 1 100 226 100 150 1483 3 5 0 1026 3 50 50 0 1 +"30307" 4 551 3 1 1 1 3 1 100 226 150 75 2242 8 9 1 1067 3 50 75 0 0 +"30308" 4 551 3 1 1 1 3 1 100 226 75 131 1195 2 4 0 599 4 75 56 0 1 +"30309" 4 551 3 1 1 1 3 1 100 226 131 197 1179 6 3 1 698 3 50 66 1 1 +"30310" 4 551 3 1 1 1 3 1 100 226 197 296 1099 7 6 1 563 3 50 99 1 1 +"30311" 4 551 3 1 1 1 3 1 100 226 296 148 1146 4 2 0 438 3 50 148 1 0 +"30312" 4 551 3 1 1 1 3 1 100 226 148 259 984 1 8 0 921 4 75 111 0 1 +"30313" 4 551 3 1 1 1 3 1 100 226 259 129 1163 5 7 1 538 3 50 130 0 0 +"30314" 4 551 3 1 1 1 3 1 100 226 129 226 1151 9 5 1 1481 4 75 97 1 1 +"30315" 4 551 3 1 1 1 4 1 100 539 100 175 1718 8 7 1 1829 4 75 75 1 1 +"30316" 4 551 3 1 1 1 4 1 100 539 175 263 1083 3 10 0 674 3 50 88 0 1 +"30317" 4 551 3 1 1 1 4 1 100 539 263 131 1143 7 9 1 700 3 50 132 0 0 +"30318" 4 551 3 1 1 1 4 1 100 539 131 255 1359 9 1 1 710 5 95 124 1 1 +"30319" 4 551 3 1 1 1 4 1 100 539 255 383 1197 2 3 0 970 3 50 128 0 1 +"30320" 4 551 3 1 1 1 4 1 100 539 383 575 1269 1 8 0 866 3 50 192 0 1 +"30321" 4 551 3 1 1 1 4 1 100 539 575 719 1474 5 4 1 2577 2 25 144 1 1 +"30322" 4 551 3 1 1 1 4 1 100 539 719 359 1419 4 2 0 936 3 
50 360 1 0 +"30323" 4 551 3 1 1 1 4 1 100 539 359 539 1039 6 3 1 1353 3 50 180 1 1 +"30324" 4 551 4 0 1 0 1 1 100 149 100 175 7473 2 7 0 1595 2 75 75 0 1 +"30325" 4 551 4 0 1 0 1 1 100 149 175 341 1419 3 7 0 1207 1 95 166 0 1 +"30326" 4 551 4 0 1 0 1 1 100 149 341 597 1314 2 8 0 2199 2 75 256 0 1 +"30327" 4 551 4 0 1 0 1 1 100 149 597 149 1135 8 9 1 899 2 75 448 0 0 +"30328" 4 551 5 1 1 0 1 1 100 217 100 195 2367 1 3 0 3060 1 95 95 0 1 +"30329" 4 551 5 1 1 0 1 1 100 217 195 293 998 6 5 1 1334 3 50 98 1 1 +"30330" 4 551 5 1 1 0 1 1 100 217 293 440 1162 2 8 0 987 3 50 147 0 1 +"30331" 4 551 5 1 1 0 1 1 100 217 440 220 1030 8 9 1 984 3 50 220 0 0 +"30332" 4 551 5 1 1 0 1 1 100 217 220 330 1102 3 4 0 926 3 50 110 0 1 +"30333" 4 551 5 1 1 0 1 1 100 217 330 165 1542 5 7 1 1310 3 50 165 0 0 +"30334" 4 551 5 1 1 0 1 1 100 217 165 248 1343 7 4 1 809 3 50 83 1 1 +"30335" 4 551 5 1 1 0 1 1 100 217 248 124 1381 4 1 0 788 3 50 124 1 0 +"30336" 4 551 5 1 1 0 1 1 100 217 124 217 1096 9 6 1 1633 2 75 93 1 1 +"30337" 4 551 5 1 1 0 2 1 100 245 100 150 1582 8 1 1 640 3 50 50 1 1 +"30338" 4 551 5 1 1 0 2 1 100 245 150 225 1243 6 2 1 902 3 50 75 1 1 +"30339" 4 551 5 1 1 0 2 1 100 245 225 169 1019 7 9 1 682 4 25 56 0 0 +"30340" 4 551 5 1 1 0 2 1 100 245 169 330 1017 2 10 0 1659 1 95 161 0 1 +"30341" 4 551 5 1 1 0 2 1 100 245 330 165 1437 5 3 0 1123 3 50 165 1 0 +"30342" 4 551 5 1 1 0 2 1 100 245 165 41 1145 4 2 0 2353 2 75 124 1 0 +"30343" 4 551 5 1 1 0 2 1 100 245 41 72 1205 3 5 0 996 2 75 31 0 1 +"30344" 4 551 5 1 1 0 2 1 100 245 72 140 1051 9 4 1 1542 1 95 68 1 1 +"30345" 4 551 5 1 1 0 2 1 100 245 140 245 1009 1 7 0 889 2 75 105 0 1 +"30346" 4 551 5 1 1 0 3 1 100 681 100 175 1347 7 5 1 1437 2 75 75 1 1 +"30347" 4 551 5 1 1 0 3 1 100 681 175 87 1153 2 1 0 536 3 50 88 1 0 +"30348" 4 551 5 1 1 0 3 1 100 681 87 170 906 8 6 1 3150 1 95 83 1 1 +"30349" 4 551 5 1 1 0 3 1 100 681 170 255 872 4 7 0 901 3 50 85 0 1 +"30350" 4 551 5 1 1 0 3 1 100 681 255 319 1751 3 10 0 444 4 25 64 0 1 +"30351" 4 551 5 1 1 0 3 1 100 681 319 239 1636 6 8 1 1239 4 25 80 0 0 +"30352" 4 551 5 1 1 0 3 1 100 681 239 466 931 9 2 1 1543 1 95 227 1 1 +"30353" 4 551 5 1 1 0 3 1 100 681 466 349 1292 5 3 0 725 4 25 117 1 0 +"30354" 4 551 5 1 1 0 3 1 100 681 349 681 924 1 10 0 1248 1 95 332 0 1 +"30355" 4 551 5 1 1 0 4 1 100 823 100 175 1378 2 3 0 846 2 75 75 0 1 +"30356" 4 551 5 1 1 0 4 1 100 823 175 263 1131 7 4 1 879 3 50 88 1 1 +"30357" 4 551 5 1 1 0 4 1 100 823 263 131 908 3 1 0 861 3 50 132 1 0 +"30358" 4 551 5 1 1 0 4 1 100 823 131 229 1209 1 9 0 1705 2 75 98 0 1 +"30359" 4 551 5 1 1 0 4 1 100 823 229 401 958 8 7 1 849 2 75 172 1 1 +"30360" 4 551 5 1 1 0 4 1 100 823 401 702 1106 9 2 1 1562 2 75 301 1 1 +"30361" 4 551 5 1 1 0 4 1 100 823 702 878 1367 5 6 0 762 4 25 176 0 1 +"30362" 4 551 5 1 1 0 4 1 100 823 878 658 1048 6 8 1 782 4 25 220 0 0 +"30363" 4 551 5 1 1 0 4 1 100 823 658 823 824 4 7 0 756 4 25 165 0 1 +"30364" 4 561 2 0 1 0 1 1 100 56 100 150 28191 2 7 0 706 3 50 50 0 1 +"30365" 4 561 2 0 1 0 1 1 100 56 150 225 11705 3 7 0 1083 3 50 75 0 1 +"30366" 4 561 2 0 1 0 1 1 100 56 225 112 3486 2 8 1 1236 3 50 113 0 0 +"30367" 4 561 2 0 1 0 1 1 100 56 112 56 3419 8 9 1 954 3 50 56 0 0 +"30368" 4 561 3 1 1 0 1 1 100 250 100 150 4944 1 3 0 881 3 50 50 0 1 +"30369" 4 561 3 1 1 0 1 1 100 250 150 112 6696 6 5 0 1236 4 25 38 1 0 +"30370" 4 561 3 1 1 0 1 1 100 250 112 168 2280 2 8 0 1916 3 50 56 0 1 +"30371" 4 561 3 1 1 0 1 1 100 250 168 84 2425 8 9 1 1131 3 50 84 0 0 +"30372" 4 561 3 1 1 0 1 1 100 250 84 126 2577 3 4 0 2360 3 50 42 0 1 +"30373" 4 
561 3 1 1 0 1 1 100 250 126 63 4044 5 7 1 2351 3 50 63 0 0 +"30374" 4 561 3 1 1 0 1 1 100 250 63 95 2437 7 4 1 923 3 50 32 1 1 +"30375" 4 561 3 1 1 0 1 1 100 250 95 143 2604 4 1 1 1245 3 50 48 1 1 +"30376" 4 561 3 1 1 0 1 1 100 250 143 250 3028 9 6 1 1453 2 75 107 1 1 +"30377" 4 561 3 1 1 0 2 1 100 313 100 175 3434 8 1 1 1749 2 75 75 1 1 +"30378" 4 561 3 1 1 0 2 1 100 313 175 131 6157 6 2 0 1227 4 25 44 1 0 +"30379" 4 561 3 1 1 0 2 1 100 313 131 98 1812 7 9 1 763 4 25 33 0 0 +"30380" 4 561 3 1 1 0 2 1 100 313 98 191 1618 2 10 0 1230 1 95 93 0 1 +"30381" 4 561 3 1 1 0 2 1 100 313 191 95 3585 5 3 0 1077 3 50 96 1 0 +"30382" 4 561 3 1 1 0 2 1 100 313 95 47 4897 4 2 0 1126 3 50 48 1 0 +"30383" 4 561 3 1 1 0 2 1 100 313 47 92 2315 3 5 0 1579 1 95 45 0 1 +"30384" 4 561 3 1 1 0 2 1 100 313 92 179 1767 9 4 1 1551 1 95 87 1 1 +"30385" 4 561 3 1 1 0 2 1 100 313 179 313 2946 1 7 0 1086 2 75 134 0 1 +"30386" 4 561 3 1 1 0 3 1 100 1365 100 195 2896 7 5 1 1940 1 95 95 1 1 +"30387" 4 561 3 1 1 0 3 1 100 1365 195 97 2648 2 1 0 1183 3 50 98 1 0 +"30388" 4 561 3 1 1 0 3 1 100 1365 97 189 1938 8 6 1 1055 1 95 92 1 1 +"30389" 4 561 3 1 1 0 3 1 100 1365 189 284 2104 4 7 0 1402 3 50 95 0 1 +"30390" 4 561 3 1 1 0 3 1 100 1365 284 554 2174 3 10 0 1205 1 95 270 0 1 +"30391" 4 561 3 1 1 0 3 1 100 1365 554 693 5324 6 8 0 1190 4 25 139 0 1 +"30392" 4 561 3 1 1 0 3 1 100 1365 693 1040 1835 9 2 1 1417 3 50 347 1 1 +"30393" 4 561 3 1 1 0 3 1 100 1365 1040 780 4035 5 3 0 2024 4 25 260 1 0 +"30394" 4 561 3 1 1 0 3 1 100 1365 780 1365 1721 1 10 0 975 2 75 585 0 1 +"30395" 4 561 3 1 1 0 4 1 100 182 100 195 5336 2 3 0 2396 1 95 95 0 1 +"30396" 4 561 3 1 1 0 4 1 100 182 195 293 1878 7 4 1 921 3 50 98 1 1 +"30397" 4 561 3 1 1 0 4 1 100 182 293 146 2421 3 1 0 1988 3 50 147 1 0 +"30398" 4 561 3 1 1 0 4 1 100 182 146 285 1532 1 9 0 835 1 95 139 0 1 +"30399" 4 561 3 1 1 0 4 1 100 182 285 556 1582 8 7 1 1620 1 95 271 1 1 +"30400" 4 561 3 1 1 0 4 1 100 182 556 973 1708 9 2 1 985 2 75 417 1 1 +"30401" 4 561 3 1 1 0 4 1 100 182 973 486 2414 5 6 1 1278 3 50 487 0 0 +"30402" 4 561 3 1 1 0 4 1 100 182 486 121 1868 6 8 1 1270 2 75 365 0 0 +"30403" 4 561 3 1 1 0 4 1 100 182 121 182 1753 4 7 0 933 3 50 61 0 1 +"30404" 4 561 4 0 1 1 1 1 100 82 100 125 10679 8 3 1 603 2 25 25 1 1 +"30405" 4 561 4 0 1 1 1 1 100 82 125 188 6034 3 7 0 1368 3 50 63 0 1 +"30406" 4 561 4 0 1 1 1 1 100 82 188 329 1805 8 2 1 670 4 75 141 1 1 +"30407" 4 561 4 0 1 1 1 1 100 82 329 82 1806 2 1 0 1128 4 75 247 1 0 +"30408" 4 561 5 1 1 1 1 1 100 728 100 175 2185 9 7 1 1142 4 75 75 1 1 +"30409" 4 561 5 1 1 1 1 1 100 728 175 263 1881 4 8 0 1087 3 50 88 0 1 +"30410" 4 561 5 1 1 1 1 1 100 728 263 395 1460 8 2 1 1651 3 50 132 1 1 +"30411" 4 561 5 1 1 1 1 1 100 728 395 197 1720 2 1 0 1184 3 50 198 1 0 +"30412" 4 561 5 1 1 1 1 1 100 728 197 296 1450 7 6 1 987 3 50 99 1 1 +"30413" 4 561 5 1 1 1 1 1 100 728 296 444 1918 5 3 1 1076 3 50 148 1 1 +"30414" 4 561 5 1 1 1 1 1 100 728 444 555 1644 3 6 0 1134 2 25 111 0 1 +"30415" 4 561 5 1 1 1 1 1 100 728 555 416 1498 6 9 1 1400 2 25 139 0 0 +"30416" 4 561 5 1 1 1 1 1 100 728 416 728 1245 1 4 0 1193 4 75 312 0 1 +"30417" 4 561 5 1 1 1 2 1 100 632 100 150 1660 2 9 0 997 3 50 50 0 1 +"30418" 4 561 5 1 1 1 2 1 100 632 150 188 1728 4 10 0 1118 2 25 38 0 1 +"30419" 4 561 5 1 1 1 2 1 100 632 188 94 1415 3 1 0 686 3 50 94 1 0 +"30420" 4 561 5 1 1 1 2 1 100 632 94 141 1068 8 6 1 1998 3 50 47 1 1 +"30421" 4 561 5 1 1 1 2 1 100 632 141 247 2043 5 7 0 1250 4 75 106 0 1 +"30422" 4 561 5 1 1 1 2 1 100 632 247 123 1707 6 8 1 1073 3 50 124 0 0 +"30423" 
4 561 5 1 1 1 2 1 100 632 123 185 1455 7 5 1 1012 3 50 62 1 1 +"30424" 4 561 5 1 1 1 2 1 100 632 185 361 2144 1 10 0 1570 5 95 176 0 1 +"30425" 4 561 5 1 1 1 2 1 100 632 361 632 1480 9 3 1 1717 4 75 271 1 1 +"30426" 4 561 5 1 1 1 3 1 100 399 100 150 2024 3 5 0 536 3 50 50 0 1 +"30427" 4 561 5 1 1 1 3 1 100 399 150 37 1130 8 9 1 1244 4 75 113 0 0 +"30428" 4 561 5 1 1 1 3 1 100 399 37 65 1094 2 4 0 1324 4 75 28 0 1 +"30429" 4 561 5 1 1 1 3 1 100 399 65 81 1739 6 3 1 2112 2 25 16 1 1 +"30430" 4 561 5 1 1 1 3 1 100 399 81 122 1332 7 6 1 1205 3 50 41 1 1 +"30431" 4 561 5 1 1 1 3 1 100 399 122 91 1449 4 2 0 2390 2 25 31 1 0 +"30432" 4 561 5 1 1 1 3 1 100 399 91 177 1159 1 8 0 1136 5 95 86 0 1 +"30433" 4 561 5 1 1 1 3 1 100 399 177 266 1727 5 7 0 1063 3 50 89 0 1 +"30434" 4 561 5 1 1 1 3 1 100 399 266 399 1444 9 5 1 1976 3 50 133 1 1 +"30435" 4 561 5 1 1 1 4 1 100 263 100 150 2603 8 7 1 963 3 50 50 1 1 +"30436" 4 561 5 1 1 1 4 1 100 263 150 225 1318 3 10 0 591 3 50 75 0 1 +"30437" 4 561 5 1 1 1 4 1 100 263 225 169 1300 7 9 1 1467 2 25 56 0 0 +"30438" 4 561 5 1 1 1 4 1 100 263 169 296 1092 9 1 1 1124 4 75 127 1 1 +"30439" 4 561 5 1 1 1 4 1 100 263 296 444 1588 2 3 0 1123 3 50 148 0 1 +"30440" 4 561 5 1 1 1 4 1 100 263 444 666 1224 1 8 0 1139 3 50 222 0 1 +"30441" 4 561 5 1 1 1 4 1 100 263 666 333 1729 5 4 0 962 3 50 333 1 0 +"30442" 4 561 5 1 1 1 4 1 100 263 333 250 1330 4 2 0 951 2 25 83 1 0 +"30443" 4 561 5 1 1 1 4 1 100 263 250 263 1194 6 3 1 1668 1 5 13 1 1 +"30444" 4 562 2 0 1 1 1 1 100 134 100 150 5119 8 3 1 1185 3 50 50 1 1 +"30445" 4 562 2 0 1 1 1 1 100 134 150 188 7797 3 7 0 3529 2 25 38 0 1 +"30446" 4 562 2 0 1 1 1 1 100 134 188 179 2860 8 2 0 2585 1 5 9 1 0 +"30447" 4 562 2 0 1 1 1 1 100 134 179 134 2396 2 1 0 1338 2 25 45 1 0 +"30448" 4 562 3 1 1 1 1 1 100 501 100 150 4546 9 7 1 582 3 50 50 1 1 +"30449" 4 562 3 1 1 1 1 1 100 501 150 225 3535 4 8 0 664 3 50 75 0 1 +"30450" 4 562 3 1 1 1 1 1 100 501 225 338 3017 8 2 1 687 3 50 113 1 1 +"30451" 4 562 3 1 1 1 1 1 100 501 338 169 2110 2 1 0 775 3 50 169 1 0 +"30452" 4 562 3 1 1 1 1 1 100 501 169 254 2595 7 6 1 710 3 50 85 1 1 +"30453" 4 562 3 1 1 1 1 1 100 501 254 381 2489 5 3 1 883 3 50 127 1 1 +"30454" 4 562 3 1 1 1 1 1 100 501 381 572 1614 3 6 0 625 3 50 191 0 1 +"30455" 4 562 3 1 1 1 1 1 100 501 572 286 2231 6 9 1 683 3 50 286 0 0 +"30456" 4 562 3 1 1 1 1 1 100 501 286 501 2083 1 4 0 818 4 75 215 0 1 +"30457" 4 562 3 1 1 1 2 1 100 488 100 150 3519 2 9 0 442 3 50 50 0 1 +"30458" 4 562 3 1 1 1 2 1 100 488 150 225 2658 4 10 0 807 3 50 75 0 1 +"30459" 4 562 3 1 1 1 2 1 100 488 225 169 2224 3 1 0 598 2 25 56 1 0 +"30460" 4 562 3 1 1 1 2 1 100 488 169 254 3210 8 6 1 835 3 50 85 1 1 +"30461" 4 562 3 1 1 1 2 1 100 488 254 381 4185 5 7 0 854 3 50 127 0 1 +"30462" 4 562 3 1 1 1 2 1 100 488 381 95 2381 6 8 1 548 4 75 286 0 0 +"30463" 4 562 3 1 1 1 2 1 100 488 95 143 2072 7 5 1 424 3 50 48 1 1 +"30464" 4 562 3 1 1 1 2 1 100 488 143 250 2380 1 10 0 921 4 75 107 0 1 +"30465" 4 562 3 1 1 1 2 1 100 488 250 488 2136 9 3 1 914 5 95 238 1 1 +"30466" 4 562 3 1 1 1 3 1 100 273 100 150 2361 3 5 0 716 3 50 50 0 1 +"30467" 4 562 3 1 1 1 3 1 100 273 150 37 1619 8 9 1 1160 4 75 113 0 0 +"30468" 4 562 3 1 1 1 3 1 100 273 37 56 1667 2 4 0 805 3 50 19 0 1 +"30469" 4 562 3 1 1 1 3 1 100 273 56 84 1736 6 3 1 1431 3 50 28 1 1 +"30470" 4 562 3 1 1 1 3 1 100 273 84 147 1297 7 6 1 1248 4 75 63 1 1 +"30471" 4 562 3 1 1 1 3 1 100 273 147 37 1208 4 2 0 908 4 75 110 1 0 +"30472" 4 562 3 1 1 1 3 1 100 273 37 72 2601 1 8 0 1178 5 95 35 0 1 +"30473" 4 562 3 1 1 1 3 1 100 273 72 
140 1852 5 7 0 839 5 95 68 0 1 +"30474" 4 562 3 1 1 1 3 1 100 273 140 273 1630 9 5 1 973 5 95 133 1 1 +"30475" 4 562 3 1 1 1 4 1 100 710 100 195 2423 8 7 1 997 5 95 95 1 1 +"30476" 4 562 3 1 1 1 4 1 100 710 195 341 1404 3 10 0 755 4 75 146 0 1 +"30477" 4 562 3 1 1 1 4 1 100 710 341 170 1558 7 9 1 522 3 50 171 0 0 +"30478" 4 562 3 1 1 1 4 1 100 710 170 332 1570 9 1 1 1315 5 95 162 1 1 +"30479" 4 562 3 1 1 1 4 1 100 710 332 647 1800 2 3 0 753 5 95 315 0 1 +"30480" 4 562 3 1 1 1 4 1 100 710 647 971 1993 1 8 0 932 3 50 324 0 1 +"30481" 4 562 3 1 1 1 4 1 100 710 971 728 1915 5 4 0 483 2 25 243 1 0 +"30482" 4 562 3 1 1 1 4 1 100 710 728 364 1751 4 2 0 718 3 50 364 1 0 +"30483" 4 562 3 1 1 1 4 1 100 710 364 710 2293 6 3 1 749 5 95 346 1 1 +"30484" 4 562 4 0 1 0 1 1 100 197 100 150 4222 2 7 0 703 3 50 50 0 1 +"30485" 4 562 4 0 1 0 1 1 100 197 150 263 2296 3 7 0 815 2 75 113 0 1 +"30486" 4 562 4 0 1 0 1 1 100 197 263 395 2055 2 8 0 782 3 50 132 0 1 +"30487" 4 562 4 0 1 0 1 1 100 197 395 197 1479 8 9 1 887 3 50 198 0 0 +"30488" 4 562 5 1 1 0 1 0 100 1 100 195 2995 1 3 0 414 1 95 95 0 1 +"30489" 4 562 5 1 1 0 1 0 100 1 195 293 1367 6 5 1 684 3 50 98 1 1 +"30490" 4 562 5 1 1 0 1 0 100 1 293 440 1557 2 8 0 587 3 50 147 0 1 +"30491" 4 562 5 1 1 0 1 0 100 1 440 110 1155 8 9 1 727 2 75 330 0 0 +"30492" 4 562 5 1 1 0 1 0 100 1 110 215 1372 3 4 0 530 1 95 105 0 1 +"30493" 4 562 5 1 1 0 1 0 100 1 215 11 1448 5 7 1 620 1 95 204 0 0 +"30494" 4 562 5 1 1 0 1 0 100 1 11 1 1815 7 4 0 786 1 95 10 1 0 +"30495" 4 562 5 1 1 0 2 0 100 0 100 195 1772 8 1 1 728 1 95 95 1 1 +"30496" 4 562 5 1 1 0 2 0 100 0 195 49 2147 6 2 0 910 2 75 146 1 0 +"30497" 4 562 5 1 1 0 2 0 100 0 49 2 1914 7 9 1 671 1 95 47 0 0 +"30498" 4 562 5 1 1 0 2 0 100 0 2 4 3906 2 10 0 386 1 95 2 0 1 +"30499" 4 562 5 1 1 0 2 0 100 0 4 0 1369 5 3 0 623 1 95 4 1 0 +"30500" 4 562 5 1 1 0 3 1 100 181 100 150 1444 7 5 1 667 3 50 50 1 1 +"30501" 4 562 5 1 1 0 3 1 100 181 150 75 1241 2 1 0 818 3 50 75 1 0 +"30502" 4 562 5 1 1 0 3 1 100 181 75 146 1322 8 6 1 926 1 95 71 1 1 +"30503" 4 562 5 1 1 0 3 1 100 181 146 256 1294 4 7 0 1269 2 75 110 0 1 +"30504" 4 562 5 1 1 0 3 1 100 181 256 384 1571 3 10 0 793 3 50 128 0 1 +"30505" 4 562 5 1 1 0 3 1 100 181 384 192 1377 6 8 1 768 3 50 192 0 0 +"30506" 4 562 5 1 1 0 3 1 100 181 192 374 1392 9 2 1 435 1 95 182 1 1 +"30507" 4 562 5 1 1 0 3 1 100 181 374 93 1594 5 3 0 777 2 75 281 1 0 +"30508" 4 562 5 1 1 0 3 1 100 181 93 181 1530 1 10 0 444 1 95 88 0 1 +"30509" 4 562 5 1 1 0 4 1 100 943 100 195 1714 2 3 0 661 1 95 95 0 1 +"30510" 4 562 5 1 1 0 4 1 100 943 195 293 1404 7 4 1 624 3 50 98 1 1 +"30511" 4 562 5 1 1 0 4 1 100 943 293 73 1191 3 1 0 1484 2 75 220 1 0 +"30512" 4 562 5 1 1 0 4 1 100 943 73 142 1188 1 9 0 431 1 95 69 0 1 +"30513" 4 562 5 1 1 0 4 1 100 943 142 277 1134 8 7 1 576 1 95 135 1 1 +"30514" 4 562 5 1 1 0 4 1 100 943 277 540 1240 9 2 1 595 1 95 263 1 1 +"30515" 4 562 5 1 1 0 4 1 100 943 540 945 1532 5 6 0 807 2 75 405 0 1 +"30516" 4 562 5 1 1 0 4 1 100 943 945 898 2164 6 8 1 1038 5 5 47 0 0 +"30517" 4 562 5 1 1 0 4 1 100 943 898 943 1343 4 7 0 541 5 5 45 0 1 +"30518" 4 566 2 0 1 1 1 1 100 224 100 150 16291 8 3 1 2887 3 50 50 1 1 +"30519" 4 566 2 0 1 1 1 1 100 224 150 225 6406 3 7 0 763 3 50 75 0 1 +"30520" 4 566 2 0 1 1 1 1 100 224 225 236 2459 8 2 1 3080 1 5 11 1 1 +"30521" 4 566 2 0 1 1 1 1 100 224 236 224 2244 2 1 0 372 1 5 12 1 0 +"30522" 4 566 3 1 1 1 1 1 100 1537 100 195 15007 9 7 1 0 5 95 95 1 1 +"30523" 4 566 3 1 1 1 1 1 100 1537 195 293 14142 4 8 0 1262 3 50 98 0 1 +"30524" 4 566 3 1 1 1 1 1 100 1537 
293 571 4904 8 2 1 1221 5 95 278 1 1 +"30525" 4 566 3 1 1 1 1 1 100 1537 571 285 2267 2 1 0 387 3 50 286 1 0 +"30526" 4 566 3 1 1 1 1 1 100 1537 285 499 2523 7 6 1 832 4 75 214 1 1 +"30527" 4 566 3 1 1 1 1 1 100 1537 499 474 5059 5 3 0 428 1 5 25 1 0 +"30528" 4 566 3 1 1 1 1 1 100 1537 474 830 1801 3 6 0 869 4 75 356 0 1 +"30529" 4 566 3 1 1 1 1 1 100 1537 830 788 3843 6 9 1 797 1 5 42 0 0 +"30530" 4 566 3 1 1 1 1 1 100 1537 788 1537 2806 1 4 0 0 5 95 749 0 1 +"30531" 4 566 3 1 1 1 2 1 100 1244 100 195 9301 2 9 0 2769 5 95 95 0 1 +"30532" 4 566 3 1 1 1 2 1 100 1244 195 244 3575 4 10 0 1817 2 25 49 0 1 +"30533" 4 566 3 1 1 1 2 1 100 1244 244 183 2133 3 1 0 440 2 25 61 1 0 +"30534" 4 566 3 1 1 1 2 1 100 1244 183 357 2468 8 6 1 0 5 95 174 1 1 +"30535" 4 566 3 1 1 1 2 1 100 1244 357 375 6384 5 7 0 412 1 5 18 0 1 +"30536" 4 566 3 1 1 1 2 1 100 1244 375 187 2515 6 8 1 936 3 50 188 0 0 +"30537" 4 566 3 1 1 1 2 1 100 1244 187 327 2082 7 5 1 3547 4 75 140 1 1 +"30538" 4 566 3 1 1 1 2 1 100 1244 327 638 2004 1 10 0 0 5 95 311 0 1 +"30539" 4 566 3 1 1 1 2 1 100 1244 638 1244 4045 9 3 1 1518 5 95 606 1 1 +"30540" 4 566 3 1 1 1 3 1 100 152 100 195 4762 3 5 0 0 5 95 95 0 1 +"30541" 4 566 3 1 1 1 3 1 100 152 195 10 2092 8 9 1 0 5 95 185 0 0 +"30542" 4 566 3 1 1 1 3 1 100 152 10 18 2240 2 4 0 515 4 75 8 0 1 +"30543" 4 566 3 1 1 1 3 1 100 152 18 35 2007 6 3 1 0 5 95 17 1 1 +"30544" 4 566 3 1 1 1 3 1 100 152 35 44 10395 7 6 1 785 2 25 9 1 1 +"30545" 4 566 3 1 1 1 3 1 100 152 44 42 1771 4 2 0 1004 1 5 2 1 0 +"30546" 4 566 3 1 1 1 3 1 100 152 42 82 1398 1 8 0 0 5 95 40 0 1 +"30547" 4 566 3 1 1 1 3 1 100 152 82 78 3552 5 7 1 550 1 5 4 0 0 +"30548" 4 566 3 1 1 1 3 1 100 152 78 152 2295 9 5 1 0 5 95 74 1 1 +"30549" 4 566 3 1 1 1 4 1 100 2843 100 195 5646 8 7 1 0 5 95 95 1 1 +"30550" 4 566 3 1 1 1 4 1 100 2843 195 293 1962 3 10 0 2545 3 50 98 0 1 +"30551" 4 566 3 1 1 1 4 1 100 2843 293 366 3445 7 9 0 446 2 25 73 0 1 +"30552" 4 566 3 1 1 1 4 1 100 2843 366 714 1904 9 1 1 0 5 95 348 1 1 +"30553" 4 566 3 1 1 1 4 1 100 2843 714 1392 1929 2 3 0 2993 5 95 678 0 1 +"30554" 4 566 3 1 1 1 4 1 100 2843 1392 2714 2149 1 8 0 1557 5 95 1322 0 1 +"30555" 4 566 3 1 1 1 4 1 100 2843 2714 2850 2834 5 4 1 359 1 5 136 1 1 +"30556" 4 566 3 1 1 1 4 1 100 2843 2850 2993 3716 4 2 1 504 1 5 143 1 1 +"30557" 4 566 3 1 1 1 4 1 100 2843 2993 2843 6158 6 3 0 251 1 5 150 1 0 +"30558" 4 566 4 0 1 0 1 1 100 1297 100 175 10697 2 7 0 4166 2 75 75 0 1 +"30559" 4 566 4 0 1 0 1 1 100 1297 175 341 5504 3 7 0 380 1 95 166 0 1 +"30560" 4 566 4 0 1 0 1 1 100 1297 341 665 3960 2 8 0 1655 1 95 324 0 1 +"30561" 4 566 4 0 1 0 1 1 100 1297 665 1297 1756 8 9 0 391 1 95 632 0 1 +"30562" 4 566 5 1 1 0 1 1 100 6240 100 195 3107 1 3 0 415 1 95 95 0 1 +"30563" 4 566 5 1 1 0 1 1 100 6240 195 293 1510 6 5 1 1078 3 50 98 1 1 +"30564" 4 566 5 1 1 0 1 1 100 6240 293 513 1538 2 8 0 659 2 75 220 0 1 +"30565" 4 566 5 1 1 0 1 1 100 6240 513 641 6903 8 9 0 2333 4 25 128 0 1 +"30566" 4 566 5 1 1 0 1 1 100 6240 641 1250 1680 3 4 0 4257 1 95 609 0 1 +"30567" 4 566 5 1 1 0 1 1 100 6240 1250 1313 3692 5 7 0 0 5 5 63 0 1 +"30568" 4 566 5 1 1 0 1 1 100 6240 1313 1641 4840 7 4 1 4185 4 25 328 1 1 +"30569" 4 566 5 1 1 0 1 1 100 6240 1641 3200 4258 4 1 1 300 1 95 1559 1 1 +"30570" 4 566 5 1 1 0 1 1 100 6240 3200 6240 1918 9 6 1 2649 1 95 3040 1 1 +"30571" 4 566 5 1 1 0 2 1 100 228 100 195 3074 8 1 1 366 1 95 95 1 1 +"30572" 4 566 5 1 1 0 2 1 100 228 195 293 4413 6 2 1 414 3 50 98 1 1 +"30573" 4 566 5 1 1 0 2 1 100 228 293 366 6687 7 9 0 1648 4 25 73 0 1 +"30574" 4 566 5 1 1 0 2 1 
100 228 366 714 1252 2 10 0 1003 1 95 348 0 1 +"30575" 4 566 5 1 1 0 2 1 100 228 714 678 3942 5 3 0 0 5 5 36 1 0 +"30576" 4 566 5 1 1 0 2 1 100 228 678 34 4293 4 2 0 281 1 95 644 1 0 +"30577" 4 566 5 1 1 0 2 1 100 228 34 60 1439 3 5 0 366 2 75 26 0 1 +"30578" 4 566 5 1 1 0 2 1 100 228 60 117 1359 9 4 1 349 1 95 57 1 1 +"30579" 4 566 5 1 1 0 2 1 100 228 117 228 1239 1 7 0 333 1 95 111 0 1 +"30580" 4 566 5 1 1 0 3 1 100 43 100 195 1994 7 5 1 401 1 95 95 1 1 +"30581" 4 566 5 1 1 0 3 1 100 43 195 10 1531 2 1 0 393 1 95 185 1 0 +"30582" 4 566 5 1 1 0 3 1 100 43 10 18 1303 8 6 1 1336 2 75 8 1 1 +"30583" 4 566 5 1 1 0 3 1 100 43 18 32 7049 4 7 0 2253 2 75 14 0 1 +"30584" 4 566 5 1 1 0 3 1 100 43 32 8 4101 3 10 1 921 2 75 24 0 0 +"30585" 4 566 5 1 1 0 3 1 100 43 8 12 6132 6 8 0 419 3 50 4 0 1 +"30586" 4 566 5 1 1 0 3 1 100 43 12 21 1729 9 2 1 442 2 75 9 1 1 +"30587" 4 566 5 1 1 0 3 1 100 43 21 22 3286 5 3 1 0 5 5 1 1 1 +"30588" 4 566 5 1 1 0 3 1 100 43 22 43 1510 1 10 0 2573 1 95 21 0 1 +"30589" 4 566 5 1 1 0 4 1 100 10604 100 195 5499 2 3 0 399 1 95 95 0 1 +"30590" 4 566 5 1 1 0 4 1 100 10604 195 380 1297 7 4 1 319 1 95 185 1 1 +"30591" 4 566 5 1 1 0 4 1 100 10604 380 741 7405 3 1 1 432 1 95 361 1 1 +"30592" 4 566 5 1 1 0 4 1 100 10604 741 1445 1655 1 9 0 612 1 95 704 0 1 +"30593" 4 566 5 1 1 0 4 1 100 10604 1445 2529 1446 8 7 1 365 2 75 1084 1 1 +"30594" 4 566 5 1 1 0 4 1 100 10604 2529 4932 1559 9 2 1 306 1 95 2403 1 1 +"30595" 4 566 5 1 1 0 4 1 100 10604 4932 5179 3056 5 6 0 0 5 5 247 0 1 +"30596" 4 566 5 1 1 0 4 1 100 10604 5179 10099 9579 6 8 0 686 1 95 4920 0 1 +"30597" 4 566 5 1 1 0 4 1 100 10604 10099 10604 7305 4 7 0 0 5 5 505 0 1 +"30598" 4 575 2 0 1 0 1 1 100 115 100 150 25348 2 7 0 2334 3 50 50 0 1 +"30599" 4 575 2 0 1 0 1 1 100 115 150 263 6329 3 7 0 1724 2 75 113 0 1 +"30600" 4 575 2 0 1 0 1 1 100 115 263 460 2623 2 8 0 1605 2 75 197 0 1 +"30601" 4 575 2 0 1 0 1 1 100 115 460 115 2431 8 9 1 812 2 75 345 0 0 +"30602" 4 575 3 1 1 0 1 1 100 1183 100 195 2777 1 3 0 4954 1 95 95 0 1 +"30603" 4 575 3 1 1 0 1 1 100 1183 195 293 2672 6 5 1 878 3 50 98 1 1 +"30604" 4 575 3 1 1 0 1 1 100 1183 293 513 2153 2 8 0 1179 2 75 220 0 1 +"30605" 4 575 3 1 1 0 1 1 100 1183 513 641 2996 8 9 0 1222 4 25 128 0 1 +"30606" 4 575 3 1 1 0 1 1 100 1183 641 962 2242 3 4 0 1422 3 50 321 0 1 +"30607" 4 575 3 1 1 0 1 1 100 1183 962 721 4596 5 7 1 2267 4 25 241 0 0 +"30608" 4 575 3 1 1 0 1 1 100 1183 721 901 2300 7 4 1 1766 4 25 180 1 1 +"30609" 4 575 3 1 1 0 1 1 100 1183 901 676 2897 4 1 0 1270 4 25 225 1 0 +"30610" 4 575 3 1 1 0 1 1 100 1183 676 1183 3922 9 6 1 841 2 75 507 1 1 +"30611" 4 575 3 1 1 0 2 1 100 1975 100 175 4082 8 1 1 1768 2 75 75 1 1 +"30612" 4 575 3 1 1 0 2 1 100 1975 175 219 2981 6 2 1 848 4 25 44 1 1 +"30613" 4 575 3 1 1 0 2 1 100 1975 219 274 2927 7 9 0 1439 4 25 55 0 1 +"30614" 4 575 3 1 1 0 2 1 100 1975 274 411 2347 2 10 0 1234 3 50 137 0 1 +"30615" 4 575 3 1 1 0 2 1 100 1975 411 617 2502 5 3 1 1113 3 50 206 1 1 +"30616" 4 575 3 1 1 0 2 1 100 1975 617 463 2256 4 2 0 874 4 25 154 1 0 +"30617" 4 575 3 1 1 0 2 1 100 1975 463 579 2114 3 5 0 1057 4 25 116 0 1 +"30618" 4 575 3 1 1 0 2 1 100 1975 579 1013 2364 9 4 1 2262 2 75 434 1 1 +"30619" 4 575 3 1 1 0 2 1 100 1975 1013 1975 2220 1 7 0 2408 1 95 962 0 1 +"30620" 4 575 3 1 1 0 3 1 100 47 100 125 2185 7 5 1 933 4 25 25 1 1 +"30621" 4 575 3 1 1 0 3 1 100 47 125 62 1770 2 1 0 784 3 50 63 1 0 +"30622" 4 575 3 1 1 0 3 1 100 47 62 109 1980 8 6 1 2020 2 75 47 1 1 +"30623" 4 575 3 1 1 0 3 1 100 47 109 164 2431 4 7 0 1788 3 50 55 0 1 +"30624" 4 575 3 1 1 
0 3 1 100 47 164 123 2367 3 10 1 1543 4 25 41 0 0 +"30625" 4 575 3 1 1 0 3 1 100 47 123 61 1620 6 8 1 1154 3 50 62 0 0 +"30626" 4 575 3 1 1 0 3 1 100 47 61 107 2020 9 2 1 831 2 75 46 1 1 +"30627" 4 575 3 1 1 0 3 1 100 47 107 27 2248 5 3 0 1355 2 75 80 1 0 +"30628" 4 575 3 1 1 0 3 1 100 47 27 47 1928 1 10 0 1413 2 75 20 0 1 +"30629" 4 575 3 1 1 0 4 1 100 2733 100 175 2338 2 3 0 1116 2 75 75 0 1 +"30630" 4 575 3 1 1 0 4 1 100 2733 175 263 2187 7 4 1 883 3 50 88 1 1 +"30631" 4 575 3 1 1 0 4 1 100 2733 263 329 2063 3 1 1 1741 4 25 66 1 1 +"30632" 4 575 3 1 1 0 4 1 100 2733 329 642 1656 1 9 0 2326 1 95 313 0 1 +"30633" 4 575 3 1 1 0 4 1 100 2733 642 1124 2015 8 7 1 1755 2 75 482 1 1 +"30634" 4 575 3 1 1 0 4 1 100 2733 1124 2192 2128 9 2 1 1410 1 95 1068 1 1 +"30635" 4 575 3 1 1 0 4 1 100 2733 2192 2740 2001 5 6 0 1977 4 25 548 0 1 +"30636" 4 575 3 1 1 0 4 1 100 2733 2740 2877 2072 6 8 0 1354 5 5 137 0 1 +"30637" 4 575 3 1 1 0 4 1 100 2733 2877 2733 2160 4 7 1 913 5 5 144 0 0 +"30638" 4 575 4 0 1 1 1 1 100 207 100 150 10607 8 3 1 1154 3 50 50 1 1 +"30639" 4 575 4 0 1 1 1 1 100 207 150 188 2863 3 7 0 805 2 25 38 0 1 +"30640" 4 575 4 0 1 1 1 1 100 207 188 197 2809 8 2 1 846 1 5 9 1 1 +"30641" 4 575 4 0 1 1 1 1 100 207 197 207 2078 2 1 1 675 1 5 10 1 1 +"30642" 4 575 5 1 1 1 1 1 100 557 100 150 1885 9 7 1 665 3 50 50 1 1 +"30643" 4 575 5 1 1 1 1 1 100 557 150 188 1484 4 8 0 2330 2 25 38 0 1 +"30644" 4 575 5 1 1 1 1 1 100 557 188 282 1054 8 2 1 1183 3 50 94 1 1 +"30645" 4 575 5 1 1 1 1 1 100 557 282 211 1246 2 1 0 1331 2 25 71 1 0 +"30646" 4 575 5 1 1 1 1 1 100 557 211 264 1259 7 6 1 1697 2 25 53 1 1 +"30647" 4 575 5 1 1 1 1 1 100 557 264 198 1114 5 3 0 972 2 25 66 1 0 +"30648" 4 575 5 1 1 1 1 1 100 557 198 297 1136 3 6 0 2171 3 50 99 0 1 +"30649" 4 575 5 1 1 1 1 1 100 557 297 371 1560 6 9 0 671 2 25 74 0 1 +"30650" 4 575 5 1 1 1 1 1 100 557 371 557 1199 1 4 0 1085 3 50 186 0 1 +"30651" 4 575 5 1 1 1 2 1 100 374 100 95 2913 2 9 1 782 1 5 5 0 0 +"30652" 4 575 5 1 1 1 2 1 100 374 95 119 1036 4 10 0 878 2 25 24 0 1 +"30653" 4 575 5 1 1 1 2 1 100 374 119 149 1459 3 1 1 845 2 25 30 1 1 +"30654" 4 575 5 1 1 1 2 1 100 374 149 224 1402 8 6 1 940 3 50 75 1 1 +"30655" 4 575 5 1 1 1 2 1 100 374 224 280 1253 5 7 0 3443 2 25 56 0 1 +"30656" 4 575 5 1 1 1 2 1 100 374 280 210 1879 6 8 1 2503 2 25 70 0 0 +"30657" 4 575 5 1 1 1 2 1 100 374 210 315 1412 7 5 1 1168 3 50 105 1 1 +"30658" 4 575 5 1 1 1 2 1 100 374 315 394 1194 1 10 0 1461 2 25 79 0 1 +"30659" 4 575 5 1 1 1 2 1 100 374 394 374 2313 9 3 0 765 1 5 20 1 0 +"30660" 4 575 5 1 1 1 3 1 100 87 100 75 2371 3 5 1 785 2 25 25 0 0 +"30661" 4 575 5 1 1 1 3 1 100 87 75 37 1388 8 9 1 1379 3 50 38 0 0 +"30662" 4 575 5 1 1 1 3 1 100 87 37 56 1418 2 4 0 974 3 50 19 0 1 +"30663" 4 575 5 1 1 1 3 1 100 87 56 84 1384 6 3 1 927 3 50 28 1 1 +"30664" 4 575 5 1 1 1 3 1 100 87 84 105 1458 7 6 1 1587 2 25 21 1 1 +"30665" 4 575 5 1 1 1 3 1 100 87 105 52 1040 4 2 0 1287 3 50 53 1 0 +"30666" 4 575 5 1 1 1 3 1 100 87 52 78 1283 1 8 0 1082 3 50 26 0 1 +"30667" 4 575 5 1 1 1 3 1 100 87 78 58 1391 5 7 1 1632 2 25 20 0 0 +"30668" 4 575 5 1 1 1 3 1 100 87 58 87 2682 9 5 1 864 3 50 29 1 1 +"30669" 4 575 5 1 1 1 4 1 100 187 100 150 1616 8 7 1 788 3 50 50 1 1 +"30670" 4 575 5 1 1 1 4 1 100 187 150 188 1267 3 10 0 828 2 25 38 0 1 +"30671" 4 575 5 1 1 1 4 1 100 187 188 94 1665 7 9 1 903 3 50 94 0 0 +"30672" 4 575 5 1 1 1 4 1 100 187 94 118 1164 9 1 1 1111 2 25 24 1 1 +"30673" 4 575 5 1 1 1 4 1 100 187 118 177 1061 2 3 0 880 3 50 59 0 1 +"30674" 4 575 5 1 1 1 4 1 100 187 177 266 1539 1 8 0 645 3 
50 89 0 1 +"30675" 4 575 5 1 1 1 4 1 100 187 266 333 1156 5 4 1 842 2 25 67 1 1 +"30676" 4 575 5 1 1 1 4 1 100 187 333 250 2349 4 2 0 745 2 25 83 1 0 +"30677" 4 575 5 1 1 1 4 1 100 187 250 187 1373 6 3 0 813 2 25 63 1 0 +"30678" 4 583 2 0 1 1 1 1 100 106 100 105 25011 8 3 1 1579 1 5 5 1 1 +"30679" 4 583 2 0 1 1 1 1 100 106 105 52 9333 3 7 1 1317 3 50 53 0 0 +"30680" 4 583 2 0 1 1 1 1 100 106 52 101 3376 8 2 1 1380 5 95 49 1 1 +"30681" 4 583 2 0 1 1 1 1 100 106 101 106 4077 2 1 1 916 1 5 5 1 1 +"30682" 4 583 3 1 1 1 1 1 100 513 100 175 2921 9 7 1 1045 4 75 75 1 1 +"30683" 4 583 3 1 1 1 1 1 100 513 175 166 4370 4 8 1 1374 1 5 9 0 0 +"30684" 4 583 3 1 1 1 1 1 100 513 166 208 2669 8 2 1 694 2 25 42 1 1 +"30685" 4 583 3 1 1 1 1 1 100 513 208 198 3879 2 1 0 2492 1 5 10 1 0 +"30686" 4 583 3 1 1 1 1 1 100 513 198 297 1784 7 6 1 662 3 50 99 1 1 +"30687" 4 583 3 1 1 1 1 1 100 513 297 312 4839 5 3 1 1220 1 5 15 1 1 +"30688" 4 583 3 1 1 1 1 1 100 513 312 390 1793 3 6 0 1294 2 25 78 0 1 +"30689" 4 583 3 1 1 1 1 1 100 513 390 410 2208 6 9 0 1018 1 5 20 0 1 +"30690" 4 583 3 1 1 1 1 1 100 513 410 513 1555 1 4 0 2619 2 25 103 0 1 +"30691" 4 583 3 1 1 1 2 1 100 521 100 125 2475 2 9 0 678 2 25 25 0 1 +"30692" 4 583 3 1 1 1 2 1 100 521 125 156 1770 4 10 0 1146 2 25 31 0 1 +"30693" 4 583 3 1 1 1 2 1 100 521 156 164 1982 3 1 1 3746 1 5 8 1 1 +"30694" 4 583 3 1 1 1 2 1 100 521 164 205 3186 8 6 1 1863 2 25 41 1 1 +"30695" 4 583 3 1 1 1 2 1 100 521 205 195 1817 5 7 1 970 1 5 10 0 0 +"30696" 4 583 3 1 1 1 2 1 100 521 195 185 1402 6 8 1 1085 1 5 10 0 0 +"30697" 4 583 3 1 1 1 2 1 100 521 185 278 1762 7 5 1 1377 3 50 93 1 1 +"30698" 4 583 3 1 1 1 2 1 100 521 278 417 1835 1 10 0 1073 3 50 139 0 1 +"30699" 4 583 3 1 1 1 2 1 100 521 417 521 1993 9 3 1 920 2 25 104 1 1 +"30700" 4 583 3 1 1 1 3 1 100 380 100 105 2665 3 5 0 859 1 5 5 0 1 +"30701" 4 583 3 1 1 1 3 1 100 380 105 110 2646 8 9 0 676 1 5 5 0 1 +"30702" 4 583 3 1 1 1 3 1 100 380 110 193 1977 2 4 0 524 4 75 83 0 1 +"30703" 4 583 3 1 1 1 3 1 100 380 193 183 2349 6 3 0 1121 1 5 10 1 0 +"30704" 4 583 3 1 1 1 3 1 100 380 183 229 1415 7 6 1 1234 2 25 46 1 1 +"30705" 4 583 3 1 1 1 3 1 100 380 229 218 2015 4 2 0 2820 1 5 11 1 0 +"30706" 4 583 3 1 1 1 3 1 100 380 218 207 2320 1 8 1 1206 1 5 11 0 0 +"30707" 4 583 3 1 1 1 3 1 100 380 207 217 2030 5 7 0 921 1 5 10 0 1 +"30708" 4 583 3 1 1 1 3 1 100 380 217 380 1333 9 5 1 552 4 75 163 1 1 +"30709" 4 583 3 1 1 1 4 1 100 218 100 105 2167 8 7 1 1180 1 5 5 1 1 +"30710" 4 583 3 1 1 1 4 1 100 218 105 131 1164 3 10 0 461 2 25 26 0 1 +"30711" 4 583 3 1 1 1 4 1 100 218 131 98 1682 7 9 1 1116 2 25 33 0 0 +"30712" 4 583 3 1 1 1 4 1 100 218 98 172 1597 9 1 1 1401 4 75 74 1 1 +"30713" 4 583 3 1 1 1 4 1 100 218 172 258 1721 2 3 0 819 3 50 86 0 1 +"30714" 4 583 3 1 1 1 4 1 100 218 258 245 2129 1 8 1 1260 1 5 13 0 0 +"30715" 4 583 3 1 1 1 4 1 100 218 245 306 1607 5 4 1 935 2 25 61 1 1 +"30716" 4 583 3 1 1 1 4 1 100 218 306 291 2057 4 2 0 978 1 5 15 1 0 +"30717" 4 583 3 1 1 1 4 1 100 218 291 218 942 6 3 0 1862 2 25 73 1 0 +"30718" 4 583 4 0 1 0 1 1 100 13 100 195 3557 2 7 0 1766 1 95 95 0 1 +"30719" 4 583 4 0 1 0 1 1 100 13 195 341 2588 3 7 0 1427 2 75 146 0 1 +"30720" 4 583 4 0 1 0 1 1 100 13 341 256 1990 2 8 1 1412 4 25 85 0 0 +"30721" 4 583 4 0 1 0 1 1 100 13 256 13 1551 8 9 1 1152 1 95 243 0 0 +"30722" 4 583 5 1 1 0 1 1 100 68 100 195 1481 1 3 0 1338 1 95 95 0 1 +"30723" 4 583 5 1 1 0 1 1 100 68 195 380 4040 6 5 1 1342 1 95 185 1 1 +"30724" 4 583 5 1 1 0 1 1 100 68 380 741 3894 2 8 0 763 1 95 361 0 1 +"30725" 4 583 5 1 1 0 1 1 100 68 741 
370 1266 8 9 1 1225 3 50 371 0 0 +"30726" 4 583 5 1 1 0 1 1 100 68 370 722 2212 3 4 0 1296 1 95 352 0 1 +"30727" 4 583 5 1 1 0 1 1 100 68 722 361 2542 5 7 1 1164 3 50 361 0 0 +"30728" 4 583 5 1 1 0 1 1 100 68 361 704 1844 7 4 1 1065 1 95 343 1 1 +"30729" 4 583 5 1 1 0 1 1 100 68 704 35 2384 4 1 0 2151 1 95 669 1 0 +"30730" 4 583 5 1 1 0 1 1 100 68 35 68 1424 9 6 1 2331 1 95 33 1 1 +"30731" 4 583 5 1 1 0 2 1 100 9635 100 195 4301 8 1 1 1735 1 95 95 1 1 +"30732" 4 583 5 1 1 0 2 1 100 9635 195 380 1656 6 2 1 1559 1 95 185 1 1 +"30733" 4 583 5 1 1 0 2 1 100 9635 380 475 1811 7 9 0 632 4 25 95 0 1 +"30734" 4 583 5 1 1 0 2 1 100 9635 475 926 1953 2 10 0 1028 1 95 451 0 1 +"30735" 4 583 5 1 1 0 2 1 100 9635 926 1158 1363 5 3 1 1306 4 25 232 1 1 +"30736" 4 583 5 1 1 0 2 1 100 9635 1158 2027 2239 4 2 1 1314 2 75 869 1 1 +"30737" 4 583 5 1 1 0 2 1 100 9635 2027 2534 1641 3 5 0 735 4 25 507 0 1 +"30738" 4 583 5 1 1 0 2 1 100 9635 2534 4941 2670 9 4 1 1025 1 95 2407 1 1 +"30739" 4 583 5 1 1 0 2 1 100 9635 4941 9635 1362 1 7 0 1095 1 95 4694 0 1 +"30740" 4 583 5 1 1 0 3 1 100 15 100 195 2787 7 5 1 973 1 95 95 1 1 +"30741" 4 583 5 1 1 0 3 1 100 15 195 10 1389 2 1 0 943 1 95 185 1 0 +"30742" 4 583 5 1 1 0 3 1 100 15 10 20 1399 8 6 1 633 1 95 10 1 1 +"30743" 4 583 5 1 1 0 3 1 100 15 20 39 1920 4 7 0 763 1 95 19 0 1 +"30744" 4 583 5 1 1 0 3 1 100 15 39 76 1349 3 10 0 1055 1 95 37 0 1 +"30745" 4 583 5 1 1 0 3 1 100 15 76 4 1258 6 8 1 969 1 95 72 0 0 +"30746" 4 583 5 1 1 0 3 1 100 15 4 8 2426 9 2 1 922 1 95 4 1 1 +"30747" 4 583 5 1 1 0 3 1 100 15 8 16 1246 5 3 1 1272 1 95 8 1 1 +"30748" 4 583 5 1 1 0 3 1 100 15 16 15 812 1 10 1 730 5 5 1 0 0 +"30749" 4 583 5 1 1 0 4 1 100 1045 100 195 1892 2 3 0 1436 1 95 95 0 1 +"30750" 4 583 5 1 1 0 4 1 100 1045 195 380 1127 7 4 1 896 1 95 185 1 1 +"30751" 4 583 5 1 1 0 4 1 100 1045 380 741 1737 3 1 1 973 1 95 361 1 1 +"30752" 4 583 5 1 1 0 4 1 100 1045 741 1445 1469 1 9 0 962 1 95 704 0 1 +"30753" 4 583 5 1 1 0 4 1 100 1045 1445 2818 3524 8 7 1 2442 1 95 1373 1 1 +"30754" 4 583 5 1 1 0 4 1 100 1045 2818 5495 2290 9 2 1 1250 1 95 2677 1 1 +"30755" 4 583 5 1 1 0 4 1 100 1045 5495 10715 1634 5 6 0 1047 1 95 5220 0 1 +"30756" 4 583 5 1 1 0 4 1 100 1045 10715 20894 2126 6 8 0 952 1 95 10179 0 1 +"30757" 4 583 5 1 1 0 4 1 100 1045 20894 1045 2444 4 7 1 868 1 95 19849 0 0 +"30758" 4 602 2 0 1 1 1 1 100 246 100 150 18222 8 3 1 2612 3 50 50 1 1 +"30759" 4 602 2 0 1 1 1 1 100 246 150 263 31168 3 7 0 2755 4 75 113 0 1 +"30760" 4 602 2 0 1 1 1 1 100 246 263 197 8006 8 2 0 639 2 25 66 1 0 +"30761" 4 602 2 0 1 1 1 1 100 246 197 246 4123 2 1 1 1099 2 25 49 1 1 +"30762" 4 602 3 1 1 1 1 1 100 304 100 175 1767 9 7 1 980 4 75 75 1 1 +"30763" 4 602 3 1 1 1 1 1 100 304 175 219 3059 4 8 0 607 2 25 44 0 1 +"30764" 4 602 3 1 1 1 1 1 100 304 219 274 1893 8 2 1 2351 2 25 55 1 1 +"30765" 4 602 3 1 1 1 1 1 100 304 274 205 2409 2 1 0 881 2 25 69 1 0 +"30766" 4 602 3 1 1 1 1 1 100 304 205 256 2068 7 6 1 1233 2 25 51 1 1 +"30767" 4 602 3 1 1 1 1 1 100 304 256 269 3252 5 3 1 687 1 5 13 1 1 +"30768" 4 602 3 1 1 1 1 1 100 304 269 256 1596 3 6 1 465 1 5 13 0 0 +"30769" 4 602 3 1 1 1 1 1 100 304 256 243 2747 6 9 1 454 1 5 13 0 0 +"30770" 4 602 3 1 1 1 1 1 100 304 243 304 1601 1 4 0 620 2 25 61 0 1 +"30771" 4 602 3 1 1 1 2 1 100 438 100 150 2885 2 9 0 535 3 50 50 0 1 +"30772" 4 602 3 1 1 1 2 1 100 438 150 188 2643 4 10 0 3544 2 25 38 0 1 +"30773" 4 602 3 1 1 1 2 1 100 438 188 235 1716 3 1 1 444 2 25 47 1 1 +"30774" 4 602 3 1 1 1 2 1 100 438 235 294 2036 8 6 1 644 2 25 59 1 1 +"30775" 4 602 3 1 1 1 2 1 100 
438 294 279 3288 5 7 1 1289 1 5 15 0 0 +"30776" 4 602 3 1 1 1 2 1 100 438 279 265 1979 6 8 1 556 1 5 14 0 0 +"30777" 4 602 3 1 1 1 2 1 100 438 265 278 1640 7 5 1 591 1 5 13 1 1 +"30778" 4 602 3 1 1 1 2 1 100 438 278 417 1352 1 10 0 985 3 50 139 0 1 +"30779" 4 602 3 1 1 1 2 1 100 438 417 438 1841 9 3 1 1772 1 5 21 1 1 +"30780" 4 602 3 1 1 1 3 1 100 201 100 95 4784 3 5 1 857 1 5 5 0 0 +"30781" 4 602 3 1 1 1 3 1 100 201 95 47 1511 8 9 1 874 3 50 48 0 0 +"30782" 4 602 3 1 1 1 3 1 100 201 47 59 3839 2 4 0 1926 2 25 12 0 1 +"30783" 4 602 3 1 1 1 3 1 100 201 59 74 3036 6 3 1 666 2 25 15 1 1 +"30784" 4 602 3 1 1 1 3 1 100 201 74 93 2851 7 6 1 633 2 25 19 1 1 +"30785" 4 602 3 1 1 1 3 1 100 201 93 70 2149 4 2 0 553 2 25 23 1 0 +"30786" 4 602 3 1 1 1 3 1 100 201 70 137 3691 1 8 0 1372 5 95 67 0 1 +"30787" 4 602 3 1 1 1 3 1 100 201 137 103 1588 5 7 1 1078 2 25 34 0 0 +"30788" 4 602 3 1 1 1 3 1 100 201 103 201 1725 9 5 1 1287 5 95 98 1 1 +"30789" 4 602 3 1 1 1 4 1 100 615 100 150 2247 8 7 1 537 3 50 50 1 1 +"30790" 4 602 3 1 1 1 4 1 100 615 150 188 1714 3 10 0 1011 2 25 38 0 1 +"30791" 4 602 3 1 1 1 4 1 100 615 188 179 1333 7 9 1 1269 1 5 9 0 0 +"30792" 4 602 3 1 1 1 4 1 100 615 179 269 1891 9 1 1 1074 3 50 90 1 1 +"30793" 4 602 3 1 1 1 4 1 100 615 269 336 1594 2 3 0 1087 2 25 67 0 1 +"30794" 4 602 3 1 1 1 4 1 100 615 336 588 3072 1 8 0 1156 4 75 252 0 1 +"30795" 4 602 3 1 1 1 4 1 100 615 588 617 1659 5 4 1 970 1 5 29 1 1 +"30796" 4 602 3 1 1 1 4 1 100 615 617 586 4061 4 2 0 783 1 5 31 1 0 +"30797" 4 602 3 1 1 1 4 1 100 615 586 615 1585 6 3 1 849 1 5 29 1 1 +"30798" 4 602 4 0 1 0 1 1 100 29 100 150 9347 2 7 0 1788 3 50 50 0 1 +"30799" 4 602 4 0 1 0 1 1 100 29 150 293 11499 3 7 0 726 1 95 143 0 1 +"30800" 4 602 4 0 1 0 1 1 100 29 293 571 2212 2 8 0 1348 1 95 278 0 1 +"30801" 4 602 4 0 1 0 1 1 100 29 571 29 1604 8 9 1 654 1 95 542 0 0 +"30802" 4 602 5 1 1 0 1 1 100 1043 100 195 1488 1 3 0 755 1 95 95 0 1 +"30803" 4 602 5 1 1 0 1 1 100 1043 195 293 1570 6 5 1 646 3 50 98 1 1 +"30804" 4 602 5 1 1 0 1 1 100 1043 293 440 2287 2 8 0 1462 3 50 147 0 1 +"30805" 4 602 5 1 1 0 1 1 100 1043 440 220 1337 8 9 1 1334 3 50 220 0 0 +"30806" 4 602 5 1 1 0 1 1 100 1043 220 429 2267 3 4 0 2330 1 95 209 0 1 +"30807" 4 602 5 1 1 0 1 1 100 1043 429 450 1563 5 7 0 1570 5 5 21 0 1 +"30808" 4 602 5 1 1 0 1 1 100 1043 450 563 1544 7 4 1 687 4 25 113 1 1 +"30809" 4 602 5 1 1 0 1 1 100 1043 563 535 2183 4 1 0 625 5 5 28 1 0 +"30810" 4 602 5 1 1 0 1 1 100 1043 535 1043 1335 9 6 1 796 1 95 508 1 1 +"30811" 4 602 5 1 1 0 2 1 100 452 100 195 14419 8 1 1 883 1 95 95 1 1 +"30812" 4 602 5 1 1 0 2 1 100 452 195 293 2343 6 2 1 1188 3 50 98 1 1 +"30813" 4 602 5 1 1 0 2 1 100 452 293 278 2355 7 9 1 452 5 5 15 0 0 +"30814" 4 602 5 1 1 0 2 1 100 452 278 487 1451 2 10 0 866 2 75 209 0 1 +"30815" 4 602 5 1 1 0 2 1 100 452 487 243 1630 5 3 0 1389 3 50 244 1 0 +"30816" 4 602 5 1 1 0 2 1 100 452 243 61 2327 4 2 0 590 2 75 182 1 0 +"30817" 4 602 5 1 1 0 2 1 100 452 61 119 4741 3 5 0 1844 1 95 58 0 1 +"30818" 4 602 5 1 1 0 2 1 100 452 119 232 1456 9 4 1 842 1 95 113 1 1 +"30819" 4 602 5 1 1 0 2 1 100 452 232 452 1528 1 7 0 1051 1 95 220 0 1 +"30820" 4 602 5 1 1 0 3 1 100 560 100 195 1470 7 5 1 516 1 95 95 1 1 +"30821" 4 602 5 1 1 0 3 1 100 560 195 49 1332 2 1 0 732 2 75 146 1 0 +"30822" 4 602 5 1 1 0 3 1 100 560 49 96 1618 8 6 1 662 1 95 47 1 1 +"30823" 4 602 5 1 1 0 3 1 100 560 96 101 5467 4 7 0 598 5 5 5 0 1 +"30824" 4 602 5 1 1 0 3 1 100 560 101 126 2021 3 10 0 558 4 25 25 0 1 +"30825" 4 602 5 1 1 0 3 1 100 560 126 94 2394 6 8 1 2624 4 25 32 0 0 
+"30826" 4 602 5 1 1 0 3 1 100 560 94 183 1551 9 2 1 948 1 95 89 1 1 +"30827" 4 602 5 1 1 0 3 1 100 560 183 320 1574 5 3 1 974 2 75 137 1 1 +"30828" 4 602 5 1 1 0 3 1 100 560 320 560 1058 1 10 0 527 2 75 240 0 1 +"30829" 4 602 5 1 1 0 4 1 100 1533 100 195 2204 2 3 0 474 1 95 95 0 1 +"30830" 4 602 5 1 1 0 4 1 100 1533 195 293 1973 7 4 1 1275 3 50 98 1 1 +"30831" 4 602 5 1 1 0 4 1 100 1533 293 220 1469 3 1 0 834 4 25 73 1 0 +"30832" 4 602 5 1 1 0 4 1 100 1533 220 429 1347 1 9 0 533 1 95 209 0 1 +"30833" 4 602 5 1 1 0 4 1 100 1533 429 751 1473 8 7 1 757 2 75 322 1 1 +"30834" 4 602 5 1 1 0 4 1 100 1533 751 1464 1726 9 2 1 573 1 95 713 1 1 +"30835" 4 602 5 1 1 0 4 1 100 1533 1464 1537 3671 5 6 0 0 5 5 73 0 1 +"30836" 4 602 5 1 1 0 4 1 100 1533 1537 1460 5048 6 8 1 0 5 5 77 0 0 +"30837" 4 602 5 1 1 0 4 1 100 1533 1460 1533 4986 4 7 0 0 5 5 73 0 1 +"30838" 4 621 2 0 1 0 1 1 100 141 100 125 18602 2 7 0 9 4 25 25 0 1 +"30839" 4 621 2 0 1 0 1 1 100 141 125 188 3720 3 7 0 1078 3 50 63 0 1 +"30840" 4 621 2 0 1 0 1 1 100 141 188 282 3261 2 8 0 1628 3 50 94 0 1 +"30841" 4 621 2 0 1 0 1 1 100 141 282 141 2610 8 9 1 1692 3 50 141 0 0 +"30842" 4 621 3 1 1 0 1 1 100 51 100 195 3338 1 3 0 942 1 95 95 0 1 +"30843" 4 621 3 1 1 0 1 1 100 51 195 293 4663 6 5 1 927 3 50 98 1 1 +"30844" 4 621 3 1 1 0 1 1 100 51 293 146 2148 2 8 1 1047 3 50 147 0 0 +"30845" 4 621 3 1 1 0 1 1 100 51 146 7 3908 8 9 1 1023 1 95 139 0 0 +"30846" 4 621 3 1 1 0 1 1 100 51 7 14 2537 3 4 0 919 1 95 7 0 1 +"30847" 4 621 3 1 1 0 1 1 100 51 14 27 4175 5 7 0 1014 1 95 13 0 1 +"30848" 4 621 3 1 1 0 1 1 100 51 27 53 2343 7 4 1 1712 1 95 26 1 1 +"30849" 4 621 3 1 1 0 1 1 100 51 53 26 3470 4 1 0 949 3 50 27 1 0 +"30850" 4 621 3 1 1 0 1 1 100 51 26 51 2756 9 6 1 787 1 95 25 1 1 +"30851" 4 621 3 1 1 0 2 1 100 2284 100 195 2980 8 1 1 899 1 95 95 1 1 +"30852" 4 621 3 1 1 0 2 1 100 2284 195 293 2468 6 2 1 1180 3 50 98 1 1 +"30853" 4 621 3 1 1 0 2 1 100 2284 293 366 4409 7 9 0 999 4 25 73 0 1 +"30854" 4 621 3 1 1 0 2 1 100 2284 366 549 1989 2 10 0 868 3 50 183 0 1 +"30855" 4 621 3 1 1 0 2 1 100 2284 549 686 3917 5 3 1 581 4 25 137 1 1 +"30856" 4 621 3 1 1 0 2 1 100 2284 686 343 2731 4 2 0 1812 3 50 343 1 0 +"30857" 4 621 3 1 1 0 2 1 100 2284 343 669 2029 3 5 0 767 1 95 326 0 1 +"30858" 4 621 3 1 1 0 2 1 100 2284 669 1305 2666 9 4 1 811 1 95 636 1 1 +"30859" 4 621 3 1 1 0 2 1 100 2284 1305 2284 1839 1 7 0 1234 2 75 979 0 1 +"30860" 4 621 3 1 1 0 3 1 100 320 100 175 2289 7 5 1 843 2 75 75 1 1 +"30861" 4 621 3 1 1 0 3 1 100 320 175 44 1765 2 1 0 823 2 75 131 1 0 +"30862" 4 621 3 1 1 0 3 1 100 320 44 86 1749 8 6 1 682 1 95 42 1 1 +"30863" 4 621 3 1 1 0 3 1 100 320 86 129 2466 4 7 0 894 3 50 43 0 1 +"30864" 4 621 3 1 1 0 3 1 100 320 129 226 1897 3 10 0 1098 2 75 97 0 1 +"30865" 4 621 3 1 1 0 3 1 100 320 226 56 2109 6 8 1 1274 2 75 170 0 0 +"30866" 4 621 3 1 1 0 3 1 100 320 56 109 1832 9 2 1 634 1 95 53 1 1 +"30867" 4 621 3 1 1 0 3 1 100 320 109 164 2722 5 3 1 982 3 50 55 1 1 +"30868" 4 621 3 1 1 0 3 1 100 320 164 320 1905 1 10 0 807 1 95 156 0 1 +"30869" 4 621 3 1 1 0 4 1 100 212 100 195 1769 2 3 0 1378 1 95 95 0 1 +"30870" 4 621 3 1 1 0 4 1 100 212 195 293 1927 7 4 1 658 3 50 98 1 1 +"30871" 4 621 3 1 1 0 4 1 100 212 293 146 1802 3 1 0 510 3 50 147 1 0 +"30872" 4 621 3 1 1 0 4 1 100 212 146 285 1803 1 9 0 670 1 95 139 0 1 +"30873" 4 621 3 1 1 0 4 1 100 212 285 499 1648 8 7 1 995 2 75 214 1 1 +"30874" 4 621 3 1 1 0 4 1 100 212 499 973 1726 9 2 1 889 1 95 474 1 1 +"30875" 4 621 3 1 1 0 4 1 100 212 973 486 3122 5 6 1 950 3 50 487 0 0 +"30876" 4 621 3 1 1 0 4 1 
100 212 486 121 1796 6 8 1 1601 2 75 365 0 0 +"30877" 4 621 3 1 1 0 4 1 100 212 121 212 1639 4 7 0 1511 2 75 91 0 1 +"30878" 4 621 4 0 1 1 1 1 100 117 100 125 8780 8 3 1 2591 2 25 25 1 1 +"30879" 4 621 4 0 1 1 1 1 100 117 125 156 3386 3 7 0 1073 2 25 31 0 1 +"30880" 4 621 4 0 1 1 1 1 100 117 156 234 1638 8 2 1 961 3 50 78 1 1 +"30881" 4 621 4 0 1 1 1 1 100 117 234 117 1592 2 1 0 1373 3 50 117 1 0 +"30882" 4 621 5 1 1 1 1 1 100 408 100 195 2771 9 7 1 1482 5 95 95 1 1 +"30883" 4 621 5 1 1 1 1 1 100 408 195 244 1750 4 8 0 862 2 25 49 0 1 +"30884" 4 621 5 1 1 1 1 1 100 408 244 305 1341 8 2 1 978 2 25 61 1 1 +"30885" 4 621 5 1 1 1 1 1 100 408 305 229 1455 2 1 0 1038 2 25 76 1 0 +"30886" 4 621 5 1 1 1 1 1 100 408 229 286 1513 7 6 1 554 2 25 57 1 1 +"30887" 4 621 5 1 1 1 1 1 100 408 286 272 2215 5 3 0 653 1 5 14 1 0 +"30888" 4 621 5 1 1 1 1 1 100 408 272 286 1339 3 6 0 749 1 5 14 0 1 +"30889" 4 621 5 1 1 1 1 1 100 408 286 272 1692 6 9 1 678 1 5 14 0 0 +"30890" 4 621 5 1 1 1 1 1 100 408 272 408 1417 1 4 0 1571 3 50 136 0 1 +"30891" 4 621 5 1 1 1 2 1 100 527 100 150 1462 2 9 0 475 3 50 50 0 1 +"30892" 4 621 5 1 1 1 2 1 100 527 150 188 1586 4 10 0 829 2 25 38 0 1 +"30893" 4 621 5 1 1 1 2 1 100 527 188 179 1989 3 1 0 681 1 5 9 1 0 +"30894" 4 621 5 1 1 1 2 1 100 527 179 224 1412 8 6 1 1537 2 25 45 1 1 +"30895" 4 621 5 1 1 1 2 1 100 527 224 235 2869 5 7 0 1223 1 5 11 0 1 +"30896" 4 621 5 1 1 1 2 1 100 527 235 223 1679 6 8 1 692 1 5 12 0 0 +"30897" 4 621 5 1 1 1 2 1 100 527 223 234 1396 7 5 1 789 1 5 11 1 1 +"30898" 4 621 5 1 1 1 2 1 100 527 234 351 1475 1 10 0 1024 3 50 117 0 1 +"30899" 4 621 5 1 1 1 2 1 100 527 351 527 1504 9 3 1 1037 3 50 176 1 1 +"30900" 4 621 5 1 1 1 3 1 100 274 100 125 1417 3 5 0 1048 2 25 25 0 1 +"30901" 4 621 5 1 1 1 3 1 100 274 125 62 1547 8 9 1 811 3 50 63 0 0 +"30902" 4 621 5 1 1 1 3 1 100 274 62 109 1147 2 4 0 543 4 75 47 0 1 +"30903" 4 621 5 1 1 1 3 1 100 274 109 114 1604 6 3 1 691 1 5 5 1 1 +"30904" 4 621 5 1 1 1 3 1 100 274 114 143 1296 7 6 1 794 2 25 29 1 1 +"30905" 4 621 5 1 1 1 3 1 100 274 143 107 1304 4 2 0 802 2 25 36 1 0 +"30906" 4 621 5 1 1 1 3 1 100 274 107 209 1322 1 8 0 4588 5 95 102 0 1 +"30907" 4 621 5 1 1 1 3 1 100 274 209 219 1845 5 7 0 687 1 5 10 0 1 +"30908" 4 621 5 1 1 1 3 1 100 274 219 274 1416 9 5 1 1493 2 25 55 1 1 +"30909" 4 621 5 1 1 1 4 1 100 494 100 150 1680 8 7 1 2447 3 50 50 1 1 +"30910" 4 621 5 1 1 1 4 1 100 494 150 225 1619 3 10 0 400 3 50 75 0 1 +"30911" 4 621 5 1 1 1 4 1 100 494 225 169 1700 7 9 1 1033 2 25 56 0 0 +"30912" 4 621 5 1 1 1 4 1 100 494 169 296 1485 9 1 1 1246 4 75 127 1 1 +"30913" 4 621 5 1 1 1 4 1 100 494 296 370 1480 2 3 0 1509 2 25 74 0 1 +"30914" 4 621 5 1 1 1 4 1 100 494 370 555 2030 1 8 0 931 3 50 185 0 1 +"30915" 4 621 5 1 1 1 4 1 100 494 555 527 3325 5 4 0 1509 1 5 28 1 0 +"30916" 4 621 5 1 1 1 4 1 100 494 527 395 2172 4 2 0 885 2 25 132 1 0 +"30917" 4 621 5 1 1 1 4 1 100 494 395 494 1275 6 3 1 840 2 25 99 1 1 +"30918" 4 623 2 0 1 1 1 1 100 99 100 150 17729 8 3 1 3487 3 50 50 1 1 +"30919" 4 623 2 0 1 1 1 1 100 99 150 263 12456 3 7 0 1074 4 75 113 0 1 +"30920" 4 623 2 0 1 1 1 1 100 99 263 395 1751 8 2 1 1550 3 50 132 1 1 +"30921" 4 623 2 0 1 1 1 1 100 99 395 99 1532 2 1 0 1365 4 75 296 1 0 +"30922" 4 623 3 1 1 1 1 1 100 483 100 195 2127 9 7 1 0 5 95 95 1 1 +"30923" 4 623 3 1 1 1 1 1 100 483 195 244 3656 4 8 0 2641 2 25 49 0 1 +"30924" 4 623 3 1 1 1 1 1 100 483 244 366 1785 8 2 1 1514 3 50 122 1 1 +"30925" 4 623 3 1 1 1 1 1 100 483 366 183 1639 2 1 0 1280 3 50 183 1 0 +"30926" 4 623 3 1 1 1 1 1 100 483 183 275 1682 7 6 1 
796 3 50 92 1 1 +"30927" 4 623 3 1 1 1 1 1 100 483 275 206 3522 5 3 0 1052 2 25 69 1 0 +"30928" 4 623 3 1 1 1 1 1 100 483 206 309 1953 3 6 0 950 3 50 103 0 1 +"30929" 4 623 3 1 1 1 1 1 100 483 309 386 2632 6 9 0 1440 2 25 77 0 1 +"30930" 4 623 3 1 1 1 1 1 100 483 386 483 1766 1 4 0 732 2 25 97 0 1 +"30931" 4 623 3 1 1 1 2 1 100 389 100 150 2702 2 9 0 2528 3 50 50 0 1 +"30932" 4 623 3 1 1 1 2 1 100 389 150 112 2937 4 10 1 647 2 25 38 0 0 +"30933" 4 623 3 1 1 1 2 1 100 389 112 84 1288 3 1 0 2103 2 25 28 1 0 +"30934" 4 623 3 1 1 1 2 1 100 389 84 147 1432 8 6 1 623 4 75 63 1 1 +"30935" 4 623 3 1 1 1 2 1 100 389 147 184 2582 5 7 0 924 2 25 37 0 1 +"30936" 4 623 3 1 1 1 2 1 100 389 184 138 2090 6 8 1 1540 2 25 46 0 0 +"30937" 4 623 3 1 1 1 2 1 100 389 138 207 1552 7 5 1 1224 3 50 69 1 1 +"30938" 4 623 3 1 1 1 2 1 100 389 207 311 1492 1 10 0 744 3 50 104 0 1 +"30939" 4 623 3 1 1 1 2 1 100 389 311 389 1973 9 3 1 1577 2 25 78 1 1 +"30940" 4 623 3 1 1 1 3 1 100 543 100 150 1860 3 5 0 684 3 50 50 0 1 +"30941" 4 623 3 1 1 1 3 1 100 543 150 112 1722 8 9 1 1866 2 25 38 0 0 +"30942" 4 623 3 1 1 1 3 1 100 543 112 168 1554 2 4 0 871 3 50 56 0 1 +"30943" 4 623 3 1 1 1 3 1 100 543 168 210 1304 6 3 1 1409 2 25 42 1 1 +"30944" 4 623 3 1 1 1 3 1 100 543 210 263 1745 7 6 1 1556 2 25 53 1 1 +"30945" 4 623 3 1 1 1 3 1 100 543 263 197 2488 4 2 0 1555 2 25 66 1 0 +"30946" 4 623 3 1 1 1 3 1 100 543 197 345 1437 1 8 0 957 4 75 148 0 1 +"30947" 4 623 3 1 1 1 3 1 100 543 345 362 2154 5 7 0 2792 1 5 17 0 1 +"30948" 4 623 3 1 1 1 3 1 100 543 362 543 1621 9 5 1 744 3 50 181 1 1 +"30949" 4 623 3 1 1 1 4 1 100 708 100 175 2021 8 7 1 901 4 75 75 1 1 +"30950" 4 623 3 1 1 1 4 1 100 708 175 219 1394 3 10 0 2145 2 25 44 0 1 +"30951" 4 623 3 1 1 1 4 1 100 708 219 109 1405 7 9 1 852 3 50 110 0 0 +"30952" 4 623 3 1 1 1 4 1 100 708 109 191 1195 9 1 1 2104 4 75 82 1 1 +"30953" 4 623 3 1 1 1 4 1 100 708 191 287 1264 2 3 0 2427 3 50 96 0 1 +"30954" 4 623 3 1 1 1 4 1 100 708 287 431 1393 1 8 0 842 3 50 144 0 1 +"30955" 4 623 3 1 1 1 4 1 100 708 431 453 2885 5 4 1 2335 1 5 22 1 1 +"30956" 4 623 3 1 1 1 4 1 100 708 453 566 2074 4 2 1 1278 2 25 113 1 1 +"30957" 4 623 3 1 1 1 4 1 100 708 566 708 1838 6 3 1 1497 2 25 142 1 1 +"30958" 4 623 4 0 1 0 1 1 100 256 100 150 11172 2 7 0 2196 3 50 50 0 1 +"30959" 4 623 4 0 1 0 1 1 100 256 150 263 5082 3 7 0 1465 2 75 113 0 1 +"30960" 4 623 4 0 1 0 1 1 100 256 263 513 1400 2 8 0 2522 1 95 250 0 1 +"30961" 4 623 4 0 1 0 1 1 100 256 513 256 2758 8 9 1 1130 3 50 257 0 0 +"30962" 4 623 5 1 1 0 1 1 100 1026 100 195 1565 1 3 0 1862 1 95 95 0 1 +"30963" 4 623 5 1 1 0 1 1 100 1026 195 244 5653 6 5 1 377 4 25 49 1 1 +"30964" 4 623 5 1 1 0 1 1 100 1026 244 366 1109 2 8 0 744 3 50 122 0 1 +"30965" 4 623 5 1 1 0 1 1 100 1026 366 274 854 8 9 1 395 4 25 92 0 0 +"30966" 4 623 5 1 1 0 1 1 100 1026 274 534 912 3 4 0 990 1 95 260 0 1 +"30967" 4 623 5 1 1 0 1 1 100 1026 534 561 1268 5 7 0 1449 5 5 27 0 1 +"30968" 4 623 5 1 1 0 1 1 100 1026 561 701 1142 7 4 1 585 4 25 140 1 1 +"30969" 4 623 5 1 1 0 1 1 100 1026 701 526 1350 4 1 0 2372 4 25 175 1 0 +"30970" 4 623 5 1 1 0 1 1 100 1026 526 1026 1400 9 6 1 386 1 95 500 1 1 +"30971" 4 623 5 1 1 0 2 1 100 951 100 195 1218 8 1 1 612 1 95 95 1 1 +"30972" 4 623 5 1 1 0 2 1 100 951 195 244 1042 6 2 1 311 4 25 49 1 1 +"30973" 4 623 5 1 1 0 2 1 100 951 244 122 1055 7 9 1 905 3 50 122 0 0 +"30974" 4 623 5 1 1 0 2 1 100 951 122 238 967 2 10 0 251 1 95 116 0 1 +"30975" 4 623 5 1 1 0 2 1 100 951 238 298 1192 5 3 1 1848 4 25 60 1 1 +"30976" 4 623 5 1 1 0 2 1 100 951 298 223 1694 4 2 0 409 
4 25 75 1 0 +"30977" 4 623 5 1 1 0 2 1 100 951 223 435 1032 3 5 0 483 1 95 212 0 1 +"30978" 4 623 5 1 1 0 2 1 100 951 435 761 1271 9 4 1 544 2 75 326 1 1 +"30979" 4 623 5 1 1 0 2 1 100 951 761 951 1386 1 7 0 1237 4 25 190 0 1 +"30980" 4 623 5 1 1 0 3 1 100 1347 100 195 1264 7 5 1 283 1 95 95 1 1 +"30981" 4 623 5 1 1 0 3 1 100 1347 195 97 705 2 1 0 2128 3 50 98 1 0 +"30982" 4 623 5 1 1 0 3 1 100 1347 97 189 1017 8 6 1 386 1 95 92 1 1 +"30983" 4 623 5 1 1 0 3 1 100 1347 189 331 1040 4 7 0 1795 2 75 142 0 1 +"30984" 4 623 5 1 1 0 3 1 100 1347 331 497 1444 3 10 0 891 3 50 166 0 1 +"30985" 4 623 5 1 1 0 3 1 100 1347 497 373 1013 6 8 1 587 4 25 124 0 0 +"30986" 4 623 5 1 1 0 3 1 100 1347 373 727 1129 9 2 1 364 1 95 354 1 1 +"30987" 4 623 5 1 1 0 3 1 100 1347 727 691 1610 5 3 0 1071 5 5 36 1 0 +"30988" 4 623 5 1 1 0 3 1 100 1347 691 1347 1100 1 10 0 379 1 95 656 0 1 +"30989" 4 623 5 1 1 0 4 1 100 1690 100 195 1280 2 3 0 410 1 95 95 0 1 +"30990" 4 623 5 1 1 0 4 1 100 1690 195 380 1014 7 4 1 606 1 95 185 1 1 +"30991" 4 623 5 1 1 0 4 1 100 1690 380 285 1097 3 1 0 349 4 25 95 1 0 +"30992" 4 623 5 1 1 0 4 1 100 1690 285 556 1196 1 9 0 327 1 95 271 0 1 +"30993" 4 623 5 1 1 0 4 1 100 1690 556 1084 1086 8 7 1 409 1 95 528 1 1 +"30994" 4 623 5 1 1 0 4 1 100 1690 1084 1355 1134 9 2 1 1601 4 25 271 1 1 +"30995" 4 623 5 1 1 0 4 1 100 1690 1355 1423 1347 5 6 0 1011 5 5 68 0 1 +"30996" 4 623 5 1 1 0 4 1 100 1690 1423 1352 1334 6 8 1 684 5 5 71 0 0 +"30997" 4 623 5 1 1 0 4 1 100 1690 1352 1690 1315 4 7 0 1115 4 25 338 0 1 +"30998" 4 642 2 0 1 1 1 1 100 176 100 150 15659 8 3 1 1862 3 50 50 1 1 +"30999" 4 642 2 0 1 1 1 1 100 176 150 188 17722 3 7 0 1041 2 25 38 0 1 +"31000" 4 642 2 0 1 1 1 1 100 176 188 235 2855 8 2 1 1216 2 25 47 1 1 +"31001" 4 642 2 0 1 1 1 1 100 176 235 176 2142 2 1 0 1131 2 25 59 1 0 +"31002" 4 642 3 1 1 1 1 1 100 259 100 150 24469 9 7 1 991 3 50 50 1 1 +"31003" 4 642 3 1 1 1 1 1 100 259 150 188 2833 4 8 0 1085 2 25 38 0 1 +"31004" 4 642 3 1 1 1 1 1 100 259 188 282 2238 8 2 1 904 3 50 94 1 1 +"31005" 4 642 3 1 1 1 1 1 100 259 282 141 2147 2 1 0 907 3 50 141 1 0 +"31006" 4 642 3 1 1 1 1 1 100 259 141 176 1870 7 6 1 1136 2 25 35 1 1 +"31007" 4 642 3 1 1 1 1 1 100 259 176 132 3060 5 3 0 882 2 25 44 1 0 +"31008" 4 642 3 1 1 1 1 1 100 259 132 198 1955 3 6 0 824 3 50 66 0 1 +"31009" 4 642 3 1 1 1 1 1 100 259 198 148 2188 6 9 1 1061 2 25 50 0 0 +"31010" 4 642 3 1 1 1 1 1 100 259 148 259 1783 1 4 0 1045 4 75 111 0 1 +"31011" 4 642 3 1 1 1 2 1 100 468 100 150 2057 2 9 0 1154 3 50 50 0 1 +"31012" 4 642 3 1 1 1 2 1 100 468 150 188 2738 4 10 0 1358 2 25 38 0 1 +"31013" 4 642 3 1 1 1 2 1 100 468 188 141 2484 3 1 0 1762 2 25 47 1 0 +"31014" 4 642 3 1 1 1 2 1 100 468 141 212 1986 8 6 1 1085 3 50 71 1 1 +"31015" 4 642 3 1 1 1 2 1 100 468 212 159 3343 5 7 1 1094 2 25 53 0 0 +"31016" 4 642 3 1 1 1 2 1 100 468 159 119 2394 6 8 1 1394 2 25 40 0 0 +"31017" 4 642 3 1 1 1 2 1 100 468 119 208 2162 7 5 1 1069 4 75 89 1 1 +"31018" 4 642 3 1 1 1 2 1 100 468 208 312 1719 1 10 0 1523 3 50 104 0 1 +"31019" 4 642 3 1 1 1 2 1 100 468 312 468 1818 9 3 1 1873 3 50 156 1 1 +"31020" 4 642 3 1 1 1 3 1 100 313 100 150 1882 3 5 0 1272 3 50 50 0 1 +"31021" 4 642 3 1 1 1 3 1 100 313 150 75 1699 8 9 1 943 3 50 75 0 0 +"31022" 4 642 3 1 1 1 3 1 100 313 75 94 1920 2 4 0 1834 2 25 19 0 1 +"31023" 4 642 3 1 1 1 3 1 100 313 94 141 1907 6 3 1 873 3 50 47 1 1 +"31024" 4 642 3 1 1 1 3 1 100 313 141 212 2137 7 6 1 1378 3 50 71 1 1 +"31025" 4 642 3 1 1 1 3 1 100 313 212 159 1955 4 2 0 1191 2 25 53 1 0 +"31026" 4 642 3 1 1 1 3 1 100 313 159 239 
1977 1 8 0 1392 3 50 80 0 1 +"31027" 4 642 3 1 1 1 3 1 100 313 239 179 2408 5 7 1 1144 2 25 60 0 0 +"31028" 4 642 3 1 1 1 3 1 100 313 179 313 1901 9 5 1 988 4 75 134 1 1 +"31029" 4 642 3 1 1 1 4 1 100 305 100 150 4028 8 7 1 1305 3 50 50 1 1 +"31030" 4 642 3 1 1 1 4 1 100 305 150 188 5124 3 10 0 2025 2 25 38 0 1 +"31031" 4 642 3 1 1 1 4 1 100 305 188 94 2945 7 9 1 1437 3 50 94 0 0 +"31032" 4 642 3 1 1 1 4 1 100 305 94 165 5109 9 1 1 987 4 75 71 1 1 +"31033" 4 642 3 1 1 1 4 1 100 305 165 248 3929 2 3 0 1145 3 50 83 0 1 +"31034" 4 642 3 1 1 1 4 1 100 305 248 434 1707 1 8 0 1132 4 75 186 0 1 +"31035" 4 642 3 1 1 1 4 1 100 305 434 325 1681 5 4 0 1040 2 25 109 1 0 +"31036" 4 642 3 1 1 1 4 1 100 305 325 244 1619 4 2 0 2235 2 25 81 1 0 +"31037" 4 642 3 1 1 1 4 1 100 305 244 305 1638 6 3 1 1312 2 25 61 1 1 +"31038" 4 642 4 0 1 0 1 1 100 296 100 175 3728 2 7 0 3401 2 75 75 0 1 +"31039" 4 642 4 0 1 0 1 1 100 296 175 263 4530 3 7 0 1257 3 50 88 0 1 +"31040" 4 642 4 0 1 0 1 1 100 296 263 395 2901 2 8 0 1522 3 50 132 0 1 +"31041" 4 642 4 0 1 0 1 1 100 296 395 296 1682 8 9 1 1056 4 25 99 0 0 +"31042" 4 642 5 1 1 0 1 1 100 761 100 175 2382 1 3 0 2319 2 75 75 0 1 +"31043" 4 642 5 1 1 0 1 1 100 761 175 219 2025 6 5 1 893 4 25 44 1 1 +"31044" 4 642 5 1 1 0 1 1 100 761 219 329 1920 2 8 0 1068 3 50 110 0 1 +"31045" 4 642 5 1 1 0 1 1 100 761 329 247 1543 8 9 1 930 4 25 82 0 0 +"31046" 4 642 5 1 1 0 1 1 100 761 247 371 1913 3 4 0 2507 3 50 124 0 1 +"31047" 4 642 5 1 1 0 1 1 100 761 371 464 2496 5 7 0 1138 4 25 93 0 1 +"31048" 4 642 5 1 1 0 1 1 100 761 464 580 1660 7 4 1 1036 4 25 116 1 1 +"31049" 4 642 5 1 1 0 1 1 100 761 580 435 1578 4 1 0 1585 4 25 145 1 0 +"31050" 4 642 5 1 1 0 1 1 100 761 435 761 1554 9 6 1 3175 2 75 326 1 1 +"31051" 4 642 5 1 1 0 2 1 100 763 100 150 3474 8 1 1 932 3 50 50 1 1 +"31052" 4 642 5 1 1 0 2 1 100 763 150 188 3034 6 2 1 790 4 25 38 1 1 +"31053" 4 642 5 1 1 0 2 1 100 763 188 141 1639 7 9 1 1893 4 25 47 0 0 +"31054" 4 642 5 1 1 0 2 1 100 763 141 212 1389 2 10 0 1879 3 50 71 0 1 +"31055" 4 642 5 1 1 0 2 1 100 763 212 265 2894 5 3 1 1593 4 25 53 1 1 +"31056" 4 642 5 1 1 0 2 1 100 763 265 199 3586 4 2 0 636 4 25 66 1 0 +"31057" 4 642 5 1 1 0 2 1 100 763 199 249 1426 3 5 0 1129 4 25 50 0 1 +"31058" 4 642 5 1 1 0 2 1 100 763 249 436 1229 9 4 1 1348 2 75 187 1 1 +"31059" 4 642 5 1 1 0 2 1 100 763 436 763 1415 1 7 0 2676 2 75 327 0 1 +"31060" 4 642 5 1 1 0 3 1 100 269 100 150 2537 7 5 1 1199 3 50 50 1 1 +"31061" 4 642 5 1 1 0 3 1 100 269 150 75 2682 2 1 0 2200 3 50 75 1 0 +"31062" 4 642 5 1 1 0 3 1 100 269 75 113 1359 8 6 1 1844 3 50 38 1 1 +"31063" 4 642 5 1 1 0 3 1 100 269 113 141 1359 4 7 0 1467 4 25 28 0 1 +"31064" 4 642 5 1 1 0 3 1 100 269 141 212 2516 3 10 0 722 3 50 71 0 1 +"31065" 4 642 5 1 1 0 3 1 100 269 212 159 2049 6 8 1 1073 4 25 53 0 0 +"31066" 4 642 5 1 1 0 3 1 100 269 159 239 7150 9 2 1 2279 3 50 80 1 1 +"31067" 4 642 5 1 1 0 3 1 100 269 239 179 1581 5 3 0 1271 4 25 60 1 0 +"31068" 4 642 5 1 1 0 3 1 100 269 179 269 1898 1 10 0 544 3 50 90 0 1 +"31069" 4 642 5 1 1 0 4 1 100 697 100 150 2856 2 3 0 2313 3 50 50 0 1 +"31070" 4 642 5 1 1 0 4 1 100 697 150 225 1577 7 4 1 1099 3 50 75 1 1 +"31071" 4 642 5 1 1 0 4 1 100 697 225 169 1478 3 1 0 1069 4 25 56 1 0 +"31072" 4 642 5 1 1 0 4 1 100 697 169 296 1436 1 9 0 1077 2 75 127 0 1 +"31073" 4 642 5 1 1 0 4 1 100 697 296 444 2048 8 7 1 1688 3 50 148 1 1 +"31074" 4 642 5 1 1 0 4 1 100 697 444 666 1825 9 2 1 1299 3 50 222 1 1 +"31075" 4 642 5 1 1 0 4 1 100 697 666 699 2014 5 6 0 1458 5 5 33 0 1 +"31076" 4 642 5 1 1 0 4 1 100 697 699 664 
1700 6 8 1 1110 5 5 35 0 0 +"31077" 4 642 5 1 1 0 4 1 100 697 664 697 1612 4 7 0 1465 5 5 33 0 1 +"31078" 4 647 2 0 1 1 1 1 100 355 100 150 11504 8 3 1 2218 3 50 50 1 1 +"31079" 4 647 2 0 1 1 1 1 100 355 150 225 10691 3 7 0 866 3 50 75 0 1 +"31080" 4 647 2 0 1 1 1 1 100 355 225 338 3021 8 2 1 746 3 50 113 1 1 +"31081" 4 647 2 0 1 1 1 1 100 355 338 355 2573 2 1 1 408 1 5 17 1 1 +"31082" 4 647 3 1 1 1 1 1 100 486 100 150 8265 9 7 1 1042 3 50 50 1 1 +"31083" 4 647 3 1 1 1 1 1 100 486 150 188 2245 4 8 0 604 2 25 38 0 1 +"31084" 4 647 3 1 1 1 1 1 100 486 188 282 1945 8 2 1 395 3 50 94 1 1 +"31085" 4 647 3 1 1 1 1 1 100 486 282 353 1567 2 1 1 762 2 25 71 1 1 +"31086" 4 647 3 1 1 1 1 1 100 486 353 371 2881 7 6 1 519 1 5 18 1 1 +"31087" 4 647 3 1 1 1 1 1 100 486 371 352 2219 5 3 0 445 1 5 19 1 0 +"31088" 4 647 3 1 1 1 1 1 100 486 352 370 2913 3 6 0 259 1 5 18 0 1 +"31089" 4 647 3 1 1 1 1 1 100 486 370 389 3046 6 9 0 262 1 5 19 0 1 +"31090" 4 647 3 1 1 1 1 1 100 486 389 486 1012 1 4 0 1550 2 25 97 0 1 +"31091" 4 647 3 1 1 1 2 1 100 420 100 150 1709 2 9 0 1009 3 50 50 0 1 +"31092" 4 647 3 1 1 1 2 1 100 420 150 188 977 4 10 0 2606 2 25 38 0 1 +"31093" 4 647 3 1 1 1 2 1 100 420 188 94 1403 3 1 0 988 3 50 94 1 0 +"31094" 4 647 3 1 1 1 2 1 100 420 94 118 2775 8 6 1 1429 2 25 24 1 1 +"31095" 4 647 3 1 1 1 2 1 100 420 118 124 1742 5 7 0 300 1 5 6 0 1 +"31096" 4 647 3 1 1 1 2 1 100 420 124 130 2734 6 8 0 300 1 5 6 0 1 +"31097" 4 647 3 1 1 1 2 1 100 420 130 137 1672 7 5 1 306 1 5 7 1 1 +"31098" 4 647 3 1 1 1 2 1 100 420 137 240 1881 1 10 0 1727 4 75 103 0 1 +"31099" 4 647 3 1 1 1 2 1 100 420 240 420 2044 9 3 1 585 4 75 180 1 1 +"31100" 4 647 3 1 1 1 3 1 100 176 100 195 1610 3 5 0 898 5 95 95 0 1 +"31101" 4 647 3 1 1 1 3 1 100 176 195 49 1380 8 9 1 1411 4 75 146 0 0 +"31102" 4 647 3 1 1 1 3 1 100 176 49 96 1430 2 4 0 850 5 95 47 0 1 +"31103" 4 647 3 1 1 1 3 1 100 176 96 91 4393 6 3 0 241 1 5 5 1 0 +"31104" 4 647 3 1 1 1 3 1 100 176 91 177 1835 7 6 1 393 5 95 86 1 1 +"31105" 4 647 3 1 1 1 3 1 100 176 177 44 1932 4 2 0 2043 4 75 133 1 0 +"31106" 4 647 3 1 1 1 3 1 100 176 44 86 1827 1 8 0 760 5 95 42 0 1 +"31107" 4 647 3 1 1 1 3 1 100 176 86 90 4316 5 7 0 275 1 5 4 0 1 +"31108" 4 647 3 1 1 1 3 1 100 176 90 176 2104 9 5 1 1626 5 95 86 1 1 +"31109" 4 647 3 1 1 1 4 1 100 396 100 125 2153 8 7 1 653 2 25 25 1 1 +"31110" 4 647 3 1 1 1 4 1 100 396 125 188 1307 3 10 0 680 3 50 63 0 1 +"31111" 4 647 3 1 1 1 4 1 100 396 188 141 1521 7 9 1 593 2 25 47 0 0 +"31112" 4 647 3 1 1 1 4 1 100 396 141 148 1933 9 1 1 1094 1 5 7 1 1 +"31113" 4 647 3 1 1 1 4 1 100 396 148 155 1637 2 3 0 394 1 5 7 0 1 +"31114" 4 647 3 1 1 1 4 1 100 396 155 302 1793 1 8 0 1314 5 95 147 0 1 +"31115" 4 647 3 1 1 1 4 1 100 396 302 529 2483 5 4 1 1918 4 75 227 1 1 +"31116" 4 647 3 1 1 1 4 1 100 396 529 264 2478 4 2 0 3169 3 50 265 1 0 +"31117" 4 647 3 1 1 1 4 1 100 396 264 396 2071 6 3 1 699 3 50 132 1 1 +"31118" 4 647 4 0 1 0 1 1 100 33 100 175 6728 2 7 0 748 2 75 75 0 1 +"31119" 4 647 4 0 1 0 1 1 100 33 175 341 7124 3 7 0 640 1 95 166 0 1 +"31120" 4 647 4 0 1 0 1 1 100 33 341 665 988 2 8 0 478 1 95 324 0 1 +"31121" 4 647 4 0 1 0 1 1 100 33 665 33 1206 8 9 1 370 1 95 632 0 0 +"31122" 4 647 5 1 1 0 1 1 100 10688 100 195 3879 1 3 0 390 1 95 95 0 1 +"31123" 4 647 5 1 1 0 1 1 100 10688 195 380 1778 6 5 1 305 1 95 185 1 1 +"31124" 4 647 5 1 1 0 1 1 100 10688 380 741 873 2 8 0 328 1 95 361 0 1 +"31125" 4 647 5 1 1 0 1 1 100 10688 741 1445 1743 8 9 0 332 1 95 704 0 1 +"31126" 4 647 5 1 1 0 1 1 100 10688 1445 2818 1199 3 4 0 365 1 95 1373 0 1 +"31127" 4 647 5 1 
1 0 1 1 100 10688 2818 2959 5102 5 7 0 0 5 5 141 0 1 +"31128" 4 647 5 1 1 0 1 1 100 10688 2959 5770 2641 7 4 1 1789 1 95 2811 1 1 +"31129" 4 647 5 1 1 0 1 1 100 10688 5770 5481 1708 4 1 0 0 5 5 289 1 0 +"31130" 4 647 5 1 1 0 1 1 100 10688 5481 10688 2401 9 6 1 390 1 95 5207 1 1 +"31131" 4 647 5 1 1 0 2 0 100 0 100 195 1833 8 1 1 1012 1 95 95 1 1 +"31132" 4 647 5 1 1 0 2 0 100 0 195 380 1535 6 2 1 297 1 95 185 1 1 +"31133" 4 647 5 1 1 0 2 0 100 0 380 19 1452 7 9 1 309 1 95 361 0 0 +"31134" 4 647 5 1 1 0 2 0 100 0 19 37 1138 2 10 0 285 1 95 18 0 1 +"31135" 4 647 5 1 1 0 2 0 100 0 37 2 1373 5 3 0 274 1 95 35 1 0 +"31136" 4 647 5 1 1 0 2 0 100 0 2 0 1193 4 2 0 250 1 95 2 1 0 +"31137" 4 647 5 1 1 0 3 0 100 0 100 195 1293 7 5 1 324 1 95 95 1 1 +"31138" 4 647 5 1 1 0 3 0 100 0 195 10 707 2 1 0 228 1 95 185 1 0 +"31139" 4 647 5 1 1 0 3 0 100 0 10 20 1095 8 6 1 235 1 95 10 1 1 +"31140" 4 647 5 1 1 0 3 0 100 0 20 39 813 4 7 0 336 1 95 19 0 1 +"31141" 4 647 5 1 1 0 3 0 100 0 39 76 886 3 10 0 268 1 95 37 0 1 +"31142" 4 647 5 1 1 0 3 0 100 0 76 4 1297 6 8 1 282 1 95 72 0 0 +"31143" 4 647 5 1 1 0 3 0 100 0 4 8 1089 9 2 1 636 1 95 4 1 1 +"31144" 4 647 5 1 1 0 3 0 100 0 8 0 653 5 3 0 350 1 95 8 1 0 +"31145" 4 647 5 1 1 0 4 0 100 0 100 195 940 2 3 0 291 1 95 95 0 1 +"31146" 4 647 5 1 1 0 4 0 100 0 195 10 650 7 4 0 246 1 95 185 1 0 +"31147" 4 647 5 1 1 0 4 0 100 0 10 0 674 3 1 0 258 1 95 10 1 0 +"31148" 4 654 2 0 1 0 1 1 100 211 100 150 13325 2 7 0 1531 3 50 50 0 1 +"31149" 4 654 2 0 1 0 1 1 100 211 150 188 8574 3 7 0 656 4 25 38 0 1 +"31150" 4 654 2 0 1 0 1 1 100 211 188 282 2497 2 8 0 1131 3 50 94 0 1 +"31151" 4 654 2 0 1 0 1 1 100 211 282 211 3329 8 9 1 659 4 25 71 0 0 +"31152" 4 654 3 1 1 0 1 1 100 610 100 195 2351 1 3 0 958 1 95 95 0 1 +"31153" 4 654 3 1 1 0 1 1 100 610 195 293 5308 6 5 1 1054 3 50 98 1 1 +"31154" 4 654 3 1 1 0 1 1 100 610 293 440 4169 2 8 0 2543 3 50 147 0 1 +"31155" 4 654 3 1 1 0 1 1 100 610 440 330 2676 8 9 1 851 4 25 110 0 0 +"31156" 4 654 3 1 1 0 1 1 100 610 330 413 3717 3 4 0 1299 4 25 83 0 1 +"31157" 4 654 3 1 1 0 1 1 100 610 413 310 4608 5 7 1 765 4 25 103 0 0 +"31158" 4 654 3 1 1 0 1 1 100 610 310 465 2365 7 4 1 3929 3 50 155 1 1 +"31159" 4 654 3 1 1 0 1 1 100 610 465 581 4677 4 1 1 1491 4 25 116 1 1 +"31160" 4 654 3 1 1 0 1 1 100 610 581 610 9328 9 6 1 913 5 5 29 1 1 +"31161" 4 654 3 1 1 0 2 0 100 0 100 25 2884 8 1 0 706 2 75 75 1 0 +"31162" 4 654 3 1 1 0 2 0 100 0 25 44 2734 6 2 1 972 2 75 19 1 1 +"31163" 4 654 3 1 1 0 2 0 100 0 44 22 2580 7 9 1 1039 3 50 22 0 0 +"31164" 4 654 3 1 1 0 2 0 100 0 22 39 1729 2 10 0 1254 2 75 17 0 1 +"31165" 4 654 3 1 1 0 2 0 100 0 39 10 3643 5 3 0 1942 2 75 29 1 0 +"31166" 4 654 3 1 1 0 2 0 100 0 10 5 3811 4 2 0 471 3 50 5 1 0 +"31167" 4 654 3 1 1 0 2 0 100 0 5 9 2078 3 5 0 943 2 75 4 0 1 +"31168" 4 654 3 1 1 0 2 0 100 0 9 0 2021 9 4 0 857 1 95 9 1 0 +"31169" 4 654 3 1 1 0 3 1 100 271 100 175 4926 7 5 1 1039 2 75 75 1 1 +"31170" 4 654 3 1 1 0 3 1 100 271 175 87 1643 2 1 0 851 3 50 88 1 0 +"31171" 4 654 3 1 1 0 3 1 100 271 87 170 2481 8 6 1 2234 1 95 83 1 1 +"31172" 4 654 3 1 1 0 3 1 100 271 170 127 3554 4 7 1 688 4 25 43 0 0 +"31173" 4 654 3 1 1 0 3 1 100 271 127 191 1553 3 10 0 1078 3 50 64 0 1 +"31174" 4 654 3 1 1 0 3 1 100 271 191 143 3854 6 8 1 904 4 25 48 0 0 +"31175" 4 654 3 1 1 0 3 1 100 271 143 279 2836 9 2 1 640 1 95 136 1 1 +"31176" 4 654 3 1 1 0 3 1 100 271 279 139 11054 5 3 0 922 3 50 140 1 0 +"31177" 4 654 3 1 1 0 3 1 100 271 139 271 4667 1 10 0 378 1 95 132 0 1 +"31178" 4 654 3 1 1 0 4 1 100 2414 100 195 2071 2 3 0 1948 1 95 95 0 
1 +"31179" 4 654 3 1 1 0 4 1 100 2414 195 380 2012 7 4 1 398 1 95 185 1 1 +"31180" 4 654 3 1 1 0 4 1 100 2414 380 285 1861 3 1 0 316 4 25 95 1 0 +"31181" 4 654 3 1 1 0 4 1 100 2414 285 556 1919 1 9 0 425 1 95 271 0 1 +"31182" 4 654 3 1 1 0 4 1 100 2414 556 834 1920 8 7 1 1039 3 50 278 1 1 +"31183" 4 654 3 1 1 0 4 1 100 2414 834 1626 2914 9 2 1 573 1 95 792 1 1 +"31184" 4 654 3 1 1 0 4 1 100 2414 1626 2033 8756 5 6 0 1410 4 25 407 0 1 +"31185" 4 654 3 1 1 0 4 1 100 2414 2033 1931 2159 6 8 1 695 5 5 102 0 0 +"31186" 4 654 3 1 1 0 4 1 100 2414 1931 2414 1795 4 7 0 661 4 25 483 0 1 +"31187" 4 654 4 0 1 1 1 1 100 141 100 125 4533 8 3 1 1534 2 25 25 1 1 +"31188" 4 654 4 0 1 1 1 1 100 141 125 188 12158 3 7 0 2824 3 50 63 0 1 +"31189" 4 654 4 0 1 1 1 1 100 141 188 282 1557 8 2 1 4106 3 50 94 1 1 +"31190" 4 654 4 0 1 1 1 1 100 141 282 141 1533 2 1 0 616 3 50 141 1 0 +"31191" 4 654 5 1 1 1 1 1 100 270 100 195 2539 9 7 1 1608 5 95 95 1 1 +"31192" 4 654 5 1 1 1 1 1 100 270 195 205 2398 4 8 0 827 1 5 10 0 1 +"31193" 4 654 5 1 1 1 1 1 100 270 205 256 1528 8 2 1 2417 2 25 51 1 1 +"31194" 4 654 5 1 1 1 1 1 100 270 256 128 1478 2 1 0 623 3 50 128 1 0 +"31195" 4 654 5 1 1 1 1 1 100 270 128 160 2261 7 6 1 603 2 25 32 1 1 +"31196" 4 654 5 1 1 1 1 1 100 270 160 152 2133 5 3 0 367 1 5 8 1 0 +"31197" 4 654 5 1 1 1 1 1 100 270 152 190 1639 3 6 0 645 2 25 38 0 1 +"31198" 4 654 5 1 1 1 1 1 100 270 190 180 1844 6 9 1 1631 1 5 10 0 0 +"31199" 4 654 5 1 1 1 1 1 100 270 180 270 1109 1 4 0 4089 3 50 90 0 1 +"31200" 4 654 5 1 1 1 2 1 100 581 100 175 1805 2 9 0 1480 4 75 75 0 1 +"31201" 4 654 5 1 1 1 2 1 100 581 175 184 2166 4 10 0 800 1 5 9 0 1 +"31202" 4 654 5 1 1 1 2 1 100 581 184 138 1491 3 1 0 438 2 25 46 1 0 +"31203" 4 654 5 1 1 1 2 1 100 581 138 207 1343 8 6 1 910 3 50 69 1 1 +"31204" 4 654 5 1 1 1 2 1 100 581 207 217 1681 5 7 0 585 1 5 10 0 1 +"31205" 4 654 5 1 1 1 2 1 100 581 217 206 2074 6 8 1 451 1 5 11 0 0 +"31206" 4 654 5 1 1 1 2 1 100 581 206 258 1697 7 5 1 691 2 25 52 1 1 +"31207" 4 654 5 1 1 1 2 1 100 581 258 387 1535 1 10 0 1289 3 50 129 0 1 +"31208" 4 654 5 1 1 1 2 1 100 581 387 581 1726 9 3 1 1106 3 50 194 1 1 +"31209" 4 654 5 1 1 1 3 1 100 297 100 150 2216 3 5 0 405 3 50 50 0 1 +"31210" 4 654 5 1 1 1 3 1 100 297 150 37 1419 8 9 1 574 4 75 113 0 0 +"31211" 4 654 5 1 1 1 3 1 100 297 37 65 1650 2 4 0 642 4 75 28 0 1 +"31212" 4 654 5 1 1 1 3 1 100 297 65 68 2209 6 3 1 590 1 5 3 1 1 +"31213" 4 654 5 1 1 1 3 1 100 297 68 102 1764 7 6 1 529 3 50 34 1 1 +"31214" 4 654 5 1 1 1 3 1 100 297 102 97 1824 4 2 0 775 1 5 5 1 0 +"31215" 4 654 5 1 1 1 3 1 100 297 97 189 1335 1 8 0 911 5 95 92 0 1 +"31216" 4 654 5 1 1 1 3 1 100 297 189 198 1726 5 7 0 372 1 5 9 0 1 +"31217" 4 654 5 1 1 1 3 1 100 297 198 297 1397 9 5 1 3706 3 50 99 1 1 +"31218" 4 654 5 1 1 1 4 1 100 956 100 175 1442 8 7 1 398 4 75 75 1 1 +"31219" 4 654 5 1 1 1 4 1 100 956 175 263 2121 3 10 0 410 3 50 88 0 1 +"31220" 4 654 5 1 1 1 4 1 100 956 263 197 1938 7 9 1 994 2 25 66 0 0 +"31221" 4 654 5 1 1 1 4 1 100 956 197 384 1555 9 1 1 675 5 95 187 1 1 +"31222" 4 654 5 1 1 1 4 1 100 956 384 576 1348 2 3 0 768 3 50 192 0 1 +"31223" 4 654 5 1 1 1 4 1 100 956 576 1008 1579 1 8 0 794 4 75 432 0 1 +"31224" 4 654 5 1 1 1 4 1 100 956 1008 958 1834 5 4 0 523 1 5 50 1 0 +"31225" 4 654 5 1 1 1 4 1 100 956 958 910 1503 4 2 0 874 1 5 48 1 0 +"31226" 4 654 5 1 1 1 4 1 100 956 910 956 1750 6 3 1 619 1 5 46 1 1 +"31227" 4 656 2 0 1 1 1 1 100 689 100 175 19213 8 3 1 1175 4 75 75 1 1 +"31228" 4 656 2 0 1 1 1 1 100 689 175 306 14940 3 7 0 1645 4 75 131 0 1 +"31229" 4 656 2 0 1 1 1 
1 100 689 306 459 4781 8 2 1 929 3 50 153 1 1 +"31230" 4 656 2 0 1 1 1 1 100 689 459 689 2323 2 1 1 604 3 50 230 1 1 +"31231" 4 656 3 1 1 1 1 1 100 918 100 195 24337 9 7 1 826 5 95 95 1 1 +"31232" 4 656 3 1 1 1 1 1 100 918 195 293 7027 4 8 0 569 3 50 98 0 1 +"31233" 4 656 3 1 1 1 1 1 100 918 293 366 4158 8 2 1 1219 2 25 73 1 1 +"31234" 4 656 3 1 1 1 1 1 100 918 366 458 4505 2 1 1 816 2 25 92 1 1 +"31235" 4 656 3 1 1 1 1 1 100 918 458 573 2915 7 6 1 2873 2 25 115 1 1 +"31236" 4 656 3 1 1 1 1 1 100 918 573 544 3884 5 3 0 1136 1 5 29 1 0 +"31237" 4 656 3 1 1 1 1 1 100 918 544 816 2685 3 6 0 1002 3 50 272 0 1 +"31238" 4 656 3 1 1 1 1 1 100 918 816 612 3439 6 9 1 1155 2 25 204 0 0 +"31239" 4 656 3 1 1 1 1 1 100 918 612 918 2506 1 4 0 703 3 50 306 0 1 +"31240" 4 656 3 1 1 1 2 1 100 1998 100 175 2582 2 9 0 3234 4 75 75 0 1 +"31241" 4 656 3 1 1 1 2 1 100 1998 175 306 2117 4 10 0 539 4 75 131 0 1 +"31242" 4 656 3 1 1 1 2 1 100 1998 306 291 3397 3 1 0 1011 1 5 15 1 0 +"31243" 4 656 3 1 1 1 2 1 100 1998 291 509 1881 8 6 1 617 4 75 218 1 1 +"31244" 4 656 3 1 1 1 2 1 100 1998 509 534 4777 5 7 0 1373 1 5 25 0 1 +"31245" 4 656 3 1 1 1 2 1 100 1998 534 507 1953 6 8 1 1124 1 5 27 0 0 +"31246" 4 656 3 1 1 1 2 1 100 1998 507 761 2011 7 5 1 1602 3 50 254 1 1 +"31247" 4 656 3 1 1 1 2 1 100 1998 761 1332 2395 1 10 0 1239 4 75 571 0 1 +"31248" 4 656 3 1 1 1 2 1 100 1998 1332 1998 2085 9 3 1 1827 3 50 666 1 1 +"31249" 4 656 3 1 1 1 3 1 100 31 100 195 2679 3 5 0 1157 5 95 95 0 1 +"31250" 4 656 3 1 1 1 3 1 100 31 195 10 1401 8 9 1 756 5 95 185 0 0 +"31251" 4 656 3 1 1 1 3 1 100 31 10 20 2035 2 4 0 1082 5 95 10 0 1 +"31252" 4 656 3 1 1 1 3 1 100 31 20 39 5248 6 3 1 2172 5 95 19 1 1 +"31253" 4 656 3 1 1 1 3 1 100 31 39 76 2244 7 6 1 1065 5 95 37 1 1 +"31254" 4 656 3 1 1 1 3 1 100 31 76 4 3493 4 2 0 3922 5 95 72 1 0 +"31255" 4 656 3 1 1 1 3 1 100 31 4 8 2634 1 8 0 1192 5 95 4 0 1 +"31256" 4 656 3 1 1 1 3 1 100 31 8 16 3760 5 7 0 1414 5 95 8 0 1 +"31257" 4 656 3 1 1 1 3 1 100 31 16 31 2031 9 5 1 1312 5 95 15 1 1 +"31258" 4 656 3 1 1 1 4 1 100 2838 100 195 2414 8 7 1 0 5 95 95 1 1 +"31259" 4 656 3 1 1 1 4 1 100 2838 195 380 2496 3 10 0 1772 5 95 185 0 1 +"31260" 4 656 3 1 1 1 4 1 100 2838 380 361 2199 7 9 1 1563 1 5 19 0 0 +"31261" 4 656 3 1 1 1 4 1 100 2838 361 704 1653 9 1 1 1456 5 95 343 1 1 +"31262" 4 656 3 1 1 1 4 1 100 2838 704 1373 1906 2 3 0 856 5 95 669 0 1 +"31263" 4 656 3 1 1 1 4 1 100 2838 1373 2403 4575 1 8 0 4023 4 75 1030 0 1 +"31264" 4 656 3 1 1 1 4 1 100 2838 2403 2523 2993 5 4 1 2967 1 5 120 1 1 +"31265" 4 656 3 1 1 1 4 1 100 2838 2523 1892 2172 4 2 0 2370 2 25 631 1 0 +"31266" 4 656 3 1 1 1 4 1 100 2838 1892 2838 3691 6 3 1 4383 3 50 946 1 1 +"31267" 4 656 4 0 1 0 1 1 100 33 100 175 11455 2 7 0 2597 2 75 75 0 1 +"31268" 4 656 4 0 1 0 1 1 100 33 175 341 4365 3 7 0 1143 1 95 166 0 1 +"31269" 4 656 4 0 1 0 1 1 100 33 341 665 7232 2 8 0 817 1 95 324 0 1 +"31270" 4 656 4 0 1 0 1 1 100 33 665 33 1670 8 9 1 1508 1 95 632 0 0 +"31271" 4 656 5 1 1 0 1 0 100 0 100 195 2080 1 3 0 1200 1 95 95 0 1 +"31272" 4 656 5 1 1 0 1 0 100 0 195 380 2303 6 5 1 929 1 95 185 1 1 +"31273" 4 656 5 1 1 0 1 0 100 0 380 741 1486 2 8 0 2626 1 95 361 0 1 +"31274" 4 656 5 1 1 0 1 0 100 0 741 37 1715 8 9 1 854 1 95 704 0 0 +"31275" 4 656 5 1 1 0 1 0 100 0 37 72 1744 3 4 0 912 1 95 35 0 1 +"31276" 4 656 5 1 1 0 1 0 100 0 72 4 1389 5 7 1 383 1 95 68 0 0 +"31277" 4 656 5 1 1 0 1 0 100 0 4 8 1868 7 4 1 398 1 95 4 1 1 +"31278" 4 656 5 1 1 0 1 0 100 0 8 0 1046 4 1 0 934 1 95 8 1 0 +"31279" 4 656 5 1 1 0 2 1 100 31 100 195 1238 8 1 1 464 1 95 
95 1 1 +"31280" 4 656 5 1 1 0 2 1 100 31 195 380 1495 6 2 1 574 1 95 185 1 1 +"31281" 4 656 5 1 1 0 2 1 100 31 380 19 1420 7 9 1 973 1 95 361 0 0 +"31282" 4 656 5 1 1 0 2 1 100 31 19 37 1369 2 10 0 916 1 95 18 0 1 +"31283" 4 656 5 1 1 0 2 1 100 31 37 72 1331 5 3 1 665 1 95 35 1 1 +"31284" 4 656 5 1 1 0 2 1 100 31 72 4 1114 4 2 0 1443 1 95 68 1 0 +"31285" 4 656 5 1 1 0 2 1 100 31 4 8 1428 3 5 0 735 1 95 4 0 1 +"31286" 4 656 5 1 1 0 2 1 100 31 8 16 1231 9 4 1 853 1 95 8 1 1 +"31287" 4 656 5 1 1 0 2 1 100 31 16 31 1168 1 7 0 807 1 95 15 0 1 +"31288" 4 656 5 1 1 0 3 0 100 0 100 195 1226 7 5 1 828 1 95 95 1 1 +"31289" 4 656 5 1 1 0 3 0 100 0 195 10 1176 2 1 0 600 1 95 185 1 0 +"31290" 4 656 5 1 1 0 3 0 100 0 10 20 1302 8 6 1 762 1 95 10 1 1 +"31291" 4 656 5 1 1 0 3 0 100 0 20 39 1558 4 7 0 996 1 95 19 0 1 +"31292" 4 656 5 1 1 0 3 0 100 0 39 76 1044 3 10 0 1116 1 95 37 0 1 +"31293" 4 656 5 1 1 0 3 0 100 0 76 4 2096 6 8 1 1009 1 95 72 0 0 +"31294" 4 656 5 1 1 0 3 0 100 0 4 8 1287 9 2 1 929 1 95 4 1 1 +"31295" 4 656 5 1 1 0 3 0 100 0 8 0 4639 5 3 0 428 1 95 8 1 0 +"31296" 4 656 5 1 1 0 4 0 100 0 100 195 1272 2 3 0 843 1 95 95 0 1 +"31297" 4 656 5 1 1 0 4 0 100 0 195 380 1217 7 4 1 838 1 95 185 1 1 +"31298" 4 656 5 1 1 0 4 0 100 0 380 19 1187 3 1 0 908 1 95 361 1 0 +"31299" 4 656 5 1 1 0 4 0 100 0 19 37 1580 1 9 0 961 1 95 18 0 1 +"31300" 4 656 5 1 1 0 4 0 100 0 37 72 1301 8 7 1 997 1 95 35 1 1 +"31301" 4 656 5 1 1 0 4 0 100 0 72 140 1469 9 2 1 802 1 95 68 1 1 +"31302" 4 656 5 1 1 0 4 0 100 0 140 7 1958 5 6 1 399 1 95 133 0 0 +"31303" 4 656 5 1 1 0 4 0 100 0 7 0 1546 6 8 1 1797 1 95 7 0 0 +"31304" 4 658 2 0 1 1 1 1 100 84 100 150 12953 8 3 1 1900 3 50 50 1 1 +"31305" 4 658 2 0 1 1 1 1 100 84 150 225 43564 3 7 0 574 3 50 75 0 1 +"31306" 4 658 2 0 1 1 1 1 100 84 225 338 5276 8 2 1 622 3 50 113 1 1 +"31307" 4 658 2 0 1 1 1 1 100 84 338 84 1442 2 1 0 620 4 75 254 1 0 +"31308" 4 658 3 1 1 1 1 1 100 1035 100 195 1650 9 7 1 837 5 95 95 1 1 +"31309" 4 658 3 1 1 1 1 1 100 1035 195 205 2085 4 8 0 1336 1 5 10 0 1 +"31310" 4 658 3 1 1 1 1 1 100 1035 205 359 1921 8 2 1 781 4 75 154 1 1 +"31311" 4 658 3 1 1 1 1 1 100 1035 359 341 1490 2 1 0 2629 1 5 18 1 0 +"31312" 4 658 3 1 1 1 1 1 100 1035 341 426 2069 7 6 1 1431 2 25 85 1 1 +"31313" 4 658 3 1 1 1 1 1 100 1035 426 447 3419 5 3 1 425 1 5 21 1 1 +"31314" 4 658 3 1 1 1 1 1 100 1035 447 559 1271 3 6 0 1447 2 25 112 0 1 +"31315" 4 658 3 1 1 1 1 1 100 1035 559 531 2734 6 9 1 627 1 5 28 0 0 +"31316" 4 658 3 1 1 1 1 1 100 1035 531 1035 1298 1 4 0 1359 5 95 504 0 1 +"31317" 4 658 3 1 1 1 2 1 100 1312 100 175 4061 2 9 0 624 4 75 75 0 1 +"31318" 4 658 3 1 1 1 2 1 100 1312 175 184 2091 4 10 0 515 1 5 9 0 1 +"31319" 4 658 3 1 1 1 2 1 100 1312 184 175 3335 3 1 0 3786 1 5 9 1 0 +"31320" 4 658 3 1 1 1 2 1 100 1312 175 306 1915 8 6 1 1877 4 75 131 1 1 +"31321" 4 658 3 1 1 1 2 1 100 1312 306 291 3044 5 7 1 584 1 5 15 0 0 +"31322" 4 658 3 1 1 1 2 1 100 1312 291 276 2857 6 8 1 427 1 5 15 0 0 +"31323" 4 658 3 1 1 1 2 1 100 1312 276 345 1947 7 5 1 1144 2 25 69 1 1 +"31324" 4 658 3 1 1 1 2 1 100 1312 345 673 1803 1 10 0 1189 5 95 328 0 1 +"31325" 4 658 3 1 1 1 2 1 100 1312 673 1312 1433 9 3 1 933 5 95 639 1 1 +"31326" 4 658 3 1 1 1 3 1 100 513 100 150 1388 3 5 0 2349 3 50 50 0 1 +"31327" 4 658 3 1 1 1 3 1 100 513 150 37 1509 8 9 1 2056 4 75 113 0 0 +"31328" 4 658 3 1 1 1 3 1 100 513 37 72 1231 2 4 0 0 5 95 35 0 1 +"31329" 4 658 3 1 1 1 3 1 100 513 72 108 1640 6 3 1 509 3 50 36 1 1 +"31330" 4 658 3 1 1 1 3 1 100 513 108 189 1817 7 6 1 531 4 75 81 1 1 +"31331" 4 658 3 1 1 1 3 1 100 
513 189 142 1383 4 2 0 774 2 25 47 1 0 +"31332" 4 658 3 1 1 1 3 1 100 513 142 277 1519 1 8 0 0 5 95 135 0 1 +"31333" 4 658 3 1 1 1 3 1 100 513 277 263 2473 5 7 1 903 1 5 14 0 0 +"31334" 4 658 3 1 1 1 3 1 100 513 263 513 1696 9 5 1 0 5 95 250 1 1 +"31335" 4 658 3 1 1 1 4 1 100 366 100 175 2329 8 7 1 343 4 75 75 1 1 +"31336" 4 658 3 1 1 1 4 1 100 366 175 263 1798 3 10 0 3101 3 50 88 0 1 +"31337" 4 658 3 1 1 1 4 1 100 366 263 66 1409 7 9 1 808 4 75 197 0 0 +"31338" 4 658 3 1 1 1 4 1 100 366 66 129 1547 9 1 1 0 5 95 63 1 1 +"31339" 4 658 3 1 1 1 4 1 100 366 129 252 1565 2 3 0 0 5 95 123 0 1 +"31340" 4 658 3 1 1 1 4 1 100 366 252 491 1542 1 8 0 0 5 95 239 0 1 +"31341" 4 658 3 1 1 1 4 1 100 366 491 466 2084 5 4 0 401 1 5 25 1 0 +"31342" 4 658 3 1 1 1 4 1 100 366 466 349 1417 4 2 0 656 2 25 117 1 0 +"31343" 4 658 3 1 1 1 4 1 100 366 349 366 2151 6 3 1 1300 1 5 17 1 1 +"31344" 4 658 4 0 1 0 1 1 100 143 100 195 3302 2 7 0 2786 1 95 95 0 1 +"31345" 4 658 4 0 1 0 1 1 100 143 195 293 1430 3 7 0 565 3 50 98 0 1 +"31346" 4 658 4 0 1 0 1 1 100 143 293 571 1261 2 8 0 504 1 95 278 0 1 +"31347" 4 658 4 0 1 0 1 1 100 143 571 143 1218 8 9 1 3248 2 75 428 0 0 +"31348" 4 658 5 1 1 0 1 1 100 20 100 195 1859 1 3 0 433 1 95 95 0 1 +"31349" 4 658 5 1 1 0 1 1 100 20 195 293 2649 6 5 1 1730 3 50 98 1 1 +"31350" 4 658 5 1 1 0 1 1 100 20 293 220 8089 2 8 1 2705 4 25 73 0 0 +"31351" 4 658 5 1 1 0 1 1 100 20 220 110 2410 8 9 1 262 3 50 110 0 0 +"31352" 4 658 5 1 1 0 1 1 100 20 110 215 2973 3 4 0 296 1 95 105 0 1 +"31353" 4 658 5 1 1 0 1 1 100 20 215 107 2791 5 7 1 266 3 50 108 0 0 +"31354" 4 658 5 1 1 0 1 1 100 20 107 209 1222 7 4 1 726 1 95 102 1 1 +"31355" 4 658 5 1 1 0 1 1 100 20 209 10 1338 4 1 0 365 1 95 199 1 0 +"31356" 4 658 5 1 1 0 1 1 100 20 10 20 1978 9 6 1 327 1 95 10 1 1 +"31357" 4 658 5 1 1 0 2 1 100 133 100 195 2348 8 1 1 309 1 95 95 1 1 +"31358" 4 658 5 1 1 0 2 1 100 133 195 244 1051 6 2 1 1787 4 25 49 1 1 +"31359" 4 658 5 1 1 0 2 1 100 133 244 122 1494 7 9 1 299 3 50 122 0 0 +"31360" 4 658 5 1 1 0 2 1 100 133 122 238 1579 2 10 0 1125 1 95 116 0 1 +"31361" 4 658 5 1 1 0 2 1 100 133 238 357 2105 5 3 1 1254 3 50 119 1 1 +"31362" 4 658 5 1 1 0 2 1 100 133 357 18 1267 4 2 0 2792 1 95 339 1 0 +"31363" 4 658 5 1 1 0 2 1 100 133 18 35 1359 3 5 0 391 1 95 17 0 1 +"31364" 4 658 5 1 1 0 2 1 100 133 35 68 1051 9 4 1 292 1 95 33 1 1 +"31365" 4 658 5 1 1 0 2 1 100 133 68 133 1268 1 7 0 234 1 95 65 0 1 +"31366" 4 658 5 1 1 0 3 0 100 0 100 195 2298 7 5 1 227 1 95 95 1 1 +"31367" 4 658 5 1 1 0 3 0 100 0 195 10 918 2 1 0 312 1 95 185 1 0 +"31368" 4 658 5 1 1 0 3 0 100 0 10 20 1266 8 6 1 530 1 95 10 1 1 +"31369" 4 658 5 1 1 0 3 0 100 0 20 39 1015 4 7 0 521 1 95 19 0 1 +"31370" 4 658 5 1 1 0 3 0 100 0 39 76 1702 3 10 0 307 1 95 37 0 1 +"31371" 4 658 5 1 1 0 3 0 100 0 76 4 1332 6 8 1 229 1 95 72 0 0 +"31372" 4 658 5 1 1 0 3 0 100 0 4 8 1139 9 2 1 917 1 95 4 1 1 +"31373" 4 658 5 1 1 0 3 0 100 0 8 0 2249 5 3 0 223 1 95 8 1 0 +"31374" 4 658 5 1 1 0 4 1 100 789 100 195 1625 2 3 0 218 1 95 95 0 1 +"31375" 4 658 5 1 1 0 4 1 100 789 195 341 1268 7 4 1 251 2 75 146 1 1 +"31376" 4 658 5 1 1 0 4 1 100 789 341 85 971 3 1 0 299 2 75 256 1 0 +"31377" 4 658 5 1 1 0 4 1 100 789 85 166 1144 1 9 0 891 1 95 81 0 1 +"31378" 4 658 5 1 1 0 4 1 100 789 166 324 1088 8 7 1 370 1 95 158 1 1 +"31379" 4 658 5 1 1 0 4 1 100 789 324 632 1125 9 2 1 334 1 95 308 1 1 +"31380" 4 658 5 1 1 0 4 1 100 789 632 664 3396 5 6 0 2655 5 5 32 0 1 +"31381" 4 658 5 1 1 0 4 1 100 789 664 631 1964 6 8 1 575 5 5 33 0 0 +"31382" 4 658 5 1 1 0 4 1 100 789 631 789 1403 4 7 0 
721 4 25 158 0 1 +"31383" 4 662 2 0 1 0 1 1 100 8 100 150 19425 2 7 0 369 3 50 50 0 1 +"31384" 4 662 2 0 1 0 1 1 100 8 150 225 6001 3 7 0 930 3 50 75 0 1 +"31385" 4 662 2 0 1 0 1 1 100 8 225 169 3645 2 8 1 1544 4 25 56 0 0 +"31386" 4 662 2 0 1 0 1 1 100 8 169 8 1914 8 9 1 611 1 95 161 0 0 +"31387" 4 662 3 1 1 0 1 0 100 1 100 195 14201 1 3 0 4241 1 95 95 0 1 +"31388" 4 662 3 1 1 0 1 0 100 1 195 146 3864 6 5 0 459 4 25 49 1 0 +"31389" 4 662 3 1 1 0 1 0 100 1 146 219 1580 2 8 0 490 3 50 73 0 1 +"31390" 4 662 3 1 1 0 1 0 100 1 219 55 6781 8 9 1 1393 2 75 164 0 0 +"31391" 4 662 3 1 1 0 1 0 100 1 55 107 3263 3 4 0 461 1 95 52 0 1 +"31392" 4 662 3 1 1 0 1 0 100 1 107 53 3122 5 7 1 1667 3 50 54 0 0 +"31393" 4 662 3 1 1 0 1 0 100 1 53 13 2352 7 4 0 708 2 75 40 1 0 +"31394" 4 662 3 1 1 0 1 0 100 1 13 1 2754 4 1 0 1177 1 95 12 1 0 +"31395" 4 662 3 1 1 0 2 1 100 1921 100 195 3194 8 1 1 1052 1 95 95 1 1 +"31396" 4 662 3 1 1 0 2 1 100 1921 195 293 5292 6 2 1 707 3 50 98 1 1 +"31397" 4 662 3 1 1 0 2 1 100 1921 293 513 3056 7 9 0 2564 2 75 220 0 1 +"31398" 4 662 3 1 1 0 2 1 100 1921 513 385 6086 2 10 1 698 4 25 128 0 0 +"31399" 4 662 3 1 1 0 2 1 100 1921 385 674 2296 5 3 1 958 2 75 289 1 1 +"31400" 4 662 3 1 1 0 2 1 100 1921 674 1011 6386 4 2 1 1386 3 50 337 1 1 +"31401" 4 662 3 1 1 0 2 1 100 1921 1011 505 6342 3 5 1 711 3 50 506 0 0 +"31402" 4 662 3 1 1 0 2 1 100 1921 505 985 4580 9 4 1 399 1 95 480 1 1 +"31403" 4 662 3 1 1 0 2 1 100 1921 985 1921 1884 1 7 0 520 1 95 936 0 1 +"31404" 4 662 3 1 1 0 3 0 100 0 100 195 2228 7 5 1 448 1 95 95 1 1 +"31405" 4 662 3 1 1 0 3 0 100 0 195 10 2389 2 1 0 1376 1 95 185 1 0 +"31406" 4 662 3 1 1 0 3 0 100 0 10 20 1485 8 6 1 416 1 95 10 1 1 +"31407" 4 662 3 1 1 0 3 0 100 0 20 39 1803 4 7 0 339 1 95 19 0 1 +"31408" 4 662 3 1 1 0 3 0 100 0 39 2 1861 3 10 1 358 1 95 37 0 0 +"31409" 4 662 3 1 1 0 3 0 100 0 2 0 6751 6 8 1 475 1 95 2 0 0 +"31410" 4 662 3 1 1 0 4 1 100 985 100 195 3257 2 3 0 362 1 95 95 0 1 +"31411" 4 662 3 1 1 0 4 1 100 985 195 10 2244 7 4 0 573 1 95 185 1 0 +"31412" 4 662 3 1 1 0 4 1 100 985 10 20 1444 3 1 1 332 1 95 10 1 1 +"31413" 4 662 3 1 1 0 4 1 100 985 20 39 1344 1 9 0 427 1 95 19 0 1 +"31414" 4 662 3 1 1 0 4 1 100 985 39 76 1466 8 7 1 317 1 95 37 1 1 +"31415" 4 662 3 1 1 0 4 1 100 985 76 148 1735 9 2 1 369 1 95 72 1 1 +"31416" 4 662 3 1 1 0 4 1 100 985 148 259 4985 5 6 0 409 2 75 111 0 1 +"31417" 4 662 3 1 1 0 4 1 100 985 259 505 1808 6 8 0 361 1 95 246 0 1 +"31418" 4 662 3 1 1 0 4 1 100 985 505 985 4101 4 7 0 318 1 95 480 0 1 +"31419" 4 662 4 0 1 1 1 1 100 3 100 125 8851 8 3 1 547 2 25 25 1 1 +"31420" 4 662 4 0 1 1 1 1 100 3 125 31 4639 3 7 1 485 4 75 94 0 0 +"31421" 4 662 4 0 1 1 1 1 100 3 31 60 1650 8 2 1 1439 5 95 29 1 1 +"31422" 4 662 4 0 1 1 1 1 100 3 60 3 1628 2 1 0 1055 5 95 57 1 0 +"31423" 4 662 5 1 1 1 1 1 100 1154 100 195 5693 9 7 1 1575 5 95 95 1 1 +"31424" 4 662 5 1 1 1 1 1 100 1154 195 293 6311 4 8 0 545 3 50 98 0 1 +"31425" 4 662 5 1 1 1 1 1 100 1154 293 440 1506 8 2 1 904 3 50 147 1 1 +"31426" 4 662 5 1 1 1 1 1 100 1154 440 220 2384 2 1 0 2285 3 50 220 1 0 +"31427" 4 662 5 1 1 1 1 1 100 1154 220 110 2138 7 6 0 687 3 50 110 1 0 +"31428" 4 662 5 1 1 1 1 1 100 1154 110 193 1995 5 3 1 2206 4 75 83 1 1 +"31429" 4 662 5 1 1 1 1 1 100 1154 193 338 5708 3 6 0 3071 4 75 145 0 1 +"31430" 4 662 5 1 1 1 1 1 100 1154 338 592 2414 6 9 0 1783 4 75 254 0 1 +"31431" 4 662 5 1 1 1 1 1 100 1154 592 1154 11523 1 4 0 932 5 95 562 0 1 +"31432" 4 662 5 1 1 1 2 1 100 23 100 150 1842 2 9 0 854 3 50 50 0 1 +"31433" 4 662 5 1 1 1 2 1 100 23 150 142 2273 4 10 1 
999 1 5 8 0 0 +"31434" 4 662 5 1 1 1 2 1 100 23 142 149 1320 3 1 1 431 1 5 7 1 1 +"31435" 4 662 5 1 1 1 2 1 100 23 149 261 1317 8 6 1 619 4 75 112 1 1 +"31436" 4 662 5 1 1 1 2 1 100 23 261 65 2336 5 7 1 814 4 75 196 0 0 +"31437" 4 662 5 1 1 1 2 1 100 23 65 3 2051 6 8 1 0 5 95 62 0 0 +"31438" 4 662 5 1 1 1 2 1 100 23 3 6 4559 7 5 1 0 5 95 3 1 1 +"31439" 4 662 5 1 1 1 2 1 100 23 6 12 1569 1 10 0 484 5 95 6 0 1 +"31440" 4 662 5 1 1 1 2 1 100 23 12 23 1651 9 3 1 0 5 95 11 1 1 +"31441" 4 662 5 1 1 1 3 1 100 4 100 150 2205 3 5 0 550 3 50 50 0 1 +"31442" 4 662 5 1 1 1 3 1 100 4 150 37 1187 8 9 1 4010 4 75 113 0 0 +"31443" 4 662 5 1 1 1 3 1 100 4 37 39 1546 2 4 0 887 1 5 2 0 1 +"31444" 4 662 5 1 1 1 3 1 100 4 39 29 1385 6 3 0 2196 2 25 10 1 0 +"31445" 4 662 5 1 1 1 3 1 100 4 29 14 2125 7 6 0 653 3 50 15 1 0 +"31446" 4 662 5 1 1 1 3 1 100 4 14 10 1477 4 2 0 1762 2 25 4 1 0 +"31447" 4 662 5 1 1 1 3 1 100 4 10 18 1056 1 8 0 2475 4 75 8 0 1 +"31448" 4 662 5 1 1 1 3 1 100 4 18 4 1997 5 7 1 1304 4 75 14 0 0 +"31449" 4 662 5 1 1 1 3 1 100 4 4 4 1807 9 5 1 494 1 5 0 1 1 +"31450" 4 662 5 1 1 1 4 1 100 912 100 175 25936 8 7 1 449 4 75 75 1 1 +"31451" 4 662 5 1 1 1 4 1 100 912 175 263 1169 3 10 0 801 3 50 88 0 1 +"31452" 4 662 5 1 1 1 4 1 100 912 263 131 1145 7 9 1 690 3 50 132 0 0 +"31453" 4 662 5 1 1 1 4 1 100 912 131 255 1292 9 1 1 507 5 95 124 1 1 +"31454" 4 662 5 1 1 1 4 1 100 912 255 497 1104 2 3 0 1374 5 95 242 0 1 +"31455" 4 662 5 1 1 1 4 1 100 912 497 870 2771 1 8 0 614 4 75 373 0 1 +"31456" 4 662 5 1 1 1 4 1 100 912 870 914 1501 5 4 1 1028 1 5 44 1 1 +"31457" 4 662 5 1 1 1 4 1 100 912 914 960 2271 4 2 1 671 1 5 46 1 1 +"31458" 4 662 5 1 1 1 4 1 100 912 960 912 2525 6 3 0 697 1 5 48 1 0 +"31459" 4 669 2 0 1 1 1 1 100 140 100 150 17316 8 3 1 3563 3 50 50 1 1 +"31460" 4 669 2 0 1 1 1 1 100 140 150 225 7874 3 7 0 882 3 50 75 0 1 +"31461" 4 669 2 0 1 1 1 1 100 140 225 281 3960 8 2 1 2013 2 25 56 1 1 +"31462" 4 669 2 0 1 1 1 1 100 140 281 140 18097 2 1 0 640 3 50 141 1 0 +"31463" 4 669 3 1 1 1 1 1 100 753 100 195 3508 9 7 1 439 5 95 95 1 1 +"31464" 4 669 3 1 1 1 1 1 100 753 195 185 4769 4 8 1 418 1 5 10 0 0 +"31465" 4 669 3 1 1 1 1 1 100 753 185 361 1777 8 2 1 526 5 95 176 1 1 +"31466" 4 669 3 1 1 1 1 1 100 753 361 271 2639 2 1 0 1796 2 25 90 1 0 +"31467" 4 669 3 1 1 1 1 1 100 753 271 407 4610 7 6 1 1140 3 50 136 1 1 +"31468" 4 669 3 1 1 1 1 1 100 753 407 387 5310 5 3 0 310 1 5 20 1 0 +"31469" 4 669 3 1 1 1 1 1 100 753 387 406 2136 3 6 0 757 1 5 19 0 1 +"31470" 4 669 3 1 1 1 1 1 100 753 406 386 2425 6 9 1 259 1 5 20 0 0 +"31471" 4 669 3 1 1 1 1 1 100 753 386 753 1603 1 4 0 0 5 95 367 0 1 +"31472" 4 669 3 1 1 1 2 1 100 1193 100 195 2371 2 9 0 0 5 95 95 0 1 +"31473" 4 669 3 1 1 1 2 1 100 1193 195 185 1806 4 10 1 324 1 5 10 0 0 +"31474" 4 669 3 1 1 1 2 1 100 1193 185 139 2027 3 1 0 878 2 25 46 1 0 +"31475" 4 669 3 1 1 1 2 1 100 1193 139 209 1726 8 6 1 2044 3 50 70 1 1 +"31476" 4 669 3 1 1 1 2 1 100 1193 209 199 3142 5 7 1 301 1 5 10 0 0 +"31477" 4 669 3 1 1 1 2 1 100 1193 199 209 2244 6 8 0 251 1 5 10 0 1 +"31478" 4 669 3 1 1 1 2 1 100 1193 209 314 1795 7 5 1 522 3 50 105 1 1 +"31479" 4 669 3 1 1 1 2 1 100 1193 314 612 1514 1 10 0 0 5 95 298 0 1 +"31480" 4 669 3 1 1 1 2 1 100 1193 612 1193 2972 9 3 1 0 5 95 581 1 1 +"31481" 4 669 3 1 1 1 3 1 100 277 100 175 2525 3 5 0 428 4 75 75 0 1 +"31482" 4 669 3 1 1 1 3 1 100 277 175 44 1920 8 9 1 429 4 75 131 0 0 +"31483" 4 669 3 1 1 1 3 1 100 277 44 86 2603 2 4 0 1419 5 95 42 0 1 +"31484" 4 669 3 1 1 1 3 1 100 277 86 82 3351 6 3 0 250 1 5 4 1 0 +"31485" 4 669 3 1 1 1 
3 1 100 277 82 103 2040 7 6 1 358 2 25 21 1 1 +"31486" 4 669 3 1 1 1 3 1 100 277 103 77 1532 4 2 0 351 2 25 26 1 0 +"31487" 4 669 3 1 1 1 3 1 100 277 77 150 1432 1 8 0 583 5 95 73 0 1 +"31488" 4 669 3 1 1 1 3 1 100 277 150 142 4763 5 7 1 1464 1 5 8 0 0 +"31489" 4 669 3 1 1 1 3 1 100 277 142 277 1667 9 5 1 1072 5 95 135 1 1 +"31490" 4 669 3 1 1 1 4 1 100 465 100 105 2035 8 7 1 2340 1 5 5 1 1 +"31491" 4 669 3 1 1 1 4 1 100 465 105 110 944 3 10 0 288 1 5 5 0 1 +"31492" 4 669 3 1 1 1 4 1 100 465 110 104 1246 7 9 1 234 1 5 6 0 0 +"31493" 4 669 3 1 1 1 4 1 100 465 104 203 1053 9 1 1 471 5 95 99 1 1 +"31494" 4 669 3 1 1 1 4 1 100 465 203 355 1154 2 3 0 927 4 75 152 0 1 +"31495" 4 669 3 1 1 1 4 1 100 465 355 621 2038 1 8 0 1685 4 75 266 0 1 +"31496" 4 669 3 1 1 1 4 1 100 465 621 652 1831 5 4 1 299 1 5 31 1 1 +"31497" 4 669 3 1 1 1 4 1 100 465 652 489 1341 4 2 0 279 2 25 163 1 0 +"31498" 4 669 3 1 1 1 4 1 100 465 489 465 1845 6 3 0 513 1 5 24 1 0 +"31499" 4 669 4 0 1 0 1 1 100 37 100 195 5683 2 7 0 4216 1 95 95 0 1 +"31500" 4 669 4 0 1 0 1 1 100 37 195 380 2225 3 7 0 306 1 95 185 0 1 +"31501" 4 669 4 0 1 0 1 1 100 37 380 741 1085 2 8 0 285 1 95 361 0 1 +"31502" 4 669 4 0 1 0 1 1 100 37 741 37 1443 8 9 1 258 1 95 704 0 0 +"31503" 4 669 5 1 1 0 1 0 100 0 100 195 1831 1 3 0 448 1 95 95 0 1 +"31504" 4 669 5 1 1 0 1 0 100 0 195 380 3678 6 5 1 985 1 95 185 1 1 +"31505" 4 669 5 1 1 0 1 0 100 0 380 741 1705 2 8 0 327 1 95 361 0 1 +"31506" 4 669 5 1 1 0 1 0 100 0 741 37 1358 8 9 1 291 1 95 704 0 0 +"31507" 4 669 5 1 1 0 1 0 100 0 37 2 1182 3 4 1 290 1 95 35 0 0 +"31508" 4 669 5 1 1 0 1 0 100 0 2 4 1198 5 7 0 270 1 95 2 0 1 +"31509" 4 669 5 1 1 0 1 0 100 0 4 8 1316 7 4 1 310 1 95 4 1 1 +"31510" 4 669 5 1 1 0 1 0 100 0 8 0 1248 4 1 0 296 1 95 8 1 0 +"31511" 4 669 5 1 1 0 2 1 100 2498 100 195 1193 8 1 1 327 1 95 95 1 1 +"31512" 4 669 5 1 1 0 2 1 100 2498 195 293 1698 6 2 1 3143 3 50 98 1 1 +"31513" 4 669 5 1 1 0 2 1 100 2498 293 220 2203 7 9 1 406 4 25 73 0 0 +"31514" 4 669 5 1 1 0 2 1 100 2498 220 429 1161 2 10 0 494 1 95 209 0 1 +"31515" 4 669 5 1 1 0 2 1 100 2498 429 450 2424 5 3 1 353 5 5 21 1 1 +"31516" 4 669 5 1 1 0 2 1 100 2498 450 337 1974 4 2 0 636 4 25 113 1 0 +"31517" 4 669 5 1 1 0 2 1 100 2498 337 657 1224 3 5 0 271 1 95 320 0 1 +"31518" 4 669 5 1 1 0 2 1 100 2498 657 1281 1098 9 4 1 373 1 95 624 1 1 +"31519" 4 669 5 1 1 0 2 1 100 2498 1281 2498 1056 1 7 0 313 1 95 1217 0 1 +"31520" 4 669 5 1 1 0 3 0 100 1 100 195 1714 7 5 1 252 1 95 95 1 1 +"31521" 4 669 5 1 1 0 3 0 100 1 195 10 1375 2 1 0 369 1 95 185 1 0 +"31522" 4 669 5 1 1 0 3 0 100 1 10 20 1076 8 6 1 320 1 95 10 1 1 +"31523" 4 669 5 1 1 0 3 0 100 1 20 1 1901 4 7 1 826 1 95 19 0 0 +"31524" 4 669 5 1 1 0 4 1 100 257 100 195 1059 2 3 0 280 1 95 95 0 1 +"31525" 4 669 5 1 1 0 4 1 100 257 195 244 1194 7 4 1 581 4 25 49 1 1 +"31526" 4 669 5 1 1 0 4 1 100 257 244 183 1170 3 1 0 1004 4 25 61 1 0 +"31527" 4 669 5 1 1 0 4 1 100 257 183 357 1285 1 9 0 340 1 95 174 0 1 +"31528" 4 669 5 1 1 0 4 1 100 257 357 696 1290 8 7 1 294 1 95 339 1 1 +"31529" 4 669 5 1 1 0 4 1 100 257 696 1357 1361 9 2 1 294 1 95 661 1 1 +"31530" 4 669 5 1 1 0 4 1 100 257 1357 2646 1268 5 6 0 928 1 95 1289 0 1 +"31531" 4 669 5 1 1 0 4 1 100 257 2646 132 1465 6 8 1 654 1 95 2514 0 0 +"31532" 4 669 5 1 1 0 4 1 100 257 132 257 953 4 7 0 227 1 95 125 0 1 +"31533" 4 674 2 0 1 0 1 1 100 99 100 150 13174 2 7 0 1499 3 50 50 0 1 +"31534" 4 674 2 0 1 0 1 1 100 99 150 263 19405 3 7 0 1360 2 75 113 0 1 +"31535" 4 674 2 0 1 0 1 1 100 99 263 395 2640 2 8 0 1615 3 50 132 0 1 +"31536" 4 674 2 0 1 0 
1 1 100 99 395 99 2792 8 9 1 1380 2 75 296 0 0 +"31537" 4 674 3 1 1 0 1 1 100 844 100 195 3961 1 3 0 1056 1 95 95 0 1 +"31538" 4 674 3 1 1 0 1 1 100 844 195 244 2646 6 5 1 579 4 25 49 1 1 +"31539" 4 674 3 1 1 0 1 1 100 844 244 366 3315 2 8 0 648 3 50 122 0 1 +"31540" 4 674 3 1 1 0 1 1 100 844 366 274 1950 8 9 1 619 4 25 92 0 0 +"31541" 4 674 3 1 1 0 1 1 100 844 274 411 1944 3 4 0 455 3 50 137 0 1 +"31542" 4 674 3 1 1 0 1 1 100 844 411 514 4736 5 7 0 1157 4 25 103 0 1 +"31543" 4 674 3 1 1 0 1 1 100 844 514 643 1978 7 4 1 569 4 25 129 1 1 +"31544" 4 674 3 1 1 0 1 1 100 844 643 482 2468 4 1 0 959 4 25 161 1 0 +"31545" 4 674 3 1 1 0 1 1 100 844 482 844 2772 9 6 1 921 2 75 362 1 1 +"31546" 4 674 3 1 1 0 2 1 100 1644 100 195 3252 8 1 1 1256 1 95 95 1 1 +"31547" 4 674 3 1 1 0 2 1 100 1644 195 293 2893 6 2 1 920 3 50 98 1 1 +"31548" 4 674 3 1 1 0 2 1 100 1644 293 146 1721 7 9 1 2395 3 50 147 0 0 +"31549" 4 674 3 1 1 0 2 1 100 1644 146 285 1847 2 10 0 1222 1 95 139 0 1 +"31550" 4 674 3 1 1 0 2 1 100 1644 285 428 2472 5 3 1 1633 3 50 143 1 1 +"31551" 4 674 3 1 1 0 2 1 100 1644 428 321 1884 4 2 0 561 4 25 107 1 0 +"31552" 4 674 3 1 1 0 2 1 100 1644 321 562 1699 3 5 0 2564 2 75 241 0 1 +"31553" 4 674 3 1 1 0 2 1 100 1644 562 1096 2498 9 4 1 1381 1 95 534 1 1 +"31554" 4 674 3 1 1 0 2 1 100 1644 1096 1644 2625 1 7 0 1194 3 50 548 0 1 +"31555" 4 674 3 1 1 0 3 1 100 720 100 195 3483 7 5 1 1464 1 95 95 1 1 +"31556" 4 674 3 1 1 0 3 1 100 720 195 49 2704 2 1 0 974 2 75 146 1 0 +"31557" 4 674 3 1 1 0 3 1 100 720 49 96 1469 8 6 1 1008 1 95 47 1 1 +"31558" 4 674 3 1 1 0 3 1 100 720 96 144 2020 4 7 0 1130 3 50 48 0 1 +"31559" 4 674 3 1 1 0 3 1 100 720 144 252 1944 3 10 0 449 2 75 108 0 1 +"31560" 4 674 3 1 1 0 3 1 100 720 252 126 4702 6 8 1 626 3 50 126 0 0 +"31561" 4 674 3 1 1 0 3 1 100 720 126 246 1509 9 2 1 1211 1 95 120 1 1 +"31562" 4 674 3 1 1 0 3 1 100 720 246 369 4631 5 3 1 865 3 50 123 1 1 +"31563" 4 674 3 1 1 0 3 1 100 720 369 720 1746 1 10 0 1212 1 95 351 0 1 +"31564" 4 674 3 1 1 0 4 1 100 1606 100 195 2461 2 3 0 902 1 95 95 0 1 +"31565" 4 674 3 1 1 0 4 1 100 1606 195 293 1630 7 4 1 447 3 50 98 1 1 +"31566" 4 674 3 1 1 0 4 1 100 1606 293 220 1953 3 1 0 1056 4 25 73 1 0 +"31567" 4 674 3 1 1 0 4 1 100 1606 220 429 2430 1 9 0 966 1 95 209 0 1 +"31568" 4 674 3 1 1 0 4 1 100 1606 429 837 1581 8 7 1 848 1 95 408 1 1 +"31569" 4 674 3 1 1 0 4 1 100 1606 837 1632 1838 9 2 1 836 1 95 795 1 1 +"31570" 4 674 3 1 1 0 4 1 100 1606 1632 1714 3551 5 6 0 647 5 5 82 0 1 +"31571" 4 674 3 1 1 0 4 1 100 1606 1714 1285 1886 6 8 1 630 4 25 429 0 0 +"31572" 4 674 3 1 1 0 4 1 100 1606 1285 1606 2103 4 7 0 552 4 25 321 0 1 +"31573" 4 674 4 0 1 1 1 1 100 329 100 150 7702 8 3 1 1228 3 50 50 1 1 +"31574" 4 674 4 0 1 1 1 1 100 329 150 225 5931 3 7 0 2654 3 50 75 0 1 +"31575" 4 674 4 0 1 1 1 1 100 329 225 439 1938 8 2 1 874 5 95 214 1 1 +"31576" 4 674 4 0 1 1 1 1 100 329 439 329 1558 2 1 0 786 2 25 110 1 0 +"31577" 4 674 5 1 1 1 1 1 100 133 100 195 3218 9 7 1 630 5 95 95 1 1 +"31578" 4 674 5 1 1 1 1 1 100 133 195 244 2063 4 8 0 1197 2 25 49 0 1 +"31579" 4 674 5 1 1 1 1 1 100 133 244 476 2101 8 2 1 576 5 95 232 1 1 +"31580" 4 674 5 1 1 1 1 1 100 133 476 119 1348 2 1 0 1384 4 75 357 1 0 +"31581" 4 674 5 1 1 1 1 1 100 133 119 208 1710 7 6 1 545 4 75 89 1 1 +"31582" 4 674 5 1 1 1 1 1 100 133 208 156 1885 5 3 0 544 2 25 52 1 0 +"31583" 4 674 5 1 1 1 1 1 100 133 156 273 1748 3 6 0 877 4 75 117 0 1 +"31584" 4 674 5 1 1 1 1 1 100 133 273 68 2257 6 9 1 1327 4 75 205 0 0 +"31585" 4 674 5 1 1 1 1 1 100 133 68 133 1495 1 4 0 793 5 95 65 0 1 
+"31586" 4 674 5 1 1 1 2 1 100 2418 100 195 2355 2 9 0 1006 5 95 95 0 1 +"31587" 4 674 5 1 1 1 2 1 100 2418 195 244 1405 4 10 0 499 2 25 49 0 1 +"31588" 4 674 5 1 1 1 2 1 100 2418 244 183 1523 3 1 0 461 2 25 61 1 0 +"31589" 4 674 5 1 1 1 2 1 100 2418 183 357 1318 8 6 1 1242 5 95 174 1 1 +"31590" 4 674 5 1 1 1 2 1 100 2418 357 446 2768 5 7 0 604 2 25 89 0 1 +"31591" 4 674 5 1 1 1 2 1 100 2418 446 424 1441 6 8 1 678 1 5 22 0 0 +"31592" 4 674 5 1 1 1 2 1 100 2418 424 636 1383 7 5 1 460 3 50 212 1 1 +"31593" 4 674 5 1 1 1 2 1 100 2418 636 1240 1257 1 10 0 657 5 95 604 0 1 +"31594" 4 674 5 1 1 1 2 1 100 2418 1240 2418 1472 9 3 1 666 5 95 1178 1 1 +"31595" 4 674 5 1 1 1 3 1 100 211 100 175 1836 3 5 0 721 4 75 75 0 1 +"31596" 4 674 5 1 1 1 3 1 100 211 175 9 1691 8 9 1 688 5 95 166 0 0 +"31597" 4 674 5 1 1 1 3 1 100 211 9 18 1916 2 4 0 568 5 95 9 0 1 +"31598" 4 674 5 1 1 1 3 1 100 211 18 23 1669 6 3 1 730 2 25 5 1 1 +"31599" 4 674 5 1 1 1 3 1 100 211 23 35 1386 7 6 1 419 3 50 12 1 1 +"31600" 4 674 5 1 1 1 3 1 100 211 35 44 2308 4 2 1 675 2 25 9 1 1 +"31601" 4 674 5 1 1 1 3 1 100 211 44 86 1286 1 8 0 1232 5 95 42 0 1 +"31602" 4 674 5 1 1 1 3 1 100 211 86 108 2013 5 7 0 542 2 25 22 0 1 +"31603" 4 674 5 1 1 1 3 1 100 211 108 211 1240 9 5 1 793 5 95 103 1 1 +"31604" 4 674 5 1 1 1 4 1 100 806 100 195 2812 8 7 1 533 5 95 95 1 1 +"31605" 4 674 5 1 1 1 4 1 100 806 195 244 1310 3 10 0 2268 2 25 49 0 1 +"31606" 4 674 5 1 1 1 4 1 100 806 244 122 1486 7 9 1 456 3 50 122 0 0 +"31607" 4 674 5 1 1 1 4 1 100 806 122 238 1092 9 1 1 443 5 95 116 1 1 +"31608" 4 674 5 1 1 1 4 1 100 806 238 464 1353 2 3 0 553 5 95 226 0 1 +"31609" 4 674 5 1 1 1 4 1 100 806 464 905 1464 1 8 0 528 5 95 441 0 1 +"31610" 4 674 5 1 1 1 4 1 100 806 905 860 4306 5 4 0 756 1 5 45 1 0 +"31611" 4 674 5 1 1 1 4 1 100 806 860 645 1512 4 2 0 782 2 25 215 1 0 +"31612" 4 674 5 1 1 1 4 1 100 806 645 806 1733 6 3 1 681 2 25 161 1 1 +"31613" 4 675 2 0 1 0 1 1 100 285 100 150 9471 2 7 0 2281 3 50 50 0 1 +"31614" 4 675 2 0 1 0 1 1 100 285 150 293 13897 3 7 0 1149 1 95 143 0 1 +"31615" 4 675 2 0 1 0 1 1 100 285 293 571 2666 2 8 0 2690 1 95 278 0 1 +"31616" 4 675 2 0 1 0 1 1 100 285 571 285 1639 8 9 1 3971 3 50 286 0 0 +"31617" 4 675 3 1 1 0 1 1 100 610 100 195 1651 1 3 0 1436 1 95 95 0 1 +"31618" 4 675 3 1 1 0 1 1 100 610 195 380 1976 6 5 1 689 1 95 185 1 1 +"31619" 4 675 3 1 1 0 1 1 100 610 380 285 2191 2 8 1 1165 4 25 95 0 0 +"31620" 4 675 3 1 1 0 1 1 100 610 285 214 1540 8 9 1 939 4 25 71 0 0 +"31621" 4 675 3 1 1 0 1 1 100 610 214 417 1451 3 4 0 506 1 95 203 0 1 +"31622" 4 675 3 1 1 0 1 1 100 610 417 521 4112 5 7 0 1043 4 25 104 0 1 +"31623" 4 675 3 1 1 0 1 1 100 610 521 651 2951 7 4 1 754 4 25 130 1 1 +"31624" 4 675 3 1 1 0 1 1 100 610 651 488 1260 4 1 0 1237 4 25 163 1 0 +"31625" 4 675 3 1 1 0 1 1 100 610 488 610 2004 9 6 1 969 4 25 122 1 1 +"31626" 4 675 3 1 1 0 2 0 100 0 100 195 4263 8 1 1 1784 1 95 95 1 1 +"31627" 4 675 3 1 1 0 2 0 100 0 195 97 2717 6 2 0 923 3 50 98 1 0 +"31628" 4 675 3 1 1 0 2 0 100 0 97 24 2443 7 9 1 2031 2 75 73 0 0 +"31629" 4 675 3 1 1 0 2 0 100 0 24 47 1209 2 10 0 729 1 95 23 0 1 +"31630" 4 675 3 1 1 0 2 0 100 0 47 2 1993 5 3 0 605 1 95 45 1 0 +"31631" 4 675 3 1 1 0 2 0 100 0 2 0 1532 4 2 0 595 1 95 2 1 0 +"31632" 4 675 3 1 1 0 3 0 100 0 100 175 1918 7 5 1 837 2 75 75 1 1 +"31633" 4 675 3 1 1 0 3 0 100 0 175 9 1193 2 1 0 575 1 95 166 1 0 +"31634" 4 675 3 1 1 0 3 0 100 0 9 18 2024 8 6 1 941 1 95 9 1 1 +"31635" 4 675 3 1 1 0 3 0 100 0 18 35 1490 4 7 0 574 1 95 17 0 1 +"31636" 4 675 3 1 1 0 3 0 100 0 35 68 998 3 10 0 581 1 95 
33 0 1 +"31637" 4 675 3 1 1 0 3 0 100 0 68 3 1321 6 8 1 555 1 95 65 0 0 +"31638" 4 675 3 1 1 0 3 0 100 0 3 6 1295 9 2 1 431 1 95 3 1 1 +"31639" 4 675 3 1 1 0 3 0 100 0 6 0 1162 5 3 0 480 1 95 6 1 0 +"31640" 4 675 3 1 1 0 4 1 100 2061 100 195 1402 2 3 0 558 1 95 95 0 1 +"31641" 4 675 3 1 1 0 4 1 100 2061 195 293 1617 7 4 1 993 3 50 98 1 1 +"31642" 4 675 3 1 1 0 4 1 100 2061 293 146 1187 3 1 0 1842 3 50 147 1 0 +"31643" 4 675 3 1 1 0 4 1 100 2061 146 285 1305 1 9 0 492 1 95 139 0 1 +"31644" 4 675 3 1 1 0 4 1 100 2061 285 556 1196 8 7 1 472 1 95 271 1 1 +"31645" 4 675 3 1 1 0 4 1 100 2061 556 1084 1780 9 2 1 671 1 95 528 1 1 +"31646" 4 675 3 1 1 0 4 1 100 2061 1084 2114 1355 5 6 0 844 1 95 1030 0 1 +"31647" 4 675 3 1 1 0 4 1 100 2061 2114 1057 1312 6 8 1 689 3 50 1057 0 0 +"31648" 4 675 3 1 1 0 4 1 100 2061 1057 2061 1229 4 7 0 434 1 95 1004 0 1 +"31649" 4 675 4 0 1 1 1 1 100 117 100 150 11175 8 3 1 566 3 50 50 1 1 +"31650" 4 675 4 0 1 1 1 1 100 117 150 188 1570 3 7 0 984 2 25 38 0 1 +"31651" 4 675 4 0 1 1 1 1 100 117 188 235 1188 8 2 1 1069 2 25 47 1 1 +"31652" 4 675 4 0 1 1 1 1 100 117 235 117 1074 2 1 0 641 3 50 118 1 0 +"31653" 4 675 5 1 1 1 1 1 100 335 100 150 1478 9 7 1 838 3 50 50 1 1 +"31654" 4 675 5 1 1 1 1 1 100 335 150 225 1220 4 8 0 571 3 50 75 0 1 +"31655" 4 675 5 1 1 1 1 1 100 335 225 281 1492 8 2 1 1398 2 25 56 1 1 +"31656" 4 675 5 1 1 1 1 1 100 335 281 211 1073 2 1 0 1060 2 25 70 1 0 +"31657" 4 675 5 1 1 1 1 1 100 335 211 264 1608 7 6 1 848 2 25 53 1 1 +"31658" 4 675 5 1 1 1 1 1 100 335 264 198 1789 5 3 0 827 2 25 66 1 0 +"31659" 4 675 5 1 1 1 1 1 100 335 198 297 1218 3 6 0 653 3 50 99 0 1 +"31660" 4 675 5 1 1 1 1 1 100 335 297 223 1470 6 9 1 965 2 25 74 0 0 +"31661" 4 675 5 1 1 1 1 1 100 335 223 335 1071 1 4 0 527 3 50 112 0 1 +"31662" 4 675 5 1 1 1 2 1 100 452 100 150 6301 2 9 0 549 3 50 50 0 1 +"31663" 4 675 5 1 1 1 2 1 100 452 150 188 2118 4 10 0 1037 2 25 38 0 1 +"31664" 4 675 5 1 1 1 2 1 100 452 188 141 976 3 1 0 903 2 25 47 1 0 +"31665" 4 675 5 1 1 1 2 1 100 452 141 212 1466 8 6 1 557 3 50 71 1 1 +"31666" 4 675 5 1 1 1 2 1 100 452 212 318 1682 5 7 0 771 3 50 106 0 1 +"31667" 4 675 5 1 1 1 2 1 100 452 318 79 1419 6 8 1 586 4 75 239 0 0 +"31668" 4 675 5 1 1 1 2 1 100 452 79 119 2582 7 5 1 710 3 50 40 1 1 +"31669" 4 675 5 1 1 1 2 1 100 452 119 232 1592 1 10 0 1086 5 95 113 0 1 +"31670" 4 675 5 1 1 1 2 1 100 452 232 452 1568 9 3 1 669 5 95 220 1 1 +"31671" 4 675 5 1 1 1 3 1 100 772 100 175 2747 3 5 0 841 4 75 75 0 1 +"31672" 4 675 5 1 1 1 3 1 100 772 175 87 1454 8 9 1 2485 3 50 88 0 0 +"31673" 4 675 5 1 1 1 3 1 100 772 87 152 1323 2 4 0 558 4 75 65 0 1 +"31674" 4 675 5 1 1 1 3 1 100 772 152 266 1445 6 3 1 631 4 75 114 1 1 +"31675" 4 675 5 1 1 1 3 1 100 772 266 466 1371 7 6 1 1230 4 75 200 1 1 +"31676" 4 675 5 1 1 1 3 1 100 772 466 116 1498 4 2 0 1478 4 75 350 1 0 +"31677" 4 675 5 1 1 1 3 1 100 772 116 203 1545 1 8 0 1958 4 75 87 0 1 +"31678" 4 675 5 1 1 1 3 1 100 772 203 396 1999 5 7 0 736 5 95 193 0 1 +"31679" 4 675 5 1 1 1 3 1 100 772 396 772 1313 9 5 1 1329 5 95 376 1 1 +"31680" 4 675 5 1 1 1 4 1 100 144 100 150 3798 8 7 1 448 3 50 50 1 1 +"31681" 4 675 5 1 1 1 4 1 100 144 150 263 1058 3 10 0 1578 4 75 113 0 1 +"31682" 4 675 5 1 1 1 4 1 100 144 263 66 1382 7 9 1 922 4 75 197 0 0 +"31683" 4 675 5 1 1 1 4 1 100 144 66 116 1852 9 1 1 911 4 75 50 1 1 +"31684" 4 675 5 1 1 1 4 1 100 144 116 203 1260 2 3 0 1754 4 75 87 0 1 +"31685" 4 675 5 1 1 1 4 1 100 144 203 396 1308 1 8 0 976 5 95 193 0 1 +"31686" 4 675 5 1 1 1 4 1 100 144 396 297 1670 5 4 0 684 2 25 99 1 0 +"31687" 4 675 5 
1 1 1 4 1 100 144 297 74 951 4 2 0 2061 4 75 223 1 0 +"31688" 4 675 5 1 1 1 4 1 100 144 74 144 1324 6 3 1 741 5 95 70 1 1 +"31689" 4 686 2 0 1 1 1 1 100 148 100 150 4324 8 3 1 1880 3 50 50 1 1 +"31690" 4 686 2 0 1 1 1 1 100 148 150 112 14842 3 7 1 1727 2 25 38 0 0 +"31691" 4 686 2 0 1 1 1 1 100 148 112 118 2022 8 2 1 2138 1 5 6 1 1 +"31692" 4 686 2 0 1 1 1 1 100 148 118 148 2327 2 1 1 1303 2 25 30 1 1 +"31693" 4 686 3 1 1 1 1 1 100 104 100 125 6574 9 7 1 1003 2 25 25 1 1 +"31694" 4 686 3 1 1 1 1 1 100 104 125 156 3783 4 8 0 1269 2 25 31 0 1 +"31695" 4 686 3 1 1 1 1 1 100 104 156 117 2018 8 2 0 2520 2 25 39 1 0 +"31696" 4 686 3 1 1 1 1 1 100 104 117 88 2971 2 1 0 967 2 25 29 1 0 +"31697" 4 686 3 1 1 1 1 1 100 104 88 92 2645 7 6 1 1641 1 5 4 1 1 +"31698" 4 686 3 1 1 1 1 1 100 104 92 69 5462 5 3 0 1437 2 25 23 1 0 +"31699" 4 686 3 1 1 1 1 1 100 104 69 66 2131 3 6 1 743 1 5 3 0 0 +"31700" 4 686 3 1 1 1 1 1 100 104 66 83 2241 6 9 0 1364 2 25 17 0 1 +"31701" 4 686 3 1 1 1 1 1 100 104 83 104 3061 1 4 0 1127 2 25 21 0 1 +"31702" 4 686 3 1 1 1 2 1 100 90 100 95 3942 2 9 1 741 1 5 5 0 0 +"31703" 4 686 3 1 1 1 2 1 100 90 95 119 1965 4 10 0 1773 2 25 24 0 1 +"31704" 4 686 3 1 1 1 2 1 100 90 119 89 4774 3 1 0 1290 2 25 30 1 0 +"31705" 4 686 3 1 1 1 2 1 100 90 89 85 2546 8 6 0 819 1 5 4 1 0 +"31706" 4 686 3 1 1 1 2 1 100 90 85 64 2692 5 7 1 993 2 25 21 0 0 +"31707" 4 686 3 1 1 1 2 1 100 90 64 32 2063 6 8 1 898 3 50 32 0 0 +"31708" 4 686 3 1 1 1 2 1 100 90 32 48 2600 7 5 1 994 3 50 16 1 1 +"31709" 4 686 3 1 1 1 2 1 100 90 48 72 1893 1 10 0 1225 3 50 24 0 1 +"31710" 4 686 3 1 1 1 2 1 100 90 72 90 2626 9 3 1 2177 2 25 18 1 1 +"31711" 4 686 3 1 1 1 3 1 100 135 100 125 3414 3 5 0 2211 2 25 25 0 1 +"31712" 4 686 3 1 1 1 3 1 100 135 125 62 2975 8 9 1 723 3 50 63 0 0 +"31713" 4 686 3 1 1 1 3 1 100 135 62 93 6232 2 4 0 628 3 50 31 0 1 +"31714" 4 686 3 1 1 1 3 1 100 135 93 140 3468 6 3 1 640 3 50 47 1 1 +"31715" 4 686 3 1 1 1 3 1 100 135 140 133 5334 7 6 0 1921 1 5 7 1 0 +"31716" 4 686 3 1 1 1 3 1 100 135 133 126 4234 4 2 0 1041 1 5 7 1 0 +"31717" 4 686 3 1 1 1 3 1 100 135 126 189 1453 1 8 0 1181 3 50 63 0 1 +"31718" 4 686 3 1 1 1 3 1 100 135 189 142 2469 5 7 1 847 2 25 47 0 0 +"31719" 4 686 3 1 1 1 3 1 100 135 142 135 2526 9 5 0 3317 1 5 7 1 0 +"31720" 4 686 3 1 1 1 4 1 100 318 100 95 2663 8 7 0 673 1 5 5 1 0 +"31721" 4 686 3 1 1 1 4 1 100 318 95 100 1634 3 10 0 719 1 5 5 0 1 +"31722" 4 686 3 1 1 1 4 1 100 318 100 75 1770 7 9 1 731 2 25 25 0 0 +"31723" 4 686 3 1 1 1 4 1 100 318 75 113 1688 9 1 1 1179 3 50 38 1 1 +"31724" 4 686 3 1 1 1 4 1 100 318 113 170 2740 2 3 0 723 3 50 57 0 1 +"31725" 4 686 3 1 1 1 4 1 100 318 170 255 3193 1 8 0 918 3 50 85 0 1 +"31726" 4 686 3 1 1 1 4 1 100 318 255 268 2980 5 4 1 773 1 5 13 1 1 +"31727" 4 686 3 1 1 1 4 1 100 318 268 335 2936 4 2 1 501 2 25 67 1 1 +"31728" 4 686 3 1 1 1 4 1 100 318 335 318 3048 6 3 0 1367 1 5 17 1 0 +"31729" 4 686 4 0 1 0 1 1 100 9 100 150 4936 2 7 0 2036 3 50 50 0 1 +"31730" 4 686 4 0 1 0 1 1 100 9 150 188 5138 3 7 0 1112 4 25 38 0 1 +"31731" 4 686 4 0 1 0 1 1 100 9 188 179 1468 2 8 1 1162 5 5 9 0 0 +"31732" 4 686 4 0 1 0 1 1 100 9 179 9 2166 8 9 1 2447 1 95 170 0 0 +"31733" 4 686 5 1 1 0 1 1 100 1181 100 195 4323 1 3 0 943 1 95 95 0 1 +"31734" 4 686 5 1 1 0 1 1 100 1181 195 341 3273 6 5 1 2165 2 75 146 1 1 +"31735" 4 686 5 1 1 0 1 1 100 1181 341 512 2823 2 8 0 3097 3 50 171 0 1 +"31736" 4 686 5 1 1 0 1 1 100 1181 512 384 4462 8 9 1 1034 4 25 128 0 0 +"31737" 4 686 5 1 1 0 1 1 100 1181 384 480 1929 3 4 0 597 4 25 96 0 1 +"31738" 4 686 5 1 1 0 1 1 100 1181 
480 720 2123 5 7 0 1711 3 50 240 0 1 +"31739" 4 686 5 1 1 0 1 1 100 1181 720 900 3360 7 4 1 954 4 25 180 1 1 +"31740" 4 686 5 1 1 0 1 1 100 1181 900 675 2407 4 1 0 1823 4 25 225 1 0 +"31741" 4 686 5 1 1 0 1 1 100 1181 675 1181 1476 9 6 1 2270 2 75 506 1 1 +"31742" 4 686 5 1 1 0 2 1 100 935 100 150 4483 8 1 1 456 3 50 50 1 1 +"31743" 4 686 5 1 1 0 2 1 100 935 150 225 3382 6 2 1 832 3 50 75 1 1 +"31744" 4 686 5 1 1 0 2 1 100 935 225 281 3299 7 9 0 1305 4 25 56 0 1 +"31745" 4 686 5 1 1 0 2 1 100 935 281 422 5006 2 10 0 978 3 50 141 0 1 +"31746" 4 686 5 1 1 0 2 1 100 935 422 443 5820 5 3 1 785 5 5 21 1 1 +"31747" 4 686 5 1 1 0 2 1 100 935 443 332 2404 4 2 0 1249 4 25 111 1 0 +"31748" 4 686 5 1 1 0 2 1 100 935 332 498 1817 3 5 0 3011 3 50 166 0 1 +"31749" 4 686 5 1 1 0 2 1 100 935 498 623 1902 9 4 1 1106 4 25 125 1 1 +"31750" 4 686 5 1 1 0 2 1 100 935 623 935 4949 1 7 0 2407 3 50 312 0 1 +"31751" 4 686 5 1 1 0 3 1 100 523 100 150 7432 7 5 1 905 3 50 50 1 1 +"31752" 4 686 5 1 1 0 3 1 100 523 150 75 1558 2 1 0 750 3 50 75 1 0 +"31753" 4 686 5 1 1 0 3 1 100 523 75 113 1855 8 6 1 1341 3 50 38 1 1 +"31754" 4 686 5 1 1 0 3 1 100 523 113 170 1706 4 7 0 716 3 50 57 0 1 +"31755" 4 686 5 1 1 0 3 1 100 523 170 255 2308 3 10 0 2336 3 50 85 0 1 +"31756" 4 686 5 1 1 0 3 1 100 523 255 191 2276 6 8 1 1046 4 25 64 0 0 +"31757" 4 686 5 1 1 0 3 1 100 523 191 334 1575 9 2 1 1890 2 75 143 1 1 +"31758" 4 686 5 1 1 0 3 1 100 523 334 418 2115 5 3 1 1350 4 25 84 1 1 +"31759" 4 686 5 1 1 0 3 1 100 523 418 523 1644 1 10 0 623 4 25 105 0 1 +"31760" 4 686 5 1 1 0 4 1 100 613 100 195 1435 2 3 0 1930 1 95 95 0 1 +"31761" 4 686 5 1 1 0 4 1 100 613 195 146 2753 7 4 0 1404 4 25 49 1 0 +"31762" 4 686 5 1 1 0 4 1 100 613 146 73 2361 3 1 0 1234 3 50 73 1 0 +"31763" 4 686 5 1 1 0 4 1 100 613 73 142 1710 1 9 0 1970 1 95 69 0 1 +"31764" 4 686 5 1 1 0 4 1 100 613 142 249 1778 8 7 1 2102 2 75 107 1 1 +"31765" 4 686 5 1 1 0 4 1 100 613 249 436 4044 9 2 1 1002 2 75 187 1 1 +"31766" 4 686 5 1 1 0 4 1 100 613 436 654 4949 5 6 0 1464 3 50 218 0 1 +"31767" 4 686 5 1 1 0 4 1 100 613 654 490 3340 6 8 1 1149 4 25 164 0 0 +"31768" 4 686 5 1 1 0 4 1 100 613 490 613 1994 4 7 0 860 4 25 123 0 1 +"31769" 4 691 2 0 1 0 1 1 100 4 100 150 5759 2 7 0 1424 3 50 50 0 1 +"31770" 4 691 2 0 1 0 1 1 100 4 150 37 12265 3 7 1 1833 2 75 113 0 0 +"31771" 4 691 2 0 1 0 1 1 100 4 37 72 3559 2 8 0 1835 1 95 35 0 1 +"31772" 4 691 2 0 1 0 1 1 100 4 72 4 2611 8 9 1 2057 1 95 68 0 0 +"31773" 4 691 3 1 1 0 1 1 100 424 100 150 3244 1 3 0 1041 3 50 50 0 1 +"31774" 4 691 3 1 1 0 1 1 100 424 150 188 3912 6 5 1 882 4 25 38 1 1 +"31775" 4 691 3 1 1 0 1 1 100 424 188 367 2221 2 8 0 1367 1 95 179 0 1 +"31776" 4 691 3 1 1 0 1 1 100 424 367 275 5145 8 9 1 812 4 25 92 0 0 +"31777" 4 691 3 1 1 0 1 1 100 424 275 344 2294 3 4 0 1169 4 25 69 0 1 +"31778" 4 691 3 1 1 0 1 1 100 424 344 430 1917 5 7 0 1316 4 25 86 0 1 +"31779" 4 691 3 1 1 0 1 1 100 424 430 452 2449 7 4 1 732 5 5 22 1 1 +"31780" 4 691 3 1 1 0 1 1 100 424 452 339 2880 4 1 0 1944 4 25 113 1 0 +"31781" 4 691 3 1 1 0 1 1 100 424 339 424 2632 9 6 1 1393 4 25 85 1 1 +"31782" 4 691 3 1 1 0 2 1 100 176 100 150 3379 8 1 1 1216 3 50 50 1 1 +"31783" 4 691 3 1 1 0 2 1 100 176 150 75 1839 6 2 0 1137 3 50 75 1 0 +"31784" 4 691 3 1 1 0 2 1 100 176 75 37 1721 7 9 1 943 3 50 38 0 0 +"31785" 4 691 3 1 1 0 2 1 100 176 37 56 1312 2 10 0 1095 3 50 19 0 1 +"31786" 4 691 3 1 1 0 2 1 100 176 56 70 1365 5 3 1 921 4 25 14 1 1 +"31787" 4 691 3 1 1 0 2 1 100 176 70 52 1961 4 2 0 1013 4 25 18 1 0 +"31788" 4 691 3 1 1 0 2 1 100 176 52 78 1760 3 5 0 
1442 3 50 26 0 1 +"31789" 4 691 3 1 1 0 2 1 100 176 78 117 1784 9 4 1 3747 3 50 39 1 1 +"31790" 4 691 3 1 1 0 2 1 100 176 117 176 1371 1 7 0 3302 3 50 59 0 1 +"31791" 4 691 3 1 1 0 3 1 100 1413 100 195 2330 7 5 1 2267 1 95 95 1 1 +"31792" 4 691 3 1 1 0 3 1 100 1413 195 244 1634 2 1 1 1796 4 25 49 1 1 +"31793" 4 691 3 1 1 0 3 1 100 1413 244 183 2862 8 6 0 1124 4 25 61 1 0 +"31794" 4 691 3 1 1 0 3 1 100 1413 183 275 2028 4 7 0 1203 3 50 92 0 1 +"31795" 4 691 3 1 1 0 3 1 100 1413 275 344 1950 3 10 0 759 4 25 69 0 1 +"31796" 4 691 3 1 1 0 3 1 100 1413 344 430 2425 6 8 0 2261 4 25 86 0 1 +"31797" 4 691 3 1 1 0 3 1 100 1413 430 753 2259 9 2 1 1035 2 75 323 1 1 +"31798" 4 691 3 1 1 0 3 1 100 1413 753 1130 2004 5 3 1 1517 3 50 377 1 1 +"31799" 4 691 3 1 1 0 3 1 100 1413 1130 1413 2382 1 10 0 1227 4 25 283 0 1 +"31800" 4 691 3 1 1 0 4 1 100 334 100 195 2821 2 3 0 2225 1 95 95 0 1 +"31801" 4 691 3 1 1 0 4 1 100 334 195 293 3389 7 4 1 583 3 50 98 1 1 +"31802" 4 691 3 1 1 0 4 1 100 334 293 73 2060 3 1 0 1266 2 75 220 1 0 +"31803" 4 691 3 1 1 0 4 1 100 334 73 128 1658 1 9 0 1004 2 75 55 0 1 +"31804" 4 691 3 1 1 0 4 1 100 334 128 224 1376 8 7 1 1615 2 75 96 1 1 +"31805" 4 691 3 1 1 0 4 1 100 334 224 437 2371 9 2 1 2207 1 95 213 1 1 +"31806" 4 691 3 1 1 0 4 1 100 334 437 765 2139 5 6 0 882 2 75 328 0 1 +"31807" 4 691 3 1 1 0 4 1 100 334 765 191 1863 6 8 1 1947 2 75 574 0 0 +"31808" 4 691 3 1 1 0 4 1 100 334 191 334 1596 4 7 0 1637 2 75 143 0 1 +"31809" 4 691 4 0 1 1 1 1 100 158 100 150 5103 8 3 1 987 3 50 50 1 1 +"31810" 4 691 4 0 1 1 1 1 100 158 150 158 2243 3 7 0 729 1 5 8 0 1 +"31811" 4 691 4 0 1 1 1 1 100 158 158 166 2013 8 2 1 1011 1 5 8 1 1 +"31812" 4 691 4 0 1 1 1 1 100 158 166 158 1882 2 1 0 4637 1 5 8 1 0 +"31813" 4 691 5 1 1 1 1 1 100 232 100 150 1913 9 7 1 1040 3 50 50 1 1 +"31814" 4 691 5 1 1 1 1 1 100 232 150 225 1527 4 8 0 1195 3 50 75 0 1 +"31815" 4 691 5 1 1 1 1 1 100 232 225 338 2404 8 2 1 1075 3 50 113 1 1 +"31816" 4 691 5 1 1 1 1 1 100 232 338 169 1608 2 1 0 1619 3 50 169 1 0 +"31817" 4 691 5 1 1 1 1 1 100 232 169 211 2648 7 6 1 1282 2 25 42 1 1 +"31818" 4 691 5 1 1 1 1 1 100 232 211 222 1779 5 3 1 1191 1 5 11 1 1 +"31819" 4 691 5 1 1 1 1 1 100 232 222 233 1745 3 6 0 1670 1 5 11 0 1 +"31820" 4 691 5 1 1 1 1 1 100 232 233 221 2102 6 9 1 682 1 5 12 0 0 +"31821" 4 691 5 1 1 1 1 1 100 232 221 232 1302 1 4 0 834 1 5 11 0 1 +"31822" 4 691 5 1 1 1 2 1 100 166 100 150 1756 2 9 0 802 3 50 50 0 1 +"31823" 4 691 5 1 1 1 2 1 100 166 150 188 1823 4 10 0 889 2 25 38 0 1 +"31824" 4 691 5 1 1 1 2 1 100 166 188 141 2223 3 1 0 1574 2 25 47 1 0 +"31825" 4 691 5 1 1 1 2 1 100 166 141 148 1683 8 6 1 802 1 5 7 1 1 +"31826" 4 691 5 1 1 1 2 1 100 166 148 141 1449 5 7 1 784 1 5 7 0 0 +"31827" 4 691 5 1 1 1 2 1 100 166 141 134 1901 6 8 1 779 1 5 7 0 0 +"31828" 4 691 5 1 1 1 2 1 100 166 134 127 797 7 5 0 670 1 5 7 1 0 +"31829" 4 691 5 1 1 1 2 1 100 166 127 133 1178 1 10 0 726 1 5 6 0 1 +"31830" 4 691 5 1 1 1 2 1 100 166 133 166 1403 9 3 1 1463 2 25 33 1 1 +"31831" 4 691 5 1 1 1 3 1 100 246 100 125 2213 3 5 0 1162 2 25 25 0 1 +"31832" 4 691 5 1 1 1 3 1 100 246 125 94 1455 8 9 1 1264 2 25 31 0 0 +"31833" 4 691 5 1 1 1 3 1 100 246 94 141 1628 2 4 0 679 3 50 47 0 1 +"31834" 4 691 5 1 1 1 3 1 100 246 141 212 1778 6 3 1 865 3 50 71 1 1 +"31835" 4 691 5 1 1 1 3 1 100 246 212 223 1226 7 6 1 1861 1 5 11 1 1 +"31836" 4 691 5 1 1 1 3 1 100 246 223 212 1444 4 2 0 1212 1 5 11 1 0 +"31837" 4 691 5 1 1 1 3 1 100 246 212 223 1159 1 8 0 1105 1 5 11 0 1 +"31838" 4 691 5 1 1 1 3 1 100 246 223 234 1845 5 7 0 1688 1 5 11 0 1 
+"31839" 4 691 5 1 1 1 3 1 100 246 234 246 1308 9 5 1 946 1 5 12 1 1 +"31840" 4 691 5 1 1 1 4 1 100 257 100 175 1767 8 7 1 1278 4 75 75 1 1 +"31841" 4 691 5 1 1 1 4 1 100 257 175 263 1736 3 10 0 1188 3 50 88 0 1 +"31842" 4 691 5 1 1 1 4 1 100 257 263 197 1782 7 9 1 1260 2 25 66 0 0 +"31843" 4 691 5 1 1 1 4 1 100 257 197 246 1587 9 1 1 1607 2 25 49 1 1 +"31844" 4 691 5 1 1 1 4 1 100 257 246 258 1524 2 3 0 826 1 5 12 0 1 +"31845" 4 691 5 1 1 1 4 1 100 257 258 271 1298 1 8 0 890 1 5 13 0 1 +"31846" 4 691 5 1 1 1 4 1 100 257 271 285 1164 5 4 1 815 1 5 14 1 1 +"31847" 4 691 5 1 1 1 4 1 100 257 285 271 1105 4 2 0 706 1 5 14 1 0 +"31848" 4 691 5 1 1 1 4 1 100 257 271 257 843 6 3 0 691 1 5 14 1 0 diff --git a/commons/models/cgt_cm.yml b/commons/models/cgt_cm.yml new file mode 100644 index 00000000..cdf067cd --- /dev/null +++ b/commons/models/cgt_cm.yml @@ -0,0 +1,105 @@ +# Task information. +task_name: + code: cgt # code for the task + desc: Cambridge Gambling Task # description (title-case) + cite: # A list of citations. They should be APA-formatted. + - > + Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., + Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., + London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). + Dissociable deficits in the decision-making cognition of chronic + amphetamine abusers, opiate abusers, patients with focal damage to + prefrontal cortex, and tryptophan-depleted normal volunteers: + evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339. + +# Model information. +model_name: + code: cm # code for the model + desc: Cumulative Model # description (title-case) + cite: # A list of citations. They should be APA-formatted. + +# Model type. +# For now, it should be one among three types: +# 1) Hierarchical +# code: '' +# desc: 'Hierarchical' +# 2) Individual +# code: 'single' +# desc: 'Individual' +# 3) Multiple-block Hierarchical +# code: 'multipleB' +# desc: 'Multiple-block Hierarchical' +model_type: + code: # code for the model type + desc: Hierarchical # description + +# Data columns that must be included in a input data. +# For each column, it should be defined as: +# {column_name}: {one-line description} +# +# Note: `subjID` must always be included. +# Also, if `model_type` is "multipleB", `block` must be included, too. +data_columns: + subjID: A unique identifier for each subject in the data-set. # Required +# block: A unique identifier for each of the multiple blocks within each subject. # Required for multipleB type + gamble_type: '' + percentage_staked: '' + trial_initial_points: '' + assessment_stages: '' + left_colour_chosen: '' + n_left_colour_boxes: '' + +# Model parameters. +# For each parameter, it should be defined as: +# {parameter_name}: +# desc: {description} +# info: [{lower_bound}, {plausible_value}, {upper_bound}] +# +# `info` is defined for a fixed initial value of the parameter. +# `lower_bound`, `plausible_value`, `upper_bound` can be numbers, strings +# (e.g., 'Inf', '-Inf', 'exp([0-9.]+)'), where plausible_value should be +# neither 'Inf' nor '-Inf'. +parameters: + alpha: + desc: probability distortion + info: [0, 1, 5] + c: + desc: color bias + info: [0, 0.5, 1] + rho: + desc: relative loss sensitivity + info: [0, 1, Inf] + beta: + desc: discounting rate + info: [0, 1, Inf] + gamma: + desc: choice sensitivity + info: [0, 1, Inf] + +# (optional) Model regressors. 
+# If any exist, each regressor should be defined as:
+#   {regressor}: {number_of_dimension}
+regressors:
+  y_hat_col: 2
+  y_hat_bet: 2
+  bet_utils: 3
+
+# (optional) response variables for posterior predictive checks (PPC).
+# Should be defined in a generated quantities block.
+postpreds:
+
+# (optional) a list of additional arguments.
+# Each additional argument should be defined as:
+#   - code: {code}
+#     default: {default_value}
+#     desc: {description}
+additional_args:
+
+# (optional) notes on the model. Should be given as a list of notes.
+notes:
+
+# (optional) a list of contributors. To specify who wrote the model code for hBayesDM.
+contributors:
+- name: Nathaniel Haines
+  email: haines.175@osu.edu
+  link: http://haines-lab.com/
diff --git a/commons/stan_files/cgt_cm.stan b/commons/stan_files/cgt_cm.stan
new file mode 100644
index 00000000..495f0139
--- /dev/null
+++ b/commons/stan_files/cgt_cm.stan
@@ -0,0 +1,170 @@
+data {
+  int N;                  // Number of subjects
+  int T;                  // Max trials per subject
+  int B;                  // Number of bet options
+  int Tsubj[N];           // Number of trials per subject
+  int col_chosen[N,T];    // Chosen color index
+  int bet_chosen[N,T];    // Chosen bet index
+  vector[B] bet_delay;    // Vector of bet delays
+  real gain[N,T,B];       // gain: (capital + capital * bet_prop)
+  real loss[N,T,B];       // loss: (capital - capital * bet_prop)
+  real prop_red[N,T];     // Proportion of red boxes
+  real prop_chosen[N,T];  // Proportion of boxes of the chosen colour
+}
+
+parameters {
+  // Declare all parameters as vectors for vectorizing
+  // Hyper(group)-parameters
+  vector[5] mu_p;
+  vector[5] sigma;
+
+  // Subject-level raw parameters (for Matt trick)
+  vector[N] alpha_pr;
+  vector[N] rho_pr;
+  vector[N] gamma_pr;
+  vector[N] c_pr;
+  vector[N] beta_pr;
+}
+
+transformed parameters {
+  // Subject-level parameters
+  vector[N] alpha;
+  vector[N] rho;
+  vector[N] gamma;
+  vector[N] c;
+  vector[N] beta;
+
+  for (i in 1:N) {
+    alpha[i] = Phi_approx(mu_p[1] + sigma[1] * alpha_pr[i]) * 5;
+    c[i]     = Phi_approx(mu_p[4] + sigma[4] * c_pr[i]);
+  }
+  rho   = exp(mu_p[2] + sigma[2] * rho_pr);
+  gamma = exp(mu_p[3] + sigma[3] * gamma_pr);
+  beta  = exp(mu_p[5] + sigma[5] * beta_pr);
+}
+
+model {
+  // Hyperpriors (vectorized)
+  mu_p  ~ normal(0, 1);
+  sigma ~ normal(0, 0.2);
+
+  // Individual parameters
+  alpha_pr ~ normal(0, 1);
+  rho_pr   ~ normal(0, 1);
+  gamma_pr ~ normal(0, 1);
+  c_pr     ~ normal(0, 1);
+  beta_pr  ~ normal(0, 1);
+
+  // Subject loop and trial loop
+  for (i in 1:N) {
+    // Define vectors
+    vector[B] gain_util;
+    vector[B] loss_util;
+    vector[B] bet_util;
+    vector[2] col_util;
+
+    // Model
+    for (t in 1:Tsubj[i]) {
+      // Probability of choosing red: colour bias c weights the
+      // alpha-distorted proportion of red boxes
+      col_util[1] = (c[i] * pow(prop_red[i,t], alpha[i])) / (c[i] * pow(prop_red[i,t], alpha[i]) + ((1 - c[i]) * pow(1 - prop_red[i,t], alpha[i])));
+      col_util[2] = 1 - col_util[1];
+
+      // Increment log likelihood for color choice
+      col_chosen[i,t] ~ categorical(col_util);
+
+      // For each bet option
+      for (b in 1:B) {
+        // Assign gain/loss utilities (gain sensitivity is fixed at 1; rho scales losses)
+        gain_util[b] = log(1 + gain[i,t,b] * 1);
+        loss_util[b] = log(1 + loss[i,t,b] * rho[i]);
+      }
+
+      // Utility of all bets
+      bet_util = gain_util * prop_chosen[i,t] + loss_util * (1 - prop_chosen[i,t]);
+      // Utility of bet with delays
+      // bet_util = ((beta[i] + bet_util) / beta[i]) - beta[i] * bet_delay;
+      bet_util = bet_util - beta[i] * bet_delay;
+
+      // Increment log likelihood for choosing bet
+      bet_chosen[i,t] ~ categorical_logit(bet_util * gamma[i]);
+    }
+  }
+}
+
+generated quantities {
+  // Define group-level parameters
+  real mu_alpha;
+  real mu_rho;
+  real mu_gamma;
+  real mu_c;
+  real mu_beta;
+
+  // Define log likelihood and posterior-prediction variables
+  real log_lik[N];
+  real y_hat_col[N,T];
+  real y_hat_bet[N,T];
+  real bet_utils[N,T,B];
+
+  for (j in 1:N) {
+    for (k in 1:T) {
+      y_hat_col[j,k] = 0;
+      y_hat_bet[j,k] = 0;
+      for (b in 1:B) {
+        bet_utils[j,k,b] = 0;
+      }
+    }
+  }
+
+  // Assign group-level parameters
+  mu_alpha = Phi_approx(mu_p[1]) * 5;
+  mu_rho   = exp(mu_p[2]);
+  mu_gamma = exp(mu_p[3]);
+  mu_c     = Phi_approx(mu_p[4]);
+  mu_beta  = exp(mu_p[5]);
+
+  { // local section, this saves time and space
+    for (i in 1:N) {
+      // Define vectors
+      vector[B] gain_util;
+      vector[B] loss_util;
+      vector[B] bet_util;
+      vector[2] col_util;
+
+      log_lik[i] = 0;
+
+      // Model
+      for (t in 1:Tsubj[i]) {
+        // Probability of choosing red, as in the model block
+        col_util[1] = (c[i] * pow(prop_red[i,t], alpha[i])) / (c[i] * pow(prop_red[i,t], alpha[i]) + ((1 - c[i]) * pow(1 - prop_red[i,t], alpha[i])));
+        col_util[2] = 1 - col_util[1];
+
+        // Increment log likelihood for color choice
+        log_lik[i] = log_lik[i] + categorical_lpmf(col_chosen[i,t] | col_util);
+        // Posterior prediction for color choice
+        y_hat_col[i,t] = categorical_rng(col_util);
+
+        // For each bet option
+        for (b in 1:B) {
+          // Assign gain/loss utilities (gain sensitivity is fixed at 1; rho scales losses)
+          gain_util[b] = log(1 + gain[i,t,b] * 1);
+          loss_util[b] = log(1 + loss[i,t,b] * rho[i]);
+        }
+
+        // Utility of all bets
+        bet_util = gain_util * prop_chosen[i,t] + loss_util * (1 - prop_chosen[i,t]);
+        // Utility of bet with delays
+        // bet_util = ((beta[i] + bet_util) / beta[i]) - beta[i] * bet_delay;
+        bet_util = bet_util - beta[i] * bet_delay;
+
+        // Increment log likelihood for choosing bet
+        log_lik[i] += categorical_logit_lpmf(bet_chosen[i,t] | bet_util * gamma[i]);
+        // Posterior prediction for bet
+        y_hat_bet[i,t] = categorical_rng(softmax(bet_util * gamma[i]));
+        // Save bet utility
+        for (b in 1:B) {
+          bet_utils[i,t,b] = bet_util[b];
+        }
+      }
+    }
+  }
+}
From af24d1aaa168e71fefa95ac1c88562ddaccb7291 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 17:46:07 +0900
Subject: [PATCH 128/163] Avoid using "'" without quotes

---
 commons/example.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/commons/example.yml b/commons/example.yml
index c63b1353..553dc650 100644
--- a/commons/example.yml
+++ b/commons/example.yml
@@ -106,7 +106,7 @@ notes:
     This is a note for this model.
   - >
     If you want to write long notes for the model,
-    you'd be better to check out how to write multiline strings in YAML
+    you would do better to check out how to write multiline strings in YAML
     (https://yaml-multiline.info)
 
 # (optional) a list of contributors. To specify who wrote the model code for hBayesDM.
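The choice rules implemented in cgt_cm.stan above can be sanity-checked outside of Stan. Below is a minimal NumPy sketch of the same two formulas: the bias-weighted, alpha-distorted colour-choice probability, and the delay-penalized bet utilities passed through a softmax. The function name and argument layout are illustrative only, not part of the hBayesDM API.

import numpy as np

def cgt_cm_trial_probs(prop_red, prop_chosen, gain, loss, bet_delay,
                       alpha, c, rho, gamma, beta):
    # Bias-weighted, alpha-distorted probability of choosing red
    # (mirrors col_util[1] in the Stan model block)
    num = c * prop_red ** alpha
    p_red = num / (num + (1 - c) * (1 - prop_red) ** alpha)

    # Log-shaped outcome utilities; gain sensitivity is fixed at 1, rho scales losses
    gain_util = np.log(1 + gain)
    loss_util = np.log(1 + loss * rho)

    # Expected utility of each of the B bets, penalized linearly by its delay
    bet_util = gain_util * prop_chosen + loss_util * (1 - prop_chosen) \
        - beta * bet_delay

    # Softmax with inverse temperature gamma (categorical_logit in Stan)
    z = gamma * bet_util
    p_bet = np.exp(z - z.max())
    p_bet /= p_bet.sum()
    return p_red, p_bet

For example, with alpha = 1 and c = 0.5 the colour bias cancels and p_red reduces to prop_red, while larger beta values push p_bet toward the less delayed bet options.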
From 00fa84a7572264d4542afd522bf652b8769c344d Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Mon, 26 Aug 2019 18:11:09 +0900
Subject: [PATCH 129/163] Add cgt_preprocess_func

---
 Python/hbayesdm/preprocess_funcs.py | 68 +++++++++++++++++++++++++++++
 R/R/preprocess_funcs.R              | 59 +++++++++++++++++++++++++
 2 files changed, 127 insertions(+)

diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py
index c3ec34c1..171d83d9 100644
--- a/Python/hbayesdm/preprocess_funcs.py
+++ b/Python/hbayesdm/preprocess_funcs.py
@@ -852,3 +852,71 @@ def wcs_preprocess_func(self, raw_data, general_info, additional_args):
 
     # Returned data_dict will directly be passed to pystan
     return data_dict
+
+
+def cgt_preprocess_func(self, raw_data, general_info, additional_args):
+    # Iterate through grouped_data
+    subj_group = iter(general_info['grouped_data'])
+
+    # Use general_info(s) about raw_data
+    # subjs = general_info['subjs']
+    n_subj = general_info['n_subj']
+    t_subjs = general_info['t_subjs']
+    t_max = general_info['t_max']
+
+    uniq_bets = np.unique(raw_data['percentage_staked'])
+    n_bets = len(uniq_bets)
+    bets_asc = np.sort(uniq_bets / 100)
+    bets_dsc = np.flip(np.sort(uniq_bets / 100))
+
+    # Rank each stake by its ascending position; mirror the rank on
+    # descending (gamble_type == 0) trials
+    bet_time = raw_data['percentage_staked'] / 100
+    for b in range(n_bets):
+        bet_time[bet_time == bets_asc[b]] = b + 1
+    raw_data['bet_time'] = np.where(raw_data['gamble_type'] == 0,
+                                    n_bets + 1 - bet_time,
+                                    bet_time)
+
+    # Allocate each array separately (a chained assignment such as
+    # `col_chosen = bet_chosen = np.full(...)` would alias one array)
+    col_chosen = np.full((n_subj, t_max), 0, dtype=int)
+    bet_chosen = np.full((n_subj, t_max), 0, dtype=int)
+    prop_red = np.full((n_subj, t_max), 0, dtype=float)
+    prop_chosen = np.full((n_subj, t_max), 0, dtype=float)
+    gain = np.full((n_subj, t_max, n_bets), 0, dtype=float)
+    loss = np.full((n_subj, t_max, n_bets), 0, dtype=float)
+
+    for s in range(n_subj):
+        t = t_subjs[s]
+        _, subj_data = next(subj_group)
+
+        col_chosen[s, :t] = np.where(subj_data['left_colour_chosen'] == 1,
+                                     1, 2)
+        bet_chosen[s, :t] = subj_data['bet_time']
+        prop_red[s, :t] = subj_data['n_left_colour_boxes'] / 10
+        prop_chosen[s, :t] = np.where(subj_data['left_colour_chosen'] == 1,
+                                      prop_red[s][:t],
+                                      1 - prop_red[s][:t])
+
+        for b in range(n_bets):
+            gain[s, :t, b] = subj_data['trial_initial_points'] / 100 \
+                + subj_data['trial_initial_points'] / 100 \
+                * np.where(subj_data['gamble_type'] == 1,
+                           bets_asc[b],
+                           bets_dsc[b])
+            loss[s, :t, b] = subj_data['trial_initial_points'] / 100 \
+                - subj_data['trial_initial_points'] / 100 \
+                * np.where(subj_data['gamble_type'] == 1,
+                           bets_asc[b],
+                           bets_dsc[b])
+
+    # Wrap into a dict for pystan
+    data_dict = {
+        'N': n_subj,
+        'T': t_max,
+        'B': n_bets,
+        'Tsubj': t_subjs,
+        'gain': gain,
+        'loss': loss,
+        'prop_red': prop_red,
+        'prop_chosen': prop_chosen,
+        'col_chosen': col_chosen,
+        'bet_chosen': bet_chosen
+    }
+
+    # Returned data_dict will directly be passed to pystan
+    return data_dict
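The least obvious step in the function above is the bet-index recoding: stakes are ranked in ascending order, and trials with gamble_type == 0 (stakes offered in descending order) get mirrored ranks, so that bet_chosen always refers to the presentation position that bet_delay indexes. A small illustrative check, assuming the five stake levels (5, 25, 50, 75, 95 percent) that appear in cgt_exampleData.txt:

import numpy as np

# Stake levels from the example data, as proportions, ranked ascending
bets_asc = np.array([0.05, 0.25, 0.50, 0.75, 0.95])
n_bets = len(bets_asc)

# On an ascending trial, a 95% stake is the 5th (last) option offered...
idx_asc = int(np.searchsorted(bets_asc, 0.95)) + 1
assert idx_asc == 5

# ...but on a descending trial it is offered 1st, hence the mirroring
idx_dsc = n_bets + 1 - idx_asc
assert idx_dsc == 1

In other words, a subject waits longest for the 95% stake on ascending trials but can take it immediately on descending trials, which is exactly what the delay penalty in the Stan model needs to know.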
<- b + } + raw_data$bet_time <- ifelse(raw_data$gamble_type == 0, + n_bets + 1 - bet_time, + bet_time) + + col_chosen <- bet_chosen <- prop_red <- prop_chosen <- + array(0, c(n_subj, t_max)) + gain <- loss <- array(0, c(n_subj, t_max, n_bets)) + + for (i in 1:n_subj) { + t <- t_subj[i] + DT_subj <- raw_data[subjid == subjs[i]] + + col_chosen [i, 1:t] <- ifelse(DT_subj$left_colour_chosen == 1, 1, 2) + bet_chosen [i, 1:t] <- DT_subj$bet_time + prop_red [i, 1:t] <- DT_subj$n_left_colour_boxes / 10 + prop_chosen[i, 1:t] <- ifelse(DT_subj$left_colour_chosen == 1, + prop_red[i, 1:t], + 1 - prop_red[i, 1:t]) + + for (b in 1:n_bets) { + gain[i, 1:t, b] <- with(DT_subj, trial_initial_points / 100 + trial_initial_points / 100 * ifelse(gamble_type == 1, bets_asc[b], bets_dsc[b])) + loss[i, 1:t, b] <- with(DT_subj, trial_initial_points / 100 - trial_initial_points / 100 * ifelse(gamble_type == 1, bets_asc[b], bets_dsc[b])) + } + } + + # Wrap into a list for Stan + data_list <- list( + N = n_subj, + T = t_max, + B = n_bets, + Tsubj = t_subjs, + gain = gain, + loss = loss, + prop_red = prop_red, + prop_chosen = prop_chosen, + col_chosen = col_chosen, + bet_chosen = bet_chosen + ) + + # Returned data_list will directly be passed to Stan + return(data_list) +} From 2c2fd485afa6747c73dbb46b12747701c5c4070c Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 18:16:27 +0900 Subject: [PATCH 130/163] Use underscores for delimeters instead of dots --- commons/extdata/cgt_exampleData.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/commons/extdata/cgt_exampleData.txt b/commons/extdata/cgt_exampleData.txt index 53b4b05d..b33adf82 100644 --- a/commons/extdata/cgt_exampleData.txt +++ b/commons/extdata/cgt_exampleData.txt @@ -1,4 +1,4 @@ -"groupID" "subjID" "stage" "assessment.stage" "includes.gamble" "gamble.type" "block" "completed" "block.initial.points" "block.final.points" "trial.initial.points" "trial.final.points" "choice.latency" "n.left.colour.boxes" "token.box" "left.colour.chosen" "response.latency" "stake.index" "percentage.staked" "points.staked" "left.won" "subject.won" +"groupID" "subjID" "stage" "assessment_stage" "includes_gamble" "gamble_type" "block" "completed" "block_initial_points" "block_final_points" "trial_initial_points" "trial_final_points" "choice_latency" "n_left_colour_boxes" "token_box" "left_colour_chosen" "response_latency" "stake_index" "percentage_staked" "points_staked" "left_won" "subject_won" "24100" 4 102 2 0 1 1 1 1 100 140 100 150 16114 8 3 1 3602 3 50 50 1 1 "24101" 4 102 2 0 1 1 1 1 100 140 150 225 7371 3 7 0 1634 3 50 75 0 1 "24102" 4 102 2 0 1 1 1 1 100 140 225 281 2698 8 2 1 1575 2 25 56 1 1 From 4890d3787fa27f931cedab3843b8336ae6c9d287 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 19:49:56 +0900 Subject: [PATCH 131/163] Update cgt model information --- commons/extdata/cgt_exampleData.txt | 8122 ++------------------------- commons/models/cgt_cm.yml | 2 +- commons/stan_files/cgt_cm.stan | 24 +- 3 files changed, 385 insertions(+), 7763 deletions(-) diff --git a/commons/extdata/cgt_exampleData.txt b/commons/extdata/cgt_exampleData.txt index b33adf82..c0db0a41 100644 --- a/commons/extdata/cgt_exampleData.txt +++ b/commons/extdata/cgt_exampleData.txt @@ -1,7750 +1,372 @@ -"groupID" "subjID" "stage" "assessment_stage" "includes_gamble" "gamble_type" "block" "completed" "block_initial_points" "block_final_points" "trial_initial_points" "trial_final_points" "choice_latency" "n_left_colour_boxes" "token_box" 
"left_colour_chosen" "response_latency" "stake_index" "percentage_staked" "points_staked" "left_won" "subject_won" -"24100" 4 102 2 0 1 1 1 1 100 140 100 150 16114 8 3 1 3602 3 50 50 1 1 -"24101" 4 102 2 0 1 1 1 1 100 140 150 225 7371 3 7 0 1634 3 50 75 0 1 -"24102" 4 102 2 0 1 1 1 1 100 140 225 281 2698 8 2 1 1575 2 25 56 1 1 -"24103" 4 102 2 0 1 1 1 1 100 140 281 140 1585 2 1 0 892 3 50 141 1 0 -"24104" 4 102 3 1 1 1 1 1 100 485 100 195 2346 9 7 1 1113 5 95 95 1 1 -"24105" 4 102 3 1 1 1 1 1 100 485 195 244 2225 4 8 0 1348 2 25 49 0 1 -"24106" 4 102 3 1 1 1 1 1 100 485 244 305 1817 8 2 1 1793 2 25 61 1 1 -"24107" 4 102 3 1 1 1 1 1 100 485 305 229 1248 2 1 0 754 2 25 76 1 0 -"24108" 4 102 3 1 1 1 1 1 100 485 229 286 1571 7 6 1 1246 2 25 57 1 1 -"24109" 4 102 3 1 1 1 1 1 100 485 286 272 2672 5 3 0 654 1 5 14 1 0 -"24110" 4 102 3 1 1 1 1 1 100 485 272 340 1284 3 6 0 460 2 25 68 0 1 -"24111" 4 102 3 1 1 1 1 1 100 485 340 323 1843 6 9 1 689 1 5 17 0 0 -"24112" 4 102 3 1 1 1 1 1 100 485 323 485 1125 1 4 0 654 3 50 162 0 1 -"24113" 4 102 3 1 1 1 2 1 100 535 100 150 3689 2 9 0 735 3 50 50 0 1 -"24114" 4 102 3 1 1 1 2 1 100 535 150 158 2506 4 10 0 824 1 5 8 0 1 -"24115" 4 102 3 1 1 1 2 1 100 535 158 118 1414 3 1 0 481 2 25 40 1 0 -"24116" 4 102 3 1 1 1 2 1 100 535 118 207 1271 8 6 1 616 4 75 89 1 1 -"24117" 4 102 3 1 1 1 2 1 100 535 207 217 2024 5 7 0 765 1 5 10 0 1 -"24118" 4 102 3 1 1 1 2 1 100 535 217 163 1214 6 8 1 594 2 25 54 0 0 -"24119" 4 102 3 1 1 1 2 1 100 535 163 285 1363 7 5 1 729 4 75 122 1 1 -"24120" 4 102 3 1 1 1 2 1 100 535 285 428 1315 1 10 0 555 3 50 143 0 1 -"24121" 4 102 3 1 1 1 2 1 100 535 428 535 1589 9 3 1 631 2 25 107 1 1 -"24122" 4 102 3 1 1 1 3 1 100 499 100 150 1871 3 5 0 1060 3 50 50 0 1 -"24123" 4 102 3 1 1 1 3 1 100 499 150 75 1374 8 9 1 739 3 50 75 0 0 -"24124" 4 102 3 1 1 1 3 1 100 499 75 131 1628 2 4 0 837 4 75 56 0 1 -"24125" 4 102 3 1 1 1 3 1 100 499 131 164 1755 6 3 1 1996 2 25 33 1 1 -"24126" 4 102 3 1 1 1 3 1 100 499 164 205 2266 7 6 1 652 2 25 41 1 1 -"24127" 4 102 3 1 1 1 3 1 100 499 205 154 1371 4 2 0 639 2 25 51 1 0 -"24128" 4 102 3 1 1 1 3 1 100 499 154 300 1357 1 8 0 867 5 95 146 0 1 -"24129" 4 102 3 1 1 1 3 1 100 499 300 285 3127 5 7 1 2096 1 5 15 0 0 -"24130" 4 102 3 1 1 1 3 1 100 499 285 499 1314 9 5 1 881 4 75 214 1 1 -"24131" 4 102 3 1 1 1 4 1 100 588 100 150 1821 8 7 1 852 3 50 50 1 1 -"24132" 4 102 3 1 1 1 4 1 100 588 150 188 1473 3 10 0 785 2 25 38 0 1 -"24133" 4 102 3 1 1 1 4 1 100 588 188 141 1889 7 9 1 1108 2 25 47 0 0 -"24134" 4 102 3 1 1 1 4 1 100 588 141 275 1412 9 1 1 1063 5 95 134 1 1 -"24135" 4 102 3 1 1 1 4 1 100 588 275 413 1562 2 3 0 1094 3 50 138 0 1 -"24136" 4 102 3 1 1 1 4 1 100 588 413 620 1718 1 8 0 1427 3 50 207 0 1 -"24137" 4 102 3 1 1 1 4 1 100 588 620 589 1844 5 4 0 841 1 5 31 1 0 -"24138" 4 102 3 1 1 1 4 1 100 588 589 560 1526 4 2 0 843 1 5 29 1 0 -"24139" 4 102 3 1 1 1 4 1 100 588 560 588 1520 6 3 1 1141 1 5 28 1 1 -"24140" 4 102 4 0 1 0 1 1 100 26 100 175 11028 2 7 0 1452 2 75 75 0 1 -"24141" 4 102 4 0 1 0 1 1 100 26 175 341 1732 3 7 0 1944 1 95 166 0 1 -"24142" 4 102 4 0 1 0 1 1 100 26 341 512 1425 2 8 0 827 3 50 171 0 1 -"24143" 4 102 4 0 1 0 1 1 100 26 512 26 1544 8 9 1 780 1 95 486 0 0 -"24144" 4 102 5 1 1 0 1 1 100 1429 100 195 1486 1 3 0 1197 1 95 95 0 1 -"24145" 4 102 5 1 1 0 1 1 100 1429 195 244 1729 6 5 1 581 4 25 49 1 1 -"24146" 4 102 5 1 1 0 1 1 100 1429 244 476 1302 2 8 0 894 1 95 232 0 1 -"24147" 4 102 5 1 1 0 1 1 100 1429 476 238 2128 8 9 1 729 3 50 238 0 0 -"24148" 4 102 5 1 1 0 1 1 100 1429 238 464 1283 3 4 
0 732 1 95 226 0 1 -"24149" 4 102 5 1 1 0 1 1 100 1429 464 441 1662 5 7 1 942 5 5 23 0 0 -"24150" 4 102 5 1 1 0 1 1 100 1429 441 772 1742 7 4 1 776 2 75 331 1 1 -"24151" 4 102 5 1 1 0 1 1 100 1429 772 733 1544 4 1 0 949 5 5 39 1 0 -"24152" 4 102 5 1 1 0 1 1 100 1429 733 1429 1517 9 6 1 819 1 95 696 1 1 -"24153" 4 102 5 1 1 0 2 1 100 1654 100 195 1549 8 1 1 914 1 95 95 1 1 -"24154" 4 102 5 1 1 0 2 1 100 1654 195 244 1667 6 2 1 466 4 25 49 1 1 -"24155" 4 102 5 1 1 0 2 1 100 1654 244 122 2442 7 9 1 900 3 50 122 0 0 -"24156" 4 102 5 1 1 0 2 1 100 1654 122 238 1214 2 10 0 882 1 95 116 0 1 -"24157" 4 102 5 1 1 0 2 1 100 1654 238 298 2709 5 3 1 867 4 25 60 1 1 -"24158" 4 102 5 1 1 0 2 1 100 1654 298 223 1631 4 2 0 880 4 25 75 1 0 -"24159" 4 102 5 1 1 0 2 1 100 1654 223 435 1371 3 5 0 1359 1 95 212 0 1 -"24160" 4 102 5 1 1 0 2 1 100 1654 435 848 1356 9 4 1 801 1 95 413 1 1 -"24161" 4 102 5 1 1 0 2 1 100 1654 848 1654 1168 1 7 0 1006 1 95 806 0 1 -"24162" 4 102 5 1 1 0 3 1 100 31 100 195 1908 7 5 1 1295 1 95 95 1 1 -"24163" 4 102 5 1 1 0 3 1 100 31 195 10 1310 2 1 0 693 1 95 185 1 0 -"24164" 4 102 5 1 1 0 3 1 100 31 10 20 1337 8 6 1 735 1 95 10 1 1 -"24165" 4 102 5 1 1 0 3 1 100 31 20 39 1346 4 7 0 919 1 95 19 0 1 -"24166" 4 102 5 1 1 0 3 1 100 31 39 76 1105 3 10 0 666 1 95 37 0 1 -"24167" 4 102 5 1 1 0 3 1 100 31 76 4 1312 6 8 1 655 1 95 72 0 0 -"24168" 4 102 5 1 1 0 3 1 100 31 4 8 1261 9 2 1 958 1 95 4 1 1 -"24169" 4 102 5 1 1 0 3 1 100 31 8 16 1374 5 3 1 466 1 95 8 1 1 -"24170" 4 102 5 1 1 0 3 1 100 31 16 31 1137 1 10 0 576 1 95 15 0 1 -"24171" 4 102 5 1 1 0 4 1 100 749 100 195 1788 2 3 0 669 1 95 95 0 1 -"24172" 4 102 5 1 1 0 4 1 100 749 195 341 1114 7 4 1 787 2 75 146 1 1 -"24173" 4 102 5 1 1 0 4 1 100 749 341 170 1211 3 1 0 1672 3 50 171 1 0 -"24174" 4 102 5 1 1 0 4 1 100 749 170 332 1189 1 9 0 530 1 95 162 0 1 -"24175" 4 102 5 1 1 0 4 1 100 749 332 647 1022 8 7 1 691 1 95 315 1 1 -"24176" 4 102 5 1 1 0 4 1 100 749 647 1262 1135 9 2 1 647 1 95 615 1 1 -"24177" 4 102 5 1 1 0 4 1 100 749 1262 1199 1525 5 6 1 747 5 5 63 0 0 -"24178" 4 102 5 1 1 0 4 1 100 749 1199 599 1212 6 8 1 857 3 50 600 0 0 -"24179" 4 102 5 1 1 0 4 1 100 749 599 749 1069 4 7 0 864 4 25 150 0 1 -"24180" 4 110 2 0 1 1 1 1 100 268 100 150 12113 8 3 1 838 3 50 50 1 1 -"24181" 4 110 2 0 1 1 1 1 100 268 150 188 10723 3 7 0 795 2 25 38 0 1 -"24182" 4 110 2 0 1 1 1 1 100 268 188 282 2554 8 2 1 743 3 50 94 1 1 -"24183" 4 110 2 0 1 1 1 1 100 268 282 268 3184 2 1 0 1438 1 5 14 1 0 -"24184" 4 110 3 1 1 1 1 1 100 480 100 150 3670 9 7 1 2124 3 50 50 1 1 -"24185" 4 110 3 1 1 1 1 1 100 480 150 188 3301 4 8 0 2021 2 25 38 0 1 -"24186" 4 110 3 1 1 1 1 1 100 480 188 235 1802 8 2 1 2546 2 25 47 1 1 -"24187" 4 110 3 1 1 1 1 1 100 480 235 223 1350 2 1 0 1028 1 5 12 1 0 -"24188" 4 110 3 1 1 1 1 1 100 480 223 279 1512 7 6 1 558 2 25 56 1 1 -"24189" 4 110 3 1 1 1 1 1 100 480 279 293 2832 5 3 1 535 1 5 14 1 1 -"24190" 4 110 3 1 1 1 1 1 100 480 293 366 1930 3 6 0 364 2 25 73 0 1 -"24191" 4 110 3 1 1 1 1 1 100 480 366 384 2136 6 9 0 536 1 5 18 0 1 -"24192" 4 110 3 1 1 1 1 1 100 480 384 480 1795 1 4 0 364 2 25 96 0 1 -"24193" 4 110 3 1 1 1 2 1 100 383 100 175 1671 2 9 0 637 4 75 75 0 1 -"24194" 4 110 3 1 1 1 2 1 100 383 175 184 1316 4 10 0 523 1 5 9 0 1 -"24195" 4 110 3 1 1 1 2 1 100 383 184 138 1583 3 1 0 730 2 25 46 1 0 -"24196" 4 110 3 1 1 1 2 1 100 383 138 207 1076 8 6 1 391 3 50 69 1 1 -"24197" 4 110 3 1 1 1 2 1 100 383 207 217 2922 5 7 0 484 1 5 10 0 1 -"24198" 4 110 3 1 1 1 2 1 100 383 217 163 1747 6 8 1 389 2 25 54 0 0 -"24199" 4 110 3 1 1 1 2 1 
100 383 163 245 1496 7 5 1 391 3 50 82 1 1 -"24200" 4 110 3 1 1 1 2 1 100 383 245 306 1433 1 10 0 1211 2 25 61 0 1 -"24201" 4 110 3 1 1 1 2 1 100 383 306 383 1294 9 3 1 574 2 25 77 1 1 -"24202" 4 110 3 1 1 1 3 1 100 385 100 150 1910 3 5 0 593 3 50 50 0 1 -"24203" 4 110 3 1 1 1 3 1 100 385 150 75 1249 8 9 1 468 3 50 75 0 0 -"24204" 4 110 3 1 1 1 3 1 100 385 75 131 908 2 4 0 622 4 75 56 0 1 -"24205" 4 110 3 1 1 1 3 1 100 385 131 164 1742 6 3 1 568 2 25 33 1 1 -"24206" 4 110 3 1 1 1 3 1 100 385 164 205 1075 7 6 1 1686 2 25 41 1 1 -"24207" 4 110 3 1 1 1 3 1 100 385 205 195 1858 4 2 0 887 1 5 10 1 0 -"24208" 4 110 3 1 1 1 3 1 100 385 195 293 1018 1 8 0 607 3 50 98 0 1 -"24209" 4 110 3 1 1 1 3 1 100 385 293 308 2439 5 7 0 689 1 5 15 0 1 -"24210" 4 110 3 1 1 1 3 1 100 385 308 385 1061 9 5 1 1944 2 25 77 1 1 -"24211" 4 110 3 1 1 1 4 1 100 575 100 175 1386 8 7 1 406 4 75 75 1 1 -"24212" 4 110 3 1 1 1 4 1 100 575 175 219 1336 3 10 0 365 2 25 44 0 1 -"24213" 4 110 3 1 1 1 4 1 100 575 219 164 658 7 9 1 288 2 25 55 0 0 -"24214" 4 110 3 1 1 1 4 1 100 575 164 246 871 9 1 1 1252 3 50 82 1 1 -"24215" 4 110 3 1 1 1 4 1 100 575 246 369 3382 2 3 0 427 3 50 123 0 1 -"24216" 4 110 3 1 1 1 4 1 100 575 369 461 1324 1 8 0 762 2 25 92 0 1 -"24217" 4 110 3 1 1 1 4 1 100 575 461 484 2075 5 4 1 254 1 5 23 1 1 -"24218" 4 110 3 1 1 1 4 1 100 575 484 460 2631 4 2 0 271 1 5 24 1 0 -"24219" 4 110 3 1 1 1 4 1 100 575 460 575 1031 6 3 1 308 2 25 115 1 1 -"24220" 4 110 4 0 1 0 1 1 100 166 100 175 4491 2 7 0 1068 2 75 75 0 1 -"24221" 4 110 4 0 1 0 1 1 100 166 175 341 3062 3 7 0 1338 1 95 166 0 1 -"24222" 4 110 4 0 1 0 1 1 100 166 341 665 3504 2 8 0 879 1 95 324 0 1 -"24223" 4 110 4 0 1 0 1 1 100 166 665 166 1290 8 9 1 1234 2 75 499 0 0 -"24224" 4 110 5 1 1 0 1 1 100 989 100 195 1309 1 3 0 444 1 95 95 0 1 -"24225" 4 110 5 1 1 0 1 1 100 989 195 293 1116 6 5 1 596 3 50 98 1 1 -"24226" 4 110 5 1 1 0 1 1 100 989 293 440 837 2 8 0 3659 3 50 147 0 1 -"24227" 4 110 5 1 1 0 1 1 100 989 440 330 1051 8 9 1 406 4 25 110 0 0 -"24228" 4 110 5 1 1 0 1 1 100 989 330 644 1208 3 4 0 2476 1 95 314 0 1 -"24229" 4 110 5 1 1 0 1 1 100 989 644 676 1796 5 7 0 380 5 5 32 0 1 -"24230" 4 110 5 1 1 0 1 1 100 989 676 1014 1168 7 4 1 606 3 50 338 1 1 -"24231" 4 110 5 1 1 0 1 1 100 989 1014 507 910 4 1 0 464 3 50 507 1 0 -"24232" 4 110 5 1 1 0 1 1 100 989 507 989 1000 9 6 1 333 1 95 482 1 1 -"24233" 4 110 5 1 1 0 2 0 100 1 100 195 1302 8 1 1 366 1 95 95 1 1 -"24234" 4 110 5 1 1 0 2 0 100 1 195 146 914 6 2 0 341 4 25 49 1 0 -"24235" 4 110 5 1 1 0 2 0 100 1 146 7 1610 7 9 1 1042 1 95 139 0 0 -"24236" 4 110 5 1 1 0 2 0 100 1 7 14 910 2 10 0 328 1 95 7 0 1 -"24237" 4 110 5 1 1 0 2 0 100 1 14 1 5318 5 3 0 598 1 95 13 1 0 -"24238" 4 110 5 1 1 0 3 1 100 8 100 195 1722 7 5 1 365 1 95 95 1 1 -"24239" 4 110 5 1 1 0 3 1 100 8 195 10 1121 2 1 0 401 1 95 185 1 0 -"24240" 4 110 5 1 1 0 3 1 100 8 10 20 1035 8 6 1 391 1 95 10 1 1 -"24241" 4 110 5 1 1 0 3 1 100 8 20 39 1129 4 7 0 529 1 95 19 0 1 -"24242" 4 110 5 1 1 0 3 1 100 8 39 76 885 3 10 0 481 1 95 37 0 1 -"24243" 4 110 5 1 1 0 3 1 100 8 76 38 1039 6 8 1 436 3 50 38 0 0 -"24244" 4 110 5 1 1 0 3 1 100 8 38 74 1260 9 2 1 269 1 95 36 1 1 -"24245" 4 110 5 1 1 0 3 1 100 8 74 4 1509 5 3 0 275 1 95 70 1 0 -"24246" 4 110 5 1 1 0 3 1 100 8 4 8 1107 1 10 0 300 1 95 4 0 1 -"24247" 4 110 5 1 1 0 4 1 100 2313 100 195 1153 2 3 0 272 1 95 95 0 1 -"24248" 4 110 5 1 1 0 4 1 100 2313 195 380 968 7 4 1 351 1 95 185 1 1 -"24249" 4 110 5 1 1 0 4 1 100 2313 380 190 924 3 1 0 374 3 50 190 1 0 -"24250" 4 110 5 1 1 0 4 1 100 2313 190 371 681 1 
9 0 206 1 95 181 0 1 -"24251" 4 110 5 1 1 0 4 1 100 2313 371 723 905 8 7 1 268 1 95 352 1 1 -"24252" 4 110 5 1 1 0 4 1 100 2313 723 1265 964 9 2 1 2300 2 75 542 1 1 -"24253" 4 110 5 1 1 0 4 1 100 2313 1265 1581 1092 5 6 0 1766 4 25 316 0 1 -"24254" 4 110 5 1 1 0 4 1 100 2313 1581 1186 1415 6 8 1 962 4 25 395 0 0 -"24255" 4 110 5 1 1 0 4 1 100 2313 1186 2313 921 4 7 0 251 1 95 1127 0 1 -"24256" 4 111 2 0 1 1 1 1 100 351 100 150 17156 8 3 1 3106 3 50 50 1 1 -"24257" 4 111 2 0 1 1 1 1 100 351 150 225 4203 3 7 0 1585 3 50 75 0 1 -"24258" 4 111 2 0 1 1 1 1 100 351 225 281 2675 8 2 1 1897 2 25 56 1 1 -"24259" 4 111 2 0 1 1 1 1 100 351 281 351 4468 2 1 1 1157 2 25 70 1 1 -"24260" 4 111 3 1 1 1 1 0 100 1 100 125 8822 9 7 1 1000 2 25 25 1 1 -"24261" 4 111 3 1 1 1 1 0 100 1 125 156 8869 4 8 0 2541 2 25 31 0 1 -"24262" 4 111 3 1 1 1 1 0 100 1 156 78 4537 8 2 0 343 3 50 78 1 0 -"24263" 4 111 3 1 1 1 1 0 100 1 78 39 5764 2 1 0 1128 3 50 39 1 0 -"24264" 4 111 3 1 1 1 1 0 100 1 39 68 4031 7 6 1 1413 4 75 29 1 1 -"24265" 4 111 3 1 1 1 1 0 100 1 68 17 13159 5 3 0 779 4 75 51 1 0 -"24266" 4 111 3 1 1 1 1 0 100 1 17 1 4643 3 6 1 796 5 95 16 0 0 -"24267" 4 111 3 1 1 1 2 1 100 266 100 150 4567 2 9 0 275 3 50 50 0 1 -"24268" 4 111 3 1 1 1 2 1 100 266 150 225 6587 4 10 0 1144 3 50 75 0 1 -"24269" 4 111 3 1 1 1 2 1 100 266 225 338 3134 3 1 1 983 3 50 113 1 1 -"24270" 4 111 3 1 1 1 2 1 100 266 338 253 20437 8 6 0 1422 2 25 85 1 0 -"24271" 4 111 3 1 1 1 2 1 100 266 253 126 2088 5 7 1 947 3 50 127 0 0 -"24272" 4 111 3 1 1 1 2 1 100 266 126 189 5307 6 8 0 300 3 50 63 0 1 -"24273" 4 111 3 1 1 1 2 1 100 266 189 284 6507 7 5 1 979 3 50 95 1 1 -"24274" 4 111 3 1 1 1 2 1 100 266 284 355 9474 1 10 0 2205 2 25 71 0 1 -"24275" 4 111 3 1 1 1 2 1 100 266 355 266 3185 9 3 0 1791 2 25 89 1 0 -"24276" 4 111 3 1 1 1 3 1 100 215 100 50 12524 3 5 1 309 3 50 50 0 0 -"24277" 4 111 3 1 1 1 3 1 100 215 50 75 9436 8 9 0 296 3 50 25 0 1 -"24278" 4 111 3 1 1 1 3 1 100 215 75 113 4360 2 4 0 1166 3 50 38 0 1 -"24279" 4 111 3 1 1 1 3 1 100 215 113 170 2811 6 3 1 724 3 50 57 1 1 -"24280" 4 111 3 1 1 1 3 1 100 215 170 255 5406 7 6 1 1011 3 50 85 1 1 -"24281" 4 111 3 1 1 1 3 1 100 215 255 383 8424 4 2 1 756 3 50 128 1 1 -"24282" 4 111 3 1 1 1 3 1 100 215 383 287 13461 1 8 1 1219 2 25 96 0 0 -"24283" 4 111 3 1 1 1 3 1 100 215 287 431 5326 5 7 0 967 3 50 144 0 1 -"24284" 4 111 3 1 1 1 3 1 100 215 431 215 6574 9 5 0 1403 3 50 216 1 0 -"24285" 4 111 3 1 1 1 4 1 100 39 100 75 2903 8 7 0 1994 2 25 25 1 0 -"24286" 4 111 3 1 1 1 4 1 100 39 75 37 3921 3 10 1 356 3 50 38 0 0 -"24287" 4 111 3 1 1 1 4 1 100 39 37 18 6136 7 9 1 1571 3 50 19 0 0 -"24288" 4 111 3 1 1 1 4 1 100 39 18 9 9245 9 1 0 411 3 50 9 1 0 -"24289" 4 111 3 1 1 1 4 1 100 39 9 18 4917 2 3 0 1697 5 95 9 0 1 -"24290" 4 111 3 1 1 1 4 1 100 39 18 35 6213 1 8 0 0 5 95 17 0 1 -"24291" 4 111 3 1 1 1 4 1 100 39 35 17 1995 5 4 0 1242 3 50 18 1 0 -"24292" 4 111 3 1 1 1 4 1 100 39 17 26 5740 4 2 1 881 3 50 9 1 1 -"24293" 4 111 3 1 1 1 4 1 100 39 26 39 5700 6 3 1 1251 3 50 13 1 1 -"24294" 4 111 4 0 1 0 1 1 100 1 100 175 6444 2 7 0 807 2 75 75 0 1 -"24295" 4 111 4 0 1 0 1 1 100 1 175 341 7150 3 7 0 623 1 95 166 0 1 -"24296" 4 111 4 0 1 0 1 1 100 1 341 17 5598 2 8 1 545 1 95 324 0 0 -"24297" 4 111 4 0 1 0 1 1 100 1 17 1 10381 8 9 1 684 1 95 16 0 0 -"24298" 4 111 5 1 1 0 1 1 100 139 100 175 2361 1 3 0 694 2 75 75 0 1 -"24299" 4 111 5 1 1 0 1 1 100 139 175 263 2330 6 5 1 1113 3 50 88 1 1 -"24300" 4 111 5 1 1 0 1 1 100 139 263 131 7617 2 8 1 724 3 50 132 0 0 -"24301" 4 111 5 1 1 0 1 1 100 139 131 65 3775 8 9 
1 1201 3 50 66 0 0 -"24302" 4 111 5 1 1 0 1 1 100 139 65 127 9253 3 4 0 586 1 95 62 0 1 -"24303" 4 111 5 1 1 0 1 1 100 139 127 248 1572 5 7 0 590 1 95 121 0 1 -"24304" 4 111 5 1 1 0 1 1 100 139 248 186 5383 7 4 0 949 4 25 62 1 0 -"24305" 4 111 5 1 1 0 1 1 100 139 186 279 2669 4 1 1 492 3 50 93 1 1 -"24306" 4 111 5 1 1 0 1 1 100 139 279 139 5569 9 6 0 966 3 50 140 1 0 -"24307" 4 111 5 1 1 0 2 0 100 0 100 50 6810 8 1 0 708 3 50 50 1 0 -"24308" 4 111 5 1 1 0 2 0 100 0 50 98 1968 6 2 1 760 1 95 48 1 1 -"24309" 4 111 5 1 1 0 2 0 100 0 98 5 2137 7 9 1 777 1 95 93 0 0 -"24310" 4 111 5 1 1 0 2 0 100 0 5 10 3001 2 10 0 663 1 95 5 0 1 -"24311" 4 111 5 1 1 0 2 0 100 0 10 0 2757 5 3 0 648 1 95 10 1 0 -"24312" 4 111 5 1 1 0 3 0 100 1 100 175 3103 7 5 1 1353 2 75 75 1 1 -"24313" 4 111 5 1 1 0 3 0 100 1 175 87 1838 2 1 0 804 3 50 88 1 0 -"24314" 4 111 5 1 1 0 3 0 100 1 87 4 5524 8 6 0 570 1 95 83 1 0 -"24315" 4 111 5 1 1 0 3 0 100 1 4 8 4836 4 7 0 499 1 95 4 0 1 -"24316" 4 111 5 1 1 0 3 0 100 1 8 16 1281 3 10 0 530 1 95 8 0 1 -"24317" 4 111 5 1 1 0 3 0 100 1 16 1 2230 6 8 1 569 1 95 15 0 0 -"24318" 4 111 5 1 1 0 4 1 100 782 100 175 3179 2 3 0 2282 2 75 75 0 1 -"24319" 4 111 5 1 1 0 4 1 100 782 175 263 2322 7 4 1 1058 3 50 88 1 1 -"24320" 4 111 5 1 1 0 4 1 100 782 263 329 5100 3 1 1 823 4 25 66 1 1 -"24321" 4 111 5 1 1 0 4 1 100 782 329 494 2333 1 9 0 756 3 50 165 0 1 -"24322" 4 111 5 1 1 0 4 1 100 782 494 618 2389 8 7 1 986 4 25 124 1 1 -"24323" 4 111 5 1 1 0 4 1 100 782 618 463 10736 9 2 0 1192 4 25 155 1 0 -"24324" 4 111 5 1 1 0 4 1 100 782 463 695 1932 5 6 0 997 3 50 232 0 1 -"24325" 4 111 5 1 1 0 4 1 100 782 695 521 2331 6 8 1 1090 4 25 174 0 0 -"24326" 4 111 5 1 1 0 4 1 100 782 521 782 10133 4 7 0 1608 3 50 261 0 1 -"24327" 4 134 2 0 1 1 1 1 100 107 100 150 5401 8 3 1 933 3 50 50 1 1 -"24328" 4 134 2 0 1 1 1 1 100 107 150 225 7726 3 7 0 1356 3 50 75 0 1 -"24329" 4 134 2 0 1 1 1 1 100 107 225 214 2257 8 2 0 604 1 5 11 1 0 -"24330" 4 134 2 0 1 1 1 1 100 107 214 107 2260 2 1 0 2302 3 50 107 1 0 -"24331" 4 134 3 1 1 1 1 1 100 422 100 150 9045 9 7 1 1636 3 50 50 1 1 -"24332" 4 134 3 1 1 1 1 1 100 422 150 225 915 4 8 0 517 3 50 75 0 1 -"24333" 4 134 3 1 1 1 1 1 100 422 225 338 2154 8 2 1 586 3 50 113 1 1 -"24334" 4 134 3 1 1 1 1 1 100 422 338 253 2787 2 1 0 812 2 25 85 1 0 -"24335" 4 134 3 1 1 1 1 1 100 422 253 240 1906 7 6 0 341 1 5 13 1 0 -"24336" 4 134 3 1 1 1 1 1 100 422 240 300 2458 5 3 1 1603 2 25 60 1 1 -"24337" 4 134 3 1 1 1 1 1 100 422 300 375 1876 3 6 0 904 2 25 75 0 1 -"24338" 4 134 3 1 1 1 1 1 100 422 375 281 1954 6 9 1 787 2 25 94 0 0 -"24339" 4 134 3 1 1 1 1 1 100 422 281 422 1699 1 4 0 2536 3 50 141 0 1 -"24340" 4 134 3 1 1 1 2 1 100 709 100 195 5216 2 9 0 613 5 95 95 0 1 -"24341" 4 134 3 1 1 1 2 1 100 709 195 205 1677 4 10 0 866 1 5 10 0 1 -"24342" 4 134 3 1 1 1 2 1 100 709 205 102 1550 3 1 0 1459 3 50 103 1 0 -"24343" 4 134 3 1 1 1 2 1 100 709 102 199 1283 8 6 1 0 5 95 97 1 1 -"24344" 4 134 3 1 1 1 2 1 100 709 199 299 3716 5 7 0 1201 3 50 100 0 1 -"24345" 4 134 3 1 1 1 2 1 100 709 299 149 2229 6 8 1 2081 3 50 150 0 0 -"24346" 4 134 3 1 1 1 2 1 100 709 149 291 1267 7 5 1 0 5 95 142 1 1 -"24347" 4 134 3 1 1 1 2 1 100 709 291 567 1988 1 10 0 0 5 95 276 0 1 -"24348" 4 134 3 1 1 1 2 1 100 709 567 709 2300 9 3 1 4604 2 25 142 1 1 -"24349" 4 134 3 1 1 1 3 0 100 0 100 175 1899 3 5 0 903 4 75 75 0 1 -"24350" 4 134 3 1 1 1 3 0 100 0 175 9 1410 8 9 1 0 5 95 166 0 0 -"24351" 4 134 3 1 1 1 3 0 100 0 9 18 2260 2 4 0 0 5 95 9 0 1 -"24352" 4 134 3 1 1 1 3 0 100 0 18 35 2846 6 3 1 0 5 95 17 1 1 -"24353" 4 
134 3 1 1 1 3 0 100 0 35 68 2948 7 6 1 0 5 95 33 1 1 -"24354" 4 134 3 1 1 1 3 0 100 0 68 3 2677 4 2 0 0 5 95 65 1 0 -"24355" 4 134 3 1 1 1 3 0 100 0 3 6 2291 1 8 0 0 5 95 3 0 1 -"24356" 4 134 3 1 1 1 3 0 100 0 6 0 2332 5 7 1 0 5 95 6 0 0 -"24357" 4 134 3 1 1 1 4 1 100 284 100 195 2809 8 7 1 0 5 95 95 1 1 -"24358" 4 134 3 1 1 1 4 1 100 284 195 293 1809 3 10 0 1817 3 50 98 0 1 -"24359" 4 134 3 1 1 1 4 1 100 284 293 15 1296 7 9 1 1888 5 95 278 0 0 -"24360" 4 134 3 1 1 1 4 1 100 284 15 29 1700 9 1 1 0 5 95 14 1 1 -"24361" 4 134 3 1 1 1 4 1 100 284 29 57 1945 2 3 0 0 5 95 28 0 1 -"24362" 4 134 3 1 1 1 4 1 100 284 57 111 1682 1 8 0 0 5 95 54 0 1 -"24363" 4 134 3 1 1 1 4 1 100 284 111 216 2679 5 4 1 0 5 95 105 1 1 -"24364" 4 134 3 1 1 1 4 1 100 284 216 162 2917 4 2 0 3024 2 25 54 1 0 -"24365" 4 134 3 1 1 1 4 1 100 284 162 284 3925 6 3 1 960 4 75 122 1 1 -"24366" 4 134 4 0 1 0 1 1 100 29 100 150 4987 2 7 0 628 3 50 50 0 1 -"24367" 4 134 4 0 1 0 1 1 100 29 150 293 4291 3 7 0 804 1 95 143 0 1 -"24368" 4 134 4 0 1 0 1 1 100 29 293 571 1683 2 8 0 433 1 95 278 0 1 -"24369" 4 134 4 0 1 0 1 1 100 29 571 29 1482 8 9 1 409 1 95 542 0 0 -"24370" 4 134 5 1 1 0 1 1 100 18 100 195 2061 1 3 0 305 1 95 95 0 1 -"24371" 4 134 5 1 1 0 1 1 100 18 195 244 2719 6 5 1 524 4 25 49 1 1 -"24372" 4 134 5 1 1 0 1 1 100 18 244 476 1032 2 8 0 300 1 95 232 0 1 -"24373" 4 134 5 1 1 0 1 1 100 18 476 24 1595 8 9 1 405 1 95 452 0 0 -"24374" 4 134 5 1 1 0 1 1 100 18 24 47 1982 3 4 0 2654 1 95 23 0 1 -"24375" 4 134 5 1 1 0 1 1 100 18 47 92 1092 5 7 0 326 1 95 45 0 1 -"24376" 4 134 5 1 1 0 1 1 100 18 92 179 1860 7 4 1 321 1 95 87 1 1 -"24377" 4 134 5 1 1 0 1 1 100 18 179 9 1550 4 1 0 490 1 95 170 1 0 -"24378" 4 134 5 1 1 0 1 1 100 18 9 18 1528 9 6 1 461 1 95 9 1 1 -"24379" 4 134 5 1 1 0 2 0 100 0 100 195 3008 8 1 1 406 1 95 95 1 1 -"24380" 4 134 5 1 1 0 2 0 100 0 195 380 1416 6 2 1 458 1 95 185 1 1 -"24381" 4 134 5 1 1 0 2 0 100 0 380 19 1394 7 9 1 469 1 95 361 0 0 -"24382" 4 134 5 1 1 0 2 0 100 0 19 37 1323 2 10 0 562 1 95 18 0 1 -"24383" 4 134 5 1 1 0 2 0 100 0 37 2 1452 5 3 0 381 1 95 35 1 0 -"24384" 4 134 5 1 1 0 2 0 100 0 2 0 1464 4 2 0 384 1 95 2 1 0 -"24385" 4 134 5 1 1 0 3 0 100 0 100 195 3370 7 5 1 631 1 95 95 1 1 -"24386" 4 134 5 1 1 0 3 0 100 0 195 10 1326 2 1 0 380 1 95 185 1 0 -"24387" 4 134 5 1 1 0 3 0 100 0 10 20 1336 8 6 1 429 1 95 10 1 1 -"24388" 4 134 5 1 1 0 3 0 100 0 20 39 2679 4 7 0 556 1 95 19 0 1 -"24389" 4 134 5 1 1 0 3 0 100 0 39 76 1279 3 10 0 543 1 95 37 0 1 -"24390" 4 134 5 1 1 0 3 0 100 0 76 4 1339 6 8 1 392 1 95 72 0 0 -"24391" 4 134 5 1 1 0 3 0 100 0 4 8 1587 9 2 1 581 1 95 4 1 1 -"24392" 4 134 5 1 1 0 3 0 100 0 8 0 1350 5 3 0 572 1 95 8 1 0 -"24393" 4 134 5 1 1 0 4 1 100 20 100 195 1320 2 3 0 359 1 95 95 0 1 -"24394" 4 134 5 1 1 0 4 1 100 20 195 380 1243 7 4 1 405 1 95 185 1 1 -"24395" 4 134 5 1 1 0 4 1 100 20 380 19 1087 3 1 0 305 1 95 361 1 0 -"24396" 4 134 5 1 1 0 4 1 100 20 19 37 1647 1 9 0 350 1 95 18 0 1 -"24397" 4 134 5 1 1 0 4 1 100 20 37 72 1627 8 7 1 499 1 95 35 1 1 -"24398" 4 134 5 1 1 0 4 1 100 20 72 140 1114 9 2 1 394 1 95 68 1 1 -"24399" 4 134 5 1 1 0 4 1 100 20 140 210 2380 5 6 0 1033 3 50 70 0 1 -"24400" 4 134 5 1 1 0 4 1 100 20 210 10 1517 6 8 1 395 1 95 200 0 0 -"24401" 4 134 5 1 1 0 4 1 100 20 10 20 1078 4 7 0 520 1 95 10 0 1 -"24402" 4 140 2 0 1 1 1 1 100 247 100 125 6991 8 3 1 2160 2 25 25 1 1 -"24403" 4 140 2 0 1 1 1 1 100 247 125 219 9271 3 7 0 1324 4 75 94 0 1 -"24404" 4 140 2 0 1 1 1 1 100 247 219 329 8530 8 2 1 706 3 50 110 1 1 -"24405" 4 140 2 0 1 1 1 1 100 247 329 247 2221 2 
1 0 1594 2 25 82 1 0 -"24406" 4 140 3 1 1 1 1 1 100 525 100 175 2567 9 7 1 1139 4 75 75 1 1 -"24407" 4 140 3 1 1 1 1 1 100 525 175 219 3090 4 8 0 885 2 25 44 0 1 -"24408" 4 140 3 1 1 1 1 1 100 525 219 274 1296 8 2 1 1816 2 25 55 1 1 -"24409" 4 140 3 1 1 1 1 1 100 525 274 205 1687 2 1 0 341 2 25 69 1 0 -"24410" 4 140 3 1 1 1 1 1 100 525 205 256 1722 7 6 1 1093 2 25 51 1 1 -"24411" 4 140 3 1 1 1 1 1 100 525 256 320 3094 5 3 1 737 2 25 64 1 1 -"24412" 4 140 3 1 1 1 1 1 100 525 320 400 2027 3 6 0 704 2 25 80 0 1 -"24413" 4 140 3 1 1 1 1 1 100 525 400 420 3395 6 9 0 872 1 5 20 0 1 -"24414" 4 140 3 1 1 1 1 1 100 525 420 525 1171 1 4 0 1081 2 25 105 0 1 -"24415" 4 140 3 1 1 1 2 1 100 570 100 175 2476 2 9 0 548 4 75 75 0 1 -"24416" 4 140 3 1 1 1 2 1 100 570 175 219 3391 4 10 0 412 2 25 44 0 1 -"24417" 4 140 3 1 1 1 2 1 100 570 219 164 1909 3 1 0 392 2 25 55 1 0 -"24418" 4 140 3 1 1 1 2 1 100 570 164 205 1459 8 6 1 1395 2 25 41 1 1 -"24419" 4 140 3 1 1 1 2 1 100 570 205 256 2525 5 7 0 595 2 25 51 0 1 -"24420" 4 140 3 1 1 1 2 1 100 570 256 243 1332 6 8 1 1419 1 5 13 0 0 -"24421" 4 140 3 1 1 1 2 1 100 570 243 304 1276 7 5 1 1029 2 25 61 1 1 -"24422" 4 140 3 1 1 1 2 1 100 570 304 456 1001 1 10 0 535 3 50 152 0 1 -"24423" 4 140 3 1 1 1 2 1 100 570 456 570 1115 9 3 1 629 2 25 114 1 1 -"24424" 4 140 3 1 1 1 3 1 100 554 100 150 2729 3 5 0 692 3 50 50 0 1 -"24425" 4 140 3 1 1 1 3 1 100 554 150 75 1177 8 9 1 783 3 50 75 0 0 -"24426" 4 140 3 1 1 1 3 1 100 554 75 131 1167 2 4 0 1357 4 75 56 0 1 -"24427" 4 140 3 1 1 1 3 1 100 554 131 197 1522 6 3 1 1239 3 50 66 1 1 -"24428" 4 140 3 1 1 1 3 1 100 554 197 296 1417 7 6 1 476 3 50 99 1 1 -"24429" 4 140 3 1 1 1 3 1 100 554 296 281 1083 4 2 0 772 1 5 15 1 0 -"24430" 4 140 3 1 1 1 3 1 100 554 281 492 892 1 8 0 504 4 75 211 0 1 -"24431" 4 140 3 1 1 1 3 1 100 554 492 369 3149 5 7 1 293 2 25 123 0 0 -"24432" 4 140 3 1 1 1 3 1 100 554 369 554 1372 9 5 1 488 3 50 185 1 1 -"24433" 4 140 3 1 1 1 4 1 100 844 100 175 3846 8 7 1 701 4 75 75 1 1 -"24434" 4 140 3 1 1 1 4 1 100 844 175 219 1305 3 10 0 848 2 25 44 0 1 -"24435" 4 140 3 1 1 1 4 1 100 844 219 208 1381 7 9 1 809 1 5 11 0 0 -"24436" 4 140 3 1 1 1 4 1 100 844 208 364 960 9 1 1 936 4 75 156 1 1 -"24437" 4 140 3 1 1 1 4 1 100 844 364 455 1441 2 3 0 1498 2 25 91 0 1 -"24438" 4 140 3 1 1 1 4 1 100 844 455 569 1176 1 8 0 1846 2 25 114 0 1 -"24439" 4 140 3 1 1 1 4 1 100 844 569 711 1955 5 4 1 748 2 25 142 1 1 -"24440" 4 140 3 1 1 1 4 1 100 844 711 675 1277 4 2 0 982 1 5 36 1 0 -"24441" 4 140 3 1 1 1 4 1 100 844 675 844 1085 6 3 1 383 2 25 169 1 1 -"24442" 4 140 4 0 1 0 1 1 100 14 100 175 5978 2 7 0 675 2 75 75 0 1 -"24443" 4 140 4 0 1 0 1 1 100 14 175 219 1432 3 7 0 914 4 25 44 0 1 -"24444" 4 140 4 0 1 0 1 1 100 14 219 274 1059 2 8 0 863 4 25 55 0 1 -"24445" 4 140 4 0 1 0 1 1 100 14 274 14 1387 8 9 1 1087 1 95 260 0 0 -"24446" 4 140 5 1 1 0 1 1 100 1596 100 195 1287 1 3 0 2119 1 95 95 0 1 -"24447" 4 140 5 1 1 0 1 1 100 1596 195 293 1271 6 5 1 1345 3 50 98 1 1 -"24448" 4 140 5 1 1 0 1 1 100 1596 293 513 1109 2 8 0 671 2 75 220 0 1 -"24449" 4 140 5 1 1 0 1 1 100 1596 513 385 1171 8 9 1 1906 4 25 128 0 0 -"24450" 4 140 5 1 1 0 1 1 100 1596 385 674 1028 3 4 0 1305 2 75 289 0 1 -"24451" 4 140 5 1 1 0 1 1 100 1596 674 640 2848 5 7 1 647 5 5 34 0 0 -"24452" 4 140 5 1 1 0 1 1 100 1596 640 1120 1021 7 4 1 370 2 75 480 1 1 -"24453" 4 140 5 1 1 0 1 1 100 1596 1120 1064 1878 4 1 0 458 5 5 56 1 0 -"24454" 4 140 5 1 1 0 1 1 100 1596 1064 1596 1114 9 6 1 443 3 50 532 1 1 -"24455" 4 140 5 1 1 0 2 1 100 228 100 175 1770 8 1 1 444 2 75 75 1 1 
-"24456" 4 140 5 1 1 0 2 1 100 228 175 131 2420 6 2 0 751 4 25 44 1 0 -"24457" 4 140 5 1 1 0 2 1 100 228 131 33 1033 7 9 1 525 2 75 98 0 0 -"24458" 4 140 5 1 1 0 2 1 100 228 33 64 1017 2 10 0 366 1 95 31 0 1 -"24459" 4 140 5 1 1 0 2 1 100 228 64 125 1954 5 3 1 611 1 95 61 1 1 -"24460" 4 140 5 1 1 0 2 1 100 228 125 31 1235 4 2 0 422 2 75 94 1 0 -"24461" 4 140 5 1 1 0 2 1 100 228 31 60 1079 3 5 0 401 1 95 29 0 1 -"24462" 4 140 5 1 1 0 2 1 100 228 60 117 1100 9 4 1 532 1 95 57 1 1 -"24463" 4 140 5 1 1 0 2 1 100 228 117 228 1105 1 7 0 561 1 95 111 0 1 -"24464" 4 140 5 1 1 0 3 1 100 322 100 175 1248 7 5 1 651 2 75 75 1 1 -"24465" 4 140 5 1 1 0 3 1 100 322 175 44 897 2 1 0 397 2 75 131 1 0 -"24466" 4 140 5 1 1 0 3 1 100 322 44 86 911 8 6 1 442 1 95 42 1 1 -"24467" 4 140 5 1 1 0 3 1 100 322 86 151 1242 4 7 0 716 2 75 65 0 1 -"24468" 4 140 5 1 1 0 3 1 100 322 151 227 952 3 10 0 968 3 50 76 0 1 -"24469" 4 140 5 1 1 0 3 1 100 322 227 113 1127 6 8 1 1408 3 50 114 0 0 -"24470" 4 140 5 1 1 0 3 1 100 322 113 220 1066 9 2 1 741 1 95 107 1 1 -"24471" 4 140 5 1 1 0 3 1 100 322 220 165 1438 5 3 0 504 4 25 55 1 0 -"24472" 4 140 5 1 1 0 3 1 100 322 165 322 1405 1 10 0 615 1 95 157 0 1 -"24473" 4 140 5 1 1 0 4 1 100 818 100 195 68835 2 3 0 788 1 95 95 0 1 -"24474" 4 140 5 1 1 0 4 1 100 818 195 205 2172 7 4 1 339 5 5 10 1 1 -"24475" 4 140 5 1 1 0 4 1 100 818 205 154 1200 3 1 0 413 4 25 51 1 0 -"24476" 4 140 5 1 1 0 4 1 100 818 154 300 1398 1 9 0 905 1 95 146 0 1 -"24477" 4 140 5 1 1 0 4 1 100 818 300 375 1179 8 7 1 785 4 25 75 1 1 -"24478" 4 140 5 1 1 0 4 1 100 818 375 656 2761 9 2 1 492 2 75 281 1 1 -"24479" 4 140 5 1 1 0 4 1 100 818 656 820 1306 5 6 0 1754 4 25 164 0 1 -"24480" 4 140 5 1 1 0 4 1 100 818 820 779 1731 6 8 1 1223 5 5 41 0 0 -"24481" 4 140 5 1 1 0 4 1 100 818 779 818 1386 4 7 0 806 5 5 39 0 1 -"24482" 4 140 2 0 1 1 1 1 100 414 100 150 8510 8 3 1 1175 3 50 50 1 1 -"24483" 4 140 2 0 1 1 1 1 100 414 150 225 9676 3 7 0 1165 3 50 75 0 1 -"24484" 4 140 2 0 1 1 1 1 100 414 225 394 2123 8 2 1 676 4 75 169 1 1 -"24485" 4 140 2 0 1 1 1 1 100 414 394 414 1825 2 1 1 871 1 5 20 1 1 -"24486" 4 140 3 1 1 1 1 1 100 790 100 195 9955 9 7 1 0 5 95 95 1 1 -"24487" 4 140 3 1 1 1 1 1 100 790 195 244 3575 4 8 0 1178 2 25 49 0 1 -"24488" 4 140 3 1 1 1 1 1 100 790 244 366 1423 8 2 1 921 3 50 122 1 1 -"24489" 4 140 3 1 1 1 1 1 100 790 366 274 1522 2 1 0 911 2 25 92 1 0 -"24490" 4 140 3 1 1 1 1 1 100 790 274 411 2113 7 6 1 930 3 50 137 1 1 -"24491" 4 140 3 1 1 1 1 1 100 790 411 432 2355 5 3 1 1449 1 5 21 1 1 -"24492" 4 140 3 1 1 1 1 1 100 790 432 540 1585 3 6 0 995 2 25 108 0 1 -"24493" 4 140 3 1 1 1 1 1 100 790 540 405 1425 6 9 1 995 2 25 135 0 0 -"24494" 4 140 3 1 1 1 1 1 100 790 405 790 1425 1 4 0 0 5 95 385 0 1 -"24495" 4 140 3 1 1 1 2 0 100 1 100 195 3531 2 9 0 2082 5 95 95 0 1 -"24496" 4 140 3 1 1 1 2 0 100 1 195 97 3211 4 10 1 617 3 50 98 0 0 -"24497" 4 140 3 1 1 1 2 0 100 1 97 5 1513 3 1 0 1711 5 95 92 1 0 -"24498" 4 140 3 1 1 1 2 0 100 1 5 10 1323 8 6 1 0 5 95 5 1 1 -"24499" 4 140 3 1 1 1 2 0 100 1 10 20 2474 5 7 0 0 5 95 10 0 1 -"24500" 4 140 3 1 1 1 2 0 100 1 20 1 1906 6 8 1 0 5 95 19 0 0 -"24501" 4 140 3 1 1 1 3 1 100 250 100 150 2122 3 5 0 1643 3 50 50 0 1 -"24502" 4 140 3 1 1 1 3 1 100 250 150 75 2211 8 9 1 1503 3 50 75 0 0 -"24503" 4 140 3 1 1 1 3 1 100 250 75 146 1405 2 4 0 968 5 95 71 0 1 -"24504" 4 140 3 1 1 1 3 1 100 250 146 219 1591 6 3 1 3265 3 50 73 1 1 -"24505" 4 140 3 1 1 1 3 1 100 250 219 329 1547 7 6 1 727 3 50 110 1 1 -"24506" 4 140 3 1 1 1 3 1 100 250 329 164 2034 4 2 0 903 3 50 165 1 0 -"24507" 
4 140 3 1 1 1 3 1 100 250 164 287 1858 1 8 0 815 4 75 123 0 1 -"24508" 4 140 3 1 1 1 3 1 100 250 287 143 1869 5 7 1 2131 3 50 144 0 0 -"24509" 4 140 3 1 1 1 3 1 100 250 143 250 1587 9 5 1 1309 4 75 107 1 1 -"24510" 4 140 3 1 1 1 4 1 100 70 100 195 3423 8 7 1 1090 5 95 95 1 1 -"24511" 4 140 3 1 1 1 4 1 100 70 195 341 2340 3 10 0 1171 4 75 146 0 1 -"24512" 4 140 3 1 1 1 4 1 100 70 341 512 2608 7 9 0 914 3 50 171 0 1 -"24513" 4 140 3 1 1 1 4 1 100 70 512 768 2148 9 1 1 1508 3 50 256 1 1 -"24514" 4 140 3 1 1 1 4 1 100 70 768 960 2529 2 3 0 1598 2 25 192 0 1 -"24515" 4 140 3 1 1 1 4 1 100 70 960 1440 2495 1 8 0 734 3 50 480 0 1 -"24516" 4 140 3 1 1 1 4 1 100 70 1440 720 1865 5 4 0 750 3 50 720 1 0 -"24517" 4 140 3 1 1 1 4 1 100 70 720 36 4168 4 2 0 608 5 95 684 1 0 -"24518" 4 140 3 1 1 1 4 1 100 70 36 70 1680 6 3 1 0 5 95 34 1 1 -"24519" 4 140 4 0 1 0 1 1 100 26 100 150 5009 2 7 0 649 3 50 50 0 1 -"24520" 4 140 4 0 1 0 1 1 100 26 150 263 2415 3 7 0 890 2 75 113 0 1 -"24521" 4 140 4 0 1 0 1 1 100 26 263 513 1724 2 8 0 1560 1 95 250 0 1 -"24522" 4 140 4 0 1 0 1 1 100 26 513 26 1747 8 9 1 1389 1 95 487 0 0 -"24523" 4 140 5 1 1 0 1 0 100 1 100 195 1720 1 3 0 2020 1 95 95 0 1 -"24524" 4 140 5 1 1 0 1 0 100 1 195 97 1401 6 5 0 1973 3 50 98 1 0 -"24525" 4 140 5 1 1 0 1 0 100 1 97 189 1284 2 8 0 1260 1 95 92 0 1 -"24526" 4 140 5 1 1 0 1 0 100 1 189 9 1109 8 9 1 590 1 95 180 0 0 -"24527" 4 140 5 1 1 0 1 0 100 1 9 18 1279 3 4 0 1344 1 95 9 0 1 -"24528" 4 140 5 1 1 0 1 0 100 1 18 1 1333 5 7 1 643 1 95 17 0 0 -"24529" 4 140 5 1 1 0 2 0 100 0 100 195 1423 8 1 1 825 1 95 95 1 1 -"24530" 4 140 5 1 1 0 2 0 100 0 195 380 1175 6 2 1 444 1 95 185 1 1 -"24531" 4 140 5 1 1 0 2 0 100 0 380 19 1148 7 9 1 612 1 95 361 0 0 -"24532" 4 140 5 1 1 0 2 0 100 0 19 37 1116 2 10 0 716 1 95 18 0 1 -"24533" 4 140 5 1 1 0 2 0 100 0 37 2 1247 5 3 0 722 1 95 35 1 0 -"24534" 4 140 5 1 1 0 2 0 100 0 2 0 1268 4 2 0 829 1 95 2 1 0 -"24535" 4 140 5 1 1 0 3 0 100 0 100 195 1303 7 5 1 614 1 95 95 1 1 -"24536" 4 140 5 1 1 0 3 0 100 0 195 10 950 2 1 0 550 1 95 185 1 0 -"24537" 4 140 5 1 1 0 3 0 100 0 10 20 1458 8 6 1 698 1 95 10 1 1 -"24538" 4 140 5 1 1 0 3 0 100 0 20 39 1030 4 7 0 603 1 95 19 0 1 -"24539" 4 140 5 1 1 0 3 0 100 0 39 76 1124 3 10 0 524 1 95 37 0 1 -"24540" 4 140 5 1 1 0 3 0 100 0 76 4 1360 6 8 1 509 1 95 72 0 0 -"24541" 4 140 5 1 1 0 3 0 100 0 4 8 1361 9 2 1 1022 1 95 4 1 1 -"24542" 4 140 5 1 1 0 3 0 100 0 8 0 1507 5 3 0 577 1 95 8 1 0 -"24543" 4 140 5 1 1 0 4 1 100 351 100 195 1805 2 3 0 1020 1 95 95 0 1 -"24544" 4 140 5 1 1 0 4 1 100 351 195 293 1073 7 4 1 804 3 50 98 1 1 -"24545" 4 140 5 1 1 0 4 1 100 351 293 571 1531 3 1 1 1280 1 95 278 1 1 -"24546" 4 140 5 1 1 0 4 1 100 351 571 1113 1576 1 9 0 675 1 95 542 0 1 -"24547" 4 140 5 1 1 0 4 1 100 351 1113 1057 2016 8 7 0 1486 5 5 56 1 0 -"24548" 4 140 5 1 1 0 4 1 100 351 1057 2061 1397 9 2 1 864 1 95 1004 1 1 -"24549" 4 140 5 1 1 0 4 1 100 351 2061 3607 1388 5 6 0 800 2 75 1546 0 1 -"24550" 4 140 5 1 1 0 4 1 100 351 3607 180 1895 6 8 1 1154 1 95 3427 0 0 -"24551" 4 140 5 1 1 0 4 1 100 351 180 351 1649 4 7 0 808 1 95 171 0 1 -"24552" 4 143 2 0 1 1 1 1 100 267 100 150 6524 8 3 1 915 3 50 50 1 1 -"24553" 4 143 2 0 1 1 1 1 100 267 150 225 8805 3 7 0 944 3 50 75 0 1 -"24554" 4 143 2 0 1 1 1 1 100 267 225 281 3232 8 2 1 1029 2 25 56 1 1 -"24555" 4 143 2 0 1 1 1 1 100 267 281 267 1777 2 1 0 1746 1 5 14 1 0 -"24556" 4 143 3 1 1 1 1 1 100 605 100 150 10559 9 7 1 807 3 50 50 1 1 -"24557" 4 143 3 1 1 1 1 1 100 605 150 112 1709 4 8 1 1449 2 25 38 0 0 -"24558" 4 143 3 1 1 1 1 1 100 605 
112 168 1233 8 2 1 1436 3 50 56 1 1 -"24559" 4 143 3 1 1 1 1 1 100 605 168 84 1621 2 1 0 902 3 50 84 1 0 -"24560" 4 143 3 1 1 1 1 1 100 605 84 147 2336 7 6 1 1343 4 75 63 1 1 -"24561" 4 143 3 1 1 1 1 1 100 605 147 221 1084 5 3 1 670 3 50 74 1 1 -"24562" 4 143 3 1 1 1 1 1 100 605 221 387 1420 3 6 0 2046 4 75 166 0 1 -"24563" 4 143 3 1 1 1 1 1 100 605 387 484 2322 6 9 0 1057 2 25 97 0 1 -"24564" 4 143 3 1 1 1 1 1 100 605 484 605 3332 1 4 0 1803 2 25 121 0 1 -"24565" 4 143 3 1 1 1 2 1 100 788 100 150 2087 2 9 0 1450 3 50 50 0 1 -"24566" 4 143 3 1 1 1 2 1 100 788 150 188 3498 4 10 0 1073 2 25 38 0 1 -"24567" 4 143 3 1 1 1 2 1 100 788 188 235 3379 3 1 1 928 2 25 47 1 1 -"24568" 4 143 3 1 1 1 2 1 100 788 235 353 1073 8 6 1 652 3 50 118 1 1 -"24569" 4 143 3 1 1 1 2 1 100 788 353 265 1474 5 7 1 610 2 25 88 0 0 -"24570" 4 143 3 1 1 1 2 1 100 788 265 132 1706 6 8 1 494 3 50 133 0 0 -"24571" 4 143 3 1 1 1 2 1 100 788 132 257 1340 7 5 1 0 5 95 125 1 1 -"24572" 4 143 3 1 1 1 2 1 100 788 257 450 1546 1 10 0 910 4 75 193 0 1 -"24573" 4 143 3 1 1 1 2 1 100 788 450 788 1376 9 3 1 661 4 75 338 1 1 -"24574" 4 143 3 1 1 1 3 1 100 357 100 150 4268 3 5 0 655 3 50 50 0 1 -"24575" 4 143 3 1 1 1 3 1 100 357 150 75 1053 8 9 1 480 3 50 75 0 0 -"24576" 4 143 3 1 1 1 3 1 100 357 75 146 1423 2 4 0 0 5 95 71 0 1 -"24577" 4 143 3 1 1 1 3 1 100 357 146 256 2992 6 3 1 680 4 75 110 1 1 -"24578" 4 143 3 1 1 1 3 1 100 357 256 192 2893 7 6 0 1081 2 25 64 1 0 -"24579" 4 143 3 1 1 1 3 1 100 357 192 48 1011 4 2 0 1513 4 75 144 1 0 -"24580" 4 143 3 1 1 1 3 1 100 357 48 94 1094 1 8 0 1208 5 95 46 0 1 -"24581" 4 143 3 1 1 1 3 1 100 357 94 183 3788 5 7 0 613 5 95 89 0 1 -"24582" 4 143 3 1 1 1 3 1 100 357 183 357 1173 9 5 1 2436 5 95 174 1 1 -"24583" 4 143 3 1 1 1 4 1 100 44 100 150 5764 8 7 1 968 3 50 50 1 1 -"24584" 4 143 3 1 1 1 4 1 100 44 150 263 1215 3 10 0 588 4 75 113 0 1 -"24585" 4 143 3 1 1 1 4 1 100 44 263 131 924 7 9 1 374 3 50 132 0 0 -"24586" 4 143 3 1 1 1 4 1 100 44 131 255 986 9 1 1 809 5 95 124 1 1 -"24587" 4 143 3 1 1 1 4 1 100 44 255 242 2943 2 3 1 1841 1 5 13 0 0 -"24588" 4 143 3 1 1 1 4 1 100 44 242 472 735 1 8 0 1626 5 95 230 0 1 -"24589" 4 143 3 1 1 1 4 1 100 44 472 354 2178 5 4 0 640 2 25 118 1 0 -"24590" 4 143 3 1 1 1 4 1 100 44 354 177 1366 4 2 0 604 3 50 177 1 0 -"24591" 4 143 3 1 1 1 4 1 100 44 177 44 3692 6 3 0 378 4 75 133 1 0 -"24592" 4 143 4 0 1 0 1 1 100 197 100 150 5081 2 7 0 1588 3 50 50 0 1 -"24593" 4 143 4 0 1 0 1 1 100 197 150 225 2718 3 7 0 530 3 50 75 0 1 -"24594" 4 143 4 0 1 0 1 1 100 197 225 394 1438 2 8 0 904 2 75 169 0 1 -"24595" 4 143 4 0 1 0 1 1 100 197 394 197 1466 8 9 1 1314 3 50 197 0 0 -"24596" 4 143 5 1 1 0 1 1 100 2118 100 195 4495 1 3 0 567 1 95 95 0 1 -"24597" 4 143 5 1 1 0 1 1 100 2118 195 293 1217 6 5 1 1349 3 50 98 1 1 -"24598" 4 143 5 1 1 0 1 1 100 2118 293 571 946 2 8 0 403 1 95 278 0 1 -"24599" 4 143 5 1 1 0 1 1 100 2118 571 285 884 8 9 1 2384 3 50 286 0 0 -"24600" 4 143 5 1 1 0 1 1 100 2118 285 556 876 3 4 0 440 1 95 271 0 1 -"24601" 4 143 5 1 1 0 1 1 100 2118 556 695 1376 5 7 0 941 4 25 139 0 1 -"24602" 4 143 5 1 1 0 1 1 100 2118 695 869 1173 7 4 1 619 4 25 174 1 1 -"24603" 4 143 5 1 1 0 1 1 100 2118 869 1086 1291 4 1 1 732 4 25 217 1 1 -"24604" 4 143 5 1 1 0 1 1 100 2118 1086 2118 1070 9 6 1 401 1 95 1032 1 1 -"24605" 4 143 5 1 1 0 2 0 100 1 100 195 1307 8 1 1 658 1 95 95 1 1 -"24606" 4 143 5 1 1 0 2 0 100 1 195 380 920 6 2 1 561 1 95 185 1 1 -"24607" 4 143 5 1 1 0 2 0 100 1 380 190 1348 7 9 1 410 3 50 190 0 0 -"24608" 4 143 5 1 1 0 2 0 100 1 190 371 699 2 10 0 351 1 95 181 
0 1 -"24609" 4 143 5 1 1 0 2 0 100 1 371 19 1028 5 3 0 448 1 95 352 1 0 -"24610" 4 143 5 1 1 0 2 0 100 1 19 1 1344 4 2 0 374 1 95 18 1 0 -"24611" 4 143 5 1 1 0 3 1 100 31 100 195 1117 7 5 1 453 1 95 95 1 1 -"24612" 4 143 5 1 1 0 3 1 100 31 195 10 908 2 1 0 255 1 95 185 1 0 -"24613" 4 143 5 1 1 0 3 1 100 31 10 20 855 8 6 1 364 1 95 10 1 1 -"24614" 4 143 5 1 1 0 3 1 100 31 20 39 1408 4 7 0 435 1 95 19 0 1 -"24615" 4 143 5 1 1 0 3 1 100 31 39 76 748 3 10 0 309 1 95 37 0 1 -"24616" 4 143 5 1 1 0 3 1 100 31 76 4 1100 6 8 1 301 1 95 72 0 0 -"24617" 4 143 5 1 1 0 3 1 100 31 4 8 860 9 2 1 377 1 95 4 1 1 -"24618" 4 143 5 1 1 0 3 1 100 31 8 16 908 5 3 1 375 1 95 8 1 1 -"24619" 4 143 5 1 1 0 3 1 100 31 16 31 562 1 10 0 293 1 95 15 0 1 -"24620" 4 143 5 1 1 0 4 1 100 650 100 195 1424 2 3 0 370 1 95 95 0 1 -"24621" 4 143 5 1 1 0 4 1 100 650 195 341 2517 7 4 1 1459 2 75 146 1 1 -"24622" 4 143 5 1 1 0 4 1 100 650 341 256 1450 3 1 0 1011 4 25 85 1 0 -"24623" 4 143 5 1 1 0 4 1 100 650 256 499 1336 1 9 0 502 1 95 243 0 1 -"24624" 4 143 5 1 1 0 4 1 100 650 499 624 1068 8 7 1 399 4 25 125 1 1 -"24625" 4 143 5 1 1 0 4 1 100 650 624 1217 1561 9 2 1 340 1 95 593 1 1 -"24626" 4 143 5 1 1 0 4 1 100 650 1217 1156 5073 5 6 1 630 5 5 61 0 0 -"24627" 4 143 5 1 1 0 4 1 100 650 1156 867 1125 6 8 1 638 4 25 289 0 0 -"24628" 4 143 5 1 1 0 4 1 100 650 867 650 5252 4 7 1 864 4 25 217 0 0 -"24629" 4 145 2 0 1 1 1 1 100 140 100 150 5930 8 3 1 1246 3 50 50 1 1 -"24630" 4 145 2 0 1 1 1 1 100 140 150 225 7072 3 7 0 1197 3 50 75 0 1 -"24631" 4 145 2 0 1 1 1 1 100 140 225 281 2313 8 2 1 1857 2 25 56 1 1 -"24632" 4 145 2 0 1 1 1 1 100 140 281 140 1942 2 1 0 815 3 50 141 1 0 -"24633" 4 145 3 1 1 1 1 1 100 330 100 125 14744 9 7 1 1096 2 25 25 1 1 -"24634" 4 145 3 1 1 1 1 1 100 330 125 119 5177 4 8 1 776 1 5 6 0 0 -"24635" 4 145 3 1 1 1 1 1 100 330 119 149 2392 8 2 1 1268 2 25 30 1 1 -"24636" 4 145 3 1 1 1 1 1 100 330 149 142 2709 2 1 0 1426 1 5 7 1 0 -"24637" 4 145 3 1 1 1 1 1 100 330 142 178 2651 7 6 1 387 2 25 36 1 1 -"24638" 4 145 3 1 1 1 1 1 100 330 178 169 3573 5 3 0 1057 1 5 9 1 0 -"24639" 4 145 3 1 1 1 1 1 100 330 169 211 4691 3 6 0 794 2 25 42 0 1 -"24640" 4 145 3 1 1 1 1 1 100 330 211 264 5441 6 9 0 524 2 25 53 0 1 -"24641" 4 145 3 1 1 1 1 1 100 330 264 330 1608 1 4 0 601 2 25 66 0 1 -"24642" 4 145 3 1 1 1 2 1 100 441 100 125 4791 2 9 0 737 2 25 25 0 1 -"24643" 4 145 3 1 1 1 2 1 100 441 125 131 2547 4 10 0 1568 1 5 6 0 1 -"24644" 4 145 3 1 1 1 2 1 100 441 131 164 2922 3 1 1 621 2 25 33 1 1 -"24645" 4 145 3 1 1 1 2 1 100 441 164 205 2988 8 6 1 580 2 25 41 1 1 -"24646" 4 145 3 1 1 1 2 1 100 441 205 215 3013 5 7 0 1220 1 5 10 0 1 -"24647" 4 145 3 1 1 1 2 1 100 441 215 269 2287 6 8 0 552 2 25 54 0 1 -"24648" 4 145 3 1 1 1 2 1 100 441 269 336 3307 7 5 1 548 2 25 67 1 1 -"24649" 4 145 3 1 1 1 2 1 100 441 336 420 1119 1 10 0 599 2 25 84 0 1 -"24650" 4 145 3 1 1 1 2 1 100 441 420 441 2456 9 3 1 1593 1 5 21 1 1 -"24651" 4 145 3 1 1 1 3 1 100 189 100 125 3734 3 5 0 466 2 25 25 0 1 -"24652" 4 145 3 1 1 1 3 1 100 189 125 94 2818 8 9 1 929 2 25 31 0 0 -"24653" 4 145 3 1 1 1 3 1 100 189 94 118 1634 2 4 0 554 2 25 24 0 1 -"24654" 4 145 3 1 1 1 3 1 100 189 118 88 9469 6 3 0 296 2 25 30 1 0 -"24655" 4 145 3 1 1 1 3 1 100 189 88 110 4903 7 6 1 331 2 25 22 1 1 -"24656" 4 145 3 1 1 1 3 1 100 189 110 82 2609 4 2 0 419 2 25 28 1 0 -"24657" 4 145 3 1 1 1 3 1 100 189 82 144 3284 1 8 0 1941 4 75 62 0 1 -"24658" 4 145 3 1 1 1 3 1 100 189 144 108 2806 5 7 1 286 2 25 36 0 0 -"24659" 4 145 3 1 1 1 3 1 100 189 108 189 2822 9 5 1 2254 4 75 81 1 1 -"24660" 
4 145 3 1 1 1 4 1 100 307 100 125 1584 8 7 1 563 2 25 25 1 1 -"24661" 4 145 3 1 1 1 4 1 100 307 125 131 2253 3 10 0 1760 1 5 6 0 1 -"24662" 4 145 3 1 1 1 4 1 100 307 131 98 6562 7 9 1 583 2 25 33 0 0 -"24663" 4 145 3 1 1 1 4 1 100 307 98 172 1633 9 1 1 665 4 75 74 1 1 -"24664" 4 145 3 1 1 1 4 1 100 307 172 215 1447 2 3 0 1400 2 25 43 0 1 -"24665" 4 145 3 1 1 1 4 1 100 307 215 323 1138 1 8 0 1602 3 50 108 0 1 -"24666" 4 145 3 1 1 1 4 1 100 307 323 307 5763 5 4 0 846 1 5 16 1 0 -"24667" 4 145 3 1 1 1 4 1 100 307 307 292 1899 4 2 0 871 1 5 15 1 0 -"24668" 4 145 3 1 1 1 4 1 100 307 292 307 1539 6 3 1 663 1 5 15 1 1 -"24669" 4 145 4 0 1 0 1 1 100 29 100 150 4605 2 7 0 1183 3 50 50 0 1 -"24670" 4 145 4 0 1 0 1 1 100 29 150 293 4060 3 7 0 921 1 95 143 0 1 -"24671" 4 145 4 0 1 0 1 1 100 29 293 571 1882 2 8 0 1918 1 95 278 0 1 -"24672" 4 145 4 0 1 0 1 1 100 29 571 29 1888 8 9 1 1228 1 95 542 0 0 -"24673" 4 145 5 1 1 0 1 1 100 31 100 195 8200 1 3 0 968 1 95 95 0 1 -"24674" 4 145 5 1 1 0 1 1 100 31 195 380 1991 6 5 1 870 1 95 185 1 1 -"24675" 4 145 5 1 1 0 1 1 100 31 380 741 1138 2 8 0 953 1 95 361 0 1 -"24676" 4 145 5 1 1 0 1 1 100 31 741 37 1993 8 9 1 864 1 95 704 0 0 -"24677" 4 145 5 1 1 0 1 1 100 31 37 72 1660 3 4 0 650 1 95 35 0 1 -"24678" 4 145 5 1 1 0 1 1 100 31 72 4 2217 5 7 1 541 1 95 68 0 0 -"24679" 4 145 5 1 1 0 1 1 100 31 4 8 1320 7 4 1 567 1 95 4 1 1 -"24680" 4 145 5 1 1 0 1 1 100 31 8 16 2353 4 1 1 1127 1 95 8 1 1 -"24681" 4 145 5 1 1 0 1 1 100 31 16 31 1326 9 6 1 941 1 95 15 1 1 -"24682" 4 145 5 1 1 0 2 1 100 31 100 195 1572 8 1 1 655 1 95 95 1 1 -"24683" 4 145 5 1 1 0 2 1 100 31 195 380 1172 6 2 1 906 1 95 185 1 1 -"24684" 4 145 5 1 1 0 2 1 100 31 380 19 1276 7 9 1 1501 1 95 361 0 0 -"24685" 4 145 5 1 1 0 2 1 100 31 19 37 1474 2 10 0 810 1 95 18 0 1 -"24686" 4 145 5 1 1 0 2 1 100 31 37 2 1880 5 3 0 956 1 95 35 1 0 -"24687" 4 145 5 1 1 0 2 1 100 31 2 4 2540 4 2 1 593 1 95 2 1 1 -"24688" 4 145 5 1 1 0 2 1 100 31 4 8 1722 3 5 0 645 1 95 4 0 1 -"24689" 4 145 5 1 1 0 2 1 100 31 8 16 1068 9 4 1 958 1 95 8 1 1 -"24690" 4 145 5 1 1 0 2 1 100 31 16 31 1174 1 7 0 867 1 95 15 0 1 -"24691" 4 145 5 1 1 0 3 0 100 1 100 195 1924 7 5 1 638 1 95 95 1 1 -"24692" 4 145 5 1 1 0 3 0 100 1 195 10 949 2 1 0 621 1 95 185 1 0 -"24693" 4 145 5 1 1 0 3 0 100 1 10 20 1436 8 6 1 541 1 95 10 1 1 -"24694" 4 145 5 1 1 0 3 0 100 1 20 1 1760 4 7 1 442 1 95 19 0 0 -"24695" 4 145 5 1 1 0 4 1 100 1 100 195 1301 2 3 0 628 1 95 95 0 1 -"24696" 4 145 5 1 1 0 4 1 100 1 195 380 2291 7 4 1 460 1 95 185 1 1 -"24697" 4 145 5 1 1 0 4 1 100 1 380 19 1363 3 1 0 504 1 95 361 1 0 -"24698" 4 145 5 1 1 0 4 1 100 1 19 37 1333 1 9 0 564 1 95 18 0 1 -"24699" 4 145 5 1 1 0 4 1 100 1 37 72 1174 8 7 1 316 1 95 35 1 1 -"24700" 4 145 5 1 1 0 4 1 100 1 72 140 730 9 2 1 613 1 95 68 1 1 -"24701" 4 145 5 1 1 0 4 1 100 1 140 273 2125 5 6 0 461 1 95 133 0 1 -"24702" 4 145 5 1 1 0 4 1 100 1 273 14 1180 6 8 1 314 1 95 259 0 0 -"24703" 4 145 5 1 1 0 4 1 100 1 14 1 2208 4 7 1 357 1 95 13 0 0 -"24704" 4 146 2 0 1 1 1 1 100 156 100 125 2631 8 3 1 4424 2 25 25 1 1 -"24705" 4 146 2 0 1 1 1 1 100 156 125 131 3594 3 7 0 1405 1 5 6 0 1 -"24706" 4 146 2 0 1 1 1 1 100 156 131 164 2607 8 2 1 1009 2 25 33 1 1 -"24707" 4 146 2 0 1 1 1 1 100 156 164 156 2950 2 1 0 1359 1 5 8 1 0 -"24708" 4 146 3 1 1 1 1 1 100 213 100 150 2764 9 7 1 990 3 50 50 1 1 -"24709" 4 146 3 1 1 1 1 1 100 213 150 158 5092 4 8 0 871 1 5 8 0 1 -"24710" 4 146 3 1 1 1 1 1 100 213 158 198 2403 8 2 1 990 2 25 40 1 1 -"24711" 4 146 3 1 1 1 1 1 100 213 198 188 2124 2 1 0 1068 1 5 10 1 0 -"24712" 4 146 
3 1 1 1 1 1 100 213 188 179 2982 7 6 0 694 1 5 9 1 0 -"24713" 4 146 3 1 1 1 1 1 100 213 179 170 2594 5 3 0 908 1 5 9 1 0 -"24714" 4 146 3 1 1 1 1 1 100 213 170 179 1646 3 6 0 1189 1 5 9 0 1 -"24715" 4 146 3 1 1 1 1 1 100 213 179 170 2234 6 9 1 806 1 5 9 0 0 -"24716" 4 146 3 1 1 1 1 1 100 213 170 213 1956 1 4 0 1220 2 25 43 0 1 -"24717" 4 146 3 1 1 1 2 1 100 255 100 125 3119 2 9 0 1040 2 25 25 0 1 -"24718" 4 146 3 1 1 1 2 1 100 255 125 131 2371 4 10 0 1088 1 5 6 0 1 -"24719" 4 146 3 1 1 1 2 1 100 255 131 124 1689 3 1 0 1742 1 5 7 1 0 -"24720" 4 146 3 1 1 1 2 1 100 255 124 186 2177 8 6 1 903 3 50 62 1 1 -"24721" 4 146 3 1 1 1 2 1 100 255 186 195 3969 5 7 0 474 1 5 9 0 1 -"24722" 4 146 3 1 1 1 2 1 100 255 195 185 2057 6 8 1 724 1 5 10 0 0 -"24723" 4 146 3 1 1 1 2 1 100 255 185 194 1782 7 5 1 823 1 5 9 1 1 -"24724" 4 146 3 1 1 1 2 1 100 255 194 204 1549 1 10 0 1297 1 5 10 0 1 -"24725" 4 146 3 1 1 1 2 1 100 255 204 255 1640 9 3 1 840 2 25 51 1 1 -"24726" 4 146 3 1 1 1 3 1 100 171 100 105 1919 3 5 0 833 1 5 5 0 1 -"24727" 4 146 3 1 1 1 3 1 100 171 105 79 2975 8 9 1 815 2 25 26 0 0 -"24728" 4 146 3 1 1 1 3 1 100 171 79 99 2294 2 4 0 638 2 25 20 0 1 -"24729" 4 146 3 1 1 1 3 1 100 171 99 104 2460 6 3 1 1148 1 5 5 1 1 -"24730" 4 146 3 1 1 1 3 1 100 171 104 109 2587 7 6 1 837 1 5 5 1 1 -"24731" 4 146 3 1 1 1 3 1 100 171 109 104 2267 4 2 0 845 1 5 5 1 0 -"24732" 4 146 3 1 1 1 3 1 100 171 104 130 1402 1 8 0 1020 2 25 26 0 1 -"24733" 4 146 3 1 1 1 3 1 100 171 130 137 2985 5 7 0 905 1 5 7 0 1 -"24734" 4 146 3 1 1 1 3 1 100 171 137 171 1595 9 5 1 1573 2 25 34 1 1 -"24735" 4 146 3 1 1 1 4 1 100 208 100 125 1790 8 7 1 470 2 25 25 1 1 -"24736" 4 146 3 1 1 1 4 1 100 208 125 131 1461 3 10 0 1181 1 5 6 0 1 -"24737" 4 146 3 1 1 1 4 1 100 208 131 124 1434 7 9 1 1028 1 5 7 0 0 -"24738" 4 146 3 1 1 1 4 1 100 208 124 155 1224 9 1 1 723 2 25 31 1 1 -"24739" 4 146 3 1 1 1 4 1 100 208 155 194 1598 2 3 0 735 2 25 39 0 1 -"24740" 4 146 3 1 1 1 4 1 100 208 194 243 2026 1 8 0 1263 2 25 49 0 1 -"24741" 4 146 3 1 1 1 4 1 100 208 243 231 2296 5 4 0 392 1 5 12 1 0 -"24742" 4 146 3 1 1 1 4 1 100 208 231 219 2251 4 2 0 605 1 5 12 1 0 -"24743" 4 146 3 1 1 1 4 1 100 208 219 208 2549 6 3 0 473 1 5 11 1 0 -"24744" 4 146 4 0 1 0 1 1 100 148 100 150 1950 2 7 0 2479 3 50 50 0 1 -"24745" 4 146 4 0 1 0 1 1 100 148 150 188 1422 3 7 0 1568 4 25 38 0 1 -"24746" 4 146 4 0 1 0 1 1 100 148 188 197 1267 2 8 0 1636 5 5 9 0 1 -"24747" 4 146 4 0 1 0 1 1 100 148 197 148 1692 8 9 1 1577 4 25 49 0 0 -"24748" 4 146 5 1 1 0 1 1 100 254 100 175 1611 1 3 0 1724 2 75 75 0 1 -"24749" 4 146 5 1 1 0 1 1 100 254 175 219 1903 6 5 1 1539 4 25 44 1 1 -"24750" 4 146 5 1 1 0 1 1 100 254 219 274 1487 2 8 0 1270 4 25 55 0 1 -"24751" 4 146 5 1 1 0 1 1 100 254 274 205 1535 8 9 1 1253 4 25 69 0 0 -"24752" 4 146 5 1 1 0 1 1 100 254 205 215 1926 3 4 0 1121 5 5 10 0 1 -"24753" 4 146 5 1 1 0 1 1 100 254 215 204 3601 5 7 1 1484 5 5 11 0 0 -"24754" 4 146 5 1 1 0 1 1 100 254 204 214 1890 7 4 1 2046 5 5 10 1 1 -"24755" 4 146 5 1 1 0 1 1 100 254 214 203 1730 4 1 0 766 5 5 11 1 0 -"24756" 4 146 5 1 1 0 1 1 100 254 203 254 1599 9 6 1 521 4 25 51 1 1 -"24757" 4 146 5 1 1 0 2 1 100 329 100 125 2504 8 1 1 447 4 25 25 1 1 -"24758" 4 146 5 1 1 0 2 1 100 329 125 131 1551 6 2 1 644 5 5 6 1 1 -"24759" 4 146 5 1 1 0 2 1 100 329 131 124 1466 7 9 1 547 5 5 7 0 0 -"24760" 4 146 5 1 1 0 2 1 100 329 124 155 1214 2 10 0 1339 4 25 31 0 1 -"24761" 4 146 5 1 1 0 2 1 100 329 155 147 2649 5 3 0 472 5 5 8 1 0 -"24762" 4 146 5 1 1 0 2 1 100 329 147 140 1351 4 2 0 1324 5 5 7 1 0 -"24763" 4 146 5 1 1 0 
2 1 100 329 140 175 1867 3 5 0 1432 4 25 35 0 1 -"24764" 4 146 5 1 1 0 2 1 100 329 175 263 1433 9 4 1 1083 3 50 88 1 1 -"24765" 4 146 5 1 1 0 2 1 100 329 263 329 1493 1 7 0 828 4 25 66 0 1 -"24766" 4 146 5 1 1 0 3 1 100 255 100 125 4404 7 5 1 1137 4 25 25 1 1 -"24767" 4 146 5 1 1 0 3 1 100 255 125 94 1360 2 1 0 605 4 25 31 1 0 -"24768" 4 146 5 1 1 0 3 1 100 255 94 165 1589 8 6 1 479 2 75 71 1 1 -"24769" 4 146 5 1 1 0 3 1 100 255 165 173 2214 4 7 0 347 5 5 8 0 1 -"24770" 4 146 5 1 1 0 3 1 100 255 173 216 1351 3 10 0 1389 4 25 43 0 1 -"24771" 4 146 5 1 1 0 3 1 100 255 216 205 1832 6 8 1 1487 5 5 11 0 0 -"24772" 4 146 5 1 1 0 3 1 100 255 205 256 1134 9 2 1 1085 4 25 51 1 1 -"24773" 4 146 5 1 1 0 3 1 100 255 256 243 3103 5 3 0 1090 5 5 13 1 0 -"24774" 4 146 5 1 1 0 3 1 100 255 243 255 1227 1 10 0 1055 5 5 12 0 1 -"24775" 4 146 5 1 1 0 4 1 100 306 100 125 2736 2 3 0 868 4 25 25 0 1 -"24776" 4 146 5 1 1 0 4 1 100 306 125 131 1416 7 4 1 443 5 5 6 1 1 -"24777" 4 146 5 1 1 0 4 1 100 306 131 124 1491 3 1 0 618 5 5 7 1 0 -"24778" 4 146 5 1 1 0 4 1 100 306 124 186 1250 1 9 0 1020 3 50 62 0 1 -"24779" 4 146 5 1 1 0 4 1 100 306 186 233 2025 8 7 1 1247 4 25 47 1 1 -"24780" 4 146 5 1 1 0 4 1 100 306 233 291 1376 9 2 1 2557 4 25 58 1 1 -"24781" 4 146 5 1 1 0 4 1 100 306 291 306 2080 5 6 0 2339 5 5 15 0 1 -"24782" 4 146 5 1 1 0 4 1 100 306 306 291 1465 6 8 1 1650 5 5 15 0 0 -"24783" 4 146 5 1 1 0 4 1 100 306 291 306 1320 4 7 0 1496 5 5 15 0 1 -"24784" 4 151 2 0 1 1 1 1 100 164 100 150 12340 8 3 1 1662 3 50 50 1 1 -"24785" 4 151 2 0 1 1 1 1 100 164 150 188 3820 3 7 0 4580 2 25 38 0 1 -"24786" 4 151 2 0 1 1 1 1 100 164 188 329 2415 8 2 1 1274 4 75 141 1 1 -"24787" 4 151 2 0 1 1 1 1 100 164 329 164 3763 2 1 0 1926 3 50 165 1 0 -"24788" 4 151 3 1 1 1 1 1 100 138 100 195 4008 9 7 1 1327 5 95 95 1 1 -"24789" 4 151 3 1 1 1 1 1 100 138 195 341 2501 4 8 0 620 4 75 146 0 1 -"24790" 4 151 3 1 1 1 1 1 100 138 341 597 1884 8 2 1 1376 4 75 256 1 1 -"24791" 4 151 3 1 1 1 1 1 100 138 597 149 2156 2 1 0 1716 4 75 448 1 0 -"24792" 4 151 3 1 1 1 1 1 100 138 149 291 2655 7 6 1 0 5 95 142 1 1 -"24793" 4 151 3 1 1 1 1 1 100 138 291 145 1878 5 3 0 2443 3 50 146 1 0 -"24794" 4 151 3 1 1 1 1 1 100 138 145 283 2252 3 6 0 0 5 95 138 0 1 -"24795" 4 151 3 1 1 1 1 1 100 138 283 71 2414 6 9 1 817 4 75 212 0 0 -"24796" 4 151 3 1 1 1 1 1 100 138 71 138 2304 1 4 0 0 5 95 67 0 1 -"24797" 4 151 3 1 1 1 2 1 100 3840 100 195 2962 2 9 0 0 5 95 95 0 1 -"24798" 4 151 3 1 1 1 2 1 100 3840 195 293 2291 4 10 0 1988 3 50 98 0 1 -"24799" 4 151 3 1 1 1 2 1 100 3840 293 440 5579 3 1 1 671 3 50 147 1 1 -"24800" 4 151 3 1 1 1 2 1 100 3840 440 858 2900 8 6 1 3864 5 95 418 1 1 -"24801" 4 151 3 1 1 1 2 1 100 3840 858 1287 4813 5 7 0 1070 3 50 429 0 1 -"24802" 4 151 3 1 1 1 2 1 100 3840 1287 643 5472 6 8 1 1857 3 50 644 0 0 -"24803" 4 151 3 1 1 1 2 1 100 3840 643 1125 2838 7 5 1 1405 4 75 482 1 1 -"24804" 4 151 3 1 1 1 2 1 100 3840 1125 2194 2256 1 10 0 0 5 95 1069 0 1 -"24805" 4 151 3 1 1 1 2 1 100 3840 2194 3840 3114 9 3 1 2061 4 75 1646 1 1 -"24806" 4 151 3 1 1 1 3 0 100 0 100 195 2594 3 5 0 0 5 95 95 0 1 -"24807" 4 151 3 1 1 1 3 0 100 0 195 10 1780 8 9 1 0 5 95 185 0 0 -"24808" 4 151 3 1 1 1 3 0 100 0 10 20 2277 2 4 0 0 5 95 10 0 1 -"24809" 4 151 3 1 1 1 3 0 100 0 20 39 2606 6 3 1 0 5 95 19 1 1 -"24810" 4 151 3 1 1 1 3 0 100 0 39 10 3656 7 6 0 2984 4 75 29 1 0 -"24811" 4 151 3 1 1 1 3 0 100 0 10 0 5471 4 2 0 0 5 95 10 1 0 -"24812" 4 151 3 1 1 1 4 1 100 2608 100 195 2352 8 7 1 0 5 95 95 1 1 -"24813" 4 151 3 1 1 1 4 1 100 2608 195 380 1718 3 10 0 0 5 95 185 
0 1 -"24814" 4 151 3 1 1 1 4 1 100 2608 380 570 2850 7 9 0 1224 3 50 190 0 1 -"24815" 4 151 3 1 1 1 4 1 100 2608 570 1112 2899 9 1 1 0 5 95 542 1 1 -"24816" 4 151 3 1 1 1 4 1 100 2608 1112 1946 5518 2 3 0 2413 4 75 834 0 1 -"24817" 4 151 3 1 1 1 4 1 100 2608 1946 3406 4016 1 8 0 1607 4 75 1460 0 1 -"24818" 4 151 3 1 1 1 4 1 100 2608 3406 5961 3007 5 4 1 1118 4 75 2555 1 1 -"24819" 4 151 3 1 1 1 4 1 100 2608 5961 1490 3383 4 2 0 1596 4 75 4471 1 0 -"24820" 4 151 3 1 1 1 4 1 100 2608 1490 2608 4029 6 3 1 1971 4 75 1118 1 1 -"24821" 4 151 4 0 1 0 1 1 100 114 100 175 7363 2 7 0 266 2 75 75 0 1 -"24822" 4 151 4 0 1 0 1 1 100 114 175 306 3715 3 7 0 832 2 75 131 0 1 -"24823" 4 151 4 0 1 0 1 1 100 114 306 229 2646 2 8 1 710 4 25 77 0 0 -"24824" 4 151 4 0 1 0 1 1 100 114 229 114 4049 8 9 1 1054 3 50 115 0 0 -"24825" 4 151 5 1 1 0 1 1 100 60 100 195 3414 1 3 0 993 1 95 95 0 1 -"24826" 4 151 5 1 1 0 1 1 100 60 195 341 2911 6 5 1 522 2 75 146 1 1 -"24827" 4 151 5 1 1 0 1 1 100 60 341 665 2414 2 8 0 3321 1 95 324 0 1 -"24828" 4 151 5 1 1 0 1 1 100 60 665 332 7856 8 9 1 2424 3 50 333 0 0 -"24829" 4 151 5 1 1 0 1 1 100 60 332 647 1922 3 4 0 1132 1 95 315 0 1 -"24830" 4 151 5 1 1 0 1 1 100 60 647 323 2331 5 7 1 714 3 50 324 0 0 -"24831" 4 151 5 1 1 0 1 1 100 60 323 630 2133 7 4 1 1557 1 95 307 1 1 -"24832" 4 151 5 1 1 0 1 1 100 60 630 31 1984 4 1 0 2336 1 95 599 1 0 -"24833" 4 151 5 1 1 0 1 1 100 60 31 60 1549 9 6 1 826 1 95 29 1 1 -"24834" 4 151 5 1 1 0 2 1 100 7213 100 195 1840 8 1 1 710 1 95 95 1 1 -"24835" 4 151 5 1 1 0 2 1 100 7213 195 380 2497 6 2 1 2824 1 95 185 1 1 -"24836" 4 151 5 1 1 0 2 1 100 7213 380 665 2374 7 9 0 1939 2 75 285 0 1 -"24837" 4 151 5 1 1 0 2 1 100 7213 665 1297 1852 2 10 0 1010 1 95 632 0 1 -"24838" 4 151 5 1 1 0 2 1 100 7213 1297 1946 2375 5 3 1 1324 3 50 649 1 1 -"24839" 4 151 5 1 1 0 2 1 100 7213 1946 973 2475 4 2 0 1459 3 50 973 1 0 -"24840" 4 151 5 1 1 0 2 1 100 7213 973 1897 1898 3 5 0 2446 1 95 924 0 1 -"24841" 4 151 5 1 1 0 2 1 100 7213 1897 3699 2143 9 4 1 2542 1 95 1802 1 1 -"24842" 4 151 5 1 1 0 2 1 100 7213 3699 7213 2338 1 7 0 2114 1 95 3514 0 1 -"24843" 4 151 5 1 1 0 3 1 100 4 100 195 2019 7 5 1 1567 1 95 95 1 1 -"24844" 4 151 5 1 1 0 3 1 100 4 195 10 2202 2 1 0 1895 1 95 185 1 0 -"24845" 4 151 5 1 1 0 3 1 100 4 10 20 1882 8 6 1 1910 1 95 10 1 1 -"24846" 4 151 5 1 1 0 3 1 100 4 20 39 2265 4 7 0 1746 1 95 19 0 1 -"24847" 4 151 5 1 1 0 3 1 100 4 39 10 2501 3 10 1 1419 2 75 29 0 0 -"24848" 4 151 5 1 1 0 3 1 100 4 10 5 6557 6 8 1 1828 3 50 5 0 0 -"24849" 4 151 5 1 1 0 3 1 100 4 5 10 2217 9 2 1 2053 1 95 5 1 1 -"24850" 4 151 5 1 1 0 3 1 100 4 10 2 2898 5 3 0 1557 2 75 8 1 0 -"24851" 4 151 5 1 1 0 3 1 100 4 2 4 2097 1 10 0 1513 1 95 2 0 1 -"24852" 4 151 5 1 1 0 4 1 100 601 100 195 2258 2 3 0 2019 1 95 95 0 1 -"24853" 4 151 5 1 1 0 4 1 100 601 195 380 2073 7 4 1 1387 1 95 185 1 1 -"24854" 4 151 5 1 1 0 4 1 100 601 380 95 3104 3 1 0 1961 2 75 285 1 0 -"24855" 4 151 5 1 1 0 4 1 100 601 95 185 1577 1 9 0 1595 1 95 90 0 1 -"24856" 4 151 5 1 1 0 4 1 100 601 185 361 1925 8 7 1 1750 1 95 176 1 1 -"24857" 4 151 5 1 1 0 4 1 100 601 361 704 2233 9 2 1 1755 1 95 343 1 1 -"24858" 4 151 5 1 1 0 4 1 100 601 704 1232 2133 5 6 0 1677 2 75 528 0 1 -"24859" 4 151 5 1 1 0 4 1 100 601 1232 308 4436 6 8 1 2118 2 75 924 0 0 -"24860" 4 151 5 1 1 0 4 1 100 601 308 601 1855 4 7 0 1620 1 95 293 0 1 -"24861" 4 154 2 0 1 1 1 1 100 134 100 150 18855 8 3 1 2068 3 50 50 1 1 -"24862" 4 154 2 0 1 1 1 1 100 134 150 188 5910 3 7 0 3531 2 25 38 0 1 -"24863" 4 154 2 0 1 1 1 1 100 134 188 179 3968 8 2 0 993 
1 5 9 1 0 -"24864" 4 154 2 0 1 1 1 1 100 134 179 134 2326 2 1 0 1061 2 25 45 1 0 -"24865" 4 154 3 1 1 1 1 1 100 215 100 125 11980 9 7 1 4311 2 25 25 1 1 -"24866" 4 154 3 1 1 1 1 1 100 215 125 119 2712 4 8 1 685 1 5 6 0 0 -"24867" 4 154 3 1 1 1 1 1 100 215 119 149 1905 8 2 1 860 2 25 30 1 1 -"24868" 4 154 3 1 1 1 1 1 100 215 149 156 2096 2 1 1 434 1 5 7 1 1 -"24869" 4 154 3 1 1 1 1 1 100 215 156 164 1803 7 6 1 2499 1 5 8 1 1 -"24870" 4 154 3 1 1 1 1 1 100 215 164 156 2345 5 3 0 782 1 5 8 1 0 -"24871" 4 154 3 1 1 1 1 1 100 215 156 195 1782 3 6 0 448 2 25 39 0 1 -"24872" 4 154 3 1 1 1 1 1 100 215 195 205 1240 6 9 0 1026 1 5 10 0 1 -"24873" 4 154 3 1 1 1 1 1 100 215 205 215 1094 1 4 0 592 1 5 10 0 1 -"24874" 4 154 3 1 1 1 2 1 100 322 100 95 2434 2 9 1 436 1 5 5 0 0 -"24875" 4 154 3 1 1 1 2 1 100 322 95 100 1180 4 10 0 938 1 5 5 0 1 -"24876" 4 154 3 1 1 1 2 1 100 322 100 95 1371 3 1 0 815 1 5 5 1 0 -"24877" 4 154 3 1 1 1 2 1 100 322 95 143 1229 8 6 1 1489 3 50 48 1 1 -"24878" 4 154 3 1 1 1 2 1 100 322 143 136 1880 5 7 1 1116 1 5 7 0 0 -"24879" 4 154 3 1 1 1 2 1 100 322 136 129 1596 6 8 1 2789 1 5 7 0 0 -"24880" 4 154 3 1 1 1 2 1 100 322 129 194 1318 7 5 1 820 3 50 65 1 1 -"24881" 4 154 3 1 1 1 2 1 100 322 194 184 3683 1 10 1 1298 1 5 10 0 0 -"24882" 4 154 3 1 1 1 2 1 100 322 184 322 1721 9 3 1 629 4 75 138 1 1 -"24883" 4 154 3 1 1 1 3 1 100 446 100 105 2149 3 5 0 820 1 5 5 0 1 -"24884" 4 154 3 1 1 1 3 1 100 446 105 100 1219 8 9 1 2222 1 5 5 0 0 -"24885" 4 154 3 1 1 1 3 1 100 446 100 125 2345 2 4 0 727 2 25 25 0 1 -"24886" 4 154 3 1 1 1 3 1 100 446 125 131 1714 6 3 1 1566 1 5 6 1 1 -"24887" 4 154 3 1 1 1 3 1 100 446 131 138 2024 7 6 1 3709 1 5 7 1 1 -"24888" 4 154 3 1 1 1 3 1 100 446 138 145 2978 4 2 1 610 1 5 7 1 1 -"24889" 4 154 3 1 1 1 3 1 100 446 145 283 1330 1 8 0 0 5 95 138 0 1 -"24890" 4 154 3 1 1 1 3 1 100 446 283 297 2521 5 7 0 1343 1 5 14 0 1 -"24891" 4 154 3 1 1 1 3 1 100 446 297 446 1460 9 5 1 869 3 50 149 1 1 -"24892" 4 154 3 1 1 1 4 1 100 239 100 105 2190 8 7 1 2804 1 5 5 1 1 -"24893" 4 154 3 1 1 1 4 1 100 239 105 110 1212 3 10 0 1808 1 5 5 0 1 -"24894" 4 154 3 1 1 1 4 1 100 239 110 116 1744 7 9 0 690 1 5 6 0 1 -"24895" 4 154 3 1 1 1 4 1 100 239 116 174 1246 9 1 1 1137 3 50 58 1 1 -"24896" 4 154 3 1 1 1 4 1 100 239 174 183 2296 2 3 0 1361 1 5 9 0 1 -"24897" 4 154 3 1 1 1 4 1 100 239 183 229 1549 1 8 0 665 2 25 46 0 1 -"24898" 4 154 3 1 1 1 4 1 100 239 229 240 1215 5 4 1 2019 1 5 11 1 1 -"24899" 4 154 3 1 1 1 4 1 100 239 240 252 1424 4 2 1 561 1 5 12 1 1 -"24900" 4 154 3 1 1 1 4 1 100 239 252 239 1260 6 3 0 637 1 5 13 1 0 -"24901" 4 154 4 0 1 0 1 1 100 1 100 175 7159 2 7 0 897 2 75 75 0 1 -"24902" 4 154 4 0 1 0 1 1 100 1 175 306 2163 3 7 0 1002 2 75 131 0 1 -"24903" 4 154 4 0 1 0 1 1 100 1 306 15 2591 2 8 1 3794 1 95 291 0 0 -"24904" 4 154 4 0 1 0 1 1 100 1 15 1 1899 8 9 1 1486 1 95 14 0 0 -"24905" 4 154 5 1 1 0 1 1 100 265 100 195 3855 1 3 0 597 1 95 95 0 1 -"24906" 4 154 5 1 1 0 1 1 100 265 195 380 1830 6 5 1 532 1 95 185 1 1 -"24907" 4 154 5 1 1 0 1 1 100 265 380 741 3465 2 8 0 1945 1 95 361 0 1 -"24908" 4 154 5 1 1 0 1 1 100 265 741 37 1450 8 9 1 1555 1 95 704 0 0 -"24909" 4 154 5 1 1 0 1 1 100 265 37 72 2342 3 4 0 428 1 95 35 0 1 -"24910" 4 154 5 1 1 0 1 1 100 265 72 140 1917 5 7 0 600 1 95 68 0 1 -"24911" 4 154 5 1 1 0 1 1 100 265 140 273 825 7 4 1 2239 1 95 133 1 1 -"24912" 4 154 5 1 1 0 1 1 100 265 273 136 1514 4 1 0 728 3 50 137 1 0 -"24913" 4 154 5 1 1 0 1 1 100 265 136 265 994 9 6 1 433 1 95 129 1 1 -"24914" 4 154 5 1 1 0 2 1 100 267 100 195 1229 8 1 1 552 1 95 95 1 1 
-"24915" 4 154 5 1 1 0 2 1 100 267 195 380 787 6 2 1 555 1 95 185 1 1 -"24916" 4 154 5 1 1 0 2 1 100 267 380 741 1438 7 9 0 1119 1 95 361 0 1 -"24917" 4 154 5 1 1 0 2 1 100 267 741 1445 911 2 10 0 1441 1 95 704 0 1 -"24918" 4 154 5 1 1 0 2 1 100 267 1445 722 1178 5 3 0 1032 3 50 723 1 0 -"24919" 4 154 5 1 1 0 2 1 100 267 722 36 1772 4 2 0 477 1 95 686 1 0 -"24920" 4 154 5 1 1 0 2 1 100 267 36 70 484 3 5 0 514 1 95 34 0 1 -"24921" 4 154 5 1 1 0 2 1 100 267 70 137 759 9 4 1 841 1 95 67 1 1 -"24922" 4 154 5 1 1 0 2 1 100 267 137 267 678 1 7 0 331 1 95 130 0 1 -"24923" 4 154 5 1 1 0 3 0 100 1 100 195 2132 7 5 1 484 1 95 95 1 1 -"24924" 4 154 5 1 1 0 3 0 100 1 195 10 1507 2 1 0 353 1 95 185 1 0 -"24925" 4 154 5 1 1 0 3 0 100 1 10 20 1253 8 6 1 342 1 95 10 1 1 -"24926" 4 154 5 1 1 0 3 0 100 1 20 1 547 4 7 1 423 1 95 19 0 0 -"24927" 4 154 5 1 1 0 4 1 100 0 100 195 1840 2 3 0 1006 1 95 95 0 1 -"24928" 4 154 5 1 1 0 4 1 100 0 195 380 748 7 4 1 3629 1 95 185 1 1 -"24929" 4 154 5 1 1 0 4 1 100 0 380 19 1478 3 1 0 4133 1 95 361 1 0 -"24930" 4 154 5 1 1 0 4 1 100 0 19 37 1341 1 9 0 427 1 95 18 0 1 -"24931" 4 154 5 1 1 0 4 1 100 0 37 72 1420 8 7 1 405 1 95 35 1 1 -"24932" 4 154 5 1 1 0 4 1 100 0 72 140 1231 9 2 1 370 1 95 68 1 1 -"24933" 4 154 5 1 1 0 4 1 100 0 140 35 5134 5 6 1 1503 2 75 105 0 0 -"24934" 4 154 5 1 1 0 4 1 100 0 35 2 1797 6 8 1 477 1 95 33 0 0 -"24935" 4 154 5 1 1 0 4 1 100 0 2 0 1195 4 7 1 312 1 95 2 0 0 -"24936" 4 155 2 0 1 1 1 1 100 203 100 150 6429 8 3 1 848 3 50 50 1 1 -"24937" 4 155 2 0 1 1 1 1 100 203 150 225 16045 3 7 0 1280 3 50 75 0 1 -"24938" 4 155 2 0 1 1 1 1 100 203 225 214 9328 8 2 0 938 1 5 11 1 0 -"24939" 4 155 2 0 1 1 1 1 100 203 214 203 4170 2 1 0 873 1 5 11 1 0 -"24940" 4 155 3 1 1 1 1 1 100 89 100 105 9161 9 7 1 1027 1 5 5 1 1 -"24941" 4 155 3 1 1 1 1 1 100 89 105 110 5087 4 8 0 874 1 5 5 0 1 -"24942" 4 155 3 1 1 1 1 1 100 89 110 104 3245 8 2 0 1617 1 5 6 1 0 -"24943" 4 155 3 1 1 1 1 1 100 89 104 109 2985 2 1 1 1208 1 5 5 1 1 -"24944" 4 155 3 1 1 1 1 1 100 89 109 82 4132 7 6 0 605 2 25 27 1 0 -"24945" 4 155 3 1 1 1 1 1 100 89 82 86 2853 5 3 1 819 1 5 4 1 1 -"24946" 4 155 3 1 1 1 1 1 100 89 86 90 2089 3 6 0 716 1 5 4 0 1 -"24947" 4 155 3 1 1 1 1 1 100 89 90 85 1815 6 9 1 1174 1 5 5 0 0 -"24948" 4 155 3 1 1 1 1 1 100 89 85 89 3451 1 4 0 1084 1 5 4 0 1 -"24949" 4 155 3 1 1 1 2 1 100 116 100 105 3405 2 9 0 777 1 5 5 0 1 -"24950" 4 155 3 1 1 1 2 1 100 116 105 100 2129 4 10 1 1017 1 5 5 0 0 -"24951" 4 155 3 1 1 1 2 1 100 116 100 95 2182 3 1 0 1627 1 5 5 1 0 -"24952" 4 155 3 1 1 1 2 1 100 116 95 100 1963 8 6 1 652 1 5 5 1 1 -"24953" 4 155 3 1 1 1 2 1 100 116 100 105 1578 5 7 0 1117 1 5 5 0 1 -"24954" 4 155 3 1 1 1 2 1 100 116 105 100 2461 6 8 1 749 1 5 5 0 0 -"24955" 4 155 3 1 1 1 2 1 100 116 100 105 2143 7 5 1 1470 1 5 5 1 1 -"24956" 4 155 3 1 1 1 2 1 100 116 105 110 1661 1 10 0 572 1 5 5 0 1 -"24957" 4 155 3 1 1 1 2 1 100 116 110 116 3309 9 3 1 725 1 5 6 1 1 -"24958" 4 155 3 1 1 1 3 1 100 116 100 105 2015 3 5 0 1172 1 5 5 0 1 -"24959" 4 155 3 1 1 1 3 1 100 116 105 100 2705 8 9 1 774 1 5 5 0 0 -"24960" 4 155 3 1 1 1 3 1 100 116 100 105 1878 2 4 0 642 1 5 5 0 1 -"24961" 4 155 3 1 1 1 3 1 100 116 105 100 2337 6 3 0 631 1 5 5 1 0 -"24962" 4 155 3 1 1 1 3 1 100 116 100 105 1167 7 6 1 644 1 5 5 1 1 -"24963" 4 155 3 1 1 1 3 1 100 116 105 110 2691 4 2 1 665 1 5 5 1 1 -"24964" 4 155 3 1 1 1 3 1 100 116 110 116 2054 1 8 0 562 1 5 6 0 1 -"24965" 4 155 3 1 1 1 3 1 100 116 116 110 3885 5 7 1 594 1 5 6 0 0 -"24966" 4 155 3 1 1 1 3 1 100 116 110 116 2115 9 5 1 516 1 5 6 1 1 -"24967" 4 
155 3 1 1 1 4 1 100 126 100 105 2515 8 7 1 625 1 5 5 1 1 -"24968" 4 155 3 1 1 1 4 1 100 126 105 110 1976 3 10 0 551 1 5 5 0 1 -"24969" 4 155 3 1 1 1 4 1 100 126 110 104 2699 7 9 1 616 1 5 6 0 0 -"24970" 4 155 3 1 1 1 4 1 100 126 104 109 2072 9 1 1 1123 1 5 5 1 1 -"24971" 4 155 3 1 1 1 4 1 100 126 109 114 3499 2 3 0 858 1 5 5 0 1 -"24972" 4 155 3 1 1 1 4 1 100 126 114 120 2374 1 8 0 1195 1 5 6 0 1 -"24973" 4 155 3 1 1 1 4 1 100 126 120 126 1984 5 4 1 863 1 5 6 1 1 -"24974" 4 155 3 1 1 1 4 1 100 126 126 120 2923 4 2 0 845 1 5 6 1 0 -"24975" 4 155 3 1 1 1 4 1 100 126 120 126 1506 6 3 1 2376 1 5 6 1 1 -"24976" 4 155 4 0 1 0 1 1 100 69 100 150 5654 2 7 0 2142 3 50 50 0 1 -"24977" 4 155 4 0 1 0 1 1 100 69 150 263 4686 3 7 0 2477 2 75 113 0 1 -"24978" 4 155 4 0 1 0 1 1 100 69 263 276 2012 2 8 0 1322 5 5 13 0 1 -"24979" 4 155 4 0 1 0 1 1 100 69 276 69 4201 8 9 1 2143 2 75 207 0 0 -"24980" 4 155 5 1 1 0 1 1 100 330 100 150 14557 1 3 0 3230 3 50 50 0 1 -"24981" 4 155 5 1 1 0 1 1 100 330 150 188 2353 6 5 1 1096 4 25 38 1 1 -"24982" 4 155 5 1 1 0 1 1 100 330 188 235 2305 2 8 0 1561 4 25 47 0 1 -"24983" 4 155 5 1 1 0 1 1 100 330 235 223 2516 8 9 1 1114 5 5 12 0 0 -"24984" 4 155 5 1 1 0 1 1 100 330 223 279 1706 3 4 0 2527 4 25 56 0 1 -"24985" 4 155 5 1 1 0 1 1 100 330 279 265 2604 5 7 1 1270 5 5 14 0 0 -"24986" 4 155 5 1 1 0 1 1 100 330 265 331 1679 7 4 1 2045 4 25 66 1 1 -"24987" 4 155 5 1 1 0 1 1 100 330 331 314 2289 4 1 0 1315 5 5 17 1 0 -"24988" 4 155 5 1 1 0 1 1 100 330 314 330 2097 9 6 1 1268 5 5 16 1 1 -"24989" 4 155 5 1 1 0 2 1 100 74 100 150 5246 8 1 1 715 3 50 50 1 1 -"24990" 4 155 5 1 1 0 2 1 100 74 150 112 2262 6 2 0 3104 4 25 38 1 0 -"24991" 4 155 5 1 1 0 2 1 100 74 112 106 3754 7 9 1 597 5 5 6 0 0 -"24992" 4 155 5 1 1 0 2 1 100 74 106 133 1446 2 10 0 2583 4 25 27 0 1 -"24993" 4 155 5 1 1 0 2 1 100 74 133 100 2234 5 3 0 2246 4 25 33 1 0 -"24994" 4 155 5 1 1 0 2 1 100 74 100 75 2488 4 2 0 1328 4 25 25 1 0 -"24995" 4 155 5 1 1 0 2 1 100 74 75 56 3446 3 5 1 1016 4 25 19 0 0 -"24996" 4 155 5 1 1 0 2 1 100 74 56 59 1349 9 4 1 835 5 5 3 1 1 -"24997" 4 155 5 1 1 0 2 1 100 74 59 74 1705 1 7 0 1486 4 25 15 0 1 -"24998" 4 155 5 1 1 0 3 1 100 255 100 125 3562 7 5 1 1191 4 25 25 1 1 -"24999" 4 155 5 1 1 0 3 1 100 255 125 131 3604 2 1 1 902 5 5 6 1 1 -"25000" 4 155 5 1 1 0 3 1 100 255 131 164 2137 8 6 1 2217 4 25 33 1 1 -"25001" 4 155 5 1 1 0 3 1 100 255 164 205 2757 4 7 0 2392 4 25 41 0 1 -"25002" 4 155 5 1 1 0 3 1 100 255 205 195 2171 3 10 1 1338 5 5 10 0 0 -"25003" 4 155 5 1 1 0 3 1 100 255 195 205 3464 6 8 0 1882 5 5 10 0 1 -"25004" 4 155 5 1 1 0 3 1 100 255 205 215 1893 9 2 1 1041 5 5 10 1 1 -"25005" 4 155 5 1 1 0 3 1 100 255 215 204 1997 5 3 0 1147 5 5 11 1 0 -"25006" 4 155 5 1 1 0 3 1 100 255 204 255 1745 1 10 0 1556 4 25 51 0 1 -"25007" 4 155 5 1 1 0 4 1 100 281 100 125 5954 2 3 0 1524 4 25 25 0 1 -"25008" 4 155 5 1 1 0 4 1 100 281 125 156 1552 7 4 1 3468 4 25 31 1 1 -"25009" 4 155 5 1 1 0 4 1 100 281 156 148 2534 3 1 0 835 5 5 8 1 0 -"25010" 4 155 5 1 1 0 4 1 100 281 148 259 2041 1 9 0 3565 2 75 111 0 1 -"25011" 4 155 5 1 1 0 4 1 100 281 259 272 2008 8 7 1 1063 5 5 13 1 1 -"25012" 4 155 5 1 1 0 4 1 100 281 272 340 6649 9 2 1 2579 4 25 68 1 1 -"25013" 4 155 5 1 1 0 4 1 100 281 340 357 2037 5 6 0 1715 5 5 17 0 1 -"25014" 4 155 5 1 1 0 4 1 100 281 357 268 2134 6 8 1 2439 4 25 89 0 0 -"25015" 4 155 5 1 1 0 4 1 100 281 268 281 2664 4 7 0 807 5 5 13 0 1 -"25016" 4 157 2 0 1 1 1 1 100 243 100 195 25472 8 3 1 2594 5 95 95 1 1 -"25017" 4 157 2 0 1 1 1 1 100 243 195 244 8428 3 7 0 713 2 25 49 0 1 
-"25018" 4 157 2 0 1 1 1 1 100 243 244 256 3428 8 2 1 1366 1 5 12 1 1 -"25019" 4 157 2 0 1 1 1 1 100 243 256 243 1283 2 1 0 883 1 5 13 1 0 -"25020" 4 157 3 1 1 1 1 1 100 58 100 75 1673 9 7 0 1183 2 25 25 1 0 -"25021" 4 157 3 1 1 1 1 1 100 58 75 71 1412 4 8 1 718 1 5 4 0 0 -"25022" 4 157 3 1 1 1 1 1 100 58 71 67 3121 8 2 0 634 1 5 4 1 0 -"25023" 4 157 3 1 1 1 1 1 100 58 67 70 1225 2 1 1 828 1 5 3 1 1 -"25024" 4 157 3 1 1 1 1 1 100 58 70 52 2674 7 6 0 632 2 25 18 1 0 -"25025" 4 157 3 1 1 1 1 1 100 58 52 55 632 5 3 1 1511 1 5 3 1 1 -"25026" 4 157 3 1 1 1 1 1 100 58 55 52 2324 3 6 1 627 1 5 3 0 0 -"25027" 4 157 3 1 1 1 1 1 100 58 52 55 1395 6 9 0 590 1 5 3 0 1 -"25028" 4 157 3 1 1 1 1 1 100 58 55 58 3071 1 4 0 1234 1 5 3 0 1 -"25029" 4 157 3 1 1 1 2 1 100 300 100 105 2860 2 9 0 1778 1 5 5 0 1 -"25030" 4 157 3 1 1 1 2 1 100 300 105 100 2018 4 10 1 648 1 5 5 0 0 -"25031" 4 157 3 1 1 1 2 1 100 300 100 95 1134 3 1 0 709 1 5 5 1 0 -"25032" 4 157 3 1 1 1 2 1 100 300 95 166 1809 8 6 1 640 4 75 71 1 1 -"25033" 4 157 3 1 1 1 2 1 100 300 166 208 3031 5 7 0 716 2 25 42 0 1 -"25034" 4 157 3 1 1 1 2 1 100 300 208 218 4479 6 8 0 1079 1 5 10 0 1 -"25035" 4 157 3 1 1 1 2 1 100 300 218 229 2737 7 5 1 2045 1 5 11 1 1 -"25036" 4 157 3 1 1 1 2 1 100 300 229 286 2777 1 10 0 1147 2 25 57 0 1 -"25037" 4 157 3 1 1 1 2 1 100 300 286 300 1337 9 3 1 591 1 5 14 1 1 -"25038" 4 157 3 1 1 1 3 1 100 191 100 105 1681 3 5 0 1302 1 5 5 0 1 -"25039" 4 157 3 1 1 1 3 1 100 191 105 79 2614 8 9 1 2175 2 25 26 0 0 -"25040" 4 157 3 1 1 1 3 1 100 191 79 83 1588 2 4 0 2870 1 5 4 0 1 -"25041" 4 157 3 1 1 1 3 1 100 191 83 87 2051 6 3 1 663 1 5 4 1 1 -"25042" 4 157 3 1 1 1 3 1 100 191 87 83 5205 7 6 0 579 1 5 4 1 0 -"25043" 4 157 3 1 1 1 3 1 100 191 83 87 1028 4 2 1 793 1 5 4 1 1 -"25044" 4 157 3 1 1 1 3 1 100 191 87 170 1825 1 8 0 0 5 95 83 0 1 -"25045" 4 157 3 1 1 1 3 1 100 191 170 127 2687 5 7 1 965 2 25 43 0 0 -"25046" 4 157 3 1 1 1 3 1 100 191 127 191 1996 9 5 1 763 3 50 64 1 1 -"25047" 4 157 3 1 1 1 4 1 100 192 100 150 1719 8 7 1 636 3 50 50 1 1 -"25048" 4 157 3 1 1 1 4 1 100 192 150 158 1491 3 10 0 1025 1 5 8 0 1 -"25049" 4 157 3 1 1 1 4 1 100 192 158 150 1230 7 9 1 713 1 5 8 0 0 -"25050" 4 157 3 1 1 1 4 1 100 192 150 142 732 9 1 0 495 1 5 8 1 0 -"25051" 4 157 3 1 1 1 4 1 100 192 142 135 492 2 3 1 1675 1 5 7 0 0 -"25052" 4 157 3 1 1 1 4 1 100 192 135 203 1105 1 8 0 1642 3 50 68 0 1 -"25053" 4 157 3 1 1 1 4 1 100 192 203 193 2049 5 4 0 725 1 5 10 1 0 -"25054" 4 157 3 1 1 1 4 1 100 192 193 183 1304 4 2 0 823 1 5 10 1 0 -"25055" 4 157 3 1 1 1 4 1 100 192 183 192 973 6 3 1 908 1 5 9 1 1 -"25056" 4 157 4 0 1 0 1 1 100 285 100 195 2871 2 7 0 1100 1 95 95 0 1 -"25057" 4 157 4 0 1 0 1 1 100 285 195 293 2853 3 7 0 1283 3 50 98 0 1 -"25058" 4 157 4 0 1 0 1 1 100 285 293 571 1395 2 8 0 855 1 95 278 0 1 -"25059" 4 157 4 0 1 0 1 1 100 285 571 285 3412 8 9 1 1135 3 50 286 0 0 -"25060" 4 157 5 1 1 0 1 1 100 27 100 195 1349 1 3 0 795 1 95 95 0 1 -"25061" 4 157 5 1 1 0 1 1 100 27 195 380 1138 6 5 1 714 1 95 185 1 1 -"25062" 4 157 5 1 1 0 1 1 100 27 380 741 1237 2 8 0 695 1 95 361 0 1 -"25063" 4 157 5 1 1 0 1 1 100 27 741 37 899 8 9 1 638 1 95 704 0 0 -"25064" 4 157 5 1 1 0 1 1 100 27 37 72 3148 3 4 0 1236 1 95 35 0 1 -"25065" 4 157 5 1 1 0 1 1 100 27 72 140 1678 5 7 0 820 1 95 68 0 1 -"25066" 4 157 5 1 1 0 1 1 100 27 140 273 2443 7 4 1 644 1 95 133 1 1 -"25067" 4 157 5 1 1 0 1 1 100 27 273 14 1483 4 1 0 634 1 95 259 1 0 -"25068" 4 157 5 1 1 0 1 1 100 27 14 27 3025 9 6 1 463 1 95 13 1 1 -"25069" 4 157 5 1 1 0 2 0 100 0 100 195 1995 8 1 1 642 1 95 95 
1 1 -"25070" 4 157 5 1 1 0 2 0 100 0 195 10 2432 6 2 0 466 1 95 185 1 0 -"25071" 4 157 5 1 1 0 2 0 100 0 10 0 745 7 9 1 772 1 95 10 0 0 -"25072" 4 157 5 1 1 0 3 1 100 4 100 195 5013 7 5 1 675 1 95 95 1 1 -"25073" 4 157 5 1 1 0 3 1 100 4 195 10 1080 2 1 0 635 1 95 185 1 0 -"25074" 4 157 5 1 1 0 3 1 100 4 10 20 1746 8 6 1 932 1 95 10 1 1 -"25075" 4 157 5 1 1 0 3 1 100 4 20 39 1197 4 7 0 762 1 95 19 0 1 -"25076" 4 157 5 1 1 0 3 1 100 4 39 76 1163 3 10 0 1530 1 95 37 0 1 -"25077" 4 157 5 1 1 0 3 1 100 4 76 4 2686 6 8 1 1148 1 95 72 0 0 -"25078" 4 157 5 1 1 0 3 1 100 4 4 8 1338 9 2 1 664 1 95 4 1 1 -"25079" 4 157 5 1 1 0 3 1 100 4 8 2 2069 5 3 0 1433 2 75 6 1 0 -"25080" 4 157 5 1 1 0 3 1 100 4 2 4 1818 1 10 0 705 1 95 2 0 1 -"25081" 4 157 5 1 1 0 4 0 100 0 100 105 1966 2 3 0 799 5 5 5 0 1 -"25082" 4 157 5 1 1 0 4 0 100 0 105 205 3214 7 4 1 771 1 95 100 1 1 -"25083" 4 157 5 1 1 0 4 0 100 0 205 10 1572 3 1 0 2318 1 95 195 1 0 -"25084" 4 157 5 1 1 0 4 0 100 0 10 0 973 1 9 1 724 1 95 10 0 0 -"25085" 4 158 2 0 1 1 1 1 100 134 100 150 29172 8 3 1 1662 3 50 50 1 1 -"25086" 4 158 2 0 1 1 1 1 100 134 150 188 6052 3 7 0 2138 2 25 38 0 1 -"25087" 4 158 2 0 1 1 1 1 100 134 188 179 2178 8 2 0 1423 1 5 9 1 0 -"25088" 4 158 2 0 1 1 1 1 100 134 179 134 2553 2 1 0 1453 2 25 45 1 0 -"25089" 4 158 3 1 1 1 1 1 100 432 100 175 1912 9 7 1 835 4 75 75 1 1 -"25090" 4 158 3 1 1 1 1 1 100 432 175 131 5137 4 8 1 642 2 25 44 0 0 -"25091" 4 158 3 1 1 1 1 1 100 432 131 197 2568 8 2 1 907 3 50 66 1 1 -"25092" 4 158 3 1 1 1 1 1 100 432 197 98 2821 2 1 0 901 3 50 99 1 0 -"25093" 4 158 3 1 1 1 1 1 100 432 98 147 3047 7 6 1 581 3 50 49 1 1 -"25094" 4 158 3 1 1 1 1 1 100 432 147 184 4222 5 3 1 915 2 25 37 1 1 -"25095" 4 158 3 1 1 1 1 1 100 432 184 230 2157 3 6 0 1013 2 25 46 0 1 -"25096" 4 158 3 1 1 1 1 1 100 432 230 288 2625 6 9 0 3723 2 25 58 0 1 -"25097" 4 158 3 1 1 1 1 1 100 432 288 432 2455 1 4 0 944 3 50 144 0 1 -"25098" 4 158 3 1 1 1 2 1 100 905 100 150 2783 2 9 0 2241 3 50 50 0 1 -"25099" 4 158 3 1 1 1 2 1 100 905 150 188 2691 4 10 0 2225 2 25 38 0 1 -"25100" 4 158 3 1 1 1 2 1 100 905 188 235 3580 3 1 1 1448 2 25 47 1 1 -"25101" 4 158 3 1 1 1 2 1 100 905 235 353 1544 8 6 1 916 3 50 118 1 1 -"25102" 4 158 3 1 1 1 2 1 100 905 353 441 3734 5 7 0 754 2 25 88 0 1 -"25103" 4 158 3 1 1 1 2 1 100 905 441 463 3308 6 8 0 1124 1 5 22 0 1 -"25104" 4 158 3 1 1 1 2 1 100 905 463 579 2716 7 5 1 1056 2 25 116 1 1 -"25105" 4 158 3 1 1 1 2 1 100 905 579 724 2691 1 10 0 1593 2 25 145 0 1 -"25106" 4 158 3 1 1 1 2 1 100 905 724 905 2323 9 3 1 2034 2 25 181 1 1 -"25107" 4 158 3 1 1 1 3 1 100 260 100 75 2672 3 5 1 526 2 25 25 0 0 -"25108" 4 158 3 1 1 1 3 1 100 260 75 37 1618 8 9 1 1100 3 50 38 0 0 -"25109" 4 158 3 1 1 1 3 1 100 260 37 56 1532 2 4 0 1026 3 50 19 0 1 -"25110" 4 158 3 1 1 1 3 1 100 260 56 42 2958 6 3 0 4999 2 25 14 1 0 -"25111" 4 158 3 1 1 1 3 1 100 260 42 63 1886 7 6 1 1387 3 50 21 1 1 -"25112" 4 158 3 1 1 1 3 1 100 260 63 79 5238 4 2 1 2882 2 25 16 1 1 -"25113" 4 158 3 1 1 1 3 1 100 260 79 138 1447 1 8 0 1152 4 75 59 0 1 -"25114" 4 158 3 1 1 1 3 1 100 260 138 173 1982 5 7 0 2214 2 25 35 0 1 -"25115" 4 158 3 1 1 1 3 1 100 260 173 260 1538 9 5 1 3734 3 50 87 1 1 -"25116" 4 158 3 1 1 1 4 1 100 388 100 150 1865 8 7 1 750 3 50 50 1 1 -"25117" 4 158 3 1 1 1 4 1 100 388 150 188 1943 3 10 0 1473 2 25 38 0 1 -"25118" 4 158 3 1 1 1 4 1 100 388 188 235 1672 7 9 0 1030 2 25 47 0 1 -"25119" 4 158 3 1 1 1 4 1 100 388 235 353 1522 9 1 1 991 3 50 118 1 1 -"25120" 4 158 3 1 1 1 4 1 100 388 353 441 1837 2 3 0 1483 2 25 88 0 1 -"25121" 4 158 3 1 1 
1 4 1 100 388 441 551 2322 1 8 0 1385 2 25 110 0 1 -"25122" 4 158 3 1 1 1 4 1 100 388 551 689 2301 5 4 1 1357 2 25 138 1 1 -"25123" 4 158 3 1 1 1 4 1 100 388 689 517 2474 4 2 0 1410 2 25 172 1 0 -"25124" 4 158 3 1 1 1 4 1 100 388 517 388 3476 6 3 0 1004 2 25 129 1 0 -"25125" 4 158 4 0 1 0 1 1 100 164 100 175 7022 2 7 0 2630 2 75 75 0 1 -"25126" 4 158 4 0 1 0 1 1 100 164 175 263 2134 3 7 0 1726 3 50 88 0 1 -"25127" 4 158 4 0 1 0 1 1 100 164 263 329 2346 2 8 0 1336 4 25 66 0 1 -"25128" 4 158 4 0 1 0 1 1 100 164 329 164 1829 8 9 1 4476 3 50 165 0 0 -"25129" 4 158 5 1 1 0 1 1 100 432 100 175 1806 1 3 0 3901 2 75 75 0 1 -"25130" 4 158 5 1 1 0 1 1 100 432 175 263 1953 6 5 1 3562 3 50 88 1 1 -"25131" 4 158 5 1 1 0 1 1 100 432 263 329 1565 2 8 0 745 4 25 66 0 1 -"25132" 4 158 5 1 1 0 1 1 100 432 329 164 1675 8 9 1 3121 3 50 165 0 0 -"25133" 4 158 5 1 1 0 1 1 100 432 164 205 1490 3 4 0 881 4 25 41 0 1 -"25134" 4 158 5 1 1 0 1 1 100 432 205 256 2188 5 7 0 938 4 25 51 0 1 -"25135" 4 158 5 1 1 0 1 1 100 432 256 384 1539 7 4 1 1439 3 50 128 1 1 -"25136" 4 158 5 1 1 0 1 1 100 432 384 288 2016 4 1 0 1198 4 25 96 1 0 -"25137" 4 158 5 1 1 0 1 1 100 432 288 432 1251 9 6 1 1437 3 50 144 1 1 -"25138" 4 158 5 1 1 0 2 1 100 323 100 150 1688 8 1 1 909 3 50 50 1 1 -"25139" 4 158 5 1 1 0 2 1 100 323 150 225 1703 6 2 1 3668 3 50 75 1 1 -"25140" 4 158 5 1 1 0 2 1 100 323 225 169 2851 7 9 1 993 4 25 56 0 0 -"25141" 4 158 5 1 1 0 2 1 100 323 169 254 1262 2 10 0 920 3 50 85 0 1 -"25142" 4 158 5 1 1 0 2 1 100 323 254 190 3010 5 3 0 721 4 25 64 1 0 -"25143" 4 158 5 1 1 0 2 1 100 323 190 95 2124 4 2 0 1205 3 50 95 1 0 -"25144" 4 158 5 1 1 0 2 1 100 323 95 143 5041 3 5 0 1919 3 50 48 0 1 -"25145" 4 158 5 1 1 0 2 1 100 323 143 215 1473 9 4 1 1104 3 50 72 1 1 -"25146" 4 158 5 1 1 0 2 1 100 323 215 323 1385 1 7 0 2645 3 50 108 0 1 -"25147" 4 158 5 1 1 0 3 1 100 564 100 150 1768 7 5 1 1034 3 50 50 1 1 -"25148" 4 158 5 1 1 0 3 1 100 564 150 75 1214 2 1 0 795 3 50 75 1 0 -"25149" 4 158 5 1 1 0 3 1 100 564 75 146 1442 8 6 1 1059 1 95 71 1 1 -"25150" 4 158 5 1 1 0 3 1 100 564 146 183 3286 4 7 0 735 4 25 37 0 1 -"25151" 4 158 5 1 1 0 3 1 100 564 183 229 1137 3 10 0 481 4 25 46 0 1 -"25152" 4 158 5 1 1 0 3 1 100 564 229 172 3456 6 8 1 2313 4 25 57 0 0 -"25153" 4 158 5 1 1 0 3 1 100 564 172 301 1544 9 2 1 1621 2 75 129 1 1 -"25154" 4 158 5 1 1 0 3 1 100 564 301 376 1765 5 3 1 1391 4 25 75 1 1 -"25155" 4 158 5 1 1 0 3 1 100 564 376 564 1362 1 10 0 992 3 50 188 0 1 -"25156" 4 158 5 1 1 0 4 1 100 89 100 150 1575 2 3 0 1818 3 50 50 0 1 -"25157" 4 158 5 1 1 0 4 1 100 89 150 225 2336 7 4 1 1191 3 50 75 1 1 -"25158" 4 158 5 1 1 0 4 1 100 89 225 112 1572 3 1 0 1839 3 50 113 1 0 -"25159" 4 158 5 1 1 0 4 1 100 89 112 168 1356 1 9 0 730 3 50 56 0 1 -"25160" 4 158 5 1 1 0 4 1 100 89 168 252 1460 8 7 1 1557 3 50 84 1 1 -"25161" 4 158 5 1 1 0 4 1 100 89 252 378 1600 9 2 1 786 3 50 126 1 1 -"25162" 4 158 5 1 1 0 4 1 100 89 378 359 1522 5 6 1 799 5 5 19 0 0 -"25163" 4 158 5 1 1 0 4 1 100 89 359 179 1447 6 8 1 2864 3 50 180 0 0 -"25164" 4 158 5 1 1 0 4 1 100 89 179 89 5229 4 7 1 1370 3 50 90 0 0 -"25165" 4 160 2 0 1 1 1 1 100 187 100 150 4693 8 3 1 1796 3 50 50 1 1 -"25166" 4 160 2 0 1 1 1 1 100 187 150 263 8043 3 7 0 1154 4 75 113 0 1 -"25167" 4 160 2 0 1 1 1 1 100 187 263 250 2721 8 2 0 1141 1 5 13 1 0 -"25168" 4 160 2 0 1 1 1 1 100 187 250 187 2963 2 1 0 484 2 25 63 1 0 -"25169" 4 160 3 1 1 1 1 1 100 432 100 195 3363 9 7 1 1250 5 95 95 1 1 -"25170" 4 160 3 1 1 1 1 1 100 432 195 293 2643 4 8 0 1276 3 50 98 0 1 -"25171" 4 160 3 1 1 1 1 1 100 432 293 
278 28108 8 2 0 1488 1 5 15 1 0 -"25172" 4 160 3 1 1 1 1 1 100 432 278 208 70046 2 1 0 1798 2 25 70 1 0 -"25173" 4 160 3 1 1 1 1 1 100 432 208 364 2414 7 6 1 777 4 75 156 1 1 -"25174" 4 160 3 1 1 1 1 1 100 432 364 346 1521 5 3 0 1038 1 5 18 1 0 -"25175" 4 160 3 1 1 1 1 1 100 432 346 433 2818 3 6 0 977 2 25 87 0 1 -"25176" 4 160 3 1 1 1 1 1 100 432 433 455 2264 6 9 0 593 1 5 22 0 1 -"25177" 4 160 3 1 1 1 1 1 100 432 455 432 1610 1 4 1 531 1 5 23 0 0 -"25178" 4 160 3 1 1 1 2 1 100 770 100 195 3679 2 9 0 838 5 95 95 0 1 -"25179" 4 160 3 1 1 1 2 1 100 770 195 244 3035 4 10 0 880 2 25 49 0 1 -"25180" 4 160 3 1 1 1 2 1 100 770 244 232 6731 3 1 0 4480 1 5 12 1 0 -"25181" 4 160 3 1 1 1 2 1 100 770 232 348 4384 8 6 1 1846 3 50 116 1 1 -"25182" 4 160 3 1 1 1 2 1 100 770 348 522 1859 5 7 0 248 3 50 174 0 1 -"25183" 4 160 3 1 1 1 2 1 100 770 522 391 4522 6 8 1 3352 2 25 131 0 0 -"25184" 4 160 3 1 1 1 2 1 100 770 391 587 1918 7 5 1 2009 3 50 196 1 1 -"25185" 4 160 3 1 1 1 2 1 100 770 587 616 2807 1 10 0 2663 1 5 29 0 1 -"25186" 4 160 3 1 1 1 2 1 100 770 616 770 2165 9 3 1 908 2 25 154 1 1 -"25187" 4 160 3 1 1 1 3 1 100 64 100 195 1120 3 5 0 593 5 95 95 0 1 -"25188" 4 160 3 1 1 1 3 1 100 64 195 10 1258 8 9 1 700 5 95 185 0 0 -"25189" 4 160 3 1 1 1 3 1 100 64 10 20 1160 2 4 0 1047 5 95 10 0 1 -"25190" 4 160 3 1 1 1 3 1 100 64 20 39 1395 6 3 1 654 5 95 19 1 1 -"25191" 4 160 3 1 1 1 3 1 100 64 39 68 2188 7 6 1 1914 4 75 29 1 1 -"25192" 4 160 3 1 1 1 3 1 100 64 68 34 1308 4 2 0 988 3 50 34 1 0 -"25193" 4 160 3 1 1 1 3 1 100 64 34 66 1418 1 8 0 1652 5 95 32 0 1 -"25194" 4 160 3 1 1 1 3 1 100 64 66 33 5019 5 7 1 2162 3 50 33 0 0 -"25195" 4 160 3 1 1 1 3 1 100 64 33 64 1552 9 5 1 0 5 95 31 1 1 -"25196" 4 160 3 1 1 1 4 1 100 1390 100 150 1272 8 7 1 801 3 50 50 1 1 -"25197" 4 160 3 1 1 1 4 1 100 1390 150 188 712 3 10 0 333 2 25 38 0 1 -"25198" 4 160 3 1 1 1 4 1 100 1390 188 179 1162 7 9 1 1049 1 5 9 0 0 -"25199" 4 160 3 1 1 1 4 1 100 1390 179 349 1564 9 1 1 0 5 95 170 1 1 -"25200" 4 160 3 1 1 1 4 1 100 1390 349 681 1399 2 3 0 1659 5 95 332 0 1 -"25201" 4 160 3 1 1 1 4 1 100 1390 681 1328 3746 1 8 0 0 5 95 647 0 1 -"25202" 4 160 3 1 1 1 4 1 100 1390 1328 1394 3661 5 4 1 373 1 5 66 1 1 -"25203" 4 160 3 1 1 1 4 1 100 1390 1394 1324 2256 4 2 0 290 1 5 70 1 0 -"25204" 4 160 3 1 1 1 4 1 100 1390 1324 1390 1050 6 3 1 349 1 5 66 1 1 -"25205" 4 160 4 0 1 0 1 1 100 29 100 150 8619 2 7 0 503 3 50 50 0 1 -"25206" 4 160 4 0 1 0 1 1 100 29 150 293 2116 3 7 0 621 1 95 143 0 1 -"25207" 4 160 4 0 1 0 1 1 100 29 293 571 1556 2 8 0 328 1 95 278 0 1 -"25208" 4 160 4 0 1 0 1 1 100 29 571 29 1157 8 9 1 636 1 95 542 0 0 -"25209" 4 160 5 1 1 0 1 0 100 0 100 195 1861 1 3 0 362 1 95 95 0 1 -"25210" 4 160 5 1 1 0 1 0 100 0 195 380 926 6 5 1 287 1 95 185 1 1 -"25211" 4 160 5 1 1 0 1 0 100 0 380 570 919 2 8 0 1033 3 50 190 0 1 -"25212" 4 160 5 1 1 0 1 0 100 0 570 28 805 8 9 1 229 1 95 542 0 0 -"25213" 4 160 5 1 1 0 1 0 100 0 28 55 919 3 4 0 328 1 95 27 0 1 -"25214" 4 160 5 1 1 0 1 0 100 0 55 3 1134 5 7 1 253 1 95 52 0 0 -"25215" 4 160 5 1 1 0 1 0 100 0 3 6 1110 7 4 1 415 1 95 3 1 1 -"25216" 4 160 5 1 1 0 1 0 100 0 6 0 1161 4 1 0 799 1 95 6 1 0 -"25217" 4 160 5 1 1 0 2 0 100 0 100 195 6044 8 1 1 354 1 95 95 1 1 -"25218" 4 160 5 1 1 0 2 0 100 0 195 380 2118 6 2 1 223 1 95 185 1 1 -"25219" 4 160 5 1 1 0 2 0 100 0 380 19 793 7 9 1 866 1 95 361 0 0 -"25220" 4 160 5 1 1 0 2 0 100 0 19 37 1966 2 10 0 374 1 95 18 0 1 -"25221" 4 160 5 1 1 0 2 0 100 0 37 2 2465 5 3 0 265 1 95 35 1 0 -"25222" 4 160 5 1 1 0 2 0 100 0 2 0 1778 4 2 0 427 1 95 2 1 0 
-"25223" 4 160 5 1 1 0 3 0 100 1 100 195 1354 7 5 1 1070 1 95 95 1 1 -"25224" 4 160 5 1 1 0 3 0 100 1 195 10 1425 2 1 0 1995 1 95 185 1 0 -"25225" 4 160 5 1 1 0 3 0 100 1 10 20 3225 8 6 1 1061 1 95 10 1 1 -"25226" 4 160 5 1 1 0 3 0 100 1 20 39 7080 4 7 0 1212 1 95 19 0 1 -"25227" 4 160 5 1 1 0 3 0 100 1 39 76 3951 3 10 0 353 1 95 37 0 1 -"25228" 4 160 5 1 1 0 3 0 100 1 76 4 933 6 8 1 254 1 95 72 0 0 -"25229" 4 160 5 1 1 0 3 0 100 1 4 1 1183 9 2 0 1668 2 75 3 1 0 -"25230" 4 160 5 1 1 0 4 1 100 971 100 195 1297 2 3 0 2169 1 95 95 0 1 -"25231" 4 160 5 1 1 0 4 1 100 971 195 341 2031 7 4 1 3108 2 75 146 1 1 -"25232" 4 160 5 1 1 0 4 1 100 971 341 85 2773 3 1 0 623 2 75 256 1 0 -"25233" 4 160 5 1 1 0 4 1 100 971 85 166 3142 1 9 0 982 1 95 81 0 1 -"25234" 4 160 5 1 1 0 4 1 100 971 166 324 1045 8 7 1 449 1 95 158 1 1 -"25235" 4 160 5 1 1 0 4 1 100 971 324 632 1153 9 2 1 311 1 95 308 1 1 -"25236" 4 160 5 1 1 0 4 1 100 971 632 664 2590 5 6 0 1137 5 5 32 0 1 -"25237" 4 160 5 1 1 0 4 1 100 971 664 498 21480 6 8 1 1968 4 25 166 0 0 -"25238" 4 160 5 1 1 0 4 1 100 971 498 971 3719 4 7 0 1308 1 95 473 0 1 -"25239" 4 161 2 0 1 1 1 1 100 4 100 175 6144 8 3 1 2826 4 75 75 1 1 -"25240" 4 161 2 0 1 1 1 1 100 4 175 87 7200 3 7 1 3559 3 50 88 0 0 -"25241" 4 161 2 0 1 1 1 1 100 4 87 83 20490 8 2 0 4073 1 5 4 1 0 -"25242" 4 161 2 0 1 1 1 1 100 4 83 4 2156 2 1 0 2828 5 95 79 1 0 -"25243" 4 161 3 1 1 1 1 1 100 117 100 95 6346 9 7 0 1226 1 5 5 1 0 -"25244" 4 161 3 1 1 1 1 1 100 117 95 119 1420 4 8 0 950 2 25 24 0 1 -"25245" 4 161 3 1 1 1 1 1 100 117 119 113 2758 8 2 0 1049 1 5 6 1 0 -"25246" 4 161 3 1 1 1 1 1 100 117 113 107 852 2 1 0 824 1 5 6 1 0 -"25247" 4 161 3 1 1 1 1 1 100 117 107 80 1382 7 6 0 1186 2 25 27 1 0 -"25248" 4 161 3 1 1 1 1 1 100 117 80 60 1183 5 3 0 1885 2 25 20 1 0 -"25249" 4 161 3 1 1 1 1 1 100 117 60 90 776 3 6 0 2385 3 50 30 0 1 -"25250" 4 161 3 1 1 1 1 1 100 117 90 67 2205 6 9 1 194 2 25 23 0 0 -"25251" 4 161 3 1 1 1 1 1 100 117 67 117 1694 1 4 0 891 4 75 50 0 1 -"25252" 4 161 3 1 1 1 2 1 100 255 100 95 4471 2 9 1 1256 1 5 5 0 0 -"25253" 4 161 3 1 1 1 2 1 100 255 95 100 761 4 10 0 931 1 5 5 0 1 -"25254" 4 161 3 1 1 1 2 1 100 255 100 105 900 3 1 1 358 1 5 5 1 1 -"25255" 4 161 3 1 1 1 2 1 100 255 105 205 546 8 6 1 637 5 95 100 1 1 -"25256" 4 161 3 1 1 1 2 1 100 255 205 215 1708 5 7 0 808 1 5 10 0 1 -"25257" 4 161 3 1 1 1 2 1 100 255 215 226 1231 6 8 0 338 1 5 11 0 1 -"25258" 4 161 3 1 1 1 2 1 100 255 226 215 847 7 5 0 404 1 5 11 1 0 -"25259" 4 161 3 1 1 1 2 1 100 255 215 204 2366 1 10 1 291 1 5 11 0 0 -"25260" 4 161 3 1 1 1 2 1 100 255 204 255 586 9 3 1 284 2 25 51 1 1 -"25261" 4 161 3 1 1 1 3 1 100 256 100 95 2249 3 5 1 580 1 5 5 0 0 -"25262" 4 161 3 1 1 1 3 1 100 256 95 119 1269 8 9 0 206 2 25 24 0 1 -"25263" 4 161 3 1 1 1 3 1 100 256 119 232 1418 2 4 0 1741 5 95 113 0 1 -"25264" 4 161 3 1 1 1 3 1 100 256 232 244 3487 6 3 1 799 1 5 12 1 1 -"25265" 4 161 3 1 1 1 3 1 100 256 244 256 470 7 6 1 405 1 5 12 1 1 -"25266" 4 161 3 1 1 1 3 1 100 256 256 269 327 4 2 1 437 1 5 13 1 1 -"25267" 4 161 3 1 1 1 3 1 100 256 269 256 261 1 8 1 350 1 5 13 0 0 -"25268" 4 161 3 1 1 1 3 1 100 256 256 269 698 5 7 0 935 1 5 13 0 1 -"25269" 4 161 3 1 1 1 3 1 100 256 269 256 719 9 5 0 291 1 5 13 1 0 -"25270" 4 161 3 1 1 1 4 1 100 397 100 95 16937 8 7 0 2105 1 5 5 1 0 -"25271" 4 161 3 1 1 1 4 1 100 397 95 90 818 3 10 1 465 1 5 5 0 0 -"25272" 4 161 3 1 1 1 4 1 100 397 90 95 440 7 9 0 409 1 5 5 0 1 -"25273" 4 161 3 1 1 1 4 1 100 397 95 90 858 9 1 0 527 1 5 5 1 0 -"25274" 4 161 3 1 1 1 4 1 100 397 90 176 1715 2 3 0 2456 5 95 
86 0 1 -"25275" 4 161 3 1 1 1 4 1 100 397 176 343 1336 1 8 0 215 5 95 167 0 1 -"25276" 4 161 3 1 1 1 4 1 100 397 343 360 1674 5 4 1 656 1 5 17 1 1 -"25277" 4 161 3 1 1 1 4 1 100 397 360 378 1085 4 2 1 255 1 5 18 1 1 -"25278" 4 161 3 1 1 1 4 1 100 397 378 397 264 6 3 1 577 1 5 19 1 1 -"25279" 4 161 4 0 1 0 1 1 100 27 100 150 13988 2 7 0 600 3 50 50 0 1 -"25280" 4 161 4 0 1 0 1 1 100 27 150 7 3353 3 7 1 2455 1 95 143 0 0 -"25281" 4 161 4 0 1 0 1 1 100 27 7 14 1055 2 8 0 546 1 95 7 0 1 -"25282" 4 161 4 0 1 0 1 1 100 27 14 27 622 8 9 0 3062 1 95 13 0 1 -"25283" 4 161 5 1 1 0 1 0 100 0 100 195 1430 1 3 0 488 1 95 95 0 1 -"25284" 4 161 5 1 1 0 1 0 100 0 195 380 790 6 5 1 406 1 95 185 1 1 -"25285" 4 161 5 1 1 0 1 0 100 0 380 741 1423 2 8 0 329 1 95 361 0 1 -"25286" 4 161 5 1 1 0 1 0 100 0 741 37 783 8 9 1 447 1 95 704 0 0 -"25287" 4 161 5 1 1 0 1 0 100 0 37 2 4031 3 4 1 421 1 95 35 0 0 -"25288" 4 161 5 1 1 0 1 0 100 0 2 4 629 5 7 0 349 1 95 2 0 1 -"25289" 4 161 5 1 1 0 1 0 100 0 4 0 330 7 4 0 303 1 95 4 1 0 -"25290" 4 161 5 1 1 0 2 1 100 1100 100 195 2139 8 1 1 291 1 95 95 1 1 -"25291" 4 161 5 1 1 0 2 1 100 1100 195 10 898 6 2 0 373 1 95 185 1 0 -"25292" 4 161 5 1 1 0 2 1 100 1100 10 20 96 7 9 0 521 1 95 10 0 1 -"25293" 4 161 5 1 1 0 2 1 100 1100 20 39 413 2 10 0 311 1 95 19 0 1 -"25294" 4 161 5 1 1 0 2 1 100 1100 39 76 717 5 3 1 304 1 95 37 1 1 -"25295" 4 161 5 1 1 0 2 1 100 1100 76 148 1671 4 2 1 364 1 95 72 1 1 -"25296" 4 161 5 1 1 0 2 1 100 1100 148 289 712 3 5 0 292 1 95 141 0 1 -"25297" 4 161 5 1 1 0 2 1 100 1100 289 564 1811 9 4 1 754 1 95 275 1 1 -"25298" 4 161 5 1 1 0 2 1 100 1100 564 1100 859 1 7 0 648 1 95 536 0 1 -"25299" 4 161 5 1 1 0 3 0 100 0 100 50 2089 7 5 0 324 3 50 50 1 0 -"25300" 4 161 5 1 1 0 3 0 100 0 50 2 722 2 1 0 2349 1 95 48 1 0 -"25301" 4 161 5 1 1 0 3 0 100 0 2 0 499 8 6 0 264 1 95 2 1 0 -"25302" 4 161 5 1 1 0 4 0 100 0 100 5 2841 2 3 1 543 1 95 95 0 0 -"25303" 4 161 5 1 1 0 4 0 100 0 5 0 466 7 4 0 434 1 95 5 1 0 -"25304" 4 166 2 0 1 1 1 1 100 176 100 150 7578 8 3 1 784 3 50 50 1 1 -"25305" 4 166 2 0 1 1 1 1 100 176 150 112 7583 3 7 1 1236 2 25 38 0 0 -"25306" 4 166 2 0 1 1 1 1 100 176 112 168 3575 8 2 1 650 3 50 56 1 1 -"25307" 4 166 2 0 1 1 1 1 100 176 168 176 3585 2 1 1 1300 1 5 8 1 1 -"25308" 4 166 3 1 1 1 1 0 100 0 100 5 4077 9 7 0 0 5 95 95 1 0 -"25309" 4 166 3 1 1 1 1 0 100 0 5 4 3416 4 8 1 1892 2 25 1 0 0 -"25310" 4 166 3 1 1 1 1 0 100 0 4 3 2511 8 2 0 1605 2 25 1 1 0 -"25311" 4 166 3 1 1 1 1 0 100 0 3 0 1678 2 1 0 2156 5 95 3 1 0 -"25312" 4 166 3 1 1 1 2 1 100 440 100 125 3395 2 9 0 1279 2 25 25 0 1 -"25313" 4 166 3 1 1 1 2 1 100 440 125 131 2839 4 10 0 942 1 5 6 0 1 -"25314" 4 166 3 1 1 1 2 1 100 440 131 164 3070 3 1 1 1521 2 25 33 1 1 -"25315" 4 166 3 1 1 1 2 1 100 440 164 246 5030 8 6 1 452 3 50 82 1 1 -"25316" 4 166 3 1 1 1 2 1 100 440 246 234 2372 5 7 1 857 1 5 12 0 0 -"25317" 4 166 3 1 1 1 2 1 100 440 234 246 2349 6 8 0 355 1 5 12 0 1 -"25318" 4 166 3 1 1 1 2 1 100 440 246 308 3303 7 5 1 287 2 25 62 1 1 -"25319" 4 166 3 1 1 1 2 1 100 440 308 293 1614 1 10 1 567 1 5 15 0 0 -"25320" 4 166 3 1 1 1 2 1 100 440 293 440 1842 9 3 1 1024 3 50 147 1 1 -"25321" 4 166 3 1 1 1 3 1 100 26 100 95 2617 3 5 1 386 1 5 5 0 0 -"25322" 4 166 3 1 1 1 3 1 100 26 95 71 3988 8 9 1 420 2 25 24 0 0 -"25323" 4 166 3 1 1 1 3 1 100 26 71 67 1949 2 4 1 1630 1 5 4 0 0 -"25324" 4 166 3 1 1 1 3 1 100 26 67 64 2213 6 3 0 938 1 5 3 1 0 -"25325" 4 166 3 1 1 1 3 1 100 26 64 61 1147 7 6 0 362 1 5 3 1 0 -"25326" 4 166 3 1 1 1 3 1 100 26 61 15 918 4 2 0 680 4 75 46 1 0 -"25327" 4 166 3 1 1 1 3 
1 100 26 15 23 1949 1 8 0 214 3 50 8 0 1 -"25328" 4 166 3 1 1 1 3 1 100 26 23 17 2161 5 7 1 248 2 25 6 0 0 -"25329" 4 166 3 1 1 1 3 1 100 26 17 26 1722 9 5 1 293 3 50 9 1 1 -"25330" 4 166 3 1 1 1 4 1 100 504 100 175 2828 8 7 1 596 4 75 75 1 1 -"25331" 4 166 3 1 1 1 4 1 100 504 175 184 3021 3 10 0 834 1 5 9 0 1 -"25332" 4 166 3 1 1 1 4 1 100 504 184 193 2480 7 9 0 351 1 5 9 0 1 -"25333" 4 166 3 1 1 1 4 1 100 504 193 338 1660 9 1 1 698 4 75 145 1 1 -"25334" 4 166 3 1 1 1 4 1 100 504 338 321 1895 2 3 1 418 1 5 17 0 0 -"25335" 4 166 3 1 1 1 4 1 100 504 321 337 2595 1 8 0 1884 1 5 16 0 1 -"25336" 4 166 3 1 1 1 4 1 100 504 337 354 3384 5 4 1 333 1 5 17 1 1 -"25337" 4 166 3 1 1 1 4 1 100 504 354 336 3786 4 2 0 356 1 5 18 1 0 -"25338" 4 166 3 1 1 1 4 1 100 504 336 504 3296 6 3 1 378 3 50 168 1 1 -"25339" 4 166 4 0 1 0 1 1 100 14 100 195 7122 2 7 0 4021 1 95 95 0 1 -"25340" 4 166 4 0 1 0 1 1 100 14 195 146 7202 3 7 1 978 4 25 49 0 0 -"25341" 4 166 4 0 1 0 1 1 100 14 146 285 1632 2 8 0 352 1 95 139 0 1 -"25342" 4 166 4 0 1 0 1 1 100 14 285 14 1356 8 9 1 329 1 95 271 0 0 -"25343" 4 166 5 1 1 0 1 0 100 1 100 195 2769 1 3 0 515 1 95 95 0 1 -"25344" 4 166 5 1 1 0 1 0 100 1 195 205 2936 6 5 1 1666 5 5 10 1 1 -"25345" 4 166 5 1 1 0 1 0 100 1 205 400 1632 2 8 0 384 1 95 195 0 1 -"25346" 4 166 5 1 1 0 1 0 100 1 400 20 2707 8 9 1 376 1 95 380 0 0 -"25347" 4 166 5 1 1 0 1 0 100 1 20 1 5260 3 4 1 375 1 95 19 0 0 -"25348" 4 166 5 1 1 0 2 1 100 4114 100 195 2069 8 1 1 288 1 95 95 1 1 -"25349" 4 166 5 1 1 0 2 1 100 4114 195 380 1399 6 2 1 259 1 95 185 1 1 -"25350" 4 166 5 1 1 0 2 1 100 4114 380 285 2084 7 9 1 1311 4 25 95 0 0 -"25351" 4 166 5 1 1 0 2 1 100 4114 285 556 2413 2 10 0 440 1 95 271 0 1 -"25352" 4 166 5 1 1 0 2 1 100 4114 556 1084 2392 5 3 1 348 1 95 528 1 1 -"25353" 4 166 5 1 1 0 2 1 100 4114 1084 1030 3913 4 2 0 853 5 5 54 1 0 -"25354" 4 166 5 1 1 0 2 1 100 4114 1030 2009 1738 3 5 0 293 1 95 979 0 1 -"25355" 4 166 5 1 1 0 2 1 100 4114 2009 3918 5444 9 4 1 495 1 95 1909 1 1 -"25356" 4 166 5 1 1 0 2 1 100 4114 3918 4114 5589 1 7 0 1569 5 5 196 0 1 -"25357" 4 166 5 1 1 0 3 0 100 1 100 195 4376 7 5 1 677 1 95 95 1 1 -"25358" 4 166 5 1 1 0 3 0 100 1 195 10 6034 2 1 0 505 1 95 185 1 0 -"25359" 4 166 5 1 1 0 3 0 100 1 10 20 2837 8 6 1 372 1 95 10 1 1 -"25360" 4 166 5 1 1 0 3 0 100 1 20 1 1679 4 7 1 205 1 95 19 0 0 -"25361" 4 166 5 1 1 0 4 1 100 515 100 195 2032 2 3 0 236 1 95 95 0 1 -"25362" 4 166 5 1 1 0 4 1 100 515 195 380 2462 7 4 1 238 1 95 185 1 1 -"25363" 4 166 5 1 1 0 4 1 100 515 380 570 3200 3 1 1 965 3 50 190 1 1 -"25364" 4 166 5 1 1 0 4 1 100 515 570 1112 1932 1 9 0 310 1 95 542 0 1 -"25365" 4 166 5 1 1 0 4 1 100 515 1112 2168 1493 8 7 1 779 1 95 1056 1 1 -"25366" 4 166 5 1 1 0 4 1 100 515 2168 4228 4075 9 2 1 751 1 95 2060 1 1 -"25367" 4 166 5 1 1 0 4 1 100 515 4228 5285 3200 5 6 0 2174 4 25 1057 0 1 -"25368" 4 166 5 1 1 0 4 1 100 515 5285 264 2299 6 8 1 396 1 95 5021 0 0 -"25369" 4 166 5 1 1 0 4 1 100 515 264 515 3558 4 7 0 298 1 95 251 0 1 -"25370" 4 168 2 0 1 1 1 1 100 42 100 150 5594 8 3 1 1318 3 50 50 1 1 -"25371" 4 168 2 0 1 1 1 1 100 42 150 225 6653 3 7 0 830 3 50 75 0 1 -"25372" 4 168 2 0 1 1 1 1 100 42 225 169 2796 8 2 0 693 2 25 56 1 0 -"25373" 4 168 2 0 1 1 1 1 100 42 169 42 3097 2 1 0 980 4 75 127 1 0 -"25374" 4 168 3 1 1 1 1 1 100 211 100 50 9966 9 7 0 3354 3 50 50 1 0 -"25375" 4 168 3 1 1 1 1 1 100 211 50 98 4392 4 8 0 997 5 95 48 0 1 -"25376" 4 168 3 1 1 1 1 1 100 211 98 191 1883 8 2 1 1311 5 95 93 1 1 -"25377" 4 168 3 1 1 1 1 1 100 211 191 95 3630 2 1 0 1055 3 50 96 1 0 -"25378" 4 
168 3 1 1 1 1 1 100 211 95 143 2186 7 6 1 1928 3 50 48 1 1 -"25379" 4 168 3 1 1 1 1 1 100 211 143 215 1306 5 3 1 1418 3 50 72 1 1 -"25380" 4 168 3 1 1 1 1 1 100 211 215 161 2667 3 6 1 1426 2 25 54 0 0 -"25381" 4 168 3 1 1 1 1 1 100 211 161 282 1363 6 9 0 544 4 75 121 0 1 -"25382" 4 168 3 1 1 1 1 1 100 211 282 211 2708 1 4 1 1081 2 25 71 0 0 -"25383" 4 168 3 1 1 1 2 1 100 8 100 75 3748 2 9 1 1840 2 25 25 0 0 -"25384" 4 168 3 1 1 1 2 1 100 8 75 113 1725 4 10 0 905 3 50 38 0 1 -"25385" 4 168 3 1 1 1 2 1 100 8 113 85 1132 3 1 0 1692 2 25 28 1 0 -"25386" 4 168 3 1 1 1 2 1 100 8 85 21 1137 8 6 0 1585 4 75 64 1 0 -"25387" 4 168 3 1 1 1 2 1 100 8 21 37 3162 5 7 0 1418 4 75 16 0 1 -"25388" 4 168 3 1 1 1 2 1 100 8 37 18 1956 6 8 1 1253 3 50 19 0 0 -"25389" 4 168 3 1 1 1 2 1 100 8 18 35 1127 7 5 1 735 5 95 17 1 1 -"25390" 4 168 3 1 1 1 2 1 100 8 35 17 1242 1 10 1 1229 3 50 18 0 0 -"25391" 4 168 3 1 1 1 2 1 100 8 17 8 1095 9 3 0 1465 3 50 9 1 0 -"25392" 4 168 3 1 1 1 3 1 100 215 100 150 3091 3 5 0 710 3 50 50 0 1 -"25393" 4 168 3 1 1 1 3 1 100 215 150 112 1721 8 9 1 525 2 25 38 0 0 -"25394" 4 168 3 1 1 1 3 1 100 215 112 56 1209 2 4 1 1245 3 50 56 0 0 -"25395" 4 168 3 1 1 1 3 1 100 215 56 84 1415 6 3 1 522 3 50 28 1 1 -"25396" 4 168 3 1 1 1 3 1 100 215 84 105 1462 7 6 1 1102 2 25 21 1 1 -"25397" 4 168 3 1 1 1 3 1 100 215 105 131 2007 4 2 1 989 2 25 26 1 1 -"25398" 4 168 3 1 1 1 3 1 100 215 131 164 2267 1 8 0 1213 2 25 33 0 1 -"25399" 4 168 3 1 1 1 3 1 100 215 164 205 2744 5 7 0 852 2 25 41 0 1 -"25400" 4 168 3 1 1 1 3 1 100 215 205 215 3249 9 5 1 1748 1 5 10 1 1 -"25401" 4 168 3 1 1 1 4 1 100 81 100 125 2212 8 7 1 1033 2 25 25 1 1 -"25402" 4 168 3 1 1 1 4 1 100 81 125 94 1477 3 10 1 1020 2 25 31 0 0 -"25403" 4 168 3 1 1 1 4 1 100 81 94 118 825 7 9 0 872 2 25 24 0 1 -"25404" 4 168 3 1 1 1 4 1 100 81 118 112 1020 9 1 0 825 1 5 6 1 0 -"25405" 4 168 3 1 1 1 4 1 100 81 112 196 686 2 3 0 564 4 75 84 0 1 -"25406" 4 168 3 1 1 1 4 1 100 81 196 206 974 1 8 0 1718 1 5 10 0 1 -"25407" 4 168 3 1 1 1 4 1 100 81 206 309 1313 5 4 1 1688 3 50 103 1 1 -"25408" 4 168 3 1 1 1 4 1 100 81 309 324 2013 4 2 1 1283 1 5 15 1 1 -"25409" 4 168 3 1 1 1 4 1 100 81 324 81 1083 6 3 0 621 4 75 243 1 0 -"25410" 4 168 4 0 1 0 1 1 100 143 100 150 6658 2 7 0 531 3 50 50 0 1 -"25411" 4 168 4 0 1 0 1 1 100 143 150 293 1887 3 7 0 1187 1 95 143 0 1 -"25412" 4 168 4 0 1 0 1 1 100 143 293 571 1216 2 8 0 1044 1 95 278 0 1 -"25413" 4 168 4 0 1 0 1 1 100 143 571 143 1327 8 9 1 1150 2 75 428 0 0 -"25414" 4 168 5 1 1 0 1 0 100 0 100 195 1225 1 3 0 1372 1 95 95 0 1 -"25415" 4 168 5 1 1 0 1 0 100 0 195 10 848 6 5 0 1421 1 95 185 1 0 -"25416" 4 168 5 1 1 0 1 0 100 0 10 0 919 2 8 1 720 1 95 10 0 0 -"25417" 4 168 5 1 1 0 2 0 100 0 100 5 3210 8 1 0 1575 1 95 95 1 0 -"25418" 4 168 5 1 1 0 2 0 100 0 5 10 1650 6 2 1 889 1 95 5 1 1 -"25419" 4 168 5 1 1 0 2 0 100 0 10 0 1195 7 9 1 2584 1 95 10 0 0 -"25420" 4 168 5 1 1 0 3 0 100 0 100 5 2241 7 5 0 2293 1 95 95 1 0 -"25421" 4 168 5 1 1 0 3 0 100 0 5 0 1453 2 1 0 605 1 95 5 1 0 -"25422" 4 168 5 1 1 0 4 1 100 16 100 25 1303 2 3 1 1231 2 75 75 0 0 -"25423" 4 168 5 1 1 0 4 1 100 16 25 6 1409 7 4 0 884 2 75 19 1 0 -"25424" 4 168 5 1 1 0 4 1 100 16 6 12 2095 3 1 1 1297 1 95 6 1 1 -"25425" 4 168 5 1 1 0 4 1 100 16 12 21 1936 1 9 0 818 2 75 9 0 1 -"25426" 4 168 5 1 1 0 4 1 100 16 21 41 1489 8 7 1 1599 1 95 20 1 1 -"25427" 4 168 5 1 1 0 4 1 100 16 41 80 1648 9 2 1 1352 1 95 39 1 1 -"25428" 4 168 5 1 1 0 4 1 100 16 80 4 932 5 6 1 1253 1 95 76 0 0 -"25429" 4 168 5 1 1 0 4 1 100 16 4 8 1023 6 8 0 1020 1 95 4 0 1 -"25430" 4 
168 5 1 1 0 4 1 100 16 8 16 761 4 7 0 778 1 95 8 0 1 -"25431" 4 170 2 0 1 1 1 1 100 117 100 150 3907 8 3 1 791 3 50 50 1 1 -"25432" 4 170 2 0 1 1 1 1 100 117 150 188 17070 3 7 0 4175 2 25 38 0 1 -"25433" 4 170 2 0 1 1 1 1 100 117 188 235 2174 8 2 1 149 2 25 47 1 1 -"25434" 4 170 2 0 1 1 1 1 100 117 235 117 2587 2 1 0 566 3 50 118 1 0 -"25435" 4 170 3 1 1 1 1 1 100 99 100 150 9071 9 7 1 1741 3 50 50 1 1 -"25436" 4 170 3 1 1 1 1 1 100 99 150 225 1912 4 8 0 923 3 50 75 0 1 -"25437" 4 170 3 1 1 1 1 1 100 99 225 338 3291 8 2 1 1109 3 50 113 1 1 -"25438" 4 170 3 1 1 1 1 1 100 99 338 169 1997 2 1 0 1084 3 50 169 1 0 -"25439" 4 170 3 1 1 1 1 1 100 99 169 211 1381 7 6 1 1858 2 25 42 1 1 -"25440" 4 170 3 1 1 1 1 1 100 99 211 105 1918 5 3 0 553 3 50 106 1 0 -"25441" 4 170 3 1 1 1 1 1 100 99 105 158 1060 3 6 0 919 3 50 53 0 1 -"25442" 4 170 3 1 1 1 1 1 100 99 158 79 1753 6 9 1 419 3 50 79 0 0 -"25443" 4 170 3 1 1 1 1 1 100 99 79 99 1212 1 4 0 2952 2 25 20 0 1 -"25444" 4 170 3 1 1 1 2 1 100 124 100 150 2255 2 9 0 482 3 50 50 0 1 -"25445" 4 170 3 1 1 1 2 1 100 124 150 225 1560 4 10 0 516 3 50 75 0 1 -"25446" 4 170 3 1 1 1 2 1 100 124 225 112 1729 3 1 0 524 3 50 113 1 0 -"25447" 4 170 3 1 1 1 2 1 100 124 112 168 1728 8 6 1 548 3 50 56 1 1 -"25448" 4 170 3 1 1 1 2 1 100 124 168 126 1103 5 7 1 1128 2 25 42 0 0 -"25449" 4 170 3 1 1 1 2 1 100 124 126 63 1712 6 8 1 556 3 50 63 0 0 -"25450" 4 170 3 1 1 1 2 1 100 124 63 79 1463 7 5 1 847 2 25 16 1 1 -"25451" 4 170 3 1 1 1 2 1 100 124 79 99 1108 1 10 0 725 2 25 20 0 1 -"25452" 4 170 3 1 1 1 2 1 100 124 99 124 2015 9 3 1 621 2 25 25 1 1 -"25453" 4 170 3 1 1 1 3 1 100 163 100 125 1392 3 5 0 1292 2 25 25 0 1 -"25454" 4 170 3 1 1 1 3 1 100 163 125 94 1579 8 9 1 1574 2 25 31 0 0 -"25455" 4 170 3 1 1 1 3 1 100 163 94 118 1400 2 4 0 924 2 25 24 0 1 -"25456" 4 170 3 1 1 1 3 1 100 163 118 148 1405 6 3 1 1127 2 25 30 1 1 -"25457" 4 170 3 1 1 1 3 1 100 163 148 185 1287 7 6 1 935 2 25 37 1 1 -"25458" 4 170 3 1 1 1 3 1 100 163 185 139 1744 4 2 0 682 2 25 46 1 0 -"25459" 4 170 3 1 1 1 3 1 100 163 139 174 1415 1 8 0 514 2 25 35 0 1 -"25460" 4 170 3 1 1 1 3 1 100 163 174 130 1519 5 7 1 741 2 25 44 0 0 -"25461" 4 170 3 1 1 1 3 1 100 163 130 163 1678 9 5 1 372 2 25 33 1 1 -"25462" 4 170 3 1 1 1 4 1 100 142 100 105 1742 8 7 1 753 1 5 5 1 1 -"25463" 4 170 3 1 1 1 4 1 100 142 105 131 1202 3 10 0 678 2 25 26 0 1 -"25464" 4 170 3 1 1 1 4 1 100 142 131 98 883 7 9 1 559 2 25 33 0 0 -"25465" 4 170 3 1 1 1 4 1 100 142 98 103 2017 9 1 1 535 1 5 5 1 1 -"25466" 4 170 3 1 1 1 4 1 100 142 103 129 1723 2 3 0 383 2 25 26 0 1 -"25467" 4 170 3 1 1 1 4 1 100 142 129 135 1114 1 8 0 939 1 5 6 0 1 -"25468" 4 170 3 1 1 1 4 1 100 142 135 142 1572 5 4 1 1110 1 5 7 1 1 -"25469" 4 170 3 1 1 1 4 1 100 142 142 135 1886 4 2 0 658 1 5 7 1 0 -"25470" 4 170 3 1 1 1 4 1 100 142 135 142 1720 6 3 1 1020 1 5 7 1 1 -"25471" 4 170 4 0 1 0 1 1 100 29 100 150 5209 2 7 0 711 3 50 50 0 1 -"25472" 4 170 4 0 1 0 1 1 100 29 150 293 3667 3 7 0 3314 1 95 143 0 1 -"25473" 4 170 4 0 1 0 1 1 100 29 293 571 1329 2 8 0 1392 1 95 278 0 1 -"25474" 4 170 4 0 1 0 1 1 100 29 571 29 1443 8 9 1 876 1 95 542 0 0 -"25475" 4 170 5 1 1 0 1 0 100 0 100 195 1639 1 3 0 895 1 95 95 0 1 -"25476" 4 170 5 1 1 0 1 0 100 0 195 380 2369 6 5 1 446 1 95 185 1 1 -"25477" 4 170 5 1 1 0 1 0 100 0 380 741 1310 2 8 0 293 1 95 361 0 1 -"25478" 4 170 5 1 1 0 1 0 100 0 741 37 1109 8 9 1 350 1 95 704 0 0 -"25479" 4 170 5 1 1 0 1 0 100 0 37 72 1411 3 4 0 501 1 95 35 0 1 -"25480" 4 170 5 1 1 0 1 0 100 0 72 4 1819 5 7 1 282 1 95 68 0 0 -"25481" 4 170 5 1 1 0 1 0 100 
0 4 8 1266 7 4 1 1188 1 95 4 1 1 -"25482" 4 170 5 1 1 0 1 0 100 0 8 0 1241 4 1 0 483 1 95 8 1 0 -"25483" 4 170 5 1 1 0 2 0 100 0 100 195 7615 8 1 1 1082 1 95 95 1 1 -"25484" 4 170 5 1 1 0 2 0 100 0 195 380 1196 6 2 1 625 1 95 185 1 1 -"25485" 4 170 5 1 1 0 2 0 100 0 380 19 936 7 9 1 4329 1 95 361 0 0 -"25486" 4 170 5 1 1 0 2 0 100 0 19 37 1160 2 10 0 463 1 95 18 0 1 -"25487" 4 170 5 1 1 0 2 0 100 0 37 2 1182 5 3 0 1564 1 95 35 1 0 -"25488" 4 170 5 1 1 0 2 0 100 0 2 0 1454 4 2 0 734 1 95 2 1 0 -"25489" 4 170 5 1 1 0 3 1 100 31 100 195 1149 7 5 1 476 1 95 95 1 1 -"25490" 4 170 5 1 1 0 3 1 100 31 195 10 1403 2 1 0 541 1 95 185 1 0 -"25491" 4 170 5 1 1 0 3 1 100 31 10 20 1008 8 6 1 526 1 95 10 1 1 -"25492" 4 170 5 1 1 0 3 1 100 31 20 39 682 4 7 0 495 1 95 19 0 1 -"25493" 4 170 5 1 1 0 3 1 100 31 39 76 1125 3 10 0 381 1 95 37 0 1 -"25494" 4 170 5 1 1 0 3 1 100 31 76 4 912 6 8 1 411 1 95 72 0 0 -"25495" 4 170 5 1 1 0 3 1 100 31 4 8 980 9 2 1 420 1 95 4 1 1 -"25496" 4 170 5 1 1 0 3 1 100 31 8 16 1093 5 3 1 454 1 95 8 1 1 -"25497" 4 170 5 1 1 0 3 1 100 31 16 31 1237 1 10 0 504 1 95 15 0 1 -"25498" 4 170 5 1 1 0 4 1 100 119 100 195 2614 2 3 0 1359 1 95 95 0 1 -"25499" 4 170 5 1 1 0 4 1 100 119 195 380 884 7 4 1 383 1 95 185 1 1 -"25500" 4 170 5 1 1 0 4 1 100 119 380 19 818 3 1 0 465 1 95 361 1 0 -"25501" 4 170 5 1 1 0 4 1 100 119 19 37 840 1 9 0 326 1 95 18 0 1 -"25502" 4 170 5 1 1 0 4 1 100 119 37 72 899 8 7 1 342 1 95 35 1 1 -"25503" 4 170 5 1 1 0 4 1 100 119 72 140 1283 9 2 1 356 1 95 68 1 1 -"25504" 4 170 5 1 1 0 4 1 100 119 140 245 2856 5 6 0 558 2 75 105 0 1 -"25505" 4 170 5 1 1 0 4 1 100 119 245 61 1264 6 8 1 530 2 75 184 0 0 -"25506" 4 170 5 1 1 0 4 1 100 119 61 119 1494 4 7 0 508 1 95 58 0 1 -"25507" 4 173 2 0 1 1 1 1 100 211 100 150 10245 8 3 1 3755 3 50 50 1 1 -"25508" 4 173 2 0 1 1 1 1 100 211 150 188 2950 3 7 0 1863 2 25 38 0 1 -"25509" 4 173 2 0 1 1 1 1 100 211 188 282 1932 8 2 1 1222 3 50 94 1 1 -"25510" 4 173 2 0 1 1 1 1 100 211 282 211 5353 2 1 0 2320 2 25 71 1 0 -"25511" 4 173 3 1 1 1 1 1 100 422 100 195 2059 9 7 1 1671 5 95 95 1 1 -"25512" 4 173 3 1 1 1 1 1 100 422 195 244 3540 4 8 0 1305 2 25 49 0 1 -"25513" 4 173 3 1 1 1 1 1 100 422 244 366 4649 8 2 1 1065 3 50 122 1 1 -"25514" 4 173 3 1 1 1 1 1 100 422 366 274 2116 2 1 0 868 2 25 92 1 0 -"25515" 4 173 3 1 1 1 1 1 100 422 274 343 2560 7 6 1 1500 2 25 69 1 1 -"25516" 4 173 3 1 1 1 1 1 100 422 343 257 2698 5 3 0 1607 2 25 86 1 0 -"25517" 4 173 3 1 1 1 1 1 100 422 257 321 2272 3 6 0 1490 2 25 64 0 1 -"25518" 4 173 3 1 1 1 1 1 100 422 321 241 2505 6 9 1 1983 2 25 80 0 0 -"25519" 4 173 3 1 1 1 1 1 100 422 241 422 2214 1 4 0 1482 4 75 181 0 1 -"25520" 4 173 3 1 1 1 2 1 100 755 100 175 4245 2 9 0 833 4 75 75 0 1 -"25521" 4 173 3 1 1 1 2 1 100 755 175 184 3823 4 10 0 878 1 5 9 0 1 -"25522" 4 173 3 1 1 1 2 1 100 755 184 138 2569 3 1 0 701 2 25 46 1 0 -"25523" 4 173 3 1 1 1 2 1 100 755 138 242 1913 8 6 1 1484 4 75 104 1 1 -"25524" 4 173 3 1 1 1 2 1 100 755 242 230 3314 5 7 1 2499 1 5 12 0 0 -"25525" 4 173 3 1 1 1 2 1 100 755 230 172 2953 6 8 1 1162 2 25 58 0 0 -"25526" 4 173 3 1 1 1 2 1 100 755 172 258 1712 7 5 1 1644 3 50 86 1 1 -"25527" 4 173 3 1 1 1 2 1 100 755 258 503 1988 1 10 0 1188 5 95 245 0 1 -"25528" 4 173 3 1 1 1 2 1 100 755 503 755 1808 9 3 1 1048 3 50 252 1 1 -"25529" 4 173 3 1 1 1 3 1 100 425 100 125 4057 3 5 0 574 2 25 25 0 1 -"25530" 4 173 3 1 1 1 3 1 100 425 125 31 2007 8 9 1 797 4 75 94 0 0 -"25531" 4 173 3 1 1 1 3 1 100 425 31 60 2317 2 4 0 0 5 95 29 0 1 -"25532" 4 173 3 1 1 1 3 1 100 425 60 105 2532 6 3 1 1154 4 75 45 
1 1 -"25533" 4 173 3 1 1 1 3 1 100 425 105 158 2240 7 6 1 1516 3 50 53 1 1 -"25534" 4 173 3 1 1 1 3 1 100 425 158 118 2793 4 2 0 1154 2 25 40 1 0 -"25535" 4 173 3 1 1 1 3 1 100 425 118 230 1776 1 8 0 1954 5 95 112 0 1 -"25536" 4 173 3 1 1 1 3 1 100 425 230 218 2514 5 7 1 944 1 5 12 0 0 -"25537" 4 173 3 1 1 1 3 1 100 425 218 425 1584 9 5 1 1939 5 95 207 1 1 -"25538" 4 173 3 1 1 1 4 1 100 1373 100 175 2501 8 7 1 1380 4 75 75 1 1 -"25539" 4 173 3 1 1 1 4 1 100 1373 175 263 1911 3 10 0 826 3 50 88 0 1 -"25540" 4 173 3 1 1 1 4 1 100 1373 263 197 1759 7 9 1 2057 2 25 66 0 0 -"25541" 4 173 3 1 1 1 4 1 100 1373 197 384 1485 9 1 1 1941 5 95 187 1 1 -"25542" 4 173 3 1 1 1 4 1 100 1373 384 749 1838 2 3 0 1013 5 95 365 0 1 -"25543" 4 173 3 1 1 1 4 1 100 1373 749 1311 1706 1 8 0 1510 4 75 562 0 1 -"25544" 4 173 3 1 1 1 4 1 100 1373 1311 1377 2340 5 4 1 586 1 5 66 1 1 -"25545" 4 173 3 1 1 1 4 1 100 1373 1377 1308 3060 4 2 0 984 1 5 69 1 0 -"25546" 4 173 3 1 1 1 4 1 100 1373 1308 1373 1767 6 3 1 616 1 5 65 1 1 -"25547" 4 173 4 0 1 0 1 1 100 29 100 195 2620 2 7 0 1221 1 95 95 0 1 -"25548" 4 173 4 0 1 0 1 1 100 29 195 293 2096 3 7 0 1290 3 50 98 0 1 -"25549" 4 173 4 0 1 0 1 1 100 29 293 571 2786 2 8 0 787 1 95 278 0 1 -"25550" 4 173 4 0 1 0 1 1 100 29 571 29 1395 8 9 1 975 1 95 542 0 0 -"25551" 4 173 5 1 1 0 1 1 100 527 100 195 1700 1 3 0 633 1 95 95 0 1 -"25552" 4 173 5 1 1 0 1 1 100 527 195 293 1440 6 5 1 1049 3 50 98 1 1 -"25553" 4 173 5 1 1 0 1 1 100 527 293 571 1476 2 8 0 860 1 95 278 0 1 -"25554" 4 173 5 1 1 0 1 1 100 527 571 29 1456 8 9 1 616 1 95 542 0 0 -"25555" 4 173 5 1 1 0 1 1 100 527 29 57 1320 3 4 0 519 1 95 28 0 1 -"25556" 4 173 5 1 1 0 1 1 100 527 57 111 1963 5 7 0 500 1 95 54 0 1 -"25557" 4 173 5 1 1 0 1 1 100 527 111 216 1457 7 4 1 497 1 95 105 1 1 -"25558" 4 173 5 1 1 0 1 1 100 527 216 270 2951 4 1 1 1616 4 25 54 1 1 -"25559" 4 173 5 1 1 0 1 1 100 527 270 527 1545 9 6 1 477 1 95 257 1 1 -"25560" 4 173 5 1 1 0 2 0 100 1 100 195 1550 8 1 1 515 1 95 95 1 1 -"25561" 4 173 5 1 1 0 2 0 100 1 195 244 2167 6 2 1 1030 4 25 49 1 1 -"25562" 4 173 5 1 1 0 2 0 100 1 244 12 1451 7 9 1 475 1 95 232 0 0 -"25563" 4 173 5 1 1 0 2 0 100 1 12 23 2626 2 10 0 523 1 95 11 0 1 -"25564" 4 173 5 1 1 0 2 0 100 1 23 1 2063 5 3 0 530 1 95 22 1 0 -"25565" 4 173 5 1 1 0 3 0 100 0 100 195 1631 7 5 1 1327 1 95 95 1 1 -"25566" 4 173 5 1 1 0 3 0 100 0 195 10 1445 2 1 0 590 1 95 185 1 0 -"25567" 4 173 5 1 1 0 3 0 100 0 10 20 1303 8 6 1 446 1 95 10 1 1 -"25568" 4 173 5 1 1 0 3 0 100 0 20 39 1390 4 7 0 778 1 95 19 0 1 -"25569" 4 173 5 1 1 0 3 0 100 0 39 76 1490 3 10 0 856 1 95 37 0 1 -"25570" 4 173 5 1 1 0 3 0 100 0 76 4 1636 6 8 1 700 1 95 72 0 0 -"25571" 4 173 5 1 1 0 3 0 100 0 4 8 1816 9 2 1 783 1 95 4 1 1 -"25572" 4 173 5 1 1 0 3 0 100 0 8 0 1727 5 3 0 498 1 95 8 1 0 -"25573" 4 173 5 1 1 0 4 1 100 205 100 195 1925 2 3 0 631 1 95 95 0 1 -"25574" 4 173 5 1 1 0 4 1 100 205 195 380 1726 7 4 1 945 1 95 185 1 1 -"25575" 4 173 5 1 1 0 4 1 100 205 380 19 1649 3 1 0 1081 1 95 361 1 0 -"25576" 4 173 5 1 1 0 4 1 100 205 19 37 1818 1 9 0 618 1 95 18 0 1 -"25577" 4 173 5 1 1 0 4 1 100 205 37 72 2066 8 7 1 1053 1 95 35 1 1 -"25578" 4 173 5 1 1 0 4 1 100 205 72 140 2144 9 2 1 456 1 95 68 1 1 -"25579" 4 173 5 1 1 0 4 1 100 205 140 210 2047 5 6 0 2787 3 50 70 0 1 -"25580" 4 173 5 1 1 0 4 1 100 205 210 105 2228 6 8 1 1815 3 50 105 0 0 -"25581" 4 173 5 1 1 0 4 1 100 205 105 205 2038 4 7 0 1307 1 95 100 0 1 -"25582" 4 178 2 0 1 1 1 1 100 146 100 125 8390 8 3 1 1940 2 25 25 1 1 -"25583" 4 178 2 0 1 1 1 1 100 146 125 156 16957 3 7 0 1124 2 25 
31 0 1 -"25584" 4 178 2 0 1 1 1 1 100 146 156 195 1332 8 2 1 1178 2 25 39 1 1 -"25585" 4 178 2 0 1 1 1 1 100 146 195 146 1415 2 1 0 1418 2 25 49 1 0 -"25586" 4 178 3 1 1 1 1 1 100 150 100 195 4083 9 7 1 1055 5 95 95 1 1 -"25587" 4 178 3 1 1 1 1 1 100 150 195 146 4403 4 8 1 1135 2 25 49 0 0 -"25588" 4 178 3 1 1 1 1 1 100 150 146 219 2302 8 2 1 815 3 50 73 1 1 -"25589" 4 178 3 1 1 1 1 1 100 150 219 109 1701 2 1 0 786 3 50 110 1 0 -"25590" 4 178 3 1 1 1 1 1 100 150 109 164 2626 7 6 1 636 3 50 55 1 1 -"25591" 4 178 3 1 1 1 1 1 100 150 164 123 4603 5 3 0 1726 2 25 41 1 0 -"25592" 4 178 3 1 1 1 1 1 100 150 123 154 1553 3 6 0 1732 2 25 31 0 1 -"25593" 4 178 3 1 1 1 1 1 100 150 154 77 2655 6 9 1 551 3 50 77 0 0 -"25594" 4 178 3 1 1 1 1 1 100 150 77 150 3258 1 4 0 0 5 95 73 0 1 -"25595" 4 178 3 1 1 1 2 1 100 402 100 150 2576 2 9 0 972 3 50 50 0 1 -"25596" 4 178 3 1 1 1 2 1 100 402 150 112 2475 4 10 1 803 2 25 38 0 0 -"25597" 4 178 3 1 1 1 2 1 100 402 112 56 2994 3 1 0 514 3 50 56 1 0 -"25598" 4 178 3 1 1 1 2 1 100 402 56 109 1679 8 6 1 765 5 95 53 1 1 -"25599" 4 178 3 1 1 1 2 1 100 402 109 136 5942 5 7 0 697 2 25 27 0 1 -"25600" 4 178 3 1 1 1 2 1 100 402 136 102 4525 6 8 1 756 2 25 34 0 0 -"25601" 4 178 3 1 1 1 2 1 100 402 102 153 2013 7 5 1 571 3 50 51 1 1 -"25602" 4 178 3 1 1 1 2 1 100 402 153 268 1588 1 10 0 623 4 75 115 0 1 -"25603" 4 178 3 1 1 1 2 1 100 402 268 402 1838 9 3 1 765 3 50 134 1 1 -"25604" 4 178 3 1 1 1 3 1 100 465 100 125 5228 3 5 0 665 2 25 25 0 1 -"25605" 4 178 3 1 1 1 3 1 100 465 125 94 2039 8 9 1 1436 2 25 31 0 0 -"25606" 4 178 3 1 1 1 3 1 100 465 94 141 1501 2 4 0 692 3 50 47 0 1 -"25607" 4 178 3 1 1 1 3 1 100 465 141 176 3576 6 3 1 755 2 25 35 1 1 -"25608" 4 178 3 1 1 1 3 1 100 465 176 220 2892 7 6 1 1064 2 25 44 1 1 -"25609" 4 178 3 1 1 1 3 1 100 465 220 165 2219 4 2 0 687 2 25 55 1 0 -"25610" 4 178 3 1 1 1 3 1 100 465 165 248 2035 1 8 0 1697 3 50 83 0 1 -"25611" 4 178 3 1 1 1 3 1 100 465 248 310 4757 5 7 0 408 2 25 62 0 1 -"25612" 4 178 3 1 1 1 3 1 100 465 310 465 1458 9 5 1 562 3 50 155 1 1 -"25613" 4 178 3 1 1 1 4 1 100 464 100 150 1517 8 7 1 659 3 50 50 1 1 -"25614" 4 178 3 1 1 1 4 1 100 464 150 188 1465 3 10 0 649 2 25 38 0 1 -"25615" 4 178 3 1 1 1 4 1 100 464 188 179 1351 7 9 1 908 1 5 9 0 0 -"25616" 4 178 3 1 1 1 4 1 100 464 179 313 1342 9 1 1 684 4 75 134 1 1 -"25617" 4 178 3 1 1 1 4 1 100 464 313 391 1761 2 3 0 1867 2 25 78 0 1 -"25618" 4 178 3 1 1 1 4 1 100 464 391 489 1895 1 8 0 1738 2 25 98 0 1 -"25619" 4 178 3 1 1 1 4 1 100 464 489 465 3157 5 4 0 716 1 5 24 1 0 -"25620" 4 178 3 1 1 1 4 1 100 464 465 442 1912 4 2 0 618 1 5 23 1 0 -"25621" 4 178 3 1 1 1 4 1 100 464 442 464 1146 6 3 1 706 1 5 22 1 1 -"25622" 4 178 4 0 1 0 1 1 100 176 100 150 5078 2 7 0 515 3 50 50 0 1 -"25623" 4 178 4 0 1 0 1 1 100 176 150 188 1360 3 7 0 725 4 25 38 0 1 -"25624" 4 178 4 0 1 0 1 1 100 176 188 235 1207 2 8 0 972 4 25 47 0 1 -"25625" 4 178 4 0 1 0 1 1 100 176 235 176 1147 8 9 1 643 4 25 59 0 0 -"25626" 4 178 5 1 1 0 1 1 100 447 100 195 2430 1 3 0 1992 1 95 95 0 1 -"25627" 4 178 5 1 1 0 1 1 100 447 195 205 1694 6 5 1 580 5 5 10 1 1 -"25628" 4 178 5 1 1 0 1 1 100 447 205 256 1141 2 8 0 661 4 25 51 0 1 -"25629" 4 178 5 1 1 0 1 1 100 447 256 64 1363 8 9 1 2728 2 75 192 0 0 -"25630" 4 178 5 1 1 0 1 1 100 447 64 125 1795 3 4 0 835 1 95 61 0 1 -"25631" 4 178 5 1 1 0 1 1 100 447 125 244 2209 5 7 0 773 1 95 119 0 1 -"25632" 4 178 5 1 1 0 1 1 100 447 244 305 1189 7 4 1 643 4 25 61 1 1 -"25633" 4 178 5 1 1 0 1 1 100 447 305 229 1281 4 1 0 1506 4 25 76 1 0 -"25634" 4 178 5 1 1 0 1 1 100 447 229 
447 1184 9 6 1 557 1 95 218 1 1 -"25635" 4 178 5 1 1 0 2 1 100 681 100 125 1841 8 1 1 655 4 25 25 1 1 -"25636" 4 178 5 1 1 0 2 1 100 681 125 156 1773 6 2 1 758 4 25 31 1 1 -"25637" 4 178 5 1 1 0 2 1 100 681 156 78 911 7 9 1 2154 3 50 78 0 0 -"25638" 4 178 5 1 1 0 2 1 100 681 78 98 1665 2 10 0 328 4 25 20 0 1 -"25639" 4 178 5 1 1 0 2 1 100 681 98 123 2245 5 3 1 520 4 25 25 1 1 -"25640" 4 178 5 1 1 0 2 1 100 681 123 92 1324 4 2 0 1467 4 25 31 1 0 -"25641" 4 178 5 1 1 0 2 1 100 681 92 179 891 3 5 0 526 1 95 87 0 1 -"25642" 4 178 5 1 1 0 2 1 100 681 179 349 1282 9 4 1 483 1 95 170 1 1 -"25643" 4 178 5 1 1 0 2 1 100 681 349 681 945 1 7 0 498 1 95 332 0 1 -"25644" 4 178 5 1 1 0 3 1 100 952 100 125 2049 7 5 1 2197 4 25 25 1 1 -"25645" 4 178 5 1 1 0 3 1 100 952 125 94 1418 2 1 0 533 4 25 31 1 0 -"25646" 4 178 5 1 1 0 3 1 100 952 94 183 1329 8 6 1 472 1 95 89 1 1 -"25647" 4 178 5 1 1 0 3 1 100 952 183 357 1142 4 7 0 478 1 95 174 0 1 -"25648" 4 178 5 1 1 0 3 1 100 952 357 446 924 3 10 0 1374 4 25 89 0 1 -"25649" 4 178 5 1 1 0 3 1 100 952 446 334 1890 6 8 1 1638 4 25 112 0 0 -"25650" 4 178 5 1 1 0 3 1 100 952 334 651 1869 9 2 1 538 1 95 317 1 1 -"25651" 4 178 5 1 1 0 3 1 100 952 651 488 1534 5 3 0 4371 4 25 163 1 0 -"25652" 4 178 5 1 1 0 3 1 100 952 488 952 1440 1 10 0 833 1 95 464 0 1 -"25653" 4 178 5 1 1 0 4 1 100 4637 100 195 1240 2 3 0 835 1 95 95 0 1 -"25654" 4 178 5 1 1 0 4 1 100 4637 195 380 1289 7 4 1 737 1 95 185 1 1 -"25655" 4 178 5 1 1 0 4 1 100 4637 380 285 1683 3 1 0 2121 4 25 95 1 0 -"25656" 4 178 5 1 1 0 4 1 100 4637 285 556 1273 1 9 0 938 1 95 271 0 1 -"25657" 4 178 5 1 1 0 4 1 100 4637 556 1084 1603 8 7 1 637 1 95 528 1 1 -"25658" 4 178 5 1 1 0 4 1 100 4637 1084 2114 6284 9 2 1 559 1 95 1030 1 1 -"25659" 4 178 5 1 1 0 4 1 100 4637 2114 3171 2001 5 6 0 744 3 50 1057 0 1 -"25660" 4 178 5 1 1 0 4 1 100 4637 3171 2378 1882 6 8 1 1198 4 25 793 0 0 -"25661" 4 178 5 1 1 0 4 1 100 4637 2378 4637 1115 4 7 0 1083 1 95 2259 0 1 -"25662" 4 180 2 0 1 1 1 1 100 355 100 150 22293 8 3 1 1110 3 50 50 1 1 -"25663" 4 180 2 0 1 1 1 1 100 355 150 225 5230 3 7 0 1195 3 50 75 0 1 -"25664" 4 180 2 0 1 1 1 1 100 355 225 338 2887 8 2 1 884 3 50 113 1 1 -"25665" 4 180 2 0 1 1 1 1 100 355 338 355 3887 2 1 1 748 1 5 17 1 1 -"25666" 4 180 3 1 1 1 1 1 100 79 100 150 2599 9 7 1 1337 3 50 50 1 1 -"25667" 4 180 3 1 1 1 1 1 100 79 150 112 4871 4 8 1 845 2 25 38 0 0 -"25668" 4 180 3 1 1 1 1 1 100 79 112 168 2546 8 2 1 870 3 50 56 1 1 -"25669" 4 180 3 1 1 1 1 1 100 79 168 84 1770 2 1 0 1372 3 50 84 1 0 -"25670" 4 180 3 1 1 1 1 1 100 79 84 80 3048 7 6 0 1212 1 5 4 1 0 -"25671" 4 180 3 1 1 1 1 1 100 79 80 60 3215 5 3 0 1076 2 25 20 1 0 -"25672" 4 180 3 1 1 1 1 1 100 79 60 90 1694 3 6 0 699 3 50 30 0 1 -"25673" 4 180 3 1 1 1 1 1 100 79 90 45 1639 6 9 1 607 3 50 45 0 0 -"25674" 4 180 3 1 1 1 1 1 100 79 45 79 1327 1 4 0 1702 4 75 34 0 1 -"25675" 4 180 3 1 1 1 2 1 100 300 100 150 3153 2 9 0 721 3 50 50 0 1 -"25676" 4 180 3 1 1 1 2 1 100 300 150 112 6043 4 10 1 1793 2 25 38 0 0 -"25677" 4 180 3 1 1 1 2 1 100 300 112 56 1849 3 1 0 927 3 50 56 1 0 -"25678" 4 180 3 1 1 1 2 1 100 300 56 84 2208 8 6 1 1029 3 50 28 1 1 -"25679" 4 180 3 1 1 1 2 1 100 300 84 80 4377 5 7 1 862 1 5 4 0 0 -"25680" 4 180 3 1 1 1 2 1 100 300 80 76 2554 6 8 1 646 1 5 4 0 0 -"25681" 4 180 3 1 1 1 2 1 100 300 76 114 1452 7 5 1 501 3 50 38 1 1 -"25682" 4 180 3 1 1 1 2 1 100 300 114 200 2483 1 10 0 1002 4 75 86 0 1 -"25683" 4 180 3 1 1 1 2 1 100 300 200 300 2280 9 3 1 897 3 50 100 1 1 -"25684" 4 180 3 1 1 1 3 1 100 414 100 125 2623 3 5 0 840 2 25 25 0 1 
-"25685" 4 180 3 1 1 1 3 1 100 414 125 94 1573 8 9 1 1120 2 25 31 0 0 -"25686" 4 180 3 1 1 1 3 1 100 414 94 118 1456 2 4 0 840 2 25 24 0 1 -"25687" 4 180 3 1 1 1 3 1 100 414 118 112 1669 6 3 0 1323 1 5 6 1 0 -"25688" 4 180 3 1 1 1 3 1 100 414 112 140 1914 7 6 1 1293 2 25 28 1 1 -"25689" 4 180 3 1 1 1 3 1 100 414 140 147 2628 4 2 1 852 1 5 7 1 1 -"25690" 4 180 3 1 1 1 3 1 100 414 147 221 1345 1 8 0 999 3 50 74 0 1 -"25691" 4 180 3 1 1 1 3 1 100 414 221 276 1549 5 7 0 1377 2 25 55 0 1 -"25692" 4 180 3 1 1 1 3 1 100 414 276 414 1548 9 5 1 1713 3 50 138 1 1 -"25693" 4 180 3 1 1 1 4 1 100 1054 100 150 2683 8 7 1 1185 3 50 50 1 1 -"25694" 4 180 3 1 1 1 4 1 100 1054 150 225 1574 3 10 0 705 3 50 75 0 1 -"25695" 4 180 3 1 1 1 4 1 100 1054 225 214 2228 7 9 1 968 1 5 11 0 0 -"25696" 4 180 3 1 1 1 4 1 100 1054 214 375 1719 9 1 1 2091 4 75 161 1 1 -"25697" 4 180 3 1 1 1 4 1 100 1054 375 563 1605 2 3 0 1566 3 50 188 0 1 -"25698" 4 180 3 1 1 1 4 1 100 1054 563 845 1905 1 8 0 1340 3 50 282 0 1 -"25699" 4 180 3 1 1 1 4 1 100 1054 845 803 1953 5 4 0 802 1 5 42 1 0 -"25700" 4 180 3 1 1 1 4 1 100 1054 803 843 2458 4 2 1 715 1 5 40 1 1 -"25701" 4 180 3 1 1 1 4 1 100 1054 843 1054 1662 6 3 1 788 2 25 211 1 1 -"25702" 4 180 4 0 1 0 1 1 100 8 100 175 7493 2 7 0 1406 2 75 75 0 1 -"25703" 4 180 4 0 1 0 1 1 100 8 175 87 2361 3 7 1 846 3 50 88 0 0 -"25704" 4 180 4 0 1 0 1 1 100 8 87 170 2395 2 8 0 1302 1 95 83 0 1 -"25705" 4 180 4 0 1 0 1 1 100 8 170 8 1949 8 9 1 726 1 95 162 0 0 -"25706" 4 180 5 1 1 0 1 1 100 620 100 195 3178 1 3 0 1096 1 95 95 0 1 -"25707" 4 180 5 1 1 0 1 1 100 620 195 244 2457 6 5 1 867 4 25 49 1 1 -"25708" 4 180 5 1 1 0 1 1 100 620 244 427 3119 2 8 0 936 2 75 183 0 1 -"25709" 4 180 5 1 1 0 1 1 100 620 427 107 1441 8 9 1 890 2 75 320 0 0 -"25710" 4 180 5 1 1 0 1 1 100 620 107 161 1396 3 4 0 1072 3 50 54 0 1 -"25711" 4 180 5 1 1 0 1 1 100 620 161 169 1866 5 7 0 1128 5 5 8 0 1 -"25712" 4 180 5 1 1 0 1 1 100 620 169 254 1759 7 4 1 897 3 50 85 1 1 -"25713" 4 180 5 1 1 0 1 1 100 620 254 318 2596 4 1 1 1182 4 25 64 1 1 -"25714" 4 180 5 1 1 0 1 1 100 620 318 620 2443 9 6 1 4892 1 95 302 1 1 -"25715" 4 180 5 1 1 0 2 1 100 912 100 195 2186 8 1 1 632 1 95 95 1 1 -"25716" 4 180 5 1 1 0 2 1 100 912 195 293 2319 6 2 1 1959 3 50 98 1 1 -"25717" 4 180 5 1 1 0 2 1 100 912 293 146 3023 7 9 1 901 3 50 147 0 0 -"25718" 4 180 5 1 1 0 2 1 100 912 146 285 1888 2 10 0 2020 1 95 139 0 1 -"25719" 4 180 5 1 1 0 2 1 100 912 285 214 4473 5 3 0 4250 4 25 71 1 0 -"25720" 4 180 5 1 1 0 2 1 100 912 214 160 2677 4 2 0 1980 4 25 54 1 0 -"25721" 4 180 5 1 1 0 2 1 100 912 160 312 1451 3 5 0 2004 1 95 152 0 1 -"25722" 4 180 5 1 1 0 2 1 100 912 312 608 1682 9 4 1 975 1 95 296 1 1 -"25723" 4 180 5 1 1 0 2 1 100 912 608 912 1774 1 7 0 1383 3 50 304 0 1 -"25724" 4 180 5 1 1 0 3 1 100 4 100 150 1671 7 5 1 845 3 50 50 1 1 -"25725" 4 180 5 1 1 0 3 1 100 4 150 7 1353 2 1 0 790 1 95 143 1 0 -"25726" 4 180 5 1 1 0 3 1 100 4 7 14 1310 8 6 1 3381 1 95 7 1 1 -"25727" 4 180 5 1 1 0 3 1 100 4 14 21 1633 4 7 0 1234 3 50 7 0 1 -"25728" 4 180 5 1 1 0 3 1 100 4 21 37 1828 3 10 0 1011 2 75 16 0 1 -"25729" 4 180 5 1 1 0 3 1 100 4 37 18 2072 6 8 1 1315 3 50 19 0 0 -"25730" 4 180 5 1 1 0 3 1 100 4 18 35 1378 9 2 1 974 1 95 17 1 1 -"25731" 4 180 5 1 1 0 3 1 100 4 35 2 2444 5 3 0 725 1 95 33 1 0 -"25732" 4 180 5 1 1 0 3 1 100 4 2 4 1664 1 10 0 1014 1 95 2 0 1 -"25733" 4 180 5 1 1 0 4 1 100 1205 100 195 2733 2 3 0 1480 1 95 95 0 1 -"25734" 4 180 5 1 1 0 4 1 100 1205 195 293 1703 7 4 1 1852 3 50 98 1 1 -"25735" 4 180 5 1 1 0 4 1 100 1205 293 220 1785 3 1 0 
1274 4 25 73 1 0 -"25736" 4 180 5 1 1 0 4 1 100 1205 220 385 1419 1 9 0 1173 2 75 165 0 1 -"25737" 4 180 5 1 1 0 4 1 100 1205 385 578 1971 8 7 1 1035 3 50 193 1 1 -"25738" 4 180 5 1 1 0 4 1 100 1205 578 1127 1687 9 2 1 1204 1 95 549 1 1 -"25739" 4 180 5 1 1 0 4 1 100 1205 1127 1071 1928 5 6 1 2843 5 5 56 0 0 -"25740" 4 180 5 1 1 0 4 1 100 1205 1071 803 2696 6 8 1 3046 4 25 268 0 0 -"25741" 4 180 5 1 1 0 4 1 100 1205 803 1205 2794 4 7 0 1283 3 50 402 0 1 -"25742" 4 183 2 0 1 1 1 1 100 285 100 150 7188 8 3 1 972 3 50 50 1 1 -"25743" 4 183 2 0 1 1 1 1 100 285 150 293 8683 3 7 0 0 5 95 143 0 1 -"25744" 4 183 2 0 1 1 1 1 100 285 293 571 3674 8 2 1 0 5 95 278 1 1 -"25745" 4 183 2 0 1 1 1 1 100 285 571 285 2993 2 1 0 2113 3 50 286 1 0 -"25746" 4 183 3 1 1 1 1 1 100 579 100 195 7246 9 7 1 0 5 95 95 1 1 -"25747" 4 183 3 1 1 1 1 1 100 579 195 185 6652 4 8 1 4884 1 5 10 0 0 -"25748" 4 183 3 1 1 1 1 1 100 579 185 278 2754 8 2 1 2044 3 50 93 1 1 -"25749" 4 183 3 1 1 1 1 1 100 579 278 139 3840 2 1 0 1535 3 50 139 1 0 -"25750" 4 183 3 1 1 1 1 1 100 579 139 271 3700 7 6 1 0 5 95 132 1 1 -"25751" 4 183 3 1 1 1 1 1 100 579 271 528 4034 5 3 1 0 5 95 257 1 1 -"25752" 4 183 3 1 1 1 1 1 100 579 528 396 4551 3 6 1 4437 2 25 132 0 0 -"25753" 4 183 3 1 1 1 1 1 100 579 396 297 3886 6 9 1 3283 2 25 99 0 0 -"25754" 4 183 3 1 1 1 1 1 100 579 297 579 2348 1 4 0 0 5 95 282 0 1 -"25755" 4 183 3 1 1 1 2 0 100 0 100 195 2041 2 9 0 0 5 95 95 0 1 -"25756" 4 183 3 1 1 1 2 0 100 0 195 97 6549 4 10 1 1290 3 50 98 0 0 -"25757" 4 183 3 1 1 1 2 0 100 0 97 5 2987 3 1 0 0 5 95 92 1 0 -"25758" 4 183 3 1 1 1 2 0 100 0 5 10 5882 8 6 1 0 5 95 5 1 1 -"25759" 4 183 3 1 1 1 2 0 100 0 10 0 3784 5 7 1 0 5 95 10 0 0 -"25760" 4 183 3 1 1 1 3 1 100 64 100 125 3215 3 5 0 1724 2 25 25 0 1 -"25761" 4 183 3 1 1 1 3 1 100 64 125 31 1759 8 9 1 2186 4 75 94 0 0 -"25762" 4 183 3 1 1 1 3 1 100 64 31 60 2383 2 4 0 0 5 95 29 0 1 -"25763" 4 183 3 1 1 1 3 1 100 64 60 117 3539 6 3 1 0 5 95 57 1 1 -"25764" 4 183 3 1 1 1 3 1 100 64 117 228 5080 7 6 1 0 5 95 111 1 1 -"25765" 4 183 3 1 1 1 3 1 100 64 228 342 3308 4 2 1 2444 3 50 114 1 1 -"25766" 4 183 3 1 1 1 3 1 100 64 342 667 2093 1 8 0 0 5 95 325 0 1 -"25767" 4 183 3 1 1 1 3 1 100 64 667 33 2735 5 7 1 0 5 95 634 0 0 -"25768" 4 183 3 1 1 1 3 1 100 64 33 64 3250 9 5 1 0 5 95 31 1 1 -"25769" 4 183 3 1 1 1 4 1 100 1795 100 175 2344 8 7 1 3334 4 75 75 1 1 -"25770" 4 183 3 1 1 1 4 1 100 1795 175 263 2549 3 10 0 1321 3 50 88 0 1 -"25771" 4 183 3 1 1 1 4 1 100 1795 263 66 1920 7 9 1 1147 4 75 197 0 0 -"25772" 4 183 3 1 1 1 4 1 100 1795 66 129 3761 9 1 1 0 5 95 63 1 1 -"25773" 4 183 3 1 1 1 4 1 100 1795 129 252 2204 2 3 0 0 5 95 123 0 1 -"25774" 4 183 3 1 1 1 4 1 100 1795 252 491 1881 1 8 0 0 5 95 239 0 1 -"25775" 4 183 3 1 1 1 4 1 100 1795 491 957 4525 5 4 1 0 5 95 466 1 1 -"25776" 4 183 3 1 1 1 4 1 100 1795 957 1436 4116 4 2 1 2870 3 50 479 1 1 -"25777" 4 183 3 1 1 1 4 1 100 1795 1436 1795 2802 6 3 1 1714 2 25 359 1 1 -"25778" 4 183 4 0 1 0 1 1 100 1 100 150 7035 2 7 0 569 3 50 50 0 1 -"25779" 4 183 4 0 1 0 1 1 100 1 150 293 2739 3 7 0 735 1 95 143 0 1 -"25780" 4 183 4 0 1 0 1 1 100 1 293 15 3489 2 8 1 568 1 95 278 0 0 -"25781" 4 183 4 0 1 0 1 1 100 1 15 1 1307 8 9 1 586 1 95 14 0 0 -"25782" 4 183 5 1 1 0 1 0 100 0 100 195 1281 1 3 0 474 1 95 95 0 1 -"25783" 4 183 5 1 1 0 1 0 100 0 195 97 1832 6 5 0 1527 3 50 98 1 0 -"25784" 4 183 5 1 1 0 1 0 100 0 97 189 1508 2 8 0 426 1 95 92 0 1 -"25785" 4 183 5 1 1 0 1 0 100 0 189 9 1506 8 9 1 384 1 95 180 0 0 -"25786" 4 183 5 1 1 0 1 0 100 0 9 0 1899 3 4 1 615 1 95 9 0 0 
-"25787" 4 183 5 1 1 0 2 1 100 1037 100 195 2474 8 1 1 674 1 95 95 1 1 -"25788" 4 183 5 1 1 0 2 1 100 1037 195 380 1911 6 2 1 642 1 95 185 1 1 -"25789" 4 183 5 1 1 0 2 1 100 1037 380 19 3208 7 9 1 1093 1 95 361 0 0 -"25790" 4 183 5 1 1 0 2 1 100 1037 19 37 2318 2 10 0 1421 1 95 18 0 1 -"25791" 4 183 5 1 1 0 2 1 100 1037 37 72 2839 5 3 1 534 1 95 35 1 1 -"25792" 4 183 5 1 1 0 2 1 100 1037 72 140 3026 4 2 1 727 1 95 68 1 1 -"25793" 4 183 5 1 1 0 2 1 100 1037 140 273 3754 3 5 0 564 1 95 133 0 1 -"25794" 4 183 5 1 1 0 2 1 100 1037 273 532 3119 9 4 1 320 1 95 259 1 1 -"25795" 4 183 5 1 1 0 2 1 100 1037 532 1037 1478 1 7 0 477 1 95 505 0 1 -"25796" 4 183 5 1 1 0 3 0 100 1 100 195 1699 7 5 1 400 1 95 95 1 1 -"25797" 4 183 5 1 1 0 3 0 100 1 195 10 2387 2 1 0 791 1 95 185 1 0 -"25798" 4 183 5 1 1 0 3 0 100 1 10 20 2726 8 6 1 658 1 95 10 1 1 -"25799" 4 183 5 1 1 0 3 0 100 1 20 1 1336 4 7 1 1606 1 95 19 0 0 -"25800" 4 183 5 1 1 0 4 0 100 0 100 195 1516 2 3 0 658 1 95 95 0 1 -"25801" 4 183 5 1 1 0 4 0 100 0 195 380 4760 7 4 1 355 1 95 185 1 1 -"25802" 4 183 5 1 1 0 4 0 100 0 380 19 2735 3 1 0 571 1 95 361 1 0 -"25803" 4 183 5 1 1 0 4 0 100 0 19 37 2508 1 9 0 707 1 95 18 0 1 -"25804" 4 183 5 1 1 0 4 0 100 0 37 72 4230 8 7 1 530 1 95 35 1 1 -"25805" 4 183 5 1 1 0 4 0 100 0 72 140 2398 9 2 1 402 1 95 68 1 1 -"25806" 4 183 5 1 1 0 4 0 100 0 140 7 2092 5 6 1 390 1 95 133 0 0 -"25807" 4 183 5 1 1 0 4 0 100 0 7 0 2424 6 8 1 987 1 95 7 0 0 -"25808" 4 187 2 0 1 1 1 1 100 267 100 150 4404 8 3 1 1793 3 50 50 1 1 -"25809" 4 187 2 0 1 1 1 1 100 267 150 225 6697 3 7 0 1408 3 50 75 0 1 -"25810" 4 187 2 0 1 1 1 1 100 267 225 281 1754 8 2 1 3300 2 25 56 1 1 -"25811" 4 187 2 0 1 1 1 1 100 267 281 267 2180 2 1 0 1730 1 5 14 1 0 -"25812" 4 187 3 1 1 1 1 1 100 599 100 150 16493 9 7 1 4921 3 50 50 1 1 -"25813" 4 187 3 1 1 1 1 1 100 599 150 188 1708 4 8 0 3624 2 25 38 0 1 -"25814" 4 187 3 1 1 1 1 1 100 599 188 282 1641 8 2 1 1240 3 50 94 1 1 -"25815" 4 187 3 1 1 1 1 1 100 599 282 70 1483 2 1 0 634 4 75 212 1 0 -"25816" 4 187 3 1 1 1 1 1 100 599 70 137 9466 7 6 1 2533 5 95 67 1 1 -"25817" 4 187 3 1 1 1 1 1 100 599 137 240 3906 5 3 1 1558 4 75 103 1 1 -"25818" 4 187 3 1 1 1 1 1 100 599 240 420 1809 3 6 0 969 4 75 180 0 1 -"25819" 4 187 3 1 1 1 1 1 100 599 420 399 2584 6 9 1 3779 1 5 21 0 0 -"25820" 4 187 3 1 1 1 1 1 100 599 399 599 5836 1 4 0 2228 3 50 200 0 1 -"25821" 4 187 3 1 1 1 2 1 100 174 100 50 4143 2 9 1 3575 3 50 50 0 0 -"25822" 4 187 3 1 1 1 2 1 100 174 50 88 2803 4 10 0 1012 4 75 38 0 1 -"25823" 4 187 3 1 1 1 2 1 100 174 88 66 2208 3 1 0 3452 2 25 22 1 0 -"25824" 4 187 3 1 1 1 2 1 100 174 66 116 2429 8 6 1 1088 4 75 50 1 1 -"25825" 4 187 3 1 1 1 2 1 100 174 116 29 4577 5 7 1 2053 4 75 87 0 0 -"25826" 4 187 3 1 1 1 2 1 100 174 29 44 2905 6 8 0 4532 3 50 15 0 1 -"25827" 4 187 3 1 1 1 2 1 100 174 44 66 2442 7 5 1 2508 3 50 22 1 1 -"25828" 4 187 3 1 1 1 2 1 100 174 66 116 2930 1 10 0 612 4 75 50 0 1 -"25829" 4 187 3 1 1 1 2 1 100 174 116 174 4884 9 3 1 1929 3 50 58 1 1 -"25830" 4 187 3 1 1 1 3 1 100 4 100 150 2574 3 5 0 1769 3 50 50 0 1 -"25831" 4 187 3 1 1 1 3 1 100 4 150 75 3722 8 9 1 2671 3 50 75 0 0 -"25832" 4 187 3 1 1 1 3 1 100 4 75 113 2420 2 4 0 3663 3 50 38 0 1 -"25833" 4 187 3 1 1 1 3 1 100 4 113 56 2606 6 3 0 1544 3 50 57 1 0 -"25834" 4 187 3 1 1 1 3 1 100 4 56 84 3275 7 6 1 1045 3 50 28 1 1 -"25835" 4 187 3 1 1 1 3 1 100 4 84 4 1848 4 2 0 1751 5 95 80 1 0 -"25836" 4 187 3 1 1 1 3 1 100 4 4 8 3361 1 8 0 1863 5 95 4 0 1 -"25837" 4 187 3 1 1 1 3 1 100 4 8 2 6804 5 7 1 1814 4 75 6 0 0 -"25838" 4 187 3 1 1 1 3 1 
100 4 2 4 2062 9 5 1 2280 4 75 2 1 1 -"25839" 4 187 3 1 1 1 4 1 100 651 100 175 5816 8 7 1 745 4 75 75 1 1 -"25840" 4 187 3 1 1 1 4 1 100 651 175 263 1618 3 10 0 1699 3 50 88 0 1 -"25841" 4 187 3 1 1 1 4 1 100 651 263 197 2528 7 9 1 1269 2 25 66 0 0 -"25842" 4 187 3 1 1 1 4 1 100 651 197 296 2190 9 1 1 1957 3 50 99 1 1 -"25843" 4 187 3 1 1 1 4 1 100 651 296 370 1917 2 3 0 2646 2 25 74 0 1 -"25844" 4 187 3 1 1 1 4 1 100 651 370 463 2502 1 8 0 2804 2 25 93 0 1 -"25845" 4 187 3 1 1 1 4 1 100 651 463 579 1637 5 4 1 1772 2 25 116 1 1 -"25846" 4 187 3 1 1 1 4 1 100 651 579 434 1806 4 2 0 1405 2 25 145 1 0 -"25847" 4 187 3 1 1 1 4 1 100 651 434 651 1632 6 3 1 1535 3 50 217 1 1 -"25848" 4 187 4 0 1 0 1 1 100 16 100 150 5665 2 7 0 858 3 50 50 0 1 -"25849" 4 187 4 0 1 0 1 1 100 16 150 263 3886 3 7 0 994 2 75 113 0 1 -"25850" 4 187 4 0 1 0 1 1 100 16 263 329 2185 2 8 0 1103 4 25 66 0 1 -"25851" 4 187 4 0 1 0 1 1 100 16 329 16 1984 8 9 1 1070 1 95 313 0 0 -"25852" 4 187 5 1 1 0 1 1 100 320 100 195 9139 1 3 0 4261 1 95 95 0 1 -"25853" 4 187 5 1 1 0 1 1 100 320 195 341 1249 6 5 1 451 2 75 146 1 1 -"25854" 4 187 5 1 1 0 1 1 100 320 341 512 1515 2 8 0 866 3 50 171 0 1 -"25855" 4 187 5 1 1 0 1 1 100 320 512 256 1494 8 9 1 980 3 50 256 0 0 -"25856" 4 187 5 1 1 0 1 1 100 320 256 499 2643 3 4 0 850 1 95 243 0 1 -"25857" 4 187 5 1 1 0 1 1 100 320 499 374 1592 5 7 1 682 4 25 125 0 0 -"25858" 4 187 5 1 1 0 1 1 100 320 374 655 1831 7 4 1 342 2 75 281 1 1 -"25859" 4 187 5 1 1 0 1 1 100 320 655 164 1531 4 1 0 3336 2 75 491 1 0 -"25860" 4 187 5 1 1 0 1 1 100 320 164 320 1582 9 6 1 656 1 95 156 1 1 -"25861" 4 187 5 1 1 0 2 0 100 0 100 195 1525 8 1 1 626 1 95 95 1 1 -"25862" 4 187 5 1 1 0 2 0 100 0 195 49 2177 6 2 0 527 2 75 146 1 0 -"25863" 4 187 5 1 1 0 2 0 100 0 49 2 1235 7 9 1 869 1 95 47 0 0 -"25864" 4 187 5 1 1 0 2 0 100 0 2 4 1474 2 10 0 561 1 95 2 0 1 -"25865" 4 187 5 1 1 0 2 0 100 0 4 8 1139 5 3 1 675 1 95 4 1 1 -"25866" 4 187 5 1 1 0 2 0 100 0 8 0 888 4 2 0 987 1 95 8 1 0 -"25867" 4 187 5 1 1 0 3 1 100 76 100 195 1393 7 5 1 1013 1 95 95 1 1 -"25868" 4 187 5 1 1 0 3 1 100 76 195 10 1426 2 1 0 835 1 95 185 1 0 -"25869" 4 187 5 1 1 0 3 1 100 76 10 20 1249 8 6 1 939 1 95 10 1 1 -"25870" 4 187 5 1 1 0 3 1 100 76 20 35 1276 4 7 0 469 2 75 15 0 1 -"25871" 4 187 5 1 1 0 3 1 100 76 35 61 1267 3 10 0 477 2 75 26 0 1 -"25872" 4 187 5 1 1 0 3 1 100 76 61 15 1171 6 8 1 569 2 75 46 0 0 -"25873" 4 187 5 1 1 0 3 1 100 76 15 26 1652 9 2 1 1387 2 75 11 1 1 -"25874" 4 187 5 1 1 0 3 1 100 76 26 39 1654 5 3 1 781 3 50 13 1 1 -"25875" 4 187 5 1 1 0 3 1 100 76 39 76 981 1 10 0 493 1 95 37 0 1 -"25876" 4 187 5 1 1 0 4 1 100 199 100 195 1184 2 3 0 605 1 95 95 0 1 -"25877" 4 187 5 1 1 0 4 1 100 199 195 380 989 7 4 1 627 1 95 185 1 1 -"25878" 4 187 5 1 1 0 4 1 100 199 380 285 995 3 1 0 970 4 25 95 1 0 -"25879" 4 187 5 1 1 0 4 1 100 199 285 556 1201 1 9 0 661 1 95 271 0 1 -"25880" 4 187 5 1 1 0 4 1 100 199 556 695 1443 8 7 1 2100 4 25 139 1 1 -"25881" 4 187 5 1 1 0 4 1 100 199 695 1355 1685 9 2 1 939 1 95 660 1 1 -"25882" 4 187 5 1 1 0 4 1 100 199 1355 2033 1244 5 6 0 1358 3 50 678 0 1 -"25883" 4 187 5 1 1 0 4 1 100 199 2033 102 1371 6 8 1 3023 1 95 1931 0 0 -"25884" 4 187 5 1 1 0 4 1 100 199 102 199 1044 4 7 0 680 1 95 97 0 1 -"25885" 4 189 2 0 1 1 1 1 100 770 100 150 10234 8 3 1 2441 3 50 50 1 1 -"25886" 4 189 2 0 1 1 1 1 100 770 150 293 14882 3 7 0 0 5 95 143 0 1 -"25887" 4 189 2 0 1 1 1 1 100 770 293 440 3051 8 2 1 2247 3 50 147 1 1 -"25888" 4 189 2 0 1 1 1 1 100 770 440 770 2962 2 1 1 899 4 75 330 1 1 -"25889" 4 189 3 1 1 1 1 1 
100 248 100 150 21924 9 7 1 662 3 50 50 1 1 -"25890" 4 189 3 1 1 1 1 1 100 248 150 112 4068 4 8 1 1896 2 25 38 0 0 -"25891" 4 189 3 1 1 1 1 1 100 248 112 168 4812 8 2 1 1300 3 50 56 1 1 -"25892" 4 189 3 1 1 1 1 1 100 248 168 252 2347 2 1 1 1073 3 50 84 1 1 -"25893" 4 189 3 1 1 1 1 1 100 248 252 378 1779 7 6 1 1204 3 50 126 1 1 -"25894" 4 189 3 1 1 1 1 1 100 248 378 189 1928 5 3 0 795 3 50 189 1 0 -"25895" 4 189 3 1 1 1 1 1 100 248 189 331 1986 3 6 0 2978 4 75 142 0 1 -"25896" 4 189 3 1 1 1 1 1 100 248 331 497 2113 6 9 0 1498 3 50 166 0 1 -"25897" 4 189 3 1 1 1 1 1 100 248 497 248 1624 1 4 1 656 3 50 249 0 0 -"25898" 4 189 3 1 1 1 2 0 100 0 100 25 2111 2 9 1 1272 4 75 75 0 0 -"25899" 4 189 3 1 1 1 2 0 100 0 25 12 1825 4 10 1 2320 3 50 13 0 0 -"25900" 4 189 3 1 1 1 2 0 100 0 12 23 1446 3 1 1 0 5 95 11 1 1 -"25901" 4 189 3 1 1 1 2 0 100 0 23 45 2815 8 6 1 0 5 95 22 1 1 -"25902" 4 189 3 1 1 1 2 0 100 0 45 2 1903 5 7 1 0 5 95 43 0 0 -"25903" 4 189 3 1 1 1 2 0 100 0 2 0 1954 6 8 1 0 5 95 2 0 0 -"25904" 4 189 3 1 1 1 3 1 100 65 100 150 1892 3 5 0 792 3 50 50 0 1 -"25905" 4 189 3 1 1 1 3 1 100 65 150 225 2287 8 9 0 1368 3 50 75 0 1 -"25906" 4 189 3 1 1 1 3 1 100 65 225 439 1988 2 4 0 3260 5 95 214 0 1 -"25907" 4 189 3 1 1 1 3 1 100 65 439 659 2030 6 3 1 1019 3 50 220 1 1 -"25908" 4 189 3 1 1 1 3 1 100 65 659 989 2176 7 6 1 1958 3 50 330 1 1 -"25909" 4 189 3 1 1 1 3 1 100 65 989 1731 2294 4 2 1 679 4 75 742 1 1 -"25910" 4 189 3 1 1 1 3 1 100 65 1731 87 2996 1 8 1 0 5 95 1644 0 0 -"25911" 4 189 3 1 1 1 3 1 100 65 87 131 1499 5 7 0 594 3 50 44 0 1 -"25912" 4 189 3 1 1 1 3 1 100 65 131 65 2075 9 5 0 294 3 50 66 1 0 -"25913" 4 189 3 1 1 1 4 1 100 425 100 150 2375 8 7 1 245 3 50 50 1 1 -"25914" 4 189 3 1 1 1 4 1 100 425 150 225 1430 3 10 0 872 3 50 75 0 1 -"25915" 4 189 3 1 1 1 4 1 100 425 225 112 1036 7 9 1 336 3 50 113 0 0 -"25916" 4 189 3 1 1 1 4 1 100 425 112 168 1093 9 1 1 983 3 50 56 1 1 -"25917" 4 189 3 1 1 1 4 1 100 425 168 252 1469 2 3 0 997 3 50 84 0 1 -"25918" 4 189 3 1 1 1 4 1 100 425 252 378 1491 1 8 0 329 3 50 126 0 1 -"25919" 4 189 3 1 1 1 4 1 100 425 378 567 1735 5 4 1 731 3 50 189 1 1 -"25920" 4 189 3 1 1 1 4 1 100 425 567 283 1541 4 2 0 1354 3 50 284 1 0 -"25921" 4 189 3 1 1 1 4 1 100 425 283 425 1374 6 3 1 467 3 50 142 1 1 -"25922" 4 189 4 0 1 0 1 1 100 1 100 150 6586 2 7 0 319 3 50 50 0 1 -"25923" 4 189 4 0 1 0 1 1 100 1 150 7 2589 3 7 1 799 1 95 143 0 0 -"25924" 4 189 4 0 1 0 1 1 100 1 7 14 1814 2 8 0 1398 1 95 7 0 1 -"25925" 4 189 4 0 1 0 1 1 100 1 14 1 1416 8 9 1 635 1 95 13 0 0 -"25926" 4 189 5 1 1 0 1 0 100 0 100 195 2760 1 3 0 723 1 95 95 0 1 -"25927" 4 189 5 1 1 0 1 0 100 0 195 293 1845 6 5 1 988 3 50 98 1 1 -"25928" 4 189 5 1 1 0 1 0 100 0 293 571 1362 2 8 0 623 1 95 278 0 1 -"25929" 4 189 5 1 1 0 1 0 100 0 571 29 1198 8 9 1 339 1 95 542 0 0 -"25930" 4 189 5 1 1 0 1 0 100 0 29 57 1088 3 4 0 410 1 95 28 0 1 -"25931" 4 189 5 1 1 0 1 0 100 0 57 3 1059 5 7 1 663 1 95 54 0 0 -"25932" 4 189 5 1 1 0 1 0 100 0 3 6 988 7 4 1 715 1 95 3 1 1 -"25933" 4 189 5 1 1 0 1 0 100 0 6 0 942 4 1 0 1063 1 95 6 1 0 -"25934" 4 189 5 1 1 0 2 0 100 1 100 195 1474 8 1 1 537 1 95 95 1 1 -"25935" 4 189 5 1 1 0 2 0 100 1 195 97 1332 6 2 0 531 3 50 98 1 0 -"25936" 4 189 5 1 1 0 2 0 100 1 97 5 1071 7 9 1 457 1 95 92 0 0 -"25937" 4 189 5 1 1 0 2 0 100 1 5 10 1065 2 10 0 660 1 95 5 0 1 -"25938" 4 189 5 1 1 0 2 0 100 1 10 20 874 5 3 1 686 1 95 10 1 1 -"25939" 4 189 5 1 1 0 2 0 100 1 20 1 1027 4 2 0 272 1 95 19 1 0 -"25940" 4 189 5 1 1 0 3 0 100 1 100 195 1437 7 5 1 254 1 95 95 1 1 -"25941" 4 189 5 1 1 0 3 0 
100 1 195 10 2127 2 1 0 255 1 95 185 1 0 -"25942" 4 189 5 1 1 0 3 0 100 1 10 20 1056 8 6 1 763 1 95 10 1 1 -"25943" 4 189 5 1 1 0 3 0 100 1 20 1 1436 4 7 1 1597 1 95 19 0 0 -"25944" 4 189 5 1 1 0 4 1 100 938 100 195 2407 2 3 0 1034 1 95 95 0 1 -"25945" 4 189 5 1 1 0 4 1 100 938 195 293 2030 7 4 1 1084 3 50 98 1 1 -"25946" 4 189 5 1 1 0 4 1 100 938 293 146 1381 3 1 0 1010 3 50 147 1 0 -"25947" 4 189 5 1 1 0 4 1 100 938 146 219 1271 1 9 0 344 3 50 73 0 1 -"25948" 4 189 5 1 1 0 4 1 100 938 219 427 1365 8 7 1 642 1 95 208 1 1 -"25949" 4 189 5 1 1 0 4 1 100 938 427 641 1744 9 2 1 275 3 50 214 1 1 -"25950" 4 189 5 1 1 0 4 1 100 938 641 962 1499 5 6 0 1026 3 50 321 0 1 -"25951" 4 189 5 1 1 0 4 1 100 938 962 481 2382 6 8 1 522 3 50 481 0 0 -"25952" 4 189 5 1 1 0 4 1 100 938 481 938 1046 4 7 0 1066 1 95 457 0 1 -"25953" 4 196 2 0 1 0 1 1 100 33 100 175 13459 2 7 0 1392 2 75 75 0 1 -"25954" 4 196 2 0 1 0 1 1 100 33 175 341 3699 3 7 0 2657 1 95 166 0 1 -"25955" 4 196 2 0 1 0 1 1 100 33 341 665 3223 2 8 0 2619 1 95 324 0 1 -"25956" 4 196 2 0 1 0 1 1 100 33 665 33 1659 8 9 1 1210 1 95 632 0 0 -"25957" 4 196 3 1 1 0 1 1 100 1037 100 195 3490 1 3 0 849 1 95 95 0 1 -"25958" 4 196 3 1 1 0 1 1 100 1037 195 380 2646 6 5 1 981 1 95 185 1 1 -"25959" 4 196 3 1 1 0 1 1 100 1037 380 741 1588 2 8 0 997 1 95 361 0 1 -"25960" 4 196 3 1 1 0 1 1 100 1037 741 37 1755 8 9 1 1237 1 95 704 0 0 -"25961" 4 196 3 1 1 0 1 1 100 1037 37 72 1671 3 4 0 1357 1 95 35 0 1 -"25962" 4 196 3 1 1 0 1 1 100 1037 72 140 2378 5 7 0 1174 1 95 68 0 1 -"25963" 4 196 3 1 1 0 1 1 100 1037 140 273 1940 7 4 1 2245 1 95 133 1 1 -"25964" 4 196 3 1 1 0 1 1 100 1037 273 532 2519 4 1 1 1125 1 95 259 1 1 -"25965" 4 196 3 1 1 0 1 1 100 1037 532 1037 1964 9 6 1 1444 1 95 505 1 1 -"25966" 4 196 3 1 1 0 2 1 100 31 100 195 2915 8 1 1 1565 1 95 95 1 1 -"25967" 4 196 3 1 1 0 2 1 100 31 195 380 2688 6 2 1 870 1 95 185 1 1 -"25968" 4 196 3 1 1 0 2 1 100 31 380 741 1881 7 9 0 941 1 95 361 0 1 -"25969" 4 196 3 1 1 0 2 1 100 31 741 1445 1829 2 10 0 1012 1 95 704 0 1 -"25970" 4 196 3 1 1 0 2 1 100 31 1445 72 3217 5 3 0 1877 1 95 1373 1 0 -"25971" 4 196 3 1 1 0 2 1 100 31 72 4 2057 4 2 0 1268 1 95 68 1 0 -"25972" 4 196 3 1 1 0 2 1 100 31 4 8 1101 3 5 0 926 1 95 4 0 1 -"25973" 4 196 3 1 1 0 2 1 100 31 8 16 1631 9 4 1 1031 1 95 8 1 1 -"25974" 4 196 3 1 1 0 2 1 100 31 16 31 1715 1 7 0 2863 1 95 15 0 1 -"25975" 4 196 3 1 1 0 3 0 100 0 100 195 1893 7 5 1 2816 1 95 95 1 1 -"25976" 4 196 3 1 1 0 3 0 100 0 195 10 1352 2 1 0 2133 1 95 185 1 0 -"25977" 4 196 3 1 1 0 3 0 100 0 10 20 1532 8 6 1 1049 1 95 10 1 1 -"25978" 4 196 3 1 1 0 3 0 100 0 20 39 1668 4 7 0 1131 1 95 19 0 1 -"25979" 4 196 3 1 1 0 3 0 100 0 39 76 1754 3 10 0 1124 1 95 37 0 1 -"25980" 4 196 3 1 1 0 3 0 100 0 76 4 2172 6 8 1 1986 1 95 72 0 0 -"25981" 4 196 3 1 1 0 3 0 100 0 4 8 1209 9 2 1 989 1 95 4 1 1 -"25982" 4 196 3 1 1 0 3 0 100 0 8 0 4371 5 3 0 1097 1 95 8 1 0 -"25983" 4 196 3 1 1 0 4 0 100 0 100 195 3581 2 3 0 849 1 95 95 0 1 -"25984" 4 196 3 1 1 0 4 0 100 0 195 380 1358 7 4 1 1130 1 95 185 1 1 -"25985" 4 196 3 1 1 0 4 0 100 0 380 19 2274 3 1 0 1195 1 95 361 1 0 -"25986" 4 196 3 1 1 0 4 0 100 0 19 37 2288 1 9 0 1865 1 95 18 0 1 -"25987" 4 196 3 1 1 0 4 0 100 0 37 72 1317 8 7 1 1252 1 95 35 1 1 -"25988" 4 196 3 1 1 0 4 0 100 0 72 140 1710 9 2 1 1162 1 95 68 1 1 -"25989" 4 196 3 1 1 0 4 0 100 0 140 7 2631 5 6 1 928 1 95 133 0 0 -"25990" 4 196 3 1 1 0 4 0 100 0 7 0 1872 6 8 1 1197 1 95 7 0 0 -"25991" 4 196 4 0 1 1 1 1 100 169 100 150 3349 8 3 1 420 3 50 50 1 1 -"25992" 4 196 4 0 1 1 1 1 100 169 150 225 
2141 3 7 0 1589 3 50 75 0 1 -"25993" 4 196 4 0 1 1 1 1 100 169 225 338 1655 8 2 1 1045 3 50 113 1 1 -"25994" 4 196 4 0 1 1 1 1 100 169 338 169 2841 2 1 0 1192 3 50 169 1 0 -"25995" 4 196 5 1 1 1 1 1 100 233 100 150 2434 9 7 1 1604 3 50 50 1 1 -"25996" 4 196 5 1 1 1 1 1 100 233 150 142 2461 4 8 1 1069 1 5 8 0 0 -"25997" 4 196 5 1 1 1 1 1 100 233 142 249 1736 8 2 1 780 4 75 107 1 1 -"25998" 4 196 5 1 1 1 1 1 100 233 249 124 1796 2 1 0 452 3 50 125 1 0 -"25999" 4 196 5 1 1 1 1 1 100 233 124 155 1860 7 6 1 1433 2 25 31 1 1 -"26000" 4 196 5 1 1 1 1 1 100 233 155 147 1994 5 3 0 457 1 5 8 1 0 -"26001" 4 196 5 1 1 1 1 1 100 233 147 140 2480 3 6 1 1127 1 5 7 0 0 -"26002" 4 196 5 1 1 1 1 1 100 233 140 133 1351 6 9 1 988 1 5 7 0 0 -"26003" 4 196 5 1 1 1 1 1 100 233 133 233 1283 1 4 0 1148 4 75 100 0 1 -"26004" 4 196 5 1 1 1 2 1 100 435 100 175 1844 2 9 0 1979 4 75 75 0 1 -"26005" 4 196 5 1 1 1 2 1 100 435 175 219 2202 4 10 0 1191 2 25 44 0 1 -"26006" 4 196 5 1 1 1 2 1 100 435 219 164 2732 3 1 0 455 2 25 55 1 0 -"26007" 4 196 5 1 1 1 2 1 100 435 164 246 1311 8 6 1 1758 3 50 82 1 1 -"26008" 4 196 5 1 1 1 2 1 100 435 246 234 5496 5 7 1 896 1 5 12 0 0 -"26009" 4 196 5 1 1 1 2 1 100 435 234 222 1443 6 8 1 1044 1 5 12 0 0 -"26010" 4 196 5 1 1 1 2 1 100 435 222 278 1663 7 5 1 1070 2 25 56 1 1 -"26011" 4 196 5 1 1 1 2 1 100 435 278 348 1768 1 10 0 1586 2 25 70 0 1 -"26012" 4 196 5 1 1 1 2 1 100 435 348 435 1340 9 3 1 1313 2 25 87 1 1 -"26013" 4 196 5 1 1 1 3 1 100 114 100 125 2699 3 5 0 2232 2 25 25 0 1 -"26014" 4 196 5 1 1 1 3 1 100 114 125 31 1532 8 9 1 681 4 75 94 0 0 -"26015" 4 196 5 1 1 1 3 1 100 114 31 54 1718 2 4 0 410 4 75 23 0 1 -"26016" 4 196 5 1 1 1 3 1 100 114 54 57 2444 6 3 1 805 1 5 3 1 1 -"26017" 4 196 5 1 1 1 3 1 100 114 57 71 1580 7 6 1 2398 2 25 14 1 1 -"26018" 4 196 5 1 1 1 3 1 100 114 71 67 7482 4 2 0 1787 1 5 4 1 0 -"26019" 4 196 5 1 1 1 3 1 100 114 67 101 1932 1 8 0 1110 3 50 34 0 1 -"26020" 4 196 5 1 1 1 3 1 100 114 101 76 2846 5 7 1 1044 2 25 25 0 0 -"26021" 4 196 5 1 1 1 3 1 100 114 76 114 4592 9 5 1 2051 3 50 38 1 1 -"26022" 4 196 5 1 1 1 4 1 100 377 100 150 2607 8 7 1 1929 3 50 50 1 1 -"26023" 4 196 5 1 1 1 4 1 100 377 150 188 1779 3 10 0 1771 2 25 38 0 1 -"26024" 4 196 5 1 1 1 4 1 100 377 188 141 2916 7 9 1 1263 2 25 47 0 0 -"26025" 4 196 5 1 1 1 4 1 100 377 141 212 1280 9 1 1 433 3 50 71 1 1 -"26026" 4 196 5 1 1 1 4 1 100 377 212 265 2778 2 3 0 1956 2 25 53 0 1 -"26027" 4 196 5 1 1 1 4 1 100 377 265 398 1652 1 8 0 1068 3 50 133 0 1 -"26028" 4 196 5 1 1 1 4 1 100 377 398 378 3998 5 4 0 1293 1 5 20 1 0 -"26029" 4 196 5 1 1 1 4 1 100 377 378 359 2349 4 2 0 1059 1 5 19 1 0 -"26030" 4 196 5 1 1 1 4 1 100 377 359 377 1501 6 3 1 1060 1 5 18 1 1 -"26031" 4 205 2 0 1 0 1 1 100 22 100 150 20268 2 7 0 2445 3 50 50 0 1 -"26032" 4 205 2 0 1 0 1 1 100 22 150 225 3638 3 7 0 1350 3 50 75 0 1 -"26033" 4 205 2 0 1 0 1 1 100 22 225 439 4659 2 8 0 2555 1 95 214 0 1 -"26034" 4 205 2 0 1 0 1 1 100 22 439 22 1854 8 9 1 1091 1 95 417 0 0 -"26035" 4 205 3 1 1 0 1 0 100 1 100 195 2730 1 3 0 1341 1 95 95 0 1 -"26036" 4 205 3 1 1 0 1 0 100 1 195 10 2490 6 5 0 1209 1 95 185 1 0 -"26037" 4 205 3 1 1 0 1 0 100 1 10 20 2101 2 8 0 1286 1 95 10 0 1 -"26038" 4 205 3 1 1 0 1 0 100 1 20 1 1630 8 9 1 983 1 95 19 0 0 -"26039" 4 205 3 1 1 0 2 0 100 0 100 195 2753 8 1 1 899 1 95 95 1 1 -"26040" 4 205 3 1 1 0 2 0 100 0 195 10 3726 6 2 0 1290 1 95 185 1 0 -"26041" 4 205 3 1 1 0 2 0 100 0 10 0 2130 7 9 1 1442 1 95 10 0 0 -"26042" 4 205 3 1 1 0 3 0 100 0 100 195 4575 7 5 1 1659 1 95 95 1 1 -"26043" 4 205 3 1 1 0 3 0 100 0 
195 10 1794 2 1 0 2258 1 95 185 1 0 -"26044" 4 205 3 1 1 0 3 0 100 0 10 0 2932 8 6 0 1269 1 95 10 1 0 -"26045" 4 205 3 1 1 0 4 1 100 1 100 195 2729 2 3 0 1029 1 95 95 0 1 -"26046" 4 205 3 1 1 0 4 1 100 1 195 380 1785 7 4 1 1034 1 95 185 1 1 -"26047" 4 205 3 1 1 0 4 1 100 1 380 19 1950 3 1 0 1239 1 95 361 1 0 -"26048" 4 205 3 1 1 0 4 1 100 1 19 37 2497 1 9 0 1304 1 95 18 0 1 -"26049" 4 205 3 1 1 0 4 1 100 1 37 72 1631 8 7 1 2615 1 95 35 1 1 -"26050" 4 205 3 1 1 0 4 1 100 1 72 140 2912 9 2 1 1348 1 95 68 1 1 -"26051" 4 205 3 1 1 0 4 1 100 1 140 7 3817 5 6 1 1601 1 95 133 0 0 -"26052" 4 205 3 1 1 0 4 1 100 1 7 14 2570 6 8 0 1269 1 95 7 0 1 -"26053" 4 205 3 1 1 0 4 1 100 1 14 1 3734 4 7 1 1226 1 95 13 0 0 -"26054" 4 205 4 0 1 1 1 1 100 230 100 175 29900 8 3 1 1139 4 75 75 1 1 -"26055" 4 205 4 0 1 1 1 1 100 230 175 263 3090 3 7 0 1620 3 50 88 0 1 -"26056" 4 205 4 0 1 1 1 1 100 230 263 460 1805 8 2 1 1476 4 75 197 1 1 -"26057" 4 205 4 0 1 1 1 1 100 230 460 230 2427 2 1 0 1333 3 50 230 1 0 -"26058" 4 205 5 1 1 1 1 0 100 0 100 195 2131 9 7 1 0 5 95 95 1 1 -"26059" 4 205 5 1 1 1 1 0 100 0 195 293 2467 4 8 0 1435 3 50 98 0 1 -"26060" 4 205 5 1 1 1 1 0 100 0 293 571 2332 8 2 1 0 5 95 278 1 1 -"26061" 4 205 5 1 1 1 1 0 100 0 571 29 1931 2 1 0 1493 5 95 542 1 0 -"26062" 4 205 5 1 1 1 1 0 100 0 29 57 2844 7 6 1 0 5 95 28 1 1 -"26063" 4 205 5 1 1 1 1 0 100 0 57 3 4138 5 3 0 0 5 95 54 1 0 -"26064" 4 205 5 1 1 1 1 0 100 0 3 6 3715 3 6 0 0 5 95 3 0 1 -"26065" 4 205 5 1 1 1 1 0 100 0 6 0 4048 6 9 1 0 5 95 6 0 0 -"26066" 4 205 5 1 1 1 2 0 100 0 100 195 2739 2 9 0 0 5 95 95 0 1 -"26067" 4 205 5 1 1 1 2 0 100 0 195 10 2638 4 10 1 0 5 95 185 0 0 -"26068" 4 205 5 1 1 1 2 0 100 0 10 0 2182 3 1 0 0 5 95 10 1 0 -"26069" 4 205 5 1 1 1 3 1 100 31 100 195 1988 3 5 0 0 5 95 95 0 1 -"26070" 4 205 5 1 1 1 3 1 100 31 195 10 2044 8 9 1 0 5 95 185 0 0 -"26071" 4 205 5 1 1 1 3 1 100 31 10 20 2448 2 4 0 0 5 95 10 0 1 -"26072" 4 205 5 1 1 1 3 1 100 31 20 39 1497 6 3 1 0 5 95 19 1 1 -"26073" 4 205 5 1 1 1 3 1 100 31 39 76 2039 7 6 1 0 5 95 37 1 1 -"26074" 4 205 5 1 1 1 3 1 100 31 76 4 1828 4 2 0 0 5 95 72 1 0 -"26075" 4 205 5 1 1 1 3 1 100 31 4 8 2083 1 8 0 0 5 95 4 0 1 -"26076" 4 205 5 1 1 1 3 1 100 31 8 16 4157 5 7 0 0 5 95 8 0 1 -"26077" 4 205 5 1 1 1 3 1 100 31 16 31 3349 9 5 1 0 5 95 15 1 1 -"26078" 4 205 5 1 1 1 4 1 100 1037 100 195 3038 8 7 1 0 5 95 95 1 1 -"26079" 4 205 5 1 1 1 4 1 100 1037 195 380 1651 3 10 0 0 5 95 185 0 1 -"26080" 4 205 5 1 1 1 4 1 100 1037 380 19 2106 7 9 1 0 5 95 361 0 0 -"26081" 4 205 5 1 1 1 4 1 100 1037 19 37 2124 9 1 1 0 5 95 18 1 1 -"26082" 4 205 5 1 1 1 4 1 100 1037 37 72 3316 2 3 0 0 5 95 35 0 1 -"26083" 4 205 5 1 1 1 4 1 100 1037 72 140 2100 1 8 0 0 5 95 68 0 1 -"26084" 4 205 5 1 1 1 4 1 100 1037 140 273 5262 5 4 1 0 5 95 133 1 1 -"26085" 4 205 5 1 1 1 4 1 100 1037 273 532 5417 4 2 1 0 5 95 259 1 1 -"26086" 4 205 5 1 1 1 4 1 100 1037 532 1037 4229 6 3 1 0 5 95 505 1 1 -"26087" 4 211 2 0 1 0 1 1 100 0 100 150 6734 2 7 0 627 3 50 50 0 1 -"26088" 4 211 2 0 1 0 1 1 100 0 150 112 4106 3 7 1 2029 4 25 38 0 0 -"26089" 4 211 2 0 1 0 1 1 100 0 112 6 3602 2 8 1 2236 1 95 106 0 0 -"26090" 4 211 2 0 1 0 1 1 100 0 6 0 1879 8 9 1 1619 1 95 6 0 0 -"26091" 4 211 3 1 1 0 1 0 100 0 100 5 1906 1 3 1 1429 1 95 95 0 0 -"26092" 4 211 3 1 1 0 1 0 100 0 5 10 5830 6 5 1 942 1 95 5 1 1 -"26093" 4 211 3 1 1 0 1 0 100 0 10 0 1286 2 8 1 1086 1 95 10 0 0 -"26094" 4 211 3 1 1 0 2 0 100 0 100 75 2040 8 1 0 1021 4 25 25 1 0 -"26095" 4 211 3 1 1 0 2 0 100 0 75 146 1265 6 2 1 1474 1 95 71 1 1 -"26096" 4 211 3 1 1 0 2 0 100 0 
146 7 2114 7 9 1 1051 1 95 139 0 0 -"26097" 4 211 3 1 1 0 2 0 100 0 7 0 3566 2 10 1 1002 1 95 7 0 0 -"26098" 4 211 3 1 1 0 3 0 100 1 100 5 1236 7 5 0 1046 1 95 95 1 0 -"26099" 4 211 3 1 1 0 3 0 100 1 5 10 3074 2 1 1 301 1 95 5 1 1 -"26100" 4 211 3 1 1 0 3 0 100 1 10 20 3067 8 6 1 1415 1 95 10 1 1 -"26101" 4 211 3 1 1 0 3 0 100 1 20 1 2609 4 7 1 402 1 95 19 0 0 -"26102" 4 211 3 1 1 0 4 0 100 1 100 195 1447 2 3 0 456 1 95 95 0 1 -"26103" 4 211 3 1 1 0 4 0 100 1 195 10 2167 7 4 0 774 1 95 185 1 0 -"26104" 4 211 3 1 1 0 4 0 100 1 10 20 2398 3 1 1 753 1 95 10 1 1 -"26105" 4 211 3 1 1 0 4 0 100 1 20 1 1643 1 9 1 914 1 95 19 0 0 -"26106" 4 211 4 0 1 1 1 1 100 11 100 105 1770 8 3 1 2284 1 5 5 1 1 -"26107" 4 211 4 0 1 1 1 1 100 11 105 110 3212 3 7 0 1657 1 5 5 0 1 -"26108" 4 211 4 0 1 1 1 1 100 11 110 215 3255 8 2 1 0 5 95 105 1 1 -"26109" 4 211 4 0 1 1 1 1 100 11 215 11 2834 2 1 0 2673 5 95 204 1 0 -"26110" 4 211 5 1 1 1 1 1 100 336 100 195 1422 9 7 1 0 5 95 95 1 1 -"26111" 4 211 5 1 1 1 1 1 100 336 195 205 4424 4 8 0 2403 1 5 10 0 1 -"26112" 4 211 5 1 1 1 1 1 100 336 205 400 2022 8 2 1 0 5 95 195 1 1 -"26113" 4 211 5 1 1 1 1 1 100 336 400 200 2588 2 1 0 2246 3 50 200 1 0 -"26114" 4 211 5 1 1 1 1 1 100 336 200 300 6097 7 6 1 3741 3 50 100 1 1 -"26115" 4 211 5 1 1 1 1 1 100 336 300 315 5272 5 3 1 1459 1 5 15 1 1 -"26116" 4 211 5 1 1 1 1 1 100 336 315 299 8067 3 6 1 1169 1 5 16 0 0 -"26117" 4 211 5 1 1 1 1 1 100 336 299 224 2942 6 9 1 1040 2 25 75 0 0 -"26118" 4 211 5 1 1 1 1 1 100 336 224 336 1716 1 4 0 2200 3 50 112 0 1 -"26119" 4 211 5 1 1 1 2 1 100 819 100 195 1512 2 9 0 0 5 95 95 0 1 -"26120" 4 211 5 1 1 1 2 1 100 819 195 293 3246 4 10 0 2690 3 50 98 0 1 -"26121" 4 211 5 1 1 1 2 1 100 819 293 278 5279 3 1 0 337 1 5 15 1 0 -"26122" 4 211 5 1 1 1 2 1 100 819 278 417 3638 8 6 1 1263 3 50 139 1 1 -"26123" 4 211 5 1 1 1 2 1 100 819 417 438 5479 5 7 0 400 1 5 21 0 1 -"26124" 4 211 5 1 1 1 2 1 100 819 438 416 4635 6 8 1 380 1 5 22 0 0 -"26125" 4 211 5 1 1 1 2 1 100 819 416 437 2263 7 5 1 465 1 5 21 1 1 -"26126" 4 211 5 1 1 1 2 1 100 819 437 546 1712 1 10 0 957 2 25 109 0 1 -"26127" 4 211 5 1 1 1 2 1 100 819 546 819 2276 9 3 1 1149 3 50 273 1 1 -"26128" 4 211 5 1 1 1 3 1 100 603 100 195 2368 3 5 0 0 5 95 95 0 1 -"26129" 4 211 5 1 1 1 3 1 100 603 195 97 3653 8 9 1 1629 3 50 98 0 0 -"26130" 4 211 5 1 1 1 3 1 100 603 97 189 2668 2 4 0 0 5 95 92 0 1 -"26131" 4 211 5 1 1 1 3 1 100 603 189 198 4923 6 3 1 1710 1 5 9 1 1 -"26132" 4 211 5 1 1 1 3 1 100 603 198 248 2202 7 6 1 1360 2 25 50 1 1 -"26133" 4 211 5 1 1 1 3 1 100 603 248 260 6356 4 2 1 235 1 5 12 1 1 -"26134" 4 211 5 1 1 1 3 1 100 603 260 507 2021 1 8 0 0 5 95 247 0 1 -"26135" 4 211 5 1 1 1 3 1 100 603 507 482 5803 5 7 1 1764 1 5 25 0 0 -"26136" 4 211 5 1 1 1 3 1 100 603 482 603 1805 9 5 1 1461 2 25 121 1 1 -"26137" 4 211 5 1 1 1 4 1 100 1519 100 195 1998 8 7 1 0 5 95 95 1 1 -"26138" 4 211 5 1 1 1 4 1 100 1519 195 244 2596 3 10 0 1246 2 25 49 0 1 -"26139" 4 211 5 1 1 1 4 1 100 1519 244 232 3152 7 9 1 1463 1 5 12 0 0 -"26140" 4 211 5 1 1 1 4 1 100 1519 232 452 2047 9 1 1 0 5 95 220 1 1 -"26141" 4 211 5 1 1 1 4 1 100 1519 452 565 3307 2 3 0 339 2 25 113 0 1 -"26142" 4 211 5 1 1 1 4 1 100 1519 565 1102 3200 1 8 0 0 5 95 537 0 1 -"26143" 4 211 5 1 1 1 4 1 100 1519 1102 1157 2861 5 4 1 667 1 5 55 1 1 -"26144" 4 211 5 1 1 1 4 1 100 1519 1157 1215 2728 4 2 1 180 1 5 58 1 1 -"26145" 4 211 5 1 1 1 4 1 100 1519 1215 1519 1665 6 3 1 504 2 25 304 1 1 -"26146" 4 213 2 0 1 0 1 1 100 30 100 175 11561 2 7 0 1585 2 75 75 0 1 -"26147" 4 213 2 0 1 0 1 1 100 30 175 306 
4402 3 7 0 839 2 75 131 0 1 -"26148" 4 213 2 0 1 0 1 1 100 30 306 597 2596 2 8 0 1885 1 95 291 0 1 -"26149" 4 213 2 0 1 0 1 1 100 30 597 30 2657 8 9 1 2960 1 95 567 0 0 -"26150" 4 213 3 1 1 0 1 0 100 0 100 195 1949 1 3 0 1918 1 95 95 0 1 -"26151" 4 213 3 1 1 0 1 0 100 0 195 380 2848 6 5 1 2110 1 95 185 1 1 -"26152" 4 213 3 1 1 0 1 0 100 0 380 741 2407 2 8 0 2610 1 95 361 0 1 -"26153" 4 213 3 1 1 0 1 0 100 0 741 37 2027 8 9 1 3192 1 95 704 0 0 -"26154" 4 213 3 1 1 0 1 0 100 0 37 72 2189 3 4 0 4442 1 95 35 0 1 -"26155" 4 213 3 1 1 0 1 0 100 0 72 4 4548 5 7 1 1121 1 95 68 0 0 -"26156" 4 213 3 1 1 0 1 0 100 0 4 8 2537 7 4 1 2974 1 95 4 1 1 -"26157" 4 213 3 1 1 0 1 0 100 0 8 0 2365 4 1 0 3026 1 95 8 1 0 -"26158" 4 213 3 1 1 0 2 0 100 0 100 195 2757 8 1 1 2576 1 95 95 1 1 -"26159" 4 213 3 1 1 0 2 0 100 0 195 10 2162 6 2 0 1009 1 95 185 1 0 -"26160" 4 213 3 1 1 0 2 0 100 0 10 0 1758 7 9 1 1080 1 95 10 0 0 -"26161" 4 213 3 1 1 0 3 0 100 0 100 195 1929 7 5 1 1283 1 95 95 1 1 -"26162" 4 213 3 1 1 0 3 0 100 0 195 10 2315 2 1 0 1266 1 95 185 1 0 -"26163" 4 213 3 1 1 0 3 0 100 0 10 20 1755 8 6 1 1165 1 95 10 1 1 -"26164" 4 213 3 1 1 0 3 0 100 0 20 39 2123 4 7 0 1044 1 95 19 0 1 -"26165" 4 213 3 1 1 0 3 0 100 0 39 76 2007 3 10 0 1101 1 95 37 0 1 -"26166" 4 213 3 1 1 0 3 0 100 0 76 4 1798 6 8 1 1086 1 95 72 0 0 -"26167" 4 213 3 1 1 0 3 0 100 0 4 8 1463 9 2 1 1567 1 95 4 1 1 -"26168" 4 213 3 1 1 0 3 0 100 0 8 0 1794 5 3 0 1074 1 95 8 1 0 -"26169" 4 213 3 1 1 0 4 1 100 1 100 195 2317 2 3 0 1188 1 95 95 0 1 -"26170" 4 213 3 1 1 0 4 1 100 1 195 380 3817 7 4 1 1137 1 95 185 1 1 -"26171" 4 213 3 1 1 0 4 1 100 1 380 741 1832 3 1 1 751 1 95 361 1 1 -"26172" 4 213 3 1 1 0 4 1 100 1 741 1445 2689 1 9 0 726 1 95 704 0 1 -"26173" 4 213 3 1 1 0 4 1 100 1 1445 2818 1529 8 7 1 806 1 95 1373 1 1 -"26174" 4 213 3 1 1 0 4 1 100 1 2818 141 2612 9 2 0 717 1 95 2677 1 0 -"26175" 4 213 3 1 1 0 4 1 100 1 141 7 1256 5 6 1 1026 1 95 134 0 0 -"26176" 4 213 3 1 1 0 4 1 100 1 7 14 1462 6 8 0 854 1 95 7 0 1 -"26177" 4 213 3 1 1 0 4 1 100 1 14 1 1451 4 7 1 1075 1 95 13 0 0 -"26178" 4 213 4 0 1 1 1 1 100 131 100 125 4799 8 3 1 1072 2 25 25 1 1 -"26179" 4 213 4 0 1 1 1 1 100 131 125 131 3066 3 7 0 4661 1 5 6 0 1 -"26180" 4 213 4 0 1 1 1 1 100 131 131 138 1996 8 2 1 3435 1 5 7 1 1 -"26181" 4 213 4 0 1 1 1 1 100 131 138 131 1765 2 1 0 1750 1 5 7 1 0 -"26182" 4 213 5 1 1 1 1 1 100 104 100 105 2319 9 7 1 1367 1 5 5 1 1 -"26183" 4 213 5 1 1 1 1 1 100 104 105 110 1603 4 8 0 1508 1 5 5 0 1 -"26184" 4 213 5 1 1 1 1 1 100 104 110 116 1969 8 2 1 986 1 5 6 1 1 -"26185" 4 213 5 1 1 1 1 1 100 104 116 110 1136 2 1 0 1303 1 5 6 1 0 -"26186" 4 213 5 1 1 1 1 1 100 104 110 116 1231 7 6 1 874 1 5 6 1 1 -"26187" 4 213 5 1 1 1 1 1 100 104 116 110 1549 5 3 0 945 1 5 6 1 0 -"26188" 4 213 5 1 1 1 1 1 100 104 110 104 1417 3 6 1 1156 1 5 6 0 0 -"26189" 4 213 5 1 1 1 1 1 100 104 104 99 1722 6 9 1 1382 1 5 5 0 0 -"26190" 4 213 5 1 1 1 1 1 100 104 99 104 1777 1 4 0 770 1 5 5 0 1 -"26191" 4 213 5 1 1 1 2 1 100 105 100 105 1996 2 9 0 1027 1 5 5 0 1 -"26192" 4 213 5 1 1 1 2 1 100 105 105 100 1087 4 10 1 729 1 5 5 0 0 -"26193" 4 213 5 1 1 1 2 1 100 105 100 95 1179 3 1 0 609 1 5 5 1 0 -"26194" 4 213 5 1 1 1 2 1 100 105 95 100 809 8 6 1 776 1 5 5 1 1 -"26195" 4 213 5 1 1 1 2 1 100 105 100 95 2848 5 7 1 1054 1 5 5 0 0 -"26196" 4 213 5 1 1 1 2 1 100 105 95 90 1753 6 8 1 1052 1 5 5 0 0 -"26197" 4 213 5 1 1 1 2 1 100 105 90 95 767 7 5 1 752 1 5 5 1 1 -"26198" 4 213 5 1 1 1 2 1 100 105 95 100 1602 1 10 0 1620 1 5 5 0 1 -"26199" 4 213 5 1 1 1 2 1 100 105 100 105 1492 9 3 1 
786 1 5 5 1 1 -"26200" 4 213 5 1 1 1 3 1 100 116 100 105 2102 3 5 0 1495 1 5 5 0 1 -"26201" 4 213 5 1 1 1 3 1 100 116 105 100 1077 8 9 1 776 1 5 5 0 0 -"26202" 4 213 5 1 1 1 3 1 100 116 100 105 720 2 4 0 731 1 5 5 0 1 -"26203" 4 213 5 1 1 1 3 1 100 116 105 110 1461 6 3 1 973 1 5 5 1 1 -"26204" 4 213 5 1 1 1 3 1 100 116 110 116 2964 7 6 1 1124 1 5 6 1 1 -"26205" 4 213 5 1 1 1 3 1 100 116 116 110 2099 4 2 0 1373 1 5 6 1 0 -"26206" 4 213 5 1 1 1 3 1 100 116 110 116 1961 1 8 0 601 1 5 6 0 1 -"26207" 4 213 5 1 1 1 3 1 100 116 116 110 1298 5 7 1 706 1 5 6 0 0 -"26208" 4 213 5 1 1 1 3 1 100 116 110 116 2246 9 5 1 632 1 5 6 1 1 -"26209" 4 213 5 1 1 1 4 1 100 113 100 105 1658 8 7 1 1072 1 5 5 1 1 -"26210" 4 213 5 1 1 1 4 1 100 113 105 110 1043 3 10 0 935 1 5 5 0 1 -"26211" 4 213 5 1 1 1 4 1 100 113 110 104 1920 7 9 1 538 1 5 6 0 0 -"26212" 4 213 5 1 1 1 4 1 100 113 104 109 734 9 1 1 2280 1 5 5 1 1 -"26213" 4 213 5 1 1 1 4 1 100 113 109 114 1193 2 3 0 1334 1 5 5 0 1 -"26214" 4 213 5 1 1 1 4 1 100 113 114 120 1564 1 8 0 975 1 5 6 0 1 -"26215" 4 213 5 1 1 1 4 1 100 113 120 114 1470 5 4 0 1349 1 5 6 1 0 -"26216" 4 213 5 1 1 1 4 1 100 113 114 108 1514 4 2 0 655 1 5 6 1 0 -"26217" 4 213 5 1 1 1 4 1 100 113 108 113 1045 6 3 1 793 1 5 5 1 1 -"26218" 4 217 2 0 1 0 1 1 100 4 100 50 7501 2 7 1 1054 3 50 50 0 0 -"26219" 4 217 2 0 1 0 1 1 100 4 50 53 2273 3 7 0 0 5 5 3 0 1 -"26220" 4 217 2 0 1 0 1 1 100 4 53 80 3499 2 8 0 1202 3 50 27 0 1 -"26221" 4 217 2 0 1 0 1 1 100 4 80 4 2655 8 9 1 3414 1 95 76 0 0 -"26222" 4 217 3 1 1 0 1 0 100 1 100 195 2128 1 3 0 1336 1 95 95 0 1 -"26223" 4 217 3 1 1 0 1 0 100 1 195 293 2878 6 5 1 971 3 50 98 1 1 -"26224" 4 217 3 1 1 0 1 0 100 1 293 15 1242 2 8 1 3108 1 95 278 0 0 -"26225" 4 217 3 1 1 0 1 0 100 1 15 1 2549 8 9 1 4633 1 95 14 0 0 -"26226" 4 217 3 1 1 0 2 0 100 0 100 195 3700 8 1 1 3514 1 95 95 1 1 -"26227" 4 217 3 1 1 0 2 0 100 0 195 10 3368 6 2 0 2749 1 95 185 1 0 -"26228" 4 217 3 1 1 0 2 0 100 0 10 0 655 7 9 1 2381 1 95 10 0 0 -"26229" 4 217 3 1 1 0 3 0 100 0 100 195 2185 7 5 1 2750 1 95 95 1 1 -"26230" 4 217 3 1 1 0 3 0 100 0 195 10 3529 2 1 0 3327 1 95 185 1 0 -"26231" 4 217 3 1 1 0 3 0 100 0 10 9 3301 8 6 0 3270 5 5 1 1 0 -"26232" 4 217 3 1 1 0 3 0 100 0 9 18 2080 4 7 0 4018 1 95 9 0 1 -"26233" 4 217 3 1 1 0 3 0 100 0 18 35 2704 3 10 0 1381 1 95 17 0 1 -"26234" 4 217 3 1 1 0 3 0 100 0 35 2 4440 6 8 1 3195 1 95 33 0 0 -"26235" 4 217 3 1 1 0 3 0 100 0 2 0 2496 9 2 0 567 1 95 2 1 0 -"26236" 4 217 3 1 1 0 4 0 100 0 100 195 2524 2 3 0 2361 1 95 95 0 1 -"26237" 4 217 3 1 1 0 4 0 100 0 195 341 3550 7 4 1 4771 2 75 146 1 1 -"26238" 4 217 3 1 1 0 4 0 100 0 341 17 6197 3 1 0 636 1 95 324 1 0 -"26239" 4 217 3 1 1 0 4 0 100 0 17 33 2042 1 9 0 702 1 95 16 0 1 -"26240" 4 217 3 1 1 0 4 0 100 0 33 64 2419 8 7 1 557 1 95 31 1 1 -"26241" 4 217 3 1 1 0 4 0 100 0 64 3 2080 9 2 0 690 1 95 61 1 0 -"26242" 4 217 3 1 1 0 4 0 100 0 3 0 1598 5 6 1 1320 1 95 3 0 0 -"26243" 4 217 4 0 1 1 1 1 100 100 100 95 1048 8 3 0 756 1 5 5 1 0 -"26244" 4 217 4 0 1 1 1 1 100 100 95 100 1317 3 7 0 453 1 5 5 0 1 -"26245" 4 217 4 0 1 1 1 1 100 100 100 105 2388 8 2 1 2560 1 5 5 1 1 -"26246" 4 217 4 0 1 1 1 1 100 100 105 100 3315 2 1 0 3844 1 5 5 1 0 -"26247" 4 217 5 1 1 1 1 1 100 152 100 105 5781 9 7 1 1351 1 5 5 1 1 -"26248" 4 217 5 1 1 1 1 1 100 152 105 110 2116 4 8 0 2421 1 5 5 0 1 -"26249" 4 217 5 1 1 1 1 1 100 152 110 138 1343 8 2 1 3205 2 25 28 1 1 -"26250" 4 217 5 1 1 1 1 1 100 152 138 131 2842 2 1 0 744 1 5 7 1 0 -"26251" 4 217 5 1 1 1 1 1 100 152 131 138 5338 7 6 1 3921 1 5 7 1 1 -"26252" 4 217 5 1 1 1 
1 1 100 152 138 131 5900 5 3 0 617 1 5 7 1 0 -"26253" 4 217 5 1 1 1 1 1 100 152 131 138 1129 3 6 0 985 1 5 7 0 1 -"26254" 4 217 5 1 1 1 1 1 100 152 138 145 1288 6 9 0 1760 1 5 7 0 1 -"26255" 4 217 5 1 1 1 1 1 100 152 145 152 2641 1 4 0 618 1 5 7 0 1 -"26256" 4 217 5 1 1 1 2 1 100 93 100 105 1738 2 9 0 596 1 5 5 0 1 -"26257" 4 217 5 1 1 1 2 1 100 93 105 110 2589 4 10 0 592 1 5 5 0 1 -"26258" 4 217 5 1 1 1 2 1 100 93 110 104 1369 3 1 0 701 1 5 6 1 0 -"26259" 4 217 5 1 1 1 2 1 100 93 104 109 1273 8 6 1 482 1 5 5 1 1 -"26260" 4 217 5 1 1 1 2 1 100 93 109 104 3096 5 7 1 499 1 5 5 0 0 -"26261" 4 217 5 1 1 1 2 1 100 93 104 99 1920 6 8 1 587 1 5 5 0 0 -"26262" 4 217 5 1 1 1 2 1 100 93 99 94 1508 7 5 0 593 1 5 5 1 0 -"26263" 4 217 5 1 1 1 2 1 100 93 94 89 2138 1 10 1 554 1 5 5 0 0 -"26264" 4 217 5 1 1 1 2 1 100 93 89 93 2413 9 3 1 503 1 5 4 1 1 -"26265" 4 217 5 1 1 1 3 1 100 141 100 105 1930 3 5 0 436 1 5 5 0 1 -"26266" 4 217 5 1 1 1 3 1 100 141 105 100 3548 8 9 1 681 1 5 5 0 0 -"26267" 4 217 5 1 1 1 3 1 100 141 100 105 886 2 4 0 705 1 5 5 0 1 -"26268" 4 217 5 1 1 1 3 1 100 141 105 110 2188 6 3 1 2018 1 5 5 1 1 -"26269" 4 217 5 1 1 1 3 1 100 141 110 116 1175 7 6 1 1173 1 5 6 1 1 -"26270" 4 217 5 1 1 1 3 1 100 141 116 122 1018 4 2 1 967 1 5 6 1 1 -"26271" 4 217 5 1 1 1 3 1 100 141 122 128 1174 1 8 0 1548 1 5 6 0 1 -"26272" 4 217 5 1 1 1 3 1 100 141 128 134 6847 5 7 0 663 1 5 6 0 1 -"26273" 4 217 5 1 1 1 3 1 100 141 134 141 2037 9 5 1 626 1 5 7 1 1 -"26274" 4 217 5 1 1 1 4 1 100 126 100 105 3348 8 7 1 651 1 5 5 1 1 -"26275" 4 217 5 1 1 1 4 1 100 126 105 110 1649 3 10 0 1120 1 5 5 0 1 -"26276" 4 217 5 1 1 1 4 1 100 126 110 104 4809 7 9 1 290 1 5 6 0 0 -"26277" 4 217 5 1 1 1 4 1 100 126 104 99 1425 9 1 0 555 1 5 5 1 0 -"26278" 4 217 5 1 1 1 4 1 100 126 99 104 7317 2 3 0 3166 1 5 5 0 1 -"26279" 4 217 5 1 1 1 4 1 100 126 104 109 1341 1 8 0 842 1 5 5 0 1 -"26280" 4 217 5 1 1 1 4 1 100 126 109 114 2959 5 4 1 976 1 5 5 1 1 -"26281" 4 217 5 1 1 1 4 1 100 126 114 120 1431 4 2 1 840 1 5 6 1 1 -"26282" 4 217 5 1 1 1 4 1 100 126 120 126 4037 6 3 1 1955 1 5 6 1 1 -"26283" 4 219 2 0 1 0 1 1 100 96 100 175 13789 2 7 0 866 2 75 75 0 1 -"26284" 4 219 2 0 1 0 1 1 100 96 175 219 7822 3 7 0 367 4 25 44 0 1 -"26285" 4 219 2 0 1 0 1 1 100 96 219 383 4755 2 8 0 70 2 75 164 0 1 -"26286" 4 219 2 0 1 0 1 1 100 96 383 96 2904 8 9 1 867 2 75 287 0 0 -"26287" 4 219 3 1 1 0 1 1 100 427 100 175 3604 1 3 0 773 2 75 75 0 1 -"26288" 4 219 3 1 1 0 1 1 100 427 175 263 4054 6 5 1 771 3 50 88 1 1 -"26289" 4 219 3 1 1 0 1 1 100 427 263 197 5092 2 8 1 1000 4 25 66 0 0 -"26290" 4 219 3 1 1 0 1 1 100 427 197 246 4669 8 9 0 1493 4 25 49 0 1 -"26291" 4 219 3 1 1 0 1 1 100 427 246 308 2803 3 4 0 547 4 25 62 0 1 -"26292" 4 219 3 1 1 0 1 1 100 427 308 154 3495 5 7 1 1094 3 50 154 0 0 -"26293" 4 219 3 1 1 0 1 1 100 427 154 231 4511 7 4 1 534 3 50 77 1 1 -"26294" 4 219 3 1 1 0 1 1 100 427 231 219 5034 4 1 0 593 5 5 12 1 0 -"26295" 4 219 3 1 1 0 1 1 100 427 219 427 2554 9 6 1 1086 1 95 208 1 1 -"26296" 4 219 3 1 1 0 2 1 100 216 100 150 2280 8 1 1 2593 3 50 50 1 1 -"26297" 4 219 3 1 1 0 2 1 100 216 150 112 4547 6 2 0 641 4 25 38 1 0 -"26298" 4 219 3 1 1 0 2 1 100 216 112 84 4207 7 9 1 1261 4 25 28 0 0 -"26299" 4 219 3 1 1 0 2 1 100 216 84 126 2662 2 10 0 820 3 50 42 0 1 -"26300" 4 219 3 1 1 0 2 1 100 216 126 94 5931 5 3 0 1207 4 25 32 1 0 -"26301" 4 219 3 1 1 0 2 1 100 216 94 47 3595 4 2 0 502 3 50 47 1 0 -"26302" 4 219 3 1 1 0 2 1 100 216 47 82 2179 3 5 0 544 2 75 35 0 1 -"26303" 4 219 3 1 1 0 2 1 100 216 82 144 1876 9 4 1 560 2 75 62 1 1 -"26304" 4 
219 3 1 1 0 2 1 100 216 144 216 1747 1 7 0 997 3 50 72 0 1 -"26305" 4 219 3 1 1 0 3 1 100 64 100 150 2750 7 5 1 2280 3 50 50 1 1 -"26306" 4 219 3 1 1 0 3 1 100 64 150 75 2561 2 1 0 2013 3 50 75 1 0 -"26307" 4 219 3 1 1 0 3 1 100 64 75 146 2779 8 6 1 1148 1 95 71 1 1 -"26308" 4 219 3 1 1 0 3 1 100 64 146 183 3963 4 7 0 564 4 25 37 0 1 -"26309" 4 219 3 1 1 0 3 1 100 64 183 137 4119 3 10 1 1785 4 25 46 0 0 -"26310" 4 219 3 1 1 0 3 1 100 64 137 34 2385 6 8 1 1965 2 75 103 0 0 -"26311" 4 219 3 1 1 0 3 1 100 64 34 66 2405 9 2 1 496 1 95 32 1 1 -"26312" 4 219 3 1 1 0 3 1 100 64 66 33 4710 5 3 0 628 3 50 33 1 0 -"26313" 4 219 3 1 1 0 3 1 100 64 33 64 2427 1 10 0 581 1 95 31 0 1 -"26314" 4 219 3 1 1 0 4 1 100 471 100 150 1886 2 3 0 1202 3 50 50 0 1 -"26315" 4 219 3 1 1 0 4 1 100 471 150 188 1534 7 4 1 620 4 25 38 1 1 -"26316" 4 219 3 1 1 0 4 1 100 471 188 141 4108 3 1 0 1718 4 25 47 1 0 -"26317" 4 219 3 1 1 0 4 1 100 471 141 275 2329 1 9 0 584 1 95 134 0 1 -"26318" 4 219 3 1 1 0 4 1 100 471 275 536 2201 8 7 1 878 1 95 261 1 1 -"26319" 4 219 3 1 1 0 4 1 100 471 536 670 1801 9 2 1 530 4 25 134 1 1 -"26320" 4 219 3 1 1 0 4 1 100 471 670 838 4630 5 6 0 1807 4 25 168 0 1 -"26321" 4 219 3 1 1 0 4 1 100 471 838 628 2806 6 8 1 975 4 25 210 0 0 -"26322" 4 219 3 1 1 0 4 1 100 471 628 471 3438 4 7 1 1712 4 25 157 0 0 -"26323" 4 219 4 0 1 1 1 1 100 82 100 105 4588 8 3 1 3415 1 5 5 1 1 -"26324" 4 219 4 0 1 1 1 1 100 82 105 131 4778 3 7 0 4052 2 25 26 0 1 -"26325" 4 219 4 0 1 1 1 1 100 82 131 164 1744 8 2 1 3211 2 25 33 1 1 -"26326" 4 219 4 0 1 1 1 1 100 82 164 82 1735 2 1 0 1035 3 50 82 1 0 -"26327" 4 219 5 1 1 1 1 1 100 506 100 175 2779 9 7 1 564 4 75 75 1 1 -"26328" 4 219 5 1 1 1 1 1 100 506 175 219 3199 4 8 0 1429 2 25 44 0 1 -"26329" 4 219 5 1 1 1 1 1 100 506 219 274 1974 8 2 1 1141 2 25 55 1 1 -"26330" 4 219 5 1 1 1 1 1 100 506 274 205 1740 2 1 0 559 2 25 69 1 0 -"26331" 4 219 5 1 1 1 1 1 100 506 205 256 4033 7 6 1 423 2 25 51 1 1 -"26332" 4 219 5 1 1 1 1 1 100 506 256 243 2360 5 3 0 765 1 5 13 1 0 -"26333" 4 219 5 1 1 1 1 1 100 506 243 304 1430 3 6 0 1096 2 25 61 0 1 -"26334" 4 219 5 1 1 1 1 1 100 506 304 289 3777 6 9 1 667 1 5 15 0 0 -"26335" 4 219 5 1 1 1 1 1 100 506 289 506 1112 1 4 0 922 4 75 217 0 1 -"26336" 4 219 5 1 1 1 2 1 100 563 100 175 2832 2 9 0 1183 4 75 75 0 1 -"26337" 4 219 5 1 1 1 2 1 100 563 175 184 3548 4 10 0 545 1 5 9 0 1 -"26338" 4 219 5 1 1 1 2 1 100 563 184 193 2868 3 1 1 521 1 5 9 1 1 -"26339" 4 219 5 1 1 1 2 1 100 563 193 241 1485 8 6 1 716 2 25 48 1 1 -"26340" 4 219 5 1 1 1 2 1 100 563 241 253 2799 5 7 0 656 1 5 12 0 1 -"26341" 4 219 5 1 1 1 2 1 100 563 253 240 2429 6 8 1 863 1 5 13 0 0 -"26342" 4 219 5 1 1 1 2 1 100 563 240 300 2579 7 5 1 1518 2 25 60 1 1 -"26343" 4 219 5 1 1 1 2 1 100 563 300 450 1611 1 10 0 540 3 50 150 0 1 -"26344" 4 219 5 1 1 1 2 1 100 563 450 563 1415 9 3 1 2458 2 25 113 1 1 -"26345" 4 219 5 1 1 1 3 1 100 310 100 95 3044 3 5 1 817 1 5 5 0 0 -"26346" 4 219 5 1 1 1 3 1 100 310 95 71 1600 8 9 1 674 2 25 24 0 0 -"26347" 4 219 5 1 1 1 3 1 100 310 71 107 1351 2 4 0 893 3 50 36 0 1 -"26348" 4 219 5 1 1 1 3 1 100 310 107 134 1719 6 3 1 531 2 25 27 1 1 -"26349" 4 219 5 1 1 1 3 1 100 310 134 141 1856 7 6 1 1652 1 5 7 1 1 -"26350" 4 219 5 1 1 1 3 1 100 310 141 106 1827 4 2 0 794 2 25 35 1 0 -"26351" 4 219 5 1 1 1 3 1 100 310 106 186 1521 1 8 0 686 4 75 80 0 1 -"26352" 4 219 5 1 1 1 3 1 100 310 186 177 7426 5 7 1 2447 1 5 9 0 0 -"26353" 4 219 5 1 1 1 3 1 100 310 177 310 1392 9 5 1 643 4 75 133 1 1 -"26354" 4 219 5 1 1 1 4 1 100 525 100 150 1436 8 7 1 1245 3 50 50 1 1 
-"26355" 4 219 5 1 1 1 4 1 100 525 150 225 1542 3 10 0 563 3 50 75 0 1 -"26356" 4 219 5 1 1 1 4 1 100 525 225 236 4485 7 9 0 894 1 5 11 0 1 -"26357" 4 219 5 1 1 1 4 1 100 525 236 354 1240 9 1 1 1291 3 50 118 1 1 -"26358" 4 219 5 1 1 1 4 1 100 525 354 443 1243 2 3 0 950 2 25 89 0 1 -"26359" 4 219 5 1 1 1 4 1 100 525 443 554 1110 1 8 0 647 2 25 111 0 1 -"26360" 4 219 5 1 1 1 4 1 100 525 554 582 3693 5 4 1 747 1 5 28 1 1 -"26361" 4 219 5 1 1 1 4 1 100 525 582 553 1765 4 2 0 614 1 5 29 1 0 -"26362" 4 219 5 1 1 1 4 1 100 525 553 525 1614 6 3 0 615 1 5 28 1 0 -"26363" 4 225 2 0 1 0 1 1 100 7 100 150 5971 2 7 0 1244 3 50 50 0 1 -"26364" 4 225 2 0 1 0 1 1 100 7 150 75 8774 3 7 1 690 3 50 75 0 0 -"26365" 4 225 2 0 1 0 1 1 100 7 75 146 1894 2 8 0 2090 1 95 71 0 1 -"26366" 4 225 2 0 1 0 1 1 100 7 146 7 2259 8 9 1 1725 1 95 139 0 0 -"26367" 4 225 3 1 1 0 1 0 100 0 100 175 10025 1 3 0 1778 2 75 75 0 1 -"26368" 4 225 3 1 1 0 1 0 100 0 175 9 1858 6 5 0 1735 1 95 166 1 0 -"26369" 4 225 3 1 1 0 1 0 100 0 9 0 2337 2 8 1 4288 1 95 9 0 0 -"26370" 4 225 3 1 1 0 2 0 100 0 100 5 6570 8 1 0 2007 1 95 95 1 0 -"26371" 4 225 3 1 1 0 2 0 100 0 5 8 3629 6 2 1 438 3 50 3 1 1 -"26372" 4 225 3 1 1 0 2 0 100 0 8 2 5166 7 9 1 526 2 75 6 0 0 -"26373" 4 225 3 1 1 0 2 0 100 0 2 4 2930 2 10 0 2372 1 95 2 0 1 -"26374" 4 225 3 1 1 0 2 0 100 0 4 0 2184 5 3 0 2350 1 95 4 1 0 -"26375" 4 225 3 1 1 0 3 0 100 1 100 195 4621 7 5 1 1935 1 95 95 1 1 -"26376" 4 225 3 1 1 0 3 0 100 1 195 10 2368 2 1 0 933 1 95 185 1 0 -"26377" 4 225 3 1 1 0 3 0 100 1 10 20 2686 8 6 1 1948 1 95 10 1 1 -"26378" 4 225 3 1 1 0 3 0 100 1 20 1 3652 4 7 1 1108 1 95 19 0 0 -"26379" 4 225 3 1 1 0 4 1 100 1 100 195 2401 2 3 0 2164 1 95 95 0 1 -"26380" 4 225 3 1 1 0 4 1 100 1 195 380 2386 7 4 1 886 1 95 185 1 1 -"26381" 4 225 3 1 1 0 4 1 100 1 380 19 2226 3 1 0 709 1 95 361 1 0 -"26382" 4 225 3 1 1 0 4 1 100 1 19 37 5136 1 9 0 593 1 95 18 0 1 -"26383" 4 225 3 1 1 0 4 1 100 1 37 72 3114 8 7 1 780 1 95 35 1 1 -"26384" 4 225 3 1 1 0 4 1 100 1 72 140 3980 9 2 1 692 1 95 68 1 1 -"26385" 4 225 3 1 1 0 4 1 100 1 140 273 3720 5 6 0 652 1 95 133 0 1 -"26386" 4 225 3 1 1 0 4 1 100 1 273 14 5074 6 8 1 558 1 95 259 0 0 -"26387" 4 225 3 1 1 0 4 1 100 1 14 1 3601 4 7 1 731 1 95 13 0 0 -"26388" 4 225 4 0 1 1 1 1 100 118 100 150 19845 8 3 1 1019 3 50 50 1 1 -"26389" 4 225 4 0 1 1 1 1 100 118 150 158 13464 3 7 0 1260 1 5 8 0 1 -"26390" 4 225 4 0 1 1 1 1 100 118 158 237 1989 8 2 1 1364 3 50 79 1 1 -"26391" 4 225 4 0 1 1 1 1 100 118 237 118 2360 2 1 0 813 3 50 119 1 0 -"26392" 4 225 5 1 1 1 1 1 100 907 100 195 2726 9 7 1 1171 5 95 95 1 1 -"26393" 4 225 5 1 1 1 1 1 100 907 195 293 1945 4 8 0 882 3 50 98 0 1 -"26394" 4 225 5 1 1 1 1 1 100 907 293 440 5461 8 2 1 631 3 50 147 1 1 -"26395" 4 225 5 1 1 1 1 1 100 907 440 220 2551 2 1 0 467 3 50 220 1 0 -"26396" 4 225 5 1 1 1 1 1 100 907 220 330 3018 7 6 1 4033 3 50 110 1 1 -"26397" 4 225 5 1 1 1 1 1 100 907 330 413 3262 5 3 1 872 2 25 83 1 1 -"26398" 4 225 5 1 1 1 1 1 100 907 413 620 2332 3 6 0 923 3 50 207 0 1 -"26399" 4 225 5 1 1 1 1 1 100 907 620 465 4012 6 9 1 1155 2 25 155 0 0 -"26400" 4 225 5 1 1 1 1 1 100 907 465 907 2113 1 4 0 1235 5 95 442 0 1 -"26401" 4 225 5 1 1 1 2 1 100 240 100 150 2718 2 9 0 483 3 50 50 0 1 -"26402" 4 225 5 1 1 1 2 1 100 240 150 225 3425 4 10 0 1060 3 50 75 0 1 -"26403" 4 225 5 1 1 1 2 1 100 240 225 112 3255 3 1 0 337 3 50 113 1 0 -"26404" 4 225 5 1 1 1 2 1 100 240 112 168 1606 8 6 1 983 3 50 56 1 1 -"26405" 4 225 5 1 1 1 2 1 100 240 168 84 1622 5 7 1 614 3 50 84 0 0 -"26406" 4 225 5 1 1 1 2 1 100 240 84 42 3078 
6 8 1 394 3 50 42 0 0 -"26407" 4 225 5 1 1 1 2 1 100 240 42 63 2141 7 5 1 2876 3 50 21 1 1 -"26408" 4 225 5 1 1 1 2 1 100 240 63 123 2024 1 10 0 54 5 95 60 0 1 -"26409" 4 225 5 1 1 1 2 1 100 240 123 240 1552 9 3 1 644 5 95 117 1 1 -"26410" 4 225 5 1 1 1 3 1 100 115 100 150 2022 3 5 0 3324 3 50 50 0 1 -"26411" 4 225 5 1 1 1 3 1 100 115 150 37 20755 8 9 1 2749 4 75 113 0 0 -"26412" 4 225 5 1 1 1 3 1 100 115 37 56 5712 2 4 0 3798 3 50 19 0 1 -"26413" 4 225 5 1 1 1 3 1 100 115 56 84 1510 6 3 1 1604 3 50 28 1 1 -"26414" 4 225 5 1 1 1 3 1 100 115 84 126 1189 7 6 1 722 3 50 42 1 1 -"26415" 4 225 5 1 1 1 3 1 100 115 126 31 1277 4 2 0 470 4 75 95 1 0 -"26416" 4 225 5 1 1 1 3 1 100 115 31 47 3261 1 8 0 1392 3 50 16 0 1 -"26417" 4 225 5 1 1 1 3 1 100 115 47 59 3699 5 7 0 1681 2 25 12 0 1 -"26418" 4 225 5 1 1 1 3 1 100 115 59 115 1630 9 5 1 606 5 95 56 1 1 -"26419" 4 225 5 1 1 1 4 1 100 455 100 195 1905 8 7 1 612 5 95 95 1 1 -"26420" 4 225 5 1 1 1 4 1 100 455 195 380 1988 3 10 0 772 5 95 185 0 1 -"26421" 4 225 5 1 1 1 4 1 100 455 380 95 11404 7 9 1 2372 4 75 285 0 0 -"26422" 4 225 5 1 1 1 4 1 100 455 95 185 22680 9 1 1 2087 5 95 90 1 1 -"26423" 4 225 5 1 1 1 4 1 100 455 185 324 9816 2 3 0 1836 4 75 139 0 1 -"26424" 4 225 5 1 1 1 4 1 100 455 324 486 13753 1 8 0 1512 3 50 162 0 1 -"26425" 4 225 5 1 1 1 4 1 100 455 486 729 1997 5 4 1 860 3 50 243 1 1 -"26426" 4 225 5 1 1 1 4 1 100 455 729 364 1760 4 2 0 882 3 50 365 1 0 -"26427" 4 225 5 1 1 1 4 1 100 455 364 455 2294 6 3 1 1518 2 25 91 1 1 -"26428" 4 230 2 0 1 0 1 1 100 29 100 150 5547 2 7 0 705 3 50 50 0 1 -"26429" 4 230 2 0 1 0 1 1 100 29 150 293 10133 3 7 0 3040 1 95 143 0 1 -"26430" 4 230 2 0 1 0 1 1 100 29 293 571 3428 2 8 0 1183 1 95 278 0 1 -"26431" 4 230 2 0 1 0 1 1 100 29 571 29 2556 8 9 1 3952 1 95 542 0 0 -"26432" 4 230 3 1 1 0 1 1 100 133 100 195 3628 1 3 0 1045 1 95 95 0 1 -"26433" 4 230 3 1 1 0 1 1 100 133 195 380 5112 6 5 1 1201 1 95 185 1 1 -"26434" 4 230 3 1 1 0 1 1 100 133 380 741 2125 2 8 0 1405 1 95 361 0 1 -"26435" 4 230 3 1 1 0 1 1 100 133 741 37 3416 8 9 1 1058 1 95 704 0 0 -"26436" 4 230 3 1 1 0 1 1 100 133 37 72 3053 3 4 0 673 1 95 35 0 1 -"26437" 4 230 3 1 1 0 1 1 100 133 72 140 3958 5 7 0 767 1 95 68 0 1 -"26438" 4 230 3 1 1 0 1 1 100 133 140 273 6616 7 4 1 730 1 95 133 1 1 -"26439" 4 230 3 1 1 0 1 1 100 133 273 68 4949 4 1 0 1847 2 75 205 1 0 -"26440" 4 230 3 1 1 0 1 1 100 133 68 133 3076 9 6 1 583 1 95 65 1 1 -"26441" 4 230 3 1 1 0 2 0 100 0 100 195 2643 8 1 1 371 1 95 95 1 1 -"26442" 4 230 3 1 1 0 2 0 100 0 195 380 2542 6 2 1 1371 1 95 185 1 1 -"26443" 4 230 3 1 1 0 2 0 100 0 380 19 1968 7 9 1 592 1 95 361 0 0 -"26444" 4 230 3 1 1 0 2 0 100 0 19 37 1945 2 10 0 529 1 95 18 0 1 -"26445" 4 230 3 1 1 0 2 0 100 0 37 2 4502 5 3 0 685 1 95 35 1 0 -"26446" 4 230 3 1 1 0 2 0 100 0 2 0 3450 4 2 0 827 1 95 2 1 0 -"26447" 4 230 3 1 1 0 3 0 100 0 100 195 1723 7 5 1 462 1 95 95 1 1 -"26448" 4 230 3 1 1 0 3 0 100 0 195 10 1902 2 1 0 881 1 95 185 1 0 -"26449" 4 230 3 1 1 0 3 0 100 0 10 20 2428 8 6 1 286 1 95 10 1 1 -"26450" 4 230 3 1 1 0 3 0 100 0 20 39 2141 4 7 0 412 1 95 19 0 1 -"26451" 4 230 3 1 1 0 3 0 100 0 39 76 3014 3 10 0 740 1 95 37 0 1 -"26452" 4 230 3 1 1 0 3 0 100 0 76 4 2526 6 8 1 1227 1 95 72 0 0 -"26453" 4 230 3 1 1 0 3 0 100 0 4 8 4430 9 2 1 312 1 95 4 1 1 -"26454" 4 230 3 1 1 0 3 0 100 0 8 0 2887 5 3 0 1107 1 95 8 1 0 -"26455" 4 230 3 1 1 0 4 0 100 0 100 195 8647 2 3 0 937 1 95 95 0 1 -"26456" 4 230 3 1 1 0 4 0 100 0 195 380 1690 7 4 1 759 1 95 185 1 1 -"26457" 4 230 3 1 1 0 4 0 100 0 380 19 4288 3 1 0 442 1 95 361 1 0 
-"26458" 4 230 3 1 1 0 4 0 100 0 19 37 2394 1 9 0 869 1 95 18 0 1 -"26459" 4 230 3 1 1 0 4 0 100 0 37 72 2001 8 7 1 848 1 95 35 1 1 -"26460" 4 230 3 1 1 0 4 0 100 0 72 140 1596 9 2 1 462 1 95 68 1 1 -"26461" 4 230 3 1 1 0 4 0 100 0 140 7 2477 5 6 1 541 1 95 133 0 0 -"26462" 4 230 3 1 1 0 4 0 100 0 7 0 2768 6 8 1 450 1 95 7 0 0 -"26463" 4 230 4 0 1 1 1 1 100 29 100 150 3840 8 3 1 990 3 50 50 1 1 -"26464" 4 230 4 0 1 1 1 1 100 29 150 293 3604 3 7 0 0 5 95 143 0 1 -"26465" 4 230 4 0 1 1 1 1 100 29 293 571 1830 8 2 1 1821 5 95 278 1 1 -"26466" 4 230 4 0 1 1 1 1 100 29 571 29 2036 2 1 0 2001 5 95 542 1 0 -"26467" 4 230 5 1 1 1 1 1 100 474 100 195 4721 9 7 1 1319 5 95 95 1 1 -"26468" 4 230 5 1 1 1 1 1 100 474 195 293 2558 4 8 0 1650 3 50 98 0 1 -"26469" 4 230 5 1 1 1 1 1 100 474 293 571 2576 8 2 1 1076 5 95 278 1 1 -"26470" 4 230 5 1 1 1 1 1 100 474 571 285 1686 2 1 0 2020 3 50 286 1 0 -"26471" 4 230 5 1 1 1 1 1 100 474 285 556 4497 7 6 1 881 5 95 271 1 1 -"26472" 4 230 5 1 1 1 1 1 100 474 556 278 3732 5 3 0 1969 3 50 278 1 0 -"26473" 4 230 5 1 1 1 1 1 100 474 278 487 4568 3 6 0 1365 4 75 209 0 1 -"26474" 4 230 5 1 1 1 1 1 100 474 487 243 3401 6 9 1 3693 3 50 244 0 0 -"26475" 4 230 5 1 1 1 1 1 100 474 243 474 1994 1 4 0 2521 5 95 231 0 1 -"26476" 4 230 5 1 1 1 2 0 100 1 100 175 3076 2 9 0 1095 4 75 75 0 1 -"26477" 4 230 5 1 1 1 2 0 100 1 175 263 1592 4 10 0 3530 3 50 88 0 1 -"26478" 4 230 5 1 1 1 2 0 100 1 263 513 4671 3 1 1 1335 5 95 250 1 1 -"26479" 4 230 5 1 1 1 2 0 100 1 513 26 5173 8 6 0 333 5 95 487 1 0 -"26480" 4 230 5 1 1 1 2 0 100 1 26 6 7099 5 7 1 839 4 75 20 0 0 -"26481" 4 230 5 1 1 1 2 0 100 1 6 1 1940 6 8 1 405 4 75 5 0 0 -"26482" 4 230 5 1 1 1 3 0 100 1 100 195 1666 3 5 0 1473 5 95 95 0 1 -"26483" 4 230 5 1 1 1 3 0 100 1 195 49 6032 8 9 1 1167 4 75 146 0 0 -"26484" 4 230 5 1 1 1 3 0 100 1 49 2 3565 2 4 1 874 5 95 47 0 0 -"26485" 4 230 5 1 1 1 3 0 100 1 2 1 5029 6 3 0 2700 3 50 1 1 0 -"26486" 4 230 5 1 1 1 4 0 100 1 100 5 4025 8 7 0 1152 5 95 95 1 0 -"26487" 4 230 5 1 1 1 4 0 100 1 5 1 3238 3 10 1 1178 4 75 4 0 0 -"26488" 4 256 2 0 1 0 1 1 100 29 100 150 7489 2 7 0 907 3 50 50 0 1 -"26489" 4 256 2 0 1 0 1 1 100 29 150 293 8676 3 7 0 3725 1 95 143 0 1 -"26490" 4 256 2 0 1 0 1 1 100 29 293 571 2641 2 8 0 1441 1 95 278 0 1 -"26491" 4 256 2 0 1 0 1 1 100 29 571 29 7122 8 9 1 2013 1 95 542 0 0 -"26492" 4 256 3 1 1 0 1 1 100 1845 100 195 6880 1 3 0 2278 1 95 95 0 1 -"26493" 4 256 3 1 1 0 1 1 100 1845 195 380 2383 6 5 1 1371 1 95 185 1 1 -"26494" 4 256 3 1 1 0 1 1 100 1845 380 665 2271 2 8 0 2657 2 75 285 0 1 -"26495" 4 256 3 1 1 0 1 1 100 1845 665 332 1927 8 9 1 529 3 50 333 0 0 -"26496" 4 256 3 1 1 0 1 1 100 1845 332 647 2069 3 4 0 937 1 95 315 0 1 -"26497" 4 256 3 1 1 0 1 1 100 1845 647 1262 5898 5 7 0 703 1 95 615 0 1 -"26498" 4 256 3 1 1 0 1 1 100 1845 1262 1893 1603 7 4 1 901 3 50 631 1 1 -"26499" 4 256 3 1 1 0 1 1 100 1845 1893 946 3999 4 1 0 2287 3 50 947 1 0 -"26500" 4 256 3 1 1 0 1 1 100 1845 946 1845 1431 9 6 1 257 1 95 899 1 1 -"26501" 4 256 3 1 1 0 2 1 100 31 100 195 2532 8 1 1 241 1 95 95 1 1 -"26502" 4 256 3 1 1 0 2 1 100 31 195 380 1611 6 2 1 212 1 95 185 1 1 -"26503" 4 256 3 1 1 0 2 1 100 31 380 19 1143 7 9 1 222 1 95 361 0 0 -"26504" 4 256 3 1 1 0 2 1 100 31 19 37 1185 2 10 0 206 1 95 18 0 1 -"26505" 4 256 3 1 1 0 2 1 100 31 37 72 1185 5 3 1 214 1 95 35 1 1 -"26506" 4 256 3 1 1 0 2 1 100 31 72 4 889 4 2 0 196 1 95 68 1 0 -"26507" 4 256 3 1 1 0 2 1 100 31 4 8 1112 3 5 0 769 1 95 4 0 1 -"26508" 4 256 3 1 1 0 2 1 100 31 8 16 1058 9 4 1 238 1 95 8 1 1 -"26509" 4 256 3 1 1 0 2 
1 100 31 16 31 703 1 7 0 299 1 95 15 0 1 -"26510" 4 256 3 1 1 0 3 1 100 6 100 195 2546 7 5 1 318 1 95 95 1 1 -"26511" 4 256 3 1 1 0 3 1 100 6 195 10 908 2 1 0 198 1 95 185 1 0 -"26512" 4 256 3 1 1 0 3 1 100 6 10 20 1434 8 6 1 212 1 95 10 1 1 -"26513" 4 256 3 1 1 0 3 1 100 6 20 39 959 4 7 0 384 1 95 19 0 1 -"26514" 4 256 3 1 1 0 3 1 100 6 39 68 1275 3 10 0 708 2 75 29 0 1 -"26515" 4 256 3 1 1 0 3 1 100 6 68 34 1727 6 8 1 59 3 50 34 0 0 -"26516" 4 256 3 1 1 0 3 1 100 6 34 66 1221 9 2 1 241 1 95 32 1 1 -"26517" 4 256 3 1 1 0 3 1 100 6 66 3 3162 5 3 0 3290 1 95 63 1 0 -"26518" 4 256 3 1 1 0 3 1 100 6 3 6 1363 1 10 0 351 1 95 3 0 1 -"26519" 4 256 3 1 1 0 4 1 100 712 100 195 1443 2 3 0 380 1 95 95 0 1 -"26520" 4 256 3 1 1 0 4 1 100 712 195 293 1223 7 4 1 563 3 50 98 1 1 -"26521" 4 256 3 1 1 0 4 1 100 712 293 146 1005 3 1 0 339 3 50 147 1 0 -"26522" 4 256 3 1 1 0 4 1 100 712 146 285 976 1 9 0 255 1 95 139 0 1 -"26523" 4 256 3 1 1 0 4 1 100 712 285 499 1232 8 7 1 1644 2 75 214 1 1 -"26524" 4 256 3 1 1 0 4 1 100 712 499 973 1043 9 2 1 439 1 95 474 1 1 -"26525" 4 256 3 1 1 0 4 1 100 712 973 730 3055 5 6 1 1660 4 25 243 0 0 -"26526" 4 256 3 1 1 0 4 1 100 712 730 365 2194 6 8 1 631 3 50 365 0 0 -"26527" 4 256 3 1 1 0 4 1 100 712 365 712 1431 4 7 0 240 1 95 347 0 1 -"26528" 4 256 4 0 1 1 1 1 100 164 100 150 3521 8 3 1 1343 3 50 50 1 1 -"26529" 4 256 4 0 1 1 1 1 100 164 150 188 3203 3 7 0 3689 2 25 38 0 1 -"26530" 4 256 4 0 1 1 1 1 100 164 188 329 2014 8 2 1 2749 4 75 141 1 1 -"26531" 4 256 4 0 1 1 1 1 100 164 329 164 1175 2 1 0 3261 3 50 165 1 0 -"26532" 4 256 5 1 1 1 1 1 100 831 100 195 5686 9 7 1 1501 5 95 95 1 1 -"26533" 4 256 5 1 1 1 1 1 100 831 195 244 987 4 8 0 826 2 25 49 0 1 -"26534" 4 256 5 1 1 1 1 1 100 831 244 366 1023 8 2 1 1677 3 50 122 1 1 -"26535" 4 256 5 1 1 1 1 1 100 831 366 183 1354 2 1 0 2440 3 50 183 1 0 -"26536" 4 256 5 1 1 1 1 1 100 831 183 320 1113 7 6 1 42 4 75 137 1 1 -"26537" 4 256 5 1 1 1 1 1 100 831 320 400 4261 5 3 1 272 2 25 80 1 1 -"26538" 4 256 5 1 1 1 1 1 100 831 400 500 856 3 6 0 1061 2 25 100 0 1 -"26539" 4 256 5 1 1 1 1 1 100 831 500 475 1055 6 9 1 307 1 5 25 0 0 -"26540" 4 256 5 1 1 1 1 1 100 831 475 831 961 1 4 0 1218 4 75 356 0 1 -"26541" 4 256 5 1 1 1 2 1 100 1001 100 175 1371 2 9 0 799 4 75 75 0 1 -"26542" 4 256 5 1 1 1 2 1 100 1001 175 184 1079 4 10 0 265 1 5 9 0 1 -"26543" 4 256 5 1 1 1 2 1 100 1001 184 138 514 3 1 0 2759 2 25 46 1 0 -"26544" 4 256 5 1 1 1 2 1 100 1001 138 242 1137 8 6 1 1425 4 75 104 1 1 -"26545" 4 256 5 1 1 1 2 1 100 1001 242 230 1754 5 7 1 221 1 5 12 0 0 -"26546" 4 256 5 1 1 1 2 1 100 1001 230 218 983 6 8 1 294 1 5 12 0 0 -"26547" 4 256 5 1 1 1 2 1 100 1001 218 327 1029 7 5 1 554 3 50 109 1 1 -"26548" 4 256 5 1 1 1 2 1 100 1001 327 572 925 1 10 0 1047 4 75 245 0 1 -"26549" 4 256 5 1 1 1 2 1 100 1001 572 1001 1522 9 3 1 1479 4 75 429 1 1 -"26550" 4 256 5 1 1 1 3 1 100 217 100 125 1197 3 5 0 1162 2 25 25 0 1 -"26551" 4 256 5 1 1 1 3 1 100 217 125 31 1128 8 9 1 993 4 75 94 0 0 -"26552" 4 256 5 1 1 1 3 1 100 217 31 54 1086 2 4 0 376 4 75 23 0 1 -"26553" 4 256 5 1 1 1 3 1 100 217 54 57 1220 6 3 1 271 1 5 3 1 1 -"26554" 4 256 5 1 1 1 3 1 100 217 57 71 951 7 6 1 1151 2 25 14 1 1 -"26555" 4 256 5 1 1 1 3 1 100 217 71 67 1246 4 2 0 224 1 5 4 1 0 -"26556" 4 256 5 1 1 1 3 1 100 217 67 131 875 1 8 0 2992 5 95 64 0 1 -"26557" 4 256 5 1 1 1 3 1 100 217 131 124 1342 5 7 1 252 1 5 7 0 0 -"26558" 4 256 5 1 1 1 3 1 100 217 124 217 917 9 5 1 1335 4 75 93 1 1 -"26559" 4 256 5 1 1 1 4 1 100 550 100 150 1297 8 7 1 1025 3 50 50 1 1 -"26560" 4 256 5 1 1 1 4 1 100 
550 150 225 1165 3 10 0 596 3 50 75 0 1 -"26561" 4 256 5 1 1 1 4 1 100 550 225 112 949 7 9 1 245 3 50 113 0 0 -"26562" 4 256 5 1 1 1 4 1 100 550 112 196 1007 9 1 1 2557 4 75 84 1 1 -"26563" 4 256 5 1 1 1 4 1 100 550 196 294 1360 2 3 0 758 3 50 98 0 1 -"26564" 4 256 5 1 1 1 4 1 100 550 294 441 886 1 8 0 2002 3 50 147 0 1 -"26565" 4 256 5 1 1 1 4 1 100 550 441 463 1800 5 4 1 536 1 5 22 1 1 -"26566" 4 256 5 1 1 1 4 1 100 550 463 440 859 4 2 0 272 1 5 23 1 0 -"26567" 4 256 5 1 1 1 4 1 100 550 440 550 1000 6 3 1 246 2 25 110 1 1 -"26568" 4 257 2 0 1 0 1 1 100 22 100 150 5364 2 7 0 819 3 50 50 0 1 -"26569" 4 257 2 0 1 0 1 1 100 22 150 225 10417 3 7 0 431 3 50 75 0 1 -"26570" 4 257 2 0 1 0 1 1 100 22 225 439 1568 2 8 0 1547 1 95 214 0 1 -"26571" 4 257 2 0 1 0 1 1 100 22 439 22 2007 8 9 1 1516 1 95 417 0 0 -"26572" 4 257 3 1 1 0 1 0 100 1 100 5 6362 1 3 1 1313 1 95 95 0 0 -"26573" 4 257 3 1 1 0 1 0 100 1 5 10 2455 6 5 1 776 1 95 5 1 1 -"26574" 4 257 3 1 1 0 1 0 100 1 10 20 1880 2 8 0 1125 1 95 10 0 1 -"26575" 4 257 3 1 1 0 1 0 100 1 20 1 1516 8 9 1 682 1 95 19 0 0 -"26576" 4 257 3 1 1 0 2 0 100 0 100 195 3147 8 1 1 699 1 95 95 1 1 -"26577" 4 257 3 1 1 0 2 0 100 0 195 49 2256 6 2 0 794 2 75 146 1 0 -"26578" 4 257 3 1 1 0 2 0 100 0 49 2 1225 7 9 1 837 1 95 47 0 0 -"26579" 4 257 3 1 1 0 2 0 100 0 2 4 1266 2 10 0 1036 1 95 2 0 1 -"26580" 4 257 3 1 1 0 2 0 100 0 4 0 1824 5 3 0 991 1 95 4 1 0 -"26581" 4 257 3 1 1 0 3 1 100 28 100 195 2079 7 5 1 1249 1 95 95 1 1 -"26582" 4 257 3 1 1 0 3 1 100 28 195 10 1059 2 1 0 758 1 95 185 1 0 -"26583" 4 257 3 1 1 0 3 1 100 28 10 20 1407 8 6 1 810 1 95 10 1 1 -"26584" 4 257 3 1 1 0 3 1 100 28 20 39 1376 4 7 0 724 1 95 19 0 1 -"26585" 4 257 3 1 1 0 3 1 100 28 39 76 1855 3 10 0 732 1 95 37 0 1 -"26586" 4 257 3 1 1 0 3 1 100 28 76 4 1334 6 8 1 1284 1 95 72 0 0 -"26587" 4 257 3 1 1 0 3 1 100 28 4 8 1388 9 2 1 1103 1 95 4 1 1 -"26588" 4 257 3 1 1 0 3 1 100 28 8 16 2680 5 3 1 1294 1 95 8 1 1 -"26589" 4 257 3 1 1 0 3 1 100 28 16 28 1133 1 10 0 605 2 75 12 0 1 -"26590" 4 257 3 1 1 0 4 1 100 27 100 195 1998 2 3 0 828 1 95 95 0 1 -"26591" 4 257 3 1 1 0 4 1 100 27 195 380 1466 7 4 1 690 1 95 185 1 1 -"26592" 4 257 3 1 1 0 4 1 100 27 380 19 1014 3 1 0 492 1 95 361 1 0 -"26593" 4 257 3 1 1 0 4 1 100 27 19 37 1769 1 9 0 659 1 95 18 0 1 -"26594" 4 257 3 1 1 0 4 1 100 27 37 72 2361 8 7 1 1094 1 95 35 1 1 -"26595" 4 257 3 1 1 0 4 1 100 27 72 140 1623 9 2 1 695 1 95 68 1 1 -"26596" 4 257 3 1 1 0 4 1 100 27 140 273 2715 5 6 0 1431 1 95 133 0 1 -"26597" 4 257 3 1 1 0 4 1 100 27 273 14 2400 6 8 1 1403 1 95 259 0 0 -"26598" 4 257 3 1 1 0 4 1 100 27 14 27 1246 4 7 0 1576 1 95 13 0 1 -"26599" 4 257 4 0 1 1 1 1 100 211 100 150 2762 8 3 1 1228 3 50 50 1 1 -"26600" 4 257 4 0 1 1 1 1 100 211 150 188 2127 3 7 0 1039 2 25 38 0 1 -"26601" 4 257 4 0 1 1 1 1 100 211 188 282 1803 8 2 1 563 3 50 94 1 1 -"26602" 4 257 4 0 1 1 1 1 100 211 282 211 1246 2 1 0 1033 2 25 71 1 0 -"26603" 4 257 5 1 1 1 1 1 100 334 100 150 3710 9 7 1 699 3 50 50 1 1 -"26604" 4 257 5 1 1 1 1 1 100 334 150 225 1583 4 8 0 1170 3 50 75 0 1 -"26605" 4 257 5 1 1 1 1 1 100 334 225 338 1055 8 2 1 691 3 50 113 1 1 -"26606" 4 257 5 1 1 1 1 1 100 334 338 253 1311 2 1 0 1311 2 25 85 1 0 -"26607" 4 257 5 1 1 1 1 1 100 334 253 316 1341 7 6 1 1387 2 25 63 1 1 -"26608" 4 257 5 1 1 1 1 1 100 334 316 237 2590 5 3 0 1241 2 25 79 1 0 -"26609" 4 257 5 1 1 1 1 1 100 334 237 356 3755 3 6 0 853 3 50 119 0 1 -"26610" 4 257 5 1 1 1 1 1 100 334 356 267 1826 6 9 1 1088 2 25 89 0 0 -"26611" 4 257 5 1 1 1 1 1 100 334 267 334 2098 1 4 0 1836 2 25 67 0 1 
-"26612" 4 257 5 1 1 1 2 1 100 558 100 150 1733 2 9 0 1824 3 50 50 0 1 -"26613" 4 257 5 1 1 1 2 1 100 558 150 225 919 4 10 0 905 3 50 75 0 1 -"26614" 4 257 5 1 1 1 2 1 100 558 225 169 1699 3 1 0 992 2 25 56 1 0 -"26615" 4 257 5 1 1 1 2 1 100 558 169 254 1318 8 6 1 701 3 50 85 1 1 -"26616" 4 257 5 1 1 1 2 1 100 558 254 318 4873 5 7 0 1047 2 25 64 0 1 -"26617" 4 257 5 1 1 1 2 1 100 558 318 238 1690 6 8 1 1039 2 25 80 0 0 -"26618" 4 257 5 1 1 1 2 1 100 558 238 357 1635 7 5 1 804 3 50 119 1 1 -"26619" 4 257 5 1 1 1 2 1 100 558 357 446 1499 1 10 0 1074 2 25 89 0 1 -"26620" 4 257 5 1 1 1 2 1 100 558 446 558 908 9 3 1 471 2 25 112 1 1 -"26621" 4 257 5 1 1 1 3 1 100 224 100 150 1664 3 5 0 991 3 50 50 0 1 -"26622" 4 257 5 1 1 1 3 1 100 224 150 75 1956 8 9 1 197 3 50 75 0 0 -"26623" 4 257 5 1 1 1 3 1 100 224 75 113 1465 2 4 0 1073 3 50 38 0 1 -"26624" 4 257 5 1 1 1 3 1 100 224 113 170 4124 6 3 1 1317 3 50 57 1 1 -"26625" 4 257 5 1 1 1 3 1 100 224 170 255 1420 7 6 1 214 3 50 85 1 1 -"26626" 4 257 5 1 1 1 3 1 100 224 255 191 1169 4 2 0 3239 2 25 64 1 0 -"26627" 4 257 5 1 1 1 3 1 100 224 191 239 2458 1 8 0 1667 2 25 48 0 1 -"26628" 4 257 5 1 1 1 3 1 100 224 239 179 1777 5 7 1 1058 2 25 60 0 0 -"26629" 4 257 5 1 1 1 3 1 100 224 179 224 4588 9 5 1 3760 2 25 45 1 1 -"26630" 4 257 5 1 1 1 4 1 100 328 100 150 2166 8 7 1 1253 3 50 50 1 1 -"26631" 4 257 5 1 1 1 4 1 100 328 150 225 1006 3 10 0 1156 3 50 75 0 1 -"26632" 4 257 5 1 1 1 4 1 100 328 225 112 1419 7 9 1 917 3 50 113 0 0 -"26633" 4 257 5 1 1 1 4 1 100 328 112 196 1727 9 1 1 754 4 75 84 1 1 -"26634" 4 257 5 1 1 1 4 1 100 328 196 294 1596 2 3 0 1262 3 50 98 0 1 -"26635" 4 257 5 1 1 1 4 1 100 328 294 279 2511 1 8 1 1581 1 5 15 0 0 -"26636" 4 257 5 1 1 1 4 1 100 328 279 349 1510 5 4 1 1324 2 25 70 1 1 -"26637" 4 257 5 1 1 1 4 1 100 328 349 262 1771 4 2 0 813 2 25 87 1 0 -"26638" 4 257 5 1 1 1 4 1 100 328 262 328 1155 6 3 1 954 2 25 66 1 1 -"26639" 4 259 2 0 1 1 1 1 100 117 100 125 12706 8 3 1 4260 2 25 25 1 1 -"26640" 4 259 2 0 1 1 1 1 100 117 125 156 2918 3 7 0 1433 2 25 31 0 1 -"26641" 4 259 2 0 1 1 1 1 100 117 156 234 2226 8 2 1 789 3 50 78 1 1 -"26642" 4 259 2 0 1 1 1 1 100 117 234 117 1933 2 1 0 727 3 50 117 1 0 -"26643" 4 259 3 1 1 1 1 1 100 392 100 175 9878 9 7 1 566 4 75 75 1 1 -"26644" 4 259 3 1 1 1 1 1 100 392 175 219 1904 4 8 0 1092 2 25 44 0 1 -"26645" 4 259 3 1 1 1 1 1 100 392 219 329 1468 8 2 1 436 3 50 110 1 1 -"26646" 4 259 3 1 1 1 1 1 100 392 329 247 1508 2 1 0 1450 2 25 82 1 0 -"26647" 4 259 3 1 1 1 1 1 100 392 247 371 1930 7 6 1 900 3 50 124 1 1 -"26648" 4 259 3 1 1 1 1 1 100 392 371 278 2079 5 3 0 897 2 25 93 1 0 -"26649" 4 259 3 1 1 1 1 1 100 392 278 348 1980 3 6 0 747 2 25 70 0 1 -"26650" 4 259 3 1 1 1 1 1 100 392 348 261 1477 6 9 1 1437 2 25 87 0 0 -"26651" 4 259 3 1 1 1 1 1 100 392 261 392 4685 1 4 0 573 3 50 131 0 1 -"26652" 4 259 3 1 1 1 2 1 100 462 100 150 1630 2 9 0 494 3 50 50 0 1 -"26653" 4 259 3 1 1 1 2 1 100 462 150 188 1452 4 10 0 1045 2 25 38 0 1 -"26654" 4 259 3 1 1 1 2 1 100 462 188 94 1048 3 1 0 572 3 50 94 1 0 -"26655" 4 259 3 1 1 1 2 1 100 462 94 165 1406 8 6 1 681 4 75 71 1 1 -"26656" 4 259 3 1 1 1 2 1 100 462 165 173 1590 5 7 0 751 1 5 8 0 1 -"26657" 4 259 3 1 1 1 2 1 100 462 173 164 1400 6 8 1 628 1 5 9 0 0 -"26658" 4 259 3 1 1 1 2 1 100 462 164 205 1307 7 5 1 578 2 25 41 1 1 -"26659" 4 259 3 1 1 1 2 1 100 462 205 308 1389 1 10 0 1040 3 50 103 0 1 -"26660" 4 259 3 1 1 1 2 1 100 462 308 462 1204 9 3 1 768 3 50 154 1 1 -"26661" 4 259 3 1 1 1 3 1 100 392 100 125 1456 3 5 0 1183 2 25 25 0 1 -"26662" 4 259 3 1 1 1 3 1 
100 392 125 62 1325 8 9 1 467 3 50 63 0 0 -"26663" 4 259 3 1 1 1 3 1 100 392 62 93 1162 2 4 0 474 3 50 31 0 1 -"26664" 4 259 3 1 1 1 3 1 100 392 93 98 1200 6 3 1 468 1 5 5 1 1 -"26665" 4 259 3 1 1 1 3 1 100 392 98 123 932 7 6 1 556 2 25 25 1 1 -"26666" 4 259 3 1 1 1 3 1 100 392 123 92 1072 4 2 0 963 2 25 31 1 0 -"26667" 4 259 3 1 1 1 3 1 100 392 92 179 1041 1 8 0 1055 5 95 87 0 1 -"26668" 4 259 3 1 1 1 3 1 100 392 179 224 1346 5 7 0 416 2 25 45 0 1 -"26669" 4 259 3 1 1 1 3 1 100 392 224 392 1169 9 5 1 575 4 75 168 1 1 -"26670" 4 259 3 1 1 1 4 1 100 530 100 175 1313 8 7 1 721 4 75 75 1 1 -"26671" 4 259 3 1 1 1 4 1 100 530 175 219 1164 3 10 0 547 2 25 44 0 1 -"26672" 4 259 3 1 1 1 4 1 100 530 219 164 822 7 9 1 805 2 25 55 0 0 -"26673" 4 259 3 1 1 1 4 1 100 530 164 287 996 9 1 1 547 4 75 123 1 1 -"26674" 4 259 3 1 1 1 4 1 100 530 287 431 1238 2 3 0 1241 3 50 144 0 1 -"26675" 4 259 3 1 1 1 4 1 100 530 431 754 1028 1 8 0 457 4 75 323 0 1 -"26676" 4 259 3 1 1 1 4 1 100 530 754 565 1184 5 4 0 1093 2 25 189 1 0 -"26677" 4 259 3 1 1 1 4 1 100 530 565 424 1038 4 2 0 538 2 25 141 1 0 -"26678" 4 259 3 1 1 1 4 1 100 530 424 530 1382 6 3 1 1810 2 25 106 1 1 -"26679" 4 259 4 0 1 0 1 1 100 99 100 175 1571 2 7 0 566 2 75 75 0 1 -"26680" 4 259 4 0 1 0 1 1 100 99 175 263 1240 3 7 0 1155 3 50 88 0 1 -"26681" 4 259 4 0 1 0 1 1 100 99 263 395 1282 2 8 0 747 3 50 132 0 1 -"26682" 4 259 4 0 1 0 1 1 100 99 395 99 1197 8 9 1 754 2 75 296 0 0 -"26683" 4 259 5 1 1 0 1 1 100 452 100 195 2623 1 3 0 1528 1 95 95 0 1 -"26684" 4 259 5 1 1 0 1 1 100 452 195 244 1116 6 5 1 565 4 25 49 1 1 -"26685" 4 259 5 1 1 0 1 1 100 452 244 366 1392 2 8 0 499 3 50 122 0 1 -"26686" 4 259 5 1 1 0 1 1 100 452 366 183 1233 8 9 1 620 3 50 183 0 0 -"26687" 4 259 5 1 1 0 1 1 100 452 183 275 1533 3 4 0 1258 3 50 92 0 1 -"26688" 4 259 5 1 1 0 1 1 100 452 275 206 1123 5 7 1 1500 4 25 69 0 0 -"26689" 4 259 5 1 1 0 1 1 100 452 206 309 1386 7 4 1 2093 3 50 103 1 1 -"26690" 4 259 5 1 1 0 1 1 100 452 309 232 1575 4 1 0 674 4 25 77 1 0 -"26691" 4 259 5 1 1 0 1 1 100 452 232 452 1231 9 6 1 1338 1 95 220 1 1 -"26692" 4 259 5 1 1 0 2 1 100 856 100 175 6874 8 1 1 744 2 75 75 1 1 -"26693" 4 259 5 1 1 0 2 1 100 856 175 219 1066 6 2 1 1261 4 25 44 1 1 -"26694" 4 259 5 1 1 0 2 1 100 856 219 164 1406 7 9 1 345 4 25 55 0 0 -"26695" 4 259 5 1 1 0 2 1 100 856 164 320 1190 2 10 0 427 1 95 156 0 1 -"26696" 4 259 5 1 1 0 2 1 100 856 320 240 1212 5 3 0 744 4 25 80 1 0 -"26697" 4 259 5 1 1 0 2 1 100 856 240 180 1098 4 2 0 1332 4 25 60 1 0 -"26698" 4 259 5 1 1 0 2 1 100 856 180 225 1242 3 5 0 629 4 25 45 0 1 -"26699" 4 259 5 1 1 0 2 1 100 856 225 439 980 9 4 1 883 1 95 214 1 1 -"26700" 4 259 5 1 1 0 2 1 100 856 439 856 1343 1 7 0 1076 1 95 417 0 1 -"26701" 4 259 5 1 1 0 3 1 100 31 100 195 1557 7 5 1 1089 1 95 95 1 1 -"26702" 4 259 5 1 1 0 3 1 100 31 195 10 1154 2 1 0 1129 1 95 185 1 0 -"26703" 4 259 5 1 1 0 3 1 100 31 10 20 1112 8 6 1 497 1 95 10 1 1 -"26704" 4 259 5 1 1 0 3 1 100 31 20 39 1095 4 7 0 712 1 95 19 0 1 -"26705" 4 259 5 1 1 0 3 1 100 31 39 76 780 3 10 0 532 1 95 37 0 1 -"26706" 4 259 5 1 1 0 3 1 100 31 76 4 977 6 8 1 832 1 95 72 0 0 -"26707" 4 259 5 1 1 0 3 1 100 31 4 8 982 9 2 1 349 1 95 4 1 1 -"26708" 4 259 5 1 1 0 3 1 100 31 8 16 1082 5 3 1 444 1 95 8 1 1 -"26709" 4 259 5 1 1 0 3 1 100 31 16 31 820 1 10 0 483 1 95 15 0 1 -"26710" 4 259 5 1 1 0 4 0 100 0 100 195 1500 2 3 0 346 1 95 95 0 1 -"26711" 4 259 5 1 1 0 4 0 100 0 195 380 1055 7 4 1 770 1 95 185 1 1 -"26712" 4 259 5 1 1 0 4 0 100 0 380 19 1019 3 1 0 864 1 95 361 1 0 -"26713" 4 259 5 1 1 0 4 0 100 0 
19 37 1084 1 9 0 694 1 95 18 0 1 -"26714" 4 259 5 1 1 0 4 0 100 0 37 72 1050 8 7 1 437 1 95 35 1 1 -"26715" 4 259 5 1 1 0 4 0 100 0 72 140 685 9 2 1 242 1 95 68 1 1 -"26716" 4 259 5 1 1 0 4 0 100 0 140 7 876 5 6 1 664 1 95 133 0 0 -"26717" 4 259 5 1 1 0 4 0 100 0 7 0 1190 6 8 1 379 1 95 7 0 0 -"26718" 4 260 2 0 1 0 1 1 100 33 100 175 11140 2 7 0 1543 2 75 75 0 1 -"26719" 4 260 2 0 1 0 1 1 100 33 175 341 5507 3 7 0 747 1 95 166 0 1 -"26720" 4 260 2 0 1 0 1 1 100 33 341 665 2188 2 8 0 1562 1 95 324 0 1 -"26721" 4 260 2 0 1 0 1 1 100 33 665 33 1356 8 9 1 3513 1 95 632 0 0 -"26722" 4 260 3 1 1 0 1 1 100 1716 100 195 1980 1 3 0 515 1 95 95 0 1 -"26723" 4 260 3 1 1 0 1 1 100 1716 195 293 2656 6 5 1 615 3 50 98 1 1 -"26724" 4 260 3 1 1 0 1 1 100 1716 293 513 1983 2 8 0 4103 2 75 220 0 1 -"26725" 4 260 3 1 1 0 1 1 100 1716 513 385 1761 8 9 1 1379 4 25 128 0 0 -"26726" 4 260 3 1 1 0 1 1 100 1716 385 751 2230 3 4 0 874 1 95 366 0 1 -"26727" 4 260 3 1 1 0 1 1 100 1716 751 939 4621 5 7 0 1287 4 25 188 0 1 -"26728" 4 260 3 1 1 0 1 1 100 1716 939 1174 2671 7 4 1 653 4 25 235 1 1 -"26729" 4 260 3 1 1 0 1 1 100 1716 1174 880 2332 4 1 0 459 4 25 294 1 0 -"26730" 4 260 3 1 1 0 1 1 100 1716 880 1716 2338 9 6 1 897 1 95 836 1 1 -"26731" 4 260 3 1 1 0 2 0 100 1 100 195 2327 8 1 1 1526 1 95 95 1 1 -"26732" 4 260 3 1 1 0 2 0 100 1 195 293 3059 6 2 1 711 3 50 98 1 1 -"26733" 4 260 3 1 1 0 2 0 100 1 293 15 2122 7 9 1 971 1 95 278 0 0 -"26734" 4 260 3 1 1 0 2 0 100 1 15 29 1955 2 10 0 2596 1 95 14 0 1 -"26735" 4 260 3 1 1 0 2 0 100 1 29 1 5757 5 3 0 745 1 95 28 1 0 -"26736" 4 260 3 1 1 0 3 0 100 0 100 195 1847 7 5 1 2013 1 95 95 1 1 -"26737" 4 260 3 1 1 0 3 0 100 0 195 10 1675 2 1 0 793 1 95 185 1 0 -"26738" 4 260 3 1 1 0 3 0 100 0 10 20 1632 8 6 1 1184 1 95 10 1 1 -"26739" 4 260 3 1 1 0 3 0 100 0 20 39 1605 4 7 0 854 1 95 19 0 1 -"26740" 4 260 3 1 1 0 3 0 100 0 39 76 2322 3 10 0 681 1 95 37 0 1 -"26741" 4 260 3 1 1 0 3 0 100 0 76 4 1998 6 8 1 1989 1 95 72 0 0 -"26742" 4 260 3 1 1 0 3 0 100 0 4 8 1691 9 2 1 568 1 95 4 1 1 -"26743" 4 260 3 1 1 0 3 0 100 0 8 0 3731 5 3 0 2207 1 95 8 1 0 -"26744" 4 260 3 1 1 0 4 1 100 1653 100 195 3624 2 3 0 609 1 95 95 0 1 -"26745" 4 260 3 1 1 0 4 1 100 1653 195 380 1242 7 4 1 948 1 95 185 1 1 -"26746" 4 260 3 1 1 0 4 1 100 1653 380 190 1340 3 1 0 1106 3 50 190 1 0 -"26747" 4 260 3 1 1 0 4 1 100 1653 190 371 1299 1 9 0 659 1 95 181 0 1 -"26748" 4 260 3 1 1 0 4 1 100 1653 371 723 1873 8 7 1 2293 1 95 352 1 1 -"26749" 4 260 3 1 1 0 4 1 100 1653 723 1410 1493 9 2 1 1036 1 95 687 1 1 -"26750" 4 260 3 1 1 0 4 1 100 1653 1410 1763 2744 5 6 0 963 4 25 353 0 1 -"26751" 4 260 3 1 1 0 4 1 100 1653 1763 1322 2859 6 8 1 689 4 25 441 0 0 -"26752" 4 260 3 1 1 0 4 1 100 1653 1322 1653 1621 4 7 0 502 4 25 331 0 1 -"26753" 4 260 4 0 1 1 1 1 100 191 100 125 4399 8 3 1 2043 2 25 25 1 1 -"26754" 4 260 4 0 1 1 1 1 100 191 125 219 3959 3 7 0 745 4 75 94 0 1 -"26755" 4 260 4 0 1 1 1 1 100 191 219 383 1634 8 2 1 995 4 75 164 1 1 -"26756" 4 260 4 0 1 1 1 1 100 191 383 191 1377 2 1 0 2301 3 50 192 1 0 -"26757" 4 260 5 1 1 1 1 1 100 814 100 195 1889 9 7 1 1309 5 95 95 1 1 -"26758" 4 260 5 1 1 1 1 1 100 814 195 244 1588 4 8 0 3398 2 25 49 0 1 -"26759" 4 260 5 1 1 1 1 1 100 814 244 366 1617 8 2 1 1266 3 50 122 1 1 -"26760" 4 260 5 1 1 1 1 1 100 814 366 183 1307 2 1 0 683 3 50 183 1 0 -"26761" 4 260 5 1 1 1 1 1 100 814 183 275 1720 7 6 1 1708 3 50 92 1 1 -"26762" 4 260 5 1 1 1 1 1 100 814 275 413 2024 5 3 1 565 3 50 138 1 1 -"26763" 4 260 5 1 1 1 1 1 100 814 413 620 1360 3 6 0 1413 3 50 207 0 1 -"26764" 4 260 5 1 
1 1 1 1 100 814 620 465 2511 6 9 1 1605 2 25 155 0 0 -"26765" 4 260 5 1 1 1 1 1 100 814 465 814 1518 1 4 0 820 4 75 349 0 1 -"26766" 4 260 5 1 1 1 2 1 100 755 100 195 1641 2 9 0 905 5 95 95 0 1 -"26767" 4 260 5 1 1 1 2 1 100 755 195 244 1524 4 10 0 1089 2 25 49 0 1 -"26768" 4 260 5 1 1 1 2 1 100 755 244 122 1218 3 1 0 462 3 50 122 1 0 -"26769" 4 260 5 1 1 1 2 1 100 755 122 238 1549 8 6 1 442 5 95 116 1 1 -"26770" 4 260 5 1 1 1 2 1 100 755 238 298 1906 5 7 0 861 2 25 60 0 1 -"26771" 4 260 5 1 1 1 2 1 100 755 298 223 1385 6 8 1 1133 2 25 75 0 0 -"26772" 4 260 5 1 1 1 2 1 100 755 223 335 1732 7 5 1 1960 3 50 112 1 1 -"26773" 4 260 5 1 1 1 2 1 100 755 335 503 1280 1 10 0 1377 3 50 168 0 1 -"26774" 4 260 5 1 1 1 2 1 100 755 503 755 1381 9 3 1 521 3 50 252 1 1 -"26775" 4 260 5 1 1 1 3 1 100 705 100 175 1759 3 5 0 665 4 75 75 0 1 -"26776" 4 260 5 1 1 1 3 1 100 705 175 44 1847 8 9 1 1845 4 75 131 0 0 -"26777" 4 260 5 1 1 1 3 1 100 705 44 86 1483 2 4 0 891 5 95 42 0 1 -"26778" 4 260 5 1 1 1 3 1 100 705 86 129 1701 6 3 1 1946 3 50 43 1 1 -"26779" 4 260 5 1 1 1 3 1 100 705 129 194 1403 7 6 1 2822 3 50 65 1 1 -"26780" 4 260 5 1 1 1 3 1 100 705 194 184 1333 4 2 0 1099 1 5 10 1 0 -"26781" 4 260 5 1 1 1 3 1 100 705 184 322 1284 1 8 0 989 4 75 138 0 1 -"26782" 4 260 5 1 1 1 3 1 100 705 322 403 2107 5 7 0 1021 2 25 81 0 1 -"26783" 4 260 5 1 1 1 3 1 100 705 403 705 1282 9 5 1 707 4 75 302 1 1 -"26784" 4 260 5 1 1 1 4 1 100 818 100 175 1735 8 7 1 1035 4 75 75 1 1 -"26785" 4 260 5 1 1 1 4 1 100 818 175 263 1543 3 10 0 772 3 50 88 0 1 -"26786" 4 260 5 1 1 1 4 1 100 818 263 131 1316 7 9 1 659 3 50 132 0 0 -"26787" 4 260 5 1 1 1 4 1 100 818 131 255 1417 9 1 1 2484 5 95 124 1 1 -"26788" 4 260 5 1 1 1 4 1 100 818 255 446 1532 2 3 0 625 4 75 191 0 1 -"26789" 4 260 5 1 1 1 4 1 100 818 446 781 1512 1 8 0 2094 4 75 335 0 1 -"26790" 4 260 5 1 1 1 4 1 100 818 781 820 2416 5 4 1 1363 1 5 39 1 1 -"26791" 4 260 5 1 1 1 4 1 100 818 820 779 1551 4 2 0 2067 1 5 41 1 0 -"26792" 4 260 5 1 1 1 4 1 100 818 779 818 1461 6 3 1 1381 1 5 39 1 1 -"26793" 4 261 2 0 1 0 1 1 100 3 100 150 6054 2 7 0 1460 3 50 50 0 1 -"26794" 4 261 2 0 1 0 1 1 100 3 150 225 7331 3 7 0 580 3 50 75 0 1 -"26795" 4 261 2 0 1 0 1 1 100 3 225 56 2510 2 8 1 876 2 75 169 0 0 -"26796" 4 261 2 0 1 0 1 1 100 3 56 3 2959 8 9 1 1424 1 95 53 0 0 -"26797" 4 261 3 1 1 0 1 1 100 209 100 195 5252 1 3 0 1344 1 95 95 0 1 -"26798" 4 261 3 1 1 0 1 1 100 209 195 293 1813 6 5 1 1716 3 50 98 1 1 -"26799" 4 261 3 1 1 0 1 1 100 209 293 440 2088 2 8 0 2419 3 50 147 0 1 -"26800" 4 261 3 1 1 0 1 1 100 209 440 330 2532 8 9 1 1218 4 25 110 0 0 -"26801" 4 261 3 1 1 0 1 1 100 209 330 247 4885 3 4 1 -22 4 25 83 0 0 -"26802" 4 261 3 1 1 0 1 1 100 209 247 185 2481 5 7 1 809 4 25 62 0 0 -"26803" 4 261 3 1 1 0 1 1 100 209 185 278 2479 7 4 1 887 3 50 93 1 1 -"26804" 4 261 3 1 1 0 1 1 100 209 278 139 2705 4 1 0 1575 3 50 139 1 0 -"26805" 4 261 3 1 1 0 1 1 100 209 139 209 2905 9 6 1 1957 3 50 70 1 1 -"26806" 4 261 3 1 1 0 2 1 100 841 100 195 3395 8 1 1 2183 1 95 95 1 1 -"26807" 4 261 3 1 1 0 2 1 100 841 195 293 2316 6 2 1 1716 3 50 98 1 1 -"26808" 4 261 3 1 1 0 2 1 100 841 293 366 2663 7 9 0 910 4 25 73 0 1 -"26809" 4 261 3 1 1 0 2 1 100 841 366 458 2256 2 10 0 787 4 25 92 0 1 -"26810" 4 261 3 1 1 0 2 1 100 841 458 573 1850 5 3 1 713 4 25 115 1 1 -"26811" 4 261 3 1 1 0 2 1 100 841 573 430 2356 4 2 0 930 4 25 143 1 0 -"26812" 4 261 3 1 1 0 2 1 100 841 430 538 5491 3 5 0 1020 4 25 108 0 1 -"26813" 4 261 3 1 1 0 2 1 100 841 538 673 2232 9 4 1 1364 4 25 135 1 1 -"26814" 4 261 3 1 1 0 2 1 100 841 673 
841 3544 1 7 0 1038 4 25 168 0 1 -"26815" 4 261 3 1 1 0 3 1 100 39 100 150 2393 7 5 1 1157 3 50 50 1 1 -"26816" 4 261 3 1 1 0 3 1 100 39 150 75 1787 2 1 0 1413 3 50 75 1 0 -"26817" 4 261 3 1 1 0 3 1 100 39 75 113 1870 8 6 1 1260 3 50 38 1 1 -"26818" 4 261 3 1 1 0 3 1 100 39 113 56 2836 4 7 1 875 3 50 57 0 0 -"26819" 4 261 3 1 1 0 3 1 100 39 56 84 1935 3 10 0 841 3 50 28 0 1 -"26820" 4 261 3 1 1 0 3 1 100 39 84 42 1917 6 8 1 712 3 50 42 0 0 -"26821" 4 261 3 1 1 0 3 1 100 39 42 82 1492 9 2 1 1298 1 95 40 1 1 -"26822" 4 261 3 1 1 0 3 1 100 39 82 20 3089 5 3 0 1788 2 75 62 1 0 -"26823" 4 261 3 1 1 0 3 1 100 39 20 39 2084 1 10 0 1223 1 95 19 0 1 -"26824" 4 261 3 1 1 0 4 1 100 2379 100 195 2195 2 3 0 1504 1 95 95 0 1 -"26825" 4 261 3 1 1 0 4 1 100 2379 195 380 2078 7 4 1 992 1 95 185 1 1 -"26826" 4 261 3 1 1 0 4 1 100 2379 380 190 2478 3 1 0 853 3 50 190 1 0 -"26827" 4 261 3 1 1 0 4 1 100 2379 190 371 1738 1 9 0 885 1 95 181 0 1 -"26828" 4 261 3 1 1 0 4 1 100 2379 371 723 1672 8 7 1 1630 1 95 352 1 1 -"26829" 4 261 3 1 1 0 4 1 100 2379 723 1410 2493 9 2 1 884 1 95 687 1 1 -"26830" 4 261 3 1 1 0 4 1 100 2379 1410 2115 1815 5 6 0 1655 3 50 705 0 1 -"26831" 4 261 3 1 1 0 4 1 100 2379 2115 1586 1988 6 8 1 1060 4 25 529 0 0 -"26832" 4 261 3 1 1 0 4 1 100 2379 1586 2379 2367 4 7 0 1052 3 50 793 0 1 -"26833" 4 261 4 0 1 1 1 1 100 253 100 150 3863 8 3 1 1149 3 50 50 1 1 -"26834" 4 261 4 0 1 1 1 1 100 253 150 225 12284 3 7 0 1737 3 50 75 0 1 -"26835" 4 261 4 0 1 1 1 1 100 253 225 338 1340 8 2 1 770 3 50 113 1 1 -"26836" 4 261 4 0 1 1 1 1 100 253 338 253 2131 2 1 0 3827 2 25 85 1 0 -"26837" 4 261 5 1 1 1 1 1 100 216 100 150 2038 9 7 1 3396 3 50 50 1 1 -"26838" 4 261 5 1 1 1 1 1 100 216 150 225 4111 4 8 0 793 3 50 75 0 1 -"26839" 4 261 5 1 1 1 1 1 100 216 225 338 1624 8 2 1 969 3 50 113 1 1 -"26840" 4 261 5 1 1 1 1 1 100 216 338 169 1864 2 1 0 4261 3 50 169 1 0 -"26841" 4 261 5 1 1 1 1 1 100 216 169 296 1664 7 6 1 743 4 75 127 1 1 -"26842" 4 261 5 1 1 1 1 1 100 216 296 148 4625 5 3 0 1235 3 50 148 1 0 -"26843" 4 261 5 1 1 1 1 1 100 216 148 222 2157 3 6 0 1308 3 50 74 0 1 -"26844" 4 261 5 1 1 1 1 1 100 216 222 111 1443 6 9 1 900 3 50 111 0 0 -"26845" 4 261 5 1 1 1 1 1 100 216 111 216 1798 1 4 0 1999 5 95 105 0 1 -"26846" 4 261 5 1 1 1 2 1 100 684 100 150 1898 2 9 0 1158 3 50 50 0 1 -"26847" 4 261 5 1 1 1 2 1 100 684 150 225 2884 4 10 0 1568 3 50 75 0 1 -"26848" 4 261 5 1 1 1 2 1 100 684 225 112 2323 3 1 0 1618 3 50 113 1 0 -"26849" 4 261 5 1 1 1 2 1 100 684 112 168 4346 8 6 1 1193 3 50 56 1 1 -"26850" 4 261 5 1 1 1 2 1 100 684 168 160 2229 5 7 1 1345 1 5 8 0 0 -"26851" 4 261 5 1 1 1 2 1 100 684 160 120 1678 6 8 1 571 2 25 40 0 0 -"26852" 4 261 5 1 1 1 2 1 100 684 120 180 1405 7 5 1 1136 3 50 60 1 1 -"26853" 4 261 5 1 1 1 2 1 100 684 180 351 2047 1 10 0 1612 5 95 171 0 1 -"26854" 4 261 5 1 1 1 2 1 100 684 351 684 2812 9 3 1 1317 5 95 333 1 1 -"26855" 4 261 5 1 1 1 3 1 100 217 100 150 1855 3 5 0 1893 3 50 50 0 1 -"26856" 4 261 5 1 1 1 3 1 100 217 150 75 1840 8 9 1 1598 3 50 75 0 0 -"26857" 4 261 5 1 1 1 3 1 100 217 75 113 1623 2 4 0 2991 3 50 38 0 1 -"26858" 4 261 5 1 1 1 3 1 100 217 113 170 1550 6 3 1 1146 3 50 57 1 1 -"26859" 4 261 5 1 1 1 3 1 100 217 170 255 2153 7 6 1 944 3 50 85 1 1 -"26860" 4 261 5 1 1 1 3 1 100 217 255 127 3497 4 2 0 1104 3 50 128 1 0 -"26861" 4 261 5 1 1 1 3 1 100 217 127 248 1489 1 8 0 960 5 95 121 0 1 -"26862" 4 261 5 1 1 1 3 1 100 217 248 124 3040 5 7 1 1597 3 50 124 0 0 -"26863" 4 261 5 1 1 1 3 1 100 217 124 217 1278 9 5 1 1231 4 75 93 1 1 -"26864" 4 261 5 1 1 1 4 1 100 943 100 
175 1499 8 7 1 1442 4 75 75 1 1 -"26865" 4 261 5 1 1 1 4 1 100 943 175 263 1866 3 10 0 3831 3 50 88 0 1 -"26866" 4 261 5 1 1 1 4 1 100 943 263 131 1222 7 9 1 1202 3 50 132 0 0 -"26867" 4 261 5 1 1 1 4 1 100 943 131 255 1366 9 1 1 1011 5 95 124 1 1 -"26868" 4 261 5 1 1 1 4 1 100 943 255 383 2080 2 3 0 1347 3 50 128 0 1 -"26869" 4 261 5 1 1 1 4 1 100 943 383 575 2544 1 8 0 2220 3 50 192 0 1 -"26870" 4 261 5 1 1 1 4 1 100 943 575 1006 4336 5 4 1 1064 4 75 431 1 1 -"26871" 4 261 5 1 1 1 4 1 100 943 1006 754 4290 4 2 0 4692 2 25 252 1 0 -"26872" 4 261 5 1 1 1 4 1 100 943 754 943 7626 6 3 1 2813 2 25 189 1 1 -"26873" 4 263 2 0 1 0 1 1 100 26 100 150 6466 2 7 0 1308 3 50 50 0 1 -"26874" 4 263 2 0 1 0 1 1 100 26 150 263 13426 3 7 0 681 2 75 113 0 1 -"26875" 4 263 2 0 1 0 1 1 100 26 263 513 3390 2 8 0 1722 1 95 250 0 1 -"26876" 4 263 2 0 1 0 1 1 100 26 513 26 2662 8 9 1 1837 1 95 487 0 0 -"26877" 4 263 3 1 1 0 1 1 100 27 100 195 5486 1 3 0 3054 1 95 95 0 1 -"26878" 4 263 3 1 1 0 1 1 100 27 195 380 3098 6 5 1 1412 1 95 185 1 1 -"26879" 4 263 3 1 1 0 1 1 100 27 380 741 3406 2 8 0 1329 1 95 361 0 1 -"26880" 4 263 3 1 1 0 1 1 100 27 741 37 2095 8 9 1 1257 1 95 704 0 0 -"26881" 4 263 3 1 1 0 1 1 100 27 37 72 2082 3 4 0 1421 1 95 35 0 1 -"26882" 4 263 3 1 1 0 1 1 100 27 72 140 3239 5 7 0 922 1 95 68 0 1 -"26883" 4 263 3 1 1 0 1 1 100 27 140 273 1828 7 4 1 754 1 95 133 1 1 -"26884" 4 263 3 1 1 0 1 1 100 27 273 14 2123 4 1 0 716 1 95 259 1 0 -"26885" 4 263 3 1 1 0 1 1 100 27 14 27 1415 9 6 1 1178 1 95 13 1 1 -"26886" 4 263 3 1 1 0 2 1 100 31 100 195 3108 8 1 1 750 1 95 95 1 1 -"26887" 4 263 3 1 1 0 2 1 100 31 195 380 2912 6 2 1 779 1 95 185 1 1 -"26888" 4 263 3 1 1 0 2 1 100 31 380 19 2219 7 9 1 439 1 95 361 0 0 -"26889" 4 263 3 1 1 0 2 1 100 31 19 37 1359 2 10 0 492 1 95 18 0 1 -"26890" 4 263 3 1 1 0 2 1 100 31 37 72 2458 5 3 1 451 1 95 35 1 1 -"26891" 4 263 3 1 1 0 2 1 100 31 72 4 2034 4 2 0 767 1 95 68 1 0 -"26892" 4 263 3 1 1 0 2 1 100 31 4 8 1224 3 5 0 571 1 95 4 0 1 -"26893" 4 263 3 1 1 0 2 1 100 31 8 16 1213 9 4 1 1577 1 95 8 1 1 -"26894" 4 263 3 1 1 0 2 1 100 31 16 31 1516 1 7 0 1121 1 95 15 0 1 -"26895" 4 263 3 1 1 0 3 0 100 1 100 195 2455 7 5 1 512 1 95 95 1 1 -"26896" 4 263 3 1 1 0 3 0 100 1 195 10 1773 2 1 0 4742 1 95 185 1 0 -"26897" 4 263 3 1 1 0 3 0 100 1 10 20 1745 8 6 1 524 1 95 10 1 1 -"26898" 4 263 3 1 1 0 3 0 100 1 20 1 2720 4 7 1 900 1 95 19 0 0 -"26899" 4 263 3 1 1 0 4 1 100 27 100 195 4154 2 3 0 578 1 95 95 0 1 -"26900" 4 263 3 1 1 0 4 1 100 27 195 380 1500 7 4 1 746 1 95 185 1 1 -"26901" 4 263 3 1 1 0 4 1 100 27 380 19 1996 3 1 0 1562 1 95 361 1 0 -"26902" 4 263 3 1 1 0 4 1 100 27 19 37 1365 1 9 0 378 1 95 18 0 1 -"26903" 4 263 3 1 1 0 4 1 100 27 37 72 1125 8 7 1 733 1 95 35 1 1 -"26904" 4 263 3 1 1 0 4 1 100 27 72 140 1488 9 2 1 646 1 95 68 1 1 -"26905" 4 263 3 1 1 0 4 1 100 27 140 273 1989 5 6 0 420 1 95 133 0 1 -"26906" 4 263 3 1 1 0 4 1 100 27 273 14 1453 6 8 1 1045 1 95 259 0 0 -"26907" 4 263 3 1 1 0 4 1 100 27 14 27 1184 4 7 0 549 1 95 13 0 1 -"26908" 4 263 4 0 1 1 1 1 100 169 100 150 5631 8 3 1 1329 3 50 50 1 1 -"26909" 4 263 4 0 1 1 1 1 100 169 150 225 3519 3 7 0 1829 3 50 75 0 1 -"26910" 4 263 4 0 1 1 1 1 100 169 225 338 2891 8 2 1 680 3 50 113 1 1 -"26911" 4 263 4 0 1 1 1 1 100 169 338 169 1661 2 1 0 939 3 50 169 1 0 -"26912" 4 263 5 1 1 1 1 1 100 627 100 150 2955 9 7 1 716 3 50 50 1 1 -"26913" 4 263 5 1 1 1 1 1 100 627 150 188 1615 4 8 0 1080 2 25 38 0 1 -"26914" 4 263 5 1 1 1 1 1 100 627 188 282 1106 8 2 1 539 3 50 94 1 1 -"26915" 4 263 5 1 1 1 1 1 100 627 282 268 1303 2 
1 0 1238 1 5 14 1 0 -"26916" 4 263 5 1 1 1 1 1 100 627 268 335 1880 7 6 1 701 2 25 67 1 1 -"26917" 4 263 5 1 1 1 1 1 100 627 335 318 1385 5 3 0 485 1 5 17 1 0 -"26918" 4 263 5 1 1 1 1 1 100 627 318 477 1355 3 6 0 712 3 50 159 0 1 -"26919" 4 263 5 1 1 1 1 1 100 627 477 358 2252 6 9 1 1478 2 25 119 0 0 -"26920" 4 263 5 1 1 1 1 1 100 627 358 627 1333 1 4 0 852 4 75 269 0 1 -"26921" 4 263 5 1 1 1 2 1 100 975 100 175 2293 2 9 0 1654 4 75 75 0 1 -"26922" 4 263 5 1 1 1 2 1 100 975 175 184 3252 4 10 0 487 1 5 9 0 1 -"26923" 4 263 5 1 1 1 2 1 100 975 184 138 2612 3 1 0 894 2 25 46 1 0 -"26924" 4 263 5 1 1 1 2 1 100 975 138 207 1264 8 6 1 586 3 50 69 1 1 -"26925" 4 263 5 1 1 1 2 1 100 975 207 217 2956 5 7 0 605 1 5 10 0 1 -"26926" 4 263 5 1 1 1 2 1 100 975 217 163 2061 6 8 1 1008 2 25 54 0 0 -"26927" 4 263 5 1 1 1 2 1 100 975 163 318 1405 7 5 1 904 5 95 155 1 1 -"26928" 4 263 5 1 1 1 2 1 100 975 318 557 2227 1 10 0 711 4 75 239 0 1 -"26929" 4 263 5 1 1 1 2 1 100 975 557 975 1690 9 3 1 1615 4 75 418 1 1 -"26930" 4 263 5 1 1 1 3 1 100 565 100 150 2178 3 5 0 776 3 50 50 0 1 -"26931" 4 263 5 1 1 1 3 1 100 565 150 112 1528 8 9 1 3181 2 25 38 0 0 -"26932" 4 263 5 1 1 1 3 1 100 565 112 168 1363 2 4 0 560 3 50 56 0 1 -"26933" 4 263 5 1 1 1 3 1 100 565 168 176 1810 6 3 1 858 1 5 8 1 1 -"26934" 4 263 5 1 1 1 3 1 100 565 176 185 1366 7 6 1 571 1 5 9 1 1 -"26935" 4 263 5 1 1 1 3 1 100 565 185 176 1676 4 2 0 458 1 5 9 1 0 -"26936" 4 263 5 1 1 1 3 1 100 565 176 308 1153 1 8 0 632 4 75 132 0 1 -"26937" 4 263 5 1 1 1 3 1 100 565 308 323 2176 5 7 0 533 1 5 15 0 1 -"26938" 4 263 5 1 1 1 3 1 100 565 323 565 1178 9 5 1 528 4 75 242 1 1 -"26939" 4 263 5 1 1 1 4 1 100 231 100 175 1248 8 7 1 515 4 75 75 1 1 -"26940" 4 263 5 1 1 1 4 1 100 231 175 219 1472 3 10 0 903 2 25 44 0 1 -"26941" 4 263 5 1 1 1 4 1 100 231 219 109 1946 7 9 1 763 3 50 110 0 0 -"26942" 4 263 5 1 1 1 4 1 100 231 109 164 1500 9 1 1 443 3 50 55 1 1 -"26943" 4 263 5 1 1 1 4 1 100 231 164 246 1139 2 3 0 1306 3 50 82 0 1 -"26944" 4 263 5 1 1 1 4 1 100 231 246 308 1650 1 8 0 857 2 25 62 0 1 -"26945" 4 263 5 1 1 1 4 1 100 231 308 293 1549 5 4 0 641 1 5 15 1 0 -"26946" 4 263 5 1 1 1 4 1 100 231 293 220 1744 4 2 0 810 2 25 73 1 0 -"26947" 4 263 5 1 1 1 4 1 100 231 220 231 1225 6 3 1 516 1 5 11 1 1 -"26948" 4 273 2 0 1 0 1 1 100 1 100 175 10860 2 7 0 1645 2 75 75 0 1 -"26949" 4 273 2 0 1 0 1 1 100 1 175 9 4355 3 7 1 1826 1 95 166 0 0 -"26950" 4 273 2 0 1 0 1 1 100 1 9 18 2224 2 8 0 1876 1 95 9 0 1 -"26951" 4 273 2 0 1 0 1 1 100 1 18 1 2648 8 9 1 823 1 95 17 0 0 -"26952" 4 273 3 1 1 0 1 0 100 1 100 195 2571 1 3 0 1335 1 95 95 0 1 -"26953" 4 273 3 1 1 0 1 0 100 1 195 10 3044 6 5 0 1001 1 95 185 1 0 -"26954" 4 273 3 1 1 0 1 0 100 1 10 20 3087 2 8 0 810 1 95 10 0 1 -"26955" 4 273 3 1 1 0 1 0 100 1 20 1 2592 8 9 1 755 1 95 19 0 0 -"26956" 4 273 3 1 1 0 2 1 100 39 100 195 2116 8 1 1 1061 1 95 95 1 1 -"26957" 4 273 3 1 1 0 2 1 100 39 195 293 2256 6 2 1 1243 3 50 98 1 1 -"26958" 4 273 3 1 1 0 2 1 100 39 293 440 2596 7 9 0 966 3 50 147 0 1 -"26959" 4 273 3 1 1 0 2 1 100 39 440 220 2622 2 10 1 831 3 50 220 0 0 -"26960" 4 273 3 1 1 0 2 1 100 39 220 110 3493 5 3 0 860 3 50 110 1 0 -"26961" 4 273 3 1 1 0 2 1 100 39 110 5 2672 4 2 0 2079 1 95 105 1 0 -"26962" 4 273 3 1 1 0 2 1 100 39 5 10 2098 3 5 0 955 1 95 5 0 1 -"26963" 4 273 3 1 1 0 2 1 100 39 10 20 2139 9 4 1 643 1 95 10 1 1 -"26964" 4 273 3 1 1 0 2 1 100 39 20 39 1753 1 7 0 956 1 95 19 0 1 -"26965" 4 273 3 1 1 0 3 0 100 0 100 50 2797 7 5 0 1669 3 50 50 1 0 -"26966" 4 273 3 1 1 0 3 0 100 0 50 2 1571 2 1 0 805 1 95 48 1 0 
-"26967" 4 273 3 1 1 0 3 0 100 0 2 4 1948 8 6 1 821 1 95 2 1 1 -"26968" 4 273 3 1 1 0 3 0 100 0 4 0 2435 4 7 1 597 1 95 4 0 0 -"26969" 4 273 3 1 1 0 4 1 100 405 100 175 1893 2 3 0 1041 2 75 75 0 1 -"26970" 4 273 3 1 1 0 4 1 100 405 175 306 1644 7 4 1 963 2 75 131 1 1 -"26971" 4 273 3 1 1 0 4 1 100 405 306 15 1675 3 1 0 2568 1 95 291 1 0 -"26972" 4 273 3 1 1 0 4 1 100 405 15 29 2116 1 9 0 749 1 95 14 0 1 -"26973" 4 273 3 1 1 0 4 1 100 405 29 57 1694 8 7 1 683 1 95 28 1 1 -"26974" 4 273 3 1 1 0 4 1 100 405 57 111 1896 9 2 1 771 1 95 54 1 1 -"26975" 4 273 3 1 1 0 4 1 100 405 111 216 2179 5 6 0 1385 1 95 105 0 1 -"26976" 4 273 3 1 1 0 4 1 100 405 216 270 3044 6 8 0 658 4 25 54 0 1 -"26977" 4 273 3 1 1 0 4 1 100 405 270 405 2928 4 7 0 898 3 50 135 0 1 -"26978" 4 273 4 0 1 1 1 1 100 205 100 125 3520 8 3 1 654 2 25 25 1 1 -"26979" 4 273 4 0 1 1 1 1 100 205 125 156 1516 3 7 0 1722 2 25 31 0 1 -"26980" 4 273 4 0 1 1 1 1 100 205 156 195 4101 8 2 1 1408 2 25 39 1 1 -"26981" 4 273 4 0 1 1 1 1 100 205 195 205 3114 2 1 1 612 1 5 10 1 1 -"26982" 4 273 5 1 1 1 1 1 100 532 100 175 2145 9 7 1 585 4 75 75 1 1 -"26983" 4 273 5 1 1 1 1 1 100 532 175 219 1538 4 8 0 1398 2 25 44 0 1 -"26984" 4 273 5 1 1 1 1 1 100 532 219 329 1528 8 2 1 734 3 50 110 1 1 -"26985" 4 273 5 1 1 1 1 1 100 532 329 411 1248 2 1 1 651 2 25 82 1 1 -"26986" 4 273 5 1 1 1 1 1 100 532 411 432 2025 7 6 1 1022 1 5 21 1 1 -"26987" 4 273 5 1 1 1 1 1 100 532 432 324 1298 5 3 0 571 2 25 108 1 0 -"26988" 4 273 5 1 1 1 1 1 100 532 324 405 1188 3 6 0 569 2 25 81 0 1 -"26989" 4 273 5 1 1 1 1 1 100 532 405 304 1956 6 9 1 492 2 25 101 0 0 -"26990" 4 273 5 1 1 1 1 1 100 532 304 532 1106 1 4 0 947 4 75 228 0 1 -"26991" 4 273 5 1 1 1 2 1 100 806 100 175 1795 2 9 0 692 4 75 75 0 1 -"26992" 4 273 5 1 1 1 2 1 100 806 175 219 1112 4 10 0 745 2 25 44 0 1 -"26993" 4 273 5 1 1 1 2 1 100 806 219 230 1155 3 1 1 601 1 5 11 1 1 -"26994" 4 273 5 1 1 1 2 1 100 806 230 345 1282 8 6 1 595 3 50 115 1 1 -"26995" 4 273 5 1 1 1 2 1 100 806 345 328 1420 5 7 1 686 1 5 17 0 0 -"26996" 4 273 5 1 1 1 2 1 100 806 328 344 1147 6 8 0 602 1 5 16 0 1 -"26997" 4 273 5 1 1 1 2 1 100 806 344 430 1379 7 5 1 983 2 25 86 1 1 -"26998" 4 273 5 1 1 1 2 1 100 806 430 645 1054 1 10 0 721 3 50 215 0 1 -"26999" 4 273 5 1 1 1 2 1 100 806 645 806 1388 9 3 1 567 2 25 161 1 1 -"27000" 4 273 5 1 1 1 3 1 100 242 100 150 1731 3 5 0 702 3 50 50 0 1 -"27001" 4 273 5 1 1 1 3 1 100 242 150 75 1058 8 9 1 1790 3 50 75 0 0 -"27002" 4 273 5 1 1 1 3 1 100 242 75 131 1046 2 4 0 650 4 75 56 0 1 -"27003" 4 273 5 1 1 1 3 1 100 242 131 98 1281 6 3 0 1642 2 25 33 1 0 -"27004" 4 273 5 1 1 1 3 1 100 242 98 172 1193 7 6 1 991 4 75 74 1 1 -"27005" 4 273 5 1 1 1 3 1 100 242 172 86 948 4 2 0 997 3 50 86 1 0 -"27006" 4 273 5 1 1 1 3 1 100 242 86 129 914 1 8 0 612 3 50 43 0 1 -"27007" 4 273 5 1 1 1 3 1 100 242 129 161 2853 5 7 0 874 2 25 32 0 1 -"27008" 4 273 5 1 1 1 3 1 100 242 161 242 1278 9 5 1 639 3 50 81 1 1 -"27009" 4 273 5 1 1 1 4 1 100 298 100 150 1943 8 7 1 416 3 50 50 1 1 -"27010" 4 273 5 1 1 1 4 1 100 298 150 225 930 3 10 0 572 3 50 75 0 1 -"27011" 4 273 5 1 1 1 4 1 100 298 225 236 2838 7 9 0 540 1 5 11 0 1 -"27012" 4 273 5 1 1 1 4 1 100 298 236 354 1079 9 1 1 586 3 50 118 1 1 -"27013" 4 273 5 1 1 1 4 1 100 298 354 531 1156 2 3 0 707 3 50 177 0 1 -"27014" 4 273 5 1 1 1 4 1 100 298 531 504 4118 1 8 1 448 1 5 27 0 0 -"27015" 4 273 5 1 1 1 4 1 100 298 504 529 1904 5 4 1 571 1 5 25 1 1 -"27016" 4 273 5 1 1 1 4 1 100 298 529 397 989 4 2 0 676 2 25 132 1 0 -"27017" 4 273 5 1 1 1 4 1 100 298 397 298 1376 6 3 0 525 2 25 99 
1 0 -"27018" 4 277 2 0 1 0 1 1 100 98 100 150 7347 2 7 0 892 3 50 50 0 1 -"27019" 4 277 2 0 1 0 1 1 100 98 150 225 14070 3 7 0 3494 3 50 75 0 1 -"27020" 4 277 2 0 1 0 1 1 100 98 225 394 4549 2 8 0 970 2 75 169 0 1 -"27021" 4 277 2 0 1 0 1 1 100 98 394 98 3769 8 9 1 516 2 75 296 0 0 -"27022" 4 277 3 1 1 0 1 1 100 3450 100 195 10696 1 3 0 992 1 95 95 0 1 -"27023" 4 277 3 1 1 0 1 1 100 3450 195 293 2125 6 5 1 1142 3 50 98 1 1 -"27024" 4 277 3 1 1 0 1 1 100 3450 293 513 1540 2 8 0 720 2 75 220 0 1 -"27025" 4 277 3 1 1 0 1 1 100 3450 513 770 2851 8 9 0 1225 3 50 257 0 1 -"27026" 4 277 3 1 1 0 1 1 100 3450 770 1348 1948 3 4 0 885 2 75 578 0 1 -"27027" 4 277 3 1 1 0 1 1 100 3450 1348 2022 2759 5 7 0 938 3 50 674 0 1 -"27028" 4 277 3 1 1 0 1 1 100 3450 2022 3539 1683 7 4 1 441 2 75 1517 1 1 -"27029" 4 277 3 1 1 0 1 1 100 3450 3539 1769 1817 4 1 0 1152 3 50 1770 1 0 -"27030" 4 277 3 1 1 0 1 1 100 3450 1769 3450 2007 9 6 1 1045 1 95 1681 1 1 -"27031" 4 277 3 1 1 0 2 1 100 16 100 195 2858 8 1 1 2503 1 95 95 1 1 -"27032" 4 277 3 1 1 0 2 1 100 16 195 341 1677 6 2 1 579 2 75 146 1 1 -"27033" 4 277 3 1 1 0 2 1 100 16 341 85 1677 7 9 1 1090 2 75 256 0 0 -"27034" 4 277 3 1 1 0 2 1 100 16 85 166 1644 2 10 0 995 1 95 81 0 1 -"27035" 4 277 3 1 1 0 2 1 100 16 166 41 2209 5 3 0 1309 2 75 125 1 0 -"27036" 4 277 3 1 1 0 2 1 100 16 41 2 1860 4 2 0 961 1 95 39 1 0 -"27037" 4 277 3 1 1 0 2 1 100 16 2 4 2224 3 5 0 628 1 95 2 0 1 -"27038" 4 277 3 1 1 0 2 1 100 16 4 8 1676 9 4 1 529 1 95 4 1 1 -"27039" 4 277 3 1 1 0 2 1 100 16 8 16 1349 1 7 0 947 1 95 8 0 1 -"27040" 4 277 3 1 1 0 3 1 100 14 100 195 2266 7 5 1 955 1 95 95 1 1 -"27041" 4 277 3 1 1 0 3 1 100 14 195 10 1251 2 1 0 2164 1 95 185 1 0 -"27042" 4 277 3 1 1 0 3 1 100 14 10 20 1427 8 6 1 1101 1 95 10 1 1 -"27043" 4 277 3 1 1 0 3 1 100 14 20 35 1494 4 7 0 900 2 75 15 0 1 -"27044" 4 277 3 1 1 0 3 1 100 14 35 61 1559 3 10 0 1762 2 75 26 0 1 -"27045" 4 277 3 1 1 0 3 1 100 14 61 15 1860 6 8 1 761 2 75 46 0 0 -"27046" 4 277 3 1 1 0 3 1 100 14 15 29 1514 9 2 1 1048 1 95 14 1 1 -"27047" 4 277 3 1 1 0 3 1 100 14 29 7 1451 5 3 0 916 2 75 22 1 0 -"27048" 4 277 3 1 1 0 3 1 100 14 7 14 1118 1 10 0 1422 1 95 7 0 1 -"27049" 4 277 3 1 1 0 4 1 100 107 100 195 1487 2 3 0 1142 1 95 95 0 1 -"27050" 4 277 3 1 1 0 4 1 100 107 195 341 2236 7 4 1 744 2 75 146 1 1 -"27051" 4 277 3 1 1 0 4 1 100 107 341 17 1274 3 1 0 1178 1 95 324 1 0 -"27052" 4 277 3 1 1 0 4 1 100 107 17 33 2767 1 9 0 881 1 95 16 0 1 -"27053" 4 277 3 1 1 0 4 1 100 107 33 64 1308 8 7 1 540 1 95 31 1 1 -"27054" 4 277 3 1 1 0 4 1 100 107 64 125 3063 9 2 1 707 1 95 61 1 1 -"27055" 4 277 3 1 1 0 4 1 100 107 125 244 1406 5 6 0 633 1 95 119 0 1 -"27056" 4 277 3 1 1 0 4 1 100 107 244 61 5360 6 8 1 1087 2 75 183 0 0 -"27057" 4 277 3 1 1 0 4 1 100 107 61 107 1481 4 7 0 528 2 75 46 0 1 -"27058" 4 277 4 0 1 1 1 1 100 115 100 150 7750 8 3 1 483 3 50 50 1 1 -"27059" 4 277 4 0 1 1 1 1 100 115 150 263 5618 3 7 0 1013 4 75 113 0 1 -"27060" 4 277 4 0 1 1 1 1 100 115 263 460 1274 8 2 1 1275 4 75 197 1 1 -"27061" 4 277 4 0 1 1 1 1 100 115 460 115 1340 2 1 0 1290 4 75 345 1 0 -"27062" 4 277 5 1 1 1 1 1 100 131 100 195 5538 9 7 1 649 5 95 95 1 1 -"27063" 4 277 5 1 1 1 1 1 100 131 195 244 1543 4 8 0 1068 2 25 49 0 1 -"27064" 4 277 5 1 1 1 1 1 100 131 244 427 1414 8 2 1 2193 4 75 183 1 1 -"27065" 4 277 5 1 1 1 1 1 100 131 427 107 1518 2 1 0 1164 4 75 320 1 0 -"27066" 4 277 5 1 1 1 1 1 100 131 107 80 2202 7 6 0 1991 2 25 27 1 0 -"27067" 4 277 5 1 1 1 1 1 100 131 80 100 2477 5 3 1 2821 2 25 20 1 1 -"27068" 4 277 5 1 1 1 1 1 100 131 100 175 1708 3 
6 0 1097 4 75 75 0 1 -"27069" 4 277 5 1 1 1 1 1 100 131 175 87 2010 6 9 1 1064 3 50 88 0 0 -"27070" 4 277 5 1 1 1 1 1 100 131 87 131 1514 1 4 0 440 3 50 44 0 1 -"27071" 4 277 5 1 1 1 2 1 100 1646 100 195 1472 2 9 0 536 5 95 95 0 1 -"27072" 4 277 5 1 1 1 2 1 100 1646 195 146 1964 4 10 1 1780 2 25 49 0 0 -"27073" 4 277 5 1 1 1 2 1 100 1646 146 219 3012 3 1 1 371 3 50 73 1 1 -"27074" 4 277 5 1 1 1 2 1 100 1646 219 329 1574 8 6 1 465 3 50 110 1 1 -"27075" 4 277 5 1 1 1 2 1 100 1646 329 494 2048 5 7 0 501 3 50 165 0 1 -"27076" 4 277 5 1 1 1 2 1 100 1646 494 247 2138 6 8 1 979 3 50 247 0 0 -"27077" 4 277 5 1 1 1 2 1 100 1646 247 482 2281 7 5 1 1179 5 95 235 1 1 -"27078" 4 277 5 1 1 1 2 1 100 1646 482 844 1536 1 10 0 1764 4 75 362 0 1 -"27079" 4 277 5 1 1 1 2 1 100 1646 844 1646 1885 9 3 1 1498 5 95 802 1 1 -"27080" 4 277 5 1 1 1 3 1 100 20 100 150 1520 3 5 0 2494 3 50 50 0 1 -"27081" 4 277 5 1 1 1 3 1 100 20 150 37 1796 8 9 1 765 4 75 113 0 0 -"27082" 4 277 5 1 1 1 3 1 100 20 37 56 1718 2 4 0 1082 3 50 19 0 1 -"27083" 4 277 5 1 1 1 3 1 100 20 56 28 3372 6 3 0 481 3 50 28 1 0 -"27084" 4 277 5 1 1 1 3 1 100 20 28 42 1808 7 6 1 375 3 50 14 1 1 -"27085" 4 277 5 1 1 1 3 1 100 20 42 10 1546 4 2 0 1711 4 75 32 1 0 -"27086" 4 277 5 1 1 1 3 1 100 20 10 20 1627 1 8 0 892 5 95 10 0 1 -"27087" 4 277 5 1 1 1 3 1 100 20 20 10 2062 5 7 1 1282 3 50 10 0 0 -"27088" 4 277 5 1 1 1 3 1 100 20 10 20 1738 9 5 1 693 5 95 10 1 1 -"27089" 4 277 5 1 1 1 4 1 100 256 100 175 2163 8 7 1 770 4 75 75 1 1 -"27090" 4 277 5 1 1 1 4 1 100 256 175 219 1931 3 10 0 1573 2 25 44 0 1 -"27091" 4 277 5 1 1 1 4 1 100 256 219 55 1663 7 9 1 862 4 75 164 0 0 -"27092" 4 277 5 1 1 1 4 1 100 256 55 96 1498 9 1 1 929 4 75 41 1 1 -"27093" 4 277 5 1 1 1 4 1 100 256 96 187 1938 2 3 0 876 5 95 91 0 1 -"27094" 4 277 5 1 1 1 4 1 100 256 187 365 1672 1 8 0 1990 5 95 178 0 1 -"27095" 4 277 5 1 1 1 4 1 100 256 365 456 2229 5 4 1 920 2 25 91 1 1 -"27096" 4 277 5 1 1 1 4 1 100 256 456 342 2426 4 2 0 1356 2 25 114 1 0 -"27097" 4 277 5 1 1 1 4 1 100 256 342 256 2233 6 3 0 1573 2 25 86 1 0 -"27098" 4 281 2 0 1 1 1 1 100 179 100 75 9889 8 3 0 1376 2 25 25 1 0 -"27099" 4 281 2 0 1 1 1 1 100 179 75 113 5531 3 7 0 794 3 50 38 0 1 -"27100" 4 281 2 0 1 1 1 1 100 179 113 119 2914 8 2 1 1148 1 5 6 1 1 -"27101" 4 281 2 0 1 1 1 1 100 179 119 179 2035 2 1 1 1848 3 50 60 1 1 -"27102" 4 281 3 1 1 1 1 0 100 1 100 175 4458 9 7 1 1379 4 75 75 1 1 -"27103" 4 281 3 1 1 1 1 0 100 1 175 131 2943 4 8 1 1747 2 25 44 0 0 -"27104" 4 281 3 1 1 1 1 0 100 1 131 98 8466 8 2 0 421 2 25 33 1 0 -"27105" 4 281 3 1 1 1 1 0 100 1 98 123 2982 2 1 1 4141 2 25 25 1 1 -"27106" 4 281 3 1 1 1 1 0 100 1 123 61 3246 7 6 0 1441 3 50 62 1 0 -"27107" 4 281 3 1 1 1 1 0 100 1 61 3 1234 5 3 0 1037 5 95 58 1 0 -"27108" 4 281 3 1 1 1 1 0 100 1 3 5 3049 3 6 0 2502 4 75 2 0 1 -"27109" 4 281 3 1 1 1 1 0 100 1 5 1 2276 6 9 1 2053 4 75 4 0 0 -"27110" 4 281 3 1 1 1 2 1 100 1063 100 195 6773 2 9 0 817 5 95 95 0 1 -"27111" 4 281 3 1 1 1 2 1 100 1063 195 293 2343 4 10 0 389 3 50 98 0 1 -"27112" 4 281 3 1 1 1 2 1 100 1063 293 366 2944 3 1 1 1658 2 25 73 1 1 -"27113" 4 281 3 1 1 1 2 1 100 1063 366 458 3264 8 6 1 4483 2 25 92 1 1 -"27114" 4 281 3 1 1 1 2 1 100 1063 458 573 2771 5 7 0 837 2 25 115 0 1 -"27115" 4 281 3 1 1 1 2 1 100 1063 573 716 1795 6 8 0 387 2 25 143 0 1 -"27116" 4 281 3 1 1 1 2 1 100 1063 716 895 1357 7 5 1 403 2 25 179 1 1 -"27117" 4 281 3 1 1 1 2 1 100 1063 895 850 3956 1 10 1 701 1 5 45 0 0 -"27118" 4 281 3 1 1 1 2 1 100 1063 850 1063 1953 9 3 1 4978 2 25 213 1 1 -"27119" 4 281 3 1 1 1 3 1 100 1002 
100 175 2667 3 5 0 1498 4 75 75 0 1 -"27120" 4 281 3 1 1 1 3 1 100 1002 175 219 3887 8 9 0 421 2 25 44 0 1 -"27121" 4 281 3 1 1 1 3 1 100 1002 219 329 1141 2 4 0 293 3 50 110 0 1 -"27122" 4 281 3 1 1 1 3 1 100 1002 329 494 1812 6 3 1 308 3 50 165 1 1 -"27123" 4 281 3 1 1 1 3 1 100 1002 494 469 2298 7 6 0 731 1 5 25 1 0 -"27124" 4 281 3 1 1 1 3 1 100 1002 469 352 1269 4 2 0 974 2 25 117 1 0 -"27125" 4 281 3 1 1 1 3 1 100 1002 352 686 1469 1 8 0 1125 5 95 334 0 1 -"27126" 4 281 3 1 1 1 3 1 100 1002 686 514 3052 5 7 1 296 2 25 172 0 0 -"27127" 4 281 3 1 1 1 3 1 100 1002 514 1002 1679 9 5 1 0 5 95 488 1 1 -"27128" 4 281 3 1 1 1 4 1 100 437 100 150 2418 8 7 1 781 3 50 50 1 1 -"27129" 4 281 3 1 1 1 4 1 100 437 150 263 941 3 10 0 430 4 75 113 0 1 -"27130" 4 281 3 1 1 1 4 1 100 437 263 66 1007 7 9 1 1637 4 75 197 0 0 -"27131" 4 281 3 1 1 1 4 1 100 437 66 129 1973 9 1 1 1125 5 95 63 1 1 -"27132" 4 281 3 1 1 1 4 1 100 437 129 252 1068 2 3 0 1895 5 95 123 0 1 -"27133" 4 281 3 1 1 1 4 1 100 437 252 239 3290 1 8 1 659 1 5 13 0 0 -"27134" 4 281 3 1 1 1 4 1 100 437 239 466 1649 5 4 1 1449 5 95 227 1 1 -"27135" 4 281 3 1 1 1 4 1 100 437 466 583 2449 4 2 1 426 2 25 117 1 1 -"27136" 4 281 3 1 1 1 4 1 100 437 583 437 2307 6 3 0 2747 2 25 146 1 0 -"27137" 4 281 4 0 1 0 1 1 100 18 100 195 5891 2 7 0 3928 1 95 95 0 1 -"27138" 4 281 4 0 1 0 1 1 100 18 195 380 2387 3 7 0 791 1 95 185 0 1 -"27139" 4 281 4 0 1 0 1 1 100 18 380 361 2966 2 8 1 1687 5 5 19 0 0 -"27140" 4 281 4 0 1 0 1 1 100 18 361 18 1124 8 9 1 324 1 95 343 0 0 -"27141" 4 281 5 1 1 0 1 0 100 0 100 195 2438 1 3 0 352 1 95 95 0 1 -"27142" 4 281 5 1 1 0 1 0 100 0 195 380 1220 6 5 1 287 1 95 185 1 1 -"27143" 4 281 5 1 1 0 1 0 100 0 380 741 1182 2 8 0 476 1 95 361 0 1 -"27144" 4 281 5 1 1 0 1 0 100 0 741 37 994 8 9 1 1137 1 95 704 0 0 -"27145" 4 281 5 1 1 0 1 0 100 0 37 72 3096 3 4 0 275 1 95 35 0 1 -"27146" 4 281 5 1 1 0 1 0 100 0 72 4 3123 5 7 1 356 1 95 68 0 0 -"27147" 4 281 5 1 1 0 1 0 100 0 4 8 1928 7 4 1 218 1 95 4 1 1 -"27148" 4 281 5 1 1 0 1 0 100 0 8 0 932 4 1 0 317 1 95 8 1 0 -"27149" 4 281 5 1 1 0 2 1 100 7729 100 195 2420 8 1 1 350 1 95 95 1 1 -"27150" 4 281 5 1 1 0 2 1 100 7729 195 380 1499 6 2 1 753 1 95 185 1 1 -"27151" 4 281 5 1 1 0 2 1 100 7729 380 741 2187 7 9 0 225 1 95 361 0 1 -"27152" 4 281 5 1 1 0 2 1 100 7729 741 1112 2683 2 10 0 905 3 50 371 0 1 -"27153" 4 281 5 1 1 0 2 1 100 7729 1112 2168 2852 5 3 1 482 1 95 1056 1 1 -"27154" 4 281 5 1 1 0 2 1 100 7729 2168 1626 3424 4 2 0 2731 4 25 542 1 0 -"27155" 4 281 5 1 1 0 2 1 100 7729 1626 3171 2262 3 5 0 528 1 95 1545 0 1 -"27156" 4 281 5 1 1 0 2 1 100 7729 3171 6183 1604 9 4 1 461 1 95 3012 1 1 -"27157" 4 281 5 1 1 0 2 1 100 7729 6183 7729 2148 1 7 0 4196 4 25 1546 0 1 -"27158" 4 281 5 1 1 0 3 0 100 0 100 195 1479 7 5 1 382 1 95 95 1 1 -"27159" 4 281 5 1 1 0 3 0 100 0 195 10 1018 2 1 0 302 1 95 185 1 0 -"27160" 4 281 5 1 1 0 3 0 100 0 10 0 1085 8 6 0 887 1 95 10 1 0 -"27161" 4 281 5 1 1 0 4 1 100 2030 100 195 2196 2 3 0 408 1 95 95 0 1 -"27162" 4 281 5 1 1 0 4 1 100 2030 195 293 1178 7 4 1 208 3 50 98 1 1 -"27163" 4 281 5 1 1 0 4 1 100 2030 293 366 2835 3 1 1 3834 4 25 73 1 1 -"27164" 4 281 5 1 1 0 4 1 100 2030 366 714 1273 1 9 0 354 1 95 348 0 1 -"27165" 4 281 5 1 1 0 4 1 100 2030 714 1392 1254 8 7 1 304 1 95 678 1 1 -"27166" 4 281 5 1 1 0 4 1 100 2030 1392 1322 2894 9 2 0 971 5 5 70 1 0 -"27167" 4 281 5 1 1 0 4 1 100 2030 1322 1388 1354 5 6 0 416 5 5 66 0 1 -"27168" 4 281 5 1 1 0 4 1 100 2030 1388 1041 2801 6 8 1 1978 4 25 347 0 0 -"27169" 4 281 5 1 1 0 4 1 100 2030 1041 2030 1905 4 7 
0 276 1 95 989 0 1 -"27170" 4 283 2 0 1 1 1 1 100 69 100 125 8221 8 3 1 2035 2 25 25 1 1 -"27171" 4 283 2 0 1 1 1 1 100 69 125 131 3864 3 7 0 4621 1 5 6 0 1 -"27172" 4 283 2 0 1 1 1 1 100 69 131 138 1206 8 2 1 3202 1 5 7 1 1 -"27173" 4 283 2 0 1 1 1 1 100 69 138 69 1231 2 1 0 1147 3 50 69 1 0 -"27174" 4 283 3 1 1 1 1 1 100 324 100 125 2799 9 7 1 1792 2 25 25 1 1 -"27175" 4 283 3 1 1 1 1 1 100 324 125 156 9040 4 8 0 2351 2 25 31 0 1 -"27176" 4 283 3 1 1 1 1 1 100 324 156 234 1892 8 2 1 24 3 50 78 1 1 -"27177" 4 283 3 1 1 1 1 1 100 324 234 175 1228 2 1 0 1647 2 25 59 1 0 -"27178" 4 283 3 1 1 1 1 1 100 324 175 219 1198 7 6 1 1203 2 25 44 1 1 -"27179" 4 283 3 1 1 1 1 1 100 324 219 230 1463 5 3 1 2695 1 5 11 1 1 -"27180" 4 283 3 1 1 1 1 1 100 324 230 288 932 3 6 0 3429 2 25 58 0 1 -"27181" 4 283 3 1 1 1 1 1 100 324 288 216 1111 6 9 1 792 2 25 72 0 0 -"27182" 4 283 3 1 1 1 1 1 100 324 216 324 1191 1 4 0 1621 3 50 108 0 1 -"27183" 4 283 3 1 1 1 2 1 100 656 100 150 1830 2 9 0 3357 3 50 50 0 1 -"27184" 4 283 3 1 1 1 2 1 100 656 150 263 1290 4 10 0 1059 4 75 113 0 1 -"27185" 4 283 3 1 1 1 2 1 100 656 263 197 1477 3 1 0 1565 2 25 66 1 0 -"27186" 4 283 3 1 1 1 2 1 100 656 197 296 1031 8 6 1 1557 3 50 99 1 1 -"27187" 4 283 3 1 1 1 2 1 100 656 296 311 2696 5 7 0 1349 1 5 15 0 1 -"27188" 4 283 3 1 1 1 2 1 100 656 311 233 1386 6 8 1 2877 2 25 78 0 0 -"27189" 4 283 3 1 1 1 2 1 100 656 233 291 1177 7 5 1 2727 2 25 58 1 1 -"27190" 4 283 3 1 1 1 2 1 100 656 291 437 1690 1 10 0 880 3 50 146 0 1 -"27191" 4 283 3 1 1 1 2 1 100 656 437 656 1315 9 3 1 1067 3 50 219 1 1 -"27192" 4 283 3 1 1 1 3 1 100 164 100 175 1696 3 5 0 2933 4 75 75 0 1 -"27193" 4 283 3 1 1 1 3 1 100 164 175 87 951 8 9 1 3175 3 50 88 0 0 -"27194" 4 283 3 1 1 1 3 1 100 164 87 152 1053 2 4 0 1961 4 75 65 0 1 -"27195" 4 283 3 1 1 1 3 1 100 164 152 190 809 6 3 1 4904 2 25 38 1 1 -"27196" 4 283 3 1 1 1 3 1 100 164 190 200 1100 7 6 1 2604 1 5 10 1 1 -"27197" 4 283 3 1 1 1 3 1 100 164 200 100 1085 4 2 0 1055 3 50 100 1 0 -"27198" 4 283 3 1 1 1 3 1 100 164 100 175 1755 1 8 0 2621 4 75 75 0 1 -"27199" 4 283 3 1 1 1 3 1 100 164 175 131 3813 5 7 1 661 2 25 44 0 0 -"27200" 4 283 3 1 1 1 3 1 100 164 131 164 1115 9 5 1 1988 2 25 33 1 1 -"27201" 4 283 3 1 1 1 4 1 100 996 100 175 1370 8 7 1 862 4 75 75 1 1 -"27202" 4 283 3 1 1 1 4 1 100 996 175 263 1218 3 10 0 694 3 50 88 0 1 -"27203" 4 283 3 1 1 1 4 1 100 996 263 197 914 7 9 1 3909 2 25 66 0 0 -"27204" 4 283 3 1 1 1 4 1 100 996 197 296 1328 9 1 1 463 3 50 99 1 1 -"27205" 4 283 3 1 1 1 4 1 100 996 296 444 854 2 3 0 1595 3 50 148 0 1 -"27206" 4 283 3 1 1 1 4 1 100 996 444 666 876 1 8 0 2241 3 50 222 0 1 -"27207" 4 283 3 1 1 1 4 1 100 996 666 999 2891 5 4 1 1008 3 50 333 1 1 -"27208" 4 283 3 1 1 1 4 1 100 996 999 949 1061 4 2 0 1040 1 5 50 1 0 -"27209" 4 283 3 1 1 1 4 1 100 996 949 996 915 6 3 1 1509 1 5 47 1 1 -"27210" 4 283 4 0 1 0 1 1 100 37 100 195 1114 2 7 0 915 1 95 95 0 1 -"27211" 4 283 4 0 1 0 1 1 100 37 195 380 845 3 7 0 1186 1 95 185 0 1 -"27212" 4 283 4 0 1 0 1 1 100 37 380 741 911 2 8 0 2146 1 95 361 0 1 -"27213" 4 283 4 0 1 0 1 1 100 37 741 37 1065 8 9 1 1180 1 95 704 0 0 -"27214" 4 283 5 1 1 0 1 0 100 0 100 175 3946 1 3 0 812 2 75 75 0 1 -"27215" 4 283 5 1 1 0 1 0 100 0 175 341 1578 6 5 1 1861 1 95 166 1 1 -"27216" 4 283 5 1 1 0 1 0 100 0 341 665 794 2 8 0 722 1 95 324 0 1 -"27217" 4 283 5 1 1 0 1 0 100 0 665 33 1100 8 9 1 4085 1 95 632 0 0 -"27218" 4 283 5 1 1 0 1 0 100 0 33 64 744 3 4 0 1418 1 95 31 0 1 -"27219" 4 283 5 1 1 0 1 0 100 0 64 3 948 5 7 1 3404 1 95 61 0 0 -"27220" 4 283 5 1 1 0 1 0 100 
0 3 6 896 7 4 1 1123 1 95 3 1 1 -"27221" 4 283 5 1 1 0 1 0 100 0 6 0 1091 4 1 0 4237 1 95 6 1 0 -"27222" 4 283 5 1 1 0 2 0 100 0 100 195 3026 8 1 1 1336 1 95 95 1 1 -"27223" 4 283 5 1 1 0 2 0 100 0 195 380 901 6 2 1 1464 1 95 185 1 1 -"27224" 4 283 5 1 1 0 2 0 100 0 380 19 871 7 9 1 4955 1 95 361 0 0 -"27225" 4 283 5 1 1 0 2 0 100 0 19 37 988 2 10 0 1203 1 95 18 0 1 -"27226" 4 283 5 1 1 0 2 0 100 0 37 2 2239 5 3 0 1682 1 95 35 1 0 -"27227" 4 283 5 1 1 0 2 0 100 0 2 0 1175 4 2 0 1333 1 95 2 1 0 -"27228" 4 283 5 1 1 0 3 1 100 31 100 195 1208 7 5 1 628 1 95 95 1 1 -"27229" 4 283 5 1 1 0 3 1 100 31 195 10 947 2 1 0 3356 1 95 185 1 0 -"27230" 4 283 5 1 1 0 3 1 100 31 10 20 1242 8 6 1 3269 1 95 10 1 1 -"27231" 4 283 5 1 1 0 3 1 100 31 20 39 3342 4 7 0 2117 1 95 19 0 1 -"27232" 4 283 5 1 1 0 3 1 100 31 39 76 1373 3 10 0 1329 1 95 37 0 1 -"27233" 4 283 5 1 1 0 3 1 100 31 76 4 951 6 8 1 1351 1 95 72 0 0 -"27234" 4 283 5 1 1 0 3 1 100 31 4 8 948 9 2 1 2604 1 95 4 1 1 -"27235" 4 283 5 1 1 0 3 1 100 31 8 16 1517 5 3 1 1659 1 95 8 1 1 -"27236" 4 283 5 1 1 0 3 1 100 31 16 31 1201 1 10 0 2189 1 95 15 0 1 -"27237" 4 283 5 1 1 0 4 0 100 0 100 195 1165 2 3 0 1291 1 95 95 0 1 -"27238" 4 283 5 1 1 0 4 0 100 0 195 380 1691 7 4 1 2864 1 95 185 1 1 -"27239" 4 283 5 1 1 0 4 0 100 0 380 19 1589 3 1 0 2295 1 95 361 1 0 -"27240" 4 283 5 1 1 0 4 0 100 0 19 37 799 1 9 0 1910 1 95 18 0 1 -"27241" 4 283 5 1 1 0 4 0 100 0 37 72 865 8 7 1 2501 1 95 35 1 1 -"27242" 4 283 5 1 1 0 4 0 100 0 72 140 1187 9 2 1 1328 1 95 68 1 1 -"27243" 4 283 5 1 1 0 4 0 100 0 140 7 1246 5 6 1 728 1 95 133 0 0 -"27244" 4 283 5 1 1 0 4 0 100 0 7 0 1331 6 8 1 1129 1 95 7 0 0 -"27245" 4 285 2 0 1 0 1 1 100 230 100 150 7729 2 7 0 807 3 50 50 0 1 -"27246" 4 285 2 0 1 0 1 1 100 230 150 263 10565 3 7 0 3981 2 75 113 0 1 -"27247" 4 285 2 0 1 0 1 1 100 230 263 460 3028 2 8 0 746 2 75 197 0 1 -"27248" 4 285 2 0 1 0 1 1 100 230 460 230 2284 8 9 1 765 3 50 230 0 0 -"27249" 4 285 3 1 1 0 1 1 100 1887 100 150 4158 1 3 0 822 3 50 50 0 1 -"27250" 4 285 3 1 1 0 1 1 100 1887 150 263 4692 6 5 1 907 2 75 113 1 1 -"27251" 4 285 3 1 1 0 1 1 100 1887 263 460 2069 2 8 0 1184 2 75 197 0 1 -"27252" 4 285 3 1 1 0 1 1 100 1887 460 115 3197 8 9 1 771 2 75 345 0 0 -"27253" 4 285 3 1 1 0 1 1 100 1887 115 201 3531 3 4 0 369 2 75 86 0 1 -"27254" 4 285 3 1 1 0 1 1 100 1887 201 352 5004 5 7 0 228 2 75 151 0 1 -"27255" 4 285 3 1 1 0 1 1 100 1887 352 616 3566 7 4 1 635 2 75 264 1 1 -"27256" 4 285 3 1 1 0 1 1 100 1887 616 1078 4170 4 1 1 777 2 75 462 1 1 -"27257" 4 285 3 1 1 0 1 1 100 1887 1078 1887 5129 9 6 1 387 2 75 809 1 1 -"27258" 4 285 3 1 1 0 2 1 100 76 100 175 3594 8 1 1 757 2 75 75 1 1 -"27259" 4 285 3 1 1 0 2 1 100 76 175 306 3599 6 2 1 609 2 75 131 1 1 -"27260" 4 285 3 1 1 0 2 1 100 76 306 536 4529 7 9 0 447 2 75 230 0 1 -"27261" 4 285 3 1 1 0 2 1 100 76 536 804 2655 2 10 0 1039 3 50 268 0 1 -"27262" 4 285 3 1 1 0 2 1 100 76 804 40 5739 5 3 0 2207 1 95 764 1 0 -"27263" 4 285 3 1 1 0 2 1 100 76 40 10 2531 4 2 0 289 2 75 30 1 0 -"27264" 4 285 3 1 1 0 2 1 100 76 10 20 2152 3 5 0 3519 1 95 10 0 1 -"27265" 4 285 3 1 1 0 2 1 100 76 20 39 1781 9 4 1 2710 1 95 19 1 1 -"27266" 4 285 3 1 1 0 2 1 100 76 39 76 1906 1 7 0 605 1 95 37 0 1 -"27267" 4 285 3 1 1 0 3 1 100 31 100 195 1865 7 5 1 1166 1 95 95 1 1 -"27268" 4 285 3 1 1 0 3 1 100 31 195 10 1955 2 1 0 492 1 95 185 1 0 -"27269" 4 285 3 1 1 0 3 1 100 31 10 20 2346 8 6 1 616 1 95 10 1 1 -"27270" 4 285 3 1 1 0 3 1 100 31 20 39 1640 4 7 0 631 1 95 19 0 1 -"27271" 4 285 3 1 1 0 3 1 100 31 39 76 2502 3 10 0 581 1 95 37 0 1 -"27272" 4 
285 3 1 1 0 3 1 100 31 76 4 1783 6 8 1 815 1 95 72 0 0 -"27273" 4 285 3 1 1 0 3 1 100 31 4 8 1830 9 2 1 892 1 95 4 1 1 -"27274" 4 285 3 1 1 0 3 1 100 31 8 16 1879 5 3 1 747 1 95 8 1 1 -"27275" 4 285 3 1 1 0 3 1 100 31 16 31 1661 1 10 0 2003 1 95 15 0 1 -"27276" 4 285 3 1 1 0 4 1 100 2332 100 195 1935 2 3 0 800 1 95 95 0 1 -"27277" 4 285 3 1 1 0 4 1 100 2332 195 49 3048 7 4 0 661 2 75 146 1 0 -"27278" 4 285 3 1 1 0 4 1 100 2332 49 96 2290 3 1 1 1579 1 95 47 1 1 -"27279" 4 285 3 1 1 0 4 1 100 2332 96 187 2205 1 9 0 1266 1 95 91 0 1 -"27280" 4 285 3 1 1 0 4 1 100 2332 187 327 2635 8 7 1 787 2 75 140 1 1 -"27281" 4 285 3 1 1 0 4 1 100 2332 327 638 4027 9 2 1 833 1 95 311 1 1 -"27282" 4 285 3 1 1 0 4 1 100 2332 638 957 2703 5 6 0 572 3 50 319 0 1 -"27283" 4 285 3 1 1 0 4 1 100 2332 957 1196 5550 6 8 0 948 4 25 239 0 1 -"27284" 4 285 3 1 1 0 4 1 100 2332 1196 2332 4060 4 7 0 1000 1 95 1136 0 1 -"27285" 4 285 4 0 1 1 1 1 100 296 100 150 4677 8 3 1 3581 3 50 50 1 1 -"27286" 4 285 4 0 1 1 1 1 100 296 150 263 4648 3 7 0 352 4 75 113 0 1 -"27287" 4 285 4 0 1 1 1 1 100 296 263 395 2740 8 2 1 888 3 50 132 1 1 -"27288" 4 285 4 0 1 1 1 1 100 296 395 296 2195 2 1 0 2352 2 25 99 1 0 -"27289" 4 285 5 1 1 1 1 1 100 98 100 195 3839 9 7 1 270 5 95 95 1 1 -"27290" 4 285 5 1 1 1 1 1 100 98 195 293 2646 4 8 0 638 3 50 98 0 1 -"27291" 4 285 5 1 1 1 1 1 100 98 293 513 1876 8 2 1 1625 4 75 220 1 1 -"27292" 4 285 5 1 1 1 1 1 100 98 513 26 2017 2 1 0 431 5 95 487 1 0 -"27293" 4 285 5 1 1 1 1 1 100 98 26 39 1696 7 6 1 1029 3 50 13 1 1 -"27294" 4 285 5 1 1 1 1 1 100 98 39 19 1658 5 3 0 280 3 50 20 1 0 -"27295" 4 285 5 1 1 1 1 1 100 98 19 33 1368 3 6 0 309 4 75 14 0 1 -"27296" 4 285 5 1 1 1 1 1 100 98 33 50 2251 6 9 0 1358 3 50 17 0 1 -"27297" 4 285 5 1 1 1 1 1 100 98 50 98 2163 1 4 0 690 5 95 48 0 1 -"27298" 4 285 5 1 1 1 2 1 100 837 100 195 1868 2 9 0 506 5 95 95 0 1 -"27299" 4 285 5 1 1 1 2 1 100 837 195 244 2310 4 10 0 414 2 25 49 0 1 -"27300" 4 285 5 1 1 1 2 1 100 837 244 256 2144 3 1 1 959 1 5 12 1 1 -"27301" 4 285 5 1 1 1 2 1 100 837 256 384 2062 8 6 1 834 3 50 128 1 1 -"27302" 4 285 5 1 1 1 2 1 100 837 384 365 2240 5 7 1 1219 1 5 19 0 0 -"27303" 4 285 5 1 1 1 2 1 100 837 365 182 1558 6 8 1 266 3 50 183 0 0 -"27304" 4 285 5 1 1 1 2 1 100 837 182 319 1571 7 5 1 454 4 75 137 1 1 -"27305" 4 285 5 1 1 1 2 1 100 837 319 558 2301 1 10 0 638 4 75 239 0 1 -"27306" 4 285 5 1 1 1 2 1 100 837 558 837 2060 9 3 1 320 3 50 279 1 1 -"27307" 4 285 5 1 1 1 3 1 100 32 100 175 1903 3 5 0 907 4 75 75 0 1 -"27308" 4 285 5 1 1 1 3 1 100 32 175 87 2308 8 9 1 1125 3 50 88 0 0 -"27309" 4 285 5 1 1 1 3 1 100 32 87 152 1476 2 4 0 212 4 75 65 0 1 -"27310" 4 285 5 1 1 1 3 1 100 32 152 38 2687 6 3 0 413 4 75 114 1 0 -"27311" 4 285 5 1 1 1 3 1 100 32 38 28 1487 7 6 0 326 2 25 10 1 0 -"27312" 4 285 5 1 1 1 3 1 100 32 28 21 1971 4 2 0 996 2 25 7 1 0 -"27313" 4 285 5 1 1 1 3 1 100 32 21 32 1729 1 8 0 661 3 50 11 0 1 -"27314" 4 285 5 1 1 1 3 1 100 32 32 30 2795 5 7 1 3092 1 5 2 0 0 -"27315" 4 285 5 1 1 1 3 1 100 32 30 32 1173 9 5 1 1411 1 5 2 1 1 -"27316" 4 285 5 1 1 1 4 1 100 848 100 175 8501 8 7 1 1694 4 75 75 1 1 -"27317" 4 285 5 1 1 1 4 1 100 848 175 263 2203 3 10 0 769 3 50 88 0 1 -"27318" 4 285 5 1 1 1 4 1 100 848 263 276 2873 7 9 0 4110 1 5 13 0 1 -"27319" 4 285 5 1 1 1 4 1 100 848 276 483 1840 9 1 1 846 4 75 207 1 1 -"27320" 4 285 5 1 1 1 4 1 100 848 483 459 3491 2 3 1 499 1 5 24 0 0 -"27321" 4 285 5 1 1 1 4 1 100 848 459 895 1368 1 8 0 461 5 95 436 0 1 -"27322" 4 285 5 1 1 1 4 1 100 848 895 940 3252 5 4 1 1863 1 5 45 1 1 -"27323" 4 285 5 1 
1 1 4 1 100 848 940 893 2228 4 2 0 1015 1 5 47 1 0 -"27324" 4 285 5 1 1 1 4 1 100 848 893 848 2172 6 3 0 459 1 5 45 1 0 -"27325" 4 287 2 0 1 0 1 1 100 115 100 150 6128 2 7 0 1554 3 50 50 0 1 -"27326" 4 287 2 0 1 0 1 1 100 115 150 263 16557 3 7 0 3775 2 75 113 0 1 -"27327" 4 287 2 0 1 0 1 1 100 115 263 460 2353 2 8 0 1583 2 75 197 0 1 -"27328" 4 287 2 0 1 0 1 1 100 115 460 115 1640 8 9 1 1520 2 75 345 0 0 -"27329" 4 287 3 1 1 0 1 1 100 16 100 150 6485 1 3 0 854 3 50 50 0 1 -"27330" 4 287 3 1 1 0 1 1 100 16 150 263 1963 6 5 1 1891 2 75 113 1 1 -"27331" 4 287 3 1 1 0 1 1 100 16 263 460 2407 2 8 0 674 2 75 197 0 1 -"27332" 4 287 3 1 1 0 1 1 100 16 460 23 1863 8 9 1 1228 1 95 437 0 0 -"27333" 4 287 3 1 1 0 1 1 100 16 23 45 2038 3 4 0 2443 1 95 22 0 1 -"27334" 4 287 3 1 1 0 1 1 100 16 45 79 5340 5 7 0 612 2 75 34 0 1 -"27335" 4 287 3 1 1 0 1 1 100 16 79 154 1828 7 4 1 1250 1 95 75 1 1 -"27336" 4 287 3 1 1 0 1 1 100 16 154 8 1668 4 1 0 1347 1 95 146 1 0 -"27337" 4 287 3 1 1 0 1 1 100 16 8 16 1319 9 6 1 989 1 95 8 1 1 -"27338" 4 287 3 1 1 0 2 0 100 1 100 195 1646 8 1 1 1269 1 95 95 1 1 -"27339" 4 287 3 1 1 0 2 0 100 1 195 293 2542 6 2 1 1380 3 50 98 1 1 -"27340" 4 287 3 1 1 0 2 0 100 1 293 15 3135 7 9 1 1495 1 95 278 0 0 -"27341" 4 287 3 1 1 0 2 0 100 1 15 29 1269 2 10 0 334 1 95 14 0 1 -"27342" 4 287 3 1 1 0 2 0 100 1 29 1 1659 5 3 0 458 1 95 28 1 0 -"27343" 4 287 3 1 1 0 3 0 100 0 100 195 1689 7 5 1 494 1 95 95 1 1 -"27344" 4 287 3 1 1 0 3 0 100 0 195 10 1460 2 1 0 1172 1 95 185 1 0 -"27345" 4 287 3 1 1 0 3 0 100 0 10 20 2658 8 6 1 329 1 95 10 1 1 -"27346" 4 287 3 1 1 0 3 0 100 0 20 39 1207 4 7 0 715 1 95 19 0 1 -"27347" 4 287 3 1 1 0 3 0 100 0 39 76 4054 3 10 0 298 1 95 37 0 1 -"27348" 4 287 3 1 1 0 3 0 100 0 76 4 1705 6 8 1 670 1 95 72 0 0 -"27349" 4 287 3 1 1 0 3 0 100 0 4 0 1834 9 2 0 657 1 95 4 1 0 -"27350" 4 287 3 1 1 0 4 1 100 27 100 195 1330 2 3 0 904 1 95 95 0 1 -"27351" 4 287 3 1 1 0 4 1 100 27 195 380 1547 7 4 1 431 1 95 185 1 1 -"27352" 4 287 3 1 1 0 4 1 100 27 380 19 1536 3 1 0 380 1 95 361 1 0 -"27353" 4 287 3 1 1 0 4 1 100 27 19 37 1007 1 9 0 597 1 95 18 0 1 -"27354" 4 287 3 1 1 0 4 1 100 27 37 72 1190 8 7 1 307 1 95 35 1 1 -"27355" 4 287 3 1 1 0 4 1 100 27 72 140 2561 9 2 1 397 1 95 68 1 1 -"27356" 4 287 3 1 1 0 4 1 100 27 140 273 1991 5 6 0 638 1 95 133 0 1 -"27357" 4 287 3 1 1 0 4 1 100 27 273 14 1628 6 8 1 700 1 95 259 0 0 -"27358" 4 287 3 1 1 0 4 1 100 27 14 27 1088 4 7 0 370 1 95 13 0 1 -"27359" 4 287 4 0 1 1 1 1 100 115 100 150 4293 8 3 1 326 3 50 50 1 1 -"27360" 4 287 4 0 1 1 1 1 100 115 150 263 5515 3 7 0 2355 4 75 113 0 1 -"27361" 4 287 4 0 1 1 1 1 100 115 263 460 1672 8 2 1 1134 4 75 197 1 1 -"27362" 4 287 4 0 1 1 1 1 100 115 460 115 2166 2 1 0 1309 4 75 345 1 0 -"27363" 4 287 5 1 1 1 1 1 100 6 100 195 2919 9 7 1 658 5 95 95 1 1 -"27364" 4 287 5 1 1 1 1 1 100 6 195 293 1415 4 8 0 3186 3 50 98 0 1 -"27365" 4 287 5 1 1 1 1 1 100 6 293 513 1976 8 2 1 1096 4 75 220 1 1 -"27366" 4 287 5 1 1 1 1 1 100 6 513 26 1745 2 1 0 1732 5 95 487 1 0 -"27367" 4 287 5 1 1 1 1 1 100 6 26 27 1950 7 6 1 343 1 5 1 1 1 -"27368" 4 287 5 1 1 1 1 1 100 6 27 26 1652 5 3 0 699 1 5 1 1 0 -"27369" 4 287 5 1 1 1 1 1 100 6 26 27 1463 3 6 0 2360 1 5 1 0 1 -"27370" 4 287 5 1 1 1 1 1 100 6 27 26 1445 6 9 1 541 1 5 1 0 0 -"27371" 4 287 5 1 1 1 1 1 100 6 26 6 1313 1 4 1 1048 4 75 20 0 0 -"27372" 4 287 5 1 1 1 2 1 100 2414 100 175 6118 2 9 0 1562 4 75 75 0 1 -"27373" 4 287 5 1 1 1 2 1 100 2414 175 341 5180 4 10 0 908 5 95 166 0 1 -"27374" 4 287 5 1 1 1 2 1 100 2414 341 426 1710 3 1 1 786 2 25 85 1 1 -"27375" 
4 287 5 1 1 1 2 1 100 2414 426 746 1970 8 6 1 2275 4 75 320 1 1 -"27376" 4 287 5 1 1 1 2 1 100 2414 746 1306 2916 5 7 0 2759 4 75 560 0 1 -"27377" 4 287 5 1 1 1 2 1 100 2414 1306 1241 2160 6 8 1 1087 1 5 65 0 0 -"27378" 4 287 5 1 1 1 2 1 100 2414 1241 1179 1401 7 5 0 828 1 5 62 1 0 -"27379" 4 287 5 1 1 1 2 1 100 2414 1179 1238 970 1 10 0 417 1 5 59 0 1 -"27380" 4 287 5 1 1 1 2 1 100 2414 1238 2414 1691 9 3 1 1336 5 95 1176 1 1 -"27381" 4 287 5 1 1 1 3 1 100 1 100 105 1592 3 5 0 1570 1 5 5 0 1 -"27382" 4 287 5 1 1 1 3 1 100 1 105 100 1117 8 9 1 517 1 5 5 0 0 -"27383" 4 287 5 1 1 1 3 1 100 1 100 175 938 2 4 0 1403 4 75 75 0 1 -"27384" 4 287 5 1 1 1 3 1 100 1 175 131 1374 6 3 0 1554 2 25 44 1 0 -"27385" 4 287 5 1 1 1 3 1 100 1 131 255 3549 7 6 1 1635 5 95 124 1 1 -"27386" 4 287 5 1 1 1 3 1 100 1 255 13 930 4 2 0 1284 5 95 242 1 0 -"27387" 4 287 5 1 1 1 3 1 100 1 13 3 1444 1 8 1 974 4 75 10 0 0 -"27388" 4 287 5 1 1 1 3 1 100 1 3 5 1700 5 7 0 681 3 50 2 0 1 -"27389" 4 287 5 1 1 1 3 1 100 1 5 1 1462 9 5 0 1351 4 75 4 1 0 -"27390" 4 287 5 1 1 1 4 0 100 1 100 5 3769 8 7 0 2332 5 95 95 1 0 -"27391" 4 287 5 1 1 1 4 0 100 1 5 1 6771 3 10 1 718 4 75 4 0 0 -"27392" 4 288 2 0 1 0 1 1 100 140 100 150 8844 2 7 0 1165 3 50 50 0 1 -"27393" 4 288 2 0 1 0 1 1 100 140 150 225 14515 3 7 0 1662 3 50 75 0 1 -"27394" 4 288 2 0 1 0 1 1 100 140 225 281 2020 2 8 0 626 4 25 56 0 1 -"27395" 4 288 2 0 1 0 1 1 100 140 281 140 1513 8 9 1 1055 3 50 141 0 0 -"27396" 4 288 3 1 1 0 1 1 100 209 100 150 7978 1 3 0 972 3 50 50 0 1 -"27397" 4 288 3 1 1 0 1 1 100 209 150 75 1971 6 5 0 1196 3 50 75 1 0 -"27398" 4 288 3 1 1 0 1 1 100 209 75 113 1616 2 8 0 1457 3 50 38 0 1 -"27399" 4 288 3 1 1 0 1 1 100 209 113 56 1929 8 9 1 1219 3 50 57 0 0 -"27400" 4 288 3 1 1 0 1 1 100 209 56 98 1422 3 4 0 1104 2 75 42 0 1 -"27401" 4 288 3 1 1 0 1 1 100 209 98 123 2889 5 7 0 806 4 25 25 0 1 -"27402" 4 288 3 1 1 0 1 1 100 209 123 215 1596 7 4 1 1022 2 75 92 1 1 -"27403" 4 288 3 1 1 0 1 1 100 209 215 107 3387 4 1 0 1231 3 50 108 1 0 -"27404" 4 288 3 1 1 0 1 1 100 209 107 209 1698 9 6 1 1623 1 95 102 1 1 -"27405" 4 288 3 1 1 0 2 1 100 515 100 195 2558 8 1 1 1202 1 95 95 1 1 -"27406" 4 288 3 1 1 0 2 1 100 515 195 293 1630 6 2 1 1247 3 50 98 1 1 -"27407" 4 288 3 1 1 0 2 1 100 515 293 146 2073 7 9 1 1436 3 50 147 0 0 -"27408" 4 288 3 1 1 0 2 1 100 515 146 256 1477 2 10 0 1205 2 75 110 0 1 -"27409" 4 288 3 1 1 0 2 1 100 515 256 192 1921 5 3 0 1052 4 25 64 1 0 -"27410" 4 288 3 1 1 0 2 1 100 515 192 96 1631 4 2 0 1087 3 50 96 1 0 -"27411" 4 288 3 1 1 0 2 1 100 515 96 168 1281 3 5 0 938 2 75 72 0 1 -"27412" 4 288 3 1 1 0 2 1 100 515 168 294 1474 9 4 1 1002 2 75 126 1 1 -"27413" 4 288 3 1 1 0 2 1 100 515 294 515 1183 1 7 0 1173 2 75 221 0 1 -"27414" 4 288 3 1 1 0 3 1 100 481 100 175 1598 7 5 1 1216 2 75 75 1 1 -"27415" 4 288 3 1 1 0 3 1 100 481 175 44 1323 2 1 0 1122 2 75 131 1 0 -"27416" 4 288 3 1 1 0 3 1 100 481 44 86 1218 8 6 1 1653 1 95 42 1 1 -"27417" 4 288 3 1 1 0 3 1 100 481 86 151 1788 4 7 0 1453 2 75 65 0 1 -"27418" 4 288 3 1 1 0 3 1 100 481 151 227 1373 3 10 0 1649 3 50 76 0 1 -"27419" 4 288 3 1 1 0 3 1 100 481 227 113 1472 6 8 1 1264 3 50 114 0 0 -"27420" 4 288 3 1 1 0 3 1 100 481 113 220 1752 9 2 1 1363 1 95 107 1 1 -"27421" 4 288 3 1 1 0 3 1 100 481 220 275 1815 5 3 1 1151 4 25 55 1 1 -"27422" 4 288 3 1 1 0 3 1 100 481 275 481 1237 1 10 0 1304 2 75 206 0 1 -"27423" 4 288 3 1 1 0 4 1 100 951 100 175 1485 2 3 0 1108 2 75 75 0 1 -"27424" 4 288 3 1 1 0 4 1 100 951 175 306 1133 7 4 1 1709 2 75 131 1 1 -"27425" 4 288 3 1 1 0 4 1 100 951 306 76 1369 3 
1 0 1269 2 75 230 1 0 -"27426" 4 288 3 1 1 0 4 1 100 951 76 148 1374 1 9 0 1306 1 95 72 0 1 -"27427" 4 288 3 1 1 0 4 1 100 951 148 289 1385 8 7 1 1933 1 95 141 1 1 -"27428" 4 288 3 1 1 0 4 1 100 951 289 564 1222 9 2 1 1081 1 95 275 1 1 -"27429" 4 288 3 1 1 0 4 1 100 951 564 846 1971 5 6 0 1115 3 50 282 0 1 -"27430" 4 288 3 1 1 0 4 1 100 951 846 634 1411 6 8 1 918 4 25 212 0 0 -"27431" 4 288 3 1 1 0 4 1 100 951 634 951 1312 4 7 0 815 3 50 317 0 1 -"27432" 4 288 4 0 1 1 1 1 100 223 100 150 4923 8 3 1 1487 3 50 50 1 1 -"27433" 4 288 4 0 1 1 1 1 100 223 150 188 3997 3 7 0 1789 2 25 38 0 1 -"27434" 4 288 4 0 1 1 1 1 100 223 188 235 1509 8 2 1 851 2 25 47 1 1 -"27435" 4 288 4 0 1 1 1 1 100 223 235 223 951 2 1 0 1568 1 5 12 1 0 -"27436" 4 288 5 1 1 1 1 1 100 112 100 150 1757 9 7 1 1079 3 50 50 1 1 -"27437" 4 288 5 1 1 1 1 1 100 112 150 158 1233 4 8 0 932 1 5 8 0 1 -"27438" 4 288 5 1 1 1 1 1 100 112 158 198 989 8 2 1 1644 2 25 40 1 1 -"27439" 4 288 5 1 1 1 1 1 100 112 198 99 1211 2 1 0 848 3 50 99 1 0 -"27440" 4 288 5 1 1 1 1 1 100 112 99 173 1285 7 6 1 469 4 75 74 1 1 -"27441" 4 288 5 1 1 1 1 1 100 112 173 86 1460 5 3 0 645 3 50 87 1 0 -"27442" 4 288 5 1 1 1 1 1 100 112 86 129 1243 3 6 0 1398 3 50 43 0 1 -"27443" 4 288 5 1 1 1 1 1 100 112 129 64 1217 6 9 1 853 3 50 65 0 0 -"27444" 4 288 5 1 1 1 1 1 100 112 64 112 914 1 4 0 1429 4 75 48 0 1 -"27445" 4 288 5 1 1 1 2 1 100 494 100 175 1580 2 9 0 991 4 75 75 0 1 -"27446" 4 288 5 1 1 1 2 1 100 494 175 219 1221 4 10 0 1496 2 25 44 0 1 -"27447" 4 288 5 1 1 1 2 1 100 494 219 164 1454 3 1 0 4694 2 25 55 1 0 -"27448" 4 288 5 1 1 1 2 1 100 494 164 246 1380 8 6 1 992 3 50 82 1 1 -"27449" 4 288 5 1 1 1 2 1 100 494 246 234 1710 5 7 1 850 1 5 12 0 0 -"27450" 4 288 5 1 1 1 2 1 100 494 234 175 1135 6 8 1 1759 2 25 59 0 0 -"27451" 4 288 5 1 1 1 2 1 100 494 175 219 1208 7 5 1 1485 2 25 44 1 1 -"27452" 4 288 5 1 1 1 2 1 100 494 219 329 1139 1 10 0 747 3 50 110 0 1 -"27453" 4 288 5 1 1 1 2 1 100 494 329 494 1076 9 3 1 967 3 50 165 1 1 -"27454" 4 288 5 1 1 1 3 1 100 328 100 125 1169 3 5 0 983 2 25 25 0 1 -"27455" 4 288 5 1 1 1 3 1 100 328 125 62 1125 8 9 1 1830 3 50 63 0 0 -"27456" 4 288 5 1 1 1 3 1 100 328 62 109 1107 2 4 0 541 4 75 47 0 1 -"27457" 4 288 5 1 1 1 3 1 100 328 109 164 1184 6 3 1 845 3 50 55 1 1 -"27458" 4 288 5 1 1 1 3 1 100 328 164 246 1106 7 6 1 1051 3 50 82 1 1 -"27459" 4 288 5 1 1 1 3 1 100 328 246 184 1287 4 2 0 1023 2 25 62 1 0 -"27460" 4 288 5 1 1 1 3 1 100 328 184 276 1139 1 8 0 563 3 50 92 0 1 -"27461" 4 288 5 1 1 1 3 1 100 328 276 262 1965 5 7 1 1445 1 5 14 0 0 -"27462" 4 288 5 1 1 1 3 1 100 328 262 328 1045 9 5 1 1417 2 25 66 1 1 -"27463" 4 288 5 1 1 1 4 1 100 339 100 150 1408 8 7 1 1115 3 50 50 1 1 -"27464" 4 288 5 1 1 1 4 1 100 339 150 225 3395 3 10 0 999 3 50 75 0 1 -"27465" 4 288 5 1 1 1 4 1 100 339 225 169 1069 7 9 1 1624 2 25 56 0 0 -"27466" 4 288 5 1 1 1 4 1 100 339 169 254 1128 9 1 1 1232 3 50 85 1 1 -"27467" 4 288 5 1 1 1 4 1 100 339 254 381 1355 2 3 0 1024 3 50 127 0 1 -"27468" 4 288 5 1 1 1 4 1 100 339 381 572 1340 1 8 0 1338 3 50 191 0 1 -"27469" 4 288 5 1 1 1 4 1 100 339 572 543 1552 5 4 0 1908 1 5 29 1 0 -"27470" 4 288 5 1 1 1 4 1 100 339 543 271 1406 4 2 0 1193 3 50 272 1 0 -"27471" 4 288 5 1 1 1 4 1 100 339 271 339 1157 6 3 1 1508 2 25 68 1 1 -"27472" 4 292 2 0 1 1 1 1 100 146 100 125 10871 8 3 1 2367 2 25 25 1 1 -"27473" 4 292 2 0 1 1 1 1 100 146 125 156 6667 3 7 0 4444 2 25 31 0 1 -"27474" 4 292 2 0 1 1 1 1 100 146 156 195 4926 8 2 1 1297 2 25 39 1 1 -"27475" 4 292 2 0 1 1 1 1 100 146 195 146 2088 2 1 0 2491 2 25 49 1 0 
-"27476" 4 292 3 1 1 1 1 1 100 129 100 125 2019 9 7 1 1009 2 25 25 1 1 -"27477" 4 292 3 1 1 1 1 1 100 129 125 156 1622 4 8 0 2975 2 25 31 0 1 -"27478" 4 292 3 1 1 1 1 1 100 129 156 164 7062 8 2 1 3275 1 5 8 1 1 -"27479" 4 292 3 1 1 1 1 1 100 129 164 123 2396 2 1 0 1071 2 25 41 1 0 -"27480" 4 292 3 1 1 1 1 1 100 129 123 129 2786 7 6 1 4499 1 5 6 1 1 -"27481" 4 292 3 1 1 1 1 1 100 129 129 123 3370 5 3 0 2223 1 5 6 1 0 -"27482" 4 292 3 1 1 1 1 1 100 129 123 129 2987 3 6 0 2714 1 5 6 0 1 -"27483" 4 292 3 1 1 1 1 1 100 129 129 123 1830 6 9 1 1757 1 5 6 0 0 -"27484" 4 292 3 1 1 1 1 1 100 129 123 129 2582 1 4 0 2281 1 5 6 0 1 -"27485" 4 292 3 1 1 1 2 1 100 165 100 105 2030 2 9 0 2819 1 5 5 0 1 -"27486" 4 292 3 1 1 1 2 1 100 165 105 158 4650 4 10 0 1675 3 50 53 0 1 -"27487" 4 292 3 1 1 1 2 1 100 165 158 150 3416 3 1 0 2188 1 5 8 1 0 -"27488" 4 292 3 1 1 1 2 1 100 165 150 158 2745 8 6 1 2724 1 5 8 1 1 -"27489" 4 292 3 1 1 1 2 1 100 165 158 166 1135 5 7 0 3703 1 5 8 0 1 -"27490" 4 292 3 1 1 1 2 1 100 165 166 158 4988 6 8 1 2796 1 5 8 0 0 -"27491" 4 292 3 1 1 1 2 1 100 165 158 166 3148 7 5 1 1860 1 5 8 1 1 -"27492" 4 292 3 1 1 1 2 1 100 165 166 174 2001 1 10 0 1056 1 5 8 0 1 -"27493" 4 292 3 1 1 1 2 1 100 165 174 165 4179 9 3 0 1289 1 5 9 1 0 -"27494" 4 292 3 1 1 1 3 1 100 105 100 95 1378 3 5 1 1223 1 5 5 0 0 -"27495" 4 292 3 1 1 1 3 1 100 105 95 100 1906 8 9 0 748 1 5 5 0 1 -"27496" 4 292 3 1 1 1 3 1 100 105 100 95 1174 2 4 1 644 1 5 5 0 0 -"27497" 4 292 3 1 1 1 3 1 100 105 95 100 1642 6 3 1 594 1 5 5 1 1 -"27498" 4 292 3 1 1 1 3 1 100 105 100 105 1378 7 6 1 607 1 5 5 1 1 -"27499" 4 292 3 1 1 1 3 1 100 105 105 100 1063 4 2 0 682 1 5 5 1 0 -"27500" 4 292 3 1 1 1 3 1 100 105 100 95 823 1 8 1 677 1 5 5 0 0 -"27501" 4 292 3 1 1 1 3 1 100 105 95 100 1250 5 7 0 676 1 5 5 0 1 -"27502" 4 292 3 1 1 1 3 1 100 105 100 105 8153 9 5 1 571 1 5 5 1 1 -"27503" 4 292 3 1 1 1 4 1 100 93 100 105 1167 8 7 1 757 1 5 5 1 1 -"27504" 4 292 3 1 1 1 4 1 100 93 105 110 1405 3 10 0 728 1 5 5 0 1 -"27505" 4 292 3 1 1 1 4 1 100 93 110 104 1138 7 9 1 682 1 5 6 0 0 -"27506" 4 292 3 1 1 1 4 1 100 93 104 99 775 9 1 0 759 1 5 5 1 0 -"27507" 4 292 3 1 1 1 4 1 100 93 99 104 1240 2 3 0 603 1 5 5 0 1 -"27508" 4 292 3 1 1 1 4 1 100 93 104 99 1092 1 8 1 813 1 5 5 0 0 -"27509" 4 292 3 1 1 1 4 1 100 93 99 94 1036 5 4 0 840 1 5 5 1 0 -"27510" 4 292 3 1 1 1 4 1 100 93 94 89 1533 4 2 0 618 1 5 5 1 0 -"27511" 4 292 3 1 1 1 4 1 100 93 89 93 602 6 3 1 619 1 5 4 1 1 -"27512" 4 292 4 0 1 0 1 0 100 0 100 5 1005 2 7 1 674 1 95 95 0 0 -"27513" 4 292 4 0 1 0 1 0 100 0 5 10 1999 3 7 0 942 1 95 5 0 1 -"27514" 4 292 4 0 1 0 1 0 100 0 10 0 3062 2 8 1 2986 1 95 10 0 0 -"27515" 4 292 5 1 1 0 1 0 100 0 100 50 3822 1 3 1 4717 3 50 50 0 0 -"27516" 4 292 5 1 1 0 1 0 100 0 50 98 2530 6 5 1 2567 1 95 48 1 1 -"27517" 4 292 5 1 1 0 1 0 100 0 98 191 4115 2 8 0 692 1 95 93 0 1 -"27518" 4 292 5 1 1 0 1 0 100 0 191 10 1225 8 9 1 732 1 95 181 0 0 -"27519" 4 292 5 1 1 0 1 0 100 0 10 20 1454 3 4 0 830 1 95 10 0 1 -"27520" 4 292 5 1 1 0 1 0 100 0 20 5 2172 5 7 1 2931 2 75 15 0 0 -"27521" 4 292 5 1 1 0 1 0 100 0 5 10 2181 7 4 1 666 1 95 5 1 1 -"27522" 4 292 5 1 1 0 1 0 100 0 10 0 2048 4 1 0 772 1 95 10 1 0 -"27523" 4 292 5 1 1 0 2 1 100 4 100 150 1141 8 1 1 1485 3 50 50 1 1 -"27524" 4 292 5 1 1 0 2 1 100 4 150 225 1235 6 2 1 2886 3 50 75 1 1 -"27525" 4 292 5 1 1 0 2 1 100 4 225 11 35218 7 9 1 916 1 95 214 0 0 -"27526" 4 292 5 1 1 0 2 1 100 4 11 21 2358 2 10 0 1219 1 95 10 0 1 -"27527" 4 292 5 1 1 0 2 1 100 4 21 26 5567 5 3 1 2052 4 25 5 1 1 -"27528" 4 292 5 1 1 0 2 1 100 4 26 
6 9000 4 2 0 1801 2 75 20 1 0 -"27529" 4 292 5 1 1 0 2 1 100 4 6 9 1008 3 5 0 1366 3 50 3 0 1 -"27530" 4 292 5 1 1 0 2 1 100 4 9 2 1554 9 4 0 1134 2 75 7 1 0 -"27531" 4 292 5 1 1 0 2 1 100 4 2 4 1909 1 7 0 592 2 75 2 0 1 -"27532" 4 292 5 1 1 0 3 0 100 0 100 105 2003 7 5 1 1243 5 5 5 1 1 -"27533" 4 292 5 1 1 0 3 0 100 0 105 5 2196 2 1 0 4529 1 95 100 1 0 -"27534" 4 292 5 1 1 0 3 0 100 0 5 10 2751 8 6 1 801 1 95 5 1 1 -"27535" 4 292 5 1 1 0 3 0 100 0 10 0 1433 4 7 1 626 1 95 10 0 0 -"27536" 4 292 5 1 1 0 4 0 100 0 100 5 1546 2 3 1 542 1 95 95 0 0 -"27537" 4 292 5 1 1 0 4 0 100 0 5 0 1689 7 4 0 942 1 95 5 1 0 -"27538" 4 298 2 0 1 1 1 1 100 93 100 75 17488 8 3 0 3822 2 25 25 1 0 -"27539" 4 298 2 0 1 1 1 1 100 93 75 94 7549 3 7 0 2732 2 25 19 0 1 -"27540" 4 298 2 0 1 1 1 1 100 93 94 89 4095 8 2 0 3306 1 5 5 1 0 -"27541" 4 298 2 0 1 1 1 1 100 93 89 93 4370 2 1 1 2176 1 5 4 1 1 -"27542" 4 298 3 1 1 1 1 1 100 181 100 125 5924 9 7 1 1143 2 25 25 1 1 -"27543" 4 298 3 1 1 1 1 1 100 181 125 119 5334 4 8 1 1485 1 5 6 0 0 -"27544" 4 298 3 1 1 1 1 1 100 181 119 149 4828 8 2 1 1052 2 25 30 1 1 -"27545" 4 298 3 1 1 1 1 1 100 181 149 156 6343 2 1 1 1607 1 5 7 1 1 -"27546" 4 298 3 1 1 1 1 1 100 181 156 164 4496 7 6 1 903 1 5 8 1 1 -"27547" 4 298 3 1 1 1 1 1 100 181 164 156 6922 5 3 0 1542 1 5 8 1 0 -"27548" 4 298 3 1 1 1 1 1 100 181 156 164 4066 3 6 0 1198 1 5 8 0 1 -"27549" 4 298 3 1 1 1 1 1 100 181 164 172 4602 6 9 0 757 1 5 8 0 1 -"27550" 4 298 3 1 1 1 1 1 100 181 172 181 14486 1 4 0 578 1 5 9 0 1 -"27551" 4 298 3 1 1 1 2 1 100 221 100 125 5104 2 9 0 1195 2 25 25 0 1 -"27552" 4 298 3 1 1 1 2 1 100 221 125 131 3095 4 10 0 1945 1 5 6 0 1 -"27553" 4 298 3 1 1 1 2 1 100 221 131 164 3050 3 1 1 974 2 25 33 1 1 -"27554" 4 298 3 1 1 1 2 1 100 221 164 172 4172 8 6 1 1332 1 5 8 1 1 -"27555" 4 298 3 1 1 1 2 1 100 221 172 181 3657 5 7 0 993 1 5 9 0 1 -"27556" 4 298 3 1 1 1 2 1 100 221 181 190 4966 6 8 0 1399 1 5 9 0 1 -"27557" 4 298 3 1 1 1 2 1 100 221 190 200 4090 7 5 1 789 1 5 10 1 1 -"27558" 4 298 3 1 1 1 2 1 100 221 200 210 5724 1 10 0 1001 1 5 10 0 1 -"27559" 4 298 3 1 1 1 2 1 100 221 210 221 4143 9 3 1 2247 1 5 11 1 1 -"27560" 4 298 3 1 1 1 3 1 100 365 100 125 4813 3 5 0 1749 2 25 25 0 1 -"27561" 4 298 3 1 1 1 3 1 100 365 125 119 1863 8 9 1 2092 1 5 6 0 0 -"27562" 4 298 3 1 1 1 3 1 100 365 119 149 3517 2 4 0 1316 2 25 30 0 1 -"27563" 4 298 3 1 1 1 3 1 100 365 149 186 3099 6 3 1 1261 2 25 37 1 1 -"27564" 4 298 3 1 1 1 3 1 100 365 186 195 2693 7 6 1 867 1 5 9 1 1 -"27565" 4 298 3 1 1 1 3 1 100 365 195 185 2567 4 2 0 1403 1 5 10 1 0 -"27566" 4 298 3 1 1 1 3 1 100 365 185 231 2085 1 8 0 976 2 25 46 0 1 -"27567" 4 298 3 1 1 1 3 1 100 365 231 243 3302 5 7 0 1102 1 5 12 0 1 -"27568" 4 298 3 1 1 1 3 1 100 365 243 365 2543 9 5 1 789 3 50 122 1 1 -"27569" 4 298 3 1 1 1 4 1 100 313 100 125 3382 8 7 1 435 2 25 25 1 1 -"27570" 4 298 3 1 1 1 4 1 100 313 125 156 1916 3 10 0 990 2 25 31 0 1 -"27571" 4 298 3 1 1 1 4 1 100 313 156 117 3892 7 9 1 1176 2 25 39 0 0 -"27572" 4 298 3 1 1 1 4 1 100 313 117 205 2821 9 1 1 2237 4 75 88 1 1 -"27573" 4 298 3 1 1 1 4 1 100 313 205 215 1783 2 3 0 936 1 5 10 0 1 -"27574" 4 298 3 1 1 1 4 1 100 313 215 419 1689 1 8 0 3183 5 95 204 0 1 -"27575" 4 298 3 1 1 1 4 1 100 313 419 398 2957 5 4 0 1877 1 5 21 1 0 -"27576" 4 298 3 1 1 1 4 1 100 313 398 298 2042 4 2 0 966 2 25 100 1 0 -"27577" 4 298 3 1 1 1 4 1 100 313 298 313 4805 6 3 1 4286 1 5 15 1 1 -"27578" 4 298 4 0 1 0 1 1 100 1 100 195 2071 2 7 0 2531 1 95 95 0 1 -"27579" 4 298 4 0 1 0 1 1 100 1 195 380 2392 3 7 0 1652 1 95 185 0 1 -"27580" 4 298 
4 0 1 0 1 1 100 1 380 19 2050 2 8 1 1254 1 95 361 0 0 -"27581" 4 298 4 0 1 0 1 1 100 1 19 1 1984 8 9 1 1007 1 95 18 0 0 -"27582" 4 298 5 1 1 0 1 1 100 1201 100 195 1785 1 3 0 1530 1 95 95 0 1 -"27583" 4 298 5 1 1 0 1 1 100 1201 195 380 7302 6 5 1 920 1 95 185 1 1 -"27584" 4 298 5 1 1 0 1 1 100 1201 380 665 2450 2 8 0 1459 2 75 285 0 1 -"27585" 4 298 5 1 1 0 1 1 100 1201 665 166 2467 8 9 1 1235 2 75 499 0 0 -"27586" 4 298 5 1 1 0 1 1 100 1201 166 324 3542 3 4 0 2619 1 95 158 0 1 -"27587" 4 298 5 1 1 0 1 1 100 1201 324 632 1668 5 7 0 986 1 95 308 0 1 -"27588" 4 298 5 1 1 0 1 1 100 1201 632 1232 4097 7 4 1 1659 1 95 600 1 1 -"27589" 4 298 5 1 1 0 1 1 100 1201 1232 616 3394 4 1 0 1686 3 50 616 1 0 -"27590" 4 298 5 1 1 0 1 1 100 1201 616 1201 1851 9 6 1 1177 1 95 585 1 1 -"27591" 4 298 5 1 1 0 2 0 100 0 100 195 2085 8 1 1 1287 1 95 95 1 1 -"27592" 4 298 5 1 1 0 2 0 100 0 195 10 2160 6 2 0 1030 1 95 185 1 0 -"27593" 4 298 5 1 1 0 2 0 100 0 10 20 1682 7 9 0 2448 1 95 10 0 1 -"27594" 4 298 5 1 1 0 2 0 100 0 20 39 2417 2 10 0 1387 1 95 19 0 1 -"27595" 4 298 5 1 1 0 2 0 100 0 39 76 1887 5 3 1 1411 1 95 37 1 1 -"27596" 4 298 5 1 1 0 2 0 100 0 76 4 2941 4 2 0 1158 1 95 72 1 0 -"27597" 4 298 5 1 1 0 2 0 100 0 4 0 2068 3 5 1 983 1 95 4 0 0 -"27598" 4 298 5 1 1 0 3 0 100 0 100 195 2418 7 5 1 2836 1 95 95 1 1 -"27599" 4 298 5 1 1 0 3 0 100 0 195 10 2203 2 1 0 1040 1 95 185 1 0 -"27600" 4 298 5 1 1 0 3 0 100 0 10 0 2107 8 6 0 2788 1 95 10 1 0 -"27601" 4 298 5 1 1 0 4 1 100 3851 100 195 2023 2 3 0 1148 1 95 95 0 1 -"27602" 4 298 5 1 1 0 4 1 100 3851 195 380 4547 7 4 1 1240 1 95 185 1 1 -"27603" 4 298 5 1 1 0 4 1 100 3851 380 665 3010 3 1 1 1097 2 75 285 1 1 -"27604" 4 298 5 1 1 0 4 1 100 3851 665 1297 1586 1 9 0 924 1 95 632 0 1 -"27605" 4 298 5 1 1 0 4 1 100 3851 1297 1621 2413 8 7 1 1464 4 25 324 1 1 -"27606" 4 298 5 1 1 0 4 1 100 3851 1621 3161 1776 9 2 1 1017 1 95 1540 1 1 -"27607" 4 298 5 1 1 0 4 1 100 3851 3161 3951 3005 5 6 0 740 4 25 790 0 1 -"27608" 4 298 5 1 1 0 4 1 100 3851 3951 1975 2094 6 8 1 1288 3 50 1976 0 0 -"27609" 4 298 5 1 1 0 4 1 100 3851 1975 3851 1456 4 7 0 2440 1 95 1876 0 1 -"27610" 4 310 2 0 1 0 1 1 100 23 100 150 6150 2 7 0 1223 3 50 50 0 1 -"27611" 4 310 2 0 1 0 1 1 100 23 150 263 15075 3 7 0 4324 2 75 113 0 1 -"27612" 4 310 2 0 1 0 1 1 100 23 263 460 3314 2 8 0 762 2 75 197 0 1 -"27613" 4 310 2 0 1 0 1 1 100 23 460 23 3459 8 9 1 2260 1 95 437 0 0 -"27614" 4 310 3 1 1 0 1 1 100 339 100 195 11653 1 3 0 2567 1 95 95 0 1 -"27615" 4 310 3 1 1 0 1 1 100 339 195 293 2883 6 5 1 2375 3 50 98 1 1 -"27616" 4 310 3 1 1 0 1 1 100 339 293 308 2075 2 8 0 830 5 5 15 0 1 -"27617" 4 310 3 1 1 0 1 1 100 339 308 154 1525 8 9 1 1993 3 50 154 0 0 -"27618" 4 310 3 1 1 0 1 1 100 339 154 193 1200 3 4 0 420 4 25 39 0 1 -"27619" 4 310 3 1 1 0 1 1 100 339 193 241 2765 5 7 0 487 4 25 48 0 1 -"27620" 4 310 3 1 1 0 1 1 100 339 241 301 2813 7 4 1 493 4 25 60 1 1 -"27621" 4 310 3 1 1 0 1 1 100 339 301 226 3108 4 1 0 549 4 25 75 1 0 -"27622" 4 310 3 1 1 0 1 1 100 339 226 339 2193 9 6 1 1918 3 50 113 1 1 -"27623" 4 310 3 1 1 0 2 1 100 312 100 150 4103 8 1 1 602 3 50 50 1 1 -"27624" 4 310 3 1 1 0 2 1 100 312 150 225 1451 6 2 1 812 3 50 75 1 1 -"27625" 4 310 3 1 1 0 2 1 100 312 225 169 2451 7 9 1 796 4 25 56 0 0 -"27626" 4 310 3 1 1 0 2 1 100 312 169 211 1828 2 10 0 450 4 25 42 0 1 -"27627" 4 310 3 1 1 0 2 1 100 312 211 158 1579 5 3 0 558 4 25 53 1 0 -"27628" 4 310 3 1 1 0 2 1 100 312 158 79 1720 4 2 0 2364 3 50 79 1 0 -"27629" 4 310 3 1 1 0 2 1 100 312 79 119 1921 3 5 0 2228 3 50 40 0 1 -"27630" 4 310 3 1 1 0 2 1 
100 312 119 208 1624 9 4 1 640 2 75 89 1 1 -"27631" 4 310 3 1 1 0 2 1 100 312 208 312 2662 1 7 0 1693 3 50 104 0 1 -"27632" 4 310 3 1 1 0 3 1 100 375 100 150 1764 7 5 1 655 3 50 50 1 1 -"27633" 4 310 3 1 1 0 3 1 100 375 150 75 1092 2 1 0 2142 3 50 75 1 0 -"27634" 4 310 3 1 1 0 3 1 100 375 75 113 1174 8 6 1 1172 3 50 38 1 1 -"27635" 4 310 3 1 1 0 3 1 100 375 113 170 1185 4 7 0 662 3 50 57 0 1 -"27636" 4 310 3 1 1 0 3 1 100 375 170 255 1180 3 10 0 2530 3 50 85 0 1 -"27637" 4 310 3 1 1 0 3 1 100 375 255 191 3831 6 8 1 1511 4 25 64 0 0 -"27638" 4 310 3 1 1 0 3 1 100 375 191 334 1228 9 2 1 559 2 75 143 1 1 -"27639" 4 310 3 1 1 0 3 1 100 375 334 250 1458 5 3 0 1170 4 25 84 1 0 -"27640" 4 310 3 1 1 0 3 1 100 375 250 375 1696 1 10 0 789 3 50 125 0 1 -"27641" 4 310 3 1 1 0 4 1 100 291 100 175 1571 2 3 0 558 2 75 75 0 1 -"27642" 4 310 3 1 1 0 4 1 100 291 175 263 1128 7 4 1 1704 3 50 88 1 1 -"27643" 4 310 3 1 1 0 4 1 100 291 263 131 1269 3 1 0 1422 3 50 132 1 0 -"27644" 4 310 3 1 1 0 4 1 100 291 131 255 1944 1 9 0 792 1 95 124 0 1 -"27645" 4 310 3 1 1 0 4 1 100 291 255 319 1528 8 7 1 629 4 25 64 1 1 -"27646" 4 310 3 1 1 0 4 1 100 291 319 399 2212 9 2 1 593 4 25 80 1 1 -"27647" 4 310 3 1 1 0 4 1 100 291 399 299 2130 5 6 1 1482 4 25 100 0 0 -"27648" 4 310 3 1 1 0 4 1 100 291 299 149 1873 6 8 1 2493 3 50 150 0 0 -"27649" 4 310 3 1 1 0 4 1 100 291 149 291 2980 4 7 0 660 1 95 142 0 1 -"27650" 4 310 4 0 1 1 1 1 100 178 100 150 4870 8 3 1 924 3 50 50 1 1 -"27651" 4 310 4 0 1 1 1 1 100 178 150 158 3205 3 7 0 2386 1 5 8 0 1 -"27652" 4 310 4 0 1 1 1 1 100 178 158 237 3605 8 2 1 375 3 50 79 1 1 -"27653" 4 310 4 0 1 1 1 1 100 178 237 178 1578 2 1 0 925 2 25 59 1 0 -"27654" 4 310 5 1 1 1 1 1 100 333 100 150 4166 9 7 1 502 3 50 50 1 1 -"27655" 4 310 5 1 1 1 1 1 100 333 150 225 1371 4 8 0 560 3 50 75 0 1 -"27656" 4 310 5 1 1 1 1 1 100 333 225 338 1410 8 2 1 1117 3 50 113 1 1 -"27657" 4 310 5 1 1 1 1 1 100 333 338 253 965 2 1 0 733 2 25 85 1 0 -"27658" 4 310 5 1 1 1 1 1 100 333 253 316 1623 7 6 1 663 2 25 63 1 1 -"27659" 4 310 5 1 1 1 1 1 100 333 316 237 2253 5 3 0 672 2 25 79 1 0 -"27660" 4 310 5 1 1 1 1 1 100 333 237 296 1237 3 6 0 631 2 25 59 0 1 -"27661" 4 310 5 1 1 1 1 1 100 333 296 222 1727 6 9 1 746 2 25 74 0 0 -"27662" 4 310 5 1 1 1 1 1 100 333 222 333 1132 1 4 0 612 3 50 111 0 1 -"27663" 4 310 5 1 1 1 2 1 100 559 100 150 1301 2 9 0 453 3 50 50 0 1 -"27664" 4 310 5 1 1 1 2 1 100 559 150 225 1972 4 10 0 404 3 50 75 0 1 -"27665" 4 310 5 1 1 1 2 1 100 559 225 169 2062 3 1 0 554 2 25 56 1 0 -"27666" 4 310 5 1 1 1 2 1 100 559 169 254 1300 8 6 1 1220 3 50 85 1 1 -"27667" 4 310 5 1 1 1 2 1 100 559 254 318 1488 5 7 0 1694 2 25 64 0 1 -"27668" 4 310 5 1 1 1 2 1 100 559 318 238 1747 6 8 1 598 2 25 80 0 0 -"27669" 4 310 5 1 1 1 2 1 100 559 238 298 975 7 5 1 702 2 25 60 1 1 -"27670" 4 310 5 1 1 1 2 1 100 559 298 447 906 1 10 0 573 3 50 149 0 1 -"27671" 4 310 5 1 1 1 2 1 100 559 447 559 1159 9 3 1 598 2 25 112 1 1 -"27672" 4 310 5 1 1 1 3 1 100 451 100 125 940 3 5 0 573 2 25 25 0 1 -"27673" 4 310 5 1 1 1 3 1 100 451 125 94 1391 8 9 1 1250 2 25 31 0 0 -"27674" 4 310 5 1 1 1 3 1 100 451 94 141 1028 2 4 0 649 3 50 47 0 1 -"27675" 4 310 5 1 1 1 3 1 100 451 141 176 1256 6 3 1 1465 2 25 35 1 1 -"27676" 4 310 5 1 1 1 3 1 100 451 176 220 1217 7 6 1 1189 2 25 44 1 1 -"27677" 4 310 5 1 1 1 3 1 100 451 220 165 1074 4 2 0 509 2 25 55 1 0 -"27678" 4 310 5 1 1 1 3 1 100 451 165 289 1225 1 8 0 453 4 75 124 0 1 -"27679" 4 310 5 1 1 1 3 1 100 451 289 361 1372 5 7 0 513 2 25 72 0 1 -"27680" 4 310 5 1 1 1 3 1 100 451 361 451 1910 9 5 1 
1304 2 25 90 1 1 -"27681" 4 310 5 1 1 1 4 1 100 528 100 150 1633 8 7 1 330 3 50 50 1 1 -"27682" 4 310 5 1 1 1 4 1 100 528 150 188 911 3 10 0 482 2 25 38 0 1 -"27683" 4 310 5 1 1 1 4 1 100 528 188 141 1296 7 9 1 609 2 25 47 0 0 -"27684" 4 310 5 1 1 1 4 1 100 528 141 247 1297 9 1 1 681 4 75 106 1 1 -"27685" 4 310 5 1 1 1 4 1 100 528 247 371 1182 2 3 0 564 3 50 124 0 1 -"27686" 4 310 5 1 1 1 4 1 100 528 371 557 1431 1 8 0 569 3 50 186 0 1 -"27687" 4 310 5 1 1 1 4 1 100 528 557 529 1443 5 4 0 2956 1 5 28 1 0 -"27688" 4 310 5 1 1 1 4 1 100 528 529 503 1348 4 2 0 1218 1 5 26 1 0 -"27689" 4 310 5 1 1 1 4 1 100 528 503 528 1413 6 3 1 912 1 5 25 1 1 -"27690" 4 311 2 0 1 0 1 1 100 115 100 150 5540 2 7 0 1122 3 50 50 0 1 -"27691" 4 311 2 0 1 0 1 1 100 115 150 263 13299 3 7 0 2319 2 75 113 0 1 -"27692" 4 311 2 0 1 0 1 1 100 115 263 460 2590 2 8 0 1976 2 75 197 0 1 -"27693" 4 311 2 0 1 0 1 1 100 115 460 115 2535 8 9 1 1868 2 75 345 0 0 -"27694" 4 311 3 1 1 0 1 1 100 351 100 175 26903 1 3 0 780 2 75 75 0 1 -"27695" 4 311 3 1 1 0 1 1 100 351 175 306 2901 6 5 1 2416 2 75 131 1 1 -"27696" 4 311 3 1 1 0 1 1 100 351 306 536 3131 2 8 0 940 2 75 230 0 1 -"27697" 4 311 3 1 1 0 1 1 100 351 536 134 3724 8 9 1 1336 2 75 402 0 0 -"27698" 4 311 3 1 1 0 1 1 100 351 134 235 4854 3 4 0 2213 2 75 101 0 1 -"27699" 4 311 3 1 1 0 1 1 100 351 235 59 2313 5 7 1 1544 2 75 176 0 0 -"27700" 4 311 3 1 1 0 1 1 100 351 59 103 4747 7 4 1 399 2 75 44 1 1 -"27701" 4 311 3 1 1 0 1 1 100 351 103 180 2514 4 1 1 679 2 75 77 1 1 -"27702" 4 311 3 1 1 0 1 1 100 351 180 351 4700 9 6 1 658 1 95 171 1 1 -"27703" 4 311 3 1 1 0 2 1 100 1790 100 175 5839 8 1 1 527 2 75 75 1 1 -"27704" 4 311 3 1 1 0 2 1 100 1790 175 87 3987 6 2 0 747 3 50 88 1 0 -"27705" 4 311 3 1 1 0 2 1 100 1790 87 152 3429 7 9 0 784 2 75 65 0 1 -"27706" 4 311 3 1 1 0 2 1 100 1790 152 266 1680 2 10 0 622 2 75 114 0 1 -"27707" 4 311 3 1 1 0 2 1 100 1790 266 466 3453 5 3 1 742 2 75 200 1 1 -"27708" 4 311 3 1 1 0 2 1 100 1790 466 816 3744 4 2 1 1027 2 75 350 1 1 -"27709" 4 311 3 1 1 0 2 1 100 1790 816 1224 2815 3 5 0 647 3 50 408 0 1 -"27710" 4 311 3 1 1 0 2 1 100 1790 1224 918 7005 9 4 0 790 4 25 306 1 0 -"27711" 4 311 3 1 1 0 2 1 100 1790 918 1790 2878 1 7 0 930 1 95 872 0 1 -"27712" 4 311 3 1 1 0 3 0 100 1 100 25 2198 7 5 0 643 2 75 75 1 0 -"27713" 4 311 3 1 1 0 3 0 100 1 25 1 2049 2 1 0 3942 1 95 24 1 0 -"27714" 4 311 3 1 1 0 4 1 100 49 100 175 2604 2 3 0 630 2 75 75 0 1 -"27715" 4 311 3 1 1 0 4 1 100 49 175 44 2309 7 4 0 917 2 75 131 1 0 -"27716" 4 311 3 1 1 0 4 1 100 49 44 11 1911 3 1 0 1402 2 75 33 1 0 -"27717" 4 311 3 1 1 0 4 1 100 49 11 21 5012 1 9 0 555 1 95 10 0 1 -"27718" 4 311 3 1 1 0 4 1 100 49 21 37 4558 8 7 1 771 2 75 16 1 1 -"27719" 4 311 3 1 1 0 4 1 100 49 37 65 3924 9 2 1 799 2 75 28 1 1 -"27720" 4 311 3 1 1 0 4 1 100 49 65 114 3286 5 6 0 575 2 75 49 0 1 -"27721" 4 311 3 1 1 0 4 1 100 49 114 28 5609 6 8 1 660 2 75 86 0 0 -"27722" 4 311 3 1 1 0 4 1 100 49 28 49 2537 4 7 0 726 2 75 21 0 1 -"27723" 4 311 4 0 1 1 1 1 100 141 100 150 4113 8 3 1 1171 3 50 50 1 1 -"27724" 4 311 4 0 1 1 1 1 100 141 150 188 3953 3 7 0 875 2 25 38 0 1 -"27725" 4 311 4 0 1 1 1 1 100 141 188 282 1926 8 2 1 888 3 50 94 1 1 -"27726" 4 311 4 0 1 1 1 1 100 141 282 141 2475 2 1 0 940 3 50 141 1 0 -"27727" 4 311 5 1 1 1 1 1 100 37 100 150 5421 9 7 1 649 3 50 50 1 1 -"27728" 4 311 5 1 1 1 1 1 100 37 150 225 3992 4 8 0 591 3 50 75 0 1 -"27729" 4 311 5 1 1 1 1 1 100 37 225 281 1808 8 2 1 1256 2 25 56 1 1 -"27730" 4 311 5 1 1 1 1 1 100 37 281 211 3025 2 1 0 2119 2 25 70 1 0 -"27731" 4 311 5 1 1 1 1 1 
100 37 211 158 3383 7 6 0 1029 2 25 53 1 0 -"27732" 4 311 5 1 1 1 1 1 100 37 158 79 3353 5 3 0 735 3 50 79 1 0 -"27733" 4 311 5 1 1 1 1 1 100 37 79 59 2945 3 6 1 1067 2 25 20 0 0 -"27734" 4 311 5 1 1 1 1 1 100 37 59 74 2118 6 9 0 855 2 25 15 0 1 -"27735" 4 311 5 1 1 1 1 1 100 37 74 37 2964 1 4 1 693 3 50 37 0 0 -"27736" 4 311 5 1 1 1 2 1 100 50 100 75 2983 2 9 1 2190 2 25 25 0 0 -"27737" 4 311 5 1 1 1 2 1 100 50 75 94 3179 4 10 0 2730 2 25 19 0 1 -"27738" 4 311 5 1 1 1 2 1 100 50 94 47 1373 3 1 0 529 3 50 47 1 0 -"27739" 4 311 5 1 1 1 2 1 100 50 47 71 3086 8 6 1 632 3 50 24 1 1 -"27740" 4 311 5 1 1 1 2 1 100 50 71 35 2888 5 7 1 876 3 50 36 0 0 -"27741" 4 311 5 1 1 1 2 1 100 50 35 44 1967 6 8 0 688 2 25 9 0 1 -"27742" 4 311 5 1 1 1 2 1 100 50 44 22 2533 7 5 0 626 3 50 22 1 0 -"27743" 4 311 5 1 1 1 2 1 100 50 22 33 2633 1 10 0 922 3 50 11 0 1 -"27744" 4 311 5 1 1 1 2 1 100 50 33 50 1795 9 3 1 1020 3 50 17 1 1 -"27745" 4 311 5 1 1 1 3 1 100 35 100 150 2827 3 5 0 835 3 50 50 0 1 -"27746" 4 311 5 1 1 1 3 1 100 35 150 112 1576 8 9 1 1971 2 25 38 0 0 -"27747" 4 311 5 1 1 1 3 1 100 35 112 168 1426 2 4 0 804 3 50 56 0 1 -"27748" 4 311 5 1 1 1 3 1 100 35 168 126 4404 6 3 0 1728 2 25 42 1 0 -"27749" 4 311 5 1 1 1 3 1 100 35 126 63 2163 7 6 0 834 3 50 63 1 0 -"27750" 4 311 5 1 1 1 3 1 100 35 63 95 3768 4 2 1 539 3 50 32 1 1 -"27751" 4 311 5 1 1 1 3 1 100 35 95 47 3286 1 8 1 1119 3 50 48 0 0 -"27752" 4 311 5 1 1 1 3 1 100 35 47 23 3231 5 7 1 964 3 50 24 0 0 -"27753" 4 311 5 1 1 1 3 1 100 35 23 35 1907 9 5 1 966 3 50 12 1 1 -"27754" 4 311 5 1 1 1 4 1 100 373 100 150 2681 8 7 1 1279 3 50 50 1 1 -"27755" 4 311 5 1 1 1 4 1 100 373 150 188 2024 3 10 0 1397 2 25 38 0 1 -"27756" 4 311 5 1 1 1 4 1 100 373 188 94 2471 7 9 1 760 3 50 94 0 0 -"27757" 4 311 5 1 1 1 4 1 100 373 94 141 2256 9 1 1 803 3 50 47 1 1 -"27758" 4 311 5 1 1 1 4 1 100 373 141 212 2434 2 3 0 676 3 50 71 0 1 -"27759" 4 311 5 1 1 1 4 1 100 373 212 318 2241 1 8 0 932 3 50 106 0 1 -"27760" 4 311 5 1 1 1 4 1 100 373 318 398 2525 5 4 1 1435 2 25 80 1 1 -"27761" 4 311 5 1 1 1 4 1 100 373 398 498 3676 4 2 1 1443 2 25 100 1 1 -"27762" 4 311 5 1 1 1 4 1 100 373 498 373 2371 6 3 0 648 2 25 125 1 0 -"27763" 4 312 2 0 1 0 1 1 100 29 100 150 6651 2 7 0 892 3 50 50 0 1 -"27764" 4 312 2 0 1 0 1 1 100 29 150 293 13358 3 7 0 4584 1 95 143 0 1 -"27765" 4 312 2 0 1 0 1 1 100 29 293 571 4236 2 8 0 2640 1 95 278 0 1 -"27766" 4 312 2 0 1 0 1 1 100 29 571 29 2639 8 9 1 4088 1 95 542 0 0 -"27767" 4 312 3 1 1 0 1 0 100 1 100 195 7082 1 3 0 3871 1 95 95 0 1 -"27768" 4 312 3 1 1 0 1 0 100 1 195 380 3107 6 5 1 3037 1 95 185 1 1 -"27769" 4 312 3 1 1 0 1 0 100 1 380 19 3649 2 8 1 3529 1 95 361 0 0 -"27770" 4 312 3 1 1 0 1 0 100 1 19 1 4286 8 9 1 2216 1 95 18 0 0 -"27771" 4 312 3 1 1 0 2 0 100 0 100 5 3262 8 1 0 3273 1 95 95 1 0 -"27772" 4 312 3 1 1 0 2 0 100 0 5 0 4130 6 2 0 2123 1 95 5 1 0 -"27773" 4 312 3 1 1 0 3 0 100 0 100 195 3316 7 5 1 1765 1 95 95 1 1 -"27774" 4 312 3 1 1 0 3 0 100 0 195 380 2894 2 1 1 2850 1 95 185 1 1 -"27775" 4 312 3 1 1 0 3 0 100 0 380 741 3487 8 6 1 1639 1 95 361 1 1 -"27776" 4 312 3 1 1 0 3 0 100 0 741 1445 3190 4 7 0 2300 1 95 704 0 1 -"27777" 4 312 3 1 1 0 3 0 100 0 1445 72 2683 3 10 1 1361 1 95 1373 0 0 -"27778" 4 312 3 1 1 0 3 0 100 0 72 4 3285 6 8 1 1742 1 95 68 0 0 -"27779" 4 312 3 1 1 0 3 0 100 0 4 8 2929 9 2 1 1067 1 95 4 1 1 -"27780" 4 312 3 1 1 0 3 0 100 0 8 0 2830 5 3 0 1701 1 95 8 1 0 -"27781" 4 312 3 1 1 0 4 1 100 1037 100 195 2513 2 3 0 2359 1 95 95 0 1 -"27782" 4 312 3 1 1 0 4 1 100 1037 195 380 2884 7 4 1 1608 1 95 185 1 1 
-"27783" 4 312 3 1 1 0 4 1 100 1037 380 19 3825 3 1 0 1936 1 95 361 1 0 -"27784" 4 312 3 1 1 0 4 1 100 1037 19 37 2585 1 9 0 1020 1 95 18 0 1 -"27785" 4 312 3 1 1 0 4 1 100 1037 37 72 2577 8 7 1 1516 1 95 35 1 1 -"27786" 4 312 3 1 1 0 4 1 100 1037 72 140 3553 9 2 1 2509 1 95 68 1 1 -"27787" 4 312 3 1 1 0 4 1 100 1037 140 273 2475 5 6 0 1339 1 95 133 0 1 -"27788" 4 312 3 1 1 0 4 1 100 1037 273 532 3545 6 8 0 1366 1 95 259 0 1 -"27789" 4 312 3 1 1 0 4 1 100 1037 532 1037 2674 4 7 0 1720 1 95 505 0 1 -"27790" 4 312 4 0 1 1 1 1 100 140 100 150 4877 8 3 1 1179 3 50 50 1 1 -"27791" 4 312 4 0 1 1 1 1 100 140 150 225 3832 3 7 0 608 3 50 75 0 1 -"27792" 4 312 4 0 1 1 1 1 100 140 225 281 3040 8 2 1 1493 2 25 56 1 1 -"27793" 4 312 4 0 1 1 1 1 100 140 281 140 3188 2 1 0 554 3 50 141 1 0 -"27794" 4 312 5 1 1 1 1 1 100 312 100 175 3490 9 7 1 554 4 75 75 1 1 -"27795" 4 312 5 1 1 1 1 1 100 312 175 263 2060 4 8 0 792 3 50 88 0 1 -"27796" 4 312 5 1 1 1 1 1 100 312 263 395 2119 8 2 1 904 3 50 132 1 1 -"27797" 4 312 5 1 1 1 1 1 100 312 395 197 2107 2 1 0 701 3 50 198 1 0 -"27798" 4 312 5 1 1 1 1 1 100 312 197 296 2329 7 6 1 368 3 50 99 1 1 -"27799" 4 312 5 1 1 1 1 1 100 312 296 370 2516 5 3 1 2290 2 25 74 1 1 -"27800" 4 312 5 1 1 1 1 1 100 312 370 277 2090 3 6 1 717 2 25 93 0 0 -"27801" 4 312 5 1 1 1 1 1 100 312 277 208 2288 6 9 1 1393 2 25 69 0 0 -"27802" 4 312 5 1 1 1 1 1 100 312 208 312 1964 1 4 0 821 3 50 104 0 1 -"27803" 4 312 5 1 1 1 2 1 100 1211 100 195 3060 2 9 0 385 5 95 95 0 1 -"27804" 4 312 5 1 1 1 2 1 100 1211 195 293 2444 4 10 0 885 3 50 98 0 1 -"27805" 4 312 5 1 1 1 2 1 100 1211 293 366 2753 3 1 1 1147 2 25 73 1 1 -"27806" 4 312 5 1 1 1 2 1 100 1211 366 458 2042 8 6 1 1822 2 25 92 1 1 -"27807" 4 312 5 1 1 1 2 1 100 1211 458 573 3525 5 7 0 1747 2 25 115 0 1 -"27808" 4 312 5 1 1 1 2 1 100 1211 573 430 2372 6 8 1 1017 2 25 143 0 0 -"27809" 4 312 5 1 1 1 2 1 100 1211 430 538 1982 7 5 1 678 2 25 108 1 1 -"27810" 4 312 5 1 1 1 2 1 100 1211 538 807 2283 1 10 0 548 3 50 269 0 1 -"27811" 4 312 5 1 1 1 2 1 100 1211 807 1211 2535 9 3 1 604 3 50 404 1 1 -"27812" 4 312 5 1 1 1 3 1 100 933 100 125 3019 3 5 0 633 2 25 25 0 1 -"27813" 4 312 5 1 1 1 3 1 100 933 125 94 1944 8 9 1 2241 2 25 31 0 0 -"27814" 4 312 5 1 1 1 3 1 100 933 94 141 2004 2 4 0 595 3 50 47 0 1 -"27815" 4 312 5 1 1 1 3 1 100 933 141 212 1907 6 3 1 774 3 50 71 1 1 -"27816" 4 312 5 1 1 1 3 1 100 933 212 318 2422 7 6 1 393 3 50 106 1 1 -"27817" 4 312 5 1 1 1 3 1 100 933 318 398 2460 4 2 1 2269 2 25 80 1 1 -"27818" 4 312 5 1 1 1 3 1 100 933 398 597 1734 1 8 0 641 3 50 199 0 1 -"27819" 4 312 5 1 1 1 3 1 100 933 597 746 2832 5 7 0 582 2 25 149 0 1 -"27820" 4 312 5 1 1 1 3 1 100 933 746 933 1816 9 5 1 1640 2 25 187 1 1 -"27821" 4 312 5 1 1 1 4 1 100 624 100 175 2337 8 7 1 624 4 75 75 1 1 -"27822" 4 312 5 1 1 1 4 1 100 624 175 263 2143 3 10 0 479 3 50 88 0 1 -"27823" 4 312 5 1 1 1 4 1 100 624 263 131 1693 7 9 1 658 3 50 132 0 0 -"27824" 4 312 5 1 1 1 4 1 100 624 131 197 1744 9 1 1 1355 3 50 66 1 1 -"27825" 4 312 5 1 1 1 4 1 100 624 197 296 1928 2 3 0 762 3 50 99 0 1 -"27826" 4 312 5 1 1 1 4 1 100 624 296 444 2107 1 8 0 877 3 50 148 0 1 -"27827" 4 312 5 1 1 1 4 1 100 624 444 666 2368 5 4 1 341 3 50 222 1 1 -"27828" 4 312 5 1 1 1 4 1 100 624 666 499 3066 4 2 0 2221 2 25 167 1 0 -"27829" 4 312 5 1 1 1 4 1 100 624 499 624 2177 6 3 1 1663 2 25 125 1 1 -"27830" 4 317 2 0 1 0 1 1 100 285 100 150 4437 2 7 0 1016 3 50 50 0 1 -"27831" 4 317 2 0 1 0 1 1 100 285 150 293 8813 3 7 0 831 1 95 143 0 1 -"27832" 4 317 2 0 1 0 1 1 100 285 293 571 8989 2 8 0 1254 1 95 
278 0 1 -"27833" 4 317 2 0 1 0 1 1 100 285 571 285 3432 8 9 1 603 3 50 286 0 0 -"27834" 4 317 3 1 1 0 1 1 100 133 100 195 3718 1 3 0 635 1 95 95 0 1 -"27835" 4 317 3 1 1 0 1 1 100 133 195 380 2835 6 5 1 376 1 95 185 1 1 -"27836" 4 317 3 1 1 0 1 1 100 133 380 741 1572 2 8 0 310 1 95 361 0 1 -"27837" 4 317 3 1 1 0 1 1 100 133 741 926 3397 8 9 0 825 4 25 185 0 1 -"27838" 4 317 3 1 1 0 1 1 100 133 926 1389 1391 3 4 0 1011 3 50 463 0 1 -"27839" 4 317 3 1 1 0 1 1 100 133 1389 694 2576 5 7 1 635 3 50 695 0 0 -"27840" 4 317 3 1 1 0 1 1 100 133 694 1353 1522 7 4 1 431 1 95 659 1 1 -"27841" 4 317 3 1 1 0 1 1 100 133 1353 68 1312 4 1 0 368 1 95 1285 1 0 -"27842" 4 317 3 1 1 0 1 1 100 133 68 133 2468 9 6 1 2091 1 95 65 1 1 -"27843" 4 317 3 1 1 0 2 0 100 0 100 195 3123 8 1 1 1084 1 95 95 1 1 -"27844" 4 317 3 1 1 0 2 0 100 0 195 380 2494 6 2 1 853 1 95 185 1 1 -"27845" 4 317 3 1 1 0 2 0 100 0 380 19 1521 7 9 1 630 1 95 361 0 0 -"27846" 4 317 3 1 1 0 2 0 100 0 19 37 1238 2 10 0 384 1 95 18 0 1 -"27847" 4 317 3 1 1 0 2 0 100 0 37 2 1531 5 3 0 375 1 95 35 1 0 -"27848" 4 317 3 1 1 0 2 0 100 0 2 0 1043 4 2 0 919 1 95 2 1 0 -"27849" 4 317 3 1 1 0 3 0 100 0 100 195 1812 7 5 1 467 1 95 95 1 1 -"27850" 4 317 3 1 1 0 3 0 100 0 195 10 995 2 1 0 345 1 95 185 1 0 -"27851" 4 317 3 1 1 0 3 0 100 0 10 20 1014 8 6 1 388 1 95 10 1 1 -"27852" 4 317 3 1 1 0 3 0 100 0 20 39 998 4 7 0 356 1 95 19 0 1 -"27853" 4 317 3 1 1 0 3 0 100 0 39 76 736 3 10 0 378 1 95 37 0 1 -"27854" 4 317 3 1 1 0 3 0 100 0 76 4 1854 6 8 1 395 1 95 72 0 0 -"27855" 4 317 3 1 1 0 3 0 100 0 4 8 1054 9 2 1 377 1 95 4 1 1 -"27856" 4 317 3 1 1 0 3 0 100 0 8 0 1532 5 3 0 385 1 95 8 1 0 -"27857" 4 317 3 1 1 0 4 1 100 211 100 195 1581 2 3 0 328 1 95 95 0 1 -"27858" 4 317 3 1 1 0 4 1 100 211 195 293 1046 7 4 1 434 3 50 98 1 1 -"27859" 4 317 3 1 1 0 4 1 100 211 293 15 835 3 1 0 305 1 95 278 1 0 -"27860" 4 317 3 1 1 0 4 1 100 211 15 29 2047 1 9 0 1083 1 95 14 0 1 -"27861" 4 317 3 1 1 0 4 1 100 211 29 57 981 8 7 1 468 1 95 28 1 1 -"27862" 4 317 3 1 1 0 4 1 100 211 57 111 819 9 2 1 826 1 95 54 1 1 -"27863" 4 317 3 1 1 0 4 1 100 211 111 216 2728 5 6 0 452 1 95 105 0 1 -"27864" 4 317 3 1 1 0 4 1 100 211 216 108 2412 6 8 1 1063 3 50 108 0 0 -"27865" 4 317 3 1 1 0 4 1 100 211 108 211 761 4 7 0 747 1 95 103 0 1 -"27866" 4 317 4 0 1 1 1 1 100 84 100 150 4149 8 3 1 1462 3 50 50 1 1 -"27867" 4 317 4 0 1 1 1 1 100 84 150 225 2085 3 7 0 672 3 50 75 0 1 -"27868" 4 317 4 0 1 1 1 1 100 84 225 338 1301 8 2 1 404 3 50 113 1 1 -"27869" 4 317 4 0 1 1 1 1 100 84 338 84 1366 2 1 0 612 4 75 254 1 0 -"27870" 4 317 5 1 1 1 1 1 100 1351 100 195 3704 9 7 1 918 5 95 95 1 1 -"27871" 4 317 5 1 1 1 1 1 100 1351 195 205 1512 4 8 0 797 1 5 10 0 1 -"27872" 4 317 5 1 1 1 1 1 100 1351 205 359 1021 8 2 1 1338 4 75 154 1 1 -"27873" 4 317 5 1 1 1 1 1 100 1351 359 341 1184 2 1 0 373 1 5 18 1 0 -"27874" 4 317 5 1 1 1 1 1 100 1351 341 512 1160 7 6 1 871 3 50 171 1 1 -"27875" 4 317 5 1 1 1 1 1 100 1351 512 486 1771 5 3 0 340 1 5 26 1 0 -"27876" 4 317 5 1 1 1 1 1 100 1351 486 729 1345 3 6 0 439 3 50 243 0 1 -"27877" 4 317 5 1 1 1 1 1 100 1351 729 693 1255 6 9 1 1016 1 5 36 0 0 -"27878" 4 317 5 1 1 1 1 1 100 1351 693 1351 1173 1 4 0 1268 5 95 658 0 1 -"27879" 4 317 5 1 1 1 2 1 100 1383 100 195 2011 2 9 0 513 5 95 95 0 1 -"27880" 4 317 5 1 1 1 2 1 100 1383 195 205 1398 4 10 0 897 1 5 10 0 1 -"27881" 4 317 5 1 1 1 2 1 100 1383 205 154 1092 3 1 0 1406 2 25 51 1 0 -"27882" 4 317 5 1 1 1 2 1 100 1383 154 270 1240 8 6 1 883 4 75 116 1 1 -"27883" 4 317 5 1 1 1 2 1 100 1383 270 284 1223 5 7 0 506 1 5 14 0 1 
-"27884" 4 317 5 1 1 1 2 1 100 1383 284 270 967 6 8 1 902 1 5 14 0 0 -"27885" 4 317 5 1 1 1 2 1 100 1383 270 405 993 7 5 1 535 3 50 135 1 1 -"27886" 4 317 5 1 1 1 2 1 100 1383 405 709 915 1 10 0 1742 4 75 304 0 1 -"27887" 4 317 5 1 1 1 2 1 100 1383 709 1383 1232 9 3 1 636 5 95 674 1 1 -"27888" 4 317 5 1 1 1 3 1 100 226 100 125 1317 3 5 0 236 2 25 25 0 1 -"27889" 4 317 5 1 1 1 3 1 100 226 125 31 965 8 9 1 1093 4 75 94 0 0 -"27890" 4 317 5 1 1 1 3 1 100 226 31 47 1126 2 4 0 478 3 50 16 0 1 -"27891" 4 317 5 1 1 1 3 1 100 226 47 49 1461 6 3 1 381 1 5 2 1 1 -"27892" 4 317 5 1 1 1 3 1 100 226 49 74 968 7 6 1 832 3 50 25 1 1 -"27893" 4 317 5 1 1 1 3 1 100 226 74 70 2077 4 2 0 448 1 5 4 1 0 -"27894" 4 317 5 1 1 1 3 1 100 226 70 123 720 1 8 0 1116 4 75 53 0 1 -"27895" 4 317 5 1 1 1 3 1 100 226 123 129 1420 5 7 0 726 1 5 6 0 1 -"27896" 4 317 5 1 1 1 3 1 100 226 129 226 1008 9 5 1 710 4 75 97 1 1 -"27897" 4 317 5 1 1 1 4 1 100 888 100 175 1455 8 7 1 800 4 75 75 1 1 -"27898" 4 317 5 1 1 1 4 1 100 888 175 184 935 3 10 0 416 1 5 9 0 1 -"27899" 4 317 5 1 1 1 4 1 100 888 184 175 1122 7 9 1 322 1 5 9 0 0 -"27900" 4 317 5 1 1 1 4 1 100 888 175 306 943 9 1 1 785 4 75 131 1 1 -"27901" 4 317 5 1 1 1 4 1 100 888 306 536 1081 2 3 0 556 4 75 230 0 1 -"27902" 4 317 5 1 1 1 4 1 100 888 536 938 1502 1 8 0 1548 4 75 402 0 1 -"27903" 4 317 5 1 1 1 4 1 100 888 938 891 1088 5 4 0 521 1 5 47 1 0 -"27904" 4 317 5 1 1 1 4 1 100 888 891 846 877 4 2 0 450 1 5 45 1 0 -"27905" 4 317 5 1 1 1 4 1 100 888 846 888 1218 6 3 1 621 1 5 42 1 1 -"27906" 4 324 2 0 1 0 1 1 100 18 100 150 10055 2 7 0 1685 3 50 50 0 1 -"27907" 4 324 2 0 1 0 1 1 100 18 150 188 11794 3 7 0 1463 4 25 38 0 1 -"27908" 4 324 2 0 1 0 1 1 100 18 188 367 3646 2 8 0 3494 1 95 179 0 1 -"27909" 4 324 2 0 1 0 1 1 100 18 367 18 2373 8 9 1 2906 1 95 349 0 0 -"27910" 4 324 3 1 1 0 1 1 100 1628 100 195 3139 1 3 0 2651 1 95 95 0 1 -"27911" 4 324 3 1 1 0 1 1 100 1628 195 293 3163 6 5 1 778 3 50 98 1 1 -"27912" 4 324 3 1 1 0 1 1 100 1628 293 440 2034 2 8 0 576 3 50 147 0 1 -"27913" 4 324 3 1 1 0 1 1 100 1628 440 220 2497 8 9 1 504 3 50 220 0 0 -"27914" 4 324 3 1 1 0 1 1 100 1628 220 330 1933 3 4 0 431 3 50 110 0 1 -"27915" 4 324 3 1 1 0 1 1 100 1628 330 413 5888 5 7 0 981 4 25 83 0 1 -"27916" 4 324 3 1 1 0 1 1 100 1628 413 620 3652 7 4 1 772 3 50 207 1 1 -"27917" 4 324 3 1 1 0 1 1 100 1628 620 930 2428 4 1 1 1838 3 50 310 1 1 -"27918" 4 324 3 1 1 0 1 1 100 1628 930 1628 3180 9 6 1 571 2 75 698 1 1 -"27919" 4 324 3 1 1 0 2 1 100 76 100 195 3393 8 1 1 2492 1 95 95 1 1 -"27920" 4 324 3 1 1 0 2 1 100 76 195 97 4219 6 2 0 592 3 50 98 1 0 -"27921" 4 324 3 1 1 0 2 1 100 76 97 48 1761 7 9 1 611 3 50 49 0 0 -"27922" 4 324 3 1 1 0 2 1 100 76 48 84 1747 2 10 0 4445 2 75 36 0 1 -"27923" 4 324 3 1 1 0 2 1 100 76 84 42 3367 5 3 0 965 3 50 42 1 0 -"27924" 4 324 3 1 1 0 2 1 100 76 42 10 2348 4 2 0 418 2 75 32 1 0 -"27925" 4 324 3 1 1 0 2 1 100 76 10 20 1955 3 5 0 1188 1 95 10 0 1 -"27926" 4 324 3 1 1 0 2 1 100 76 20 39 1297 9 4 1 881 1 95 19 1 1 -"27927" 4 324 3 1 1 0 2 1 100 76 39 76 1237 1 7 0 766 1 95 37 0 1 -"27928" 4 324 3 1 1 0 3 1 100 411 100 175 1949 7 5 1 1278 2 75 75 1 1 -"27929" 4 324 3 1 1 0 3 1 100 411 175 44 1502 2 1 0 932 2 75 131 1 0 -"27930" 4 324 3 1 1 0 3 1 100 411 44 77 1581 8 6 1 296 2 75 33 1 1 -"27931" 4 324 3 1 1 0 3 1 100 411 77 38 2136 4 7 1 439 3 50 39 0 0 -"27932" 4 324 3 1 1 0 3 1 100 411 38 74 2119 3 10 0 3264 1 95 36 0 1 -"27933" 4 324 3 1 1 0 3 1 100 411 74 144 2434 6 8 0 716 1 95 70 0 1 -"27934" 4 324 3 1 1 0 3 1 100 411 144 281 1495 9 2 1 710 1 95 137 1 1 
-"27935" 4 324 3 1 1 0 3 1 100 411 281 211 2282 5 3 0 994 4 25 70 1 0 -"27936" 4 324 3 1 1 0 3 1 100 411 211 411 1425 1 10 0 2714 1 95 200 0 1 -"27937" 4 324 3 1 1 0 4 1 100 1020 100 175 1977 2 3 0 473 2 75 75 0 1 -"27938" 4 324 3 1 1 0 4 1 100 1020 175 263 4638 7 4 1 999 3 50 88 1 1 -"27939" 4 324 3 1 1 0 4 1 100 1020 263 131 2374 3 1 0 1050 3 50 132 1 0 -"27940" 4 324 3 1 1 0 4 1 100 1020 131 255 1255 1 9 0 656 1 95 124 0 1 -"27941" 4 324 3 1 1 0 4 1 100 1020 255 446 1782 8 7 1 718 2 75 191 1 1 -"27942" 4 324 3 1 1 0 4 1 100 1020 446 870 2810 9 2 1 3330 1 95 424 1 1 -"27943" 4 324 3 1 1 0 4 1 100 1020 870 1088 3129 5 6 0 2163 4 25 218 0 1 -"27944" 4 324 3 1 1 0 4 1 100 1020 1088 1360 5361 6 8 0 664 4 25 272 0 1 -"27945" 4 324 3 1 1 0 4 1 100 1020 1360 1020 2550 4 7 1 1194 4 25 340 0 0 -"27946" 4 324 4 0 1 1 1 1 100 156 100 125 4168 8 3 1 615 2 25 25 1 1 -"27947" 4 324 4 0 1 1 1 1 100 156 125 131 5151 3 7 0 3159 1 5 6 0 1 -"27948" 4 324 4 0 1 1 1 1 100 156 131 164 2558 8 2 1 1706 2 25 33 1 1 -"27949" 4 324 4 0 1 1 1 1 100 156 164 156 3486 2 1 0 3518 1 5 8 1 0 -"27950" 4 324 5 1 1 1 1 1 100 419 100 150 2624 9 7 1 529 3 50 50 1 1 -"27951" 4 324 5 1 1 1 1 1 100 419 150 158 2349 4 8 0 1587 1 5 8 0 1 -"27952" 4 324 5 1 1 1 1 1 100 419 158 237 1668 8 2 1 475 3 50 79 1 1 -"27953" 4 324 5 1 1 1 1 1 100 419 237 178 1338 2 1 0 728 2 25 59 1 0 -"27954" 4 324 5 1 1 1 1 1 100 419 178 187 1970 7 6 1 754 1 5 9 1 1 -"27955" 4 324 5 1 1 1 1 1 100 419 187 196 3372 5 3 1 3105 1 5 9 1 1 -"27956" 4 324 5 1 1 1 1 1 100 419 196 294 1755 3 6 0 381 3 50 98 0 1 -"27957" 4 324 5 1 1 1 1 1 100 419 294 279 3169 6 9 1 655 1 5 15 0 0 -"27958" 4 324 5 1 1 1 1 1 100 419 279 419 1237 1 4 0 888 3 50 140 0 1 -"27959" 4 324 5 1 1 1 2 1 100 248 100 125 1678 2 9 0 1451 2 25 25 0 1 -"27960" 4 324 5 1 1 1 2 1 100 248 125 94 2875 4 10 1 449 2 25 31 0 0 -"27961" 4 324 5 1 1 1 2 1 100 248 94 70 1321 3 1 0 743 2 25 24 1 0 -"27962" 4 324 5 1 1 1 2 1 100 248 70 105 1417 8 6 1 532 3 50 35 1 1 -"27963" 4 324 5 1 1 1 2 1 100 248 105 100 2632 5 7 1 1120 1 5 5 0 0 -"27964" 4 324 5 1 1 1 2 1 100 248 100 75 1360 6 8 1 869 2 25 25 0 0 -"27965" 4 324 5 1 1 1 2 1 100 248 75 94 1209 7 5 1 894 2 25 19 1 1 -"27966" 4 324 5 1 1 1 2 1 100 248 94 165 1360 1 10 0 493 4 75 71 0 1 -"27967" 4 324 5 1 1 1 2 1 100 248 165 248 1123 9 3 1 739 3 50 83 1 1 -"27968" 4 324 5 1 1 1 3 1 100 329 100 125 1651 3 5 0 810 2 25 25 0 1 -"27969" 4 324 5 1 1 1 3 1 100 329 125 62 1337 8 9 1 543 3 50 63 0 0 -"27970" 4 324 5 1 1 1 3 1 100 329 62 93 1314 2 4 0 914 3 50 31 0 1 -"27971" 4 324 5 1 1 1 3 1 100 329 93 88 3011 6 3 0 706 1 5 5 1 0 -"27972" 4 324 5 1 1 1 3 1 100 329 88 132 2717 7 6 1 410 3 50 44 1 1 -"27973" 4 324 5 1 1 1 3 1 100 329 132 139 2443 4 2 1 757 1 5 7 1 1 -"27974" 4 324 5 1 1 1 3 1 100 329 139 209 1214 1 8 0 1650 3 50 70 0 1 -"27975" 4 324 5 1 1 1 3 1 100 329 209 219 1809 5 7 0 680 1 5 10 0 1 -"27976" 4 324 5 1 1 1 3 1 100 329 219 329 1277 9 5 1 886 3 50 110 1 1 -"27977" 4 324 5 1 1 1 4 1 100 377 100 150 1226 8 7 1 478 3 50 50 1 1 -"27978" 4 324 5 1 1 1 4 1 100 377 150 188 1508 3 10 0 562 2 25 38 0 1 -"27979" 4 324 5 1 1 1 4 1 100 377 188 141 2581 7 9 1 656 2 25 47 0 0 -"27980" 4 324 5 1 1 1 4 1 100 377 141 212 1149 9 1 1 501 3 50 71 1 1 -"27981" 4 324 5 1 1 1 4 1 100 377 212 265 1210 2 3 0 1119 2 25 53 0 1 -"27982" 4 324 5 1 1 1 4 1 100 377 265 398 1591 1 8 0 985 3 50 133 0 1 -"27983" 4 324 5 1 1 1 4 1 100 377 398 378 1583 5 4 0 586 1 5 20 1 0 -"27984" 4 324 5 1 1 1 4 1 100 377 378 359 1500 4 2 0 658 1 5 19 1 0 -"27985" 4 324 5 1 1 1 4 1 100 377 359 377 
1392 6 3 1 715 1 5 18 1 1 -"27986" 4 330 2 0 1 0 1 1 100 141 100 150 6481 2 7 0 1395 3 50 50 0 1 -"27987" 4 330 2 0 1 0 1 1 100 141 150 188 7613 3 7 0 897 4 25 38 0 1 -"27988" 4 330 2 0 1 0 1 1 100 141 188 282 2468 2 8 0 1020 3 50 94 0 1 -"27989" 4 330 2 0 1 0 1 1 100 141 282 141 1713 8 9 1 1702 3 50 141 0 0 -"27990" 4 330 3 1 1 0 1 1 100 632 100 195 29748 1 3 0 1168 1 95 95 0 1 -"27991" 4 330 3 1 1 0 1 1 100 632 195 293 1951 6 5 1 3288 3 50 98 1 1 -"27992" 4 330 3 1 1 0 1 1 100 632 293 440 2097 2 8 0 779 3 50 147 0 1 -"27993" 4 330 3 1 1 0 1 1 100 632 440 330 1810 8 9 1 810 4 25 110 0 0 -"27994" 4 330 3 1 1 0 1 1 100 632 330 495 1402 3 4 0 1437 3 50 165 0 1 -"27995" 4 330 3 1 1 0 1 1 100 632 495 371 3757 5 7 1 784 4 25 124 0 0 -"27996" 4 330 3 1 1 0 1 1 100 632 371 649 1616 7 4 1 993 2 75 278 1 1 -"27997" 4 330 3 1 1 0 1 1 100 632 649 324 2783 4 1 0 680 3 50 325 1 0 -"27998" 4 330 3 1 1 0 1 1 100 632 324 632 1277 9 6 1 1366 1 95 308 1 1 -"27999" 4 330 3 1 1 0 2 1 100 544 100 175 4815 8 1 1 2000 2 75 75 1 1 -"28000" 4 330 3 1 1 0 2 1 100 544 175 263 2885 6 2 1 1362 3 50 88 1 1 -"28001" 4 330 3 1 1 0 2 1 100 544 263 131 4278 7 9 1 2656 3 50 132 0 0 -"28002" 4 330 3 1 1 0 2 1 100 544 131 255 1735 2 10 0 874 1 95 124 0 1 -"28003" 4 330 3 1 1 0 2 1 100 544 255 191 2621 5 3 0 1087 4 25 64 1 0 -"28004" 4 330 3 1 1 0 2 1 100 544 191 95 2184 4 2 0 742 3 50 96 1 0 -"28005" 4 330 3 1 1 0 2 1 100 544 95 143 2784 3 5 0 1689 3 50 48 0 1 -"28006" 4 330 3 1 1 0 2 1 100 544 143 279 1686 9 4 1 700 1 95 136 1 1 -"28007" 4 330 3 1 1 0 2 1 100 544 279 544 1704 1 7 0 1229 1 95 265 0 1 -"28008" 4 330 3 1 1 0 3 1 100 64 100 175 3277 7 5 1 1823 2 75 75 1 1 -"28009" 4 330 3 1 1 0 3 1 100 64 175 9 1390 2 1 0 1849 1 95 166 1 0 -"28010" 4 330 3 1 1 0 3 1 100 64 9 18 1694 8 6 1 787 1 95 9 1 1 -"28011" 4 330 3 1 1 0 3 1 100 64 18 35 2594 4 7 0 1005 1 95 17 0 1 -"28012" 4 330 3 1 1 0 3 1 100 64 35 68 2706 3 10 0 1047 1 95 33 0 1 -"28013" 4 330 3 1 1 0 3 1 100 64 68 34 1346 6 8 1 656 3 50 34 0 0 -"28014" 4 330 3 1 1 0 3 1 100 64 34 66 2502 9 2 1 548 1 95 32 1 1 -"28015" 4 330 3 1 1 0 3 1 100 64 66 33 2696 5 3 0 1269 3 50 33 1 0 -"28016" 4 330 3 1 1 0 3 1 100 64 33 64 2438 1 10 0 2664 1 95 31 0 1 -"28017" 4 330 3 1 1 0 4 1 100 1213 100 195 2826 2 3 0 681 1 95 95 0 1 -"28018" 4 330 3 1 1 0 4 1 100 1213 195 293 1718 7 4 1 955 3 50 98 1 1 -"28019" 4 330 3 1 1 0 4 1 100 1213 293 146 4739 3 1 0 471 3 50 147 1 0 -"28020" 4 330 3 1 1 0 4 1 100 1213 146 285 1654 1 9 0 741 1 95 139 0 1 -"28021" 4 330 3 1 1 0 4 1 100 1213 285 499 1879 8 7 1 699 2 75 214 1 1 -"28022" 4 330 3 1 1 0 4 1 100 1213 499 973 1727 9 2 1 434 1 95 474 1 1 -"28023" 4 330 3 1 1 0 4 1 100 1213 973 1216 3171 5 6 0 2434 4 25 243 0 1 -"28024" 4 330 3 1 1 0 4 1 100 1213 1216 1155 2307 6 8 1 764 5 5 61 0 0 -"28025" 4 330 3 1 1 0 4 1 100 1213 1155 1213 1813 4 7 0 653 5 5 58 0 1 -"28026" 4 330 4 0 1 1 1 1 100 169 100 150 4687 8 3 1 1718 3 50 50 1 1 -"28027" 4 330 4 0 1 1 1 1 100 169 150 225 2881 3 7 0 701 3 50 75 0 1 -"28028" 4 330 4 0 1 1 1 1 100 169 225 338 2346 8 2 1 1272 3 50 113 1 1 -"28029" 4 330 4 0 1 1 1 1 100 169 338 169 1568 2 1 0 1116 3 50 169 1 0 -"28030" 4 330 5 1 1 1 1 1 100 322 100 175 2044 9 7 1 680 4 75 75 1 1 -"28031" 4 330 5 1 1 1 1 1 100 322 175 219 1533 4 8 0 706 2 25 44 0 1 -"28032" 4 330 5 1 1 1 1 1 100 322 219 274 1333 8 2 1 1124 2 25 55 1 1 -"28033" 4 330 5 1 1 1 1 1 100 322 274 137 2251 2 1 0 590 3 50 137 1 0 -"28034" 4 330 5 1 1 1 1 1 100 322 137 206 1991 7 6 1 614 3 50 69 1 1 -"28035" 4 330 5 1 1 1 1 1 100 322 206 196 2270 5 3 0 920 1 5 10 1 
0 -"28036" 4 330 5 1 1 1 1 1 100 322 196 245 1769 3 6 0 731 2 25 49 0 1 -"28037" 4 330 5 1 1 1 1 1 100 322 245 184 1470 6 9 1 1061 2 25 61 0 0 -"28038" 4 330 5 1 1 1 1 1 100 322 184 322 1424 1 4 0 2258 4 75 138 0 1 -"28039" 4 330 5 1 1 1 2 1 100 392 100 125 1866 2 9 0 1399 2 25 25 0 1 -"28040" 4 330 5 1 1 1 2 1 100 392 125 156 1314 4 10 0 3457 2 25 31 0 1 -"28041" 4 330 5 1 1 1 2 1 100 392 156 78 890 3 1 0 1190 3 50 78 1 0 -"28042" 4 330 5 1 1 1 2 1 100 392 78 117 2098 8 6 1 821 3 50 39 1 1 -"28043" 4 330 5 1 1 1 2 1 100 392 117 123 1171 5 7 0 505 1 5 6 0 1 -"28044" 4 330 5 1 1 1 2 1 100 392 123 92 2238 6 8 1 673 2 25 31 0 0 -"28045" 4 330 5 1 1 1 2 1 100 392 92 115 1118 7 5 1 686 2 25 23 1 1 -"28046" 4 330 5 1 1 1 2 1 100 392 115 224 947 1 10 0 1005 5 95 109 0 1 -"28047" 4 330 5 1 1 1 2 1 100 392 224 392 1146 9 3 1 794 4 75 168 1 1 -"28048" 4 330 5 1 1 1 3 1 100 270 100 150 1209 3 5 0 598 3 50 50 0 1 -"28049" 4 330 5 1 1 1 3 1 100 270 150 75 1159 8 9 1 1342 3 50 75 0 0 -"28050" 4 330 5 1 1 1 3 1 100 270 75 113 1067 2 4 0 598 3 50 38 0 1 -"28051" 4 330 5 1 1 1 3 1 100 270 113 170 1407 6 3 1 2106 3 50 57 1 1 -"28052" 4 330 5 1 1 1 3 1 100 270 170 213 3627 7 6 1 2564 2 25 43 1 1 -"28053" 4 330 5 1 1 1 3 1 100 270 213 160 1087 4 2 0 833 2 25 53 1 0 -"28054" 4 330 5 1 1 1 3 1 100 270 160 240 1543 1 8 0 739 3 50 80 0 1 -"28055" 4 330 5 1 1 1 3 1 100 270 240 180 8277 5 7 1 784 2 25 60 0 0 -"28056" 4 330 5 1 1 1 3 1 100 270 180 270 1913 9 5 1 659 3 50 90 1 1 -"28057" 4 330 5 1 1 1 4 1 100 508 100 150 1544 8 7 1 588 3 50 50 1 1 -"28058" 4 330 5 1 1 1 4 1 100 508 150 225 1541 3 10 0 841 3 50 75 0 1 -"28059" 4 330 5 1 1 1 4 1 100 508 225 112 1710 7 9 1 643 3 50 113 0 0 -"28060" 4 330 5 1 1 1 4 1 100 508 112 196 1257 9 1 1 1131 4 75 84 1 1 -"28061" 4 330 5 1 1 1 4 1 100 508 196 343 1307 2 3 0 626 4 75 147 0 1 -"28062" 4 330 5 1 1 1 4 1 100 508 343 515 1354 1 8 0 1759 3 50 172 0 1 -"28063" 4 330 5 1 1 1 4 1 100 508 515 541 2193 5 4 1 1774 1 5 26 1 1 -"28064" 4 330 5 1 1 1 4 1 100 508 541 406 1766 4 2 0 1080 2 25 135 1 0 -"28065" 4 330 5 1 1 1 4 1 100 508 406 508 1874 6 3 1 830 2 25 102 1 1 -"28066" 4 341 2 0 1 1 1 1 100 296 100 150 5331 8 3 1 1172 3 50 50 1 1 -"28067" 4 341 2 0 1 1 1 1 100 296 150 188 10634 3 7 0 1205 2 25 38 0 1 -"28068" 4 341 2 0 1 1 1 1 100 296 188 282 4857 8 2 1 864 3 50 94 1 1 -"28069" 4 341 2 0 1 1 1 1 100 296 282 296 2716 2 1 1 936 1 5 14 1 1 -"28070" 4 341 3 1 1 1 1 1 100 353 100 150 4362 9 7 1 3248 3 50 50 1 1 -"28071" 4 341 3 1 1 1 1 1 100 353 150 188 4506 4 8 0 417 2 25 38 0 1 -"28072" 4 341 3 1 1 1 1 1 100 353 188 235 2155 8 2 1 1498 2 25 47 1 1 -"28073" 4 341 3 1 1 1 1 1 100 353 235 223 4480 2 1 0 1008 1 5 12 1 0 -"28074" 4 341 3 1 1 1 1 1 100 353 223 335 2173 7 6 1 812 3 50 112 1 1 -"28075" 4 341 3 1 1 1 1 1 100 353 335 251 3346 5 3 0 289 2 25 84 1 0 -"28076" 4 341 3 1 1 1 1 1 100 353 251 314 1312 3 6 0 880 2 25 63 0 1 -"28077" 4 341 3 1 1 1 1 1 100 353 314 235 2339 6 9 1 1010 2 25 79 0 0 -"28078" 4 341 3 1 1 1 1 1 100 353 235 353 5327 1 4 0 455 3 50 118 0 1 -"28079" 4 341 3 1 1 1 2 1 100 115 100 150 3019 2 9 0 777 3 50 50 0 1 -"28080" 4 341 3 1 1 1 2 1 100 115 150 112 6514 4 10 1 289 2 25 38 0 0 -"28081" 4 341 3 1 1 1 2 1 100 115 112 140 1584 3 1 1 339 2 25 28 1 1 -"28082" 4 341 3 1 1 1 2 1 100 115 140 245 2656 8 6 1 832 4 75 105 1 1 -"28083" 4 341 3 1 1 1 2 1 100 115 245 184 3341 5 7 1 964 2 25 61 0 0 -"28084" 4 341 3 1 1 1 2 1 100 115 184 46 1403 6 8 1 710 4 75 138 0 0 -"28085" 4 341 3 1 1 1 2 1 100 115 46 69 1234 7 5 1 517 3 50 23 1 1 -"28086" 4 341 3 1 1 1 2 1 100 
115 69 121 3602 1 10 0 1789 4 75 52 0 1 -"28087" 4 341 3 1 1 1 2 1 100 115 121 115 3357 9 3 0 515 1 5 6 1 0 -"28088" 4 341 3 1 1 1 3 1 100 129 100 125 1598 3 5 0 888 2 25 25 0 1 -"28089" 4 341 3 1 1 1 3 1 100 129 125 62 1939 8 9 1 357 3 50 63 0 0 -"28090" 4 341 3 1 1 1 3 1 100 129 62 93 1313 2 4 0 1153 3 50 31 0 1 -"28091" 4 341 3 1 1 1 3 1 100 129 93 116 1199 6 3 1 332 2 25 23 1 1 -"28092" 4 341 3 1 1 1 3 1 100 129 116 145 1202 7 6 1 702 2 25 29 1 1 -"28093" 4 341 3 1 1 1 3 1 100 129 145 109 3730 4 2 0 646 2 25 36 1 0 -"28094" 4 341 3 1 1 1 3 1 100 129 109 164 1341 1 8 0 291 3 50 55 0 1 -"28095" 4 341 3 1 1 1 3 1 100 129 164 123 1602 5 7 1 276 2 25 41 0 0 -"28096" 4 341 3 1 1 1 3 1 100 129 123 129 1524 9 5 1 593 1 5 6 1 1 -"28097" 4 341 3 1 1 1 4 1 100 510 100 195 2525 8 7 1 1307 5 95 95 1 1 -"28098" 4 341 3 1 1 1 4 1 100 510 195 293 3403 3 10 0 326 3 50 98 0 1 -"28099" 4 341 3 1 1 1 4 1 100 510 293 308 1908 7 9 0 727 1 5 15 0 1 -"28100" 4 341 3 1 1 1 4 1 100 510 308 539 1329 9 1 1 400 4 75 231 1 1 -"28101" 4 341 3 1 1 1 4 1 100 510 539 566 1515 2 3 0 1000 1 5 27 0 1 -"28102" 4 341 3 1 1 1 4 1 100 510 566 538 1902 1 8 1 265 1 5 28 0 0 -"28103" 4 341 3 1 1 1 4 1 100 510 538 565 1738 5 4 1 2757 1 5 27 1 1 -"28104" 4 341 3 1 1 1 4 1 100 510 565 537 1526 4 2 0 1491 1 5 28 1 0 -"28105" 4 341 3 1 1 1 4 1 100 510 537 510 1954 6 3 0 811 1 5 27 1 0 -"28106" 4 341 4 0 1 0 1 1 100 28 100 195 9060 2 7 0 4555 1 95 95 0 1 -"28107" 4 341 4 0 1 0 1 1 100 28 195 380 11495 3 7 0 1130 1 95 185 0 1 -"28108" 4 341 4 0 1 0 1 1 100 28 380 570 4657 2 8 0 2554 3 50 190 0 1 -"28109" 4 341 4 0 1 0 1 1 100 28 570 28 2757 8 9 1 319 1 95 542 0 0 -"28110" 4 341 5 1 1 0 1 0 100 0 100 195 1498 1 3 0 280 1 95 95 0 1 -"28111" 4 341 5 1 1 0 1 0 100 0 195 380 1304 6 5 1 1264 1 95 185 1 1 -"28112" 4 341 5 1 1 0 1 0 100 0 380 665 2225 2 8 0 805 2 75 285 0 1 -"28113" 4 341 5 1 1 0 1 0 100 0 665 33 2267 8 9 1 309 1 95 632 0 0 -"28114" 4 341 5 1 1 0 1 0 100 0 33 64 2260 3 4 0 1301 1 95 31 0 1 -"28115" 4 341 5 1 1 0 1 0 100 0 64 3 2349 5 7 1 266 1 95 61 0 0 -"28116" 4 341 5 1 1 0 1 0 100 0 3 6 2299 7 4 1 363 1 95 3 1 1 -"28117" 4 341 5 1 1 0 1 0 100 0 6 0 1239 4 1 0 1010 1 95 6 1 0 -"28118" 4 341 5 1 1 0 2 1 100 53 100 195 1376 8 1 1 260 1 95 95 1 1 -"28119" 4 341 5 1 1 0 2 1 100 53 195 293 1085 6 2 1 903 3 50 98 1 1 -"28120" 4 341 5 1 1 0 2 1 100 53 293 146 977 7 9 1 1657 3 50 147 0 0 -"28121" 4 341 5 1 1 0 2 1 100 53 146 285 983 2 10 0 230 1 95 139 0 1 -"28122" 4 341 5 1 1 0 2 1 100 53 285 142 1120 5 3 0 441 3 50 143 1 0 -"28123" 4 341 5 1 1 0 2 1 100 53 142 7 2305 4 2 0 345 1 95 135 1 0 -"28124" 4 341 5 1 1 0 2 1 100 53 7 14 1622 3 5 0 574 1 95 7 0 1 -"28125" 4 341 5 1 1 0 2 1 100 53 14 27 951 9 4 1 248 1 95 13 1 1 -"28126" 4 341 5 1 1 0 2 1 100 53 27 53 1601 1 7 0 275 1 95 26 0 1 -"28127" 4 341 5 1 1 0 3 1 100 31 100 195 1577 7 5 1 203 1 95 95 1 1 -"28128" 4 341 5 1 1 0 3 1 100 31 195 10 1238 2 1 0 177 1 95 185 1 0 -"28129" 4 341 5 1 1 0 3 1 100 31 10 20 934 8 6 1 167 1 95 10 1 1 -"28130" 4 341 5 1 1 0 3 1 100 31 20 39 900 4 7 0 182 1 95 19 0 1 -"28131" 4 341 5 1 1 0 3 1 100 31 39 76 715 3 10 0 718 1 95 37 0 1 -"28132" 4 341 5 1 1 0 3 1 100 31 76 4 2193 6 8 1 454 1 95 72 0 0 -"28133" 4 341 5 1 1 0 3 1 100 31 4 8 822 9 2 1 252 1 95 4 1 1 -"28134" 4 341 5 1 1 0 3 1 100 31 8 16 717 5 3 1 200 1 95 8 1 1 -"28135" 4 341 5 1 1 0 3 1 100 31 16 31 612 1 10 0 204 1 95 15 0 1 -"28136" 4 341 5 1 1 0 4 1 100 125 100 195 1361 2 3 0 277 1 95 95 0 1 -"28137" 4 341 5 1 1 0 4 1 100 125 195 380 1910 7 4 1 696 1 95 185 1 1 -"28138" 4 341 5 1 1 0 
4 1 100 125 380 285 780 3 1 0 853 4 25 95 1 0 -"28139" 4 341 5 1 1 0 4 1 100 125 285 556 1258 1 9 0 179 1 95 271 0 1 -"28140" 4 341 5 1 1 0 4 1 100 125 556 1084 912 8 7 1 211 1 95 528 1 1 -"28141" 4 341 5 1 1 0 4 1 100 125 1084 1355 5198 9 2 1 709 4 25 271 1 1 -"28142" 4 341 5 1 1 0 4 1 100 125 1355 2642 2480 5 6 0 192 1 95 1287 0 1 -"28143" 4 341 5 1 1 0 4 1 100 125 2642 2510 3404 6 8 1 312 5 5 132 0 0 -"28144" 4 341 5 1 1 0 4 1 100 125 2510 125 2230 4 7 1 249 1 95 2385 0 0 -"28145" 4 343 2 0 1 0 1 1 100 1 100 175 18556 2 7 0 1343 2 75 75 0 1 -"28146" 4 343 2 0 1 0 1 1 100 1 175 341 8020 3 7 0 3676 1 95 166 0 1 -"28147" 4 343 2 0 1 0 1 1 100 1 341 17 8306 2 8 1 2924 1 95 324 0 0 -"28148" 4 343 2 0 1 0 1 1 100 1 17 1 5166 8 9 1 1149 1 95 16 0 0 -"28149" 4 343 3 1 1 0 1 0 100 1 100 195 8129 1 3 0 1251 1 95 95 0 1 -"28150" 4 343 3 1 1 0 1 0 100 1 195 10 15117 6 5 0 1003 1 95 185 1 0 -"28151" 4 343 3 1 1 0 1 0 100 1 10 20 3105 2 8 0 1018 1 95 10 0 1 -"28152" 4 343 3 1 1 0 1 0 100 1 20 1 2175 8 9 1 1045 1 95 19 0 0 -"28153" 4 343 3 1 1 0 2 1 100 1 100 195 3908 8 1 1 776 1 95 95 1 1 -"28154" 4 343 3 1 1 0 2 1 100 1 195 10 9752 6 2 0 1189 1 95 185 1 0 -"28155" 4 343 3 1 1 0 2 1 100 1 10 20 3458 7 9 0 923 1 95 10 0 1 -"28156" 4 343 3 1 1 0 2 1 100 1 20 39 2316 2 10 0 1083 1 95 19 0 1 -"28157" 4 343 3 1 1 0 2 1 100 1 39 76 8381 5 3 1 1067 1 95 37 1 1 -"28158" 4 343 3 1 1 0 2 1 100 1 76 4 6110 4 2 0 1048 1 95 72 1 0 -"28159" 4 343 3 1 1 0 2 1 100 1 4 8 5500 3 5 0 1052 1 95 4 0 1 -"28160" 4 343 3 1 1 0 2 1 100 1 8 16 6223 9 4 1 1244 1 95 8 1 1 -"28161" 4 343 3 1 1 0 2 1 100 1 16 1 3510 1 7 1 1093 1 95 15 0 0 -"28162" 4 343 3 1 1 0 3 0 100 0 100 195 6092 7 5 1 1375 1 95 95 1 1 -"28163" 4 343 3 1 1 0 3 0 100 0 195 10 1939 2 1 0 1028 1 95 185 1 0 -"28164" 4 343 3 1 1 0 3 0 100 0 10 0 2344 8 6 0 989 1 95 10 1 0 -"28165" 4 343 3 1 1 0 4 1 100 27 100 195 2927 2 3 0 1474 1 95 95 0 1 -"28166" 4 343 3 1 1 0 4 1 100 27 195 380 1745 7 4 1 1164 1 95 185 1 1 -"28167" 4 343 3 1 1 0 4 1 100 27 380 741 3265 3 1 1 972 1 95 361 1 1 -"28168" 4 343 3 1 1 0 4 1 100 27 741 1445 3635 1 9 0 1180 1 95 704 0 1 -"28169" 4 343 3 1 1 0 4 1 100 27 1445 72 7153 8 7 0 866 1 95 1373 1 0 -"28170" 4 343 3 1 1 0 4 1 100 27 72 140 1445 9 2 1 942 1 95 68 1 1 -"28171" 4 343 3 1 1 0 4 1 100 27 140 273 5086 5 6 0 1658 1 95 133 0 1 -"28172" 4 343 3 1 1 0 4 1 100 27 273 532 6064 6 8 0 1453 1 95 259 0 1 -"28173" 4 343 3 1 1 0 4 1 100 27 532 27 2286 4 7 1 991 1 95 505 0 0 -"28174" 4 343 4 0 1 1 1 1 100 130 100 125 4263 8 3 1 4506 2 25 25 1 1 -"28175" 4 343 4 0 1 1 1 1 100 130 125 131 2296 3 7 0 1440 1 5 6 0 1 -"28176" 4 343 4 0 1 1 1 1 100 130 131 124 2529 8 2 0 918 1 5 7 1 0 -"28177" 4 343 4 0 1 1 1 1 100 130 124 130 6272 2 1 1 1243 1 5 6 1 1 -"28178" 4 343 5 1 1 1 1 1 100 116 100 105 4036 9 7 1 1441 1 5 5 1 1 -"28179" 4 343 5 1 1 1 1 1 100 116 105 110 8354 4 8 0 1027 1 5 5 0 1 -"28180" 4 343 5 1 1 1 1 1 100 116 110 116 3911 8 2 1 1274 1 5 6 1 1 -"28181" 4 343 5 1 1 1 1 1 100 116 116 122 5874 2 1 1 1552 1 5 6 1 1 -"28182" 4 343 5 1 1 1 1 1 100 116 122 128 4123 7 6 1 1212 1 5 6 1 1 -"28183" 4 343 5 1 1 1 1 1 100 116 128 122 4616 5 3 0 1297 1 5 6 1 0 -"28184" 4 343 5 1 1 1 1 1 100 116 122 116 4225 3 6 1 1157 1 5 6 0 0 -"28185" 4 343 5 1 1 1 1 1 100 116 116 122 2643 6 9 0 1276 1 5 6 0 1 -"28186" 4 343 5 1 1 1 1 1 100 116 122 116 4972 1 4 1 1501 1 5 6 0 0 -"28187" 4 343 5 1 1 1 2 1 100 141 100 105 5388 2 9 0 1273 1 5 5 0 1 -"28188" 4 343 5 1 1 1 2 1 100 141 105 100 3626 4 10 1 1115 1 5 5 0 0 -"28189" 4 343 5 1 1 1 2 1 100 141 100 105 3483 3 1 1 
1115 1 5 5 1 1 -"28190" 4 343 5 1 1 1 2 1 100 141 105 110 3339 8 6 1 1018 1 5 5 1 1 -"28191" 4 343 5 1 1 1 2 1 100 141 110 116 4702 5 7 0 1348 1 5 6 0 1 -"28192" 4 343 5 1 1 1 2 1 100 141 116 122 11937 6 8 0 1350 1 5 6 0 1 -"28193" 4 343 5 1 1 1 2 1 100 141 122 128 4899 7 5 1 1207 1 5 6 1 1 -"28194" 4 343 5 1 1 1 2 1 100 141 128 134 6932 1 10 0 1183 1 5 6 0 1 -"28195" 4 343 5 1 1 1 2 1 100 141 134 141 7116 9 3 1 1279 1 5 7 1 1 -"28196" 4 343 5 1 1 1 3 1 100 116 100 105 4738 3 5 0 1254 1 5 5 0 1 -"28197" 4 343 5 1 1 1 3 1 100 116 105 100 3216 8 9 1 1044 1 5 5 0 0 -"28198" 4 343 5 1 1 1 3 1 100 116 100 105 2663 2 4 0 1174 1 5 5 0 1 -"28199" 4 343 5 1 1 1 3 1 100 116 105 100 3044 6 3 0 1137 1 5 5 1 0 -"28200" 4 343 5 1 1 1 3 1 100 116 100 95 3872 7 6 0 1285 1 5 5 1 0 -"28201" 4 343 5 1 1 1 3 1 100 116 95 100 5261 4 2 1 1216 1 5 5 1 1 -"28202" 4 343 5 1 1 1 3 1 100 116 100 105 3524 1 8 0 1426 1 5 5 0 1 -"28203" 4 343 5 1 1 1 3 1 100 116 105 110 7650 5 7 0 1060 1 5 5 0 1 -"28204" 4 343 5 1 1 1 3 1 100 116 110 116 3279 9 5 1 1337 1 5 6 1 1 -"28205" 4 343 5 1 1 1 4 1 100 125 100 105 3541 8 7 1 1234 1 5 5 1 1 -"28206" 4 343 5 1 1 1 4 1 100 125 105 110 3916 3 10 0 1264 1 5 5 0 1 -"28207" 4 343 5 1 1 1 4 1 100 125 110 104 3345 7 9 1 1191 1 5 6 0 0 -"28208" 4 343 5 1 1 1 4 1 100 125 104 109 4283 9 1 1 653 1 5 5 1 1 -"28209" 4 343 5 1 1 1 4 1 100 125 109 114 7939 2 3 0 1110 1 5 5 0 1 -"28210" 4 343 5 1 1 1 4 1 100 125 114 108 6310 1 8 1 1039 1 5 6 0 0 -"28211" 4 343 5 1 1 1 4 1 100 125 108 113 3443 5 4 1 981 1 5 5 1 1 -"28212" 4 343 5 1 1 1 4 1 100 125 113 119 15084 4 2 1 1035 1 5 6 1 1 -"28213" 4 343 5 1 1 1 4 1 100 125 119 125 5232 6 3 1 1036 1 5 6 1 1 -"28214" 4 350 2 0 1 1 1 1 100 205 100 125 9031 8 3 1 2414 2 25 25 1 1 -"28215" 4 350 2 0 1 1 1 1 100 205 125 156 4145 3 7 0 2953 2 25 31 0 1 -"28216" 4 350 2 0 1 1 1 1 100 205 156 164 2882 8 2 1 4998 1 5 8 1 1 -"28217" 4 350 2 0 1 1 1 1 100 205 164 205 1958 2 1 1 302 2 25 41 1 1 -"28218" 4 350 3 1 1 1 1 1 100 72 100 150 2574 9 7 1 498 3 50 50 1 1 -"28219" 4 350 3 1 1 1 1 1 100 72 150 188 1312 4 8 0 767 2 25 38 0 1 -"28220" 4 350 3 1 1 1 1 1 100 72 188 141 1416 8 2 0 1075 2 25 47 1 0 -"28221" 4 350 3 1 1 1 1 1 100 72 141 70 1740 2 1 0 407 3 50 71 1 0 -"28222" 4 350 3 1 1 1 1 1 100 72 70 88 1633 7 6 1 465 2 25 18 1 1 -"28223" 4 350 3 1 1 1 1 1 100 72 88 66 2560 5 3 0 900 2 25 22 1 0 -"28224" 4 350 3 1 1 1 1 1 100 72 66 49 1614 3 6 1 671 2 25 17 0 0 -"28225" 4 350 3 1 1 1 1 1 100 72 49 37 1156 6 9 1 667 2 25 12 0 0 -"28226" 4 350 3 1 1 1 1 1 100 72 37 72 1321 1 4 0 1120 5 95 35 0 1 -"28227" 4 350 3 1 1 1 2 1 100 686 100 125 5297 2 9 0 795 2 25 25 0 1 -"28228" 4 350 3 1 1 1 2 1 100 686 125 156 1578 4 10 0 564 2 25 31 0 1 -"28229" 4 350 3 1 1 1 2 1 100 686 156 234 1351 3 1 1 540 3 50 78 1 1 -"28230" 4 350 3 1 1 1 2 1 100 686 234 293 2703 8 6 1 533 2 25 59 1 1 -"28231" 4 350 3 1 1 1 2 1 100 686 293 366 2393 5 7 0 852 2 25 73 0 1 -"28232" 4 350 3 1 1 1 2 1 100 686 366 458 1761 6 8 0 419 2 25 92 0 1 -"28233" 4 350 3 1 1 1 2 1 100 686 458 481 1348 7 5 1 1025 1 5 23 1 1 -"28234" 4 350 3 1 1 1 2 1 100 686 481 722 2110 1 10 0 415 3 50 241 0 1 -"28235" 4 350 3 1 1 1 2 1 100 686 722 686 2496 9 3 0 1031 1 5 36 1 0 -"28236" 4 350 3 1 1 1 3 1 100 63 100 150 2622 3 5 0 1192 3 50 50 0 1 -"28237" 4 350 3 1 1 1 3 1 100 63 150 37 1260 8 9 1 502 4 75 113 0 0 -"28238" 4 350 3 1 1 1 3 1 100 63 37 65 1283 2 4 0 380 4 75 28 0 1 -"28239" 4 350 3 1 1 1 3 1 100 63 65 49 1607 6 3 0 593 2 25 16 1 0 -"28240" 4 350 3 1 1 1 3 1 100 63 49 74 5113 7 6 1 492 3 50 25 1 1 -"28241" 4 350 3 
1 1 1 3 1 100 63 74 37 1658 4 2 0 481 3 50 37 1 0 -"28242" 4 350 3 1 1 1 3 1 100 63 37 72 1599 1 8 0 1378 5 95 35 0 1 -"28243" 4 350 3 1 1 1 3 1 100 63 72 36 1573 5 7 1 320 3 50 36 0 0 -"28244" 4 350 3 1 1 1 3 1 100 63 36 63 1637 9 5 1 725 4 75 27 1 1 -"28245" 4 350 3 1 1 1 4 1 100 791 100 195 2970 8 7 1 691 5 95 95 1 1 -"28246" 4 350 3 1 1 1 4 1 100 791 195 244 2063 3 10 0 747 2 25 49 0 1 -"28247" 4 350 3 1 1 1 4 1 100 791 244 183 1828 7 9 1 907 2 25 61 0 0 -"28248" 4 350 3 1 1 1 4 1 100 791 183 357 933 9 1 1 462 5 95 174 1 1 -"28249" 4 350 3 1 1 1 4 1 100 791 357 536 2437 2 3 0 775 3 50 179 0 1 -"28250" 4 350 3 1 1 1 4 1 100 791 536 804 4096 1 8 0 450 3 50 268 0 1 -"28251" 4 350 3 1 1 1 4 1 100 791 804 1005 1899 5 4 1 390 2 25 201 1 1 -"28252" 4 350 3 1 1 1 4 1 100 791 1005 1055 2072 4 2 1 556 1 5 50 1 1 -"28253" 4 350 3 1 1 1 4 1 100 791 1055 791 1630 6 3 0 503 2 25 264 1 0 -"28254" 4 350 4 0 1 0 1 1 100 5 100 195 5444 2 7 0 3431 1 95 95 0 1 -"28255" 4 350 4 0 1 0 1 1 100 5 195 380 7289 3 7 0 1147 1 95 185 0 1 -"28256" 4 350 4 0 1 0 1 1 100 5 380 95 2402 2 8 1 453 2 75 285 0 0 -"28257" 4 350 4 0 1 0 1 1 100 5 95 5 1469 8 9 1 1508 1 95 90 0 0 -"28258" 4 350 5 1 1 0 1 1 100 6 100 195 1917 1 3 0 776 1 95 95 0 1 -"28259" 4 350 5 1 1 0 1 1 100 6 195 380 1528 6 5 1 453 1 95 185 1 1 -"28260" 4 350 5 1 1 0 1 1 100 6 380 665 2812 2 8 0 484 2 75 285 0 1 -"28261" 4 350 5 1 1 0 1 1 100 6 665 33 1971 8 9 1 299 1 95 632 0 0 -"28262" 4 350 5 1 1 0 1 1 100 6 33 64 1178 3 4 0 608 1 95 31 0 1 -"28263" 4 350 5 1 1 0 1 1 100 6 64 32 1912 5 7 1 387 3 50 32 0 0 -"28264" 4 350 5 1 1 0 1 1 100 6 32 62 1117 7 4 1 293 1 95 30 1 1 -"28265" 4 350 5 1 1 0 1 1 100 6 62 3 1324 4 1 0 315 1 95 59 1 0 -"28266" 4 350 5 1 1 0 1 1 100 6 3 6 1137 9 6 1 377 1 95 3 1 1 -"28267" 4 350 5 1 1 0 2 1 100 14 100 195 1877 8 1 1 529 1 95 95 1 1 -"28268" 4 350 5 1 1 0 2 1 100 14 195 380 1133 6 2 1 420 1 95 185 1 1 -"28269" 4 350 5 1 1 0 2 1 100 14 380 570 1165 7 9 0 1116 3 50 190 0 1 -"28270" 4 350 5 1 1 0 2 1 100 14 570 1112 1540 2 10 0 288 1 95 542 0 1 -"28271" 4 350 5 1 1 0 2 1 100 14 1112 1390 1168 5 3 1 438 4 25 278 1 1 -"28272" 4 350 5 1 1 0 2 1 100 14 1390 69 1446 4 2 0 391 1 95 1321 1 0 -"28273" 4 350 5 1 1 0 2 1 100 14 69 135 1001 3 5 0 303 1 95 66 0 1 -"28274" 4 350 5 1 1 0 2 1 100 14 135 7 2549 9 4 0 265 1 95 128 1 0 -"28275" 4 350 5 1 1 0 2 1 100 14 7 14 986 1 7 0 233 1 95 7 0 1 -"28276" 4 350 5 1 1 0 3 0 100 0 100 195 1567 7 5 1 962 1 95 95 1 1 -"28277" 4 350 5 1 1 0 3 0 100 0 195 10 937 2 1 0 242 1 95 185 1 0 -"28278" 4 350 5 1 1 0 3 0 100 0 10 20 1452 8 6 1 356 1 95 10 1 1 -"28279" 4 350 5 1 1 0 3 0 100 0 20 39 1472 4 7 0 455 1 95 19 0 1 -"28280" 4 350 5 1 1 0 3 0 100 0 39 76 838 3 10 0 316 1 95 37 0 1 -"28281" 4 350 5 1 1 0 3 0 100 0 76 4 1171 6 8 1 256 1 95 72 0 0 -"28282" 4 350 5 1 1 0 3 0 100 0 4 8 993 9 2 1 304 1 95 4 1 1 -"28283" 4 350 5 1 1 0 3 0 100 0 8 0 870 5 3 0 362 1 95 8 1 0 -"28284" 4 350 5 1 1 0 4 1 100 3568 100 195 1558 2 3 0 353 1 95 95 0 1 -"28285" 4 350 5 1 1 0 4 1 100 3568 195 380 1149 7 4 1 244 1 95 185 1 1 -"28286" 4 350 5 1 1 0 4 1 100 3568 380 570 1358 3 1 1 1115 3 50 190 1 1 -"28287" 4 350 5 1 1 0 4 1 100 3568 570 1112 1699 1 9 0 216 1 95 542 0 1 -"28288" 4 350 5 1 1 0 4 1 100 3568 1112 834 1523 8 7 0 329 4 25 278 1 0 -"28289" 4 350 5 1 1 0 4 1 100 3568 834 1626 1558 9 2 1 202 1 95 792 1 1 -"28290" 4 350 5 1 1 0 4 1 100 3568 1626 3171 1349 5 6 0 259 1 95 1545 0 1 -"28291" 4 350 5 1 1 0 4 1 100 3568 3171 4757 1804 6 8 0 1132 3 50 1586 0 1 -"28292" 4 350 5 1 1 0 4 1 100 3568 4757 3568 1163 4 7 1 
690 4 25 1189 0 0 -"28293" 4 352 2 0 1 1 1 1 100 98 100 150 4724 8 3 1 1033 3 50 50 1 1 -"28294" 4 352 2 0 1 1 1 1 100 98 150 225 6101 3 7 0 1721 3 50 75 0 1 -"28295" 4 352 2 0 1 1 1 1 100 98 225 394 3420 8 2 1 807 4 75 169 1 1 -"28296" 4 352 2 0 1 1 1 1 100 98 394 98 2318 2 1 0 2797 4 75 296 1 0 -"28297" 4 352 3 1 1 1 1 1 100 10 100 175 16609 9 7 1 2880 4 75 75 1 1 -"28298" 4 352 3 1 1 1 1 1 100 10 175 306 3620 4 8 0 734 4 75 131 0 1 -"28299" 4 352 3 1 1 1 1 1 100 10 306 597 1832 8 2 1 2719 5 95 291 1 1 -"28300" 4 352 3 1 1 1 1 1 100 10 597 30 3756 2 1 0 959 5 95 567 1 0 -"28301" 4 352 3 1 1 1 1 1 100 10 30 53 3846 7 6 1 2245 4 75 23 1 1 -"28302" 4 352 3 1 1 1 1 1 100 10 53 13 2328 5 3 0 463 4 75 40 1 0 -"28303" 4 352 3 1 1 1 1 1 100 10 13 20 2502 3 6 0 4325 3 50 7 0 1 -"28304" 4 352 3 1 1 1 1 1 100 10 20 5 2324 6 9 1 1140 4 75 15 0 0 -"28305" 4 352 3 1 1 1 1 1 100 10 5 10 1635 1 4 0 1139 5 95 5 0 1 -"28306" 4 352 3 1 1 1 2 1 100 313 100 175 4107 2 9 0 541 4 75 75 0 1 -"28307" 4 352 3 1 1 1 2 1 100 313 175 306 4352 4 10 0 1303 4 75 131 0 1 -"28308" 4 352 3 1 1 1 2 1 100 313 306 76 1759 3 1 0 1777 4 75 230 1 0 -"28309" 4 352 3 1 1 1 2 1 100 313 76 133 3386 8 6 1 1038 4 75 57 1 1 -"28310" 4 352 3 1 1 1 2 1 100 313 133 233 1979 5 7 0 1634 4 75 100 0 1 -"28311" 4 352 3 1 1 1 2 1 100 313 233 58 3402 6 8 1 693 4 75 175 0 0 -"28312" 4 352 3 1 1 1 2 1 100 313 58 102 2586 7 5 1 661 4 75 44 1 1 -"28313" 4 352 3 1 1 1 2 1 100 313 102 179 1602 1 10 0 1684 4 75 77 0 1 -"28314" 4 352 3 1 1 1 2 1 100 313 179 313 2569 9 3 1 1053 4 75 134 1 1 -"28315" 4 352 3 1 1 1 3 1 100 337 100 175 1619 3 5 0 748 4 75 75 0 1 -"28316" 4 352 3 1 1 1 3 1 100 337 175 44 1814 8 9 1 2214 4 75 131 0 0 -"28317" 4 352 3 1 1 1 3 1 100 337 44 77 1506 2 4 0 1960 4 75 33 0 1 -"28318" 4 352 3 1 1 1 3 1 100 337 77 135 1952 6 3 1 2233 4 75 58 1 1 -"28319" 4 352 3 1 1 1 3 1 100 337 135 236 3044 7 6 1 1321 4 75 101 1 1 -"28320" 4 352 3 1 1 1 3 1 100 337 236 59 3712 4 2 0 2605 4 75 177 1 0 -"28321" 4 352 3 1 1 1 3 1 100 337 59 115 782 1 8 0 1071 5 95 56 0 1 -"28322" 4 352 3 1 1 1 3 1 100 337 115 173 2817 5 7 0 729 3 50 58 0 1 -"28323" 4 352 3 1 1 1 3 1 100 337 173 337 1244 9 5 1 1047 5 95 164 1 1 -"28324" 4 352 3 1 1 1 4 1 100 49 100 175 1625 8 7 1 897 4 75 75 1 1 -"28325" 4 352 3 1 1 1 4 1 100 49 175 306 1556 3 10 0 919 4 75 131 0 1 -"28326" 4 352 3 1 1 1 4 1 100 49 306 76 1793 7 9 1 2202 4 75 230 0 0 -"28327" 4 352 3 1 1 1 4 1 100 49 76 148 1534 9 1 1 1027 5 95 72 1 1 -"28328" 4 352 3 1 1 1 4 1 100 49 148 259 2215 2 3 0 1692 4 75 111 0 1 -"28329" 4 352 3 1 1 1 4 1 100 49 259 453 1273 1 8 0 914 4 75 194 0 1 -"28330" 4 352 3 1 1 1 4 1 100 49 453 113 1964 5 4 0 820 4 75 340 1 0 -"28331" 4 352 3 1 1 1 4 1 100 49 113 28 1627 4 2 0 740 4 75 85 1 0 -"28332" 4 352 3 1 1 1 4 1 100 49 28 49 1381 6 3 1 1834 4 75 21 1 1 -"28333" 4 352 4 0 1 0 1 1 100 26 100 150 3708 2 7 0 1082 3 50 50 0 1 -"28334" 4 352 4 0 1 0 1 1 100 26 150 263 4699 3 7 0 1625 2 75 113 0 1 -"28335" 4 352 4 0 1 0 1 1 100 26 263 513 1705 2 8 0 1630 1 95 250 0 1 -"28336" 4 352 4 0 1 0 1 1 100 26 513 26 1501 8 9 1 1411 1 95 487 0 0 -"28337" 4 352 5 1 1 0 1 1 100 4 100 195 2409 1 3 0 1868 1 95 95 0 1 -"28338" 4 352 5 1 1 0 1 1 100 4 195 341 1560 6 5 1 1451 2 75 146 1 1 -"28339" 4 352 5 1 1 0 1 1 100 4 341 665 1147 2 8 0 971 1 95 324 0 1 -"28340" 4 352 5 1 1 0 1 1 100 4 665 166 1874 8 9 1 806 2 75 499 0 0 -"28341" 4 352 5 1 1 0 1 1 100 4 166 324 3733 3 4 0 3049 1 95 158 0 1 -"28342" 4 352 5 1 1 0 1 1 100 4 324 632 3813 5 7 0 909 1 95 308 0 1 -"28343" 4 352 5 1 1 0 1 1 100 4 632 32 
3631 7 4 0 696 1 95 600 1 0 -"28344" 4 352 5 1 1 0 1 1 100 4 32 2 2548 4 1 0 1872 1 95 30 1 0 -"28345" 4 352 5 1 1 0 1 1 100 4 2 4 1724 9 6 1 816 1 95 2 1 1 -"28346" 4 352 5 1 1 0 2 0 100 0 100 195 1545 8 1 1 685 1 95 95 1 1 -"28347" 4 352 5 1 1 0 2 0 100 0 195 10 3794 6 2 0 579 1 95 185 1 0 -"28348" 4 352 5 1 1 0 2 0 100 0 10 0 981 7 9 1 948 1 95 10 0 0 -"28349" 4 352 5 1 1 0 3 0 100 0 100 195 1425 7 5 1 755 1 95 95 1 1 -"28350" 4 352 5 1 1 0 3 0 100 0 195 10 1076 2 1 0 590 1 95 185 1 0 -"28351" 4 352 5 1 1 0 3 0 100 0 10 20 1247 8 6 1 498 1 95 10 1 1 -"28352" 4 352 5 1 1 0 3 0 100 0 20 39 1319 4 7 0 494 1 95 19 0 1 -"28353" 4 352 5 1 1 0 3 0 100 0 39 76 876 3 10 0 776 1 95 37 0 1 -"28354" 4 352 5 1 1 0 3 0 100 0 76 4 1912 6 8 1 1426 1 95 72 0 0 -"28355" 4 352 5 1 1 0 3 0 100 0 4 8 1067 9 2 1 422 1 95 4 1 1 -"28356" 4 352 5 1 1 0 3 0 100 0 8 0 1271 5 3 0 463 1 95 8 1 0 -"28357" 4 352 5 1 1 0 4 1 100 27 100 195 1349 2 3 0 443 1 95 95 0 1 -"28358" 4 352 5 1 1 0 4 1 100 27 195 380 1019 7 4 1 424 1 95 185 1 1 -"28359" 4 352 5 1 1 0 4 1 100 27 380 19 817 3 1 0 335 1 95 361 1 0 -"28360" 4 352 5 1 1 0 4 1 100 27 19 37 1052 1 9 0 407 1 95 18 0 1 -"28361" 4 352 5 1 1 0 4 1 100 27 37 72 1217 8 7 1 556 1 95 35 1 1 -"28362" 4 352 5 1 1 0 4 1 100 27 72 140 879 9 2 1 563 1 95 68 1 1 -"28363" 4 352 5 1 1 0 4 1 100 27 140 273 1349 5 6 0 963 1 95 133 0 1 -"28364" 4 352 5 1 1 0 4 1 100 27 273 14 1426 6 8 1 476 1 95 259 0 0 -"28365" 4 352 5 1 1 0 4 1 100 27 14 27 883 4 7 0 1022 1 95 13 0 1 -"28366" 4 355 2 0 1 0 1 1 100 8 100 175 12851 2 7 0 3949 2 75 75 0 1 -"28367" 4 355 2 0 1 0 1 1 100 8 175 87 4694 3 7 1 1709 3 50 88 0 0 -"28368" 4 355 2 0 1 0 1 1 100 8 87 170 3177 2 8 0 1391 1 95 83 0 1 -"28369" 4 355 2 0 1 0 1 1 100 8 170 8 3707 8 9 1 944 1 95 162 0 0 -"28370" 4 355 3 1 1 0 1 1 100 6 100 195 2277 1 3 0 1062 1 95 95 0 1 -"28371" 4 355 3 1 1 0 1 1 100 6 195 380 2040 6 5 1 860 1 95 185 1 1 -"28372" 4 355 3 1 1 0 1 1 100 6 380 399 3488 2 8 0 1280 5 5 19 0 1 -"28373" 4 355 3 1 1 0 1 1 100 6 399 299 1872 8 9 1 1189 4 25 100 0 0 -"28374" 4 355 3 1 1 0 1 1 100 6 299 15 2650 3 4 1 1129 1 95 284 0 0 -"28375" 4 355 3 1 1 0 1 1 100 6 15 29 2920 5 7 0 755 1 95 14 0 1 -"28376" 4 355 3 1 1 0 1 1 100 6 29 57 1948 7 4 1 685 1 95 28 1 1 -"28377" 4 355 3 1 1 0 1 1 100 6 57 111 2242 4 1 1 388 1 95 54 1 1 -"28378" 4 355 3 1 1 0 1 1 100 6 111 6 3543 9 6 0 1068 1 95 105 1 0 -"28379" 4 355 3 1 1 0 2 0 100 0 100 195 6633 8 1 1 563 1 95 95 1 1 -"28380" 4 355 3 1 1 0 2 0 100 0 195 10 2373 6 2 0 674 1 95 185 1 0 -"28381" 4 355 3 1 1 0 2 0 100 0 10 0 1367 7 9 1 500 1 95 10 0 0 -"28382" 4 355 3 1 1 0 3 1 100 720 100 195 2811 7 5 1 1411 1 95 95 1 1 -"28383" 4 355 3 1 1 0 3 1 100 720 195 146 2396 2 1 0 1032 4 25 49 1 0 -"28384" 4 355 3 1 1 0 3 1 100 720 146 285 1252 8 6 1 701 1 95 139 1 1 -"28385" 4 355 3 1 1 0 3 1 100 720 285 214 4233 4 7 1 478 4 25 71 0 0 -"28386" 4 355 3 1 1 0 3 1 100 720 214 375 1236 3 10 0 1615 2 75 161 0 1 -"28387" 4 355 3 1 1 0 3 1 100 720 375 469 2655 6 8 0 935 4 25 94 0 1 -"28388" 4 355 3 1 1 0 3 1 100 720 469 492 3240 9 2 1 628 5 5 23 1 1 -"28389" 4 355 3 1 1 0 3 1 100 720 492 369 3683 5 3 0 522 4 25 123 1 0 -"28390" 4 355 3 1 1 0 3 1 100 720 369 720 2198 1 10 0 603 1 95 351 0 1 -"28391" 4 355 3 1 1 0 4 1 100 3311 100 195 1898 2 3 0 389 1 95 95 0 1 -"28392" 4 355 3 1 1 0 4 1 100 3311 195 380 1088 7 4 1 409 1 95 185 1 1 -"28393" 4 355 3 1 1 0 4 1 100 3311 380 665 2059 3 1 1 1358 2 75 285 1 1 -"28394" 4 355 3 1 1 0 4 1 100 3311 665 698 1555 1 9 0 487 5 5 33 0 1 -"28395" 4 355 3 1 1 0 4 1 100 3311 698 873 
2981 8 7 1 693 4 25 175 1 1 -"28396" 4 355 3 1 1 0 4 1 100 3311 873 1702 5329 9 2 1 1865 1 95 829 1 1 -"28397" 4 355 3 1 1 0 4 1 100 3311 1702 1787 4309 5 6 0 670 5 5 85 0 1 -"28398" 4 355 3 1 1 0 4 1 100 3311 1787 1698 1460 6 8 1 572 5 5 89 0 0 -"28399" 4 355 3 1 1 0 4 1 100 3311 1698 3311 3070 4 7 0 633 1 95 1613 0 1 -"28400" 4 355 4 0 1 1 1 1 100 313 100 125 8175 8 3 1 2023 2 25 25 1 1 -"28401" 4 355 4 0 1 1 1 1 100 313 125 188 5812 3 7 0 429 3 50 63 0 1 -"28402" 4 355 4 0 1 1 1 1 100 313 188 329 1814 8 2 1 622 4 75 141 1 1 -"28403" 4 355 4 0 1 1 1 1 100 313 329 313 2421 2 1 0 1336 1 5 16 1 0 -"28404" 4 355 5 1 1 1 1 1 100 786 100 195 1596 9 7 1 1366 5 95 95 1 1 -"28405" 4 355 5 1 1 1 1 1 100 786 195 341 1354 4 8 0 552 4 75 146 0 1 -"28406" 4 355 5 1 1 1 1 1 100 786 341 358 1947 8 2 1 492 1 5 17 1 1 -"28407" 4 355 5 1 1 1 1 1 100 786 358 340 2074 2 1 0 372 1 5 18 1 0 -"28408" 4 355 5 1 1 1 1 1 100 786 340 357 1104 7 6 1 1104 1 5 17 1 1 -"28409" 4 355 5 1 1 1 1 1 100 786 357 339 1689 5 3 0 566 1 5 18 1 0 -"28410" 4 355 5 1 1 1 1 1 100 786 339 424 2617 3 6 0 794 2 25 85 0 1 -"28411" 4 355 5 1 1 1 1 1 100 786 424 403 3297 6 9 1 1034 1 5 21 0 0 -"28412" 4 355 5 1 1 1 1 1 100 786 403 786 1262 1 4 0 0 5 95 383 0 1 -"28413" 4 355 5 1 1 1 2 1 100 1037 100 175 1478 2 9 0 325 4 75 75 0 1 -"28414" 4 355 5 1 1 1 2 1 100 1037 175 131 2203 4 10 1 536 2 25 44 0 0 -"28415" 4 355 5 1 1 1 2 1 100 1037 131 124 1116 3 1 0 628 1 5 7 1 0 -"28416" 4 355 5 1 1 1 2 1 100 1037 124 242 1092 8 6 1 2086 5 95 118 1 1 -"28417" 4 355 5 1 1 1 2 1 100 1037 242 230 2400 5 7 1 563 1 5 12 0 0 -"28418" 4 355 5 1 1 1 2 1 100 1037 230 218 1301 6 8 1 637 1 5 12 0 0 -"28419" 4 355 5 1 1 1 2 1 100 1037 218 273 4996 7 5 1 1700 2 25 55 1 1 -"28420" 4 355 5 1 1 1 2 1 100 1037 273 532 2360 1 10 0 0 5 95 259 0 1 -"28421" 4 355 5 1 1 1 2 1 100 1037 532 1037 1741 9 3 1 0 5 95 505 1 1 -"28422" 4 355 5 1 1 1 3 1 100 53 100 195 3014 3 5 0 861 5 95 95 0 1 -"28423" 4 355 5 1 1 1 3 1 100 53 195 10 889 8 9 1 0 5 95 185 0 0 -"28424" 4 355 5 1 1 1 3 1 100 53 10 18 1572 2 4 0 879 4 75 8 0 1 -"28425" 4 355 5 1 1 1 3 1 100 53 18 19 1490 6 3 1 585 1 5 1 1 1 -"28426" 4 355 5 1 1 1 3 1 100 53 19 20 1155 7 6 1 1801 1 5 1 1 1 -"28427" 4 355 5 1 1 1 3 1 100 53 20 15 1592 4 2 0 802 2 25 5 1 0 -"28428" 4 355 5 1 1 1 3 1 100 53 15 29 1386 1 8 0 781 5 95 14 0 1 -"28429" 4 355 5 1 1 1 3 1 100 53 29 30 1550 5 7 0 762 1 5 1 0 1 -"28430" 4 355 5 1 1 1 3 1 100 53 30 53 2395 9 5 1 1142 4 75 23 1 1 -"28431" 4 355 5 1 1 1 4 1 100 2448 100 195 2040 8 7 1 589 5 95 95 1 1 -"28432" 4 355 5 1 1 1 4 1 100 2448 195 380 1310 3 10 0 558 5 95 185 0 1 -"28433" 4 355 5 1 1 1 4 1 100 2448 380 285 1536 7 9 1 860 2 25 95 0 0 -"28434" 4 355 5 1 1 1 4 1 100 2448 285 556 1488 9 1 1 0 5 95 271 1 1 -"28435" 4 355 5 1 1 1 4 1 100 2448 556 1084 1831 2 3 0 0 5 95 528 0 1 -"28436" 4 355 5 1 1 1 4 1 100 2448 1084 2114 1653 1 8 0 0 5 95 1030 0 1 -"28437" 4 355 5 1 1 1 4 1 100 2448 2114 2220 4413 5 4 1 616 1 5 106 1 1 -"28438" 4 355 5 1 1 1 4 1 100 2448 2220 2331 3824 4 2 1 544 1 5 111 1 1 -"28439" 4 355 5 1 1 1 4 1 100 2448 2331 2448 1395 6 3 1 846 1 5 117 1 1 -"28440" 4 358 2 0 1 1 1 1 100 224 100 150 14386 8 3 1 323 3 50 50 1 1 -"28441" 4 358 2 0 1 1 1 1 100 224 150 225 6529 3 7 0 1824 3 50 75 0 1 -"28442" 4 358 2 0 1 1 1 1 100 224 225 236 2207 8 2 1 1148 1 5 11 1 1 -"28443" 4 358 2 0 1 1 1 1 100 224 236 224 1450 2 1 0 2435 1 5 12 1 0 -"28444" 4 358 3 1 1 1 1 1 100 242 100 175 2001 9 7 1 1736 4 75 75 1 1 -"28445" 4 358 3 1 1 1 1 1 100 242 175 166 4386 4 8 1 815 1 5 9 0 0 -"28446" 4 358 3 
1 1 1 1 1 100 242 166 249 1846 8 2 1 2084 3 50 83 1 1 -"28447" 4 358 3 1 1 1 1 1 100 242 249 62 2750 2 1 0 1129 4 75 187 1 0 -"28448" 4 358 3 1 1 1 1 1 100 242 62 93 2024 7 6 1 733 3 50 31 1 1 -"28449" 4 358 3 1 1 1 1 1 100 242 93 88 4976 5 3 0 1585 1 5 5 1 0 -"28450" 4 358 3 1 1 1 1 1 100 242 88 110 1818 3 6 0 2266 2 25 22 0 1 -"28451" 4 358 3 1 1 1 1 1 100 242 110 138 2131 6 9 0 675 2 25 28 0 1 -"28452" 4 358 3 1 1 1 1 1 100 242 138 242 3520 1 4 0 902 4 75 104 0 1 -"28453" 4 358 3 1 1 1 2 1 100 388 100 195 3042 2 9 0 439 5 95 95 0 1 -"28454" 4 358 3 1 1 1 2 1 100 388 195 185 3327 4 10 1 743 1 5 10 0 0 -"28455" 4 358 3 1 1 1 2 1 100 388 185 46 2888 3 1 0 629 4 75 139 1 0 -"28456" 4 358 3 1 1 1 2 1 100 388 46 81 1428 8 6 1 1438 4 75 35 1 1 -"28457" 4 358 3 1 1 1 2 1 100 388 81 85 2599 5 7 0 465 1 5 4 0 1 -"28458" 4 358 3 1 1 1 2 1 100 388 85 106 3608 6 8 0 1114 2 25 21 0 1 -"28459" 4 358 3 1 1 1 2 1 100 388 106 159 2114 7 5 1 1012 3 50 53 1 1 -"28460" 4 358 3 1 1 1 2 1 100 388 159 310 2424 1 10 0 660 5 95 151 0 1 -"28461" 4 358 3 1 1 1 2 1 100 388 310 388 1577 9 3 1 836 2 25 78 1 1 -"28462" 4 358 3 1 1 1 3 1 100 115 100 150 2414 3 5 0 2409 3 50 50 0 1 -"28463" 4 358 3 1 1 1 3 1 100 115 150 7 1451 8 9 1 1325 5 95 143 0 0 -"28464" 4 358 3 1 1 1 3 1 100 115 7 11 1379 2 4 0 1528 3 50 4 0 1 -"28465" 4 358 3 1 1 1 3 1 100 115 11 14 2244 6 3 1 4502 2 25 3 1 1 -"28466" 4 358 3 1 1 1 3 1 100 115 14 21 1301 7 6 1 773 3 50 7 1 1 -"28467" 4 358 3 1 1 1 3 1 100 115 21 32 2669 4 2 1 2260 3 50 11 1 1 -"28468" 4 358 3 1 1 1 3 1 100 115 32 56 1918 1 8 0 897 4 75 24 0 1 -"28469" 4 358 3 1 1 1 3 1 100 115 56 59 4339 5 7 0 435 1 5 3 0 1 -"28470" 4 358 3 1 1 1 3 1 100 115 59 115 1684 9 5 1 882 5 95 56 1 1 -"28471" 4 358 3 1 1 1 4 1 100 798 100 150 1659 8 7 1 594 3 50 50 1 1 -"28472" 4 358 3 1 1 1 4 1 100 798 150 225 2323 3 10 0 690 3 50 75 0 1 -"28473" 4 358 3 1 1 1 4 1 100 798 225 169 2423 7 9 1 1617 2 25 56 0 0 -"28474" 4 358 3 1 1 1 4 1 100 798 169 330 2545 9 1 1 0 5 95 161 1 1 -"28475" 4 358 3 1 1 1 4 1 100 798 330 495 1581 2 3 0 888 3 50 165 0 1 -"28476" 4 358 3 1 1 1 4 1 100 798 495 965 1683 1 8 0 0 5 95 470 0 1 -"28477" 4 358 3 1 1 1 4 1 100 798 965 1013 2849 5 4 1 878 1 5 48 1 1 -"28478" 4 358 3 1 1 1 4 1 100 798 1013 760 2810 4 2 0 658 2 25 253 1 0 -"28479" 4 358 3 1 1 1 4 1 100 798 760 798 1592 6 3 1 401 1 5 38 1 1 -"28480" 4 358 4 0 1 0 1 1 100 30 100 195 4730 2 7 0 3182 1 95 95 0 1 -"28481" 4 358 4 0 1 0 1 1 100 30 195 341 6140 3 7 0 2725 2 75 146 0 1 -"28482" 4 358 4 0 1 0 1 1 100 30 341 597 2232 2 8 0 2372 2 75 256 0 1 -"28483" 4 358 4 0 1 0 1 1 100 30 597 30 1664 8 9 1 1334 1 95 567 0 0 -"28484" 4 358 5 1 1 0 1 0 100 1 100 195 3578 1 3 0 1430 1 95 95 0 1 -"28485" 4 358 5 1 1 0 1 0 100 1 195 146 3407 6 5 0 883 4 25 49 1 0 -"28486" 4 358 5 1 1 0 1 0 100 1 146 285 2966 2 8 0 374 1 95 139 0 1 -"28487" 4 358 5 1 1 0 1 0 100 1 285 14 2105 8 9 1 299 1 95 271 0 0 -"28488" 4 358 5 1 1 0 1 0 100 1 14 27 1354 3 4 0 367 1 95 13 0 1 -"28489" 4 358 5 1 1 0 1 0 100 1 27 1 2363 5 7 1 2992 1 95 26 0 0 -"28490" 4 358 5 1 1 0 2 1 100 296 100 195 1501 8 1 1 1812 1 95 95 1 1 -"28491" 4 358 5 1 1 0 2 1 100 296 195 244 3171 6 2 1 898 4 25 49 1 1 -"28492" 4 358 5 1 1 0 2 1 100 296 244 427 3637 7 9 0 1645 2 75 183 0 1 -"28493" 4 358 5 1 1 0 2 1 100 296 427 833 1603 2 10 0 1220 1 95 406 0 1 -"28494" 4 358 5 1 1 0 2 1 100 296 833 791 2952 5 3 0 905 5 5 42 1 0 -"28495" 4 358 5 1 1 0 2 1 100 296 791 40 1964 4 2 0 740 1 95 751 1 0 -"28496" 4 358 5 1 1 0 2 1 100 296 40 78 2413 3 5 0 1088 1 95 38 0 1 -"28497" 4 358 5 1 1 0 2 1 
100 296 78 152 1482 9 4 1 305 1 95 74 1 1 -"28498" 4 358 5 1 1 0 2 1 100 296 152 296 1543 1 7 0 268 1 95 144 0 1 -"28499" 4 358 5 1 1 0 3 1 100 25 100 195 1438 7 5 1 727 1 95 95 1 1 -"28500" 4 358 5 1 1 0 3 1 100 25 195 10 1386 2 1 0 381 1 95 185 1 0 -"28501" 4 358 5 1 1 0 3 1 100 25 10 20 2336 8 6 1 263 1 95 10 1 1 -"28502" 4 358 5 1 1 0 3 1 100 25 20 15 3560 4 7 1 548 4 25 5 0 0 -"28503" 4 358 5 1 1 0 3 1 100 25 15 29 1135 3 10 0 183 1 95 14 0 1 -"28504" 4 358 5 1 1 0 3 1 100 25 29 14 1540 6 8 1 2192 3 50 15 0 0 -"28505" 4 358 5 1 1 0 3 1 100 25 14 27 1389 9 2 1 456 1 95 13 1 1 -"28506" 4 358 5 1 1 0 3 1 100 25 27 13 1735 5 3 0 1067 3 50 14 1 0 -"28507" 4 358 5 1 1 0 3 1 100 25 13 25 1174 1 10 0 251 1 95 12 0 1 -"28508" 4 358 5 1 1 0 4 1 100 6096 100 195 1707 2 3 0 959 1 95 95 0 1 -"28509" 4 358 5 1 1 0 4 1 100 6096 195 380 3702 7 4 1 1524 1 95 185 1 1 -"28510" 4 358 5 1 1 0 4 1 100 6096 380 475 2505 3 1 1 1648 4 25 95 1 1 -"28511" 4 358 5 1 1 0 4 1 100 6096 475 831 2062 1 9 0 722 2 75 356 0 1 -"28512" 4 358 5 1 1 0 4 1 100 6096 831 1454 1292 8 7 1 1381 2 75 623 1 1 -"28513" 4 358 5 1 1 0 4 1 100 6096 1454 2835 1216 9 2 1 588 1 95 1381 1 1 -"28514" 4 358 5 1 1 0 4 1 100 6096 2835 2977 3493 5 6 0 933 5 5 142 0 1 -"28515" 4 358 5 1 1 0 4 1 100 6096 2977 3126 2454 6 8 0 743 5 5 149 0 1 -"28516" 4 358 5 1 1 0 4 1 100 6096 3126 6096 1361 4 7 0 790 1 95 2970 0 1 -"28517" 4 359 2 0 1 0 1 1 100 27 100 150 6161 2 7 0 1138 3 50 50 0 1 -"28518" 4 359 2 0 1 0 1 1 100 27 150 7 10625 3 7 1 3589 1 95 143 0 0 -"28519" 4 359 2 0 1 0 1 1 100 27 7 14 2255 2 8 0 2778 1 95 7 0 1 -"28520" 4 359 2 0 1 0 1 1 100 27 14 27 3563 8 9 0 1759 1 95 13 0 1 -"28521" 4 359 3 1 1 0 1 0 100 0 100 195 3909 1 3 0 1424 1 95 95 0 1 -"28522" 4 359 3 1 1 0 1 0 100 0 195 10 2242 6 5 0 868 1 95 185 1 0 -"28523" 4 359 3 1 1 0 1 0 100 0 10 0 2632 2 8 1 992 1 95 10 0 0 -"28524" 4 359 3 1 1 0 2 0 100 1 100 195 3394 8 1 1 974 1 95 95 1 1 -"28525" 4 359 3 1 1 0 2 0 100 1 195 10 3181 6 2 0 4579 1 95 185 1 0 -"28526" 4 359 3 1 1 0 2 0 100 1 10 20 1766 7 9 0 1056 1 95 10 0 1 -"28527" 4 359 3 1 1 0 2 0 100 1 20 1 2691 2 10 1 817 1 95 19 0 0 -"28528" 4 359 3 1 1 0 3 0 100 0 100 5 3015 7 5 0 820 1 95 95 1 0 -"28529" 4 359 3 1 1 0 3 0 100 0 5 10 2603 2 1 1 3333 1 95 5 1 1 -"28530" 4 359 3 1 1 0 3 0 100 0 10 0 4203 8 6 0 1097 1 95 10 1 0 -"28531" 4 359 3 1 1 0 4 0 100 0 100 195 1946 2 3 0 819 1 95 95 0 1 -"28532" 4 359 3 1 1 0 4 0 100 0 195 10 2257 7 4 0 612 1 95 185 1 0 -"28533" 4 359 3 1 1 0 4 0 100 0 10 20 924 3 1 1 822 1 95 10 1 1 -"28534" 4 359 3 1 1 0 4 0 100 0 20 39 1260 1 9 0 814 1 95 19 0 1 -"28535" 4 359 3 1 1 0 4 0 100 0 39 76 917 8 7 1 550 1 95 37 1 1 -"28536" 4 359 3 1 1 0 4 0 100 0 76 4 713 9 2 0 1541 1 95 72 1 0 -"28537" 4 359 3 1 1 0 4 0 100 0 4 8 1665 5 6 0 1431 1 95 4 0 1 -"28538" 4 359 3 1 1 0 4 0 100 0 8 0 1816 6 8 1 927 1 95 8 0 0 -"28539" 4 359 4 0 1 1 1 1 100 124 100 150 3107 8 3 1 1165 3 50 50 1 1 -"28540" 4 359 4 0 1 1 1 1 100 124 150 158 3571 3 7 0 3639 1 5 8 0 1 -"28541" 4 359 4 0 1 1 1 1 100 124 158 166 4270 8 2 1 3697 1 5 8 1 1 -"28542" 4 359 4 0 1 1 1 1 100 124 166 124 5222 2 1 0 4107 2 25 42 1 0 -"28543" 4 359 5 1 1 1 1 1 100 359 100 105 1471 9 7 1 3494 1 5 5 1 1 -"28544" 4 359 5 1 1 1 1 1 100 359 105 184 2340 4 8 0 3187 4 75 79 0 1 -"28545" 4 359 5 1 1 1 1 1 100 359 184 175 3064 8 2 0 596 1 5 9 1 0 -"28546" 4 359 5 1 1 1 1 1 100 359 175 219 2317 2 1 1 1332 2 25 44 1 1 -"28547" 4 359 5 1 1 1 1 1 100 359 219 164 2874 7 6 0 831 2 25 55 1 0 -"28548" 4 359 5 1 1 1 1 1 100 359 164 287 3365 5 3 1 643 4 75 123 1 1 
-"28549" 4 359 5 1 1 1 1 1 100 359 287 273 2089 3 6 1 556 1 5 14 0 0 -"28550" 4 359 5 1 1 1 1 1 100 359 273 205 942 6 9 1 1063 2 25 68 0 0 -"28551" 4 359 5 1 1 1 1 1 100 359 205 359 2874 1 4 0 1671 4 75 154 0 1 -"28552" 4 359 5 1 1 1 2 1 100 20 100 25 3075 2 9 1 2664 4 75 75 0 0 -"28553" 4 359 5 1 1 1 2 1 100 20 25 49 6965 4 10 0 1352 5 95 24 0 1 -"28554" 4 359 5 1 1 1 2 1 100 20 49 51 2380 3 1 1 1273 1 5 2 1 1 -"28555" 4 359 5 1 1 1 2 1 100 20 51 13 985 8 6 0 3144 4 75 38 1 0 -"28556" 4 359 5 1 1 1 2 1 100 20 13 16 3713 5 7 0 245 2 25 3 0 1 -"28557" 4 359 5 1 1 1 2 1 100 20 16 12 1344 6 8 1 1188 2 25 4 0 0 -"28558" 4 359 5 1 1 1 2 1 100 20 12 11 1210 7 5 0 4365 1 5 1 1 0 -"28559" 4 359 5 1 1 1 2 1 100 20 11 19 1091 1 10 0 1685 4 75 8 0 1 -"28560" 4 359 5 1 1 1 2 1 100 20 19 20 3538 9 3 1 4359 1 5 1 1 1 -"28561" 4 359 5 1 1 1 3 0 100 0 100 50 1573 3 5 1 3303 3 50 50 0 0 -"28562" 4 359 5 1 1 1 3 0 100 0 50 53 704 8 9 0 3532 1 5 3 0 1 -"28563" 4 359 5 1 1 1 3 0 100 0 53 50 1745 2 4 1 3162 1 5 3 0 0 -"28564" 4 359 5 1 1 1 3 0 100 0 50 37 986 6 3 0 327 2 25 13 1 0 -"28565" 4 359 5 1 1 1 3 0 100 0 37 2 1821 7 6 0 1270 5 95 35 1 0 -"28566" 4 359 5 1 1 1 3 0 100 0 2 0 2119 4 2 0 736 4 75 2 1 0 -"28567" 4 359 5 1 1 1 4 0 100 0 100 5 5488 8 7 0 0 5 95 95 1 0 -"28568" 4 359 5 1 1 1 4 0 100 0 5 10 1537 3 10 0 0 5 95 5 0 1 -"28569" 4 359 5 1 1 1 4 0 100 0 10 9 1781 7 9 1 2118 1 5 1 0 0 -"28570" 4 359 5 1 1 1 4 0 100 0 9 2 918 9 1 0 496 4 75 7 1 0 -"28571" 4 359 5 1 1 1 4 0 100 0 2 0 2310 2 3 1 581 4 75 2 0 0 -"28572" 4 360 2 0 1 1 1 1 100 211 100 150 11652 8 3 1 391 3 50 50 1 1 -"28573" 4 360 2 0 1 1 1 1 100 211 150 188 4986 3 7 0 2314 2 25 38 0 1 -"28574" 4 360 2 0 1 1 1 1 100 211 188 282 2425 8 2 1 292 3 50 94 1 1 -"28575" 4 360 2 0 1 1 1 1 100 211 282 211 2764 2 1 0 853 2 25 71 1 0 -"28576" 4 360 3 1 1 1 1 1 100 437 100 150 3825 9 7 1 1249 3 50 50 1 1 -"28577" 4 360 3 1 1 1 1 1 100 437 150 158 2746 4 8 0 2149 1 5 8 0 1 -"28578" 4 360 3 1 1 1 1 1 100 437 158 237 2291 8 2 1 765 3 50 79 1 1 -"28579" 4 360 3 1 1 1 1 1 100 437 237 118 1733 2 1 0 992 3 50 119 1 0 -"28580" 4 360 3 1 1 1 1 1 100 437 118 207 2030 7 6 1 715 4 75 89 1 1 -"28581" 4 360 3 1 1 1 1 1 100 437 207 155 4295 5 3 0 648 2 25 52 1 0 -"28582" 4 360 3 1 1 1 1 1 100 437 155 233 1430 3 6 0 320 3 50 78 0 1 -"28583" 4 360 3 1 1 1 1 1 100 437 233 291 1934 6 9 0 624 2 25 58 0 1 -"28584" 4 360 3 1 1 1 1 1 100 437 291 437 2119 1 4 0 600 3 50 146 0 1 -"28585" 4 360 3 1 1 1 2 1 100 435 100 150 2421 2 9 0 287 3 50 50 0 1 -"28586" 4 360 3 1 1 1 2 1 100 435 150 112 2167 4 10 1 309 2 25 38 0 0 -"28587" 4 360 3 1 1 1 2 1 100 435 112 84 2012 3 1 0 1090 2 25 28 1 0 -"28588" 4 360 3 1 1 1 2 1 100 435 84 164 1867 8 6 1 1097 5 95 80 1 1 -"28589" 4 360 3 1 1 1 2 1 100 435 164 123 2218 5 7 1 1766 2 25 41 0 0 -"28590" 4 360 3 1 1 1 2 1 100 435 123 154 1471 6 8 0 639 2 25 31 0 1 -"28591" 4 360 3 1 1 1 2 1 100 435 154 193 1629 7 5 1 661 2 25 39 1 1 -"28592" 4 360 3 1 1 1 2 1 100 435 193 290 1763 1 10 0 717 3 50 97 0 1 -"28593" 4 360 3 1 1 1 2 1 100 435 290 435 1920 9 3 1 531 3 50 145 1 1 -"28594" 4 360 3 1 1 1 3 1 100 49 100 75 3701 3 5 1 323 2 25 25 0 0 -"28595" 4 360 3 1 1 1 3 1 100 49 75 4 1736 8 9 1 0 5 95 71 0 0 -"28596" 4 360 3 1 1 1 3 1 100 49 4 8 1508 2 4 0 0 5 95 4 0 1 -"28597" 4 360 3 1 1 1 3 1 100 49 8 12 2858 6 3 1 600 3 50 4 1 1 -"28598" 4 360 3 1 1 1 3 1 100 49 12 21 1504 7 6 1 735 4 75 9 1 1 -"28599" 4 360 3 1 1 1 3 1 100 49 21 10 2553 4 2 0 470 3 50 11 1 0 -"28600" 4 360 3 1 1 1 3 1 100 49 10 20 1412 1 8 0 0 5 95 10 0 1 -"28601" 4 360 3 1 1 1 3 1 
100 49 20 25 2023 5 7 0 1012 2 25 5 0 1 -"28602" 4 360 3 1 1 1 3 1 100 49 25 49 1553 9 5 1 1693 5 95 24 1 1 -"28603" 4 360 3 1 1 1 4 1 100 301 100 150 3132 8 7 1 292 3 50 50 1 1 -"28604" 4 360 3 1 1 1 4 1 100 301 150 112 3060 3 10 1 264 2 25 38 0 0 -"28605" 4 360 3 1 1 1 4 1 100 301 112 56 1193 7 9 1 558 3 50 56 0 0 -"28606" 4 360 3 1 1 1 4 1 100 301 56 98 1185 9 1 1 446 4 75 42 1 1 -"28607" 4 360 3 1 1 1 4 1 100 301 98 147 1318 2 3 0 341 3 50 49 0 1 -"28608" 4 360 3 1 1 1 4 1 100 301 147 257 2466 1 8 0 826 4 75 110 0 1 -"28609" 4 360 3 1 1 1 4 1 100 301 257 321 2984 5 4 1 367 2 25 64 1 1 -"28610" 4 360 3 1 1 1 4 1 100 301 321 241 1779 4 2 0 333 2 25 80 1 0 -"28611" 4 360 3 1 1 1 4 1 100 301 241 301 1426 6 3 1 468 2 25 60 1 1 -"28612" 4 360 4 0 1 0 1 1 100 33 100 175 9873 2 7 0 609 2 75 75 0 1 -"28613" 4 360 4 0 1 0 1 1 100 33 175 341 2577 3 7 0 4900 1 95 166 0 1 -"28614" 4 360 4 0 1 0 1 1 100 33 341 665 1177 2 8 0 838 1 95 324 0 1 -"28615" 4 360 4 0 1 0 1 1 100 33 665 33 1148 8 9 1 2555 1 95 632 0 0 -"28616" 4 360 5 1 1 0 1 1 100 144 100 195 3066 1 3 0 1102 1 95 95 0 1 -"28617" 4 360 5 1 1 0 1 1 100 144 195 244 1310 6 5 1 966 4 25 49 1 1 -"28618" 4 360 5 1 1 0 1 1 100 144 244 427 1190 2 8 0 1107 2 75 183 0 1 -"28619" 4 360 5 1 1 0 1 1 100 144 427 21 1155 8 9 1 578 1 95 406 0 0 -"28620" 4 360 5 1 1 0 1 1 100 144 21 41 1524 3 4 0 3033 1 95 20 0 1 -"28621" 4 360 5 1 1 0 1 1 100 144 41 51 2846 5 7 0 397 4 25 10 0 1 -"28622" 4 360 5 1 1 0 1 1 100 144 51 99 1263 7 4 1 1169 1 95 48 1 1 -"28623" 4 360 5 1 1 0 1 1 100 144 99 74 1686 4 1 0 997 4 25 25 1 0 -"28624" 4 360 5 1 1 0 1 1 100 144 74 144 1130 9 6 1 546 1 95 70 1 1 -"28625" 4 360 5 1 1 0 2 1 100 164 100 195 1648 8 1 1 871 1 95 95 1 1 -"28626" 4 360 5 1 1 0 2 1 100 164 195 244 1219 6 2 1 214 4 25 49 1 1 -"28627" 4 360 5 1 1 0 2 1 100 164 244 61 1466 7 9 1 1050 2 75 183 0 0 -"28628" 4 360 5 1 1 0 2 1 100 164 61 119 1821 2 10 0 476 1 95 58 0 1 -"28629" 4 360 5 1 1 0 2 1 100 164 119 89 1218 5 3 0 390 4 25 30 1 0 -"28630" 4 360 5 1 1 0 2 1 100 164 89 22 1888 4 2 0 892 2 75 67 1 0 -"28631" 4 360 5 1 1 0 2 1 100 164 22 43 1318 3 5 0 285 1 95 21 0 1 -"28632" 4 360 5 1 1 0 2 1 100 164 43 84 1396 9 4 1 304 1 95 41 1 1 -"28633" 4 360 5 1 1 0 2 1 100 164 84 164 830 1 7 0 292 1 95 80 0 1 -"28634" 4 360 5 1 1 0 3 1 100 127 100 195 1098 7 5 1 252 1 95 95 1 1 -"28635" 4 360 5 1 1 0 3 1 100 127 195 10 1061 2 1 0 584 1 95 185 1 0 -"28636" 4 360 5 1 1 0 3 1 100 127 10 20 914 8 6 1 236 1 95 10 1 1 -"28637" 4 360 5 1 1 0 3 1 100 127 20 39 1190 4 7 0 315 1 95 19 0 1 -"28638" 4 360 5 1 1 0 3 1 100 127 39 76 921 3 10 0 307 1 95 37 0 1 -"28639" 4 360 5 1 1 0 3 1 100 127 76 19 881 6 8 1 325 2 75 57 0 0 -"28640" 4 360 5 1 1 0 3 1 100 127 19 37 1107 9 2 1 747 1 95 18 1 1 -"28641" 4 360 5 1 1 0 3 1 100 127 37 65 1725 5 3 1 1073 2 75 28 1 1 -"28642" 4 360 5 1 1 0 3 1 100 127 65 127 1115 1 10 0 884 1 95 62 0 1 -"28643" 4 360 5 1 1 0 4 1 100 64 100 195 1041 2 3 0 685 1 95 95 0 1 -"28644" 4 360 5 1 1 0 4 1 100 64 195 380 890 7 4 1 669 1 95 185 1 1 -"28645" 4 360 5 1 1 0 4 1 100 64 380 95 1011 3 1 0 1025 2 75 285 1 0 -"28646" 4 360 5 1 1 0 4 1 100 64 95 185 677 1 9 0 306 1 95 90 0 1 -"28647" 4 360 5 1 1 0 4 1 100 64 185 361 865 8 7 1 333 1 95 176 1 1 -"28648" 4 360 5 1 1 0 4 1 100 64 361 704 803 9 2 1 386 1 95 343 1 1 -"28649" 4 360 5 1 1 0 4 1 100 64 704 669 1292 5 6 1 333 5 5 35 0 0 -"28650" 4 360 5 1 1 0 4 1 100 64 669 33 971 6 8 1 1354 1 95 636 0 0 -"28651" 4 360 5 1 1 0 4 1 100 64 33 64 847 4 7 0 3108 1 95 31 0 1 -"28652" 4 361 2 0 1 1 1 1 100 246 100 150 4747 8 3 1 
1735 3 50 50 1 1 -"28653" 4 361 2 0 1 1 1 1 100 246 150 188 13778 3 7 0 56 2 25 38 0 1 -"28654" 4 361 2 0 1 1 1 1 100 246 188 197 2674 8 2 1 3600 1 5 9 1 1 -"28655" 4 361 2 0 1 1 1 1 100 246 197 246 4690 2 1 1 3480 2 25 49 1 1 -"28656" 4 361 3 1 1 1 1 1 100 306 100 105 9803 9 7 1 1150 1 5 5 1 1 -"28657" 4 361 3 1 1 1 1 1 100 306 105 131 1636 4 8 0 780 2 25 26 0 1 -"28658" 4 361 3 1 1 1 1 1 100 306 131 164 4092 8 2 1 663 2 25 33 1 1 -"28659" 4 361 3 1 1 1 1 1 100 306 164 156 4036 2 1 0 1018 1 5 8 1 0 -"28660" 4 361 3 1 1 1 1 1 100 306 156 234 2303 7 6 1 647 3 50 78 1 1 -"28661" 4 361 3 1 1 1 1 1 100 306 234 246 3046 5 3 1 715 1 5 12 1 1 -"28662" 4 361 3 1 1 1 1 1 100 306 246 234 2910 3 6 1 514 1 5 12 0 0 -"28663" 4 361 3 1 1 1 1 1 100 306 234 175 1758 6 9 1 544 2 25 59 0 0 -"28664" 4 361 3 1 1 1 1 1 100 306 175 306 1396 1 4 0 847 4 75 131 0 1 -"28665" 4 361 3 1 1 1 2 1 100 425 100 175 13340 2 9 0 535 4 75 75 0 1 -"28666" 4 361 3 1 1 1 2 1 100 425 175 219 2018 4 10 0 528 2 25 44 0 1 -"28667" 4 361 3 1 1 1 2 1 100 425 219 208 3620 3 1 0 645 1 5 11 1 0 -"28668" 4 361 3 1 1 1 2 1 100 425 208 260 1372 8 6 1 448 2 25 52 1 1 -"28669" 4 361 3 1 1 1 2 1 100 425 260 273 2720 5 7 0 468 1 5 13 0 1 -"28670" 4 361 3 1 1 1 2 1 100 425 273 259 2699 6 8 1 448 1 5 14 0 0 -"28671" 4 361 3 1 1 1 2 1 100 425 259 324 1776 7 5 1 1682 2 25 65 1 1 -"28672" 4 361 3 1 1 1 2 1 100 425 324 405 1718 1 10 0 667 2 25 81 0 1 -"28673" 4 361 3 1 1 1 2 1 100 425 405 425 1784 9 3 1 665 1 5 20 1 1 -"28674" 4 361 3 1 1 1 3 1 100 4 100 125 1462 3 5 0 1558 2 25 25 0 1 -"28675" 4 361 3 1 1 1 3 1 100 4 125 94 1769 8 9 1 1755 2 25 31 0 0 -"28676" 4 361 3 1 1 1 3 1 100 4 94 183 1549 2 4 0 0 5 95 89 0 1 -"28677" 4 361 3 1 1 1 3 1 100 4 183 174 4409 6 3 0 356 1 5 9 1 0 -"28678" 4 361 3 1 1 1 3 1 100 4 174 305 2496 7 6 1 559 4 75 131 1 1 -"28679" 4 361 3 1 1 1 3 1 100 4 305 152 1954 4 2 0 659 3 50 153 1 0 -"28680" 4 361 3 1 1 1 3 1 100 4 152 296 4401 1 8 0 0 5 95 144 0 1 -"28681" 4 361 3 1 1 1 3 1 100 4 296 74 2405 5 7 1 1017 4 75 222 0 0 -"28682" 4 361 3 1 1 1 3 1 100 4 74 4 3342 9 5 0 0 5 95 70 1 0 -"28683" 4 361 3 1 1 1 4 1 100 403 100 175 1581 8 7 1 928 4 75 75 1 1 -"28684" 4 361 3 1 1 1 4 1 100 403 175 219 1697 3 10 0 638 2 25 44 0 1 -"28685" 4 361 3 1 1 1 4 1 100 403 219 164 2300 7 9 1 925 2 25 55 0 0 -"28686" 4 361 3 1 1 1 4 1 100 403 164 205 1518 9 1 1 811 2 25 41 1 1 -"28687" 4 361 3 1 1 1 4 1 100 403 205 308 3402 2 3 0 438 3 50 103 0 1 -"28688" 4 361 3 1 1 1 4 1 100 403 308 385 1910 1 8 0 560 2 25 77 0 1 -"28689" 4 361 3 1 1 1 4 1 100 403 385 404 2587 5 4 1 525 1 5 19 1 1 -"28690" 4 361 3 1 1 1 4 1 100 403 404 384 1785 4 2 0 450 1 5 20 1 0 -"28691" 4 361 3 1 1 1 4 1 100 403 384 403 1978 6 3 1 498 1 5 19 1 1 -"28692" 4 361 4 0 1 0 1 1 100 11 100 150 4653 2 7 0 1238 3 50 50 0 1 -"28693" 4 361 4 0 1 0 1 1 100 11 150 112 5424 3 7 1 628 4 25 38 0 0 -"28694" 4 361 4 0 1 0 1 1 100 11 112 218 1708 2 8 0 1152 1 95 106 0 1 -"28695" 4 361 4 0 1 0 1 1 100 11 218 11 3286 8 9 1 1031 1 95 207 0 0 -"28696" 4 361 5 1 1 0 1 1 100 332 100 195 3648 1 3 0 496 1 95 95 0 1 -"28697" 4 361 5 1 1 0 1 1 100 332 195 244 1606 6 5 1 1399 4 25 49 1 1 -"28698" 4 361 5 1 1 0 1 1 100 332 244 476 1346 2 8 0 408 1 95 232 0 1 -"28699" 4 361 5 1 1 0 1 1 100 332 476 24 2129 8 9 1 505 1 95 452 0 0 -"28700" 4 361 5 1 1 0 1 1 100 332 24 47 5814 3 4 0 423 1 95 23 0 1 -"28701" 4 361 5 1 1 0 1 1 100 332 47 92 1416 5 7 0 563 1 95 45 0 1 -"28702" 4 361 5 1 1 0 1 1 100 332 92 179 1933 7 4 1 487 1 95 87 1 1 -"28703" 4 361 5 1 1 0 1 1 100 332 179 170 1253 4 1 0 0 5 5 9 1 0 
-"28704" 4 361 5 1 1 0 1 1 100 332 170 332 1974 9 6 1 550 1 95 162 1 1 -"28705" 4 361 5 1 1 0 2 1 100 1070 100 195 1459 8 1 1 611 1 95 95 1 1 -"28706" 4 361 5 1 1 0 2 1 100 1070 195 380 2859 6 2 1 513 1 95 185 1 1 -"28707" 4 361 5 1 1 0 2 1 100 1070 380 399 3326 7 9 0 0 5 5 19 0 1 -"28708" 4 361 5 1 1 0 2 1 100 1070 399 419 2677 2 10 0 0 5 5 20 0 1 -"28709" 4 361 5 1 1 0 2 1 100 1070 419 440 2283 5 3 1 0 5 5 21 1 1 -"28710" 4 361 5 1 1 0 2 1 100 1070 440 418 3772 4 2 0 0 5 5 22 1 0 -"28711" 4 361 5 1 1 0 2 1 100 1070 418 815 1726 3 5 0 546 1 95 397 0 1 -"28712" 4 361 5 1 1 0 2 1 100 1070 815 1019 2773 9 4 1 2119 4 25 204 1 1 -"28713" 4 361 5 1 1 0 2 1 100 1070 1019 1070 3247 1 7 0 0 5 5 51 0 1 -"28714" 4 361 5 1 1 0 3 0 100 0 100 195 2105 7 5 1 475 1 95 95 1 1 -"28715" 4 361 5 1 1 0 3 0 100 0 195 10 1553 2 1 0 501 1 95 185 1 0 -"28716" 4 361 5 1 1 0 3 0 100 0 10 20 1778 8 6 1 611 1 95 10 1 1 -"28717" 4 361 5 1 1 0 3 0 100 0 20 39 1531 4 7 0 528 1 95 19 0 1 -"28718" 4 361 5 1 1 0 3 0 100 0 39 76 2668 3 10 0 779 1 95 37 0 1 -"28719" 4 361 5 1 1 0 3 0 100 0 76 4 1787 6 8 1 876 1 95 72 0 0 -"28720" 4 361 5 1 1 0 3 0 100 0 4 0 3103 9 2 0 497 1 95 4 1 0 -"28721" 4 361 5 1 1 0 4 1 100 1709 100 195 1939 2 3 0 1268 1 95 95 0 1 -"28722" 4 361 5 1 1 0 4 1 100 1709 195 293 2861 7 4 1 1843 3 50 98 1 1 -"28723" 4 361 5 1 1 0 4 1 100 1709 293 220 1585 3 1 0 1836 4 25 73 1 0 -"28724" 4 361 5 1 1 0 4 1 100 1709 220 429 2066 1 9 0 565 1 95 209 0 1 -"28725" 4 361 5 1 1 0 4 1 100 1709 429 837 1503 8 7 1 637 1 95 408 1 1 -"28726" 4 361 5 1 1 0 4 1 100 1709 837 1632 2052 9 2 1 553 1 95 795 1 1 -"28727" 4 361 5 1 1 0 4 1 100 1709 1632 1714 3539 5 6 0 0 5 5 82 0 1 -"28728" 4 361 5 1 1 0 4 1 100 1709 1714 1628 2280 6 8 1 0 5 5 86 0 0 -"28729" 4 361 5 1 1 0 4 1 100 1709 1628 1709 1897 4 7 0 0 5 5 81 0 1 -"28730" 4 366 2 0 1 1 1 1 100 223 100 150 7778 8 3 1 1246 3 50 50 1 1 -"28731" 4 366 2 0 1 1 1 1 100 223 150 188 12024 3 7 0 1225 2 25 38 0 1 -"28732" 4 366 2 0 1 1 1 1 100 223 188 235 3948 8 2 1 973 2 25 47 1 1 -"28733" 4 366 2 0 1 1 1 1 100 223 235 223 2324 2 1 0 1395 1 5 12 1 0 -"28734" 4 366 3 1 1 1 1 1 100 414 100 150 4358 9 7 1 764 3 50 50 1 1 -"28735" 4 366 3 1 1 1 1 1 100 414 150 225 4037 4 8 0 763 3 50 75 0 1 -"28736" 4 366 3 1 1 1 1 1 100 414 225 281 3340 8 2 1 1627 2 25 56 1 1 -"28737" 4 366 3 1 1 1 1 1 100 414 281 295 4011 2 1 1 1241 1 5 14 1 1 -"28738" 4 366 3 1 1 1 1 1 100 414 295 310 1811 7 6 1 578 1 5 15 1 1 -"28739" 4 366 3 1 1 1 1 1 100 414 310 388 4132 5 3 1 989 2 25 78 1 1 -"28740" 4 366 3 1 1 1 1 1 100 414 388 582 2275 3 6 0 1108 3 50 194 0 1 -"28741" 4 366 3 1 1 1 1 1 100 414 582 436 1836 6 9 1 1579 2 25 146 0 0 -"28742" 4 366 3 1 1 1 1 1 100 414 436 414 3141 1 4 1 1309 1 5 22 0 0 -"28743" 4 366 3 1 1 1 2 1 100 109 100 125 2238 2 9 0 1393 2 25 25 0 1 -"28744" 4 366 3 1 1 1 2 1 100 109 125 94 2860 4 10 1 1226 2 25 31 0 0 -"28745" 4 366 3 1 1 1 2 1 100 109 94 70 2788 3 1 0 3093 2 25 24 1 0 -"28746" 4 366 3 1 1 1 2 1 100 109 70 88 1671 8 6 1 869 2 25 18 1 1 -"28747" 4 366 3 1 1 1 2 1 100 109 88 66 4817 5 7 1 953 2 25 22 0 0 -"28748" 4 366 3 1 1 1 2 1 100 109 66 33 3537 6 8 1 503 3 50 33 0 0 -"28749" 4 366 3 1 1 1 2 1 100 109 33 41 2676 7 5 1 1178 2 25 8 1 1 -"28750" 4 366 3 1 1 1 2 1 100 109 41 62 4479 1 10 0 624 3 50 21 0 1 -"28751" 4 366 3 1 1 1 2 1 100 109 62 109 1439 9 3 1 576 4 75 47 1 1 -"28752" 4 366 3 1 1 1 3 1 100 164 100 75 3570 3 5 1 454 2 25 25 0 0 -"28753" 4 366 3 1 1 1 3 1 100 164 75 94 2141 8 9 0 588 2 25 19 0 1 -"28754" 4 366 3 1 1 1 3 1 100 164 94 118 1589 2 4 0 503 2 25 24 0 1 
-"28755" 4 366 3 1 1 1 3 1 100 164 118 177 1756 6 3 1 598 3 50 59 1 1 -"28756" 4 366 3 1 1 1 3 1 100 164 177 221 1719 7 6 1 443 2 25 44 1 1 -"28757" 4 366 3 1 1 1 3 1 100 164 221 210 3283 4 2 0 588 1 5 11 1 0 -"28758" 4 366 3 1 1 1 3 1 100 164 210 263 2253 1 8 0 696 2 25 53 0 1 -"28759" 4 366 3 1 1 1 3 1 100 164 263 131 1636 5 7 1 388 3 50 132 0 0 -"28760" 4 366 3 1 1 1 3 1 100 164 131 164 1783 9 5 1 432 2 25 33 1 1 -"28761" 4 366 3 1 1 1 4 1 100 93 100 125 1106 8 7 1 579 2 25 25 1 1 -"28762" 4 366 3 1 1 1 4 1 100 93 125 156 2700 3 10 0 906 2 25 31 0 1 -"28763" 4 366 3 1 1 1 4 1 100 93 156 117 2292 7 9 1 404 2 25 39 0 0 -"28764" 4 366 3 1 1 1 4 1 100 93 117 146 1542 9 1 1 850 2 25 29 1 1 -"28765" 4 366 3 1 1 1 4 1 100 93 146 139 5759 2 3 1 995 1 5 7 0 0 -"28766" 4 366 3 1 1 1 4 1 100 93 139 132 3568 1 8 1 396 1 5 7 0 0 -"28767" 4 366 3 1 1 1 4 1 100 93 132 165 2359 5 4 1 508 2 25 33 1 1 -"28768" 4 366 3 1 1 1 4 1 100 93 165 124 2713 4 2 0 509 2 25 41 1 0 -"28769" 4 366 3 1 1 1 4 1 100 93 124 93 2514 6 3 0 720 2 25 31 1 0 -"28770" 4 366 4 0 1 0 1 1 100 16 100 150 4039 2 7 0 2084 3 50 50 0 1 -"28771" 4 366 4 0 1 0 1 1 100 16 150 263 2406 3 7 0 1401 2 75 113 0 1 -"28772" 4 366 4 0 1 0 1 1 100 16 263 66 3004 2 8 1 738 2 75 197 0 0 -"28773" 4 366 4 0 1 0 1 1 100 16 66 16 1772 8 9 1 861 2 75 50 0 0 -"28774" 4 366 5 1 1 0 1 1 100 14 100 50 3580 1 3 1 631 3 50 50 0 0 -"28775" 4 366 5 1 1 0 1 1 100 14 50 75 1479 6 5 1 1278 3 50 25 1 1 -"28776" 4 366 5 1 1 0 1 1 100 14 75 37 3774 2 8 1 728 3 50 38 0 0 -"28777" 4 366 5 1 1 0 1 1 100 14 37 9 1019 8 9 1 408 2 75 28 0 0 -"28778" 4 366 5 1 1 0 1 1 100 14 9 16 1946 3 4 0 811 2 75 7 0 1 -"28779" 4 366 5 1 1 0 1 1 100 14 16 8 1569 5 7 1 1117 3 50 8 0 0 -"28780" 4 366 5 1 1 0 1 1 100 14 8 16 1429 7 4 1 1527 1 95 8 1 1 -"28781" 4 366 5 1 1 0 1 1 100 14 16 8 1419 4 1 0 661 3 50 8 1 0 -"28782" 4 366 5 1 1 0 1 1 100 14 8 14 963 9 6 1 787 2 75 6 1 1 -"28783" 4 366 5 1 1 0 2 1 100 1694 100 175 2277 8 1 1 606 2 75 75 1 1 -"28784" 4 366 5 1 1 0 2 1 100 1694 175 306 2726 6 2 1 723 2 75 131 1 1 -"28785" 4 366 5 1 1 0 2 1 100 1694 306 459 3589 7 9 0 672 3 50 153 0 1 -"28786" 4 366 5 1 1 0 2 1 100 1694 459 803 2125 2 10 0 1093 2 75 344 0 1 -"28787" 4 366 5 1 1 0 2 1 100 1694 803 1205 2166 5 3 1 1029 3 50 402 1 1 -"28788" 4 366 5 1 1 0 2 1 100 1694 1205 602 2001 4 2 0 688 3 50 603 1 0 -"28789" 4 366 5 1 1 0 2 1 100 1694 602 903 1612 3 5 0 525 3 50 301 0 1 -"28790" 4 366 5 1 1 0 2 1 100 1694 903 1129 1609 9 4 1 522 4 25 226 1 1 -"28791" 4 366 5 1 1 0 2 1 100 1694 1129 1694 3343 1 7 0 669 3 50 565 0 1 -"28792" 4 366 5 1 1 0 3 1 100 19 100 175 1775 7 5 1 888 2 75 75 1 1 -"28793" 4 366 5 1 1 0 3 1 100 19 175 44 1179 2 1 0 921 2 75 131 1 0 -"28794" 4 366 5 1 1 0 3 1 100 19 44 66 1474 8 6 1 794 3 50 22 1 1 -"28795" 4 366 5 1 1 0 3 1 100 19 66 116 2281 4 7 0 834 2 75 50 0 1 -"28796" 4 366 5 1 1 0 3 1 100 19 116 203 3528 3 10 0 798 2 75 87 0 1 -"28797" 4 366 5 1 1 0 3 1 100 19 203 10 1293 6 8 1 1373 1 95 193 0 0 -"28798" 4 366 5 1 1 0 3 1 100 19 10 20 2954 9 2 1 595 1 95 10 1 1 -"28799" 4 366 5 1 1 0 3 1 100 19 20 39 1419 5 3 1 741 1 95 19 1 1 -"28800" 4 366 5 1 1 0 3 1 100 19 39 19 1679 1 10 1 829 3 50 20 0 0 -"28801" 4 366 5 1 1 0 4 1 100 0 100 50 1479 2 3 1 838 3 50 50 0 0 -"28802" 4 366 5 1 1 0 4 1 100 0 50 88 1219 7 4 1 639 2 75 38 1 1 -"28803" 4 366 5 1 1 0 4 1 100 0 88 4 1117 3 1 0 1661 1 95 84 1 0 -"28804" 4 366 5 1 1 0 4 1 100 0 4 8 1756 1 9 0 613 1 95 4 0 1 -"28805" 4 366 5 1 1 0 4 1 100 0 8 16 1469 8 7 1 1061 1 95 8 1 1 -"28806" 4 366 5 1 1 0 4 1 100 0 16 31 6719 9 2 
1 693 1 95 15 1 1 -"28807" 4 366 5 1 1 0 4 1 100 0 31 60 1377 5 6 0 845 1 95 29 0 1 -"28808" 4 366 5 1 1 0 4 1 100 0 60 3 1250 6 8 1 908 1 95 57 0 0 -"28809" 4 366 5 1 1 0 4 1 100 0 3 0 2358 4 7 1 1083 1 95 3 0 0 -"28810" 4 368 2 0 1 1 1 1 100 211 100 150 4648 8 3 1 1576 3 50 50 1 1 -"28811" 4 368 2 0 1 1 1 1 100 211 150 225 10107 3 7 0 1262 3 50 75 0 1 -"28812" 4 368 2 0 1 1 1 1 100 211 225 281 4616 8 2 1 1137 2 25 56 1 1 -"28813" 4 368 2 0 1 1 1 1 100 211 281 211 3705 2 1 0 775 2 25 70 1 0 -"28814" 4 368 3 1 1 1 1 0 100 1 100 175 11041 9 7 1 3330 4 75 75 1 1 -"28815" 4 368 3 1 1 1 1 0 100 1 175 87 4769 4 8 1 1084 3 50 88 0 0 -"28816" 4 368 3 1 1 1 1 0 100 1 87 109 3157 8 2 1 1444 2 25 22 1 1 -"28817" 4 368 3 1 1 1 1 0 100 1 109 27 3045 2 1 0 698 4 75 82 1 0 -"28818" 4 368 3 1 1 1 1 0 100 1 27 47 5366 7 6 1 541 4 75 20 1 1 -"28819" 4 368 3 1 1 1 1 0 100 1 47 12 2401 5 3 0 1708 4 75 35 1 0 -"28820" 4 368 3 1 1 1 1 0 100 1 12 21 8985 3 6 0 1115 4 75 9 0 1 -"28821" 4 368 3 1 1 1 1 0 100 1 21 1 2683 6 9 1 2185 5 95 20 0 0 -"28822" 4 368 3 1 1 1 2 0 100 1 100 195 3838 2 9 0 654 5 95 95 0 1 -"28823" 4 368 3 1 1 1 2 0 100 1 195 341 3164 4 10 0 1596 4 75 146 0 1 -"28824" 4 368 3 1 1 1 2 0 100 1 341 85 3661 3 1 0 1101 4 75 256 1 0 -"28825" 4 368 3 1 1 1 2 0 100 1 85 149 4166 8 6 1 1221 4 75 64 1 1 -"28826" 4 368 3 1 1 1 2 0 100 1 149 37 3457 5 7 1 954 4 75 112 0 0 -"28827" 4 368 3 1 1 1 2 0 100 1 37 2 3042 6 8 1 1192 5 95 35 0 0 -"28828" 4 368 3 1 1 1 2 0 100 1 2 1 9732 7 5 0 1155 3 50 1 1 0 -"28829" 4 368 3 1 1 1 3 0 100 1 100 195 2969 3 5 0 458 5 95 95 0 1 -"28830" 4 368 3 1 1 1 3 0 100 1 195 10 2865 8 9 1 1503 5 95 185 0 0 -"28831" 4 368 3 1 1 1 3 0 100 1 10 2 5773 2 4 1 1552 4 75 8 0 0 -"28832" 4 368 3 1 1 1 3 0 100 1 2 1 1779 6 3 0 396 2 25 1 1 0 -"28833" 4 368 3 1 1 1 4 1 100 1 100 175 7746 8 7 1 1480 4 75 75 1 1 -"28834" 4 368 3 1 1 1 4 1 100 1 175 131 2790 3 10 1 815 2 25 44 0 0 -"28835" 4 368 3 1 1 1 4 1 100 1 131 33 2174 7 9 1 1157 4 75 98 0 0 -"28836" 4 368 3 1 1 1 4 1 100 1 33 50 1963 9 1 1 1692 3 50 17 1 1 -"28837" 4 368 3 1 1 1 4 1 100 1 50 98 3313 2 3 0 1017 5 95 48 0 1 -"28838" 4 368 3 1 1 1 4 1 100 1 98 191 1806 1 8 0 1222 5 95 93 0 1 -"28839" 4 368 3 1 1 1 4 1 100 1 191 48 2553 5 4 0 1302 4 75 143 1 0 -"28840" 4 368 3 1 1 1 4 1 100 1 48 2 2533 4 2 0 0 5 95 46 1 0 -"28841" 4 368 3 1 1 1 4 1 100 1 2 1 2178 6 3 0 648 2 25 1 1 0 -"28842" 4 368 4 0 1 0 1 1 100 7 100 150 2826 2 7 0 370 3 50 50 0 1 -"28843" 4 368 4 0 1 0 1 1 100 7 150 75 2525 3 7 1 1035 3 50 75 0 0 -"28844" 4 368 4 0 1 0 1 1 100 7 75 131 5183 2 8 0 1334 2 75 56 0 1 -"28845" 4 368 4 0 1 0 1 1 100 7 131 7 3307 8 9 1 772 1 95 124 0 0 -"28846" 4 368 5 1 1 0 1 1 100 4329 100 195 8504 1 3 0 2129 1 95 95 0 1 -"28847" 4 368 5 1 1 0 1 1 100 4329 195 380 2625 6 5 1 1546 1 95 185 1 1 -"28848" 4 368 5 1 1 0 1 1 100 4329 380 570 1429 2 8 0 559 3 50 190 0 1 -"28849" 4 368 5 1 1 0 1 1 100 4329 570 285 2290 8 9 1 635 3 50 285 0 0 -"28850" 4 368 5 1 1 0 1 1 100 4329 285 556 1210 3 4 0 2047 1 95 271 0 1 -"28851" 4 368 5 1 1 0 1 1 100 4329 556 1084 2067 5 7 0 934 1 95 528 0 1 -"28852" 4 368 5 1 1 0 1 1 100 4329 1084 2114 2463 7 4 1 1407 1 95 1030 1 1 -"28853" 4 368 5 1 1 0 1 1 100 4329 2114 2220 2282 4 1 1 1275 5 5 106 1 1 -"28854" 4 368 5 1 1 0 1 1 100 4329 2220 4329 2412 9 6 1 879 1 95 2109 1 1 -"28855" 4 368 5 1 1 0 2 0 100 0 100 195 2045 8 1 1 1545 1 95 95 1 1 -"28856" 4 368 5 1 1 0 2 0 100 0 195 380 3813 6 2 1 965 1 95 185 1 1 -"28857" 4 368 5 1 1 0 2 0 100 0 380 19 2876 7 9 1 2881 1 95 361 0 0 -"28858" 4 368 5 1 1 0 2 0 100 0 19 37 
2405 2 10 0 1161 1 95 18 0 1 -"28859" 4 368 5 1 1 0 2 0 100 0 37 9 1430 5 3 0 1056 2 75 28 1 0 -"28860" 4 368 5 1 1 0 2 0 100 0 9 0 1563 4 2 0 1111 1 95 9 1 0 -"28861" 4 368 5 1 1 0 3 1 100 1100 100 195 2897 7 5 1 1405 1 95 95 1 1 -"28862" 4 368 5 1 1 0 3 1 100 1100 195 10 1647 2 1 0 1142 1 95 185 1 0 -"28863" 4 368 5 1 1 0 3 1 100 1100 10 20 1893 8 6 1 1968 1 95 10 1 1 -"28864" 4 368 5 1 1 0 3 1 100 1100 20 39 2096 4 7 0 857 1 95 19 0 1 -"28865" 4 368 5 1 1 0 3 1 100 1100 39 76 1813 3 10 0 805 1 95 37 0 1 -"28866" 4 368 5 1 1 0 3 1 100 1100 76 148 2723 6 8 0 423 1 95 72 0 1 -"28867" 4 368 5 1 1 0 3 1 100 1100 148 289 1144 9 2 1 560 1 95 141 1 1 -"28868" 4 368 5 1 1 0 3 1 100 1100 289 564 1210 5 3 1 782 1 95 275 1 1 -"28869" 4 368 5 1 1 0 3 1 100 1100 564 1100 1230 1 10 0 866 1 95 536 0 1 -"28870" 4 368 5 1 1 0 4 0 100 0 100 195 1873 2 3 0 1325 1 95 95 0 1 -"28871" 4 368 5 1 1 0 4 0 100 0 195 97 1619 7 4 0 796 3 50 98 1 0 -"28872" 4 368 5 1 1 0 4 0 100 0 97 5 1995 3 1 0 570 1 95 92 1 0 -"28873" 4 368 5 1 1 0 4 0 100 0 5 0 1876 1 9 1 336 1 95 5 0 0 -"28874" 4 374 2 0 1 1 1 1 100 123 100 125 8561 8 3 1 2615 2 25 25 1 1 -"28875" 4 374 2 0 1 1 1 1 100 123 125 156 8872 3 7 0 1249 2 25 31 0 1 -"28876" 4 374 2 0 1 1 1 1 100 123 156 164 2761 8 2 1 1284 1 5 8 1 1 -"28877" 4 374 2 0 1 1 1 1 100 123 164 123 4948 2 1 0 462 2 25 41 1 0 -"28878" 4 374 3 1 1 1 1 1 100 420 100 175 3855 9 7 1 973 4 75 75 1 1 -"28879" 4 374 3 1 1 1 1 1 100 420 175 166 3387 4 8 1 959 1 5 9 0 0 -"28880" 4 374 3 1 1 1 1 1 100 420 166 208 6727 8 2 1 1804 2 25 42 1 1 -"28881" 4 374 3 1 1 1 1 1 100 420 208 198 2499 2 1 0 978 1 5 10 1 0 -"28882" 4 374 3 1 1 1 1 1 100 420 198 248 2120 7 6 1 329 2 25 50 1 1 -"28883" 4 374 3 1 1 1 1 1 100 420 248 236 2895 5 3 0 363 1 5 12 1 0 -"28884" 4 374 3 1 1 1 1 1 100 420 236 295 2007 3 6 0 1189 2 25 59 0 1 -"28885" 4 374 3 1 1 1 1 1 100 420 295 280 4234 6 9 1 598 1 5 15 0 0 -"28886" 4 374 3 1 1 1 1 1 100 420 280 420 1831 1 4 0 1014 3 50 140 0 1 -"28887" 4 374 3 1 1 1 2 1 100 396 100 150 3904 2 9 0 300 3 50 50 0 1 -"28888" 4 374 3 1 1 1 2 1 100 396 150 142 2011 4 10 1 593 1 5 8 0 0 -"28889" 4 374 3 1 1 1 2 1 100 396 142 135 1977 3 1 0 761 1 5 7 1 0 -"28890" 4 374 3 1 1 1 2 1 100 396 135 169 1226 8 6 1 453 2 25 34 1 1 -"28891" 4 374 3 1 1 1 2 1 100 396 169 177 1844 5 7 0 290 1 5 8 0 1 -"28892" 4 374 3 1 1 1 2 1 100 396 177 168 2057 6 8 1 940 1 5 9 0 0 -"28893" 4 374 3 1 1 1 2 1 100 396 168 176 3188 7 5 1 394 1 5 8 1 1 -"28894" 4 374 3 1 1 1 2 1 100 396 176 264 1340 1 10 0 346 3 50 88 0 1 -"28895" 4 374 3 1 1 1 2 1 100 396 264 396 1518 9 3 1 284 3 50 132 1 1 -"28896" 4 374 3 1 1 1 3 1 100 306 100 95 2237 3 5 1 976 1 5 5 0 0 -"28897" 4 374 3 1 1 1 3 1 100 306 95 47 1398 8 9 1 298 3 50 48 0 0 -"28898" 4 374 3 1 1 1 3 1 100 306 47 59 3449 2 4 0 763 2 25 12 0 1 -"28899" 4 374 3 1 1 1 3 1 100 306 59 89 2645 6 3 1 396 3 50 30 1 1 -"28900" 4 374 3 1 1 1 3 1 100 306 89 111 1479 7 6 1 2767 2 25 22 1 1 -"28901" 4 374 3 1 1 1 3 1 100 306 111 105 5385 4 2 0 325 1 5 6 1 0 -"28902" 4 374 3 1 1 1 3 1 100 306 105 184 1419 1 8 0 485 4 75 79 0 1 -"28903" 4 374 3 1 1 1 3 1 100 306 184 175 2340 5 7 1 641 1 5 9 0 0 -"28904" 4 374 3 1 1 1 3 1 100 306 175 306 1408 9 5 1 979 4 75 131 1 1 -"28905" 4 374 3 1 1 1 4 1 100 125 100 105 1930 8 7 1 292 1 5 5 1 1 -"28906" 4 374 3 1 1 1 4 1 100 125 105 110 1187 3 10 0 320 1 5 5 0 1 -"28907" 4 374 3 1 1 1 4 1 100 125 110 104 1766 7 9 1 567 1 5 6 0 0 -"28908" 4 374 3 1 1 1 4 1 100 125 104 109 2042 9 1 1 918 1 5 5 1 1 -"28909" 4 374 3 1 1 1 4 1 100 125 109 114 3464 2 3 0 314 1 5 5 0 
1 -"28910" 4 374 3 1 1 1 4 1 100 125 114 120 1247 1 8 0 378 1 5 6 0 1 -"28911" 4 374 3 1 1 1 4 1 100 125 120 126 1629 5 4 1 397 1 5 6 1 1 -"28912" 4 374 3 1 1 1 4 1 100 125 126 132 1383 4 2 1 364 1 5 6 1 1 -"28913" 4 374 3 1 1 1 4 1 100 125 132 125 1384 6 3 0 460 1 5 7 1 0 -"28914" 4 374 4 0 1 0 1 1 100 128 100 175 9354 2 7 0 297 2 75 75 0 1 -"28915" 4 374 4 0 1 0 1 1 100 128 175 263 2857 3 7 0 1079 3 50 88 0 1 -"28916" 4 374 4 0 1 0 1 1 100 128 263 513 1592 2 8 0 1810 1 95 250 0 1 -"28917" 4 374 4 0 1 0 1 1 100 128 513 128 2985 8 9 1 537 2 75 385 0 0 -"28918" 4 374 5 1 1 0 1 1 100 107 100 195 3089 1 3 0 2587 1 95 95 0 1 -"28919" 4 374 5 1 1 0 1 1 100 107 195 244 4405 6 5 1 353 4 25 49 1 1 -"28920" 4 374 5 1 1 0 1 1 100 107 244 305 2189 2 8 0 468 4 25 61 0 1 -"28921" 4 374 5 1 1 0 1 1 100 107 305 152 1787 8 9 1 572 3 50 153 0 0 -"28922" 4 374 5 1 1 0 1 1 100 107 152 228 1574 3 4 0 352 3 50 76 0 1 -"28923" 4 374 5 1 1 0 1 1 100 107 228 57 1509 5 7 1 682 2 75 171 0 0 -"28924" 4 374 5 1 1 0 1 1 100 107 57 111 1360 7 4 1 2780 1 95 54 1 1 -"28925" 4 374 5 1 1 0 1 1 100 107 111 55 1469 4 1 0 686 3 50 56 1 0 -"28926" 4 374 5 1 1 0 1 1 100 107 55 107 1875 9 6 1 2278 1 95 52 1 1 -"28927" 4 374 5 1 1 0 2 1 100 365 100 195 3114 8 1 1 1819 1 95 95 1 1 -"28928" 4 374 5 1 1 0 2 1 100 365 195 380 1681 6 2 1 303 1 95 185 1 1 -"28929" 4 374 5 1 1 0 2 1 100 365 380 19 1142 7 9 1 334 1 95 361 0 0 -"28930" 4 374 5 1 1 0 2 1 100 365 19 37 1147 2 10 0 1298 1 95 18 0 1 -"28931" 4 374 5 1 1 0 2 1 100 365 37 65 1217 5 3 1 2622 2 75 28 1 1 -"28932" 4 374 5 1 1 0 2 1 100 365 65 49 1126 4 2 0 262 4 25 16 1 0 -"28933" 4 374 5 1 1 0 2 1 100 365 49 96 1990 3 5 0 333 1 95 47 0 1 -"28934" 4 374 5 1 1 0 2 1 100 365 96 187 1075 9 4 1 285 1 95 91 1 1 -"28935" 4 374 5 1 1 0 2 1 100 365 187 365 1277 1 7 0 2045 1 95 178 0 1 -"28936" 4 374 5 1 1 0 3 1 100 4 100 175 1213 7 5 1 1673 2 75 75 1 1 -"28937" 4 374 5 1 1 0 3 1 100 4 175 9 1399 2 1 0 329 1 95 166 1 0 -"28938" 4 374 5 1 1 0 3 1 100 4 9 16 1400 8 6 1 419 2 75 7 1 1 -"28939" 4 374 5 1 1 0 3 1 100 4 16 24 2107 4 7 0 1724 3 50 8 0 1 -"28940" 4 374 5 1 1 0 3 1 100 4 24 42 1304 3 10 0 416 2 75 18 0 1 -"28941" 4 374 5 1 1 0 3 1 100 4 42 2 1697 6 8 1 590 1 95 40 0 0 -"28942" 4 374 5 1 1 0 3 1 100 4 2 4 1799 9 2 1 992 1 95 2 1 1 -"28943" 4 374 5 1 1 0 3 1 100 4 4 2 3893 5 3 0 386 3 50 2 1 0 -"28944" 4 374 5 1 1 0 3 1 100 4 2 4 1436 1 10 0 284 1 95 2 0 1 -"28945" 4 374 5 1 1 0 4 1 100 2308 100 175 1552 2 3 0 4595 2 75 75 0 1 -"28946" 4 374 5 1 1 0 4 1 100 2308 175 341 1286 7 4 1 375 1 95 166 1 1 -"28947" 4 374 5 1 1 0 4 1 100 2308 341 170 1498 3 1 0 2041 3 50 171 1 0 -"28948" 4 374 5 1 1 0 4 1 100 2308 170 332 1333 1 9 0 275 1 95 162 0 1 -"28949" 4 374 5 1 1 0 4 1 100 2308 332 647 1211 8 7 1 375 1 95 315 1 1 -"28950" 4 374 5 1 1 0 4 1 100 2308 647 1262 1443 9 2 1 331 1 95 615 1 1 -"28951" 4 374 5 1 1 0 4 1 100 2308 1262 2461 2561 5 6 0 1197 1 95 1199 0 1 -"28952" 4 374 5 1 1 0 4 1 100 2308 2461 1846 2113 6 8 1 914 4 25 615 0 0 -"28953" 4 374 5 1 1 0 4 1 100 2308 1846 2308 1116 4 7 0 2509 4 25 462 0 1 -"28954" 4 379 2 0 1 0 1 1 100 33 100 175 10901 2 7 0 4076 2 75 75 0 1 -"28955" 4 379 2 0 1 0 1 1 100 33 175 341 6527 3 7 0 1726 1 95 166 0 1 -"28956" 4 379 2 0 1 0 1 1 100 33 341 665 2657 2 8 0 624 1 95 324 0 1 -"28957" 4 379 2 0 1 0 1 1 100 33 665 33 1303 8 9 1 574 1 95 632 0 0 -"28958" 4 379 3 1 1 0 1 1 100 31 100 195 5623 1 3 0 1145 1 95 95 0 1 -"28959" 4 379 3 1 1 0 1 1 100 31 195 380 2248 6 5 1 466 1 95 185 1 1 -"28960" 4 379 3 1 1 0 1 1 100 31 380 19 2185 2 8 1 471 1 95 361 0 0 
-"28961" 4 379 3 1 1 0 1 1 100 31 19 37 1710 8 9 0 1954 1 95 18 0 1 -"28962" 4 379 3 1 1 0 1 1 100 31 37 72 4818 3 4 0 745 1 95 35 0 1 -"28963" 4 379 3 1 1 0 1 1 100 31 72 4 2979 5 7 1 707 1 95 68 0 0 -"28964" 4 379 3 1 1 0 1 1 100 31 4 8 1894 7 4 1 810 1 95 4 1 1 -"28965" 4 379 3 1 1 0 1 1 100 31 8 16 3552 4 1 1 1036 1 95 8 1 1 -"28966" 4 379 3 1 1 0 1 1 100 31 16 31 1545 9 6 1 869 1 95 15 1 1 -"28967" 4 379 3 1 1 0 2 0 100 0 100 195 3685 8 1 1 683 1 95 95 1 1 -"28968" 4 379 3 1 1 0 2 0 100 0 195 10 2512 6 2 0 470 1 95 185 1 0 -"28969" 4 379 3 1 1 0 2 0 100 0 10 0 1661 7 9 1 627 1 95 10 0 0 -"28970" 4 379 3 1 1 0 3 1 100 1100 100 195 2106 7 5 1 699 1 95 95 1 1 -"28971" 4 379 3 1 1 0 3 1 100 1100 195 10 1879 2 1 0 547 1 95 185 1 0 -"28972" 4 379 3 1 1 0 3 1 100 1100 10 20 1182 8 6 1 475 1 95 10 1 1 -"28973" 4 379 3 1 1 0 3 1 100 1100 20 39 2057 4 7 0 405 1 95 19 0 1 -"28974" 4 379 3 1 1 0 3 1 100 1100 39 76 1382 3 10 0 443 1 95 37 0 1 -"28975" 4 379 3 1 1 0 3 1 100 1100 76 148 2827 6 8 0 530 1 95 72 0 1 -"28976" 4 379 3 1 1 0 3 1 100 1100 148 289 1734 9 2 1 665 1 95 141 1 1 -"28977" 4 379 3 1 1 0 3 1 100 1100 289 564 1615 5 3 1 616 1 95 275 1 1 -"28978" 4 379 3 1 1 0 3 1 100 1100 564 1100 1472 1 10 0 639 1 95 536 0 1 -"28979" 4 379 3 1 1 0 4 1 100 1 100 195 3390 2 3 0 514 1 95 95 0 1 -"28980" 4 379 3 1 1 0 4 1 100 1 195 380 2319 7 4 1 526 1 95 185 1 1 -"28981" 4 379 3 1 1 0 4 1 100 1 380 19 1258 3 1 0 444 1 95 361 1 0 -"28982" 4 379 3 1 1 0 4 1 100 1 19 37 1604 1 9 0 517 1 95 18 0 1 -"28983" 4 379 3 1 1 0 4 1 100 1 37 72 1018 8 7 1 503 1 95 35 1 1 -"28984" 4 379 3 1 1 0 4 1 100 1 72 140 1076 9 2 1 446 1 95 68 1 1 -"28985" 4 379 3 1 1 0 4 1 100 1 140 273 3238 5 6 0 538 1 95 133 0 1 -"28986" 4 379 3 1 1 0 4 1 100 1 273 14 949 6 8 1 813 1 95 259 0 0 -"28987" 4 379 3 1 1 0 4 1 100 1 14 1 2108 4 7 1 557 1 95 13 0 0 -"28988" 4 379 4 0 1 1 1 1 100 175 100 125 4618 8 3 1 1068 2 25 25 1 1 -"28989" 4 379 4 0 1 1 1 1 100 175 125 156 2471 3 7 0 1855 2 25 31 0 1 -"28990" 4 379 4 0 1 1 1 1 100 175 156 234 1044 8 2 1 624 3 50 78 1 1 -"28991" 4 379 4 0 1 1 1 1 100 175 234 175 904 2 1 0 3898 2 25 59 1 0 -"28992" 4 379 5 1 1 1 1 1 100 320 100 125 2662 9 7 1 605 2 25 25 1 1 -"28993" 4 379 5 1 1 1 1 1 100 320 125 188 1365 4 8 0 570 3 50 63 0 1 -"28994" 4 379 5 1 1 1 1 1 100 320 188 235 1964 8 2 1 493 2 25 47 1 1 -"28995" 4 379 5 1 1 1 1 1 100 320 235 176 946 2 1 0 483 2 25 59 1 0 -"28996" 4 379 5 1 1 1 1 1 100 320 176 167 1898 7 6 0 533 1 5 9 1 0 -"28997" 4 379 5 1 1 1 1 1 100 320 167 175 3433 5 3 1 2140 1 5 8 1 1 -"28998" 4 379 5 1 1 1 1 1 100 320 175 219 1275 3 6 0 844 2 25 44 0 1 -"28999" 4 379 5 1 1 1 1 1 100 320 219 164 2497 6 9 1 2360 2 25 55 0 0 -"29000" 4 379 5 1 1 1 1 1 100 320 164 320 1141 1 4 0 0 5 95 156 0 1 -"29001" 4 379 5 1 1 1 2 1 100 1351 100 175 3277 2 9 0 439 4 75 75 0 1 -"29002" 4 379 5 1 1 1 2 1 100 1351 175 219 1743 4 10 0 474 2 25 44 0 1 -"29003" 4 379 5 1 1 1 2 1 100 1351 219 164 2016 3 1 0 838 2 25 55 1 0 -"29004" 4 379 5 1 1 1 2 1 100 1351 164 287 1155 8 6 1 474 4 75 123 1 1 -"29005" 4 379 5 1 1 1 2 1 100 1351 287 301 4369 5 7 0 1414 1 5 14 0 1 -"29006" 4 379 5 1 1 1 2 1 100 1351 301 226 2376 6 8 1 1524 2 25 75 0 0 -"29007" 4 379 5 1 1 1 2 1 100 1351 226 396 1089 7 5 1 547 4 75 170 1 1 -"29008" 4 379 5 1 1 1 2 1 100 1351 396 772 1479 1 10 0 711 5 95 376 0 1 -"29009" 4 379 5 1 1 1 2 1 100 1351 772 1351 1338 9 3 1 680 4 75 579 1 1 -"29010" 4 379 5 1 1 1 3 1 100 364 100 150 6481 3 5 0 1251 3 50 50 0 1 -"29011" 4 379 5 1 1 1 3 1 100 364 150 37 1108 8 9 1 1566 4 75 113 0 0 -"29012" 4 379 
5 1 1 1 3 1 100 364 37 72 1520 2 4 0 926 5 95 35 0 1 -"29013" 4 379 5 1 1 1 3 1 100 364 72 108 2422 6 3 1 809 3 50 36 1 1 -"29014" 4 379 5 1 1 1 3 1 100 364 108 189 1280 7 6 1 1833 4 75 81 1 1 -"29015" 4 379 5 1 1 1 3 1 100 364 189 142 1919 4 2 0 2098 2 25 47 1 0 -"29016" 4 379 5 1 1 1 3 1 100 364 142 277 1059 1 8 0 719 5 95 135 0 1 -"29017" 4 379 5 1 1 1 3 1 100 364 277 208 1442 5 7 1 588 2 25 69 0 0 -"29018" 4 379 5 1 1 1 3 1 100 364 208 364 916 9 5 1 2823 4 75 156 1 1 -"29019" 4 379 5 1 1 1 4 1 100 1661 100 150 1750 8 7 1 1905 3 50 50 1 1 -"29020" 4 379 5 1 1 1 4 1 100 1661 150 225 1379 3 10 0 592 3 50 75 0 1 -"29021" 4 379 5 1 1 1 4 1 100 1661 225 169 1121 7 9 1 2961 2 25 56 0 0 -"29022" 4 379 5 1 1 1 4 1 100 1661 169 330 1105 9 1 1 643 5 95 161 1 1 -"29023" 4 379 5 1 1 1 4 1 100 1661 330 578 1321 2 3 0 784 4 75 248 0 1 -"29024" 4 379 5 1 1 1 4 1 100 1661 578 1012 1616 1 8 0 2753 4 75 434 0 1 -"29025" 4 379 5 1 1 1 4 1 100 1661 1012 1063 2636 5 4 1 2916 1 5 51 1 1 -"29026" 4 379 5 1 1 1 4 1 100 1661 1063 1329 2271 4 2 1 505 2 25 266 1 1 -"29027" 4 379 5 1 1 1 4 1 100 1661 1329 1661 1621 6 3 1 662 2 25 332 1 1 -"29028" 4 383 2 0 1 1 1 1 100 115 100 150 4644 8 3 1 1437 3 50 50 1 1 -"29029" 4 383 2 0 1 1 1 1 100 115 150 263 13086 3 7 0 1045 4 75 113 0 1 -"29030" 4 383 2 0 1 1 1 1 100 115 263 460 13376 8 2 1 874 4 75 197 1 1 -"29031" 4 383 2 0 1 1 1 1 100 115 460 115 2725 2 1 0 1634 4 75 345 1 0 -"29032" 4 383 3 1 1 1 1 0 100 0 100 150 6437 9 7 1 3992 3 50 50 1 1 -"29033" 4 383 3 1 1 1 1 0 100 0 150 263 5592 4 8 0 3468 4 75 113 0 1 -"29034" 4 383 3 1 1 1 1 0 100 0 263 460 7620 8 2 1 1839 4 75 197 1 1 -"29035" 4 383 3 1 1 1 1 0 100 0 460 23 4205 2 1 0 2027 5 95 437 1 0 -"29036" 4 383 3 1 1 1 1 0 100 0 23 45 6336 7 6 1 0 5 95 22 1 1 -"29037" 4 383 3 1 1 1 1 0 100 0 45 2 6968 5 3 0 0 5 95 43 1 0 -"29038" 4 383 3 1 1 1 1 0 100 0 2 4 8346 3 6 0 0 5 95 2 0 1 -"29039" 4 383 3 1 1 1 1 0 100 0 4 0 11379 6 9 1 0 5 95 4 0 0 -"29040" 4 383 3 1 1 1 2 1 100 540 100 195 7158 2 9 0 0 5 95 95 0 1 -"29041" 4 383 3 1 1 1 2 1 100 540 195 341 10599 4 10 0 1564 4 75 146 0 1 -"29042" 4 383 3 1 1 1 2 1 100 540 341 85 3391 3 1 0 3451 4 75 256 1 0 -"29043" 4 383 3 1 1 1 2 1 100 540 85 166 6908 8 6 1 0 5 95 81 1 1 -"29044" 4 383 3 1 1 1 2 1 100 540 166 291 18113 5 7 0 2890 4 75 125 0 1 -"29045" 4 383 3 1 1 1 2 1 100 540 291 73 12541 6 8 1 982 4 75 218 0 0 -"29046" 4 383 3 1 1 1 2 1 100 540 73 142 3012 7 5 1 0 5 95 69 1 1 -"29047" 4 383 3 1 1 1 2 1 100 540 142 277 5099 1 10 0 0 5 95 135 0 1 -"29048" 4 383 3 1 1 1 2 1 100 540 277 540 2966 9 3 1 0 5 95 263 1 1 -"29049" 4 383 3 1 1 1 3 1 100 31 100 195 4812 3 5 0 0 5 95 95 0 1 -"29050" 4 383 3 1 1 1 3 1 100 31 195 10 7797 8 9 1 0 5 95 185 0 0 -"29051" 4 383 3 1 1 1 3 1 100 31 10 20 1927 2 4 0 0 5 95 10 0 1 -"29052" 4 383 3 1 1 1 3 1 100 31 20 39 5777 6 3 1 0 5 95 19 1 1 -"29053" 4 383 3 1 1 1 3 1 100 31 39 76 3068 7 6 1 0 5 95 37 1 1 -"29054" 4 383 3 1 1 1 3 1 100 31 76 4 6303 4 2 0 0 5 95 72 1 0 -"29055" 4 383 3 1 1 1 3 1 100 31 4 8 5961 1 8 0 0 5 95 4 0 1 -"29056" 4 383 3 1 1 1 3 1 100 31 8 16 2235 5 7 0 0 5 95 8 0 1 -"29057" 4 383 3 1 1 1 3 1 100 31 16 31 2614 9 5 1 0 5 95 15 1 1 -"29058" 4 383 3 1 1 1 4 1 100 27 100 195 7437 8 7 1 0 5 95 95 1 1 -"29059" 4 383 3 1 1 1 4 1 100 27 195 380 2024 3 10 0 0 5 95 185 0 1 -"29060" 4 383 3 1 1 1 4 1 100 27 380 19 2013 7 9 1 0 5 95 361 0 0 -"29061" 4 383 3 1 1 1 4 1 100 27 19 37 2059 9 1 1 0 5 95 18 1 1 -"29062" 4 383 3 1 1 1 4 1 100 27 37 72 2422 2 3 0 0 5 95 35 0 1 -"29063" 4 383 3 1 1 1 4 1 100 27 72 140 7041 1 8 0 0 5 95 68 
0 1 -"29064" 4 383 3 1 1 1 4 1 100 27 140 273 5888 5 4 1 0 5 95 133 1 1 -"29065" 4 383 3 1 1 1 4 1 100 27 273 14 3341 4 2 0 0 5 95 259 1 0 -"29066" 4 383 3 1 1 1 4 1 100 27 14 27 2019 6 3 1 0 5 95 13 1 1 -"29067" 4 383 4 0 1 0 1 1 100 29 100 150 6083 2 7 0 1463 3 50 50 0 1 -"29068" 4 383 4 0 1 0 1 1 100 29 150 293 7207 3 7 0 3813 1 95 143 0 1 -"29069" 4 383 4 0 1 0 1 1 100 29 293 571 1725 2 8 0 1824 1 95 278 0 1 -"29070" 4 383 4 0 1 0 1 1 100 29 571 29 4383 8 9 1 956 1 95 542 0 0 -"29071" 4 383 5 1 1 0 1 0 100 0 100 195 9621 1 3 0 1265 1 95 95 0 1 -"29072" 4 383 5 1 1 0 1 0 100 0 195 380 4186 6 5 1 1336 1 95 185 1 1 -"29073" 4 383 5 1 1 0 1 0 100 0 380 741 1806 2 8 0 645 1 95 361 0 1 -"29074" 4 383 5 1 1 0 1 0 100 0 741 37 2660 8 9 1 1403 1 95 704 0 0 -"29075" 4 383 5 1 1 0 1 0 100 0 37 72 2838 3 4 0 372 1 95 35 0 1 -"29076" 4 383 5 1 1 0 1 0 100 0 72 4 3952 5 7 1 465 1 95 68 0 0 -"29077" 4 383 5 1 1 0 1 0 100 0 4 8 1513 7 4 1 342 1 95 4 1 1 -"29078" 4 383 5 1 1 0 1 0 100 0 8 0 3099 4 1 0 501 1 95 8 1 0 -"29079" 4 383 5 1 1 0 2 0 100 0 100 195 5059 8 1 1 656 1 95 95 1 1 -"29080" 4 383 5 1 1 0 2 0 100 0 195 380 2445 6 2 1 374 1 95 185 1 1 -"29081" 4 383 5 1 1 0 2 0 100 0 380 19 1634 7 9 1 378 1 95 361 0 0 -"29082" 4 383 5 1 1 0 2 0 100 0 19 37 1405 2 10 0 312 1 95 18 0 1 -"29083" 4 383 5 1 1 0 2 0 100 0 37 2 2166 5 3 0 333 1 95 35 1 0 -"29084" 4 383 5 1 1 0 2 0 100 0 2 0 1866 4 2 0 239 1 95 2 1 0 -"29085" 4 383 5 1 1 0 3 0 100 0 100 195 2702 7 5 1 267 1 95 95 1 1 -"29086" 4 383 5 1 1 0 3 0 100 0 195 10 1596 2 1 0 613 1 95 185 1 0 -"29087" 4 383 5 1 1 0 3 0 100 0 10 0 2021 8 6 0 371 1 95 10 1 0 -"29088" 4 383 5 1 1 0 4 1 100 1037 100 195 1374 2 3 0 459 1 95 95 0 1 -"29089" 4 383 5 1 1 0 4 1 100 1037 195 380 4386 7 4 1 383 1 95 185 1 1 -"29090" 4 383 5 1 1 0 4 1 100 1037 380 19 1066 3 1 0 1013 1 95 361 1 0 -"29091" 4 383 5 1 1 0 4 1 100 1037 19 37 3682 1 9 0 1024 1 95 18 0 1 -"29092" 4 383 5 1 1 0 4 1 100 1037 37 72 3060 8 7 1 984 1 95 35 1 1 -"29093" 4 383 5 1 1 0 4 1 100 1037 72 140 1615 9 2 1 384 1 95 68 1 1 -"29094" 4 383 5 1 1 0 4 1 100 1037 140 273 4034 5 6 0 336 1 95 133 0 1 -"29095" 4 383 5 1 1 0 4 1 100 1037 273 532 3341 6 8 0 243 1 95 259 0 1 -"29096" 4 383 5 1 1 0 4 1 100 1037 532 1037 1229 4 7 0 328 1 95 505 0 1 -"29097" 4 384 2 0 1 1 1 1 100 197 100 150 6181 8 3 1 1757 3 50 50 1 1 -"29098" 4 384 2 0 1 1 1 1 100 197 150 263 8878 3 7 0 797 4 75 113 0 1 -"29099" 4 384 2 0 1 1 1 1 100 197 263 395 3749 8 2 1 1152 3 50 132 1 1 -"29100" 4 384 2 0 1 1 1 1 100 197 395 197 2831 2 1 0 750 3 50 198 1 0 -"29101" 4 384 3 1 1 1 1 1 100 242 100 175 8749 9 7 1 747 4 75 75 1 1 -"29102" 4 384 3 1 1 1 1 1 100 242 175 263 5932 4 8 0 1568 3 50 88 0 1 -"29103" 4 384 3 1 1 1 1 1 100 242 263 395 4523 8 2 1 746 3 50 132 1 1 -"29104" 4 384 3 1 1 1 1 1 100 242 395 296 3363 2 1 0 2273 2 25 99 1 0 -"29105" 4 384 3 1 1 1 1 1 100 242 296 222 6714 7 6 0 1912 2 25 74 1 0 -"29106" 4 384 3 1 1 1 1 1 100 242 222 166 5195 5 3 0 1273 2 25 56 1 0 -"29107" 4 384 3 1 1 1 1 1 100 242 166 249 2250 3 6 0 740 3 50 83 0 1 -"29108" 4 384 3 1 1 1 1 1 100 242 249 124 3366 6 9 1 1732 3 50 125 0 0 -"29109" 4 384 3 1 1 1 1 1 100 242 124 242 2831 1 4 0 1326 5 95 118 0 1 -"29110" 4 384 3 1 1 1 2 1 100 507 100 195 4724 2 9 0 1242 5 95 95 0 1 -"29111" 4 384 3 1 1 1 2 1 100 507 195 244 5236 4 10 0 568 2 25 49 0 1 -"29112" 4 384 3 1 1 1 2 1 100 507 244 183 5643 3 1 0 1332 2 25 61 1 0 -"29113" 4 384 3 1 1 1 2 1 100 507 183 229 2001 8 6 1 3544 2 25 46 1 1 -"29114" 4 384 3 1 1 1 2 1 100 507 229 240 5892 5 7 0 681 1 5 11 0 1 -"29115" 4 384 3 
1 1 1 2 1 100 507 240 180 4382 6 8 1 435 2 25 60 0 0 -"29116" 4 384 3 1 1 1 2 1 100 507 180 225 2483 7 5 1 1083 2 25 45 1 1 -"29117" 4 384 3 1 1 1 2 1 100 507 225 338 3413 1 10 0 1528 3 50 113 0 1 -"29118" 4 384 3 1 1 1 2 1 100 507 338 507 2300 9 3 1 2165 3 50 169 1 1 -"29119" 4 384 3 1 1 1 3 1 100 72 100 95 3536 3 5 1 821 1 5 5 0 0 -"29120" 4 384 3 1 1 1 3 1 100 72 95 5 4716 8 9 1 1073 5 95 90 0 0 -"29121" 4 384 3 1 1 1 3 1 100 72 5 9 3684 2 4 0 2091 4 75 4 0 1 -"29122" 4 384 3 1 1 1 3 1 100 72 9 14 4277 6 3 1 1679 3 50 5 1 1 -"29123" 4 384 3 1 1 1 3 1 100 72 14 27 4196 7 6 1 1138 5 95 13 1 1 -"29124" 4 384 3 1 1 1 3 1 100 72 27 20 2913 4 2 0 1628 2 25 7 1 0 -"29125" 4 384 3 1 1 1 3 1 100 72 20 39 4379 1 8 0 1265 5 95 19 0 1 -"29126" 4 384 3 1 1 1 3 1 100 72 39 37 3218 5 7 1 1014 1 5 2 0 0 -"29127" 4 384 3 1 1 1 3 1 100 72 37 72 2923 9 5 1 1286 5 95 35 1 1 -"29128" 4 384 3 1 1 1 4 1 100 384 100 195 3643 8 7 1 780 5 95 95 1 1 -"29129" 4 384 3 1 1 1 4 1 100 384 195 341 3612 3 10 0 1182 4 75 146 0 1 -"29130" 4 384 3 1 1 1 4 1 100 384 341 85 3355 7 9 1 1209 4 75 256 0 0 -"29131" 4 384 3 1 1 1 4 1 100 384 85 166 3557 9 1 1 0 5 95 81 1 1 -"29132" 4 384 3 1 1 1 4 1 100 384 166 291 2505 2 3 0 2208 4 75 125 0 1 -"29133" 4 384 3 1 1 1 4 1 100 384 291 567 3548 1 8 0 1249 5 95 276 0 1 -"29134" 4 384 3 1 1 1 4 1 100 384 567 539 3819 5 4 0 771 1 5 28 1 0 -"29135" 4 384 3 1 1 1 4 1 100 384 539 404 3371 4 2 0 913 2 25 135 1 0 -"29136" 4 384 3 1 1 1 4 1 100 384 404 384 3434 6 3 0 668 1 5 20 1 0 -"29137" 4 384 4 0 1 0 1 1 100 128 100 150 5356 2 7 0 1731 3 50 50 0 1 -"29138" 4 384 4 0 1 0 1 1 100 128 150 263 5734 3 7 0 1379 2 75 113 0 1 -"29139" 4 384 4 0 1 0 1 1 100 128 263 513 7670 2 8 0 3038 1 95 250 0 1 -"29140" 4 384 4 0 1 0 1 1 100 128 513 128 2595 8 9 1 885 2 75 385 0 0 -"29141" 4 384 5 1 1 0 1 1 100 747 100 195 12923 1 3 0 1074 1 95 95 0 1 -"29142" 4 384 5 1 1 0 1 1 100 747 195 380 2612 6 5 1 2021 1 95 185 1 1 -"29143" 4 384 5 1 1 0 1 1 100 747 380 741 1794 2 8 0 1018 1 95 361 0 1 -"29144" 4 384 5 1 1 0 1 1 100 747 741 185 3911 8 9 1 1507 2 75 556 0 0 -"29145" 4 384 5 1 1 0 1 1 100 747 185 361 2833 3 4 0 1223 1 95 176 0 1 -"29146" 4 384 5 1 1 0 1 1 100 747 361 379 5042 5 7 0 2865 5 5 18 0 1 -"29147" 4 384 5 1 1 0 1 1 100 747 379 569 2494 7 4 1 1693 3 50 190 1 1 -"29148" 4 384 5 1 1 0 1 1 100 747 569 427 2458 4 1 0 739 4 25 142 1 0 -"29149" 4 384 5 1 1 0 1 1 100 747 427 747 3725 9 6 1 1017 2 75 320 1 1 -"29150" 4 384 5 1 1 0 2 1 100 497 100 195 2738 8 1 1 1429 1 95 95 1 1 -"29151" 4 384 5 1 1 0 2 1 100 497 195 293 3750 6 2 1 1290 3 50 98 1 1 -"29152" 4 384 5 1 1 0 2 1 100 497 293 73 4378 7 9 1 2807 2 75 220 0 0 -"29153" 4 384 5 1 1 0 2 1 100 497 73 142 2518 2 10 0 1204 1 95 69 0 1 -"29154" 4 384 5 1 1 0 2 1 100 497 142 135 4640 5 3 0 986 5 5 7 1 0 -"29155" 4 384 5 1 1 0 2 1 100 497 135 67 4184 4 2 0 3007 3 50 68 1 0 -"29156" 4 384 5 1 1 0 2 1 100 497 67 131 3213 3 5 0 1258 1 95 64 0 1 -"29157" 4 384 5 1 1 0 2 1 100 497 131 255 2067 9 4 1 862 1 95 124 1 1 -"29158" 4 384 5 1 1 0 2 1 100 497 255 497 2258 1 7 0 676 1 95 242 0 1 -"29159" 4 384 5 1 1 0 3 1 100 6 100 195 3953 7 5 1 1219 1 95 95 1 1 -"29160" 4 384 5 1 1 0 3 1 100 6 195 10 2502 2 1 0 469 1 95 185 1 0 -"29161" 4 384 5 1 1 0 3 1 100 6 10 20 1500 8 6 1 594 1 95 10 1 1 -"29162" 4 384 5 1 1 0 3 1 100 6 20 35 2235 4 7 0 391 2 75 15 0 1 -"29163" 4 384 5 1 1 0 3 1 100 6 35 68 2745 3 10 0 3044 1 95 33 0 1 -"29164" 4 384 5 1 1 0 3 1 100 6 68 3 2200 6 8 1 1677 1 95 65 0 0 -"29165" 4 384 5 1 1 0 3 1 100 6 3 6 5411 9 2 1 3503 1 95 3 1 1 -"29166" 4 384 5 1 
1 0 3 1 100 6 6 3 12605 5 3 0 860 3 50 3 1 0 -"29167" 4 384 5 1 1 0 3 1 100 6 3 6 6144 1 10 0 3281 1 95 3 0 1 -"29168" 4 384 5 1 1 0 4 1 100 1135 100 195 8000 2 3 0 1858 1 95 95 0 1 -"29169" 4 384 5 1 1 0 4 1 100 1135 195 293 3318 7 4 1 998 3 50 98 1 1 -"29170" 4 384 5 1 1 0 4 1 100 1135 293 146 4636 3 1 0 2957 3 50 147 1 0 -"29171" 4 384 5 1 1 0 4 1 100 1135 146 285 2261 1 9 0 1455 1 95 139 0 1 -"29172" 4 384 5 1 1 0 4 1 100 1135 285 556 3127 8 7 1 1192 1 95 271 1 1 -"29173" 4 384 5 1 1 0 4 1 100 1135 556 1084 2172 9 2 1 1347 1 95 528 1 1 -"29174" 4 384 5 1 1 0 4 1 100 1135 1084 1138 3784 5 6 0 0 5 5 54 0 1 -"29175" 4 384 5 1 1 0 4 1 100 1135 1138 1081 6673 6 8 1 0 5 5 57 0 0 -"29176" 4 384 5 1 1 0 4 1 100 1135 1081 1135 5058 4 7 0 0 5 5 54 0 1 -"29177" 4 385 2 0 1 0 1 1 100 65 100 175 7690 2 7 0 1273 2 75 75 0 1 -"29178" 4 385 2 0 1 0 1 1 100 65 175 263 6394 3 7 0 1816 3 50 88 0 1 -"29179" 4 385 2 0 1 0 1 1 100 65 263 131 4670 2 8 1 982 3 50 132 0 0 -"29180" 4 385 2 0 1 0 1 1 100 65 131 65 6260 8 9 1 992 3 50 66 0 0 -"29181" 4 385 3 1 1 0 1 1 100 418 100 150 5760 1 3 0 1548 3 50 50 0 1 -"29182" 4 385 3 1 1 0 1 1 100 418 150 112 2111 6 5 0 1057 4 25 38 1 0 -"29183" 4 385 3 1 1 0 1 1 100 418 112 196 3219 2 8 0 840 2 75 84 0 1 -"29184" 4 385 3 1 1 0 1 1 100 418 196 98 5861 8 9 1 1123 3 50 98 0 0 -"29185" 4 385 3 1 1 0 1 1 100 418 98 191 4259 3 4 0 917 1 95 93 0 1 -"29186" 4 385 3 1 1 0 1 1 100 418 191 372 3159 5 7 0 742 1 95 181 0 1 -"29187" 4 385 3 1 1 0 1 1 100 418 372 558 3224 7 4 1 869 3 50 186 1 1 -"29188" 4 385 3 1 1 0 1 1 100 418 558 837 1817 4 1 1 2352 3 50 279 1 1 -"29189" 4 385 3 1 1 0 1 1 100 418 837 418 2365 9 6 0 914 3 50 419 1 0 -"29190" 4 385 3 1 1 0 2 1 100 820 100 175 2257 8 1 1 2164 2 75 75 1 1 -"29191" 4 385 3 1 1 0 2 1 100 820 175 341 3050 6 2 1 1310 1 95 166 1 1 -"29192" 4 385 3 1 1 0 2 1 100 820 341 665 2237 7 9 0 696 1 95 324 0 1 -"29193" 4 385 3 1 1 0 2 1 100 820 665 998 3436 2 10 0 2750 3 50 333 0 1 -"29194" 4 385 3 1 1 0 2 1 100 820 998 748 3017 5 3 0 1135 4 25 250 1 0 -"29195" 4 385 3 1 1 0 2 1 100 820 748 1122 3942 4 2 1 1025 3 50 374 1 1 -"29196" 4 385 3 1 1 0 2 1 100 820 1122 561 3037 3 5 1 2642 3 50 561 0 0 -"29197" 4 385 3 1 1 0 2 1 100 820 561 1094 2729 9 4 1 701 1 95 533 1 1 -"29198" 4 385 3 1 1 0 2 1 100 820 1094 820 3793 1 7 1 890 4 25 274 0 0 -"29199" 4 385 3 1 1 0 3 0 100 0 100 5 2872 7 5 0 1501 1 95 95 1 0 -"29200" 4 385 3 1 1 0 3 0 100 0 5 0 1931 2 1 0 692 1 95 5 1 0 -"29201" 4 385 3 1 1 0 4 1 100 936 100 150 2925 2 3 0 1519 3 50 50 0 1 -"29202" 4 385 3 1 1 0 4 1 100 936 150 225 5116 7 4 1 2742 3 50 75 1 1 -"29203" 4 385 3 1 1 0 4 1 100 936 225 338 1761 3 1 1 3251 3 50 113 1 1 -"29204" 4 385 3 1 1 0 4 1 100 936 338 507 2734 1 9 0 1425 3 50 169 0 1 -"29205" 4 385 3 1 1 0 4 1 100 936 507 253 2371 8 7 0 2736 3 50 254 1 0 -"29206" 4 385 3 1 1 0 4 1 100 936 253 493 4635 9 2 1 852 1 95 240 1 1 -"29207" 4 385 3 1 1 0 4 1 100 936 493 246 5193 5 6 1 2704 3 50 247 0 0 -"29208" 4 385 3 1 1 0 4 1 100 936 246 480 3284 6 8 0 1133 1 95 234 0 1 -"29209" 4 385 3 1 1 0 4 1 100 936 480 936 1717 4 7 0 841 1 95 456 0 1 -"29210" 4 385 4 0 1 1 1 1 100 148 100 125 4216 8 3 1 1301 2 25 25 1 1 -"29211" 4 385 4 0 1 1 1 1 100 148 125 188 3197 3 7 0 1243 3 50 63 0 1 -"29212" 4 385 4 0 1 1 1 1 100 148 188 141 1549 8 2 0 3913 2 25 47 1 0 -"29213" 4 385 4 0 1 1 1 1 100 148 141 148 2740 2 1 1 824 1 5 7 1 1 -"29214" 4 385 5 1 1 1 1 1 100 361 100 125 2148 9 7 1 859 2 25 25 1 1 -"29215" 4 385 5 1 1 1 1 1 100 361 125 188 1496 4 8 0 1230 3 50 63 0 1 -"29216" 4 385 5 1 1 1 1 1 100 361 188 
141 4873 8 2 0 1447 2 25 47 1 0 -"29217" 4 385 5 1 1 1 1 1 100 361 141 70 1031 2 1 0 1372 3 50 71 1 0 -"29218" 4 385 5 1 1 1 1 1 100 361 70 123 2008 7 6 1 822 4 75 53 1 1 -"29219" 4 385 5 1 1 1 1 1 100 361 123 185 8060 5 3 1 930 3 50 62 1 1 -"29220" 4 385 5 1 1 1 1 1 100 361 185 231 3195 3 6 0 761 2 25 46 0 1 -"29221" 4 385 5 1 1 1 1 1 100 361 231 289 1698 6 9 0 1527 2 25 58 0 1 -"29222" 4 385 5 1 1 1 1 1 100 361 289 361 2900 1 4 0 695 2 25 72 0 1 -"29223" 4 385 5 1 1 1 2 1 100 247 100 75 3601 2 9 1 716 2 25 25 0 0 -"29224" 4 385 5 1 1 1 2 1 100 247 75 71 3015 4 10 1 861 1 5 4 0 0 -"29225" 4 385 5 1 1 1 2 1 100 247 71 53 681 3 1 0 758 2 25 18 1 0 -"29226" 4 385 5 1 1 1 2 1 100 247 53 93 1655 8 6 1 1254 4 75 40 1 1 -"29227" 4 385 5 1 1 1 2 1 100 247 93 140 1664 5 7 0 951 3 50 47 0 1 -"29228" 4 385 5 1 1 1 2 1 100 247 140 175 2543 6 8 0 724 2 25 35 0 1 -"29229" 4 385 5 1 1 1 2 1 100 247 175 219 1622 7 5 1 1398 2 25 44 1 1 -"29230" 4 385 5 1 1 1 2 1 100 247 219 329 5281 1 10 0 1516 3 50 110 0 1 -"29231" 4 385 5 1 1 1 2 1 100 247 329 247 2017 9 3 0 1031 2 25 82 1 0 -"29232" 4 385 5 1 1 1 3 1 100 130 100 125 2120 3 5 0 969 2 25 25 0 1 -"29233" 4 385 5 1 1 1 3 1 100 130 125 62 1561 8 9 1 836 3 50 63 0 0 -"29234" 4 385 5 1 1 1 3 1 100 130 62 78 1762 2 4 0 2548 2 25 16 0 1 -"29235" 4 385 5 1 1 1 3 1 100 130 78 39 4656 6 3 0 849 3 50 39 1 0 -"29236" 4 385 5 1 1 1 3 1 100 130 39 37 1589 7 6 0 831 1 5 2 1 0 -"29237" 4 385 5 1 1 1 3 1 100 130 37 39 1381 4 2 1 905 1 5 2 1 1 -"29238" 4 385 5 1 1 1 3 1 100 130 39 59 899 1 8 0 1701 3 50 20 0 1 -"29239" 4 385 5 1 1 1 3 1 100 130 59 74 1602 5 7 0 755 2 25 15 0 1 -"29240" 4 385 5 1 1 1 3 1 100 130 74 130 2460 9 5 1 869 4 75 56 1 1 -"29241" 4 385 5 1 1 1 4 1 100 536 100 175 2618 8 7 1 963 4 75 75 1 1 -"29242" 4 385 5 1 1 1 4 1 100 536 175 131 2987 3 10 1 2434 2 25 44 0 0 -"29243" 4 385 5 1 1 1 4 1 100 536 131 98 1708 7 9 1 880 2 25 33 0 0 -"29244" 4 385 5 1 1 1 4 1 100 536 98 147 1531 9 1 1 1649 3 50 49 1 1 -"29245" 4 385 5 1 1 1 4 1 100 536 147 257 1863 2 3 0 1412 4 75 110 0 1 -"29246" 4 385 5 1 1 1 4 1 100 536 257 386 2214 1 8 0 1835 3 50 129 0 1 -"29247" 4 385 5 1 1 1 4 1 100 536 386 289 1752 5 4 0 1579 2 25 97 1 0 -"29248" 4 385 5 1 1 1 4 1 100 536 289 275 1109 4 2 0 1006 1 5 14 1 0 -"29249" 4 385 5 1 1 1 4 1 100 536 275 536 4218 6 3 1 2992 5 95 261 1 1 -"29250" 4 386 2 0 1 1 1 1 100 94 100 125 21936 8 3 1 1434 2 25 25 1 1 -"29251" 4 386 2 0 1 1 1 1 100 94 125 119 3291 3 7 1 4956 1 5 6 0 0 -"29252" 4 386 2 0 1 1 1 1 100 94 119 125 2365 8 2 1 1274 1 5 6 1 1 -"29253" 4 386 2 0 1 1 1 1 100 94 125 94 1874 2 1 0 927 2 25 31 1 0 -"29254" 4 386 3 1 1 1 1 1 100 155 100 105 3269 9 7 1 1350 1 5 5 1 1 -"29255" 4 386 3 1 1 1 1 1 100 155 105 110 1299 4 8 0 1155 1 5 5 0 1 -"29256" 4 386 3 1 1 1 1 1 100 155 110 116 1730 8 2 1 893 1 5 6 1 1 -"29257" 4 386 3 1 1 1 1 1 100 155 116 110 1624 2 1 0 760 1 5 6 1 0 -"29258" 4 386 3 1 1 1 1 1 100 155 110 138 2065 7 6 1 620 2 25 28 1 1 -"29259" 4 386 3 1 1 1 1 1 100 155 138 131 1827 5 3 0 482 1 5 7 1 0 -"29260" 4 386 3 1 1 1 1 1 100 155 131 138 4128 3 6 0 905 1 5 7 0 1 -"29261" 4 386 3 1 1 1 1 1 100 155 138 103 2027 6 9 1 560 2 25 35 0 0 -"29262" 4 386 3 1 1 1 1 1 100 155 103 155 1843 1 4 0 753 3 50 52 0 1 -"29263" 4 386 3 1 1 1 2 1 100 186 100 75 2834 2 9 1 593 2 25 25 0 0 -"29264" 4 386 3 1 1 1 2 1 100 186 75 94 1202 4 10 0 446 2 25 19 0 1 -"29265" 4 386 3 1 1 1 2 1 100 186 94 99 1614 3 1 1 447 1 5 5 1 1 -"29266" 4 386 3 1 1 1 2 1 100 186 99 94 1663 8 6 0 454 1 5 5 1 0 -"29267" 4 386 3 1 1 1 2 1 100 186 94 99 1741 5 7 0 448 1 5 
5 0 1 -"29268" 4 386 3 1 1 1 2 1 100 186 99 104 3392 6 8 0 460 1 5 5 0 1 -"29269" 4 386 3 1 1 1 2 1 100 186 104 99 1579 7 5 0 1142 1 5 5 1 0 -"29270" 4 386 3 1 1 1 2 1 100 186 99 149 2308 1 10 0 2045 3 50 50 0 1 -"29271" 4 386 3 1 1 1 2 1 100 186 149 186 2177 9 3 1 903 2 25 37 1 1 -"29272" 4 386 3 1 1 1 3 1 100 142 100 95 2064 3 5 1 739 1 5 5 0 0 -"29273" 4 386 3 1 1 1 3 1 100 142 95 100 1661 8 9 0 739 1 5 5 0 1 -"29274" 4 386 3 1 1 1 3 1 100 142 100 95 1589 2 4 1 718 1 5 5 0 0 -"29275" 4 386 3 1 1 1 3 1 100 142 95 71 1114 6 3 0 451 2 25 24 1 0 -"29276" 4 386 3 1 1 1 3 1 100 142 71 89 1387 7 6 1 670 2 25 18 1 1 -"29277" 4 386 3 1 1 1 3 1 100 142 89 44 1700 4 2 0 1337 3 50 45 1 0 -"29278" 4 386 3 1 1 1 3 1 100 142 44 77 3191 1 8 0 1157 4 75 33 0 1 -"29279" 4 386 3 1 1 1 3 1 100 142 77 81 1788 5 7 0 565 1 5 4 0 1 -"29280" 4 386 3 1 1 1 3 1 100 142 81 142 1679 9 5 1 1189 4 75 61 1 1 -"29281" 4 386 3 1 1 1 4 1 100 335 100 105 2113 8 7 1 833 1 5 5 1 1 -"29282" 4 386 3 1 1 1 4 1 100 335 105 100 2063 3 10 1 394 1 5 5 0 0 -"29283" 4 386 3 1 1 1 4 1 100 335 100 75 1317 7 9 1 1036 2 25 25 0 0 -"29284" 4 386 3 1 1 1 4 1 100 335 75 146 1696 9 1 1 1414 5 95 71 1 1 -"29285" 4 386 3 1 1 1 4 1 100 335 146 183 5632 2 3 0 658 2 25 37 0 1 -"29286" 4 386 3 1 1 1 4 1 100 335 183 320 2095 1 8 0 1170 4 75 137 0 1 -"29287" 4 386 3 1 1 1 4 1 100 335 320 336 2279 5 4 1 728 1 5 16 1 1 -"29288" 4 386 3 1 1 1 4 1 100 335 336 353 2083 4 2 1 821 1 5 17 1 1 -"29289" 4 386 3 1 1 1 4 1 100 335 353 335 2220 6 3 0 488 1 5 18 1 0 -"29290" 4 386 4 0 1 0 1 1 100 37 100 195 3334 2 7 0 1517 1 95 95 0 1 -"29291" 4 386 4 0 1 0 1 1 100 37 195 380 6394 3 7 0 1207 1 95 185 0 1 -"29292" 4 386 4 0 1 0 1 1 100 37 380 741 2066 2 8 0 717 1 95 361 0 1 -"29293" 4 386 4 0 1 0 1 1 100 37 741 37 1736 8 9 1 1284 1 95 704 0 0 -"29294" 4 386 5 1 1 0 1 0 100 0 100 195 1890 1 3 0 757 1 95 95 0 1 -"29295" 4 386 5 1 1 0 1 0 100 0 195 380 1398 6 5 1 473 1 95 185 1 1 -"29296" 4 386 5 1 1 0 1 0 100 0 380 741 1784 2 8 0 704 1 95 361 0 1 -"29297" 4 386 5 1 1 0 1 0 100 0 741 37 1356 8 9 1 577 1 95 704 0 0 -"29298" 4 386 5 1 1 0 1 0 100 0 37 72 3018 3 4 0 856 1 95 35 0 1 -"29299" 4 386 5 1 1 0 1 0 100 0 72 4 2100 5 7 1 1550 1 95 68 0 0 -"29300" 4 386 5 1 1 0 1 0 100 0 4 8 1497 7 4 1 1176 1 95 4 1 1 -"29301" 4 386 5 1 1 0 1 0 100 0 8 0 1685 4 1 0 1498 1 95 8 1 0 -"29302" 4 386 5 1 1 0 2 0 100 0 100 195 2830 8 1 1 988 1 95 95 1 1 -"29303" 4 386 5 1 1 0 2 0 100 0 195 380 1595 6 2 1 428 1 95 185 1 1 -"29304" 4 386 5 1 1 0 2 0 100 0 380 19 1757 7 9 1 903 1 95 361 0 0 -"29305" 4 386 5 1 1 0 2 0 100 0 19 37 1714 2 10 0 686 1 95 18 0 1 -"29306" 4 386 5 1 1 0 2 0 100 0 37 2 1059 5 3 0 422 1 95 35 1 0 -"29307" 4 386 5 1 1 0 2 0 100 0 2 0 863 4 2 0 916 1 95 2 1 0 -"29308" 4 386 5 1 1 0 3 1 100 31 100 195 1572 7 5 1 358 1 95 95 1 1 -"29309" 4 386 5 1 1 0 3 1 100 31 195 10 1274 2 1 0 427 1 95 185 1 0 -"29310" 4 386 5 1 1 0 3 1 100 31 10 20 1188 8 6 1 305 1 95 10 1 1 -"29311" 4 386 5 1 1 0 3 1 100 31 20 39 1000 4 7 0 317 1 95 19 0 1 -"29312" 4 386 5 1 1 0 3 1 100 31 39 76 1572 3 10 0 446 1 95 37 0 1 -"29313" 4 386 5 1 1 0 3 1 100 31 76 4 1550 6 8 1 314 1 95 72 0 0 -"29314" 4 386 5 1 1 0 3 1 100 31 4 8 1313 9 2 1 360 1 95 4 1 1 -"29315" 4 386 5 1 1 0 3 1 100 31 8 16 1148 5 3 1 532 1 95 8 1 1 -"29316" 4 386 5 1 1 0 3 1 100 31 16 31 544 1 10 0 292 1 95 15 0 1 -"29317" 4 386 5 1 1 0 4 0 100 0 100 195 2231 2 3 0 786 1 95 95 0 1 -"29318" 4 386 5 1 1 0 4 0 100 0 195 49 1954 7 4 0 1380 2 75 146 1 0 -"29319" 4 386 5 1 1 0 4 0 100 0 49 2 1935 3 1 0 505 1 95 47 1 0 -"29320" 4 386 5 
1 1 0 4 0 100 0 2 4 1245 1 9 0 776 1 95 2 0 1 -"29321" 4 386 5 1 1 0 4 0 100 0 4 0 1344 8 7 0 630 1 95 4 1 0 -"29322" 4 389 2 0 1 1 1 1 100 542 100 150 8000 8 3 1 986 3 50 50 1 1 -"29323" 4 389 2 0 1 1 1 1 100 542 150 293 7880 3 7 0 0 5 95 143 0 1 -"29324" 4 389 2 0 1 1 1 1 100 542 293 571 1943 8 2 1 0 5 95 278 1 1 -"29325" 4 389 2 0 1 1 1 1 100 542 571 542 3152 2 1 0 2247 1 5 29 1 0 -"29326" 4 389 3 1 1 1 1 1 100 437 100 195 9345 9 7 1 0 5 95 95 1 1 -"29327" 4 389 3 1 1 1 1 1 100 437 195 205 2418 4 8 0 1949 1 5 10 0 1 -"29328" 4 389 3 1 1 1 1 1 100 437 205 215 2253 8 2 1 1079 1 5 10 1 1 -"29329" 4 389 3 1 1 1 1 1 100 437 215 226 5121 2 1 1 451 1 5 11 1 1 -"29330" 4 389 3 1 1 1 1 1 100 437 226 237 1947 7 6 1 836 1 5 11 1 1 -"29331" 4 389 3 1 1 1 1 1 100 437 237 225 2050 5 3 0 390 1 5 12 1 0 -"29332" 4 389 3 1 1 1 1 1 100 437 225 236 1280 3 6 0 985 1 5 11 0 1 -"29333" 4 389 3 1 1 1 1 1 100 437 236 224 3904 6 9 1 500 1 5 12 0 0 -"29334" 4 389 3 1 1 1 1 1 100 437 224 437 3210 1 4 0 0 5 95 213 0 1 -"29335" 4 389 3 1 1 1 2 1 100 256 100 105 1686 2 9 0 476 1 5 5 0 1 -"29336" 4 389 3 1 1 1 2 1 100 256 105 100 1866 4 10 1 391 1 5 5 0 0 -"29337" 4 389 3 1 1 1 2 1 100 256 100 95 833 3 1 0 319 1 5 5 1 0 -"29338" 4 389 3 1 1 1 2 1 100 256 95 119 1685 8 6 1 1411 2 25 24 1 1 -"29339" 4 389 3 1 1 1 2 1 100 256 119 125 908 5 7 0 343 1 5 6 0 1 -"29340" 4 389 3 1 1 1 2 1 100 256 125 119 1196 6 8 1 457 1 5 6 0 0 -"29341" 4 389 3 1 1 1 2 1 100 256 119 125 1480 7 5 1 351 1 5 6 1 1 -"29342" 4 389 3 1 1 1 2 1 100 256 125 244 2027 1 10 0 0 5 95 119 0 1 -"29343" 4 389 3 1 1 1 2 1 100 256 244 256 2344 9 3 1 382 1 5 12 1 1 -"29344" 4 389 3 1 1 1 3 1 100 116 100 105 1768 3 5 0 378 1 5 5 0 1 -"29345" 4 389 3 1 1 1 3 1 100 116 105 100 1101 8 9 1 294 1 5 5 0 0 -"29346" 4 389 3 1 1 1 3 1 100 116 100 105 965 2 4 0 460 1 5 5 0 1 -"29347" 4 389 3 1 1 1 3 1 100 116 105 110 1076 6 3 1 262 1 5 5 1 1 -"29348" 4 389 3 1 1 1 3 1 100 116 110 116 871 7 6 1 883 1 5 6 1 1 -"29349" 4 389 3 1 1 1 3 1 100 116 116 110 1118 4 2 0 310 1 5 6 1 0 -"29350" 4 389 3 1 1 1 3 1 100 116 110 116 1493 1 8 0 403 1 5 6 0 1 -"29351" 4 389 3 1 1 1 3 1 100 116 116 110 2021 5 7 1 307 1 5 6 0 0 -"29352" 4 389 3 1 1 1 3 1 100 116 110 116 1745 9 5 1 804 1 5 6 1 1 -"29353" 4 389 3 1 1 1 4 1 100 125 100 105 1702 8 7 1 1259 1 5 5 1 1 -"29354" 4 389 3 1 1 1 4 1 100 125 105 110 1196 3 10 0 433 1 5 5 0 1 -"29355" 4 389 3 1 1 1 4 1 100 125 110 104 890 7 9 1 405 1 5 6 0 0 -"29356" 4 389 3 1 1 1 4 1 100 125 104 109 2983 9 1 1 366 1 5 5 1 1 -"29357" 4 389 3 1 1 1 4 1 100 125 109 114 898 2 3 0 437 1 5 5 0 1 -"29358" 4 389 3 1 1 1 4 1 100 125 114 120 837 1 8 0 843 1 5 6 0 1 -"29359" 4 389 3 1 1 1 4 1 100 125 120 126 1599 5 4 1 1249 1 5 6 1 1 -"29360" 4 389 3 1 1 1 4 1 100 125 126 132 3131 4 2 1 348 1 5 6 1 1 -"29361" 4 389 3 1 1 1 4 1 100 125 132 125 1841 6 3 0 390 1 5 7 1 0 -"29362" 4 389 4 0 1 0 1 1 100 33 100 175 2787 2 7 0 3404 2 75 75 0 1 -"29363" 4 389 4 0 1 0 1 1 100 33 175 341 4079 3 7 0 714 1 95 166 0 1 -"29364" 4 389 4 0 1 0 1 1 100 33 341 665 1278 2 8 0 1015 1 95 324 0 1 -"29365" 4 389 4 0 1 0 1 1 100 33 665 33 1524 8 9 1 528 1 95 632 0 0 -"29366" 4 389 5 1 1 0 1 0 100 0 100 195 1292 1 3 0 388 1 95 95 0 1 -"29367" 4 389 5 1 1 0 1 0 100 0 195 380 942 6 5 1 375 1 95 185 1 1 -"29368" 4 389 5 1 1 0 1 0 100 0 380 741 947 2 8 0 403 1 95 361 0 1 -"29369" 4 389 5 1 1 0 1 0 100 0 741 37 1053 8 9 1 464 1 95 704 0 0 -"29370" 4 389 5 1 1 0 1 0 100 0 37 72 1088 3 4 0 457 1 95 35 0 1 -"29371" 4 389 5 1 1 0 1 0 100 0 72 4 1105 5 7 1 242 1 95 68 0 0 -"29372" 4 389 5 1 1 0 1 
0 100 0 4 0 771 7 4 0 284 1 95 4 1 0 -"29373" 4 389 5 1 1 0 2 1 100 31 100 195 1046 8 1 1 299 1 95 95 1 1 -"29374" 4 389 5 1 1 0 2 1 100 31 195 380 1009 6 2 1 287 1 95 185 1 1 -"29375" 4 389 5 1 1 0 2 1 100 31 380 19 1014 7 9 1 892 1 95 361 0 0 -"29376" 4 389 5 1 1 0 2 1 100 31 19 37 886 2 10 0 842 1 95 18 0 1 -"29377" 4 389 5 1 1 0 2 1 100 31 37 72 1823 5 3 1 325 1 95 35 1 1 -"29378" 4 389 5 1 1 0 2 1 100 31 72 4 1123 4 2 0 403 1 95 68 1 0 -"29379" 4 389 5 1 1 0 2 1 100 31 4 8 1125 3 5 0 269 1 95 4 0 1 -"29380" 4 389 5 1 1 0 2 1 100 31 8 16 877 9 4 1 313 1 95 8 1 1 -"29381" 4 389 5 1 1 0 2 1 100 31 16 31 831 1 7 0 245 1 95 15 0 1 -"29382" 4 389 5 1 1 0 3 0 100 0 100 195 1815 7 5 1 215 1 95 95 1 1 -"29383" 4 389 5 1 1 0 3 0 100 0 195 10 690 2 1 0 991 1 95 185 1 0 -"29384" 4 389 5 1 1 0 3 0 100 0 10 20 917 8 6 1 414 1 95 10 1 1 -"29385" 4 389 5 1 1 0 3 0 100 0 20 39 1248 4 7 0 354 1 95 19 0 1 -"29386" 4 389 5 1 1 0 3 0 100 0 39 76 759 3 10 0 320 1 95 37 0 1 -"29387" 4 389 5 1 1 0 3 0 100 0 76 4 830 6 8 1 284 1 95 72 0 0 -"29388" 4 389 5 1 1 0 3 0 100 0 4 8 804 9 2 1 307 1 95 4 1 1 -"29389" 4 389 5 1 1 0 3 0 100 0 8 0 1217 5 3 0 405 1 95 8 1 0 -"29390" 4 389 5 1 1 0 4 1 100 27 100 195 1821 2 3 0 435 1 95 95 0 1 -"29391" 4 389 5 1 1 0 4 1 100 27 195 380 2044 7 4 1 552 1 95 185 1 1 -"29392" 4 389 5 1 1 0 4 1 100 27 380 19 1123 3 1 0 291 1 95 361 1 0 -"29393" 4 389 5 1 1 0 4 1 100 27 19 37 826 1 9 0 417 1 95 18 0 1 -"29394" 4 389 5 1 1 0 4 1 100 27 37 72 762 8 7 1 364 1 95 35 1 1 -"29395" 4 389 5 1 1 0 4 1 100 27 72 140 816 9 2 1 295 1 95 68 1 1 -"29396" 4 389 5 1 1 0 4 1 100 27 140 273 1118 5 6 0 334 1 95 133 0 1 -"29397" 4 389 5 1 1 0 4 1 100 27 273 14 1531 6 8 1 336 1 95 259 0 0 -"29398" 4 389 5 1 1 0 4 1 100 27 14 27 1325 4 7 0 299 1 95 13 0 1 -"29399" 4 393 2 0 1 1 1 1 100 176 100 150 4732 8 3 1 2809 3 50 50 1 1 -"29400" 4 393 2 0 1 1 1 1 100 176 150 188 9672 3 7 0 1186 2 25 38 0 1 -"29401" 4 393 2 0 1 1 1 1 100 176 188 235 2799 8 2 1 679 2 25 47 1 1 -"29402" 4 393 2 0 1 1 1 1 100 176 235 176 2116 2 1 0 1621 2 25 59 1 0 -"29403" 4 393 3 1 1 1 1 1 100 381 100 150 10631 9 7 1 1059 3 50 50 1 1 -"29404" 4 393 3 1 1 1 1 1 100 381 150 188 2614 4 8 0 1975 2 25 38 0 1 -"29405" 4 393 3 1 1 1 1 1 100 381 188 235 3238 8 2 1 666 2 25 47 1 1 -"29406" 4 393 3 1 1 1 1 1 100 381 235 294 2311 2 1 1 727 2 25 59 1 1 -"29407" 4 393 3 1 1 1 1 1 100 381 294 309 4809 7 6 1 1250 1 5 15 1 1 -"29408" 4 393 3 1 1 1 1 1 100 381 309 232 2308 5 3 0 764 2 25 77 1 0 -"29409" 4 393 3 1 1 1 1 1 100 381 232 290 10485 3 6 0 1200 2 25 58 0 1 -"29410" 4 393 3 1 1 1 1 1 100 381 290 305 6348 6 9 0 1260 1 5 15 0 1 -"29411" 4 393 3 1 1 1 1 1 100 381 305 381 2293 1 4 0 1021 2 25 76 0 1 -"29412" 4 393 3 1 1 1 2 1 100 804 100 150 5529 2 9 0 648 3 50 50 0 1 -"29413" 4 393 3 1 1 1 2 1 100 804 150 188 3319 4 10 0 2230 2 25 38 0 1 -"29414" 4 393 3 1 1 1 2 1 100 804 188 282 4250 3 1 1 781 3 50 94 1 1 -"29415" 4 393 3 1 1 1 2 1 100 804 282 211 3877 8 6 0 522 2 25 71 1 0 -"29416" 4 393 3 1 1 1 2 1 100 804 211 317 2375 5 7 0 796 3 50 106 0 1 -"29417" 4 393 3 1 1 1 2 1 100 804 317 238 3147 6 8 1 1147 2 25 79 0 0 -"29418" 4 393 3 1 1 1 2 1 100 804 238 357 1938 7 5 1 732 3 50 119 1 1 -"29419" 4 393 3 1 1 1 2 1 100 804 357 536 2278 1 10 0 1112 3 50 179 0 1 -"29420" 4 393 3 1 1 1 2 1 100 804 536 804 1976 9 3 1 903 3 50 268 1 1 -"29421" 4 393 3 1 1 1 3 1 100 42 100 195 3069 3 5 0 929 5 95 95 0 1 -"29422" 4 393 3 1 1 1 3 1 100 42 195 49 1650 8 9 1 634 4 75 146 0 0 -"29423" 4 393 3 1 1 1 3 1 100 42 49 96 2737 2 4 0 709 5 95 47 0 1 -"29424" 4 393 3 1 
1 1 3 1 100 42 96 144 2184 6 3 1 734 3 50 48 1 1 -"29425" 4 393 3 1 1 1 3 1 100 42 144 216 2825 7 6 1 560 3 50 72 1 1 -"29426" 4 393 3 1 1 1 3 1 100 42 216 54 1523 4 2 0 720 4 75 162 1 0 -"29427" 4 393 3 1 1 1 3 1 100 42 54 95 1972 1 8 0 1356 4 75 41 0 1 -"29428" 4 393 3 1 1 1 3 1 100 42 95 24 3833 5 7 1 689 4 75 71 0 0 -"29429" 4 393 3 1 1 1 3 1 100 42 24 42 1837 9 5 1 1108 4 75 18 1 1 -"29430" 4 393 3 1 1 1 4 1 100 725 100 150 3238 8 7 1 481 3 50 50 1 1 -"29431" 4 393 3 1 1 1 4 1 100 725 150 225 1720 3 10 0 700 3 50 75 0 1 -"29432" 4 393 3 1 1 1 4 1 100 725 225 169 1920 7 9 1 1275 2 25 56 0 0 -"29433" 4 393 3 1 1 1 4 1 100 725 169 330 1707 9 1 1 616 5 95 161 1 1 -"29434" 4 393 3 1 1 1 4 1 100 725 330 495 1664 2 3 0 599 3 50 165 0 1 -"29435" 4 393 3 1 1 1 4 1 100 725 495 619 2304 1 8 0 560 2 25 124 0 1 -"29436" 4 393 3 1 1 1 4 1 100 725 619 774 1618 5 4 1 707 2 25 155 1 1 -"29437" 4 393 3 1 1 1 4 1 100 725 774 580 2311 4 2 0 910 2 25 194 1 0 -"29438" 4 393 3 1 1 1 4 1 100 725 580 725 1944 6 3 1 497 2 25 145 1 1 -"29439" 4 393 4 0 1 0 1 1 100 220 100 150 2281 2 7 0 1123 3 50 50 0 1 -"29440" 4 393 4 0 1 0 1 1 100 220 150 293 8707 3 7 0 2410 1 95 143 0 1 -"29441" 4 393 4 0 1 0 1 1 100 220 293 440 1765 2 8 0 1127 3 50 147 0 1 -"29442" 4 393 4 0 1 0 1 1 100 220 440 220 2430 8 9 1 2680 3 50 220 0 0 -"29443" 4 393 5 1 1 0 1 1 100 55 100 195 1890 1 3 0 837 1 95 95 0 1 -"29444" 4 393 5 1 1 0 1 1 100 55 195 341 1371 6 5 1 543 2 75 146 1 1 -"29445" 4 393 5 1 1 0 1 1 100 55 341 597 1475 2 8 0 898 2 75 256 0 1 -"29446" 4 393 5 1 1 0 1 1 100 55 597 298 1341 8 9 1 435 3 50 299 0 0 -"29447" 4 393 5 1 1 0 1 1 100 55 298 581 1507 3 4 0 2752 1 95 283 0 1 -"29448" 4 393 5 1 1 0 1 1 100 55 581 290 2475 5 7 1 443 3 50 291 0 0 -"29449" 4 393 5 1 1 0 1 1 100 55 290 566 1955 7 4 1 1676 1 95 276 1 1 -"29450" 4 393 5 1 1 0 1 1 100 55 566 28 1946 4 1 0 675 1 95 538 1 0 -"29451" 4 393 5 1 1 0 1 1 100 55 28 55 1712 9 6 1 444 1 95 27 1 1 -"29452" 4 393 5 1 1 0 2 0 100 0 100 195 1702 8 1 1 795 1 95 95 1 1 -"29453" 4 393 5 1 1 0 2 0 100 0 195 97 1314 6 2 0 482 3 50 98 1 0 -"29454" 4 393 5 1 1 0 2 0 100 0 97 5 1208 7 9 1 556 1 95 92 0 0 -"29455" 4 393 5 1 1 0 2 0 100 0 5 10 1767 2 10 0 728 1 95 5 0 1 -"29456" 4 393 5 1 1 0 2 0 100 0 10 0 3515 5 3 0 449 1 95 10 1 0 -"29457" 4 393 5 1 1 0 3 0 100 0 100 195 2129 7 5 1 1001 1 95 95 1 1 -"29458" 4 393 5 1 1 0 3 0 100 0 195 10 1335 2 1 0 444 1 95 185 1 0 -"29459" 4 393 5 1 1 0 3 0 100 0 10 20 1220 8 6 1 352 1 95 10 1 1 -"29460" 4 393 5 1 1 0 3 0 100 0 20 39 1196 4 7 0 625 1 95 19 0 1 -"29461" 4 393 5 1 1 0 3 0 100 0 39 76 904 3 10 0 530 1 95 37 0 1 -"29462" 4 393 5 1 1 0 3 0 100 0 76 4 967 6 8 1 646 1 95 72 0 0 -"29463" 4 393 5 1 1 0 3 0 100 0 4 8 1172 9 2 1 362 1 95 4 1 1 -"29464" 4 393 5 1 1 0 3 0 100 0 8 0 1230 5 3 0 417 1 95 8 1 0 -"29465" 4 393 5 1 1 0 4 1 100 2681 100 195 12804 2 3 0 2121 1 95 95 0 1 -"29466" 4 393 5 1 1 0 4 1 100 2681 195 380 1017 7 4 1 742 1 95 185 1 1 -"29467" 4 393 5 1 1 0 4 1 100 2681 380 190 1015 3 1 0 469 3 50 190 1 0 -"29468" 4 393 5 1 1 0 4 1 100 2681 190 371 1123 1 9 0 659 1 95 181 0 1 -"29469" 4 393 5 1 1 0 4 1 100 2681 371 723 1545 8 7 1 1437 1 95 352 1 1 -"29470" 4 393 5 1 1 0 4 1 100 2681 723 1410 1134 9 2 1 445 1 95 687 1 1 -"29471" 4 393 5 1 1 0 4 1 100 2681 1410 2750 1285 5 6 0 613 1 95 1340 0 1 -"29472" 4 393 5 1 1 0 4 1 100 2681 2750 1375 4174 6 8 1 469 3 50 1375 0 0 -"29473" 4 393 5 1 1 0 4 1 100 2681 1375 2681 1604 4 7 0 1365 1 95 1306 0 1 -"29474" 4 401 2 0 1 1 1 1 100 185 100 125 12727 8 3 1 1182 2 25 25 1 1 -"29475" 4 401 2 0 1 1 1 
1 100 185 125 156 4339 3 7 0 1571 2 25 31 0 1 -"29476" 4 401 2 0 1 1 1 1 100 185 156 195 4925 8 2 1 1065 2 25 39 1 1 -"29477" 4 401 2 0 1 1 1 1 100 185 195 185 3795 2 1 0 3220 1 5 10 1 0 -"29478" 4 401 3 1 1 1 1 1 100 275 100 105 3165 9 7 1 1651 1 5 5 1 1 -"29479" 4 401 3 1 1 1 1 1 100 275 105 110 5266 4 8 0 1284 1 5 5 0 1 -"29480" 4 401 3 1 1 1 1 1 100 275 110 138 5413 8 2 1 1313 2 25 28 1 1 -"29481" 4 401 3 1 1 1 1 1 100 275 138 103 2231 2 1 0 2099 2 25 35 1 0 -"29482" 4 401 3 1 1 1 1 1 100 275 103 129 4340 7 6 1 1403 2 25 26 1 1 -"29483" 4 401 3 1 1 1 1 1 100 275 129 97 2318 5 3 0 1405 2 25 32 1 0 -"29484" 4 401 3 1 1 1 1 1 100 275 97 146 5440 3 6 0 1235 3 50 49 0 1 -"29485" 4 401 3 1 1 1 1 1 100 275 146 183 2661 6 9 0 1204 2 25 37 0 1 -"29486" 4 401 3 1 1 1 1 1 100 275 183 275 4648 1 4 0 969 3 50 92 0 1 -"29487" 4 401 3 1 1 1 2 1 100 506 100 150 3529 2 9 0 2215 3 50 50 0 1 -"29488" 4 401 3 1 1 1 2 1 100 506 150 225 2155 4 10 0 867 3 50 75 0 1 -"29489" 4 401 3 1 1 1 2 1 100 506 225 112 3143 3 1 0 1111 3 50 113 1 0 -"29490" 4 401 3 1 1 1 2 1 100 506 112 168 2951 8 6 1 2768 3 50 56 1 1 -"29491" 4 401 3 1 1 1 2 1 100 506 168 126 2146 5 7 1 1641 2 25 42 0 0 -"29492" 4 401 3 1 1 1 2 1 100 506 126 94 4240 6 8 1 2548 2 25 32 0 0 -"29493" 4 401 3 1 1 1 2 1 100 506 94 165 3840 7 5 1 1527 4 75 71 1 1 -"29494" 4 401 3 1 1 1 2 1 100 506 165 289 1723 1 10 0 863 4 75 124 0 1 -"29495" 4 401 3 1 1 1 2 1 100 506 289 506 1955 9 3 1 1556 4 75 217 1 1 -"29496" 4 401 3 1 1 1 3 1 100 62 100 195 3253 3 5 0 1648 5 95 95 0 1 -"29497" 4 401 3 1 1 1 3 1 100 62 195 49 1670 8 9 1 1013 4 75 146 0 0 -"29498" 4 401 3 1 1 1 3 1 100 62 49 96 2770 2 4 0 0 5 95 47 0 1 -"29499" 4 401 3 1 1 1 3 1 100 62 96 187 3667 6 3 1 0 5 95 91 1 1 -"29500" 4 401 3 1 1 1 3 1 100 62 187 365 3131 7 6 1 0 5 95 178 1 1 -"29501" 4 401 3 1 1 1 3 1 100 62 365 18 3812 4 2 0 2380 5 95 347 1 0 -"29502" 4 401 3 1 1 1 3 1 100 62 18 27 1937 1 8 0 1907 3 50 9 0 1 -"29503" 4 401 3 1 1 1 3 1 100 62 27 41 3509 5 7 0 3660 3 50 14 0 1 -"29504" 4 401 3 1 1 1 3 1 100 62 41 62 1507 9 5 1 1169 3 50 21 1 1 -"29505" 4 401 3 1 1 1 4 1 100 402 100 150 3734 8 7 1 2642 3 50 50 1 1 -"29506" 4 401 3 1 1 1 4 1 100 402 150 225 2073 3 10 0 1500 3 50 75 0 1 -"29507" 4 401 3 1 1 1 4 1 100 402 225 169 2178 7 9 1 3096 2 25 56 0 0 -"29508" 4 401 3 1 1 1 4 1 100 402 169 254 1618 9 1 1 1043 3 50 85 1 1 -"29509" 4 401 3 1 1 1 4 1 100 402 254 381 1366 2 3 0 635 3 50 127 0 1 -"29510" 4 401 3 1 1 1 4 1 100 402 381 572 3251 1 8 0 918 3 50 191 0 1 -"29511" 4 401 3 1 1 1 4 1 100 402 572 715 1839 5 4 1 813 2 25 143 1 1 -"29512" 4 401 3 1 1 1 4 1 100 402 715 536 3380 4 2 0 969 2 25 179 1 0 -"29513" 4 401 3 1 1 1 4 1 100 402 536 402 2696 6 3 0 813 2 25 134 1 0 -"29514" 4 401 4 0 1 0 1 1 100 37 100 195 2606 2 7 0 2184 1 95 95 0 1 -"29515" 4 401 4 0 1 0 1 1 100 37 195 380 1621 3 7 0 973 1 95 185 0 1 -"29516" 4 401 4 0 1 0 1 1 100 37 380 741 1491 2 8 0 810 1 95 361 0 1 -"29517" 4 401 4 0 1 0 1 1 100 37 741 37 1216 8 9 1 828 1 95 704 0 0 -"29518" 4 401 5 1 1 0 1 1 100 18 100 195 1842 1 3 0 808 1 95 95 0 1 -"29519" 4 401 5 1 1 0 1 1 100 18 195 380 2127 6 5 1 759 1 95 185 1 1 -"29520" 4 401 5 1 1 0 1 1 100 18 380 741 1136 2 8 0 767 1 95 361 0 1 -"29521" 4 401 5 1 1 0 1 1 100 18 741 185 1184 8 9 1 2981 2 75 556 0 0 -"29522" 4 401 5 1 1 0 1 1 100 18 185 361 1316 3 4 0 2451 1 95 176 0 1 -"29523" 4 401 5 1 1 0 1 1 100 18 361 90 1404 5 7 1 623 2 75 271 0 0 -"29524" 4 401 5 1 1 0 1 1 100 18 90 176 1422 7 4 1 1352 1 95 86 1 1 -"29525" 4 401 5 1 1 0 1 1 100 18 176 9 1197 4 1 0 722 1 95 167 1 0 
-"29526" 4 401 5 1 1 0 1 1 100 18 9 18 1268 9 6 1 1421 1 95 9 1 1 -"29527" 4 401 5 1 1 0 2 1 100 31 100 195 1533 8 1 1 2817 1 95 95 1 1 -"29528" 4 401 5 1 1 0 2 1 100 31 195 380 1113 6 2 1 874 1 95 185 1 1 -"29529" 4 401 5 1 1 0 2 1 100 31 380 19 1413 7 9 1 719 1 95 361 0 0 -"29530" 4 401 5 1 1 0 2 1 100 31 19 37 1899 2 10 0 694 1 95 18 0 1 -"29531" 4 401 5 1 1 0 2 1 100 31 37 2 1058 5 3 0 685 1 95 35 1 0 -"29532" 4 401 5 1 1 0 2 1 100 31 2 4 1226 4 2 1 590 1 95 2 1 1 -"29533" 4 401 5 1 1 0 2 1 100 31 4 8 1167 3 5 0 660 1 95 4 0 1 -"29534" 4 401 5 1 1 0 2 1 100 31 8 16 1474 9 4 1 681 1 95 8 1 1 -"29535" 4 401 5 1 1 0 2 1 100 31 16 31 1158 1 7 0 1053 1 95 15 0 1 -"29536" 4 401 5 1 1 0 3 0 100 1 100 195 2748 7 5 1 766 1 95 95 1 1 -"29537" 4 401 5 1 1 0 3 0 100 1 195 10 1527 2 1 0 3676 1 95 185 1 0 -"29538" 4 401 5 1 1 0 3 0 100 1 10 18 2139 8 6 1 827 2 75 8 1 1 -"29539" 4 401 5 1 1 0 3 0 100 1 18 1 2809 4 7 1 721 1 95 17 0 0 -"29540" 4 401 5 1 1 0 4 1 100 14 100 195 1458 2 3 0 882 1 95 95 0 1 -"29541" 4 401 5 1 1 0 4 1 100 14 195 380 1373 7 4 1 2257 1 95 185 1 1 -"29542" 4 401 5 1 1 0 4 1 100 14 380 95 1131 3 1 0 1179 2 75 285 1 0 -"29543" 4 401 5 1 1 0 4 1 100 14 95 185 1296 1 9 0 2304 1 95 90 0 1 -"29544" 4 401 5 1 1 0 4 1 100 14 185 324 2408 8 7 1 739 2 75 139 1 1 -"29545" 4 401 5 1 1 0 4 1 100 14 324 567 1325 9 2 1 780 2 75 243 1 1 -"29546" 4 401 5 1 1 0 4 1 100 14 567 142 1492 5 6 1 754 2 75 425 0 0 -"29547" 4 401 5 1 1 0 4 1 100 14 142 7 1604 6 8 1 877 1 95 135 0 0 -"29548" 4 401 5 1 1 0 4 1 100 14 7 14 1056 4 7 0 739 1 95 7 0 1 -"29549" 4 413 2 0 1 1 1 1 100 176 100 150 6138 8 3 1 2524 3 50 50 1 1 -"29550" 4 413 2 0 1 1 1 1 100 176 150 188 12633 3 7 0 2494 2 25 38 0 1 -"29551" 4 413 2 0 1 1 1 1 100 176 188 235 2004 8 2 1 1411 2 25 47 1 1 -"29552" 4 413 2 0 1 1 1 1 100 176 235 176 1701 2 1 0 1346 2 25 59 1 0 -"29553" 4 413 3 1 1 1 1 1 100 524 100 195 7287 9 7 1 1075 5 95 95 1 1 -"29554" 4 413 3 1 1 1 1 1 100 524 195 205 3060 4 8 0 617 1 5 10 0 1 -"29555" 4 413 3 1 1 1 1 1 100 524 205 256 1737 8 2 1 748 2 25 51 1 1 -"29556" 4 413 3 1 1 1 1 1 100 524 256 269 1310 2 1 1 557 1 5 13 1 1 -"29557" 4 413 3 1 1 1 1 1 100 524 269 336 1512 7 6 1 669 2 25 67 1 1 -"29558" 4 413 3 1 1 1 1 1 100 524 336 353 2107 5 3 1 1100 1 5 17 1 1 -"29559" 4 413 3 1 1 1 1 1 100 524 353 441 1485 3 6 0 788 2 25 88 0 1 -"29560" 4 413 3 1 1 1 1 1 100 524 441 419 1700 6 9 1 1081 1 5 22 0 0 -"29561" 4 413 3 1 1 1 1 1 100 524 419 524 1258 1 4 0 796 2 25 105 0 1 -"29562" 4 413 3 1 1 1 2 1 100 240 100 125 2917 2 9 0 1416 2 25 25 0 1 -"29563" 4 413 3 1 1 1 2 1 100 240 125 119 1146 4 10 1 1785 1 5 6 0 0 -"29564" 4 413 3 1 1 1 2 1 100 240 119 89 1289 3 1 0 705 2 25 30 1 0 -"29565" 4 413 3 1 1 1 2 1 100 240 89 174 1733 8 6 1 0 5 95 85 1 1 -"29566" 4 413 3 1 1 1 2 1 100 240 174 183 2128 5 7 0 928 1 5 9 0 1 -"29567" 4 413 3 1 1 1 2 1 100 240 183 174 1689 6 8 1 1974 1 5 9 0 0 -"29568" 4 413 3 1 1 1 2 1 100 240 174 183 1117 7 5 1 1973 1 5 9 1 1 -"29569" 4 413 3 1 1 1 2 1 100 240 183 192 1277 1 10 0 614 1 5 9 0 1 -"29570" 4 413 3 1 1 1 2 1 100 240 192 240 1798 9 3 1 854 2 25 48 1 1 -"29571" 4 413 3 1 1 1 3 1 100 116 100 105 1992 3 5 0 502 1 5 5 0 1 -"29572" 4 413 3 1 1 1 3 1 100 116 105 100 1012 8 9 1 1418 1 5 5 0 0 -"29573" 4 413 3 1 1 1 3 1 100 116 100 105 1283 2 4 0 1228 1 5 5 0 1 -"29574" 4 413 3 1 1 1 3 1 100 116 105 110 1007 6 3 1 1220 1 5 5 1 1 -"29575" 4 413 3 1 1 1 3 1 100 116 110 116 1496 7 6 1 582 1 5 6 1 1 -"29576" 4 413 3 1 1 1 3 1 100 116 116 110 1088 4 2 0 562 1 5 6 1 0 -"29577" 4 413 3 1 1 1 3 1 100 116 110 116 788 
1 8 0 572 1 5 6 0 1 -"29578" 4 413 3 1 1 1 3 1 100 116 116 110 918 5 7 1 912 1 5 6 0 0 -"29579" 4 413 3 1 1 1 3 1 100 116 110 116 849 9 5 1 670 1 5 6 1 1 -"29580" 4 413 3 1 1 1 4 1 100 125 100 105 1721 8 7 1 781 1 5 5 1 1 -"29581" 4 413 3 1 1 1 4 1 100 125 105 110 717 3 10 0 800 1 5 5 0 1 -"29582" 4 413 3 1 1 1 4 1 100 125 110 104 1215 7 9 1 948 1 5 6 0 0 -"29583" 4 413 3 1 1 1 4 1 100 125 104 109 780 9 1 1 444 1 5 5 1 1 -"29584" 4 413 3 1 1 1 4 1 100 125 109 114 700 2 3 0 840 1 5 5 0 1 -"29585" 4 413 3 1 1 1 4 1 100 125 114 108 877 1 8 1 337 1 5 6 0 0 -"29586" 4 413 3 1 1 1 4 1 100 125 108 113 1134 5 4 1 1061 1 5 5 1 1 -"29587" 4 413 3 1 1 1 4 1 100 125 113 119 1064 4 2 1 435 1 5 6 1 1 -"29588" 4 413 3 1 1 1 4 1 100 125 119 125 812 6 3 1 939 1 5 6 1 1 -"29589" 4 413 4 0 1 0 1 1 100 23 100 150 4573 2 7 0 489 3 50 50 0 1 -"29590" 4 413 4 0 1 0 1 1 100 23 150 263 2951 3 7 0 920 2 75 113 0 1 -"29591" 4 413 4 0 1 0 1 1 100 23 263 460 1298 2 8 0 554 2 75 197 0 1 -"29592" 4 413 4 0 1 0 1 1 100 23 460 23 1269 8 9 1 1062 1 95 437 0 0 -"29593" 4 413 5 1 1 0 1 0 100 0 100 195 4121 1 3 0 403 1 95 95 0 1 -"29594" 4 413 5 1 1 0 1 0 100 0 195 380 908 6 5 1 943 1 95 185 1 1 -"29595" 4 413 5 1 1 0 1 0 100 0 380 741 863 2 8 0 484 1 95 361 0 1 -"29596" 4 413 5 1 1 0 1 0 100 0 741 37 858 8 9 1 599 1 95 704 0 0 -"29597" 4 413 5 1 1 0 1 0 100 0 37 72 2136 3 4 0 520 1 95 35 0 1 -"29598" 4 413 5 1 1 0 1 0 100 0 72 4 1036 5 7 1 856 1 95 68 0 0 -"29599" 4 413 5 1 1 0 1 0 100 0 4 8 860 7 4 1 489 1 95 4 1 1 -"29600" 4 413 5 1 1 0 1 0 100 0 8 0 1239 4 1 0 458 1 95 8 1 0 -"29601" 4 413 5 1 1 0 2 1 100 31 100 195 2238 8 1 1 2329 1 95 95 1 1 -"29602" 4 413 5 1 1 0 2 1 100 31 195 380 1192 6 2 1 532 1 95 185 1 1 -"29603" 4 413 5 1 1 0 2 1 100 31 380 19 849 7 9 1 349 1 95 361 0 0 -"29604" 4 413 5 1 1 0 2 1 100 31 19 37 802 2 10 0 778 1 95 18 0 1 -"29605" 4 413 5 1 1 0 2 1 100 31 37 72 986 5 3 1 960 1 95 35 1 1 -"29606" 4 413 5 1 1 0 2 1 100 31 72 4 847 4 2 0 372 1 95 68 1 0 -"29607" 4 413 5 1 1 0 2 1 100 31 4 8 984 3 5 0 366 1 95 4 0 1 -"29608" 4 413 5 1 1 0 2 1 100 31 8 16 802 9 4 1 369 1 95 8 1 1 -"29609" 4 413 5 1 1 0 2 1 100 31 16 31 669 1 7 0 1010 1 95 15 0 1 -"29610" 4 413 5 1 1 0 3 0 100 0 100 195 1347 7 5 1 385 1 95 95 1 1 -"29611" 4 413 5 1 1 0 3 0 100 0 195 10 756 2 1 0 422 1 95 185 1 0 -"29612" 4 413 5 1 1 0 3 0 100 0 10 20 807 8 6 1 734 1 95 10 1 1 -"29613" 4 413 5 1 1 0 3 0 100 0 20 39 601 4 7 0 434 1 95 19 0 1 -"29614" 4 413 5 1 1 0 3 0 100 0 39 76 1082 3 10 0 764 1 95 37 0 1 -"29615" 4 413 5 1 1 0 3 0 100 0 76 4 789 6 8 1 749 1 95 72 0 0 -"29616" 4 413 5 1 1 0 3 0 100 0 4 8 1343 9 2 1 740 1 95 4 1 1 -"29617" 4 413 5 1 1 0 3 0 100 0 8 0 1262 5 3 0 576 1 95 8 1 0 -"29618" 4 413 5 1 1 0 4 1 100 20 100 195 1306 2 3 0 538 1 95 95 0 1 -"29619" 4 413 5 1 1 0 4 1 100 20 195 380 805 7 4 1 404 1 95 185 1 1 -"29620" 4 413 5 1 1 0 4 1 100 20 380 19 614 3 1 0 334 1 95 361 1 0 -"29621" 4 413 5 1 1 0 4 1 100 20 19 37 612 1 9 0 429 1 95 18 0 1 -"29622" 4 413 5 1 1 0 4 1 100 20 37 72 781 8 7 1 846 1 95 35 1 1 -"29623" 4 413 5 1 1 0 4 1 100 20 72 140 954 9 2 1 386 1 95 68 1 1 -"29624" 4 413 5 1 1 0 4 1 100 20 140 210 1117 5 6 0 685 3 50 70 0 1 -"29625" 4 413 5 1 1 0 4 1 100 20 210 10 786 6 8 1 282 1 95 200 0 0 -"29626" 4 413 5 1 1 0 4 1 100 20 10 20 978 4 7 0 386 1 95 10 0 1 -"29627" 4 414 2 0 1 1 1 1 100 295 100 150 12353 8 3 1 59 3 50 50 1 1 -"29628" 4 414 2 0 1 1 1 1 100 295 150 225 4118 3 7 0 2192 3 50 75 0 1 -"29629" 4 414 2 0 1 1 1 1 100 295 225 281 2861 8 2 1 410 2 25 56 1 1 -"29630" 4 414 2 0 1 1 1 1 100 295 281 295 
3020 2 1 1 1218 1 5 14 1 1 -"29631" 4 414 3 1 1 1 1 1 100 1047 100 195 3010 9 7 1 0 5 95 95 1 1 -"29632" 4 414 3 1 1 1 1 1 100 1047 195 244 1448 4 8 0 1465 2 25 49 0 1 -"29633" 4 414 3 1 1 1 1 1 100 1047 244 366 2017 8 2 1 687 3 50 122 1 1 -"29634" 4 414 3 1 1 1 1 1 100 1047 366 384 1923 2 1 1 2109 1 5 18 1 1 -"29635" 4 414 3 1 1 1 1 1 100 1047 384 480 4198 7 6 1 1516 2 25 96 1 1 -"29636" 4 414 3 1 1 1 1 1 100 1047 480 504 3353 5 3 1 2599 1 5 24 1 1 -"29637" 4 414 3 1 1 1 1 1 100 1047 504 630 1376 3 6 0 909 2 25 126 0 1 -"29638" 4 414 3 1 1 1 1 1 100 1047 630 598 3438 6 9 1 1056 1 5 32 0 0 -"29639" 4 414 3 1 1 1 1 1 100 1047 598 1047 1859 1 4 0 1224 4 75 449 0 1 -"29640" 4 414 3 1 1 1 2 1 100 364 100 150 3260 2 9 0 614 3 50 50 0 1 -"29641" 4 414 3 1 1 1 2 1 100 364 150 225 1455 4 10 0 447 3 50 75 0 1 -"29642" 4 414 3 1 1 1 2 1 100 364 225 169 3335 3 1 0 4214 2 25 56 1 0 -"29643" 4 414 3 1 1 1 2 1 100 364 169 254 1643 8 6 1 1234 3 50 85 1 1 -"29644" 4 414 3 1 1 1 2 1 100 364 254 190 3525 5 7 1 2803 2 25 64 0 0 -"29645" 4 414 3 1 1 1 2 1 100 364 190 95 1547 6 8 1 687 3 50 95 0 0 -"29646" 4 414 3 1 1 1 2 1 100 364 95 166 1307 7 5 1 1611 4 75 71 1 1 -"29647" 4 414 3 1 1 1 2 1 100 364 166 291 1635 1 10 0 1036 4 75 125 0 1 -"29648" 4 414 3 1 1 1 2 1 100 364 291 364 1520 9 3 1 1637 2 25 73 1 1 -"29649" 4 414 3 1 1 1 3 1 100 368 100 175 2813 3 5 0 1756 4 75 75 0 1 -"29650" 4 414 3 1 1 1 3 1 100 368 175 87 1462 8 9 1 1046 3 50 88 0 0 -"29651" 4 414 3 1 1 1 3 1 100 368 87 91 2874 2 4 0 567 1 5 4 0 1 -"29652" 4 414 3 1 1 1 3 1 100 368 91 96 3465 6 3 1 442 1 5 5 1 1 -"29653" 4 414 3 1 1 1 3 1 100 368 96 168 2250 7 6 1 813 4 75 72 1 1 -"29654" 4 414 3 1 1 1 3 1 100 368 168 126 2020 4 2 0 626 2 25 42 1 0 -"29655" 4 414 3 1 1 1 3 1 100 368 126 221 2169 1 8 0 752 4 75 95 0 1 -"29656" 4 414 3 1 1 1 3 1 100 368 221 210 3658 5 7 1 561 1 5 11 0 0 -"29657" 4 414 3 1 1 1 3 1 100 368 210 368 1411 9 5 1 526 4 75 158 1 1 -"29658" 4 414 3 1 1 1 4 1 100 1927 100 175 3500 8 7 1 628 4 75 75 1 1 -"29659" 4 414 3 1 1 1 4 1 100 1927 175 263 4243 3 10 0 1455 3 50 88 0 1 -"29660" 4 414 3 1 1 1 4 1 100 1927 263 250 2575 7 9 1 498 1 5 13 0 0 -"29661" 4 414 3 1 1 1 4 1 100 1927 250 488 984 9 1 1 1463 5 95 238 1 1 -"29662" 4 414 3 1 1 1 4 1 100 1927 488 854 2190 2 3 0 1053 4 75 366 0 1 -"29663" 4 414 3 1 1 1 4 1 100 1927 854 1665 2032 1 8 0 2566 5 95 811 0 1 -"29664" 4 414 3 1 1 1 4 1 100 1927 1665 1748 1801 5 4 1 921 1 5 83 1 1 -"29665" 4 414 3 1 1 1 4 1 100 1927 1748 1835 1587 4 2 1 452 1 5 87 1 1 -"29666" 4 414 3 1 1 1 4 1 100 1927 1835 1927 1436 6 3 1 1613 1 5 92 1 1 -"29667" 4 414 4 0 1 0 1 1 100 37 100 195 8025 2 7 0 4200 1 95 95 0 1 -"29668" 4 414 4 0 1 0 1 1 100 37 195 380 4781 3 7 0 703 1 95 185 0 1 -"29669" 4 414 4 0 1 0 1 1 100 37 380 741 2315 2 8 0 1939 1 95 361 0 1 -"29670" 4 414 4 0 1 0 1 1 100 37 741 37 1195 8 9 1 336 1 95 704 0 0 -"29671" 4 414 5 1 1 0 1 1 100 68 100 195 1844 1 3 0 394 1 95 95 0 1 -"29672" 4 414 5 1 1 0 1 1 100 68 195 380 1980 6 5 1 1028 1 95 185 1 1 -"29673" 4 414 5 1 1 0 1 1 100 68 380 741 1712 2 8 0 356 1 95 361 0 1 -"29674" 4 414 5 1 1 0 1 1 100 68 741 370 2952 8 9 1 1313 3 50 371 0 0 -"29675" 4 414 5 1 1 0 1 1 100 68 370 722 2112 3 4 0 1081 1 95 352 0 1 -"29676" 4 414 5 1 1 0 1 1 100 68 722 361 2114 5 7 1 444 3 50 361 0 0 -"29677" 4 414 5 1 1 0 1 1 100 68 361 704 2076 7 4 1 262 1 95 343 1 1 -"29678" 4 414 5 1 1 0 1 1 100 68 704 35 1311 4 1 0 286 1 95 669 1 0 -"29679" 4 414 5 1 1 0 1 1 100 68 35 68 1326 9 6 1 372 1 95 33 1 1 -"29680" 4 414 5 1 1 0 2 0 100 0 100 195 1596 8 1 1 605 1 95 95 1 
1 -"29681" 4 414 5 1 1 0 2 0 100 0 195 380 1173 6 2 1 187 1 95 185 1 1 -"29682" 4 414 5 1 1 0 2 0 100 0 380 19 2623 7 9 1 240 1 95 361 0 0 -"29683" 4 414 5 1 1 0 2 0 100 0 19 37 1474 2 10 0 209 1 95 18 0 1 -"29684" 4 414 5 1 1 0 2 0 100 0 37 9 2484 5 3 0 1220 2 75 28 1 0 -"29685" 4 414 5 1 1 0 2 0 100 0 9 0 1167 4 2 0 245 1 95 9 1 0 -"29686" 4 414 5 1 1 0 3 0 100 1 100 195 2273 7 5 1 283 1 95 95 1 1 -"29687" 4 414 5 1 1 0 3 0 100 1 195 10 1203 2 1 0 247 1 95 185 1 0 -"29688" 4 414 5 1 1 0 3 0 100 1 10 20 1312 8 6 1 1288 1 95 10 1 1 -"29689" 4 414 5 1 1 0 3 0 100 1 20 1 2472 4 7 1 240 1 95 19 0 0 -"29690" 4 414 5 1 1 0 4 1 100 1544 100 195 1611 2 3 0 948 1 95 95 0 1 -"29691" 4 414 5 1 1 0 4 1 100 1544 195 380 1324 7 4 1 611 1 95 185 1 1 -"29692" 4 414 5 1 1 0 4 1 100 1544 380 285 2131 3 1 0 644 4 25 95 1 0 -"29693" 4 414 5 1 1 0 4 1 100 1544 285 556 1386 1 9 0 725 1 95 271 0 1 -"29694" 4 414 5 1 1 0 4 1 100 1544 556 1084 1232 8 7 1 267 1 95 528 1 1 -"29695" 4 414 5 1 1 0 4 1 100 1544 1084 2114 987 9 2 1 274 1 95 1030 1 1 -"29696" 4 414 5 1 1 0 4 1 100 1544 2114 1585 2618 5 6 1 3131 4 25 529 0 0 -"29697" 4 414 5 1 1 0 4 1 100 1544 1585 792 1675 6 8 1 841 3 50 793 0 0 -"29698" 4 414 5 1 1 0 4 1 100 1544 792 1544 1814 4 7 0 422 1 95 752 0 1 -"29699" 4 416 2 0 1 1 1 1 100 247 100 150 3968 8 3 1 946 3 50 50 1 1 -"29700" 4 416 2 0 1 1 1 1 100 247 150 263 7850 3 7 0 619 4 75 113 0 1 -"29701" 4 416 2 0 1 1 1 1 100 247 263 329 1514 8 2 1 880 2 25 66 1 1 -"29702" 4 416 2 0 1 1 1 1 100 247 329 247 1283 2 1 0 772 2 25 82 1 0 -"29703" 4 416 3 1 1 1 1 1 100 173 100 150 6927 9 7 1 2035 3 50 50 1 1 -"29704" 4 416 3 1 1 1 1 1 100 173 150 225 1535 4 8 0 741 3 50 75 0 1 -"29705" 4 416 3 1 1 1 1 1 100 173 225 236 1750 8 2 1 1273 1 5 11 1 1 -"29706" 4 416 3 1 1 1 1 1 100 173 236 177 682 2 1 0 543 2 25 59 1 0 -"29707" 4 416 3 1 1 1 1 1 100 173 177 168 866 7 6 0 950 1 5 9 1 0 -"29708" 4 416 3 1 1 1 1 1 100 173 168 210 46 5 3 1 914 2 25 42 1 1 -"29709" 4 416 3 1 1 1 1 1 100 173 210 199 2931 3 6 1 861 1 5 11 0 0 -"29710" 4 416 3 1 1 1 1 1 100 173 199 99 2529 6 9 1 726 3 50 100 0 0 -"29711" 4 416 3 1 1 1 1 1 100 173 99 173 513 1 4 0 654 4 75 74 0 1 -"29712" 4 416 3 1 1 1 2 1 100 497 100 195 4322 2 9 0 859 5 95 95 0 1 -"29713" 4 416 3 1 1 1 2 1 100 497 195 244 3167 4 10 0 1141 2 25 49 0 1 -"29714" 4 416 3 1 1 1 2 1 100 497 244 256 2292 3 1 1 573 1 5 12 1 1 -"29715" 4 416 3 1 1 1 2 1 100 497 256 320 2015 8 6 1 1332 2 25 64 1 1 -"29716" 4 416 3 1 1 1 2 1 100 497 320 336 2898 5 7 0 677 1 5 16 0 1 -"29717" 4 416 3 1 1 1 2 1 100 497 336 252 2306 6 8 1 491 2 25 84 0 0 -"29718" 4 416 3 1 1 1 2 1 100 497 252 378 1784 7 5 1 842 3 50 126 1 1 -"29719" 4 416 3 1 1 1 2 1 100 497 378 473 2302 1 10 0 767 2 25 95 0 1 -"29720" 4 416 3 1 1 1 2 1 100 497 473 497 2456 9 3 1 1217 1 5 24 1 1 -"29721" 4 416 3 1 1 1 3 1 100 174 100 195 3258 3 5 0 910 5 95 95 0 1 -"29722" 4 416 3 1 1 1 3 1 100 174 195 49 1647 8 9 1 695 4 75 146 0 0 -"29723" 4 416 3 1 1 1 3 1 100 174 49 74 1099 2 4 0 2149 3 50 25 0 1 -"29724" 4 416 3 1 1 1 3 1 100 174 74 130 2500 6 3 1 2003 4 75 56 1 1 -"29725" 4 416 3 1 1 1 3 1 100 174 130 123 3433 7 6 0 428 1 5 7 1 0 -"29726" 4 416 3 1 1 1 3 1 100 174 123 92 1669 4 2 0 667 2 25 31 1 0 -"29727" 4 416 3 1 1 1 3 1 100 174 92 179 1337 1 8 0 1567 5 95 87 0 1 -"29728" 4 416 3 1 1 1 3 1 100 174 179 89 2578 5 7 1 1005 3 50 90 0 0 -"29729" 4 416 3 1 1 1 3 1 100 174 89 174 1618 9 5 1 1060 5 95 85 1 1 -"29730" 4 416 3 1 1 1 4 1 100 648 100 195 2898 8 7 1 1158 5 95 95 1 1 -"29731" 4 416 3 1 1 1 4 1 100 648 195 341 1716 3 10 0 1785 4 75 
146 0 1 -"29732" 4 416 3 1 1 1 4 1 100 648 341 170 1433 7 9 1 851 3 50 171 0 0 -"29733" 4 416 3 1 1 1 4 1 100 648 170 332 1808 9 1 1 752 5 95 162 1 1 -"29734" 4 416 3 1 1 1 4 1 100 648 332 415 1657 2 3 0 1056 2 25 83 0 1 -"29735" 4 416 3 1 1 1 4 1 100 648 415 519 2626 1 8 0 1059 2 25 104 0 1 -"29736" 4 416 3 1 1 1 4 1 100 648 519 545 2022 5 4 1 436 1 5 26 1 1 -"29737" 4 416 3 1 1 1 4 1 100 648 545 518 1964 4 2 0 932 1 5 27 1 0 -"29738" 4 416 3 1 1 1 4 1 100 648 518 648 1253 6 3 1 575 2 25 130 1 1 -"29739" 4 416 4 0 1 0 1 1 100 285 100 150 3470 2 7 0 590 3 50 50 0 1 -"29740" 4 416 4 0 1 0 1 1 100 285 150 293 8943 3 7 0 1364 1 95 143 0 1 -"29741" 4 416 4 0 1 0 1 1 100 285 293 571 2496 2 8 0 1270 1 95 278 0 1 -"29742" 4 416 4 0 1 0 1 1 100 285 571 285 1447 8 9 1 1032 3 50 286 0 0 -"29743" 4 416 5 1 1 0 1 1 100 2634 100 195 2453 1 3 0 502 1 95 95 0 1 -"29744" 4 416 5 1 1 0 1 1 100 2634 195 341 2356 6 5 1 674 2 75 146 1 1 -"29745" 4 416 5 1 1 0 1 1 100 2634 341 665 1559 2 8 0 1234 1 95 324 0 1 -"29746" 4 416 5 1 1 0 1 1 100 2634 665 499 1340 8 9 1 899 4 25 166 0 0 -"29747" 4 416 5 1 1 0 1 1 100 2634 499 973 1133 3 4 0 492 1 95 474 0 1 -"29748" 4 416 5 1 1 0 1 1 100 2634 973 924 3362 5 7 1 1064 5 5 49 0 0 -"29749" 4 416 5 1 1 0 1 1 100 2634 924 1802 1755 7 4 1 1109 1 95 878 1 1 -"29750" 4 416 5 1 1 0 1 1 100 2634 1802 1351 1485 4 1 0 1316 4 25 451 1 0 -"29751" 4 416 5 1 1 0 1 1 100 2634 1351 2634 1595 9 6 1 421 1 95 1283 1 1 -"29752" 4 416 5 1 1 0 2 0 100 0 100 195 2920 8 1 1 619 1 95 95 1 1 -"29753" 4 416 5 1 1 0 2 0 100 0 195 341 4363 6 2 1 1041 2 75 146 1 1 -"29754" 4 416 5 1 1 0 2 0 100 0 341 17 1516 7 9 1 514 1 95 324 0 0 -"29755" 4 416 5 1 1 0 2 0 100 0 17 33 1022 2 10 0 995 1 95 16 0 1 -"29756" 4 416 5 1 1 0 2 0 100 0 33 2 1273 5 3 0 538 1 95 31 1 0 -"29757" 4 416 5 1 1 0 2 0 100 0 2 0 1221 4 2 0 617 1 95 2 1 0 -"29758" 4 416 5 1 1 0 3 0 100 0 100 195 3078 7 5 1 442 1 95 95 1 1 -"29759" 4 416 5 1 1 0 3 0 100 0 195 10 1162 2 1 0 944 1 95 185 1 0 -"29760" 4 416 5 1 1 0 3 0 100 0 10 20 1568 8 6 1 476 1 95 10 1 1 -"29761" 4 416 5 1 1 0 3 0 100 0 20 39 1714 4 7 0 1006 1 95 19 0 1 -"29762" 4 416 5 1 1 0 3 0 100 0 39 76 1332 3 10 0 650 1 95 37 0 1 -"29763" 4 416 5 1 1 0 3 0 100 0 76 4 1793 6 8 1 785 1 95 72 0 0 -"29764" 4 416 5 1 1 0 3 0 100 0 4 8 1033 9 2 1 1373 1 95 4 1 1 -"29765" 4 416 5 1 1 0 3 0 100 0 8 0 1529 5 3 0 598 1 95 8 1 0 -"29766" 4 416 5 1 1 0 4 1 100 4 100 25 1492 2 3 1 3747 2 75 75 0 0 -"29767" 4 416 5 1 1 0 4 1 100 4 25 49 1685 7 4 1 563 1 95 24 1 1 -"29768" 4 416 5 1 1 0 4 1 100 4 49 2 1149 3 1 0 549 1 95 47 1 0 -"29769" 4 416 5 1 1 0 4 1 100 4 2 4 1504 1 9 0 1382 1 95 2 0 1 -"29770" 4 416 5 1 1 0 4 1 100 4 4 8 1297 8 7 1 793 1 95 4 1 1 -"29771" 4 416 5 1 1 0 4 1 100 4 8 16 2000 9 2 1 583 1 95 8 1 1 -"29772" 4 416 5 1 1 0 4 1 100 4 16 31 1528 5 6 0 1133 1 95 15 0 1 -"29773" 4 416 5 1 1 0 4 1 100 4 31 2 1493 6 8 1 659 1 95 29 0 0 -"29774" 4 416 5 1 1 0 4 1 100 4 2 4 1129 4 7 0 721 1 95 2 0 1 -"29775" 4 420 2 0 1 1 1 1 100 185 100 125 12750 8 3 1 2021 2 25 25 1 1 -"29776" 4 420 2 0 1 1 1 1 100 185 125 156 12185 3 7 0 892 2 25 31 0 1 -"29777" 4 420 2 0 1 1 1 1 100 185 156 148 2510 8 2 0 1437 1 5 8 1 0 -"29778" 4 420 2 0 1 1 1 1 100 185 148 185 1750 2 1 1 760 2 25 37 1 1 -"29779" 4 420 3 1 1 1 1 1 100 33 100 75 6372 9 7 0 1130 2 25 25 1 0 -"29780" 4 420 3 1 1 1 1 1 100 33 75 113 3290 4 8 0 1683 3 50 38 0 1 -"29781" 4 420 3 1 1 1 1 1 100 33 113 220 2504 8 2 1 0 5 95 107 1 1 -"29782" 4 420 3 1 1 1 1 1 100 33 220 11 10678 2 1 0 0 5 95 209 1 0 -"29783" 4 420 3 1 1 1 1 1 100 33 11 17 
6599 7 6 1 3027 3 50 6 1 1 -"29784" 4 420 3 1 1 1 1 1 100 33 17 26 2880 5 3 1 1861 3 50 9 1 1 -"29785" 4 420 3 1 1 1 1 1 100 33 26 39 2954 3 6 0 2099 3 50 13 0 1 -"29786" 4 420 3 1 1 1 1 1 100 33 39 19 1580 6 9 1 1351 3 50 20 0 0 -"29787" 4 420 3 1 1 1 1 1 100 33 19 33 2588 1 4 0 1519 4 75 14 0 1 -"29788" 4 420 3 1 1 1 2 1 100 153 100 150 3405 2 9 0 602 3 50 50 0 1 -"29789" 4 420 3 1 1 1 2 1 100 153 150 112 10731 4 10 1 1394 2 25 38 0 0 -"29790" 4 420 3 1 1 1 2 1 100 153 112 106 4020 3 1 0 1483 1 5 6 1 0 -"29791" 4 420 3 1 1 1 2 1 100 153 106 159 2633 8 6 1 1850 3 50 53 1 1 -"29792" 4 420 3 1 1 1 2 1 100 153 159 79 4959 5 7 1 1465 3 50 80 0 0 -"29793" 4 420 3 1 1 1 2 1 100 153 79 39 3048 6 8 1 1445 3 50 40 0 0 -"29794" 4 420 3 1 1 1 2 1 100 153 39 68 2949 7 5 1 1559 4 75 29 1 1 -"29795" 4 420 3 1 1 1 2 1 100 153 68 102 2314 1 10 0 778 3 50 34 0 1 -"29796" 4 420 3 1 1 1 2 1 100 153 102 153 3490 9 3 1 762 3 50 51 1 1 -"29797" 4 420 3 1 1 1 3 1 100 259 100 50 2505 3 5 1 1576 3 50 50 0 0 -"29798" 4 420 3 1 1 1 3 1 100 259 50 75 2194 8 9 0 3320 3 50 25 0 1 -"29799" 4 420 3 1 1 1 3 1 100 259 75 146 2919 2 4 0 2082 5 95 71 0 1 -"29800" 4 420 3 1 1 1 3 1 100 259 146 73 3268 6 3 0 2254 3 50 73 1 0 -"29801" 4 420 3 1 1 1 3 1 100 259 73 18 2520 7 6 0 2158 4 75 55 1 0 -"29802" 4 420 3 1 1 1 3 1 100 259 18 35 4004 4 2 1 0 5 95 17 1 1 -"29803" 4 420 3 1 1 1 3 1 100 259 35 68 5004 1 8 0 0 5 95 33 0 1 -"29804" 4 420 3 1 1 1 3 1 100 259 68 133 4755 5 7 0 2231 5 95 65 0 1 -"29805" 4 420 3 1 1 1 3 1 100 259 133 259 7715 9 5 1 0 5 95 126 1 1 -"29806" 4 420 3 1 1 1 4 0 100 0 100 5 2005 8 7 0 0 5 95 95 1 0 -"29807" 4 420 3 1 1 1 4 0 100 0 5 0 4139 3 10 1 0 5 95 5 0 0 -"29808" 4 420 4 0 1 0 1 0 100 1 100 25 1996 2 7 1 3995 2 75 75 0 0 -"29809" 4 420 4 0 1 0 1 0 100 1 25 1 2090 3 7 1 1732 1 95 24 0 0 -"29810" 4 420 5 1 1 0 1 0 100 0 100 5 1524 1 3 1 1225 1 95 95 0 0 -"29811" 4 420 5 1 1 0 1 0 100 0 5 0 2331 6 5 0 786 1 95 5 1 0 -"29812" 4 420 5 1 1 0 2 0 100 1 100 5 2283 8 1 0 620 1 95 95 1 0 -"29813" 4 420 5 1 1 0 2 0 100 1 5 10 1660 6 2 1 538 1 95 5 1 1 -"29814" 4 420 5 1 1 0 2 0 100 1 10 20 3686 7 9 0 1020 1 95 10 0 1 -"29815" 4 420 5 1 1 0 2 0 100 1 20 1 2553 2 10 1 1541 1 95 19 0 0 -"29816" 4 420 5 1 1 0 3 0 100 0 100 5 1613 7 5 0 1334 1 95 95 1 0 -"29817" 4 420 5 1 1 0 3 0 100 0 5 10 2043 2 1 1 968 1 95 5 1 1 -"29818" 4 420 5 1 1 0 3 0 100 0 10 0 2175 8 6 0 1077 1 95 10 1 0 -"29819" 4 420 5 1 1 0 4 1 100 40743 100 195 2740 2 3 0 727 1 95 95 0 1 -"29820" 4 420 5 1 1 0 4 1 100 40743 195 380 1602 7 4 1 819 1 95 185 1 1 -"29821" 4 420 5 1 1 0 4 1 100 40743 380 741 2442 3 1 1 1204 1 95 361 1 1 -"29822" 4 420 5 1 1 0 4 1 100 40743 741 1445 2024 1 9 0 749 1 95 704 0 1 -"29823" 4 420 5 1 1 0 4 1 100 40743 1445 2818 1574 8 7 1 1113 1 95 1373 1 1 -"29824" 4 420 5 1 1 0 4 1 100 40743 2818 5495 4321 9 2 1 1153 1 95 2677 1 1 -"29825" 4 420 5 1 1 0 4 1 100 40743 5495 10715 4888 5 6 0 1141 1 95 5220 0 1 -"29826" 4 420 5 1 1 0 4 1 100 40743 10715 20894 3092 6 8 0 967 1 95 10179 0 1 -"29827" 4 420 5 1 1 0 4 1 100 40743 20894 40743 4326 4 7 0 797 1 95 19849 0 1 -"29828" 4 424 2 0 1 1 1 1 100 140 100 150 3754 8 3 1 1501 3 50 50 1 1 -"29829" 4 424 2 0 1 1 1 1 100 140 150 225 10594 3 7 0 984 3 50 75 0 1 -"29830" 4 424 2 0 1 1 1 1 100 140 225 281 1742 8 2 1 993 2 25 56 1 1 -"29831" 4 424 2 0 1 1 1 1 100 140 281 140 3783 2 1 0 1633 3 50 141 1 0 -"29832" 4 424 3 1 1 1 1 1 100 495 100 150 4654 9 7 1 1134 3 50 50 1 1 -"29833" 4 424 3 1 1 1 1 1 100 495 150 188 1262 4 8 0 2223 2 25 38 0 1 -"29834" 4 424 3 1 1 1 1 1 100 495 188 282 
1075 8 2 1 790 3 50 94 1 1 -"29835" 4 424 3 1 1 1 1 1 100 495 282 211 933 2 1 0 1638 2 25 71 1 0 -"29836" 4 424 3 1 1 1 1 1 100 495 211 264 2125 7 6 1 1389 2 25 53 1 1 -"29837" 4 424 3 1 1 1 1 1 100 495 264 251 1841 5 3 0 638 1 5 13 1 0 -"29838" 4 424 3 1 1 1 1 1 100 495 251 377 1525 3 6 0 850 3 50 126 0 1 -"29839" 4 424 3 1 1 1 1 1 100 495 377 283 1543 6 9 1 4491 2 25 94 0 0 -"29840" 4 424 3 1 1 1 1 1 100 495 283 495 1323 1 4 0 2410 4 75 212 0 1 -"29841" 4 424 3 1 1 1 2 1 100 709 100 150 3047 2 9 0 2260 3 50 50 0 1 -"29842" 4 424 3 1 1 1 2 1 100 709 150 158 1230 4 10 0 1289 1 5 8 0 1 -"29843" 4 424 3 1 1 1 2 1 100 709 158 118 1321 3 1 0 1672 2 25 40 1 0 -"29844" 4 424 3 1 1 1 2 1 100 709 118 207 1237 8 6 1 1224 4 75 89 1 1 -"29845" 4 424 3 1 1 1 2 1 100 709 207 155 1308 5 7 1 1267 2 25 52 0 0 -"29846" 4 424 3 1 1 1 2 1 100 709 155 194 2132 6 8 0 980 2 25 39 0 1 -"29847" 4 424 3 1 1 1 2 1 100 709 194 291 2227 7 5 1 1942 3 50 97 1 1 -"29848" 4 424 3 1 1 1 2 1 100 709 291 567 1375 1 10 0 1428 5 95 276 0 1 -"29849" 4 424 3 1 1 1 2 1 100 709 567 709 2229 9 3 1 1560 2 25 142 1 1 -"29850" 4 424 3 1 1 1 3 1 100 536 100 125 1410 3 5 0 1453 2 25 25 0 1 -"29851" 4 424 3 1 1 1 3 1 100 536 125 62 997 8 9 1 868 3 50 63 0 0 -"29852" 4 424 3 1 1 1 3 1 100 536 62 93 1824 2 4 0 1015 3 50 31 0 1 -"29853" 4 424 3 1 1 1 3 1 100 536 93 163 1076 6 3 1 895 4 75 70 1 1 -"29854" 4 424 3 1 1 1 3 1 100 536 163 245 1001 7 6 1 863 3 50 82 1 1 -"29855" 4 424 3 1 1 1 3 1 100 536 245 233 859 4 2 0 1320 1 5 12 1 0 -"29856" 4 424 3 1 1 1 3 1 100 536 233 408 991 1 8 0 1205 4 75 175 0 1 -"29857" 4 424 3 1 1 1 3 1 100 536 408 306 1794 5 7 1 2730 2 25 102 0 0 -"29858" 4 424 3 1 1 1 3 1 100 536 306 536 1205 9 5 1 986 4 75 230 1 1 -"29859" 4 424 3 1 1 1 4 1 100 966 100 150 1453 8 7 1 1901 3 50 50 1 1 -"29860" 4 424 3 1 1 1 4 1 100 966 150 225 856 3 10 0 1301 3 50 75 0 1 -"29861" 4 424 3 1 1 1 4 1 100 966 225 169 1156 7 9 1 1091 2 25 56 0 0 -"29862" 4 424 3 1 1 1 4 1 100 966 169 330 1532 9 1 1 1401 5 95 161 1 1 -"29863" 4 424 3 1 1 1 4 1 100 966 330 578 980 2 3 0 883 4 75 248 0 1 -"29864" 4 424 3 1 1 1 4 1 100 966 578 1127 863 1 8 0 1980 5 95 549 0 1 -"29865" 4 424 3 1 1 1 4 1 100 966 1127 1071 1443 5 4 0 1628 1 5 56 1 0 -"29866" 4 424 3 1 1 1 4 1 100 966 1071 1017 828 4 2 0 1498 1 5 54 1 0 -"29867" 4 424 3 1 1 1 4 1 100 966 1017 966 1472 6 3 0 1727 1 5 51 1 0 -"29868" 4 424 4 0 1 0 1 1 100 110 100 150 2404 2 7 0 728 3 50 50 0 1 -"29869" 4 424 4 0 1 0 1 1 100 110 150 225 2770 3 7 0 1916 3 50 75 0 1 -"29870" 4 424 4 0 1 0 1 1 100 110 225 439 1086 2 8 0 1656 1 95 214 0 1 -"29871" 4 424 4 0 1 0 1 1 100 110 439 110 1950 8 9 1 1367 2 75 329 0 0 -"29872" 4 424 5 1 1 0 1 0 100 0 100 195 1567 1 3 0 1018 1 95 95 0 1 -"29873" 4 424 5 1 1 0 1 0 100 0 195 293 1119 6 5 1 1695 3 50 98 1 1 -"29874" 4 424 5 1 1 0 1 0 100 0 293 571 756 2 8 0 613 1 95 278 0 1 -"29875" 4 424 5 1 1 0 1 0 100 0 571 29 965 8 9 1 469 1 95 542 0 0 -"29876" 4 424 5 1 1 0 1 0 100 0 29 57 1004 3 4 0 658 1 95 28 0 1 -"29877" 4 424 5 1 1 0 1 0 100 0 57 3 1007 5 7 1 493 1 95 54 0 0 -"29878" 4 424 5 1 1 0 1 0 100 0 3 6 963 7 4 1 731 1 95 3 1 1 -"29879" 4 424 5 1 1 0 1 0 100 0 6 0 717 4 1 0 609 1 95 6 1 0 -"29880" 4 424 5 1 1 0 2 0 100 0 100 195 1394 8 1 1 734 1 95 95 1 1 -"29881" 4 424 5 1 1 0 2 0 100 0 195 380 1008 6 2 1 614 1 95 185 1 1 -"29882" 4 424 5 1 1 0 2 0 100 0 380 19 1046 7 9 1 482 1 95 361 0 0 -"29883" 4 424 5 1 1 0 2 0 100 0 19 37 736 2 10 0 699 1 95 18 0 1 -"29884" 4 424 5 1 1 0 2 0 100 0 37 9 1463 5 3 0 815 2 75 28 1 0 -"29885" 4 424 5 1 1 0 2 0 100 0 9 0 830 4 
2 0 988 1 95 9 1 0 -"29886" 4 424 5 1 1 0 3 0 100 0 100 195 1233 7 5 1 1348 1 95 95 1 1 -"29887" 4 424 5 1 1 0 3 0 100 0 195 10 984 2 1 0 487 1 95 185 1 0 -"29888" 4 424 5 1 1 0 3 0 100 0 10 20 973 8 6 1 468 1 95 10 1 1 -"29889" 4 424 5 1 1 0 3 0 100 0 20 39 771 4 7 0 460 1 95 19 0 1 -"29890" 4 424 5 1 1 0 3 0 100 0 39 76 1427 3 10 0 525 1 95 37 0 1 -"29891" 4 424 5 1 1 0 3 0 100 0 76 4 993 6 8 1 488 1 95 72 0 0 -"29892" 4 424 5 1 1 0 3 0 100 0 4 8 1821 9 2 1 3940 1 95 4 1 1 -"29893" 4 424 5 1 1 0 3 0 100 0 8 0 2454 5 3 0 2647 1 95 8 1 0 -"29894" 4 424 5 1 1 0 4 1 100 27 100 195 1231 2 3 0 578 1 95 95 0 1 -"29895" 4 424 5 1 1 0 4 1 100 27 195 380 1164 7 4 1 762 1 95 185 1 1 -"29896" 4 424 5 1 1 0 4 1 100 27 380 19 939 3 1 0 2252 1 95 361 1 0 -"29897" 4 424 5 1 1 0 4 1 100 27 19 37 798 1 9 0 381 1 95 18 0 1 -"29898" 4 424 5 1 1 0 4 1 100 27 37 72 1264 8 7 1 442 1 95 35 1 1 -"29899" 4 424 5 1 1 0 4 1 100 27 72 140 882 9 2 1 413 1 95 68 1 1 -"29900" 4 424 5 1 1 0 4 1 100 27 140 273 943 5 6 0 464 1 95 133 0 1 -"29901" 4 424 5 1 1 0 4 1 100 27 273 14 1623 6 8 1 2223 1 95 259 0 0 -"29902" 4 424 5 1 1 0 4 1 100 27 14 27 691 4 7 0 504 1 95 13 0 1 -"29903" 4 434 2 0 1 1 1 1 100 140 100 150 3776 8 3 1 1538 3 50 50 1 1 -"29904" 4 434 2 0 1 1 1 1 100 140 150 225 11302 3 7 0 2074 3 50 75 0 1 -"29905" 4 434 2 0 1 1 1 1 100 140 225 281 1925 8 2 1 2831 2 25 56 1 1 -"29906" 4 434 2 0 1 1 1 1 100 140 281 140 2789 2 1 0 727 3 50 141 1 0 -"29907" 4 434 3 1 1 1 1 1 100 651 100 150 6866 9 7 1 4089 3 50 50 1 1 -"29908" 4 434 3 1 1 1 1 1 100 651 150 188 5699 4 8 0 1458 2 25 38 0 1 -"29909" 4 434 3 1 1 1 1 1 100 651 188 282 2938 8 2 1 620 3 50 94 1 1 -"29910" 4 434 3 1 1 1 1 1 100 651 282 211 2604 2 1 0 326 2 25 71 1 0 -"29911" 4 434 3 1 1 1 1 1 100 651 211 264 4603 7 6 1 590 2 25 53 1 1 -"29912" 4 434 3 1 1 1 1 1 100 651 264 330 1927 5 3 1 1185 2 25 66 1 1 -"29913" 4 434 3 1 1 1 1 1 100 651 330 413 1422 3 6 0 616 2 25 83 0 1 -"29914" 4 434 3 1 1 1 1 1 100 651 413 434 2496 6 9 0 462 1 5 21 0 1 -"29915" 4 434 3 1 1 1 1 1 100 651 434 651 2381 1 4 0 682 3 50 217 0 1 -"29916" 4 434 3 1 1 1 2 1 100 602 100 150 9773 2 9 0 1146 3 50 50 0 1 -"29917" 4 434 3 1 1 1 2 1 100 602 150 158 5729 4 10 0 1118 1 5 8 0 1 -"29918" 4 434 3 1 1 1 2 1 100 602 158 166 4574 3 1 1 530 1 5 8 1 1 -"29919" 4 434 3 1 1 1 2 1 100 602 166 249 2206 8 6 1 483 3 50 83 1 1 -"29920" 4 434 3 1 1 1 2 1 100 602 249 187 1690 5 7 1 447 2 25 62 0 0 -"29921" 4 434 3 1 1 1 2 1 100 602 187 178 3663 6 8 1 750 1 5 9 0 0 -"29922" 4 434 3 1 1 1 2 1 100 602 178 267 1640 7 5 1 1832 3 50 89 1 1 -"29923" 4 434 3 1 1 1 2 1 100 602 267 401 1533 1 10 0 1394 3 50 134 0 1 -"29924" 4 434 3 1 1 1 2 1 100 602 401 602 2541 9 3 1 970 3 50 201 1 1 -"29925" 4 434 3 1 1 1 3 1 100 284 100 150 8040 3 5 0 551 3 50 50 0 1 -"29926" 4 434 3 1 1 1 3 1 100 284 150 75 1802 8 9 1 858 3 50 75 0 0 -"29927" 4 434 3 1 1 1 3 1 100 284 75 113 1782 2 4 0 839 3 50 38 0 1 -"29928" 4 434 3 1 1 1 3 1 100 284 113 107 2748 6 3 0 463 1 5 6 1 0 -"29929" 4 434 3 1 1 1 3 1 100 284 107 112 2142 7 6 1 613 1 5 5 1 1 -"29930" 4 434 3 1 1 1 3 1 100 284 112 168 3461 4 2 1 887 3 50 56 1 1 -"29931" 4 434 3 1 1 1 3 1 100 284 168 252 1442 1 8 0 2499 3 50 84 0 1 -"29932" 4 434 3 1 1 1 3 1 100 284 252 189 3178 5 7 1 416 2 25 63 0 0 -"29933" 4 434 3 1 1 1 3 1 100 284 189 284 1714 9 5 1 1519 3 50 95 1 1 -"29934" 4 434 3 1 1 1 4 1 100 749 100 150 2869 8 7 1 1083 3 50 50 1 1 -"29935" 4 434 3 1 1 1 4 1 100 749 150 225 1860 3 10 0 266 3 50 75 0 1 -"29936" 4 434 3 1 1 1 4 1 100 749 225 112 2146 7 9 1 298 3 50 113 0 0 -"29937" 
4 434 3 1 1 1 4 1 100 749 112 196 1673 9 1 1 391 4 75 84 1 1 -"29938" 4 434 3 1 1 1 4 1 100 749 196 343 1826 2 3 0 449 4 75 147 0 1 -"29939" 4 434 3 1 1 1 4 1 100 749 343 600 2513 1 8 0 479 4 75 257 0 1 -"29940" 4 434 3 1 1 1 4 1 100 749 600 630 3526 5 4 1 1027 1 5 30 1 1 -"29941" 4 434 3 1 1 1 4 1 100 749 630 788 2488 4 2 1 391 2 25 158 1 1 -"29942" 4 434 3 1 1 1 4 1 100 749 788 749 3906 6 3 0 453 1 5 39 1 0 -"29943" 4 434 4 0 1 0 1 1 100 26 100 150 3389 2 7 0 522 3 50 50 0 1 -"29944" 4 434 4 0 1 0 1 1 100 26 150 293 4582 3 7 0 4681 1 95 143 0 1 -"29945" 4 434 4 0 1 0 1 1 100 26 293 513 5000 2 8 0 1495 2 75 220 0 1 -"29946" 4 434 4 0 1 0 1 1 100 26 513 26 2917 8 9 1 472 1 95 487 0 0 -"29947" 4 434 5 1 1 0 1 1 100 10 100 195 5131 1 3 0 516 1 95 95 0 1 -"29948" 4 434 5 1 1 0 1 1 100 10 195 146 4825 6 5 0 1250 4 25 49 1 0 -"29949" 4 434 5 1 1 0 1 1 100 10 146 285 1563 2 8 0 279 1 95 139 0 1 -"29950" 4 434 5 1 1 0 1 1 100 10 285 14 1263 8 9 1 312 1 95 271 0 0 -"29951" 4 434 5 1 1 0 1 1 100 10 14 27 3415 3 4 0 354 1 95 13 0 1 -"29952" 4 434 5 1 1 0 1 1 100 10 27 53 1662 5 7 0 365 1 95 26 0 1 -"29953" 4 434 5 1 1 0 1 1 100 10 53 103 1417 7 4 1 323 1 95 50 1 1 -"29954" 4 434 5 1 1 0 1 1 100 10 103 5 2553 4 1 0 469 1 95 98 1 0 -"29955" 4 434 5 1 1 0 1 1 100 10 5 10 3644 9 6 1 420 1 95 5 1 1 -"29956" 4 434 5 1 1 0 2 1 100 9305 100 195 2456 8 1 1 481 1 95 95 1 1 -"29957" 4 434 5 1 1 0 2 1 100 9305 195 293 2679 6 2 1 3189 3 50 98 1 1 -"29958" 4 434 5 1 1 0 2 1 100 9305 293 440 2258 7 9 0 420 3 50 147 0 1 -"29959" 4 434 5 1 1 0 2 1 100 9305 440 858 1517 2 10 0 347 1 95 418 0 1 -"29960" 4 434 5 1 1 0 2 1 100 9305 858 1673 2288 5 3 1 825 1 95 815 1 1 -"29961" 4 434 5 1 1 0 2 1 100 9305 1673 1255 3543 4 2 0 820 4 25 418 1 0 -"29962" 4 434 5 1 1 0 2 1 100 9305 1255 2447 3290 3 5 0 403 1 95 1192 0 1 -"29963" 4 434 5 1 1 0 2 1 100 9305 2447 4772 2085 9 4 1 432 1 95 2325 1 1 -"29964" 4 434 5 1 1 0 2 1 100 9305 4772 9305 2112 1 7 0 425 1 95 4533 0 1 -"29965" 4 434 5 1 1 0 3 1 100 844 100 195 2809 7 5 1 433 1 95 95 1 1 -"29966" 4 434 5 1 1 0 3 1 100 844 195 10 1370 2 1 0 390 1 95 185 1 0 -"29967" 4 434 5 1 1 0 3 1 100 844 10 20 2080 8 6 1 1643 1 95 10 1 1 -"29968" 4 434 5 1 1 0 3 1 100 844 20 39 2826 4 7 0 906 1 95 19 0 1 -"29969" 4 434 5 1 1 0 3 1 100 844 39 76 3460 3 10 0 853 1 95 37 0 1 -"29970" 4 434 5 1 1 0 3 1 100 844 76 114 1698 6 8 0 1631 3 50 38 0 1 -"29971" 4 434 5 1 1 0 3 1 100 844 114 222 1638 9 2 1 326 1 95 108 1 1 -"29972" 4 434 5 1 1 0 3 1 100 844 222 433 2187 5 3 1 409 1 95 211 1 1 -"29973" 4 434 5 1 1 0 3 1 100 844 433 844 1075 1 10 0 333 1 95 411 0 1 -"29974" 4 434 5 1 1 0 4 1 100 6851 100 195 2390 2 3 0 706 1 95 95 0 1 -"29975" 4 434 5 1 1 0 4 1 100 6851 195 380 1461 7 4 1 797 1 95 185 1 1 -"29976" 4 434 5 1 1 0 4 1 100 6851 380 741 1708 3 1 1 856 1 95 361 1 1 -"29977" 4 434 5 1 1 0 4 1 100 6851 741 1445 1189 1 9 0 425 1 95 704 0 1 -"29978" 4 434 5 1 1 0 4 1 100 6851 1445 2818 1266 8 7 1 852 1 95 1373 1 1 -"29979" 4 434 5 1 1 0 4 1 100 6851 2818 2677 3777 9 2 0 0 5 5 141 1 0 -"29980" 4 434 5 1 1 0 4 1 100 6851 2677 5220 3075 5 6 0 406 1 95 2543 0 1 -"29981" 4 434 5 1 1 0 4 1 100 6851 5220 5481 3576 6 8 0 1356 5 5 261 0 1 -"29982" 4 434 5 1 1 0 4 1 100 6851 5481 6851 2585 4 7 0 1540 4 25 1370 0 1 -"29983" 4 505 2 0 1 0 1 1 100 99 100 150 23020 2 7 0 2011 3 50 50 0 1 -"29984" 4 505 2 0 1 0 1 1 100 99 150 263 25486 3 7 0 4106 2 75 113 0 1 -"29985" 4 505 2 0 1 0 1 1 100 99 263 395 2104 2 8 0 517 3 50 132 0 1 -"29986" 4 505 2 0 1 0 1 1 100 99 395 99 2491 8 9 1 1225 2 75 296 0 0 -"29987" 4 505 3 
1 1 0 1 1 100 47 100 195 2511 1 3 0 3590 1 95 95 0 1 -"29988" 4 505 3 1 1 0 1 1 100 47 195 97 4134 6 5 0 621 3 50 98 1 0 -"29989" 4 505 3 1 1 0 1 1 100 47 97 146 2274 2 8 0 529 3 50 49 0 1 -"29990" 4 505 3 1 1 0 1 1 100 47 146 73 3957 8 9 1 1247 3 50 73 0 0 -"29991" 4 505 3 1 1 0 1 1 100 47 73 36 3056 3 4 1 1218 3 50 37 0 0 -"29992" 4 505 3 1 1 0 1 1 100 47 36 54 3023 5 7 0 455 3 50 18 0 1 -"29993" 4 505 3 1 1 0 1 1 100 47 54 95 2258 7 4 1 952 2 75 41 1 1 -"29994" 4 505 3 1 1 0 1 1 100 47 95 24 2658 4 1 0 2069 2 75 71 1 0 -"29995" 4 505 3 1 1 0 1 1 100 47 24 47 2374 9 6 1 1851 1 95 23 1 1 -"29996" 4 505 3 1 1 0 2 1 100 1337 100 195 3901 8 1 1 2423 1 95 95 1 1 -"29997" 4 505 3 1 1 0 2 1 100 1337 195 293 2521 6 2 1 424 3 50 98 1 1 -"29998" 4 505 3 1 1 0 2 1 100 1337 293 146 2264 7 9 1 444 3 50 147 0 0 -"29999" 4 505 3 1 1 0 2 1 100 1337 146 285 2026 2 10 0 3459 1 95 139 0 1 -"30000" 4 505 3 1 1 0 2 1 100 1337 285 499 3399 5 3 1 1733 2 75 214 1 1 -"30001" 4 505 3 1 1 0 2 1 100 1337 499 873 2709 4 2 1 2642 2 75 374 1 1 -"30002" 4 505 3 1 1 0 2 1 100 1337 873 1528 2027 3 5 0 2055 2 75 655 0 1 -"30003" 4 505 3 1 1 0 2 1 100 1337 1528 764 2877 9 4 0 829 3 50 764 1 0 -"30004" 4 505 3 1 1 0 2 1 100 1337 764 1337 2439 1 7 0 1404 2 75 573 0 1 -"30005" 4 505 3 1 1 0 3 1 100 2244 100 195 2094 7 5 1 2246 1 95 95 1 1 -"30006" 4 505 3 1 1 0 3 1 100 2244 195 293 2660 2 1 1 1911 3 50 98 1 1 -"30007" 4 505 3 1 1 0 3 1 100 2244 293 513 2329 8 6 1 2225 2 75 220 1 1 -"30008" 4 505 3 1 1 0 3 1 100 2244 513 128 2528 4 7 1 2841 2 75 385 0 0 -"30009" 4 505 3 1 1 0 3 1 100 2244 128 250 2678 3 10 0 1493 1 95 122 0 1 -"30010" 4 505 3 1 1 0 3 1 100 2244 250 438 2336 6 8 0 486 2 75 188 0 1 -"30011" 4 505 3 1 1 0 3 1 100 2244 438 767 1981 9 2 1 538 2 75 329 1 1 -"30012" 4 505 3 1 1 0 3 1 100 2244 767 1151 2063 5 3 1 637 3 50 384 1 1 -"30013" 4 505 3 1 1 0 3 1 100 2244 1151 2244 1831 1 10 0 2806 1 95 1093 0 1 -"30014" 4 505 3 1 1 0 4 1 100 30 100 50 2590 2 3 1 1055 3 50 50 0 0 -"30015" 4 505 3 1 1 0 4 1 100 30 50 98 1544 7 4 1 2026 1 95 48 1 1 -"30016" 4 505 3 1 1 0 4 1 100 30 98 5 3247 3 1 0 2287 1 95 93 1 0 -"30017" 4 505 3 1 1 0 4 1 100 30 5 10 2362 1 9 0 624 1 95 5 0 1 -"30018" 4 505 3 1 1 0 4 1 100 30 10 20 2124 8 7 1 1209 1 95 10 1 1 -"30019" 4 505 3 1 1 0 4 1 100 30 20 39 2190 9 2 1 1761 1 95 19 1 1 -"30020" 4 505 3 1 1 0 4 1 100 30 39 68 2002 5 6 0 2438 2 75 29 0 1 -"30021" 4 505 3 1 1 0 4 1 100 30 68 17 2115 6 8 1 446 2 75 51 0 0 -"30022" 4 505 3 1 1 0 4 1 100 30 17 30 1943 4 7 0 1036 2 75 13 0 1 -"30023" 4 505 4 0 1 1 1 1 100 169 100 150 16536 8 3 1 1411 3 50 50 1 1 -"30024" 4 505 4 0 1 1 1 1 100 169 150 225 30023 3 7 0 1067 3 50 75 0 1 -"30025" 4 505 4 0 1 1 1 1 100 169 225 338 1992 8 2 1 1361 3 50 113 1 1 -"30026" 4 505 4 0 1 1 1 1 100 169 338 169 1661 2 1 0 444 3 50 169 1 0 -"30027" 4 505 5 1 1 1 1 1 100 414 100 150 1824 9 7 1 3185 3 50 50 1 1 -"30028" 4 505 5 1 1 1 1 1 100 414 150 75 2438 4 8 1 500 3 50 75 0 0 -"30029" 4 505 5 1 1 1 1 1 100 414 75 131 1666 8 2 1 1111 4 75 56 1 1 -"30030" 4 505 5 1 1 1 1 1 100 414 131 65 1729 2 1 0 3561 3 50 66 1 0 -"30031" 4 505 5 1 1 1 1 1 100 414 65 98 2122 7 6 1 1132 3 50 33 1 1 -"30032" 4 505 5 1 1 1 1 1 100 414 98 147 1825 5 3 1 465 3 50 49 1 1 -"30033" 4 505 5 1 1 1 1 1 100 414 147 221 1783 3 6 0 2392 3 50 74 0 1 -"30034" 4 505 5 1 1 1 1 1 100 414 221 276 1896 6 9 0 1619 2 25 55 0 1 -"30035" 4 505 5 1 1 1 1 1 100 414 276 414 2264 1 4 0 433 3 50 138 0 1 -"30036" 4 505 5 1 1 1 2 1 100 1234 100 175 2221 2 9 0 1914 4 75 75 0 1 -"30037" 4 505 5 1 1 1 2 1 100 1234 175 219 
1668 4 10 0 3388 2 25 44 0 1 -"30038" 4 505 5 1 1 1 2 1 100 1234 219 274 1794 3 1 1 1394 2 25 55 1 1 -"30039" 4 505 5 1 1 1 2 1 100 1234 274 343 1579 8 6 1 2649 2 25 69 1 1 -"30040" 4 505 5 1 1 1 2 1 100 1234 343 429 1684 5 7 0 1057 2 25 86 0 1 -"30041" 4 505 5 1 1 1 2 1 100 1234 429 322 1751 6 8 1 1600 2 25 107 0 0 -"30042" 4 505 5 1 1 1 2 1 100 1234 322 403 1761 7 5 1 2145 2 25 81 1 1 -"30043" 4 505 5 1 1 1 2 1 100 1234 403 705 2528 1 10 0 2179 4 75 302 0 1 -"30044" 4 505 5 1 1 1 2 1 100 1234 705 1234 1673 9 3 1 964 4 75 529 1 1 -"30045" 4 505 5 1 1 1 3 1 100 343 100 150 1990 3 5 0 904 3 50 50 0 1 -"30046" 4 505 5 1 1 1 3 1 100 343 150 75 1783 8 9 1 827 3 50 75 0 0 -"30047" 4 505 5 1 1 1 3 1 100 343 75 113 1785 2 4 0 427 3 50 38 0 1 -"30048" 4 505 5 1 1 1 3 1 100 343 113 170 1669 6 3 1 539 3 50 57 1 1 -"30049" 4 505 5 1 1 1 3 1 100 343 170 298 2350 7 6 1 488 4 75 128 1 1 -"30050" 4 505 5 1 1 1 3 1 100 343 298 149 1893 4 2 0 1928 3 50 149 1 0 -"30051" 4 505 5 1 1 1 3 1 100 343 149 261 2214 1 8 0 1431 4 75 112 0 1 -"30052" 4 505 5 1 1 1 3 1 100 343 261 196 2017 5 7 1 2468 2 25 65 0 0 -"30053" 4 505 5 1 1 1 3 1 100 343 196 343 1649 9 5 1 1820 4 75 147 1 1 -"30054" 4 505 5 1 1 1 4 1 100 225 100 175 1933 8 7 1 621 4 75 75 1 1 -"30055" 4 505 5 1 1 1 4 1 100 225 175 263 1664 3 10 0 1403 3 50 88 0 1 -"30056" 4 505 5 1 1 1 4 1 100 225 263 131 1557 7 9 1 1788 3 50 132 0 0 -"30057" 4 505 5 1 1 1 4 1 100 225 131 229 1637 9 1 1 1314 4 75 98 1 1 -"30058" 4 505 5 1 1 1 4 1 100 225 229 344 2133 2 3 0 977 3 50 115 0 1 -"30059" 4 505 5 1 1 1 4 1 100 225 344 602 1965 1 8 0 821 4 75 258 0 1 -"30060" 4 505 5 1 1 1 4 1 100 225 602 301 2068 5 4 0 2094 3 50 301 1 0 -"30061" 4 505 5 1 1 1 4 1 100 225 301 150 2408 4 2 0 588 3 50 151 1 0 -"30062" 4 505 5 1 1 1 4 1 100 225 150 225 1781 6 3 1 1744 3 50 75 1 1 -"30063" 4 508 2 0 1 0 1 1 100 219 100 150 14504 2 7 0 2193 3 50 50 0 1 -"30064" 4 508 2 0 1 0 1 1 100 219 150 225 14080 3 7 0 854 3 50 75 0 1 -"30065" 4 508 2 0 1 0 1 1 100 219 225 439 5026 2 8 0 2650 1 95 214 0 1 -"30066" 4 508 2 0 1 0 1 1 100 219 439 219 4146 8 9 1 708 3 50 220 0 0 -"30067" 4 508 3 1 1 0 1 1 100 161 100 195 2550 1 3 0 2036 1 95 95 0 1 -"30068" 4 508 3 1 1 0 1 1 100 161 195 244 2273 6 5 1 568 4 25 49 1 1 -"30069" 4 508 3 1 1 0 1 1 100 161 244 366 2105 2 8 0 666 3 50 122 0 1 -"30070" 4 508 3 1 1 0 1 1 100 161 366 183 2174 8 9 1 548 3 50 183 0 0 -"30071" 4 508 3 1 1 0 1 1 100 161 183 275 3104 3 4 0 1778 3 50 92 0 1 -"30072" 4 508 3 1 1 0 1 1 100 161 275 206 4466 5 7 1 467 4 25 69 0 0 -"30073" 4 508 3 1 1 0 1 1 100 161 206 258 2627 7 4 1 715 4 25 52 1 1 -"30074" 4 508 3 1 1 0 1 1 100 161 258 129 3694 4 1 0 1305 3 50 129 1 0 -"30075" 4 508 3 1 1 0 1 1 100 161 129 161 2058 9 6 1 470 4 25 32 1 1 -"30076" 4 508 3 1 1 0 2 1 100 944 100 150 3898 8 1 1 464 3 50 50 1 1 -"30077" 4 508 3 1 1 0 2 1 100 944 150 188 2258 6 2 1 1327 4 25 38 1 1 -"30078" 4 508 3 1 1 0 2 1 100 944 188 141 4199 7 9 1 801 4 25 47 0 0 -"30079" 4 508 3 1 1 0 2 1 100 944 141 275 1781 2 10 0 2655 1 95 134 0 1 -"30080" 4 508 3 1 1 0 2 1 100 944 275 344 1608 5 3 1 1611 4 25 69 1 1 -"30081" 4 508 3 1 1 0 2 1 100 944 344 258 4073 4 2 0 715 4 25 86 1 0 -"30082" 4 508 3 1 1 0 2 1 100 944 258 503 1188 3 5 0 3033 1 95 245 0 1 -"30083" 4 508 3 1 1 0 2 1 100 944 503 629 2691 9 4 1 1171 4 25 126 1 1 -"30084" 4 508 3 1 1 0 2 1 100 944 629 944 2408 1 7 0 853 3 50 315 0 1 -"30085" 4 508 3 1 1 0 3 1 100 21 100 150 2688 7 5 1 408 3 50 50 1 1 -"30086" 4 508 3 1 1 0 3 1 100 21 150 75 2566 2 1 0 441 3 50 75 1 0 -"30087" 4 508 3 1 1 0 3 1 100 21 75 131 
2601 8 6 1 1554 2 75 56 1 1 -"30088" 4 508 3 1 1 0 3 1 100 21 131 255 2120 4 7 0 1355 1 95 124 0 1 -"30089" 4 508 3 1 1 0 3 1 100 21 255 497 2199 3 10 0 1709 1 95 242 0 1 -"30090" 4 508 3 1 1 0 3 1 100 21 497 124 2078 6 8 1 2913 2 75 373 0 0 -"30091" 4 508 3 1 1 0 3 1 100 21 124 242 3977 9 2 1 1521 1 95 118 1 1 -"30092" 4 508 3 1 1 0 3 1 100 21 242 12 2985 5 3 0 2004 1 95 230 1 0 -"30093" 4 508 3 1 1 0 3 1 100 21 12 21 3220 1 10 0 1022 2 75 9 0 1 -"30094" 4 508 3 1 1 0 4 1 100 1443 100 195 2950 2 3 0 987 1 95 95 0 1 -"30095" 4 508 3 1 1 0 4 1 100 1443 195 380 915 7 4 1 818 1 95 185 1 1 -"30096" 4 508 3 1 1 0 4 1 100 1443 380 190 1117 3 1 0 617 3 50 190 1 0 -"30097" 4 508 3 1 1 0 4 1 100 1443 190 371 992 1 9 0 1059 1 95 181 0 1 -"30098" 4 508 3 1 1 0 4 1 100 1443 371 723 870 8 7 1 1413 1 95 352 1 1 -"30099" 4 508 3 1 1 0 4 1 100 1443 723 1410 1548 9 2 1 1610 1 95 687 1 1 -"30100" 4 508 3 1 1 0 4 1 100 1443 1410 1481 3665 5 6 0 733 5 5 71 0 1 -"30101" 4 508 3 1 1 0 4 1 100 1443 1481 740 1527 6 8 1 1549 3 50 741 0 0 -"30102" 4 508 3 1 1 0 4 1 100 1443 740 1443 3543 4 7 0 1145 1 95 703 0 1 -"30103" 4 508 4 0 1 1 1 1 100 177 100 150 13234 8 3 1 836 3 50 50 1 1 -"30104" 4 508 4 0 1 1 1 1 100 177 150 225 4354 3 7 0 2382 3 50 75 0 1 -"30105" 4 508 4 0 1 1 1 1 100 177 225 236 1911 8 2 1 1000 1 5 11 1 1 -"30106" 4 508 4 0 1 1 1 1 100 177 236 177 1121 2 1 0 2142 2 25 59 1 0 -"30107" 4 508 5 1 1 1 1 1 100 858 100 150 2292 9 7 1 853 3 50 50 1 1 -"30108" 4 508 5 1 1 1 1 1 100 858 150 188 1910 4 8 0 927 2 25 38 0 1 -"30109" 4 508 5 1 1 1 1 1 100 858 188 282 1149 8 2 1 616 3 50 94 1 1 -"30110" 4 508 5 1 1 1 1 1 100 858 282 141 929 2 1 0 402 3 50 141 1 0 -"30111" 4 508 5 1 1 1 1 1 100 858 141 275 854 7 6 1 1101 5 95 134 1 1 -"30112" 4 508 5 1 1 1 1 1 100 858 275 344 1704 5 3 1 903 2 25 69 1 1 -"30113" 4 508 5 1 1 1 1 1 100 858 344 516 1157 3 6 0 1116 3 50 172 0 1 -"30114" 4 508 5 1 1 1 1 1 100 858 516 490 1241 6 9 1 1536 1 5 26 0 0 -"30115" 4 508 5 1 1 1 1 1 100 858 490 858 1048 1 4 0 1009 4 75 368 0 1 -"30116" 4 508 5 1 1 1 2 1 100 868 100 150 2679 2 9 0 729 3 50 50 0 1 -"30117" 4 508 5 1 1 1 2 1 100 868 150 188 1099 4 10 0 996 2 25 38 0 1 -"30118" 4 508 5 1 1 1 2 1 100 868 188 94 1102 3 1 0 822 3 50 94 1 0 -"30119" 4 508 5 1 1 1 2 1 100 868 94 183 1037 8 6 1 0 5 95 89 1 1 -"30120" 4 508 5 1 1 1 2 1 100 868 183 192 4488 5 7 0 4426 1 5 9 0 1 -"30121" 4 508 5 1 1 1 2 1 100 868 192 182 998 6 8 1 1420 1 5 10 0 0 -"30122" 4 508 5 1 1 1 2 1 100 868 182 228 2635 7 5 1 988 2 25 46 1 1 -"30123" 4 508 5 1 1 1 2 1 100 868 228 445 980 1 10 0 0 5 95 217 0 1 -"30124" 4 508 5 1 1 1 2 1 100 868 445 868 1787 9 3 1 0 5 95 423 1 1 -"30125" 4 508 5 1 1 1 3 1 100 1237 100 125 1605 3 5 0 1530 2 25 25 0 1 -"30126" 4 508 5 1 1 1 3 1 100 1237 125 62 1144 8 9 1 432 3 50 63 0 0 -"30127" 4 508 5 1 1 1 3 1 100 1237 62 121 976 2 4 0 0 5 95 59 0 1 -"30128" 4 508 5 1 1 1 3 1 100 1237 121 236 1145 6 3 1 1667 5 95 115 1 1 -"30129" 4 508 5 1 1 1 3 1 100 1237 236 460 1475 7 6 1 0 5 95 224 1 1 -"30130" 4 508 5 1 1 1 3 1 100 1237 460 345 1468 4 2 0 1110 2 25 115 1 0 -"30131" 4 508 5 1 1 1 3 1 100 1237 345 673 1038 1 8 0 0 5 95 328 0 1 -"30132" 4 508 5 1 1 1 3 1 100 1237 673 707 1847 5 7 0 1850 1 5 34 0 1 -"30133" 4 508 5 1 1 1 3 1 100 1237 707 1237 1004 9 5 1 812 4 75 530 1 1 -"30134" 4 508 5 1 1 1 4 1 100 587 100 150 1517 8 7 1 451 3 50 50 1 1 -"30135" 4 508 5 1 1 1 4 1 100 587 150 188 826 3 10 0 834 2 25 38 0 1 -"30136" 4 508 5 1 1 1 4 1 100 587 188 94 756 7 9 1 421 3 50 94 0 0 -"30137" 4 508 5 1 1 1 4 1 100 587 94 183 1078 9 1 1 0 5 95 89 
1 1 -"30138" 4 508 5 1 1 1 4 1 100 587 183 357 1247 2 3 0 1404 5 95 174 0 1 -"30139" 4 508 5 1 1 1 4 1 100 587 357 696 1468 1 8 0 0 5 95 339 0 1 -"30140" 4 508 5 1 1 1 4 1 100 587 696 522 1225 5 4 0 1237 2 25 174 1 0 -"30141" 4 508 5 1 1 1 4 1 100 587 522 391 800 4 2 0 888 2 25 131 1 0 -"30142" 4 508 5 1 1 1 4 1 100 587 391 587 827 6 3 1 416 3 50 196 1 1 -"30143" 4 527 2 0 1 0 1 1 100 256 100 150 11943 2 7 0 1600 3 50 50 0 1 -"30144" 4 527 2 0 1 0 1 1 100 256 150 263 18400 3 7 0 974 2 75 113 0 1 -"30145" 4 527 2 0 1 0 1 1 100 256 263 513 3529 2 8 0 659 1 95 250 0 1 -"30146" 4 527 2 0 1 0 1 1 100 256 513 256 4869 8 9 1 1506 3 50 257 0 0 -"30147" 4 527 3 1 1 0 1 0 100 1 100 195 1728 1 3 0 497 1 95 95 0 1 -"30148" 4 527 3 1 1 0 1 0 100 1 195 380 3246 6 5 1 373 1 95 185 1 1 -"30149" 4 527 3 1 1 0 1 0 100 1 380 190 5065 2 8 1 849 3 50 190 0 0 -"30150" 4 527 3 1 1 0 1 0 100 1 190 95 1801 8 9 1 1315 3 50 95 0 0 -"30151" 4 527 3 1 1 0 1 0 100 1 95 185 2030 3 4 0 394 1 95 90 0 1 -"30152" 4 527 3 1 1 0 1 0 100 1 185 9 5380 5 7 1 852 1 95 176 0 0 -"30153" 4 527 3 1 1 0 1 0 100 1 9 18 2634 7 4 1 383 1 95 9 1 1 -"30154" 4 527 3 1 1 0 1 0 100 1 18 1 3677 4 1 0 330 1 95 17 1 0 -"30155" 4 527 3 1 1 0 2 1 100 1386 100 195 2043 8 1 1 487 1 95 95 1 1 -"30156" 4 527 3 1 1 0 2 1 100 1386 195 380 2340 6 2 1 1539 1 95 185 1 1 -"30157" 4 527 3 1 1 0 2 1 100 1386 380 285 2518 7 9 1 505 4 25 95 0 0 -"30158" 4 527 3 1 1 0 2 1 100 1386 285 556 1731 2 10 0 2021 1 95 271 0 1 -"30159" 4 527 3 1 1 0 2 1 100 1386 556 278 2452 5 3 0 1499 3 50 278 1 0 -"30160" 4 527 3 1 1 0 2 1 100 1386 278 208 3036 4 2 0 803 4 25 70 1 0 -"30161" 4 527 3 1 1 0 2 1 100 1386 208 406 1574 3 5 0 430 1 95 198 0 1 -"30162" 4 527 3 1 1 0 2 1 100 1386 406 792 2273 9 4 1 488 1 95 386 1 1 -"30163" 4 527 3 1 1 0 2 1 100 1386 792 1386 1224 1 7 0 1187 2 75 594 0 1 -"30164" 4 527 3 1 1 0 3 1 100 31 100 195 3363 7 5 1 933 1 95 95 1 1 -"30165" 4 527 3 1 1 0 3 1 100 31 195 10 1235 2 1 0 951 1 95 185 1 0 -"30166" 4 527 3 1 1 0 3 1 100 31 10 20 6321 8 6 1 891 1 95 10 1 1 -"30167" 4 527 3 1 1 0 3 1 100 31 20 39 2644 4 7 0 424 1 95 19 0 1 -"30168" 4 527 3 1 1 0 3 1 100 31 39 68 1399 3 10 0 1682 2 75 29 0 1 -"30169" 4 527 3 1 1 0 3 1 100 31 68 34 2488 6 8 1 523 3 50 34 0 0 -"30170" 4 527 3 1 1 0 3 1 100 31 34 66 2099 9 2 1 391 1 95 32 1 1 -"30171" 4 527 3 1 1 0 3 1 100 31 66 16 3000 5 3 0 1335 2 75 50 1 0 -"30172" 4 527 3 1 1 0 3 1 100 31 16 31 1623 1 10 0 521 1 95 15 0 1 -"30173" 4 527 3 1 1 0 4 1 100 1586 100 195 3353 2 3 0 448 1 95 95 0 1 -"30174" 4 527 3 1 1 0 4 1 100 1586 195 380 1658 7 4 1 356 1 95 185 1 1 -"30175" 4 527 3 1 1 0 4 1 100 1586 380 190 1528 3 1 0 1273 3 50 190 1 0 -"30176" 4 527 3 1 1 0 4 1 100 1586 190 371 1487 1 9 0 447 1 95 181 0 1 -"30177" 4 527 3 1 1 0 4 1 100 1586 371 723 1799 8 7 1 387 1 95 352 1 1 -"30178" 4 527 3 1 1 0 4 1 100 1586 723 1410 1523 9 2 1 387 1 95 687 1 1 -"30179" 4 527 3 1 1 0 4 1 100 1586 1410 2115 3223 5 6 0 1106 3 50 705 0 1 -"30180" 4 527 3 1 1 0 4 1 100 1586 2115 1057 5834 6 8 1 768 3 50 1058 0 0 -"30181" 4 527 3 1 1 0 4 1 100 1586 1057 1586 2537 4 7 0 2524 3 50 529 0 1 -"30182" 4 527 4 0 1 1 1 1 100 17 100 150 9554 8 3 1 1515 3 50 50 1 1 -"30183" 4 527 4 0 1 1 1 1 100 17 150 225 3280 3 7 0 1511 3 50 75 0 1 -"30184" 4 527 4 0 1 1 1 1 100 17 225 338 1917 8 2 1 827 3 50 113 1 1 -"30185" 4 527 4 0 1 1 1 1 100 17 338 17 3144 2 1 0 0 5 95 321 1 0 -"30186" 4 527 5 1 1 1 1 1 100 542 100 195 2093 9 7 1 0 5 95 95 1 1 -"30187" 4 527 5 1 1 1 1 1 100 542 195 293 2648 4 8 0 702 3 50 98 0 1 -"30188" 4 527 5 1 1 1 1 1 100 542 293 
440 1891 8 2 1 1068 3 50 147 1 1 -"30189" 4 527 5 1 1 1 1 1 100 542 440 220 1427 2 1 0 1012 3 50 220 1 0 -"30190" 4 527 5 1 1 1 1 1 100 542 220 330 1881 7 6 1 1220 3 50 110 1 1 -"30191" 4 527 5 1 1 1 1 1 100 542 330 247 2822 5 3 0 1117 2 25 83 1 0 -"30192" 4 527 5 1 1 1 1 1 100 542 247 371 1922 3 6 0 555 3 50 124 0 1 -"30193" 4 527 5 1 1 1 1 1 100 542 371 278 1811 6 9 1 1392 2 25 93 0 0 -"30194" 4 527 5 1 1 1 1 1 100 542 278 542 1528 1 4 0 1298 5 95 264 0 1 -"30195" 4 527 5 1 1 1 2 1 100 913 100 195 2746 2 9 0 380 5 95 95 0 1 -"30196" 4 527 5 1 1 1 2 1 100 913 195 293 2623 4 10 0 608 3 50 98 0 1 -"30197" 4 527 5 1 1 1 2 1 100 913 293 220 1660 3 1 0 1149 2 25 73 1 0 -"30198" 4 527 5 1 1 1 2 1 100 913 220 429 1847 8 6 1 1175 5 95 209 1 1 -"30199" 4 527 5 1 1 1 2 1 100 913 429 214 2749 5 7 1 673 3 50 215 0 0 -"30200" 4 527 5 1 1 1 2 1 100 913 214 160 1751 6 8 1 1803 2 25 54 0 0 -"30201" 4 527 5 1 1 1 2 1 100 913 160 240 1615 7 5 1 481 3 50 80 1 1 -"30202" 4 527 5 1 1 1 2 1 100 913 240 468 1467 1 10 0 712 5 95 228 0 1 -"30203" 4 527 5 1 1 1 2 1 100 913 468 913 1556 9 3 1 1136 5 95 445 1 1 -"30204" 4 527 5 1 1 1 3 1 100 23 100 150 2892 3 5 0 560 3 50 50 0 1 -"30205" 4 527 5 1 1 1 3 1 100 23 150 7 1608 8 9 1 0 5 95 143 0 0 -"30206" 4 527 5 1 1 1 3 1 100 23 7 12 2918 2 4 0 1122 4 75 5 0 1 -"30207" 4 527 5 1 1 1 3 1 100 23 12 18 1692 6 3 1 461 3 50 6 1 1 -"30208" 4 527 5 1 1 1 3 1 100 23 18 27 1287 7 6 1 482 3 50 9 1 1 -"30209" 4 527 5 1 1 1 3 1 100 23 27 13 1917 4 2 0 731 3 50 14 1 0 -"30210" 4 527 5 1 1 1 3 1 100 23 13 25 1486 1 8 0 0 5 95 12 0 1 -"30211" 4 527 5 1 1 1 3 1 100 23 25 12 2777 5 7 1 520 3 50 13 0 0 -"30212" 4 527 5 1 1 1 3 1 100 23 12 23 1758 9 5 1 0 5 95 11 1 1 -"30213" 4 527 5 1 1 1 4 1 100 1133 100 195 2643 8 7 1 0 5 95 95 1 1 -"30214" 4 527 5 1 1 1 4 1 100 1133 195 293 2176 3 10 0 1691 3 50 98 0 1 -"30215" 4 527 5 1 1 1 4 1 100 1133 293 220 1779 7 9 1 405 2 25 73 0 0 -"30216" 4 527 5 1 1 1 4 1 100 1133 220 429 1639 9 1 1 814 5 95 209 1 1 -"30217" 4 527 5 1 1 1 4 1 100 1133 429 644 1642 2 3 0 957 3 50 215 0 1 -"30218" 4 527 5 1 1 1 4 1 100 1133 644 966 1477 1 8 0 623 3 50 322 0 1 -"30219" 4 527 5 1 1 1 4 1 100 1133 966 1208 4235 5 4 1 440 2 25 242 1 1 -"30220" 4 527 5 1 1 1 4 1 100 1133 1208 906 1744 4 2 0 361 2 25 302 1 0 -"30221" 4 527 5 1 1 1 4 1 100 1133 906 1133 1425 6 3 1 363 2 25 227 1 1 -"30222" 4 533 2 0 1 0 1 1 100 128 100 150 18787 2 7 0 2383 3 50 50 0 1 -"30223" 4 533 2 0 1 0 1 1 100 128 150 293 18142 3 7 0 1050 1 95 143 0 1 -"30224" 4 533 2 0 1 0 1 1 100 128 293 513 5852 2 8 0 1299 2 75 220 0 1 -"30225" 4 533 2 0 1 0 1 1 100 128 513 128 5532 8 9 1 704 2 75 385 0 0 -"30226" 4 533 3 1 1 0 1 1 100 158 100 175 7947 1 3 0 1180 2 75 75 0 1 -"30227" 4 533 3 1 1 0 1 1 100 158 175 263 12419 6 5 1 1528 3 50 88 1 1 -"30228" 4 533 3 1 1 0 1 1 100 158 263 460 7679 2 8 0 2397 2 75 197 0 1 -"30229" 4 533 3 1 1 0 1 1 100 158 460 690 5472 8 9 0 731 3 50 230 0 1 -"30230" 4 533 3 1 1 0 1 1 100 158 690 345 6619 3 4 1 392 3 50 345 0 0 -"30231" 4 533 3 1 1 0 1 1 100 158 345 431 2742 5 7 0 1250 4 25 86 0 1 -"30232" 4 533 3 1 1 0 1 1 100 158 431 323 4135 7 4 0 617 4 25 108 1 0 -"30233" 4 533 3 1 1 0 1 1 100 158 323 81 4431 4 1 0 927 2 75 242 1 0 -"30234" 4 533 3 1 1 0 1 1 100 158 81 158 4757 9 6 1 1822 1 95 77 1 1 -"30235" 4 533 3 1 1 0 2 0 100 0 100 195 6859 8 1 1 1248 1 95 95 1 1 -"30236" 4 533 3 1 1 0 2 0 100 0 195 97 5375 6 2 0 904 3 50 98 1 0 -"30237" 4 533 3 1 1 0 2 0 100 0 97 189 2679 7 9 0 1640 1 95 92 0 1 -"30238" 4 533 3 1 1 0 2 0 100 0 189 9 6249 2 10 1 1252 1 95 180 0 0 
-"30239" 4 533 3 1 1 0 2 0 100 0 9 0 5288 5 3 0 933 1 95 9 1 0 -"30240" 4 533 3 1 1 0 3 0 100 1 100 175 4086 7 5 1 514 2 75 75 1 1 -"30241" 4 533 3 1 1 0 3 0 100 1 175 87 2758 2 1 0 646 3 50 88 1 0 -"30242" 4 533 3 1 1 0 3 0 100 1 87 170 7227 8 6 1 1186 1 95 83 1 1 -"30243" 4 533 3 1 1 0 3 0 100 1 170 332 3335 4 7 0 697 1 95 162 0 1 -"30244" 4 533 3 1 1 0 3 0 100 1 332 249 6588 3 10 1 565 4 25 83 0 0 -"30245" 4 533 3 1 1 0 3 0 100 1 249 12 4985 6 8 1 1179 1 95 237 0 0 -"30246" 4 533 3 1 1 0 3 0 100 1 12 1 1366 9 2 0 646 1 95 11 1 0 -"30247" 4 533 3 1 1 0 4 0 100 0 100 5 3418 2 3 1 1688 1 95 95 0 0 -"30248" 4 533 3 1 1 0 4 0 100 0 5 0 5107 7 4 0 972 1 95 5 1 0 -"30249" 4 533 4 0 1 1 1 1 100 641 100 150 9038 8 3 1 817 3 50 50 1 1 -"30250" 4 533 4 0 1 1 1 1 100 641 150 263 7718 3 7 0 1220 4 75 113 0 1 -"30251" 4 533 4 0 1 1 1 1 100 641 263 513 4797 8 2 1 629 5 95 250 1 1 -"30252" 4 533 4 0 1 1 1 1 100 641 513 641 9963 2 1 1 626 2 25 128 1 1 -"30253" 4 533 5 1 1 1 1 1 100 1248 100 195 3304 9 7 1 871 5 95 95 1 1 -"30254" 4 533 5 1 1 1 1 1 100 1248 195 341 5058 4 8 0 1166 4 75 146 0 1 -"30255" 4 533 5 1 1 1 1 1 100 1248 341 512 6052 8 2 1 1016 3 50 171 1 1 -"30256" 4 533 5 1 1 1 1 1 100 1248 512 640 9128 2 1 1 423 2 25 128 1 1 -"30257" 4 533 5 1 1 1 1 1 100 1248 640 800 5670 7 6 1 2216 2 25 160 1 1 -"30258" 4 533 5 1 1 1 1 1 100 1248 800 1000 11787 5 3 1 1176 2 25 200 1 1 -"30259" 4 533 5 1 1 1 1 1 100 1248 1000 950 17636 3 6 1 1278 1 5 50 0 0 -"30260" 4 533 5 1 1 1 1 1 100 1248 950 998 3643 6 9 0 844 1 5 48 0 1 -"30261" 4 533 5 1 1 1 1 1 100 1248 998 1248 4258 1 4 0 987 2 25 250 0 1 -"30262" 4 533 5 1 1 1 2 0 100 1 100 95 4070 2 9 1 922 1 5 5 0 0 -"30263" 4 533 5 1 1 1 2 0 100 1 95 100 3041 4 10 0 849 1 5 5 0 1 -"30264" 4 533 5 1 1 1 2 0 100 1 100 50 5999 3 1 0 539 3 50 50 1 0 -"30265" 4 533 5 1 1 1 2 0 100 1 50 63 3251 8 6 1 2229 2 25 13 1 1 -"30266" 4 533 5 1 1 1 2 0 100 1 63 95 5407 5 7 0 680 3 50 32 0 1 -"30267" 4 533 5 1 1 1 2 0 100 1 95 24 4640 6 8 1 1535 4 75 71 0 0 -"30268" 4 533 5 1 1 1 2 0 100 1 24 1 4924 7 5 0 1187 5 95 23 1 0 -"30269" 4 533 5 1 1 1 3 0 100 0 100 95 2527 3 5 1 1070 1 5 5 0 0 -"30270" 4 533 5 1 1 1 3 0 100 0 95 5 2101 8 9 1 1142 5 95 90 0 0 -"30271" 4 533 5 1 1 1 3 0 100 0 5 10 3415 2 4 0 664 5 95 5 0 1 -"30272" 4 533 5 1 1 1 3 0 100 0 10 5 4620 6 3 0 1743 3 50 5 1 0 -"30273" 4 533 5 1 1 1 3 0 100 0 5 10 2885 7 6 1 516 5 95 5 1 1 -"30274" 4 533 5 1 1 1 3 0 100 0 10 0 5338 4 2 0 831 5 95 10 1 0 -"30275" 4 533 5 1 1 1 4 1 100 457 100 195 3912 8 7 1 0 5 95 95 1 1 -"30276" 4 533 5 1 1 1 4 1 100 457 195 341 18198 3 10 0 2043 4 75 146 0 1 -"30277" 4 533 5 1 1 1 4 1 100 457 341 426 14967 7 9 0 2340 2 25 85 0 1 -"30278" 4 533 5 1 1 1 4 1 100 457 426 405 5709 9 1 0 1811 1 5 21 1 0 -"30279" 4 533 5 1 1 1 4 1 100 457 405 385 4938 2 3 1 1283 1 5 20 0 0 -"30280" 4 533 5 1 1 1 4 1 100 457 385 366 2460 1 8 1 1191 1 5 19 0 0 -"30281" 4 533 5 1 1 1 4 1 100 457 366 458 3951 5 4 1 1402 2 25 92 1 1 -"30282" 4 533 5 1 1 1 4 1 100 457 458 481 4516 4 2 1 1097 1 5 23 1 1 -"30283" 4 533 5 1 1 1 4 1 100 457 481 457 2074 6 3 0 983 1 5 24 1 0 -"30284" 4 551 2 0 1 1 1 1 100 253 100 150 13444 8 3 1 1647 3 50 50 1 1 -"30285" 4 551 2 0 1 1 1 1 100 253 150 225 3200 3 7 0 2418 3 50 75 0 1 -"30286" 4 551 2 0 1 1 1 1 100 253 225 338 1118 8 2 1 763 3 50 113 1 1 -"30287" 4 551 2 0 1 1 1 1 100 253 338 253 1530 2 1 0 2527 2 25 85 1 0 -"30288" 4 551 3 1 1 1 1 1 100 401 100 150 1487 9 7 1 1348 3 50 50 1 1 -"30289" 4 551 3 1 1 1 1 1 100 401 150 225 3165 4 8 0 801 3 50 75 0 1 -"30290" 4 551 3 1 1 1 1 1 100 401 
225 338 1536 8 2 1 542 3 50 113 1 1 -"30291" 4 551 3 1 1 1 1 1 100 401 338 253 1401 2 1 0 2429 2 25 85 1 0 -"30292" 4 551 3 1 1 1 1 1 100 401 253 380 2146 7 6 1 559 3 50 127 1 1 -"30293" 4 551 3 1 1 1 1 1 100 401 380 285 2083 5 3 0 1582 2 25 95 1 0 -"30294" 4 551 3 1 1 1 1 1 100 401 285 356 1410 3 6 0 1340 2 25 71 0 1 -"30295" 4 551 3 1 1 1 1 1 100 401 356 267 1537 6 9 1 1254 2 25 89 0 0 -"30296" 4 551 3 1 1 1 1 1 100 401 267 401 1330 1 4 0 877 3 50 134 0 1 -"30297" 4 551 3 1 1 1 2 1 100 498 100 150 1736 2 9 0 1597 3 50 50 0 1 -"30298" 4 551 3 1 1 1 2 1 100 498 150 225 1292 4 10 0 841 3 50 75 0 1 -"30299" 4 551 3 1 1 1 2 1 100 498 225 112 2380 3 1 0 783 3 50 113 1 0 -"30300" 4 551 3 1 1 1 2 1 100 498 112 196 1275 8 6 1 736 4 75 84 1 1 -"30301" 4 551 3 1 1 1 2 1 100 498 196 294 1557 5 7 0 1210 3 50 98 0 1 -"30302" 4 551 3 1 1 1 2 1 100 498 294 147 1548 6 8 1 936 3 50 147 0 0 -"30303" 4 551 3 1 1 1 2 1 100 498 147 221 1606 7 5 1 1295 3 50 74 1 1 -"30304" 4 551 3 1 1 1 2 1 100 498 221 332 1363 1 10 0 1087 3 50 111 0 1 -"30305" 4 551 3 1 1 1 2 1 100 498 332 498 1265 9 3 1 969 3 50 166 1 1 -"30306" 4 551 3 1 1 1 3 1 100 226 100 150 1483 3 5 0 1026 3 50 50 0 1 -"30307" 4 551 3 1 1 1 3 1 100 226 150 75 2242 8 9 1 1067 3 50 75 0 0 -"30308" 4 551 3 1 1 1 3 1 100 226 75 131 1195 2 4 0 599 4 75 56 0 1 -"30309" 4 551 3 1 1 1 3 1 100 226 131 197 1179 6 3 1 698 3 50 66 1 1 -"30310" 4 551 3 1 1 1 3 1 100 226 197 296 1099 7 6 1 563 3 50 99 1 1 -"30311" 4 551 3 1 1 1 3 1 100 226 296 148 1146 4 2 0 438 3 50 148 1 0 -"30312" 4 551 3 1 1 1 3 1 100 226 148 259 984 1 8 0 921 4 75 111 0 1 -"30313" 4 551 3 1 1 1 3 1 100 226 259 129 1163 5 7 1 538 3 50 130 0 0 -"30314" 4 551 3 1 1 1 3 1 100 226 129 226 1151 9 5 1 1481 4 75 97 1 1 -"30315" 4 551 3 1 1 1 4 1 100 539 100 175 1718 8 7 1 1829 4 75 75 1 1 -"30316" 4 551 3 1 1 1 4 1 100 539 175 263 1083 3 10 0 674 3 50 88 0 1 -"30317" 4 551 3 1 1 1 4 1 100 539 263 131 1143 7 9 1 700 3 50 132 0 0 -"30318" 4 551 3 1 1 1 4 1 100 539 131 255 1359 9 1 1 710 5 95 124 1 1 -"30319" 4 551 3 1 1 1 4 1 100 539 255 383 1197 2 3 0 970 3 50 128 0 1 -"30320" 4 551 3 1 1 1 4 1 100 539 383 575 1269 1 8 0 866 3 50 192 0 1 -"30321" 4 551 3 1 1 1 4 1 100 539 575 719 1474 5 4 1 2577 2 25 144 1 1 -"30322" 4 551 3 1 1 1 4 1 100 539 719 359 1419 4 2 0 936 3 50 360 1 0 -"30323" 4 551 3 1 1 1 4 1 100 539 359 539 1039 6 3 1 1353 3 50 180 1 1 -"30324" 4 551 4 0 1 0 1 1 100 149 100 175 7473 2 7 0 1595 2 75 75 0 1 -"30325" 4 551 4 0 1 0 1 1 100 149 175 341 1419 3 7 0 1207 1 95 166 0 1 -"30326" 4 551 4 0 1 0 1 1 100 149 341 597 1314 2 8 0 2199 2 75 256 0 1 -"30327" 4 551 4 0 1 0 1 1 100 149 597 149 1135 8 9 1 899 2 75 448 0 0 -"30328" 4 551 5 1 1 0 1 1 100 217 100 195 2367 1 3 0 3060 1 95 95 0 1 -"30329" 4 551 5 1 1 0 1 1 100 217 195 293 998 6 5 1 1334 3 50 98 1 1 -"30330" 4 551 5 1 1 0 1 1 100 217 293 440 1162 2 8 0 987 3 50 147 0 1 -"30331" 4 551 5 1 1 0 1 1 100 217 440 220 1030 8 9 1 984 3 50 220 0 0 -"30332" 4 551 5 1 1 0 1 1 100 217 220 330 1102 3 4 0 926 3 50 110 0 1 -"30333" 4 551 5 1 1 0 1 1 100 217 330 165 1542 5 7 1 1310 3 50 165 0 0 -"30334" 4 551 5 1 1 0 1 1 100 217 165 248 1343 7 4 1 809 3 50 83 1 1 -"30335" 4 551 5 1 1 0 1 1 100 217 248 124 1381 4 1 0 788 3 50 124 1 0 -"30336" 4 551 5 1 1 0 1 1 100 217 124 217 1096 9 6 1 1633 2 75 93 1 1 -"30337" 4 551 5 1 1 0 2 1 100 245 100 150 1582 8 1 1 640 3 50 50 1 1 -"30338" 4 551 5 1 1 0 2 1 100 245 150 225 1243 6 2 1 902 3 50 75 1 1 -"30339" 4 551 5 1 1 0 2 1 100 245 225 169 1019 7 9 1 682 4 25 56 0 0 -"30340" 4 551 5 1 1 0 2 1 100 245 169 330 
1017 2 10 0 1659 1 95 161 0 1 -"30341" 4 551 5 1 1 0 2 1 100 245 330 165 1437 5 3 0 1123 3 50 165 1 0 -"30342" 4 551 5 1 1 0 2 1 100 245 165 41 1145 4 2 0 2353 2 75 124 1 0 -"30343" 4 551 5 1 1 0 2 1 100 245 41 72 1205 3 5 0 996 2 75 31 0 1 -"30344" 4 551 5 1 1 0 2 1 100 245 72 140 1051 9 4 1 1542 1 95 68 1 1 -"30345" 4 551 5 1 1 0 2 1 100 245 140 245 1009 1 7 0 889 2 75 105 0 1 -"30346" 4 551 5 1 1 0 3 1 100 681 100 175 1347 7 5 1 1437 2 75 75 1 1 -"30347" 4 551 5 1 1 0 3 1 100 681 175 87 1153 2 1 0 536 3 50 88 1 0 -"30348" 4 551 5 1 1 0 3 1 100 681 87 170 906 8 6 1 3150 1 95 83 1 1 -"30349" 4 551 5 1 1 0 3 1 100 681 170 255 872 4 7 0 901 3 50 85 0 1 -"30350" 4 551 5 1 1 0 3 1 100 681 255 319 1751 3 10 0 444 4 25 64 0 1 -"30351" 4 551 5 1 1 0 3 1 100 681 319 239 1636 6 8 1 1239 4 25 80 0 0 -"30352" 4 551 5 1 1 0 3 1 100 681 239 466 931 9 2 1 1543 1 95 227 1 1 -"30353" 4 551 5 1 1 0 3 1 100 681 466 349 1292 5 3 0 725 4 25 117 1 0 -"30354" 4 551 5 1 1 0 3 1 100 681 349 681 924 1 10 0 1248 1 95 332 0 1 -"30355" 4 551 5 1 1 0 4 1 100 823 100 175 1378 2 3 0 846 2 75 75 0 1 -"30356" 4 551 5 1 1 0 4 1 100 823 175 263 1131 7 4 1 879 3 50 88 1 1 -"30357" 4 551 5 1 1 0 4 1 100 823 263 131 908 3 1 0 861 3 50 132 1 0 -"30358" 4 551 5 1 1 0 4 1 100 823 131 229 1209 1 9 0 1705 2 75 98 0 1 -"30359" 4 551 5 1 1 0 4 1 100 823 229 401 958 8 7 1 849 2 75 172 1 1 -"30360" 4 551 5 1 1 0 4 1 100 823 401 702 1106 9 2 1 1562 2 75 301 1 1 -"30361" 4 551 5 1 1 0 4 1 100 823 702 878 1367 5 6 0 762 4 25 176 0 1 -"30362" 4 551 5 1 1 0 4 1 100 823 878 658 1048 6 8 1 782 4 25 220 0 0 -"30363" 4 551 5 1 1 0 4 1 100 823 658 823 824 4 7 0 756 4 25 165 0 1 -"30364" 4 561 2 0 1 0 1 1 100 56 100 150 28191 2 7 0 706 3 50 50 0 1 -"30365" 4 561 2 0 1 0 1 1 100 56 150 225 11705 3 7 0 1083 3 50 75 0 1 -"30366" 4 561 2 0 1 0 1 1 100 56 225 112 3486 2 8 1 1236 3 50 113 0 0 -"30367" 4 561 2 0 1 0 1 1 100 56 112 56 3419 8 9 1 954 3 50 56 0 0 -"30368" 4 561 3 1 1 0 1 1 100 250 100 150 4944 1 3 0 881 3 50 50 0 1 -"30369" 4 561 3 1 1 0 1 1 100 250 150 112 6696 6 5 0 1236 4 25 38 1 0 -"30370" 4 561 3 1 1 0 1 1 100 250 112 168 2280 2 8 0 1916 3 50 56 0 1 -"30371" 4 561 3 1 1 0 1 1 100 250 168 84 2425 8 9 1 1131 3 50 84 0 0 -"30372" 4 561 3 1 1 0 1 1 100 250 84 126 2577 3 4 0 2360 3 50 42 0 1 -"30373" 4 561 3 1 1 0 1 1 100 250 126 63 4044 5 7 1 2351 3 50 63 0 0 -"30374" 4 561 3 1 1 0 1 1 100 250 63 95 2437 7 4 1 923 3 50 32 1 1 -"30375" 4 561 3 1 1 0 1 1 100 250 95 143 2604 4 1 1 1245 3 50 48 1 1 -"30376" 4 561 3 1 1 0 1 1 100 250 143 250 3028 9 6 1 1453 2 75 107 1 1 -"30377" 4 561 3 1 1 0 2 1 100 313 100 175 3434 8 1 1 1749 2 75 75 1 1 -"30378" 4 561 3 1 1 0 2 1 100 313 175 131 6157 6 2 0 1227 4 25 44 1 0 -"30379" 4 561 3 1 1 0 2 1 100 313 131 98 1812 7 9 1 763 4 25 33 0 0 -"30380" 4 561 3 1 1 0 2 1 100 313 98 191 1618 2 10 0 1230 1 95 93 0 1 -"30381" 4 561 3 1 1 0 2 1 100 313 191 95 3585 5 3 0 1077 3 50 96 1 0 -"30382" 4 561 3 1 1 0 2 1 100 313 95 47 4897 4 2 0 1126 3 50 48 1 0 -"30383" 4 561 3 1 1 0 2 1 100 313 47 92 2315 3 5 0 1579 1 95 45 0 1 -"30384" 4 561 3 1 1 0 2 1 100 313 92 179 1767 9 4 1 1551 1 95 87 1 1 -"30385" 4 561 3 1 1 0 2 1 100 313 179 313 2946 1 7 0 1086 2 75 134 0 1 -"30386" 4 561 3 1 1 0 3 1 100 1365 100 195 2896 7 5 1 1940 1 95 95 1 1 -"30387" 4 561 3 1 1 0 3 1 100 1365 195 97 2648 2 1 0 1183 3 50 98 1 0 -"30388" 4 561 3 1 1 0 3 1 100 1365 97 189 1938 8 6 1 1055 1 95 92 1 1 -"30389" 4 561 3 1 1 0 3 1 100 1365 189 284 2104 4 7 0 1402 3 50 95 0 1 -"30390" 4 561 3 1 1 0 3 1 100 1365 284 554 2174 3 10 0 1205 1 95 270 0 1 
-"30391" 4 561 3 1 1 0 3 1 100 1365 554 693 5324 6 8 0 1190 4 25 139 0 1 -"30392" 4 561 3 1 1 0 3 1 100 1365 693 1040 1835 9 2 1 1417 3 50 347 1 1 -"30393" 4 561 3 1 1 0 3 1 100 1365 1040 780 4035 5 3 0 2024 4 25 260 1 0 -"30394" 4 561 3 1 1 0 3 1 100 1365 780 1365 1721 1 10 0 975 2 75 585 0 1 -"30395" 4 561 3 1 1 0 4 1 100 182 100 195 5336 2 3 0 2396 1 95 95 0 1 -"30396" 4 561 3 1 1 0 4 1 100 182 195 293 1878 7 4 1 921 3 50 98 1 1 -"30397" 4 561 3 1 1 0 4 1 100 182 293 146 2421 3 1 0 1988 3 50 147 1 0 -"30398" 4 561 3 1 1 0 4 1 100 182 146 285 1532 1 9 0 835 1 95 139 0 1 -"30399" 4 561 3 1 1 0 4 1 100 182 285 556 1582 8 7 1 1620 1 95 271 1 1 -"30400" 4 561 3 1 1 0 4 1 100 182 556 973 1708 9 2 1 985 2 75 417 1 1 -"30401" 4 561 3 1 1 0 4 1 100 182 973 486 2414 5 6 1 1278 3 50 487 0 0 -"30402" 4 561 3 1 1 0 4 1 100 182 486 121 1868 6 8 1 1270 2 75 365 0 0 -"30403" 4 561 3 1 1 0 4 1 100 182 121 182 1753 4 7 0 933 3 50 61 0 1 -"30404" 4 561 4 0 1 1 1 1 100 82 100 125 10679 8 3 1 603 2 25 25 1 1 -"30405" 4 561 4 0 1 1 1 1 100 82 125 188 6034 3 7 0 1368 3 50 63 0 1 -"30406" 4 561 4 0 1 1 1 1 100 82 188 329 1805 8 2 1 670 4 75 141 1 1 -"30407" 4 561 4 0 1 1 1 1 100 82 329 82 1806 2 1 0 1128 4 75 247 1 0 -"30408" 4 561 5 1 1 1 1 1 100 728 100 175 2185 9 7 1 1142 4 75 75 1 1 -"30409" 4 561 5 1 1 1 1 1 100 728 175 263 1881 4 8 0 1087 3 50 88 0 1 -"30410" 4 561 5 1 1 1 1 1 100 728 263 395 1460 8 2 1 1651 3 50 132 1 1 -"30411" 4 561 5 1 1 1 1 1 100 728 395 197 1720 2 1 0 1184 3 50 198 1 0 -"30412" 4 561 5 1 1 1 1 1 100 728 197 296 1450 7 6 1 987 3 50 99 1 1 -"30413" 4 561 5 1 1 1 1 1 100 728 296 444 1918 5 3 1 1076 3 50 148 1 1 -"30414" 4 561 5 1 1 1 1 1 100 728 444 555 1644 3 6 0 1134 2 25 111 0 1 -"30415" 4 561 5 1 1 1 1 1 100 728 555 416 1498 6 9 1 1400 2 25 139 0 0 -"30416" 4 561 5 1 1 1 1 1 100 728 416 728 1245 1 4 0 1193 4 75 312 0 1 -"30417" 4 561 5 1 1 1 2 1 100 632 100 150 1660 2 9 0 997 3 50 50 0 1 -"30418" 4 561 5 1 1 1 2 1 100 632 150 188 1728 4 10 0 1118 2 25 38 0 1 -"30419" 4 561 5 1 1 1 2 1 100 632 188 94 1415 3 1 0 686 3 50 94 1 0 -"30420" 4 561 5 1 1 1 2 1 100 632 94 141 1068 8 6 1 1998 3 50 47 1 1 -"30421" 4 561 5 1 1 1 2 1 100 632 141 247 2043 5 7 0 1250 4 75 106 0 1 -"30422" 4 561 5 1 1 1 2 1 100 632 247 123 1707 6 8 1 1073 3 50 124 0 0 -"30423" 4 561 5 1 1 1 2 1 100 632 123 185 1455 7 5 1 1012 3 50 62 1 1 -"30424" 4 561 5 1 1 1 2 1 100 632 185 361 2144 1 10 0 1570 5 95 176 0 1 -"30425" 4 561 5 1 1 1 2 1 100 632 361 632 1480 9 3 1 1717 4 75 271 1 1 -"30426" 4 561 5 1 1 1 3 1 100 399 100 150 2024 3 5 0 536 3 50 50 0 1 -"30427" 4 561 5 1 1 1 3 1 100 399 150 37 1130 8 9 1 1244 4 75 113 0 0 -"30428" 4 561 5 1 1 1 3 1 100 399 37 65 1094 2 4 0 1324 4 75 28 0 1 -"30429" 4 561 5 1 1 1 3 1 100 399 65 81 1739 6 3 1 2112 2 25 16 1 1 -"30430" 4 561 5 1 1 1 3 1 100 399 81 122 1332 7 6 1 1205 3 50 41 1 1 -"30431" 4 561 5 1 1 1 3 1 100 399 122 91 1449 4 2 0 2390 2 25 31 1 0 -"30432" 4 561 5 1 1 1 3 1 100 399 91 177 1159 1 8 0 1136 5 95 86 0 1 -"30433" 4 561 5 1 1 1 3 1 100 399 177 266 1727 5 7 0 1063 3 50 89 0 1 -"30434" 4 561 5 1 1 1 3 1 100 399 266 399 1444 9 5 1 1976 3 50 133 1 1 -"30435" 4 561 5 1 1 1 4 1 100 263 100 150 2603 8 7 1 963 3 50 50 1 1 -"30436" 4 561 5 1 1 1 4 1 100 263 150 225 1318 3 10 0 591 3 50 75 0 1 -"30437" 4 561 5 1 1 1 4 1 100 263 225 169 1300 7 9 1 1467 2 25 56 0 0 -"30438" 4 561 5 1 1 1 4 1 100 263 169 296 1092 9 1 1 1124 4 75 127 1 1 -"30439" 4 561 5 1 1 1 4 1 100 263 296 444 1588 2 3 0 1123 3 50 148 0 1 -"30440" 4 561 5 1 1 1 4 1 100 263 444 666 1224 1 8 0 1139 3 50 
222 0 1 -"30441" 4 561 5 1 1 1 4 1 100 263 666 333 1729 5 4 0 962 3 50 333 1 0 -"30442" 4 561 5 1 1 1 4 1 100 263 333 250 1330 4 2 0 951 2 25 83 1 0 -"30443" 4 561 5 1 1 1 4 1 100 263 250 263 1194 6 3 1 1668 1 5 13 1 1 -"30444" 4 562 2 0 1 1 1 1 100 134 100 150 5119 8 3 1 1185 3 50 50 1 1 -"30445" 4 562 2 0 1 1 1 1 100 134 150 188 7797 3 7 0 3529 2 25 38 0 1 -"30446" 4 562 2 0 1 1 1 1 100 134 188 179 2860 8 2 0 2585 1 5 9 1 0 -"30447" 4 562 2 0 1 1 1 1 100 134 179 134 2396 2 1 0 1338 2 25 45 1 0 -"30448" 4 562 3 1 1 1 1 1 100 501 100 150 4546 9 7 1 582 3 50 50 1 1 -"30449" 4 562 3 1 1 1 1 1 100 501 150 225 3535 4 8 0 664 3 50 75 0 1 -"30450" 4 562 3 1 1 1 1 1 100 501 225 338 3017 8 2 1 687 3 50 113 1 1 -"30451" 4 562 3 1 1 1 1 1 100 501 338 169 2110 2 1 0 775 3 50 169 1 0 -"30452" 4 562 3 1 1 1 1 1 100 501 169 254 2595 7 6 1 710 3 50 85 1 1 -"30453" 4 562 3 1 1 1 1 1 100 501 254 381 2489 5 3 1 883 3 50 127 1 1 -"30454" 4 562 3 1 1 1 1 1 100 501 381 572 1614 3 6 0 625 3 50 191 0 1 -"30455" 4 562 3 1 1 1 1 1 100 501 572 286 2231 6 9 1 683 3 50 286 0 0 -"30456" 4 562 3 1 1 1 1 1 100 501 286 501 2083 1 4 0 818 4 75 215 0 1 -"30457" 4 562 3 1 1 1 2 1 100 488 100 150 3519 2 9 0 442 3 50 50 0 1 -"30458" 4 562 3 1 1 1 2 1 100 488 150 225 2658 4 10 0 807 3 50 75 0 1 -"30459" 4 562 3 1 1 1 2 1 100 488 225 169 2224 3 1 0 598 2 25 56 1 0 -"30460" 4 562 3 1 1 1 2 1 100 488 169 254 3210 8 6 1 835 3 50 85 1 1 -"30461" 4 562 3 1 1 1 2 1 100 488 254 381 4185 5 7 0 854 3 50 127 0 1 -"30462" 4 562 3 1 1 1 2 1 100 488 381 95 2381 6 8 1 548 4 75 286 0 0 -"30463" 4 562 3 1 1 1 2 1 100 488 95 143 2072 7 5 1 424 3 50 48 1 1 -"30464" 4 562 3 1 1 1 2 1 100 488 143 250 2380 1 10 0 921 4 75 107 0 1 -"30465" 4 562 3 1 1 1 2 1 100 488 250 488 2136 9 3 1 914 5 95 238 1 1 -"30466" 4 562 3 1 1 1 3 1 100 273 100 150 2361 3 5 0 716 3 50 50 0 1 -"30467" 4 562 3 1 1 1 3 1 100 273 150 37 1619 8 9 1 1160 4 75 113 0 0 -"30468" 4 562 3 1 1 1 3 1 100 273 37 56 1667 2 4 0 805 3 50 19 0 1 -"30469" 4 562 3 1 1 1 3 1 100 273 56 84 1736 6 3 1 1431 3 50 28 1 1 -"30470" 4 562 3 1 1 1 3 1 100 273 84 147 1297 7 6 1 1248 4 75 63 1 1 -"30471" 4 562 3 1 1 1 3 1 100 273 147 37 1208 4 2 0 908 4 75 110 1 0 -"30472" 4 562 3 1 1 1 3 1 100 273 37 72 2601 1 8 0 1178 5 95 35 0 1 -"30473" 4 562 3 1 1 1 3 1 100 273 72 140 1852 5 7 0 839 5 95 68 0 1 -"30474" 4 562 3 1 1 1 3 1 100 273 140 273 1630 9 5 1 973 5 95 133 1 1 -"30475" 4 562 3 1 1 1 4 1 100 710 100 195 2423 8 7 1 997 5 95 95 1 1 -"30476" 4 562 3 1 1 1 4 1 100 710 195 341 1404 3 10 0 755 4 75 146 0 1 -"30477" 4 562 3 1 1 1 4 1 100 710 341 170 1558 7 9 1 522 3 50 171 0 0 -"30478" 4 562 3 1 1 1 4 1 100 710 170 332 1570 9 1 1 1315 5 95 162 1 1 -"30479" 4 562 3 1 1 1 4 1 100 710 332 647 1800 2 3 0 753 5 95 315 0 1 -"30480" 4 562 3 1 1 1 4 1 100 710 647 971 1993 1 8 0 932 3 50 324 0 1 -"30481" 4 562 3 1 1 1 4 1 100 710 971 728 1915 5 4 0 483 2 25 243 1 0 -"30482" 4 562 3 1 1 1 4 1 100 710 728 364 1751 4 2 0 718 3 50 364 1 0 -"30483" 4 562 3 1 1 1 4 1 100 710 364 710 2293 6 3 1 749 5 95 346 1 1 -"30484" 4 562 4 0 1 0 1 1 100 197 100 150 4222 2 7 0 703 3 50 50 0 1 -"30485" 4 562 4 0 1 0 1 1 100 197 150 263 2296 3 7 0 815 2 75 113 0 1 -"30486" 4 562 4 0 1 0 1 1 100 197 263 395 2055 2 8 0 782 3 50 132 0 1 -"30487" 4 562 4 0 1 0 1 1 100 197 395 197 1479 8 9 1 887 3 50 198 0 0 -"30488" 4 562 5 1 1 0 1 0 100 1 100 195 2995 1 3 0 414 1 95 95 0 1 -"30489" 4 562 5 1 1 0 1 0 100 1 195 293 1367 6 5 1 684 3 50 98 1 1 -"30490" 4 562 5 1 1 0 1 0 100 1 293 440 1557 2 8 0 587 3 50 147 0 1 -"30491" 4 562 5 1 1 0 1 0 
100 1 440 110 1155 8 9 1 727 2 75 330 0 0 -"30492" 4 562 5 1 1 0 1 0 100 1 110 215 1372 3 4 0 530 1 95 105 0 1 -"30493" 4 562 5 1 1 0 1 0 100 1 215 11 1448 5 7 1 620 1 95 204 0 0 -"30494" 4 562 5 1 1 0 1 0 100 1 11 1 1815 7 4 0 786 1 95 10 1 0 -"30495" 4 562 5 1 1 0 2 0 100 0 100 195 1772 8 1 1 728 1 95 95 1 1 -"30496" 4 562 5 1 1 0 2 0 100 0 195 49 2147 6 2 0 910 2 75 146 1 0 -"30497" 4 562 5 1 1 0 2 0 100 0 49 2 1914 7 9 1 671 1 95 47 0 0 -"30498" 4 562 5 1 1 0 2 0 100 0 2 4 3906 2 10 0 386 1 95 2 0 1 -"30499" 4 562 5 1 1 0 2 0 100 0 4 0 1369 5 3 0 623 1 95 4 1 0 -"30500" 4 562 5 1 1 0 3 1 100 181 100 150 1444 7 5 1 667 3 50 50 1 1 -"30501" 4 562 5 1 1 0 3 1 100 181 150 75 1241 2 1 0 818 3 50 75 1 0 -"30502" 4 562 5 1 1 0 3 1 100 181 75 146 1322 8 6 1 926 1 95 71 1 1 -"30503" 4 562 5 1 1 0 3 1 100 181 146 256 1294 4 7 0 1269 2 75 110 0 1 -"30504" 4 562 5 1 1 0 3 1 100 181 256 384 1571 3 10 0 793 3 50 128 0 1 -"30505" 4 562 5 1 1 0 3 1 100 181 384 192 1377 6 8 1 768 3 50 192 0 0 -"30506" 4 562 5 1 1 0 3 1 100 181 192 374 1392 9 2 1 435 1 95 182 1 1 -"30507" 4 562 5 1 1 0 3 1 100 181 374 93 1594 5 3 0 777 2 75 281 1 0 -"30508" 4 562 5 1 1 0 3 1 100 181 93 181 1530 1 10 0 444 1 95 88 0 1 -"30509" 4 562 5 1 1 0 4 1 100 943 100 195 1714 2 3 0 661 1 95 95 0 1 -"30510" 4 562 5 1 1 0 4 1 100 943 195 293 1404 7 4 1 624 3 50 98 1 1 -"30511" 4 562 5 1 1 0 4 1 100 943 293 73 1191 3 1 0 1484 2 75 220 1 0 -"30512" 4 562 5 1 1 0 4 1 100 943 73 142 1188 1 9 0 431 1 95 69 0 1 -"30513" 4 562 5 1 1 0 4 1 100 943 142 277 1134 8 7 1 576 1 95 135 1 1 -"30514" 4 562 5 1 1 0 4 1 100 943 277 540 1240 9 2 1 595 1 95 263 1 1 -"30515" 4 562 5 1 1 0 4 1 100 943 540 945 1532 5 6 0 807 2 75 405 0 1 -"30516" 4 562 5 1 1 0 4 1 100 943 945 898 2164 6 8 1 1038 5 5 47 0 0 -"30517" 4 562 5 1 1 0 4 1 100 943 898 943 1343 4 7 0 541 5 5 45 0 1 -"30518" 4 566 2 0 1 1 1 1 100 224 100 150 16291 8 3 1 2887 3 50 50 1 1 -"30519" 4 566 2 0 1 1 1 1 100 224 150 225 6406 3 7 0 763 3 50 75 0 1 -"30520" 4 566 2 0 1 1 1 1 100 224 225 236 2459 8 2 1 3080 1 5 11 1 1 -"30521" 4 566 2 0 1 1 1 1 100 224 236 224 2244 2 1 0 372 1 5 12 1 0 -"30522" 4 566 3 1 1 1 1 1 100 1537 100 195 15007 9 7 1 0 5 95 95 1 1 -"30523" 4 566 3 1 1 1 1 1 100 1537 195 293 14142 4 8 0 1262 3 50 98 0 1 -"30524" 4 566 3 1 1 1 1 1 100 1537 293 571 4904 8 2 1 1221 5 95 278 1 1 -"30525" 4 566 3 1 1 1 1 1 100 1537 571 285 2267 2 1 0 387 3 50 286 1 0 -"30526" 4 566 3 1 1 1 1 1 100 1537 285 499 2523 7 6 1 832 4 75 214 1 1 -"30527" 4 566 3 1 1 1 1 1 100 1537 499 474 5059 5 3 0 428 1 5 25 1 0 -"30528" 4 566 3 1 1 1 1 1 100 1537 474 830 1801 3 6 0 869 4 75 356 0 1 -"30529" 4 566 3 1 1 1 1 1 100 1537 830 788 3843 6 9 1 797 1 5 42 0 0 -"30530" 4 566 3 1 1 1 1 1 100 1537 788 1537 2806 1 4 0 0 5 95 749 0 1 -"30531" 4 566 3 1 1 1 2 1 100 1244 100 195 9301 2 9 0 2769 5 95 95 0 1 -"30532" 4 566 3 1 1 1 2 1 100 1244 195 244 3575 4 10 0 1817 2 25 49 0 1 -"30533" 4 566 3 1 1 1 2 1 100 1244 244 183 2133 3 1 0 440 2 25 61 1 0 -"30534" 4 566 3 1 1 1 2 1 100 1244 183 357 2468 8 6 1 0 5 95 174 1 1 -"30535" 4 566 3 1 1 1 2 1 100 1244 357 375 6384 5 7 0 412 1 5 18 0 1 -"30536" 4 566 3 1 1 1 2 1 100 1244 375 187 2515 6 8 1 936 3 50 188 0 0 -"30537" 4 566 3 1 1 1 2 1 100 1244 187 327 2082 7 5 1 3547 4 75 140 1 1 -"30538" 4 566 3 1 1 1 2 1 100 1244 327 638 2004 1 10 0 0 5 95 311 0 1 -"30539" 4 566 3 1 1 1 2 1 100 1244 638 1244 4045 9 3 1 1518 5 95 606 1 1 -"30540" 4 566 3 1 1 1 3 1 100 152 100 195 4762 3 5 0 0 5 95 95 0 1 -"30541" 4 566 3 1 1 1 3 1 100 152 195 10 2092 8 9 1 0 5 95 185 0 0 -"30542" 4 
566 3 1 1 1 3 1 100 152 10 18 2240 2 4 0 515 4 75 8 0 1 -"30543" 4 566 3 1 1 1 3 1 100 152 18 35 2007 6 3 1 0 5 95 17 1 1 -"30544" 4 566 3 1 1 1 3 1 100 152 35 44 10395 7 6 1 785 2 25 9 1 1 -"30545" 4 566 3 1 1 1 3 1 100 152 44 42 1771 4 2 0 1004 1 5 2 1 0 -"30546" 4 566 3 1 1 1 3 1 100 152 42 82 1398 1 8 0 0 5 95 40 0 1 -"30547" 4 566 3 1 1 1 3 1 100 152 82 78 3552 5 7 1 550 1 5 4 0 0 -"30548" 4 566 3 1 1 1 3 1 100 152 78 152 2295 9 5 1 0 5 95 74 1 1 -"30549" 4 566 3 1 1 1 4 1 100 2843 100 195 5646 8 7 1 0 5 95 95 1 1 -"30550" 4 566 3 1 1 1 4 1 100 2843 195 293 1962 3 10 0 2545 3 50 98 0 1 -"30551" 4 566 3 1 1 1 4 1 100 2843 293 366 3445 7 9 0 446 2 25 73 0 1 -"30552" 4 566 3 1 1 1 4 1 100 2843 366 714 1904 9 1 1 0 5 95 348 1 1 -"30553" 4 566 3 1 1 1 4 1 100 2843 714 1392 1929 2 3 0 2993 5 95 678 0 1 -"30554" 4 566 3 1 1 1 4 1 100 2843 1392 2714 2149 1 8 0 1557 5 95 1322 0 1 -"30555" 4 566 3 1 1 1 4 1 100 2843 2714 2850 2834 5 4 1 359 1 5 136 1 1 -"30556" 4 566 3 1 1 1 4 1 100 2843 2850 2993 3716 4 2 1 504 1 5 143 1 1 -"30557" 4 566 3 1 1 1 4 1 100 2843 2993 2843 6158 6 3 0 251 1 5 150 1 0 -"30558" 4 566 4 0 1 0 1 1 100 1297 100 175 10697 2 7 0 4166 2 75 75 0 1 -"30559" 4 566 4 0 1 0 1 1 100 1297 175 341 5504 3 7 0 380 1 95 166 0 1 -"30560" 4 566 4 0 1 0 1 1 100 1297 341 665 3960 2 8 0 1655 1 95 324 0 1 -"30561" 4 566 4 0 1 0 1 1 100 1297 665 1297 1756 8 9 0 391 1 95 632 0 1 -"30562" 4 566 5 1 1 0 1 1 100 6240 100 195 3107 1 3 0 415 1 95 95 0 1 -"30563" 4 566 5 1 1 0 1 1 100 6240 195 293 1510 6 5 1 1078 3 50 98 1 1 -"30564" 4 566 5 1 1 0 1 1 100 6240 293 513 1538 2 8 0 659 2 75 220 0 1 -"30565" 4 566 5 1 1 0 1 1 100 6240 513 641 6903 8 9 0 2333 4 25 128 0 1 -"30566" 4 566 5 1 1 0 1 1 100 6240 641 1250 1680 3 4 0 4257 1 95 609 0 1 -"30567" 4 566 5 1 1 0 1 1 100 6240 1250 1313 3692 5 7 0 0 5 5 63 0 1 -"30568" 4 566 5 1 1 0 1 1 100 6240 1313 1641 4840 7 4 1 4185 4 25 328 1 1 -"30569" 4 566 5 1 1 0 1 1 100 6240 1641 3200 4258 4 1 1 300 1 95 1559 1 1 -"30570" 4 566 5 1 1 0 1 1 100 6240 3200 6240 1918 9 6 1 2649 1 95 3040 1 1 -"30571" 4 566 5 1 1 0 2 1 100 228 100 195 3074 8 1 1 366 1 95 95 1 1 -"30572" 4 566 5 1 1 0 2 1 100 228 195 293 4413 6 2 1 414 3 50 98 1 1 -"30573" 4 566 5 1 1 0 2 1 100 228 293 366 6687 7 9 0 1648 4 25 73 0 1 -"30574" 4 566 5 1 1 0 2 1 100 228 366 714 1252 2 10 0 1003 1 95 348 0 1 -"30575" 4 566 5 1 1 0 2 1 100 228 714 678 3942 5 3 0 0 5 5 36 1 0 -"30576" 4 566 5 1 1 0 2 1 100 228 678 34 4293 4 2 0 281 1 95 644 1 0 -"30577" 4 566 5 1 1 0 2 1 100 228 34 60 1439 3 5 0 366 2 75 26 0 1 -"30578" 4 566 5 1 1 0 2 1 100 228 60 117 1359 9 4 1 349 1 95 57 1 1 -"30579" 4 566 5 1 1 0 2 1 100 228 117 228 1239 1 7 0 333 1 95 111 0 1 -"30580" 4 566 5 1 1 0 3 1 100 43 100 195 1994 7 5 1 401 1 95 95 1 1 -"30581" 4 566 5 1 1 0 3 1 100 43 195 10 1531 2 1 0 393 1 95 185 1 0 -"30582" 4 566 5 1 1 0 3 1 100 43 10 18 1303 8 6 1 1336 2 75 8 1 1 -"30583" 4 566 5 1 1 0 3 1 100 43 18 32 7049 4 7 0 2253 2 75 14 0 1 -"30584" 4 566 5 1 1 0 3 1 100 43 32 8 4101 3 10 1 921 2 75 24 0 0 -"30585" 4 566 5 1 1 0 3 1 100 43 8 12 6132 6 8 0 419 3 50 4 0 1 -"30586" 4 566 5 1 1 0 3 1 100 43 12 21 1729 9 2 1 442 2 75 9 1 1 -"30587" 4 566 5 1 1 0 3 1 100 43 21 22 3286 5 3 1 0 5 5 1 1 1 -"30588" 4 566 5 1 1 0 3 1 100 43 22 43 1510 1 10 0 2573 1 95 21 0 1 -"30589" 4 566 5 1 1 0 4 1 100 10604 100 195 5499 2 3 0 399 1 95 95 0 1 -"30590" 4 566 5 1 1 0 4 1 100 10604 195 380 1297 7 4 1 319 1 95 185 1 1 -"30591" 4 566 5 1 1 0 4 1 100 10604 380 741 7405 3 1 1 432 1 95 361 1 1 -"30592" 4 566 5 1 1 0 4 1 100 10604 741 1445 
1655 1 9 0 612 1 95 704 0 1 -"30593" 4 566 5 1 1 0 4 1 100 10604 1445 2529 1446 8 7 1 365 2 75 1084 1 1 -"30594" 4 566 5 1 1 0 4 1 100 10604 2529 4932 1559 9 2 1 306 1 95 2403 1 1 -"30595" 4 566 5 1 1 0 4 1 100 10604 4932 5179 3056 5 6 0 0 5 5 247 0 1 -"30596" 4 566 5 1 1 0 4 1 100 10604 5179 10099 9579 6 8 0 686 1 95 4920 0 1 -"30597" 4 566 5 1 1 0 4 1 100 10604 10099 10604 7305 4 7 0 0 5 5 505 0 1 -"30598" 4 575 2 0 1 0 1 1 100 115 100 150 25348 2 7 0 2334 3 50 50 0 1 -"30599" 4 575 2 0 1 0 1 1 100 115 150 263 6329 3 7 0 1724 2 75 113 0 1 -"30600" 4 575 2 0 1 0 1 1 100 115 263 460 2623 2 8 0 1605 2 75 197 0 1 -"30601" 4 575 2 0 1 0 1 1 100 115 460 115 2431 8 9 1 812 2 75 345 0 0 -"30602" 4 575 3 1 1 0 1 1 100 1183 100 195 2777 1 3 0 4954 1 95 95 0 1 -"30603" 4 575 3 1 1 0 1 1 100 1183 195 293 2672 6 5 1 878 3 50 98 1 1 -"30604" 4 575 3 1 1 0 1 1 100 1183 293 513 2153 2 8 0 1179 2 75 220 0 1 -"30605" 4 575 3 1 1 0 1 1 100 1183 513 641 2996 8 9 0 1222 4 25 128 0 1 -"30606" 4 575 3 1 1 0 1 1 100 1183 641 962 2242 3 4 0 1422 3 50 321 0 1 -"30607" 4 575 3 1 1 0 1 1 100 1183 962 721 4596 5 7 1 2267 4 25 241 0 0 -"30608" 4 575 3 1 1 0 1 1 100 1183 721 901 2300 7 4 1 1766 4 25 180 1 1 -"30609" 4 575 3 1 1 0 1 1 100 1183 901 676 2897 4 1 0 1270 4 25 225 1 0 -"30610" 4 575 3 1 1 0 1 1 100 1183 676 1183 3922 9 6 1 841 2 75 507 1 1 -"30611" 4 575 3 1 1 0 2 1 100 1975 100 175 4082 8 1 1 1768 2 75 75 1 1 -"30612" 4 575 3 1 1 0 2 1 100 1975 175 219 2981 6 2 1 848 4 25 44 1 1 -"30613" 4 575 3 1 1 0 2 1 100 1975 219 274 2927 7 9 0 1439 4 25 55 0 1 -"30614" 4 575 3 1 1 0 2 1 100 1975 274 411 2347 2 10 0 1234 3 50 137 0 1 -"30615" 4 575 3 1 1 0 2 1 100 1975 411 617 2502 5 3 1 1113 3 50 206 1 1 -"30616" 4 575 3 1 1 0 2 1 100 1975 617 463 2256 4 2 0 874 4 25 154 1 0 -"30617" 4 575 3 1 1 0 2 1 100 1975 463 579 2114 3 5 0 1057 4 25 116 0 1 -"30618" 4 575 3 1 1 0 2 1 100 1975 579 1013 2364 9 4 1 2262 2 75 434 1 1 -"30619" 4 575 3 1 1 0 2 1 100 1975 1013 1975 2220 1 7 0 2408 1 95 962 0 1 -"30620" 4 575 3 1 1 0 3 1 100 47 100 125 2185 7 5 1 933 4 25 25 1 1 -"30621" 4 575 3 1 1 0 3 1 100 47 125 62 1770 2 1 0 784 3 50 63 1 0 -"30622" 4 575 3 1 1 0 3 1 100 47 62 109 1980 8 6 1 2020 2 75 47 1 1 -"30623" 4 575 3 1 1 0 3 1 100 47 109 164 2431 4 7 0 1788 3 50 55 0 1 -"30624" 4 575 3 1 1 0 3 1 100 47 164 123 2367 3 10 1 1543 4 25 41 0 0 -"30625" 4 575 3 1 1 0 3 1 100 47 123 61 1620 6 8 1 1154 3 50 62 0 0 -"30626" 4 575 3 1 1 0 3 1 100 47 61 107 2020 9 2 1 831 2 75 46 1 1 -"30627" 4 575 3 1 1 0 3 1 100 47 107 27 2248 5 3 0 1355 2 75 80 1 0 -"30628" 4 575 3 1 1 0 3 1 100 47 27 47 1928 1 10 0 1413 2 75 20 0 1 -"30629" 4 575 3 1 1 0 4 1 100 2733 100 175 2338 2 3 0 1116 2 75 75 0 1 -"30630" 4 575 3 1 1 0 4 1 100 2733 175 263 2187 7 4 1 883 3 50 88 1 1 -"30631" 4 575 3 1 1 0 4 1 100 2733 263 329 2063 3 1 1 1741 4 25 66 1 1 -"30632" 4 575 3 1 1 0 4 1 100 2733 329 642 1656 1 9 0 2326 1 95 313 0 1 -"30633" 4 575 3 1 1 0 4 1 100 2733 642 1124 2015 8 7 1 1755 2 75 482 1 1 -"30634" 4 575 3 1 1 0 4 1 100 2733 1124 2192 2128 9 2 1 1410 1 95 1068 1 1 -"30635" 4 575 3 1 1 0 4 1 100 2733 2192 2740 2001 5 6 0 1977 4 25 548 0 1 -"30636" 4 575 3 1 1 0 4 1 100 2733 2740 2877 2072 6 8 0 1354 5 5 137 0 1 -"30637" 4 575 3 1 1 0 4 1 100 2733 2877 2733 2160 4 7 1 913 5 5 144 0 0 -"30638" 4 575 4 0 1 1 1 1 100 207 100 150 10607 8 3 1 1154 3 50 50 1 1 -"30639" 4 575 4 0 1 1 1 1 100 207 150 188 2863 3 7 0 805 2 25 38 0 1 -"30640" 4 575 4 0 1 1 1 1 100 207 188 197 2809 8 2 1 846 1 5 9 1 1 -"30641" 4 575 4 0 1 1 1 1 100 207 197 207 2078 2 1 1 675 1 5 
10 1 1 -"30642" 4 575 5 1 1 1 1 1 100 557 100 150 1885 9 7 1 665 3 50 50 1 1 -"30643" 4 575 5 1 1 1 1 1 100 557 150 188 1484 4 8 0 2330 2 25 38 0 1 -"30644" 4 575 5 1 1 1 1 1 100 557 188 282 1054 8 2 1 1183 3 50 94 1 1 -"30645" 4 575 5 1 1 1 1 1 100 557 282 211 1246 2 1 0 1331 2 25 71 1 0 -"30646" 4 575 5 1 1 1 1 1 100 557 211 264 1259 7 6 1 1697 2 25 53 1 1 -"30647" 4 575 5 1 1 1 1 1 100 557 264 198 1114 5 3 0 972 2 25 66 1 0 -"30648" 4 575 5 1 1 1 1 1 100 557 198 297 1136 3 6 0 2171 3 50 99 0 1 -"30649" 4 575 5 1 1 1 1 1 100 557 297 371 1560 6 9 0 671 2 25 74 0 1 -"30650" 4 575 5 1 1 1 1 1 100 557 371 557 1199 1 4 0 1085 3 50 186 0 1 -"30651" 4 575 5 1 1 1 2 1 100 374 100 95 2913 2 9 1 782 1 5 5 0 0 -"30652" 4 575 5 1 1 1 2 1 100 374 95 119 1036 4 10 0 878 2 25 24 0 1 -"30653" 4 575 5 1 1 1 2 1 100 374 119 149 1459 3 1 1 845 2 25 30 1 1 -"30654" 4 575 5 1 1 1 2 1 100 374 149 224 1402 8 6 1 940 3 50 75 1 1 -"30655" 4 575 5 1 1 1 2 1 100 374 224 280 1253 5 7 0 3443 2 25 56 0 1 -"30656" 4 575 5 1 1 1 2 1 100 374 280 210 1879 6 8 1 2503 2 25 70 0 0 -"30657" 4 575 5 1 1 1 2 1 100 374 210 315 1412 7 5 1 1168 3 50 105 1 1 -"30658" 4 575 5 1 1 1 2 1 100 374 315 394 1194 1 10 0 1461 2 25 79 0 1 -"30659" 4 575 5 1 1 1 2 1 100 374 394 374 2313 9 3 0 765 1 5 20 1 0 -"30660" 4 575 5 1 1 1 3 1 100 87 100 75 2371 3 5 1 785 2 25 25 0 0 -"30661" 4 575 5 1 1 1 3 1 100 87 75 37 1388 8 9 1 1379 3 50 38 0 0 -"30662" 4 575 5 1 1 1 3 1 100 87 37 56 1418 2 4 0 974 3 50 19 0 1 -"30663" 4 575 5 1 1 1 3 1 100 87 56 84 1384 6 3 1 927 3 50 28 1 1 -"30664" 4 575 5 1 1 1 3 1 100 87 84 105 1458 7 6 1 1587 2 25 21 1 1 -"30665" 4 575 5 1 1 1 3 1 100 87 105 52 1040 4 2 0 1287 3 50 53 1 0 -"30666" 4 575 5 1 1 1 3 1 100 87 52 78 1283 1 8 0 1082 3 50 26 0 1 -"30667" 4 575 5 1 1 1 3 1 100 87 78 58 1391 5 7 1 1632 2 25 20 0 0 -"30668" 4 575 5 1 1 1 3 1 100 87 58 87 2682 9 5 1 864 3 50 29 1 1 -"30669" 4 575 5 1 1 1 4 1 100 187 100 150 1616 8 7 1 788 3 50 50 1 1 -"30670" 4 575 5 1 1 1 4 1 100 187 150 188 1267 3 10 0 828 2 25 38 0 1 -"30671" 4 575 5 1 1 1 4 1 100 187 188 94 1665 7 9 1 903 3 50 94 0 0 -"30672" 4 575 5 1 1 1 4 1 100 187 94 118 1164 9 1 1 1111 2 25 24 1 1 -"30673" 4 575 5 1 1 1 4 1 100 187 118 177 1061 2 3 0 880 3 50 59 0 1 -"30674" 4 575 5 1 1 1 4 1 100 187 177 266 1539 1 8 0 645 3 50 89 0 1 -"30675" 4 575 5 1 1 1 4 1 100 187 266 333 1156 5 4 1 842 2 25 67 1 1 -"30676" 4 575 5 1 1 1 4 1 100 187 333 250 2349 4 2 0 745 2 25 83 1 0 -"30677" 4 575 5 1 1 1 4 1 100 187 250 187 1373 6 3 0 813 2 25 63 1 0 -"30678" 4 583 2 0 1 1 1 1 100 106 100 105 25011 8 3 1 1579 1 5 5 1 1 -"30679" 4 583 2 0 1 1 1 1 100 106 105 52 9333 3 7 1 1317 3 50 53 0 0 -"30680" 4 583 2 0 1 1 1 1 100 106 52 101 3376 8 2 1 1380 5 95 49 1 1 -"30681" 4 583 2 0 1 1 1 1 100 106 101 106 4077 2 1 1 916 1 5 5 1 1 -"30682" 4 583 3 1 1 1 1 1 100 513 100 175 2921 9 7 1 1045 4 75 75 1 1 -"30683" 4 583 3 1 1 1 1 1 100 513 175 166 4370 4 8 1 1374 1 5 9 0 0 -"30684" 4 583 3 1 1 1 1 1 100 513 166 208 2669 8 2 1 694 2 25 42 1 1 -"30685" 4 583 3 1 1 1 1 1 100 513 208 198 3879 2 1 0 2492 1 5 10 1 0 -"30686" 4 583 3 1 1 1 1 1 100 513 198 297 1784 7 6 1 662 3 50 99 1 1 -"30687" 4 583 3 1 1 1 1 1 100 513 297 312 4839 5 3 1 1220 1 5 15 1 1 -"30688" 4 583 3 1 1 1 1 1 100 513 312 390 1793 3 6 0 1294 2 25 78 0 1 -"30689" 4 583 3 1 1 1 1 1 100 513 390 410 2208 6 9 0 1018 1 5 20 0 1 -"30690" 4 583 3 1 1 1 1 1 100 513 410 513 1555 1 4 0 2619 2 25 103 0 1 -"30691" 4 583 3 1 1 1 2 1 100 521 100 125 2475 2 9 0 678 2 25 25 0 1 -"30692" 4 583 3 1 1 1 2 1 100 521 125 156 1770 4 10 0 
1146 2 25 31 0 1 -"30693" 4 583 3 1 1 1 2 1 100 521 156 164 1982 3 1 1 3746 1 5 8 1 1 -"30694" 4 583 3 1 1 1 2 1 100 521 164 205 3186 8 6 1 1863 2 25 41 1 1 -"30695" 4 583 3 1 1 1 2 1 100 521 205 195 1817 5 7 1 970 1 5 10 0 0 -"30696" 4 583 3 1 1 1 2 1 100 521 195 185 1402 6 8 1 1085 1 5 10 0 0 -"30697" 4 583 3 1 1 1 2 1 100 521 185 278 1762 7 5 1 1377 3 50 93 1 1 -"30698" 4 583 3 1 1 1 2 1 100 521 278 417 1835 1 10 0 1073 3 50 139 0 1 -"30699" 4 583 3 1 1 1 2 1 100 521 417 521 1993 9 3 1 920 2 25 104 1 1 -"30700" 4 583 3 1 1 1 3 1 100 380 100 105 2665 3 5 0 859 1 5 5 0 1 -"30701" 4 583 3 1 1 1 3 1 100 380 105 110 2646 8 9 0 676 1 5 5 0 1 -"30702" 4 583 3 1 1 1 3 1 100 380 110 193 1977 2 4 0 524 4 75 83 0 1 -"30703" 4 583 3 1 1 1 3 1 100 380 193 183 2349 6 3 0 1121 1 5 10 1 0 -"30704" 4 583 3 1 1 1 3 1 100 380 183 229 1415 7 6 1 1234 2 25 46 1 1 -"30705" 4 583 3 1 1 1 3 1 100 380 229 218 2015 4 2 0 2820 1 5 11 1 0 -"30706" 4 583 3 1 1 1 3 1 100 380 218 207 2320 1 8 1 1206 1 5 11 0 0 -"30707" 4 583 3 1 1 1 3 1 100 380 207 217 2030 5 7 0 921 1 5 10 0 1 -"30708" 4 583 3 1 1 1 3 1 100 380 217 380 1333 9 5 1 552 4 75 163 1 1 -"30709" 4 583 3 1 1 1 4 1 100 218 100 105 2167 8 7 1 1180 1 5 5 1 1 -"30710" 4 583 3 1 1 1 4 1 100 218 105 131 1164 3 10 0 461 2 25 26 0 1 -"30711" 4 583 3 1 1 1 4 1 100 218 131 98 1682 7 9 1 1116 2 25 33 0 0 -"30712" 4 583 3 1 1 1 4 1 100 218 98 172 1597 9 1 1 1401 4 75 74 1 1 -"30713" 4 583 3 1 1 1 4 1 100 218 172 258 1721 2 3 0 819 3 50 86 0 1 -"30714" 4 583 3 1 1 1 4 1 100 218 258 245 2129 1 8 1 1260 1 5 13 0 0 -"30715" 4 583 3 1 1 1 4 1 100 218 245 306 1607 5 4 1 935 2 25 61 1 1 -"30716" 4 583 3 1 1 1 4 1 100 218 306 291 2057 4 2 0 978 1 5 15 1 0 -"30717" 4 583 3 1 1 1 4 1 100 218 291 218 942 6 3 0 1862 2 25 73 1 0 -"30718" 4 583 4 0 1 0 1 1 100 13 100 195 3557 2 7 0 1766 1 95 95 0 1 -"30719" 4 583 4 0 1 0 1 1 100 13 195 341 2588 3 7 0 1427 2 75 146 0 1 -"30720" 4 583 4 0 1 0 1 1 100 13 341 256 1990 2 8 1 1412 4 25 85 0 0 -"30721" 4 583 4 0 1 0 1 1 100 13 256 13 1551 8 9 1 1152 1 95 243 0 0 -"30722" 4 583 5 1 1 0 1 1 100 68 100 195 1481 1 3 0 1338 1 95 95 0 1 -"30723" 4 583 5 1 1 0 1 1 100 68 195 380 4040 6 5 1 1342 1 95 185 1 1 -"30724" 4 583 5 1 1 0 1 1 100 68 380 741 3894 2 8 0 763 1 95 361 0 1 -"30725" 4 583 5 1 1 0 1 1 100 68 741 370 1266 8 9 1 1225 3 50 371 0 0 -"30726" 4 583 5 1 1 0 1 1 100 68 370 722 2212 3 4 0 1296 1 95 352 0 1 -"30727" 4 583 5 1 1 0 1 1 100 68 722 361 2542 5 7 1 1164 3 50 361 0 0 -"30728" 4 583 5 1 1 0 1 1 100 68 361 704 1844 7 4 1 1065 1 95 343 1 1 -"30729" 4 583 5 1 1 0 1 1 100 68 704 35 2384 4 1 0 2151 1 95 669 1 0 -"30730" 4 583 5 1 1 0 1 1 100 68 35 68 1424 9 6 1 2331 1 95 33 1 1 -"30731" 4 583 5 1 1 0 2 1 100 9635 100 195 4301 8 1 1 1735 1 95 95 1 1 -"30732" 4 583 5 1 1 0 2 1 100 9635 195 380 1656 6 2 1 1559 1 95 185 1 1 -"30733" 4 583 5 1 1 0 2 1 100 9635 380 475 1811 7 9 0 632 4 25 95 0 1 -"30734" 4 583 5 1 1 0 2 1 100 9635 475 926 1953 2 10 0 1028 1 95 451 0 1 -"30735" 4 583 5 1 1 0 2 1 100 9635 926 1158 1363 5 3 1 1306 4 25 232 1 1 -"30736" 4 583 5 1 1 0 2 1 100 9635 1158 2027 2239 4 2 1 1314 2 75 869 1 1 -"30737" 4 583 5 1 1 0 2 1 100 9635 2027 2534 1641 3 5 0 735 4 25 507 0 1 -"30738" 4 583 5 1 1 0 2 1 100 9635 2534 4941 2670 9 4 1 1025 1 95 2407 1 1 -"30739" 4 583 5 1 1 0 2 1 100 9635 4941 9635 1362 1 7 0 1095 1 95 4694 0 1 -"30740" 4 583 5 1 1 0 3 1 100 15 100 195 2787 7 5 1 973 1 95 95 1 1 -"30741" 4 583 5 1 1 0 3 1 100 15 195 10 1389 2 1 0 943 1 95 185 1 0 -"30742" 4 583 5 1 1 0 3 1 100 15 10 20 1399 8 6 1 633 1 95 10 1 1 
-"30743" 4 583 5 1 1 0 3 1 100 15 20 39 1920 4 7 0 763 1 95 19 0 1 -"30744" 4 583 5 1 1 0 3 1 100 15 39 76 1349 3 10 0 1055 1 95 37 0 1 -"30745" 4 583 5 1 1 0 3 1 100 15 76 4 1258 6 8 1 969 1 95 72 0 0 -"30746" 4 583 5 1 1 0 3 1 100 15 4 8 2426 9 2 1 922 1 95 4 1 1 -"30747" 4 583 5 1 1 0 3 1 100 15 8 16 1246 5 3 1 1272 1 95 8 1 1 -"30748" 4 583 5 1 1 0 3 1 100 15 16 15 812 1 10 1 730 5 5 1 0 0 -"30749" 4 583 5 1 1 0 4 1 100 1045 100 195 1892 2 3 0 1436 1 95 95 0 1 -"30750" 4 583 5 1 1 0 4 1 100 1045 195 380 1127 7 4 1 896 1 95 185 1 1 -"30751" 4 583 5 1 1 0 4 1 100 1045 380 741 1737 3 1 1 973 1 95 361 1 1 -"30752" 4 583 5 1 1 0 4 1 100 1045 741 1445 1469 1 9 0 962 1 95 704 0 1 -"30753" 4 583 5 1 1 0 4 1 100 1045 1445 2818 3524 8 7 1 2442 1 95 1373 1 1 -"30754" 4 583 5 1 1 0 4 1 100 1045 2818 5495 2290 9 2 1 1250 1 95 2677 1 1 -"30755" 4 583 5 1 1 0 4 1 100 1045 5495 10715 1634 5 6 0 1047 1 95 5220 0 1 -"30756" 4 583 5 1 1 0 4 1 100 1045 10715 20894 2126 6 8 0 952 1 95 10179 0 1 -"30757" 4 583 5 1 1 0 4 1 100 1045 20894 1045 2444 4 7 1 868 1 95 19849 0 0 -"30758" 4 602 2 0 1 1 1 1 100 246 100 150 18222 8 3 1 2612 3 50 50 1 1 -"30759" 4 602 2 0 1 1 1 1 100 246 150 263 31168 3 7 0 2755 4 75 113 0 1 -"30760" 4 602 2 0 1 1 1 1 100 246 263 197 8006 8 2 0 639 2 25 66 1 0 -"30761" 4 602 2 0 1 1 1 1 100 246 197 246 4123 2 1 1 1099 2 25 49 1 1 -"30762" 4 602 3 1 1 1 1 1 100 304 100 175 1767 9 7 1 980 4 75 75 1 1 -"30763" 4 602 3 1 1 1 1 1 100 304 175 219 3059 4 8 0 607 2 25 44 0 1 -"30764" 4 602 3 1 1 1 1 1 100 304 219 274 1893 8 2 1 2351 2 25 55 1 1 -"30765" 4 602 3 1 1 1 1 1 100 304 274 205 2409 2 1 0 881 2 25 69 1 0 -"30766" 4 602 3 1 1 1 1 1 100 304 205 256 2068 7 6 1 1233 2 25 51 1 1 -"30767" 4 602 3 1 1 1 1 1 100 304 256 269 3252 5 3 1 687 1 5 13 1 1 -"30768" 4 602 3 1 1 1 1 1 100 304 269 256 1596 3 6 1 465 1 5 13 0 0 -"30769" 4 602 3 1 1 1 1 1 100 304 256 243 2747 6 9 1 454 1 5 13 0 0 -"30770" 4 602 3 1 1 1 1 1 100 304 243 304 1601 1 4 0 620 2 25 61 0 1 -"30771" 4 602 3 1 1 1 2 1 100 438 100 150 2885 2 9 0 535 3 50 50 0 1 -"30772" 4 602 3 1 1 1 2 1 100 438 150 188 2643 4 10 0 3544 2 25 38 0 1 -"30773" 4 602 3 1 1 1 2 1 100 438 188 235 1716 3 1 1 444 2 25 47 1 1 -"30774" 4 602 3 1 1 1 2 1 100 438 235 294 2036 8 6 1 644 2 25 59 1 1 -"30775" 4 602 3 1 1 1 2 1 100 438 294 279 3288 5 7 1 1289 1 5 15 0 0 -"30776" 4 602 3 1 1 1 2 1 100 438 279 265 1979 6 8 1 556 1 5 14 0 0 -"30777" 4 602 3 1 1 1 2 1 100 438 265 278 1640 7 5 1 591 1 5 13 1 1 -"30778" 4 602 3 1 1 1 2 1 100 438 278 417 1352 1 10 0 985 3 50 139 0 1 -"30779" 4 602 3 1 1 1 2 1 100 438 417 438 1841 9 3 1 1772 1 5 21 1 1 -"30780" 4 602 3 1 1 1 3 1 100 201 100 95 4784 3 5 1 857 1 5 5 0 0 -"30781" 4 602 3 1 1 1 3 1 100 201 95 47 1511 8 9 1 874 3 50 48 0 0 -"30782" 4 602 3 1 1 1 3 1 100 201 47 59 3839 2 4 0 1926 2 25 12 0 1 -"30783" 4 602 3 1 1 1 3 1 100 201 59 74 3036 6 3 1 666 2 25 15 1 1 -"30784" 4 602 3 1 1 1 3 1 100 201 74 93 2851 7 6 1 633 2 25 19 1 1 -"30785" 4 602 3 1 1 1 3 1 100 201 93 70 2149 4 2 0 553 2 25 23 1 0 -"30786" 4 602 3 1 1 1 3 1 100 201 70 137 3691 1 8 0 1372 5 95 67 0 1 -"30787" 4 602 3 1 1 1 3 1 100 201 137 103 1588 5 7 1 1078 2 25 34 0 0 -"30788" 4 602 3 1 1 1 3 1 100 201 103 201 1725 9 5 1 1287 5 95 98 1 1 -"30789" 4 602 3 1 1 1 4 1 100 615 100 150 2247 8 7 1 537 3 50 50 1 1 -"30790" 4 602 3 1 1 1 4 1 100 615 150 188 1714 3 10 0 1011 2 25 38 0 1 -"30791" 4 602 3 1 1 1 4 1 100 615 188 179 1333 7 9 1 1269 1 5 9 0 0 -"30792" 4 602 3 1 1 1 4 1 100 615 179 269 1891 9 1 1 1074 3 50 90 1 1 -"30793" 4 602 3 1 1 1 4 1 100 615 
269 336 1594 2 3 0 1087 2 25 67 0 1 -"30794" 4 602 3 1 1 1 4 1 100 615 336 588 3072 1 8 0 1156 4 75 252 0 1 -"30795" 4 602 3 1 1 1 4 1 100 615 588 617 1659 5 4 1 970 1 5 29 1 1 -"30796" 4 602 3 1 1 1 4 1 100 615 617 586 4061 4 2 0 783 1 5 31 1 0 -"30797" 4 602 3 1 1 1 4 1 100 615 586 615 1585 6 3 1 849 1 5 29 1 1 -"30798" 4 602 4 0 1 0 1 1 100 29 100 150 9347 2 7 0 1788 3 50 50 0 1 -"30799" 4 602 4 0 1 0 1 1 100 29 150 293 11499 3 7 0 726 1 95 143 0 1 -"30800" 4 602 4 0 1 0 1 1 100 29 293 571 2212 2 8 0 1348 1 95 278 0 1 -"30801" 4 602 4 0 1 0 1 1 100 29 571 29 1604 8 9 1 654 1 95 542 0 0 -"30802" 4 602 5 1 1 0 1 1 100 1043 100 195 1488 1 3 0 755 1 95 95 0 1 -"30803" 4 602 5 1 1 0 1 1 100 1043 195 293 1570 6 5 1 646 3 50 98 1 1 -"30804" 4 602 5 1 1 0 1 1 100 1043 293 440 2287 2 8 0 1462 3 50 147 0 1 -"30805" 4 602 5 1 1 0 1 1 100 1043 440 220 1337 8 9 1 1334 3 50 220 0 0 -"30806" 4 602 5 1 1 0 1 1 100 1043 220 429 2267 3 4 0 2330 1 95 209 0 1 -"30807" 4 602 5 1 1 0 1 1 100 1043 429 450 1563 5 7 0 1570 5 5 21 0 1 -"30808" 4 602 5 1 1 0 1 1 100 1043 450 563 1544 7 4 1 687 4 25 113 1 1 -"30809" 4 602 5 1 1 0 1 1 100 1043 563 535 2183 4 1 0 625 5 5 28 1 0 -"30810" 4 602 5 1 1 0 1 1 100 1043 535 1043 1335 9 6 1 796 1 95 508 1 1 -"30811" 4 602 5 1 1 0 2 1 100 452 100 195 14419 8 1 1 883 1 95 95 1 1 -"30812" 4 602 5 1 1 0 2 1 100 452 195 293 2343 6 2 1 1188 3 50 98 1 1 -"30813" 4 602 5 1 1 0 2 1 100 452 293 278 2355 7 9 1 452 5 5 15 0 0 -"30814" 4 602 5 1 1 0 2 1 100 452 278 487 1451 2 10 0 866 2 75 209 0 1 -"30815" 4 602 5 1 1 0 2 1 100 452 487 243 1630 5 3 0 1389 3 50 244 1 0 -"30816" 4 602 5 1 1 0 2 1 100 452 243 61 2327 4 2 0 590 2 75 182 1 0 -"30817" 4 602 5 1 1 0 2 1 100 452 61 119 4741 3 5 0 1844 1 95 58 0 1 -"30818" 4 602 5 1 1 0 2 1 100 452 119 232 1456 9 4 1 842 1 95 113 1 1 -"30819" 4 602 5 1 1 0 2 1 100 452 232 452 1528 1 7 0 1051 1 95 220 0 1 -"30820" 4 602 5 1 1 0 3 1 100 560 100 195 1470 7 5 1 516 1 95 95 1 1 -"30821" 4 602 5 1 1 0 3 1 100 560 195 49 1332 2 1 0 732 2 75 146 1 0 -"30822" 4 602 5 1 1 0 3 1 100 560 49 96 1618 8 6 1 662 1 95 47 1 1 -"30823" 4 602 5 1 1 0 3 1 100 560 96 101 5467 4 7 0 598 5 5 5 0 1 -"30824" 4 602 5 1 1 0 3 1 100 560 101 126 2021 3 10 0 558 4 25 25 0 1 -"30825" 4 602 5 1 1 0 3 1 100 560 126 94 2394 6 8 1 2624 4 25 32 0 0 -"30826" 4 602 5 1 1 0 3 1 100 560 94 183 1551 9 2 1 948 1 95 89 1 1 -"30827" 4 602 5 1 1 0 3 1 100 560 183 320 1574 5 3 1 974 2 75 137 1 1 -"30828" 4 602 5 1 1 0 3 1 100 560 320 560 1058 1 10 0 527 2 75 240 0 1 -"30829" 4 602 5 1 1 0 4 1 100 1533 100 195 2204 2 3 0 474 1 95 95 0 1 -"30830" 4 602 5 1 1 0 4 1 100 1533 195 293 1973 7 4 1 1275 3 50 98 1 1 -"30831" 4 602 5 1 1 0 4 1 100 1533 293 220 1469 3 1 0 834 4 25 73 1 0 -"30832" 4 602 5 1 1 0 4 1 100 1533 220 429 1347 1 9 0 533 1 95 209 0 1 -"30833" 4 602 5 1 1 0 4 1 100 1533 429 751 1473 8 7 1 757 2 75 322 1 1 -"30834" 4 602 5 1 1 0 4 1 100 1533 751 1464 1726 9 2 1 573 1 95 713 1 1 -"30835" 4 602 5 1 1 0 4 1 100 1533 1464 1537 3671 5 6 0 0 5 5 73 0 1 -"30836" 4 602 5 1 1 0 4 1 100 1533 1537 1460 5048 6 8 1 0 5 5 77 0 0 -"30837" 4 602 5 1 1 0 4 1 100 1533 1460 1533 4986 4 7 0 0 5 5 73 0 1 -"30838" 4 621 2 0 1 0 1 1 100 141 100 125 18602 2 7 0 9 4 25 25 0 1 -"30839" 4 621 2 0 1 0 1 1 100 141 125 188 3720 3 7 0 1078 3 50 63 0 1 -"30840" 4 621 2 0 1 0 1 1 100 141 188 282 3261 2 8 0 1628 3 50 94 0 1 -"30841" 4 621 2 0 1 0 1 1 100 141 282 141 2610 8 9 1 1692 3 50 141 0 0 -"30842" 4 621 3 1 1 0 1 1 100 51 100 195 3338 1 3 0 942 1 95 95 0 1 -"30843" 4 621 3 1 1 0 1 1 100 51 195 293 4663 6 5 1 
927 3 50 98 1 1 -"30844" 4 621 3 1 1 0 1 1 100 51 293 146 2148 2 8 1 1047 3 50 147 0 0 -"30845" 4 621 3 1 1 0 1 1 100 51 146 7 3908 8 9 1 1023 1 95 139 0 0 -"30846" 4 621 3 1 1 0 1 1 100 51 7 14 2537 3 4 0 919 1 95 7 0 1 -"30847" 4 621 3 1 1 0 1 1 100 51 14 27 4175 5 7 0 1014 1 95 13 0 1 -"30848" 4 621 3 1 1 0 1 1 100 51 27 53 2343 7 4 1 1712 1 95 26 1 1 -"30849" 4 621 3 1 1 0 1 1 100 51 53 26 3470 4 1 0 949 3 50 27 1 0 -"30850" 4 621 3 1 1 0 1 1 100 51 26 51 2756 9 6 1 787 1 95 25 1 1 -"30851" 4 621 3 1 1 0 2 1 100 2284 100 195 2980 8 1 1 899 1 95 95 1 1 -"30852" 4 621 3 1 1 0 2 1 100 2284 195 293 2468 6 2 1 1180 3 50 98 1 1 -"30853" 4 621 3 1 1 0 2 1 100 2284 293 366 4409 7 9 0 999 4 25 73 0 1 -"30854" 4 621 3 1 1 0 2 1 100 2284 366 549 1989 2 10 0 868 3 50 183 0 1 -"30855" 4 621 3 1 1 0 2 1 100 2284 549 686 3917 5 3 1 581 4 25 137 1 1 -"30856" 4 621 3 1 1 0 2 1 100 2284 686 343 2731 4 2 0 1812 3 50 343 1 0 -"30857" 4 621 3 1 1 0 2 1 100 2284 343 669 2029 3 5 0 767 1 95 326 0 1 -"30858" 4 621 3 1 1 0 2 1 100 2284 669 1305 2666 9 4 1 811 1 95 636 1 1 -"30859" 4 621 3 1 1 0 2 1 100 2284 1305 2284 1839 1 7 0 1234 2 75 979 0 1 -"30860" 4 621 3 1 1 0 3 1 100 320 100 175 2289 7 5 1 843 2 75 75 1 1 -"30861" 4 621 3 1 1 0 3 1 100 320 175 44 1765 2 1 0 823 2 75 131 1 0 -"30862" 4 621 3 1 1 0 3 1 100 320 44 86 1749 8 6 1 682 1 95 42 1 1 -"30863" 4 621 3 1 1 0 3 1 100 320 86 129 2466 4 7 0 894 3 50 43 0 1 -"30864" 4 621 3 1 1 0 3 1 100 320 129 226 1897 3 10 0 1098 2 75 97 0 1 -"30865" 4 621 3 1 1 0 3 1 100 320 226 56 2109 6 8 1 1274 2 75 170 0 0 -"30866" 4 621 3 1 1 0 3 1 100 320 56 109 1832 9 2 1 634 1 95 53 1 1 -"30867" 4 621 3 1 1 0 3 1 100 320 109 164 2722 5 3 1 982 3 50 55 1 1 -"30868" 4 621 3 1 1 0 3 1 100 320 164 320 1905 1 10 0 807 1 95 156 0 1 -"30869" 4 621 3 1 1 0 4 1 100 212 100 195 1769 2 3 0 1378 1 95 95 0 1 -"30870" 4 621 3 1 1 0 4 1 100 212 195 293 1927 7 4 1 658 3 50 98 1 1 -"30871" 4 621 3 1 1 0 4 1 100 212 293 146 1802 3 1 0 510 3 50 147 1 0 -"30872" 4 621 3 1 1 0 4 1 100 212 146 285 1803 1 9 0 670 1 95 139 0 1 -"30873" 4 621 3 1 1 0 4 1 100 212 285 499 1648 8 7 1 995 2 75 214 1 1 -"30874" 4 621 3 1 1 0 4 1 100 212 499 973 1726 9 2 1 889 1 95 474 1 1 -"30875" 4 621 3 1 1 0 4 1 100 212 973 486 3122 5 6 1 950 3 50 487 0 0 -"30876" 4 621 3 1 1 0 4 1 100 212 486 121 1796 6 8 1 1601 2 75 365 0 0 -"30877" 4 621 3 1 1 0 4 1 100 212 121 212 1639 4 7 0 1511 2 75 91 0 1 -"30878" 4 621 4 0 1 1 1 1 100 117 100 125 8780 8 3 1 2591 2 25 25 1 1 -"30879" 4 621 4 0 1 1 1 1 100 117 125 156 3386 3 7 0 1073 2 25 31 0 1 -"30880" 4 621 4 0 1 1 1 1 100 117 156 234 1638 8 2 1 961 3 50 78 1 1 -"30881" 4 621 4 0 1 1 1 1 100 117 234 117 1592 2 1 0 1373 3 50 117 1 0 -"30882" 4 621 5 1 1 1 1 1 100 408 100 195 2771 9 7 1 1482 5 95 95 1 1 -"30883" 4 621 5 1 1 1 1 1 100 408 195 244 1750 4 8 0 862 2 25 49 0 1 -"30884" 4 621 5 1 1 1 1 1 100 408 244 305 1341 8 2 1 978 2 25 61 1 1 -"30885" 4 621 5 1 1 1 1 1 100 408 305 229 1455 2 1 0 1038 2 25 76 1 0 -"30886" 4 621 5 1 1 1 1 1 100 408 229 286 1513 7 6 1 554 2 25 57 1 1 -"30887" 4 621 5 1 1 1 1 1 100 408 286 272 2215 5 3 0 653 1 5 14 1 0 -"30888" 4 621 5 1 1 1 1 1 100 408 272 286 1339 3 6 0 749 1 5 14 0 1 -"30889" 4 621 5 1 1 1 1 1 100 408 286 272 1692 6 9 1 678 1 5 14 0 0 -"30890" 4 621 5 1 1 1 1 1 100 408 272 408 1417 1 4 0 1571 3 50 136 0 1 -"30891" 4 621 5 1 1 1 2 1 100 527 100 150 1462 2 9 0 475 3 50 50 0 1 -"30892" 4 621 5 1 1 1 2 1 100 527 150 188 1586 4 10 0 829 2 25 38 0 1 -"30893" 4 621 5 1 1 1 2 1 100 527 188 179 1989 3 1 0 681 1 5 9 1 0 -"30894" 4 621 5 
1 1 1 2 1 100 527 179 224 1412 8 6 1 1537 2 25 45 1 1 -"30895" 4 621 5 1 1 1 2 1 100 527 224 235 2869 5 7 0 1223 1 5 11 0 1 -"30896" 4 621 5 1 1 1 2 1 100 527 235 223 1679 6 8 1 692 1 5 12 0 0 -"30897" 4 621 5 1 1 1 2 1 100 527 223 234 1396 7 5 1 789 1 5 11 1 1 -"30898" 4 621 5 1 1 1 2 1 100 527 234 351 1475 1 10 0 1024 3 50 117 0 1 -"30899" 4 621 5 1 1 1 2 1 100 527 351 527 1504 9 3 1 1037 3 50 176 1 1 -"30900" 4 621 5 1 1 1 3 1 100 274 100 125 1417 3 5 0 1048 2 25 25 0 1 -"30901" 4 621 5 1 1 1 3 1 100 274 125 62 1547 8 9 1 811 3 50 63 0 0 -"30902" 4 621 5 1 1 1 3 1 100 274 62 109 1147 2 4 0 543 4 75 47 0 1 -"30903" 4 621 5 1 1 1 3 1 100 274 109 114 1604 6 3 1 691 1 5 5 1 1 -"30904" 4 621 5 1 1 1 3 1 100 274 114 143 1296 7 6 1 794 2 25 29 1 1 -"30905" 4 621 5 1 1 1 3 1 100 274 143 107 1304 4 2 0 802 2 25 36 1 0 -"30906" 4 621 5 1 1 1 3 1 100 274 107 209 1322 1 8 0 4588 5 95 102 0 1 -"30907" 4 621 5 1 1 1 3 1 100 274 209 219 1845 5 7 0 687 1 5 10 0 1 -"30908" 4 621 5 1 1 1 3 1 100 274 219 274 1416 9 5 1 1493 2 25 55 1 1 -"30909" 4 621 5 1 1 1 4 1 100 494 100 150 1680 8 7 1 2447 3 50 50 1 1 -"30910" 4 621 5 1 1 1 4 1 100 494 150 225 1619 3 10 0 400 3 50 75 0 1 -"30911" 4 621 5 1 1 1 4 1 100 494 225 169 1700 7 9 1 1033 2 25 56 0 0 -"30912" 4 621 5 1 1 1 4 1 100 494 169 296 1485 9 1 1 1246 4 75 127 1 1 -"30913" 4 621 5 1 1 1 4 1 100 494 296 370 1480 2 3 0 1509 2 25 74 0 1 -"30914" 4 621 5 1 1 1 4 1 100 494 370 555 2030 1 8 0 931 3 50 185 0 1 -"30915" 4 621 5 1 1 1 4 1 100 494 555 527 3325 5 4 0 1509 1 5 28 1 0 -"30916" 4 621 5 1 1 1 4 1 100 494 527 395 2172 4 2 0 885 2 25 132 1 0 -"30917" 4 621 5 1 1 1 4 1 100 494 395 494 1275 6 3 1 840 2 25 99 1 1 -"30918" 4 623 2 0 1 1 1 1 100 99 100 150 17729 8 3 1 3487 3 50 50 1 1 -"30919" 4 623 2 0 1 1 1 1 100 99 150 263 12456 3 7 0 1074 4 75 113 0 1 -"30920" 4 623 2 0 1 1 1 1 100 99 263 395 1751 8 2 1 1550 3 50 132 1 1 -"30921" 4 623 2 0 1 1 1 1 100 99 395 99 1532 2 1 0 1365 4 75 296 1 0 -"30922" 4 623 3 1 1 1 1 1 100 483 100 195 2127 9 7 1 0 5 95 95 1 1 -"30923" 4 623 3 1 1 1 1 1 100 483 195 244 3656 4 8 0 2641 2 25 49 0 1 -"30924" 4 623 3 1 1 1 1 1 100 483 244 366 1785 8 2 1 1514 3 50 122 1 1 -"30925" 4 623 3 1 1 1 1 1 100 483 366 183 1639 2 1 0 1280 3 50 183 1 0 -"30926" 4 623 3 1 1 1 1 1 100 483 183 275 1682 7 6 1 796 3 50 92 1 1 -"30927" 4 623 3 1 1 1 1 1 100 483 275 206 3522 5 3 0 1052 2 25 69 1 0 -"30928" 4 623 3 1 1 1 1 1 100 483 206 309 1953 3 6 0 950 3 50 103 0 1 -"30929" 4 623 3 1 1 1 1 1 100 483 309 386 2632 6 9 0 1440 2 25 77 0 1 -"30930" 4 623 3 1 1 1 1 1 100 483 386 483 1766 1 4 0 732 2 25 97 0 1 -"30931" 4 623 3 1 1 1 2 1 100 389 100 150 2702 2 9 0 2528 3 50 50 0 1 -"30932" 4 623 3 1 1 1 2 1 100 389 150 112 2937 4 10 1 647 2 25 38 0 0 -"30933" 4 623 3 1 1 1 2 1 100 389 112 84 1288 3 1 0 2103 2 25 28 1 0 -"30934" 4 623 3 1 1 1 2 1 100 389 84 147 1432 8 6 1 623 4 75 63 1 1 -"30935" 4 623 3 1 1 1 2 1 100 389 147 184 2582 5 7 0 924 2 25 37 0 1 -"30936" 4 623 3 1 1 1 2 1 100 389 184 138 2090 6 8 1 1540 2 25 46 0 0 -"30937" 4 623 3 1 1 1 2 1 100 389 138 207 1552 7 5 1 1224 3 50 69 1 1 -"30938" 4 623 3 1 1 1 2 1 100 389 207 311 1492 1 10 0 744 3 50 104 0 1 -"30939" 4 623 3 1 1 1 2 1 100 389 311 389 1973 9 3 1 1577 2 25 78 1 1 -"30940" 4 623 3 1 1 1 3 1 100 543 100 150 1860 3 5 0 684 3 50 50 0 1 -"30941" 4 623 3 1 1 1 3 1 100 543 150 112 1722 8 9 1 1866 2 25 38 0 0 -"30942" 4 623 3 1 1 1 3 1 100 543 112 168 1554 2 4 0 871 3 50 56 0 1 -"30943" 4 623 3 1 1 1 3 1 100 543 168 210 1304 6 3 1 1409 2 25 42 1 1 -"30944" 4 623 3 1 1 1 3 1 100 543 210 263 
1745 7 6 1 1556 2 25 53 1 1 -"30945" 4 623 3 1 1 1 3 1 100 543 263 197 2488 4 2 0 1555 2 25 66 1 0 -"30946" 4 623 3 1 1 1 3 1 100 543 197 345 1437 1 8 0 957 4 75 148 0 1 -"30947" 4 623 3 1 1 1 3 1 100 543 345 362 2154 5 7 0 2792 1 5 17 0 1 -"30948" 4 623 3 1 1 1 3 1 100 543 362 543 1621 9 5 1 744 3 50 181 1 1 -"30949" 4 623 3 1 1 1 4 1 100 708 100 175 2021 8 7 1 901 4 75 75 1 1 -"30950" 4 623 3 1 1 1 4 1 100 708 175 219 1394 3 10 0 2145 2 25 44 0 1 -"30951" 4 623 3 1 1 1 4 1 100 708 219 109 1405 7 9 1 852 3 50 110 0 0 -"30952" 4 623 3 1 1 1 4 1 100 708 109 191 1195 9 1 1 2104 4 75 82 1 1 -"30953" 4 623 3 1 1 1 4 1 100 708 191 287 1264 2 3 0 2427 3 50 96 0 1 -"30954" 4 623 3 1 1 1 4 1 100 708 287 431 1393 1 8 0 842 3 50 144 0 1 -"30955" 4 623 3 1 1 1 4 1 100 708 431 453 2885 5 4 1 2335 1 5 22 1 1 -"30956" 4 623 3 1 1 1 4 1 100 708 453 566 2074 4 2 1 1278 2 25 113 1 1 -"30957" 4 623 3 1 1 1 4 1 100 708 566 708 1838 6 3 1 1497 2 25 142 1 1 -"30958" 4 623 4 0 1 0 1 1 100 256 100 150 11172 2 7 0 2196 3 50 50 0 1 -"30959" 4 623 4 0 1 0 1 1 100 256 150 263 5082 3 7 0 1465 2 75 113 0 1 -"30960" 4 623 4 0 1 0 1 1 100 256 263 513 1400 2 8 0 2522 1 95 250 0 1 -"30961" 4 623 4 0 1 0 1 1 100 256 513 256 2758 8 9 1 1130 3 50 257 0 0 -"30962" 4 623 5 1 1 0 1 1 100 1026 100 195 1565 1 3 0 1862 1 95 95 0 1 -"30963" 4 623 5 1 1 0 1 1 100 1026 195 244 5653 6 5 1 377 4 25 49 1 1 -"30964" 4 623 5 1 1 0 1 1 100 1026 244 366 1109 2 8 0 744 3 50 122 0 1 -"30965" 4 623 5 1 1 0 1 1 100 1026 366 274 854 8 9 1 395 4 25 92 0 0 -"30966" 4 623 5 1 1 0 1 1 100 1026 274 534 912 3 4 0 990 1 95 260 0 1 -"30967" 4 623 5 1 1 0 1 1 100 1026 534 561 1268 5 7 0 1449 5 5 27 0 1 -"30968" 4 623 5 1 1 0 1 1 100 1026 561 701 1142 7 4 1 585 4 25 140 1 1 -"30969" 4 623 5 1 1 0 1 1 100 1026 701 526 1350 4 1 0 2372 4 25 175 1 0 -"30970" 4 623 5 1 1 0 1 1 100 1026 526 1026 1400 9 6 1 386 1 95 500 1 1 -"30971" 4 623 5 1 1 0 2 1 100 951 100 195 1218 8 1 1 612 1 95 95 1 1 -"30972" 4 623 5 1 1 0 2 1 100 951 195 244 1042 6 2 1 311 4 25 49 1 1 -"30973" 4 623 5 1 1 0 2 1 100 951 244 122 1055 7 9 1 905 3 50 122 0 0 -"30974" 4 623 5 1 1 0 2 1 100 951 122 238 967 2 10 0 251 1 95 116 0 1 -"30975" 4 623 5 1 1 0 2 1 100 951 238 298 1192 5 3 1 1848 4 25 60 1 1 -"30976" 4 623 5 1 1 0 2 1 100 951 298 223 1694 4 2 0 409 4 25 75 1 0 -"30977" 4 623 5 1 1 0 2 1 100 951 223 435 1032 3 5 0 483 1 95 212 0 1 -"30978" 4 623 5 1 1 0 2 1 100 951 435 761 1271 9 4 1 544 2 75 326 1 1 -"30979" 4 623 5 1 1 0 2 1 100 951 761 951 1386 1 7 0 1237 4 25 190 0 1 -"30980" 4 623 5 1 1 0 3 1 100 1347 100 195 1264 7 5 1 283 1 95 95 1 1 -"30981" 4 623 5 1 1 0 3 1 100 1347 195 97 705 2 1 0 2128 3 50 98 1 0 -"30982" 4 623 5 1 1 0 3 1 100 1347 97 189 1017 8 6 1 386 1 95 92 1 1 -"30983" 4 623 5 1 1 0 3 1 100 1347 189 331 1040 4 7 0 1795 2 75 142 0 1 -"30984" 4 623 5 1 1 0 3 1 100 1347 331 497 1444 3 10 0 891 3 50 166 0 1 -"30985" 4 623 5 1 1 0 3 1 100 1347 497 373 1013 6 8 1 587 4 25 124 0 0 -"30986" 4 623 5 1 1 0 3 1 100 1347 373 727 1129 9 2 1 364 1 95 354 1 1 -"30987" 4 623 5 1 1 0 3 1 100 1347 727 691 1610 5 3 0 1071 5 5 36 1 0 -"30988" 4 623 5 1 1 0 3 1 100 1347 691 1347 1100 1 10 0 379 1 95 656 0 1 -"30989" 4 623 5 1 1 0 4 1 100 1690 100 195 1280 2 3 0 410 1 95 95 0 1 -"30990" 4 623 5 1 1 0 4 1 100 1690 195 380 1014 7 4 1 606 1 95 185 1 1 -"30991" 4 623 5 1 1 0 4 1 100 1690 380 285 1097 3 1 0 349 4 25 95 1 0 -"30992" 4 623 5 1 1 0 4 1 100 1690 285 556 1196 1 9 0 327 1 95 271 0 1 -"30993" 4 623 5 1 1 0 4 1 100 1690 556 1084 1086 8 7 1 409 1 95 528 1 1 -"30994" 4 623 5 1 1 0 4 1 
100 1690 1084 1355 1134 9 2 1 1601 4 25 271 1 1 -"30995" 4 623 5 1 1 0 4 1 100 1690 1355 1423 1347 5 6 0 1011 5 5 68 0 1 -"30996" 4 623 5 1 1 0 4 1 100 1690 1423 1352 1334 6 8 1 684 5 5 71 0 0 -"30997" 4 623 5 1 1 0 4 1 100 1690 1352 1690 1315 4 7 0 1115 4 25 338 0 1 -"30998" 4 642 2 0 1 1 1 1 100 176 100 150 15659 8 3 1 1862 3 50 50 1 1 -"30999" 4 642 2 0 1 1 1 1 100 176 150 188 17722 3 7 0 1041 2 25 38 0 1 -"31000" 4 642 2 0 1 1 1 1 100 176 188 235 2855 8 2 1 1216 2 25 47 1 1 -"31001" 4 642 2 0 1 1 1 1 100 176 235 176 2142 2 1 0 1131 2 25 59 1 0 -"31002" 4 642 3 1 1 1 1 1 100 259 100 150 24469 9 7 1 991 3 50 50 1 1 -"31003" 4 642 3 1 1 1 1 1 100 259 150 188 2833 4 8 0 1085 2 25 38 0 1 -"31004" 4 642 3 1 1 1 1 1 100 259 188 282 2238 8 2 1 904 3 50 94 1 1 -"31005" 4 642 3 1 1 1 1 1 100 259 282 141 2147 2 1 0 907 3 50 141 1 0 -"31006" 4 642 3 1 1 1 1 1 100 259 141 176 1870 7 6 1 1136 2 25 35 1 1 -"31007" 4 642 3 1 1 1 1 1 100 259 176 132 3060 5 3 0 882 2 25 44 1 0 -"31008" 4 642 3 1 1 1 1 1 100 259 132 198 1955 3 6 0 824 3 50 66 0 1 -"31009" 4 642 3 1 1 1 1 1 100 259 198 148 2188 6 9 1 1061 2 25 50 0 0 -"31010" 4 642 3 1 1 1 1 1 100 259 148 259 1783 1 4 0 1045 4 75 111 0 1 -"31011" 4 642 3 1 1 1 2 1 100 468 100 150 2057 2 9 0 1154 3 50 50 0 1 -"31012" 4 642 3 1 1 1 2 1 100 468 150 188 2738 4 10 0 1358 2 25 38 0 1 -"31013" 4 642 3 1 1 1 2 1 100 468 188 141 2484 3 1 0 1762 2 25 47 1 0 -"31014" 4 642 3 1 1 1 2 1 100 468 141 212 1986 8 6 1 1085 3 50 71 1 1 -"31015" 4 642 3 1 1 1 2 1 100 468 212 159 3343 5 7 1 1094 2 25 53 0 0 -"31016" 4 642 3 1 1 1 2 1 100 468 159 119 2394 6 8 1 1394 2 25 40 0 0 -"31017" 4 642 3 1 1 1 2 1 100 468 119 208 2162 7 5 1 1069 4 75 89 1 1 -"31018" 4 642 3 1 1 1 2 1 100 468 208 312 1719 1 10 0 1523 3 50 104 0 1 -"31019" 4 642 3 1 1 1 2 1 100 468 312 468 1818 9 3 1 1873 3 50 156 1 1 -"31020" 4 642 3 1 1 1 3 1 100 313 100 150 1882 3 5 0 1272 3 50 50 0 1 -"31021" 4 642 3 1 1 1 3 1 100 313 150 75 1699 8 9 1 943 3 50 75 0 0 -"31022" 4 642 3 1 1 1 3 1 100 313 75 94 1920 2 4 0 1834 2 25 19 0 1 -"31023" 4 642 3 1 1 1 3 1 100 313 94 141 1907 6 3 1 873 3 50 47 1 1 -"31024" 4 642 3 1 1 1 3 1 100 313 141 212 2137 7 6 1 1378 3 50 71 1 1 -"31025" 4 642 3 1 1 1 3 1 100 313 212 159 1955 4 2 0 1191 2 25 53 1 0 -"31026" 4 642 3 1 1 1 3 1 100 313 159 239 1977 1 8 0 1392 3 50 80 0 1 -"31027" 4 642 3 1 1 1 3 1 100 313 239 179 2408 5 7 1 1144 2 25 60 0 0 -"31028" 4 642 3 1 1 1 3 1 100 313 179 313 1901 9 5 1 988 4 75 134 1 1 -"31029" 4 642 3 1 1 1 4 1 100 305 100 150 4028 8 7 1 1305 3 50 50 1 1 -"31030" 4 642 3 1 1 1 4 1 100 305 150 188 5124 3 10 0 2025 2 25 38 0 1 -"31031" 4 642 3 1 1 1 4 1 100 305 188 94 2945 7 9 1 1437 3 50 94 0 0 -"31032" 4 642 3 1 1 1 4 1 100 305 94 165 5109 9 1 1 987 4 75 71 1 1 -"31033" 4 642 3 1 1 1 4 1 100 305 165 248 3929 2 3 0 1145 3 50 83 0 1 -"31034" 4 642 3 1 1 1 4 1 100 305 248 434 1707 1 8 0 1132 4 75 186 0 1 -"31035" 4 642 3 1 1 1 4 1 100 305 434 325 1681 5 4 0 1040 2 25 109 1 0 -"31036" 4 642 3 1 1 1 4 1 100 305 325 244 1619 4 2 0 2235 2 25 81 1 0 -"31037" 4 642 3 1 1 1 4 1 100 305 244 305 1638 6 3 1 1312 2 25 61 1 1 -"31038" 4 642 4 0 1 0 1 1 100 296 100 175 3728 2 7 0 3401 2 75 75 0 1 -"31039" 4 642 4 0 1 0 1 1 100 296 175 263 4530 3 7 0 1257 3 50 88 0 1 -"31040" 4 642 4 0 1 0 1 1 100 296 263 395 2901 2 8 0 1522 3 50 132 0 1 -"31041" 4 642 4 0 1 0 1 1 100 296 395 296 1682 8 9 1 1056 4 25 99 0 0 -"31042" 4 642 5 1 1 0 1 1 100 761 100 175 2382 1 3 0 2319 2 75 75 0 1 -"31043" 4 642 5 1 1 0 1 1 100 761 175 219 2025 6 5 1 893 4 25 44 1 1 -"31044" 4 642 5 1 1 0 
1 1 100 761 219 329 1920 2 8 0 1068 3 50 110 0 1 -"31045" 4 642 5 1 1 0 1 1 100 761 329 247 1543 8 9 1 930 4 25 82 0 0 -"31046" 4 642 5 1 1 0 1 1 100 761 247 371 1913 3 4 0 2507 3 50 124 0 1 -"31047" 4 642 5 1 1 0 1 1 100 761 371 464 2496 5 7 0 1138 4 25 93 0 1 -"31048" 4 642 5 1 1 0 1 1 100 761 464 580 1660 7 4 1 1036 4 25 116 1 1 -"31049" 4 642 5 1 1 0 1 1 100 761 580 435 1578 4 1 0 1585 4 25 145 1 0 -"31050" 4 642 5 1 1 0 1 1 100 761 435 761 1554 9 6 1 3175 2 75 326 1 1 -"31051" 4 642 5 1 1 0 2 1 100 763 100 150 3474 8 1 1 932 3 50 50 1 1 -"31052" 4 642 5 1 1 0 2 1 100 763 150 188 3034 6 2 1 790 4 25 38 1 1 -"31053" 4 642 5 1 1 0 2 1 100 763 188 141 1639 7 9 1 1893 4 25 47 0 0 -"31054" 4 642 5 1 1 0 2 1 100 763 141 212 1389 2 10 0 1879 3 50 71 0 1 -"31055" 4 642 5 1 1 0 2 1 100 763 212 265 2894 5 3 1 1593 4 25 53 1 1 -"31056" 4 642 5 1 1 0 2 1 100 763 265 199 3586 4 2 0 636 4 25 66 1 0 -"31057" 4 642 5 1 1 0 2 1 100 763 199 249 1426 3 5 0 1129 4 25 50 0 1 -"31058" 4 642 5 1 1 0 2 1 100 763 249 436 1229 9 4 1 1348 2 75 187 1 1 -"31059" 4 642 5 1 1 0 2 1 100 763 436 763 1415 1 7 0 2676 2 75 327 0 1 -"31060" 4 642 5 1 1 0 3 1 100 269 100 150 2537 7 5 1 1199 3 50 50 1 1 -"31061" 4 642 5 1 1 0 3 1 100 269 150 75 2682 2 1 0 2200 3 50 75 1 0 -"31062" 4 642 5 1 1 0 3 1 100 269 75 113 1359 8 6 1 1844 3 50 38 1 1 -"31063" 4 642 5 1 1 0 3 1 100 269 113 141 1359 4 7 0 1467 4 25 28 0 1 -"31064" 4 642 5 1 1 0 3 1 100 269 141 212 2516 3 10 0 722 3 50 71 0 1 -"31065" 4 642 5 1 1 0 3 1 100 269 212 159 2049 6 8 1 1073 4 25 53 0 0 -"31066" 4 642 5 1 1 0 3 1 100 269 159 239 7150 9 2 1 2279 3 50 80 1 1 -"31067" 4 642 5 1 1 0 3 1 100 269 239 179 1581 5 3 0 1271 4 25 60 1 0 -"31068" 4 642 5 1 1 0 3 1 100 269 179 269 1898 1 10 0 544 3 50 90 0 1 -"31069" 4 642 5 1 1 0 4 1 100 697 100 150 2856 2 3 0 2313 3 50 50 0 1 -"31070" 4 642 5 1 1 0 4 1 100 697 150 225 1577 7 4 1 1099 3 50 75 1 1 -"31071" 4 642 5 1 1 0 4 1 100 697 225 169 1478 3 1 0 1069 4 25 56 1 0 -"31072" 4 642 5 1 1 0 4 1 100 697 169 296 1436 1 9 0 1077 2 75 127 0 1 -"31073" 4 642 5 1 1 0 4 1 100 697 296 444 2048 8 7 1 1688 3 50 148 1 1 -"31074" 4 642 5 1 1 0 4 1 100 697 444 666 1825 9 2 1 1299 3 50 222 1 1 -"31075" 4 642 5 1 1 0 4 1 100 697 666 699 2014 5 6 0 1458 5 5 33 0 1 -"31076" 4 642 5 1 1 0 4 1 100 697 699 664 1700 6 8 1 1110 5 5 35 0 0 -"31077" 4 642 5 1 1 0 4 1 100 697 664 697 1612 4 7 0 1465 5 5 33 0 1 -"31078" 4 647 2 0 1 1 1 1 100 355 100 150 11504 8 3 1 2218 3 50 50 1 1 -"31079" 4 647 2 0 1 1 1 1 100 355 150 225 10691 3 7 0 866 3 50 75 0 1 -"31080" 4 647 2 0 1 1 1 1 100 355 225 338 3021 8 2 1 746 3 50 113 1 1 -"31081" 4 647 2 0 1 1 1 1 100 355 338 355 2573 2 1 1 408 1 5 17 1 1 -"31082" 4 647 3 1 1 1 1 1 100 486 100 150 8265 9 7 1 1042 3 50 50 1 1 -"31083" 4 647 3 1 1 1 1 1 100 486 150 188 2245 4 8 0 604 2 25 38 0 1 -"31084" 4 647 3 1 1 1 1 1 100 486 188 282 1945 8 2 1 395 3 50 94 1 1 -"31085" 4 647 3 1 1 1 1 1 100 486 282 353 1567 2 1 1 762 2 25 71 1 1 -"31086" 4 647 3 1 1 1 1 1 100 486 353 371 2881 7 6 1 519 1 5 18 1 1 -"31087" 4 647 3 1 1 1 1 1 100 486 371 352 2219 5 3 0 445 1 5 19 1 0 -"31088" 4 647 3 1 1 1 1 1 100 486 352 370 2913 3 6 0 259 1 5 18 0 1 -"31089" 4 647 3 1 1 1 1 1 100 486 370 389 3046 6 9 0 262 1 5 19 0 1 -"31090" 4 647 3 1 1 1 1 1 100 486 389 486 1012 1 4 0 1550 2 25 97 0 1 -"31091" 4 647 3 1 1 1 2 1 100 420 100 150 1709 2 9 0 1009 3 50 50 0 1 -"31092" 4 647 3 1 1 1 2 1 100 420 150 188 977 4 10 0 2606 2 25 38 0 1 -"31093" 4 647 3 1 1 1 2 1 100 420 188 94 1403 3 1 0 988 3 50 94 1 0 -"31094" 4 647 3 1 1 1 2 1 100 420 94 
118 2775 8 6 1 1429 2 25 24 1 1 -"31095" 4 647 3 1 1 1 2 1 100 420 118 124 1742 5 7 0 300 1 5 6 0 1 -"31096" 4 647 3 1 1 1 2 1 100 420 124 130 2734 6 8 0 300 1 5 6 0 1 -"31097" 4 647 3 1 1 1 2 1 100 420 130 137 1672 7 5 1 306 1 5 7 1 1 -"31098" 4 647 3 1 1 1 2 1 100 420 137 240 1881 1 10 0 1727 4 75 103 0 1 -"31099" 4 647 3 1 1 1 2 1 100 420 240 420 2044 9 3 1 585 4 75 180 1 1 -"31100" 4 647 3 1 1 1 3 1 100 176 100 195 1610 3 5 0 898 5 95 95 0 1 -"31101" 4 647 3 1 1 1 3 1 100 176 195 49 1380 8 9 1 1411 4 75 146 0 0 -"31102" 4 647 3 1 1 1 3 1 100 176 49 96 1430 2 4 0 850 5 95 47 0 1 -"31103" 4 647 3 1 1 1 3 1 100 176 96 91 4393 6 3 0 241 1 5 5 1 0 -"31104" 4 647 3 1 1 1 3 1 100 176 91 177 1835 7 6 1 393 5 95 86 1 1 -"31105" 4 647 3 1 1 1 3 1 100 176 177 44 1932 4 2 0 2043 4 75 133 1 0 -"31106" 4 647 3 1 1 1 3 1 100 176 44 86 1827 1 8 0 760 5 95 42 0 1 -"31107" 4 647 3 1 1 1 3 1 100 176 86 90 4316 5 7 0 275 1 5 4 0 1 -"31108" 4 647 3 1 1 1 3 1 100 176 90 176 2104 9 5 1 1626 5 95 86 1 1 -"31109" 4 647 3 1 1 1 4 1 100 396 100 125 2153 8 7 1 653 2 25 25 1 1 -"31110" 4 647 3 1 1 1 4 1 100 396 125 188 1307 3 10 0 680 3 50 63 0 1 -"31111" 4 647 3 1 1 1 4 1 100 396 188 141 1521 7 9 1 593 2 25 47 0 0 -"31112" 4 647 3 1 1 1 4 1 100 396 141 148 1933 9 1 1 1094 1 5 7 1 1 -"31113" 4 647 3 1 1 1 4 1 100 396 148 155 1637 2 3 0 394 1 5 7 0 1 -"31114" 4 647 3 1 1 1 4 1 100 396 155 302 1793 1 8 0 1314 5 95 147 0 1 -"31115" 4 647 3 1 1 1 4 1 100 396 302 529 2483 5 4 1 1918 4 75 227 1 1 -"31116" 4 647 3 1 1 1 4 1 100 396 529 264 2478 4 2 0 3169 3 50 265 1 0 -"31117" 4 647 3 1 1 1 4 1 100 396 264 396 2071 6 3 1 699 3 50 132 1 1 -"31118" 4 647 4 0 1 0 1 1 100 33 100 175 6728 2 7 0 748 2 75 75 0 1 -"31119" 4 647 4 0 1 0 1 1 100 33 175 341 7124 3 7 0 640 1 95 166 0 1 -"31120" 4 647 4 0 1 0 1 1 100 33 341 665 988 2 8 0 478 1 95 324 0 1 -"31121" 4 647 4 0 1 0 1 1 100 33 665 33 1206 8 9 1 370 1 95 632 0 0 -"31122" 4 647 5 1 1 0 1 1 100 10688 100 195 3879 1 3 0 390 1 95 95 0 1 -"31123" 4 647 5 1 1 0 1 1 100 10688 195 380 1778 6 5 1 305 1 95 185 1 1 -"31124" 4 647 5 1 1 0 1 1 100 10688 380 741 873 2 8 0 328 1 95 361 0 1 -"31125" 4 647 5 1 1 0 1 1 100 10688 741 1445 1743 8 9 0 332 1 95 704 0 1 -"31126" 4 647 5 1 1 0 1 1 100 10688 1445 2818 1199 3 4 0 365 1 95 1373 0 1 -"31127" 4 647 5 1 1 0 1 1 100 10688 2818 2959 5102 5 7 0 0 5 5 141 0 1 -"31128" 4 647 5 1 1 0 1 1 100 10688 2959 5770 2641 7 4 1 1789 1 95 2811 1 1 -"31129" 4 647 5 1 1 0 1 1 100 10688 5770 5481 1708 4 1 0 0 5 5 289 1 0 -"31130" 4 647 5 1 1 0 1 1 100 10688 5481 10688 2401 9 6 1 390 1 95 5207 1 1 -"31131" 4 647 5 1 1 0 2 0 100 0 100 195 1833 8 1 1 1012 1 95 95 1 1 -"31132" 4 647 5 1 1 0 2 0 100 0 195 380 1535 6 2 1 297 1 95 185 1 1 -"31133" 4 647 5 1 1 0 2 0 100 0 380 19 1452 7 9 1 309 1 95 361 0 0 -"31134" 4 647 5 1 1 0 2 0 100 0 19 37 1138 2 10 0 285 1 95 18 0 1 -"31135" 4 647 5 1 1 0 2 0 100 0 37 2 1373 5 3 0 274 1 95 35 1 0 -"31136" 4 647 5 1 1 0 2 0 100 0 2 0 1193 4 2 0 250 1 95 2 1 0 -"31137" 4 647 5 1 1 0 3 0 100 0 100 195 1293 7 5 1 324 1 95 95 1 1 -"31138" 4 647 5 1 1 0 3 0 100 0 195 10 707 2 1 0 228 1 95 185 1 0 -"31139" 4 647 5 1 1 0 3 0 100 0 10 20 1095 8 6 1 235 1 95 10 1 1 -"31140" 4 647 5 1 1 0 3 0 100 0 20 39 813 4 7 0 336 1 95 19 0 1 -"31141" 4 647 5 1 1 0 3 0 100 0 39 76 886 3 10 0 268 1 95 37 0 1 -"31142" 4 647 5 1 1 0 3 0 100 0 76 4 1297 6 8 1 282 1 95 72 0 0 -"31143" 4 647 5 1 1 0 3 0 100 0 4 8 1089 9 2 1 636 1 95 4 1 1 -"31144" 4 647 5 1 1 0 3 0 100 0 8 0 653 5 3 0 350 1 95 8 1 0 -"31145" 4 647 5 1 1 0 4 0 100 0 100 195 940 2 3 0 291 
1 95 95 0 1 -"31146" 4 647 5 1 1 0 4 0 100 0 195 10 650 7 4 0 246 1 95 185 1 0 -"31147" 4 647 5 1 1 0 4 0 100 0 10 0 674 3 1 0 258 1 95 10 1 0 -"31148" 4 654 2 0 1 0 1 1 100 211 100 150 13325 2 7 0 1531 3 50 50 0 1 -"31149" 4 654 2 0 1 0 1 1 100 211 150 188 8574 3 7 0 656 4 25 38 0 1 -"31150" 4 654 2 0 1 0 1 1 100 211 188 282 2497 2 8 0 1131 3 50 94 0 1 -"31151" 4 654 2 0 1 0 1 1 100 211 282 211 3329 8 9 1 659 4 25 71 0 0 -"31152" 4 654 3 1 1 0 1 1 100 610 100 195 2351 1 3 0 958 1 95 95 0 1 -"31153" 4 654 3 1 1 0 1 1 100 610 195 293 5308 6 5 1 1054 3 50 98 1 1 -"31154" 4 654 3 1 1 0 1 1 100 610 293 440 4169 2 8 0 2543 3 50 147 0 1 -"31155" 4 654 3 1 1 0 1 1 100 610 440 330 2676 8 9 1 851 4 25 110 0 0 -"31156" 4 654 3 1 1 0 1 1 100 610 330 413 3717 3 4 0 1299 4 25 83 0 1 -"31157" 4 654 3 1 1 0 1 1 100 610 413 310 4608 5 7 1 765 4 25 103 0 0 -"31158" 4 654 3 1 1 0 1 1 100 610 310 465 2365 7 4 1 3929 3 50 155 1 1 -"31159" 4 654 3 1 1 0 1 1 100 610 465 581 4677 4 1 1 1491 4 25 116 1 1 -"31160" 4 654 3 1 1 0 1 1 100 610 581 610 9328 9 6 1 913 5 5 29 1 1 -"31161" 4 654 3 1 1 0 2 0 100 0 100 25 2884 8 1 0 706 2 75 75 1 0 -"31162" 4 654 3 1 1 0 2 0 100 0 25 44 2734 6 2 1 972 2 75 19 1 1 -"31163" 4 654 3 1 1 0 2 0 100 0 44 22 2580 7 9 1 1039 3 50 22 0 0 -"31164" 4 654 3 1 1 0 2 0 100 0 22 39 1729 2 10 0 1254 2 75 17 0 1 -"31165" 4 654 3 1 1 0 2 0 100 0 39 10 3643 5 3 0 1942 2 75 29 1 0 -"31166" 4 654 3 1 1 0 2 0 100 0 10 5 3811 4 2 0 471 3 50 5 1 0 -"31167" 4 654 3 1 1 0 2 0 100 0 5 9 2078 3 5 0 943 2 75 4 0 1 -"31168" 4 654 3 1 1 0 2 0 100 0 9 0 2021 9 4 0 857 1 95 9 1 0 -"31169" 4 654 3 1 1 0 3 1 100 271 100 175 4926 7 5 1 1039 2 75 75 1 1 -"31170" 4 654 3 1 1 0 3 1 100 271 175 87 1643 2 1 0 851 3 50 88 1 0 -"31171" 4 654 3 1 1 0 3 1 100 271 87 170 2481 8 6 1 2234 1 95 83 1 1 -"31172" 4 654 3 1 1 0 3 1 100 271 170 127 3554 4 7 1 688 4 25 43 0 0 -"31173" 4 654 3 1 1 0 3 1 100 271 127 191 1553 3 10 0 1078 3 50 64 0 1 -"31174" 4 654 3 1 1 0 3 1 100 271 191 143 3854 6 8 1 904 4 25 48 0 0 -"31175" 4 654 3 1 1 0 3 1 100 271 143 279 2836 9 2 1 640 1 95 136 1 1 -"31176" 4 654 3 1 1 0 3 1 100 271 279 139 11054 5 3 0 922 3 50 140 1 0 -"31177" 4 654 3 1 1 0 3 1 100 271 139 271 4667 1 10 0 378 1 95 132 0 1 -"31178" 4 654 3 1 1 0 4 1 100 2414 100 195 2071 2 3 0 1948 1 95 95 0 1 -"31179" 4 654 3 1 1 0 4 1 100 2414 195 380 2012 7 4 1 398 1 95 185 1 1 -"31180" 4 654 3 1 1 0 4 1 100 2414 380 285 1861 3 1 0 316 4 25 95 1 0 -"31181" 4 654 3 1 1 0 4 1 100 2414 285 556 1919 1 9 0 425 1 95 271 0 1 -"31182" 4 654 3 1 1 0 4 1 100 2414 556 834 1920 8 7 1 1039 3 50 278 1 1 -"31183" 4 654 3 1 1 0 4 1 100 2414 834 1626 2914 9 2 1 573 1 95 792 1 1 -"31184" 4 654 3 1 1 0 4 1 100 2414 1626 2033 8756 5 6 0 1410 4 25 407 0 1 -"31185" 4 654 3 1 1 0 4 1 100 2414 2033 1931 2159 6 8 1 695 5 5 102 0 0 -"31186" 4 654 3 1 1 0 4 1 100 2414 1931 2414 1795 4 7 0 661 4 25 483 0 1 -"31187" 4 654 4 0 1 1 1 1 100 141 100 125 4533 8 3 1 1534 2 25 25 1 1 -"31188" 4 654 4 0 1 1 1 1 100 141 125 188 12158 3 7 0 2824 3 50 63 0 1 -"31189" 4 654 4 0 1 1 1 1 100 141 188 282 1557 8 2 1 4106 3 50 94 1 1 -"31190" 4 654 4 0 1 1 1 1 100 141 282 141 1533 2 1 0 616 3 50 141 1 0 -"31191" 4 654 5 1 1 1 1 1 100 270 100 195 2539 9 7 1 1608 5 95 95 1 1 -"31192" 4 654 5 1 1 1 1 1 100 270 195 205 2398 4 8 0 827 1 5 10 0 1 -"31193" 4 654 5 1 1 1 1 1 100 270 205 256 1528 8 2 1 2417 2 25 51 1 1 -"31194" 4 654 5 1 1 1 1 1 100 270 256 128 1478 2 1 0 623 3 50 128 1 0 -"31195" 4 654 5 1 1 1 1 1 100 270 128 160 2261 7 6 1 603 2 25 32 1 1 -"31196" 4 654 5 1 1 1 1 1 100 270 
160 152 2133 5 3 0 367 1 5 8 1 0 -"31197" 4 654 5 1 1 1 1 1 100 270 152 190 1639 3 6 0 645 2 25 38 0 1 -"31198" 4 654 5 1 1 1 1 1 100 270 190 180 1844 6 9 1 1631 1 5 10 0 0 -"31199" 4 654 5 1 1 1 1 1 100 270 180 270 1109 1 4 0 4089 3 50 90 0 1 -"31200" 4 654 5 1 1 1 2 1 100 581 100 175 1805 2 9 0 1480 4 75 75 0 1 -"31201" 4 654 5 1 1 1 2 1 100 581 175 184 2166 4 10 0 800 1 5 9 0 1 -"31202" 4 654 5 1 1 1 2 1 100 581 184 138 1491 3 1 0 438 2 25 46 1 0 -"31203" 4 654 5 1 1 1 2 1 100 581 138 207 1343 8 6 1 910 3 50 69 1 1 -"31204" 4 654 5 1 1 1 2 1 100 581 207 217 1681 5 7 0 585 1 5 10 0 1 -"31205" 4 654 5 1 1 1 2 1 100 581 217 206 2074 6 8 1 451 1 5 11 0 0 -"31206" 4 654 5 1 1 1 2 1 100 581 206 258 1697 7 5 1 691 2 25 52 1 1 -"31207" 4 654 5 1 1 1 2 1 100 581 258 387 1535 1 10 0 1289 3 50 129 0 1 -"31208" 4 654 5 1 1 1 2 1 100 581 387 581 1726 9 3 1 1106 3 50 194 1 1 -"31209" 4 654 5 1 1 1 3 1 100 297 100 150 2216 3 5 0 405 3 50 50 0 1 -"31210" 4 654 5 1 1 1 3 1 100 297 150 37 1419 8 9 1 574 4 75 113 0 0 -"31211" 4 654 5 1 1 1 3 1 100 297 37 65 1650 2 4 0 642 4 75 28 0 1 -"31212" 4 654 5 1 1 1 3 1 100 297 65 68 2209 6 3 1 590 1 5 3 1 1 -"31213" 4 654 5 1 1 1 3 1 100 297 68 102 1764 7 6 1 529 3 50 34 1 1 -"31214" 4 654 5 1 1 1 3 1 100 297 102 97 1824 4 2 0 775 1 5 5 1 0 -"31215" 4 654 5 1 1 1 3 1 100 297 97 189 1335 1 8 0 911 5 95 92 0 1 -"31216" 4 654 5 1 1 1 3 1 100 297 189 198 1726 5 7 0 372 1 5 9 0 1 -"31217" 4 654 5 1 1 1 3 1 100 297 198 297 1397 9 5 1 3706 3 50 99 1 1 -"31218" 4 654 5 1 1 1 4 1 100 956 100 175 1442 8 7 1 398 4 75 75 1 1 -"31219" 4 654 5 1 1 1 4 1 100 956 175 263 2121 3 10 0 410 3 50 88 0 1 -"31220" 4 654 5 1 1 1 4 1 100 956 263 197 1938 7 9 1 994 2 25 66 0 0 -"31221" 4 654 5 1 1 1 4 1 100 956 197 384 1555 9 1 1 675 5 95 187 1 1 -"31222" 4 654 5 1 1 1 4 1 100 956 384 576 1348 2 3 0 768 3 50 192 0 1 -"31223" 4 654 5 1 1 1 4 1 100 956 576 1008 1579 1 8 0 794 4 75 432 0 1 -"31224" 4 654 5 1 1 1 4 1 100 956 1008 958 1834 5 4 0 523 1 5 50 1 0 -"31225" 4 654 5 1 1 1 4 1 100 956 958 910 1503 4 2 0 874 1 5 48 1 0 -"31226" 4 654 5 1 1 1 4 1 100 956 910 956 1750 6 3 1 619 1 5 46 1 1 -"31227" 4 656 2 0 1 1 1 1 100 689 100 175 19213 8 3 1 1175 4 75 75 1 1 -"31228" 4 656 2 0 1 1 1 1 100 689 175 306 14940 3 7 0 1645 4 75 131 0 1 -"31229" 4 656 2 0 1 1 1 1 100 689 306 459 4781 8 2 1 929 3 50 153 1 1 -"31230" 4 656 2 0 1 1 1 1 100 689 459 689 2323 2 1 1 604 3 50 230 1 1 -"31231" 4 656 3 1 1 1 1 1 100 918 100 195 24337 9 7 1 826 5 95 95 1 1 -"31232" 4 656 3 1 1 1 1 1 100 918 195 293 7027 4 8 0 569 3 50 98 0 1 -"31233" 4 656 3 1 1 1 1 1 100 918 293 366 4158 8 2 1 1219 2 25 73 1 1 -"31234" 4 656 3 1 1 1 1 1 100 918 366 458 4505 2 1 1 816 2 25 92 1 1 -"31235" 4 656 3 1 1 1 1 1 100 918 458 573 2915 7 6 1 2873 2 25 115 1 1 -"31236" 4 656 3 1 1 1 1 1 100 918 573 544 3884 5 3 0 1136 1 5 29 1 0 -"31237" 4 656 3 1 1 1 1 1 100 918 544 816 2685 3 6 0 1002 3 50 272 0 1 -"31238" 4 656 3 1 1 1 1 1 100 918 816 612 3439 6 9 1 1155 2 25 204 0 0 -"31239" 4 656 3 1 1 1 1 1 100 918 612 918 2506 1 4 0 703 3 50 306 0 1 -"31240" 4 656 3 1 1 1 2 1 100 1998 100 175 2582 2 9 0 3234 4 75 75 0 1 -"31241" 4 656 3 1 1 1 2 1 100 1998 175 306 2117 4 10 0 539 4 75 131 0 1 -"31242" 4 656 3 1 1 1 2 1 100 1998 306 291 3397 3 1 0 1011 1 5 15 1 0 -"31243" 4 656 3 1 1 1 2 1 100 1998 291 509 1881 8 6 1 617 4 75 218 1 1 -"31244" 4 656 3 1 1 1 2 1 100 1998 509 534 4777 5 7 0 1373 1 5 25 0 1 -"31245" 4 656 3 1 1 1 2 1 100 1998 534 507 1953 6 8 1 1124 1 5 27 0 0 -"31246" 4 656 3 1 1 1 2 1 100 1998 507 761 2011 7 5 1 1602 3 50 254 1 
1 -"31247" 4 656 3 1 1 1 2 1 100 1998 761 1332 2395 1 10 0 1239 4 75 571 0 1 -"31248" 4 656 3 1 1 1 2 1 100 1998 1332 1998 2085 9 3 1 1827 3 50 666 1 1 -"31249" 4 656 3 1 1 1 3 1 100 31 100 195 2679 3 5 0 1157 5 95 95 0 1 -"31250" 4 656 3 1 1 1 3 1 100 31 195 10 1401 8 9 1 756 5 95 185 0 0 -"31251" 4 656 3 1 1 1 3 1 100 31 10 20 2035 2 4 0 1082 5 95 10 0 1 -"31252" 4 656 3 1 1 1 3 1 100 31 20 39 5248 6 3 1 2172 5 95 19 1 1 -"31253" 4 656 3 1 1 1 3 1 100 31 39 76 2244 7 6 1 1065 5 95 37 1 1 -"31254" 4 656 3 1 1 1 3 1 100 31 76 4 3493 4 2 0 3922 5 95 72 1 0 -"31255" 4 656 3 1 1 1 3 1 100 31 4 8 2634 1 8 0 1192 5 95 4 0 1 -"31256" 4 656 3 1 1 1 3 1 100 31 8 16 3760 5 7 0 1414 5 95 8 0 1 -"31257" 4 656 3 1 1 1 3 1 100 31 16 31 2031 9 5 1 1312 5 95 15 1 1 -"31258" 4 656 3 1 1 1 4 1 100 2838 100 195 2414 8 7 1 0 5 95 95 1 1 -"31259" 4 656 3 1 1 1 4 1 100 2838 195 380 2496 3 10 0 1772 5 95 185 0 1 -"31260" 4 656 3 1 1 1 4 1 100 2838 380 361 2199 7 9 1 1563 1 5 19 0 0 -"31261" 4 656 3 1 1 1 4 1 100 2838 361 704 1653 9 1 1 1456 5 95 343 1 1 -"31262" 4 656 3 1 1 1 4 1 100 2838 704 1373 1906 2 3 0 856 5 95 669 0 1 -"31263" 4 656 3 1 1 1 4 1 100 2838 1373 2403 4575 1 8 0 4023 4 75 1030 0 1 -"31264" 4 656 3 1 1 1 4 1 100 2838 2403 2523 2993 5 4 1 2967 1 5 120 1 1 -"31265" 4 656 3 1 1 1 4 1 100 2838 2523 1892 2172 4 2 0 2370 2 25 631 1 0 -"31266" 4 656 3 1 1 1 4 1 100 2838 1892 2838 3691 6 3 1 4383 3 50 946 1 1 -"31267" 4 656 4 0 1 0 1 1 100 33 100 175 11455 2 7 0 2597 2 75 75 0 1 -"31268" 4 656 4 0 1 0 1 1 100 33 175 341 4365 3 7 0 1143 1 95 166 0 1 -"31269" 4 656 4 0 1 0 1 1 100 33 341 665 7232 2 8 0 817 1 95 324 0 1 -"31270" 4 656 4 0 1 0 1 1 100 33 665 33 1670 8 9 1 1508 1 95 632 0 0 -"31271" 4 656 5 1 1 0 1 0 100 0 100 195 2080 1 3 0 1200 1 95 95 0 1 -"31272" 4 656 5 1 1 0 1 0 100 0 195 380 2303 6 5 1 929 1 95 185 1 1 -"31273" 4 656 5 1 1 0 1 0 100 0 380 741 1486 2 8 0 2626 1 95 361 0 1 -"31274" 4 656 5 1 1 0 1 0 100 0 741 37 1715 8 9 1 854 1 95 704 0 0 -"31275" 4 656 5 1 1 0 1 0 100 0 37 72 1744 3 4 0 912 1 95 35 0 1 -"31276" 4 656 5 1 1 0 1 0 100 0 72 4 1389 5 7 1 383 1 95 68 0 0 -"31277" 4 656 5 1 1 0 1 0 100 0 4 8 1868 7 4 1 398 1 95 4 1 1 -"31278" 4 656 5 1 1 0 1 0 100 0 8 0 1046 4 1 0 934 1 95 8 1 0 -"31279" 4 656 5 1 1 0 2 1 100 31 100 195 1238 8 1 1 464 1 95 95 1 1 -"31280" 4 656 5 1 1 0 2 1 100 31 195 380 1495 6 2 1 574 1 95 185 1 1 -"31281" 4 656 5 1 1 0 2 1 100 31 380 19 1420 7 9 1 973 1 95 361 0 0 -"31282" 4 656 5 1 1 0 2 1 100 31 19 37 1369 2 10 0 916 1 95 18 0 1 -"31283" 4 656 5 1 1 0 2 1 100 31 37 72 1331 5 3 1 665 1 95 35 1 1 -"31284" 4 656 5 1 1 0 2 1 100 31 72 4 1114 4 2 0 1443 1 95 68 1 0 -"31285" 4 656 5 1 1 0 2 1 100 31 4 8 1428 3 5 0 735 1 95 4 0 1 -"31286" 4 656 5 1 1 0 2 1 100 31 8 16 1231 9 4 1 853 1 95 8 1 1 -"31287" 4 656 5 1 1 0 2 1 100 31 16 31 1168 1 7 0 807 1 95 15 0 1 -"31288" 4 656 5 1 1 0 3 0 100 0 100 195 1226 7 5 1 828 1 95 95 1 1 -"31289" 4 656 5 1 1 0 3 0 100 0 195 10 1176 2 1 0 600 1 95 185 1 0 -"31290" 4 656 5 1 1 0 3 0 100 0 10 20 1302 8 6 1 762 1 95 10 1 1 -"31291" 4 656 5 1 1 0 3 0 100 0 20 39 1558 4 7 0 996 1 95 19 0 1 -"31292" 4 656 5 1 1 0 3 0 100 0 39 76 1044 3 10 0 1116 1 95 37 0 1 -"31293" 4 656 5 1 1 0 3 0 100 0 76 4 2096 6 8 1 1009 1 95 72 0 0 -"31294" 4 656 5 1 1 0 3 0 100 0 4 8 1287 9 2 1 929 1 95 4 1 1 -"31295" 4 656 5 1 1 0 3 0 100 0 8 0 4639 5 3 0 428 1 95 8 1 0 -"31296" 4 656 5 1 1 0 4 0 100 0 100 195 1272 2 3 0 843 1 95 95 0 1 -"31297" 4 656 5 1 1 0 4 0 100 0 195 380 1217 7 4 1 838 1 95 185 1 1 -"31298" 4 656 5 1 1 0 4 0 100 0 380 19 1187 3 
1 0 908 1 95 361 1 0 -"31299" 4 656 5 1 1 0 4 0 100 0 19 37 1580 1 9 0 961 1 95 18 0 1 -"31300" 4 656 5 1 1 0 4 0 100 0 37 72 1301 8 7 1 997 1 95 35 1 1 -"31301" 4 656 5 1 1 0 4 0 100 0 72 140 1469 9 2 1 802 1 95 68 1 1 -"31302" 4 656 5 1 1 0 4 0 100 0 140 7 1958 5 6 1 399 1 95 133 0 0 -"31303" 4 656 5 1 1 0 4 0 100 0 7 0 1546 6 8 1 1797 1 95 7 0 0 -"31304" 4 658 2 0 1 1 1 1 100 84 100 150 12953 8 3 1 1900 3 50 50 1 1 -"31305" 4 658 2 0 1 1 1 1 100 84 150 225 43564 3 7 0 574 3 50 75 0 1 -"31306" 4 658 2 0 1 1 1 1 100 84 225 338 5276 8 2 1 622 3 50 113 1 1 -"31307" 4 658 2 0 1 1 1 1 100 84 338 84 1442 2 1 0 620 4 75 254 1 0 -"31308" 4 658 3 1 1 1 1 1 100 1035 100 195 1650 9 7 1 837 5 95 95 1 1 -"31309" 4 658 3 1 1 1 1 1 100 1035 195 205 2085 4 8 0 1336 1 5 10 0 1 -"31310" 4 658 3 1 1 1 1 1 100 1035 205 359 1921 8 2 1 781 4 75 154 1 1 -"31311" 4 658 3 1 1 1 1 1 100 1035 359 341 1490 2 1 0 2629 1 5 18 1 0 -"31312" 4 658 3 1 1 1 1 1 100 1035 341 426 2069 7 6 1 1431 2 25 85 1 1 -"31313" 4 658 3 1 1 1 1 1 100 1035 426 447 3419 5 3 1 425 1 5 21 1 1 -"31314" 4 658 3 1 1 1 1 1 100 1035 447 559 1271 3 6 0 1447 2 25 112 0 1 -"31315" 4 658 3 1 1 1 1 1 100 1035 559 531 2734 6 9 1 627 1 5 28 0 0 -"31316" 4 658 3 1 1 1 1 1 100 1035 531 1035 1298 1 4 0 1359 5 95 504 0 1 -"31317" 4 658 3 1 1 1 2 1 100 1312 100 175 4061 2 9 0 624 4 75 75 0 1 -"31318" 4 658 3 1 1 1 2 1 100 1312 175 184 2091 4 10 0 515 1 5 9 0 1 -"31319" 4 658 3 1 1 1 2 1 100 1312 184 175 3335 3 1 0 3786 1 5 9 1 0 -"31320" 4 658 3 1 1 1 2 1 100 1312 175 306 1915 8 6 1 1877 4 75 131 1 1 -"31321" 4 658 3 1 1 1 2 1 100 1312 306 291 3044 5 7 1 584 1 5 15 0 0 -"31322" 4 658 3 1 1 1 2 1 100 1312 291 276 2857 6 8 1 427 1 5 15 0 0 -"31323" 4 658 3 1 1 1 2 1 100 1312 276 345 1947 7 5 1 1144 2 25 69 1 1 -"31324" 4 658 3 1 1 1 2 1 100 1312 345 673 1803 1 10 0 1189 5 95 328 0 1 -"31325" 4 658 3 1 1 1 2 1 100 1312 673 1312 1433 9 3 1 933 5 95 639 1 1 -"31326" 4 658 3 1 1 1 3 1 100 513 100 150 1388 3 5 0 2349 3 50 50 0 1 -"31327" 4 658 3 1 1 1 3 1 100 513 150 37 1509 8 9 1 2056 4 75 113 0 0 -"31328" 4 658 3 1 1 1 3 1 100 513 37 72 1231 2 4 0 0 5 95 35 0 1 -"31329" 4 658 3 1 1 1 3 1 100 513 72 108 1640 6 3 1 509 3 50 36 1 1 -"31330" 4 658 3 1 1 1 3 1 100 513 108 189 1817 7 6 1 531 4 75 81 1 1 -"31331" 4 658 3 1 1 1 3 1 100 513 189 142 1383 4 2 0 774 2 25 47 1 0 -"31332" 4 658 3 1 1 1 3 1 100 513 142 277 1519 1 8 0 0 5 95 135 0 1 -"31333" 4 658 3 1 1 1 3 1 100 513 277 263 2473 5 7 1 903 1 5 14 0 0 -"31334" 4 658 3 1 1 1 3 1 100 513 263 513 1696 9 5 1 0 5 95 250 1 1 -"31335" 4 658 3 1 1 1 4 1 100 366 100 175 2329 8 7 1 343 4 75 75 1 1 -"31336" 4 658 3 1 1 1 4 1 100 366 175 263 1798 3 10 0 3101 3 50 88 0 1 -"31337" 4 658 3 1 1 1 4 1 100 366 263 66 1409 7 9 1 808 4 75 197 0 0 -"31338" 4 658 3 1 1 1 4 1 100 366 66 129 1547 9 1 1 0 5 95 63 1 1 -"31339" 4 658 3 1 1 1 4 1 100 366 129 252 1565 2 3 0 0 5 95 123 0 1 -"31340" 4 658 3 1 1 1 4 1 100 366 252 491 1542 1 8 0 0 5 95 239 0 1 -"31341" 4 658 3 1 1 1 4 1 100 366 491 466 2084 5 4 0 401 1 5 25 1 0 -"31342" 4 658 3 1 1 1 4 1 100 366 466 349 1417 4 2 0 656 2 25 117 1 0 -"31343" 4 658 3 1 1 1 4 1 100 366 349 366 2151 6 3 1 1300 1 5 17 1 1 -"31344" 4 658 4 0 1 0 1 1 100 143 100 195 3302 2 7 0 2786 1 95 95 0 1 -"31345" 4 658 4 0 1 0 1 1 100 143 195 293 1430 3 7 0 565 3 50 98 0 1 -"31346" 4 658 4 0 1 0 1 1 100 143 293 571 1261 2 8 0 504 1 95 278 0 1 -"31347" 4 658 4 0 1 0 1 1 100 143 571 143 1218 8 9 1 3248 2 75 428 0 0 -"31348" 4 658 5 1 1 0 1 1 100 20 100 195 1859 1 3 0 433 1 95 95 0 1 -"31349" 4 658 5 1 1 0 1 1 100 
20 195 293 2649 6 5 1 1730 3 50 98 1 1 -"31350" 4 658 5 1 1 0 1 1 100 20 293 220 8089 2 8 1 2705 4 25 73 0 0 -"31351" 4 658 5 1 1 0 1 1 100 20 220 110 2410 8 9 1 262 3 50 110 0 0 -"31352" 4 658 5 1 1 0 1 1 100 20 110 215 2973 3 4 0 296 1 95 105 0 1 -"31353" 4 658 5 1 1 0 1 1 100 20 215 107 2791 5 7 1 266 3 50 108 0 0 -"31354" 4 658 5 1 1 0 1 1 100 20 107 209 1222 7 4 1 726 1 95 102 1 1 -"31355" 4 658 5 1 1 0 1 1 100 20 209 10 1338 4 1 0 365 1 95 199 1 0 -"31356" 4 658 5 1 1 0 1 1 100 20 10 20 1978 9 6 1 327 1 95 10 1 1 -"31357" 4 658 5 1 1 0 2 1 100 133 100 195 2348 8 1 1 309 1 95 95 1 1 -"31358" 4 658 5 1 1 0 2 1 100 133 195 244 1051 6 2 1 1787 4 25 49 1 1 -"31359" 4 658 5 1 1 0 2 1 100 133 244 122 1494 7 9 1 299 3 50 122 0 0 -"31360" 4 658 5 1 1 0 2 1 100 133 122 238 1579 2 10 0 1125 1 95 116 0 1 -"31361" 4 658 5 1 1 0 2 1 100 133 238 357 2105 5 3 1 1254 3 50 119 1 1 -"31362" 4 658 5 1 1 0 2 1 100 133 357 18 1267 4 2 0 2792 1 95 339 1 0 -"31363" 4 658 5 1 1 0 2 1 100 133 18 35 1359 3 5 0 391 1 95 17 0 1 -"31364" 4 658 5 1 1 0 2 1 100 133 35 68 1051 9 4 1 292 1 95 33 1 1 -"31365" 4 658 5 1 1 0 2 1 100 133 68 133 1268 1 7 0 234 1 95 65 0 1 -"31366" 4 658 5 1 1 0 3 0 100 0 100 195 2298 7 5 1 227 1 95 95 1 1 -"31367" 4 658 5 1 1 0 3 0 100 0 195 10 918 2 1 0 312 1 95 185 1 0 -"31368" 4 658 5 1 1 0 3 0 100 0 10 20 1266 8 6 1 530 1 95 10 1 1 -"31369" 4 658 5 1 1 0 3 0 100 0 20 39 1015 4 7 0 521 1 95 19 0 1 -"31370" 4 658 5 1 1 0 3 0 100 0 39 76 1702 3 10 0 307 1 95 37 0 1 -"31371" 4 658 5 1 1 0 3 0 100 0 76 4 1332 6 8 1 229 1 95 72 0 0 -"31372" 4 658 5 1 1 0 3 0 100 0 4 8 1139 9 2 1 917 1 95 4 1 1 -"31373" 4 658 5 1 1 0 3 0 100 0 8 0 2249 5 3 0 223 1 95 8 1 0 -"31374" 4 658 5 1 1 0 4 1 100 789 100 195 1625 2 3 0 218 1 95 95 0 1 -"31375" 4 658 5 1 1 0 4 1 100 789 195 341 1268 7 4 1 251 2 75 146 1 1 -"31376" 4 658 5 1 1 0 4 1 100 789 341 85 971 3 1 0 299 2 75 256 1 0 -"31377" 4 658 5 1 1 0 4 1 100 789 85 166 1144 1 9 0 891 1 95 81 0 1 -"31378" 4 658 5 1 1 0 4 1 100 789 166 324 1088 8 7 1 370 1 95 158 1 1 -"31379" 4 658 5 1 1 0 4 1 100 789 324 632 1125 9 2 1 334 1 95 308 1 1 -"31380" 4 658 5 1 1 0 4 1 100 789 632 664 3396 5 6 0 2655 5 5 32 0 1 -"31381" 4 658 5 1 1 0 4 1 100 789 664 631 1964 6 8 1 575 5 5 33 0 0 -"31382" 4 658 5 1 1 0 4 1 100 789 631 789 1403 4 7 0 721 4 25 158 0 1 -"31383" 4 662 2 0 1 0 1 1 100 8 100 150 19425 2 7 0 369 3 50 50 0 1 -"31384" 4 662 2 0 1 0 1 1 100 8 150 225 6001 3 7 0 930 3 50 75 0 1 -"31385" 4 662 2 0 1 0 1 1 100 8 225 169 3645 2 8 1 1544 4 25 56 0 0 -"31386" 4 662 2 0 1 0 1 1 100 8 169 8 1914 8 9 1 611 1 95 161 0 0 -"31387" 4 662 3 1 1 0 1 0 100 1 100 195 14201 1 3 0 4241 1 95 95 0 1 -"31388" 4 662 3 1 1 0 1 0 100 1 195 146 3864 6 5 0 459 4 25 49 1 0 -"31389" 4 662 3 1 1 0 1 0 100 1 146 219 1580 2 8 0 490 3 50 73 0 1 -"31390" 4 662 3 1 1 0 1 0 100 1 219 55 6781 8 9 1 1393 2 75 164 0 0 -"31391" 4 662 3 1 1 0 1 0 100 1 55 107 3263 3 4 0 461 1 95 52 0 1 -"31392" 4 662 3 1 1 0 1 0 100 1 107 53 3122 5 7 1 1667 3 50 54 0 0 -"31393" 4 662 3 1 1 0 1 0 100 1 53 13 2352 7 4 0 708 2 75 40 1 0 -"31394" 4 662 3 1 1 0 1 0 100 1 13 1 2754 4 1 0 1177 1 95 12 1 0 -"31395" 4 662 3 1 1 0 2 1 100 1921 100 195 3194 8 1 1 1052 1 95 95 1 1 -"31396" 4 662 3 1 1 0 2 1 100 1921 195 293 5292 6 2 1 707 3 50 98 1 1 -"31397" 4 662 3 1 1 0 2 1 100 1921 293 513 3056 7 9 0 2564 2 75 220 0 1 -"31398" 4 662 3 1 1 0 2 1 100 1921 513 385 6086 2 10 1 698 4 25 128 0 0 -"31399" 4 662 3 1 1 0 2 1 100 1921 385 674 2296 5 3 1 958 2 75 289 1 1 -"31400" 4 662 3 1 1 0 2 1 100 1921 674 1011 6386 4 2 1 1386 3 
50 337 1 1 -"31401" 4 662 3 1 1 0 2 1 100 1921 1011 505 6342 3 5 1 711 3 50 506 0 0 -"31402" 4 662 3 1 1 0 2 1 100 1921 505 985 4580 9 4 1 399 1 95 480 1 1 -"31403" 4 662 3 1 1 0 2 1 100 1921 985 1921 1884 1 7 0 520 1 95 936 0 1 -"31404" 4 662 3 1 1 0 3 0 100 0 100 195 2228 7 5 1 448 1 95 95 1 1 -"31405" 4 662 3 1 1 0 3 0 100 0 195 10 2389 2 1 0 1376 1 95 185 1 0 -"31406" 4 662 3 1 1 0 3 0 100 0 10 20 1485 8 6 1 416 1 95 10 1 1 -"31407" 4 662 3 1 1 0 3 0 100 0 20 39 1803 4 7 0 339 1 95 19 0 1 -"31408" 4 662 3 1 1 0 3 0 100 0 39 2 1861 3 10 1 358 1 95 37 0 0 -"31409" 4 662 3 1 1 0 3 0 100 0 2 0 6751 6 8 1 475 1 95 2 0 0 -"31410" 4 662 3 1 1 0 4 1 100 985 100 195 3257 2 3 0 362 1 95 95 0 1 -"31411" 4 662 3 1 1 0 4 1 100 985 195 10 2244 7 4 0 573 1 95 185 1 0 -"31412" 4 662 3 1 1 0 4 1 100 985 10 20 1444 3 1 1 332 1 95 10 1 1 -"31413" 4 662 3 1 1 0 4 1 100 985 20 39 1344 1 9 0 427 1 95 19 0 1 -"31414" 4 662 3 1 1 0 4 1 100 985 39 76 1466 8 7 1 317 1 95 37 1 1 -"31415" 4 662 3 1 1 0 4 1 100 985 76 148 1735 9 2 1 369 1 95 72 1 1 -"31416" 4 662 3 1 1 0 4 1 100 985 148 259 4985 5 6 0 409 2 75 111 0 1 -"31417" 4 662 3 1 1 0 4 1 100 985 259 505 1808 6 8 0 361 1 95 246 0 1 -"31418" 4 662 3 1 1 0 4 1 100 985 505 985 4101 4 7 0 318 1 95 480 0 1 -"31419" 4 662 4 0 1 1 1 1 100 3 100 125 8851 8 3 1 547 2 25 25 1 1 -"31420" 4 662 4 0 1 1 1 1 100 3 125 31 4639 3 7 1 485 4 75 94 0 0 -"31421" 4 662 4 0 1 1 1 1 100 3 31 60 1650 8 2 1 1439 5 95 29 1 1 -"31422" 4 662 4 0 1 1 1 1 100 3 60 3 1628 2 1 0 1055 5 95 57 1 0 -"31423" 4 662 5 1 1 1 1 1 100 1154 100 195 5693 9 7 1 1575 5 95 95 1 1 -"31424" 4 662 5 1 1 1 1 1 100 1154 195 293 6311 4 8 0 545 3 50 98 0 1 -"31425" 4 662 5 1 1 1 1 1 100 1154 293 440 1506 8 2 1 904 3 50 147 1 1 -"31426" 4 662 5 1 1 1 1 1 100 1154 440 220 2384 2 1 0 2285 3 50 220 1 0 -"31427" 4 662 5 1 1 1 1 1 100 1154 220 110 2138 7 6 0 687 3 50 110 1 0 -"31428" 4 662 5 1 1 1 1 1 100 1154 110 193 1995 5 3 1 2206 4 75 83 1 1 -"31429" 4 662 5 1 1 1 1 1 100 1154 193 338 5708 3 6 0 3071 4 75 145 0 1 -"31430" 4 662 5 1 1 1 1 1 100 1154 338 592 2414 6 9 0 1783 4 75 254 0 1 -"31431" 4 662 5 1 1 1 1 1 100 1154 592 1154 11523 1 4 0 932 5 95 562 0 1 -"31432" 4 662 5 1 1 1 2 1 100 23 100 150 1842 2 9 0 854 3 50 50 0 1 -"31433" 4 662 5 1 1 1 2 1 100 23 150 142 2273 4 10 1 999 1 5 8 0 0 -"31434" 4 662 5 1 1 1 2 1 100 23 142 149 1320 3 1 1 431 1 5 7 1 1 -"31435" 4 662 5 1 1 1 2 1 100 23 149 261 1317 8 6 1 619 4 75 112 1 1 -"31436" 4 662 5 1 1 1 2 1 100 23 261 65 2336 5 7 1 814 4 75 196 0 0 -"31437" 4 662 5 1 1 1 2 1 100 23 65 3 2051 6 8 1 0 5 95 62 0 0 -"31438" 4 662 5 1 1 1 2 1 100 23 3 6 4559 7 5 1 0 5 95 3 1 1 -"31439" 4 662 5 1 1 1 2 1 100 23 6 12 1569 1 10 0 484 5 95 6 0 1 -"31440" 4 662 5 1 1 1 2 1 100 23 12 23 1651 9 3 1 0 5 95 11 1 1 -"31441" 4 662 5 1 1 1 3 1 100 4 100 150 2205 3 5 0 550 3 50 50 0 1 -"31442" 4 662 5 1 1 1 3 1 100 4 150 37 1187 8 9 1 4010 4 75 113 0 0 -"31443" 4 662 5 1 1 1 3 1 100 4 37 39 1546 2 4 0 887 1 5 2 0 1 -"31444" 4 662 5 1 1 1 3 1 100 4 39 29 1385 6 3 0 2196 2 25 10 1 0 -"31445" 4 662 5 1 1 1 3 1 100 4 29 14 2125 7 6 0 653 3 50 15 1 0 -"31446" 4 662 5 1 1 1 3 1 100 4 14 10 1477 4 2 0 1762 2 25 4 1 0 -"31447" 4 662 5 1 1 1 3 1 100 4 10 18 1056 1 8 0 2475 4 75 8 0 1 -"31448" 4 662 5 1 1 1 3 1 100 4 18 4 1997 5 7 1 1304 4 75 14 0 0 -"31449" 4 662 5 1 1 1 3 1 100 4 4 4 1807 9 5 1 494 1 5 0 1 1 -"31450" 4 662 5 1 1 1 4 1 100 912 100 175 25936 8 7 1 449 4 75 75 1 1 -"31451" 4 662 5 1 1 1 4 1 100 912 175 263 1169 3 10 0 801 3 50 88 0 1 -"31452" 4 662 5 1 1 1 4 1 100 912 263 131 
1145 7 9 1 690 3 50 132 0 0 -"31453" 4 662 5 1 1 1 4 1 100 912 131 255 1292 9 1 1 507 5 95 124 1 1 -"31454" 4 662 5 1 1 1 4 1 100 912 255 497 1104 2 3 0 1374 5 95 242 0 1 -"31455" 4 662 5 1 1 1 4 1 100 912 497 870 2771 1 8 0 614 4 75 373 0 1 -"31456" 4 662 5 1 1 1 4 1 100 912 870 914 1501 5 4 1 1028 1 5 44 1 1 -"31457" 4 662 5 1 1 1 4 1 100 912 914 960 2271 4 2 1 671 1 5 46 1 1 -"31458" 4 662 5 1 1 1 4 1 100 912 960 912 2525 6 3 0 697 1 5 48 1 0 -"31459" 4 669 2 0 1 1 1 1 100 140 100 150 17316 8 3 1 3563 3 50 50 1 1 -"31460" 4 669 2 0 1 1 1 1 100 140 150 225 7874 3 7 0 882 3 50 75 0 1 -"31461" 4 669 2 0 1 1 1 1 100 140 225 281 3960 8 2 1 2013 2 25 56 1 1 -"31462" 4 669 2 0 1 1 1 1 100 140 281 140 18097 2 1 0 640 3 50 141 1 0 -"31463" 4 669 3 1 1 1 1 1 100 753 100 195 3508 9 7 1 439 5 95 95 1 1 -"31464" 4 669 3 1 1 1 1 1 100 753 195 185 4769 4 8 1 418 1 5 10 0 0 -"31465" 4 669 3 1 1 1 1 1 100 753 185 361 1777 8 2 1 526 5 95 176 1 1 -"31466" 4 669 3 1 1 1 1 1 100 753 361 271 2639 2 1 0 1796 2 25 90 1 0 -"31467" 4 669 3 1 1 1 1 1 100 753 271 407 4610 7 6 1 1140 3 50 136 1 1 -"31468" 4 669 3 1 1 1 1 1 100 753 407 387 5310 5 3 0 310 1 5 20 1 0 -"31469" 4 669 3 1 1 1 1 1 100 753 387 406 2136 3 6 0 757 1 5 19 0 1 -"31470" 4 669 3 1 1 1 1 1 100 753 406 386 2425 6 9 1 259 1 5 20 0 0 -"31471" 4 669 3 1 1 1 1 1 100 753 386 753 1603 1 4 0 0 5 95 367 0 1 -"31472" 4 669 3 1 1 1 2 1 100 1193 100 195 2371 2 9 0 0 5 95 95 0 1 -"31473" 4 669 3 1 1 1 2 1 100 1193 195 185 1806 4 10 1 324 1 5 10 0 0 -"31474" 4 669 3 1 1 1 2 1 100 1193 185 139 2027 3 1 0 878 2 25 46 1 0 -"31475" 4 669 3 1 1 1 2 1 100 1193 139 209 1726 8 6 1 2044 3 50 70 1 1 -"31476" 4 669 3 1 1 1 2 1 100 1193 209 199 3142 5 7 1 301 1 5 10 0 0 -"31477" 4 669 3 1 1 1 2 1 100 1193 199 209 2244 6 8 0 251 1 5 10 0 1 -"31478" 4 669 3 1 1 1 2 1 100 1193 209 314 1795 7 5 1 522 3 50 105 1 1 -"31479" 4 669 3 1 1 1 2 1 100 1193 314 612 1514 1 10 0 0 5 95 298 0 1 -"31480" 4 669 3 1 1 1 2 1 100 1193 612 1193 2972 9 3 1 0 5 95 581 1 1 -"31481" 4 669 3 1 1 1 3 1 100 277 100 175 2525 3 5 0 428 4 75 75 0 1 -"31482" 4 669 3 1 1 1 3 1 100 277 175 44 1920 8 9 1 429 4 75 131 0 0 -"31483" 4 669 3 1 1 1 3 1 100 277 44 86 2603 2 4 0 1419 5 95 42 0 1 -"31484" 4 669 3 1 1 1 3 1 100 277 86 82 3351 6 3 0 250 1 5 4 1 0 -"31485" 4 669 3 1 1 1 3 1 100 277 82 103 2040 7 6 1 358 2 25 21 1 1 -"31486" 4 669 3 1 1 1 3 1 100 277 103 77 1532 4 2 0 351 2 25 26 1 0 -"31487" 4 669 3 1 1 1 3 1 100 277 77 150 1432 1 8 0 583 5 95 73 0 1 -"31488" 4 669 3 1 1 1 3 1 100 277 150 142 4763 5 7 1 1464 1 5 8 0 0 -"31489" 4 669 3 1 1 1 3 1 100 277 142 277 1667 9 5 1 1072 5 95 135 1 1 -"31490" 4 669 3 1 1 1 4 1 100 465 100 105 2035 8 7 1 2340 1 5 5 1 1 -"31491" 4 669 3 1 1 1 4 1 100 465 105 110 944 3 10 0 288 1 5 5 0 1 -"31492" 4 669 3 1 1 1 4 1 100 465 110 104 1246 7 9 1 234 1 5 6 0 0 -"31493" 4 669 3 1 1 1 4 1 100 465 104 203 1053 9 1 1 471 5 95 99 1 1 -"31494" 4 669 3 1 1 1 4 1 100 465 203 355 1154 2 3 0 927 4 75 152 0 1 -"31495" 4 669 3 1 1 1 4 1 100 465 355 621 2038 1 8 0 1685 4 75 266 0 1 -"31496" 4 669 3 1 1 1 4 1 100 465 621 652 1831 5 4 1 299 1 5 31 1 1 -"31497" 4 669 3 1 1 1 4 1 100 465 652 489 1341 4 2 0 279 2 25 163 1 0 -"31498" 4 669 3 1 1 1 4 1 100 465 489 465 1845 6 3 0 513 1 5 24 1 0 -"31499" 4 669 4 0 1 0 1 1 100 37 100 195 5683 2 7 0 4216 1 95 95 0 1 -"31500" 4 669 4 0 1 0 1 1 100 37 195 380 2225 3 7 0 306 1 95 185 0 1 -"31501" 4 669 4 0 1 0 1 1 100 37 380 741 1085 2 8 0 285 1 95 361 0 1 -"31502" 4 669 4 0 1 0 1 1 100 37 741 37 1443 8 9 1 258 1 95 704 0 0 -"31503" 4 669 5 1 1 0 1 
0 100 0 100 195 1831 1 3 0 448 1 95 95 0 1 -"31504" 4 669 5 1 1 0 1 0 100 0 195 380 3678 6 5 1 985 1 95 185 1 1 -"31505" 4 669 5 1 1 0 1 0 100 0 380 741 1705 2 8 0 327 1 95 361 0 1 -"31506" 4 669 5 1 1 0 1 0 100 0 741 37 1358 8 9 1 291 1 95 704 0 0 -"31507" 4 669 5 1 1 0 1 0 100 0 37 2 1182 3 4 1 290 1 95 35 0 0 -"31508" 4 669 5 1 1 0 1 0 100 0 2 4 1198 5 7 0 270 1 95 2 0 1 -"31509" 4 669 5 1 1 0 1 0 100 0 4 8 1316 7 4 1 310 1 95 4 1 1 -"31510" 4 669 5 1 1 0 1 0 100 0 8 0 1248 4 1 0 296 1 95 8 1 0 -"31511" 4 669 5 1 1 0 2 1 100 2498 100 195 1193 8 1 1 327 1 95 95 1 1 -"31512" 4 669 5 1 1 0 2 1 100 2498 195 293 1698 6 2 1 3143 3 50 98 1 1 -"31513" 4 669 5 1 1 0 2 1 100 2498 293 220 2203 7 9 1 406 4 25 73 0 0 -"31514" 4 669 5 1 1 0 2 1 100 2498 220 429 1161 2 10 0 494 1 95 209 0 1 -"31515" 4 669 5 1 1 0 2 1 100 2498 429 450 2424 5 3 1 353 5 5 21 1 1 -"31516" 4 669 5 1 1 0 2 1 100 2498 450 337 1974 4 2 0 636 4 25 113 1 0 -"31517" 4 669 5 1 1 0 2 1 100 2498 337 657 1224 3 5 0 271 1 95 320 0 1 -"31518" 4 669 5 1 1 0 2 1 100 2498 657 1281 1098 9 4 1 373 1 95 624 1 1 -"31519" 4 669 5 1 1 0 2 1 100 2498 1281 2498 1056 1 7 0 313 1 95 1217 0 1 -"31520" 4 669 5 1 1 0 3 0 100 1 100 195 1714 7 5 1 252 1 95 95 1 1 -"31521" 4 669 5 1 1 0 3 0 100 1 195 10 1375 2 1 0 369 1 95 185 1 0 -"31522" 4 669 5 1 1 0 3 0 100 1 10 20 1076 8 6 1 320 1 95 10 1 1 -"31523" 4 669 5 1 1 0 3 0 100 1 20 1 1901 4 7 1 826 1 95 19 0 0 -"31524" 4 669 5 1 1 0 4 1 100 257 100 195 1059 2 3 0 280 1 95 95 0 1 -"31525" 4 669 5 1 1 0 4 1 100 257 195 244 1194 7 4 1 581 4 25 49 1 1 -"31526" 4 669 5 1 1 0 4 1 100 257 244 183 1170 3 1 0 1004 4 25 61 1 0 -"31527" 4 669 5 1 1 0 4 1 100 257 183 357 1285 1 9 0 340 1 95 174 0 1 -"31528" 4 669 5 1 1 0 4 1 100 257 357 696 1290 8 7 1 294 1 95 339 1 1 -"31529" 4 669 5 1 1 0 4 1 100 257 696 1357 1361 9 2 1 294 1 95 661 1 1 -"31530" 4 669 5 1 1 0 4 1 100 257 1357 2646 1268 5 6 0 928 1 95 1289 0 1 -"31531" 4 669 5 1 1 0 4 1 100 257 2646 132 1465 6 8 1 654 1 95 2514 0 0 -"31532" 4 669 5 1 1 0 4 1 100 257 132 257 953 4 7 0 227 1 95 125 0 1 -"31533" 4 674 2 0 1 0 1 1 100 99 100 150 13174 2 7 0 1499 3 50 50 0 1 -"31534" 4 674 2 0 1 0 1 1 100 99 150 263 19405 3 7 0 1360 2 75 113 0 1 -"31535" 4 674 2 0 1 0 1 1 100 99 263 395 2640 2 8 0 1615 3 50 132 0 1 -"31536" 4 674 2 0 1 0 1 1 100 99 395 99 2792 8 9 1 1380 2 75 296 0 0 -"31537" 4 674 3 1 1 0 1 1 100 844 100 195 3961 1 3 0 1056 1 95 95 0 1 -"31538" 4 674 3 1 1 0 1 1 100 844 195 244 2646 6 5 1 579 4 25 49 1 1 -"31539" 4 674 3 1 1 0 1 1 100 844 244 366 3315 2 8 0 648 3 50 122 0 1 -"31540" 4 674 3 1 1 0 1 1 100 844 366 274 1950 8 9 1 619 4 25 92 0 0 -"31541" 4 674 3 1 1 0 1 1 100 844 274 411 1944 3 4 0 455 3 50 137 0 1 -"31542" 4 674 3 1 1 0 1 1 100 844 411 514 4736 5 7 0 1157 4 25 103 0 1 -"31543" 4 674 3 1 1 0 1 1 100 844 514 643 1978 7 4 1 569 4 25 129 1 1 -"31544" 4 674 3 1 1 0 1 1 100 844 643 482 2468 4 1 0 959 4 25 161 1 0 -"31545" 4 674 3 1 1 0 1 1 100 844 482 844 2772 9 6 1 921 2 75 362 1 1 -"31546" 4 674 3 1 1 0 2 1 100 1644 100 195 3252 8 1 1 1256 1 95 95 1 1 -"31547" 4 674 3 1 1 0 2 1 100 1644 195 293 2893 6 2 1 920 3 50 98 1 1 -"31548" 4 674 3 1 1 0 2 1 100 1644 293 146 1721 7 9 1 2395 3 50 147 0 0 -"31549" 4 674 3 1 1 0 2 1 100 1644 146 285 1847 2 10 0 1222 1 95 139 0 1 -"31550" 4 674 3 1 1 0 2 1 100 1644 285 428 2472 5 3 1 1633 3 50 143 1 1 -"31551" 4 674 3 1 1 0 2 1 100 1644 428 321 1884 4 2 0 561 4 25 107 1 0 -"31552" 4 674 3 1 1 0 2 1 100 1644 321 562 1699 3 5 0 2564 2 75 241 0 1 -"31553" 4 674 3 1 1 0 2 1 100 1644 562 1096 2498 9 4 1 1381 1 
95 534 1 1 -"31554" 4 674 3 1 1 0 2 1 100 1644 1096 1644 2625 1 7 0 1194 3 50 548 0 1 -"31555" 4 674 3 1 1 0 3 1 100 720 100 195 3483 7 5 1 1464 1 95 95 1 1 -"31556" 4 674 3 1 1 0 3 1 100 720 195 49 2704 2 1 0 974 2 75 146 1 0 -"31557" 4 674 3 1 1 0 3 1 100 720 49 96 1469 8 6 1 1008 1 95 47 1 1 -"31558" 4 674 3 1 1 0 3 1 100 720 96 144 2020 4 7 0 1130 3 50 48 0 1 -"31559" 4 674 3 1 1 0 3 1 100 720 144 252 1944 3 10 0 449 2 75 108 0 1 -"31560" 4 674 3 1 1 0 3 1 100 720 252 126 4702 6 8 1 626 3 50 126 0 0 -"31561" 4 674 3 1 1 0 3 1 100 720 126 246 1509 9 2 1 1211 1 95 120 1 1 -"31562" 4 674 3 1 1 0 3 1 100 720 246 369 4631 5 3 1 865 3 50 123 1 1 -"31563" 4 674 3 1 1 0 3 1 100 720 369 720 1746 1 10 0 1212 1 95 351 0 1 -"31564" 4 674 3 1 1 0 4 1 100 1606 100 195 2461 2 3 0 902 1 95 95 0 1 -"31565" 4 674 3 1 1 0 4 1 100 1606 195 293 1630 7 4 1 447 3 50 98 1 1 -"31566" 4 674 3 1 1 0 4 1 100 1606 293 220 1953 3 1 0 1056 4 25 73 1 0 -"31567" 4 674 3 1 1 0 4 1 100 1606 220 429 2430 1 9 0 966 1 95 209 0 1 -"31568" 4 674 3 1 1 0 4 1 100 1606 429 837 1581 8 7 1 848 1 95 408 1 1 -"31569" 4 674 3 1 1 0 4 1 100 1606 837 1632 1838 9 2 1 836 1 95 795 1 1 -"31570" 4 674 3 1 1 0 4 1 100 1606 1632 1714 3551 5 6 0 647 5 5 82 0 1 -"31571" 4 674 3 1 1 0 4 1 100 1606 1714 1285 1886 6 8 1 630 4 25 429 0 0 -"31572" 4 674 3 1 1 0 4 1 100 1606 1285 1606 2103 4 7 0 552 4 25 321 0 1 -"31573" 4 674 4 0 1 1 1 1 100 329 100 150 7702 8 3 1 1228 3 50 50 1 1 -"31574" 4 674 4 0 1 1 1 1 100 329 150 225 5931 3 7 0 2654 3 50 75 0 1 -"31575" 4 674 4 0 1 1 1 1 100 329 225 439 1938 8 2 1 874 5 95 214 1 1 -"31576" 4 674 4 0 1 1 1 1 100 329 439 329 1558 2 1 0 786 2 25 110 1 0 -"31577" 4 674 5 1 1 1 1 1 100 133 100 195 3218 9 7 1 630 5 95 95 1 1 -"31578" 4 674 5 1 1 1 1 1 100 133 195 244 2063 4 8 0 1197 2 25 49 0 1 -"31579" 4 674 5 1 1 1 1 1 100 133 244 476 2101 8 2 1 576 5 95 232 1 1 -"31580" 4 674 5 1 1 1 1 1 100 133 476 119 1348 2 1 0 1384 4 75 357 1 0 -"31581" 4 674 5 1 1 1 1 1 100 133 119 208 1710 7 6 1 545 4 75 89 1 1 -"31582" 4 674 5 1 1 1 1 1 100 133 208 156 1885 5 3 0 544 2 25 52 1 0 -"31583" 4 674 5 1 1 1 1 1 100 133 156 273 1748 3 6 0 877 4 75 117 0 1 -"31584" 4 674 5 1 1 1 1 1 100 133 273 68 2257 6 9 1 1327 4 75 205 0 0 -"31585" 4 674 5 1 1 1 1 1 100 133 68 133 1495 1 4 0 793 5 95 65 0 1 -"31586" 4 674 5 1 1 1 2 1 100 2418 100 195 2355 2 9 0 1006 5 95 95 0 1 -"31587" 4 674 5 1 1 1 2 1 100 2418 195 244 1405 4 10 0 499 2 25 49 0 1 -"31588" 4 674 5 1 1 1 2 1 100 2418 244 183 1523 3 1 0 461 2 25 61 1 0 -"31589" 4 674 5 1 1 1 2 1 100 2418 183 357 1318 8 6 1 1242 5 95 174 1 1 -"31590" 4 674 5 1 1 1 2 1 100 2418 357 446 2768 5 7 0 604 2 25 89 0 1 -"31591" 4 674 5 1 1 1 2 1 100 2418 446 424 1441 6 8 1 678 1 5 22 0 0 -"31592" 4 674 5 1 1 1 2 1 100 2418 424 636 1383 7 5 1 460 3 50 212 1 1 -"31593" 4 674 5 1 1 1 2 1 100 2418 636 1240 1257 1 10 0 657 5 95 604 0 1 -"31594" 4 674 5 1 1 1 2 1 100 2418 1240 2418 1472 9 3 1 666 5 95 1178 1 1 -"31595" 4 674 5 1 1 1 3 1 100 211 100 175 1836 3 5 0 721 4 75 75 0 1 -"31596" 4 674 5 1 1 1 3 1 100 211 175 9 1691 8 9 1 688 5 95 166 0 0 -"31597" 4 674 5 1 1 1 3 1 100 211 9 18 1916 2 4 0 568 5 95 9 0 1 -"31598" 4 674 5 1 1 1 3 1 100 211 18 23 1669 6 3 1 730 2 25 5 1 1 -"31599" 4 674 5 1 1 1 3 1 100 211 23 35 1386 7 6 1 419 3 50 12 1 1 -"31600" 4 674 5 1 1 1 3 1 100 211 35 44 2308 4 2 1 675 2 25 9 1 1 -"31601" 4 674 5 1 1 1 3 1 100 211 44 86 1286 1 8 0 1232 5 95 42 0 1 -"31602" 4 674 5 1 1 1 3 1 100 211 86 108 2013 5 7 0 542 2 25 22 0 1 -"31603" 4 674 5 1 1 1 3 1 100 211 108 211 1240 9 5 1 793 5 95 
103 1 1 -"31604" 4 674 5 1 1 1 4 1 100 806 100 195 2812 8 7 1 533 5 95 95 1 1 -"31605" 4 674 5 1 1 1 4 1 100 806 195 244 1310 3 10 0 2268 2 25 49 0 1 -"31606" 4 674 5 1 1 1 4 1 100 806 244 122 1486 7 9 1 456 3 50 122 0 0 -"31607" 4 674 5 1 1 1 4 1 100 806 122 238 1092 9 1 1 443 5 95 116 1 1 -"31608" 4 674 5 1 1 1 4 1 100 806 238 464 1353 2 3 0 553 5 95 226 0 1 -"31609" 4 674 5 1 1 1 4 1 100 806 464 905 1464 1 8 0 528 5 95 441 0 1 -"31610" 4 674 5 1 1 1 4 1 100 806 905 860 4306 5 4 0 756 1 5 45 1 0 -"31611" 4 674 5 1 1 1 4 1 100 806 860 645 1512 4 2 0 782 2 25 215 1 0 -"31612" 4 674 5 1 1 1 4 1 100 806 645 806 1733 6 3 1 681 2 25 161 1 1 -"31613" 4 675 2 0 1 0 1 1 100 285 100 150 9471 2 7 0 2281 3 50 50 0 1 -"31614" 4 675 2 0 1 0 1 1 100 285 150 293 13897 3 7 0 1149 1 95 143 0 1 -"31615" 4 675 2 0 1 0 1 1 100 285 293 571 2666 2 8 0 2690 1 95 278 0 1 -"31616" 4 675 2 0 1 0 1 1 100 285 571 285 1639 8 9 1 3971 3 50 286 0 0 -"31617" 4 675 3 1 1 0 1 1 100 610 100 195 1651 1 3 0 1436 1 95 95 0 1 -"31618" 4 675 3 1 1 0 1 1 100 610 195 380 1976 6 5 1 689 1 95 185 1 1 -"31619" 4 675 3 1 1 0 1 1 100 610 380 285 2191 2 8 1 1165 4 25 95 0 0 -"31620" 4 675 3 1 1 0 1 1 100 610 285 214 1540 8 9 1 939 4 25 71 0 0 -"31621" 4 675 3 1 1 0 1 1 100 610 214 417 1451 3 4 0 506 1 95 203 0 1 -"31622" 4 675 3 1 1 0 1 1 100 610 417 521 4112 5 7 0 1043 4 25 104 0 1 -"31623" 4 675 3 1 1 0 1 1 100 610 521 651 2951 7 4 1 754 4 25 130 1 1 -"31624" 4 675 3 1 1 0 1 1 100 610 651 488 1260 4 1 0 1237 4 25 163 1 0 -"31625" 4 675 3 1 1 0 1 1 100 610 488 610 2004 9 6 1 969 4 25 122 1 1 -"31626" 4 675 3 1 1 0 2 0 100 0 100 195 4263 8 1 1 1784 1 95 95 1 1 -"31627" 4 675 3 1 1 0 2 0 100 0 195 97 2717 6 2 0 923 3 50 98 1 0 -"31628" 4 675 3 1 1 0 2 0 100 0 97 24 2443 7 9 1 2031 2 75 73 0 0 -"31629" 4 675 3 1 1 0 2 0 100 0 24 47 1209 2 10 0 729 1 95 23 0 1 -"31630" 4 675 3 1 1 0 2 0 100 0 47 2 1993 5 3 0 605 1 95 45 1 0 -"31631" 4 675 3 1 1 0 2 0 100 0 2 0 1532 4 2 0 595 1 95 2 1 0 -"31632" 4 675 3 1 1 0 3 0 100 0 100 175 1918 7 5 1 837 2 75 75 1 1 -"31633" 4 675 3 1 1 0 3 0 100 0 175 9 1193 2 1 0 575 1 95 166 1 0 -"31634" 4 675 3 1 1 0 3 0 100 0 9 18 2024 8 6 1 941 1 95 9 1 1 -"31635" 4 675 3 1 1 0 3 0 100 0 18 35 1490 4 7 0 574 1 95 17 0 1 -"31636" 4 675 3 1 1 0 3 0 100 0 35 68 998 3 10 0 581 1 95 33 0 1 -"31637" 4 675 3 1 1 0 3 0 100 0 68 3 1321 6 8 1 555 1 95 65 0 0 -"31638" 4 675 3 1 1 0 3 0 100 0 3 6 1295 9 2 1 431 1 95 3 1 1 -"31639" 4 675 3 1 1 0 3 0 100 0 6 0 1162 5 3 0 480 1 95 6 1 0 -"31640" 4 675 3 1 1 0 4 1 100 2061 100 195 1402 2 3 0 558 1 95 95 0 1 -"31641" 4 675 3 1 1 0 4 1 100 2061 195 293 1617 7 4 1 993 3 50 98 1 1 -"31642" 4 675 3 1 1 0 4 1 100 2061 293 146 1187 3 1 0 1842 3 50 147 1 0 -"31643" 4 675 3 1 1 0 4 1 100 2061 146 285 1305 1 9 0 492 1 95 139 0 1 -"31644" 4 675 3 1 1 0 4 1 100 2061 285 556 1196 8 7 1 472 1 95 271 1 1 -"31645" 4 675 3 1 1 0 4 1 100 2061 556 1084 1780 9 2 1 671 1 95 528 1 1 -"31646" 4 675 3 1 1 0 4 1 100 2061 1084 2114 1355 5 6 0 844 1 95 1030 0 1 -"31647" 4 675 3 1 1 0 4 1 100 2061 2114 1057 1312 6 8 1 689 3 50 1057 0 0 -"31648" 4 675 3 1 1 0 4 1 100 2061 1057 2061 1229 4 7 0 434 1 95 1004 0 1 -"31649" 4 675 4 0 1 1 1 1 100 117 100 150 11175 8 3 1 566 3 50 50 1 1 -"31650" 4 675 4 0 1 1 1 1 100 117 150 188 1570 3 7 0 984 2 25 38 0 1 -"31651" 4 675 4 0 1 1 1 1 100 117 188 235 1188 8 2 1 1069 2 25 47 1 1 -"31652" 4 675 4 0 1 1 1 1 100 117 235 117 1074 2 1 0 641 3 50 118 1 0 -"31653" 4 675 5 1 1 1 1 1 100 335 100 150 1478 9 7 1 838 3 50 50 1 1 -"31654" 4 675 5 1 1 1 1 1 100 335 150 225 1220 4 8 
0 571 3 50 75 0 1 -"31655" 4 675 5 1 1 1 1 1 100 335 225 281 1492 8 2 1 1398 2 25 56 1 1 -"31656" 4 675 5 1 1 1 1 1 100 335 281 211 1073 2 1 0 1060 2 25 70 1 0 -"31657" 4 675 5 1 1 1 1 1 100 335 211 264 1608 7 6 1 848 2 25 53 1 1 -"31658" 4 675 5 1 1 1 1 1 100 335 264 198 1789 5 3 0 827 2 25 66 1 0 -"31659" 4 675 5 1 1 1 1 1 100 335 198 297 1218 3 6 0 653 3 50 99 0 1 -"31660" 4 675 5 1 1 1 1 1 100 335 297 223 1470 6 9 1 965 2 25 74 0 0 -"31661" 4 675 5 1 1 1 1 1 100 335 223 335 1071 1 4 0 527 3 50 112 0 1 -"31662" 4 675 5 1 1 1 2 1 100 452 100 150 6301 2 9 0 549 3 50 50 0 1 -"31663" 4 675 5 1 1 1 2 1 100 452 150 188 2118 4 10 0 1037 2 25 38 0 1 -"31664" 4 675 5 1 1 1 2 1 100 452 188 141 976 3 1 0 903 2 25 47 1 0 -"31665" 4 675 5 1 1 1 2 1 100 452 141 212 1466 8 6 1 557 3 50 71 1 1 -"31666" 4 675 5 1 1 1 2 1 100 452 212 318 1682 5 7 0 771 3 50 106 0 1 -"31667" 4 675 5 1 1 1 2 1 100 452 318 79 1419 6 8 1 586 4 75 239 0 0 -"31668" 4 675 5 1 1 1 2 1 100 452 79 119 2582 7 5 1 710 3 50 40 1 1 -"31669" 4 675 5 1 1 1 2 1 100 452 119 232 1592 1 10 0 1086 5 95 113 0 1 -"31670" 4 675 5 1 1 1 2 1 100 452 232 452 1568 9 3 1 669 5 95 220 1 1 -"31671" 4 675 5 1 1 1 3 1 100 772 100 175 2747 3 5 0 841 4 75 75 0 1 -"31672" 4 675 5 1 1 1 3 1 100 772 175 87 1454 8 9 1 2485 3 50 88 0 0 -"31673" 4 675 5 1 1 1 3 1 100 772 87 152 1323 2 4 0 558 4 75 65 0 1 -"31674" 4 675 5 1 1 1 3 1 100 772 152 266 1445 6 3 1 631 4 75 114 1 1 -"31675" 4 675 5 1 1 1 3 1 100 772 266 466 1371 7 6 1 1230 4 75 200 1 1 -"31676" 4 675 5 1 1 1 3 1 100 772 466 116 1498 4 2 0 1478 4 75 350 1 0 -"31677" 4 675 5 1 1 1 3 1 100 772 116 203 1545 1 8 0 1958 4 75 87 0 1 -"31678" 4 675 5 1 1 1 3 1 100 772 203 396 1999 5 7 0 736 5 95 193 0 1 -"31679" 4 675 5 1 1 1 3 1 100 772 396 772 1313 9 5 1 1329 5 95 376 1 1 -"31680" 4 675 5 1 1 1 4 1 100 144 100 150 3798 8 7 1 448 3 50 50 1 1 -"31681" 4 675 5 1 1 1 4 1 100 144 150 263 1058 3 10 0 1578 4 75 113 0 1 -"31682" 4 675 5 1 1 1 4 1 100 144 263 66 1382 7 9 1 922 4 75 197 0 0 -"31683" 4 675 5 1 1 1 4 1 100 144 66 116 1852 9 1 1 911 4 75 50 1 1 -"31684" 4 675 5 1 1 1 4 1 100 144 116 203 1260 2 3 0 1754 4 75 87 0 1 -"31685" 4 675 5 1 1 1 4 1 100 144 203 396 1308 1 8 0 976 5 95 193 0 1 -"31686" 4 675 5 1 1 1 4 1 100 144 396 297 1670 5 4 0 684 2 25 99 1 0 -"31687" 4 675 5 1 1 1 4 1 100 144 297 74 951 4 2 0 2061 4 75 223 1 0 -"31688" 4 675 5 1 1 1 4 1 100 144 74 144 1324 6 3 1 741 5 95 70 1 1 -"31689" 4 686 2 0 1 1 1 1 100 148 100 150 4324 8 3 1 1880 3 50 50 1 1 -"31690" 4 686 2 0 1 1 1 1 100 148 150 112 14842 3 7 1 1727 2 25 38 0 0 -"31691" 4 686 2 0 1 1 1 1 100 148 112 118 2022 8 2 1 2138 1 5 6 1 1 -"31692" 4 686 2 0 1 1 1 1 100 148 118 148 2327 2 1 1 1303 2 25 30 1 1 -"31693" 4 686 3 1 1 1 1 1 100 104 100 125 6574 9 7 1 1003 2 25 25 1 1 -"31694" 4 686 3 1 1 1 1 1 100 104 125 156 3783 4 8 0 1269 2 25 31 0 1 -"31695" 4 686 3 1 1 1 1 1 100 104 156 117 2018 8 2 0 2520 2 25 39 1 0 -"31696" 4 686 3 1 1 1 1 1 100 104 117 88 2971 2 1 0 967 2 25 29 1 0 -"31697" 4 686 3 1 1 1 1 1 100 104 88 92 2645 7 6 1 1641 1 5 4 1 1 -"31698" 4 686 3 1 1 1 1 1 100 104 92 69 5462 5 3 0 1437 2 25 23 1 0 -"31699" 4 686 3 1 1 1 1 1 100 104 69 66 2131 3 6 1 743 1 5 3 0 0 -"31700" 4 686 3 1 1 1 1 1 100 104 66 83 2241 6 9 0 1364 2 25 17 0 1 -"31701" 4 686 3 1 1 1 1 1 100 104 83 104 3061 1 4 0 1127 2 25 21 0 1 -"31702" 4 686 3 1 1 1 2 1 100 90 100 95 3942 2 9 1 741 1 5 5 0 0 -"31703" 4 686 3 1 1 1 2 1 100 90 95 119 1965 4 10 0 1773 2 25 24 0 1 -"31704" 4 686 3 1 1 1 2 1 100 90 119 89 4774 3 1 0 1290 2 25 30 1 0 -"31705" 4 686 3 1 1 1 2 1 
100 90 89 85 2546 8 6 0 819 1 5 4 1 0 -"31706" 4 686 3 1 1 1 2 1 100 90 85 64 2692 5 7 1 993 2 25 21 0 0 -"31707" 4 686 3 1 1 1 2 1 100 90 64 32 2063 6 8 1 898 3 50 32 0 0 -"31708" 4 686 3 1 1 1 2 1 100 90 32 48 2600 7 5 1 994 3 50 16 1 1 -"31709" 4 686 3 1 1 1 2 1 100 90 48 72 1893 1 10 0 1225 3 50 24 0 1 -"31710" 4 686 3 1 1 1 2 1 100 90 72 90 2626 9 3 1 2177 2 25 18 1 1 -"31711" 4 686 3 1 1 1 3 1 100 135 100 125 3414 3 5 0 2211 2 25 25 0 1 -"31712" 4 686 3 1 1 1 3 1 100 135 125 62 2975 8 9 1 723 3 50 63 0 0 -"31713" 4 686 3 1 1 1 3 1 100 135 62 93 6232 2 4 0 628 3 50 31 0 1 -"31714" 4 686 3 1 1 1 3 1 100 135 93 140 3468 6 3 1 640 3 50 47 1 1 -"31715" 4 686 3 1 1 1 3 1 100 135 140 133 5334 7 6 0 1921 1 5 7 1 0 -"31716" 4 686 3 1 1 1 3 1 100 135 133 126 4234 4 2 0 1041 1 5 7 1 0 -"31717" 4 686 3 1 1 1 3 1 100 135 126 189 1453 1 8 0 1181 3 50 63 0 1 -"31718" 4 686 3 1 1 1 3 1 100 135 189 142 2469 5 7 1 847 2 25 47 0 0 -"31719" 4 686 3 1 1 1 3 1 100 135 142 135 2526 9 5 0 3317 1 5 7 1 0 -"31720" 4 686 3 1 1 1 4 1 100 318 100 95 2663 8 7 0 673 1 5 5 1 0 -"31721" 4 686 3 1 1 1 4 1 100 318 95 100 1634 3 10 0 719 1 5 5 0 1 -"31722" 4 686 3 1 1 1 4 1 100 318 100 75 1770 7 9 1 731 2 25 25 0 0 -"31723" 4 686 3 1 1 1 4 1 100 318 75 113 1688 9 1 1 1179 3 50 38 1 1 -"31724" 4 686 3 1 1 1 4 1 100 318 113 170 2740 2 3 0 723 3 50 57 0 1 -"31725" 4 686 3 1 1 1 4 1 100 318 170 255 3193 1 8 0 918 3 50 85 0 1 -"31726" 4 686 3 1 1 1 4 1 100 318 255 268 2980 5 4 1 773 1 5 13 1 1 -"31727" 4 686 3 1 1 1 4 1 100 318 268 335 2936 4 2 1 501 2 25 67 1 1 -"31728" 4 686 3 1 1 1 4 1 100 318 335 318 3048 6 3 0 1367 1 5 17 1 0 -"31729" 4 686 4 0 1 0 1 1 100 9 100 150 4936 2 7 0 2036 3 50 50 0 1 -"31730" 4 686 4 0 1 0 1 1 100 9 150 188 5138 3 7 0 1112 4 25 38 0 1 -"31731" 4 686 4 0 1 0 1 1 100 9 188 179 1468 2 8 1 1162 5 5 9 0 0 -"31732" 4 686 4 0 1 0 1 1 100 9 179 9 2166 8 9 1 2447 1 95 170 0 0 -"31733" 4 686 5 1 1 0 1 1 100 1181 100 195 4323 1 3 0 943 1 95 95 0 1 -"31734" 4 686 5 1 1 0 1 1 100 1181 195 341 3273 6 5 1 2165 2 75 146 1 1 -"31735" 4 686 5 1 1 0 1 1 100 1181 341 512 2823 2 8 0 3097 3 50 171 0 1 -"31736" 4 686 5 1 1 0 1 1 100 1181 512 384 4462 8 9 1 1034 4 25 128 0 0 -"31737" 4 686 5 1 1 0 1 1 100 1181 384 480 1929 3 4 0 597 4 25 96 0 1 -"31738" 4 686 5 1 1 0 1 1 100 1181 480 720 2123 5 7 0 1711 3 50 240 0 1 -"31739" 4 686 5 1 1 0 1 1 100 1181 720 900 3360 7 4 1 954 4 25 180 1 1 -"31740" 4 686 5 1 1 0 1 1 100 1181 900 675 2407 4 1 0 1823 4 25 225 1 0 -"31741" 4 686 5 1 1 0 1 1 100 1181 675 1181 1476 9 6 1 2270 2 75 506 1 1 -"31742" 4 686 5 1 1 0 2 1 100 935 100 150 4483 8 1 1 456 3 50 50 1 1 -"31743" 4 686 5 1 1 0 2 1 100 935 150 225 3382 6 2 1 832 3 50 75 1 1 -"31744" 4 686 5 1 1 0 2 1 100 935 225 281 3299 7 9 0 1305 4 25 56 0 1 -"31745" 4 686 5 1 1 0 2 1 100 935 281 422 5006 2 10 0 978 3 50 141 0 1 -"31746" 4 686 5 1 1 0 2 1 100 935 422 443 5820 5 3 1 785 5 5 21 1 1 -"31747" 4 686 5 1 1 0 2 1 100 935 443 332 2404 4 2 0 1249 4 25 111 1 0 -"31748" 4 686 5 1 1 0 2 1 100 935 332 498 1817 3 5 0 3011 3 50 166 0 1 -"31749" 4 686 5 1 1 0 2 1 100 935 498 623 1902 9 4 1 1106 4 25 125 1 1 -"31750" 4 686 5 1 1 0 2 1 100 935 623 935 4949 1 7 0 2407 3 50 312 0 1 -"31751" 4 686 5 1 1 0 3 1 100 523 100 150 7432 7 5 1 905 3 50 50 1 1 -"31752" 4 686 5 1 1 0 3 1 100 523 150 75 1558 2 1 0 750 3 50 75 1 0 -"31753" 4 686 5 1 1 0 3 1 100 523 75 113 1855 8 6 1 1341 3 50 38 1 1 -"31754" 4 686 5 1 1 0 3 1 100 523 113 170 1706 4 7 0 716 3 50 57 0 1 -"31755" 4 686 5 1 1 0 3 1 100 523 170 255 2308 3 10 0 2336 3 50 85 0 1 -"31756" 4 
686 5 1 1 0 3 1 100 523 255 191 2276 6 8 1 1046 4 25 64 0 0 -"31757" 4 686 5 1 1 0 3 1 100 523 191 334 1575 9 2 1 1890 2 75 143 1 1 -"31758" 4 686 5 1 1 0 3 1 100 523 334 418 2115 5 3 1 1350 4 25 84 1 1 -"31759" 4 686 5 1 1 0 3 1 100 523 418 523 1644 1 10 0 623 4 25 105 0 1 -"31760" 4 686 5 1 1 0 4 1 100 613 100 195 1435 2 3 0 1930 1 95 95 0 1 -"31761" 4 686 5 1 1 0 4 1 100 613 195 146 2753 7 4 0 1404 4 25 49 1 0 -"31762" 4 686 5 1 1 0 4 1 100 613 146 73 2361 3 1 0 1234 3 50 73 1 0 -"31763" 4 686 5 1 1 0 4 1 100 613 73 142 1710 1 9 0 1970 1 95 69 0 1 -"31764" 4 686 5 1 1 0 4 1 100 613 142 249 1778 8 7 1 2102 2 75 107 1 1 -"31765" 4 686 5 1 1 0 4 1 100 613 249 436 4044 9 2 1 1002 2 75 187 1 1 -"31766" 4 686 5 1 1 0 4 1 100 613 436 654 4949 5 6 0 1464 3 50 218 0 1 -"31767" 4 686 5 1 1 0 4 1 100 613 654 490 3340 6 8 1 1149 4 25 164 0 0 -"31768" 4 686 5 1 1 0 4 1 100 613 490 613 1994 4 7 0 860 4 25 123 0 1 -"31769" 4 691 2 0 1 0 1 1 100 4 100 150 5759 2 7 0 1424 3 50 50 0 1 -"31770" 4 691 2 0 1 0 1 1 100 4 150 37 12265 3 7 1 1833 2 75 113 0 0 -"31771" 4 691 2 0 1 0 1 1 100 4 37 72 3559 2 8 0 1835 1 95 35 0 1 -"31772" 4 691 2 0 1 0 1 1 100 4 72 4 2611 8 9 1 2057 1 95 68 0 0 -"31773" 4 691 3 1 1 0 1 1 100 424 100 150 3244 1 3 0 1041 3 50 50 0 1 -"31774" 4 691 3 1 1 0 1 1 100 424 150 188 3912 6 5 1 882 4 25 38 1 1 -"31775" 4 691 3 1 1 0 1 1 100 424 188 367 2221 2 8 0 1367 1 95 179 0 1 -"31776" 4 691 3 1 1 0 1 1 100 424 367 275 5145 8 9 1 812 4 25 92 0 0 -"31777" 4 691 3 1 1 0 1 1 100 424 275 344 2294 3 4 0 1169 4 25 69 0 1 -"31778" 4 691 3 1 1 0 1 1 100 424 344 430 1917 5 7 0 1316 4 25 86 0 1 -"31779" 4 691 3 1 1 0 1 1 100 424 430 452 2449 7 4 1 732 5 5 22 1 1 -"31780" 4 691 3 1 1 0 1 1 100 424 452 339 2880 4 1 0 1944 4 25 113 1 0 -"31781" 4 691 3 1 1 0 1 1 100 424 339 424 2632 9 6 1 1393 4 25 85 1 1 -"31782" 4 691 3 1 1 0 2 1 100 176 100 150 3379 8 1 1 1216 3 50 50 1 1 -"31783" 4 691 3 1 1 0 2 1 100 176 150 75 1839 6 2 0 1137 3 50 75 1 0 -"31784" 4 691 3 1 1 0 2 1 100 176 75 37 1721 7 9 1 943 3 50 38 0 0 -"31785" 4 691 3 1 1 0 2 1 100 176 37 56 1312 2 10 0 1095 3 50 19 0 1 -"31786" 4 691 3 1 1 0 2 1 100 176 56 70 1365 5 3 1 921 4 25 14 1 1 -"31787" 4 691 3 1 1 0 2 1 100 176 70 52 1961 4 2 0 1013 4 25 18 1 0 -"31788" 4 691 3 1 1 0 2 1 100 176 52 78 1760 3 5 0 1442 3 50 26 0 1 -"31789" 4 691 3 1 1 0 2 1 100 176 78 117 1784 9 4 1 3747 3 50 39 1 1 -"31790" 4 691 3 1 1 0 2 1 100 176 117 176 1371 1 7 0 3302 3 50 59 0 1 -"31791" 4 691 3 1 1 0 3 1 100 1413 100 195 2330 7 5 1 2267 1 95 95 1 1 -"31792" 4 691 3 1 1 0 3 1 100 1413 195 244 1634 2 1 1 1796 4 25 49 1 1 -"31793" 4 691 3 1 1 0 3 1 100 1413 244 183 2862 8 6 0 1124 4 25 61 1 0 -"31794" 4 691 3 1 1 0 3 1 100 1413 183 275 2028 4 7 0 1203 3 50 92 0 1 -"31795" 4 691 3 1 1 0 3 1 100 1413 275 344 1950 3 10 0 759 4 25 69 0 1 -"31796" 4 691 3 1 1 0 3 1 100 1413 344 430 2425 6 8 0 2261 4 25 86 0 1 -"31797" 4 691 3 1 1 0 3 1 100 1413 430 753 2259 9 2 1 1035 2 75 323 1 1 -"31798" 4 691 3 1 1 0 3 1 100 1413 753 1130 2004 5 3 1 1517 3 50 377 1 1 -"31799" 4 691 3 1 1 0 3 1 100 1413 1130 1413 2382 1 10 0 1227 4 25 283 0 1 -"31800" 4 691 3 1 1 0 4 1 100 334 100 195 2821 2 3 0 2225 1 95 95 0 1 -"31801" 4 691 3 1 1 0 4 1 100 334 195 293 3389 7 4 1 583 3 50 98 1 1 -"31802" 4 691 3 1 1 0 4 1 100 334 293 73 2060 3 1 0 1266 2 75 220 1 0 -"31803" 4 691 3 1 1 0 4 1 100 334 73 128 1658 1 9 0 1004 2 75 55 0 1 -"31804" 4 691 3 1 1 0 4 1 100 334 128 224 1376 8 7 1 1615 2 75 96 1 1 -"31805" 4 691 3 1 1 0 4 1 100 334 224 437 2371 9 2 1 2207 1 95 213 1 1 -"31806" 4 691 3 1 1 0 
4 1 100 334 437 765 2139 5 6 0 882 2 75 328 0 1 -"31807" 4 691 3 1 1 0 4 1 100 334 765 191 1863 6 8 1 1947 2 75 574 0 0 -"31808" 4 691 3 1 1 0 4 1 100 334 191 334 1596 4 7 0 1637 2 75 143 0 1 -"31809" 4 691 4 0 1 1 1 1 100 158 100 150 5103 8 3 1 987 3 50 50 1 1 -"31810" 4 691 4 0 1 1 1 1 100 158 150 158 2243 3 7 0 729 1 5 8 0 1 -"31811" 4 691 4 0 1 1 1 1 100 158 158 166 2013 8 2 1 1011 1 5 8 1 1 -"31812" 4 691 4 0 1 1 1 1 100 158 166 158 1882 2 1 0 4637 1 5 8 1 0 -"31813" 4 691 5 1 1 1 1 1 100 232 100 150 1913 9 7 1 1040 3 50 50 1 1 -"31814" 4 691 5 1 1 1 1 1 100 232 150 225 1527 4 8 0 1195 3 50 75 0 1 -"31815" 4 691 5 1 1 1 1 1 100 232 225 338 2404 8 2 1 1075 3 50 113 1 1 -"31816" 4 691 5 1 1 1 1 1 100 232 338 169 1608 2 1 0 1619 3 50 169 1 0 -"31817" 4 691 5 1 1 1 1 1 100 232 169 211 2648 7 6 1 1282 2 25 42 1 1 -"31818" 4 691 5 1 1 1 1 1 100 232 211 222 1779 5 3 1 1191 1 5 11 1 1 -"31819" 4 691 5 1 1 1 1 1 100 232 222 233 1745 3 6 0 1670 1 5 11 0 1 -"31820" 4 691 5 1 1 1 1 1 100 232 233 221 2102 6 9 1 682 1 5 12 0 0 -"31821" 4 691 5 1 1 1 1 1 100 232 221 232 1302 1 4 0 834 1 5 11 0 1 -"31822" 4 691 5 1 1 1 2 1 100 166 100 150 1756 2 9 0 802 3 50 50 0 1 -"31823" 4 691 5 1 1 1 2 1 100 166 150 188 1823 4 10 0 889 2 25 38 0 1 -"31824" 4 691 5 1 1 1 2 1 100 166 188 141 2223 3 1 0 1574 2 25 47 1 0 -"31825" 4 691 5 1 1 1 2 1 100 166 141 148 1683 8 6 1 802 1 5 7 1 1 -"31826" 4 691 5 1 1 1 2 1 100 166 148 141 1449 5 7 1 784 1 5 7 0 0 -"31827" 4 691 5 1 1 1 2 1 100 166 141 134 1901 6 8 1 779 1 5 7 0 0 -"31828" 4 691 5 1 1 1 2 1 100 166 134 127 797 7 5 0 670 1 5 7 1 0 -"31829" 4 691 5 1 1 1 2 1 100 166 127 133 1178 1 10 0 726 1 5 6 0 1 -"31830" 4 691 5 1 1 1 2 1 100 166 133 166 1403 9 3 1 1463 2 25 33 1 1 -"31831" 4 691 5 1 1 1 3 1 100 246 100 125 2213 3 5 0 1162 2 25 25 0 1 -"31832" 4 691 5 1 1 1 3 1 100 246 125 94 1455 8 9 1 1264 2 25 31 0 0 -"31833" 4 691 5 1 1 1 3 1 100 246 94 141 1628 2 4 0 679 3 50 47 0 1 -"31834" 4 691 5 1 1 1 3 1 100 246 141 212 1778 6 3 1 865 3 50 71 1 1 -"31835" 4 691 5 1 1 1 3 1 100 246 212 223 1226 7 6 1 1861 1 5 11 1 1 -"31836" 4 691 5 1 1 1 3 1 100 246 223 212 1444 4 2 0 1212 1 5 11 1 0 -"31837" 4 691 5 1 1 1 3 1 100 246 212 223 1159 1 8 0 1105 1 5 11 0 1 -"31838" 4 691 5 1 1 1 3 1 100 246 223 234 1845 5 7 0 1688 1 5 11 0 1 -"31839" 4 691 5 1 1 1 3 1 100 246 234 246 1308 9 5 1 946 1 5 12 1 1 -"31840" 4 691 5 1 1 1 4 1 100 257 100 175 1767 8 7 1 1278 4 75 75 1 1 -"31841" 4 691 5 1 1 1 4 1 100 257 175 263 1736 3 10 0 1188 3 50 88 0 1 -"31842" 4 691 5 1 1 1 4 1 100 257 263 197 1782 7 9 1 1260 2 25 66 0 0 -"31843" 4 691 5 1 1 1 4 1 100 257 197 246 1587 9 1 1 1607 2 25 49 1 1 -"31844" 4 691 5 1 1 1 4 1 100 257 246 258 1524 2 3 0 826 1 5 12 0 1 -"31845" 4 691 5 1 1 1 4 1 100 257 258 271 1298 1 8 0 890 1 5 13 0 1 -"31846" 4 691 5 1 1 1 4 1 100 257 271 285 1164 5 4 1 815 1 5 14 1 1 -"31847" 4 691 5 1 1 1 4 1 100 257 285 271 1105 4 2 0 706 1 5 14 1 0 -"31848" 4 691 5 1 1 1 4 1 100 257 271 257 843 6 3 0 691 1 5 14 1 0 +subjID stage assessment_stage includes_gamble gamble_type block completed block_initial_points block_final_points trial_initial_points trial_final_points choice_latency n_left_colour_boxes token_box left_colour_chosen response_latency stake_index percentage_staked points_staked left_won subject_won +225 2 0 1 0 1 1 100 7 100 150 5971 2 7 0 1244 3 50 50 0 1 +225 2 0 1 0 1 1 100 7 150 75 8774 3 7 1 690 3 50 75 0 0 +225 2 0 1 0 1 1 100 7 75 146 1894 2 8 0 2090 1 95 71 0 1 +225 2 0 1 0 1 1 100 7 146 7 2259 8 9 1 1725 1 95 139 0 0 +225 3 1 1 0 1 0 100 0 100 175 10025 
1 3 0 1778 2 75 75 0 1 +225 3 1 1 0 1 0 100 0 175 9 1858 6 5 0 1735 1 95 166 1 0 +225 3 1 1 0 1 0 100 0 9 0 2337 2 8 1 4288 1 95 9 0 0 +225 3 1 1 0 2 0 100 0 100 5 6570 8 1 0 2007 1 95 95 1 0 +225 3 1 1 0 2 0 100 0 5 8 3629 6 2 1 438 3 50 3 1 1 +225 3 1 1 0 2 0 100 0 8 2 5166 7 9 1 526 2 75 6 0 0 +225 3 1 1 0 2 0 100 0 2 4 2930 2 10 0 2372 1 95 2 0 1 +225 3 1 1 0 2 0 100 0 4 0 2184 5 3 0 2350 1 95 4 1 0 +225 3 1 1 0 3 0 100 1 100 195 4621 7 5 1 1935 1 95 95 1 1 +225 3 1 1 0 3 0 100 1 195 10 2368 2 1 0 933 1 95 185 1 0 +225 3 1 1 0 3 0 100 1 10 20 2686 8 6 1 1948 1 95 10 1 1 +225 3 1 1 0 3 0 100 1 20 1 3652 4 7 1 1108 1 95 19 0 0 +225 3 1 1 0 4 1 100 1 100 195 2401 2 3 0 2164 1 95 95 0 1 +225 3 1 1 0 4 1 100 1 195 380 2386 7 4 1 886 1 95 185 1 1 +225 3 1 1 0 4 1 100 1 380 19 2226 3 1 0 709 1 95 361 1 0 +225 3 1 1 0 4 1 100 1 19 37 5136 1 9 0 593 1 95 18 0 1 +225 3 1 1 0 4 1 100 1 37 72 3114 8 7 1 780 1 95 35 1 1 +225 3 1 1 0 4 1 100 1 72 140 3980 9 2 1 692 1 95 68 1 1 +225 3 1 1 0 4 1 100 1 140 273 3720 5 6 0 652 1 95 133 0 1 +225 3 1 1 0 4 1 100 1 273 14 5074 6 8 1 558 1 95 259 0 0 +225 3 1 1 0 4 1 100 1 14 1 3601 4 7 1 731 1 95 13 0 0 +225 4 0 1 1 1 1 100 118 100 150 19845 8 3 1 1019 3 50 50 1 1 +225 4 0 1 1 1 1 100 118 150 158 13464 3 7 0 1260 1 5 8 0 1 +225 4 0 1 1 1 1 100 118 158 237 1989 8 2 1 1364 3 50 79 1 1 +225 4 0 1 1 1 1 100 118 237 118 2360 2 1 0 813 3 50 119 1 0 +225 5 1 1 1 1 1 100 907 100 195 2726 9 7 1 1171 5 95 95 1 1 +225 5 1 1 1 1 1 100 907 195 293 1945 4 8 0 882 3 50 98 0 1 +225 5 1 1 1 1 1 100 907 293 440 5461 8 2 1 631 3 50 147 1 1 +225 5 1 1 1 1 1 100 907 440 220 2551 2 1 0 467 3 50 220 1 0 +225 5 1 1 1 1 1 100 907 220 330 3018 7 6 1 4033 3 50 110 1 1 +225 5 1 1 1 1 1 100 907 330 413 3262 5 3 1 872 2 25 83 1 1 +225 5 1 1 1 1 1 100 907 413 620 2332 3 6 0 923 3 50 207 0 1 +225 5 1 1 1 1 1 100 907 620 465 4012 6 9 1 1155 2 25 155 0 0 +225 5 1 1 1 1 1 100 907 465 907 2113 1 4 0 1235 5 95 442 0 1 +225 5 1 1 1 2 1 100 240 100 150 2718 2 9 0 483 3 50 50 0 1 +225 5 1 1 1 2 1 100 240 150 225 3425 4 10 0 1060 3 50 75 0 1 +225 5 1 1 1 2 1 100 240 225 112 3255 3 1 0 337 3 50 113 1 0 +225 5 1 1 1 2 1 100 240 112 168 1606 8 6 1 983 3 50 56 1 1 +225 5 1 1 1 2 1 100 240 168 84 1622 5 7 1 614 3 50 84 0 0 +225 5 1 1 1 2 1 100 240 84 42 3078 6 8 1 394 3 50 42 0 0 +225 5 1 1 1 2 1 100 240 42 63 2141 7 5 1 2876 3 50 21 1 1 +225 5 1 1 1 2 1 100 240 63 123 2024 1 10 0 54 5 95 60 0 1 +225 5 1 1 1 2 1 100 240 123 240 1552 9 3 1 644 5 95 117 1 1 +225 5 1 1 1 3 1 100 115 100 150 2022 3 5 0 3324 3 50 50 0 1 +225 5 1 1 1 3 1 100 115 150 37 20755 8 9 1 2749 4 75 113 0 0 +225 5 1 1 1 3 1 100 115 37 56 5712 2 4 0 3798 3 50 19 0 1 +225 5 1 1 1 3 1 100 115 56 84 1510 6 3 1 1604 3 50 28 1 1 +225 5 1 1 1 3 1 100 115 84 126 1189 7 6 1 722 3 50 42 1 1 +225 5 1 1 1 3 1 100 115 126 31 1277 4 2 0 470 4 75 95 1 0 +225 5 1 1 1 3 1 100 115 31 47 3261 1 8 0 1392 3 50 16 0 1 +225 5 1 1 1 3 1 100 115 47 59 3699 5 7 0 1681 2 25 12 0 1 +225 5 1 1 1 3 1 100 115 59 115 1630 9 5 1 606 5 95 56 1 1 +225 5 1 1 1 4 1 100 455 100 195 1905 8 7 1 612 5 95 95 1 1 +225 5 1 1 1 4 1 100 455 195 380 1988 3 10 0 772 5 95 185 0 1 +225 5 1 1 1 4 1 100 455 380 95 11404 7 9 1 2372 4 75 285 0 0 +225 5 1 1 1 4 1 100 455 95 185 22680 9 1 1 2087 5 95 90 1 1 +225 5 1 1 1 4 1 100 455 185 324 9816 2 3 0 1836 4 75 139 0 1 +225 5 1 1 1 4 1 100 455 324 486 13753 1 8 0 1512 3 50 162 0 1 +225 5 1 1 1 4 1 100 455 486 729 1997 5 4 1 860 3 50 243 1 1 +225 5 1 1 1 4 1 100 455 729 364 1760 4 2 0 882 3 50 365 1 0 +225 5 1 1 1 4 1 100 455 364 455 2294 6 
3 1 1518 2 25 91 1 1 +355 2 0 1 0 1 1 100 8 100 175 12851 2 7 0 3949 2 75 75 0 1 +355 2 0 1 0 1 1 100 8 175 87 4694 3 7 1 1709 3 50 88 0 0 +355 2 0 1 0 1 1 100 8 87 170 3177 2 8 0 1391 1 95 83 0 1 +355 2 0 1 0 1 1 100 8 170 8 3707 8 9 1 944 1 95 162 0 0 +355 3 1 1 0 1 1 100 6 100 195 2277 1 3 0 1062 1 95 95 0 1 +355 3 1 1 0 1 1 100 6 195 380 2040 6 5 1 860 1 95 185 1 1 +355 3 1 1 0 1 1 100 6 380 399 3488 2 8 0 1280 5 5 19 0 1 +355 3 1 1 0 1 1 100 6 399 299 1872 8 9 1 1189 4 25 100 0 0 +355 3 1 1 0 1 1 100 6 299 15 2650 3 4 1 1129 1 95 284 0 0 +355 3 1 1 0 1 1 100 6 15 29 2920 5 7 0 755 1 95 14 0 1 +355 3 1 1 0 1 1 100 6 29 57 1948 7 4 1 685 1 95 28 1 1 +355 3 1 1 0 1 1 100 6 57 111 2242 4 1 1 388 1 95 54 1 1 +355 3 1 1 0 1 1 100 6 111 6 3543 9 6 0 1068 1 95 105 1 0 +355 3 1 1 0 2 0 100 0 100 195 6633 8 1 1 563 1 95 95 1 1 +355 3 1 1 0 2 0 100 0 195 10 2373 6 2 0 674 1 95 185 1 0 +355 3 1 1 0 2 0 100 0 10 0 1367 7 9 1 500 1 95 10 0 0 +355 3 1 1 0 3 1 100 720 100 195 2811 7 5 1 1411 1 95 95 1 1 +355 3 1 1 0 3 1 100 720 195 146 2396 2 1 0 1032 4 25 49 1 0 +355 3 1 1 0 3 1 100 720 146 285 1252 8 6 1 701 1 95 139 1 1 +355 3 1 1 0 3 1 100 720 285 214 4233 4 7 1 478 4 25 71 0 0 +355 3 1 1 0 3 1 100 720 214 375 1236 3 10 0 1615 2 75 161 0 1 +355 3 1 1 0 3 1 100 720 375 469 2655 6 8 0 935 4 25 94 0 1 +355 3 1 1 0 3 1 100 720 469 492 3240 9 2 1 628 5 5 23 1 1 +355 3 1 1 0 3 1 100 720 492 369 3683 5 3 0 522 4 25 123 1 0 +355 3 1 1 0 3 1 100 720 369 720 2198 1 10 0 603 1 95 351 0 1 +355 3 1 1 0 4 1 100 3311 100 195 1898 2 3 0 389 1 95 95 0 1 +355 3 1 1 0 4 1 100 3311 195 380 1088 7 4 1 409 1 95 185 1 1 +355 3 1 1 0 4 1 100 3311 380 665 2059 3 1 1 1358 2 75 285 1 1 +355 3 1 1 0 4 1 100 3311 665 698 1555 1 9 0 487 5 5 33 0 1 +355 3 1 1 0 4 1 100 3311 698 873 2981 8 7 1 693 4 25 175 1 1 +355 3 1 1 0 4 1 100 3311 873 1702 5329 9 2 1 1865 1 95 829 1 1 +355 3 1 1 0 4 1 100 3311 1702 1787 4309 5 6 0 670 5 5 85 0 1 +355 3 1 1 0 4 1 100 3311 1787 1698 1460 6 8 1 572 5 5 89 0 0 +355 3 1 1 0 4 1 100 3311 1698 3311 3070 4 7 0 633 1 95 1613 0 1 +355 4 0 1 1 1 1 100 313 100 125 8175 8 3 1 2023 2 25 25 1 1 +355 4 0 1 1 1 1 100 313 125 188 5812 3 7 0 429 3 50 63 0 1 +355 4 0 1 1 1 1 100 313 188 329 1814 8 2 1 622 4 75 141 1 1 +355 4 0 1 1 1 1 100 313 329 313 2421 2 1 0 1336 1 5 16 1 0 +355 5 1 1 1 1 1 100 786 100 195 1596 9 7 1 1366 5 95 95 1 1 +355 5 1 1 1 1 1 100 786 195 341 1354 4 8 0 552 4 75 146 0 1 +355 5 1 1 1 1 1 100 786 341 358 1947 8 2 1 492 1 5 17 1 1 +355 5 1 1 1 1 1 100 786 358 340 2074 2 1 0 372 1 5 18 1 0 +355 5 1 1 1 1 1 100 786 340 357 1104 7 6 1 1104 1 5 17 1 1 +355 5 1 1 1 1 1 100 786 357 339 1689 5 3 0 566 1 5 18 1 0 +355 5 1 1 1 1 1 100 786 339 424 2617 3 6 0 794 2 25 85 0 1 +355 5 1 1 1 1 1 100 786 424 403 3297 6 9 1 1034 1 5 21 0 0 +355 5 1 1 1 1 1 100 786 403 786 1262 1 4 0 0 5 95 383 0 1 +355 5 1 1 1 2 1 100 1037 100 175 1478 2 9 0 325 4 75 75 0 1 +355 5 1 1 1 2 1 100 1037 175 131 2203 4 10 1 536 2 25 44 0 0 +355 5 1 1 1 2 1 100 1037 131 124 1116 3 1 0 628 1 5 7 1 0 +355 5 1 1 1 2 1 100 1037 124 242 1092 8 6 1 2086 5 95 118 1 1 +355 5 1 1 1 2 1 100 1037 242 230 2400 5 7 1 563 1 5 12 0 0 +355 5 1 1 1 2 1 100 1037 230 218 1301 6 8 1 637 1 5 12 0 0 +355 5 1 1 1 2 1 100 1037 218 273 4996 7 5 1 1700 2 25 55 1 1 +355 5 1 1 1 2 1 100 1037 273 532 2360 1 10 0 0 5 95 259 0 1 +355 5 1 1 1 2 1 100 1037 532 1037 1741 9 3 1 0 5 95 505 1 1 +355 5 1 1 1 3 1 100 53 100 195 3014 3 5 0 861 5 95 95 0 1 +355 5 1 1 1 3 1 100 53 195 10 889 8 9 1 0 5 95 185 0 0 +355 5 1 1 1 3 1 100 53 10 18 1572 2 4 0 879 4 75 8 0 
1 +355 5 1 1 1 3 1 100 53 18 19 1490 6 3 1 585 1 5 1 1 1 +355 5 1 1 1 3 1 100 53 19 20 1155 7 6 1 1801 1 5 1 1 1 +355 5 1 1 1 3 1 100 53 20 15 1592 4 2 0 802 2 25 5 1 0 +355 5 1 1 1 3 1 100 53 15 29 1386 1 8 0 781 5 95 14 0 1 +355 5 1 1 1 3 1 100 53 29 30 1550 5 7 0 762 1 5 1 0 1 +355 5 1 1 1 3 1 100 53 30 53 2395 9 5 1 1142 4 75 23 1 1 +355 5 1 1 1 4 1 100 2448 100 195 2040 8 7 1 589 5 95 95 1 1 +355 5 1 1 1 4 1 100 2448 195 380 1310 3 10 0 558 5 95 185 0 1 +355 5 1 1 1 4 1 100 2448 380 285 1536 7 9 1 860 2 25 95 0 0 +355 5 1 1 1 4 1 100 2448 285 556 1488 9 1 1 0 5 95 271 1 1 +355 5 1 1 1 4 1 100 2448 556 1084 1831 2 3 0 0 5 95 528 0 1 +355 5 1 1 1 4 1 100 2448 1084 2114 1653 1 8 0 0 5 95 1030 0 1 +355 5 1 1 1 4 1 100 2448 2114 2220 4413 5 4 1 616 1 5 106 1 1 +355 5 1 1 1 4 1 100 2448 2220 2331 3824 4 2 1 544 1 5 111 1 1 +355 5 1 1 1 4 1 100 2448 2331 2448 1395 6 3 1 846 1 5 117 1 1 +374 2 0 1 1 1 1 100 123 100 125 8561 8 3 1 2615 2 25 25 1 1 +374 2 0 1 1 1 1 100 123 125 156 8872 3 7 0 1249 2 25 31 0 1 +374 2 0 1 1 1 1 100 123 156 164 2761 8 2 1 1284 1 5 8 1 1 +374 2 0 1 1 1 1 100 123 164 123 4948 2 1 0 462 2 25 41 1 0 +374 3 1 1 1 1 1 100 420 100 175 3855 9 7 1 973 4 75 75 1 1 +374 3 1 1 1 1 1 100 420 175 166 3387 4 8 1 959 1 5 9 0 0 +374 3 1 1 1 1 1 100 420 166 208 6727 8 2 1 1804 2 25 42 1 1 +374 3 1 1 1 1 1 100 420 208 198 2499 2 1 0 978 1 5 10 1 0 +374 3 1 1 1 1 1 100 420 198 248 2120 7 6 1 329 2 25 50 1 1 +374 3 1 1 1 1 1 100 420 248 236 2895 5 3 0 363 1 5 12 1 0 +374 3 1 1 1 1 1 100 420 236 295 2007 3 6 0 1189 2 25 59 0 1 +374 3 1 1 1 1 1 100 420 295 280 4234 6 9 1 598 1 5 15 0 0 +374 3 1 1 1 1 1 100 420 280 420 1831 1 4 0 1014 3 50 140 0 1 +374 3 1 1 1 2 1 100 396 100 150 3904 2 9 0 300 3 50 50 0 1 +374 3 1 1 1 2 1 100 396 150 142 2011 4 10 1 593 1 5 8 0 0 +374 3 1 1 1 2 1 100 396 142 135 1977 3 1 0 761 1 5 7 1 0 +374 3 1 1 1 2 1 100 396 135 169 1226 8 6 1 453 2 25 34 1 1 +374 3 1 1 1 2 1 100 396 169 177 1844 5 7 0 290 1 5 8 0 1 +374 3 1 1 1 2 1 100 396 177 168 2057 6 8 1 940 1 5 9 0 0 +374 3 1 1 1 2 1 100 396 168 176 3188 7 5 1 394 1 5 8 1 1 +374 3 1 1 1 2 1 100 396 176 264 1340 1 10 0 346 3 50 88 0 1 +374 3 1 1 1 2 1 100 396 264 396 1518 9 3 1 284 3 50 132 1 1 +374 3 1 1 1 3 1 100 306 100 95 2237 3 5 1 976 1 5 5 0 0 +374 3 1 1 1 3 1 100 306 95 47 1398 8 9 1 298 3 50 48 0 0 +374 3 1 1 1 3 1 100 306 47 59 3449 2 4 0 763 2 25 12 0 1 +374 3 1 1 1 3 1 100 306 59 89 2645 6 3 1 396 3 50 30 1 1 +374 3 1 1 1 3 1 100 306 89 111 1479 7 6 1 2767 2 25 22 1 1 +374 3 1 1 1 3 1 100 306 111 105 5385 4 2 0 325 1 5 6 1 0 +374 3 1 1 1 3 1 100 306 105 184 1419 1 8 0 485 4 75 79 0 1 +374 3 1 1 1 3 1 100 306 184 175 2340 5 7 1 641 1 5 9 0 0 +374 3 1 1 1 3 1 100 306 175 306 1408 9 5 1 979 4 75 131 1 1 +374 3 1 1 1 4 1 100 125 100 105 1930 8 7 1 292 1 5 5 1 1 +374 3 1 1 1 4 1 100 125 105 110 1187 3 10 0 320 1 5 5 0 1 +374 3 1 1 1 4 1 100 125 110 104 1766 7 9 1 567 1 5 6 0 0 +374 3 1 1 1 4 1 100 125 104 109 2042 9 1 1 918 1 5 5 1 1 +374 3 1 1 1 4 1 100 125 109 114 3464 2 3 0 314 1 5 5 0 1 +374 3 1 1 1 4 1 100 125 114 120 1247 1 8 0 378 1 5 6 0 1 +374 3 1 1 1 4 1 100 125 120 126 1629 5 4 1 397 1 5 6 1 1 +374 3 1 1 1 4 1 100 125 126 132 1383 4 2 1 364 1 5 6 1 1 +374 3 1 1 1 4 1 100 125 132 125 1384 6 3 0 460 1 5 7 1 0 +374 4 0 1 0 1 1 100 128 100 175 9354 2 7 0 297 2 75 75 0 1 +374 4 0 1 0 1 1 100 128 175 263 2857 3 7 0 1079 3 50 88 0 1 +374 4 0 1 0 1 1 100 128 263 513 1592 2 8 0 1810 1 95 250 0 1 +374 4 0 1 0 1 1 100 128 513 128 2985 8 9 1 537 2 75 385 0 0 +374 5 1 1 0 1 1 100 107 100 195 3089 1 3 0 2587 
1 95 95 0 1 +374 5 1 1 0 1 1 100 107 195 244 4405 6 5 1 353 4 25 49 1 1 +374 5 1 1 0 1 1 100 107 244 305 2189 2 8 0 468 4 25 61 0 1 +374 5 1 1 0 1 1 100 107 305 152 1787 8 9 1 572 3 50 153 0 0 +374 5 1 1 0 1 1 100 107 152 228 1574 3 4 0 352 3 50 76 0 1 +374 5 1 1 0 1 1 100 107 228 57 1509 5 7 1 682 2 75 171 0 0 +374 5 1 1 0 1 1 100 107 57 111 1360 7 4 1 2780 1 95 54 1 1 +374 5 1 1 0 1 1 100 107 111 55 1469 4 1 0 686 3 50 56 1 0 +374 5 1 1 0 1 1 100 107 55 107 1875 9 6 1 2278 1 95 52 1 1 +374 5 1 1 0 2 1 100 365 100 195 3114 8 1 1 1819 1 95 95 1 1 +374 5 1 1 0 2 1 100 365 195 380 1681 6 2 1 303 1 95 185 1 1 +374 5 1 1 0 2 1 100 365 380 19 1142 7 9 1 334 1 95 361 0 0 +374 5 1 1 0 2 1 100 365 19 37 1147 2 10 0 1298 1 95 18 0 1 +374 5 1 1 0 2 1 100 365 37 65 1217 5 3 1 2622 2 75 28 1 1 +374 5 1 1 0 2 1 100 365 65 49 1126 4 2 0 262 4 25 16 1 0 +374 5 1 1 0 2 1 100 365 49 96 1990 3 5 0 333 1 95 47 0 1 +374 5 1 1 0 2 1 100 365 96 187 1075 9 4 1 285 1 95 91 1 1 +374 5 1 1 0 2 1 100 365 187 365 1277 1 7 0 2045 1 95 178 0 1 +374 5 1 1 0 3 1 100 4 100 175 1213 7 5 1 1673 2 75 75 1 1 +374 5 1 1 0 3 1 100 4 175 9 1399 2 1 0 329 1 95 166 1 0 +374 5 1 1 0 3 1 100 4 9 16 1400 8 6 1 419 2 75 7 1 1 +374 5 1 1 0 3 1 100 4 16 24 2107 4 7 0 1724 3 50 8 0 1 +374 5 1 1 0 3 1 100 4 24 42 1304 3 10 0 416 2 75 18 0 1 +374 5 1 1 0 3 1 100 4 42 2 1697 6 8 1 590 1 95 40 0 0 +374 5 1 1 0 3 1 100 4 2 4 1799 9 2 1 992 1 95 2 1 1 +374 5 1 1 0 3 1 100 4 4 2 3893 5 3 0 386 3 50 2 1 0 +374 5 1 1 0 3 1 100 4 2 4 1436 1 10 0 284 1 95 2 0 1 +374 5 1 1 0 4 1 100 2308 100 175 1552 2 3 0 4595 2 75 75 0 1 +374 5 1 1 0 4 1 100 2308 175 341 1286 7 4 1 375 1 95 166 1 1 +374 5 1 1 0 4 1 100 2308 341 170 1498 3 1 0 2041 3 50 171 1 0 +374 5 1 1 0 4 1 100 2308 170 332 1333 1 9 0 275 1 95 162 0 1 +374 5 1 1 0 4 1 100 2308 332 647 1211 8 7 1 375 1 95 315 1 1 +374 5 1 1 0 4 1 100 2308 647 1262 1443 9 2 1 331 1 95 615 1 1 +374 5 1 1 0 4 1 100 2308 1262 2461 2561 5 6 0 1197 1 95 1199 0 1 +374 5 1 1 0 4 1 100 2308 2461 1846 2113 6 8 1 914 4 25 615 0 0 +374 5 1 1 0 4 1 100 2308 1846 2308 1116 4 7 0 2509 4 25 462 0 1 +414 2 0 1 1 1 1 100 295 100 150 12353 8 3 1 59 3 50 50 1 1 +414 2 0 1 1 1 1 100 295 150 225 4118 3 7 0 2192 3 50 75 0 1 +414 2 0 1 1 1 1 100 295 225 281 2861 8 2 1 410 2 25 56 1 1 +414 2 0 1 1 1 1 100 295 281 295 3020 2 1 1 1218 1 5 14 1 1 +414 3 1 1 1 1 1 100 1047 100 195 3010 9 7 1 0 5 95 95 1 1 +414 3 1 1 1 1 1 100 1047 195 244 1448 4 8 0 1465 2 25 49 0 1 +414 3 1 1 1 1 1 100 1047 244 366 2017 8 2 1 687 3 50 122 1 1 +414 3 1 1 1 1 1 100 1047 366 384 1923 2 1 1 2109 1 5 18 1 1 +414 3 1 1 1 1 1 100 1047 384 480 4198 7 6 1 1516 2 25 96 1 1 +414 3 1 1 1 1 1 100 1047 480 504 3353 5 3 1 2599 1 5 24 1 1 +414 3 1 1 1 1 1 100 1047 504 630 1376 3 6 0 909 2 25 126 0 1 +414 3 1 1 1 1 1 100 1047 630 598 3438 6 9 1 1056 1 5 32 0 0 +414 3 1 1 1 1 1 100 1047 598 1047 1859 1 4 0 1224 4 75 449 0 1 +414 3 1 1 1 2 1 100 364 100 150 3260 2 9 0 614 3 50 50 0 1 +414 3 1 1 1 2 1 100 364 150 225 1455 4 10 0 447 3 50 75 0 1 +414 3 1 1 1 2 1 100 364 225 169 3335 3 1 0 4214 2 25 56 1 0 +414 3 1 1 1 2 1 100 364 169 254 1643 8 6 1 1234 3 50 85 1 1 +414 3 1 1 1 2 1 100 364 254 190 3525 5 7 1 2803 2 25 64 0 0 +414 3 1 1 1 2 1 100 364 190 95 1547 6 8 1 687 3 50 95 0 0 +414 3 1 1 1 2 1 100 364 95 166 1307 7 5 1 1611 4 75 71 1 1 +414 3 1 1 1 2 1 100 364 166 291 1635 1 10 0 1036 4 75 125 0 1 +414 3 1 1 1 2 1 100 364 291 364 1520 9 3 1 1637 2 25 73 1 1 +414 3 1 1 1 3 1 100 368 100 175 2813 3 5 0 1756 4 75 75 0 1 +414 3 1 1 1 3 1 100 368 175 87 1462 8 9 1 1046 3 50 
88 0 0 +414 3 1 1 1 3 1 100 368 87 91 2874 2 4 0 567 1 5 4 0 1 +414 3 1 1 1 3 1 100 368 91 96 3465 6 3 1 442 1 5 5 1 1 +414 3 1 1 1 3 1 100 368 96 168 2250 7 6 1 813 4 75 72 1 1 +414 3 1 1 1 3 1 100 368 168 126 2020 4 2 0 626 2 25 42 1 0 +414 3 1 1 1 3 1 100 368 126 221 2169 1 8 0 752 4 75 95 0 1 +414 3 1 1 1 3 1 100 368 221 210 3658 5 7 1 561 1 5 11 0 0 +414 3 1 1 1 3 1 100 368 210 368 1411 9 5 1 526 4 75 158 1 1 +414 3 1 1 1 4 1 100 1927 100 175 3500 8 7 1 628 4 75 75 1 1 +414 3 1 1 1 4 1 100 1927 175 263 4243 3 10 0 1455 3 50 88 0 1 +414 3 1 1 1 4 1 100 1927 263 250 2575 7 9 1 498 1 5 13 0 0 +414 3 1 1 1 4 1 100 1927 250 488 984 9 1 1 1463 5 95 238 1 1 +414 3 1 1 1 4 1 100 1927 488 854 2190 2 3 0 1053 4 75 366 0 1 +414 3 1 1 1 4 1 100 1927 854 1665 2032 1 8 0 2566 5 95 811 0 1 +414 3 1 1 1 4 1 100 1927 1665 1748 1801 5 4 1 921 1 5 83 1 1 +414 3 1 1 1 4 1 100 1927 1748 1835 1587 4 2 1 452 1 5 87 1 1 +414 3 1 1 1 4 1 100 1927 1835 1927 1436 6 3 1 1613 1 5 92 1 1 +414 4 0 1 0 1 1 100 37 100 195 8025 2 7 0 4200 1 95 95 0 1 +414 4 0 1 0 1 1 100 37 195 380 4781 3 7 0 703 1 95 185 0 1 +414 4 0 1 0 1 1 100 37 380 741 2315 2 8 0 1939 1 95 361 0 1 +414 4 0 1 0 1 1 100 37 741 37 1195 8 9 1 336 1 95 704 0 0 +414 5 1 1 0 1 1 100 68 100 195 1844 1 3 0 394 1 95 95 0 1 +414 5 1 1 0 1 1 100 68 195 380 1980 6 5 1 1028 1 95 185 1 1 +414 5 1 1 0 1 1 100 68 380 741 1712 2 8 0 356 1 95 361 0 1 +414 5 1 1 0 1 1 100 68 741 370 2952 8 9 1 1313 3 50 371 0 0 +414 5 1 1 0 1 1 100 68 370 722 2112 3 4 0 1081 1 95 352 0 1 +414 5 1 1 0 1 1 100 68 722 361 2114 5 7 1 444 3 50 361 0 0 +414 5 1 1 0 1 1 100 68 361 704 2076 7 4 1 262 1 95 343 1 1 +414 5 1 1 0 1 1 100 68 704 35 1311 4 1 0 286 1 95 669 1 0 +414 5 1 1 0 1 1 100 68 35 68 1326 9 6 1 372 1 95 33 1 1 +414 5 1 1 0 2 0 100 0 100 195 1596 8 1 1 605 1 95 95 1 1 +414 5 1 1 0 2 0 100 0 195 380 1173 6 2 1 187 1 95 185 1 1 +414 5 1 1 0 2 0 100 0 380 19 2623 7 9 1 240 1 95 361 0 0 +414 5 1 1 0 2 0 100 0 19 37 1474 2 10 0 209 1 95 18 0 1 +414 5 1 1 0 2 0 100 0 37 9 2484 5 3 0 1220 2 75 28 1 0 +414 5 1 1 0 2 0 100 0 9 0 1167 4 2 0 245 1 95 9 1 0 +414 5 1 1 0 3 0 100 1 100 195 2273 7 5 1 283 1 95 95 1 1 +414 5 1 1 0 3 0 100 1 195 10 1203 2 1 0 247 1 95 185 1 0 +414 5 1 1 0 3 0 100 1 10 20 1312 8 6 1 1288 1 95 10 1 1 +414 5 1 1 0 3 0 100 1 20 1 2472 4 7 1 240 1 95 19 0 0 +414 5 1 1 0 4 1 100 1544 100 195 1611 2 3 0 948 1 95 95 0 1 +414 5 1 1 0 4 1 100 1544 195 380 1324 7 4 1 611 1 95 185 1 1 +414 5 1 1 0 4 1 100 1544 380 285 2131 3 1 0 644 4 25 95 1 0 +414 5 1 1 0 4 1 100 1544 285 556 1386 1 9 0 725 1 95 271 0 1 +414 5 1 1 0 4 1 100 1544 556 1084 1232 8 7 1 267 1 95 528 1 1 +414 5 1 1 0 4 1 100 1544 1084 2114 987 9 2 1 274 1 95 1030 1 1 +414 5 1 1 0 4 1 100 1544 2114 1585 2618 5 6 1 3131 4 25 529 0 0 +414 5 1 1 0 4 1 100 1544 1585 792 1675 6 8 1 841 3 50 793 0 0 +414 5 1 1 0 4 1 100 1544 792 1544 1814 4 7 0 422 1 95 752 0 1 +583 2 0 1 1 1 1 100 106 100 105 25011 8 3 1 1579 1 5 5 1 1 +583 2 0 1 1 1 1 100 106 105 52 9333 3 7 1 1317 3 50 53 0 0 +583 2 0 1 1 1 1 100 106 52 101 3376 8 2 1 1380 5 95 49 1 1 +583 2 0 1 1 1 1 100 106 101 106 4077 2 1 1 916 1 5 5 1 1 +583 3 1 1 1 1 1 100 513 100 175 2921 9 7 1 1045 4 75 75 1 1 +583 3 1 1 1 1 1 100 513 175 166 4370 4 8 1 1374 1 5 9 0 0 +583 3 1 1 1 1 1 100 513 166 208 2669 8 2 1 694 2 25 42 1 1 +583 3 1 1 1 1 1 100 513 208 198 3879 2 1 0 2492 1 5 10 1 0 +583 3 1 1 1 1 1 100 513 198 297 1784 7 6 1 662 3 50 99 1 1 +583 3 1 1 1 1 1 100 513 297 312 4839 5 3 1 1220 1 5 15 1 1 +583 3 1 1 1 1 1 100 513 312 390 1793 3 6 0 1294 2 25 78 0 1 +583 3 
1 1 1 1 1 100 513 390 410 2208 6 9 0 1018 1 5 20 0 1 +583 3 1 1 1 1 1 100 513 410 513 1555 1 4 0 2619 2 25 103 0 1 +583 3 1 1 1 2 1 100 521 100 125 2475 2 9 0 678 2 25 25 0 1 +583 3 1 1 1 2 1 100 521 125 156 1770 4 10 0 1146 2 25 31 0 1 +583 3 1 1 1 2 1 100 521 156 164 1982 3 1 1 3746 1 5 8 1 1 +583 3 1 1 1 2 1 100 521 164 205 3186 8 6 1 1863 2 25 41 1 1 +583 3 1 1 1 2 1 100 521 205 195 1817 5 7 1 970 1 5 10 0 0 +583 3 1 1 1 2 1 100 521 195 185 1402 6 8 1 1085 1 5 10 0 0 +583 3 1 1 1 2 1 100 521 185 278 1762 7 5 1 1377 3 50 93 1 1 +583 3 1 1 1 2 1 100 521 278 417 1835 1 10 0 1073 3 50 139 0 1 +583 3 1 1 1 2 1 100 521 417 521 1993 9 3 1 920 2 25 104 1 1 +583 3 1 1 1 3 1 100 380 100 105 2665 3 5 0 859 1 5 5 0 1 +583 3 1 1 1 3 1 100 380 105 110 2646 8 9 0 676 1 5 5 0 1 +583 3 1 1 1 3 1 100 380 110 193 1977 2 4 0 524 4 75 83 0 1 +583 3 1 1 1 3 1 100 380 193 183 2349 6 3 0 1121 1 5 10 1 0 +583 3 1 1 1 3 1 100 380 183 229 1415 7 6 1 1234 2 25 46 1 1 +583 3 1 1 1 3 1 100 380 229 218 2015 4 2 0 2820 1 5 11 1 0 +583 3 1 1 1 3 1 100 380 218 207 2320 1 8 1 1206 1 5 11 0 0 +583 3 1 1 1 3 1 100 380 207 217 2030 5 7 0 921 1 5 10 0 1 +583 3 1 1 1 3 1 100 380 217 380 1333 9 5 1 552 4 75 163 1 1 +583 3 1 1 1 4 1 100 218 100 105 2167 8 7 1 1180 1 5 5 1 1 +583 3 1 1 1 4 1 100 218 105 131 1164 3 10 0 461 2 25 26 0 1 +583 3 1 1 1 4 1 100 218 131 98 1682 7 9 1 1116 2 25 33 0 0 +583 3 1 1 1 4 1 100 218 98 172 1597 9 1 1 1401 4 75 74 1 1 +583 3 1 1 1 4 1 100 218 172 258 1721 2 3 0 819 3 50 86 0 1 +583 3 1 1 1 4 1 100 218 258 245 2129 1 8 1 1260 1 5 13 0 0 +583 3 1 1 1 4 1 100 218 245 306 1607 5 4 1 935 2 25 61 1 1 +583 3 1 1 1 4 1 100 218 306 291 2057 4 2 0 978 1 5 15 1 0 +583 3 1 1 1 4 1 100 218 291 218 942 6 3 0 1862 2 25 73 1 0 +583 4 0 1 0 1 1 100 13 100 195 3557 2 7 0 1766 1 95 95 0 1 +583 4 0 1 0 1 1 100 13 195 341 2588 3 7 0 1427 2 75 146 0 1 +583 4 0 1 0 1 1 100 13 341 256 1990 2 8 1 1412 4 25 85 0 0 +583 4 0 1 0 1 1 100 13 256 13 1551 8 9 1 1152 1 95 243 0 0 +583 5 1 1 0 1 1 100 68 100 195 1481 1 3 0 1338 1 95 95 0 1 +583 5 1 1 0 1 1 100 68 195 380 4040 6 5 1 1342 1 95 185 1 1 +583 5 1 1 0 1 1 100 68 380 741 3894 2 8 0 763 1 95 361 0 1 +583 5 1 1 0 1 1 100 68 741 370 1266 8 9 1 1225 3 50 371 0 0 +583 5 1 1 0 1 1 100 68 370 722 2212 3 4 0 1296 1 95 352 0 1 +583 5 1 1 0 1 1 100 68 722 361 2542 5 7 1 1164 3 50 361 0 0 +583 5 1 1 0 1 1 100 68 361 704 1844 7 4 1 1065 1 95 343 1 1 +583 5 1 1 0 1 1 100 68 704 35 2384 4 1 0 2151 1 95 669 1 0 +583 5 1 1 0 1 1 100 68 35 68 1424 9 6 1 2331 1 95 33 1 1 +583 5 1 1 0 2 1 100 9635 100 195 4301 8 1 1 1735 1 95 95 1 1 +583 5 1 1 0 2 1 100 9635 195 380 1656 6 2 1 1559 1 95 185 1 1 +583 5 1 1 0 2 1 100 9635 380 475 1811 7 9 0 632 4 25 95 0 1 +583 5 1 1 0 2 1 100 9635 475 926 1953 2 10 0 1028 1 95 451 0 1 +583 5 1 1 0 2 1 100 9635 926 1158 1363 5 3 1 1306 4 25 232 1 1 +583 5 1 1 0 2 1 100 9635 1158 2027 2239 4 2 1 1314 2 75 869 1 1 +583 5 1 1 0 2 1 100 9635 2027 2534 1641 3 5 0 735 4 25 507 0 1 +583 5 1 1 0 2 1 100 9635 2534 4941 2670 9 4 1 1025 1 95 2407 1 1 +583 5 1 1 0 2 1 100 9635 4941 9635 1362 1 7 0 1095 1 95 4694 0 1 +583 5 1 1 0 3 1 100 15 100 195 2787 7 5 1 973 1 95 95 1 1 +583 5 1 1 0 3 1 100 15 195 10 1389 2 1 0 943 1 95 185 1 0 +583 5 1 1 0 3 1 100 15 10 20 1399 8 6 1 633 1 95 10 1 1 +583 5 1 1 0 3 1 100 15 20 39 1920 4 7 0 763 1 95 19 0 1 +583 5 1 1 0 3 1 100 15 39 76 1349 3 10 0 1055 1 95 37 0 1 +583 5 1 1 0 3 1 100 15 76 4 1258 6 8 1 969 1 95 72 0 0 +583 5 1 1 0 3 1 100 15 4 8 2426 9 2 1 922 1 95 4 1 1 +583 5 1 1 0 3 1 100 15 8 16 1246 5 3 1 1272 1 95 8 1 1 
+583 5 1 1 0 3 1 100 15 16 15 812 1 10 1 730 5 5 1 0 0 +583 5 1 1 0 4 1 100 1045 100 195 1892 2 3 0 1436 1 95 95 0 1 +583 5 1 1 0 4 1 100 1045 195 380 1127 7 4 1 896 1 95 185 1 1 +583 5 1 1 0 4 1 100 1045 380 741 1737 3 1 1 973 1 95 361 1 1 +583 5 1 1 0 4 1 100 1045 741 1445 1469 1 9 0 962 1 95 704 0 1 +583 5 1 1 0 4 1 100 1045 1445 2818 3524 8 7 1 2442 1 95 1373 1 1 +583 5 1 1 0 4 1 100 1045 2818 5495 2290 9 2 1 1250 1 95 2677 1 1 +583 5 1 1 0 4 1 100 1045 5495 10715 1634 5 6 0 1047 1 95 5220 0 1 +583 5 1 1 0 4 1 100 1045 10715 20894 2126 6 8 0 952 1 95 10179 0 1 +583 5 1 1 0 4 1 100 1045 20894 1045 2444 4 7 1 868 1 95 19849 0 0 diff --git a/commons/models/cgt_cm.yml b/commons/models/cgt_cm.yml index cdf067cd..9185be68 100644 --- a/commons/models/cgt_cm.yml +++ b/commons/models/cgt_cm.yml @@ -45,7 +45,7 @@ data_columns: gamble_type: '' percentage_staked: '' trial_initial_points: '' - assessment_stages: '' + assessment_stage: '' left_colour_chosen: '' n_left_colour_boxes: '' diff --git a/commons/stan_files/cgt_cm.stan b/commons/stan_files/cgt_cm.stan index 495f0139..f6cdb079 100644 --- a/commons/stan_files/cgt_cm.stan +++ b/commons/stan_files/cgt_cm.stan @@ -15,7 +15,7 @@ data { parameters { // Declare all parameters as vectors for vectorizing // Hyper(group)-parameters - vector[5] mu_p; + vector[5] mu_pr; vector[5] sigma; // Subject-level raw parameters (for Matt trick) @@ -35,17 +35,17 @@ transformed parameters { vector[N] beta; for (i in 1:N) { - alpha[i] = Phi_approx( mu_p[1] + sigma[1] * alpha_pr[i] ) * 5; - c[i] = Phi_approx( mu_p[4] + sigma[4] * c_pr[i] ); + alpha[i] = Phi_approx( mu_pr[1] + sigma[1] * alpha_pr[i] ) * 5; + c[i] = Phi_approx( mu_pr[4] + sigma[4] * c_pr[i] ); } - rho = exp(mu_p[2] + sigma[2] * rho_pr); - gamma = exp(mu_p[3] + sigma[3] * gamma_pr); - beta = exp(mu_p[5] + sigma[5] * beta_pr); + rho = exp(mu_pr[2] + sigma[2] * rho_pr); + gamma = exp(mu_pr[3] + sigma[3] * gamma_pr); + beta = exp(mu_pr[5] + sigma[5] * beta_pr); } model { // Hyperpriors (vectorized) - mu_p ~ normal(0, 1); + mu_pr ~ normal(0, 1); sigma ~ normal(0, 0.2); // Individual parameters @@ -116,11 +116,11 @@ generated quantities { } // Assign group level parameters - mu_alpha = Phi_approx(mu_p[1]) * 5; - mu_rho = exp(mu_p[2]); - mu_gamma = exp(mu_p[3]); - mu_c = Phi_approx(mu_p[4]); - mu_beta = exp(mu_p[5]); + mu_alpha = Phi_approx(mu_pr[1]) * 5; + mu_rho = exp(mu_pr[2]); + mu_gamma = exp(mu_pr[3]); + mu_c = Phi_approx(mu_pr[4]); + mu_beta = exp(mu_pr[5]); { // local section, this saves time and space for (i in 1:N) { From ff3a162c8d0c6c33ca7e10ebb64ab224e7f9c17b Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 19:50:20 +0900 Subject: [PATCH 132/163] Fix example block --- commons/templates/PY_DOCS_TEMPLATE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/commons/templates/PY_DOCS_TEMPLATE.txt b/commons/templates/PY_DOCS_TEMPLATE.txt index 6517f605..28c00358 100644 --- a/commons/templates/PY_DOCS_TEMPLATE.txt +++ b/commons/templates/PY_DOCS_TEMPLATE.txt @@ -140,7 +140,7 @@ .. 
code:: python # Run the model and store results in "output" - output <- {model_function}(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- {model_function}(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') From 532bb91e8e387c4acfdd119b61ca7a20a59a586a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 19:51:13 +0900 Subject: [PATCH 133/163] Generate codes for CGT --- Python/hbayesdm/base.py | 8 +- Python/hbayesdm/models/_cgt_cm.py | 251 ++++++++++++++++++++++++++++ Python/hbayesdm/preprocess_funcs.py | 39 +++-- Python/tests/test_cgt_cm.py | 12 ++ R/DESCRIPTION | 1 + R/NAMESPACE | 1 + R/R/cgt_cm.R | 53 ++++++ R/R/preprocess_funcs.R | 34 ++-- R/man/cgt_cm.Rd | 168 +++++++++++++++++++ R/tests/testthat/test_cgt_cm.R | 10 ++ 10 files changed, 545 insertions(+), 32 deletions(-) create mode 100644 Python/hbayesdm/models/_cgt_cm.py create mode 100644 Python/tests/test_cgt_cm.py create mode 100644 R/R/cgt_cm.R create mode 100644 R/man/cgt_cm.Rd create mode 100644 R/tests/testthat/test_cgt_cm.R diff --git a/Python/hbayesdm/base.py b/Python/hbayesdm/base.py index 54dc4164..64b12fee 100644 --- a/Python/hbayesdm/base.py +++ b/Python/hbayesdm/base.py @@ -1,6 +1,7 @@ import os from pathlib import Path import pickle +import warnings import multiprocessing from abc import ABCMeta, abstractmethod from typing import Tuple, List, Sequence, Dict, Union, Callable, Any @@ -440,9 +441,10 @@ def _prepare_gen_init_vb(self, try: fit = sm.vb(data=data_dict) except Exception: - raise RuntimeWarning( + warnings.warn( 'Failed to get VB estimates for initial values. ' - 'Use random values for initial values.') + 'Use random values for initial values.', + RuntimeWarning, stacklevel=1) return 'random' len_param = len(self.parameters) @@ -828,6 +830,8 @@ def _revert_initial_columns(self, initial_columns Initial column names of raw data, as given by the user. 
""" + print(raw_data.columns) + print(initial_columns) raw_data.columns = initial_columns def _inform_completion(self): diff --git a/Python/hbayesdm/models/_cgt_cm.py b/Python/hbayesdm/models/_cgt_cm.py new file mode 100644 index 00000000..f3700661 --- /dev/null +++ b/Python/hbayesdm/models/_cgt_cm.py @@ -0,0 +1,251 @@ +from typing import Sequence, Union, Any +from collections import OrderedDict + +from numpy import Inf, exp +import pandas as pd + +from hbayesdm.base import TaskModel +from hbayesdm.preprocess_funcs import cgt_preprocess_func + +__all__ = ['cgt_cm'] + + +class CgtCm(TaskModel): + def __init__(self, **kwargs): + super().__init__( + task_name='cgt', + model_name='cm', + model_type='', + data_columns=( + 'subjID', + 'gamble_type', + 'percentage_staked', + 'trial_initial_points', + 'assessment_stage', + 'left_colour_chosen', + 'n_left_colour_boxes', + ), + parameters=OrderedDict([ + ('alpha', (0, 1, 5)), + ('c', (0, 0.5, 1)), + ('rho', (0, 1, Inf)), + ('beta', (0, 1, Inf)), + ('gamma', (0, 1, Inf)), + ]), + regressors=OrderedDict([ + ('y_hat_col', 2), + ('y_hat_bet', 2), + ('bet_utils', 3), + ]), + postpreds=[], + parameters_desc=OrderedDict([ + ('alpha', 'probability distortion'), + ('c', 'color bias'), + ('rho', 'relative loss sensitivity'), + ('beta', 'discounting rate'), + ('gamma', 'choice sensitivity'), + ]), + additional_args_desc=OrderedDict([ + + ]), + **kwargs, + ) + + _preprocess_func = cgt_preprocess_func + + +def cgt_cm( + data: Union[pd.DataFrame, str, None] = None, + niter: int = 4000, + nwarmup: int = 1000, + nchain: int = 4, + ncore: int = 1, + nthin: int = 1, + inits: Union[str, Sequence[float]] = 'vb', + ind_pars: str = 'mean', + model_regressor: bool = False, + vb: bool = False, + inc_postpred: bool = False, + adapt_delta: float = 0.95, + stepsize: float = 1, + max_treedepth: int = 10, + **additional_args: Any) -> TaskModel: + """Cambridge Gambling Task - Cumulative Model + + Hierarchical Bayesian Modeling of the Cambridge Gambling Task [Rogers1999]_ + using Cumulative Model with the following parameters: + "alpha" (probability distortion), "c" (color bias), "rho" (relative loss sensitivity), "beta" (discounting rate), "gamma" (choice sensitivity). + + + + .. [Rogers1999] Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339. + + + + .. codeauthor:: Nathaniel Haines + + User data should contain the behavioral data-set of all subjects of interest for + the current analysis. When loading from a file, the datafile should be a + **tab-delimited** text file, whose rows represent trial-by-trial observations + and columns represent variables. + + For the Cambridge Gambling Task, there should be 7 columns of data + with the labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". It is not necessary for the columns to be + in this particular order; however, it is necessary that they be labeled + correctly and contain the information below: + + - "subjID": A unique identifier for each subject in the data-set. 
+ - "gamble_type": + - "percentage_staked": + - "trial_initial_points": + - "assessment_stage": + - "left_colour_chosen": + - "n_left_colour_boxes": + + .. note:: + User data may contain other columns of data (e.g. ``ReactionTime``, + ``trial_number``, etc.), but only the data within the column names listed + above will be used during the modeling. As long as the necessary columns + mentioned above are present and labeled correctly, there is no need to + remove other miscellaneous data columns. + + Parameters + ---------- + example + Whether to use the example data provided by hBayesDM. + datafile + Path for a TSV file containing the data to be modeled. + Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". + data + Pandas DataFrame object holding the data to be modeled. + Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". + niter + Number of iterations, including warm-up. Defaults to 4000. + nwarmup + Number of iterations used for warm-up only. Defaults to 1000. + + ``nwarmup`` is a numerical value that specifies how many MCMC samples + should not be stored upon the beginning of each chain. For those + familiar with Bayesian methods, this is equivalent to burn-in samples. + Due to the nature of the MCMC algorithm, initial values (i.e., where the + sampling chains begin) can have a heavy influence on the generated + posterior distributions. The ``nwarmup`` argument can be set to a + higher number in order to curb the effects that initial values have on + the resulting posteriors. + nchain + Number of Markov chains to run. Defaults to 4. + + ``nchain`` is a numerical value that specifies how many chains (i.e., + independent sampling sequences) should be used to draw samples from + the posterior distribution. Since the posteriors are generated from a + sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the + sampling is complete, it is possible to check the multiple chains for + convergence by running the following line of code: + + .. code:: python + + output.plot(type='trace') + ncore + Number of CPUs to be used for running. Defaults to 1. + nthin + Every ``nthin``-th sample will be used to generate the posterior + distribution. Defaults to 1. A higher number can be used when + auto-correlation within the MCMC sampling is high. + + ``nthin`` is a numerical value that specifies the "skipping" behavior + of the MCMC sampler. That is, only every ``nthin``-th sample is used to + generate posterior distributions. By default, ``nthin`` is equal to 1, + meaning that every sample is used to generate the posterior. + inits + String or list specifying how the initial values should be generated. + Options are ``'fixed'`` or ``'random'``, or your own initial values. + ind_pars + String specifying how to summarize the individual parameters. + Current options are: ``'mean'``, ``'median'``, or ``'mode'``. + model_regressor + Whether to export model-based regressors. For this model they are: "y_hat_col", "y_hat_bet", "bet_utils". + vb + Whether to use variational inference to approximately draw from a + posterior distribution. Defaults to ``False``. 
+ inc_postpred + **(Currently not available.)** Include trial-level posterior predictive simulations in + model output (may greatly increase file size). Defaults to ``False``. + adapt_delta + Floating point value representing the target acceptance probability of a new + sample in the MCMC chain. Must be between 0 and 1. See note below. + stepsize + Integer value specifying the size of each leapfrog step that the MCMC sampler + can take on each new iteration. See note below. + max_treedepth + Integer value specifying how many leapfrog steps the MCMC sampler can take + on each new iteration. See note below. + **additional_args + Not used for this model. + + .. note:: + + ``adapt_delta``, ``stepsize``, and ``max_treedepth`` are advanced options that + give the user more control over Stan's MCMC sampler. It is recommended that + only advanced users change the default values, as alterations can profoundly + change the sampler's behavior. See [Hoffman2014]_ for more information on the + sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm + Parameters' of the `Stan User's Guide and Reference Manual`__. + + .. [Hoffman2014] + Hoffman, M. D., & Gelman, A. (2014). + The No-U-Turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. + Journal of Machine Learning Research, 15(1), 1593-1623. + + __ http://mc-stan.org/users/documentation/ + + Returns + ------- + model_data + An ``hbayesdm.TaskModel`` instance with the following components: + + - ``model``: String value that is the name of the model ('cgt_cm'). + - ``all_ind_pars``: Pandas DataFrame containing the summarized parameter values + (as specified by ``ind_pars``) for each subject. + - ``par_vals``: OrderedDict holding the posterior samples over different parameters. + - ``fit``: A PyStan StanFit object that contains the fitted Stan model. + - ``raw_data``: Pandas DataFrame containing the raw data used to fit the model, + as specified by the user. + - ``model_regressor``: Dict holding the extracted model-based regressors. + + Examples + -------- + + .. 
code:: python + + # Run the model and store results in "output" + output <- cgt_cm(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) + + # Visually check convergence of the sampling chains (should look like "hairy caterpillars") + output.plot(type='trace') + + # Check Rhat values (all Rhat values should be less than or equal to 1.1) + rhat(output, less=1.1) + + # Plot posterior distributions of the hyper-parameters (distributions should be unimodal) + output.plot() + + # Show the LOOIC and WAIC model fit estimates + print_fit(output) + """ + return CgtCm( + data=data, + niter=niter, + nwarmup=nwarmup, + nchain=nchain, + ncore=ncore, + nthin=nthin, + inits=inits, + ind_pars=ind_pars, + model_regressor=model_regressor, + vb=vb, + inc_postpred=inc_postpred, + adapt_delta=adapt_delta, + stepsize=stepsize, + max_treedepth=max_treedepth, + **additional_args) diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py index 171d83d9..022a5232 100644 --- a/Python/hbayesdm/preprocess_funcs.py +++ b/Python/hbayesdm/preprocess_funcs.py @@ -864,52 +864,59 @@ def cgt_preprocess_func(self, raw_data, general_info, additional_args): t_subjs = general_info['t_subjs'] t_max = general_info['t_max'] - uniq_bets = np.unique(raw_data['percentage_staked']) + uniq_bets = np.unique(raw_data['percentagestaked']) n_bets = len(uniq_bets) bets_asc = np.sort(uniq_bets / 100) bets_dsc = np.flip(np.sort(uniq_bets / 100)) + bet_delay = np.arange(n_bets) / 4 - bet_time = raw_data['percentage_staked'] / 100 + bet_time = raw_data['percentagestaked'] / 100 for b in range(n_bets): bet_time[bet_time == bets_asc[b]] = b + 1 - raw_data['bet_time'] = np.where(raw_data['gamble_type'] == 0, + raw_data['bet_time'] = np.where(raw_data['gambletype'] == 0, n_bets + 1 - bet_time, bet_time) - col_chosen = bet_chosen = np.full((n_subj, t_max), 0, dtype=int) - prop_red = prop_chosen = np.full((n_subj, t_max), 0, dtype=float) - gain = loss = np.full((n_subj, t_max, n_bets), 0, dtype=float) + col_chosen = np.full((n_subj, t_max), 0, dtype=int) + bet_chosen = np.full((n_subj, t_max), 0, dtype=int) + prop_red = np.full((n_subj, t_max), 0, dtype=float) + prop_chosen = np.full((n_subj, t_max), 0, dtype=float) + gain = np.full((n_subj, t_max, n_bets), 0, dtype=float) + loss = np.full((n_subj, t_max, n_bets), 0, dtype=float) for s in range(n_subj): t = t_subjs[s] _, subj_data = next(subj_group) - col_chosen[s, :t] = np.where(subj_data['left_colour_chosen'] == 1, - 1, 2) + col_chosen[s, :t] = np.where(subj_data['leftcolourchosen'] == 1, 1, 2) bet_chosen[s, :t] = subj_data['bet_time'] - prop_red[s, :t] = subj_data['n_left_colour_boxes'] / 10 - prop_chosen[s, :t] = np.where(subj_data['left_colour_chosen'] == 1, + prop_red[s, :t] = subj_data['nleftcolourboxes'] / 10 + prop_chosen[s, :t] = np.where(subj_data['leftcolourchosen'] == 1, prop_red[s][:t], 1 - prop_red[s][:t]) for b in range(n_bets): - gain[s, :t, b] = subj_data['trial_initial_points'] / 100 \ - + subj_data['trial_initial_points'] / 100 \ - * np.where(subj_data['gamble_type'] == 1, + gain[s, :t, b] = subj_data['trialinitialpoints'] / 100 \ + + subj_data['trialinitialpoints'] / 100 \ + * np.where(subj_data['gambletype'] == 1, bets_asc[b], bets_dsc[b]) - loss[s, :t, b] = subj_data['trial_initial_points'] / 100 \ - - subj_data['trial_initial_points'] / 100 \ - * np.where(subj_data['gamble_type'] == 1, + loss[s, :t, b] = subj_data['trialinitialpoints'] / 100 \ + - subj_data['trialinitialpoints'] / 100 \ + * np.where(subj_data['gambletype'] == 1, bets_asc[b], 
bets_dsc[b]) + # Remove the unnecessary intermediate column + raw_data.drop(columns='bet_time', inplace=True) + # Wrap into a dict for pystan data_dict = { 'N': n_subj, 'T': t_max, 'B': n_bets, 'Tsubj': t_subjs, + 'bet_delay': bet_delay, 'gain': gain, 'loss': loss, 'prop_red': prop_red, diff --git a/Python/tests/test_cgt_cm.py b/Python/tests/test_cgt_cm.py new file mode 100644 index 00000000..9d44987a --- /dev/null +++ b/Python/tests/test_cgt_cm.py @@ -0,0 +1,12 @@ +import pytest + +from hbayesdm.models import cgt_cm + + +def test_cgt_cm(): + _ = cgt_cm( + data="example", niter=10, nwarmup=5, nchain=1, ncore=1) + + +if __name__ == '__main__': + pytest.main() diff --git a/R/DESCRIPTION b/R/DESCRIPTION index a54f1dfa..8f4d53aa 100644 --- a/R/DESCRIPTION +++ b/R/DESCRIPTION @@ -61,6 +61,7 @@ Collate: 'bandit4arm_lapse_decay.R' 'bandit4arm_singleA_lapse.R' 'bart_par4.R' + 'cgt_cm.R' 'choiceRT_ddm.R' 'choiceRT_ddm_single.R' 'choiceRT_lba.R' diff --git a/R/NAMESPACE b/R/NAMESPACE index a1066198..41604443 100644 --- a/R/NAMESPACE +++ b/R/NAMESPACE @@ -10,6 +10,7 @@ export(bandit4arm_lapse) export(bandit4arm_lapse_decay) export(bandit4arm_singleA_lapse) export(bart_par4) +export(cgt_cm) export(choiceRT_ddm) export(choiceRT_ddm_single) export(cra_exp) diff --git a/R/R/cgt_cm.R b/R/R/cgt_cm.R new file mode 100644 index 00000000..7b5821d0 --- /dev/null +++ b/R/R/cgt_cm.R @@ -0,0 +1,53 @@ +#' @templateVar MODEL_FUNCTION cgt_cm +#' @templateVar CONTRIBUTOR \href{http://haines-lab.com/}{Nathaniel Haines} <\email{haines.175@@osu.edu}> +#' @templateVar TASK_NAME Cambridge Gambling Task +#' @templateVar TASK_CODE cgt +#' @templateVar TASK_CITE (Rogers et al., 1999) +#' @templateVar MODEL_NAME Cumulative Model +#' @templateVar MODEL_CODE cm +#' @templateVar MODEL_CITE +#' @templateVar MODEL_TYPE Hierarchical +#' @templateVar DATA_COLUMNS "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes" +#' @templateVar PARAMETERS \code{alpha} (probability distortion), \code{c} (color bias), \code{rho} (relative loss sensitivity), \code{beta} (discounting rate), \code{gamma} (choice sensitivity) +#' @templateVar REGRESSORS "y_hat_col", "y_hat_bet", "bet_utils" +#' @templateVar POSTPREDS +#' @templateVar LENGTH_DATA_COLUMNS 7 +#' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.} +#' @templateVar DETAILS_DATA_2 \item{gamble_type}{} +#' @templateVar DETAILS_DATA_3 \item{percentage_staked}{} +#' @templateVar DETAILS_DATA_4 \item{trial_initial_points}{} +#' @templateVar DETAILS_DATA_5 \item{assessment_stage}{} +#' @templateVar DETAILS_DATA_6 \item{left_colour_chosen}{} +#' @templateVar DETAILS_DATA_7 \item{n_left_colour_boxes}{} +#' @templateVar LENGTH_ADDITIONAL_ARGS 0 +#' +#' @template model-documentation +#' +#' @export +#' @include hBayesDM_model.R +#' @include preprocess_funcs.R +#' +#' @references +#' Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339. 
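A note on the NumPy fix in the preprocessing diff above: a chained assignment such as ``col_chosen = bet_chosen = np.full(...)`` binds both names to one shared array, so per-subject writes through one name silently clobber the other; the patch therefore allocates each array separately. (R's copy-on-modify semantics make the analogous chained ``<-`` safe, so the parallel R change is stylistic.) A minimal sketch of the pitfall, with illustrative array names:

.. code:: python

    import numpy as np

    # Chained assignment: both names point at the SAME buffer.
    a = b = np.full((2, 3), 0, dtype=int)
    a[0, 0] = 99
    assert b[0, 0] == 99   # b changed too -- a and b are aliases
    assert a is b

    # Separate allocations (as in the patch above) stay independent.
    a = np.full((2, 3), 0, dtype=int)
    b = np.full((2, 3), 0, dtype=int)
    a[0, 0] = 99
    assert b[0, 0] == 0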
+ +#' + +cgt_cm <- hBayesDM_model( + task_name = "cgt", + model_name = "cm", + model_type = "", + data_columns = c("subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes"), + parameters = list( + "alpha" = c(0, 1, 5), + "c" = c(0, 0.5, 1), + "rho" = c(0, 1, Inf), + "beta" = c(0, 1, Inf), + "gamma" = c(0, 1, Inf) + ), + regressors = list( + "y_hat_col" = 2, + "y_hat_bet" = 2, + "bet_utils" = 3 + ), + postpreds = NULL, + preprocess_func = cgt_preprocess_func) diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R index c31cf036..77e62c76 100644 --- a/R/R/preprocess_funcs.R +++ b/R/R/preprocess_funcs.R @@ -858,36 +858,41 @@ cgt_preprocess_func <- function(raw_data, general_info) { t_subjs <- general_info$t_subjs t_max <- general_info$t_max - n_bets <- length(unique(raw_data$percentage_staked)) - bets_asc <- sort(unique(raw_data$percentage_staked) / 100) - bets_dsc <- sort(unique(raw_data$percentage_staked) / 100, decreasing = T) + uniq_bet <- unique(raw_data$percentagestaked) + n_bets <- length(uniq_bet) + bets_asc <- sort(uniq_bet / 100) + bets_dsc <- sort(uniq_bet / 100, decreasing = T) + bet_delay <- (1:n_bets - 1) / 4 - bet_time <- raw_data$percentage_staked / 100 + bet_time <- raw_data$percentagestaked / 100 for (b in 1:n_bets) { bet_time[bet_time == bets_asc[b]] <- b } - raw_data$bet_time <- ifelse(raw_data$gamble_type == 0, + raw_data$bet_time <- ifelse(raw_data$gambletype == 0, n_bets + 1 - bet_time, bet_time) - col_chosen <- bet_chosen <- prop_red <- prop_chosen <- - array(0, c(n_subj, t_max)) - gain <- loss <- array(0, c(n_subj, t_max, n_bets)) + col_chosen <- array(0, c(n_subj, t_max)) + bet_chosen <- array(0, c(n_subj, t_max)) + prop_red <- array(0, c(n_subj, t_max)) + prop_chosen <- array(0, c(n_subj, t_max)) + gain <- array(0, c(n_subj, t_max, n_bets)) + loss <- array(0, c(n_subj, t_max, n_bets)) for (i in 1:n_subj) { - t <- t_subj[i] + t <- t_subjs[i] DT_subj <- raw_data[subjid == subjs[i]] - col_chosen [i, 1:t] <- ifelse(DT_subj$left_colour_chosen == 1, 1, 2) + col_chosen [i, 1:t] <- ifelse(DT_subj$leftcolourchosen == 1, 1, 2) bet_chosen [i, 1:t] <- DT_subj$bet_time - prop_red [i, 1:t] <- DT_subj$n_left_colour_boxes / 10 - prop_chosen[i, 1:t] <- ifelse(DT_subj$left_colour_chosen == 1, + prop_red [i, 1:t] <- DT_subj$nleftcolourboxes / 10 + prop_chosen[i, 1:t] <- ifelse(DT_subj$leftcolourchosen == 1, prop_red[i, 1:t], 1 - prop_red[i, 1:t]) for (b in 1:n_bets) { - gain[i, 1:t, b] <- with(DT_subj, trial_initial_points / 100 + trial_initial_points / 100 * ifelse(gamble_type == 1, bets_asc[b], bets_dsc[b])) - loss[i, 1:t, b] <- with(DT_subj, trial_initial_points / 100 - trial_initial_points / 100 * ifelse(gamble_type == 1, bets_asc[b], bets_dsc[b])) + gain[i, 1:t, b] <- with(DT_subj, trialinitialpoints / 100 + trialinitialpoints / 100 * ifelse(gambletype == 1, bets_asc[b], bets_dsc[b])) + loss[i, 1:t, b] <- with(DT_subj, trialinitialpoints / 100 - trialinitialpoints / 100 * ifelse(gambletype == 1, bets_asc[b], bets_dsc[b])) } } @@ -897,6 +902,7 @@ cgt_preprocess_func <- function(raw_data, general_info) { T = t_max, B = n_bets, Tsubj = t_subjs, + bet_delay = bet_delay, gain = gain, loss = loss, prop_red = prop_red, diff --git a/R/man/cgt_cm.Rd b/R/man/cgt_cm.Rd new file mode 100644 index 00000000..fdd8b088 --- /dev/null +++ b/R/man/cgt_cm.Rd @@ -0,0 +1,168 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/cgt_cm.R +\name{cgt_cm} +\alias{cgt_cm} +\title{Cumulative 
Model}
+\usage{
+cgt_cm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
+  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
+  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
+  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
+}
+\arguments{
+\item{data}{Data to be modeled. It should be given as a data.frame object,
+a filepath for a tab-separated txt file, \code{"example"} to use example data, or
+\code{"choose"} to choose data with an interactive window.
+Columns in the dataset must include:
+"subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". See \bold{Details} below for more information.}
+
+\item{niter}{Number of iterations, including warm-up. Defaults to 4000.}
+
+\item{nwarmup}{Number of iterations used for warm-up only. Defaults to 1000.}
+
+\item{nchain}{Number of Markov chains to run. Defaults to 4.}
+
+\item{ncore}{Number of CPUs to be used for running. Defaults to 1.}
+
+\item{nthin}{Every \code{i == nthin} sample will be used to generate the posterior distribution.
+Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is
+high.}
+
+\item{inits}{Character value specifying how the initial values should be generated.
+Possible options are "vb" (default), "fixed", "random", or your own initial values.}
+
+\item{indPars}{Character value specifying how to summarize individual parameters. Current options
+are: "mean", "median", or "mode".}
+
+\item{modelRegressor}{Whether to export model-based regressors (\code{TRUE} or \code{FALSE}).
+For this model they are: "y_hat_col", "y_hat_bet", "bet_utils".}
+
+\item{vb}{Use variational inference to approximately draw from a posterior distribution. Defaults
+to \code{FALSE}.}
+
+\item{inc_postpred}{Include trial-level posterior predictive simulations in model output (may greatly increase file
+size). Defaults to \code{FALSE}.
+Not available for this model.}
+
+\item{adapt_delta}{Floating point value representing the target acceptance probability of a new
+sample in the MCMC chain. Must be between 0 and 1. See \bold{Details} below.}
+
+\item{stepsize}{Integer value specifying the size of each leapfrog step that the MCMC sampler can
+take on each new iteration. See \bold{Details} below.}
+
+\item{max_treedepth}{Integer value specifying how many leapfrog steps the MCMC sampler can take
+on each new iteration. See \bold{Details} below.}
+
+\item{...}{For this model, there is no model-specific argument.}
+}
+\value{
+A class "hBayesDM" object \code{modelData} with the following components:
+\describe{
+  \item{model}{Character value that is the name of the model (\code{"cgt_cm"}).}
+  \item{allIndPars}{Data.frame containing the summarized parameter values (as specified by
+    \code{indPars}) for each subject.}
+  \item{parVals}{List object containing the posterior samples over different parameters.}
+  \item{fit}{A class \code{\link[rstan]{stanfit}} object that contains the fitted Stan
+    model.}
+  \item{rawdata}{Data.frame containing the raw data used to fit the model, as specified by
+    the user.}
+
+
+  \item{modelRegressor}{List object containing the extracted model-based regressors.}
+}
+}
+\description{
+Hierarchical Bayesian Modeling of the Cambridge Gambling Task using Cumulative Model.
+It has the following parameters: \code{alpha} (probability distortion), \code{c} (color bias), \code{rho} (relative loss sensitivity), \code{beta} (discounting rate), \code{gamma} (choice sensitivity).
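The sampler arguments documented above combine multiplicatively: each chain discards the first ``nwarmup`` draws and keeps every ``nthin``-th of the remainder. A back-of-the-envelope sketch of the resulting posterior sample count, assuming Stan's convention that ``niter`` includes warm-up (the helper below is illustrative, not a package function):

.. code:: python

    def n_posterior_draws(niter=4000, nwarmup=1000, nchain=4, nthin=1):
        """Post-warmup draws retained across all chains."""
        return ((niter - nwarmup) // nthin) * nchain

    assert n_posterior_draws() == 12000                          # defaults above
    assert n_posterior_draws(niter=2000, nwarmup=1000) == 4000   # the doc example settings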
+ +\itemize{ + \item \strong{Task}: Cambridge Gambling Task (Rogers et al., 1999) + \item \strong{Model}: Cumulative Model +} +} +\details{ +This section describes some of the function arguments in greater detail. + +\strong{data} should be assigned a character value specifying the full path and name (including + extension information, e.g. ".txt") of the file that contains the behavioral data-set of all + subjects of interest for the current analysis. The file should be a \strong{tab-delimited} text + file, whose rows represent trial-by-trial observations and columns represent variables.\cr +For the Cambridge Gambling Task, there should be 7 columns of data with the + labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". It is not necessary for the columns to be in this particular order, + however it is necessary that they be labeled correctly and contain the information below: +\describe{ + \item{subjID}{A unique identifier for each subject in the data-set.} + \item{gamble_type}{} + \item{percentage_staked}{} + \item{trial_initial_points}{} + \item{assessment_stage}{} + \item{left_colour_chosen}{} + \item{n_left_colour_boxes}{} + + +} +\strong{*}Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", + etc.), but only the data within the column names listed above will be used during the modeling. + As long as the necessary columns mentioned above are present and labeled correctly, there is no + need to remove other miscellaneous data columns. + +\strong{nwarmup} is a numerical value that specifies how many MCMC samples should not be stored + upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent + to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the + sampling chains begin) can have a heavy influence on the generated posterior distributions. The + \code{nwarmup} argument can be set to a high number in order to curb the effects that initial + values have on the resulting posteriors. + +\strong{nchain} is a numerical value that specifies how many chains (i.e. independent sampling + sequences) should be used to draw samples from the posterior distribution. Since the posteriors + are generated from a sampling process, it is good practice to run multiple chains to ensure + that a reasonably representative posterior is attained. When the sampling is complete, it is + possible to check the multiple chains for convergence by running the following line of code: + \code{plot(output, type = "trace")}. The trace-plot should resemble a "furry caterpillar". + +\strong{nthin} is a numerical value that specifies the "skipping" behavior of the MCMC sampler, + using only every \code{i == nthin} samples to generate posterior distributions. By default, + \code{nthin} is equal to 1, meaning that every sample is used to generate the posterior. + +\strong{Control Parameters:} \code{adapt_delta}, \code{stepsize}, and \code{max_treedepth} are + advanced options that give the user more control over Stan's MCMC sampler. It is recommended + that only advanced users change the default values, as alterations can profoundly change the + sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in + Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for + more information on the sampler control parameters. One can also refer to 'Section 34.2. 
HMC + Algorithm Parameters' of the \href{http://mc-stan.org/users/documentation/}{Stan User's Guide + and Reference Manual}, or to the help page for \code{\link[rstan]{stan}} for a less technical + description of these arguments. + +\subsection{Contributors}{\href{http://haines-lab.com/}{Nathaniel Haines} <\email{haines.175@osu.edu}>} +} +\examples{ +\dontrun{ +# Run the model with a given data.frame as df +output <- cgt_cm( + data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Run the model with example data +output <- cgt_cm( + data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4) + +# Visually check convergence of the sampling chains (should look like 'hairy caterpillars') +plot(output, type = "trace") + +# Check Rhat values (all Rhat values should be less than or equal to 1.1) +rhat(output) + +# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal) +plot(output) + +# Show the WAIC and LOOIC model fit estimates +printFit(output) +} +} +\references{ +Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339. +} +\seealso{ +We refer users to our in-depth tutorial for an example of using hBayesDM: + \url{https://rpubs.com/CCSL/hBayesDM} +} diff --git a/R/tests/testthat/test_cgt_cm.R b/R/tests/testthat/test_cgt_cm.R new file mode 100644 index 00000000..c97670c0 --- /dev/null +++ b/R/tests/testthat/test_cgt_cm.R @@ -0,0 +1,10 @@ +context("Test cgt_cm") +library(hBayesDM) + +test_that("Test cgt_cm", { + # Do not run this test on CRAN + skip_on_cran() + + expect_output(cgt_cm( + data = "example", niter = 10, nwarmup = 5, nchain = 1, ncore = 1)) +}) From 87c27e9d9e26c4b0f200cf97fe89c39ac72b42fb Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 19:51:25 +0900 Subject: [PATCH 134/163] Re-generate other Python codes with updated documentations --- Python/hbayesdm/models/__init__.py | 2 ++ Python/hbayesdm/models/_bandit2arm_delta.py | 2 +- Python/hbayesdm/models/_bandit4arm2_kalman_filter.py | 2 +- Python/hbayesdm/models/_bandit4arm_2par_lapse.py | 2 +- Python/hbayesdm/models/_bandit4arm_4par.py | 2 +- Python/hbayesdm/models/_bandit4arm_lapse.py | 2 +- Python/hbayesdm/models/_bandit4arm_lapse_decay.py | 2 +- Python/hbayesdm/models/_bandit4arm_singleA_lapse.py | 2 +- Python/hbayesdm/models/_bart_par4.py | 2 +- Python/hbayesdm/models/_choiceRT_ddm.py | 2 +- Python/hbayesdm/models/_choiceRT_ddm_single.py | 2 +- Python/hbayesdm/models/_cra_exp.py | 2 +- Python/hbayesdm/models/_cra_linear.py | 2 +- Python/hbayesdm/models/_dbdm_prob_weight.py | 2 +- Python/hbayesdm/models/_dd_cs.py | 2 +- Python/hbayesdm/models/_dd_cs_single.py | 2 +- Python/hbayesdm/models/_dd_exp.py | 2 +- Python/hbayesdm/models/_dd_hyperbolic.py | 2 +- Python/hbayesdm/models/_dd_hyperbolic_single.py | 2 +- Python/hbayesdm/models/_gng_m1.py | 2 +- Python/hbayesdm/models/_gng_m2.py | 2 +- Python/hbayesdm/models/_gng_m3.py | 2 +- Python/hbayesdm/models/_gng_m4.py | 2 +- Python/hbayesdm/models/_igt_orl.py | 2 +- Python/hbayesdm/models/_igt_pvl_decay.py | 2 +- Python/hbayesdm/models/_igt_pvl_delta.py | 2 +- Python/hbayesdm/models/_igt_vpp.py | 2 +- 
Python/hbayesdm/models/_peer_ocu.py | 2 +- Python/hbayesdm/models/_prl_ewa.py | 2 +- Python/hbayesdm/models/_prl_fictitious.py | 2 +- Python/hbayesdm/models/_prl_fictitious_multipleB.py | 2 +- Python/hbayesdm/models/_prl_fictitious_rp.py | 2 +- Python/hbayesdm/models/_prl_fictitious_rp_woa.py | 2 +- Python/hbayesdm/models/_prl_fictitious_woa.py | 2 +- Python/hbayesdm/models/_prl_rp.py | 2 +- Python/hbayesdm/models/_prl_rp_multipleB.py | 2 +- Python/hbayesdm/models/_pst_gainloss_Q.py | 2 +- Python/hbayesdm/models/_ra_noLA.py | 2 +- Python/hbayesdm/models/_ra_noRA.py | 2 +- Python/hbayesdm/models/_ra_prospect.py | 2 +- Python/hbayesdm/models/_rdt_happiness.py | 2 +- Python/hbayesdm/models/_ts_par4.py | 2 +- Python/hbayesdm/models/_ts_par6.py | 2 +- Python/hbayesdm/models/_ts_par7.py | 2 +- Python/hbayesdm/models/_ug_bayes.py | 2 +- Python/hbayesdm/models/_ug_delta.py | 2 +- Python/hbayesdm/models/_wcs_sql.py | 2 +- 47 files changed, 48 insertions(+), 46 deletions(-) diff --git a/Python/hbayesdm/models/__init__.py b/Python/hbayesdm/models/__init__.py index b0d89462..65840f8a 100644 --- a/Python/hbayesdm/models/__init__.py +++ b/Python/hbayesdm/models/__init__.py @@ -6,6 +6,7 @@ from ._bandit4arm_lapse_decay import bandit4arm_lapse_decay from ._bandit4arm_singleA_lapse import bandit4arm_singleA_lapse from ._bart_par4 import bart_par4 +from ._cgt_cm import cgt_cm from ._choiceRT_ddm import choiceRT_ddm from ._choiceRT_ddm_single import choiceRT_ddm_single from ._cra_exp import cra_exp @@ -54,6 +55,7 @@ 'bandit4arm_lapse_decay', 'bandit4arm_singleA_lapse', 'bart_par4', + 'cgt_cm', 'choiceRT_ddm', 'choiceRT_ddm_single', 'cra_exp', diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py index 5d1eec0d..9f798a67 100644 --- a/Python/hbayesdm/models/_bandit2arm_delta.py +++ b/Python/hbayesdm/models/_bandit2arm_delta.py @@ -203,7 +203,7 @@ def bandit2arm_delta( .. code:: python # Run the model and store results in "output" - output <- bandit2arm_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit2arm_delta(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py index 5b664a14..9fbf2696 100644 --- a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py +++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py @@ -210,7 +210,7 @@ def bandit4arm2_kalman_filter( .. code:: python # Run the model and store results in "output" - output <- bandit4arm2_kalman_filter(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm2_kalman_filter(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py index afb0c61b..630feeb6 100644 --- a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py @@ -206,7 +206,7 @@ def bandit4arm_2par_lapse( .. 
code:: python # Run the model and store results in "output" - output <- bandit4arm_2par_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm_2par_lapse(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py index 9a5556e3..e947485a 100644 --- a/Python/hbayesdm/models/_bandit4arm_4par.py +++ b/Python/hbayesdm/models/_bandit4arm_4par.py @@ -208,7 +208,7 @@ def bandit4arm_4par( .. code:: python # Run the model and store results in "output" - output <- bandit4arm_4par(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm_4par(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py index 63b80a7c..4b825f4d 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse.py @@ -210,7 +210,7 @@ def bandit4arm_lapse( .. code:: python # Run the model and store results in "output" - output <- bandit4arm_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm_lapse(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py index 9c916ec6..d81887e2 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py @@ -212,7 +212,7 @@ def bandit4arm_lapse_decay( .. code:: python # Run the model and store results in "output" - output <- bandit4arm_lapse_decay(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm_lapse_decay(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py index d25435ea..4a8aae00 100644 --- a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py @@ -208,7 +208,7 @@ def bandit4arm_singleA_lapse( .. code:: python # Run the model and store results in "output" - output <- bandit4arm_singleA_lapse(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bandit4arm_singleA_lapse(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index 3397e91b..aca370e4 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -211,7 +211,7 @@ def bart_par4( .. 
code:: python # Run the model and store results in "output" - output <- bart_par4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- bart_par4(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py index 2b6e8ef8..d889bd51 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm.py +++ b/Python/hbayesdm/models/_choiceRT_ddm.py @@ -212,7 +212,7 @@ def choiceRT_ddm( .. code:: python # Run the model and store results in "output" - output <- choiceRT_ddm(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- choiceRT_ddm(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py index a179e5d5..bdb9da03 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm_single.py +++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py @@ -212,7 +212,7 @@ def choiceRT_ddm_single( .. code:: python # Run the model and store results in "output" - output <- choiceRT_ddm_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- choiceRT_ddm_single(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py index 54133f11..1f31ccdf 100644 --- a/Python/hbayesdm/models/_cra_exp.py +++ b/Python/hbayesdm/models/_cra_exp.py @@ -213,7 +213,7 @@ def cra_exp( .. code:: python # Run the model and store results in "output" - output <- cra_exp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- cra_exp(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py index 709080cd..542cf77c 100644 --- a/Python/hbayesdm/models/_cra_linear.py +++ b/Python/hbayesdm/models/_cra_linear.py @@ -213,7 +213,7 @@ def cra_linear( .. code:: python # Run the model and store results in "output" - output <- cra_linear(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- cra_linear(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py index e8e8f514..50cede01 100644 --- a/Python/hbayesdm/models/_dbdm_prob_weight.py +++ b/Python/hbayesdm/models/_dbdm_prob_weight.py @@ -218,7 +218,7 @@ def dbdm_prob_weight( .. 
code:: python # Run the model and store results in "output" - output <- dbdm_prob_weight(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dbdm_prob_weight(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py index 7ad63c94..8fb4ae9f 100644 --- a/Python/hbayesdm/models/_dd_cs.py +++ b/Python/hbayesdm/models/_dd_cs.py @@ -210,7 +210,7 @@ def dd_cs( .. code:: python # Run the model and store results in "output" - output <- dd_cs(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dd_cs(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py index 158401dc..206cf8f4 100644 --- a/Python/hbayesdm/models/_dd_cs_single.py +++ b/Python/hbayesdm/models/_dd_cs_single.py @@ -210,7 +210,7 @@ def dd_cs_single( .. code:: python # Run the model and store results in "output" - output <- dd_cs_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dd_cs_single(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py index da015bc4..f1e3d47c 100644 --- a/Python/hbayesdm/models/_dd_exp.py +++ b/Python/hbayesdm/models/_dd_exp.py @@ -208,7 +208,7 @@ def dd_exp( .. code:: python # Run the model and store results in "output" - output <- dd_exp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dd_exp(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py index f76e735f..74482823 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic.py +++ b/Python/hbayesdm/models/_dd_hyperbolic.py @@ -208,7 +208,7 @@ def dd_hyperbolic( .. code:: python # Run the model and store results in "output" - output <- dd_hyperbolic(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dd_hyperbolic(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py index cefaf5bf..27e19781 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic_single.py +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -208,7 +208,7 @@ def dd_hyperbolic_single( .. 
code:: python # Run the model and store results in "output" - output <- dd_hyperbolic_single(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- dd_hyperbolic_single(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py index 47cf7597..a334d4e6 100644 --- a/Python/hbayesdm/models/_gng_m1.py +++ b/Python/hbayesdm/models/_gng_m1.py @@ -209,7 +209,7 @@ def gng_m1( .. code:: python # Run the model and store results in "output" - output <- gng_m1(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- gng_m1(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py index fc2aff4e..e7462fb3 100644 --- a/Python/hbayesdm/models/_gng_m2.py +++ b/Python/hbayesdm/models/_gng_m2.py @@ -211,7 +211,7 @@ def gng_m2( .. code:: python # Run the model and store results in "output" - output <- gng_m2(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- gng_m2(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py index b68af254..78dd8e93 100644 --- a/Python/hbayesdm/models/_gng_m3.py +++ b/Python/hbayesdm/models/_gng_m3.py @@ -214,7 +214,7 @@ def gng_m3( .. code:: python # Run the model and store results in "output" - output <- gng_m3(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- gng_m3(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py index f23e8980..1eb31993 100644 --- a/Python/hbayesdm/models/_gng_m4.py +++ b/Python/hbayesdm/models/_gng_m4.py @@ -216,7 +216,7 @@ def gng_m4( .. code:: python # Run the model and store results in "output" - output <- gng_m4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- gng_m4(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py index ba5b5df7..a1875675 100644 --- a/Python/hbayesdm/models/_igt_orl.py +++ b/Python/hbayesdm/models/_igt_orl.py @@ -212,7 +212,7 @@ def igt_orl( .. code:: python # Run the model and store results in "output" - output <- igt_orl(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- igt_orl(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py index 7226598a..22cf816f 100644 --- a/Python/hbayesdm/models/_igt_pvl_decay.py +++ b/Python/hbayesdm/models/_igt_pvl_decay.py @@ -210,7 +210,7 @@ def igt_pvl_decay( .. 
code:: python # Run the model and store results in "output" - output <- igt_pvl_decay(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- igt_pvl_decay(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py index 4c1589d8..bde340e0 100644 --- a/Python/hbayesdm/models/_igt_pvl_delta.py +++ b/Python/hbayesdm/models/_igt_pvl_delta.py @@ -210,7 +210,7 @@ def igt_pvl_delta( .. code:: python # Run the model and store results in "output" - output <- igt_pvl_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- igt_pvl_delta(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py index 46656f7b..92e63edb 100644 --- a/Python/hbayesdm/models/_igt_vpp.py +++ b/Python/hbayesdm/models/_igt_vpp.py @@ -218,7 +218,7 @@ def igt_vpp( .. code:: python # Run the model and store results in "output" - output <- igt_vpp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- igt_vpp(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py index 67e093fb..b988950a 100644 --- a/Python/hbayesdm/models/_peer_ocu.py +++ b/Python/hbayesdm/models/_peer_ocu.py @@ -214,7 +214,7 @@ def peer_ocu( .. code:: python # Run the model and store results in "output" - output <- peer_ocu(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- peer_ocu(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py index a7137045..3791642f 100644 --- a/Python/hbayesdm/models/_prl_ewa.py +++ b/Python/hbayesdm/models/_prl_ewa.py @@ -208,7 +208,7 @@ def prl_ewa( .. code:: python # Run the model and store results in "output" - output <- prl_ewa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_ewa(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py index 1ce3214d..1e1a52e1 100644 --- a/Python/hbayesdm/models/_prl_fictitious.py +++ b/Python/hbayesdm/models/_prl_fictitious.py @@ -209,7 +209,7 @@ def prl_fictitious( .. 
code:: python # Run the model and store results in "output" - output <- prl_fictitious(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_fictitious(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py index 5f80390f..072580ab 100644 --- a/Python/hbayesdm/models/_prl_fictitious_multipleB.py +++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py @@ -211,7 +211,7 @@ def prl_fictitious_multipleB( .. code:: python # Run the model and store results in "output" - output <- prl_fictitious_multipleB(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_fictitious_multipleB(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py index 06524a50..f842ac71 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp.py @@ -212,7 +212,7 @@ def prl_fictitious_rp( .. code:: python # Run the model and store results in "output" - output <- prl_fictitious_rp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_fictitious_rp(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py index 32934584..b40ec495 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py @@ -210,7 +210,7 @@ def prl_fictitious_rp_woa( .. code:: python # Run the model and store results in "output" - output <- prl_fictitious_rp_woa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_fictitious_rp_woa(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py index 432ead92..0f52d118 100644 --- a/Python/hbayesdm/models/_prl_fictitious_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_woa.py @@ -207,7 +207,7 @@ def prl_fictitious_woa( .. code:: python # Run the model and store results in "output" - output <- prl_fictitious_woa(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_fictitious_woa(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py index 52bdff84..ecbdeb67 100644 --- a/Python/hbayesdm/models/_prl_rp.py +++ b/Python/hbayesdm/models/_prl_rp.py @@ -207,7 +207,7 @@ def prl_rp( .. 
code:: python # Run the model and store results in "output" - output <- prl_rp(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_rp(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py index 11b0a9fe..1aa80ddd 100644 --- a/Python/hbayesdm/models/_prl_rp_multipleB.py +++ b/Python/hbayesdm/models/_prl_rp_multipleB.py @@ -209,7 +209,7 @@ def prl_rp_multipleB( .. code:: python # Run the model and store results in "output" - output <- prl_rp_multipleB(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- prl_rp_multipleB(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index fd18b510..2e0b4dac 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -206,7 +206,7 @@ def pst_gainloss_Q( .. code:: python # Run the model and store results in "output" - output <- pst_gainloss_Q(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- pst_gainloss_Q(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index f1b6599b..5933bd49 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -206,7 +206,7 @@ def ra_noLA( .. code:: python # Run the model and store results in "output" - output <- ra_noLA(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ra_noLA(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index d286bf1c..39628b76 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -206,7 +206,7 @@ def ra_noRA( .. code:: python # Run the model and store results in "output" - output <- ra_noRA(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ra_noRA(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index 80a6f155..eec648c4 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -208,7 +208,7 @@ def ra_prospect( .. 
code:: python # Run the model and store results in "output" - output <- ra_prospect(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ra_prospect(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py index c25d71f9..2ae268b5 100644 --- a/Python/hbayesdm/models/_rdt_happiness.py +++ b/Python/hbayesdm/models/_rdt_happiness.py @@ -222,7 +222,7 @@ def rdt_happiness( .. code:: python # Run the model and store results in "output" - output <- rdt_happiness(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- rdt_happiness(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py index 2edf465b..07fb3ae2 100644 --- a/Python/hbayesdm/models/_ts_par4.py +++ b/Python/hbayesdm/models/_ts_par4.py @@ -211,7 +211,7 @@ def ts_par4( .. code:: python # Run the model and store results in "output" - output <- ts_par4(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ts_par4(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py index 8b2fdf48..d2476e2b 100644 --- a/Python/hbayesdm/models/_ts_par6.py +++ b/Python/hbayesdm/models/_ts_par6.py @@ -215,7 +215,7 @@ def ts_par6( .. code:: python # Run the model and store results in "output" - output <- ts_par6(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ts_par6(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py index eaa60394..ed64b5a8 100644 --- a/Python/hbayesdm/models/_ts_par7.py +++ b/Python/hbayesdm/models/_ts_par7.py @@ -217,7 +217,7 @@ def ts_par7( .. code:: python # Run the model and store results in "output" - output <- ts_par7(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ts_par7(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py index e821a68c..df627276 100644 --- a/Python/hbayesdm/models/_ug_bayes.py +++ b/Python/hbayesdm/models/_ug_bayes.py @@ -204,7 +204,7 @@ def ug_bayes( .. code:: python # Run the model and store results in "output" - output <- ug_bayes(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ug_bayes(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py index 20a16504..d640319a 100644 --- a/Python/hbayesdm/models/_ug_delta.py +++ b/Python/hbayesdm/models/_ug_delta.py @@ -204,7 +204,7 @@ def ug_delta( .. 
code:: python # Run the model and store results in "output" - output <- ug_delta(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- ug_delta(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py index f462148b..d241f4a3 100644 --- a/Python/hbayesdm/models/_wcs_sql.py +++ b/Python/hbayesdm/models/_wcs_sql.py @@ -204,7 +204,7 @@ def wcs_sql( .. code:: python # Run the model and store results in "output" - output <- wcs_sql(example=True, niter=2000, nwarmup=1000, nchain=4, ncore=4) + output <- wcs_sql(data='example', niter=2000, nwarmup=1000, nchain=4, ncore=4) # Visually check convergence of the sampling chains (should look like "hairy caterpillars") output.plot(type='trace') From 3ca9fa5e8d73c581f74483789994cd8c6e6e7117 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Mon, 26 Aug 2019 20:08:20 +0900 Subject: [PATCH 135/163] Assign RT values only if the number of choices is positive --- Python/hbayesdm/preprocess_funcs.py | 6 ++++-- R/R/preprocess_funcs.R | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py index c3ec34c1..cd54e99d 100644 --- a/Python/hbayesdm/preprocess_funcs.py +++ b/Python/hbayesdm/preprocess_funcs.py @@ -170,8 +170,10 @@ def choiceRT_preprocess_func(self, raw_data, general_info, additional_args): subj_group = iter(general_info['grouped_data']) for s in range(n_subj): _, subj_data = next(subj_group) - RTu[s][:Nu[s]] = subj_data['rt'][subj_data['choice'] == 2] - RTl[s][:Nl[s]] = subj_data['rt'][subj_data['choice'] == 1] + if Nu[s] > 0: + RTu[s][:Nu[s]] = subj_data['rt'][subj_data['choice'] == 2] + if Nl[s] > 0: + RTl[s][:Nl[s]] = subj_data['rt'][subj_data['choice'] == 1] # Minimum reaction time minRT = np.full(n_subj, -1, dtype=float) diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R index d71778c1..64337d42 100644 --- a/R/R/preprocess_funcs.R +++ b/R/R/preprocess_funcs.R @@ -161,8 +161,10 @@ choiceRT_preprocess_func <- function(raw_data, general_info, RTbound = 0.1) { subj <- subjs[i] subj_data <- subset(raw_data, raw_data$subjid == subj) - RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 0's - RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation + if (Nu[i] > 0) + RTu[i, 1:Nu[i]] <- subj_data$rt[subj_data$choice == 2] # (Nu/Nl[i]+1):Nu/Nl_max will be padded with 0's + if (Nl[i] > 0) + RTl[i, 1:Nl[i]] <- subj_data$rt[subj_data$choice == 1] # 0 padding is skipped in likelihood calculation } # Minimum reaction time From 9e1a4352fc73349d8aa31e8ef1e578279360c578 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 28 Aug 2019 12:44:14 +0900 Subject: [PATCH 136/163] Add descriptions & change column names --- commons/models/cgt_cm.yml | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/commons/models/cgt_cm.yml b/commons/models/cgt_cm.yml index 9185be68..f7fab580 100644 --- a/commons/models/cgt_cm.yml +++ b/commons/models/cgt_cm.yml @@ -41,13 +41,12 @@ model_type: # Also, if `model_type` is "multipleB", `block` must be included, too. data_columns: subjID: A unique identifier for each subject in the data-set. # Required -# block: A unique identifier for each of the multiple blocks within each subject. 
# Required for multipleB type
-  gamble_type: ''
-  percentage_staked: ''
-  trial_initial_points: ''
-  assessment_stage: ''
-  left_colour_chosen: ''
-  n_left_colour_boxes: ''
+  gamble_type: 'Integer value representing whether the bets on the current trial were presented in descending (0) or ascending (1) order.'
+  percentage_staked: 'Integer value representing the bet percentage (not proportion) selected on the current trial: 5, 25, 50, 75, or 95.'
+  trial_initial_points: 'Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).'
+  assessment_stage: 'Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.'
+  red_chosen: 'Integer value representing whether the red color was chosen (1) versus the blue color (0).'
+  n_red_boxes: 'Integer value representing the number of red boxes shown on the current trial: 1, 2, 3,..., or 9.'

 # Model parameters.
 # For each parameter, it should be defined as:

From 70e9511f0fe2f87fb0db679e71510fe0150807f1 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Wed, 28 Aug 2019 13:08:25 +0900
Subject: [PATCH 137/163] Update codes & preprocess_funcs & extdata for cgt_cm

---
 Python/hbayesdm/models/_cgt_cm.py   | 22 +-
 Python/hbayesdm/preprocess_funcs.py |  6 +-
 R/R/cgt_cm.R                        | 16 +-
 R/R/preprocess_funcs.R              |  6 +-
 commons/extdata/cgt_exampleData.txt | 744 ++++++++++++++--------------
 5 files changed, 397 insertions(+), 397 deletions(-)

diff --git a/Python/hbayesdm/models/_cgt_cm.py b/Python/hbayesdm/models/_cgt_cm.py
index f3700661..239b19d7 100644
--- a/Python/hbayesdm/models/_cgt_cm.py
+++ b/Python/hbayesdm/models/_cgt_cm.py
@@ -22,8 +22,8 @@ def __init__(self, **kwargs):
                 'percentage_staked',
                 'trial_initial_points',
                 'assessment_stage',
-                'left_colour_chosen',
-                'n_left_colour_boxes',
+                'red_chosen',
+                'n_red_boxes',
             ),
             parameters=OrderedDict([
                 ('alpha', (0, 1, 5)),
@@ -90,17 +90,17 @@ def cgt_cm(
    and columns represent variables.

    For the Cambridge Gambling Task, there should be 7 columns of data
-    with the labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". It is not necessary for the columns to be
+    with the labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". It is not necessary for the columns to be
    in this particular order; however, it is necessary that they be labeled
    correctly and contain the information below:

    - "subjID": A unique identifier for each subject in the data-set.
-    - "gamble_type":
-    - "percentage_staked":
-    - "trial_initial_points":
-    - "assessment_stage":
-    - "left_colour_chosen":
-    - "n_left_colour_boxes":
+    - "gamble_type": Integer value representing whether the bets on the current trial were presented in descending (0) or ascending (1) order.
+    - "percentage_staked": Integer value representing the bet percentage (not proportion) selected on the current trial: 5, 25, 50, 75, or 95.
+    - "trial_initial_points": Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).
+    - "assessment_stage": Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.
+    - "red_chosen": Integer value representing whether the red color was chosen (1) versus the blue color (0).
+    - "n_red_boxes": Integer value representing the number of red boxes shown on the current trial: 1, 2, 3,..., or 9.

    .. note::
        User data may contain other columns of data (e.g. ``ReactionTime``,
@@ -115,10 +115,10 @@ def cgt_cm(
        Whether to use the example data provided by hBayesDM.
    datafile
        Path for a TSV file containing the data to be modeled.
-        Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes".
+        Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes".
    data
        Pandas DataFrame object holding the data to be modeled.
-        Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes".
+        Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes".
    niter
        Number of iterations, including warm-up. Defaults to 4000.
    nwarmup
diff --git a/Python/hbayesdm/preprocess_funcs.py b/Python/hbayesdm/preprocess_funcs.py
index 022a5232..a49bce10 100644
--- a/Python/hbayesdm/preprocess_funcs.py
+++ b/Python/hbayesdm/preprocess_funcs.py
@@ -888,10 +888,10 @@ def cgt_preprocess_func(self, raw_data, general_info, additional_args):
         t = t_subjs[s]
         _, subj_data = next(subj_group)

-        col_chosen[s, :t] = np.where(subj_data['leftcolourchosen'] == 1, 1, 2)
+        col_chosen[s, :t] = np.where(subj_data['redchosen'] == 1, 1, 2)
         bet_chosen[s, :t] = subj_data['bet_time']
-        prop_red[s, :t] = subj_data['nleftcolourboxes'] / 10
-        prop_chosen[s, :t] = np.where(subj_data['leftcolourchosen'] == 1,
+        prop_red[s, :t] = subj_data['nredboxes'] / 10
+        prop_chosen[s, :t] = np.where(subj_data['redchosen'] == 1,
                                       prop_red[s][:t],
                                       1 - prop_red[s][:t])
diff --git a/R/R/cgt_cm.R b/R/R/cgt_cm.R
index 7b5821d0..b212f94a 100644
--- a/R/R/cgt_cm.R
+++ b/R/R/cgt_cm.R
@@ -7,18 +7,18 @@
 #' @templateVar MODEL_CODE cm
 #' @templateVar MODEL_CITE
 #' @templateVar MODEL_TYPE Hierarchical
-#' @templateVar DATA_COLUMNS "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes"
+#' @templateVar DATA_COLUMNS "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes"
 #' @templateVar PARAMETERS \code{alpha} (probability distortion), \code{c} (color bias), \code{rho} (relative loss sensitivity), \code{beta} (discounting rate), \code{gamma} (choice sensitivity)
 #' @templateVar REGRESSORS "y_hat_col", "y_hat_bet", "bet_utils"
 #' @templateVar POSTPREDS
 #' @templateVar LENGTH_DATA_COLUMNS 7
 #' @templateVar DETAILS_DATA_1 \item{subjID}{A unique identifier for each subject in the data-set.}
-#' @templateVar DETAILS_DATA_2 \item{gamble_type}{}
-#' @templateVar DETAILS_DATA_3 \item{percentage_staked}{}
-#' @templateVar DETAILS_DATA_4 \item{trial_initial_points}{}
-#' @templateVar DETAILS_DATA_5 \item{assessment_stage}{}
-#' @templateVar DETAILS_DATA_6 \item{left_colour_chosen}{}
-#' @templateVar DETAILS_DATA_7 \item{n_left_colour_boxes}{}
+#' @templateVar DETAILS_DATA_2 \item{gamble_type}{Integer value representing whether the bets on the current trial were presented in descending (0) or ascending (1) order.}
+#' @templateVar DETAILS_DATA_3 \item{percentage_staked}{Integer value representing the bet percentage (not proportion) selected on
the current trial: 5, 25, 50, 75, or 95.} +#' @templateVar DETAILS_DATA_4 \item{trial_initial_points}{Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).} +#' @templateVar DETAILS_DATA_5 \item{assessment_stage}{Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.} +#' @templateVar DETAILS_DATA_6 \item{red_chosen}{Integer value representing whether the red color was chosen (1) versus the blue color (0).} +#' @templateVar DETAILS_DATA_7 \item{n_red_boxes}{Integer value representing the number of red boxes shown on the current trial: 1, 2, 3,..., or 9.} #' @templateVar LENGTH_ADDITIONAL_ARGS 0 #' #' @template model-documentation @@ -36,7 +36,7 @@ cgt_cm <- hBayesDM_model( task_name = "cgt", model_name = "cm", model_type = "", - data_columns = c("subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes"), + data_columns = c("subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes"), parameters = list( "alpha" = c(0, 1, 5), "c" = c(0, 0.5, 1), diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R index 77e62c76..a62e5e2b 100644 --- a/R/R/preprocess_funcs.R +++ b/R/R/preprocess_funcs.R @@ -883,10 +883,10 @@ cgt_preprocess_func <- function(raw_data, general_info) { t <- t_subjs[i] DT_subj <- raw_data[subjid == subjs[i]] - col_chosen [i, 1:t] <- ifelse(DT_subj$leftcolourchosen == 1, 1, 2) + col_chosen [i, 1:t] <- ifelse(DT_subj$redchosen == 1, 1, 2) bet_chosen [i, 1:t] <- DT_subj$bet_time - prop_red [i, 1:t] <- DT_subj$nleftcolourboxes / 10 - prop_chosen[i, 1:t] <- ifelse(DT_subj$leftcolourchosen == 1, + prop_red [i, 1:t] <- DT_subj$nredboxes / 10 + prop_chosen[i, 1:t] <- ifelse(DT_subj$redchosen == 1, prop_red[i, 1:t], 1 - prop_red[i, 1:t]) diff --git a/commons/extdata/cgt_exampleData.txt b/commons/extdata/cgt_exampleData.txt index c0db0a41..e296f8a0 100644 --- a/commons/extdata/cgt_exampleData.txt +++ b/commons/extdata/cgt_exampleData.txt @@ -1,372 +1,372 @@ -subjID stage assessment_stage includes_gamble gamble_type block completed block_initial_points block_final_points trial_initial_points trial_final_points choice_latency n_left_colour_boxes token_box left_colour_chosen response_latency stake_index percentage_staked points_staked left_won subject_won -225 2 0 1 0 1 1 100 7 100 150 5971 2 7 0 1244 3 50 50 0 1 -225 2 0 1 0 1 1 100 7 150 75 8774 3 7 1 690 3 50 75 0 0 -225 2 0 1 0 1 1 100 7 75 146 1894 2 8 0 2090 1 95 71 0 1 -225 2 0 1 0 1 1 100 7 146 7 2259 8 9 1 1725 1 95 139 0 0 -225 3 1 1 0 1 0 100 0 100 175 10025 1 3 0 1778 2 75 75 0 1 -225 3 1 1 0 1 0 100 0 175 9 1858 6 5 0 1735 1 95 166 1 0 -225 3 1 1 0 1 0 100 0 9 0 2337 2 8 1 4288 1 95 9 0 0 -225 3 1 1 0 2 0 100 0 100 5 6570 8 1 0 2007 1 95 95 1 0 -225 3 1 1 0 2 0 100 0 5 8 3629 6 2 1 438 3 50 3 1 1 -225 3 1 1 0 2 0 100 0 8 2 5166 7 9 1 526 2 75 6 0 0 -225 3 1 1 0 2 0 100 0 2 4 2930 2 10 0 2372 1 95 2 0 1 -225 3 1 1 0 2 0 100 0 4 0 2184 5 3 0 2350 1 95 4 1 0 -225 3 1 1 0 3 0 100 1 100 195 4621 7 5 1 1935 1 95 95 1 1 -225 3 1 1 0 3 0 100 1 195 10 2368 2 1 0 933 1 95 185 1 0 -225 3 1 1 0 3 0 100 1 10 20 2686 8 6 1 1948 1 95 10 1 1 -225 3 1 1 0 3 0 100 1 20 1 3652 4 7 1 1108 1 95 19 0 0 -225 3 1 1 0 4 1 100 1 100 195 2401 2 3 0 2164 1 95 95 0 1 -225 3 1 1 0 4 1 100 1 195 380 2386 7 4 1 886 1 95 185 1 1 -225 3 1 1 0 4 1 100 1 380 19 
2226 3 1 0 709 1 95 361 1 0 -225 3 1 1 0 4 1 100 1 19 37 5136 1 9 0 593 1 95 18 0 1 -225 3 1 1 0 4 1 100 1 37 72 3114 8 7 1 780 1 95 35 1 1 -225 3 1 1 0 4 1 100 1 72 140 3980 9 2 1 692 1 95 68 1 1 -225 3 1 1 0 4 1 100 1 140 273 3720 5 6 0 652 1 95 133 0 1 -225 3 1 1 0 4 1 100 1 273 14 5074 6 8 1 558 1 95 259 0 0 -225 3 1 1 0 4 1 100 1 14 1 3601 4 7 1 731 1 95 13 0 0 -225 4 0 1 1 1 1 100 118 100 150 19845 8 3 1 1019 3 50 50 1 1 -225 4 0 1 1 1 1 100 118 150 158 13464 3 7 0 1260 1 5 8 0 1 -225 4 0 1 1 1 1 100 118 158 237 1989 8 2 1 1364 3 50 79 1 1 -225 4 0 1 1 1 1 100 118 237 118 2360 2 1 0 813 3 50 119 1 0 -225 5 1 1 1 1 1 100 907 100 195 2726 9 7 1 1171 5 95 95 1 1 -225 5 1 1 1 1 1 100 907 195 293 1945 4 8 0 882 3 50 98 0 1 -225 5 1 1 1 1 1 100 907 293 440 5461 8 2 1 631 3 50 147 1 1 -225 5 1 1 1 1 1 100 907 440 220 2551 2 1 0 467 3 50 220 1 0 -225 5 1 1 1 1 1 100 907 220 330 3018 7 6 1 4033 3 50 110 1 1 -225 5 1 1 1 1 1 100 907 330 413 3262 5 3 1 872 2 25 83 1 1 -225 5 1 1 1 1 1 100 907 413 620 2332 3 6 0 923 3 50 207 0 1 -225 5 1 1 1 1 1 100 907 620 465 4012 6 9 1 1155 2 25 155 0 0 -225 5 1 1 1 1 1 100 907 465 907 2113 1 4 0 1235 5 95 442 0 1 -225 5 1 1 1 2 1 100 240 100 150 2718 2 9 0 483 3 50 50 0 1 -225 5 1 1 1 2 1 100 240 150 225 3425 4 10 0 1060 3 50 75 0 1 -225 5 1 1 1 2 1 100 240 225 112 3255 3 1 0 337 3 50 113 1 0 -225 5 1 1 1 2 1 100 240 112 168 1606 8 6 1 983 3 50 56 1 1 -225 5 1 1 1 2 1 100 240 168 84 1622 5 7 1 614 3 50 84 0 0 -225 5 1 1 1 2 1 100 240 84 42 3078 6 8 1 394 3 50 42 0 0 -225 5 1 1 1 2 1 100 240 42 63 2141 7 5 1 2876 3 50 21 1 1 -225 5 1 1 1 2 1 100 240 63 123 2024 1 10 0 54 5 95 60 0 1 -225 5 1 1 1 2 1 100 240 123 240 1552 9 3 1 644 5 95 117 1 1 -225 5 1 1 1 3 1 100 115 100 150 2022 3 5 0 3324 3 50 50 0 1 -225 5 1 1 1 3 1 100 115 150 37 20755 8 9 1 2749 4 75 113 0 0 -225 5 1 1 1 3 1 100 115 37 56 5712 2 4 0 3798 3 50 19 0 1 -225 5 1 1 1 3 1 100 115 56 84 1510 6 3 1 1604 3 50 28 1 1 -225 5 1 1 1 3 1 100 115 84 126 1189 7 6 1 722 3 50 42 1 1 -225 5 1 1 1 3 1 100 115 126 31 1277 4 2 0 470 4 75 95 1 0 -225 5 1 1 1 3 1 100 115 31 47 3261 1 8 0 1392 3 50 16 0 1 -225 5 1 1 1 3 1 100 115 47 59 3699 5 7 0 1681 2 25 12 0 1 -225 5 1 1 1 3 1 100 115 59 115 1630 9 5 1 606 5 95 56 1 1 -225 5 1 1 1 4 1 100 455 100 195 1905 8 7 1 612 5 95 95 1 1 -225 5 1 1 1 4 1 100 455 195 380 1988 3 10 0 772 5 95 185 0 1 -225 5 1 1 1 4 1 100 455 380 95 11404 7 9 1 2372 4 75 285 0 0 -225 5 1 1 1 4 1 100 455 95 185 22680 9 1 1 2087 5 95 90 1 1 -225 5 1 1 1 4 1 100 455 185 324 9816 2 3 0 1836 4 75 139 0 1 -225 5 1 1 1 4 1 100 455 324 486 13753 1 8 0 1512 3 50 162 0 1 -225 5 1 1 1 4 1 100 455 486 729 1997 5 4 1 860 3 50 243 1 1 -225 5 1 1 1 4 1 100 455 729 364 1760 4 2 0 882 3 50 365 1 0 -225 5 1 1 1 4 1 100 455 364 455 2294 6 3 1 1518 2 25 91 1 1 -355 2 0 1 0 1 1 100 8 100 175 12851 2 7 0 3949 2 75 75 0 1 -355 2 0 1 0 1 1 100 8 175 87 4694 3 7 1 1709 3 50 88 0 0 -355 2 0 1 0 1 1 100 8 87 170 3177 2 8 0 1391 1 95 83 0 1 -355 2 0 1 0 1 1 100 8 170 8 3707 8 9 1 944 1 95 162 0 0 -355 3 1 1 0 1 1 100 6 100 195 2277 1 3 0 1062 1 95 95 0 1 -355 3 1 1 0 1 1 100 6 195 380 2040 6 5 1 860 1 95 185 1 1 -355 3 1 1 0 1 1 100 6 380 399 3488 2 8 0 1280 5 5 19 0 1 -355 3 1 1 0 1 1 100 6 399 299 1872 8 9 1 1189 4 25 100 0 0 -355 3 1 1 0 1 1 100 6 299 15 2650 3 4 1 1129 1 95 284 0 0 -355 3 1 1 0 1 1 100 6 15 29 2920 5 7 0 755 1 95 14 0 1 -355 3 1 1 0 1 1 100 6 29 57 1948 7 4 1 685 1 95 28 1 1 -355 3 1 1 0 1 1 100 6 57 111 2242 4 1 1 388 1 95 54 1 1 -355 3 1 1 0 1 1 100 6 111 6 3543 9 6 0 1068 1 95 105 1 0 -355 3 1 
1 0 2 0 100 0 100 195 6633 8 1 1 563 1 95 95 1 1 -355 3 1 1 0 2 0 100 0 195 10 2373 6 2 0 674 1 95 185 1 0 -355 3 1 1 0 2 0 100 0 10 0 1367 7 9 1 500 1 95 10 0 0 -355 3 1 1 0 3 1 100 720 100 195 2811 7 5 1 1411 1 95 95 1 1 -355 3 1 1 0 3 1 100 720 195 146 2396 2 1 0 1032 4 25 49 1 0 -355 3 1 1 0 3 1 100 720 146 285 1252 8 6 1 701 1 95 139 1 1 -355 3 1 1 0 3 1 100 720 285 214 4233 4 7 1 478 4 25 71 0 0 -355 3 1 1 0 3 1 100 720 214 375 1236 3 10 0 1615 2 75 161 0 1 -355 3 1 1 0 3 1 100 720 375 469 2655 6 8 0 935 4 25 94 0 1 -355 3 1 1 0 3 1 100 720 469 492 3240 9 2 1 628 5 5 23 1 1 -355 3 1 1 0 3 1 100 720 492 369 3683 5 3 0 522 4 25 123 1 0 -355 3 1 1 0 3 1 100 720 369 720 2198 1 10 0 603 1 95 351 0 1 -355 3 1 1 0 4 1 100 3311 100 195 1898 2 3 0 389 1 95 95 0 1 -355 3 1 1 0 4 1 100 3311 195 380 1088 7 4 1 409 1 95 185 1 1 -355 3 1 1 0 4 1 100 3311 380 665 2059 3 1 1 1358 2 75 285 1 1 -355 3 1 1 0 4 1 100 3311 665 698 1555 1 9 0 487 5 5 33 0 1 -355 3 1 1 0 4 1 100 3311 698 873 2981 8 7 1 693 4 25 175 1 1 -355 3 1 1 0 4 1 100 3311 873 1702 5329 9 2 1 1865 1 95 829 1 1 -355 3 1 1 0 4 1 100 3311 1702 1787 4309 5 6 0 670 5 5 85 0 1 -355 3 1 1 0 4 1 100 3311 1787 1698 1460 6 8 1 572 5 5 89 0 0 -355 3 1 1 0 4 1 100 3311 1698 3311 3070 4 7 0 633 1 95 1613 0 1 -355 4 0 1 1 1 1 100 313 100 125 8175 8 3 1 2023 2 25 25 1 1 -355 4 0 1 1 1 1 100 313 125 188 5812 3 7 0 429 3 50 63 0 1 -355 4 0 1 1 1 1 100 313 188 329 1814 8 2 1 622 4 75 141 1 1 -355 4 0 1 1 1 1 100 313 329 313 2421 2 1 0 1336 1 5 16 1 0 -355 5 1 1 1 1 1 100 786 100 195 1596 9 7 1 1366 5 95 95 1 1 -355 5 1 1 1 1 1 100 786 195 341 1354 4 8 0 552 4 75 146 0 1 -355 5 1 1 1 1 1 100 786 341 358 1947 8 2 1 492 1 5 17 1 1 -355 5 1 1 1 1 1 100 786 358 340 2074 2 1 0 372 1 5 18 1 0 -355 5 1 1 1 1 1 100 786 340 357 1104 7 6 1 1104 1 5 17 1 1 -355 5 1 1 1 1 1 100 786 357 339 1689 5 3 0 566 1 5 18 1 0 -355 5 1 1 1 1 1 100 786 339 424 2617 3 6 0 794 2 25 85 0 1 -355 5 1 1 1 1 1 100 786 424 403 3297 6 9 1 1034 1 5 21 0 0 -355 5 1 1 1 1 1 100 786 403 786 1262 1 4 0 0 5 95 383 0 1 -355 5 1 1 1 2 1 100 1037 100 175 1478 2 9 0 325 4 75 75 0 1 -355 5 1 1 1 2 1 100 1037 175 131 2203 4 10 1 536 2 25 44 0 0 -355 5 1 1 1 2 1 100 1037 131 124 1116 3 1 0 628 1 5 7 1 0 -355 5 1 1 1 2 1 100 1037 124 242 1092 8 6 1 2086 5 95 118 1 1 -355 5 1 1 1 2 1 100 1037 242 230 2400 5 7 1 563 1 5 12 0 0 -355 5 1 1 1 2 1 100 1037 230 218 1301 6 8 1 637 1 5 12 0 0 -355 5 1 1 1 2 1 100 1037 218 273 4996 7 5 1 1700 2 25 55 1 1 -355 5 1 1 1 2 1 100 1037 273 532 2360 1 10 0 0 5 95 259 0 1 -355 5 1 1 1 2 1 100 1037 532 1037 1741 9 3 1 0 5 95 505 1 1 -355 5 1 1 1 3 1 100 53 100 195 3014 3 5 0 861 5 95 95 0 1 -355 5 1 1 1 3 1 100 53 195 10 889 8 9 1 0 5 95 185 0 0 -355 5 1 1 1 3 1 100 53 10 18 1572 2 4 0 879 4 75 8 0 1 -355 5 1 1 1 3 1 100 53 18 19 1490 6 3 1 585 1 5 1 1 1 -355 5 1 1 1 3 1 100 53 19 20 1155 7 6 1 1801 1 5 1 1 1 -355 5 1 1 1 3 1 100 53 20 15 1592 4 2 0 802 2 25 5 1 0 -355 5 1 1 1 3 1 100 53 15 29 1386 1 8 0 781 5 95 14 0 1 -355 5 1 1 1 3 1 100 53 29 30 1550 5 7 0 762 1 5 1 0 1 -355 5 1 1 1 3 1 100 53 30 53 2395 9 5 1 1142 4 75 23 1 1 -355 5 1 1 1 4 1 100 2448 100 195 2040 8 7 1 589 5 95 95 1 1 -355 5 1 1 1 4 1 100 2448 195 380 1310 3 10 0 558 5 95 185 0 1 -355 5 1 1 1 4 1 100 2448 380 285 1536 7 9 1 860 2 25 95 0 0 -355 5 1 1 1 4 1 100 2448 285 556 1488 9 1 1 0 5 95 271 1 1 -355 5 1 1 1 4 1 100 2448 556 1084 1831 2 3 0 0 5 95 528 0 1 -355 5 1 1 1 4 1 100 2448 1084 2114 1653 1 8 0 0 5 95 1030 0 1 -355 5 1 1 1 4 1 100 2448 2114 2220 4413 5 4 1 616 1 5 106 1 1 -355 5 1 1 1 4 
1 100 2448 2220 2331 3824 4 2 1 544 1 5 111 1 1 -355 5 1 1 1 4 1 100 2448 2331 2448 1395 6 3 1 846 1 5 117 1 1 -374 2 0 1 1 1 1 100 123 100 125 8561 8 3 1 2615 2 25 25 1 1 -374 2 0 1 1 1 1 100 123 125 156 8872 3 7 0 1249 2 25 31 0 1 -374 2 0 1 1 1 1 100 123 156 164 2761 8 2 1 1284 1 5 8 1 1 -374 2 0 1 1 1 1 100 123 164 123 4948 2 1 0 462 2 25 41 1 0 -374 3 1 1 1 1 1 100 420 100 175 3855 9 7 1 973 4 75 75 1 1 -374 3 1 1 1 1 1 100 420 175 166 3387 4 8 1 959 1 5 9 0 0 -374 3 1 1 1 1 1 100 420 166 208 6727 8 2 1 1804 2 25 42 1 1 -374 3 1 1 1 1 1 100 420 208 198 2499 2 1 0 978 1 5 10 1 0 -374 3 1 1 1 1 1 100 420 198 248 2120 7 6 1 329 2 25 50 1 1 -374 3 1 1 1 1 1 100 420 248 236 2895 5 3 0 363 1 5 12 1 0 -374 3 1 1 1 1 1 100 420 236 295 2007 3 6 0 1189 2 25 59 0 1 -374 3 1 1 1 1 1 100 420 295 280 4234 6 9 1 598 1 5 15 0 0 -374 3 1 1 1 1 1 100 420 280 420 1831 1 4 0 1014 3 50 140 0 1 -374 3 1 1 1 2 1 100 396 100 150 3904 2 9 0 300 3 50 50 0 1 -374 3 1 1 1 2 1 100 396 150 142 2011 4 10 1 593 1 5 8 0 0 -374 3 1 1 1 2 1 100 396 142 135 1977 3 1 0 761 1 5 7 1 0 -374 3 1 1 1 2 1 100 396 135 169 1226 8 6 1 453 2 25 34 1 1 -374 3 1 1 1 2 1 100 396 169 177 1844 5 7 0 290 1 5 8 0 1 -374 3 1 1 1 2 1 100 396 177 168 2057 6 8 1 940 1 5 9 0 0 -374 3 1 1 1 2 1 100 396 168 176 3188 7 5 1 394 1 5 8 1 1 -374 3 1 1 1 2 1 100 396 176 264 1340 1 10 0 346 3 50 88 0 1 -374 3 1 1 1 2 1 100 396 264 396 1518 9 3 1 284 3 50 132 1 1 -374 3 1 1 1 3 1 100 306 100 95 2237 3 5 1 976 1 5 5 0 0 -374 3 1 1 1 3 1 100 306 95 47 1398 8 9 1 298 3 50 48 0 0 -374 3 1 1 1 3 1 100 306 47 59 3449 2 4 0 763 2 25 12 0 1 -374 3 1 1 1 3 1 100 306 59 89 2645 6 3 1 396 3 50 30 1 1 -374 3 1 1 1 3 1 100 306 89 111 1479 7 6 1 2767 2 25 22 1 1 -374 3 1 1 1 3 1 100 306 111 105 5385 4 2 0 325 1 5 6 1 0 -374 3 1 1 1 3 1 100 306 105 184 1419 1 8 0 485 4 75 79 0 1 -374 3 1 1 1 3 1 100 306 184 175 2340 5 7 1 641 1 5 9 0 0 -374 3 1 1 1 3 1 100 306 175 306 1408 9 5 1 979 4 75 131 1 1 -374 3 1 1 1 4 1 100 125 100 105 1930 8 7 1 292 1 5 5 1 1 -374 3 1 1 1 4 1 100 125 105 110 1187 3 10 0 320 1 5 5 0 1 -374 3 1 1 1 4 1 100 125 110 104 1766 7 9 1 567 1 5 6 0 0 -374 3 1 1 1 4 1 100 125 104 109 2042 9 1 1 918 1 5 5 1 1 -374 3 1 1 1 4 1 100 125 109 114 3464 2 3 0 314 1 5 5 0 1 -374 3 1 1 1 4 1 100 125 114 120 1247 1 8 0 378 1 5 6 0 1 -374 3 1 1 1 4 1 100 125 120 126 1629 5 4 1 397 1 5 6 1 1 -374 3 1 1 1 4 1 100 125 126 132 1383 4 2 1 364 1 5 6 1 1 -374 3 1 1 1 4 1 100 125 132 125 1384 6 3 0 460 1 5 7 1 0 -374 4 0 1 0 1 1 100 128 100 175 9354 2 7 0 297 2 75 75 0 1 -374 4 0 1 0 1 1 100 128 175 263 2857 3 7 0 1079 3 50 88 0 1 -374 4 0 1 0 1 1 100 128 263 513 1592 2 8 0 1810 1 95 250 0 1 -374 4 0 1 0 1 1 100 128 513 128 2985 8 9 1 537 2 75 385 0 0 -374 5 1 1 0 1 1 100 107 100 195 3089 1 3 0 2587 1 95 95 0 1 -374 5 1 1 0 1 1 100 107 195 244 4405 6 5 1 353 4 25 49 1 1 -374 5 1 1 0 1 1 100 107 244 305 2189 2 8 0 468 4 25 61 0 1 -374 5 1 1 0 1 1 100 107 305 152 1787 8 9 1 572 3 50 153 0 0 -374 5 1 1 0 1 1 100 107 152 228 1574 3 4 0 352 3 50 76 0 1 -374 5 1 1 0 1 1 100 107 228 57 1509 5 7 1 682 2 75 171 0 0 -374 5 1 1 0 1 1 100 107 57 111 1360 7 4 1 2780 1 95 54 1 1 -374 5 1 1 0 1 1 100 107 111 55 1469 4 1 0 686 3 50 56 1 0 -374 5 1 1 0 1 1 100 107 55 107 1875 9 6 1 2278 1 95 52 1 1 -374 5 1 1 0 2 1 100 365 100 195 3114 8 1 1 1819 1 95 95 1 1 -374 5 1 1 0 2 1 100 365 195 380 1681 6 2 1 303 1 95 185 1 1 -374 5 1 1 0 2 1 100 365 380 19 1142 7 9 1 334 1 95 361 0 0 -374 5 1 1 0 2 1 100 365 19 37 1147 2 10 0 1298 1 95 18 0 1 -374 5 1 1 0 2 1 100 365 37 65 1217 5 3 1 2622 2 75 
28 1 1 -374 5 1 1 0 2 1 100 365 65 49 1126 4 2 0 262 4 25 16 1 0 -374 5 1 1 0 2 1 100 365 49 96 1990 3 5 0 333 1 95 47 0 1 -374 5 1 1 0 2 1 100 365 96 187 1075 9 4 1 285 1 95 91 1 1 -374 5 1 1 0 2 1 100 365 187 365 1277 1 7 0 2045 1 95 178 0 1 -374 5 1 1 0 3 1 100 4 100 175 1213 7 5 1 1673 2 75 75 1 1 -374 5 1 1 0 3 1 100 4 175 9 1399 2 1 0 329 1 95 166 1 0 -374 5 1 1 0 3 1 100 4 9 16 1400 8 6 1 419 2 75 7 1 1 -374 5 1 1 0 3 1 100 4 16 24 2107 4 7 0 1724 3 50 8 0 1 -374 5 1 1 0 3 1 100 4 24 42 1304 3 10 0 416 2 75 18 0 1 -374 5 1 1 0 3 1 100 4 42 2 1697 6 8 1 590 1 95 40 0 0 -374 5 1 1 0 3 1 100 4 2 4 1799 9 2 1 992 1 95 2 1 1 -374 5 1 1 0 3 1 100 4 4 2 3893 5 3 0 386 3 50 2 1 0 -374 5 1 1 0 3 1 100 4 2 4 1436 1 10 0 284 1 95 2 0 1 -374 5 1 1 0 4 1 100 2308 100 175 1552 2 3 0 4595 2 75 75 0 1 -374 5 1 1 0 4 1 100 2308 175 341 1286 7 4 1 375 1 95 166 1 1 -374 5 1 1 0 4 1 100 2308 341 170 1498 3 1 0 2041 3 50 171 1 0 -374 5 1 1 0 4 1 100 2308 170 332 1333 1 9 0 275 1 95 162 0 1 -374 5 1 1 0 4 1 100 2308 332 647 1211 8 7 1 375 1 95 315 1 1 -374 5 1 1 0 4 1 100 2308 647 1262 1443 9 2 1 331 1 95 615 1 1 -374 5 1 1 0 4 1 100 2308 1262 2461 2561 5 6 0 1197 1 95 1199 0 1 -374 5 1 1 0 4 1 100 2308 2461 1846 2113 6 8 1 914 4 25 615 0 0 -374 5 1 1 0 4 1 100 2308 1846 2308 1116 4 7 0 2509 4 25 462 0 1 -414 2 0 1 1 1 1 100 295 100 150 12353 8 3 1 59 3 50 50 1 1 -414 2 0 1 1 1 1 100 295 150 225 4118 3 7 0 2192 3 50 75 0 1 -414 2 0 1 1 1 1 100 295 225 281 2861 8 2 1 410 2 25 56 1 1 -414 2 0 1 1 1 1 100 295 281 295 3020 2 1 1 1218 1 5 14 1 1 -414 3 1 1 1 1 1 100 1047 100 195 3010 9 7 1 0 5 95 95 1 1 -414 3 1 1 1 1 1 100 1047 195 244 1448 4 8 0 1465 2 25 49 0 1 -414 3 1 1 1 1 1 100 1047 244 366 2017 8 2 1 687 3 50 122 1 1 -414 3 1 1 1 1 1 100 1047 366 384 1923 2 1 1 2109 1 5 18 1 1 -414 3 1 1 1 1 1 100 1047 384 480 4198 7 6 1 1516 2 25 96 1 1 -414 3 1 1 1 1 1 100 1047 480 504 3353 5 3 1 2599 1 5 24 1 1 -414 3 1 1 1 1 1 100 1047 504 630 1376 3 6 0 909 2 25 126 0 1 -414 3 1 1 1 1 1 100 1047 630 598 3438 6 9 1 1056 1 5 32 0 0 -414 3 1 1 1 1 1 100 1047 598 1047 1859 1 4 0 1224 4 75 449 0 1 -414 3 1 1 1 2 1 100 364 100 150 3260 2 9 0 614 3 50 50 0 1 -414 3 1 1 1 2 1 100 364 150 225 1455 4 10 0 447 3 50 75 0 1 -414 3 1 1 1 2 1 100 364 225 169 3335 3 1 0 4214 2 25 56 1 0 -414 3 1 1 1 2 1 100 364 169 254 1643 8 6 1 1234 3 50 85 1 1 -414 3 1 1 1 2 1 100 364 254 190 3525 5 7 1 2803 2 25 64 0 0 -414 3 1 1 1 2 1 100 364 190 95 1547 6 8 1 687 3 50 95 0 0 -414 3 1 1 1 2 1 100 364 95 166 1307 7 5 1 1611 4 75 71 1 1 -414 3 1 1 1 2 1 100 364 166 291 1635 1 10 0 1036 4 75 125 0 1 -414 3 1 1 1 2 1 100 364 291 364 1520 9 3 1 1637 2 25 73 1 1 -414 3 1 1 1 3 1 100 368 100 175 2813 3 5 0 1756 4 75 75 0 1 -414 3 1 1 1 3 1 100 368 175 87 1462 8 9 1 1046 3 50 88 0 0 -414 3 1 1 1 3 1 100 368 87 91 2874 2 4 0 567 1 5 4 0 1 -414 3 1 1 1 3 1 100 368 91 96 3465 6 3 1 442 1 5 5 1 1 -414 3 1 1 1 3 1 100 368 96 168 2250 7 6 1 813 4 75 72 1 1 -414 3 1 1 1 3 1 100 368 168 126 2020 4 2 0 626 2 25 42 1 0 -414 3 1 1 1 3 1 100 368 126 221 2169 1 8 0 752 4 75 95 0 1 -414 3 1 1 1 3 1 100 368 221 210 3658 5 7 1 561 1 5 11 0 0 -414 3 1 1 1 3 1 100 368 210 368 1411 9 5 1 526 4 75 158 1 1 -414 3 1 1 1 4 1 100 1927 100 175 3500 8 7 1 628 4 75 75 1 1 -414 3 1 1 1 4 1 100 1927 175 263 4243 3 10 0 1455 3 50 88 0 1 -414 3 1 1 1 4 1 100 1927 263 250 2575 7 9 1 498 1 5 13 0 0 -414 3 1 1 1 4 1 100 1927 250 488 984 9 1 1 1463 5 95 238 1 1 -414 3 1 1 1 4 1 100 1927 488 854 2190 2 3 0 1053 4 75 366 0 1 -414 3 1 1 1 4 1 100 1927 854 1665 2032 1 8 0 2566 5 95 811 
0 1 -414 3 1 1 1 4 1 100 1927 1665 1748 1801 5 4 1 921 1 5 83 1 1 -414 3 1 1 1 4 1 100 1927 1748 1835 1587 4 2 1 452 1 5 87 1 1 -414 3 1 1 1 4 1 100 1927 1835 1927 1436 6 3 1 1613 1 5 92 1 1 -414 4 0 1 0 1 1 100 37 100 195 8025 2 7 0 4200 1 95 95 0 1 -414 4 0 1 0 1 1 100 37 195 380 4781 3 7 0 703 1 95 185 0 1 -414 4 0 1 0 1 1 100 37 380 741 2315 2 8 0 1939 1 95 361 0 1 -414 4 0 1 0 1 1 100 37 741 37 1195 8 9 1 336 1 95 704 0 0 -414 5 1 1 0 1 1 100 68 100 195 1844 1 3 0 394 1 95 95 0 1 -414 5 1 1 0 1 1 100 68 195 380 1980 6 5 1 1028 1 95 185 1 1 -414 5 1 1 0 1 1 100 68 380 741 1712 2 8 0 356 1 95 361 0 1 -414 5 1 1 0 1 1 100 68 741 370 2952 8 9 1 1313 3 50 371 0 0 -414 5 1 1 0 1 1 100 68 370 722 2112 3 4 0 1081 1 95 352 0 1 -414 5 1 1 0 1 1 100 68 722 361 2114 5 7 1 444 3 50 361 0 0 -414 5 1 1 0 1 1 100 68 361 704 2076 7 4 1 262 1 95 343 1 1 -414 5 1 1 0 1 1 100 68 704 35 1311 4 1 0 286 1 95 669 1 0 -414 5 1 1 0 1 1 100 68 35 68 1326 9 6 1 372 1 95 33 1 1 -414 5 1 1 0 2 0 100 0 100 195 1596 8 1 1 605 1 95 95 1 1 -414 5 1 1 0 2 0 100 0 195 380 1173 6 2 1 187 1 95 185 1 1 -414 5 1 1 0 2 0 100 0 380 19 2623 7 9 1 240 1 95 361 0 0 -414 5 1 1 0 2 0 100 0 19 37 1474 2 10 0 209 1 95 18 0 1 -414 5 1 1 0 2 0 100 0 37 9 2484 5 3 0 1220 2 75 28 1 0 -414 5 1 1 0 2 0 100 0 9 0 1167 4 2 0 245 1 95 9 1 0 -414 5 1 1 0 3 0 100 1 100 195 2273 7 5 1 283 1 95 95 1 1 -414 5 1 1 0 3 0 100 1 195 10 1203 2 1 0 247 1 95 185 1 0 -414 5 1 1 0 3 0 100 1 10 20 1312 8 6 1 1288 1 95 10 1 1 -414 5 1 1 0 3 0 100 1 20 1 2472 4 7 1 240 1 95 19 0 0 -414 5 1 1 0 4 1 100 1544 100 195 1611 2 3 0 948 1 95 95 0 1 -414 5 1 1 0 4 1 100 1544 195 380 1324 7 4 1 611 1 95 185 1 1 -414 5 1 1 0 4 1 100 1544 380 285 2131 3 1 0 644 4 25 95 1 0 -414 5 1 1 0 4 1 100 1544 285 556 1386 1 9 0 725 1 95 271 0 1 -414 5 1 1 0 4 1 100 1544 556 1084 1232 8 7 1 267 1 95 528 1 1 -414 5 1 1 0 4 1 100 1544 1084 2114 987 9 2 1 274 1 95 1030 1 1 -414 5 1 1 0 4 1 100 1544 2114 1585 2618 5 6 1 3131 4 25 529 0 0 -414 5 1 1 0 4 1 100 1544 1585 792 1675 6 8 1 841 3 50 793 0 0 -414 5 1 1 0 4 1 100 1544 792 1544 1814 4 7 0 422 1 95 752 0 1 -583 2 0 1 1 1 1 100 106 100 105 25011 8 3 1 1579 1 5 5 1 1 -583 2 0 1 1 1 1 100 106 105 52 9333 3 7 1 1317 3 50 53 0 0 -583 2 0 1 1 1 1 100 106 52 101 3376 8 2 1 1380 5 95 49 1 1 -583 2 0 1 1 1 1 100 106 101 106 4077 2 1 1 916 1 5 5 1 1 -583 3 1 1 1 1 1 100 513 100 175 2921 9 7 1 1045 4 75 75 1 1 -583 3 1 1 1 1 1 100 513 175 166 4370 4 8 1 1374 1 5 9 0 0 -583 3 1 1 1 1 1 100 513 166 208 2669 8 2 1 694 2 25 42 1 1 -583 3 1 1 1 1 1 100 513 208 198 3879 2 1 0 2492 1 5 10 1 0 -583 3 1 1 1 1 1 100 513 198 297 1784 7 6 1 662 3 50 99 1 1 -583 3 1 1 1 1 1 100 513 297 312 4839 5 3 1 1220 1 5 15 1 1 -583 3 1 1 1 1 1 100 513 312 390 1793 3 6 0 1294 2 25 78 0 1 -583 3 1 1 1 1 1 100 513 390 410 2208 6 9 0 1018 1 5 20 0 1 -583 3 1 1 1 1 1 100 513 410 513 1555 1 4 0 2619 2 25 103 0 1 -583 3 1 1 1 2 1 100 521 100 125 2475 2 9 0 678 2 25 25 0 1 -583 3 1 1 1 2 1 100 521 125 156 1770 4 10 0 1146 2 25 31 0 1 -583 3 1 1 1 2 1 100 521 156 164 1982 3 1 1 3746 1 5 8 1 1 -583 3 1 1 1 2 1 100 521 164 205 3186 8 6 1 1863 2 25 41 1 1 -583 3 1 1 1 2 1 100 521 205 195 1817 5 7 1 970 1 5 10 0 0 -583 3 1 1 1 2 1 100 521 195 185 1402 6 8 1 1085 1 5 10 0 0 -583 3 1 1 1 2 1 100 521 185 278 1762 7 5 1 1377 3 50 93 1 1 -583 3 1 1 1 2 1 100 521 278 417 1835 1 10 0 1073 3 50 139 0 1 -583 3 1 1 1 2 1 100 521 417 521 1993 9 3 1 920 2 25 104 1 1 -583 3 1 1 1 3 1 100 380 100 105 2665 3 5 0 859 1 5 5 0 1 -583 3 1 1 1 3 1 100 380 105 110 2646 8 9 0 676 1 5 5 0 1 -583 3 1 
1 1 3 1 100 380 110 193 1977 2 4 0 524 4 75 83 0 1 -583 3 1 1 1 3 1 100 380 193 183 2349 6 3 0 1121 1 5 10 1 0 -583 3 1 1 1 3 1 100 380 183 229 1415 7 6 1 1234 2 25 46 1 1 -583 3 1 1 1 3 1 100 380 229 218 2015 4 2 0 2820 1 5 11 1 0 -583 3 1 1 1 3 1 100 380 218 207 2320 1 8 1 1206 1 5 11 0 0 -583 3 1 1 1 3 1 100 380 207 217 2030 5 7 0 921 1 5 10 0 1 -583 3 1 1 1 3 1 100 380 217 380 1333 9 5 1 552 4 75 163 1 1 -583 3 1 1 1 4 1 100 218 100 105 2167 8 7 1 1180 1 5 5 1 1 -583 3 1 1 1 4 1 100 218 105 131 1164 3 10 0 461 2 25 26 0 1 -583 3 1 1 1 4 1 100 218 131 98 1682 7 9 1 1116 2 25 33 0 0 -583 3 1 1 1 4 1 100 218 98 172 1597 9 1 1 1401 4 75 74 1 1 -583 3 1 1 1 4 1 100 218 172 258 1721 2 3 0 819 3 50 86 0 1 -583 3 1 1 1 4 1 100 218 258 245 2129 1 8 1 1260 1 5 13 0 0 -583 3 1 1 1 4 1 100 218 245 306 1607 5 4 1 935 2 25 61 1 1 -583 3 1 1 1 4 1 100 218 306 291 2057 4 2 0 978 1 5 15 1 0 -583 3 1 1 1 4 1 100 218 291 218 942 6 3 0 1862 2 25 73 1 0 -583 4 0 1 0 1 1 100 13 100 195 3557 2 7 0 1766 1 95 95 0 1 -583 4 0 1 0 1 1 100 13 195 341 2588 3 7 0 1427 2 75 146 0 1 -583 4 0 1 0 1 1 100 13 341 256 1990 2 8 1 1412 4 25 85 0 0 -583 4 0 1 0 1 1 100 13 256 13 1551 8 9 1 1152 1 95 243 0 0 -583 5 1 1 0 1 1 100 68 100 195 1481 1 3 0 1338 1 95 95 0 1 -583 5 1 1 0 1 1 100 68 195 380 4040 6 5 1 1342 1 95 185 1 1 -583 5 1 1 0 1 1 100 68 380 741 3894 2 8 0 763 1 95 361 0 1 -583 5 1 1 0 1 1 100 68 741 370 1266 8 9 1 1225 3 50 371 0 0 -583 5 1 1 0 1 1 100 68 370 722 2212 3 4 0 1296 1 95 352 0 1 -583 5 1 1 0 1 1 100 68 722 361 2542 5 7 1 1164 3 50 361 0 0 -583 5 1 1 0 1 1 100 68 361 704 1844 7 4 1 1065 1 95 343 1 1 -583 5 1 1 0 1 1 100 68 704 35 2384 4 1 0 2151 1 95 669 1 0 -583 5 1 1 0 1 1 100 68 35 68 1424 9 6 1 2331 1 95 33 1 1 -583 5 1 1 0 2 1 100 9635 100 195 4301 8 1 1 1735 1 95 95 1 1 -583 5 1 1 0 2 1 100 9635 195 380 1656 6 2 1 1559 1 95 185 1 1 -583 5 1 1 0 2 1 100 9635 380 475 1811 7 9 0 632 4 25 95 0 1 -583 5 1 1 0 2 1 100 9635 475 926 1953 2 10 0 1028 1 95 451 0 1 -583 5 1 1 0 2 1 100 9635 926 1158 1363 5 3 1 1306 4 25 232 1 1 -583 5 1 1 0 2 1 100 9635 1158 2027 2239 4 2 1 1314 2 75 869 1 1 -583 5 1 1 0 2 1 100 9635 2027 2534 1641 3 5 0 735 4 25 507 0 1 -583 5 1 1 0 2 1 100 9635 2534 4941 2670 9 4 1 1025 1 95 2407 1 1 -583 5 1 1 0 2 1 100 9635 4941 9635 1362 1 7 0 1095 1 95 4694 0 1 -583 5 1 1 0 3 1 100 15 100 195 2787 7 5 1 973 1 95 95 1 1 -583 5 1 1 0 3 1 100 15 195 10 1389 2 1 0 943 1 95 185 1 0 -583 5 1 1 0 3 1 100 15 10 20 1399 8 6 1 633 1 95 10 1 1 -583 5 1 1 0 3 1 100 15 20 39 1920 4 7 0 763 1 95 19 0 1 -583 5 1 1 0 3 1 100 15 39 76 1349 3 10 0 1055 1 95 37 0 1 -583 5 1 1 0 3 1 100 15 76 4 1258 6 8 1 969 1 95 72 0 0 -583 5 1 1 0 3 1 100 15 4 8 2426 9 2 1 922 1 95 4 1 1 -583 5 1 1 0 3 1 100 15 8 16 1246 5 3 1 1272 1 95 8 1 1 -583 5 1 1 0 3 1 100 15 16 15 812 1 10 1 730 5 5 1 0 0 -583 5 1 1 0 4 1 100 1045 100 195 1892 2 3 0 1436 1 95 95 0 1 -583 5 1 1 0 4 1 100 1045 195 380 1127 7 4 1 896 1 95 185 1 1 -583 5 1 1 0 4 1 100 1045 380 741 1737 3 1 1 973 1 95 361 1 1 -583 5 1 1 0 4 1 100 1045 741 1445 1469 1 9 0 962 1 95 704 0 1 -583 5 1 1 0 4 1 100 1045 1445 2818 3524 8 7 1 2442 1 95 1373 1 1 -583 5 1 1 0 4 1 100 1045 2818 5495 2290 9 2 1 1250 1 95 2677 1 1 -583 5 1 1 0 4 1 100 1045 5495 10715 1634 5 6 0 1047 1 95 5220 0 1 -583 5 1 1 0 4 1 100 1045 10715 20894 2126 6 8 0 952 1 95 10179 0 1 -583 5 1 1 0 4 1 100 1045 20894 1045 2444 4 7 1 868 1 95 19849 0 0 +subjID gamble_type percentage_staked trial_initial_points assessment_stage red_chosen n_red_boxes +225 0 50 100 0 0 2 +225 0 50 150 0 1 3 +225 0 95 
75 0 0 2 +225 0 95 146 0 1 8 +225 0 75 100 1 0 1 +225 0 95 175 1 0 6 +225 0 95 9 1 1 2 +225 0 95 100 1 0 8 +225 0 50 5 1 1 6 +225 0 75 8 1 1 7 +225 0 95 2 1 0 2 +225 0 95 4 1 0 5 +225 0 95 100 1 1 7 +225 0 95 195 1 0 2 +225 0 95 10 1 1 8 +225 0 95 20 1 1 4 +225 0 95 100 1 0 2 +225 0 95 195 1 1 7 +225 0 95 380 1 0 3 +225 0 95 19 1 0 1 +225 0 95 37 1 1 8 +225 0 95 72 1 1 9 +225 0 95 140 1 0 5 +225 0 95 273 1 1 6 +225 0 95 14 1 1 4 +225 1 50 100 0 1 8 +225 1 5 150 0 0 3 +225 1 50 158 0 1 8 +225 1 50 237 0 0 2 +225 1 95 100 1 1 9 +225 1 50 195 1 0 4 +225 1 50 293 1 1 8 +225 1 50 440 1 0 2 +225 1 50 220 1 1 7 +225 1 25 330 1 1 5 +225 1 50 413 1 0 3 +225 1 25 620 1 1 6 +225 1 95 465 1 0 1 +225 1 50 100 1 0 2 +225 1 50 150 1 0 4 +225 1 50 225 1 0 3 +225 1 50 112 1 1 8 +225 1 50 168 1 1 5 +225 1 50 84 1 1 6 +225 1 50 42 1 1 7 +225 1 95 63 1 0 1 +225 1 95 123 1 1 9 +225 1 50 100 1 0 3 +225 1 75 150 1 1 8 +225 1 50 37 1 0 2 +225 1 50 56 1 1 6 +225 1 50 84 1 1 7 +225 1 75 126 1 0 4 +225 1 50 31 1 0 1 +225 1 25 47 1 0 5 +225 1 95 59 1 1 9 +225 1 95 100 1 1 8 +225 1 95 195 1 0 3 +225 1 75 380 1 1 7 +225 1 95 95 1 1 9 +225 1 75 185 1 0 2 +225 1 50 324 1 0 1 +225 1 50 486 1 1 5 +225 1 50 729 1 0 4 +225 1 25 364 1 1 6 +355 0 75 100 0 0 2 +355 0 50 175 0 1 3 +355 0 95 87 0 0 2 +355 0 95 170 0 1 8 +355 0 95 100 1 0 1 +355 0 95 195 1 1 6 +355 0 5 380 1 0 2 +355 0 25 399 1 1 8 +355 0 95 299 1 1 3 +355 0 95 15 1 0 5 +355 0 95 29 1 1 7 +355 0 95 57 1 1 4 +355 0 95 111 1 0 9 +355 0 95 100 1 1 8 +355 0 95 195 1 0 6 +355 0 95 10 1 1 7 +355 0 95 100 1 1 7 +355 0 25 195 1 0 2 +355 0 95 146 1 1 8 +355 0 25 285 1 1 4 +355 0 75 214 1 0 3 +355 0 25 375 1 0 6 +355 0 5 469 1 1 9 +355 0 25 492 1 0 5 +355 0 95 369 1 0 1 +355 0 95 100 1 0 2 +355 0 95 195 1 1 7 +355 0 75 380 1 1 3 +355 0 5 665 1 0 1 +355 0 25 698 1 1 8 +355 0 95 873 1 1 9 +355 0 5 1702 1 0 5 +355 0 5 1787 1 1 6 +355 0 95 1698 1 0 4 +355 1 25 100 0 1 8 +355 1 50 125 0 0 3 +355 1 75 188 0 1 8 +355 1 5 329 0 0 2 +355 1 95 100 1 1 9 +355 1 75 195 1 0 4 +355 1 5 341 1 1 8 +355 1 5 358 1 0 2 +355 1 5 340 1 1 7 +355 1 5 357 1 0 5 +355 1 25 339 1 0 3 +355 1 5 424 1 1 6 +355 1 95 403 1 0 1 +355 1 75 100 1 0 2 +355 1 25 175 1 1 4 +355 1 5 131 1 0 3 +355 1 95 124 1 1 8 +355 1 5 242 1 1 5 +355 1 5 230 1 1 6 +355 1 25 218 1 1 7 +355 1 95 273 1 0 1 +355 1 95 532 1 1 9 +355 1 95 100 1 0 3 +355 1 95 195 1 1 8 +355 1 75 10 1 0 2 +355 1 5 18 1 1 6 +355 1 5 19 1 1 7 +355 1 25 20 1 0 4 +355 1 95 15 1 0 1 +355 1 5 29 1 0 5 +355 1 75 30 1 1 9 +355 1 95 100 1 1 8 +355 1 95 195 1 0 3 +355 1 25 380 1 1 7 +355 1 95 285 1 1 9 +355 1 95 556 1 0 2 +355 1 95 1084 1 0 1 +355 1 5 2114 1 1 5 +355 1 5 2220 1 1 4 +355 1 5 2331 1 1 6 +374 1 25 100 0 1 8 +374 1 25 125 0 0 3 +374 1 5 156 0 1 8 +374 1 25 164 0 0 2 +374 1 75 100 1 1 9 +374 1 5 175 1 1 4 +374 1 25 166 1 1 8 +374 1 5 208 1 0 2 +374 1 25 198 1 1 7 +374 1 5 248 1 0 5 +374 1 25 236 1 0 3 +374 1 5 295 1 1 6 +374 1 50 280 1 0 1 +374 1 50 100 1 0 2 +374 1 5 150 1 1 4 +374 1 5 142 1 0 3 +374 1 25 135 1 1 8 +374 1 5 169 1 0 5 +374 1 5 177 1 1 6 +374 1 5 168 1 1 7 +374 1 50 176 1 0 1 +374 1 50 264 1 1 9 +374 1 5 100 1 1 3 +374 1 50 95 1 1 8 +374 1 25 47 1 0 2 +374 1 50 59 1 1 6 +374 1 25 89 1 1 7 +374 1 5 111 1 0 4 +374 1 75 105 1 0 1 +374 1 5 184 1 1 5 +374 1 75 175 1 1 9 +374 1 5 100 1 1 8 +374 1 5 105 1 0 3 +374 1 5 110 1 1 7 +374 1 5 104 1 1 9 +374 1 5 109 1 0 2 +374 1 5 114 1 0 1 +374 1 5 120 1 1 5 +374 1 5 126 1 1 4 +374 1 5 132 1 0 6 +374 0 75 100 0 0 2 +374 0 50 175 0 0 3 +374 0 95 263 0 0 2 +374 0 75 513 0 1 8 +374 0 95 100 1 0 1 
+374 0 25 195 1 1 6 +374 0 25 244 1 0 2 +374 0 50 305 1 1 8 +374 0 50 152 1 0 3 +374 0 75 228 1 1 5 +374 0 95 57 1 1 7 +374 0 50 111 1 0 4 +374 0 95 55 1 1 9 +374 0 95 100 1 1 8 +374 0 95 195 1 1 6 +374 0 95 380 1 1 7 +374 0 95 19 1 0 2 +374 0 75 37 1 1 5 +374 0 25 65 1 0 4 +374 0 95 49 1 0 3 +374 0 95 96 1 1 9 +374 0 95 187 1 0 1 +374 0 75 100 1 1 7 +374 0 95 175 1 0 2 +374 0 75 9 1 1 8 +374 0 50 16 1 0 4 +374 0 75 24 1 0 3 +374 0 95 42 1 1 6 +374 0 95 2 1 1 9 +374 0 50 4 1 0 5 +374 0 95 2 1 0 1 +374 0 75 100 1 0 2 +374 0 95 175 1 1 7 +374 0 50 341 1 0 3 +374 0 95 170 1 0 1 +374 0 95 332 1 1 8 +374 0 95 647 1 1 9 +374 0 95 1262 1 0 5 +374 0 25 2461 1 1 6 +374 0 25 1846 1 0 4 +414 1 50 100 0 1 8 +414 1 50 150 0 0 3 +414 1 25 225 0 1 8 +414 1 5 281 0 1 2 +414 1 95 100 1 1 9 +414 1 25 195 1 0 4 +414 1 50 244 1 1 8 +414 1 5 366 1 1 2 +414 1 25 384 1 1 7 +414 1 5 480 1 1 5 +414 1 25 504 1 0 3 +414 1 5 630 1 1 6 +414 1 75 598 1 0 1 +414 1 50 100 1 0 2 +414 1 50 150 1 0 4 +414 1 25 225 1 0 3 +414 1 50 169 1 1 8 +414 1 25 254 1 1 5 +414 1 50 190 1 1 6 +414 1 75 95 1 1 7 +414 1 75 166 1 0 1 +414 1 25 291 1 1 9 +414 1 75 100 1 0 3 +414 1 50 175 1 1 8 +414 1 5 87 1 0 2 +414 1 5 91 1 1 6 +414 1 75 96 1 1 7 +414 1 25 168 1 0 4 +414 1 75 126 1 0 1 +414 1 5 221 1 1 5 +414 1 75 210 1 1 9 +414 1 75 100 1 1 8 +414 1 50 175 1 0 3 +414 1 5 263 1 1 7 +414 1 95 250 1 1 9 +414 1 75 488 1 0 2 +414 1 95 854 1 0 1 +414 1 5 1665 1 1 5 +414 1 5 1748 1 1 4 +414 1 5 1835 1 1 6 +414 0 95 100 0 0 2 +414 0 95 195 0 0 3 +414 0 95 380 0 0 2 +414 0 95 741 0 1 8 +414 0 95 100 1 0 1 +414 0 95 195 1 1 6 +414 0 95 380 1 0 2 +414 0 50 741 1 1 8 +414 0 95 370 1 0 3 +414 0 50 722 1 1 5 +414 0 95 361 1 1 7 +414 0 95 704 1 0 4 +414 0 95 35 1 1 9 +414 0 95 100 1 1 8 +414 0 95 195 1 1 6 +414 0 95 380 1 1 7 +414 0 95 19 1 0 2 +414 0 75 37 1 0 5 +414 0 95 9 1 0 4 +414 0 95 100 1 1 7 +414 0 95 195 1 0 2 +414 0 95 10 1 1 8 +414 0 95 20 1 1 4 +414 0 95 100 1 0 2 +414 0 95 195 1 1 7 +414 0 25 380 1 0 3 +414 0 95 285 1 0 1 +414 0 95 556 1 1 8 +414 0 95 1084 1 1 9 +414 0 25 2114 1 1 5 +414 0 50 1585 1 1 6 +414 0 95 792 1 0 4 +583 1 5 100 0 1 8 +583 1 50 105 0 1 3 +583 1 95 52 0 1 8 +583 1 5 101 0 1 2 +583 1 75 100 1 1 9 +583 1 5 175 1 1 4 +583 1 25 166 1 1 8 +583 1 5 208 1 0 2 +583 1 50 198 1 1 7 +583 1 5 297 1 1 5 +583 1 25 312 1 0 3 +583 1 5 390 1 0 6 +583 1 25 410 1 0 1 +583 1 25 100 1 0 2 +583 1 25 125 1 0 4 +583 1 5 156 1 1 3 +583 1 25 164 1 1 8 +583 1 5 205 1 1 5 +583 1 5 195 1 1 6 +583 1 50 185 1 1 7 +583 1 50 278 1 0 1 +583 1 25 417 1 1 9 +583 1 5 100 1 0 3 +583 1 5 105 1 0 8 +583 1 75 110 1 0 2 +583 1 5 193 1 0 6 +583 1 25 183 1 1 7 +583 1 5 229 1 0 4 +583 1 5 218 1 1 1 +583 1 5 207 1 0 5 +583 1 75 217 1 1 9 +583 1 5 100 1 1 8 +583 1 25 105 1 0 3 +583 1 25 131 1 1 7 +583 1 75 98 1 1 9 +583 1 50 172 1 0 2 +583 1 5 258 1 1 1 +583 1 25 245 1 1 5 +583 1 5 306 1 0 4 +583 1 25 291 1 0 6 +583 0 95 100 0 0 2 +583 0 75 195 0 0 3 +583 0 25 341 0 1 2 +583 0 95 256 0 1 8 +583 0 95 100 1 0 1 +583 0 95 195 1 1 6 +583 0 95 380 1 0 2 +583 0 50 741 1 1 8 +583 0 95 370 1 0 3 +583 0 50 722 1 1 5 +583 0 95 361 1 1 7 +583 0 95 704 1 0 4 +583 0 95 35 1 1 9 +583 0 95 100 1 1 8 +583 0 95 195 1 1 6 +583 0 25 380 1 0 7 +583 0 95 475 1 0 2 +583 0 25 926 1 1 5 +583 0 75 1158 1 1 4 +583 0 25 2027 1 0 3 +583 0 95 2534 1 1 9 +583 0 95 4941 1 0 1 +583 0 95 100 1 1 7 +583 0 95 195 1 0 2 +583 0 95 10 1 1 8 +583 0 95 20 1 0 4 +583 0 95 39 1 0 3 +583 0 95 76 1 1 6 +583 0 95 4 1 1 9 +583 0 95 8 1 1 5 +583 0 5 16 1 1 1 +583 0 95 100 1 0 2 +583 0 95 195 1 1 7 +583 0 95 
380 1 1 3 +583 0 95 741 1 0 1 +583 0 95 1445 1 1 8 +583 0 95 2818 1 1 9 +583 0 95 5495 1 0 5 +583 0 95 10715 1 0 6 +583 0 95 20894 1 1 4 From afb342e3900c85ed2f26736cc4b04655dba4fcfd Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 28 Aug 2019 13:50:30 +0900 Subject: [PATCH 138/163] Minor fix on the R code generator --- commons/convert-to-r.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/commons/convert-to-r.py b/commons/convert-to-r.py index fa2873c8..331ae961 100644 --- a/commons/convert-to-r.py +++ b/commons/convert-to-r.py @@ -61,10 +61,12 @@ def parse_cite_string(cite): if not cite: return None + fullcite = cite.replace('\n', '') + regex_authoryear = r'(?P^.+?)\s\((?P\d+?)\)' regex_author = r'(?=\s\&)?\s?(?P[^,&]+?,\s[^,&]+?)(?=,|\n|\r|$)' - m_ay = re.search(regex_authoryear, cite) + m_ay = re.search(regex_authoryear, fullcite) year = m_ay.group('year') authors = [] @@ -83,7 +85,7 @@ def parse_cite_string(cite): 'year': year, 'shortcite': shortcite, 'barecite': barecite, - 'fullcite': cite + 'fullcite': fullcite } From 965525b79b25b5e55bce82393cba4029cd4658a4 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 28 Aug 2019 13:50:40 +0900 Subject: [PATCH 139/163] Update R code for cgt_cm --- R/R/cgt_cm.R | 1 - R/docs/reference/cgt_cm.html | 343 +++++++++++++++++++++++++++++++++++ R/man/cgt_cm.Rd | 16 +- 3 files changed, 351 insertions(+), 9 deletions(-) create mode 100644 R/docs/reference/cgt_cm.html diff --git a/R/R/cgt_cm.R b/R/R/cgt_cm.R index b212f94a..96c983f6 100644 --- a/R/R/cgt_cm.R +++ b/R/R/cgt_cm.R @@ -29,7 +29,6 @@ #' #' @references #' Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339. - #' cgt_cm <- hBayesDM_model( diff --git a/R/docs/reference/cgt_cm.html b/R/docs/reference/cgt_cm.html new file mode 100644 index 00000000..08afca05 --- /dev/null +++ b/R/docs/reference/cgt_cm.html @@ -0,0 +1,343 @@ + + + + + + + + +Cumulative Model — cgt_cm • hBayesDM + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Hierarchical Bayesian Modeling of the Cambridge Gambling Task using the Cumulative Model. It has the following parameters: alpha (probability distortion), c (color bias), rho (relative loss sensitivity), beta (discounting rate), gamma (choice sensitivity).

    • Task: Cambridge Gambling Task (Rogers et al., 1999)

    • Model: Cumulative Model

cgt_cm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

data
Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". See Details below for more information.

niter
Number of iterations, including warm-up. Defaults to 4000.

nwarmup
Number of iterations used for warm-up only. Defaults to 1000.

nchain
Number of Markov chains to run. Defaults to 4.

ncore
Number of CPUs to be used for running. Defaults to 1.

nthin
Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits
Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars
Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor
Whether to export model-based regressors (TRUE or FALSE). For this model they are: "y_hat_col", "y_hat_bet", "bet_utils".

vb
Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred
Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. Not available for this model.

adapt_delta
Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize
Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth
Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...
For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model
Character value that is the name of the model ("cgt_cm").

allIndPars
Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals
List object containing the posterior samples over different parameters.

fit
A class stanfit object that contains the fitted Stan model.

rawdata
Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor
List object containing the extracted model-based regressors.

    Details


    This section describes some of the function arguments in greater detail.


data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Cambridge Gambling Task, there should be 7 columns of data with the labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID
A unique identifier for each subject in the data-set.

gamble_type
Integer value representing whether the bets on the current trial were presented in descending (0) or ascending (1) order.

percentage_staked
Integer value representing the bet percentage (not proportion) selected on the current trial: 5, 25, 50, 75, or 95.

trial_initial_points
Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).

assessment_stage
Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.

red_chosen
Integer value representing whether the red color was chosen (1) versus the blue color (0).

n_red_boxes
Integer value representing the number of red boxes shown on the current trial: 1, 2, 3, ..., or 9.

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
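For concreteness, a minimal sketch of a data set in this format is shown below. The column names follow the documentation above, but the trial values are invented purely for illustration, and a real data set would need far more trials per subject for a meaningful fit.

```r
# Hypothetical two-trial data set with the columns cgt_cm expects;
# all values below are made up for illustration only.
toy_df <- data.frame(
  subjID               = c(1, 1),
  gamble_type          = c(0, 1),     # 0 = descending bets, 1 = ascending bets
  percentage_staked    = c(50, 95),   # one of 5, 25, 50, 75, 95
  trial_initial_points = c(100, 150), # points held at the start of the trial
  assessment_stage     = c(1, 1),     # only test trials (1) are used for fitting
  red_chosen           = c(1, 0),     # 1 = red chosen, 0 = blue chosen
  n_red_boxes          = c(7, 3)      # integer between 1 and 9
)
# output <- cgt_cm(data = toy_df)
```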


nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.
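As an illustration (the numbers here are arbitrary, not recommendations), a longer warm-up can be requested as follows; note that niter counts the warm-up iterations, so this call keeps 4000 post-warm-up samples per chain.

```r
# Sketch: 6000 iterations per chain, discarding the first 2000 as warm-up.
output <- cgt_cm(data = "example", niter = 6000, nwarmup = 2000)
```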


nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".


nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.


Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
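As a sketch of how these options are passed (the particular values are illustrative, not tuned recommendations), one common response to divergent transitions is to raise adapt_delta and allow deeper trees:

```r
# Stricter sampler settings: slower sampling, but smaller, more careful steps.
output <- cgt_cm(data = "example",
                 adapt_delta   = 0.99, # higher target acceptance probability
                 max_treedepth = 12)   # allow more leapfrog steps per iteration
```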


    Contributors

Nathaniel Haines <haines.175@osu.edu>

    References


    Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339.


    See also


We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM


    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- cgt_cm(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- cgt_cm(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }
    + + + + + + diff --git a/R/man/cgt_cm.Rd b/R/man/cgt_cm.Rd index fdd8b088..4cc1e108 100644 --- a/R/man/cgt_cm.Rd +++ b/R/man/cgt_cm.Rd @@ -14,7 +14,7 @@ cgt_cm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4, a filepath for a tab-seperated txt file, \code{"example"} to use example data, or \code{"choose"} to choose data with an interactive window. Columns in the dataset must include: -"subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". See \bold{Details} below for more information.} +"subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". See \bold{Details} below for more information.} \item{niter}{Number of iterations, including warm-up. Defaults to 4000.} @@ -88,16 +88,16 @@ This section describes some of the function arguments in greater detail. subjects of interest for the current analysis. The file should be a \strong{tab-delimited} text file, whose rows represent trial-by-trial observations and columns represent variables.\cr For the Cambridge Gambling Task, there should be 7 columns of data with the - labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "left_colour_chosen", "n_left_colour_boxes". It is not necessary for the columns to be in this particular order, + labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below: \describe{ \item{subjID}{A unique identifier for each subject in the data-set.} - \item{gamble_type}{} - \item{percentage_staked}{} - \item{trial_initial_points}{} - \item{assessment_stage}{} - \item{left_colour_chosen}{} - \item{n_left_colour_boxes}{} + \item{gamble_type}{Integer value representng whether the bets on the current trial were presented in descending (0) or ascending (1) order.} + \item{percentage_staked}{Integer value representing the bet percentage (not proportion) selected on the current trial: 5, 25, 50, 75, or 95.} + \item{trial_initial_points}{Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).} + \item{assessment_stage}{Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.} + \item{red_chosen}{Integer value representing whether the red color was chosen (1) versus the blue color (0).} + \item{n_red_boxes}{Integer value representing the number of red boxes shown on the current trial: 1, 2, 3,..., or 9.} } From fd0fb1bc543ab391296c399429365144238c6181 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 28 Aug 2019 14:21:08 +0900 Subject: [PATCH 140/163] Update comments for v1.0.0 --- R/cran-comments.md | 46 ++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/R/cran-comments.md b/R/cran-comments.md index 2a0942ca..179b0048 100644 --- a/R/cran-comments.md +++ b/R/cran-comments.md @@ -6,11 +6,45 @@ ## R CMD check results -There were 1 NOTE: +There were 2 NOTE: -* checking CRAN incoming feasibility ... NOTE - Maintainer: 'Woo-Young Ahn ' - - GNU make is a SystemRequirements. +- These messages occur since it uses 'data.table'. It works fine when users run it + with 'data.table' installed. 
+``` +* checking R code for possible problems ... NOTE +bandit2arm_preprocess_func: no visible binding for global variable + ‘subjid’ +bandit4arm2_preprocess_func: no visible binding for global variable + ‘subjid’ +bandit4arm_preprocess_func: no visible binding for global variable + ‘subjid’ +bart_preprocess_func: no visible binding for global variable ‘subjid’ +cgt_preprocess_func: no visible binding for global variable ‘subjid’ +choiceRT_single_preprocess_func: no visible binding for global variable + ‘choice’ +cra_preprocess_func: no visible binding for global variable ‘subjid’ +dbdm_preprocess_func: no visible binding for global variable ‘subjid’ +dd_preprocess_func: no visible binding for global variable ‘subjid’ +gng_preprocess_func: no visible binding for global variable ‘subjid’ +igt_preprocess_func: no visible binding for global variable ‘subjid’ +peer_preprocess_func: no visible binding for global variable ‘subjid’ +prl_multipleB_preprocess_func: no visible binding for global variable + ‘subjid’ +prl_multipleB_preprocess_func: no visible binding for global variable + ‘block’ +prl_preprocess_func: no visible binding for global variable ‘subjid’ +pst_preprocess_func: no visible binding for global variable ‘subjid’ +ra_preprocess_func: no visible binding for global variable ‘subjid’ +rdt_preprocess_func: no visible binding for global variable ‘subjid’ +ts_preprocess_func: no visible binding for global variable ‘subjid’ +ug_preprocess_func: no visible binding for global variable ‘subjid’ +wcs_preprocess_func: no visible binding for global variable ‘subjid’ +Undefined global functions or variables: + block choice subjid +``` - To compile hBayesDM using rstan, GNU make is required. +- To compile hBayesDM using rstan, GNU make is required. +``` +* checking for GNU extensions in Makefiles ... NOTE +GNU make is a SystemRequirements. +``` From c8d9037bea90d5d0b6dbb269e382291415d6509a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Wed, 28 Aug 2019 14:29:09 +0900 Subject: [PATCH 141/163] Refer to the wiki --- commons/README.md | 40 +--------------------------------------- 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/commons/README.md b/commons/README.md index 058de6aa..41fde4db 100644 --- a/commons/README.md +++ b/commons/README.md @@ -16,43 +16,5 @@ ## How to add a model -1. **Clone the repository and make new branch from `develop`.** -```bash -# Clone the repository -git clone https://github.com/CCS-Lab/hBayesDM -cd hbayesdm +Please check out the [wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of the GitHub repository. -git checkout develop # Check out the develop branch -git checkout -b feature/{branch_name} # Make new branch from develop -``` -2. **Write a Stan code and a YAML file for model information, and append its example data. -You can check out [an example YAML file](./example.yml) for model information.** - - `/commons/stan_files/{task_name}_{model_name}[_{model_type}].stan` - - `/commons/models/{task_name}_{model_name}[_{model_type}].yml` - - `/commons/extdata/{task_name}[_{model_type}]_exampleData.txt` -3. **Run `/commons/generate-codes.sh` to generate R and Python codes. Note that your Python -version should be above 3.5, and [`PyYAML`][pyyaml] should be pre-installed.** -```bash -cd commons -./generate-codes.sh -``` -4. **Implement a function to preprocess data for the model.** - - R: `/R/R/preprocess_funcs.R` - - Python: `/Python/hbayesdm/preprocess_funcs.R` -5. 
**(For R) Run `devtools::document()` to apply the new function.** -```bash -cd ../R -Rscript -e 'devtools::document()' -``` -6. **Install R and Python packages.** -```bash -# For R -cd ../R -Rscript -e 'devtools::install()' - -# For Python -cd ../Python -python setup.py install -``` - -[pyyaml]: https://pyyaml.org/wiki/PyYAMLDocumentation From 16ab2262430ca7cd312406482bf43981a5f2a4ab Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 29 Aug 2019 17:20:31 +0900 Subject: [PATCH 142/163] Fix LONG_DESC_TYPE as 'text/x-rst' --- Python/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Python/setup.py b/Python/setup.py index 4c2a79bf..a7c03e0a 100644 --- a/Python/setup.py +++ b/Python/setup.py @@ -23,7 +23,7 @@ DESC = 'Python interface for hBayesDM, hierarchical Bayesian modeling of RL-DM tasks' with open('README.rst', 'r', encoding='utf-8') as f: LONG_DESC = f.read() -LONG_DESC_TYPE = 'text/restructuredtext' +LONG_DESC_TYPE = 'text/x-rst' AUTHOR = 'hBayesDM Developers' AUTHOR_EMAIL = 'hbayesdm-users@googlegroups.com' URL = 'https://github.com/CCS-Lab/hBayesDM' From 4f832eb2f78154d1f053896f7b7357f3a2774c28 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 29 Aug 2019 17:20:46 +0900 Subject: [PATCH 143/163] Avoid expressing as 'hBayesDM-py' --- Python/README.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Python/README.rst b/Python/README.rst index 442b17ff..46460c14 100644 --- a/Python/README.rst +++ b/Python/README.rst @@ -1,16 +1,16 @@ -hBayesDM-py -=========== +hBayesDM +======== This is the Python version of *hBayesDM* (hierarchical Bayesian modeling of Decision-Making tasks), a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of -decision-making tasks. *hBayesDM* uses `PyStan`_ (Python interface for +decision-making tasks. *hBayesDM* in Python uses `PyStan`_ (Python interface for `Stan`_) for Bayesian inference. .. _PyStan: https://github.com/stan-dev/pystan .. _Stan: http://mc-stan.org/ -hBayesDM-py supports Python 3.5 or higher. It requires several packages including: +It supports Python 3.5 or higher versions and requires several packages including: `NumPy`_, `SciPy`_, `Pandas`_, `PyStan`_, `Matplotlib`_, and `ArviZ`_. .. _NumPy: https://www.numpy.org/ @@ -22,7 +22,7 @@ hBayesDM-py supports Python 3.5 or higher. It requires several packages includin Installation ------------ -You can install hBayesDM-py from PyPI with the following line: +You can install hBayesDM from PyPI with the following line: .. code:: bash From 99d1d5552fe7ba1e63992238be7322d67279423a Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Thu, 29 Aug 2019 17:25:51 +0900 Subject: [PATCH 144/163] Add a documentation link --- Python/README.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Python/README.rst b/Python/README.rst index 46460c14..be5e1511 100644 --- a/Python/README.rst +++ b/Python/README.rst @@ -19,6 +19,8 @@ It supports Python 3.5 or higher versions and requires several packages includin .. _Matplotlib: https://matplotlib.org/ .. 
_ArviZ: https://arviz-devs.github.io/arviz/ +- **Documentation**: http://hbayesdm.readthedocs.io/ + Installation ------------ From 82b3af7e79cea1d46152e26bc57f6d87dd79d460 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 30 Aug 2019 10:01:28 +0900 Subject: [PATCH 145/163] Fix a line with a wrong operator --- commons/stan_files/igt_orl.stan | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/commons/stan_files/igt_orl.stan b/commons/stan_files/igt_orl.stan index a560de27..6c94d80f 100644 --- a/commons/stan_files/igt_orl.stan +++ b/commons/stan_files/igt_orl.stan @@ -34,9 +34,9 @@ transformed parameters { vector[N] betaP; for (i in 1:N) { - Arew[i] = Phi_approx( mu_pr[1] + sigma[1] * Arew_pr[i] ); - Apun[i] = Phi_approx( mu_pr[2] + sigma[2] * Apun_pr[i] ); - K[i] = Phi_approx(mu_pr[3] + sigma[3] + K_pr[i]) * 5; + Arew[i] = Phi_approx(mu_pr[1] + sigma[1] * Arew_pr[i]); + Apun[i] = Phi_approx(mu_pr[2] + sigma[2] * Apun_pr[i]); + K[i] = Phi_approx(mu_pr[3] + sigma[3] * K_pr[i]) * 5; } betaF = mu_pr[4] + sigma[4] * betaF_pr; betaP = mu_pr[5] + sigma[5] * betaP_pr; From a60d4b5e5ffd651cab1126d3cc818f710179ade9 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 30 Aug 2019 10:47:18 +0900 Subject: [PATCH 146/163] Update NEWS.md --- R/NEWS.md | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/R/NEWS.md b/R/NEWS.md index bcf080be..0df27aac 100644 --- a/R/NEWS.md +++ b/R/NEWS.md @@ -1,3 +1,31 @@ +# hBayesDM 1.0.0 + +- Major changes + - Now, hBayesDM has both R and Python version, with same models included! + You can run hBayesDM with a language you prefer! + - Models in hBayesDM are now specified as YAML files. Using the YAML files, + R and Python codes are generated automatically. If you want to contribute + hBayesDM by adding a model, what you have to do is just to write a Stan file + and to specify its information! You can find how to do in the hBayesDM wiki + (https://github.com/CCS-Lab/hBayesDM/wiki). + - Model functions try to use parameter estimates using variational Bayesian + methods as its initial values for MCMC sampling by default (#96). If VB + estimation fails, then it uses random values instead. + - The `data` argument for model functions can handle a data.frame object (#2, #98). + - `choiceRT_lba` and `choiceRT_lba_single` are temporarily removed since their codes + are not suitable to the new package structure. We plan to re-add the models + in future versions. + - The Cumulative Model for Cambridge Gambling Task is added (`cgt_cm`; #108). +- Minor changes + - The `tau` parameter in all models for the risk aversion task is modified to + be bounded to [0, 30] (#77, #78). + - `bart_4par` is fixed to compute subject-wise log-likelihood (#82). + - `extract_ic` is fixed for its wrong `rep` function usage (#94, #100). + - The drift rate (`delta` parameter) in `choiceRT_ddm` and `choiceRT_ddm_single` is + unbounded and now it is estimated between [-Inf, Inf] (#95, #107). + - Fix a preprocessing error in `choiceRT_ddm` and `choiceRT_ddm_single` (#95, #109). + - Fix `igt_orl` for a wrong Matt trick operation (#110). 
From af2d5ecfbc4db45a3e639ce6449838f6edf57af4 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 10:50:31 +0900
Subject: [PATCH 147/163] Update R documentations

---
 R/docs/news/index.html       | 31 +++++++++++++++++++++++++++++++
 R/docs/reference/cgt_cm.html |  2 +-
 2 files changed, 32 insertions(+), 1 deletion(-)

diff --git a/R/docs/news/index.html b/R/docs/news/index.html
index feb5d59f..19980f47 100644
--- a/R/docs/news/index.html
+++ b/R/docs/news/index.html
@@ -103,6 +103,36 @@

 [The hunk body is pkgdown-generated HTML whose markup was lost in extraction.
 It adds a "hBayesDM 1.0.0 (Unreleased)" section to the rendered changelog page,
 mirroring the NEWS.md entries from PATCH 146 above. A second hunk
 (@@ -307,6 +337,7 @@) adds a matching "hBayesDM 1.0.0" link to the page's
 Contents sidebar. The companion change to R/docs/reference/cgt_cm.html
 (1 insertion, 1 deletion) is not recoverable from the extracted text.]
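PATCH 146 and PATCH 147 together document the user-facing 1.0.0 changes. A minimal R sketch of two of them, passing a `data.frame` directly (#2, #98) and seeding MCMC with VB estimates (#96), may help. `ra_prospect()`, its data columns, and `niter`/`nwarmup`/`nchain`/`ncore` are part of the package's interface, but the data values below are made up, and the `inits = "vb"` string is inferred from the PATCH 149 diff below rather than verified against the full source:

```r
library(hBayesDM)

# Hypothetical long-format data with the columns ra_prospect() expects
df <- data.frame(
  subjID = rep(1:2, each = 2),
  gain   = c(10, 20, 10, 20),
  loss   = c(5, 10, 5, 10),
  cert   = c(0, 0, 0, 0),
  gamble = c(1, 0, 1, 1)
)

fit <- ra_prospect(
  data    = df,     # 1.0.0: a data.frame now works, not only a file path
  niter   = 2000,
  nwarmup = 1000,
  nchain  = 2,
  ncore   = 2,
  inits   = "vb"    # per NEWS: VB estimates seed the chains by default
)
```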
From d95ee8e0d645f0c6e12b15e5c50f0f2ab7e837eb Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 10:53:49 +0900
Subject: [PATCH 148/163] Increment patch version to upload it to PyPI

---
 Python/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Python/setup.py b/Python/setup.py
index a7c03e0a..3a7960a1 100644
--- a/Python/setup.py
+++ b/Python/setup.py
@@ -14,7 +14,7 @@
 MAJOR = 1
 MINOR = 0
-MICRO = 0
+MICRO = 1
 ISRELEASED = True
 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
 VERSION += '' if ISRELEASED else '.9000'

From c0d29b839f4ec24d7ada6ce51c169f542adfd1fc Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 11:57:23 +0900
Subject: [PATCH 149/163] Fix VB initial values to work

---
 R/R/hBayesDM_model.R | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/R/R/hBayesDM_model.R b/R/R/hBayesDM_model.R
index 669ed66e..bef5c1f3 100644
--- a/R/R/hBayesDM_model.R
+++ b/R/R/hBayesDM_model.R
@@ -386,29 +386,33 @@ hBayesDM_model <- function(task_name,
       cat("** Use VB estimates as initial values **\n")
       cat("****************************************\n")

-      tryCatch({
        fit_vb <- rstan::vb(object = stanmodel_arg, data = data_list)
        m_vb   <- colMeans(as.data.frame(fit_vb))

-        gen_init <<- function() {
+        function() {
          ret <- list(
            mu_pr = as.vector(m_vb[startsWith(names(m_vb), "mu_pr")]),
            sigma = as.vector(m_vb[startsWith(names(m_vb), "sigma")])
          )

          for (p in names(parameters)) {
-            ret[[p]] <- as.vector(m_vb[startsWith(names(m_vb), paste0(p, "_pr"))])
+            ret[[paste0(p, "_pr")]] <-
+              as.vector(m_vb[startsWith(names(m_vb), paste0(p, "_pr"))])
          }

          return(ret)
        }
-      }, error = function(e) {
+      }
+
+      gen_init <- tryCatch(make_gen_init_from_vb(), error = function(e) {
        cat("\n")
        cat("******************************************\n")
        cat("** Failed to obtain VB estimates.       **\n")
        cat("** Use random values as initial values. **\n")
        cat("******************************************\n")
-        gen_init <<- "random"
+
+        return("random")
      })
    }
  } else if (inits[1] == "random") {
@@ -426,6 +430,7 @@ hBayesDM_model <- function(task_name,
            "(= the number of parameters of this model). ",
            "Please check again. **\n")
    }
+
    if (model_type == "single") {
      gen_init <- function() {
        individual_level <- as.list(inits)
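Two things change in PATCH 149 (the added `make_gen_init_from_vb <- function() {` wrapper around the VB block is part of the same hunk). The substantive fix is in the loop: rstan matches user-supplied initial values by the names declared in the Stan `parameters` block, so the init list must use the raw names (`rho_pr`), not the transformed ones (`rho`); the old code therefore failed to seed the subject-level parameters, which appears to be why the commit is titled "Fix VB initial values to work". Replacing `<<-` inside `tryCatch()` with an assignment of `tryCatch()`'s return value is a robustness cleanup. A self-contained R sketch of the fixed naming, with fake values standing in for the VB posterior means:

```r
# Fake VB posterior means, named like flattened Stan parameters
m_vb <- c("mu_pr[1]" = 0.1, "sigma[1]" = 0.4,
          "rho_pr[1]" = -0.2, "rho_pr[2]" = 0.3)

gen_init <- function() {
  ret <- list(
    mu_pr = as.vector(m_vb[startsWith(names(m_vb), "mu_pr")]),
    sigma = as.vector(m_vb[startsWith(names(m_vb), "sigma")])
  )
  for (p in c("rho")) {           # the model's parameter names
    ret[[paste0(p, "_pr")]] <-    # fixed: list entry "rho_pr", previously "rho"
      as.vector(m_vb[startsWith(names(m_vb), paste0(p, "_pr"))])
  }
  ret
}
str(gen_init())
```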
**\n") } + if (model_type == "single") { gen_init <- function() { individual_level <- as.list(inits) From 956452cee243fe132d1eef08b20718f401db9792 Mon Sep 17 00:00:00 2001 From: Jaeyeong Yang Date: Fri, 30 Aug 2019 15:03:35 +0900 Subject: [PATCH 150/163] Add Getting Started as a vignette --- R/.Rbuildignore | 1 + R/_pkgdown.yml | 9 +- R/vignettes/bibtex/hBayesDM_bib.bib | 32844 ++++++++++++++++ R/vignettes/bibtex/hBayesDM_bib_short.bib | 567 + R/vignettes/csl/apa-short-authors.csl | 623 + R/vignettes/csl/apa_modified.csl | 495 + R/vignettes/csl/apa_modified_orig.csl | 495 + R/vignettes/csl/biomed-central.csl | 1314 + R/vignettes/css/APAStyle.css | 122 + R/vignettes/css/Rpubs.css | 11 + R/vignettes/getting_started.Rmd | 488 + R/vignettes/images/Figure3.pdf | Bin 0 -> 28433 bytes R/vignettes/images/HBA_concept.png | Bin 0 -> 25837 bytes R/vignettes/images/PPC.png | Bin 0 -> 1923549 bytes R/vignettes/images/hBayesDM_pipeLine.png | Bin 0 -> 161915 bytes R/vignettes/images/list_of_tasks_models.png | Bin 0 -> 167209 bytes R/vignettes/images/old/hBayesDM_pipeLine.png | Bin 0 -> 168786 bytes .../images/old/hBayesDM_pipeLine_v2.png | Bin 0 -> 185830 bytes .../images/old/hBayesDM_pipeLine_v3.png | Bin 0 -> 190507 bytes .../images/old/hBayesDM_pipeLine_v4.png | Bin 0 -> 187364 bytes 20 files changed, 36968 insertions(+), 1 deletion(-) create mode 100644 R/vignettes/bibtex/hBayesDM_bib.bib create mode 100644 R/vignettes/bibtex/hBayesDM_bib_short.bib create mode 100644 R/vignettes/csl/apa-short-authors.csl create mode 100644 R/vignettes/csl/apa_modified.csl create mode 100644 R/vignettes/csl/apa_modified_orig.csl create mode 100644 R/vignettes/csl/biomed-central.csl create mode 100644 R/vignettes/css/APAStyle.css create mode 100644 R/vignettes/css/Rpubs.css create mode 100644 R/vignettes/getting_started.Rmd create mode 100644 R/vignettes/images/Figure3.pdf create mode 100644 R/vignettes/images/HBA_concept.png create mode 100644 R/vignettes/images/PPC.png create mode 100644 R/vignettes/images/hBayesDM_pipeLine.png create mode 100644 R/vignettes/images/list_of_tasks_models.png create mode 100644 R/vignettes/images/old/hBayesDM_pipeLine.png create mode 100644 R/vignettes/images/old/hBayesDM_pipeLine_v2.png create mode 100644 R/vignettes/images/old/hBayesDM_pipeLine_v3.png create mode 100644 R/vignettes/images/old/hBayesDM_pipeLine_v4.png diff --git a/R/.Rbuildignore b/R/.Rbuildignore index 50a51ba3..06470931 100644 --- a/R/.Rbuildignore +++ b/R/.Rbuildignore @@ -18,3 +18,4 @@ ^_pkgdown\.yml$ ^codecov\.yml$ ^cran-comments\.md$ +^vignettes/ diff --git a/R/_pkgdown.yml b/R/_pkgdown.yml index 3e7016f3..dcef2390 100644 --- a/R/_pkgdown.yml +++ b/R/_pkgdown.yml @@ -17,7 +17,7 @@ reference: - starts_with("ts_") - starts_with("ug_") - starts_with("wcs_") -- title: Functions +- title: Diagnostics contents: - estimate_mode - extract_ic @@ -28,3 +28,10 @@ reference: - plotInd - printFit - rhat + +articles: +- title: Getting started + contents: getting_started + +toc: + depth: 1 diff --git a/R/vignettes/bibtex/hBayesDM_bib.bib b/R/vignettes/bibtex/hBayesDM_bib.bib new file mode 100644 index 00000000..0f7ca324 --- /dev/null +++ b/R/vignettes/bibtex/hBayesDM_bib.bib @@ -0,0 +1,32844 @@ +%% This BibTeX bibliography file was created using BibDesk. 
+%% http://bibdesk.sourceforge.net/ + + +%% Created for Woo-Young Ahn at 2016-07-16 23:12:03 -0700 + + +%% Saved with string encoding Unicode (UTF-8) + + +@string{jgr = {J.~Geophys.~Res.}} + + +@article{den2013dissociable, + Author = {den Ouden, Hanneke EM and Daw, Nathaniel D and Fernandez, Guill{\'e}n and Elshout, Joris A and Rijpkema, Mark and Hoogman, Martine and Franke, Barbara and Cools, Roshan}, + Date-Added = {2016-07-17 06:12:00 +0000}, + Date-Modified = {2016-07-17 06:12:00 +0000}, + Journal = {Neuron}, + Number = {4}, + Pages = {1090--1100}, + Publisher = {Elsevier}, + Title = {Dissociable effects of dopamine and serotonin on reversal learning}, + Volume = {80}, + Year = {2013}} + +@article{newman1986passive, + Author = {Newman, Joseph P. and Kosson, David S.}, + Date-Added = {2016-07-17 04:39:36 +0000}, + Date-Modified = {2016-07-17 05:16:15 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {3}, + Pages = {252}, + Publisher = {American Psychological Association}, + Title = {Passive avoidance learning in psychopathic and nonpsychopathic offenders.}, + Volume = {95}, + Year = {1986}} + +@article{gu2015necessary, + Author = {Gu, Xiaosi and Wang, Xingchao and Hula, Andreas and Wang, Shiwei and Xu, Shuai and Lohrenz, Terry M and Knight, Robert T and Gao, Zhixian and Dayan, Peter and Montague, P Read}, + Date-Added = {2016-07-17 04:19:26 +0000}, + Date-Modified = {2016-07-17 04:19:26 +0000}, + Journal = {The Journal of Neuroscience}, + Number = {2}, + Pages = {467--473}, + Publisher = {Soc Neuroscience}, + Title = {Necessary, yet dissociable contributions of the insular and ventromedial prefrontal cortices to norm adaptation: computational and lesion evidence in humans}, + Volume = {35}, + Year = {2015}} + +@article{o2007model, + Author = {{O'Doherty}, John P. and Hampton, Alan and Kim, Hackjin}, + Date-Added = {2016-04-20 19:01:45 +0000}, + Date-Modified = {2016-04-20 19:02:04 +0000}, + Journal = {Annals of the New York Academy of sciences}, + Number = {1}, + Pages = {35--53}, + Publisher = {Wiley Online Library}, + Title = {Model-based fMRI and its application to reward learning and decision making}, + Volume = {1104}, + Year = {2007}} + +@book{lee2014bayesian, + Author = {Lee, Michael D. 
and Wagenmakers, Eric-Jan}, + Date-Added = {2016-03-24 02:31:25 +0000}, + Date-Modified = {2016-03-24 02:35:02 +0000}, + Publisher = {Cambridge University Press}, + Title = {Bayesian cognitive modeling: A practical course}, + Year = {2014}} + +@article{steingroever2014absolute, + Author = {Steingroever, Helen and Wetzels, Ruud and Wagenmakers, Eric-Jan}, + Date-Added = {2016-03-23 14:22:04 +0000}, + Date-Modified = {2016-03-23 14:22:04 +0000}, + Journal = {Decision}, + Number = {3}, + Pages = {161}, + Publisher = {Educational Publishing Foundation}, + Title = {Absolute performance of reinforcement-learning models for the Iowa Gambling Task.}, + Volume = {1}, + Year = {2014}} + +@book{kruschke2014doing, + Author = {Kruschke, John}, + Date-Added = {2016-03-23 14:21:35 +0000}, + Date-Modified = {2016-03-24 05:24:34 +0000}, + Publisher = {Academic Press}, + Title = {Doing Bayesian data analysis: A tutorial with {R}, {JAGS}, and {S}tan}, + Year = {2014}} + +@article{daw2011trial, + Author = {Daw, Nathaniel D}, + Date-Added = {2016-03-23 13:47:17 +0000}, + Date-Modified = {2016-03-23 13:47:17 +0000}, + Journal = {Decision making, affect, and learning: Attention and performance XXIII}, + Pages = {3--38}, + Publisher = {Oxford University Press Oxford}, + Title = {Trial-by-trial data analysis using computational models}, + Volume = {23}, + Year = {2011}} + +@article{vehtari2015e, + Author = {Vehtari, Aki and Gelman, Andrew and Gabry, Jonah}, + Date-Added = {2016-03-21 01:44:45 +0000}, + Date-Modified = {2016-03-21 01:46:42 +0000}, + Journal = {arXiv preprint arXiv:1507.04544}, + Title = {Efficient implementation of leave-one-out cross-validation and {WAIC} for evaluating fitted {B}ayesian models}, + Year = {2015}} + +@article{xiang2013, + Author = {Xiang, Ting and Lohrenz, Terry and Montague, P Read}, + Date-Added = {2016-03-20 23:14:07 +0000}, + Date-Modified = {2016-03-20 23:14:12 +0000}, + Journal = {The Journal of Neuroscience}, + Number = {3}, + Pages = {1099--1108}, + Publisher = {Soc Neuroscience}, + Title = {Computational substrates of norms and their violations during social exchange}, + Volume = {33}, + Year = {2013}} + +@article{daw2011, + Author = {Daw, Nathaniel D and Gershman, Samuel J and Seymour, Ben and Dayan, Peter and Dolan, Raymond J}, + Date-Added = {2016-03-20 23:12:58 +0000}, + Date-Modified = {2016-03-20 23:13:03 +0000}, + Journal = {Neuron}, + Number = {6}, + Pages = {1204--1215}, + Publisher = {Elsevier}, + Title = {Model-based influences on humans' choices and striatal prediction errors}, + Volume = {69}, + Year = {2011}} + +@article{erev2010choice, + Author = {Erev, Ido and Ert, Eyal and Roth, Alvin E and Haruvy, Ernan and Herzog, Stefan M and Hau, Robin and Hertwig, Ralph and Stewart, Terrence and West, Robert and Lebiere, Christian}, + Date-Added = {2016-03-20 22:26:03 +0000}, + Date-Modified = {2016-03-20 22:26:03 +0000}, + Journal = {Journal of Behavioral Decision Making}, + Number = {1}, + Pages = {15--47}, + Publisher = {Wiley Online Library}, + Title = {A choice prediction competition: Choices from experience and from description}, + Volume = {23}, + Year = {2010}} + +@article{sokol2009, + Author = {Sokol-Hessner, Peter and Hsu, Ming and Curley, Nina G and Delgado, Mauricio R and Camerer, Colin F and Phelps, Elizabeth A}, + Date-Added = {2016-03-20 22:23:28 +0000}, + Date-Modified = {2016-03-20 22:23:43 +0000}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {13}, + Pages = {5035--5040}, + Publisher = {National Acad Sciences}, + Title = 
{Thinking like a trader selectively reduces individuals' loss aversion}, + Volume = {106}, + Year = {2009}} + +@article{glascher2009, + Author = {Gl{\"a}scher, Jan and Hampton, Alan N and O'Doherty, John P}, + Date-Added = {2016-03-20 22:22:37 +0000}, + Date-Modified = {2016-03-20 22:23:03 +0000}, + Journal = {Cerebral cortex}, + Number = {2}, + Pages = {483--495}, + Publisher = {Oxford Univ Press}, + Title = {Determining a role for ventromedial prefrontal cortex in encoding action-based value signals during reward-related decision making}, + Volume = {19}, + Year = {2009}} + +@article{cavanagh2013jn, + Author = {Cavanagh, James F and Eisenberg, Ian and Guitart-Masip, Marc and Huys, Quentin J. M. and Frank, Michael J}, + Date-Added = {2016-03-20 22:21:33 +0000}, + Date-Modified = {2016-03-23 13:36:07 +0000}, + Journal = {The Journal of Neuroscience}, + Number = {19}, + Pages = {8541--8548}, + Publisher = {Soc Neuroscience}, + Title = {Frontal theta overrides pavlovian learning biases}, + Volume = {33}, + Year = {2013}} + +@article{guitart2012go, + Author = {Guitart-Masip, Marc and Huys, Quentin J. M. and Fuentemilla, Lluis and Dayan, Peter and Duzel, Emrah and Dolan, Raymond J}, + Date-Added = {2016-03-20 22:20:51 +0000}, + Date-Modified = {2016-03-23 13:35:42 +0000}, + Journal = {Neuroimage}, + Number = {1}, + Pages = {154--166}, + Publisher = {Elsevier}, + Title = {Go and no-go learning in reward and punishment: interactions between affect and effect}, + Volume = {62}, + Year = {2012}} + +@article{worthy2013, + Author = {Worthy, Darrell A and Pang, Bo and Byrne, Kaileigh A}, + Date-Added = {2016-03-20 22:19:45 +0000}, + Date-Modified = {2016-03-20 22:19:52 +0000}, + Journal = {Frontiers in psychology}, + Pages = {640}, + Publisher = {Frontiers}, + Title = {Decomposing the roles of perseveration and expected value representation in models of the Iowa gambling task}, + Volume = {4}, + Year = {2013}} + +@article{ebert2007, + Author = {Ebert, Jane EJ and Prelec, Drazen}, + Date-Added = {2016-03-20 22:18:56 +0000}, + Date-Modified = {2016-03-20 22:19:06 +0000}, + Journal = {Management science}, + Number = {9}, + Pages = {1423--1438}, + Publisher = {INFORMS}, + Title = {The fragility of time: Time-insensitivity and valuation of the near and far future}, + Volume = {53}, + Year = {2007}} + +@article{ahn2008cogsci, + Author = {Ahn, Woo-Young and Busemeyer, Jerome R. and Wagenmakers, Eric-Jan and Stout, Julie C}, + Date-Added = {2016-03-20 22:18:07 +0000}, + Date-Modified = {2016-03-24 02:37:40 +0000}, + Journal = {Cognitive Science}, + Number = {8}, + Pages = {1376--1402}, + Publisher = {Wiley Online Library}, + Title = {Comparison of decision learning models using the generalization criterion method}, + Volume = {32}, + Year = {2008}} + +@article{ahn2014decision, + Author = {Ahn, Woo-Young and Vasilev, Georgi and Lee, Sung-Ha and Busemeyer, Jerome R. 
and Kruschke, John K and Bechara, Antoine and Vassileva, Jasmin}, + Date-Added = {2016-03-20 22:17:36 +0000}, + Date-Modified = {2016-03-23 13:56:53 +0000}, + Journal = {Frontiers in psychology}, + Pages = {849}, + Publisher = {Citeseer}, + Title = {Decision-making in stimulant and opiate addicts in protracted abstinence: Evidence from computational modeling with pure users}, + Volume = {5}, + Year = {2014}} + +@article{samuelson1937, + Author = {Samuelson, Paul A}, + Date-Added = {2016-03-20 21:16:17 +0000}, + Date-Modified = {2016-03-20 21:17:16 +0000}, + Journal = {The Review of Economic Studies}, + Number = {2}, + Pages = {155--161}, + Publisher = {JSTOR}, + Title = {A note on measurement of utility}, + Volume = {4}, + Year = {1937}} + +@article{crockett2009reconciling, + Author = {Crockett, Molly J and Clark, Luke and Robbins, Trevor W}, + Date-Added = {2013-06-17 09:15:40 +0000}, + Date-Modified = {2013-06-17 09:15:40 +0000}, + Journal = {The Journal of Neuroscience}, + Number = {38}, + Pages = {11993--11999}, + Publisher = {Soc Neuroscience}, + Title = {Reconciling the role of serotonin in behavioral inhibition and aversion: acute tryptophan depletion abolishes punishment-induced inhibition in humans}, + Volume = {29}, + Year = {2009}} + +@article{glascher2010states, + Author = {Gl{\"a}scher, Jan and Daw, Nathaniel and Dayan, Peter and O'Doherty, John P}, + Date-Added = {2013-06-17 06:40:08 +0000}, + Date-Modified = {2013-06-17 06:40:08 +0000}, + Journal = {Neuron}, + Number = {4}, + Pages = {585--595}, + Publisher = {Elsevier}, + Title = {States versus rewards: dissociable neural prediction error signals underlying model-based and model-free reinforcement learning}, + Volume = {66}, + Year = {2010}} + +@article{o2011contributions, + Author = {O'Doherty, John P}, + Date-Added = {2013-06-17 05:23:27 +0000}, + Date-Modified = {2013-06-17 05:23:27 +0000}, + Journal = {Annals of the New York Academy of Sciences}, + Number = {1}, + Pages = {118--129}, + Publisher = {Wiley Online Library}, + Title = {Contributions of the ventromedial prefrontal cortex to goal-directed action selection}, + Volume = {1239}, + Year = {2011}} + +@article{huys2011disentangling, + Author = {Huys, Quentin J. M. 
and Cools, Roshan and G{\"o}lzer, Martin and Friedel, Eva and Heinz, Andreas and Dolan, Raymond J and Dayan, Peter}, + Date-Added = {2013-06-17 04:43:59 +0000}, + Date-Modified = {2016-03-23 13:35:29 +0000}, + Journal = {PLoS computational biology}, + Number = {4}, + Pages = {e1002028}, + Publisher = {Public Library of Science}, + Title = {Disentangling the roles of approach, activation and valence in instrumental and pavlovian responding}, + Volume = {7}, + Year = {2011}} + +@article{hershberger1986approach, + Author = {Hershberger, Wayne A}, + Date-Added = {2013-06-17 03:26:15 +0000}, + Date-Modified = {2013-06-17 03:26:15 +0000}, + Journal = {Animal Learning \& Behavior}, + Number = {4}, + Pages = {443--451}, + Publisher = {Springer}, + Title = {An approach through the looking-glass}, + Volume = {14}, + Year = {1986}} + +@article{dickinson2002role, + Author = {Dickinson, Anthony and Balleine, Bernard}, + Date-Added = {2013-06-17 03:23:42 +0000}, + Date-Modified = {2013-06-17 03:23:42 +0000}, + Journal = {Stevens' handbook of experimental psychology}, + Publisher = {Wiley Online Library}, + Title = {The role of learning in the operation of motivational systems}, + Year = {2002}} + +@article{o1998conditioning, + Author = {O'Brien, Charles P and Childress, Anna Rose and Ehrman, Ronald and Robbins, Steven J}, + Date-Added = {2013-06-17 03:22:02 +0000}, + Date-Modified = {2013-06-17 03:22:02 +0000}, + Journal = {Journal of Psychopharmacology}, + Number = {1}, + Pages = {15--22}, + Publisher = {Sage Publications}, + Title = {Conditioning factors in drug abuse: can they explain compulsion?}, + Volume = {12}, + Year = {1998}} + +@article{hare2009self, + Author = {Hare, Todd A and Camerer, Colin F and Rangel, Antonio}, + Date-Added = {2013-06-17 01:33:55 +0000}, + Date-Modified = {2013-06-17 01:33:55 +0000}, + Journal = {Science}, + Number = {5927}, + Pages = {646--648}, + Publisher = {American Association for the Advancement of Science}, + Title = {Self-control in decision-making involves modulation of the vmPFC valuation system}, + Volume = {324}, + Year = {2009}} + +@techreport{fraley2012mclust, + Author = {Fraley, Chris and Raftery, Adrian E and Murphy, T Brendan and Scrucca, Luca}, + Date-Added = {2013-06-16 04:33:09 +0000}, + Date-Modified = {2013-06-16 04:33:09 +0000}, + Institution = {Technical Report}, + Title = {mclust Version 4 for R: Normal Mixture Modeling for Model-Based Clustering, Classification, and Density Estimation}, + Year = {2012}} + +@misc{stan-software:2013, + Author = {{Stan Development Team}}, + Date-Added = {2013-06-15 17:59:10 +0000}, + Date-Modified = {2013-06-15 17:59:10 +0000}, + Title = {Stan: A C++ Library for Probability and Sampling, Version 1.3}, + Url = {http://mc-stan.org/}, + Year = 2013, + Bdsk-Url-1 = {http://mc-stan.org/}} + +@article{gelman1992inference, + Author = {Gelman, Andrew and Rubin, Donald B}, + Date-Added = {2013-06-15 17:07:30 +0000}, + Date-Modified = {2013-06-15 17:07:30 +0000}, + Journal = {Statistical science}, + Pages = {457--472}, + Publisher = {JSTOR}, + Title = {Inference from iterative simulation using multiple sequences}, + Year = {1992}} + +@article{mihatsch2002risk, + Author = {Mihatsch, O. and Neuneier, R.}, + Date-Added = {2013-02-06 20:52:58 +0000}, + Date-Modified = {2013-02-06 20:52:58 +0000}, + Journal = {Machine Learning}, + Number = {2}, + Pages = {267--290}, + Publisher = {Springer}, + Title = {Risk-sensitive reinforcement learning}, + Volume = {49}, + Year = {2002}} + +@article{d2009neural, + Author = {d'Acremont, M. 
and Lu, Z.L. and Li, X. and Van der Linden, M. and Bechara, A.}, + Date-Added = {2013-02-05 19:18:30 +0000}, + Date-Modified = {2013-02-05 19:18:30 +0000}, + Journal = {Neuroimage}, + Number = {4}, + Pages = {1929--1939}, + Publisher = {Elsevier}, + Title = {Neural correlates of risk prediction error during reinforcement learning in humans}, + Volume = {47}, + Year = {2009}} + +@article{niv2012neural, + Author = {Niv, Y. and Edlund, J.A. and Dayan, P. and O'Doherty, J.P.}, + Date-Added = {2013-02-05 18:59:43 +0000}, + Date-Modified = {2013-02-05 18:59:43 +0000}, + Journal = {The Journal of Neuroscience}, + Number = {2}, + Pages = {551--562}, + Publisher = {Soc Neuroscience}, + Title = {Neural prediction errors reveal a risk-sensitive reinforcement-learning process in the human brain}, + Volume = {32}, + Year = {2012}} + +@article{herrnstein1961relative, + Author = {Herrnstein, R.J.}, + Date-Added = {2012-09-25 13:34:44 +0000}, + Date-Modified = {2012-09-25 13:34:44 +0000}, + Journal = {Journal of the experimental analysis of behavior}, + Number = {3}, + Pages = {267}, + Publisher = {Society for the Experimental Analysis of Behavior}, + Title = {Relative and absolute strength of response as a function of frequency of reinforcement}, + Volume = {4}, + Year = {1961}} + +@article{loomes1982regret, + Author = {Loomes, G. and Sugden, R.}, + Date-Added = {2012-09-25 13:08:37 +0000}, + Date-Modified = {2012-09-25 13:08:37 +0000}, + Journal = {The Economic Journal}, + Number = {368}, + Pages = {805--824}, + Publisher = {JSTOR}, + Title = {Regret theory: An alternative theory of rational choice under uncertainty}, + Volume = {92}, + Year = {1982}} + +@article{bell1982regret, + Author = {Bell, D.E.}, + Date-Added = {2012-09-25 13:07:55 +0000}, + Date-Modified = {2012-09-25 13:07:55 +0000}, + Journal = {Operations research}, + Number = {5}, + Pages = {961--981}, + Publisher = {INFORMS}, + Title = {Regret in decision making under uncertainty}, + Volume = {30}, + Year = {1982}} + +@article{coricelli2007brain, + Author = {Coricelli, G. and Dolan, R.J. and Sirigu, A. and others}, + Date-Added = {2012-09-25 04:57:13 +0000}, + Date-Modified = {2012-09-25 04:57:13 +0000}, + Journal = {Trends in cognitive sciences}, + Number = {6}, + Pages = {258--265}, + Publisher = {Elsevier Science}, + Title = {Brain, emotion and decision making: the paradigmatic example of regret}, + Volume = {11}, + Year = {2007}} + +@article{pessoa2008relationship, + Author = {Pessoa, L.}, + Date-Added = {2012-09-25 04:34:45 +0000}, + Date-Modified = {2012-09-25 04:34:45 +0000}, + Journal = {Nature Reviews Neuroscience}, + Number = {2}, + Pages = {148--158}, + Publisher = {Nature Publishing Group}, + Title = {On the relationship between emotion and cognition}, + Volume = {9}, + Year = {2008}} + +@article{Weber2009mindful, + Author = {Weber, E. U. and Johnson, E. 
J.}, + Date-Added = {2012-09-20 12:31:57 +0000}, + Date-Modified = {2012-09-20 12:32:07 +0000}, + Journal = {Annual review of psychology}, + Pages = {53--85}, + Publisher = {Annual Reviews}, + Title = {Mindful judgment and decision making}, + Volume = {60}, + Year = {2009}} + +@article{Lee2011hba, + Author = {Lee, Michael D.}, + Date-Added = {2012-09-20 12:29:03 +0000}, + Date-Modified = {2016-03-24 02:35:12 +0000}, + Journal = {Journal of Mathematical Psychology}, + Number = {1}, + Pages = {1--7}, + Publisher = {Elsevier}, + Title = {How cognitive modeling can benefit from hierarchical Bayesian models}, + Volume = {55}, + Year = {2011}} + +@article{mellers1997decision, + Author = {Mellers, B.A. and Schwartz, A. and Ho, K. and Ritov, I.}, + Date-Added = {2012-09-20 12:27:58 +0000}, + Date-Modified = {2012-09-20 12:27:58 +0000}, + Journal = {Psychological Science}, + Pages = {423--429}, + Publisher = {JSTOR}, + Title = {Decision affect theory: Emotional reactions to the outcomes of risky options}, + Year = {1997}} + +@article{yechiam2012effect, + Author = {Yechiam, E. and Rakow, T.}, + Date-Added = {2012-09-20 12:26:53 +0000}, + Date-Modified = {2012-09-20 12:26:53 +0000}, + Journal = {Experimental Psychology (formerly Zeitschrift f{\"u}r Experimentelle Psychologie)}, + Number = {2}, + Pages = {55--67}, + Publisher = {Hogrefe \& Huber}, + Title = {The Effect of Foregone Outcomes on Choices From Experience}, + Volume = {59}, + Year = {2012}} + +@article{Mellers1999, + Author = {Mellers, B. and Schwartz, A. and Ritov, I.}, + Date-Added = {2012-09-20 12:25:17 +0000}, + Date-Modified = {2012-09-20 12:25:35 +0000}, + Journal = {Journal of Experimental Psychology: General}, + Number = {3}, + Pages = {332}, + Publisher = {American Psychological Association}, + Title = {Emotion-based choice.}, + Volume = {128}, + Year = {1999}} + +@article{boorman2011, + Author = {Boorman, E.D. and Behrens, T.E. and Rushworth, M.F.}, + Date-Added = {2012-09-20 12:20:48 +0000}, + Date-Modified = {2012-09-20 12:20:57 +0000}, + Journal = {PLoS biology}, + Number = {6}, + Pages = {e1001093}, + Publisher = {Public Library of Science}, + Title = {Counterfactual choice and learning in a neural network centered on human lateral frontopolar cortex}, + Volume = {9}, + Year = {2011}} + +@article{Camille2004, + Author = {Camille, N. and Coricelli, G. and Sallet, J. and Pradat-Diehl, P. and Duhamel, J.R. and Sirigu, A.}, + Date-Added = {2012-09-19 13:07:46 +0000}, + Date-Modified = {2012-09-19 13:08:00 +0000}, + Journal = {Science}, + Number = {5674}, + Pages = {1167--1170}, + Publisher = {American Association for the Advancement of Science}, + Title = {The involvement of the orbitofrontal cortex in the experience of regret}, + Volume = {304}, + Year = {2004}} + +@article{sokol2012emotion, + Author = {Sokol-Hessner, P. and Camerer, C.F. and Phelps, E.A.}, + Date-Added = {2012-08-21 04:29:42 +0000}, + Date-Modified = {2012-08-21 04:29:42 +0000}, + Journal = {Social Cognitive and Affective Neuroscience}, + Publisher = {Oxford University Press}, + Title = {Emotion regulation reduces loss aversion and decreases amygdala responses to losses}, + Year = {2012}} + +@article{de2010amygdala, + Author = {De Martino, B. and Camerer, C.F. 
and Adolphs, R.}, + Date-Added = {2012-08-21 04:27:54 +0000}, + Date-Modified = {2012-08-21 04:27:54 +0000}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {8}, + Pages = {3788--3792}, + Publisher = {National Acad Sciences}, + Title = {Amygdala damage eliminates monetary loss aversion}, + Volume = {107}, + Year = {2010}} + +@article{herrnstein1961, + Author = {Herrnstein, R. J.}, + Date-Added = {2012-07-31 21:44:44 +0000}, + Date-Modified = {2012-07-31 21:44:57 +0000}, + Journal = {Journal of the Experimental Analysis of Behavior}, + Number = {3}, + Pages = {267}, + Publisher = {Society for the Experimental Analysis of Behavior}, + Title = {Relative and absolute strength of response as a function of frequency of reinforcement}, + Volume = {4}, + Year = {1961}} + +@article{finn2002motivation, + Author = {Finn, P. R.}, + Date-Added = {2012-05-21 17:18:31 +0000}, + Date-Modified = {2012-05-21 17:18:39 +0000}, + Journal = {Behavioral and Cognitive Neuroscience Reviews}, + Number = {3}, + Pages = {183--205}, + Title = {Motivation, working memory, and decision making: A cognitive-motivational theory of personality vulnerability to alcoholism}, + Volume = {1}, + Year = {2002}} + +@article{Endres2011, + Author = {Endres, M. J. and Rickert, M. E. and Bogg, T. and Lucas, J. and Finn, P. R.}, + Date-Added = {2012-05-21 17:16:50 +0000}, + Date-Modified = {2012-05-21 17:17:06 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {2}, + Pages = {336}, + Publisher = {American Psychological Association}, + Title = {Externalizing psychopathology and behavioral disinhibition: Working memory mediates signal discriminability and reinforcement moderates response bias in approach--avoidance learning.}, + Volume = {120}, + Year = {2011}} + +@article{Chamberlin1890, + Author = {Chamberlin, T. C.}, + Date-Added = {2012-05-21 16:55:29 +0000}, + Date-Modified = {2012-05-21 16:55:36 +0000}, + Journal = {Science}, + Number = {366}, + Pages = {92--96}, + Title = {The method of multiple working hypotheses}, + Volume = {15}, + Year = {1890}} + +@inproceedings{angest1999epidemiology, + Author = {Angest, J.}, + Booktitle = {Third International Conference on Bipolar Disorder. Pittsburgh: University of Pittsburgh}, + Date-Added = {2012-05-21 15:53:23 +0000}, + Date-Modified = {2012-05-21 15:53:23 +0000}, + Title = {Epidemiology of bipolar spectrum disorder in communitybased studies}, + Year = {1999}} + +@article{mitchell2001clinical, + Author = {Mitchell, P. B. and Wilhelm, K. and Parker, G. and Austin, M. P. and Rutgers, P. and Malhi, G. S.}, + Date-Added = {2012-05-21 15:44:06 +0000}, + Date-Modified = {2012-05-21 15:44:17 +0000}, + Journal = {Journal of clinical psychiatry}, + Publisher = {Physicians Postgraduate Press}, + Title = {The clinical features of bipolar depression: a comparison with matched major depressive disorder patients.}, + Year = {2001}} + +@article{mitchell1992there, + Author = {Mitchell, P. B. and Parker, G. and Jamieson, K. and Wilhelm, K. and Hickie, I. and Brodaty, H. and Boyce, P. and Hadzi-Pavlovic, D. and Roy, K.}, + Date-Added = {2012-05-21 15:41:45 +0000}, + Date-Modified = {2012-05-21 15:49:29 +0000}, + Journal = {Journal of affective disorders}, + Number = {2}, + Pages = {97--105}, + Publisher = {Elsevier}, + Title = {Are there any differences between bipolar and unipolar melancholia?}, + Volume = {25}, + Year = {1992}} + +@article{angst2006atypical, + Author = {Angst, J. and Gamma, A. and Benazzi, F. and Silverstein, B. and Ajdacic--Gross, V. 
and Eich, D. and R{\"o}ssler, W.}, + Date-Added = {2012-05-21 15:41:06 +0000}, + Date-Modified = {2012-05-21 15:41:06 +0000}, + Journal = {European archives of psychiatry and clinical neuroscience}, + Number = {1}, + Pages = {44--54}, + Publisher = {Springer}, + Title = {Atypical depressive syndromes in varying definitions}, + Volume = {256}, + Year = {2006}} + +@article{smith2006hypomania, + Author = {Smith, D. J. and Ghaemi, S. N.}, + Date-Added = {2012-05-21 15:23:06 +0000}, + Date-Modified = {2012-05-21 15:23:15 +0000}, + Journal = {Advances in Psychiatric Treatment}, + Number = {2}, + Pages = {110--120}, + Publisher = {RCP}, + Title = {Hypomania in clinical practice}, + Volume = {12}, + Year = {2006}} + +@article{Hirschfeld2001, + Author = {Hirschfeld, R.M.A. and others}, + Date-Added = {2012-05-21 15:22:27 +0000}, + Date-Modified = {2012-05-21 15:22:38 +0000}, + Journal = {Journal of Clinical Psychiatry}, + Pages = {5--9}, + Title = {Bipolar spectrum disorder: improving its recognition and diagnosis}, + Volume = {62}, + Year = {2001}} + +@article{alexander2011medial, + Author = {Alexander, W. H. and Brown, J. W.}, + Date-Added = {2012-05-14 16:55:25 +0000}, + Date-Modified = {2012-05-14 16:55:32 +0000}, + Journal = {Nature neuroscience}, + Number = {10}, + Pages = {1338--1344}, + Publisher = {Nature Publishing Group}, + Title = {Medial prefrontal cortex as an action-outcome predictor}, + Volume = {14}, + Year = {2011}} + +@article{gehring2002medial, + Author = {Gehring, W.J. and Willoughby, A.R.}, + Date-Added = {2012-05-14 15:57:52 +0000}, + Date-Modified = {2012-05-14 15:57:52 +0000}, + Journal = {Science}, + Number = {5563}, + Pages = {2279--2282}, + Publisher = {American Association for the Advancement of Science}, + Title = {The medial frontal cortex and the rapid processing of monetary gains and losses}, + Volume = {295}, + Year = {2002}} + +@article{rainville1997pain, + Author = {Rainville, P. and Duncan, G.H. and Price, D.D. and Carrier, B. and Bushnell, M.C.}, + Date-Added = {2012-05-14 15:56:46 +0000}, + Date-Modified = {2012-05-14 15:56:46 +0000}, + Journal = {Science}, + Number = {5328}, + Pages = {968}, + Publisher = {American Association for the Advancement of Science}, + Title = {Pain affect encoded in human anterior cingulate but not somatosensory cortex}, + Volume = {277}, + Year = {1997}} + +@article{Platt1964, + Author = {Platt, J. R.}, + Date-Added = {2012-05-14 09:49:59 +0000}, + Date-Modified = {2012-05-14 09:50:12 +0000}, + Journal = {science}, + Number = {3642}, + Pages = {347--353}, + Title = {Strong inference}, + Volume = {146}, + Year = {1964}} + +@article{johnson2008algorithm, + Author = {Johnson, M. W. and Bickel, W. K.}, + Date-Added = {2012-05-14 08:35:29 +0000}, + Date-Modified = {2012-05-14 08:35:36 +0000}, + Journal = {Experimental and clinical psychopharmacology}, + Number = {3}, + Pages = {264}, + Publisher = {American Psychological Association}, + Title = {An algorithm for identifying nonsystematic delay-discounting data.}, + Volume = {16}, + Year = {2008}} + +@book{AhnInPress_NeuralChapter, + Author = {Ahn, W.-Y. and Jessup, R. K. and Busemeyer, J. R.}, + Date-Added = {2012-05-14 08:02:30 +0000}, + Date-Modified = {2012-05-14 08:04:59 +0000}, + Editor = {Yuejia & Z.-L. 
Lu}, + Publisher = {Peking University Press}, + Series = {Progress in Cognitive Science: From Cellular Mechanisms to Computational Theories}, + Title = {Building bridges between neuroscience and complex decision making behavior}, + Year = {in press}} + +@article{silverstone1984response, + Author = {Silverstone, T.}, + Date-Added = {2012-05-14 07:54:55 +0000}, + Date-Modified = {2012-05-14 07:54:55 +0000}, + Journal = {The Lancet}, + Number = {8382}, + Pages = {903--904}, + Publisher = {Elsevier}, + Title = {Response to bromocriptine distinguishes bipolar from unipolar depression}, + Volume = {323}, + Year = {1984}} + +@article{van1980central, + Author = {Van Praag, H. M.}, + Date-Added = {2012-05-14 07:53:51 +0000}, + Date-Modified = {2012-05-14 07:53:58 +0000}, + Journal = {Comprehensive psychiatry}, + Number = {1}, + Pages = {30--43}, + Publisher = {Elsevier}, + Title = {Central monoamine metabolism in depressions. I. Serotonin and related compounds}, + Volume = {21}, + Year = {1980}} + +@article{jacobs1986dextroamphetamine, + Author = {Jacobs, D. and Silverstone, T.}, + Date-Added = {2012-05-14 07:52:35 +0000}, + Date-Modified = {2012-05-14 07:52:35 +0000}, + Journal = {Psychological medicine}, + Number = {02}, + Pages = {323--329}, + Publisher = {Cambridge Univ Press}, + Title = {Dextroamphetamine-induced arousal in human subjects as a model for mania}, + Volume = {16}, + Year = {1986}} + +@article{Mazur1987, + Author = {Mazur, J. E.}, + Date-Added = {2012-05-14 07:01:59 +0000}, + Date-Modified = {2012-05-14 07:02:08 +0000}, + Publisher = {Lawrence Erlbaum Associates, Inc}, + Title = {An adjusting procedure for studying delayed reinforcement.}, + Year = {1987}} + +@article{pizzagalli2005toward, + Author = {Pizzagalli, D. A. and Jahn, A. L. and O'Shea, J. P.}, + Date-Added = {2012-05-14 06:41:58 +0000}, + Date-Modified = {2012-05-14 06:42:07 +0000}, + Journal = {Biological Psychiatry}, + Number = {4}, + Pages = {319--327}, + Publisher = {Elsevier}, + Title = {Toward an objective characterization of an anhedonic phenotype: a signal-detection approach}, + Volume = {57}, + Year = {2005}} + +@article{pizzagalli2008euthymic, + Author = {Pizzagalli, D. A. and Goetz, E. and Ostacher, M. and Iosifescu, D.V. and Perlis, R.H.}, + Date-Added = {2012-05-14 06:40:58 +0000}, + Date-Modified = {2012-05-14 06:41:06 +0000}, + Journal = {Biological psychiatry}, + Number = {2}, + Pages = {162--168}, + Publisher = {Elsevier}, + Title = {Euthymic patients with bipolar disorder show decreased reward learning in a probabilistic reward task}, + Volume = {64}, + Year = {2008}} + +@book{Kruschke2011book, + Author = {Kruschke, John K.}, + Date-Added = {2012-05-14 06:35:58 +0000}, + Date-Modified = {2012-05-14 06:35:58 +0000}, + Publisher = {Academic Press / Elsevier}, + Title = {Doing {B}ayesian Data Analysis: A Tutorial with {R} and {BUGS}}, + Year = {2011}} + +@article{weissman1996cross, + Author = {Weissman, M. M. and Bland, R. C. and Canino, G. J. and Faravelli, C. and Greenwald, S. and Hwu, H. G. and Joyce, P. R. and Karam, E. G. and Lee, C. K. and Lellouch, J. and others}, + Date-Added = {2012-05-14 06:23:25 +0000}, + Date-Modified = {2012-05-14 06:23:54 +0000}, + Journal = {JAMA: the journal of the American Medical Association}, + Number = {4}, + Pages = {293--299}, + Publisher = {Am Med Assoc}, + Title = {Cross-national epidemiology of major depression and bipolar disorder}, + Volume = {276}, + Year = {1996}} + +@article{Budhani2006, + Author = {Budhani, S. and Richell, R.A. 
and Blair, R.J.R.}, + Date-Added = {2012-05-12 04:41:16 +0000}, + Date-Modified = {2012-05-12 04:41:22 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {3}, + Pages = {552}, + Publisher = {American Psychological Association}, + Title = {Impaired reversal but intact acquisition: Probabilistic response reversal deficits in adult individuals with psychopathy.}, + Volume = {115}, + Year = {2006}} + +@article{Dickstein2010, + Author = {Dickstein, DP and Finger, EC and Brotman, MA and Rich, BA and Pine, DS and Blair, JR and Leibenluft, E. and others}, + Date-Added = {2012-05-12 04:27:39 +0000}, + Date-Modified = {2012-05-12 04:27:49 +0000}, + Journal = {Psychological medicine}, + Number = {7}, + Pages = {1089}, + Publisher = {Cambridge Univ Press}, + Title = {Impaired probabilistic reversal learning in youths with mood and anxiety disorders}, + Volume = {40}, + Year = {2010}} + +@article{heerey2007delay, + Author = {Heerey, E.A. and Robinson, B.M. and McMahon, R.P. and Gold, J.M.}, + Date-Added = {2012-05-11 14:57:18 +0000}, + Date-Modified = {2012-05-11 14:57:18 +0000}, + Journal = {Cognitive neuropsychiatry}, + Number = {3}, + Pages = {213--221}, + Publisher = {Taylor \& Francis}, + Title = {Delay discounting in schizophrenia}, + Volume = {12}, + Year = {2007}} + +@article{heerey2011imagining, + Author = {Heerey, E.A. and Matveeva, T.M. and Gold, J.M.}, + Date-Added = {2012-05-11 14:56:19 +0000}, + Date-Modified = {2012-05-11 14:56:19 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {2}, + Pages = {483}, + Publisher = {American Psychological Association}, + Title = {Imagining the future: degraded representations of future rewards and events in schizophrenia.}, + Volume = {120}, + Year = {2011}} + +@article{swann2001measurement, + Author = {Swann, A.C. and Anderson, J.C. and Dougherty, D.M. and Moeller, F.G.}, + Date-Added = {2012-05-11 14:54:56 +0000}, + Date-Modified = {2012-05-11 14:54:56 +0000}, + Journal = {Psychiatry Research}, + Number = {2}, + Pages = {195--197}, + Publisher = {Elsevier}, + Title = {Measurement of inter-episode impulsivity in bipolar disorder}, + Volume = {101}, + Year = {2001}} + +@article{swann2003impulsivity, + Author = {Swann, A.C. and Pazzaglia, P. and Nicholls, A. and Dougherty, D.M. and Moeller, F.G.}, + Date-Added = {2012-05-11 14:54:36 +0000}, + Date-Modified = {2012-05-11 14:54:36 +0000}, + Journal = {Journal of affective disorders}, + Number = {1-2}, + Pages = {105--111}, + Publisher = {Elsevier}, + Title = {Impulsivity and phase of illness in bipolar disorder}, + Volume = {73}, + Year = {2003}} + +@article{hinson2003impulsive, + Author = {Hinson, J.M. and Jameson, T.L. and Whitney, P.}, + Date-Added = {2012-05-11 14:48:08 +0000}, + Date-Modified = {2012-05-11 14:48:08 +0000}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition; Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Number = {2}, + Pages = {298}, + Publisher = {American Psychological Association}, + Title = {Impulsive decision making and working memory.}, + Volume = {29}, + Year = {2003}} + +@article{shamosh2008delay, + Author = {Shamosh, N.A. and Gray, J.R.}, + Date-Added = {2012-05-11 14:47:04 +0000}, + Date-Modified = {2012-05-11 14:47:04 +0000}, + Journal = {Intelligence}, + Number = {4}, + Pages = {289--305}, + Publisher = {Elsevier}, + Title = {Delay discounting and intelligence: A meta-analysis}, + Volume = {36}, + Year = {2008}} + +@article{shamosh2008individual, + Author = {Shamosh, N.A. and DeYoung, C.G. 
and Green, A.E. and Reis, D.L. and Johnson, M.R. and Conway, A.R.A. and Engle, R.W. and Braver, T.S. and Gray, J.R.}, + Date-Added = {2012-05-11 14:46:55 +0000}, + Date-Modified = {2012-05-11 14:46:55 +0000}, + Journal = {Psychological Science}, + Number = {9}, + Pages = {904--911}, + Publisher = {SAGE Publications}, + Title = {Individual differences in delay discounting relation to intelligence, working memory, and anterior prefrontal cortex}, + Volume = {19}, + Year = {2008}} + +@article{bornovalova2005impulsivity, + Author = {Bornovalova, M.A. and Lejuez, CW and Daughters, S.B. and Zachary Rosenthal, M. and Lynch, T.R.}, + Date-Added = {2012-05-11 14:45:27 +0000}, + Date-Modified = {2012-05-11 14:45:27 +0000}, + Journal = {Clinical psychology review}, + Number = {6}, + Pages = {790--812}, + Publisher = {Elsevier}, + Title = {Impulsivity as a common process across borderline personality and substance use disorders}, + Volume = {25}, + Year = {2005}} + +@article{Strakowski2009, + Author = {Strakowski, S.M. and Fleck, D.E. and DelBello, M.P. and Adler, C.M. and Shear, P.K. and McElroy, S.L. and Keck Jr, P.E. and Moss, Q. and Cerullo, M.A. and Kotwal, R. and others}, + Date-Added = {2012-05-11 14:43:26 +0000}, + Date-Modified = {2012-05-11 14:43:38 +0000}, + Journal = {Bipolar disorders}, + Number = {1}, + Pages = {41--51}, + Publisher = {Wiley Online Library}, + Title = {Characterizing impulsivity in mania}, + Volume = {11}, + Year = {2009}} + +@article{stone1993assessment, + Author = {Stone, SP and Herbert, P. and Chrisostomou, J. and Vessey, C. and Horwood, C.}, + Date-Added = {2012-05-11 14:34:40 +0000}, + Date-Modified = {2012-05-11 14:34:40 +0000}, + Journal = {Disability \& Rehabilitation}, + Number = {1}, + Pages = {35--37}, + Publisher = {Informa UK Ltd UK}, + Title = {The assessment of disability in patients on an acute medical ward for elderly people}, + Volume = {15}, + Year = {1993}} + +@article{brzezinski1994purification, + Author = {Brzezinski, M.R. and Abraham, T.L. and Stone, C.L. and Dean, R.A. and Bosron, W.F.}, + Date-Added = {2012-05-11 14:32:54 +0000}, + Date-Modified = {2012-05-11 14:32:54 +0000}, + Journal = {Biochemical pharmacology}, + Number = {9}, + Pages = {1747--1755}, + Publisher = {Elsevier}, + Title = {Purification and characterization of a human liver cocaine carboxylesterase that catalyzes the production of benzoylecgonine and the formation of cocaethylene from alcohol and cocaine}, + Volume = {48}, + Year = {1994}} + +@article{aedelroth1995internal, + Author = {Aedelroth, P. and Brzezinski, P. and Malmstroem, B.G.}, + Date-Added = {2012-05-11 14:32:40 +0000}, + Date-Modified = {2012-05-11 14:32:40 +0000}, + Journal = {Biochemistry}, + Number = {9}, + Pages = {2844--2849}, + Publisher = {ACS Publications}, + Title = {Internal electron transfer in cytochrome c oxidase from Rhodobacter sphaeroides}, + Volume = {34}, + Year = {1995}} + +@article{andreasen1982negative, + Author = {Andreasen, N.C.}, + Date-Added = {2012-05-11 14:32:03 +0000}, + Date-Modified = {2012-05-11 14:32:03 +0000}, + Journal = {Archives of General Psychiatry}, + Number = {7}, + Pages = {784}, + Publisher = {Am Med Assoc}, + Title = {Negative symptoms in schizophrenia: definition and reliability}, + Volume = {39}, + Year = {1982}} + +@article{rubinsztein2006impaired, + Author = {Rubinsztein, JS and Michael, A. and Underwood, BR and Tempest, M. 
and Sahakian, BJ}, + Date-Added = {2012-05-11 14:29:24 +0000}, + Date-Modified = {2012-05-11 14:29:24 +0000}, + Journal = {Psychological medicine}, + Number = {5}, + Pages = {629--640}, + Publisher = {Cambridge Univ Press}, + Title = {Impaired cognition and decision-making in bipolar depression but no'affective bias' evident}, + Volume = {36}, + Year = {2006}} + +@article{murphy2001decision, + Author = {Murphy, F.C. and Rubinsztein, J.S. and Michael, A. and Rogers, R.D. and Robbins, T.W. and Paykel, E.S. and Sahakian, B.J. and others}, + Date-Added = {2012-05-11 14:27:57 +0000}, + Date-Modified = {2012-05-11 14:27:57 +0000}, + Journal = {Psychological Medicine}, + Number = {4}, + Pages = {679--693}, + Publisher = {Cambridge Univ Press}, + Title = {Decision-making cognition in mania and depression}, + Volume = {31}, + Year = {2001}} + +@article{taylor2008neural, + Author = {Taylor Tavares, J.V. and Clark, L. and Furey, M.L. and Williams, G.B. and Sahakian, B.J. and Drevets, W.C.}, + Date-Added = {2012-05-11 14:27:03 +0000}, + Date-Modified = {2012-05-11 14:27:03 +0000}, + Journal = {Neuroimage}, + Number = {3}, + Pages = {1118--1126}, + Publisher = {Elsevier}, + Title = {Neural basis of abnormal response to negative feedback in unmedicated mood disorders}, + Volume = {42}, + Year = {2008}} + +@article{Loewenstein2001, + Author = {Loewenstein, G.F. and Weber, E.U. and Hsee, C.K. and Welch, N.}, + Date-Added = {2012-05-02 02:18:23 +0000}, + Date-Modified = {2012-05-02 02:18:30 +0000}, + Journal = {Psychological bulletin}, + Number = {2}, + Pages = {267}, + Publisher = {American Psychological Association}, + Title = {Risk as feelings.}, + Volume = {127}, + Year = {2001}} + +@article{lagorio2005delay, + Author = {Lagorio, C.H. and Madden, G.J.}, + Date-Added = {2012-04-29 07:13:43 +0000}, + Date-Modified = {2012-04-29 07:13:43 +0000}, + Journal = {Behavioural Processes}, + Number = {2}, + Pages = {173--187}, + Publisher = {Elsevier}, + Title = {Delay discounting of real and hypothetical rewards III: Steady-state assessments, forced-choice trials, and all real rewards}, + Volume = {69}, + Year = {2005}} + +@article{madden2003delay, + Author = {Madden, G.J. and Begotka, A.M. and Raiff, B.R. and Kastern, L.L.}, + Date-Added = {2012-04-29 07:13:42 +0000}, + Date-Modified = {2012-04-29 07:13:42 +0000}, + Journal = {Experimental and Clinical Psychopharmacology}, + Number = {2}, + Pages = {139}, + Publisher = {American Psychological Association}, + Title = {Delay discounting of real and hypothetical rewards.}, + Volume = {11}, + Year = {2003}} + +@article{johnson2002within, + Author = {Johnson, M. W. and Bickel, W. K.}, + Date-Added = {2012-04-29 07:13:41 +0000}, + Date-Modified = {2012-05-18 20:11:33 +0000}, + Journal = {Journal of the Experimental Analysis of Behavior}, + Number = {2}, + Pages = {129}, + Publisher = {Society for the Experimental Analysis of Behavior}, + Title = {Within-subject comparison of real and hypothetical money rewards in delay discounting.}, + Volume = {77}, + Year = {2002}} + +@article{Eckblad1986, + Author = {Eckblad, M. 
and Chapman, L.J.}, + Date-Added = {2012-04-29 05:47:02 +0000}, + Date-Modified = {2012-04-29 05:47:20 +0000}, + Journal = {Journal of Abnormal Psychology}, + Number = {3}, + Pages = {214}, + Publisher = {American Psychological Association}, + Title = {Development and validation of a scale for hypomanic personality.}, + Volume = {95}, + Year = {1986}} + +@book{gray1982neuropsychology, + Author = {Gray, J.A.}, + Date-Added = {2012-04-29 04:54:54 +0000}, + Date-Modified = {2012-04-29 04:54:54 +0000}, + Publisher = {Clarendon press Oxford}, + Title = {The neuropsychology of anxiety: An enquiry into the functions of the septo-hippocampal system}, + Year = {1982}} + +@article{gray1981critique, + Author = {Gray, J.A.}, + Date-Added = {2012-04-29 04:54:29 +0000}, + Date-Modified = {2012-04-29 04:54:29 +0000}, + Journal = {A model for personality}, + Pages = {246--276}, + Title = {A critique of Eysenck’s theory of personality}, + Year = {1981}} + +@article{carver1994behavioral, + Author = {Carver, C.S. and White, T.L.}, + Date-Added = {2012-04-29 04:53:35 +0000}, + Date-Modified = {2012-04-29 04:53:35 +0000}, + Journal = {Journal of personality and social psychology}, + Number = {2}, + Pages = {319}, + Publisher = {American Psychological Association}, + Title = {Behavioral inhibition, behavioral activation, and affective responses to impending reward and punishment: The BIS/BAS Scales.}, + Volume = {67}, + Year = {1994}} + +@article{beck1996bdi, + Author = {Beck, AT and Steer, RA and Brown, GK}, + Date-Added = {2012-04-28 20:57:07 +0000}, + Date-Modified = {2012-04-28 20:57:16 +0000}, + Journal = {San Antonio, TX: Psychology Corporation}, + Title = {Manual for beck depression inventory II (BDI-II)}, + Year = {1996}} + +@article{lejuez2003balloon, + Author = {Lejuez, CW and Aklin, W.M. and Jones, H.A. and Richards, J.B. and Strong, D.R. and Kahler, C.W. and Read, J.P.}, + Date-Added = {2012-04-28 20:29:04 +0000}, + Date-Modified = {2012-04-28 20:29:04 +0000}, + Journal = {Experimental and Clinical Psychopharmacology}, + Number = {1}, + Pages = {26}, + Publisher = {American Psychological Association}, + Title = {The balloon analogue risk task (BART) differentiates smokers and nonsmokers.}, + Volume = {11}, + Year = {2003}} + +@article{Yechiam2008bipolar, + Abstract = {A formal modeling approach was used to characterize decision-making processes in bipolar disorder. Decision making was examined in 28 bipolar patients (14 acute and 14 remitted) and 25 controls using the Iowa Gambling Task (Bechara et al., 1994), a decision-making task used for assessing cognitive impulsivity. To disentangle motivational and cognitive aspects of decision-making processes, we applied a formal cognitive model to the performance on the Iowa Gambling Task. The model has three parameters: The relative impact of rewards and punishments on evaluations, the impact of recent and past payoffs, and the degree of choice consistency. The results indicated that acute bipolar patients were characterized by low choice consistency, or a tendency to make erratic choices. 
Low choice consistency improved the prediction of acute bipolar disorder beyond that provided by cognitive functioning and self-report measures of personality and temperament.}, + Author = {Yechiam, E and Hayden, E P and Bodkins, M and O'Donnell, B F and Hetrick, W P}, + Date-Added = {2012-04-28 20:23:36 +0000}, + Date-Modified = {2012-04-28 20:23:46 +0000}, + Doi = {10.1016/j.psychres.2007.07.001}, + Journal = {Psychiatry Res}, + Month = {Nov}, + Number = {2}, + Pages = {142-152}, + Pmid = {18848361}, + Title = {Decision making in bipolar disorder: a cognitive modeling approach}, + Url = {http://www.hubmed.org/display.cgi?uids=18848361}, + Volume = {161}, + Year = {2008}, + Bdsk-Url-1 = {http://www.hubmed.org/display.cgi?uids=18848361}, + Bdsk-Url-2 = {http://dx.doi.org/10.1016/j.psychres.2007.07.001}} + +@article{Ghahramani2001hmm, + Author = {Ghahramani, Z.}, + Date-Added = {2012-04-17 05:01:23 +0000}, + Date-Modified = {2012-04-17 05:01:37 +0000}, + Journal = {IJPRAI}, + Number = {1}, + Pages = {9--42}, + Title = {An introduction to hidden Markov models and Bayesian networks}, + Volume = {15}, + Year = {2001}} + +@article{fridberg2010cognitive, + Author = {Fridberg, D.J. and Queller, S. and Ahn, W.Y. and Kim, W. and Bishara, A.J. and Busemeyer, Jerome R. and Porrino, L. and Stout, J.C.}, + Date-Added = {2012-04-16 08:00:56 +0000}, + Date-Modified = {2016-03-23 13:56:14 +0000}, + Journal = {Journal of mathematical psychology}, + Number = {1}, + Pages = {28--38}, + Publisher = {Elsevier}, + Title = {Cognitive mechanisms underlying risky decision-making in chronic cannabis users}, + Volume = {54}, + Year = {2010}} + +@article{ahn2011model, + Author = {Ahn, Woo-Young and Krawitz, A. and Kim, W. and Busemeyer, Jerome R. and Brown, J.W.}, + Date-Added = {2012-04-16 08:00:51 +0000}, + Date-Modified = {2016-03-24 02:57:04 +0000}, + Journal = {Journal of Neuroscience, Psychology, and Economics}, + Number = {2}, + Pages = {95}, + Publisher = {Educational Publishing Foundation}, + Title = {A model-based fMRI analysis with hierarchical Bayesian parameter estimation.}, + Volume = {4}, + Year = {2011}} + +@article{ahn2011temporal, + Author = {Ahn, W.-Y. and Rass, O. and Fridberg, D.J. and Bishara, A.J. and Forsyth, J.K. and Breier, A. and Busemeyer, J.R. and Hetrick, W.P. and Bolbecker, A.R. and O'Donnell, B.F.}, + Date-Added = {2012-04-16 08:00:34 +0000}, + Date-Modified = {2012-05-14 08:05:32 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {4}, + Pages = {911}, + Publisher = {American Psychological Association}, + Title = {Temporal discounting of rewards in patients with bipolar disorder and schizophrenia.}, + Volume = {120}, + Year = {2011}} + +@book{null1974, + Address = {New York}, + Author = {{de Finetti}, B.}, + Publisher = {{J}ohn {W}iley \& {S}ons}, + Title = {Theory of Probability, Vol. 1 and 2}, + Year = {1974}} + +@article{vanGaalen2006, + Author = {vanGaalen, M.M. and van Koten, R. and Schoffelmeer, A.N.M. and Vanderschuren, L.J.M.J.}, + Journal = {Biological Psychiatry}, + Number = {1}, + Pages = {66--73}, + Publisher = {Elsevier}, + Title = {{Critical involvement of dopaminergic neurotransmission in impulsive decision making}}, + Volume = {60}, + Year = {2006}} + +@book{null2001, + Address = {Amsterdam}, + Author = {{van Kampen}, N. G.}, + Publisher = {Elsevier}, + Title = {Stochastic Processes in Physics and Chemistry}, + Year = {2001}} + +@article{null1981, + Author = {{van Kampen}, N. 
G.}, + Journal = {Journal of Statistical Physics}, + Pages = {175--187}, + Title = {{I}t\^{o} Versus {S}tratonovich}, + Volume = {24}, + Year = {1981}} + +@article{null1981a, + Author = {{van Kampen}, N. G.}, + Journal = {Journal of Statistical Physics}, + Pages = {431--442}, + Title = {The Validity of Nonlinear {L}angevin Equations}, + Volume = {25}, + Year = {1981}} + +@article{null2006, + Author = {{de Rooij}, S. and Gr\"{u}nwald, P.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {180--192}, + Title = {An Empirical Study of Minimum Description Length Model Selection with Infinite Parametric Complexity}, + Volume = {50}, + Year = {2006}} + +@article{Abrams2004, + Author = {Abrams, K. Y. and Yune, S. K. and Kim, S. J. and Jeon, H. J. and Han, S. J. and Hwang, J. and Sung, Y. H. and Lee, K. J. and Lyoo, I. K.}, + Journal = {Psychiatry Clin. Neurosci.}, + Month = {Jun}, + Pages = {240--248}, + Title = {{{T}rait and state aspects of harm avoidance and its implication for treatment in major depressive disorder, dysthymic disorder, and depressive personality disorder}}, + Volume = {58}, + Year = {2004}} + +@article{Adams2003, + Abstract = {This study examined the utility of Cloninger's tridimensional personality + theory (1986, 1987a) in predicting preferred substance of abuse and + self-reported motivations for use among a sample of 200 adolescent + substance abusers and 200 matched community control adolescents. + Two primary hypotheses were tested: (1) Cloninger's type II profile + is more strongly associated with stimulant use, and his type I profile + is more strongly associated with substances having sedative-hypnotic + effects; and 2) type II individuals will report motivations for use + that focus primarily on obtaining positive rewards, whereas type + I individuals will report motivations primarily concerning negative + reinforcement or the avoidance of problems and negative life experiences. + Our results did not show strong associations between Cloninger's + Harm Avoidance and Reward Dependence dimensions and preferred substance + or motivations for use. However, in partial support of the hypotheses + examined here, we did find that individuals low in novelty seeking + (NS) tended to prefer alcohol and marijuana, whereas those high in + NS endorsed a wider range of preferred substances. 
High NS was associated + with significantly greater stimulant use and motivations focused + on obtaining positive rewards, whereas low NS was associated with + greater sedative use and motivations related to avoiding negative + emotions or negative life experiences.}, + Author = {Justin B Adams and Alisa J Heath and Susan E Young and John K Hewitt and Robin P Corley and Michael C Stallings}, + Institution = {Department of Psychology, University of Colorado, Boulder, Colorado 80309-0447, USA.}, + Journal = {Am J Drug Alcohol Abuse}, + Keywords = {Adolescent; Adolescent Behavior, psychology; Adult; Case-Control Studies; Colorado; Exploratory Behavior; Humans; Juvenile Delinquency, psychology; Male; Motivation; Personality; Personality Tests; Psychological Theory; Substance-Related Disorders, ethnology/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {691--712}, + Pmid = {14510047}, + Timestamp = {2009.08.06}, + Title = {Relationships between personality and preferred substance and motivations for use among adolescent substance abusers.}, + Volume = {29}, + Year = {2003}} + +@article{Adcock2006, + Author = {Adcock, R. A. and Thangavel, A. and Whitfield-Gabrieli, S. and Knutson, B. and Gabrieli, J. D.}, + Journal = {Neuron}, + Month = {May}, + Pages = {507--517}, + Title = {{{R}eward-motivated learning: mesolimbic activation precedes memory formation}}, + Volume = {50}, + Year = {2006}} + +@article{Adinoff2001, + Author = {Adinoff, B. and Devous, M.D. and Best, S.M. and George, M.S. and Alexander, D. and Payne, K.}, + Journal = {American Journal of Psychiatry}, + Number = {3}, + Pages = {390--398}, + Publisher = {Am Psychiatric Assoc}, + Title = {{Limbic responsiveness to procaine in cocaine-addicted subjects}}, + Volume = {158}, + Year = {2001}} + +@article{Adinoff2003, + Author = {Adinoff, B. and Devous, M.D. and Cooper, D.B. and Best, S.E. and Chandler, P. and Harris, T. and Cervin, C.A. and Cullum, C.M.}, + Journal = {American Journal of Psychiatry}, + Number = {10}, + Pages = {1892--1894}, + Publisher = {Am Psychiatric Assoc}, + Title = {{Resting regional cerebral blood flow and gambling task performance in cocaine-dependent subjects and healthy comparison subjects}}, + Volume = {160}, + Year = {2003}} + +@article{Aerts2003, + Author = {Aerts, D. and Czachor, M. and Gabora, L. and Kuna, M. and Posiewnik, A. and Pykacz, J. and Syty, M.}, + Journal = {Physical Review E}, + Pages = {51926}, + Title = {Quantum Morphogenesis: A Variation on {T}hom's Catastrophe Theory}, + Volume = {67}, + Year = {2003}} + +@article{Agnew1991, + Author = {Agnew, J. and Schwartz, B. S. and Bolla, K. I. and Ford, D. P. and Bleecker, M. L.}, + Journal = {J Occup Med}, + Month = {Nov}, + Pages = {1156--1162}, + Title = {{{C}omparison of computerized and examiner-administered neurobehavioral testing techniques}}, + Volume = {33}, + Year = {1991}} + +@article{Agresti1992, + Author = {Agresti, A.}, + Journal = {Statistical Science}, + Pages = {131--177}, + Title = {A Survey of Exact Inference for Contingency Tables (with discussion)}, + Volume = {7}, + Year = {1992}} + +@article{Aharon2001, + Author = {Aharon, I. and Etcoff, N. and Ariely, D. and Chabris, C.F. and O'Connor, E. 
and Breiter, H.C.}, + Journal = {Neuron}, + Number = {3}, + Pages = {537--551}, + Publisher = {Elsevier}, + Title = {{Beautiful Faces Have Variable Reward Value: fMRI and Behavioral Evidence}}, + Volume = {32}, + Year = {2001}} + +@article{Ahmed2004, + Author = {Serge H Ahmed}, + Doi = {10.1126/science.1107071}, + Journal = {Science}, + Keywords = {Animals; Choice Behavior; Cocaine-Related Disorders, physiopathology/psychology; Computer Simulation; Cues; Dopamine, physiology; Humans; Impulsive Behavior; Learning; Models, Neurological; Models, Psychological; Neurons, physiology; Reinforcement (Psychology); Reward}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {5703}, + Owner = {Woo-Young Ahn}, + Pages = {1901--1902}, + Pii = {306/5703/1901}, + Pmid = {15591193}, + Timestamp = {2009.08.05}, + Title = {Neuroscience. Addiction as compulsive reward prediction.}, + Url = {http://dx.doi.org/10.1126/science.1107071}, + Volume = {306}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1126/science.1107071}} + +@article{Ahn2004, + Author = {Ahn, K. H. and Lyoo, I. K. and Lee, H. K. and Song, I. C. and Oh, J. S. and Hwang, J. and Kwon, J. and Kim, M. J. and Kim, M. and Renshaw, P. F.}, + Journal = {Psychiatry Clin. Neurosci.}, + Month = {Oct}, + Pages = {516--521}, + Title = {{{W}hite matter hyperintensities in subjects with bipolar disorder}}, + Volume = {58}, + Year = {2004}} + +@article{Ahn2009, + Author = {Ahn, Woo-Young and Krawitz, Adam and Kim, Woojae and Busemeyer, Jerome R. and Brown, Joshua W.}, + Journal = {Manuscript submitted for publication}, + Owner = {Woo-Young Ahn}, + Timestamp = {2009.08.15}, + Title = {A Model-Based fMRI Analysis with Hierarchical Bayesian Parameter Estimation}, + Year = {2009}} + +@article{Ainslie2004, + Author = {Ainslie, G. and Monterosso, J.}, + Journal = {Science}, + Month = {Oct}, + Pages = {421--423}, + Title = {{{B}ehavior. {A} marketplace in the brain?}}, + Volume = {306}, + Year = {2004}} + +@article{Ainslie2003, + Author = {Ainslie, G. and Monterosso, J. R.}, + Journal = {J Exp Anal Behav}, + Month = {Jan}, + Pages = {37--48}, + Title = {{{B}uilding blocks of self-control: increased tolerance for delay with bundled rewards}}, + Volume = {79}, + Year = {2003}} + +@book{Aitchison1975, + Address = {Cambridge}, + Author = {Aitchison, J. and Dunsmore, I. R.}, + Publisher = {Cambridge University Press}, + Title = {Statistical prediction analysis}, + Year = {1975}} + +@article{Aitkin1996, + Author = {Aitkin, M.}, + Journal = {The American Statistician}, + Pages = {384--385}, + Title = {Comment on ``Simple Counterexamples Against the Conditionality Principle'' by {I}nge {S}. {H}elland}, + Volume = {50}, + Year = {1996}} + +@incollection{Akaike1973, + Address = {Budapest}, + Author = {Akaike, H.}, + Booktitle = {Second International Symposium on Information Theory}, + Editor = {Petrov, B. N. and Csaki, F.}, + Pages = {267--281}, + Publisher = {Akademiai Kiado}, + Title = {Information Theory as an Extension of the Maximum Likelihood Principle}, + Year = {1973}} + +@article{Akaike1974, + Author = {Akaike, H.}, + Journal = {IEEE Transactions on Automatic Control}, + Pages = {716--723}, + Title = {A New Look at the Statistical Model Identification}, + Volume = {19}, + Year = {1974}} + +@article{akiskal2000re, + Author = {Akiskal, HS and Bourgeois, ML and Angst, J. and Post, R. and Moller, H.
and Hirschfeld, R.}, + Journal = {Journal of Affective Disorders}, + Pages = {S5--S30}, + Title = {{Re-evaluating the prevalence of and diagnostic composition within the broad clinical spectrum of bipolar disorders.}}, + Volume = {59}, + Year = {2000}} + +@article{Aklin2009, + Author = {Aklin, W. M. and Moolchan, E. T. and Luckenbaugh, D. A. and Ernst, M.}, + Journal = {Nicotine Tob. Res.}, + Month = {Jun}, + Pages = {750--755}, + Title = {{{E}arly tobacco smoking in adolescents with externalizing disorders: inferences for reward function}}, + Volume = {11}, + Year = {2009}} + +@article{Alessi2003, + Abstract = {Research and clinical expertise indicates that impulsivity is an underlying + feature of pathological gambling. This study examined the extent + to which impulsive behavior, defined by the rate of discounting delayed + monetary rewards, varies with pathological gambling severity, assessed + by the South Oaks Gambling Screen (SOGS). Sixty-two pathological + gamblers completed a delay discounting task, the SOGS, the Eysenck + impulsivity scale, the Addiction Severity Index (ASI), and questions + about gambling and substance use at intake to outpatient treatment + for pathological gambling. In the delay discounting task, participants + chose between a large delayed reward (US $1000) and smaller more + immediate rewards (US $1-$999) across a range of delays (6h to 25 + years). The rate at which the delayed reward was discounted (k value) + was derived for each participant and linear regression was used to + identify the variables that predicted k values. Age, gender, years + of education, substance abuse treatment history, and cigarette smoking + history failed to significantly predict k values. Scores on the Eysenck + impulsivity scale and the SOGS both accounted for a significant proportion + of the variance in k values. The predictive value of the SOGS was + 1.4 times that of the Eysenck scale. These results indicate that + of the measures tested, gambling severity was the best single predictor + of impulsive behavior in a delay discounting task in this sample + of pathological gamblers.}, + Author = {S. Alessi and N. Petry}, + Institution = {Department of Psychiatry, University of Connecticut Health Center, 263 Farmington Avenue, 06030-3944, Farmington, CT, USA}, + Journal = {Behav Processes}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {345--354}, + Pii = {S0376635703001505}, + Pmid = {14580703}, + Timestamp = {2009.08.06}, + Title = {Pathological gambling severity is associated with impulsivity in a delay discounting procedure.}, + Volume = {64}, + Year = {2003}} + +@article{Alexander2006, + Author = {Alexander, D. F. and Alving, B. M. and Battey, J. F. and Berg, J. M. and Collins, F. S. and Fauci, A. S. and Gallin, J. I. and Grady, P. A. and Hodes, R. J. and Hrynkow, S. H. and Insel, T. R. and Jones, J. F. and Katz, S. I. and Landis, S. C. and Li, T. K. and Lindberg, D. A. and Nabel, E. G. and Niederhuber, J. E. and Pettigrew, R. I. and Rodgers, G. P. and Ruffin, J. and Scarpa, A. and Schwartz, D. A. and Sieving, P. A. and Straus, S. E. and Tabak, L. A. and Volkow, N. D.}, + Journal = {J. Clin. Invest.}, + Month = {Jun}, + Pages = {1462--1463}, + Title = {{{R}esponse to: ``{R}escuing the {N}{I}{H} before it is too late''}}, + Volume = {116}, + Year = {2006}} + +@article{Alexoff2003, + Author = {Alexoff, D. L. and Vaska, P. and Marsteller, D. and Gerasimov, T. and Li, J. and Logan, J. and Fowler, J. S. and Taintor, N. B. and Thanos, P. K. and Volkow, N. D.}, + Journal = {J. Nucl. Med.}, + Month = {May}, + Pages = {815--822}, + Title = {{{R}eproducibility of 11{C}-raclopride binding in the rat brain measured with the micro{P}{E}{T} {R}4: effects of scatter correction and tracer specific activity}}, + Volume = {44}, + Year = {2003}} + +@article{Alia-Klein2008a, + Author = {Alia-Klein, N. and Goldstein, R. Z. and Kriplani, A. and Logan, J. and Tomasi, D. and Williams, B. and Telang, F. and Shumay, E. and Biegon, A. and Craig, I. W. and Henn, F. and Wang, G. J. and Volkow, N. D. and Fowler, J. S.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {5099--5104}, + Title = {{{B}rain monoamine oxidase {A} activity predicts trait aggression}}, + Volume = {28}, + Year = {2008}} + +@article{Alia-Klein2009, + Author = {Alia-Klein, N. and Goldstein, R. Z. and Tomasi, D. and Woicik, P. A. and Moeller, S. J. and Williams, B. and Craig, I. W. and Telang, F. and Biegon, A. and Wang, G. J. and Fowler, J. S. and Volkow, N. D.}, + Journal = {Emotion}, + Month = {Jun}, + Pages = {385--396}, + Title = {{{N}eural mechanisms of anger regulation as a function of genetic risk for violence}}, + Volume = {9}, + Year = {2009}} + +@article{Alia-Klein2007, + Author = {Alia-Klein, N. and Goldstein, R. Z. and Tomasi, D. and Zhang, L. and Fagin-Jones, S. and Telang, F. and Wang, G. J. and Fowler, J. S.
and Volkow, N. D.}, + Journal = {Emotion}, + Month = {Aug}, + Pages = {649--659}, + Title = {{{W}hat is in a word? {N}o versus {Y}es differentially engage the lateral orbitofrontal cortex}}, + Volume = {7}, + Year = {2007}} + +@article{Alia-Klein2008, + Author = {Alia-Klein, N. and Kriplani, A. and Pradhan, K. and Ma, J. Y. and Logan, J. and Williams, B. and Craig, I. W. and Telang, F. and Tomasi, D. and Goldstein, R. Z. and Wang, G. J. and Volkow, N. D. and Fowler, J. S.}, + Journal = {Psychiatry Res}, + Month = {Oct}, + Pages = {73--76}, + Title = {{{T}he {M}{A}{O}-{A} genotype does not modulate resting brain metabolism in adults}}, + Volume = {164}, + Year = {2008}} + +@article{Allman2007, + Author = {Allman, B.L. and Meredith, M.A.}, + Journal = {Journal of neurophysiology}, + Number = {1}, + Pages = {545}, + Publisher = {Am Physiological Soc}, + Title = {{Multisensory processing in ``unimodal'' neurons: cross-modal subthreshold auditory effects in cat extrastriate visual cortex}}, + Volume = {98}, + Year = {2007}} + +@article{Alvaro1996, + Author = {Alvaro, J. D. and Tatro, J. B. and Quillan, J. M. and Fogliano, M. and Eisenhard, M. and Lerner, M. R. and Nestler, E. J. and Duman, R. S.}, + Journal = {Mol. Pharmacol.}, + Month = {Sep}, + Pages = {583--591}, + Title = {{{M}orphine down-regulates melanocortin-4 receptor expression in brain regions that mediate opiate addiction}}, + Volume = {50}, + Year = {1996}} + +@article{Anastasio2003, + Author = {Anastasio, T.J. and Patton, P.E.}, + Journal = {Journal of Neuroscience}, + Number = {17}, + Pages = {6713--6727}, + Publisher = {Soc Neuroscience}, + Title = {{A two-stage unsupervised learning algorithm reproduces multisensory enhancement in a neural network model of the corticotectal system}}, + Volume = {23}, + Year = {2003}} + +@article{Anastasio2000, + Author = {Anastasio, T.J. and Patton, P.E. and Belkacem-Boussaid, K.}, + Journal = {Neural Computation}, + Number = {5}, + Pages = {1165--1187}, + Publisher = {MIT Press}, + Title = {{Using Bayes' rule to model multisensory enhancement in the superior colliculus}}, + Volume = {12}, + Year = {2000}} + +@article{Anderson2000, + Author = {Anderson, B. J. and Gatley, S. J. and Rapp, D. N. and Coburn-Litvak, P. S. and Volkow, N. D.}, + Journal = {Brain Res.}, + Month = {Jul}, + Pages = {262--265}, + Title = {{{T}he ratio of striatal {D}1 to muscarinic receptors changes in aging rats housed in an enriched environment}}, + Volume = {872}, + Year = {2000}} + +@article{Anderson1994, + Author = {Anderson, J. L. and Platt, M. L. and Guarnieri, T. and Fox, T. L. and Maser, M. J. and Pritchett, E. L.}, + Journal = {Am. J. Cardiol.}, + Month = {Sep}, + Pages = {578--584}, + Title = {{{F}lecainide acetate for paroxysmal supraventricular tachyarrhythmias. {T}he {F}lecainide {S}upraventricular {T}achycardia {S}tudy {G}roup}}, + Volume = {74}, + Year = {1994}} + +@article{Anderson2004, + Author = {Anderson, J. R. and Bothell, D. and Byrne, M. D. and Douglass, S. and Lebiere, C. and Qin, Y.}, + Journal = {Psychological Review}, + Owner = {WooYoung Ahn}, + Pages = {1036--1060}, + Timestamp = {2008.03.25}, + Title = {An integrated theory of the mind.}, + Volume = {111}, + Year = {2004}} + +@article{Andersoninpress, + Author = {Anderson, John R.
and Qin, Yulin}, + Journal = {Journal of Cognitive Neuroscience}, + Owner = {WooYoung Ahn}, + Timestamp = {2008.03.26}, + Title = {Using Brain Imaging to Extract the Structure of Complex Events at the Rational Time Band}, + Year = {in press}} + +@article{Anderson2005, + Author = {Anderson, K. G. and Schweinsburg, A. and Paulus, M. P. and Brown, S. A. and Tapert, S.}, + Journal = {J. Stud. Alcohol}, + Month = {May}, + Pages = {323--331}, + Title = {{{E}xamining personality and alcohol expectancies using functional magnetic resonance imaging (f{M}{R}{I}) with adolescents}}, + Volume = {66}, + Year = {2005}} + +@article{andreoli2003selective, + Author = {Andreoli, M. and Tessari, M. and Pilla, M. and Valerio, E. and Hagan, JJ and Heidbreder, CA}, + Journal = {Neuropsychopharmacology: official publication of the American College of Neuropsychopharmacology}, + Number = {7}, + Pages = {1272}, + Title = {{Selective antagonism at dopamine D3 receptors prevents nicotine-triggered relapse to nicotine-seeking behavior.}}, + Volume = {28}, + Year = {2003}} + +@article{Andrew2002, + Author = {Andrew, D. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Apr}, + Pages = {1889--1901}, + Title = {{{R}esponses of spinothalamic lamina {I} neurons to maintained noxious mechanical stimulation in the cat}}, + Volume = {87}, + Year = {2002}} + +@article{Andrew2002a, + Author = {Andrew, D. and Craig, A. D.}, + Journal = {J. Physiol. (Lond.)}, + Month = {Dec}, + Pages = {913--931}, + Title = {{{Q}uantitative responses of spinothalamic lamina {I} neurones to graded mechanical stimulation in the cat}}, + Volume = {545}, + Year = {2002}} + +@article{Andrew2001, + Author = {Andrew, D. and Craig, A. D.}, + Journal = {Nat. Neurosci.}, + Month = {Jan}, + Pages = {72--77}, + Title = {{{S}pinothalamic lamina {I} neurons selectively sensitive to histamine: a central neural pathway for itch}}, + Volume = {4}, + Year = {2001}} + +@article{Andrew2001a, + Author = {Andrew, D. and Craig, A. D.}, + Journal = {J. Physiol. (Lond.)}, + Month = {Dec}, + Pages = {489--495}, + Title = {{{S}pinothalamic lamina {I} neurones selectively responsive to cutaneous warming in cats}}, + Volume = {537}, + Year = {2001}} + +@article{Andrew2003, + Author = {Andrew, D. and Krout, K. E. and Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Apr}, + Pages = {257--271}, + Title = {{{D}ifferentiation of lamina {I} spinomedullary and spinothalamic neurons in the cat}}, + Volume = {458}, + Year = {2003}} + +@article{Anscombe1963, + Author = {Anscombe, F. J.}, + Journal = {Journal of the American Statistical Association}, + Pages = {365--383}, + Title = {Sequential Medical Trials}, + Volume = {58}, + Year = {1963}} + +@article{Anscombe1954, + Author = {Anscombe, F. J.}, + Journal = {Biometrics}, + Pages = {89--100}, + Title = {Fixed-Sample-Size Analysis of Sequential Observations}, + Volume = {10}, + Year = {1954}} + +@article{deAraujo2005, + Author = {de Araujo, I.E. and Rolls, E.T. and Velazco, M.I. and Margot, C. and Cayeux, I.}, + Journal = {Neuron}, + Number = {4}, + Pages = {671--679}, + Title = {{Cognitive modulation of olfactory processing}}, + Volume = {46}, + Year = {2005}} + +@article{Arcaini2009, + Author = {Arcaini, L. and Pascutto, C. and Passamonti, F. and Bruno, R. and Merli, M. and Rizzi, S. and Orlandi, E. and Astori, C. and Rattotti, S. and Paulli, M.
and others}, + Journal = {International Journal of Cancer}, + Number = {9}, + Publisher = {Wiley}, + Title = {{Bayesian models identify specific lymphoproliferative disorders associated with hepatitis C virus infection}}, + Volume = {124}, + Year = {2009}} + +@article{Arce2006a, + Author = {Arce, E. and Leland, D. S. and Miller, D. A. and Simmons, A. N. and Winternheimer, K. C. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Aug}, + Pages = {704--713}, + Title = {{{I}ndividuals with schizophrenia present hypo- and hyperactivation during implicit cueing in an inhibitory task}}, + Volume = {32}, + Year = {2006}} + +@article{Arce2006, + Author = {Arce, E. and Miller, D. A. and Feinstein, J. S. and Stein, M. B. and Paulus, M. P.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Nov}, + Pages = {105--116}, + Title = {{{L}orazepam dose-dependently decreases risk-taking related activation in limbic areas}}, + Volume = {189}, + Year = {2006}} + +@article{Arce2008, + Author = {Arce, E. and Simmons, A. N. and Lovero, K. L. and Stein, M. B. and Paulus, M. P.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {661--672}, + Title = {{{E}scitalopram effects on insula and amygdala {B}{O}{L}{D} activation during emotional processing}}, + Volume = {196}, + Year = {2008}} + +@article{Arce2009, + Author = {Arce, E. and Simmons, A. N. and Stein, M. B. and Winkielman, P. and Hitchcock, C. and Paulus, M. P.}, + Journal = {J Affect Disord}, + Month = {Apr}, + Pages = {286--293}, + Title = {{{A}ssociation between individual differences in self-reported emotional resilience and the affective perception of neutral faces}}, + Volume = {114}, + Year = {2009}} + +@article{Arias2007, + Author = {Arias-Carri{\'o}n, O. and P{\"o}ppel, E.}, + Journal = {Acta neurobiologiae experimentalis}, + Number = {4}, + Pages = {481}, + Title = {{Dopamine, learning, and reward-seeking behavior.}}, + Volume = {67}, + Year = {2007}} + +@article{Armitage1961, + Author = {Armitage, P.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {30--31}, + Title = {Comment on ``Consistency in Statistical Inference and Decision'' by {C}edric {A}. {B}. {S}mith}, + Volume = {23}, + Year = {1961}} + +@book{Armitage1960, + Address = {Springfield (IL)}, + Author = {Armitage, P.}, + Publisher = {Thomas}, + Title = {Sequential Medical Trials}, + Year = {1960}} + +@article{Armitage1957, + Author = {Armitage, P.}, + Journal = {Biometrika}, + Pages = {9--26}, + Title = {Restricted Sequential Procedures}, + Volume = {44}, + Year = {1957}} + +@article{Armitage1969, + Author = {Armitage, P. and McPherson, C. K. and Rowe, B. C.}, + Journal = {Journal of the Royal Statistical Society A}, + Pages = {235--244}, + Title = {Repeated Significance Tests on Accumulating Data}, + Volume = {132}, + Year = {1969}} + +@book{Arnold2003, + Address = {Berlin}, + Author = {Arnold, L.}, + Publisher = {Springer Verlag}, + Title = {Random Dynamical Systems}, + Year = {2003}} + +@book{Arnold1999, + Address = {New York}, + Author = {Arnold, V. I. and Afrajmovich, V. S. and Il'yashenko, Y. S. and Shil'nikov, L. P.}, + Publisher = {Springer Verlag}, + Title = {Bifurcation Theory and Catastrophe Theory}, + Year = {1999}} + +@article{Aron2005, + Author = {Aron, A. and Fisher, H. and Mashek, D.J. and Strong, G. and Li, H.
and Brown, L.L.}, + Journal = {Journal of Neurophysiology}, + Number = {1}, + Pages = {327--337}, + Publisher = {Am Physiological Soc}, + Title = {{Reward, motivation, and emotion systems associated with early-stage intense romantic love}}, + Volume = {94}, + Year = {2005}} + +@article{Aron2007, + Author = {Aron, J. L. and Paulus, M. P.}, + Journal = {Addiction}, + Month = {Apr}, + Pages = {33--43}, + Title = {{{L}ocation, location: using functional magnetic resonance imaging to pinpoint brain differences relevant to stimulant use}}, + Volume = {102 Suppl 1}, + Year = {2007}} + +@article{Asai2007, + Author = {Asai, M. and Iwata, N. and Yoshikawa, A. and Aizaki, Y. and Ishiura, S. and Saido, T.C. and Maruyama, K.}, + Journal = {Biochemical and Biophysical Research Communications}, + Number = {2}, + Pages = {498--502}, + Publisher = {Elsevier}, + Title = {Berberine alters the processing of Alzheimer's amyloid precursor protein to decrease Abeta secretion}, + Volume = {352}, + Year = {2007}} + +@article{Ashby1994, + Author = {Ashby, F.G. and Maddox, W.T. and Lee, W.W.}, + Journal = {Psychological Science}, + Number = {3}, + Pages = {144--151}, + Publisher = {Blackwell Publishing Ltd}, + Title = {{On the dangers of averaging across subjects when using multidimensional scaling or the similarity-choice model}}, + Volume = {5}, + Year = {1994}} + +@article{Au1998, + Author = {Au, K. S. and Rodriguez, J. A. and Finch, J. L. and Volcik, K. A. and Roach, E. S. and Delgado, M. R. and Rodriguez, E. and Northrup, H.}, + Journal = {Am. J. Hum. Genet.}, + Month = {Feb}, + Pages = {286--294}, + Title = {{{G}erm-line mutational analysis of the {T}{S}{C}2 gene in 90 tuberous-sclerosis patients}}, + Volume = {62}, + Year = {1998}} + +@article{Au1997, + Author = {Au, K. S. and Rodriguez, J. A. and Rodriguez, E. and Dobyns, W. B. and Delgado, M. R. and Northrup, H.}, + Journal = {Hum. Mutat.}, + Pages = {23--29}, + Title = {{{M}utations and polymorphisms in the tuberous sclerosis complex gene on chromosome 16}}, + Volume = {9}, + Year = {1997}} + +@article{Au2007, + Author = {Au, K. S. and Williams, A. T. and Roach, E. S. and Batchelor, L. and Sparagana, S. P. and Delgado, M. R. and Wheless, J. W. and Baumgartner, J. E. and Roa, B. B. and Wilson, C. M. and Smith-Knuppel, T. K. and Cheung, M. Y. and Whittemore, V. H. and King, T. M. and Northrup, H.}, + Journal = {Genet. Med.}, + Month = {Feb}, + Pages = {88--100}, + Title = {{{G}enotype/phenotype correlation in 325 individuals referred for a diagnosis of tuberous sclerosis complex in the {U}nited {S}tates}}, + Volume = {9}, + Year = {2007}} + +@article{Auerbach1987, + Author = {Auerbach, A. H. and Childress, A. R.}, + Journal = {J. Nerv. Ment. Dis.}, + Month = {Mar}, + Pages = {138--142}, + Title = {{{T}he value of {D}{S}{M}-{I}{I}{I} for psychotherapy. {A} feasibility study}}, + Volume = {175}, + Year = {1987}} + +@article{Ayer1955, + Author = {Ayer, M. and Brunk, H. D. and Ewing, G. M. and Reid, W. T. and Silverman, E.}, + Journal = {The Annals of Mathematical Statistics}, + Pages = {547--560}, + Title = {An Empirical Distribution Function for Sampling with Incomplete Information}, + Volume = {39}, + Year = {1955}} + +@article{pmid19184648, + Author = {Azizian, A. and Monterosso, J. and O'Neill, J. and London, E. D.}, + Journal = {Handb Exp Pharmacol}, + Pages = {113--143}, + Title = {{{M}agnetic resonance imaging studies of cigarette smoking}}, + Year = {2009}} + +@article{Azizian2008, + Author = {Azizian, A. and Monterosso, J. R. and Brody, A. L. and Simon, S. L. and London, E. D.}, + Journal = {Nicotine Tob.
Res.}, + Month = {Apr}, + Pages = {599--606}, + Title = {{{S}everity of nicotine dependence moderates performance on perceptual-motor tests of attention}}, + Volume = {10}, + Year = {2008}} + +@article{Azzalini1990, + Author = {Azzalini, A. and Bowman, A. W.}, + Journal = {Applied Statistics}, + Pages = {357--365}, + Title = {A Look at Some Data on the {O}ld {F}aithful Geyser}, + Volume = {39}, + Year = {1990}} + +@article{B?ckelmann2004, + Author = {B\"{o}ckelmann, P. K. and Bechara, I. J.}, + Journal = {J. Submicrosc. Cytol. Pathol.}, + Month = {Jan}, + Pages = {55--64}, + Title = {{{E}ffect of naproxen on tail fin regeneration in teleost}}, + Volume = {36}, + Year = {2004}} + +@article{Backe1999, + Author = {Backe, A.}, + Journal = {Philosophy of Science}, + Pages = {S354--S361}, + Title = {The Likelihood Principle and the Reliability of Experiments}, + Volume = {66}, + Year = {1999}} + +@article{Bae2008, + Author = {Bae, S. and Kim, J. and Hwang, J. and Lee, Y. and Lee, H. and Lee, J. and Lyoo, I. and Renshaw, P. and Yoon, S.}, + Journal = {J. Psychopharmacol. (Oxford)}, + Month = {Nov}, + Title = {{{I}ncreased prevalence of white matter hyperintensities in patients with panic disorder}}, + Year = {2008}} + +@article{Bae2006, + Author = {Bae, S. C. and Lyoo, I. K. and Sung, Y. H. and Yoo, J. and Chung, A. and Yoon, S. J. and Kim, D. J. and Hwang, J. and Kim, S. J. and Renshaw, P. F.}, + Journal = {Drug Alcohol Depend}, + Month = {Jan}, + Pages = {83--88}, + Title = {{{I}ncreased white matter hyperintensities in male methamphetamine abusers}}, + Volume = {81}, + Year = {2006}} + +@article{Bagot2007, + Author = {Bagot, K. S. and Berarducci, J. M. and Franken, F. H. and Frazier, M. J. and Ernst, M. and Moolchan, E. T.}, + Journal = {Am J Addict}, + Pages = {62--66}, + Title = {{{A}dolescents with conduct disorder: early smoking and treatment requests}}, + Volume = {16}, + Year = {2007}} + +@article{Baicy2005, + Author = {Baicy, K. and Bearden, C. E. and Monterosso, J. and Brody, A. L. and Isaacson, A. J. and London, E. D.}, + Journal = {Int. Rev. Neurobiol.}, + Pages = {117--145}, + Title = {{{C}ommon substrates of dysphoria in stimulant drug abuse and primary depression: therapeutic targets}}, + Volume = {65}, + Year = {2005}} + +@article{Baicy2007, + Author = {Baicy, K. and London, E. D. and Monterosso, J. and Wong, M. L. and Delibasi, T. and Sharma, A. and Licinio, J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Nov}, + Pages = {18276--18279}, + Title = {{{L}eptin replacement alters brain response to food cues in genetically leptin-deficient adults}}, + Volume = {104}, + Year = {2007}} + +@article{Baillie1996, + Author = {Baillie, R. T.}, + Journal = {Journal of Econometrics}, + Pages = {5--59}, + Title = {Long Memory Processes and Fractional Integration in Econometrics}, + Volume = {73}, + Year = {1996}} + +@article{Baillie2002, + Author = {Baillie, R. T. and Crato, N. and Ray, B. K.}, + Journal = {International Journal of Forecasting}, + Title = {Long--memory forecasting [{S}pecial issue]}, + Volume = {18(2)}, + Year = {2002}} + +@book{Bak1996, + Address = {New York}, + Author = {Bak, P.}, + Publisher = {Springer Verlag}, + Title = {How Nature Works: The Science of Self--Organized Criticality}, + Year = {1996}} + +@article{Bak1987, + Author = {Bak, P. and Tang, C.
and Wiesenfeld, K.}, + Journal = {Physical Review Letters}, + Pages = {381--384}, + Title = {Self--organized Criticality: An Explanation of $1/f$ Noise}, + Volume = {59}, + Year = {1987}} + +@article{Bakan1966, + Author = {Bakan, D.}, + Journal = {Psychological Bulletin}, + Pages = {423--437}, + Title = {The Test of Significance in Psychological Research}, + Volume = {66}, + Year = {1966}} + +@article{Balakrishnan2001, + Author = {Balakrishnan, J. D. and Busemeyer, J. R. and MacDonald, J. A. and Lin, A.}, + Journal = {Indiana University Cognitive Science Technical Report 248}, + Title = {Dynamic signal detection theory: The next logical step in signal detection analysis.}, + Year = {2001}} + +@article{Balbus1998, + Author = {Balbus, J. M. and Stewart, W. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {Arch. Environ. Health}, + Pages = {264--270}, + Title = {{{S}imple visual reaction time in organolead manufacturing workers: influence of the interstimulus interval}}, + Volume = {53}, + Year = {1998}} + +@article{Balbus1997, + Author = {Balbus, J. M. and Stewart, W. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {Am. J. Ind. Med.}, + Month = {Nov}, + Pages = {544--549}, + Title = {{{S}imple visual reaction time in organolead manufacturing workers: comparison of different methods of modeling lead exposure and reaction time}}, + Volume = {32}, + Year = {1997}} + +@article{Balbus-Kornfeld1995, + Author = {Balbus-Kornfeld, J. M. and Stewart, W. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {Occup Environ Med}, + Month = {Jan}, + Pages = {2--12}, + Title = {{{C}umulative exposure to inorganic lead and neurobehavioural test performance in adults: an epidemiological review}}, + Volume = {52}, + Year = {1995}} + +@article{Baler2006, + Author = {Baler, R. D. and Volkow, N. D.}, + Journal = {Trends Mol Med}, + Month = {Dec}, + Pages = {559--566}, + Title = {{{D}rug addiction: the neurobiology of disrupted self-control}}, + Volume = {12}, + Year = {2006}} + +@article{Baler2008, + Author = {Baler, R. D. and Volkow, N. D. and Fowler, J. S. and Benveniste, H.}, + Journal = {J Psychiatry Neurosci}, + Month = {May}, + Pages = {187--195}, + Title = {{{I}s fetal brain monoamine oxidase inhibition the missing link between maternal smoking and conduct disorders?}}, + Volume = {33}, + Year = {2008}} + +@article{Ballard2009, + Author = {Ballard, K. and Knutson, B.}, + Journal = {Neuroimage}, + Month = {Mar}, + Pages = {143--150}, + Title = {{{D}issociable neural representations of future reward magnitude and delay during temporal discounting}}, + Volume = {45}, + Year = {2009}} + +@article{Balleine2007a, + Author = {Balleine, B. W. and Delgado, M. R. and Hikosaka, O.}, + Journal = {J. Neurosci.}, + Month = {Aug}, + Pages = {8161--8165}, + Title = {{{T}he role of the dorsal striatum in reward and decision-making}}, + Volume = {27}, + Year = {2007}} + +@article{Balleine2007, + Author = {Balleine, B. W. and Doya, K. and O'Doherty, J. and Sakagami, M.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {May}, + Pages = {xi-xv}, + Title = {{{C}urrent trends in decision making}}, + Volume = {1104}, + Year = {2007}} + +@article{Balota1999, + Author = {Balota, D. A. and Spieler, D. 
H.}, + Journal = {Journal of Experimental Psychology: General}, + Pages = {32--55}, + Title = {Word Frequency, Repetition, and Lexicality Effects in Word Recognition Tasks: {B}eyond Measures of Central Tendency}, + Volume = {128}, + Year = {1999}} + +@article{Bando1996, + Abstract = {BACKGROUND: Although several techniques for the treatment of long-segment + stenosis of the trachea have been reported, including slide tracheoplasty, + rib grafting, and use of a pericardial patch, the optimal repair + remains controversial because of a lack of midterm to long-term follow-up + data. METHODS: To assess the intermediate and long-term outcomes + of patients having repair with anterior pericardial tracheoplasty, + we reviewed case histories of 12 patients (1984 to present). The + median age was 6.7 months (range, 1 to 98 months), and the median + weight was 6.0 kg (range, 0.97 to 42 kg). All patients underwent + anterior pericardial tracheoplasty through a median sternotomy during + partial normothermic cardiopulmonary bypass. An average of 13 tracheal + rings (range, five to 23) were divided anteriorly, and a patch of + fresh autologous pericardium was used to enlarge the trachea by 1.5 + times the predicted diameter for patient age and weight. RESULTS: + There was one hospital death, and all but 2 patients are long-term + survivors. All but 1 current survivor remain asymptomatic, with no + bronchoscopic evidence of airway obstruction or granulation on the + pericardial patch. All survivors examined have normal tracheal growth + and development, with a median follow-up of 5.5 years (range, 1 to + 11 years). CONCLUSIONS: Anterior pericardial tracheoplasty for congenital + tracheal stenosis provides excellent results at intermediate to long-term + follow-up.}, + Author = {K. Bando and M. W. Turrentine and K. Sun and T. G. Sharp and B. Matt and B. Karmazyn and S. A. Heifetz and J. Stevens and K. A. Kesler and J. W. Brown}, + Doi = {10.1016/0003-4975(96)00478-X}, + Institution = {Section of Cardiothoracic Surgery, James W. Riley Hospital for Children, Indianapolis, Indiana, USA.}, + Journal = {Ann Thorac Surg}, + Keywords = {Child; Child, Preschool; Follow-Up Studies; Humans; Infant; Infant, Newborn; Methods; Pericardium, transplantation; Postoperative Care; Postoperative Complications; Reoperation; Trachea, surgery; Tracheal Stenosis, congenital/surgery; Treatment Outcome}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {981--989}, + Pii = {0003-4975(96)00478-X}, + Pmid = {8823076}, + Timestamp = {2009.08.04}, + Title = {Anterior pericardial tracheoplasty for congenital tracheal stenosis: intermediate to long-term outcomes.}, + Url = {http://dx.doi.org/10.1016/0003-4975(96)00478-X}, + Volume = {62}, + Year = {1996}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/0003-4975(96)00478-X}} + +@article{Bar-Haim2009, + Author = {Bar-Haim, Y. and Fox, N. A. and Benson, B. and Guyer, A. E. and Williams, A. and Nelson, E. E. and Perez-Edgar, K. and Pine, D. S. and Ernst, M.}, + Journal = {Psychol Sci}, + Month = {Jul}, + Title = {{{N}eural {C}orrelates of {R}eward {P}rocessing in {A}dolescents {W}ith a {H}istory of {I}nhibited {T}emperament}}, + Year = {2009}} + +@article{Barch1997, + Author = {Barch, D. M. and Braver, T. S. and Nystrom, L. E. and Forman, S. D. and Noll, D. C. and Cohen, J. 
D.}, + Journal = {Neuropsychologia}, + Month = {Oct}, + Pages = {1373--1380}, + Title = {{{D}issociating working memory from task difficulty in human prefrontal cortex}}, + Volume = {35}, + Year = {1997}} + +@article{Barch2001, + Author = {Barch, D. M. and Carter, C. S. and Braver, T. S. and Sabb, F. W. and MacDonald, A. and Noll, D. C. and Cohen, J. D.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {280--288}, + Title = {{{S}elective deficits in prefrontal cortex function in medication-naive patients with schizophrenia}}, + Volume = {58}, + Year = {2001}} + +@article{Barch2003, + Author = {Barch, D. M. and Carter, C. S. and MacDonald, A. W. and Braver, T. S. and Cohen, J. D.}, + Journal = {J Abnorm Psychol}, + Month = {Feb}, + Pages = {132--143}, + Title = {{{C}ontext-processing deficits in schizophrenia: diagnostic specificity, 4-week course, and relationships to clinical symptoms}}, + Volume = {112}, + Year = {2003}} + +@article{Barch1999, + Author = {Barch, D. M. and Sabb, F. W. and Carter, C. S. and Braver, T. S. and Noll, D. C. and Cohen, J. D.}, + Journal = {Neuroimage}, + Month = {Dec}, + Pages = {642--657}, + Title = {{{O}vert verbal responding during f{M}{R}{I} scanning: empirical investigations of problems and potential solutions}}, + Volume = {10}, + Year = {1999}} + +@book{Barlow1972, + Address = {Chichester}, + Author = {Barlow, R. E. and Bartholomew, D. J. and Bremner, J. M. and Brunk, H. D.}, + Publisher = {John Wiley \& Sons}, + Title = {Statistical Inference Under Order Restrictions}, + Year = {1972}} + +@article{Barnard1949, + Author = {Barnard, G. A.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {115--149}, + Title = {Statistical Inference}, + Volume = {11}, + Year = {1949}} + +@article{Barnard1947, + Author = {Barnard, G. A.}, + Journal = {Biometrika}, + Pages = {179--182}, + Title = {The Meaning of a Significance Level}, + Volume = {34}, + Year = {1947}} + +@article{Barnard1962, + Author = {Barnard, G. A. and Jenkins, G. M. and Winsten, C. B.}, + Journal = {Journal of the Royal Statistical Society A}, + Pages = {321--372}, + Title = {Likelihood Inference and Time Series}, + Volume = {125}, + Year = {1962}} + +@article{Barren2003, + Author = {Barron, G. and Erev, I.}, + Journal = {Journal of Behavioral Decision Making}, + Owner = {WooYoung Ahn}, + Pages = {215--233}, + Timestamp = {2007.07.18}, + Title = {Small feedback-based decisions and their limited correspondence to description-based decisions}, + Volume = {16}, + Year = {2003}} + +@article{Barron1998, + Author = {Barron, A. and Rissanen, J. and Yu, B.}, + Journal = {IEEE Transactions on Information Theory}, + Pages = {2743--2760}, + Title = {The Minimum Description Length Principle in Coding and Modeling}, + Volume = {44}, + Year = {1998}} + +@article{Barrot2002, + Author = {Barrot, M. and Olivier, J. D. and Perrotti, L. I. and DiLeone, R. J. and Berton, O. and Eisch, A. J. and Impey, S. and Storm, D. R. and Neve, R. L. and Yin, J. C. and Zachariou, V. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Aug}, + Pages = {11435--11440}, + Title = {{{C}{R}{E}{B} activity in the nucleus accumbens shell controls gating of behavioral responses to emotional stimuli}}, + Volume = {99}, + Year = {2002}} + +@article{Bartzokis1999, + Author = {Bartzokis, G. and Goldstein, I.B. and Hance, D.B. and Beckson, M. and Shapiro, D. and Lu, P.H. and Edwards, N. and Mintz, J.
and Bridge, P.}, + Journal = {American Journal of Neuroradiology}, + Number = {9}, + Pages = {1628--1635}, + Publisher = {Am Soc Neuroradiology}, + Title = {{The incidence of T2-weighted MR imaging signal abnormalities in the brain of cocaine-dependent patients is age-related and region-specific}}, + Volume = {20}, + Year = {1999}} + +@article{Basak2001, + Author = {Basak, G. K. and Chan, N. H. and Palma, W.}, + Journal = {Journal of Forecasting}, + Pages = {367--389}, + Title = {The Approximation of Long--memory Processes by an {ARMA} Model}, + Volume = {20}, + Year = {2001}} + +@article{Bassingthwaighte1988, + Author = {Bassingthwaighte, J. B.}, + Journal = {News in Physiological Sciences}, + Pages = {5--10}, + Title = {Physiological Heterogeneity: Fractals Link Determinism and Randomness in Structure and Function}, + Volume = {3}, + Year = {1988}} + +@article{Basu1975, + Author = {Basu, D.}, + Journal = {Sankhya A}, + Pages = {1--71}, + Title = {Statistical Information and Likelihood}, + Volume = {37}, + Year = {1975}} + +@article{Basu1964, + Author = {Basu, D.}, + Journal = {Sankhya A}, + Pages = {3--16}, + Title = {Recovery of Ancillary Information}, + Volume = {26}, + Year = {1964}} + +@article{Batchelder1998, + Author = {Batchelder, W. H.}, + Journal = {Psychological Assessment}, + Pages = {331--344}, + Title = {Multinomial Processing Tree Models and Psychological Assessment}, + Volume = {10}, + Year = {1998}} + +@article{Batchelder1999, + Author = {Batchelder, W. H. and Riefer, D. M.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {57--86}, + Title = {Theoretical and Empirical Review of Multinomial Process Tree Modeling}, + Volume = {6}, + Year = {1999}} + +@article{Baumgartner2006, + Author = {Baumgartner, U. and Tiede, W. and Treede, R. D. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Nov}, + Pages = {2802--2808}, + Title = {{{L}aser-evoked potentials are graded and somatotopically organized anteroposteriorly in the operculoinsular cortex of anesthetized monkeys}}, + Volume = {96}, + Year = {2006}} + +@article{Bayarri2004, + Author = {Bayarri, M. J. and Berger, J. O.}, + Journal = {Statistical Science}, + Pages = {58--80}, + Title = {The Interplay of {B}ayesian and Frequentist Analysis}, + Volume = {19}, + Year = {2004}} + +@article{Bayer2005, + Author = {Bayer, H.M. and Glimcher, P.W.}, + Journal = {Neuron}, + Number = {1}, + Pages = {129--141}, + Publisher = {Elsevier}, + Title = {{Midbrain dopamine neurons encode a quantitative reward prediction error signal}}, + Volume = {47}, + Year = {2005}} + +@article{Bearden2004, + Author = {Bearden, C. E. and van Erp, T. G. and Monterosso, J. R. and Simon, T. J. and Glahn, D. C. and Saleh, P. A. and Hill, N. M. and McDonald-McGinn, D. M. and Zackai, E. and Emanuel, B. S. and Cannon, T. D.}, + Journal = {Neurocase}, + Month = {Jun}, + Pages = {198--206}, + Title = {{{R}egional brain abnormalities in 22q11.2 deletion syndrome: association with cognitive abilities and behavioral symptoms}}, + Volume = {10}, + Year = {2004}} + +@article{Bechara2005, + Author = {Bechara, A.}, + Journal = {Nat. Neurosci.}, + Month = {Nov}, + Pages = {1458--1463}, + Title = {{{D}ecision making, impulse control and loss of willpower to resist drugs: a neurocognitive perspective}}, + Volume = {8}, + Year = {2005}} + +@article{Bechara2004e, + Author = {Bechara, A.}, + Journal = {Int. Rev.
Neurobiol.}, + Pages = {159--193}, + Title = {{{D}isturbances of emotion regulation after focal brain lesions}}, + Volume = {62}, + Year = {2004}} + +@article{Bechara2003, + Author = {Bechara, A.}, + Journal = {J Gambl Stud}, + Pages = {23--51}, + Title = {{{R}isky business: emotion, decision-making, and addiction}}, + Volume = {19}, + Year = {2003}} + +@article{Bechara2003e, + Author = {Bechara, A.}, + Journal = {Journal of Gambling Studies}, + Number = {1}, + Pages = {23--51}, + Publisher = {Springer}, + Title = {{Risky business: emotion, decision-making, and addiction}}, + Volume = {19}, + Year = {2003}} + +@article{Bechara2001, + Author = {Bechara, A.}, + Journal = {Semin Clin Neuropsychiatry}, + Month = {Jul}, + Pages = {205--216}, + Title = {{{N}eurobiology of decision-making: risk and reward}}, + Volume = {6}, + Year = {2001}} + +@article{Bechara1994, + Author = {Bechara, A. and Damasio, A. R. and Damasio, H. and Anderson, S.}, + Journal = {Cognition}, + Pages = {7--15}, + Title = {Insensitivity to Future Consequences Following Damage to Human Prefrontal Cortex}, + Volume = {50}, + Year = {1994}} + +@article{Bechara2002, + Author = {Bechara, A. and Damasio, H.}, + Journal = {Neuropsychologia}, + Pages = {1675--1689}, + Title = {{{D}ecision-making and addiction (part {I}): impaired activation of somatic states in substance dependent individuals when pondering decisions with negative future consequences}}, + Volume = {40}, + Year = {2002}} + +@article{Bechara1997, + Author = {Bechara, A. and Damasio, H. and Tranel, D. and Damasio, A. R.}, + Journal = {Science}, + Pages = {1293--1295}, + Title = {Deciding Advantageously Before Knowing the Advantageous Strategy}, + Volume = {275}, + Year = {1997}} + +@article{Bechara2001a, + Author = {Bechara, A. and Dolan, S. and Denburg, N. and Hindes, A. and Anderson, S. W. and Nathan, P. E.}, + Journal = {Neuropsychologia}, + Pages = {376--389}, + Title = {{{D}ecision-making deficits, linked to a dysfunctional ventromedial prefrontal cortex, revealed in alcohol and stimulant abusers}}, + Volume = {39}, + Year = {2001}} + +@article{Bechara2002a, + Author = {Bechara, A. and Dolan, S. and Hindes, A.}, + Journal = {Neuropsychologia}, + Pages = {1690--1705}, + Title = {{{D}ecision-making and addiction (part {I}{I}): myopia for the future or hypersensitivity to reward?}}, + Volume = {40}, + Year = {2002}} + +@article{Bechara1992, + Author = {Bechara, A. and Harrington, F. and Nader, K. and van der Kooy, D.}, + Journal = {Behav. Neurosci.}, + Month = {Oct}, + Pages = {798--807}, + Title = {{{N}eurobiology of motivation: double dissociation of two motivational mechanisms mediating opiate reward in drug-naive versus drug-dependent animals}}, + Volume = {106}, + Year = {1992}} + +@article{Bechara1992a, + Author = {Bechara, A. and van der Kooy, D.}, + Journal = {Behav. Neurosci.}, + Month = {Apr}, + Pages = {364--373}, + Title = {{{C}hronic exposure to morphine does not alter the neural tissues subserving its acute rewarding properties: apparent tolerance is overshadowing}}, + Volume = {106}, + Year = {1992}} + +@article{Bechara1992b, + Author = {Bechara, A. and van der Kooy, D.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {May}, + Pages = {9--18}, + Title = {{{L}esions of the tegmental pedunculopontine nucleus: effects on the locomotor activity induced by morphine and amphetamine}}, + Volume = {42}, + Year = {1992}} + +@article{Bechara2004, + Author = {Bechara, A. and Martin, E. 
M.}, + Journal = {Neuropsychology}, + Month = {Jan}, + Pages = {152--162}, + Title = {{{I}mpaired decision making related to working memory deficits in individuals with substance addictions}}, + Volume = {18}, + Year = {2004}} + +@article{Bechara1998, + Author = {Bechara, A. and Nader, K. and van der Kooy, D.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jan}, + Pages = {1--17}, + Title = {{{A} two-separate-motivational-systems hypothesis of opioid addiction}}, + Volume = {59}, + Year = {1998}} + +@article{Bechara2007, + Author = {Bechara, F. G. and Sand, M. and Hoffmann, K. and Sand, D. and Altmeyer, P. and St\"{o}cker, M.}, + Journal = {J. Cutan. Pathol.}, + Month = {Jul}, + Pages = {552--557}, + Title = {{{F}at tissue after lipolysis of lipomas: a histopathological and immunohistochemical study}}, + Volume = {34}, + Year = {2007}} + +@article{Bechara2008, + Author = {Bechara, F. G. and Skrygan, M. and Kreuter, A. and Altmeyer, P. and Gambichler, T.}, + Journal = {Arch. Dermatol. Res.}, + Month = {Sep}, + Pages = {455--459}, + Title = {{{C}ytokine m{R}{N}{A} levels in human fat tissue after injection lipolysis with phosphatidylcholine and deoxycholate}}, + Volume = {300}, + Year = {2008}} + +@article{Beck2009, + Author = {Beck, A. and Schlagenhauf, F. and W\"{u}stenberg, T. and Hein, J. and Kienast, T. and Kahnt, T. and Schmack, K. and H\"{a}gele, C. and Knutson, B. and Heinz, A. and Wrase, J.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Title = {{{V}entral {S}triatal {A}ctivation {D}uring {R}eward {A}nticipation {C}orrelates with {I}mpulsivity in {A}lcoholics}}, + Year = {2009}} + +@article{Beck2008a, + Author = {Beck, J. M. and Ma, W. J. and Kiani, R. and Hanks, T. and Churchland, A. K. and Roitman, J. and Shadlen, M. N. and Latham, P. E. and Pouget, A.}, + Journal = {Neuron}, + Month = {Dec}, + Pages = {1142--1152}, + Title = {{{P}robabilistic population codes for {B}ayesian decision making}}, + Volume = {60}, + Year = {2008}} + +@article{Becker1988, + Author = {Becker, G.S. and Murphy, K.M.}, + Journal = {The Journal of Political Economy}, + Number = {4}, + Pages = {675}, + Publisher = {UChicago Press}, + Title = {{A theory of rational addiction}}, + Volume = {96}, + Year = {1988}} + +@article{Beesdo2009, + Author = {Beesdo, K. and Lau, J. Y. and Guyer, A. E. and McClure-Tone, E. B. and Monk, C. S. and Nelson, E. E. and Fromm, S. J. and Goldwin, M. A. and Wittchen, H. U. and Leibenluft, E. and Ernst, M. and Pine, D. S.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {275--285}, + Title = {{{C}ommon and distinct amygdala-function perturbations in depressed vs anxious adolescents}}, + Volume = {66}, + Year = {2009}} + +@article{Beggs2003, + Author = {Beggs, J. and Jordan, S. and Ericson, A. C. and Blomqvist, A. and Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {May}, + Pages = {334--354}, + Title = {{{S}ynaptology of trigemino- and spinothalamic lamina {I} terminations in the posterior ventral medial nucleus of the macaque}}, + Volume = {459}, + Year = {2003}} + +@article{Behrens2007, + Author = {Behrens, Timothy E. J. and Woolrich, Mark W. and Walton, Mark E. and Rushworth, Matthew F. S.}, + Journal = {Nature neuroscience}, + Owner = {WooYoung Ahn}, + Pages = {1214--1221}, + Timestamp = {2007.12.12}, + Title = {Learning the value of information in an uncertain world}, + Volume = {10 (9)}, + Year = {2007}} + +@article{Beitner1989, + Author = {Beitner, D. B. and Duman, R. S. and Nestler, E. J.}, + Journal = {Mol.
Pharmacol.}, + Month = {May}, + Pages = {559--564}, + Title = {{{A} novel action of morphine in the rat locus coeruleus: persistent decrease in adenylate cyclase}}, + Volume = {35}, + Year = {1989}} + +@article{Beitner-Johnson1992, + Author = {Beitner-Johnson, D. and Guitart, X. and Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Jun}, + Pages = {2165--2176}, + Title = {{{N}eurofilament proteins and the mesolimbic dopamine system: common regulation by chronic morphine and chronic cocaine in the rat ventral tegmental area}}, + Volume = {12}, + Year = {1992}} + +@article{Beitner-Johnson1991a, + Author = {Beitner-Johnson, D. and Guitart, X. and Nestler, E. J.}, + Journal = {Brain Res.}, + Month = {Oct}, + Pages = {147--150}, + Title = {{{D}opaminergic brain reward regions of {L}ewis and {F}ischer rats display different levels of tyrosine hydroxylase and other morphine- and cocaine-regulated phosphoproteins}}, + Volume = {561}, + Year = {1991}} + +@article{Beitner-Johnson1991, + Author = {Beitner-Johnson, D. and Nestler, E. J.}, + Journal = {J. Neurochem.}, + Month = {Jul}, + Pages = {344--347}, + Title = {{{M}orphine and cocaine exert common chronic actions on tyrosine hydroxylase in dopaminergic brain reward regions}}, + Volume = {57}, + Year = {1991}} + +@article{Bekir1993, + Author = {Bekir, P. and McLellan, T. and Childress, A. R. and Gariti, P.}, + Journal = {Int J Addict}, + Month = {May}, + Pages = {613--630}, + Title = {{{R}ole reversals in families of substance misusers: a transgenerational phenomenon}}, + Volume = {28}, + Year = {1993}} + +@article{Belin2008a, + Author = {Belin, D. and Everitt, B. J.}, + Journal = {Neuron}, + Pages = {432--441}, + Title = {{{C}ocaine seeking habits depend upon dopamine-dependent serial connectivity linking the ventral with the dorsal striatum}}, + Volume = {57}, + Year = {2008}} + +@article{Belin2008, + Author = {Belin, D. and Mar, A. C. and Dalley, J. W. and Robbins, T. W. and Everitt, B. J.}, + Journal = {Science}, + Pages = {1352--1355}, + Title = {{{H}igh impulsivity predicts the switch to compulsive cocaine-taking}}, + Volume = {320}, + Year = {2008}} + +@article{Bell2007, + Abstract = {Amyloid beta-peptide (Abeta) clearance from the central nervous system + (CNS) maintains its low levels in brain. In Alzheimer's disease, + Abeta accumulates in brain possibly because of its faulty CNS clearance + and a deficient efflux across the blood-brain barrier (BBB). By using + human-specific enzyme-linked immunosorbent assays, we measured a + rapid 30 mins efflux at the BBB and transport via the interstitial + fluid (ISF) bulk flow of human-unlabeled Abeta and of Abeta transport + proteins, apolipoprotein E (apoE) and apoJ in mice. We show (i) Abeta40 + is cleared rapidly across the BBB via low-density lipoprotein receptor-related + protein (LRP)1 at a rate of 0.21 pmol/min g ISF or 6-fold faster + than via the ISF flow; (ii) Abeta42 is removed across the BBB at + a rate 1.9-fold slower compared with Abeta40; (iii) apoE, lipid-poor + isoform 3, is cleared slowly via the ISF flow and across the BBB + (0.03-0.04 pmol/min g ISF), and after lipidation its transport at + the BBB becomes barely detectable within 30 mins; (iv) apoJ is eliminated + rapidly across the BBB (0.16 pmol/min g ISF) via LRP2. Clearance + rates of unlabeled and corresponding 125I-labeled Abeta and apolipoproteins + were almost identical, but could not be measured at low physiologic + levels by mass spectrometry. 
Amyloid beta-peptide 40 binding to apoE3 + reduced its efflux rate at the BBB by 5.7-fold, whereas Abeta42 binding + to apoJ enhanced Abeta42 BBB clearance rate by 83\%. Thus, Abeta, + apoE, and apoJ are cleared from brain by different transport pathways, + and apoE and apoJ may critically modify Abeta clearance at the BBB.}, + Author = {Robert D Bell and Abhay P Sagare and Alan E Friedman and Gurrinder S Bedi and David M Holtzman and Rashid Deane and Berislav V Zlokovic}, + Doi = {10.1038/sj.jcbfm.9600419}, + Institution = {Frank P Smith Laboratory for Neuroscience and Neurosurgical Research, Department of Neurosurgery, University of Rochester Medical Center, Rochester, New York 14642, USA.}, + Journal = {J Cereb Blood Flow Metab}, + Keywords = {Alzheimer Disease, metabolism; Amyloid beta-Protein, metabolism; Animals; Apolipoproteins E, metabolism; Biological Transport, Active, physiology; Blood-Brain Barrier; Clusterin, metabolism; Electrophoresis, Polyacrylamide Gel; Enzyme-Linked Immunosorbent Assay; Humans; Iodine Radioisotopes, diagnostic use; Ligands; Mice; Mice, Inbred C57BL; Models, Statistical; Spectrometry, Mass, Electrospray Ionization; Spectrometry, Mass, Matrix-Assisted Laser Desorption-Ionization}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {5}, + Owner = {Young}, + Pages = {909--918}, + Pii = {9600419}, + Pmid = {17077814}, + Timestamp = {2009.12.10}, + Title = {Transport pathways for clearance of human Alzheimer's amyloid beta-peptide and apolipoproteins E and J in the mouse central nervous system.}, + Url = {http://dx.doi.org/10.1038/sj.jcbfm.9600419}, + Volume = {27}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/sj.jcbfm.9600419}} + +@article{Bellgrove2004, + Author = {Bellgrove, M. A. and Hester, R. and Garavan, H.}, + Journal = {Neuropsychologia}, + Pages = {1910--1916}, + Title = {{{T}he functional neuroanatomical correlates of response variability: evidence from a response inhibition task}}, + Volume = {42}, + Year = {2004}} + +@book{Bellman1957, + Address = {Princeton, NJ}, + Author = {Bellman, R.}, + Publisher = {Princeton University Press}, + Title = {{Dynamic Programming}}, + Year = {1957}} + +@article{Belova2007, + Author = {Belova, M. A. and Paton, J. J. and Morrison, S. E. and Salzman, C. D.}, + Journal = {Neuron}, + Month = {Sep}, + Pages = {970--984}, + Title = {{{E}xpectation modulates neural responses to pleasant and aversive stimuli in primate amygdala}}, + Volume = {55}, + Year = {2007}} + +@article{Ben-Porath1986, + Author = {Y. S. Ben-Porath and J. N. Butcher}, + Journal = {Computers in Human Behavior}, + Pages = {167--182}, + Title = {Computers in personality assessment: A brief past, an ebullient present, and an expanding future}, + Volume = {2}, + Year = {1986}} + +@article{Bendiksby2006, + Author = {Bendiksby, M. S. and Platt, M. L.}, + Journal = {Neuropsychologia}, + Pages = {2411--2420}, + Title = {{{N}eural correlates of reward and attention in macaque area {L}{I}{P}}}, + Volume = {44}, + Year = {2006}} + +@article{Benn2009, + Author = {Benn, E. K. and Allen Hauser, W. and Shih, T. and Leary, L. and Bagiella, E. and Dayan, P. and Green, R. and Andrews, H. and Thurman, D. J. and Hesdorffer, D. C.}, + Journal = {Epilepsia}, + Month = {Jun}, + Title = {{{U}nderlying cause of death in incident unprovoked seizures in the urban community of {N}orthern {M}anhattan, {N}ew {Y}ork {C}ity}}, + Year = {2009}} + +@article{Benn2008, + Author = {Benn, E. K. and Hauser, W. A. and Shih, T. and Leary, L. and Bagiella, E. and Dayan, P.
and Green, R. and Andrews, H. and Thurman, D. J. and Hesdorffer, D. C.},
+ Journal = {Epilepsia},
+ Month = {Aug},
+ Pages = {1431--1439},
+ Title = {{{E}stimating the incidence of first unprovoked seizure and newly diagnosed epilepsy in the low-income urban community of {N}orthern {M}anhattan, {N}ew {Y}ork {C}ity}},
+ Volume = {49},
+ Year = {2008}}
+
+@article{Bennett1998,
+ Author = {Bennett, B. A. and Hollingsworth, C. K. and Martin, R. S. and Childers, S. R. and Ehrenkaufer, R. E. and Porrino, L. J. and Davies, H. M.},
+ Journal = {Neuropharmacology},
+ Pages = {123--130},
+ Title = {{{P}rolonged dopamine and serotonin transporter inhibition after exposure to tropanes}},
+ Volume = {37},
+ Year = {1998}}
+
+@article{Benveniste2005,
+ Author = {Benveniste, H. and Fowler, J. S. and Rooney, W. and Ding, Y. S. and Baumann, A. L. and Moller, D. H. and Du, C. and Backus, W. and Logan, J. and Carter, P. and Coplan, J. D. and Biegon, A. and Rosenblum, L. and Scharf, B. and Gatley, J. S. and Volkow, N. D.},
+ Journal = {J. Nucl. Med.},
+ Month = {Feb},
+ Pages = {312--320},
+ Title = {{{M}aternal and fetal 11{C}-cocaine uptake and kinetics measured in vivo by combined {P}{E}{T} and {M}{R}{I} in pregnant nonhuman primates}},
+ Volume = {46},
+ Year = {2005}}
+
+@article{Benveniste2003,
+ Author = {Benveniste, H. and Fowler, J. S. and Rooney, W. D. and Moller, D. H. and Backus, W. W. and Warner, D. A. and Carter, P. and King, P. and Scharf, B. and Alexoff, D. A. and Ma, Y. and Vaska, P. and Schlyer, D. and Volkow, N. D.},
+ Journal = {J. Nucl. Med.},
+ Month = {Sep},
+ Pages = {1522--1530},
+ Title = {{{M}aternal-fetal in vivo imaging: a combined {P}{E}{T} and {M}{R}{I} study}},
+ Volume = {44},
+ Year = {2003}}
+
+@book{Beran1994,
+ Address = {New York},
+ Author = {Beran, J.},
+ Publisher = {Chapman \& Hall},
+ Title = {Statistics for Long--memory Processes},
+ Year = {1994}}
+
+@article{Berger2003,
+ Author = {Berger, J. O.},
+ Journal = {Statistical Science},
+ Pages = {1--32},
+ Title = {Could {F}isher, {J}effreys and {N}eyman Have Agreed on Testing?},
+ Volume = {18},
+ Year = {2003}}
+
+@article{Berger1996,
+ Author = {Berger, J. O.},
+ Journal = {The American Statistician},
+ Pages = {382--383},
+ Title = {Comment on ``Simple Counterexamples Against the Conditionality Principle'' by {I}nge {S}. {H}elland},
+ Volume = {50},
+ Year = {1996}}
+
+@book{Berger1985,
+ Address = {New York},
+ Author = {Berger, J. O.},
+ Edition = {2nd},
+ Publisher = {Springer},
+ Title = {Statistical Decision Theory and {B}ayesian Analysis},
+ Year = {1985}}
+
+@incollection{Berger1988,
+ Address = {New York},
+ Author = {Berger, J. O. and Berry, D. A.},
+ Booktitle = {Statistical Decision Theory and Related Topics: Vol. 1},
+ Editor = {Gupta, S. S. and Berger, J. O.},
+ Pages = {29--72},
+ Publisher = {Springer Verlag},
+ Title = {The Relevance of Stopping Rules in Statistical Inference},
+ Year = {1988}}
+
+@article{Berger1988a,
+ Author = {Berger, J. O. and Berry, D. A.},
+ Journal = {American Scientist},
+ Pages = {159--165},
+ Title = {Statistical Analysis and the Illusion of Objectivity},
+ Volume = {76},
+ Year = {1988}}
+
+@article{Berger1997,
+ Author = {Berger, J. O. and Boukai, B.
and Wang, Y.},
+ Journal = {Statistical Science},
+ Pages = {133--160},
+ Title = {Unified Frequentist and {B}ayesian Testing of a Precise Hypothesis (with discussion)},
+ Volume = {12},
+ Year = {1997}}
+
+@article{Berger1994,
+ Author = {Berger, J. O. and Brown, L. and Wolpert, R.},
+ Journal = {The Annals of Statistics},
+ Pages = {1787--1807},
+ Title = {A Unified Conditional Frequentist and {B}ayesian Test for Fixed and Sequential Hypothesis Testing},
+ Volume = {22},
+ Year = {1994}}
+
+@article{Berger1987,
+ Author = {Berger, J. O. and Delampady, M.},
+ Journal = {Statistical Science},
+ Pages = {317--352},
+ Title = {Testing Precise Hypotheses},
+ Volume = {2},
+ Year = {1987}}
+
+@article{Berger1999,
+ Author = {Berger, J. O. and Mortera, J.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {542--554},
+ Title = {Default {B}ayes Factors for Nonnested Hypothesis Testing},
+ Volume = {94},
+ Year = {1999}}
+
+@article{Berger1996a,
+ Author = {Berger, J. O. and Pericchi, L. R.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {109--122},
+ Title = {The Intrinsic {B}ayes Factor for Model Selection and Prediction},
+ Volume = {91},
+ Year = {1996}}
+
+@article{Berger1987a,
+ Author = {Berger, J. O. and Sellke, T.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {112--139},
+ Title = {Testing a Point Null Hypothesis: The Irreconcilability of p Values and Evidence},
+ Volume = {82},
+ Year = {1987}}
+
+@book{Berger1988b,
+ Address = {Hayward (CA)},
+ Author = {Berger, J. O. and Wolpert, R. L.},
+ Publisher = {Institute of Mathematical Statistics},
+ Title = {The Likelihood Principle (2nd ed.)},
+ Year = {1988}}
+
+@article{Berhow1995,
+ Author = {Berhow, M. T. and Russell, D. S. and Terwilliger, R. Z. and Beitner-Johnson, D. and Self, D. W. and Lindsay, R. M. and Nestler, E. J.},
+ Journal = {Neuroscience},
+ Month = {Oct},
+ Pages = {969--979},
+ Title = {{{I}nfluence of neurotrophic factors on morphine- and cocaine-induced biochemical changes in the mesolimbic dopamine system}},
+ Volume = {68},
+ Year = {1995}}
+
+@article{Berke2000,
+ Author = {Berke, J. D. and Hyman, S. E.},
+ Journal = {Neuron},
+ Month = {Mar},
+ Pages = {515--532},
+ Title = {{{A}ddiction, dopamine, and the molecular mechanisms of memory}},
+ Volume = {25},
+ Year = {2000}}
+
+@article{Berman2008,
+ Author = {Berman, S. M. and Voytek, B. and Mandelkern, M. A. and Hassid, B. D. and Isaacson, A. and Monterosso, J. and Miotto, K. and Ling, W. and London, E. D.},
+ Journal = {Mol. Psychiatry},
+ Month = {Sep},
+ Pages = {897--908},
+ Title = {{{C}hanges in cerebral glucose metabolism during early abstinence from chronic methamphetamine abuse}},
+ Volume = {13},
+ Year = {2008}}
+
+@book{Bernardo1994,
+ Address = {New York},
+ Author = {Bernardo, J. M. and Smith, A. F. M.},
+ Publisher = {Wiley},
+ Title = {{B}ayesian Theory},
+ Year = {1994}}
+
+@article{Berns2001,
+ Author = {Berns, G.S. and McClure, S.M. and Pagnoni, G. and Montague, P.R.},
+ Journal = {Journal of Neuroscience},
+ Number = {8},
+ Pages = {2793},
+ Title = {{Predictability modulates human brain response to reward}},
+ Volume = {21},
+ Year = {2001}}
+
+@article{Berrettini2004,
+ Author = {Berrettini, W. and Bierut, L. and Crowley, T. J. and Cubells, J. F. and Frascella, J. and Gelernter, J. and Hewitt, J. K. and Kreek, M. J. and Lachman, H. and Leppert, M. and Li, M. D. and Madden, P. and Miner, C. and Pollock, J. D.
and Pomerleau, O. and Rice, J. P. and Rutter, J. L. and Shurtleff, D. and Swan, G. E. and Tischfield, J. A. and Tsuang, M. and Uhl, G. R. and Vanyukov, M. and Volkow, N. D. and Wanke, K.}, + Journal = {Science}, + Month = {Jun}, + Pages = {1445--1447}, + Title = {{{S}etting priorities for genomic research}}, + Volume = {304}, + Year = {2004}} + +@article{Berridge2008, + Author = {Berridge, K.C. and Aldridge, J.W.}, + Journal = {Social Cognition}, + Number = {5}, + Pages = {621--646}, + Publisher = {Guilford Publications}, + Title = {{Special Review: Decision Utility, The Brain, and Pursuit of Hedonic Goals}}, + Volume = {26}, + Year = {2008}} + +@article{Berry1989, + Author = {Berry, D. A.}, + Journal = {Biometrics}, + Pages = {1197--1211}, + Title = {Monitoring Accumulating Data in a Clinical Trial}, + Volume = {45}, + Year = {1989}} + +@book{Berry1985, + Address = {London}, + Author = {Berry, D. A. and Fristedt, B.}, + Publisher = {Chapman \& Hall}, + Title = {Bandit Problems: {S}equential Allocation of Experiments}, + Year = {1985}} + +@article{Bertram2007a, + Abstract = {The past decade has witnessed hundreds of reports declaring or refuting + genetic association with putative Alzheimer disease susceptibility + genes. This wealth of information has become increasingly difficult + to follow, much less interpret. We have created a publicly available, + continuously updated database that comprehensively catalogs all genetic + association studies in the field of Alzheimer disease (http://www.alzgene.org). + We performed systematic meta-analyses for each polymorphism with + available genotype data in at least three case-control samples. In + addition to identifying the epsilon4 allele of APOE and related effects, + we pinpointed over a dozen potential Alzheimer disease susceptibility + genes (ACE, CHRNB2, CST3, ESR1, GAPDHS, IDE, MTHFR, NCSTN, PRNP, + PSEN1, TF, TFAM and TNF) with statistically significant allelic summary + odds ratios (ranging from 1.11-1.38 for risk alleles and 0.92-0.67 + for protective alleles). Our database provides a powerful tool for + deciphering the genetics of Alzheimer disease, and it serves as a + potential model for tracking the most viable gene candidates in other + genetically complex diseases.}, + Author = {Lars Bertram and Matthew B McQueen and Kristina Mullin and Deborah Blacker and Rudolph E Tanzi}, + Doi = {10.1038/ng1934}, + Institution = {Genetics and Aging Research Unit, MassGeneral Institute for Neurodegenerative Disease (MIND), Department of Neurology, Massachusetts General Hospital, Charlestown, Massachusetts 02129, USA. bertram@helix.mgh.harvard.edu}, + Journal = {Nat Genet}, + Keywords = {Alzheimer Disease, genetics; Apolipoprotein E4, genetics; Databases, Genetic; Genetic Heterogeneity; Genetic Predisposition to Disease; Humans; Linkage (Genetics); Polymorphism, Genetic}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Young}, + Pages = {17--23}, + Pii = {ng1934}, + Pmid = {17192785}, + Timestamp = {2009.12.10}, + Title = {Systematic meta-analyses of Alzheimer disease genetic association studies: the AlzGene database.}, + Url = {http://dx.doi.org/10.1038/ng1934}, + Volume = {39}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/ng1934}} + +@article{Besag1995, + Author = {Besag, J. and Green, P. and Higdon, D. 
and Mengersen, K.},
+ Journal = {Statistical Science},
+ Number = {1},
+ Owner = {Wooyoung Ahn},
+ Pages = {3--66},
+ Timestamp = {2007.05.01},
+ Title = {Bayesian computation and stochastic systems (with discussion)},
+ Volume = {10},
+ Year = {1995}}
+
+@article{Beveridge2008,
+ Author = {Beveridge, T. J. and Gill, K. E. and Hanlon, C. A. and Porrino, L. J.},
+ Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.},
+ Month = {Oct},
+ Pages = {3257--3266},
+ Title = {{{R}eview. {P}arallel studies of cocaine-related neural and cognitive impairment in humans and monkeys}},
+ Volume = {363},
+ Year = {2008}}
+
+@article{Beveridge2006,
+ Author = {Beveridge, T. J. and Smith, H. R. and Daunais, J. B. and Nader, M. A. and Porrino, L. J.},
+ Journal = {Eur. J. Neurosci.},
+ Month = {Jun},
+ Pages = {3109--3118},
+ Title = {{{C}hronic cocaine self-administration is associated with altered functional activity in the temporal lobes of non human primates}},
+ Volume = {23},
+ Year = {2006}}
+
+@article{Beveridge2009,
+ Author = {Beveridge, T. J. and Smith, H. R. and Nader, M. A. and Porrino, L. J.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Apr},
+ Pages = {1162--1171},
+ Title = {{{A}bstinence from chronic cocaine self-administration alters striatal dopamine systems in rhesus monkeys}},
+ Volume = {34},
+ Year = {2009}}
+
+@article{Beveridge2005,
+ Author = {Beveridge, T. J. and Smith, H. R. and Nader, M. A. and Porrino, L. J.},
+ Journal = {Psychopharmacology (Berl.)},
+ Month = {Aug},
+ Pages = {781--788},
+ Title = {{{E}ffects of chronic cocaine self-administration on norepinephrine transporters in the nonhuman primate brain}},
+ Volume = {180},
+ Year = {2005}}
+
+@article{Beveridge2004,
+ Author = {Beveridge, T. J. and Smith, H. R. and Nader, M. A. and Porrino, L. J.},
+ Journal = {Neurosci. Lett.},
+ Month = {Nov},
+ Pages = {201--205},
+ Title = {{{F}unctional effects of cocaine self-administration in primate brain regions regulating cardiovascular function}},
+ Volume = {370},
+ Year = {2004}}
+
+@article{Bhansali1999,
+ Author = {Bhansali, R. J.},
+ Journal = {Journal of Statistical Planning and Inference},
+ Pages = {295--305},
+ Title = {Autoregressive Model Selection for Multistep Prediction},
+ Volume = {78},
+ Year = {1999}}
+
+@article{Bibb2001,
+ Author = {Bibb, J. A. and Chen, J. and Taylor, J. R. and Svenningsson, P. and Nishi, A. and Snyder, G. L. and Yan, Z. and Sagawa, Z. K. and Ouimet, C. C. and Nairn, A. C. and Nestler, E. J. and Greengard, P.},
+ Journal = {Nature},
+ Month = {Mar},
+ Pages = {376--380},
+ Title = {{{E}ffects of chronic exposure to cocaine are regulated by the neuronal protein {C}dk5}},
+ Volume = {410},
+ Year = {2001}}
+
+@article{Bickel1999,
+ Author = {Bickel, W.K. and Odum, A.L. and Madden, G.J.},
+ Journal = {Psychopharmacology},
+ Owner = {ahnw},
+ Pages = {447--454},
+ Timestamp = {2007.05.01},
+ Title = {Impulsivity and cigarette smoking: delay discounting in current, never, and ex-smokers},
+ Volume = {146},
+ Year = {1999}}
+
+@article{Bickel2001,
+ Abstract = {Behavioral economics examines conditions that influence the consumption
+ of commodities and provides several concepts that may be instrumental
+ in understanding drug dependence. One such concept of significance
+ is that of how delayed reinforcers are discounted by drug dependent
+ individuals.
Discounting of delayed reinforcers refers to the observation
+ that the value of a delayed reinforcer is discounted (reduced in
+ value or considered to be worth less) compared to the value of an
+ immediate reinforcer. This paper examines how delay discounting may
+ provide an explanation of both impulsivity and loss of control exhibited
+ by the drug dependent. In so doing, the paper reviews economic models
+ of delay discounting, the empirical literature on the discounting
+ of delayed reinforcers by the drug dependent and the scientific literature
+ on personality assessments of impulsivity among drug-dependent individuals.
+ Finally, future directions for the study of discounting are discussed,
+ including the study of loss of control and loss aversion among drug-dependent
+ individuals, the relationship of discounting to both the behavioral
+ economic measure of elasticity as well as to outcomes observed in
+ clinical settings, and the relationship between impulsivity and psychological
+ disorders other than drug dependence.},
+ Author = {W. K. Bickel and L. A. Marsch},
+ Doi = {10.1080/09652140020016978},
+ Institution = {University of Vermont, Department of Psychiatry, Human Behavioral Pharmacology Laboratory, 38 Fletcher Place, Ira Allen School, Burlington, VT 05401-1419, USA. Warren.Bickel@uvm.edu},
+ Journal = {Addiction},
+ Keywords = {Humans; Impulsive Behavior, economics/psychology; Models, Economic; Models, Psychological; Personality Assessment; Psychophysics; Reinforcement (Psychology); Substance-Related Disorders, economics/psychology; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jan},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {73--86},
+ Pmid = {11177521},
+ Timestamp = {2009.08.06},
+ Title = {Toward a behavioral economic understanding of drug dependence: delay discounting processes.},
+ Url = {http://dx.doi.org/10.1080/09652140020016978},
+ Volume = {96},
+ Year = {2001},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1080/09652140020016978}}
+
+@article{Bickel2008,
+ Abstract = {CONCEPTUAL PAPER PURPOSE: The purpose of this chapter is to examine
+ a new conceptual model of addiction and interpret the results from
+ delay discounting studies in light of this new perspective. METHODOLOGY/APPROACH:
+ To accomplish this we (1) introduce this new conceptual model, (2)
+ briefly review executive function, including evidence for executive
+ dysfunction among the addicted, (3) describe the unique relationship
+ of temporal discounting to the new model and executive dysfunction,
+ and (4) reinterpret the discounting literature in light of this new
+ conceptual model. FINDINGS: Addicted individuals discount the future
+ more than controls. This is consistent with greater relative activation
+ of the impulsive system and decreased relative activation of the
+ executive system. It also supports the new conceptual model of addiction.
+ RESEARCH IMPLICATIONS: The new model provides a model for understanding
+ the observations from the broader area of research in temporal discounting.
+ ORIGINALITY/VALUE OF CHAPTER: Given the view of executive function
+ as important for the cross-temporal organization of behavior, we
+ think that temporal discounting, the valuing of future commodities,
+ qualifies this process to be included as an executive function.},
+ Author = {Warren K Bickel and Richard Yi},
+ Institution = {Louis Dierks Research Laboratories Psychiatric Research Institute, University of Arkansas for Medical Sciences, Little Rock, AR, USA.},
+ Journal = {Adv Health Econ Health Serv Res},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Owner = {Woo-Young Ahn},
+ Pages = {289--309},
+ Pmid = {19552313},
+ Timestamp = {2009.08.06},
+ Title = {Temporal discounting as a measure of executive function: insights from the competing neuro-behavioral decision system hypothesis of addiction.},
+ Volume = {20},
+ Year = {2008}}
+
+@article{Biggins1997,
+ Author = {Biggins, C. A. and MacKay, S. and Clark, W. and Fein, G.},
+ Journal = {Biol. Psychiatry},
+ Month = {Sep},
+ Pages = {472--485},
+ Title = {{{E}vent-related potential evidence for frontal cortex effects of chronic cocaine dependence}},
+ Volume = {42},
+ Year = {1997}}
+
+@article{Biggins1995,
+ Author = {Biggins, C. A. and MacKay, S. and Poole, N. and Fein, G.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Aug},
+ Pages = {1032--1042},
+ Title = {{{D}elayed {P}3{A} in abstinent elderly male chronic alcoholics}},
+ Volume = {19},
+ Year = {1995}}
+
+@book{Bilodeau1999,
+ Address = {New York},
+ Author = {Bilodeau, M. and Brenner, D.},
+ Publisher = {Springer Verlag},
+ Title = {Theory of Multivariate Statistics},
+ Year = {1999}}
+
+@article{Birnbaum1977,
+ Author = {Birnbaum, A.},
+ Journal = {Synthese},
+ Pages = {19--49},
+ Title = {The {N}eyman--{P}earson theory as decision theory, and as inference theory; {w}ith a criticism of the {L}indley--{S}avage argument for {B}ayesian theory},
+ Volume = {36},
+ Year = {1977}}
+
+@article{Birnbaum1962,
+ Author = {Birnbaum, A.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {269--326},
+ Title = {On the Foundations of Statistical Inference (with discussion)},
+ Volume = {57},
+ Year = {1962}}
+
+@article{Bisaglia2002,
+ Author = {Bisaglia, L. and Bordignon, S.},
+ Journal = {Statistical Papers},
+ Pages = {161--175},
+ Title = {Mean Square Prediction Error for Long--memory Processes},
+ Volume = {43},
+ Year = {2002}}
+
+@article{Bisaglia1998,
+ Author = {Bisaglia, L. and Gu\'{e}gan, D.},
+ Journal = {Computational Statistics \& Data Analysis},
+ Pages = {61--81},
+ Title = {A Comparison of Techniques of Estimation in Long--memory Processes},
+ Volume = {27},
+ Year = {1998}}
+
+@article{Bishara2009,
+ Author = {Bishara, A. J. and Pleskac, T. J. and Fridberg, D. J. and Yechiam, E. and Lucas, J. and Busemeyer, J. R. and Finn, P. R. and Stout, J. C.},
+ Journal = {Journal of Behavioral Decision Making},
+ Publisher = {John Wiley \& Sons, Ltd. Chichester, UK},
+ Title = {{Similar processes despite divergent behavior in two commonly used measures of risky decision making}},
+ Year = {2009}}
+
+@article{Bjork2004,
+ Author = {Bjork, J. M. and Knutson, B. and Fong, G. W. and Caggiano, D. M.
and Bennett, S. M. and Hommer, D. W.}, + Journal = {J. Neurosci.}, + Month = {Feb}, + Pages = {1793--1802}, + Title = {{{I}ncentive-elicited brain activation in adolescents: similarities and differences from young adults}}, + Volume = {24}, + Year = {2004}} + +@article{Bjork2008, + Author = {Bjork, J. M. and Knutson, B. and Hommer, D. W.}, + Journal = {Addiction}, + Month = {Aug}, + Pages = {1308--1319}, + Title = {{{I}ncentive-elicited striatal activation in adolescent children of alcoholics}}, + Volume = {103}, + Year = {2008}} + +@article{Blankenship2000, + Author = {Blankenship, M. R. and Finn, P. R. and Steinmetz, J. E.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {1778--1784}, + Title = {{{A} characterization of approach and avoidance learning in high-alcohol-drinking ({H}{A}{D}) and low-alcohol-drinking ({L}{A}{D}) rats}}, + Volume = {24}, + Year = {2000}} + +@article{Blankenship1998, + Author = {Blankenship, M. R. and Finn, P. R. and Steinmetz, J. E.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Sep}, + Pages = {1227--1233}, + Title = {{{A} characterization of approach and avoidance learning in alcohol-preferring and alcohol-nonpreferring rats}}, + Volume = {22}, + Year = {1998}} + +@article{Blatter2006, + Author = {Blatter, K. and Schultz, W.}, + Journal = {Exp Brain Res}, + Month = {Jan}, + Pages = {541--546}, + Title = {{{R}ewarding properties of visual stimuli}}, + Volume = {168}, + Year = {2006}} + +@article{Bleecker1991, + Author = {Bleecker, M. L. and Bolla, K. I. and Agnew, J. and Schwartz, B. S. and Ford, D. P.}, + Journal = {Am. J. Ind. Med.}, + Pages = {715--728}, + Title = {{{D}ose-related subclinical neurobehavioral effects of chronic exposure to low levels of organic solvents}}, + Volume = {19}, + Year = {1991}} + +@article{Blomqvist2000a, + Author = {Blomqvist, A. and Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Dec}, + Pages = {1--4}, + Title = {{{I}s neuropathic pain caused by the activation of nociceptive-specific neurons due to anatomic sprouting in the dorsal horn?}}, + Volume = {428}, + Year = {2000}} + +@article{Blomqvist1992, + Author = {Blomqvist, A. and Ericson, A. C. and Broman, J. and Craig, A. D.}, + Journal = {Brain Res.}, + Month = {Jul}, + Pages = {425--430}, + Title = {{{E}lectron microscopic identification of lamina {I} axon terminations in the nucleus submedius of the cat thalamus}}, + Volume = {585}, + Year = {1992}} + +@article{Blomqvist1996, + Author = {Blomqvist, A. and Ericson, A. C. and Craig, A. D. and Broman, J.}, + Journal = {Exp Brain Res}, + Month = {Feb}, + Pages = {33--44}, + Title = {{{E}vidence for glutamate as a neurotransmitter in spinothalamic tract terminals in the posterior region of owl monkeys}}, + Volume = {108}, + Year = {1996}} + +@article{Blomqvist2000, + Author = {Blomqvist, A. and Zhang, E. T. and Craig, A. D.}, + Journal = {Brain}, + Month = {Mar}, + Pages = {601--619}, + Title = {{{C}ytoarchitectonic and immunohistochemical characterization of a specific pain and temperature relay, the posterior portion of the ventral medial nucleus, in the human thalamus}}, + Volume = {123 Pt 3}, + Year = {2000}} + +@article{Blum2000, + Author = {Blum, K. and Braverman, E. R. and Holder, J. M. and Lubar, J. F. and Monastra, V. J. and Miller, D. and Lubar, J. O. and Chen, T. J. and Comings, D. 
E.}, + Journal = {J Psychoactive Drugs}, + Pages = {1--112}, + Title = {{{R}eward deficiency syndrome: a biogenetic model for the diagnosis and treatment of impulsive, addictive, and compulsive behaviors}}, + Volume = {32 Suppl}, + Year = {2000}} + +@article{Bobova2009, + Author = {Bobova, L. and Finn, P. R. and Rickert, M. E. and Lucas, J.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Feb}, + Pages = {51--61}, + Title = {{{D}isinhibitory psychopathology and delay discounting in alcohol dependence: personality and cognitive correlates}}, + Volume = {17}, + Year = {2009}} + +@article{Boelen2005, + Author = {Boelen, P. A. and {van den Bout}, J.}, + Journal = {The American Journal of Psychiatry}, + Pages = {2175--2177}, + Title = {Complicated Grief, Depression, and Anxiety as Distinct Post--loss Syndromes: {A} Confirmatory Factor Analysis Study}, + Volume = {162}, + Year = {2005}} + +@article{Boes2009, + Author = {Boes, A. D. and Bechara, A. and Tranel, D. and Anderson, S. W. and Richman, L. and Nopoulos, P.}, + Journal = {Soc Cogn Affect Neurosci}, + Month = {Mar}, + Pages = {1--9}, + Title = {{{R}ight ventromedial prefrontal cortex: a neuroanatomical correlate of impulse control in boys}}, + Volume = {4}, + Year = {2009}} + +@article{Bogacz2007, + Author = {Bogacz, R.}, + Journal = {Trends in Cognitive Sciences}, + Pages = {118--125}, + Title = {Optimal Decision--making Theories: {L}inking Neurobiology with Behaviour}, + Volume = {11}, + Year = {2007}} + +@article{Bogacz2006, + Author = {Bogacz, R. and Brown, E. and Moehlis, J. and Holmes, P. and Cohen, J. D.}, + Journal = {Psychological Review}, + Pages = {700--765}, + Title = {The Physics of Optimal Decision Making: {A} Formal Analysis of Models of Performance in Two--alternative Forced Choice Tasks}, + Volume = {113}, + Year = {2006}} + +@article{Bogacz2007a, + Author = {Bogacz, R. and Gurney, K.}, + Journal = {Neural Computation}, + Pages = {442--477}, + Title = {The Basal Ganglia and Cortex Implement Optimal Decision Making Between Alternative Actions}, + Volume = {19}, + Year = {2007}} + +@article{Bogacz2007b, + Author = {Bogacz, R. and McClure, S. M. and Li, J. and Cohen, J. D. and Montague, P. R.}, + Journal = {Brain Res.}, + Month = {Jun}, + Pages = {111--121}, + Title = {{{S}hort-term memory traces for action bias in human reinforcement learning}}, + Volume = {1153}, + Year = {2007}} + +@article{Bogaczinpress, + Author = {Bogacz, R. and Usher, M. and Zhang, J. and McClelland, J. L.}, + Journal = {Philosophical Transactions of the Royal Society, Series B}, + Pages = {??--??}, + Title = {Extending a Biologically Inspired Model of Choice: Multi--alternatives, Nonlinearity and Value--based Multidimensional Choice}, + Volume = {??}, + Year = {in press}} + +@article{Bogg2009, + Author = {Bogg, T. and Finn, P. R.}, + Journal = {J Stud Alcohol Drugs}, + Month = {May}, + Pages = {446--457}, + Title = {{{A}n ecologically based model of alcohol-consumption decision making: evidence for the discriminative and predictive role of contextual reward and punishment information}}, + Volume = {70}, + Year = {2009}} + +@article{Bolanos2003, + Author = {Bolanos, C. A. and Barrot, M. and Berton, O. and Wallace-Black, D. and Nestler, E. J.}, + Journal = {Biol. Psychiatry}, + Month = {Dec}, + Pages = {1317--1329}, + Title = {{{M}ethylphenidate treatment during pre- and periadolescence alters behavioral responses to emotional stimuli at adulthood}}, + Volume = {54}, + Year = {2003}} + +@article{Bolanos2004, + Author = {Bolanos, C. A. 
and Nestler, E. J.},
+ Journal = {Neuromolecular Med.},
+ Pages = {69--83},
+ Title = {{{N}eurotrophic mechanisms in drug addiction}},
+ Volume = {5},
+ Year = {2004}}
+
+@article{Bolanos2005,
+ Author = {Bolanos, C. A. and Neve, R. L. and Nestler, E. J.},
+ Journal = {Synapse},
+ Month = {Jun},
+ Pages = {166--169},
+ Title = {{{P}hospholipase {C} gamma in distinct regions of the ventral tegmental area differentially regulates morphine-induced locomotor activity}},
+ Volume = {56},
+ Year = {2005}}
+
+@article{Bolla2004,
+ Author = {Bolla, K. and Ernst, M. and Kiehl, K. and Mouratidis, M. and Eldreth, D. and Contoreggi, C. and Matochik, J. and Kurian, V. and Cadet, J. and Kimes, A. and Funderburk, F. and London, E.},
+ Journal = {J Neuropsychiatry Clin Neurosci},
+ Pages = {456--464},
+ Title = {{{P}refrontal cortical dysfunction in abstinent cocaine abusers}},
+ Volume = {16},
+ Year = {2004}}
+
+@article{Bolla2000a,
+ Author = {Bolla, K. I.},
+ Journal = {Occup Med},
+ Pages = {617--625},
+ Title = {{{U}se of neuropsychological testing in idiopathic environmental intolerances}},
+ Volume = {15},
+ Year = {2000}}
+
+@article{Bolla1996,
+ Author = {Bolla, K. I.},
+ Journal = {Regul. Toxicol. Pharmacol.},
+ Month = {Aug},
+ Pages = {48--51},
+ Title = {{{N}europsychological evaluation for detecting alterations in the central nervous system after chemical exposure}},
+ Volume = {24},
+ Year = {1996}}
+
+@article{Bolla1996a,
+ Author = {Bolla, K. I.},
+ Journal = {Regul. Toxicol. Pharmacol.},
+ Month = {Aug},
+ Pages = {S52--54},
+ Title = {{{N}eurobehavioral performance in multiple chemical sensitivities}},
+ Volume = {24},
+ Year = {1996}}
+
+@article{Bolla1991b,
+ Author = {Bolla, K. I.},
+ Journal = {Environ. Health Perspect.},
+ Month = {Nov},
+ Pages = {93--98},
+ Title = {{{N}europsychological assessment for detecting adverse effects of volatile organic compounds on the central nervous system}},
+ Volume = {95},
+ Year = {1991}}
+
+@article{Bolla1992,
+ Author = {Bolla, K. I. and Briefel, G. and Spector, D. and Schwartz, B. S. and Wieler, L. and Herron, J. and Gimenez, L.},
+ Journal = {Arch. Neurol.},
+ Month = {Oct},
+ Pages = {1021--1026},
+ Title = {{{N}eurocognitive effects of aluminum}},
+ Volume = {49},
+ Year = {1992}}
+
+@article{Bolla2002,
+ Author = {Bolla, K. I. and Brown, K. and Eldreth, D. and Tate, K. and Cadet, J. L.},
+ Journal = {Neurology},
+ Month = {Nov},
+ Pages = {1337--1343},
+ Title = {{{D}ose-related neurocognitive effects of marijuana use}},
+ Volume = {59},
+ Year = {2002}}
+
+@article{Bolla1998a,
+ Author = {Bolla, K. I. and Cadet, J. L. and London, E. D.},
+ Journal = {J Neuropsychiatry Clin Neurosci},
+ Pages = {280--289},
+ Title = {{{T}he neuropsychiatry of chronic cocaine abuse}},
+ Volume = {10},
+ Year = {1998}}
+
+@article{Bolla2003,
+ Author = {Bolla, K. I. and Eldreth, D. A. and London, E. D. and Kiehl, K. A. and Mouratidis, M. and Contoreggi, C. and Matochik, J. A. and Kurian, V. and Cadet, J. L. and Kimes, A. S. and Funderburk, F. R. and Ernst, M.},
+ Journal = {Neuroimage},
+ Month = {Jul},
+ Pages = {1085--1094},
+ Title = {{{O}rbitofrontal cortex dysfunction in abstinent cocaine abusers performing a decision-making task}},
+ Volume = {19},
+ Year = {2003}}
+
+@article{Bolla2005,
+ Author = {Bolla, K. I. and Eldreth, D. A. and Matochik, J. A. and Cadet, J.
L.}, + Journal = {Neuroimage}, + Month = {Jun}, + Pages = {480--492}, + Title = {{{N}eural substrates of faulty decision-making in abstinent marijuana users}}, + Volume = {26}, + Year = {2005}} + +@article{Bolla2004a, + Author = {Bolla, K. I. and Eldreth, D. A. and Matochik, J. A. and Cadet, J. L.}, + Journal = {Cereb. Cortex}, + Month = {Nov}, + Pages = {1226--1232}, + Title = {{{S}ex-related differences in a gambling task and its neurological correlates}}, + Volume = {14}, + Year = {2004}} + +@article{Bolla2000, + Author = {Bolla, K. I. and Funderburk, F. R. and Cadet, J. L.}, + Journal = {Neurology}, + Month = {Jun}, + Pages = {2285--2292}, + Title = {{{D}ifferential effects of cocaine and cocaine alcohol on neurocognitive performance}}, + Volume = {54}, + Year = {2000}} + +@article{Bolla2008, + Author = {Bolla, K. I. and Lesage, S. R. and Gamaldo, C. E. and Neubauer, D. N. and Funderburk, F. R. and Cadet, J. L. and David, P. M. and Verdejo-Garcia, A. and Benbrook, A. R.}, + Journal = {Sleep}, + Month = {Jun}, + Pages = {901--908}, + Title = {{{S}leep disturbance in heavy marijuana users}}, + Volume = {31}, + Year = {2008}} + +@article{Bolla1991a, + Author = {Bolla, K. I. and Lindgren, K. N. and Bonaccorsy, C. and Bleecker, M. L.}, + Journal = {Arch. Neurol.}, + Month = {Jan}, + Pages = {61--64}, + Title = {{{M}emory complaints in older adults. {F}act or fiction?}}, + Volume = {48}, + Year = {1991}} + +@article{Bolla1990a, + Author = {Bolla, K. I. and Lindgren, K. N. and Bonaccorsy, C. and Bleecker, M. L.}, + Journal = {J Clin Psychol}, + Month = {Sep}, + Pages = {623--628}, + Title = {{{P}redictors of verbal fluency ({F}{A}{S}) in the healthy elderly}}, + Volume = {46}, + Year = {1990}} + +@article{Bolla1998, + Author = {Bolla, K. I. and McCann, U. D. and Ricaurte, G. A.}, + Journal = {Neurology}, + Month = {Dec}, + Pages = {1532--1537}, + Title = {{{M}emory impairment in abstinent {M}{D}{M}{A} ("{E}cstasy") users}}, + Volume = {51}, + Year = {1998}} + +@article{Bolla1991, + Author = {Bolla, K. I. and Milstien, S. and Briefel, G. and Wieler, L. and Kaufman, S.}, + Journal = {Neurology}, + Month = {Nov}, + Pages = {1806--1809}, + Title = {{{D}ihydropteridine reductase activity: lack of association with serum aluminum levels and cognitive functioning in patients with end-stage renal disease}}, + Volume = {41}, + Year = {1991}} + +@article{Bolla1999, + Author = {Bolla, K. I. and Rothman, R. and Cadet, J. L.}, + Journal = {J Neuropsychiatry Clin Neurosci}, + Pages = {361--369}, + Title = {{{D}ose-related neurobehavioral effects of chronic cocaine use}}, + Volume = {11}, + Year = {1999}} + +@article{Bolla1990, + Author = {Bolla, K. I. and Schwartz, B. S. and Agnew, J. and Ford, P. D. and Bleecker, M. L.}, + Journal = {J Occup Med}, + Month = {Aug}, + Pages = {671--677}, + Title = {{{S}ubclinical neuropsychiatric effects of chronic low-level solvent exposure in {U}{S} paint manufacturers}}, + Volume = {32}, + Year = {1990}} + +@article{Bolla1995, + Author = {Bolla, K. I. and Schwartz, B. S. and Stewart, W. and Rignani, J. and Agnew, J. and Ford, D. P.}, + Journal = {Am. J. Ind. Med.}, + Month = {Feb}, + Pages = {231--246}, + Title = {{{C}omparison of neurobehavioral function in workers exposed to a mixture of organic and inorganic lead and in workers exposed to solvents}}, + Volume = {27}, + Year = {1995}} + +@article{Bonson2002, + Author = {Bonson, K. R. and Grant, S. J. and Contoreggi, C. S. and Links, J. M. and Metcalfe, J. and Weyl, H. L. and Kurian, V. and Ernst, M. and London, E. 
D.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Mar},
+ Pages = {376--386},
+ Title = {{{N}eural systems and cue-induced cocaine craving}},
+ Volume = {26},
+ Year = {2002}}
+
+@article{Borycz2008,
+ Author = {Borycz, J. and Zapata, A. and Quiroz, C. and Volkow, N. D. and Ferr{\'e}, S.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Feb},
+ Pages = {619--626},
+ Title = {{5-{H}{T} 1{B} receptor-mediated serotoninergic modulation of methylphenidate-induced locomotor activation in rats}},
+ Volume = {33},
+ Year = {2008}}
+
+@article{bossert2007differential,
+ Author = {Bossert, J.M. and Poles, G.C. and Wihbey, K.A. and Koya, E. and Shaham, Y.},
+ Journal = {Journal of Neuroscience},
+ Number = {46},
+ Pages = {12655},
+ Publisher = {Soc Neuroscience},
+ Title = {{Differential effects of blockade of dopamine D1-family receptors in nucleus accumbens core or shell on reinstatement of heroin seeking induced by contextual and discrete cues}},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Botvinick2001,
+ Author = {Botvinick, M. M. and Braver, T. S. and Barch, D. M. and Carter, C. S. and Cohen, J. D.},
+ Journal = {Psychological Review},
+ Pages = {624--652},
+ Title = {Conflict Monitoring and Cognitive Control},
+ Volume = {108},
+ Year = {2001}}
+
+@incollection{Box1979,
+ Address = {New York},
+ Author = {Box, G. E. P.},
+ Booktitle = {Robustness in Statistics},
+ Editor = {Launer, R. L. and Wilkinson, G. N.},
+ Pages = {201--236},
+ Publisher = {Academic Press},
+ Title = {Robustness in Scientific Model Building},
+ Year = {1979}}
+
+@article{Box1980,
+ Author = {Box, G. E. P.},
+ Journal = {Journal of the Royal Statistical Society, Series A},
+ Pages = {383--430},
+ Title = {Sampling and {B}ayes' Inference in Scientific Modelling and Robustness},
+ Volume = {143},
+ Year = {1980}}
+
+@book{Box1970,
+ Address = {San Francisco},
+ Author = {Box, G. E. P. and Jenkins, G. M.},
+ Publisher = {Holden Day},
+ Title = {Time Series Analysis: Forecasting and Control},
+ Year = {1970}}
+
+@book{Box1973,
+ Address = {Reading},
+ Author = {Box, G. E. P. and Tiao, G. C.},
+ Publisher = {Addison--Wesley},
+ Title = {{B}ayesian Inference in Statistical Analysis},
+ Year = {1973}}
+
+@article{Boyson1986,
+ Author = {Boyson, SJ and McGonigle, P. and Molinoff, PB},
+ Journal = {Journal of Neuroscience},
+ Number = {11},
+ Pages = {3177--3188},
+ Publisher = {Soc Neuroscience},
+ Title = {{Quantitative autoradiographic localization of the D1 and D2 subtypes of dopamine receptors in rat brain}},
+ Volume = {6},
+ Year = {1986}}
+
+@article{Brady2005,
+ Author = {Brady, K. T. and Sinha, R.},
+ Journal = {Am J Psychiatry},
+ Month = {Aug},
+ Pages = {1483--1493},
+ Title = {{{C}o-occurring mental and substance use disorders: the neurobiological effects of chronic stress}},
+ Volume = {162},
+ Year = {2005}}
+
+@article{Braedel1994,
+ Author = {Braedel, H. U. and Steffens, J. and Ziegler, M. and Polsky, M. S. and Platt, M. L.},
+ Journal = {J.
Urol.},
+ Month = {Jan},
+ Pages = {62--66},
+ Title = {{{A} possible ontogenic etiology for idiopathic left varicocele}},
+ Volume = {151},
+ Year = {1994}}
+
+@article{Bramon2008,
+ Author = {Bramon, E. and Dempster, E. and Frangou, S. and Shaikh, M. and Walshe, M. and Filbey, F. M. and McDonald, C. and Sham, P. and Collier, D. A. and Murray, R.},
+ Journal = {Schizophr. Res.},
+ Month = {Aug},
+ Pages = {178--185},
+ Title = {{{N}euregulin-1 and the {P}300 waveform--a preliminary association study using a psychosis endophenotype}},
+ Volume = {103},
+ Year = {2008}}
+
+@article{braus2001alcohol,
+ Author = {Braus, DF and Wrase, J. and Gr{\"u}sser, S. and Hermann, D. and Ruf, M. and Flor, H. and Mann, K. and Heinz, A.},
+ Journal = {Journal of Neural Transmission},
+ Number = {7},
+ Pages = {887--894},
+ Publisher = {Springer},
+ Title = {{Alcohol-associated stimuli activate the ventral striatum in abstinent alcoholics}},
+ Volume = {108},
+ Year = {2001}}
+
+@article{Braver1999a,
+ Author = {Braver, T. S. and Barch, D. M. and Cohen, J. D.},
+ Journal = {Biol. Psychiatry},
+ Month = {Aug},
+ Pages = {312--328},
+ Title = {{{C}ognition and control in schizophrenia: a computational model of dopamine and prefrontal function}},
+ Volume = {46},
+ Year = {1999}}
+
+@article{Braver2001,
+ Author = {Braver, T. S. and Barch, D. M. and Keys, B. A. and Carter, C. S. and Cohen, J. D. and Kaye, J. A. and Janowsky, J. S. and Taylor, S. F. and Yesavage, J. A. and Mumenthaler, M. S. and Jagust, W. J. and Reed, B. R.},
+ Journal = {J Exp Psychol Gen},
+ Month = {Dec},
+ Pages = {746--763},
+ Title = {{{C}ontext processing in older adults: evidence for a theory relating cognitive control to neurobiology in healthy aging}},
+ Volume = {130},
+ Year = {2001}}
+
+@article{Braver1999,
+ Author = {Braver, T. S. and Cohen, J. D.},
+ Journal = {Prog. Brain Res.},
+ Pages = {327--349},
+ Title = {{{D}opamine, cognitive control, and schizophrenia: the gating model}},
+ Volume = {121},
+ Year = {1999}}
+
+@article{Braver1997,
+ Author = {Braver, T. S. and Cohen, J. D. and Nystrom, L. E. and Jonides, J. and Smith, E. E. and Noll, D. C.},
+ Journal = {Neuroimage},
+ Month = {Jan},
+ Pages = {49--62},
+ Title = {{{A} parametric study of prefrontal cortex involvement in human working memory}},
+ Volume = {5},
+ Year = {1997}}
+
+@article{Bray2007,
+ Author = {Bray, S. and O'Doherty, J.},
+ Journal = {J. Neurophysiol.},
+ Month = {Apr},
+ Pages = {3036--3045},
+ Title = {{{N}eural coding of reward-prediction error signals during classical conditioning with attractive faces}},
+ Volume = {97},
+ Year = {2007}}
+
+@article{Bray2008,
+ Author = {Bray, S. and Rangel, A. and Shimojo, S. and Balleine, B. and O'Doherty, J. P.},
+ Journal = {J. Neurosci.},
+ Month = {May},
+ Pages = {5861--5866},
+ Title = {{{T}he neural mechanisms underlying the influence of pavlovian cues on human decision making}},
+ Volume = {28},
+ Year = {2008}}
+
+@article{Bray2007a,
+ Author = {Bray, S. and Shimojo, S. and O'Doherty, J. P.},
+ Journal = {J. Neurosci.},
+ Month = {Jul},
+ Pages = {7498--7507},
+ Title = {{{D}irect instrumental conditioning of neural activity using functional magnetic resonance imaging-derived reward feedback}},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Brebner2002,
+ Author = {Brebner, K. and Childress, A. R. and Roberts, D.
C.},
+ Journal = {Alcohol Alcohol.},
+ Pages = {478--484},
+ Title = {{{A} potential role for {G}{A}{B}{A}({B}) agonists in the treatment of psychostimulant addiction}},
+ Volume = {37},
+ Year = {2002}}
+
+@article{Breese2005,
+ Author = {Breese, G. R. and Chu, K. and Dayas, C. V. and Funk, D. and Knapp, D. J. and Koob, G. F. and Le, D. A. and O'Dell, L. E. and Overstreet, D. H. and Roberts, A. J. and Sinha, R. and Valdez, G. R. and Weiss, F.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Feb},
+ Pages = {185--195},
+ Title = {{{S}tress enhancement of craving during sobriety: a risk for relapse}},
+ Volume = {29},
+ Year = {2005}}
+
+@article{Breiter2001,
+ Author = {Breiter, H.C. and Aharon, I. and Kahneman, D. and Dale, A. and Shizgal, P.},
+ Journal = {Neuron},
+ Number = {2},
+ Pages = {619--639},
+ Publisher = {Elsevier},
+ Title = {{Functional imaging of neural responses to expectancy and experience of monetary gains and losses}},
+ Volume = {30},
+ Year = {2001}}
+
+@article{breiter1997acute,
+ Author = {Breiter, H.C. and Gollub, R.L. and Weisskoff, R.M. and Kennedy, D.N. and Makris, N. and Berke, J.D. and Goodman, J.M. and Kantor, H.L. and Gastfriend, D.R. and Riorden, J.P. and others},
+ Journal = {Neuron},
+ Number = {3},
+ Pages = {591--611},
+ Title = {{Acute effects of cocaine on human brain activity and emotion}},
+ Volume = {19},
+ Year = {1997}}
+
+@article{brewer2008neurobiology,
+ Author = {Brewer, J.A. and Potenza, M.N.},
+ Journal = {Biochemical pharmacology},
+ Number = {1},
+ Pages = {63--75},
+ Publisher = {Elsevier},
+ Title = {{The neurobiology and genetics of impulse control disorders: relationships to drug addictions}},
+ Volume = {75},
+ Year = {2008}}
+
+@incollection{Brinley1965,
+ Address = {Springfield, IL},
+ Author = {Brinley, J. F.},
+ Booktitle = {Behavior, Aging and the Nervous System},
+ Editor = {Welford, A. T. and Birren, J. E.},
+ Pages = {114--149},
+ Publisher = {Thomas},
+ Title = {Cognitive Sets, Speed and Accuracy of Performance in the Elderly},
+ Year = {1965}}
+
+@article{Britten1996a,
+ Author = {Britten, K. H. and Newsome, W. T. and Shadlen, M. N. and Celebrini, S. and Movshon, J. A.},
+ Journal = {Vis. Neurosci.},
+ Pages = {87--100},
+ Title = {{{A} relationship between behavioral choice and the visual responses of neurons in macaque {M}{T}}},
+ Volume = {13},
+ Year = {1996}}
+
+@article{Britten1993a,
+ Author = {Britten, K. H. and Shadlen, M. N. and Newsome, W. T. and Movshon, J. A.},
+ Journal = {Vis. Neurosci.},
+ Pages = {1157--1169},
+ Title = {{{R}esponses of neurons in macaque {M}{T} to stochastic motion signals}},
+ Volume = {10},
+ Year = {1993}}
+
+@article{Britten1992a,
+ Author = {Britten, K. H. and Shadlen, M. N. and Newsome, W. T. and Movshon, J. A.},
+ Journal = {J. Neurosci.},
+ Month = {Dec},
+ Pages = {4745--4765},
+ Title = {{{T}he analysis of visual motion: a comparison of neuronal and psychophysical performance}},
+ Volume = {12},
+ Year = {1992}}
+
+@article{Brody2002,
+ Author = {Brody, A. L. and Mandelkern, M. A. and London, E. D. and Childress, A. R. and Lee, G. S.
and Bota, R. G. and Ho, M. L. and Saxena, S. and Baxter, L. R. and Madsen, D. and Jarvik, M. E.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Dec}, + Pages = {1162--1172}, + Title = {{{B}rain metabolic changes during cigarette craving}}, + Volume = {59}, + Year = {2002}} + +@article{Brody2009, + Author = {Brody, A. L. and Mandelkern, M. A. and Olmstead, R. E. and Allen-Martinez, Z. and Scheibal, D. and Abrams, A. L. and Costello, M. R. and Farahi, J. and Saxena, S. and Monterosso, J. and London, E. D.}, + Journal = {Neuropsychopharmacology}, + Month = {Jan}, + Pages = {282--289}, + Title = {{{V}entral striatal dopamine release in response to smoking a regular vs a denicotinized cigarette}}, + Volume = {34}, + Year = {2009}} + +@article{Brody2007, + Author = {Brody, A. L. and Mandelkern, M. A. and Olmstead, R. E. and Jou, J. and Tiongson, E. and Allen, V. and Scheibal, D. and London, E. D. and Monterosso, J. R. and Tiffany, S. T. and Korb, A. and Gan, J. J. and Cohen, M. S.}, + Journal = {Biol. Psychiatry}, + Month = {Sep}, + Pages = {642--651}, + Title = {{{N}eural substrates of resisting craving during cigarette cue exposure}}, + Volume = {62}, + Year = {2007}} + +@article{Brooks2005, + Author = {Brooks, J. C. and Zambreanu, L. and Godinez, A. and Craig, A. D. and Tracey, I.}, + Journal = {Neuroimage}, + Month = {Aug}, + Pages = {201--209}, + Title = {{{S}omatotopic organisation of the human insula to painful heat studied with high resolution functional imaging}}, + Volume = {27}, + Year = {2005}} + +@article{Brown2008, + Abstract = {Recent work on cognitive control has suggested a variety of performance + monitoring functions of the anterior cingulate cortex such as errors, + conflict, error likelihood, and others. Given the variety of monitoring + effects, a corresponding variety of control effects on behavior might + be expected. This paper explores whether conflict and error likelihood + produce distinct cognitive control effects on behavior, as measured + by response time. A Change signal task (Brown & Braver, Science 307:1118-1121, + 2005) was modified to include conditions of likely errors due to + tardy as well as premature responses in conditions with and without + conflict. The results discriminate between competing hypotheses of + independent versus interacting conflict and error likelihood control + effects. Specifically, the results suggest that the likelihood of + premature versus tardy response errors can lead to multiple distinct + control effects, which are independent of cognitive control effects + driven by response conflict. 
As a whole, the results point to the + existence of multiple distinct cognitive control mechanisms and challenge + existing models of cognitive control that incorporate only a single + control signal.}, + Author = {Joshua Brown}, + Doi = {10.1007/s00426-008-0198-7}, + Institution = {Department of Psychological and Brain Sciences, Indiana University, 1101 E Tenth St., Bloomington, IN, 47405, USA, jwmbrown@indiana.edu.}, + Journal = {Psychol Res}, + Language = {eng}, + Medline-Pst = {aheadofprint}, + Month = {Nov}, + Owner = {Woo-Young Ahn}, + Pmid = {19030873}, + Timestamp = {2009.08.04}, + Title = {Multiple cognitive control effects of error likelihood and conflict.}, + Url = {http://dx.doi.org/10.1007/s00426-008-0198-7}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00426-008-0198-7}} + +@article{Brown2009, + Abstract = {The error likelihood computational model of anterior cingulate cortex + (ACC) (Brown, J.W., Braver, T.S., 2005. Learned predictions of error + likelihood in the anterior cingulate cortex. Science 307: 1118-1121) + has successfully predicted error likelihood effects, risk prediction + effects, and how individual differences in conflict and error likelihood + effects vary with trait differences in risk aversion. The same computational + model now makes a further prediction that apparent conflict effects + in ACC may result in part from an increasing number of simultaneously + active responses, regardless of whether or not the cued responses + are mutually incompatible. In Experiment 1, the model prediction + was tested with a modification of the Eriksen flanker task, in which + some task conditions require two otherwise mutually incompatible + responses to be generated simultaneously. In that case, the two response + processes are no longer in conflict with each other. The results + showed small but significant medial PFC effects in the incongruent + vs. congruent contrast, despite the absence of response conflict, + consistent with model predictions. This is the multiple response + effect. Nonetheless, actual response conflict led to greater ACC + activation, suggesting that conflict effects are specific to particular + task contexts. In Experiment 2, results from a change signal task + suggested that the context dependence of conflict signals does not + depend on error likelihood effects. Instead, inputs to ACC may reflect + complex and task specific representations of motor acts, such as + bimanual responses. Overall, the results suggest the existence of + a richer set of motor signals monitored by medial PFC and are consistent + with distinct effects of multiple responses, conflict, and error + likelihood in medial PFC.}, + Author = {Joshua W Brown}, + Doi = {10.1016/j.neuroimage.2009.04.034}, + Institution = {Department of Psychological and Brain Sciences, Indiana University, 1101 E Tenth St., Bloomington, IN 47405, USA. 
jwmbrown@indiana.edu}, + Journal = {Neuroimage}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {334--341}, + Pii = {S1053-8119(09)00392-9}, + Pmid = {19375509}, + Timestamp = {2009.08.04}, + Title = {Conflict effects without conflict in anterior cingulate cortex: multiple response effects and context specific representations.}, + Url = {http://dx.doi.org/10.1016/j.neuroimage.2009.04.034}, + Volume = {47}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.neuroimage.2009.04.034}} + +@article{Brown1980, + Abstract = {Insectivorous bat embryos (Tadarida and Myotis) ranging from 6- to + 16-mm C-R length were examined for the presence of the nervus terminalis. + These embryos have no vomeronasal nerve with which the nervus terminalis + could be confused. The nerve and associated ganglion cells first + appear in the 7-mm embryo. As the embryo ages, a gradual increase + in nerve size and ganglion cell numbers occurs. In the 13-mm embryo, + nerve size and ganglion cell numbers are reduced, and in older embryos + both nerve and cells are absent, as in the adult. The ganglion cells + arise as clusters from the nasal septal epithelium. The largest number + of cell clusters occurs in the 10.5-mm embryo. Their number then + decreases and none are present in embryos of 13-mm and longer. These + cells migrate centrally along the course of the nerve which accompanies + the olfactory nerve from the nasal cavity roof to a level just caudal + to the olfactory bulb, where the nervus terminalis turns dorsalward + along the medial telencephalic wall surface. Except in the youngest + and oldest embryos the nervus terminalis, where present, divides + into two or three branches to pierce the hemispheric wall, one usually + entering the region of the nucleus olfactorious anterior, and the + other(s), the region of the medial septal nucleus. In some cases, + several ganglion cells are present along the intrahemispheric course + of the nerve fibers. All ganglion cells resemble those in various + sensory ganglia, and so, are probably also sensory neurons.}, + Author = {J. W. Brown}, + Doi = {10.1002/ar.1091960104}, + Journal = {Anat Rec}, + Keywords = {Animals; Chiroptera, embryology; Neurons, Afferent, cytology; Olfactory Nerve, embryology; Telencephalon, embryology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {23--35}, + Pmid = {7416499}, + Timestamp = {2009.08.04}, + Title = {Developmental history of nervus terminalis in embryos of insectivorous bats.}, + Url = {http://dx.doi.org/10.1002/ar.1091960104}, + Volume = {196}, + Year = {1980}, + Bdsk-Url-1 = {http://dx.doi.org/10.1002/ar.1091960104}} + +@article{Brown1975, + Abstract = {The classical view of conduction aphasia and the isolation syndrome + holds that there is, respectively, preferential damage to, or sparing + of, a (repetition) pathway between the posterior and anterior speech + areas. This concept is deeply entrenched in neurological thinking, + but is supported neither by clinical nor pathological evidence. These + two disorders are explained from the standpoint of a more dynamic + theory of language organization. This new approach has implications + for our understanding of anatomical relationships "between" the speech + areas.}, + Author = {J. W. 
Brown},
+ Journal = {Cortex},
+ Keywords = {Aged; Aphasia, pathology/physiopathology; Apraxias, etiology; Astrocytoma, complications; Brain Mapping; Brain Neoplasms, complications; Brain, blood supply/pathology; Cerebral Cortex, pathology/physiopathology; Dyslexia, etiology; Echolalia, pathology/physiopathology; Female; Functional Laterality; Handwriting; Humans; Infarction, complications; Male; Middle Aged; Neural Pathways; Parietal Lobe; Speech},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Mar},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {37--52},
+ Pmid = {1149466},
+ Timestamp = {2009.08.04},
+ Title = {The problem of repetition: a study of "conduction" aphasia and the "isolation" syndrome.},
+ Volume = {11},
+ Year = {1975}}
+
+@article{Brown2008a,
+ Abstract = {The error likelihood effect in anterior cingulate cortex (ACC) has
+ recently been shown to be a special case of an even more general
+ risk prediction effect, which signals both the likelihood of an error
+ and the potential severity of its consequences. Surprisingly, these
+ error likelihood and anticipated consequence effects are strikingly
+ absent in risk-taking individuals. Conversely, conflict effects in
+ ACC were found to be stronger in these same individuals. Here we
+ show that the error likelihood computational model can account for
+ individual differences in error likelihood, predicted error consequence,
+ and conflict effects in ACC with no changes from the published version
+ of the model. In particular, the model accounts for the counterintuitive
+ inverse relationship between conflict and error likelihood effects
+ as a function of the ACC learning rate in response to errors. As
+ the learning rate increases, ACC learns more effectively from mistakes,
+ which increases risk prediction effects at the expense of conflict
+ effects. Thus, the model predicts that individuals with faster error-based
+ learning in ACC will be more risk-averse and show greater ACC error
+ likelihood effects but smaller ACC conflict effects. Furthermore,
+ the model suggests that apparent response conflict effects in ACC
+ may actually consist of two related effects: increased error likelihood
+ and a greater number of simultaneously cued responses, whether or
+ not the responses are mutually incompatible. The results clarify
+ the basic computational mechanisms of learned risk aversion and may
+ have broad implications for predicting and managing risky behavior
+ in healthy and clinical populations.},
+ Author = {Joshua W Brown and Todd S Braver},
+ Doi = {10.1016/j.brainres.2007.06.080},
+ Institution = {Department of Psychological and Brain Sciences, Indiana University, 1101 E Tenth St., Bloomington, IN 47405, USA. 
jwmbrown@indiana.edu},
+ Journal = {Brain Res},
+ Keywords = {Cognition, physiology; Computer Simulation; Conflict (Psychology); Cues; Decision Making, physiology; Discrimination Learning, physiology; Gyrus Cinguli, physiology; Humans; Likelihood Functions; Mental Processes, physiology; Observer Variation; Risk-Taking},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Apr},
+ Owner = {Woo-Young Ahn},
+ Pages = {99--108},
+ Pii = {S0006-8993(07)01385-6},
+ Pmid = {17707352},
+ Timestamp = {2009.08.04},
+ Title = {A computational model of risk, conflict, and individual difference effects in the anterior cingulate cortex.},
+ Url = {http://dx.doi.org/10.1016/j.brainres.2007.06.080},
+ Volume = {1202},
+ Year = {2008},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.brainres.2007.06.080}}
+
+@article{Brown2007,
+ Abstract = {The recently proposed error-likelihood hypothesis suggests that anterior
+ cingulate cortex (ACC) and surrounding areas will become active in
+ proportion to the perceived likelihood of an error. The hypothesis
+ was originally derived from a computational model prediction. The
+ same computational model now makes a further prediction that ACC
+ will be sensitive not only to predicted error likelihood, but also
+ to the predicted magnitude of the consequences, should an error occur.
+ The product of error likelihood and predicted error consequence magnitude
+ collectively defines the general "expected risk" of a given behavior
+ in a manner analogous but orthogonal to subjective expected utility
+ theory. New fMRI results from an incentive change signal task now
+ replicate the error-likelihood effect, validate the further predictions
+ of the computational model, and suggest why some segments of the
+ population may fail to show an error-likelihood effect. In particular,
+ error-likelihood effects and expected risk effects in general indicate
+ greater sensitivity to earlier predictors of errors and are seen
+ in risk-averse but not risk-tolerant individuals. Taken together,
+ the results are consistent with an expected risk model of ACC and
+ suggest that ACC may generally contribute to cognitive control by
+ recruiting brain activity to avoid risk.},
+ Author = {Joshua W Brown and Todd S Braver},
+ Institution = {Department of Psychological and Brain Sciences, Indiana University, Bloomington, Indiana 47405, USA. jwmbown@indiana.edu},
+ Journal = {Cogn Affect Behav Neurosci},
+ Keywords = {Adult; Cognition, physiology; Color Perception, physiology; Computer Simulation; Cues; Feedback, Psychological, physiology; Female; Gambling, psychology; Humans; Image Processing, Computer-Assisted; Individuality; Magnetic Resonance Imaging; Male; Photic Stimulation; Prefrontal Cortex, physiology; Psychomotor Performance, physiology; Reaction Time, physiology; Risk-Taking; Visual Perception, physiology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Dec},
+ Number = {4},
+ Owner = {Woo-Young Ahn},
+ Pages = {266--277},
+ Pmid = {18189000},
+ Timestamp = {2009.08.04},
+ Title = {Risk prediction and aversion by anterior cingulate cortex.},
+ Volume = {7},
+ Year = {2007}}
+
+@article{Brown2005,
+ Author = {Brown, Joshua W. 
and Braver, Todd S.},
+ Journal = {Science},
+ Owner = {WooYoung Ahn},
+ Pages = {1118--1121},
+ Timestamp = {2007.12.12},
+ Title = {Learned predictions of error likelihood in the anterior cingulate cortex},
+ Volume = {307},
+ Year = {2005}}
+
+@article{Brown2005c,
+ Abstract = {The anterior cingulate cortex (ACC) and the related medial wall play
+ a critical role in recruiting cognitive control. Although ACC exhibits
+ selective error and conflict responses, it has been unclear how these
+ develop and become context-specific. With use of a modified stop-signal
+ task, we show from integrated computational neural modeling and neuroimaging
+ studies that ACC learns to predict error likelihood in a given context,
+ even for trials in which there is no error or response conflict.
+ These results support a more general error-likelihood theory of ACC
+ function based on reinforcement learning, of which conflict and error
+ detection are special cases.},
+ Author = {Joshua W Brown and Todd S Braver},
+ Doi = {10.1126/science.1105783},
+ Institution = {Department of Psychology, CB 1125, Washington University, St. Louis, MO 63130, USA. jwbrown@artsci.wustl.edu},
+ Journal = {Science},
+ Keywords = {Brain Mapping; Cognition; Computer Simulation; Conflict (Psychology); Cues; Dopamine, physiology; Frontal Lobe, cytology/physiology; Gyrus Cinguli, cytology/physiology; Humans; Learning; Magnetic Resonance Imaging; Models, Neurological; Neural Networks (Computer); Neurons, physiology; Probability Learning; Psychomotor Performance; Reinforcement (Psychology)},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Feb},
+ Number = {5712},
+ Owner = {Woo-Young Ahn},
+ Pages = {1118--1121},
+ Pii = {307/5712/1118},
+ Pmid = {15718473},
+ Timestamp = {2009.08.04},
+ Title = {Learned predictions of error likelihood in the anterior cingulate cortex.},
+ Url = {http://dx.doi.org/10.1126/science.1105783},
+ Volume = {307},
+ Year = {2005},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1126/science.1105783}}
+
+@article{Brown1985,
+ Abstract = {The ideal operation for infants with coarctation of the aorta remains
+ controversial. Subclavian flap aortoplasty is the most popular technique
+ for this age group. The 5 to 20\% recurrence rate is attributed to
+ regrowth of the coarctation web or inadequate length of the subclavian
+ flap, particularly when the aortic isthmus is long and narrow. Severe
+ arm ischemia following subclavian flap aortoplasty, although rare,
+ is a disturbing complication. The purpose of this study is to report
+ the results with a new technique we call isthmus flap aortoplasty
+ for coarctation of a long segment of the aorta in infants. This technique
+ avoids the limitations of subclavian flap aortoplasty. A short segment
+ of aorta, including the ductal entrance and coarctation web, was
+ resected in 4 infants (mean age, 35.5 days) with long-segment coarctation.
+ The posterior wall of the long isthmus was opened longitudinally
+ to the level of the transverse aortic arch. The descending aorta
+ was mobilized and advanced to the level of the aortic arch where
+ the posterior half was sutured. The anterior flap of attached isthmus
+ was then sewn into a longitudinal incision made in the anterior wall
+ of the descending aorta. All infants survived this procedure and
+ had no gradient at completion of the repair. The mean transconduit
+ gradient at rest was zero and rose to 7.0 +/- 0.93 mm Hg after angiography
+ at a mean follow-up of 42 months. 
Aortograms demonstrated that the
+ reconstructed area had grown in girth and attained a normal caliber
+ in each child.(ABSTRACT TRUNCATED AT 250 WORDS)},
+ Author = {J. W. Brown and A. C. Fiore and H. King},
+ Journal = {Ann Thorac Surg},
+ Keywords = {Aorta, surgery; Aortic Coarctation, surgery; Blood Pressure; Follow-Up Studies; Heart Catheterization; Humans; Infant; Infant, Newborn; Surgical Flaps, methods; Suture Techniques; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Sep},
+ Number = {3},
+ Owner = {Woo-Young Ahn},
+ Pages = {274--279},
+ Pmid = {3899030},
+ Timestamp = {2009.08.04},
+ Title = {Isthmus flap aortoplasty: an alternative to subclavian flap aortoplasty for long-segment coarctation of the aorta in infants.},
+ Volume = {40},
+ Year = {1985}}
+
+@article{Brown2001,
+ Abstract = {OBJECTIVES: Truncus arteriosus (TA) continues to be associated with
+ significant morbidity and mortality, but there have been clinically
+ significant improvements with early repair. METHODS: Sixty patients
+ underwent physiological correction of TA between November 1978 and
+ January 2000. The average age was 76 days (range, 3 days--20 months).
+ Associated cardiac anomalies were frequently encountered, the most
+ common being severe truncal valve regurgitation (n=7), interrupted
+ aortic arch (n=6), coronary artery anomalies (n=6), non-confluent
+ pulmonary arteries (n=4), and total anomalous pulmonary venous return
+ (n=1). Truncal valve replacement was performed initially or subsequently
+ in seven patients with severe regurgitation (mechanical prostheses
+ in six patients and a cryopreserved aortic homograft in one patient).
+ Right ventricle--pulmonary artery continuity was established with
+ an aortic (n=16) or pulmonary homograft (n=32) in 48 patients, a
+ Dacron polyester porcine valved conduit in five, a non-valved polytetrafluoroethylene
+ (PTFE) tube in three, direct anastomosis to the right ventricle with
+ anterior patch arterioplasty in three, and a bovine jugular venous
+ valve conduit in one patient. RESULTS: There were ten hospital deaths
+ (17\%; 70\% confidence limit, 7--25\%). Multivariate and univariate
+ analyses demonstrated a relationship between hospital mortality and
+ associated cardiac anomalies. In the 43 patients without these associated
+ cardiac anomalies, the early survival was 91\% (group I). In the
+ 17 patients with one or more of these risk factors, the survival
+ was 71\% (group II, P=0.002). There was one late death. Twenty-three
+ patients (46\%) required reoperation for right ventricular outflow
+ tract (RVOT) obstruction at a mean follow-up time of 59.1 months.
+ In 23 patients, the RVOT reconstruction was performed with a PTFE
+ monocusp, and six patients had a variety of replacement conduits
+ inserted. Postoperatively, there were 34 (68\%) patients in New York
+ Heart Association functional class I and 16 (32\%) in class II. Twenty-eight
+ surviving patients are reported as doing well without any medication.
+ The freedom of reoperation in the 39 hospital survivors (group I)
+ without risk factors was 64\% at 7 years; and 36\% at 10 years in
+ the 11 patients (group II) surviving with risk factors. CONCLUSIONS:
+ Associated cardiac anomalies were risk factors for death after the
+ repair of TA. In the absence of these associated lesions, TA can
+ be repaired with an excellent surgical outcome in the neonatal and
+ early infancy period.},
+ Author = {J. W. Brown and M. Ruzmetov and Y. Okada and P. Vijay and M. W. 
Turrentine}, + Institution = {Section of Cardiothoracic Surgery, Indiana University Medical Center, 545 Barnhill Drive, EH 215, Indianapolis, IN 46202-5123, USA. jobrown@iupui.edu}, + Journal = {Eur J Cardiothorac Surg}, + Keywords = {Blood Vessel Prosthesis Implantation; Cardiac Surgical Procedures; Coronary Vessel Anomalies, surgery; Female; Heart Septal Defects, Atrial, surgery; Heart Valve Prosthesis Implantation; Hospital Mortality; Humans; Infant; Infant, Newborn; Male; Reoperation; Risk Factors; Truncus Arteriosus, Persistent, mortality/surgery}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {221--227}, + Pii = {S1010-7940(01)00816-8}, + Pmid = {11463535}, + Timestamp = {2009.08.04}, + Title = {Truncus arteriosus repair: outcomes, risk factors, reoperation and management.}, + Volume = {20}, + Year = {2001}} + +@article{Brown2005b, + Abstract = {Two sets of experiments were performed to characterize the role of + the Pre-Optic Area of the Anterior Hypothalamus (POAH) in the decrease + in set point and hypothermia that follows severe hemorrhage. In the + first set, lidocaine or artificial cerebrospinal fluid (ACSF) was + microinjected into the POAH of rats at the time of hemorrhage. Lidocaine + microinjection attenuated the hemorrhagic hypothermia by approximately + 50\%. The mean drop in core temperature (Tc) following hemorrhage + was 1.5 degrees C with ACSF microinjection (N = 6), 0.70 degrees + C (N = 6) with lidocaine, and 1.77 degrees C (N = 6) after sham microinjection. + This partial attenuation of the hemorrhagic hypothermic response + indicates that an intact POAH is necessary for at least some of the + hypothermia following hemorrhage. In the second experimental set, + hypothalamic tissue temperature (Thyp) was modulated in an attempt + to alter the hemorrhagic hypothermic response. Bilateral closed-ended + cannulas were inserted into the POAH. One cannula consisted of a + water-perfused thermode to change local tissue temperature. The other + housed a thermocouple to measure local temperature. The effectiveness + of the thermode was first confirmed in conscious rats, evidenced + by an inverse deflection in Tc upon Thyp modulation. Then, the POAH + region was either heated, cooled, or sham perfused following hemorrhage. + The mean drop in Tc following hemorrhage was 2.16 degrees C (N = + 5) with hypothalamic heating, 1.35 degrees C (N = 5) with cooling, + and 1.44 degrees C (N = 5) following the sham perfusion control. + Heating of the POAH significantly exacerbated the hemorrhagic hypothermic + response. 
These data further suggest that the POAH is at least partially
+ responsible for mediating hemorrhagic hypothermia.},
+ Author = {Justin W Brown and Marvin E Whitehurst and Christopher J Gordon and Robert G Carroll},
+ Doi = {10.1016/j.brainres.2005.01.069},
+ Institution = {Department of Physiology, 6n98, Brody School of Medicine at East Carolina University, 600 Moye Boulevard, Greenville, NC 27858-4354, USA.},
+ Journal = {Brain Res},
+ Keywords = {Adaptation, Physiological, physiology; Anesthetics, Local, administration /&/ dosage; Animals; Body Temperature Regulation, physiology; Hemorrhage, physiopathology; Hypothermia, physiopathology; Lidocaine, administration /&/ dosage; Male; Microinjections; Preoptic Area, drug effects/physiology; Rats; Rats, Sprague-Dawley; Temperature},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Apr},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {1--10},
+ Pii = {S0006-8993(05)00099-5},
+ Pmid = {15804494},
+ Timestamp = {2009.08.04},
+ Title = {The Pre-Optic Anterior Hypothalamus (POAH) partially mediates the hypothermic response to hemorrhage in rats.},
+ Url = {http://dx.doi.org/10.1016/j.brainres.2005.01.069},
+ Volume = {1041},
+ Year = {2005},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.brainres.2005.01.069}}
+
+@article{Brown1967,
+ Author = {Brown, L.},
+ Journal = {The Annals of Mathematical Statistics},
+ Pages = {1068--1071},
+ Title = {The Conditional Level of {S}tudent's $t$ Test},
+ Volume = {38},
+ Year = {1967}}
+
+@article{Brown2002,
+ Author = {Brown, L. D. and Cai, T. T. and DasGupta, A.},
+ Journal = {The Annals of Statistics},
+ Pages = {160--201},
+ Title = {Confidence Intervals For a Binomial Proportion and Asymptotic Expansions},
+ Volume = {30},
+ Year = {2002}}
+
+@article{Brown2005a,
+ Author = {Brown, S. and Heathcote, A.},
+ Journal = {Psychological Review},
+ Pages = {117--128},
+ Title = {A Ballistic Model of Choice Response Time},
+ Volume = {112},
+ Year = {2005}}
+
+@article{Browne2000,
+ Author = {Browne, M.},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {108--132},
+ Title = {Cross-Validation Methods},
+ Volume = {44},
+ Year = {2000}}
+
+@article{Bruguier2008,
+ Author = {Bruguier, A. and Preuschoff, K. and Quartz, S. and Bossaerts, P.},
+ Journal = {Neuroimage},
+ Month = {May},
+ Pages = {35--44},
+ Title = {{{I}nvestigating signal integration with canonical correlation analysis of f{M}{R}{I} brain activation data}},
+ Volume = {41},
+ Year = {2008}}
+
+@article{Buehler1963,
+ Author = {Buehler, R. J. and Fedderson, A. P.},
+ Journal = {The Annals of Mathematical Statistics},
+ Pages = {1098--1100},
+ Title = {Note on a Conditional Property of {S}tudent's $t$},
+ Volume = {34},
+ Year = {1963}}
+
+@article{Bulsara1979,
+ Author = {Bulsara, A. R. and Lindenberg, K. and Seshadri, V. and Shuler, K. E. and West, B. J.},
+ Journal = {Physica},
+ Pages = {234--243},
+ Title = {Stochastic Processes with Non-Additive Fluctuations. {II}. {S}ome Applications of {I}t\^{o} and {S}tratonovich Calculus},
+ Volume = {97A},
+ Year = {1979}}
+
+@book{Burdette1970,
+ Address = {Springfield (IL)},
+ Author = {Burdette, W. J. and Gehan, E. A.},
+ Publisher = {Charles C. Thomas},
+ Title = {Planning and Analysis of Clinical Studies},
+ Year = {1970}}
+
+@article{Burg1975a,
+ Author = {W. van den Burg and H. M. van Praag and E. R. Bos and A. K. van Zanten and D. A. 
Piers}, + Journal = {Prog Brain Res}, + Keywords = {Clinical Trials as Topic; Depression, drug therapy; Humans; Thyrotropin-Releasing Hormone, therapeutic use}, + Language = {eng}, + Medline-Pst = {ppublish}, + Owner = {Young}, + Pages = {68--69}, + Pmid = {812147}, + Timestamp = {2010.05.01}, + Title = {TRH as a possible quick-acting but short-lasting antidepressant.}, + Volume = {42}, + Year = {1975}} + +@article{Burg1975, + Abstract = {In a double reversal design the potency of thyrotropin releasing hormone + (TRH) (500 mug intravenously) as a quick-acting antidepressive agent + was evaluated. A first injection did seem to give rise to a very + slight short-lasting effect, though this could not be ascertained + clearly. There were no visible effects after a second injection. + The thyroid stimulating hormone (TSH) response curve after TRH administration + in the depressive patients group was blunted in comparison with that + in a matched control group of normals.}, + Author = {W. Van den Burg and H. M. Van Praag and E. R. Bos and D. A. Piers and A. K. Van Zanten and H. Doorenbos}, + Journal = {Psychol Med}, + Keywords = {Adult; Aged; Antidepressive Agents; Clinical Trials as Topic; Drug Evaluation; Female; Humans; Injections, Intravenous; Male; Middle Aged; Placebos; Psychological Tests; Self Assessment (Psychology); Thyrotropin, blood; Thyrotropin-Releasing Hormone, administration /&/ dosage/pharmacology; Thyroxine, blood; Time Factors; Triiodothyronine, blood}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {4}, + Owner = {Young}, + Pages = {404--412}, + Pmid = {812133}, + Timestamp = {2010.05.01}, + Title = {Thyrotropin releasing hormone (TRH) as a possible quick-acting but short-lasting antidepressant.}, + Volume = {5}, + Year = {1975}} + +@book{Burnham2002, + Address = {New York}, + Author = {Burnham, K. P. and Anderson, D. R.}, + Publisher = {Springer Verlag}, + Title = {Model Selection and Multimodel Inference: A Practical Information--Theoretic Approach (2nd ed.)}, + Year = {2002}} + +@article{Burton1979a, + Author = {Burton, H. and Craig, A. D.}, + Journal = {Brain Res.}, + Month = {Feb}, + Pages = {515--521}, + Title = {{{D}istribution of trigeminothalamic projection cells in cat and monkey}}, + Volume = {161}, + Year = {1979}} + +@article{Burton1979, + Author = {Burton, H. and Craig, A. D. and Poulos, D. A. and Molt, J. T.}, + Journal = {J. Comp. Neurol.}, + Month = {Feb}, + Pages = {753--777}, + Title = {{{E}fferent projections from temperature sensitive recording loci within the marginal zone of the nucleus caudalis of the spinal trigeminal complex in the cat}}, + Volume = {183}, + Year = {1979}} + +@book{Busemeyer2010, + Author = {Busemeyer, Jerome R. and Diederich, A.}, + Date-Modified = {2016-03-23 13:48:40 +0000}, + Publisher = {Sage Publications, Inc}, + Title = {{Cognitive modeling}}, + Year = {2010}} + +@article{Busemeyer2007, + Author = {Busemeyer, Jerome R. and Jessup, Ryan K. and Dimperio, Eric}, + Owner = {WooYoung Ahn}, + Pages = {Manuscript submitted for publication}, + Timestamp = {2007.12.14}, + Title = {Integrating sophisticated choice models with basic learning processes to more fully account for complex choice behavior}, + Year = {2007}} + +@article{Busemeyer1992, + Author = {Busemeyer, J. R. and Myung, I. 
J.},
+ Journal = {Journal of Experimental Psychology: General},
+ Owner = {Wooyoung Ahn},
+ Pages = {177--194},
+ Timestamp = {2007.05.03},
+ Title = {An adaptive approach to human decision-making: Learning theory, decision theory, and human performance},
+ Volume = {121},
+ Year = {1992}}
+
+@article{Busemeyer2009a,
+ Author = {Busemeyer, J. R. and Pleskac, T. J.},
+ Journal = {Journal of Mathematical Psychology},
+ Number = {3},
+ Pages = {126--138},
+ Publisher = {Elsevier},
+ Title = {{Theoretical tools for understanding and aiding dynamic decision making}},
+ Volume = {53},
+ Year = {2009}}
+
+@article{Busemeyer2002,
+ Author = {Busemeyer, J. R. and Stout, J. C.},
+ Journal = {Psychological Assessment},
+ Owner = {Wooyoung Ahn},
+ Pages = {253--262},
+ Timestamp = {2007.04.30},
+ Title = {A contribution of cognitive decision models to clinical assessment: Decomposing performance on the {B}echara {G}ambling {T}ask},
+ Volume = {14(3)},
+ Year = {2002}}
+
+@article{Busemeyer2002a,
+ Author = {Busemeyer, J. R. and Stout, J. C.},
+ Journal = {Psychological Assessment},
+ Pages = {253--262},
+ Title = {A Contribution of Cognitive Decision Models to Clinical Assessment: {D}ecomposing Performance on the {B}echara Gambling Task},
+ Volume = {14},
+ Year = {2002}}
+
+@article{Busemeyer1993,
+ Author = {Busemeyer, J. R. and Townsend, J. T.},
+ Journal = {Psychological Review},
+ Pages = {432--459},
+ Title = {Decision Field Theory: A Dynamic--Cognitive Approach to Decision Making},
+ Volume = {100},
+ Year = {1993}}
+
+@article{Busemeyer1992a,
+ Author = {Busemeyer, J. R. and Townsend, J. T.},
+ Journal = {Mathematical Social Sciences},
+ Pages = {255--282},
+ Title = {Fundamental Derivations From Decision Field Theory},
+ Volume = {23},
+ Year = {1992}}
+
+@article{Busemeyer2000a,
+ Author = {Busemeyer, Jerome R. and Wang, Y.-M.},
+ Date-Modified = {2016-03-23 13:53:52 +0000},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {171--189},
+ Title = {Model Comparisons and Model Selections Based on Generalization Criterion Methodology},
+ Volume = {44},
+ Year = {2000}}
+
+@article{Businelle2008,
+ Author = {Businelle, M. S. and Apperson, M. R. and Kendzor, D. E. and Terlecki, M. A. and Copeland, A. L.},
+ Journal = {Exp Clin Psychopharmacol},
+ Month = {Dec},
+ Pages = {513--520},
+ Title = {{{T}he relative impact of nicotine dependence, other substance dependence, and gender on {B}echara {G}ambling {T}ask performance}},
+ Volume = {16},
+ Year = {2008}}
+
+@article{Caccia1997,
+ Author = {Caccia, D. C. and Percival, D. and Cannon, M. J. and Raymond, G. and Bassingthwaighte, J. B.},
+ Journal = {Physica A},
+ Pages = {609--632},
+ Title = {Analyzing Exact Fractal Time Series: Evaluating Dispersional Analysis and Rescaled Range Methods},
+ Volume = {246},
+ Year = {1997}}
+
+@article{Cadet1996,
+ Author = {Cadet, J. L. and Bolla, K. I.},
+ Journal = {Synapse},
+ Month = {Jan},
+ Pages = {28--34},
+ Title = {{{C}hronic cocaine use as a neuropsychiatric syndrome: a model for debate}},
+ Volume = {22},
+ Year = {1996}}
+
+@article{Caine2002,
+ Author = {Caine, S.B. and Negus, S.S. and Mello, N.K. and Patel, S. and Bristow, L. and Kulagowski, J. and Vallone, D. and Saiardi, A. 
and Borrelli, E.},
+ Journal = {Journal of Neuroscience},
+ Number = {7},
+ Pages = {2977},
+ Publisher = {Soc Neuroscience},
+ Title = {{Role of dopamine D2-like receptors in cocaine self-administration: studies with D2 receptor mutant mice and novel D2 receptor antagonists}},
+ Volume = {22},
+ Year = {2002}}
+
+@article{Caine2007,
+ Author = {Caine, S.B. and Thomsen, M. and Gabriel, K.I. and Berkowitz, J.S. and Gold, L.H. and Koob, G.F. and Tonegawa, S. and Zhang, J. and Xu, M.},
+ Journal = {Journal of Neuroscience},
+ Number = {48},
+ Pages = {13140},
+ Publisher = {Soc Neuroscience},
+ Title = {{Lack of self-administration of cocaine in dopamine D1 receptor knock-out mice}},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Calder2000,
+ Author = {Calder, A.J. and Keane, J. and Manes, F. and Antoun, N. and Young, A.W.},
+ Journal = {Nature Neuroscience},
+ Number = {11},
+ Pages = {1077--1078},
+ Publisher = {Nature Publishing Group},
+ Title = {{Impaired recognition and experience of disgust following brain injury}},
+ Volume = {3},
+ Year = {2000}}
+
+@article{Campbell2007,
+ Author = {Campbell, D. W. and Sareen, J. and Paulus, M. P. and Goldin, P. R. and Stein, M. B. and Reiss, J. P.},
+ Journal = {Biol. Psychiatry},
+ Month = {Sep},
+ Pages = {455--463},
+ Title = {{{T}ime-varying amygdala response to emotional faces in generalized social phobia}},
+ Volume = {62},
+ Year = {2007}}
+
+@article{Campbell2009,
+ Author = {Campbell, D. W. and Sareen, J. and Stein, M. B. and Kravetsky, L. B. and Paulus, M. P. and Hassard, S. T. and Reiss, J. P.},
+ Journal = {Depress Anxiety},
+ Pages = {419--424},
+ Title = {{{H}appy but not so approachable: the social judgments of individuals with generalized social phobia}},
+ Volume = {26},
+ Year = {2009}}
+
+@article{Campbell2004,
+ Author = {Campbell, M. C. and Stout, J. C. and Finn, P. R.},
+ Journal = {J Int Neuropsychol Soc},
+ Month = {Mar},
+ Pages = {239--245},
+ Title = {{{R}educed autonomic responsiveness to gambling task losses in {H}untington's disease}},
+ Volume = {10},
+ Year = {2004}}
+
+@article{Campbell-Meiklejohn2008,
+ Author = {Campbell-Meiklejohn, D. K. and Woolrich, M. W. and Passingham, R. E. and Rogers, R. D.},
+ Journal = {Biol. Psychiatry},
+ Month = {Feb},
+ Pages = {293--300},
+ Title = {{{K}nowing when to stop: the brain mechanisms of chasing losses}},
+ Volume = {63},
+ Year = {2008}}
+
+@article{Camps1989,
+ Author = {Camps, M. and Cortés, R. and Gueye, B. and Probst, A. and Palacios, J. M.},
+ Journal = {Neuroscience},
+ Pages = {275--290},
+ Title = {{{D}opamine receptors in human brain: autoradiographic distribution of {D}2 sites}},
+ Volume = {28},
+ Year = {1989}}
+
+@article{Cannon1997,
+ Author = {Cannon, M. J. and Percival, D. B. and Caccia, D. C. and Raymond, G. M. and Bassingthwaighte, J. B.},
+ Journal = {Physica A},
+ Pages = {606--626},
+ Title = {Evaluating Scaled Windowed Variance Methods for Estimating the {H}urst Coefficient of Time Series},
+ Volume = {241},
+ Year = {1997}}
+
+@article{Cantlon2009,
+ Author = {Cantlon, J. F. and Platt, M. L. and Brannon, E. M.},
+ Journal = {Trends Cogn. Sci. (Regul. Ed.)},
+ Month = {Feb},
+ Pages = {83--91},
+ Title = {{{B}eyond the number domain}},
+ Volume = {13},
+ Year = {2009}}
+
+@article{Cantrell2008,
+ Author = {Cantrell, H. and Finn, P. R. and Rickert, M. E. and Lucas, J.},
+ Journal = {Alcohol. Clin. Exp. 
Res.}, + Month = {Aug}, + Pages = {1398--1407}, + Title = {{{D}ecision making in alcohol dependence: insensitivity to future consequences and comorbid disinhibitory psychopathology}}, + Volume = {32}, + Year = {2008}} + +@article{Cardenas2007, + Author = {Cardenas, V.A. and Studholme, C. and Gazdzinski, S. and Durazzo, T.C. and Meyerhoff, D.J.}, + Journal = {Neuroimage}, + Number = {3}, + Pages = {879--887}, + Publisher = {Elsevier}, + Title = {{Deformation-based morphometry of brain changes in alcohol dependence and abstinence}}, + Volume = {34}, + Year = {2007}} + +@article{Cardinal2002, + Author = {Cardinal, R.N. and Parkinson, J.A. and Hall, J. and Everitt, B.J.}, + Journal = {Neuroscience and Biobehavioral Reviews}, + Number = {3}, + Pages = {321--352}, + Title = {{Emotion and motivation: the role of the amygdala, ventral striatum, and prefrontal cortex}}, + Volume = {26}, + Year = {2002}} + +@article{Carlezon2005, + Author = {Carlezon, W. A. and Duman, R. S. and Nestler, E. J.}, + Journal = {Trends Neurosci.}, + Month = {Aug}, + Pages = {436--445}, + Title = {{{T}he many faces of {C}{R}{E}{B}}}, + Volume = {28}, + Year = {2005}} + +@article{Carlezon2002, + Author = {Carlezon, W. A. and Nestler, E. J.}, + Journal = {Trends Neurosci.}, + Month = {Dec}, + Pages = {610--615}, + Title = {{{E}levated levels of {G}lu{R}1 in the midbrain: a trigger for sensitization to drugs of abuse?}}, + Volume = {25}, + Year = {2002}} + +@article{Carlezon1998, + Author = {Carlezon, W. A. and Thome, J. and Olson, V. G. and Lane-Ladd, S. B. and Brodkin, E. S. and Hiroi, N. and Duman, R. S. and Neve, R. L. and Nestler, E. J.}, + Journal = {Science}, + Month = {Dec}, + Pages = {2272--2275}, + Title = {{{R}egulation of cocaine reward by {C}{R}{E}{B}}}, + Volume = {282}, + Year = {1998}} + +@article{Carlin1998, + Author = {Carlin, B. P. and Kadane, J. B. and Gelfand, A. E.}, + Journal = {Biometrics}, + Pages = {964--975}, + Title = {Approaches for Optimal Sequential Decision Analysis in Clinical Trials}, + Volume = {54}, + Year = {1998}} + +@article{Carmichael1996, + Author = {Carmichael, S. T. and Price, J. L.}, + Journal = {J. Comp. Neurol.}, + Pages = {179--207}, + Title = {{{C}onnectional networks within the orbital and medial prefrontal cortex of macaque monkeys}}, + Volume = {371}, + Year = {1996}} + +@article{Carmichael1995, + Author = {Carmichael, S. T. and Price, J. L.}, + Journal = {J. Comp. Neurol.}, + Pages = {615--641}, + Title = {{{L}imbic connections of the orbital and medial prefrontal cortex in macaque monkeys}}, + Volume = {363}, + Year = {1995}} + +@article{Carney1993a, + Author = {Carney, T. and Shadlen, M. N.}, + Journal = {Vision Res.}, + Month = {Sep}, + Pages = {1977--1995}, + Title = {{{D}ichoptic activation of the early motion system}}, + Volume = {33}, + Year = {1993}} + +@article{Carney1992a, + Author = {Carney, T. and Shadlen, M. N.}, + Journal = {Vision Res.}, + Month = {Jan}, + Pages = {187--191}, + Title = {{{B}inocularity of early motion mechanisms: comments on {G}eorgeson and {S}hackleton}}, + Volume = {32}, + Year = {1992}} + +@article{Carpenter2001, + Author = {Carpenter, R. H. S. and Reddi, B. A. J.}, + Journal = {Nature Neuroscience}, + Pages = {337}, + Title = {Deciding Between the Deciders: {T}wo Models of Reaction Time may Happily Coexist}, + Volume = {4}, + Year = {2001}} + +@article{Carroll2006, + Author = {Carroll, K. M. and Easton, C. J. and Nich, C. and Hunkele, K. A. and Neavins, T. M. and Sinha, R. and Ford, H. L. and Vitolo, S. A. and Doebrick, C. A. 
and Rounsaville, B. J.}, + Journal = {J Consult Clin Psychol}, + Month = {Oct}, + Pages = {955--966}, + Title = {{{T}he use of contingency management and motivational/skills-building therapy to treat young adults with marijuana dependence}}, + Volume = {74}, + Year = {2006}} + +@article{Carroll2006a, + Author = {Carroll, K. M. and Easton, C. J. and Nich, C. and Hunkele, K. A. and Neavins, T. M. and Sinha, R. and Ford, H. L. and Vitolo, S. A. and Doebrick, C. A. and Rounsaville, B. J.}, + Journal = {J Consult Clin Psychol}, + Month = {Oct}, + Pages = {955--966}, + Title = {{{T}he use of contingency management and motivational/skills-building therapy to treat young adults with marijuana dependence}}, + Volume = {74}, + Year = {2006}} + +@article{Carroll2006b, + Author = {Carroll, K. M. and Easton, C. J. and Nich, C. and Hunkele, K. A. and Neavins, T. M. and Sinha, R. and Ford, H. L. and Vitolo, S. A. and Doebrick, C. A. and Rounsaville, B. J.}, + Journal = {J Consult Clin Psychol}, + Month = {Oct}, + Pages = {955--966}, + Title = {{{T}he use of contingency management and motivational/skills-building therapy to treat young adults with marijuana dependence}}, + Volume = {74}, + Year = {2006}} + +@article{Carroll2002, + Author = {Carroll, K. M. and Sinha, R. and Nich, C. and Babuscio, T. and Rounsaville, B. J.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Feb}, + Pages = {54--63}, + Title = {{{C}ontingency management to enhance naltrexone treatment of opioid dependence: a randomized clinical trial of reinforcement magnitude}}, + Volume = {10}, + Year = {2002}} + +@article{Carroll2002a, + Author = {Carroll, K. M. and Sinha, R. and Nich, C. and Babuscio, T. and Rounsaville, B. J.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Feb}, + Pages = {54--63}, + Title = {{{C}ontingency management to enhance naltrexone treatment of opioid dependence: a randomized clinical trial of reinforcement magnitude}}, + Volume = {10}, + Year = {2002}} + +@article{Carter1998, + Author = {Carter, C. S. and Braver, T. S. and Barch, D. M. and Botvinick, M. M. and Noll, D. and Cohen, J. D.}, + Journal = {Science}, + Month = {May}, + Pages = {747--749}, + Title = {{{A}nterior cingulate cortex, error detection, and the online monitoring of performance}}, + Volume = {280}, + Year = {1998}} + +@article{Carter2006, + Author = {Carter, R. M. and O'Doherty, J. P. and Seymour, B. and Koch, C. and Dolan, R. J.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {1007--1012}, + Title = {{{C}ontingency awareness in human aversive conditioning involves the middle frontal gyrus}}, + Volume = {29}, + Year = {2006}} + +@unpublished{Casella2004, + Author = {Casella, G. and Moreno, E.}, + Title = {Objective {B}ayesian Analysis of Contingency Tables}, + Year = {2004}} + +@book{Castrigiano1993, + Address = {New York}, + Author = {Castrigiano, D. P. L. and Hayes, S. A.}, + Publisher = {Addison--Wesley}, + Title = {Catastrophe Theory}, + Year = {1993}} + +@article{Cattapan-Ludewig2008, + Author = {Cattapan-Ludewig, K. and Ludewig, S. and Messerli, N. and Vollenweider, F. X. and Seitz, A. and Feldon, J. and Paulus, M. P.}, + Journal = {J. Nerv. Ment. 
Dis.}, + Month = {Feb}, + Pages = {157--160}, + Title = {{{D}ecision-making dysregulation in first-episode schizophrenia}}, + Volume = {196}, + Year = {2008}} + +@article{Cerella1985, + Author = {Cerella, J.}, + Journal = {Psychological Bulletin}, + Pages = {67--83}, + Title = {Information Processing Rates in the Elderly}, + Volume = {98}, + Year = {1985}} + +@article{Chae2003, + Author = {Chae, H. and Lyoo, I. K. and Lee, S. J. and Cho, S. and Bae, H. and Hong, M. and Shin, M.}, + Journal = {J Altern Complement Med}, + Month = {Aug}, + Pages = {519--528}, + Title = {{{A}n alternative way to individualized medicine: psychological and physical traits of {S}asang typology}}, + Volume = {9}, + Year = {2003}} + +@article{Chakroun2004, + Abstract = {SUMMARY: The International Consortium of Psychiatric Epidemiology + has confirmed the high comorbidity in community-drawn samples between + substance use disorders and anxiety or depression. In the same way, + associations between substance use and specific personality traits + (such as novelty seeking, harm avoidance or antisocial personality) + have also been extensively documented. Self-medication and social + deviance are among the most commonly evoked explanatory models for + these forms of comorbidity, and are based on findings that affective + disorders and specific personality traits often precede the onset + of substance use disorders. The self-medication model postulates + that an individual chooses a specific substance according to its + psychopharmacologic action on the given psychological state of the + person. By contrast, the social deviance model posits that this form + of comorbidity is due to the fact that persons consuming certain + substances may have affective or personality characteristics that + are more severe or more deviant than non-consumers (or than consumers + of socially well-accepted substances). In this way, the individual + does not use a particular substance to assuage pre-existing disorders + but, due to a more deviant personality, is less influenced by social + norms and may more easily turn to using illicit substances or to + polyconsumption. However, a major limitation of the current scientific + literature concerning tests of these models is that previous investigations + have been based in overwhelming majority on clinical populations. + The examination only of clinical samples renders difficult the identification + of causal (or primary) risk factors for the emergence of substance + use disorders from the potential consequences of substance use itself. + The goal of the current study was therefore to simultaneously compare + both models of association using a non clinical population of substance + users. In addition to selecting subjects based on use (rather than + abuse or dependence), multiple comparisons were corrected with a + Bonferroni adjustment. METHOD: A two-phase sampling plan was used + with post-stratification on substances use. In the first stage, an + initial sample of 685 students was pre-selected based on responses + to a battery of self-questionnaires, including information concerning + recent consumption of substances (alcohol, cannabis, cocaine, heroin, + acid, solvents, and so on), anxiety levels measured by the State + and Trait Anxiety Inventory (STAI, Spielberger, 1983) and depression + levels evaluated by the Center of Epidemiologic Studies Depression + Scale (CES-D, Radloff, 1977). 
Based on responses to these questionnaires, + 98 subjects were selected in the second phase to compose four groups + of substance users: non consumers (those who did not use any substance + during the last month); consumers of alcohol only, consumers of cannabis + (with or without alcohol) and consumers of other illicit substances + (with or without cannabis or alcohol). These subjects were then invited + to participate in a brief laboratory-based meeting where they completed + the Temperament and Character Inventory (TCI, Cloninger, 1992), which + assessed different personality characteristics such as novelty seeking + (NS), harm avoidance (HA) or antisocial personality disorder (APD). + ANALYSES: The hypotheses concerning self-medication were tested by + multiple logistic regression by comparing each group of substance + consumption to the non-consumer group relative to levels of anxiety, + depression and scores of novelty seeking and harm avoidance. The + social deviance model was tested by ANOVAs using contrasts which + allowed for a test of a linear tendency across the four groups of + consumption relative to each of the personality traits (novelty seeking, + harm avoidance and antisocial personality). RESULTS: Results of multiple + logistic regressions showed no difference between non-consumers and + any group of consumers with regard to anxiety, depression and harm + avoidance. However, consumers of other illicit substances significantly + differed from non-consumers for novelty seeking trait (qOR=8.4; p<0.05). + Results of the ANOVA also showed no differences between the four + groups with regard to scores of harm avoidance and level of antisocial + personality but again a comparison of novelty seeking scores was + significant, F(94)=6.46, p<0.05. Moreover, the contrast method demonstrated + that novelty seeking scores increased linearly and significantly + (p<0.001) from the group of non-consumers to the group of the consumers + of the most deviant substances. CONCLUSION: The results obtained + in this non-clinical sample are in favor of social deviance model + which posits that the personality trait of novelty seeking is associated + to the consumption of the most illicit and deviant substances (such + as heroin or cocaine). On the other hand, no support was found for + the hypothesis of self-medication which assumes that specific substances + should be particularly associated with specific psychological characteristics + or vulnerabilities.}, + Author = {N. Chakroun and J. Doron and J. 
Swendsen}, + Journal = {Encephale}, + Keywords = {Adolescent; Adult; Alcoholism, diagnosis/epidemiology/psychology; Antisocial Personality Disorder, diagnosis/epidemiology/psychology; Anxiety Disorders, diagnosis/epidemiology/psychology; Comorbidity; Cross-Sectional Studies; Defense Mechanisms; Depressive Disorder, diagnosis/epidemiology/psychology; Diagnosis, Dual (Psychiatry); Exploratory Behavior; Female; France; Humans; Male; Personality Disorders, diagnosis/epidemiology/psychology; Personality Inventory, statistics /&/ numerical data; Psychometrics, statistics /&/ numerical data; Psychotropic Drugs; Reproducibility of Results; Risk Factors; Self Medication, psychology; Students, psychology; Substance-Related Disorders, diagnosis/epidemiology/psychology}, + Language = {fre}, + Medline-Pst = {ppublish}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {564--569}, + Pii = {MDOI-ENC-12-2004-30-6-0013-7006-101019-ART8}, + Pmid = {15738859}, + Timestamp = {2009.08.06}, + Title = {[Substance use, affective problems and personality traits: test of two association models.]}, + Volume = {30}, + Year = {2004}} + +@article{Chambers2007, + Author = {Chambers, C. D. and Bellgrove, M. A. and Gould, I. C. and English, T. and Garavan, H. and McNaught, E. and Kamke, M. and Mattingley, J. B.}, + Journal = {J. Neurophysiol.}, + Month = {Dec}, + Pages = {3638--3647}, + Title = {{{D}issociable mechanisms of cognitive control in prefrontal and premotor cortex}}, + Volume = {98}, + Year = {2007}} + +@article{Chambers2006, + Author = {Chambers, C. D. and Bellgrove, M. A. and Stokes, M. G. and Henderson, T. R. and Garavan, H. and Robertson, I. H. and Morris, A. P. and Mattingley, J. B.}, + Journal = {J Cogn Neurosci}, + Month = {Mar}, + Pages = {444--455}, + Title = {{{E}xecutive "brake failure" following deactivation of human frontal lobe}}, + Volume = {18}, + Year = {2006}} + +@article{Chambers2009, + Author = {Chambers, C. D. and Garavan, H. and Bellgrove, M. A.}, + Journal = {Neurosci Biobehav Rev}, + Month = {May}, + Pages = {631--646}, + Title = {{{I}nsights into the neural basis of response inhibition from cognitive and clinical neuroscience}}, + Volume = {33}, + Year = {2009}} + +@article{Chambers2003, + Abstract = {OBJECTIVE: Epidemiological studies indicate that experimentation with + addictive drugs and onset of addictive disorders is primarily concentrated + in adolescence and young adulthood. The authors describe basic and + clinical data supporting adolescent neurodevelopment as a biologically + critical period of greater vulnerability for experimentation with + substances and acquisition of substance use disorders. METHOD: The + authors reviewed recent literature regarding neurocircuitry underlying + motivation, impulsivity, and addiction, with a focus on studies investigating + adolescent neurodevelopment. RESULTS: Adolescent neurodevelopment + occurs in brain regions associated with motivation, impulsivity, + and addiction. Adolescent impulsivity and/or novelty seeking as a + transitional trait behavior can be explained in part by maturational + changes in frontal cortical and subcortical monoaminergic systems. + These developmental processes may advantageously promote learning + drives for adaptation to adult roles but may also confer greater + vulnerability to the addictive actions of drugs. 
CONCLUSIONS: An
+ exploration of developmental changes in neurocircuitry involved in
+ impulse control has significant implications for understanding adolescent
+ behavior, addiction vulnerability, and the prevention of addiction
+ in adolescence and adulthood.},
+ Author = {R. Andrew Chambers and Jane R Taylor and Marc N Potenza},
+ Institution = {Connecticut Mental Health Center, the Problem Gambling Clinic, Yale University School of Medicine, New Haven, CT 06508, USA. robert.chambers@yale.edu},
+ Journal = {Am J Psychiatry},
+ Keywords = {Adaptation, Psychological; Adolescent; Adolescent Behavior, physiology/psychology; Adult; Age Factors; Behavior, Addictive, physiopathology/psychology; Brain, growth /&/ development/physiology/physiopathology; Critical Period (Psychology); Decision Making, physiology; Disease Susceptibility, physiopathology/psychology; Frontal Lobe, growth /&/ development/physiology/physiopathology; Humans; Models, Neurological; Motivation; Neural Inhibition, physiology; Personality Development; Personality, physiology; Substance-Related Disorders, physiopathology/psychology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jun},
+ Number = {6},
+ Owner = {Woo-Young Ahn},
+ Pages = {1041--1052},
+ Pmid = {12777258},
+ Timestamp = {2009.08.06},
+ Title = {Developmental neurocircuitry of motivation in adolescence: a critical period of addiction vulnerability.},
+ Volume = {160},
+ Year = {2003}}
+
+@article{Chandler2009,
+ Author = {Chandler, R. K. and Fletcher, B. W. and Volkow, N. D.},
+ Journal = {JAMA},
+ Month = {Jan},
+ Pages = {183--190},
+ Title = {{{T}reating drug abuse and addiction in the criminal justice system: improving public health and safety}},
+ Volume = {301},
+ Year = {2009}}
+
+@article{Chang2002,
+ Author = {Chang, L. and Ernst, T. and Speck, O. and Patel, H. and DeSilva, M. and Leonido-Yee, M. and Miller, E.N.},
+ Journal = {Psychiatry Research: Neuroimaging},
+ Number = {2},
+ Pages = {65--79},
+ Publisher = {Elsevier},
+ Title = {{Perfusion MRI and computerized cognitive test abnormalities in abstinent methamphetamine users}},
+ Volume = {114},
+ Year = {2002}}
+
+@article{Chang2008,
+ Author = {Chang, L. and Wang, G. J. and Volkow, N. D. and Ernst, T. and Telang, F. and Logan, J. and Fowler, J. S.},
+ Journal = {Neuroimage},
+ Month = {Aug},
+ Pages = {869--878},
+ Title = {{{D}ecreased brain dopamine transporters are related to cognitive deficits in {H}{I}{V} patients with or without cocaine abuse}},
+ Volume = {42},
+ Year = {2008}}
+
+@article{Chao2004,
+ Author = {Chao, J. and Nestler, E. J.},
+ Journal = {Annu. Rev. Med.},
+ Pages = {113--132},
+ Title = {{{M}olecular neurobiology of drug addiction}},
+ Volume = {55},
+ Year = {2004}}
+
+@article{Chao2002,
+ Author = {Chao, J. R. and Ni, Y. G. and Bolanos, C. A. and Rahman, Z. and DiLeone, R. J. and Nestler, E. J.},
+ Journal = {Eur. J. Neurosci.},
+ Month = {Oct},
+ Pages = {1284--1294},
+ Title = {{{C}haracterization of the mouse adenylyl cyclase type {V}{I}{I}{I} gene promoter: regulation by c{A}{M}{P} and {C}{R}{E}{B}}},
+ Volume = {16},
+ Year = {2002}}
+
+@article{Chaplin2009,
+ Author = {Chaplin, T. M. and Fahy, T. and Sinha, R. and Mayes, L. C.},
+ Journal = {Neurotoxicol Teratol},
+ Month = {May},
+ Title = {{{E}motional arousal in cocaine exposed toddlers: {P}rediction of behavior problems}},
+ Year = {2009}}
+
+@article{Chaplin2009a,
+ Author = {Chaplin, T. M. and Fahy, T. and Sinha, R. and Mayes, L. 
C.}, + Journal = {Neurotoxicol Teratol}, + Month = {May}, + Title = {{{E}motional arousal in cocaine exposed toddlers: {P}rediction of behavior problems}}, + Year = {2009}} + +@article{Chaplin2008, + Author = {Chaplin, T. M. and Hong, K. and Bergquist, K. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jul}, + Pages = {1242--1250}, + Title = {{{G}ender differences in response to emotional stress: an assessment across subjective, behavioral, and physiological domains and relations to alcohol craving}}, + Volume = {32}, + Year = {2008}} + +@article{Chaplin2008a, + Author = {Chaplin, T. M. and Hong, K. and Bergquist, K. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jul}, + Pages = {1242--1250}, + Title = {{{G}ender differences in response to emotional stress: an assessment across subjective, behavioral, and physiological domains and relations to alcohol craving}}, + Volume = {32}, + Year = {2008}} + +@article{Chaplin2008b, + Author = {Chaplin, T. M. and Hong, K. and Bergquist, K. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jul}, + Pages = {1242--1250}, + Title = {{{G}ender differences in response to emotional stress: an assessment across subjective, behavioral, and physiological domains and relations to alcohol craving}}, + Volume = {32}, + Year = {2008}} + +@article{Charlton2008, + Author = {Charlton, J. J. and Allen, P. B. and Psifogeorgou, K. and Chakravarty, S. and Gomes, I. and Neve, R. L. and Devi, L. A. and Greengard, P. and Nestler, E. J. and Zachariou, V.}, + Journal = {Neuron}, + Month = {Apr}, + Pages = {238--247}, + Title = {{{M}ultiple actions of spinophilin regulate mu opioid receptor function}}, + Volume = {58}, + Year = {2008}} + +@article{Chen2001, + Author = {Chen, Y. and Ding, M. and Kelso, J. A. S.}, + Journal = {Journal of Motor Behavior}, + Pages = {3--8}, + Title = {Origin of Timing Errors in Human Sensorimotor Coordination}, + Volume = {33}, + Year = {2001}} + +@article{Chen1997, + Author = {Chen, Y. and Ding, M. and Kelso, J. A. S.}, + Journal = {Physical Review Letters}, + Pages = {4501--4504}, + Title = {Long Memory Processes ($1/f^\alpha$ type) in Human Coordination}, + Volume = {79}, + Year = {1997}} + +@article{Chikama1997, + Author = {Chikama, M. and McFarland, N.R. and Amaral, D.G. and Haber, S.N.}, + Journal = {Journal of Neuroscience}, + Number = {24}, + Pages = {9686--9705}, + Publisher = {Soc Neuroscience}, + Title = {{Insular cortical projections to functional regions of the striatum correlate with cortical cytoarchitectonic organization in the primate}}, + Volume = {17}, + Year = {1997}} + +@article{Childress1981, + Author = {Childress, A. R. and Burns, D. D.}, + Journal = {Psychosomatics}, + Month = {Dec}, + Pages = {1017--1027}, + Title = {{{T}he basics of cognitive therapy}}, + Volume = {22}, + Year = {1981}} + +@article{Childress1994, + Author = {Childress, A. R. and Ehrman, R. and McLellan, A. T. and MacRae, J. and Natale, M. and O'Brien, C. P.}, + Journal = {J Subst Abuse Treat}, + Pages = {17--23}, + Title = {{{C}an induced moods trigger drug-related responses in opiate abuse patients?}}, + Volume = {11}, + Year = {1994}} + +@article{Childress2008, + Author = {Childress, A. R. and Ehrman, R. N. and Wang, Z. and Li, Y. and Sciortino, N. and Hakun, J. and Jens, W. and Suh, J. and Listerud, J. and Marquez, K. and Franklin, T. and Langleben, D. and Detre, J. and O'Brien, C. 
P.}, + Journal = {PLoS ONE}, + Pages = {e1506}, + Title = {{{P}relude to passion: limbic activation by "unseen" drug and sexual cues}}, + Volume = {3}, + Year = {2008}} + +@article{Childress1993, + Author = {Childress, A. R. and Hole, A. V. and Ehrman, R. N. and Robbins, S. J. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {73--95}, + Title = {{{C}ue reactivity and cue reactivity interventions in drug dependence}}, + Volume = {137}, + Year = {1993}} + +@article{Childress1988, + Author = {Childress, A. R. and McLellan, A. T. and Ehrman, R. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {25--43}, + Title = {{{C}lassically conditioned responses in opioid and cocaine dependence: a role in relapse?}}, + Volume = {84}, + Year = {1988}} + +@article{Childress1987a, + Author = {Childress, A. R. and McLellan, A. T. and Ehrman, R. N. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {189--195}, + Title = {{{E}xtinction of conditioned responses in abstinent cocaine or opioid users}}, + Volume = {76}, + Year = {1987}} + +@article{Childress1987, + Author = {Childress, A. R. and McLellan, A. T. and Natale, M. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {137--144}, + Title = {{{M}ood states can elicit conditioned withdrawal and craving in opiate abuse patients}}, + Volume = {76}, + Year = {1987}} + +@article{Childress1986, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {Psychiatr. Clin. North Am.}, + Month = {Sep}, + Pages = {413--425}, + Title = {{{R}ole of conditioning factors in the development of drug dependence}}, + Volume = {9}, + Year = {1986}} + +@article{Childress1986a, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {366--372}, + Title = {{{N}ature and incidence of conditioned responses in a methadone population: a comparison of laboratory, clinic, and naturalistic settings}}, + Volume = {67}, + Year = {1986}} + +@article{Childress1986b, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {J Subst Abuse Treat}, + Pages = {173--179}, + Title = {{{C}onditioned responses in a methadone population. {A} comparison of laboratory, clinic, and natural settings}}, + Volume = {3}, + Year = {1986}} + +@article{Childress1986c, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {Br J Addict}, + Month = {Oct}, + Pages = {655--660}, + Title = {{{A}bstinent opiate abusers exhibit conditioned craving, conditioned withdrawal and reductions in both through extinction}}, + Volume = {81}, + Year = {1986}} + +@article{Childress1985, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {Int J Addict}, + Pages = {947--969}, + Title = {{{B}ehavioral therapies for substance abuse}}, + Volume = {20}, + Year = {1985}} + +@article{Childress1984, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Month = {Mar}, + Pages = {212--219}, + Title = {{{M}easurement and extinction of conditioned withdrawal-like responses in opiate-dependent patients}}, + Volume = {49}, + Year = {1984}} + +@article{Childress1984a, + Author = {Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {NIDA Res. 
Monogr.},
+ Pages = {202--210},
+ Title = {{{A}ssessment and extinction of conditioned withdrawal-like responses in an integrated treatment for opiate dependence}},
+ Volume = {55},
+ Year = {1984}}
+
+@article{Childress1991,
+ Author = {Childress, A. R. and McLellan, A. T. and Woody, G. E. and O'Brien, C. P.},
+ Journal = {NIDA Res. Monogr.},
+ Pages = {167--177},
+ Title = {{{A}re there minimum conditions necessary for methadone maintenance to reduce intravenous drug use and {A}{I}{D}{S} risk behaviors?}},
+ Volume = {106},
+ Year = {1991}}
+
+@article{Childress1999,
+ Author = {Childress, A. R. and Mozley, P. D. and McElgin, W. and Fitzgerald, J. and Reivich, M. and O'Brien, C. P.},
+ Journal = {Am J Psychiatry},
+ Month = {Jan},
+ Pages = {11--18},
+ Title = {{{L}imbic activation during cue-induced cocaine craving}},
+ Volume = {156},
+ Year = {1999}}
+
+@article{Childress2000,
+ Author = {Childress, A. R. and O'Brien, C. P.},
+ Journal = {Trends Pharmacol. Sci.},
+ Month = {Jan},
+ Pages = {6--9},
+ Title = {{{D}opamine receptor partial agonists could address the duality of cocaine craving}},
+ Volume = {21},
+ Year = {2000}}
+
+@article{Chiodo1980,
+ Author = {Chiodo, LA and Antelman, SM and Caggiula, AR and Lineberry, CG},
+ Journal = {Brain Research},
+ Number = {2},
+ Pages = {544},
+ Title = {{Sensory stimuli alter the discharge rate of dopamine (DA) neurons: evidence for two functional types of DA cells in the substantia nigra.}},
+ Volume = {189},
+ Year = {1980}}
+
+@article{Chiu2008,
+ Author = {Chiu, P. H. and Kayali, M. A. and Kishida, K. T. and Tomlin, D. and Klinger, L. G. and Klinger, M. R. and Montague, P. R.},
+ Journal = {Neuron},
+ Month = {Feb},
+ Pages = {463--473},
+ Title = {{{S}elf responses along cingulate cortex reveal quantitative neural phenotype for high-functioning autism}},
+ Volume = {57},
+ Year = {2008}}
+
+@article{Chiu2008a,
+ Author = {Chiu, P. H. and Lohrenz, T. M. and Montague, P. R.},
+ Journal = {Nat. Neurosci.},
+ Month = {Apr},
+ Pages = {514--520},
+ Title = {{{S}mokers' brains compute, but ignore, a fictive error signal in a sequential investment task}},
+ Volume = {11},
+ Year = {2008}}
+
+@conference{Chiu2006,
+ Author = {Chiu, Y. C. and Lin, C. H. and Huang, J. T. and Lin, S. and Huang, J. T.},
+ Owner = {WooYoung Ahn},
+ Timestamp = {2008.03.23},
+ Title = {Reexamining the effect of long-term outcome and gain-loss frequency: from uncertainty to certainty. {Paper presented at the Fourth Annual Meeting of the Society for Neuroeconomics, Park City, Utah, U.S.A. September 07-10, 2006}},
+ Year = {2006}}
+
+@conference{Chiu2005,
+ Author = {Chiu, Y. C. and Lin, C. H. and Huang, J. T. and Lin, S. and Lee, P. L. and Hsieh, J. C.},
+ Owner = {ahnw},
+ Timestamp = {2007.05.04},
+ Title = {Immediate gain is long-term loss: Are there foresighted decision makers in {I}owa {G}ambling {T}ask? {Paper presented at the Third Annual Meeting of the Society for Neuroeconomics, Kiawah Island, South Carolina, U.S.A. September 15-18, 2005}},
+ Year = {2005}}
+
+@article{Chiuinpress,
+ Author = {Chiu, Yao-Chu and Lin, Ching-Hung and Huang, Jong-Tsun and Lin, Shuyeu and Lee, Po-Lei and Hsieh, Jen-Chuen},
+ Journal = {Behavioral and Brain Functions},
+ Owner = {WooYoung Ahn},
+ Timestamp = {2008.03.23},
+ Title = {Immediate gain is long-term loss: {A}re there foresighted decision makers in the {I}owa {G}ambling {T}ask?},
+ Year = {in press}}
+
+@article{Cho2002a,
+ Author = {Cho, M. J. and Lyoo, I. K. and Lee, D. W. and Kwon, J. S. and Lee, J. S. and Lee, D. S. 
and Jung, J. K. and Lee, M. C.},
+  Journal = {J Affect Disord},
+  Month = {May},
+  Pages = {159--166},
+  Title = {{{B}rain single photon emission computed tomography findings in depressive pseudodementia patients}},
+  Volume = {69},
+  Year = {2002}}
+
+@article{Cho2002,
+  Author = {Cho, R. Y. and Nystrom, L. E. and Brown, E. T. and Jones, A. D. and Braver, T. S. and Holmes, P. J. and Cohen, J. D.},
+  Journal = {Cogn Affect Behav Neurosci},
+  Month = {Dec},
+  Pages = {283--299},
+  Title = {{{M}echanisms underlying dependencies of performance on stimulus history in a two-alternative forced-choice task}},
+  Volume = {2},
+  Year = {2002}}
+
+@article{Cho2008,
+  Author = {Cho, S. C. and Hwang, J. W. and Lyoo, I. K. and Yoo, H. J. and Kim, B. N. and Kim, J. W.},
+  Journal = {Psychiatry Clin. Neurosci.},
+  Month = {Apr},
+  Pages = {160--166},
+  Title = {{{P}atterns of temperament and character in a clinical sample of {K}orean children with attention-deficit hyperactivity disorder}},
+  Volume = {62},
+  Year = {2008}}
+
+@article{Cho2009,
+  Author = {Cho, S. C. and Kim, B. N. and Kim, J. W. and Rohde, L. A. and Hwang, J. W. and Chungh, D. S. and Shin, M. S. and Lyoo, I. K. and Go, B. J. and Lee, S. E. and Kim, H. W.},
+  Journal = {Eur Child Adolesc Psychiatry},
+  Month = {Jul},
+  Pages = {447--457},
+  Title = {{{F}ull syndrome and subthreshold attention-deficit/hyperactivity disorder in a {K}orean community sample: comorbidity and temperament findings}},
+  Volume = {18},
+  Year = {2009}}
+
+@article{Christensen2005,
+  Author = {Christensen, R.},
+  Journal = {The American Statistician},
+  Pages = {121--126},
+  Title = {Testing {F}isher, {N}eyman, {P}earson, and {B}ayes},
+  Volume = {59},
+  Year = {2005}}
+
+@article{Christodoulou2006,
+  Author = {Christodoulou, T. and Lewis, M. and Ploubidis, GB and Frangou, S.},
+  Journal = {European psychiatry: the journal of the Association of European Psychiatrists},
+  Number = {4},
+  Pages = {270},
+  Title = {{The relationship of impulsivity to response inhibition and decision-making in remitted patients with bipolar disorder.}},
+  Volume = {21},
+  Year = {2006}}
+
+@article{Chudnow2000,
+  Author = {Chudnow, R. S. and Wolfe, G. I. and Sparagana, S. P. and Delgado, M. R. and Batchelor, L. and Roach, E. S.},
+  Journal = {J. Child Neurol.},
+  Month = {Aug},
+  Pages = {529--532},
+  Title = {{{A}bnormal sudomotor function in the hypomelanotic macules of tuberous sclerosis complex}},
+  Volume = {15},
+  Year = {2000}}
+
+@article{Chung2007,
+  Author = {Chung, A. and Lyoo, I. K. and Kim, S. J. and Hwang, J. and Bae, S. C. and Sung, Y. H. and Sim, M. E. and Song, I. C. and Kim, J. and Chang, K. H. and Renshaw, P. F.},
+  Journal = {Int. J. Neuropsychopharmacol.},
+  Month = {Dec},
+  Pages = {765--775},
+  Title = {{{D}ecreased frontal white-matter integrity in abstinent methamphetamine abusers}},
+  Volume = {10},
+  Year = {2007}}
+
+@article{Churchland2008a,
+  Author = {Churchland, A. K. and Kiani, R. and Shadlen, M. N.},
+  Journal = {Nat. Neurosci.},
+  Month = {Jun},
+  Pages = {693--702},
+  Title = {{{D}ecision-making with multiple alternatives}},
+  Volume = {11},
+  Year = {2008}}
+
+@article{Cifarelli1996,
+  Author = {Cifarelli, D. M. and Regazzini, E.},
+  Journal = {Statistical Science},
+  Pages = {253--282},
+  Title = {{D}e {F}inetti's Contribution to Probability and Statistics},
+  Volume = {11},
+  Year = {1996}}
+
+@article{Clark2008,
+  Author = {Clark, L. and Bechara, A. and Damasio, H.
and Aitken, MRF and Sahakian, BJ and Robbins, TW}, + Journal = {Brain}, + Publisher = {Oxford Univ Press}, + Title = {{Differential effects of insular and ventromedial prefrontal cortex lesions on risky decision-making}}, + Year = {2008}} + +@article{Clark2001, + Abstract = {OBJECTIVE: Mania has received little attention from a contemporary + neuropsychological perspective despite its clear resemblance to the + disinhibition syndrome sometimes seen after frontal brain injury, + particularly injury to the inferior aspect of the prefrontal cortex. + The purpose of this investigation was to describe the neuropsychological + profile of severe acute mania by using a range of tasks selected + primarily for the detection of localized neural disruption within + the prefrontal cortex. METHOD: Fifteen acutely manic inpatients were + compared with 30 nonpsychiatric subjects on tasks from the Cambridge + Automated Neuropsychological Test Battery (Tower of London, spatial + working memory, intradimensional-extradimensional attentional shift, + and rapid visual information processing tasks) and on the Iowa Gambling + Task, Stroop Color and Word Test, a verbal fluency task, and the + California Verbal Learning Test. RESULTS: Discriminant function analysis + identified deficits in sustained attention (on the rapid visual information + processing task) and verbal learning (on the California Verbal Learning + Test) as the best indicators of manic performance, rather than deficits + on any of the tests of executive functioning. The model correctly + classified 91\% of subjects overall and 87\% of manic subjects. Manic + patients did not resemble patients with ventromedial prefrontal cortex + damage in their performance on the Iowa Gambling Task. CONCLUSIONS: + Acute mania is characterized by core deficits in verbal memory and + sustained attention against a background of milder impairments in + functions that are traditional measures of prefrontal cortex integrity + (attentional set shifting, planning, working memory). The data do + not implicate ventral prefrontal cortex disruption as a locus of + pathology in acute mania. Verbal memory and sustained attention deficits + may relate differentially to the state and trait characteristics + of bipolar disorder.}, + Author = {L. Clark and S. D. Iversen and G. M. Goodwin}, + Institution = {Department of Psychiatry, University of Oxford, UK.}, + Journal = {Am J Psychiatry}, + Keywords = {Acute Disease; Adult; Antipsychotic Agents, pharmacology/therapeutic use; Attention, drug effects/physiology; Bipolar Disorder, diagnosis/drug therapy/physiopathology; Female; Hospitalization; Humans; Male; Memory, drug effects/physiology; Neuropsychological Tests, statistics /&/ numerical data; Prefrontal Cortex, drug effects/physiopathology; Verbal Learning, drug effects/physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {10}, + Owner = {Young}, + Pages = {1605--1611}, + Pmid = {11578991}, + Timestamp = {2010.05.01}, + Title = {A neuropsychological investigation of prefrontal cortex involvement in acute mania.}, + Volume = {158}, + Year = {2001}} + +@article{Clark2009, + Author = {Clark, L. and Lawrence, A.J. and Astley-Jones, F. and Gray, N.}, + Journal = {Neuron}, + Number = {3}, + Pages = {481--490}, + Publisher = {Elsevier}, + Title = {{Gambling near-misses enhance motivation to gamble and recruit win-related brain circuitry}}, + Volume = {61}, + Year = {2009}} + +@article{Clark2003, + Author = {Clark, L. and Manes, F. and Antoun, N. 
and Sahakian, B.J. and Robbins, T.W.}, + Journal = {Neuropsychologia}, + Number = {11}, + Pages = {1474--1483}, + Publisher = {Elsevier}, + Title = {{The contributions of lesion laterality and lesion volume to decision-making impairment following frontal lobe damage}}, + Volume = {41}, + Year = {2003}} + +@article{Clark2002, + Author = {Clark, L. and Robbins, T.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Sep}, + Pages = {361}, + Title = {{{D}ecision-making deficits in drug addiction}}, + Volume = {6}, + Year = {2002}} + +@article{Clarke2001, + Author = {Clarke, B.}, + Journal = {Sankhya A}, + Pages = {229--249}, + Title = {Combining Model Selection Procedures for Online Prediction}, + Volume = {63}, + Year = {2001}} + +@article{Clarke2004, + Author = {Clarke, HF and Dalley, JW and Crofts, HS and Robbins, TW and Roberts, AC}, + Journal = {Science}, + Number = {5672}, + Pages = {878--880}, + Publisher = {American Association for the Advancement of Science}, + Title = {{Cognitive inflexibility after prefrontal serotonin depletion}}, + Volume = {304}, + Year = {2004}} + +@article{Clarke1983, + Author = {Clarke, P. B. and Kumar, R.}, + Journal = {British Journal of Pharmacology}, + Number = {2}, + Pages = {329}, + Publisher = {Nature Publishing Group}, + Title = {{The effects of nicotine on locomotor activity in non-tolerant and tolerant rats.}}, + Volume = {78}, + Year = {1983}} + +@article{Cobb1981, + Author = {Cobb, L.}, + Journal = {Behavioral Science}, + Pages = {75--78}, + Title = {Parameter Estimation for the Cusp Catastrophe Model}, + Volume = {26}, + Year = {1981}} + +@article{Cobb1978, + Author = {Cobb, L.}, + Journal = {Behavioral Science}, + Pages = {360--374}, + Title = {Stochastic Catastrophe Models and Multimodal Distributions}, + Volume = {23}, + Year = {1978}} + +@article{Cobb1983, + Author = {Cobb, L. and Koppstein, P. and Chen, N. H.}, + Journal = {Journal of the American Statistical Association}, + Pages = {124--130}, + Title = {Estimation and Moment Recursion Relations for Multimodal Distributions of the Exponential Family}, + Volume = {78}, + Year = {1983}} + +@article{Cobb1980, + Author = {Cobb, L. and Watson, B.}, + Journal = {Mathematical Modelling}, + Pages = {311--317}, + Title = {Statistical Catastrophe Theory: An Overview}, + Volume = {1}, + Year = {1980}} + +@article{Cobb1985, + Author = {Cobb, L. and Zacks, S.}, + Journal = {Journal of the American Statistical Association}, + Pages = {793--802}, + Title = {Applications of Catastrophe Theory for Statistical Modeling in the Biosciences}, + Volume = {80}, + Year = {1985}} + +@article{Cohen1994, + Author = {Cohen, J.}, + Journal = {American Psychologist}, + Pages = {997--1003}, + Title = {The Earth is Round ($p < .05$)}, + Volume = {49}, + Year = {1994}} + +@article{Cohen2002, + Author = {Cohen, J. D. and Braver, T. S. and Brown, J. W.}, + Journal = {Curr. Opin. Neurobiol.}, + Pages = {223--229}, + Title = {{{C}omputational perspectives on dopamine function in prefrontal cortex}}, + Volume = {12}, + Year = {2002}} + +@article{Cohen1996, + Author = {Cohen, J. D. and Braver, T. S. and O'Reilly, R. C.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Oct}, + Pages = {1515--1527}, + Title = {{{A} computational approach to prefrontal cortex, cognitive control and schizophrenia: recent developments and current challenges}}, + Volume = {351}, + Year = {1996}} + +@article{Cohen1997a, + Author = {Cohen, J. D. and Dunbar, K. O. and Barch, D. M. and Braver, T. 
S.},
+  Journal = {J Exp Psychol Gen},
+  Month = {Mar},
+  Pages = {37--41},
+  Title = {{{I}ssues concerning relative speed of processing hypotheses, schizophrenic performance deficits, and prefrontal function: comment on {S}chooler et al. (1997)}},
+  Volume = {126},
+  Year = {1997}}
+
+@article{Cohen2007,
+  Author = {Cohen, Jonathan D. and McClure, Samuel M. and Yu, Angela J.},
+  Journal = {Philosophical transactions of the Royal Society B},
+  Owner = {WooYoung Ahn},
+  Pages = {933--942},
+  Timestamp = {2007.12.12},
+  Title = {Should I stay or should I go? How the human brain manages the trade-off between exploitation and exploration},
+  Volume = {362},
+  Year = {2007}}
+
+@article{Cohen1997,
+  Author = {Cohen, J. D. and Perlstein, W. M. and Braver, T. S. and Nystrom, L. E. and Noll, D. C. and Jonides, J. and Smith, E. E.},
+  Journal = {Nature},
+  Month = {Apr},
+  Pages = {604--608},
+  Title = {{{T}emporal dynamics of brain activation during a working memory task}},
+  Volume = {386},
+  Year = {1997}}
+
+@article{Cohen2008,
+  Author = {Cohen, M. X.},
+  Journal = {Cogn Affect Behav Neurosci},
+  Pages = {113--125},
+  Title = {{Neurocomputational mechanisms of reinforcement-guided learning in humans: A review}},
+  Volume = {8},
+  Year = {2008}}
+
+@article{Colby2003,
+  Author = {Colby, C. R. and Whisler, K. and Steffen, C. and Nestler, E. J. and Self, D. W.},
+  Journal = {J. Neurosci.},
+  Month = {Mar},
+  Pages = {2488--2493},
+  Title = {{{S}triatal cell type-specific overexpression of {D}elta{F}os{B} enhances incentive for cocaine}},
+  Volume = {23},
+  Year = {2003}}
+
+@article{Coltheart2001,
+  Author = {Coltheart, M. and Rastle, K. and Perry, C. and Langdon, R. and Ziegler, J.},
+  Journal = {Psychological Review},
+  Pages = {204--256},
+  Title = {{DRC}: {A} Dual Route Cascaded Model of Visual Word Recognition and Reading Aloud},
+  Volume = {108},
+  Year = {2001}}
+
+@article{Colyvan2004,
+  Author = {Colyvan, M.},
+  Journal = {International Journal of Approximate Reasoning},
+  Pages = {71--85},
+  Title = {The Philosophical Significance of {C}ox's Theorem},
+  Volume = {37},
+  Year = {2004}}
+
+@article{Comings2000,
+  Abstract = {The dopaminergic and opioidergic reward pathways of the brain are
+    critical for survival since they provide the pleasure drives for
+    eating, love and reproduction; these are called 'natural rewards'
+    and involve the release of dopamine in the nucleus accumbens and
+    frontal lobes. However, the same release of dopamine and production
+    of sensations of pleasure can be produced by 'unnatural rewards'
+    such as alcohol, cocaine, methamphetamine, heroin, nicotine, marijuana,
+    and other drugs, and by compulsive activities such as gambling, eating,
+    and sex, and by risk taking behaviors. Since only a minority of individuals
+    become addicted to these compounds or behaviors, it is reasonable
+    to ask what factors distinguish those who do become addicted from
+    those who do not. It has usually been assumed that these behaviors
+    are entirely voluntary and that environmental factors play the major
+    role; however, since all of these behaviors have a significant genetic
+    component, the presence of one or more variant genes presumably act
+    as risk factors for these behaviors. Since the primary neurotransmitter
+    of the reward pathway is dopamine, genes for dopamine synthesis,
+    degradation, receptors, and transporters are reasonable candidates.
+    However, serotonin, norepinephrine, GABA, opioid, and cannabinoid
+    neurons all modify dopamine metabolism and dopamine neurons.
We have
+    proposed that defects in various combinations of the genes for these
+    neurotransmitters result in a Reward Deficiency Syndrome (RDS) and
+    that such individuals are at risk for abuse of the unnatural rewards.
+    Because of its importance, the gene for the dopamine
+    D2 receptor was a major candidate gene. Studies in the past decade
+    have shown that in various subject groups the Taq I A1 allele of
+    the DRD2 gene is associated with alcoholism, drug abuse, smoking,
+    obesity, compulsive gambling, and several personality traits. A range
+    of other dopamine, opioid, cannabinoid, norepinephrine, and related
+    genes have since been added to the list. Like other behavioral disorders,
+    these are polygenically inherited and each gene accounts for only
+    a small per cent of the variance. Techniques such as the Multivariate
+    Analysis of Associations, which simultaneously examine the contribution
+    of multiple genes, hold promise for understanding the genetic make
+    up of polygenic disorders.},
+  Author = {D. E. Comings and K. Blum},
+  Institution = {Department of Medical Genetics, City of Hope Medical Center, Duarte, CA 91010, USA. dcomings@earthlink.net},
+  Journal = {Prog Brain Res},
+  Keywords = {Attention Deficit Disorder with Hyperactivity, genetics; Behavior, Addictive, genetics; Carrier Proteins, genetics/physiology; Central Nervous System Stimulants, pharmacology; Compulsive Behavior, genetics; Dangerous Behavior; Dopamine beta-Hydroxylase, genetics; Exploratory Behavior; Genetic Heterogeneity; Genetic Predisposition to Disease; Humans; Impulsive Behavior, genetics; Models, Neurological; Monoamine Oxidase, genetics; Neurotransmitter Agents, metabolism/physiology; Nucleus Accumbens, drug effects/physiology; Opioid Peptides, genetics/physiology; Polymorphism, Restriction Fragment Length; Prefrontal Cortex, drug effects/physiology; Receptors, Adrenergic, drug effects/genetics; Receptors, Cannabinoid; Receptors, Dopamine D2, drug effects/genetics/physiology; Receptors, Dopamine D4; Receptors, Drug, drug effects/genetics; Receptors, Neurotransmitter, drug effects/genetics/physiology; Reward; Satiation, physiology; Self Stimulation, physiology; Stress Disorders, Post-Traumatic, genetics/physiopathology; Substance-Related Disorders, genetics; Tourette Syndrome, genetics/physiopathology},
+  Language = {eng},
+  Medline-Pst = {ppublish},
+  Owner = {Woo-Young Ahn},
+  Pages = {325--341},
+  Pii = {S0079-6123(00)26022-6},
+  Pmid = {11105655},
+  Timestamp = {2009.08.09},
+  Title = {Reward deficiency syndrome: genetic aspects of behavioral disorders.},
+  Volume = {126},
+  Year = {2000}}
+
+@article{Compton2005,
+  Author = {Compton, W. M. and Stein, J. B. and Robertson, E. B. and Pintello, D. and Pringle, B. and Volkow, N. D.},
+  Journal = {J Subst Abuse Treat},
+  Month = {Oct},
+  Pages = {167--172},
+  Title = {{{C}harting a course for health services research at the {N}ational {I}nstitute on {D}rug {A}buse}},
+  Volume = {29},
+  Year = {2005}}
+
+@article{Compton2006,
+  Author = {Compton, W. M. and Volkow, N. D.},
+  Journal = {Drug Alcohol Depend},
+  Month = {Jun},
+  Pages = {4--7},
+  Title = {{{A}buse of prescription drugs and the risk of addiction}},
+  Volume = {83 Suppl 1},
+  Year = {2006}}
+
+@article{Compton2006a,
+  Author = {Compton, W. M. and Volkow, N.
D.}, + Journal = {Drug Alcohol Depend}, + Month = {Feb}, + Pages = {103--107}, + Title = {{{M}ajor increases in opioid analgesic abuse in the {U}nited {S}tates: concerns and strategies}}, + Volume = {81}, + Year = {2006}} + +@article{Constans1995a, + Author = {Constans, J. M. and Meyerhoff, D. J. and Gerson, J. and MacKay, S. and Norman, D. and Fein, G. and Weiner, M. W.}, + Journal = {Radiology}, + Month = {Nov}, + Pages = {517--523}, + Title = {{{H}-1 {M}{R} spectroscopic imaging of white matter signal hyperintensities: {A}lzheimer disease and ischemic vascular dementia}}, + Volume = {197}, + Year = {1995}} + +@article{Constans1995, + Author = {Constans, J. M. and Meyerhoff, D. J. and Norman, D. and Fein, G. and Weiner, M. W.}, + Journal = {Neuroradiology}, + Month = {Nov}, + Pages = {615--623}, + Title = {{1{H} and 31{P} magnetic resonance spectroscopic imaging of white matter signal hyperintensity areas in elderly subjects}}, + Volume = {37}, + Year = {1995}} + +@article{Contreras2007, + Abstract = {Addiction profoundly alters motivational circuits so that drugs become + powerful reinforcers of behavior. The interoceptive system continuously + updates homeostatic and emotional information that are important + elements in motivational decisions. We tested the idea that interoceptive + information is essential in drug craving and in the behavioral signs + of malaise. We inactivated the primary interoceptive cortex in amphetamine-experienced + rats, which prevented the urge to seek amphetamine in a place preference + task. Interoceptive insula inactivation also blunted the signs of + malaise induced by acute lithium administration. Drug-seeking and + malaise both induced Fos expression, a marker of neuronal activation, + in the insula. We conclude that the insular cortex is a key structure + in the perception of bodily needs that provides direction to motivated + behaviors.}, + Author = {Marco Contreras and Francisco Ceric and Fernando Torrealba}, + Doi = {10.1126/science.1145590}, + Journal = {Science}, + Keywords = {Amphetamine-Related Disorders, physiopathology; Animals; Behavior, Addictive; Behavior, Animal, drug effects; Cerebral Cortex, physiology/physiopathology; Conditioning (Psychology); Dextroamphetamine, administration /&/ dosage; Fatigue, chemically induced; Lidocaine, administration /&/ dosage/pharmacology; Lithium Chloride, administration /&/ dosage/pharmacology; Male; Motor Activity, drug effects; Rats}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {5850}, + Owner = {Woo-Young Ahn}, + Pages = {655--658}, + Pii = {318/5850/655}, + Pmid = {17962567}, + Timestamp = {2009.08.05}, + Title = {Inactivation of the interoceptive insula disrupts drug craving and malaise induced by lithium.}, + Url = {http://dx.doi.org/10.1126/science.1145590}, + Volume = {318}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1126/science.1145590}} + +@article{Cools2009, + Author = {Cools, R. and Frank, M. J. and Gibbs, S. E. and Miyakawa, A. and Jagust, W. and D'Esposito, M.}, + Journal = {J. Neurosci.}, + Month = {Feb}, + Pages = {1538--1543}, + Title = {{{S}triatal dopamine predicts outcome-specific reversal learning and its sensitivity to dopaminergic drug administration}}, + Volume = {29}, + Year = {2009}} + +@article{Cooper2008, + Author = {Cooper, J. C. 
and Knutson, B.}, + Journal = {Neuroimage}, + Month = {Jan}, + Pages = {538--547}, + Title = {{{V}alence and salience contribute to nucleus accumbens activation}}, + Volume = {39}, + Year = {2008}} + +@article{Coricelli2005, + Author = {Coricelli, G. and Critchley, H. D. and Joffily, M. and O'Doherty, J. P. and Sirigu, A. and Dolan, R. J.}, + Journal = {Nat. Neurosci.}, + Month = {Sep}, + Pages = {1255--1262}, + Title = {{{R}egret and its avoidance: a neuroimaging study of choice behavior}}, + Volume = {8}, + Year = {2005}} + +@article{Cornfield1969, + Author = {Cornfield, J.}, + Journal = {Biometrics}, + Pages = {617--657}, + Title = {The {B}ayesian Outlook and Its Application}, + Volume = {25}, + Year = {1969}} + +@article{Cornfield1966, + Author = {Cornfield, J.}, + Journal = {The American Statistician}, + Pages = {18--23}, + Title = {Sequential Trials, Sequential Analysis, and the Likelihood Principle}, + Volume = {20}, + Year = {1966}} + +@article{Cornish2002, + Author = {Cornish, J. W. and Herman, B. H. and Ehrman, R. N. and Robbins, S. J. and Childress, A. R. and Bead, V. and Esmonde, C. A. and Martz, K. and Poole, S. and Caruso, F. S. and O'Brien, C. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Jul}, + Pages = {177--183}, + Title = {{{A} randomized, double-blind, placebo-controlled safety study of high-dose dextromethorphan in methadone-maintained male inpatients}}, + Volume = {67}, + Year = {2002}} + +@article{Cortes1989, + Author = {Cortes, R. and Gueye, B. and Pazos, A. and Probst, A. and Palacios, JM}, + Journal = {Neuroscience}, + Number = {2}, + Pages = {263}, + Title = {{Dopamine receptors in human brain: autoradiographic distribution of D1 sites.}}, + Volume = {28}, + Year = {1989}} + +@article{Cortina1997, + Author = {Cortina, J. M. and Dunlap, W. P.}, + Journal = {Psychological Methods}, + Pages = {161--172}, + Title = {On the Logic and Purpose of Significance Testing}, + Volume = {2}, + Year = {1997}} + +@article{Cosenza2000, + Author = {Cosenza, M. and Gifford, A. N. and Gatley, S. J. and Pyatt, B. and Liu, Q. and Makriyannis, A. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Dec}, + Pages = {477--482}, + Title = {{{L}ocomotor activity and occupancy of brain cannabinoid {C}{B}1 receptors by the antagonist/inverse agonist {A}{M}281}}, + Volume = {38}, + Year = {2000}} + +@article{Cousins2009, + __Markedentry = {[Young]}, + Abstract = {OBJECTIVE: Despite effective pharmacological treatments for bipolar + disorder, we still lack a comprehensive pathophysiological model + of the illness. Recent neurobiological research has implicated a + number of key brain regions and neuronal components in the behavioural + and cognitive manifestations of bipolar disorder. Dopamine has previously + been investigated in some depth in bipolar disorder, but of late + has not been a primary focus of attention. This article examines + the role of dopamine in bipolar disorder, incorporating recent advances + into established models where possible. METHODS: A critical evaluation + of the literature was undertaken, including a review of behavioural, + neurochemical, receptor, and imaging studies, as well as genetic + studies focusing on dopamine receptors and related metabolic pathways. + In addition, pharmacologic manipulation of the central dopaminergic + pathways and comparisons with other disease states such as schizophrenia + were considered, principally as a means of exploring the hypothesised + models. 
RESULTS: Multiple lines of evidence, including data from + pharmacological interventions and structural and functional magnetic + resonance imaging studies, suggest that the dopaminergic system may + play a central role in bipolar disorder. CONCLUSION: Future research + into the pathophysiological mechanisms of bipolar disorder and the + development of new treatments for bipolar disorder should focus on + the dopaminergic system.}, + Author = {David A Cousins and Kelly Butts and Allan H Young}, + Doi = {10.1111/j.1399-5618.2009.00760.x}, + Institution = {Newcastle Magnetic Resonance Centre, Campus for Ageing and Vitality, Newcastle University, Newcastle upon Tyne, UK. d.a.cousins@ncl.ac.uk}, + Journal = {Bipolar Disord}, + Keywords = {Animals; Antimanic Agents, therapeutic use; Bipolar Disorder, metabolism/pathology; Brain, metabolism; Disease Models, Animal; Dopamine, metabolism; Humans; Magnetic Resonance Imaging, methods}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {8}, + Owner = {Young}, + Pages = {787--806}, + Pii = {BDI760}, + Pmid = {19922550}, + Timestamp = {2010.05.01}, + Title = {The role of dopamine in bipolar disorder.}, + Url = {http://dx.doi.org/10.1111/j.1399-5618.2009.00760.x}, + Volume = {11}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1399-5618.2009.00760.x}} + +@article{Cowan2004, + Abstract = {We assessed a hypothesis that working memory capacity should include + a constant number of separate mental units, or chunks (cf. Miller, + 1956). Because of the practical difficulty of measuring chunks, this + hypothesis has not been tested previously, despite wide attention + to Miller's article. We used a training procedure to manipulate the + strength of associations between pairs of words to be included in + an immediate serial-recall task. Although the amount of training + on associations clearly increased the availability of two-item chunks + and therefore the number of items correct in list recall, the number + of total chunks recalled (singletons plus two-word chunks) appeared + to remain approximately constant across association strengths, supporting + a hypothesis of constant capacity.}, + Author = {Nelson Cowan and Zhijian Chen and Jeffrey N Rouder}, + Doi = {10.1111/j.0956-7976.2004.00732.x}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia 65211, USA. cowann@missouri.edu}, + Journal = {Psychol Sci}, + Keywords = {Adult; Female; Humans; Logic; Male; Memory; Mental Recall}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {9}, + Owner = {Woo-Young Ahn}, + Pages = {634--640}, + Pii = {PSCI732}, + Pmid = {15327636}, + Timestamp = {2009.08.15}, + Title = {Constant capacity in an immediate serial-recall task: a logical sequel to Miller (1956).}, + Url = {http://dx.doi.org/10.1111/j.0956-7976.2004.00732.x}, + Volume = {15}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.0956-7976.2004.00732.x}} + +@article{Cowan2009, + Abstract = {Bays and Husain (Reports, 8 August 2008, p. 851) reported that human + working memory, the limited information currently in mind, reflects + resources distributed across all items in an array. In an alternative + interpretation, memory is limited to several well-represented items. 
+ We argue that this item-limit model fits the extant data better than + the distributed-resources model and is more interpretable theoretically.}, + Author = {Nelson Cowan and Jeffrey N Rouder}, + Doi = {10.1126/science.1166478}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia, MO 65211, USA. CowanN@missouri.edu}, + Journal = {Science}, + Keywords = {Humans; Memory, Short-Term, physiology; Models, Neurological; Visual Perception, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {5916}, + Owner = {Woo-Young Ahn}, + Pages = {877; author reply 877}, + Pii = {323/5916/877c}, + Pmid = {19213899}, + Timestamp = {2009.08.15}, + Title = {Comment on "Dynamic shifts of limited working memory resources in human vision".}, + Url = {http://dx.doi.org/10.1126/science.1166478}, + Volume = {323}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1126/science.1166478}} + +@article{Cowan2003, + Author = {Cowan, R. L. and Lyoo, I. K. and Sung, S. M. and Ahn, K. H. and Kim, M. J. and Hwang, J. and Haga, E. and Vimal, R. L. and Lukas, S. E. and Renshaw, P. F.}, + Journal = {Drug Alcohol Depend}, + Month = {Dec}, + Pages = {225--235}, + Title = {{{R}educed cortical gray matter density in human {M}{D}{M}{A} ({E}cstasy) users: a voxel-based morphometry study}}, + Volume = {72}, + Year = {2003}} + +@article{Cox1971, + Author = {Cox, D. R.}, + Journal = {Journal of the Royal Statistical Society Series B}, + Pages = {251--255}, + Title = {The Choice Between Alternative Ancillary Statistics}, + Volume = {33}, + Year = {1971}} + +@article{Cox1958, + Author = {Cox, D. R.}, + Journal = {The Annals of Mathematical Statistics}, + Pages = {357--372}, + Title = {Some Problems Connected with Statistical Inference}, + Volume = {29}, + Year = {1958}} + +@book{Cox1970, + Address = {London}, + Author = {Cox, D. R. and Miller, H. D.}, + Publisher = {Methuen}, + Title = {The Theory of Stochastic Processes}, + Year = {1970}} + +@article{Cox1946, + Author = {Cox, R. T.}, + Journal = {The American Journal of Physics}, + Pages = {1--13}, + Title = {Probability, Frequency and Reasonable Expectation}, + Volume = {14}, + Year = {1946}} + +@article{Coyle2003, + Author = {Coyle, T. R.}, + Journal = {Intelligence}, + Pages = {567--587}, + Title = {A Review of the Worst Performance Rule: Evidence, Theory, and Alternative Hypotheses}, + Volume = {31}, + Year = {2003}} + +@article{Craig2009, + Author = {Craig, A. D.}, + Journal = {Nature Reviews Neuroscience}, + Number = {6}, + Pages = {466}, + Publisher = {Nature Publishing Group}, + Title = {{A rat is not a monkey is not a human: comment on Mogil (Nature Rev. Neurosci. 10, 283--294 (2009))}}, + Volume = {10}, + Year = {2009}} + +@article{Craig2009a, + Author = {Craig, A. D.}, + Journal = {Nat. Rev. Neurosci.}, + Month = {Jan}, + Pages = {59--70}, + Title = {{{H}ow do you feel--now? {T}he anterior insula and human awareness}}, + Volume = {10}, + Year = {2009}} + +@article{Craig2009b, + Author = {Craig, A. D.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Jul}, + Pages = {1933--1942}, + Title = {{{E}motional moments across time: a possible neural basis for time perception in the anterior insula}}, + Volume = {364}, + Year = {2009}} + +@article{Craig2009c, + Author = {Craig, A. D.}, + Journal = {Brain}, + Month = {Mar}, + Title = {{{D}isembodied hallucinatory voices: {C}omment on {S}ommer et al., 2008 {B}rain 131, 3169-77}}, + Year = {2009}} + +@article{Craig2009d, + Author = {Craig, A. 
D.}, + Journal = {Nat. Rev. Neurosci.}, + Month = {Jun}, + Pages = {466}, + Title = {{{A} rat is not a monkey is not a human: comment on {M}ogil ({N}ature {R}ev. {N}eurosci. 10, 283-294 (2009))}}, + Volume = {10}, + Year = {2009}} + +@article{Craig2008, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {May}, + Pages = {315--328}, + Title = {{{R}etrograde analyses of spinothalamic projections in the macaque monkey: input to the ventral lateral nucleus}}, + Volume = {508}, + Year = {2008}} + +@article{Craig2008a, + Author = {Craig, A. D.}, + Journal = {Pain}, + Month = {Apr}, + Pages = {215--216}, + Title = {{{C}an the basis for central neuropathic pain be identified by using a thermal grill?}}, + Volume = {135}, + Year = {2008}} + +@article{Craig2006a, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Dec}, + Pages = {965--978}, + Title = {{{R}etrograde analyses of spinothalamic projections in the macaque monkey: input to ventral posterior nuclei}}, + Volume = {499}, + Year = {2006}} + +@article{Craig2005, + Author = {Craig, A. D.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Dec}, + Pages = {566--571}, + Title = {{{F}orebrain emotional asymmetry: a neuroanatomical basis?}}, + Volume = {9}, + Year = {2005}} + +@article{Craig2004, + Author = {Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Oct}, + Pages = {2604--2609}, + Title = {{{L}amina {I}, but not lamina {V}, spinothalamic neurons exhibit responses that correspond with burning pain}}, + Volume = {92}, + Year = {2004}} + +@article{Craig2004a, + Author = {Craig, A. D.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Jun}, + Pages = {239--241}, + Title = {{{H}uman feelings: why are some more aware than others?}}, + Volume = {8}, + Year = {2004}} + +@article{Craig2004b, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Sep}, + Pages = {119--148}, + Title = {{{D}istribution of trigeminothalamic and spinothalamic lamina {I} terminations in the macaque monkey}}, + Volume = {477}, + Year = {2004}} + +@article{Craig2003, + Author = {Craig, A. D.}, + Journal = {Annu. Rev. Neurosci.}, + Pages = {1--30}, + Title = {{{P}ain mechanisms: labeled lines versus convergence in central processing}}, + Volume = {26}, + Year = {2003}} + +@article{Craig2003a, + Author = {Craig, A. D.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Aug}, + Pages = {500--505}, + Title = {{{I}nteroception: the sense of the physiological condition of the body}}, + Volume = {13}, + Year = {2003}} + +@article{Craig2003b, + Author = {Craig, A. D.}, + Journal = {Somatosens Mot Res}, + Pages = {209--222}, + Title = {{{D}istribution of trigeminothalamic and spinothalamic lamina {I} terminations in the cat}}, + Volume = {20}, + Year = {2003}} + +@article{Craig2003c, + Author = {Craig, A. D.}, + Journal = {Trends Neurosci.}, + Month = {Jun}, + Pages = {303--307}, + Title = {{{A} new view of pain as a homeostatic emotion}}, + Volume = {26}, + Year = {2003}} + +@article{Craig2002c, + Author = {Craig, A. D.}, + Journal = {Nat. Rev. Neurosci.}, + Month = {Aug}, + Pages = {655--666}, + Title = {{{H}ow do you feel? {I}nteroception: the sense of the physiological condition of the body}}, + Volume = {3}, + Year = {2002}} + +@article{Craig2000a, + Author = {Craig, A. D.}, + Journal = {Prog. Brain Res.}, + Pages = {137--151}, + Title = {{{T}he functional anatomy of lamina {I} and its role in post-stroke central pain}}, + Volume = {129}, + Year = {2000}} + +@article{Craig1996a, + Author = {Craig, A. 
D.}, + Journal = {Prog. Brain Res.}, + Pages = {225--242}, + Title = {{{A}n ascending general homeostatic afferent pathway originating in lamina {I}}}, + Volume = {107}, + Year = {1996}} + +@article{Craig1995, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Oct}, + Pages = {225--248}, + Title = {{{D}istribution of brainstem projections from spinal lamina {I} neurons in the cat and the monkey}}, + Volume = {361}, + Year = {1995}} + +@article{Craig1993, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {May}, + Pages = {517--530}, + Title = {{{P}ropriospinal input to thoracolumbar sympathetic nuclei from cervical and lumbar lamina {I} neurons in the cat and the monkey}}, + Volume = {331}, + Year = {1993}} + +@article{Craig1992a, + Author = {Craig, A. D.}, + Journal = {Brain Res.}, + Month = {Jul}, + Pages = {325--328}, + Title = {{{S}pinal and trigeminal lamina {I} input to the locus coeruleus anterogradely labeled with {P}haseolus vulgaris leucoagglutinin ({P}{H}{A}-{L}) in the cat and the monkey}}, + Volume = {584}, + Year = {1992}} + +@article{Craig1991b, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Nov}, + Pages = {377--393}, + Title = {{{S}pinal distribution of ascending lamina {I} axons anterogradely labeled with {P}haseolus vulgaris leucoagglutinin ({P}{H}{A}-{L}) in the cat}}, + Volume = {313}, + Year = {1991}} + +@article{Craig1978a, + Author = {Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Oct}, + Pages = {729--743}, + Title = {{{S}pinal and medullary input to the lateral cervical nucleus}}, + Volume = {181}, + Year = {1978}} + +@article{Craig1976, + Author = {Craig, A. D.}, + Journal = {Neurosci. Lett.}, + Month = {Nov}, + Pages = {173--177}, + Title = {{{S}pinocervical tract cells in cat and dog, labeled by the retrograde transport of horseradish peroxidase}}, + Volume = {3}, + Year = {1976}} + +@article{Craig2002b, + Author = {Craig, A. D. and Andrew, D.}, + Journal = {J. Neurophysiol.}, + Month = {Apr}, + Pages = {1902--1914}, + Title = {{{R}esponses of spinothalamic lamina {I} neurons to repeated brief contact heat stimulation in the cat}}, + Volume = {87}, + Year = {2002}} + +@article{Craig2002a, + Author = {Craig, A. D. and Blomqvist, A.}, + Journal = {J Pain}, + Month = {Apr}, + Pages = {95--101}, + Title = {{{I}s there a specific lamina {I} spinothalamocortical pathway for pain and temperature sensations in primates?}}, + Volume = {3}, + Year = {2002}} + +@article{Craig1992, + Author = {Craig, A. D. and Broman, J. and Blomqvist, A.}, + Journal = {J. Comp. Neurol.}, + Month = {Aug}, + Pages = {99--110}, + Title = {{{L}amina {I} spinocervical tract terminations in the medial part of the lateral cervical nucleus in the cat}}, + Volume = {322}, + Year = {1992}} + +@article{Craig1985b, + Author = {Craig, A. D. and Burton, H.}, + Journal = {Exp Brain Res}, + Pages = {227--254}, + Title = {{{T}he distribution and topographical organization in the thalamus of anterogradely-transported horseradish peroxidase after spinal injections in cat and raccoon}}, + Volume = {58}, + Year = {1985}} + +@article{Craig1981, + Author = {Craig, A. D. and Burton, H.}, + Journal = {J. Neurophysiol.}, + Month = {Mar}, + Pages = {443--466}, + Title = {{{S}pinal and medullary lamina {I} projection to nucleus submedius in medial thalamus: a possible pain center}}, + Volume = {45}, + Year = {1981}} + +@article{Craig1979, + Author = {Craig, A. D. and Burton, H.}, + Journal = {J. Comp. 
Neurol.}, + Month = {May}, + Pages = {329--346}, + Title = {{{T}he lateral cervical nucleus in the cat: anatomic organization of cervicothalamic neurons}}, + Volume = {185}, + Year = {1979}} + +@article{Craig1994b, + Author = {Craig, A. D. and Bushnell, M. C.}, + Journal = {Science}, + Month = {Jul}, + Pages = {252--255}, + Title = {{{T}he thermal grill illusion: unmasking the burn of cold pain}}, + Volume = {265}, + Year = {1994}} + +@article{Craig1994a, + Author = {Craig, A. D. and Bushnell, M. C. and Zhang, E. T. and Blomqvist, A.}, + Journal = {Nature}, + Pages = {770--773}, + Title = {{{A} thalamic nucleus specific for pain and temperature sensation}}, + Volume = {372}, + Year = {1994}} + +@article{Craig2000, + Author = {Craig, A. D. and Chen, K. and Bandy, D. and Reiman, E. M.}, + Journal = {Nat. Neurosci.}, + Month = {Feb}, + Pages = {184--190}, + Title = {{{T}hermosensory activation of insular cortex}}, + Volume = {3}, + Year = {2000}} + +@article{Craig2001a, + Author = {Craig, A. D. and Dostrovsky, J. O.}, + Journal = {J. Neurophysiol.}, + Month = {Aug}, + Pages = {856--870}, + Title = {{{D}ifferential projections of thermoreceptive and nociceptive lamina {I} trigeminothalamic and spinothalamic neurons in the cat}}, + Volume = {86}, + Year = {2001}} + +@article{Craig1991a, + Author = {Craig, A. D. and Dostrovsky, J. O.}, + Journal = {Exp Brain Res}, + Pages = {470--474}, + Title = {{{T}hermoreceptive lamina {I} trigeminothalamic neurons project to the nucleus submedius in the cat}}, + Volume = {85}, + Year = {1991}} + +@article{Craig1988, + Author = {Craig, A. D. and Heppelmann, B. and Schaible, H. G.}, + Journal = {J. Comp. Neurol.}, + Month = {Oct}, + Pages = {279--288}, + Title = {{{T}he projection of the medial and posterior articular nerves of the cat's knee to the spinal cord}}, + Volume = {276}, + Year = {1988}} + +@article{Craig1991, + Author = {Craig, A. D. and Hunsley, S. J.}, + Journal = {Brain Res.}, + Month = {Aug}, + Pages = {93--97}, + Title = {{{M}orphine enhances the activity of thermoreceptive cold-specific lamina {I} spinothalamic neurons in the cat}}, + Volume = {558}, + Year = {1991}} + +@article{Craig1985a, + Author = {Craig, A. D. and Kniffki, K. D.}, + Journal = {J. Physiol. (Lond.)}, + Month = {Aug}, + Pages = {197--221}, + Title = {{{S}pinothalamic lumbosacral lamina {I} cells responsive to skin and muscle stimulation in the cat}}, + Volume = {365}, + Year = {1985}} + +@article{Craig2001, + Author = {Craig, A. D. and Krout, K. and Andrew, D.}, + Journal = {J. Neurophysiol.}, + Month = {Sep}, + Pages = {1459--1480}, + Title = {{{Q}uantitative response characteristics of thermoreceptive and nociceptive lamina {I} spinothalamic neurons in the cat}}, + Volume = {86}, + Year = {2001}} + +@article{Craig1989, + Author = {Craig, A. D. and Linington, A. J. and Kniffki, K. D.}, + Journal = {Exp Brain Res}, + Pages = {431--436}, + Title = {{{S}ignificant differences in the retrograde labeling of spinothalamic tract cells by horseradish peroxidase and the fluorescent tracers fast blue and diamidino yellow}}, + Volume = {74}, + Year = {1989}} + +@article{Craig1989a, + Author = {Craig, A. D. and Linington, A. J. and Kniffki, K. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Nov}, + Pages = {568--585}, + Title = {{{C}ells of origin of spinothalamic tract projections to the medial and lateral thalamus in the cat}}, + Volume = {289}, + Year = {1989}} + +@article{Craig1983, + Author = {Craig, A. D. and Mense, S.}, + Journal = {Neurosci. 
Lett.}, + Month = {Nov}, + Pages = {233--238}, + Title = {{{T}he distribution of afferent fibers from the gastrocnemius-soleus muscle in the dorsal horn of the cat, as revealed by the transport of horseradish peroxidase}}, + Volume = {41}, + Year = {1983}} + +@article{Craig1996, + Author = {Craig, A. D. and Reiman, E. M. and Evans, A. and Bushnell, M. C.}, + Journal = {Nature}, + Month = {Nov}, + Pages = {258--260}, + Title = {{{F}unctional imaging of an illusion of pain}}, + Volume = {384}, + Year = {1996}} + +@article{Craig1987, + Author = {Craig, A. D. and Sailer, S. and Kniffki, K. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Sep}, + Pages = {214--222}, + Title = {{{O}rganization of anterogradely labeled spinocervical tract terminations in the lateral cervical nucleus of the cat}}, + Volume = {263}, + Year = {1987}} + +@article{Craig1994, + Author = {Craig, A. D. and Serrano, L. P.}, + Journal = {Brain Res.}, + Month = {Feb}, + Pages = {233--244}, + Title = {{{E}ffects of systemic morphine on lamina {I} spinothalamic tract neurons in the cat}}, + Volume = {636}, + Year = {1994}} + +@article{Craig1985, + Author = {Craig, A. D. and Tapper, D. N.}, + Journal = {J. Neurophysiol.}, + Month = {Apr}, + Pages = {995--1015}, + Title = {{{A} dorsal spinal neural network in cat. {I}{I}{I}. {D}ynamic nonlinear analysis of responses to random stimulation of single type 1 cutaneous input fibers}}, + Volume = {53}, + Year = {1985}} + +@article{Craig1978, + Author = {Craig, A. D. and Tapper, D. N.}, + Journal = {J. Neurophysiol.}, + Month = {Nov}, + Pages = {1511--1534}, + Title = {{{L}ateral cervical nucleus in the cat: functional organization and characteristics}}, + Volume = {41}, + Year = {1978}} + +@article{Craig1982, + Author = {Craig, A. D. and Wiegand, S. J. and Price, J. L.}, + Journal = {J. Comp. Neurol.}, + Month = {Mar}, + Pages = {28--48}, + Title = {{{T}he thalamo-cortical projection of the nucleus submedius in the cat}}, + Volume = {206}, + Year = {1982}} + +@article{Craig2006, + Author = {Craig, A. D. and Zhang, E. T.}, + Journal = {J. Comp. Neurol.}, + Month = {Dec}, + Pages = {953--964}, + Title = {{{R}etrograde analyses of spinothalamic projections in the macaque monkey: input to posterolateral thalamus}}, + Volume = {499}, + Year = {2006}} + +@article{Craig2002, + Author = {Craig, A. D. and Zhang, E. T. and Blomqvist, A.}, + Journal = {Pain}, + Month = {May}, + Pages = {105--115}, + Title = {{{A}ssociation of spinothalamic lamina {I} neurons and their ascending axons with calbindin-immunoreactivity in monkey and human}}, + Volume = {97}, + Year = {2002}} + +@article{Craig1999, + Author = {Craig, A. D. and Zhang, E. T. and Blomqvist, A.}, + Journal = {J. Comp. Neurol.}, + Month = {Feb}, + Pages = {221--234}, + Title = {{{A} distinct thermoreceptive subregion of lamina {I} in nucleus caudalis of the owl monkey}}, + Volume = {404}, + Year = {1999}} + +@article{Crane1989, + Author = {Crane, A. M. and Porrino, L. J.}, + Journal = {Brain Res.}, + Month = {Oct}, + Pages = {87--92}, + Title = {{{A}daptation of the quantitative 2-[14{C}]deoxyglucose method for use in freely moving rats}}, + Volume = {499}, + Year = {1989}} + +@article{Crato2002, + Author = {Crato, N. and Ray, B. K.}, + Journal = {Journal of Statistical Planning and Inference}, + Pages = {283--297}, + Title = {Semi--parametric Smoothing Estimators for Long--memory Processes with Added Noise}, + Volume = {105}, + Year = {2002}} + +@article{Crato1996, + Author = {Crato, N. and Ray, B. 
K.}, + Journal = {Journal of Forecasting}, + Pages = {107--125}, + Title = {Model Selection and Forecasting for Long--range Dependent Processes}, + Volume = {15}, + Year = {1996}} + +@article{Crauel1998, + Author = {Crauel, H. and Flandoli, F.}, + Journal = {Journal of Dynamics and Differential Equations}, + Pages = {259--274}, + Title = {Additive Noise Destroys a Pitchfork Bifurcation}, + Volume = {10}, + Year = {1998}} + +@article{Critchley2001, + Abstract = {We used functional magnetic resonance neuroimaging to measure brain + activity during delay between reward-related decisions and their + outcomes, and the modulation of this delay activity by uncertainty + and arousal. Feedback, indicating financial gain or loss, was given + following a fixed delay. Anticipatory arousal was indexed by galvanic + skin conductance. Delay-period activity was associated with bilateral + activation in orbital and medial prefrontal, temporal, and right + parietal cortices. During delay, activity in anterior cingulate and + orbitofrontal cortices was modulated by outcome uncertainty, whereas + anterior cingulate, dorsolateral prefrontal, and parietal cortices + activity was modulated by degree of anticipatory arousal. A distinct + region of anterior cingulate was commonly activated by both uncertainty + and arousal. Our findings highlight distinct contributions of cognitive + uncertainty and autonomic arousal to anticipatory neural activity + in prefrontal cortex.}, + Author = {H. D. Critchley and C. J. Mathias and R. J. Dolan}, + Institution = {Wellcome Department of Cognitive Neurology, 12 Queen Square, Institute of Neurology, University College London, WC1N 3BG, London, United Kingdom. h.critchley@fil.ion.ucl.ac.uk}, + Journal = {Neuron}, + Keywords = {Adult; Arousal, physiology; Cerebral Cortex, physiology; Decision Making, physiology; Female; Gyrus Cinguli, physiology; Humans; Magnetic Resonance Imaging; Male; Prefrontal Cortex, physiology; Probability; Reward}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {537--545}, + Pii = {S0896-6273(01)00225-2}, + Pmid = {11239442}, + Timestamp = {2009.08.07}, + Title = {Neural activity in the human brain relating to uncertainty and arousal during anticipation.}, + Volume = {29}, + Year = {2001}} + +@article{Critchley2003, + Author = {Critchley, H. D. and Mathias, C. J. and Josephs, O. and O'Doherty, J. and Zanini, S. and Dewar, B. K. and Cipolotti, L. and Shallice, T. and Dolan, R. J.}, + Journal = {Brain}, + Month = {Oct}, + Pages = {2139--2152}, + Title = {{{H}uman cingulate cortex and autonomic control: converging neuroimaging and clinical evidence}}, + Volume = {126}, + Year = {2003}} + +@article{Critchley2005, + Author = {Critchley, H. D. and Rotshtein, P. and Nagai, Y. and O'Doherty, J. and Mathias, C. J. and Dolan, R. J.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {751--762}, + Title = {{{A}ctivity in the human brain predicting differential heart rate responses to emotional facial expressions}}, + Volume = {24}, + Year = {2005}} + +@article{crombag2005opposite, + Author = {Crombag, H.S. and Gorny, G. and Li, Y. and Kolb, B. and Robinson, T.E.}, + Journal = {Cerebral Cortex}, + Number = {3}, + Pages = {341--348}, + Publisher = {Oxford Univ Press}, + Title = {{Opposite effects of amphetamine self-administration experience on dendritic spines in the medial and orbital prefrontal cortex}}, + Volume = {15}, + Year = {2005}} + +@article{Cromwell2005, + Author = {Cromwell, H. C. 
and Hassani, O. K. and Schultz, W.}, + Journal = {Exp Brain Res}, + Month = {May}, + Pages = {520--525}, + Title = {{{R}elative reward processing in primate striatum}}, + Volume = {162}, + Year = {2005}} + +@article{Cromwell2003, + Author = {Cromwell, H. C. and Schultz, W.}, + Journal = {J. Neurophysiol.}, + Month = {May}, + Pages = {2823--2838}, + Title = {{{E}ffects of expectations for different reward magnitudes on neuronal activity in primate striatum}}, + Volume = {89}, + Year = {2003}} + +@article{Crone2004, + Author = {Crone, E. A. and {van der Molen}, M. W.}, + Journal = {Developmental Neuropsychology}, + Pages = {251--279}, + Title = {Developmental Changes in Real--Life Decision--Making: {P}erformance on a Gambling Task Previously Shown to Depend on the Ventromedial Prefrontal Cortex}, + Volume = {25}, + Year = {2004}} + +@article{Crowther1995, + Author = {Crowther, C. S. and Batchelder, W. H. and Hu, X.}, + Journal = {Psychological Review}, + Pages = {396--408}, + Title = {A Measurement--Theoretic Analysis of the Fuzzy Logical Model of Perception}, + Volume = {102}, + Year = {1995}} + +@article{Cui2007, + Author = {Cui, X. and Jeter, C. B. and Yang, D. and Montague, P. R. and Eagleman, D. M.}, + Journal = {Vision Res.}, + Month = {Feb}, + Pages = {474--478}, + Title = {{{V}ividness of mental imagery: individual variability can be measured objectively}}, + Volume = {47}, + Year = {2007}} + +@article{Cumminginpress, + Author = {Cumming, G.}, + Journal = {Psychological Science}, + Title = {Understanding the Average Probability of Replication: Comment on Killeen (2005)}, + Year = {in press}} + +@article{Cumming2007, + Author = {Cumming, G.}, + Title = {Replication and $p$ values: $p$ values predict the future vaguely, but confidence intervals do better. {M}anuscript submitted for publication.}, + Year = {2007}} + +@article{Dager2004, + Author = {Dager, S. R. and Friedman, S. D. and Parow, A. and Demopulos, C. and Stoll, A. L. and Lyoo, I. K. and Dunner, D. L. and Renshaw, P. F.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {May}, + Pages = {450--458}, + Title = {{{B}rain metabolic alterations in medication-free patients with bipolar disorder}}, + Volume = {61}, + Year = {2004}} + +@article{Dagher2007, + Author = {Dagher, A.}, + Journal = {Neuron}, + Month = {Jan}, + Pages = {7--8}, + Title = {{{S}hopping centers in the brain}}, + Volume = {53}, + Year = {2007}} + +@article{Daglish2001, + Author = {Daglish, M. R. and Weinstein, A. and Malizia, A. L. and Wilson, S. and Melichar, J. K. and Britten, S. and Brewer, C. and Lingford-Hughes, A. and Myles, J. S. and Grasby, P. and Nutt, D. J.}, + Journal = {Am J Psychiatry}, + Month = {Oct}, + Pages = {1680--1686}, + Title = {{{C}hanges in regional cerebral blood flow elicited by craving memories in abstinent opiate-dependent subjects}}, + Volume = {158}, + Year = {2001}} + +@article{Daglish2003, + Author = {Daglish, M. R. and Weinstein, A. and Malizia, A. L. and Wilson, S. and Melichar, J. K. and Lingford-Hughes, A. and Myles, J. S. and Grasby, P. and Nutt, D. J.}, + Journal = {Neuroimage}, + Month = {Dec}, + Pages = {1964--1970}, + Title = {{{F}unctional connectivity analysis of the neural circuits of opiate craving: "more" rather than "different"?}}, + Volume = {20}, + Year = {2003}} + +@article{Dalley2009, + Author = {Dalley, J. W. 
and Everitt, B.J.},
+  Journal = {Seminars in Cell and Developmental Biology},
+  Publisher = {Elsevier},
+  Title = {{Dopamine receptors in the learning, memory and drug reward circuitry}},
+  Year = {2009}}
+
+@article{Dalley2007,
+  Journal = {Science},
+  Pages = {1267--1270},
+  Title = {{{N}ucleus accumbens {D}2/3 receptors predict trait impulsivity and cocaine reinforcement}},
+  Volume = {315},
+  Year = {2007}}
+
+@article{dalley2005time,
+  Author = {Dalley, J. W. and Laane, K. and Theobald, D.E.H. and Armstrong, H.C. and Corlett, P.R. and Chudasama, Y. and Robbins, T.W.},
+  Journal = {Proceedings of the National Academy of Sciences},
+  Number = {17},
+  Pages = {6189--6194},
+  Publisher = {National Acad Sciences},
+  Title = {{Time-limited modulation of appetitive Pavlovian memory by D1 and NMDA receptors in the nucleus accumbens}},
+  Volume = {102},
+  Year = {2005}}
+
+@book{Damasio1999,
+  Author = {Damasio, A.R.},
+  Publisher = {Harcourt},
+  Title = {{The feeling of what happens: Body and emotion in the making of consciousness}},
+  Year = {1999}}
+
+@book{Damasio1994,
+  Address = {New York},
+  Author = {Damasio, A. R.},
+  Publisher = {Putnam},
+  Title = {Descartes' Error: {E}motion, Reason, and the Human Brain},
+  Year = {1994}}
+
+@article{Dani2007,
+  Author = {Dani, J. A. and Montague, P. R.},
+  Journal = {Nat. Neurosci.},
+  Month = {Apr},
+  Pages = {403--404},
+  Title = {{{D}isrupting addiction through the loss of drug-associated internal states}},
+  Volume = {10},
+  Year = {2007}}
+
+@article{Dasberg1974,
+  Author = {H. Dasberg and H. M. van Praag},
+  Journal = {Acta Psychiatr Scand},
+  Keywords = {Acute Disease; Administration, Oral; Adult; Aged; Anxiety Disorders, blood/drug therapy; Clinical Trials as Topic; Crisis Intervention; Diazepam, administration /&/ dosage/blood/therapeutic use; Female; Humans; Male; Middle Aged; Placebos; Psychiatric Status Rating Scales; Questionnaires; Time Factors},
+  Language = {eng},
+  Medline-Pst = {ppublish},
+  Number = {3},
+  Owner = {Young},
+  Pages = {326--340},
+  Pmid = {4609190},
+  Timestamp = {2010.05.01},
+  Title = {The therapeutic effect of short-term oral diazepam treatment on acute clinical anxiety in a crisis centre.},
+  Volume = {50},
+  Year = {1974}}
+
+@article{Daunais1997a,
+  Author = {Daunais, J. B. and Hart, S. L. and Hedgecock-Rowe, A. and Matasi, J. J. and Thornley, C. and Davies, H. M. and Porrino, L. J.},
+  Journal = {Brain Res. Mol. Brain Res.},
+  Month = {Oct},
+  Pages = {293--304},
+  Title = {{{A}lterations in behavior and opioid gene expression induced by the novel tropane analog {W}{F}-31}},
+  Volume = {50},
+  Year = {1997}}
+
+@article{Daunais1998,
+  Author = {Daunais, J. B. and Hart, S. L. and Smith, H. R. and Letchworth, S. R. and Davies, H. M. and Sexton, T. and Bennett, B. A. and Childers, S. R. and Porrino, L. J.},
+  Journal = {J. Pharmacol. Exp. Ther.},
+  Month = {Jun},
+  Pages = {1246--1254},
+  Title = {{{L}ong-acting blockade of biogenic amine transporters in rat brain by administration of the potent novel tropane 2beta-propanoyl-3beta-(2-{N}aphthyl)-tropane}},
+  Volume = {285},
+  Year = {1998}}
+
+@article{Daunais2001,
+  Author = {Daunais, J. B. and Letchworth, S. R. and Sim-Selley, L. J. and Smith, H. R. and Childers, S. R. and Porrino, L. J.},
+  Journal = {J. Comp.
Neurol.}, + Month = {May}, + Pages = {471--485}, + Title = {{{F}unctional and anatomical localization of mu opioid receptors in the striatum, amygdala, and extended amygdala of the nonhuman primate}}, + Volume = {433}, + Year = {2001}} + +@article{Daunais1997, + Author = {Daunais, J. B. and Nader, M. A. and Porrino, L. J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jul}, + Pages = {471--475}, + Title = {{{L}ong-term cocaine self-administration decreases striatal preproenkephalin m{R}{N}{A} in rhesus monkeys}}, + Volume = {57}, + Year = {1997}} + +@article{David2005, + Author = {David, S. P. and Munafo, M. R. and Johansen-Berg, H. and Smith, S. M. and Rogers, R. D. and Matthews, P. M. and Walton, R. T.}, + Journal = {Biol. Psychiatry}, + Month = {Sep}, + Pages = {488--494}, + Title = {{{V}entral striatum/nucleus accumbens activation to smoking-related pictorial cues in smokers and nonsmokers: a functional magnetic resonance imaging study}}, + Volume = {58}, + Year = {2005}} + +@article{Davis2009, + Author = {Davis, L. M. and Michaelides, M. and Cheskin, L. J. and Moran, T. H. and Aja, S. and Watkins, P. A. and Pei, Z. and Contoreggi, C. and McCullough, K. and Hope, B. and Wang, G. J. and Volkow, N. D. and Thanos, P. K.}, + Journal = {Neuroendocrinology}, + Pages = {152--162}, + Title = {{{B}romocriptine administration reduces hyperphagia and adiposity and differentially affects dopamine {D}2 receptor and transporter binding in leptin-receptor-deficient {Z}ucker rats and rats with diet-induced obesity}}, + Volume = {89}, + Year = {2009}} + +@article{Daw2004, + Author = {Daw, N. D. and Dayan, P.}, + Journal = {Science}, + Month = {Jun}, + Pages = {1753--1754}, + Title = {{{N}euroscience. {M}atchmaking}}, + Volume = {304}, + Year = {2004}} + +@article{Daw2006a, + Author = {Daw, N. D. and Doya, K.}, + Journal = {Current opinion in neurobiology}, + Number = {2}, + Pages = {199--204}, + Publisher = {Elsevier}, + Title = {{The computational neurobiology of learning and reward}}, + Volume = {16}, + Year = {2006}} + +@article{Daw2002, + Author = {Daw, N. D. and Kakade, S. and Dayan, P.}, + Journal = {Neural Netw}, + Pages = {603--616}, + Title = {{{O}pponent interactions between serotonin and dopamine}}, + Volume = {15}, + Year = {2002}} + +@article{Daw2005, + Author = {Daw, N. D. and Niv, Y. and Dayan, P.}, + Journal = {Nat. Neurosci.}, + Month = {Dec}, + Pages = {1704--1711}, + Title = {{{U}ncertainty-based competition between prefrontal and dorsolateral striatal systems for behavioral control}}, + Volume = {8}, + Year = {2005}} + +@article{Daw2006, + Author = {Daw, N. D. and O'Doherty, J. P. and Dayan, P. and Seymour, B. and Dolan, R. J.}, + Journal = {Nature}, + Pages = {876--879}, + Title = {Cortical Substrates for Exploratory Decisions in Humans}, + Volume = {441}, + Year = {2006}} + +@incollection{Dawid1992, + Address = {Oxford}, + Author = {Dawid, A. P.}, + Booktitle = {{B}ayesian Statistics 4}, + Editor = {Bernardo, J. M. and Berger, J. O. and Dawid, A. P. and Smith, A. F. M.}, + Pages = {109--121}, + Publisher = {Oxford University Press}, + Title = {Prequential Analysis, Stochastic Complexity and {B}ayesian Inference}, + Year = {1992}} + +@incollection{Dawidinpress, + Address = {Cambridge}, + Author = {Dawid, A. P.}, + Booktitle = {Evidence}, + Editor = {Tybjerg, K. and Swenson--Wright, J. and Bell, A.}, + Pages = {??--??}, + Publisher = {Cambridge University Press}, + Title = {Statistics and the Law}, + Year = {in press}} + +@article{Dawid2005, + Author = {Dawid, A. 
P.}, + Journal = {Significance}, + Pages = {6--8}, + Title = {Statistics on Trial}, + Volume = {2}, + Year = {2005}} + +@article{Dawid1999, + Author = {Dawid, A. P.}, + Journal = {The Computer Journal}, + Pages = {323--326}, + Title = {Discussion of the Papers by {R}issanen and by {W}allace and {D}owe}, + Volume = {42}, + Year = {1999}} + +@article{Dawid1991, + Author = {Dawid, A. P.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {79--109}, + Title = {Fisherian Inference in Likelihood and Prequential Frames of Reference}, + Volume = {53}, + Year = {1991}} + +@article{Dawid1984, + Author = {Dawid, A. P.}, + Journal = {Journal of the Royal Statistical Society A}, + Pages = {278--292}, + Title = {Statistical Theory: The Prequential Approach}, + Volume = {147}, + Year = {1984}} + +@article{Dawid1999a, + Author = {Dawid, A. P. and Vovk, V. G.}, + Journal = {Bernoulli}, + Pages = {125--162}, + Title = {Prequential Probability: Principles and Properties}, + Volume = {5}, + Year = {1999}} + +@article{Dayan2009, + Author = {Dayan, P.}, + Journal = {Neural Netw}, + Month = {Apr}, + Pages = {213--219}, + Title = {{{G}oal-directed control and its antipodes}}, + Volume = {22}, + Year = {2009}} + +@article{Dayan2009a, + Author = {Dayan, P.}, + Journal = {Network}, + Pages = {32--46}, + Title = {{{P}rospective and retrospective temporal difference learning}}, + Volume = {20}, + Year = {2009}} + +@article{Dayan2008, + Author = {Dayan, P.}, + Journal = {Front Neurosci}, + Month = {Dec}, + Pages = {255--263}, + Title = {{{S}imple substrates for complex cognition}}, + Volume = {2}, + Year = {2008}} + +@article{Dayan2007, + Author = {Dayan, P.}, + Journal = {Front Comput Neurosci}, + Pages = {1}, + Title = {{{B}ilinearity, rules, and prefrontal cortex}}, + Volume = {1}, + Year = {2007}} + +@article{Dayan2007a, + Author = {Dayan, P.}, + Journal = {Network}, + Month = {Jun}, + Pages = {1--4}, + Title = {{{B}{O}{O}{K} {R}{E}{V}{I}{E}{W}}}, + Year = {2007}} + +@article{Dayan2006, + Author = {Dayan, P.}, + Journal = {Neural Comput}, + Month = {Oct}, + Pages = {2293--2319}, + Title = {{{I}mages, frames, and connectionist hierarchies}}, + Volume = {18}, + Year = {2006}} + +@article{Dayan2003, + Author = {Dayan, P.}, + Journal = {J. Physiol. Paris}, + Pages = {475--489}, + Title = {{{P}attern formation and cortical maps}}, + Volume = {97}, + Year = {2003}} + +@article{Dayan2002, + Author = {Dayan, P.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Mar}, + Pages = {105--106}, + Title = {{{M}atters temporal}}, + Volume = {6}, + Year = {2002}} + +@article{Dayan2002a, + Author = {Dayan, P. and Ahmad, F. and Urtecho, J. and Novick, M. and Dixon, P. and Levine, D. and Miller, S.}, + Journal = {Clin Pediatr (Phila)}, + Pages = {415--418}, + Title = {{{T}est characteristics of the urine {G}ram stain in infants $\leq$ 60 days of age with fever}}, + Volume = {41}, + Year = {2002}} + +@article{Dayan2002b, + Author = {Dayan, P. and Balleine, B. W.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {285--298}, + Title = {{{R}eward, motivation, and reinforcement learning}}, + Volume = {36}, + Year = {2002}} + +@article{Dayan2008a, + Author = {Dayan, P. and Daw, N. D.}, + Journal = {Cogn Affect Behav Neurosci}, + Month = {Dec}, + Pages = {429--453}, + Title = {{{D}ecision theory, reinforcement learning, and the brain}}, + Volume = {8}, + Year = {2008}} + +@article{Dayan2009b, + Author = {Dayan, P. and Huys, Q. J.}, + Journal = {Annu. Rev.
Neurosci.}, + Month = {Mar}, + Title = {{{S}erotonin in {A}ffective {C}ontrol}}, + Year = {2009}} + +@article{Dayan2008b, + Author = {Dayan, P. and Huys, Q. J.}, + Journal = {PLoS Comput. Biol.}, + Month = {Feb}, + Pages = {e4}, + Title = {{{S}erotonin, inhibition, and negative mood}}, + Volume = {4}, + Year = {2008}} + +@article{Dayan2008c, + Author = {Dayan, P. and Niv, Y.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Apr}, + Pages = {185--196}, + Title = {{{R}einforcement learning: the good, the bad and the ugly}}, + Volume = {18}, + Year = {2008}} + +@article{Dayan2006a, + Author = {Dayan, P. and Niv, Y. and Seymour, B. and Daw, N. D.}, + Journal = {Neural Netw}, + Month = {Oct}, + Pages = {1153--1160}, + Title = {{{T}he misbehavior of value and the discipline of the will}}, + Volume = {19}, + Year = {2006}} + +@article{Dayan2006b, + Author = {Dayan, P. and Yu, A. J.}, + Journal = {Network}, + Month = {Dec}, + Pages = {335--350}, + Title = {{{P}hasic norepinephrine: a neural interrupt signal for unexpected events}}, + Volume = {17}, + Year = {2006}} + +@article{Dayan2004, + Author = {Dayan, P. S. and Hanson, E. and Bennett, J. E. and Langsam, D. and Miller, S. Z.}, + Journal = {Pediatr Emerg Care}, + Month = {Feb}, + Pages = {85--88}, + Title = {{{C}linical course of urinary tract infections in infants younger than 60 days of age}}, + Volume = {20}, + Year = {2004}} + +@article{Dayan2007b, + Author = {Dayan, P. S. and Osmond, M. and Kuppermann, N. and Lang, E. and Klassen, T. and Johnson, D. and Strauss, S. and Hess, E. and Schneider, S. and Afilalo, M. and Pusic, M.}, + Journal = {Acad Emerg Med}, + Month = {Nov}, + Pages = {978--983}, + Title = {{{D}evelopment of the capacity necessary to perform and promote knowledge translation research in emergency medicine}}, + Volume = {14}, + Year = {2007}} + +@article{Dayan2004a, + Author = {Dayan, P. S. and Vitale, M. and Langsam, D. J. and Ruzal-Shapiro, C. and Novick, M. K. and Kuppermann, N. and Miller, S. Z.}, + Journal = {Acad Emerg Med}, + Month = {Jul}, + Pages = {736--743}, + Title = {{{D}erivation of clinical prediction rules to identify children with fractures after twisting injuries of the ankle}}, + Volume = {11}, + Year = {2004}} + +@article{Dayton2003, + Author = {Dayton, C. M.}, + Journal = {Psychological Methods}, + Pages = {61--71}, + Title = {Information Criteria for Pairwise Comparisons}, + Volume = {8}, + Year = {2003}} + +@article{DeLucia2008, + Author = {De Lucia, M. and Fritschy, J. and Dayan, P. and Holder, D. S.}, + Journal = {Med Biol Eng Comput}, + Month = {Mar}, + Pages = {263--272}, + Title = {{{A} novel method for automated classification of epileptiform activity in the human electroencephalogram-based on independent component analysis}}, + Volume = {46}, + Year = {2008}} + +@article{DeLuna2003, + Author = {{De Luna}, X. and Skouras, K.}, + Journal = {Scandinavian Journal of Statistics}, + Pages = {113--128}, + Title = {Choosing a Model Selection Strategy}, + Volume = {30}, + Year = {2003}} + +@article{DeMartino2006, + Author = {De Martino, B. and Kumaran, D. and Seymour, B. and Dolan, R.J.}, + Journal = {Science}, + Number = {5787}, + Pages = {684--687}, + Publisher = {American Association for the Advancement of Science}, + Title = {{Frames, biases, and rational decision-making in the human brain}}, + Volume = {313}, + Year = {2006}} + +@article{Dean2004, + Author = {Dean, H. L. and Crowley, J. C. and Platt, M. L.}, + Journal = {J.
Neurophysiol.}, + Month = {Nov}, + Pages = {3056--3068}, + Title = {{{V}isual and saccade-related activity in macaque posterior cingulate cortex}}, + Volume = {92}, + Year = {2004}} + +@article{Dean2006, + Author = {Dean, H. L. and Platt, M. L.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {1117--1127}, + Title = {{{A}llocentric spatial referencing of neuronal activity in macaque posterior cingulate cortex}}, + Volume = {26}, + Year = {2006}} + +@article{Deaner2005, + Author = {Deaner, R. O. and Khera, A. V. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Mar}, + Pages = {543--548}, + Title = {{{M}onkeys pay per view: adaptive valuation of social images by rhesus macaques}}, + Volume = {15}, + Year = {2005}} + +@article{Deaner2003, + Author = {Deaner, R. O. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Sep}, + Pages = {1609--1613}, + Title = {{{R}eflexive social attention in monkeys and humans}}, + Volume = {13}, + Year = {2003}} + +@article{Deaner2007, + Author = {Deaner, R. O. and Shepherd, S. V. and Platt, M. L.}, + Journal = {Biol. Lett.}, + Month = {Feb}, + Pages = {64--67}, + Title = {{{F}amiliarity accentuates gaze cuing in women but not men}}, + Volume = {3}, + Year = {2007}} + +@article{DeGroot1973, + Author = {{DeGroot}, M. H.}, + Journal = {Journal of the American Statistical Association}, + Pages = {966--969}, + Title = {Doing What Comes Naturally: {I}nterpreting a Tail Area as a Posterior Probability or as a Likelihood Ratio}, + Volume = {68}, + Year = {1973}} + +@article{Delaney1998, + Author = {Delaney, P. F. and Reder, L. M. and Staszewski, J. J. and Ritter, F. E.}, + Journal = {Psychological Science}, + Pages = {1--7}, + Title = {The Strategy--Specific Nature of Improvement: {T}he Power Law Applies by Strategy Within Task}, + Volume = {9}, + Year = {1998}} + +@article{Delgado2008d, + Author = {Delgado, M. R.}, + Journal = {Neuron}, + Pages = {470--471}, + Title = {{{F}ool me once, shame on you; fool me twice, shame on oxytocin}}, + Volume = {58}, + Year = {2008}} + +@article{Delgado2007, + Author = {Delgado, M. R.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Pages = {70--88}, + Title = {{{R}eward-related responses in the human striatum}}, + Volume = {1104}, + Year = {2007}} + +@article{Delgado2003b, + Author = {Delgado, M. R.}, + Journal = {J Am Acad Orthop Surg}, + Pages = {291--294}, + Title = {{{B}otulinum neurotoxin type {A}}}, + Volume = {11}, + Year = {2003}} + +@article{Delgado1996a, + Author = {Delgado, M. R.}, + Journal = {J. Child Neurol.}, + Pages = {1--3}, + Title = {{{G}uillain-{B}arr{\'e} syndrome: a pediatric challenge}}, + Volume = {11}, + Year = {1996}} + +@article{Delgado2003a, + Author = {Delgado, M. R. and Albright, A. L.}, + Journal = {J. Child Neurol.}, + Pages = {1--8}, + Title = {{{M}ovement disorders in children: definitions, classifications, and grading systems}}, + Volume = {18 Suppl 1}, + Year = {2003}} + +@article{Delgado2005a, + Author = {Delgado, M. R. and Frank, R. H. and Phelps, E. A.}, + Journal = {Nat. Neurosci.}, + Pages = {1611--1618}, + Title = {{{P}erceptions of moral character modulate the neural systems of reward during the trust game}}, + Volume = {8}, + Year = {2005}} + +@article{Delgado2008c, + Author = {Delgado, M. R. and Gillis, M. M. and Phelps, E. A.}, + Journal = {Nat. Neurosci.}, + Pages = {880--881}, + Title = {{{R}egulating the expectation of reward via cognitive strategies}}, + Volume = {11}, + Year = {2008}} + +@article{Delgado2006a, + Author = {Delgado, M. R. and Labouliere, C. D. and Phelps, E.
A.}, + Journal = {Soc Cogn Affect Neurosci}, + Pages = {250--259}, + Title = {{{F}ear of losing money? {A}versive conditioning with secondary reinforcers}}, + Volume = {1}, + Year = {2006}} + +@article{Delgado2008b, + Author = {Delgado, M. R. and Li, J. and Schiller, D. and Phelps, E. A.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Pages = {3787--3800}, + Title = {{{T}he role of the striatum in aversive learning and aversive prediction errors}}, + Volume = {363}, + Year = {2008}} + +@article{Delgado2003, + Author = {Delgado, M. R. and Locke, H. M. and Stenger, V. A. and Fiez, J. A.}, + Journal = {Cogn Affect Behav Neurosci}, + Pages = {27--38}, + Title = {{{D}orsal striatum responses to reward and punishment: effects of valence and magnitude manipulations}}, + Volume = {3}, + Year = {2003}} + +@article{Delgado2005, + Author = {Delgado, M. R. and Miller, M. M. and Inati, S. and Phelps, E. A.}, + Journal = {Neuroimage}, + Pages = {862--873}, + Title = {{{A}n f{M}{R}{I} study of reward-related probability learning}}, + Volume = {24}, + Year = {2005}} + +@article{Delgado2008a, + Author = {Delgado, M. R. and Nearing, K. I. and Ledoux, J. E. and Phelps, E. A.}, + Journal = {Neuron}, + Pages = {829--838}, + Title = {{{N}eural circuitry underlying the regulation of conditioned fear and its relation to extinction}}, + Volume = {59}, + Year = {2008}} + +@article{Delgado2000, + Author = {Delgado, M. R. and Nystrom, L. E. and Fissell, C. and Noll, D. C. and Fiez, J. A.}, + Journal = {J. Neurophysiol.}, + Pages = {3072--3077}, + Title = {{{T}racking the hemodynamic responses to reward and punishment in the striatum}}, + Volume = {84}, + Year = {2000}} + +@article{Delgado2006, + Author = {Delgado, M. R. and Olsson, A. and Phelps, E. A.}, + Journal = {Biol Psychol}, + Pages = {39--48}, + Title = {{{E}xtending animal models of fear conditioning to humans}}, + Volume = {73}, + Year = {2006}} + +@article{Delgado1994, + Author = {Delgado, M. R. and Riela, A. R. and Mills, J. and Browne, R. and Roach, E. S.}, + Journal = {J. Child Neurol.}, + Pages = {311--314}, + Title = {{{T}hrombocytopenia secondary to high valproate levels in children with epilepsy}}, + Volume = {9}, + Year = {1994}} + +@article{Delgado1996, + Author = {Delgado, M. R. and Riela, A. R. and Mills, J. and Pitt, A. and Browne, R.}, + Journal = {Pediatrics}, + Pages = {192--197}, + Title = {{{D}iscontinuation of antiepileptic drug treatment after two seizure-free years in children with cerebral palsy}}, + Volume = {97}, + Year = {1996}} + +@article{Delgado2008, + Author = {Delgado, M. R. and Schotter, A. and Ozbay, E. Y. and Phelps, E. A.}, + Journal = {Science}, + Pages = {1849--1852}, + Title = {{{U}nderstanding overbidding: using the neural circuitry of reward to design economic auctions}}, + Volume = {321}, + Year = {2008}} + +@article{Delgado2004, + Author = {Delgado, M. R. and Stenger, V. A. and Fiez, J. A.}, + Journal = {Cereb. Cortex}, + Pages = {1022--1030}, + Title = {{{M}otivation-dependent responses in the human caudate nucleus}}, + Volume = {14}, + Year = {2004}} + +@article{Deligni`eres2004, + Author = {Deligni\`{e}res, D. and Fortes, M. and Ninot, G.}, + Journal = {Nonlinear Dynamics in Psychology and Life Sciences}, + Pages = {479--510}, + Title = {The Fractal Dynamics of Self--esteem and Physical Self}, + Volume = {8}, + Year = {2004}} + +@article{DeLosh1997, + Author = {DeLosh, Edward L. and Busemeyer, Jerome R. 
and McDaniel, Mark A.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Owner = {WooYoung Ahn}, + Pages = {968-986}, + Timestamp = {2008.03.26}, + Title = {Extrapolation: {T}he {S}ine {Q}ua {N}on for Abstraction in Function Learning}, + Volume = {23 (4)}, + Year = {1997}} + +@article{Dennis1996, + Author = {Dennis, I. and Evans, J. B. T.}, + Journal = {British Journal of Psychology}, + Pages = {105--129}, + Title = {The Speed--Error Trade--Off Problem in Psychometric Testing}, + Volume = {87}, + Year = {1996}} + +@article{Dennis2008, + Author = {Dennis, S. and Lee, M.D. and Kinnell, A.}, + Journal = {Journal of Memory and Language}, + Number = {3}, + Pages = {361--376}, + Publisher = {Elsevier}, + Title = {{Bayesian analysis of recognition memory: The case of the list-length effect}}, + Volume = {59}, + Year = {2008}} + +@article{Deriche1993, + Author = {Deriche, M. and Tewfik, A. H.}, + Journal = {IEEE Transactions on Signal Processing}, + Pages = {2977--2989}, + Title = {Maximum Likelihood Estimation of the Parameters of Discrete Fractionally Differenced Gaussian Noise Process}, + Volume = {41}, + Year = {1993}} + +@book{Derksen2006, + Address = {Diemen}, + Author = {Derksen, T.}, + Publisher = {Veen Magazines}, + Title = {Lucia de {B}. {R}econstructie van een Gerechtelijke Dwaling}, + Year = {2006}} + +@article{Desmarais2005, + Author = {Michel C. Desmarais and Xiaoming Pu}, + Journal = {International Journal of Artificial Intelligence in Education}, + Number = {4}, + Pages = {291--323}, + Title = {A Bayesian student model without hidden nodes and its comparison with Item Response Theory}, + Volume = {15}, + Year = {2005}} + +@article{Desmeules2007, + Author = {Desmeules, Remi and Bechara, Antoine and Dube, Laurette}, + Journal = {Journal of Behavioral Decision Making}, + Owner = {WooYoung Ahn}, + Pages = {211-224}, + Timestamp = {2008.03.26}, + Title = {Subjective valuation and asymmetrical motivational systems: {I}mplications of scope insensitivity for decision making.}, + Volume = {21(2)}, + Year = {2007}} + +@article{Dewey1998, + Author = {Dewey, S. L. and Morgan, A. E. and Ashby, C. R. and Horan, B. and Kushner, S. A. and Logan, J. and Volkow, N. D. and Fowler, J. S. and Gardner, E. L. and Brodie, J. D.}, + Journal = {Synapse}, + Month = {Oct}, + Pages = {119--129}, + Title = {{{A} novel strategy for the treatment of cocaine addiction}}, + Volume = {30}, + Year = {1998}} + +@article{di2003attenuation, + Author = {Di Ciano, P. and Underwood, R.J. and Hagan, J.J. and Everitt, B.J.}, + Journal = {Neuropsychopharmacology (New York, NY)}, + Number = {2}, + Pages = {329--338}, + Publisher = {Elsevier Science}, + Title = {{Attenuation of cue-controlled cocaine-seeking by a selective D3 dopamine receptor antagonist SB-277011-A}}, + Volume = {28}, + Year = {2003}} + +@article{DiSclafani1998, + Author = {Di Sclafani, V. and Clark, H. W. and Tolou-Shams, M. and Bloomer, C. W. and Salas, G. A. and Norman, D. and Fein, G.}, + Journal = {J Int Neuropsychol Soc}, + Month = {Nov}, + Pages = {559--565}, + Title = {{{P}remorbid brain size is a determinant of functional reserve in abstinent crack-cocaine and crack-cocaine-alcohol-dependent adults}}, + Volume = {4}, + Year = {1998}} + +@article{DiSclafani1995, + Author = {Di Sclafani, V. and Ezekiel, F. and Meyerhoff, D. J. and MacKay, S. and Dillon, W. P. and Weiner, M. W. and Fein, G.}, + Journal = {Alcohol. Clin. Exp.
Res.}, + Month = {Oct}, + Pages = {1121--1126}, + Title = {{{B}rain atrophy and cognitive function in older abstinent alcoholic men}}, + Volume = {19}, + Year = {1995}} + +@article{DiSclafani2007, + Author = {Di Sclafani, V. and Finn, P. and Fein, G.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {May}, + Pages = {795--803}, + Title = {{{P}sychiatric comorbidity in long-term abstinent alcoholic individuals}}, + Volume = {31}, + Year = {2007}} + +@article{DiSclafani2002, + Author = {Di Sclafani, V. and Tolou-Shams, M. and Price, L. J. and Fein, G.}, + Journal = {Drug Alcohol Depend}, + Month = {Apr}, + Pages = {161--171}, + Title = {{{N}europsychological performance of individuals dependent on crack-cocaine, or crack-cocaine and alcohol, at 6 weeks and 6 months of abstinence}}, + Volume = {66}, + Year = {2002}} + +@article{Diaconis1985, + Author = {Diaconis, P. and Efron, B.}, + Journal = {The Annals of Statistics}, + Pages = {845--913}, + Title = {Testing for Independence in a Two-Way Table: New Interpretations of the Chi--square Statistic (with discussion)}, + Volume = {13}, + Year = {1985}} + +@article{Diamond1983, + Author = {Diamond, G. A. and Forrester, J. S.}, + Journal = {Annals of Internal Medicine}, + Pages = {385--394}, + Title = {Clinical Trials and Statistical Verdicts: Probable Grounds for Appeal}, + Volume = {98}, + Year = {1983}} + +@article{DiCiccio1997, + Author = {DiCiccio, T. J. and Kass, R. E. and Raftery, A. E. and Wasserman, L.}, + Journal = {Journal of the American Statistical Association}, + Pages = {903--915}, + Title = {Computing {B}ayes Factors by Combining Simulation and Asymptotic Approximations}, + Volume = {92}, + Year = {1997}} + +@article{Dickey1977, + Author = {Dickey, J. M.}, + Journal = {Journal of the American Statistical Association}, + Pages = {138--142}, + Title = {Is the Tail Area Useful as an Approximate {B}ayes Factor?}, + Volume = {72}, + Year = {1977}} + +@article{Dickey1973, + Author = {Dickey, J. M.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {285--305}, + Title = {Scientific Reporting and Personal Probabilities: {S}tudent's Hypothesis}, + Volume = {35}, + Year = {1973}} + +@article{Dickman1988, + Author = {Dickman, S. J. and Meyer, D. E.}, + Journal = {Journal of Personality and Social Psychology}, + Pages = {274--290}, + Title = {Impulsivity and Speed--Accuracy Tradeoffs in Information Processing}, + Volume = {54}, + Year = {1988}} + +@article{Diederich1997, + Author = {Diederich, A.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {260--274}, + Title = {Dynamic Stochastic Models for Decision Making Under Time Constraints}, + Volume = {41}, + Year = {1997}} + +@article{Diederich2006, + Author = {Diederich, A. and Busemeyer, J. R.}, + Journal = {Perception \& Psychophysics}, + Pages = {194--207}, + Title = {Modeling the Effects of Payoff on Response Bias in a Perceptual Discrimination Task: {B}ound--change, Drift--rate--change, or Two--stage--processing Hypothesis}, + Volume = {68}, + Year = {2006}} + +@article{Diederich2003, + Author = {Diederich, A. and Busemeyer, J. R.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {304--322}, + Title = {Simple Matrix Methods for Analyzing Diffusion Models of Choice Probability, Choice Response Time, and Simple Response Time}, + Volume = {47}, + Year = {2003}} + +@article{Diederich2004, + Author = {Diederich, A. 
and Colonius, H.}, + Journal = {Perception \& Psychophysics}, + Pages = {1388--1404}, + Title = {Bimodal and Trimodal Multisensory Enhancement: {E}ffects of Stimulus Onset and Intensity on Reaction Time}, + Volume = {66}, + Year = {2004}} + +@article{Dietz2009, + Author = {Dietz, D. M. and Dietz, K. C. and Nestler, E. J. and Russo, S. J.}, + Journal = {Pharmacopsychiatry}, + Month = {May}, + Pages = {69--78}, + Title = {{{M}olecular mechanisms of psychostimulant-induced structural plasticity}}, + Volume = {42 Suppl 1}, + Year = {2009}} + +@article{DiLeone2003, + Author = {DiLeone, R. J. and Georgescu, D. and Nestler, E. J.}, + Journal = {Life Sci.}, + Month = {Jun}, + Pages = {759--768}, + Title = {{{L}ateral hypothalamic neuropeptides in reward and drug addiction}}, + Volume = {73}, + Year = {2003}} + +@article{DiMartino2008, + Author = {DiMartino, A. and Scheres, A. and Margulies, DS and Kelly, AMC and Uddin, LQ and Shehzad, Z. and Biswal, B. and Walters, JR and Castellanos, FX and Milham, MP}, + Journal = {Cerebral Cortex}, + Publisher = {Oxford Univ Press}, + Title = {{Functional connectivity of human striatum: a resting state fMRI study}}, + Year = {2008}} + +@article{Ding2002, + Author = {Ding, M. and Chen, Y. and Kelso, J. A. S.}, + Journal = {Brain and Cognition}, + Pages = {98--106}, + Title = {Statistical Analysis of Timing Errors}, + Volume = {48}, + Year = {2002}} + +@article{Ding2004, + Author = {Ding, Y. S. and Fowler, J. S. and Logan, J. and Wang, G. J. and Telang, F. and Garza, V. and Biegon, A. and Pareto, D. and Rooney, W. and Shea, C. and Alexoff, D. and Volkow, N. D. and Vocci, F.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {184--189}, + Title = {{6-[18{F}]{F}luoro-{A}-85380, a new {P}{E}{T} tracer for the nicotinic acetylcholine receptor: studies in the human brain and in vivo demonstration of specific binding in white matter}}, + Volume = {53}, + Year = {2004}} + +@article{Ding2004a, + Author = {Ding, Y. S. and Gatley, S. J. and Thanos, P. K. and Shea, C. and Garza, V. and Xu, Y. and Carter, P. and King, P. and Warner, D. and Taintor, N. B. and Park, D. J. and Pyatt, B. and Fowler, J. S. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {168--175}, + Title = {{{B}rain kinetics of methylphenidate ({R}italin) enantiomers after oral administration}}, + Volume = {53}, + Year = {2004}} + +@article{Ding2000, + Author = {Ding, Y. S. and Logan, J. and Bermel, R. and Garza, V. and Rice, O. and Fowler, J. S. and Volkow, N. D.}, + Journal = {J. Neurochem.}, + Month = {Apr}, + Pages = {1514--1521}, + Title = {{{D}opamine receptor-mediated regulation of striatal cholinergic activity: positron emission tomography studies with norchloro[18{F}]fluoroepibatidine}}, + Volume = {74}, + Year = {2000}} + +@article{Ding1998, + Author = {Ding, Y. S. and Logan, J. and Gatley, S. J. and Fowler, J. S. and Volkow, N. D.}, + Journal = {J Neural Transm}, + Pages = {1199--1211}, + Title = {{{P}{E}{T} studies of peripheral catechol-{O}-methyltransferase in non-human primates using [18{F}]{R}o41-0960}}, + Volume = {105}, + Year = {1998}} + +@article{Ding1999, + Author = {Ding, Y. S. and Molina, P. E. and Fowler, J. S. and Logan, J. and Volkow, N. D. and Kuhar, M. J. and Carroll, F. I.}, + Journal = {Nucl. Med. 
Biol.}, + Month = {Jan}, + Pages = {139--148}, + Title = {{{C}omparative studies of epibatidine derivatives [18{F}]{N}{F}{E}{P} and [18{F}]{N}-methyl-{N}{F}{E}{P}: kinetics, nicotine effect, and toxicity}}, + Volume = {26}, + Year = {1999}} + +@article{Ding2000a, + Author = {Ding, Y. S. and Volkow, N. D. and Logan, J. and Garza, V. and Pappas, N. and King, P. and Fowler, J. S.}, + Journal = {Synapse}, + Month = {Mar}, + Pages = {234--237}, + Title = {{{O}ccupancy of brain nicotinic acetylcholine receptors by nicotine doses equivalent to those obtained when smoking a cigarette}}, + Volume = {35}, + Year = {2000}} + +@article{Ditterich2003a, + Author = {Ditterich, J. and Mazurek, M. E. and Shadlen, M. N.}, + Journal = {Nat. Neurosci.}, + Month = {Aug}, + Pages = {891--898}, + Title = {{{M}icrostimulation of visual cortex affects the speed of perceptual decisions}}, + Volume = {6}, + Year = {2003}} + +@article{Dixon2006, + Abstract = {The present study demonstrated the relative impact of gambling and + nongambling contexts on the degree of delay discounting by pathological + gamblers. We used a delay-discounting task with 20 pathological gamblers + in and out of the natural context in which they regularly gambled. + For 16 of the 20 participants, it appeared that the difference of + context altered the subjective value of delayed rewards, thereby + producing relative changes in delay-discounting rates that were generally + consistent with a hyperbolic model of intertemporal choice. The current + data suggest that empirically derived k values from delay-discounting + tasks are context sensitive and are not constant across various settings + for the individual. Implications for future transitional research + on addictive disorders generally, and gambling specifically, are + discussed.}, + Author = {Mark R Dixon and Eric A Jacobs and Scott Sanders}, + Institution = {Behavior Analysis and Therapy Program, Rehabilitation Institute, Southern Illinois University, Carbondale 62901, USA. mdixon@siu.edu}, + Journal = {J Appl Behav Anal}, + Keywords = {Adult; Association Learning; Choice Behavior; Female; Gambling, psychology; Humans; Impulsive Behavior, psychology; Internal-External Control; Male; Motivation; Reinforcement Schedule; Reward; Social Environment; Time Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {413--422}, + Pmid = {17236338}, + Timestamp = {2009.08.06}, + Title = {Contextual control of delay discounting by pathological gamblers.}, + Volume = {39}, + Year = {2006}} + +@article{Dixon2006a, + Abstract = {The present study demonstrated the relative impact of gambling and + nongambling contexts on the degree of delay discounting by pathological + gamblers. We used a delay-discounting task with 20 pathological gamblers + in and out of the natural context in which they regularly gambled. + For 16 of the 20 participants, it appeared that the difference of + context altered the subjective value of delayed rewards, thereby + producing relative changes in delay-discounting rates that were generally + consistent with a hyperbolic model of intertemporal choice. The current + data suggest that empirically derived k values from delay-discounting + tasks are context sensitive and are not constant across various settings + for the individual.
Implications for future transitional research + on addictive disorders generally, and gambling specifically, are + discussed.}, + Author = {Mark R Dixon and Eric A Jacobs and Scott Sanders}, + Institution = {Behavior Analysis and Therapy Program, Rehabilitation Institute, Southern Illinois University, Carbondale 62901, USA. mdixon@siu.edu}, + Journal = {J Appl Behav Anal}, + Keywords = {Adult; Association Learning; Choice Behavior; Female; Gambling, psychology; Humans; Impulsive Behavior, psychology; Internal-External Control; Male; Motivation; Reinforcement Schedule; Reward; Social Environment; Time Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {413--422}, + Pmid = {17236338}, + Timestamp = {2009.08.06}, + Title = {Contextual control of delay discounting by pathological gamblers.}, + Volume = {39}, + Year = {2006}} + +@article{Dixon2003, + Author = {Dixon, P.}, + Journal = {Canadian Journal of Experimental Psychology}, + Pages = {189--202}, + Title = {The $p$--value Fallacy and how to Avoid it}, + Volume = {57}, + Year = {2003}} + +@article{Dodge2005, + Author = {Dodge, R. and Sindelar, J. and Sinha, R.}, + Journal = {J Subst Abuse Treat}, + Month = {Mar}, + Pages = {189--196}, + Title = {{{T}he role of depression symptoms in predicting drug abstinence in outpatient substance abuse treatment}}, + Volume = {28}, + Year = {2005}} + +@article{Dolan2002, + Author = {Dolan, C. V. and {v}an {d}er {M}aas, H. L. J. and Molenaar, P. C. M.}, + Journal = {Behavior Research Methods, Instruments, \& Computers}, + Pages = {304--323}, + Title = {A Framework for {ML} Estimation of Parameters of (Mixtures of) Common Reaction Time Distributions Given Optional Truncation or Censoring}, + Volume = {34}, + Year = {2002}} + +@article{Dolan2008, + Author = {Dolan, S. L. and Bechara, A. and Nathan, P. E.}, + Journal = {Behav Sci Law}, + Pages = {799--822}, + Title = {{{E}xecutive dysfunction as a risk marker for substance abuse: the role of impulsive personality traits}}, + Volume = {26}, + Year = {2008}} + +@article{Dom2006, + Abstract = {AIMS: To test the hypothesis that early-onset alcoholics (EOAs) can + be differentiated from late-onset alcoholics (LOAs) by more severe + substance-related problems and higher levels of impulsivity and aggression. + DESIGN AND MEASUREMENTS: A cross-sectional patient survey with a + community comparison group. The European Addiction Severity Index + was used to assess substance-related problems and the Barratt Impulsiveness + Scale, the Dutch version of the Zuckermann Sensation Seeking Scale + and the Buss-Durkee Hostility Inventory were used to assess impulsive + and aggressive traits. Impulsive decision making was assessed using + a delay discounting task (DDT) with hypothetical monetary rewards. + PARTICIPANTS AND SETTING: Participants were EOAs (n = 42) and LOAs + (n = 46) recruited from an addiction treatment centre and an unmatched, + non-substance-abusing comparison group (n = 54). Findings The EOAs + had higher levels of impulsive decision making than both the LOAs + and the comparison group. The EOAs had higher scores than the LOAs + on measures of impulsiveness, aggressiveness and the severity of + substance-related problems. CONCLUSIONS: This study provides evidence + that EOAs are more impulsive and aggressive than LOAs. 
Further identification + of alcoholism subtypes based on dimensions of impulsivity should + be considered in the light of their relationship with pharmacological + and behavioural treatment interventions.}, + Author = {G. Dom and P. D'haene and W. Hulstijn and B. Sabbe}, + Doi = {10.1111/j.1360-0443.2005.01270.x}, + Institution = {Psychiatric Centre Alexian Brothers, Boechout, Belgium. geert.dom@fracarita.org}, + Journal = {Addiction}, + Keywords = {Adult; Age of Onset; Aggression; Alcoholism, psychology; Cross-Sectional Studies; Decision Making; Female; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Questionnaires; Reward; Self Assessment (Psychology); Severity of Illness Index}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {50--59}, + Pii = {ADD1270}, + Pmid = {16393191}, + Timestamp = {2009.08.06}, + Title = {Impulsivity in abstinent early- and late-onset alcoholics: differences in self-report measures and a discounting task.}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2005.01270.x}, + Volume = {101}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2005.01270.x}} + +@article{Dom2006a, + Abstract = {AIMS: To test the hypothesis that early-onset alcoholics (EOAs) can + be differentiated from late-onset alcoholics (LOAs) by more severe + substance-related problems and higher levels of impulsivity and aggression. + DESIGN AND MEASUREMENTS: A cross-sectional patient survey with a + community comparison group. The European Addiction Severity Index + was used to assess substance-related problems and the Barratt Impulsiveness + Scale, the Dutch version of the Zuckermann Sensation Seeking Scale + and the Buss-Durkee Hostility Inventory were used to assess impulsive + and aggressive traits. Impulsive decision making was assessed using + a delay discounting task (DDT) with hypothetical monetary rewards. + PARTICIPANTS AND SETTING: Participants were EOAs (n = 42) and LOAs + (n = 46) recruited from an addiction treatment centre and an unmatched, + non-substance-abusing comparison group (n = 54). Findings The EOAs + had higher levels of impulsive decision making than both the LOAs + and the comparison group. The EOAs had higher scores than the LOAs + on measures of impulsiveness, aggressiveness and the severity of + substance-related problems. CONCLUSIONS: This study provides evidence + that EOAs are more impulsive and aggressive than LOAs. Further identification + of alcoholism subtypes based on dimensions of impulsivity should + be considered in the light of their relationship with pharmacological + and behavioural treatment interventions.}, + Author = {G. Dom and P. D'haene and W. Hulstijn and B. Sabbe}, + Doi = {10.1111/j.1360-0443.2005.01270.x}, + Institution = {Psychiatric Centre Alexian Brothers, Boechout, Belgium. 
geert.dom@fracarita.org}, + Journal = {Addiction}, + Keywords = {Adult; Age of Onset; Aggression; Alcoholism, psychology; Cross-Sectional Studies; Decision Making; Female; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Questionnaires; Reward; Self Assessment (Psychology); Severity of Illness Index}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {50--59}, + Pii = {ADD1270}, + Pmid = {16393191}, + Timestamp = {2009.08.06}, + Title = {Impulsivity in abstinent early- and late-onset alcoholics: differences in self-report measures and a discounting task.}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2005.01270.x}, + Volume = {101}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2005.01270.x}} + +@article{Dom2005, + Abstract = {BACKGROUND: Orbitofrontal cortex dysfunctions have been frequently + documented in people with substance use disorders. The exact role + of this cortical region, however, remains unspecified. AIMS: To assess + the functionality of the orbitofrontal cortex in people with substance + use disorders. METHOD: Reports of studies using behavioural decision-making + tasks and/or neuroimaging techniques to investigate orbitofrontal + cortex functioning in cases of substance misuse were reviewed. Studies + focusing exclusively on tobacco-smoking and gambling were excluded. + RESULTS: Fifty-two research articles were evaluated. Most studies + showed significant deficits in decision-making in people with substance + use disorders. A consistent finding in the neuroimaging studies was + hypoactivity of the orbitofrontal cortex after detoxification. The + association between hyperactivity of this region and craving or cue + reactivity was not consistent across studies. CONCLUSIONS: The orbitofrontal + cortex has an important role in addictive behaviours. Further studies + are needed to elucidate the underlying neuronal substrates of cue + reactivity, craving and decision-making, and the implications for + treatment and relapse prevention.}, + Author = {G. Dom and B. Sabbe and W. Hulstijn and W. van den Brink}, + Doi = {10.1192/bjp.187.3.209}, + Institution = {Psychiatric Centre Brothers Alexians, Boechout, Belgium. geert.dom@fracarita.org}, + Journal = {Br J Psychiatry}, + Keywords = {Brain Mapping, methods; Cues; Decision Making; Female; Frontal Lobe, physiopathology; Humans; Magnetic Resonance Imaging; Male; Positron-Emission Tomography; Substance-Related Disorders, physiopathology/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Owner = {Woo-Young Ahn}, + Pages = {209--220}, + Pii = {187/3/209}, + Pmid = {16135857}, + Timestamp = {2009.08.07}, + Title = {Substance use disorders and the orbitofrontal cortex: systematic review of behavioural decision-making and neuroimaging studies.}, + Url = {http://dx.doi.org/10.1192/bjp.187.3.209}, + Volume = {187}, + Year = {2005}, + Bdsk-Url-1 = {http://dx.doi.org/10.1192/bjp.187.3.209}} + +@article{Domier2007, + Author = {Domier, C. P. and Monterosso, J. R. and Brody, A. L. and Simon, S. L. and Mendrek, A. and Olmstead, R. and Jarvik, M. E. and Cohen, M. S. and London, E. D.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Nov}, + Pages = {1--9}, + Title = {{{E}ffects of cigarette smoking and abstinence on {S}troop task performance}}, + Volume = {195}, + Year = {2007}} + +@article{Donders1969, + Author = {Donders, F. 
C.}, + Journal = {Acta Psychologica}, + Pages = {412--431}, + Title = {On the Speed of Mental Processes (Translation from the 1869 Original {D}utch Text)}, + Volume = {30}, + Year = {1969}} + +@article{Donohoe2007, + Author = {Donohoe, G. and Morris, D. W. and Clarke, S. and McGhee, K. A. and Schwaiger, S. and Nangle, J. M. and Garavan, H. and Robertson, I. H. and Gill, M. and Corvin, A.}, + Journal = {Neuropsychologia}, + Month = {Jan}, + Pages = {454--458}, + Title = {{{V}ariance in neurocognitive performance is associated with dysbindin-1 in schizophrenia: a preliminary study}}, + Volume = {45}, + Year = {2007}} + +@article{Donohoe2008, + Author = {Donohoe, G. and Morris, D. W. and De Sanctis, P. and Magno, E. and Montesi, J. L. and Garavan, H. P. and Robertson, I. H. and Javitt, D. C. and Gill, M. and Corvin, A. P. and Foxe, J. J.}, + Journal = {Biol. Psychiatry}, + Month = {Mar}, + Pages = {484--489}, + Title = {{{E}arly visual processing deficits in dysbindin-associated schizophrenia}}, + Volume = {63}, + Year = {2008}} + +@article{Donohoe2006, + Author = {Donohoe, G. and Reilly, R. and Clarke, S. and Meredith, S. and Green, B. and Morris, D. and Gill, M. and Corvin, A. and Garavan, H. and Robertson, I. H.}, + Journal = {J Int Neuropsychol Soc}, + Month = {Nov}, + Pages = {901--906}, + Title = {{{D}o antisaccade deficits in schizophrenia provide evidence of a specific inhibitory function?}}, + Volume = {12}, + Year = {2006}} + +@article{Doornik2003, + Author = {Doornik, J. A. and Ooms, M.}, + Journal = {Computational Statistics \& Data Analysis}, + Pages = {333--348}, + Title = {Computational Aspects of Maximum Likelihood Estimation of Autoregressive Fractionally Integrated Moving Average Models}, + Volume = {42}, + Year = {2003}} + +@article{Doros2005, + Author = {Doros, G. and Geier, A. B.}, + Journal = {Psychological Science}, + Pages = {1005--1006}, + Title = {Probability of Replication Revisited: Comment on ``An Alternative to Null--Hypothesis Significance Tests''}, + Volume = {16}, + Year = {2005}} + +@article{Dorsey2006, + Author = {Dorsey, C. D. and Lee, B. K. and Bolla, K. I. and Weaver, V. M. and Lee, S. S. and Lee, G. S. and Todd, A. C. and Shi, W. and Schwartz, B. S.}, + Journal = {J. Occup. Environ. Med.}, + Month = {May}, + Pages = {489--496}, + Title = {{{C}omparison of patella lead with blood lead and tibia lead and their associations with neurobehavioral test scores}}, + Volume = {48}, + Year = {2006}} + +@article{Dostrovsky1996, + Author = {Dostrovsky, J. O. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Dec}, + Pages = {3656--3665}, + Title = {{{C}ooling-specific spinothalamic neurons in the monkey}}, + Volume = {76}, + Year = {1996}} + +@book{Doucet2001, + Address = {Berlin}, + Author = {Doucet, A. and {de Freitas}, N. and Gordon, N.}, + Owner = {Wooyoung Ahn}, + Publisher = {Springer}, + Timestamp = {2007.05.02}, + Title = {Sequential Monte Carlo methods in practice}, + Year = {2001}} + +@article{Dreher2008, + Author = {Dreher, J.C. and Meyer-Lindenberg, A. and Kohn, P. and Berman, K.F.}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {39}, + Pages = {15106}, + Publisher = {National Acad Sciences}, + Title = {{Age-related changes in midbrain dopaminergic regulation of the human reward system}}, + Volume = {105}, + Year = {2008}} + +@article{Dretsch2008, + Author = {Dretsch, M. N.
and Tipples, J.}, + Journal = {Brain Cogn}, + Month = {Feb}, + Pages = {83--90}, + Title = {{{W}orking memory involved in predicting future outcomes based on past experiences}}, + Volume = {66}, + Year = {2008}} + +@article{drevets2001amphetamine, + Author = {Drevets, W.C. and Gautier, C. and Price, J.C. and Kupfer, D.J. and Kinahan, P.E. and Grace, A.A. and Price, J.L. and Mathis, C.A.}, + Journal = {Biological Psychiatry}, + Pages = {81--96}, + Title = {{Amphetamine-induced dopamine release in human ventral striatum correlates with euphoria}}, + Volume = {49}, + Year = {2001}} + +@article{Droungas1995, + Author = {Droungas, A. and Ehrman, R. N. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Addict Behav}, + Pages = {657--673}, + Title = {{{E}ffect of smoking cues and cigarette availability on craving and smoking behavior}}, + Volume = {20}, + Year = {1995}} + +@article{Drummond2006, + Author = {Drummond, S. P. and Paulus, M. P. and Tapert, S. F.}, + Journal = {J Sleep Res}, + Month = {Sep}, + Pages = {261--265}, + Title = {{{E}ffects of two nights sleep deprivation and two nights recovery sleep on response inhibition}}, + Volume = {15}, + Year = {2006}} + +@book{DSM4, + Author = {American Psychiatric Association. Task Force on DSM-IV}, + Publisher = {American Psychiatric Association Washington, DC}, + Title = {{DSM-IV: diagnostic and statistical manual of mental disorders}}, + Year = {1994}} + +@book{DSMIV, + Author = {American Psychiatric Association. Task Force on DSM-IV}, + Publisher = {American Psychiatric Association Washington, DC}, + Title = {{DSM-IV: diagnostic and statistical manual of mental disorders}}, + Year = {1994}} + +@article{Du2006, + Author = {Du, C. and Yu, M. and Volkow, N. D. and Koretsky, A. P. and Fowler, J. S. and Benveniste, H.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {11522--11531}, + Title = {{{C}ocaine increases the intracellular calcium concentration in brain independently of its cerebrovascular effects}}, + Volume = {26}, + Year = {2006}} + +@book{Dudewicz1988, + Address = {New York}, + Author = {Dudewicz, E. J. and Mishra, S. N.}, + Publisher = {John Wiley \& Sons}, + Title = {Modern Mathematical Statistics}, + Year = {1988}} + +@article{Dulawa1999, + Author = {Dulawa, S. C. and Grandy, D. K. and Low, M. J. and Paulus, M. P. and Geyer, M. A.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {9550--9556}, + Title = {{{D}opamine {D}4 receptor-knock-out mice exhibit reduced exploration of novel stimuli}}, + Volume = {19}, + Year = {1999}} + +@article{Dunninpress, + Author = {Dunn, B. D. and Dalgleish, T. and Lawrence, A. D.}, + Journal = {Neuroscience and Biobehavioral Reviews}, + Pages = {?--?}, + Title = {The Somatic Marker Hypothesis: {A} Critical Evaluation}, + Volume = {?}, + Year = {in press}} + +@article{Dunn2003, + Author = {Dunn, J. C. and James, R. N.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {389--416}, + Title = {Signed Difference Analysis: Theory and Application}, + Volume = {47}, + Year = {2003}} + +@article{Dunnett1985b, + Abstract = {To provide a more specific test of memory impairments following lesions + to central cholinergic systems, rats were trained on an operant delayed + matching task. Ibotenic acid lesions of the nucleus basalis produced + a disruption of performance at all delay intervals (a parallel downward + shift in the delay-performance curve).
By contrast, fimbria-fornix + transections had no effects at short delays, but produced a progressively + greater impairment as the delays lengthened (an increased downward + slope of the delay-performance curve). Scopolamine produced a dose-dependent + disruption of performance, apparent at the shortest delays but greater + at longer delays, that was similar to the two lesion deficits combined, + whereas physostigmine induced a mild but significant enhancement + of performance. The results support the hypothesis that disruption + of hippocampal circuitries, including cholinergic afferents via the + fimbria-fornix, produces short-term or working memory impairments, + whereas disruption of the cortical cholinergic system implicates + more stable long-term aspects of task performance. Peripherally administered + cholinergic drugs produce both types of effect and thus may influence + both systems.}, + Author = {S. B. Dunnett}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Animals; Basal Ganglia, physiology; Brain Mapping; Cerebral Cortex, physiology; Cholinergic Fibers, physiology; Female; Hippocampus, physiology; Memory, physiology; Methamphetamine, pharmacology; Neural Pathways, physiology; Physostigmine, pharmacology; Rats; Rats, Inbred Strains; Scopolamine, pharmacology; Septum Pellucidum, physiology; Substantia Innominata, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {3}, + Owner = {Young}, + Pages = {357--363}, + Pmid = {3936093}, + Timestamp = {2009.12.10}, + Title = {Comparative effects of cholinergic drugs and lesions of nucleus basalis or fimbria-fornix on delayed matching in rats.}, + Volume = {87}, + Year = {1985}} + +@article{Durstewitz1999, + Author = {Durstewitz, D. and Kelc, M. and Gunturkun, O.}, + Journal = {Journal of Neuroscience}, + Number = {7}, + Pages = {2807--2822}, + Publisher = {Soc Neuroscience}, + Title = {{A neurocomputational theory of the dopaminergic modulation of working memory functions}}, + Volume = {19}, + Year = {1999}} + +@article{Durstewitz2002, + Author = {Durstewitz, D. and Seamans, J.K.}, + Journal = {Neural Networks}, + Number = {4-6}, + Pages = {561--572}, + Publisher = {Elsevier}, + Title = {{The computational role of dopamine D1 receptors in working memory}}, + Volume = {15}, + Year = {2002}} + +@article{Dworkin1992, + Author = {Dworkin, S. I. and Porrino, L. J. and Smith, J. E.}, + Journal = {NIDA Res. Monogr.}, + Pages = {173--188}, + Title = {{{I}mportance of behavioral controls in the analysis of ongoing events}}, + Volume = {124}, + Year = {1992}} + +@article{Dworkin1991, + Author = {Dworkin, S. I. and Porrino, L. J. and Smith, J. E.}, + Journal = {Adv. Exp. Med. Biol.}, + Pages = {327--338}, + Title = {{{P}harmacology of basal forebrain involvement in reinforcement}}, + Volume = {295}, + Year = {1991}} + +@article{Dykes1998, + Author = {Dykes, R. W. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Jul}, + Pages = {120--129}, + Title = {{{C}ontrol of size and excitability of mechanosensory receptive fields in dorsal column nuclei by homolateral dorsal horn neurons}}, + Volume = {80}, + Year = {1998}} + +@article{Dykstra2002, + Author = {Dykstra, R. L. and Robertson, T. and Silvapulle, M. J.}, + Journal = {Journal of Statistical Planning and Inference}, + Number = {1--2}, + Pages = {--}, + Title = {Statistical Inference Under Inequality Constraints [{S}pecial issue]}, + Volume = {107}, + Year = {2002}} + +@article{Earleywine1994, + Author = {Earleywine, M. and Finn, P. 
R.}, + Journal = {Int J Addict}, + Month = {Apr}, + Pages = {583--591}, + Title = {{{C}ompensatory responses to placebo vary with presumed personality "risk" for alcoholism and drinking habits}}, + Volume = {29}, + Year = {1994}} + +@article{Earleywine1991, + Author = {Earleywine, M. and Finn, P. R.}, + Journal = {Addict Behav}, + Pages = {123--128}, + Title = {{{S}ensation seeking explains the relation between behavioral disinhibition and alcohol consumption}}, + Volume = {16}, + Year = {1991}} + +@article{Earleywine1990, + Author = {Earleywine, M. and Finn, P. R. and Martin, C. S.}, + Journal = {Addict Behav}, + Pages = {183--187}, + Title = {{{P}ersonality risk and alcohol consumption: a latent variable analysis}}, + Volume = {15}, + Year = {1990}} + +@article{Earleywine1992, + Author = {Earleywine, M. and Finn, P. R. and Peterson, J. B. and Pihl, R. O.}, + Journal = {J. Stud. Alcohol}, + Month = {May}, + Pages = {233--238}, + Title = {{{F}actor structure and correlates of the {T}ridimensional {P}ersonality {Q}uestionnaire}}, + Volume = {53}, + Year = {1992}} + +@article{Easter2005, + Author = {Easter, J. and McClure, E. B. and Monk, C. S. and Dhanani, M. and Hodgdon, H. and Leibenluft, E. and Charney, D. S. and Pine, D. S. and Ernst, M.}, + Journal = {J Child Adolesc Psychopharmacol}, + Month = {Aug}, + Pages = {563--570}, + Title = {{{E}motion recognition deficits in pediatric anxiety disorders: implications for amygdala research}}, + Volume = {15}, + Year = {2005}} + +@book{Edwards1992, + Address = {Baltimore, MD}, + Author = {Edwards, A. W. F.}, + Publisher = {The Johns Hopkins University Press}, + Title = {Likelihood}, + Year = {1992}} + +@article{Edwards1965, + Author = {Edwards, W.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {312--329}, + Title = {Optimal Strategies for Seeking Information: {M}odels for Statistics, Choice Reaction Times, and Human Information Processing}, + Volume = {2}, + Year = {1965}} + +@article{Edwards1963, + Author = {Edwards, W. and Lindman, H. and Savage, L. J.}, + Journal = {Psychological Review}, + Pages = {193--242}, + Title = {{B}ayesian Statistical Inference for Psychological Research}, + Volume = {70}, + Year = {1963}} + +@article{Efron2005, + Author = {Efron, B.}, + Journal = {Journal of the American Statistical Association}, + Pages = {1--5}, + Title = {{B}ayesians, Frequentists, and Scientists}, + Volume = {100}, + Year = {2005}} + +@article{Efron1986, + Author = {Efron, B.}, + Journal = {The American Statistician}, + Pages = {1--5}, + Title = {Why Isn't Everyone a {B}ayesian?}, + Volume = {40}, + Year = {1986}} + +@article{Efron1983, + Author = {Efron, B. and Gong, G.}, + Journal = {The American Statistician}, + Pages = {36--48}, + Title = {A Leisurely Look at the Bootstrap, the Jackknife, and Cross--Validation}, + Volume = {37}, + Year = {1983}} + +@article{Efron1997, + Author = {Efron, B. and Tibshirani, R.}, + Journal = {Journal of the American Statistical Association}, + Pages = {548--560}, + Title = {Improvements on Cross--Validation: The $.632+$ Bootstrap Method}, + Volume = {92}, + Year = {1997}} + +@book{Efron1993, + Address = {New York}, + Author = {Efron, B. and Tibshirani, R. J.}, + Publisher = {Chapman \& Hall}, + Title = {An Introduction to the Bootstrap}, + Year = {1993}} + +@article{Ehrlich2004, + Author = {Ehrlich, S. and Noam, G. G. and Lyoo, I. K. and Kwon, B. J. and Clark, M. A. and Renshaw, P. 
F.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Jun}, + Pages = {770--776}, + Title = {{{W}hite matter hyperintensities and their associations with suicidality in psychiatrically hospitalized children and adolescents}}, + Volume = {43}, + Year = {2004}} + +@article{Ehrlich2003, + Author = {Ehrlich, S. and Noam, G. G. and Lyoo, I. K. and Kwon, B. J. and Clark, M. A. and Renshaw, P. F.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Dec}, + Pages = {265--268}, + Title = {{{S}ubanalysis of the location of white matter hyperintensities and their association with suicidality in children and youth}}, + Volume = {1008}, + Year = {2003}} + +@article{Ehrman2002, + Author = {Ehrman, R. N. and Robbins, S. J. and Bromwell, M. A. and Lankford, M. E. and Monterosso, J. R. and O'Brien, C. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Jul}, + Pages = {185--191}, + Title = {{{C}omparing attentional bias to smoking cues in current smokers, former smokers, and non-smokers using a dot-probe task}}, + Volume = {67}, + Year = {2002}} + +@article{Ehrman1998, + Author = {Ehrman, R. N. and Robbins, S. J. and Childress, A. R. and Goehl, L. and Hole, A. V. and O'Brien, C. P.}, + Journal = {J Subst Abuse Treat}, + Pages = {431--435}, + Title = {{{L}aboratory exposure to cocaine cues does not increase cocaine use by outpatient subjects}}, + Volume = {15}, + Year = {1998}} + +@article{Ehrman1991, + Author = {Ehrman, R. N. and Robbins, S. J. and Childress, A. R. and McLellan, A. T. and O'Brien, C. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {231--244}, + Title = {{{R}esponding to drug-related stimuli in humans as a function of drug-use history}}, + Year = {1991}} + +@article{Ehrman1992, + Author = {Ehrman, R. N. and Robbins, S. J. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {523--529}, + Title = {{{C}onditioned responses to cocaine-related stimuli in cocaine abuse patients}}, + Volume = {107}, + Year = {1992}} + +@article{Ehrman1996, + Author = {Ehrman, R. N. and Robbins, S. J. and Cornish, J. W. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Nov}, + Pages = {167--174}, + Title = {{{F}ailure of ritanserin to block cocaine cue reactivity in humans}}, + Volume = {42}, + Year = {1996}} + +@article{Ehrman1990, + Author = {Ehrman, R. N. and Robbins, S. J. and MacRae, J. R. and Childress, A. R.}, + Journal = {NIDA Res. Monogr.}, + Pages = {282--283}, + Title = {{{S}pecificity of conditional responding to naturalistic stimuli in humans with different drug-use histories}}, + Volume = {105}, + Year = {1990}} + +@article{Einstein1905, + Author = {Einstein, A.}, + Journal = {Annalen der Physik}, + Pages = {549--560}, + Title = {\"{U}ber die von der Molekularkinetischen {T}heorie der {W}\"{a}rme geforderte {B}ewegung von in Ruhenden {F}l\"{u}ssigkeiten Suspendierten {T}eilchen}, + Volume = {17}, + Year = {1905}} + +@article{Eldreth2004, + Author = {Eldreth, D. A. and Matochik, J. A. and Cadet, J. L. and Bolla, K. I.}, + Journal = {Neuroimage}, + Month = {Nov}, + Pages = {914--920}, + Title = {{{A}bnormal brain activity in prefrontal brain regions in abstinent marijuana users}}, + Volume = {23}, + Year = {2004}} + +@article{Elffers2003, + Author = {Elffers, H.}, + Journal = {Nederlands JuristenBlad}, + Pages = {1812--1814}, + Title = {Bij Toeval Veroordeeld? {S}tatistische Analyse van Dienstroosterdata in het Strafproces}, + Volume = {34}, + Year = {2003}} + +@article{Elliott2000, + Author = {Elliott, R. 
and Friston, K.J. and Dolan, R.J.}, + Journal = {Journal of Neuroscience}, + Number = {16}, + Pages = {6159}, + Title = {{Dissociable neural responses in human reward systems}}, + Volume = {20}, + Year = {2000}} + +@article{Emeric2008, + Abstract = {We describe intracranial local field potentials (LFP) recorded in + the anterior cingulate cortex (ACC) of macaque monkeys performing + a saccade countermanding task. The most prominent feature at approximately + 70\% of sites was greater negative polarity after errors than after + rewarded correct trials. This negative polarity was also evoked in + unrewarded correct trials. The LFP evoked by the visual target was + much less polarized, and the weak presaccadic modulation was insufficient + to control the initiation of saccades. When saccades were cancelled, + LFP modulation decreased slightly with the magnitude of response + conflict that corresponds to the coactivation of gaze-shifting and + -holding neurons estimated from the probability of canceling. However, + response time adjustments on subsequent trials were not correlated + with LFP polarity on individual trials. The results provide clear + evidence that error- and feedback-related, but not conflict-related, + signals are carried by the LFP in the macaque ACC. Finding performance + monitoring field potentials in the ACC of macaque monkeys establishes + a bridge between event-related potential and functional brain-imaging + studies in humans and neurophysiology studies in non-human primates.}, + Author = {Erik E Emeric and Joshua W Brown and Melanie Leslie and Pierre Pouget and Veit Stuphorn and Jeffrey D Schall}, + Doi = {10.1152/jn.00896.2006}, + Institution = {Center for Integrative and Cognitive Neuroscience, Vanderbilt Vision Research Center, Department of Psychology,Vanderbilt University, Nashville, Tennessee, USA.}, + Journal = {J Neurophysiol}, + Keywords = {Animals; Brain Mapping; Conflict (Psychology); Discrimination (Psychology), physiology; Evoked Potentials, physiology; Feedback; Frontal Lobe, physiology; Functional Laterality, physiology; Inhibition (Psychology); Macaca radiata, anatomy /&/ histology/physiology; Male; Photic Stimulation; Reaction Time, physiology; Saccades, physiology; Visual Fields}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {759--772}, + Pii = {00896.2006}, + Pmid = {18077665}, + Timestamp = {2009.08.04}, + Title = {Performance monitoring local field potentials in the medial frontal cortex of primates: anterior cingulate cortex.}, + Url = {http://dx.doi.org/10.1152/jn.00896.2006}, + Volume = {99}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1152/jn.00896.2006}} + +@article{Emerson1970, + Author = {Emerson, P. L.}, + Journal = {Psychometrika}, + Pages = {99--109}, + Title = {Simple Reaction Time with {M}arkovian Evolution of {G}aussian Discriminal Processes}, + Volume = {35}, + Year = {1970}} + +@article{Ende1997, + Author = {Ende, G. R. and Laxer, K. D. and Knowlton, R. C. and Matson, G. B. and Schuff, N. and Fein, G. and Weiner, M. W.}, + Journal = {Radiology}, + Month = {Mar}, + Pages = {809--817}, + Title = {{{T}emporal lobe epilepsy: bilateral hippocampal metabolite changes revealed at proton {M}{R} spectroscopic imaging}}, + Volume = {202}, + Year = {1997}} + +@article{Engel1994a, + Author = {Engel, S. A. and Rumelhart, D. E. and Wandell, B. A. and Lee, A. T. and Glover, G. H. and Chichilnisky, E. J. and Shadlen, M. 
N.},
+ Journal = {Nature},
+ Month = {Jun},
+ Pages = {525},
+ Title = {{f{M}{R}{I} of human visual cortex}},
+ Volume = {369},
+ Year = {1994}}
+
+@article{Erev2005,
+ Author = {Erev, I. and Barron, G.},
+ Journal = {Psychological Review},
+ Number = {4},
+ Owner = {Wooyoung Ahn},
+ Pages = {912--931},
+ Timestamp = {2007.05.03},
+ Title = {On adaptation, maximization, and reinforcement learning among cognitive strategies},
+ Volume = {112},
+ Year = {2005}}
+
+@article{Erev2008,
+ Author = {Erev, I. and Ert, E. and Yechiam, E.},
+ Owner = {WooYoung Ahn},
+ Pages = {Manuscript submitted for publication},
+ Timestamp = {2007.08.01},
+ Title = {Loss Aversion, Diminishing Sensitivity, and the Effect of Experience on Repeated Decisions},
+ Year = {2008}}
+
+@article{Erev2005a,
+ Author = {Erev, Ido and Haruvy, Ernan},
+ Journal = {Journal of Mathematical Psychology},
+ Owner = {WooYoung Ahn},
+ Pages = {357--371},
+ Timestamp = {2008.03.22},
+ Title = {Generality, repetition, and the role of descriptive learning models},
+ Volume = {49},
+ Year = {2005}}
+
+@article{Erev1998,
+ Author = {Erev, I. and Roth, A. E.},
+ Journal = {American Economic Review},
+ Owner = {Wooyoung Ahn},
+ Pages = {848--881},
+ Timestamp = {2007.05.03},
+ Title = {Predicting how people play games: Reinforcement learning in experimental games with unique, mixed strategy equilibria},
+ Volume = {88},
+ Year = {1998}}
+
+@article{Erev2007,
+ Author = {Erev, Ido and Roth, Alvin E. and Slonim, Robert L. and Barron, Greg},
+ Journal = {Economic Theory},
+ Owner = {WooYoung Ahn},
+ Pages = {29--51},
+ Timestamp = {2008.03.22},
+ Title = {Learning and equilibrium as useful approximations: {A}ccuracy of prediction on randomly selected constant sum games},
+ Volume = {33},
+ Year = {2007}}
+
+@article{Erev2002,
+ Author = {Erev, Ido and Roth, Alvin E. and Slonim, Robert L. and Barron, Greg},
+ Journal = {International Journal of Forecasting},
+ Owner = {WooYoung Ahn},
+ Pages = {359--368},
+ Timestamp = {2008.03.22},
+ Title = {Predictive value and the usefulness of game theoretic models},
+ Volume = {18},
+ Year = {2002}}
+
+@article{Ericson1995,
+ Author = {Ericson, A. C. and Blomqvist, A. and Craig, A. D. and Ottersen, O. P. and Broman, J.},
+ Journal = {Eur. J. Neurosci.},
+ Month = {Feb},
+ Pages = {305--317},
+ Title = {{{E}vidence for glutamate as neurotransmitter in trigemino- and spinothalamic tract terminals in the nucleus submedius of cats}},
+ Volume = {7},
+ Year = {1995}}
+
+@article{Ericson1996,
+ Author = {Ericson, A. C. and Blomqvist, A. and Krout, K. and Craig, A. D.},
+ Journal = {J. Comp. Neurol.},
+ Month = {Aug},
+ Pages = {497--512},
+ Title = {{{F}ine structural organization of spinothalamic and trigeminothalamic lamina {I} terminations in the nucleus submedius of the cat}},
+ Volume = {371},
+ Year = {1996}}
+
+@article{Ericson1997,
+ Author = {Ericson, A. C. and Craig, A. D. and Blomqvist, A.},
+ Journal = {Neuroscience},
+ Month = {Jan},
+ Pages = {491--502},
+ Title = {{{G}{A}{B}{A}-like immunoreactivity in the thalamic nucleus submedius of the cat}},
+ Volume = {76},
+ Year = {1997}}
+
+@article{Eriksen1974,
+ Author = {Eriksen, B. A. and Eriksen, C. W.},
+ Journal = {Perception \& Psychophysics},
+ Pages = {143--149},
+ Title = {Effects of Noise Letters Upon the Identification of a Target Letter in a Nonsearch Task},
+ Volume = {16},
+ Year = {1974}}
+
+@article{Erinoff2004,
+ Author = {Erinoff, L. and Compton, W. M. and Volkow, N. D.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Dec},
+ Pages = {1--2},
+ Title = {{{D}rug abuse and suicidal behavior}},
+ Volume = {76 Suppl},
+ Year = {2004}}
+
+@article{Erk2002,
+ Author = {Erk, S. and Spitzer, M. and Wunderlich, A.P. and Galley, L. and Walter, H.},
+ Journal = {Neuroreport},
+ Number = {18},
+ Pages = {2499},
+ Title = {{Cultural objects modulate reward circuitry.}},
+ Volume = {13},
+ Year = {2002}}
+
+@article{Ernst2002a,
+ Author = {Ernst, M. and Bolla, K. and Mouratidis, M. and Contoreggi, C. and Matochik, J. A. and Kurian, V. and Cadet, J. L. and Kimes, A. S. and London, E. D.},
+ Journal = {Neuropsychopharmacology},
+ Month = {May},
+ Pages = {682--691},
+ Title = {{{D}ecision-making in a risk-taking task: a {P}{E}{T} study}},
+ Volume = {26},
+ Year = {2002}}
+
+@article{Ernst2002,
+ Author = {Ernst, M. and Bolla, K. and Mouratidis, M. and Contoreggi, C. and Matochik, J.A. and Kurian, V. and Cadet, J.L. and Kimes, A.S. and London, E.D.},
+ Journal = {Neuropsychopharmacology (New York, NY)},
+ Number = {5},
+ Pages = {682--691},
+ Title = {{Decision-making in a risk-taking task: a PET study}},
+ Volume = {26},
+ Year = {2002}}
+
+@article{Ernst2004,
+ Author = {Ernst, M. and Dickstein, D. P. and Munson, S. and Eshel, N. and Pradella, A. and Jazbec, S. and Pine, D. S. and Leibenluft, E.},
+ Journal = {J Affect Disord},
+ Month = {Oct},
+ Pages = {S89--S101},
+ Title = {{{R}eward-related processes in pediatric bipolar disorder: a pilot study}},
+ Volume = {82 Suppl 1},
+ Year = {2004}}
+
+@article{Ernst2009a,
+ Author = {Ernst, M. and Fudge, J. L.},
+ Journal = {Neurosci Biobehav Rev},
+ Month = {Mar},
+ Pages = {367--382},
+ Title = {{{A} developmental neurobiological model of motivated behavior: anatomy, connectivity and ontogeny of the triadic nodes}},
+ Volume = {33},
+ Year = {2009}}
+
+@article{Ernst2003b,
+ Author = {Ernst, M. and Grant, S. J. and London, E. D. and Contoreggi, C. S. and Kimes, A. S. and Spurgeon, L.},
+ Journal = {Am J Psychiatry},
+ Month = {Jan},
+ Pages = {33--40},
+ Title = {{{D}ecision making in adolescents with behavior disorders and adults with substance abuse}},
+ Volume = {160},
+ Year = {2003}}
+
+@article{Ernst2003,
+ Author = {Ernst, M. and Kimes, A. S. and Jazbec, S.},
+ Journal = {Neuroimaging Clin. N. Am.},
+ Month = {Nov},
+ Pages = {833--849},
+ Title = {{{N}euroimaging and mechanisms of drug abuse: interface of molecular imaging and molecular genetics}},
+ Volume = {13},
+ Year = {2003}}
+
+@article{Ernst2003a,
+ Author = {Ernst, M. and Kimes, A. S. and London, E. D. and Matochik, J. A. and Eldreth, D. and Tata, S. and Contoreggi, C. and Leff, M. and Bolla, K.},
+ Journal = {Am J Psychiatry},
+ Month = {Jun},
+ Pages = {1061--1070},
+ Title = {{{N}eural substrates of decision making in adults with attention deficit hyperactivity disorder}},
+ Volume = {160},
+ Year = {2003}}
+
+@article{Ernst2006,
+ Author = {Ernst, M. and Luckenbaugh, D. A. and Moolchan, E. T. and Leff, M. K. and Allen, R. and Eshel, N. and London, E. D. and Kimes, A.},
+ Journal = {Pediatrics},
+ Month = {Jun},
+ Pages = {2030--2039},
+ Title = {{{B}ehavioral predictors of substance-use initiation in adolescents with and without attention-deficit/hyperactivity disorder}},
+ Volume = {117},
+ Year = {2006}}
+
+@article{Ernst2007,
+ Author = {Ernst, M. and Maheu, F. S. and Schroth, E. and Hardin, J. and Golan, L. G. and Cameron, J. and Allen, R. and Holzer, S. and Nelson, E. and Pine, D. S. and Merke, D.
P.}, + Journal = {Neuropsychologia}, + Month = {May}, + Pages = {2104--2113}, + Title = {{{A}mygdala function in adolescents with congenital adrenal hyperplasia: a model for the study of early steroid abnormalities}}, + Volume = {45}, + Year = {2007}} + +@article{Ernst2008, + Author = {Ernst, M. and Mueller, S. C.}, + Journal = {Dev Neurobiol}, + Month = {May}, + Pages = {729--743}, + Title = {{{T}he adolescent brain: insights from functional neuroimaging research}}, + Volume = {68}, + Year = {2008}} + +@article{Ernst2005b, + Author = {Ernst, M. and Nelson, E. E. and Jazbec, S. and McClure, E. B. and Monk, C. S. and Leibenluft, E. and Blair, J. and Pine, D. S.}, + Journal = {Neuroimage}, + Month = {May}, + Pages = {1279--1291}, + Title = {{{A}mygdala and nucleus accumbens in responses to receipt and omission of gains in adults and adolescents}}, + Volume = {25}, + Year = {2005}} + +@article{Ernst2004a, + Author = {Ernst, M. and Nelson, E. E. and McClure, E. B. and Monk, C. S. and Munson, S. and Eshel, N. and Zarahn, E. and Leibenluft, E. and Zametkin, A. and Towbin, K. and Blair, J. and Charney, D. and Pine, D. S.}, + Journal = {Neuropsychologia}, + Pages = {1585--1597}, + Title = {{{C}hoice selection and reward anticipation: an f{M}{R}{I} study}}, + Volume = {42}, + Year = {2004}} + +@article{Ernst2005, + Author = {Ernst, M. and Paulus, M. P.}, + Journal = {Biol. Psychiatry}, + Month = {Oct}, + Pages = {597--604}, + Title = {{{N}eurobiology of decision making: a selective review from a neurocognitive and clinical perspective}}, + Volume = {58}, + Year = {2005}} + +@article{Ernst2006a, + Author = {Ernst, M. and Pine, D. S. and Hardin, M.}, + Journal = {Psychol Med}, + Month = {Mar}, + Pages = {299--312}, + Title = {{{T}riadic model of the neurobiology of motivated behavior in adolescence}}, + Volume = {36}, + Year = {2006}} + +@article{Ernst2009, + Author = {Ernst, M. and Romeo, R. D. and Andersen, S. L.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Sep}, + Pages = {199--211}, + Title = {{{N}eurobiology of the development of motivated behaviors in adolescence: a window into a neural systems model}}, + Volume = {93}, + Year = {2009}} + +@article{Ersche2005, + Abstract = {RATIONALE: There is converging evidence for impairments in decision-making + in chronic substance users. In the light of findings that substance + abuse is associated with disruptions of the functioning of the striato-thalamo-orbitofrontal + circuits, it has been suggested that decision-making impairments + are linked to frontal lobe dysfunction. We sought to investigate + this possibility using functional neuroimaging. METHODS: Decision-making + was investigated using the Cambridge Risk Task during H2(15)O PET + scans. A specific feature of the Risk Task is the decisional conflict + between an unlikely high reward option and a likely low reward option. + Four groups, each consisting of 15 participants, were compared: chronic + amphetamine users, chronic opiate users, ex-drug users who had been + long-term amphetamine/opiate users but are abstinent from all drugs + of abuse for at least 1 year and healthy matched controls without + a drug-taking history. RESULTS: During decision-making, control participants + showed relatively greater activation in the right dorsolateral prefrontal + cortex, whereas participants engaged in current or previous drug + use showed relatively greater activation in the left orbitofrontal + cortex. 
CONCLUSION: Our results indicate a disturbance in the mediation + by the prefrontal cortex of a risky decision-making task associated + with amphetamine and opiate abuse. Moreover, this disturbance was + observed in a group of former drug users who had been abstinent for + at least 1 year.}, + Author = {K. D. Ersche and P. C. Fletcher and S. J G Lewis and L. Clark and G. Stocks-Gee and M. London and J. B. Deakin and T. W. Robbins and B. J. Sahakian}, + Doi = {10.1007/s00213-005-2205-7}, + Institution = {Department of Psychiatry, School of Clinical Medicine, Addenbrooke's Hospital, University of Cambridge, Cambridge, UK. ke220@cam.ac.uk}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Adult; Age of Onset; Amphetamine-Related Disorders, pathology/physiopathology; Analysis of Variance; Brain Mapping; Decision Making, physiology; Female; Humans; Male; Memory, Short-Term, drug effects/physiology; Middle Aged; Neuropsychological Tests, statistics /&/ numerical data; Opioid-Related Disorders, pathology/physiopathology; Positron-Emission Tomography, methods; Prefrontal Cortex, pathology/physiopathology; Psychomotor Performance, drug effects/physiology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {612--623}, + Pmid = {16163533}, + Timestamp = {2009.08.07}, + Title = {Abnormal frontal activations related to decision-making in current and former amphetamine and opiate dependent individuals.}, + Url = {http://dx.doi.org/10.1007/s00213-005-2205-7}, + Volume = {180}, + Year = {2005}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-005-2205-7}} + +@article{Ersner-Hershfield2009, + Author = {Ersner-Hershfield, H. and Wimmer, G. E. and Knutson, B.}, + Journal = {Soc Cogn Affect Neurosci}, + Month = {Mar}, + Pages = {85--92}, + Title = {{{S}aving for the future self: neural measures of future self-continuity predict temporal discounting}}, + Volume = {4}, + Year = {2009}} + +@article{Eshel2007, + Author = {Eshel, N. and Nelson, E. E. and Blair, R. J. and Pine, D. S. and Ernst, M.}, + Journal = {Neuropsychologia}, + Month = {Mar}, + Pages = {1270--1279}, + Title = {{{N}eural substrates of choice selection in adults and adolescents: development of the ventrolateral prefrontal and anterior cingulate cortices}}, + Volume = {45}, + Year = {2007}} + +@article{Esposito1984, + Author = {Esposito, R. U. and Porrino, L. J. and Seeger, T. F. and Crane, A. M. and Everist, H. D. and Pert, A.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {635--639}, + Title = {{{C}hanges in local cerebral glucose utilization during rewarding brain stimulation}}, + Volume = {81}, + Year = {1984}} + +@article{Estes1956, + Author = {Estes, W.K.}, + Journal = {Psychological Bulletin}, + Number = {2}, + Pages = {134--140}, + Title = {{The problem of inference from curves based on group data}}, + Volume = {53}, + Year = {1956}} + +@article{Estes2002, + Author = {Estes, W. K.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {3--25}, + Title = {Traps in the Route to Models of Memory and Decision}, + Volume = {9}, + Year = {2002}} + +@article{Estes1950, + Author = {Estes, W. 
K.}, + Journal = {Psychological Review}, + Pages = {94--107}, + Title = {Toward a Statistical Theory of Learning}, + Volume = {57}, + Year = {1950}} + +@article{Estes2005, + Abstract = {With the goal of drawing inferences about underlying processes from + fits of theoretical models to cognitive data, we examined the tradeoff + of risks of depending on model fits to individual performance versus + risks of depending on fits to averaged data with respect to estimation + of values of a model's parameters. Comparisons based on several models + applied to experiments on recognition and categorization and to artificial, + computer-generated data showed that results of using the two types + of model fitting are strongly determined by two factors: model complexity + and number of subjects. Reasonably accurate information about true + parameter values was found only for model fits to individual performance + and then only for some of the parameters of a complex model. Suggested + guidelines are given for circumventing a variety of obstacles to + successful recovery of useful estimates of a model's parameters from + applications to cognitive data.}, + Author = {W. K. Estes and W. Todd Maddox}, + Institution = {Department of Psychology, Indiana University, Bloomington, Indiana 47405, USA. wkestes@indiana.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Cognition; Humans; Memory; Psychology, methods; Recognition (Psychology)}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {403--408}, + Pmid = {16235625}, + Timestamp = {2009.08.14}, + Title = {Risks of drawing inferences about cognitive processes from model fits to individual versus average performance.}, + Volume = {12}, + Year = {2005}} + +@article{Estilaei2001, + Author = {Estilaei, M. R. and Matson, G. B. and Payne, G. S. and Leach, M. O. and Fein, G. and Meyerhoff, D. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jan}, + Pages = {89--97}, + Title = {{{E}ffects of chronic alcohol consumption on the broad phospholipid signal in human brain: an in vivo 31{P} {M}{R}{S} study}}, + Volume = {25}, + Year = {2001}} + +@article{Estilaei2001a, + Author = {Estilaei, M. R. and Matson, G. B. and Payne, G. S. and Leach, M. O. and Fein, G. and Meyerhoff, D. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {1213--1220}, + Title = {{{E}ffects of abstinence from alcohol on the broad phospholipid signal in human brain: an in vivo 31{P} magnetic resonance spectroscopy study}}, + Volume = {25}, + Year = {2001}} + +@article{Evans2004, + Author = {Evans, C. E. Y. and Kemish, K. and Turnbull, O. H.}, + Journal = {Brain and Cognition}, + Pages = {240--244}, + Title = {Paradoxical Effects of Education on the {I}owa {G}ambling {T}ask}, + Volume = {54}, + Year = {2004}} + +@article{Evans1986, + Author = {Evans, M. and Fraser, D. A. S. and Monette, G.}, + Journal = {Canadian Journal of Statistics}, + Pages = {181--199}, + Title = {On Principles and Arguments to Likelihood}, + Volume = {14}, + Year = {1986}} + +@article{Everitt1999, + Author = {Everitt, B. J. and Parkinson, J. A. and Olmstead, M. C. and Arroyo, M. and Robledo, P. and Robbins, T. W.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Jun}, + Pages = {412--438}, + Title = {{{A}ssociative processes in addiction and reward. {T}he role of amygdala-ventral striatal subsystems}}, + Volume = {877}, + Year = {1999}} + +@article{Evrard2008, + Author = {Evrard, H. C. and Craig, A. D.}, + Journal = {J. Comp. 
Neurol.}, + Month = {May}, + Pages = {286--314}, + Title = {{{R}etrograde analysis of the cerebellar projections to the posteroventral part of the ventral lateral thalamic nucleus in the macaque monkey}}, + Volume = {508}, + Year = {2008}} + +@article{Ewalt1998, + Author = {Ewalt, D. H. and Sheffield, E. and Sparagana, S. P. and Delgado, M. R. and Roach, E. S.}, + Journal = {J. Urol.}, + Month = {Jul}, + Pages = {141--145}, + Title = {{{R}enal lesion growth in children with tuberous sclerosis complex}}, + Volume = {160}, + Year = {1998}} + +@article{Fagergren2003, + Author = {Fagergren, P. and Smith, H. R. and Daunais, J. B. and Nader, M. A. and Porrino, L. J. and Hurd, Y. L.}, + Journal = {Eur. J. Neurosci.}, + Month = {May}, + Pages = {2212--2218}, + Title = {{{T}emporal upregulation of prodynorphin m{R}{N}{A} in the primate striatum after cocaine self-administration}}, + Volume = {17}, + Year = {2003}} + +@article{Fareri2008, + Author = {Fareri, D. S. and Martin, L. N. and Delgado, M. R.}, + Journal = {Dev. Psychopathol.}, + Pages = {1191--1211}, + Title = {{{R}eward-related processing in the human brain: developmental considerations}}, + Volume = {20}, + Year = {2008}} + +@article{Fassbender2006, + Author = {Fassbender, C. and Foxe, J. J. and Garavan, H.}, + Journal = {Hum Brain Mapp}, + Month = {Oct}, + Pages = {819--827}, + Title = {{{M}apping the functional anatomy of task preparation: priming task-appropriate brain networks}}, + Volume = {27}, + Year = {2006}} + +@article{Fassbender2009, + Author = {Fassbender, C. and Hester, R. and Murphy, K. and Foxe, J. J. and Foxe, D. M. and Garavan, H.}, + Journal = {Eur. J. Neurosci.}, + Month = {Jan}, + Pages = {181--187}, + Title = {{{P}refrontal and midline interactions mediating behavioural control}}, + Volume = {29}, + Year = {2009}} + +@article{Fein2005b, + Author = {Fein, G. and Allen, J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Apr}, + Pages = {538--546}, + Title = {{{E}{E}{G} spectral changes in treatment-naive, actively drinking alcoholics}}, + Volume = {29}, + Year = {2005}} + +@article{Fein1990, + Author = {Fein, G. and Bachman, L. and Fisher, S. and Davenport, L.}, + Journal = {West. J. Med.}, + Month = {May}, + Pages = {531--537}, + Title = {{{C}ognitive impairments in abstinent alcoholics}}, + Volume = {152}, + Year = {1990}} + +@article{Fein1995, + Author = {Fein, G. and Biggins, C. A. and MacKay, S.}, + Journal = {Arch. Neurol.}, + Month = {Nov}, + Pages = {1109--1118}, + Title = {{{D}elayed latency of the event-related brain potential {P}3{A} component in {H}{I}{V} disease. {P}rogressive effects with increasing cognitive impairment}}, + Volume = {52}, + Year = {1995}} + +@article{Fein1995a, + Author = {Fein, G. and Biggins, C. A. and MacKay, S.}, + Journal = {Biol. Psychiatry}, + Month = {Feb}, + Pages = {183--195}, + Title = {{{A}lcohol abuse and {H}{I}{V} infection have additive effects on frontal cortex function as measured by auditory evoked potential {P}3{A} latency}}, + Volume = {37}, + Year = {1995}} + +@article{Fein1996, + Author = {Fein, G. and Biggins, C. and MacKay, S.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Pages = {955--965}, + Title = {{{C}ocaine abusers have reduced auditory {P}50 amplitude and suppression compared to both normal controls and alcoholics}}, + Volume = {39}, + Year = {1996}} + +@article{Fein2008, + Author = {Fein, G. 
and Chang, M.}, + Journal = {Drug Alcohol Depend}, + Month = {Jan}, + Pages = {141--148}, + Title = {{{S}maller feedback {E}{R}{N} amplitudes during the {B}{A}{R}{T} are associated with a greater family history density of alcohol problems in treatment-naive alcoholics}}, + Volume = {92}, + Year = {2008}} + +@article{Fein2006c, + Author = {Fein, G. and Chang, M.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {2000--2007}, + Title = {{{V}isual {P}300s in long-term abstinent chronic alcoholics}}, + Volume = {30}, + Year = {2006}} + +@article{Fein2004c, + Author = {Fein, G. and Di Sclafani, V.}, + Journal = {Alcohol}, + Month = {Jan}, + Pages = {63--67}, + Title = {{{C}erebral reserve capacity: implications for alcohol and drug abuse}}, + Volume = {32}, + Year = {2004}} + +@article{Fein2002a, + Author = {Fein, G. and Di Sclafani, V. and Cardenas, V. A. and Goldmann, H. and Tolou-Shams, M. and Meyerhoff, D. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Apr}, + Pages = {558--564}, + Title = {{{C}ortical gray matter loss in treatment-naive alcohol dependent individuals}}, + Volume = {26}, + Year = {2002}} + +@article{Fein2007a, + Author = {Fein, G. and Di Sclafani, V. and Finn, P. and Scheiner, D. L.}, + Journal = {Drug Alcohol Depend}, + Month = {Mar}, + Pages = {139--145}, + Title = {{{S}ub-diagnostic psychiatric comorbidity in alcoholics}}, + Volume = {87}, + Year = {2007}} + +@article{Fein2002, + Author = {Fein, G. and Di Sclafani, V. and Meyerhoff, D. J.}, + Journal = {Drug Alcohol Depend}, + Month = {Sep}, + Pages = {87--93}, + Title = {{{P}refrontal cortical volume reduction associated with frontal cortex function deficit in 6-week abstinent crack-cocaine dependent men}}, + Volume = {68}, + Year = {2002}} + +@article{Fein1998, + Author = {Fein, G. and Fletcher, D. J. and Di Sclafani, V.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {196S-200S}, + Title = {{{E}ffect of chronic alcohol abuse on the {C}{N}{S} morbidity of {H}{I}{V} disease}}, + Volume = {22}, + Year = {1998}} + +@article{Fein2004b, + Author = {Fein, G. and Klein, L. and Finn, P.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Oct}, + Pages = {1487--1491}, + Title = {{{I}mpairment on a simulated gambling task in long-term abstinent alcoholics}}, + Volume = {28}, + Year = {2004}} + +@article{Fein2005, + Author = {Fein, G. and Landman, B.}, + Journal = {Alcohol}, + Month = {Jun}, + Pages = {19--26}, + Title = {{{T}reated and treatment-naive alcoholics come from different populations}}, + Volume = {36}, + Year = {2005}} + +@article{Fein2005a, + Author = {Fein, G. and Landman, B.}, + Journal = {Alcohol}, + Month = {Jan}, + Pages = {19--26}, + Title = {{{T}reated and treatment-naive alcoholics come from different populations}}, + Volume = {35}, + Year = {2005}} + +@article{Fein2006b, + Author = {Fein, G. and Landman, B. and Tran, H. and McGillivray, S. and Finn, P. and Barakos, J. and Moon, K.}, + Journal = {Neuroimage}, + Month = {Sep}, + Pages = {1465--1471}, + Title = {{{B}rain atrophy in long-term abstinent alcoholics who demonstrate impairment on a simulated gambling task}}, + Volume = {32}, + Year = {2006}} + +@article{Fein2007, + Author = {Fein, G. and McGillivray, S.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Nov}, + Pages = {1788--1799}, + Title = {{{C}ognitive performance in long-term abstinent elderly alcoholics}}, + Volume = {31}, + Year = {2007}} + +@article{Fein2006a, + Author = {Fein, G. and McGillivray, S. and Finn, P.}, + Journal = {Alcohol. 
Clin. Exp. Res.},
+ Month = {Jun},
+ Pages = {959--966},
+ Title = {{{N}ormal performance on a simulated gambling task in treatment-naive alcohol-dependent individuals}},
+ Volume = {30},
+ Year = {2006}}
+
+@article{Fein2004a,
+ Author = {Fein, G. and McGillivray, S. and Finn, P.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Dec},
+ Pages = {1861--1866},
+ Title = {{{M}ismatch negativity: no difference between treatment-naive alcoholics and controls}},
+ Volume = {28},
+ Year = {2004}}
+
+@article{Fein2000,
+ Author = {Fein, G. and Meyerhoff, D. J.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Aug},
+ Pages = {1227--1235},
+ Title = {{{E}thanol in human brain by magnetic resonance spectroscopy: correlation with blood and breath levels, relaxation, and magnetization transfer}},
+ Volume = {24},
+ Year = {2000}}
+
+@article{Fein2009,
+ Author = {Fein, G. and Shimotsu, R. and Di Sclafani, V. and Barakos, J. and Harper, C.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Jan},
+ Pages = {70--78},
+ Title = {{{I}ncreased white matter signal hyperintensities in long-term abstinent alcoholics compared with nonalcoholic controls}},
+ Volume = {33},
+ Year = {2009}}
+
+@article{Fein2006,
+ Author = {Fein, G. and Torres, J. and Price, L. J. and Di Sclafani, V.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Sep},
+ Pages = {1538--1544},
+ Title = {{{C}ognitive performance in long-term abstinent alcoholic individuals}},
+ Volume = {30},
+ Year = {2006}}
+
+@article{Fein2004,
+ Author = {Fein, G. and Whitlow, B. and Finn, P.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Jan},
+ Pages = {137--142},
+ Title = {{{M}ismatch negativity: no difference between controls and abstinent alcoholics}},
+ Volume = {28},
+ Year = {2004}}
+
+@article{Feinstein2002,
+ Author = {Feinstein, J. S. and Goldin, P. R. and Stein, M. B. and Brown, G. G. and Paulus, M. P.},
+ Journal = {Neuroreport},
+ Month = {Jul},
+ Pages = {1255--1258},
+ Title = {{{H}abituation of attentional networks during emotion processing}},
+ Volume = {13},
+ Year = {2002}}
+
+@article{Feinstein2004,
+ Author = {Feinstein, J. S. and Stein, M. B. and Castillo, G. N. and Paulus, M. P.},
+ Journal = {Conscious Cogn},
+ Month = {Jun},
+ Pages = {323--335},
+ Title = {{{F}rom sensory processes to conscious perception}},
+ Volume = {13},
+ Year = {2004}}
+
+@article{Feinstein2006,
+ Author = {Feinstein, J. S. and Stein, M. B. and Paulus, M. P.},
+ Journal = {Soc Cogn Affect Neurosci},
+ Month = {Sep},
+ Pages = {136--142},
+ Title = {{{A}nterior insula reactivity during certain decisions is associated with neuroticism}},
+ Volume = {1},
+ Year = {2006}}
+
+@book{Feller1971,
+ Address = {New York},
+ Author = {Feller, W.},
+ Publisher = {John Wiley \& Sons},
+ Title = {An Introduction to Probability Theory and Its Applications: {V}ol. {II}},
+ Year = {1971}}
+
+@book{Feller1970,
+ Address = {New York},
+ Author = {Feller, W.},
+ Publisher = {John Wiley \& Sons},
+ Title = {An Introduction to Probability Theory and Its Applications: {V}ol. {I}},
+ Year = {1970}}
+
+@article{Feller1940,
+ Author = {Feller, W.},
+ Journal = {Journal of Parapsychology},
+ Pages = {271--298},
+ Title = {Statistical Aspects of {ESP}},
+ Volume = {4},
+ Year = {1940}}
+
+@article{Ferre2009,
+ Author = {Ferr\'{e}, S. and Baler, R. and Bouvier, M. and Caron, M. G. and Devi, L. A. and Durroux, T. and Fuxe, K. and George, S. R. and Javitch, J. A. and Lohse, M. J. and Mackie, K. and Milligan, G. and Pfleger, K. D. and Pin, J. P. and Volkow, N. D.
and Waldhoer, M. and Woods, A. S. and Franco, R.}, + Journal = {Nat. Chem. Biol.}, + Month = {Mar}, + Pages = {131--134}, + Title = {{{B}uilding a new conceptual framework for receptor heteromers}}, + Volume = {5}, + Year = {2009}} + +@article{Field2007, + Abstract = {AIMS: To investigate whether adolescent heavy drinkers exhibit biased + cognitive processing of alcohol-related cues and impulsive decision + making. DESIGN: A between-subjects design was employed. SETTING: + Classrooms in a single sixth-form college in Merseyside, UK. PARTICIPANTS: + Ninety adolescent students (mean age 16.83 years), of whom 38\% were + identified as heavy drinkers and 36\% were identified as light drinkers, + based on a tertile split of their weekly alcohol consumption. MEASUREMENTS: + Participants provided information about alcohol consumption before + completing measures of alcohol craving, delay discounting and an + "alcohol Stroop" in which they were required to name the colour in + which alcohol-related and matched control words were printed. FINDINGS: + Compared to light drinkers, heavy drinkers showed more pronounced + discounting of delayed hypothetical monetary and alcohol rewards, + which is indicative of a more short-term focus in decision making + in heavy drinkers. Heavy drinkers were also slower to colour-name + alcohol-related words, which indicates an attentional bias for alcohol-related + cues. In all participants, measures of delay discounting and attentional + bias were correlated moderately with each other, and also with the + level of alcohol consumption and with alcohol craving. CONCLUSIONS: + In adolescents, heavy alcohol use is associated with biased attentional + processing of alcohol-related cues and a shorter-term focus in decision + making.}, + Author = {Matt Field and Paul Christiansen and Jon Cole and Andrew Goudie}, + Doi = {10.1111/j.1360-0443.2007.01743.x}, + Institution = {School of Psychology, University of Liverpool, Bedford Street South, Liverpool, UK. mfield@liverpool.ac.uk}, + Journal = {Addiction}, + Keywords = {Adolescent; Alcohol Drinking, psychology; Alcoholic Intoxication, psychology; Area Under Curve; Attention, drug effects; Cognition, drug effects; Color Perception, drug effects/physiology; Cues; Female; Great Britain; Humans; Impulsive Behavior, psychology; Male; Psychological Tests; Statistics, Nonparametric}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {579--586}, + Pii = {ADD1743}, + Pmid = {17309540}, + Timestamp = {2009.08.06}, + Title = {Delay discounting and the alcohol Stroop in heavy drinking adolescents.}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2007.01743.x}, + Volume = {102}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2007.01743.x}} + +@article{Field2007b, + Abstract = {AIMS: To investigate whether adolescent heavy drinkers exhibit biased + cognitive processing of alcohol-related cues and impulsive decision + making. DESIGN: A between-subjects design was employed. SETTING: + Classrooms in a single sixth-form college in Merseyside, UK. PARTICIPANTS: + Ninety adolescent students (mean age 16.83 years), of whom 38\% were + identified as heavy drinkers and 36\% were identified as light drinkers, + based on a tertile split of their weekly alcohol consumption. 
MEASUREMENTS: + Participants provided information about alcohol consumption before + completing measures of alcohol craving, delay discounting and an + "alcohol Stroop" in which they were required to name the colour in + which alcohol-related and matched control words were printed. FINDINGS: + Compared to light drinkers, heavy drinkers showed more pronounced + discounting of delayed hypothetical monetary and alcohol rewards, + which is indicative of a more short-term focus in decision making + in heavy drinkers. Heavy drinkers were also slower to colour-name + alcohol-related words, which indicates an attentional bias for alcohol-related + cues. In all participants, measures of delay discounting and attentional + bias were correlated moderately with each other, and also with the + level of alcohol consumption and with alcohol craving. CONCLUSIONS: + In adolescents, heavy alcohol use is associated with biased attentional + processing of alcohol-related cues and a shorter-term focus in decision + making.}, + Author = {Matt Field and Paul Christiansen and Jon Cole and Andrew Goudie}, + Doi = {10.1111/j.1360-0443.2007.01743.x}, + Institution = {School of Psychology, University of Liverpool, Bedford Street South, Liverpool, UK. mfield@liverpool.ac.uk}, + Journal = {Addiction}, + Keywords = {Adolescent; Alcohol Drinking, psychology; Alcoholic Intoxication, psychology; Area Under Curve; Attention, drug effects; Cognition, drug effects; Color Perception, drug effects/physiology; Cues; Female; Great Britain; Humans; Impulsive Behavior, psychology; Male; Psychological Tests; Statistics, Nonparametric}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {579--586}, + Pii = {ADD1743}, + Pmid = {17309540}, + Timestamp = {2009.08.06}, + Title = {Delay discounting and the alcohol Stroop in heavy drinking adolescents.}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2007.01743.x}, + Volume = {102}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2007.01743.x}} + +@article{Field2007a, + Abstract = {We investigated the effects of exposure to environmental smoking-related + cues (holding a lit cigarette in an environment previously associated + with smoking) on cigarette craving, colour naming of smoking-related + words in a modified Stroop task, and on the delay discounting of + hypothetical rewards, in daily cigarette smokers (N = 30). Compared + to exposure to neutral cues, exposure to smoking-related cues was + associated with increased cigarette craving and slower colour naming + of smoking-related compared to matched control words. However, smoking + cues had no effect on delay discounting. These results suggest that + smoking cues increase craving and the ability of smoking-related + words to grab the attention, but do not influence impulsive decision-making. + Theoretical and clinical implications are discussed.}, + Author = {Matt Field and Michelle Rush and Jon Cole and Andrew Goudie}, + Doi = {10.1177/0269881106070995}, + Institution = {School of Psychology, University of Liverpool, Eleanor Rathbone Building, Bedford Street South, Liverpool, UK. 
mfield@liverpool.ac.uk}, + Journal = {J Psychopharmacol}, + Keywords = {Adult; Attention; Behavior, Addictive; Cues; Decision Making; Female; Humans; Male; Middle Aged; Questionnaires; Reaction Time; Reinforcement (Psychology); Smoking Cessation, psychology; Smoking, psychology; Tobacco Use Disorder, psychology; Token Economy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {603--610}, + Pii = {0269881106070995}, + Pmid = {17092980}, + Timestamp = {2009.08.06}, + Title = {The smoking Stroop and delay discounting in smokers: effects of environmental smoking cues.}, + Url = {http://dx.doi.org/10.1177/0269881106070995}, + Volume = {21}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1177/0269881106070995}} + +@article{Field2007c, + Abstract = {We investigated the effects of exposure to environmental smoking-related + cues (holding a lit cigarette in an environment previously associated + with smoking) on cigarette craving, colour naming of smoking-related + words in a modified Stroop task, and on the delay discounting of + hypothetical rewards, in daily cigarette smokers (N = 30). Compared + to exposure to neutral cues, exposure to smoking-related cues was + associated with increased cigarette craving and slower colour naming + of smoking-related compared to matched control words. However, smoking + cues had no effect on delay discounting. These results suggest that + smoking cues increase craving and the ability of smoking-related + words to grab the attention, but do not influence impulsive decision-making. + Theoretical and clinical implications are discussed.}, + Author = {Matt Field and Michelle Rush and Jon Cole and Andrew Goudie}, + Doi = {10.1177/0269881106070995}, + Institution = {School of Psychology, University of Liverpool, Eleanor Rathbone Building, Bedford Street South, Liverpool, UK. mfield@liverpool.ac.uk}, + Journal = {J Psychopharmacol}, + Keywords = {Adult; Attention; Behavior, Addictive; Cues; Decision Making; Female; Humans; Male; Middle Aged; Questionnaires; Reaction Time; Reinforcement (Psychology); Smoking Cessation, psychology; Smoking, psychology; Tobacco Use Disorder, psychology; Token Economy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {603--610}, + Pii = {0269881106070995}, + Pmid = {17092980}, + Timestamp = {2009.08.06}, + Title = {The smoking Stroop and delay discounting in smokers: effects of environmental smoking cues.}, + Url = {http://dx.doi.org/10.1177/0269881106070995}, + Volume = {21}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1177/0269881106070995}} + +@article{Filbey2008c, + Author = {Filbey, F. M. and Claus, E. and Audette, A. R. and Niculescu, M. and Banich, M. T. and Tanabe, J. and Du, Y. P. and Hutchison, K. E.}, + Journal = {Neuropsychopharmacology}, + Month = {May}, + Pages = {1391--1401}, + Title = {{{E}xposure to the taste of alcohol elicits activation of the mesocorticolimbic neurocircuitry}}, + Volume = {33}, + Year = {2008}} + +@article{Filbey1999, + Author = {Filbey, F. M. and Holcomb, J. and Nair, T. R. and Christensen, J. D. and Garver, D. L.}, + Journal = {Schizophr. Res.}, + Month = {Jan}, + Pages = {15--23}, + Title = {{{N}egative symptoms of familial schizophrenia breed true in unstable (vs. stable) cerebral-ventricle pedigrees}}, + Volume = {35}, + Year = {1999}} + +@article{Filbey2005, + Author = {Filbey, F. M. and Holroyd, T. and Carver, F. and Sunderland, T. 
and Cohen, R. M.},
+ Journal = {Neuroreport},
+ Month = {Nov},
+ Pages = {1747--1752},
+ Title = {{{A} magnetoencephalography spatiotemporal analysis of neural activities during feature binding}},
+ Volume = {16},
+ Year = {2005}}
+
+@article{Filbey2008b,
+ Author = {Filbey, F. M. and Ray, L. and Smolen, A. and Claus, E. D. and Audette, A. and Hutchison, K. E.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Jul},
+ Pages = {1113--1123},
+ Title = {{{D}ifferential neural response to alcohol priming and alcohol taste cues is associated with {D}{R}{D}4 {V}{N}{T}{R} and {O}{P}{R}{M}1 genotypes}},
+ Volume = {32},
+ Year = {2008}}
+
+@article{Filbey2008a,
+ Author = {Filbey, F. M. and Russell, T. and Morris, R. G. and Murray, R. M. and McDonald, C.},
+ Journal = {Ann Gen Psychiatry},
+ Pages = {18},
+ Title = {{{F}unctional magnetic resonance imaging (f{M}{R}{I}) of attention processes in presumed obligate carriers of schizophrenia: preliminary findings}},
+ Volume = {7},
+ Year = {2008}}
+
+@article{Filbey2009,
+ Author = {Filbey, Francesca M. and Schacht, Joseph P. and Myers, Ursula S. and Chavez, Robert S. and Hutchison, Kent E.},
+ Doi = {10.1073/pnas.0903863106},
+ Eprint = {http://www.pnas.org/content/106/31/13016.full.pdf+html},
+ Journal = {Proceedings of the National Academy of Sciences},
+ Number = {31},
+ Pages = {13016--13021},
+ Title = {{Marijuana craving in the brain}},
+ Url = {http://www.pnas.org/content/106/31/13016.abstract},
+ Volume = {106},
+ Year = {2009},
+ Bdsk-Url-1 = {http://www.pnas.org/content/106/31/13016.abstract},
+ Bdsk-Url-2 = {http://dx.doi.org/10.1073/pnas.0903863106}}
+
+@article{Filbey2006,
+ Author = {Filbey, F. M. and Slack, K. J. and Sunderland, T. P. and Cohen, R. M.},
+ Journal = {Neuroreport},
+ Month = {Oct},
+ Pages = {1585--1590},
+ Title = {{{F}unctional magnetic resonance imaging and magnetoencephalography differences associated with {A}{P}{O}{E}epsilon4 in young healthy adults}},
+ Volume = {17},
+ Year = {2006}}
+
+@article{Filbey2008,
+ Author = {Filbey, F. M. and Toulopoulou, T. and Morris, R. G. and McDonald, C. and Bramon, E. and Walshe, M. and Murray, R. M.},
+ Journal = {Schizophr. Res.},
+ Month = {Apr},
+ Pages = {169--175},
+ Title = {{{S}elective attention deficits reflect increased genetic vulnerability to schizophrenia}},
+ Volume = {101},
+ Year = {2008}}
+
+@article{Filoteo2005,
+ Author = {Filoteo, J.V. and Maddox, W.T. and Simmons, A.N. and Ing, A.D. and Cagigas, X.E. and Matthews, S. and Paulus, M.P.},
+ Journal = {NeuroReport},
+ Number = {2},
+ Pages = {111--115},
+ Title = {{Cortical and subcortical brain regions involved in rule-based category learning.}},
+ Volume = {16},
+ Year = {2005}}
+
+@article{Filoteo2005a,
+ Author = {Filoteo, J. V. and Maddox, W. T. and Simmons, A. N. and Ing, A. D. and Cagigas, X. E. and Matthews, S. and Paulus, M. P.},
+ Journal = {Neuroreport},
+ Month = {Feb},
+ Pages = {111--115},
+ Title = {{{C}ortical and subcortical brain regions involved in rule-based category learning}},
+ Volume = {16},
+ Year = {2005}}
+
+@book{Fine1973,
+ Address = {New York},
+ Author = {Fine, T. L.},
+ Publisher = {Academic Press},
+ Title = {Theories of Probability},
+ Year = {1973}}
+
+@article{Finn2004a,
+ Author = {Finn, P. R.},
+ Journal = {Addiction},
+ Month = {Oct},
+ Pages = {1248--1249},
+ Title = {{{G}ender differences in alcohol's disinhibiting effects: a commentary on {F}illmore and {W}eafer}},
+ Volume = {99},
+ Year = {2004}}
+
+@article{Finn2002a,
+ Author = {Finn, P.
R.}, + Journal = {Behav Cogn Neurosci Rev}, + Month = {Sep}, + Pages = {183--205}, + Title = {{{M}otivation, working memory, and decision making: a cognitive-motivational theory of personality vulnerability to alcoholism}}, + Volume = {1}, + Year = {2002}} + +@article{Finn2005, + Author = {Finn, P. R. and Bobova, L. and Wehner, E. and Fargo, S. and Rickert, M. E.}, + Journal = {Addiction}, + Month = {Jul}, + Pages = {953--962}, + Title = {{{A}lcohol expectancies, conduct disorder and early-onset alcoholism: negative alcohol expectancies are associated with less drinking in non-impulsive versus impulsive subjects}}, + Volume = {100}, + Year = {2005}} + +@article{Finn1992, + Author = {Finn, P. R. and Earleywine, M. and Pihl, R. O.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jun}, + Pages = {585--590}, + Title = {{{S}ensation seeking, stress reactivity, and alcohol dampening discriminate the density of a family history of alcoholism}}, + Volume = {16}, + Year = {1992}} + +@article{Finn2004, + Author = {Finn, P. R. and Hall, J.}, + Journal = {J Abnorm Psychol}, + Month = {Nov}, + Pages = {569--581}, + Title = {{{C}ognitive ability and risk for alcoholism: short-term memory capacity and intelligence moderate personality risk for alcohol problems}}, + Volume = {113}, + Year = {2004}} + +@article{Finn1999a, + Author = {Finn, P. R. and Justus, A.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Feb}, + Pages = {256--262}, + Title = {{{R}educed {E}{E}{G} alpha power in the male and female offspring of alcoholics}}, + Volume = {23}, + Year = {1999}} + +@article{Finn1997a, + Author = {Finn, P. R. and Justus, A.}, + Journal = {Alcohol Health Res World}, + Pages = {227--231}, + Title = {{{P}hysiological responses in sons of alcoholics}}, + Volume = {21}, + Year = {1997}} + +@article{Finn1999, + Author = {Finn, P. R. and Justus, A. and Mazas, C. and Steinmetz, J. E.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {465--472}, + Title = {{{W}orking memory, executive processes and the effects of alcohol on {G}o/{N}o-{G}o learning: testing a model of behavioral regulation and impulsivity}}, + Volume = {146}, + Year = {1999}} + +@article{Finn2001, + Author = {Finn, P. R. and Justus, A. N. and Mazas, C. and Rorick, L. and Steinmetz, J. E.}, + Journal = {Integr Physiol Behav Sci}, + Pages = {154--167}, + Title = {{{C}onstraint, alcoholism, and electrodermal response in aversive classical conditioning and mismatch novelty paradigms}}, + Volume = {36}, + Year = {2001}} + +@article{Finn1994, + Author = {Finn, P. R. and Kessler, D. N. and Hussong, A. M.}, + Journal = {J Abnorm Psychol}, + Month = {May}, + Pages = {293--301}, + Title = {{{R}isk for alcoholism and classical conditioning to signals for punishment: evidence for a weak behavioral inhibition system?}}, + Volume = {103}, + Year = {1994}} + +@article{Finn1990a, + Author = {Finn, P. R. and Kleinman, I. and Pihl, R. O.}, + Journal = {J. Nerv. Ment. Dis.}, + Month = {Aug}, + Pages = {500--504}, + Title = {{{T}he lifetime prevalence of psychopathology in men with multigenerational family histories of alcoholism}}, + Volume = {178}, + Year = {1990}} + +@article{Finn1987a, + Author = {Finn, P. R. and Martin, J. and Pihl, R. O.}, + Journal = {Psychother Psychosom}, + Pages = {18--21}, + Title = {{{A}lexithymia in males at high genetic risk for alcoholism}}, + Volume = {47}, + Year = {1987}} + +@article{Finn2002, + Author = {Finn, P. R. and Mazas, C. A. and Justus, A. N. and Steinmetz, J.}, + Journal = {Alcohol. Clin. Exp. 
Res.}, + Month = {Feb}, + Pages = {186--206}, + Title = {{{E}arly-onset alcoholism with conduct disorder: go/no go learning deficits, working memory capacity, and personality}}, + Volume = {26}, + Year = {2002}} + +@article{Finn1988, + Author = {Finn, P. R. and Pihl, R. O.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {742--747}, + Title = {{{R}isk for alcoholism: a comparison between two different groups of sons of alcoholics on cardiovascular reactivity and sensitivity to alcohol}}, + Volume = {12}, + Year = {1988}} + +@article{Finn1987, + Author = {Finn, P. R. and Pihl, R. O.}, + Journal = {J Abnorm Psychol}, + Month = {Aug}, + Pages = {230--236}, + Title = {{{M}en at high risk for alcoholism: the effect of alcohol on cardiovascular response to unavoidable shock}}, + Volume = {96}, + Year = {1987}} + +@article{Finn2000a, + Author = {Finn, P. R. and Ramsey, S. E. and Earleywine, M.}, + Journal = {J. Stud. Alcohol}, + Month = {Jan}, + Pages = {38--45}, + Title = {{{F}rontal {E}{E}{G} response to threat, aggressive traits and a family history of alcoholism: a preliminary study}}, + Volume = {61}, + Year = {2000}} + +@article{Finn2009, + Author = {Finn, P. R. and Rickert, M. E. and Miller, M. A. and Lucas, J. and Bogg, T. and Bobova, L. and Cantrell, H.}, + Journal = {J Abnorm Psychol}, + Month = {Feb}, + Pages = {100--116}, + Title = {{{R}educed cognitive ability in alcohol dependence: examining the role of covarying externalizing psychopathology}}, + Volume = {118}, + Year = {2009}} + +@article{Finn2000, + Author = {Finn, P. R. and Sharkansky, E. J. and Brandt, K. M. and Turcotte, N.}, + Journal = {J Abnorm Psychol}, + Month = {Feb}, + Pages = {122--133}, + Title = {{{T}he effects of familial risk, personality, and expectancies on alcohol use and abuse}}, + Volume = {109}, + Year = {2000}} + +@article{Finn1997, + Author = {Finn, P. R. and Sharkansky, E. J. and Viken, R. and West, T. L. and Sandy, J. and Bufferd, G. M.}, + Journal = {J Abnorm Psychol}, + Month = {Feb}, + Pages = {26--36}, + Title = {{{H}eterogeneity in the families of sons of alcoholics: the impact of familial vulnerability type on offspring characteristics}}, + Volume = {106}, + Year = {1997}} + +@article{Finn1990, + Author = {Finn, P. R. and Zeitouni, N. C. and Pihl, R. O.}, + Journal = {J Abnorm Psychol}, + Month = {Feb}, + Pages = {79--85}, + Title = {{{E}ffects of alcohol on psychophysiological hyperreactivity to nonaversive and aversive stimuli in men at high risk for alcoholism}}, + Volume = {99}, + Year = {1990}} + +@article{Fiorillo2008, + Author = {Fiorillo, C. D. and Newsome, W. T. and Schultz, W.}, + Journal = {Nat. Neurosci.}, + Month = {Jul}, + Title = {{{T}he temporal precision of reward prediction in dopamine neurons}}, + Year = {2008}} + +@article{Fiorillo2005, + Author = {Fiorillo, C. D. and Tobler, P. N. and Schultz, W.}, + Journal = {Behav Brain Funct}, + Month = {Jun}, + Pages = {7}, + Title = {{{E}vidence that the delay-period activity of dopamine neurons corresponds to reward uncertainty rather than backpropagating {T}{D} errors}}, + Volume = {1}, + Year = {2005}} + +@article{Fiorillo2003, + Author = {Fiorillo, C. D. and Tobler, P. N. and Schultz, W.}, + Journal = {Science}, + Pages = {1898--1902}, + Title = {{{D}iscrete coding of reward probability and uncertainty by dopamine neurons}}, + Volume = {299}, + Year = {2003}} + +@article{Fiorino1999, + Author = {Fiorino, D. F. and Phillips, A. G.}, + Journal = {J. 
Neurosci.},
+ Pages = {456--463},
+ Title = {{{F}acilitation of sexual behavior and enhanced dopamine efflux in the nucleus accumbens of male rats after {D}-amphetamine-induced behavioral sensitization}},
+ Volume = {19},
+ Year = {1999}}
+
+@article{Firth1999,
+ Author = {Firth, D. and Kuha, J.},
+ Journal = {Sociological Methods \& Research},
+ Pages = {398--402},
+ Title = {Comments on ``A Critique of the {B}ayesian Information Criterion for Model Selection''},
+ Volume = {27},
+ Year = {1999}}
+
+@article{Fishbein2005,
+ Author = {Fishbein, D. and Hyde, C. and Eldreth, D. and London, E. D. and Matochik, J. and Ernst, M. and Isenberg, N. and Steckley, S. and Schech, B. and Kimes, A.},
+ Journal = {Exp Clin Psychopharmacol},
+ Month = {Feb},
+ Pages = {25--40},
+ Title = {{{C}ognitive performance and autonomic reactivity in abstinent drug abusers and nonusers}},
+ Volume = {13},
+ Year = {2005}}
+
+@article{Fishbein2007,
+ Author = {Fishbein, D. H. and Krupitsky, E. and Flannery, B. A. and Langevin, D. J. and Bobashev, G. and Verbitskaya, E. and Augustine, C. B. and Bolla, K. I. and Zvartau, E. and Schech, B. and Egorova, V. and Bushara, N. and Tsoy, M.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Sep},
+ Pages = {25--38},
+ Title = {{{N}eurocognitive characterizations of {R}ussian heroin addicts without a significant history of other drug use}},
+ Volume = {90},
+ Year = {2007}}
+
+@article{Fishburn1986,
+ Author = {Fishburn, P. C.},
+ Journal = {Statistical Science},
+ Pages = {335--345},
+ Title = {The Axioms of Subjective Probability},
+ Volume = {1},
+ Year = {1986}}
+
+@book{Fisher1958,
+ Address = {New York},
+ Author = {Fisher, R. A.},
+ Publisher = {Hafner},
+ Title = {Statistical Methods for Research Workers (13th ed.)},
+ Year = {1958}}
+
+@article{Fisher1935,
+ Author = {Fisher, R. A.},
+ Journal = {Journal of the Royal Statistical Society},
+ Pages = {39--82},
+ Title = {The Logic of Inductive Inference (with discussion)},
+ Volume = {98},
+ Year = {1935}}
+
+@book{Fisher1935a,
+ Address = {Edinburgh},
+ Author = {Fisher, R. A.},
+ Publisher = {Oliver and Boyd},
+ Title = {The Design of Experiments},
+ Year = {1935}}
+
+@book{Fisher1934,
+ Address = {London},
+ Author = {Fisher, R. A.},
+ Publisher = {Oliver and Boyd},
+ Title = {Statistical Methods for Research Workers (5th ed.)},
+ Year = {1934}}
+
+@book{Fisher1925,
+ Address = {Edinburgh},
+ Author = {Fisher, R. A.},
+ Publisher = {Oliver and Boyd},
+ Title = {Statistical Methods for Research Workers},
+ Year = {1925}}
+
+@article{Fittro1992,
+ Author = {Fittro, K. P. and Bolla, K. I. and Heller, J. R. and Meyd, C. J.},
+ Journal = {J Occup Med},
+ Month = {Sep},
+ Pages = {918--922},
+ Title = {{{T}he {M}ilan {A}utomated {N}eurobehavioral {S}ystem. {A}ge, sex, and education differences}},
+ Volume = {34},
+ Year = {1992}}
+
+@article{Fitzgerald1995,
+ Author = {Fitzgerald, L. W. and Nestler, E. J.},
+ Journal = {Clin. Neurosci.},
+ Pages = {165--173},
+ Title = {{{M}olecular and cellular adaptations in signal transduction pathways following ethanol exposure}},
+ Volume = {3},
+ Year = {1995}}
+
+@article{Fletcher2008,
+ Author = {Fletcher, P.C.
and Frith, C.D.}, + Publisher = {Nature Publishing Group}, + Title = {{Perceiving is believing: a Bayesian approach to explaining the positive symptoms of schizophrenia}}, + Year = {2008}} + +@article{Florens1999, + Author = {Florens, D.}, + Journal = {Statistical Inference for Stochastic Processes}, + Pages = {175--195}, + Title = {Estimation of the Diffusion Coefficient from Crossings}, + Volume = {1}, + Year = {1999}} + +@article{Florens1993, + Author = {Florens, D.}, + Journal = {Journal of Applied Probability}, + Pages = {790--804}, + Title = {On Estimating the Diffusion Coefficient from Discrete Observations}, + Volume = {30}, + Year = {1993}} + +@article{Florens1991, + Author = {Florens, D.}, + Journal = {Stochastic Processes and their Applications}, + Pages = {139--151}, + Title = {Statistics on Crossings of Discretized Diffusions and Local Time}, + Volume = {39}, + Year = {1991}} + +@article{Foley2009, + Author = {Foley, A. G. and Prendergast, A. and Barry, C. and Scully, D. and Upton, N. and Medhurst, A.D. and Regan, C.M.}, + Journal = {Neuropsychopharmacology}, + Number = {12}, + Pages = {2585--2600}, + Publisher = {Nature Publishing Group}, + Title = {{H3 Receptor Antagonism Enhances NCAM PSA-Mediated Plasticity and Improves Memory Consolidation in Odor Discrimination and Delayed Match-to-Position Paradigms}}, + Volume = {34}, + Year = {2009}} + +@article{Forbes2007a, + Author = {Forbes, EE and Brown, SM and Kimak, M. and Ferrell, RE and Manuck, SB and Hariri, AR}, + Journal = {Molecular psychiatry}, + Number = {1}, + Pages = {60--70}, + Publisher = {Nature Publishing Group}, + Title = {{Genetic variation in components of dopamine neurotransmission impacts ventral striatal reactivity associated with impulsivity}}, + Volume = {14}, + Year = {2007}} + +@article{Forbes2007, + Abstract = {Individual differences in traits such as impulsivity involve high + reward sensitivity and are associated with risk for substance use + disorders. The ventral striatum (VS) has been widely implicated in + reward processing, and individual differences in its function are + linked to these disorders. Dopamine (DA) plays a critical role in + reward processing and is a potent neuromodulator of VS reactivity. + Moreover, altered DA signaling has been associated with normal and + pathological reward-related behaviors. Functional polymorphisms in + DA-related genes represent an important source of variability in + DA function that may subsequently impact VS reactivity and associated + reward-related behaviors. Using an imaging genetics approach, we + examined the modulatory effects of common, putatively functional + DA-related polymorphisms on reward-related VS reactivity associated + with self-reported impulsivity. Genetic variants associated with + relatively increased striatal DA release (DRD2 -141C deletion) and + availability (DAT1 9-repeat), as well as diminished inhibitory postsynaptic + DA effects (DRD2 -141C deletion and DRD4 7-repeat), predicted 9-12\% + of the interindividual variability in reward-related VS reactivity. + In contrast, genetic variation directly affecting DA signaling only + in the prefrontal cortex (COMT Val158Met) was not associated with + variability in VS reactivity. Our results highlight an important + role for genetic polymorphisms affecting striatal DA neurotransmission + in mediating interindividual differences in reward-related VS reactivity. 
+ They further suggest that altered VS reactivity may represent a key
+ neurobiological pathway through which these polymorphisms contribute
+ to variability in behavioral impulsivity and related risk for substance
+ use disorders.},
+ Author = {E. E. Forbes and S. M. Brown and M. Kimak and R. E. Ferrell and S. B. Manuck and A. R. Hariri},
+ Doi = {10.1038/sj.mp.4002086},
+ Institution = {Department of Psychiatry, University of Pittsburgh, Pittsburgh, PA 15213, USA.},
+ Journal = {Mol Psychiatry},
+ Keywords = {Adult; Analysis of Variance; Basal Ganglia, blood supply/physiopathology; Case-Control Studies; Catechol O-Methyltransferase, genetics; Dopamine Plasma Membrane Transport Proteins, genetics; Dopamine, genetics/metabolism; Female; Gene Frequency; Genetic Variation, genetics; Genotype; Humans; Image Processing, Computer-Assisted, methods; Impulsive Behavior, genetics/pathology; Magnetic Resonance Imaging, methods; Male; Middle Aged; Oxygen, blood; Receptors, Dopamine D3, genetics; Receptors, Dopamine D4, genetics; Reward; Synaptic Transmission, genetics; Young Adult},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {60--70},
+ Pii = {4002086},
+ Pmid = {17893706},
+ Timestamp = {2009.08.09},
+ Title = {Genetic variation in components of dopamine neurotransmission impacts ventral striatal reactivity associated with impulsivity.},
+ Url = {http://dx.doi.org/10.1038/sj.mp.4002086},
+ Volume = {14},
+ Year = {2009},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1038/sj.mp.4002086}}
+
+@article{Forbey2007,
+ Author = {Johnathan D. Forbey and Yossef S. Ben-Porath},
+ Journal = {Psychological Assessment},
+ Number = {1},
+ Pages = {14--24},
+ Title = {Computerized adaptive personality testing: A review and illustration with the MMPI-2 computerized adaptive version},
+ Volume = {19},
+ Year = {2007}}
+
+@article{Forster2000,
+ Author = {Forster, M. R.},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {205--231},
+ Title = {Key Concepts in Model Selection: Performance and Generalizability},
+ Volume = {44},
+ Year = {2000}}
+
+@article{Fowler2007b,
+ Author = {Fowler, J. S. and Alia-Klein, N. and Kriplani, A. and Logan, J. and Williams, B. and Zhu, W. and Craig, I. W. and Telang, F. and Goldstein, R. and Volkow, N. D. and Vaska, P. and Wang, G. J.},
+ Journal = {Biol. Psychiatry},
+ Month = {Aug},
+ Pages = {355--358},
+ Title = {{{E}vidence that brain {M}{A}{O} {A} activity does not correspond to {M}{A}{O} {A} genotype in healthy male subjects}},
+ Volume = {62},
+ Year = {2007}}
+
+@article{Fowler2001a,
+ Author = {Fowler, J. S. and Ding, Y. S. and Logan, J. and MacGregor, R. R. and Shea, C. and Garza, V. and Gimi, R. and Volkow, N. D. and Wang, G. J. and Schlyer, D. and Ferrieri, R. and Gatley, S. J. and Alexoff, D. and Carter, P. and King, P. and Pappas, N. and Arnett, C. D.},
+ Journal = {Nucl. Med. Biol.},
+ Month = {Oct},
+ Pages = {779--785},
+ Title = {{{S}pecies differences in [11{C}]clorgyline binding in brain}},
+ Volume = {28},
+ Year = {2001}}
+
+@article{Fowler2003b,
+ Author = {Fowler, J. S. and Ding, Y. S. and Volkow, N. D.},
+ Journal = {Semin Nucl Med},
+ Month = {Jan},
+ Pages = {14--27},
+ Title = {{{R}adiotracers for positron emission tomography imaging}},
+ Volume = {33},
+ Year = {2003}}
+
+@article{Fowler2007,
+ Author = {Fowler, J. S. and Kroll, C. and Ferrieri, R. and Alexoff, D. and Logan, J. and Dewey, S. L. and Schiffer, W. and Schlyer, D. and Carter, P. and King, P. and Shea, C. and Xu, Y. and Muench, L. and Benveniste, H. and Vaska, P. and Volkow, N. D.},
and Benveniste, H. and Vaska, P. and Volkow, N. D.}, + Journal = {J. Nucl. Med.}, + Month = {Oct}, + Pages = {1724--1732}, + Title = {{{P}{E}{T} studies of d-methamphetamine pharmacokinetics in primates: comparison with l-methamphetamine and (--)-cocaine}}, + Volume = {48}, + Year = {2007}} + +@article{Fowler2001, + Author = {Fowler, J. S. and Logan, J. and Ding, Y. S. and Franceschi, D. and Wang, G. J. and Volkow, N. D. and Pappas, N. and Schlyer, D. and Gatley, S. J. and Alexoff, D. and Felder, C. and Biegon, A. and Zhu, W.}, + Journal = {J. Neurochem.}, + Month = {Dec}, + Pages = {1039--1046}, + Title = {{{N}on-{M}{A}{O} {A} binding of clorgyline in white matter in human brain}}, + Volume = {79}, + Year = {2001}} + +@article{Fowler2005, + Author = {Fowler, J. S. and Logan, J. and Volkow, N. D. and Wang, G. J.}, + Journal = {Mol Imaging Biol}, + Pages = {377--387}, + Title = {{{T}ranslational neuroimaging: positron emission tomography studies of monoamine oxidase}}, + Volume = {7}, + Year = {2005}} + +@article{Fowler2002a, + Author = {Fowler, J. S. and Logan, J. and Volkow, N. D. and Wang, G. J. and MacGregor, R. R. and Ding, Y. S.}, + Journal = {Methods}, + Month = {Jul}, + Pages = {263--277}, + Title = {{{M}onoamine oxidase: radiotracer development and human studies}}, + Volume = {27}, + Year = {2002}} + +@article{Fowler2003a, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Franceschi, D. and Volkow, N. D. and Telang, F. and Pappas, N. and Ferrieri, R. and Shea, C. and Garza, V. and Xu, Y. and King, P. and Schlyer, D. and Gatley, S. J. and Ding, Y. S. and Warner, D. and Netusil, N. and Carter, P. and Jayne, M. and Alexoff, D. and Zhu, W. and Vaska, P.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {178--187}, + Title = {{{M}onoamine oxidase {A} imaging in peripheral organs in healthy human subjects}}, + Volume = {49}, + Year = {2003}} + +@article{Fowler2003c, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Volkow, N. D.}, + Journal = {Neurotoxicology}, + Month = {Jan}, + Pages = {75--82}, + Title = {{{M}onoamine oxidase and cigarette smoking}}, + Volume = {24}, + Year = {2003}} + +@article{Fowler2004a, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Volkow, N. D. and Telang, F. and Ding, Y. S. and Shea, C. and Garza, V. and Xu, Y. and Li, Z. and Alexoff, D. and Vaska, P. and Ferrieri, R. and Schlyer, D. and Zhu, W. and John Gatley, S.}, + Journal = {Nucl. Med. Biol.}, + Month = {Apr}, + Pages = {313--319}, + Title = {{{C}omparison of the binding of the irreversible monoamine oxidase tracers, [(11){C}]clorgyline and [(11){C}]l-deprenyl in brain and peripheral organs in humans}}, + Volume = {31}, + Year = {2004}} + +@article{Fowler2003, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Volkow, N. D. and Telang, F. and Zhu, W. and Franceschi, D. and Pappas, N. and Ferrieri, R. and Shea, C. and Garza, V. and Xu, Y. and Schlyer, D. and Gatley, S. J. and Ding, Y. S. and Alexoff, D. and Warner, D. and Netusil, N. and Carter, P. and Jayne, M. and King, P. and Vaska, P.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Sep}, + Pages = {11600--11605}, + Title = {{{L}ow monoamine oxidase {B} in peripheral organs in smokers}}, + Volume = {100}, + Year = {2003}} + +@article{Fowler2005a, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Volkow, N. D. and Telang, F. and Zhu, W. and Franceschi, D. and Shea, C. and Garza, V. and Xu, Y. and Ding, Y. S. and Alexoff, D. and Warner, D. and Netusil, N. and Carter, P. and Jayne, M.
and King, P. and Vaska, P.}, + Journal = {J. Nucl. Med.}, + Month = {Sep}, + Pages = {1414--1420}, + Title = {{{C}omparison of monoamine oxidase {A} in peripheral organs in nonsmokers and smokers}}, + Volume = {46}, + Year = {2005}} + +@article{Fowler2002, + Author = {Fowler, J. S. and Logan, J. and Wang, G. J. and Volkow, N. D. and Zhu, W. and Franceschi, D. and Pappas, N. and Ferrieri, R. and Shea, C. and Garza, V. and Xu, Y. and MacGregor, R. R. and Schlyer, D. and Gatley, S. J. and Ding, Y. S. and Alexoff, D.}, + Journal = {J. Nucl. Med.}, + Month = {Oct}, + Pages = {1331--1338}, + Title = {{{P}{E}{T} imaging of monoamine oxidase {B} in peripheral organs in humans}}, + Volume = {43}, + Year = {2002}} + +@article{Fowler2001c, + Author = {Fowler, J. S. and Volkow, N. D.}, + Journal = {J Clin Pharmacol}, + Month = {Jul}, + Pages = {9S--10S}, + Title = {{18{F}{D}{G} for the study of central nervous system drugs}}, + Volume = {Suppl}, + Year = {2001}} + +@article{Fowler1998a, + Author = {Fowler, J. S. and Volkow, N. D.}, + Journal = {J. Toxicol. Clin. Toxicol.}, + Pages = {163--174}, + Title = {{{P}{E}{T} imaging studies in drug abuse}}, + Volume = {36}, + Year = {1998}} + +@article{Fowler1999, + Author = {Fowler, J. S. and Volkow, N. D. and Cilento, R. and Wang, G. J. and Felder, C. and Logan, J.}, + Journal = {Clin. Positron Imaging}, + Month = {Mar}, + Pages = {71--79}, + Title = {{{C}omparison of {B}rain {G}lucose {M}etabolism and {M}onoamine {O}xidase {B} ({M}{A}{O} {B}) in {T}raumatic {B}rain {I}njury}}, + Volume = {2}, + Year = {1999}} + +@article{Fowler1999c, + Author = {Fowler, J. S. and Volkow, N. D. and Ding, Y. S. and Wang, G. J. and Dewey, S. and Fischman, M. W. and Foltin, R. and Hitzemann, R.}, + Journal = {J Clin Pharmacol}, + Month = {Aug}, + Pages = {13S--16S}, + Title = {{{P}ositron emission tomography studies of dopamine-enhancing drugs}}, + Volume = {Suppl}, + Year = {1999}} + +@article{Fowler2007a, + Author = {Fowler, J. S. and Volkow, N. D. and Kassed, C. A. and Chang, L.}, + Journal = {Sci Pract Perspect}, + Month = {Apr}, + Pages = {4--16}, + Title = {{{I}maging the addicted human brain}}, + Volume = {3}, + Year = {2007}} + +@article{Fowler2008, + Author = {Fowler, J. S. and Volkow, N. D. and Logan, J. and Alexoff, D. and Telang, F. and Wang, G. J. and Wong, C. and Ma, Y. and Kriplani, A. and Pradhan, K. and Schlyer, D. and Jayne, M. and Hubbard, B. and Carter, P. and Warner, D. and King, P. and Shea, C. and Xu, Y. and Muench, L. and Apelskog, K.}, + Journal = {Neuroimage}, + Month = {Dec}, + Pages = {756--763}, + Title = {{{F}ast uptake and long-lasting binding of methamphetamine in the human brain: comparison with cocaine}}, + Volume = {43}, + Year = {2008}} + +@article{Fowler2001d, + Author = {Fowler, J. S. and Volkow, N. D. and Logan, J. and Franceschi, D. and Wang, G. J. and MacGregor, R. and Shea, C. and Garza, V. and Pappas, N. and Carter, P. and Netusil, N. and Bridge, P. and Liederman, D. and Elkashef, A. and Rotrosen, J. and Hitzemann, R.}, + Journal = {Life Sci.}, + Month = {May}, + Pages = {2759--2768}, + Title = {{{E}vidence that {L}-deprenyl treatment for one week does not inhibit {M}{A}{O} {A} or the dopamine transporter in the human brain}}, + Volume = {68}, + Year = {2001}} + +@article{Fowler1998, + Author = {Fowler, J. S. and Volkow, N. D. and Logan, J. and Pappas, N. and King, P. and MacGregor, R. and Shea, C. and Garza, V. and Gatley, S.
J.}, + Journal = {Life Sci.}, + Pages = {19--23}, + Title = {{{A}n acute dose of nicotine does not inhibit {M}{A}{O} {B} in baboon brain in vivo}}, + Volume = {63}, + Year = {1998}} + +@article{Fowler2004, + Author = {Fowler, J. S. and Volkow, N. D. and Wang, G. J. and Ding, Y. S.}, + Journal = {Semin Nucl Med}, + Month = {Apr}, + Pages = {112--121}, + Title = {{2-deoxy-2-[18{F}]fluoro-{D}-glucose and alternative radiotracers for positron emission tomography imaging using the human brain as a model}}, + Volume = {34}, + Year = {2004}} + +@article{Fowler1999d, + Author = {Fowler, J. S. and Volkow, N. D. and Wang, G. J. and Ding, Y. S. and Dewey, S. L.}, + Journal = {J. Nucl. Med.}, + Month = {Jul}, + Pages = {1154--1163}, + Title = {{{P}{E}{T} and drug research and development}}, + Volume = {40}, + Year = {1999}} + +@article{Fowler2001b, + Author = {Fowler, J. S. and Volkow, N. D. and Wang, G. J. and Gatley, S. J. and Logan, J.}, + Journal = {Nucl. Med. Biol.}, + Month = {Jul}, + Pages = {561--572}, + Title = {{[(11){C}]{C}ocaine: {P}{E}{T} studies of cocaine pharmacokinetics, dopamine transporter availability and dopamine transporter occupancy}}, + Volume = {28}, + Year = {2001}} + +@article{Fowler1996a, + Abstract = {The massive health problem associated with cigarette smoking is exacerbated + by the addictive properties of tobacco smoke and the limited success + of current approaches to cessation of smoking. Yet little is known + about the neuropharmacological actions of cigarette smoke that contribute + to smoking behaviour, or why smoking is so prevalent in psychiatric + disorders and is associated with a decreased risk of Parkinson's + disease. Here we report that brains of living smokers show a 40\% + decrease in the level of monoamine oxidase B (MAO B; EC 1.4.3.4) + relative to non-smokers or former smokers. MAO B is involved in the + breakdown of dopamine, a neurotransmitter implicated in reinforcing + and motivating behaviours as well as movement. MAO B inhibition is + therefore associated with enhanced activity of dopamine, as well + as with decreased production of hydrogen peroxide, a source of reactive + oxygen species. We propose that reduction of MAO B activity may synergize + with nicotine to produce the diverse behavioural and epidemiological + effects of smoking.}, + Author = {J. S. Fowler and N. D. Volkow and G. J. Wang and N. Pappas and J. Logan and R. MacGregor and D. Alexoff and C. Shea and D. Schlyer and A. P. Wolf and D. Warner and I. Zezulkova and R. Cilento}, + Doi = {10.1038/379733a0}, + Institution = {Brookhaven National Laboratory, Upton, New York 11973, USA.}, + Journal = {Nature}, + Keywords = {Adult; Aged; Aged, 80 and over; Brain, drug effects/enzymology; Dopamine, metabolism; Female; Glucose, metabolism; Humans; Male; Middle Aged; Monoamine Oxidase Inhibitors, pharmacology; Monoamine Oxidase, metabolism; Nicotine, pharmacology; Selegiline, pharmacology; Smoking, metabolism; Tomography, Emission-Computed}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {6567}, + Owner = {Woo-Young Ahn}, + Pages = {733--736}, + Pmid = {8602220}, + Timestamp = {2009.08.06}, + Title = {Inhibition of monoamine oxidase B in the brains of smokers.}, + Url = {http://dx.doi.org/10.1038/379733a0}, + Volume = {379}, + Year = {1996}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/379733a0}} + +@article{Fowler1998b, + Author = {Fowler, J. S. and Volkow, N. D. and Wang, G. J. and Pappas, N. and Logan, J. and MacGregor, R. and Alexoff, D. and Wolf, A. P.
and Warner, D. and Cilento, R. and Zezulkova, I.}, + Journal = {J Addict Dis}, + Pages = {23--34}, + Title = {{{N}europharmacological actions of cigarette smoke: brain monoamine oxidase {B} ({M}{A}{O} {B}) inhibition}}, + Volume = {17}, + Year = {1998}} + +@article{Fowler2000, + Author = {Fowler, J. S. and Wang, G. J. and Volkow, N. D. and Franceschi, D. and Logan, J. and Pappas, N. and Shea, C. and MacGregor, R. R. and Garza, V.}, + Journal = {Am J Psychiatry}, + Month = {Nov}, + Pages = {1864--1866}, + Title = {{{M}aintenance of brain monoamine oxidase {B} inhibition in smokers after overnight cigarette abstinence}}, + Volume = {157}, + Year = {2000}} + +@article{Fowler1999b, + Author = {Fowler, J. S. and Wang, G. J. and Volkow, N. D. and Franceschi, D. and Logan, J. and Pappas, N. and Shea, C. and MacGregor, R. R. and Garza, V.}, + Journal = {Nicotine Tob. Res.}, + Month = {Dec}, + Pages = {325--329}, + Title = {{{S}moking a single cigarette does not produce a measurable reduction in brain {M}{A}{O} {B} in non-smokers}}, + Volume = {1}, + Year = {1999}} + +@article{Fowler1999a, + Author = {Fowler, J. S. and Wang, G. J. and Volkow, N. D. and Ieni, J. and Logan, J. and Pappas, N. and Dewey, S. L.}, + Journal = {Clin. Positron Imaging}, + Month = {Jul}, + Pages = {205--209}, + Title = {{{P}{E}{T} {S}tudies of the {E}ffect of the {A}ntidepressant {D}rugs {N}efazodone or {P}aroxetine on [11{C}]{R}aclopride {B}inding in {H}uman {B}rain}}, + Volume = {2}, + Year = {1999}} + +@article{Fowler2000a, + Author = {Fowler, J. S. and Wang, G. J. and Volkow, N. D. and Logan, J. and Franceschi, D. and Franceschi, M. and MacGregor, R. and Shea, C. and Garza, V. and Liu, N. and Ding, Y. S.}, + Journal = {Life Sci.}, + Month = {Jan}, + Pages = {L141--146}, + Title = {{{E}vidence that gingko biloba extract does not inhibit {M}{A}{O} {A} and {B} in living human brain}}, + Volume = {66}, + Year = {2000}} + +@article{Fox2007a, + Author = {Fox, H. C. and Bergquist, K. L. and Hong, K. I. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Mar}, + Pages = {395--403}, + Title = {{{S}tress-induced and alcohol cue-induced craving in recently abstinent alcohol-dependent individuals}}, + Volume = {31}, + Year = {2007}} + +@article{Fox2007b, + Author = {Fox, H. C. and Bergquist, K. L. and Hong, K. I. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Mar}, + Pages = {395--403}, + Title = {{{S}tress-induced and alcohol cue-induced craving in recently abstinent alcohol-dependent individuals}}, + Volume = {31}, + Year = {2007}} + +@article{Fox2007c, + Author = {Fox, H. C. and Bergquist, K. L. and Hong, K. I. and Sinha, R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Mar}, + Pages = {395--403}, + Title = {{{S}tress-induced and alcohol cue-induced craving in recently abstinent alcohol-dependent individuals}}, + Volume = {31}, + Year = {2007}} + +@article{Fox2006, + Author = {Fox, H. C. and Garcia, M. and Kemp, K. and Milivojevic, V. and Kreek, M. J. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {348--357}, + Title = {{{G}ender differences in cardiovascular and corticoadrenal response to stress and drug cues in cocaine dependent individuals}}, + Volume = {185}, + Year = {2006}} + +@article{Fox2006a, + Author = {Fox, H. C. and Garcia, M. and Kemp, K. and Milivojevic, V. and Kreek, M. J. 
and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {348--357}, + Title = {{{G}ender differences in cardiovascular and corticoadrenal response to stress and drug cues in cocaine dependent individuals}}, + Volume = {185}, + Year = {2006}} + +@article{Fox2006b, + Author = {Fox, H. C. and Garcia, M. and Kemp, K. and Milivojevic, V. and Kreek, M. J. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {348--357}, + Title = {{{G}ender differences in cardiovascular and corticoadrenal response to stress and drug cues in cocaine dependent individuals}}, + Volume = {185}, + Year = {2006}} + +@article{Fox2008a, + Author = {Fox, H. C. and Hong, K. A. and Paliwal, P. and Morgan, P. T. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jan}, + Pages = {527--536}, + Title = {{{A}ltered levels of sex and stress steroid hormones assessed daily over a 28-day cycle in early abstinent cocaine-dependent females}}, + Volume = {195}, + Year = {2008}} + +@article{Fox2008c, + Author = {Fox, H. C. and Hong, K. A. and Paliwal, P. and Morgan, P. T. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jan}, + Pages = {527--536}, + Title = {{{A}ltered levels of sex and stress steroid hormones assessed daily over a 28-day cycle in early abstinent cocaine-dependent females}}, + Volume = {195}, + Year = {2008}} + +@article{Fox2008e, + Author = {Fox, H. C. and Hong, K. A. and Paliwal, P. and Morgan, P. T. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jan}, + Pages = {527--536}, + Title = {{{A}ltered levels of sex and stress steroid hormones assessed daily over a 28-day cycle in early abstinent cocaine-dependent females}}, + Volume = {195}, + Year = {2008}} + +@article{Fox2008, + Author = {Fox, H. C. and Hong, K. I. and Siedlarz, K. and Sinha, R.}, + Journal = {Neuropsychopharmacology}, + Month = {Mar}, + Pages = {796--805}, + Title = {{{E}nhanced sensitivity to stress and drug/alcohol craving in abstinent cocaine-dependent individuals compared to social drinkers}}, + Volume = {33}, + Year = {2008}} + +@article{Fox2008b, + Author = {Fox, H. C. and Hong, K. I. and Siedlarz, K. and Sinha, R.}, + Journal = {Neuropsychopharmacology}, + Month = {Mar}, + Pages = {796--805}, + Title = {{{E}nhanced sensitivity to stress and drug/alcohol craving in abstinent cocaine-dependent individuals compared to social drinkers}}, + Volume = {33}, + Year = {2008}} + +@article{Fox2008d, + Author = {Fox, H. C. and Hong, K. I. and Siedlarz, K. and Sinha, R.}, + Journal = {Neuropsychopharmacology}, + Month = {Mar}, + Pages = {796--805}, + Title = {{{E}nhanced sensitivity to stress and drug/alcohol craving in abstinent cocaine-dependent individuals compared to social drinkers}}, + Volume = {33}, + Year = {2008}} + +@article{Fox2007, + Author = {Fox, H. C. and Hong, K. I. and Siedlarz, K. and Sinha, R.}, + Journal = {Neuropsychopharmacology}, + Month = {May}, + Title = {{{E}nhanced {E}motional and {P}hysiological {S}ensitivity to {S}tress and {D}rug/{A}lcohol {C}raving in {A}bstinent {C}ocaine-{D}ependent {I}ndividuals {C}ompared to {S}ocially {D}rinking {C}ontrols}}, + Year = {2007}} + +@article{Fox2007d, + Author = {Fox, H. C. and Hong, K. I. and Siedlarz, K. 
and Sinha, R.}, + Journal = {Neuropsychopharmacology}, + Month = {May}, + Title = {{{E}nhanced {E}motional and {P}hysiological {S}ensitivity to {S}tress and {D}rug/{A}lcohol {C}raving in {A}bstinent {C}ocaine-{D}ependent {I}ndividuals {C}ompared to {S}ocially {D}rinking {C}ontrols}}, + Year = {2007}} + +@article{Fox2009a, + Author = {Fox, H. C. and Jackson, E. D. and Sinha, R.}, + Journal = {Psychoneuroendocrinology}, + Month = {Sep}, + Pages = {1198--1207}, + Title = {{{E}levated cortisol and learning and memory deficits in cocaine dependent individuals: relationship to relapse outcomes}}, + Volume = {34}, + Year = {2009}} + +@article{Fox2009c, + Author = {Fox, H. C. and Jackson, E. D. and Sinha, R.}, + Journal = {Psychoneuroendocrinology}, + Month = {Sep}, + Pages = {1198--1207}, + Title = {{{E}levated cortisol and learning and memory deficits in cocaine dependent individuals: relationship to relapse outcomes}}, + Volume = {34}, + Year = {2009}} + +@article{Fox2009e, + Author = {Fox, H. C. and Jackson, E. D. and Sinha, R.}, + Journal = {Psychoneuroendocrinology}, + Month = {Sep}, + Pages = {1198--1207}, + Title = {{{E}levated cortisol and learning and memory deficits in cocaine dependent individuals: relationship to relapse outcomes}}, + Volume = {34}, + Year = {2009}} + +@article{Fox2009, + Author = {Fox, H. C. and Sinha, R.}, + Journal = {Harv Rev Psychiatry}, + Pages = {103--119}, + Title = {{{S}ex differences in drug-related stress-system changes: implications for treatment in substance-abusing women}}, + Volume = {17}, + Year = {2009}} + +@article{Fox2009b, + Author = {Fox, H. C. and Sinha, R.}, + Journal = {Harv Rev Psychiatry}, + Pages = {103--119}, + Title = {{{S}ex differences in drug-related stress-system changes: implications for treatment in substance-abusing women}}, + Volume = {17}, + Year = {2009}} + +@article{Fox2009d, + Author = {Fox, H. C. and Sinha, R.}, + Journal = {Harv Rev Psychiatry}, + Pages = {103--119}, + Title = {{{S}ex differences in drug-related stress-system changes: implications for treatment in substance-abusing women}}, + Volume = {17}, + Year = {2009}} + +@article{Fox2005a, + Author = {Fox, H. C. and Talih, M. and Malison, R. and Anderson, G. M. and Kreek, M. J. and Sinha, R.}, + Journal = {Psychoneuroendocrinology}, + Month = {Oct}, + Pages = {880--891}, + Title = {{{F}requency of recent cocaine and alcohol use affects drug craving and associated responses to stress and drug-related cues}}, + Volume = {30}, + Year = {2005}} + +@article{Fox2005b, + Author = {Fox, H. C. and Talih, M. and Malison, R. and Anderson, G. M. and Kreek, M. J. and Sinha, R.}, + Journal = {Psychoneuroendocrinology}, + Month = {Oct}, + Pages = {880--891}, + Title = {{{F}requency of recent cocaine and alcohol use affects drug craving and associated responses to stress and drug-related cues}}, + Volume = {30}, + Year = {2005}} + +@article{Fox2005, + Author = {Fox, N. A. and Nichols, K. E. and Henderson, H. A. and Rubin, K. and Schmidt, L. and Hamer, D. and Ernst, M. and Pine, D. S.}, + Journal = {Psychol Sci}, + Month = {Dec}, + Pages = {921--926}, + Title = {{{E}vidence for a gene-environment interaction in predicting behavioral inhibition in middle childhood}}, + Volume = {16}, + Year = {2005}} + +@article{Frank2008c, + Author = {Frank, G. K. and Oberndorfer, T. A. and Simmons, A. N. and Paulus, M. P. and Fudge, J. L. and Yang, T. T. and Kaye, W. 
H.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {1559--1569}, + Title = {{{S}ucrose activates human taste pathways differently from artificial sweetener}}, + Volume = {39}, + Year = {2008}} + +@article{Frank2008, + Author = {Frank, M. J.}, + Journal = {Schizophr Bull}, + Month = {Nov}, + Pages = {1008--1011}, + Title = {{{S}chizophrenia: a computational reinforcement learning perspective}}, + Volume = {34}, + Year = {2008}} + +@article{Frank2006a, + Author = {Frank, M. J.}, + Journal = {Neural Netw}, + Month = {Oct}, + Pages = {1120--1136}, + Title = {{{H}old your horses: a dynamic computational role for the subthalamic nucleus in decision making}}, + Volume = {19}, + Year = {2006}} + +@article{Frank2005a, + Author = {Frank, M. J.}, + Journal = {J Cogn Neurosci}, + Month = {Jan}, + Pages = {51--72}, + Title = {{{D}ynamic dopamine modulation in the basal ganglia: a neurocomputational account of cognitive deficits in medicated and nonmedicated {P}arkinsonism}}, + Volume = {17}, + Year = {2005}} + +@article{Frank2006, + Author = {Frank, M. J. and Claus, E. D.}, + Journal = {Psychological Review}, + Pages = {300--326}, + Title = {Anatomy of a Decision: {S}triato--Orbitofrontal Interactions in Reinforcement Learning, Decision Making, and Reversal}, + Volume = {113}, + Year = {2006}} + +@article{Frank2006d, + Author = {Frank, M. J. and Claus, E. D.}, + Journal = {Psychol Rev}, + Pages = {300--326}, + Title = {{{A}natomy of a decision: striato-orbitofrontal interactions in reinforcement learning, decision making, and reversal}}, + Volume = {113}, + Year = {2006}} + +@article{Frank2007, + Author = {Frank, M. J. and D'Lauro, C. and Curran, T.}, + Journal = {Cogn Affect Behav Neurosci}, + Month = {Dec}, + Pages = {297--308}, + Title = {{{C}ross-task individual differences in error processing: neural, electrophysiological, and genetic components}}, + Volume = {7}, + Year = {2007}} + +@article{Frank2009, + Author = {Frank, M. J. and Hutchison, K.}, + Journal = {Neuroscience}, + Month = {Apr}, + Title = {{{G}enetic contributions to avoidance-based decisions: striatal {D}2 receptor polymorphisms}}, + Year = {2009}} + +@article{Frank2008a, + Author = {Frank, M. J. and Kong, L.}, + Journal = {Psychol Aging}, + Month = {Jun}, + Pages = {392--398}, + Title = {{{L}earning to avoid in older age}}, + Volume = {23}, + Year = {2008}} + +@article{Frank2007b, + Author = {Frank, M. J. and Moustafa, A. A. and Haughey, H. M. and Curran, T. and Hutchison, K. E.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Oct}, + Pages = {16311--16316}, + Title = {{{G}enetic triple dissociation reveals multiple roles for dopamine in reinforcement learning}}, + Volume = {104}, + Year = {2007}} + +@article{Frank2006c, + Author = {Frank, M. J. and O'Reilly, R. C.}, + Journal = {Behav. Neurosci.}, + Pages = {497--517}, + Title = {{{A} mechanistic account of striatal dopamine function in human cognition: psychopharmacological studies with cabergoline and haloperidol}}, + Volume = {120}, + Year = {2006}} + +@article{Frank2008b, + Author = {Frank, M. J. and O'Reilly, R. C. and Curran, T.}, + Journal = {Behav Brain Funct}, + Pages = {5}, + Title = {{{M}idazolam, hippocampal function, and transitive inference: {R}eply to {G}reene}}, + Volume = {4}, + Year = {2008}} + +@article{Frank2006b, + Author = {Frank, M. J. and O'Reilly, R. C.
and Curran, T.}, + Journal = {Psychol Sci}, + Month = {Aug}, + Pages = {700--707}, + Title = {{{W}hen memory fails, intuition reigns: midazolam enhances implicit inference in humans}}, + Volume = {17}, + Year = {2006}} + +@article{Frank2007a, + Author = {Frank, M. J. and Samanta, J. and Moustafa, A. A. and Sherman, S. J.}, + Journal = {Science}, + Month = {Nov}, + Pages = {1309--1312}, + Title = {{{H}old your horses: impulsivity, deep brain stimulation, and medication in parkinsonism}}, + Volume = {318}, + Year = {2007}} + +@article{Frank2007c, + Author = {Frank, M. J. and Santamaria, A. and O'Reilly, R. C. and Willcutt, E.}, + Journal = {Neuropsychopharmacology}, + Month = {Jul}, + Pages = {1583--1599}, + Title = {{{T}esting computational models of dopamine and noradrenaline dysfunction in attention deficit/hyperactivity disorder}}, + Volume = {32}, + Year = {2007}} + +@article{Frank2004, + Author = {Frank, M. J. and Seeberger, L. C. and O'Reilly, R. C.}, + Journal = {Science}, + Month = {Dec}, + Pages = {1940--1943}, + Title = {{{B}y carrot or by stick: cognitive reinforcement learning in parkinsonism}}, + Volume = {306}, + Year = {2004}} + +@article{Frank2005, + Author = {Frank, M. J. and Woroch, B. S. and Curran, T.}, + Journal = {Neuron}, + Pages = {495--501}, + Title = {{{E}rror-related negativity predicts reinforcement learning and conflict biases}}, + Volume = {47}, + Year = {2005}} + +@article{franklin2002decreased, + Author = {Franklin, T. R. and Acton, P. D. and Maldjian, J. A. and Gray, J. D. and Croft, J. R. and Dackis, C. A. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Biological psychiatry}, + Number = {2}, + Pages = {134--142}, + Publisher = {Elsevier}, + Title = {{Decreased gray matter concentration in the insular, orbitofrontal, cingulate, and temporal cortices of cocaine patients}}, + Volume = {51}, + Year = {2002}} + +@article{Franklin2002, + Author = {Franklin, T. R. and Acton, P. D. and Maldjian, J. A. and Gray, J. D. and Croft, J. R. and Dackis, C. A. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Biol. Psychiatry}, + Month = {Jan}, + Pages = {134--142}, + Title = {{{D}ecreased gray matter concentration in the insular, orbitofrontal, cingulate, and temporal cortices of cocaine patients}}, + Volume = {51}, + Year = {2002}} + +@article{Franklin2008, + Author = {Franklin, T. R. and Ehrman, R. and Lynch, K. G. and Harper, D. and Sciortino, N. and O'Brien, C. P. and Childress, A. R.}, + Journal = {J Womens Health (Larchmt)}, + Month = {Mar}, + Pages = {287--292}, + Title = {{{M}enstrual cycle phase at quit date predicts smoking status in an {N}{R}{T} treatment trial: a retrospective analysis}}, + Volume = {17}, + Year = {2008}} + +@article{Franklin2009a, + Author = {Franklin, T. R. and Harper, D. and Kampman, K. and Kildea-McCrea, S. and Jens, W. and Lynch, K. G. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Drug Alcohol Depend}, + Month = {Jul}, + Pages = {30--36}, + Title = {{{T}he {G}{A}{B}{A} {B} agonist baclofen reduces cigarette consumption in a preliminary double-blind placebo-controlled smoking reduction study}}, + Volume = {103}, + Year = {2009}} + +@article{Franklin2009, + Author = {Franklin, T. R. and Lohoff, F. W. and Wang, Z. and Sciortino, N. and Harper, D. and Li, Y. and Jens, W. and Cruz, J. and Kampman, K. and Ehrman, R. and Berrettini, W. and Detre, J. A. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Neuropsychopharmacology}, + Month = {Feb}, + Pages = {717--728}, + Title = {{{D}{A}{T} genotype modulates brain and behavioral responses elicited by cigarette cues}}, + Volume = {34}, + Year = {2009}} + +@article{Franklin2004, + Author = {Franklin, T. R. and Napier, K.
and Ehrman, R. and Gariti, P. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Nicotine Tob. Res.}, + Month = {Feb}, + Pages = {171--175}, + Title = {{{R}etrospective study: influence of menstrual cycle on cue-induced cigarette craving}}, + Volume = {6}, + Year = {2004}} + +@article{Franklin2007, + Author = {Franklin, T. R. and Wang, Z. and Wang, J. and Sciortino, N. and Harper, D. and Li, Y. and Ehrman, R. and Kampman, K. and O'Brien, C. P. and Detre, J. A. and Childress, A. R.}, + Journal = {Neuropsychopharmacology}, + Month = {Nov}, + Pages = {2301--2309}, + Title = {{{L}imbic activation to cigarette smoking cues independent of nicotine withdrawal: a perfusion f{M}{R}{I} study}}, + Volume = {32}, + Year = {2007}} + +@article{Frederick2004, + Author = {Frederick, B. D. and Lyoo, I. K. and Satlin, A. and Ahn, K. H. and Kim, M. J. and Yurgelun-Todd, D. A. and Cohen, B. M. and Renshaw, P. F.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Month = {Dec}, + Pages = {1313--1322}, + Title = {{{I}n vivo proton magnetic resonance spectroscopy of the temporal lobe in {A}lzheimer's disease}}, + Volume = {28}, + Year = {2004}} + +@article{Freedland2000a, + Author = {Freedland, C. S. and Poston, J. S. and Porrino, L. J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Oct}, + Pages = {265--270}, + Title = {{{E}ffects of {S}{R}141716{A}, a central cannabinoid receptor antagonist, on food-maintained responding}}, + Volume = {67}, + Year = {2000}} + +@article{Freedland2001, + Author = {Freedland, C. S. and Sharpe, A. L. and Samson, H. H. and Porrino, L. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Feb}, + Pages = {277--282}, + Title = {{{E}ffects of {S}{R}141716{A} on ethanol and sucrose self-administration}}, + Volume = {25}, + Year = {2001}} + +@article{Freedland2000, + Author = {Freedland, C. S. and Smith, H. R. and Hart, S. L. and Daunais, J. B. and Davies, H. M. and Porrino, L. J.}, + Journal = {Brain Res.}, + Month = {Jun}, + Pages = {98--104}, + Title = {{{A} comparison of the behavioral effects of the repeated administration of {P}{T}{T}, 2beta-propanoyl-3beta-(4-tolyl)tropane and cocaine}}, + Volume = {869}, + Year = {2000}} + +@article{Freedland2002, + Author = {Freedland, C. S. and Whitlow, C. T. and Miller, M. D. and Porrino, L. J.}, + Journal = {Synapse}, + Month = {Aug}, + Pages = {134--142}, + Title = {{{D}ose-dependent effects of {D}elta9-tetrahydrocannabinol on rates of local cerebral glucose utilization in rat}}, + Volume = {45}, + Year = {2002}} + +@article{Freedland2003, + Author = {Freedland, C. S. and Whitlow, C. T. and Smith, H. R. and Porrino, L. J.}, + Journal = {Brain Res.}, + Month = {Feb}, + Pages = {169--179}, + Title = {{{F}unctional consequences of the acute administration of the cannabinoid receptor antagonist, {S}{R}141716{A}, in cannabinoid-naive and -tolerant animals: a quantitative 2-[14{C}]deoxyglucose study}}, + Volume = {962}, + Year = {2003}} + +@article{Freeman2001, + Author = {Freeman, W. M. and Nader, M. A. and Nader, S. H. and Robertson, D. J. and Gioia, L. and Mitchell, S. M. and Daunais, J. B. and Porrino, L. J. and Friedman, D. P. and Vrana, K. E.}, + Journal = {J. Neurochem.}, + Month = {Apr}, + Pages = {542--549}, + Title = {{{C}hronic cocaine-mediated changes in non-human primate nucleus accumbens gene expression}}, + Volume = {77}, + Year = {2001}} + +@article{Freeman2000, + Author = {Freeman, W. M. and Yohrling, G. J. and Daunais, J. B. and Gioia, L. and Hart, S. L. and Porrino, L. J. and Davies, H. M. 
and Vrana, K. E.}, + Journal = {Drug Alcohol Depend}, + Month = {Dec}, + Pages = {15--21}, + Title = {{{A} cocaine analog, 2beta-propanoyl-3beta-(4-tolyl)-tropane ({P}{T}{T}), reduces tyrosine hydroxylase in the mesolimbic dopamine pathway}}, + Volume = {61}, + Year = {2000}} + +@article{Freet2009, + Author = {Freet, C. S. and Steffen, C. and Nestler, E. J. and Grigson, P. S.}, + Journal = {Behav. Neurosci.}, + Month = {Apr}, + Pages = {397--407}, + Title = {{{O}verexpression of {D}elta{F}os{B} is associated with attenuated cocaine-induced suppression of saccharin intake in mice}}, + Volume = {123}, + Year = {2009}} + +@article{Freireich1963, + Author = {Freireich, E. J. and Gehan, E. and {Frei III}, E. and Schroeder, L. R. and Wolman, I. J. and Anbari, R. and Burgert, E. O. and Mills, S. D. and Pinkel, D. and Selawry, O. S. and Moon, J. H. and Gendel, B. R. and Spurr, C. L. and Storrs, R. and Haurani, F. and Hoogstraten, B. and Lee, S.}, + Journal = {Blood}, + Pages = {699--716}, + Title = {The Effect of 6--{M}ercaptopurine on the Duration of Steroid--Induced Remissions in Acute Leukemia: {A} Model for Evaluation of Other Potentially Useful Therapy}, + Volume = {21}, + Year = {1963}} + +@article{Fresquet2004, + Author = {Fresquet, N. and Angst, M.J. and Sandner, G.}, + Journal = {Behavioural brain research}, + Number = {2}, + Pages = {357--365}, + Publisher = {Elsevier}, + Title = {{Insular cortex lesions alter conditioned taste avoidance in rats differentially when using two methods of sucrose delivery}}, + Volume = {153}, + Year = {2004}} + +@article{Frick1996, + Author = {Frick, R. W.}, + Journal = {Psychological Methods}, + Pages = {379--390}, + Title = {The Appropriate Use of Null Hypothesis Testing}, + Volume = {1}, + Year = {1996}} + +@article{Fridberg2009, + Author = {Fridberg, D. J. and Queller, S. and Ahn, W.-Y. and Kim, W. and Bishara, A. J. and Busemeyer, J. R. and Porrino, L. and Stout, J. C.}, + Journal = {Journal of Mathematical Psychology}, + Owner = {Woo-Young Ahn}, + Pages = {Accepted pending minor revision}, + Timestamp = {2009.08.06}, + Title = {Cognitive Mechanisms Underlying Risky Decision-Making in Chronic Cannabis Users}, + Year = {2009}} + +@book{Friedman1998, + Address = {New York}, + Author = {Friedman, L. M. and Furberg, C. D. and DeMets, D. L.}, + Publisher = {Springer}, + Title = {Fundamentals of Clinical Trials (3rd ed.)}, + Year = {1998}} + +@article{Friedman2004, + Author = {Friedman, S. D. and Dager, S. R. and Parow, A. and Hirashima, F. and Demopulos, C. and Stoll, A. L. and Lyoo, I. K. and Dunner, D. L. and Renshaw, P. F.}, + Journal = {Biol. Psychiatry}, + Month = {Sep}, + Pages = {340--348}, + Title = {{{L}ithium and valproic acid treatment effects on brain chemistry in bipolar disorder}}, + Volume = {56}, + Year = {2004}} + +@article{Fryer2007, + Author = {Fryer, S. L. and Tapert, S. F. and Mattson, S. N. and Paulus, M. P. and Spadoni, A. D. and Riley, E. P.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {1415--1424}, + Title = {{{P}renatal alcohol exposure affects frontal-striatal {B}{O}{L}{D} response during inhibitory control}}, + Volume = {31}, + Year = {2007}} + +@article{Gaag1975, + Abstract = {Hospital use in the Netherlands is examined in a cross-section analysis + of 1969 and 1971 data for 120 service regions.
Elasticities of admissions + with respect to bed supply and supply of general practitioners are + calculated, and the substitutability of first level care (by general + practitioners) for hospital care is considered. Substitution effects + found indicate that the Dutch government's plan to reduce the ratio + of hospital beds to population is feasible.}, + Author = {J. van der Gaag and F. F. Rutten and B. M. van Praag}, + Journal = {Health Serv Res}, + Keywords = {Adolescent; Adult; Age Factors; Aged; Child; Child, Preschool; Delivery of Health Care; Family Practice; Female; Health Manpower, supply /&/ distribution; Hospitalization; Hospitals, General, supply /&/ distribution/utilization; Humans; Infant; Insurance, Health; Length of Stay; Male; Middle Aged; Netherlands; Population Density; Regression Analysis; Sex Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {3}, + Owner = {Young}, + Pages = {264--277}, + Pmid = {1225868}, + Timestamp = {2010.05.01}, + Title = {Determinants of hospital utilization in the Netherlands.}, + Volume = {10}, + Year = {1975}} + +@book{Galavotti2005, + Address = {Stanford}, + Author = {Galavotti, M. C.}, + Publisher = {CSLI Publications}, + Title = {A Philosophical Introduction to Probability}, + Year = {2005}} + +@article{Galynker2007, + Author = {Galynker, I. I. and Eisenberg, D. and Matochik, J. A. and Gertmenian-King, E. and Cohen, L. and Kimes, A. S. and Contoreggi, C. and Kurian, V. and Ernst, M. and Rosenthal, R. N. and Prosser, J. and London, E. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Oct}, + Pages = {166--174}, + Title = {{{C}erebral metabolism and mood in remitted opiate dependence}}, + Volume = {90}, + Year = {2007}} + +@article{Ganguli2008a, + Author = {Ganguli, S. and Bisley, J. W. and Roitman, J. D. and Shadlen, M. N. and Goldberg, M. E. and Miller, K. D.}, + Journal = {Neuron}, + Month = {Apr}, + Pages = {15--25}, + Title = {{{O}ne-dimensional dynamics of attention and decision making in {L}{I}{P}}}, + Volume = {58}, + Year = {2008}} + +@article{Garavan2007, + Author = {Garavan, H. and Hester, R.}, + Journal = {Neuropsychol Rev}, + Month = {Sep}, + Pages = {337--345}, + Title = {{{T}he role of cognitive control in cocaine dependence}}, + Volume = {17}, + Year = {2007}} + +@article{Garavan2006, + Author = {Garavan, H. and Hester, R. and Murphy, K. and Fassbender, C. and Kelly, C.}, + Journal = {Brain Res.}, + Month = {Aug}, + Pages = {130--142}, + Title = {{{I}ndividual differences in the functional neuroanatomy of inhibitory control}}, + Volume = {1105}, + Year = {2006}} + +@article{Garavan2008, + Author = {Garavan, H. and Kaufman, J. N. and Hester, R.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Oct}, + Pages = {3267--3276}, + Title = {{{A}cute effects of cocaine on the neurobiology of cognitive control}}, + Volume = {363}, + Year = {2008}} + +@article{Garavan2005, + Author = {Garavan, H. and Stout, J. C.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Apr}, + Pages = {195--201}, + Title = {{{N}eurocognitive insights into substance abuse}}, + Volume = {9}, + Year = {2005}} + +@book{Gardiner2004, + Address = {Berlin}, + Author = {Gardiner, C. W.}, + Edition = {3rd}, + Publisher = {Springer Verlag}, + Title = {Handbook of Stochastic Methods}, + Year = {2004}} + +@article{Garrett2000, + Author = {Garrett, A. S. and Flowers, D. L. and Absher, J. R. and Fahey, F. H. and Gage, H. D. and Keyes, J. W. and Porrino, L. J. and Wood, F. 
B.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {111--123}, + Title = {{{C}ortical activity related to accuracy of letter recognition}}, + Volume = {11}, + Year = {2000}} + +@article{Gaspar1989, + Author = {Gaspar, P. and Berger, B. and Febvret, A. and Vigny, A. and Henry, J.P.}, + Journal = {The Journal of Comparative Neurology}, + Number = {2}, + Publisher = {Alan R. Liss, Inc. New York}, + Title = {{Catecholamine innervation of the human cerebral cortex as revealed by comparative immunohistochemistry of tyrosine hydroxylase and dopamine-beta-hydroxylase}}, + Volume = {279}, + Year = {1989}} + +@article{Gatley1998, + Author = {Gatley, S. J. and Ding, Y. S. and Brady, D. and Gifford, A. N. and Dewey, S. L. and Carroll, F. I. and Fowler, J. S. and Volkow, N. D.}, + Journal = {Nucl. Med. Biol.}, + Month = {Jul}, + Pages = {449--454}, + Title = {{{I}n vitro and ex vivo autoradiographic studies of nicotinic acetylcholine receptors using [18{F}]fluoronochloroepibatidine in rodent and human brain}}, + Volume = {25}, + Year = {1998}} + +@article{Gatley2000, + Author = {Gatley, S. J. and Gifford, A. N. and Carroll, F. I. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Dec}, + Pages = {483--488}, + Title = {{{S}ensitivity of binding of high-affinity dopamine receptor radioligands to increased synaptic dopamine}}, + Volume = {38}, + Year = {2000}} + +@article{Gatley1998a, + Author = {Gatley, S. J. and Volkow, N. D.}, + Journal = {Drug Alcohol Depend}, + Pages = {97--108}, + Title = {{{A}ddiction and imaging of the living human brain}}, + Volume = {51}, + Year = {1998}} + +@article{Gatley1999, + Author = {Gatley, S. J. and Volkow, N. D. and Gifford, A. N. and Fowler, J. S. and Dewey, S. L. and Ding, Y. S. and Logan, J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Sep}, + Pages = {93--100}, + Title = {{{D}opamine-transporter occupancy after intravenous doses of cocaine and methylphenidate in mice and humans}}, + Volume = {146}, + Year = {1999}} + +@article{Gatley2005, + Author = {Gatley, S. J. and Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. and Ding, Y. S. and Gerasimov, M.}, + Journal = {Curr. Pharm. Des.}, + Pages = {3203--3219}, + Title = {{{P}{E}{T} imaging in clinical drug abuse research}}, + Volume = {11}, + Year = {2005}} + +@article{Geisser1975, + Author = {Geisser, S.}, + Journal = {Journal of the American Statistical Association}, + Pages = {320--328}, + Title = {The Predictive Sample Reuse Method With Applications}, + Volume = {70}, + Year = {1975}} + +@article{Gelfand1992, + Author = {Gelfand, A. E. and Smith, A. F. M. and Lee, T.--M.}, + Journal = {Journal of the American Statistical Association}, + Pages = {523--532}, + Title = {{B}ayesian Analysis of Constrained Parameter and Truncated Data Problems Using {G}ibbs Sampling}, + Volume = {87}, + Year = {1992}} + +@book{Gelman2004, + Address = {Boca Raton (FL)}, + Author = {Gelman, A. and Carlin, J. B. and Stern, H. S. and Rubin, D. B.}, + Publisher = {Chapman \& Hall/CRC}, + Title = {{B}ayesian Data Analysis (2nd ed.)}, + Year = {2004}} + +@article{Gelman1999, + Author = {Gelman, A. and Rubin, D. B.}, + Journal = {Sociological Methods \& Research}, + Pages = {403--410}, + Title = {Evaluating and Using Statistical Methods in the Social Sciences}, + Volume = {27}, + Year = {1999}} + +@article{Gelman2006, + Author = {Gelman, A.
and Stern, H.}, + Journal = {The American Statistician}, + Pages = {328--331}, + Title = {The Difference Between ``Significant'' and ``Not Significant'' is not Itself Statistically Significant}, + Volume = {60}, + Year = {2006}} + +@article{Genova1997, + Author = {Genova, L. and Berke, J. and Hyman, S. E.}, + Journal = {Neurobiol. Dis.}, + Pages = {239--246}, + Title = {{{M}olecular adaptations to psychostimulants in striatal neurons: toward a pathophysiology of addiction}}, + Volume = {4}, + Year = {1997}} + +@article{George1991, + Author = {George, F. R. and Porrino, L. J. and Ritz, M. C. and Goldberg, S. R.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {457--462}, + Title = {{{I}nbred rat strain comparisons indicate different sites of action for cocaine and amphetamine locomotor stimulant effects}}, + Volume = {104}, + Year = {1991}} + +@article{George2001, + Author = {George, M. S. and Anton, R. F. and Bloomer, C. and Teneback, C. and Drobes, D. J. and Lorberbaum, J. P. and Nahas, Z. and Vincent, D. J.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Apr}, + Pages = {345--352}, + Title = {{{A}ctivation of prefrontal cortex and anterior thalamus in alcoholic subjects on exposure to alcohol-specific cues}}, + Volume = {58}, + Year = {2001}} + +@article{Gerasimov2000a, + Author = {Gerasimov, M. R. and Franceschi, M. and Volkow, N. D. and Gifford, A. and Gatley, S. J. and Marsteller, D. and Molina, P. E. and Dewey, S. L.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Oct}, + Pages = {51--57}, + Title = {{{C}omparison between intraperitoneal and oral methylphenidate administration: {A} microdialysis and locomotor activity study}}, + Volume = {295}, + Year = {2000}} + +@article{Gerasimov2000, + Author = {Gerasimov, M. R. and Franceschi, M. and Volkow, N. D. and Rice, O. and Schiffer, W. K. and Dewey, S. L.}, + Journal = {Synapse}, + Month = {Dec}, + Pages = {432--437}, + Title = {{{S}ynergistic interactions between nicotine and cocaine or methylphenidate depend on the dose of dopamine transporter inhibitor}}, + Volume = {38}, + Year = {2000}} + +@article{Gerges2004, + Author = {Gerges, N.Z. and Alzoubi, K.H. and Park, C.R. and Diamond, D.M. and Alkadhi, K.A.}, + Journal = {Behavioural brain research}, + Number = {1}, + Pages = {77--84}, + Publisher = {Elsevier}, + Title = {{Adverse effect of the combination of hypothyroidism and chronic psychosocial stress on hippocampus-dependent memory in rats}}, + Volume = {155}, + Year = {2004}} + +@article{Gerring2002, + Author = {Gerring, J. P. and Slomine, B. and Vasa, R. A. and Grados, M. and Chen, A. and Rising, W. and Christensen, J. R. and Denckla, M. B. and Ernst, M.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Feb}, + Pages = {157--165}, + Title = {{{C}linical predictors of posttraumatic stress disorder after closed head injury in children}}, + Volume = {41}, + Year = {2002}} + +@article{Gescuk1994, + Author = {Gescuk, B. and Lang, S. and Porrino, L. J. and Kornetsky, C.}, + Journal = {Brain Res.}, + Month = {Nov}, + Pages = {303--311}, + Title = {{{T}he local cerebral metabolic effects of morphine in rats exposed to escapable footshock}}, + Volume = {663}, + Year = {1994}} + +@article{Geurtsinpress, + Author = {Geurts, H. M. and {van der Oord}, S. and Crone, E.
A.}, + Journal = {Journal of Abnormal Child Psychology}, + Pages = {??--??}, + Title = {Hot and Cool Aspects of Cognitive Control in Children With {ADHD}: {D}ecision--Making and Inhibition}, + Volume = {??}, + Year = {in press}} + +@article{Geweke1983, + Author = {Geweke, J. and Porter--Hudak, S.}, + Journal = {Journal of Time Series Analysis}, + Pages = {221--237}, + Title = {The Estimation and Application of Long--memory Time Series Models}, + Volume = {4}, + Year = {1983}} + +@article{Geyer1992, + Author = {Geyer, M. A. and Paulus, M. P.}, + Journal = {NIDA Res. Monogr.}, + Pages = {203--235}, + Title = {{{M}ultivariate and nonlinear approaches to characterizing drug effects on the locomotor and investigatory behavior of rats}}, + Volume = {124}, + Year = {1992}} + +@article{Gifford1999a, + Author = {Gifford, A. N. and Bruneus, M. and Gatley, S. J. and Lan, R. and Makriyannis, A. and Volkow, N. D.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Feb}, + Pages = {478--483}, + Title = {{{L}arge receptor reserve for cannabinoid actions in the central nervous system}}, + Volume = {288}, + Year = {1999}} + +@article{Gifford2000a, + Author = {Gifford, A. N. and Bruneus, M. and Gatley, S. J. and Volkow, N. D.}, + Journal = {Br. J. Pharmacol.}, + Month = {Oct}, + Pages = {645--650}, + Title = {{{C}annabinoid receptor-mediated inhibition of acetylcholine release from hippocampal and cortical synaptosomes}}, + Volume = {131}, + Year = {2000}} + +@article{Gifford1999, + Author = {Gifford, A. N. and Bruneus, M. and Lin, S. and Goutopoulos, A. and Makriyannis, A. and Volkow, N. D. and Gatley, S. J.}, + Journal = {Eur. J. Pharmacol.}, + Month = {Oct}, + Pages = {9--14}, + Title = {{{P}otentiation of the action of anandamide on hippocampal slices by the fatty acid amide hydrolase inhibitor, palmitylsulphonyl fluoride ({A}{M} 374)}}, + Volume = {383}, + Year = {1999}} + +@article{Gifford2002, + Author = {Gifford, A. N. and Makriyannis, A. and Volkow, N. D. and Gatley, S. J.}, + Journal = {Chem. Phys. Lipids}, + Month = {Dec}, + Pages = {65--72}, + Title = {{{I}n vivo imaging of the brain cannabinoid receptor}}, + Volume = {121}, + Year = {2002}} + +@article{Gifford2000, + Author = {Gifford, A. N. and Park, M. H. and Kash, T. L. and Herman, L. M. and Park, E. H. and Gatley, S. J. and Volkow, N. D.}, + Journal = {Naunyn Schmiedebergs Arch. Pharmacol.}, + Month = {Nov}, + Pages = {413--418}, + Title = {{{E}ffect of amphetamine-induced dopamine release on radiotracer binding to {D}1 and {D}2 receptors in rat brain striatal slices}}, + Volume = {362}, + Year = {2000}} + +@article{Gifford1997, + Author = {Gifford, A. N. and Tang, Y. and Gatley, S. J. and Volkow, N. D. and Lan, R. and Makriyannis, A.}, + Journal = {Neurosci. Lett.}, + Month = {Nov}, + Pages = {84--86}, + Title = {{{E}ffect of the cannabinoid receptor {S}{P}{E}{C}{T} agent, {A}{M} 281, on hippocampal acetylcholine release from rat brain slices}}, + Volume = {238}, + Year = {1997}} + +@incollection{Gigerenzer1993, + Address = {Hillsdale (NJ)}, + Author = {Gigerenzer, G.}, + Booktitle = {A Handbook for Data Analysis in the Behavioral Sciences: Methodological Issues}, + Editor = {Keren, G.
and Lewis, C.}, + Pages = {311--339}, + Publisher = {Erlbaum}, + Title = {The {S}uperego, the {E}go, and the {I}d in Statistical Reasoning}, + Year = {1993}} + +@article{Gigerenzer1998, + Author = {Gigerenzer, G.}, + Journal = {Behavioral and Brain Sciences}, + Pages = {199--200}, + Title = {We Need Statistical Thinking, not Statistical Rituals}, + Volume = {21}, + Year = {1998}} + +@article{Gilden2001, + Author = {Gilden, D. L.}, + Journal = {Psychological Review}, + Pages = {33--56}, + Title = {Cognitive Emissions of $1/f$ Noise}, + Volume = {108}, + Year = {2001}} + +@article{Gilden1997, + Author = {Gilden, D. L.}, + Journal = {Psychological Science}, + Pages = {296--301}, + Title = {Fluctuations in the Time Required for Elementary Decisions}, + Volume = {8}, + Year = {1997}} + +@article{Gilden1995, + Author = {Gilden, D. L. and Thornton, T. and Mallon, M. W.}, + Journal = {Science}, + Pages = {1837--1839}, + Title = {$1/f$ Noise in Human Cognition}, + Volume = {267}, + Year = {1995}} + +@article{Gilden1995a, + Author = {Gilden, D. L. and Wilson, S. G.}, + Journal = {Cognitive Psychology}, + Pages = {17--64}, + Title = {On the Nature of Streaks in Signal Detection}, + Volume = {28}, + Year = {1995}} + +@article{Gilden1995b, + Author = {Gilden, D. L. and Wilson, S. G.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {260--265}, + Title = {Streaks in Skilled Performance}, + Volume = {2}, + Year = {1995}} + +@book{Gill2002, + Address = {Boca Raton (FL)}, + Author = {Gill, J.}, + Publisher = {CRC Press}, + Title = {{B}ayesian Methods: {A} Social and Behavioral Sciences Approach}, + Year = {2002}} + +@book{Gilmore1981, + Address = {New York}, + Author = {Gilmore, R.}, + Publisher = {Dover}, + Title = {Catastrophe Theory for Scientists and Engineers}, + Year = {1981}} + +@article{Giraitis2001, + Author = {Giraitis, L. and Kokoszka, P. and Leipus, R.}, + Journal = {Journal of Applied Probability}, + Pages = {1033--1054}, + Title = {Testing for Long Memory in the Presence of a General Trend}, + Volume = {38}, + Year = {2001}} + +@article{Gisiger2001, + Author = {Gisiger, T.}, + Journal = {Biological Reviews of the Cambridge Philosophical Society}, + Pages = {161--209}, + Title = {Scale Invariance in Biology: Coincidence or Footprint of a Universal Mechanism?}, + Volume = {76}, + Year = {2001}} + +@article{Gleissner2004, + Author = {Gleissner, T. and Wilms, J. and Pottschmidt, K. and Uttley, P. and Nowak, M. A. and Staubert, R.}, + Journal = {Astronomy \& Astrophysics}, + Pages = {1091--1104}, + Title = {Long Term Variability of {C}yg {X}-1. {II}. The rms-Flux Relation}, + Volume = {414}, + Year = {2004}} + +@article{Glenner1984, + Author = {Glenner, G.G. and Wong, C.W.}, + Journal = {Biochemical and Biophysical Research Communications}, + Number = {3}, + Pages = {885--890}, + Publisher = {Elsevier}, + Title = {{Alzheimer's disease: initial report of the purification and characterization of a novel cerebrovascular amyloid protein}}, + Volume = {120}, + Year = {1984}} + +@article{Glickmaninpress, + Author = {Glickman, M. E. and Gray, J. R. and Morales, C. J.}, + Journal = {Psychometrika}, + Title = {Combining Speed and Accuracy to Assess Error--free Cognitive Processes}, + Year = {in press}} + +@article{Glicksohn2007, + Author = {Glicksohn, J. and Naor-Ziv, R. 
and Leshem, R.}, + Journal = {Cognition}, + Month = {Oct}, + Pages = {195--205}, + Title = {{{I}mpulsive decision-making: learning to gamble wisely?}}, + Volume = {105}, + Year = {2007}} + +@article{Glimcher2001, + Author = {Glimcher, P. W. and Ciaramitaro, V. M. and Platt, M. L. and Bayer, H. M. and Brown, M. A. and Handel, A.}, + Journal = {J. Neurosci. Methods}, + Month = {Jul}, + Pages = {131--144}, + Title = {{{A}pplication of neurosonography to experimental physiology}}, + Volume = {108}, + Year = {2001}} + +@article{Glover2004, + Author = {Glover, S. and Dixon, P.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {791--806}, + Title = {Likelihood Ratios: A Simple and Flexible Statistic for Empirical Psychologists}, + Volume = {11}, + Year = {2004}} + +@article{Gluck1988, + Author = {Gluck, M. A. and Bower, G. H.}, + Journal = {Journal of Experimental Psychology: General}, + Owner = {Wooyoung Ahn}, + Pages = {227--247}, + Timestamp = {2007.05.04}, + Title = {From conditioning to category learning: An adaptive network model}, + Volume = {117}, + Year = {1988}} + +@article{Goel2009, + Author = {Goel, V. and Stollstorff, M. and Nakic, M. and Knutson, K. and Grafman, J.}, + Journal = {Neuropsychologia}, + Month = {Jun}, + Title = {{{A} role for right ventrolateral prefrontal cortex in reasoning about indeterminate relations}}, + Year = {2009}} + +@article{Goetz1994, + Author = {Goetz, C. G. and Bolla, K. I. and Rogers, S. M.}, + Journal = {Neurology}, + Month = {May}, + Pages = {801--809}, + Title = {{{N}eurologic health outcomes and {A}gent {O}range: {I}nstitute of {M}edicine report}}, + Volume = {44}, + Year = {1994}} + +@article{Goff2002, + Author = {Goff, D. C. and Hennen, J. and Lyoo, I. K. and Tsai, G. and Wald, L. L. and Evins, A. E. and Yurgelun-Todd, D. A. and Renshaw, P. F.}, + Journal = {Biol. Psychiatry}, + Month = {Mar}, + Pages = {493--497}, + Title = {{{M}odulation of brain and serum glutamatergic concentrations following a switch from conventional neuroleptics to olanzapine}}, + Volume = {51}, + Year = {2002}} + +@article{Gold2007a, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Annu. Rev. Neurosci.}, + Pages = {535--574}, + Title = {{{T}he neural basis of decision making}}, + Volume = {30}, + Year = {2007}} + +@article{Gold2003a, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {632--651}, + Title = {{{T}he influence of behavioral context on the representation of a perceptual decision in developing oculomotor commands}}, + Volume = {23}, + Year = {2003}} + +@article{Gold2002, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Neuron}, + Pages = {299--308}, + Title = {Banburismus and the Brain: {D}ecoding the Relationship Between Sensory Stimuli, Decisions, and Reward}, + Volume = {36}, + Year = {2002}} + +@article{Gold2002b, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {299--308}, + Title = {{{B}anburismus and the brain: decoding the relationship between sensory stimuli, decisions, and reward}}, + Volume = {36}, + Year = {2002}} + +@article{Gold2001, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Trends in Cognitive Science}, + Pages = {10--16}, + Title = {Neural Computations that Underlie Decisions About Sensory Stimuli}, + Volume = {5}, + Year = {2001}} + +@article{Gold2001b, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Trends Cogn. Sci. (Regul.
Ed.)}, + Month = {Jan}, + Pages = {10--16}, + Title = {{{N}eural computations that underlie decisions about sensory stimuli}}, + Volume = {5}, + Year = {2001}} + +@article{Gold2000a, + Author = {Gold, J. I. and Shadlen, M. N.}, + Journal = {Nature}, + Month = {Mar}, + Pages = {390--394}, + Title = {{{R}epresentation of a perceptual decision in developing oculomotor commands}}, + Volume = {404}, + Year = {2000}} + +@article{Golde2006a, + Abstract = {Alzheimer's disease (AD) is the most common form of dementia in industrialized + nations. If more effective therapies are not developed that either + prevent AD or block progression of the disease in its very early + stages, the economic and societal cost of caring for AD patients + will be devastating. Only two types of drugs are currently approved + for the treatment of AD: inhibitors of acetyl cholinesterase, which + symptomatically enhance cognitive state to some degree but are not + disease modifying; and the adamantane derivative, memantine. Memantine + preferentially blocks excessive NMDA receptor activity without disrupting + normal receptor activity and is thought to be a neuroprotective agent + that blocks excitotoxicty. Memantine therefore may have a potentially + disease modifying effect in multiple neurodegenerative conditions. + An improved understanding of the pathogeneses of AD has now led to + the identification of numerous therapeutic targets designed to alter + amyloid beta protein (Abeta) or tau accumulation. Therapies that + alter Abeta and tau through these various targets are likely to have + significant disease modifying effects. Many of these targets have + been validated in proof of concept studies in preclinical animal + models, and some potentially disease modifying therapies targeting + Abeta or tau are being tested in the clinic. This review will highlight + both the promise of and the obstacles to developing such disease + modifying AD therapies.}, + Author = {Todd E Golde}, + Doi = {10.1111/j.1471-4159.2006.04211.x}, + Institution = {Mayo Clinic College of Medicine, Department of Neuroscience, Mayo Clinic Jacksonville 4500 San Pablo Road., Jacksonville, Florida 32224, USA. tgolde@mayo.edu}, + Journal = {J Neurochem}, + Keywords = {Alzheimer Disease, genetics/metabolism/therapy; Alzheimer Vaccines; Amyloid beta-Protein, antagonists /&/ inhibitors/physiology/toxicity; Animals; Humans; Immunotherapy; Molecular Chaperones; Neurofibrillary Tangles, pathology; Peptide Hydrolases, metabolism; Phosphorylation; Protease Inhibitors, pharmacology; Protein Folding; tau Proteins, metabolism}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {3}, + Owner = {Young}, + Pages = {689--707}, + Pii = {JNC4211}, + Pmid = {17076654}, + Timestamp = {2009.12.10}, + Title = {Disease modifying therapy for AD?}, + Url = {http://dx.doi.org/10.1111/j.1471-4159.2006.04211.x}, + Volume = {99}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1471-4159.2006.04211.x}} + +@article{Goldman-Rakic1985, + Author = {Goldman-Rakic, P. S. and Porrino, L. J.}, + Journal = {J. Comp. Neurol.}, + Month = {Dec}, + Pages = {535--560}, + Title = {{{T}he primate mediodorsal ({M}{D}) nucleus and its projection to the frontal lobe}}, + Volume = {242}, + Year = {1985}} + +@article{Goldstein2008, + Author = {Goldstein, R. and Woicik, P. and Moeller, S. and Telang, F. and Jayne, M. and Wong, C. and Wang, G. and Fowler, J. and Volkow, N.}, + Journal = {J. Psychopharmacol. 
(Oxford)}, + Month = {Nov}, + Title = {{{L}iking and wanting of drug and non-drug rewards in active cocaine users: the {S}{T}{R}{A}{P}-{R} questionnaire}}, + Year = {2008}} + +@article{Goldstein2005, + Author = {Goldstein, R. Z. and Alia-Klein, N. and Leskovjan, A. C. and Fowler, J. S. and Wang, G. J. and Gur, R. C. and Hitzemann, R. and Volkow, N. D.}, + Journal = {Psychiatry Res}, + Month = {Jan}, + Pages = {13--22}, + Title = {{{A}nger and depression in cocaine addiction: association with the orbitofrontal cortex}}, + Volume = {138}, + Year = {2005}} + +@article{Goldstein2009, + Author = {Goldstein, R. Z. and Alia-Klein, N. and Tomasi, D. and Carrillo, J. H. and Maloney, T. and Woicik, P. A. and Wang, R. and Telang, F. and Volkow, N. D.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jun}, + Pages = {9453--9458}, + Title = {{{A}nterior cingulate cortex hypoactivations to an emotionally salient task in cocaine addiction}}, + Volume = {106}, + Year = {2009}} + +@article{Goldstein2007b, + Author = {Goldstein, R. Z. and Alia-Klein, N. and Tomasi, D. and Zhang, L. and Cottone, L. A. and Maloney, T. and Telang, F. and Caparelli, E. C. and Chang, L. and Ernst, T. and Samaras, D. and Squires, N. K. and Volkow, N. D.}, + Journal = {Am J Psychiatry}, + Month = {Jan}, + Pages = {43--51}, + Title = {{{I}s decreased prefrontal cortical sensitivity to monetary reward associated with impaired motivation and self-control in cocaine addiction?}}, + Volume = {164}, + Year = {2007}} + +@article{Goldstein2006, + Author = {Goldstein, R. Z. and Cottone, L. A. and Jia, Z. and Maloney, T. and Volkow, N. D. and Squires, N. K.}, + Journal = {Int J Psychophysiol}, + Month = {Nov}, + Pages = {272--279}, + Title = {{{T}he effect of graded monetary reward on cognitive event-related potentials and behavior in young healthy adults}}, + Volume = {62}, + Year = {2006}} + +@article{Goldstein2004, + Author = {Goldstein, R. Z. and Leskovjan, A. C. and Hoff, A. L. and Hitzemann, R. and Bashan, F. and Khalsa, S. S. and Wang, G. J. and Fowler, J. S. and Volkow, N. D.}, + Journal = {Neuropsychologia}, + Pages = {1447--1458}, + Title = {{{S}everity of neuropsychological impairment in cocaine and alcohol addiction: association with metabolism in the prefrontal cortex}}, + Volume = {42}, + Year = {2004}} + +@article{Goldstein2008a, + Author = {Goldstein, R. Z. and Parvaz, M. A. and Maloney, T. and Alia-Klein, N. and Woicik, P. A. and Telang, F. and Wang, G. J. and Volkow, N. D.}, + Journal = {Psychophysiology}, + Month = {Sep}, + Pages = {705--713}, + Title = {{{C}ompromised sensitivity to monetary reward in current cocaine users: an {E}{R}{P} study}}, + Volume = {45}, + Year = {2008}} + +@article{Goldstein2007d, + Author = {Goldstein, R. Z. and Tomasi, D. and Alia-Klein, N. and Cottone, L. A. and Zhang, L. and Telang, F. and Volkow, N. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Mar}, + Pages = {233--240}, + Title = {{{S}ubjective sensitivity to monetary gradients is associated with frontolimbic activation to reward in cocaine abusers}}, + Volume = {87}, + Year = {2007}} + +@article{Goldstein2009a, + Author = {Goldstein, R. Z. and Tomasi, D. and Alia-Klein, N. and Honorio Carrillo, J. and Maloney, T. and Woicik, P. A. and Wang, R. and Telang, F. and Volkow, N. D.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {6001--6006}, + Title = {{{D}opaminergic response to drug words in cocaine addiction}}, + Volume = {29}, + Year = {2009}} + +@article{Goldstein2007, + Author = {Goldstein, R. Z. and Tomasi, D. 
and Alia-Klein, N. and Zhang, L. and Telang, F. and Volkow, N. D.}, + Journal = {Neuroimage}, + Month = {Mar}, + Pages = {194--206}, + Title = {{{T}he effect of practice on a sustained attention task in cocaine abusers}}, + Volume = {35}, + Year = {2007}} + +@article{Goldstein2007c, + Author = {Goldstein, R. Z. and Tomasi, D. and Rajaram, S. and Cottone, L. A. and Zhang, L. and Maloney, T. and Telang, F. and Alia-Klein, N. and Volkow, N. D.}, + Journal = {Neuroscience}, + Month = {Feb}, + Pages = {1153--1159}, + Title = {{{R}ole of the anterior cingulate and medial orbitofrontal cortex in processing drug cues in cocaine addiction}}, + Volume = {144}, + Year = {2007}} + +@article{Goldstein2002a, + Author = {Goldstein, R. Z. and Volkow, N. D.}, + Journal = {Am J Psychiatry}, + Month = {Oct}, + Pages = {1642--1652}, + Title = {{{D}rug addiction and its underlying neurobiological basis: neuroimaging evidence for the involvement of the frontal cortex}}, + Volume = {159}, + Year = {2002}} + +@article{Goldstein2002, + Author = {Goldstein, R. Z. and Volkow, N. D. and Chang, L. and Wang, G. J. and Fowler, J. S. and Depue, R. A. and Gur, R. C.}, + Journal = {Neuroreport}, + Month = {Dec}, + Pages = {2253--2257}, + Title = {{{T}he orbitofrontal cortex in methamphetamine addiction: involvement in fear}}, + Volume = {13}, + Year = {2002}} + +@article{Goldstein2001, + Author = {Goldstein, R. Z. and Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Rajaram, S.}, + Journal = {Neuroreport}, + Month = {Aug}, + Pages = {2595--2599}, + Title = {{{A}ddiction changes orbitofrontal gyrus function: involvement in response inhibition}}, + Volume = {12}, + Year = {2001}} + +@article{Goldstein2007a, + Author = {Goldstein, R. Z. and Woicik, P. A. and Lukasik, T. and Maloney, T. and Volkow, N. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Jun}, + Pages = {97--101}, + Title = {{{D}rug fluency: a potential marker for cocaine use disorders}}, + Volume = {89}, + Year = {2007}} + +@article{Gomez2008, + Abstract = {This article investigates how the perspective from which we see an + object affects memory. Object identification can be affected by the + orientation of the object. Palmer, Rosch, and Chase (1981) coined + the term canonical to describe perspectives in which identification + performance is best. We present two experiments that tested the effects + of object perspective on memory. Our results revealed a double dissociation + between task (recognition and recall) and type of object perspective. + In recognition, items studied in the noncanonical viewpoint produced + higher proportions of "old" responses than did items studied in the + canonical viewpoint, whereas new objects presented from a noncanonical + viewpoint produced fewer "old" responses than did new objects presented + from the canonical viewpoint. In free recall, conversely, objects + studied from the noncanonical viewpoint produced lower recall rates + than did objects studied from the canonical viewpoint. These results, + which reveal a pattern similar to word frequency effects, support + the psychological reality of canonical viewpoints and the frequency-of-exposure-based + accounts of canonical viewpoint effects.}, + Author = {Pablo Gomez and Jennifer Shutter and Jeffrey N Rouder}, + Doi = {10.3758/PBR.15.5.940}, + Institution = {Department of Psychology, DePaul University, Chicago, Illinois 60614, USA. 
pgomez1@depaul.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Humans; Memory; Mental Recall; Recognition (Psychology); Visual Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {940--944}, + Pii = {15/5/940}, + Pmid = {18926985}, + Timestamp = {2009.08.15}, + Title = {Memory for objects in canonical and noncanonical viewpoints.}, + Url = {http://dx.doi.org/10.3758/PBR.15.5.940}, + Volume = {15}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.3758/PBR.15.5.940}} + +@article{Gonzalez2007, + Author = {Gonzalez, R. and Bechara, A. and Martin, E. M.}, + Journal = {J Clin Exp Neuropsychol}, + Month = {Feb}, + Pages = {155--159}, + Title = {{{E}xecutive functions among individuals with methamphetamine or alcohol as drugs of choice: preliminary observations}}, + Volume = {29}, + Year = {2007}} + +@article{Gonzalez2005, + Author = {Gonzalez, R. and Vassileva, J. and Bechara, A. and Grbesic, S. and Sworowski, L. and Novak, R. M. and Nunnally, G. and Martin, E. M.}, + Journal = {J Int Neuropsychol Soc}, + Month = {Mar}, + Pages = {121--131}, + Title = {{{T}he influence of executive functions, sensation seeking, and {H}{I}{V} serostatus on the risky sexual practices of substance-dependent individuals}}, + Volume = {11}, + Year = {2005}} + +@incollection{Good1985, + Address = {New York}, + Author = {Good, I. J.}, + Booktitle = {{B}ayesian Statistics 2}, + Editor = {Bernardo, J. M. and DeGroot, M. H. and Lindley, D. V. and Smith, A. F. M.}, + Pages = {249--269}, + Publisher = {Elsevier}, + Title = {Weight of Evidence: A Brief Survey}, + Year = {1985}} + +@book{Good1983, + Address = {Minneapolis}, + Author = {Good, I. J.}, + Publisher = {University of Minnesota Press}, + Title = {Good Thinking: {T}he Foundations of Probability and Its Applications}, + Year = {1983}} + +@article{Good1982, + Author = {Good, I. J.}, + Journal = {Journal of the American Statistical Association}, + Pages = {342--347}, + Title = {Comment on ``{L}indley's Paradox'' by Glenn Shafer}, + Volume = {77}, + Year = {1982}} + +@article{Good1987, + Author = {Good, I. J. and Crook, J. F.}, + Journal = {The Annals of Statistics}, + Pages = {670--693}, + Title = {The Robustness and Sensitivity of the Mixed-Dirichlet {B}ayesian Test for ``Independence'' in Contingency Tables}, + Volume = {15}, + Year = {1987}} + +@article{Goodman1993, + Author = {Goodman, S. N.}, + Journal = {American Journal of Epidemiology}, + Pages = {485--496}, + Title = {P Values, Hypothesis Tests, and Likelihood: Implications for Epidemiology of a Neglected Historical Debate}, + Volume = {137}, + Year = {1993}} + +@article{Gordon2007, + Author = {Gordon, S. and Tompkins, T. and Dayan, P. S.}, + Journal = {Pediatr Emerg Care}, + Month = {Aug}, + Pages = {521--527}, + Title = {{{R}andomized trial of single-dose intramuscular dexamethasone compared with prednisolone for children with acute asthma}}, + Volume = {23}, + Year = {2007}} + +@article{Gorelick2008, + Author = {Gorelick, M. H. and Atabaki, S. M. and Hoyle, J. and Dayan, P. S. and Holmes, J. F. and Holubkov, R. and Monroe, D. and Callahan, J. M. and Kuppermann, N.}, + Journal = {Acad Emerg Med}, + Month = {Aug}, + Title = {{{I}nterobserver {A}greement in {A}ssessment of {C}linical {V}ariables in {C}hildren with {B}lunt {H}ead {T}rauma}}, + Year = {2008}} + +@article{Gorelick2008a, + Author = {Gorelick, M. H. and Atabaki, S. M. and Hoyle, J. and Dayan, P. S. and Holmes, J. F. and Holubkov, R. and Monroe, D. and Callahan, J.
M. and Kuppermann, N. and Gerardi, M. and Tunik, M. and Tsung, J. and Melville, K. and Lee, L. and Lillis, K. and Mahajan, P. and Dayan, P. and Nadel, F. and Powell, E. and Atabaki, S. and Brown, K. and Glass, T. and Hoyle, J. and Cooper, A. and Jacobs, E. and Monroe, D. and Borgialli, D. and Gorelick, M. and Bandyopadhyay, S. and Schamban, N. and Kuppermann, N. and Holmes, J. and Lichenstein, R. and Stanley, R. and Babcock-Cimpello, L. and Badawy, M. and Schunk, J. and Callahan, J. and Jaffe, D. and Quayle, K. and Kuppermann, N. and Alpern, E. and Chamberlain, J. and Dean, J. M. and Gerardi, M. and Goepp, J. and Gorelick, M. and Hoyle, J. and Jaffe, D. and Johns, C. and Levick, N. and Mahajan, P. and Maio, R. and Melville, K. and Miller, S. and Monroe, D. and Ruddy, R. and Stanley, R. and Treloar, D. and Tunik, M. and Walker, A. and Kavanaugh, D. and Park, H. and Holubkov, R. and Knight, S. and Donaldson, A. and Chamberlain, J. and Brown, M. and Corneli, H. and Goepp, J. and Holubkov, R. and Mahajan, P. and Melville, K. and Stremski, E. and Tunik, M. and Gorelick, M. and Alpern, E. and Dean, J. M. and Foltin, G. and Joseph, J. and Miller, S. and Moler, F. and Stanley, R. and Teach, S. and Jaffe, D. and Brown, K. and Cooper, A. and Dean, J. M. and Johns, C. and Maio, R. and Mann, N. C. and Monroe, D. and Shaw, K. and Teitelbaum, D. and Treloar, D. and Stanley, R. and Alexander, D. and Brown, J. and Gerardi, M. and Gregor, M. and Holubkov, R. and Lillis, K. and Nordberg, B. and Ruddy, R. and Shults, M. and Walker, A. and Levick, N. and Brennan, J. and Brown, J. and Dean, J. M. and Hoyle, J. and Maio, R. and Ruddy, R. and Schalick, W. and Singh, T. and Wright, J.}, + Journal = {Acad Emerg Med}, + Month = {Sep}, + Pages = {812--818}, + Title = {{{I}nterobserver agreement in assessment of clinical variables in children with blunt head trauma}}, + Volume = {15}, + Year = {2008}} + +@article{Gormley2001, + Author = {Gormley, M. E. and Gaebler-Spira, D. and Delgado, M. R.}, + Journal = {J. Child Neurol.}, + Month = {Feb}, + Pages = {113--118}, + Title = {{{U}se of botulinum toxin type {A} in pediatric patients with cerebral palsy: a three-center retrospective chart review}}, + Volume = {16}, + Year = {2001}} + +@article{Gorrindo2005, + Author = {Gorrindo, T. and Blair, RJR and Budhani, S. and Dickstein, D.P. and Pine, D.S. and Leibenluft, E.}, + Journal = {American Journal of Psychiatry}, + Number = {10}, + Pages = {1975}, + Publisher = {Am Psychiatric Assoc}, + Title = {{Deficits on a probabilistic response-reversal task in patients with pediatric bipolar disorder}}, + Volume = {162}, + Year = {2005}} + +@article{Gottfried2003, + Author = {Gottfried, J. A. and O'Doherty, J. and Dolan, R. J.}, + Journal = {Science}, + Pages = {1104--1107}, + Title = {{{E}ncoding predictive reward value in human amygdala and orbitofrontal cortex}}, + Volume = {301}, + Year = {2003}} + +@article{Gottfried2002, + Author = {Gottfried, J. A. and O'Doherty, J. and Dolan, R. J.}, + Journal = {J. Neurosci.}, + Month = {Dec}, + Pages = {10829--10837}, + Title = {{{A}ppetitive and aversive olfactory learning in humans studied using event-related functional magnetic resonance imaging}}, + Volume = {22}, + Year = {2002}} + +@article{Gottschalk1995, + Author = {Gottschalk, A. and Bauer, M. S. and Whybrow, P. C.}, + Journal = {Archives of General Psychiatry}, + Pages = {947--959}, + Title = {Evidence of Chaotic Mood Variation in Bipolar Disorder}, + Volume = {52}, + Year = {1995}} + +@article{Goutis1992, + Author = {Goutis, C. 
and Casella, G.}, + Journal = {The Annals of Statistics}, + Pages = {1501--1513}, + Title = {Increasing the Confidence in {S}tudent's $t$ Interval}, + Volume = {20}, + Year = {1992}} + +@article{Grunwald2000, + Author = {Gr\"{u}nwald, P.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {133--152}, + Title = {Model Selection Based on Minimum Description Length}, + Volume = {44}, + Year = {2000}} + +@phdthesis{Grunwald1998, + Author = {Gr\"{u}nwald, P.}, + School = {University of Amsterdam}, + Title = {The {MDL} Principle and Reasoning Under Uncertainty}, + Type = {ILLC Dissertation Series DS 1998-03}, + Year = {1998}} + +@article{Grace1991, + Author = {Grace, AA}, + Journal = {Neuroscience}, + Number = {1}, + Pages = {1}, + Title = {{Phasic versus tonic dopamine release and the modulation of dopamine system responsivity: a hypothesis for the etiology of schizophrenia.}}, + Volume = {41}, + Year = {1991}} + +@article{Graham2009, + Author = {Graham, D. L. and Krishnan, V. and Larson, E. B. and Graham, A. and Edwards, S. and Bachtell, R. K. and Simmons, D. and Gent, L. M. and Berton, O. and Bolanos, C. A. and DiLeone, R. J. and Parada, L. F. and Nestler, E. J. and Self, D. W.}, + Journal = {Biol. Psychiatry}, + Month = {Apr}, + Pages = {696--701}, + Title = {{{T}ropomyosin-related kinase {B} in the mesolimbic dopamine system: region-specific effects on cocaine reward}}, + Volume = {65}, + Year = {2009}} + +@article{Graham2000, + Author = {Graham, H. K. and Aoki, K. R. and Autti-R\"{a}m\"{o}, I. and Boyd, R. N. and Delgado, M. R. and Gaebler-Spira, D. J. and Gormley, M. E. and Guyer, B. M. and Heinen, F. and Holton, A. F. and Matthews, D. and Molenaers, G. and Motta, F. and Garc\'{i}a Ruiz, P. J. and Wissel, J.}, + Journal = {Gait Posture}, + Month = {Feb}, + Pages = {67--79}, + Title = {{{R}ecommendations for the use of botulinum toxin type {A} in the management of cerebral palsy}}, + Volume = {11}, + Year = {2000}} + +@article{Grainger1996, + Author = {Grainger, J. and Jacobs, A. M.}, + Journal = {Psychological Review}, + Pages = {518--565}, + Title = {Orthographic Processing in Visual Word Recognition: A Multiple Read-out Model}, + Volume = {103}, + Year = {1996}} + +@article{Granger1980, + Author = {Granger, C. W. J. and Joyeux, R.}, + Journal = {Journal of Time Series Analysis}, + Pages = {15--30}, + Title = {An Introduction to Long-range Time Series Models and Fractional Differencing}, + Volume = {1}, + Year = {1980}} + +@article{Granger1976, + Author = {Granger, C. W. J. and Morris, M. J.}, + Journal = {Journal of the Royal Statistical Society A}, + Pages = {246--257}, + Title = {Time Series Modelling and Interpretation}, + Volume = {139}, + Year = {1976}} + +@article{Grant2007, + Author = {Grant, B. F. and Compton, W. M. and Crowley, T. J. and Hasin, D. S. and Helzer, J. E. and Li, T. K. and Rounsaville, B. J. and Volkow, N. D. and Woody, G. E.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {379--380}, + Title = {{{E}rrors in assessing {D}{S}{M}-{I}{V} substance use disorders}}, + Volume = {64}, + Year = {2007}} + +@article{Grant2000, + Author = {Grant, S. and Contoreggi, C. and London, E.D.}, + Journal = {Neuropsychologia}, + Number = {8}, + Pages = {1180--1187}, + Publisher = {Elsevier}, + Title = {{Drug abusers show impaired performance in a laboratory test of decision making}}, + Volume = {38}, + Year = {2000}} + +@article{Grant1996, + Author = {Grant, S. and London, E. D. and Newlin, D. B. and Villemagne, V. L. and Liu, X. and Contoreggi, C. and Phillips, R. L.
and Kimes, A. S. and Margolin, A.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Oct}, + Pages = {12040--12045}, + Title = {{{A}ctivation of memory circuits during cue-elicited cocaine craving}}, + Volume = {93}, + Year = {1996}} + +@book{Green1966, + Author = {Green, D. M. and Swets, J. A.}, + Owner = {Wooyoung Ahn}, + Publisher = {New York: Wiley}, + Timestamp = {2007.04.30}, + Title = {Signal detection theory and psychophysics}, + Year = {1966}} + +@book{Green1966a, + Address = {New York}, + Author = {Green, D. M. and Swets, J. A.}, + Publisher = {Wiley}, + Title = {Signal Detection Theory and Psychophysics}, + Year = {1966}} + +@article{Green1994, + Author = {Green, L. and Fry, A. F. and Myerson, J.}, + Journal = {Psychological Science}, + Owner = {Wooyoung Ahn}, + Pages = {33-36}, + Timestamp = {2007.05.01}, + Title = {Discounting of delayed rewards: A life-span comparison}, + Volume = {5}, + Year = {1994}} + +@article{Green2004, + Author = {Green, L. and Myerson, J.}, + Journal = {Psychological Bulletin}, + Number = {5}, + Pages = {769--792}, + Title = {A discounting framework for choice with delayed and probabilistic rewards}, + Volume = {130}, + Year = {2004}} + +@article{Green1999, + Author = {Green, L. and Myerson, J. and Ostaszewski, P.}, + Journal = {Behavioural Processes}, + Owner = {ahnw}, + Pages = {89-96}, + Timestamp = {2007.05.01}, + Title = {Discounting of delayed rewards across the life span: Age differences in individual discounting functions}, + Volume = {46}, + Year = {1999}} + +@article{Green2003, + Author = {Green, P. J.}, + Journal = {Highly structured stochastic systems}, + Pages = {179--198}, + Publisher = {Oxford University Press}, + Title = {{Trans-dimensional markov chain monte carlo}}, + Volume = {27}, + Year = {2003}} + +@article{Green1995, + Author = {Green, P. J.}, + Journal = {Biometrika}, + Pages = {711--732}, + Title = {Reversible Jump {M}arkov chain {M}onte {C}arlo Computation and {B}ayesian Model Determination}, + Volume = {82}, + Year = {1995}} + +@article{Greenwood1938, + Author = {Greenwood, J. A.}, + Journal = {Journal of Parapsychology}, + Pages = {222--230}, + Title = {An Empirical Investigation of Some Sampling Problems}, + Volume = {2}, + Year = {1938}} + +@article{Gregorios-Pippas2009, + Author = {Gregorios-Pippas, L. and Tobler, P. N. and Schultz, W.}, + Journal = {J. Neurophysiol.}, + Month = {Mar}, + Pages = {1507--1523}, + Title = {{{S}hort-term temporal discounting of reward value in human ventral striatum}}, + Volume = {101}, + Year = {2009}} + +@article{Griffiths2008, + Author = {Griffiths, T.L. and Yuille, A.}, + Journal = {The Probabilistic Mind: Prospects for Bayesian Cognitive Science}, + Pages = {33}, + Publisher = {Oxford University Press, USA}, + Title = {{A primer on probabilistic inference}}, + Year = {2008}} + +@article{Grimes1985, + Author = {Grimes, J. D. and Delgado, M. R.}, + Journal = {Clin Neuropharmacol}, + Pages = {73--77}, + Title = {{{B}romocriptine: problems with low-dose de novo therapy in {P}arkinson's disease}}, + Volume = {8}, + Year = {1985}} + +@article{Grobin2005, + Author = {Grobin, A. C. and VanDoren, M. J. and Porrino, L. J. and Morrow, A. L.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {May}, + Pages = {544--550}, + Title = {{{C}ortical 3 alpha-hydroxy-5 alpha-pregnan-20-one levels after acute administration of {D}elta 9-tetrahydrocannabinol, cocaine and morphine}}, + Volume = {179}, + Year = {2005}} + +@article{Grosenick2008, + Author = {Grosenick, L. and Greer, S. 
and Knutson, B.}, + Journal = {IEEE Trans Neural Syst Rehabil Eng}, + Month = {Dec}, + Pages = {539--548}, + Title = {{{I}nterpretable classifiers for {F}{M}{R}{I} improve prediction of purchases}}, + Volume = {16}, + Year = {2008}} + +@article{Gruber2006, + Author = {Gruber, A. J. and Dayan, P. and Gutkin, B. S. and Solla, S. A.}, + Journal = {J Comput Neurosci}, + Month = {Apr}, + Pages = {153--166}, + Title = {{{D}opamine modulation in the basal ganglia locks the gate to working memory}}, + Volume = {20}, + Year = {2006}} + +@article{Grundy1998, + Author = {S. M. Grundy}, + Journal = {Circulation}, + Keywords = {Antilipemic Agents, therapeutic use; Cholesterol, LDL, blood; Clinical Trials as Topic; Coronary Disease, blood/drug therapy; Humans}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {15}, + Owner = {Young}, + Pages = {1436--1439}, + Pmid = {9576422}, + Timestamp = {2009.12.10}, + Title = {Statin trials and goals of cholesterol-lowering therapy.}, + Volume = {97}, + Year = {1998}} + +@article{Grupp-Phelan2009, + Author = {Grupp-Phelan, J. and Mahajan, P. and Foltin, G. L. and Jacobs, E. and Tunik, M. and Sonnett, M. and Miller, S. and Dayan, P. and Kuppermann, N. and Alexander, D. and Alpern, E. and Chamberlain, J. and Dean, J. M. and Gerardi, M. and Goepp, J. and Gorelick, M. and Hoyle, J. and Jaffe, D. and Johns, C. and Levick, N. and Mahajan, P. and Maio, R. and Miller, S. and Monroe, D. and Ruddy, R. and Stanley, R. and Treloar, D. and Tunik, M. and Walker, A. and Kavanagh, D. and Park, H. and Dean, M. and Holubkov, R. and Knight, S. and Dong, L. and Donaldson, A. and Chamberlain, J. and Brown, M. and Corneli, H. and Goepp, J. and Holubkov, R. and Mahajan, P. and Melville, K. and Stremski, E. and Tunik, M. and Gorelick, M. and Alpern, E. and Dean, J. M. and Foltin, G. and Joseph, J. and Miller, S. and Moler, F. and Stanley, R. and Teach, S.}, + Journal = {Pediatr Emerg Care}, + Month = {Apr}, + Pages = {217--220}, + Title = {{{R}eferral and resource use patterns for psychiatric-related visits to pediatric emergency departments}}, + Volume = {25}, + Year = {2009}} + +@article{grusser2004cue, + Author = {Grusser, S.M. and Wrase, J. and Klein, S. and Hermann, D. and Smolka, M.N. and Ruf, M. and Weber-Fahr, W. and Flor, H. and Mann, K. and Braus, D.F. and others}, + Journal = {Psychopharmacology}, + Number = {3}, + Pages = {296--302}, + Publisher = {Springer}, + Title = {{Cue-induced activation of the striatum and medial prefrontal cortex is associated with subsequent relapse in abstinent alcoholics}}, + Volume = {175}, + Year = {2004}} + +@article{Guarraci1999, + Author = {Guarraci, F.A. and Kapp, B.S.}, + Journal = {Behavioural brain research}, + Number = {2}, + Pages = {169--179}, + Publisher = {Elsevier}, + Title = {{An electrophysiological characterization of ventral tegmental area dopaminergic neurons during differential Pavlovian fear conditioning in the awake rabbit}}, + Volume = {99}, + Year = {1999}} + +@article{Guitart1992a, + Author = {Guitart, X. and Beitner-Johnson, D. and Marby, D. W. and Kosten, T. A. and Nestler, E. J.}, + Journal = {Synapse}, + Month = {Nov}, + Pages = {242--253}, + Title = {{{F}ischer and {L}ewis rat strains differ in basal levels of neurofilament proteins and their regulation by chronic morphine in the mesolimbic dopamine system}}, + Volume = {12}, + Year = {1992}} + +@article{Guitart1993, + Author = {Guitart, X. and Nestler, E. J.}, + Journal = {Neurochem. 
Res.}, + Month = {Jan}, + Pages = {5--13}, + Title = {{{S}econd messenger and protein phosphorylation mechanisms underlying opiate addiction: studies in the rat locus coeruleus}}, + Volume = {18}, + Year = {1993}} + +@article{Guitart1992, + Author = {Guitart, X. and Thompson, M. A. and Mirante, C. K. and Greenberg, M. E. and Nestler, E. J.}, + Journal = {J. Neurochem.}, + Month = {Mar}, + Pages = {1168--1171}, + Title = {{{R}egulation of cyclic {A}{M}{P} response element-binding protein ({C}{R}{E}{B}) phosphorylation by acute and chronic morphine in the rat locus coeruleus}}, + Volume = {58}, + Year = {1992}} + +@article{Gunderson1997, + Author = {Gunderson, J. G. and Lyoo, I. K.}, + Journal = {Harv Rev Psychiatry}, + Pages = {272--278}, + Title = {{{F}amily problems and relationships for adults with borderline personality disorder}}, + Volume = {4}, + Year = {1997}} + +@article{Guo2008, + Author = {Guo, Y. and DuBois Bowman, F. and Kilts, C.}, + Journal = {Human Brain Mapping}, + Number = {9}, + Publisher = {Wiley Subscription Services, Inc., A Wiley Company Hoboken}, + Title = {{Predicting the brain response to treatment using a Bayesian hierarchical model with application to a study of schizophrenia}}, + Volume = {29}, + Year = {2008}} + +@article{Guyer2006, + Author = {Guyer, A. E. and Kaufman, J. and Hodgdon, H. B. and Masten, C. L. and Jazbec, S. and Pine, D. S. and Ernst, M.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Sep}, + Pages = {1059--1067}, + Title = {{{B}ehavioral alterations in reward system function: the role of childhood maltreatment and psychopathology}}, + Volume = {45}, + Year = {2006}} + +@article{Guyer2008, + Author = {Guyer, A. E. and Lau, J. Y. and McClure-Tone, E. B. and Parrish, J. and Shiffrin, N. D. and Reynolds, R. C. and Chen, G. and Blair, R. J. and Leibenluft, E. and Fox, N. A. and Ernst, M. and Pine, D. S. and Nelson, E. E.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Nov}, + Pages = {1303--1312}, + Title = {{{A}mygdala and ventrolateral prefrontal cortex function during anticipated peer evaluation in pediatric social anxiety}}, + Volume = {65}, + Year = {2008}} + +@article{Guyer2007, + Author = {Guyer, A. E. and McClure, E. B. and Adler, A. D. and Brotman, M. A. and Rich, B. A. and Kimes, A. S. and Pine, D. S. and Ernst, M. and Leibenluft, E.}, + Journal = {J Child Psychol Psychiatry}, + Month = {Sep}, + Pages = {863--871}, + Title = {{{S}pecificity of facial expression labeling deficits in childhood psychopathology}}, + Volume = {48}, + Year = {2007}} + +@article{Guyer2008a, + Author = {Guyer, A. E. and Monk, C. S. and McClure-Tone, E. B. and Nelson, E. E. and Roberson-Nay, R. and Adler, A. D. and Fromm, S. J. and Leibenluft, E. and Pine, D. S. and Ernst, M.}, + Journal = {J Cogn Neurosci}, + Month = {Sep}, + Pages = {1565--1582}, + Title = {{{A} developmental examination of amygdala response to facial expressions}}, + Volume = {20}, + Year = {2008}} + +@article{Guyer2006a, + Author = {Guyer, A. E. and Nelson, E. E. and Perez-Edgar, K. and Hardin, M. G. and Roberson-Nay, R. and Monk, C. S. and Bjork, J. M. and Henderson, H. A. and Pine, D. S. and Fox, N. A. and Ernst, M.}, + Journal = {J. Neurosci.}, + Month = {Jun}, + Pages = {6399--6405}, + Title = {{{S}triatal functional alteration in adolescents characterized by early childhood behavioral inhibition}}, + Volume = {26}, + Year = {2006}} + +@article{Ha2007a, + Author = {Ha, J. H. and Kim, S. Y. and Bae, S. C. and Bae, S. and Kim, H. and Sim, M. and Lyoo, I. K. and Cho, S. 
C.}, + Journal = {Psychopathology}, + Pages = {424--430}, + Title = {{{D}epression and {I}nternet addiction in adolescents}}, + Volume = {40}, + Year = {2007}} + +@article{Ha2004, + Author = {Ha, K. S. and Kim, S. J. and Yune, S. K. and Kim, J. H. and Hwang, J. W. and Lee, N. Y. and Sung, Y. H. and Abrams, K. Y. and Lyoo, I. K.}, + Journal = {Psychiatry Clin. Neurosci.}, + Month = {Feb}, + Pages = {42--47}, + Title = {{{T}hree-year follow up of women with and without borderline personality disorder: development of {C}loninger's character in adolescence}}, + Volume = {58}, + Year = {2004}} + +@article{Ha2007, + Author = {Ha, K. S. and Yoo, H. K. and Lyoo, I. K. and Jeong, D. U.}, + Journal = {Acta Neurol. Scand.}, + Month = {Nov}, + Pages = {312--316}, + Title = {{{C}omputerized assessment of cognitive impairment in narcoleptic patients}}, + Volume = {116}, + Year = {2007}} + +@book{Hacking1965, + Address = {Cambridge}, + Author = {Hacking, I.}, + Publisher = {Cambridge University Press}, + Title = {Logic of Statistical Inference}, + Year = {1965}} + +@article{Hagen1997, + Author = {Hagen, R. L.}, + Journal = {American Psychologist}, + Pages = {15--24}, + Title = {In Praise of the Null Hypothesis Statistical Test}, + Volume = {52}, + Year = {1997}} + +@article{Hahn2006, + Author = {Hahn, J. S. and Barkovich, A. J. and Stashinko, E. E. and Kinsman, S. L. and Delgado, M. R. and Clegg, N. J.}, + Journal = {Brain Dev.}, + Month = {Aug}, + Pages = {413--419}, + Title = {{{F}actor analysis of neuroanatomical and clinical characteristics of holoprosencephaly}}, + Volume = {28}, + Year = {2006}} + +@article{Hahn2003, + Author = {Hahn, J. S. and Delgado, M. R. and Clegg, N. J. and Sparagana, S. P. and Gerace, K. L. and Barkovich, A. J. and Olson, D. M.}, + Journal = {Clin Neurophysiol}, + Month = {Oct}, + Pages = {1908--1917}, + Title = {{{E}lectroencephalography in holoprosencephaly: findings in children without epilepsy}}, + Volume = {114}, + Year = {2003}} + +@article{Hahn2005, + Author = {Hahn, J. S. and Hahn, S. M. and Kammann, H. and Barkovich, A. J. and Clegg, N. J. and Delgado, M. R. and Levey, E.}, + Journal = {J. Pediatr. Endocrinol. Metab.}, + Month = {Oct}, + Pages = {935--941}, + Title = {{{E}ndocrine disorders associated with holoprosencephaly}}, + Volume = {18}, + Year = {2005}} + +@article{Haldane1945, + Author = {Haldane, J. B. S.}, + Journal = {Biometrika}, + Pages = {222--225}, + Title = {On a Method of Estimating Frequencies}, + Volume = {33}, + Year = {1945}} + +@article{Hall1994, + Author = {Hall, H. and Sedvall, G. and Magnusson, O. and Kopp, J. and Halldin, C. and Farde, L.}, + Journal = {Neuropsychopharmacology: official publication of the American College of Neuropsychopharmacology}, + Number = {4}, + Pages = {245}, + Title = {{Distribution of D1-and D2-dopamine receptors, and dopamine and its metabolites in the human brain.}}, + Volume = {11}, + Year = {1994}} + +@article{Ham2007a, + Author = {Ham, B. J. and Chey, J. and Yoon, S. J. and Sung, Y. and Jeong, D. U. and Ju Kim, S. and Sim, M. E. and Choi, N. and Choi, I. G. and Renshaw, P. F. and Lyoo, I. K.}, + Journal = {Eur. J. Neurosci.}, + Month = {Jan}, + Pages = {324--329}, + Title = {{{D}ecreased {N}-acetyl-aspartate levels in anterior cingulate and hippocampus in subjects with post-traumatic stress disorder: a proton magnetic resonance spectroscopy study}}, + Volume = {25}, + Year = {2007}} + +@article{Ham2005, + Author = {Ham, B. J. and Lee, M. S. and Lee, Y. M. and Kim, M. K. and Choi, M. J. and Oh, K. S. 
and Jung, H. Y. and Lyoo, I. K. and Choi, I. G.}, + Journal = {Neuropsychobiology}, + Pages = {151--154}, + Title = {{{A}ssociation between the catechol {O}-methyltransferase {V}al108/158{M}et polymorphism and alexithymia}}, + Volume = {52}, + Year = {2005}} + +@article{Ham2006, + Author = {Ham, B. J. and Lee, Y. M. and Kim, M. K. and Lee, J. and Ahn, D. S. and Choi, M. J. and Lyoo, I. K. and Choi, I. G. and Lee, M. S.}, + Journal = {Neuropsychobiology}, + Pages = {203--209}, + Title = {{{P}ersonality, dopamine receptor {D}4 exon {I}{I}{I} polymorphisms, and academic achievement in medical students}}, + Volume = {53}, + Year = {2006}} + +@article{Ham2007, + Author = {Ham, B. J. and Sung, Y. and Kim, N. and Kim, S. J. and Kim, J. E. and Kim, D. J. and Lee, J. Y. and Kim, J. H. and Yoon, S. J. and Lyoo, I. K.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Month = {Mar}, + Pages = {403--411}, + Title = {{{D}ecreased {G}{A}{B}{A} levels in anterior cingulate and basal ganglia in medicated subjects with panic disorder: a proton magnetic resonance spectroscopy (1{H}-{M}{R}{S}) study}}, + Volume = {31}, + Year = {2007}} + +@article{Hamidovic2008, + Abstract = {The neurotransmitter dopamine is integrally involved in the rewarding + effects of drugs, and it has also been thought to mediate impulsive + behaviors in animal models. Most of the studies of drug effects on + impulsive behaviors in humans have involved drugs with complex actions + on different transmitter systems and different receptor subtypes. + The present study was designed to characterize the effect of single + doses of pramipexole, a D2/D3 agonist, on measures of cognitive and + impulsive behavior, as well as on mood in healthy volunteers. Healthy + men and women (N = 10) received placebo and 2 doses of pramipexole, + 0.25 and 0.50 mg, in a within-subject, double-blinded study. Outcome + measures included changes in cognitive performance, assessed by the + Automated Neuropsychological Assessment Metrics, several behavioral + measures related to impulsive behavior, including the Balloon Analogue + Risk Task, Delay Discounting Task, Go/No-Go Task, Card Perseveration + Task, and subjective ratings of mood assessed by Addiction Research + Center Inventory, Profile of Mood States, and Drug Effects Questionnaire. + Pramipexole decreased positive ratings of mood (euphoria, intellectual + efficiency, and energy) and increased both subjectively reported + sedation and behavioral sedation indicated by impaired cognitive + performance on several measures of the Automated Neuropsychological + Assessment Metrics. Single low to medium doses of this drug did not + produce a decrease in impulsive responding on behavioral measures + included in this study. The sedative-like effects observed in this + study may reflect presynaptic actions of the drug. 
Higher doses with + postsynaptic actions may be needed to produce either behavioral or + subjective stimulant-like effects.}, + Author = {Ajna Hamidovic and Un Jung Kang and Harriet de Wit}, + Doi = {10.1097/jcp.0b013e3181602fab}, + Institution = {Department of Psychiatry, The University of Chicago, Chicago, IL 60637, USA.}, + Journal = {J Clin Psychopharmacol}, + Keywords = {Adolescent; Adult; Affect, drug effects; Analysis of Variance; Benzothiazoles, administration /&/ dosage/pharmacology; Cognition, drug effects; Dopamine Agonists, administration /&/ dosage/pharmacology; Double-Blind Method; Female; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Personality Tests; Reaction Time, drug effects}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {45--51}, + Pii = {00004714-200802000-00008}, + Pmid = {18204340}, + Timestamp = {2009.08.06}, + Title = {Effects of low to moderate acute doses of pramipexole on impulsivity and cognition in healthy volunteers.}, + Url = {http://dx.doi.org/10.1097/jcp.0b013e3181602fab}, + Volume = {28}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1097/jcp.0b013e3181602fab}} + +@article{Hampson2009, + Author = {Hampson, R. E. and Espana, R. A. and Rogers, G. A. and Porrino, L. J. and Deadwyler, S. A.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jan}, + Pages = {355--369}, + Title = {{{M}echanisms underlying cognitive enhancement and reversal of cognitive deficits in nonhuman primates by the ampakine {C}{X}717}}, + Volume = {202}, + Year = {2009}} + +@article{Hampton2007, + Author = {Hampton, A. N. and Adolphs, R. and Tyszka, M. J. and O'Doherty, J. P.}, + Journal = {Neuron}, + Pages = {545--555}, + Title = {{{C}ontributions of the amygdala to reward expectancy and choice signals in human prefrontal cortex}}, + Volume = {55}, + Year = {2007}} + +@article{Hampton2008, + Author = {Hampton, A. N. and Bossaerts, P. and O'Doherty, J. P.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {May}, + Pages = {6741--6746}, + Title = {{{N}eural correlates of mentalizing-related computations during strategic interactions in humans}}, + Volume = {105}, + Year = {2008}} + +@article{Hampton2006, + Author = {Hampton, A. N. and Bossaerts, P. and {O'Doherty}, J. P.}, + Journal = {Journal of Neuroscience}, + Owner = {WooYoung Ahn}, + Pages = {8360--8367}, + Timestamp = {2007.09.17}, + Title = {The role of the ventromedial prefrontal cortex in abstract state-based inference during decision making in humans}, + Volume = {26(32)}, + Year = {2006}} + +@article{Hampton2007a, + Author = {Hampton, A. N. and O'doherty, J. P.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {1377--1382}, + Title = {{{D}ecoding the neural substrates of reward-related decision making with functional {M}{R}{I}}}, + Volume = {104}, + Year = {2007}} + +@article{Han2008c, + Author = {Han, D. H. and Bolo, N. and Daniels, M. A. and Lyoo, I. K. and Min, K. J. and Kim, C. H. and Renshaw, P. F.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Sep}, + Pages = {1593--1599}, + Title = {{{C} {G}{A}{B}{A}{A}alpha6}}, + Volume = {32}, + Year = {2008}} + +@article{Han2006, + Author = {Han, D. H. and Kee, B. S. and Min, K. J. and Lee, Y. S. and Na, C. and Park, D. B. and Lyoo, I.
K.}, + Journal = {Neuroreport}, + Month = {Jan}, + Pages = {95--99}, + Title = {{{E}ffects of catechol-{O}-methyltransferase {V}al158{M}et polymorphism on the cognitive stability and aggression in the first-onset schizophrenic patients}}, + Volume = {17}, + Year = {2006}} + +@article{Han2008b, + Author = {Han, D. H. and Renshaw, P. F. and Dager, S. R. and Chung, A. and Hwang, J. and Daniels, M. A. and Lee, Y. S. and Lyoo, I. K.}, + Journal = {J Psychiatr Res}, + Month = {Apr}, + Pages = {399--407}, + Title = {{{A}ltered cingulate white matter connectivity in panic disorder patients}}, + Volume = {42}, + Year = {2008}} + +@article{Han2008a, + Author = {Han, D. H. and Renshaw, P. F. and Sim, M. E. and Kim, J. I. and Arenella, L. S. and Lyoo, I. K.}, + Journal = {Schizophr. Res.}, + Month = {Aug}, + Pages = {338--340}, + Title = {{{T}he effect of internet video game play on clinical and extrapyramidal symptoms in patients with schizophrenia}}, + Volume = {103}, + Year = {2008}} + +@article{Han2008, + Author = {Han, D. H. and Yoon, S. J. and Sung, Y. H. and Lee, Y. S. and Kee, B. S. and Lyoo, I. K. and Renshaw, P. F. and Cho, S. C.}, + Journal = {Compr Psychiatry}, + Pages = {387--392}, + Title = {{{A} preliminary study: novelty seeking, frontal executive function, and dopamine receptor ({D}2) {T}aq{I} {A} gene polymorphism in patients with methamphetamine dependence}}, + Volume = {49}, + Year = {2008}} + +@article{Han1998, + Author = {Han, Z. S. and Zhang, E. T. and Craig, A. D.}, + Journal = {Nat. Neurosci.}, + Month = {Jul}, + Pages = {218--225}, + Title = {{{N}ociceptive and thermoreceptive lamina {I} neurons are anatomically distinct}}, + Volume = {1}, + Year = {1998}} + +@article{Hanks2006a, + Author = {Hanks, T. D. and Ditterich, J. and Shadlen, M. N.}, + Journal = {Nat. Neurosci.}, + Month = {May}, + Pages = {682--689}, + Title = {{{M}icrostimulation of macaque area {L}{I}{P} affects decision-making in a motion discrimination task}}, + Volume = {9}, + Year = {2006}} + +@article{Hanlon2009, + Author = {Hanlon, C. A. and Wesley, M. J. and Porrino, L. J.}, + Journal = {Drug Alcohol Depend}, + Month = {Jun}, + Pages = {88--94}, + Title = {{{L}oss of functional specificity in the dorsal striatum of chronic cocaine users}}, + Volume = {102}, + Year = {2009}} + +@article{Hanlon1990, + Abstract = {In a number of studies that have examined the gestural disturbance + in aphasia and the utility of gestural interventions in aphasia therapy, + a variable degree of facilitation of verbalization during gestural + activity has been reported. The present study examined the effect + of different unilateral gestural movements on simultaneous oral-verbal + expression, specifically naming to confrontation. It was hypothesized + that activation of the phylogenetically older proximal motor system + of the hemiplegic right arm in the execution of a communicative but + nonrepresentational pointing gesture would have a facilitatory effect + on naming ability. Twenty-four aphasic patients, representing five + aphasic subtypes, including Broca's, Transcortical Motor, Anomic, + Global, and Wernicke's aphasics were assessed under three gesture/naming + conditions. The findings indicated that gestures produced through + activation of the proximal (shoulder) musculature of the right paralytic + limb differentially facilitated naming performance in the nonfluent + subgroup, but not in the Wernicke's aphasics. 
These findings may + be explained on the view that functional activation of the archaic + proximal motor system of the hemiplegic limb, in the execution of + a communicative gesture, permits access to preliminary stages in + the formative process of the anterior action microgeny, which ultimately + emerges in vocal articulation.}, + Author = {R. E. Hanlon and J. W. Brown and L. J. Gerstman}, + Institution = {City College of the City University of New York.}, + Journal = {Brain Lang}, + Keywords = {Aged; Aged, 80 and over; Anomia, psychology/rehabilitation; Aphasia, Broca, psychology/rehabilitation; Aphasia, Wernicke, psychology; Aphasia, psychology; Cerebral Infarction, psychology/rehabilitation; Dominance, Cerebral; Female; Functional Laterality; Gestures; Hemiplegia, psychology; Humans; Kinesics; Male; Middle Aged; Neuropsychological Tests}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {298--314}, + Pmid = {2322814}, + Timestamp = {2009.08.04}, + Title = {Enhancement of naming in nonfluent aphasia through gesture.}, + Volume = {38}, + Year = {1990}} + +@article{Hannan1980, + Author = {Hannan, E. J.}, + Journal = {The Annals of Statistics}, + Pages = {1071--1081}, + Title = {The Estimation of the Order of an {ARMA} Process}, + Volume = {8}, + Year = {1980}} + +@article{Hansen2001, + Author = {Hansen, M. H. and Yu, B.}, + Journal = {Journal of the American Statistical Association}, + Pages = {746--774}, + Title = {Model Selection and the Principle of Minimum Description Length}, + Volume = {96}, + Year = {2001}} + +@article{Hanson2008, + Author = {Hanson, K. L. and Luciana, M. and Sullwold, K.}, + Journal = {Drug Alcohol Depend}, + Month = {Jul}, + Pages = {99--110}, + Title = {{{R}eward-related decision-making deficits and elevated impulsivity among {M}{D}{M}{A} and other drug users}}, + Volume = {96}, + Year = {2008}} + +@article{Hao2004, + Author = {Hao, W. and Su, Z. and Xiao, S. and Fan, C. and Chen, H. and Liu, T. and Young, D.}, + Journal = {Addiction}, + Month = {Sep}, + Pages = {1176--1180}, + Title = {{{L}ongitudinal surveys of prevalence rates and use patterns of illicit drugs at selected high-prevalence areas in {C}hina from 1993 to 2000}}, + Volume = {99}, + Year = {2004}} + +@article{Hao2002, + Author = {Hao, W. and Xiao, S. and Liu, T. and Young, D. and Chen, S. and Zhang, D. and Li, C. and Shi, J. and Chen, G. and Yang, K.}, + Journal = {Addiction}, + Month = {Oct}, + Pages = {1305--1315}, + Title = {{{T}he second {N}ational {E}pidemiological {S}urvey on illicit drug use at six high-prevalence areas in {C}hina: prevalence rates and use patterns}}, + Volume = {97}, + Year = {2002}} + +@article{Hardin2009a, + Author = {Hardin, M. G. and Mandell, D. and Mueller, S. C. and Dahl, R. E. and Pine, D. S. and Ernst, M.}, + Journal = {J Child Psychol Psychiatry}, + Month = {Jul}, + Title = {{{I}nhibitory control in anxious and healthy adolescents is modulated by incentive and incidental affective stimuli}}, + Year = {2009}} + +@article{Hardin2009, + Author = {Hardin, M. G. and Pine, D. S. and Ernst, M.}, + Journal = {Neuroimage}, + Month = {Jun}, + Title = {{{T}he influence of context valence in the neural coding of monetary outcomes}}, + Year = {2009}} + +@article{Hardin2007, + Author = {Hardin, M. G. and Schroth, E. and Pine, D. S. 
and Ernst, M.}, + Journal = {J Child Psychol Psychiatry}, + Month = {May}, + Pages = {446--454}, + Title = {{{I}ncentive-related modulation of cognitive control in healthy, anxious, and depressed adolescents: development and psychopathology related differences}}, + Volume = {48}, + Year = {2007}} + +@article{Hardy2006, + Abstract = {On the 100th anniversary of Alzheimer's lecture describing the clinicopathological + entity which bears his eponym, this article reviews the major areas + of progress in our understanding of the disease and outlines the + many gaps still remaining. The progress toward effective mechanistic + therapy is reviewed.}, + Author = {John Hardy}, + Doi = {10.1016/j.neuron.2006.09.016}, + Institution = {Laboratory of Neurogenetics, National Institute on Aging, Porter Neuroscience Building, Bethesda, Maryland 20892, USA. hardyJ@mail.nih.gov}, + Journal = {Neuron}, + Keywords = {Alzheimer Disease, genetics/history/pathology/physiopathology; Animals; History, 20th Century; History, 21st Century; Humans; Research, history/methods}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {1}, + Owner = {Young}, + Pages = {3--13}, + Pii = {S0896-6273(06)00723-9}, + Pmid = {17015223}, + Timestamp = {2009.12.10}, + Title = {A hundred years of Alzheimer's disease research.}, + Url = {http://dx.doi.org/10.1016/j.neuron.2006.09.016}, + Volume = {52}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.neuron.2006.09.016}} + +@article{Hare2008, + Author = {Hare, T. A. and O'Doherty, J. and Camerer, C. F. and Schultz, W. and Rangel, A.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {5623--5630}, + Title = {{{D}issociating the role of the orbitofrontal cortex and the striatum in the computation of goal values and prediction errors}}, + Volume = {28}, + Year = {2008}} + +@article{Hariri2006, + Author = {Hariri, A.R. and Brown, S.M. and Williamson, D.E. and Flory, J.D. and de Wit, H. and Manuck, S.B.}, + Journal = {Journal of Neuroscience}, + Number = {51}, + Pages = {13213}, + Publisher = {Soc Neuroscience}, + Title = {{Preference for immediate over delayed rewards is associated with magnitude of ventral striatal activity}}, + Volume = {26}, + Year = {2006}} + +@article{Hariri2006a, + Abstract = {Discounting future outcomes as a function of their deferred availability + underlies much of human decision making. Discounting, or preference + for immediate over delayed rewards of larger value, is often associated + with impulsivity and is a risk factor for addictive disorders such + as pathological gambling, cigarette smoking, and drug and alcohol + abuse. The ventral striatum (VS) is involved in mediating behavioral + responses and physiological states associated with reward, and dysregulation + of the VS contributes to addiction, perhaps by affecting impulsive + decision-making. Behavioral tests of delay discounting (DD), which + index preference for smaller immediate over larger delayed rewards, + covary with impulsive tendencies in humans. In the current study, + we examined the relationship between individual differences in DD, + measured in a behavioral assessment, and VS activity measured with + blood oxygenation level-dependent functional magnetic resonance imaging, + in 45 adult volunteers. VS activity was determined using a task involving + positive and negative feedback with monetary reward. 
Analyses revealed + that individual differences in DD correlate positively with magnitude + of VS activation in response to both positive and negative feedback, + compared with a no-feedback control condition. Variability in DD + was also associated with differential VS activation in response to + positive, compared with negative, feedback. Collectively, our results + suggest that increased preference for smaller immediate over larger + delayed rewards reflects both a relatively indiscriminate and hyper-reactive + VS circuitry. They also highlight a specific neurocognitive mechanism + that may contribute to increased risk for addiction.}, + Author = {Ahmad R Hariri and Sarah M Brown and Douglas E Williamson and Janine D Flory and Harriet de Wit and Stephen B Manuck}, + Doi = {10.1523/JNEUROSCI.3446-06.2006}, + Institution = {Department of Psychiatry, University of Pittsburgh, Pittsburgh, Pennsylvania 15213, USA. haririar@upmc.edu}, + Journal = {J Neurosci}, + Keywords = {Adult; Basal Ganglia, physiology; Behavior, Addictive, physiopathology/psychology; Choice Behavior, physiology; Feedback, Psychological, physiology; Female; Humans; Impulsive Behavior, physiopathology/psychology; Male; Middle Aged; Nerve Net, physiology; Reward; Risk Factors; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {51}, + Owner = {Woo-Young Ahn}, + Pages = {13213--13217}, + Pii = {26/51/13213}, + Pmid = {17182771}, + Timestamp = {2009.08.06}, + Title = {Preference for immediate over delayed rewards is associated with magnitude of ventral striatal activity.}, + Url = {http://dx.doi.org/10.1523/JNEUROSCI.3446-06.2006}, + Volume = {26}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1523/JNEUROSCI.3446-06.2006}} + +@article{Hariri2009, + Abstract = {BACKGROUND: Fatty acid amide hydrolase (FAAH) is a key enzyme in regulating + endocannabinoid (eCB) signaling. A common single nucleotide polymorphism + (C385A) in the human FAAH gene has been associated with increased + risk for addiction and obesity. METHODS: Using imaging genetics in + 82 healthy adult volunteers, we examined the effects of FAAH C385A + on threat- and reward-related human brain function. RESULTS: Carriers + of FAAH 385A, associated with reduced enzyme and possibly increased + eCB signaling, had decreased threat-related amygdala reactivity but + increased reward-related ventral striatal reactivity in comparison + with C385 homozygotes. Similarly divergent effects of FAAH C385A + genotype were manifest at the level of brain-behavior relationships. + The 385A carriers showed decreased correlation between amygdala reactivity + and trait anxiety but increased correlation between ventral striatal + reactivity and delay discounting, an index of impulsivity. CONCLUSIONS: + Our results parallel pharmacologic and genetic dissection of eCB + signaling, are consistent with the psychotropic effects of Delta(9)-tetrahydrocannabinol, + and highlight specific neural mechanisms through which variability + in eCB signaling impacts complex behavioral processes related to + risk for addiction and obesity.}, + Author = {Ahmad R Hariri and Adam Gorka and Luke W Hyde and Mark Kimak and Indrani Halder and Francesca Ducci and Robert E Ferrell and David Goldman and Stephen B Manuck}, + Doi = {10.1016/j.biopsych.2008.10.047}, + Institution = {Department of Psychiatry, University of Pittsburgh, Pittsburgh, Pennsylvania 15213, USA.
haririar@upmc.edu},
+ Journal = {Biol Psychiatry},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jul},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {9--16},
+ Pii = {S0006-3223(08)01409-1},
+ Pmid = {19103437},
+ Timestamp = {2009.08.06},
+ Title = {Divergent effects of genetic variation in endocannabinoid signaling on human threat- and reward-related brain function.},
+ Url = {http://dx.doi.org/10.1016/j.biopsych.2008.10.047},
+ Volume = {66},
+ Year = {2009},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.biopsych.2008.10.047}}
+
+@article{Harmer1999,
+ Author = {Harmer, C. J. and Phillips, G. D.},
+ Journal = {Neuroscience},
+ Month = {Apr},
+ Pages = {119--130},
+ Title = {{{E}nhanced dopamine efflux in the amygdala by a predictive, but not a non-predictive, stimulus: facilitation by prior repeated {D}-amphetamine}},
+ Volume = {90},
+ Year = {1999}}
+
+@phdthesis{Hartelman1997,
+ Author = {Hartelman, P. A. I.},
+ School = {University of Amsterdam},
+ Title = {Stochastic Catastrophe Theory},
+ Year = {1997}}
+
+@article{Hartelman1998,
+ Author = {Hartelman, P. A. I. and van der Maas, H. L. J. and Molenaar, P. C.
M.}, + Journal = {British Journal of Developmental Psychology}, + Pages = {97--122}, + Title = {Detecting and Modeling Developmental Transitions}, + Volume = {16}, + Year = {1998}} + +@article{Hayden2008, + Author = {Hayden, B. Y. and Nair, A. C. and McCoy, A. N. and Platt, M. L.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {19--25}, + Title = {{{P}osterior cingulate cortex mediates outcome-contingent allocation of behavior}}, + Volume = {60}, + Year = {2008}} + +@article{Hayden2007b, + Author = {Hayden, B. Y. and Parikh, P. C. and Deaner, R. O. and Platt, M. L.}, + Journal = {Proc. Biol. Sci.}, + Month = {Jul}, + Pages = {1751--1756}, + Title = {{{E}conomic principles motivating social attention in humans}}, + Volume = {274}, + Year = {2007}} + +@article{Hayden2009b, + Author = {Hayden, B. Y. and Pearson, J. M. and Platt, M. L.}, + Journal = {Science}, + Month = {May}, + Pages = {948--950}, + Title = {{{F}ictive reward signals in the anterior cingulate cortex}}, + Volume = {324}, + Year = {2009}} + +@article{Hayden2009a, + Author = {Hayden, B. Y. and Platt, M. L.}, + Journal = {Anim Cogn}, + Month = {Jan}, + Pages = {201--207}, + Title = {{{G}ambling for {G}atorade: risk-sensitive decision making for fluid rewards in humans}}, + Volume = {12}, + Year = {2009}} + +@article{Hayden2007, + Author = {Hayden, B. Y. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Jan}, + Pages = {49--53}, + Title = {{{T}emporal discounting predicts risk sensitivity in rhesus macaques}}, + Volume = {17}, + Year = {2007}} + +@article{Hayden2007a, + Author = {Hayden, B. Y. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Nov}, + Pages = {R922--923}, + Title = {{{A}nimal cognition: great apes wait for grapes}}, + Volume = {17}, + Year = {2007}} + +@article{Hayden2007c, + Abstract = {Humans and animals tend both to avoid uncertainty and to prefer immediate + over future rewards. The comorbidity of psychiatric disorders such + as impulsivity, problem gambling, and addiction suggests that a common + mechanism may underlie risk sensitivity and temporal discounting. + Nonetheless, the precise relationship between these two traits remains + largely unknown. To examine whether risk sensitivity and temporal + discounting reflect a common process, we recorded choices made by + two rhesus macaques in a visual gambling task while we varied the + delay between trials. We found that preference for the risky option + declined with increasing delay between sequential choices in the + task, even when all other task parameters were held constant. These + results were quantitatively predicted by a model that assumed that + the subjective expected utility of the risky option is evaluated + based on the expected time of the larger payoff. The importance of + the larger payoff in this model suggests that the salience of larger + payoffs played a critical role in determining the value of risky + options. These data suggest that risk sensitivity may be a product + of other cognitive processes, and specifically that myopia for the + future and the salience of jackpots control the propensity to take + a gamble.}, + Author = {Benjamin Y Hayden and Michael L Platt}, + Doi = {10.1016/j.cub.2006.10.055}, + Institution = {Department of Neurobiology, Center for Neuroeconomic Studies, Duke University Medical School, Durham, North Carolina 27710, USA. 
hayden@neuro.duke.edu},
+ Journal = {Curr Biol},
+ Keywords = {Animals; Gambling, psychology; Impulsive Behavior, psychology; Macaca mulatta; Male; Risk; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jan},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {49--53},
+ Pii = {S0960-9822(06)02430-4},
+ Pmid = {17208186},
+ Timestamp = {2009.08.06},
+ Title = {Temporal discounting predicts risk sensitivity in rhesus macaques.},
+ Url = {http://dx.doi.org/10.1016/j.cub.2006.10.055},
+ Volume = {17},
+ Year = {2007},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.cub.2006.10.055}}
+
+@article{Hayden2006,
+ Author = {Hayden, B. Y. and Platt, M. L.},
+ Journal = {Nat. Neurosci.},
+ Month = {Jul},
+ Pages = {857--859},
+ Title = {{{F}ool me once, shame on me--fool me twice, blame the {A}{C}{C}}},
+ Volume = {9},
+ Year = {2006}}
+
+@article{Hayden2009,
+ Author = {Hayden, B. Y. and Smith, D. V. and Platt, M. L.},
+ Journal = {Proc. Natl. Acad. Sci. U.S.A.},
+ Month = {Apr},
+ Pages = {5948--5953},
+ Title = {{{E}lectrophysiological correlates of default-mode processing in macaque posterior cingulate cortex}},
+ Volume = {106},
+ Year = {2009}}
+
+@article{Hazy2007,
+ Author = {Hazy, T. E. and Frank, M. J. and O'Reilly, R. C.},
+ Journal = {Philos. Trans. R. Soc. Lond., B, Biol.
Sci.}, + Month = {Sep}, + Pages = {1601--1613}, + Title = {{{T}owards an executive without a homunculus: computational models of the prefrontal cortex/basal ganglia system}}, + Volume = {362}, + Year = {2007}} + +@article{Hazy2006, + Author = {Hazy, T. E. and Frank, M. J. and O'Reilly, R. C.}, + Journal = {Neuroscience}, + Month = {Apr}, + Pages = {105--118}, + Title = {{{B}anishing the homunculus: making working memory work}}, + Volume = {139}, + Year = {2006}} + +@article{Heathcote1996, + Author = {Heathcote, A.}, + Journal = {Behaviour Research Methods, Instruments \& Computers}, + Pages = {427--445}, + Title = {{RTSYS}: A Computer Program for Analysing Response Time Data}, + Volume = {28}, + Year = {1996}} + +@article{Heilbronner2007, + Author = {Heilbronner, S. R. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Dec}, + Pages = {R1008--1010}, + Title = {{{A}nimal cognition: time flies when chimps are having fun}}, + Volume = {17}, + Year = {2007}} + +@article{Heimansohn1991, + Abstract = {Congenital tracheal stenosis may be a life-threatening anomaly not + relieved by airway intubation. Over the past 7 years, anterior pericardial + tracheoplasty has been used at our institution for treatment of congenital + long-segment tracheal stenosis in infants with impeding airway obstruction. + Case histories of eight patients undergoing nine anterior pericardial + tracheoplasties have been reviewed to assess this technique. Of these + patients, six have required preoperative tracheal intubation before + repair to maintain ventilation. The surgical technique of anterior + pericardial tracheoplasty includes a median sternotomy approach with + partial normothermic cardiopulmonary bypass. An anterior tracheotomy + through all hypoplastic rings allows enlargement with autologous + pericardium to 1.5 times the predicted normal diameter. After insertion, + the pericardium and hypoplastic tracheal cartilages are suspended + to surrounding mediastinal structures, which prevents airway collapse. + Seven of eight infants have survived without tracheoplasty dehiscence + or wound infections. Five were ultimately extubated and are currently + free of symptoms from 6 months to 5 years after anterior pericardial + tracheoplasty. The other two survivors had residual stenosis as a + result of complications of prior tracheostomy. One of these patients + has undergone a successful second anterior pericardial tracheoplasty + and is currently extubated and well. The other is palliated at 6 + months with a tracheostomy awaiting a second anterior pericardial + tracheoplasty. Our review of anterior pericardial tracheoplasty has + demonstrated the safety, utility, and at least medium-term benefit + of this procedure in infants of any age and weight.}, + Author = {D. A. Heimansohn and K. A. Kesler and M. W. Turrentine and Y. Mahomed and L. Means and B. Matt and E. Weisberger and J. W. 
Brown},
+ Institution = {University Medical Center, Department of Surgery, Indianapolis, Ind.},
+ Journal = {J Thorac Cardiovasc Surg},
+ Keywords = {Adolescent; Adult; Child; Child, Preschool; Humans; Male; Postoperative Complications; Reoperation; Tracheal Stenosis, congenital/surgery},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Nov},
+ Number = {5},
+ Owner = {Woo-Young Ahn},
+ Pages = {710--5; discussion 715--6},
+ Pmid = {1943189},
+ Timestamp = {2009.08.04},
+ Title = {Anterior pericardial tracheoplasty for congenital tracheal stenosis.},
+ Volume = {102},
+ Year = {1991}}
+
+@article{Heinz2004,
+ Author = {Heinz, A. and Siessmeier, T. and Wrase, J. and Hermann, D. and Klein, S. and Grusser, S. M. and Grusser-Sinopoli, S. M. and Flor, H. and Braus, D. F. and Buchholz, H. G. and Grunder, G. and Schreckenberger, M. and Smolka, M. N. and Rosch, F. and Mann, K. and Bartenstein, P.},
+ Journal = {Am J Psychiatry},
+ Month = {Oct},
+ Pages = {1783--1789},
+ Title = {{{C}orrelation between dopamine {D}(2) receptors in the ventral striatum and central processing of alcohol cues and craving}},
+ Volume = {161},
+ Year = {2004}}
+
+@article{Helland1995,
+ Author = {Helland, I. S.},
+ Journal = {The American Statistician},
+ Pages = {351--356},
+ Title = {Simple Counterexamples Against the Conditionality Principle},
+ Volume = {49},
+ Year = {1995}}
+
+@article{Helmers1995,
+ Author = {Helmers, K. F. and Young, S. N. and Pihl, R. O.},
+ Journal = {Personality and Individual Differences},
+ Owner = {Wooyoung Ahn},
+ Pages = {927--935},
+ Timestamp = {2007.04.30},
+ Title = {Assessment of measures of impulsivity in healthy male volunteers},
+ Volume = {6},
+ Year = {1995}}
+
+@article{Hemerly1989,
+ Author = {Hemerly, E. M. and Davis, M. H. A.},
+ Journal = {The Annals of Statistics},
+ Pages = {941--946},
+ Title = {Strong Consistency of the PLS Criterion for Order Determination of Autoregressive Processes},
+ Volume = {17},
+ Year = {1989}}
+
+@article{Hertwig2004,
+ Author = {Hertwig, Ralph and Barron, G. and Weber, E. U. and Erev, Ido},
+ Date-Modified = {2016-03-20 22:36:57 +0000},
+ Journal = {Psychological Science},
+ Owner = {WooYoung Ahn},
+ Pages = {534--539},
+ Timestamp = {2007.07.18},
+ Title = {Decisions from experience and the effect of rare events in risky choice},
+ Volume = {15},
+ Year = {2004}}
+
+@article{Hesdorffer2008,
+ Author = {Hesdorffer, D. C. and Chan, S. and Tian, H. and Allen Hauser, W. and Dayan, P. and Leary, L. D. and Hinton, V. J.},
+ Journal = {Epilepsia},
+ Month = {May},
+ Pages = {765--771},
+ Title = {{{A}re {M}{R}{I}-detected brain abnormalities associated with febrile seizure type?}},
+ Volume = {49},
+ Year = {2008}}
+
+@article{Hester2007,
+ Author = {Hester, R. and Barre, N. and Mattingley, J. B. and Foxe, J. J. and Garavan, H.},
+ Journal = {Cogn Affect Behav Neurosci},
+ Month = {Dec},
+ Pages = {317--326},
+ Title = {{{A}voiding another mistake: error and posterror neural activity associated with adaptive posterror behavior change}},
+ Volume = {7},
+ Year = {2007}}
+
+@article{Hester2007b,
+ Author = {Hester, R. and D'Esposito, M. and Cole, M. W. and Garavan, H.},
+ Journal = {Neuroimage},
+ Month = {Jan},
+ Pages = {446--454},
+ Title = {{{N}eural mechanisms for response selection: comparing selection of responses and items from working memory}},
+ Volume = {34},
+ Year = {2007}}
+
+@article{Hester2006,
+ Author = {Hester, R. and Dixon, V.
and Garavan, H.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Feb},
+ Pages = {251--257},
+ Title = {{{A} consistent attentional bias for drug-related material in active cocaine users across word and picture versions of the emotional {S}troop task}},
+ Volume = {81},
+ Year = {2006}}
+
+@article{Hester2004c,
+ Author = {Hester, R. and Fassbender, C. and Garavan, H.},
+ Journal = {Cereb. Cortex},
+ Month = {Sep},
+ Pages = {986--994},
+ Title = {{{I}ndividual differences in error processing: a review and reanalysis of three event-related f{M}{R}{I} studies using the {G}{O}/{N}{O}{G}{O} task}},
+ Volume = {14},
+ Year = {2004}}
+
+@article{Hester2005a,
+ Author = {Hester, R. and Foxe, J. J. and Molholm, S. and Shpaner, M. and Garavan, H.},
+ Journal = {Neuroimage},
+ Month = {Sep},
+ Pages = {602--608},
+ Title = {{{N}eural mechanisms involved in error processing: a comparison of errors made with and without awareness}},
+ Volume = {27},
+ Year = {2005}}
+
+@article{Hester2009a,
+ Author = {Hester, R. and Garavan, H.},
+ Journal = {Pharmacol. Biochem. Behav.},
+ Month = {Sep},
+ Pages = {270--277},
+ Title = {{{N}eural mechanisms underlying drug-related cue distraction in active cocaine users}},
+ Volume = {93},
+ Year = {2009}}
+
+@article{Hester2005,
+ Author = {Hester, R. and Garavan, H.},
+ Journal = {Mem Cognit},
+ Month = {Mar},
+ Pages = {221--233},
+ Title = {{{W}orking memory and executive function: the influence of content and load on the control of attention}},
+ Volume = {33},
+ Year = {2005}}
+
+@article{Hester2004,
+ Author = {Hester, R. and Garavan, H.},
+ Journal = {J. Neurosci.},
+ Month = {Dec},
+ Pages = {11017--11022},
+ Title = {{{E}xecutive dysfunction in cocaine addiction: evidence for discordant frontal, cingulate, and cerebellar activity}},
+ Volume = {24},
+ Year = {2004}}
+
+@article{Hester2004a,
+ Author = {Hester, R. and Murphy, K. and Garavan, H.},
+ Journal = {Neuroimage},
+ Month = {Sep},
+ Pages = {202--212},
+ Title = {{{B}eyond common resources: the cortical basis for resolving task interference}},
+ Volume = {23},
+ Year = {2004}}
+
+@article{Hester2009,
+ Author = {Hester, R. and Nestor, L. and Garavan, H.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Jun},
+ Title = {{{I}mpaired {E}rror {A}wareness and {A}nterior {C}ingulate {C}ortex {H}ypoactivity in {C}hronic {C}annabis {U}sers}},
+ Year = {2009}}
+
+@article{Hester2007a,
+ Author = {Hester, R. and Sim\~{o}es-Franklin, C. and Garavan, H.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Sep},
+ Pages = {1974--1984},
+ Title = {{{P}ost-error behavior in active cocaine users: poor awareness of errors in the presence of intact performance adjustments}},
+ Volume = {32},
+ Year = {2007}}
+
+@article{Hester2004b,
+ Author = {Hester, R. L. and Murphy, K. and Foxe, J. J. and Foxe, D. M. and Javitt, D. C. and Garavan, H.},
+ Journal = {J Cogn Neurosci},
+ Month = {Jun},
+ Pages = {776--785},
+ Title = {{{P}redicting success: patterns of cortical activation and deactivation prior to response inhibition}},
+ Volume = {16},
+ Year = {2004}}
+
+@article{Hetherington1999,
+ Author = {Hetherington, H. P. and Telang, F. and Pan, J. W. and Sammi, M. and Schuhlein, D. and Molina, P. and Volkow, N. D.},
+ Journal = {Magn Reson Med},
+ Month = {Dec},
+ Pages = {1019--1026},
+ Title = {{{S}pectroscopic imaging of the uptake kinetics of human brain ethanol}},
+ Volume = {42},
+ Year = {1999}}
+
+@article{Hick1952,
+ Author = {Hick, W.
E.}, + Journal = {Quarterly Journal of Experimental Psychology}, + Pages = {11--26}, + Title = {On the Rate of Gain of Information}, + Volume = {4}, + Year = {1952}} + +@article{Hikosaka1989, + Author = {Hikosaka, O. and Sakamoto, M. and Usui, S.}, + Journal = {J. Neurophysiol.}, + Pages = {814--832}, + Title = {{{F}unctional properties of monkey caudate neurons. {I}{I}{I}. {A}ctivities related to expectation of target and reward}}, + Volume = {61}, + Year = {1989}} + +@article{Hikosaka1989a, + Author = {Hikosaka, O. and Sakamoto, M. and Usui, S.}, + Journal = {J. Neurophysiol.}, + Pages = {799--813}, + Title = {{{F}unctional properties of monkey caudate neurons. {I}{I}. {V}isual and auditory responses}}, + Volume = {61}, + Year = {1989}} + +@article{Hikosaka1989b, + Author = {Hikosaka, O. and Sakamoto, M. and Usui, S.}, + Journal = {J. Neurophysiol.}, + Pages = {780--798}, + Title = {{{F}unctional properties of monkey caudate neurons. {I}. {A}ctivities related to saccadic eye movements}}, + Volume = {61}, + Year = {1989}} + +@article{Hill1987, + Author = {Hill, B. M.}, + Journal = {The American Statistician}, + Pages = {95--100}, + Title = {The Validity of the Likelihood Principle}, + Volume = {41}, + Year = {1987}} + +@article{Hill1985, + Author = {Hill, B. M.}, + Journal = {Econometric Reviews}, + Pages = {191--246}, + Title = {Some Subjective {B}ayesian Considerations in the Selection of Models}, + Volume = {4}, + Year = {1985}} + +@article{Hill2005, + Author = {Hill, R.}, + Journal = {Significance}, + Pages = {13--15}, + Title = {Reflections on the Cot Death Cases}, + Volume = {2}, + Year = {2005}} + +@article{Hill2001, + Author = {Hill, S. Y. and De Bellis, M. D. and Keshavan, M. S. and Lowers, L. and Shen, S. and Hall, J. and Pitts, T.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Pages = {894--905}, + Title = {{{R}ight amygdala volume in adolescent and young adult offspring from families at high risk for developing alcoholism}}, + Volume = {49}, + Year = {2001}} + +@article{Hinson2002, + Author = {Hinson, J. M. and Jameson, T. L. and Whitney, P.}, + Journal = {Cognitive, Affective, \& Behavioral Neuroscience}, + Pages = {341--353}, + Title = {Somatic Markers, Working Memory, and Decision Making}, + Volume = {2}, + Year = {2002}} + +@article{Hinton1996, + Author = {Hinton, G. E. and Dayan, P.}, + Journal = {Neural Netw}, + Month = {Nov}, + Pages = {1385--1403}, + Title = {{{V}arieties of {H}elmholtz {M}achine}}, + Volume = {9}, + Year = {1996}} + +@article{Hiroi1997, + Author = {Hiroi, N. and Brown, J. R. and Haile, C. N. and Ye, H. and Greenberg, M. E. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Sep}, + Pages = {10397--10402}, + Title = {{{F}os{B} mutant mice: loss of chronic cocaine induction of {F}os-related proteins and heightened sensitivity to cocaine's psychomotor and rewarding effects}}, + Volume = {94}, + Year = {1997}} + +@article{Hjorth1982, + Author = {Hjorth, U.}, + Journal = {Scandinavian Journal of Statistics}, + Pages = {95--105}, + Title = {Model Selection and Forward Validation}, + Volume = {9}, + Year = {1982}} + +@article{Ho1988, + Author = {Ho, V. W. and Porrino, L. J. and Crane, A. M. and Burns, R. S. and Kopin, I. J. and Sokoloff, L.}, + Journal = {Ann. Neurol.}, + Month = {Jan}, + Pages = {86--89}, + Title = {{{M}etabolic mapping of the oculomotor system in {M}{P}{T}{P}-induced parkinsonian monkeys}}, + Volume = {23}, + Year = {1988}} + +@book{HBO, + Author = {Hoffman, J. and Froemke, S. 
and Sheff, D.}, + Publisher = {Rodale}, + Title = {{Addiction: why can't they just stop?: new knowledge, new treatments, new hope}}, + Year = {2007}} + +@article{Hoffman2008, + Abstract = {BACKGROUND: Methamphetamine (MA)-dependent individuals prefer smaller + immediate over larger delayed rewards in delay discounting (DD) tasks. + Human and animal data implicate ventral (amygdala, ventral striatum, + ventrolateral prefrontal cortex insula) and dorsal (dorsolateral + prefrontal cortex, dorsal anterior cingulate cortex and posterior + parietal cortex) systems in DD decisions. The ventral system is hypothesized + to respond to the salience and immediacy of rewards while the dorsal + system is implicated in the process of comparison and choice. METHODS: + We used functional Magnetic Resonance Imaging to probe the neural + correlates of DD in 19 recently abstinent MA-dependent patients and + 17 age- and gender-matched controls. RESULTS: Hard DD choices were + associated with greatest activation in bilateral middle cingulate, + posterior parietal cortex (PPC), and the right rostral insula. Control + subjects showed more activation than MA patients bilaterally in the + precuneus and in the right caudate nucleus, anterior cingulate cortex + (ACC), and dorsolateral prefrontal cortex (DLPFC). Magnitude of discounting + was correlated with activity in the amygdala, DLPFC, posterior cingulate + cortex and PPC. CONCLUSIONS: Our findings were consistent with a + model wherein dorsal cognitive systems modulate the neural response + of ventral regions. Patients addicted to MA, who strongly prefer + smaller immediate over larger delayed rewards, activate the dorsal + cognitive control system in order to overcome their preference. Activation + of the amygdala during choice of delayed rewards was associated with + a greater degree of discounting, suggesting that heavily discounting + MA-dependent individuals may be more responsive to the negative salience + of delayed rewards than controls.}, + Author = {William F Hoffman and Daniel L Schwartz and Marilyn S Huckans and Bentson H McFarland and Gal Meiri and Alexander A Stevens and Suzanne H Mitchell}, + Doi = {10.1007/s00213-008-1261-1}, + Institution = {Mental Health and Clinical Neurosciences Division P35C, Veterans Affairs Medical Center, 3710 SW US Veterans Hospital Road, Portland, OR 97239, USA. 
hoffmanw@ohsu.edu},
+ Journal = {Psychopharmacology (Berl)},
+ Keywords = {Adult; Amphetamine-Related Disorders, diagnosis/psychology; Amygdala, anatomy /&/ histology/drug effects/physiology; Behavior, Addictive, chemically induced/diagnosis/psychology; Choice Behavior, drug effects/physiology; Data Interpretation, Statistical; Female; Gyrus Cinguli, anatomy /&/ histology/drug effects/physiology; Humans; Image Processing, Computer-Assisted, methods; Magnetic Resonance Imaging; Male; Middle Aged; Models, Anatomic; Parietal Lobe, anatomy /&/ histology/drug effects/physiology; Prefrontal Cortex, anatomy /&/ histology/drug effects/physiology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Dec},
+ Number = {2},
+ Owner = {Woo-Young Ahn},
+ Pages = {183--193},
+ Pmid = {18685833},
+ Timestamp = {2009.08.06},
+ Title = {Cortical activation during delay discounting in abstinent methamphetamine dependent individuals.},
+ Url = {http://dx.doi.org/10.1007/s00213-008-1261-1},
+ Volume = {201},
+ Year = {2008},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-008-1261-1}}
+
+@article{Hoffrage2000,
+ Author = {Hoffrage, U. and Lindsey, S. and Hertwig, R. and Gigerenzer, G.},
+ Journal = {Science},
+ Pages = {2261--2262},
+ Title = {Communicating Statistical Information},
+ Volume = {290},
+ Year = {2000}}
+
+@article{Hoijtink2001,
+ Author = {Hoijtink, H.},
+ Journal = {Multivariate Behavioral Research},
+ Pages = {563--588},
+ Title = {Confirmatory Latent Class Analysis: {M}odel Selection Using {B}ayes Factors and (Pseudo) Likelihood Ratio Statistics},
+ Volume = {36},
+ Year = {2001}}
+
+@article{Holland2004,
+ Author = {Holland, P.C. and Gallagher, M.},
+ Journal = {Current opinion in neurobiology},
+ Number = {2},
+ Pages = {148--155},
+ Title = {{Amygdala--frontal interactions and reward expectancy}},
+ Volume = {14},
+ Year = {2004}}
+
+@article{Hommer2003,
+ Author = {Hommer, D. W. and Knutson, B. and Fong, G. W. and Bennett, S. and Adams, C. M. and Varner, J. L.},
+ Journal = {Ann. N. Y. Acad. Sci.},
+ Month = {Apr},
+ Pages = {476--478},
+ Title = {{{A}mygdalar recruitment during anticipation of monetary rewards: an event-related f{M}{R}{I} study}},
+ Volume = {985},
+ Year = {2003}}
+
+@book{Honerkamp1994,
+ Address = {New York},
+ Author = {Honerkamp, J.},
+ Publisher = {VCH Publishers},
+ Title = {Stochastic Dynamical Systems},
+ Year = {1994}}
+
+@article{Honey2003,
+ Author = {Honey, GD and Suckling, J. and Zelaya, F. and Long, C. and Routledge, C. and Jackson, S. and Ng, V. and Fletcher, PC and Williams, SCR and Brown, J. and others},
+ Journal = {Brain},
+ Number = {8},
+ Pages = {1767},
+ Publisher = {Oxford Univ Press},
+ Title = {{Dopaminergic drug effects on physiological connectivity in a human cortico-striato-thalamic system}},
+ Volume = {126},
+ Year = {2003}}
+
+@article{Hope1992,
+ Author = {Hope, B. and Kosofsky, B. and Hyman, S. E. and Nestler, E. J.},
+ Journal = {Proc. Natl. Acad. Sci. U.S.A.},
+ Month = {Jul},
+ Pages = {5764--5768},
+ Title = {{{R}egulation of immediate early gene expression and {A}{P}-1 binding in the rat nucleus accumbens by chronic cocaine}},
+ Volume = {89},
+ Year = {1992}}
+
+@article{Hosking1984,
+ Author = {Hosking, J. R. M.},
+ Journal = {Water Resources Research},
+ Pages = {1898--1908},
+ Title = {Modeling Persistence in Hydrological Time Series Using Fractional Differencing},
+ Volume = {20},
+ Year = {1984}}
+
+@article{Hosking1981,
+ Author = {Hosking, J. R. M.},
+ Journal = {Biometrika},
+ Pages = {165--176},
+ Title = {Fractional Differencing},
+ Volume = {68},
+ Year = {1981}}
+
+@article{Howard1998,
+ Author = {Howard, J. V.},
+ Journal = {Statistical Science},
+ Pages = {351--367},
+ Title = {The $2\times2$ Table: A Discussion from a {B}ayesian Viewpoint},
+ Volume = {13},
+ Year = {1998}}
+
+@article{Howlett2004,
+ Author = {Howlett, A. C. and Breivogel, C. S. and Childers, S. R. and Deadwyler, S. A. and Hampson, R. E. and Porrino, L. J.},
+ Journal = {Neuropharmacology},
+ Pages = {345--358},
+ Title = {{{C}annabinoid physiology and pharmacology: 30 years of progress}},
+ Volume = {47 Suppl 1},
+ Year = {2004}}
+
+@book{Howson2006,
+ Address = {Chicago},
+ Author = {Howson, C. and Urbach, P.},
+ Publisher = {Open Court},
+ Title = {Scientific Reasoning: {T}he {B}ayesian Approach (3rd. ed.)},
+ Year = {2006}}
+
+@article{Hsu2007,
+ Author = {Hsu, A. S. and Dayan, P.},
+ Journal = {Vision Res.},
+ Month = {Oct},
+ Pages = {2868--2877},
+ Title = {{{A}n unsupervised learning model of neural plasticity: {O}rientation selectivity in goggle-reared kittens}},
+ Volume = {47},
+ Year = {2007}}
+
+@article{Hsu2005,
+ Author = {Hsu, M. and Bhatt, M. and Adolphs, R. and Tranel, D. and Camerer, C.F.},
+ Journal = {Science},
+ Number = {5754},
+ Pages = {1680--1683},
+ Title = {{Neural systems responding to degrees of uncertainty in human decision-making}},
+ Volume = {310},
+ Year = {2005}}
+
+@article{Hsu2005a,
+ Author = {Hsu, R. and Taylor, J. R. and Newton, S. S. and Alvaro, J. D. and Haile, C. and Han, G. and Hruby, V. J. and Nestler, E. J. and Duman, R. S.},
+ Journal = {Eur. J. Neurosci.},
+ Month = {Apr},
+ Pages = {2233--2242},
+ Title = {{{B}lockade of melanocortin transmission inhibits cocaine reward}},
+ Volume = {21},
+ Year = {2005}}
+
+@article{Hua2005,
+ Author = {Hua, L. H. and Strigo, I. A. and Baxter, L. C. and Johnson, S. C. and Craig, A. D.},
+ Journal = {Am. J. Physiol. Regul. Integr. Comp. Physiol.},
+ Month = {Aug},
+ Pages = {R319--R325},
+ Title = {{{A}nteroposterior somatotopy of innocuous cooling activation focus in human dorsal posterior insular cortex}},
+ Volume = {289},
+ Year = {2005}}
+
+@article{Huang2008,
+ Author = {Huang, Y. H. and Lin, Y. and Brown, T. E. and Han, M. H. and Saal, D. B. and Neve, R. L. and Zukin, R. S. and Sorg, B. A. and Nestler, E. J. and Malenka, R. C. and Dong, Y.},
+ Journal = {J. Biol. Chem.},
+ Month = {Feb},
+ Pages = {2751--2760},
+ Title = {{{C}{R}{E}{B} modulates the functional output of nucleus accumbens neurons: a critical role of {N}-methyl-{D}-aspartate glutamate receptor ({N}{M}{D}{A}{R}) receptors}},
+ Volume = {283},
+ Year = {2008}}
+
+@article{Hubbard2003,
+ Author = {Hubbard, R. and Bayarri, M. J.},
+ Journal = {The American Statistician},
+ Pages = {171--182},
+ Title = {Confusion over Measures of Evidence ($p$'s) Versus Errors ($\alpha$'s) in Classical Statistical Testing},
+ Volume = {57},
+ Year = {2003}}
+
+@article{Huettel2006,
+ Author = {Huettel, S. A.
and Stowe, C. J. and Gordon, E. M. and Warner, B. T. and Platt, M. L.},
+ Journal = {Neuron},
+ Month = {Mar},
+ Pages = {765--775},
+ Title = {{{N}eural signatures of economic preferences for risk and ambiguity}},
+ Volume = {49},
+ Year = {2006}}
+
+@article{Huk2005a,
+ Author = {Huk, A. C. and Shadlen, M. N.},
+ Journal = {J. Neurosci.},
+ Month = {Nov},
+ Pages = {10420--10436},
+ Title = {{{N}eural activity in macaque parietal cortex reflects temporal integration of visual motion signals during perceptual decision making}},
+ Volume = {25},
+ Year = {2005}}
+
+@article{Hull1995,
+ Abstract = {A superior-anterior mediastinal tumor was excised from a 50-year-old
+ man. The 207-g mass was encapsulated and multilobulated. It contained
+ adipose tissue and abnormal thymic tissue. In some areas the thymic
+ tissue was characterized by cords and nests of epithelial cells lying
+ within either the adipose tissue or a myxoid matrix. Other areas
+ were characterized by cortical thymic tissue with increased numbers
+ of epithelial cells. Foci of normal medullary tissue were present.
+ The prominent epithelial cells were immunoreactive for cytokeratin
+ and nonimmunoreactive for vimentin, S-100, chromogranin, and parathyroid
+ hormone. Flow cytometry showed that the lymphocyte populations were
+ consistent with a late cortical thymic phenotype. The tumor was diploid.
+ By electron microscopy, the prominent epithelial cells had desmosomes
+ and a few tonofilaments. The cytoplasm contained additional organelles
+ including mitochondria, polyribosomes, and occasional lysosomes.
+ Nuclei were oval and had relatively smooth contours, prominent nucleoli,
+ and moderate quantities of heterochromatin. Basal lamina was present
+ around many nests and cords of cells. This is the first such study
+ of a tumor with this histology.},
+ Author = {M. T. Hull and K. A. Warfel and P. Kotylo and M. P. Goheen and J. W. Brown},
+ Institution = {Department of Pathology, Indiana University School of Medicine, Indianapolis, USA.},
+ Journal = {Ultrastruct Pathol},
+ Keywords = {DNA, Neoplasm, analysis; Flow Cytometry; Humans; Immunohistochemistry; Immunophenotyping; Lipoma, ultrastructure; Male; Microscopy, Electron; Middle Aged; Thymus Neoplasms, immunology/ultrastructure},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {75--81},
+ Pmid = {7770964},
+ Timestamp = {2009.08.04},
+ Title = {Proliferating thymolipoma: ultrastructural, immunohistochemical, and flowcytometric study.},
+ Volume = {19},
+ Year = {1995}}
+
+@article{Hultgren2009,
+ Author = {Hultgren, S. and Goldstein, J. M. and Delancey, J. O. and Bandstra, E. S. and Brady, K. T. and Brown, J. S. and Deng, H. W. and Dunaif, A. and Ehrmann, D. A. and Mayer, E. A. and Sinha, R. and Tobet, S. and Levine, J. E.},
+ Journal = {Science},
+ Month = {Feb},
+ Pages = {1009--1010},
+ Title = {{{T}he vital role of {O}{R}{W}{H}}},
+ Volume = {323},
+ Year = {2009}}
+
+@article{Hultsch2002,
+ Author = {Hultsch, D. F. and MacDonald, S. W. S. and Dixon, R. A.},
+ Journal = {Journal of Gerontology: Psychological Sciences},
+ Pages = {101--115},
+ Title = {Variability in Reaction Time Performance of Younger and Older Adults},
+ Volume = {57B},
+ Year = {2002}}
+
+@article{Huntjens2006,
+ Author = {Huntjens, R. J. C. and Peters, M. L. and Woertman, L. and Bovenschen, L. M. and Martin, R. C. and Postma, A.},
+ Journal = {Psychological Medicine},
+ Pages = {857--863},
+ Title = {Inter-identity Amnesia in Dissociative Identity Disorder: {A} Simulated Memory Impairment?},
+ Volume = {36},
+ Year = {2006}}
+
+@article{Hurd2001,
+ Author = {Hurd, Y.L. and Suzuki, M. and Sedvall, G.C.},
+ Journal = {Journal of Chemical Neuroanatomy},
+ Number = {1-2},
+ Pages = {127--137},
+ Publisher = {Elsevier},
+ Title = {{D1 and D2 dopamine receptor mRNA expression in whole hemisphere sections of the human brain}},
+ Volume = {22},
+ Year = {2001}}
+
+@article{Hurst1951,
+ Author = {Hurst, H. E.},
+ Journal = {Transactions of the American Society of Civil Engineers},
+ Pages = {770--799},
+ Title = {Long-term Storage Capacity of Reservoirs},
+ Volume = {116},
+ Year = {1951}}
+
+@article{Hurvich1989,
+ Author = {Hurvich, C. M. and Tsai, C.-L.},
+ Journal = {Biometrika},
+ Pages = {297--307},
+ Title = {Regression and Time Series Model Selection in Small Samples},
+ Volume = {76},
+ Year = {1989}}
+
+@article{Husted1994,
+ Author = {Husted, C. A. and Goodin, D. S. and Hugg, J. W. and Maudsley, A. A. and Tsuruda, J. S. and de Bie, S. H. and Fein, G. and Matson, G. B. and Weiner, M. W.},
+ Journal = {Ann. Neurol.},
+ Month = {Aug},
+ Pages = {157--165},
+ Title = {{{B}iochemical alterations in multiple sclerosis lesions and normal-appearing white matter detected by in vivo 31{P} and 1{H} spectroscopic imaging}},
+ Volume = {36},
+ Year = {1994}}
+
+@article{Hutchison2007,
+ Author = {Hutchison, K. E. and Allen, D. L. and Filbey, F. M. and Jepson, C. and Lerman, C. and Benowitz, N. L. and Stitzel, J. and Bryan, A. and McGeary, J. and Haughey, H. M.},
+ Journal = {Arch. Gen. Psychiatry},
+ Month = {Sep},
+ Pages = {1078--1086},
+ Title = {{{C}{H}{R}{N}{A}4 and tobacco dependence: from gene regulation to treatment outcome}},
+ Volume = {64},
+ Year = {2007}}
+
+@article{Hutchison1999,
+ Author = {Hutchison, K. E. and Niaura, R. and Swift, R.},
+ Journal = {Exp Clin Psychopharmacol},
+ Month = {Aug},
+ Pages = {250--256},
+ Title = {{{S}moking cues decrease prepulse inhibition of the startle response and increase subjective craving in humans}},
+ Volume = {7},
+ Year = {1999}}
+
+@article{Hutchison2006,
+ Author = {Hutchison, K. E. and Ray, L. and Sandman, E. and Rutter, M. C. and Peters, A. and Davidson, D. and Swift, R.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Jun},
+ Pages = {1310--1317},
+ Title = {{{T}he effect of olanzapine on craving and alcohol consumption}},
+ Volume = {31},
+ Year = {2006}}
+
+@article{Huys2009,
+ Author = {Huys, Q. J. and Dayan, P.},
+ Journal = {Cognition},
+ Month = {Mar},
+ Title = {{{A} {B}ayesian formulation of behavioral control}},
+ Year = {2009}}
+
+@article{Huys2007,
+ Author = {Huys, Q. J. and Zemel, R. S. and Natarajan, R.
and Dayan, P.},
+ Journal = {Neural Comput},
+ Month = {Feb},
+ Pages = {404--441},
+ Title = {{{F}ast population coding}},
+ Volume = {19},
+ Year = {2007}}
+
+@article{Hwang2006b,
+ Author = {Hwang, J. and Lyoo, I. K. and Dager, S. R. and Friedman, S. D. and Oh, J. S. and Lee, J. Y. and Kim, S. J. and Dunner, D. L. and Renshaw, P. F.},
+ Journal = {Am J Psychiatry},
+ Month = {Feb},
+ Pages = {276--285},
+ Title = {{{B}asal ganglia shape alterations in bipolar disorder}},
+ Volume = {163},
+ Year = {2006}}
+
+@article{Hwang2006a,
+ Author = {Hwang, J. and Lyoo, I. K. and Kim, S. J. and Sung, Y. H. and Bae, S. and Cho, S. N. and Lee, H. Y. and Lee, D. S. and Renshaw, P. F.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Apr},
+ Pages = {177--181},
+ Title = {{{D}ecreased cerebral blood flow of the right anterior cingulate cortex in long-term and short-term abstinent methamphetamine users}},
+ Volume = {82},
+ Year = {2006}}
+
+@article{Hwang2006,
+ Author = {Hwang, J. W. and Lyoo, I. K. and Kim, B. N. and Shin, M. S. and Kim, S. J. and Cho, S. C.},
+ Journal = {J Dev Behav Pediatr},
+ Month = {Feb},
+ Pages = {18--24},
+ Title = {{{T}he relationship between temperament and character and psychopathology in community children with overweight}},
+ Volume = {27},
+ Year = {2006}}
+
+@article{Hwang2002,
+ Author = {Hwang, K. Y. and Lee, B. K. and Bressler, J. P. and Bolla, K. I. and Stewart, W. F. and Schwartz, B. S.},
+ Journal = {Environ. Health Perspect.},
+ Month = {Feb},
+ Pages = {133--138},
+ Title = {{{P}rotein kinase {C} activity and the relations between blood lead and neurobehavioral function in lead workers}},
+ Volume = {110},
+ Year = {2002}}
+
+@article{Hwang1999,
+ Author = {Hwang, W.-L.},
+ Journal = {IEEE Transactions on Signal Processing},
+ Pages = {2211--2219},
+ Title = {Estimation of Fractional {B}rownian Motion Embedded in a Noisy Environment Using Nonorthogonal Wavelets},
+ Volume = {47},
+ Year = {1999}}
+
+@article{Hyman1953,
+ Author = {Hyman, R.},
+ Journal = {Journal of Experimental Psychology},
+ Pages = {188--196},
+ Title = {Stimulus Information as a Determinant of Reaction Time},
+ Volume = {45},
+ Year = {1953}}
+
+@article{Hyman2007,
+ Author = {Hyman, S. E.},
+ Journal = {Am J Bioeth},
+ Month = {Jan},
+ Pages = {8--11},
+ Title = {{{T}he neurobiology of addiction: implications for voluntary control of behavior}},
+ Volume = {7},
+ Year = {2007}}
+
+@article{Hyman2005,
+ Author = {Hyman, Steven E.},
+ Journal = {American Journal of Psychiatry},
+ Number = {8},
+ Owner = {WooYoung Ahn},
+ Pages = {1414--1422},
+ Timestamp = {2007.12.12},
+ Title = {Addiction: a disease of learning and memory},
+ Volume = {162},
+ Year = {2005}}
+
+@article{Hyman1999,
+ Author = {Hyman, S. E.},
+ Journal = {Nat. Neurosci.},
+ Month = {Oct},
+ Pages = {855--856},
+ Title = {{{S}elective gene expression increases behavioral sensitivity to cocaine}},
+ Volume = {2},
+ Year = {1999}}
+
+@article{Hyman1996,
+ Author = {Hyman, S. E.},
+ Journal = {Science},
+ Month = {Aug},
+ Pages = {611--612},
+ Title = {{{S}haking out the cause of addiction}},
+ Volume = {273},
+ Year = {1996}}
+
+@article{Hyman1996a,
+ Author = {Hyman, S. E.},
+ Journal = {Neuron},
+ Month = {May},
+ Pages = {901--904},
+ Title = {{{A}ddiction to cocaine and amphetamine}},
+ Volume = {16},
+ Year = {1996}}
+
+@article{Hyman1993,
+ Author = {Hyman, S.
E.},
+ Journal = {Curr Opin Neurol Neurosurg},
+ Month = {Aug},
+ Pages = {609--613},
+ Title = {{{M}olecular and cell biology of addiction}},
+ Volume = {6},
+ Year = {1993}}
+
+@article{Hyman1993a,
+ Author = {Hyman, S. E. and Kosofsky, B. E. and Nguyen, T. V. and Cohen, B. M. and Comb, M. J.},
+ Journal = {NIDA Res. Monogr.},
+ Pages = {25--38},
+ Title = {{{E}verything activates c-fos--how can it matter?}},
+ Volume = {125},
+ Year = {1993}}
+
+@article{Hyman2001,
+ Author = {Hyman, S. E. and Malenka, R. C.},
+ Journal = {Nat. Rev. Neurosci.},
+ Month = {Oct},
+ Pages = {695--703},
+ Title = {{{A}ddiction and the brain: the neurobiology of compulsion and its persistence}},
+ Volume = {2},
+ Year = {2001}}
+
+@article{Hyman2006,
+ Author = {Hyman, S. E. and Malenka, R. C. and Nestler, E. J.},
+ Journal = {Annu. Rev. Neurosci.},
+ Pages = {565--598},
+ Title = {{{N}eural mechanisms of addiction: the role of reward-related learning and memory}},
+ Volume = {29},
+ Year = {2006}}
+
+@article{Hyman2007b,
+ Author = {Hyman, S. M. and Fox, H. and Hong, K. I. and Doebrick, C. and Sinha, R.},
+ Journal = {Exp Clin Psychopharmacol},
+ Month = {Apr},
+ Pages = {134--143},
+ Title = {{{S}tress and drug-cue-induced craving in opioid-dependent individuals in naltrexone treatment}},
+ Volume = {15},
+ Year = {2007}}
+
+@article{Hyman2005a,
+ Author = {Hyman, S. M. and Garcia, M. and Kemp, K. and Mazure, C. M. and Sinha, R.},
+ Journal = {Addict Behav},
+ Month = {May},
+ Pages = {847--852},
+ Title = {{{A} gender specific psychometric analysis of the early trauma inventory short form in cocaine dependent adults}},
+ Volume = {30},
+ Year = {2005}}
+
+@article{Hyman2006b,
+ Author = {Hyman, S. M. and Garcia, M. and Sinha, R.},
+ Journal = {Am J Drug Alcohol Abuse},
+ Pages = {655--664},
+ Title = {{{G}ender specific associations between types of childhood maltreatment and the onset, escalation and severity of substance use in cocaine dependent adults}},
+ Volume = {32},
+ Year = {2006}}
+
+@article{Hyman2008,
+ Author = {Hyman, S. M. and Paliwal, P. and Chaplin, T. M. and Mazure, C. M. and Rounsaville, B. J. and Sinha, R.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Jan},
+ Pages = {208--216},
+ Title = {{{S}everity of childhood trauma is predictive of cocaine relapse outcomes in women but not men}},
+ Volume = {92},
+ Year = {2008}}
+
+@article{Hyman2007a,
+ Author = {Hyman, S. M. and Paliwal, P. and Sinha, R.},
+ Journal = {Psychol Addict Behav},
+ Month = {Jun},
+ Pages = {233--238},
+ Title = {{{C}hildhood maltreatment, perceived stress, and stress-related coping in recently abstinent cocaine dependent adults}},
+ Volume = {21},
+ Year = {2007}}
+
+@article{Hyman2009,
+ Author = {Hyman, S. M. and Sinha, R.},
+ Journal = {J Subst Abuse Treat},
+ Month = {Jun},
+ Pages = {400--413},
+ Title = {{{S}tress-related factors in cannabis use and misuse: implications for prevention and treatment}},
+ Volume = {36},
+ Year = {2009}}
+
+@article{Iguchi1989,
+ Author = {Iguchi, M. Y. and Griffiths, R. R. and Bickel, W. K. and Handelsman, L. and Childress, A. R. and McLellan, A. T.},
+ Journal = {NIDA Res. Monogr.},
+ Pages = {364--365},
+ Title = {{{R}elative abuse liability of benzodiazepines in methadone maintained populations in three cities}},
+ Volume = {95},
+ Year = {1989}}
+
+@article{Ingvar1998,
+ Abstract = {OBJECTIVE: We investigated the effect of 0.07\% alcohol on regional
+ brain activity at rest and during cognitive performance in order
+ to elucidate the anatomical substrate for the effects of alcohol
+ in man as well as to clarify the interaction between changes in cerebral
+ activity induced by cognitive performance and alcohol inebriation.
+ METHOD: Regional cerebral blood flow (3D-PET, 15O Butanol) was measured
+ in 13 male, nonalcoholic volunteers. Each subject was scanned 12
+ times, three scans in each of the following four situations: sober/rest,
+ sober/test and inebriated/rest, inebriated/test. We used statistical
+ parametric mapping and a computerized brain atlas for localization.
+ RESULTS: Alcohol induced a sense of inebriation and elation as well
+ as a relative increase of the cerebral blood flow in medial parts
+ of the temporal lobes, in the anterobasal parts of the anterior cingulate
+ cortex including the septal region. In addition, there was an increase
+ of blood flow in midline parts of the lower brain stem. Relative
+ decreases of flow were observed in the cerebellum and in the occipital
+ cortex bilaterally. In the sober state, a computerized perceptual
+ maze test and a (silent) serial seven test induced two distinct neocortical
+ activation patterns that were specific to the tasks. Alcohol did
+ not change these patterns and the test performance was also uninfluenced.
+ CONCLUSIONS: A moderate dose of alcohol selectively activates target
+ structures that pertain to the so-called cerebral reward system and
+ the ascending reticular activating system. Alcohol (approximately
+ 0.07\%) appears to have only minor effects in the neocortical systems
+ that are involved in on-line cognitive activity.
This apparent independence + between the subcortical alcohol target and the neocortical cognitive + mechanisms is a new finding that appears to be of importance for + an understanding of the effect of moderate doses of alcohol on the + brain.}, + Institution = {PET-Cognitive Neurophysiology R2-01, Department of Clinical Neuroscience, Karolinska Hospital, Stockholm, Sweden.}, + Journal = {J Stud Alcohol}, + Keywords = {Adult; Alcoholic Intoxication, psychology/radionuclide imaging; Arousal, drug effects/physiology; Brain Mapping; Brain, drug effects/radionuclide imaging; Dominance, Cerebral, drug effects/physiology; Ethanol, pharmacology; Euphoria, drug effects/physiology; Humans; Image Processing, Computer-Assisted; Infusions, Intravenous; Male; Maze Learning, drug effects/physiology; Motivation; Problem Solving, drug effects/physiology; Regional Blood Flow, drug effects/physiology; Reward; Serial Learning, drug effects/physiology; Tomography, Emission-Computed}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {258--269}, + Pmid = {9598706}, + Timestamp = {2009.08.04}, + Title = {Alcohol activates the cerebral reward system in man.}, + Volume = {59}, + Year = {1998}} + +@article{Insel1992, + Author = {Insel, TR}, + Journal = {Archives of General Psychiatry}, + Number = {9}, + Pages = {739--744}, + Publisher = {Am Med Assoc}, + Title = {{Toward a neuroanatomy of obsessive-compulsive disorder}}, + Volume = {49}, + Year = {1992}} + +@article{Insel2004, + Author = {Insel, T. R. and Volkow, N. D. and Landis, S. C. and Li, T. K. and Battey, J. F. and Sieving, P.}, + Journal = {Nat. Neurosci.}, + Month = {May}, + Pages = {426--427}, + Title = {{{L}imits to growth: why neuroscience needs large-scale science}}, + Volume = {7}, + Year = {2004}} + +@article{Insel2003, + Author = {Insel, T. R. and Volkow, N. D. and Li, T. K. and Battey, J. F. and Landis, S. C.}, + Journal = {PLoS Biol.}, + Month = {Oct}, + Pages = {E17}, + Title = {{{N}euroscience networks: data-sharing in an information age}}, + Volume = {1}, + Year = {2003}} + +@article{Iosifescu2005, + Author = {Iosifescu, D. V. and Papakostas, G. I. and Lyoo, I. K. and Lee, H. K. and Renshaw, P. F. and Alpert, J. E. and Nierenberg, A. and Fava, M.}, + Journal = {Psychiatry Res}, + Month = {Dec}, + Pages = {291--299}, + Title = {{{B}rain {M}{R}{I} white matter hyperintensities and one-carbon cycle metabolism in non-geriatric outpatients with major depressive disorder ({P}art {I})}}, + Volume = {140}, + Year = {2005}} + +@article{Iosifescu2007, + Author = {Iosifescu, D. V. and Renshaw, P. F. and Dougherty, D. D. and Lyoo, I. K. and Lee, H. K. and Fraguas, R. and Cassano, P. and Nierenberg, A. A. and Fava, M.}, + Journal = {J. Nerv. Ment. Dis.}, + Month = {Feb}, + Pages = {175--178}, + Title = {{{M}ajor depressive disorder with anger attacks and subcortical {M}{R}{I} white matter hyperintensities}}, + Volume = {195}, + Year = {2007}} + +@article{Iosifescu2006, + Author = {Iosifescu, D. V. and Renshaw, P. F. and Lyoo, I. K. and Lee, H. K. and Perlis, R. H. and Papakostas, G. I. and Nierenberg, A. A. and Fava, M.}, + Journal = {Br J Psychiatry}, + Month = {Feb}, + Pages = {180--185}, + Title = {{{B}rain white-matter hyperintensities and treatment outcome in major depressive disorder}}, + Volume = {188}, + Year = {2006}} + +@article{Irwin1992, + Author = {Irwin, K. B. and Craig, A. D. and Bracha, V. and Bloedel, J. R.}, + Journal = {Neurosci. 
Lett.}, + Month = {Dec}, + Pages = {71--75}, + Title = {{{D}istribution of c-fos expression in brainstem neurons associated with conditioning and pseudo-conditioning of the rabbit nictitating membrane reflex}}, + Volume = {148}, + Year = {1992}} + +@article{Ishii2002, + Author = {Ishii, S. and Yoshida, W. and Yoshimoto, J.}, + Journal = {Neural networks}, + Number = {4-6}, + Pages = {665--687}, + Publisher = {Elsevier}, + Title = {{Control of exploitation--exploration meta-parameter in reinforcement learning}}, + Volume = {15}, + Year = {2002}} + +@article{Ito1951, + Author = {It\^{o}, K.}, + Journal = {Memoirs of the American Mathematical Society}, + Pages = {1--51}, + Title = {On Stochastic Differential Equations}, + Volume = {4}, + Year = {1951}} + +@article{Ito2008, + Author = {Ito, S. and Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Jan}, + Pages = {301--327}, + Title = {{{S}triatal projections of the vagal-responsive region of the thalamic parafascicular nucleus in macaque monkeys}}, + Volume = {506}, + Year = {2008}} + +@article{Ito2005, + Author = {Ito, S. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Oct}, + Pages = {2976--2982}, + Title = {{{V}agal-evoked activity in the parafascicular nucleus of the primate thalamus}}, + Volume = {94}, + Year = {2005}} + +@article{Ito2003, + Author = {Ito, S. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Jul}, + Pages = {143--154}, + Title = {{{V}agal input to lateral area 3a in cat cortex}}, + Volume = {90}, + Year = {2003}} + +@article{Ito2003a, + Abstract = {Consensus is emerging that the medial frontal lobe of the brain is + involved in monitoring performance, but precisely what is monitored + remains unclear. A saccade-countermanding task affords an experimental + dissociation of neural signals of error, reinforcement, and conflict. + Single-unit activity was monitored in the anterior cingulate cortex + of monkeys performing this task. Neurons that signaled errors were + found, half of which responded to the omission of earned reinforcement. + A further diversity of neurons signaled earned or unexpected reinforcement. + No neurons signaled the form of conflict engendered by interruption + of saccade preparation produced in this task. These results are consistent + with the hypothesis that the anterior cingulate cortex monitors the + consequences of actions.}, + Author = {Shigehiko Ito and Veit Stuphorn and Joshua W Brown and Jeffrey D Schall}, + Doi = {10.1126/science.1087847}, + Institution = {of Psychology, Wilson Hall, Vanderbilt University, Nashville, TN 37203, USA.}, + Journal = {Science}, + Keywords = {Animals; Brain Mapping; Fixation, Ocular; Frontal Lobe, physiology; Gyrus Cinguli, physiology; Macaca radiata; Male; Neurons, physiology; Psychomotor Performance; Reinforcement (Psychology); Saccades; Visual Pathways}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {5642}, + Owner = {Woo-Young Ahn}, + Pages = {120--122}, + Pii = {302/5642/120}, + Pmid = {14526085}, + Timestamp = {2009.08.04}, + Title = {Performance monitoring by the anterior cingulate cortex during saccade countermanding.}, + Url = {http://dx.doi.org/10.1126/science.1087847}, + Volume = {302}, + Year = {2003}, + Bdsk-Url-1 = {http://dx.doi.org/10.1126/science.1087847}} + +@article{Jaakkola1995, + Author = {Jaakkola, T. and Singh, S.P. 
and Jordan, M.I.},
+ Journal = {Advances in Neural Information Processing Systems 7},
+ Title = {{Reinforcement learning algorithm for partially observable Markov decision problems}},
+ Year = {1995}}
+
+@article{Janssen2005a,
+ Author = {Janssen, P. and Shadlen, M. N.},
+ Journal = {Nat. Neurosci.},
+ Month = {Feb},
+ Pages = {234--241},
+ Title = {{{A} representation of the hazard rate of elapsed time in macaque area {L}{I}{P}}},
+ Volume = {8},
+ Year = {2005}}
+
+@article{Jasiukaitis1999,
+ Author = {Jasiukaitis, P. and Fein, G.},
+ Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry},
+ Month = {Aug},
+ Pages = {1019--1036},
+ Title = {{{I}ntact visual word priming in cocaine dependent subjects with and without cognitive deficit}},
+ Volume = {23},
+ Year = {1999}}
+
+@incollection{Jaynes1976,
+ Address = {Dordrecht, Holland},
+ Author = {Jaynes, E. T.},
+ Booktitle = {Foundations of Probability Theory, Statistical Inference, and Statistical Theories of Science, {V}ol. {II}},
+ Editor = {Harper, W. L. and Hooker, C. A.},
+ Pages = {175--257},
+ Publisher = {{D}. {R}eidel Publishing Company},
+ Title = {Confidence Intervals vs {B}ayesian Intervals},
+ Year = {1976}}
+
+@book{Jaynes2003,
+ Address = {Cambridge, UK},
+ Author = {Jaynes, E. T.},
+ Publisher = {Cambridge University Press},
+ Title = {Probability Theory: The Logic of Science},
+ Year = {2003}}
+
+@article{Jaynes1968,
+ Author = {Jaynes, E. T.},
+ Journal = {{IEEE} Transactions on Systems Science and Cybernetics},
+ Pages = {227--241},
+ Title = {Prior Probabilities},
+ Volume = {4},
+ Year = {1968}}
+
+@article{Jazbec2006,
+ Author = {Jazbec, S. and Hardin, M. G. and Schroth, E. and McClure, E. and Pine, D. S. and Ernst, M.},
+ Journal = {Exp Brain Res},
+ Month = {Oct},
+ Pages = {754--762},
+ Title = {{{A}ge-related influence of contingencies on a saccade task}},
+ Volume = {174},
+ Year = {2006}}
+
+@article{Jazbec2005,
+ Author = {Jazbec, S. and McClure, E. and Hardin, M. and Pine, D. S. and Ernst, M.},
+ Journal = {Biol. Psychiatry},
+ Month = {Oct},
+ Pages = {632--639},
+ Title = {{{C}ognitive control under contingencies in anxious and depressed adolescents: an antisaccade task}},
+ Volume = {58},
+ Year = {2005}}
+
+@book{Jeffreys1961,
+ Address = {Oxford, UK},
+ Author = {Jeffreys, H.},
+ Publisher = {Oxford University Press},
+ Title = {Theory of Probability},
+ Year = {1961}}
+
+@article{Jennison1990,
+ Author = {Jennison, C. and Turnbull, B. W.},
+ Journal = {Statistical Science},
+ Pages = {299--317},
+ Title = {Statistical Approaches to Interim Monitoring of Medical Trials: {A} Review and Commentary},
+ Volume = {5},
+ Year = {1990}}
+
+@book{Jensen1998,
+ Address = {Westport (CT)},
+ Author = {Jensen, A. R.},
+ Publisher = {Praeger Publishers},
+ Title = {The \textit{g} Factor},
+ Year = {1998}}
+
+@article{Jensen2008,
+ Author = {Jensen, J. E. and Daniels, M. and Haws, C. and Bolo, N. R. and Lyoo, I. K. and Yoon, S. J. and Cohen, B. M. and Stoll, A. L. and Rusche, J. R. and Renshaw, P. F.},
+ Journal = {Exp Clin Psychopharmacol},
+ Month = {Jun},
+ Pages = {199--206},
+ Title = {{{T}riacetyluridine ({T}{A}{U}) decreases depressive symptoms and increases brain p{H} in bipolar patients}},
+ Volume = {16},
+ Year = {2008}}
+
+@article{Jentsch2002,
+ Author = {Jentsch, J. D. and Olausson, P. and Nestler, E. J. and Taylor, J. R.},
+ Journal = {Biol.
Psychiatry}, + Month = {Jul}, + Pages = {111--118}, + Title = {{{S}timulation of protein kinase a activity in the rat amygdala enhances reward-related learning}}, + Volume = {52}, + Year = {2002}} + +@article{Jessup2008, + Author = {Jessup, R.K. and Bishara, A.J. and Busemeyer, J.R.}, + Journal = {Psychological Science}, + Number = {10}, + Pages = {1015--1022}, + Publisher = {Blackwell Publishing Inc}, + Title = {{Feedback Produces Divergence From Prospect Theory in Descriptive Choice}}, + Volume = {19}, + Year = {2008}} + +@article{Jessup2009, + Author = {Jessup, R. K. and O'Doherty, J. P.}, + Journal = {Neuron}, + Month = {Mar}, + Pages = {649--650}, + Title = {{{I}t was nice not seeing you: perceptual learning with rewards in the absence of awareness}}, + Volume = {61}, + Year = {2009}} + +@article{Jiang1999, + Author = {Jiang, G. J. and Knight, J. L.}, + Journal = {Journal of Computational Finance}, + Pages = {1--34}, + Title = {Finite Sample Comparison of Alternative Estimators of {I}t\^{o} diffusion processes -- A {M}onte {C}arlo Study}, + Volume = {2}, + Year = {1999}} + +@article{Jiang1997, + Author = {Jiang, G. J. and Knight, J. L.}, + Journal = {Econometric Theory}, + Pages = {615--645}, + Title = {A Nonparametric Approach to the Estimation of Diffusion Processes, With an Application to a Short--Term Interest Rate Model}, + Volume = {13}, + Year = {1997}} + +@article{Jones2002, + Author = {Jones, A. D. and Cho, R. Y. and Nystrom, L. E. and Cohen, J. D. and Braver, T. S.}, + Journal = {Cognitive, Affective, \& Behavioral Neuroscience}, + Pages = {300--317}, + Title = {A Computational Model of Anterior Cingulate Function in Speeded Response Tasks: {E}ffects of Frequency, Sequence, and Conflict.}, + Volume = {2}, + Year = {2002}} + +@article{Jones2002a, + Author = {Jones, A. D. and Cho, R. Y. and Nystrom, L. E. and Cohen, J. D. and Braver, T. S.}, + Journal = {Cogn Affect Behav Neurosci}, + Month = {Dec}, + Pages = {300--317}, + Title = {{{A} computational model of anterior cingulate function in speeded response tasks: effects of frequency, sequence, and conflict}}, + Volume = {2}, + Year = {2002}} + +@article{Jones2009, + Abstract = {Recently, delay discounting has been argued to be conceptually consistent + with the notion of temporal horizon [Bickel, W.K., Yi, R., Kowal, + B.P., Gatchalian, K.M., 2008. Cigarette smokers discount past and + future rewards symmetrically and more than controls: is discounting + a measure of impulsivity? Drug Alcohol Depend. 96, 256-262]. Temporal + horizon refers to the temporal distance over which behavioral events + or objects can influence behavior. Here we examine the results on + two putative measures of temporal horizon, future time perspective + (FTP) and delay discounting, collected over three separate studies + (n=227), to determine the influence of smoking and gender on temporal + horizon. By comparing the results on these temporal horizon measures + we address our population of interest: women who smoke. One of the + measures of FTP indicates that smoking women have a shorter temporal + horizon than their nonsmoking counterparts. Additionally, the story + completion measures of FTP are positively correlated with delay discounting. + In contrast, results of delay discounting measures showed no difference + between smoking women and nonsmoking women, while results of delay + discounting measures indicated smoking men have a shorter temporal + horizon than non-smoking men. 
Additionally, the results of the FTP
+ story completion measure indicated that lower third income earners
+ had a shortened temporal horizon compared to upper third income earners.
+ A possible explanation for these results is explored, and the implications
+ of the modulation of temporal horizon by gender and smoking are discussed.},
+ Author = {Bryan A Jones and Reid D Landes and Richard Yi and Warren K Bickel},
+ Doi = {10.1016/j.drugalcdep.2009.04.001},
+ Institution = {chiatry, Center for Addiction Research, 4301 W. Markham St. \#843, Little Rock, AR 72205-7199, United States.},
+ Journal = {Drug Alcohol Depend},
+ Language = {eng},
+ Medline-Pst = {aheadofprint},
+ Month = {May},
+ Owner = {Woo-Young Ahn},
+ Pii = {S0376-8716(09)00116-1},
+ Pmid = {19446407},
+ Timestamp = {2009.08.06},
+ Title = {Temporal horizon: Modulation by smoking status and gender.},
+ Url = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.001},
+ Year = {2009},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.001}}
+
+@article{Juckel2006a,
+ Author = {Juckel, G. and Schlagenhauf, F. and Koslowski, M. and Filonov, D. and W\"{u}stenberg, T. and Villringer, A. and Knutson, B.
and Kienast, T. and Gallinat, J. and Wrase, J. and Heinz, A.},
+ Journal = {Psychopharmacology (Berl.)},
+ Month = {Aug},
+ Pages = {222--228},
+ Title = {{{D}ysfunction of ventral striatal reward prediction in schizophrenic patients treated with typical, not atypical, neuroleptics}},
+ Volume = {187},
+ Year = {2006}}
+
+@article{Juckel2006,
+ Author = {Juckel, G. and Schlagenhauf, F. and Koslowski, M. and W\"{u}stenberg, T. and Villringer, A. and Knutson, B. and Wrase, J. and Heinz, A.},
+ Journal = {Neuroimage},
+ Month = {Jan},
+ Pages = {409--416},
+ Title = {{{D}ysfunction of ventral striatal reward prediction in schizophrenia}},
+ Volume = {29},
+ Year = {2006}}
+
+@article{Judd1998,
+ Author = {Judd, L. L. and Akiskal, H. S. and Maser, J. D. and Zeller, P. J. and Endicott, J. and Coryell, W. and Paulus, M. P. and Kunovac, J. L. and Leon, A. C. and Mueller, T. I. and Rice, J. A. and Keller, M. B.},
+ Journal = {J Affect Disord},
+ Month = {Sep},
+ Pages = {97--108},
+ Title = {{{M}ajor depressive disorder: a prospective study of residual subthreshold depressive symptoms as predictor of rapid relapse}},
+ Volume = {50},
+ Year = {1998}}
+
+@article{Judd1998b,
+ Author = {Judd, L. L. and Akiskal, H. S. and Maser, J. D. and Zeller, P. J. and Endicott, J. and Coryell, W. and Paulus, M. P. and Kunovac, J. L. and Leon, A. C. and Mueller, T. I. and Rice, J. A. and Keller, M. B.},
+ Journal = {Arch. Gen. Psychiatry},
+ Month = {Aug},
+ Pages = {694--700},
+ Title = {{{A} prospective 12-year study of subsyndromal and syndromal depressive symptoms in unipolar major depressive disorders}},
+ Volume = {55},
+ Year = {1998}}
+
+@article{Judd1997,
+ Author = {Judd, L. L. and Akiskal, H. S. and Paulus, M. P.},
+ Journal = {J Affect Disord},
+ Month = {Aug},
+ Pages = {5--17},
+ Title = {{{T}he role and clinical significance of subsyndromal depressive symptoms ({S}{S}{D}) in unipolar major depressive disorder}},
+ Volume = {45},
+ Year = {1997}}
+
+@article{Judd1998a,
+ Author = {Judd, L. L. and Kessler, R. C. and Paulus, M. P. and Zeller, P. V. and Wittchen, H. U. and Kunovac, J. L.},
+ Journal = {Acta Psychiatr Scand Suppl},
+ Pages = {6--11},
+ Title = {{{C}omorbidity as a fundamental feature of generalized anxiety disorders: results from the {N}ational {C}omorbidity {S}tudy ({N}{C}{S})}},
+ Volume = {393},
+ Year = {1998}}
+
+@article{Judd1996,
+ Author = {Judd, L. L. and Paulus, M. P. and Wells, K. B. and Rapaport, M. H.},
+ Journal = {Am J Psychiatry},
+ Month = {Nov},
+ Pages = {1411--1417},
+ Title = {{{S}ocioeconomic burden of subsyndromal depressive symptoms and major depression in a sample of the general population}},
+ Volume = {153},
+ Year = {1996}}
+
+@article{Judd1999,
+ Author = {Judd, L. L. and Paulus, M. P. and Zeller, P.},
+ Journal = {Arch. Gen. Psychiatry},
+ Month = {Aug},
+ Pages = {764--765},
+ Title = {{{T}he role of residual subthreshold depressive symptoms in early episode relapse in unipolar major depressive disorder}},
+ Volume = {56},
+ Year = {1999}}
+
+@article{Judd1994,
+ Author = {Judd, L. L. and Rapaport, M. H. and Paulus, M. P. and Brown, J. L.},
+ Journal = {J Clin Psychiatry},
+ Month = {Apr},
+ Pages = {18--28},
+ Title = {{{S}ubsyndromal symptomatic depression: a new mood disorder?}},
+ Volume = {55 Suppl},
+ Year = {1994}}
+
+@article{Jung2007,
+ Author = {Jung, Y.C. and Jang, D.P. and Namkoong, K. and Ku, J. and Kim, J.J. and Park, S. and Cho, Z.H. and Kim, Y.B.
and Lee, E.},
+ Journal = {NeuroReport},
+ Number = {17},
+ Pages = {1787},
+ Title = {{Shape deformation of the insula in alcoholics: reduction of left-right asymmetry}},
+ Volume = {18},
+ Year = {2007}}
+
+@article{Justus2007,
+ Author = {Justus, A. N. and Finn, P. R.},
+ Journal = {Pers Individ Dif},
+ Month = {Dec},
+ Pages = {2057--2071},
+ Title = {{{S}tartle modulation in non-incarcerated men and women with psychopathic traits}},
+ Volume = {43},
+ Year = {2007}}
+
+@article{Justus2001,
+ Author = {Justus, A. N. and Finn, P. R. and Steinmetz, J. E.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Oct},
+ Pages = {1457--1466},
+ Title = {{{P}300, disinhibited personality, and early-onset alcohol problems}},
+ Volume = {25},
+ Year = {2001}}
+
+@article{Justus2000,
+ Author = {Justus, A. N. and Finn, P. R. and Steinmetz, J. E.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Jul},
+ Pages = {1028--1035},
+ Title = {{{T}he influence of traits of disinhibition on the association between alcohol use and risky sexual behavior}},
+ Volume = {24},
+ Year = {2000}}
+
+@article{Kubler2006,
+ Author = {K\"{u}bler, A. and Dixon, V. and Garavan, H.},
+ Journal = {J Cogn Neurosci},
+ Month = {Aug},
+ Pages = {1331--1342},
+ Title = {{{A}utomaticity and reestablishment of executive control-an f{M}{R}{I} study}},
+ Volume = {18},
+ Year = {2006}}
+
+@article{Kali2004,
+ Author = {K\'{a}li, S. and Dayan, P.},
+ Journal = {Nat. Neurosci.},
+ Month = {Mar},
+ Pages = {286--294},
+ Title = {{{O}ff-line replay maintains declarative memories in a model of hippocampal-neocortical interactions}},
+ Volume = {7},
+ Year = {2004}}
+
+@article{Kadane1996,
+ Author = {Kadane, J. B. and Schervish, M. J. and Seidenfeld, T.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {1228--1235},
+ Title = {Reasoning to a Foregone Conclusion},
+ Volume = {91},
+ Year = {1996}}
+
+@article{Kadane1996a,
+ Author = {Kadane, J. B. and Schervish, M. J. and Seidenfeld, T.},
+ Journal = {Philosophy of Science},
+ Pages = {S281--S289},
+ Title = {When Several {B}ayesians Agree That There Will Be No Reasoning to a Foregone Conclusion},
+ Volume = {63},
+ Year = {1996}}
+
+@article{Kadane2002,
+ Author = {Kadane, J. B. and Vlachos, P. K.},
+ Journal = {Statistics and Computing},
+ Pages = {147--152},
+ Title = {Hybrid Methods for Calculating Optimal Few--Stage Sequential Strategies: {D}ata Monitoring for a Clinical Trial},
+ Volume = {12},
+ Year = {2002}}
+
+@article{Kaelbling1996,
+ Author = {Kaelbling, L. P. and Littman, M. L. and Moore, A. W.},
+ Journal = {Journal of Artificial Intelligence Research},
+ Pages = {237--285},
+ Title = {Reinforcement Learning: {A} Survey},
+ Volume = {4},
+ Year = {1996}}
+
+@article{Kahn2002,
+ Author = {Kahn, I. and Yeshurun, Y. and Rotshtein, P. and Fried, I. and Ben-Bashat, D. and Hendler, T.},
+ Journal = {Neuron},
+ Number = {6},
+ Pages = {983--994},
+ Publisher = {Elsevier},
+ Title = {{The role of the amygdala in signaling prospective outcome of choice}},
+ Volume = {33},
+ Year = {2002}}
+
+@article{Kahn2009,
+ Author = {Kahn, R. and Biswas, K. and Childress, A. R. and Shoptaw, S. and Fudala, P. J. and Gorgon, L. and Montoya, I. and Collins, J. and McSherry, F. and Li, S. H. and Chiang, N. and Alathari, H. and Watson, D. and Liberto, J. and Beresford, T. and Stock, C. and Wallace, C. and Gruber, V.
and Elkashef, A.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Jul},
+ Pages = {59--64},
+ Title = {{{M}ulti-center trial of baclofen for abstinence initiation in severe cocaine-dependent individuals}},
+ Volume = {103},
+ Year = {2009}}
+
+@article{Kahneman1979,
+ Author = {Kahneman, D. and Tversky, A.},
+ Journal = {Econometrica: Journal of the Econometric Society},
+ Pages = {263--291},
+ Publisher = {The Econometric Society},
+ Title = {{Prospect theory: An analysis of decision under risk}},
+ Year = {1979}}
+
+@article{Kahneman1997,
+ Author = {Kahneman, D. and Wakker, P.P. and Sarin, R.},
+ Journal = {The Quarterly Journal of Economics},
+ Number = {2},
+ Pages = {375--405},
+ Publisher = {MIT Press},
+ Title = {{Back to Bentham? Explorations of Experienced Utility}},
+ Volume = {112},
+ Year = {1997}}
+
+@article{Kakade2002,
+ Author = {Kakade, S. and Dayan, P.},
+ Journal = {Neural Netw},
+ Pages = {549--559},
+ Title = {{{D}opamine: generalization and bonuses}},
+ Volume = {15},
+ Year = {2002}}
+
+@article{Kakade2002a,
+ Author = {Kakade, S. and Dayan, P.},
+ Journal = {Psychol Rev},
+ Month = {Jul},
+ Pages = {533--544},
+ Title = {{{A}cquisition and extinction in autoshaping}},
+ Volume = {109},
+ Year = {2002}}
+
+@article{Kalenscher2005,
+ Abstract = {BACKGROUND: Animals prefer small over large rewards when the delays
+ preceding large rewards exceed an individual tolerance limit. Such
+ impulsive choice behavior occurs even in situations in which alternative
+ strategies would yield more optimal outcomes. Behavioral research
+ has shown that an animal's choice is guided by the alternative rewards'
+ subjective values, which are a function of reward amount and time-to-reward.
+ Despite increasing knowledge about the pharmacology and anatomy underlying
+ impulsivity, it is still unknown how the brain combines reward amount
+ and time-to-reward information to represent subjective reward value.
+ RESULTS: We trained pigeons to choose between small, immediate rewards
+ and large rewards delivered after gradually increasing delays. Single-cell
+ recordings in the avian Nidopallium caudolaterale, the presumed functional
+ analog of the mammalian prefrontal cortex, revealed that neural delay
+ activation decreased with increasing delay length but also covaried
+ with the expected reward amount. This integrated neural response
+ was modulated by reward amount and delay, as predicted by a hyperbolical
+ equation, of subjective reward value derived from behavioral studies.
+ Furthermore, the neural activation pattern reflected the current
+ reward preference and the time point of the shift from large to small
+ rewards. CONCLUSIONS: The reported activity was modulated by the
+ temporal devaluation of the anticipated reward in addition to reward
+ amount.
Our findings contribute to the understanding of neuropathologies
+ such as drug addiction, pathological gambling, frontal lobe syndrome,
+ and attention-deficit disorders, which are characterized by inappropriate
+ temporal discounting and increased impulsiveness.},
+ Doi = {10.1016/j.cub.2005.02.052},
+ Journal = {Curr Biol},
+ Keywords = {Animals; Choice Behavior, physiology; Columbidae, physiology; Electrophysiology; Impulsive Behavior; Neurons, physiology; Prefrontal Cortex, physiology; Reward; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Apr},
+ Number = {7},
+ Owner = {Woo-Young Ahn},
+ Pages = {594--602},
+ Pii = {S0960-9822(05)00224-1},
+ Pmid = {15823531},
+ Timestamp = {2009.08.06},
+ Title = {Single units in the pigeon brain integrate reward amount and time-to-reward in an impulsive choice task.},
+ Url = {http://dx.doi.org/10.1016/j.cub.2005.02.052},
+ Volume = {15},
+ Year = {2005},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.cub.2005.02.052}}
+
+@article{Kalisch2005,
+ Author = {Kalisch, R. and Wiech, K. and Critchley, H. D. and Seymour, B. and O'Doherty, J. P. and Oakley, D. A. and Allen, P. and Dolan, R. J.},
+ Journal = {J Cogn Neurosci},
+ Month = {Jun},
+ Pages = {874--883},
+ Title = {{{A}nxiety reduction through detachment: subjective, physiological, and neural effects}},
+ Volume = {17},
+ Year = {2005}}
+
+@article{Kalivas2005,
+ Author = {Kalivas, P. W. and Volkow, N. D.},
+ Journal = {Am J Psychiatry},
+ Month = {Aug},
+ Pages = {1403--1413},
+ Title = {{{T}he neural basis of addiction: a pathology of motivation and choice}},
+ Volume = {162},
+ Year = {2005}}
+
+@article{Kanegaye2009,
+ Author = {Kanegaye, J. T. and Nigrovic, L. E. and Malley, R. and Cannavino, C. R. and Schwab, S. H. and Bennett, J. E. and Mohseni, M. M. and Wang, V. J. and Katsogridakis, Y. L. and Herman, M. I. and Kuppermann, N. and Agrawal, D. and Bandyopadhyay, S. and Bonsu, B. and Bulloch, B. and Chapman, J. and Dayan, P. and Ishimine, P. and Johnston, P. and Kaplan, R. and Leake, J. and Macias, C. G. and Mansour, K. and McCaslin, R. I. and Moro-Sutherland, D. and Riffenburgh, R. H. and Schremmer, R. and Steele, D. and Truong, U.},
+ Journal = {Pediatrics},
+ Month = {Jun},
+ Pages = {e967--971},
+ Title = {{{D}iagnostic value of immature neutrophils (bands) in the cerebrospinal fluid of children with cerebrospinal fluid pleocytosis}},
+ Volume = {123},
+ Year = {2009}}
+
+@article{Kaplan1993,
+ Author = {Kaplan, L. M. and Kuo, C.--C. J.},
+ Journal = {IEEE Transactions on Signal Processing},
+ Pages = {3554--3562},
+ Title = {Fractal Estimation from Noisy Data via Discrete Fractional Gaussian Noise ({DFGN}) and the {H}aar Basis},
+ Volume = {41},
+ Year = {1993}}
+
+@article{Karabatsos2006,
+ Author = {Karabatsos, G.},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {123--148},
+ Title = {Bayesian Nonparametric Model Selection and Model Testing},
+ Volume = {50},
+ Year = {2006}}
+
+@article{Kass1993,
+ Author = {Kass, R. E.},
+ Journal = {The Statistician},
+ Pages = {551--560},
+ Title = {{B}ayes Factors in Practice},
+ Volume = {42},
+ Year = {1993}}
+
+@article{Kass1982,
+ Author = {Kass, R. E.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {347--349},
+ Title = {Comment on ``{L}indley's Paradox'' by Glenn Shafer},
+ Volume = {77},
+ Year = {1982}}
+
+@article{Kass1995,
+ Author = {Kass, R. E. and Raftery, A.
E.}, + Journal = {Journal of the American Statistical Association}, + Pages = {377--395}, + Title = {{B}ayes Factors}, + Volume = {90}, + Year = {1995}} + +@article{Kass1996, + Author = {Kass, R. E. and Wasserman, L.}, + Journal = {Journal of the American Statistical Association}, + Pages = {1343--1370}, + Title = {The Selection of Prior Distributions by Formal Rules}, + Volume = {91}, + Year = {1996}} + +@article{Kass1995a, + Author = {Kass, R. E. and Wasserman, L.}, + Journal = {Journal of the American Statistical Association}, + Pages = {928--934}, + Title = {A Reference {B}ayesian test for Nested Hypotheses and Its Relationship to the {S}chwarz Criterion}, + Volume = {90}, + Year = {1995}} + +@article{Kaufman1990, + Author = {Kaufman, E. E. and Porrino, L. J. and Nelson, T.}, + Journal = {Biochem. Pharmacol.}, + Month = {Dec}, + Pages = {2637--2640}, + Title = {{{P}yretic action of low doses of gamma-hydroxybutyrate in rats}}, + Volume = {40}, + Year = {1990}} + +@article{Kaufman2003, + Author = {Kaufman, J. N. and Ross, T. J. and Stein, E. A. and Garavan, H.}, + Journal = {J. Neurosci.}, + Month = {Aug}, + Pages = {7839--7843}, + Title = {{{C}ingulate hypoactivity in cocaine users during a {G}{O}-{N}{O}{G}{O} task as revealed by event-related functional magnetic resonance imaging}}, + Volume = {23}, + Year = {2003}} + +@article{Keep2006, + Author = {Keep, M. F. and Mastrofrancesco, L. and Craig, A. D. and Ashby, L. S.}, + Journal = {J. Neurosurg.}, + Month = {Dec}, + Pages = {222--228}, + Title = {{{G}amma {K}nife surgery targeting the centromedian nucleus of the thalamus for the palliative management of thalamic pain: durable response in stroke-induced thalamic pain syndrome}}, + Volume = {105 Suppl}, + Year = {2006}} + +@article{Kelly2005, + Author = {Kelly, A. M. and Garavan, H.}, + Journal = {Cereb. Cortex}, + Month = {Aug}, + Pages = {1089--1102}, + Title = {{{H}uman functional neuroimaging of brain changes associated with practice}}, + Volume = {15}, + Year = {2005}} + +@article{Kelly2006a, + Author = {Kelly, A. M. and Hester, R. and Foxe, J. J. and Shpaner, M. and Garavan, H.}, + Journal = {Neuroimage}, + Month = {Jun}, + Pages = {866--886}, + Title = {{{F}lexible cognitive control: effects of individual differences and brief practice on a complex cognitive task}}, + Volume = {31}, + Year = {2006}} + +@article{Kelly2004, + Author = {Kelly, A. M. and Hester, R. and Murphy, K. and Javitt, D. C. and Foxe, J. J. and Garavan, H.}, + Journal = {Eur. J. Neurosci.}, + Month = {Jun}, + Pages = {3105--3112}, + Title = {{{P}refrontal-subcortical dissociations underlying inhibitory control revealed by event-related f{M}{R}{I}}}, + Volume = {19}, + Year = {2004}} + +@article{Kelly2006, + Author = {Kelly, C. and Foxe, J. J. and Garavan, H.}, + Journal = {Arch Phys Med Rehabil}, + Month = {Dec}, + Pages = {S20--29}, + Title = {{{P}atterns of normal human brain plasticity after practice and their implications for neurorehabilitation}}, + Volume = {87}, + Year = {2006}} + +@article{Kelz1999, + Author = {Kelz, M. B. and Chen, J. and Carlezon, W. A. and Whisler, K. and Gilden, L. and Beckmann, A. M. and Steffen, C. and Zhang, Y. J. and Marotti, L. and Self, D. W. and Tkatch, T. and Baranauskas, G. and Surmeier, D. J. and Neve, R. L. and Duman, R. S. and Picciotto, M. R. and Nestler, E. 
J.}, + Journal = {Nature}, + Month = {Sep}, + Pages = {272--276}, + Title = {{{E}xpression of the transcription factor delta{F}os{B} in the brain controls sensitivity to cocaine}}, + Volume = {401}, + Year = {1999}} + +@article{Kelz2000, + Author = {Kelz, M. B. and Nestler, E. J.}, + Journal = {Curr. Opin. Neurol.}, + Month = {Dec}, + Pages = {715--720}, + Title = {{delta{F}os{B}: a molecular switch underlying long-term neural plasticity}}, + Volume = {13}, + Year = {2000}} + +@article{Kerridge1963, + Author = {Kerridge, D.}, + Journal = {The Annals of Mathematical Statistics}, + Pages = {1109--1110}, + Title = {Bounds for the Frequency of Misleading {B}ayes Inferences}, + Volume = {34}, + Year = {1963}} + +@article{Kesler2003, + Abstract = {OBJECTIVES: The purpose of this study was to determine the pattern + of mediastinal dissemination of nonseminomatous germ cell tumors + of testicular origin and evaluate variables that may influence survival + with mediastinal dissection in patients with metastatic nonseminomatous + germ cell tumors. METHODS: From 1981 to 2000, a total of 421 patients + were seen at our institution for extirpation of residual lung or + mediastinal disease after cisplatin-based chemotherapy for metastatic + testicular nonseminomatous germ cell tumors. We reviewed 268 of these + patients, with a mean age of 26.8 years, who required at least one + surgical procedure to remove residual mediastinal disease. Pathologic + types of resected residual mediastinal disease were necrosis (15\%), + teratoma (59\%), persistent nonseminomatous germ cell cancer (15\%), + and non-germ cell carcinomatous degeneration (11\%). Twelve variables + were evaluated by univariate analyses, and four variables potentially + statistically significant at P <.10 were subsequently entered into + a Cox regression model. RESULTS: All patients demonstrated metastases + to the visceral mediastinum. Fewer patients also demonstrated metastases + to the paravertebral sulcus or anterior compartments (16\% and 7\%, + respectively). Overall 5- and 10-year survivals were 86\% +/- 2\% + and 74\% +/- 4\%, respectively. According to multivariate analysis, + disease-related survival was negatively influenced by an elevated + preoperative beta-human chorionic gonadotropin level (P =.028) and + adverse pathologic characteristics of residual mediastinal disease + (P =.006). CONCLUSIONS: Testicular nonseminomatous germ cell tumors + follow a predictable pattern of mediastinal dissemination, primarily + following the course of the thoracic duct and its major tributaries. + Patients who require surgery to remove residual mediastinal disease + after cisplatin-based chemotherapy for metastatic nonseminomatous + germ cell tumors have good to excellent long-term survivals. These + results justify an aggressive surgical approach, including multiple + surgical procedures if clinically indicated.}, + Author = {Kenneth A Kesler and Jo Ann Brooks and Karen M Rieger and Naomi S Fineberg and Lawrence H Einhorn and John W Brown}, + Doi = {10.1067/mtc.2003.407}, + Institution = {Department of Surgery, Indiana University School of Medicine, Indianapolis, Ind, USA. 
kkesler@iupui.edu}, + Journal = {J Thorac Cardiovasc Surg}, + Keywords = {Adolescent; Adult; Child; Follow-Up Studies; Germinoma, mortality/secondary/surgery; Humans; Male; Mediastinal Neoplasms, mortality/secondary/surgery; Middle Aged; Postoperative Complications, epidemiology; Prognosis; Retrospective Studies; Survival Rate; Testicular Neoplasms, pathology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {913--923}, + Pii = {S0022522303000990}, + Pmid = {12698156}, + Timestamp = {2009.08.04}, + Title = {Mediastinal metastases from testicular nonseminomatous germ cell tumors: patterns of dissemination and predictors of long-term survival with surgery.}, + Url = {http://dx.doi.org/10.1067/mtc.2003.407}, + Volume = {125}, + Year = {2003}, + Bdsk-Url-1 = {http://dx.doi.org/10.1067/mtc.2003.407}} + +@article{Kesler2000, + Abstract = {Severe hyperhidrosis palmaris represents a disabling problem for many + patients. Thoracoscopic techniques that involve dissection and removal + of the upper thoracic sympathetic chain are believed to result in + the lowest incidence of recurrent symptoms. However, aside from an + axillary incision, an additional upper anterior chest wall approach + is usually required. Over the past 2 years, we have used a periareolar + incision in eight patients to improve postoperative cosmesis for + this benign condition.}, + Author = {K. A. Kesler and J. A. Brooks-Brunn and R. L. Campbell and J. W. Brown}, + Institution = {Department of Surgery, Indiana University School of Medicine, Indianapolis 46202, USA. kkesler@iupui.edu}, + Journal = {Ann Thorac Surg}, + Keywords = {Adult; Female; Follow-Up Studies; Hand; Humans; Hyperhidrosis, therapy; Male; Nipples; Sympathectomy, methods; Thoracoscopy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jul}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {314--317}, + Pii = {S0003497500014491}, + Pmid = {10921742}, + Timestamp = {2009.08.04}, + Title = {Thoracoscopic sympathectomy for hyperhidrosis palmaris: a periareolar approach.}, + Volume = {70}, + Year = {2000}} + +@article{Kesler1992, + Abstract = {To determine the efficacy of ventricular closure techniques, we reviewed + our experience with 62 patients who survived the repair of aneurysms + of the anterior wall of the left ventricular from 1984 through 1989. + Forty of these patients underwent aneurysm repair by standard linear + closure and 22 by a circular closure technique. After a mean follow-up + interval of 3 years, there were no demonstrable differences in angina + class, New York Heart Association functional classification, or survival. + In 41 surviving patients, postoperative left ventricular dimensions + and function were satisfactorily evaluated by standard echocardiographic + measurements. No significant differences were found in postoperative + long-axis left ventricular systolic diameter or in short-axis systolic + or diastolic areas. There was a significantly larger long-axis diastolic + diameter in the circular closure group; however, there was no difference + in this parameter when the ratios of postoperative to preoperative + lengths were compared. Further intragroup comparisons demonstrated + an increase in short-axis areas postoperatively within the circular + closure group in contrast to a decrease in patients in the linear + closure group; these changes were not statistically significant. 
+ There was no significant difference in postoperative ejection fraction
+ between the two closure groups, although minor reductions were found
+ in the circular closure group. These data demonstrate no significant
+ difference between the linear and circular closure techniques with
+ respect to standard echocardiographic parameters, functional classification,
+ and survival.},
+ Author = {K. A. Kesler and A. C. Fiore and K. S. Naunheim and T. G. Sharp and Y. Mahomed and T. W. Zollinger and S. G. Sawada and J. W. Brown and A. J. Labovitz and H. B. Barner},
+ Institution = {Department of Surgery, Indiana University, Indianapolis 46202.},
+ Journal = {J Thorac Cardiovasc Surg},
+ Keywords = {Actuarial Analysis; Echocardiography; Female; Follow-Up Studies; Heart Aneurysm, mortality/surgery/ultrasonography; Humans; Male; Postoperative Complications, epidemiology; Retrospective Studies; Risk Factors; Suture Techniques; Time Factors; Ventricular Function, Left, physiology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {May},
+ Number = {5},
+ Owner = {Woo-Young Ahn},
+ Pages = {841--7; discussion 847-8},
+ Pmid = {1569764},
+ Timestamp = {2009.08.04},
+ Title = {Anterior wall left ventricular aneurysm repair. A comparison of linear versus circular closure.},
+ Volume = {103},
+ Year = {1992}}
+
+@article{Kesler1990,
+ Abstract = {The use of the internal mammary artery (IMA) as a coronary artery
+ bypass graft conduit has recently been expanded to include sequential
+ bypass grafting of multiple vessels. This has the theoretical advantage
+ of allowing a greater percentage of myocardium to be revascularized
+ with a conduit that has superior long-term patency rates. This article
+ reviews technical considerations including maximizing IMA graft length
+ and diameter, avoidance of an acute mediastinal or epicardial course,
+ as well as anastomotic techniques for optimizing results of sequential
+ IMA bypass grafting to the left anterior descending coronary artery
+ system.},
+ Author = {K. A. Kesler and T. G. Sharp and M. W. Turrentine and J. W. Brown},
+ Institution = {Indiana University School of Medicine, Department of Surgery, Indianapolis 46202-5125.},
+ Journal = {J Card Surg},
+ Keywords = {Anastomosis, Surgical, methods; Coronary Vessels, surgery; Humans; Internal Mammary-Coronary Artery Anastomosis, methods; Mammary Arteries, surgery; Mediastinum, surgery; Pleura, surgery},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jun},
+ Number = {2},
+ Owner = {Woo-Young Ahn},
+ Pages = {134--144},
+ Pmid = {2133831},
+ Timestamp = {2009.08.04},
+ Title = {Technical considerations and early results of sequential left internal mammary artery bypass grafting to the left anterior descending coronary artery system.},
+ Volume = {5},
+ Year = {1990}}
+
+@article{Kester2006,
+ Author = {Kester, Hana M. and Sevy, Serge and Yechiam, Eldad and Burdick, Katherine E. and Cervellione, Kelly L. and Kumra, Sanjiv},
+ Journal = {Schizophrenia Research},
+ Owner = {WooYoung Ahn},
+ Pages = {113--123},
+ Timestamp = {2008.01.07},
+ Title = {Decision-making impairments in adolescents with early-onset schizophrenia},
+ Volume = {85},
+ Year = {2006}}
+
+@article{Khan2002,
+ Author = {Khan, A. N. and Dayan, P. S. and Miller, S. and Rosen, M. and Rubin, D.
H.},
+ Journal = {Pediatr Emerg Care},
+ Month = {Jun},
+ Pages = {171--173},
+ Title = {{{C}osmetic outcome of scalp wound closure with staples in the pediatric emergency department: a prospective, randomized trial}},
+ Volume = {18},
+ Year = {2002}}
+
+@article{Kiani2008a,
+ Author = {Kiani, R. and Hanks, T. D. and Shadlen, M. N.},
+ Journal = {J. Neurosci.},
+ Month = {Mar},
+ Pages = {3017--3029},
+ Title = {{{B}ounded integration in parietal cortex underlies decisions even when viewing duration is dictated by the environment}},
+ Volume = {28},
+ Year = {2008}}
+
+@article{Kiani2006a,
+ Author = {Kiani, R. and Hanks, T. D. and Shadlen, M. N.},
+ Journal = {Nat. Neurosci.},
+ Month = {Jul},
+ Pages = {861--863},
+ Title = {{{W}hen is enough enough?}},
+ Volume = {9},
+ Year = {2006}}
+
+@article{Kiani2009a,
+ Author = {Kiani, R. and Shadlen, M. N.},
+ Journal = {Science},
+ Month = {May},
+ Pages = {759--764},
+ Title = {{{R}epresentation of confidence associated with a decision by neurons in the parietal cortex}},
+ Volume = {324},
+ Year = {2009}}
+
+@article{Killeen2006,
+ Author = {Killeen, P. R.},
+ Journal = {Psychonomic Bulletin \& Review},
+ Pages = {549--562},
+ Title = {Beyond Statistical Inference: {A} Decision Theory for Science},
+ Volume = {13},
+ Year = {2006}}
+
+@article{Killeen2005,
+ Author = {Killeen, P. R.},
+ Journal = {Psychological Science},
+ Pages = {345--353},
+ Title = {An Alternative to Null--Hypothesis Significance Tests},
+ Volume = {16},
+ Year = {2005}}
+
+@article{Killeen2005a,
+ Author = {Killeen, P. R.},
+ Journal = {Psychological Science},
+ Pages = {1009--1012},
+ Title = {Replicability, Confidence, and Priors},
+ Volume = {16},
+ Year = {2005}}
+
+@article{Kim2007c,
+ Author = {Kim, D. J. and Lyoo, I. K. and Yoon, S. J. and Choi, T. and Lee, B. and Kim, J. E. and Lee, J. S. and Renshaw, P. F.},
+ Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry},
+ Month = {Aug},
+ Pages = {1182--1188},
+ Title = {{{C}linical response of quetiapine in rapid cycling manic bipolar patients and lactate level changes in proton magnetic resonance spectroscopy}},
+ Volume = {31},
+ Year = {2007}}
+
+@article{Kim2007,
+ Author = {Kim, H. and Adolphs, R. and O'Doherty, J. P. and Shimojo, S.},
+ Journal = {Proc. Natl. Acad. Sci. U.S.A.},
+ Month = {Nov},
+ Pages = {18253--18258},
+ Title = {{{T}emporal isolation of neural processes underlying face preference decisions}},
+ Volume = {104},
+ Year = {2007}}
+
+@article{Kim2006,
+ Author = {Kim, Hackjin and Shimojo, Shinsuke and O'Doherty, John P.},
+ Journal = {PLOS Biology},
+ Number = {8},
+ Owner = {WooYoung Ahn},
+ Pages = {1453--1461},
+ Timestamp = {2007.12.12},
+ Title = {Is avoiding an aversive outcome rewarding? Neural substrates of avoidance learning in the human brain},
+ Volume = {4},
+ Year = {2006}}
+
+@article{Kim2006a,
+ Author = {Kim, H. and Shimojo, S. and O'Doherty, J. P.},
+ Journal = {PLoS Biol.},
+ Month = {Jul},
+ Pages = {e233},
+ Title = {{{I}s avoiding an aversive outcome rewarding? {N}eural substrates of avoidance learning in the human brain}},
+ Volume = {4},
+ Year = {2006}}
+
+@article{Kim2009a,
+ Author = {Jungsu Kim and Joseph M. Castellano and Hong Jiang and Jacob M. Basak and Maia Parsadanian and Vi Pham and Stephanie M. Mason and Steven M. Paul and David M.
Holtzman},
+ Journal = {Neuron},
+ Owner = {Young},
+ Pages = {632--644},
+ Timestamp = {2009.12.10},
+ Title = {Overexpression of Low-Density Lipoprotein Receptor in the Brain Markedly Inhibits Amyloid Deposition and Increases Extracellular ABeta Clearance},
+ Volume = {64},
+ Year = {2009}}
+
+@article{Kim1996,
+ Abstract = {Behavioral stress has detrimental effects on subsequent cognitive
+ performance in many species, including humans. For example, humans
+ exposed to stressful situations typically exhibit marked deficits
+ in various learning and memory tasks. However, the underlying neural
+ mechanisms by which stress exerts its effects on learning and memory
+ are unknown. We now report that in adult male rats, stress (i.e.,
+ restraint plus tailshock) impairs long-term potentiation (LTP) but
+ enhances long-term depression (LTD) in the CA1 area of the hippocampus,
+ a structure implicated in learning and memory processes. These effects
+ on LTP and LTD are prevented when the animals were given CGP39551
+ (the carboxyethylester of CGP 37849; DL-(E)-2-amino-4-methyl-5-phosphono-3-pentenoic
+ acid), a competitive N-methyl-D-aspartate (NMDA) receptor antagonist,
+ before experiencing stress. In contrast, the anxiolytic drug diazepam
+ did not block the stress effects on hippocampal plasticity. Thus,
+ the effects of stress on subsequent LTP and LTD appear to be mediated
+ through the activation of the NMDA subtype of glutamate receptors.
+ Such modifications in hippocampal plasticity may contribute to learning
+ and memory impairments associated with stress.},
+ Author = {J. J. Kim and M. R. Foy and R. F. Thompson},
+ Institution = {Neurosciences Program, University of Southern California, Los Angeles, 90089-2520, USA.},
+ Journal = {Proc Natl Acad Sci U S A},
+ Keywords = {2-Amino-5-phosphonovalerate, analogs \& derivatives/pharmacology; Animals; Behavior, Animal, physiology; Excitatory Amino Acid Antagonists, pharmacology; Hippocampus, physiology; Humans; Infant, Newborn; Learning, physiology; Long-Term Potentiation, physiology; Male; Memory, physiology; Neuronal Plasticity, physiology; Rats; Receptors, N-Methyl-D-Aspartate, antagonists \& inhibitors/physiology; Stress, Physiological, physiopathology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {May},
+ Number = {10},
+ Owner = {Young},
+ Pages = {4750--4753},
+ Pmid = {8643474},
+ Timestamp = {2009.12.10},
+ Title = {Behavioral stress modifies hippocampal plasticity through N-methyl-D-aspartate receptor activation.},
+ Volume = {93},
+ Year = {1996}}
+
+@article{Kim1999a,
+ Author = {Kim, J. N. and Shadlen, M. N.},
+ Journal = {Nat. Neurosci.},
+ Month = {Feb},
+ Pages = {176--185},
+ Title = {{{N}eural correlates of a decision in the dorsolateral prefrontal cortex of the macaque}},
+ Volume = {2},
+ Year = {1999}}
+
+@article{Kim2008a,
+ Author = {Kim, M. J. and Chey, J. and Chung, A. and Bae, S. and Khang, H. and Ham, B. and Yoon, S. J. and Jeong, D. U. and Lyoo, I. K.},
+ Journal = {J Psychiatr Res},
+ Month = {Mar},
+ Pages = {268--277},
+ Title = {{{D}iminished rostral anterior cingulate activity in response to threat-related events in posttraumatic stress disorder}},
+ Volume = {42},
+ Year = {2008}}
+
+@article{Kim2007b,
+ Author = {Kim, M. J. and Lyoo, I. K. and Dager, S. R. and Friedman, S. D. and Chey, J. and Hwang, J. and Lee, Y. J. and Dunner, D. L. and Renshaw, P.
F.}, + Journal = {Bipolar Disord}, + Month = {May}, + Pages = {274--280}, + Title = {{{T}he occurrence of cavum septi pellucidi enlargement is increased in bipolar disorder patients}}, + Volume = {9}, + Year = {2007}} + +@article{Kim2005a, + Author = {Kim, M. J. and Lyoo, I. K. and Kim, S. J. and Sim, M. and Kim, N. and Choi, N. and Jeong, D. U. and Covell, J. and Renshaw, P. F.}, + Journal = {Neuroreport}, + Month = {Jul}, + Pages = {1049--1053}, + Title = {{{D}isrupted white matter tract integrity of anterior cingulate in trauma survivors}}, + Volume = {16}, + Year = {2005}} + +@article{Kim2006d, + Author = {Kim, S. J. and Jeong, D. U. and Sim, M. E. and Bae, S. C. and Chung, A. and Kim, M. J. and Chang, K. H. and Ryu, J. and Renshaw, P. F. and Lyoo, I. K.}, + Journal = {Neuropsychobiology}, + Pages = {120--125}, + Title = {{{A}symmetrically altered integrity of cingulum bundle in posttraumatic stress disorder}}, + Volume = {54}, + Year = {2006}} + +@article{Kim2006c, + Author = {Kim, S. J. and Lee, S. J. and Yune, S. K. and Sung, Y. H. and Bae, S. C. and Chung, A. and Kim, J. and Lyoo, I. K.}, + Journal = {Psychopathology}, + Pages = {80--86}, + Title = {{{T}he relationship between the biogenetic temperament and character and psychopathology in adolescents}}, + Volume = {39}, + Year = {2006}} + +@article{Kim2006b, + Author = {Kim, S. J. and Lyoo, I. K. and Hwang, J. and Chung, A. and Hoon Sung, Y. and Kim, J. and Kwon, D. H. and Chang, K. H. and Renshaw, P. F.}, + Journal = {Int. J. Neuropsychopharmacol.}, + Month = {Apr}, + Pages = {221--228}, + Title = {{{P}refrontal grey-matter changes in short-term and long-term abstinent methamphetamine abusers}}, + Volume = {9}, + Year = {2006}} + +@article{Kim2005, + Author = {Kim, S. J. and Lyoo, I. K. and Hwang, J. and Sung, Y. H. and Lee, H. Y. and Lee, D. S. and Jeong, D. U. and Renshaw, P. F.}, + Journal = {Neuropsychopharmacology}, + Month = {Jul}, + Pages = {1383--1391}, + Title = {{{F}rontal glucose hypometabolism in abstinent methamphetamine users}}, + Volume = {30}, + Year = {2005}} + +@article{Kim2007a, + Author = {Kim, S. J. and Lyoo, I. K. and Lee, Y. S. and Kim, J. and Sim, M. E. and Bae, S. J. and Kim, H. J. and Lee, J. Y. and Jeong, D. U.}, + Journal = {Acta Psychiatr Scand}, + Month = {Aug}, + Pages = {145--153}, + Title = {{{D}ecreased cerebral blood flow of thalamus in {P}{T}{S}{D} patients as a strategy to reduce re-experience symptoms}}, + Volume = {116}, + Year = {2007}} + +@article{Kim2009, + Author = {Kim, S. J. and Lyoo, I. K. and Lee, Y. S. and Lee, J. Y. and Yoon, S. J. and Kim, J. E. and Kim, J. H. and Hong, S. J. and Jeong, D. U.}, + Journal = {Acta Neurol. Scand.}, + Month = {Jan}, + Pages = {61--67}, + Title = {{{G}ray matter deficits in young adults with narcolepsy}}, + Volume = {119}, + Year = {2009}} + +@article{Kim2008, + Author = {Kim, S. J. and Lyoo, I. K. and Lee, Y. S. and Sung, Y. H. and Kim, H. J. and Kim, J. H. and Kim, K. H. and Jeong, D. U.}, + Journal = {Sleep}, + Month = {Mar}, + Pages = {342--347}, + Title = {{{I}ncreased {G}{A}{B}{A} levels in medial prefrontal cortex of young adults with narcolepsy}}, + Volume = {31}, + Year = {2008}} + +@article{Kimes2003, + Author = {Kimes, A. S. and Horti, A. G. and London, E. D. and Chefer, S. I. and Contoreggi, C. and Ernst, M. and Friello, P. and Koren, A. O. and Kurian, V. and Matochik, J. A. and Pavlova, O. and Vaupel, D. B. and Mukhin, A. 
G.},
+ Journal = {FASEB J.},
+ Month = {Jul},
+ Pages = {1331--1333},
+ Title = {{2-[18{F}]{F}-{A}-85380: {P}{E}{T} imaging of brain nicotinic acetylcholine receptors and whole body distribution in humans}},
+ Volume = {17},
+ Year = {2003}}
+
+@article{King-Casas2008,
+ Author = {King-Casas, B. and Sharp, C. and Lomax-Bream, L. and Lohrenz, T. and Fonagy, P. and Montague, P. R.},
+ Journal = {Science},
+ Month = {Aug},
+ Pages = {806--810},
+ Title = {{{T}he rupture and repair of cooperation in borderline personality disorder}},
+ Volume = {321},
+ Year = {2008}}
+
+@article{King-Casas2005,
+ Author = {King-Casas, B. and Tomlin, D. and Anen, C. and Camerer, C. F. and Quartz, S. R. and Montague, P. R.},
+ Journal = {Science},
+ Month = {Apr},
+ Pages = {78--83},
+ Title = {{{G}etting to know you: reputation and trust in a two-person economic exchange}},
+ Volume = {308},
+ Year = {2005}}
+
+@article{Kirby2004,
+ Abstract = {AIMS: To test a prediction of the discounting model of impulsiveness
+ that discount rates would be positively associated with addiction.
+ The delay-discount rate refers to the rate of reduction in the present
+ value of a future reward as the delay to that reward increases. DESIGN
+ AND MEASUREMENTS: We estimated participants' discount rates on the
+ basis of their pattern of choices between smaller immediate rewards
+ ($11-80) and larger, delayed rewards ($25-85; at delays from 1 week
+ to 6 months) in a questionnaire format. Participants had a one-in-six
+ chance of winning a reward that they chose on one randomly selected
+ trial. PARTICIPANTS AND SETTING: Heroin (n = 27), cocaine (n = 41)
+ and alcohol (n = 33) abusers and non-drug-using controls (n = 44)
+ were recruited from advertisements. They were tested in a drug abuse
+ research clinic at a medical school. FINDINGS: On average, the cocaine
+ and heroin groups had higher rates than controls (both P < 0.001),
+ but alcoholics did not (P = 0.44). Abstinence was associated with
+ lower rates for heroin abusers (P = 0.03), but not for cocaine or
+ alcohol abusers (both P > 0.50). CONCLUSIONS: These data suggest
+ that discount rates vary with the preferred drug of abuse, and that
+ high discount rates should be considered in the development of substance
+ abuse prevention and treatment efforts.},
+ Author = {Kris N Kirby and Nancy M Petry},
+ Doi = {10.1111/j.1360-0443.2003.00669.x},
+ Institution = {Department of Psychology, Williams College, Williamstown, MA, USA. kkirby@williams.edu},
+ Journal = {Addiction},
+ Keywords = {Adult; Alcoholism, psychology; Choice Behavior; Cocaine-Related Disorders, psychology; Female; Heroin Dependence, psychology; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Motivation; Questionnaires; Reward; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Apr},
+ Number = {4},
+ Owner = {Woo-Young Ahn},
+ Pages = {461--471},
+ Pii = {ADD669},
+ Pmid = {15049746},
+ Timestamp = {2009.08.06},
+ Title = {Heroin and cocaine abusers have higher discount rates for delayed rewards than alcoholics or non-drug-using controls.},
+ Url = {http://dx.doi.org/10.1111/j.1360-0443.2003.00669.x},
+ Volume = {99},
+ Year = {2004},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2003.00669.x}}
+
+@article{Kirby1999,
+ Author = {Kirby, K. N. and Petry, N. M. and Bickel, W. K.},
+ Journal = {Journal of Experimental Psychology: General},
+ Number = {1},
+ Owner = {ahnw},
+ Pages = {78--87},
+ Timestamp = {2007.05.01},
+ Title = {Heroin addicts have higher discount rates for delayed rewards than non-drug-using controls},
+ Volume = {128},
+ Year = {1999}}
+
+@article{Kirkpatrick2007,
+ Author = {Kirkpatrick, T. and Joyce, E. and Milton, J. and Duggan, C. and Tyrer, P. and Rogers, R. D.},
+ Journal = {J. Pers. Disord.},
+ Month = {Jun},
+ Pages = {243--261},
+ Title = {{{A}ltered emotional decision-making in prisoners with borderline personality disorder}},
+ Volume = {21},
+ Year = {2007}}
+
+@article{Klauerinpress,
+ Author = {Klauer, K. C. and Voss, A. and Schmitz, F. and Teige--{M}ocigemba, S.},
+ Journal = {Journal of Personality and Social Psychology},
+ Pages = {??-??},
+ Title = {Process Components of the Implicit Association Test: {A} Diffusion--model Analysis},
+ Volume = {??},
+ Year = {in press}}
+
+@article{Klein2008,
+ Author = {Klein, J. T. and Deaner, R. O. and Platt, M. L.},
+ Journal = {Curr.
+ Journal = {Curr. Biol.},
+ Month = {Mar},
+ Pages = {419--424},
+ Title = {{{N}eural correlates of social target value in macaque parietal cortex}},
+ Volume = {18},
+ Year = {2008}}
+
+@article{Kleven1990,
+ Author = {Kleven, MS and Anthony, EW and Woolverton, WL},
+ Journal = {Journal of Pharmacology and Experimental Therapeutics},
+ Number = {1},
+ Pages = {312--317},
+ Publisher = {ASPET},
+ Title = {{Pharmacological characterization of the discriminative stimulus effects of cocaine in rhesus monkeys}},
+ Volume = {254},
+ Year = {1990}}
+
+@article{Klugkist2005,
+ Author = {Klugkist, I. and Kato, B. and Hoijtink, H.},
+ Journal = {Statistica Neerlandica},
+ Pages = {57--69},
+ Title = {Bayesian Model Selection Using Encompassing Priors},
+ Volume = {59},
+ Year = {2005}}
+
+@article{Klugkist2005a,
+ Author = {Klugkist, I. and Laudy, O. and Hoijtink, H.},
+ Journal = {Psychological Methods},
+ Pages = {477--493},
+ Title = {Inequality Constrained Analysis of Variance: {A} {B}ayesian Approach},
+ Volume = {10},
+ Year = {2005}}
+
+@article{Klugkist2005b,
+ Author = {Klugkist, I. and Laudy, O. and Hoijtink, H.},
+ Journal = {Psychological Methods},
+ Pages = {500--503},
+ Title = {{B}ayesian Eggs and {B}ayesian Omelettes: {R}eply to {S}tern (2005)},
+ Volume = {10},
+ Year = {2005}}
+
+@article{Knill2004,
+ Author = {Knill, D.C. and Pouget, A.},
+ Journal = {TRENDS in Neurosciences},
+ Number = {12},
+ Pages = {712--719},
+ Publisher = {Elsevier},
+ Title = {{The Bayesian brain: the role of uncertainty in neural coding and computation}},
+ Volume = {27},
+ Year = {2004}}
+
+@article{Knutson2004c,
+ Author = {Knutson, B.},
+ Journal = {Science},
+ Month = {Aug},
+ Pages = {1246--1247},
+ Title = {{{B}ehavior. {S}weet revenge?}},
+ Volume = {305},
+ Year = {2004}}
+
+@article{Knutson2001b,
+ Author = {Knutson, B. and Adams, C. M. and Fong, G. W. and Hommer, D.},
+ Journal = {J. Neurosci.},
+ Pages = {RC159},
+ Title = {{{A}nticipation of increasing monetary reward selectively recruits nucleus accumbens}},
+ Volume = {21},
+ Year = {2001}}
+
+@article{Knutson2005b,
+ Author = {Knutson, B. and Adcock, R. A.},
+ Journal = {Neuron},
+ Month = {Feb},
+ Pages = {331--332},
+ Title = {{{R}emembrance of rewards past}},
+ Volume = {45},
+ Year = {2005}}
+
+@article{Knutson2008e,
+ Author = {Knutson, B. and Bhanji, J. P. and Cooney, R. E. and Atlas, L. Y. and Gotlib, I. H.},
+ Journal = {Biol. Psychiatry},
+ Pages = {686--692},
+ Title = {{{N}eural responses to monetary incentives in major depression}},
+ Volume = {63},
+ Year = {2008}}
+
+@article{Knutson2004b,
+ Author = {Knutson, B. and Bjork, J. M. and Fong, G. W. and Hommer, D. and Mattay, V. S. and Weinberger, D. R.},
+ Journal = {Neuron},
+ Month = {Jul},
+ Pages = {261--269},
+ Title = {{{A}mphetamine modulates human incentive processing}},
+ Volume = {43},
+ Year = {2004}}
+
+@article{Knutson2006a,
+ Author = {Knutson, B. and Cooper, J. C.},
+ Journal = {Neuron},
+ Month = {Aug},
+ Pages = {280--282},
+ Title = {{{T}he lure of the unknown}},
+ Volume = {51},
+ Year = {2006}}
+
+@article{Knutson2005a,
+ Author = {Knutson, B. and Cooper, J. C.},
+ Journal = {Curr. Opin. Neurol.},
+ Month = {Aug},
+ Pages = {411--417},
+ Title = {{{F}unctional magnetic resonance imaging of reward prediction}},
+ Volume = {18},
+ Year = {2005}}
+
+@article{Knutson2003,
+ Author = {Knutson, B. and Fong, G.W. and Bennett, S.M. and Adams, C.M.
and Hommer, D.}, + Journal = {Neuroimage}, + Number = {2}, + Pages = {263--272}, + Publisher = {Elsevier}, + Title = {{A region of mesial prefrontal cortex tracks monetarily rewarding outcomes: characterization with rapid event-related fMRI}}, + Volume = {18}, + Year = {2003}} + +@article{Knutson2001a, + Author = {Knutson, B. and Fong, G. W. and Adams, C. M. and Varner, J. L. and Hommer, D.}, + Journal = {Neuroreport}, + Pages = {3683--3687}, + Title = {{{D}issociation of reward anticipation and outcome with event-related f{M}{R}{I}}}, + Volume = {12}, + Year = {2001}} + +@article{Knutson2003a, + Author = {Knutson, B. and Fong, G. W. and Bennett, S. M. and Adams, C. M. and Hommer, D.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {263--272}, + Title = {{{A} region of mesial prefrontal cortex tracks monetarily rewarding outcomes: characterization with rapid event-related f{M}{R}{I}}}, + Volume = {18}, + Year = {2003}} + +@article{Knutson2007, + Author = {Knutson, B. and Gibbs, S.E.B.}, + Journal = {Psychopharmacology}, + Number = {3}, + Pages = {813--822}, + Publisher = {Springer}, + Title = {{Linking nucleus accumbens dopamine and blood oxygenation}}, + Volume = {191}, + Year = {2007}} + +@article{Knutson2007e, + Author = {Knutson, B. and Gibbs, S. E.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {813--822}, + Title = {{{L}inking nucleus accumbens dopamine and blood oxygenation}}, + Volume = {191}, + Year = {2007}} + +@article{Knutson2008d, + Author = {Knutson, B. and Greer, S. M.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Dec}, + Pages = {3771--3786}, + Title = {{{A}nticipatory affect: neural correlates and consequences for choice}}, + Volume = {363}, + Year = {2008}} + +@article{Knutson2001, + Author = {Knutson, B. and Momenan, R. and Rawlings, R. R. and Fong, G. W. and Hommer, D.}, + Journal = {Biol. Psychiatry}, + Month = {Nov}, + Pages = {685--690}, + Title = {{{N}egative association of neuroticism with brain volume ratio in healthy humans}}, + Volume = {50}, + Year = {2001}} + +@article{Knutson2007d, + Author = {Knutson, B. and Rick, S. and Wimmer, G. E. and Prelec, D. and Loewenstein, G.}, + Journal = {Neuron}, + Month = {Jan}, + Pages = {147--156}, + Title = {{{N}eural predictors of purchases}}, + Volume = {53}, + Year = {2007}} + +@article{Knutson2005, + Author = {Knutson, B. and Taylor, J. and Kaufman, M. and Peterson, R. and Glover, G.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {4806--4812}, + Title = {{{D}istributed neural representation of expected value}}, + Volume = {25}, + Year = {2005}} + +@article{Knutson2000, + Author = {Knutson, B. and Westdorp, A. and Kaiser, E. and Hommer, D.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {20--27}, + Title = {{{F}{M}{R}{I} visualization of brain activity during a monetary incentive delay task}}, + Volume = {12}, + Year = {2000}} + +@article{Knutson2007c, + Author = {Knutson, B. and Wimmer, G. E.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {May}, + Pages = {54--69}, + Title = {{{S}plitting the difference: how does the brain code reward episodes?}}, + Volume = {1104}, + Year = {2007}} + +@article{Knutson2008c, + Author = {Knutson, B. and Wimmer, G. E. and Kuhnen, C. M. and Winkielman, P.}, + Journal = {Neuroreport}, + Month = {Mar}, + Pages = {509--513}, + Title = {{{N}ucleus accumbens activation mediates the influence of reward cues on financial risk taking}}, + Volume = {19}, + Year = {2008}} + +@article{Knutson2008b, + Author = {Knutson, B. 
and Wimmer, G. E. and Rick, S. and Hollon, N. G. and Prelec, D. and Loewenstein, G.}, + Journal = {Neuron}, + Month = {Jun}, + Pages = {814--822}, + Title = {{{N}eural antecedents of the endowment effect}}, + Volume = {58}, + Year = {2008}} + +@article{Knutson2007b, + Author = {Knutson, D. and Steiner, E.}, + Journal = {Am Fam Physician}, + Month = {Jun}, + Pages = {1660--1666}, + Title = {{{S}creening for breast cancer: current recommendations and future directions}}, + Volume = {75}, + Year = {2007}} + +@article{Knutson2007a, + Author = {Knutson, K. M. and Mah, L. and Manly, C. F. and Grafman, J.}, + Journal = {Hum Brain Mapp}, + Month = {Oct}, + Pages = {915--930}, + Title = {{{N}eural correlates of automatic beliefs about gender and race}}, + Volume = {28}, + Year = {2007}} + +@article{Knutson2008a, + Author = {Knutson, K. M. and McClellan, E. M. and Grafman, J.}, + Journal = {Exp Brain Res}, + Month = {Jun}, + Pages = {187--198}, + Title = {{{O}bserving social gestures: an f{M}{R}{I} study}}, + Volume = {188}, + Year = {2008}} + +@article{Knutson2004a, + Author = {Knutson, K. M. and Wood, J. N. and Grafman, J.}, + Journal = {Neuroimage}, + Month = {Dec}, + Pages = {1299--1307}, + Title = {{{B}rain activation in processing temporal sequence: an f{M}{R}{I} study}}, + Volume = {23}, + Year = {2004}} + +@article{Knutson2006, + Author = {Knutson, K. M. and Wood, J. N. and Spampinato, M. V. and Grafman, J.}, + Journal = {Soc Neurosci}, + Pages = {25--40}, + Title = {{{P}olitics on the brain: an {F}{M}{R}{I} investigation}}, + Volume = {1}, + Year = {2006}} + +@article{Knutson2008, + Author = {Knutson, K. M. and Zamboni, G. and Tierney, M. C. and Grafman, J.}, + Journal = {Dement Geriatr Cogn Disord}, + Pages = {467--474}, + Title = {{{N}eural correlates of caregiver burden in cortical basal syndrome and frontotemporal dementia}}, + Volume = {26}, + Year = {2008}} + +@article{Knutson2004, + Author = {Knutson, T. and Hawas, B.}, + Journal = {Scand. J. Urol. Nephrol.}, + Pages = {348--350}, + Title = {{{H}orseshoe kidney with a circumcaval ureter}}, + Volume = {38}, + Year = {2004}} + +@article{Kobayashi2006, + Author = {Kobayashi, S. and Nomoto, K. and Watanabe, M. and Hikosaka, O. and Schultz, W. and Sakagami, M.}, + Journal = {Neuron}, + Month = {Sep}, + Pages = {861--870}, + Title = {{{I}nfluences of rewarding and aversive outcomes on activity in macaque lateral prefrontal cortex}}, + Volume = {51}, + Year = {2006}} + +@article{Kobayashi2008, + Author = {Kobayashi, S. and Schultz, W.}, + Journal = {J. Neurosci.}, + Month = {Jul}, + Pages = {7837--7846}, + Title = {{{I}nfluence of reward delays on responses of dopamine neurons}}, + Volume = {28}, + Year = {2008}} + +@article{Kong2004, + Abstract = {We identify berberine (BBR), a compound isolated from a Chinese herb, + as a new cholesterol-lowering drug. Oral administration of BBR in + 32 hypercholesterolemic patients for 3 months reduced serum cholesterol + by 29\%, triglycerides by 35\% and LDL-cholesterol by 25\%. Treatment + of hyperlipidemic hamsters with BBR reduced serum cholesterol by + 40\% and LDL-cholesterol by 42\%, with a 3.5-fold increase in hepatic + LDLR mRNA and a 2.6-fold increase in hepatic LDLR protein. Using + human hepatoma cells, we show that BBR upregulates LDLR expression + independent of sterol regulatory element binding proteins, but dependent + on ERK activation. BBR elevates LDLR expression through a post-transcriptional + mechanism that stabilizes the mRNA. 
Using a heterologous system with
+ luciferase as a reporter, we further identify the 5' proximal section
+ of the LDLR mRNA 3' untranslated region responsible for the regulatory
+ effect of BBR. These findings show BBR as a new hypolipidemic drug
+ with a mechanism of action different from that of statin drugs.},
+ Author = {Weijia Kong and Jing Wei and Parveen Abidi and Meihong Lin and Satoru Inaba and Cong Li and Yanling Wang and Zizheng Wang and Shuyi Si and Huaining Pan and Shukui Wang and Jingdan Wu and Yue Wang and Zhuorong Li and Jingwen Liu and Jian-Dong Jiang},
+ Doi = {10.1038/nm1135},
+ Institution = {Institute of Medicinal Biotechnology, Chinese Academy of Medical Sciences, and Peking Union Medical College, Beijing, 100050, China.},
+ Journal = {Nat Med},
+ Keywords = {Animals; Anticholesteremic Agents, pharmacology/therapeutic use; Berberine, chemistry/pharmacology/therapeutic use; Blotting, Northern; China; Cholesterol, LDL, blood; Cholesterol, blood; Cricetinae; DNA Primers; Flow Cytometry; Gene Expression Regulation, drug effects; Humans; Hypercholesterolemia, drug therapy; Liver, metabolism; Plasmids, genetics; Receptors, LDL, genetics/metabolism; Regulatory Sequences, Nucleic Acid, genetics; Reverse Transcriptase Polymerase Chain Reaction; Triglycerides, blood; Tumor Cells, Cultured},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Dec},
+ Number = {12},
+ Owner = {Young},
+ Pages = {1344--1351},
+ Pii = {nm1135},
+ Pmid = {15531889},
+ Timestamp = {2009.12.10},
+ Title = {Berberine is a novel cholesterol-lowering drug working through a unique mechanism distinct from statins.},
+ Url = {http://dx.doi.org/10.1038/nm1135},
+ Volume = {10},
+ Year = {2004},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1038/nm1135}}
+
+@incollection{Kontkanen2001,
+ Author = {Kontkanen, P. and Myllym\"{a}ki, P. and Tirri, H.},
+ Booktitle = {Proceedings of the Eighth International Workshop on Artificial Intelligence and Statistics},
+ Editor = {Jaakkola, T. and Richardson, T.},
+ Pages = {233--238},
+ Publisher = {Morgan Kaufmann Publishers},
+ Title = {Comparing Prequential Model Selection Criteria in Supervised Learning of Mixture Models},
+ Year = {2001}}
+
+@article{Kontsevich1999,
+ Author = {Leonid L. Kontsevich and Christopher W. Tyler},
+ Journal = {Vision Research},
+ Pages = {2729--2737},
+ Title = {Bayesian adaptive estimation of psychometric slope and threshold},
+ Volume = {39},
+ Year = {1999}}
+
+@article{Koob1988,
+ Author = {Koob, GF and Bloom, FE},
+ Journal = {Science},
+ Number = {4879},
+ Pages = {715--723},
+ Title = {{Cellular and molecular mechanisms of drug dependence}},
+ Volume = {242},
+ Year = {1988}}
+
+@article{Koob2004,
+ Author = {Koob, G. F. and Ahmed, S. H. and Boutrel, B. and Chen, S. A. and Kenny, P. J. and Markou, A. and O'Dell, L. E. and Parsons, L. H. and Sanna, P. P.},
+ Journal = {Neurosci Biobehav Rev},
+ Month = {Jan},
+ Pages = {739--749},
+ Title = {{{N}eurobiological mechanisms in the transition from drug use to drug dependence}},
+ Volume = {27},
+ Year = {2004}}
+
+@article{Koob1997,
+ Author = {Koob, G. F. and Nestler, E. J.},
+ Journal = {J Neuropsychiatry Clin Neurosci},
+ Pages = {482--497},
+ Title = {{{T}he neurobiology of drug addiction}},
+ Volume = {9},
+ Year = {1997}}
+
+@article{Kopnisky2002,
+ Author = {Kopnisky, K. L. and Cowan, W. M. and Hyman, S. E.},
+ Journal = {Dev. Psychopathol.},
+ Pages = {437--461},
+ Title = {{{L}evels of analysis in psychiatric research}},
+ Volume = {14},
+ Year = {2002}}
+
+@article{Korf1974,
+ Author = {J. Korf and H. M. van Praag and D. Schut and R. J. Nienhuis and J. P. Lakke},
+ Journal = {Eur Neurol},
+ Keywords = {Aged; Depression, Chemical; Dopa Decarboxylase, antagonists /&/ inhibitors/pharmacology/therapeutic use; Drug Combinations; Female; Homovanillic Acid, cerebrospinal fluid; Humans; Hydroxyindoleacetic Acid, cerebrospinal fluid; Levodopa, pharmacology/therapeutic use; Male; Middle Aged; Parkinson Disease, cerebrospinal fluid/drug therapy; Phenylacetates, cerebrospinal fluid; Probenecid, cerebrospinal fluid/diagnostic use; Stimulation, Chemical; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Number = {5-6},
+ Owner = {Young},
+ Pages = {340--350},
+ Pmid = {4448192},
+ Timestamp = {2010.05.01},
+ Title = {Parkinson's disease and amine metabolites in cerebrospinal fluid: implications for L-Dopa therapy.},
+ Volume = {12},
+ Year = {1974}}
+
+@article{Kornetsky1991,
+ Author = {Kornetsky, C. and Huston-Lyons, D. and Porrino, L. J.},
+ Journal = {Brain Res.},
+ Month = {Feb},
+ Pages = {75--81},
+ Title = {{{T}he role of the olfactory tubercle in the effects of cocaine, morphine and brain-stimulation reward}},
+ Volume = {541},
+ Year = {1991}}
+
+@article{Kornetsky1992,
+ Author = {Kornetsky, C. and Porrino, L. J.},
+ Journal = {Res Publ Assoc Res Nerv Ment Dis},
+ Pages = {59--77},
+ Title = {{{B}rain mechanisms of drug-induced reinforcement}},
+ Volume = {70},
+ Year = {1992}}
+
+@article{Kosten1997,
+ Author = {Kosten, T. A. and Miserendino, M. J. and Haile, C. N. and DeCaprio, J. L. and Jatlow, P. I. and Nestler, E. J.},
+ Journal = {Brain Res.},
+ Month = {Dec},
+ Pages = {418--429},
+ Title = {{{A}cquisition and maintenance of intravenous cocaine self-administration in {L}ewis and {F}ischer inbred rat strains}},
+ Volume = {778},
+ Year = {1997}}
+
+@article{Kosten2006,
+ Author = {Kosten, T. R. and Scanley, B. E. and Tucker, K. A. and Oliveto, A. and Prince, C. and Sinha, R. and Potenza, M. N. and Skudlarski, P. and Wexler, B. E.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Mar},
+ Pages = {644--650},
+ Title = {{{C}ue-induced brain activity changes and relapse in cocaine-dependent patients}},
+ Volume = {31},
+ Year = {2006}}
+
+@article{Kowal2007,
+ Abstract = {Two algorithms are commonly applied in computerized temporal discounting
+ procedures (Decreasing Adjustment and Double-Limit Algorithms); however,
+ the degree to which the two algorithms produce similar patterns of
+ discounting is unknown. The present experiment compared the two common
+ algorithms across sign (gains and losses) and magnitude ($10 and
+ $1000) conditions. Twenty participants made choices between larger
+ later and smaller sooner alternatives that were presented by each
+ of the algorithms in separate conditions. Strong correlations were
+ found between the two measures; however, the Decreasing Adjustment
+ Algorithm tended to produce lower indifference points and higher
+ rates of discounting than the Double-Limit Algorithm. Both algorithms
+ found significant magnitude effects. Less consistent results were
+ found when comparing the two algorithms across sign. The present
+ results suggest that researchers should apply caution when making
+ comparisons between outcomes of delay discounting studies that have
+ used the two different algorithms. However, the interpretation of
+ findings from individual studies is probably not strongly affected
+ by the use of different computer algorithms.},
+ Author = {Benjamin P Kowal and Richard Yi and Amanda C Erisman and Warren K Bickel},
+ Doi = {10.1016/j.beproc.2007.02.005},
+ Institution = {Center for Addiction Research, Fred and Dierk's Research Laboratories, Psychiatric Research Institute, University of Arkansas for Medical Sciences, Little Rock, AR 72205, USA. bpkowal@uams.edu},
+ Journal = {Behav Processes},
+ Keywords = {Adult; Algorithms; Behavioral Research, methods; Choice Behavior; Economics; Female; Humans; Male; Middle Aged; Models, Psychological; Motivation; Numerical Analysis, Computer-Assisted; Reference Values; Time Factors},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jun},
+ Number = {2},
+ Owner = {Woo-Young Ahn},
+ Pages = {231--236},
+ Pii = {S0376-6357(07)00030-7},
+ Pmid = {17368965},
+ Timestamp = {2009.08.06},
+ Title = {A comparison of two algorithms in computerized temporal discounting procedures.},
+ Url = {http://dx.doi.org/10.1016/j.beproc.2007.02.005},
+ Volume = {75},
+ Year = {2007},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.beproc.2007.02.005}}
+
+@article{Krain2008,
+ Author = {Krain, A. L. and Gotimer, K. and Hefton, S. and Ernst, M. and Castellanos, F. X. and Pine, D. S. and Milham, M. P.},
+ Journal = {Biol. Psychiatry},
+ Month = {Mar},
+ Pages = {563--568},
+ Title = {{{A} functional magnetic resonance imaging investigation of uncertainty in adolescents with anxiety disorders}},
+ Volume = {63},
+ Year = {2008}}
+
+@article{Krain2006,
+ Author = {Krain, A. L. and Hefton, S. and Pine, D. S. and Ernst, M. and Castellanos, F. X. and Klein, R. G. and Milham, M. P.},
+ Journal = {J Child Psychol Psychiatry},
+ Month = {Oct},
+ Pages = {1023--1030},
+ Title = {{{A}n f{M}{R}{I} examination of developmental differences in the neural correlates of uncertainty and decision-making}},
+ Volume = {47},
+ Year = {2006}}
+
+@article{Krantz1999,
+ Author = {Krantz, D. H.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {1372--1381},
+ Title = {The Null Hypothesis Testing Controversy in Psychology},
+ Volume = {94},
+ Year = {1999}}
+
+@article{Krebs-Thomson1998,
+ Author = {Krebs-Thomson, K. and Lehmann-Masten, V. and Naiem, S. and Paulus, M. P. and Geyer, M. A.},
+ Journal = {Eur. J. Pharmacol.},
+ Month = {Feb},
+ Pages = {135--143},
+ Title = {{{M}odulation of phencyclidine-induced changes in locomotor activity and patterns in rats by serotonin}},
+ Volume = {343},
+ Year = {1998}}
+
+@article{Krebs-Thomson1998a,
+ Author = {Krebs-Thomson, K. and Paulus, M. P. and Geyer, M. A.},
+ Journal = {Neuropsychopharmacology},
+ Month = {May},
+ Pages = {339--351},
+ Title = {{{E}ffects of hallucinogens on locomotor and investigatory activity and patterns: influence of 5-{H}{T}2{A} and 5-{H}{T}2{C} receptors}},
+ Volume = {18},
+ Year = {1998}}
+
+@article{Krueger2001,
+ Author = {Krueger, J.},
+ Journal = {American Psychologist},
+ Pages = {16--26},
+ Title = {Null Hypothesis Significance Testing: {O}n the Survival of a Flawed Method},
+ Volume = {56},
+ Year = {2001}}
+
+@article{Krueger2009,
+ Author = {Krueger, K. A. and Dayan, P.},
+ Journal = {Cognition},
+ Month = {Mar},
+ Pages = {380--394},
+ Title = {{{F}lexible shaping: how learning in small steps helps}},
+ Volume = {110},
+ Year = {2009}}
+
+@book{Kruschke2009,
+ Author = {Kruschke, John K.},
+ Owner = {Woo-Young Ahn},
+ Timestamp = {2009.08.14},
+ Title = {Bayesian Data Analysis: A Tutorial and How-To Guide with R},
+ Year = {in preparation}}
+
+@article{Kruskal1964,
+ Author = {Kruskal, J. B.},
+ Journal = {Psychometrika},
+ Pages = {115--129},
+ Title = {Nonmetric Multidimensional Scaling: A Numerical Method},
+ Volume = {29},
+ Year = {1964}}
+
+@book{Kuhn1962,
+ Address = {Chicago},
+ Author = {Kuhn, T. S.},
+ Publisher = {University of {C}hicago Press},
+ Title = {The Structure of Scientific Revolutions},
+ Year = {1962}}
+
+@article{Kuhnen2005,
+ Author = {Kuhnen, C. M. and Knutson, B.},
+ Journal = {Neuron},
+ Pages = {763--770},
+ Title = {{{T}he neural basis of financial risk taking}},
+ Volume = {47},
+ Year = {2005}}
+
+@article{Kujala2006,
+ Author = {Janne V. Kujala and Tuomas J. Lukka},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {369--389},
+ Title = {Bayesian adaptive estimation: The next dimension},
+ Volume = {50},
+ Year = {2006}}
+
+@article{Kunz2008,
+ Author = {Kunz, S. and Beblo, T. and Driessen, M. and Woermann, F.},
+ Journal = {Neurocase},
+ Pages = {343--346},
+ Title = {{f{M}{R}{I} of alcohol craving after individual cues: a follow-up case report}},
+ Volume = {14},
+ Year = {2008}}
+
+@article{Kwon2003,
+ Author = {Kwon, J. S. and Kim, J. J. and Lee, D. W. and Lee, J. S. and Lee, D. S. and Kim, M. S. and Lyoo, I. K. and Cho, M. J. and Lee, M. C.},
+ Journal = {Psychiatry Res},
+ Month = {Jan},
+ Pages = {37--47},
+ Title = {{{N}eural correlates of clinical symptoms and cognitive dysfunctions in obsessive-compulsive disorder}},
+ Volume = {122},
+ Year = {2003}}
+
+@article{Kwon2000,
+ Author = {Kwon, J. S. and Kim, Y. M. and Chang, C. G. and Park, B. J. and Kim, L. and Yoon, D. J. and Han, W. S. and Lee, H. J. and Lyoo, I. K.},
+ Journal = {Am J Psychiatry},
+ Month = {Dec},
+ Pages = {1966--1972},
+ Title = {{{T}hree-year follow-up of women with the sole diagnosis of depressive personality disorder: subsequent development of dysthymia and major depression}},
+ Volume = {157},
+ Year = {2000}}
+
+@article{LaBerge1994,
+ Author = {LaBerge, D. A.},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {198--243},
+ Title = {Quantitative Models of Attention and Response Processes in Shape Identification Tasks},
+ Volume = {38},
+ Year = {1994}}
+
+@article{Lacbawan2009,
+ Author = {Lacbawan, F. and others},
+ Journal = {J. Med. Genet.},
+ Month = {Jun},
+ Pages = {389--398},
+ Title = {{{C}linical spectrum of {S}{I}{X}3-associated mutations in holoprosencephaly: correlation between genotype, phenotype and function}},
+ Volume = {46},
+ Year = {2009}}
+
+@article{Lai2003,
+ Author = {Lai, J. and Cella, D. and Chang, C. and Bode, R. K. and Heinemann, A. W.},
+ Journal = {Quality of Life Research: An International Journal of Quality of Life Aspects of Treatment, Care and Rehabilitation},
+ Owner = {Wooyoung Ahn},
+ Pages = {485--501},
+ Timestamp = {2007.04.30},
+ Title = {Item banking to improve, shorten and computerize self-reported fatigue: An illustration of steps to create a core item bank from the {FACIT}-Fatigue scale},
+ Volume = {12},
+ Year = {2003}}
+
+@article{Laming1979,
+ Author = {Laming, D.},
+ Journal = {Acta Psychologica},
+ Pages = {199--224},
+ Title = {Choice Reaction Performance Following an Error},
+ Volume = {43},
+ Year = {1979}}
+
+@article{Laming1979a,
+ Author = {Laming, D.},
+ Journal = {Acta Psychologica},
+ Pages = {381--412},
+ Title = {Autocorrelation of Choice--Reaction Times},
+ Volume = {43},
+ Year = {1979}}
+
+@book{Laming1973,
+ Address = {New York},
+ Author = {Laming, D. R. J.},
+ Publisher = {Academic Press},
+ Title = {Mathematical Psychology},
+ Year = {1973}}
+
+@book{Laming1968,
+ Address = {London},
+ Author = {Laming, D. R. J.},
+ Publisher = {Academic Press},
+ Title = {Information Theory of Choice--reaction Times},
+ Year = {1968}}
+
+@article{Lan1999,
+ Author = {Lan, R. and Gatley, J. and Lu, Q. and Fan, P. and Fernando, S. R.
and Volkow, N. D. and Pertwee, R. and Makriyannis, A.}, + Journal = {AAPS PharmSci}, + Pages = {E4}, + Title = {{{D}esign and synthesis of the {C}{B}1 selective cannabinoid antagonist {A}{M}281: a potential human {S}{P}{E}{C}{T} ligand}}, + Volume = {1}, + Year = {1999}} + +@article{Landau2007, + Author = {Landau, S. M. and Garavan, H. and Schumacher, E. H. and D'Esposito, M.}, + Journal = {Brain Res.}, + Month = {Nov}, + Pages = {78--89}, + Title = {{{R}egional specificity and practice: dynamic changes in object and spatial working memory}}, + Volume = {1180}, + Year = {2007}} + +@article{Landau2004, + Author = {Landau, S. M. and Schumacher, E. H. and Garavan, H. and Druzgal, T. J. and D'Esposito, M.}, + Journal = {Neuroimage}, + Month = {May}, + Pages = {211--221}, + Title = {{{A} functional {M}{R}{I} study of the influence of practice on component processes of working memory}}, + Volume = {22}, + Year = {2004}} + +@article{Langleben2002, + Author = {Langleben, D. D. and Acton, P. D. and Austin, G. and Elman, I. and Krikorian, G. and Monterosso, J. R. and Portnoy, O. and Ridlehuber, H. W. and Strauss, H. W.}, + Journal = {J. Nucl. Med.}, + Month = {Dec}, + Pages = {1624--1629}, + Title = {{{E}ffects of methylphenidate discontinuation on cerebral blood flow in prepubescent boys with attention deficit hyperactivity disorder}}, + Volume = {43}, + Year = {2002}} + +@article{Langleben2005, + Author = {Langleben, D. D. and Loughead, J. W. and Bilker, W. B. and Ruparel, K. and Childress, A. R. and Busch, S. I. and Gur, R. C.}, + Journal = {Hum Brain Mapp}, + Month = {Dec}, + Pages = {262--272}, + Title = {{{T}elling truth from lie in individual subjects with fast event-related f{M}{R}{I}}}, + Volume = {26}, + Year = {2005}} + +@article{Langleben2006, + Author = {Langleben, D. D. and Monterosso, J. and Elman, I. and Ash, B. and Krikorian, G. and Austin, G.}, + Journal = {Psychiatry Res}, + Month = {Mar}, + Pages = {315--320}, + Title = {{{E}ffect of methylphenidate on {S}troop {C}olor-{W}ord task performance in children with attention deficit hyperactivity disorder}}, + Volume = {141}, + Year = {2006}} + +@article{Langleben2008, + Author = {Langleben, D. D. and Ruparel, K. and Elman, I. and Busch-Winokur, S. and Pratiwadi, R. and Loughead, J. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Am J Psychiatry}, + Month = {Mar}, + Pages = {390--394}, + Title = {{{A}cute effect of methadone maintenance dose on brain {F}{M}{R}{I} response to heroin-related cues}}, + Volume = {165}, + Year = {2008}} + +@article{Langleben2002a, + Author = {Langleben, D. D. and Schroeder, L. and Maldjian, J. A. and Gur, R. C. and McDonald, S. and Ragland, J. D. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Neuroimage}, + Month = {Mar}, + Pages = {727--732}, + Title = {{{B}rain activity during simulated deception: an event-related functional magnetic resonance study}}, + Volume = {15}, + Year = {2002}} + +@article{Langs2008, + Author = {Langs, G. and Samaras, D. and Paragios, N. and Honorio, J. and Alia-Klein, N. and Tomasi, D. and Volkow, N. D. and Goldstein, R. Z.}, + Journal = {Med Image Comput Comput Assist Interv Int Conf Med Image Comput Comput Assist Interv}, + Pages = {925--933}, + Title = {{{T}ask-specific functional brain geometry from model maps}}, + Volume = {11}, + Year = {2008}} + +@article{Larson1990, + Author = {Larson, G. E. and Alderton, D. 
L.},
+ Journal = {Intelligence},
+ Pages = {309--325},
+ Title = {Reaction Time Variability and Intelligence: A ``Worst Performance'' Analysis of Individual Differences},
+ Volume = {14},
+ Year = {1990}}
+
+@article{laruelle1995spect,
+ Author = {Laruelle, M. and Abi-Dargham, A. and van Dyck, C.H. and Rosenblatt, W. and Zea-Ponce, Y. and Zoghbi, S.S. and Baldwin, R.M. and Charney, D.S. and Hoffer, P.B. and Kung, H.F. and others},
+ Journal = {Journal of Nuclear Medicine},
+ Number = {7},
+ Pages = {1182--1190},
+ Publisher = {Soc Nuclear Med},
+ Title = {{SPECT imaging of striatal dopamine release after amphetamine challenge}},
+ Volume = {36},
+ Year = {1995}}
+
+@article{Latham2005,
+ Author = {Latham, P. E. and Dayan, P.},
+ Journal = {Nat. Neurosci.},
+ Month = {Apr},
+ Pages = {408--409},
+ Title = {{{T}ouch\'{e}: the feeling of choice}},
+ Volume = {8},
+ Year = {2005}}
+
+@article{Lau2009,
+ Author = {Lau, J. Y. and Goldman, D. and Buzas, B. and Fromm, S. J. and Guyer, A. E. and Hodgkinson, C. and Monk, C. S. and Nelson, E. E. and Shen, P. H. and Pine, D. S. and Ernst, M.},
+ Journal = {Biol. Psychiatry},
+ Month = {Feb},
+ Pages = {349--355},
+ Title = {{{A}mygdala function and 5-{H}{T}{T} gene variants in adolescent anxiety and major depressive disorder}},
+ Volume = {65},
+ Year = {2009}}
+
+@article{Lau2008,
+ Author = {Lau, J. Y. and Lissek, S. and Nelson, E. E. and Lee, Y. and Roberson-Nay, R. and Poeth, K. and Jenness, J. and Ernst, M. and Grillon, C. and Pine, D. S.},
+ Journal = {J Am Acad Child Adolesc Psychiatry},
+ Month = {Jan},
+ Pages = {94--102},
+ Title = {{{F}ear conditioning in adolescents with anxiety disorders: results from a novel experimental paradigm}},
+ Volume = {47},
+ Year = {2008}}
+
+@article{Laudy2005,
+ Author = {Laudy, O. and Zoccolillo, M. and Baillargeon, R. H. and Boom, J. and Tremblay, R. E. and Hoijtink, H.},
+ Journal = {European Journal of Developmental Psychology},
+ Pages = {1--15},
+ Title = {Applications of Confirmatory Latent Class Analysis in Developmental Psychology},
+ Volume = {2},
+ Year = {2005}}
+
+@article{Lavine1999,
+ Author = {Lavine, M. and Schervish, M. J.},
+ Journal = {The American Statistician},
+ Pages = {119--122},
+ Title = {Bayes Factors: What They are and What They are not},
+ Volume = {53},
+ Year = {1999}}
+
+@article{laviolette2006roles,
+ Author = {Laviolette, S.R. and Grace, A.A.},
+ Journal = {Cellular and Molecular Life Sciences (CMLS)},
+ Number = {14},
+ Pages = {1597--1613},
+ Publisher = {Springer},
+ Title = {{The roles of cannabinoid and dopamine receptor systems in neural emotional learning circuits: implications for schizophrenia and addiction}},
+ Volume = {63},
+ Year = {2006}}
+
+@article{Lawrance1977,
+ Author = {Lawrance, A. J. and Kottegoda, N. T.},
+ Journal = {Journal of the Royal Statistical Society A},
+ Pages = {1--47},
+ Title = {Stochastic Modelling of Riverflow Time Series},
+ Volume = {140},
+ Year = {1977}}
+
+@article{Lawrence2003,
+ Author = {Lawrence, N. S. and Ross, T. J. and Hoffmann, R. and Garavan, H. and Stein, E. A.},
+ Journal = {J Cogn Neurosci},
+ Month = {Oct},
+ Pages = {1028--1038},
+ Title = {{{M}ultiple neuronal networks mediate sustained attention}},
+ Volume = {15},
+ Year = {2003}}
+
+@article{le2005dopamine,
+ Author = {Le Foll, B. and Goldberg, S.R.
and Sokoloff, P.}, + Journal = {Neuropharmacology}, + Number = {4}, + Pages = {525--541}, + Publisher = {Elsevier}, + Title = {{The dopamine D3 receptor and drug dependence: effects on reward or beyond?}}, + Volume = {49}, + Year = {2005}} + +@article{Lee2007, + Author = {Lee, B. K. and Glass, T. A. and McAtee, M. J. and Wand, G. S. and Bandeen-Roche, K. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jul}, + Pages = {810--818}, + Title = {{{A}ssociations of salivary cortisol with cognitive function in the {B}altimore memory study}}, + Volume = {64}, + Year = {2007}} + +@article{Lee2008a, + Author = {Lee, B. K. and Glass, T. A. and Wand, G. S. and McAtee, M. J. and Bandeen-Roche, K. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {Am J Psychiatry}, + Month = {Nov}, + Pages = {1456--1464}, + Title = {{{A}polipoprotein e genotype, cortisol, and cognitive function in community-dwelling older adults}}, + Volume = {165}, + Year = {2008}} + +@article{Lee2008, + Author = {Lee, B. T. and Paik, J. W. and Kang, R. H. and Chung, S. Y. and Kwon, H. I. and Khang, H. S. and Lyoo, I. K. and Chae, J. H. and Kwon, J. H. and Kim, J. W. and Lee, M. S. and Ham, B. J.}, + Journal = {World J. Biol. Psychiatry}, + Month = {May}, + Pages = {1--8}, + Title = {{{T}he neural substrates of affective face recognition in patients with {H}wa-{B}yung and healthy individuals in {K}orea}}, + Year = {2008}} + +@article{Lee2006a, + Author = {Lee, D.}, + Journal = {Current opinion in neurobiology}, + Number = {2}, + Pages = {191--198}, + Publisher = {Elsevier}, + Title = {{Neural basis of quasi-rational decision making}}, + Volume = {16}, + Year = {2006}} + +@article{Lee2005a, + Author = {Lee, D.}, + Journal = {Nature neuroscience}, + Number = {9}, + Pages = {1129--1130}, + Title = {{Neuroeconomics: making risky choices in the brain}}, + Volume = {8}, + Year = {2005}} + +@article{Lee2003, + Author = {Lee, J. H. and Telang, F. W. and Springer, C. S. and Volkow, N. D.}, + Journal = {Life Sci.}, + Month = {Aug}, + Pages = {1953--1961}, + Title = {{{A}bnormal brain activation to visual stimulation in cocaine abusers}}, + Volume = {73}, + Year = {2003}} + +@article{Lee2005b, + Author = {Lee, J. Y. and Lyoo, I. K. and Kim, S. U. and Jang, H. S. and Lee, D. W. and Jeon, H. J. and Park, S. C. and Cho, M. J.}, + Journal = {Psychiatry Clin. Neurosci.}, + Month = {Feb}, + Pages = {45--51}, + Title = {{{I}ntellect declines in healthy elderly subjects and cerebellum}}, + Volume = {59}, + Year = {2005}} + +@article{Leeinpress, + Author = {Lee, M. D.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {??--??}, + Title = {Three Case Studies in the {B}ayesian Analysis of Cognitive Models}, + Volume = {??}, + Year = {in press}} + +@article{Lee2008b, + Author = {Lee, M. D.}, + Journal = {Psychonomic Bulletin and Review}, + Number = {1}, + Pages = {1}, + Publisher = {PSYCHONOMIC SOCIETY, INC.}, + Title = {{Three case studies in the Bayesian analysis of cognitive models}}, + Volume = {15}, + Year = {2008}} + +@article{Lee2006d, + Author = {Lee, M. D.}, + Journal = {Cognitive Science: A Multidisciplinary Journal}, + Number = {3}, + Pages = {1--26}, + Publisher = {Psychology Press}, + Title = {{A hierarchical Bayesian model of human decision-making on an optimal stopping problem}}, + Volume = {30}, + Year = {2006}} + +@article{Lee2002, + Author = {Lee, M. 
D.}, + Journal = {Journal of Classification}, + Pages = {69--85}, + Title = {Generating Additive Clustering Models With Limited Stochastic Complexity}, + Volume = {19}, + Year = {2002}} + +@incollection{Leeinpressa, + Address = {Cambridge, MA}, + Author = {Lee, M. D. and Fuss, I. and Navarro, D.}, + Booktitle = {Advances in {N}eural {I}nformation {P}rocessing {S}ystems 19}, + Editor = {B. Sch\"{o}lkopf and J. Platt and T. Hoffman}, + Publisher = {MIT Press}, + Title = {A {B}ayesian Approach to Diffusion Models of Decision--Making and Response Time}, + Year = {in press}} + +@article{Lee2006, + Author = {Lee, M. D. and Pope, K. J.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {193--202}, + Title = {Model Selection for the Rate Problem: {A} Comparison of Significance Testing, {B}ayesian, and {M}inimum {D}escription {L}ength Statistical Inference}, + Volume = {50}, + Year = {2006}} + +@article{Lee2008e, + Author = {Lee, M. D. and Vanpaemel, W.}, + Journal = {Cognitive Science: A Multidisciplinary Journal}, + Number = {8}, + Pages = {1403--1424}, + Publisher = {Psychology Press}, + Title = {{Exemplars, Prototypes, Similarities, and Rules in Category Representation: An Example of Hierarchical Bayesian Analysis}}, + Volume = {32}, + Year = {2008}} + +@article{Lee2005, + Author = {Lee, M. D. and Wagenmakers, E.--J.}, + Journal = {Psychological Review}, + Pages = {662--668}, + Title = {{B}ayesian statistical inference in psychology: Comment on {T}rafimow (2003).}, + Volume = {112}, + Year = {2005}} + +@book{Lee1989, + Address = {New York}, + Author = {Lee, P. M.}, + Publisher = {Oxford University Press}, + Title = {{B}ayesian Statistics: An Introduction}, + Year = {1989}} + +@article{Lee2009, + Author = {Lee, S. H. and Han, D. H. and Oh, S. and Lyoo, I. K. and Lee, Y. S. and Renshaw, P. F. and Lukas, S. E.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jan}, + Pages = {393--397}, + Title = {{{Q}uantitative electroencephalographic (q{E}{E}{G}) correlates of craving during virtual reality therapy in alcohol-dependent patients}}, + Volume = {91}, + Year = {2009}} + +@article{Lee2006c, + Author = {Lee, Y. S. and Han, D. H. and Jeon, C. M. and Lyoo, I. K. and Na, C. and Chae, S. L. and Cho, S. C.}, + Journal = {Neuroreport}, + Month = {May}, + Pages = {743--746}, + Title = {{{S}erum homocysteine, folate level and methylenetetrahydrofolate reductase 677, 1298 gene polymorphism in {K}orean schizophrenic patients}}, + Volume = {17}, + Year = {2006}} + +@article{Lee2006b, + Author = {Lee, Y. S. and Hwang, J. and Kim, S. J. and Sung, Y. H. and Kim, J. and Sim, M. E. and Bae, S. C. and Kim, M. J. and Lyoo, I. K.}, + Journal = {J Psychiatr Res}, + Month = {Sep}, + Pages = {528--534}, + Title = {{{D}ecreased blood flow of temporal regions of the brain in subjects with panic disorder}}, + Volume = {40}, + Year = {2006}} + +@article{Leek2007, + Author = {Leek, A. B. and Hayes, E. T. and Curran, T. P. and Callan, J. J. and Beattie, V. E. and Dodd, V. A. and O'Doherty, J. V.}, + Journal = {Bioresour. Technol.}, + Month = {Dec}, + Pages = {3431--3439}, + Title = {{{T}he influence of manure composition on emissions of odour and ammonia from finishing pigs fed different concentrations of dietary crude protein}}, + Volume = {98}, + Year = {2007}} + +@article{Lejuez2003, + Author = {Lejuez, C. W. and Aklin, W.M. and Jones, H.A. and Richards, J.B. and Strong, D.R. and Kahler, C.W. 
and Read, J.P.},
+ Journal = {Experimental and Clinical Psychopharmacology},
+ Number = {1},
+ Pages = {26--32},
+ Publisher = {American Psychological Association},
+ Title = {{The balloon analogue risk task (BART) differentiates smokers and nonsmokers}},
+ Volume = {11},
+ Year = {2003}}
+
+@article{Lejuez2002,
+ Author = {Lejuez, C. W. and Read, J. P. and Kahler, C. W. and Richards, J. B. and Ramsey, S. E. and Stuart, G. L. and Strong, D. R. and Brown, R. A.},
+ Journal = {Journal of Experimental Psychology: Applied},
+ Number = {2},
+ Pages = {75--84},
+ Title = {{Evaluation of a behavioral measure of risk taking: the Balloon Analogue Risk Task (BART).}},
+ Volume = {8},
+ Year = {2002}}
+
+@article{Leland2006,
+ Author = {Leland, D. S. and Arce, E. and Feinstein, J. S. and Paulus, M. P.},
+ Journal = {Neuroimage},
+ Month = {Nov},
+ Pages = {725--731},
+ Title = {{{Y}oung adult stimulant users' increased striatal activation during uncertainty is related to impulsivity}},
+ Volume = {33},
+ Year = {2006}}
+
+@article{Leland2008,
+ Author = {Leland, D. S. and Arce, E. and Miller, D. A. and Paulus, M. P.},
+ Journal = {Biol. Psychiatry},
+ Month = {Jan},
+ Pages = {184--190},
+ Title = {{{A}nterior cingulate cortex and benefit of predictive cueing on response inhibition in stimulant dependent individuals}},
+ Volume = {63},
+ Year = {2008}}
+
+@article{Leland2005,
+ Author = {Leland, D. S. and Paulus, M. P.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Apr},
+ Pages = {83--90},
+ Title = {{{I}ncreased risk-taking decision-making but not altered response to punishment in stimulant-using young adults}},
+ Volume = {78},
+ Year = {2005}}
+
+@article{Lengyel2005,
+ Author = {Lengyel, M. and Kwag, J. and Paulsen, O. and Dayan, P.},
+ Journal = {Nat. Neurosci.},
+ Month = {Dec},
+ Pages = {1677--1683},
+ Title = {{{M}atching storage and recall: hippocampal spike timing-dependent plasticity and phase response curves}},
+ Volume = {8},
+ Year = {2005}}
+
+@article{Leon2003a,
+ Author = {Leon, M. I. and Shadlen, M. N.},
+ Journal = {Neuron},
+ Pages = {317--327},
+ Title = {{{R}epresentation of time by neurons in the posterior parietal cortex of the macaque}},
+ Volume = {38},
+ Year = {2003}}
+
+@article{Leon1999,
+ Author = {Leon, M. I. and Shadlen, M. N.},
+ Journal = {Neuron},
+ Pages = {415--425},
+ Title = {{{E}ffect of expected reward magnitude on the response of neurons in the dorsolateral prefrontal cortex of the macaque}},
+ Volume = {24},
+ Year = {1999}}
+
+@article{Leon1998a,
+ Author = {Leon, M. I. and Shadlen, M. N.},
+ Journal = {Neuron},
+ Pages = {669--672},
+ Title = {{{E}xploring the neurophysiology of decisions}},
+ Volume = {21},
+ Year = {1998}}
+
+@article{Leshner2003,
+ Author = {Leshner, A.I.},
+ Journal = {Focus},
+ Number = {2},
+ Pages = {190--193},
+ Publisher = {Am Psychiatric Assoc},
+ Title = {{Addiction is a brain disease, and it matters}},
+ Volume = {1},
+ Year = {2003}}
+
+@article{Leshner1997,
+ Author = {Leshner, AI},
+ Journal = {Archives of General Psychiatry},
+ Number = {8},
+ Pages = {691--694},
+ Publisher = {Am Med Assoc},
+ Title = {{Drug abuse and addiction treatment research. The next generation}},
+ Volume = {54},
+ Year = {1997}}
+
+@article{Letchworth1997,
+ Author = {Letchworth, S. R. and Daunais, J. B. and Hedgecock, A. A. and Porrino, L. J.},
+ Journal = {Brain Res.},
+ Month = {Mar},
+ Pages = {214--222},
+ Title = {{{E}ffects of chronic cocaine administration on dopamine transporter m{R}{N}{A} and protein in the rat}},
+ Volume = {750},
+ Year = {1997}}
+
+@article{Letchworth2001,
+ Author = {Letchworth, S. R. and Nader, M. A. and Smith, H. R. and Friedman, D. P. and Porrino, L. J.},
+ Journal = {J. Neurosci.},
+ Pages = {2799--2807},
+ Title = {{{P}rogression of changes in dopamine transporter binding site density as a result of cocaine self-administration in rhesus monkeys}},
+ Volume = {21},
+ Year = {2001}}
+
+@article{Letchworth2001a,
+ Author = {Letchworth, S. R. and Nader, M. A. and Smith, H. R. and Friedman, D. P. and Porrino, L. J.},
+ Journal = {J. Neurosci.},
+ Month = {Apr},
+ Pages = {2799--2807},
+ Title = {{{P}rogression of changes in dopamine transporter binding site density as a result of cocaine self-administration in rhesus monkeys}},
+ Volume = {21},
+ Year = {2001}}
+
+@article{Letchworth1999,
+ Author = {Letchworth, S. R. and Sexton, T. and Childers, S. R. and Vrana, K. E. and Vaughan, R. A. and Davies, H. M. and Porrino, L. J.},
+ Journal = {J. Neurochem.},
+ Month = {Nov},
+ Pages = {1982--1989},
+ Title = {{{R}egulation of rat dopamine transporter m{R}{N}{A} and protein by chronic cocaine administration}},
+ Volume = {73},
+ Year = {1999}}
+
+@article{Letchworth2000,
+ Author = {Letchworth, S. R. and Smith, H. R. and Porrino, L. J. and Bennett, B. A. and Davies, H. M. and Sexton, T. and Childers, S. R.},
+ Journal = {J. Pharmacol. Exp. Ther.},
+ Month = {May},
+ Pages = {686--696},
+ Title = {{{C}haracterization of a tropane radioligand, [(3){H}]2beta-propanoyl-3beta-(4-tolyl) tropane ([(3){H}]{P}{T}{T}), for dopamine transport sites in rat brain}},
+ Volume = {293},
+ Year = {2000}}
+
+@article{Leung1990,
+ Author = {Leung, H. K.},
+ Journal = {Physical Review A},
+ Pages = {1862--1866},
+ Title = {Stochastic Aspects of Nonequilibrium Phase Transitions},
+ Volume = {41},
+ Year = {1990}}
+
+@article{Levine2004,
+ Author = {Levine, D. A. and Platt, S. L. and Dayan, P. S. and Macias, C. G. and Zorc, J. J. and Krief, W. and Schor, J. and Bank, D. and Fefferman, N. and Shaw, K. N. and Kuppermann, N.},
+ Journal = {Pediatrics},
+ Month = {Jun},
+ Pages = {1728--1734},
+ Title = {{{R}isk of serious bacterial infection in young febrile infants with respiratory syncytial virus infections}},
+ Volume = {113},
+ Year = {2004}}
+
+@article{Lewis2002,
+ Author = {Lewis, A. J. and Simon, E. M. and Barkovich, A. J. and Clegg, N. J. and Delgado, M. R. and Levey, E. and Hahn, J. S.},
+ Journal = {Neurology},
+ Month = {Dec},
+ Pages = {1860--1865},
+ Title = {{{M}iddle interhemispheric variant of holoprosencephaly: a distinct cliniconeuroradiologic subtype}},
+ Volume = {59},
+ Year = {2002}}
+
+@article{Li2006d,
+ Author = {Li, C. S. and Huang, C. and Constable, R. T. and Sinha, R.},
+ Journal = {J. Neurosci.},
+ Month = {Jan},
+ Pages = {186--192},
+ Title = {{{I}maging response inhibition in a stop-signal task: neural correlates independent of signal monitoring and post-response processing}},
+ Volume = {26},
+ Year = {2006}}
+
+@article{Li2006e,
+ Author = {Li, C. S. and Huang, C. and Constable, R. T. and Sinha, R.},
+ Journal = {Neuroimage},
+ Month = {Oct},
+ Pages = {1918--1929},
+ Title = {{{G}ender differences in the neural correlates of response inhibition during a stop signal task}},
+ Volume = {32},
+ Year = {2006}}
+
+@article{Li2008c,
+ Author = {Li, C. S. and Huang, C. and Yan, P. and Bhagwagar, Z. and Milivojevic, V. and Sinha, R.},
+ Journal = {Neuropsychopharmacology},
+ Month = {Jul},
+ Pages = {1798--1806},
+ Title = {{{N}eural correlates of impulse control during stop signal inhibition in cocaine-dependent men}},
+ Volume = {33},
+ Year = {2008}}
+
+@article{Li2008b,
+ Author = {Li, C. S. and Huang, C. and Yan, P. and Paliwal, P. and Constable, R. T. and Sinha, R.},
+ Journal = {J Cogn Neurosci},
+ Month = {Jun},
+ Pages = {1021--1029},
+ Title = {{{N}eural correlates of post-error slowing during a stop signal task: a functional magnetic resonance imaging study}},
+ Volume = {20},
+ Year = {2008}}
+
+@article{Li2005b,
+ Author = {Li, C. S. and Kemp, K. and Milivojevic, V. and Sinha, R.},
+ Journal = {Gend Med},
+ Month = {Sep},
+ Pages = {174--182},
+ Title = {{{N}euroimaging study of sex differences in the neuropathology of cocaine abuse}},
+ Volume = {2},
+ Year = {2005}}
+
+@article{Li2006c,
+ Author = {Li, C. S. and Kosten, T. R. and Sinha, R.},
+ Journal = {Neuroreport},
+ Month = {Feb},
+ Pages = {243--247},
+ Title = {{{A}ntisocial personality and stress-induced brain activation in cocaine-dependent patients}},
+ Volume = {17},
+ Year = {2006}}
+
+@article{Li2005a,
+ Author = {Li, C. S. and Kosten, T. R. and Sinha, R.},
+ Journal = {Biol. Psychiatry},
+ Month = {Mar},
+ Pages = {487--494},
+ Title = {{{S}ex differences in brain activation during stress imagery in abstinent cocaine users: a functional magnetic resonance imaging study}},
+ Volume = {57},
+ Year = {2005}}
+
+@article{Li2009,
+ Author = {Li, C. S. and Luo, X. and Yan, P. and Bergquist, K. and Sinha, R.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {Apr},
+ Pages = {740--750},
+ Title = {{{A}ltered impulse control in alcohol dependence: neural measures of stop signal performance}},
+ Volume = {33},
+ Year = {2009}}
+
+@article{Li2005,
+ Author = {Li, C. S. and Milivojevic, V. and Constable, R. T. and Sinha, R.},
+ Journal = {Psychiatry Res},
+ Month = {Dec},
+ Pages = {271--280},
+ Title = {{{R}ecent cannabis abuse decreased stress-induced {B}{O}{L}{D} signals in the frontal and cingulate cortices of cocaine dependent individuals}},
+ Volume = {140},
+ Year = {2005}}
+
+@article{Li2006b,
+ Author = {Li, C. S. and Milivojevic, V. and Kemp, K. and Hong, K. and Sinha, R.},
+ Journal = {Drug Alcohol Depend},
+ Month = {Dec},
+ Pages = {205--212},
+ Title = {{{P}erformance monitoring and stop signal inhibition in abstinent patients with cocaine dependence}},
+ Volume = {85},
+ Year = {2006}}
+
+@article{Li2008a,
+ Author = {Li, C. S. and Sinha, R.},
+ Journal = {Neurosci Biobehav Rev},
+ Pages = {581--597},
+ Title = {{{I}nhibitory control and emotional stress regulation: neuroimaging evidence for frontal-limbic dysfunction in psycho-stimulant addiction}},
+ Volume = {32},
+ Year = {2008}}
+
+@article{Li2006a,
+ Author = {Li, C. S. and Sinha, R.},
+ Journal = {J Psychiatry Neurosci},
+ Month = {Mar},
+ Pages = {115--121},
+ Title = {{{A}lexithymia and stress-induced brain activation in cocaine-dependent men and women}},
+ Volume = {31},
+ Year = {2006}}
+
+@article{Li2007a,
+ Author = {Li, C. S. and Yan, P. and Bergquist, K. L. and Sinha, R.},
+ Journal = {Neuroimage},
+ Month = {Nov},
+ Pages = {640--648},
+ Title = {{{G}reater activation of the "default" brain regions predicts stop signal errors}},
+ Volume = {38},
+ Year = {2007}}
+
+@article{Li2008,
+ Author = {Li, C. S. and Yan, P. and Sinha, R. and Lee, T. W.},
+ Journal = {Neuroimage},
+ Month = {Jul},
+ Pages = {1352--1363},
+ Title = {{{S}ubcortical processes of motor response inhibition during a stop signal task}},
+ Volume = {41},
+ Year = {2008}}
+
+@article{Li2006,
+ Author = {Li, J. and McClure, S. M. and King-Casas, B. and Montague, P. R.},
+ Journal = {PLoS ONE},
+ Pages = {e103},
+ Title = {{{P}olicy adjustment in a dynamic economic game}},
+ Volume = {1},
+ Year = {2006}}
+
+@book{Li1997,
+ Address = {New York},
+ Author = {Li, M. and Vit\'{a}nyi, P.},
+ Publisher = {Springer Verlag},
+ Title = {An Introduction to {K}olmogorov Complexity and its Applications (2nd ed.)},
+ Year = {1997}}
+
+@article{Li2002,
+ Author = {Li, S.--C.},
+ Journal = {Current Directions in Psychological Science},
+ Pages = {38--43},
+ Title = {Connecting the Many Levels and Facets of Cognitive Aging},
+ Volume = {11},
+ Year = {2002}}
+
+@article{Li2001,
+ Author = {Li, T. K. and Spanagel, R. and Colombo, G. and McBride, W. J. and Porrino, L. J. and Suzuki, T. and Rodd-Henricks, Z. A.},
+ Journal = {Alcohol. Clin. Exp. Res.},
+ Month = {May},
+ Pages = {117S--126S},
+ Title = {{{A}lcohol reinforcement and voluntary ethanol consumption}},
+ Volume = {25},
+ Year = {2001}}
+
+@article{Li2007,
+ Author = {Li, T. K. and Volkow, N. D. and Baler, R. D. and Egli, M.},
+ Journal = {Biol. Psychiatry},
+ Month = {Jan},
+ Pages = {1--3},
+ Title = {{{T}he biological bases of nicotine and alcohol co-addiction}},
+ Volume = {61},
+ Year = {2007}}
+
+@article{Lim2002,
+ Author = {Lim, K.O. and Choi, S.J.
and Pomara, N. and Wolkin, A. and Rotrosen, J.P.}, + Journal = {Biological Psychiatry}, + Number = {11}, + Pages = {890--895}, + Publisher = {Elsevier}, + Title = {{Reduced frontal white matter integrity in cocaine dependence: a controlled diffusion tensor imaging study}}, + Volume = {51}, + Year = {2002}} + +@article{Linden2006, + Author = {van der Linden, W. J.}, + Journal = {Journal of Educational and Behavioral Statistics}, + Pages = {181--204}, + Title = {A Lognormal Model for Response Times on Test Items}, + Volume = {31}, + Year = {2006}} + +@article{Lindley2004, + Author = {Lindley, D. V.}, + Journal = {Significance}, + Pages = {85--87}, + Title = {That Wretched Prior}, + Volume = {1}, + Year = {2004}} + +@article{Lindley1996, + Author = {Lindley, D. V.}, + Journal = {The American Statistician}, + Pages = {386}, + Title = {Comment on ``Simple Counterexamples Against the Conditionality Principle" by {I}nge {S}. {H}elland}, + Volume = {50}, + Year = {1996}} + +@article{Lindley1993, + Author = {Lindley, D. V.}, + Journal = {Teaching Statistics}, + Pages = {22--25}, + Title = {The Analysis of Experimental Data: The Appreciation of Tea and Wine}, + Volume = {15}, + Year = {1993}} + +@article{Lindley1986, + Author = {Lindley, D. V.}, + Journal = {The American Statistician}, + Pages = {6--7}, + Title = {Comment on ``Why Isn't Everyone a {B}ayesian?" by Bradley Efron}, + Volume = {40}, + Year = {1986}} + +@article{Lindley1982, + Author = {Lindley, D. V.}, + Journal = {International Statistical Review}, + Pages = {1--26}, + Title = {Scoring Rules and the Inevitability of Probability}, + Volume = {50}, + Year = {1982}} + +@article{Lindley1977, + Author = {Lindley, D. V.}, + Journal = {Synthese}, + Pages = {51--58}, + Title = {The Distinction Between Inference and Decision}, + Volume = {36}, + Year = {1977}} + +@book{Lindley1972, + Address = {Philadelphia (PA)}, + Author = {Lindley, D. V.}, + Publisher = {SIAM}, + Title = {{B}ayesian Statistics, a Review}, + Year = {1972}} + +@article{Lindley1964, + Author = {Lindley, D. V.}, + Journal = {The Annals of Mathematical Statistics}, + Pages = {1622--1643}, + Title = {The {B}ayesian Analysis of Contingency Tables}, + Volume = {35}, + Year = {1964}} + +@article{Lindley1957, + Author = {Lindley, D. V.}, + Journal = {Biometrika}, + Pages = {187--192}, + Title = {A Statistical Paradox}, + Volume = {44}, + Year = {1957}} + +@article{Lindley1976, + Author = {Lindley, D. V. and Phillips, L. D.}, + Journal = {The American Statistician}, + Pages = {112--119}, + Title = {Inference for a {B}ernoulli Process (A {B}ayesian View)}, + Volume = {30}, + Year = {1976}} + +@book{Lindley1984, + Address = {London}, + Author = {Lindley, D. V. and Scott, W. F.}, + Publisher = {Cambridge University Press}, + Title = {New {C}ambridge Elementary Statistical Tables}, + Year = {1984}} + +@article{Lindsey2003, + Author = {Lindsey, K. P. and Gatley, S. J. and Volkow, N. D.}, + Journal = {Curr Psychiatry Rep}, + Month = {Oct}, + Pages = {355--361}, + Title = {{{N}euroimaging in drug abuse}}, + Volume = {5}, + Year = {2003}} + +@article{Lindstrom2009, + Author = {Lindstrom, K. and Guyer, A. E. and Mogg, K. and Bradley, B. P. and Fox, N. A. and Ernst, M. and Nelson, E. E. and Leibenluft, E. and Britton, J. C. and Monk, C. S. and Pine, D. S. 
and Bar-Haim, Y.}, + Journal = {Brain Res.}, + Month = {Jul}, + Title = {{{N}ormative data on development of neural and behavioral mechanisms underlying attention orienting toward social-emotional stimuli: {A}n exploratory study}}, + Year = {2009}} + +@book{Link1992, + Address = {Hillsdale (NJ)}, + Author = {Link, S. W.}, + Publisher = {Lawrence Erlbaum Associates}, + Title = {The Wave Theory of Difference and Similarity}, + Year = {1992}} + +@article{Link1975, + Author = {Link, S. W.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {114--135}, + Title = {The Relative Judgement Theory of Two Choice Response Time}, + Volume = {12}, + Year = {1975}} + +@article{Link1975a, + Author = {Link, S. W. and Heath, R. A.}, + Journal = {Psychometrika}, + Pages = {77--105}, + Title = {A Sequential Theory of Psychological Discrimination}, + Volume = {40}, + Year = {1975}} + +@article{Linkenkaer-Hansen2001, + Author = {Linkenkaer--Hansen, K. and Nikouline, V. V. and Palva, J. M. and Ilmoniemi, R. J.}, + Journal = {The Journal of Neuroscience}, + Pages = {1370--1377}, + Title = {Long--range Temporal Correlations and Scaling Behavior in Human Brain Oscillations}, + Volume = {21}, + Year = {2001}} + +@article{Little2006, + Author = {Little, R. J.}, + Journal = {The American Statistician}, + Pages = {213--223}, + Title = {Calibrated {B}ayes: {A} {B}ayes/Frequentist Roadmap}, + Volume = {60}, + Year = {2006}} + +@article{Littman2009, + Author = {Littman, M.L.}, + Journal = {Journal of Mathematical Psychology}, + Number = {3}, + Pages = {119--125}, + Publisher = {Elsevier}, + Title = {{A tutorial on partially observable Markov decision processes}}, + Volume = {53}, + Year = {2009}} + +@article{Littman1995, + Author = {Littman, M.L. and Dean, T.L. and Kaelbling, L.P.}, + Booktitle = {Proceedings of the Eleventh Conference on Uncertainty in Artificial Intelligence}, + Pages = {394--402}, + Title = {{On the complexity of solving Markov decision problems}}, + Year = {1995}} + +@article{Liu1999, + Author = {Liu, H. and Delgado, M. R.}, + Journal = {Clin Pharmacokinet}, + Month = {Jun}, + Pages = {453--470}, + Title = {{{T}herapeutic drug concentration monitoring using saliva samples. {F}ocus on anticonvulsants}}, + Volume = {36}, + Year = {1999}} + +@article{Liu1995a, + Author = {Liu, H. and Delgado, M. R.}, + Journal = {Epilepsia}, + Month = {Mar}, + Pages = {249--254}, + Title = {{{I}nteractions of phenobarbital and phenytoin with carbamazepine and its metabolites' concentrations, concentration ratios, and level/dose ratios in epileptic children}}, + Volume = {36}, + Year = {1995}} + +@article{Liu1994, + Author = {Liu, H. and Delgado, M. R.}, + Journal = {Epilepsy Res.}, + Month = {Mar}, + Pages = {257--269}, + Title = {{{T}he influence of polytherapy on the relationships between serum carbamazepine and its metabolites in epileptic children}}, + Volume = {17}, + Year = {1994}} + +@article{Liu1994a, + Author = {Liu, H. and Delgado, M. R.}, + Journal = {Ther Drug Monit}, + Month = {Oct}, + Pages = {469--476}, + Title = {{{I}nfluence of sex, age, weight, and carbamazepine dose on serum concentrations, concentration ratios, and level/dose ratios of carbamazepine and its metabolites}}, + Volume = {16}, + Year = {1994}} + +@article{Liu1994b, + Author = {Liu, H. and Delgado, M. 
R.}, + Journal = {Ther Drug Monit}, + Month = {Apr}, + Pages = {132--138}, + Title = {{{I}mproved therapeutic monitoring of drug interactions in epileptic children using carbamazepine polytherapy}}, + Volume = {16}, + Year = {1994}} + +@article{Liu1994c, + Author = {Liu, H. and Delgado, M. R.}, + Journal = {Epilepsia}, + Pages = {1221--1229}, + Title = {{{A} comprehensive study of the relation between serum concentrations, concentration ratios, and level/dose ratios of carbamazepine and its metabolites with age, weight, dose, and clearances in epileptic children}}, + Volume = {35}, + Year = {1994}} + +@article{Liu1995, + Author = {Liu, H. and Delgado, M. R. and Browne, R. H.}, + Journal = {Clin Neuropharmacol}, + Month = {Feb}, + Pages = {1--12}, + Title = {{{I}nteractions of valproic acid with carbamazepine and its metabolites' concentrations, concentrations ratios, and level/dose ratios in epileptic children}}, + Volume = {18}, + Year = {1995}} + +@article{Liu2009, + Author = {Liu, J. and Liang, J. and Qin, W. and Tian, J. and Yuan, K. and Bai, L. and Zhang, Y. and Wang, W. and Wang, Y. and Li, Q. and others}, + Journal = {Neuroscience Letters}, + Publisher = {Elsevier}, + Title = {{Dysfunctional connectivity patterns in chronic heroin users: An fMRI study}}, + Year = {2009}} + +@article{Liu2003b, + Abstract = {OBJECTIVE: A recent study identified a new class of compounds designated + as the sterol-regulatory element binding protein (SREBP) cleavage-activating + protein (SCAP) ligands that putatively bind to SCAP, leading to increased + LDL receptor (LDLR) expression. In this study, we examined the effects + of SCAP ligand GW707 in comparison with lovastatin and cytokine oncostatin + M (OM) on the regulation of LDLR expression in cultured HepG2 cells. + METHODS AND RESULTS: Our studies uncovered several new features that + distinguish SCAP ligand from lovastatin, a classic 3-hydroxy-3-methylglutaryl + coenzyme A (HMG-CoA) reductase inhibitor, and from OM, which utilize + an SREBP-independent regulatory pathway. We show that the induction + of LDLR mRNA expression by GW707 is not affected by intracellular + cholesterol but is completely abolished by blocking de novo protein + synthesis. Moreover, the effects of GW707 but not lovastatin on LDLR + promoter activity, mRNA expression, and uptake of 1,1'-dioctadecyl-3,3,3',3'-tetramethyl-indocarbocyanin + perchlorate-LDL are markedly enhanced by OM. We further demonstrate + that the amounts of the mature form of SREBP-2 translocated to the + nucleus under GW707 treatment are increased by costimulating cells + with OM. CONCLUSIONS: Our studies provide the first evidence that + higher levels of LDLR expression and function can be achieved through + simultaneous stimulation of the SREBP-dependent and SREBP-independent + pathways, suggesting a strategy to develop an adjunct therapeutic + intervention utilizing both pathways.}, + Author = {Jingwen Liu and Fang Zhang and Cong Li and Meihong Lin and Michael R Briggs}, + Institution = {VA Palo Alto Health Care System, Palo Alto, Calif 94304, USA. 
Jingwen.Liu@med.va.gov}, + Journal = {Arterioscler Thromb Vasc Biol}, + Keywords = {Active Transport, Cell Nucleus, drug effects; CCAAT-Enhancer-Binding Proteins, physiology; Cell Nucleus, drug effects/metabolism; Cytokines, metabolism/pharmacology; DNA-Binding Proteins, physiology; Humans; Intracellular Signaling Peptides and Proteins; Kinetics; Ligands; Lovastatin, metabolism/pharmacology; Membrane Proteins, metabolism/pharmacology; Oncostatin M; Peptide Biosynthesis, drug effects; Peptides, metabolism/pharmacology; Promoter Regions, Genetic, drug effects/genetics; RNA, Messenger, biosynthesis/genetics/metabolism; Receptors, LDL, biosynthesis/genetics; Steroids, metabolism/pharmacology; Sterol Regulatory Element Binding Protein 1; Sterol Regulatory Element Binding Protein 2; Transcription Factors, physiology; Transcription, Genetic, drug effects/physiology; Tumor Cells, Cultured}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Young}, + Pages = {90--96}, + Pmid = {12524230}, + Timestamp = {2009.12.10}, + Title = {Synergistic activation of human LDL receptor expression by SCAP ligand and cytokine oncostatin M.}, + Volume = {23}, + Year = {2003}} + +@article{Liu1998, + Abstract = {The present study was conducted to test the hypothesis that individuals + with substance abuse disorder exhibit structural deficits in the + prefrontal cortex. Volumes of the prefrontal lobe in subjects with + histories of polysubstance abuse (n = 25) were measured and compared + with those in normal volunteers (n = 14), using high-resolution volumetric + magnetic resonance imaging (MRI). The research participants were + men, 22 to 41 years of age. Polysubstance abusers were abstinent + from drugs of abuse (except nicotine) for at least 15 days before + MRI scanning. The total volumes of the prefrontal lobe (left and + right hemispheres) were significantly smaller in the substance abuse + group than in the control group. When the prefrontal lobe was segmented + for gray and white matter, the deficit in the substance abusers was + seen as significantly smaller volumes of gray but not of white matter. + These results indicate that hypoplasia and/or atrophy in the prefrontal + cortex accompany substance abuse and suggest that structural deficits + in the prefrontal cortex may play an essential role in the neuropathological + basis of functional impairments in substance abuse disorder, as demonstrated + by functional brain imaging and cognitive studies.}, + Author = {X. Liu and J. A. Matochik and J. L. Cadet and E. D. London}, + Doi = {10.1016/S0893-133X(97)00143-7}, + Institution = {Neuroscience Branch, National Institute on Drug Abuse, Baltimore, Maryland 21224, USA.}, + Journal = {Neuropsychopharmacology}, + Keywords = {Adult; Humans; Image Interpretation, Computer-Assisted; Magnetic Resonance Imaging; Male; Observer Variation; Prefrontal Cortex, pathology; Substance-Related Disorders, pathology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {243--252}, + Pii = {S0893133X97001437}, + Pmid = {9509492}, + Timestamp = {2009.08.04}, + Title = {Smaller volume of prefrontal lobe in polysubstance abusers: a magnetic resonance imaging study.}, + Url = {http://dx.doi.org/10.1016/S0893-133X(97)00143-7}, + Volume = {18}, + Year = {1998}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/S0893-133X(97)00143-7}} + +@article{Ljungberg1992, + Author = {Ljungberg, T. and Apicella, P. 
and Schultz, W.}, + Journal = {Journal of neurophysiology}, + Number = {1}, + Pages = {145--163}, + Publisher = {Am Physiological Soc}, + Title = {{Responses of monkey dopamine neurons during learning of behavioral reactions}}, + Volume = {67}, + Year = {1992}} + +@article{Lo1991, + Author = {Lo, A. W.}, + Journal = {Econometrica}, + Pages = {1279--1313}, + Title = {Long--term Memory in Stock Market Prices}, + Volume = {59}, + Year = {1991}} + +@article{Lodewyckx2010, + Author = {Lodewyckx, T. Lee, M. D. Wagenmakers, E.-J.}, + Journal = {Manuscript submitted for publication}, + Owner = {Young}, + Timestamp = {2010.05.01}, + Title = {A General Computational Method for Estimating Bayes Factors}, + Year = {2010}} + +@incollection{Loftus2002, + Address = {New York}, + Author = {Loftus, G. R.}, + Booktitle = {{S}tevens' Handbook of Experimental Psychology: Vol. 4. Methodology in Experimental Psychology (3rd ed.)}, + Editor = {Pashler, H. and Wixted, J.}, + Pages = {339--390}, + Publisher = {Wiley}, + Title = {Analysis, Interpretation, and Visual Presentation of Experimental Data}, + Year = {2002}} + +@article{Loftus1996, + Author = {Loftus, G. R.}, + Journal = {Current Directions in Psychological Science}, + Pages = {161--171}, + Title = {Psychology Will be a Much Better Science When we Change the Way we Analyze Data}, + Volume = {5}, + Year = {1996}} + +@article{Loftus1978, + Author = {Loftus, G. R.}, + Journal = {Memory \& Cognition}, + Pages = {312--319}, + Title = {On Interpretation of Interactions}, + Volume = {6}, + Year = {1978}} + +@article{Loftus2004, + Author = {Loftus, G. R. and Oberg, M. A. and Dillon, A. M.}, + Journal = {Psychological Review}, + Pages = {835--863}, + Title = {Linear Theory, Dimensional Theory, and the Face--Inversion Effect}, + Volume = {111}, + Year = {2004}} + +@article{Logan2002, + Author = {Logan, G. D.}, + Journal = {Psychological Review}, + Pages = {376--400}, + Title = {An Instance Theory of Attention and Memory}, + Volume = {109}, + Year = {2002}} + +@article{Logan1992, + Author = {Logan, G. D.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {883--914}, + Title = {Shapes of Reaction--Time Distributions and Shapes of Learning Curves: {A} Test of the Instance Theory of Automaticity}, + Volume = {18}, + Year = {1992}} + +@article{Logan1988, + Author = {Logan, G. D.}, + Journal = {Psychological Review}, + Pages = {492--527}, + Title = {Toward An Instance Theory of Automatization}, + Volume = {95}, + Year = {1988}} + +@article{Logan2001, + Author = {Logan, J. and Fowler, J. S. and Dewey, S. L. and Volkow, N. D. and Gatley, S. J.}, + Journal = {J Neural Transm}, + Pages = {279--286}, + Title = {{{A} consideration of the dopamine {D}2 receptor monomer-dimer equilibrium and the anomalous binding properties of the dopamine {D}2 receptor ligand, {N}-methyl spiperone}}, + Volume = {108}, + Year = {2001}} + +@article{Logan2002a, + Author = {Logan, J. and Fowler, J. S. and Ding, Y. S. and Franceschi, D. and Wang, G. J. and Volkow, N. D. and Felder, C. and Alexoff, D.}, + Journal = {J. Cereb. Blood Flow Metab.}, + Month = {Nov}, + Pages = {1367--1376}, + Title = {{{S}trategy for the formation of parametric images under conditions of low injected radioactivity applied to {P}{E}{T} studies with the irreversible monoamine oxidase {A} tracers [11{C}]clorgyline and deuterium-substituted [11{C}]clorgyline}}, + Volume = {22}, + Year = {2002}} + +@article{Logan2001a, + Author = {Logan, J. and Fowler, J. S. and Volkow, N. D. 
and Ding, Y. S. and Wang, G. J. and Alexoff, D. L.}, + Journal = {J. Cereb. Blood Flow Metab.}, + Month = {Mar}, + Pages = {307--320}, + Title = {{{A} strategy for removing the bias in the graphical analysis method}}, + Volume = {21}, + Year = {2001}} + +@article{Logan2000, + Author = {Logan, J. and Fowler, J. S. and Volkow, N. D. and Wang, G. J. and MacGregor, R. R. and Shea, C.}, + Journal = {Nucl. Med. Biol.}, + Month = {Jan}, + Pages = {43--49}, + Title = {{{R}eproducibility of repeated measures of deuterium substituted [11{C}]{L}-deprenyl ([11{C}]{L}-deprenyl-{D}2) binding in the human brain}}, + Volume = {27}, + Year = {2000}} + +@article{Logan2007, + Author = {Logan, J. and Wang, G. J. and Telang, F. and Fowler, J. S. and Alexoff, D. and Zabroski, J. and Jayne, M. and Hubbard, B. and King, P. and Carter, P. and Shea, C. and Xu, Y. and Muench, L. and Schlyer, D. and Learned-Coughlin, S. and Cosson, V. and Volkow, N. D. and Ding, Y. S.}, + Journal = {Nucl. Med. Biol.}, + Month = {Aug}, + Pages = {667--679}, + Title = {{{I}maging the norepinephrine transporter in humans with ({S},{S})-[11{C}]{O}-methyl reboxetine and {P}{E}{T}: problems and progress}}, + Volume = {34}, + Year = {2007}} + +@article{Logothetis2001, + Author = {Logothetis, N.K. and Pauls, J. and Augath, M. and Trinath, T. and Oeltermann, A.}, + Journal = {Nature}, + Pages = {150--157}, + Title = {{Neurophysiological investigation of the basis of the fMRI signal}}, + Volume = {412}, + Year = {2001}} + +@article{Lohrenz2007, + Author = {Lohrenz, T. and McCabe, K. and Camerer, C. F. and Montague, P. R.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {May}, + Pages = {9493--9498}, + Title = {{{N}eural signature of fictive learning signals in a sequential investment task}}, + Volume = {104}, + Year = {2007}} + +@article{london1999brain, + Author = {London, ED and Bonson, KR and Ernst, M. and Grant, S.}, + Journal = {Critical reviews in neurobiology}, + Number = {3}, + Pages = {227}, + Title = {{Brain imaging studies of cocaine abuse: implications for medication development.}}, + Volume = {13}, + Year = {1999}} + +@article{London2005, + Author = {London, E. D. and Berman, S. M. and Voytek, B. and Simon, S. L. and Mandelkern, M. A. and Monterosso, J. and Thompson, P. M. and Brody, A. L. and Geaga, J. A. and Hong, M. S. and Hayashi, K. M. and Rawson, R. A. and Ling, W.}, + Journal = {Biol. Psychiatry}, + Month = {Nov}, + Pages = {770--778}, + Title = {{{C}erebral metabolic dysfunction and impaired vigilance in recently abstinent methamphetamine abusers}}, + Volume = {58}, + Year = {2005}} + +@article{Long2009, + Author = {Long, A. B. and Kuhn, C. M. and Platt, M. L.}, + Journal = {Soc Cogn Affect Neurosci}, + Month = {Jun}, + Title = {{{S}erotonin shapes risky decision making in monkeys}}, + Year = {2009}} + +@article{Love1999, + Abstract = {The order of processing, whether global forms are processed prior + to local forms or vice versa, has been of considerable interest. + Many current theories hold that the more perceptually conspicuous + form is identified first. An alternative view is presented here in + which the stuctural relations among elements are an important factor + in explaining the relative speeds of global and local processing. + We equated the conspicuity of the global and local forms in three + experiments and still found advantages in the processing of global + forms. Subjects were able to process the relations among the elements + quickly, even before the elements themselves were identified. 
According + to our alternative view, subjects created equivalence classes of + similar and proximate local elements before identifying the constituent + elements. The experiments required subjects to decide whether two + displays were the same or different, and consequently, the results + are relevant to work in higher-level cognition that stresses the + importance of comparison processes (e.g., analogy and conceptual + combination). We conclude by evaluating related work in higher-level + cognition in light of our findings.}, + Author = {B. C. Love and J. N. Rouder and E. J. Wisniewski}, + Doi = {10.1006/cogp.1998.0697}, + Institution = {Northwestern University, 2029 Sheridan Road, Evanston, IL 60208, USA. loveb@nwu.edu}, + Journal = {Cogn Psychol}, + Keywords = {Adult; Analysis of Variance; Concept Formation, physiology; Discrimination (Psychology), physiology; Humans; Models, Psychological; Pattern Recognition, Visual, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Mar}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {291--316}, + Pii = {S0010-0285(98)90697-1}, + Pmid = {10090805}, + Timestamp = {2009.08.15}, + Title = {A structural account of global and local processing.}, + Url = {http://dx.doi.org/10.1006/cogp.1998.0697}, + Volume = {38}, + Year = {1999}, + Bdsk-Url-1 = {http://dx.doi.org/10.1006/cogp.1998.0697}} + +@article{Lovero2009, + Author = {Lovero, K. L. and Simmons, A. N. and Aron, J. L. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Jan}, + Title = {{{A}nterior insular cortex anticipates impending stimulus significance}}, + Year = {2009}} + +@article{Lovero2009a, + Author = {Lovero, K. L. and Simmons, A. N. and Aron, J. L. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {976--983}, + Title = {{{A}nterior insular cortex anticipates impending stimulus significance}}, + Volume = {45}, + Year = {2009}} + +@incollection{Luce1959, + Address = {New York}, + Author = {Luce, R. D.}, + Booktitle = {Handbook of Mathematical Psychology}, + Editor = {Luce, R. D. and Bush, R. R. and Galanter, E.}, + Pages = {103--189}, + Publisher = {Wiley}, + Title = {Detection and Recognition}, + Year = {1959}} + +@book{Luce1986, + Address = {New York}, + Author = {Luce, R. D.}, + Publisher = {Oxford University Press}, + Title = {Response Times}, + Year = {1986}} + +@book{Luce1959a, + Author = {Luce, R. D.}, + Owner = {Wooyoung Ahn}, + Publisher = {New York: Wiley}, + Timestamp = {2007.05.04}, + Title = {Individual choice behavior}, + Year = {1959}} + +@article{Lucignani1987, + Author = {Lucignani, G. and Namba, H. and Nehlig, A. and Porrino, L. J. and Kennedy, C. and Sokoloff, L.}, + Journal = {J. Cereb. Blood Flow Metab.}, + Month = {Jun}, + Pages = {309--314}, + Title = {{{E}ffects of insulin on local cerebral glucose utilization in the rat}}, + Volume = {7}, + Year = {1987}} + +@article{Lucignani1984, + Author = {Lucignani, G. and Porrino, L. J. and Tamminga, C. A.}, + Journal = {Eur. J. Pharmacol.}, + Month = {May}, + Pages = {147--151}, + Title = {{{E}ffects of systemically administered cholecystokinin-octapeptide on local cerebral metabolism}}, + Volume = {101}, + Year = {1984}} + +@article{Ludbrook2003, + Author = {Ludbrook, J.}, + Journal = {BMC Medical Research Methodology}, + Pages = {15}, + Title = {Interim Analyses of Data as They Accumulate in Laboratory Experimentation}, + Volume = {3}, + Year = {2003}} + +@article{Ludewig2003a, + Author = {Ludewig, K. and Paulus, M. P. and Vollenweider, F. 
X.}, + Journal = {Psychiatry Res}, + Month = {Aug}, + Pages = {293--306}, + Title = {{{B}ehavioural dysregulation of decision-making in deficit but not nondeficit schizophrenia patients}}, + Volume = {119}, + Year = {2003}} + +@article{Ludewig2003, + Author = {Ludewig, S. and Paulus, M. P. and Ludewig, K. and Vollenweider, F. X.}, + Journal = {J Affect Disord}, + Month = {Sep}, + Pages = {183--189}, + Title = {{{D}ecision-making strategies by panic disorder subjects are more sensitive to errors}}, + Volume = {76}, + Year = {2003}} + +@article{Lunn2009, + Author = {Lunn, D. and Spiegelhalter, D. and Thomas, A. and Best, N.}, + Journal = {Statistics in medicine}, + Title = {{The BUGS project: Evolution, critique and future directions.}}, + Year = {2009}} + +@article{Lunn2000, + Author = {Lunn, D.J. and Thomas, A. and Best, N. and Spiegelhalter, D.}, + Journal = {Statistics and Computing}, + Number = {4}, + Pages = {325--337}, + Publisher = {Springer}, + Title = {{WinBUGS-a Bayesian modelling framework: concepts, structure, and extensibility}}, + Volume = {10}, + Year = {2000}} + +@article{Luo2009, + Author = {Luo, Z. and Yu, M. and Smith, S. D. and Kritzer, M. and Du, C. and Ma, Y. and Volkow, N. D. and Glass, P. S. and Benveniste, H.}, + Journal = {Anesth. Analg.}, + Month = {Jan}, + Pages = {334--344}, + Title = {{{T}he effect of intravenous lidocaine on brain activation during non-noxious and acute noxious stimulation of the forepaw: a functional magnetic resonance imaging study in the rat}}, + Volume = {108}, + Year = {2009}} + +@article{Lutter2009, + Author = {Lutter, M. and Nestler, E. J.}, + Journal = {J. Nutr.}, + Month = {Mar}, + Pages = {629--632}, + Title = {{{H}omeostatic and hedonic signals interact in the regulation of food intake}}, + Volume = {139}, + Year = {2009}} + +@article{Lyons1996, + Author = {Lyons, D. and Friedman, D. P. and Nader, M. A. and Porrino, L. J.}, + Journal = {J. Neurosci.}, + Month = {Feb}, + Pages = {1230--1238}, + Title = {{{C}ocaine alters cerebral metabolism within the ventral striatum and limbic cortex of monkeys}}, + Volume = {16}, + Year = {1996}} + +@article{Lyons1998b, + Author = {Lyons, D. and Miller, M. D. and Hedgecock-Rowe, A. A. and Crane, A. M. and Porrino, L. J.}, + Journal = {Alcohol}, + Month = {Oct}, + Pages = {213--219}, + Title = {{{T}ime-dependent effects of acute ethanol administration on regional cerebral blood flow in the rat}}, + Volume = {16}, + Year = {1998}} + +@article{Lyons1997, + Author = {Lyons, D. and Porrino, L. J.}, + Journal = {Brain Res.}, + Month = {Apr}, + Pages = {69--79}, + Title = {{{D}opamine depletion in the rostral nucleus accumbens alters the cerebral metabolic response to cocaine in the rat}}, + Volume = {753}, + Year = {1997}} + +@article{Lyons1998a, + Author = {Lyons, D. and Whitlow, C. T. and Porrino, L. J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Oct}, + Pages = {201--206}, + Title = {{{M}ultiphasic consequences of the acute administration of ethanol on cerebral glucose metabolism in the rat}}, + Volume = {61}, + Year = {1998}} + +@article{Lyons1998, + Author = {Lyons, D. and Whitlow, C. T. and Smith, H. R. and Porrino, L. J.}, + Journal = {Recent Dev Alcohol}, + Pages = {253--284}, + Title = {{{B}rain imaging. {F}unctional consequences of ethanol in the central nervous system}}, + Volume = {14}, + Year = {1998}} + +@article{Lyoo2003c, + Author = {Lyoo, I. K. and Demopulos, C. M. and Hirashima, F. and Ahn, K. H. and Renshaw, P. 
F.}, + Journal = {Bipolar Disord}, + Month = {Aug}, + Pages = {300--306}, + Title = {{{O}ral choline decreases brain purine levels in lithium-treated subjects with rapid-cycling bipolar disorder: a double-blind trial using proton and lithium magnetic resonance spectroscopy}}, + Volume = {5}, + Year = {2003}} + +@article{Lyoo2004b, + Author = {Lyoo, I. K. and Han, C. H. and Lee, S. J. and Yune, S. K. and Ha, J. H. and Chung, S. J. and Choi, H. and Seo, C. S. and Hong, K. E.}, + Journal = {Compr Psychiatry}, + Pages = {121--128}, + Title = {{{T}he reliability and validity of the junior temperament and character inventory}}, + Volume = {45}, + Year = {2004}} + +@article{Lyoo1998, + Author = {Lyoo, I. K. and Han, M. H. and Cho, D. Y.}, + Journal = {J Affect Disord}, + Month = {Sep}, + Pages = {235--243}, + Title = {{{A} brain {M}{R}{I} study in subjects with borderline personality disorder}}, + Volume = {50}, + Year = {1998}} + +@article{Lyoo2006b, + Author = {Lyoo, I. K. and Hwang, J. and Sim, M. and Dunn, B. J. and Renshaw, P. F.}, + Journal = {CNS Spectr}, + Month = {Apr}, + Pages = {269--280}, + Title = {{{A}dvances in magnetic resonance imaging methods for the evaluation of bipolar disorder}}, + Volume = {11}, + Year = {2006}} + +@article{Lyoo2004a, + Author = {Lyoo, I. K. and Kim, M. J. and Stoll, A. L. and Demopulos, C. M. and Parow, A. M. and Dager, S. R. and Friedman, S. D. and Dunner, D. L. and Renshaw, P. F.}, + Journal = {Biol. Psychiatry}, + Month = {Mar}, + Pages = {648--651}, + Title = {{{F}rontal lobe gray matter density decreases in bipolar {I} disorder}}, + Volume = {55}, + Year = {2004}} + +@article{Lyoo2003b, + Author = {Lyoo, I. K. and Kong, S. W. and Sung, S. M. and Hirashima, F. and Parow, A. and Hennen, J. and Cohen, B. M. and Renshaw, P. F.}, + Journal = {Psychiatry Res}, + Month = {Jun}, + Pages = {87--100}, + Title = {{{M}ultinuclear magnetic resonance spectroscopy of high-energy phosphate metabolites in human brain following oral supplementation of creatine-monohydrate}}, + Volume = {123}, + Year = {2003}} + +@article{Lyoo2002b, + Author = {Lyoo, I. K. and Kwon, J. S. and Lee, S. J. and Han, M. H. and Chang, C. G. and Seo, C. S. and Lee, S. I. and Renshaw, P. F.}, + Journal = {Biol. Psychiatry}, + Month = {Dec}, + Pages = {1134--1143}, + Title = {{{D}ecrease in genu of the corpus callosum in medication-naive, early-onset dysthymia and depressive personality disorder}}, + Volume = {52}, + Year = {2002}} + +@article{Lyoo2001, + Author = {Lyoo, I. K. and Lee, D. W. and Kim, Y. S. and Kong, S. W. and Kwon, J. S.}, + Journal = {J Clin Psychiatry}, + Month = {Aug}, + Pages = {637--641}, + Title = {{{P}atterns of temperament and character in subjects with obsessive-compulsive disorder}}, + Volume = {62}, + Year = {2001}} + +@article{Lyoo2002a, + Author = {Lyoo, I. K. and Lee, H. K. and Jung, J. H. and Noam, G. G. and Renshaw, P. F.}, + Journal = {Compr Psychiatry}, + Pages = {361--368}, + Title = {{{W}hite matter hyperintensities on magnetic resonance imaging of the brain in children with psychiatric disorders}}, + Volume = {43}, + Year = {2002}} + +@article{Lyoo1995, + Author = {Lyoo, I. K. and Mino, I. and Renshaw, P. F. and Lee, H. K.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Jul}, + Pages = {833--834}, + Title = {{{W}hite matter hyperintensities}}, + Volume = {34}, + Year = {1995}} + +@article{Lyoo1996a, + Author = {Lyoo, I. K. and Noam, G. G. and Lee, C. K. and Lee, H. K. and Kennedy, B. P. and Renshaw, P. F.}, + Journal = {Biol. 
Psychiatry}, + Month = {Nov}, + Pages = {1060--1063}, + Title = {{{T}he corpus callosum and lateral ventricles in children with attention-deficit hyperactivity disorder: a brain magnetic resonance imaging study}}, + Volume = {40}, + Year = {1996}} + +@article{Lyoo2006a, + Author = {Lyoo, I. K. and Pollack, M. H. and Silveri, M. M. and Ahn, K. H. and Diaz, C. I. and Hwang, J. and Kim, S. J. and Yurgelun-Todd, D. A. and Kaufman, M. J. and Renshaw, P. F.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jan}, + Pages = {139--144}, + Title = {{{P}refrontal and temporal gray matter density decreases in opiate dependence}}, + Volume = {184}, + Year = {2006}} + +@article{Lyoo2002, + Author = {Lyoo, I. K. and Renshaw, P. F.}, + Journal = {Biol. Psychiatry}, + Month = {Feb}, + Pages = {195--207}, + Title = {{{M}agnetic resonance spectroscopy: current and future applications in psychiatric research}}, + Volume = {51}, + Year = {2002}} + +@article{Lyoo1997, + Author = {Lyoo, I. K. and Satlin, A. and Lee, C. K. and Renshaw, P. F.}, + Journal = {Psychiatry Res}, + Month = {May}, + Pages = {63--72}, + Title = {{{R}egional atrophy of the corpus callosum in subjects with {A}lzheimer's disease and multi-infarct dementia}}, + Volume = {74}, + Year = {1997}} + +@article{Lyoo1996, + Author = {Lyoo, I. K. and Seol, H. Y. and Byun, H. S. and Renshaw, P. F.}, + Journal = {J Neuropsychiatry Clin Neurosci}, + Pages = {54--59}, + Title = {{{U}nsuspected multiple sclerosis in patients with psychiatric disorders: a magnetic resonance imaging study}}, + Volume = {8}, + Year = {1996}} + +@article{Lyoo2004, + Author = {Lyoo, I. K. and Streeter, C. C. and Ahn, K. H. and Lee, H. K. and Pollack, M. H. and Silveri, M. M. and Nassar, L. and Levin, J. M. and Sarid-Segal, O. and Ciraulo, D. A. and Renshaw, P. F. and Kaufman, M. J.}, + Journal = {Psychiatry Res}, + Month = {Jul}, + Pages = {135--145}, + Title = {{{W}hite matter hyperintensities in subjects with cocaine and opiate dependence and healthy comparison subjects}}, + Volume = {131}, + Year = {2004}} + +@article{Lyoo2006, + Author = {Lyoo, I. K. and Sung, Y. H. and Dager, S. R. and Friedman, S. D. and Lee, J. Y. and Kim, S. J. and Kim, N. and Dunner, D. L. and Renshaw, P. F.}, + Journal = {Bipolar Disord}, + Month = {Feb}, + Pages = {65--74}, + Title = {{{R}egional cerebral cortical thinning in bipolar disorder}}, + Volume = {8}, + Year = {2006}} + +@article{Lyoo2003a, + Author = {Lyoo, I. K. and Yoon, T. and Kang, D. H. and Kwon, J. S.}, + Journal = {Acta Psychiatr Scand}, + Month = {Apr}, + Pages = {298--304}, + Title = {{{P}atterns of changes in temperament and character inventory scales in subjects with obsessive-compulsive disorder following a 4-month treatment}}, + Volume = {107}, + Year = {2003}} + +@article{Lyoo2003, + Author = {Lyoo, I. K. and Youn, T. and Ha, T. H. and Park, H. S. and Kwon, J. S.}, + Journal = {Psychiatry Clin. Neurosci.}, + Month = {Aug}, + Pages = {417--423}, + Title = {{{C}lassification of frequency distributions of diagnostic criteria scores in twelve personality disorders by the curve fitting method}}, + Volume = {57}, + Year = {2003}} + +@article{Maas2003, + Author = {van der Maas, H. L. J. and Kolstein, R. and van der Pligt, J.}, + Journal = {Sociological Methods and Research}, + Pages = {125--152}, + Title = {Sudden Transitions in Attitudes}, + Volume = {32}, + Year = {2003}} + +@article{Maas1992, + Author = {van der Maas, H. L. J. and Molenaar, P. C. 
M.}, + Journal = {Psychological Review}, + Pages = {395--417}, + Title = {Stagewise Cognitive Development: An Application of Catastrophe Theory}, + Volume = {99}, + Year = {1992}} + +@article{Maas2006, + Author = {van der Maas, H. L. J. and Wagenmakers, E.--J. and Visser, I.}, + Title = {Phase--Transitions in the Tradeoff Between Speed and Accuracy. {U}nder Revision.}, + Year = {2006}} + +@article{Maas1998, + Author = {Maas, L. C. and Lukas, S. E. and Kaufman, M. J. and Weiss, R. D. and Daniels, S. L. and Rogers, V. W. and Kukes, T. J. and Renshaw, P. F.}, + Journal = {Am J Psychiatry}, + Pages = {124--126}, + Title = {{{F}unctional magnetic resonance imaging of human brain activation during cue-induced cocaine craving}}, + Volume = {155}, + Year = {1998}} + +@article{Macdonaldinpress, + Author = {Macdonald, R. R.}, + Journal = {Psychological Science}, + Title = {Why Replication Probabilities Depend on Prior Probability Distributions: A Rejoinder to Killeen (2005)}, + Year = {in press}} + +@article{MacDonald2003, + Author = {MacDonald, S. W. S. and Hultsch, D. F. and Dixon, R. A.}, + Journal = {Psychology and Aging}, + Pages = {510--523}, + Title = {Performance Variability is Related to Change in Cognition: {E}vidence From the {V}ictoria Longitudinal Study}, + Volume = {18}, + Year = {2003}} + +@article{Macey2004, + Author = {Macey, D. J. and Rice, W. N. and Freedland, C. S. and Whitlow, C. T. and Porrino, L. J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {384--392}, + Title = {{{P}atterns of functional activity associated with cocaine self-administration in the rat change over time}}, + Volume = {172}, + Year = {2004}} + +@article{Macey2003, + Author = {Macey, D. J. and Smith, H. R. and Nader, M. A. and Porrino, L. J.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {12--16}, + Title = {{{C}hronic cocaine self-administration upregulates the norepinephrine transporter and alters functional activity in the bed nucleus of the stria terminalis of the rhesus monkey}}, + Volume = {23}, + Year = {2003}} + +@article{MacKay1996, + Author = {MacKay, S. and Meyerhoff, D. J. and Constans, J. M. and Norman, D. and Fein, G. and Weiner, M. W.}, + Journal = {Arch. Neurol.}, + Month = {Feb}, + Pages = {167--174}, + Title = {{{R}egional gray and white matter metabolite differences in subjects with {A}{D}, with subcortical ischemic vascular dementia, and elderly controls with 1{H} magnetic resonance spectroscopic imaging}}, + Volume = {53}, + Year = {1996}} + +@article{MacKay1993, + Author = {MacKay, S. and Meyerhoff, D. J. and Dillon, W. P. and Weiner, M. W. and Fein, G.}, + Journal = {Biol. Psychiatry}, + Month = {Aug}, + Pages = {261--264}, + Title = {{{A}lteration of brain phospholipid metabolites in cocaine-dependent polysubstance abusers}}, + Volume = {34}, + Year = {1993}} + +@article{Mackillop2009, + Abstract = {Delayed reward discounting (DRD) is a behavioral economic index of + impulsivity that reflects the extent to which an individual devalues + a reward based on its delay in time (i.e., preference for smaller + immediate rewards relative to larger delayed rewards). Current smokers + exhibit greater DRD compared to non-smokers, but also exhibit greater + DRD compared to ex-smokers, suggesting that either DRD is inversely + associated with successful smoking cessation or that smoking cessation + itself reduces DRD. 
In a sample of treatment-seeking smokers (n=57, + 61\% male, 85\% Caucasian) participating in a randomized controlled + smoking cessation trial, the current study prospectively examined + DRD for money in general and at three magnitudes in relation to time + to the participants' first lapse to smoking. Survival analysis using + Cox proportional-hazards regression revealed that DRD predicted days + to first lapse (ps<.05-.01) and did so beyond nicotine dependence, + sensation-seeking, and income in covariate analyses, with the exception + of small magnitude discounting. In addition, dichotomous comparisons + revealed significantly more impulsive baseline discounting for individuals + who had lapsed by the two-week and eight-week follow-up visits. These + findings indicate that high levels of DRD reflect a risk factor for + poor smoking cessation treatment response. Interrelationships among + the variables assessed and clinical strategies to improve outcomes + for smokers who are high in DRD are discussed.}, + Author = {James Mackillop and Christopher W Kahler}, + Doi = {10.1016/j.drugalcdep.2009.04.020}, + Institution = {Department of Psychology, University of Georgia, Athens, GA, USA; Center for Alcohol and Addiction Studies, Brown University, Providence, RI, USA.}, + Journal = {Drug Alcohol Depend}, + Language = {eng}, + Medline-Pst = {aheadofprint}, + Month = {Jun}, + Owner = {Woo-Young Ahn}, + Pii = {S0376-8716(09)00174-4}, + Pmid = {19570621}, + Timestamp = {2009.08.06}, + Title = {Delayed reward discounting predicts treatment response for heavy drinkers receiving smoking cessation treatment.}, + Url = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.020}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.020}} + +@article{Mackillop2009a, + Abstract = {Delayed reward discounting (DRD) is a behavioral economic index of + impulsivity that reflects the extent to which an individual devalues + a reward based on its delay in time (i.e., preference for smaller + immediate rewards relative to larger delayed rewards). Current smokers + exhibit greater DRD compared to non-smokers, but also exhibit greater + DRD compared to ex-smokers, suggesting that either DRD is inversely + associated with successful smoking cessation or that smoking cessation + itself reduces DRD. In a sample of treatment-seeking smokers (n=57, + 61\% male, 85\% Caucasian) participating in a randomized controlled + smoking cessation trial, the current study prospectively examined + DRD for money in general and at three magnitudes in relation to time + to the participants' first lapse to smoking. Survival analysis using + Cox proportional-hazards regression revealed that DRD predicted days + to first lapse (ps<.05-.01) and did so beyond nicotine dependence, + sensation-seeking, and income in covariate analyses, with the exception + of small magnitude discounting. In addition, dichotomous comparisons + revealed significantly more impulsive baseline discounting for individuals + who had lapsed by the two-week and eight-week follow-up visits. These + findings indicate that high levels of DRD reflect a risk factor for + poor smoking cessation treatment response. 
Interrelationships among + the variables assessed and clinical strategies to improve outcomes + for smokers who are high in DRD are discussed.}, + Author = {James Mackillop and Christopher W Kahler}, + Doi = {10.1016/j.drugalcdep.2009.04.020}, + Institution = {Department of Psychology, University of Georgia, Athens, GA, USA; Center for Alcohol and Addiction Studies, Brown University, Providence, RI, USA.}, + Journal = {Drug Alcohol Depend}, + Language = {eng}, + Medline-Pst = {aheadofprint}, + Month = {Jun}, + Owner = {Woo-Young Ahn}, + Pii = {S0376-8716(09)00174-4}, + Pmid = {19570621}, + Timestamp = {2009.08.06}, + Title = {Delayed reward discounting predicts treatment response for heavy drinkers receiving smoking cessation treatment.}, + Url = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.020}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2009.04.020}} + +@article{MacLean2009, + Author = {MacLean, E. L. and Prior, S. R. and Platt, M. L. and Brannon, E. M.}, + Journal = {J Appl Anim Welf Sci}, + Pages = {73--81}, + Title = {{{P}rimate location preference in a double-tier cage: the effects of illumination and cage height}}, + Volume = {12}, + Year = {2009}} + +@book{MacMillan2004, + Address = {Hillsdale (NJ)}, + Author = {MacMillan, N.A. and Creelman, C. D.}, + Publisher = {Lawrence Erlbaum Associates}, + Title = {Detection Theory: A User's Guide (2nd ed.)}, + Year = {2004}} + +@article{Maddox2004, + Abstract = {Starting from the premise that the purpose of cognitive modeling is + to gain information about the cognitive processes of individuals, + we develop a general theoretical framework for assessment of models + on the basis of tests of the models' ability to yield information + about the true performance patterns of individual subjects and the + processes underlying them. To address the central problem that observed + performance is a composite of true performance and error, we present + formal derivations concerning inference from noisy data to true performance. + Analyses of model fits to simulated data illustrate the usefulness + of our approach for coping with difficult issues of model identifiability + and testability.}, + Author = {W. Todd Maddox and W. K. Estes}, + Institution = {Department of Psychology, Institute for Neuroscience, University of Texas, 1 University Station A8000, Austin, TX 78712-0187, USA. maddox@psy.utexas.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Cognition; Generalization (Psychology); Humans; Models, Psychological}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {1129--1135}, + Pmid = {15875987}, + Timestamp = {2009.08.14}, + Title = {Predicting true patterns of cognitive performance from noisy data.}, + Volume = {11}, + Year = {2004}} + +@article{Magno2006, + Author = {Magno, E. and Foxe, J. J. and Molholm, S. and Robertson, I. H. and Garavan, H.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {4769--4773}, + Title = {{{T}he anterior cingulate and error avoidance}}, + Volume = {26}, + Year = {2006}} + +@article{Magno2008, + Author = {Magno, E. and Sim?es-Franklin, C. and Robertson, I. H. and Garavan, H.}, + Journal = {J Cogn Neurosci}, + Month = {Dec}, + Title = {{{T}he {R}ole of the {D}orsal {A}nterior {C}ingulate in {E}valuating {B}ehavior for {A}chieving {G}ains and {A}voiding {L}osses}}, + Year = {2008}} + +@article{Magno2008a, + Author = {Magno, E. and Yeap, S. and Thakore, J. H. and Garavan, H. and De Sanctis, P. and Foxe, J. 
J.}, + Journal = {Biol. Psychiatry}, + Month = {Sep}, + Pages = {385--391}, + Title = {{{A}re auditory-evoked frequency and duration mismatch negativity deficits endophenotypic for schizophrenia? {H}igh-density electrical mapping in clinically unaffected first-degree relatives and first-episode and chronic schizophrenia}}, + Volume = {64}, + Year = {2008}} + +@article{Maheu2008, + Author = {Maheu, F. S. and Mazzone, L. and Merke, D. P. and Keil, M. F. and Stratakis, C. A. and Pine, D. S. and Ernst, M.}, + Journal = {Dev. Psychopathol.}, + Pages = {1177--1189}, + Title = {{{A}ltered amygdala and hippocampus function in adolescents with hypercortisolemia: a functional magnetic resonance imaging study of {C}ushing syndrome}}, + Volume = {20}, + Year = {2008}} + +@article{Maheu2008a, + Author = {Maheu, F. S. and Merke, D. P. and Schroth, E. A. and Keil, M. F. and Hardin, J. and Poeth, K. and Pine, D. S. and Ernst, M.}, + Journal = {Psychoneuroendocrinology}, + Month = {Feb}, + Pages = {238--245}, + Title = {{{S}teroid abnormalities and the developing brain: declarative memory for emotionally arousing and neutral material in children with congenital adrenal hyperplasia}}, + Volume = {33}, + Year = {2008}} + +@article{Mahomed1992, + Abstract = {Surgical treatment options for interruption of atrioventricular node + reentrant tachycardia include (1) skeletonization of the atrioventricular + node by dissecting it from most of its atrial inputs and (2) discrete + cryosurgery of the perinodal tissues by applying a series of sequential + cryolesions to the atrial tissues immediately adjacent to the atrioventricular + node. Both these techniques attempt to interrupt one of the dual + atrioventricular node conduction pathways while preserving the other. + This report describes 17 consecutive patients who underwent surgical + treatment, 10 patients with skeletonization of the atrioventricular + node and seven patients with discrete perinodal cryosurgery. There + were 10 female and seven male patients and their ages ranged from + 28 to 56 years (mean 38). Two of the 17 patients had Wolff-Parkinson-White + syndrome and their accessory pathways were interrupted before the + atrioventricular nodal reentrant tachycardia was ablated. All the + procedures were performed in a normothermic beating heart while atrioventricular + conduction was monitored closely. In the skeletonization technique, + the right atrial septum was mobilized and the atrioventricular node + exposed anterior to the tendon of the Todaro. The perinodal cryosurgical + procedure was also performed through a right atriotomy and a series + of sequential 3 mm cryolesions were placed around the borders of + the triangle of Koch on the inferior right atrial septum. There were + no operative deaths. Two patients who underwent the skeletonization + operation had heart block necessitating pacemaker therapy. At postoperative + electrophysiologic study, no echoes or atrioventricular nodal reentrant + tachycardia were inducible in any of the 17 patients. All patients + have remained free of arrhythmia recurrence and have required no + antiarrhythmic therapy after a follow-up of 5 to 28 months (mean + 14). 
In conclusion, both atrioventricular node skeletonization and + perinodal cryosurgery successfully ablate atrioventricular nodal + reentrant tachycardia; however, perinodal cryosurgery appears to + be safer in avoiding heart block, is more easily performed, and is + our procedure of choice for the management of medically refractory + atrioventricular nodal reentrant tachycardia.}, + Author = {Y. Mahomed and R. D. King and D. Zipes and W. M. Miles and L. S. Klein and J. W. Brown}, + Institution = {Department of Surgery, Krannert Institute of Cardiology, Indiana University School of Medicine, Indianapolis 46202.}, + Journal = {J Thorac Cardiovasc Surg}, + Keywords = {Adolescent; Adult; Atrioventricular Node, surgery; Bundle-Branch Block, etiology; Cryosurgery; Female; Humans; Male; Methods; Middle Aged; Postoperative Complications; Tachycardia, Atrioventricular Nodal Reentry, surgery}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {1035--1044}, + Pmid = {1405661}, + Timestamp = {2009.08.04}, + Title = {Surgery for atrioventricular node reentry tachycardia. Results with surgical skeletonization of the atrioventricular node and discrete perinodal cryosurgery.}, + Volume = {104}, + Year = {1992}} + +@article{Mahomed1988, + Abstract = {Kent bundle interruption for ventricular preexcitation has been successfully + accomplished utilizing several different surgical techniques. The + external closed-heart technique of Guiraudon combining surgical dissection + and cryoablation has been used to interrupt 52 accessory pathways + in 47 consecutive patients since May, 1985. The 35 male and 12 female + patients ranged in age from 10 to 67 years (mean, 30 years). There + were 25 left free wall, 13 right free wall, 13 posterior septal, + and 1 anterior septal accessory pathways. Preoperative and intraoperative + electrophysiological studies were performed in all patients to induce + the arrhythmia and localize all accessory pathways. The operation + consisted of dissection of the atrioventricular fat pad. Following + this, the delta wave and retrograde accessory pathway conduction + disappeared, thereby indicating successful pathway ablation. In 4 + patients with right-sided accessory pathways, interruption of the + pathway required cryoablation. Cryolesions (made with cryoprobe at + -60 degrees C for two minutes) were created in the region of the + accessory pathway insertion. All accessory pathways were successfully + ablated without any deaths or heart block. Concomitant surgical procedures + were performed in 4 patients. Two patients required a second operation + the next day for an accessory pathway not found at the initial operation. + Three patients had postpericardiotomy syndrome, and 4 had recurrent + atrial fibrillation requiring therapy. The remaining patients have + had no arrhythmia recurrence and have remained drug free after a + follow-up of 1 month to 22 months (mean, 12.5 months). We conclude + that the closed-heart technique of accessory pathway ablation is + safe and reproducible, obviates the necessity for aortic cross-clamping + and cardioplegic arrest, and allows instantaneous monitoring of conduction + over the pathway.}, + Author = {Y. Mahomed and R. D. King and D. P. Zipes and W. M. Miles and E. N. Prystowsky and J. J. Heger and J. W. 
Brown}, + Institution = {Department of Surgery, Indiana University School of Medicine, Indianapolis 46223.}, + Journal = {Ann Thorac Surg}, + Keywords = {Adolescent; Adult; Aged; Arrhythmias, Cardiac, etiology; Atrioventricular Node, pathology/surgery; Child; Cryosurgery; Electrophysiology; Female; Heart Conduction System, surgery; Heart, innervation; Humans; Male; Middle Aged; Neural Pathways, surgery; Postoperative Complications; Wolff-Parkinson-White Syndrome, pathology/surgery}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {495--504}, + Pmid = {3365040}, + Timestamp = {2009.08.04}, + Title = {Surgical division of Wolff-Parkinson-White pathways utilizing the closed-heart technique: a 2-year experience in 47 patients.}, + Volume = {45}, + Year = {1988}} + +@article{Makarov1996a, + Author = {Makarov, M. R. and Birch, J. G. and Delgado, M. R. and Welch, R. D. and Samchukov, M. L.}, + Journal = {Clin. Orthop. Relat. Res.}, + Month = {Aug}, + Pages = {310--316}, + Title = {{{E}ffects of external fixation and limb lengthening on peripheral nerve function}}, + Year = {1996}} + +@article{Makarov1998, + Author = {Makarov, M. R. and Delgado, M. R. and Birch, J. G. and Samchukov, M. L.}, + Journal = {Tech Hand Up Extrem Surg}, + Month = {Mar}, + Pages = {56--63}, + Title = {{{P}eripheral nerve monitoring during surgery for upper extremity lengthening}}, + Volume = {2}, + Year = {1998}} + +@article{Makarov1997, + Author = {Makarov, M. R. and Delgado, M. R. and Birch, J. G. and Samchukov, M. L.}, + Journal = {J Pediatr Orthop}, + Pages = {663--667}, + Title = {{{M}onitoring peripheral nerve function during external fixation of upper extremities}}, + Volume = {17}, + Year = {1997}} + +@article{Makarov1996, + Author = {Makarov, M. R. and Delgado, M. R. and Birch, J. G. and Samchukov, M. L.}, + Journal = {J Pediatr Orthop}, + Pages = {155--160}, + Title = {{{I}ntraoperative {S}{S}{E}{P} monitoring during external fixation procedures in the lower extremities}}, + Volume = {16}, + Year = {1996}} + +@article{Makarov1994, + Author = {Makarov, M. R. and Delgado, M. R. and Samchukov, M. L. and Welch, R. D. and Birch, J. G.}, + Journal = {Clin. Orthop. Relat. Res.}, + Month = {Nov}, + Pages = {254--263}, + Title = {{{S}omatosensory evoked potential evaluation of acute nerve injury associated with external fixation procedures}}, + Year = {1994}} + +@article{Makarov2003, + Author = {Makarov, M. R. and Samchukov, M. L. and Birch, J. G. and Johnston, C. E. and Delgado, M. R. and Rampy, P. L. and Van Allen, E. M.}, + Journal = {J Pediatr Orthop}, + Pages = {470--477}, + Title = {{{A}cute deformity correction of lower extremities under {S}{S}{E}{P}-monitoring control}}, + Volume = {23}, + Year = {2003}} + +@article{Maki2007, + Author = {Maki, P. M. and Ernst, M. and London, E. D. and Mordecai, K. L. and Perschler, P. and Durso, S. C. and Brandt, J. and Dobs, A. and Resnick, S. M.}, + Journal = {J. Clin. Endocrinol. Metab.}, + Month = {Nov}, + Pages = {4107--4114}, + Title = {{{I}ntramuscular testosterone treatment in elderly men: evidence of memory decline and altered brain function}}, + Volume = {92}, + Year = {2007}} + +@article{Makris2004, + Author = {Makris, N. and Gasic, G.P. and Seidman, L.J. and Goldstein, J.M. and Gastfriend, D.R. and Elman, I. and Albaugh, M.D. and Hodge, S.M. and Ziegler, D.A. and Sheahan, F.S. 
and others}, + Journal = {Neuron}, + Number = {4}, + Pages = {729--740}, + Publisher = {Elsevier}, + Title = {{Decreased absolute amygdala volume in cocaine addicts}}, + Volume = {44}, + Year = {2004}} + +@book{Mandelbrot1977, + Address = {San Francisco}, + Author = {Mandelbrot, B. B.}, + Publisher = {Freeman}, + Title = {Fractals: Form, Chance, and Dimension}, + Year = {1977}} + +@article{Mangieri2009, + Author = {Mangieri, R. A. and Hong, K. I. and Piomelli, D. and Sinha, R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jul}, + Pages = {63--72}, + Title = {{{A}n endocannabinoid signal associated with desire for alcohol is suppressed in recently abstinent alcoholics}}, + Volume = {205}, + Year = {2009}} + +@article{Mantle1999, + Author = {Mantle, R. E. and Lach, B. and Delgado, M. R. and Baeesa, S. and B{\'e}langer, G.}, + Journal = {J. Neurosurg.}, + Month = {Sep}, + Pages = {375--383}, + Title = {{{P}redicting the probability of meningioma recurrence based on the quantity of peritumoral brain edema on computerized tomography scanning}}, + Volume = {91}, + Year = {1999}} + +@article{Marsteller2002, + Author = {Marsteller, D. A. and Gerasimov, M. R. and Schiffer, W. K. and Geiger, J. M. and Barnett, C. R. and Schaich Borg, J. and Scott, S. and Ceccarelli, J. and Volkow, N. D. and Molina, P. E. and Alexoff, D. L. and Dewey, S. L.}, + Journal = {Neuropsychopharmacology}, + Month = {Aug}, + Pages = {163--170}, + Title = {{{A}cute handling stress modulates methylphenidate-induced catecholamine overflow in the medial prefrontal cortex}}, + Volume = {27}, + Year = {2002}} + +@article{Martin1990, + Author = {Martin, C. S. and Earleywine, M. and Finn, P. R. and Young, R. D.}, + Journal = {J. Stud. Alcohol}, + Month = {Nov}, + Pages = {500--505}, + Title = {{{S}ome boundary conditions for effective use of alcohol placebos}}, + Volume = {51}, + Year = {1990}} + +@article{Martin2007, + Author = {Martin, E. M. and Nixon, H. and Pitrak, D. L. and Weddington, W. and Rains, N. A. and Nunnally, G. and Grbesic, S. and Gonzalez, R. and Jacobus, J. and Bechara, A.}, + Journal = {J Clin Exp Neuropsychol}, + Month = {Jul}, + Pages = {496--504}, + Title = {{{C}haracteristics of prospective memory deficits in {H}{I}{V}-seropositive substance-dependent individuals: preliminary observations}}, + Volume = {29}, + Year = {2007}} + +@article{Martin2003, + Author = {Martin, E. M. and Pitrak, D. L. and Rains, N. and Grbesic, S. and Pursell, K. and Nunnally, G. and Bechara, A.}, + Journal = {Neuropsychology}, + Month = {Apr}, + Pages = {283--288}, + Title = {{{D}elayed nonmatch-to-sample performance in {H}{I}{V}-seropositive and {H}{I}{V}-seronegative polydrug abusers}}, + Volume = {17}, + Year = {2003}} + +@article{Martin2004, + Author = {Martin, E. M. and Pitrak, D. L. and Weddington, W. and Rains, N. A. and Nunnally, G. and Nixon, H.
and Grbesic, S. and Vassileva, J. and Bechara, A.}, + Journal = {J Int Neuropsychol Soc}, + Month = {Nov}, + Pages = {931--938}, + Title = {{{C}ognitive impulsivity and {H}{I}{V} serostatus in substance dependent males}}, + Volume = {10}, + Year = {2004}} + +@article{Martin2007a, + Author = {Martin, T. J. and Kahn, W. R. and Xiao, R. and Childers, S. R.}, + Journal = {Synapse}, + Month = {Mar}, + Pages = {176--184}, + Title = {{{D}ifferential regional effects of methadone maintenance compared to heroin dependence on mu-opioid receptor desensitization in rat brain}}, + Volume = {61}, + Year = {2007}} + +@article{Martin1997, + Author = {Martin, T. J. and Miller, M. and Dworkin, S. I. and Smith, J. E. and Porrino, L. J.}, + Journal = {Brain Res.}, + Month = {May}, + Pages = {313--318}, + Title = {{{A}lteration of local cerebral glucose utilization following intravenous administration of heroin in {F}ischer 344 rats}}, + Volume = {755}, + Year = {1997}} + +@article{MartinSolch2001, + Author = {Martin-Solch, C. and Magyar, S. and K{\"u}nig, G. and Missimer, J. and Schultz, W. and Leenders, K.}, + Journal = {Experimental Brain Research}, + Number = {3}, + Pages = {278--286}, + Publisher = {Springer}, + Title = {{Changes in brain activation associated with reward processing in smokers and nonsmokers}}, + Volume = {139}, + Year = {2001}} + +@article{Massaro1999, + Author = {Massaro, D. W.}, + Journal = {Trends in Cognitive Sciences}, + Pages = {310--317}, + Title = {Speechreading: {I}llusion or Window Into Pattern Recognition}, + Volume = {3}, + Year = {1999}} + +@book{Massaro1998, + Address = {Cambridge (MA)}, + Author = {Massaro, D. W.}, + Publisher = {MIT Press}, + Title = {Perceiving Talking Faces: {F}rom Speech Perception to a Behavioral Principle}, + Year = {1998}} + +@article{Massaro1991, + Author = {Massaro, D. W. and Cohen, M. M.}, + Journal = {Cognitive Psychology}, + Pages = {558--614}, + Title = {Integration Versus Interactive Activation: {T}he Joint Influence of Stimulus and Context in Perception}, + Volume = {23}, + Year = {1991}} + +@article{Massaro2001, + Author = {Massaro, D. W. and Cohen, M. M. and Campbell, C. S. and Rodriguez, T.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {1--17}, + Title = {Bayes Factor of Model Selection Validates {FLMP}}, + Volume = {8}, + Year = {2001}} + +@article{Massaro1990, + Author = {Massaro, D. W. and Friedman, D.}, + Journal = {Psychological Review}, + Pages = {225--252}, + Title = {Models of Integration Given Multiple Sources of Information}, + Volume = {97}, + Year = {1990}} + +@article{Massaro1986, + Author = {Massaro, D. W. and Hary, J. M.}, + Journal = {Psychological Research}, + Pages = {123--132}, + Title = {Addressing Issues in Letter Recognition}, + Volume = {48}, + Year = {1986}} + +@article{Masten2008, + Author = {Masten, C. L. and Guyer, A. E. and Hodgdon, H. B. and McClure, E. B. and Charney, D. S. and Ernst, M. and Kaufman, J. and Pine, D. S. and Monk, C.
S.}, + Journal = {Child Abuse Negl}, + Month = {Jan}, + Pages = {139--153}, + Title = {{{R}ecognition of facial emotions among maltreated children with high rates of post-traumatic stress disorder}}, + Volume = {32}, + Year = {2008}} + +@article{Mathew1992, + Abstract = {Regional CBF was measured with the 133Xe inhalation technique before + and thrice after smoking marijuana of two strengths and placebo in + 20 physically and mentally healthy male volunteers with a previous + history of exposure to marijuana. They were drug-free at the time + of the study. Blood pressure, pulse rate, end-tidal carbon dioxide, + end-tidal carbon monoxide, and forehead skin perfusion were quantified + during the CBF measurements. Blood samples were drawn for quantification + of plasma levels of delta 9-tetrahydrocannabinol (THC) before and + during the 2 h after smoking marijuana or placebo. Drug-induced intoxication + and changes in mood were quantified with rating scales. Marijuana + smoking was associated with bilateral CBF increase, which was maximal + 30 min later. Greater CBF increases were seen in the frontal region + and right hemisphere. No significant CBF changes were seen after + placebo. Pulse rate and respiration increased significantly after + marijuana but not placebo. Both marijuana and placebo smoking were + associated with increased end-tidal carbon monoxide. CBF increase + in both hemispheres correlated significantly with degree of intoxication, + plasma levels of THC, and pulse rate.}, + Author = {R. J. Mathew and W. H. Wilson and D. F. Humphreys and J. V. Lowe and K. E. Wiethe}, + Institution = {Cerebral Blood Flow Laboratory, Duke University Medical Center, Durham, North Carolina 27710.}, + Journal = {J Cereb Blood Flow Metab}, + Keywords = {Adult; Analysis of Variance; Cerebrovascular Circulation, drug effects/physiology; Humans; Male; Marijuana Smoking, blood/physiopathology; Tetrahydrocannabinol, blood}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {750--758}, + Pmid = {1324250}, + Timestamp = {2009.08.04}, + Title = {Regional cerebral blood flow after marijuana smoking.}, + Volume = {12}, + Year = {1992}} + +@article{Matochik2005, + Author = {Matochik, J. A. and Eldreth, D. A. and Cadet, J. L. and Bolla, K. I.}, + Journal = {Drug Alcohol Depend}, + Month = {Jan}, + Pages = {23--30}, + Title = {{{A}ltered brain tissue composition in heavy marijuana users}}, + Volume = {77}, + Year = {2005}} + +@article{Matochik2003, + Author = {Matochik, J. A. and London, E. D. and Eldreth, D. A. and Cadet, J. L. and Bolla, K. I.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {1095--1102}, + Title = {{{F}rontal cortical tissue composition in abstinent cocaine abusers: a magnetic resonance imaging study}}, + Volume = {19}, + Year = {2003}} + +@article{Matsumoto2009, + Author = {Matsumoto, M. and Hikosaka, O.}, + Journal = {Nature}, + Number = {7248}, + Pages = {837--841}, + Publisher = {Nature Publishing Group}, + Title = {{Two types of dopamine neuron distinctly convey positive and negative motivational signals}}, + Volume = {459}, + Year = {2009}} + +@article{Matthews2004a, + Author = {Matthews, S. C. and Paulus, M. P. and Dimsdale, J. E.}, + Journal = {Psychosomatics}, + Pages = {281--286}, + Title = {{{C}ontribution of functional neuroimaging to understanding neuropsychiatric side effects of interferon in hepatitis {C}}}, + Volume = {45}, + Year = {2004}} + +@article{Matthews2004b, + Author = {Matthews, S. C. 
and Paulus, M. P. and Simmons, A. N. and Nelesen, R. A. and Dimsdale, J. E.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {1151--1156}, + Title = {{{F}unctional subdivisions within anterior cingulate cortex and their relationship to autonomic nervous system function}}, + Volume = {22}, + Year = {2004}} + +@article{Matthews2005, + Author = {Matthews, S. C. and Simmons, A. N. and Arce, E. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {May}, + Pages = {755--760}, + Title = {{{D}issociation of inhibition from error processing using a parametric inhibitory task during functional magnetic resonance imaging}}, + Volume = {16}, + Year = {2005}} + +@article{Matthews2004, + Author = {Matthews, S. C. and Simmons, A. N. and Lane, S. D. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {Sep}, + Pages = {2123--2127}, + Title = {{{S}elective activation of the nucleus accumbens during risk-taking decision making}}, + Volume = {15}, + Year = {2004}} + +@article{Matthews2007, + Author = {Matthews, S. C. and Simmons, A. N. and Strigo, I. and Jang, K. and Stein, M. B. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {223--227}, + Title = {{{H}eritability of anterior cingulate response to conflict: an f{M}{R}{I} study in female twins}}, + Volume = {38}, + Year = {2007}} + +@article{Matthews2008, + Author = {Matthews, S. C. and Strigo, I. A. and Simmons, A. N. and Yang, T. T. and Paulus, M. P.}, + Journal = {J Affect Disord}, + Month = {Nov}, + Pages = {13--20}, + Title = {{{D}ecreased functional coupling of the amygdala and supragenual cingulate is related to increased depression in unmedicated individuals with current major depressive disorder}}, + Volume = {111}, + Year = {2008}} + +@article{Mavaddat2000, + Author = {Mavaddat, N. and Kirkpatrick, P. J. and Rogers, R. D. and Sahakian, B. J.}, + Journal = {Brain}, + Month = {Oct}, + Pages = {2109--2117}, + Title = {{{D}eficits in decision-making in patients with aneurysms of the anterior communicating artery}}, + Volume = {123 ( Pt 10)}, + Year = {2000}} + +@article{May2004, + Author = {May, J. C. and Delgado, M. R. and Dahl, R. E. and Stenger, V. A. and Ryan, N. D. and Fiez, J. A. and Carter, C. S.}, + Journal = {Biol. Psychiatry}, + Month = {Feb}, + Pages = {359--366}, + Title = {{{E}vent-related functional magnetic resonance imaging of reward-related brain circuitry in children and adolescents}}, + Volume = {55}, + Year = {2004}} + +@article{Mayer2006, + Author = {Mayer, E. A. and Naliboff, B. D. and Craig, A. D.}, + Journal = {Gastroenterology}, + Month = {Dec}, + Pages = {1925--1942}, + Title = {{{N}euroimaging of the brain-gut axis: from basic understanding to treatment of functional {G}{I} disorders}}, + Volume = {131}, + Year = {2006}} + +@article{Mazas2000, + Author = {Mazas, C. A. and Finn, P. R. and Steinmetz, J. E.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jul}, + Pages = {1036--1040}, + Title = {{{D}ecision-making biases, antisocial personality, and early-onset alcoholism}}, + Volume = {24}, + Year = {2000}} + +@article{Mazurek2003a, + Author = {Mazurek, M. E. and Roitman, J. D. and Ditterich, J. and Shadlen, M. N.}, + Journal = {Cereb. Cortex}, + Month = {Nov}, + Pages = {1257--1269}, + Title = {{{A} role for neural integrators in perceptual decision making}}, + Volume = {13}, + Year = {2003}} + +@article{Mazurek2002a, + Author = {Mazurek, M. E. and Shadlen, M. N.}, + Journal = {Nat. 
Neurosci.}, + Month = {May}, + Pages = {463--471}, + Title = {{{L}imits to the temporal fidelity of cortical spike rate signals}}, + Volume = {5}, + Year = {2002}} + +@article{McCabe2009, + Author = {McCabe, J. A. and Tobler, P. N. and Schultz, W. and Dickinson, A. and Lupson, V. and Fletcher, P. C.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {1046--1051}, + Title = {{{A}ppetitive and aversive taste conditioning in a computer game influences real-world decision making and subsequent activation in insular cortex}}, + Volume = {29}, + Year = {2009}} + +@article{McCarroll1992, + Author = {McCarroll, D. and Crays, N. and Dunlap, W. P.}, + Journal = {Educational \& Psychological Measurement}, + Pages = {387--393}, + Title = {Sequential {ANOVA}s and {T}ype {I} Error Rates}, + Volume = {52}, + Year = {1992}} + +@article{McClelland1991, + Author = {McClelland, J. L.}, + Journal = {Cognitive Psychology}, + Pages = {1--44}, + Title = {Stochastic Interactive Activation and the Effect of Context on Perception}, + Volume = {23}, + Year = {1991}} + +@article{McClung2008, + Author = {McClung, C. A. and Nestler, E. J.}, + Journal = {Neuropsychopharmacology}, + Month = {Jan}, + Pages = {3--17}, + Title = {{{N}europlasticity mediated by altered gene expression}}, + Volume = {33}, + Year = {2008}} + +@article{McClung2003, + Author = {McClung, C. A. and Nestler, E. J.}, + Journal = {Nat. Neurosci.}, + Month = {Nov}, + Pages = {1208--1215}, + Title = {{{R}egulation of gene expression and cocaine reward by {C}{R}{E}{B} and {D}elta{F}os{B}}}, + Volume = {6}, + Year = {2003}} + +@article{McClung2005, + Author = {McClung, C. A. and Sidiropoulou, K. and Vitaterna, M. and Takahashi, J. S. and White, F. J. and Cooper, D. C. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jun}, + Pages = {9377--9381}, + Title = {{{R}egulation of dopaminergic transmission and cocaine reward by the {C}lock gene}}, + Volume = {102}, + Year = {2005}} + +@article{McClung2004, + Author = {McClung, C. A. and Ulery, P. G. and Perrotti, L. I. and Zachariou, V. and Berton, O. and Nestler, E. J.}, + Journal = {Brain Res. Mol. Brain Res.}, + Month = {Dec}, + Pages = {146--154}, + Title = {{{D}elta{F}os{B}: a molecular switch for long-term adaptation in the brain}}, + Volume = {132}, + Year = {2004}} + +@article{McClure2007b, + Author = {McClure, E. B. and Adler, A. and Monk, C. S. and Cameron, J. and Smith, S. and Nelson, E. E. and Leibenluft, E. and Ernst, M. and Pine, D. S.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {97--105}, + Title = {{f{M}{R}{I} predictors of treatment outcome in pediatric anxiety disorders}}, + Volume = {191}, + Year = {2007}} + +@article{McClure2007a, + Author = {McClure, E. B. and Monk, C. S. and Nelson, E. E. and Parrish, J. M. and Adler, A. and Blair, R. J. and Fromm, S. and Charney, D. S. and Leibenluft, E. and Ernst, M. and Pine, D. S.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jan}, + Pages = {97--106}, + Title = {{{A}bnormal attention modulation of fear circuit function in pediatric generalized anxiety disorder}}, + Volume = {64}, + Year = {2007}} + +@article{McClure2004b, + Author = {McClure, E. B. and Monk, C. S. and Nelson, E. E. and Zarahn, E. and Leibenluft, E. and Bilder, R. M. and Charney, D. S. and Ernst, M. and Pine, D. S.}, + Journal = {Biol. 
Psychiatry}, + Month = {Jun}, + Pages = {1047--1055}, + Title = {{{A} developmental examination of gender differences in brain engagement during evaluation of threat}}, + Volume = {55}, + Year = {2004}} + +@article{McClure2007, + Author = {McClure, E. B. and Parrish, J. M. and Nelson, E. E. and Easter, J. and Thorne, J. F. and Rilling, J. K. and Ernst, M. and Pine, D. S.}, + Journal = {J Abnorm Child Psychol}, + Month = {Aug}, + Pages = {567--577}, + Title = {{{R}esponses to conflict and cooperation in adolescents with anxiety and mood disorders}}, + Volume = {35}, + Year = {2007}} + +@article{McClure2003, + Author = {McClure, S. M. and Berns, G. S. and Montague, P. R.}, + Journal = {Neuron}, + Pages = {339--346}, + Title = {{{T}emporal prediction errors in a passive learning task activate human striatum}}, + Volume = {38}, + Year = {2003}} + +@article{McClure2003a, + Author = {McClure, S. M. and Daw, N. D. and Montague, P. R.}, + Journal = {Trends Neurosci.}, + Pages = {423--428}, + Title = {{{A} computational substrate for incentive salience}}, + Volume = {26}, + Year = {2003}} + +@article{McClure2004, + Author = {McClure, S. M. and Li, J. and Tomlin, D. and Cypert, K. S. and Montague, L. M. and Montague, P. R.}, + Journal = {Neuron}, + Pages = {379--387}, + Title = {{{N}eural correlates of behavioral preference for culturally familiar drinks}}, + Volume = {44}, + Year = {2004}} + +@article{McClure2004a, + Author = {McClure, S. M. and York, M. K. and Montague, P. R.}, + Journal = {Neuroscientist}, + Pages = {260--268}, + Title = {{{T}he neural substrates of reward processing in humans: the modern role of {F}{M}{R}{I}}}, + Volume = {10}, + Year = {2004}} + +@article{McCoy2005, + Author = {McCoy, A.N. and Platt, M.L.}, + Journal = {Nature neuroscience}, + Number = {9}, + Pages = {1220--1227}, + Title = {{Risk-sensitive neurons in macaque posterior cingulate cortex}}, + Volume = {8}, + Year = {2005}} + +@article{McCoy2003, + Author = {McCoy, A. N. and Crowley, J. C. and Haghighian, G. and Dean, H. L. and Platt, M. L.}, + Journal = {Neuron}, + Month = {Dec}, + Pages = {1031--1040}, + Title = {{{S}accade reward signals in posterior cingulate cortex}}, + Volume = {40}, + Year = {2003}} + +@article{McCoy2005a, + Author = {McCoy, A. N. and Platt, M. L.}, + Journal = {J. Comp. Physiol. A Neuroethol. Sens. Neural. Behav. Physiol.}, + Month = {Mar}, + Pages = {201--211}, + Title = {{{E}xpectations and outcomes: decision-making in the primate brain}}, + Volume = {191}, + Year = {2005}} + +@article{McKee2007, + Author = {McKee, S. A. and Carroll, K. M. and Sinha, R. and Robinson, J. E. and Nich, C. and Cavallo, D. and O'Malley, S.}, + Journal = {Drug Alcohol Depend}, + Month = {Nov}, + Pages = {97--101}, + Title = {{{E}nhancing brief cognitive-behavioral therapy with motivational enhancement techniques in cocaine users}}, + Volume = {91}, + Year = {2007}} + +@article{mckirdy2008set, + Author = {McKirdy, J. and Sussmann, JED and Hall, J. and Lawrie, SM and Johnstone, EC and McIntosh, AM}, + Journal = {Psychological medicine}, + Number = {08}, + Pages = {1289--1293}, + Publisher = {Cambridge University Press}, + Title = {{Set shifting and reversal learning in patients with bipolar disorder or schizophrenia}}, + Volume = {39}, + Year = {2008}} + +@article{McLellan1985, + Author = {McLellan, A. T. and Childress, A. R.}, + Journal = {J Subst Abuse Treat}, + Pages = {187--191}, + Title = {{{A}versive therapies for substance abuse: do they work?}}, + Volume = {2}, + Year = {1985}} + +@article{McLellan1986, + Author = {McLellan, A. T. and Childress, A. R. and Ehrman, R. and O'Brien, C. P. and Pashko, S.}, + Journal = {J Subst Abuse Treat}, + Pages = {33--40}, + Title = {{{E}xtinguishing conditioned responses during opiate dependence treatment: turning laboratory findings into clinical procedures}}, + Volume = {3}, + Year = {1986}} + +@article{McLellan1984, + Author = {McLellan, A. T. and Childress, A. R. and Griffith, J. and Woody, G. E.}, + Journal = {Am J Drug Alcohol Abuse}, + Pages = {77--95}, + Title = {{{T}he psychiatrically severe drug abuse patient: methadone maintenance or therapeutic community?}}, + Volume = {10}, + Year = {1984}} + +@article{McMillen2006, + Author = {Mc{M}illen, T. and Holmes, P.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {30--57}, + Title = {The Dynamics of Choice Among Multiple Alternatives}, + Volume = {50}, + Year = {2006}} + +@article{Mechelli2004, + Author = {Mechelli, A. and Crinion, J. T. and Noppeney, U. and O'Doherty, J. and Ashburner, J. and Frackowiak, R. S. and Price, C. J.}, + Journal = {Nature}, + Month = {Oct}, + Pages = {757}, + Title = {{{N}eurolinguistics: structural plasticity in the bilingual brain}}, + Volume = {431}, + Year = {2004}} + +@techreport{Meevis2005, + Author = {Meevis, M. and Luth, I. and {vom Kothen}, L. and Koomen, A. and Verouden, J.}, + Institution = {University of Amsterdam}, + Title = {{IQ} en Reactiesnelheid: Een Experiment en een Wiskundige Analyse}, + Year = {2005}} + +@article{Mehta2003, + Author = {Mehta, M.A. and McGowan, S.W. and Lawrence, A.D. and Aitken, M.R.F. and Montgomery, A.J. and Grasby, P.M.}, + Journal = {Neuroimage}, + Number = {4}, + Pages = {1982--1994}, + Publisher = {Elsevier}, + Title = {{Systemic sulpiride modulates striatal blood flow: relationships to spatial working memory and planning}}, + Volume = {20}, + Year = {2003}} + +@article{Mendrek2006, + Author = {Mendrek, A. and Monterosso, J. and Simon, S. L. and Jarvik, M. and Brody, A. and Olmstead, R. and Domier, C. P. and Cohen, M. S. and Ernst, M. and London, E. D.}, + Journal = {Addict Behav}, + Month = {May}, + Pages = {833--844}, + Title = {{{W}orking memory in cigarette smokers: comparison to non-smokers and effects of abstinence}}, + Volume = {31}, + Year = {2006}} + +@article{Mense1988, + Author = {Mense, S. and Craig, A. D.}, + Journal = {Neuroscience}, + Month = {Sep}, + Pages = {1023--1035}, + Title = {{{S}pinal and supraspinal terminations of primary afferent fibers from the gastrocnemius-soleus muscle in the cat}}, + Volume = {26}, + Year = {1988}} + +@article{Messer2000, + Author = {Messer, C. J. and Eisch, A. J. and Carlezon, W. A. and Whisler, K. and Shen, L. and Wolf, D. H. and Westphal, H. and Collins, F. and Russell, D. S. and Nestler, E. J.}, + Journal = {Neuron}, + Month = {Apr}, + Pages = {247--257}, + Title = {{{R}ole for {G}{D}{N}{F} in biochemical and behavioral adaptations to drugs of abuse}}, + Volume = {26}, + Year = {2000}} + +@article{Meyer1988, + Author = {Meyer, D. E. and Irwin, D. E. and Osman, A. M. and Kounios, J.}, + Journal = {Psychological Review}, + Pages = {183--237}, + Title = {The Dynamics of Cognition and Action: Mental Processes Inferred From Speed--accuracy Decomposition}, + Volume = {95}, + Year = {1988}} + +@article{Meyerhoff1999, + Author = {Meyerhoff, D. J. and Bloomer, C. and Cardenas, V. and Norman, D. and Weiner, M. W. and Fein, G.}, + Journal = {Neurology}, + Month = {Mar}, + Pages = {995--1003}, + Title = {{{E}levated subcortical choline metabolites in cognitively and clinically asymptomatic {H}{I}{V}+ patients}}, + Volume = {52}, + Year = {1999}} + +@article{Meyerhoff1993, + Author = {Meyerhoff, D. J. and MacKay, S. and Bachman, L. and Poole, N. and Dillon, W. P. and Weiner, M. W. and Fein, G.}, + Journal = {Neurology}, + Month = {Mar}, + Pages = {509--515}, + Title = {{{R}educed brain {N}-acetylaspartate suggests neuronal loss in cognitively impaired human immunodeficiency virus-seropositive individuals: in vivo 1{H} magnetic resonance spectroscopic imaging}}, + Volume = {43}, + Year = {1993}} + +@article{Meyerhoff1994, + Author = {Meyerhoff, D. J. and MacKay, S. and Constans, J. M. and Norman, D. and Van Dyke, C. and Fein, G. and Weiner, M. W.}, + Journal = {Ann. Neurol.}, + Month = {Jul}, + Pages = {40--47}, + Title = {{{A}xonal injury and membrane alterations in {A}lzheimer's disease suggested by in vivo proton magnetic resonance spectroscopic imaging}}, + Volume = {36}, + Year = {1994}} + +@article{Meyerhoff1995, + Author = {Meyerhoff, D. J. and MacKay, S. and Sappey-Marinier, D. and Deicken, R. and Calabrese, G. and Dillon, W. P. and Weiner, M. W. and Fein, G.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jun}, + Pages = {685--692}, + Title = {{{E}ffects of chronic alcohol abuse and {H}{I}{V} infection on brain phosphorus metabolites}}, + Volume = {19}, + Year = {1995}} + +@article{Meyerhoff1996, + Author = {Meyerhoff, D. J. and Weiner, M. W. and Fein, G.}, + Journal = {AJNR Am J Neuroradiol}, + Month = {May}, + Pages = {973--978}, + Title = {{{D}eep gray matter structures in {H}{I}{V} infection: a proton {M}{R} spectroscopic study}}, + Volume = {17}, + Year = {1996}} + +@book{Mikosch1998, + Address = {Singapore}, + Author = {Mikosch, T.}, + Publisher = {World Scientific}, + Title = {Elementary Stochastic Calculus with Finance in View}, + Year = {1998}} + +@article{Milham2005, + Author = {Milham, M. P. and Nugent, A. C. and Drevets, W. C. and Dickstein, D. P. and Leibenluft, E. and Ernst, M. and Charney, D. and Pine, D. S.}, + Journal = {Biol.
Psychiatry}, + Month = {May}, + Pages = {961--966}, + Title = {{{S}elective reduction in amygdala volume in pediatric anxiety disorders: a voxel-based morphometry investigation}}, + Volume = {57}, + Year = {2005}} + +@article{Miller1989, + Author = {Miller, S. B. and Finn, P. R. and Ditto, B. and Pihl, R. O.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {505--507}, + Title = {{{R}isk for hypertension in female members of multigenerational male-limited alcoholic families}}, + Volume = {13}, + Year = {1989}} + +@article{Minassian2009, + Author = {Minassian, A. and Henry, B. L. and Geyer, M. A. and Paulus, M. P. and Young, J. W. and Perry, W.}, + Journal = {J Affect Disord}, + Month = {May}, + Title = {{{T}he quantitative assessment of motor activity in mania and schizophrenia}}, + Year = {2009}} + +@article{Minassian2004, + Author = {Minassian, A. and Paulus, M. P. and Perry, W.}, + Journal = {J Affect Disord}, + Month = {Oct}, + Pages = {203--208}, + Title = {{{I}ncreased sensitivity to error during decision-making in bipolar disorder patients with acute mania}}, + Volume = {82}, + Year = {2004}} + +@article{Mintzer2002, + Author = {Mintzer, M.Z. and Stitzer, M.L.}, + Journal = {Drug and alcohol dependence}, + Number = {1}, + Pages = {41--51}, + Publisher = {Elsevier}, + Title = {{Cognitive impairment in methadone maintenance patients}}, + Volume = {67}, + Year = {2002}} + +@article{Mintzer2006, + Author = {Mintzer, M. Z. and Kuwabara, H. and Alexander, M. and Brasic, J. R. and Ye, W. and Ernst, M. and Griffiths, R. R. and Wong, D. F.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Nov}, + Pages = {445--461}, + Title = {{{D}ose effects of triazolam on brain activity during episodic memory encoding: a {P}{E}{T} study}}, + Volume = {188}, + Year = {2006}} + +@article{Mirenowicz1996, + Author = {Mirenowicz, J. and Schultz, W.}, + Journal = {Nature}, + Pages = {449--451}, + Publisher = {Nature Publishing Group}, + Title = {{Preferential activation of midbrain dopamine neurons by appetitive rather than aversive stimuli}}, + Volume = {379}, + Year = {1996}} + +@article{Mitchell1996, + Author = {Mitchell, C. S. and Shear, M. S. and Bolla, K. I. and Schwartz, B. S.}, + Journal = {J. Occup. Environ. Med.}, + Month = {Apr}, + Pages = {372--378}, + Title = {{{C}linical evaluation of 58 organolead manufacturing workers}}, + Volume = {38}, + Year = {1996}} + +@article{Mitchell1990, + Author = {Mitchell, J. B. and Stewart, J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Mar}, + Pages = {643--650}, + Title = {{{F}acilitation of sexual behaviors in the male rat associated with intra-{V}{T}{A} injections of opiates}}, + Volume = {35}, + Year = {1990}} + +@article{Mitchell2005, + Abstract = {BACKGROUND: Impaired decision-making is one diagnostic characteristic + of alcoholism. Quantifying decision-making with rapid and robust + laboratory-based measures is thus desirable for the testing of novel + treatments for alcoholism. Previous research has demonstrated the + utility of delay discounting (DD) tasks for quantifying differences + in decision-making in substance abusers and normal controls. In DD + paradigms subjects choose between a small, immediate reward and a + larger, delayed reward. METHODS: We used a novel computerized DD + task to demonstrate that abstinent alcoholics (AA, n=14) choose the + larger, delayed option significantly less often than control subjects + (n=14; p<0.02). This difference in choice tendency was independent + of subject age, gender, years of education, or socio-economic status.
+ RESULTS: All subjects discounted as a function of reward delay and + amount, with alcoholics demonstrating steeper discounting curves + for both variables. This tendency to discount delayed rewards was + positively correlated with subjective reports of both alcohol addiction + severity (Drug Use Screening Inventory-Revised, Domain 1, p<0.01), + and impulsivity (Barratt Impulsivity Scale-11, p<0.004). Novel aspects + of this new paradigm include an element of time pressure, an additional + experimental condition that evaluated motor impulsivity by assessing + the ability to inhibit a pre-potent response, and another control + condition to requiring non-subjective choice. CONCLUSIONS: Non-alcoholic + controls and alcoholics did not differ on motor impulsivity or non-subjective + choice, suggesting that the differing choice behavior of the two + groups was due mainly to differences in cognitive impulsivity.}, + Author = {Jennifer M Mitchell and Howard L Fields and Mark D'Esposito and Charlotte A Boettiger}, + Institution = {Ernest Gallo Clinic and Research Center, University of California, San Francisco, CA 94608, USA.}, + Journal = {Alcohol Clin Exp Res}, + Keywords = {Adult; Aging, psychology; Alcoholism, psychology; Data Interpretation, Statistical; Decision Making; Education; Family, psychology; Female; Humans; Impulsive Behavior, psychology; Male; Neuropsychological Tests; Psychometrics; Reward; Sex Characteristics; Social Class; Temperance}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {12}, + Owner = {Woo-Young Ahn}, + Pages = {2158--2169}, + Pii = {00000374-200512000-00010}, + Pmid = {16385186}, + Timestamp = {2009.08.06}, + Title = {Impulsive responding in alcoholics.}, + Volume = {29}, + Year = {2005}} + +@article{Mitchell1999, + Author = {Mitchell, S. H.}, + Journal = {Psychopharmacology}, + Owner = {ahnw}, + Pages = {455--464}, + Timestamp = {2007.05.01}, + Title = {Measures of impulsivity in cigarette smokers and non-smokers}, + Volume = {146}, + Year = {1999}} + +@article{Moazzezi2008, + Author = {Moazzezi, R. and Dayan, P.}, + Journal = {Network}, + Pages = {236--252}, + Title = {{{C}hange-based inference for invariant discrimination}}, + Volume = {19}, + Year = {2008}} + +@article{Mobbs2003, + Author = {Mobbs, D. and Greicius, M.D. and Abdel-Azim, E. and Menon, V. and Reiss, A.L.}, + Journal = {Neuron}, + Number = {5}, + Pages = {1041--1048}, + Publisher = {Elsevier}, + Title = {{Humor modulates the mesolimbic reward centers}}, + Volume = {40}, + Year = {2003}} + +@article{Modha1998, + Author = {Modha, D. S. and Masry, E.}, + Journal = {IEEE Transactions on Information Theory}, + Pages = {117--133}, + Title = {Memory-universal Prediction of Stationary Random Processes}, + Volume = {44}, + Year = {1998}} + +@article{Modha1998a, + Author = {Modha, D. S. and Masry, E.}, + Journal = {Machine Learning}, + Pages = {5--39}, + Title = {Prequential and Cross-validated Regression Estimation}, + Volume = {33}, + Year = {1998}} + +@article{Moeller2009, + Author = {Moeller, S. J. and Maloney, T. and Parvaz, M. A. and Dunning, J. P. and Alia-Klein, N. and Woicik, P. A. and Hajcak, G. and Telang, F. and Wang, G. J. and Volkow, N. D. and Goldstein, R. Z.}, + Journal = {Biol. Psychiatry}, + Month = {Jul}, + Pages = {169--176}, + Title = {{{E}nhanced choice for viewing cocaine pictures in cocaine addiction}}, + Volume = {66}, + Year = {2009}} + +@incollection{Molenaar1996, + Address = {San Diego}, + Author = {Molenaar, P. C. M. and Hartelman, P. A. I.}, + Booktitle = {Categorical Variables in Developmental Research}, + Editor = {von Eye, A. and Clogg, C. C.}, + Pages = {107--130}, + Publisher = {Academic Press}, + Title = {Catastrophe Theory of Stage Transitions in Metrical and Discrete Stochastic Systems}, + Year = {1996}} + +@article{Molina2001, + Author = {Molina, P. E. and Ahmed, N. and Gatley, J. and Volkow, N. D. and Abumrad, N. N.}, + Journal = {Life Sci.}, + Month = {Sep}, + Pages = {1897--1906}, + Title = {{{L}-tryptophan attenuation of the dopaminergic and behavioral responses to cocaine}}, + Volume = {69}, + Year = {2001}} + +@article{Momenan2004, + Author = {Momenan, R. and Rawlings, R. and Fong, G. and Knutson, B.
and Hommer, D.}, + Journal = {Neuroimage}, + Month = {Mar}, + Pages = {965--972}, + Title = {{{V}oxel-based homogeneity probability maps of gray matter in groups: assessing the reliability of functional effects}}, + Volume = {21}, + Year = {2004}} + +@article{Monk2003, + Author = {Monk, C. S. and Grillon, C. and Baas, J. M. and McClure, E. B. and Nelson, E. E. and Zarahn, E. and Charney, D. S. and Ernst, M. and Pine, D. S.}, + Journal = {Dev Psychobiol}, + Month = {Dec}, + Pages = {359--366}, + Title = {{{A} neuroimaging method for the study of threat in adolescents}}, + Volume = {43}, + Year = {2003}} + +@article{Monk2008a, + Author = {Monk, C. S. and Klein, R. G. and Telzer, E. H. and Schroth, E. A. and Mannuzza, S. and Moulton, J. L. and Guardino, M. and Masten, C. L. and McClure-Tone, E. B. and Fromm, S. and Blair, R. J. and Pine, D. S. and Ernst, M.}, + Journal = {Am J Psychiatry}, + Month = {Jan}, + Pages = {90--98}, + Title = {{{A}mygdala and nucleus accumbens activation to emotional facial expressions in children and adolescents at risk for major depression}}, + Volume = {165}, + Year = {2008}} + +@article{Monk2003a, + Author = {Monk, C. S. and McClure, E. B. and Nelson, E. E. and Zarahn, E. and Bilder, R. M. and Leibenluft, E. and Charney, D. S. and Ernst, M. and Pine, D. S.}, + Journal = {Neuroimage}, + Month = {Sep}, + Pages = {420--428}, + Title = {{{A}dolescent immaturity in attention-related brain engagement to emotional facial expressions}}, + Volume = {20}, + Year = {2003}} + +@article{Monk2006, + Author = {Monk, C. S. and Nelson, E. E. and McClure, E. B. and Mogg, K. and Bradley, B. P. and Leibenluft, E. and Blair, R. J. and Chen, G. and Charney, D. S. and Ernst, M. and Pine, D. S.}, + Journal = {Am J Psychiatry}, + Month = {Jun}, + Pages = {1091--1097}, + Title = {{{V}entrolateral prefrontal cortex activation and attentional bias in response to angry faces in adolescents with generalized anxiety disorder}}, + Volume = {163}, + Year = {2006}} + +@article{Monk2004, + Author = {Monk, C. S. and Nelson, E. E. and Woldehawariat, G. and Montgomery, L. A. and Zarahn, E. and McClure, E. B. and Guyer, A. E. and Leibenluft, E. and Charney, D. S. and Ernst, M. and Pine, D. S.}, + Journal = {Biol. Psychiatry}, + Month = {Oct}, + Pages = {607--610}, + Title = {{{E}xperience-dependent plasticity for attention to threat: {B}ehavioral and neurophysiological evidence in humans}}, + Volume = {56}, + Year = {2004}} + +@article{Monk2008, + Author = {Monk, C. S. and Telzer, E. H. and Mogg, K. and Bradley, B. P. and Mai, X. and Louro, H. M. and Chen, G. and McClure-Tone, E. B. and Ernst, M. and Pine, D. S.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {May}, + Pages = {568--576}, + Title = {{{A}mygdala and ventrolateral prefrontal cortex activation to masked angry faces in children and adolescents with generalized anxiety disorder}}, + Volume = {65}, + Year = {2008}} + +@article{Montague2008, + Author = {Montague, P. R.}, + Journal = {Curr. Biol.}, + Month = {Jul}, + Pages = {R584--585}, + Title = {{{F}ree will}}, + Volume = {18}, + Year = {2008}} + +@article{Montague2007, + Author = {Montague, P. R.}, + Journal = {Funct. Neurol.}, + Pages = {219--234}, + Title = {{{N}euroeconomics: a view from neuroscience}}, + Volume = {22}, + Year = {2007}} + +@article{Montague2007a, + Author = {Montague, P. R.}, + Journal = {Trends Cogn. Sci. (Regul. 
Ed.)}, + Month = {Oct}, + Pages = {407--409}, + Title = {{{T}he first wave}}, + Volume = {11}, + Year = {2007}} + +@article{Montague2008a, + Author = {Montague, P. R. and Assad, J.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Apr}, + Pages = {117--119}, + Title = {{{E}ditorial overview}}, + Volume = {18}, + Year = {2008}} + +@article{Montague2002, + Author = {Montague, P. R. and Berns, G. S.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {265--284}, + Title = {{{N}eural economics and the biological substrates of valuation}}, + Volume = {36}, + Year = {2002}} + +@article{Montague2002a, + Author = {Montague, P. R. and Berns, G. S. and Cohen, J. D. and McClure, S. M. and Pagnoni, G. and Dhamala, M. and Wiest, M. C. and Karpov, I. and King, R. D. and Apple, N. and Fisher, R. E.}, + Journal = {Neuroimage}, + Month = {Aug}, + Pages = {1159--1164}, + Title = {{{H}yperscanning: simultaneous f{M}{R}{I} during linked social interactions}}, + Volume = {16}, + Year = {2002}} + +@article{Montague2007b, + Author = {Montague, P. R. and Chiu, P. H.}, + Journal = {Nat. Neurosci.}, + Month = {Feb}, + Pages = {137--138}, + Title = {{{F}or goodness' sake}}, + Volume = {10}, + Year = {2007}} + +@article{Montague1996, + Author = {Montague, P. R. and Dayan, P. and Sejnowski, T. J.}, + Journal = {J. Neurosci.}, + Pages = {1936--1947}, + Title = {{{A} framework for mesencephalic dopamine systems based on predictive {H}ebbian learning}}, + Volume = {16}, + Year = {1996}} + +@article{Montague2004, + Author = {Montague, P. R. and Hyman, S. E. and Cohen, J. D.}, + Journal = {Nature}, + Month = {Oct}, + Pages = {760--767}, + Title = {{{C}omputational roles for dopamine in behavioural control}}, + Volume = {431}, + Year = {2004}} + +@article{Montague2007c, + Author = {Montague, P. R. and King-Casas, B.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Dec}, + Pages = {514--519}, + Title = {{{E}fficient statistics, common currencies and the problem of reward-harvesting}}, + Volume = {11}, + Year = {2007}} + +@article{Montague2006, + Author = {Montague, P. R. and King-Casas, B. and Cohen, J. D.}, + Journal = {Annu. Rev. Neurosci.}, + Pages = {417--448}, + Title = {{{I}maging valuation models in human choice}}, + Volume = {29}, + Year = {2006}} + +@article{Montague2007d, + Author = {Montague, P. R. and Lohrenz, T.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {14--18}, + Title = {{{T}o detect and correct: norm violations and their enforcement}}, + Volume = {56}, + Year = {2007}} + +@article{Montague2004a, + Author = {Montague, P. R. and McClure, S. M. and Baldwin, P. R. and Phillips, P. E. and Budygin, E. A. and Stuber, G. D. and Kilpatrick, M. R. and Wightman, R. M.}, + Journal = {J. Neurosci.}, + Month = {Feb}, + Pages = {1754--1759}, + Title = {{{D}ynamic gain control of dopamine delivery in freely moving animals}}, + Volume = {24}, + Year = {2004}} + +@article{Monterosso2007a, + Author = {Monterosso, J. and Ainslie, G.}, + Journal = {Drug Alcohol Depend}, + Month = {Sep}, + Pages = {S100--111}, + Title = {{{T}he behavioral economics of will in recovery from addiction}}, + Volume = {90 Suppl 1}, + Year = {2007}} + +@article{Monterosso2007b, + Abstract = {Behavioral economic studies demonstrate that rewards are discounted + proportionally with their delay (hyperbolic discounting). Hyperbolic + discounting implies temporary preference for smaller rewards when + they are imminent, and this concept has been widely considered by + researchers interested in the causes of addictive behavior. 
Far less + consideration has been given to the fact that systematic preference + reversal also predicts various self-control phenomena, which may + also be analyzed from a behavioral economic perspective. Here we + summarize self-control phenomena predicted by hyperbolic discounting, + particularly with application to the field of addiction. Of greatest + interest is the phenomenon of choice bundling, an increase in motivation + to wait for delayed rewards that can be expected to result from making + choices in whole categories. Specifically, when a person's expectations + about her own future behavior are conditional upon her current behavior, + the value of these expectations is added to the contingencies for + the current behavior, resulting in reduced impulsivity. Hyperbolic + discounting provides a bottom-up basis for the intuitive learning + of choice bundling, the properties of which match common descriptions + of willpower. We suggest that the bundling effect can also be discerned + in the advice of 12-step programs.}, + Author = {John Monterosso and George Ainslie}, + Doi = {10.1016/j.drugalcdep.2006.09.004}, + Institution = {Department of Psychiatry and Biobehavioral Sciences, University of California Los Angeles, Los Angeles, CA 90024, USA. jmont@ucla.edu}, + Journal = {Drug Alcohol Depend}, + Keywords = {Behavior Therapy; Choice Behavior; Conditioning, Classical; Drive; Humans; Impulsive Behavior, psychology/rehabilitation; Internal-External Control; Motivation; Rationalization; Reinforcement (Psychology); Self-Help Groups; Substance-Related Disorders, psychology/rehabilitation; Temperance, psychology; Volition}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Owner = {Woo-Young Ahn}, + Pages = {S100--S111}, + Pii = {S0376-8716(06)00334-6}, + Pmid = {17034958}, + Timestamp = {2009.08.06}, + Title = {The behavioral economics of will in recovery from addiction.}, + Url = {http://dx.doi.org/10.1016/j.drugalcdep.2006.09.004}, + Volume = {90 Suppl 1}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2006.09.004}} + +@article{Monterosso1999, + Author = {Monterosso, J. and Ainslie, G.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {339--347}, + Title = {{{B}eyond discounting: possible experimental models of impulse control}}, + Volume = {146}, + Year = {1999}} + +@article{Monterosso2001a, + Author = {Monterosso, J. and Ehrman, R. and Napier, K. L. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Addiction}, + Month = {Dec}, + Pages = {1825--1837}, + Title = {{{T}hree decision-making tasks in cocaine-dependent patients: do they measure the same construct?}}, + Volume = {96}, + Year = {2001}} + +@article{Monterosso2001b, + Abstract = {AIMS: Substance-abusing populations perform poorly on decision-making + tasks related to delay and risk. These tasks include: (1) the Delay + Discounting Procedure (DDP), in which choices are made between smaller-sooner + and later-larger rewards, (2) the Gambling Task (GT), in which choices + are made between alternatives varying in pay-off and punishment, + and (3) the Rogers Decision-Making Task (RDMT) in which subjects + choose between higher or lower probability gambles. We examine the + interrelationship among these tasks. DESIGN: A test battery was created + which included the DDP, GT and RDMT, as well as measures of impulsivity, + intellectual functioning and drug use. SETTING: Subjects completed + the test battery at an outpatient center, prior to beginning 12 weeks + of treatment. PARTICIPANTS: Thirty-two treatment-seeking cocaine + dependent individuals (primarily African-American males) participated. + FINDINGS: Performance on the GT was significantly correlated with + performance on the DDP (r = 0.37; p = 0.04). Reaction times on the + RDMT correlated with performance on the GT (r = 0.36, p = 0.04) and + DDP (r = 0.33, p = 0.07), but actual choices on the RDMT did not + (p > 0.9 for both). While no significant relationships were observed + between task performance and impulsivity, IQ estimate was positively + correlated with both the GT (r = 0.44, p = 0.01) and RDMT (r = 0.41, + p = 0.021). Split half reliability data indicated higher reliability + when using only data from the latter half of the GT (r = 0.92 vs. + r = 0.80). CONCLUSIONS: These data offer preliminary evidence of + overlap in the decision-making functioning tapped by these tasks.
+ Possible implications for drug-taking behavior are discussed.}, + Author = {J. Monterosso and R. Ehrman and K. L. Napier and C. P. O'Brien and A. R. Childress}, + Doi = {10.1080/09652140120089571}, + Institution = {Center for the Study of Addictions, Department of Psychiatry, University of Pennsylvania School of Medicine, Philadelphia 19104-6178, USA. jmont@psych.upenn.edu}, + Journal = {Addiction}, + Keywords = {Adult; African Americans; Cocaine-Related Disorders, ethnology/psychology; Decision Making; Female; Gambling, psychology; Humans; Impulsive Behavior, ethnology/psychology; Male; Personality Assessment; Psychological Tests; Reaction Time; Reproducibility of Results; Reward; Sensitivity and Specificity; Wechsler Scales}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {12}, + Owner = {Woo-Young Ahn}, + Pages = {1825--1837}, + Pmid = {11784475}, + Timestamp = {2009.08.06}, + Title = {Three decision-making tasks in cocaine-dependent patients: do they measure the same construct?}, + Url = {http://dx.doi.org/10.1080/09652140120089571}, + Volume = {96}, + Year = {2001}, + Bdsk-Url-1 = {http://dx.doi.org/10.1080/09652140120089571}} + +@article{Monterosso2001c, + Abstract = {AIMS: Substance-abusing populations perform poorly on decision-making + tasks related to delay and risk. These tasks include: (1) the Delay + Discounting Procedure (DDP), in which choices are made between smaller-sooner + and later-larger rewards, (2) the Gambling Task (GT), in which choices + are made between alternatives varying in pay-off and punishment, + and (3) the Rogers Decision-Making Task (RDMT) in which subjects + choose between higher or lower probability gambles. We examine the + interrelationship among these tasks. DESIGN: A test battery was created + which included the DDP, GT and RDMT, as well as measures of impulsivity, + intellectual functioning and drug use. SETTING: Subjects completed + the test battery at an outpatient center, prior to beginning 12 weeks + of treatment. PARTICIPANTS: Thirty-two treatment-seeking cocaine + dependent individuals (primarily African-American males) participated. + FINDINGS: Performance on the GT was significantly correlated with + performance on the DDP (r = 0.37; p = 0.04). Reaction times on the + RDMT correlated with performance on the GT (r = 0.36, p = 0.04) and + DDP (r = 0.33, p = 0.07), but actual choices on the RDMT did not + (p > 0.9 for both). While no significant relationships were observed + between task performance and impulsivity, IQ estimate was positively + correlated with both the GT (r = 0.44, p = 0.01) and RDMT (r = 0.41, + p = 0.021). Split half reliability data indicated higher reliability + when using only data from the latter half of the GT (r = 0.92 vs. + r = 0.80). CONCLUSIONS: These data offer preliminary evidence of + overlap in the decision-making functioning tapped by these tasks. + Possible implications for drug-taking behavior are discussed.}, + Author = {J. Monterosso and R. Ehrman and K. L. Napier and C. P. O'Brien and A. R. Childress}, + Doi = {10.1080/09652140120089571}, + Institution = {Center for the Study of Addictions, Department of Psychiatry, University of Pennsylvania School of Medicine, Philadelphia 19104-6178, USA. 
+ +@article{Monterosso2007, + Author = {Monterosso, J. R. and Ainslie, G. and Xu, J. and Cordova, X. and Domier, C. P. and London, E. D.}, + Journal = {Hum Brain Mapp}, + Month = {May}, + Pages = {383--393}, + Title = {{{F}rontoparietal cortical activity of methamphetamine-dependent and comparison subjects performing a delay discounting task}}, + Volume = {28}, + Year = {2007}} + +@article{Monterosso2007c, + Abstract = {Relative to individuals who do not have addictive disorders, drug + abusers exhibit greater devaluation of rewards as a function of their + delay ("delay discounting"). The present study sought to extend this + finding to methamphetamine (MA) abusers and to help understand its + neural basis. MA abusers (n = 12) and control subjects who did not + use illicit drugs (n = 17) participated in tests of delay discounting + with hypothetical money rewards. We then used a derived estimate + of each individual's delay discounting to generate a functional magnetic + resonance imaging probe task consisting of three conditions: "hard + choices," requiring selections between "smaller, sooner" and "larger, + later" alternatives that were similarly valued given the individual's + delay discounting; "easy choices," in which alternatives differed + dramatically in value; and a "no choice" control condition. MA abusers + exhibited more delay discounting than control subjects (P < 0.05). + Across groups, the "hard choice > no choice" contrast revealed significant + effects in the ventrolateral prefrontal cortex, dorsolateral prefrontal + cortex (DLPFC), dorsal anterior cingulate cortex, and areas surrounding + the intraparietal sulcus (IPS). With group comparisons limited to + these clusters, the "hard choice > easy choice" contrast indicated + significant group differences in task-related activity within the + left DLPFC and right IPS; qualitatively similar nonsignificant effects + were present in the other clusters tested. Whereas control subjects + showed less recruitment associated with easy than with hard choices, + MA abusers generally did not. Correlational analysis did not indicate + a relationship between this anomaly in frontoparietal recruitment + and greater degree of delay discounting exhibited by MA abusers.
+ Therefore, while apparent inefficiency of cortical processing related + to decision-making in MA abusers may contribute to the neural basis + of enhanced delay discounting by this population, other factors remain + to be identified.}, + Author = {John R Monterosso and George Ainslie and Jiansong Xu and Xochitl Cordova and Catherine P Domier and Edythe D London}, + Doi = {10.1002/hbm.20281}, + Institution = {Department of Psychiatry and Biobehavioral Sciences, University of California, Los Angeles, California 90024, USA. jmont@ucla.edu}, + Journal = {Hum Brain Mapp}, + Keywords = {Adult; Amphetamine-Related Disorders, pathology/physiopathology; Brain Mapping; Case-Control Studies; Cerebral Cortex, blood supply/physiopathology; Choice Behavior, physiology; Female; Humans; Image Processing, Computer-Assisted, methods; Magnetic Resonance Imaging, methods; Male; Neuropsychological Tests; Oxygen, blood; Reward}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {383--393}, + Pmid = {16944492}, + Timestamp = {2009.08.06}, + Title = {Frontoparietal cortical activity of methamphetamine-dependent and comparison subjects performing a delay discounting task.}, + Url = {http://dx.doi.org/10.1002/hbm.20281}, + Volume = {28}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1002/hbm.20281}}
+ +@article{Monterosso2005, + Author = {Monterosso, J. R. and Aron, A. R. and Cordova, X. and Xu, J. and London, E. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Aug}, + Pages = {273--277}, + Title = {{{D}eficits in response inhibition associated with chronic methamphetamine abuse}}, + Volume = {79}, + Year = {2005}} + +@article{Monterosso2001, + Author = {Monterosso, J. R. and Flannery, B. A. and Pettinati, H. M. and Oslin, D. W. and Rukstalis, M. and O'Brien, C. P. and Volpicelli, J. R.}, + Journal = {Am J Addict}, + Pages = {258--268}, + Title = {{{P}redicting treatment response to naltrexone: the influence of craving and family history}}, + Volume = {10}, + Year = {2001}} + +@article{Montgomery1979, + Abstract = {The construction of a depression rating scale designed to be particularly + sensitive to treatment effects is described. Ratings of 54 English + and 52 Swedish patients on a 65 item comprehensive psychopathology + scale were used to identify the 17 most commonly occurring symptoms + in primary depressive illness in the combined sample. Ratings on + these 17 items for 64 patients participating in studies of four different + antidepressant drugs were used to create a depression scale consisting + of the 10 items which showed the largest changes with treatment and + the highest correlation to overall change. The inter-rater reliability + of the new depression scale was high. Scores on the scale correlated + significantly with scores on a standard rating scale for depression, + the Hamilton Rating Scale (HRS), indicating its validity as a general + severity estimate. Its capacity to differentiate between responders + and non-responders to antidepressant treatment was better than the + HRS, indicating greater sensitivity to change. The practical and + ethical implications in terms of smaller sample sizes in clinical + trials are discussed.}, + Author = {S. A. Montgomery and M.
Asberg}, + Journal = {Br J Psychiatry}, + Keywords = {Adolescent; Adult; Aged; Amitriptyline, therapeutic use; Clomipramine, therapeutic use; Depression, drug therapy/psychology; England; Female; Humans; Male; Maprotiline, therapeutic use; Mianserin, therapeutic use; Middle Aged; Psychiatric Status Rating Scales; Psychometrics; Sweden}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Owner = {Young}, + Pages = {382--389}, + Pmid = {444788}, + Timestamp = {2010.05.01}, + Title = {A new depression scale designed to be sensitive to change.}, + Volume = {134}, + Year = {1979}} + +@article{Moolchan2007, + Author = {Moolchan, E. T. and Frazier, M. and Franken, F. H. and Ernst, M.}, + Journal = {Psychiatry Res}, + Month = {Aug}, + Pages = {281--285}, + Title = {{{A}dolescents in smoking cessation treatment: relationship between externalizing symptoms, smoking history and outcome}}, + Volume = {152}, + Year = {2007}} + +@article{Moolchan2005, + Author = {Moolchan, E. T. and Robinson, M. L. and Ernst, M. and Cadet, J. L. and Pickworth, W. B. and Heishman, S. J. and Schroeder, J. R.}, + Journal = {Pediatrics}, + Month = {Apr}, + Pages = {e407--414}, + Title = {{{S}afety and efficacy of the nicotine patch and gum for the treatment of adolescent tobacco addiction}}, + Volume = {115}, + Year = {2005}} + +@article{Moore2007, + Author = {Moore, C. M. and Biederman, J. and Wozniak, J. and Mick, E. and Aleardi, M. and Wardrop, M. and Dougherty, M. and Harpold, T. and Hammerness, P. and Randall, E. and Lyoo, I. K. and Renshaw, P. F.}, + Journal = {J Affect Disord}, + Month = {Apr}, + Pages = {19--25}, + Title = {{{M}ania, glutamate/glutamine and risperidone in pediatric bipolar disorder: a proton magnetic resonance spectroscopy study of the anterior cingulate cortex}}, + Volume = {99}, + Year = {2007}} + +@article{Moore1998, + Author = {Moore, R. J. and Vinsant, S. L. and Nader, M. A. and Porrino, L. J. and Friedman, D. P.}, + Journal = {Synapse}, + Month = {Jan}, + Pages = {1--9}, + Title = {{{E}ffect of cocaine self-administration on striatal dopamine {D}1 receptors in rhesus monkeys}}, + Volume = {28}, + Year = {1998}} + +@article{Moore1998a, + Author = {Moore, R. J. and Vinsant, S. L. and Nader, M. A. and Porrino, L. J. and Friedman, D. P.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {88--96}, + Title = {{{E}ffect of cocaine self-administration on dopamine {D}2 receptors in rhesus monkeys}}, + Volume = {30}, + Year = {1998}} + +@article{Moran2008, + Author = {Moran, E. K. and Becker, J. A. and Satlin, A. and Lyoo, I. K. and Fischman, A. J. and Johnson, K. A.}, + Journal = {Neurobiol. Aging}, + Month = {Aug}, + Pages = {1218--1225}, + Title = {{{P}sychosis of {A}lzheimer's disease: {G}ender differences in regional perfusion}}, + Volume = {29}, + Year = {2008}} + +@article{Morean2009, + Author = {Morean, M. E. and Corbin, W. R. and Sinha, R. and O'Malley, S. S.}, + Journal = {J Stud Alcohol Drugs}, + Month = {Mar}, + Pages = {227--236}, + Title = {{{P}arental history of anxiety and alcohol-use disorders and alcohol expectancies as predictors of alcohol-related problems}}, + Volume = {70}, + Year = {2009}} + +@article{Morgan2002, + Author = {Morgan, D. and Grant, K.A. and Gage, H.D. and Mach, R.H. and Kaplan, J.R. and Prioleau, O. and Nader, S.H. and Buchheimer, N. and Ehrenkaufer, R.L. 
and Nader, M.A.}, + Journal = {Nature Neuroscience}, + Number = {2}, + Pages = {169--174}, + Publisher = {Nature Publishing Group}, + Title = {{Social dominance in monkeys: dopamine D2 receptors and cocaine self-administration}}, + Volume = {5}, + Year = {2002}} + +@article{Morgan2006, + Author = {Morgan, M. J. and Impallomeni, L. C. and Pirona, A. and Rogers, R. D.}, + Journal = {Neuropsychopharmacology}, + Month = {Jul}, + Pages = {1562--1573}, + Title = {{{E}levated impulsivity and impaired decision-making in abstinent {E}cstasy ({M}{D}{M}{A}) users compared to polydrug and drug-naive controls}}, + Volume = {31}, + Year = {2006}} + +@article{Morgan2009, + Author = {Morgan, P. T. and Paliwal, P. and Malison, R. T. and Sinha, R.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jul}, + Pages = {54--58}, + Title = {{{S}ex differences in sleep and sleep-dependent learning in abstinent cocaine users}}, + Volume = {93}, + Year = {2009}} + +@article{Morgan2002a, + Author = {Morgan, R. E. and Garavan, H. P. and Mactutus, C. F. and Levitsky, D. A. and Booze, R. M. and Strupp, B. J.}, + Journal = {Behav. Neurosci.}, + Month = {Aug}, + Pages = {624--633}, + Title = {{{E}nduring effects of prenatal cocaine exposure on attention and reaction to errors}}, + Volume = {116}, + Year = {2002}} + +@article{Morganroth1987, + Author = {Morganroth, J. and Pratt, C. M. and Kennedy, H. L. and Singh, S. N. and Platt, M. L. and Baker, B. J. and Mason, D. T.}, + Journal = {Am. J. Cardiol.}, + Month = {Oct}, + Pages = {48F--51F}, + Title = {{{E}fficacy and tolerance of {E}thmozine (moricizine {H}{C}l) in placebo-controlled trials}}, + Volume = {60}, + Year = {1987}} + +@article{Morin2002, + Author = {Morin, C. and Bushnell, M. C. and Luskin, M. B. and Craig, A. D.}, + Journal = {Clin J Pain}, + Pages = {191--195}, + Title = {{{D}isruption of thermal perception in a multiple sclerosis patient with central pain}}, + Volume = {18}, + Year = {2002}} + +@article{Morris2004, + Author = {Morris, G. and Arkadir, D. and Nevet, A. and Vaadia, E. and Bergman, H.}, + Journal = {Neuron}, + Number = {1}, + Pages = {133--143}, + Publisher = {Elsevier}, + Title = {{Coincident but distinct messages of midbrain dopamine and striatal tonically active neurons}}, + Volume = {43}, + Year = {2004}} + +@article{Mortimer2009, + Author = {Mortimer, D. and Feldner, J. and Vaughan, T. and Vetter, I. and Pujic, Z. and Rosoff, W. J. and Burrage, K. and Dayan, P. and Richards, L. J. and Goodhill, G. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jun}, + Title = {{{A} {B}ayesian model predicts the response of axons to molecular gradients}}, + Year = {2009}} + +@article{Moustafa2008, + Author = {Moustafa, A. A. and Cohen, M. X. and Sherman, S. J. and Frank, M. J.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {12294--12304}, + Title = {{{A} role for dopamine in temporal decision making and reward maximization in parkinsonism}}, + Volume = {28}, + Year = {2008}} + +@article{Moustafa2008a, + Author = {Moustafa, A. A. and Sherman, S. J. and Frank, M. J.}, + Journal = {Neuropsychologia}, + Month = {Nov}, + Pages = {3144--3156}, + Title = {{{A} dopaminergic basis for working memory, learning and attentional shifting in {P}arkinsonism}}, + Volume = {46}, + Year = {2008}} + +@article{Moutoussis2008, + Author = {Moutoussis, M. and Bentall, R. P. and Williams, J.
and Dayan, P.}, + Journal = {Network}, + Pages = {137--160}, + Title = {{{A} temporal difference account of avoidance learning}}, + Volume = {19}, + Year = {2008}} + +@article{Moutoussis2007, + Author = {Moutoussis, M. and Williams, J. and Dayan, P. and Bentall, R. P.}, + Journal = {Cogn Neuropsychiatry}, + Month = {Nov}, + Pages = {495--510}, + Title = {{{P}ersecutory delusions and the conditioned avoidance paradigm: towards an integration of the psychology and biology of paranoia}}, + Volume = {12}, + Year = {2007}} + +@article{Movellan2001, + Author = {Movellan, J. R. and McClelland, J. L.}, + Journal = {Psychological Review}, + Pages = {113--148}, + Title = {The {M}orton--{M}assaro Law of Information Integration: {I}mplications for Models of Perception}, + Volume = {108}, + Year = {2001}} + +@article{Mueller2009, + Author = {Mueller, S. C. and Mandell, D. and Leschek, E. W. and Pine, D. S. and Merke, D. P. and Ernst, M.}, + Journal = {J Child Adolesc Psychopharmacol}, + Month = {Feb}, + Pages = {41--50}, + Title = {{{E}arly hyperandrogenism affects the development of hippocampal function: preliminary evidence from a functional magnetic resonance imaging study of boys with familial male precocious puberty}}, + Volume = {19}, + Year = {2009}} + +@article{Mueller2009a, + Author = {Mueller, S. C. and Temple, V. and Cornwell, B. and Grillon, C. and Pine, D. S. and Ernst, M.}, + Journal = {J Child Psychol Psychiatry}, + Month = {Jul}, + Title = {{{I}mpaired spatial navigation in pediatric anxiety}}, + Year = {2009}} + +@article{Munson2006, + Author = {Munson, S. and Schroth, E. and Ernst, M.}, + Journal = {Pediatrics}, + Month = {Apr}, + Pages = {1372--1381}, + Title = {{{T}he role of functional neuroimaging in pediatric brain injury}}, + Volume = {117}, + Year = {2006}} + +@article{Murphy2006, + Author = {Murphy, K. and Dixon, V. and LaGrave, K. and Kaufman, J. and Risinger, R. and Bloom, A. and Garavan, H.}, + Journal = {Am J Psychiatry}, + Month = {Jul}, + Pages = {1245--1251}, + Title = {{{A} validation of event-related {F}{M}{R}{I} comparisons between users of cocaine, nicotine, or cannabis and control subjects}}, + Volume = {163}, + Year = {2006}} + +@article{Murphy2005, + Author = {Murphy, K. and Garavan, H.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {771--777}, + Title = {{{D}eriving the optimal number of events for an event-related f{M}{R}{I} study based on the spatial extent of activation}}, + Volume = {27}, + Year = {2005}} + +@article{Murphy2004, + Author = {Murphy, K. and Garavan, H.}, + Journal = {Neuroimage}, + Month = {Jun}, + Pages = {879--885}, + Title = {{{A}n empirical investigation into the number of subjects required for an event-related f{M}{R}{I} study}}, + Volume = {22}, + Year = {2004}} + +@article{Murray2007, + Author = {Murray, E. A. and O'Doherty, J. P. and Schoenbaum, G.}, + Journal = {J. Neurosci.}, + Month = {Aug}, + Pages = {8166--8169}, + Title = {{{W}hat we know and do not know about the functions of the orbitofrontal cortex after 20 years of cross-species studies}}, + Volume = {27}, + Year = {2007}} + +@article{Musen2006, + Author = {Musen, G. and Lyoo, I. K. and Sparks, C. R. and Weinger, K. and Hwang, J. and Ryan, C. M. and Jimerson, D. C. and Hennen, J. and Renshaw, P. F. and Jacobson, A. M.}, + Journal = {Diabetes}, + Month = {Feb}, + Pages = {326--333}, + Title = {{{E}ffects of type 1 diabetes on gray matter density as measured by voxel-based morphometry}}, + Volume = {55}, + Year = {2006}} + +@article{Myerson1995, + Author = {Myerson, J. 
and Green, L.}, + Journal = {Journal of the Experimental Analysis of Behavior}, + Owner = {Wooyoung Ahn}, + Pages = {263--276}, + Timestamp = {2007.05.01}, + Title = {Discounting of delayed rewards: models of individual choice}, + Volume = {64}, + Year = {1995}} + +@article{myrick2004differential, + Author = {Myrick, H. and Anton, R.F. and Li, X. and Henderson, S. and Drobes, D. and Voronin, K. and George, M.S.}, + Journal = {Neuropsychopharmacology (New York, NY)}, + Number = {2}, + Pages = {393--402}, + Publisher = {Elsevier Science}, + Title = {{Differential brain activity in alcoholics and social drinkers to alcohol cues: relationship to craving}}, + Volume = {29}, + Year = {2004}} + +@article{Myung2003, + Author = {Myung, I.J.}, + Journal = {Journal of Mathematical Psychology}, + Number = {1}, + Pages = {90--100}, + Publisher = {Elsevier}, + Title = {{Tutorial on maximum likelihood estimation}}, + Volume = {47}, + Year = {2003}} + +@article{Myung2000b, + Author = {Myung, I. J. and Kim, C. and Pitt, M. A.}, + Journal = {Memory \& Cognition}, + Number = {5}, + Pages = {832}, + Title = {{Toward an explanation of the power law artifact: insights from response surface analysis.}}, + Volume = {28}, + Year = {2000}} + +@article{Myung2000, + Author = {Myung, I. J.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {190--204}, + Title = {The Importance of Complexity in Model Selection}, + Volume = {44}, + Year = {2000}} + +@article{Myung2000a, + Author = {Myung, I. J. and Forster, M. R. and Browne, M. W.}, + Journal = {Journal of Mathematical Psychology}, + Number = {1--2}, + Pages = {--}, + Title = {Model Selection [{S}pecial Issue]}, + Volume = {44}, + Year = {2000}} + +@article{Myung2006, + Author = {Myung, I. J. and Navarro, D. J. and Pitt, M. A.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {167--179}, + Title = {Model Selection by Normalized Maximum Likelihood}, + Volume = {50}, + Year = {2006}} + +@article{Myung1997, + Author = {Myung, I. J. and Pitt, M. A.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {79--95}, + Title = {Applying {O}ccam's Razor in Modeling Cognition: {A} {B}ayesian Approach}, + Volume = {4}, + Year = {1997}} + +@article{Nachtigall2009, + Author = {Nachtigall, P. and Bludsk{\'y}, O. and Grajciar, L. and Nachtigallov{\'a}, D. and Delgado, M. R. and Are{\'a}n, C. O.}, + Journal = {Phys Chem Chem Phys}, + Month = {Feb}, + Pages = {791--802}, + Title = {{{C}omputational and {F}{T}{I}{R} spectroscopic studies on carbon monoxide and dinitrogen adsorption on a high-silica {H}-{F}{E}{R} zeolite}}, + Volume = {11}, + Year = {2009}} + +@article{Nachtigall2006, + Author = {Nachtigall, P. and Garrone, E. and Palomino, G. T. and Delgado, M. R. and Nachtigallov{\'a}, D. and Are{\'a}n, C. O.}, + Journal = {Phys Chem Chem Phys}, + Month = {May}, + Pages = {2286--2292}, + Title = {{{F}{T}{I}{R} spectroscopic and computational studies on hydrogen adsorption on the zeolite {L}i-{F}{E}{R}}}, + Volume = {8}, + Year = {2006}} + +@article{Nader1996, + Author = {Nader, K. and Bechara, A. and van der Kooy, D.}, + Journal = {Behav. Neurosci.}, + Month = {Dec}, + Pages = {1496--1502}, + Title = {{{L}esions of the lateral parabrachial nucleus block the aversive motivational effects of both morphine and morphine withdrawal but spare morphine's discriminative properties}}, + Volume = {110}, + Year = {1996}} + +@article{Nader1994, + Author = {Nader, K. and Bechara, A. and Roberts, D. C. and van der Kooy, D.}, + Journal = {Behav.
Neurosci.}, + Month = {Dec}, + Pages = {1128--1138}, + Title = {{{N}euroleptics block high- but not low-dose heroin place preferences: further evidence for a two-system model of motivation}}, + Volume = {108}, + Year = {1994}} + +@article{Nader2002, + Author = {Nader, M. A. and Daunais, J. B. and Moore, T. and Nader, S. H. and Moore, R. J. and Smith, H. R. and Friedman, D. P. and Porrino, L. J.}, + Journal = {Neuropsychopharmacology}, + Month = {Jul}, + Pages = {35--46}, + Title = {{{E}ffects of cocaine self-administration on striatal dopamine systems in rhesus monkeys: initial and chronic exposure}}, + Volume = {27}, + Year = {2002}} + +@article{Najt2007, + Abstract = {Impulsivity is frequently associated with bipolar disorder (BD) during + manic episodes, but may also be present in euthymic bipolar patients. + Aggression is an impulsivity-related behavior also found during manic + episodes. The objective of this review is to further clarify the + relationship between impulsivity and BD. A search in Medline and + Psycinfo databases, combined with a manual search of selected references, + was conducted to identify available literature on BD and impulsivity-related + features. Although few studies have directly measured impulsivity + in BD, available findings suggest that impulsivity is not only state-related, + but also a trait component of BD, which could represent a core feature + of the illness. Further research exploring the neurobiology of the + impulsivity/BD relationship may contribute to elucidate the pathophysiology + and to improve the diagnosis and treatment of this severe illness.}, + Author = {P. Najt and J. Perez and M. Sanches and M. A M Peluso and D. Glahn and J. C. Soares}, + Doi = {10.1016/j.euroneuro.2006.10.002}, + Institution = {MOOD-CNS Program, Division of Mood and Anxiety Disorders, Department of Psychiatry, The University of Texas Health Science Center at San Antonio (UTHSCSA), San Antonio, TX 78229-3900, USA.}, + Journal = {Eur Neuropsychopharmacol}, + Keywords = {Aggression, physiology; Bipolar Disorder, complications; Databases, Factual; Humans; Impulsive Behavior, etiology; MEDLINE; Suicide}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {5}, + Owner = {Young}, + Pages = {313--320}, + Pii = {S0924-977X(06)00202-1}, + Pmid = {17140772}, + Timestamp = {2010.05.01}, + Title = {Impulsivity and bipolar disorder.}, + Url = {http://dx.doi.org/10.1016/j.euroneuro.2006.10.002}, + Volume = {17}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.euroneuro.2006.10.002}} + +@article{Nakamura2000, + Abstract = {Nicotine produces profound behavioral effects in humans, but little + is known about the sites of its action. There is a hypothesis that + frontal lobe and limbic/cingulate cortical structures might be the + sites. In this study, we examined the effects of cigarette smoking + on feeling and cerebral blood flow (CBF) in human subjects. Young + and healthy 9 cigarette smokers (all males, 24-33 years, average, + 26.4) were included. After prohibiting them from smoking for 15 hours, + CBF was measured using a Xenon CT-CBF system. Fifteen minutes later + after allowing them to smoke two pieces of cigarette, the second + CBF measurement was performed. Subtraction CBF map was created to + display the changes after smoking. CT images were taken at three + levels so as to include the cerebral lobes, basal ganglia, limbic + system, brainstem and cerebellum. 
Arterial nicotine increased up + to the levels 8 times higher than before smoking. The increases of + blood pressure and pulse rate were minimal. Arterial carbon dioxide + level and hematocrit did not change. Feeling after smoking was variable + in individual subject. In 8 subjects with a relatively high feeling, + CBF increased mainly in the frontal lobe, hippocampus, uncus, thalamus + and caudate nucleus. CBF did not change in the parietal, temporal + and occipital lobes, and in the putamen, insula, brainstem and cerebellum. + In two subjects with uncomfortable feeling, CBF did reduce in the + whole brain. The CBF increase in frontal lobe and limbic structures + seems to be secondary to nicotine-induced neuronal activation in + each structure. Mesocorticolimbic dopamine system, which is believed + to influence learning, memory or emotional performance, appears to + be a target for nicotine. The CBF reduction in the whole brain might + be due to cerebral vasoconstriction or be secondary to a systemic + hypotension.}, + Author = {H. Nakamura and A. Tanaka and Y. Nomoto and Y. Ueno and Y. Nakayama}, + Institution = {Department of Neurosurgery, Fukuoka University, Chikushi Hospital, Japan.}, + Journal = {Keio J Med}, + Keywords = {Adult; Brain, physiopathology; Cerebrovascular Circulation; Frontal Lobe, blood supply/physiopathology; Humans; Limbic System, blood supply/physiopathology; Male; Smoking, physiopathology/psychology; Tomography, X-Ray Computed, methods; Xenon, diagnostic use}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Owner = {Woo-Young Ahn}, + Pages = {A122--A124}, + Pmid = {10750360}, + Timestamp = {2009.08.04}, + Title = {Activation of fronto-limbic system in the human brain by cigarette smoking: evaluated by a CBF measurement.}, + Volume = {49 Suppl 1}, + Year = {2000}} + +@article{Naqvi2009, + Author = {Naqvi, N. H. and Bechara, A.}, + Journal = {Trends in Neurosciences}, + Number = {1}, + Pages = {56--67}, + Title = {{The hidden island of addiction: the insula}}, + Volume = {32}, + Year = {2009}} + +@article{Naqvi2005, + Author = {Naqvi, N. H. and Bechara, A.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Aug}, + Pages = {821--829}, + Title = {{{T}he airway sensory impact of nicotine contributes to the conditioned reinforcing effects of individual puffs from cigarettes}}, + Volume = {81}, + Year = {2005}} + +@article{Naqvi2007, + Author = {Naqvi, N. H. and Rudrauf, D. and Damasio, H. and Bechara, A.}, + Journal = {Science}, + Month = {Jan}, + Pages = {531--534}, + Title = {{{D}amage to the insula disrupts addiction to cigarette smoking}}, + Volume = {315}, + Year = {2007}} + +@article{Natarajan2008, + Author = {Natarajan, R. and Huys, Q. J. and Dayan, P. and Zemel, R. S.}, + Journal = {Neural Comput}, + Month = {Sep}, + Pages = {2325--2360}, + Title = {{{E}ncoding and decoding spikes for dynamic stimuli}}, + Volume = {20}, + Year = {2008}} + +@article{Navarro2006, + Author = {Navarro, D. J. and Griffiths, T. L. and Steyvers, M. and Lee, M. D.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {101--122}, + Title = {Modeling Individual Differences Using {D}irichlet Processes}, + Volume = {50}, + Year = {2006}} + +@article{Navarro2004, + Author = {Navarro, D. J. and Pitt, M. A. and Myung, I. J.}, + Journal = {Cognitive Psychology}, + Pages = {47--84}, + Title = {Assessing the Distinguishability of Models and the Informativeness of Data}, + Volume = {49}, + Year = {2004}} + +@article{Nawa2008, + Author = {Nawa, N. E. and Nelson, E. E.
and Pine, D. S. and Ernst, M.}, + Journal = {Soc Cogn Affect Neurosci}, + Month = {Dec}, + Pages = {367--376}, + Title = {{{D}o you make a difference? {S}ocial context in a betting task}}, + Volume = {3}, + Year = {2008}} + +@article{Nehlig1984, + Author = {Nehlig, A. and Lucignani, G. and Kadekaro, M. and Porrino, L. J. and Sokoloff, L.}, + Journal = {Eur. J. Pharmacol.}, + Month = {May}, + Pages = {91--100}, + Title = {{{E}ffects of acute administration of caffeine on local cerebral glucose utilization in the rat}}, + Volume = {101}, + Year = {1984}} + +@article{Nehlig1985, + Author = {Nehlig, A. and Porrino, L. J. and Crane, A. M. and Sokoloff, L.}, + Journal = {J. Cereb. Blood Flow Metab.}, + Month = {Sep}, + Pages = {393--400}, + Title = {{{L}ocal cerebral glucose utilization in normal female rats: variations during the estrous cycle and comparison with males}}, + Volume = {5}, + Year = {1985}} + +@article{Neilan2008, + Author = {Neilan, E. G. and Delgado, M. R. and Donovan, M. A. and Kim, S. Y. and Jou, R. L. and Wu, B. L. and Kang, P. B.}, + Journal = {Arch. Neurol.}, + Month = {Aug}, + Pages = {1117--1121}, + Title = {{{R}esponse of motor complications in {C}ockayne syndrome to carbidopa-levodopa}}, + Volume = {65}, + Year = {2008}} + +@article{Nelder1965, + Author = {Nelder, J. A. and Mead, R.}, + Journal = {Computer Journal}, + Owner = {WooYoung Ahn}, + Pages = {308--313}, + Timestamp = {2007.07.18}, + Title = {A simplex method for function minimization}, + Volume = {7}, + Year = {1965}} + +@article{Nelson2009, + Author = {Nelson, E. E. and Herman, K. N. and Barrett, C. E. and Noble, P. L. and Wojteczko, K. and Chisholm, K. and Delaney, D. and Ernst, M. and Fox, N. A. and Suomi, S. J. and Winslow, J. T. and Pine, D. S.}, + Journal = {Biol. Psychiatry}, + Month = {May}, + Title = {{{A}dverse {R}earing {E}xperiences {E}nhance {R}esponding to {B}oth {A}versive and {R}ewarding {S}timuli in {J}uvenile {R}hesus {M}onkeys}}, + Year = {2009}} + +@article{Nelson2003, + Author = {Nelson, E. E. and McClure, E. B. and Monk, C. S. and Zarahn, E. and Leibenluft, E. and Pine, D. S. and Ernst, M.}, + Journal = {J Child Psychol Psychiatry}, + Month = {Oct}, + Pages = {1015--1024}, + Title = {{{D}evelopmental differences in neuronal engagement during implicit encoding of emotional faces: an event-related f{M}{R}{I} study}}, + Volume = {44}, + Year = {2003}} + +@article{Nelson1986, + Author = {Nelson, N. and Rosenthal, R. and Rosnow, R. L.}, + Journal = {American Psychologist}, + Pages = {1299--1301}, + Title = {Interpretation of Significance Levels and Effect Sizes by Psychological Researchers}, + Volume = {41}, + Year = {1986}} + +@article{Nestler2008, + Author = {Nestler, E. J.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Oct}, + Pages = {3245--3255}, + Title = {{{R}eview. {T}ranscriptional mechanisms of addiction: role of {D}elta{F}os{B}}}, + Volume = {363}, + Year = {2008}} + +@article{Nestler2005, + Author = {Nestler, E. J.}, + Journal = {Sci Pract Perspect}, + Month = {Dec}, + Pages = {4--10}, + Title = {{{T}he neurobiology of cocaine addiction}}, + Volume = {3}, + Year = {2005}} + +@article{Nestler2005a, + Author = {Nestler, E. J.}, + Journal = {Nat. Neurosci.}, + Month = {Nov}, + Pages = {1445--1449}, + Title = {{{I}s there a common molecular pathway for addiction?}}, + Volume = {8}, + Year = {2005}} + +@article{Nestler2004, + Author = {Nestler, E.
J.}, + Journal = {Neuropharmacology}, + Pages = {24--32}, + Title = {{{M}olecular mechanisms of drug addiction}}, + Volume = {47 Suppl 1}, + Year = {2004}} + +@article{Nestler2004a, + Author = {Nestler, E. J.}, + Journal = {Trends Pharmacol. Sci.}, + Month = {Apr}, + Pages = {210--218}, + Title = {{{H}istorical review: {M}olecular and cellular mechanisms of opiate and cocaine addiction}}, + Volume = {25}, + Year = {2004}} + +@article{Nestler2002, + Author = {Nestler, E. J.}, + Journal = {Nat. Neurosci.}, + Month = {Nov}, + Pages = {1076--1079}, + Title = {{{F}rom neurobiology to treatment: progress against addiction}}, + Volume = {5 Suppl}, + Year = {2002}} + +@article{Nestler2002a, + Author = {Nestler, E. J.}, + Journal = {Neurobiol Learn Mem}, + Month = {Nov}, + Pages = {637--647}, + Title = {{{C}ommon molecular and cellular substrates of addiction and memory}}, + Volume = {78}, + Year = {2002}} + +@article{Nestler2001b, + Author = {Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {8324--8327}, + Title = {{{P}sychogenomics: opportunities for understanding addiction}}, + Volume = {21}, + Year = {2001}} + +@article{Nestler2001c, + Author = {Nestler, E. J.}, + Journal = {Science}, + Month = {Jun}, + Pages = {2266--2267}, + Title = {{{N}eurobiology. {T}otal recall-the memory of addiction}}, + Volume = {292}, + Year = {2001}} + +@article{Nestler2001d, + Author = {Nestler, E. J.}, + Journal = {Am J Addict}, + Pages = {201--217}, + Title = {{{M}olecular neurobiology of addiction}}, + Volume = {10}, + Year = {2001}} + +@article{Nestler2001e, + Author = {Nestler, E. J.}, + Journal = {Nat. Rev. Neurosci.}, + Month = {Feb}, + Pages = {119--128}, + Title = {{{M}olecular basis of long-term plasticity underlying addiction}}, + Volume = {2}, + Year = {2001}} + +@article{Nestler2000, + Author = {Nestler, E. J.}, + Journal = {Nat. Genet.}, + Month = {Nov}, + Pages = {277--281}, + Title = {{{G}enes and addiction}}, + Volume = {26}, + Year = {2000}} + +@article{Nestler1997a, + Author = {Nestler, E. J.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Oct}, + Pages = {713--719}, + Title = {{{M}olecular mechanisms of opiate and cocaine addiction}}, + Volume = {7}, + Year = {1997}} + +@article{Nestler1994a, + Author = {Nestler, E. J.}, + Journal = {Neuropsychopharmacology}, + Month = {Oct}, + Pages = {77--87}, + Title = {{{M}olecular neurobiology of drug addiction}}, + Volume = {11}, + Year = {1994}} + +@article{Nestler1993b, + Author = {Nestler, E. J.}, + Journal = {Crit Rev Neurobiol}, + Pages = {23--39}, + Title = {{{C}ellular responses to chronic treatment with drugs of abuse}}, + Volume = {7}, + Year = {1993}} + +@article{Nestler1992, + Author = {Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Jul}, + Pages = {2439--2450}, + Title = {{{M}olecular mechanisms of drug addiction}}, + Volume = {12}, + Year = {1992}} + +@article{Nestler1997, + Author = {Nestler, E. J. and Aghajanian, G. K.}, + Journal = {Science}, + Month = {Oct}, + Pages = {58--63}, + Title = {{{M}olecular and cellular basis of addiction}}, + Volume = {278}, + Year = {1997}} + +@article{Nestler1994, + Author = {Nestler, E. J. and Alreja, M. and Aghajanian, G. K.}, + Journal = {Brain Res. Bull.}, + Pages = {521--528}, + Title = {{{M}olecular and cellular mechanisms of opiate action: studies in the rat locus coeruleus}}, + Volume = {35}, + Year = {1994}} + +@article{Nestler2001a, + Author = {Nestler, E. J. and Barrot, M. and Self, D. W.}, + Journal = {Proc. Natl. Acad. Sci. 
U.S.A.}, + Month = {Sep}, + Pages = {11042--11046}, + Title = {{{D}elta{F}os{B}: a sustained molecular switch for addiction}}, + Volume = {98}, + Year = {2001}} + +@article{Nestler1993a, + Author = {Nestler, E. J. and Bergson, C. M. and Gultart, X. and Hope, B. T.}, + Journal = {NIDA Res. Monogr.}, + Pages = {92--116}, + Title = {{{R}egulation of neural gene expression in opiate and cocaine addiction}}, + Volume = {125}, + Year = {1993}} + +@article{Nestler1996, + Author = {Nestler, E. J. and Berhow, M. T. and Brodkin, E. S.}, + Journal = {Mol. Psychiatry}, + Month = {Jul}, + Pages = {190--199}, + Title = {{{M}olecular mechanisms of drug addiction: adaptations in signal transduction pathways}}, + Volume = {1}, + Year = {1996}} + +@article{Nestler1989, + Author = {Nestler, E. J. and Erdos, J. J. and Terwilliger, R. and Duman, R. S. and Tallman, J. F.}, + Journal = {Brain Res.}, + Month = {Jan}, + Pages = {230--239}, + Title = {{{R}egulation of {G} proteins by chronic morphine in the rat locus coeruleus}}, + Volume = {476}, + Year = {1989}} + +@article{Nestler1993, + Author = {Nestler, E. J. and Hope, B. T. and Widnell, K. L.}, + Journal = {Neuron}, + Month = {Dec}, + Pages = {995--1006}, + Title = {{{D}rug addiction: a model for the molecular basis of neural plasticity}}, + Volume = {11}, + Year = {1993}} + +@article{Nestler2001, + Author = {Nestler, E. J. and Landsman, D.}, + Journal = {Nature}, + Month = {Feb}, + Pages = {834--835}, + Title = {{{L}earning about addiction from the genome}}, + Volume = {409}, + Year = {2001}} + +@article{Nestler1988, + Author = {Nestler, E. J. and Tallman, J. F.}, + Journal = {Mol. Pharmacol.}, + Month = {Feb}, + Pages = {127--132}, + Title = {{{C}hronic morphine treatment increases cyclic {A}{M}{P}-dependent protein kinase activity in the rat locus coeruleus}}, + Volume = {33}, + Year = {1988}} + +@article{Nestor2009, + Author = {Nestor, L. and Hester, R. and Garavan, H.}, + Journal = {Neuroimage}, + Month = {Jul}, + Title = {{{I}ncreased ventral striatal {B}{O}{L}{D} activity during non-drug reward anticipation in cannabis users}}, + Year = {2009}} + +@article{Nestor2008, + Author = {Nestor, L. and Roberts, G. and Garavan, H. and Hester, R.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {1328--1339}, + Title = {{{D}eficits in learning and memory: parahippocampal hyperactivity and frontocortical hypoactivity in cannabis users}}, + Volume = {40}, + Year = {2008}} + +@book{Neter1996, + Address = {Chicago}, + Author = {Neter, J. and Kutner, M. H. and Nachtsheim, C. J. and Wasserman, W.}, + Publisher = {Irwin}, + Title = {Applied Linear Statistical Models (4th ed.)}, + Year = {1996}} + +@article{Neve2005, + Author = {Neve, R. L. and Neve, K. A. and Nestler, E. J. and Carlezon, W. A.}, + Journal = {BioTechniques}, + Month = {Sep}, + Pages = {381--391}, + Title = {{{U}se of herpes virus amplicon vectors to study brain disorders}}, + Volume = {39}, + Year = {2005}} + +@article{Newell2001, + Author = {Newell, K. M. and Liu, Y.-T. and Mayer-Kress, G.}, + Journal = {Psychological Review}, + Pages = {57--82}, + Title = {Time Scales in Motor Learning and Development}, + Volume = {108}, + Year = {2001}} + +@article{Newman1985, + Author = {Newman, Joseph P. and Widom, C. S. 
and Nathan, S.}, + Date-Modified = {2016-07-17 04:44:49 +0000}, + Journal = {Journal of Personality and Social Psychology}, + Owner = {Wooyoung Ahn}, + Pages = {1316--1327}, + Timestamp = {2007.04.30}, + Title = {Passive avoidance in syndromes of disinhibition, psychopathy, and extraversion}, + Volume = {48}, + Year = {1985}} + +@article{Neyman1977, + Author = {Neyman, J.}, + Journal = {Synthese}, + Pages = {97--131}, + Title = {Frequentist Probability and Frequentist Statistics}, + Volume = {36}, + Year = {1977}} + +@article{Neyman1933, + Author = {Neyman, J. and Pearson, E. S.}, + Journal = {Philosophical Transactions of the Royal Society A}, + Pages = {289--337}, + Title = {On the Problem of the Most Efficient Tests of Statistical Hypotheses}, + Volume = {231}, + Year = {1933}} + +@article{Nickerson2000, + Author = {Nickerson, R. S.}, + Journal = {Psychological Methods}, + Pages = {241--301}, + Title = {Null Hypothesis Statistical Testing: A Review of an Old and Continuing Controversy}, + Volume = {5}, + Year = {2000}} + +@article{Nielson2002, + Author = {Nielson, K. A. and Langenecker, S. A. and Garavan, H.}, + Journal = {Psychol Aging}, + Month = {Mar}, + Pages = {56--71}, + Title = {{{D}ifferences in the functional neuroanatomy of inhibitory control across the adult life span}}, + Volume = {17}, + Year = {2002}} + +@article{Nielson2004, + Author = {Nielson, K. A. and Langenecker, S. A. and Ross, T. J. and Garavan, H. and Rao, S. M. and Stein, E. A.}, + Journal = {Neuroreport}, + Month = {Jan}, + Pages = {129--133}, + Title = {{{C}omparability of functional {M}{R}{I} response in young and old during inhibition}}, + Volume = {15}, + Year = {2004}} + +@article{Nigg2006, + Abstract = {OBJECTIVE: To evaluate the predictive power of executive functions, + in particular, response inhibition, in relation to alcohol-related + problems and illicit drug use in adolescence. METHOD: A total of + 498 children from 275 families from a longitudinal high-risk study + completed executive function measures in early and late adolescence + and lifetime drinking and drug-related ratings at multiple time points + including late adolescence (ages 15-17). Multi-informant measures + of attention-deficit/hyperactivity disorder and conduct disorder + were obtained in early childhood (ages 3-5), middle childhood, and + adolescence. RESULTS: In multilevel models, poor response inhibition + predicted aggregate alcohol-related problems, the number of illicit + drugs used, and comorbid alcohol and drug use (but not the number + of drug-related problems), independently of IQ, parental alcoholism + and antisocial personality disorder, child attention-deficit/hyperactivity + disorder and conduct symptoms, or age. Multivariate models explained + 8\% to 20\% of residual variance in outcome scores. The incremental + predictive power of response inhibition was modest, explaining about + 1\% of the variance in most outcomes, but more than 9\% of the residual + variance in problem outcomes within the highest risk families. Other + measured executive functions did not independently predict substance + use onset.
CONCLUSION: Models of alcoholism and other drug risks + that invoke executive functions may benefit from specifying response + inhibition as an incremental component.}, + Author = {Joel T Nigg and Maria M Wong and Michelle M Martel and Jennifer M Jester and Leon I Puttler and Jennifer M Glass and Kenneth M Adams and Hiram E Fitzgerald and Robert A Zucker}, + Doi = {10.1097/01.chi.0000199028.76452.a9}, + Institution = {Department of Psychology, Michigan State University, East Lansing, MI 48824-116, USA. nigg@msu.edu}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Keywords = {Adolescent; Alcoholism, psychology; Attention; Child; Female; Forecasting; Humans; Inhibition (Psychology); Male; Reaction Time; Risk Factors; Substance-Related Disorders, psychology; Thinking}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {468--475}, + Pii = {00004583-200604000-00014}, + Pmid = {16601652}, + Timestamp = {2009.08.06}, + Title = {Poor response inhibition as a predictor of problem drinking and illicit drug use in adolescents at risk for alcoholism and other substance use disorders.}, + Url = {http://dx.doi.org/10.1097/01.chi.0000199028.76452.a9}, + Volume = {45}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1097/01.chi.0000199028.76452.a9}} + +@article{Nigrovic2007, + Author = {Nigrovic, L. E. and Kuppermann, N. and Macias, C. G. and Cannavino, C. R. and Moro-Sutherland, D. M. and Schremmer, R. D. and Schwab, S. H. and Agrawal, D. and Mansour, K. M. and Bennett, J. E. and Katsogridakis, Y. L. and Mohseni, M. M. and Bulloch, B. and Steele, D. W. and Kaplan, R. L. and Herman, M. I. and Bandyopadhyay, S. and Dayan, P. and Truong, U. T. and Wang, V. J. and Bonsu, B. K. and Chapman, J. L. and Kanegaye, J. T. and Malley, R.}, + Journal = {JAMA}, + Month = {Jan}, + Pages = {52--60}, + Title = {{{C}linical prediction rule for identifying children with cerebrospinal fluid pleocytosis at very low risk of bacterial meningitis}}, + Volume = {297}, + Year = {2007}} + +@article{Nigrovic2008, + Author = {Nigrovic, L. E. and Kuppermann, N. and Malley, R. and Macias, J. T. and Moro-Sutherland, D. M. and Schremmer, R. D. and Schwab, S. H. and Agrawal, D. and Mansour, K. M. and Bennett, J. E. and Katsogridakis, Y. L. and Mohseni, M. M. and Bulloch, B. and Steele, D. W. and Kaplan, R. L. and Herman, M. I. and Bandyopadyay, S. and Dayan, P. and Truong, U. T. and Wang, V. J. and Bonsu, B. K. and Chapman, J. L.}, + Journal = {Acad Emerg Med}, + Month = {Jun}, + Pages = {522--528}, + Title = {{{C}hildren with bacterial meningitis presenting to the emergency department during the pneumococcal conjugate vaccine era}}, + Volume = {15}, + Year = {2008}} + +@article{Nigrovic2008a, + Author = {Nigrovic, L. E. and Malley, R. and Macias, C. G. and Kanegaye, J. T. and Moro-Sutherland, D. M. and Schremmer, R. D. and Schwab, S. H. and Agrawal, D. and Mansour, K. M. and Bennett, J. E. and Katsogridakis, Y. L. and Mohseni, M. M. and Bulloch, B. and Steele, D. W. and Kaplan, R. L. and Herman, M. I. and Bandyopadhyay, S. and Dayan, P. and Truong, U. T. and Wang, V. J. and Bonsu, B. K. and Chapman, J. L. and Kuppermann, N. and Alpern, E. R. and Bush, T. and Campos, J. M. and Cannavino, C. R. and Edelberg, M. and Hauptman, M. and Ishimine, P. and Kaplan, D. M. and McCaslin, R. I. and Salim, U. and Wilde, J. 
and Zhao, X.}, + Journal = {Pediatrics}, + Month = {Oct}, + Pages = {726--730}, + Title = {{{E}ffect of antibiotic pretreatment on cerebrospinal fluid profiles of children with bacterial meningitis}}, + Volume = {122}, + Year = {2008}} + +@article{Ning2008, + Author = {Ning, C. and Green-Golan, L. and Stratakis, C. A. and Leschek, E. and Sinaii, N. and Schroth, E. and Ernst, M. and Merke, D. P.}, + Journal = {J. Pediatr. Endocrinol. Metab.}, + Month = {Aug}, + Pages = {771--780}, + Title = {{{B}ody image in adolescents with disorders of steroidogenesis}}, + Volume = {21}, + Year = {2008}} + +@article{Niv2009, + Author = {Yael Niv}, + Doi = {10.1016/j.jmp.2008.12.005}, + Issn = {0022-2496}, + Journal = {Journal of Mathematical Psychology}, + Number = {3}, + Pages = {139--154}, + Title = {Reinforcement learning in the brain}, + Volume = {53}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.jmp.2008.12.005}} + +@article{Niv2006, + Author = {Niv, Y. and Daw, N. D. and Dayan, P.}, + Journal = {Nat. Neurosci.}, + Pages = {987--988}, + Title = {{{C}hoice values}}, + Volume = {9}, + Year = {2006}} + +@article{Niv2007, + Author = {Niv, Y. and Daw, N. D. and Joel, D. and Dayan, P.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Apr}, + Pages = {507--520}, + Title = {{{T}onic dopamine: opportunity costs and the control of response vigor}}, + Volume = {191}, + Year = {2007}} + +@article{Niv2005, + Author = {Niv, Y. and Duff, M. O. and Dayan, P.}, + Journal = {Behav Brain Funct}, + Pages = {6}, + Title = {{{D}opamine, uncertainty and {T}{D} learning}}, + Volume = {1}, + Year = {2005}} + +@article{Niv2006a, + Author = {Niv, Y. and Joel, D. and Dayan, P.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Aug}, + Pages = {375--381}, + Title = {{{A} normative perspective on motivation}}, + Volume = {10}, + Year = {2006}} + +@article{Noel2007, + Author = {No{\"e}l, X. and Van der Linden, M. and d'Acremont, M. and Bechara, A. and Dan, B. and Hanak, C. and Verbanck, P.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jun}, + Pages = {291--298}, + Title = {{{A}lcohol cues increase cognitive impulsivity in individuals with alcoholism}}, + Volume = {192}, + Year = {2007}} + +@article{Noel2005, + Author = {No{\"e}l, X. and Van der Linden, M. and d'Acremont, M. and Colmant, M. and Hanak, C. and Pelc, I. and Verbanck, P. and Bechara, A.}, + Journal = {Addiction}, + Month = {Sep}, + Pages = {1302--1309}, + Title = {{{C}ognitive biases toward alcohol-related words and executive deficits in polysubstance abusers with alcoholism}}, + Volume = {100}, + Year = {2005}} + +@article{Nobles2005, + Author = {Nobles, R. and Schiff, D.}, + Journal = {Significance}, + Pages = {17--19}, + Title = {Misleading Statistics Within Criminal Trials: {T}he {S}ally {C}lark Case}, + Volume = {2}, + Year = {2005}} + +@article{Norrsell1999, + Author = {Norrsell, U. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Aug}, + Pages = {611--625}, + Title = {{{B}ehavioral thermosensitivity after lesions of thalamic target areas of a thermosensory spinothalamic pathway in the cat}}, + Volume = {82}, + Year = {1999}} + +@article{Nosofsky1997, + Author = {Nosofsky, R. M. and Palmeri, T. J.}, + Journal = {Psychological Review}, + Pages = {266--300}, + Title = {An Exemplar--Based Random Walk Model of Speeded Classification}, + Volume = {104}, + Year = {1997}} + +@article{Novikov1997, + Author = {Novikov, E. and Novikov, A. and Shannahoff--Khalsa, D. and Schwartz, B.
and Wright, J.}, + Journal = {Physical Review E}, + Pages = {R2387--2389}, + Title = {Scale--similar Activity in the Brain}, + Volume = {56}, + Year = {1997}} + +@article{Numan1998, + Author = {Numan, S. and Lane-Ladd, S. B. and Zhang, L. and Lundgren, K. H. and Russell, D. S. and Seroogy, K. B. and Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Dec}, + Pages = {10700--10708}, + Title = {{{D}ifferential regulation of neurotrophin and trk receptor m{R}{N}{A}s in catecholaminergic nuclei during chronic opiate treatment and withdrawal}}, + Volume = {18}, + Year = {1998}} + +@article{Nystrom2000, + Author = {Nystrom, L. E. and Braver, T. S. and Sabb, F. W. and Delgado, M. R. and Noll, D. C. and Cohen, J. D.}, + Journal = {Neuroimage}, + Month = {May}, + Pages = {424--446}, + Title = {{{W}orking memory for letters, shapes, and locations: f{M}{R}{I} evidence against stimulus-based regional organization in human prefrontal cortex}}, + Volume = {11}, + Year = {2000}} + +@article{Oberauer2005, + Author = {Oberauer, K.}, + Journal = {Journal of Experimental Psychology: General}, + Pages = {368--387}, + Title = {Binding and Inhibition in Working Memory: {I}ndividual and Age Differences in Short--Term Recognition}, + Volume = {134}, + Year = {2005}} + +@article{Oberlin2009, + Author = {B. G. Oberlin and N. J. Grahame}, + Doi = {10.1111/j.1530-0277.2009.00955.x}, + Institution = {Department of Psychology, Indiana University Purdue University at Indianapolis, Indianapolis, Indiana, USA.}, + Journal = {Alcohol Clin Exp Res}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jul}, + Number = {7}, + Owner = {Woo-Young Ahn}, + Pages = {1294--1303}, + Pii = {ACER955}, + Pmid = {19389183}, + Timestamp = {2009.08.06}, + Title = {High-alcohol preferring mice are more impulsive than low-alcohol preferring mice as measured in the delay discounting task.}, + Url = {http://dx.doi.org/10.1111/j.1530-0277.2009.00955.x}, + Volume = {33}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1530-0277.2009.00955.x}}
+ +@article{OBoyle1984, + Author = {O'Boyle, K. M. and Waddington, J. L.}, + Journal = {European Journal of Pharmacology}, + Number = {1-2}, + Pages = {171--174}, + Publisher = {Elsevier}, + Title = {{Loss of rat striatal dopamine receptors with ageing is selective for D-2 but not D-1 sites: association with increased non-specific binding of the D-1 ligand [$^{3}$H]piflutixol}}, + Volume = {105}, + Year = {1984}} + +@article{Oden1978, + Author = {Oden, G. C. and Massaro, D. W.}, + Journal = {Psychological Review}, + Pages = {172--191}, + Title = {Integration of Featural Information in Speech Perception}, + Volume = {85}, + Year = {1978}} + +@article{Ogden2004, + Author = {Ogden, C. A. and Rich, M. E. and Schork, N. J. and Paulus, M. P. and Geyer, M. A. and Lohr, J. B. and Kuczenski, R. and Niculescu, A. B.}, + Journal = {Mol. Psychiatry}, + Month = {Nov}, + Pages = {1007--1029}, + Title = {{{C}andidate genes, pathways and mechanisms for bipolar (manic-depressive) and related disorders: an expanded convergent functional genomics approach}}, + Volume = {9}, + Year = {2004}} + +@article{Oh1999, + Author = {Oh, H. S. and DasGupta, A.}, + Journal = {Journal of Statistical Planning and Inference}, + Pages = {93--107}, + Title = {Comparison of the {P}--Value and Posterior Probability}, + Volume = {76}, + Year = {1999}} + +@article{Oh2005, + Author = {Oh, J. S. and Lyoo, I. K. and Sung, Y. H. and Hwang, J. and Kim, J. and Chung, A. and Park, K. S. and Kim, S. J. and Renshaw, P. F. and Song, I. C.}, + Journal = {Neurosci. Lett.}, + Pages = {76--81}, + Title = {{{S}hape changes of the corpus callosum in abstinent methamphetamine users}}, + Volume = {384}, + Year = {2005}} + +@article{Ongur2005, + Author = {Ongur, D. and Price, J. L.}, + Journal = {Cerebral Cortex}, + Number = {3}, + Pages = {206--219}, + Publisher = {Oxford Univ Press}, + Title = {{The organization of networks within the orbital and medial prefrontal cortex of rats, monkeys and humans}}, + Volume = {10}, + Year = {2000}} + +@article{Overman2004, + Author = {Overman, W.H. and Frassrand, K. and Ansel, S. and Trawalter, S. and Bies, B. and Redmond, A.}, + Journal = {Neuropsychologia}, + Number = {13}, + Pages = {1838--1851}, + Publisher = {Elsevier}, + Title = {{Performance on the IOWA card task by adolescents and adults}}, + Volume = {42}, + Year = {2004}} + +@incollection{Pachella1974, + Address = {Hillsdale (NJ)}, + Author = {Pachella, R. G.}, + Booktitle = {Human Information Processing: {T}utorials in Performance and Cognition}, + Editor = {Kantowitz, B. H.}, + Pages = {41--82}, + Publisher = {Lawrence Erlbaum Associates}, + Title = {The Interpretation of Reaction Time in Information--Processing Research}, + Year = {1974}} + +@article{Packard1997, + Author = {Packard, A. M. and Miller, V. S. and Delgado, M.
R.}, + Journal = {Neurology}, + Month = {May}, + Pages = {1427--1434}, + Title = {{{S}chizencephaly: correlations of clinical and radiologic features}}, + Volume = {48}, + Year = {1997}} + +@article{Padoa-Schioppa2007, + Author = {Padoa-Schioppa, C.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Pages = {232--253}, + Title = {{{O}rbitofrontal cortex and the computation of economic value}}, + Volume = {1121}, + Year = {2007}} + +@article{Padoa-Schioppa2006, + Author = {Padoa-Schioppa, C. and Assad, J. A.}, + Journal = {Nature}, + Pages = {223--226}, + Title = {{{N}eurons in the orbitofrontal cortex encode economic value}}, + Volume = {441}, + Year = {2006}} + +@article{Pagano1974, + Author = {Pagano, M.}, + Journal = {Annals of Statistics}, + Pages = {99--108}, + Title = {Estimation of Models of Autoregressive Signal Plus White Noise}, + Volume = {2}, + Year = {1974}} + +@article{Pagnoni2002, + Author = {Pagnoni, G. and Zink, C. F. and Montague, P. R. and Berns, G. S.}, + Journal = {Nat. Neurosci.}, + Month = {Feb}, + Pages = {97--98}, + Title = {{{A}ctivity in human ventral striatum locked to errors of reward prediction}}, + Volume = {5}, + Year = {2002}} + +@article{Paiva1997, + Author = {Paiva, C. S. and Alberto-Rincon, M. C. and Paiva, S. M. and Bechara, I. J. and Yamada, A. T.}, + Journal = {Biotech Histochem}, + Month = {Jul}, + Pages = {202--208}, + Title = {{{P}hospholipid containing choline histochemistry of mouse uterine epithelia during preimplantation stage}}, + Volume = {72}, + Year = {1997}} + +@article{Paletzki2008, + Author = {Paletzki, R. F. and Myakishev, M. V. and Polesskaya, O. and Orosz, A. and Hyman, S. E. and Vinson, C.}, + Journal = {Neuroscience}, + Month = {Apr}, + Pages = {1040--1053}, + Title = {{{I}nhibiting activator protein-1 activity alters cocaine-induced gene expression and potentiates sensitization}}, + Volume = {152}, + Year = {2008}} + +@article{Paliwal2008, + Author = {Paliwal, P. and Hyman, S. M. and Sinha, R.}, + Journal = {Drug Alcohol Depend}, + Month = {Mar}, + Pages = {252--259}, + Title = {{{C}raving predicts time to cocaine relapse: further validation of the {N}ow and {B}rief versions of the cocaine craving questionnaire}}, + Volume = {93}, + Year = {2008}} + +@article{Palmer2005, + Author = {Palmer, J. and Huk, A. C. and Shadlen, M. N.}, + Journal = {Journal of Vision}, + Pages = {376--404}, + Title = {The Effect of Stimulus Strength on the Speed and Accuracy of a Perceptual Decision}, + Volume = {5}, + Year = {2005}} + +@article{Palmer2005b, + Author = {Palmer, J. and Huk, A. C. and Shadlen, M. N.}, + Journal = {J Vis}, + Pages = {376--404}, + Title = {{{T}he effect of stimulus strength on the speed and accuracy of a perceptual decision}}, + Volume = {5}, + Year = {2005}} + +@article{Palmeri1999, + Author = {Palmeri, T.
J.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {543--551}, + Title = {Theories of Automaticity and the Power Law of Practice}, + Volume = {25}, + Year = {1999}} + +@article{Palmeri1997, + Author = {Palmeri, T. J.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {324--354}, + Title = {Exemplar Similarity and the Development of Automaticity}, + Volume = {23}, + Year = {1997}} + +@article{Palombo1988, + Author = {Palombo, E. and Porrino, L. J. and Bankiewicz, K. S. and Crane, A. M. and Kopin, I. J. and Sokoloff, L.}, + Journal = {Brain Res.}, + Month = {Jun}, + Pages = {227--234}, + Title = {{{A}dministration of {M}{P}{T}{P} acutely increases glucose utilization in the substantia nigra of primates}}, + Volume = {453}, + Year = {1988}} + +@article{Palombo1990, + Author = {Palombo, E. and Porrino, L. J. and Bankiewicz, K. S. and Crane, A. M. and Sokoloff, L. and Kopin, I. J.}, + Journal = {J. Neurosci.}, + Month = {Mar}, + Pages = {860--869}, + Title = {{{L}ocal cerebral glucose utilization in monkeys with hemiparkinsonism induced by intracarotid infusion of the neurotoxin {M}{P}{T}{P}}}, + Volume = {10}, + Year = {1990}} + +@article{Palombo1991, + Author = {Palombo, E. and Porrino, L. J. and Crane, A. M. and Bankiewicz, K. S. and Kopin, I. J. and Sokoloff, L.}, + Journal = {J. Neurochem.}, + Month = {May}, + Pages = {1639--1646}, + Title = {{{C}erebral metabolic effects of monoamine oxidase inhibition in normal and 1-methyl-4-phenyl-1,2,3,6-tetrahydropyridine acutely treated monkeys}}, + Volume = {56}, + Year = {1991}} + +@article{Papakostas2005, + Author = {Papakostas, G. I. and Iosifescu, D. V. and Renshaw, P. F. and Lyoo, I. K. and Lee, H. K. and Alpert, J. E. and Nierenberg, A. A. and Fava, M.}, + Journal = {Psychiatry Res}, + Month = {Dec}, + Pages = {301--307}, + Title = {{{B}rain {M}{R}{I} white matter hyperintensities and one-carbon cycle metabolism in non-geriatric outpatients with major depressive disorder ({P}art {I}{I})}}, + Volume = {140}, + Year = {2005}} + +@article{Papp1994, + Author = {Papp, M. and Klimek, V. and Willner, P.}, + Journal = {Psychopharmacology}, + Number = {4}, + Pages = {441--446}, + Publisher = {Springer}, + Title = {{Parallel changes in dopamine D 2 receptor binding in limbic forebrain associated with chronic mild stress-induced anhedonia and its reversal by imipramine}}, + Volume = {115}, + Year = {1994}} + +@article{Parzen1962, + Author = {Parzen, E.}, + Journal = {Annals of Mathematical Statistics}, + Pages = {1065--1076}, + Title = {On Estimation of a Probability Density Function and Mode}, + Volume = {33}, + Year = {1962}} + +@article{Pascau2009, + Author = {Pascau, J. and Gispert, J. D. and Michaelides, M. and Thanos, P. K. and Volkow, N. D. and Vaquero, J. J. and Soto-Montenegro, M. L. and Desco, M.}, + Journal = {Mol Imaging Biol}, + Pages = {107--113}, + Title = {{{A}utomated method for small-animal {P}{E}{T} image registration with intrinsic validation}}, + Volume = {11}, + Year = {2009}} + +@article{Passetti2008, + Abstract = {A growing literature supports a role for neurocognitive deficits such + as impaired decision-making in the development and maintenance of + addictive behaviour. On the basis of these findings, it has been + suggested that measures of neurocognitive functioning may be applied + to the task of predicting clinical outcome in drug addiction. 
This + in turn may have relevance for differentiating treatment based on + individual patient needs. To explore this hypothesis we obtained + neurocognitive measures of planning, impulsivity and decision-making + from 37 opiate dependent individuals within 6 weeks of starting a + community drug treatment programme and we followed them up 3 months + into the programme. Performance on two tests of decision-making, + but not on tests of planning, motor inhibition, reflection impulsivity + or delay discounting, was found to predict abstinence from illicit + drugs at 3 months with high specificity and moderate sensitivity. + In particular, two thirds of the participants performing normally + on the Cambridge Gamble Task and the Iowa Gambling Task, but none + of those impaired on both, were abstinent from illicit drugs at follow + up. Other neuropsychological, psychiatric or psychosocial factors + measured in this sample did not explain this finding. The results + are discussed in terms of the brain circuitry involved and the potential + implications for the planning of treatment services for opiate dependence.}, + Author = {F. Passetti and L. Clark and M. A. Mehta and E. Joyce and M. King}, + Doi = {10.1016/j.drugalcdep.2007.10.008}, + Institution = {Department of Mental Health Sciences, University of London, London NW3 2PF, UK. fpassett@sgul.ac.uk}, + Journal = {Drug Alcohol Depend}, + Keywords = {Adolescent; Adult; Cognition Disorders, diagnosis/epidemiology; Decision Making; Female; Humans; Impulse Control Disorders, diagnosis/epidemiology; Male; Middle Aged; Neuropsychological Tests; Opioid-Related Disorders, epidemiology; Predictive Value of Tests; Prospective Studies; Questionnaires; Severity of Illness Index}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {1-3}, + Owner = {Woo-Young Ahn}, + Pages = {82--91}, + Pii = {S0376-8716(07)00421-8}, + Pmid = {18063322}, + Timestamp = {2009.08.06}, + Title = {Neuropsychological predictors of clinical outcome in opiate addiction.}, + Url = {http://dx.doi.org/10.1016/j.drugalcdep.2007.10.008}, + Volume = {94}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2007.10.008}} + +@article{Passetti2008a, + Abstract = {A growing literature supports a role for neurocognitive deficits such + as impaired decision-making in the development and maintenance of + addictive behaviour. On the basis of these findings, it has been + suggested that measures of neurocognitive functioning may be applied + to the task of predicting clinical outcome in drug addiction. This + in turn may have relevance for differentiating treatment based on + individual patient needs. To explore this hypothesis we obtained + neurocognitive measures of planning, impulsivity and decision-making + from 37 opiate dependent individuals within 6 weeks of starting a + community drug treatment programme and we followed them up 3 months + into the programme. Performance on two tests of decision-making, + but not on tests of planning, motor inhibition, reflection impulsivity + or delay discounting, was found to predict abstinence from illicit + drugs at 3 months with high specificity and moderate sensitivity. + In particular, two thirds of the participants performing normally + on the Cambridge Gamble Task and the Iowa Gambling Task, but none + of those impaired on both, were abstinent from illicit drugs at follow + up. Other neuropsychological, psychiatric or psychosocial factors + measured in this sample did not explain this finding. 
The results + are discussed in terms of the brain circuitry involved and the potential + implications for the planning of treatment services for opiate dependence.}, + Author = {F. Passetti and L. Clark and M. A. Mehta and E. Joyce and M. King}, + Doi = {10.1016/j.drugalcdep.2007.10.008}, + Institution = {Department of Mental Health Sciences, University of London, London NW3 2PF, UK. fpassett@sgul.ac.uk}, + Journal = {Drug Alcohol Depend}, + Keywords = {Adolescent; Adult; Cognition Disorders, diagnosis/epidemiology; Decision Making; Female; Humans; Impulse Control Disorders, diagnosis/epidemiology; Male; Middle Aged; Neuropsychological Tests; Opioid-Related Disorders, epidemiology; Predictive Value of Tests; Prospective Studies; Questionnaires; Severity of Illness Index}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {1-3}, + Owner = {Woo-Young Ahn}, + Pages = {82--91}, + Pii = {S0376-8716(07)00421-8}, + Pmid = {18063322}, + Timestamp = {2009.08.06}, + Title = {Neuropsychological predictors of clinical outcome in opiate addiction.}, + Url = {http://dx.doi.org/10.1016/j.drugalcdep.2007.10.008}, + Volume = {94}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.drugalcdep.2007.10.008}} + +@article{Passingham2009, + Author = {Passingham, R.}, + Journal = {Current Opinion in Neurobiology}, + Publisher = {Elsevier}, + Title = {{How good is the macaque monkey model of the human brain?}}, + Year = {2009}} + +@article{Patak2007, + Abstract = {This research was designed to determine if, and to what extent, participants + incorporate uncertainty into their valuations for delayed rewards + when completing measures of delay discounting, even though uncertainty + is not specified in the delay-discounting questions. Twenty-four + adolescent participants completed a question-based measure of delay + discounting and immediately following answered questions about perceived + certainties of receiving the delayed rewards. Results showed that + respondents rated the delayed rewards as increasingly uncertain with + longer delays. Also, ratings of uncertainty were correlated with + rate of delay discounting (r=0.55). These findings suggest participants + automatically evaluate delayed rewards as uncertain when using this + assessment procedure. 
The current finding may hold important implications + for future addiction research in interpreting why addicted persons + often discount more by delay than non-addicted controls, i.e., delay + to reward or uncertainty about delayed rewards?}, + Author = {Michele Patak and Brady Reynolds}, + Doi = {10.1016/j.addbeh.2006.03.034}, + Institution = {Columbus Children's Research Institute, Department of Pediatrics, The Ohio State University, 700 Children's Drive, J1401, Columbus, Ohio 43205, USA.}, + Journal = {Addict Behav}, + Keywords = {Adolescent; Adolescent Psychology, methods; Area Under Curve; Choice Behavior; Data Collection; Humans; Questionnaires; Reward; Time Factors; Uncertainty}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {351--357}, + Pii = {S0306-4603(06)00105-5}, + Pmid = {16647214}, + Timestamp = {2009.08.06}, + Title = {Question-based assessments of delay discounting: do respondents spontaneously incorporate uncertainty into their valuations for delayed rewards?}, + Url = {http://dx.doi.org/10.1016/j.addbeh.2006.03.034}, + Volume = {32}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.addbeh.2006.03.034}} + +@article{Patak2007a, + Abstract = {This research was designed to determine if, and to what extent, participants + incorporate uncertainty into their valuations for delayed rewards + when completing measures of delay discounting, even though uncertainty + is not specified in the delay-discounting questions. Twenty-four + adolescent participants completed a question-based measure of delay + discounting and immediately following answered questions about perceived + certainties of receiving the delayed rewards. Results showed that + respondents rated the delayed rewards as increasingly uncertain with + longer delays. Also, ratings of uncertainty were correlated with + rate of delay discounting (r=0.55). These findings suggest participants + automatically evaluate delayed rewards as uncertain when using this + assessment procedure. The current finding may hold important implications + for future addiction research in interpreting why addicted persons + often discount more by delay than non-addicted controls, i.e., delay + to reward or uncertainty about delayed rewards?}, + Author = {Michele Patak and Brady Reynolds}, + Doi = {10.1016/j.addbeh.2006.03.034}, + Institution = {Columbus Children's Research Institute, Department of Pediatrics, The Ohio State University, 700 Children's Drive, J1401, Columbus, Ohio 43205, USA.}, + Journal = {Addict Behav}, + Keywords = {Adolescent; Adolescent Psychology, methods; Area Under Curve; Choice Behavior; Data Collection; Humans; Questionnaires; Reward; Time Factors; Uncertainty}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {351--357}, + Pii = {S0306-4603(06)00105-5}, + Pmid = {16647214}, + Timestamp = {2009.08.06}, + Title = {Question-based assessments of delay discounting: do respondents spontaneously incorporate uncertainty into their valuations for delayed rewards?}, + Url = {http://dx.doi.org/10.1016/j.addbeh.2006.03.034}, + Volume = {32}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.addbeh.2006.03.034}} + +@article{Paton2006, + Author = {Paton, J. J. and Belova, M. A. and Morrison, S. E. and Salzman, C. 
D.}, + Journal = {Nature}, + Month = {Feb}, + Pages = {865--870}, + Title = {{{T}he primate amygdala represents the positive and negative value of visual stimuli during learning}}, + Volume = {439}, + Year = {2006}} + +@article{Patton1995, + Author = {Patton, J. H. and Stanford, M. S. and Barratt, E. S.}, + Journal = {Journal of Clinical Psychology}, + Number = {6}, + Pages = {768--774}, + Publisher = {Wiley}, + Title = {{Factor structure of the Barratt impulsiveness scale}}, + Volume = {51}, + Year = {1995}} + +@article{Patton2002, + Author = {Patton, P. and Belkacem-Boussaid, K. and Anastasio, T. J.}, + Journal = {Cognitive Brain Research}, + Number = {1}, + Pages = {10--19}, + Publisher = {Elsevier}, + Title = {{Multimodality in the superior colliculus: an information theoretic analysis}}, + Volume = {14}, + Year = {2002}} + +@article{Pauler1998, + Author = {Pauler, D. K.}, + Journal = {Biometrika}, + Pages = {13--27}, + Title = {The {S}chwarz Criterion and Related Methods for Normal Linear Models}, + Volume = {85}, + Year = {1998}} + +@article{Paulus2008, + Author = {Paulus, M. P.}, + Journal = {Depress Anxiety}, + Pages = {348--356}, + Title = {{{T}he role of neuroimaging for the diagnosis and treatment of anxiety disorders}}, + Volume = {25}, + Year = {2008}} + +@article{Paulus2007, + Author = {Paulus, M. P.}, + Journal = {Dialogues Clin Neurosci}, + Pages = {379--387}, + Title = {{{N}eural basis of reward and craving--a homeostatic point of view}}, + Volume = {9}, + Year = {2007}} + +@article{Paulus2007a, + Author = {Paulus, M. P.}, + Journal = {Science}, + Month = {Oct}, + Pages = {602--606}, + Title = {{{D}ecision-making dysfunctions in psychiatry--altered homeostatic processing?}}, + Volume = {318}, + Year = {2007}} + +@article{Paulus2005a, + Author = {Paulus, M. P.}, + Journal = {Brain Res Cogn Brain Res}, + Month = {Apr}, + Pages = {2--10}, + Title = {{{N}eurobiology of decision-making: quo vadis?}}, + Volume = {23}, + Year = {2005}} + +@article{Paulus1998a, + Author = {Paulus, M. P. and Bakshi, V. P. and Geyer, M. A.}, + Journal = {Behav. Brain Res.}, + Month = {Aug}, + Pages = {271--280}, + Title = {{{I}solation rearing affects sequential organization of motor behavior in post-pubertal but not pre-pubertal {L}ister and {S}prague-{D}awley rats}}, + Volume = {94}, + Year = {1998}} + +@article{Paulus2003e, + Author = {Paulus, M. P. and Braff, D. L.}, + Journal = {Biol. Psychiatry}, + Month = {Jan}, + Pages = {3--11}, + Title = {{{C}haos and schizophrenia: does the method fit the madness?}}, + Volume = {53}, + Year = {2003}} + +@article{Paulus1993a, + Author = {Paulus, M. P. and Callaway, C. W. and Geyer, M. A.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {187--198}, + Title = {{{Q}uantitative assessment of the microstructure of rat behavior: {I}{I}. {D}istinctive effects of dopamine releasers and uptake inhibitors}}, + Volume = {113}, + Year = {1993}} + +@article{Paulus2005c, + Author = {Paulus, M. P. and Feinstein, J. S. and Castillo, G. and Simmons, A. N. and Stein, M. B.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {282--288}, + Title = {{{D}ose-dependent decrease of activation in bilateral amygdala and insula by lorazepam during emotion processing}}, + Volume = {62}, + Year = {2005}} + +@article{Paulus2005b, + Author = {Paulus, M. P. and Feinstein, J. S. and Leland, D. and Simmons, A.
N.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {607--615}, + Title = {{{S}uperior temporal gyrus and insula provide response and outcome-dependent information during assessment and action selection in a decision-making situation}}, + Volume = {25}, + Year = {2005}} + +@article{Paulus2004, + Author = {Paulus, M. P. and Feinstein, J. S. and Simmons, A. and Stein, M. B.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Pages = {1179--1187}, + Title = {{{A}nterior cingulate activation in high trait anxious subjects is related to altered error processing during decision making}}, + Volume = {55}, + Year = {2004}} + +@article{Paulus2004a, + Author = {Paulus, M. P. and Feinstein, J. S. and Tapert, S. F. and Liu, T. T.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {733--743}, + Title = {{{T}rend detection via temporal difference model predicts inferior prefrontal cortex activation during acquisition of advantageous action selection}}, + Volume = {21}, + Year = {2004}} + +@article{Paulus2003c, + Author = {Paulus, M. P. and Frank, L. and Brown, G. G. and Braff, D. L.}, + Journal = {Neuropsychopharmacology}, + Month = {Apr}, + Pages = {795--806}, + Title = {{{S}chizophrenia subjects show intact success-related neural activation but impaired uncertainty processing during decision-making}}, + Volume = {28}, + Year = {2003}} + +@article{Paulus2006b, + Author = {Paulus, M. P. and Frank, L. R.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {668--677}, + Title = {{{A}nterior cingulate activity modulates nonlinear decision weight function of uncertain prospects}}, + Volume = {30}, + Year = {2006}} + +@article{Paulus2003b, + Author = {Paulus, M. P. and Frank, L. R.}, + Journal = {Neuroreport}, + Month = {Jul}, + Pages = {1311--1315}, + Title = {{{V}entromedial prefrontal cortex activation is critical for preference judgments}}, + Volume = {14}, + Year = {2003}} + +@article{Paulus1993, + Author = {Paulus, M. P. and Geyer, M. A.}, + Journal = {Behav. Brain Res.}, + Month = {Feb}, + Pages = {11--20}, + Title = {{{T}hree independent factors characterize spontaneous rat motor activity}}, + Volume = {53}, + Year = {1993}} + +@article{Paulus1993b, + Author = {Paulus, M. P. and Geyer, M. A.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {177--186}, + Title = {{{Q}uantitative assessment of the microstructure of rat behavior: {I}, f(d), the extension of the scaling hypothesis}}, + Volume = {113}, + Year = {1993}} + +@article{Paulus1992, + Author = {Paulus, M. P. and Geyer, M. A.}, + Journal = {Neuropsychopharmacology}, + Month = {Aug}, + Pages = {15--31}, + Title = {{{T}he effects of {M}{D}{M}{A} and other methylenedioxy-substituted phenylalkylamines on the structure of rat locomotor activity}}, + Volume = {7}, + Year = {1992}} + +@article{Paulus1991, + Author = {Paulus, M. P. and Geyer, M. A.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Pages = {903--919}, + Title = {{{A} scaling approach to find order parameters quantifying the effects of dopaminergic agents on unconditioned motor activity in rats}}, + Volume = {15}, + Year = {1991}} + +@article{Paulus1991a, + Author = {Paulus, M. P. and Geyer, M. A.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {6--16}, + Title = {{{A} temporal and spatial scaling hypothesis for the behavioral effects of psychostimulants}}, + Volume = {104}, + Year = {1991}} + +@article{Paulus1999a, + Author = {Paulus, M. P. and Geyer, M. A. and Braff, D. L.}, + Journal = {Schizophr. 
Res.}, + Month = {Jan}, + Pages = {69--75}, + Title = {{{L}ong-range correlations in choice sequences of schizophrenic patients}}, + Volume = {35}, + Year = {1999}} + +@article{Paulus1996, + Author = {Paulus, M. P. and Geyer, M. A. and Braff, D. L.}, + Journal = {Am J Psychiatry}, + Month = {May}, + Pages = {714--717}, + Title = {{{U}se of methods from chaos theory to quantify a fundamental dysfunction in the behavioral organization of schizophrenic patients}}, + Volume = {153}, + Year = {1996}} + +@article{Paulus1994, + Author = {Paulus, M. P. and Geyer, M. A. and Braff, D. L.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Month = {Nov}, + Pages = {1169--1185}, + Title = {{{T}he assessment of sequential response organization in schizophrenic and control subjects}}, + Volume = {18}, + Year = {1994}} + +@article{Paulus1990, + Author = {Paulus, M. P. and Geyer, M. A. and Gold, L. H. and Mandell, A. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {723--727}, + Title = {{{A}pplication of entropy measures derived from the ergodic theory of dynamical systems to rat locomotor behavior}}, + Volume = {87}, + Year = {1990}} + +@article{Paulus1998, + Author = {Paulus, M. P. and Geyer, M. A. and Sternberg, E.}, + Journal = {Physiol. Behav.}, + Month = {Dec}, + Pages = {601--606}, + Title = {{{D}ifferential movement patterns but not amount of activity in unconditioned motor behavior of {F}ischer, {L}ewis, and {S}prague-{D}awley rats}}, + Volume = {65}, + Year = {1998}} + +@article{Paulus2002a, + Author = {Paulus, M. P. and Hozack, N. and Frank, L. and Brown, G. G.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {836--846}, + Title = {{{E}rror rate and outcome predictability affect neural activation in prefrontal cortex and anterior cingulate during decision-making}}, + Volume = {15}, + Year = {2002}} + +@article{Paulus2003d, + Author = {Paulus, M. P. and Hozack, N. and Frank, L. and Brown, G. G. and Schuckit, M. A.}, + Journal = {Biol. Psychiatry}, + Month = {Jan}, + Pages = {65--74}, + Title = {{{D}ecision making by methamphetamine-dependent subjects is associated with error-rate-independent decrease in prefrontal and parietal activation}}, + Volume = {53}, + Year = {2003}} + +@article{Paulus2001a, + Author = {Paulus, M. P. and Hozack, N. and Zauscher, B. and McDowell, J. E. and Frank, L. and Brown, G. G. and Braff, D. L.}, + Journal = {Neuroimage}, + Month = {Jan}, + Pages = {91--100}, + Title = {{{P}refrontal, parietal, and temporal cortex networks underlie decision-making in the presence of uncertainty}}, + Volume = {13}, + Year = {2001}} + +@article{Paulus2002b, + Author = {Paulus, M. P. and Hozack, N. E. and Zauscher, B. E. and Frank, L. and Brown, G. G. and Braff, D. L. and Schuckit, M. A.}, + Journal = {Neuropsychopharmacology}, + Month = {Jan}, + Pages = {53--63}, + Title = {{{B}ehavioral and functional neuroimaging evidence for prefrontal dysfunction in methamphetamine-dependent subjects}}, + Volume = {26}, + Year = {2002}} + +@article{Paulus2002, + Author = {Paulus, M. P. and Hozack, N. E. and Zauscher, B. E. and Frank, L. and Brown, G. G. and McDowell, J. and Braff, D. L.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Pages = {995--1004}, + Title = {{{P}arietal dysfunction is associated with increased outcome-related decision-making in schizophrenia patients}}, + Volume = {51}, + Year = {2002}} + +@article{Paulus2008a, + Author = {Paulus, M. P. and Lovero, K. L. and Wittmann, M. and Leland, D. S.}, + Journal = {Biol. 
Psychiatry}, + Month = {Jun}, + Pages = {1054--1060}, + Title = {{{R}educed behavioral and neural activation in stimulant users to different error rates during decision making}}, + Volume = {63}, + Year = {2008}} + +@article{Paulus1999, + Author = {Paulus, M. P. and Perry, W. and Braff, D. L.}, + Journal = {Biol. Psychiatry}, + Month = {Sep}, + Pages = {662--670}, + Title = {{{T}he nonlinear, complex sequential organization of behavior in schizophrenic patients: neurocognitive strategies and clinical correlations}}, + Volume = {46}, + Year = {1999}} + +@article{Paulus2009, + Author = {Paulus, M. P. and Potterat, E. G. and Taylor, M. K. and Van Orden, K. F. and Bauman, J. and Momen, N. and Padilla, G. A. and Swain, J. L.}, + Journal = {Neurosci Biobehav Rev}, + Month = {Jul}, + Pages = {1080--1088}, + Title = {{{A} neuroscience approach to optimizing brain resources for human performance in extreme environments}}, + Volume = {33}, + Year = {2009}} + +@article{Paulus2001, + Author = {Paulus, M. P. and Rapaport, M. H. and Braff, D. L.}, + Journal = {Biol. Psychiatry}, + Month = {Jan}, + Pages = {71--77}, + Title = {{{T}rait contributions of complex dysregulated behavioral organization in schizophrenic patients}}, + Volume = {49}, + Year = {2001}} + +@article{Paulus2003, + Author = {Paulus, M. P. and Rogalsky, C. and Simmons, A. and Feinstein, J.S. and Stein, M.B.}, + Journal = {Neuroimage}, + Number = {4}, + Pages = {1439--1448}, + Title = {{Increased activation in the right insula during risk-taking decision making is related to harm avoidance and neuroticism}}, + Volume = {19}, + Year = {2003}} + +@article{Paulus2003a, + Author = {Paulus, M. P. and Rogalsky, C. and Simmons, A. and Feinstein, J. S. and Stein, M. B.}, + Journal = {Neuroimage}, + Month = {Aug}, + Pages = {1439--1448}, + Title = {{{I}ncreased activation in the right insula during risk-taking decision making is related to harm avoidance and neuroticism}}, + Volume = {19}, + Year = {2003}} + +@article{Paulus2007b, + Author = {Paulus, M. P. and Stein, M. B.}, + Journal = {Neuropsychol Rev}, + Month = {Jun}, + Pages = {179--188}, + Title = {{{R}ole of functional magnetic resonance imaging in drug discovery}}, + Volume = {17}, + Year = {2007}} + +@article{Paulus2006a, + Author = {Paulus, M. P. and Stein, M. B.}, + Journal = {Biol. Psychiatry}, + Month = {Aug}, + Pages = {383--387}, + Title = {{{A}n insular view of anxiety}}, + Volume = {60}, + Year = {2006}} + +@article{Paulus2006, + Author = {Paulus, M. P. and Tapert, S. F. and Pulido, C. and Schuckit, M. A.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {1363--1371}, + Title = {{{A}lcohol attenuates load-related activation during a working memory task: relation to level of response to alcohol}}, + Volume = {30}, + Year = {2006}} + +@article{Paulus2005, + Author = {Paulus, M. P. and Tapert, S. F. and Schuckit, M. A.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jul}, + Pages = {761--768}, + Title = {{{N}eural activation patterns of methamphetamine-dependent subjects during decision making predict relapse}}, + Volume = {62}, + Year = {2005}} + +@article{Paulus2000, + Author = {Paulus, M. P. and Varty, G. B. and Geyer, M. A.}, + Journal = {Physiol. Behav.}, + Month = {Jan}, + Pages = {389--394}, + Title = {{{T}he genetic liability to stress and postweaning isolation have a competitive influence on behavioral organization in rats}}, + Volume = {68}, + Year = {2000}} + +@article{Payer2008, + Author = {Payer, D. E. and Lieberman, M. D. and Monterosso, J. R. 
and Xu, J. and Fong, T. W. and London, E. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Jan}, + Pages = {93--102}, + Title = {{{D}ifferences in cortical activity between methamphetamine-dependent and healthy individuals performing a facial affect matching task}}, + Volume = {93}, + Year = {2008}} + +@article{Payne2005, + Author = {Payne, John W.}, + Journal = {The Journal of Risk and Uncertainty}, + Number = {1}, + Owner = {WooYoung Ahn}, + Pages = {5--19}, + Timestamp = {2008.03.26}, + Title = {It is whether you win or lose: {T}he importance of the overall probabilities of winning or losing in risky choice.}, + Volume = {30}, + Year = {2005}} + +@article{Peakman2003, + Author = {Peakman, M. C. and Colby, C. and Perrotti, L. I. and Tekumalla, P. and Carle, T. and Ulery, P. and Chao, J. and Duman, C. and Steffen, C. and Monteggia, L. and Allen, M. R. and Stock, J. L. and Duman, R. S. and McNeish, J. D. and Barrot, M. and Self, D. W. and Nestler, E. J. and Schaeffer, E.}, + Journal = {Brain Res.}, + Month = {Apr}, + Pages = {73--86}, + Title = {{{I}nducible, brain region-specific expression of a dominant negative mutant of c-{J}un in transgenic mice decreases sensitivity to cocaine}}, + Volume = {970}, + Year = {2003}} + +@article{Peluso2007, + Abstract = {BACKGROUND: Impulsivity is a key component of the manic behavior of + bipolar disorder and is reported to occur in bipolar patients as + a stable characteristic, i.e. a trait. Nevertheless, impulsivity + has not been widely studied in depressed bipolar patients. We assessed + impulsivity in depressed and euthymic bipolar and unipolar patients + and healthy controls. We hypothesized that bipolar subjects would + have higher levels of trait impulsivity than the comparison groups. + METHODS: Twenty-four depressed bipolar, 24 depressed unipolar, 12 + euthymic bipolar, and 10 euthymic unipolar patients, as well as 51 + healthy subjects were evaluated with the Barratt Impulsiveness Scale + (BIS). Analysis of covariance with age and sex as covariates was + used to compare mean group differences. RESULTS: Depressed bipolar, + euthymic bipolar, and depressed unipolar patients did not differ, + and showed greater impulsivity than healthy controls on all of the + BIS scales. Euthymic unipolar patients scored higher than healthy + controls only on motor impulsivity. LIMITATIONS: Higher number of + past substance abusers in the bipolar groups, and no control for + anxiety and personality disorders, as well as small sample sizes, + limit the reach of this study. CONCLUSIONS: This study replicates + prior findings of stable trait impulsivity in bipolar disorder patients, + and extends them, confirming that this trait can be demonstrated + in depressed patients, as well as manic and euthymic ones. Trait + impulsivity may be the result of repeated mood episodes or be present + prior to their onset, either way it would influence the clinical + presentation of bipolar disorder.}, + Author = {M. A. M. Peluso and J. P. Hatch and D. C. Glahn and E. S. Monkul and M. Sanches and P. Najt and C. L. Bowden and E. S. Barratt and J. C. Soares}, + Doi = {10.1016/j.jad.2006.09.037}, + Institution = {Department of Psychiatry, The University of Texas Health Science Center at San Antonio, USA.
mampeluso@hotmail.com}, + Journal = {J Affect Disord}, + Keywords = {Adolescent; Adult; Bipolar Disorder, diagnosis/epidemiology/psychology; Diagnostic and Statistical Manual of Mental Disorders; Dysthymic Disorder, diagnosis/epidemiology/psychology; Female; Humans; Impulse Control Disorders, diagnosis/epidemiology/psychology; Male; Middle Aged; Mood Disorders, diagnosis/epidemiology/psychology; Prevalence; Questionnaires; Severity of Illness Index}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {1-3}, + Owner = {Young}, + Pages = {227--231}, + Pii = {S0165-0327(06)00438-1}, + Pmid = {17097740}, + Timestamp = {2010.05.01}, + Title = {Trait impulsivity in patients with mood disorders.}, + Url = {http://dx.doi.org/10.1016/j.jad.2006.09.037}, + Volume = {100}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.jad.2006.09.037}} + +@article{Peng1993, + Author = {Peng, C. K. and Mietus, J. and Hausdorff, J. M. and Havlin, S. and Stanley, H. E. and Goldberger, A. L.}, + Journal = {Physical Review Letters}, + Pages = {1343--1346}, + Title = {Long--range Anti--correlations and Non--{G}aussian Behavior of the Heartbeat}, + Volume = {70}, + Year = {1993}} + +@article{Perez-Edgar2007, + Author = {Perez-Edgar, K. and Roberson-Nay, R. and Hardin, M. G. and Poeth, K. and Guyer, A. E. and Nelson, E. E. and McClure, E. B. and Henderson, H. A. and Fox, N. A. and Pine, D. S. and Ernst, M.}, + Journal = {Neuroimage}, + Month = {May}, + Pages = {1538--1546}, + Title = {{{A}ttention alters neural responses to evocative faces in behaviorally inhibited adolescents}}, + Volume = {35}, + Year = {2007}} + +@article{Perrotti2005, + Author = {Perrotti, L. I. and Bolanos, C. A. and Choi, K. H. and Russo, S. J. and Edwards, S. and Ulery, P. G. and Wallace, D. L. and Self, D. W. and Nestler, E. J. and Barrot, M.}, + Journal = {Eur. J. Neurosci.}, + Month = {May}, + Pages = {2817--2824}, + Title = {{{D}elta{F}os{B} accumulates in a {G}{A}{B}{A}ergic cell population in the posterior tail of the ventral tegmental area after psychostimulant treatment}}, + Volume = {21}, + Year = {2005}} + +@article{Perry2008, + Abstract = {BACKGROUND: Impulsivity is a multifaceted construct that has recently + been recognized as a factor contributing to enhanced vulnerability + to drug abuse. OBJECTIVES: In the present review, we focus on two + facets of impulsivity (and tasks that measure them): (1) impulsive + choice (delay discounting task) and (2) inhibitory failure (go/no-go, + stop signal reaction time, and five-choice serial reaction time tasks). + We also describe how performance on each of these tasks is associated + with drug-related behavior during phases of drug abuse that capture + the essential features of addiction (acquisition, escalation, and + reinstatement of drug-seeking after drug access has terminated). + Three hypotheses (H) regarding the relationship between impulsivity + and drug abuse are discussed: (1) increased levels of impulsivity + lead to drug abuse (H1), (2) drugs of abuse increase impulsivity + (H2), and (3) impulsivity and drug abuse are associated through a + common third factor (H3). CONCLUSION: Impulsivity expressed as impulsive + choice or inhibitory failure plays a role in several key transition + phases of drug abuse. There is evidence to support all three nonexclusive + hypotheses. Increased levels of impulsivity lead to acquisition of + drug abuse (H1) and subsequent escalation or dysregulation of drug + intake. 
Drugs of abuse may increase impulsivity (H2), which is an + additional contributor to escalation/dysregulation. Abstinence, relapse, + and treatment may be influenced by both H1 and H2. In addition, there + is a relationship between impulsivity and other drug abuse vulnerability + factors, such as sex, hormonal status, reactivity to nondrug rewards, + and early environmental experiences that may impact drug intake during + all phases of addiction (H3). Relating drug abuse and impulsivity + in phases of addiction via these three hypotheses provides a heuristic + model from which future experimental questions can be addressed.}, + Author = {Jennifer L Perry and Marilyn E Carroll}, + Doi = {10.1007/s00213-008-1173-0}, + Institution = {Minneapolis Medical Research Foundation, S-3, 860, 914 South 8th Street, Minneapolis, MN 55404, USA. perry050@umm.edu}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Animals; Behavior, Addictive, etiology/physiopathology; Humans; Impulsive Behavior, etiology/physiopathology; Inhibition (Psychology); Reaction Time; Reward; Risk Factors; Sex Factors; Substance-Related Disorders, etiology/physiopathology; Task Performance and Analysis}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {1--26}, + Pmid = {18600315}, + Timestamp = {2009.08.06}, + Title = {The role of impulsive behavior in drug abuse.}, + Url = {http://dx.doi.org/10.1007/s00213-008-1173-0}, + Volume = {200}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-008-1173-0}} + +@article{Perry2008a, + Abstract = {BACKGROUND: Impulsivity is a multifaceted construct that has recently + been recognized as a factor contributing to enhanced vulnerability + to drug abuse. OBJECTIVES: In the present review, we focus on two + facets of impulsivity (and tasks that measure them): (1) impulsive + choice (delay discounting task) and (2) inhibitory failure (go/no-go, + stop signal reaction time, and five-choice serial reaction time tasks). + We also describe how performance on each of these tasks is associated + with drug-related behavior during phases of drug abuse that capture + the essential features of addiction (acquisition, escalation, and + reinstatement of drug-seeking after drug access has terminated). + Three hypotheses (H) regarding the relationship between impulsivity + and drug abuse are discussed: (1) increased levels of impulsivity + lead to drug abuse (H1), (2) drugs of abuse increase impulsivity + (H2), and (3) impulsivity and drug abuse are associated through a + common third factor (H3). CONCLUSION: Impulsivity expressed as impulsive + choice or inhibitory failure plays a role in several key transition + phases of drug abuse. There is evidence to support all three nonexclusive + hypotheses. Increased levels of impulsivity lead to acquisition of + drug abuse (H1) and subsequent escalation or dysregulation of drug + intake. Drugs of abuse may increase impulsivity (H2), which is an + additional contributor to escalation/dysregulation. Abstinence, relapse, + and treatment may be influenced by both H1 and H2. In addition, there + is a relationship between impulsivity and other drug abuse vulnerability + factors, such as sex, hormonal status, reactivity to nondrug rewards, + and early environmental experiences that may impact drug intake during + all phases of addiction (H3). 
Relating drug abuse and impulsivity + in phases of addiction via these three hypotheses provides a heuristic + model from which future experimental questions can be addressed.}, + Author = {Jennifer L Perry and Marilyn E Carroll}, + Doi = {10.1007/s00213-008-1173-0}, + Institution = {Minneapolis Medical Research Foundation, S-3, 860, 914 South 8th Street, Minneapolis, MN 55404, USA. perry050@umm.edu}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Animals; Behavior, Addictive, etiology/physiopathology; Humans; Impulsive Behavior, etiology/physiopathology; Inhibition (Psychology); Reaction Time; Reward; Risk Factors; Sex Factors; Substance-Related Disorders, etiology/physiopathology; Task Performance and Analysis}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {1--26}, + Pmid = {18600315}, + Timestamp = {2009.08.06}, + Title = {The role of impulsive behavior in drug abuse.}, + Url = {http://dx.doi.org/10.1007/s00213-008-1173-0}, + Volume = {200}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-008-1173-0}} + +@article{Pessiglione2006, + Author = {Pessiglione, M. and Seymour, B. and Flandin, G. and Dolan, R.J. and Frith, C.D.}, + Journal = {Nature}, + Number = {7106}, + Pages = {1042}, + Title = {{Dopamine-dependent prediction errors underpin reward-seeking behaviour in humans}}, + Volume = {442}, + Year = {2006}} + +@article{Peterson1998, + Author = {Peterson, B. S. and Leckman, J. F.}, + Journal = {Biological Psychiatry}, + Pages = {1337--1348}, + Title = {The Temporal Dynamics of tics in {G}illes de la {T}ourette Syndrome}, + Volume = {44}, + Year = {1998}} + +@article{Peterson1992, + Author = {Peterson, J. B. and Finn, P. R. and Pihl, R. O.}, + Journal = {J. Stud. Alcohol}, + Month = {Mar}, + Pages = {154--160}, + Title = {{{C}ognitive dysfunction and the inherited predisposition to alcoholism}}, + Volume = {53}, + Year = {1992}} + +@article{Peterson1996, + Author = {Peterson, J. B. and Pihl, R. O. and Gianoulakis, C. and Conrod, P. and Finn, P. R. and Stewart, S. H. and LeMarquand, D. G. and Bruce, K. R.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {1542--1552}, + Title = {{{E}thanol-induced change in cardiac and endogenous opiate function and risk for alcoholism}}, + Volume = {20}, + Year = {1996}} + +@article{Peterson1993, + Author = {Peterson, J. B. and Pihl, R. O. and Seguin, J. R. and Finn, P. R. and Stewart, S. H.}, + Journal = {J Psychiatry Neurosci}, + Month = {Jul}, + Pages = {190--198}, + Title = {{{H}eart-rate reactivity and alcohol consumption among sons of male alcoholics and sons of non-alcoholics}}, + Volume = {18}, + Year = {1993}} + +@article{Peterson1991, + Author = {Peterson, J. B. and Weiner, D. and Pihl, R. O. and Finn, P. R. and Earleywine, M.}, + Journal = {Addict Behav}, + Pages = {549--554}, + Title = {{{T}he {T}ridimensional {P}ersonality {Q}uestionnaire and the inherited risk for alcoholism}}, + Volume = {16}, + Year = {1991}} + +@article{Peterson2005, + Author = {Peterson, R. L.}, + Journal = {Brain Res. Bull.}, + Month = {Nov}, + Pages = {391--397}, + Title = {{{T}he neuroscience of investing: f{M}{R}{I} of the reward system}}, + Volume = {67}, + Year = {2005}} + +@article{Peto1976, + Author = {Peto, R. and Pike, M. C. and Armitage, P. and Breslow, N. E. and Cox, D. R. and Howard, S. V. and Mantel, N. and McPherson, K. and Peto, J. and Smith, P. 
G.}, + Journal = {British Journal of Cancer}, + Pages = {585--612}, + Title = {Design and Analysis of Randomized Clinical Trials Requiring Prolonged Observation of Each Patient, {I}: {I}ntroduction and Design}, + Volume = {34}, + Year = {1976}} + +@article{Petry2006, + Author = {Nancy Petry}, + Doi = {10.1111/j.1360-0443.2005.01307.x}, + Journal = {Addiction}, + Keywords = {Age of Onset; Alcoholism, psychology; Humans; Impulsive Behavior; Reward}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {292; author reply 292--294}, + Pii = {ADD1307}, + Pmid = {16445559}, + Timestamp = {2009.08.06}, + Title = {Early-onset alcoholism: a separate or unique predictor of delay discounting? Comment on Dom et al. (2006).}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2005.01307.x}, + Volume = {101}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2005.01307.x}} + +@article{Petry2006a, + Author = {Nancy Petry}, + Doi = {10.1111/j.1360-0443.2005.01307.x}, + Journal = {Addiction}, + Keywords = {Age of Onset; Alcoholism, psychology; Humans; Impulsive Behavior; Reward}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {292; author reply 292--294}, + Pii = {ADD1307}, + Pmid = {16445559}, + Timestamp = {2009.08.06}, + Title = {Early-onset alcoholism: a separate or unique predictor of delay discounting? Comment on Dom et al. (2006).}, + Url = {http://dx.doi.org/10.1111/j.1360-0443.2005.01307.x}, + Volume = {101}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1360-0443.2005.01307.x}} + +@article{Petry2001, + Author = {Petry, N. M.}, + Journal = {Drug Alcohol Depend}, + Month = {Jun}, + Pages = {29--38}, + Title = {{{S}ubstance abuse, pathological gambling, and impulsiveness}}, + Volume = {63}, + Year = {2001}} + +@article{Petry2001a, + Abstract = {Pathological gambling is classified as a disorder of impulse control, + yet little research has evaluated behavioral indices of impulsivity + in gamblers. The rates at which rewards delayed in time are subjectively + devalued may be a behavioral marker of impulsivity. This study evaluated + delay discounting in 60 pathological gamblers and 26 control participants. + Gamblers were divided into those with (n = 21) and without (n = 39) + substance use disorders. A hypothetical $1,000 reward was delayed + at intervals ranging from 6 hr to 25 years, and immediate rewards + varied from $1 to $999. Pathological gamblers discounted delayed + rewards at higher rates than control participants, and gamblers with + substance use disorders discounted delayed rewards at higher rates + than non-substance-abusing gamblers. These data provide further evidence + that rapid discounting of delayed rewards may be a feature central + to impulse control and addictive disorders, including pathological + gambling.}, + Author = {N. M. Petry}, + Institution = {Department of Psychiatry, University of Connecticut School of Medicine, Farmington 06030-3944, USA.
petry@psychiatry.uchc.edu}, + Journal = {J Abnorm Psychol}, + Keywords = {Adult; Behavior, Addictive, psychology; Case-Control Studies; Female; Gambling, psychology; Humans; Impulse Control Disorders, complications/psychology; Impulsive Behavior; Male; Reward; Substance-Related Disorders, complications/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {482--487}, + Pmid = {11502091}, + Timestamp = {2009.08.06}, + Title = {Pathological gamblers, with and without substance use disorders, discount delayed rewards at high rates.}, + Volume = {110}, + Year = {2001}} + +@article{Petry2001b, + Abstract = {RATIONALE: Impulsivity is implicated in alcohol dependence, and discounting + of delayed rewards may be an objective indicator of impulsiveness. + OBJECTIVES: This study evaluated delay discounting functions in alcoholics + and controls. It compared discounting rates between different magnitudes + ($1000 and $100) and different types (money and alcohol) of rewards. + METHODS: Active alcoholics (n = 19), currently abstinent alcoholics + (n = 12) and controls (n = 15) indicated preferences for immediate + versus delayed rewards using a titration procedure that determined + indifference points at various delays. Four conditions were presented, + and the delayed rewards in the four conditions were $1000, $100, + 150 bottles of an alcoholic beverage, and 15 bottles of an alcoholic + beverage. RESULTS: In all three groups across all four conditions, + hyperbolic discounting functions provided a good fit of the data. + Linear contrasts, predicting the most rapid discounting rates in + active alcoholics, intermediary rates in currently abstinent alcoholics, + and the least rapid rates in controls, were significant for three + of the four conditions. Alcohol was discounted more rapidly than + money. CONCLUSIONS: These data provide further evidence of more rapid + discounting of delayed rewards in alcohol abusers compared to controls, + and especially steep discounting among current users. Rapid discounting + of delayed rewards may be a feature related to addictive disorders. + A better understanding of how delaying rewards in time impacts their + value may have implications for treatment.}, + Author = {N. M. Petry}, + Institution = {Department of Psychiatry, University of Connecticut Health Center, 263 Farmington Avenue, Farmington, CT 06030-1517, USA. petry@psychiatry.uchc.edu}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Adult; Alcoholic Beverages, economics; Alcoholism, economics/psychology; Analysis of Variance; Chi-Square Distribution; Female; Humans; Impulsive Behavior, economics/psychology; Male; Middle Aged; Reward; Temperance, economics/psychology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Mar}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {243--250}, + Pmid = {11351931}, + Timestamp = {2009.08.06}, + Title = {Delay discounting of money and alcohol in actively using alcoholics, currently abstinent alcoholics, and controls.}, + Volume = {154}, + Year = {2001}} + +@article{Petry2001c, + Abstract = {RATIONALE: Impulsivity is implicated in alcohol dependence, and discounting + of delayed rewards may be an objective indicator of impulsiveness. + OBJECTIVES: This study evaluated delay discounting functions in alcoholics + and controls. It compared discounting rates between different magnitudes + ($1000 and $100) and different types (money and alcohol) of rewards. 
+ METHODS: Active alcoholics (n = 19), currently abstinent alcoholics + (n = 12) and controls (n = 15) indicated preferences for immediate + versus delayed rewards using a titration procedure that determined + indifference points at various delays. Four conditions were presented, + and the delayed rewards in the four conditions were $1000, $100, + 150 bottles of an alcoholic beverage, and 15 bottles of an alcoholic + beverage. RESULTS: In all three groups across all four conditions, + hyperbolic discounting functions provided a good fit of the data. + Linear contrasts, predicting the most rapid discounting rates in + active alcoholics, intermediary rates in currently abstinent alcoholics, + and the least rapid rates in controls, were significant for three + of the four conditions. Alcohol was discounted more rapidly than + money. CONCLUSIONS: These data provide further evidence of more rapid + discounting of delayed rewards in alcohol abusers compared to controls, + and especially steep discounting among current users. Rapid discounting + of delayed rewards may be a feature related to addictive disorders. + A better understanding of how delaying rewards in time impacts their + value may have implications for treatment.}, + Author = {N. M. Petry}, + Institution = {Department of Psychiatry, University of Connecticut Health Center, 263 Farmington Avenue, Farmington, CT 06030-1517, USA. petry@psychiatry.uchc.edu}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Adult; Alcoholic Beverages, economics; Alcoholism, economics/psychology; Analysis of Variance; Chi-Square Distribution; Female; Humans; Impulsive Behavior, economics/psychology; Male; Middle Aged; Reward; Temperance, economics/psychology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Mar}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {243--250}, + Pmid = {11351931}, + Timestamp = {2009.08.06}, + Title = {Delay discounting of money and alcohol in actively using alcoholics, currently abstinent alcoholics, and controls.}, + Volume = {154}, + Year = {2001}} + +@article{Petry2001d, + Abstract = {Pathological gambling is classified as a disorder of impulse control, + yet little research has evaluated behavioral indices of impulsivity + in gamblers. The rates at which rewards delayed in time are subjectively + devalued may be a behavioral marker of impulsivity. This study evaluated + delay discounting in 60 pathological gamblers and 26 control participants. + Gamblers were divided into those with (n = 21) and without (n = 39) + substance use disorders. A hypothetical $1,000 reward was delayed + at intervals ranging from 6 hr to 25 years, and immediate rewards + varied from $1 to $999. Pathological gamblers discounted delayed + rewards at higher rates than control participants, and gamblers with + substance use disorders discounted delayed rewards at higher rates + than non-substance-abusing gamblers. These data provide further evidence + that rapid discounting of delayed rewards may be a feature central + to impulse control and addictive disorders, including pathological + gambling.}, + Author = {N. M. Petry}, + Institution = {Department of Psychiatry, University of Connecticut School of Medicine, Farmington 06030-3944, USA. 
petry@psychiatry.uchc.edu}, + Journal = {J Abnorm Psychol}, + Keywords = {Adult; Behavior, Addictive, psychology; Case-Control Studies; Female; Gambling, psychology; Humans; Impulse Control Disorders, complications/psychology; Impulsive Behavior; Male; Reward; Substance-Related Disorders, complications/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {482--487}, + Pmid = {11502091}, + Timestamp = {2009.08.06}, + Title = {Pathological gamblers, with and without substance use disorders, discount delayed rewards at high rates.}, + Volume = {110}, + Year = {2001}} + +@article{Petry1998, + Abstract = {AIMS: To investigate whether heroin addicts demonstrate shortened + time horizons and decreased sensitivity to future consequences of + their behavior compared to non-drug users. DESIGN SETTING AND PARTICIPANTS: + Thirty-four heroin addicts enrolled in a buprenorphine treatment + clinic and 59 non-drug-using controls completed a personality questionnaire + and two laboratory tasks. MEASUREMENTS: The Stanford Time Perception + Inventory (STPI) personality questionnaire assessed orientation to + the future, and the Future Time Perspective (FTP) task elicited predictions + of the timing and ordering of future events. The Bechara card task + measured preferences for decks of cards that range in magnitude and + probability of delayed and immediate rewards and punishers. FINDINGS: + Heroin addicts scored significantly lower than controls on the STPI + scale indicative of future orientation. In the FTP, heroin addicts + were less likely to predict events far into the future and less likely + to systematically organize events in the future. In the card task, + heroin addicts were less likely to win money than controls. They + were more likely to play from a deck that contained greater immediate + gains but that resulted in large, delayed punishers and overall net + losses. They also made fewer selections from a deck that provided + an overall net gain via relatively low immediate rewards and frequent + small punishments. CONCLUSIONS: Shortened time horizons and decreased + sensitivity to delayed consequences may explain drug abusers' persistent + use of drugs, despite the long-term negative consequences associated + with drug use.}, + Author = {N. M. Petry and W. K. Bickel and M. Arnett}, + Institution = {University of Vermont, Department of Psychiatry, Burlington, USA.}, + Journal = {Addiction}, + Keywords = {Adolescent; Adult; Decision Making; Female; Forecasting; Heroin Dependence, psychology; Humans; Male; Middle Aged; Time Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {729--738}, + Pmid = {9692271}, + Timestamp = {2009.08.06}, + Title = {Shortened time horizons and insensitivity to future consequences in heroin addicts.}, + Volume = {93}, + Year = {1998}} + +@article{Petry1999, + Abstract = {This study evaluated delay discounting functions of substance abusing + problem gamblers, substance abusing non-problem gamblers, and non-problem + gambling/non-substance abusing controls. Subjects chose between hypothetical + monetary amounts available after various delays or immediately. In + one condition, a US$1000 reward was delayed at intervals ranging + from 6 h to 25 years.
At each delay interval, the immediately available + rewards varied from US$1 to US$999 until choices reflected indifference + between the smaller immediate and larger delayed rewards. In a second + condition, the delayed reward was US$100, and immediate rewards + varied from US$0.10 to US$99.90. In all three groups, hyperbolic + discounting functions provided a good fit of the data, and the smaller + reward was discounted at a higher rate than the larger reward. Substance + abusers discounted delayed rewards at significantly higher rates + than non-substance abusing controls, and problem gambling substance + abusers discounted delayed rewards at higher rates than their non-problem + gambling substance abusing counterparts. Discounting rates were not + associated with types or recency of drug use. These results provide + further evidence of more rapid discounting of delayed rewards in + substance abusers, and especially among substance abusers with a + comorbid gambling problem. Rapid discounting of delayed rewards may + be a feature central to impulse control and addictive disorders.}, + Author = {N. M. Petry and T. Casarella}, + Institution = {Department of Psychiatry, University of Connecticut School of Medicine, Farmington 06030-2103, USA.}, + Journal = {Drug Alcohol Depend}, + Keywords = {Adult; Female; Gambling, psychology; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Reward; Substance-Related Disorders, psychology/urine; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {25--32}, + Pii = {S0376-8716(99)00010-1}, + Pmid = {10462089}, + Timestamp = {2009.08.06}, + Title = {Excessive discounting of delayed rewards in substance abusers with gambling problems.}, + Volume = {56}, + Year = {1999}} + +@article{Petry1999a, + Abstract = {This study evaluated delay discounting functions of substance abusing + problem gamblers, substance abusing non-problem gamblers, and non-problem + gambling/non-substance abusing controls. Subjects chose between hypothetical + monetary amounts available after various delays or immediately. In + one condition, a US$1000 reward was delayed at intervals ranging + from 6 h to 25 years. At each delay interval, the immediately available + rewards varied from US$1 to US$999 until choices reflected indifference + between the smaller immediate and larger delayed rewards. In a second + condition, the delayed reward was US$100, and immediate rewards + varied from US$0.10 to US$99.90. In all three groups, hyperbolic + discounting functions provided a good fit of the data, and the smaller + reward was discounted at a higher rate than the larger reward. Substance + abusers discounted delayed rewards at significantly higher rates + than non-substance abusing controls, and problem gambling substance + abusers discounted delayed rewards at higher rates than their non-problem + gambling substance abusing counterparts. Discounting rates were not + associated with types or recency of drug use. These results provide + further evidence of more rapid discounting of delayed rewards in + substance abusers, and especially among substance abusers with a + comorbid gambling problem. Rapid discounting of delayed rewards may + be a feature central to impulse control and addictive disorders.}, + Author = {N. M. Petry and T.
Casarella}, + Institution = {Department of Psychiatry, University of Connecticut School of Medicine, Farmington 06030-2103, USA.}, + Journal = {Drug Alcohol Depend}, + Keywords = {Adult; Female; Gambling, psychology; Humans; Impulsive Behavior, psychology; Male; Middle Aged; Reward; Substance-Related Disorders, psychology/urine; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {25--32}, + Pii = {S0376-8716(99)00010-1}, + Pmid = {10462089}, + Timestamp = {2009.08.06}, + Title = {Excessive discounting of delayed rewards in substance abusers with gambling problems.}, + Volume = {56}, + Year = {1999}} + +@article{Pettinati2003, + Author = {Pettinati, H. M. and Monterosso, J. and Lipkin, C. and Volpicelli, J. R.}, + Journal = {Am J Addict}, + Pages = {324--335}, + Title = {{{P}atient attitudes toward treatment predict attendance in clinical pharmacotherapy trials of alcohol and drug treatment}}, + Volume = {12}, + Year = {2003}} + +@article{Pew1969, + Author = {Pew, R. W.}, + Journal = {Acta Psychologica}, + Pages = {16--26}, + Title = {The Speed--Accuracy Operating Characteristic}, + Volume = {30}, + Year = {1969}} + +@article{Peyron1999, + Author = {Peyron, R. and García-Larrea, L. and Grégoire, M. C. and Costes, N. and Convers, P. and Lavenne, F. and Mauguière, F. and Michel, D. and Laurent, B.}, + Journal = {Brain}, + Pages = {1765--1780}, + Title = {{{H}aemodynamic brain responses to acute pain in humans: sensory and attentional networks}}, + Volume = {122 (Pt 9)}, + Year = {1999}} + +@article{Peyron2000, + Author = {Peyron, R. and Laurent, B. and García-Larrea, L.}, + Journal = {Neurophysiol Clin}, + Pages = {263--288}, + Title = {{{F}unctional imaging of brain responses to pain. {A} review and meta-analysis (2000)}}, + Volume = {30}, + Year = {2000}} + +@article{Phelps2004, + Author = {Phelps, E. A. and Delgado, M. R. and Nearing, K. I. and LeDoux, J. E.}, + Journal = {Neuron}, + Month = {Sep}, + Pages = {897--905}, + Title = {{{E}xtinction learning in humans: role of the amygdala and vm{P}{F}{C}}}, + Volume = {43}, + Year = {2004}} + +@article{Philiastides2006, + Author = {Philiastides, M. G. and Ratcliff, R. and Sajda, P.}, + Journal = {Journal of Neuroscience}, + Pages = {8965--8975}, + Title = {Neural Representation of Task Difficulty and Decision--making During Perceptual Categorization: {A} Timing Diagram}, + Volume = {26}, + Year = {2006}} + +@article{Phillips1998, + Author = {Phillips, K. A. and Gunderson, J. G. and Triebwasser, J. and Kimble, C. R. and Faedda, G. and Lyoo, I. K. and Renn, J.}, + Journal = {Am J Psychiatry}, + Month = {Aug}, + Pages = {1044--1048}, + Title = {{{R}eliability and validity of depressive personality disorder}}, + Volume = {155}, + Year = {1998}} + +@article{Phillips2003, + Abstract = {The dopamine-containing projection from the ventral tegmental area + of the midbrain to the nucleus accumbens is critically involved in + mediating the reinforcing properties of cocaine. Although neurons + in this area respond to rewards on a subsecond timescale, neurochemical + studies have only addressed the role of dopamine in drug addiction + by examining changes in the tonic (minute-to-minute) levels of extracellular + dopamine. To investigate the role of phasic (subsecond) dopamine + signalling, we measured dopamine every 100 ms in the nucleus accumbens + using electrochemical technology. 
Rapid changes in extracellular + dopamine concentration were observed at key aspects of drug-taking + behaviour in rats. Before lever presses for cocaine, there was an + increase in dopamine that coincided with the initiation of drug-seeking + behaviours. Notably, these behaviours could be reproduced by electrically + evoking dopamine release on this timescale. After lever presses, + there were further increases in dopamine concentration at the concurrent + presentation of cocaine-related cues. These cues alone also elicited + similar, rapid dopamine signalling, but only in animals where they + had previously been paired to cocaine delivery. These findings reveal + an unprecedented role for dopamine in the regulation of drug taking + in real time.}, + Author = {Paul E M Phillips and Garret D Stuber and Michael L A V Heien and R. Mark Wightman and Regina M Carelli}, + Doi = {10.1038/nature01476}, + Institution = {Department of Psychology, Neuroscience Center, University of North Carolina, Chapel Hill, North Carolina 27599, USA.}, + Journal = {Nature}, + Keywords = {Animals; Cocaine-Related Disorders, metabolism; Conditioning (Psychology); Cues; Dopamine, metabolism/secretion; Kinetics; Male; Nucleus Accumbens, metabolism; Rats; Rats, Sprague-Dawley; Reinforcement (Psychology); Reward; Signal Transduction; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {6932}, + Owner = {Woo-Young Ahn}, + Pages = {614--618}, + Pii = {nature01476}, + Pmid = {12687000}, + Timestamp = {2009.08.05}, + Title = {Subsecond dopamine release promotes cocaine seeking.}, + Url = {http://dx.doi.org/10.1038/nature01476}, + Volume = {422}, + Year = {2003}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/nature01476}} + +@article{Pierce1973, + Author = {Pierce, D. A.}, + Journal = {The Annals of Statistics}, + Pages = {241--250}, + Title = {On Some Difficulties in a Frequency Theory of Inference}, + Volume = {1}, + Year = {1973}} + +@article{Pike1973, + Author = {Pike, A. R.}, + Journal = {Psychological Review}, + Pages = {53--68}, + Title = {Response Latency Models for Signal Detection}, + Volume = {80}, + Year = {1973}} + +@article{Pike1966, + Author = {Pike, A. R.}, + Journal = {British Journal of Mathematical and Statistical Psychology}, + Pages = {161--182}, + Title = {Stochastic Models of Choice Behaviour: Response Probabilities and Latencies of Finite {M}arkov Chain Systems}, + Volume = {21}, + Year = {1966}} + +@article{pilla1999selective, + Author = {Pilla, M. and Perachon, S. and Sautel, F. and Garrido, F. and Mann, A. and Wermuth, C.G. and Schwartz, J.C. and Everitt, B.J. and Sokoloff, P.}, + Journal = {Nature}, + Number = {6742}, + Pages = {371--375}, + Publisher = {Nature Publishing Group}, + Title = {{Selective inhibition of cocaine-seeking behaviour by a partial dopamine D3 receptor agonist}}, + Volume = {400}, + Year = {1999}} + +@article{Pine2005, + Author = {Pine, D. S. and Mogg, K. and Bradley, B. P. and Montgomery, L. and Monk, C. S. and McClure, E. and Guyer, A. E. and Ernst, M. and Charney, D. S. and Kaufman, J.}, + Journal = {Am J Psychiatry}, + Month = {Feb}, + Pages = {291--296}, + Title = {{{A}ttention bias to threat in maltreated children: implications for vulnerability to stress-related psychopathology}}, + Volume = {162}, + Year = {2005}} + +@article{Pitt2002, + Author = {Pitt, M. A. and Myung, I. J. 
and Zhang, S.}, + Journal = {Psychological Review}, + Pages = {472--491}, + Title = {Toward a Method of Selecting Among Computational Models of Cognition}, + Volume = {109}, + Year = {2002}} + +@article{Pizzagalli2008, + Author = {Pizzagalli, D. A. and Evins, A. E. and Schetter, E. C. and Frank, M. J. and Pajtas, P. E. and Santesso, D. L. and Culhane, M.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Feb}, + Pages = {221--232}, + Title = {{{S}ingle dose of a dopamine agonist impairs reinforcement learning in humans: behavioral evidence from a laboratory-based measure of reward responsiveness}}, + Volume = {196}, + Year = {2008}} + +@article{Plas2008, + Author = {van der Plas, E. A. and Crone, E. A. and van den Wildenberg, W. P. and Tranel, D. and Bechara, A.}, + Journal = {J Clin Exp Neuropsychol}, + Month = {Nov}, + Pages = {1--14}, + Title = {{{E}xecutive control deficits in substance-dependent individuals: {A} comparison of alcohol, cocaine, and methamphetamine and of men and women}}, + Year = {2008}} + +@article{Plassmann2007, + Author = {Plassmann, H. and O'Doherty, J. and Rangel, A.}, + Journal = {J. Neurosci.}, + Pages = {9984--9988}, + Title = {{{O}rbitofrontal cortex encodes willingness to pay in everyday economic transactions}}, + Volume = {27}, + Year = {2007}} + +@article{Plassmann2008, + Author = {Plassmann, H. and O'Doherty, J. and Shiv, B. and Rangel, A.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Pages = {1050--1054}, + Title = {{{M}arketing actions can modulate neural representations of experienced pleasantness}}, + Volume = {105}, + Year = {2008}} + +@article{Platt2004, + Author = {Platt, M. L.}, + Journal = {Nat. Neurosci.}, + Month = {Apr}, + Pages = {319--320}, + Title = {{{U}npredictable primates and prefrontal cortex}}, + Volume = {7}, + Year = {2004}} + +@article{Platt2003, + Author = {Platt, M. L.}, + Journal = {Neuron}, + Month = {May}, + Pages = {518--519}, + Title = {{{L}earning is bitter and sweet in ventral striatum}}, + Volume = {38}, + Year = {2003}} + +@article{Platt2002, + Author = {Platt, M. L.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Apr}, + Pages = {141--148}, + Title = {{{N}eural correlates of decisions}}, + Volume = {12}, + Year = {2002}} + +@article{Platt2002a, + Author = {Platt, M. L.}, + Journal = {Neuron}, + Month = {Jan}, + Pages = {316--318}, + Title = {{{C}audate clues to rewarding cues}}, + Volume = {33}, + Year = {2002}} + +@article{PLATT1960a, + Author = {Platt, M. L.}, + Journal = {Br J Clin Pract}, + Month = {Jun}, + Pages = {457--460}, + Title = {{{T}reatment of dyspepsia with a combination of trifluoperazine and isopropamide iodide}}, + Volume = {14}, + Year = {1960}} + +@article{Platt1986, + Author = {Platt, M. L. and Belville, W. D. and Stones, C. and Oberhofer, T. R.}, + Journal = {J. Urol.}, + Month = {Nov}, + Pages = {1044--1046}, + Title = {{{R}apid bacteriuria screening in a urological setting: clinical use}}, + Volume = {136}, + Year = {1986}} + +@article{Platt2000, + Author = {Platt, M. L. and Glimcher, P. W.}, + Journal = {Exp Brain Res}, + Month = {Jun}, + Pages = {279--286}, + Title = {{{S}hort-term changes in movement frequency do not alter the spatial tuning of saccade-related neurons in intraparietal cortex}}, + Volume = {132}, + Year = {2000}} + +@article{Platt1999, + Author = {Platt, M. L. and Glimcher, P. 
W.}, + Journal = {Nature}, + Pages = {233--238}, + Title = {Neural Correlates of Decision Variables in Parietal Cortex}, + Volume = {400}, + Year = {1999}} + +@article{Platt1999a, + Author = {Platt, M. L. and Glimcher, P. W.}, + Journal = {Nature}, + Month = {Jul}, + Pages = {233--238}, + Title = {{{N}eural correlates of decision variables in parietal cortex}}, + Volume = {400}, + Year = {1999}} + +@article{Platt1998, + Author = {Platt, M. L. and Glimcher, P. W.}, + Journal = {Exp Brain Res}, + Month = {Jul}, + Pages = {65--75}, + Title = {{{R}esponse fields of intraparietal neurons quantified with multiple saccadic targets}}, + Volume = {121}, + Year = {1998}} + +@article{Platt1997, + Author = {Platt, M. L. and Glimcher, P. W.}, + Journal = {J. Neurophysiol.}, + Month = {Sep}, + Pages = {1574--1589}, + Title = {{{R}esponses of intraparietal neurons to saccadic targets and visual distractors}}, + Volume = {78}, + Year = {1997}} + +@article{Platt2008, + Author = {Platt, M. L. and Huettel, S. A.}, + Journal = {Nature Neuroscience}, + Pages = {398--403}, + Title = {{{R}isky business: the neuroeconomics of decision making under uncertainty}}, + Volume = {11}, + Year = {2008}} + +@article{Platt1990, + Author = {Platt, M. L. and Kiesling, V. J. and Vaccaro, J. A.}, + Journal = {J. Urol.}, + Month = {Jul}, + Pages = {127--129}, + Title = {{{E}osinophilic ureteritis associated with eosinophilic cholangitis: a case report}}, + Volume = {144}, + Year = {1990}} + +@article{PLATT1960, + Author = {Platt, M. L. and Rowell, S. S. and Woolas, K. D.}, + Journal = {Br J Clin Pract}, + Month = {Dec}, + Pages = {991--993}, + Title = {{{C}linical experience with '{S}elvigon', a new antitussive}}, + Volume = {14}, + Year = {1960}} + +@article{Platt2009, + Author = {Platt, M. L. and Spelke, E. S.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Jun}, + Title = {{{W}hat can developmental and comparative cognitive neuroscience tell us about the adult human brain?}}, + Year = {2009}} + +@article{Plawner2002, + Author = {Plawner, L. L. and Delgado, M. R. and Miller, V. S. and Levey, E. B. and Kinsman, S. L. and Barkovich, A. J. and Simon, E. M. and Clegg, N. J. and Sweet, V. T. and Stashinko, E. E. and Hahn, J. S.}, + Journal = {Neurology}, + Month = {Oct}, + Pages = {1058--1066}, + Title = {{{N}euroanatomy of holoprosencephaly as predictor of function: beyond the face predicting the brain}}, + Volume = {59}, + Year = {2002}} + +@article{Ploeger2002, + Author = {Ploeger, A. and van der Maas, H. L. J. and Hartelman, P. A. I.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {26--42}, + Title = {Stochastic Catastrophe Analysis of Switches in the Perception of Apparent Motion}, + Volume = {9}, + Year = {2002}} + +@book{Pocock1983, + Address = {New York}, + Author = {Pocock, S. J.}, + Publisher = {Wiley}, + Title = {Clinical Trials: {A} Practical Approach}, + Year = {1983}} + +@article{Pocock1977, + Author = {Pocock, S. J.}, + Journal = {Biometrika}, + Pages = {191--199}, + Title = {Group Sequential Methods in the Design and Analysis of Clinical Trials}, + Volume = {64}, + Year = {1977}} + +@article{Pollock2009, + Author = {Pollock, J. D. and Koustova, E. and Hoffman, A. and Shurtleff, D. and Volkow, N. D.}, + Journal = {Lancet}, + Month = {Apr}, + Title = {{{T}reatments for nicotine addiction should be a top priority}}, + Year = {2009}} + +@article{Pontieri1990, + Author = {Pontieri, F. E. and Crane, A. M. and Seiden, L. S. and Kleven, M. S. and Porrino, L. 
J.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {175--182}, + Title = {{{M}etabolic mapping of the effects of intravenous methamphetamine administration in freely moving rats}}, + Volume = {102}, + Year = {1990}} + +@article{Pontieri1995, + Author = {Pontieri, F. E. and Viola, J. J. and Sokoloff, L. and Porrino, L. J.}, + Journal = {Neuroreport}, + Month = {Jun}, + Pages = {1330--1332}, + Title = {{{S}elective metabolic activation by apomorphine in striosomes of denervated striatum in {M}{P}{T}{P}-induced hemiparkinsonian monkeys}}, + Volume = {6}, + Year = {1995}} + +@article{Porcelli2009, + Author = {Porcelli, A. J. and Delgado, M. R.}, + Journal = {Psychol Sci}, + Month = {Mar}, + Pages = {278--283}, + Title = {{{A}cute stress modulates risk taking in financial decision making}}, + Volume = {20}, + Year = {2009}} + +@article{Porrino1993, + Author = {Porrino, L. J.}, + Journal = {Psychopharmacology (Berl.)}, + Pages = {343--351}, + Title = {{{F}unctional consequences of acute cocaine treatment depend on route of administration}}, + Volume = {112}, + Year = {1993}} + +@article{Porrino1992, + Author = {Porrino, L. J.}, + Journal = {NIDA Res. Monogr.}, + Pages = {135--151}, + Title = {{{M}etabolic mapping methods for identifying the neural substrates of the effects of abused substances}}, + Volume = {124}, + Year = {1992}} + +@article{Porrino1987a, + Author = {Porrino, L. J. and Burns, R. S. and Crane, A. M. and Palombo, E. and Kopin, I. J. and Sokoloff, L.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Aug}, + Pages = {5995--5999}, + Title = {{{L}ocal cerebral metabolic effects of {L}-dopa therapy in 1-methyl-4-phenyl-1,2,3,6-tetrahydropyridine-induced parkinsonism in monkeys}}, + Volume = {84}, + Year = {1987}} + +@article{Porrino1987b, + Author = {Porrino, L. J. and Burns, R. S. and Crane, A. M. and Palombo, E. and Kopin, I. J. and Sokoloff, L.}, + Journal = {Life Sci.}, + Month = {Apr}, + Pages = {1657--1664}, + Title = {{{C}hanges in local cerebral glucose utilization associated with {P}arkinson's syndrome induced by 1-methyl-4-phenyl-1,2,3,6-tetrahydropyridine ({M}{P}{T}{P}) in the primate}}, + Volume = {40}, + Year = {1987}} + +@article{Porrino1980, + Author = {Porrino, L. J. and Coons, E. E.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jan}, + Pages = {125--130}, + Title = {{{E}ffects of {G}{A}{B}{A} receptor blockade on stimulation-induced feeding and self-stimulation}}, + Volume = {12}, + Year = {1980}} + +@article{Porrino1983b, + Author = {Porrino, L. J. and Coons, E. E. and MacGregor, B.}, + Journal = {Brain Res.}, + Month = {Oct}, + Pages = {269--282}, + Title = {{{T}wo types of medial hypothalamic inhibition of lateral hypothalamic reward}}, + Volume = {277}, + Year = {1983}} + +@article{Porrino1981, + Author = {Porrino, L. J. and Crane, A. M. and Goldman-Rakic, P. S.}, + Journal = {J. Comp. Neurol.}, + Month = {May}, + Pages = {121--136}, + Title = {{{D}irect and indirect pathways from the amygdala to the frontal lobe in rhesus monkeys}}, + Volume = {198}, + Year = {1981}} + +@article{Porrino2005, + Author = {Porrino, L. J. and Daunais, J. B. and Rogers, G. A. and Hampson, R. E. and Deadwyler, S. A.}, + Journal = {PLoS Biol.}, + Month = {Sep}, + Pages = {e299}, + Title = {{{F}acilitation of task performance and removal of the effects of sleep deprivation by an ampakine ({C}{X}717) in nonhuman primates}}, + Volume = {3}, + Year = {2005}} + +@article{Porrino2004a, + Author = {Porrino, L. J. and Daunais, J. B. and Smith, H. R. and Nader, M. 
A.}, + Journal = {Neurosci Biobehav Rev}, + Month = {Jan}, + Pages = {813--820}, + Title = {{{T}he expanding effects of cocaine: studies in a nonhuman primate model of cocaine self-administration}}, + Volume = {27}, + Year = {2004}} + +@article{Porrino1995, + Author = {Porrino, L. J. and Davies, H. M. and Childers, S. R.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Feb}, + Pages = {901--910}, + Title = {{{B}ehavioral and local cerebral metabolic effects of the novel tropane analog, 2 beta-propanoyl-3 beta-(4-tolyl)-tropane}}, + Volume = {272}, + Year = {1995}} + +@article{Porrino1988b, + Author = {Porrino, L. J. and Domer, F. R. and Crane, A. M. and Sokoloff, L.}, + Journal = {Neuropsychopharmacology}, + Month = {May}, + Pages = {109--118}, + Title = {{{S}elective alterations in cerebral metabolism within the mesocorticolimbic dopaminergic system produced by acute cocaine administration in rats}}, + Volume = {1}, + Year = {1988}} + +@article{Porrino1991a, + Author = {Porrino, L. J. and Dworkin, S. I. and Smith, J. E.}, + Journal = {Adv. Exp. Med. Biol.}, + Pages = {339--351}, + Title = {{{B}asal forebrain involvement in self-administration of drugs of abuse}}, + Volume = {295}, + Year = {1991}} + +@article{Porrino1984a, + Author = {Porrino, L. J. and Esposito, R. U. and Seeger, T. F. and Crane, A. M. and Pert, A. and Sokoloff, L.}, + Journal = {Science}, + Month = {Apr}, + Pages = {306--309}, + Title = {{{M}etabolic mapping of the brain during rewarding self-stimulation}}, + Volume = {224}, + Year = {1984}} + +@article{Porrino1982, + Author = {Porrino, L. J. and Goldman-Rakic, P. S.}, + Journal = {J. Comp. Neurol.}, + Month = {Feb}, + Pages = {63--76}, + Title = {{{B}rainstem innervation of prefrontal and anterior cingulate cortex in the rhesus monkey revealed by retrograde transport of {H}{R}{P}}}, + Volume = {205}, + Year = {1982}} + +@article{Porrino1988a, + Author = {Porrino, L. J. and Goodman, N. L. and Sharpe, L. G.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Nov}, + Pages = {623--626}, + Title = {{{I}ntravenous self-administration of the indirect dopaminergic agonist amfonelic acid by rats}}, + Volume = {31}, + Year = {1988}} + +@article{Porrino1990, + Author = {Porrino, L. J. and Huston-Lyons, D. and Bain, G. and Sokoloff, L. and Kornetsky, C.}, + Journal = {Brain Res.}, + Month = {Mar}, + Pages = {1--6}, + Title = {{{T}he distribution of changes in local cerebral energy metabolism associated with brain stimulation reward to the medial forebrain bundle of the rat}}, + Volume = {511}, + Year = {1990}} + +@article{Porrino1988, + Author = {Porrino, L. J. and Kornetsky, C.}, + Journal = {NIDA Res. Monogr.}, + Pages = {92--106}, + Title = {{{T}he effects of cocaine on local cerebral metabolic activity}}, + Volume = {88}, + Year = {1988}} + +@article{Porrino1987, + Author = {Porrino, L. J. and Lucignani, G.}, + Journal = {Biol. Psychiatry}, + Month = {Feb}, + Pages = {126--138}, + Title = {{{D}ifferent patterns of local brain energy metabolism associated with high and low doses of methylphenidate. {R}elevance to its action in hyperactive children}}, + Volume = {22}, + Year = {1987}} + +@article{Porrino1984, + Author = {Porrino, L. J. and Lucignani, G. and Dow-Edwards, D. 
and Sokoloff, L.}, + Journal = {Brain Res.}, + Month = {Jul}, + Pages = {311--320}, + Title = {{{C}orrelation of dose-dependent effects of acute amphetamine administration on behavior and local cerebral metabolism in rats}}, + Volume = {307}, + Year = {1984}} + +@article{Porrino2000, + Author = {Porrino, L. J. and Lyons, D.}, + Journal = {Cereb. Cortex}, + Month = {Mar}, + Pages = {326--333}, + Title = {{{O}rbital and medial prefrontal cortex and psychostimulant abuse: studies in animal models}}, + Volume = {10}, + Year = {2000}} + +@article{Porrino2002, + Author = {Porrino, L. J. and Lyons, D. and Miller, M. D. and Smith, H. R. and Friedman, D. P. and Daunais, J. B. and Nader, M. A.}, + Journal = {J. Neurosci.}, + Month = {Sep}, + Pages = {7687--7694}, + Title = {{{M}etabolic mapping of the effects of cocaine during the initial phases of self-administration in the nonhuman primate}}, + Volume = {22}, + Year = {2002}} + +@article{Porrino2004, + Author = {Porrino, L. J. and Lyons, D. and Smith, H. R. and Daunais, J. B. and Nader, M. A.}, + Journal = {J. Neurosci.}, + Month = {Apr}, + Pages = {3554--3562}, + Title = {{{C}ocaine self-administration produces a progressive involvement of limbic, association, and sensorimotor striatal domains}}, + Volume = {24}, + Year = {2004}} + +@article{Porrino1994a, + Author = {Porrino, L. J. and Migliarese, K. and Davies, H. M. and Saikali, E. and Childers, S. R.}, + Journal = {Life Sci.}, + Pages = {L511--517}, + Title = {{{B}ehavioral effects of the novel tropane analog, 2 beta-propanoyl-3 beta-(4-toluyl)-tropane ({P}{T}{T})}}, + Volume = {54}, + Year = {1994}} + +@article{Porrino1997, + Author = {Porrino, L. J. and Miller, M. and Hedgecock, A. A. and Thornley, C. and Matasi, J. J. and Davies, H. M.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {26--35}, + Title = {{{L}ocal cerebral metabolic effects of the novel cocaine analog, {W}{F}-31: comparisons to fluoxetine}}, + Volume = {27}, + Year = {1997}} + +@article{Porrino1983a, + Author = {Porrino, L. J. and Rapoport, J. L. and Behar, D. and Ismond, D. R. and Bunney, W. E.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jun}, + Pages = {688--693}, + Title = {{{A} naturalistic assessment of the motor activity of hyperactive boys. {I}{I}. {S}timulant drug effects}}, + Volume = {40}, + Year = {1983}} + +@article{Porrino1983, + Author = {Porrino, L. J. and Rapoport, J. L. and Behar, D. and Sceery, W. and Ismond, D. R. and Bunney, W. E.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jun}, + Pages = {681--687}, + Title = {{{A} naturalistic assessment of the motor activity of hyperactive boys. {I}. {C}omparison with normal controls}}, + Volume = {40}, + Year = {1983}} + +@article{Porrino1989, + Author = {Porrino, L. J. and Ritz, M. C. and Goodman, N. L. and Sharpe, L. G. and Kuhar, M. J. and Goldberg, S. R.}, + Journal = {Life Sci.}, + Pages = {1529--1535}, + Title = {{{D}ifferential effects of the pharmacological manipulation of serotonin systems on cocaine and amphetamine self-administration in rats}}, + Volume = {45}, + Year = {1989}} + +@article{Porrino2007, + Author = {Porrino, L. J. and Smith, H. R. and Nader, M. A. and Beveridge, T. J.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Month = {Nov}, + Pages = {1593--1600}, + Title = {{{T}he effects of cocaine: a shifting target over the course of addiction}}, + Volume = {31}, + Year = {2007}} + +@article{Porrino1991, + Author = {Porrino, L. J. and Viola, J. J. and Crane, A. M. and Pontieri, F. E.}, + Journal = {Neurosci. 
Lett.}, + Month = {Jun}, + Pages = {155--159}, + Title = {{{A}lterations in opiate receptor binding in {M}{P}{T}{P}-induced hemiparkinsonian monkeys}}, + Volume = {127}, + Year = {1991}} + +@article{Porrino1998a, + Author = {Porrino, L. J. and Whitlow, C. T. and Samson, H. H.}, + Journal = {Brain Res.}, + Month = {Apr}, + Pages = {18--26}, + Title = {{{E}ffects of the self-administration of ethanol and ethanol/sucrose on rates of local cerebral glucose utilization in rats}}, + Volume = {791}, + Year = {1998}} + +@article{Porrino1994, + Author = {Porrino, L. J. and Williams-Hemby, L. and Davies, H. M.}, + Journal = {NIDA Res. Monogr.}, + Pages = {71--86}, + Title = {{{M}etabolic mapping methods for the identification of the neural substrates of the effects of novel tropane analogs}}, + Volume = {138}, + Year = {1994}} + +@article{Porrino1998, + Author = {Porrino, L. J. and Williams-Hemby, L. and Whitlow, C. and Bowen, C. and Samson, H. H.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Feb}, + Pages = {176--182}, + Title = {{{M}etabolic mapping of the effects of oral alcohol self-administration in rats}}, + Volume = {22}, + Year = {1998}} + +@book{Poston1978, + Address = {New York}, + Author = {Poston, T. and Stewart, I.}, + Publisher = {Dover}, + Title = {Catastrophe Theory and its Applications}, + Year = {1978}} + +@article{Potenza2008, + Author = {Potenza, M. N. and Brodkin, E. S. and Yang, B. Z. and Birnbaum, S. G. and Nestler, E. J. and Gelernter, J.}, + Journal = {Neuropsychopharmacology}, + Month = {Oct}, + Pages = {2735--2746}, + Title = {{{Q}uantitative trait locus analysis identifies rat genomic regions related to amphetamine-induced locomotion and {G}alpha(i3) levels in nucleus accumbens}}, + Volume = {33}, + Year = {2008}} + +@article{Potts2006, + Author = {Potts, G. F. and Martin, L. E. and Burton, P. and Montague, P. R.}, + Journal = {J Cogn Neurosci}, + Month = {Jul}, + Pages = {1112--1119}, + Title = {{{W}hen things are better or worse than expected: the medial frontal cortex and the allocation of processing resources}}, + Volume = {18}, + Year = {2006}} + +@article{Pottschmidt2003, + Author = {Pottschmidt, K. and Wilms, J. and Nowak, M. A. and Pooley, G. G. and Gleissner, T. and Heindl, W. A. and Smith, D. M. and Remillard, R. and Staubert, R.}, + Journal = {Astronomy \& Astrophysics}, + Pages = {1039--1058}, + Title = {Long Term Variability of {C}ygnus {X}--1. {I}. {X}--Ray Spectral--Temporal Correlations in the Hard State}, + Volume = {407}, + Year = {2003}} + +@article{Pouget2003, + Author = {Pouget, A. and Dayan, P. and Zemel, R. S.}, + Journal = {Annu. Rev. Neurosci.}, + Pages = {381--410}, + Title = {{{I}nference and computation with population codes}}, + Volume = {26}, + Year = {2003}} + +@book{Pourahmadi2001, + Address = {New York}, + Author = {Pourahmadi, M.}, + Publisher = {Wiley}, + Title = {Foundations of Time Series Analysis and Prediction Theory}, + Year = {2001}} + +@article{Powell2004, + Author = {Powell, S. B. and Geyer, M. A. and Gallagher, D. and Paulus, M. P.}, + Journal = {Behav. Brain Res.}, + Month = {Jul}, + Pages = {341--349}, + Title = {{{T}he balance between approach and avoidance behaviors in a novel object exploration paradigm in mice}}, + Volume = {152}, + Year = {2004}} + +@article{Powell2004a, + Author = {Powell, S. B. and Lehmann-Masten, V. D. and Paulus, M. P. and Gainetdinov, R. R. and Caron, M. G. and Geyer, M. 
A.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {May}, + Pages = {310--317}, + Title = {{{M}{D}{M}{A} "ecstasy" alters hyperactive and perseverative behaviors in dopamine transporter knockout mice}}, + Volume = {173}, + Year = {2004}} + +@article{Powell2003, + Author = {Powell, S. B. and Paulus, M. P. and Hartman, D. S. and Godel, T. and Geyer, M. A.}, + Journal = {Neuropharmacology}, + Month = {Mar}, + Pages = {473--481}, + Title = {{{R}{O}-10-5824 is a selective dopamine {D}4 receptor agonist that increases novel object exploration in {C}57 mice}}, + Volume = {44}, + Year = {2003}} + +@article{Praag1975b, + Author = {H. M. van Praag}, + Journal = {Ned Tijdschr Geneeskd}, + Keywords = {Acetylcholine, metabolism; Affect, drug effects; Depression, metabolism; Dopamine, metabolism; Drive, drug effects; Humans; Neostigmine, pharmacology; Norepinephrine, metabolism; Physostigmine, pharmacology; Serotonin, metabolism}, + Language = {dut}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {22}, + Owner = {Young}, + Pages = {882--883}, + Pmid = {1143572}, + Timestamp = {2010.05.01}, + Title = {[Acetylcholine and depression. The monoamines barrier broken]}, + Volume = {119}, + Year = {1975}} + +@article{Praag1975c, + Author = {H. M. van Praag}, + Journal = {Ned Tijdschr Geneeskd}, + Keywords = {Brain, metabolism; Dopamine, metabolism; Humans; Schizophrenia, metabolism; Sensory Receptor Cells}, + Language = {dut}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {19}, + Owner = {Young}, + Pages = {755--756}, + Pmid = {1143560}, + Timestamp = {2010.05.01}, + Title = {[The cerebral dopamine metabolism in schizophrenia]}, + Volume = {119}, + Year = {1975}} + +@article{Praag1975f, + Author = {H. M. van Praag}, + Journal = {Psychopharmacol Bull}, + Keywords = {Bibliography as Topic; Humans; Netherlands; Psychopharmacology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {2}, + Owner = {Young}, + Pages = {71--73}, + Pmid = {1094486}, + Timestamp = {2010.05.01}, + Title = {A bibliography of papers on psychopharmacology by Dutch authors.}, + Volume = {11}, + Year = {1975}} + +@article{Praag1975g, + Author = {H. M. van Praag}, + Journal = {Compr Psychiatry}, + Keywords = {Amphetamine; Animals; Antidepressive Agents, pharmacology/therapeutic use; Brain Chemistry, drug effects; Catecholamines, metabolism; Depression, drug therapy; Dopamine, metabolism; Humans; Levodopa; Motor Activity, drug effects; Norepinephrine, metabolism; Psychoses, Substance-Induced; Psychotic Disorders, drug therapy/metabolism; Rabbits; Receptors, Drug; Research; Research Design; Tranquilizing Agents, pharmacology/therapeutic use}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {1}, + Owner = {Young}, + Pages = {7--22}, + Pmid = {1089049}, + Timestamp = {2010.05.01}, + Title = {Neuroleptics as a guideline to biological research in psychotic disorders.}, + Volume = {16}, + Year = {1975}} + +@article{Praag1975h, + Abstract = {Two statements are presented here: 1) antidepressants increase the + amount of MA at the central receptors, and 2) antidepressants are + effective in some, but quite ineffective in other patients, even + if they belong to the same diagnostic category. Three questions result + from these statements: 1) Does a central MA deficiency occur in depressive + patients? 2) If so, is this disorder present in only a proportion + of the patients? 
3) If so, can this explain the apparent selectivity + of antidepressants in the sense that particularly MA-deficient patients + benefit from this type of therapy? A tentative answer is given to + these three questions. In depressive patients the cerebral MA turnover + can be diminished. These disorders do not occur in all patients but + seem to be confined to certain categories of depression. Diminution + or non-diminution of the central MA turnover is a (not the) factor + which determines whether antidepressant medication will succeed or + fail. The theoretical and practical implications of these answers + are discussed.}, + Author = {H. M. van Praag}, + Journal = {Int J Psychiatry Med}, + Keywords = {Adjustment Disorders, metabolism; Antidepressive Agents, pharmacology/therapeutic use; Brain, metabolism; Depression, cerebrospinal fluid/drug therapy/metabolism; Drug Resistance; Homovanillic Acid, cerebrospinal fluid; Humans; Hydroxyindoleacetic Acid, cerebrospinal fluid; Monoamine Oxidase, deficiency/metabolism; Norepinephrine, metabolism; Probenecid, diagnostic use; Receptors, Drug, drug effects; Serotonin, metabolism}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {1-2}, + Owner = {Young}, + Pages = {99--111}, + Pmid = {773865}, + Timestamp = {2010.05.01}, + Title = {Therapy-resistant depressions: biochemical and pharmacological considerations.}, + Volume = {6}, + Year = {1975}} + +@article{Praag1974, + Author = {H. M. van Praag}, + Journal = {Ned Tijdschr Geneeskd}, + Keywords = {Adrenocorticotropic Hormone, analogs /&/ derivatives; Androgen Antagonists; Antidepressive Agents, pharmacology; Humans; Hypnotics and Sedatives, pharmacology; Psychopharmacology; Substance-Related Disorders; Thyrotropin-Releasing Hormone, pharmacology; Tranquilizing Agents, pharmacology}, + Language = {dut}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {49}, + Owner = {Young}, + Pages = {1869--1877}, + Pmid = {4373667}, + Timestamp = {2010.05.01}, + Title = {[New trends in human psychopharmacology]}, + Volume = {118}, + Year = {1974}} + +@article{Praag1974a, + Author = {H. M. van Praag}, + Doi = {10.1055/s-0028-1094407}, + Journal = {Pharmakopsychiatr Neuropsychopharmakol}, + Keywords = {Antidepressive Agents, metabolism/therapeutic use; Brain, metabolism; Depression, drug therapy/metabolism; Dopamine, metabolism; Drug Resistance; Histocytochemistry; Humans; Monoamine Oxidase, metabolism; Norepinephrine, metabolism; Serotonin, metabolism; Suicide; Tryptophan, metabolism}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Mar}, + Number = {2}, + Owner = {Young}, + Pages = {88--98}, + Pmid = {4438429}, + Timestamp = {2010.05.01}, + Title = {Therapy-resistant depressions. Biochemical and pharmacological considerations. Contributions to biochemistry.}, + Url = {http://dx.doi.org/10.1055/s-0028-1094407}, + Volume = {7}, + Year = {1974}, + Bdsk-Url-1 = {http://dx.doi.org/10.1055/s-0028-1094407}} + +@article{Praag1975e, + Author = {H. M. van Praag and L. C. Dols and T. 
Schut}, + Journal = {Compr Psychiatry}, + Keywords = {Adolescent; Adult; Aged; Aggression; Anxiety; Basal Ganglia Diseases, chemically induced; Chlorpromazine, adverse effects/therapeutic use; Cognition Disorders, drug therapy; Delusions, drug therapy; Female; Hallucinations, drug therapy; Humans; Indoles, therapeutic use; Male; Middle Aged; Motor Activity, drug effects; Piperazines, adverse effects/therapeutic use; Psychotic Disorders, drug therapy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {3}, + Owner = {Young}, + Pages = {255--263}, + Pii = {0010-440X(75)90052-8}, + Pmid = {237735}, + Timestamp = {2010.05.01}, + Title = {Biochemical versus psychopathological action profile of neuroleptics: a comparative study of chlorpromazine and oxypertine in acute psychotic disorders.}, + Volume = {16}, + Year = {1975}} + +@article{Praag1975, + Abstract = {There are indications for a functional deficiency of 5-HT and DA in + certain kinds of depression. The question arises if these biochemical + disturbances are primary or secondary, whether they contribute to + the pathogenesis of the depression or whether they result from it. + From research with MA precursors we drew the tentative conclusion + that they are presumably primary and interrelated with the depression + in a causal and/or predisposing way.}, + Author = {H. M. van Praag and J. Korf}, + Doi = {10.1055/s-0028-1094463}, + Journal = {Pharmakopsychiatr Neuropsychopharmakol}, + Keywords = {5-Hydroxytryptophan, therapeutic use; Clomipramine, therapeutic use; Depression, drug therapy/metabolism; Dopamine, deficiency; Homovanillic Acid, metabolism; Humans; Serotonin, deficiency}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {5}, + Owner = {Young}, + Pages = {322--326}, + Pmid = {1085937}, + Timestamp = {2010.05.01}, + Title = {Central monoamine deficiency in depressions: causative or secondary phenomenon?}, + Url = {http://dx.doi.org/10.1055/s-0028-1094463}, + Volume = {8}, + Year = {1975}, + Bdsk-Url-1 = {http://dx.doi.org/10.1055/s-0028-1094463}} + +@article{Praag1975d, + Abstract = {A strategy is presented for biological psychosis research with neuroleptics + acting as a point of crystallisation like antidepressants do in biological + depression research. The neuroleptics chlorpromazine, haloperidol + and oxypertine were studied, and it was found that they influence + central catecholamine (CA) metabolism in man. An increased central + dopamine (DA) turnover was found to occur in psychotic disorders, + mostly in the form of motor agitation. As the first of a planned + series of studies, chlorpromazine with presumed ability to reduce + both DA-ergic and noradrenaline (NA)-ergic transmission and oxypertine + as a more selective blocker of NA-ergic transmission were selected + for comparison. The overall therapeutic effect of oxypertine was + inferior to that of chlorpromazine, whereas oxypertine proved more + effective in cases where loss of initiative was predominant. On the + other hand, chlorpromazine exerted a more marked influence on extrapyramidal + motor functions than oxypertine. In chronic psychotic disorders with + inertia, oxypertine thus seems to be a neuroleptic which is strong + enough to prevent exacerbation of delusions and hallucinations while + at the same time increasing the level of motivation. These findings + were in accordance with our predictions. 
The comparative study is + illustrative of the practical significance of the research approach + in this study: The biochemical action profile of a neuroleptic seems + to be a more reliable indicator of its clinical action than does + its chemical structure.}, + Author = {H. M. van Praag and J. Korf}, + Journal = {Acta Psychiatr Scand}, + Keywords = {Brain, metabolism; Catecholamines, metabolism; Chlorpromazine, pharmacology/therapeutic use; Depression, Chemical; Dopamine, metabolism; Drug Evaluation; Haloperidol, pharmacology/therapeutic use; Homovanillic Acid, cerebrospinal fluid; Humans; Indoles, therapeutic use; Methoxyhydroxyphenylglycol, cerebrospinal fluid; Motor Activity, drug effects; Norepinephrine, metabolism; Piperazines, pharmacology/therapeutic use; Psychotic Disorders, drug therapy/metabolism; Research Design; Stimulation, Chemical}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {4}, + Owner = {Young}, + Pages = {268--284}, + Pmid = {238363}, + Timestamp = {2010.05.01}, + Title = {Biochemical research into psychosis.}, + Volume = {51}, + Year = {1975}} + +@article{Praag1975a, + Abstract = {The authors examined central catecholamine metabolism in various symptomatological + psychotic disorders and the relationship between the biochemical + and therapeutic action profiles of neuroleptics. Haloperidol and + (to a lesser extent) chlorpromazine increase the dopamine (DA) turnover + in the central nervous system; oxypertine + has the reverse effect. The authors question whether disorders of + DA-metabolism underlie or result from disorders of motor activity, + postulating that the hyperdopaminergic activity observable in psychoses + is dependent on motor hyperactivity rather than on "true" psychotic + symptoms such as delusions and hallucinations.}, + Author = {H. M. Van Praag and J. Korf}, + Journal = {Am J Psychiatry}, + Keywords = {Adolescent; Adult; Brain Chemistry, drug effects; Brain, metabolism; Catecholamines, metabolism; Chlorpromazine, pharmacology; Depression, Chemical; Dopamine, metabolism; Dose-Response Relationship, Drug; Female; Haloperidol, pharmacology; Humans; Male; Methoxyhydroxyphenylglycol, cerebrospinal fluid; Middle Aged; Movement Disorders; Norepinephrine, metabolism; Piperazines, pharmacology; Psychotic Disorders, cerebrospinal fluid/drug therapy/metabolism; Stimulation, Chemical; Tranquilizing Agents, pharmacology/therapeutic use}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {6}, + Owner = {Young}, + Pages = {593--597}, + Pmid = {235848}, + Timestamp = {2010.05.01}, + Title = {Neuroleptics, catecholamines, and psychoses: a study of their interrelations.}, + Volume = {132}, + Year = {1975}} + +@article{Pratt1977, + Author = {Pratt, J. W.}, + Journal = {Synthese}, + Pages = {59--69}, + Title = {Decisions as Statistical Evidence and {B}irnbaum's `confidence concept'}, + Volume = {36}, + Year = {1977}} + +@article{Pratt1965, + Author = {Pratt, J. W.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {169--203}, + Title = {{B}ayesian Interpretation of Standard Inference Statements}, + Volume = {27}, + Year = {1965}} + +@article{Pratt1962, + Author = {Pratt, J. W.}, + Journal = {Journal of the American Statistical Association}, + Pages = {314--315}, + Title = {Discussion of ``On the Foundations of Statistical Inference''}, + Volume = {57}, + Year = {1962}} + +@article{Pratt1961, + Author = {Pratt, J. 
W.}, + Journal = {Journal of the American Statistical Association}, + Pages = {163--167}, + Title = {Review of {L}ehmann, {E}. {L}., Testing Statistical Hypotheses}, + Volume = {56}, + Year = {1961}} + +@article{Pratte2009, + Abstract = {Subliminal priming is said to occur when a subliminal prime influences + the classification of a subsequent target. Most subliminal-priming + claims are based on separate target- and prime-classification tasks. + Because primes are intended to be subliminal, the prime-classification + task is difficult, and the target-classification task is easy. To + assess whether this task-difficulty difference accounts for previous + claims of subliminal priming, we manipulated the ease of the prime-classification + task by intermixing long-duration (visible) primes with short-duration + (near liminal) ones. In Experiment 1, this strategy of intermixing + long-duration primes raised classification of the short-duration + ones. In Experiments 2 and 3, prime duration was lowered in such + a way that prime classification was at chance in intermixed presentations. + Under these conditions, we failed to observe any priming effects; + hence, previous demonstrations of subliminal priming may simply have + reflected a task-difficulty artifact.}, + Author = {Michael S Pratte and Jeffrey N Rouder}, + Doi = {10.3758/APP.71.6.1276}, + Institution = {Department of Psychological Sciences, 210 McAlester Hall, University of Missouri, Columbia, MO 65211, USA. prattems@gmail.com}, + Journal = {Atten Percept Psychophys}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {1276--1283}, + Pii = {71/6/1276}, + Pmid = {19633343}, + Timestamp = {2009.08.15}, + Title = {A task-difficulty artifact in subliminal priming.}, + Url = {http://dx.doi.org/10.3758/APP.71.6.1276}, + Volume = {71}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.3758/APP.71.6.1276}} + +@article{Pressing1997, + Author = {Pressing, J. and Jolley--Rogers, G.}, + Journal = {Biological Cybernetics}, + Pages = {339--347}, + Title = {Spectral Properties of Human Cognition and Skill}, + Volume = {76}, + Year = {1997}} + +@article{Preuschoff2007, + Author = {Preuschoff, K. and Bossaerts, P.}, + Journal = {Annals of the New York Academy of Sciences}, + Number = {1 Reward and Decision Making in Corticobasal Ganglia Networks}, + Pages = {135--146}, + Title = {{Adding prediction risk to the theory of reward learning}}, + Volume = {1104}, + Year = {2007}} + +@article{Preuschoff2006, + Author = {Preuschoff, K. and Bossaerts, P. and Quartz, S. R.}, + Journal = {Neuron}, + Pages = {381--390}, + Title = {{{N}eural differentiation of expected reward and risk in human subcortical structures}}, + Volume = {51}, + Year = {2006}} + +@article{Preuschoff2008, + Author = {Preuschoff, K. and Quartz, S. R. and Bossaerts, P.}, + Journal = {J. Neurosci.}, + Pages = {2745--2752}, + Title = {{{H}uman insula activation reflects risk prediction errors as well as risk}}, + Volume = {28}, + Year = {2008}} + +@book{Priestley1981, + Address = {London}, + Author = {Priestley, M. B.}, + Publisher = {Academic Press}, + Title = {Spectral Analysis and Time Series}, + Year = {1981}} + +@article{Pritchett1991, + Author = {Pritchett, E. L. and DaTorre, S. D. and Platt, M. L. and McCarville, S. E. and Hougham, A. J.}, + Journal = {J. Am. Coll. 
Cardiol.}, + Month = {Feb}, + Pages = {297--303}, + Title = {{{F}lecainide acetate treatment of paroxysmal supraventricular tachycardia and paroxysmal atrial fibrillation: dose-response studies. {T}he {F}lecainide {S}upraventricular {T}achycardia {S}tudy {G}roup}}, + Volume = {17}, + Year = {1991}} + +@article{Proctor1986, + Author = {Proctor, R. W.}, + Journal = {Psychological Review}, + Pages = {473--477}, + Title = {Response Bias, Criteria Settings, and the Fast--\textit{Same} Phenomenon: {A} Reply to {R}atcliff}, + Volume = {93}, + Year = {1986}} + +@article{Proctor1981, + Author = {Proctor, R. W.}, + Journal = {Psychological Review}, + Pages = {291--326}, + Title = {A Unified Theory for Matching Task Phenomena}, + Volume = {88}, + Year = {1981}} + +@article{Proctor1983, + Author = {Proctor, R. W. and Rao, K. V.}, + Journal = {Perception and Psychophysics}, + Pages = {72--76}, + Title = {Evidence That the \textit{Same--Different} Disparity is not Attributable to Response Bias}, + Volume = {34}, + Year = {1983}} + +@article{Qian1996, + Author = {Qian, G. and Gabor, G. and Gupta, R. P.}, + Journal = {Biometrika}, + Pages = {41--54}, + Title = {Generalised Linear Model Selection by the Predictive Least Quasi-Deviance Criterion}, + Volume = {83}, + Year = {1996}} + +@manual{RDevelopmentCoreTeam2004, + Address = {Vienna, Austria}, + Author = {{R Development Core Team}}, + Note = {{ISBN} 3--900051--00--3}, + Organization = {R Foundation for Statistical Computing}, + Title = {R: A language and environment for statistical computing}, + Url = {http://www.R-project.org}, + Year = {2004}, + Bdsk-Url-1 = {http://www.R-project.org}} + +@article{Rabbitt1979, + Author = {Rabbitt, P.}, + Journal = {British Journal of Psychology}, + Pages = {305--311}, + Title = {How Old and Young Subjects Monitor and Control Responses for Accuracy and Speed}, + Volume = {70}, + Year = {1979}} + +@article{Rabbitt1966, + Author = {Rabbitt, P.}, + Journal = {Journal of Experimental Psychology}, + Pages = {264--272}, + Title = {Errors and Error Correction in Choice--Response Tasks}, + Volume = {71}, + Year = {1966}} + +@article{Rabbitt1977, + Author = {Rabbitt, P. and Rodgers, B.}, + Journal = {Quarterly Journal of Experimental Psychology}, + Pages = {727--743}, + Title = {What Does a Man do After he Makes an Error? {A}n Analysis of Response Programming}, + Volume = {29}, + Year = {1977}} + +@article{Rachlin1992, + Abstract = {The fundamental law underlying economic demand and exchange is the + tendency for value of marginal units to diminish with increasing + amounts of a commodity. The present paper demonstrates that this + law follows from three still-more-basic psychological assumptions: + (a) limited consumption rate, (b) delay discounting, and (c) choice + of highest valued alternative. Cases of diminishing marginal value + apparently due to pure intensity of reward may plausibly be attributed + to the above three factors. The further assumption that maximum consumption + rate may vary within and across individuals implies that some substances + may be unusually addictive and that some individual animals may be + unusually susceptible to addiction.}, + Author = {H. 
Rachlin}, + Doi = {10.1901/jeab.1992.57-407}, + Institution = {Psychology Department, SUNY, Stony Brook 11794-2500.}, + Journal = {J Exp Anal Behav}, + Keywords = {Animals; Appetitive Behavior; Conditioning, Operant; Consummatory Behavior; Models, Statistical; Motivation; Reinforcement Schedule}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {407--415}, + Pmid = {1602271}, + Timestamp = {2009.08.06}, + Title = {Diminishing marginal value as delay discounting.}, + Url = {http://dx.doi.org/10.1901/jeab.1992.57-407}, + Volume = {57}, + Year = {1992}, + Bdsk-Url-1 = {http://dx.doi.org/10.1901/jeab.1992.57-407}} + +@article{Rachlin1992a, + Abstract = {The fundamental law underlying economic demand and exchange is the + tendency for value of marginal units to diminish with increasing + amounts of a commodity. The present paper demonstrates that this + law follows from three still-more-basic psychological assumptions: + (a) limited consumption rate, (b) delay discounting, and (c) choice + of highest valued alternative. Cases of diminishing marginal value + apparently due to pure intensity of reward may plausibly be attributed + to the above three factors. The further assumption that maximum consumption + rate may vary within and across individuals implies that some substances + may be unusually addictive and that some individual animals may be + unusually susceptible to addiction.}, + Author = {H. Rachlin}, + Doi = {10.1901/jeab.1992.57-407}, + Institution = {Psychology Department, SUNY, Stony Brook 11794-2500.}, + Journal = {J Exp Anal Behav}, + Keywords = {Animals; Appetitive Behavior; Conditioning, Operant; Consummatory Behavior; Models, Statistical; Motivation; Reinforcement Schedule}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {407--415}, + Pmid = {1602271}, + Timestamp = {2009.08.06}, + Title = {Diminishing marginal value as delay discounting.}, + Url = {http://dx.doi.org/10.1901/jeab.1992.57-407}, + Volume = {57}, + Year = {1992}, + Bdsk-Url-1 = {http://dx.doi.org/10.1901/jeab.1992.57-407}} + +@article{Rachlin1991, + Author = {Rachlin, H. and Raineri, A. and Cross, D.}, + Journal = {Journal of the Experimental Analysis of Behavior}, + Owner = {Wooyoung Ahn}, + Pages = {233--244}, + Timestamp = {2007.05.01}, + Title = {Subjective probability and delay}, + Volume = {55}, + Year = {1991}} + +@incollection{Raftery1993, + Address = {Newbury Park, CA}, + Author = {Raftery, A. E.}, + Booktitle = {Testing Structural Equation Models}, + Editor = {Bollen, K. A. and Long, J. S.}, + Pages = {163--180}, + Publisher = {Sage Publications}, + Title = {{B}ayesian Model Selection in Structural Equation Models}, + Year = {1993}} + +@incollection{Raftery1996, + Address = {Boca Raton (FL)}, + Author = {Raftery, A. E.}, + Booktitle = {{M}arkov chain {M}onte {C}arlo in Practice}, + Editor = {Gilks, W. R. and Richardson, S. and Spiegelhalter, D. J.}, + Pages = {163--187}, + Publisher = {Chapman \& Hall/CRC}, + Title = {Hypothesis testing and model selection}, + Year = {1996}} + +@incollection{Raftery1995, + Address = {Cambridge}, + Author = {Raftery, A. E.}, + Booktitle = {Sociological Methodology}, + Editor = {Marsden, P. V.}, + Pages = {111--196}, + Publisher = {Blackwells}, + Title = {{B}ayesian model selection in social research}, + Year = {1995}} + +@article{Raftery1999, + Author = {Raftery, A. 
E.}, + Journal = {Sociological Methods \& Research}, + Pages = {411--427}, + Title = {{B}ayes Factors and {BIC}}, + Volume = {27}, + Year = {1999}} + +@book{Raiffa1961, + Address = {Cambridge (MA)}, + Author = {Raiffa, H. and Schlaifer, R.}, + Publisher = {The {MIT} Press}, + Title = {Applied Statistical Decision Theory}, + Year = {1961}} + +@article{Ralph2001a, + Author = {Ralph, R. J. and Paulus, M. P. and Fumagalli, F. and Caron, M. G. and Geyer, M. A.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {305--313}, + Title = {{{P}repulse inhibition deficits and perseverative motor patterns in dopamine transporter knock-out mice: differential effects of {D}1 and {D}2 receptor antagonists}}, + Volume = {21}, + Year = {2001}} + +@article{Ralph2001, + Author = {Ralph, R. J. and Paulus, M. P. and Geyer, M. A.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Jul}, + Pages = {148--155}, + Title = {{{S}train-specific effects of amphetamine on prepulse inhibition and patterns of locomotor behavior in mice}}, + Volume = {298}, + Year = {2001}} + +@article{Ralph-Williams2003, + Author = {Ralph-Williams, R. J. and Paulus, M. P. and Zhuang, X. and Hen, R. and Geyer, M. A.}, + Journal = {Biol. Psychiatry}, + Month = {Feb}, + Pages = {352--359}, + Title = {{{V}alproate attenuates hyperactive and perseverative behaviors in mutant mice with a dysregulated dopamine system}}, + Volume = {53}, + Year = {2003}} + +@article{Ramsey1997, + Author = {Ramsey, S. E. and Finn, P. R.}, + Journal = {J. Stud. Alcohol}, + Month = {Nov}, + Pages = {606--616}, + Title = {{{P}300 from men with a family history of alcoholism under different incentive conditions}}, + Volume = {58}, + Year = {1997}} + +@article{Rangarajan2000, + Author = {Rangarajan, G. and Ding, M.}, + Journal = {Physical Review E}, + Pages = {4991--5001}, + Title = {Integrated Approach to the Assessment of Long Range Correlation in Time Series Data}, + Volume = {61}, + Year = {2000}} + +@article{Rangel2008, + Author = {Rangel, A. and Camerer, C. and Montague, P. R.}, + Journal = {Nat. Rev. 
Neurosci.}, + Pages = {545--556}, + Title = {{{A} framework for studying the neurobiology of value-based decision making}}, + Volume = {9}, + Year = {2008}} + +@article{Ratcliff2006, + Author = {Ratcliff, R.}, + Journal = {Cognitive Psychology}, + Pages = {195--237}, + Title = {Modeling Response Signal and Response Time Data}, + Volume = {53}, + Year = {2006}} + +@article{Ratcliff2002, + Author = {Ratcliff, R.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {278--291}, + Title = {A Diffusion Model Account of Response Time and Accuracy in a Brightness Discrimination Task: Fitting Real Data and Failing to Fit Fake but Plausible Data}, + Volume = {9}, + Year = {2002}} + +@article{Ratcliff2001, + Author = {Ratcliff, R.}, + Journal = {Nature Neuroscience}, + Pages = {336}, + Title = {Putting Noise into Neurophysiological Models of Simple Decision Making}, + Volume = {4}, + Year = {2001}} + +@article{Ratcliff1985, + Author = {Ratcliff, R.}, + Journal = {Psychological Review}, + Pages = {212--225}, + Title = {Theoretical Interpretations of the Speed and Accuracy of Positive and Negative Responses}, + Volume = {92}, + Year = {1985}} + +@article{Ratcliff1981, + Author = {Ratcliff, R.}, + Journal = {Psychological Review}, + Pages = {552--572}, + Title = {A Theory of Order Relations in Perceptual Matching}, + Volume = {88}, + Year = {1981}} + +@article{Ratcliff1979, + Author = {Ratcliff, R.}, + Journal = {Psychological Bulletin}, + Pages = {446--461}, + Title = {Group Reaction Time Distributions and an Analysis of Distribution Statistics}, + Volume = {86}, + Year = {1979}} + +@article{Ratcliff1978, + Author = {Ratcliff, R.}, + Journal = {Psychological Review}, + Pages = {59--108}, + Title = {A Theory of Memory Retrieval}, + Volume = {85}, + Year = {1978}} + +@article{Ratcliff2004, + Author = {Ratcliff, R. and Gomez, P. and McKoon, G.}, + Journal = {Psychological Review}, + Pages = {159--182}, + Title = {Diffusion Model Account of Lexical Decision}, + Volume = {111}, + Year = {2004}} + +@article{Ratcliff1982, + Author = {Ratcliff, R. and Hacker, M. J.}, + Journal = {Perception and Psychophysics}, + Pages = {603--604}, + Title = {On the Misguided Use of Reaction--Time Differences: {A} Reply to {P}roctor and {R}ao (1982)}, + Volume = {31}, + Year = {1982}} + +@article{Ratcliff1981a, + Author = {Ratcliff, R. and Hacker, M. J.}, + Journal = {Perception and Psychophysics}, + Pages = {303--307}, + Title = {Speed and Accuracy of Same and Different Responses in Perceptual Matching}, + Volume = {30}, + Year = {1981}} + +@article{Ratcliff2007, + Author = {Ratcliff, R. and Hasegawa, Y. T. and Hasegawa, Y. P. and Smith, P. L. and Segraves, M. A.}, + Journal = {Journal of Neurophysiology}, + Pages = {1756--1774}, + Title = {Dual Diffusion Model for Single--cell Recording Data From the Superior Colliculus in a Brightness--discrimination Task}, + Volume = {97}, + Year = {2007}} + +@article{Ratcliff2000, + Author = {Ratcliff, R. and Rouder, J. N.}, + Journal = {Journal of Experimental Psychology: Human Perception and Performance}, + Pages = {127--140}, + Title = {A Diffusion Model Account of Masking in Two--choice Letter Identification}, + Volume = {26}, + Year = {2000}} + +@article{Ratcliff2000a, + Abstract = {The diffusion model developed by R. Ratcliff (1978, 1981, 1985, 1988) + for 2-choice decisions was applied to data from 2 letter identification + experiments. 
In the experiments, stimulus letters were displayed + and then masked, and the stimulus onset asynchrony between letter + and mask was manipulated to vary accuracy from near chance to near + ceiling. A standard reaction time procedure was used in one experiment + and a deadline procedure in the other. Two hypotheses about the effect + of masking on the information provided to the decision process were + contrasted: (a) The output of perception to the decision process + varies with time, so that the information used by the decision process + rises and falls, reflecting the stimulus onset and mask onset. (b) + The output of perception to the decision is constant over time, reflecting + information integrated over the time between the stimulus and mask + onsets. The data were well fit by the diffusion model only with the + assumption of constant information over time.}, + Author = {R. Ratcliff and J. N. Rouder}, + Institution = {Department of Psychology, Northwestern University, Evanston, Illinois 60208, USA. r-ratcliff@nwu.edu}, + Journal = {J Exp Psychol Hum Percept Perform}, + Keywords = {Adult; Choice Behavior; Female; Humans; Male; Models, Psychological; Pattern Recognition, Visual; Perception; Perceptual Masking; Psychomotor Performance; Reaction Time}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {127--140}, + Pmid = {10696609}, + Timestamp = {2009.08.15}, + Title = {A diffusion model account of masking in two-choice letter identification.}, + Volume = {26}, + Year = {2000}} + +@article{Ratcliff1998, + Author = {Ratcliff, R. and Rouder, J. N.}, + Journal = {Psychological Science}, + Pages = {347--356}, + Title = {Modeling Response Times for Two--choice Decisions}, + Volume = {9}, + Year = {1998}} + +@article{Ratcliffinpress, + Author = {Ratcliff, R. and Schmiedek, F. and McKoon, G.}, + Journal = {Intelligence}, + Pages = {?????}, + Title = {A Diffusion Model Explanation of the Worst Performance Rule for Reaction Time and {IQ}}, + Volume = {??}, + Year = {in press}} + +@article{Ratcliff2003, + Author = {Ratcliff, R. and Segraves, M. and Cherian, A.}, + Journal = {Journal of Neurophysiology}, + Pages = {1392--1407}, + Title = {A Comparison of Macaque Behavior and Superior Colliculus Neuronal Activity to Predictions from Models of Simple Two--choice Decisions}, + Volume = {90}, + Year = {2003}} + +@article{Ratcliff2004a, + Author = {Ratcliff, R. and Smith, P. L.}, + Journal = {Psychological Review}, + Pages = {333--367}, + Title = {A Comparison of Sequential Sampling Models for Two--choice Reaction Time}, + Volume = {111}, + Year = {2004}} + +@article{Ratcliff2004b, + Author = {Ratcliff, R. and Thapar, A. and Gomez, P. and McKoon, G.}, + Journal = {Psychology and Aging}, + Pages = {278--289}, + Title = {A Diffusion Model Analysis of the Effects of Aging in the Lexical--Decision Task}, + Volume = {19}, + Year = {2004}} + +@article{Ratcliffinpressa, + Author = {Ratcliff, R. and Thapar, A. and McKoon, G.}, + Journal = {Psychology and Aging}, + Pages = {?????}, + Title = {Applying the Diffusion Model to Data from 75--85 Year Old Subjects in 5 Experimental Tasks}, + Volume = {??}, + Year = {in press}} + +@article{Ratcliff2006a, + Author = {Ratcliff, R. and Thapar, A. and McKoon, G.}, + Journal = {Psychology and Aging}, + Pages = {353--371}, + Title = {Aging, Practice, and Perceptual Tasks: {A} Diffusion Model Analysis}, + Volume = {21}, + Year = {2006}} + +@article{Ratcliff2006b, + Author = {Ratcliff, R. and Thapar, A. 
and McKoon, G.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {626--635}, + Title = {Aging and Individual Differences in Rapid Two--choice Decisions}, + Volume = {13}, + Year = {2006}} + +@article{Ratcliff2004c, + Author = {Ratcliff, R. and Thapar, A. and McKoon, G.}, + Journal = {Journal of Memory and Language}, + Pages = {408--424}, + Title = {A Diffusion Model Analysis of the Effects of Aging on Recognition Memory}, + Volume = {50}, + Year = {2004}} + +@article{Ratcliff2003a, + Author = {Ratcliff, R. and Thapar, A. and McKoon, G.}, + Journal = {Perception \& Psychophysics}, + Pages = {523--535}, + Title = {A Diffusion Model Analysis of the Effects of Aging on Brightness Discrimination}, + Volume = {65}, + Year = {2003}} + +@article{Ratcliff2001a, + Author = {Ratcliff, R. and Thapar, A. and McKoon, G.}, + Journal = {Psychology and Aging}, + Pages = {323--341}, + Title = {The Effects of Aging on Reaction Time in a Signal Detection Task}, + Volume = {16}, + Year = {2001}} + +@article{Ratcliff2002a, + Author = {Ratcliff, R. and Tuerlinckx, F.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {438--481}, + Title = {Estimating Parameters of the Diffusion Model: Approaches to Dealing with Contaminant Reaction Times and Parameter Variability}, + Volume = {9}, + Year = {2002}} + +@article{Ratcliff1999, + Author = {Ratcliff, R. and {Van Zandt}, T. and McKoon, G.}, + Journal = {Psychological Review}, + Pages = {261--300}, + Title = {Connectionist and Diffusion Models of Reaction Time}, + Volume = {106}, + Year = {1999}} + +@article{Ravizza2004, + Author = {Ravizza, S. M. and Delgado, M. R. and Chein, J. M. and Becker, J. T. and Fiez, J. A.}, + Journal = {Neuroimage}, + Month = {Jun}, + Pages = {562--573}, + Title = {{{F}unctional dissociations within the inferior parietal cortex in verbal working memory}}, + Volume = {22}, + Year = {2004}} + +@article{Ray2008, + Author = {Ray, D. and King-Casas, B. and Montague, P.R. and Dayan, P.}, + Journal = {Proc. 22nd Conf. on Neural Information Processing Systems}, + Title = {{Bayesian Model of Behaviour in Economic Games}}, + Year = {2008}} + +@article{Reddi2003, + Author = {Reddi, B. A. J. and Asrress, K. N. and Carpenter, R. H. S.}, + Journal = {Journal of Neurophysiology}, + Pages = {3538--3546}, + Title = {Accuracy, Information, and Response Time in a Saccadic Decision Task}, + Volume = {90}, + Year = {2003}} + +@article{Reddi2000, + Author = {Reddi, B. A. J. and Carpenter, R. H. S.}, + Journal = {Nature Neuroscience}, + Pages = {827--830}, + Title = {The Influence of Urgency on Decision Time}, + Volume = {3}, + Year = {2000}} + +@article{Redish2004, + Author = {Redish, A. D.}, + Journal = {Science}, + Pages = {1944--1947}, + Title = {{{A}ddiction as a computational process gone awry}}, + Volume = {306}, + Year = {2004}} + +@article{Reeves2005, + Author = {Reeves, A. and Santhi, N. and Decaro, S.}, + Journal = {Spatial Vision}, + Pages = {73--83}, + Title = {A Random--ray Model for Speed and Accuracy in Perceptual Experiments}, + Volume = {18}, + Year = {2005}} + +@article{Regier1988, + Abstract = {One-month prevalence results were determined from 18,571 persons interviewed + in the first-wave community samples of all five sites that constituted + the National Institute of Mental Health Epidemiologic Catchment Area + Program.
US population estimates, based on combined site data, were + that 15.4\% of the population 18 years of age and over fulfilled + criteria for at least one alcohol, drug abuse, or other mental disorder + during the period one month before interview. Higher prevalence rates + of most mental disorders were found among younger people (less than + age 45 years), with the exception of severe cognitive impairments. + Men had higher rates of substance abuse and antisocial personality, + whereas women had higher rates of affective, anxiety, and somatization + disorders. When restricted to the diagnostic categories covered in + international studies based on the Present State Examination, results + fell within the range reported for European and Australian studies.}, + Author = {D. A. Regier and J. H. Boyd and J. D. Burke and D. S. Rae and J. K. Myers and M. Kramer and L. N. Robins and L. K. George and M. Karno and B. Z. Locke}, + Institution = {Division of Clinical Research, National Institute of Mental Health, Rockville, Md.}, + Journal = {Arch Gen Psychiatry}, + Keywords = {Adolescent; Adult; Age Factors; Aged; Australia; Catchment Area (Health); Cross-Cultural Comparison; Cross-Sectional Studies; Europe; Female; Health Surveys; Humans; Male; Mental Disorders, diagnosis/epidemiology; Middle Aged; Periodicity; Psychiatric Status Rating Scales; Sex Factors; United States}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {11}, + Owner = {Young}, + Pages = {977--986}, + Pmid = {3263101}, + Timestamp = {2010.05.01}, + Title = {One-month prevalence of mental disorders in the United States. Based on five Epidemiologic Catchment Area sites.}, + Volume = {45}, + Year = {1988}} + +@article{Reidy2000, + Author = {Reidy, J. J. and Paulus, M. P. and Gona, S.}, + Journal = {Cornea}, + Month = {Nov}, + Pages = {767--771}, + Title = {{{R}ecurrent erosions of the cornea: epidemiology and treatment}}, + Volume = {19}, + Year = {2000}} + +@article{Reijntjesetal.2007, + Author = {{Reijntjes et al.}, A.}, + Title = {Children's Emotional Activation and Regulation in Response to an In Vivo Peer Evaluation Manipulation: {T}he Role of Depressive Symptoms. {M}anuscript submitted for publication.}, + Year = {2007}} + +@article{Reise2003, + Author = {Reise, S. P. and Henson, J. M.}, + Journal = {Journal of Personality Assessment}, + Owner = {Wooyoung Ahn}, + Pages = {93--103}, + Timestamp = {2007.04.30}, + Title = {A discussion of modern versus traditional psychometrics as applied to personality assessment scales}, + Volume = {81}, + Year = {2003}} + +@article{Reiss2006, + Author = {Reiss, J. P. and Campbell, D. W. and Leslie, W. D. and Paulus, M. P. and Ryner, L. N. and Polimeni, J. O. and Foot, B. J. and Sareen, J.}, + Journal = {Schizophr. Res.}, + Month = {Oct}, + Pages = {127--137}, + Title = {{{D}eficit in schizophrenia to recruit the striatum in implicit learning: a functional magnetic resonance imaging investigation}}, + Volume = {87}, + Year = {2006}} + +@article{Reiss2005, + Author = {Reiss, J. P. and Campbell, D. W. and Leslie, W. D. and Paulus, M. P. and Stroman, P. W. and Polimeni, J. O. and Malcolmson, K. A. and Sareen, J.}, + Journal = {Neuroreport}, + Month = {Aug}, + Pages = {1291--1295}, + Title = {{{T}he role of the striatum in implicit learning: a functional magnetic resonance imaging study}}, + Volume = {16}, + Year = {2005}} + +@article{Renshaw2009, + Author = {Renshaw, P. F. and Parsegian, A. and Yang, C. K. and Novero, A. and Yoon, S. J. and Lyoo, I. K. and Cohen, B. M.
and Carlezon, W. A.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Mar}, + Pages = {88--92}, + Title = {{{L}ovastatin potentiates the antidepressant efficacy of fluoxetine in rats}}, + Volume = {92}, + Year = {2009}} + +@article{Renthal2008a, + Author = {Renthal, W. and Carle, T. L. and Maze, I. and Covington, H. E. and Truong, H. T. and Alibhai, I. and Kumar, A. and Montgomery, R. L. and Olson, E. N. and Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Jul}, + Pages = {7344--7349}, + Title = {{{D}elta {F}os{B} mediates epigenetic desensitization of the c-fos gene after chronic amphetamine exposure}}, + Volume = {28}, + Year = {2008}} + +@article{Renthal2009a, + Author = {Renthal, W. and Kumar, A. and Xiao, G. and Wilkinson, M. and Covington, H. E. and Maze, I. and Sikder, D. and Robison, A. J. and LaPlant, Q. and Dietz, D. M. and Russo, S. J. and Vialou, V. and Chakravarty, S. and Kodadek, T. J. and Stack, A. and Kabbaj, M. and Nestler, E. J.}, + Journal = {Neuron}, + Month = {May}, + Pages = {335--348}, + Title = {{{G}enome-wide analysis of chromatin regulation by cocaine reveals a role for sirtuins}}, + Volume = {62}, + Year = {2009}} + +@article{Renthal2007, + Author = {Renthal, W. and Maze, I. and Krishnan, V. and Covington, H. E. and Xiao, G. and Kumar, A. and Russo, S. J. and Graham, A. and Tsankova, N. and Kippin, T. E. and Kerstetter, K. A. and Neve, R. L. and Haggarty, S. J. and McKinsey, T. A. and Bassel-Duby, R. and Olson, E. N. and Nestler, E. J.}, + Journal = {Neuron}, + Month = {Nov}, + Pages = {517--529}, + Title = {{{H}istone deacetylase 5 epigenetically controls behavioral adaptations to chronic emotional stimuli}}, + Volume = {56}, + Year = {2007}} + +@article{Renthal2009, + Author = {Renthal, W. and Nestler, E. J.}, + Journal = {Semin. Cell Dev. Biol.}, + Month = {Jun}, + Pages = {387--394}, + Title = {{{H}istone acetylation in drug addiction}}, + Volume = {20}, + Year = {2009}} + +@article{Renthal2008, + Author = {Renthal, W. and Nestler, E. J.}, + Journal = {Trends Mol Med}, + Month = {Aug}, + Pages = {341--350}, + Title = {{{E}pigenetic mechanisms in drug addiction}}, + Volume = {14}, + Year = {2008}} + +@book{Rescorla1972, + Author = {Rescorla, R. A. and Wagner, A. R.}, + Editor = {Black, A. H. and Prokasy, W. F.}, + Owner = {WooYoung Ahn}, + Publisher = {Appleton-Century-Crofts}, + Timestamp = {2007.09.17}, + Title = {A theory of {P}avlovian conditioning: variations in the effectiveness of reinforcement and nonreinforcement}, + Year = {1972}} + +@article{Reske2008, + Author = {Reske, M. and Paulus, M. P.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Oct}, + Pages = {270--283}, + Title = {{{P}redicting treatment outcome in stimulant dependence}}, + Volume = {1141}, + Year = {2008}} + +@article{Reuter2005, + Author = {Reuter, J. and Raedler, T. and Rose, M. and Hand, I. and Gl{\"a}scher, J. and B{\"u}chel, C.}, + Journal = {Nature Neuroscience}, + Number = {2}, + Pages = {147--148}, + Publisher = {Nature Publishing Group}, + Title = {{Pathological gambling is linked to reduced activation of the mesolimbic reward system}}, + Volume = {8}, + Year = {2005}} + +@article{Rice2001, + Author = {Rice, O. V. and Gatley, S. J. and Shen, J. and Huemmer, C. L. and Rogoz, R. and DeJesus, O. T. and Volkow, N. D. and Gifford, A.
N.}, + Journal = {Neuropsychopharmacology}, + Month = {Nov}, + Pages = {679--689}, + Title = {{{E}ffects of endogenous neurotransmitters on the in vivo binding of dopamine and 5-{H}{T} radiotracers in mice}}, + Volume = {25}, + Year = {2001}} + +@article{Rickard2004, + Author = {Rickard, T. C.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {65--82}, + Title = {Strategy Execution in Cognitive Skill Learning: {A}n Item--Level Test of Candidate Models}, + Volume = {30}, + Year = {2004}} + +@article{Rickard1999, + Author = {Rickard, T. C.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {532--542}, + Title = {A {CMPL} Alternative Account of Practice Effects in Numerosity Judgment Tasks}, + Volume = {25}, + Year = {1999}} + +@article{Rickard1997, + Author = {Rickard, T. C.}, + Journal = {Journal of Experimental Psychology: General}, + Pages = {288--311}, + Title = {Bending the Power Law: {A} {CMPL} Theory of Strategy Shifts and the Automatization of Cognitive Skills}, + Volume = {126}, + Year = {1997}} + +@article{Riefer2002, + Author = {Riefer, D. M. and Knapp, B. R. and Batchelder, W. H. and Bamber, D. and Manifold, V.}, + Journal = {Psychological Assessment}, + Pages = {184--201}, + Title = {Cognitive Psychometrics: {A}ssessing Storage and Retrieval Deficits in Special Populations with Multinomial Processing Tree Models}, + Volume = {14}, + Year = {2002}} + +@article{Riefer1992, + Abstract = {A series of experiments was conducted to explore the cognitive processes + that mediate the bizarreness effect, that is, the finding that bizarre + or unusual imagery is recalled better than common imagery. In all + experiments, subjects were presented with noun pairs that were embedded + within bizarre or common sentences in a mixed-list design. None of + the experiments produced a bizarreness effect for cued recall; however, + for two of the experiments, the bizarre noun pairs were remembered + significantly better than the common pairs for free recall. To determine + if these differences were due to the storage or retrieval of the + items, a multinomial model for the analysis of imagery mediation + in paired-associate learning was developed and applied to the data + from the experiments. The model revealed that bizarre sentences benefited + the retrieval of the noun pairs but not their storage within memory. + The empirical and modeling results are discussed relative to previous + findings and theories on the bizarreness effect.}, + Author = {D. M. Riefer and J. N. Rouder}, + Institution = {Department of Psychology, California State University, San Bernardino 92407.}, + Journal = {Mem Cognit}, + Keywords = {Adult; Attention; Female; Humans; Imagination; Male; Mental Recall; Models, Statistical; Paired-Associate Learning; Retention (Psychology)}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {601--611}, + Pmid = {1435263}, + Timestamp = {2009.08.15}, + Title = {A multinomial modeling analysis of the mnemonic benefits of bizarre imagery.}, + Volume = {20}, + Year = {1992}} + +@article{Rieskamp2003, + Author = {Rieskamp, J. and Busemeyer, Jerome R. and Laine, Tei}, + Journal = {Journal of Experimental Psychology: Learning, Memory and Cognition}, + Owner = {WooYoung Ahn}, + Pages = {1066--1081}, + Timestamp = {2008.04.08}, + Title = {How do people learn to allocate resources?
Comparing Two Learning Theories.}, + Volume = {29}, + Year = {2003}} + +@article{Risbrough2006, + Author = {Risbrough, V. B. and Masten, V. L. and Caldwell, S. and Paulus, M. P. and Low, M. J. and Geyer, M. A.}, + Journal = {Neuropsychopharmacology}, + Month = {Nov}, + Pages = {2349--2358}, + Title = {{{D}ifferential contributions of dopamine {D}1, {D}2, and {D}3 receptors to {M}{D}{M}{A}-induced effects on locomotor behavior patterns in mice}}, + Volume = {31}, + Year = {2006}} + +@article{Risinger2005, + Author = {Risinger, R. C. and Salmeron, B. J. and Ross, T. J. and Amen, S. L. and Sanfilipo, M. and Hoffmann, R. G. and Bloom, A. S. and Garavan, H. and Stein, E. A.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {1097--1108}, + Title = {{{N}eural correlates of high and craving during cocaine self-administration using {B}{O}{L}{D} f{M}{R}{I}}}, + Volume = {26}, + Year = {2005}} + +@book{Risken1984, + Address = {Berlin}, + Author = {Risken, H.}, + Publisher = {Springer}, + Title = {The {F}okker--{P}lanck Equation}, + Year = {1984}} + +@incollection{Rissanen1992, + Address = {Oxford}, + Author = {Rissanen, J.}, + Booktitle = {{B}ayesian Statistics 4}, + Editor = {Bernardo, J. M. and Berger, J. O. and Dawid, A. P. and Smith, A. F. M.}, + Pages = {121--122}, + Publisher = {Oxford University Press}, + Title = {Discussion of ``Prequential Analysis, Stochastic Complexity and {B}ayesian Inference'' by {A. P. Dawid}}, + Year = {1992}} + +@article{Rissanen2003, + Author = {Rissanen, J.}, + Journal = {{IEEE} Transactions on Information Theory}, + Pages = {476--484}, + Title = {Complexity of Simple Nonlogarithmic Loss Functions}, + Volume = {49}, + Year = {2003}} + +@article{Rissanen2001, + Author = {Rissanen, J.}, + Journal = {{IEEE} Transactions on Information Theory}, + Pages = {1712--1717}, + Title = {Strong Optimality of the Normalized {ML} Models as Universal Codes and Information in Data}, + Volume = {47}, + Year = {2001}} + +@article{Rissanen1999, + Author = {Rissanen, J.}, + Journal = {The Computer Journal}, + Pages = {260--269}, + Title = {Hypothesis Selection and Testing by the {MDL} Principle}, + Volume = {42}, + Year = {1999}} + +@article{Rissanen1999a, + Author = {Rissanen, J.}, + Journal = {The Computer Journal}, + Pages = {327--329}, + Title = {Discussion of Paper `{M}inimum {M}essage {L}ength and {K}olmogorov {C}omplexity' by {C. S. W}allace and {D. L. D}owe}, + Volume = {42}, + Year = {1999}} + +@article{Rissanen1996, + Author = {Rissanen, J.}, + Journal = {{IEEE} Transactions on Information Theory}, + Pages = {40--47}, + Title = {{F}isher Information and Stochastic Complexity}, + Volume = {42}, + Year = {1996}} + +@book{Rissanen1989, + Address = {Teaneck (NJ)}, + Author = {Rissanen, J.}, + Publisher = {World Scientific Publishers}, + Title = {Stochastic Complexity in Statistical Inquiry}, + Year = {1989}} + +@article{Rissanen1987, + Author = {Rissanen, J.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {223--239}, + Title = {Stochastic Complexity}, + Volume = {49}, + Year = {1987}} + +@article{Rissanen1986, + Author = {Rissanen, J.}, + Journal = {The Annals of Statistics}, + Pages = {1080--1100}, + Title = {Stochastic Complexity and Modeling}, + Volume = {14}, + Year = {1986}} + +@article{Rissanen1986a, + Author = {Rissanen, J.}, + Journal = {{IMA} Journal of Mathematical Control and Information}, + Pages = {211--222}, + Title = {A Predictive Least-squares Principle}, + Volume = {3}, + Year = {1986}} + +@article{Rissanen1992a, + Author = {Rissanen, J.
and Speed, T. and Yu, B.}, + Journal = {{IEEE} Transactions on Information Theory}, + Pages = {315--323}, + Title = {Density Estimation by Stochastic Complexity}, + Volume = {38}, + Year = {1992}} + +@article{Roach1995, + Author = {Roach, E. S. and Delgado, M. R.}, + Journal = {Dermatol Clin}, + Month = {Jan}, + Pages = {151--161}, + Title = {{{T}uberous sclerosis}}, + Volume = {13}, + Year = {1995}} + +@article{Robbins2000, + Author = {Robbins, S. J. and Ehrman, R. N. and Childress, A. R. and Cornish, J. W. and O'Brien, C. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Apr}, + Pages = {33--42}, + Title = {{{M}ood state and recent cocaine use are not associated with levels of cocaine cue reactivity}}, + Volume = {59}, + Year = {2000}} + +@article{Robbins1999, + Author = {Robbins, S. J. and Ehrman, R. N. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Feb}, + Pages = {223--230}, + Title = {{{C}omparing levels of cocaine cue reactivity in male and female outpatients}}, + Volume = {53}, + Year = {1999}} + +@article{Robbins1997, + Author = {Robbins, S. J. and Ehrman, R. N. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Addict Behav}, + Pages = {157--167}, + Title = {{{R}elationships among physiological and self-report responses produced by cocaine-related cues}}, + Volume = {22}, + Year = {1997}} + +@article{Robbins1992, + Author = {Robbins, S. J. and Ehrman, R. N. and Childress, A. R. and O'Brien, C. P.}, + Journal = {Addict Behav}, + Pages = {491--499}, + Title = {{{U}sing cue reactivity to screen medications for cocaine abuse: a test of amantadine hydrochloride}}, + Volume = {17}, + Year = {1992}} + +@article{Roberson-Nay2006, + Author = {Roberson-Nay, R. and McClure, E. B. and Monk, C. S. and Nelson, E. E. and Guyer, A. E. and Fromm, S. J. and Charney, D. S. and Leibenluft, E. and Blair, J. and Ernst, M. and Pine, D. S.}, + Journal = {Biol. Psychiatry}, + Month = {Nov}, + Pages = {966--973}, + Title = {{{I}ncreased amygdala activity during successful memory encoding in adolescent major depressive disorder: {A}n {F}{M}{R}{I} study}}, + Volume = {60}, + Year = {2006}} + +@book{Robert1999, + Address = {New York}, + Author = {Robert, C. P. and Casella, G.}, + Publisher = {Springer}, + Title = {{M}onte {C}arlo Statistical Methods}, + Year = {1999}} + +@article{Robert1996, + Author = {Robert, C. P. and Hwang, J. T. G.}, + Journal = {Journal of the American Statistical Association}, + Pages = {167--172}, + Title = {Maximum Likelihood Estimation Under Order Restriction by the Prior Feedback Method}, + Volume = {91}, + Year = {1996}} + +@article{Roberts2009, + Author = {Roberts, G. M. and Nestor, L. and Garavan, H.}, + Journal = {Brain Res.}, + Month = {Jul}, + Title = {{{L}earning and memory deficits in ecstasy users and their neural correlates during a face-learning task}}, + Year = {2009}} + +@article{Roberts2008, + Author = {Roberts, G. M. and Newell, F. and Sim{\~o}es-Franklin, C. and Garavan, H.}, + Journal = {Brain Res.}, + Month = {Aug}, + Pages = {79--87}, + Title = {{{M}enstrual cycle phase modulates cognitive control over male but not female stimuli}}, + Volume = {1224}, + Year = {2008}} + +@article{Roberts2002, + Author = {Roberts, S. and Pashler, H.}, + Journal = {Psychological Review}, + Pages = {605}, + Title = {Reply to {R}odgers and {R}owe (2002)}, + Volume = {109}, + Year = {2002}} + +@article{Roberts2000, + Author = {Roberts, S.
and Pashler, H.}, + Journal = {Psychological Review}, + Pages = {358--367}, + Title = {How Persuasive is a Good Fit? {A} Comment on Theory Testing in Psychology}, + Volume = {107}, + Year = {2000}} + +@article{Roberts2005, + Author = {Roberts, S. J. and Platt, M. L.}, + Journal = {Contemp Top Lab Anim Sci}, + Month = {Sep}, + Pages = {13--18}, + Title = {{{E}ffects of isosexual pair-housing on biomedical implants and study participation in male macaques}}, + Volume = {44}, + Year = {2005}} + +@book{Robertson1988, + Address = {New York}, + Author = {Robertson, T. and Wright, F. T. and Dykstra, R. L.}, + Publisher = {Wiley}, + Title = {Order Restricted Statistical Inference}, + Year = {1988}} + +@article{Robins2000, + Author = {Robins, J. and Wasserman, L.}, + Journal = {Journal of the American Statistical Association}, + Pages = {1340--1346}, + Title = {Conditioning, Likelihood, and Coherence: A Review of Some Foundational Concepts}, + Volume = {95}, + Year = {2000}} + +@article{Robinson1975, + Author = {Robinson, G. K.}, + Journal = {Biometrika}, + Pages = {155--161}, + Title = {Some Counterexamples to the Theory of Confidence Intervals}, + Volume = {62}, + Year = {1975}} + +@article{Robinson2001, + Author = {Robinson, T.E. and Gorny, G. and Mitton, E. and Kolb, B.}, + Journal = {Synapse}, + Number = {3}, + Publisher = {John Wiley \& Sons, Inc. New York}, + Title = {{Cocaine self-administration alters the morphology of dendrites and dendritic spines in the nucleus accumbens and neocortex}}, + Volume = {39}, + Year = {2001}} + +@article{Roche2004, + Author = {Roche, R. A. and Dockree, P. M. and Garavan, H. and Foxe, J. J. and Robertson, I. H. and O'Mara, S. M.}, + Journal = {Neurosci. Lett.}, + Month = {May}, + Pages = {1--5}, + Title = {{{E}{E}{G} alpha power changes reflect response inhibition deficits after traumatic brain injury ({T}{B}{I}) in humans}}, + Volume = {362}, + Year = {2004}} + +@article{Roche2005, + Author = {Roche, R. A. and Garavan, H. and Foxe, J. J. and O'Mara, S. M.}, + Journal = {Exp Brain Res}, + Month = {Jan}, + Pages = {60--70}, + Title = {{{I}ndividual differences discriminate event-related potentials but not performance during response inhibition}}, + Volume = {160}, + Year = {2005}} + +@article{Roe2001, + Author = {Roe, R. M. and Busemeyer, J. R. and Townsend, J. T.}, + Journal = {Psychological Review}, + Pages = {370--392}, + Title = {Multi--alternative Decision Field Theory: A Dynamic Artificial Neural Network Model of Decision--making}, + Volume = {108}, + Year = {2001}} + +@article{Rogers2000, + Author = {Rogers, R. D. and Andrews, T. C. and Grasby, P. M. and Brooks, D. J. and Robbins, T. W.}, + Journal = {J Cogn Neurosci}, + Month = {Jan}, + Pages = {142--162}, + Title = {{{C}ontrasting cortical and subcortical activations produced by attentional-set shifting and reversal learning in humans}}, + Volume = {12}, + Year = {2000}} + +@article{Rogers1999, + Author = {Rogers, R. D. and Blackshaw, A. J. and Middleton, H. C. and Matthews, K. and Hawtin, K. and Crowley, C. and Hopwood, A. and Wallace, C. and Deakin, J. F. and Sahakian, B. J. and Robbins, T. 
W.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {482--491}, + Title = {{{T}ryptophan depletion impairs stimulus-reward learning while methylphenidate disrupts attentional control in healthy young adults: implications for the monoaminergic basis of impulsive behaviour}}, + Volume = {146}, + Year = {1999}} + +@article{Rogers1999b, + Abstract = {We used a novel computerized decision-making task to compare the decision-making + behavior of chronic amphetamine abusers, chronic opiate abusers, + and patients with focal lesions of orbital prefrontal cortex (PFC) + or dorsolateral/medial PFC. We also assessed the effects of reducing + central 5-hydroxytryptamine (5-HT) activity using a tryptophan-depleting + amino acid drink in normal volunteers. Chronic amphetamine abusers + showed suboptimal decisions (correlated with years of abuse), and + deliberated for significantly longer before making their choices. + The opiate abusers exhibited only the second of these behavioral + changes. Importantly, both sub-optimal choices and increased deliberation + times were evident in the patients with damage to orbitofrontal PFC + but not other sectors of PFC. Qualitatively, the performance of the + subjects with lowered plasma tryptophan was similar to that associated + with amphetamine abuse, consistent with recent reports of depleted + 5-HT in the orbital regions of PFC of methamphetamine abusers. Overall, + these data suggest that chronic amphetamine abusers show similar + decision-making deficits to those seen after focal damage to orbitofrontal + PFC. These deficits may reflect altered neuromodulation of the orbitofrontal + PFC and interconnected limbic-striatal systems by both the ascending + 5-HT and mesocortical dopamine (DA) projections.}, + Author = {R. D. Rogers and B. J. Everitt and A. Baldacchino and A. J. Blackshaw and R. Swainson and K. Wynne and N. B. Baker and J. Hunter and T. Carthy and E. Booker and M. London and J. F. Deakin and B. J. Sahakian and T. W. Robbins}, + Doi = {10.1016/S0893-133X(98)00091-8}, + Institution = {Department of Experimental Psychology, University of Cambridge, UK.}, + Journal = {Neuropsychopharmacology}, + Keywords = {Adult; Amphetamine-Related Disorders, psychology; Biogenic Monoamines, physiology; Brain Chemistry, physiology; Cognition, drug effects; Decision Making, drug effects; Female; Humans; Male; Middle Aged; Opioid-Related Disorders, psychology; Prefrontal Cortex, physiology; Risk Adjustment; Serotonin, physiology; Time Factors; Tryptophan, deficiency}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {322--339}, + Pii = {S0893133X98000918}, + Pmid = {10088133}, + Timestamp = {2009.08.06}, + Title = {Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms.}, + Url = {http://dx.doi.org/10.1016/S0893-133X(98)00091-8}, + Volume = {20}, + Year = {1999}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/S0893-133X(98)00091-8}} + +@article{Rogers2004, + Author = {Rogers, R. D. and Lancaster, M. and Wakeley, J. and Bhagwagar, Z.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {157--164}, + Title = {{{E}ffects of beta-adrenoceptor blockade on components of human decision-making}}, + Volume = {172}, + Year = {2004}} + +@article{Rogers1999a, + Author = {Rogers, R. D. and Owen, A. M. 
and Middleton, H. C. and Williams, E. J. and Pickard, J. D. and Sahakian, B. J. and Robbins, T. W.}, + Journal = {J. Neurosci.}, + Month = {Oct}, + Pages = {9029--9038}, + Title = {{{C}hoosing between small, likely rewards and large, unlikely rewards activates inferior and orbital prefrontal cortex}}, + Volume = {19}, + Year = {1999}} + +@article{Rogers2004a, + Author = {Rogers, R. D. and Ramnani, N. and Mackay, C. and Wilson, J. L. and Jezzard, P. and Carter, C. S. and Smith, S. M.}, + Journal = {Biol. Psychiatry}, + Month = {Mar}, + Pages = {594--602}, + Title = {{{D}istinct portions of anterior cingulate cortex and medial prefrontal cortex are activated by reward processing in separable phases of decision-making cognition}}, + Volume = {55}, + Year = {2004}} + +@article{Rogers2003, + Author = {Rogers, R. D. and Tunbridge, E. M. and Bhagwagar, Z. and Drevets, W. C. and Sahakian, B. J. and Carter, C. S.}, + Journal = {Neuropsychopharmacology}, + Month = {Jan}, + Pages = {153--162}, + Title = {{{T}ryptophan depletion alters the decision-making of healthy volunteers through altered processing of reward cues}}, + Volume = {28}, + Year = {2003}} + +@article{Rogers2007, + Author = {Rogers, R. D. and Wakeley, J. and Robson, P. J. and Bhagwagar, Z. and Makela, P.}, + Journal = {Neuropsychopharmacology}, + Month = {Feb}, + Pages = {417--428}, + Title = {{{T}he effects of low doses of delta-9 tetrahydrocannabinol on reinforcement processing in the risky decision-making of young healthy adults}}, + Volume = {32}, + Year = {2007}} + +@article{Rohsenow1990, + Author = {Rohsenow, D. J. and Niaura, R. S. and Childress, A. R. and Abrams, D. B. and Monti, P. M.}, + Journal = {Int J Addict}, + Pages = {957--993}, + Title = {{{C}ue reactivity in addictive behaviors: theoretical and treatment implications}}, + Volume = {25}, + Year = {1990}} + +@article{Roiser2009, + Abstract = {OBJECTIVES: Neuropsychological studies in subjects with bipolar disorder + (BD) have reported deficits on a variety of cognitive measures. However, + because the majority of subjects were medicated at the time of testing + in previous studies, it is currently unclear whether the pattern + of deficits reported is related to BD itself or to psychotropic medication. + We addressed this issue by examining cognitive performance in a group + of unmedicated, currently depressed subjects with BD. METHODS: Forty-nine + unmedicated subjects who met DSM-IV criteria for BD, depressed phase, + and 55 control subjects participated in this study. Most patients + were diagnosed with bipolar II disorder. Performance on emotion-dependent, + or 'hot', and emotion-independent, or 'cold', cognitive tasks was + assessed using tests from the Cambridge Neuropsychological Test Automated + Battery. RESULTS: The groups were well matched with respect to general + intelligence and demographic variables. Deficits in the unmedicated + depressed BD group were apparent on tests tapping 'hot' cognitive + processing, for example the Cambridge Gamble task and the Probabilistic + Reversal Learning task. However, other than a deficit on the Spatial + Span test in the depressed BD subjects, the groups performed equivalently + on most measures of 'cold' cognitive processing, for example visual + memory, attention, and working memory. 
CONCLUSIONS: These data suggest + that deficits on tests involving reward processing, short-term spatial + memory storage, and sensitivity to negative feedback in depressed + BD subjects represent an effect of the illness itself and not mood-stabilizing + medication.}, + Author = {Jonathan P Roiser and Dara M Cannon and Shilpa K Gandhi and Joana Taylor Tavares and Kristine Erickson and Suzanne Wood and Jacqueline M Klaver and Luke Clark and Carlos A Zarate and Barbara J Sahakian and Wayne C Drevets}, + Doi = {10.1111/j.1399-5618.2009.00669.x}, + Institution = {Institute of Cognitive Neuroscience, University College London, London, UK.}, + Journal = {Bipolar Disord}, + Keywords = {Adult; Analysis of Variance; Attention, physiology; Automatic Data Processing; Bipolar Disorder, complications; Case-Control Studies; Choice Behavior, physiology; Cognition Disorders, etiology/psychology; Emotions; Female; Humans; Male; Memory, physiology; Neuropsychological Tests; Problem Solving, physiology; Psychotherapeutic Processes; Reversal Learning, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Mar}, + Number = {2}, + Owner = {Young}, + Pages = {178--189}, + Pii = {BDI669}, + Pmid = {19267700}, + Timestamp = {2010.05.01}, + Title = {Hot and cold cognition in unmedicated depressed subjects with bipolar disorder.}, + Url = {http://dx.doi.org/10.1111/j.1399-5618.2009.00669.x}, + Volume = {11}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1399-5618.2009.00669.x}} + +@article{Roitman2007a, + Author = {Roitman, J. D. and Brannon, E. M. and Andrews, J. R. and Platt, M. L.}, + Journal = {Acta Psychol (Amst)}, + Month = {Mar}, + Pages = {296--318}, + Title = {{{N}onverbal representation of time and number in adults}}, + Volume = {124}, + Year = {2007}} + +@article{Roitman2007, + Author = {Roitman, J. D. and Brannon, E. M. and Platt, M. L.}, + Journal = {PLoS Biol.}, + Month = {Aug}, + Pages = {e208}, + Title = {{{M}onotonic coding of numerosity in macaque lateral intraparietal area}}, + Volume = {5}, + Year = {2007}} + +@article{Roitman2002, + Author = {Roitman, J. D. and Shadlen, M. N.}, + Journal = {Journal of Neuroscience}, + Pages = {9475--9489}, + Title = {Responses of Neurons in the Lateral Intraparietal Area During a Combined Visual Discrimination Reaction Time Task}, + Volume = {22}, + Year = {2002}} + +@article{Roitman2002b, + Author = {Roitman, J. D. and Shadlen, M. N.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {9475--9489}, + Title = {{{R}esponse of neurons in the lateral intraparietal area during a combined visual discrimination reaction time task}}, + Volume = {22}, + Year = {2002}} + +@article{Romeas2009, + Abstract = {RATIONALE: Anhedonia, or hyposensitivity to normally pleasurable stimuli, + is a cardinal symptom of depression. As such, reward circuitry may + comprise a substrate with relevance to this symptom of depression. + OBJECTIVES: Our aim was to characterize in the rat changes in the + rewarding properties of a pharmacological and a natural stimulus + following olfactory bulbectomy (OBX), a pre-clinical animal model + of depression. METHODS: We measured amphetamine enhancement of brain + stimulation reward, changes in sucrose intake, as well as striatal + cAMP response element binding protein (CREB) activity, a molecular + index previously associated with depressant-like behavior.
Moreover, + since alteration of psychomotor activity is also a common symptom + of depression, and psychostimulant reward and locomotion are thought + to share common neurobiology, we used the same treatment schedule + of amphetamine to probe for changes in locomotion. RESULTS: Our findings + show that OBX produces a behavioral phenotype characterized by both + anhedonia and exaggerated locomotor activation. Thus, we observed + a blunted response to the rewarding properties of amphetamine (1 + mg/kg, 21 days post-lesion), a long-lasting reduction in sucrose + intake and increased striatal CREB activity. In addition, the same + dose of amphetamine, at a coincident time post-lesion, triggered + an exaggerated response to its locomotor-stimulant actions. CONCLUSIONS: + These paradoxical findings are not consistent with the notion that + reward and locomotion are mediated by a common substrate; this dissociation + may be useful in modeling psychiatric disorders such as mixed depressive + states. In addition, our findings suggest that central reward circuitry + may constitute a possible target for rationally designed therapeutics + for depression.}, + Doi = {10.1007/s00213-009-1539-y}, + Institution = {Department of Pharmacology, University of Montreal, Montreal, Quebec, Canada.}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Amphetamine, pharmacology; Analgesics, Non-Narcotic, administration /&/ dosage; Analysis of Variance; Animals; Body Weight, drug effects; CREB-Binding Protein, metabolism; Central Nervous System Stimulants, pharmacology; Conditioning, Operant, drug effects; Depression, drug therapy/physiopathology/psychology; Disease Models, Animal; Food Preferences, drug effects; Gene Expression Regulation, drug effects/physiology; Locomotion, drug effects/physiology; Male; Olfactory Bulb, injuries/physiopathology; Quinine, administration /&/ dosage; Rats; Rats, Sprague-Dawley; Reward; Sucrose, administration /&/ dosage; Sweetening Agents, administration /&/ dosage}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {2}, + Owner = {Young}, + Pages = {293--303}, + Pmid = {19404615}, + Timestamp = {2009.12.10}, + Title = {Simultaneous anhedonia and exaggerated locomotor activation in an animal model of depression.}, + Url = {http://dx.doi.org/10.1007/s00213-009-1539-y}, + Volume = {205}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-009-1539-y}} + +@article{Romo1990, + Author = {Romo, R. and Schultz, W.}, + Journal = {Journal of neurophysiology}, + Number = {3}, + Pages = {592--606}, + Publisher = {Am Physiological Soc}, + Title = {{Dopamine neurons of the monkey midbrain: contingencies of responses to active touch during self-initiated arm movements}}, + Volume = {63}, + Year = {1990}} + +@article{Rooney2000, + Author = {Rooney, W. D. and Lee, J. H. and Li, X. and Wang, G. J. and Franceschi, D. and Springer, C. S. and Volkow, N. D.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jun}, + Pages = {830--836}, + Title = {{4.0 {T} water proton {T}1 relaxation times in normal human brain and during acute ethanol intoxication}}, + Volume = {24}, + Year = {2000}} + +@article{Rorick2004, + Author = {Rorick, L. M. and Finn, P. R. and Steinmetz, J. E.}, + Journal = {Integr Physiol Behav Sci}, + Pages = {24--41}, + Title = {{{H}eart rate reactivity in {H}{A}{D} and {L}{A}{D} rats during {P}avlovian fear conditioning}}, + Volume = {39}, + Year = {2004}} + +@article{Rorick2003, + Author = {Rorick, L. M. and Finn, P. R. and Steinmetz, J. 
E.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Apr}, + Pages = {89--102}, + Title = {{{M}oderate doses of ethanol partially reverse avoidance learning deficits in high-alcohol-drinking rats}}, + Volume = {75}, + Year = {2003}} + +@article{Rorick2003a, + Author = {Rorick, L. M. and Finn, P. R. and Steinmetz, J. E.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Sep}, + Pages = {223--230}, + Title = {{{H}igh-alcohol-drinking rats exhibit persistent freezing responses to discrete cues following {P}avlovian fear conditioning}}, + Volume = {76}, + Year = {2003}} + +@article{Rosenkilde1981, + Author = {Rosenkilde, CE and Bauer, RH and Fuster, JM}, + Journal = {Brain Research}, + Number = {2}, + Pages = {375}, + Title = {{Single cell activity in ventral prefrontal cortex of behaving monkeys.}}, + Volume = {209}, + Year = {1981}} + +@article{Rosenthal1963, + Author = {Rosenthal, R. and Gaito, J.}, + Journal = {The Journal of Psychology}, + Pages = {33--38}, + Title = {The Interpretation of Levels of Significance by Psychological Researchers}, + Volume = {55}, + Year = {1963}} + +@article{Roskind2007, + Author = {Roskind, C. G. and Ruzal-Shapiro, C. B. and Dowd, E. K. and Dayan, P. S.}, + Journal = {Pediatr Emerg Care}, + Month = {Nov}, + Pages = {785--789}, + Title = {{{T}est characteristics of the 3-view abdominal radiograph series in the diagnosis of intussusception}}, + Volume = {23}, + Year = {2007}} + +@article{Rosnow1989, + Author = {Rosnow, R. L. and Rosenthal, R.}, + Journal = {American Psychologist}, + Pages = {1276--1284}, + Title = {Statistical Procedures and the Justification of Knowledge in Psychological Science}, + Volume = {44}, + Year = {1989}} + +@article{Rotheram-Fuller2004, + Author = {Rotheram-Fuller, E. and Shoptaw, S. and Berman, S. M. and London, E. D.}, + Journal = {Drug Alcohol Depend}, + Pages = {79--86}, + Title = {{{I}mpaired performance in a test of decision-making by opiate-dependent tobacco smokers}}, + Volume = {73}, + Year = {2004}} + +@article{Rouder2004d, + Abstract = {Letters and words are better identified when there are fewer available + choices. How do readers use choice-set restrictions? By analyzing + new experimental data and previously reported data, the author shows + that Bayes theorem-based models overestimate readers' use of choice-set + restrictions. This result is discordant with choice-similarity models + such as R. D. Luce's (1963a) similarity choice model, G. Keren and + S. Baggen's (1981) letter recognition model, and D. W. Massaro and + G. C. Oden's (1979) fuzzy logical model of perception. Other models + posit that choice restrictions affect accuracy only by improving + guessing (e.g., J. L. McClelland & D. E. Rumelhart's, 1981, interactive + activation model). It is shown that these models underestimate readers' + use of choice-set restrictions. Restriction of choice set does improve + perception of letters and words, but not optimally. Decision models + that may be able to explain this phenomenon are discussed.}, + Author = {Jeffrey N Rouder}, + Doi = {10.1037/0033-295X.111.1.80}, + Institution = {Department of Psychological Sciences, University of Missouri-Columbia, Columbia, MO 65211, USA.
jeff@banta.psyc.missouri.edu}, + Journal = {Psychol Rev}, + Keywords = {Attention; Choice Behavior; Conditioning (Psychology); Decision Support Techniques; Humans; Models, Psychological; Paired-Associate Learning; Pattern Recognition, Visual; Reading; Semantics; Sensory Thresholds; Set (Psychology)}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {80--93}, + Pii = {2004-10332-005}, + Pmid = {14756587}, + Timestamp = {2009.08.15}, + Title = {Modeling the effects of choice-set size on the processing of letters and words.}, + Url = {http://dx.doi.org/10.1037/0033-295X.111.1.80}, + Volume = {111}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1037/0033-295X.111.1.80}} + +@article{Rouder2001b, + Abstract = {It is well known that people can perfectly identify only a handful + of distinct unidimensional stimuli, such as line lengths, but can + identify thousands of complex stimuli, such as letters and words. + This result is consistent with capacity limits in identifying unidimensional + stimuli but not complex stimuli. The experiments reported here tested + this theoretical dissociation using Luce's (1963) Similarity Choice + Model to measure the psychological distance between stimuli in line-length-identification + and letter-identification tasks. The psychological distance between + line-length stimuli decreased with the number of to-be-identified + stimuli; this result is concordant with capacity limits in unidimensional + absolute identification. Surprisingly, the opposite result held in + letter identification. Psychological distance between letters increased + with an increased number of to-be-identified stimuli. This result + indicates an opposite type of processing deficit: People process + letters more efficiently with more choices.}, + Author = {J. N. Rouder}, + Institution = {University of Missouri, Columbia, 65211, USA. jeff@missouri.edu}, + Journal = {Psychol Sci}, + Keywords = {Cognition, physiology; Humans; Models, Psychological; Visual Perception, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jul}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {318--322}, + Pmid = {11476099}, + Timestamp = {2009.08.15}, + Title = {Absolute identification with simple and complex stimuli.}, + Volume = {12}, + Year = {2001}} + +@article{Rouder2001c, + Abstract = {Many models of response time assume that subjects accrue stimulus + "evidence" samples in time (e.g., random walk models, counter models). + In this paper, the concept of one stimulus dominating another is + used to construct a test of the whole class of evidence accrual models. + For an example of dominance, consider stimuli that are presented + either virtually instantaneously (stepped) or in a gradually increasing + manner (ramped). Ramped stimuli are presented such that the ramped + portion precedes the stepped onset of stepped stimuli. In this case + ramped stimuli dominate stepped stimuli. In this paper the class + of evidence accrual models is formalized. It is shown that under + appropriate assumptions evidence accrual models do predict more accurate + responses to dominating stimuli. However, this result does not hold + for response latencies. There are anomalous cases where an evidence + accrual model (the accumulator model of Vickers (1970, Ergonomics + 13, 37-58)) predicts slower mean correct response latencies to dominating + stimuli. 
It is shown through extensive computer simulation that these + anomalous cases occur only when response criteria are so asymmetric + that there are exceedingly extreme response biases. For experiments + where response biases are not exceedingly extreme, random walk and + accumulator models predict more accurate and quicker correct responses + to dominating stimuli. In sum, manipulating the time course of stimuli + in accordance with the concept of dominance can provide empirical + tests of the class of evidence accrual models. Copyright 2001 Academic + Press. }, + Author = {Rouder, J. N.}, + Doi = {10.1006/jmps.2000.1319}, + Institution = {University of Missouri-Columbia}, + Journal = {J Math Psychol}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {334--354}, + Pii = {jmps.2000.1319}, + Pmid = {11302716}, + Timestamp = {2009.08.15}, + Title = {Testing Evidence Accrual Models by Manipulating Stimulus Onset.}, + Url = {http://dx.doi.org/10.1006/jmps.2000.1319}, + Volume = {45}, + Year = {2001}, + Bdsk-Url-1 = {http://dx.doi.org/10.1006/jmps.2000.1319}} + +@article{Rouder2000a, + Abstract = {Many models of perceptual processing assume that participants integrate + stimulus evidence over time, for example, random walk models. This + class of models is tested in a luminance discrimination paradigm + in which the onsets of the stimuli are either instantaneous (stepped) + or slowly ramped. The ramped portion of ramped stimuli occurs prior + to the stepped stimuli onsets. Consequently, there is more luminance + energy in ramped stimuli. Therefore, if participants integrate luminance + energy, they should perform better to ramped stimuli. This did not + occur in 4 experiments. Participants performed better to stepped + stimuli than ramped stimuli in earlier foreperiods and the reverse + in later foreperiods. A new model is proposed in which participants + monitor both integrated luminance energy and quick temporal changes + in luminance, but they do so in a serial fashion. First, participants + monitor temporal changes in luminance; later, they monitor integrated + luminance energy.}, + Author = {J. N. Rouder}, + Institution = {Department of Psychology, Northwestern University, USA. rouderj@missouri.edu}, + Journal = {J Exp Psychol Hum Percept Perform}, + Keywords = {Adult; Contrast Sensitivity; Discrimination (Psychology); Feedback; Female; Humans; Male; Models, Psychological; Observer Variation; Pattern Recognition, Visual; Psychomotor Performance; Psychophysics; Reaction Time; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {359--378}, + Pmid = {10696623}, + Timestamp = {2009.08.15}, + Title = {Assessing the roles of change discrimination and luminance integration: evidence for a hybrid race model of perceptual decision making in luminance discrimination.}, + Volume = {26}, + Year = {2000}} + +@article{Rouder1996, + Author = {Rouder, J. N.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {287--296}, + Title = {Premature Sampling in Random Walks}, + Volume = {40}, + Year = {1996}} + +@article{Rouder2001, + Abstract = {We offer a critique of the temporal distinctiveness model of serial + position effects (Nairne, Neath, Serra, & Byun, 1997). The temporal + distinctiveness model combines a precise definition of stimulus distinctiveness + with a memory perturbation process.
The critique is empirically motivated-we + show that with a more complete analysis, the temporal distinctiveness + model does not adequately account for Nairne et al.'s experimental + data. To better account for the data, we independently modified two + components of Nairne et al.'s model: the mathematical form of the + definition of temporal distinctiveness and the mathematical form + of the mapping from distinctiveness to free-recall probabilities. + Both of these modifications provided for better fits. Yet both Nairne + et al.'s definition and our modified definition are fairly arbitrary. + We show that a significant challenge to this approach is to find + theoretically motivated constraints of the temporal distinctiveness + model while providing for adequate fits to data.}, + Author = {Rouder, J. N. and Pablo Gomez}, + Doi = {10.1080/09658210042000102}, + Institution = {University of Missouri-Columbia, USA.}, + Journal = {Memory}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jul}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {301--311}, + Pmid = {11747584}, + Timestamp = {2009.08.15}, + Title = {Modelling serial position curves with temporal distinctiveness.}, + Url = {http://dx.doi.org/10.1080/09658210042000102}, + Volume = {9}, + Year = {2001}, + Bdsk-Url-1 = {http://dx.doi.org/10.1080/09658210042000102}} + +@article{Rouder2001a, + Abstract = {We offer a critique of the temporal distinctiveness model of serial + position effects (Nairne, Neath, Serra, & Byun, 1997). The temporal + distinctiveness model combines a precise definition of stimulus distinctiveness + with a memory perturbation process. The critique is empirically motivated-we + show that with a more complete analysis, the temporal distinctiveness + model does not adequately account for Nairne et al.'s experimental + data. To better account for the data, we independently modified two + components of Nairne et al.'s model: the mathematical form of the + definition of temporal distinctiveness and the mathematical form + of the mapping from distinctiveness to free-recall probabilities. + Both of these modifications provided for better fits. Yet both Nairne + et al.'s definition and our modified definition are fairly arbitrary. + We show that a significant challenge to this approach is to find + theoretically motivated constraints of the temporal distinctiveness + model while providing for adequate fits to data.}, + Author = {J. N. Rouder and P. Gomez}, + Institution = {Department of Psychology, University of Missouri-Columbia, 65211, USA.}, + Journal = {Memory}, + Keywords = {Humans; Memory, Short-Term, physiology; Models, Psychological; Probability; Psychological Tests; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4-6}, + Owner = {Woo-Young Ahn}, + Pages = {301--311}, + Pmid = {11594353}, + Timestamp = {2009.08.15}, + Title = {Modelling serial position curves with temporal distinctiveness.}, + Volume = {9}, + Year = {2001}} + +@article{Rouder2003, + Abstract = {In a speeded choice reaction time task, responses to centrally presented + letter targets can be altered by the identity of surrounding task-irrelevant + letters (flankers). In the standard flanker effect, flankers associated + with the same response as the target lead to faster and more accurate + responses, whereas flankers associated with a different response + lead to slower and more error-prone responses. B. A. Eriksen and + C. W. Eriksen (1974, pp. 
143-149) have argued + that these flanker + effects occur through response competition. We present data from + a novel version of the Eriksen task, in which some targets and flankers + consist of letter forms that are morphed versions of target letters. + In this paradigm, flankers induce classic flanker effects on well-formed + targets. But flankers induce an opposite effect, termed a negative + flanker effect on morphed letter targets. For example, targets that + are morphs between the letters "A" and "H" are more likely to be + identified as an "A" when flanked by an "H." The interpretation advanced + here is that there are two distinct kinds of flanker effects: contrast + enhancement in perceptual processes and response competition in response + selection processes.}, + Author = {Jeffrey N Rouder and Jonathan W King}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia, Missouri 65211, USA. jeff@banta.psyc.missouri.edu}, + Journal = {Percept Psychophys}, + Keywords = {Affect; Humans; Reaction Time; Recognition (Psychology); Visual Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {287--297}, + Pmid = {12713244}, + Timestamp = {2009.08.15}, + Title = {Flanker and negative flanker effects in letter identification.}, + Volume = {65}, + Year = {2003}} + +@article{Rouder2005, + Author = {Rouder, J. N. and Lu, J.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {573--604}, + Title = {An Introduction to {B}ayesian Hierarchical Models with an Application in the Theory of Signal Detection}, + Volume = {12}, + Year = {2005}} + +@article{Rouder2005b, + Abstract = {Although many nonlinear models of cognition have been proposed in + the past 50 years, there has been little consideration of corresponding + statistical techniques for their analysis. In analyses with nonlinear + models, unmodeled variability from the selection of items or participants + may lead to asymptotically biased estimation. This asymptotic bias, + in turn, renders inference problematic. We show, for example, that + a signal detection analysis of recognition memory data leads to asymptotic + underestimation of sensitivity. To eliminate asymptotic bias, we + advocate hierarchical models in which participant variability, item + variability, and measurement error are modeled simultaneously. By + accounting for multiple sources of variability, hierarchical models + yield consistent and accurate estimates of participant and item effects + in recognition memory. This article is written in tutorial format; + we provide an introduction to Bayesian statistics, hierarchical modeling, + and Markov chain Monte Carlo computational techniques.}, + Author = {Jeffrey N Rouder and Jun Lu}, + Institution = {Department of Psychological Sciences, 210 McAlester Hall, University of Missouri, Columbia, MO 65211, USA.
rouderj@missouri.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Bayes Theorem; Bias (Epidemiology); Humans; Markov Chains; Mathematical Computing; Mental Recall; Models, Statistical; Monte Carlo Method; Nonlinear Dynamics; Reference Values; Signal Detection, Psychological; Software}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {573--604}, + Pmid = {16447374}, + Timestamp = {2009.08.15}, + Title = {An introduction to Bayesian hierarchical models with an application in the theory of signal detection.}, + Volume = {12}, + Year = {2005}} + +@article{Rouder2008a, + Abstract = {In fitting the process-dissociation model (L. L. Jacoby, 1991) to + observed data, researchers aggregate outcomes across participant, + items, or both. T. Curran and D. L. Hintzman (1995) demonstrated + how biases from aggregation may lead to artifactual support for the + model. The authors develop a hierarchical process-dissociation model + that does not require aggregation for analysis. Most importantly, + the Curran and Hintzman critique does not hold for this model. Model + analysis provides for support of process dissociation--selective + influence holds, and there is a dissociation in correlation patterns + among participants and items. Items that are better recollected also + elicit higher automatic activation. There is no correlation, however, + across participants; that is, participants with higher recollection + have no increased tendency toward automatic activation. The critique + of aggregation is not limited to process dissociation. Aggregation + distorts analysis in many nonlinear models, including signal detection, + multinomial processing tree models, and strength models. Hierarchical + modeling serves as a general solution for accurately fitting these + psychological-processing models to data.}, + Author = {Jeffrey N Rouder and Jun Lu and Richard D Morey and Dongchu Sun and Paul L Speckman}, + Doi = {10.1037/0096-3445.137.2.370}, + Institution = {Department of Psychological Sciences, University of Missouri-Columbia, Columbia, MO 65211, USA. rouderj@missouri.edu}, + Journal = {J Exp Psychol Gen}, + Keywords = {Attention; Bayes Theorem; Data Interpretation, Statistical; Humans; Mental Recall; Models, Statistical; Practice (Psychology); Reaction Time; Retention (Psychology); Signal Detection, Psychological; Verbal Learning}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {370--389}, + Pii = {2008-05019-009}, + Pmid = {18473664}, + Timestamp = {2009.08.15}, + Title = {A hierarchical process-dissociation model.}, + Url = {http://dx.doi.org/10.1037/0096-3445.137.2.370}, + Volume = {137}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1037/0096-3445.137.2.370}} + +@article{Rouder2005c, + Abstract = {We present a statistical model for inference with response time (RT) + distributions. The model has the following features. First, it provides + a means of estimating the shape, scale, and location (shift) of RT + distributions. Second, it is hierarchical and models between-subjects + and within-subjects variability simultaneously. Third, inference + with the model is Bayesian and provides a principled and efficient + means of pooling information across disparate data from different + individuals. 
Because the model efficiently pools information across
+ individuals, it is particularly well suited for those common cases
+ in which the researcher collects a limited number of observations
+ from several participants. Monte Carlo simulations reveal that the
+ hierarchical Bayesian model provides more accurate estimates than
+ several popular competitors do. We illustrate the model by providing
+ an analysis of the symbolic distance effect in which participants
+ can more quickly ascertain the relationship between nonadjacent digits
+ than that between adjacent digits.},
+ Author = {Jeffrey N Rouder and Jun Lu and Paul Speckman and Dongchu Sun and Yi Jiang},
+ Institution = {Department of Psychological Sciences, 210 McAlester Hall, University of Missouri, Columbia, MO 65211, USA. jeff@missouri.edu},
+ Journal = {Psychon Bull Rev},
+ Keywords = {Cognition; Humans; Monte Carlo Method; Reaction Time},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Apr},
+ Number = {2},
+ Owner = {Woo-Young Ahn},
+ Pages = {195--223},
+ Pmid = {16082801},
+ Timestamp = {2009.08.15},
+ Title = {A hierarchical model for estimating response time distributions.},
+ Volume = {12},
+ Year = {2005}}
+
+@article{Rouder2005a,
+ Author = {Rouder, J. N. and Lu, J. and Speckman, P. L. and Sun, D. and Jiang, Y.},
+ Journal = {Psychonomic Bulletin \& Review},
+ Pages = {195--223},
+ Title = {A Hierarchical Model for Estimating Response Time Distributions},
+ Volume = {12},
+ Year = {2005}}
+
+@article{Rouderinpress,
+ Author = {Rouder, J. N. and Lu, J. and Sun, D. and Speckman, P. and Morey, R. and Naveh-Benjamin, M.},
+ Journal = {Psychometrika},
+ Pages = {621--642},
+ Title = {Signal Detection Models with Random Participant and Item Effects},
+ Volume = {72},
+ Year = {2007}}
+
+@article{Rouder2007a,
+ Author = {Rouder, J. N. and Lu, J. and Sun, D. and Speckman, P. and Morey, R. and Naveh-Benjamin, M.},
+ Journal = {Psychometrika},
+ Number = {4},
+ Pages = {621--642},
+ Publisher = {Springer},
+ Title = {{Signal detection models with random participant and item effects}},
+ Volume = {72},
+ Year = {2007}}
+
+@article{Rouder2009,
+ Abstract = {Following G. T. Fechner (1966), thresholds have been conceptualized
+ as the amount of intensity needed to transition between mental states,
+ such as between states of unconsciousness and consciousness. With
+ the advent of the theory of signal detection, however, discrete-state
+ theory and the corresponding notion of threshold have been discounted.
+ Consequently, phenomena such as subliminal priming and perception
+ have a reduced theoretical basis. The authors propose a process-neutral
+ definition of threshold that allows for graded perception and activation
+ throughout the system. Thresholds correspond to maximum stimulus
+ intensities such that the distribution of mental states does not
+ differ from that when an appropriate baseline stimulus is presented.
+ In practice, thresholds are maximum intensities such that the probability
+ distribution on behavioral events does not differ from that from
+ baseline. These thresholds, which the authors call task thresholds,
+ may be estimated with modified item response psychometric measurement
+ models. 
(PsycINFO Database Record (c) 2009 APA, all rights reserved).}, + Author = {Jeffrey N Rouder and Richard D Morey}, + Doi = {10.1037/a0016413}, + Institution = {Department of Psychological Sciences.}, + Journal = {Psychol Rev}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jul}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {655--660}, + Pii = {2009-10379-010}, + Pmid = {19618991}, + Timestamp = {2009.08.15}, + Title = {The nature of psychological thresholds.}, + Url = {http://dx.doi.org/10.1037/a0016413}, + Volume = {116}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1037/a0016413}} + +@article{Rouder2005d, + Author = {Jeffrey N Rouder and Richard D Morey}, + Doi = {10.1111/j.0956-7976.2005.00783.x}, + Institution = {University of Missouri-Columbia, MO 65211, USA. jeff@banta.psyc.missouri.edu}, + Journal = {Psychol Sci}, + Keywords = {Confidence Intervals; Humans; Research, statistics /&/ numerical data; Social Sciences, statistics /&/ numerical data}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jan}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {77--79}, + Pii = {PSCI783}, + Pmid = {15660855}, + Timestamp = {2009.08.15}, + Title = {Relational and arelational confidence intervals: a comment on Fidler, Thomason, Cumming, Finch, and Leeman (2004).}, + Url = {http://dx.doi.org/10.1111/j.0956-7976.2005.00783.x}, + Volume = {16}, + Year = {2005}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.0956-7976.2005.00783.x}} + +@article{Rouder2004a, + Abstract = {We tested whether there is long-term learning in the absolute identification + of line lengths. Line lengths are unidimensional stimuli, and there + is a common belief that learning of these stimuli quickly reaches + a low-level asymptote of about seven items and progresses no more. + We show that this is not the case. Our participants served in a 1.5-h + session each day for over a week. Although they did not achieve perfect + performance, they continued to improve day by day throughout the + week and eventually learned to distinguish between 12 and 20 line + lengths. These results are in contrast to common characterizations + of learning in absolute identification tasks with unidimensional + stimuli. We suggest that this learning reflects improvement in short-term + processing.}, + Author = {Jeffrey N Rouder and Richard D Morey and Nelson Cowan and Monique Pfaltz}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia, MO 65211, USA. jeff@banta.psyc.missouri.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Cognition; Humans; Learning; Memory}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {938--944}, + Pmid = {15732707}, + Timestamp = {2009.08.15}, + Title = {Learning in a unidimensional absolute identification task.}, + Volume = {11}, + Year = {2004}} + +@article{Rouder2008b, + Abstract = {Visual working memory is often modeled as having a fixed number of + slots. We test this model by assessing the receiver operating characteristics + (ROC) of participants in a visual-working-memory change-detection + task. ROC plots yielded straight lines with a slope of 1.0, a tell-tale + characteristic of all-or-none mnemonic representations. 
Formal model + assessment yielded evidence highly consistent with a discrete fixed-capacity + model of working memory for this task.}, + Author = {Jeffrey N Rouder and Richard D Morey and Nelson Cowan and Christopher E Zwilling and Candice C Morey and Michael S Pratte}, + Doi = {10.1073/pnas.0711295105}, + Institution = {Department of Psychological Sciences, 210 McAlester Hall, University of Missouri, Columbia, MO 65211, USA. rouderj@missouri.edu}, + Journal = {Proc Natl Acad Sci U S A}, + Keywords = {Humans; Memory, Short-Term; Models, Psychological; ROC Curve; Visual Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {16}, + Owner = {Woo-Young Ahn}, + Pages = {5975--5979}, + Pii = {0711295105}, + Pmid = {18420818}, + Timestamp = {2009.08.15}, + Title = {An assessment of fixed-capacity models of visual working memory.}, + Url = {http://dx.doi.org/10.1073/pnas.0711295105}, + Volume = {105}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1073/pnas.0711295105}} + +@article{Rouder2007, + Abstract = {In many paradigms, the persuasiveness of subliminal priming relies + on establishing that stimuli are undetectable. The standard significance + test approach is ill-suited as null results may reflect either truly + undetectable stimuli or a lack of power to resolve weakly detectable + stimuli. We present a novel statistical model as an alternative. + The model provides for estimates of the probability that each individual + is truly at chance. Researchers may select individuals for whom there + are sufficiently high probabilities of true undetectability. The + model is hierarchical, and estimation is done within the Bayesian + framework.}, + Author = {Jeffrey N Rouder and Richard D Morey and Paul L Speckman and Michael S Pratte}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia, Missouri 65211, USA. rouderj@missouri.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Cognition; Humans; Models, Psychological; Signal Detection, Psychological; Unconscious (Psychology)}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {597--605}, + Pmid = {17972720}, + Timestamp = {2009.08.15}, + Title = {Detecting chance: a solution to the null sensitivity problem in subliminal priming.}, + Volume = {14}, + Year = {2007}} + +@article{Rouder2004c, + Abstract = {Four experiments are presented that competitively test rule- and exemplar-based + models of human categorization behavior. Participants classified + stimuli that varied on a unidimensional axis into 2 categories. The + stimuli did not consistently belong to a category; instead, they + were probabilistically assigned. By manipulating these assignment + probabilities, it was possible to produce stimuli for which exemplar- + and rule-based explanations made qualitatively different predictions. + F. G. Ashby and J. T. Townsend's (1986) rule-based general recognition + theory provided a better account of the data than R. M. Nosofsky's + (1986) exemplar-based generalized context model in conditions in + which the to-be-classified stimuli were relatively confusable. However, + generalized context model provided a better account when the stimuli + were relatively few and distinct. 
These findings are consistent with
+ multiple process accounts of categorization and demonstrate that
+ stimulus confusion is a determining factor as to which process mediates
+ categorization.},
+ Author = {Jeffrey N Rouder and Roger Ratcliff},
+ Doi = {10.1037/0096-3445.133.1.63},
+ Institution = {Department of Psychological Sciences, University of Missouri-Columbia, Columbia, MO, US. jeff@banta.psyc.missouri.edu},
+ Journal = {J Exp Psychol Gen},
+ Keywords = {Decision Making; Decision Support Techniques; Humans; Models, Psychological; Probability; Psychological Theory},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Mar},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {63--82},
+ Pii = {2004-10964-005},
+ Pmid = {14979752},
+ Timestamp = {2009.08.15},
+ Title = {Comparing categorization models.},
+ Url = {http://dx.doi.org/10.1037/0096-3445.133.1.63},
+ Volume = {133},
+ Year = {2004},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1037/0096-3445.133.1.63}}
+
+@article{Rouder2000,
+ Abstract = {People name well-known objects shown in pictures more quickly if they
+ have studied them previously. The most common interpretation of this
+ priming effect is that processing is facilitated by an implicit memory
+ trace in a perceptual representation system. We show that object
+ priming can be explained instead as a bias in information processing,
+ without recourse to an implicit memory system. Assumptions about
+ psychological decision-making processes and bias were added to a
+ neural network model for object identification, and the model accounted
+ for performance both qualitatively and quantitatively in four object
+ identification experiments.},
+ Author = {J. N. Rouder and R. Ratcliff and G. McKoon},
+ Institution = {Northwestern University, Evanston, USA.},
+ Journal = {Psychol Sci},
+ Keywords = {Adult; Decision Making; Female; Humans; Male; Memory; Mental Processes; Nerve Net, physiology; Recognition (Psychology), physiology; Visual Perception},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Jan},
+ Number = {1},
+ Owner = {Woo-Young Ahn},
+ Pages = {13--19},
+ Pmid = {11228837},
+ Timestamp = {2009.08.15},
+ Title = {A neural network model of implicit memory for object recognition.},
+ Volume = {11},
+ Year = {2000}}
+
+@article{Rouder2004,
+ Author = {Rouder, J. N. and Speckman, P. L.},
+ Journal = {Psychonomic Bulletin \& Review},
+ Pages = {419--427},
+ Title = {An Evaluation of the {V}incentizing Method of Forming Group-Level Response Time Distributions},
+ Volume = {11},
+ Year = {2004}}
+
+@article{Rouder2004b,
+ Abstract = {Vincentizing (quantile averaging) is a popular means of pooling response
+ time distributions across individuals to produce a group average.
+ The benefit of Vincentizing is that the resulting histogram "looks
+ like" an average of the individuals. In this article, we competitively
+ test Vincentizing against the more mundane approach of averaging
+ parameter estimates from fits to individuals. We simulate data from
+ three distributions: the ex-Gaussian, the Weibull, and the shifted-Wald.
+ For the ex-Gaussian and the shifted-Wald, parameter averaging outperforms
+ Vincentizing. There is only an advantage of Vincentizing for the
+ Weibull and only when there are few observations per participant. 
+ Overall, we recommend that researchers use Vincentizing only in select + circumstances and with the knowledge that Vincentized estimates are + often inconsistent estimators of averaged parameters.}, + Author = {Jeffrey N Rouder and Paul L Speckman}, + Institution = {Department of Psychological Sciences, University of Missouri, Columbia, MO 65211, USA. jeff@banta.psyc.missouri.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Cognition; Humans; Normal Distribution; Reaction Time; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {419--427}, + Pmid = {15376789}, + Timestamp = {2009.08.15}, + Title = {An evaluation of the Vincentizing method of forming group-level response time distributions.}, + Volume = {11}, + Year = {2004}} + +@article{Rouder2009a, + Abstract = {Progress in science often comes from discovering invariances in relationships + among variables; these invariances often correspond to null hypotheses. + As is commonly known, it is not possible to state evidence for the + null hypothesis in conventional significance testing. Here we highlight + a Bayes factor alternative to the conventional t test that will allow + researchers to express preference for either the null hypothesis + or the alternative. The Bayes factor has a natural and straightforward + interpretation, is based on reasonable assumptions, and has better + properties than other methods of inference that have been advocated + in the psychological literature. To facilitate use of the Bayes factor, + we provide an easy-to-use, Web-based program that performs the necessary + calculations.}, + Author = {Jeffrey N Rouder and Paul L Speckman and Dongchu Sun and Richard D Morey and Geoffrey Iverson}, + Doi = {10.3758/PBR.16.2.225}, + Institution = {University of Missouri, Columbia, MO 65211, USA. rouderj@missouri.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Analysis of Variance; Bayes Theorem; Data Interpretation, Statistical; Humans; Likelihood Functions; Mathematical Computing; Probability; Psychology, Experimental, statistics /&/ numerical data; Software}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {225--237}, + Pii = {16/2/225}, + Pmid = {19293088}, + Timestamp = {2009.08.15}, + Title = {Bayesian t tests for accepting and rejecting the null hypothesis.}, + Url = {http://dx.doi.org/10.3758/PBR.16.2.225}, + Volume = {16}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.3758/PBR.16.2.225}} + +@article{Rouder2003a, + Author = {Rouder, J. N. and Sun, D. and Speckman, P.L. and Lu, J. and Zhou, D.}, + Journal = {Psychometrika}, + Number = {4}, + Pages = {589--606}, + Publisher = {Springer}, + Title = {{A hierarchical Bayesian statistical framework for response time distributions}}, + Volume = {68}, + Year = {2003}} + +@article{Rouder2008, + Abstract = {Understanding how response time (RT) changes with manipulations has + been critical in distinguishing among theories in cognition. It is + well known that aggregating data distorts functional relationships + (e.g., Estes, 1956). Less well appreciated is a second pitfall: Minimizing + squared errors (i.e., OLS regression) also distorts estimated functional + forms with RT data. We discuss three properties of RT that should + be modeled for accurate analysis and, on the basis of these three + properties, provide a hierarchical Weibull regression model for regressing + RT onto covariates. 
Hierarchical regression model analysis of lexical
+ decision task data reveals that RT decreases as a power function
+ of word frequency with the scale of RT decreasing 11\% for every
+ doubling of word frequency. A detailed discussion of the model and
+ analysis techniques are presented as archived materials and may be
+ downloaded from www.psychonomic.org/archive.},
+ Author = {Jeffrey N Rouder and Francis Tuerlinckx and Paul Speckman and Jun Lu and Pablo Gomez},
+ Doi = {10.3758/PBR.15.6.1201},
+ Institution = {Department of Psychological Sciences, University of Missouri, Columbia, MO 65211, USA. rouderj@missouri.edu},
+ Journal = {Psychon Bull Rev},
+ Keywords = {Decision Making; Humans; Psychometrics, statistics /&/ numerical data; Reaction Time; Recognition (Psychology); Regression Analysis; Semantics; Verbal Learning},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Dec},
+ Number = {6},
+ Owner = {Woo-Young Ahn},
+ Pages = {1201--1208},
+ Pii = {15/6/1201},
+ Pmid = {19001591},
+ Timestamp = {2009.08.15},
+ Title = {A hierarchical approach for fitting curves to response time measurements.},
+ Url = {http://dx.doi.org/10.3758/PBR.15.6.1201},
+ Volume = {15},
+ Year = {2008},
+ Bdsk-Url-1 = {http://dx.doi.org/10.3758/PBR.15.6.1201}}
+
+@article{Rougier2005,
+ Author = {Rougier, N. P. and Noelle, D. C. and Braver, T. S. and Cohen, J. D. and O'Reilly, R. C.},
+ Journal = {Proc. Natl. Acad. Sci. U.S.A.},
+ Month = {May},
+ Pages = {7338--7343},
+ Title = {{{P}refrontal cortex and flexible cognitive control: rules without symbols}},
+ Volume = {102},
+ Year = {2005}}
+
+@article{Royall2000,
+ Author = {Royall, R.},
+ Journal = {Journal of the American Statistical Association},
+ Pages = {760--780},
+ Title = {On the Probability of Observing Misleading Statistical Evidence (with Discussion)},
+ Volume = {95},
+ Year = {2000}}
+
+@book{Royall1997,
+ Address = {London},
+ Author = {Royall, R. M.},
+ Publisher = {{C}hapman \& {H}all},
+ Title = {Statistical Evidence: A Likelihood Paradigm},
+ Year = {1997}}
+
+@book{Rumelhart1987,
+ Address = {Cambridge, MA},
+ Author = {Rumelhart, D. E. and McClelland, J. L. and {The PDP Research Group}},
+ Owner = {Wooyoung Ahn},
+ Publisher = {MIT Press},
+ Timestamp = {2007.05.04},
+ Title = {Parallel distributed processing: Explorations in the microstructure of cognition (Vols 1 \& 2)},
+ Year = {1987}}
+
+@article{Rushworth2004,
+ __Markedentry = {[Woo-Young Ahn]},
+ Abstract = {Activations in human dorsomedial frontal and cingulate cortices are
+ often present in neuroimaging studies of decision making and action
+ selection. Interpretations have emphasized executive control, movement
+ sequencing, error detection and conflict monitoring. Recently, however,
+ experimental approaches, using lesions, inactivation, and cell recording,
+ have suggested that these are just components of the areas' functions.
+ Here we review these results and integrate them with those from neuroimaging.
+ A medial superior frontal gyrus (SFG) region centred on the pre-supplementary
+ motor area (pre-SMA) is involved in the selection of action sets
+ whereas the anterior cingulate cortex (ACC) has a fundamental role
+ in relating actions to their consequences, both positive reinforcement
+ outcomes and errors, and in guiding decisions about which actions
+ are worth making.},
+ Author = {M. F. S. Rushworth and M. E. Walton and S. W. Kennerley and D. M. 
Bannerman},
+ Doi = {10.1016/j.tics.2004.07.009},
+ Institution = {Department of Experimental Psychology, University of Oxford, Oxford OX1 3UD, UK. matthew.rushworth@psy.ox.ac.uk},
+ Journal = {Trends Cogn Sci},
+ Keywords = {Animals; Conditioning (Psychology); Cues; Decision Making, physiology; Evoked Potentials, physiology; Frontal Lobe, physiology; Gyrus Cinguli, physiology; Haplorhini; Magnetic Resonance Imaging; Memory, physiology; Psychomotor Performance, physiology; Reinforcement (Psychology); Reward; Visual Perception, physiology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Sep},
+ Number = {9},
+ Owner = {Woo-Young Ahn},
+ Pages = {410--417},
+ Pii = {S1364-6613(04)00191-3},
+ Pmid = {15350242},
+ Timestamp = {2009.08.19},
+ Title = {Action sets and decisions in the medial frontal cortex.},
+ Url = {http://dx.doi.org/10.1016/j.tics.2004.07.009},
+ Volume = {8},
+ Year = {2004},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.tics.2004.07.009}}
+
+@article{Russo2009,
+ Author = {Russo, S. J. and Mazei-Robison, M. S. and Ables, J. L. and Nestler, E. J.},
+ Journal = {Neuropharmacology},
+ Pages = {73--82},
+ Title = {{{N}eurotrophic factors and structural plasticity in addiction}},
+ Volume = {56 Suppl 1},
+ Year = {2009}}
+
+@article{Ruzmetov2005,
+ Abstract = {Anomalous coronary artery (ACA) anatomy occurs in 2-9\% of patients
+ with tetralogy of Fallot (TOF), in which the left anterior descending
+ coronary artery (LAD) originates from the right coronary artery (RCA)
+ crossing the right ventricular outflow tract. The purpose of this
+ study was to review our results of repair for TOF with ACA. Between
+ 1978 and 2001, 43 ACA patients (mean age, 4.8 years; range, 5 months-41
+ years) underwent repair for TOF. The ACA anatomy was classified as
+ the single LAD from the RCA (n = 20), a significant conal branch
+ (dual LAD) from the RCA (n = 13), paired anterior descending arteries
+ originating from the left and right coronary arteries (n = 7), and
+ single RCA from the LAD (n = 3). In cases in which the anomalous
+ LAD crossed the obstructed infundibulum, thinning or coring of the
+ endocardium was done. Patch infundibuloplasty was performed in 39
+ patients, with 10 needing separate RV and pulmonary artery patches,
+ and the pulmonary valve was preserved. Nine patients had the addition
+ of a monocusp to a transannular incision. Two patients had a main
+ pulmonary arterioplasty alone. There was one early (2.3\%) and no
+ late deaths. Mean early and late postoperative gradients were 21.5
+ +/- 10.5 mmHg (4 patients had > or =30 mmHg) and 27.1 +/- 13.7 mmHg
+ (5 patients had >30 mmHg; p = 0.12), respectively. There were four
+ reoperations during a mean follow-up of 4.8 years (range, 6 months
+ to 18 years). Actuarial freedom from reoperation was 90\% at 5, 10,
+ and 15 years. At the latest follow-up, all patients were in NYHA
+ functional class I. TOF repair for an ACA can be performed without
+ disturbing the native coronary anatomy and without the use of conduits
+ in most cases. Outcomes are similar to those of other patients with
+ TOF. The presence of ACA does not impose increased risk after this
+ surgical strategy.},
+ Author = {M. Ruzmetov and M. A. Jimenez and A. Pruitt and M. W. Turrentine and J. W. Brown},
+ Doi = {10.1007/s00246-004-0640-6},
+ Institution = {Section of Cardiothoracic Surgery, James W. 
Riley Hospital for Children and Indiana University Medical Center, Indianapolis, IN 46202, USA.},
+ Journal = {Pediatr Cardiol},
+ Keywords = {Adolescent; Adult; Cardiac Surgical Procedures; Child; Child, Preschool; Coronary Vessel Anomalies, surgery; Female; Follow-Up Studies; Humans; Indiana; Infant; Male; Postoperative Complications, etiology/mortality/surgery; Reoperation; Survival Analysis; Tetralogy of Fallot, surgery; Time; Treatment Outcome; Ventricular Outflow Obstruction, surgery},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Number = {5},
+ Owner = {Woo-Young Ahn},
+ Pages = {537--542},
+ Pmid = {16378207},
+ Timestamp = {2009.08.04},
+ Title = {Repair of tetralogy of Fallot with anomalous coronary arteries coursing across the obstructed right ventricular outflow tract.},
+ Url = {http://dx.doi.org/10.1007/s00246-004-0640-6},
+ Volume = {26},
+ Year = {2005},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00246-004-0640-6}}
+
+@article{Sabini2005,
+ Author = {Sabini, J. and Monterosso, J.},
+ Journal = {Ethics Behav},
+ Pages = {81--94},
+ Title = {{{J}udgments of the fairness of using performance enhancing drugs}},
+ Volume = {15},
+ Year = {2005}}
+
+@article{Saeed2008,
+ Author = {Saeed, A. and O'Doherty, M. and O'Doherty, J. and O'Keefe, M.},
+ Journal = {J Cataract Refract Surg},
+ Month = {Oct},
+ Pages = {1736--1741},
+ Title = {{{L}aser-assisted subepithelial keratectomy retreatment after laser in situ keratomileusis}},
+ Volume = {34},
+ Year = {2008}}
+
+@article{Saeed2007,
+ Author = {Saeed, A. and O'Doherty, M. and O'Doherty, J. and O'Keefe, M.},
+ Journal = {Int Ophthalmol},
+ Month = {Feb},
+ Pages = {23--29},
+ Title = {{{A}nalysis of the visual and refractive outcome following laser in situ keratomileusis ({L}{A}{S}{I}{K}) retreatment over a four-year follow-up period}},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Sahani2003,
+ Author = {Sahani, M. and Dayan, P.},
+ Journal = {Neural Comput},
+ Month = {Oct},
+ Pages = {2255--2279},
+ Title = {{{D}oubly distributional population codes: simultaneous representation of uncertainty and multiplicity}},
+ Volume = {15},
+ Year = {2003}}
+
+@article{Salthouse1996,
+ Author = {Salthouse, T. A.},
+ Journal = {Psychological Review},
+ Pages = {403--428},
+ Title = {The Processing-Speed Theory of Adult Age Differences in Cognition},
+ Volume = {103},
+ Year = {1996}}
+
+@article{Salzman1974,
+ Author = {Salzman, C. and Kochansky, G. E. and Shader, R. I. and Porrino, L. J. and Harmatz, J. S. and Swett, C. P.},
+ Journal = {Arch. Gen. Psychiatry},
+ Month = {Sep},
+ Pages = {401--405},
+ Title = {{{C}hlordiazepoxide-induced hostility in a small group setting}},
+ Volume = {31},
+ Year = {1974}}
+
+@article{Salzman2007,
+ Author = {Salzman, C. D. and Paton, J. J. and Belova, M. A. and Morrison, S. E.},
+ Journal = {Ann. N. Y. Acad. Sci.},
+ Month = {Dec},
+ Pages = {336--354},
+ Title = {{{F}lexible neural representations of value in the primate brain}},
+ Volume = {1121},
+ Year = {2007}}
+
+@article{Samanez-Larkin2007,
+ Author = {Samanez-Larkin, G. R. and Gibbs, S. E. and Khanna, K. and Nielsen, L. and Carstensen, L. L. and Knutson, B.},
+ Journal = {Nat. Neurosci.},
+ Month = {Jun},
+ Pages = {787--791},
+ Title = {{{A}nticipation of monetary gain but not loss in healthy older adults}},
+ Volume = {10},
+ Year = {2007}}
+
+@article{Samanez-Larkin2008,
+ Author = {Samanez-Larkin, G. R. and Hollon, N. G. and Carstensen, L. L. 
and Knutson, B.},
+ Journal = {Psychol Sci},
+ Month = {Apr},
+ Pages = {320--323},
+ Title = {{{I}ndividual differences in insular sensitivity during loss anticipation predict avoidance learning}},
+ Volume = {19},
+ Year = {2008}}
+
+@article{Sammi1999,
+ Author = {Sammi, M. K. and Felder, C. A. and Fowler, J. S. and Lee, J. H. and Levy, A. V. and Li, X. and Logan, J. and P{\'a}lyka, I. and Rooney, W. D. and Volkow, N. D. and Wang, G. J. and Springer, C. S.},
+ Journal = {Magn Reson Med},
+ Month = {Aug},
+ Pages = {345--360},
+ Title = {{{I}ntimate combination of low- and high-resolution image data: {I}. {R}eal-space {P}{E}{T} and (1){H}(2){O} {M}{R}{I}, {P}{E}{T}{A}{M}{R}{I}}},
+ Volume = {42},
+ Year = {1999}}
+
+@article{Sammi2000,
+ Author = {Sammi, M. K. and Pan, J. W. and Telang, F. W. and Schuhlein, D. and Molina, P. E. and Volkow, N. D. and Springer, C. S. and Hetherington, H. P.},
+ Journal = {Magn Reson Med},
+ Month = {Jul},
+ Pages = {35--40},
+ Title = {{{M}easurements of human brain ethanol {T}(2) by spectroscopic imaging at 4 {T}}},
+ Volume = {44},
+ Year = {2000}}
+
+@article{Sanger2006,
+ Author = {Sanger, T. D. and Chen, D. and Delgado, M. R. and Gaebler-Spira, D. and Hallett, M. and Mink, J. W.},
+ Journal = {Pediatrics},
+ Month = {Nov},
+ Pages = {2159--2167},
+ Title = {{{D}efinition and classification of negative motor signs in childhood}},
+ Volume = {118},
+ Year = {2006}}
+
+@article{Sanger2003,
+ Author = {Sanger, T. D. and Delgado, M. R. and Gaebler-Spira, D. and Hallett, M. and Mink, J. W.},
+ Journal = {Pediatrics},
+ Month = {Jan},
+ Pages = {89--97},
+ Title = {{{C}lassification and definition of disorders causing hypertonia in childhood}},
+ Volume = {111},
+ Year = {2003}}
+
+@article{Santesso2009,
+ Author = {Santesso, D. L. and Evins, A. E. and Frank, M. J. and Schetter, E. C. and Bogdan, R. and Pizzagalli, D. A.},
+ Journal = {Hum Brain Mapp},
+ Month = {Jul},
+ Pages = {1963--1976},
+ Title = {{{S}ingle dose of a dopamine agonist impairs reinforcement learning in humans: evidence from event-related potentials and computational modeling of striatal-cortical function}},
+ Volume = {30},
+ Year = {2009}}
+
+@article{Sappey-Marinier1992,
+ Author = {Sappey-Marinier, D. and Calabrese, G. and Hetherington, H. P. and Fisher, S. N. and Deicken, R. and Van Dyke, C. and Fein, G. and Weiner, M. W.},
+ Journal = {Magn Reson Med},
+ Month = {Aug},
+ Pages = {313--327},
+ Title = {{{P}roton magnetic resonance spectroscopy of human brain: applications to normal white matter, chronic infarction, and {M}{R}{I} white matter signal hyperintensities}},
+ Volume = {26},
+ Year = {1992}}
+
+@article{Sareen2007,
+ Author = {Sareen, J. and Campbell, D. W. and Leslie, W. D. and Malisza, K. L. and Stein, M. B. and Paulus, M. P. and Kravetsky, L. B. and Kjernisted, K. D. and Walker, J. R. and Reiss, J. P.},
+ Journal = {Biol. Psychiatry},
+ Month = {Feb},
+ Pages = {396--404},
+ Title = {{{S}triatal function in generalized social phobia: a functional magnetic resonance imaging study}},
+ Volume = {61},
+ Year = {2007}}
+
+@article{Sareen2006,
+ Author = {Sareen, J. and Chartier, M. and Paulus, M. P. and Stein, M. B.},
+ Journal = {Psychiatry Res},
+ Month = {May},
+ Pages = {11--17},
+ Title = {{{I}llicit drug use and anxiety disorders: findings from two community surveys}},
+ Volume = {142},
+ Year = {2006}}
+
+@article{Sarin1999,
+ Author = {Sarin, R. 
and Vahid, F.},
+ Journal = {Games \& Economic Behavior},
+ Owner = {Wooyoung Ahn},
+ Pages = {294--309},
+ Timestamp = {2007.05.03},
+ Title = {Payoff assessments without probabilities: A simple dynamic model of choice},
+ Volume = {28},
+ Year = {1999}}
+
+@book{Savage1954,
+ Address = {New York},
+ Author = {Savage, L. J.},
+ Publisher = {{J}ohn {W}iley \& {S}ons},
+ Title = {The Foundations of Statistics},
+ Year = {1954}}
+
+@article{Schafer2005,
+ Author = {Schafer, J. H. and Glass, T. A. and Bolla, K. I. and Mintz, M. and Jedlicka, A. E. and Schwartz, B. S.},
+ Journal = {J Am Geriatr Soc},
+ Month = {Mar},
+ Pages = {381--388},
+ Title = {{{H}omocysteine and cognitive function in a population-based study of older adults}},
+ Volume = {53},
+ Year = {2005}}
+
+@article{Schall2001,
+ Author = {Schall, J. D.},
+ Journal = {Nature Reviews Neuroscience},
+ Pages = {33--42},
+ Title = {Neural Basis of Deciding, Choosing, and Acting},
+ Volume = {2},
+ Year = {2001}}
+
+@article{Schall2002,
+ Abstract = {Success requires deciding among alternatives, controlling the initiation
+ of movements, and judging the consequences of actions. When alternatives
+ are difficult to distinguish, habitual responses must be overcome,
+ or consequences are uncertain, deliberation is necessary and a supervisory
+ system exerts control over the processes that produce sensory-guided
+ movements. We have investigated these processes by recording neural
+ activity in the frontal lobe of macaque monkeys performing a countermanding
+ task. Distinct neurons in the frontal eye field respond to visual
+ stimuli or control the production of the movements. In the supplementary
+ eye field and anterior cingulate cortex, neurons appear not to control
+ directly movement initiation but instead signal the production of
+ errors, the anticipation and delivery of reinforcement, and the presence
+ of processing conflict. These signals form the core of current models
+ of supervisory control of sensorimotor processes.},
+ Author = {Jeffrey D Schall and Veit Stuphorn and Joshua W Brown},
+ Institution = {Center for Integrative and Cognitive Neuroscience, Department of Psychology, Vanderbilt University, Nashville, TN 37203, USA. jeffrey.d.schall@vanderbilt.edu},
+ Journal = {Neuron},
+ Keywords = {Animals; Frontal Lobe, physiology; Humans; Psychomotor Performance, physiology; Reinforcement (Psychology); Visual Fields, physiology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Oct},
+ Number = {2},
+ Owner = {Woo-Young Ahn},
+ Pages = {309--322},
+ Pii = {S0896627302009649},
+ Pmid = {12383784},
+ Timestamp = {2009.08.04},
+ Title = {Monitoring and control of action by the frontal lobes.},
+ Volume = {36},
+ Year = {2002}}
+
+@article{Scheres2007,
+ Author = {Scheres, A. and Milham, M. P. and Knutson, B. and Castellanos, F. X.},
+ Journal = {Biol. Psychiatry},
+ Month = {Mar},
+ Pages = {720--724},
+ Title = {{{V}entral striatal hyporesponsiveness during reward anticipation in attention-deficit/hyperactivity disorder}},
+ Volume = {61},
+ Year = {2007}}
+
+@article{Schervish1996,
+ Author = {Schervish, M. J.},
+ Journal = {The American Statistician},
+ Pages = {203--206},
+ Title = {P Values: What They Are and What They Are Not},
+ Volume = {50},
+ Year = {1996}}
+
+@article{Schiffer2003,
+ Author = {Schiffer, W. K. and Azmoodeh, M. and Gerasimov, M. and Volkow, N. D. and Fowler, J. S. and Dewey, S. 
L.},
+ Journal = {Synapse},
+ Month = {Apr},
+ Pages = {35--38},
+ Title = {{{S}elegiline potentiates cocaine-induced increases in rodent nucleus accumbens dopamine}},
+ Volume = {48},
+ Year = {2003}}
+
+@article{Schiffer2006,
+ Author = {Schiffer, W. K. and Volkow, N. D. and Fowler, J. S. and Alexoff, D. L. and Logan, J. and Dewey, S. L.},
+ Journal = {Synapse},
+ Month = {Mar},
+ Pages = {243--251},
+ Title = {{{T}herapeutic doses of amphetamine or methylphenidate differentially increase synaptic and extracellular dopamine}},
+ Volume = {59},
+ Year = {2006}}
+
+@article{Schlagenhauf2008,
+ Author = {Schlagenhauf, F. and Juckel, G. and Koslowski, M. and Kahnt, T. and Knutson, B. and Dembler, T. and Kienast, T. and Gallinat, J. and Wrase, J. and Heinz, A.},
+ Journal = {Psychopharmacology (Berl.)},
+ Month = {Mar},
+ Pages = {673--684},
+ Title = {{{R}eward system activation in schizophrenic patients switched from typical neuroleptics to olanzapine}},
+ Volume = {196},
+ Year = {2008}}
+
+@article{Schmidt1996,
+ Author = {Schmidt, F. L.},
+ Journal = {Psychological Methods},
+ Pages = {115--129},
+ Title = {Statistical Significance Testing and Cumulative Knowledge in Psychology: {I}mplications for Training of Researchers},
+ Volume = {1},
+ Year = {1996}}
+
+@article{Schmiedekinpress,
+ Author = {Schmiedek, F. and Oberauer, K. and Wilhelm, O. and S\"{u}{\ss}, H.-M. and Wittmann, W. W.},
+ Journal = {Journal of Experimental Psychology: General},
+ Pages = {??--??},
+ Title = {Individual Differences in Components of Reaction Time Distributions and Their Relations to Working Memory and Intelligence},
+ Volume = {??},
+ Year = {in press}}
+
+@article{Schmitz2001,
+ Author = {Schmitz, Y. and Lee, C. J. and Schmauss, C. and Gonon, F. and Sulzer, D.},
+ Journal = {J. Neurosci.},
+ Pages = {5916--5924},
+ Title = {{{A}mphetamine distorts stimulation-dependent dopamine overflow: effects on {D}2 autoreceptors, transporters, and synaptic vesicle stores}},
+ Volume = {21},
+ Year = {2001}}
+
+@article{Schnadower2007,
+ Author = {Schnadower, D. and Lin, S. and Perera, P. and Smerling, A. and Dayan, P.},
+ Journal = {Acad Emerg Med},
+ Month = {May},
+ Pages = {483--485},
+ Title = {{{A} pilot study of ultrasound analysis before pediatric peripheral vein cannulation attempt}},
+ Volume = {14},
+ Year = {2007}}
+
+@article{Schnadower2007a,
+ Author = {Schnadower, D. and Vazquez, H. and Lee, J. and Dayan, P. and Roskind, C. G.},
+ Journal = {Curr. Opin. Pediatr.},
+ Month = {Jun},
+ Pages = {258--264},
+ Title = {{{C}ontroversies in the evaluation and management of minor blunt head trauma in children}},
+ Volume = {19},
+ Year = {2007}}
+
+@article{Schneider2006,
+ Author = {Schneider, E. and Bolo, N. R. and Frederick, B. and Wilkinson, S. and Hirashima, F. and Nassar, L. and Lyoo, I. K. and Koch, P. and Jones, S. and Hwang, J. and Sung, Y. and Villafuerte, R. A. and Maier, G. and Hsu, R. and Hashoian, R. and Renshaw, P. 
F.},
+ Journal = {J Clin Pharm Ther},
+ Month = {Jun},
+ Pages = {261--273},
+ Title = {{{M}agnetic resonance spectroscopy for measuring the biodistribution and in situ in vivo pharmacokinetics of fluorinated compounds: validation using an investigation of liver and heart disposition of tecastemizole}},
+ Volume = {31},
+ Year = {2006}}
+
+@article{Schoenbaum2008a,
+ Abstract = {Studies using brain imaging methods have shown that neuronal activity
+ in the orbitofrontal cortex, a brain area thought to promote the
+ ability to control behavior according to likely outcomes or consequences,
+ is altered in drug addicts. These human imaging findings have led
+ to the hypothesis that core features of addiction like compulsive
+ drug use and drug relapse are mediated in part by drug-induced changes
+ in orbitofrontal function. Here, we discuss results from laboratory
+ studies using rats and monkeys on the effect of drug exposure on
+ orbitofrontal-mediated learning tasks and on neuronal structure and
+ activity in orbitofrontal cortex. We also discuss results from studies
+ on the role of the orbitofrontal cortex in drug self-administration
+ and relapse. Our main conclusion is that although there is clear
+ evidence that drug exposure impairs orbitofrontal-dependent learning
+ tasks and alters neuronal activity in orbitofrontal cortex, the precise
+ role these changes play in compulsive drug use and relapse has not
+ yet been established.},
+ Author = {Geoffrey Schoenbaum and Yavin Shaham},
+ Doi = {10.1016/j.biopsych.2007.06.003},
+ Institution = {Department of Anatomy and Neurobiology, University of Maryland, School of Medicine, Baltimore, Maryland 21201, USA. schoenbg@schoenbaumlab.org},
+ Journal = {Biol Psychiatry},
+ Keywords = {Animals; Frontal Lobe, drug effects/pathology/physiopathology; Humans; Substance-Related Disorders, drug therapy/pathology},
+ Language = {eng},
+ Medline-Pst = {ppublish},
+ Month = {Feb},
+ Number = {3},
+ Owner = {Woo-Young Ahn},
+ Pages = {256--262},
+ Pii = {S0006-3223(07)00534-3},
+ Pmid = {17719014},
+ Timestamp = {2009.08.04},
+ Title = {The role of orbitofrontal cortex in drug addiction: a review of preclinical studies.},
+ Url = {http://dx.doi.org/10.1016/j.biopsych.2007.06.003},
+ Volume = {63},
+ Year = {2008},
+ Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.biopsych.2007.06.003}}
+
+@article{Schonberg2007,
+ Author = {Schonberg, Tom and Daw, Nathaniel D. and Joel, Daphna and O'Doherty, John P.},
+ Journal = {Journal of Neuroscience},
+ Number = {47},
+ Owner = {WooYoung Ahn},
+ Pages = {12860--12867},
+ Timestamp = {2007.12.12},
+ Title = {Reinforcement learning signals in the human striatum distinguish learners from nonlearners during reward-based decision making},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Schouten1967,
+ Author = {Schouten, J. F. and Bekker, J. A. M.},
+ Journal = {Acta Psychologica},
+ Pages = {143--153},
+ Title = {Reaction time and accuracy},
+ Volume = {27},
+ Year = {1967}}
+
+@article{Schuff1999,
+ Author = {Schuff, N. and Amend, D. L. and Knowlton, R. and Norman, D. and Fein, G. and Weiner, M. W.},
+ Journal = {Neurobiol. Aging},
+ Pages = {279--285},
+ Title = {{{A}ge-related metabolite changes and volume loss in the hippocampus by magnetic resonance spectroscopy and imaging}},
+ Volume = {20},
+ Year = {1999}}
+
+@article{Schuff1998,
+ Author = {Schuff, N. and Amend, D. L. and Meyerhoff, D. J. and Tanabe, J. L. and Norman, D. and Fein, G. and Weiner, M. 
W.}, + Journal = {Radiology}, + Month = {Apr}, + Pages = {91--102}, + Title = {{{A}lzheimer disease: quantitative {H}-1 {M}{R} spectroscopic imaging of frontoparietal brain}}, + Volume = {207}, + Year = {1998}} + +@article{Schultz2005, + Author = {Schultz, J. and Friston, K. J. and O'Doherty, J. and Wolpert, D. M. and Frith, C. D.}, + Journal = {Neuron}, + Month = {Feb}, + Pages = {625--635}, + Title = {{{A}ctivation in posterior superior temporal sulcus parallels parameter inducing the percept of animacy}}, + Volume = {45}, + Year = {2005}} + +@article{Schultz2008, + Author = {Schultz, W.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Dec}, + Pages = {3767--3769}, + Title = {{{I}ntroduction. {N}euroeconomics: the promise and the profit}}, + Volume = {363}, + Year = {2008}} + +@article{Schultz2007, + Author = {Schultz, W.}, + Journal = {Annu. Rev. Neurosci.}, + Pages = {259--288}, + Title = {{{M}ultiple dopamine functions at different time courses}}, + Volume = {30}, + Year = {2007}} + +@article{Schultz2007a, + Author = {Schultz, W.}, + Journal = {Trends Neurosci.}, + Pages = {203--210}, + Title = {{{B}ehavioral dopamine signals}}, + Volume = {30}, + Year = {2007}} + +@article{Schultz2007b, + Author = {Schultz, W.}, + Journal = {Scholarpedia}, + Number = {3}, + Pages = {1652}, + Title = {Reward}, + Volume = {2}, + Year = {2007}} + +@article{Schultz2007c, + Author = {Schultz, W.}, + Journal = {Trends in neurosciences}, + Number = {5}, + Pages = {203--210}, + Publisher = {Elsevier}, + Title = {{Behavioral dopamine signals}}, + Volume = {30}, + Year = {2007}} + +@article{Schultz2006, + Author = {Schultz, W.}, + Journal = {Annu Rev Psychol}, + Pages = {87--115}, + Title = {{{B}ehavioral theories and the neurophysiology of reward}}, + Volume = {57}, + Year = {2006}} + +@article{Schultz2004, + Author = {Schultz, W.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Apr}, + Pages = {139--147}, + Title = {{{N}eural coding of basic reward terms of animal learning theory, game theory, microeconomics and behavioural ecology}}, + Volume = {14}, + Year = {2004}} + +@article{Schultz2002, + Author = {Schultz, W.}, + Journal = {Neuron}, + Month = {Oct}, + Pages = {241--263}, + Title = {{{G}etting formal with dopamine and reward}}, + Volume = {36}, + Year = {2002}} + +@article{Schultz1999, + Author = {Schultz, W.}, + Journal = {News Physiol. Sci.}, + Month = {Dec}, + Pages = {249--255}, + Title = {{{T}he {R}eward {S}ignal of {M}idbrain {D}opamine {N}eurons}}, + Volume = {14}, + Year = {1999}} + +@article{Schultz1998, + Author = {Schultz, W.}, + Journal = {Journal of neurophysiology}, + Number = {1}, + Pages = {1--27}, + Publisher = {Am Physiological Soc}, + Title = {{Predictive reward signal of dopamine neurons}}, + Volume = {80}, + Year = {1998}} + +@article{Schultz1993, + Author = {Schultz, W. and Apicella, P. and Ljungberg, T.}, + Journal = {Journal of Neuroscience}, + Number = {3}, + Pages = {900--913}, + Publisher = {Soc Neuroscience}, + Title = {{Responses of monkey dopamine neurons to reward and conditioned stimuli during successive steps of learning a delayed response task}}, + Volume = {13}, + Year = {1993}} + +@article{Schultz1997, + Author = {Schultz, W. and Dayan, P. and Montague, P. R.}, + Journal = {Science}, + Pages = {1593--1599}, + Title = {{{A} neural substrate of prediction and reward}}, + Volume = {275}, + Year = {1997}} + +@article{Schultz2008a, + Author = {Schultz, W. and Preuschoff, K. and Camerer, C. and Hsu, M. and Fiorillo, C. D. and Tobler, P. N. 
and Bossaerts, P.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Dec}, + Pages = {3801--3811}, + Title = {{{E}xplicit neural signals reflecting reward uncertainty}}, + Volume = {363}, + Year = {2008}} + +@article{Schultz1987, + Author = {Schultz, W. and Romo, R.}, + Journal = {Journal of Neurophysiology}, + Number = {1}, + Pages = {201--217}, + Publisher = {Am Physiological Soc}, + Title = {{Responses of nigrostriatal dopamine neurons to high-intensity somatosensory stimulation in the anesthetized monkey}}, + Volume = {57}, + Year = {1987}} + +@article{Schultz2003, + Author = {Schultz, W. and Tremblay, L. and Hollerman, J. R.}, + Journal = {Trends Neurosci.}, + Month = {Jun}, + Pages = {321--328}, + Title = {{{C}hanges in behavior-related neuronal activity in the striatum during learning}}, + Volume = {26}, + Year = {2003}} + +@article{Schwartz2002, + Author = {Schwartz, B. and Ward, A. and Monterosso, J. and Lyubomirsky, S. and White, K. and Lehman, D. R.}, + Journal = {J Pers Soc Psychol}, + Month = {Nov}, + Pages = {1178--1197}, + Title = {{{M}aximizing versus satisficing: happiness is a matter of choice}}, + Volume = {83}, + Year = {2002}} + +@article{Schwartz1993, + Author = {Schwartz, B. S. and Bolla, K. I. and Stewart, W. and Ford, D. P. and Agnew, J. and Frumkin, H.}, + Journal = {Am. J. Epidemiol.}, + Month = {May}, + Pages = {1006--1021}, + Title = {{{D}ecrements in neurobehavioral performance associated with mixed exposure to organic and inorganic lead}}, + Volume = {137}, + Year = {1993}} + +@article{Schwartz2007a, + Author = {Schwartz, B. S. and Chen, S. and Caffo, B. and Stewart, W. F. and Bolla, K. I. and Yousem, D. and Davatzikos, C.}, + Journal = {Neuroimage}, + Month = {Aug}, + Pages = {633--641}, + Title = {{{R}elations of brain volumes with cognitive function in males 45 years and older with past lead exposure}}, + Volume = {37}, + Year = {2007}} + +@article{Schwartz1991, + Author = {Schwartz, B. S. and Ford, D. P. and Bolla, K. I. and Agnew, J. and Bleecker, M. L.}, + Journal = {Am J Psychiatry}, + Month = {Jun}, + Pages = {751--756}, + Title = {{{S}olvent-associated olfactory dysfunction: not a predictor of deficits in learning and memory}}, + Volume = {148}, + Year = {1991}} + +@article{Schwartz1990, + Author = {Schwartz, B. S. and Ford, D. P. and Bolla, K. I. and Agnew, J. and Rothman, N. and Bleecker, M. L.}, + Journal = {Am. J. Ind. Med.}, + Pages = {697--706}, + Title = {{{S}olvent-associated decrements in olfactory function in paint manufacturing workers}}, + Volume = {18}, + Year = {1990}} + +@article{Schwartz2004, + Author = {Schwartz, B. S. and Glass, T. A. and Bolla, K. I. and Stewart, W. F. and Glass, G. and Rasmussen, M. and Bressler, J. and Shi, W. and Bandeen-Roche, K.}, + Journal = {Environ. Health Perspect.}, + Month = {Mar}, + Pages = {314--320}, + Title = {{{D}isparities in cognitive functioning by race/ethnicity in the {B}altimore {M}emory {S}tudy}}, + Volume = {112}, + Year = {2004}} + +@article{Schwartz2001, + Author = {Schwartz, B. S. and Lee, B. K. and Lee, G. S. and Stewart, W. F. and Lee, S. S. and Hwang, K. Y. and Ahn, K. D. and Kim, Y. B. and Bolla, K. I. and Simon, D. and Parsons, P. J. and Todd, A. C.}, + Journal = {Am. J. 
Epidemiol.},
+ Month = {Mar},
+ Pages = {453--464},
+ Title = {{{A}ssociations of blood lead, dimercaptosuccinic acid-chelatable lead, and tibia lead with neurobehavioral test scores in {S}outh {K}orean lead workers}},
+ Volume = {153},
+ Year = {2001}}
+
+@article{Schwartz2000,
+ Author = {Schwartz, B. S. and Stewart, W. F. and Bolla, K. I. and Simon, P. D. and Bandeen-Roche, K. and Gordon, P. B. and Links, J. M. and Todd, A. C.},
+ Journal = {Neurology},
+ Month = {Oct},
+ Pages = {1144--1150},
+ Title = {{{P}ast adult lead exposure is associated with longitudinal decline in cognitive function}},
+ Volume = {55},
+ Year = {2000}}
+
+@article{Schwartz1978,
+ Author = {Schwarz, G.},
+ Journal = {Annals of Statistics},
+ Owner = {ahnw},
+ Pages = {461--464},
+ Timestamp = {2007.05.04},
+ Title = {Estimating the dimension of a model},
+ Volume = {6},
+ Year = {1978}}
+
+@article{Schwartz2007,
+ Author = {Schwartz, O. and Hsu, A. and Dayan, P.},
+ Journal = {Nat. Rev. Neurosci.},
+ Month = {Jul},
+ Pages = {522--535},
+ Title = {{{S}pace and time in visual context}},
+ Volume = {8},
+ Year = {2007}}
+
+@article{Schwartz2006,
+ Author = {Schwartz, O. and Sejnowski, T. J. and Dayan, P.},
+ Journal = {Neural Comput},
+ Month = {Nov},
+ Pages = {2680--2718},
+ Title = {{{S}oft mixer assignment in a hierarchical generative model of natural scene statistics}},
+ Volume = {18},
+ Year = {2006}}
+
+@article{Schwarz1978,
+ Author = {Schwarz, G.},
+ Journal = {Annals of Statistics},
+ Pages = {461--464},
+ Title = {Estimating the Dimension of a Model},
+ Volume = {6},
+ Year = {1978}}
+
+@article{Schwarz1991,
+ Author = {Schwarz, W.},
+ Journal = {British Journal of Mathematical and Statistical Psychology},
+ Pages = {251--264},
+ Title = {Variance Results for Random Walk Models of Choice Reaction Time},
+ Volume = {44},
+ Year = {1991}}
+
+@article{Schweinsburg2004,
+ Author = {Schweinsburg, A. D. and Paulus, M. P. and Barlett, V. C. and Killeen, L. A. and Caldwell, L. C. and Pulido, C. and Brown, S. A. and Tapert, S. F.},
+ Journal = {Ann. N. Y. Acad. Sci.},
+ Month = {Jun},
+ Pages = {391--394},
+ Title = {{{A}n {F}{M}{R}{I} study of response inhibition in youths with a family history of alcoholism}},
+ Volume = {1021},
+ Year = {2004}}
+
+@article{Sears1994,
+ Author = {Sears, L. L. and Finn, P. R. and Steinmetz, J. E.},
+ Journal = {J Autism Dev Disord},
+ Month = {Dec},
+ Pages = {737--751},
+ Title = {{{A}bnormal classical eye-blink conditioning in autism}},
+ Volume = {24},
+ Year = {1994}}
+
+@book{Seber2003,
+ Address = {Hoboken (NJ)},
+ Author = {Seber, G. A. F. and Lee, A. J.},
+ Publisher = {John Wiley \& Sons},
+ Title = {Linear Regression Analysis (2nd ed.)},
+ Year = {2003}}
+
+@article{Seeman1987,
+ Author = {Seeman, P. and Bzowej, N.H. and Guan, H.C. and Bergeron, C. and Becker, L.E. and Reynolds, G.P. and Bird, ED and Riederer, P. and Jellinger, K. and Watanabe, S. and others},
+ Journal = {Synapse},
+ Number = {5},
+ Publisher = {Wiley Subscription Services, Inc., A Wiley Company Hoboken},
+ Title = {{Human brain dopamine receptors in children and aging adults}},
+ Volume = {1},
+ Year = {1987}}
+
+@article{Self1992,
+ Author = {Self, DW and Stein, L.},
+ Journal = {Brain research},
+ Number = {2},
+ Pages = {349},
+ Title = {{The D1 agonists SKF 82958 and SKF 77434 are self-administered by rats.}},
+ Volume = {582},
+ Year = {1992}}
+
+@article{Self1996,
+ Author = {Self, D. W. and Barnhart, W. J. and Lehman, D. A. and Nestler, E. 
J.},
+ Journal = {Science},
+ Month = {Mar},
+ Pages = {1586--1589},
+ Title = {{{O}pposite modulation of cocaine-seeking behavior by {D}1- and {D}2-like dopamine receptor agonists}},
+ Volume = {271},
+ Year = {1996}}
+
+@article{Self1998a,
+ Author = {Self, D. W. and Genova, L. M. and Hope, B. T. and Barnhart, W. J. and Spencer, J. J. and Nestler, E. J.},
+ Journal = {J. Neurosci.},
+ Month = {Mar},
+ Pages = {1848--1859},
+ Title = {{{I}nvolvement of c{A}{M}{P}-dependent protein kinase in the nucleus accumbens in cocaine self-administration and relapse of cocaine-seeking behavior}},
+ Volume = {18},
+ Year = {1998}}
+
+@article{Self1995a,
+ Author = {Self, D. W. and McClenahan, A. W. and Beitner-Johnson, D. and Terwilliger, R. Z. and Nestler, E. J.},
+ Journal = {Synapse},
+ Month = {Dec},
+ Pages = {312--318},
+ Title = {{{B}iochemical adaptations in the mesolimbic dopamine system in response to heroin self-administration}},
+ Volume = {21},
+ Year = {1995}}
+
+@article{Self1998,
+ Author = {Self, D. W. and Nestler, E. J.},
+ Journal = {Drug Alcohol Depend},
+ Pages = {49--60},
+ Title = {{{R}elapse to drug-seeking: neural and molecular mechanisms}},
+ Volume = {51},
+ Year = {1998}}
+
+@article{Self1995,
+ Author = {Self, D. W. and Nestler, E. J.},
+ Journal = {Annu. Rev. Neurosci.},
+ Pages = {463--495},
+ Title = {{{M}olecular mechanisms of drug reinforcement and addiction}},
+ Volume = {18},
+ Year = {1995}}
+
+@article{Self1994,
+ Author = {Self, D. W. and Terwilliger, R. Z. and Nestler, E. J. and Stein, L.},
+ Journal = {J. Neurosci.},
+ Month = {Oct},
+ Pages = {6239--6247},
+ Title = {{{I}nactivation of {G}i and {G}(o) proteins in nucleus accumbens reduces both cocaine and heroin reinforcement}},
+ Volume = {14},
+ Year = {1994}}
+
+@article{Sellke2001,
+ Author = {Sellke, T. and Bayarri, M. J. and Berger, J. O.},
+ Journal = {The American Statistician},
+ Pages = {62--71},
+ Title = {Calibration of $p$ Values for Testing Precise Null Hypotheses},
+ Volume = {55},
+ Year = {2001}}
+
+@article{Sevy2007,
+ Author = {Sevy, S. and Burdick, K. E. and Visweswaraiah, H. and Abdelmessih, S. and Lukin, M. and Yechiam, E. and Bechara, A.},
+ Journal = {Schizophr. Res.},
+ Month = {May},
+ Pages = {74--84},
+ Title = {{{I}owa gambling task in schizophrenia: a review and new data in patients with schizophrenia and co-occurring cannabis use disorders}},
+ Volume = {92},
+ Year = {2007}}
+
+@article{Sevy2007a,
+ Author = {Sevy, Serge and Burdick, Katherine E. and Visweswaraiah, Hema and Abdelmessih, Sheriff and Lukin, Meredith and Yechiam, Eldad and Bechara, Antoine},
+ Journal = {Schizophrenia Research},
+ Owner = {WooYoung Ahn},
+ Pages = {74--84},
+ Timestamp = {2008.01.07},
+ Title = {Iowa Gambling Task in schizophrenia: A review and new data in patients with schizophrenia and co-occurring cannabis use disorders},
+ Volume = {92},
+ Year = {2007}}
+
+@article{Seymour2007,
+ Author = {Seymour, B. and Daw, N. and Dayan, P. and Singer, T. and Dolan, R.},
+ Journal = {J. Neurosci.},
+ Month = {May},
+ Pages = {4826--4831},
+ Title = {{{D}ifferential encoding of losses and gains in the human striatum}},
+ Volume = {27},
+ Year = {2007}}
+
+@article{Seymour2004,
+ Author = {Seymour, B. and O'Doherty, J. P. and Dayan, P. and Koltzenburg, M. and Jones, A. K. and Dolan, R. J. and Friston, K. J. and Frackowiak, R. 
S.}, + Journal = {Nature}, + Month = {Jun}, + Pages = {664--667}, + Title = {{{T}emporal difference models describe higher-order learning in humans}}, + Volume = {429}, + Year = {2004}} + +@article{Seymour2004a, + Author = {Seymour, B. and O'Doherty, J. P. and Dayan, P. and Koltzenburg, M. and Jones, A. K. and Dolan, R. J. and Friston, K. J. and Frackowiak, R. S.}, + Journal = {Nature}, + Month = {Jun}, + Pages = {664--667}, + Title = {{{T}emporal difference models describe higher-order learning in humans}}, + Volume = {429}, + Year = {2004}} + +@article{Seymour2005, + Author = {Seymour, B. and O'Doherty, J. P. and Koltzenburg, M. and Wiech, K. and Frackowiak, R. and Friston, K. and Dolan, R.}, + Journal = {Nat. Neurosci.}, + Month = {Sep}, + Pages = {1234--1240}, + Title = {{{O}pponent appetitive-aversive neural processes underlie predictive learning of pain relief}}, + Volume = {8}, + Year = {2005}} + +@article{Sgambato2003, + Author = {Sgambato, V. and Minassian, R. and Nairn, A. C. and Hyman, S. E.}, + Journal = {J. Neurochem.}, + Month = {Jul}, + Pages = {153--164}, + Title = {{{R}egulation of ania-6 splice variants by distinct signaling pathways in striatal neurons}}, + Volume = {86}, + Year = {2003}} + +@article{Shadlen2002a, + Author = {Shadlen, M. N.}, + Journal = {Nat. Neurosci.}, + Month = {Sep}, + Pages = {819--821}, + Title = {{{P}ursuing commitments}}, + Volume = {5}, + Year = {2002}} + +@article{Shadlen1996c, + Author = {Shadlen, M. N. and Britten, K. H. and Newsome, W. T. and Movshon, J. A.}, + Journal = {J. Neurosci.}, + Month = {Feb}, + Pages = {1486--1510}, + Title = {{{A} computational analysis of the relationship between neuronal and behavioral responses to visual motion}}, + Volume = {16}, + Year = {1996}} + +@article{Shadlen2007a, + Author = {Shadlen, M. N. and Kiani, R.}, + Journal = {Nature}, + Month = {Aug}, + Pages = {539--540}, + Title = {{{N}eurology: an awakening}}, + Volume = {448}, + Year = {2007}} + +@article{Shadlen1999a, + Author = {Shadlen, M. N. and Movshon, J. A.}, + Journal = {Neuron}, + Month = {Sep}, + Pages = {67--77}, + Title = {{{S}ynchrony unbound: a critical evaluation of the temporal binding hypothesis}}, + Volume = {24}, + Year = {1999}} + +@article{Shadlen2001, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {Journal of Neurophysiology}, + Pages = {1916--1936}, + Title = {Neural Basis of a Perceptual Decision in the Parietal Cortex (Area {LIP}) of the Rhesus Monkey}, + Volume = {86}, + Year = {2001}} + +@article{Shadlen2001b, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {J. Neurophysiol.}, + Month = {Oct}, + Pages = {1916--1936}, + Title = {{{N}eural basis of a perceptual decision in the parietal cortex (area {L}{I}{P}) of the rhesus monkey}}, + Volume = {86}, + Year = {2001}} + +@article{Shadlen1998a, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {3870--3896}, + Title = {{{T}he variable discharge of cortical neurons: implications for connectivity, computation, and information coding}}, + Volume = {18}, + Year = {1998}} + +@article{Shadlen1996b, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {628--633}, + Title = {{{M}otion perception: seeing and deciding}}, + Volume = {93}, + Year = {1996}} + +@article{Shadlen1995a, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {Curr. Opin. 
Neurobiol.}, + Month = {Apr}, + Pages = {248--250}, + Title = {{{I}s there a signal in the noise?}}, + Volume = {5}, + Year = {1995}} + +@article{Shadlen1994a, + Author = {Shadlen, M. N. and Newsome, W. T.}, + Journal = {Curr. Opin. Neurobiol.}, + Month = {Aug}, + Pages = {569--579}, + Title = {{{N}oise, neural codes and cortical organization}}, + Volume = {4}, + Year = {1994}} + +@article{Shafer1982, + Author = {Shafer, G.}, + Journal = {Journal of the American Statistical Association}, + Pages = {325--351}, + Title = {{L}indley's Paradox}, + Volume = {77}, + Year = {1982}} + +@article{Shammi1998, + Author = {Shammi, P. and Bosman, E. and Stuss, D. T.}, + Journal = {Aging, Neuropsychology, and Cognition}, + Pages = {1--13}, + Title = {Aging and Variability in Performance}, + Volume = {5}, + Year = {1998}} + +@article{Shannon1948, + Author = {Shannon, C. E.}, + Journal = {Bell System Technical Journal}, + Owner = {Wooyoung Ahn}, + Pages = {379--423, 623--656}, + Timestamp = {2007.05.01}, + Title = {A mathematical theory of communication}, + Volume = {27}, + Year = {1948}} + +@article{Shao1997, + Author = {Shao, J.}, + Journal = {Statistica Sinica}, + Pages = {221--264}, + Title = {An Asymptotic Theory for Linear Model Selection}, + Volume = {7}, + Year = {1997}} + +@article{Shao1996, + Author = {Shao, J.}, + Journal = {Journal of the American Statistical Association}, + Pages = {655--665}, + Title = {Bootstrap Model Selection}, + Volume = {91}, + Year = {1996}} + +@article{Shao1993, + Author = {Shao, J.}, + Journal = {Journal of the American Statistical Association}, + Number = {422}, + Pages = {286--292}, + Title = {Linear Model Selection by Cross--validation}, + Volume = {88}, + Year = {1993}} + +@article{Sharkansky1998, + Author = {Sharkansky, E. J. and Finn, P. R.}, + Journal = {J. Stud. Alcohol}, + Month = {Mar}, + Pages = {198--206}, + Title = {{{E}ffects of outcome expectancies and disinhibition on ad lib alcohol consumption}}, + Volume = {59}, + Year = {1998}} + +@article{Sharot2004, + Author = {Sharot, T. and Delgado, M. R. and Phelps, E. A.}, + Journal = {Nat. Neurosci.}, + Month = {Dec}, + Pages = {1376--1380}, + Title = {{{H}ow emotion enhances the feeling of remembering}}, + Volume = {7}, + Year = {2004}} + +@article{Sharot2007, + Author = {Sharot, T. and Martorella, E. A. and Delgado, M. R. and Phelps, E. A.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {389--394}, + Title = {{{H}ow personal experience modulates the neural circuitry of memories of {S}eptember 11}}, + Volume = {104}, + Year = {2007}} + +@article{Shepherd2006a, + Author = {Shepherd, S. V. and Deaner, R. O. and Platt, M. L.}, + Journal = {Curr. Biol.}, + Month = {Feb}, + Pages = {R119--120}, + Title = {{{S}ocial status gates social attention in monkeys}}, + Volume = {16}, + Year = {2006}} + +@article{Shepherd2009, + Author = {Shepherd, S. V. and Klein, J. T. and Deaner, R. O. and Platt, M. L.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jun}, + Pages = {9489--9494}, + Title = {{{M}irroring of attention by neurons in macaque parietal cortex}}, + Volume = {106}, + Year = {2009}} + +@article{Shepherd2008, + Author = {Shepherd, S. V. and Platt, M. L.}, + Journal = {Anim Cogn}, + Month = {Jan}, + Pages = {13--20}, + Title = {{{S}pontaneous social orienting and gaze following in ringtailed lemurs ({L}emur catta)}}, + Volume = {11}, + Year = {2008}} + +@article{Shepherd2006, + Author = {Shepherd, S. V. and Platt, M.
L.}, + Journal = {Methods}, + Month = {Mar}, + Pages = {185--194}, + Title = {{{N}oninvasive telemetric gaze tracking in freely moving socially housed prosimian primates}}, + Volume = {38}, + Year = {2006}} + +@article{Sher2000, + Abstract = {The personality systems of Cloninger (as measured by the Tridimensional + Personality Questionnaire [TPQ]) and Eysenck (as measured by the + Eysenck Personality Questionnaire [EPQ]) both have been linked to + substance use and abuse. The current study examined the predictive + utility of both systems for substance use disorder (SUD) diagnoses, + both cross-sectionally and prospectively. Participants (N = 489 at + baseline) completed the EPQ and TPQ and were assessed via structured + diagnostic interview at baseline and 6 years later (N = 457 at follow-up). + Both the EPQ and TPQ scales demonstrated bivariate cross-sectional + and prospective associations with SUDs. Within each system, those + dimensions marking a broad impulsive sensation-seeking or behavioral + disinhibition trait were the best predictors prospectively, although + the 2 systems were differentially sensitive to specific diagnoses. + These relations remained significant even with autoregressivity, + other concurrent SUD diagnoses, and multiple personality dimensions + statistically controlled.}, + Author = {K. J. Sher and B. D. Bartholow and M. D. Wood}, + Institution = {Department of Psychology, University of Missouri, Columbia 65211, USA. SherK@missouri.edu}, + Journal = {J Consult Clin Psychol}, + Keywords = {Adolescent; Adult; Cross-Sectional Studies; Female; Follow-Up Studies; Humans; Impulsive Behavior, psychology; Inhibition (Psychology); Logistic Models; Male; Personality Disorders, diagnosis; Personality Inventory, standards; Predictive Value of Tests; Prospective Studies; Risk-Taking; Sensitivity and Specificity; Substance-Related Disorders, diagnosis/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {818--829}, + Pmid = {11068968}, + Timestamp = {2009.08.06}, + Title = {Personality and substance use disorders: a prospective study.}, + Volume = {68}, + Year = {2000}} + +@article{Sheu1998, + Author = {Sheu, C.-F. and O'{C}urry, S. L.}, + Journal = {Behavioral Research Methods, Instruments, \& Computers}, + Pages = {232--237}, + Title = {Simulation-based {B}ayesian Inference Using {BUGS}}, + Volume = {30}, + Year = {1998}} + +@article{Shiffrin2008, + Author = {Shiffrin, R.M. and Lee, Michael D. and Kim, W. and Wagenmakers, E.J.}, + Date-Modified = {2016-03-24 02:34:35 +0000}, + Journal = {Cognitive Science: A Multidisciplinary Journal}, + Number = {8}, + Pages = {1248--1284}, + Publisher = {Psychology Press}, + Title = {{A survey of model evaluation approaches with a tutorial on hierarchical Bayesian methods}}, + Volume = {32}, + Year = {2008}} + +@article{Shih2006, + Author = {Shih, R. A. and Glass, T. A. and Bandeen-Roche, K. and Carlson, M. C. and Bolla, K. I. and Todd, A. C. and Schwartz, B. S.}, + Journal = {Neurology}, + Month = {Nov}, + Pages = {1556--1562}, + Title = {{{E}nvironmental lead exposure and cognitive function in community-dwelling older adults}}, + Volume = {67}, + Year = {2006}} + +@article{Shiv2005, + Author = {Shiv, B. and Loewenstein, G. 
and Bechara, A.}, + Journal = {Brain Res Cogn Brain Res}, + Month = {Apr}, + Pages = {85--92}, + Title = {{{T}he dark side of emotion in decision-making: when individuals with decreased emotional reactions make more advantageous decisions}}, + Volume = {23}, + Year = {2005}} + +@book{Siegmund1985, + Address = {New York}, + Author = {Siegmund, D.}, + Publisher = {Springer}, + Title = {Sequential Analysis: Tests and Confidence Intervals}, + Year = {1985}} + +@article{Sigman2006, + Author = {Sigman, M. and Dehaene, S.}, + Journal = {PLoS Biology}, + Pages = {e220}, + Title = {Dynamics of the Central Bottleneck: {D}ual--Task and Task Uncertainty}, + Volume = {4}, + Year = {2006}} + +@article{Sigman2005, + Author = {Sigman, M. and Dehaene, S.}, + Journal = {PLoS Biology}, + Pages = {334--349}, + Title = {Parsing a Cognitive Task: {A} Characterization of the Mind's Bottleneck}, + Volume = {3}, + Year = {2005}} + +@book{Silverman1986, + Address = {London}, + Author = {Silverman, B. W.}, + Publisher = {Chapman \& Hall}, + Title = {Density Estimation for Statistics and Data Analysis}, + Year = {1986}} + +@article{Silverstone1984, + Author = {T. Silverstone}, + Journal = {Lancet}, + Keywords = {Adult; Bipolar Disorder, diagnosis/drug therapy; Bromocriptine, therapeutic use; Depressive Disorder, diagnosis/drug therapy; Diagnosis, Differential; Female; Humans; Male; Middle Aged}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {8382}, + Owner = {Young}, + Pages = {903--904}, + Pmid = {6143203}, + Timestamp = {2010.05.01}, + Title = {Response to bromocriptine distinguishes bipolar from unipolar depression.}, + Volume = {1}, + Year = {1984}} + +@article{Sim2007, + Author = {Sim, M. E. and Lyoo, I. K. and Streeter, C. C. and Covell, J. and Sarid-Segal, O. and Ciraulo, D. A. and Kim, M. J. and Kaufman, M. J. and Yurgelun-Todd, D. A. and Renshaw, P. F.}, + Journal = {Neuropsychopharmacology}, + Month = {Oct}, + Pages = {2229--2237}, + Title = {{{C}erebellar gray matter volume correlates with duration of cocaine use in cocaine-dependent subjects}}, + Volume = {32}, + Year = {2007}} + +@article{Sim-Selley1999, + Author = {Sim-Selley, L. J. and Daunais, J. B. and Porrino, L. J. and Childers, S. R.}, + Journal = {Neuroscience}, + Pages = {651--662}, + Title = {{{M}u and kappa1 opioid-stimulated [35{S}]guanylyl-5'-{O}-(gamma-thio)-triphosphate binding in cynomolgus monkey brain}}, + Volume = {94}, + Year = {1999}} + +@article{Simen2006, + Author = {Simen, Patrick and Cohen, Jonathan D. and Holmes, Philip}, + Journal = {Neural Networks}, + Owner = {WooYoung Ahn}, + Pages = {1013--1026}, + Timestamp = {2007.12.14}, + Title = {Rapid decision threshold modulation by reward rate in a neural network}, + Volume = {19}, + Year = {2006}} + +@article{Simen2006a, + Author = {Simen, P. and Cohen, J. D. and Holmes, P.}, + Journal = {Neural Networks}, + Pages = {1013--1026}, + Title = {Rapid Decision Threshold Modulation by Reward Rate in a Neural Network}, + Volume = {19}, + Year = {2006}} + +@article{Simmons2008a, + Author = {Simmons, A. and Matthews, S. C. and Feinstein, J. S. and Hitchcock, C. and Paulus, M. P. and Stein, M. B.}, + Journal = {Neuroreport}, + Month = {Jul}, + Pages = {1033--1037}, + Title = {{{A}nxiety vulnerability is associated with altered anterior cingulate response to an affective appraisal task}}, + Volume = {19}, + Year = {2008}} + +@article{Simmons2008b, + Author = {Simmons, A. and Matthews, S. C. and Paulus, M. P. and Stein, M. B.}, + Journal = {Neurosci.
Lett.}, + Month = {Jan}, + Pages = {92--97}, + Title = {{{I}ntolerance of uncertainty correlates with insula activation during affective ambiguity}}, + Volume = {430}, + Year = {2008}} + +@article{Simmons2004, + Author = {Simmons, A. and Matthews, S. C. and Stein, M. B. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {Oct}, + Pages = {2261--2265}, + Title = {{{A}nticipation of emotionally aversive visual stimuli activates right insula}}, + Volume = {15}, + Year = {2004}} + +@article{Simmons2005, + Author = {Simmons, A. and Miller, D. and Feinstein, J. S. and Goldberg, T. E. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {30--38}, + Title = {{{L}eft inferior prefrontal cortex activation during a semantic decision-making task predicts the degree of semantic organization}}, + Volume = {28}, + Year = {2005}} + +@article{Simmons2006a, + Author = {Simmons, A. and Stein, M. B. and Matthews, S. C. and Feinstein, J. S. and Paulus, M. P.}, + Journal = {Neuroimage}, + Month = {Jan}, + Pages = {655--661}, + Title = {{{A}ffective ambiguity for a group recruits ventromedial prefrontal cortex}}, + Volume = {29}, + Year = {2006}} + +@article{Simmons2009a, + Author = {Simmons, A. and Strigo, I. A. and Matthews, S. C. and Paulus, M. P. and Stein, M. B.}, + Journal = {Psychosom Med}, + Month = {May}, + Pages = {373--377}, + Title = {{{I}nitial evidence of a failure to activate right anterior insula during affective set shifting in posttraumatic stress disorder}}, + Volume = {71}, + Year = {2009}} + +@article{Simmons2006, + Author = {Simmons, A. and Strigo, I. and Matthews, S. C. and Paulus, M. P. and Stein, M. B.}, + Journal = {Biol. Psychiatry}, + Month = {Aug}, + Pages = {402--409}, + Title = {{{A}nticipation of aversive visual stimuli is associated with increased insula activation in anxiety-prone subjects}}, + Volume = {60}, + Year = {2006}} + +@article{Simmons2009, + Author = {Simmons, A. N. and Arce, E. and Lovero, K. L. and Stein, M. B. and Paulus, M. P.}, + Journal = {Int. J. Neuropsychopharmacol.}, + Month = {Jun}, + Pages = {1--12}, + Title = {{{S}ubchronic {S}{S}{R}{I} administration reduces insula response during affective anticipation in healthy volunteers}}, + Year = {2009}} + +@article{Simmons2008, + Author = {Simmons, A. N. and Paulus, M. P. and Thorp, S. R. and Matthews, S. C. and Norman, S. B. and Stein, M. B.}, + Journal = {Biol. Psychiatry}, + Month = {Oct}, + Pages = {681--690}, + Title = {{{F}unctional activation and neural networks in women with posttraumatic stress disorder related to intimate partner violence}}, + Volume = {64}, + Year = {2008}} + +@article{Singer2009, + Author = {Singer, T. and Critchley, H. D. and Preuschoff, K.}, + Journal = {Trends Cogn. Sci.}, + Title = {{{A} common role of insula in feelings, empathy and uncertainty}}, + Year = {in press}} + +@article{Singer2004, + Author = {Singer, T. and Seymour, B. and O'Doherty, J. and Kaube, H. and Dolan, R. J. and Frith, C. D.}, + Journal = {Science}, + Month = {Feb}, + Pages = {1157--1162}, + Title = {{{E}mpathy for pain involves the affective but not sensory components of pain}}, + Volume = {303}, + Year = {2004}} + +@article{Singer2006, + Author = {Singer, T. and Seymour, B. and O'Doherty, J. P. and Stephan, K. E. and Dolan, R. J. and Frith, C. 
D.}, + Journal = {Nature}, + Month = {Jan}, + Pages = {466--469}, + Title = {{{E}mpathic neural responses are modulated by the perceived fairness of others}}, + Volume = {439}, + Year = {2006}} + +@article{Sinha2009a, + Author = {Sinha, R.}, + Journal = {Biol. Psychiatry}, + Month = {Jul}, + Pages = {100--101}, + Title = {{{S}tress and addiction: a dynamic interplay of genes, environment, and drug intake}}, + Volume = {66}, + Year = {2009}} + +@article{Sinha2009b, + Author = {Sinha, R.}, + Journal = {Addict Biol}, + Month = {Jan}, + Pages = {84--98}, + Title = {{{M}odeling stress and drug craving in the laboratory: implications for addiction treatment development}}, + Volume = {14}, + Year = {2009}} + +@article{Sinha2009d, + Author = {Sinha, R.}, + Journal = {Biol. Psychiatry}, + Month = {Jul}, + Pages = {100--101}, + Title = {{{S}tress and addiction: a dynamic interplay of genes, environment, and drug intake}}, + Volume = {66}, + Year = {2009}} + +@article{Sinha2009e, + Author = {Sinha, R.}, + Journal = {Addict Biol}, + Month = {Jan}, + Pages = {84--98}, + Title = {{{M}odeling stress and drug craving in the laboratory: implications for addiction treatment development}}, + Volume = {14}, + Year = {2009}} + +@article{Sinha2009g, + Author = {Sinha, R.}, + Journal = {Biol. Psychiatry}, + Month = {Jul}, + Pages = {100--101}, + Title = {{{S}tress and addiction: a dynamic interplay of genes, environment, and drug intake}}, + Volume = {66}, + Year = {2009}} + +@article{Sinha2009h, + Author = {Sinha, R.}, + Journal = {Addict Biol}, + Month = {Jan}, + Pages = {84--98}, + Title = {{{M}odeling stress and drug craving in the laboratory: implications for addiction treatment development}}, + Volume = {14}, + Year = {2009}} + +@article{Sinha2008, + Author = {Sinha, R.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Oct}, + Pages = {105--130}, + Title = {{{C}hronic stress, drug use, and vulnerability to addiction}}, + Volume = {1141}, + Year = {2008}} + +@article{Sinha2008a, + Author = {Sinha, R.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Oct}, + Pages = {105--130}, + Title = {{{C}hronic stress, drug use, and vulnerability to addiction}}, + Volume = {1141}, + Year = {2008}} + +@article{Sinha2008b, + Author = {Sinha, R.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Oct}, + Pages = {105--130}, + Title = {{{C}hronic stress, drug use, and vulnerability to addiction}}, + Volume = {1141}, + Year = {2008}} + +@article{Sinha2007c, + Author = {Sinha, R.}, + Journal = {Curr Psychiatry Rep}, + Month = {Oct}, + Pages = {388--395}, + Title = {{{T}he role of stress in addiction relapse}}, + Volume = {9}, + Year = {2007}} + +@article{Sinha2007g, + Author = {Sinha, R.}, + Journal = {Curr Psychiatry Rep}, + Month = {Oct}, + Pages = {388--395}, + Title = {{{T}he role of stress in addiction relapse}}, + Volume = {9}, + Year = {2007}} + +@article{Sinha2007k, + Author = {Sinha, R.}, + Journal = {Curr Psychiatry Rep}, + Month = {Oct}, + Pages = {388--395}, + Title = {{{T}he role of stress in addiction relapse}}, + Volume = {9}, + Year = {2007}} + +@article{Sinha2003b, + Author = {Sinha, R. and Easton, C. and Kemp, K.}, + Journal = {Am J Drug Alcohol Abuse}, + Month = {Aug}, + Pages = {585--597}, + Title = {{{S}ubstance abuse treatment characteristics of probation-referred young adults in a community-based outpatient program}}, + Volume = {29}, + Year = {2003}} + +@article{Sinha2003e, + Author = {Sinha, R. and Easton, C. 
and Kemp, K.}, + Journal = {Am J Drug Alcohol Abuse}, + Month = {Aug}, + Pages = {585--597}, + Title = {{{S}ubstance abuse treatment characteristics of probation-referred young adults in a community-based outpatient program}}, + Volume = {29}, + Year = {2003}} + +@article{Sinha2003a, + Author = {Sinha, R. and Easton, C. and Renee-Aubin, L. and Carroll, K. M.}, + Journal = {Am J Addict}, + Pages = {314--323}, + Title = {{{E}ngaging young probation-referred marijuana-abusing individuals in treatment: a pilot trial}}, + Volume = {12}, + Year = {2003}} + +@article{Sinha2003d, + Author = {Sinha, R. and Easton, C. and Renee-Aubin, L. and Carroll, K. M.}, + Journal = {Am J Addict}, + Pages = {314--323}, + Title = {{{E}ngaging young probation-referred marijuana-abusing individuals in treatment: a pilot trial}}, + Volume = {12}, + Year = {2003}} + +@article{Sinha2007b, + Author = {Sinha, R. and Fox, H. and Hong, K. I. and Sofuoglu, M. and Morgan, P. T. and Bergquist, K. T.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Oct}, + Pages = {445--452}, + Title = {{{S}ex steroid hormones, stress response, and drug craving in cocaine-dependent women: implications for relapse susceptibility}}, + Volume = {15}, + Year = {2007}} + +@article{Sinha2007f, + Author = {Sinha, R. and Fox, H. and Hong, K. I. and Sofuoglu, M. and Morgan, P. T. and Bergquist, K. T.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Oct}, + Pages = {445--452}, + Title = {{{S}ex steroid hormones, stress response, and drug craving in cocaine-dependent women: implications for relapse susceptibility}}, + Volume = {15}, + Year = {2007}} + +@article{Sinha2007j, + Author = {Sinha, R. and Fox, H. and Hong, K. I. and Sofuoglu, M. and Morgan, P. T. and Bergquist, K. T.}, + Journal = {Exp Clin Psychopharmacol}, + Month = {Oct}, + Pages = {445--452}, + Title = {{{S}ex steroid hormones, stress response, and drug craving in cocaine-dependent women: implications for relapse susceptibility}}, + Volume = {15}, + Year = {2007}} + +@article{Sinha2009, + Author = {Sinha, R. and Fox, H. C. and Hong, K. A. and Bergquist, K. and Bhagwagar, Z. and Siedlarz, K. M.}, + Journal = {Neuropsychopharmacology}, + Month = {Apr}, + Pages = {1198--1208}, + Title = {{{E}nhanced negative emotion and alcohol craving, and altered physiological responses following stress and cue exposure in alcohol dependent individuals}}, + Volume = {34}, + Year = {2009}} + +@article{Sinha2009c, + Author = {Sinha, R. and Fox, H. C. and Hong, K. A. and Bergquist, K. and Bhagwagar, Z. and Siedlarz, K. M.}, + Journal = {Neuropsychopharmacology}, + Month = {Apr}, + Pages = {1198--1208}, + Title = {{{E}nhanced negative emotion and alcohol craving, and altered physiological responses following stress and cue exposure in alcohol dependent individuals}}, + Volume = {34}, + Year = {2009}} + +@article{Sinha2009f, + Author = {Sinha, R. and Fox, H. C. and Hong, K. A. and Bergquist, K. and Bhagwagar, Z. and Siedlarz, K. M.}, + Journal = {Neuropsychopharmacology}, + Month = {Apr}, + Pages = {1198--1208}, + Title = {{{E}nhanced negative emotion and alcohol craving, and altered physiological responses following stress and cue exposure in alcohol dependent individuals}}, + Volume = {34}, + Year = {2009}} + +@article{Sinha2006, + Author = {Sinha, R. and Garcia, M. and Paliwal, P. and Kreek, M. J. and Rounsaville, B. J.}, + Journal = {Arch. Gen. 
Psychiatry}, + Month = {Mar}, + Pages = {324--331}, + Title = {{{S}tress-induced cocaine craving and hypothalamic-pituitary-adrenal responses are predictive of cocaine relapse outcomes}}, + Volume = {63}, + Year = {2006}} + +@article{Sinha2006a, + Author = {Sinha, R. and Garcia, M. and Paliwal, P. and Kreek, M. J. and Rounsaville, B. J.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {324--331}, + Title = {{{S}tress-induced cocaine craving and hypothalamic-pituitary-adrenal responses are predictive of cocaine relapse outcomes}}, + Volume = {63}, + Year = {2006}} + +@article{Sinha2006b, + Author = {Sinha, R. and Garcia, M. and Paliwal, P. and Kreek, M. J. and Rounsaville, B. J.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {324--331}, + Title = {{{S}tress-induced cocaine craving and hypothalamic-pituitary-adrenal responses are predictive of cocaine relapse outcomes}}, + Volume = {63}, + Year = {2006}} + +@article{Sinha2007a, + Author = {Sinha, R. and Kimmerling, A. and Doebrick, C. and Kosten, T. R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {569--574}, + Title = {{{E}ffects of lofexidine on stress-induced and cue-induced opioid craving and opioid abstinence rates: preliminary findings}}, + Volume = {190}, + Year = {2007}} + +@article{Sinha2007e, + Author = {Sinha, R. and Kimmerling, A. and Doebrick, C. and Kosten, T. R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {569--574}, + Title = {{{E}ffects of lofexidine on stress-induced and cue-induced opioid craving and opioid abstinence rates: preliminary findings}}, + Volume = {190}, + Year = {2007}} + +@article{Sinha2007i, + Author = {Sinha, R. and Kimmerling, A. and Doebrick, C. and Kosten, T. R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {569--574}, + Title = {{{E}ffects of lofexidine on stress-induced and cue-induced opioid craving and opioid abstinence rates: preliminary findings}}, + Volume = {190}, + Year = {2007}} + +@article{Sinha2005, + Author = {Sinha, R. and Lacadie, C. and Skudlarski, P. and Fulbright, R. K. and Rounsaville, B. J. and Kosten, T. R. and Wexler, B. E.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Dec}, + Pages = {171--180}, + Title = {{{N}eural activity associated with stress-induced cocaine craving: a functional magnetic resonance imaging study}}, + Volume = {183}, + Year = {2005}} + +@article{Sinha2005a, + Author = {Sinha, R. and Lacadie, C. and Skudlarski, P. and Fulbright, R. K. and Rounsaville, B. J. and Kosten, T. R. and Wexler, B. E.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Dec}, + Pages = {171--180}, + Title = {{{N}eural activity associated with stress-induced cocaine craving: a functional magnetic resonance imaging study}}, + Volume = {183}, + Year = {2005}} + +@article{Sinha2005b, + Author = {Sinha, R. and Lacadie, C. and Skudlarski, P. and Fulbright, R. K. and Rounsaville, B. J. and Kosten, T. R. and Wexler, B. E.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Dec}, + Pages = {171--180}, + Title = {{{N}eural activity associated with stress-induced cocaine craving: a functional magnetic resonance imaging study}}, + Volume = {183}, + Year = {2005}} + +@article{Sinha2004, + Author = {Sinha, R. and Lacadie, C. and Skudlarski, P. and Wexler, B. E.}, + Journal = {Ann. N. Y. Acad. 
Sci.}, + Month = {Dec}, + Pages = {254--257}, + Title = {{{N}eural circuits underlying emotional distress in humans}}, + Volume = {1032}, + Year = {2004}} + +@article{Sinha2004a, + Author = {Sinha, R. and Lacadie, C. and Skudlarski, P. and Wexler, B. E.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Dec}, + Pages = {254--257}, + Title = {{{N}eural circuits underlying emotional distress in humans}}, + Volume = {1032}, + Year = {2004}} + +@article{sinha2007imaging, + Author = {Sinha, R. and Li, C.S.R.}, + Journal = {Drug and Alcohol Review}, + Number = {1}, + Pages = {25--31}, + Publisher = {Informa Healthcare}, + Title = {{Imaging stress-and cue-induced drug and alcohol craving: association with relapse and clinical implications}}, + Volume = {26}, + Year = {2007}} + +@article{Sinha2007, + Author = {Sinha, R. and Li, C. S.}, + Journal = {Drug Alcohol Rev}, + Month = {Jan}, + Pages = {25--31}, + Title = {{{I}maging stress- and cue-induced drug and alcohol craving: association with relapse and clinical implications}}, + Volume = {26}, + Year = {2007}} + +@article{Sinha2007d, + Author = {Sinha, R. and Li, C. S.}, + Journal = {Drug Alcohol Rev}, + Month = {Jan}, + Pages = {25--31}, + Title = {{{I}maging stress- and cue-induced drug and alcohol craving: association with relapse and clinical implications}}, + Volume = {26}, + Year = {2007}} + +@article{Sinha2007h, + Author = {Sinha, R. and Li, C. S.}, + Journal = {Drug Alcohol Rev}, + Month = {Jan}, + Pages = {25--31}, + Title = {{{I}maging stress- and cue-induced drug and alcohol craving: association with relapse and clinical implications}}, + Volume = {26}, + Year = {2007}} + +@article{Sinha2002, + Author = {Sinha, R. and Rounsaville, B. J.}, + Journal = {J Clin Psychiatry}, + Month = {Jul}, + Pages = {616--627}, + Title = {{{S}ex differences in depressed substance abusers}}, + Volume = {63}, + Year = {2002}} + +@article{Sinha2002a, + Author = {Sinha, R. and Rounsaville, B. J.}, + Journal = {J Clin Psychiatry}, + Month = {Jul}, + Pages = {616--627}, + Title = {{{S}ex differences in depressed substance abusers}}, + Volume = {63}, + Year = {2002}} + +@article{Sinha2003, + Author = {Sinha, R. and Talih, M. and Malison, R. and Cooney, N. and Anderson, G. M. and Kreek, M. J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {62--72}, + Title = {{{H}ypothalamic-pituitary-adrenal axis and sympatho-adreno-medullary responses during stress-induced and drug cue-induced cocaine craving states}}, + Volume = {170}, + Year = {2003}} + +@article{Sinha2003c, + Author = {Sinha, R. and Talih, M. and Malison, R. and Cooney, N. and Anderson, G. M. and Kreek, M. J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {62--72}, + Title = {{{H}ypothalamic-pituitary-adrenal axis and sympatho-adreno-medullary responses during stress-induced and drug cue-induced cocaine craving states}}, + Volume = {170}, + Year = {2003}} + +@article{Skeel2008, + Author = {Skeel, R. L. and Pilarski, C. and Pytlak, K. and Neudecker, J.}, + Journal = {Psychol Addict Behav}, + Month = {Sep}, + Pages = {402--409}, + Title = {{{P}ersonality and performance-based measures in the prediction of alcohol use}}, + Volume = {22}, + Year = {2008}} + +@article{Sklair-Tavron1996, + Author = {Sklair-Tavron, L. and Shi, W. X. and Lane, S. B. and Harris, H. W. and Bunney, B. S. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. 
U.S.A.}, + Month = {Oct}, + Pages = {11202--11207}, + Title = {{{C}hronic morphine induces visible changes in the morphology of mesolimbic dopamine neurons}}, + Volume = {93}, + Year = {1996}} + +@article{Skouras1998, + Author = {Skouras, K. and Dawid, A. P.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {765--780}, + Title = {On Efficient Point Prediction Systems}, + Volume = {60}, + Year = {1998}} + +@article{Slifkin1998, + Author = {Slifkin, A. B. and Newell, K. M.}, + Journal = {Current Directions in Psychological Science}, + Pages = {170--177}, + Title = {Is Variability in Human Performance a Reflection of System Noise?}, + Volume = {7}, + Year = {1998}} + +@article{Smith1993, + Author = {Smith, A. F. M. and Roberts, G. O.}, + Journal = {Journal of the Royal Statistical Society: {S}eries {B}}, + Pages = {3--23}, + Title = {{B}ayesian Computation via the {G}ibbs Sampler and Related {M}arkov Chain {M}onte {C}arlo Methods}, + Volume = {55}, + Year = {1993}} + +@article{Smith1980, + Author = {Smith, A. F. M. and Spiegelhalter, D. J.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {213--220}, + Title = {{B}ayes Factors and Choice Criteria for Linear Models}, + Volume = {42}, + Year = {1980}} + +@article{Smith2009, + Author = {Smith, B. W. and Mitchell, D. G. and Hardin, M. G. and Jazbec, S. and Fridberg, D. and Blair, R. J. and Ernst, M.}, + Journal = {Neuroimage}, + Month = {Jan}, + Pages = {600--609}, + Title = {{{N}eural substrates of reward magnitude, probability, and risk during a wheel of fortune decision-making task}}, + Volume = {44}, + Year = {2009}} + +@article{Smith1961, + Author = {Smith, C. A. B.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {1--37}, + Title = {Consistency in Statistical Inference and Decision (with discussion)}, + Volume = {23}, + Year = {1961}} + +@article{Smith2006, + Author = {Smith, H. R. and Beveridge, T. J. and Porrino, L. J.}, + Journal = {Neuroscience}, + Pages = {703--714}, + Title = {{{D}istribution of norepinephrine transporters in the non-human primate brain}}, + Volume = {138}, + Year = {2006}} + +@article{Smith1999, + Author = {Smith, H. R. and Daunais, J. B. and Nader, M. A. and Porrino, L. J.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Jun}, + Pages = {700--702}, + Title = {{{D}istribution of [3{H}]citalopram binding sites in the nonhuman primate brain}}, + Volume = {877}, + Year = {1999}} + +@article{Smith2008, + Author = {Smith, H. R. and Porrino, L. J.}, + Journal = {Brain Struct Funct}, + Month = {Sep}, + Pages = {73--91}, + Title = {{{T}he comparative distributions of the monoamine transporters in the rodent, monkey, and human amygdala}}, + Volume = {213}, + Year = {2008}} + +@article{Smith2000, + Author = {Smith, P. L.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {408--463}, + Title = {Stochastic Dynamic Models of Response Time and Accuracy: A Foundational Primer}, + Volume = {44}, + Year = {2000}} + +@article{Smith2004, + Author = {Smith, P. L. and Ratcliff, R.}, + Journal = {Trends in Neurosciences}, + Pages = {161--168}, + Title = {The Psychology and Neurobiology of Simple Decisions}, + Volume = {27}, + Year = {2004}} + +@article{Smith1988, + Author = {Smith, P. L. and Vickers, D.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {135--168}, + Title = {The Accumulator Model of Two--choice Discrimination}, + Volume = {32}, + Year = {1988}} + +@article{Smoller2008, + Author = {Smoller, J. W. and Paulus, M. P. and Fagerness, J. A. 
and Purcell, S. and Yamaki, L. H. and Hirshfeld-Becker, D. and Biederman, J. and Rosenbaum, J. F. and Gelernter, J. and Stein, M. B.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Mar}, + Pages = {298--308}, + Title = {{{I}nfluence of {R}{G}{S}2 on anxiety-related temperament, personality, and brain function}}, + Volume = {65}, + Year = {2008}} + +@article{Smoski2009, + Author = {Smoski, M. J. and Felder, J. and Bizzell, J. and Green, S. R. and Ernst, M. and Lynch, T. R. and Dichter, G. S.}, + Journal = {J Affect Disord}, + Month = {Mar}, + Title = {{f{M}{R}{I} of alterations in reward selection, anticipation, and feedback in major depressive disorder}}, + Year = {2009}} + +@article{Smythe1983, + Author = {Smythe, J. and Moss, F. and McClintock, P. V. E.}, + Journal = {Physical Review Letters}, + Pages = {1062--1065}, + Title = {Observation of a Noise--Induced Phase Transition with an Analog Simulator}, + Volume = {51}, + Year = {1983}} + +@book{Soong1973, + Address = {New York}, + Author = {Soong, T. T.}, + Publisher = {Academic Press}, + Title = {Random Differential Equations in Science and Engineering}, + Year = {1973}} + +@book{Sornette2000, + Address = {Berlin}, + Author = {Sornette, D.}, + Publisher = {Springer Verlag}, + Title = {Critical Phenomena in Natural Sciences}, + Year = {2000}} + +@article{Sowell1992, + Author = {Sowell, F. B.}, + Journal = {Journal of Econometrics}, + Pages = {165--188}, + Title = {Maximum Likelihood Estimation of Stationary Univariate Fractionally Integrated Time Series Models}, + Volume = {53}, + Year = {1992}} + +@article{Sowell1992a, + Author = {Sowell, F. B.}, + Journal = {Journal of Monetary Economics}, + Pages = {277--302}, + Title = {Modeling Long Run Behavior With the Fractional ARIMA Model}, + Volume = {29}, + Year = {1992}} + +@article{Spaniol2006, + Author = {Spaniol, J. and Madden, D. J. and Voss, A.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {101--117}, + Title = {A Diffusion Model Analysis of Adult Age Differences in Episodic and Semantic Long--Term Memory Retrieval}, + Volume = {32}, + Year = {2006}} + +@article{Sparagana2003, + Author = {Sparagana, S. P. and Delgado, M. R. and Batchelor, L. L. and Roach, E. S.}, + Journal = {Arch. Neurol.}, + Month = {Sep}, + Pages = {1286--1289}, + Title = {{{S}eizure remission and antiepileptic drug discontinuation in children with tuberous sclerosis complex}}, + Volume = {60}, + Year = {2003}} + +@article{Speckman2004, + Abstract = {Heathcote, Brown, and Mewhort (2002) have introduced a new, robust + method of estimating response time distributions. Their method may + have practical advantages over conventional maximum likelihood estimation. + The basic idea is that the likelihood of parameters is maximized + given a few quantiles from the data. We show that Heathcote et al.'s + likelihood function is not correct and provide the appropriate correction. + However, although our correction stands on firmer theoretical ground + than Heathcote et al.'s, it appears to yield worse parameter estimates. 
+ This result further indicates that, at least for some distributions + and situations, quantile maximum likelihood estimation may have better + nonasymptotic properties than a more theoretically justified approach.}, + Author = {Paul L Speckman and Jeffrey N Rouder}, + Institution = {University of Missouri, Columbia, Missouri 65211, USA.}, + Journal = {Psychon Bull Rev}, + Keywords = {Humans; Models, Statistical; Reaction Time}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {574--576; discussion 577--578}, + Pmid = {15376813}, + Timestamp = {2009.08.15}, + Title = {A comment on Heathcote, Brown, and Mewhort's QMLE method for response time distributions.}, + Volume = {11}, + Year = {2004}} + +@incollection{Sperling1981, + Address = {Providence, Rhode Island}, + Author = {Sperling, G.}, + Booktitle = {Mathematical Psychology and Psychophysiology}, + Editor = {Grossberg, S.}, + Publisher = {SIAM Proceedings}, + Title = {Mathematical models of binocular vision}, + Year = {1981}} + +@article{Sperling1970, + Author = {Sperling, G.}, + Journal = {The American Journal of Psychology}, + Pages = {461--534}, + Title = {Binocular Vision: A Physical and a Neural Theory}, + Volume = {83}, + Year = {1970}} + +@article{Spezio2008, + Author = {Spezio, M. L. and Rangel, A. and Alvarez, R. M. and O'Doherty, J. P. and Mattes, K. and Todorov, A. and Kim, H. and Adolphs, R.}, + Journal = {Soc Cogn Affect Neurosci}, + Month = {Dec}, + Pages = {344--352}, + Title = {{{A} neural basis for the effect of candidate appearance on election outcomes}}, + Volume = {3}, + Year = {2008}} + +@article{Spiegelhalter2002, + Author = {Spiegelhalter, D. J. and Best, N. G. and Carlin, B. P. and {van der Linde}, A.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {583--639}, + Title = {{B}ayesian Measures of Model Complexity and Fit}, + Volume = {64}, + Year = {2002}} + +@article{Spiegelhalter1994, + Author = {Spiegelhalter, D. J. and Freedman, L. S. and Parmar, M. K. B.}, + Journal = {Journal of the Royal Statistical Society A}, + Pages = {357--416}, + Title = {{B}ayesian Approaches to Randomized Trials (with discussion)}, + Volume = {157}, + Year = {1994}} + +@book{Spiegelhalter2004, + Address = {Cambridge, UK}, + Author = {Spiegelhalter, D. J. and Thomas, A. and Best, N. G.}, + Publisher = {Medical Research Council Biostatistics Unit}, + Title = {WinBUGS Version 1.4 User Manual}, + Year = {2004}} + +@manual{Spitzer1990, + Address = {Washington, DC}, + Author = {Spitzer, R.L. and Williams, J.B.W. and Gibbon, M.}, + Publisher = {American Psychiatric Press}, + Title = {Structured Clinical Interview for {DSM-III-R}}, + Year = {1990}} + +@article{Spitzer1992, + Abstract = {The history, rationale, and development of the Structured Clinical + Interview for DSM-III-R (SCID) is described. The SCID is a semistructured + interview for making the major Axis I DSM-III-R diagnoses. It is + administered by a clinician and includes an introductory overview + followed by nine modules, seven of which represent the major axis + I diagnostic classes. Because of its modular construction, it can + be adapted for use in studies in which particular diagnoses are not + of interest. Using a decision tree approach, the SCID guides the + clinician in testing diagnostic hypotheses as the interview is conducted. + The output of the SCID is a record of the presence or absence of + each of the disorders being considered, for current episode (past + month) and for lifetime occurrence.}, + Author = {R. L. Spitzer and J. B. Williams and M.
Gibbon and M. B. First}, + Institution = {Department of Psychiatry, Columbia University, New York, NY.}, + Journal = {Arch Gen Psychiatry}, + Keywords = {Decision Trees; History, 20th Century; Humans; Mental Disorders, classification/diagnosis; Psychiatric Status Rating Scales, history/instrumentation/statistics /&/ numerical data; Psychometrics; Reproducibility of Results; Terminology as Topic}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {8}, + Owner = {Young}, + Pages = {624--629}, + Pmid = {1637252}, + Timestamp = {2010.05.01}, + Title = {The Structured Clinical Interview for DSM-III-R (SCID). I: History, rationale, and description.}, + Volume = {49}, + Year = {1992}} + +@article{Srivareerat2009, + Abstract = {BACKGROUND: Alzheimer's disease (AD) is a degenerative disorder that + leads to progressive cognitive decline. Alzheimer's disease develops + as a result of over-production and aggregation of beta-amyloid (Abeta) + peptides in the brain. The reason for variation in the gravity of + symptoms among AD patients is unknown and might result from patient-related + factors including lifestyle. Individuals suffering from chronic stress + are at an increased risk for developing AD. This study investigated + the effect of chronic psychosocial stress in Abeta rat model of AD. + METHODS: Psychosocial stress was induced with a rat intruder model. + The rat model of AD was induced by 14-day osmotic pump infusion of + a mixture of 300 pmol/day Abeta(1-40)/Abeta(1-42). The effect of + chronic stress on the severity of Abeta-induced spatial learning + and memory impairment was tested by three approaches: behavioral + testing in the radial arm water maze, in vivo electrophysiological + recording in anesthetized rat, and immunoblot analysis to determine + protein levels of learning- and memory-related molecules. RESULTS: + A marked impairment of learning and memory developed when stress + was combined with Abeta, more so than that caused by Abeta alone. + Additionally, there was a significantly greater impairment of early-phase + long-term potentiation (E-LTP) in chronically stressed/Abeta-treated + rats than in either the stressed or Abeta-treated rats. This might + be a manifestation of the reduction in protein levels of calcium/calmodulin-dependent + protein kinase II (CaMKII) and the abnormal increase in calcineurin + levels. 
CONCLUSIONS: Chronic stress significantly intensified Abeta-induced + deficits of short-term memory and E-LTP by a mechanism involving + decreased CaMKII activation along with increased calcineurin levels.}, + Author = {Marisa Srivareerat and Trinh T Tran and Karem H Alzoubi and Karim A Alkadhi}, + Doi = {10.1016/j.biopsych.2008.08.021}, + Institution = {Department of Pharmacological and Pharmaceutical Sciences, College of Pharmacy, University of Houston, Houston, Texas 77204-5037, USA.}, + Journal = {Biol Psychiatry}, + Keywords = {Alzheimer Disease, chemically induced/complications; Amyloid beta-Protein, administration /&/ dosage; Analysis of Variance; Animals; Behavior, Animal; Biophysics; Calcineurin, metabolism; Calcium-Calmodulin-Dependent Protein Kinase Type 2, metabolism; Cognition Disorders, etiology/pathology; Disease Models, Animal; Down-Regulation, drug effects/physiology; Electric Stimulation; Excitatory Postsynaptic Potentials, drug effects/physiology; Hippocampus, physiopathology; Long-Term Potentiation, drug effects/physiology; Male; Maze Learning, physiology; Patch-Clamp Techniques; Peptide Fragments, administration /&/ dosage; Rats; Rats, Wistar; Stress, Psychological, complications/etiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {11}, + Owner = {Young}, + Pages = {918--926}, + Pii = {S0006-3223(08)01019-6}, + Pmid = {18849021}, + Timestamp = {2009.12.10}, + Title = {Chronic psychosocial stress exacerbates impairment of cognition and long-term potentiation in beta-amyloid rat model of Alzheimer's disease.}, + Url = {http://dx.doi.org/10.1016/j.biopsych.2008.08.021}, + Volume = {65}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.biopsych.2008.08.021}} + +@article{Stanis2008, + Abstract = {RATIONALE: Repeated amphetamine (AMPH) exposure is known to cause + long-term changes in AMPH-induced locomotor behavior (i.e., sensitization) + that are associated with similarly long-lasting changes in brain + function. It is not clear, however, if such exposure produces long-lasting + changes in a cognitive behavior that, in humans, is hypothesized + to contribute to addiction. OBJECTIVES: To examine whether repeated + AMPH exposure induces both locomotor sensitization and alters impulsive + choice in a delay-discounting task. MATERIALS AND METHODS: Adult, + male Sprague-Dawley rats (n = 29) were pretreated with 3.0 mg/kg + AMPH or saline every other day for 20 days and were then trained + to lever press for small, immediately delivered food reinforcement + or larger reinforcements delivered after delays. We subsequently + assessed the effects of acute AMPH (0.1-2.0 mg/kg) on delay-discounting. + Lastly, we tested for long-lasting effects of pretreatment by giving + an AMPH challenge (3.0 mg/kg) 1 week after the final delay-discounting + session. RESULTS: Repeated AMPH produced sensitization to the drug's + stereotypy-inducing effects but did not alter acquisition or baseline + behavior in the delay-discounting task. Following acute AMPH, impulsive + choice and other measures of delay-discounting were altered, but + to a similar extent in both saline- and AMPH-pretreated groups. The + AMPH challenge, given approximately 3 months after the last pretreatment + injection, revealed that sensitization was still evident. CONCLUSIONS: + Our results suggest that one behavioral consequence of repeated AMPH + exposure-sensitization-does not overlap with another potential outcome-increased + impulsivity. 
Furthermore, the neuroadaptations known to be associated + with sensitization may be somewhat distinct from those that lead + to changes in impulsive choice.}, + Author = {Jessica J Stanis and Hector Marquez Avila and Martin D White and Joshua M Gulley}, + Doi = {10.1007/s00213-008-1182-z}, + Institution = {Department of Psychology and Neuroscience Program, University of Illinois at Urbana-Champaign, 603 E Daniel Street, Champaign, IL 61820, USA.}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Amphetamine, adverse effects/pharmacology; Animals; Central Nervous System Stimulants, adverse effects/pharmacology; Choice Behavior, drug effects; Conditioning, Operant, drug effects; Food; Impulsive Behavior, psychology; Male; Motivation; Motor Activity, drug effects; Psychomotor Performance, drug effects; Rats; Rats, Sprague-Dawley; Reward; Stereotyped Behavior, drug effects; Substance Withdrawal Syndrome, psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {539--548}, + Pmid = {18473112}, + Timestamp = {2009.08.06}, + Title = {Dissociation between long-lasting behavioral sensitization to amphetamine and impulsive choice in rats performing a delay-discounting task.}, + Url = {http://dx.doi.org/10.1007/s00213-008-1182-z}, + Volume = {199}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-008-1182-z}} + +@article{Stanis2008a, + Abstract = {RATIONALE: Repeated amphetamine (AMPH) exposure is known to cause + long-term changes in AMPH-induced locomotor behavior (i.e., sensitization) + that are associated with similarly long-lasting changes in brain + function. It is not clear, however, if such exposure produces long-lasting + changes in a cognitive behavior that, in humans, is hypothesized + to contribute to addiction. OBJECTIVES: To examine whether repeated + AMPH exposure induces both locomotor sensitization and alters impulsive + choice in a delay-discounting task. MATERIALS AND METHODS: Adult, + male Sprague-Dawley rats (n = 29) were pretreated with 3.0 mg/kg + AMPH or saline every other day for 20 days and were then trained + to lever press for small, immediately delivered food reinforcement + or larger reinforcements delivered after delays. We subsequently + assessed the effects of acute AMPH (0.1-2.0 mg/kg) on delay-discounting. + Lastly, we tested for long-lasting effects of pretreatment by giving + an AMPH challenge (3.0 mg/kg) 1 week after the final delay-discounting + session. RESULTS: Repeated AMPH produced sensitization to the drug's + stereotypy-inducing effects but did not alter acquisition or baseline + behavior in the delay-discounting task. Following acute AMPH, impulsive + choice and other measures of delay-discounting were altered, but + to a similar extent in both saline- and AMPH-pretreated groups. The + AMPH challenge, given approximately 3 months after the last pretreatment + injection, revealed that sensitization was still evident. CONCLUSIONS: + Our results suggest that one behavioral consequence of repeated AMPH + exposure-sensitization-does not overlap with another potential outcome-increased + impulsivity. 
Furthermore, the neuroadaptations known to be associated + with sensitization may be somewhat distinct from those that lead + to changes in impulsive choice.}, + Author = {Jessica J Stanis and Hector Marquez Avila and Martin D White and Joshua M Gulley}, + Doi = {10.1007/s00213-008-1182-z}, + Institution = {Department of Psychology and Neuroscience Program, University of Illinois at Urbana-Champaign, 603 E Daniel Street, Champaign, IL 61820, USA.}, + Journal = {Psychopharmacology (Berl)}, + Keywords = {Amphetamine, adverse effects/pharmacology; Animals; Central Nervous System Stimulants, adverse effects/pharmacology; Choice Behavior, drug effects; Conditioning, Operant, drug effects; Food; Impulsive Behavior, psychology; Male; Motivation; Motor Activity, drug effects; Psychomotor Performance, drug effects; Rats; Rats, Sprague-Dawley; Reward; Stereotyped Behavior, drug effects; Substance Withdrawal Syndrome, psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {539--548}, + Pmid = {18473112}, + Timestamp = {2009.08.06}, + Title = {Dissociation between long-lasting behavioral sensitization to amphetamine and impulsive choice in rats performing a delay-discounting task.}, + Url = {http://dx.doi.org/10.1007/s00213-008-1182-z}, + Volume = {199}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1007/s00213-008-1182-z}} + +@article{Stashinko2004, + Author = {Stashinko, E. E. and Clegg, N. J. and Kammann, H. A. and Sweet, V. T. and Delgado, M. R. and Hahn, J. S. and Levey, E. B.}, + Journal = {Am. J. Med. Genet. A}, + Month = {Jul}, + Pages = {114--119}, + Title = {{{A} retrospective survey of perinatal risk factors of 104 living children with holoprosencephaly}}, + Volume = {128A}, + Year = {2004}} + +@article{Steelinpress, + Author = {Steel, D.}, + Journal = {Synthese}, + Pages = {?--?}, + Title = {{B}ayesian Confirmation Theory and the Likelihood Principle}, + Volume = {?}, + Year = {in press}} + +@article{Steel2003, + Author = {Steel, D.}, + Journal = {Erkenntnis}, + Pages = {213--227}, + Title = {A {B}ayesian Way to Make Stopping Rules Matter}, + Volume = {58}, + Year = {2003}} + +@book{Stein1993, + Address = {Cambridge, MA}, + Author = {Stein, B.E. and Meredith, M.A. and Wolf, S.}, + Publisher = {MIT Press}, + Title = {{The merging of the senses}}, + Year = {1993}} + +@book{Stein1993a, + Address = {Cambridge, MA}, + Author = {Stein, B.E. and Meredith, M.A. and Wolf, S.}, + Publisher = {MIT Press}, + Title = {{The merging of the senses}}, + Year = {1993}} + +@article{Stein1998, + Abstract = {OBJECTIVE: Nicotine is a highly addictive substance, and cigarette + smoking is a major cause of premature death among humans. Little + is known about the neuropharmacology and sites of action of nicotine + in the human brain. Such knowledge might help in the development + of new behavioral and pharmacological therapies to aid in treating + nicotine dependence and to improve smoking cessation success rates. + METHOD: Functional magnetic resonance imaging, a real-time imaging + technique, was used to determine the acute CNS effects of intravenous + nicotine in 16 active cigarette smokers. An injection of saline followed + by injections of three doses of nicotine (0.75, 1.50, and 2.25 mg/70 + kg of weight) were each administered intravenously over 1-minute + periods in an ascending, cumulative-dosing paradigm while whole brain + gradient-echo, echo-planar images were acquired every 6 seconds during + consecutive 20-minute trials.
RESULTS: Nicotine induced a dose-dependent + increase in several behavioral parameters, including feelings of + "rush" and "high" and drug liking. Nicotine also induced a dose-dependent + increase in neuronal activity in a distributed system of brain regions, + including the nucleus accumbens, amygdala, cingulate, and frontal + lobes. Activation in these structures is consistent with nicotine's + behavior-arousing and behavior-reinforcing properties in humans. + CONCLUSIONS: The identified brain regions have been previously shown + to participate in the reinforcing, mood-elevating, and cognitive + properties of other abused drugs such as cocaine, amphetamine, and + opiates, suggesting that nicotine acts similarly in the human brain + to produce its reinforcing and dependence properties.}, + Author = {E. A. Stein and J. Pankiewicz and H. H. Harsch and J. K. Cho and S. A. Fuller and R. G. Hoffmann and M. Hawkins and S. M. Rao and P. A. Bandettini and A. S. Bloom}, + Institution = {Department of Psychiatry, Biophysics Research Institute, Medical College of Wisconsin, Milwaukee 53226, USA. estein@mcw.edu}, + Journal = {Am J Psychiatry}, + Keywords = {Adolescent; Adult; Affect, drug effects; Brain, anatomy /&/ histology/drug effects/physiology; Cerebral Cortex, anatomy /&/ histology/drug effects/physiology; Cognition, drug effects; Dose-Response Relationship, Drug; Female; Humans; Injections, Intravenous; Limbic System, anatomy /&/ histology/drug effects/physiology; Magnetic Resonance Imaging; Male; Nicotine, blood/pharmacokinetics/pharmacology; Receptors, Nicotinic, drug effects/physiology; Reinforcement (Psychology); Smoking Cessation; Smoking, prevention /&/ control; Tobacco Use Disorder, therapy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {8}, + Owner = {Woo-Young Ahn}, + Pages = {1009--1015}, + Pmid = {9699686}, + Timestamp = {2009.08.04}, + Title = {Nicotine-induced limbic cortical activation in the human brain: a functional MRI study.}, + Volume = {155}, + Year = {1998}} + +@article{Stein2007, + Author = {Stein, M. B. and Simmons, A. N. and Feinstein, J. S. and Paulus, M. P.}, + Journal = {Am J Psychiatry}, + Month = {Feb}, + Pages = {318--327}, + Title = {{{I}ncreased amygdala and insula activation during emotion processing in anxiety-prone subjects}}, + Volume = {164}, + Year = {2007}} + +@article{Steingard1996, + Author = {Steingard, R. J. and Renshaw, P. F. and Yurgelun-Todd, D. and Appelmans, K. E. and Lyoo, I. K. and Shorrock, K. L. and Bucci, J. P. and Cesena, M. and Abebe, D. and Zurakowski, D. and Poussaint, T. Y. and Barnes, P.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Mar}, + Pages = {307--311}, + Title = {{{S}tructural abnormalities in brain magnetic resonance images of depressed children}}, + Volume = {35}, + Year = {1996}} + +@article{Steinmetz2000, + Author = {Steinmetz, J. E. and Blankenship, M. R. and Green, J. T. and Smith, G. B. and Finn, P. R.}, + Journal = {Prog. Neuropsychopharmacol. Biol. Psychiatry}, + Month = {Aug}, + Pages = {1025--1039}, + Title = {{{E}valuation of behavioral disinhibition in {P}/{N}{P} and {H}{A}{D}1/{L}{A}{D}1 rats}}, + Volume = {24}, + Year = {2000}} + +@article{Stephens2004, + Abstract = {Observed animal impulsiveness challenges ideas from foraging theory + about the fitness value of food rewards, and may play a role in important + behavioural phenomena such as cooperation and addiction. 
Behavioural + ecologists usually invoke temporal discounting to explain the evolution + of animal impulsiveness. According to the discounting hypothesis, + delay reduces the fitness value of the delayed food. We develop an + alternative model for the evolution of impulsiveness that does not + require discounting. We show that impulsive or short-sighted rules + can maximize long-term rates of food intake. The advantages of impulsive + rules come from two sources. First, naturally occurring choices have + a foreground-background structure that reduces the long-term cost + of impulsiveness. Second, impulsive rules have a discrimination advantage + because they tend to compare smaller quantities. Discounting contributes + little to this result. Although we find that impulsive rules are + optimal in a simple foreground-background choice situation in the + absence of discounting, in contrast we do not find comparable impulsiveness + in binary choice situations even when there is strong discounting.}, + Doi = {10.1098/rspb.2004.2871}, + Institution = {Department of Ecology, Evolution, and Behavior, University of Minnesota, 1987 Upper Buford Circle, St Paul, MN 55108-6097, USA. dws@forager.cbs.umn.edu}, + Journal = {Proc Biol Sci}, + Keywords = {Animals; Ecology; Evolution; Feeding Behavior, physiology; Impulsive Behavior; Models, Biological}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {1556}, + Owner = {Woo-Young Ahn}, + Pages = {2459--2465}, + Pii = {TU0JP5HKEFFUPLCG}, + Pmid = {15590596}, + Timestamp = {2009.08.06}, + Title = {Impulsiveness without discounting: the ecological rationality hypothesis.}, + Url = {http://dx.doi.org/10.1098/rspb.2004.2871}, + Volume = {271}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1098/rspb.2004.2871}} + +@article{Stephens2004a, + Abstract = {Observed animal impulsiveness challenges ideas from foraging theory + about the fitness value of food rewards, and may play a role in important + behavioural phenomena such as cooperation and addiction. Behavioural + ecologists usually invoke temporal discounting to explain the evolution + of animal impulsiveness. According to the discounting hypothesis, + delay reduces the fitness value of the delayed food. We develop an + alternative model for the evolution of impulsiveness that does not + require discounting. We show that impulsive or short-sighted rules + can maximize long-term rates of food intake. The advantages of impulsive + rules come from two sources. First, naturally occurring choices have + a foreground-background structure that reduces the long-term cost + of impulsiveness. Second, impulsive rules have a discrimination advantage + because they tend to compare smaller quantities. Discounting contributes + little to this result. Although we find that impulsive rules are + optimal in a simple foreground-background choice situation in the + absence of discounting, in contrast we do not find comparable impulsiveness + in binary choice situations even when there is strong discounting.}, + Doi = {10.1098/rspb.2004.2871}, + Institution = {Department of Ecology, Evolution, and Behavior, University of Minnesota, 1987 Upper Buford Circle, St Paul, MN 55108-6097, USA. 
dws@forager.cbs.umn.edu}, + Journal = {Proc Biol Sci}, + Keywords = {Animals; Ecology; Evolution; Feeding Behavior, physiology; Impulsive Behavior; Models, Biological}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {1556}, + Owner = {Woo-Young Ahn}, + Pages = {2459--2465}, + Pii = {TU0JP5HKEFFUPLCG}, + Pmid = {15590596}, + Timestamp = {2009.08.06}, + Title = {Impulsiveness without discounting: the ecological rationality hypothesis.}, + Url = {http://dx.doi.org/10.1098/rspb.2004.2871}, + Volume = {271}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1098/rspb.2004.2871}} + +@article{Stern2005, + Author = {Stern, H.}, + Journal = {Psychological Methods}, + Pages = {494--499}, + Title = {Model Inference or Model Selection: {D}iscussion of {K}lugkist, {L}audy, and {H}oijtink (2005)}, + Volume = {10}, + Year = {2005}} + +@article{Stetson2006, + Author = {Stetson, C. and Cui, X. and Montague, P. R. and Eagleman, D. M.}, + Journal = {Neuron}, + Month = {Sep}, + Pages = {651--659}, + Title = {{{M}otor-sensory recalibration leads to an illusory reversal of action and sensation}}, + Volume = {51}, + Year = {2006}} + +@article{Stewart1983, + Author = {Stewart, I. N. and Peregoy, P. L.}, + Journal = {Psychological Bulletin}, + Pages = {336--362}, + Title = {Catastrophe Theory Modeling in Psychology}, + Volume = {94}, + Year = {1983}} + +@article{Stewart1984, + Author = {Stewart, J. and De Wit, H. and Eikelboom, R.}, + Journal = {Psychol Rev}, + Number = {2}, + Pages = {251--268}, + Title = {{Role of unconditioned and conditioned drug effects in the self-administration of opiates and stimulants}}, + Volume = {91}, + Year = {1984}} + +@article{Stewart1995, + Author = {Stewart, S. H. and Finn, P. R. and Pihl, R. O.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jun}, + Pages = {261--267}, + Title = {{{A} dose-response study of the effects of alcohol on the perceptions of pain and discomfort due to electric shock in men at high familial-genetic risk for alcoholism}}, + Volume = {119}, + Year = {1995}} + +@article{Stewart1992, + Author = {Stewart, S. H. and Finn, P. R. and Pihl, R. O.}, + Journal = {J. Stud. Alcohol}, + Month = {Sep}, + Pages = {499--506}, + Title = {{{T}he effects of alcohol on the cardiovascular stress response in men at high risk for alcoholism: a dose response study}}, + Volume = {53}, + Year = {1992}} + +@article{Stewart1999, + Author = {Stewart, W. F. and Schwartz, B. S. and Simon, D. and Bolla, K. I. and Todd, A. C. and Links, J.}, + Journal = {Neurology}, + Month = {May}, + Pages = {1610--1617}, + Title = {{{N}eurobehavioral function and tibial and chelatable lead levels in 543 former organolead workers}}, + Volume = {52}, + Year = {1999}} + +@article{Steyvers2009, + Author = {Steyvers, M. and Lee, M.D. and Wagenmakers, E.J.}, + Journal = {Journal of Mathematical Psychology}, + Number = {3}, + Pages = {168--179}, + Publisher = {Elsevier}, + Title = {{A Bayesian analysis of human decision-making on bandit problems}}, + Volume = {53}, + Year = {2009}} + +@article{Stoletniy1999, + Author = {Stoletniy, L. N. and Pai, S. M. and Platt, M. L. and Torres, V. I. and Pai, R.
G.}, + Journal = {J Electrocardiol}, + Month = {Apr}, + Pages = {173--177}, + Title = {{{Q}{T} dispersion as a noninvasive predictor of inducible ventricular tachycardia}}, + Volume = {32}, + Year = {1999}} + +@article{Stone1977, + Author = {Stone, M.}, + Journal = {Journal of the Royal Statistical Society Series B}, + Pages = {44--47}, + Title = {An Asymptotic Equivalence of Choice of Model by Cross--validation and {A}kaike's Criterion}, + Volume = {39}, + Year = {1977}} + +@article{Stone1977a, + Author = {Stone, M.}, + Journal = {Biometrika}, + Pages = {29--35}, + Title = {Asymptotics for and Against Cross--validation}, + Volume = {64}, + Year = {1977}} + +@article{Stone1974, + Author = {Stone, M.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {111--147}, + Title = {Cross--validatory choice and assessment of statistical predictions (with discussion)}, + Volume = {36}, + Year = {1974}} + +@article{Stone1960, + Author = {Stone, M.}, + Journal = {Psychometrika}, + Pages = {251--260}, + Title = {Models for Choice--Reaction Time}, + Volume = {25}, + Year = {1960}} + +@article{Stout2004, + Author = {Stout, J. C. and Busemeyer, J. R. and Lin, A. and Grant, S. J. and Bonson, K. R.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {742--747}, + Title = {Cognitive Modeling Analysis of Decision--Making Processes in Cocaine Abusers}, + Volume = {11}, + Year = {2004}} + +@article{Stout2005, + Author = {Stout, J. C. and Rock, S. L. and Campbell, M. C. and Busemeyer, J. R. and Finn, P. R.}, + Journal = {Psychol Addict Behav}, + Month = {Jun}, + Pages = {148--157}, + Title = {{{P}sychological processes underlying risky decisions in drug abusers}}, + Volume = {19}, + Year = {2005}} + +@article{Stroehle2008, + Author = {Str{\"o}hle, A. and Stoy, M. and Wrase, J. and Schwarzer, S. and Schlagenhauf, F. and Huss, M. and Hein, J. and Nedderhut, A. and Neumann, B. and Gregor, A. and Juckel, G. and Knutson, B. and Lehmkuhl, U. and Bauer, M. and Heinz, A.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {966--972}, + Title = {{{R}eward anticipation and outcomes in adult males with attention-deficit/hyperactivity disorder}}, + Volume = {39}, + Year = {2008}} + +@article{Strathearn2008, + Author = {Strathearn, L. and Li, J. and Fonagy, P. and Montague, P. R.}, + Journal = {Pediatrics}, + Month = {Jul}, + Pages = {40--51}, + Title = {{{W}hat's in a smile? {M}aternal brain responses to infant facial cues}}, + Volume = {122}, + Year = {2008}} + +@article{Stratonovich1966, + Author = {Stratonovich, R. L.}, + Journal = {SIAM Journal on Control}, + Pages = {362--371}, + Title = {A New Representation for Stochastic Integrals and Equations}, + Volume = {4}, + Year = {1966}} + +@article{Strigo2008, + Author = {Strigo, I. A. and Simmons, A. N. and Matthews, S. C. and Craig, A. D. and Paulus, M. P.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Nov}, + Pages = {1275--1284}, + Title = {{{A}ssociation of major depressive disorder with altered functional brain response during anticipation and processing of heat pain}}, + Volume = {65}, + Year = {2008}} + +@article{Strigo2008a, + Author = {Strigo, I. A. and Simmons, A. N. and Matthews, S. C. and Craig, A. D. and Paulus, M.
P.}, + Journal = {Psychosom Med}, + Month = {Apr}, + Pages = {338--344}, + Title = {{{I}ncreased affective bias revealed using experimental graded heat stimuli in young depressed adults: evidence of "emotional allodynia"}}, + Volume = {70}, + Year = {2008}} + +@article{Strube2006, + Author = {Strube, M. J.}, + Journal = {Behavior Research Methods}, + Pages = {24--27}, + Title = {{SNOOP}: {A} Program for Demonstrating the Consequences of Premature and Repeated Null Hypothesis Testing}, + Volume = {38}, + Year = {2006}} + +@book{Stuart1999, + Address = {London}, + Author = {Stuart, A. and Ord, J. K. and Arnold, S.}, + Publisher = {Arnold}, + Title = {{K}endall's Advanced Theory of Statistics Vol. 2{A}: Classical Inference \& the Linear Model (6th ed.)}, + Year = {1999}} + +@article{Suh2009, + Author = {Suh, J. J. and Langleben, D. D. and Ehrman, R. N. and Hakun, J. G. and Wang, Z. and Li, Y. and Busch, S. I. and O'Brien, C. P. and Childress, A. R.}, + Journal = {Drug Alcohol Depend}, + Month = {Jan}, + Pages = {11--17}, + Title = {{{L}ow prefrontal perfusion linked to depression symptoms in methadone-maintained opiate-dependent patients}}, + Volume = {99}, + Year = {2009}} + +@article{Sung2004, + Author = {Sung, M. and Erkanli, A. and Angold, A. and Costello, E.J.}, + Journal = {Drug and alcohol dependence}, + Number = {3}, + Pages = {287--299}, + Publisher = {Elsevier}, + Title = {{Effects of age at first substance use and psychiatric comorbidity on the development of substance use disorders}}, + Volume = {75}, + Year = {2004}} + +@article{Sung2002, + Author = {Sung, S. M. and Kim, J. H. and Yang, E. and Abrams, K. Y. and Lyoo, I. K.}, + Journal = {Compr Psychiatry}, + Pages = {235--243}, + Title = {{{R}eliability and validity of the {K}orean version of the {T}emperament and {C}haracter {I}nventory}}, + Volume = {43}, + Year = {2002}} + +@article{Sung2007, + Author = {Sung, Y. H. and Cho, S. C. and Hwang, J. and Kim, S. J. and Kim, H. and Bae, S. and Kim, N. and Chang, K. H. and Daniels, M. and Renshaw, P. F. and Lyoo, I. K.}, + Journal = {Drug Alcohol Depend}, + Month = {Apr}, + Pages = {28--35}, + Title = {{{R}elationship between {N}-acetyl-aspartate in gray and white matter of abstinent methamphetamine abusers and their history of drug abuse: a proton magnetic resonance spectroscopy study}}, + Volume = {88}, + Year = {2007}} + +@book{Sutton1998, + Author = {Sutton, R. S. and Barto, A. G.}, + Owner = {Wooyoung Ahn}, + Publisher = {Cambridge, MA: MIT Press}, + Timestamp = {2007.05.04}, + Title = {Reinforcement learning: An introduction}, + Year = {1998}} + +@book{Sutton1998a, + Address = {Cambridge (MA)}, + Author = {Sutton, R. S. and Barto, A.
G.}, + Publisher = {The {MIT} Press}, + Title = {Reinforcement Learning: {A}n introduction}, + Year = {1998}} + +@article{Swainson2000, + Author = {Swainson, R. and Rogers, R. D. and Sahakian, B. J. and Summers, B. A. and Polkey, C. E. and Robbins, T. W.}, + Journal = {Neuropsychologia}, + Pages = {596--612}, + Title = {{{P}robabilistic learning and reversal deficits in patients with {P}arkinson's disease or frontal or temporal lobe lesions: possible adverse effects of dopaminergic medication}}, + Volume = {38}, + Year = {2000}} + +@article{Swann2009a, + Abstract = {Impulsivity, a breakdown in the balance between initiation and screening + of action that leads to reactions to stimuli without adequate reflection + or regard for consequences, is a core feature of bipolar disorder + and is prominent in manic episodes. Catecholaminergic function is + related to impulsivity and mania. Manic individuals have abnormal + dopaminergic reactions to reward and abnormal responses in the ventral + prefrontal cortex that are consistent with impulsive behavior. Impulsivity + in mania is pervasive, encompassing deficits in attention and behavioral + inhibition. Impulsivity is increased with severe course of illness + (eg, frequent episodes, substance use disorders, and suicide attempts). + In mixed states, mania-associated impulsivity combines with depressive + symptoms to increase the risk of suicide. Clinical management of + impulsivity in mania involves addressing interpersonal distortions + inherent in mania; reducing overstimulation; alertness to medical-, + trauma-, or substance-related problems; and prompt pharmacologic + treatment. Manic episodes must be viewed in the context of the life + course of bipolar disorder.}, + Author = {Alan C Swann}, + Institution = {Department of Psychiatry and Behavioral Sciences, The University of Texas Health Science Center at Houston, 1300 Moursund Street, Room 270, Houston, TX 77030, USA. alan.c.swann@uth.tmc.edu}, + Journal = {Curr Psychiatry Rep}, + Keywords = {Bipolar Disorder, complications/metabolism; Brain, metabolism; Dopamine, metabolism; Humans; Impulse Control Disorders, complications/metabolism; Impulsive Behavior, complications/metabolism; Norepinephrine, metabolism; Reward; Suicide, Attempted; Synaptic Transmission}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {6}, + Owner = {Young}, + Pages = {481--487}, + Pmid = {19909671}, + Timestamp = {2010.05.01}, + Title = {Impulsivity in mania.}, + Volume = {11}, + Year = {2009}} + +@article{Swanson2007, + Author = {Swanson, J. M. and Elliott, G. R. and Greenhill, L. L. and Wigal, T. and Arnold, L. E. and Vitiello, B. and Hechtman, L. and Epstein, J. N. and Pelham, W. E. and Abikoff, H. B. and Newcorn, J. H. and Molina, B. S. and Hinshaw, S. P. and Wells, K. C. and Hoza, B. and Jensen, P. S. and Gibbons, R. D. and Hur, K. and Stehli, A. and Davies, M. and March, J. S. and Conners, C. K. and Caron, M. and Volkow, N. D.}, + Journal = {J Am Acad Child Adolesc Psychiatry}, + Month = {Aug}, + Pages = {1015--1027}, + Title = {{{E}ffects of stimulant medication on growth rates across 3 years in the {M}{T}{A} follow-up}}, + Volume = {46}, + Year = {2007}} + +@article{Swanson2009, + Author = {Swanson, J. M. and Volkow, N. D.}, + Journal = {J Child Psychol Psychiatry}, + Month = {Jan}, + Pages = {180--193}, + Title = {{{P}sychopharmacology: concepts and opinions about the use of stimulant medications}}, + Volume = {50}, + Year = {2009}} + +@article{Swanson2008, + Author = {Swanson, J. M. 
and Volkow, N. D.}, + Journal = {Nature}, + Month = {May}, + Pages = {586}, + Title = {{{I}ncreasing use of stimulants warns of potential abuse}}, + Volume = {453}, + Year = {2008}} + +@article{Swanson2003, + Author = {Swanson, J. M. and Volkow, N. D.}, + Journal = {Neurosci Biobehav Rev}, + Month = {Nov}, + Pages = {615--621}, + Title = {{{S}erum and brain concentrations of methylphenidate: implications for use and abuse}}, + Volume = {27}, + Year = {2003}} + +@article{Swanson2002, + Author = {Swanson, J. M. and Volkow, N. D.}, + Journal = {Behav. Brain Res.}, + Month = {Mar}, + Pages = {73--78}, + Title = {{{P}harmacokinetic and pharmacodynamic properties of stimulants: implications for the design of new treatments for {A}{D}{H}{D}}}, + Volume = {130}, + Year = {2002}} + +@article{Sweitzer2008a, + Abstract = {Impulsivity has been identified as a behavioral precursor to addiction, + and may be the manifestation of a neurological vulnerability. The + present study investigated whether individual differences in impulsivity + were associated with performance on the Iowa Gambling Task (IGT, + a test of emotional decision making thought to be associated in part + with ventromedial prefrontal cortex function) and the Wisconsin Card + Sorting Task (WCST, a set-shifting task thought to be associated in part + with dorsolateral prefrontal cortex function). Subjects were screened + for impulsivity using the BIS-11 (self-report) and a delay discounting + questionnaire (a behavioral measure of impulsivity). High impulsivity + was associated with poorer performance on the final block of trials + of the IGT but was not significantly related to WCST performance. + Both measures were significantly correlated with scores on the BIS. + These results provide support for the hypothesis that, in a nonclinical + sample, impulsivity may vary systematically with performance on neuropsychological + indicators of prefrontal function.}, + Author = {Maggie M Sweitzer and Philip A Allen and Kevin P Kaut}, + Doi = {10.1017/S1355617708080934}, + Institution = {Department of Psychology, University of Akron, Akron, Ohio, USA. mms74@pitt.edu}, + Journal = {J Int Neuropsychol Soc}, + Keywords = {Adolescent; Adult; Decision Making, physiology; Emotions, physiology; Female; Humans; Impulsive Behavior, physiopathology; Individuality; Male; Middle Aged; Neuropsychological Tests; Predictive Value of Tests; Questionnaires; Sex Factors; Young Adult}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Sep}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {878--882}, + Pii = {S1355617708080934}, + Pmid = {18764983}, + Timestamp = {2009.08.06}, + Title = {Relation of individual differences in impulsivity to nonclinical emotional decision making.}, + Url = {http://dx.doi.org/10.1017/S1355617708080934}, + Volume = {14}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1017/S1355617708080934}} + +@article{Sweitzer2008, + Author = {Maggie M Sweitzer and Eric C Donny and Lisa C Dierker and Janine D Flory and Stephen B Manuck}, + Doi = {10.1080/14622200802323274}, + Institution = {Department of Psychology, University of Pittsburgh, Pittsburgh, PA 15260, USA.}, + Journal = {Nicotine Tob Res}, + Keywords = {Adult; Behavior, Addictive, psychology; Choice Behavior; Female; Humans; Male; Personal Autonomy; Psychometrics; Questionnaires; Reinforcement (Psychology); Reproducibility of Results; Self Efficacy; Smoking, psychology; Social Environment; Tobacco Use Disorder, diagnosis/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {10}, + Owner = {Woo-Young Ahn}, + Pages = {1571--1575}, + Pii = {904706261}, + Pmid = {18946776}, + Timestamp = {2009.08.06}, + Url = {http://dx.doi.org/10.1080/14622200802323274}, + Volume = {10}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1080/14622200802323274}} + +@article{Swensson1972, + Author = {Swensson, R.
G.}, + Journal = {Perception \& Psychophysics}, + Pages = {16--32}, + Title = {The Elusive Tradeoff: {S}peed Versus Accuracy in Visual Discrimination Tasks}, + Volume = {12}, + Year = {1972}} + +@article{Takahashi2006, + Abstract = {Impulsivity in drug addicts have been associated with impatience in + intertemporal choice, i.e., high degrees to which delayed rewards + are discounted, indicating the importance of reducing the degree + of discounting in drug addicts. Intertemporal choice (delay discounting) + has been attracting attention in neuropsychopharmacology and behavioral + neuroeconomics. Recently, behavioral economists have reported that + impatience/impulsivity in intertemporal choice is increased if a + delay period is presented as a sum of divided time-blocks, which + is referred to as subadditive discounting (i.e., "total patience" + over the delay period is larger than the "sum of patience" over divided + delay periods). This finding implies that abstinent drug addicts + may more readily relapse into addiction if an abstinence period is + presented as a series of shorter abstinent periods, rather than a + single block of a long abstinence period. Therefore, understanding + of neuropsychological processing underlying subadditive discounting + is important for establishing medical treatments of drug addiction, + although to date, no study has addressed this question. In this study, + we propose that time-estimation following Weber-Fechner law, formerly + introduced for explaining hyperbolic discounting, may also explain + subadditive discounting. Our present hypothesis also predicts that + possibility of relapse into drug dependence can be decreased by helping + abstinent patients to perceive time-duration of an abstinence/withdrawal + period precisely.}, + Author = {Taiki Takahashi}, + Doi = {10.1016/j.mehy.2006.05.056}, + Institution = {Department of Cognitive and Behavioral Science, The University of Tokyo, 3-8-1 Komaba, Meguro, Tokyo 153-8902, Japan. taiki@ecs.c.u-tokyo.ac.jp}, + Journal = {Med Hypotheses}, + Keywords = {Behavior, Addictive, psychology; Choice Behavior, physiology; Humans; Impulse Control Disorders, psychology; Models, Psychological; Substance-Related Disorders, physiopathology; Time Perception, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {6}, + Owner = {Woo-Young Ahn}, + Pages = {1372--1374}, + Pii = {S0306-9877(06)00428-2}, + Pmid = {16872753}, + Timestamp = {2009.08.06}, + Title = {Time-estimation error following Weber-Fechner law may explain subadditive time-discounting.}, + Url = {http://dx.doi.org/10.1016/j.mehy.2006.05.056}, + Volume = {67}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.mehy.2006.05.056}} + +@article{Takahashi2006a, + Abstract = {Intertemporal and probabilistic decision-making has been studied in + psychiatry, ecology, and neuroeconomics. Because drug addicts and + psycopaths often make risky decisions (e.g., drug misuse and aggression), + investigation into types of impulsivity in intertemporal and probabilistic + choices (delay and probability discounting) are important for psychiatric + treatments. Studies in behavioral ecology proposed that delay and + probability discounting are mediated by the same psychological process, + because a decrease in probability of winning corresponds to an increase + in delay until winning. According to this view, odds-against winning + (=1/p-1) in probabilistic choice corresponds to delay in intertemporal + choice. 
This hypothesis predicts that preference of gambling (low + degree of probability discounting) may be associated with patience, + rather than impulsivity or impatience, in intertemporal choice (low + degree of delay discounting). However, recent empirical evidence + in psychiatric research employing pathological gamblers indicates + that pathological gamblers are impulsive in intertemporal choice + (high degrees of delay discounting). However, a hyperbolic discounting + function (usually adopted to explain intertemporal choice) with odds-against + (instead of delay) explain experimental data in probabilistic choice + dramatically well. Therefore, an alternative explanation is required + for the hypothetical equivalence of odds-against to delay. We propose + that queuing theory (often adopted for analyzing computer network + traffic) under a competitive social foraging condition may explain + the equivalence. Our hypothesis may help understand impulsivity of + psychiatrics in social behavior (e.g., aggression and antisocial + behavior) in addition to non-social impulsivity in reward-seeking + (e.g., substance misuse).}, + Author = {Taiki Takahashi}, + Doi = {10.1016/j.mehy.2006.02.016}, + Institution = {Department of Behavioral Science, Hokkaido University, Sapporo 060-0810, Japan. taikitakahashi_neuroeconomics@hotmail.com}, + Journal = {Med Hypotheses}, + Keywords = {Behavior, Addictive, psychology; Decision Making; Humans; Impulse Control Disorders, psychology; Models, Psychological; Probability; Social Environment; Systems Theory}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {276--279}, + Pii = {S0306-9877(06)00130-7}, + Pmid = {16574335}, + Timestamp = {2009.08.06}, + Title = {Queuing theory under competitive social foraging may explain a mathematical equivalence of delay and probability in impulsive decision-making.}, + Url = {http://dx.doi.org/10.1016/j.mehy.2006.02.016}, + Volume = {67}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.mehy.2006.02.016}} + +@article{Takahashi2007, + Abstract = {OBJECTIVES: Alcohol dependence has been associated with disrupted + neuroendocrine systems, impulsivity in intertemporal choice (delay + discounting). However, little is known regarding stability of discount + rates in alcoholics. This study examined both differential stability + (stability of individual differences) and absolute stability (stability + of group mean) of hyperbolic discount rates for monetary gains in + severe alcoholic inpatients (diagnosed with DSM-IV) over a 2-month + period during abstinence. METHODS: We estimated male alcoholics' + discount rates for delayed monetary rewards on the basis of their + pattern of choices between smaller immediate rewards (1,100-8,000 + yen) and larger, delayed rewards (2,500-8,500 yen; at delays from + 1 week to 6 months), two times at 2-month time-interval during hospitalized + alcohol withdrawal. RESULTS: It was observed that the alcoholics' + mean hyperbolic discount rates for gains had both differential and + absolute stability over 2 months, although a slight non-significant + decrease in the group mean of the discount rates was observed. CONCLUSIONS: + The results indicate that abstinent alcoholic's discount rates are + stable over a relatively long-term period. The usefulness of assessing + discount rates of addicts in psychoneuroendocrinology and neuroeconomics + of addiction is discussed.}, + Author = {Taiki Takahashi and Aizo Furukawa and Tomohiro Miyakawa and Hitoshi Maesato and Susumu Higuchi}, + Institution = {Department of Cognitive and Behavioral Science, Graduate School of Arts and Sciences, University of Tokyo, Tokyo, Japan. taiki@ecs.c.u-tokyo.ac.jp}, + Journal = {Neuro Endocrinol Lett}, + Keywords = {Alcoholism, prevention /&/ control/psychology; Choice Behavior, physiology; Humans; Impulsive Behavior, etiology; Inpatients; Male; Middle Aged; Reward; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {131--136}, + Pii = {NEL280207A05}, + Pmid = {17435675}, + Timestamp = {2009.08.06}, + Title = {Two-month stability of hyperbolic discount rates for delayed monetary gains in abstinent inpatient alcoholics.}, + Volume = {28}, + Year = {2007}} + +@article{Takanashi2003, + Author = {Takanashi, J. and Barkovich, A. J. and Clegg, N. J. and Delgado, M. R.}, + Journal = {AJNR Am J Neuroradiol}, + Month = {Mar}, + Pages = {394--397}, + Title = {{{M}iddle interhemispheric variant of holoprosencephaly associated with diffuse polymicrogyria}}, + Volume = {24}, + Year = {2003}} + +@article{Talmi2008, + Author = {Talmi, D. and Seymour, B. and Dayan, P. and Dolan, R. J.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {360--368}, + Title = {{{H}uman pavlovian-instrumental transfer}}, + Volume = {28}, + Year = {2008}} + +@article{Tamaki2003, + Author = {Tamaki, T. and Torii, T. and Meada, K.}, + Journal = {Physical Review D}, + Pages = {24028}, + Title = {Stability Analysis of Black Holes via a Catastrophe Theory and Black Hole Thermodynamics in Generalized Theories of Gravity}, + Volume = {68}, + Year = {2003}} + +@article{Tamminga2006, + Author = {Tamminga, C. A. and Nestler, E. J.}, + Journal = {Am J Psychiatry}, + Month = {Feb}, + Pages = {180--181}, + Title = {{{P}athological gambling: focusing on the addiction, not the activity}}, + Volume = {163}, + Year = {2006}} + +@article{Tanabe2007, + Author = {Tanabe, J. and Thompson, L. and Claus, E. and Dalwani, M. and Hutchison, K. and Banich, M.
T.}, + Journal = {Human Brain Mapping}, + Number = {12}, + Pages = {1276}, + Publisher = {John Wiley \& Sons, Ltd}, + Title = {{Prefrontal cortex activity is reduced in gambling and nongambling substance users during decision-making}}, + Volume = {28}, + Year = {2007}} + +@article{Tanaka2008, + Author = {Tanaka, S. C. and Balleine, B. W. and O'Doherty, J. P.}, + Journal = {J. Neurosci.}, + Month = {Jun}, + Pages = {6750--6755}, + Title = {{{C}alculating consequences: brain systems that encode the causal effects of actions}}, + Volume = {28}, + Year = {2008}} + +@article{Tanaka2004, + Author = {Tanaka, Saori C. and Doya, Kenji and Okada, Go and Ueda, Kazutaka and Okamoto, Yasumasa and Yamawaki, Shigeto}, + Journal = {Nature Neuroscience}, + Number = {8}, + Owner = {WooYoung Ahn}, + Pages = {887--893}, + Timestamp = {2007.12.12}, + Title = {Prediction of immediate and future rewards differently recruits cortico-basal ganglia loops}, + Volume = {7}, + Year = {2004}} + +@article{Tanda1997, + Author = {Tanda, G. and Pontieri, F. E. and Di Chiara, G.}, + Journal = {Science}, + Number = {5321}, + Pages = {2048}, + Publisher = {AAAS}, + Title = {{Cannabinoid and Heroin Activation of Mesolimbic Dopamine Transmission by a Common µ1 Opioid Receptor Mechanism}}, + Volume = {276}, + Year = {1997}} + +@article{Tanzi2005a, + Author = {Rudolph E Tanzi}, + Doi = {10.1038/nn0805-977}, + Journal = {Nat Neurosci}, + Keywords = {Alzheimer Disease, metabolism/physiopathology; Amyloid beta-Protein, metabolism; Brain, metabolism; Endocytosis; Humans; Models, Neurological; Receptors, N-Methyl-D-Aspartate, metabolism; Synapses, metabolism}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {8}, + Owner = {Young}, + Pages = {977--979}, + Pii = {nn0805-977}, + Pmid = {16047022}, + Timestamp = {2009.12.10}, + Title = {The synaptic Abeta hypothesis of Alzheimer disease.}, + Url = {http://dx.doi.org/10.1038/nn0805-977}, + Volume = {8}, + Year = {2005}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/nn0805-977}} + +@article{Tapert2003, + Author = {Tapert, S. F. and Cheung, E. H. and Brown, G. G. and Frank, L. R. and Paulus, M. P. and Schweinsburg, A. D. and Meloy, M. J. and Brown, S. A.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Jul}, + Pages = {727--735}, + Title = {{{N}eural response to alcohol stimuli in adolescents with alcohol use disorder}}, + Volume = {60}, + Year = {2003}} + +@article{Tapert2004, + Author = {Tapert, S. F. and Pulido, C. and Paulus, M. P. and Schuckit, M. A. and Burke, C.}, + Journal = {J. Stud. Alcohol}, + Month = {Nov}, + Pages = {692--700}, + Title = {{{L}evel of response to alcohol and brain response during visual working memory}}, + Volume = {65}, + Year = {2004}} + +@article{Tapert2007, + Author = {Tapert, S. F. and Schweinsburg, A. D. and Drummond, S. P. and Paulus, M. P. and Brown, S. A. and Yang, T. T. and Frank, L. R.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Oct}, + Pages = {173--183}, + Title = {{{F}unctional {M}{R}{I} of inhibitory processing in abstinent adolescent marijuana users}}, + Volume = {194}, + Year = {2007}} + +@article{Tapper1983, + Author = {Tapper, D. N. and Wiesenfeld, Z. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Feb}, + Pages = {534--547}, + Title = {{{A} dorsal spinal neural network in cat. {I}{I}. {C}hanges in responsiveness initiated by single conditioning impulses in single type 1 cutaneous input fibers}}, + Volume = {49}, + Year = {1983}} + +@incollection{Taqqu1998, + Address = {Boston}, + Author = {Taqqu, M. S.
and Teverovsky, V.}, + Booktitle = {A Practical Guide to Heavy Tails: Statistical Techniques and Applications}, + Editor = {Adler, R. and Feldman, R. and Taqqu, M. S.}, + Pages = {177--217}, + Publisher = {Birkhauser}, + Title = {On Estimating the Intensity of Long--range Dependence in Finite and Infinite Variance Time Series}, + Year = {1998}} + +@article{Taylor1999, + Author = {Taylor, J. R. and Horger, B. A.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Feb}, + Pages = {31--40}, + Title = {{{E}nhanced responding for conditioned reward produced by intra-accumbens amphetamine is potentiated after cocaine sensitization}}, + Volume = {142}, + Year = {1999}} + +@article{Taylor2007, + Author = {Taylor, J. R. and Lynch, W. J. and Sanchez, H. and Olausson, P. and Nestler, E. J. and Bibb, J. A.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Mar}, + Pages = {4147--4152}, + Title = {{{I}nhibition of {C}dk5 in the nucleus accumbens enhances the locomotor-activating and incentive-motivational effects of cocaine}}, + Volume = {104}, + Year = {2007}} + +@article{Telang1999, + Author = {Telang, F. W. and Ding, Y. S. and Volkow, N. D. and Molina, P. E. and Gatley, S. J.}, + Journal = {Nucl. Med. Biol.}, + Month = {Feb}, + Pages = {249--250}, + Title = {{{P}yridostigmine, a carbamate acetylcholinesterase {A}{C}h{E} inhibitor and reactivator, is used prophylactically against chemical warfare agents}}, + Volume = {26}, + Year = {1999}} + +@article{Telang1999a, + Author = {Telang, F. W. and Volkow, N. D. and Levy, A. and Logan, J. and Fowler, J. S. and Felder, C. and Wong, C. and Wang, G. J.}, + Journal = {Synapse}, + Month = {Mar}, + Pages = {290--296}, + Title = {{{D}istribution of tracer levels of cocaine in the human brain as assessed with averaged [11{C}]cocaine images}}, + Volume = {31}, + Year = {1999}} + +@article{Telzer2008, + Author = {Telzer, E. H. and Mogg, K. and Bradley, B. P. and Mai, X. and Ernst, M. and Pine, D. S. and Monk, C. S.}, + Journal = {Biol Psychol}, + Month = {Oct}, + Pages = {216--222}, + Title = {{{R}elationship between trait anxiety, prefrontal cortex, and attention bias to angry faces in children and adolescents}}, + Volume = {79}, + Year = {2008}} + +@article{Teng2006, + Author = {Teng, D. and Dayan, P. and Tyler, S. and Hauser, W. A. and Chan, S. and Leary, L. and Hesdorffer, D.}, + Journal = {Pediatrics}, + Month = {Feb}, + Pages = {304--308}, + Title = {{{R}isk of intracranial pathologic conditions requiring emergency intervention after a first complex febrile seizure episode among children}}, + Volume = {117}, + Year = {2006}} + +@article{Terwilliger1991, + Author = {Terwilliger, R. Z. and Beitner-Johnson, D. and Sevarino, K. A. and Crain, S. M. and Nestler, E. J.}, + Journal = {Brain Res.}, + Month = {May}, + Pages = {100--110}, + Title = {{{A} general role for adaptations in {G}-proteins and the cyclic {A}{M}{P} system in mediating the chronic actions of morphine and cocaine on neuronal function}}, + Volume = {548}, + Year = {1991}} + +@article{Thanos2009, + Author = {Thanos, P. and Bermeo, C. and Rubinstein, M. and Suchland, K. and Wang, G. and Grandy, D. and Volkow, N.}, + Journal = {J. Psychopharmacol. (Oxford)}, + Month = {Apr}, + Title = {{{C}onditioned place preference and locomotor activity in response to methylphenidate, amphetamine and cocaine in mice lacking dopamine {D}4 receptors}}, + Year = {2009}} + +@article{Thanos2001a, + Author = {Thanos, P.K. and Volkow, N.D. and Freimuth, P. and Umegaki, H. and Ikari, H. and Roth, G. 
and Ingram, D.K. and Hitzemann, R.}, + Journal = {Journal of neurochemistry}, + Number = {5}, + Pages = {1094}, + Title = {{Overexpression of dopamine D2 receptors reduces alcohol self-administration.}}, + Volume = {78}, + Year = {2001}} + +@article{Thanos2009a, + Author = {Thanos, P. K. and Bermeo, C. and Wang, G. J. and Volkow, N. D.}, + Journal = {Behav. Brain Res.}, + Month = {May}, + Pages = {345--349}, + Title = {{{D}-cycloserine accelerates the extinction of cocaine-induced conditioned place preference in {C}57b{L}/c mice}}, + Volume = {199}, + Year = {2009}} + +@article{Thanos2009b, + Author = {Thanos, P. K. and Cavigelli, S. A. and Michaelides, M. and Olvet, D. M. and Patel, U. and Diep, M. N. and Volkow, N. D.}, + Journal = {Physiol Res}, + Pages = {219--228}, + Title = {{{A} non-invasive method for detecting the metabolic stress response in rodents: characterization and disruption of the circadian corticosterone rhythm}}, + Volume = {58}, + Year = {2009}} + +@article{Thanos2005, + Author = {Thanos, P. K. and Dimitrakakis, E. S. and Rice, O. and Gifford, A. and Volkow, N. D.}, + Journal = {Behav. Brain Res.}, + Month = {Nov}, + Pages = {206--213}, + Title = {{{E}thanol self-administration and ethanol conditioned place preference are reduced in mice lacking cannabinoid {C}{B}1 receptors}}, + Volume = {164}, + Year = {2005}} + +@article{Thanos2005a, + Author = {Thanos, P. K. and Katana, J. M. and Ashby, C. R. and Michaelides, M. and Gardner, E. L. and Heidbreder, C. A. and Volkow, N. D.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {May}, + Pages = {190--197}, + Title = {{{T}he selective dopamine {D}3 receptor antagonist {S}{B}-277011-{A} attenuates ethanol consumption in ethanol preferring ({P}) and non-preferring ({N}{P}) rats}}, + Volume = {81}, + Year = {2005}} + +@article{Thanos2008d, + Author = {Thanos, P. K. and Michaelides, M. and Benveniste, H. and Wang, G. J. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {May}, + Pages = {319--324}, + Title = {{{T}he effects of cocaine on regional brain glucose metabolism is attenuated in dopamine transporter knockout mice}}, + Volume = {62}, + Year = {2008}} + +@article{Thanos2007, + Author = {Thanos, P. K. and Michaelides, M. and Benveniste, H. and Wang, G. J. and Volkow, N. D.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Oct}, + Pages = {426--433}, + Title = {{{E}ffects of chronic oral methylphenidate on cocaine self-administration and striatal dopamine {D}2 receptors in rodents}}, + Volume = {87}, + Year = {2007}} + +@article{Thanos2008a, + Author = {Thanos, P. K. and Michaelides, M. and Gispert, J. D. and Pascau, J. and Soto-Montenegro, M. L. and Desco, M. and Wang, R. and Wang, G. J. and Volkow, N. D.}, + Journal = {Int J Obes (Lond)}, + Month = {Jul}, + Pages = {1171--1179}, + Title = {{{D}ifferences in response to food stimuli in a rat model of obesity: in-vivo assessment of brain glucose metabolism}}, + Volume = {32}, + Year = {2008}} + +@article{Thanos2008c, + Author = {Thanos, P. K. and Michaelides, M. and Ho, C. W. and Wang, G. J. and Newman, A. H. and Heidbreder, C. A. and Ashby, C. R. and Gardner, E. L. and Volkow, N. D.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jun}, + Pages = {499--507}, + Title = {{{T}he effects of two highly selective dopamine {D}3 receptor antagonists ({S}{B}-277011{A} and {N}{G}{B}-2904) on food self-administration in a rodent model of obesity}}, + Volume = {89}, + Year = {2008}} + +@article{Thanos2008e, + Author = {Thanos, P. K. and Michaelides, M. and Piyis, Y. 
K. and Wang, G. J. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Jan}, + Pages = {50--61}, + Title = {{{F}ood restriction markedly increases dopamine {D}2 receptor ({D}2{R}) in a rat model of obesity as assessed with in-vivo mu{P}{E}{T} imaging ([11{C}] raclopride) and in-vitro ([3{H}] spiperone) autoradiography}}, + Volume = {62}, + Year = {2008}} + +@article{Thanos2008b, + Author = {Thanos, P. K. and Michaelides, M. and Umegaki, H. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Jul}, + Pages = {481--486}, + Title = {{{D}2{R} {D}{N}{A} transfer into the nucleus accumbens attenuates cocaine self-administration in rats}}, + Volume = {62}, + Year = {2008}} + +@article{Thanos2008, + Author = {Thanos, P. K. and Ramalhete, R. C. and Michaelides, M. and Piyis, Y. K. and Wang, G. J. and Volkow, N. D.}, + Journal = {Synapse}, + Month = {Sep}, + Pages = {637--642}, + Title = {{{L}eptin receptor deficiency is associated with upregulation of cannabinoid 1 receptors in limbic brain regions}}, + Volume = {62}, + Year = {2008}} + +@article{Thanos2005b, + Author = {Thanos, P. K. and Rivera, S. N. and Weaver, K. and Grandy, D. K. and Rubinstein, M. and Umegaki, H. and Wang, G. J. and Hitzemann, R. and Volkow, N. D.}, + Journal = {Life Sci.}, + Month = {May}, + Pages = {130--139}, + Title = {{{D}opamine {D}2{R} {D}{N}{A} transfer in dopamine {D}2 receptor-deficient mice: effects on ethanol drinking}}, + Volume = {77}, + Year = {2005}} + +@article{Thanos2002, + Author = {Thanos, P. K. and Taintor, N. B. and Alexoff, D. and Vaska, P. and Logan, J. and Grandy, D. K. and Fang, Y. and Lee, J. H. and Fowler, J. S. and Volkow, N. D. and Rubinstein, M.}, + Journal = {J. Nucl. Med.}, + Month = {Nov}, + Pages = {1570--1577}, + Title = {{{I}n vivo comparative imaging of dopamine {D}2 knockout and wild-type mice with (11){C}-raclopride and micro{P}{E}{T}}}, + Volume = {43}, + Year = {2002}} + +@article{Thanos2004, + Author = {Thanos, P. K. and Taintor, N. B. and Rivera, S. N. and Umegaki, H. and Ikari, H. and Roth, G. and Ingram, D. K. and Hitzemann, R. and Fowler, J. S. and Gatley, S. J. and Wang, G. J. and Volkow, N. D.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {May}, + Pages = {720--728}, + Title = {{{D}{R}{D}2 gene transfer into the nucleus accumbens core of the alcohol preferring and nonpreferring rats attenuates alcohol drinking}}, + Volume = {28}, + Year = {2004}} + +@article{Thanos2001, + Author = {Thanos, P. K. and Volkow, N. D. and Freimuth, P. and Umegaki, H. and Ikari, H. and Roth, G. and Ingram, D. K. and Hitzemann, R.}, + Journal = {J. Neurochem.}, + Month = {Sep}, + Pages = {1094--1103}, + Title = {{{O}verexpression of dopamine {D}2 receptors reduces alcohol self-administration}}, + Volume = {78}, + Year = {2001}} + +@article{Thapar2009, + Abstract = {The present study investigated age-related differences in the locus + of the emotional enhancement effect in recognition memory. Younger + and older adults studied an emotion-heterogeneous list followed by + a forced choice recognition memory test. Luce's (1963) similarity + choice model was used to assess whether emotional valence impacts + memory sensitivity or response bias. Results revealed that the emotional + enhancement effect in both age groups was due to a more liberal response + bias for emotional words. However, the pattern of bias differed, + with younger adults more willing to classify negative words as old + and older adults more willing to classify positive words as old. 
+ The results challenge the conclusion that emotional words are more + memorable than neutral words.}, + Author = {Anjali Thapar and Jeffrey N Rouder}, + Doi = {10.3758/PBR.16.4.699}, + Institution = {Bryn Mawr College, Bryn Mawr, Pennsylvania.}, + Journal = {Psychon Bull Rev}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Aug}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {699--704}, + Pii = {16/4/699}, + Pmid = {19648455}, + Timestamp = {2009.08.15}, + Title = {Aging and recognition memory for emotional words: A bias account.}, + Url = {http://dx.doi.org/10.3758/PBR.16.4.699}, + Volume = {16}, + Year = {2009}, + Bdsk-Url-1 = {http://dx.doi.org/10.3758/PBR.16.4.699}} + +@article{Thapar2001, + Abstract = {In recent years, Ratcliff, McKoon, and colleagues have argued that + priming in perceptual implicit memory tests is the result of biases + in information processing. Three experiments are presented that extend + this framework to the conceptual implicit memory domain. Participants + studied a list of words before receiving a set of general knowledge + questions. For some questions, participants studied the correct answer; + for others, they studied a similar but incorrect answer. Although + study of a correct answer facilitated performance, study of the similar + alternative hurt performance. Costs and benefits of previous study + were observed in both production and forced-choice tasks. However, + there was no benefit of previous study when participants studied + both the correct answer and the similar but incorrect alternative. + The pattern of results indicates that participants were biased to + respond with previously studied words on the conceptual implicit + memory test. This pattern is concordant with the biased information-processing + approach to priming.}, + Author = {A. Thapar and J. N. Rouder}, + Institution = {Department of Psychology, Bryn Mawr College, Pennsylvania 19010, USA. athapar@brynmawr.edu}, + Journal = {Psychon Bull Rev}, + Keywords = {Bias (Epidemiology); Humans; Memory; Psychological Tests, statistics /&/ numerical data; Random Allocation; Visual Perception}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {791--797}, + Pmid = {11848601}, + Timestamp = {2009.08.15}, + Title = {Bias in conceptual priming.}, + Volume = {8}, + Year = {2001}} + +@article{Thapar2003, + Author = {Thapar, A. and Ratcliff, R. and McKoon, G.}, + Journal = {Psychology and Aging}, + Pages = {415--429}, + Title = {A Diffusion Model Analysis of the Effects of Aging on Letter Discrimination}, + Volume = {18}, + Year = {2003}} + +@book{Thom1975, + Address = {New York}, + Author = {Thom, R.}, + Publisher = {Benjamin-Addison Wesley}, + Title = {Structural Stability and Morphogenesis}, + Year = {1975}} + +@article{OpenBUGS, + Journal = {R News}, + Pages = {12--17}, + Title = {{OpenBUGS software package}}, + Volume = {6}, + Year = {2006}} + +@article{Thornton2005, + Author = {Thornton, T. L. and Gilden, D.
L.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {409--441}, + Title = {Provenance of Correlations in Psychological Data}, + Volume = {12}, + Year = {2005}} + +@article{Thorpe1983, + Author = {Thorpe, SJ and Rolls, ET and Maddison, S.}, + Journal = {Experimental Brain Research}, + Number = {1}, + Pages = {93--115}, + Publisher = {Springer}, + Title = {{The orbitofrontal cortex: neuronal activity in the behaving monkey}}, + Volume = {49}, + Year = {1983}} + +@article{Tiffany2000, + Author = {Tiffany, S. T. and Carter, B. L. and Singleton, E. G.}, + Journal = {Addiction}, + Month = {Aug}, + Pages = {S177--187}, + Title = {{{C}hallenges in the manipulation, assessment and interpretation of craving relevant variables}}, + Volume = {95 Suppl 2}, + Year = {2000}} + +@article{Tiihonen1994, + Abstract = {Using high-resolution single photon emission computed tomography, + the authors studied changes in cerebral blood flow (CBF) in six healthy + men after the men rapidly consumed intoxicating amounts of ethanol. + When the subjects were given intravenous placebo before ethanol intake, + regional CBF was significantly increased over baseline in the right + prefrontal cortex, but no significant change in CBF was observed + when the subjects received intravenous naloxone before ethanol intake. + The results indicate that euphoria occurring during acute ethanol + intake is associated with activation of the right prefrontal cortex + and mediated through the endogenous opioid system.}, + Author = {J. Tiihonen and J. Kuikka and P. Hakola and J. Paanila and J. Airaksinen and M. Eronen and T. Hallikainen}, + Institution = {Department of Forensic Psychiatry, Faculty of Medicine, University of Kuopio, Finland.}, + Journal = {Am J Psychiatry}, + Keywords = {Alcohol Drinking, physiopathology; Brain, radionuclide imaging; Cerebrovascular Circulation, drug effects/physiology; Endorphins, physiology; Ethanol, blood/pharmacology; Euphoria, drug effects/physiology; Frontal Lobe, blood supply/physiology; Humans; Injections, Intravenous; Male; Naloxone, administration /&/ dosage/pharmacology; Organotechnetium Compounds, diagnostic use; Oximes, diagnostic use; Placebos; Technetium Tc 99m Exametazime; Tomography, Emission-Computed, Single-Photon}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {10}, + Owner = {Woo-Young Ahn}, + Pages = {1505--1508}, + Pmid = {8092344}, + Timestamp = {2009.08.04}, + Title = {Acute ethanol-induced changes in cerebral blood flow.}, + Volume = {151}, + Year = {1994}} + +@article{Tobler2009, + Author = {Tobler, P. N. and Christopoulos, G. I. and O'Doherty, J. P. and Dolan, R. J. and Schultz, W.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Pages = {7185--7190}, + Title = {{{R}isk-dependent reward value signal in human prefrontal cortex}}, + Volume = {106}, + Year = {2009}} + +@article{Tobler2008, + Author = {Tobler, P. N. and Christopoulos, G. I. and O'Doherty, J. P. and Dolan, R. J. and Schultz, W.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {11703--11711}, + Title = {{{N}euronal distortions of reward probability without choice}}, + Volume = {28}, + Year = {2008}} + +@article{Tobler2003, + Author = {Tobler, P. N. and Dickinson, A.
and Schultz, W.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {10402--10410}, + Title = {{{C}oding of predicted reward omission by dopamine neurons in a conditioned inhibition paradigm}}, + Volume = {23}, + Year = {2003}} + +@article{Tobler2005, + Author = {Tobler, P. N. and Fiorillo, C. D. and Schultz, W.}, + Journal = {Science}, + Pages = {1642--1645}, + Title = {{{A}daptive coding of reward value by dopamine neurons}}, + Volume = {307}, + Year = {2005}} + +@article{Tobler2007, + Author = {Tobler, P. N. and Fletcher, P. C. and Bullmore, E. T. and Schultz, W.}, + Journal = {Neuron}, + Month = {Apr}, + Pages = {167--175}, + Title = {{{L}earning-related human brain activations reflecting individual finances}}, + Volume = {54}, + Year = {2007}} + +@article{Tobler2007a, + Author = {Tobler, P. N. and O'Doherty, J. P. and Dolan, R. J. and Schultz, W.}, + Journal = {J. Neurophysiol.}, + Month = {Feb}, + Pages = {1621--1632}, + Title = {{{R}eward value coding distinct from risk attitude-related uncertainty coding in human reward systems}}, + Volume = {97}, + Year = {2007}} + +@article{Tobler2006, + Author = {Tobler, P. N. and O'Doherty, J. P. and Dolan, R. J. and Schultz, W.}, + Journal = {J. Neurophysiol.}, + Month = {Jan}, + Pages = {301--310}, + Title = {{{H}uman neural learning depends on reward prediction errors in the blocking paradigm}}, + Volume = {95}, + Year = {2006}} + +@article{Tom2007, + Author = {Tom, Sabrina M. and Fox, Craig R. and Trepel, Christopher and Poldrack, Russell A.}, + Journal = {Science}, + Owner = {WooYoung Ahn}, + Pages = {515--518}, + Timestamp = {2007.12.12}, + Title = {The neural basis of loss aversion in decision-making under risk}, + Volume = {315}, + Year = {2007}} + +@article{Tomasi2007, + Author = {Tomasi, D. and Goldstein, R. Z. and Telang, F. and Maloney, T. and Alia-Klein, N. and Caparelli, E. C. and Volkow, N. D.}, + Journal = {Brain Res.}, + Month = {Sep}, + Pages = {83--92}, + Title = {{{W}idespread disruption in brain activation patterns to a working memory task during cocaine abstinence}}, + Volume = {1171}, + Year = {2007}} + +@article{Tomasi2007a, + Author = {Tomasi, D. and Goldstein, R. Z. and Telang, F. and Maloney, T. and Alia-Klein, N. and Caparelli, E. C. and Volkow, N. D.}, + Journal = {Psychiatry Res}, + Month = {Aug}, + Pages = {189--201}, + Title = {{{T}halamo-cortical dysfunction in cocaine abusers: implications in attention and perception}}, + Volume = {155}, + Year = {2007}} + +@article{Tomasi2009, + Author = {Tomasi, D. and Volkow, N. D. and Wang, R. and Telang, F. and Wang, G. J. and Chang, L. and Ernst, T. and Fowler, J. S.}, + Journal = {PLoS ONE}, + Pages = {e6102}, + Title = {{{D}opamine transporters in striatum correlate with deactivation in the default mode network during visuospatial attention}}, + Volume = {4}, + Year = {2009}} + +@article{Tomasi2009a, + Author = {Tomasi, D. and Wang, R.
L. and Telang, F. and Boronikolas, V. and Jayne, M. C. and Wang, G. J. and Fowler, J. S. and Volkow, N. D.}, + Journal = {Cereb. Cortex}, + Month = {Jan}, + Pages = {233--240}, + Title = {{{I}mpairment of attentional networks after 1 night of sleep deprivation}}, + Volume = {19}, + Year = {2009}} + +@article{Tomer2008, + Author = {Tomer, R. and Goldstein, R. Z. and Wang, G. J. and Wong, C. and Volkow, N. D.}, + Journal = {Biol Psychol}, + Month = {Jan}, + Pages = {98--101}, + Title = {{{I}ncentive motivation is associated with striatal dopamine asymmetry}}, + Volume = {77}, + Year = {2008}} + +@article{Tomlin2006, + Author = {Tomlin, D. and Kayali, M. A. and King-Casas, B. and Anen, C. and Camerer, C. F. and Quartz, S. R. and Montague, P. R.}, + Journal = {Science}, + Month = {May}, + Pages = {1047--1050}, + Title = {{{A}gent-specific responses in the cingulate cortex during economic exchanges}}, + Volume = {312}, + Year = {2006}} + +@article{Tonk1996, + Author = {Tonk, V. and Schneider, N. R. and Delgado, M. R. and Mao, J. and Schultz, R. A.}, + Journal = {Am. J. Med. Genet.}, + Month = {Jan}, + Pages = {16--20}, + Title = {{q24.3)] inherited from a mother mosaic for the abnormality}}, + Volume = {61}, + Year = {1996}} + +@article{Torres1998, + Author = {Torres, O. A. and Roach, E. S. and Delgado, M. R. and Sparagana, S. P. and Sheffield, E. and Swift, D. and Bruce, D.}, + Journal = {J. Child Neurol.}, + Month = {Apr}, + Pages = {173--177}, + Title = {{{E}arly diagnosis of subependymal giant cell astrocytoma in patients with tuberous sclerosis}}, + Volume = {13}, + Year = {1998}} + +@book{Townsend1983, + Address = {London}, + Author = {Townsend, J. T. and Ashby, F. G.}, + Publisher = {Cambridge University Press}, + Title = {Stochastic Modeling of Elementary Psychological Processes}, + Year = {1983}} + +@article{Trafimow2005, + Author = {Trafimow, D.}, + Journal = {Psychological Review}, + Pages = {669--674}, + Title = {The Ubiquitous {L}aplacian Assumption: Reply to {L}ee and {W}agenmakers (2005)}, + Volume = {112}, + Year = {2005}} + +@article{Trafimow2003, + Author = {Trafimow, D.}, + Journal = {Psychological Review}, + Pages = {526--535}, + Title = {Hypothesis Testing and Theory Evaluation at the Boundaries: Surprising Insights From {B}ayes's Theorem}, + Volume = {110}, + Year = {2003}} + +@article{Tricomi2009, + Author = {Tricomi, E. and Balleine, B. W. and O'Doherty, J. P.}, + Journal = {Eur. J. Neurosci.}, + Month = {May}, + Title = {{{A} specific role for posterior dorsolateral striatum in human habit learning}}, + Year = {2009}} + +@article{Tricomi2006, + Author = {Tricomi, E. and Delgado, M. R. and McCandliss, B. D. and McClelland, J. L. and Fiez, J. A.}, + Journal = {J Cogn Neurosci}, + Month = {Jun}, + Pages = {1029--1043}, + Title = {{{P}erformance feedback drives caudate activation in a phonological learning task}}, + Volume = {18}, + Year = {2006}} + +@article{Tricomi2004, + Author = {Tricomi, E. M. and Delgado, M. R. and Fiez, J. A.}, + Journal = {Neuron}, + Pages = {281--292}, + Title = {{{M}odulation of caudate activity by action contingency}}, + Volume = {41}, + Year = {2004}} + +@article{Tsankova2007, + Author = {Tsankova, N. and Renthal, W. and Kumar, A. and Nestler, E. J.}, + Journal = {Nat. Rev. 
Neurosci.}, + Month = {May}, + Pages = {355--367}, + Title = {{{E}pigenetic regulation in psychiatric disorders}}, + Volume = {8}, + Year = {2007}} + +@article{Tuerlinckx2004, + Author = {Tuerlinckx, F.}, + Journal = {Behavior Research Methods, Instruments, \& Computers}, + Pages = {702--716}, + Title = {The Efficient Computation of the Distribution Function of the Diffusion Process}, + Volume = {36}, + Year = {2004}} + +@article{Tuerlinckxinpress, + Author = {Tuerlinckx, F. and De Boeck, P.}, + Journal = {Psychometrika}, + Pages = {??-??}, + Title = {Two Interpretations of the Discrimination Parameter}, + Volume = {??}, + Year = {in press}} + +@article{Tuerlinckx2001, + Author = {Tuerlinckx, F. and Maris, E. and Ratcliff, R. and De Boeck, P.}, + Journal = {Behavior Research Methods, Instruments, \& Computers}, + Pages = {443--456}, + Title = {A Comparison of Four Methods for Simulating the Diffusion Process}, + Volume = {33}, + Year = {2001}} + +@book{Tukey1977, + Address = {Reading, MA}, + Author = {Tukey, J. W.}, + Publisher = {Addison--Wesley}, + Title = {Exploratory Data Analysis}, + Year = {1977}} + +@article{Turetsky2002, + Author = {Turetsky, B. I. and Fein, G.}, + Journal = {Psychophysiology}, + Month = {Mar}, + Pages = {147--157}, + Title = {{{A}lpha2-noradrenergic effects on {E}{R}{P} and behavioral indices of auditory information processing}}, + Volume = {39}, + Year = {2002}} + +@article{Tversky1992, + Author = {Tversky, A. and Kahneman, D.}, + Journal = {Journal of Risk and Uncertainty}, + Owner = {Wooyoung Ahn}, + Pages = {297--323}, + Timestamp = {2007.05.04}, + Title = {Advances in Prospect Theory: Cumulative Representations of Uncertainty}, + Volume = {5}, + Year = {1992}} + +@article{Tversky1981, + Author = {Tversky, A. and Kahneman, D.}, + Journal = {Science}, + Number = {4481}, + Pages = {453--458}, + Title = {{The framing of decisions and the psychology of choice}}, + Volume = {211}, + Year = {1981}} + +@article{Ungless2004, + Author = {Ungless, M.A. and Magill, P.J. and Bolam, J.P.}, + Journal = {Science}, + Number = {5666}, + Pages = {2040--2042}, + Publisher = {American Association for the Advancement of Science}, + Title = {{Uniform inhibition of dopamine neurons in the ventral tegmental area by aversive stimuli}}, + Volume = {303}, + Year = {2004}} + +@article{Usher2001, + Author = {Usher, M. and McClelland, J. L.}, + Journal = {Psychological Review}, + Pages = {550--592}, + Title = {On the Time Course of Perceptual Choice: The Leaky Competing Accumulator Model}, + Volume = {108}, + Year = {2001}} + +@article{Uslaner2006, + Abstract = {The subthalamic nucleus (STN) is traditionally thought of as part + of a system involved in motor control but recent evidence suggests + that it may also play a role in other psychological processes. Here + we examined the effects of STN lesions on two measures of impulsivity + and found that STN lesions increased 'impulsive action' (produced + behavioral disinhibition), as measured by performance on a differential + reinforcement of low rates of responding task, but decreased 'impulsive + choice' (impulsive decision making), as measured by a delay discounting + task. In addition, amphetamine and food restriction increased 'impulsive + action' and decreased 'impulsive choice' to a greater extent in STN-lesioned + animals than in sham controls. We speculate that these apparently + discrepant effects may be because STN lesions enhance the incentive + salience assigned to rewards.
These findings suggest that the STN + may serve as a novel target for the treatment of psychological disorders + characterized by deficits in behavioral control, such as drug addiction + and attention deficit hyperactivity disorder.}, + Author = {Jason M Uslaner and Terry E Robinson}, + Doi = {10.1111/j.1460-9568.2006.05117.x}, + Institution = {Biopsychology and Neuroscience Programs, Department of Psychology, University of Michigan, East Hall, 525 E. University Street, Ann Arbor, MI 48019-1109, USA. Jason_Uslaner@merck.com}, + Journal = {Eur J Neurosci}, + Keywords = {Amphetamine, pharmacology; Animals; Central Nervous System Stimulants, pharmacology; Choice Behavior, physiology; Conditioning, Operant, physiology; Dose-Response Relationship, Drug; Food Deprivation, physiology; Impulsive Behavior, psychology; Male; Motivation; Psychomotor Performance, physiology; Rats; Rats, Sprague-Dawley; Reinforcement (Psychology); Reinforcement Schedule; Reward; Subthalamic Nucleus, anatomy /&/ histology/physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {8}, + Owner = {Woo-Young Ahn}, + Pages = {2345--2354}, + Pii = {EJN5117}, + Pmid = {17074055}, + Timestamp = {2009.08.06}, + Title = {Subthalamic nucleus lesions increase impulsive action and decrease impulsive choice - mediation by enhanced incentive motivation?}, + Url = {http://dx.doi.org/10.1111/j.1460-9568.2006.05117.x}, + Volume = {24}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1460-9568.2006.05117.x}} + +@article{Valentin2007, + Author = {Valentin, V. V. and Dickinson, A. and O'Doherty, J. P.}, + Journal = {J. Neurosci.}, + Month = {Apr}, + Pages = {4019--4026}, + Title = {{{D}etermining the neural substrates of goal-directed learning in the human brain}}, + Volume = {27}, + Year = {2007}} + +@article{VanHorn2003, + Author = {{Van Horn}, K. S.}, + Journal = {International Journal of Approximate Reasoning}, + Pages = {3--24}, + Title = {Constructing a Logic of Plausible Inference: {A} Guide to {C}ox's Theorem}, + Volume = {34}, + Year = {2003}} + +@article{VanOrden2003, + Author = {Van Orden, G. C. and Holden, J. G. and Turvey, M. T.}, + Journal = {Journal of Experimental Psychology: General}, + Pages = {331--350}, + Title = {Self--organization of Cognitive Performance}, + Volume = {132}, + Year = {2003}} + +@article{VanZandt2000, + Author = {Van Zandt, T. and Colonius, H. and Proctor, R. W.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {208--256}, + Title = {A Comparison of Two Response Time Models Applied to Perceptual Matching}, + Volume = {7}, + Year = {2000}} + +@article{Vandekerckhoveinpress, + Author = {Vandekerckhove, J. and Tuerlinckx, F.}, + Journal = {Behavior Research Methods}, + Pages = {??-??}, + Title = {Diffusion Model Analysis with {MATLAB}: {A} {DMAT} Primer.}, + Volume = {??}, + Year = {in press}} + +@article{Vandekerckhove2007, + Author = {Vandekerckhove, J. and Tuerlinckx, F.}, + Title = {Fitting the {R}atcliff Diffusion Model to Experimental Data. {M}anuscript submitted for publication.}, + Year = {2007}} + +@article{Vandekerckhove2009, + Author = {Vandekerckhove, J. and Tuerlinckx, F. and Lee, M. D.}, + Journal = {Manuscript submitted for publication}, + Owner = {Woo-Young Ahn}, + Timestamp = {2009.08.15}, + Title = {Hierarchical diffusion models for two-choice response time}, + Year = {2009}} + +@article{Vanderschuren2005, + Author = {Vanderschuren, L. J. and Di Ciano, P. and Everitt, B.J.}, + Journal = {Journal of Neuroscience}, + Number = {38}, + Pages = {8665--8670}, + Publisher = {Soc Neuroscience}, + Title = {{Involvement of the dorsal striatum in cue-controlled cocaine seeking}}, + Volume = {25}, + Year = {2005}} + +@article{Varty2000, + Author = {Varty, G. B. and Paulus, M. P. and Braff, D. L. and Geyer, M. A.}, + Journal = {Biol.
Psychiatry}, + Month = {May}, + Pages = {864--873}, + Title = {{{E}nvironmental enrichment and isolation rearing in the rat: effects on locomotor behavior and startle response plasticity}}, + Volume = {47}, + Year = {2000}} + +@article{Vassileva2007, + Author = {Vassileva, J. and Gonzalez, R. and Bechara, A. and Martin, E. M.}, + Journal = {Addict Behav}, + Month = {Dec}, + Pages = {3071--3076}, + Title = {{{A}re all drug addicts impulsive? {E}ffects of antisociality and extent of multidrug use on cognitive and motor impulsivity}}, + Volume = {32}, + Year = {2007}} + +@article{Verdejo-Garcia2007a, + Author = {Verdejo-Garc\'{i}a, A. and Bechara, A. and Recknor, E. C. and P\'{e}rez-Garc\'{i}a, M.}, + Journal = {Drug Alcohol Depend}, + Month = {Dec}, + Pages = {213--219}, + Title = {{{N}egative emotion-driven impulsivity predicts substance dependence problems}}, + Volume = {91}, + Year = {2007}} + +@article{Verdejo-Garcia2006, + Author = {Verdejo-Garc\'{i}a, A. and Bechara, A. and Recknor, E. C. and P\'{e}rez-Garc\'{i}a, M.}, + Journal = {J Int Neuropsychol Soc}, + Month = {May}, + Pages = {405--415}, + Title = {{{E}xecutive dysfunction in substance dependent individuals during drug use and abstinence: an examination of the behavioral, cognitive and emotional correlates of addiction}}, + Volume = {12}, + Year = {2006}} + +@article{Verdejo-Garcia2006a, + Author = {Verdejo-Garc\'{i}a, A. and P\'{e}rez-Garc\'{i}a, M. and Bechara, A.}, + Journal = {Curr Neuropharmacol}, + Month = {Jan}, + Pages = {17--31}, + Title = {{{E}motion, decision-making and substance dependence: a somatic-marker model of addiction}}, + Volume = {4}, + Year = {2006}} + +@article{Verdejo-Garcia2009, + Author = {Verdejo-Garcia, A. and Bechara, A.}, + Journal = {Neuropharmacology}, + Pages = {48--62}, + Title = {{{A} somatic marker theory of addiction}}, + Volume = {56 Suppl 1}, + Year = {2009}} + +@article{Verdejo-Garcia2007, + Author = {Verdejo-Garcia, A. and Benbrook, A. and Funderburk, F. and David, P. and Cadet, J. L. and Bolla, K. I.}, + Journal = {Drug Alcohol Depend}, + Month = {Sep}, + Pages = {2--11}, + Title = {{{T}he differential relationship between cocaine use and marijuana use on decision-making performance over repeat testing with the {I}owa {G}ambling {T}ask}}, + Volume = {90}, + Year = {2007}} + +@article{Verdejo-Garcia2008, + Abstract = {There is a longstanding association between substance-use disorders + (SUDs) and the psychological construct of impulsivity. In the first + section of this review, personality and neurocognitive data pertaining + to impulsivity will be summarised in regular users of four classes + of substance: stimulants, opiates, alcohol and 3,4-methylenedioxymethamphetamine + (MDMA). Impulsivity in these groups may arise via two alternative + mechanisms, which are not mutually exclusive. By one account, impulsivity + may occur as a consequence of chronic exposure to substances causing + harmful effects on the brain. By the alternative account, impulsivity + pre-dates SUDs and is associated with the vulnerability to addiction. + We will review the evidence that impulsivity is associated with addiction + vulnerability by considering three lines of evidence: (i) studies + of groups at high-risk for development of SUDs; (ii) studies of pathological + gamblers, where the harmful consequences of the addiction on brain + structure are minimised, and (iii) genetic association studies linking + impulsivity to genetic risk factors for addiction.
Within each of + these three lines of enquiry, there is accumulating evidence that + impulsivity is a pre-existing vulnerability marker for SUDs.}, + Doi = {10.1016/j.neubiorev.2007.11.003}, + Journal = {Neurosci Biobehav Rev}, + Keywords = {Animals; Gambling; Genetic Predisposition to Disease; Humans; Impulsive Behavior, etiology/genetics; Risk-Taking; Substance-Related Disorders, complications}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {777--810}, + Pii = {S0149-7634(08)00006-7}, + Pmid = {18295884}, + Timestamp = {2009.08.06}, + Title = {Impulsivity as a vulnerability marker for substance-use disorders: review of findings from high-risk research, problem gamblers and genetic association studies.}, + Url = {http://dx.doi.org/10.1016/j.neubiorev.2007.11.003}, + Volume = {32}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.neubiorev.2007.11.003}} + +@article{Verhoeven1981, + Abstract = {Since 1975, different morphinomimetic peptides have been isolated + from hypophyseal-hypothalamic extracts: the pentapeptides methionine-enkephalin + and leucine-enkephalin, and the longer peptides alpha-, beta- and + gamma-endorphin. The primary structure of most of these peptides + is also present in that of beta-lipotropin. The morphinomimetic properties + of endorphins can be blocked with opiate-antagonists. In rats, moreover, + the endorphins influence behavior which cannot be blocked with opiate + antagonists. On the basis of the hypothesis that hyperactivity of + endorphin systems may be involved in the pathogenesis of schizophrenia + and manic syndromes, the effect of opiate antagonists on psychotic + and manic symptoms has been examined in a number of clinical studies + in the past few years. A transient therapeutic effect has been demonstrated + in about 30\% of the patients so treated. Our own double-blind controlled + study of 5 schizophrenic and 5 manic patients in the context of a + World Health Organization project failed to reveal any therapeutic + effect after subcutaneous injection of 20 mg naloxone. The possible + reasons of the negative results are discussed.}, + Author = {W. M. Verhoeven and H. M. van Praag and J. T. de Jong}, + Journal = {Neuropsychobiology}, + Keywords = {Affective Disorders, Psychotic, drug therapy; Bipolar Disorder, drug therapy; Double-Blind Method; Endorphins, metabolism; Hallucinations, drug therapy; Humans; Naloxone, therapeutic use; Schizophrenia, drug therapy}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {3}, + Owner = {Young}, + Pages = {159--168}, + Pmid = {7231653}, + Timestamp = {2010.05.01}, + Title = {Use of naloxone in schizophrenic psychoses and manic syndromes.}, + Volume = {7}, + Year = {1981}} + +@article{Verney2003, + Author = {Verney, S. P. and Brown, G. G. and Frank, L. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {May}, + Pages = {923--928}, + Title = {{{E}rror-rate-related caudate and parietal cortex activation during decision making}}, + Volume = {14}, + Year = {2003}} + +@article{Vesely2008, + Author = {Vesely, S. and M\"{u}ller, M. and Knutson, T. and Peeker, R. and Hellstr\"{o}m, M. and Dahlstrand, C.}, + Journal = {Scand. J. Urol. Nephrol.}, + Pages = {53--58}, + Title = {{{T}ransurethral microwave thermotherapy of the prostate--evaluation with {M}{R}{I} and analysis of parameters relevant to outcome}}, + Volume = {42}, + Year = {2008}} + +@article{Vickers1998, + Author = {Vickers, D. and Lee, M.
D.}, + Journal = {Nonlinear Dynamics, Psychology, and Life Sciences}, + Pages = {169--194}, + Title = {Dynamic Models of Simple Judgments: I. Properties of a Self--Regulating Accumulator Module}, + Volume = {2}, + Year = {1998}} + +@article{Vickers2003, + Author = {Vickers, D. and Lee, M. D. and Dry, M. and Hughes, P.}, + Journal = {Memory \& Cognition}, + Pages = {1094--1104}, + Title = {The Roles of the Convex Hull and the Number of Potential Intersections in Performance on Visually Presented Traveling Salesperson Problems}, + Volume = {31}, + Year = {2003}} + +@article{Vinod2008, + Author = {Vinod, K. Y. and Yalamanchili, R. and Thanos, P. K. and Vadasz, C. and Cooper, T. B. and Volkow, N. D. and Hungund, B. L.}, + Journal = {Synapse}, + Month = {Aug}, + Pages = {574--581}, + Title = {{{G}enetic and pharmacological manipulations of the {C}{B}(1) receptor alter ethanol preference and dependence in ethanol preferring and nonpreferring mice}}, + Volume = {62}, + Year = {2008}} + +@article{Vintzileos1999, + Author = {Vintzileos, A. and Delecluse, P. and Sadourny, R.}, + Journal = {Climate Dynamics}, + Owner = {WooYoung Ahn}, + Pages = {43-62}, + Timestamp = {2008.03.23}, + Title = {On the mechanisms in a tropical ocean-global atmosphere coupled general circulation model. {P}art {I}: {M}ean state and the seasonal cycle.}, + Volume = {15 (1)}, + Year = {1999}} + +@article{Volkow2009, + Author = {Volkow, N. D.}, + Journal = {Tenn Med}, + Month = {Apr}, + Pages = {28--29}, + Title = {{{T}een prescription drug abuse a major health concern}}, + Volume = {102}, + Year = {2009}} + +@article{Volkow2009b, + Author = {Volkow, N. D.}, + Journal = {Schizophr Bull}, + Month = {May}, + Pages = {469--472}, + Title = {{{S}ubstance use disorders in schizophrenia--clinical implications of comorbidity}}, + Volume = {35}, + Year = {2009}} + +@article{Volkow2008, + Author = {Volkow, N. D.}, + Journal = {Ann. N. Y. Acad. Sci.}, + Month = {Oct}, + Pages = {xi-xii}, + Title = {{{A}ddiction {R}eviews. {I}ntroduction}}, + Volume = {1141}, + Year = {2008}} + +@article{Volkow2007, + Author = {Volkow, N. D.}, + Journal = {Sci Pract Perspect}, + Month = {Apr}, + Pages = {2--3}, + Title = {{{A} note from {N}{I}{D}{A}'s director. {I}mages and interventions}}, + Volume = {3}, + Year = {2007}} + +@article{Volkow2007c, + Author = {Volkow, N. D.}, + Journal = {Sci. Am.}, + Month = {Sep}, + Pages = {84--85}, + Title = {{{T}his is your brain on food. {I}nterview by {K}ristin {L}eutwyler-{O}zelli}}, + Volume = {297}, + Year = {2007}} + +@article{Volkow2006a, + Author = {Volkow, N. D.}, + Journal = {Drug Alcohol Depend}, + Month = {Sep}, + Pages = {4--7}, + Title = {{{H}ispanic drug abuse research: challenges and opportunities}}, + Volume = {84 Suppl 1}, + Year = {2006}} + +@article{Volkow2006d, + Author = {Volkow, N. D.}, + Journal = {Am J Psychiatry}, + Month = {Mar}, + Pages = {359--361}, + Title = {{{S}timulant medications: how to minimize their reinforcing effects?}}, + Volume = {163}, + Year = {2006}} + +@article{Volkow2005, + Author = {Volkow, N. D.}, + Journal = {Sci Pract Perspect}, + Month = {Dec}, + Pages = {2}, + Title = {{{A} note from {N}{I}{D}{A}'s {D}irector}}, + Volume = {3}, + Year = {2005}} + +@article{Volkow2005b, + Author = {Volkow, N. D.}, + Journal = {Am J Psychiatry}, + Month = {Aug}, + Pages = {1401--1402}, + Title = {{{W}hat do we know about drug addiction?}}, + Volume = {162}, + Year = {2005}} + +@article{Volkow2004, + Author = {Volkow, N. 
D.}, + Journal = {Sci Pract Perspect}, + Month = {Aug}, + Pages = {2}, + Title = {{{A} note from {N}{I}{D}{A}'s director: blending clinical practice and research}}, + Volume = {2}, + Year = {2004}} + +@article{Volkow2004a, + Author = {Volkow, N. D.}, + Journal = {J. Nucl. Med.}, + Month = {Nov}, + Pages = {13N-16N, 19N-20N, 22N passim}, + Title = {{{I}maging the addicted brain: from molecules to behavior}}, + Volume = {45}, + Year = {2004}} + +@article{Volkow2004b, + Author = {Volkow, N. D.}, + Journal = {Biol. Psychiatry}, + Month = {Nov}, + Pages = {714--717}, + Title = {{{T}he reality of comorbidity: depression and drug abuse}}, + Volume = {56}, + Year = {2004}} + +@article{Volkow2003, + Author = {Volkow, N. D.}, + Journal = {Sci Pract Perspect}, + Month = {Aug}, + Pages = {2}, + Title = {{{B}lending practice and research: a potent catalyst for progress}}, + Volume = {2}, + Year = {2003}} + +@article{Volkow2001b, + Author = {Volkow, N. D.}, + Journal = {Am J Psychiatry}, + Month = {Aug}, + Pages = {1181--1183}, + Title = {{{D}rug abuse and mental illness: progress in understanding comorbidity}}, + Volume = {158}, + Year = {2001}} + +@article{Volkow2001DA, + Author = {Volkow, N. D. and Chang, L. and Wang, GJ and Fowler, JS and Leonido-Yee, M. and Franceschi, D. and Sedler, M. and Gatley, SJ and Hitzemann, R. and Ding, YS and others}, + Journal = {Am J Psychiatry}, + Pages = {377--382}, + Title = {{Dopamine transporter losses in methamphetamine abusers are associated with psychomotor impairment}}, + Volume = {158}, + Year = {2001}} + +@article{Volkow2001, + Author = {Volkow, N. D. and Chang, L. and Wang, G. J. and Fowler, J. S. and Ding, Y. S. and Sedler, M. and Logan, J. and Franceschi, D. and Gatley, J. and Hitzemann, R. and Gifford, A. and Wong, C. and Pappas, N.}, + Journal = {Am J Psychiatry}, + Month = {Dec}, + Pages = {2015--2021}, + Title = {{{L}ow level of brain dopamine {D}2 receptors in methamphetamine abusers: association with metabolism in the orbitofrontal cortex}}, + Volume = {158}, + Year = {2001}} + +@article{Volkow2001a, + Author = {Volkow, N. D. and Chang, L. and Wang, G. J. and Fowler, J. S. and Franceschi, D. and Sedler, M. and Gatley, S. J. and Miller, E. and Hitzemann, R. and Ding, Y. S. and Logan, J.}, + Journal = {J. Neurosci.}, + Month = {Dec}, + Pages = {9414--9418}, + Title = {{{L}oss of dopamine transporters in methamphetamine abusers recovers with protracted abstinence}}, + Volume = {21}, + Year = {2001}} + +@article{Volkow2001d, + Author = {Volkow, N. D. and Chang, L. and Wang, G. J. and Fowler, J. S. and Franceschi, D. and Sedler, M. J. and Gatley, S. J. and Hitzemann, R. and Ding, Y. S. and Wong, C. and Logan, J.}, + Journal = {Am J Psychiatry}, + Month = {Mar}, + Pages = {383--389}, + Title = {{{H}igher cortical and lower subcortical metabolism in detoxified methamphetamine abusers}}, + Volume = {158}, + Year = {2001}} + +@article{Volkow2001e, + Author = {Volkow, N. D. and Chang, L. and Wang, G. J. and Fowler, J. S. and Leonido-Yee, M. and Franceschi, D. and Sedler, M. J. and Gatley, S. J. and Hitzemann, R. and Ding, Y. S. and Logan, J. and Wong, C. and Miller, E. N.}, + Journal = {Am J Psychiatry}, + Month = {Mar}, + Pages = {377--382}, + Title = {{{A}ssociation of dopamine transporter reduction with psychomotor impairment in methamphetamine abusers}}, + Volume = {158}, + Year = {2001}} + +@article{Volkow2001c, + Author = {Volkow, N. D. and Ding, Y. S. and Fowler, J. S. and Gatley, S. J.}, + Journal = {Biol. 
Psychiatry}, + Month = {Feb}, + Pages = {211--220}, + Title = {{{I}maging brain cholinergic activity with positron emission tomography: its role in the evaluation of cholinergic treatments in {A}lzheimer's dementia}}, + Volume = {49}, + Year = {2001}} + +@article{Volkow2000e, + Author = {Volkow, N. D. and Fowler, J. S.}, + Journal = {Cereb. Cortex}, + Month = {Mar}, + Pages = {318--325}, + Title = {{{A}ddiction, a disease of compulsion and drive: involvement of the orbitofrontal cortex}}, + Volume = {10}, + Year = {2000}} + +@article{Volkow1999, + Author = {Volkow, N. D. and Fowler, J. S. and Ding, Y. S. and Wang, G. J. and Gatley, S. J.}, + Journal = {Nicotine Tob. Res.}, + Pages = {S127--132}, + Title = {{{I}maging the neurochemistry of nicotine actions: studies with positron emission tomography}}, + Volume = {1 Suppl 2}, + Year = {1999}} + +@article{Volkow1999e, + Author = {Volkow, N. D. and Fowler, J. S. and Gatley, S. J. and Dewey, S. L. and Wang, G. J. and Logan, J. and Ding, Y. S. and Franceschi, D. and Gifford, A. and Morgan, A. and Pappas, N. and King, P.}, + Journal = {Synapse}, + Month = {Jan}, + Pages = {59--66}, + Title = {{{C}omparable changes in synaptic dopamine induced by methylphenidate and by cocaine in the baboon brain}}, + Volume = {31}, + Year = {1999}} + +@article{Volkow2009c, + Author = {Volkow, N. D. and Fowler, J. S. and Logan, J. and Alexoff, D. and Zhu, W. and Telang, F. and Wang, G. J. and Jayne, M. and Hooker, J. M. and Wong, C. and Hubbard, B. and Carter, P. and Warner, D. and King, P. and Shea, C. and Xu, Y. and Muench, L. and Apelskog-Torres, K.}, + Journal = {JAMA}, + Month = {Mar}, + Pages = {1148--1154}, + Title = {{{E}ffects of modafinil on dopamine and dopamine transporters in the male human brain: clinical implications}}, + Volume = {301}, + Year = {2009}} + +@article{Volkow2002, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. and Ding, Y. and Gatley, S. J.}, + Journal = {J Atten Disord}, + Pages = {31--43}, + Title = {{{M}echanism of action of methylphenidate: insights from {P}{E}{T} imaging studies}}, + Volume = {6 Suppl 1}, + Year = {2002}} + +@article{Volkow2004d, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J.}, + Journal = {Neuropharmacology}, + Pages = {3--13}, + Title = {{{T}he addicted human brain viewed in the light of imaging studies: brain circuits and treatment strategies}}, + Volume = {47 Suppl 1}, + Year = {2004}} + +@article{Volkow2003d, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J.}, + Journal = {Semin Nucl Med}, + Month = {Apr}, + Pages = {114--128}, + Title = {{{P}ositron emission tomography and single-photon emission computed tomography in substance abuse research}}, + Volume = {33}, + Year = {2003}} + +@article{Volkow2003e, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J.}, + Journal = {J. Clin. Invest.}, + Month = {May}, + Pages = {1444--1451}, + Title = {{{T}he addicted human brain: insights from imaging studies}}, + Volume = {111}, + Year = {2003}} + +@article{Volkow2002d, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J.}, + Journal = {Behav Pharmacol}, + Month = {Sep}, + Pages = {355--366}, + Title = {{{R}ole of dopamine in drug reinforcement and addiction in humans: results from imaging studies}}, + Volume = {13}, + Year = {2002}} + +@article{Volkow1999a, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J.}, + Journal = {J. Psychopharmacol. 
(Oxford)}, + Month = {Dec}, + Pages = {337--345}, + Title = {{{I}maging studies on the role of dopamine in cocaine reinforcement and addiction in humans}}, + Volume = {13}, + Year = {1999}} + +@article{Volkow2009e, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Baler, R. and Telang, F.}, + Journal = {Neuropharmacology}, + Pages = {3--8}, + Title = {{{I}maging dopamine's role in drug abuse and addiction}}, + Volume = {56 Suppl 1}, + Year = {2009}} + +@article{Volkow2002c, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Ding, Y. S. and Gatley, S. J.}, + Journal = {Eur Neuropsychopharmacol}, + Month = {Dec}, + Pages = {557--566}, + Title = {{{R}ole of dopamine in the therapeutic and reinforcing effects of methylphenidate in humans: results from imaging studies}}, + Volume = {12}, + Year = {2002}} + +@article{Volkow2002a, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Goldstein, R. Z.}, + Journal = {Neurobiol Learn Mem}, + Month = {Nov}, + Pages = {610--624}, + Title = {{{R}ole of dopamine, the frontal cortex and memory circuits in drug addiction: insight from imaging studies}}, + Volume = {78}, + Year = {2002}} + +@article{Volkow2004f, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Swanson, J. M.}, + Journal = {Mol. Psychiatry}, + Month = {Jun}, + Pages = {557--569}, + Title = {{{D}opamine in drug abuse and addiction: results from imaging studies and treatment implications}}, + Volume = {9}, + Year = {2004}} + +@article{Volkow2007b, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Swanson, J. M. and Telang, F.}, + Journal = {Arch. Neurol.}, + Month = {Nov}, + Pages = {1575--1579}, + Title = {{{D}opamine in drug abuse and addiction: results of imaging studies and treatment implications}}, + Volume = {64}, + Year = {2007}} + +@article{Volkow2008e, + Author = {Volkow, N. D. and Fowler, J. S. and Wang, G. J. and Telang, F. and Logan, J. and Wong, C. and Ma, J. and Pradhan, K. and Benveniste, H. and Swanson, J. M.}, + Journal = {PLoS ONE}, + Pages = {e2017}, + Title = {{{M}ethylphenidate decreased the amount of glucose needed by the brain to perform a cognitive task}}, + Volume = {3}, + Year = {2008}} + +@article{Volkow1991, + Abstract = {OBJECTIVE: The authors investigated changes in brain function associated + with cocaine dependence and withdrawal to provide clues regarding + the processes that lead to the uncontrollable self-administration + of cocaine. METHOD: They measured regional brain metabolism with + [18F]-fluorodeoxyglucose (FDG) and positron emission tomography in + 15 outpatients with the diagnosis of cocaine abuse and 17 normal + comparison subjects. Ten of the patients were studied less than 1 + week after they had last had cocaine, and five were studied 2-4 weeks + after withdrawal. RESULTS: Patients studied within 1 week of cocaine + withdrawal but not those studied within 2-4 weeks of cocaine withdrawal + had higher levels of global brain metabolism as well as higher levels + of regional brain metabolism in the basal ganglia and orbitofrontal + cortex than did normal subjects, probably as a consequence of less + brain dopamine activity. There was also a significant relationship + between the number of days since cocaine withdrawal and regional + brain glucose metabolism in the orbitofrontal cortex and in the basal + ganglia, and the correlations between cocaine craving and metabolic + activity were significant in the prefrontal cortex and the orbitofrontal + cortex. 
CONCLUSIONS: Although the time-dependent fall in metabolic + activity suggests that the higher metabolic activity observed less + than a week after cocaine withdrawal may represent a nonspecific + expression of drug withdrawal, the selectivity of changes in glucose + metabolism for the basal ganglia and for the orbitofrontal cortex + suggests that the regional metabolic changes seen in cocaine abusers + during detoxification are related to changes in brain dopamine activity.}, + Author = {N. D. Volkow and J. S. Fowler and A. P. Wolf and R. Hitzemann and S. Dewey and B. Bendriem and R. Alpert and A. Hoff}, + Institution = {Medical Department Brookhaven National Laboratory, Upton, NY 11973.}, + Journal = {Am J Psychiatry}, + Keywords = {Adolescent; Adult; Ambulatory Care; Basal Ganglia, metabolism; Brain, metabolism; Cocaine, adverse effects; Deoxyglucose, analogs /&/ derivatives/diagnostic use/metabolism; Dopamine, metabolism; Fluorodeoxyglucose F18; Frontal Lobe, metabolism; Glucose, metabolism; Humans; Male; Occipital Lobe, metabolism; Regression Analysis; Substance Withdrawal Syndrome, diagnosis/etiology/metabolism; Substance-Related Disorders, diagnosis/metabolism; Time Factors; Tomography, Emission-Computed}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {621--626}, + Pmid = {2018164}, + Timestamp = {2009.08.05}, + Title = {Changes in brain glucose metabolism in cocaine dependence and withdrawal.}, + Volume = {148}, + Year = {1991}} + +@article{Volkow2000c, + Author = {Volkow, N. D. and Gatley, S. J. and Fowler, J. S. and Wang, G. J. and Swanson, J.}, + Journal = {Science}, + Month = {Apr}, + Pages = {11}, + Title = {{{S}erotonin and the therapeutic effects of ritalin}}, + Volume = {288}, + Year = {2000}} + +@article{Volkow1998c, + Author = {Volkow, N. D. and Gur, R. C. and Wang, G. J. and Fowler, J. S. and Moberg, P. J. and Ding, Y. S. and Hitzemann, R. and Smith, G. and Logan, J.}, + Journal = {Am J Psychiatry}, + Month = {Mar}, + Pages = {344--349}, + Title = {{{A}ssociation between decline in brain dopamine activity with age and cognitive and motor impairment in healthy individuals}}, + Volume = {155}, + Year = {1998}} + +@article{Volkow1992, + Author = {Volkow, N. D. and Hitzemann, R. and Wang, G.J. and Fowler, J.S. and Wolf, A.P. and Dewey, S.L. and Handlesman, L.}, + Journal = {Synapse}, + Number = {3}, + Publisher = {Wiley Subscription Services, Inc., A Wiley Company Hoboken}, + Title = {{Long-term frontal brain metabolic changes in cocaine abusers}}, + Volume = {11}, + Year = {1992}} + +@article{Volkow2003a, + Author = {Volkow, N. D. and Insel, T. R.}, + Journal = {Biol. Psychiatry}, + Month = {Dec}, + Pages = {1307--1309}, + Title = {{{W}hat are the long-term effects of methylphenidate treatment?}}, + Volume = {54}, + Year = {2003}} + +@article{Volkow2005a, + Author = {Volkow, N. D. and Li, T. K.}, + Journal = {Pharmacol. Ther.}, + Month = {Oct}, + Pages = {3--17}, + Title = {{{D}rugs and alcohol: treating and preventing abuse, addiction and their medical consequences}}, + Volume = {108}, + Year = {2005}} + +@article{Volkow2004c, + Author = {Volkow, N. D. and Li, T. K.}, + Journal = {Nat. Rev. Neurosci.}, + Month = {Dec}, + Pages = {963--970}, + Title = {{{D}rug addiction: the neurobiology of behaviour gone awry}}, + Volume = {5}, + Year = {2004}} + +@article{Volkow2000f, + Author = {Volkow, N. D. and Logan, J. and Fowler, J. S. and Wang, G. J. and Gur, R. C. and Wong, C. and Felder, C. and Gatley, S. J. 
and Ding, Y. S. and Hitzemann, R. and Pappas, N.}, + Journal = {Am J Psychiatry}, + Month = {Jan}, + Pages = {75--80}, + Title = {{{A}ssociation between age-related decline in brain dopamine activity and impairment in frontal and cingulate metabolism}}, + Volume = {157}, + Year = {2000}} + +@article{Volkow2008f, + Author = {Volkow, N. D. and Ma, Y. and Zhu, W. and Fowler, J. S. and Li, J. and Rao, M. and Mueller, K. and Pradhan, K. and Wong, C. and Wang, G. J.}, + Journal = {Psychiatry Res}, + Month = {Apr}, + Pages = {205--213}, + Title = {{{M}oderate doses of alcohol disrupt the functional organization of the human brain}}, + Volume = {162}, + Year = {2008}} + +@article{Volkow1988, + Abstract = {Regional distribution of cerebral blood flow was assessed in a group + of 13 normal social drinkers under baseline conditions and after + acute alcohol intoxication. Blood flow measurements were done using + 15O-labeled water and positron emission tomography (PET). Each subject + underwent two control sessions under baseline conditions and two + sessions after alcohol. Seven of the subjects were given 0.5 g/kg + of alcohol and six were given 1 g/kg of alcohol p.o. The first and + second post-alcohol scans were done 40 and 60 min after alcohol ingestion. + The studies revealed that both the high and the low doses of alcohol + reduced blood flow to the cerebellum. This effect was significant + only for the high doses of alcohol, which also increased blood flow + to the right temporal and the prefrontal cortex. The decrease in + blood flow of the cerebellum could account for the muscular incoordination + induced by alcohol.}, + Author = {N. D. Volkow and N. Mullani and L. Gould and S. S. Adler and R. W. Guynn and J. E. Overall and S. Dewey}, + Institution = {Department of Psychiatry and Behavioral Sciences, University of Texas Health Science Center, Houston.}, + Journal = {Psychiatry Res}, + Keywords = {Adult; Brain, blood supply/radionuclide imaging; Cerebrovascular Circulation, drug effects; Dose-Response Relationship, Drug; Ethanol, pharmacology; Humans; Male; Tomography, Emission-Computed}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {May}, + Number = {2}, + Owner = {Woo-Young Ahn}, + Pages = {201--209}, + Pii = {0165-1781(88)90063-7}, + Pmid = {3261427}, + Timestamp = {2009.08.04}, + Title = {Effects of acute alcohol intoxication on cerebral blood flow measured with PET.}, + Volume = {24}, + Year = {1988}} + +@article{Volkow2007e, + Author = {Volkow, N. D. and O'Brien, C. P.}, + Journal = {Am J Psychiatry}, + Month = {May}, + Pages = {708--710}, + Title = {{{I}ssues for {D}{S}{M}-{V}: should obesity be included as a brain disorder?}}, + Volume = {164}, + Year = {2007}} + +@article{Volkow2008d, + Author = {Volkow, N. D. and Swanson, J. M.}, + Journal = {Am J Psychiatry}, + Month = {May}, + Pages = {553--555}, + Title = {{{D}oes childhood treatment of {A}{D}{H}{D} with stimulant medication affect substance abuse in adulthood?}}, + Volume = {165}, + Year = {2008}} + +@article{Volkow2008g, + Author = {Volkow, N. D. and Swanson, J. M.}, + Journal = {Nature}, + Month = {Jan}, + Pages = {520}, + Title = {{{T}he action of enhancers can lead to addiction}}, + Volume = {451}, + Year = {2008}} + +@article{Volkow2003c, + Author = {Volkow, N. D. and Swanson, J. 
M.}, + Journal = {Am J Psychiatry}, + Month = {Nov}, + Pages = {1909--1918}, + Title = {{{V}ariables that affect the clinical use and abuse of methylphenidate in the treatment of {A}{D}{H}{D}}}, + Volume = {160}, + Year = {2003}} + +@article{Volkow2009a, + Author = {Volkow, N. D. and Tomasi, D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Wang, R. L. and Logan, J. and Wong, C. and Jayne, M. and Swanson, J. M.}, + Journal = {Neuroimage}, + Month = {May}, + Pages = {1232--1240}, + Title = {{{H}yperstimulation of striatal {D}2 receptors with sleep deprivation: {I}mplications for cognitive impairment}}, + Volume = {45}, + Year = {2009}} + +@article{volkow1997decreased, + Author = {Volkow, N. D. and Wang, G.J. and Fowler, JS and Logan, J. and Gatley, SJ and Hitzemann, R. and Chen, AD and Dewey, SL and Pappas, N.}, + Journal = {Nature}, + Pages = {830--833}, + Publisher = {Nature Publishing Group}, + Title = {{Decreased striatal dopaminergic responsiveness in detoxified cocaine-dependent subjects}}, + Volume = {386}, + Year = {1997}} + +@article{Volkow2001f, + Author = {Volkow, N. D. and Wang, G. and Fowler, J. S. and Logan, J. and Gerasimov, M. and Maynard, L. and Ding, Y. and Gatley, S. J. and Gifford, A. and Franceschi, D.}, + Journal = {J. Neurosci.}, + Month = {Jan}, + Pages = {RC121}, + Title = {{{T}herapeutic doses of oral methylphenidate significantly increase extracellular dopamine in the human brain}}, + Volume = {21}, + Year = {2001}} + +@article{Volkow2006, + Author = {Volkow, N. D. and Wang, G. J. and Begleiter, H. and Porjesz, B. and Fowler, J. S. and Telang, F. and Wong, C. and Ma, Y. and Logan, J. and Goldstein, R. and Alexoff, D. and Thanos, P. K.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Sep}, + Pages = {999--1008}, + Title = {{{H}igh levels of dopamine {D}2 receptors in unaffected members of alcoholic families: possible protective factors}}, + Volume = {63}, + Year = {2006}} + +@article{Volkow2000b, + Author = {Volkow, N. D. and Wang, G. J. and Fischman, M. W. and Foltin, R. and Fowler, J. S. and Franceschi, D. and Franceschi, M. and Logan, J. and Gatley, S. J. and Wong, C. and Ding, Y. S. and Hitzemann, R. and Pappas, N.}, + Journal = {Life Sci.}, + Month = {Aug}, + Pages = {1507--1515}, + Title = {{{E}ffects of route of administration on cocaine induced dopamine transporter blockade in the human brain}}, + Volume = {67}, + Year = {2000}} + +@article{Volkow2005c, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Ding, Y. S.}, + Journal = {Biol. Psychiatry}, + Month = {Jun}, + Pages = {1410--1415}, + Title = {{{I}maging the effects of methylphenidate on brain dopamine: new model on its therapeutic actions for attention-deficit/hyperactivity disorder}}, + Volume = {57}, + Year = {2005}} + +@article{Volkow1998b, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Ding, Y. S. and Gur, R. C. and Gatley, J. and Logan, J. and Moberg, P. J. and Hitzemann, R. and Smith, G. and Pappas, N.}, + Journal = {Ann. Neurol.}, + Month = {Jul}, + Pages = {143--147}, + Title = {{{P}arallel loss of presynaptic and postsynaptic dopamine markers in normal aging}}, + Volume = {44}, + Year = {1998}} + +@article{Volkow1999d, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Fischman, M. and Foltin, R. and Abumrad, N. N. and Gatley, S. J. and Logan, J. and Wong, C. and Gifford, A. and Ding, Y. S. and Hitzemann, R.
and Pappas, N.}, + Journal = {Life Sci.}, + Pages = {7--12}, + Title = {{{M}ethylphenidate and cocaine have a similar in vivo potency to block dopamine transporters in the human brain}}, + Volume = {65}, + Year = {1999}} + +@article{Volkow2000d, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Franceschi, D. and Thanos, P. K. and Wong, C. and Gatley, S. J. and Ding, Y. S. and Molina, P. and Schlyer, D. and Alexoff, D. and Hitzemann, R. and Pappas, N.}, + Journal = {Life Sci.}, + Month = {Feb}, + Pages = {L161--167}, + Title = {{{C}ocaine abusers show a blunted response to alcohol intoxication in limbic brain regions}}, + Volume = {66}, + Year = {2000}} + +@article{Volkow1999g, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Gatley, S. J. and Logan, J. and Ding, Y. S. and Dewey, S. L. and Hitzemann, R. and Gifford, A. N. and Pappas, N. R.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Jan}, + Pages = {14--20}, + Title = {{{B}lockade of striatal dopamine transporters by intravenous methylphenidate is not sufficient to induce self-reports of "high"}}, + Volume = {288}, + Year = {1999}} + +@article{Volkow1998, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Gatley, S. J. and Logan, J. and Ding, Y. S. and Hitzemann, R. and Pappas, N.}, + Journal = {Am J Psychiatry}, + Month = {Oct}, + Pages = {1325--1331}, + Title = {{{D}opamine transporter occupancies in the human brain induced by therapeutic doses of oral methylphenidate}}, + Volume = {155}, + Year = {1998}} + +@article{Volkow1999f, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Hitzemann, R. and Angrist, B. and Gatley, S. J. and Logan, J. and Ding, Y. S. and Pappas, N.}, + Journal = {Am J Psychiatry}, + Month = {Jan}, + Pages = {19--26}, + Title = {{{A}ssociation of methylphenidate-induced craving with changes in right striato-orbitofrontal metabolism in cocaine abusers: implications in addiction}}, + Volume = {156}, + Year = {1999}} + +@article{Volkow1998a, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Hitzemann, R. and Gatley, J. and Ding, Y. S. and Wong, C. and Pappas, N.}, + Journal = {Psychiatry Res}, + Month = {Jul}, + Pages = {29--36}, + Title = {{{D}ifferences in regional brain metabolic responses between single and repeated doses of methylphenidate}}, + Volume = {83}, + Year = {1998}} + +@article{Volkow2005f, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Learned-Coughlin, S. and Yang, J. and Logan, J. and Schlyer, D. and Gatley, J. S. and Wong, C. and Zhu, W. and Pappas, N. and Schueller, M. and Jayne, M. and Carter, P. and Warner, D. and Ding, Y. S. and Shea, C. and Xu, Y.}, + Journal = {Biol. Psychiatry}, + Month = {Mar}, + Pages = {640--646}, + Title = {{{T}he slow and long-lasting blockade of dopamine transporters in human brain induced by the new antidepressant drug radafaxine predict poor reinforcing effects}}, + Volume = {57}, + Year = {2005}} + +@article{Volkow2002h, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. and Franceschi, D. and Maynard, L. and Ding, Y. S. and Gatley, S. J. and Gifford, A. and Zhu, W. and Swanson, J. M.}, + Journal = {Synapse}, + Month = {Mar}, + Pages = {181--187}, + Title = {{{R}elationship between blockade of dopamine transporters by oral methylphenidate and the increases in extracellular dopamine: therapeutic implications}}, + Volume = {43}, + Year = {2002}} + +@article{Volkow2000a, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. 
and Gatley, J. S. and Pappas, N. R. and Wong, C. T. and Felder, C.}, + Journal = {Life Sci.}, + Month = {Sep}, + Pages = {2213--2220}, + Title = {{{I}ncreased activity of the temporal insula in subjects with bradycardia}}, + Volume = {67}, + Year = {2000}} + +@article{Volkow1999c, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. and Gatley, S. J. and Gifford, A. and Hitzemann, R. and Ding, Y. S. and Pappas, N.}, + Journal = {Am J Psychiatry}, + Month = {Sep}, + Pages = {1440--1443}, + Title = {{{P}rediction of reinforcing responses to psychostimulants in humans by brain dopamine {D}2 receptor levels}}, + Volume = {156}, + Year = {1999}} + +@article{Volkow1999b, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. and Gatley, S. J. and Wong, C. and Hitzemann, R. and Pappas, N. R.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Oct}, + Pages = {409--415}, + Title = {{{R}einforcing effects of psychostimulants in humans are associated with increases in brain dopamine and occupancy of {D}(2) receptors}}, + Volume = {291}, + Year = {1999}} + +@article{Volkow2002f, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Logan, J. and Jayne, M. and Franceschi, D. and Wong, C. and Gatley, S. J. and Gifford, A. N. and Ding, Y. S. and Pappas, N.}, + Journal = {Synapse}, + Month = {Jun}, + Pages = {175--180}, + Title = {{"{N}onhedonic" food motivation in humans involves dopamine in the dorsal striatum and methylphenidate amplifies this effect}}, + Volume = {44}, + Year = {2002}} + +@article{Volkow2003g, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Molina, P. E. and Logan, J. and Gatley, S. J. and Gifford, A. and Ding, Y. S. and Wong, C. and Pappas, N. R. and Zhu, W. and Swanson, J. M.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Mar}, + Pages = {264--270}, + Title = {{{C}ardiovascular effects of methylphenidate in humans are associated with increases of dopamine in brain and of epinephrine in plasma}}, + Volume = {166}, + Year = {2003}} + +@article{Volkow2000, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Rooney, W. D. and Felder, C. A. and Lee, J. H. and Franceschi, D. and Maynard, L. and Schlyer, D. J. and Pan, J. W. and Gatley, S. J. and Springer Jr, C. S.}, + Journal = {Magn Reson Med}, + Month = {Nov}, + Pages = {701--705}, + Title = {{{R}esting brain metabolic activity in a 4 tesla magnetic field}}, + Volume = {44}, + Year = {2000}} + +@article{Volkow2008b, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Telang, F.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Oct}, + Pages = {3191--3200}, + Title = {{{O}verlapping neuronal circuits in addiction and obesity: evidence of systems pathology}}, + Volume = {363}, + Year = {2008}} + +@article{Volkow2007f, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Telang, F. and Jayne, M. and Wong, C.}, + Journal = {Am J Psychiatry}, + Month = {Jan}, + Pages = {157--160}, + Title = {{{S}timulant-induced enhanced sexual desire as a potential contributing factor in {H}{I}{V} transmission}}, + Volume = {164}, + Year = {2007}} + +@article{Volkow2004e, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Telang, F. and Maynard, L. and Logan, J. and Gatley, S. J. and Pappas, N. and Wong, C. and Vaska, P. and Zhu, W. and Swanson, J. 
M.}, + Journal = {Am J Psychiatry}, + Month = {Jul}, + Pages = {1173--1180}, + Title = {{{E}vidence that methylphenidate enhances the saliency of a mathematical task by increasing dopamine in the human brain}}, + Volume = {161}, + Year = {2004}} + +@article{Volkow2002e, + Author = {Volkow, N. D. and Wang, G. J. and Fowler, J. S. and Thanos, P. P. and Logan, J. and Gatley, S. J. and Gifford, A. and Ding, Y. S. and Wong, C. and Pappas, N. and Thanos, P.}, + Journal = {Synapse}, + Month = {Nov}, + Pages = {79--82}, + Title = {{{B}rain {D}{A} {D}2 receptors predict reinforcing effects of stimulants in humans: replication study}}, + Volume = {46}, + Year = {2002}} + +@article{Volkow2006e, + Author = {Volkow, N. D. and Wang, G. J. and Franceschi, D. and Fowler, J. S. and Thanos, P. P. and Maynard, L. and Gatley, S. J. and Wong, C. and Veech, R. L. and Kunos, G. and Kai Li, T.}, + Journal = {Neuroimage}, + Month = {Jan}, + Pages = {295--301}, + Title = {{{L}ow doses of alcohol substantially decrease glucose metabolism in the human brain}}, + Volume = {29}, + Year = {2006}} + +@article{Volkow2005e, + Author = {Volkow, N. D. and Wang, G. J. and Ma, Y. and Fowler, J. S. and Wong, C. and Ding, Y. S. and Hitzemann, R. and Swanson, J. M. and Kalivas, P.}, + Journal = {J. Neurosci.}, + Month = {Apr}, + Pages = {3932--3939}, + Title = {{{A}ctivation of orbital and medial prefrontal cortex by methylphenidate in cocaine-addicted subjects but not in controls: relevance to addiction}}, + Volume = {25}, + Year = {2005}} + +@article{Volkow2006c, + Author = {Volkow, N. D. and Wang, G. J. and Ma, Y. and Fowler, J. S. and Wong, C. and Jayne, M. and Telang, F. and Swanson, J. M.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {1782--1792}, + Title = {{{E}ffects of expectation on the brain metabolic responses to methylphenidate and to its placebo in non-drug abusing subjects}}, + Volume = {32}, + Year = {2006}} + +@article{Volkow2003b, + Author = {Volkow, N. D. and Wang, G. J. and Ma, Y. and Fowler, J. S. and Zhu, W. and Maynard, L. and Telang, F. and Vaska, P. and Ding, Y. S. and Wong, C. and Swanson, J. M.}, + Journal = {J. Neurosci.}, + Month = {Dec}, + Pages = {11461--11468}, + Title = {{{E}xpectation enhances the regional brain metabolic and the reinforcing effects of stimulants in cocaine abusers}}, + Volume = {23}, + Year = {2003}} + +@article{Volkow2002b, + Author = {Volkow, N. D. and Wang, G. J. and Maynard, L. and Fowler, J. S. and Jayne, B. and Telang, F. and Logan, J. and Ding, Y. S. and Gatley, S. J. and Hitzemann, R. and Wong, C. and Pappas, N.}, + Journal = {Psychiatry Res}, + Month = {Dec}, + Pages = {163--172}, + Title = {{{E}ffects of alcohol detoxification on dopamine {D}2 receptors in alcoholics: a preliminary study}}, + Volume = {116}, + Year = {2002}} + +@article{Volkow2003f, + Author = {Volkow, N. D. and Wang, G. J. and Maynard, L. and Jayne, M. and Fowler, J. S. and Zhu, W. and Logan, J. and Gatley, S. J. and Ding, Y. S. and Wong, C. and Pappas, N.}, + Journal = {Int J Eat Disord}, + Month = {Mar}, + Pages = {136--142}, + Title = {{{B}rain dopamine is associated with eating behaviors in humans}}, + Volume = {33}, + Year = {2003}} + +@article{Volkow2007g, + Author = {Volkow, N. D. and Wang, G. J. and Newcorn, J. and Fowler, J. S. and Telang, F. and Solanto, M. V. and Logan, J. and Wong, C. and Ma, Y. and Swanson, J. M. and Schulz, K. 
and Pradhan, K.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {1182--1190}, + Title = {{{B}rain dopamine transporter levels in treatment and drug na\"{i}ve adults with {A}{D}{H}{D}}}, + Volume = {34}, + Year = {2007}} + +@article{Volkow2007d, + Author = {Volkow, N. D. and Wang, G. J. and Newcorn, J. and Telang, F. and Solanto, M. V. and Fowler, J. S. and Logan, J. and Ma, Y. and Schulz, K. and Pradhan, K. and Wong, C. and Swanson, J. M.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Aug}, + Pages = {932--940}, + Title = {{{D}epressed dopamine activity in caudate and preliminary evidence of limbic involvement in adults with attention-deficit/hyperactivity disorder}}, + Volume = {64}, + Year = {2007}} + +@article{Volkow2009d, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Goldstein, R. Z. and Alia-Klein, N. and Logan, J. and Wong, C. and Thanos, P. K. and Ma, Y. and Pradhan, K.}, + Journal = {Obesity (Silver Spring)}, + Month = {Jan}, + Pages = {60--65}, + Title = {{{I}nverse association between {B}{M}{I} and prefrontal metabolic activity in healthy adults}}, + Volume = {17}, + Year = {2009}} + +@article{Volkow2008h, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Logan, J. and Childress, A. R. and Jayne, M. and Ma, Y. and Wong, C.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {1266--1273}, + Title = {{{D}opamine increases in striatum do not elicit craving in cocaine abusers unless they are coupled with cocaine cues}}, + Volume = {39}, + Year = {2008}} + +@article{Volkow2006b, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Logan, J. and Childress, A. R. and Jayne, M. and Ma, Y. and Wong, C.}, + Journal = {J. Neurosci.}, + Month = {Jun}, + Pages = {6583--6588}, + Title = {{{C}ocaine cues and dopamine in dorsal striatum: mechanism of craving in cocaine addiction}}, + Volume = {26}, + Year = {2006}} + +@article{Volkow2007a, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Logan, J. and Jayne, M. and Ma, Y. and Pradhan, K. and Wong, C.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {12700--12706}, + Title = {{{P}rofound decreases in dopamine release in striatum in detoxified alcoholics: possible orbitofrontal involvement}}, + Volume = {27}, + Year = {2007}} + +@article{Volkow2008a, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Logan, J. and Wong, C. and Ma, J. and Pradhan, K. and Tomasi, D. and Thanos, P. K. and Ferr\'{e}, S. and Jayne, M.}, + Journal = {J.
Neurosci.}, + Month = {Aug}, + Pages = {8454--8461}, + Title = {{{S}leep deprivation decreases binding of [11{C}]raclopride to dopamine {D}2/{D}3 receptors in the human brain}}, + Volume = {28}, + Year = {2008}} + +@article{Volkow2008c, + Author = {Volkow, N. D. and Wang, G. J. and Telang, F. and Fowler, J. S. and Thanos, P. K. and Logan, J. and Alexoff, D. and Ding, Y. S. and Wong, C. and Ma, Y. and Pradhan, K.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {1537--1543}, + Title = {{{L}ow dopamine striatal {D}2 receptors are associated with prefrontal metabolism in obese subjects: possible contributing factors}}, + Volume = {42}, + Year = {2008}} + +@article{Volkow2005d, + Author = {Volkow, N. D. and Wise, R. A.}, + Journal = {Nat. Neurosci.}, + Month = {May}, + Pages = {555--560}, + Title = {{{H}ow can drug addiction help us understand obesity?}}, + Volume = {8}, + Year = {2005}} + +@article{Volkow2002g, + Author = {Volkow, N. D. and Zhu, W. and Felder, C. A. and Mueller, K. and Welsh, T. F. and Wang, G. J. and de Leon, M. J.}, + Journal = {Psychiatry Res}, + Month = {Feb}, + Pages = {39--50}, + Title = {{{C}hanges in brain functional homogeneity in subjects with {A}lzheimer's disease}}, + Volume = {114}, + Year = {2002}} + +@article{Vollenweider2005, + Author = {Vollenweider, F. X. and Liechti, M. E. and Paulus, M. P.}, + Journal = {J. Psychopharmacol. (Oxford)}, + Month = {Jul}, + Pages = {366--374}, + Title = {{{M}{D}{M}{A} affects both error-rate dependent and independent aspects of decision-making in a two-choice prediction task}}, + Volume = {19}, + Year = {2005}} + +@article{Volpicelli2000, + Author = {Volpicelli, J. R. and Markman, I. and Monterosso, J. and Filing, J. and O'Brien, C. P.}, + Journal = {J Subst Abuse Treat}, + Month = {Jan}, + Pages = {41--49}, + Title = {{{P}sychosocially enhanced treatment for cocaine-dependent mothers: evidence of efficacy}}, + Volume = {18}, + Year = {2000}} + +@book{VonNeuman1944, + Author = {Von Neumann, J. and Morgenstern, O.}, + Publisher = {Princeton University Press}, + Title = {{Theory of games and economic behavior}}, + Year = {1944}} + +@article{Voss2004, + Author = {Voss, A. and Rothermund, K. and Voss, J.}, + Journal = {Memory \& Cognition}, + Pages = {1206--1220}, + Title = {Interpreting the Parameters of the Diffusion Model: An Empirical Validation}, + Volume = {32}, + Year = {2004}} + +@article{Vossinpress, + Author = {Voss, A. and Voss, J.}, + Journal = {Behavior Research Methods}, + Pages = {??--??}, + Title = {Fast--dm: A Free Program for Efficient Diffusion Model Analysis}, + Volume = {??}, + Year = {in press}} + +@article{Voss1975, + Author = {Voss, R. F. and Clarke, J.}, + Journal = {Nature}, + Pages = {317--318}, + Title = {`$1/f$' Noise in Music and Speech}, + Volume = {258}, + Year = {1975}} + +@article{Voytek2005, + Author = {Voytek, B. and Berman, S. M. and Hassid, B. D. and Simon, S. L. and Mandelkern, M. A. and Brody, A. L. and Monterosso, J. and Ling, W. and London, E. D.}, + Journal = {Synapse}, + Month = {Aug}, + Pages = {113--115}, + Title = {{{D}ifferences in regional brain metabolism associated with marijuana abuse in methamphetamine abusers}}, + Volume = {57}, + Year = {2005}} + +@article{Vythilingam2009, + Author = {Vythilingam, M. and Nelson, E. E. and Scaramozza, M. and Waldeck, T. and Hazlett, G. and Southwick, S. M. and Pine, D. S. and Drevets, W. and Charney, D. S.
and Ernst, M.}, + Journal = {Psychiatry Res}, + Month = {Apr}, + Pages = {75--77}, + Title = {{{R}eward circuitry in resilience to severe trauma: an f{M}{R}{I} investigation of resilient special forces soldiers}}, + Volume = {172}, + Year = {2009}} + +@article{Wagar2004, + Author = {Wagar, B. M. and Thagard, P.}, + Journal = {Psychological Review}, + Pages = {67--79}, + Title = {Spiking {P}hineas {G}age: {A} Neurocomputational Theory of Cognitive--Affective Integration in Decision Making}, + Volume = {111}, + Year = {2004}} + +@article{Wagenmakersinpress, + Author = {Wagenmakers, E.--J.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {??-??}, + Title = {A Practical Solution to the Pervasive Problems of $p$--Values}, + Volume = {??}, + Year = {in press}} + +@article{Wagenmakers2003, + Author = {Wagenmakers, E.--J.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {580--586}, + Title = {How Many Parameters Does it Take to Fit an Elephant? {B}ook Review of ``{M}odel Selection and Multimodel Inference: {A} Practical Information--Theoretic Approach'', by {K. P. B}urnham and {D. R. A}nderson}, + Volume = {47}, + Year = {2003}} + +@article{Wagenmakersinpressa, + Author = {Wagenmakers, E.--J. and {van der Maas}, H. J. L. and Grasman, R. P. P. P.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {??-??}, + Title = {An {EZ}--Diffusion Model for Response Time and Accuracy}, + Volume = {??}, + Year = {in press}} + +@unpublished{Wagenmakers2006, + Author = {Wagenmakers, E.--J. and {van der Maas}, H. L. J. and Grasman, R. P. P. P.}, + Pages = {??-??}, + Title = {An {EZ}--Diffusion Model for Response Time and Accuracy: Extensions to Biased Starting Points, Model Fitting, and Model Selection. {M}anuscript in preparation.}, + Volume = {??}, + Year = {2006}} + +@article{Wagenmakers2007, + Author = {Wagenmakers, E.--J. and Brown, S.}, + Journal = {Psychological Review}, + Number = {3}, + Pages = {830--841}, + Title = {On the Linear Relation between the Mean and the Standard Deviation of a Response Time Distribution}, + Volume = {114}, + Year = {2007}} + +@article{Wagenmakers2004, + Author = {Wagenmakers, E.--J. and Farrell, S.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {192--196}, + Title = {{AIC} Model Selection Using {A}kaike Weights}, + Volume = {11}, + Year = {2004}} + +@article{Wagenmakers2005, + Author = {Wagenmakers, E.--J. and Farrell, S. and Ratcliff, R.}, + Journal = {Journal of Experimental Psychology: General}, + Pages = {108--116}, + Title = {Human Cognition and a Pile of Sand: A Discussion on Serial Correlations and Self--organized Criticality}, + Volume = {134}, + Year = {2005}} + +@article{Wagenmakers2004a, + Author = {Wagenmakers, E.--J. and Farrell, S. and Ratcliff, R.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {579--615}, + Title = {Estimation and Interpretation of $1/f^\alpha$ Noise in Human Cognition}, + Volume = {11}, + Year = {2004}} + +@article{Wagenmakers2006a, + Author = {Wagenmakers, E.--J. and Gr\"{u}nwald, P.}, + Journal = {Psychological Science}, + Pages = {641--642}, + Title = {A {B}ayesian Perspective on Hypothesis Testing}, + Volume = {17}, + Year = {2006}} + +@article{Wagenmakers2006b, + Author = {Wagenmakers, E.--J. and Gr\"{u}nwald, P. and Steyvers, M.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {149--166}, + Title = {Accumulative Prediction Error and the Selection of Time Series Models}, + Volume = {50}, + Year = {2006}} + +@article{Wagenmakers2005a, + Author = {Wagenmakers, E.--J. and Grasman, R. P. P. P.
and Molenaar, P. C. M.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {195--204}, + Title = {On the Relation Between the Mean and the Variance of a Diffusion Model Response Time Distribution}, + Volume = {49}, + Year = {2005}} + +@unpublished{Wagenmakers2005b, + Author = {Wagenmakers, E.--J. and Huizinga, H. and Gomez, P. and {van der Maas}, H. J. L.}, + Pages = {??-??}, + Title = {A Diffusion Model Perspective on {IQ}--Related Differences in Response Speed. {M}anuscript submitted for publication.}, + Volume = {??}, + Year = {2005}} + +@book{bnCourse, + Author = {Wagenmakers, Eric-Jan and Lee, Michael D.}, + Owner = {Woo-Young Ahn}, + Timestamp = {2009.08.14}, + Title = {A course in Bayesian Graphical Modeling for Cognitive Science}, + Year = {in preparation}} + +@incollection{Wagenmakers2005c, + Address = {Chichester}, + Author = {Wagenmakers, E.--J. and van der Maas, H. L. J. and Molenaar, P. C. M.}, + Booktitle = {Encyclopedia of Behavioral Statistics}, + Editor = {Everitt, B. and Howell, D.}, + Pages = {234--239}, + Publisher = {Wiley}, + Title = {Fitting the Cusp Catastrophe Model}, + Year = {2005}} + +@article{Wagenmakers2005d, + Author = {Wagenmakers, E.--J. and Molenaar, P. C. M. and Grasman, R. P. P. P. and Hartelman, P. A. I. and {van der Maas}, H. L. J.}, + Journal = {Physica D}, + Pages = {263--276}, + Title = {Transformation Invariant Stochastic Catastrophe Theory}, + Volume = {211}, + Year = {2005}} + +@article{Wagenmakers2005e, + Author = {Wagenmakers, E.--J. and Nieuwenhuis, S.}, + Journal = {Neuropraxis}, + Pages = {165--169}, + Title = {Damasio's error: {D}e somatic marker hypothese onder vuur}, + Volume = {9}, + Year = {2005}} + +@article{Wagenmakers2004b, + Author = {Wagenmakers, E.--J. and Ratcliff, R. and Gomez, P. and Iverson, G. J.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {28--50}, + Title = {Assessing Model Mimicry Using the Parametric Bootstrap}, + Volume = {48}, + Year = {2004}} + +@article{Wagenmakersinpressb, + Author = {Wagenmakers, E.--J. and Ratcliff, R. and Gomez, P. and McKoon, G.}, + Journal = {Journal of Memory and Language}, + Title = {A Diffusion Model Account of Criterion Manipulations in the Lexical Decision Task}, + Year = {in press}} + +@article{Wagenmakers2004c, + Author = {Wagenmakers, E.--J. and Steyvers, M. and Raaijmakers, J. G. W. and Shiffrin, R. M. and {van Rijn}, H. and Zeelenberg, R.}, + Journal = {Cognitive Psychology}, + Pages = {332--367}, + Title = {A Model for Evidence Accumulation in the Lexical Decision Task}, + Volume = {48}, + Year = {2004}} + +@article{Wagenmakers2006c, + Author = {Wagenmakers, E.--J. and Waldorp, L.}, + Journal = {Journal of Mathematical Psychology}, + Number = {2}, + Pages = {--}, + Title = {Model Selection: {T}heoretical Developments and Applications [{S}pecial Issue]}, + Volume = {50}, + Year = {2006}} + +@article{Wager2004, + Author = {Wager, T. D. and Rilling, J. K. and Smith, E. E. and Sokolik, A. and Casey, K. L. and Davidson, R. J. and Kosslyn, S. M. and Rose, R. M. and Cohen, J. D.}, + Journal = {Science}, + Pages = {1162--1167}, + Title = {{{P}lacebo-induced changes in {F}{M}{R}{I} in the anticipation and experience of pain}}, + Volume = {303}, + Year = {2004}} + +@article{Wagner2002, + Abstract = {The focal point of this paper is the transition from drug use to drug + dependence.
We present new evidence on risk for starting to use marijuana, + cocaine, and alcohol, as well as risks for progression from first + drug use to the onset of drug dependence, separately for each of + these drugs. Data from the National Comorbidity Survey (NCS) were + analyzed. The NCS had a representative sample of the United States + population ages 15-54 years (n = 8,098). Survival analysis techniques + were used to provide age- and time-specific risk estimates of initiating + use of marijuana, cocaine, and alcohol, as well as of becoming dependent + on each drug. With respect to risk of initiating use, estimated peak + values for alcohol and marijuana were found at age 18, about two + years earlier than the later peak in risk of initiating cocaine use. + With respect to risk of meeting criteria for the clinical dependence + syndrome, estimated peak values for alcohol and marijuana were found + at age 17-18. Peak values for cocaine dependence were found at age + 23-25. Once use began, cocaine dependence emerged early and more + explosively, with an estimated 5-6\% of cocaine users becoming cocaine + dependent in the first year of use. Most of the observed cases of + cocaine dependence met criteria for dependence within three years + after initial cocaine use. Whereas some 15-16\% of cocaine users + had developed cocaine dependence within 10 years of first cocaine + use, the corresponding values were about 8\% for marijuana users, + and 12-13\% for alcohol users. The most novel findings of this study + document a noteworthy risk for quickly developing cocaine dependence + after initial cocaine use, with about one in 16 to 20 cocaine users + becoming dependent within the first year of cocaine use. For marijuana + and alcohol, there is a more insidious onset of the drug dependence + syndrome.}, + Author = {Fernando A Wagner and James C Anthony}, + Doi = {10.1016/S0893-133X(01)00367-0}, + Institution = {Department of Mental Hygiene, Johns Hopkins University, Bloomberg School of Public Health, Electronic Collaboratory for Investigations about Drugs (ELCID) Rm. 893, 624 N. Broadway, Baltimore, MD 21205, USA.}, + Journal = {Neuropsychopharmacology}, + Keywords = {Adolescent; Adult; Age Factors; Aged; Alcohol Drinking, psychology; Alcoholism, epidemiology/psychology; Child; Cluster Analysis; Cocaine-Related Disorders, epidemiology/psychology; Data Collection; Female; Humans; Male; Marijuana Abuse, epidemiology/psychology; Middle Aged; Psychiatric Status Rating Scales; Questionnaires; Risk; Survival Analysis; United States, epidemiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Apr}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {479--488}, + Pii = {S0893133X01003670}, + Pmid = {11927172}, + Timestamp = {2009.08.06}, + Title = {From first drug use to drug dependence; developmental periods of risk for dependence upon marijuana, cocaine, and alcohol.}, + Url = {http://dx.doi.org/10.1016/S0893-133X(01)00367-0}, + Volume = {26}, + Year = {2002}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/S0893-133X(01)00367-0}} + +@article{Wahrle2005, + Abstract = {Apolipoprotein E (apoE) genotype has a major influence on the risk + for Alzheimer disease (AD). Different apoE isoforms may alter AD + pathogenesis via their interactions with the amyloid beta-peptide + (Abeta). Mice lacking the lipid transporter ABCA1 were found to have + markedly decreased levels and lipidation of apoE in the central nervous + system. 
We hypothesized that if Abca1-/- mice were bred to the PDAPP + mouse model of AD, PDAPP Abca1-/- mice would have a phenotype similar + to that of PDAPP Apoe+/- and PDAPP Apoe-/- mice, which develop less + amyloid deposition than PDAPP Apoe+/+ mice. In contrast to this prediction, + 12-month-old PDAPP Abca1-/- mice had significantly higher levels + of hippocampal Abeta, and cerebral amyloid angiopathy was significantly + more common compared with PDAPP Abca1+/+ mice. Amyloid precursor + protein (APP) C-terminal fragments were not different between Abca1 + genotypes prior to plaque deposition in 3-month-old PDAPP mice, suggesting + that deletion of Abca1 did not affect APP processing or Abeta production. + As expected, 3-month-old PDAPP Abca1-/- mice had decreased apoE levels, + but they also had a higher percentage of carbonate-insoluble apoE, + suggesting that poorly lipidated apoE is less soluble in vivo. We + also found that 12-month-old PDAPP Abca1-/- mice had a higher percentage + of carbonate-insoluble apoE and that apoE deposits co-localize with + amyloid plaques, demonstrating that poorly lipidated apoE co-deposits + with insoluble Abeta. Together, these data suggest that despite substantially + lower apoE levels, poorly lipidated apoE produced in the absence + of ABCA1 is strongly amyloidogenic in vivo.}, + Author = {Suzanne E Wahrle and Hong Jiang and Maia Parsadanian and Richard E Hartman and Kelly R Bales and Steven M Paul and David M Holtzman}, + Doi = {10.1074/jbc.M508780200}, + Institution = {Program in Neurosciences, Department of Neurology, Washington University School of Medicine, St. Louis, Missouri 63110, USA.}, + Journal = {J Biol Chem}, + Keywords = {ATP-Binding Cassette Transporters, genetics/physiology; Alzheimer Disease, genetics/pathology; Amyloid beta-Protein, metabolism; Amyloid, metabolism; Animals; Apolipoproteins E, metabolism; Blotting, Western; Brain, metabolism/pathology; Enzyme-Linked Immunosorbent Assay; Gene Deletion; Heterozygote; Hippocampus, metabolism/pathology; Mice; Mice, Transgenic; Models, Genetic; Protein Structure, Tertiary; Thiazoles, pharmacology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Dec}, + Number = {52}, + Owner = {Young}, + Pages = {43236--43242}, + Pii = {M508780200}, + Pmid = {16207708}, + Timestamp = {2009.12.10}, + Title = {Deletion of Abca1 increases Abeta deposition in the PDAPP transgenic mouse model of Alzheimer disease.}, + Url = {http://dx.doi.org/10.1074/jbc.M508780200}, + Volume = {280}, + Year = {2005}, + Bdsk-Url-1 = {http://dx.doi.org/10.1074/jbc.M508780200}} + +@article{Wahrle2008, + Abstract = {APOE genotype is a major genetic risk factor for late-onset Alzheimer + disease (AD). ABCA1, a member of the ATP-binding cassette family + of active transporters, lipidates apoE in the CNS. Abca1(-/-) mice + have decreased lipid associated with apoE and increased amyloid deposition + in several AD mouse models. We hypothesized that mice overexpressing + ABCA1 in the brain would have increased lipidation of apoE-containing + lipoproteins and decreased amyloid deposition. To address these hypotheses, + we created PrP-mAbca1 Tg mice that overexpress mouse Abca1 throughout + the brain under the control of the mouse prion promoter. We bred + the PrP-mAbca1 mice to the PDAPP AD mouse model, a transgenic line + overexpressing a mutant human amyloid precursor protein.
PDAPP/Abca1 + Tg mice developed a phenotype remarkably similar to that seen in + PDAPP/Apoe(-/-) mice: there was significantly less amyloid beta-peptide + (Abeta) deposition, a redistribution of Abeta to the hilus of the + dentate gyrus in the hippocampus, and an almost complete absence + of thioflavine S-positive amyloid plaques. Analyses of CSF from PrP-mAbca1 + Tg mice and media conditioned by PrP-mAbca1 Tg primary astrocytes + demonstrated increased lipidation of apoE-containing particles. These + data support the conclusions that increased ABCA1-mediated lipidation + of apoE in the CNS can reduce amyloid burden and that increasing + ABCA1 function may have a therapeutic effect on AD.}, + Author = {Suzanne E Wahrle and Hong Jiang and Maia Parsadanian and Jungsu Kim and Aimin Li and Amanda Knoten and Sanjay Jain and Veronica Hirsch-Reinshagen and Cheryl L Wellington and Kelly R Bales and Steven M Paul and David M Holtzman}, + Doi = {10.1172/JCI33622}, + Institution = {Department of Neurology, Washington University School of Medicine, St. Louis, Missouri 63110, USA.}, + Journal = {J Clin Invest}, + Keywords = {ATP-Binding Cassette Transporters, genetics/metabolism; Alzheimer Disease, metabolism; Amyloid beta-Protein, analysis/metabolism; Animals; Apolipoproteins E, metabolism; Astrocytes, metabolism; Brain Chemistry; Brain, metabolism; Disease Models, Animal; Lipoproteins, metabolism; Male; Mice; Mice, Transgenic}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Feb}, + Number = {2}, + Owner = {Young}, + Pages = {671--682}, + Pmid = {18202749}, + Timestamp = {2009.12.10}, + Title = {Overexpression of ABCA1 reduces amyloid deposition in the PDAPP mouse model of Alzheimer disease.}, + Url = {http://dx.doi.org/10.1172/JCI33622}, + Volume = {118}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1172/JCI33622}} + +@article{Wainer1999, + Author = {Wainer, H.}, + Journal = {Psychological Methods}, + Pages = {212--213}, + Title = {One Cheer for Null Hypothesis Significance Testing}, + Volume = {4}, + Year = {1999}} + +@article{Wald1948, + Author = {Wald, A. and Wolfowitz, J.}, + Journal = {Annals of Mathematical Statistics}, + Pages = {326--339}, + Title = {Optimal Character of the Sequential Probability Ratio Test}, + Volume = {19}, + Year = {1948}} + +@article{Wales2001, + Author = {Wales, D. J.}, + Journal = {Science}, + Pages = {2067--2070}, + Title = {A Microscopic Basis for the Global Appearance of Energy Landscapes}, + Volume = {293}, + Year = {2001}} + +@article{Wallace1968, + Author = {Wallace, C. S. and Boulton, D. M.}, + Journal = {The Computer Journal}, + Pages = {185--194}, + Title = {An Information Measure for Classification}, + Volume = {11}, + Year = {1968}} + +@article{Wallace1999, + Author = {Wallace, C. S. and Dowe, D. L.}, + Journal = {The Computer Journal}, + Pages = {330--337}, + Title = {Refinements of {MDL} and {MML} Coding}, + Volume = {42}, + Year = {1999}} + +@article{Wallace1987, + Author = {Wallace, C. S. and Freeman, P. R.}, + Journal = {Journal of the Royal Statistical Society B}, + Pages = {240--265}, + Title = {Estimation and Inference by Compact Coding}, + Volume = {49}, + Year = {1987}} + +@article{Wallis2007, + Author = {Wallis, J.D.}, + Journal = {Annual Review of Neuroscience}, + Publisher = {Annual Reviews}, + Title = {{Orbitofrontal cortex and its contribution to decision-making}}, + Volume = {30}, + Year = {2007}} + +@article{Wallsten2005, + Author = {Wallsten, T. S. and Pleskac, T. J. and Lejuez, C.
W.}, + Journal = {Psychological Review}, + Number = {4}, + Owner = {Wooyoung Ahn}, + Pages = {862--880}, + Timestamp = {2007.04.30}, + Title = {Modeling behavior in a clinically diagnostic sequential risk-taking task}, + Volume = {112}, + Year = {2005}} + +@article{Wallsten2005a, + Author = {Wallsten, T. S. and Pleskac, T. J. and Lejuez, C. W.}, + Journal = {Psychological Review}, + Pages = {862--880}, + Title = {Modeling Behavior in a Clinically Diagnostic Sequential Risk--Taking Task}, + Volume = {112}, + Year = {2005}} + +@article{Walter1989, + Author = {Walter, N. G. and Reise, S. P.}, + Journal = {Journal of Personality and Social Psychology}, + Owner = {Wooyoung Ahn}, + Pages = {1051--1058}, + Timestamp = {2007.04.30}, + Title = {Computerized adaptive personality assessment: An illustration with the Absorption scale}, + Volume = {57}, + Year = {1989}} + +@article{Walton2004, + __Markedentry = {[Woo-Young Ahn]}, + Abstract = {Our ability to judge the consequences of our actions is central to + rational decision making. A large body of evidence implicates primate + prefrontal regions in the regulation of this ability. It has proven + extremely difficult, however, to separate functional areas in the + frontal lobes. Using functional magnetic resonance imaging, we demonstrate + complementary and reciprocal roles for the human orbitofrontal (OFC) + and dorsal anterior cingulate cortices (ACd) in monitoring the outcome + of behavior. Activation levels in these regions were negatively correlated, + with activation increasing in the ACd and decreasing in the OFC when + the selected response was the result of the participant's own decision. + The pattern was reversed when the selected response was guided by + the experimenter rather than the participant. These results indicate + that the neural mechanisms underlying the way we assess the consequences + of choices differ depending on whether we are told what to do or + are able to exercise our volition.}, + Author = {Mark E Walton and Joseph T Devlin and Matthew F S Rushworth}, + Doi = {10.1038/nn1339}, + Institution = {Department of Experimental Psychology, South Parks Road, Oxford, OX1 3UD, England, UK. mark.walton@psy.ox.ac.uk}, + Journal = {Nat Neurosci}, + Keywords = {Adaptation, Psychological; Adult; Brain Mapping; Cues; Decision Making, physiology; Feedback, physiology; Female; Gyrus Cinguli, blood supply/physiology; Humans; Image Processing, Computer-Assisted, methods; Magnetic Resonance Imaging, methods; Male; Neuropsychological Tests; Oxygen, blood; Photic Stimulation, methods; Prefrontal Cortex, anatomy /&/ histology/blood supply/physiology; Psychomotor Performance, physiology; Time Factors}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {11}, + Owner = {Woo-Young Ahn}, + Pages = {1259--1265}, + Pii = {nn1339}, + Pmid = {15494729}, + Timestamp = {2009.08.19}, + Title = {Interactions between decision making and performance monitoring within prefrontal cortex.}, + Url = {http://dx.doi.org/10.1038/nn1339}, + Volume = {7}, + Year = {2004}, + Bdsk-Url-1 = {http://dx.doi.org/10.1038/nn1339}} + +@article{Waltz2007, + Author = {Waltz, J. A. and Frank, M. J. and Robinson, B. M. and Gold, J. M.}, + Journal = {Biol. Psychiatry}, + Month = {Oct}, + Pages = {756--764}, + Title = {{{S}elective reinforcement learning deficits in schizophrenia support predictions from computational models of striatal-cortical dysfunction}}, + Volume = {62}, + Year = {2007}} + +@article{Wang1995, + Author = {Wang, G.J. and Volkow, N.D. and Logan, J.
and Fowler, J.S. and Schlyer, D. and MacGregor, R.R. and Hitzemann, R.J. and Gur, R.C. and Wolf, A.P.}, + Journal = {Life Sciences}, + Number = {14}, + Publisher = {Elsevier}, + Title = {{Evaluation of age-related changes in serotonin 5-HT2 and dopamine D2 receptor availability in healthy human subjects}}, + Volume = {56}, + Year = {1995}} + +@article{Wang2004, + Author = {Wang, G. J. and Chang, L. and Volkow, N. D. and Telang, F. and Logan, J. and Ernst, T. and Fowler, J. S.}, + Journal = {Brain}, + Month = {Nov}, + Pages = {2452--2458}, + Title = {{{D}ecreased brain dopaminergic transporters in {H}{I}{V}-associated dementia patients}}, + Volume = {127}, + Year = {2004}} + +@article{Wang2008, + Author = {Wang, G. J. and Tomasi, D. and Backus, W. and Wang, R. and Telang, F. and Geliebter, A. and Korner, J. and Bauman, A. and Fowler, J. S. and Thanos, P. K. and Volkow, N. D.}, + Journal = {Neuroimage}, + Month = {Feb}, + Pages = {1824--1831}, + Title = {{{G}astric distention activates satiety circuitry in the human brain}}, + Volume = {39}, + Year = {2008}} + +@article{Wang2004c, + Author = {Wang, G. J. and Volkow, N. D. and Chang, L. and Miller, E. and Sedler, M. and Hitzemann, R. and Zhu, W. and Logan, J. and Ma, Y. and Fowler, J. S.}, + Journal = {Am J Psychiatry}, + Month = {Feb}, + Pages = {242--248}, + Title = {{{P}artial recovery of brain metabolism in methamphetamine abusers after protracted abstinence}}, + Volume = {161}, + Year = {2004}} + +@article{Wang2002b, + Author = {Wang, G. J. and Volkow, N. D. and Felder, C. and Fowler, J. S. and Levy, A. V. and Pappas, N. R. and Wong, C. T. and Zhu, W. and Netusil, N.}, + Journal = {Neuroreport}, + Month = {Jul}, + Pages = {1151--1155}, + Title = {{{E}nhanced resting activity of the oral somatosensory cortex in obese subjects}}, + Volume = {13}, + Year = {2002}} + +@article{Wang2002a, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S.}, + Journal = {Expert Opin. Ther. Targets}, + Month = {Oct}, + Pages = {601--609}, + Title = {{{T}he role of dopamine in motivation for food in humans: implications for obesity}}, + Volume = {6}, + Year = {2002}} + +@article{Wang1999b, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Cervany, P. and Hitzemann, R. J. and Pappas, N. R. and Wong, C. T. and Felder, C.}, + Journal = {Life Sci.}, + Pages = {775--784}, + Title = {{{R}egional brain metabolic activation during craving elicited by recall of previous drug experiences}}, + Volume = {64}, + Year = {1999}} + +@article{Wang2000, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Franceschi, D. and Logan, J. and Pappas, N. R. and Wong, C. T. and Netusil, N.}, + Journal = {J. Nucl. Med.}, + Month = {Aug}, + Pages = {1352--1356}, + Title = {{{P}{E}{T} studies of the effects of aerobic exercise on human striatal dopamine release}}, + Volume = {41}, + Year = {2000}} + +@article{Wang2003, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Franceschi, D. and Wong, C. T. and Pappas, N. R. and Netusil, N. and Zhu, W. and Felder, C. and Ma, Y.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jun}, + Pages = {909--917}, + Title = {{{A}lcohol intoxication induces greater reductions in brain metabolism in male than in female subjects}}, + Volume = {27}, + Year = {2003}} + +@article{Wang1998b, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Hitzemann, R. J. and Pappas, N. R. 
and Netusil, N.}, + Journal = {Psychiatry Res}, + Month = {Apr}, + Pages = {37--46}, + Title = {{{E}valuation of gender difference in regional brain metabolic responses to lorazepam}}, + Volume = {82}, + Year = {1998}} + +@article{Wang1999, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Logan, J. and Pappas, N. R. and Wong, C. T. and Hitzemann, R. J. and Netusil, N.}, + Journal = {J. Nucl. Med.}, + Month = {Aug}, + Pages = {1285--1291}, + Title = {{{R}eproducibility of repeated measures of endogenous dopamine competition with [11{C}]raclopride in the human brain in response to methylphenidate}}, + Volume = {40}, + Year = {1999}} + +@article{Wang1998a, + Author = {Wang, G. J. and Volkow, N. D. and Fowler, J. S. and Pappas, N. R. and Wong, C. T. and Pascani, K. and Felder, C. A. and Hitzemann, R. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Nov}, + Pages = {1850--1854}, + Title = {{{R}egional cerebral metabolism in female alcoholics of moderate severity does not differ from that of controls}}, + Volume = {22}, + Year = {1998}} + +@article{Wang2000a, + Author = {Wang, G. J. and Volkow, N. D. and Franceschi, D. and Fowler, J. S. and Thanos, P. K. and Scherbaum, N. and Pappas, N. and Wong, C. T. and Hitzemann, R. J. and Felder, C. A.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Jun}, + Pages = {822--829}, + Title = {{{R}egional brain metabolism during alcohol intoxication}}, + Volume = {24}, + Year = {2000}} + +@article{Wang1999a, + Author = {Wang, G. J. and Volkow, N. D. and Levy, A. V. and Felder, C. A. and Fowler, J. S. and Pappas, N. R. and Hitzemann, R. J. and Wong, C. T.}, + Journal = {J. Nucl. Med.}, + Month = {May}, + Pages = {715--720}, + Title = {{{M}easuring reproducibility of regional brain metabolic responses to lorazepam using statistical parametric maps}}, + Volume = {40}, + Year = {1999}} + +@article{Wang2001, + Author = {Wang, G. J. and Volkow, N. D. and Logan, J. and Pappas, N. R. and Wong, C. T. and Zhu, W. and Netusil, N. and Fowler, J. S.}, + Journal = {Lancet}, + Month = {Feb}, + Pages = {354--357}, + Title = {{{B}rain dopamine and obesity}}, + Volume = {357}, + Year = {2001}} + +@article{Wang2004b, + Author = {Wang, G. J. and Volkow, N. D. and Telang, F. and Jayne, M. and Ma, J. and Rao, M. and Zhu, W. and Wong, C. T. and Pappas, N. R. and Geliebter, A. and Fowler, J. S.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {1790--1797}, + Title = {{{E}xposure to appetitive food stimuli markedly activates the human brain}}, + Volume = {21}, + Year = {2004}} + +@article{Wang2009, + Author = {Wang, G. J. and Volkow, N. D. and Telang, F. and Jayne, M. and Ma, Y. and Pradhan, K. and Zhu, W. and Wong, C. T. and Thanos, P. K. and Geliebter, A. and Biegon, A. and Fowler, J. S.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Jan}, + Pages = {1249--1254}, + Title = {{{E}vidence of gender differences in the ability to inhibit brain activation elicited by food stimulation}}, + Volume = {106}, + Year = {2009}} + +@article{Wang2004a, + Author = {Wang, G. J. and Volkow, N. D. and Thanos, P. K. and Fowler, J. S.}, + Journal = {J Addict Dis}, + Pages = {39--53}, + Title = {{{S}imilarity between obesity and drug addiction as assessed by neurofunctional imaging: a concept review}}, + Volume = {23}, + Year = {2004}} + +@article{Wang2006, + Author = {Wang, G. J. and Yang, J. and Volkow, N. D. and Telang, F. and Ma, Y. and Zhu, W. and Wong, C. T. and Tomasi, D. and Thanos, P. K. and Fowler, J. S.}, + Journal = {Proc. Natl. Acad. Sci. 
U.S.A.}, + Month = {Oct}, + Pages = {15641--15645}, + Title = {{{G}astric stimulation in obese subjects activates the hippocampus and other regions involved in brain reward circuitry}}, + Volume = {103}, + Year = {2006}} + +@article{Wang2006a, + Author = {Wang, W. and Xiao, H. and Lu, L.}, + Journal = {J Psychoactive Drugs}, + Month = {Jun}, + Pages = {203--205}, + Title = {{{C}ase-control retrospective study of pulmonary tuberculosis in heroin-abusing patients in {C}hina}}, + Volume = {38}, + Year = {2006}} + +@article{Wang2002, + Author = {Wang, X.--J.}, + Journal = {Neuron}, + Pages = {955--968}, + Title = {Probabilistic Decision Making by Slow Reverberation in Cortical Circuits}, + Volume = {36}, + Year = {2002}} + +@article{Wang1998, + Author = {Wang, Y. and Chan, G.L.Y. and Holden, J.E. and Dobko, T. and Mak, E. and Schulzer, M. and Huser, J.M. and Snow, B.J. and Ruth, T.J. and Calne, D.B. and others}, + Journal = {Synapse}, + Number = {1}, + Publisher = {John Wiley \& Sons, Inc. New York}, + Title = {{Age-dependent decline of dopamine D1 receptors in human brain: a PET study}}, + Volume = {30}, + Year = {1998}} + +@article{Wang2008a, + Author = {Wang, Z. and Aguirre, G. K. and Rao, H. and Wang, J. and Fernandez-Seara, M. A. and Childress, A. R. and Detre, J. A.}, + Journal = {Magn Reson Imaging}, + Month = {Feb}, + Pages = {261--269}, + Title = {{{E}mpirical optimization of {A}{S}{L} data analysis using an {A}{S}{L} data processing toolbox: {A}{S}{L}tbx}}, + Volume = {26}, + Year = {2008}} + +@article{Wang2006d, + Author = {Wang, Z. and Childress, A. R. and Detre, J. A.}, + Journal = {Conf Proc IEEE Eng Med Biol Soc}, + Pages = {1006--1009}, + Title = {{{B}oost up the detection sensitivity of {A}{S}{L} perfusion f{M}{R}{I} through support vector machine}}, + Volume = {1}, + Year = {2006}} + +@article{Wang2007, + Author = {Wang, Z. and Childress, A. R. and Wang, J. and Detre, J. A.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {1139--1151}, + Title = {{{S}upport vector machine learning-based f{M}{R}{I} data group analysis}}, + Volume = {36}, + Year = {2007}} + +@article{Wang2006c, + Author = {Wang, Z. and Wang, J. and Calhoun, V. and Rao, H. and Detre, J. A. and Childress, A. R.}, + Journal = {Magn Reson Imaging}, + Month = {Jun}, + Pages = {591--596}, + Title = {{{S}trategies for reducing large f{M}{R}{I} data sets for independent component analysis}}, + Volume = {24}, + Year = {2006}} + +@article{Wang2005, + Author = {Wang, Z. and Wang, J. and Childress, A. R. and Rao, H. and Detre, J. A.}, + Journal = {Conf Proc IEEE Eng Med Biol Soc}, + Pages = {5904--5907}, + Title = {{{C}{R}{L}{S}-{P}{C}{A} based independent component analysis for f{M}{R}{I} study}}, + Volume = {6}, + Year = {2005}} + +@article{Ware2005, + Author = {Ware, J. E. and Gandek, B. and Sinclair, S. J. and Bjorner, J. B.}, + Journal = {Rehabilitation Psychology}, + Owner = {Wooyoung Ahn}, + Pages = {71--78}, + Timestamp = {2007.04.30}, + Title = {Item response theory and computerized adaptive testing: Implications for outcomes measurement in rehabilitation}, + Volume = {50}, + Year = {2005}} + +@article{Ware1989, + Author = {Ware, J.
H.}, + Journal = {Statistical Science}, + Pages = {298--340}, + Title = {Investigating Therapies of Potentially Great Benefit: {ECMO}}, + Volume = {4}, + Year = {1989}} + +@book{Wasserman2004, + Address = {New York}, + Author = {Wasserman, L.}, + Publisher = {Springer}, + Title = {All of Statistics: {A} Concise Course in Statistical Inference}, + Year = {2004}} + +@article{Wasserman2000, + Author = {Wasserman, L.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {92--107}, + Title = {{B}ayesian Model Selection and Model Averaging}, + Volume = {44}, + Year = {2000}} + +@article{Watanabe2009, + Author = {Watanabe, H. and Henriksson, R. and Ohnishi, Y. N. and Ohnishi, Y. H. and Harper, C. and Sheedy, D. and Garrick, T. and Nyberg, F. and Nestler, E. J. and Bakalkin, G. and Yakovleva, T.}, + Journal = {Addict Biol}, + Month = {Jul}, + Pages = {294--297}, + Title = {{{F}{O}{S}{B} proteins in the orbitofrontal and dorsolateral prefrontal cortices of human alcoholics}}, + Volume = {14}, + Year = {2009}} + +@article{Watson2009, + Author = {Watson, K. K. and Ghodasra, J. H. and Platt, M. L.}, + Journal = {PLoS ONE}, + Pages = {e4156}, + Title = {{{S}erotonin transporter genotype modulates social reward and punishment in rhesus macaques}}, + Volume = {4}, + Year = {2009}} + +@article{Watson2008, + Author = {Watson, K. K. and Platt, M. L.}, + Journal = {Philos. Trans. R. Soc. Lond., B, Biol. Sci.}, + Month = {Dec}, + Pages = {3825--3835}, + Title = {{{N}euroethology of reward and decision making}}, + Volume = {363}, + Year = {2008}} + +@article{Weakliem1999, + Author = {Weakliem, D. L.}, + Journal = {Sociological Methods \& Research}, + Pages = {359--397}, + Title = {A Critique of the {B}ayesian Information Criterion for Model Selection}, + Volume = {27}, + Year = {1999}} + +@article{Weber2004, + Author = {Weber, E.U. and Shafir, S. and Blais, A.R. and Israel, R.}, + Journal = {Psychological Review}, + Pages = {430--445}, + Title = {{Predicting Risk-Sensitivity in Humans and Lower Animals: Risk as Variance or Coefficient of Variation}}, + Volume = {111}, + Year = {2004}} + +@article{Wei1992, + Author = {Wei, C. Z.}, + Journal = {The Annals of Statistics}, + Pages = {1--42}, + Title = {On Predictive Least Squares Principles}, + Volume = {20}, + Year = {1992}} + +@article{Weinger2008, + Author = {Weinger, K. and Jacobson, A. M. and Musen, G. and Lyoo, I. K. and Ryan, C. M. and Jimerson, D. C. and Renshaw, P. F.}, + Journal = {Diabetologia}, + Month = {Mar}, + Pages = {417--425}, + Title = {{{T}he effects of type 1 diabetes on cerebral white matter}}, + Volume = {51}, + Year = {2008}} + +@article{Weinger1994, + Author = {Weinger, M. B. and Herndon, O. W. and Zornow, M. H. and Paulus, M. P. and Gaba, D. M. and Dallen, L. T.}, + Journal = {Anesthesiology}, + Month = {Jan}, + Pages = {77--92}, + Title = {{{A}n objective methodology for task analysis and workload assessment in anesthesia providers}}, + Volume = {80}, + Year = {1994}} + +@article{Weiss1993, + Abstract = {Dopaminergic neurotransmission in the nucleus accumbens may be an + important factor in ethanol reinforcement and genetically determined + ethanol preference. This hypothesis was tested by measuring dopamine + (DA) release by intracranial microdialysis during voluntary oral + ethanol self-administration in alcohol-preferring (P) and genetically + heterogeneous Wistar rats. The animals were trained to respond for + ethanol (10\% w/v) or water in a free-choice operant task.
Extracellular + DA levels in the nucleus accumbens were subsequently monitored during + 30-min self-administration sessions and a 15-min "waiting period" + before session onset. Ethanol self-administration in all animals + was followed by a significant, dose-dependent rise in DA release + with maximal effects at approximately 15 min after peak intake. Dose-effect + functions revealed significantly steeper slopes for the DA-releasing + effects of ethanol in P than in genetically heterogeneous Wistar + rats. Over an identical range of ethanol doses and blood alcohol + levels, increases in DA efflux ranged from 143\% to 459\% of basal + levels in P rats but only from 142\% to 212\% in Wistar rats. To + differentiate the pharmacological effects of ethanol from the effects + of operant responding, additional groups of P and Wistar rats were + tested during self-administration of saccharin (0.05\% w/v). By contrast + with ethanol, saccharin did not substantially elevate extracellular + DA levels. A significant, transient increase in DA efflux was, however, + observed in both strains of rats during the presession waiting period + in the absence of ethanol or saccharin availability.(ABSTRACT TRUNCATED + AT 250 WORDS)}, + Author = {F. Weiss and M. T. Lorang and F. E. Bloom and G. F. Koob}, + Institution = {Department of Neuropharmacology, Scripps Research Institute, La Jolla, California.}, + Journal = {J Pharmacol Exp Ther}, + Keywords = {Administration, Oral; Animals; Dopamine, metabolism; Ethanol, administration /&/ dosage/pharmacology; Male; Nucleus Accumbens, metabolism; Rats; Rats, Inbred Strains; Rats, Wistar; Reinforcement (Psychology); Saccharin, administration /&/ dosage; Self Administration; Synaptic Transmission, drug effects}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Oct}, + Number = {1}, + Owner = {Woo-Young Ahn}, + Pages = {250--258}, + Pmid = {8229752}, + Timestamp = {2009.08.18}, + Title = {Oral alcohol self-administration stimulates dopamine release in the rat nucleus accumbens: genetic and motivational determinants.}, + Volume = {267}, + Year = {1993}} + +@article{Weiss1992, + Author = {Weiss, F. and Paulus, M. P. and Lorang, M. T. and Koob, G. F.}, + Journal = {J. Neurosci.}, + Month = {Nov}, + Pages = {4372--4380}, + Title = {{{I}ncreases in extracellular dopamine in the nucleus accumbens by cocaine are inversely related to basal levels: effects of acute and repeated administration}}, + Volume = {12}, + Year = {1992}} + +@article{Weiss2002, + Author = {Weiss, F. and Porrino, L. J.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {3332--3337}, + Title = {{{B}ehavioral neurobiology of alcohol addiction: recent advances and challenges}}, + Volume = {22}, + Year = {2002}} + +@article{Weissman1996, + Abstract = {OBJECTIVE: To estimate the rates and patterns of major depression + and bipolar disorder based on cross-national epidemiologic surveys. + DESIGN AND SETTING: Population-based epidemiologic studies using + similar methods from 10 countries: the United States, Canada, Puerto + Rico, France, West Germany, Italy, Lebanon, Taiwan, Korea, and New + Zealand. PARTICIPANTS: Approximately 38000 community subjects. OUTCOME + MEASURES: Rates, demographics, and age at onset of major depression + and bipolar disorder. Symptom profiles, comorbidity, and marital + status with major depression. 
RESULTS: The lifetime rates for major + depression vary widely across countries, ranging from 1.5 cases per + 100 adults in the sample in Taiwan to 19.0 cases per 100 adults in + Beirut. The annual rates ranged from 0.8 cases per 100 adults in + Taiwan to 5.8 cases per 100 adults in New Zealand. The mean age at + onset shows less variation (range, 24.8-34.8 years). In every country, + the rates of major depression were higher for women than men. By + contrast, the lifetime rates of bipolar disorder are more consistent + across countries (0.3/100 in Taiwan to 1.5/100 in New Zealand); the + sex ratios are nearly equal; and the age at first onset is earlier + (average, 6 years) than the onset of major depression. Insomnia and + loss of energy occurred in most persons with major depression at + each site. Persons with major depression were also at increased risk + for comorbidity with substance abuse and anxiety disorders at all + sites. Persons who were separated or divorced had significantly higher + rates of major depression than married persons in most of the countries, + and the risk was somewhat greater for divorced or separated men than + women in most countries. CONCLUSIONS: There are striking similarities + across countries in patterns of major depression and of bipolar disorder. + The differences in rates for major depression across countries suggest + that cultural differences or different risk factors affect the expression + of the disorder.}, + Author = {Weissman, M. M. and others}, + Institution = {Department of Psychiatry, College of Physicians and Surgeons of Columbia University, New York, USA. weissman@child.cpmc.columbia.edu}, + Journal = {JAMA}, + Keywords = {Adolescent; Adult; Age of Onset; Aged; Bipolar Disorder, epidemiology; Canada, epidemiology; Comorbidity; Cross-Cultural Comparison; Depressive Disorder, epidemiology; Female; France, epidemiology; Germany, West, epidemiology; Humans; Italy, epidemiology; Korea, epidemiology; Lebanon, epidemiology; Logistic Models; Male; Marital Status; Middle Aged; New Zealand, epidemiology; Population Surveillance; Puerto Rico, epidemiology; Risk Factors; Sex Distribution; Taiwan, epidemiology; United States, epidemiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4}, + Owner = {Young}, + Pages = {293--299}, + Pmid = {8656541}, + Timestamp = {2010.05.01}, + Title = {Cross-national epidemiology of major depression and bipolar disorder.}, + Volume = {276}, + Year = {1996}} + +@article{Weller2007, + Abstract = {Do decisions about potential gains and potential losses require different + neural structures for advantageous choices? In a lesion study, we + used a new measure of adaptive decision making under risk to examine + whether damage to neural structures subserving emotion affects an + individual's ability to make adaptive decisions differentially for + gains and losses. We found that individuals with lesions to the amygdala, + an area responsible for processing emotional responses, displayed + impaired decision making when considering potential gains, but not + when considering potential losses. In contrast, patients with damage + to the ventromedial prefrontal cortex, an area responsible for integrating + cognitive and emotional information, showed deficits in both domains. + We argue that this dissociation provides evidence that adaptive decision + making for risks involving potential losses may be more difficult + to disrupt than adaptive decision + making for risks involving potential + gains.
This research further demonstrates the role of emotion in + decision competence.}, + Author = {Joshua A Weller and Irwin P Levin and Baba Shiv and Antoine Bechara}, + Doi = {10.1111/j.1467-9280.2007.02009.x}, + Institution = {Decision Research, Eugene, Oregon 97401, USA. jweller@decisionresearch.org}, + Journal = {Psychol Sci}, + Keywords = {Adaptation, Psychological; Adult; Affect; Cognition, physiology; Decision Making; Female; Humans; Male; Prefrontal Cortex, physiology; Risk-Taking}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {11}, + Owner = {Woo-Young Ahn}, + Pages = {958--964}, + Pii = {PSCI2009}, + Pmid = {17958709}, + Timestamp = {2009.08.10}, + Title = {Neural correlates of adaptive decision making for risky gains and losses.}, + Url = {http://dx.doi.org/10.1111/j.1467-9280.2007.02009.x}, + Volume = {18}, + Year = {2007}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1467-9280.2007.02009.x}} + +@article{Weller2009, + Author = {Weller, J.A. and Levin, I.P. and Shiv, B. and Bechara, A.}, + Journal = {Social Neuroscience}, + Number = {1}, + Pages = {1--12}, + Publisher = {Psychology Press}, + Title = {{The effects of insula damage on decision-making for risky gains and losses}}, + Volume = {99999}, + Year = {2009}} + +@article{Weller2008, + Abstract = {Delay discounting (DD) is a measure of the degree to which an individual + is driven by immediate gratification vs. the prospect of larger, + but delayed, rewards. Because of hypothesized parallels between drug + addiction and obesity, and reports of increased delay discounting + in drug-dependent individuals, we hypothesized that obese individuals + would show higher rates of discounting than controls. Obese and healthy-weight + age-matched participants of both sexes completed two versions of + a DD of money task, allowing us to calculate how subjective value + of $1000 or $50,000 declined as delay until hypothetical delivery + increased from 2 weeks to 10 years. On both tasks, obese women (N=29) + showed greater delay discounting than control women did (N=26; P + values <.02). Subsequent analyses showed that these differences were + not related to differences in IQ or income. Obese (N=19) and healthy-weight + (N=21) men did not differ significantly. Further research is needed + to determine why greater delay discounting was not also observed + in obese men.}, + Author = {Rosalyn E Weller and Edwin W Cook and Kathy B Avsar and James E Cox}, + Doi = {10.1016/j.appet.2008.04.010}, + Institution = {Department of Psychology, University of Alabama at Birmingham, Birmingham, AL 35294, USA. reweller@uab.edu}, + Journal = {Appetite}, + Keywords = {Adolescent; Adult; Area Under Curve; Body Mass Index; Case-Control Studies; Female; Humans; Internal-External Control; Male; Middle Aged; Obesity, psychology; Reward; Risk Factors; Sex Distribution; Thinness, psychology; Time Factors; Young Adult}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Number = {3}, + Owner = {Woo-Young Ahn}, + Pages = {563--569}, + Pii = {S0195-6663(08)00147-5}, + Pmid = {18513828}, + Timestamp = {2009.08.06}, + Title = {Obese women show greater delay discounting than healthy-weight women.}, + Url = {http://dx.doi.org/10.1016/j.appet.2008.04.010}, + Volume = {51}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.appet.2008.04.010}}
+ +@article{Weng1999, + Author = {Weng, X. and Ding, Y. S. and Volkow, N. D.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Sep}, + Pages = {11073--11074}, + Title = {{{I}maging the functioning human brain}}, + Volume = {96}, + Year = {1999}} + +@article{West1979, + Author = {West, B. J. and Bulsara, A. R. and Lindenberg, K. and Seshadri, V. and Shuler, K. E.}, + Journal = {Physica}, + Pages = {211--233}, + Title = {Stochastic Processes with Non-Additive Fluctuations. {I}. {I}t\^{o} and {S}tratonovich Calculus and the Effects of Correlations}, + Volume = {97A}, + Year = {1979}} + +@article{Westlund1996, + Author = {Westlund, K. N. and Craig, A. D.}, + Journal = {Exp Brain Res}, + Month = {Jul}, + Pages = {151--162}, + Title = {{{A}ssociation of spinal lamina {I} projections with brainstem catecholamine neurons in the monkey}}, + Volume = {110}, + Year = {1996}} + +@article{Wetzels2009a, + Author = {Wetzels, R. and Lee, M. D. and Wagenmakers, E. J.}, + Owner = {Woo-Young Ahn}, + Timestamp = {2009.08.15}, + Title = {Bayesian Inference Using WBDev: A Tutorial for Social Scientists. Manuscript submitted for publication.}, + Volume = {Manuscript submitted for publication}, + Year = {2009}} + +@article{Wetzels2009, + Author = {Wetzels, R. and Vandekerckhove, J. and Tuerlinckx, F. and Wagenmakers, E.
J.}, + Journal = {Journal of Mathematical Psychology}, + Publisher = {Elsevier}, + Title = {{Bayesian parameter estimation in the Expectancy Valence model of the Iowa gambling task}}, + Year = {2009}} + +@article{White2008, + Abstract = {ABSTRACT: BACKGROUND: The A1 allele of the ANKK1 TaqIA polymorphism + (previously reported as located in the D2 dopamine receptor (DRD2) + gene) is associated with reduced DRD2 density in the striatum and + with clinical disorders, particularly addiction. It was hypothesized + that impulsivity represents an endophenotype underlying these associations + with the TaqIA and that environmental stress would moderate the strength + of the gene-behavior relationship. METHODS: TaqIA genotyping was + conducted on 72 healthy young adults who were randomly allocated + to either an acute psychosocial stress or relaxation induction condition. + Behavioral phenotypes of impulsivity were measured using a card-sorting + index of reinforcement sensitivity and computerized response inhibition + and delay discounting tasks. RESULTS: Separate analyses of variance + revealed associations between the A1 allele and two laboratory measures + of impulsivity. The presence of the TaqIA allele (A1+) was associated + with slower card-sorting in the presence of small financial reinforcers, + but was overcome in a second administration after either a five-minute + rest or psychosocial stress induction. A1+ participants also demonstrated + significantly poorer response inhibition and faster response times + on a computerized stop inhibition task, independent of acute stress + exposure. CONCLUSION: These findings indicate the A1 allele is associated + with an endophenotype comprising both a "rash impulsive" behavioral + style and reinforcement-related learning deficits. These effects + are independent of stress.}, + Author = {Melanie J White and C. Phillip Morris and Bruce R Lawford and Ross McD Young}, + Doi = {10.1186/1744-9081-4-54}, + Institution = {Institute of Health and Biomedical Innovation, Queensland University of Technology, Kelvin Grove, Brisbane, Queensland 4059, Australia. m2.white@qut.edu.au.}, + Journal = {Behav Brain Funct}, + Language = {eng}, + Medline-Pst = {epublish}, + Owner = {Woo-Young Ahn}, + Pages = {54}, + Pii = {1744-9081-4-54}, + Pmid = {19025655}, + Timestamp = {2009.08.06}, + Title = {Behavioral phenotypes of impulsivity related to the ANKK1 gene are independent of an acute stressor.}, + Url = {http://dx.doi.org/10.1186/1744-9081-4-54}, + Volume = {4}, + Year = {2008}, + Bdsk-Url-1 = {http://dx.doi.org/10.1186/1744-9081-4-54}}
+ +@article{Whitehead1993, + Author = {Whitehead, J.}, + Journal = {Statistics in Medicine}, + Pages = {1405--1413}, + Title = {The Case for Frequentism in Clinical Trials}, + Volume = {12}, + Year = {1993}} + +@article{Whitlow2003, + Author = {Whitlow, C. T. and Freedland, C. S. and Porrino, L. J.}, + Journal = {Drug Alcohol Depend}, + Month = {Aug}, + Pages = {169--177}, + Title = {{{F}unctional consequences of the repeated administration of {D}elta9-tetrahydrocannabinol in the rat}}, + Volume = {71}, + Year = {2003}} + +@article{Whitlow2002, + Author = {Whitlow, C. T. and Freedland, C. S. and Porrino, L. J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {May}, + Pages = {129--136}, + Title = {{{M}etabolic mapping of the time-dependent effects of delta 9-tetrahydrocannabinol administration in the rat}}, + Volume = {161}, + Year = {2002}} + +@article{Whitlow2004, + Author = {Whitlow, C. T. and Liguori, A. and Livengood, L. B. and Hart, S. L. and Mussat-Whitlow, B. J. and Lamborn, C. M. and Laurienti, P. J. and Porrino, L. J.}, + Journal = {Drug Alcohol Depend}, + Month = {Oct}, + Pages = {107--111}, + Title = {{{L}ong-term heavy marijuana users make costly decisions on a gambling task}}, + Volume = {76}, + Year = {2004}} + +@article{Wickelgren1977, + Author = {Wickelgren, W. A.}, + Journal = {Acta Psychologica}, + Pages = {67--85}, + Title = {Speed--accuracy Tradeoff and Information Processing Dynamics}, + Volume = {41}, + Year = {1977}} + +@article{Widnell1994, + Author = {Widnell, K. L. and Russell, D. S. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Nov}, + Pages = {10947--10951}, + Title = {{{R}egulation of expression of c{A}{M}{P} response element-binding protein in the locus coeruleus in vivo and in a locus coeruleus-like cell line in vitro}}, + Volume = {91}, + Year = {1994}} + +@article{Widnell1996, + Author = {Widnell, K. L. and Self, D. W. and Lane, S. B. and Russell, D. S. and Vaidya, V. A. and Miserendino, M. J.
and Rubin, C. S. and Duman, R. S. and Nestler, E. J.}, + Journal = {J. Pharmacol. Exp. Ther.}, + Month = {Jan}, + Pages = {306--315}, + Title = {{{R}egulation of {C}{R}{E}{B} expression: in vivo evidence for a functional role in morphine action in the nucleus accumbens}}, + Volume = {276}, + Year = {1996}} + +@article{Wiecki2009, + Author = {Wiecki, T. V. and Riedinger, K. and von Ameln-Mayerhofer, A. and Schmidt, W. J. and Frank, M. J.}, + Journal = {Psychopharmacology (Berl.)}, + Month = {Jun}, + Pages = {265--277}, + Title = {{{A} neurocomputational account of catalepsy sensitization induced by {D}2 receptor blockade in rats: context dependency, extinction, and renewal}}, + Volume = {204}, + Year = {2009}} + +@article{Wilkinson1999, + Author = {Wilkinson, L. and {the Task Force on Statistical Inference}}, + Journal = {American Psychologist}, + Pages = {594--604}, + Title = {Statistical Methods in Psychology Journals: {G}uidelines and Explanations}, + Volume = {54}, + Year = {1999}} + +@article{Williams2005, + Author = {Williams, J. and Dayan, P.}, + Journal = {J Child Adolesc Psychopharmacol}, + Month = {Apr}, + Pages = {160--179}, + Title = {{{D}opamine, learning, and impulsivity: a biological account of attention-deficit/hyperactivity disorder}}, + Volume = {15}, + Year = {2005}} + +@article{Williams-Hemby1996, + Author = {Williams-Hemby, L. and Grant, K. A. and Gatto, G. J. and Porrino, L. J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Jun}, + Pages = {415--423}, + Title = {{{M}etabolic mapping of the effects of chronic voluntary ethanol consumption in rats}}, + Volume = {54}, + Year = {1996}} + +@article{Williams-Hemby1997, + Author = {Williams-Hemby, L. and Porrino, L. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {1581--1591}, + Title = {{{I}{I}. {F}unctional consequences of intragastrically administered ethanol in rats as measured by the 2-[14{C}]deoxyglucose method: the contribution of dopamine}}, + Volume = {21}, + Year = {1997}} + +@article{Williams-Hemby1997a, + Author = {Williams-Hemby, L. and Porrino, L. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Dec}, + Pages = {1573--1580}, + Title = {{{I}. {F}unctional consequences of intragastrically administered ethanol in rats as measured by the 2-[14{C}]deoxyglucose method}}, + Volume = {21}, + Year = {1997}} + +@article{Williams-Hemby1994, + Author = {Williams-Hemby, L. and Porrino, L. J.}, + Journal = {Alcohol. Clin. Exp. Res.}, + Month = {Aug}, + Pages = {982--988}, + Title = {{{L}ow and moderate doses of ethanol produce distinct patterns of cerebral metabolic changes in rats}}, + Volume = {18}, + Year = {1994}} + +@article{Wilson1996, + Author = {Wilson, J. M. and Kalasinsky, K. S. and Levey, A. I. and Bergeron, C. and Reiber, G. and Anthony, R. M. and Schmunk, G. A. and Shannak, K. and Haycock, J. W. and Kish, S. J.}, + Journal = {Nat. Med.}, + Month = {Jun}, + Pages = {699--703}, + Title = {{{S}triatal dopamine nerve terminal markers in human, chronic methamphetamine users}}, + Volume = {2}, + Year = {1996}} + +@article{Wilson2002, + Author = {Wilson, L. B. and Andrew, D. and Craig, A. D.}, + Journal = {J. Neurophysiol.}, + Month = {Mar}, + Pages = {1641--1645}, + Title = {{{A}ctivation of spinobulbar lamina {I} neurons by static muscle contraction}}, + Volume = {87}, + Year = {2002}} + +@article{Wilson2008, + Author = {Wilson, S. J. and Sayette, M. A. and Delgado, M. R. and Fiez, J. 
A.}, + Journal = {J Abnorm Psychol}, + Month = {May}, + Pages = {428--434}, + Title = {{{E}ffect of smoking opportunity on responses to monetary gain and loss in the caudate nucleus}}, + Volume = {117}, + Year = {2008}} + +@article{Wilson2005, + Author = {Wilson, S. J. and Sayette, M. A. and Delgado, M. R. and Fiez, J. A.}, + Journal = {Nicotine Tob. Res.}, + Month = {Aug}, + Pages = {637--645}, + Title = {{{I}nstructed smoking expectancy modulates cue-elicited neural activity: a preliminary study}}, + Volume = {7}, + Year = {2005}} + +@article{Winship1999, + Author = {Winship, C.}, + Journal = {Sociological Methods \& Research}, + Pages = {355--358}, + Title = {Editor's Introduction to the Special Issue on the {B}ayesian Information Criterion}, + Volume = {27}, + Year = {1999}} + +@article{Winstanley2009a, + Author = {Winstanley, C. A. and Bachtell, R. K. and Theobald, D. E. and Laali, S. and Green, T. A. and Kumar, A. and Chakravarty, S. and Self, D. W. and Nestler, E. J.}, + Journal = {Cereb. Cortex}, + Month = {Feb}, + Pages = {435--444}, + Title = {{{I}ncreased impulsivity during withdrawal from cocaine self-administration: role for {D}elta{F}os{B} in the orbitofrontal cortex}}, + Volume = {19}, + Year = {2009}} + +@article{Winstanley2009, + Author = {Winstanley, C. A. and Green, T. A. and Theobald, D. E. and Renthal, W. and LaPlant, Q. and DiLeone, R. J. and Chakravarty, S. and Nestler, E. J.}, + Journal = {Pharmacol. Biochem. Behav.}, + Month = {Sep}, + Pages = {278--284}, + Title = {{{D}elta{F}os{B} induction in orbitofrontal cortex potentiates locomotor sensitization despite attenuating the cognitive dysfunction caused by cocaine}}, + Volume = {93}, + Year = {2009}} + +@article{Winstanley2007, + Author = {Winstanley, C. A. and LaPlant, Q. and Theobald, D. E. and Green, T. A. and Bachtell, R. K. and Perrotti, L. I. and DiLeone, R. J. and Russo, S. J. and Garth, W. J. and Self, D. W. and Nestler, E. J.}, + Journal = {J. Neurosci.}, + Month = {Sep}, + Pages = {10497--10507}, + Title = {{{D}elta{F}os{B} induction in orbitofrontal cortex mediates tolerance to cocaine-induced cognitive dysfunction}}, + Volume = {27}, + Year = {2007}} + +@article{Winston2007, + Author = {Winston, J. S. and O'Doherty, J. and Kilner, J. M. and Perrett, D. I. and Dolan, R. J.}, + Journal = {Neuropsychologia}, + Month = {Jan}, + Pages = {195--206}, + Title = {{{B}rain systems for assessing facial attractiveness}}, + Volume = {45}, + Year = {2007}} + +@article{Winterer2002, + Author = {Winterer, G. and Adams, C. M. and Jones, D. W. and Knutson, B.}, + Journal = {Neuroimage}, + Month = {Oct}, + Pages = {851--858}, + Title = {{{V}olition to action--an event-related f{M}{R}{I} study}}, + Volume = {17}, + Year = {2002}} + +@article{Wise2004, + Author = {Wise, R.A.}, + Journal = {Nature Reviews Neuroscience}, + Number = {6}, + Pages = {483--494}, + Publisher = {Nature Publishing Group}, + Title = {{Dopamine, learning and motivation}}, + Volume = {5}, + Year = {2004}} + +@article{Wise1988, + Author = {Wise, R.A.}, + Journal = {Journal of Abnormal Psychology}, + Number = {2}, + Pages = {118--132}, + Title = {{The neurobiology of craving: implications for the understanding and treatment of addiction}}, + Volume = {97}, + Year = {1988}} + +@article{Wise1987, + Author = {Wise, R.A. and Bozarth, M.A.}, + Journal = {Psychol Rev}, + Number = {4}, + Pages = {469--492}, + Title = {{A psychomotor stimulant theory of addiction}}, + Volume = {94}, + Year = {1987}} + +@article{Wittmann2007, + Author = {Wittmann, M. 
and Leland, D. S. and Churan, J. and Paulus, M. P.}, + Journal = {Drug Alcohol Depend}, + Month = {Oct}, + Pages = {183--192}, + Title = {{{I}mpaired time perception and motor timing in stimulant-dependent subjects}}, + Volume = {90}, + Year = {2007}} + +@article{Wittmann2007a, + Author = {Wittmann, M. and Leland, D. S. and Paulus, M. P.}, + Journal = {Exp Brain Res}, + Month = {Jun}, + Pages = {643--653}, + Title = {{{T}ime and decision making: differential contribution of the posterior insular cortex and the striatum during a delay discounting task}}, + Volume = {179}, + Year = {2007}} + +@article{Wittmann2008, + Author = {Wittmann, M. and Paulus, M. P.}, + Journal = {Trends Cogn. Sci. (Regul. Ed.)}, + Month = {Jan}, + Pages = {7--12}, + Title = {{{D}ecision making, impulsivity and time perception}}, + Volume = {12}, + Year = {2008}} + +@article{Woicik2009, + Author = {Woicik, P. A. and Moeller, S. J. and Alia-Klein, N. and Maloney, T. and Lukasik, T. M. and Yeliosof, O. and Wang, G. J. and Volkow, N. D. and Goldstein, R. Z.}, + Journal = {Neuropsychopharmacology}, + Month = {Apr}, + Pages = {1112--1122}, + Title = {{{T}he neuropsychology of cocaine addiction: recent cocaine use masks impairment}}, + Volume = {34}, + Year = {2009}} + +@book{Wolf1978, + Address = {New York}, + Author = {Wolf, D.}, + Publisher = {Springer Verlag}, + Title = {Noise in Physical Systems}, + Year = {1978}} + +@article{Wong2006, + Journal = {Neuropsychopharmacology}, + Month = {Dec}, + Pages = {2716--2727}, + Title = {{{I}ncreased occupancy of dopamine receptors in human striatum during cue-elicited cocaine craving}}, + Volume = {31}, + Year = {2006}} + +@article{Wong2007a, + Author = {Wong, K. F. and Huk, A. C. and Shadlen, M. N. and Wang, X. J.}, + Journal = {Front Comput Neurosci}, + Pages = {6}, + Title = {{{N}eural circuit dynamics underlying accumulation of time-varying evidence during perceptual decision making}}, + Volume = {1}, + Year = {2007}} + +@article{Wong2006a, + Abstract = {The developmental trajectories of behavioral control and resiliency + from early childhood to adolescence and their effects on early onset + of substance use were examined. Behavioral control is the tendency + to express or contain one's impulses and behaviors. Resiliency is + the ability to adapt flexibly one's characteristic level of control + in response to the environment. Study participants were 514 children + of alcoholics and matched controls from a longitudinal community + sample (Time 1 age in years: M=4.32, SD=0.89). Children with slower + rates of increase in behavioral control were more likely to use alcohol + and other drugs in adolescence. Children with higher initial levels + of resiliency were less likely to begin using alcohol.}, + Author = {Maria M Wong and Joel T Nigg and Robert A Zucker and Leon I Puttler and Hiram E Fitzgerald and Jennifer M Jester and Jennifer M Glass and Kenneth Adams}, + Doi = {10.1111/j.1467-8624.2006.00916.x}, + Institution = {Department of Psychology, Idaho State University, Pocatello, ID 83209-8112, USA. 
wongmari@isu.edu}, + Journal = {Child Dev}, + Keywords = {Adaptation, Psychological; Adolescent; Alcohol Drinking, adverse effects/epidemiology/psychology; Alcoholic Intoxication, epidemiology/psychology; Alcoholism, epidemiology/psychology; Child; Child of Impaired Parents, psychology/statistics /&/ numerical data; Child, Preschool; Female; Humans; Internal-External Control; Longitudinal Studies; Male; Michigan; Prospective Studies; Q-Sort; Statistics as Topic; Street Drugs; Substance-Related Disorders, epidemiology/psychology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Number = {4}, + Owner = {Woo-Young Ahn}, + Pages = {1016--1033}, + Pii = {CDEV916}, + Pmid = {16942503}, + Timestamp = {2009.08.06}, + Title = {Behavioral control and resiliency in the onset of alcohol and illicit drug use: a prospective study from preschool to adolescence.}, + Url = {http://dx.doi.org/10.1111/j.1467-8624.2006.00916.x}, + Volume = {77}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1111/j.1467-8624.2006.00916.x}} + +@article{Wood2005b, + Author = {Wood, J. N. and Knutson, K. M. and Grafman, J.}, + Journal = {Cereb. Cortex}, + Month = {Aug}, + Pages = {1155--1161}, + Title = {{{P}sychological structure and neural correlates of event knowledge}}, + Volume = {15}, + Year = {2005}} + +@article{Wood2005a, + Author = {Wood, J. N. and Romero, S. G. and Knutson, K. M. and Grafman, J.}, + Journal = {Neuropsychologia}, + Pages = {249--259}, + Title = {{{R}epresentation of attitudinal knowledge: role of prefrontal cortex, amygdala and parahippocampal gyrus}}, + Volume = {43}, + Year = {2005}} + +@article{Wood2006, + Author = {Wood, R. M. and Rilling, J. K. and Sanfey, A. G. and Bhagwagar, Z. and Rogers, R. D.}, + Journal = {Neuropsychopharmacology}, + Month = {May}, + Pages = {1075--1084}, + Title = {{{E}ffects of tryptophan depletion on the performance of an iterated {P}risoner's {D}ilemma game in healthy adults}}, + Volume = {31}, + Year = {2006}} + +@article{Wood2005, + Author = {Wood, S. and Busemeyer, J. and Koling, A. and Cox, C. R. and Davis, H.}, + Journal = {Psychology and Aging}, + Pages = {220--225}, + Title = {Older Adults as Adaptive Decision Makers: {E}vidence From the {I}owa Gambling Task}, + Volume = {20}, + Year = {2005}} + +@book{Woodworth1954, + Address = {New York}, + Author = {Woodworth, R. S. and Schlosberg, H.}, + Publisher = {Holt}, + Title = {Experimental Psychology}, + Year = {1954}} + +@article{Wornell1992, + Author = {Wornell, G. W. and Oppenheim, A. V.}, + Journal = {IEEE Transactions on Signal Processing}, + Pages = {611--623}, + Title = {Estimation of Fractal Signals from Noisy Measurements Using Wavelets}, + Volume = {40}, + Year = {1992}} + +@article{Wrase2002, + Author = {Wrase, J. and Gr{\"u}sser, SM and Klein, S. and Diener, C. and Hermann, D. and Flor, H. and Mann, K. and Braus, DF and Heinz, A.}, + Journal = {European Psychiatry}, + Number = {5}, + Pages = {287--291}, + Publisher = {Elsevier}, + Title = {{Development of alcohol-associated cues and cue-induced brain activation in alcoholics}}, + Volume = {17}, + Year = {2002}} + +@article{Wrase2007a, + Author = {Wrase, J. and Kahnt, T. and Schlagenhauf, F. and Beck, A. and Cohen, M. X. and Knutson, B. and Heinz, A.}, + Journal = {Neuroimage}, + Month = {Jul}, + Pages = {1253--1262}, + Title = {{{D}ifferent neural systems adjust motor behavior in response to reward and punishment}}, + Volume = {36}, + Year = {2007}} + +@article{Wrase2007, + Author = {Wrase, J. and Schlagenhauf, F. and Kienast, T. and W{\"u}stenberg, T.
and Bermpohl, F. and Kahnt, T. and Beck, A. and Str{\"o}hle, A. and Juckel, G. and Knutson, B. and Heinz, A.}, + Journal = {Neuroimage}, + Month = {Apr}, + Pages = {787--794}, + Title = {{{D}ysfunction of reward processing correlates with alcohol craving in detoxified alcoholics}}, + Volume = {35}, + Year = {2007}} + +@article{Wu2009, + Author = {Wu, S. W. and Delgado, M. R. and Maloney, L. T.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Apr}, + Pages = {6088--6093}, + Title = {{{E}conomic decision-making compared with an equivalent motor task}}, + Volume = {106}, + Year = {2009}} + +@article{Wyer2008, + Author = {Wyer, P. C. and Naqvi, Z. and Dayan, P. S. and Celentano, J. J. and Eskin, B. and Graham, M. J.}, + Journal = {Adv Health Sci Educ Theory Pract}, + Month = {Sep}, + Title = {{{D}o workshops in evidence-based practice equip participants to identify and answer questions requiring consideration of clinical research? {A} diagnostic skill assessment}}, + Year = {2008}} + +@article{Wyvell2001, + Author = {Wyvell, C. L. and Berridge, K. C.}, + Journal = {J. Neurosci.}, + Month = {Oct}, + Pages = {7831--7840}, + Title = {{{I}ncentive sensitization by previous amphetamine exposure: increased cue-triggered "wanting" for sucrose reward}}, + Volume = {21}, + Year = {2001}} + +@article{Xiao2008, + Author = {Xiao, L. and Bechara, A. and Cen, S. and Grenard, J. L. and Stacy, A. W. and Gallaher, P. and Wei, Y. and Jia, Y. and Anderson Johnson, C.}, + Journal = {Nicotine Tob. Res.}, + Month = {Jun}, + Pages = {1085--1097}, + Title = {{{A}ffective decision-making deficits, linked to a dysfunctional ventromedial prefrontal cortex, revealed in 10th-grade {C}hinese adolescent smokers}}, + Volume = {10}, + Year = {2008}} + +@article{Xiao2002, + Author = {Xiao, S. and Wang, Z. and Hu, D.}, + Journal = {IEEE Eng Med Biol Mag}, + Pages = {74--76}, + Title = {{{S}tudying cardiac contractility change trend to evaluate cardiac reserve}}, + Volume = {21}, + Year = {2002}} + +@article{Xiao2006, + Author = {Xiao, Z. and Lee, T. and Zhang, J. X. and Wu, Q. and Wu, R. and Weng, X. and Hu, X.}, + Journal = {Drug Alcohol Depend}, + Month = {Jun}, + Pages = {157--162}, + Title = {{{T}hirsty heroin addicts show different f{M}{R}{I} activations when exposed to water-related and drug-related cues}}, + Volume = {83}, + Year = {2006}} + +@article{Xie1999, + Author = {Xie, Y.}, + Journal = {Sociological Methods \& Research}, + Pages = {428--435}, + Title = {The Tension Between Generality and Accuracy}, + Volume = {27}, + Year = {1999}} + +@article{Xu2008, + Author = {Xu, H. and Dayan, P. and Lipkin, R. M. and Qian, N.}, + Journal = {J. Neurosci.}, + Month = {Mar}, + Pages = {3374--3383}, + Title = {{{A}daptation across the cortical hierarchy: low-level curve adaptation affects high-level facial-expression judgments}}, + Volume = {28}, + Year = {2008}} + +@article{Xu2008a, + Author = {Xu, J. and Azizian, A. and Monterosso, J. and Domier, C. P. and Brody, A. L. and Fong, T. W. and London, E. D.}, + Journal = {Nicotine Tob.
Res.}, + Month = {Nov}, + Pages = {1653--1661}, + Title = {{{G}ender effects on mood and cigarette craving during early abstinence and resumption of smoking}}, + Volume = {10}, + Year = {2008}} + +@article{Xu2005, + Author = {Xu, J. and Mendrek, A. and Cohen, M. S. and Monterosso, J. and Rodriguez, P. and Simon, S. L. and Brody, A. and Jarvik, M. and Domier, C. P. and Olmstead, R. and Ernst, M. and London, E. D.}, + Journal = {Biol. Psychiatry}, + Month = {Jul}, + Pages = {143--150}, + Title = {{{B}rain activity in cigarette smokers performing a working memory task: effect of smoking abstinence}}, + Volume = {58}, + Year = {2005}} + +@article{Xu2006a, + Author = {Xu, J. and Mendrek, A. and Cohen, M. S. and Monterosso, J. and Simon, S. and Brody, A. L. and Jarvik, M. and Rodriguez, P. and Ernst, M. and London, E. D.}, + Journal = {Psychiatry Res}, + Month = {Dec}, + Pages = {103--109}, + Title = {{{E}ffects of acute smoking on brain activity vary with abstinence in smokers performing the {N}-{B}ack task: a preliminary study}}, + Volume = {148}, + Year = {2006}} + +@article{Xu2007, + Author = {Xu, J. and Mendrek, A. and Cohen, M. S. and Monterosso, J. and Simon, S. and Jarvik, M. and Olmstead, R. and Brody, A. L. and Ernst, M. and London, E. D.}, + Journal = {Neuropsychopharmacology}, + Month = {Jun}, + Pages = {1421--1428}, + Title = {{{E}ffect of cigarette smoking on prefrontal cortical function in nondeprived smokers performing the {S}troop {T}ask}}, + Volume = {32}, + Year = {2007}} + +@article{Xu2006b, + Author = {Xu, K. and Ernst, M. and Goldman, D.}, + Journal = {Neuroinformatics}, + Pages = {51--64}, + Title = {{{I}maging genomics applied to anxiety, stress response, and resiliency}}, + Volume = {4}, + Year = {2006}} + +@article{Xu2006, + Author = {Xueli Xu}, + Journal = {Psychometrika}, + Number = {1}, + Pages = {121--137}, + Title = {Computerized adaptive testing under nonparametric IRT models}, + Volume = {71}, + Year = {2006}} + +@article{Yacubian2006, + Author = {Yacubian, J. and Glascher, J. and Schroeder, K. and Sommer, T. and Braus, D.F. and Buchel, C.}, + Journal = {Journal of Neuroscience}, + Number = {37}, + Pages = {9530}, + Title = {{Dissociable systems for gain-and loss-related value predictions and errors of prediction in the human brain}}, + Volume = {26}, + Year = {2006}} + +@article{Yang2007a, + Author = {Yang, T. and Shadlen, M. N.}, + Journal = {Nature}, + Month = {Jun}, + Pages = {1075--1080}, + Title = {{{P}robabilistic reasoning by neurons}}, + Volume = {447}, + Year = {2007}} + +@article{Yang2007, + Author = {Yang, T. T. and Simmons, A. N. and Matthews, S. C. and Tapert, S. F.
and Bischoff-Grethe, A. and Frank, G. K. and Arce, E. and Paulus, M. P.}, + Journal = {Neurosci. Lett.}, + Month = {Nov}, + Pages = {109--114}, + Title = {{{I}ncreased amygdala activation is related to heart rate during emotion processing in adolescent subjects}}, + Volume = {428}, + Year = {2007}} + +@article{Yang2009, + Author = {Yang, T. T. and Simmons, A. N. and Matthews, S. C. and Tapert, S. F. and Frank, G. K. and Bischoff-Grethe, A. and Lansing, A. E. and Wu, J. and Brown, G. G. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {Mar}, + Pages = {440--444}, + Title = {{{D}epressed adolescents demonstrate greater subgenual anterior cingulate activity}}, + Volume = {20}, + Year = {2009}} + +@article{Yang2009a, + Author = {Yang, T. T. and Simmons, A. N. and Matthews, S. C. and Tapert, S. F. and Frank, G. K. and Bischoff-Grethe, A. and Lansing, A. E. and Wu, J. and Paulus, M. P.}, + Journal = {Neuroreport}, + Month = {Jan}, + Pages = {19--23}, + Title = {{{A}dolescent subgenual anterior cingulate activity is related to harm avoidance}}, + Volume = {20}, + Year = {2009}} + +@article{Yap2006, + Author = {Yap, M. J. and Balota, D. A. and Cortese, M. J. and Watson, J. M.}, + Journal = {Journal of Experimental Psychology: Human Perception and Performance}, + Pages = {1324--1344}, + Title = {Single versus Dual Process Models of Lexical Decision Performance: {I}nsights from {RT} Distributional Analysis}, + Volume = {32}, + Year = {2006}} + +@article{Yatham2005, + Author = {Lakshmi N Yatham}, + Journal = {J Clin Psychiatry}, + Keywords = {Adaptation, Psychological; Anticonvulsants, therapeutic use; Antidepressive Agents, therapeutic use; Bipolar Disorder, diagnosis/prevention /&/ control/therapy; Drug Therapy, Combination; Humans; Lithium, therapeutic use; Patient Education as Topic; Psychotherapy, methods; Social Support}, + Language = {eng}, + Medline-Pst = {ppublish}, + Owner = {Young}, + Pages = {13--17}, + Pmid = {15693747}, + Timestamp = {2010.05.01}, + Title = {Diagnosis and management of patients with bipolar II disorder.}, + Volume = {66 Suppl 1}, + Year = {2005}} + +@article{Yatham2007, + Author = {Yatham, L. N. and Lyoo, I. K. and Liddle, P. and Renshaw, P. F. and Wan, D. and Lam, R. W. and Hwang, J.}, + Journal = {Bipolar Disord}, + Month = {Nov}, + Pages = {693--697}, + Title = {{{A} magnetic resonance imaging study of mood stabilizer- and neuroleptic-naive first-episode mania}}, + Volume = {9}, + Year = {2007}} + +@article{Yeap2008, + Author = {Yeap, S. and Kelly, S. P. and Sehatpour, P. and Magno, E. and Garavan, H. and Thakore, J. H. and Foxe, J. J.}, + Journal = {Eur Arch Psychiatry Clin Neurosci}, + Month = {Aug}, + Pages = {305--316}, + Title = {{{V}isual sensory processing deficits in {S}chizophrenia and their relationship to disease state}}, + Volume = {258}, + Year = {2008}} + +@article{Yeap2006, + Author = {Yeap, S. and Kelly, S. P. and Sehatpour, P. and Magno, E. and Javitt, D. C. and Garavan, H. and Thakore, J. H. and Foxe, J. J.}, + Journal = {Arch. Gen. Psychiatry}, + Month = {Nov}, + Pages = {1180--1188}, + Title = {{{E}arly visual sensory deficits as endophenotypes for schizophrenia: high-density electrical mapping in clinically unaffected first-degree relatives}}, + Volume = {63}, + Year = {2006}} + +@article{Yechiaminpress, + Author = {Yechiam, Eldad and Busemeyer, J. 
R.}, + Journal = {Games and Economic Behavior}, + Owner = {Wooyoung Ahn}, + Timestamp = {2007.05.03}, + Title = {Evaluating generalizability and parameter consistency in learning models}, + Year = {in press}} + +@article{Yechiam2005, + Author = {Yechiam, Eldad and Busemeyer, J. R.}, + Journal = {Psychonomic Bulletin \& Review}, + Owner = {Wooyoung Ahn}, + Pages = {387-402}, + Timestamp = {2007.05.03}, + Title = {Comparison of basic assumptions embedded in learning models for experience-based decision making}, + Volume = {12(3)}, + Year = {2005}} + +@article{Yechiam2005a, + Author = {Yechiam, E. and Busemeyer, J. R.}, + Journal = {Psychonomic Bulletin \& Review}, + Pages = {387--402}, + Title = {Comparison of Basic Assumptions Embedded in Learning Models for Experience--Based Decision Making}, + Volume = {12}, + Year = {2005}} + +@article{Yechiam2005b, + Author = {Yechiam, E. and Busemeyer, J. R. and Stout, J. C. and Bechara, A.}, + Journal = {Psychological Science}, + Pages = {973--978}, + Title = {Using Cognitive Models to Map Relations Between Neuropsychological Disorders and Human Decision--Making Deficits}, + Volume = {16}, + Year = {2005}} + +@article{Yechiam2007, + Author = {Yechiam, E. and Ert, E.}, + Journal = {Journal of Mathematical Psychology}, + Owner = {WooYoung Ahn}, + Pages = {75-84}, + Timestamp = {2007.07.18}, + Title = {Evaluating the reliance on past choices in adaptive learning models}, + Volume = {51}, + Year = {2007}} + +@article{Yechiam2006, + Author = {Yechiam, E. and Goodnight, J. and Bates, J. E. and Busemeyer, J. R. and Dodge, K. A. and Pettit, G. S. and Newman, J. P.}, + Journal = {Psychological Assessment}, + Owner = {Wooyoung Ahn}, + Pages = {239-249}, + Timestamp = {2007.04.30}, + Title = {A formal cognitive model of the {Go/No-Go} Discrimination Task: Evaluation and implications}, + Volume = {18(3)}, + Year = {2006}} + +@article{Yechiam2005c, + Author = {Yechiam, E. and Stout, J. C. and Busemeyer, J. R. and Rock, S. L. and Finn, P. R.}, + Journal = {Journal of Behavioral Decision Making}, + Pages = {97--110}, + Title = {Individual Differences in the Response to Foregone Payoffs: {A}n Examination of High Functioning Drug Abusers}, + Volume = {18}, + Year = {2005}} + +@incollection{Yechiam2007a, + Author = {Yechiam, E. and Veinott, E. S. and Busemeyer, J. R. and Stout, J. C.}, + Booktitle = {Advances in Clinical Cognitive Science: Formal Modeling and Assessment of Processes and Symptoms}, + Editor = {Neufeld, R.}, + Owner = {Wooyoung Ahn}, + Publisher = {APA Publications, Washington, DC.}, + Timestamp = {2007.05.03}, + Title = {Cognitive models for evaluating basic decision processes in clinical populations}, + Year = {2007}} + +@article{Yellott1971, + Author = {Yellott, J. I.}, + Journal = {Journal of Mathematical Psychology}, + Pages = {159--199}, + Title = {Correction for Fast Guessing and the Speed--accuracy Tradeoff in Choice Reaction Time}, + Volume = {8}, + Year = {1971}} + +@article{Yin2006, + Author = {Yin, H.H. and Knowlton, B.J.}, + Journal = {Nature Reviews Neuroscience}, + Number = {6}, + Pages = {464--476}, + Publisher = {Nature Publishing Group}, + Title = {{The role of the basal ganglia in habit formation}}, + Volume = {7}, + Year = {2006}} + +@article{Yoo2004, + Author = {Yoo, H. J. and Cho, S. C. and Ha, J. and Yune, S. K. and Kim, S. J. and Hwang, J. and Chung, A. and Sung, Y. H. and Lyoo, I. K.}, + Journal = {Psychiatry Clin. 
Neurosci.}, + Month = {Oct}, + Pages = {487--494}, + Title = {{{A}ttention deficit hyperactivity symptoms and internet addiction}}, + Volume = {58}, + Year = {2004}} + +@article{Yoo2006, + Author = {Yoo, H. J. and Kim, M. and Ha, J. H. and Chung, A. and Sim, M. E. and Kim, S. J. and Lyoo, I. K.}, + Journal = {Psychopathology}, + Pages = {25--31}, + Title = {{{B}iogenetic temperament and character and attention deficit hyperactivity disorder in {K}orean children}}, + Volume = {39}, + Year = {2006}} + +@article{Yoo2005, + Author = {Yoo, H. K. and Kim, M. J. and Kim, S. J. and Sung, Y. H. and Sim, M. E. and Lee, Y. S. and Song, S. Y. and Kee, B. S. and Lyoo, I. K.}, + Journal = {Eur. J. Neurosci.}, + Month = {Oct}, + Pages = {2089--2094}, + Title = {{{P}utaminal gray matter volume decrease in panic disorder: an optimized voxel-based morphometry study}}, + Volume = {22}, + Year = {2005}} + +@article{Yoon2009, + Author = {Yoon, S. J. and Lyoo, I. K. and Haws, C. and Kim, T. S. and Cohen, B. M. and Renshaw, P. F.}, + Journal = {Neuropsychopharmacology}, + Month = {Jun}, + Pages = {1810--1818}, + Title = {{{D}ecreased glutamate/glutamine levels may mediate cytidine's efficacy in treating bipolar depression: a longitudinal proton magnetic resonance spectroscopy study}}, + Volume = {34}, + Year = {2009}} + +@article{Yoon2005, + Author = {Yoon, S. J. and Pae, C. U. and Lee, H. and Choi, B. and Kim, T. S. and Lyoo, I. K. and Kwon, D. H. and Kim, D. J.}, + Journal = {Neurosci. Res.}, + Month = {Dec}, + Pages = {391--395}, + Title = {{{G}hrelin precursor gene polymorphism and methamphetamine dependence in the {K}orean population}}, + Volume = {53}, + Year = {2005}} + +@article{Yoshida2006, + Abstract = {Making optimal decisions in the face of uncertain or incomplete information + arises as a common problem in everyday behavior, but the neural processes + underlying this ability remain poorly understood. A typical case + is navigation, in which a subject has to search for a known goal + from an unknown location. Navigating under uncertain conditions requires + making decisions on the basis of the current belief about location + and updating that belief based on incoming information. Here, we + use functional magnetic resonance imaging during a maze navigation + task to study neural activity relating to the resolution of uncertainty + as subjects make sequential decisions to reach a goal. We show that + distinct regions of prefrontal cortex are engaged in specific computational + functions that are well described by a Bayesian model of decision + making. 
This permits efficient goal-oriented navigation and provides + new insights into decision making by humans.}, + Author = {Wako Yoshida and Shin Ishii}, + Doi = {10.1016/j.neuron.2006.05.006}, + Institution = {Graduate School of Information Science, Nara Institute of Science and Technology, 8916-5 Takayama, Ikoma, Nara 630-0192, Japan.}, + Journal = {Neuron}, + Keywords = {Adult; Bayes Theorem; Decision Making, physiology; Female; Goals; Humans; Magnetic Resonance Imaging; Male; Markov Chains; Models, Neurological; Prefrontal Cortex, physiology; Space Perception, physiology}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Jun}, + Number = {5}, + Owner = {Woo-Young Ahn}, + Pages = {781--789}, + Pii = {S0896-6273(06)00372-2}, + Pmid = {16731515}, + Timestamp = {2009.08.17}, + Title = {Resolution of uncertainty in prefrontal cortex.}, + Url = {http://dx.doi.org/10.1016/j.neuron.2006.05.006}, + Volume = {50}, + Year = {2006}, + Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.neuron.2006.05.006}} + +@article{Yoshida2005, + Author = {Yoshida, W. and Ishii, S.}, + Journal = {Neurocomputing}, + Pages = {253--269}, + Publisher = {Elsevier}, + Title = {{Model-based reinforcement learning: a computational model and an fMRI study}}, + Volume = {63}, + Year = {2005}} + +@article{Yoshinaga2000, + Author = {Yoshinaga, H. and Miyazima, S. and Mitake, S.}, + Journal = {Physica A}, + Pages = {582--586}, + Title = {Fluctuation of Biological Rhythm in Finger Tapping}, + Volume = {280}, + Year = {2000}} + +@article{Youn2002, + Author = {Youn, T. and Lyoo, I. K. and Kim, J. K. and Park, H. J. and Ha, K. S. and Lee, D. S. and Abrams, K. Y. and Lee, M. C. and Kwon, J. S.}, + Journal = {Biol Psychol}, + Month = {Sep}, + Pages = {109--120}, + Title = {{{R}elationship between personality trait and regional cerebral glucose metabolism assessed with positron emission tomography}}, + Volume = {60}, + Year = {2002}} + +@article{Young2007, + Author = {Young, J. W. and Minassian, A. and Paulus, M. P. and Geyer, M. A. and Perry, W.}, + Journal = {Neurosci Biobehav Rev}, + Pages = {882--896}, + Title = {{{A} reverse-translational approach to bipolar disorder: rodent and human studies in the {B}ehavioral {P}attern {M}onitor}}, + Volume = {31}, + Year = {2007}} + +@article{Young1978, + Abstract = {An eleven item clinician-administered Mania Rating Scale (MRS) is + introduced, and its reliability, validity and sensitivity are examined. + There was a high correlation between the scores of two independent + clinicians on both the total score (0.93) and the individual item + scores (0.66 to 0.92). The MRS score correlated highly with an independent + global rating, and with scores of two other mania rating scales administered + concurrently. The score also correlated with the number of days of + subsequent stay in hospital. It was able to differentiate statistically + patients before and after two weeks of treatment and to distinguish + levels of severity based on the global rating.}, + Author = {R. C. Young and J. T. Biggs and V. E. Ziegler and D. A.
Meyer}, + Journal = {Br J Psychiatry}, + Keywords = {Bipolar Disorder, psychology; Humans; Length of Stay; Psychiatric Status Rating Scales; Psychometrics}, + Language = {eng}, + Medline-Pst = {ppublish}, + Month = {Nov}, + Owner = {Young}, + Pages = {429--435}, + Pmid = {728692}, + Timestamp = {2010.05.01}, + Title = {A rating scale for mania: reliability, validity and sensitivity.}, + Volume = {133}, + Year = {1978}} + +@article{Young1991, + Author = {Young, S. T. and Porrino, L. J. and Iadarola, M. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Feb}, + Pages = {1291--1295}, + Title = {{{C}ocaine induces striatal c-fos-immunoreactive proteins via dopaminergic {D}1 receptors}}, + Volume = {88}, + Year = {1991}} + +@article{Yu2009a, + Author = {Yu, A.J. and Cohen, J.D.}, + Journal = {Advances in Neural Information Processing Systems}, + Title = {{Sequential effects: Superstition or rational behavior}}, + Volume = {21}, + Year = {2009}} + +@article{Yu2005, + Author = {Yu, A. J. and Dayan, P.}, + Journal = {Neuron}, + Month = {May}, + Pages = {681--692}, + Title = {{{U}ncertainty, neuromodulation, and attention}}, + Volume = {46}, + Year = {2005}} + +@article{Yu2002, + Author = {Yu, A. J. and Dayan, P.}, + Journal = {Neural Netw}, + Pages = {719--730}, + Title = {{{A}cetylcholine in cortical inference}}, + Volume = {15}, + Year = {2002}} + +@article{Yu2009, + Author = {Yu, A. J. and Dayan, P. and Cohen, J. D.}, + Journal = {J Exp Psychol Hum Percept Perform}, + Month = {Jun}, + Pages = {700--717}, + Title = {{{D}ynamics of attentional selection under conflict: toward a rational {B}ayesian account}}, + Volume = {35}, + Year = {2009}} + +@article{Yu1999, + Author = {Yu, X. H. and Zhang, E. T. and Craig, A. D. and Shigemoto, R. and Ribeiro-da-Silva, A. and De Koninck, Y.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {3545--3555}, + Title = {{{N}{K}-1 receptor immunoreactivity in distinct morphological types of lamina {I} neurons of the primate spinal cord}}, + Volume = {19}, + Year = {1999}} + +@article{Yulmetyev2002, + Author = {Yulmetyev, R. M. and Emelyanova, N. and H\"{a}nggi, P. and Gafarov, F. and Prokhorov, A.}, + Journal = {Physica A}, + Pages = {671--687}, + Title = {Long--range Memory and Non--{M}arkov Statistical Effects in Human Sensorimotor Coordination}, + Volume = {316}, + Year = {2002}} + +@article{Zachariou2006, + Author = {Zachariou, V. and Bolanos, C. A. and Selley, D. E. and Theobald, D. and Cassidy, M. P. and Kelz, M. B. and Shaw-Lutchman, T. and Berton, O. and Sim-Selley, L. J. and Dileone, R. J. and Kumar, A. and Nestler, E. J.}, + Journal = {Nat. Neurosci.}, + Month = {Feb}, + Pages = {205--211}, + Title = {{{A}n essential role for {D}elta{F}os{B} in the nucleus accumbens in morphine action}}, + Volume = {9}, + Year = {2006}} + +@article{Zachariou2003, + Author = {Zachariou, V. and Georgescu, D. and Sanchez, N. and Rahman, Z. and DiLeone, R. and Berton, O. and Neve, R. L. and Sim-Selley, L. J. and Selley, D. E. and Gold, S. J. and Nestler, E. J.}, + Journal = {Proc. Natl. Acad. Sci. U.S.A.}, + Month = {Nov}, + Pages = {13656--13661}, + Title = {{{E}ssential role for {R}{G}{S}9 in opiate action}}, + Volume = {100}, + Year = {2003}} + +@article{Zaki2001, + Author = {Zaki, S. R. and Nosofsky, R. 
M.}, + Journal = {Journal of Experimental Psychology: Learning, Memory, and Cognition}, + Pages = {1022--1041}, + Title = {Exemplar Accounts of Blending and Distinctiveness Effects in Perceptual Old--New Recognition}, + Volume = {27}, + Year = {2001}} + +@book{Zeeman1977, + Address = {New York}, + Author = {Zeeman, E. C.}, + Publisher = {Addison--Wesley}, + Title = {Catastrophe Theory: Selected Papers (1972-1977)}, + Year = {1977}} + +@article{Zeeman1976, + Author = {Zeeman, E. C.}, + Journal = {Scientific American}, + Pages = {65--83}, + Title = {Catastrophe Theory}, + Volume = {234}, + Year = {1976}} + +@article{Zeigenfuse2009, + Author = {Zeigenfuse, M. D. and Lee, M. D.}, + Owner = {Woo-Young Ahn}, + Publisher = {In N. Taatgen, H. van Rijn, J. Nerbonne, \& L. Shonmaker (Eds.), Proceedings of the 31st Annual Conference of the Cognitive Science Society. Austin, TX: Cognitive Science Society.}, + Timestamp = {2009.08.15}, + Title = {Bayesian nonparametric modeling of individual differences: A case study using decision-making on bandit problems.}, + Year = {2009}} + +@article{Zelicof-Paul2005, + Author = {Zelicof-Paul, A. and Smith-Lockridge, A. and Schnadower, D. and Tyler, S. and Levin, S. and Roskind, C. and Dayan, P.}, + Journal = {Curr. Opin. Pediatr.}, + Month = {Jun}, + Pages = {355--362}, + Title = {{{C}ontroversies in rapid sequence intubation in children}}, + Volume = {17}, + Year = {2005}} + +@article{Zhang1997, + Author = {Zhang, E. T. and Craig, A. D.}, + Journal = {J. Neurosci.}, + Month = {May}, + Pages = {3274--3284}, + Title = {{{M}orphology and distribution of spinothalamic lamina {I} neurons in the monkey}}, + Volume = {17}, + Year = {1997}} + +@article{Zhang1996, + Author = {Zhang, E. T. and Han, Z. S. and Craig, A. D.}, + Journal = {J. Comp. Neurol.}, + Month = {Apr}, + Pages = {537--549}, + Title = {{{M}orphological classes of spinothalamic lamina {I} neurons in the cat}}, + Volume = {367}, + Year = {1996}} + +@article{Zhang1993, + Author = {Zhang, P.}, + Journal = {Annals of Statistics}, + Pages = {299--313}, + Title = {Model Selection via Multifold Cross--validation}, + Volume = {21}, + Year = {1993}} + +@article{Zhaoping2006, + Author = {Zhaoping, L. and Dayan, P.}, + Journal = {Neural Netw}, + Month = {Nov}, + Pages = {1437--1439}, + Title = {{{P}re-attentive visual selection}}, + Volume = {19}, + Year = {2006}} + +@article{Zhaoping2003, + Author = {Zhaoping, L. and Herzog, M. H. and Dayan, P.}, + Journal = {Network}, + Month = {May}, + Pages = {233--247}, + Title = {{{N}onlinear ideal observation and recurrent preprocessing in perceptual learning}}, + Volume = {14}, + Year = {2003}} + +@article{Zhou2008, + Author = {Zhou, Z. and Zhu, G. and Hariri, A. R. and Enoch, M. A. and Scott, D. and Sinha, R. and Virkkunen, M. and Mash, D. C. and Lipsky, R. H. and Hu, X. Z. and Hodgkinson, C. A. and Xu, K. and Buzas, B. and Yuan, Q. and Shen, P. H. and Ferrell, R. E. and Manuck, S. B. and Brown, S. M. and Hauger, R. L. and Stohler, C. S. and Zubieta, J. K. and Goldman, D.}, + Journal = {Nature}, + Month = {Apr}, + Pages = {997--1001}, + Title = {{{G}enetic variation in human {N}{P}{Y} expression affects stress response and emotion}}, + Volume = {452}, + Year = {2008}} + +@article{Zhu2006, + Author = {Zhu, F.
and Qian, C.}, + Journal = {BMC neuroscience}, + Number = {1}, + Pages = {78}, + Publisher = {BioMed Central Ltd}, + Title = {{Berberine chloride can ameliorate the spatial memory impairment and increase the expression of interleukin-1 beta and inducible nitric oxide synthase in the rat model of Alzheimer's disease}}, + Volume = {7}, + Year = {2006}} + +@article{Zhu2004, + Author = {Zhu, W. and Volkow, N. D. and Ma, Y. and Fowler, J. S. and Wang, G. J.}, + Journal = {Alcohol Alcohol.}, + Pages = {53--58}, + Title = {{{R}elationship between ethanol-induced changes in brain regional metabolism and its motor, behavioural and cognitive effects}}, + Volume = {39}, + Year = {2004}} + +@article{Zijlstra2009, + Author = {Zijlstra, F. and Veltman, D. J. and Booij, J. and van den Brink, W. and Franken, I. H.}, + Journal = {Drug Alcohol Depend}, + Pages = {183--192}, + Title = {{{N}eurobiological substrates of cue-elicited craving and anhedonia in recently abstinent opioid-dependent males}}, + Volume = {99}, + Year = {2009}} + +@article{Zohary1994a, + Author = {Zohary, E. and Shadlen, M. N. and Newsome, W. T.}, + Journal = {Nature}, + Month = {Jul}, + Pages = {140--143}, + Title = {{{C}orrelated neuronal discharge rate and its implications for psychophysical performance}}, + Volume = {370}, + Year = {1994}} + +@article{Zorc2005, + Author = {Zorc, J. J. and Levine, D. A. and Platt, S. L. and Dayan, P. S. and Macias, C. G. and Krief, W. and Schor, J. and Bank, D. and Shaw, K. N. and Kuppermann, N.}, + Journal = {Pediatrics}, + Month = {Sep}, + Pages = {644--648}, + Title = {{{C}linical and demographic factors associated with urinary tract infection in young febrile infants}}, + Volume = {116}, + Year = {2005}} + +@book{Bollen1993, + Address = {Newbury Park, CA}, + Editor = {Bollen, K. A. and Long, J. S.}, + Publisher = {Sage Publications}, + Title = {Testing Structural Equation Models}, + Year = {1993}} + +@book{Doornik2001, + Address = {London}, + Editor = {Doornik, J. A.}, + Publisher = {Timberlake Consultants Press}, + Title = {{O}x: An Object--oriented Matrix Language}, + Year = {2001}} + +@book{Doukhan2003, + Address = {New York}, + Editor = {Doukhan, P. and Oppenheim, G. and Taqqu, M. S.}, + Publisher = {Springer Verlag}, + Title = {Theory and Applications of Long--range Dependence}, + Year = {2003}} + +@article{Gammerman1999, + Editor = {Gammerman, A. and Vovk, V.}, + Journal = {The Computer Journal}, + Title = {{K}olmogorov Complexity [Special issue]}, + Volume = {42(4)}, + Year = {1999}} + +@book{Gilks1996, + Address = {Boca Raton (FL)}, + Editor = {Gilks, W. R. and Richardson, S. and Spiegelhalter, D. J.}, + Publisher = {Chapman \& Hall/CRC}, + Title = {{M}arkov chain {M}onte {C}arlo in Practice}, + Year = {1996}} + +@book{Grunwald2005, + Address = {Cambridge, MA}, + Editor = {Gr\"{u}nwald, P. and Myung, I. J. and Pitt, M. A.}, + Publisher = {MIT Press}, + Title = {Advances in Minimum Description Length: Theory and Applications}, + Year = {2005}} + +@book{Handel1993, + Address = {New York}, + Editor = {Handel, P. H. and Chung, A. L.}, + Publisher = {AIP Press}, + Title = {Noise in Physical Systems and $1/f$ Fluctuations}, + Year = {1993}} + +@book{Rangarajan2003, + Address = {New York}, + Editor = {Rangarajan, G. 
and Ding, M.}, + Publisher = {Springer Verlag}, + Title = {Processes with Long--range Correlations: Theory and Applications}, + Year = {2003}} diff --git a/R/vignettes/bibtex/hBayesDM_bib_short.bib b/R/vignettes/bibtex/hBayesDM_bib_short.bib new file mode 100644 index 00000000..04a46748 --- /dev/null +++ b/R/vignettes/bibtex/hBayesDM_bib_short.bib @@ -0,0 +1,567 @@ +%% This BibTeX bibliography file was created using BibDesk. +%% http://bibdesk.sourceforge.net/ + + +%% Created for Woo-Young Ahn at 2019-02-21 18:15:41 +0900 + + +%% Saved with string encoding Unicode (UTF-8) + + + +@article{aylward2018, + Author = {Aylward, Jessica and Valton, Vincent and Ahn, Woo-Young and Bond, Rebecca L and Dayan, Peter and Roiser, Jonathan P and Robinson, Oliver J}, + Date-Added = {2019-02-21 09:15:00 +0000}, + Date-Modified = {2019-02-21 09:15:41 +0000}, + Journal = {PsyArXiv}, + Title = {Altered decision-making under uncertainty in unmedicated mood and anxiety disorders}, + Year = {2018}} + +@article{mathys2011bayesian, + Author = {Mathys, Christoph and Daunizeau, Jean and Friston, Karl J and Stephan, Klaas Enno}, + Date-Added = {2018-09-11 19:00:24 +0000}, + Date-Modified = {2018-09-11 19:00:24 +0000}, + Journal = {Frontiers in human neuroscience}, + Pages = {39}, + Publisher = {Frontiers}, + Title = {A Bayesian foundation for individual learning under uncertainty}, + Volume = {5}, + Year = {2011}} + +@article{bishara2010sequential, + Author = {Bishara, Anthony J and Kruschke, John K and Stout, Julie C and Bechara, Antoine and McCabe, David P and Busemeyer, Jerome R}, + Date-Added = {2018-09-11 18:59:00 +0000}, + Date-Modified = {2018-09-11 18:59:00 +0000}, + Journal = {Journal of mathematical psychology}, + Number = {1}, + Pages = {5--13}, + Publisher = {Elsevier}, + Title = {Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals}, + Volume = {54}, + Year = {2010}} + +@article{wunderlich2012, + Author = {Wunderlich, Klaus and Smittenaar, Peter and Dolan, Raymond J}, + Date-Added = {2018-09-11 18:57:44 +0000}, + Date-Modified = {2018-09-11 18:57:51 +0000}, + Journal = {Neuron}, + Number = {3}, + Pages = {418--424}, + Publisher = {Elsevier}, + Title = {Dopamine enhances model-based over model-free choice behavior}, + Volume = {75}, + Year = {2012}} + +@article{rutledge2014, + Author = {Rutledge, Robb B and Skandali, Nikolina and Dayan, Peter and Dolan, Raymond J}, + Date-Added = {2018-09-11 18:56:32 +0000}, + Date-Modified = {2018-09-11 18:56:39 +0000}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {33}, + Pages = {12252--12257}, + Publisher = {National Acad Sciences}, + Title = {A computational and neural model of momentary subjective well-being}, + Volume = {111}, + Year = {2014}} + +@article{levy2009neural, + Author = {Levy, Ifat and Snell, Jason and Nelson, Amy J and Rustichini, Aldo and Glimcher, Paul W}, + Date-Added = {2018-09-11 18:54:55 +0000}, + Date-Modified = {2018-09-11 18:54:55 +0000}, + Journal = {Journal of neurophysiology}, + Number = {2}, + Pages = {1036--1047}, + Publisher = {American Physiological Society Bethesda, MD}, + Title = {Neural representation of subjective value under risk and ambiguity}, + Volume = {103}, + Year = {2009}} + +@article{Frank2007, + Author = {Frank, Michael J. and Moustafa, Ahmed A. and Haughey, Heather M. 
and Curran, Tim and Hutchison, Kent E.}, + Date-Added = {2018-09-11 18:50:57 +0000}, + Date-Modified = {2018-09-11 19:07:45 +0000}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {41}, + Pages = {16311--16316}, + Title = {Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning}, + Volume = {104}, + Year = {2007}} + +@article{Frank2004, + Author = {Frank, M. J. and Seeberger, L. C. and O'reilly, R. C.}, + Date-Added = {2018-09-11 14:14:43 +0000}, + Date-Modified = {2018-09-11 14:14:43 +0000}, + Journal = {Science}, + Month = {Dec}, + Pages = {1940--1943}, + Title = {{{B}y carrot or by stick: cognitive reinforcement learning in parkinsonism}}, + Volume = {306}, + Year = {2004}} + +@article{haines2018, + Author = {Haines, Nathaniel and Vassileva, J. and Ahn, Woo-Young}, + Date-Added = {2018-09-11 14:10:45 +0000}, + Date-Modified = {2018-09-11 14:12:34 +0000}, + Journal = {Cognitive Science}, + Title = {The Outcome-Representation Learning model: a novel reinforcement learning model of the Iowa Gambling Task}, + Year = {in press}} + +@article{Wallsten2005, + Author = {Wallsten, T. S. and Pleskac, T. J. and Lejuez, C. W.}, + Date-Added = {2018-09-11 14:05:03 +0000}, + Date-Modified = {2018-09-11 14:05:03 +0000}, + Journal = {Psychological Review}, + Owner = {Wooyoung Ahn}, + Pages = {862-880}, + Timestamp = {2007.04.30}, + Title = {Modeling behavior in a clinically diagnostic sequential risk-taking task}, + Volume = {112(4)}, + Year = {2005}} + +@article{annis2017bayesian, + Author = {Annis, Jeffrey and Miller, Brent J and Palmeri, Thomas J}, + Date-Added = {2017-12-27 00:43:24 +0000}, + Date-Modified = {2017-12-27 00:43:24 +0000}, + Journal = {Behavior research methods}, + Number = {3}, + Pages = {863--886}, + Publisher = {Springer}, + Title = {Bayesian inference with Stan: A tutorial on adding custom distributions}, + Volume = {49}, + Year = {2017}} + +@article{chung2015social, + Author = {Chung, Dongil and Christopoulos, George I and King-Casas, Brooks and Ball, Sheryl B and Chiu, Pearl H}, + Date-Added = {2017-12-26 09:38:01 +0000}, + Date-Modified = {2017-12-26 09:38:01 +0000}, + Journal = {Nature neuroscience}, + Number = {6}, + Pages = {912--916}, + Publisher = {Nature Research}, + Title = {Social signals of safety and risk confer utility and have asymmetric effects on observers' choices}, + Volume = {18}, + Year = {2015}} + +@article{shen2016high, + Author = {Shen, Bo and Yin, Yunlu and Wang, Jiashu and Zhou, Xiaolin and McClure, Samuel M and Li, Jian}, + Date-Added = {2017-06-02 03:31:29 +0000}, + Date-Modified = {2017-06-02 03:31:29 +0000}, + Journal = {NeuroImage}, + Pages = {343--352}, + Publisher = {Elsevier}, + Title = {High-definition tDCS alters impulsivity in a baseline-dependent manner}, + Volume = {143}, + Year = {2016}} + +@article{mkrtchian2017modeling, + Author = {Mkrtchian, Anahit and Aylward, Jessica and Dayan, Peter and Roiser, Jonathan P and Robinson, Oliver J}, + Date-Added = {2017-06-02 03:29:34 +0000}, + Date-Modified = {2017-06-02 03:29:34 +0000}, + Journal = {Biological Psychiatry}, + Publisher = {Elsevier}, + Title = {Modeling Avoidance in Mood and Anxiety Disorders Using Reinforcement Learning}, + Year = {2017}} + +@article{jollans2017computational, + Author = {Jollans, Lee and Whelan, Robert and Venables, Louise and Turnbull, Oliver H and Cella, Matteo and Dymond, Simon}, + Date-Added = {2017-06-02 03:27:58 +0000}, + Date-Modified = {2017-06-02 03:27:58 +0000}, + Journal = {Behavioural brain
research}, + Pages = {28--35}, + Publisher = {Elsevier}, + Title = {Computational EEG modelling of decision making under ambiguity reveals spatio-temporal dynamics of outcome evaluation}, + Volume = {321}, + Year = {2017}} + +@article{seymour2012serotonin, + Author = {Seymour, Ben and Daw, Nathaniel D and Roiser, Jonathan P and Dayan, Peter and Dolan, Ray}, + Date-Added = {2017-05-23 21:16:23 +0000}, + Date-Modified = {2017-05-23 21:16:23 +0000}, + Journal = {Journal of Neuroscience}, + Number = {17}, + Pages = {5833--5842}, + Publisher = {Soc Neuroscience}, + Title = {Serotonin selectively modulates reward value in human decision-making}, + Volume = {32}, + Year = {2012}} + +@article{brown2008simplest, + Author = {Brown, Scott and Heathcote, Andrew}, + Date-Added = {2017-05-23 21:14:39 +0000}, + Date-Modified = {2017-12-27 00:54:09 +0000}, + Journal = {Cognitive psychology}, + Number = {3}, + Pages = {153--178}, + Publisher = {Elsevier}, + Title = {The simplest complete model of choice response time: Linear ballistic accumulation}, + Volume = {57}, + Year = {2008}} + +@article{ratcliff1978theory, + Author = {Ratcliff, Roger}, + Date-Added = {2017-05-23 21:13:52 +0000}, + Date-Modified = {2017-05-23 21:13:52 +0000}, + Journal = {Psychological review}, + Number = {2}, + Pages = {59}, + Publisher = {American Psychological Association}, + Title = {A theory of memory retrieval.}, + Volume = {85}, + Year = {1978}} + +@article{worthy2013, + Author = {Worthy, Darrell A and Pang, Bo and Byrne, Kaileigh A}, + Date-Added = {2016-12-16 15:49:55 +0000}, + Date-Modified = {2016-12-16 15:49:55 +0000}, + Journal = {Frontiers in psychology}, + Pages = {640}, + Publisher = {Frontiers}, + Title = {Decomposing the roles of perseveration and expected value representation in models of the Iowa gambling task}, + Volume = {4}, + Year = {2013}} + +@article{vehtari2015e, + Author = {Vehtari, Aki and Gelman, Andrew and Gabry, Jonah}, + Date-Added = {2016-12-16 15:49:48 +0000}, + Date-Modified = {2016-12-16 15:49:48 +0000}, + Journal = {arXiv preprint arXiv:1507.04544}, + Title = {Efficient implementation of leave-one-out cross-validation and {WAIC} for evaluating fitted {B}ayesian models}, + Year = {2015}} + +@article{Tom2007, + Author = {Tom, Sabrina M. and Fox, Craig R. 
and Trepel, Christopher and Poldrack, Russell A.}, + Date-Added = {2016-12-16 15:49:39 +0000}, + Date-Modified = {2016-12-16 15:49:39 +0000}, + Journal = {Science}, + Owner = {WooYoung Ahn}, + Pages = {515-518}, + Timestamp = {2007.12.12}, + Title = {The neural basis of loss aversion in decision-making under risk}, + Volume = {315}, + Year = {2007}} + +@article{steingroever2014absolute, + Author = {Steingroever, Helen and Wetzels, Ruud and Wagenmakers, Eric-Jan}, + Date-Added = {2016-12-16 15:49:32 +0000}, + Date-Modified = {2016-12-16 15:49:32 +0000}, + Journal = {Decision}, + Number = {3}, + Pages = {161}, + Publisher = {Educational Publishing Foundation}, + Title = {Absolute performance of reinforcement-learning models for the Iowa Gambling Task.}, + Volume = {1}, + Year = {2014}} + +@article{sokol2009, + Author = {Sokol-Hessner, Peter and Hsu, Ming and Curley, Nina G and Delgado, Mauricio R and Camerer, Colin F and Phelps, Elizabeth A}, + Date-Added = {2016-12-16 15:49:24 +0000}, + Date-Modified = {2016-12-16 15:49:24 +0000}, + Journal = {Proceedings of the National Academy of Sciences}, + Number = {13}, + Pages = {5035--5040}, + Publisher = {National Acad Sciences}, + Title = {Thinking like a trader selectively reduces individuals' loss aversion}, + Volume = {106}, + Year = {2009}} + +@article{Shiffrin2008, + Author = {Shiffrin, R.M. and Lee, Michael D. and Kim, W. and Wagenmakers, E.J.}, + Date-Added = {2016-12-16 15:49:14 +0000}, + Date-Modified = {2016-12-16 15:49:14 +0000}, + Journal = {Cognitive Science: A Multidisciplinary Journal}, + Number = {8}, + Pages = {1248--1284}, + Publisher = {Psychology Press}, + Title = {{A survey of model evaluation approaches with a tutorial on hierarchical Bayesian methods}}, + Volume = {32}, + Year = {2008}} + +@article{samuelson1937, + Author = {Samuelson, Paul A}, + Date-Added = {2016-12-16 15:49:09 +0000}, + Date-Modified = {2016-12-16 15:49:09 +0000}, + Journal = {The Review of Economic Studies}, + Number = {2}, + Pages = {155--161}, + Publisher = {JSTOR}, + Title = {A note on measurement of utility}, + Volume = {4}, + Year = {1937}} + +@article{o2007model, + Author = {{O'Doherty}, John P. and Hampton, Alan and Kim, Hackjin}, + Date-Added = {2016-12-16 15:49:02 +0000}, + Date-Modified = {2016-12-16 15:49:02 +0000}, + Journal = {Annals of the New York Academy of sciences}, + Number = {1}, + Pages = {35--53}, + Publisher = {Wiley Online Library}, + Title = {Model-based fMRI and its application to reward learning and decision making}, + Volume = {1104}, + Year = {2007}} + +@article{den2013dissociable, + Author = {den Ouden, Hanneke EM and Daw, Nathaniel D and Fernandez, Guill{\'e}n and Elshout, Joris A and Rijpkema, Mark and Hoogman, Martine and Franke, Barbara and Cools, Roshan}, + Date-Added = {2016-12-16 15:48:52 +0000}, + Date-Modified = {2016-12-16 15:48:52 +0000}, + Journal = {Neuron}, + Number = {4}, + Pages = {1090--1100}, + Publisher = {Elsevier}, + Title = {Dissociable effects of dopamine and serotonin on reversal learning}, + Volume = {80}, + Year = {2013}} + +@article{Newman1985, + Author = {Newman, Joseph P. and Widom, C. S. 
and Nathan, S.}, + Date-Added = {2016-12-16 15:48:41 +0000}, + Date-Modified = {2016-12-16 15:48:41 +0000}, + Journal = {Journal of Personality and Individual Differences}, + Owner = {Wooyoung Ahn}, + Pages = {1316-1327}, + Timestamp = {2007.04.30}, + Title = {Passive avoidance in syndromes of disinhibition, psychopathy, and extraversion}, + Volume = {48}, + Year = {1985}} + +@article{newman1986passive, + Author = {Newman, Joseph P. and Kosson, David S.}, + Date-Added = {2016-12-16 15:48:31 +0000}, + Date-Modified = {2016-12-16 15:48:31 +0000}, + Journal = {Journal of abnormal psychology}, + Number = {3}, + Pages = {252}, + Publisher = {American Psychological Association}, + Title = {Passive avoidance learning in psychopathic and nonpsychopathic offenders.}, + Volume = {95}, + Year = {1986}} + +@book{Mazur1987, + Author = {Mazur, J. E.}, + Date-Added = {2016-12-16 15:48:23 +0000}, + Date-Modified = {2017-06-02 03:42:46 +0000}, + Publisher = {Lawrence Erlbaum Associates, Inc}, + Title = {An adjusting procedure for studying delayed reinforcement.}, + Year = {1987}} + +@article{Lee2011hba, + Author = {Lee, Michael D.}, + Date-Added = {2016-12-16 15:48:12 +0000}, + Date-Modified = {2016-12-16 15:48:12 +0000}, + Journal = {Journal of Mathematical Psychology}, + Number = {1}, + Pages = {1--7}, + Publisher = {Elsevier}, + Title = {How cognitive modeling can benefit from hierarchical Bayesian models}, + Volume = {55}, + Year = {2011}} + +@book{lee2014bayesian, + Author = {Lee, Michael D. and Wagenmakers, Eric-Jan}, + Date-Added = {2016-12-16 15:48:12 +0000}, + Date-Modified = {2016-12-16 15:48:12 +0000}, + Publisher = {Cambridge University Press}, + Title = {Bayesian cognitive modeling: A practical course}, + Year = {2014}} + +@book{kruschke2014doing, + Author = {Kruschke, John}, + Date-Added = {2016-12-16 15:47:57 +0000}, + Date-Modified = {2016-12-16 15:47:57 +0000}, + Publisher = {Academic Press}, + Title = {Doing Bayesian data analysis: A tutorial with {R}, {JAGS}, and {S}tan}, + Year = {2014}} + +@article{Jessup2008, + Author = {Jessup, R.K. and Bishara, A.J. and Busemeyer, J.R.}, + Date-Added = {2016-12-16 15:47:48 +0000}, + Date-Modified = {2016-12-16 15:47:48 +0000}, + Journal = {Psychological Science}, + Number = {10}, + Pages = {1015--1022}, + Publisher = {Blackwell Publishing Inc}, + Title = {{Feedback Produces Divergence From Prospect Theory in Descriptive Choice}}, + Volume = {19}, + Year = {2008}} + +@article{huys2011disentangling, + Author = {Huys, Quentin J. M. and Cools, Roshan and G{\"o}lzer, Martin and Friedel, Eva and Heinz, Andreas and Dolan, Raymond J and Dayan, Peter}, + Date-Added = {2016-12-16 15:47:35 +0000}, + Date-Modified = {2016-12-16 15:47:35 +0000}, + Journal = {PLoS computational biology}, + Number = {4}, + Pages = {e1002028}, + Publisher = {Public Library of Science}, + Title = {Disentangling the roles of approach, activation and valence in instrumental and pavlovian responding}, + Volume = {7}, + Year = {2011}} + +@article{Hertwig2004, + Author = {Hertwig, Ralph and Barron, G. and Weber, E. U. and Erev, Ido}, + Date-Added = {2016-12-16 15:47:20 +0000}, + Date-Modified = {2016-12-16 15:47:20 +0000}, + Journal = {Psychological Science}, + Owner = {WooYoung Ahn}, + Pages = {534-539}, + Timestamp = {2007.07.18}, + Title = {Decisions from experience and the effect of rare events in risky choice}, + Volume = {15}, + Year = {2004}} + +@article{guitart2012go, + Author = {Guitart-Masip, Marc and Huys, Quentin J. M.
and Fuentemilla, Lluis and Dayan, Peter and Duzel, Emrah and Dolan, Raymond J},
+ Date-Added = {2016-12-16 15:47:10 +0000},
+ Date-Modified = {2016-12-16 15:47:10 +0000},
+ Journal = {Neuroimage},
+ Number = {1},
+ Pages = {154--166},
+ Publisher = {Elsevier},
+ Title = {Go and no-go learning in reward and punishment: interactions between affect and effect},
+ Volume = {62},
+ Year = {2012}}
+
+@article{gu2015necessary,
+ Author = {Gu, Xiaosi and Wang, Xingchao and Hula, Andreas and Wang, Shiwei and Xu, Shuai and Lohrenz, Terry M and Knight, Robert T and Gao, Zhixian and Dayan, Peter and Montague, P Read},
+ Date-Added = {2016-12-16 15:47:01 +0000},
+ Date-Modified = {2016-12-16 15:47:01 +0000},
+ Journal = {The Journal of Neuroscience},
+ Number = {2},
+ Pages = {467--473},
+ Publisher = {Soc Neuroscience},
+ Title = {Necessary, yet dissociable contributions of the insular and ventromedial prefrontal cortices to norm adaptation: computational and lesion evidence in humans},
+ Volume = {35},
+ Year = {2015}}
+
+@article{glascher2009,
+ Author = {Gl{\"a}scher, Jan and Hampton, Alan N and O'Doherty, John P},
+ Date-Added = {2016-12-16 15:46:54 +0000},
+ Date-Modified = {2016-12-16 15:46:54 +0000},
+ Journal = {Cerebral Cortex},
+ Number = {2},
+ Pages = {483--495},
+ Publisher = {Oxford Univ Press},
+ Title = {Determining a role for ventromedial prefrontal cortex in encoding action-based value signals during reward-related decision making},
+ Volume = {19},
+ Year = {2009}}
+
+@article{erev2010choice,
+ Author = {Erev, Ido and Ert, Eyal and Roth, Alvin E and Haruvy, Ernan and Herzog, Stefan M and Hau, Robin and Hertwig, Ralph and Stewart, Terrence and West, Robert and Lebiere, Christian},
+ Date-Added = {2016-12-16 15:46:43 +0000},
+ Date-Modified = {2016-12-16 15:46:43 +0000},
+ Journal = {Journal of Behavioral Decision Making},
+ Number = {1},
+ Pages = {15--47},
+ Publisher = {Wiley Online Library},
+ Title = {A choice prediction competition: Choices from experience and from description},
+ Volume = {23},
+ Year = {2010}}
+
+@article{ebert2007,
+ Author = {Ebert, Jane EJ and Prelec, Drazen},
+ Date-Added = {2016-12-16 15:46:26 +0000},
+ Date-Modified = {2016-12-16 15:46:26 +0000},
+ Journal = {Management Science},
+ Number = {9},
+ Pages = {1423--1438},
+ Publisher = {INFORMS},
+ Title = {The fragility of time: Time-insensitivity and valuation of the near and far future},
+ Volume = {53},
+ Year = {2007}}
+
+@article{Daw2006,
+ Author = {Daw, N. D. and O'Doherty, J. P. and Dayan, P. and Seymour, B. and Dolan, R.
J.},
+ Date-Added = {2016-12-16 15:46:19 +0000},
+ Date-Modified = {2016-12-16 15:46:19 +0000},
+ Journal = {Nature},
+ Pages = {876--879},
+ Title = {Cortical Substrates for Exploratory Decisions in Humans},
+ Volume = {441},
+ Year = {2006}}
+
+@article{daw2011,
+ Author = {Daw, Nathaniel D and Gershman, Samuel J and Seymour, Ben and Dayan, Peter and Dolan, Raymond J},
+ Date-Added = {2016-12-16 15:46:09 +0000},
+ Date-Modified = {2016-12-16 15:46:09 +0000},
+ Journal = {Neuron},
+ Number = {6},
+ Pages = {1204--1215},
+ Publisher = {Elsevier},
+ Title = {Model-based influences on humans' choices and striatal prediction errors},
+ Volume = {69},
+ Year = {2011}}
+
+@article{daw2011trial,
+ Author = {Daw, Nathaniel D},
+ Date-Added = {2016-12-16 15:45:58 +0000},
+ Date-Modified = {2016-12-16 15:45:58 +0000},
+ Journal = {Decision making, affect, and learning: Attention and performance XXIII},
+ Pages = {3--38},
+ Publisher = {Oxford University Press Oxford},
+ Title = {Trial-by-trial data analysis using computational models},
+ Volume = {23},
+ Year = {2011}}
+
+@article{cavanagh2013jn,
+ Author = {Cavanagh, James F and Eisenberg, Ian and Guitart-Masip, Marc and Huys, Quentin J. M. and Frank, Michael J},
+ Date-Added = {2016-12-16 15:45:48 +0000},
+ Date-Modified = {2016-12-16 15:45:48 +0000},
+ Journal = {The Journal of Neuroscience},
+ Number = {19},
+ Pages = {8541--8548},
+ Publisher = {Soc Neuroscience},
+ Title = {Frontal theta overrides Pavlovian learning biases},
+ Volume = {33},
+ Year = {2013}}
+
+@article{ahn2011model,
+ Author = {Ahn, Woo-Young and Krawitz, A. and Kim, W. and Busemeyer, Jerome R. and Brown, J. W.},
+ Date-Added = {2016-12-16 15:45:38 +0000},
+ Date-Modified = {2017-12-27 00:52:44 +0000},
+ Journal = {Journal of Neuroscience, Psychology, and Economics},
+ Number = {2},
+ Pages = {95},
+ Publisher = {Educational Publishing Foundation},
+ Title = {A model-based fMRI analysis with hierarchical Bayesian parameter estimation.},
+ Volume = {4},
+ Year = {2011}}
+
+@article{ahn2014decision,
+ Author = {Ahn, Woo-Young and Vasilev, Georgi and Lee, Sung-Ha and Busemeyer, Jerome R. and Kruschke, John K and Bechara, Antoine and Vassileva, Jasmin},
+ Date-Added = {2016-12-16 15:45:38 +0000},
+ Date-Modified = {2016-12-16 15:45:38 +0000},
+ Journal = {Frontiers in Psychology},
+ Pages = {849},
+ Publisher = {Citeseer},
+ Title = {Decision-making in stimulant and opiate addicts in protracted abstinence: Evidence from computational modeling with pure users},
+ Volume = {5},
+ Year = {2014}}
+
+@article{ahn2008cogsci,
+ Author = {Ahn, Woo-Young and Busemeyer, Jerome R. and Wagenmakers, Eric-Jan and Stout, Julie C},
+ Date-Added = {2016-12-16 15:45:38 +0000},
+ Date-Modified = {2016-12-16 15:45:38 +0000},
+ Journal = {Cognitive Science},
+ Number = {8},
+ Pages = {1376--1402},
+ Publisher = {Wiley Online Library},
+ Title = {Comparison of decision learning models using the generalization criterion method},
+ Volume = {32},
+ Year = {2008}}
+
+@book{Busemeyer2010,
+ Author = {Busemeyer, Jerome R. and Diederich, A.},
+ Date-Added = {2016-12-16 15:45:29 +0000},
+ Date-Modified = {2016-12-16 15:45:29 +0000},
+ Publisher = {Sage Publications, Inc},
+ Title = {{Cognitive modeling}},
+ Year = {2010}}
+
+@article{Busemeyer2000a,
+ Author = {Busemeyer, Jerome R.
and Wang, Y.-M.},
+ Date-Added = {2016-12-16 15:45:29 +0000},
+ Date-Modified = {2016-12-16 15:45:29 +0000},
+ Journal = {Journal of Mathematical Psychology},
+ Pages = {171--189},
+ Title = {Model Comparisons and Model Selections Based on Generalization Criterion Methodology},
+ Volume = {44},
+ Year = {2000}}
diff --git a/R/vignettes/csl/apa-short-authors.csl b/R/vignettes/csl/apa-short-authors.csl
new file mode 100644
index 00000000..1411b9c6
--- /dev/null
+++ b/R/vignettes/csl/apa-short-authors.csl
@@ -0,0 +1,623 @@
+
+
\ No newline at end of file
diff --git a/R/vignettes/csl/apa_modified.csl b/R/vignettes/csl/apa_modified.csl
new file mode 100644
index 00000000..6676f247
--- /dev/null
+++ b/R/vignettes/csl/apa_modified.csl
@@ -0,0 +1,495 @@
+
+
diff --git a/R/vignettes/csl/apa_modified_orig.csl b/R/vignettes/csl/apa_modified_orig.csl
new file mode 100644
index 00000000..6676f247
--- /dev/null
+++ b/R/vignettes/csl/apa_modified_orig.csl
@@ -0,0 +1,495 @@
+
+
diff --git a/R/vignettes/csl/biomed-central.csl b/R/vignettes/csl/biomed-central.csl
new file mode 100644
index 00000000..f8caff62
--- /dev/null
+++ b/R/vignettes/csl/biomed-central.csl
@@ -0,0 +1,1314 @@
    <?xml version="1.0" encoding="utf-8"?>
    <style xmlns="http://purl.org/net/xbiblio/csl" class="in-text" version="1.0" demote-non-dropping-particle="sort-only" default-locale="en-US">
    <info>
    <title>BioMed Central</title>
    <id>http://www.zotero.org/styles/biomed-central</id>
    <link href="http://www.zotero.org/styles/biomed-central" rel="self"/>
    <link href="http://www.biomedcentral.com/bmcbioinformatics/authors/instructions/researcharticle#formatting-references" rel="documentation"/>
    <!-- This documentation link contains incorrect information about the number of authors for supplemental info; it was confirmed directly by BMC that the correct max number of authors to list is 30, see also https://github.com/citation-style-language/styles/issues/190
    <link href="http://www.biomedcentral.com/authors/instprepdoc#refs" rel="documentation"/>
    -->
    <author>
    <name>Robert M Flight</name>
    <email>rflight79@gmail.com</email>
    </author>
    <category citation-format="numeric"/>
    <category field="medicine"/>
    <category field="biology"/>
    <updated>2013-01-12T23:55:23+00:00</updated>
    <rights license="http://creativecommons.org/licenses/by-sa/3.0/">This work is licensed under a Creative Commons Attribution-ShareAlike 3.0 License</rights>
    </info>
    <locale xml:lang="en">
    <terms>
    <term name="collection-editor" form="long">
    <single>Series editor</single>
    <multiple>Series editors</multiple>
    </term>
    </terms>
    </locale>
    <macro name="author">
    <names variable="author" suffix=": ">
    <name sort-separator=" " initialize-with="" name-as-sort-order="all" delimiter=", " delimiter-precedes-last="always"/>
    <label form="short" prefix=" (" suffix=")" text-case="capitalize-first" strip-periods="true"/>
    <substitute>
    <names variable="editor"/>
    </substitute>
    </names>
    </macro>
    <macro name="editor">
    <names variable="editor" suffix=".">
    <label form="verb" suffix=" " text-case="capitalize-first"/>
    <name sort-separator=" " initialize-with="" name-as-sort-order="all" delimiter=", " delimiter-precedes-last="always"/>
    </names>
    </macro>
    <macro name="publisher">
    <group delimiter=": ">
    <choose>
    <if type="thesis" match="none">
    <text variable="publisher-place"/>
    </if>
    </choose>
    <text variable="publisher"/>
    </group>
    </macro>
    <macro name="container-title">
    <choose>
    <if type="article-journal" match="any">
    <text variable="container-title" font-style="italic" form="short" strip-periods="true"/>
    </if>
    <else>
    <text variable="container-title" font-style="italic"/>
    </else>
    </choose>
    </macro>
    <macro name="title">
    <choose>
    <if type="bill book graphic legal_case legislation motion_picture report song" match="any">
    <text variable="title" font-style="italic" text-case="title"/>
    </if>
    <else>
    <text variable="title" font-weight="bold"/>
    </else>
    </choose>
    </macro>
    <macro name="volume">
    <choose>
    <if type="article-journal article-magazine" match="any">
    <text variable="volume" font-weight="bold"/>
    </if>
    </choose>
    </macro>
    <macro name="volume-book">
    <choose>
    <if type="article-journal article-magazine" match="none">
    <group delimiter=" " font-style="italic">
    <text term="volume" text-case="capitalize-first"/>
    <text variable="volume"/>
    </group>
    </if>
    </choose>
    </macro>
    <macro name="edition">
    <choose>
    <if is-numeric="edition">
    <group delimiter=" ">
    <number variable="edition" form="ordinal"/>
    <text term="edition" form="long" suffix="."/>
    </group>
    </if>
    <else>
    <text variable="edition" suffix="."/>
    </else>
    </choose>
    </macro>
    <citation collapse="citation-number">
    <sort>
    <key variable="citation-number"/>
    </sort>
    <layout prefix="[" suffix="]" delimiter=", ">
    <text variable="citation-number"/>
    </layout>
    </citation>
    <bibliography et-al-min="31" et-al-use-first="30">
    <layout>
    <text variable="citation-number" suffix=". "/>
    <choose>
    <if type="webpage">
    <!-- The webpages will be the bane of my existence -->
    <text macro="title"/>
    <text variable="URL" prefix=" [" suffix="]"/>
    </if>
    <else>
    <!-- Now for everything else -->
    <group suffix=".">
    <text macro="author"/>
    <text macro="title" suffix=". "/>
    <choose>
    <if type="bill book graphic legal_case legislation motion_picture report song thesis" match="any">
    <group delimiter=". " prefix=" " suffix="; ">
    <text macro="edition"/>
    <text macro="volume-book"/>
    <text variable="genre" font-style="italic"/>
    <text macro="publisher"/>
    </group>
    </if>
    <else-if type="chapter paper-conference" match="any">
    <group delimiter=". " suffix="; ">
    <group delimiter=" ">
    <text term="in" text-case="capitalize-first"/>
    <text macro="container-title"/>
    </group>
    <text macro="volume-book"/>
    <text macro="edition"/>
    <text macro="editor"/>
    <text macro="publisher"/>
    </group>
    </else-if>
    <else>
    <text macro="container-title" suffix=" "/>
    </else>
    </choose>
    <date variable="issued">
    <date-part name="year"/>
    </date>
    <text macro="volume" prefix=", "/>
    <!-- This will hopefully deal with supplements at least reasonably well -->
    <choose>
    <if is-numeric="issue">
    </if>
    <else>
    <text variable="issue" prefix="(" suffix=")"/>
    </else>
    </choose>
    <text variable="page" prefix=":"/>
    </group>
    <group prefix=" [" suffix="]" delimiter=": ">
    <names variable="collection-editor">
    <name sort-separator=" " initialize-with="" name-as-sort-order="all" delimiter=", " delimiter-precedes-last="always"/>
    <label prefix=" (" suffix=")" form="long"/>
    </names>
    <group delimiter=", ">
    <text variable="collection-title" font-style="italic" text-case="title"/>
    <choose>
    <if is-numeric="collection-number">
    <text variable="collection-number" prefix="vol. "/>
    </if>
    <else>
    <text variable="collection-number"/>
    </else>
    </choose>
    </group>
    </group>
    </else>
    </choose>
    </layout>
    </bibliography>
    </style>
    + + + + + + + + + + + + + + + diff --git a/R/vignettes/css/APAStyle.css b/R/vignettes/css/APAStyle.css new file mode 100644 index 00000000..f4139d50 --- /dev/null +++ b/R/vignettes/css/APAStyle.css @@ -0,0 +1,122 @@ +@import "http://fonts.googleapis.com/css?family=Anonymous+Pro:400,400italic,700,700italic&subset=latin,latin-ext"; +@import url(http://fonts.googleapis.com/css?family=Inconsolata:400,700); +@import url(http://fonts.googleapis.com/css?family=Source+Code+Pro:400,700); + + +body { + margin:0 auto; + font-family: "Times New Roman", Times, serif; + font-size:1em; + text-align:left; + max-width:6.5in +} + +table { + border-collapse: collapse; + border-bottom:1px solid #000; + margin:auto; +} + +th, td{padding:5px;} + + + +p { + text-align:left; + margin-bottom:2ex; + text-indent: 2em; + line-height: 1.25em; +} + +.references p {padding-left: 2.5em ; + text-indent: -2.5em;} + +.level1 h1 {font-size:1em;} + +.level2 h2 {font-size:1em;} + +.level3 h3 {font-size:1em;} + +.level4 + h4 {font-size:1em;} + +h1 { + text-align:center; +} + +h2, h3, h4, h5 { +text-align: left; +} + +h3, h4, h5 { +margin-left: 2em; +} + +.author , .date, .author_afil h4 {text-align:left; + margin-left:0; + font-weight:normal; + font-style:normal; + padding:0px; + margin:0px; + } + +#header h4 em { + font-weight:normal; + font-style:normal; + } + + + +blockquote p {text-indent:0em; + margin-left:2.5em;} + +blockquote p + p { +text-indent: 2.5em; +} + +pre, img { +max-width: 100%; +} + +pre { +overflow-x: auto; +} + +code { + font-family:"Anonymous Pro",monospace; + text-align:left; + padding:5px; + font-size: 92%; + border: 1px solid #ccc; +} + +pre code { +font-size: 0.9em; +font-family: 'Source Code Pro' !important; +text-align: left; +display: block; padding: 0.5em; +} + +code.r { +font-family: 'Inconsolata' !important; +font-size: 1.2em; +background-color: rgba(160, 160, 160, 0.2); +} +sup,sub { + position: relative; + vertical-align: 0; +} + +sup { + bottom: .4em +} + +sub { + top:.4em +} + +.header th { + border-bottom:1px solid #000; + border-top: 1px solid #000; + font-weight: normal; +} \ No newline at end of file diff --git a/R/vignettes/css/Rpubs.css b/R/vignettes/css/Rpubs.css new file mode 100644 index 00000000..15bcd941 --- /dev/null +++ b/R/vignettes/css/Rpubs.css @@ -0,0 +1,11 @@ +body, td { + font-size: 16px; +} + +code.r { + font-size: 14px; +} + +pre { + font-size: 14px +} diff --git a/R/vignettes/getting_started.Rmd b/R/vignettes/getting_started.Rmd new file mode 100644 index 00000000..d7dd2604 --- /dev/null +++ b/R/vignettes/getting_started.Rmd @@ -0,0 +1,488 @@ +--- +title: "Getting Started" +csl: csl/apa-short-authors.csl +editor_options: + chunk_output_type: console +output: + html_document: + fig_height: 3 + fig_width: 8 + keep_md: yes + toc: yes + pdf_document: + latex_engine: xelatex + number_sections: yes +header-includes: \setlength\parindent{24pt}\setlength{\parskip}{0.0pt plus 1.0pt} +fig_caption: yes +pkgdown: + as_is: yes +bibliography: bibtex/hBayesDM_bib_short.bib +--- + +hBayesDM (**_h_**ierarchical **_Bayes_**ian modeling of **_D_**ecision-**_M_**aking tasks) is a user-friendly R package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. Click [**here**](https://cran.r-project.org/web/packages/hBayesDM/hBayesDM.pdf) to download its help file (reference manual). Click [**here**](https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002) to read our paper published in Computational Psychiatry. 
Click [**here**](https://u.osu.edu/ccsl/files/2016/12/hBayesDM_SRP_v1_revised-1qxbg1x.pdf) to download a poster we presented at several conferences/meetings. You can find hBayesDM on [CRAN](https://cran.r-project.org/web/packages/hBayesDM/) and [GitHub](https://github.com/CCS-Lab/hBayesDM).
+
+## Motivation
+
+Computational modeling provides a quantitative framework for investigating latent neurocognitive processes (e.g., learning rate, reward sensitivity) and interactions among multiple decision-making systems. Parameters of a computational model reflect psychologically meaningful individual differences; thus, obtaining accurate parameter estimates of a computational model is critical to improving the interpretation of its findings. Hierarchical Bayesian analysis (HBA) is regarded as the gold standard for parameter estimation, especially when the amount of information from each participant is small (see below, "Why hierarchical Bayesian analysis?"). However, many researchers interested in HBA often find the approach too technical and challenging to implement.
+
+We introduce a free R package, **hBayesDM**, which offers HBA of various computational models on an array of decision-making tasks (see below for a list of tasks and models currently available). _**Users can perform HBA of various computational models with a single line of code**_. Example datasets are also available. With hBayesDM, we hope anyone with minimal knowledge of programming can take advantage of advanced computational modeling and HBA. It is our expectation that hBayesDM will contribute to the dissemination of these computational tools and enable researchers in related fields to easily characterize latent neurocognitive processes within their study populations.
+
+## Why hierarchical Bayesian analysis (HBA)?
+
+![](images/HBA_concept.png)
+
+Most computational models do not have closed-form solutions, so we need to estimate parameter values. Traditionally, parameters are estimated at the individual level with maximum likelihood estimation (MLE): point estimates are obtained for each individual separately. However, individual MLE estimates are often noisy, especially when the amount of data is insufficient. A group-level analysis (e.g., group-level MLE), which estimates a single set of parameters for the whole group of individuals, may generate more reliable estimates but inevitably ignores individual differences.
+
+HBA and other hierarchical approaches [e.g., @huys2011disentangling] allow for individual differences while pooling information across individuals. Both individual and group parameter estimates (i.e., posterior distributions) are estimated simultaneously in a mutually constraining fashion. Consequently, individual parameter estimates tend to be more stable and reliable because commonalities among individuals are captured and informed by the group tendencies [e.g., @ahn2011model]. HBA also yields full posterior distributions instead of point estimates (thus providing rich information about the parameters), and it makes it easy to do group comparisons in a Bayesian fashion (e.g., comparing clinical and non-clinical groups; see the example below).
+
+HBA is a branch of Bayesian statistics, and the conceptual framework of Bayesian data analysis is clearly laid out in [Chapter 2](https://sites.google.com/site/doingbayesiandataanalysis/sample-chapter/DoingBayesianDataAnalysisChapter2.pdf) of John Kruschke's book [@kruschke2014doing].
In Bayesian statistics, we assume prior beliefs (i.e., prior distributions) for model parameters and update the priors into posterior distributions given the data (e.g., trial-by-trial choices and outcomes) using [Bayes' rule](https://en.wikipedia.org/wiki/Bayes%27_rule). Note that the prior distributions we use for model parameters are vague (e.g., flat) or weakly informative priors, so they play a minimal role in the posterior distribution.
+
+For Bayesian updating, we use the Stan software package (), which implements a very efficient Markov Chain Monte Carlo (MCMC) algorithm called Hamiltonian Monte Carlo (HMC). HMC is known to be effective and works well even for large, complex models. See the Stan reference manual () and Chapter 14 of @kruschke2014doing for a comprehensive description of HMC and Stan. What is MCMC, and why should we use it? Remember, we need to update our priors into posterior distributions in order to make inferences about model parameters. Simply put, MCMC is a way of approximating a posterior distribution by drawing a large number of samples from it. MCMC algorithms are used when posterior distributions cannot be derived analytically, or when MCMC is more efficient than searching over the whole grid of the parameter space (i.e., grid search). To learn more about the basic foundations of MCMC, we recommend Chapter 7 of @kruschke2014doing.
+
+
+Detailed specifications of our Bayesian models are not available in text yet (stay tuned for our tutorial paper, whose citation is listed below). In the meantime, users can go over our Stan code to check how we implement each computational model (e.g., `pathTo_gng_m1 = system.file("stan/gng_m1.stan", package="hBayesDM")` ). We made strong efforts to optimize the Stan code through reparameterization (e.g., the Matt trick) and vectorization.
+
+
+## Prerequisites
+* R version 3.4.0 or later is required. R is freely available from .
+* **Latest Stan (RStan 2.18.1 or later)**. Detailed instructions for installing RStan are available at this link: .
+* RStudio () is not required but strongly recommended.
+
+**Note**: Additional R packages (e.g., [ggplot2](https://cran.r-project.org/web/packages/ggplot2/), [loo](https://cran.r-project.org/web/packages/loo/)) will be installed (if not installed yet) during the installation of hBayesDM.
+
+
+## Tasks & models implemented in hBayesDM
+
+The tasks and models currently implemented in hBayesDM are listed in the table below.
+Table: As of hBayesDM v1.0.0 (`r format(Sys.time(), '%B %d, %Y')`)
+
+
+## How to install hBayesDM
+
+There are three ways to install hBayesDM, as described below. _Make sure to install [RStan](http://mc-stan.org/interfaces/rstan) prior to installing hBayesDM, and restart R/RStudio after the installation of hBayesDM._ Typically, RStan can be installed just by typing `install.packages("rstan", dependencies = TRUE)`. **For Windows, you need to install Rtools first in order to install RStan, and then install hBayesDM from CRAN**. For detailed instructions on installing RStan, please go to this link: https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started. If you are a Mac user, [make sure Xcode is installed](https://github.com/stan-dev/rstan/wiki/RStan-Mac-OS-X-Prerequisite-Installation-Instructions#step2_3).
+
+How can you tell if RStan is correctly installed? Check if you can fit the 'Eight Schools' model without a problem. Check [here](http://mc-stan.org/interfaces/rstan.html) or here if you experience difficulty installing RStan.
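+
+As a quick sanity check, the following minimal sketch (adapted from the standard RStan 'Eight Schools' example; the object names here are arbitrary) should compile and sample without errors if RStan is set up correctly:
+
+```{r eval=FALSE}
+library(rstan)
+
+# The classic 'Eight Schools' hierarchical model, written inline
+schools_code <- "
+data {
+  int<lower=0> J;          // number of schools
+  real y[J];               // estimated treatment effects
+  real<lower=0> sigma[J];  // standard errors of the effects
+}
+parameters {
+  real mu;
+  real<lower=0> tau;
+  vector[J] eta;
+}
+transformed parameters {
+  vector[J] theta = mu + tau * eta;  // non-centered parameterization
+}
+model {
+  eta ~ normal(0, 1);
+  y ~ normal(theta, sigma);
+}
+"
+
+schools_data <- list(J = 8,
+                     y = c(28, 8, -3, 7, -1, 1, 18, 12),
+                     sigma = c(15, 10, 16, 11, 9, 11, 10, 18))
+
+fit <- stan(model_code = schools_code, data = schools_data)
+print(fit)  # a summary table here means RStan is working
+```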
+
+### Method A (recommended for all users - Windows/Mac/Linux)
+
+Use the following call:
+```{r eval=FALSE}
+install.packages("hBayesDM", dependencies=TRUE)
+```
+
+### Method B
+
+Install the package from GitHub:
+```{r eval=FALSE}
+## install 'devtools' if required
+if (!require(devtools)) install.packages("devtools")
+devtools::install_github("CCS-Lab/hBayesDM")
+```
+
+### Method C
+
+1. Download a copy from [**here**](https://cran.r-project.org/src/contrib/hBayesDM_1.0.0.tar.gz) to a directory (e.g., "~/Downloads").
+2. Open R(Studio) and set the working directory to the download folder (e.g., `setwd("~/Downloads")`).
+3. Install the package from the downloaded file.
+
+```{r eval=FALSE}
+install.packages(pkgs="hBayesDM_1.0.0.tar.gz", dependencies=TRUE, repos=NULL)
+```
+
+### Precompiling Stan models during installation
+If you follow the directions described below, Stan models will be precompiled during installation and will run immediately when called. This is recommended if you are a frequent hBayesDM user!
+
+```{r eval=FALSE}
+Sys.setenv(BUILD_ALL='true')  # Build all the models on installation
+Sys.setenv(MAKEFLAGS='-j 4')  # Use 4 cores for compilation (or the number you want)
+
+install.packages("hBayesDM")  # Install from CRAN
+## or
+devtools::install_github("CCS-Lab/hBayesDM")  # Install from GitHub
+```
+
+**We highly recommend you use multiple cores for compiling, since compiling all the models takes quite a long time.**
+
+
+## How to use hBayesDM
+
+First, open RStudio (or just R) and load the package:
+
+```{r results='hide', message=FALSE, warning=FALSE}
+library(hBayesDM)
+```
+
+The four steps of doing HBA with hBayesDM are illustrated below. As an example, four models of the orthogonalized Go/Nogo task (Guitart-Masip et al., 2012; Cavanagh et al., 2013) are fit and compared with the hBayesDM package.
+
+
+![](images/hBayesDM_pipeLine.png)
+
+
+### 1) Prepare the data
+
+* For fitting a model with hBayesDM, all subjects' data should be combined into a single text (*.txt) file. Look at the sample dataset and the help file (e.g., `?gng_m1`) for each task and carefully follow the instructions.
+* Subjects' data must contain variables that are consistent with the column names specified in the help file, though extra variables are allowed in practice.
+* It is okay if the number of trials differs across subjects, but there must be no N/A data. If some trials contain N/A data (e.g., `choice=NA` in trial #10), remove those trials first (see the sketch at the end of this section).
+* Sample data are available [**here**](https://u.osu.edu/ccsl/files/2016/03/sampleData_hBayesDM_0.2.0-1d9qdvj.zip), although users can also fit a model on sample data without downloading anything by using one of the function arguments (see below). Once the hBayesDM package is installed, sample data can also be retrieved from the package folder. Note that the file name of the sample (example) data for a given task is **taskName_exampleData.txt** (e.g., dd_exampleData.txt, igt_exampleData.txt, gng_exampleData.txt, etc.). See each model's help file (e.g., `?gng_m1`) to check the required data columns and their labels.
+```{r eval=FALSE}
+dataPath = system.file("extdata/gng_exampleData.txt", package="hBayesDM")
+```
+
+If you download the sample data to "~/Downloads", you may specify the path to the data file like this:
+```{r eval=FALSE}
+dataPath = "~/Downloads/gng_exampleData.txt"
+```
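+
+As a sketch of the N/A cleanup mentioned above (the file name `gng_data.txt` and the choice column `keyPressed` are hypothetical placeholders for your own data):
+
+```{r eval=FALSE}
+raw = read.table("gng_data.txt", header=TRUE)  # read the combined data file
+raw = raw[!is.na(raw$keyPressed), ]            # drop trials with missing responses
+write.table(raw, "gng_data_clean.txt", sep="\t", row.names=FALSE, quote=FALSE)
+```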
+
+
+### 2) Fit candidate models
+
+Below, the `gng_m1` model is fit to its sample data. The command indicates that four MCMC chains are run and four cores are used for parallel computing. If you enter "example" as the argument for `data`, hBayesDM will use the sample data for the task. Note that you can save the output to a file (see the `saveDir` argument) or send an email when fitting is complete (see the `email` argument). You can also assign your own initial values (see the `inits` argument; e.g., `inits=c(0.1, 0.2, 1.0)`):
+```{r eval=FALSE}
+output1 = gng_m1(data="example", niter=2000, nwarmup=1000, nchain=4, ncore=4)
+```
+This is equivalent to the command below, because the default numbers of total (including warmup) iterations (MCMC samples), warmup iterations, and chains are 2,000, 1,000, and 4 for `gng` models:
+```{r eval=FALSE}
+output1 = gng_m1("example", ncore=4)
+```
+
+```{r echo=FALSE}
+file_output1 = './cached_output1.rda'
+if (file.exists(file_output1)) {
+  load(file_output1)
+} else {
+  output1 = gng_m1(data="example", niter=2000, nwarmup=1000, nchain=4, ncore=4)
+  save(output1, file = file_output1)
+}
+```
+
+
+Executing the command will generate messages like those below in the R console. It will take approximately 2~3 minutes (with the `gng_m1` model and "example" data) for the model fitting to complete (with MCMC sampling). Note that you may get warning messages about "numerical problems" or about a certain number of "divergent transitions after warmup". When we check our models with example datasets, warning messages appear mostly at the beginning of the warmup period, and there are very few divergent transitions after warmup. In such cases, you can ignore the warnings. Also see Appendix D of the [Stan Reference Manual](https://github.com/stan-dev/stan/releases/download/v2.17.0/stan-reference-2.17.0.pdf).
+
+```
+Model name = gng_m1
+Data file  = example
+
+Details:
+ # of chains                   = 4
+ # of cores used               = 4
+ # of MCMC samples (per chain) = 2000
+ # of burn-in samples          = 1000
+ # of subjects                 = 10
+ # of (max) trials per subject = 240
+
+****************************************
+** Use VB estimates as initial values **
+****************************************
+
+
+***********************************
+**  Loading a precompiled model  **
+***********************************
+starting worker pid=75130 on localhost:11950 at 08:25:48.905
+starting worker pid=75138 on localhost:11950 at 08:25:49.101
+
+SAMPLING FOR MODEL 'gng_m1' NOW (CHAIN 1).
+
+Chain 1, Iteration:    1 / 2000 [  0%]  (Warmup)
+SAMPLING FOR MODEL 'gng_m1' NOW (CHAIN 2).
+...
+```
+
+When model fitting is complete, you will see the following message, and the results are stored in `output1`.
+```
+************************************
+**** Model fitting is complete! ****
+************************************
+```
+
+
+`output1`, an hBayesDM object, is a list with the following elements (class: "hBayesDM"):
+
+1. `model`: Name of the fitted model (i.e., `output1$model` is 'gng_m1').
+2. `allIndPars`: Summary of individual subjects' parameters (default: _mean_). Users can also choose to use _median_ or _mode_ (e.g., `output1 = gng_m1("example", indPars="mode")` ).
+3. `parVals`: Posterior samples of all parameters. Extracted by `rstan::extract(rstan_object, permuted=T)`. **Note that hyper (group) mean parameters are indicated by `mu_PARAMETER` (e.g., `mu_xi`, `mu_ep`, `mu_rho`).**
+4. `fit`: RStan object (i.e., `fit = stan(file='gng_m1.stan', ...)` ).
+5. `rawdata`: Raw trial-by-trial data used for modeling.
Raw data are provided in the output to allow users to easily access the data and compare trial-by-trial model-based regressors (e.g., prediction errors) with choice data.
+6. `modelRegressor` (optional): Trial-by-trial model-based regressors, such as prediction errors, the values of the chosen option, etc. For each model, we pre-select appropriate model-based regressors. Currently (version 0.2.3.3), this feature is available only for the orthogonalized Go/NoGo task.
+
+```
+> output1$allIndPars
+          xi        ep      rho subjID
+1 0.03688558 0.1397615 5.902901      1
+2 0.02934812 0.1653435 6.066120      2
+3 0.04467025 0.1268796 5.898099      3
+4 0.02103926 0.1499842 6.185020      4
+5 0.02620808 0.1498962 6.081908      5
+...
+```
+
+```
+> output1$fit
+Inference for Stan model: gng_m1.
+4 chains, each with iter=2000; warmup=1000; thin=1;
+post-warmup draws per chain=1000, total post-warmup draws=4000.
+
+         mean se_mean   sd 2.5%  25%  50%  75% 97.5% n_eff Rhat
+mu_xi    0.03    0.00 0.02 0.00 0.02 0.03 0.05  0.08  2316 1.00
+mu_ep    0.15    0.00 0.02 0.11 0.13 0.15 0.16  0.19  4402 1.00
+mu_rho   5.97    0.01 0.72 4.76 5.45 5.89 6.40  7.61  3821 1.00
+sigma[1] 0.54    0.06 1.02 0.02 0.18 0.35 0.61  1.99   318 1.01
+sigma[2] 0.12    0.00 0.08 0.01 0.05 0.10 0.16  0.31  2620 1.00
+sigma[3] 0.12    0.00 0.09 0.01 0.05 0.10 0.16  0.33  2402 1.00
+...
+```
+
+
+$\hat{R}$ (`Rhat`) is an index of the convergence of the chains. $\hat{R}$ values close to 1.00 indicate that the MCMC chains have converged to their stationary target distributions. When we check the MCMC performance of our models on sample data, $\hat{R}$ values are 1.00 for most parameters, or at most 1.04.
+
+
+
+### 3) Plot model parameters
+
+Make sure to visually diagnose MCMC performance (i.e., visually check whether MCMC samples are well mixed and converged to stationary distributions). For the diagnosis or visualization of hyper (group) parameters, you can use `plot.hBayesDM` or just `plot`, which automatically dispatches to the method matching the object's class. The class of any hBayesDM output is `hBayesDM`.
+
+Let's first visually diagnose the MCMC performance of the hyper parameters with trace plots:
+
+```{r echo=TRUE}
+plot(output1, type="trace", fontSize=11)  # traceplot of hyper parameters. Set font size to 11.
+```
+
+The trace plots indicate that the MCMC samples are indeed well mixed and converged, which is consistent with their $\hat{R}$ values (see [**here**](http://stats.stackexchange.com/questions/20437/why-should-we-care-about-rapid-mixing-in-mcmc-chains) for some discussion on why we care about mixing). Note that the plots above exclude burn-in samples. If you want, you can include the burn-in (warmup) MCMC samples:
+```{r echo=TRUE}
+plot(output1, type="trace", inc_warmup=T)  # traceplot of hyper parameters w/ warmup samples
+```
+
+You can also plot the posterior distributions of the hyper (group) parameters with `plot`:
+```{r echo=TRUE}
+plot(output1)
+```
+
+
+To visualize individual parameters, you can use our newly updated function `plotInd` (based on Stan's native function `stan_plot`). For example, to plot each individual's $\epsilon$ (learning rate) parameter (e.g., individual posterior distributions):
+
+```{r echo=TRUE, message=FALSE, warning=FALSE, fig.height=5, fig.width=8}
+plotInd(output1, "ep")
+```
+
+
+
+
+### 4) Compare models (and groups)
+
+To compare models, you first fit all models in the same manner as the example above (e.g., `output4 = gng_m4("example", niter=2000, nwarmup=1000, nchain=4, ncore=4)`), as in the sketch below.
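+
+A minimal sketch (the sampling settings mirror the `gng_m1` call above; adjust them as needed):
+```{r eval=FALSE}
+## fit the remaining candidate models of the orthogonalized Go/Nogo task
+output2 = gng_m2("example", niter=2000, nwarmup=1000, nchain=4, ncore=4)
+output3 = gng_m3("example", niter=2000, nwarmup=1000, nchain=4, ncore=4)
+output4 = gng_m4("example", niter=2000, nwarmup=1000, nchain=4, ncore=4)
+```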
Next, we use the command `printFit`, which is a convenient way to summarize the Leave-One-Out Information Criterion (LOOIC) or the Widely Applicable Information Criterion (WAIC) of all models under consideration (see @vehtari2015e for the details of LOOIC and WAIC). By default, the `printFit` function uses LOOIC, which is preferable to WAIC when there are influential observations [@vehtari2015e].
+
+Assuming the four models' outputs are `output1` (gng_m1), `output2` (gng_m2), `output3` (gng_m3), and `output4` (gng_m4), their model fits can be simultaneously summarized by:
+
+```
+> printFit(output1, output2, output3, output4)
+   Model    LOOIC
+1 gng_m1 1588.843
+2 gng_m2 1571.129
+3 gng_m3 1573.872
+4 gng_m4 1543.335
+```
+
+Note that lower LOOIC values indicate better model fit. Thus, `gng_m4` has the best LOOIC of the four models. Users can print WAIC or both criteria by calling `printFit(output1, output2, output3, output4, ic="waic")` or `printFit(output1, output2, output3, output4, ic="both")`. Use the `extract_ic` function (e.g., `extract_ic(output3)` ) if you want more detailed information, including standard errors and the expected log pointwise predictive density (elpd). Note that the `extract_ic` function can be used only on a single model output.
+
+We also want to remind you that there are multiple ways to compare computational models (e.g., the simulation method (absolute model performance), parameter recovery, and the generalization criterion), and goodness of fit (e.g., LOOIC or WAIC) is just one of them. Check whether predictions from your model (e.g., a "posterior predictive check") can mimic the data (same data or new data) with reasonable accuracy. See @kruschke2014doing (for posterior predictive checks), Guitart-Masip et al. (2012) (for goodness of fit and simulation performance on the orthogonalized Go/Nogo task), and @Busemeyer2000a (for the generalization criterion), as well as Ahn et al. (2008; 2014) and @steingroever2014absolute (for combinations of multiple model comparison methods).
+
+To compare two groups in a Bayesian fashion [e.g., @ahn2014decision], first fit each group with the same model and, ideally, the same number of MCMC samples. For example:
+
+```{r eval=FALSE}
+data_group1 = "~/Project_folder/gng_data_group1.txt"  # data file for group1
+data_group2 = "~/Project_folder/gng_data_group2.txt"  # data file for group2
+
+output_group1 = gng_m4(data_group1)  # fit group1 data with the gng_m4 model
+output_group2 = gng_m4(data_group2)  # fit group2 data with the gng_m4 model
+
+## After model fitting is complete for both groups,
+## evaluate the group difference (e.g., on the 'pi' parameter) by examining
+## the posterior distribution of group mean differences.
+
+diffDist = output_group1$parVals$mu_pi - output_group2$parVals$mu_pi  # group1 - group2
+HDIofMCMC( diffDist )  # Compute the 95% Highest Density Interval (HDI).
+plotHDI( diffDist )    # plot the group mean differences
+```
+
+
+### 5) Extracting trial-by-trial regressors for model-based fMRI/EEG analysis
+
+In model-based neuroimaging [e.g., @o2007model], model-based time series of a latent cognitive process are generated by computational models; the time series are then convolved with a hemodynamic response function and regressed against fMRI or EEG data. This model-based neuroimaging approach has been particularly popular in cognitive neuroscience.
+
+
+The biggest challenge in performing model-based fMRI/EEG is learning how to extract trial-by-trial model-based regressors.
The hBayesDM package allows users to easily extract model-based regressors that can be used for model-based fMRI or EEG analysis. **Note that in the current version (version 0.4.0), only the orthogonalized Go/NoGo task provides model-based regressors.** The regressors currently provided are the trial-by-trial stimulus value (**SV**) and the action weights and action values (**W(Go)**, **W(NoGo)**, **Q(Go)**, **Q(NoGo)**; see below). With the trial-by-trial regressors, users can easily use their favorite neuroimaging package (e.g., Statistical Parametric Mapping (SPM; http://www.fil.ion.ucl.ac.uk/spm/)) to perform model-based fMRI analysis. See our [paper](https://www.mitpressjournals.org/doi/abs/10.1162/CPSY_a_00002) (**Extracting Trial-by-Trial Regressors for Model-Based fMRI/EEG Analysis**) for more details.
+
+
+As an example, if you would like to extract trial-by-trial stimulus values (i.e., the expected value of the stimulus on each trial), first fit a model like the following (set the `modelRegressor` argument to `TRUE`; its default value is `FALSE`):
+
+```{r eval=FALSE}
+## fit example data with the gng_m3 model
+output3 = gng_m3(data="example", niter=2000, nwarmup=1000, modelRegressor=TRUE)
+```
+
+```{r echo=FALSE}
+file_output3 = './cached_output3.rda'
+if (file.exists(file_output3)) {
+  load(file_output3)
+} else {
+  output3 = gng_m3(data="example", niter=2000, nwarmup=1000, nchain=4, ncore=4,
+                   modelRegressor=TRUE)
+  save(output3, file = file_output3)
+}
+```
+
+
+Once sampling is complete, all model-based regressors are contained in the `modelRegressor` list.
+
+```{r eval=TRUE}
+## store all subjects' stimulus value (SV) in 'sv_all'
+sv_all = output3$modelRegressor$SV
+
+dim(output3$modelRegressor$SV)  # number of rows = # of subjects (=10), number of columns = # of trials (=240)
+
+## visualize SV (Subject #1)
+plot(sv_all[1, ], type="l", xlab="Trial", ylab="Stimulus Value (subject #1)")
+
+## visualize SV (Subject #5)
+plot(sv_all[5, ], type="l", xlab="Trial", ylab="Stimulus Value (subject #5)")
+```
+
+Similarly, users can extract and visualize the other model-based regressors: **W(Go)**, **W(NoGo)**, **Q(Go)**, and **Q(NoGo)** are stored in `Wgo`, `Wnogo`, `Qgo`, and `Qnogo`, respectively.
+
+
+### 6) Variational inference for approximate posterior sampling
+
+To use Stan's variational algorithm for approximate posterior sampling in hBayesDM, users just need to set `vb=TRUE` (default = `FALSE`). It takes very little time (especially with precompiled models) to do variational inference, so try it yourself for any model! However, variational inference should be used only to get rough estimates; it is recommended that users use MCMC for final inferences.
+
+For example, to run `gng_m3` using variational inference:
+
+```{r eval=FALSE}
+## fit example data with the gng_m3 model
+output3 = gng_m3(data="example", vb = TRUE)
+```
+
+Note that input arguments for MCMC sampling (e.g., `nchain`, `niter`, `nthin`, `nwarmup`) are not specified here. See `?rstan::vb` for more details.
+
+
+
+### 7) Posterior predictive checks
+
+Simply put, _posterior predictive checks_ refer to using a fitted model to generate simulated data and checking whether the simulated data are similar to the actual data. Posterior predictive checks are useful for assessing whether a model generates valid predictions.
+
+From v0.5.0, users can run posterior predictive checks on all models in hBayesDM except the drift-diffusion models. Simulated data from posterior predictive checks are contained in `hBayesDM_OUTPUT$parVals$y_pred`.
In a future release, we will include a function/command that conveniently summarizes and plots posterior predictive checks. In the meantime, users can write their own code like the following:
+
+```{r eval=FALSE}
+## fit example data with the gng_m3 model and run posterior predictive checks
+x = gng_m3(data="example", niter=2000, nwarmup=1000, nchain=4, ncore=4, inc_postpred = TRUE)
+
+## dimension of x$parVals$y_pred
+dim(x$parVals$y_pred)  # y_pred --> 4000 (MCMC samples) x 10 (subjects) x 240 (trials)
+#> [1] 4000   10  240
+
+y_pred_mean = apply(x$parVals$y_pred, c(2,3), mean)  # average of 4000 MCMC samples
+
+dim(y_pred_mean)  # y_pred_mean --> 10 (subjects) x 240 (trials)
+#> [1]  10 240
+
+numSubjs = dim(x$allIndPars)[1]  # number of subjects
+
+subjList = unique(x$rawdata$subjID)  # list of subject IDs
+maxT = max(table(x$rawdata$subjID))  # maximum number of trials
+true_y = array(NA, c(numSubjs, maxT))  # true data (`true_y`)
+
+## true data for each subject
+for (i in 1:numSubjs) {
+  tmpID = subjList[i]
+  tmpData = subset(x$rawdata, subjID == tmpID)
+  true_y[i, ] = tmpData$keyPressed  # only for data with a 'choice' column
+}
+
+## Subject #1
+plot(true_y[1, ], type="l", xlab="Trial", ylab="Choice (0 or 1)", yaxt="n")
+lines(y_pred_mean[1,], col="red", lty=2)
+axis(side=2, at = c(0,1) )
+legend("bottomleft", legend=c("True", "PPC"), col=c("black", "red"), lty=1:2)
+```
+
+![](images/PPC.png)
+
+## To-do list
+
+We plan to add more tasks and models in the near future, including the following. If you have any requests for a specific task or model, please let us know.
+
+* The Hierarchical Gaussian Filter [@mathys2011bayesian].
+* More sequential sampling models (e.g., drift-diffusion models with different drift rates for multiple conditions).
+* Models for the passive avoidance learning task [@newman1986passive; @Newman1985].
+* Models for the Stop Signal Task (SST).
+* Allowing users to extract model-based regressors [@o2007model] from more tasks.
+
+## Citation
+
+If you used hBayesDM or some of its code for your research, please cite [this paper][paper]:
+
+```bibtex
+@article{hBayesDM,
+  title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package},
+  author = {Ahn, Woo-Young and Haines, Nathaniel and Zhang, Lei},
+  journal = {Computational Psychiatry},
+  year = {2017},
+  volume = {1},
+  pages = {24--57},
+  publisher = {MIT Press},
+  url = {doi:10.1162/CPSY_a_00002},
+}
+```
+
+[paper]: https://www.mitpressjournals.org/doi/full/10.1162/CPSY_a_00002
+
+## Papers citing hBayesDM
+
+Here is a selected list of papers that we know used or cited hBayesDM (from Google Scholar). Let us know if you used hBayesDM for your papers!
+
+- [Papers citing hBayesDM (Google Scholar)][paper-citing-hbayesdm]
+
+[paper-citing-hbayesdm]: https://scholar.google.co.kr/scholar?oi=bibs&hl=en&cites=14115085235970942065&as_sdt=5
+
+## Suggested reading
+
+You can refer to other helpful review papers or books [@lee2014bayesian; @daw2011trial; @Busemeyer2010; @Lee2011hba; @Shiffrin2008] to learn more about HBA or computational modeling in general.
+
+
+## Other Useful Links
+
+1. "Modelling behavioural data" by Quentin Huys, available in .
+2. Introductory tutorial on reinforcement learning by Jill O'Reilly and Hanneke den Ouden, available in .
+3. VBA Toolbox: A flexible modeling (MATLAB) toolbox using Variational Bayes ().
+4. TAPAS: A collection of algorithms and software tools written in MATLAB.
Developed by the Translational Neuromodeling Unit (TNU) at Zurich ().
+5. Bayesian analysis toolbox for delay discounting data, available in .
+6. rtdists: Response time distributions in R, available in .
+7. RWiener: Wiener process distribution functions, available in .
+
+## Acknowledgements
+
+This work was supported in part by the National Institute on Drug Abuse (NIDA) under award number R01DA021421 (PI: Jasmin Vassileva).
+
+## References
+
diff --git a/R/vignettes/images/Figure3.pdf b/R/vignettes/images/Figure3.pdf
new file mode 100644
index 0000000000000000000000000000000000000000..a9115c9d3ab9ca865f84c3003d936accaeb86999
GIT binary patch
literal 28433
z>Ul0<<4x1nxA~RC^Gsqjw09mND(iea*48CDXFYN#8G2JOP*gG$%bGQ%$F2iaNv?e- zJi84^NZRSVQ=U+stY{J5Yn*0|@qFv)F@sh%H@u1BRitNYojOMe%P|7$6lNrK)->76 zO!EYFMQq+;b#N2PfPOsgF*jpfSLA^c_(_sjew$DX?MRjtMGqi!+m(!`%cVnULPDtQ zfJ>~WWhB50{vK`{^uVxGT zQlrVN;F5DS8-)e)w8ey!_B!%VeU3M!T8J`<$j4WX`<;->u{79P)}o6ep4CAR?>480 zG>u=JUC!$6krf}_(Q68eVQG2)6>voG?1eZKmqNNF?V57fgHP1#8chfxb$=?PS^ooJ zZn(!Cl;m(*q&qF)=kv2jGA6iFU(wuvx_k}2Vy8p|Ot>}{ZC0ZyNbj<$qnfL+Ny1|F zhv18ZBj$B^mxe4=!}TT0dIg0C$}*+%UAk-*J#Qsd)EP-0}(VPpf^y}&g(rEl^IZdQU1!Hxe!16gtk^?of@3MR)GF_1kd zs?lhTx5`y`>c+fVr^p<{MT2{fxzxqapQWIEr98q_-5|BJ4{N1MS9S=DVOPFF=Z2?- z&Om6uKjgdcU`>1bd_h~13Bt4#i;t_0Bq5`8K%lgH*zx6!rtkQnw66uT6uv>add@D3 z8}m{H0_}2*4|w6lim^+dWuI*Fac)gpE+BuUMN3jpFQz>mNBwSAR;%}fpG+9>4z~Gf zzAlCglYjYJ7Mapac9HFL)n?Upwa#@7t(Ad^$08qe`SoU))U^^rq}DoS&L{LZD&^>K zUuiy3i;}NV%ju)s(F;muzdZu6Vcstmp_SrFw^k#Wo(rmNZ9aaI0xce3D)li*K~uUL zq*xGsIU7_(Dw!oTmfKZsbn?RDnWSI|bhEfTZjEssl%DzaL!YaBjg-_#nP0AAgsCTO znEz{IU((0|IVoHntU7%OjfOjwYD{-qMP4-u!Dy9qjG@XbQlBgvag~`&-@F#nc+l3{ zzd2GxzB;*9G37*}zUE-n08|*&a*Sg%f(y#Y$&b$O)~4I^m(*@GfGdcQD}Bb=O$T7e zTZQZ%wphrm(s?vwiVsO^)YzCkpk@>mY-1_CMh{YOjY-g@ z;lA3Z*>_n8mM<|}^$m;hr#f{rGC7ddZw;&WeQYllk5pfl@N81)Dr;b2N>dr_x_6!u z*z+kgRpT)1JHo&eh=u_b?6F$WM4`qVh?_od{cs8s)>J& z5J_>uYiv1e6R|QYirq>WN>@@d4aS?c!JEDcL(;pOet<8$yT!_Oer0xi@e$!2Wq+~1 zOd9+H8tQ(C(rS{2j+5@Xd4yzwtf7zu0TREe-F|iq?NS$?W)MAxUG3}DgockYH2yY} z-EoS|s!*-3#m~U21{E)Vqo(AME2@g+pR@5nG;xtKuTiT61 zHb*nej#P~Gd@qBB8Byr=pLySM2;PAjnrDJ+Ep}Rf*I1=FDfG4gX!L2!SoDU)p8C&%zNl z`-l8I+4HCgj(kE}B45-pdWowKBLMCfXGQ)>=EqI^R9flb^YHC5+iFY zxU^|yMQDLm6>=?T)=9Fn+bo<{1iNsNu?sqJa<~}l0clZc+V_X}?h05dui-u~c=JFO zA#l!)bdL93qn*{TMEqWHbNOZ>^2RX5*U?f5yR)Fat-Z=}&3XMe;aKhFqpf&@?C{Q2 ztjV-5$CIKJyY_ZXa)){(g#d}q&%E#2*dlS!wTb6lP?2+&+bEJLK2|L3wSCf#b|)Jr zLa$xC%Ql#%9vx(nEvIsPtrsc%h8qQZzLNW>hLntu<9>MSJ1@0zQ=R&eJhd$pIsb!c zuyQZM`vwJO+dz-v&5xr~O~jV=&BtYK=SMEXh8VHcQ`IpN&#MYVmVB#o-59UFN|_hm znjV4{JNv)ck9uF4bv)`v@Up3iX%hFKXm@+pUfCgUD)K9wVW-;b|BzID=f?o~cz!%U zhNT1lgy@X0w1-MXgGvXMQLqDm>;Nh?R}*K5Clzd;f^QojKp{-uGcj1&@oy(d0T@XN z7Qw?r|9JrXT-F`n!$GA`u0E7VNZ);n{Pp2OfbnC>4a}% z!i-7|4}Wd{OeUbt<`ytA5El>wM*R8rpyJ`?=HjNZp!&xKcf{?!-2#SJ4<{oWVA#d|}>`hC1_t>0}x z04FT-`ojj|gweEqw{ZetWS&24u-p%pkpAumv%wO=KW%(BiWfJ>kIx0b$@kZ11)B%& zUw&M?fA}Qy|DSzfbAy2hzxx3JFx8H~Y+PJ_&JE@V`pfTTvHjT>#CfxH zZjK*w1A>4s0^6T95Z_;Yc|kCK)E|DlAfCVG!OIOxqJQ{7oK0YqEaz|A%xPGA!qzS< zgH?BMfbkYzIeQBS7+Lb4Ye^VgQV0aHfYnY~@N)rpEFOY@54lbGxp{fG%y|H2 kFan-ArwGP>clqa;f(rz8TYkG4fUp6$G3e=~RHQNfA8-~x(s z#oyaP{{H24&Q#!?2kls09*t!Wjq&reKNPFUs0VWGodVt6Q=@3Op+~E(|zowLM1NX|2@DOeI1l$^FJo%Dv_hog5>= zni3gZMEaswSA&w(_=_@tR}Isr(QsJ`7GOFz{r3_2wON*un8uCTqG787GbR}sQLa7g zjqHD$N-ef$Ky_)c)66B_2k~!NwfytLLo_(i&SrycZ88Q`Thl%!3om)3|5_I7j;nUY zv`EE%zj9}@#3Ik@Z&97=vH2s^srP-^<6= zgHuow#mYivUMjxWiMYb-`x~8Cs)^Fo=Mm@8-`P6BK}-(=ouffNgGPhKf_jYssY1gA z>s_lHuGAi;1DEz^uML(1Nc_l}ZFe&rsugMJ9Eqt&++n<{8s^-mU@SCCNsZaC%&+@* zBv4492qlgs^;Y?vQ5S~}a_n6l;9TWVTU%?_^&>N-j*AL^mjqz~y_8kLM_#R~7TvRA zP=?y(J3Bm0Ax*+BbyE_!(G6wZMNWXbd&=oA&h_v1ep@aD0rV#Yo%gXviG7rQr(3+P z;bqCp-P?(&Ap0<=ob+BZdfv%^9QAnVJo!A2yB(L%kV7v24y&h2?Bw8%V=d`BQYJ-c z7@W?TZ2afTB~<>i$ocw(UdmMYK+Kh?tAoq#TW;UrhpFAgK9A~c_RvN?p@Qz^W6v2; z@uw;z+r$v$B50tty4j=4vwuK$mDl$`LLw#j_eHja{{H7a^zF3tJoQwR1ub2iILxhF zEUYwi-e185{4FS>ohs4uC1f-{;P9x>w zZcX!+;|&KVNECyHhDO-k%0}>=wCw*hfBq)|dhhA!DhPr2`1o-6@Nl@e+d{Ym1Oyr8|L)1z6Y{^jpPLH*s})ppx3hlk`5%8#F5&-B{=e$}rypU+ zzs~<3gZZzd|EYZzRTM)Q^1st2ilGbx>mwnFBPmErX!#-^7QDY%t(vTV`BMf+W&#c3|$*vT0=sdIsCUI{;rJTj^qxFq5{o1m8AUg!&BGsmm5e7>u$EEP@75~8sGKSh8W(77#NlcxDMQM>`m)rHxx zifsRLa0-(Q&2uS&(}}0;v3SWfS6MQ43J3uLvq!o1K?SvK&3b`?ZgV0IfB#&@+T-UK 
zsOhO+i&uF5G*EbOK;wx@jufzQ`+u|0y$igkYeBj@=M@TU* z#enJby;gl?lQe(97C2`LY(1wX0dOUgnotWdEh(;%$u1yYzG|$wuN|)>NlVXv_{wPdo*v#OYkT0SQ@GkzLfAg1A2h5v zqd3Ey=3{DmU=H`sa3s0d^vgJ?CDB`ciP4OfhnDh3d&WNGt5r>>)y!UXpu%u`=HvnM(6717$M)GU@G44FT%-z*_9P@kL>(pe=ZCo*Fu>CFR}wPWQ{x zGg&HkM`q+mmF6?9Qry#Peb#otSFPis{gwfXv=XVhs4`Z@Yj}52)rWBZCZ*kAIAbQO zp&ELX!`&08=HT}l5L@fxQwG`9j8Bhv971s z$H|(Y&9?eyUoAx$I%d@z+bQ9{EL$K=GL*g)+Db&@{ilWSJ{n6(#bIBzE7V_--HW#E z9Cg|oRhyb~QE0A~MUQ}TaQH(#LZp>*x!~3_ZM&_SZZt7}j;1({^a_GV@&VD~aV4^S>On#MWl^I#c5W}y& zz?gFQE1-7|o>gSp=-AFMkH z9=VLUkCx)pBK_0=;C>=_1Pim}(>a3sBFB89OfiQs@_VT9vL}BgGs?^T9ZGgU~q6Q8`(?f?kWT+-oBQwHu|C z_WVnF^i1Tos~+`{_tJ?~&c19!+_Il7f0d7e8T8kaOVD17^bri;n(pKddP!gKukJZ!JRiwX7xLr)E68qL!>*1&pX@kRI z&6+E#?b|J(dnSYXm8!obpK!twr3+L>0cAK{wBoa7_)>=G5~vh`kH)L9WNyj0%U->Wi??%3ad|2hTtf zL2R?S`&1DrJ6gRRq}sM)$I>0gl{jpm+- z0sz#^an=QcK8eg!^G+3cZXHNSjzu+uWucnX&s#a^6OZJ?8pzZWpgH@FGf0CN;xDSN z(&t(oItGc10>0;3B|?5Q4u@+s?kXdTwiwX#)`rax&{&W!$Ios@Oz~-oXC=0LFuN^g zL7WsM+TX^pAfQAuyqcQ?05IFF{K#eMtSRaBIc-C^>-V1f59kO}LS9m3`q*{UTX}vH z)5@ki9`yG}%J^44;h{KY)t^xV=L}t`uYP`SU>WQJ2aYLc^c_2gb(I3&cU8Z?El|9! z4jByG`-6FIOz#`i0Pdo^cKMPZHmfi&81WMIo<7W(ReZ5q`fm&pAN9O)Hmx!dF98ks zL-q9Io(f9Vx-j2P;OmkW$okZkeK&twfZLQoAzo{J-QFZB@5EY|%R6o!4BuPwrEGvMVCr(tvpB@HoGzAY5E6%7FApTENDQ zdbeZ|6!;$Il9tWKcS;rg=`@T}B8G3j(cUteTg}m5HEkJ-vQ9+riFo2~iY(1|4z_4_ zPBDELZHP~j+L>fnmy})1E#?(E;sd51NP`F39Hm`4maZ)qjVm(9wV>;DeT9(}Tj}H( zZhsD)9T=j@TE$06vr?AfU3+Zu|7q`3{9czfG!S$}s7k1JErv%$*kVUt>)xm0W^~hM zK}$u?AKRi<{Q+N89D>Q#W{FDj>qMX@L@$&dU1H=|V-?3-##|TKVod3IQ4>amVBon~ zND0%Wyr*&WE2Kw>QS%iIE2eBHIaf_#$x9V=`5DqffnE3q{4KDtP&u< zW~`>qXZ6+8JN7%7jl>g%s0Y&YvN5x9aE9#RE{sfcDPiyq=;tIw(ie(Tq5w8Q z5}drf!3i%TrdIv)Y01oXBiKvycJxIf0w`R)vAB=laS=DlHs9TA?_$tRhi08l_D?jZoMv0a3!xd6N`mC(TM}Pv(s~8_nyiCjx6kGd>e}Vn66)BoI}trwgtyL3{WB_R zG71HCJPOFGbDbv+qonD(oQRV5PC3-@f&O{bA*^p>=cNwE8R}cFG63D0;KwAxS-dtl z62|lEUKYb2hHKb29_dnANb%PqFTTwiGm;z`ZZm7yZ;0yG41D zsTx#*8QiF0AOYHCirpJOEX6xhJ4Y+MiTonpgS*Pw3~@=2--GUorl8A@cs*kV_iiKx;uGw`qkMLB8)(h2YR0 z{;F>nFizx%GVfA73^xK4cc=-6rbk9Zkw!w%ukY$t=zo^-Zur@JJuv9=2ocWo5)!VUH zl>{!4f9k7PUCZ+T)98YagCP-;hJ?!oSyW$QhtR}Js&3KOQost$iz{KcG384{2zKi{%+)a1L zTYr{b?BIwZ!3*xp`Rkk656Qq>o-q-aB*03nex~{|$AU(P+jAD2B^MoR<&4QqbhNB` zk&JrV3yB(yG>$Sv_4UCxk(Iu(2@|z=kO_7l5Fy?k;MJ#?dT4 z4{m)K8Vw`bA|3^^$+K)nklW-{Vvm_<(@pyXDf~z5P2>bzSjC}=TcT%)0U+uO!pr-E zdJV=*Arz_MTzQh?*O*n{EGWYWa=}<8Y1uZ!+{Q#GoI7AJTCG6w$u6fKz8qKqJ6f(% zsKPpR5sW@_3h7R3O%EY!W_K>d(Y`z9f((}E_H(CjzZL!n~xW%VxrkY!t?gTaSS0UAOsJXB%?tlA0# zT$^6I!)JST5Emh5_(cbGlH5W2zRXqWpzT6A+g|cvt8;<7(^*w!#*6C8dvu~%O^-c@ z5nBg49<^x3o+Xxu?IWs9qS-CuZkmgdn9hl3J)Ky(wq zZiVP>xLgac`CGze)n?Vs8q_6cLS9H3%ww87Or}nO0w3o7cpflT_tk${EKq?6B<@N} z!X;_2NgDNDqx&)JaMe!a z(3BQrC3@|vjj&OTy>i4JTjcj`b8ARL9kLWz6=2>XU3D@OfOz&c`` zu8)qZoQ*2kkLbP_sXy4@GU#MII|I0#5+4+(@`wOsUzw)d=(8GY0=G)g`0b#yQ2oK^ z7s~}kdjiDCZ}nsqK0wgoqmdU$a~m3aNKNX`VAEP-V%Hsmn;QPA3`Y#2mwB3$%E6z} zrU;N-VAc%xAA0yCP$Deo2y_A^SB*)~PkFE@AlXMcVdHhnf4MV1Y69B`kzHnwX0!9W zI`PH6Dx`dOCLF*(?BDP+c`$$^ZbW{0gx_b!!5_pgq-D(Zd63o9URz43w?@Av)4!>C*tp zr$|^(=ApCx5LHmdmj0KT_TZr37qP~lXUE5G*0cUR99&v@CyD(@Jj`pV6b9RM(DA#q z)M%uiT6F25xcc9?lT{Qdz$_7G&#`gyD;6=arDH6tB4yiB$GetH*n>u-$hSe?QTZSopbz$JmVII8M z^?u2&q&5zz72@D^Xix44y8?zRY^yoV^I?tzRJwK}Y*%Ywnt(tAC33yUp4-6+Jm_3% zumzF{@%n&yU(y8%9w~KRsOvP?pV`(zb)x0e4KeE#5<arL7Uu=_2G zmB?q4p1sF<4;$DeFF_*wkpTueYtU&Uk?oOqU2XEH@<%T$4?UxuS>NU1+vTccj#cl> ze%*xk%fT(rFl=N2eT^j$=ax2%hF0w^_M|uz3p$V@mfE0m$1h-GQk_}Mw$}ep@P+hd z+$k68y?k24E4^J8mzSk7#!zO=TLsg}T8B<)!dfaHPKWX(( zdh1+QT6=Vs3h!7l3i&Ko7o1W#FR%Ghq)=<{+j8~;D zxSKM5G6A~qmkE=5{OT^PRt|oM_P!($oHC|%%z}$=@uHQ2LP|oEysqil2`&B4{O)Vw 
zqZczI1|Ncz!#XQ#^>&)6Mo}Zf!l~m|`r|3|ANA8nIUg8(&*!N8zdxWA+YeoyY60|T z4oiNo(+SD17@Eh7{uF+am(sEtZaXjK)6t7IPSF8)!=DHBB>C&MdY{xPGiI;^pkG zN9^P=`!4&9{?1||MSht~-V4T#cgUleOb=_IzIXj77m6Dx(VOg!$?TcMUfukWeu>a| zRH44*m^8bA+9!VyuOAFYB8OwP%L=Pfs-fMXm17Fu*0weg)2O0%lbDb&qIcl78_!CF zItCa{&!w0W(%d&4M}K+zj$lT%C`}?sBKF3Ar0ZZsU8{h@J5|yh$_*W=~^8q|As@}Rx5jiXgRZMEJGA` z7I@CTR-{EegdqxD3tTG@q{!q?ge>g-3k>=+$pxn92_n4TSE01oW4fqRmYhCD~ zURZXvvli;(UfAl}o?M%6*Vs->2?X+G5Oh+sMYSCnIp}u;1LBlcwe3I%?aO?+w2!UCoh^+@XTIL72L{W z4Kwf8)PW*c(PeB|{$)UZQc|wPq z3A>|Id%!dE_23#`>S_wuGb694fc6R?pPKJl_dk0iqvzdbBQsn=GY);!fUm4v-}_FO|BEVza#a# zc?A8ft~xh~VIj}f1f3h{5Q(PwmM*j9gkS~^a`_P4Bm>tj(l|8`^QVjQTa0HDQ7H_} z#Q62+sXKF1cy*E4$Cdzau{P?U$B02j4Uc5=!;sR#cH`8^2SgyASs;90tdcMb84!tc zWzk`FSJ!RP-2b5szV##VZpgsvw{|19z+lE_QGrgJnL=~LtP1)T;9^j56&wT7!U~HC z_|ivB5`HJCtAZ+siRrj^75-s+02HQ|5|6(1BS3#vl8k<$yUDc%$*<1fu^{2^7WI1F zkX|UBD2;<3@9PaY9B_Do;X($yHQO<-4@&^u_UonYRX=aFm_o^t=V7DGjT(i*QogUe zs}M#viM&PrHy!+rIqgGns^P1M>yPPjnRjYy<8b3@0w^xVIJybGm-ivfcu?;p27tOG z>1(A^B-WXpG(wOenI;dKzWwzZPM-noWz(cYQ4AM^;n^s8{~!bMcFIbM&N;{Pd=dtN zq-SMknXV@??tXx1i(OgtnPyMO#w&x$#U&WfVjdvwZL%7}XF;PyE<84YWwgQgWmPGEFoqs~hoFiG`b`5T6FvrIN<~5lg7B=g51;pNDC^=_#G5oY_bgr=T65Yl`J>{hp z(Rw&0sYJ3uc!0u6AJb9&zzzOUjZ^-* zZ-|dQu;>F#qmYUzo_#RdNS3d-gN;m{TCqNh@G_)QW|0hc`Nmbia8}#bcvdThurikc z$}FT*2sx`)u|Rhd<0HK0^pgqgm{UMX2^!GdIQb*2t+b)ne)yZq+?0*InpAM5J^7^g z$wr1;>3p> z^tZUE?bF&%TZA)czh@DkPW6ihAjx_V;Ka-gG+w@kWZvSMS6hdu>;VgI^pRw-Z79H? zsOM^UD+2gOMYkG&yaYIS=cbzJVeEbFo+HanG|U-*{`1U6HDTQHCj=N1yrUOJe+fpW zS{KZ$Y??lPSMceA!Mvsi-i-|4rGZDcZOc)DzEIMym@1=8>zVlKW|IjXEZE!M1<#hx zIu~wxH=K)-%#M^lZ^9K;2ri30O7w*63{}6sdwM;0uY}PD%mtYdTHFr-9*``Iw#Gzi zySFQGPKujKLRD((UF(sI)!S@tcLq){jh#-C?+!5Q)=~EHk-NzjxYWIt7?)#^F@8ut z@6krRcn-2h3#(i}rOPy~>$7@zd9^yH3h%RtYEK%9-gm)!w;&N%?(`HSV^1Kedk0j4 zJUbulm4+1BgIOD~=RL=BF>pTnJY_ zc5YgFY(7apy`vQqtD7HOcLL!gf7&r(H=^mF^Bp6tbZ(O;-VmiE%h#g9>bTQ>l(=pe z2j$Q>g0q7wFxnh{|B?(bXGNys3X91L5>@cJy(BJTK%Los2{;SZSwZ=57eP_C$C~}3 z62=_WCHD*@2EvxzVEce|i8IVAj6o33I~q|4dLR>L({6bsjoe5qHdisuF~B%_`Bo4P-X^7an{E;;acNoY3M zhPTW$h3Pw(78$Xy$tV?xKf`r+33djp!bbJP2 zlxX3DAkZr;%##=tGQEbc2o)1Wstz_~LcN`oyLz3?s_njT+i&!*n3^P%DYD*`b6{e` zx8Y~Z3=iu%oW4&an5FtGOL{taQV%SCF2vs%T?xOx;^WaJnG4I>CalVK z6oQZrw@?lDdEK5pgEZ@e!~GYcI!u}a_14lDbBl6Ew#w6vn|F3Xs)6#gylQAK*1ZVy z&YV}x4vO&b=tV~;0vNpjj z++hIdVv+!GBKjq78z`omW3L4*^q#mBv8EL@2J-rWHsK5NOhrJ=i(uvt>R|7%fycnKD0YU>_3g(MOMhGEO6zX+AXR%|42dnTZ-49?wN=Nu(tK?()MUR+M4RY-Sn}FuH@ow5?+53LRez{m`_G= z#|yd+R3a@nm+VN+c{~BC3WJ(8-~Kzx zLyZho)zN_)0`dw>v7}T1`VM1Vg3KokAIed;>f~Ho=W&4%dMxjN?|@t9X#C~{jB;}u zwy4pg(8-772aj+Oapf&ovdKf0E%>{h=(OfPx)L4iEN^nZwQ(R|MyrK!@Z zkBCM*xCyI{rU=1&6d-iUbqI_*&t4cTo0tUSrcz|LNl`UL?)m$U?+0e zl)1&wEQuZx6S?_FKxh0P5f;(-^}o`PLP=*wqF?$1aifp65v$^14z87jFL;p`^`vaW zmdSEyIL9GCfvYH+|H|F>+~`EEM;6m@%>yJyo*|!4^TCeoac;ba)25VqT$tmDCbw0U z&>P2+FkW_)4=Xk9-^r_Q(xHj3z*P&=HYm&V*A7pzjJV(r!#`TOwa^8Mt-cNziPRhq z3wvxa+;f;S-S44O!(6|}wW>Dcm-J-|j^2mW&#K+i?@sjV=J^6yX+R`J((B_XF2d?9Lnom*(y@ z5sVK7dx&CLFy_M3_kp$X>6ITN>Rn~-49I)2lPVO@zi-N(Qu&@JcDG^T#W5D0C69ed z&jRdN0QuP+Ay}lUB-cExs#?iaUCn0h+-~hl)*Zj(hWLZ|?OKI-vF6{K3iAb?>J3)~ zm#thYYZW|gZlIa_n3IQfn;`dl4Ds*gf>J;en`qaOFsSCmi%%PGJfN;IJ!%*d#wm zbmz<_$X4xFQO&Ex>;08E>rUiJL?r0JgWV?{0b}5Oo>0nZ9T~M-LlmR^hm>tZTwe`fN=^zvqWs^*D z({i!1VpmHP=Jyt$N4-}cCu?v&MZW`siv!{Cu-eEuMe$%eQ27U zzQ$SK_@%pMaYN4COq%C+YcM zNJq8ylfxNe*pOUA_Muai97@BoV?pbUtk%GPEK24pxV{5ryWuFvvw$Bl@Hu9(`6KibxUdU&&G2s)# z2L7foPO(r4mjM@k11_R7)eLtVZrGBtV)|e#pv!2fcFQ*bk_o3POiE=F5V7CvCfDHy zl{b4&#s`g>$2X)-xhE%)y0_k+XbuL%WD@*w?R43je_=CuCjYX+1GYBahHxQ$ zke8*{1q%pBXxuV%R2nldRIVy&mD&_kK~<`^4HMc 
z`9$)vjJ`)cFn80e;U>9mjk(h0GCHpkduLIj{5uh+_s^#E7GqyclMGS`3wI_LpjkPA1bn%_G&v31>n0H;?5n{pNOZSMq=cSJ`Po z%O>GJq>=*8(K_=@?WZd6c?7JjA=|8vz#bdc3@1xnf;~^@n)a#M7ro?=&!70uXXU)f z-4C+!3<55?>U5WHZlcE&b<%gzaYqF=N8^4ZlzsU;`_V${cy_^LH$L$rg;0=7x@0rm zM7GKwr)GPeB9;60r3cd6qY-ApM!&fhMjYYMd|`u&irMxrx-P185PMUoKttorB3#z_ z9y!f8P>Y_{ECBoYBoM0kcTVNd7-k&LOnEJTx#>l3cXI!OQlYnvP50uD$McKjVjd}@ zI$LqLC!G$v)8(c$nK_dK`zRKK-9PrLQ4~aF%v6)bJ9hThSQS<_ZK;fE$M1RC>g^m5 z=3mNQrHj2UtP6w=snlHE1|2_Q277Ex1;TQzWmUhwPxIG0STIbE^Iv{@BXD@pU}%uX z;pe%*c#pDrVA!v5tD^x4v$4zk+K;}ks&JxW)=Z$YYp9|clA_8brRM9{T$VZG(%f{q z(=5LGOO3K4V$#F3U0VCyuP8!mhzNDp{8ZI)ZSb*5;8*KkCPC!xxaT4V38=Sku5u^1 zA0C&3?dvJ!t>eViq>G%IsKYK(^97bA$`GmYj zICu>6lBVL%4+S{)e(4yqw@#LRF6F-C4uiV{y_&eMwiw6N+xK=UP30FH`?%U4@V43H zYy$Fu>KDh{K=_wJ&_pQO)Zyp5g>g3Hd77UX@fZ5f-BK&aKW#^_|@5R|^$|;idv2 zdx0E%ux=CY{x5GIh860@Hy5vGs?cnyxg;XYV<76_)6uktgH%D5V8v0*3@4oeDTl&5u)nx&*NX}T; zzUd0g`!y&!S3G2?%ihqZ^P0$*f&DRTRPvUqXYC=6WV0Njpg4tIu=KZt4(~NPEISyZ zKTWuPk4s&$$f_&d5@BfY*pFwwX;Jk}lBJyREcQm3;bGTePiRh>86ZLt*2Yl%`OjR_ zcd7HYe>YxfGIrElu~YA8eC0}x%xB6CaKq4lw+V~O@vc;RWlhKAhkkMN#}HC+bNGAV z>tuQ1m+xLl%hq3@iL-MBCf+@-1r;qtPUC6sM{BD1UJ2Mp*KBPa@1}YwrFAYfr&D=y3CNXI{J5m&T>8qJ{c>gLW zcg+91Jg*}%epLi3wwMI^fD-a_V@HLgOh`MMVb z4`Vg?@~6h)r{wY9y8`zLYhNF@2%SmE?%86W5kFKH(`Ob`_+OyM?yd8AvfU}wlr7nc z@w@-fRV29Oi&ojk`6KpsC{0FaeUn^O&p~Mer}z%N>J3SE_cP9+|D%b{@GfxbE_Qsk z_7n8&`P{qXg^w*|AGbbDW~mw-s(y45bJ+Ro6PEZCbUyz%L{mQ);|o~E>?lF}FBSTN zO80|}{h#T>fjT%ZZis(@O)#+1(a?2`=H~l||9PHZtXy}4|M>ED{>zYjhRWZu_J3_Z zR-9{w3v9)fwZ6TBk2uA~`$+5V*n)JEgSLLH`tw~-7T}3#3%aSWim;sW#9SuF#odj} zC_Kct*(KehAY?UmOTW=3=O>=>&piO8i@4CM9G+}yW?Y++SURDrt*npD9`r0;=SP3r zbxj`{GQOlQt@ID zL$0x)g@e-Fdq0ESMYs%_(Uj7{`-i#8XE-6HrNl+y?#%^+;fvryV_dySSrSp$9DdjU zeKl2HY~k3T{%EJo&XwLngO3(<_b=|V(zD#V5#jw+s55WZhT<;7jcVbIeUhMtHhNlG zmwTnEh7*`0*zm&Vxx=8<9Qc)!bM6gFXV!v|UsW>|vF-P!9n7SGV0A4wey?)0H=+O! 
zKbZZdwTEU+X6EDk_v4X#(arJO7YH@n2hWFhB`gow7q6%u1TT^*g*2{WtAu#n+oHhV zhA-S7_saNe4*vY+erV4<+q&iZpmOWYTb`H841iXnjZC8}L4J|ua_v6s3L4}uHiw+G z;!-r8@H?w5l$xF&pYHiC~@&ehptCnG<_5>|4Wi^zJkuW%^MrP zoWY>@9RADW$ADHo)pq%MsKVkXl&6Wp)a-$;AG)^FUpL=(ygn-t5mU4;M;lqkmq)J( zz%;uQ0eFuee=2wmAZ2M7s=qBaeXMSyYX6i2n)r8rgH$ku0vL zXPnFouJ-)7=YOAO6DM9ymqMUZE|p32FTu0X1-^q-s%<>TXR8`r#M;)Ufz-*uKGtIV zct^=HbfVa~mOf-Ct8*8blC=CXYs( z_`mW%w-U0`5vzR0B+vcx2e?ug+MT+I3(T_kpV@tH1k87QtfrM$6ka>|#-geW z8i6FRYyH(znFkfmK!KWawvktpoXv8h637*`$Efxd5sLwn}lp z%|0S<6@RP~Y4ztx?f%-bv42{YmF|hPm3sB0N275vOA2NK*X{SIef&>DPD8pC%^B2F zP{_|XHC>?>kb(`Hs!2PxalvS7h(_EC+wNVJVh8tBZvm?$1?hVH1&){?4Th?$w_dj%H(|-@Wtk%C|Z{!!g{E&T8m&j1-Kkolu3J!kuWcMnpo9te1GK8}Dhn0JRUb0{t zm-||QeP%ucrTHD;JoHsMD^>*_du3^D2hA2nWgqEaqzE!hFJTyLG5}c zAsOqp3)8n%iArL;dn)u_0!!lH6*JVsW&UwJj`8AjymCI^A6q91)}qtRL^F7|828H| zxabgcJo5bROfw776K3m}`F%`GOu|UHDDd-kSn_5=;hMt%X%`mp4DlA}5;0<509kSd z44zLw6|T9^+U~|N)Vh}MDUU|g45r%h`J7<+JsS&e92p>*L`p@pL{y~5yZ*`HpV7{t z)tb}%bU`1-Si0u8fjW#4zQDvI$8}c-;#;Na735GL<3nG-DAOD72(;^72~qGhs#VFl zC;rVEnFvfL%ve~caE}P3p7HRj@omQ}Gq|sJZ2cXaP8Pz_!Qv~NbsXbUU;7bh+H!H^ z-E;aK7*4k`+E}vX+pvNy6aI?%UXNebSEeZWDatPr>0&7E_tU_A!1UKUmuWTpBqCM9 zHR&^tMwJZSFArRz-hB0eFI~DTa8zzn{3VhJaBL1g_U2IV3Da8x@o2ejr$}LTngJiw z$5QPLd8@KX?J#)DOAMJ81Ic?~lv%_UyGUkI9=f#!9a1N<}j#*`VX9|_;e-2^y0IdyD%N#WTLylnaY2lzkQLZVg z{H0$Q7cPF$Y)yh@;6>owm*-nM+U1@cT3{8iV8|V^@N5fw}+r;&B zJRhfv+$E*OY zc+;kR>9|L8hC}@(sfJp_b$-G`C#Sq$SCpcKfs5sIIT~SP<3F&o>e@bMdU|xoqin?| z=7@DhdGXns(g2obyMGzYWp76csO5)h9601XrfY~XOR}!pLvLZyPhzvSEP^p;-`}=y1(h*3W{M7G}7ynHw29r zOzJ$}5cLxUERi{|5CrmEq}IyD#NA0YNUe^}nTm8cn0w%ma$sLbX-iHY^l^)6q6m@p z5}i;vb|ASZAy?Xr2h#?0h92=nO-eoJWvIUu8ss0MC=v8_{_u*43NE~`sE@Vb_kBwV zYY?5&LAz_7_qay>ads_;VuP#xAA~j{g^ysoE!`R^$?3vWqGT7x`lI}MU#!>U2acZzDy`QSnmcfJpv)_z%P@)O?T3-xG`TFz zydle9O^Y~{bXP64p-BDiugY(yuR`Va!KjYU7qO^V9X>le195oBE-~qEba{U;n~!bj zVubNwDPa5!Kve}UWw*b4dmBuJY3H6Ch-p_b*C)s zspMxr%Ce}O#bM=<+*0?e1z~w#;oQu2Wv_uPE}}SVZq-BG{a{m;+4}(XuM8xO$!YY@ zSRMk+w-*8tQ^3)T@(LecqJK8l|Er_xaAfmqqohGlwO4ITZKAYR393{}jhdxK%+@Gs z*D8{h8Zla1gAN20l&Tfd*p$}Kj2(NA5-WV^_ZQsz+~>Z}z4tuloHxXOy1-uPPiCz7 zOi2^u?Hw9BnJu|uhc9{h9cjwG;E!|S^FDd@d`bjM4=v+CU2$!{_&3{hMg}Stc{&NmUoUEEf=exKy6^ablyAl zo+|nv`j*k^wkA^}riOcq0p2Jx>f-P5I1lZPhQjee%ogWb8$@1I-(tU?ta;_7JDmpd z8G$kW;%>Rs1)1hGXV9Io^j4 zC{Y>!o2ctje|j~&fuFa9`m7CZn1l?aB~G(Ixkf;b>I{ZnU`_LHB*P^s$@x`Y7C6G5 z0PZ9TH;givT%Va#MlA?(4ifIXn!jNu-Yps%mD2f(!oXj@Vy4`nzIT{#m-%uy%5u{T z{AUsEz57tad7E2iyjYIUh=REJzXUd^8fSi#iL_t2Q!5kMyYq`-O4^iT+-)Ss)l>RL zO4)VEiNk@|U!hW}4|k^B9#U5uj^qU1iVCaZ*_LZVW=?PD6O7Yjwm0p$&K^NC*c&hX zK(P=11Q1muMZ;}pBa&ZtL0v1h?7Y{hvSE{Y;P$Q1tZ!z(tCW4*@YyA|vz8eV4deG& z8kCVKkBWUcuCl09yeP5FTy0whf7?-Y7PCeoUWH`dV@M=+LVTW7O{98%xF)wZbER2# zPp}tzcLp2dwc;V|g+%Gf{}Q@0!16v1bRoXeMgQ-sZ7y5>p4aDFP1fh(H=HOQEDvi_ z*FV>|mAu?@ckPDK;SN@j^if*vh+wx@?{~Su)Bo!Tspz(KjpI){Ha9baSLKZ@0b_X( z&^hdBYRh7juxf{z*(-{?VSKnr;J>>;0V`(#PyO%g^Bg0Vm2!{0=OSP<)Gr)HdcF#J zI^5!2|5H+<4_$k{4?8HhZFxQNn03Ecd#vTPn;ipTf$zcDxf^Ef_&?W_yi||fuK3^| z*+SwoTh()UPO?G!3dcdeT&8l4{q{B+m~MXOd3$MgsNFqgzgFgD@0p33SC)gt?*rQX zr6py{Q(5INtKF9i1VKp0Bd;B{O-DG^zK=Vdgb~(Y;5c#LaH#Ng;%`M z29>!`2}(y;RDZ35bL0rT9b2khYp)z&gzCQd6T@FtZhC?M#5eU;5%8kTdUwV)jF6aF>1xhjCfB^41@+IaWSRi{=DRJ-DmGu8NV zvQ6P9U8%EUB&mGh^F+&B6ZRRaiij-h(tHbLgRA&@T@_D}jeV#5$i7=`r;=;TC2WwT zT}?2Icej(#3@FZGVGz~AGgSoSMfxSV^dv>+XQ`k6a4H(g9(v2ZAfi3m?GeBud?<-o zdo;EcUVT{JDjEFIas0;R{sxZZRQ84S>NN=OhJgm{o(f$r4+$sk=XF}FSZ=YzG^`LL zRdWE@rY`D}p~4n#+c$FQjDBGCf9%P}Vre%^ zW`vu>S!CF(uUAK*(pYjAW&nJgIoX^7UxCvJu|ptyF&zie9s4?dpjZa7=fD-t?(;}s z!hpiMQ$KaKOOr3AGV&vVScq2FuuXD%WqDsCu#W*gAX}g|q1tEhZ)1-;bnJuZ6HykF 
z9Zi>Cm(9Tswa5?K-vBj<>s6Aw%vr@ABAneX1ybW44q1dcgjiuTS4Ny>6xhMctjb+M z)s}A3+sec{hmxMI)ped9dLKmofM_NM+P&NooC(hyD|P!>%_Qqr^m}1;o1L~n^xZL^ zbvIH&XZdqsz$3|G;*If1ArRQWC@^uA&xbd8GbKIu6veS^6rH&PHLV}`Z2bJY{%aZ= zS^BVF;OQz{F`^cDG`xhTU5ad$nv~P>l2`(b2aTY&hxeOrWs&n97-`@KDTKCr3`-16 zk?~COm0iV6GJCr4ts&e5?s*LRm(2u=45#dC;My2pyOhJ1AG@0={3K_ATa4ZNvTHJ< z%Lng--l(W>=^i=>R!QvDDk?dJO?@3*3$dbmG5ETx)e>-jw#_k7%ThF)CsWa)y^f&`K5hUs=0iTwrOdPu$SvPuvoY7T|)awj@?2cGBmN5~nxHS+{o2 zSRscAS++rb!^d@MTUIqmv;KcV%y#G*BRclG?S}7EJ9O<+cM!{stt|@%*6B2NI(I)a zzm3=*+v)|o*=e|y>p+|_VkeA=I^kSwJdBP5o*fNx!xnqdksI>CUOR;YAyrJb!jgMv z3a14(Bw~Ti?^>AxMl5S(R6}z0odV~sL>oi9lvO-A9W<0X;AcFsU)ld`p&=5J;GL|s z+!el6c&$N7LZC7ilKqbv?i#?QnZlnwXbM-DM?mm6Ux#>8->Qf-&2M!WuTo1pWlFe? zPiAV?V-U?ckNcP9n5^~aH7(h{a<_=cll&3OaBlc*xC3a?%w;zztS7?O>?oo2>Q(7U zX_wBpUgrK^J zoYORoFf1ABe8T}9A8|uts}CDNI6BUL2O|D}V<0mSUAO3*rHqCJI zp!$2d@!u+?gy{cz3uQ5ufPb`I%f`2yOYN-Q){*x^qh!=Y`h>E2(Ud!ZU*s#Q@ev&Q zwqLpJ+B1)n+y@&S-EVFwi|jBzdN!Y$0gh2u(UOmF;0(bsU7ja~QLoPc$U0Bscu8pLr{D|lUj&eyJ}(^{@rL#bktHWdx=H2gjhNVzamPGk{*sLD z$FG^H(7qM&Mz28T6)r;OtQ?049OEp1XGEJI@?!LT3n!aSh8%~Sd1Fde`RN@z(KMI* z9--fFWTyAaJp!{j{*0)S4N(L*BTcH?&c#_iMg#nwEs*ih8#!`gJ}AT6{>GoCz1M_f z_`mM)wsq&-Qj+>x!Tw;r2BCF_SCWMo3Ba!|etlFyZWf>J;|zK3gN{tXl(dp#)VIE% z6{jlzL_Az}2xP&qN*IHL*L>s{-|v_rOg`0kr9rhDp@^X$ zjqs80^uW~4GFb)~?_9F6n7!oK)H*iGh#!l5Y*hX_#W^6q+#h*FCHz|frH8B#c>rfF zk0k+>#PCN))Xf>3-pDer^oj$3k6G1d!$>K!>`LhK$DSj%mTZH30{u~6XsgIN_i;Dd zU+(CqftBL$bmQD!Nu2r;DWmsB(3>6tUf~Z^Buo5IJh{M|=GGRk_$$kYcTa?>#26=? zm&m*@Ol!oFX@*M_ifVB=$B)lfc7p$arz0WLd9eYY9WKQaz=t>*~aDg7x4 z5A(ppSsNxW%H%`?P)(EXPH9RcR+0xkXX~%paUULD-ptjZuT;cahwnXPea`UK zD5h}yD~7CrcipWzKKj&X_ezDVjqj|!u9pvK2{!K8YR1{%F!HaRIdhm~0n??K0Mfdm6#5ld7-^cKGAsF#5bd7px(EFikEF|M!Et&m}e! zoJlsbVRD7zt4^&iq?nn;&`b4xBAt6tfc`^M8BaV@vkog{5lRlGxW-rr@9$lfqYrr=yIqQ5d z9`QJPZo9zK> z1b^0|7sqOryFkZLdTWlxow0|no>R`06klQSLrNmwV!1&mHY3h03Q8EnZN!G@d;=vl z+iZXz33=ePfXN7a4U{htaVO|CTGk^#KwmJTz@WNl;|^%bl@a0rI4hFBqhi>~jd~tl z)9!JvYf#xpc=f9YzpU1%n{FeCKia}<&RSh;l@1{SV5EAx%^2u1${ED*fm@KJfF5@M zxR1YIKludw$V&8sm!s0wJisrRY*|xi?)_nlREqcnSd4fg8)A`bCPrCJ z!K(SHrM8}x7 zOCr7fWv1)9V##8sTR}hmkQR99eQjn8h3PjW&hGKZY8}_QG~Zt%+TMHGTP|U@i(u#W zg#BbIj(BwN%qw;9P2M)S8VwOTIxiqyJ&8Xmf6S)j|C&Mwml~ev6vj+2ctsf1TVpgr z`u6X@grf1C_B_H`$-yHYgYT1`7~`JNjiJ=75Y(V=KK4IuXsqU6A2hBv6_#IF;*PPt zmA+X${?O|6gIid2xrD!;3t}B0@Rzl!G*viU!*_J1?m;MwI{LkKOH{*9-kfdy-BVuy|))TRA6*-y#V}Gnp}_rMLT}m)78I< zDdfM$(t{=C5)rl?t7)}(vb2ps71K6i`$5Kni>A4^n)T65&QziSSyxW<%t6Li&0d70RjxKH`5}$0dPb; za?kta;C*3P>J)}_1YkD^mWA7}_sKLbw>0JcUOc((l1<@Um2Fl^49Scs!xw+bZ!z{8 zagWU0&%qO2MW&zRqWkrt?yNwx@HovqLQaFs1h;uu|&H&;-MM^kV5S@Yr zf=Xfy%AgPFr|oNOp=Yt5^O4!7kpd?zLVEHuP%r+I_-svB z$D5SjpERxMNc+O@>G{Kg8T#JH!FzwQ!DGxlLX4esf>7<2%)Gp9PN!q$;c49?|JfQJ z;WKgtOYfs7v!f!6*-DuHXXl6IDaXweNGs6Xy<*ur)hj!8)Ir^+YWd3B{PWBwZfUn^ zM+LGTDDE!YdjOjH7~?D3p#9AYMvyU~Lm3+j#3hJi`qe%?z5$MyHggV0{hln zlVzIKGO=^J#_t`qAOgI2sopCRhQ>Kss~P zFv7w=yjMo@ya6{40M110#4+E8gQ-&WQygCF2_tW4d9ODNEaK1wogpUn&sBHp9lEle zct6~Un}XtIg)vzVytr0phdKZ)`)$1quXx;L3pJDCW?{Jt-wRu>@#7<|MYfVsD2_dU zV!GGaP@~)`cBwl%ZT^1EWq(g!GqMN29$uw3iTY=?GTZmn9> z!C34kHSG{H9@$wUMdUTfbC{qTfqj*%>1SjiAXS`uFcMUYTsFwNaV7r0*L2gjqTNw4 z^SjJfUWCuz=~{F(5D07-J3OlF{sy+raT8uNf@Z@<@^i*d3Iq$-@`HyEM7gnD&Yt@6R8tw|Z2R~E1xP46;EEN)I zh$+`qKYZZSBpWVq1Yd&ojx>HF6J2Fh^OGy zdyU0-TZtW6e>OJ-UKV`63L&dEF80ggef;GLJBl`K>R>mN{amTh~))ShrVXlhP%y(xY(f7J* zW4|isnB=?&y&l!(mmC z;z#DaPPKg(G*gU}LA9R-m0~s>XbIx9(nsr1Hcse_;K`GERA+8D_LR2Fr>OPWQwn`p zrrL73vab2PIWPAHS6}_V=0h?satK1-bczmASaJw5?Jx zwvPPyS@tE1drKZ!qWi(-0#N!SpKXe5%I))@4c7x%yR?-P$=Y9{hrC}i$D@zvyEO!v&o2wC~J0jmurS|rfbB5OI~e?hg= 
zzt}pb*>PB%X6fQ`vzRT)Yn(7ZslsYov92%Ec2Y4mJYRl0zkxkU6*YkZ>e^7> z+;+9K=IoLS&AyDF(P z@06P;4xUs0)DBE*n8l((v`@Z%x!o|i$%;?f(f?G~w9G-fX$Y}dBgRAO>ALenMNgE% zXEK=q9i>OX`>KUQy=+u2n|f=%yis{AR7pj=ponD9_N17BFs*HK)J|J@@ZWN@9CJh7 znk0f=E2N1!(B}%`*Oc zWbJJDB=1oNXbcL@@{3{W$Kg((*$*@RbGx+`X76Q!e3KCQ0sG}W@lWv=;Sn2b;P z1D&wg2-%gr)!s0f|IhVk?V)c>!)^kT$6K-zL~e5w>QvNQirlkrix@uJKe|n8Cn!D2 zAxv_m6l;P@VibL1XuwKx;1f-WF7>h(rGOu;9#+=DarKnOV4ZHt&g7k%uQWG;CkM>( zf7l6Y`8jml|2GLr^k<&c{Q!#gTrQE=&z&Q{u)b%k6vA;g%9eWXOOH)HcH;2Eqjot6 zA#?$b946c{V_gcTbiJ#hMqk;oo z3P>10iVbt20>{Avz$sJ-9KGuHXpvGrKir+>*h%m?F7T+E(nPszvkhvPy`p6_a-5Oz zQwa0h18vmd_&88oESO*;+wF27O>bnBY2d=qvc@_?3b}{U6RIQf?ESio6DMWz{4m(4 zVHe!)R!{EDv$4ymYfdWf=g2B^5K*BG^X}G3X3v2%9Y2zEn7Fb=a zOm4!L5XEdO&u4^QgJ|xHvBUV196KLyM}z#zbZJ_!c5B~a#xE}tm|EC-oX|7E1!2X5QzXkHS3(*3 zoN~qJ1*R*P5agV(C-I@FujG*T!O%EW+Y6TARUg(C;VktQg5PS^?(~W`HQAvPfKI=h zAJD>gqV8t&&2FfUe3Y=ct;<$>RGYKlz%sZP6!z?{=COWK43q=CvaDg#ro5`UXY19_tVr(S=VJOpstc_ ziMzn&%*OJ?DE$f)f{7OyAQ0P#sVu6#iH=VEEWHf_`$W^YPK|(2{%fI8s^8|?k%42S z2(Mf--gu7N#ULZ~G3_s1kO3d>10?5Qv#GVI{QIH6D65{3=h%1D9r>dC;4Gfe-J6j2 zk^lkGR#BKpjiZyz-6Tp5xm7H7a5{8FNne!#>Y#HgAoTw4=EuI(J-z6%l%C^uB86$k zrCtZ&nAQHEql!(LBWR^@G+c1eGNuGlTx*!3US*ll1e{BHN#HqXcltJZ{X?}wO z(uUke--8^u4ZDhn^*Ih-zg0Hs&&pC0NFZDKP@G)ry*RaR4YNb52Iy^wwcapWs9f!d zYjcr8o!>j)4)57x9tk|$YkIq0lOmy;5XGc7sLORi7u*aDJa%#C=VFCVJIv6>c6V)@Fw!Hv{mq&HE;VqC*=(OW~J z&L6R#d}lrBoxS_}-lt7p)XuMuowuX6C1fNPy6Y+$F~DCe zW`HNwheFBRW)*upY}+;Gn;=IGx`?o&9Pp4!_0L$fr=5P2xEsmgOy;? zXFJPG(L&M`Ruf~$g*lA4xFZg z<-$R`fD3nkvlh5)43dWb3fNl*^*ytoc9QF)&m3YIp3ijUN;xATt^rSDnj7I4^xKT- zG~r^qQ4hxKU#YO#is57M(RN~9R9oDhG$>|v{jEVA+|#-1bFy<{ zuox3$m8;nANkM98Dw*EkxY{=wljP{+SL0~-t?-C^r9tK|A3Oqa4*1U?F3_Nlfm=!F zudT@8I#Vr#;*8@3+0G0kX8sv-Y@3ne2OpUun8M56tGh2)aYagCe(0N2p^nw!H;}jv z0KRldIQo-8lhd*D@ay?11*2n6nOGf&t_Z{Q8haw~7+zAu+ z`S;J>VLKwrC9BwhY$QVw!LW$hLTf1T)4Do0wUTVND|@u@LMB`s9t>npcxU`GYtY$r zH@*qAQ!Q^a8=&>fDYc5~wvM0=x1={@tWw+}N<+cih0x%szr`|lD_V7q8wvX&YDcn= z%2tjew!OA_$RjLsU4Et%CyiqS6sO+@z~%fDnM^29Q}GS2l#Oi`b!H1s(TyVZ&X=;a z@js;i#d0lK!fjmRzdA34p4nLNfPEVpY`RV%mft}Rac`V)lAf1f77q305a}dQAVIp0 z(rW|~Aru9vAw)_D5X$!iXZ+4S-#+`Cv)>u~e!n+=%*-*SgkP_njx=hJnuU zBOFKe?Adet+Eq>CJ$pD^_Ut)gdFUtLPoRgHn)dA3E9k1BaRa2Gqv7f9>231M9ec+s z4xV=%jdgyOR8WxLvqvc2{`PI-tD-_JcUW%UZs`;eKH}|f{P=OS@$Jx-H}Ck~y=miX zee)#6+WHea^QU)v_Fv*S9bD7&=1{H#;n$Ae6Pk~{Ha3oW7PidG*JU6sbn(vZ+Z}t# z65I8TsL%26S)CW=?6BQ)mu)ZiAqUUBuX@kUoxCl>yu$r5^$DLdA9df8&gA`0Qeksr zdf_h*sJj=|@$pTkJk3fd-m_TuC?_xP;f070zC*gkBD(4okdP~mMoxj+5x??ZdSQH8 zdFd(H=T!8uW1VN%)ju;DZ7MKxWk%n1X1;sqK64uX+c%#Y62ihJn11efksNj>jB($- zZPVXUQqS&ta9?%byLyGzJT1o?_UzB2&#)g_*h>u5RSV4S`~+0)@Gn;_Aba*$Yt#PO zdn!TW%AP&yd#-6-F}=5UZusr-J$%RZN#5ShIC?kwSx7{BqJMFd)toUQVc3)lljk?uj z;;%mlaksPe5B?x(Z8^m5(q3!KyxS_`4}LvXG4q4K#y}tDKL+jeVZQsVP9Ns=08BU)1CPbC^{e+>CXJmY-kttGu@g0fT9CBo$kzc5#)dR zBDypG0YwK465W~qiNM`+i*#qcd$s#1d-(+Y0Nm~uo_+vs_qT3$hA4f{ zeD@1a-!rH0neT4wOOM%ix85{rrN`_)#vKEvi|MD?cTX*UTV#67zWWAoXNc0B`R*5< z?#y@JAnwH5bZ5T%g{M37-8YDIXTJM#eW$bN&KwXG7L!hSe?ALf_lGO}JLf-QbBn%z zPTxP@UAsAPERS|UJq=|5ne(c~lDrSE>>|9|e%#e@8>@7uHI{-urv!rHBiVZSr| zecmoC=Kce;{ttWB#HZ}epMH>_|9D8=1#kMpo~6S$=>B82y7thQ{O@b0fBDYzCExwL zgx=*p!ZR}S(TRQc#~Pj3|0VtYeWr>|>~vz^wRWd>Ilaqw_Y4PpmhW0~e}^``%fFXm zcQ5F(oIcBUt=;KezPru+-2%|NoZjX1F8|vDXnOiaPv3Tz;pxOqCw4lq(~14N())`q z=!$)J`TYCx(TSZ->~vzM6Z>~U_=_*-#7-x6`nJmM%ozGC-<>-DM~b9JG4v>AcNw1E z<@7G6D|WhK|BqAXujF95V&7dp|GtiNVy6>3o!I|xC-!}j2!lVL1@QL~5uN1!zS{oF zcczm(o#g2x|9?Bl|4WMaE(rgf3p%m?eYO3U?@T9lIBRm& zsnXx@8alDliJeaDbYiCy`+xnwzQ=V+`FnxXihXjF94wls{yF^L;*8t;pR&Owd3=9Y zssEelAn#KC-@M{)JINx;z&I#-^T^MCdz<~!ouc~>WI6xoU;JCE_`muL^dkl){?S!rBO?{a#V@2*>; 
z&vN=K-#vz>D|WhK-&KaE%eUQY?(fj1clq~H?CJ%5meXhX?zKC;%XhW8zxz&lm(#nP z-sOLLCPR;6c2%f<4`u0!efOIC`v9ZQ^6#bC)eHJ8r_b`;Yj=8=?`m_?6+3;F?_P6% zhc>;-zn5ZHFX*$JKFfEn-RWKazi)1>bvMMH&jR@FH~$YV=p9e*czVavleq6f|F6EF zll=dl$)YFv{=o{FXS&>H|Lf;p z{hed|&A!3tA5``IsVBbPM>`o8@&9K)?9vV0Q%{rpF;%~>F2%spgS~#y_qV@dgXZ7B zE|ZJYPya)R{?#RN(FQ=5dMz*Z-?;6+LgRnh3vT*BRcGitbWhz%s;KY8U-3cyjfGLp z?qvTv>ExgD>rQ`XpV`ag0=kf>_@f>}cd+8jz8}Oi(){YO-tnLgaUUpw68+4g=jOg9j31l87B=Vwch^BxVG zE8Y)Io!KQT1xWeqb7AFOiX}mx_VIpHS$P#l>{6x9MV@23xR(Kc4TmG5m7x`JpcPq3HOHbz^k{L_bUf_ zHM1aUX!`I*+?(Fa6=^2t5<#1D0{@1{{`W-Q#gnhM)(JIJYTIjxT4L4=`_U)l3GYX` zWe+6&kqsoBT>aRrinZ~C9HG=xh_}0apR3~;oGXV_>iBo6 z<9lQ%U@F7+CvmskyCrokhRhR2MrFV0-o6hs|M54*2|vcug_RxX5>ryf>|lE@gYyyV z;DyHFswu^XSu_uQ<z%TMoq zdiU=-@$D#FI{DMdpFaEPv;W7i4_*4vl|Nni(~aLh3ER4=8gl;1X94`{t$n%)q-Oy1 z41gX1(IcRL&6EFxsQ)2L{D)-gfBa&)4E(_|P`+~Y(1EM>)qNQ}p#-a%G>__{>eo|@ ztIxZGCx`S4v;KlL{@)>Et`^urA#O15QNJA+CW3gK8`KH1m4Y@V;->e469sRq?zUgK z@$k*SO-9j4KPI&K(B86MWNU3@xkxb?-b z8t>Uq=lXP;+OQ3(4pr32J`hGg)F6T|QMP@_+l-FlS9{%{kua}%4>Mn z;(4FLxlbecf9VdSY*88-YJhulI-1L?v&pegu0|~%rUZ9WIkxD&4z+e8dAPi_%M zx8O=%WA7EOT<>vHGidmtSO0>#z`MOLZ_yoy-7kYSX$;+_g!(tEmoyA+0-k;g>v6fEU%XWUN2wz?mAN}i zgpir!kRocn=X(AFgTVgbd*d;~M{U3UI$uYEddf|y2;uy}Oz6SUC5iY9GelVO%hZ|} zNViHQ6)0%ae4|8&7lmxMO@_$Hxo#qZCq!z@8`k@Wo7J`!EJpP*{6_+YD?_2g0JF?|QTVJv3NWO6I;gh#WojTUWEmEx z`ZAkkQ)SY;cI@;pxN;Z^ZT1h=q5RHP|1#*!KAsylehUy%?QMmw*Xf1iLwuxlt?zw) zIBb%&-uG-&pm0V>>9T)?`q8Q1U_lms5v*N@_3EZ1l%CCXo2 zQ~MnxUMg8G^FB+FB8X{QayfFe2dXpFiCIgPU+eOd=J$=#wxlzc zUkGMwaK~?dUrVc783Y-Q;5K@~Rr^%NPQ=LvFpYy6RzQMa z`>wX64Fm{I38RiuX15JA8ayZz+~h_T1;E2%Ft!pG$~aTN9hRgzg(P#lemFWjw=J$4 zI!vViNV6HuN)0XbmvNSetJ_ZBHV>IFuP#B>50aLqOv{O=kxuQJXQy$&aS*|iX15Bg zs(%uX_A`}7h2VUPx?jTmVUPM94)%w{L0ZWOYy+nGh?)36Tyu!EIMik(Jajc&gkrs9a>U(|l*Df089dHeF!!k1jCHjg zAgo4Qu0YewM6z9dnY%(KX*2g@L><#Q<3s@I%B_k0Ro>@=nA8kbwm zhL*}wwpvZ>|5Tw&*&Ye%nY$wBztJyNIpjSC@YS}f zHUy~!-9(TEIqr$oPGu?a!)^CTyt3^W&~Dfw6K;#C1%;mXUm#bbgVKYT@HRs(U6RG~ z-l}$p8I^5IoWIQQLBBZ2rqt*NFN3_-LL*xi+KG~NmW)dbSvi1RzusI`kPhj#t|}yQ zyk3!c8|d%z)AqR9*7*AoiL5L?gO5SkpCbJ0mojvCt*=G^JnUQ0INCjmq*f!9UQu++ zk3?KzdgrWIW!T^k@AjEgWUDas+}<=&BOa>w#h_4yQ!M=?Hn7`{I=C)lwc?N?b)*xvuRIK}364Cs>-QfhLwvU?4`Jye1w$-un~QtUl4 zET={uG#l0u!JcW*jMU<=LABoJZFf2YEmu84iKI1L$F?cBVVs*Jj|t5Wo(=2^ovfwo z$tX%qsmqk9Cs^X4pSCEY2AvBmNQt`T56`lWcdPD^e)(9vBx?!hcifsv@>t2rA`_-q zKs`^vw@9oOk61x39(J#>y-R&{mS97rjhc zEAxx9>eBOwu8rce>bFbZ?93fyLzU?(OdM8uLQ>$og5IC*;k80ggD5o&ht zp{>o)?K)}+Fz|ZOiR*E3!C&|}?V%GQb{1|e;bZGGydZd!^|ouj2uN5|Un6!3_1J}G zRE>0g9GWVTja4$4uaM!?78}V;>x#&rYRvLircjU zd6AQaGlPxfVa$Vel`dG!q&Oy2wy$AInOv}atf&F8TrTkv>#VqeXWQwF=v$=Nu1{H8 zGg*F>qajq+1C)?E=wgannksd5Ds*dV9u7!n*UgJ$USPN=N`aujoN~}z#;t5_?ok7O zYlf45G(*t*_JrM5UJW2OK%^#z1*Dh62ubUDQvKbFgUhO)2SG^UItu+LYislRcKu{W zmj0$_GGD|N;x)+YuOm8v?G_1unZb);0%pz;{; zR%38isA&B~Mg+JHI4!g@;FP<`kGNY(Al5;p988Rq=cXrz9@A)JD;*af|7!2wv@E>w)>Xr@=?)lmL$tn z1RWCsg0&J?E=4zcf4C)i?)|j_k^9^uL=x!Pj~VrDRjGuSo2ykaL=_+sfI^cX`b}~_9>gZ zk{fRhs=oBHbLAXe>+<(FHd9*OHTW7j-Bg%K6EZ!#{hXGSixY=x0S5WVem!A3=An-J zTyuPu-+s%^DVmNUZ!h&eWa1%o7dUU_^xKdvK`O-Uj{#8)r!G5j4gd1)czB3#IkI3| ztJ89e51SIgq;R475psq@zS=BqUG{xJVi^RfgYms9Mmk{Cx-_);crrnmw?&5)!=)hq zp`>X4Il^>Et0~doW;Y8-?i_5|TJq*-e({tB@A{MOnEc;OyU&IROn%DI*70~e=`P(I zE_<@yq!;?BO06a$I+qZZqu`Pg?6b)Z_RQ(lp!UqF^yM(Nb@Sg2?i{Z!!cI#$5r)Ie z987xymH_d~%cSVjR${rxHSxw-@nCC(2hXii^2O?=`3AMY(2?Vbs>)e`^75! 
z1hQtnI%W@Mb@0~fH!-SH5|@OV4+(R`p-TPr(oe>|t?AD}O=XiDeBhimoSp|@J$zw< z4fxUDgSM_57OD6}+8L%`Ybgix^p-FnpenEi{oz$4sKIIH?U1VN%}S}z$(0#=N#pc& z&TUoE9&hofEylS!onBU!tHYhsn@ubPj6C_Bj@Bz}$^|CeZM_)=49aNP;^xd|kz~D< zM^)bX=uOW%btPVN?k6Go6#2@TxXVjNQref+Jxqdph&OxvZKgsbPM;wYYbUds%F>GD1CcWgWb*cprUD8S22Zo2(yJ}o4#ELTj;r{7KAhmHR(-a zwvkJ8IyG^*Z8K$6uK#9lAR8QnQIv0TnYYhX4qYkF%BXQApZs?Az5w{P7hag%mWA(9 z%p0l9h3IE?nzNq4^<$@z)8aLAWv&;^+6|5d4i z9ENc%2f-wR-8b4Uws4g}1$?EdW03+|0-~_0OZ|bT`XN&p{vI2R&ji*>V`dKIDx3yq zss369n&aIXh+ETF!*C6lV$tJ=HLeUJu^TPbMn+G%>ci9a=YrM*920i$t--f|kjk z)+;NSYO{DttXL&yOtPgna|VT)t_32Ru0>9+ zF!$(~qHamz5lxa@=fs3}riBiO=#2s6J`9oaEH4cjlnogVs2Ged^Lb)!#b5^?@VDPP zZKKxrblg@lQl?G+%woIt2@6fXc5Qzj5F`t-eh64uZ=ii#$nOMltKBR}x$?>EcHuJ3 z;E2Uwz>-Io^ca0hirxr?L3Fq$_<(8Qe)j8=aOD?4U64%3VkpkW--fc7+)zuUg#n0d zb^n%bz#R4h%&qYWg@l4BUj)^vYRFitJAjV5eKTySOUcuox+zh-Ho*2+G?$oH1>_D{ zM>UG`m_SWcPWU@xbL;{y&BSqT{->Tkh&|<^kR?VTw!F_Y=7Y4XWEg8LNtA=4s+1a> z@DK|4>gIC|wC7b$RC4De2w3V69j~^9D@oXjX}l`Mnpcn80XAZev5rFH@V#E~NQLeu zty(Lc{-;?oQ@yERrZX|9_X;lz+mZxsw6j}5Z0voOQ;0Fv`}{xit{@$cac9hoC*Sr8 z){uG6&}8l|Mb4bA6+L+HL%deS=BJj+wpt4Fu#!9`i}Zo|%a>nXKgE^g#eS_}+=Y~hhV##awp5$k?Zlu$!d;TD`kp?JgmQX!q%wlgD(|ljkPo<7P?~l7 zKQvg5aixVT#FaSKl?Zf6Ruva{8kJ6dO6iDG%3tBqT%d3?VO~p0P8JF=p}ySil*hRT zT}8mSn2PM(2W0R(oaN)PM*&1yPI|LCqdg(oxB)jV_&B2f4#R*vaVoXj=TShcFw~T> zsjq5T(tipiTH|p^i#%&?dnybny<@Fley1nx-$>`D^AMN(A4b{K{5)o3MBUE%H@t zbWv>hpz7DKc`?rbJ%1{(vJ#J}%N+LU)gJ~-=7)MJ1Lkbc3ulZOR59&&ba8(!xzJ9d{xfHN&Z;V)cMXgZ#<#961&m5~-}}PP zJFvNIxV?;Nu}ZjyEzj&929DT=>uQ5}FMeEo)NvM%lW%t&ZU^YP;AYvP8t_(0tI40D zL_?;-Lkfd{+qpN#>`^t5G_NHQh-nJUMa!|GU%Bjsh#4BnVySBj*#}jGkZ(q=?cez# z#BtAi?{cckHn=XaP`$C}pPN8la{?f@XqRh&|Ye_(7t&F4P$O}M&1SJ%W%Nsx~c*~&8 zuH|TZn6>JcvrIFY+Z$6jY=+x-j4CB^(FCmHFaK328Jj!tlUBk5)n6?anHv@QDXB^A zO~GPy>;1_2kri|$w;NxXlIbr8Wi=spib*myK}3`w$s+5T@ImLuJ}QaUQ%?ia8wa{i zP~`R6u&cLn$>QzXtJr+C%TZEzIq3OuP_*}@et&AS4tg_QB4i;-Zp*+$OyOgF!MLur z!aOK#R?3|c);!TF70Ky zr%p+0SWA1RIkh==K3dD1B?IENp5Cz1?UP$I#FXxCcWs|YWO+Wx3mak}G_mZ? zl`l{fTqbXS0pjK+4;`?lsQb7}Scd{$uKMj6#%M%CesK3TM2!L&2Ao(ZPh>qSOsLC+ zniI?V@@(6Z>FBs#2HdP8m1b&Bo_*y z*sHDBr!k(XAOab9A@GF}mXh*cq(^wEADjJvq|794xCqGM0>}3a=K94AXCE;;@fZse zRGGOpO!Mmqmfk1YaDN>TK;dy6E8I2^C4qC{$n|1m>X+IfJry8XA>T(kv}h_!GqtV= z+?1^pQQCH*>i+uJ`wUn@cqQ*{gy?!W$zVa17RJUMtPo4=IE{~JGOB~$exk+_@i1pB4rCo`Dti(Jk|^sX}e960U zGN@AALeFbL#0E#|TNYJ1O3R_`Ivkbk8Mu7FKn!`S1kpl`zRtDc6nq4;P;qYs=a@9RI@4?J%ti$kV^v%)KBooSVfl>tb2qGE&2q} za$#_=x|NthiosoozELdog9-EzQT3}+kX0pHOOvbEC}{D_%E}s|%7I9*k;UaJ0*yBM ztr70qEg!y8>E|ViG%&n6c?VX+ zG`+ADBj8c|Wj*vSapl(sh9fVgwwr8iJ{@M^#^DDW2plo6qOQaK!@lOL_hqvr<2WH2 zagp9bI1@d_T-EDqncIu)5}u2D!Kg2W`4Z(E|HOZ{~? 
zQ7=bm8wLlt$&ueS2~%u7%?()f(Eg{3`8CbKFUH+f1@;Qm?BhaNcD`<>xc`s-uv(uh zmuu)RY!s@#HeBOKo%G{24n5_(eE;oPUy36vpodf<&fX4NzUgM+T|} z)H)QqmX|i(U$}v?EK~Lv`y?n9ztUi#VuZ(I;noTaz-P;{y39!!;Cilvuqjb1>smB< zYQ*Zi!sB?A*HTuIP+I=zhEUBr28{CbHMamYPB@#z=%W|PRuDJI$g%V`p%THi?k8Lo zH)OpZBSfsGmwHq+wFkJk1|KaFk7^Q(r0WOSEb7C*S&AqZWo^HMI!Ox`BOBtx@#fmg zR3is3?9=cn6@HjYMut+R4P0$8cCoy~K!&2!mWOhwD%Dn)=5JXMWBZH|)A9Fw1|r_{VJJsk7E#oJ_Qj+aQk674Dt6(I8iUcu_xg2V|uhj=(j?>!34 zX_0P{aS|Ql=w2ms5aT=X(Qi^&90-mkoV)J%y_#=P9JEa}s<5Sa0W?F{YOrvtON0DYn6?9OcZrv4MThac z$nyZgwGR)GLMQ}VN;NJ<5qo9FYTO2gO;6tSv4;@vC3`gZKlZqdy?d&0)C&6{9GFQP zjNF1W!~v!)1R5rkY_S!t$=%<(&0^iR@?<^H#xA?te);pwgl#fRRpnPvgK?kHi!|FH zKD^S93yKn`KhKOBMz4Mzabz5*eg5lBMxE1Qw^ zDe}(Bvj`JDt7Mp@nWZ7jSB!vvqN)kfveOaMHcHoH*IGOC^Z{!dfJJC7RBSouD(|^) zcSgn{yMbZm$4z?D^?n+T1Gd^&Y|R5@O>N8y&BP^U;LD+onhm^KP8mdzA|^;Pi-IO$ zZfIf7Z8P2<@XqE-vM)wyHixOmYJj-^GQo1vvM=1__<%t^+&Es#0cS>vo7X2@qEJ%5 ze41A7vjuJ*_}J?ia4PLTd`Z^XyirEtFk(PVJE?Qh1ns$Yb(|F@4})k~_XY;h5D#aY zQ54f*ti!KOWvN}e;eF}wxPduKOfQdPakt^rPhDdj>vI0q8nbc0h3tzh);4VYh?YX; zV)`ZjOdyZI#Tji+#=2hJghpe6wk#20;mehMWtdw<0tW$??vF>J6;0w$1f2)niP0 zR$Q6KI%!$APK~d8@}Q7OCPNOytAhy~riio|5}~Zy%M6%XtxdRPqe96=#KgJ?hYrw@ z>)ls@-++!Oc0F);%6+uLhzp4ZkmeraDwf)Agts3|SxOP=3K5H3={(cjQ%z-u`9j*1 znPN&R7BTWMbGlJ#98o5d0OZ|**kXVisWB1@E$<67A1pn|M3X~vWwf&x-YevcH*E~b z*521BubWjoCWNHFd7Vc``$G2l-HQw7jvp>XZvNyh5l{9^>8s5Hs-HHl0gITtVrM88 zz%w{yBg#8*xqX|tpfuCv7!v>(@_{mC0~Al^aJzfDHpF2*MYlw)T?)48uEjX8%8@$d zVxR?Pf;X%z0v;K;62cS#aQWog6WK{|&c^3t@XqVYQruJ`vAH8>-D;%y7rp@8Oo)3r z4z209Q(Tqg+&W^*XBk(vk#Ig4(Bbltv#pZ>*8mtB&Fe+lh)x{RT*{8Slomd*%ohtp zBTrV#1KEjsP6PunZH=b)DP_+!Dl+3-3nuN)!Q2_KLw2va`I%$bmfU$r`Y3Hkm4pZr z^=>>MnVv1B;>+aAp`fN8(TEuSsH93AAQ)p92e-V# z+yRf=Y>{rU?fu-5qU~ki+hvg}hY1jy#j{8z_Gy2)Ez|K zDN@91lx{Jq8yOmL>s*uz+~!>VS+J&;L4W}}OLt0{5#xGw?pBPeGs3=%9i|{}E?Q&l znGkO2{RGe{_=)sx3^0;QhI;p2i@-vp1bm)W2{~INu^JCC83MughG>Ekw+vE9P@IyR$y_ z9!f?4y_arMKNZ*HI$3EEoB_8;j~`Wx_wbO3@@iA|^r*{(!()33?d&g~>`#n&^^N_f z5|;Qbw_)R})|K){M7vr%uPZ^gpEb%mW==)g)qUG76QDg9N;qx(=j@-uoYklUr?tf# zX!{HweQDh8*9%JSaBBz{n7T*+fcZihsKdCw%bbPEKq z<{j(R9cVdcsqttMWEm;1HpGIvIw*r0LQS@J^wj- zg-|1W42$5m>MetUn(G{>m_RnL-DNa*ux&Agh|izTYjeO3)aQ+fr?otOdm*uztE) zj3$$}7zfWtfXL@L3ZTj+vHc3v0sdb>%qNxoa`rHz7^I{*0CXDUg@~JDy~t zDBkXf_DqhRoCi&&6VsFsXUUZ>M~=^UMmWOA)p0Uyh7p_0z10ku)MqhBUw)5ra*I-% zq3aP$>Pa%_`o_q!IHR=r6rpB7vqbj_bqP1Snwn3_*(Al1HwU>lvaVZ}`YEzn3=^uo41dDPxrO4xr_ENTg z>>$t6eU9zSkG-EygkJqJa4&lBPjq)IHiUdRc*0>^g)n;MII*D(NP&p$T+5fL(J3@3 zt_9}RtmH&q4tF2*n4Y6t-q9@4e`pr1ibq^rRl_BvnT156n_2OxFqcg7f+JNoA~Zt2 z>{VuGyue+2p6oe-sG7igEsf#a>yb7B5sjh*Z^>Qv}>Law2 z^IC@@Ue5V?UOzXWQ`}t_Dp?G_*tA=N1ehQz{HY+!ih}(>f3?(inbK&jLlWVyv5{)y z%IDDW?%67AYcfJ3&p&q`?grAtgRj0Y4BRROxEoOBRUKtPFd)#F62+v^;(WdLrpBD{ z8ee2DtP~;&iIQOXd~lfia2&!$?_}#3kV0w(w5*$1@=V4StdPYyqKq0QSGY_N8@RMDAx99juYZqsmL$Com~UfrCp& z`$vD4HlSvA3+%@xwXOui9e_$tMrn7Vj%X4>Wb56CpjIzj$mVIdf=Mi_ zMaZq@OE8Yz3TO(EBBW6|<|D_+An8dMy17DfN)n@x<5~Vl)j`tW`IG|WrRH$FR?@b> z*dRJas2j6^M~|4aYXLV~z+?40L8uGh^1HFMfBfjcJ!B4(MYPFeKo{2^>b&9{a7+^J zV61k?^y1ftcB>DE?Rl0`bYX0=J4&Wl-DjUj%?wJ<3^iIi2T7c`Ot8Ba0L+#qQqMqO^- zDnr1>ZViV7n`E)%w=k>7G15+KwB#*76E5vquS4`O!7D zdwvG?bC;ndjtH&s##HE~pZTLKy@Pw61=u<+@s)9MTr)M44hhF1H`aluv26v;yLM$i zO#(gOsJ&>zXGPjEpxB0|-X|N+WQhDH1M2;FsOdG0AL0Q006ywKaFwptJ=u!LE8j}& zixcXy^nUr9qk~an#d7GMLof!GeR@$;0;a+}dOHDx-LE^wKpk%J75 zNYQoUr@{O^4^8LgA8)P@dlHj4J&2M%k+qbZ6dy2F1g(n#>@G6kRGc}8M(g7!c_PLl zdZ6AFcEgHhDfLBzc%RZjLz(7YC7toSBw%CAFo1X(ct@MT6d=;22?$I8oE0Vs9YNXW zT5DHHU<5gqFkC>m0tkGd!+}JnEY{n^c#iU(rO{H4aegLzyNSUVTo`8xlSJohO-c7&UtucNP9xQDj)$nwkxwC-A;&DxS zv^)WTJG<71QAiVQ7Q`EkI-xzgQXchX(DZot`xl}#+GhTEGt6}>5RaIhk=x7g?2xs0 
ztH8597|RaeUncy)>89=i70&?f)qT8~$zI0Uh*~}yDOIfyVmKPUfwN}My=gZ9tfESU zw^~K=z`c$^-oe;K!eNI1K)cw!PqT_`3dS~c%Al%bfVc%!^s34ngK7k<0?igfUJyMx zX_6+AYu-ZEt{fT;vh>tDRlvpj-0%#(cEg*N$hUUkZ>_)(V`Ko=M~G zqdk6;qFD;g)XC_gifaCFmkcqnEwizRS|ApLf4S2Yq6vX}0;#(5^%^t(N$Ypk?PC<% z+1t`Otj|Z^Vra@O7lOHwD0C32VnNlw8-=KZiN*olb53>08$It#WyCq?75%?jMquK6 zB%~P9#R821&H$h_uM40~C}?^B%R3u6$kY4Cevq{VyXg{~D8yppjJwR1tK66HpsHER z!ALI_$Ol3?i3<5!qS(UFZ*14(!q|1XT^z?bkwb$xMWJ(r=-pK?A4C(>im~H;Y%vt~ z4DLZ&+OX{b%(Qbb)ff+t9XTV5D;vSOcebv;*%m%0@e_pP z!a=t}^`s|3t#8+VozP*9=~eX(u)Pop5d5r$K4JLsKxSnk4d=C(!a8l>KvNvzpxd1N zaY-Ejqq+o>ib+jSU&<;!Y)7YEV5PN<7Z5o`(BPkP2qwu3+((Q;Tq`!6WBtjFsqA>m z;;X}Iv6cK7gqO`#)>-nz2O5g80SL$nbU%!wRUsxqVG&T*y-n%O4&!y-#n2xopnD>L z`~~&&+9uzPS8w=v*IFWBY>GQ_CihP{WAOixGbtZoJ9($P6F0DcM)Tp?sr@HoKIedi z8W4cd65S?fLtm4HuBTe%mvb<<_e=FNm}eOsmc_iLW^B! zz~ZIFE?xI@OOn>YA==rQJ+f zfNFk5RV+}bbs2N6S-2e^hZIIx6y6L7LPhxtA=*`zPTUiM$X9iBwA7`t;0Zcn>$H+uHO*OJH!Z*)!b2V#wIZfr} zcmdNF&@ti*i6Y?$V%an_r0wXltj z)6E|P0a(hoU02%CGl1?G5M`ctEn!0wR#do4<|Nk}dr z20ZWErJ#t!+-q(6h(U>)pQC;Dl$PA;GGB*pIm?-~&eQEFfq;4&qUHC0m{Sa3PW>w4 zSpah?`ZdpPJ#V;?%}yC2QY^Uhjm+M|nsP78(rkNCwjug7tUoMQ1(?3g0N5-#+n(~V z^P!|0nd=$c6#)8*IAHHhuuB;!mZ+(f@KXEaUXy}0jhlo;9q-ChfBl`NojXB z1Nyr@m8vYg@;!NR>-%}_Tk@jtKhiUONXb_~@ z$Y90AoJ*GdN8kxr3CexS+WnD|gKeUc#^jq2 z{gf5u{nN@VlNLN9V6)SpeykIynCen@bfFk+e6LU$`1f+NJnqSZv$u@JT{F0U?!CC4 zy~NkneerU!ic_MVRotUiL|d2}r?$;dRPjkmjWM$V+=E*}A-%9SCyFddgpY;_hl)j+ z($i8cO?hrwU6U-X${y5>d|Tnda51h}--;W3v$MeRE?)+e8}g{-kZaz@M?I!bna0$I zj4PgrEoO|p``q6%`RB6$ny#naFFx%%Z^?1`LvbvdK%;s<_G{e_stHMhIHfkpMwwGM z@sI?wNMzLJ5HwM{KgkMLJRpdzXpmN&eVsd~8wGPxi7a;$5q2u3sK!wpqJop(3%cZ3 z8(Ryn&%8D}3QI^a2Opbw>TP$9w>Th`Kd(_;;gBM5+g{9|HvMaZoMJCfNnFxiAEi#- zs4@k+V=Sv#ZlvF|GFMZe8s4dEZ*>;JS2&r3Jo?}mGJ94X#Mp1965pSfaL*(}H!2p( z6)B>nRwrKa&`7sKASQ?FU<~TZeIH|R-q(Uu^9|#TFB}@tzpC{o=^2{W=-Oum%omqb zY?&5B+Fg_E3C?|OTwzkz_xocX;BKQI%S{M2>U2rI7g{w1E6Di1f=jPFpAt_kMV`3C zp#A0$d)Q5Z+j1^e{T)_A+S!*F>=p&=j8o1%v)> z8*GuFh%lcIjSHu?WZ8@h`=uVa32CZVkvKAnbtW20cnNAedlPjMw--)T2IZ%kFQBYa zCv6+Ap!A<<2bubKV5XG$tiIIP%=l0)G8TiYHT|^nCrjM$uCBEzsA;Y^=Ae9;-*hS} z-QC?QRM#zUCnxv z;4fSnio%F=%MQQc{VujfG5svyEJ%)@womLJ8-E$En^p!}pRpcv+H~m}iu}2TmGgbr zq@9@8^R7FGd#bg`GNsl1U-Plh_IGB-g&2$B2}g_-%)oimOp2$R`^}7xRiT&f$Cn_R6ZDN|9r3y7p^f{pzKcZTY8MH=N0 zW5kg}Z4|3V<=t(or2B2}%FyBL$mJ6B*$Q|hQ=XxnO~hx? 
zCA63%zx|Te`-PGds9yc(pn}wbkAv{&N^`hEF<7}#N#!J{t<&J>I4LI;2ztg(irVe% zcfH{ez;hwgq-Ag8jov`H>*%Gxvm*&vwQe1OG5cMH3zot->`SjmE#z+`u%3h~RA}c- z3u=E+Y7CFL!vHjxebl?kCkDmvQx8p-fIkIr>eg9w_NTm8-O5uPZ<)~%58Jkxt69lc z0`k`?3x@|n-aiiRjurdt`Qe&Zp38-iZ$DE!0(yMKbxx{V0dgB1vjf{Vhps=oKb#G| z_`E_sT76=_IPk@sHvUU%G{o=af2q5Fnjxur8l@ZT)^P8Pcb<3An22!qi)XWPK(UR1 z4*Qtmb*NC;c)sT1O~1lNHR~_8##wFzn6B}flMP5OZgxJ)Fw3venHXRPPh`mVq+x~o ze+H@<8JF5j!_8z$jhi`FpE7Ay&^h!&ReimO-KB+3`?B8T60d-+TG^o_KAyEIZc5Mx ziA3$`EYJSnU7IK}#yt5`hIvxrgll0AsxZl(q2DM=MibqqEK#I*Rj~2qQ@>tg*1g4N zQo{v`c*i!x-&-m1ub;i5Xy()D^-=@vRtQ5ZWZZyxt6t8k$=DwSPXs0a1*}FNb89XYR4SmXdel|ryN=e}e>GnC~Vj)1*+B2qL zfyUdG(tA-O{ZH-lOd1PXkKuI{`(NF?_0Y@nMdIBkt8}v+&dwFI+Cjl{*}ayA?@;W; zNS25v03W|#|2?>KQ=#kWn127y3JAwK{)|)R7YowI0kW1T$V8wMn>etMh=ruVtVQ69 zF7o?4OJH}34ZO_2?+UFtj?TQG{LWflUB?D=3|=R0TkS(ptd`BqXJVEG)q|B=<0rGN zjpv)qYfbjL<|&l*{{A{B!JuCjF-sh|COF|loxZ$fPL<3#7U@`$O%^ZA=MY!QUGLLB zd(dJ0 zX7e)m?A5bbo^6J3o6*oMRKjj*b?wZ#jO7b@k$!1Mh5KgDz_8M7P~5bsit%xO@}13Q zf$YV?MK#21#z@g*zw&^qV|9>eW(>w`B;rKBnJ(%HZNS1NB_kiWl_*Ri2K$5I592=+ z4OW*GqZ;n2gRbh9I_Rw#F4)^QodwSZ5 z>o?(V0^~?%g=8ekO>fC;O30G*vot`{E;j+mAiQp$4r+PJEGnE_(Z!hKdK%gjz&zo6 z`bFbLD zf&>17z26!bcROfK)alQM14}n)A}0Byt3Fmi!{N6IV|NXgi>(K}_&kIMTUpl(7Vg;R z>)!_^Ol8v5-_h=`Pc2uOz_AxQVoj7kh4jCAQ@(Io;pDLU21 zC31z5QMi#U0nL?}!3_?l#QLyICMV7{c<3CZEOdc6*OQ25-rt}7 z{HqDq<1_kFg|dp@%6Ix6Bp1A+k!LleQ~6&kZ1JgV3$IGUg! zv|$zd3UjpURpSbj<+2u?<`BPSk(|s7m1p}Y@Y)SUD?8_pCl6*QJ~e-8RgDHywqYpk zB`k3__zDxe$S?hAq_rNYy2U#|*)~3$w$ZI!N;OQ+a8u?}EEgwKtN~pfseQNE2Lma< z`x=3~G?!lLV8uJ+cqQ3YMnGG=$Vz+x$xV~yf)8{iaik2l-DLnLLw^)0P8D#ft;X#p zA4>Z2N34bX;8JyO!TV{~3e+6*Tk5kv%6J2ZM(|r7rk4jk$6h7w^b)4swd+c%=0OcWfv#9Io#j}J zSZ<=)UAB^R_HN-zSVzq={TlDrVom^fESWdxw4%9={q_u{fyMUFnDgK$rk>M94|3u> zxP&{q^bj|s9BBvdbi96e)JW+d#oJpnI@R&gi>z*F+QDl}V^s?KaORXoU`3G-c$G5w z52fj@1W@d9hpH_B4htaJaC)Q_r;Phj<1_3uc@3ph zx}d>k#XxF`{B~SlWrn~je{it^X5S|B4|Xm%M?lAjI~e#5=5YL(NrprAV8eGM33f8g znx}G0toqoy>IHi7iNv6Whok6o(x7Rnf>2Yv*e45OG^S4w z-WV%}G;ko|;LB)3fVmA1Cl~D&A~JH;PAtv z2pW zW}kdzg#ufqN!DQqUqIX%v=F0;w+IEx(s1{T56bnrh>Y6~~0hWYTcD3Q{mhKk_IV$zyC0^fYd>D$f(aVkGGC;Oyz zkr|wzrCYr13@!Gm(q-4=vR(gl*}I-ZPCmtRGyOODiL}T|!3`%9;F=F=%@o`t+LOOf zL-Ivk0fX4y8dt|RU%5+(H^O`58p}?)KljlFpy8t?x4|HfI98cAoij^+p$m+ zZ4h%ZUL*Rrn5+ z!B**>3H*LJ1=MfxZEmBZ3J^L7ux3AI_i_kPqh-&dq$eN_jRhK!JYN@0eavdc>}^ue zVgXOuIN#~|rrhYBCr7Pp7Tz;%rj~6h+q)Ul zPdP$4#iqUWRatP_KS05%X_b6Hm2Pj(;L6mEy37MIikjV7|5vbZtL6fLg!Y&$wEb>? 
zVjlGCEC!jpKFy)jt;R*2o&}|Dm^sjvMYg^FOeElAsqW3`vWdwfAwe&jyTCLY}3tn$^02u1eNwPd5!1D*7q**NO ziU8WiTr=<3B!>iw%}2V`>d;`Kyf&9EkbY}Pqx&I1wHiSqW^?{0a2Igel16PUVkgIy zcHjToLXBbu5NniiBaNVVQ83WGRc#zI>jdkW09$nkQxicn6?P{@>f01s+T{5%f+4ED zCcW^GWc@~+VhCFvrEFb1I2BF8vU9mi9SYvcw$Fzkaz=o;{f}RHpZ7F39{Qt05O^E( zDOM?J4n*l)!AC^6`WhHbHjU;Gxd-(sg-{Q*?(F!^O{bateuG%ndzj-KSmv-Z%g5qk zKLmPh3>Uu17=B`kw^y~vn~My+3kyS(32P+!7{iNXl4v$v4Q>NQ;%pf8+!(Z6RDFa- zIB0qfR`X_}#iPCC(lIU?dmLv0b^rjs#XE@cIv?rD=*E6AI=R)KMu8MAkhN%I7@T-dXkJUZ;W9P0UW7!T|QekA7D$h<{Q=FoXuGq3FUe*#T;5e<-`pBnXcD*qtRCJq98 z+X}g4HRWFG8z&)B=}w<{=ih^cts85~2~6qMCl6Y+51}n}TmhN0R|njXoi__<;&!MJ zw53O%j`gGa))28^2MKR4-(X;)Gb(`t4I7HD+8<;W?`@7I33My2ZqB&6DuN@6t?b*o zLk`eH1yk7hvO-!1seDWJ>fyNZejQpZeQHL$&K1=KLmEA42F*)orN{z2_h~TI@%3`c z)#;I8j_htt+9d0DK()*Y6$OQ;n;BtwHCOm06(*%&mSIs;KHZOZvsu8OnKsO;L63Xu zGwhLJVmzgeRbtvwH-lO}`<@RetPlZPQ5b)Z_QhMbsGu3I3mO9Mrm5e1; zIT~;b`{17W6o3>g)jGR6%j(G)FW(r0t!}`s665ZRU#5$Q%SVR-&Uv}V!gZQQ`R^wK zZv56&_(i2Q0h~7Sn<2Kxt!GAT0d0A8zem`^{*iNh3O`w1^SNbn5SE!15ep2uBKW?98*f(e=Y{;!vuLM z$I%$ORP@wcfhF0h;2j-Ad)bB6XmJIpl!u|D-RN;dL*7?Dh_F40;#x}r9Q{em3EbnP zl?2iU;o#Mjwg74JZh3nyD|dxMumi`|!I5`)UCxcY`mLENEkc->JJ39cD%!^+-%l(H z{O}7r*L#$qiN1ty7*G`17h8_R&`#?$UEocfP*u47n-n&%NR_1zhvyWJh_*co!i==I zxJN4kIlT2E4y-WHglSeKX58(YN7bM_E(j;nSc%x;jpQCkVg{m;*z2r$!O61nwaPJG zd5&R{szeAMr;5z4$4UXQsTQBO&y)-%l~?69+!v}@7gX63f(V)hK*a}e%5DQJTpFCH zzOSAz$UF10A?tZogL3$t2_8sj$jT6b6hEj@Yw0U18Cpj+NFax0!vAG&Ed=)V!83T| zpS}zf;TwS`4C_*ZjUFp=Fr;(i`@YK0X|6`Uy^o6E4$HJCDSPt}lgMkupt`J)%B+tL z(;#SRsto%E!F~ZD^O|juOfRFyeb(qUuBBn_P!I$9KHOai{U-kf^{!XuBpDpSX|+7c z_@kfe@P6FPDzc8wk!ussf=|iz*4!KK1f^@dxpAkvOf4V^wB0iR#nZFvTjj-k4dh1; z$E1$DjFNdZP1IKVwwE)MQYXmk6Y3yyryrj^w3eN2>`I%C$jo)) zRsO5wb~!zklkwAfR}!Ymm?F0Irc(Uq*h1%rg?MmlxmPaM=U1|=v8TF9HKtIq04cb2 z?ajlYN^r7)&L9sIC?G9o$Xo%EQwW-+Q6V!(CGuLHmgVAlX{6{)k-Y332Lwm6g#9dE zW0cS5*9S3mO+_IpukadL`V%>$*{?w9igJaN$bmnrK|?grygt=$GmL-B^$E>Q(J0~| zyK_pxdjR%}8T3|f247JXK9`1Q@&PDl{AF%qD%5a`vksZ|B9vip4gLs7>0zFsA?T}2 z`UO~y`k2sb{wJnBWvug{HR6~j-aOA&$owtbwzU1cFv0Mcvz`hN5Cvwb1;b8DAP4J1 zI>rSIQ|%i=aUMu8bNl!L21X-@lSTAtu+<$%E>17(D3c25 z6?w}y4YWb!@jjef`%!v92i;7s-M>2eVJ%u}`=BW_)UbH3W+FMroquT09jfHyU1{C!lsiM-LcrVT1OSmzm?PPh@?l)*;}Bt8pqEd3k$tn=T&#MCk5kP0M?N@lH6M^&B{blq_vN z$q_&nQYjAUvW%;fpPJp6ojXQZ)dMLyiAK#vsmAKgS4tvm(ip1FV2}ozkzL$9-jwZ+ zulTNko$BqaF9tY#VVv377~s7ryw+RK78HGqf7@lC(O=4GujEp^@lH-Z{2fbWze_YR zt~hq{ph`0#3psJn(y}JCkUg%WecBbLin&rkDbVt6ipkf)6;5#MB03GufvNtOHVOjgQo^!&9tI8Nprg2OOZL^gaT?&_fmnx5w*vB3Nn+j>`0-&@ z3y?T(rc~CP&4RmciD4nmw$JYtidMuGD}*>SqCsOFE-&v-`s&ulGOE7c9jCO*E^!Yka(akXZ+M|(9()0bi=%Xf6H&^e}*`t3wDNbr2^Hc&l6pnbIZL^K()Z~L#+ z%)7SJOHM_sCF-l7J@r@kgpy2y1*@#ny0M}Zypv_@)ne7CyxKP3JGR3Mn9}vG%MNVSRzu&WFgGUDLW}y zuad``u2vtd7(9T9Kheesa6_urf?g8x3r&5Ya-~+-xVBE390}4U;7O-@2>-EP=TnJ! 
zW+{YPK;87<-CfOv>M0XWVsQ6}dL~CB+9vjb!M9+v3{;4g8#hU<5)x!5pUFIc(89B> zJ1Qh?g|e2pRdePYt$^esVi5|*4o@=#290#)J=E#D?B(?rMiq`K^gmI+A#uzGZk4!= z!F%HF+ch2;bDz`-#!f^F3}#IZ>0~jVWR9Q#Hg`GMv8K3> zG`+;n6*#?Ga`s9kH-$d|GZ`&&mAZ!}-xoCD3HFgWJxk5?7nv+WkqS~%H~Mp#$bb&_ zuH{?9(k(Elw-cRVw02|!i5HgNyzRn2D6Wy{Z}JEzmV={j&#Q77TKTy26F=LeZnuCdli!o(nGTKiNqpL!f)btnI3q0#k|u*FvboLIBMGu;ijS+tD^Oj=`=>37Y!BdLyI3R*4>VjN(Vfv$5n+IPz1# zIcuCSw{$&ICq3ount!O5EY@?NxcVdnGPFMYQhI%VE4^86`AlcQpbbN=2@d^1vKfzn zA#opN4ZHK_BZNNdL#|3)(Cb_ApziATN4)3kTF_fWlSxrh)OV%fBzw`aS~Z5dkzDop zIb@J!+yZy;dA7YgQguE}MBQ;X9?2J#Z~`h>L)Fw%-HPkCx|FcBY#mHD;6>5e<5G68 zOiD<+`MDOp}>Q2Ys z5jR@QAaH9aE#buDBBzXH!isVQmpDkGO9yFd!Gum@rHcp?%fZR*7YR6*7`5{w?QC zO`LWhzdTR9j`}6Q)U@6nr4+GI3FJD1qboo;#O(z>)mQHXITdmKWsaD)p3y#^Q~bo( zLeVzSYac}MG&8yhQ}~hq&!E@%ei`fb?d-$73G$EtXA%MaB)ffYFwR+~sz+K%-J`XG z>OGl7Pjel=tS+CqViDl@$+xHxvy}2@oyc-QJ z3f+5;Q);A+Z!L^kw3C5Qu;Rd{;+I4MN#5CTSd!x-AcJm%^f$Q@6>0zN^~%npuuElm;v zI4mYgj5Czrv&CH$m=t0e)w(VX_*&*#N}Q(Shxyh95|vkL@~(_VKg<}imn{?TJa-hS z0!9506t%h8BW$!fJwYnWat#6MgH>2rj+1~f!i2Ji2C>qT({L1Y62ru9|CVb?vOXNZ zGO0V^R$SNq2L&Qd5ke5Sg-^6J5wd{L$>O~Qep_u<|0k>Mh*O)jLEcz0Z7|LWr~5Md zbg%21HmoLkq=6ksP4O{q?u}vR3$`xbB&Y%wcKnb6b*<(?N0cJAJ&!0R1)dIteLDBP zjX9h)b1mAm5Uvgm({@$02_Ebi37`KPA3p4+fX#OyuT_UPrvzJ7txQNveQ|ihoKrib z3EFZk=d*Br>aXW;4OAEuKSb#;=|a68XebM?GNIgF3DvQ(IBMWFRL3>{*0D$m60iRF zjgey|8w*v-gHAd7=u}U6da&IUWq!5qjR*Cn3?OhaOQc;YmYO!w7W!8idE3EXB9uK5 zWX1VlUz5L1sBuv7yA*bG#y$zm!ONxJtdhml4GQ7ZhZWQC!BpNnH%YW}i)Ub^W?PRK zJ3A@(Ux0)d_<&ctj9tb%&8G-tdvBk0$Dh{Z)`~B-!Wf=|UY+3Z51yHyMyHr_!Qa6tAbP82|vx3OrZRtm&&YkeW}P z_FV-#)!l>RhWfOjKy-i7eAG@f)u}dCC?$k@!W9B}abE+#4^)Q&Fd=`b;NEFyP!B;c z*trYt`6m&Pdk?^fArA8~vww>lR5DN(xy`zK^KZ#TlLqky09;jEu4>@pMc{KUGRu+y zO@f*ycu__*wA7W}9&1Y8w*@#@u4P|{W5OtVnZ3HS-f~kF*l>`IFgu(IFnMuPiMCjk zHkA7bKP{II5Aj&$p&WSzuy3}%GCk3h7+A;Nhl>qtiH_qK2g(wWP=oRoc@XQV&kjinxizYa@N z-?+j|XhoK+kPzX7m{?>1AcLJFd%Br`9eU$5q%RTZ*ra5X&3MY1B-D^q6D^Op%#hVk z*-lC993h@Q|izHjS7DBhnJ zsMqD){#z3!APdc3{^;R0B`c92W=M*l`qKe*zvaF z5x8?%ZUtrcp9o~_S_=o%7$@)iFIoh)JuKhN?E`(p0CYC3zr8%)=|*to9v}fjV<$xw6pKwxDhQKGAaqyc(}FlzJ2Z#VW5yLXBjF#q zv!+c+xpJ&Drf%HLCiofpxDPV{q7B0ML}?A3cu*-55|ASYZyHH@*=^4zLpBhE(PFQL z*fJrOrfHS0z}a@i#=c)0s4*ltZ>!itIRvfkKfD_ZKR=N?qp{A*`ZH5xiLV1l5xGU>Hy|y; z3*vGhFhGdz%+FcH^Q#*{xubtz@#k?!QVTBpl=#nw2#Un_TPGbUg1J2~mqi>`@4d)+ z9TD^Zqy)@S5u;BZcPS%*oRnEt0TLkLQQMjlnvpR&`C^K6y5(t?i%!8<;)Vlasw%580 zHsV1y7g0VfK2{6$u4J-V$`*&SU!FzZ1|3=VS#n@%HXbxt4Td|!fq|UXu|V43WWTiO z<~RNL@t%_6hgU6W<%tv}Vg_UaNtzzD?7fLm4b|bIux1Ag;CUq<8p~y`Ks+zue+3lm zdkS4@`)+cs+V;_3y|jtvP&>~X!wyTjvy4uKV%!yuv`8O*0C++->%Zd(}J-BFzMSkzT7o8{9#)^Bl z+$Gz71vnT(l?eq$lF^&Ccf36JPy+f>St`fRbG(RNdSniwCCBeE-xzgzB9y>GKoC7@ zz56jDdA)B>bw38x11|OmGKR7`y^d1jF^b?W3y8RByo|o@Hy_o0UZF>YphKt#NDI9C zSMvkuO=yf{xv_c)Z@$DTL7iAf>0ciMeR+1J&rdr#x!&e?;_O$X(&2Qk%$@R|ScNxc zm&}$}KB|M9aA!V0loMX&r;Z|M^#su1j#fZFUQ3#9YzUPmfML03!<;zrdTJ<(0M#hf znXu`d)rqjt;Uy&Z8!ktqT688MAv729J)qY^3pnWg?_y-i zDd{%|#?EwWy){HTifG5Adn&-bmdnnpf}_ zN-x38KERh)>Qu1G2p=lCtaqwp2n|?o`$RvAY)r-+Mw!^ zX>x|6Qfs;%2-wYhCP~>#(!$_@(b4hhi0&zgu&RJ2$P4?zZjN%uX#@?qro^0^oS94j*MSfX~ccmIVxmuxp>$ z8t!gf+o$NE8QflqKXlvc4|3kq;mrd1xr_?`=^U7OrXswu2_OuyEom3q=>ZiN^NvLO z9oysWiwDmax-Lb9c;9!mK&nZT%BNxahWOBgnw~!#+Y1#eztU$gq3p4Yv>mbgv7+P+ zpDJY7WGgxD3YWM>({`$!%lF(syc?;yE>7t7qhA>zZW92V&o9-(t*ohNr^w6fA6XMMili`KX$;tuYF#&p`B*VDzP_yKBKLxw@$QZM0}1 z^K&4+JIkS#s?jq0F!nAfw!E_2jXG7;(Lg8aSq?^=7_AI0NNCA>Ti**|!1_D-`C2!U z=N~1I}?!0g#1V4h&SGoE+-^C2KUn~eWz_X75}hQ7vsJ@W)?FASYEt2+j+ zm(shux0c-fVzUaLz8wLU=x#;%e?=@C{)5@Y>S!P9?1OZIDF@V#P(hiJxO{=TL6#xd z%)2=@mjE_q7LfSoWYmGA@mqUJvK2zo*yzW+%PmmrZ<{&|Qe)MNa{8VL+`S1-1*P)+ 
zLxt~Ez?Y~jXS;4j*~>5sW^#iCHWV85UBMldqAfsyzxu5$puDacMj!%5S||xtuK6RQ zq-vAyWo=+1AWkjO&A98!gzni1KLa6m4kJVA3!^^2ueS#43I)J(p&oHo^W-3iJ7&(uyHb+@;mp!{&!VI>A7bIktfUh_teO_&BGFU zyOdXRlmwP#1@4*9L966N8r=3RTHmnbV`njP=yWRT#of|7^roV6TmC9bmCL?$HKu}Z zwrnfTyw&-kav;thp{}#lX}o^jQw_ZrQf_k|BMKyg)uWH>DtXQS!k4bDV2d|sf3xJH z$KC+k5}V3_?D$qunO}YJ^bC5BT~@gh5cx4Ej~U|90%GPw=9Nf^$=`t>68!`l{G`z6 z{UrAvUmM@*Q7;YvmDB!L2LwR1l&O)dmd8jt8F0jHtU~}Yq*deF65vfFSIqZTzXj1z zuP)J99m{0w<0Zxjl(*o!A~HoPAl!JXZC4ema&ZL-v5)ZYoPy;rftQ z82|IDn7-EZTg&q$qtfipa`E>At+>UJ<-SSUFNQdeI!#U^q80S_Y2BgBCqYE(r(8^7 z>pL&F9q*Dv=64XB*ggXel|0I$y2E3&M>GM*JN;W=*h@^ychBV`5Dli?DUt_Dlb=sG zF(-*)0qMal=#&|$PmFtrm1#;g$tvk=@z!lsZb&-YC`VuC!#&HsL~&hvYTDXM?WY5= zV#Im62|o2v%+6T668jgQSw

    7RIeMg?ChJGA>$|a>XL_2HRLKM=d{w%0vSEEiM`A&UNZ5!ZT^XxG>G zY`Y2Cmq9c&hyX zrSVsLAzHg_QxgI-z=Yxqa6k+esfYmVi2xfG%n_Qnt61q#RoyzXg8M7-4 zC!=tx=gFKY?&>3v}HuTvDj1%g=2&|!YJ`_fQNL} zUYN@g-k+Z9k=4TAOUdj-fo_a2*8GSjNs8^6H__Ixb3KsM~qJoy=#4^Yw~2@7k>bBAEKeg=W;drbhU^! z40{*1a0+Y19l(N`TYQ9jcD)YnY0T3Fonx_jAE-rNK`+lYj7hFEtR(^?p{%_0d`i zOHfrdWyDS!J6d9@nT0<%%W{r^ zuI))a3$T|rhuzf;qU=h$xh5X|jOXmb-~mBXt3jfdcJrFfyPt$_kU9qG-siD2Fw1C5 zA=@3eveQL6+I%}UB14+*&h#=qf%RR#tYxtTrV)A0M?#d`a@gI#m~&tBrHLv$4P9T! z&lZuON5vgbxEK7c`d=$^X*mGs)(MAwM_pKgQ{-j#7T#XXO?|3_$g=}daLubnuW7HN z18x^~RD9$pSz=#E7Tx;oRyfKsq#rh1a(0(oe@=xj{e2T-W2X?sqEVHMvY8iFubh%_ zj0~Rst@}o}T7B&`6e6(wjzL}VA1OHV1#*7_PU(||_5WHx`zzQ?3?dNLbE5Fs25Wnq zpCeE|iK3Tgt2Z&j#xf701gxiIJS5o!n%lPk2aCOgbNKgj1Ll*&<7o|h-&d`K$jU_i z|59cy9ViW|HmANNW_i{%C{@7N?rHy^tg{%l=!cI#G+j(|bL$%i_4i$lD=}bD1FhML-GuIUnBt9-hVV|#6}CoGZ!Ozx*eH~ z9Wm1+(L##sVzBnylUbU|c&=u4`27XNZgSX-)aQW6>U>vL&9V>Y#A;E!{+O}tjW>Eda}JQY^tuvD}MSd4og+3wk|tvhcfoPN_1D_7;iuP6vpw5R@LiktVkF=Rv&ar*kmo1)^jZ}`=+S8^ z9?Xbh>fc)`LtH}p+g_fZF$n}~#fsOf)gbN;x&1jCV2ruG#51+4Jeho^R<1sPy=H&S z(c0&%7TMta2cJ69T|EYJ;cPUv8hRc_%spx9pD~NrIbBu)`adIK0`{v8Un{N)}dK1W;6l;JZ zg*b>jR@jPtqrOMv^BkMupHt9M(U zuy&rpN+cEY_l)YrXovY8^&UHh#fzjhmH zc#@yXH3)JI6!j5IbEmKASHo^6%ZtZr!D=}i&sp*wBBO<41=`subzvE0D2H#Kw!ZU2 z^hhst9Sbrn3RQgJqF{fR_+nxMLn+Cq4DY=zqRh`Ur2>~l8Ng&~6-&2-eOuTRI%QD& z&m8j-x!A(Eeg~aO`2SELV`Y31q{|}AHqBpNg$F=L)EIQ~6ymO!+MULGX!GZ8p!IUQkHD z{rl*}mg5Ow4aOBYybr&MFuLRLn@!D70@KoTx9;CCC6A~+f{%iNAIxp~RU44%pBsEQ z*?urQt|fj})BTqUG4d`PIpRwiiSigrG(_Pd&G}lrDogLog@1oygpHr;742F|p&>ia z&HcRk+wWX1t)f_86P`FoycRW`yl+yxZ2Am1aJyZiG)^Kex&{X71GJ zArIueXKJ&$Q)2mxC>v`+a}>FVp&TmvXg*993qR=W-`Jm5Hdkf$$|xf5c~JnUXDP^% z#aM=lEC#!-t@}u>AL2ytUGq14){GeZYxguub0V;0fO+RK=tZ&(t}F34u~xecUAmtt zf@~p!JDle!qv`xa_SD_mp45$+$gQ!>6GGy9xqNhy)TO0b0^l9Hm%CM(r@yRx1qrsTwEW1 z5IE{-W~jxAQXfL)N`dD44^Cv63@JZK<_tCev}M>3 zp$R$A0Q0)EY~{ujMm6FoKH68=Rt+6KKt8#FV<-t>O$^-i5+7|K$$Tw`yR`WvP7n}> zDX*^MApwU8*axvUsWKz_&xW|CB{Q09fBYjU z+#gvD^UK7my$*O|WFOKS0E7NZ3FYwxs1n5~={jdUal;Zl-UFlI>&QaPOg^>O%&}N& z*Lu*)k@W?oV1Cp4hbL=t-Wum^2P+jABj^kWBzA);0bWDww^}k9Aw&YmdAKA zDpTwxQojc>?Xxv1iBDA==pGM#zIW*rd)Im)$aRfSjj>sscxz_dwy`MxWOg7ndd-c! 
z&D@kpnZ!pJxs$5?EpoDUf(p^vI2D6I%TK!lJvCC>;bP|(M|MOh&J6P2bTEHc{|h6k zW-W2owvF-iieXcn1baD(r8!L3)y*aa5_v`|;*r=s&v^nOH5fDWJ-G4GWRl4m>Bo;5 z{Ufox9@l4%+p<;ttU+j#bxkw1seZmnuLeif`U>VYS*1s)y$hnJ83O+xC)>}}t0?F@ zW01T)eEHC8*hTq8?sqrb@I%0*oO>W>Dgd@)AGH}iw$B&&1QmH_$IY>@*# z*Le&P4_TOlJWIJ#Q>9j~ljGCU#O^Ga^+`Utl3Yz93nZjEdSqh#jS|PdTtWtWG9U&~ zf>@%zSt=?FM^OaG>)MdJxBF z{2lt>s5kO)ltCpwYMh3_hkpzKi2pRR(`Y$+`MADhWfaR>YtNRRT*n7M(Cg$AoW++ZNk_Y=Sv z5LwxYGP8l>047?EQK9~<`~In^L!KRqUOB}GwlpyGn574%PDP|ZRMRjHz+#(ZJ}yx^ z;>$YW+=x==FJ!fYoY+`_O{L+>n35rGcE#xJHf2~%4O9Ez>yMJKWf8!uW-NdUnA8}E z&3Lcj)dVQKTk`_8KQhP}En`|2+a`jbAYi4(CKYtYcs+|Bms5G|t1gscjFi*coAnEU zUOc$BtV4Z&knd09?Pj!ar1wX>)ikv4f86K=zT@PSL2q@ymI{t5ucH0*0D))K4nqe!!)@`KJ^rx&M8MK=a=-t zgmN1}RI4H!{s%J39xtd&hk=5_`=330U3G@;SsYDYM93hyw%CB<9q3q!^8DLgt8o3; zf${&S!WL*=*LX|B%`>+zAsJ;qJ#d+^HYO6Um5$IR3F7qVS37vWX(B zHBU1#ga`3yKQY6(93hFVwjW4p#RS>zSp}zQC(V1Ym@`wuMmh1E$ zva&{YSWh8p&aqrF0IXsbhX1??601M4%kA>9Ehc`Q?G+w619I~Vg*uZYse{|s9Pq;4;o;GnGNwS;}();zk@%SmP?;5A;(|S_`%ea(q9=J8@VhvloOc$?kbJXI~tK*|&%u-fgOvR>0H`6a`0#rFarer4UoR8n^s&taV4X2f=qp;<6 zu~4e__HcKE(YhuziK;$F9Z1$nw_wb5TYi1suHr}c>M1$8gTY#@2`cKYjC_{st9RpD z#_HW+KWxLPzx=wPKJt7M-dD^G6&9=QPgYVn@;>Xy8#ouf6?FwcTr65eKvvGj6anr2 zVF4>1C;Gp}$s8_s@;FpA0!~HK23(~)5BD~Sy({F@ft?gR*Y57r={*rGqFi%TIO+<0 ziIEg>9?)WQ6L&QN5+M^)ARwUBBcMR?+FI*X=UW#iRBP#9bG7%3Nrvv%F6g*5e(j(h zs#ws17d}DWJMV4gickeScE@g1a1Su1h{r3+KJLCwX6aaEJnCbZZ1df{nx)c7SLDt6 zZZ-1gfw7=S9iD~JsN{>$5I&Y!;I<>k*D~71pvo1$^CXm^%%PZz+kw(heR@yg(UW#s z%s?Ds@-$q-b!WJFObh>-(wwtPiC)OH6@Q*t9Qqn|_15^<`x{AjHUw#^Vv%Bj3xS0vN_M-iga8$8`bSV0voV*;qrL2!c zfXd`oG^evBeZj1NZ#%^|(}i+|QL9g~spoe}Sp(lS`e-325ytPE3<9*0+!8srsP^hb z&xo{ABF~l@xXG{zpxBsgL@O2T0WIk?InJ<5liE;E9 z!v5gBB35&mG~%Kkotap`g!;o_U_^EPhUp5}G^r4!Pd$ zHWr~Z`Br1yo*Y=zjRcnaY<2`$f7w@`Ou|LCagOnOFUn$X6s!-AE5g?1ns&I}?&1G? zQb`Ygup==YR`IF5`X*B)uhV^o+rK=pPPRMnWnI&<@S_auEF zuYJ~zcfhA~jNmN?F(Jv_e5qE>*t6wJdv*?CQwF(UAiN|&=Kk5t5$9N5kr`_@YEk*F z=by05vZ;tvv(H{H*FW>f-%xN{2LfT6G}3)~F7k%zmfVUuFEA?>+fH^%){h#y8+sR# zIzZX}A6su37G=Y&fr=<0C87cjp(5QibV+whgLDqfNQbnPNOws~GjxbZ$IwVi=YTXr zo@ac&Z@>HObN&H;<{DM-Iz0|HA0B?<16UwPMaui8ilp*oYAuDxq02Um(lQQ49?plvFTi^#Ymt>f1q&LDC`^>rABs~y1!8{O>Exb^-JI;% z&MC#+p3_iUKAA1YNDSZ{gz9unV`%35Sbp$j7k3g`xvk@C#^U@!i`=6reL#nk$6;t@ zfFm7OU^dN(?OGIsJ-UUYnsnajTxXD8O)}g8pKKMSo`*>{ROsZCyXw5D^@Dw3yqO5a znQ3-a`69MTN@5LY_I~GumD>i^X_jV3(KswCA6^PfymM4Anwu0^KYH_+C5y#lLYIxMt+Nn$8EV{L2(sDi zvKo5>YWLaC*gGzE3wjHS0bayuc`7Ujcg^%XOJ9Kl4Yx308MobCT658mUKeIf(;WTr z-6e+iE#5(+rAn^Vx3C_%IbEf|Fhz0UcAFa&OmwA>X^EnylD=v}Tzs09XMO%MF?t zncxBCI8Y#0?nbdTU??gbZt=3<_Gj<#aQ-|e(X@})yU+kfYyhfa&UxQ)Q;8 zwSosmw~8c6C?o+t&+x9+OV4CRk?U8w!y|lYJf(Ny*6E6wcjp{q<2Rv%@jqX!UOs%q zmvODQ*3)MscDpjJq)z2^peVg!hoOR0fVCbkGvpn7i0V81wbzmtU0pory~qWs%q2RZ zpcs0$!Edx}0xbE45vcq95Vuf`zm>6z3iH`aQzJ0hTU44%8jSJJeTzvhap>lM2 ztTZg*F{(F1z8rLj5ooE=rE{w%22BsL{6$0KmMnQM?UTrU`VvmJP7D1oYv284%a;hX zDKS&7yI5w06pa|dGY!aRh8X(gM~G52UwGD&ES<_qypiTdoI;7RuyO!A53!woS#>7) z;{@y*_)lbEvbbB-PJ{uz_{g4EKM<1r&uuhIcxfyeq~gUZ-WsU}Z^ReQ9DmVTReb4n zU(OR@+Lw%k>?D;CyWujnpI_?GlJHJiwi&ewe5-nj@PB!C7w9DluCAO%i~Dep5L{;# zDs$xPOF=|Q4<@1Cbn>A3UXU0Hq7%F|KqV@)T>QYb-r)d8GE*F?9yA*oe??|}0G%Ts zpFyzgOsg$sm-V0FAJQ%VJ1r{H0zPvaLgfekr}ghYDLOB4_e}1!Du|)KZ+?ycCqT$Q zT;s2__nTHE(@IXHV-QZlbPI|1|3H=M4EUmlyBMnkKr6>3{Va<{Gi$Hz2Sx_cG4J1j z@g&-!W`$9?hI(~4BqSuhwz!5bg1(Twy2i!&ETue4Fy%pGYzht_r}@OFfqyu%=J?N` zI5u8ww&-1zG;~d|<;a3`4X1EJt2#$P4d8DK zxt7^ueuQ~Vux#{|hBdTZu8x`G+KqtkmI<^_hH5dJ*_%8#lqLG3%;TIXPBvUX%Igc) zVzs5luPr3+u}E@xSN^qvX+BHmX_c5>30clrE()&qG5v+{Y+dm|OAgaZ8Sm@joU2HkRX* zr6gzxG=c=V(TZ?<~VT^C>%Fr*n`kytu?aUF+E0vMxCVHV_0$CRb z2PDQ~aiT4|Rnkj9nPR6m_c|?bpdbb>Te`6) 
zh-488j5hFgQg%V)R*h%kW|!t2i7+uMT^s#|X{9%;dVgYreV75E z`p&Q*!^b12A2~+1np!S$dHZwNG?egj2|gC_4hT2FhOcF_^*}R@=kREz)&9icRtj<2 z&C^dN8Sw&Q%2kLVw4~c7k&)|V6h+OzPH6uSXx+vY2d8bv^nf#wU>B)AGF!(*8r=tn&HXUo`8L2f|Zl4o~0$aZWWCjzehui-!sW(`JpihyqX`;MdShZaU<3ifkQhFsIC`4w#LsK9sqUZ^dP9B*K0X19R?$5tr)daDzoV4(*U4+zO4dn2@Kh}{j z1hN*EsLOZaM(|v3ST5VRHADL;t0h^4zLU8t)zo;nlz$zFoX`JobEDsnpf3^0|CJnm z{|3LFwy5QFwSI4RZ1L3cR~>oDGb-RGTJ<;onuy+~1~^+lup0lJ6@Mp#C=Q9$Y{V#o zf(dZH5ENfmxvu^(D%!@_uHh!y(CIZ~hx3_9wQDx%IdYhMPsQ5gg z#6Vbo_o%Q~&dTjDTw(7yu=4vI$fZQ%RHWF2*Faxqz$3QcAMQei_;}a!iGP zlEX}t3E?Q9cp;r;nUVq(!s$zPb|cFhTJ6_%ti!)aO)gxZfIN`v#en!H<96gAywm0c z$^=;V{BV)&RPdg&5|fjC?#>4JG0w)U5V-_yJbOwnJYZ=E#R@Ih8-9$0 zJVMV|2fpmK*TBr#XvH=i(!~UM>G(uvC=Vi0E*meAOVtnYIj>-QHCdW14SYYMUEOGT zd2@6frrn8cb-LXff4z6?SJq6unDub2co6|vQuh8xBRiD#43b6Qojnqj>$(CGIVR<< zwTKeVxZUi+RL;O2J#|hLy-(YVFFErP?HNw!NbT9M*4bwk51BOSv_cB+1-EgZ6XA)_ zil&Vhur+hmfjBc|eUy?#`SmkQwz>~0nHzR_imta9N9QlJRWi6F*4U_k_d%rzO1?O! z!OxVsJ?LX~{bStE&mymakAb?A44_e~YiP6$?47HoMOB)qefuuc3w3i0s(e~h{zmf& zx}Y>+V}boh$c|{qdi)#-7u1q9o(Vz7P}UX7Q>ILp8PFlmUVtY9E9YKd0C>X1!9w}| z<5VER$tOlxx$zw?dg@hv{QO?*q-*;otCWi@H3522#`HS9^6V2H0d&RYyOL^UQI~)e zh;O2(cD0_k2?sj3;tXRpmAdUimH8>G>5i1xb5R1sw%#Quw@a2T5#Ev{2zNIUj(cB1 z=GNV_T%U&PBZTDn0%VezN&tJc<{d&$ksnhYnC!H&!gDLE^)c0cRJ`%SLIR5QL_y^j<4U_F@VS#1SJrBw zRIjD0lANvx%?|_5Xy7z(Lg?4_< zqL+@l=j|2y?%kXXtI5NV&J@d%XPa3S&^;LO;KByhqyAtiHphcP;ryxAZAH1m73|vI zt6!$u-XX3IW5!?2?$bBx0%0w-hcodaY%}-We^pt<#)a`QYLyhjY`w}6+O18rgU^1K z8nx98B)ro!Mm_kCR*v!`01Y|kDPYHpeYrYy9__9FA*+*Q$5|ij?qXz!HN5-ehAn#z zQH_={vc5PiD^DE&O+ze>`Ddzsx4=yBC#zI=JBI`=wCB{#egW5FoM-KUw?<-_J%_b8 z>uLj9r<8$=Qgsa1>B_)XKf~ZgC%~#94cu4k$O7YU(*yBS7r&OtpL2+Jc8-?hROQBJ zP39)pX$Kt+uzq-*2>`+eA{BW}7PRwIb-f!okuDJQi&s46B}1u9Tzpoyo5~&7_yLBkxIGGQ0W#+l6*NJ_LhQwLU38lr~@`xYyj2__c6x4#l3WTc)jk(J2qH9`+>6#s}b0c{HMwH+$grwnk!k z+?)rNZVk6faplbJYgC2GgwU_0N~S7zu*IVE#46tOU+-1q)jd47&s^EfJ6xD3-)8n$ zi6~Pf&^A3?f1c@{t9#nKR6MkqS=v-f^7}`jkRr3-a!?W1+u#q^>#Rzu6rmP{K$Cdl zFzXM^c)YRETHZHdpoO#NN$TwMD4opFhG6yiq7C->GBqD+R>`V-j5D|6`Eo+kcSr8A zpXXub!s#)d!*AW+>jWfPppGvi4bilxcY!ULQEtTKSyH#2N3+&CJ}h@#?2l2mUZ>t4 zYHu4CXqV8&bzjM&W z{hBU!CAmQ#rPg=$@Am7W5SyH3cuUQIB)L0(li9lE(yzB>G%>O7=q}o8UzL`G&`7ig z{>tIlI&8x8*GwDvh98Z=vR6densi6KADeS9u?mtg&*&H?Z9tIpY;1YZX1_#p*pc&d zmZlnWA(j>?ICJAcEy37g3N;3tNbD??y!nz7M7%fsv;Sj4F^Da7a{m;hZ1 z9WHeNtx(M&_cB6>|7v*u1GWGS@6KoQfxf>w_4t3_7F=bP{p=H*dw~SS zWfzn>333>SjP{_l^vyJ-n5nOaIhDPPT%x(tKe}eWiRkl-0Q3vF1|=8VZ5(&8`$a>& zAI+1&=riY3lqrj%UC;bnX&xIp0qK3UB=6@Ae{!y!)ZsQ6h&czoC?5c`?ie>OHy^xP zZNG()&-c4DWHVGPr~OJzV44`V_A*zj|K{FyX}ZhdyPa=OQfobay&COetEEi%#_?-U`RV$&ru5lCAQh z>&@TV+QY#zIb>fW19sut`2K=g%lD23B5i?$yBdn*RX2{>J2wOr7R#5C%9j; ztg2kZ^yR9elsk$h0P-X-s8vHjA~)9DPF>H2d8ruR0W0D~hr#q?=-1uyFm%uCQ>o*F z5UJB|#Vc5kK~H2ujikE}~|CvD`I4r7CV<vYLFXGdgR%1u$kl7n*x&`DG)Wibvtt zE^SMNT-2?=p1d(i?0_8MWN95!QuCZax_CC5-y$_l9R-oo24qfqQ?$4+>~f6(e#rmw z$%I9+XP?s{t}89gL^N@mqo)g4DPv&xxOY5BV;q0RcBA?lmW(#hEn`q#uk!LXwsl7` zqCH8%bM42jhgRxQcR{18(#z_FnC!yM_ydE6+w9Gu+>OVn1uw4FStqge>XOB5Soyuk zJ<{MhZ@Dusz*fzXj)xm|duosf?_VRli?we_{aV&T7ID7roPC8o3rR|;A@qtZn9tm~ zba~LR@qsCe>dyHH=evMkQ~bay0T-_?HU>pat)?GUvnmh?16b4&b&yb-Ju=;x*P52Q z*-nY0i`ziA(!b-04Sk=qNZYXdf7Jm0GtR!_{zEvy0vhape>W>3>%zq?1jOXQ7@fz7 z@)`uZ20=hp(35s=jL!(~v*Ex;32>zz*i%Sm`WyR%e+Z{=W_iQ5qnI6qMDNt6w5W_4 z8mAEg#-@6@8j9>%!bD$wt{YDj5v91<^|NA^HZ{GQve35B{M3WtE^_zb0}BV75jlRp zM8YYcZ{{HoX=y=HD@o4cvCcRN7&3ZDrIXl$x=>LzCjMDSk6dPFPiOlQ$ip878VxLS zUj7d}T#7mLL&05Jb{t6=j8sW0V<(UiDNf|40V^76)TnxvX*~SE=d{O{h7=u0;qv*8 zKGC}ftq2Qv745Ct%Cs}pBtkZ&ihbDP866C}5?`wxZQJrR^Vru25uq^`xsqF!v{lz$F~ zDAXr%)^lQi=Fk-Gv;CT>%7aUBar$7_OL~g_WI=8GgrK8z7vA{B`w6n0*01iFlY2$$ 
zgz(+%!8@xLg;rJGQd->~Ypks@gda5vHHE)4+V^i=o2-Ba$*DXCBsfTL{f9hP_#qFz z^MSN6W2EB(%L_4CvXI&24uH@q*2k?!K}Z#-p^QVb0W&D{YkL~ni4#+dYJu)FG>@_+ zkKA^Ojv~#e5!rK8y>E;^$t?gaN+(wyzT(Vol+3GY!KMI9f-+q4eREq93$@YHE2&txSzob9%lu*f_r`wc!}d2S{TtwXCOq5-*~-=wtY zQK)-&R^Q2WcJq^VqIWDG$Q0d^nKmHAJQ3Y3n^Vvm%H1~_bnU{R0XdWXxL1FCO@kCm zWOLAB^Bm@TXLVUkJ>>Bi4}?e<%2{;Q2CWqD3c4nN&s25q(2mXC$~ zJ<=zl@N1}AkmVhxmf!%cjINIkIbwfa*^6s3%r=5+gEe$#h?Q5lWDM@3;*< zF`1Zf?2Q}b3}Q$kQ1rIbXj5%|ktObctCU7e2!gS^VoKCrFYJX{F~P9>!~BBs!vv|L zoJoJxI_5NW#&%tL98dJ3Ve*!Lr40HHCNz97zrKeWR{Bf{L?N@y0L_Ju6PRG50D1{! z={d7QpIA)cJG#&)0gJ9vj?2HLe%pLMP-22=LHB49nbNbJcOx)F{ zlrp$U1K3P6d?C_||0J`UoS{dqonj)B4&&+vG*h3I|da9$QgfEGU(@HSU~6 z_euKrmXOVyYgm9RpC^9agaQn%sm9`f)j`RG{L%l|q*D9+?M9|F2C(h_izxpJZ~|t5 zqzc_)e3nGTUuTH9K+I-9e}j=Eq^$>T&)KE|c=C{fh&Au+?myvUNq;>5xnB{e88yw< zh1pV|3pWE>S9O8#e9`dNZ_0$fRI*m(n*SGeq=Wo_aHExTDj)r;g2k&Nrpp(r{14vr zr`1{)x=n6EE~T6Dz*Gc0R)<7Fu@8;=;tGE_sF%ASir-L*HUHGU6FH*S2v zMM9=>@Rs&H4tFvUg zeGhSHAL2xdERsgwN4UcDGj49b7BV>$5^NaY&&&e;`fbjU#&bj3-tx^rG|Qmx-y6Ba z7S_Qk*%gLOD;m$b8_Ej?GbJ*UI)JRF_Sh_(onAm%__ef^rFKXZ@~|+OL%Q|FYWOPC z08u)R_eJMUBBqFTneRy8gwTOc#m<<8`>Ww;{QQ2^FhdVBZ%{3Q?TN*r&uVzc)_3xz zehgL#=rRyoaOOETSHNSTciJOT;-NvK?%D?Rz^av|-%)*D01>&_J`uV?@!zvY83*wR zipovp;HxazoiX!)rw$iA5d?@+An33a&YMSD{=m%iyJnIBJ*=@6bU5FW644p#yuDIC zSV^MKUTtntHy({5<2J|FTu#XLY_!t8S>{y+#+Z6UR+p{UDqSh+J3Soqy=CwDZhQ7n z1JFQJRSn5FZO&p?wcM@7Po}2qp>leuAz&6-fSadQDsE!5($5L|fZgVV^;6SuNVN%w zhNu`0r11L!&XtN@BCTGP+!Rr6tAT*BXf_9rECJ{!x-Qh-Cv_Zs>UH~g&B6mKwn9#7 z^^(QB_te^k#xOfoGDPB%?;g-)VB*rTpr2v3k0(}yx?KgVd z*kH~5q>TbrUsX3Ai1R0A@3FWEio1&#AcGW`bvm(_en71%80>c8 zOq|~l+a3O&mgnrzwZ`zI;14f#w9YAut%0nBMef1^@k#f8A~80?A%bC`T4ZJg1iyqo zrXy&_Oj8?^kFtj4&peG+9;dIpaDw+=Pj)!HGiyGM*Py~hY02(VN(14ap2 z;pl6^wSk@UW)1r+&(q1B5Am3+)IKao^u8L-sy$rURpRBhH-3agsgTI}QNt_aFH9 zL?FpjtS*4U>uCM+Q45U*ICXvzX%~A58}5!$3iBEYdhNwWCHuaVMARJp^sel6p_Dlx zpy}g&n=$+XP>Cr2+8t_n2Dke_m~ljW$bQaOqyCwb68$fFBu_dpP=f^=+~Tw{PIx{; ziex>hK{9awntUYq*_`p4^qqQzHV?)8TIo!f@QAl&V7D&8=Hhvq4N|Ku_skdWmCoyK zM>FZ%-(aYCu{ZfDIv1H_Dsq*go~0k~8}u|2lOC~PZ=p&q-$;g#YWKp=A?@zbCpK9m zo;w`iuV`sylqZzc`tLeM@)F~1_Z6m$fvV(x|MZ`{FS=*|Kql|pvvW#qR{unbqUuP< zP0SV|6No-J<`5%2RR6cE@E@-3FhQARJTGr6?o_Z*Zk_{(=qB9UuKAFXd{ls7yek_1 zd*cxnnEMr+Hxqf$KW^7|!5I&YL`9YI{EaHzwjqmAm{!hISaIGkYX%~7^ta4I8x!7^ zTo|+vlZUuUL!>gVM|0m^1_}gGD`!5b_r|DQP|@qaI+0LZD36a}C?iXznt0AW_KGcq zweTZZbbuhCoh%puddp#Fz;&xi{oJ8A2e6YvJ377`JXsGl7D}Mo87m! zB}`TE7GT+-o@mO3_qtq{zQ!euh1SyZ*F=}|+)IT76MlOxbXsiF2ZVHjeRtHGKN16! 
zw(e1h7J~nrJlmBox&m>OuXDy7N5#pS#d1CpA_)RMZ-W(YVUNuWoAw&`;7 zpuh674@VThBi1R`MKwk40Rs+{uN~NtYbi>RCvj#|+0-XaiFr=wvK4qh_@V~eEvL@b zR_|%QDFq({oomrACbl7w`{Ng0iS^ENyUYiFUWmQ4f1Jeqb|Hb=JL|X38&j<{Va&E_ zgLg|t!FWv>(X&pgQ8qz5n~Ob$mn{VJ&U~o>vT&&h3zLCuvp%C+6AWq>yQzoAGlLI` zmi$uxBF?k`lIAX8xrEJV-3XXpdmLeYZVzv1JvBxlPmil~7^`ROi>qzr@bC=#6`pq0 z4}v$_HtQ9BWq}{>3bzcx2Jo2mixT-3V_)vcXlcy`J}o$vI{&qW1QTGI0702BPL?Ef zA-{^yfF~QfCu^#X_e!)b7rXXE9u4zrIXAUd5{bca>hwWyz>fL~L?x8^UY!y;#REIP z?!{m!H#IXjq{iiZ?_5fpSf7vdn2E2cnD!XH3ENlK=Uo&HfYSh;bB!x9;{Af)U|@OE zg55TGw}0Uf&S>#9ytuQvWsUK9E>v-y$vLdz_hDflG8mID*e-fYzsCKXU!i}dP9S}R zT-=u)qA@yCr-(PsX+4U6YFq5AQF|MhOfk1l{Ph=6uSljKe4;7&y7S$)>?I5S6Yuxa zDH^q=pe^ulw^@upq3$#)?yxSCK9g#%c=n|vzIx|V(`J&Hi4z44^13 zeOlXPO!wiDN|fE8^(DA@AkK2sX>=n{7d3&r zp~o}L-exl{e-iw%+Gbo7L@ZDcbK908T4hOZPfT@D?lx`BBllOo*PuCqE?YEoC%Ieg z{ktd`2GmPQ(NSWl{b%O&sOU~nYgv-4KI&Kkq>qr#s5_ad@<~nH#&nmRRhxp zO|2u<>v?jFm@IR`z9T_B-*oMS4_Y#d=763-*bRZ-7Pc~j|Eyghh_tV^O8~RxG!JR-t96e~gzIt3Z^Pfbfj*{znK-PMKiNJ-Wbyq2N*4fd~_Qz40eRKEy6$ZVX=3za(M9(k?AV%*_KNG zFE~0;o){%Prezb>9iboH^INUVmZc@|3HY|uh);7_5pGIWVheqd;|vw#JAnIdJdd(W z^KmJ0Q`~)TI~Q+B5Bl&7rK~i6mVCK^_MCuz#V11f00;}JhOkK?PHbkmjuU}GXdY(0 zf_;8OZ8y`4v3jOA3~b!s=aF_w4_DDT-s6hZWYqWU)+X<>Dxh%bxaIfh#hER2SXEgk z#{FCAqq7aWZ!ydR#`L^FjLKQ#IbbV*p;^kKa2Tt*f&1-a9}u(MU94uj_G$;p?Etyq znZEe1*m5CkTy6g;uczCZ-!W>SVlJSR@OTBB))w0fCyuE#O1gEMvP*#Vr|ClrbT~$)~>@wN0C#z(9Bf868gUq4{ zIXI`GVk_G19X>1K7vQkcIP82fqZS0Av)G^KeKnN6RXA!9Gu(+7rjR9B;thy8zv@qOZE1~kocFdzT-(|=$(Hf$&~barK`xz*?~06E z66$|&Bm~Ksi_S&YXNtsJYxNzoS92T4EhgLu=UCluh#G~PKxa3+q}!%~&Uz=%W`4xW zo{~%k{6}XshA_aQsdB-RKjg3r^o3)mGc4s5*r01f13O)p9!CbrmVlkX@aNXCQPIy6 zUH5DNF2#s=xNbe-Li(&EjS^5#%+Ox@w=H?&)8A?wQBqg%yc5&j*(lmZ&b8z^&Bpgk zYh#|Y==>ymJJ(I8xSs|QMl`r(*e1 zr-fFz5iaKPEF>gi={fp>rMwqU^Y>igB`sFVLKQ@cL0i559@2L6soNi8hNf2Z37N|O zp8a3qYU4ijqqQ-AS@?gn6yLKsfJx>Z_&%`z;jTG>;iW5TZ_{VLylkMmeDNc=-*)lZ z2SB4K#GyY~6aSfIFaP96^t)zWzdr?Dq$qTPb2CX*u=mehV)CAeU*sVdS?I>G2rYqW(LB;dJ+B>IVka z2>|LFS>r!8Wht}O;Z;*2-j1G))Nk^Cs8<{HI8on2Zf%To!XFt0ZpQ>9wOqwb88(`_2-=?_qz8ZEk`ki=p>)>`AfT`qVXy-bf)HXpC zJ=>@;P)v{njJ0l1E#ExWAv@0(c}@dP+46H+qf-Xh=|>#sL*?tU`km-P5d*j81p{Mq zGKF%TRRt&uWTaq@O7Gsb@j#!4ILWVak*bua`fo$N4cGIMIyXc}%-GHRbr0;PEZc3K zvPrfsK*r`d*9fwQEHAO;D8XhqoK^}Fxm{|C6){{>?iYmPRD093t1%5L;(%bxV*j!t z&mXv?Vsdj**J+zbdTD7Sh7kB9LbqIGcd?@{*W|@xmBzzLU6f$5>~Ν89FDFd70C z_%2nOXjZPS;XA7fca9|`A)PeWI+>d77^iJX4_oOv-iV7?9_w<@F-BRT2Y&3z@nC30 zeq*dC2+6q&pPek&nW3A1FBsMHdsbIzx7sMFTbQ=IWh_NV#Il=L%Ro;Q`_GRw%UC}m zxAB)?W`vLRm~ARi@FCz+sF@b_yhN z^;n=k7Qa8FyM9S+L2hJx4r?AVx~*>7jN~+V`N{4Ly87yB2fLcV_bw*CGxv;l_F?$H z(yA|0uM6S+6|FU+g*L0GujK4yk_SMrx4n8WaB^Q#TwlP8d{)0v%lSj^%BG++v+v?D z6mw)W*HT!W*{2g^5xh+lM7~|Jinr4a$VEv>8H`(6tUz}B=lX5}3(E6i&?V>P;6R&_ zce`B(dQW_&yvRhwI)?y>VS$1>I#6kf@Q-z`X}fNRxx=-x`jQ^+le`_+*9%VIEOhM` zyV<4cIwv10%NK+Lo0AQ|eDD1$w(VuH+(%Qk9DW__5j6PET6E9FHD7v^Vrs%6wLn17 zm|P)UQK)X9he2(vz3&dk!?%4ftu{k(F)=Uu4cRI`{>vHYsN`?Cv){uQi(@U*2t_e^ zm!DxXb=N1>!t47Dx1O+Dgu~`QF#kja&?b;WDrb_<>2DWmXTYC*pF>;91#YMaL?Pmy19)9J0{G~X4jhT zyX zAq5_UBOVPwPBsnxi*beeN{vDv5A;P_2P7frJXNbr{@v4Z)(2tlHG1}Lsgjr^9``QE-QRg|&tCx;+KESNm6j=$S5I6(Gg z4oZYhq*BI))#RJo_hKU`=r2B9#c)~CF1f(rETDabl8_0?p0)8P?JINGe^7>PH9Kym&^3tN1$Xf8G==AE6U9EEp&?P=fVhUA;L4y<20`y z-e^SA`g!|;>rc7eh#iW*YFMsH!OrqoHRpV=! 
zg%$G%=j_vj{IbogrMShjr8`3i?d^bT%l9t>Q6m_By-*nfE&eBo!su32v#*~UL9HF8 zSg)EY3}>q~GxM7KXIoo-rSjsF4U7WW`flSAao3yQR?OdD5BCWDSW&J*Y6)asJvr8i zF8=SG3EJ=q$)GWq#-Ky)NH0f zzD;|qz{j@Z}W+pIE4%URzET2VjQ+aXA zw$m7A01Ec=kg2afuT4GDjvX!MqL;|ir89Xft}u^UR6!08D~)n}?u*&BjDtm2Zxr3@ z-vLBGHz?z&C_JV3?5c(RjdPKo=ux?&0fGK>|ISq`o{9(5Slrk&Nk5mr_I)X$j<@>) zLdpjp_IDtBDWJJT@`+h*y-|nQu}pnD!Y{d6(aM&3oge0+Gc)-!=fTiwcjiSjIEskugB%q ztqy4YsM-hAOn42c1ZwRyY>h77qJps1_1cfg-8MNhbM|lGcZ2v!CNzEblGM2>tleF? zWCw*+wi-oui+1x_qxb=d_?AZwekA>Rp9^BsVU`PLjNSFj{U~fe=LdH>;LTbnM>BCv z6yB3r+EREF`8beXs!lYlOs0SBIcjHv^yf?9ii9>C1X<>#CspFt#Db3pDR@?gKAlx* z@Wl=v_?<7HI3N8sVsyH;;5f1PwvMgau%%95P1CLl{qn&F#^*a@?%k*odI{6uW6(gN zDdNuH&+xmw0Q&+1q!?(4;m`0tdLAtD=I%4;h|F}#0PKbx7@D)^XvI74qs1s>6oi9; zw0}67sVOX$JeK|M9781$aCJl3tIMdl0#^o0f!mv;!;hP!C`uSO?@>M z>8|RR#pZy-^;j*brcy^g9a&2;ibL0^{CU$0nZPMaqDNeSIIolz zuESq-RX5%RZgL*!Q|EY}7>w0vPY}-iWXP%bO06`Rs&XVS)Wnk(M(*|};&tz$Kb$;M zcJs}`^TzT{>MfPLyr>;7K1!dXI$_ehxMciF(gas(WT`=nTF#lg9M|57BWg}Jn18hOyEq2Z-f==-Jqzk_T<0z=>BX=+jE`+vtB+Fx3M0Z4(g zLSl32C?X%bcW4Rc zlRf;BoSY1|7xtnpkRkwd`WEUdHOF>+Tt@n{Bm=ygviC6qgJ>PZ2Auq{0y>JKXNZdK z$Jw@MqtI#;ZMTZ(v^iH!SGs$?dDNCH83#+I<34bP>i~!Y4>5pR=w8S9x0O9XPgI%-zU0&Ozt8*UDEsJVA-)-$S4^(&wy6|YtV z#O9=zYVulISdAHInt`o7YdLXlm^&(SpkH_QT{u~1h08sFxfIt)7TqSpUuPUTao?sb z4UVa24ZHqA7|ETvd5=n zy=bqxIZ*h$*l_yAFwi!MwQVq^Em$o*^=JR*gUHz(yxDKTuNA)lNw?#I%7tf| z`?==fxxzh?;#~{KsL`swMuYqzrc>U4IC-+Shv$;p)5?ZVDD>%ThQ86E zRqi9WZ3{RDPM5#k6?^TcxUtZ7aXhXOOLJgPH&N}%FUWG^O`N%%EB@NT8y=-OY>BGR zv5;T!psv%O++^qf37pn2ej`Q}+wP6u)YJG=u4UoTTQ?k?RzxnWMGK-&{n659UxXiEXdQfE>( zi95>(f9g43zH#qowjaSet|3eq4E<*!H+{wM<)-^$N#V@{~ ze0S2ifQ9l#wPmmGfR>{__Q0S{UK*P*1dWR)T747g1f-Cg)Fx>h0ggwEoja6(w3B2D z+Pnjh-wI33#}@6ZGy_pr4%y#fFhq)nx!(^KMfW2cRJVOPIsn&Vvo1J!6|s;D@jU z&8MH8v(S6`I~wZBj0X8$_$fEGt~TA{OARdlSvkfSk%_L(3}ZJPQOexYS=K4g2q(!^jOtk9xB9P!JOYlN|r)-^A|2JU}2*flVYDO3b7J9MKDO zPp_6r1AUS5u%6>AO87mW+TYHrNd{m>5@0SRss{~tKcBg6UgF~^H0>gO+8{Rf22#Qq z3YKtbtOkRoJQT+;=~;T*UA@eVc=)O-xRKsB?YE^(SCbX*RyDM`{Z1KS`)(ELyGz#_ zG^J02$N|_X=NWCHQiPl1N%~bL{4EMse)8woBFpJokfOZ!{_@g$KCh0EZW#k-K`QR< z0Q&n5Cm2%oLp7!DC&vEM?Wi3+n+M;|QvGvdbkxRka0Oem@tv#1Kqdt<%S9~BwVz_$ zx*Tk@lQL7p=L2QwB%?{jM}rWyYgsb4#%o5hne~h-!S#M`Zv~I!fv}1LTf+l+k|o7V z;yC_<7F#CqfwA>>d}hrBCj;o#On=zvV^m*z;k_ZirsKv3Tcs&edh{%w19#}BTOULo z0R+MWI(jB;6o^b~g?_+`&~37`3>R1%ZO z$LJf<5{_sIn!yeJ{um+VAwC!ujpxG#w+GGVQwkaUr&EZX@*Q=bVJ*1&rmL2g->L6` zmY$ZDR`4md%Ou)N$29n9720ezbm=WXJ#JxXFUD+jj*NQ znsDO+@{wbj808H>h8oCdYbzp626)eBw zBwJx~{UWlt8~=2--{(_)l8fomH9e5}Voc!+u}6?hf#1;r6NOl3Nqz0j&gfvZdxl?+0g)$OXPxAhlou}p8W3uFSl4$8^_O}hm+ zXH0fc5tEW8D=&(aYL%l$kJqjceJNtE#c<Z_n4+y3nQIygR+X8j4!`%02f=B6xuqp7B?o zs7Mpgvf7-~hSlZ57Wr9}^?zMA!i&8Y|6rW{*0C(^8%jIYV0pp-xT5t&M1H7Gt_buM zmZk5sOuyI>#tGj|92rzum|DdDy<`!z+aRU#Jk(C$gk6gWoLm|x5#O$wq54prGPyGT zYK2zQbwAk4jpD6&6yKH`h*LThT`N=6yz^PT3`x3zG_Pj|9%Y~WYJGFKo`WXQ`Fzds z{gO@B%tcc6IP{6BR8IxS!j8mTymya{Q!fYj@| z%a7k}S3hK9WnwgVcRvphHSzfDnL=bCmlK6g`!ON0pcno$HH8 zt8M2*@gDn^sLFUn+9@C4_q-hphz!eaVCBy-ZY{pm304o{fI0hnYmn00S>EZ;BD{&| z=2OaT#DFrN6Pk$H&aQf8durVM@XBmxvAt347-mfEY`9{feE0r1eOyFnKwz@eIeM4nfm_XspA&XP@^GhBp#A-`K8q zd|H@##55U>8`v<4%ppqlwpOAw)bA?%P*O)Z-MGm-TOR7&d`Z|=IZ3ggs z9tw<Y`}Z-Yj;0=Ki>yvV{Ce$c6c2wsr9@7z;V{SsNLRsB^Sl$gOh=Yuu1 zS$!_GH5tl^94Z;c-Sp0pZJG|v)1Bse^Z1zat`n-9OX!C+!hX#I@Voji=ZCUM0vja6 zF9hBm1eW@rMkQ=&<7;bCN}qgx7mV-Dx+AcW9+S;66-$Z2nE^He^ld58 z4Owj#9Q6M;U!aJ>o9n!6TA0&q9U=lZ`h1T@U%e?W^Xqp(F`qr(Uz@+eh(Pv=dV~?a z*3*(a7|0S9)ErN^8fX7qlLlYhe%uus*)Lj-&?PIYdYYAiXilcetaaSPrY-VXx{x{@ z*%YbR7dcjN6M%e+b#}o7NiS>;60%ZkIr{*~-4Tgf#;=fEZ|q&O$x%4ssd1&qI)B!W 
zDbO!heJZ*L@$Rm;JN@BwriRbY-yUd&x+;&d+Y}{t+GV9Klh_(-YLnr%=W~ej1X|x8wp^sg&u9xV6oP2>b`{Z zMHqg)l=Hq9z-z)3cy-_)p2C-fXC1Zy{lZwsHTk>k5^Um_|T+Yuv(zNFJO5qBrQ1rtL#7{}as4%2049xlm z5~zssJoZ+?h)Wj%!<9vCMvr(XW!v$u55K>ZuI61D7#&RcO zxYZ^!7bL!R8NfO>)av7A3;hjd)dF#ri%@w}m3^L+bsYxPr?9+HPCMtmA(_FSELEJY z$G*Uv{^s;!tKmK-pO?TM)fPdtQu*^2r&q!Eq@{9_^w%8B77twj) z&h#Vh=OHH>_lB2?%|hDteHIIyO5$~P?%&YPNL$0W)fQ5$Dm|Hbei(kovnM>5}&KZ*_W&{!0o!y`t5RbmTZwH>@Ck0A!MD4X?8MU;atIe;ym#y7KC= ztfKk7^(dONfHWI)5YJ^n($yjG6#CTttzF@Qf~RoM^(8ls`-!%CW?G&yXS;1rL(G?!k(kM8DCJ!iXkZ zUe~1p%q{Gfg`8rwAC^F~_90#62Rj}74i4yVZej3;=-W7c91sYR4Y*G$8w(Nj0U!`pLyArwxxrJ}N34=@DNR zWgYdNtrR2`&!<2B0&=bp$PjMiw~%L(5_1xRm&Mx+yL9B5+pQG9S2t`qYI8CN z8g}{9!4oJQz`61Yxw*Oa-r7tklZj3)wfK~Z8P7r2?K8S8maO;5___TLXMd4^1#|i{ z!{=!4zUr?x{648w)!lvmZ8_V4D{(Topw-70XpC>I++RN$?A%bI1WoZE#Zsz9+;@gf zYy&stJCFU^v2W;#Ldtt?=QDyZaV)u+4e0l0*2B(oV3YCuZd>1~CQv7OIpGQeW5&}q z0D>s(n5#SlL@TI}nrAm?Ve^TC>)}71x?E=uFz2K3EF@3YjCeVW{rT40(G)>eSor-~ zf&unP{!ibX)5QVTXFX`<8vGbW7Um0qmz@o4SdGWk&kq{})_$97yMFJs9@IFCI=POZ zSX~zBNte2Rf6SVO98_}#;wT$ zYu+0ov=ts&>*owtiz>Ft4ca0f5LFG*gI&q#@}_k?jEF{GQ7r!97Y7rNkWCm9qtXnwSi>O~ zTm3q@s`tJI0v1=s&sPLFug6MA#DoNv(CEm%8O=zJTnx%1Dx{i03uPNzxA92c)+*~D zHXZ|ZdELaoRoaZur~EdD+_t_vTrQ%p1x8f@?fDLjopF!*1c7lD^cjAH9nFwO z1a%fj!t;T@&DHAOX1_)}Y{|A=Te|!nm|$Pv8k1t|>$9lVpRw$+xdHhLZ|%^K8%3VA z>O9F%fIXS2x}J6|L6>)^`o#e2Pn36eAU`MXD~8SnlGo83@Gnc|g4r$voAogz^W!9> zm6jy3mR9_hPmz0^dnWmJ;Qc53)%;d~d}7a2bm$DXupH{#%Pq2Zd3TUHJgIC}%wzjB zu7v}do8rAUrYcUVI6Z;Bp=>pdg?-@}Tv=ju5=~n)47QlQJ zwFmbny;kxFq!?c9C7DjyPqc2tPaG2MJ?TVZn%p$klq@h|gz|7D58q=)Y9e8p$&J`_ zyj@c+x-|lBLj(L>9xik!;UP6ku69) zT+G&gQ!D+|p%Dug5e0H`j6V_c22Hy<*5kyw=pC&5^=YDZ?%-mE#TXisq#mOVU8m#D zRp`5znd#~z3wzjBcRmDC3$D)qDTAHJmjq`FBYr}6GCBa&6cJ+}&!8DX< zGFyFvABl(8y}qkl;YfdETOBLnF==!HxhUh`w99E)slKh4p5OeWzQ+ZmXWRLaoiMf; zO--SvpVC%_UyoRN-WpaeJ={ioaX9KM^x7Itbq^$)E<0<~-Y4{yKXg`_Etv1vs}_{N z9u9Dv51c%UBJVY|6tvzw@zA{xs9ftJbqPsbERz630B6E@6h(2NLFV^yyz=B4fOJ|3YbDwCFx8-Tc+$wd!JvMk%PoYGnT}3wuPNK;J@|ybg z6+X{6dfzgpr$QFxvdnz_uy&j31#a@Q$JhPbIIor>y=dHY?=#g;-WKTAi^F7*fr|ve zFD=6F-K>`99%L-+2>&YXuE!SIQ`aV0)G<2#IPRBYVk*m(a2VZ#^RaI-&&VG>LgY>U zG5DC1US1m{RHwsr>Fbn4Zd$=CaPe2DFC>3WU(sr^D?uz*?0)CUXyjnB9f8;!XfU08 z;Q4GG50*@F*e^P;Vi~yA*V}=+p%vpmcgCNugp6!_Cv*6&8asjo20j$0Z@XEdIW8Ea zG)YQJPYvGN^Fj2h7siiy56hhQ!k3mrZY%#(>P1F&akQMhC0X;k>-}E&A>r)ATKYyg zJ_#h3FE{1SGUwQ`>Xs7UKknkR*;}J4`g~>`j3k^c3>o5g5)DcVVDs8A3&fFYvL{aM zp?GQbs3zOC|B+2j6y1g3Oci3gnJ#c1jAc{O-DbOFts}`gtk*mdqan=Gui z+q>hRS8Z^#6|PxXKPSBm*sfobes)iA863b^8^?|Ol?EG3dMHE}zqU8yP>BQ-Xi2l) z(K1sbFAG{Jyx-mp5^~98c-wzP{0h30>LB9zHq5ww{#(dWd!5lX`}pEC_*XdF!vs;u zP6(>>yeV^33i>_Ul{PrTPDh4lN$&m(SdAcsrk+a%5|d#98Kt_}3Ho@z=hpBpd)JDNctl~2=9#V@xx-TW zU1HtZX7#MF-Vfzw9lT>E_Y~E))O|}u#8Vp@XI`28_iC?jz&*E z#^=68^-_Ay=uVg3J9cxz)#=O&XizOIZqCoqam9vFz`i$oyfox9gPqEM@tuS$zpD=8 z=_y|r_r)$O)w^xO!U9ohRQ9>S-~pIJht|(OM9-*NU-};7Fvh3CNn(+^{7!>>2z&QY z!n?fI?mxJ<`k)S3VQI#QbVR{SB#e1EDEdf4r4tQ%MSL1`QN;KxD%tyFf0qh)1%Om! zH1%9`cdl^dgij+mRUVGlhYqKGn(lRWD?ThTBU3qPNY7iZR&SOwMjU0HXGcRy7KS%W zz%?zXJK!S=EPdUPn$CScywlUgTpxvsyUS3$( zWmEIUSD`&EE!z_c<}}?$@AOuNs#5PLq-5a&Md~{=gYOHys#+ON(DB%-`O5d6Sr>3? 
zAkeLoAHr@0d(#1O%vLQgt&pw`GXH&oc}>Y9_llW@b6waB5;^uEl-_UoXdT?~DF~vj zojwm>!eoMh<(>+!?bUp911i#rGh++Xa^?)4#J#q?GU96Us%_&2DrfXyE&4nvC!=CO z&)-y)$M52rVb{2lYN!p>5re(39*e&^g$@py?x)sC;%!-XJ1 ze^>0al)Rz$$y*Z24-S#`lejG7xlS17BHN=cCtjs^e&@+#72cLZT^~##%r_A{K$1>^ zc*l!?)5O78maF|!siRzfsY=f)Y>kaO%3F_|r3 z{3K&TZoWLH;rv}hd0?$^>kXme?7i=Oq@%r)!f>)mOa4g-D)%#cZKHb0^HSmD3R}%d zV>(`sa?DFD>Uh%Xr%{c+%qr90RoB^!#?>5<%W1*oo<7+~ne&rN=qCx#@vOPKeR0JW z{Xlw*hM&)Y4$> zUPQOIG-LmhLc<*9c;kv|Xugo?!b6s4L%+Q5pr?+Vcso2%D5linNN4XpPa3|{^C0N9 zFMGVf6CW*g_CjYnU%ckWgv+vK$Vbs7m{ZJanFj0_<0%cs)$(XS)!JEh;@Ilu(ZpYr z3h#;KS?%i1lU2|$Xn0TRm}ZZ9kcemlu8g)_HPK?_>-nM}^5%U3Cm8AC^7wDL*L_mO z=h@$Hr!PwO(iFv-W$zo|4{1Vw_U&-o=GM(jDd~*Rwb=Lx|5bz4U;r^yc31f3|Mvk) zxWl{Q)RMc(=I{@>WjAS}!ilB!X`YLU^DHXpe*&KByBe%!52*A*$#3b*?1 zwulaA22BqfgI1*_ja&YOgF4qg;`3Jw!+rYXtmpIhh3;Ls&-?Y-_~h$bpy(GrUU7~5 z+yKh*uKDHHW&(LX97NVMaV0ymR4^m*DR)|;)pV>!U+e<^t8kEG%^pmO~#1O3z<$%B%j1aBw%Sdq6Nw7`Z8E(n@IWBNeChz^( zgaiMil!+!M*gH*PfX_GrSBi6QXSPylx5NG3k-aIoW(_NExD|Sl`do^PN;svB8fE`- z;UMN|wC_zk>+h7&*BXC8zon(u&1ClOz9pa=5z4X;I8JH$^YWqTn#ovm(*&4Bcb0i? z1F=Nr0Xt{nL-(k}z37&*ByIZOcQYNAO-2!^sJPqr0$x35PRC1husPwHRTbMQ!5m&j zn_Ap&Aa+?zJw%reI&6GnuvmYh*(#A!$oaJex{_yTswZ0?dI191og@f5`<=9Xz`M{5 z-gozEKGBL9Qz7?s}?qX144DkmFE_IiQfH!zPh63}rE z0r?F9+vobm9UT?}3B}#AfUaZ*@XL_xL@)(xhFU_eI_6e zn0{`UG!OK4jil+86e9y6H<*XZ9V*TR>RBGnlB8;Zx@T9>e`sQ2Eko))Te0gcYaiag zC;Jn>NIKJy1Cm;!E_S@3+e<_*);J4HJZr#SdGelfZ8Ablg}Gl;`tC=x~h^N;Dfn*9PC-DTuz`z2vs zDe(>!94EasN}A%=aBI}@4Kt`iJLL~v$of0p#-sqLIUlVAaU@rr&-N9_z}fwUx5!K8 zwP}!Zq0X3FPt3PPdJH-F#E~-o^Rwn314T$D+{s?W#t)}2-{Cys8wM@1hV&XNr0g53h+B)*j zj^MrST_Su*>R#hG*Ml|nmddTPyjP~d5R9z4n!E{T>p5w-#Eue)6|#Y)6=7-%;LICY z_LNxJB_J(%c<$5Cco+oR|5PUub^89D(}f+C>oAdQ$PlTnL!VGM?<17ZcfpxBNc}$d zz^2D5;Wo^nIt(*6dQNc!#i`)F7RUsaG5A`29mowfQXW zCfDRj=v}ohJ21CtYK+E-H_CnfC~6a|0lDr<7rW(({d+>Z?+}9()i@x}xD*)pX5VGP zF~@~E|6r3T{$xeg4cdCXw|y7$1ZF&8jV-V|IBON3jrF+Y&bjy2{)_ZFj753OLi_Wi z|1JA{lyq%Up$vP++2H(2-g-`)0*lSY(WbhdG{3+tnL+RS!%;#tLj~z-p?bHDnfn}F_xkr$ z_?cusK@JceY|hrAs>PoJ3Fwdyt5>+9atFd%1)LJn(gG;o&qa5Oo&Hu0j4{7*lys7O zC1i#dbFVQNm?u+bZ}SCZ;P@b|kUtqTSE~*laBs)%2qs+Wn144ZZXDLM%1Bcjbuqw< zfk}58q0=ebe<{w})5p2-NKukvzqRD(ua-+LS{QNt*iN>662cqCo!XWbq2j;xNE zFr6qrc2mBN*mj|@P^XwBGk>G(Ha6w(aFB;7=sqJq641&z)eNa+;&UhJ6A|*HX2nF# zXRcS-nYb>-hFqFEbgXSrL1r)V1We`Jtt=P0-a%PS$sub5zor~|nxi4%Jo=CVa>n$0 zUHLF6IP9Hwnw>j$x-+%jK*L(&<=4tvu#IZF6!}jAk%udbL2R#l$+Sa17291md9wz9 zFUyP>O#U7OJnT6z#gN$7C^UfW3me#1)+BEB32!es+t!7pZhtc*yKSt#8VnzD0y5&W9J-L*Bz|8G*TZ0^{I~+Zy(hICt;IZn2(4RQw|kT2 zBDk9})VX%{$%@6-dj5XLb}wPD+z{BSUGw0@rApihC(X9kx^|qyY{uY3*M{l(2Gyuj zC-pZ^5+JHj7!{9k^(qC=iwVa zMS_p1H7U6^(}sL%@!RJ#k|Z3)yVb^_Up8?rU8OX_ozD$IJ}vR*N51~=F-nx^kllOZ zt*dpE?Ek$l0Y9~1WVA{E_b6UY!gu*eOj1+=ejUI2_f;#N?27A;NS&&@LE(9e&Y>|N z74Qf0NcxFVeG5yR!cAwmi_Q z4hF*Sk1(+BX9_|Gm|Go#s&n@Fj#td~F5zT$4?a$!Y=XPJhWy(~em9JE5P@ugY#Dc8 zf>lLfE5UG9YHQWfux1C)P!i_#)I`{=18^jA*0Q< znIxX4*%Q++uSJw(!elMyE@Io;a2xg$r(|!eXBo%QSxu9SSsdAk9CYrUq|=sc+-?^O z0@p(iL6sAsJt)`g$JBBD ziU{-l*xX1a0P=6+(U|AYax#t_jQWfh%L^5b_u_KqwLcxH72QP(FBnWt-8yH}+Q5h) z54x5}S#l-w@80U{G=80vh8;{wXdL)c*+Z-2$bq|%G;}_l5o->UCCxN1aR-HHpvY!k zY{w3dTwC1EVgg4jQ9KxIoQyOH#82ELn(R$K5p1-nZ(_DeJ*Z4jNm-AnDz9R5}?r<4WNw;xydY zkkFYndFdpc997qL*~g-n@}YFreuy9ZaUTfrsbYU_Mu426I6_39;O)o=8+q0Y>mO3HK~=l%hv8E&YpCZT*ur} zdsCfhT0pjw#pr%M+W2@)*K}ChV%wPCD8Y2)xpU}!;JAu~|9K4~B>?2~{o(2t{8Li- zl<8TlGQTO?1_U-Ne8*E9eIZo~OOez!gV`8ft<=tGfjK|o#a%IjweS=Q<{TeFZ8oa+ z>3=Nw-j1H_Y^iH603S<|9en?vjd(_^`;e0&c$jx%!)(mD;OM zXjm@%8t7oU(RPgb;(B{@SX=<`<1VSLS+kzcyuwGcyP=^5PrOd^L_WCy_ML4XY%ik6 zY1XGHEt03*=eGFjk)#x=WC`qJPCxmG7A%RicnOyrdVjaKFt{L?Nm3iv!99p-HkR`#gp@tOgukO9!4NegG*R 
zWt$GftVt<|k>UlVadM~E7L@8Q2+WQY-j!dq0^%qrF2PeV_iSf+6pEa66eQiw2KyIQ0-GUv>Ipiq=48C43F%v1fqTy5hL{NQsUW&Q;{V|UCyjY1 zb2avc&>9aMYfE>ERmYa^0+~EVN>~u7c?x&kUTW!Cq)jsSb8t5UUS|;0)X)*ZHYykw zrt|B{h(?H&-+6k~^kmXwk2Pn(vRO;aUf+AijSDp0gJ58{kzphK@{=s5EdI~$v)C;) z*QD2k!aJKBK4g0X3HqAIS5?oeMBM;sCjR=kI%{ZOr0X8WCgccyE(@m8)&=n5PR33W z&#>f^?vZPfR(m2Jq(LSa$=zR92hk$Wdj^56Y-p}w%fJ8UrdOsj=ahX{gPCCVae$+} z-50K3AHiRgQ{9ErhNO6~dnTP5Qih;zAOw$8^|}jAA|=hk6#Rh{r)ce4KRNma#P9m3 zF!;{Zn;*fGY1n0>)Rv+c!RoEPtL>fhLs=<#4KLmS+U@GbIf&kqE`Ki!$M!@BPTU=M zP?851Mf-qY7rEMT;>X&lHE(_oae8kj0P+Swmk&YkTco1{<;O+Zd^zO`)yU z^%xtmh}&ts!1VV9=Tko(7dtVxf>i$&9L0N>6V@UO?Zi1SDPEurCp|pB@)?yJ2Uy!^ zaiFKPyYLMg!~pEBE{}HPdyV#KtjN9j&+TPbdtpu}pO{;*%omRRi5gZa0~=}>%zRj4 zmK|@Ms=4YL!Q}UI99+)>m17l1nc$L2?YuU7|ilfrrG%fjvR(-Z1N)Ne&gs zGU%0nWF0jPCUTE;&ycxNM|{ZPO`kjjKQ*4O(y6HKq(Jdg#6rbKmN$G6uVIvmLhZ+(c$UOef<3f_qxs{m$e_H|>lfTf(!$BFfO>}Q_gw_^y_fX$sBQyuK-L-|AbS%@bZ zww@J%U?qQ;_Wp!%*pZyO==$wS9;$PUtoG#gttgrNY~BQ8*5(*K6;1bH74M$)?Nsw; zRjsPwi;^BjH_v>(ZL3q}(PB5(AVr?|yVko4mlvrudpxGDu`M6nQ4%~a4f29o@6MB- zKLX4wMQqMAP+HB2f)nDXyQq}u5B!tvc=*!&XE0R7271H z&)MHIxGqy&_%t-^AjUQ&Re*^oLzdwL1|3JHi*J}VEiUovuYA>}QjrZJZ(v&1n^6vf zrX4w1K_v4odX>ACf56eNEX8fruvDsUKr^v>%bGQV)in~Nt#~+HqAg;C66wKhPt+p7 zh{iM8AB2Kl#8>K(2seUeTG&vyASq*WE$~1J67Fk*$-Xgt&?7{B+XhJhG!SS6r$gGx zKuhOIdSFlz?>_AyapO(wA2sBz%Wj&j;^w6KvAlzZHc~RAE%N>@RtrcOE=7Dw6+3&k z*fl;_j+@ncXUy6?d|R)Nb9!#!Qd>#-W`y;Pd=U0a5@cE59DbFNZx8`C^}++Zk(Pw* z`S0{mB;c>gfD|5c7XJxSll^n+sO6^^woS_}Dko5>_AGlS?BiMC=fV+Xw#)L0{L|cw)0!8gG4A<*=l{m`^apY*Rzi(kf z3HcAP4x0O@OGf|+PD_I)oabLr+S}rksb9=QfqZ(6%p{}^1Y0w(X<9r>&ngq53A$e6 zue(9Rzu4m;U1rT%QzS)P;~W+IaapA_2(d8KuWynF9e>1{9x2eOSp;A_ZL&*Luw9dV zS$UNou^j!WoBIIrlD$~x6=9$F-y)#xJ08}lTtfj&wsZj%AFmGO;wx$SI(2?~|5*q@ z28|njS4D`*70;DTIjtH_;Zg@_l0H<>hSt2j=HJk?gjcuI(W(5<=5KNKj4(fp&DCf2 zJoR8V#ER@_1H&cN@yD0DJ)0s4MTD_e(s~W^z|9O<;}eSdeSw3v392vIkdQv(pk2By z%n?Ca>zB2n2|DX9I|rj^!k#zo@+HI=cs+Gn{3YL=*PdqgH#g~@(g}kWyDtN!<`(XC z5(94>OhG~{ygeU31K{*To@%{}Nac^wPa_HT0o)9&Nn?HQuSk5 zpvle3slrw%&?q!jY!HG;GXX)0O~*g0{5*J((nZDs46Qv4SifVJQ*?zniG9OsHbEoa zbPCo)Y_By^ix2`FE4v&v8~pOaX+y1VQ2GL1Gg9rw*~HsZaqNPEy}4H+>P|nA(dV-2 z*icaOQ!?#|X$H&QDe>#ONk^2KCtgs^$Ro_UF?ka#_JuYu8E>}4L$k2fVY<4*EkmSD zo^bfIhH_y&V;X=+u^tg=^@D#mdEPV(K>D{9km|m=qtF3bTE$Y-z+az?>Fqs-Hw`tc@&U$#oDAB#kj(VgG9JBW5(~; zFjXy<%_4Oz!#>RyC`!%wraNQ!v{MXY#=L5l8?$ud(&}Y7+RmcE{v8tUz6S0f7=~zA z1JhtSb{=o7U|x6XXLSnalp1XD{~@qP004SU-yKh@{+%S z%)u$}*tyS{veNul*wBH`$bVeAV0OF`V4F1O%C#)}wrtmN0 z4$+_mvDL|fuhQg+kB8vW+0;y$aHU6Vv&Et{E5`{)zARRv)5ZyOdGvR;i&D3Q+!Qht zm&(j}RzNu}%%t^vhrN=n+V;EbHJjR9camDD{4Sg=S5HAwZUj%AWp^N(hfz|6K8+Wj zqg|F3o^D7vm{RRYg?jtZHIS6fhiXXeO-Kdl1uBORnP8P6BA=gh!CRPk@C)OuDlzz?jnLnMhqDeD> z%~NLzYD^`O?A?V{E41-tX$wSpORHG@dJ=-X!}h>o&|Pl@l|cjB_KAhkne| z!3ddfipoysBJai8HQIy(Ag)kzH>+h}Tc5octyy1!wWzfUi0F}yFyPt+Ijk7m8;HYCGL+|cnN?ojebQo*S6;W{gkcs{e4KN(CER24!M*pH} z#VPwh+~LNDmE#Z)+Q;M>=FIfJt05;w`fJ*f3ed?uV$%%k75!7@&ZlA96#tCOz+*gGqZxQN;*rwy?yw}Cck*@^tE3?9lOV(ufnwp?#a z4Yfi$!k9q&^`5Tv8!W-kP`_j4Vi*-vZK*aTlqp$p3hETdLHEVq(&Uaa^;|~a5|xos zTk&f4HM!pXzs=0AS&_NkG5ya7{0~;`WA{eU@9)_T@DM|xm69CZsULcW`;MI~%SPu5 z;686XB$`&zJX{HmD(;bfT=TM@gDi%>WC(4yXCK5q-2OfEh7^%`sS6HF+-C0EDFvQ? 
z*~Li3{NR?Ae*IUGc{}go0BY^lZ>@=dXvNQStW87`y*w#51ntj;OT$aa7)`0}QZzK| zBVRs3QZmO-f23H&_}j>u;u}>z4!X1q3&p=2f1Z8*hryI|3J7h^GH3^aLT96Ss)_fK zEBj~{E}tC}4m>{u&`>}}Q94rjZG9f=;Mdg%5qs!{;vfkE%w_J>`X04Y(KB+!+NG3k zqEdE<)2Ou-wdj7q+2I>${Wjx)q{KhWW~>;D+9ai{yQ#|i2%}hM2VdB34m@6K`$Flv~NSL5v7j-E}eEm~@$ zf5YAGB3?l|eFM=<2ye6I?<;_s+J3l&nI)H(egVuej9Jdz;p!Xnya!mo2ZBWszj`Bn z?IST3?OKkpyWDyFLZGJPQ|pbY#G1RO7v9jpv0G_cf3H{U?TcgJkSkxp(dQQn$jBD({Opt$-$zlv9OUkS)n=@?=}jDVa60tA*EA^faZMu= z#9;A!1AUQPu#xH@jGdm1$6A#b@;1$@X*+pI4V!rSPhJOzJzhW7KJrERzu0u*qx+4m-Er5iBYQ9}R66DW1N~A?lXgql@n*>nB#t?oYCw1J_k~?r1TdJAk8SxstJg z!J2Ur1${L$8y-JVo7bFRsndO%DPY2_W}qB})n?_%Ru>#7*i~grg=(C4Vu#L-7F-oA%z@vB3x(G8cIwvxizr${&cj0Bq$P z9Ap#ncD;#hPdS>K7JsfG5xi|Lj^mH8T&~n{IYcrK#`0WEmFt_QN))lz#eXF}vo_Ej z;bZx8XHcgN2BIArjJDM_YHU`-xVfuAA<%~!L-9|73iLxx$?P)0dQ$h!*k~Pjh&XsW zPHqB^D&1wJ4L$P`llVA99O!+HvRX2>7_ zVyDzSy(+0wK zk6mb??+J`FjA~&xYLs~&USebc0 z^oUs0c_;6gL>!A2fk-QVFazb)vC}(t>Y8_d!`9fBzdk-`i|CV=FR@TGxiOCCAG@vO zbmSTC##Q{H;j~|CafGgi_D0TTsdzt2e!fcAmFj!++Ox^}o^vEsTtX_FdGy1MfRlA$ z!L7)e^G10P_6X_3LF#xqhdi!#>C@M)2BpEJ)Eww&mP+{PVSs|{Q{(y{k5u3Jwv%OnA?pB-aR=JQgQa@`@N+vnG=dD zIZzmF=XSvceP#rmV1&?7u!4Se#rx#Hk8olM!{tG$M=hf1R;)LG248SrX)LN+G%y_K zX9bw%WR%dfE4iQag2`u5#3Olu#EO(`t{pU|2K^7=$u_LNrYhi9B%WrUt+mz1^;BMCxpgH9vGsSa!l~_{36L{g+?gi6g+Aq9n6s{&ALDl>A`GvD zi7(LQ5j#@nKuzs{@-OE>I6qp4BPu=?+XvirjK?m)W z(l`&|*KAQB;BuDVgy5F5g~4a(iU_JVBVP>N&RMYY^eq?`?mSVg_Y zB$DW!yfD&MYA!0LKZgm}$i?;npx0?`aoZ!;43(Y0a%TQFy;*_thD=aO=3Ml0Q_*XC z{P;QkY|(+E62J7!{z-#inis-eK(D^|)$^0}n@CUns0!L4RupTI{wH(YNFn?_t;qZ7%@e1EA2HL+r;_+7DgTDeF9 zO04H1=VIsigCogUm#0oocYgqaRyoo+wa05(6$XOeb!lCTz5lJ$({e-a9aY@~*!s14 zO_ixr=Kzz{id}a>v-@%MgzWRl0=V9XdmsKFs=e-cmfi@zJEyliX zFASC`dHppbV9I)ZN&eI|qQyATX|S@X+-?1!hn4Q}))peQU+>6+Tb9sFmisZBHi?~?6Br=8i3c`2%$z|dc06V8^$-jVlDS2_8m6`!K1 z%8_Pc&L-7R)#u_>UQu1W?=M_ix)DO}VlP}3b3G8hVVI|UG{r{DWc$+-IWIaA3Lq%}y zt371;7V_KUXNgPCtEJ2VkJYg3D=o=H2Q>ow3OnLE_dN)`ev1@(DPXTZJ2w$N@BWcwG1IU%31idbQ$pFnBME#PaxxgH0=RW zxqf?MmoiXntI6NH~${$G5usuA8L;hvYL9(cD zw~%?}R0_;Ge<-MR1S`qxk_j(|PXZ-Hg$DK7&tp!1bG-*a?Tdg-eF#kNz{& zdfmX2vrzX^P@(V1Bi@|!X$6+)U*aLC)!Lui-eeiL5nf33(TD0D12|u!DsOkA28Wo1pEU#P2W~Xe~l)T z4;qY(z3$p`j@5le-i7{2bXrn1`^=gJ8mP)4Q9P1qY^2dJ5^ zckbMFyliX4-I*>_P}(#=2fam#`aVr1a*+dUWw{?6b~yb`8`0Bs>J!6M=f@p{G(^~- zQM4zy)p(or(lzncq|Q7kyXR!e%dqv23Kp~Shyr)+o%CvaFQ*BGpcl8>=`YcE_t{K= zj}gz!6db67Im{tE#k9#s)4{X?QhN%3v1jDa>cuTQ1z}3z%)gp0F}bU^@d@_k-29yc zA#XHn1nZXU7~IwsngT<(+YRkQQaWmeco=>}@UaxTI1EUAFBsx^Bo|^St5Q8TE@s>< z#^}OH2DuC5LvtPD(Z$v&X9PS1TC&FLQeHy^3*ymV4n9@C7REPR3-i7Yzg5e5mZNFH zoVP=*VTzpGAtT~Xwp1R|ultrgXy6h9f6v7pO64356OqMO;ik1Fsn}C-f_AGr)_?65 zI|~GwWY#!JR>=>{UOMypAIeYwGmfVOZx|OPPC0!WJ?6NscARib#A~bpSV9K_C0PtVY!KCevbQzkF`&{ z*4}9C7^!EL{KV z|D=lj<2LFB`bbJJn7dnR$Nl`b$w)-FJ>zT?7?ao<$&P^B#@55DQ)YYe*k_b!)b?)_ zO#cRz;r@8*%6{~p>!|7fhfU~18s!Cj11@X(i3u*hQFU>{uTfCO7ay+wSoCh?8Ght4 zuw4Uec)tmCgVu~3{ci|vk$NHFygoteO8n?sh>o@&zA2BA{*4SRW(h1QltBZB8nrX*t)ZX*|LSI%(<^}GLAM*<7cao0IDy<);+>+@V zfFM@&A6`sXOZ!!%cXYPP0Aqcf(L6Pe20R<~?I;t7nHHHjX8I5;u>DMc4{Z!;E^MUN@blR=lj@=QB zg@3kN_E}Z^5vjEqwxi$ohc4Y!OP*1{@W}r)9-Cy^!v-<9(C~il2Vs-ap2OBpi(Xm1 zLIRPCS_>E(tV^F4gM{7 z#H9UzltKwEvW~zUrEOyG677pquB}FSt?};eux} zesj-VZcj`mG(xM^zoCyq58UHt9nWVhj$2W)qkJx>oMbEfJ}9wZ)a-7M4q_B|St@79 z;@Qn06%dTV-UBB#%i9HD)>(yHxox(C2QxRuoBkxjqK5Jpf;*qNFG{(FvUM8PLqqXBUtEp#h{WD@_twwfEjOkyS*IF=n(0ROA4%s4&uVg~*NsM%oMQM* zvvfPB6F=q-Si7H_sva7I% zH^HT`+w(@M?HJhOu3)&o9(Xbq6uV6+9V1Dm9TaIQGz0S^QVr|75?mFo)O#qU($ag+ zsUNJ54fR%(?7$e_E{AnM%6XHeER)I;!%^5=m9$3pEUs)ygf6tweOa7RC0XQVCV#D* z^j;G8 zkmF(Fnh}hS*$i}v6WpSXJ+bgB>FPVySykry&{^WN`ZyYqflzBHrMX_a;ir9&-`_1& 
zl0Qo}>o!OsXtZ36ECqF~5oQf~7d{TmFrM7l58Gc*OFfjPM*}JBwYRJyhs5()#m+2E zm2Xrp)@kN+JwMuWGob*^o{sfdUdC_27kqV14Br3_$YfQ@DebrcnIWsXAZmQ$&QfN|M$Y^$i(AsHfc#ngkCWer>w zgZywOcldkNJL<$YtZ>2&zI@(`?PRd-xS!?HRLzIi5Wm5A%Jv0bqauuE=Xb1qhF|ot z#BS!S;;ryx$}-(xrT*PWwR^G6Bp4)rFW|I$qrhhTP^5Y-Q}6rg#rt&fdRK1_eyb$`(~=Vr`P!XU+&Tyb|IQuNa_#pmY; z>h?v`QCP)dJ6Cvg@2koBeazWm^usKk9=|#>cCk~eU{Xwdly8sUXvJI94X*t3(-ph_ zaBOo-3#tVtBP?8Wk7qC^h&dAeF)=c|8)eVvCy~97-yEgfJk3f4o;7B*%JFZN#jwxj znLg0-0>(PKi<1D|9{KxS(@mAQH5UfTOjF!^?UrZhR$ZJW-4um3y?A*Y!DqL$I7?@$ z?xE>t%$1v$Yq57eA)1tlZ*NZ7*R~853~iU7XXgD6K+Z{3N_I5)>&4;Bz$`5pf9%m5 z&-mm+E78r6r3&_{heC%TMx3%`ty!;ROO^-SsE9+%0v5g$ATYxzdWG62ey_ly!~r?i6! z%YQ1stX^>8pc$`hDPViJJWdV%6y6Rv!+|$BQ3%A)GSy9Jh3xp?^}(DwZ#U-bH&8fR z4UxhE0s71^In?h9??&BNC{P2(yH7S+f5Bp-OsXjS34FaNH39WHBHTWw7kV#0?`Dc` zwn>aS!z7bMO-qc-T3=i3pXkpXyONvQ+f$%i(vQX9wh*4ZlZrN#aC(DADh}*+B~HOk z;d~=x77B(xr~cy4C%a6``?~25SFc1W<1h@bBofECtCtU`B4wlVBEJfQyV+gM^(KdR zcQnZ#e6e@DOAB+}b!p3=hv&!Z&vpXT0g##Gs|lWcsR02sR1P z(dI{)Mrnc=$qb%p`ZKPmQjtA;Q)^}|B3&@S1dwJKfG;VqEg z$uK90t+2iU4?pTj=7&b6(Wjht9cvSX``38?QuPWTO}4l#S@<{}p=cqTguG|3y<`E^ znR>%)gAxL24oV%H!TtMK$81SQr-sNppwbqQ(GWuMEVWK`#|>T^)CY-q-lUj~nK-;Z zrpFjyFTwaOfnhyWD-~3jilmrcYDTI{Z&Iu`T<+Re_2kig5Z`q@z+`c<=T;AFd}qx{ zx*@H!{TnWY@lnkQ4T~m1>fzcom%)d|a1rVI3HaIelj0i3+U#Ni(lJ5wcyUAa;+k6j z%mXI-3J!s}x!8)av28Wy?dA!P&a^$@Mc zf}H$ZS;KEsOH?$8+R10WJj9P;vI9ujzPX;EQl%`Rki!lcbvU4ei}f->a51dPN~}NG zBBgwfcw^oCJuj-F-g4IWi4pY*qj~SmGRKe`5%$jhwnA`g-b2mlt7ZB{h#Gj43lm5AP zaJHqrixshlkzfoiyE`7j5e6}A+5R^R;O*S^=9B-AuD1+}^6%P(2_>Wj=>|a>M22o* zP&y2xq&tUh5DDq-MpC*P=^iAcySrnC0rtiJj%PpbKHg6pz&B$3&b7`mNTi`nSDn|6 zk1#IxY2DLmX^>HPuxTzbE_eSR|CuWd`v#G4Gs3c3{|bdb8F^ZBlyLj6ibS6bp&ucfn5P4qlIaO>3rdU2*$O(zF%DAHD+ ze~v(Y)Vi@HzE5lROv>}&AF7OGn0R)FMN<5t#xLOTW6RxP@v?>Kp}>{kTwKv7amQ)v z;DK1ns^&Ja-Q=%j>yGp70|Z?CH8Cw2{Xu(q`4C^AvEywf{fopF%=r^laBT=pez=8e z^oCGcxt`Hhvrxswd-92iiSHi{WhfdPZN8-9SMAJ8LBUtwdEfg7)6uMc-$_NX=zE(Z zmRGKONIDbte961#g~fS+_@ckr+I9P<>-Otb4t-)$&eHIfYpt1h&{K#W+( zFmv+{Do+p-YAXIlgFbc=ERH{6^C{HTUUCv_{lgujy~vici925uIcebb2=$Y@FeyMF z*m%doIbT#;nclTn7x-ONu76KE2J}IfzI?>OjrO@s55v6D-NO_lwZe^azS|v{;Cr17 zJq2$%w|VZ`{h&Qpkezi5$HK%*+vNQ~n8UK=XsepB!R3L&shU`mMd1MNeK%gZ5?{ zwbX=?MTf4R=h5LsYEVRKl!G~*4zN4mh!$)LjB3BlTv)G2S538rVh&EB znLPAtP;9>BRhnJTuN}IoSFk$wdHEoA2E>3aZnVpIlffttwS&U{+!zePBX*SbRK!Lu zlQR#84L5z{?qM6sbUv&wOgI{Ey!7SUh)rL?iXnd&PwO;OcMSl5%ziE|AOfT8p*G7> zKx)q8Y^}MtBl&S5^#4~lB0j3S4vC{q=l}4f{}S-(BR$fig6L7gnvv~% z22QDnK{D3FbA3zr<>UGaN+%&~FXdM*gR{djfm5i6a>rb%h_LXHm6@rJPLk)uyZk!a zWA+u?yrZ>X{fmjfB>BX%=cuO(5%ad6@a`tWi-H~wmnWao60aivc_~1MrqFyEzu7ZUZ(HUS0%IA=;&h*x)X=^4JCs4GFJPUwsrZ>f>8u5at_)p6?}fR^9c#fk2Z>+iAJPDn2^RPbarm zDw-%QCIr0FkM}yqv33>4P0;CD0=o~1@3n<)#fVccyc{R#)TDY(T=2cU#iP=(K1(!{ z%p2DwvXwhDl(`Tz-_?P-=_ratK zNK(38yVY}J_1BQ4cGpT??RLXLwPnGlU<@M#yJ332^uDs6yjD=gsv|mKC{r%j3mL*OhGGu23=csS;%3}FcvL{V6*4X zvE;XoPAdT9!iI2C{pxm>=G`PiAmui6Qmrok{`{%sbn&hDDdr{N4`(j7jGzmLvvvmP zX9v%%Y3JJ)`T?A0X;nnAy zSeebev;c#!N}9}Wr?Jh4to+v`Wnb!P__9N2O2e(qjhB2G6w!n@`SekQ*mIro&`iu4 zy97{{o@s}@De14U=lNtNn)x}koj#(bwRnf{R#%T^hVP5PIT?jAnOocOb5+p~OjM2F z|LLdu!Ry;A`$Ik|=|79E@4qak5|Kd96G?Iez+e@qaFS?#T>O)c374uU4`Bn! 
z#lX5y8$t}`UmV2qR)H}UC)*1<{knZYLg2eSf@&bNw&wpT(?aB2Vz+7N(Qa|blyh(f zbIDua#d+$MoS_`;!{a7gMo?-P#Hnxv{>9Yr4xZFDO*uGU$;@socq~$TYRr{e9&E~S zg8W5cjfwu2IMgnn?*~&sGSAn*I>1%@v%B#YBM=VNVRJJdk&8#!`u4-NW%oH<{2u#T5l>0Cw4z%@z+e zkQwAm4TL{6{R$ed$lUAIbT5=1&>Aelb>zeYSa{-u8T|X>tl}dxq)eBG$4RJ77z+4q^kX z{>O{R7Wvqimi?^GdrsSO0FtRC6L772!5Lt?*46`Ph0txKaGHY;(mH!n$TO`1z{~r$ zG(sErJvcW(n0k;=tz1C!@w72G)WrDds{v#C#-uCh0}7UQ_-=c& z%|L5;#vo!+=Q40Yo4+9$6H;?Cr10w>*m_?2w!hK=TPtT=qQOkvyThu`Ofbsxo7REP zzfGUf(!Z;aM^}24A5H=YY28=6jxD>|cLbs4SVMeY51RHTdr8_q9|F!hvS7AIaoy7c z9B;6GGi4@hzoL2sbx;~3)XQgan^RjPD0UjRLg&|MZT^(Yj^JND;@!20t;!?FvP;N# zWcfUKOvZmK_S{1f?f7*6>$vyGcx;jad1 z@Hi_vY$A;RjxXl&E;d(*_i_4BVsXVB{|}=puSwD~ouflPSWvi7BX8#1e>KDjVeJ@V zE_czLeewBwuIT+L&15kI`axbvSdjy^F8;BEt3jgFbzjQ`6M7-Hk3jCc&UNGNUC6?T z=M6Wdex>6g_?Ql|_%^PHUwQOKI>;e1ms|J!i>c!XEek9MeFqNWd}Kvy7e(IjPtE7c zHI+qlKvJlHOr)FYwTJHAUcz_ogb5QlZS*r`QaS{oTC4t&GW@p@M8ko_36*M*M zz+>uz1#EsPpSbvyfCPU8C@oP*Z-^QD32W9LBmuYzbQni88;+N}0R1`4)Jsy_1f7-xvp zCz(F4PED8;*Mnb~L*#<$f?uMH$(|uj4I^NM0Y4<`8yvTu7SA*D@vjWz7I`xK*8jH7A^(D(3dS&-6*Nao|KCzF0o=$X`sf-b|3_i;I{<_l3zUeR} zl;J{mRY}czzRDP;ia(}gJbxABsj@$_AtIjJ4GtJIP2L{4**S|Ba0iN@?HIt+yMSTj zEuwiZL1p%NyEmHl=7FxieJ<)?Ig6F!sX^Zh%;G^`Yjkh4)%s)LCKQNRj-4*Gk1$p= zbclqj>1l(^7_jPu-k4+yPNvl@O)Z)_3y#g@ID_@d;gK2B1~Xq_fDrZSzL$Tp6KJ{% zA4}>yqJag^-LmNwJ<~k8=EgUuk7~$Z%+)neD0?a*Kro#aO6e5*UM}&O^d*%VKUmmO zVZo*~0x{k1lY)pHnCH~cyS*pc7E>siPbY%k5C`Z}z%CU1m`Rf5j^jYGch9z-tg>{H zTvJ`&C;F&*^^#KA&eTmOwrOlQhs5%rHi^~J!Ovo-`1>FMG`sWfs7ML-d|-RiN(M)c zbAU6j=tbt;FLGXAgntAyMYvpTnT$S{`Zd#C%OVr$u)bv|$#W#)`|w)s%s~IaFJ+)C z>$_)-R?E9YQkk}lqa;@%7=)#F!^xKE)&_t3w02gWpDi<&x6AbtwD-<$_-sujM%LIY za3UChEi{W~QQ<~R;ht+iPmX%3$g!yEeBiGuEAw+z-`<*JBVFu`Ufq6b`#8f?v{%tz zLy_yK>hD^}=%KkkIlN7^2Tji!8DkI}o({K#B+$l6$xDD5M6hHB} zQpNxuq5VUF=(6}s>g=OrnQfD|eYBvC&%r8Dn%kZR|I~c{%eLSnQ_P_3nKO6C?%r(H zQlNE}GX3gG(M~(zSZFgYJ++!=YnA3+rIpYZZ-7MN?Zwvs%=mgzq$sPnZG${bnLw<> z*5v#Intf63HH=yLg5?mO-T-)qb87F6wx=^neezabfevV&`J7tte~PF4YJkN;W==z< z=|7Y9A0pY8d+h0BP{7B>(K;Cygj>AT|4$g4I;4E_5n`6KwI7Jbr00eY?ajzv(>2ox z_pPC|l3s4^e>M+IyTPVy{y7RhNN2tCCIjH1|~R?!BV*ma?H%x+SJ=;>efdm}*M0io+`^6c{bQ zL~_wXLvcJ~TbxZ6bGn-Dmbc{gDXM>&C!Z~kBH0r4h6ZqWO_faE0QKO3xS|>yIWlsy zbQH9ywesa$PvWWaq_K^5+|rCwDRHPAqvp;DGlQdzUXn2}sjHAh1mX%06a8@z@$SO0 zSha`q-X+NMGOxGaS$oiaKgqZC@+#8}=5^2Rc#2NB5;k2wh9K%8lR-fxm3X929`h@e z#{8d~3gl%dJlfJ`vd})2J8N6x%{^ZE2)(qq-=#k1bnoSz{J{&=xXbXIdKJ+D-jNG( zc77i~N@Jena^B6Z;gWpXbVwRcrR&*ssqb>o|cxw#@Y^H3> z=NSnn`RN^s;N#WOlZ*RdqJ0QzQyuCeR_5~@>^5+;h6TJjJh;(%S`Udbbi%3 zZd@yDQdgqyHp)lv--|?(?QiHjCvzxHZM8qBbONi_QfdD9U}dYCP>_vibj0%8 z41Vw!Cl~>Q`sj?oH*O-~FT&z)iN(Ailgz|Sc`olMZhQR*GIHRYLaR?|kAQ>gJZGz; z3F?Lnb4DTD81*pc!k7YYGc*oXgZmoad(nH+FWZdV3RPendx?)EesSb9A=A4_Xd{ew za$W9SMtMKQ=?UVi^D8~5?%^<(q14?0ks)!Rbp$XpO$cg*6|4RJf*-0&9VR8$E%(~X zDJzs7Vdore9u;QDyhf@s3jg_!y@>~~?~J&_{q{zqPX8a=SGKwLSVgL{kC%gZ!cy9f znt6dJLJuWPKnE=m@-ZGK$qT)&V5g19kOV2>2@t~N9a@)6@B#O`Uvee8 z=yG2^>Lv1ls{?hWpIx&x#drsz1-w60NsDE)<^9#oo1Kp z(lb3Rd>P42Cmh$)I4``EozQk*{A0R%_4txRWR1XBX3BdRh_V%IpB4y-YFYL}v)>E; zqJ_^YUU_K6I$2}?HF2?_)Uk|ywP;xpcS~3n0h9|eb8R|pp zd3zF|y^Q$J1jWxpzFT5%Gm8G-WHc?dqu?VQJvC}?$5|^V<%AoaNiIHwM@BkI(2h4T zGm~_-Im?QmZv;9P#$!DuUjGZL;s9r}|By8QjVlm*j4K!u3~9;!GXwSBdSX}0Tf+ncFr+%}k1WuGPkTOlJ7 zFo$iX+;#q?oy9C`bO0kS=i8Ru$aFpypIX>03i0t=SGKm9?y~7-<86wgplXk1NIAHw zN1+ZZzrAPJP|e7dr-H=diRLBvO@`Wx)g8vjPwFsd{+e7cI`~6#Cl{{kQ98HnkL7Fh z)Zu^%r<<|A2-m-w5P>?WB;h8c3?$|gtq9;9SI3-{YC02 zkY8iv=6)Mmm}I>36IZiOWd?a;#5KZ<3UeHW;$_A`!&0C6({|?aucNcRf-a>xWX{|4 zIuIbvfXNub6kgsl-k7?X<+(8~$XK=y^lbdIfAhI8_JQc0axF6PzBvX$7yqu(z7A^? 
zq?ixsd%pDk{<&B(BpbeWt9F0O{WEPo^F6bkUg&a=onAEwRTX5gO=j;Jf!ch`Nzv}C zX(H<+vX={z@d}`cA5z-u0ZAOA{b}A(S{r$3U3Tn>njuf+>rew(#47n+CwL|xZZY9d zgKQ{eJTfYaPzcxq3zwyln0^(bn3=EIF#8tOLiM~}AoTe}V&u*IuVYQ3O*3?0%0Kj| z6-}}fmur%CsK8hAcf|hdz9t! zN7SskPAmZTbBh5Wm`k_XCVeiv<9s1ynf!gyW-rFkPIig+Dv!zbd{O`1B!@1sA_1&DKeLDp8S`Ct#ywB=cuqrK&Dphc^|VkOdG5pS^khQHA~maq!4&iP|^ zkVFZY4~-rOZXiY_2jqr-U9J|qw~&AqnPq(j;ACzv^Ya%raEr8=5D2GqN;<0<8js{k z<=qhbFYio_Y8FzNV!kP0w(LhZ?qrv( zyw`8>Jrg6*$&<~v$RiW<3MDRjVVY6o>~Y=9HV~hxMK+VlRP5vDF91HtLGqEfAKLgN zazlH!wV5vhjq*aAIw4=L1c}u^ARsEQ`TF+Kx`&Gbc+DYAJg>M_Gx_cf6*N7|JH3h_N+4PuGRoJ;9+7gh^K#Far69jj8 zRzpYVvOvJFn-S-?C;S#|51Mnch<~&m%x@%|h~OVM=MmOJ%#vO{9A%A+EJ+M388F`Q z3e}NBZ9Fybkr4w^+2oJ(9lbvB!25kmdn}66F7Ph2j;=Y|+pg`6+QQ40Gyd8BYtPQ(n2K1oH4nMemX3S^4B_kG6Zuh@US=c_{3pxm!yC+4wITZE5PM4A8LsOy3e4Y7G zTKm8TSY)*nEK)v)VP3ZXXxmo)#%^1rSaQrHvcAvK%RVOZw9evJ59~8Bt7Re1hKJB5 zDIu#RC|{R~q|zy7k+ee`c9Jv93P)t6PI*?0f#y zwrbr$El}szBnhbPxNUDXZb81kJq*D+0Pzv)s~S!kn-&HluEB-t&DKCOwk)NkXPUx0 zAZg$<+Nu2QvM|c+0DIpxVgX%t6TLQ_=U0dk13S++I~(B5tSe0zfnoxO;Q-88$^8lS zhm}PJau55>P^I(l+&^o0W6~|XOreLSslRz3QOr_@VFFTD&*Eu##SEpORb}pYQye@z z-v?7D!Necl!%wrV-h2LMxD%>nCRU|8gIncmeK1bN7+^pkQ!R;R8oV0eP&c zDIOd&`e%y2rmsK3z{Yf7icf#4KOM7f7dB|}G>5U-|8L=y-l z!e;`CKuK9czDyk50^U1#4)$Yp4cXrmC%i$h9G3a53lCxSufv!*L@vvhq;P(}Bs3bC zzDfL9+BbTA-2q0~Ro@&X9HhGbRDXHo5#)kk4H6W^;mda4uKyMV(KzeU2qElDU8g#L zG6U7y@m@`Io7o)Pg%-`j(?5~LVilFTQ#J0F_uxPI9?}VC4376RTDG-P!ajTH!TXw| z6K~rL@7&KFw@+^_r;kPI@KkgH_Be0vg`lcBevZL@IEwy9Q+SK2Z4%J*HoSI&8D=oq zFu`_Yj!hF*hAH~h76&sZ0!8XWQI=14qf<)+9=Xf z)Q-qx-sAkzBwW;Js}tN3zG0g6-50+2Wk^9geab~)Ix#wnR@jzGZ+u|ws6sLIu2p(2 zI;tUkvdG+=xBM|NlKS6J=Vf3xkcE;#x|l3=i3cOd64kr>PHEg$os~|DfD93dX8uNM4hj0S8&nGps;W+}u#IW|=;k z@=3k>pW}KeRiduzjWjW)+f}m%FJL3_$aR{3qERE5K}v=LkB*sq;Ii^z+NCga#3k#P zZtmwhIR!M8`EqMwUXJJ~PqYSa$#u@V*QUj}Xj@gohQ!k`?I|rw{tVNg)%^g~Ym!~N z-qShBP+jq);^~N?l z``r$Zg86!}Ti(&_pIfm{f+kkM6HWH1yV>3xthQUN#*tobulOVIsf`R%E_4zS5^nJy z`gJ+@r#(9z-GcY5@o2Z3|7U_}EXrAt2Kb9BMm{gRxfwY@7`WS_6jr{P2g zGX!jdZ&m1@{fp2w36H_H-+4LNw0OB(yU1qIR@`ZxfjYbH+lcRW?1u{>0NQA(HnOY% zQzk>kKwpBAPUO*(fUYkkG@RJ`Xlcj|n=sK4tB(@buG3!b5B_p9S@0#>UilV?$?&@P z9no-u&5lAr!?9S$;cf8ZrBXzbyJodis5g-?05S=m&NYNHM(r@uGJ$&&^;G0}!Q=vIwop)S`8TqO(Rx*X7wAhk7W@HnRu07fLUx@;!<2t( z3T(yKuXzap;9!k#0Ump}L)grj75qAM_#Ku;kp{tsJm=ace*3OK5UVMLdLrfg6T(c&W-^R{0(LsEa5b)(--~7%!s7DO z+JqZB8z=>W*P~|8hyE*8ld#1 zV@oxf`RRwVsd6VsilO@}Af|hnCm%w|Xp;wfhh zzSklHqSe^Ij02eso}0+sJ%9e`OwJxHX*vh$&&~%H&VoOkD(ad2&GYEnXXm(k0XPTY z`e>3Hg2rMI3L?9;i9`n69iWMIg1QzaTP42?$mc%F^P&=Ozxj&j5eJBvl2={7A|3_Q zf4W^)Gz3LYBIChrUGPvFG%>v)u8by-nbQ^>o^-orV3-;&u-yjlR03XKGsKQtxYJ2E z?~Kr(8_@6Vk!wxN^)h-A2ixw>f3R{mdGFWvKj$W8)}4xPr`h`TWHPpU9)d+|M?f)O zDO0;UL4oJhlP<$5p4%Sa75CReBe-HBg)Kc_D*-OD#M^#0!EK=S7oBiHjeVG>S^>63 z!li5$i1<@m#rmXI#6j4g7Pe=bNC1%YSRaNRfL@x+C(8GMDY?!A{{cfe)t4OA(`$tR z1Ko$lMFi#&^B@8XkxQM4E;Pf9H?A|$)6=VQd>i$jdk!r&k!e=Dt40XR|HD5rp(>L5 z^4S*%;Zh|*?eRfR)C=2+T=_TY71~-Xmh`Sj2k=&_t>zr`Jkg>SqtXrOB+gXZBL25v zjUG|B&Nq~xvZ2+FFL2{|_P-j`8 zK6cCCW2W!6K;zw1e)XuD#nCrd$itmp?P{F*FTM!KM)IyJgzZZmpT@q1<^E*e4Y7NoxHJv z*_+uZ&_Cx*c=PC6ek*l~={o~c+a({XcT#JE6Q5Ji$h9MhT8Akv9%1ss) zg62g_!1#oonUIlD1y}Bi`A?n0Bdes7OkUpFiHYkLzWSz;Cgo+1JvWQ|W}B&^>y`pF zpb_0kMp*+ds?B=u#ookN^A_mTVvu51)6kcCTfb4Y_@ zRCF3`mqA&!n(4cter@~e3!Z<)Ht#YcX69jXJQ6}^bhJ*%eO&;Emp zp+0*0321VhFcExZ5VMs_scw)H|$lIM!X+R z{GCyuP$`mCJCeYGpm`6Cn_qX6J>#ZG)O-h_ z;r%#fh8l?(UTv!RY=k#2`%fMQz=ieBE65p0N)IC?VrfM7!4Vl&!Mj7nLhek{pA$v% zX$TpV>3w|8p!7a0Zj(%Zlxr#7@j9W7JYH5D_|gdi7wH*qluJf%=|ma_Sx5*N=cg$Y zd*nJ9XPrNC!3o!DI9lQcB|m;3qzPqPoW7#|*@ua#Iy?(6IB859H$DPzJ1 zb+iz)B8jLHLy^L`i034aYtxU|3=qh04Hn@|8EJ{zbT;Un10qLn-A^K~&DAI83otgp 
zCPnS_<@pMvYggztNCfJT|LfC#f0Y!K2RqC+QeG1+Y6xwfG)wwPS-#~(T-8}+)k zfcG7t(J=NFknR?YyG3n2qMU*?lz4TGGWAVMhqHgl#?WQliwdc;++(RFg`d0J-SpX* z+&c4|)BMP8_4_ueTHbp`?_Be4L7@{hS4B|fQms==PM^oUubYZ7V$iz!;y3Gl$t8oM z5&!Pj3eD#4xYeYLUBr%(l-XA~Q8mXy$sw3LWk%=$V4|;n7No(osh4+H`SP06#Cguw zdsoWdRp^L|1P+_NPi^Cb8LUXNv~}7AorE#p_Hf7P7}>V!&s;kSruqpw0~}A9vL1$h zBU{_J&mA^yJdYa=2P}cYYeC|UK|t=E@J%j_6IjPNS?e~bujA(Gn>U(CM_(Q}KPm>* zS-99|R;{m|>%9(R2VU{0b1L&MO~zDcE!e8+3R+l-R%*oLF;!yGU=EcrdG}9Yy#1N) zsjXXhO6}Hh{8OiRzz#*x)i_gLg5O6~j-+4mJ54C6Reb+ogcKfr^-G8CecGt&*Q%sd zq^CJz=tZOyGy}V84abD=9!iRKy+)_Co;ePXH5HjYN0 z(-{d-_9n!EeOQ`*w=?IOmb3jEg9HW`41xtIo=|?y@ zfgWRXm3ED}`45qy*KZsⅆpw$-bs$*X0bM*Em@}JTE%R5)#d^OE)XTo5cZ1>S;fB zT5>8?9vjG^*3gEIiMWw%aGi_DKX?aA**eeBPP(!qJb-1KzXYe^p*6aRY z2wWm?)Ynld)F#@FQ%gmf+T48B_wG43#yXjy)JbvN?{uq%XXrRa`aIW#_T|#sdzr_Y zmF50w>9oZeonM6aZcn4%xIXKv(Cy+`%wypvc&8!jEmPOtMB5=2L}En#AhYCRncln z3y8<#l5$o1_W(&~79GqT@z;2KL$Fwyz4+{ZKg~fAeicVzdA*L7&JcoXuB~db?%BkI zYZiNQgFuJ*ge~#dqkrf1PMEFyW_g~bCH>%Dzngv=F1|MQIgxgVNo#7 zNHh!)vi#6CFOBZYAq?LkAGcTQRwctr7+c_a1UxK8Lr0Fxx}_t{_}6b2@AmbYU$``D zDL3G)C`SB6{Gowq-7eds$#$5;E%0GM z=tQKVd%rf0$0teCIkjV^fzmeW=%sgyccJ2CJKp|-jyOEJ5KcwdQZ+wMiOg z((2b&^k8?#sVfzYjC$)3rhqwYFDOfn4eHJo=oA`n(?sTm-)*|6j+@vl0bnRCX)a+z z=z-c#Kmn=hE>w?5YT`+@!<#UWT-VN5c}Z&#R87Jhtg1CwRhJzY$s#LJn@PvdM=SAE3FrJNfu2hera~;eQ}=n zJ|{e2vN-ABs*>yQ#bkx}*^TL$3t?M0OJD7+E7}aMa)FU4ag11aS*9{@z>z7a%z!xr zz?FJwQcQs^?P+teopDOjFC&UyUZ6F2Nka5=#PS-Rk<5)}ohELHWe?Lu`bAT3HdJ2k zG%Fq@deXdLn^+4X!)RrQ%9yR0%?>sdS94-;yWhL~cm}|fkU;cr*2`@=iy{@1ZE|(S ztqsSo90v8tDLdmPddV*$evcg^b~2;JpwWqgG78>B>t`a|RlMZr1s(XT(iqA=JWKi| zTsfKaCo*AS^LAIWHfOyQi^b?-W2HW$kYXdcd`X0*^>LfTmo(XX0JDUmf{8{$v%KBh z9UXaGn|c2`a{PKpB_LMe$Q5D#_YeX`k{>5;M{I=%vbHlYgdYE#4Vkz29EiTLd|YO- z3g`Z=OPts7eWULr86{{y?qBm~QQKv_*P34pHgKdccY%1xc(A^){C3IKD}2yt(UZeV z7g)y};&WT(*yX@Cs;!!Gvxg-iwmv{kuPM=-@U!Q+or#+c2fs1m2YWcPz6_!Qc5XXN zJYQaw-~YfFqL?2-;(l@NPhs&Ag5Jr#b1qSN(SRIlsy=#gykIe0!kQyCCq0x_#X~i0 z;+Rh;%esITX_X-7OB6ktijFjX(4SGc?Kmf#)5V{5HLlnk70THZkGGm1A|->b41|&c zT7nR)#LlCFN5zCnb}qnCW?>F2`a?QK=;9Zl)^)5uN+&*39x8q{f6~xWm0^Uv2j|w$ z;?PCvloD?Kx7a6=Uj7s>wjRElg`+8=_tr3Y*|1g~^fSl|$4=`Vo5*bw=;wabfc~&f zd{y&ZA^`IelZCcSU^j0TwR|X5!Cv0~B(MKd3HD!@=98ltF(mKwHEm?v!#xH!(_C&a z57xB_N)WGe$P=*|83$(xAdBUad(z}s3LWskK`w^_MU1T|B73X9DHyVYD^F8bIEmFC z=Mt?5Vi=}-)&p&9Q@$qoR!7f23fc_XW3o`6?=GP`))9w=U>Np@9?=DU!eNhCg$WQo z*gTKlFNS??G9ks7;=}Zt)in62n`DG)&Z{;0-SoZlI3yIbJ=9}@az%RPDKRg$aV@?{ zbPZEi{>cV0Mw3b{#Mx)!SJU~rlM>UV{O@nfa|99W)%rsjdSOFFLyyi!VhIwIp<=!Kn+AjOU+Ws1vmyn zPy41XhVgceHS7wP*~PJ{3@SmE{@4L5+c#{C6szX`SS*pRsW0M{+bQtn-Q@IvO{szW zmhT-pmeyBa>%iU_9q}~(A&XB}TrAV3by08nhF8g_jpT30EDKv#mTQ|iFEQsDCw-xj zm2Su3YeHPTSMq1u(aTczp&Kw!xQ>~p1*a(LkQvoA*~`iWJiyHb`)M~>7nj55zR#20 z7W;dK@+?X500p$Rki}U-txPJ<#+5ERAiAvHOJvYX7?x(t$C$bgBtd;G;=FabdOJa1P+R6E z%!MM)52$$7ap7LWw{6V?WNM9OvAOseY0kIdKjJkxU?)AC z=j|5{16T+yW~f)U{l*Yy{!-k8NpnG?)&GltBw$|hyp3Nt|Jk4ZyD;^3_;UL)kxCP0 zpys$ol4pmUWb_kdMdy-I8LYjBZ(~URvo(?aqRul=3!>EEk51pu%E(BvYxme-NMAll z!wXnVq=m9#$N;?^J)AE9LC+%uQhZvBUbwW;I}wK{+AkxlhG&zqqT>pIWB#g%&Tz_X zR79U$9llx{UoA{~@0yVC_m1>&<{XOgLdb8L2C%H}Gem^a!VS-J*unwE=)|`Ud3G$r zdb}LaKwRbMHTJlQl*Oh!u2Ju8_1#Hd%^j9K+1Km$@kQ)zB*t;Q0Al_nT+TpO9aQb6??^k5KcSo{DlmZl6A&ySTO zrlW$dn!%3mGDxkVQ=dXti3PH-$4t;-DAKo-lbsq@=oG!`3f5PAL34k8EBd=3O=b|! 
z`eaFMkz>>q+7)~fj-Pu%S(vL-W8#S^*qa!jzU_RrZq`5W>;pkEV~p4)1WO2gngPjx zc*@_{*7rm_7T-%^y(1cjKHv7aF!Ls-AJrtS>j$z~jLr2L#7s=Xmt>>(9o}VuWpd-q zC_EXzo822tB7X;oU@Y-nOV1H9E60gmRLO+l!fO-3j7C#DF9cX}#!2>kdd0A5Z(mjP zI&viCv9Z$UfA+Y3{IKRv(A}E8f7|Dgy1!`Rn`Odw&ohu1I3m(EoJWuacNwS0@sjGY zTT(fUbmJ0#t=GBW9YQD^9A_y4grJ9>TssrN%YQf)KtO)7j^%GPL&p?$f}MIe%7SjI zN-#En(5IyNRU3?c>*t7dF^pHe@zM9xvtoT)lt>~UO%)n{K&=&7cYJqD_9O#W6_?>+ znaWAo-txVrQa08ZJ|-z^<%8xCyPK0b?nG%xrt@T?N|96z@wBe6eH%$dz2Vj&n0pxB zZgeqnm~@*bM5T-h>ie@~?K}H$^mup;%?(GKY=vijbe<6W_AbaoDs}Tgxyxa$6T#uO ztjn5ymrlqlygmW!`6KQ;Naul3%)N><_o4MS@3ltsTsaVv-s+A&7^dGt>O!03s}ZEb zhIBv2m)QCZ$dGO%qAZu2*54YOUK+XIQbu==6NYmcba z#;**^2QiGnxH&Be%2xKgm`)-v_v=SdPRGcrt^y%CbqPPDi(Z5EK|vwnd>G~}zQvK- zZrEBc&)qvLHv;xgg6kpCg^-_gc=Xslq*EYeNj=@HME6v2M zy#wN?AxKl3BZiq70?@vUb?R|_pznb1Ll!?OzIkU`ujdCYeeeluj5!e70iSXnALtm#Dl4)SH2flxEPv=?XCbBi&BTxH2fL>R$ZI5>PrEZy~b4PQTcl0-*jCJ@>8PZskI!4OBb zQ|FepgcBzUS~ZfvN)$gKs>^mo1}b{W%43_*y;5brD!A$A>CHP;<%gGTXf*viC8!i} zDfUr`x5HMLpT2x~`s*0I`&df={r8*9g_NDo3@Jt9ChV#H{5!J^211PwdS@+M2 zmf#c-1`0PR#9#xFN~%eHrMU0u`Z>Trzsz>_r)rJO>)ijGgkf8@)HQY1eoVY)wEbH( z5`cAY>5{7w)`6k-wlzYoKrn;13ddO{tX|!ct@du|D6#qROo;UP`#JujQEZP<;PI;^ z8aJm2`Cd9u;Y2`wpT2`I$#q=?x}d2rdngog5!%bf1PSn$^32|fHRGqIJ*zOCx-+{n z*+#2Y{TwKEz_nJpf7-_5q*yqu;GT>~61?HF9L!fi8};QJ2x{!^_vQ-Mma*7I(klpE zb19(lL{R{ldWrSxX97))PUD?;{50aOo&(ST?r-=s0z?uPhpHE(_4MF=!k+TSxm0k0OB$QeRFdq>Hk&f_UY_Wo4nopI4|3c}f}d7G z^@Ui)^}Z|P=iFI0-s7)r95YMa)u!zWna}Fp$kbGlOR7qf&s1gB*c$v9)$=v8ScJ+) zZEw9G`@Sac#G}nAA?n_5{mabQ5W&Wz7A%;jz^^@1c>BJ+9htO#$C-Rr-wV$1DXMsmBk+4hLC7@dJ-O}3Rgmz}jN9$mqAa#A8 zm$hHn?MDABcE684L^V#a%pcAgL!#v00S0LOm}1fv#Liyvmf!>woagzv9JNm1;+JFsq{Z{t5&?vS1hRGY0E?d1FzFtKz9)yZz**m5fqgLLGc zV_}eKN-`fr21}Q^5qOxJYheL~Ro~zp3WR#$am77B-3YN%`nJ$p zT!{-7_y;o2Sd&>E_1-oq_sGQ2<=N)B%u?3H&FJ$q@$V@VR(@%-3Pj}W!N0KKPak5J zCU)smF&%Z!u4=*(L1sIK*x_v*ZLEmp61OHJYUimaaxRToEMn$(H7%k+VR>s1zb4q> zQ-&Svjcs|FG&>sx#@ZJW!T!8t408FD>R3);xoG{X)c1_}?KvpaVQ*;A#M56Ae#^K; z{W5O8!;`|5Z&rgx7Z{I|h8nf`{$95v=Y2}A(0Qie^84u@Um>2sEng+g1YDn?@+*L% z9;k7uW7H2c8;9u9QdC}oT|d?q4EKrmVv@oUUtp~;{y%t2{oQNZZ1R7+&;Nk4Fe1w6 zbAlushKMgUijl?aCrNyg#+2WV;RFv+%29hJ#sf_f%~gh9sy79-1E0fFmH~kD7k&Gk z1z#&`;g%#go2i}ri+V<$UBW|I&0&F_-=qpg9LdYxfO8;&jd`f3_0 z9+qU2k-Ob=j72&cFd%8#v@S*7PdskLm|}dA*_5DbEO0H~-f*tn$YFGa zrgs_apZo?ZV$4{I+a%eSj>TMTHP^%UG_sR*hdF!z*Qwfq$}jAhdD(vBDoW)>g5H8U9(XZ4I|X^`WP6+g!s!a{LlS4*EBcx|t0b?v!M#hsWL@z6 zUUU*?ZGmiT+Z_QlmhZi$ zy8n{dg|c6z^&|7-(Ak@EfFYL^m)o0UvumVQ_ohhs73a=Sr75#X<}6+fWjTQi3r-YO z@`<}8GtNH};nncI>fyQ8hrKbp^2`S&5jfr0|z8$pHtj@hTwSg{9N0j6hm{JB@EsII-aV>Ge?T(P*yGW|c%L6f|vRA#IWB;O?IuLDArYV~W<5k(a}< z?hBg0N*l})66}Y|+Tn$p?K6Y$RS$XsQdW;$F^!D(P`i}{4CE0*JuLz!OiPmU(KdR!n?9pg$_p|D+O&mAP|kPN+!yNC}K{> zev`Y{7%%jXDTBLbAMrscq1}a%?od*2f`HKdFL+KF(shA#Nj~g=hC^k@Z%%U;TsXRf zj-kPIT^8q{kBzdI@wWIOz7xT;6m<18=g*94h+<|K@tOpS+FWRXFpuvdAsgp1WG>SL zrr~b3p=$*6rI~`@n5MkPb|Ia z((C9q7+g+Of>Y~WA#M9Fvg-{WTfnL=EVp!Kp3o-85_%LH{~k<(!ERoOBl%wx^vEM| zuNj7l1VEXm+vrJ?!8RHnLHz#v9Q~(ubJ%?k8`tZ@(KoKu!mfTT(pfuViT+>~xnd0- zQ{<)kWAC89v%QJo+0gtpiYGGFH)^GwrTRno4y0!p{Do0oiN|H!+CF?^KEYYC5;${ znB(=Gs~BJtFA8ZYGm~8k$7y@o8RNJ+uURi}_8!UOEnm}a{(2110iV~)pYmX_X;_jh zPlTn-JXt&93V=F)O4zEw2l;;o8MrABVU6 z$RDpjH`tm=e<`_S{b0odyeY(iH^HHOG{OG*iP z%v)hYqbEmSb%{0vX0naRe1G^J2EUzo<_o1R>=dW-h6t`$7iQhaesVKW*L1f1WyZ|h zd}xrNt%-4g&O1FjxcyGB1aSG8a=!1AYaoeQ)r`)Ekr!%ZK+G_Dvgp zcpAp3d$ZAUr?Bv~-9N}5k!+~0^zO6WvUU)fIkK*xrfY1Y&)jSq>0#oiRf@jE*izmx z3eM{-UdtATZWGBvC=B=7QR-auA~wHQphJG%aDfwgYd6N$L2)7T;BHSf>#m~&qkzAm zbSGVMuDu5FiC|6lYG>NFZfvuqQOzW(TvC@ejnQW$QI(7W*>0jUC}RjoSCpH8mztgW zT?n=L#vWyCh!90%0q>^p5BDEu=|y=IJTu1v974Y(pA7OlFJ%W;=C17-?K#wPYB> 
zVXUHifS%Vy8^h-%;Y$*IA?Y~m$H;~dF6!UDogd$s{-SaErj5SNGHJDS2}q9h>G>rU zkPxKtPF^mnfW^jV*8m0cjV8sSA%gRFv`pRzJRkdaey~9=DG>`A(l;(f=a}1)?9a4mlUb^{@nZNq1@@%`k4s(?&M2{&sOV>Bp@H zR7zp&kJY$cV66@#KiLMj0Gon=&*#nldQZ{K|MVbCba;`Caxo>Tfc~!` zCyMRgx+jerw+?3{J>GS_o!>ErWn^bBRnQ(r@;LS?nojsn$-2{AkgP*Ht!M1rlk^!% zM^0Z_;MO~&J`vQ5FitRT6_PU6t}z@+qDS5OE)>o*|7xCfUL@+=$yAwe9GnDB#W9_& zNo=-}@RS>oHg(lt8_!=4d`+sU{CO7V38SsNN;1pJ5OWcR1xJ}k{Coql&@!*ZBJHm3 zl<^^{R_%~3SRC=E>hTuz*$S8bfd6OOSzkyZPUw<%J5F7OFB;n#UGB55y@6fy3t{(R zVykh!#ul_>?kJbASj3zn-Wi?`(6^>WJ@hg@Ly6>CIQ}x$t0Y03enR{RfDp+5DHlo~ z75KALaHq>2HZ;g?A+JU(@ERKJI20Kk8k58(=Bz{Kd>ZocaLRs7PcQzH1zL}#Bo$G7T}9r0y4i}VZ}@Lf znBB>jd*WDRIw7|VEKVi%fZbl)4ccgu!4aNL;BBzBJ-|R+@=Ue&;cnY-s(xESq;?N} zzdW7plvN)l_~I6e^jYod>rz&Wxnd^CiMfvX!Sf^-bgDT`?^r}H$HXLfAOmPacaRG8 z;-bW?+FKycu1Ef(y@Pbjem>~z#|1GvZhXO8*Jd&2yh?^9p&2#zG7NfqX}+_)Bir)BY{5d?9xmkBbJ- zPIal~{@0Zku|_80FQ^3+l$ED-pCEJGml^f*aGUq2Q&@Nr?tM*rrD*69!`62N0i3() z+okCgS>be%xcQ*o$mUQ+l2_n|l83D>`2VU3oMlD85wsm3?H)QO-p*uuZM{K>)PdV^ z7)>iFWQk$*7%jv$SPZ6=#2K9iw^PpA#pI&pMWh()5^dsjaQztC(NqK2Ir^?lv)&S| zRCsNn?tK@ye3-dugrbt|Ak&e&#Q%-4)FpZRfhR|sgra{pNc0HLjV8a|=}$TNTMH*% zM2FBD`Kj$|d%i;QLbE6R^p%w|Z~_=!@5dl&`d&(wSr5|#AaZYLrmiZ(kVZ{9)lheHSwUGP^`2oE#$7? zXv50e32Ac??vS0mnqQWf3a6%V_thBmWCo&>-S_@Q(Y2Vj@r8aNbp{(3C60&o0NYG~ z+8SF35r6HXenQZsf{@#eLUKc|bh|@;OKpcQ+kkN~W?o?)982oMf``oQ)6s?Hf}7Bn zRv7NdH!p+gs!0-C8*VvgttPIV%%65gaye0+IQV|<#!lZ4>W9WoA0y+nEw3BS42=oFB29|bx$RZfgS>hWe#}?cY_T5s3@9-JDS$qFlVsg4wQ;VX7ybQadHxKw zZd7m?z(9js!u`x3W-PG3C47HVAMz%+Y=Y_iKcl%9yTUUhV|N8~RWtnm{OxacSeriU zzfp7eoUz$FLVV0o>+k3;xX&24vW#~vOi3U>1*G)-9skM_zBR(ok@gmwVC?zJDZ6bo zfF!Zg8r<5NAzGz=h&e1d+CRT;w6^H_bGx`sGnabeLZ0xmK*~dLZTy*eI(oC;g+F{0 z%HQF6FZo{W-S`q+?=2er+|GONmaBZh6byIxDe!1bexp;rtTRO3)6{9AJWJwyHP5$4 zz_RX%bUh&{6HAa;VB&M`!okmhMeHaNEFdR}ZW%Dr)pm4;lXqgm#I9Ej7K`&w)yENV z&~(>B_G!p~rwpaa!zy{wZvyRw=#A+HK{R#w=NE*0NIQwGhyddvJl!P;#A*&bf_Ll) z%!7;^oGaN9gdTk)6*!Zj_-f``0jV2%9;j8D4Gg}>1Ce)oPs)5@bV5d=>a+wx}#N%|VnrKY~ARMY({b6A)l@RO%M3`O~2oGPga2)h`R zLcL27U_uE!`R0+tcMWohw;1j{xMg?cYs*z@1mP}86T)ndC!r^SqBe2JuA-!wYl67E}`2( z%Zx&M#m4m^dYaK#T3eFlLXN$oh#IILjw&WU=pzhipFkPMSa>sj1O8Zxf8bX5JYF(U zKC#79Q5)49Tzrmv2Z5k3)9xh^8cfM5l}@mFEuGD~Ty8RMc7jV~gp~i}q;BkwZV%HAl<5c;Emi zPbYbc{d!@jZfXx-xVs!p9VL?YdqItVJ(rg<)F8MPNQ7h@weT>yf!CzpWon}d)ruDg z=t{W=tMhlKd^-9re?|?y{@OBrBJN^r-%5J&foY22JnXejI6BT|`@J&rLF)N(!O_C) z@}orw-xlFS)BPpjJp=iplig0QprPnNUl}kh;wn0!aFanC5A7dKyg3l<6W7m%XDprO`p78E)1yXu(F zyp5W^2AJ7`GD2_t9^Y6mQ@T>l^7Ws}%tbdG-Qa!&HBao;mi+9Z(^qgc5;~gRRZn`) z;v|yk)3Xb}yQ3 z+e9x?bIOO3 z!yimhgoj?1@C|DTezU$pv=Gt)ZAdoo#9LD^dd-VVSq|h$-H5HO>R~bdQ)iDYGrlZs z1w-o|mh!|tPcDU<^HRqoGO8)H4<%xRhL~I%JnR^JQrR0(c z<}J2=uFdJtw1;lQ@M^@eS??g>{b8G~!Vq{df6DTxo1|XjbSM)?FwQ~$$d;?^JOawQ zfPZ7nkB3O=?x|+U?Kwndbah9of;ezowy-V6J00xQ?1yZZL{4F0D3+xfCWZjM;yOxE zx>1y$f_P?%sVoQdH<{(xjI5B{lmKg~<&-3Fj67O0_CypSwz0YkFRTg@PyfIZv%NpC zGksTZdSc;KV^|`JTGehe{gT^X?8584fVrmB7hasO3aH2w&*@cE4(Z+ve@)4C8Bw~9 zHfR1ChPHtae209O;p?Hgr6H*NY1mUJ?DKZI8La8%p<~q?zt4jFzQ7?iyb^iq^)-Sw zFk})$fI;tR$ms1;;7YF0BYN!i&%s`f>#GO+&f0T0;Llk1DkRn!r9 zt|51CMYV<9zwWygVPbf`gsc;?8s1|J$HZYg3-%yDB1_V8PFH-*R4nQ)1a75SH$vF` z1XbGsd6VV!u(UmY^OsA0v*$ddOUCbMC;4-Y35|Ex$)759-cH@xXDIo`lM?FuP5yF9 zA3h)Jt<}F?I$!(2;xPu4fcUau8<9*m(8NiHRJqzRJu2FJZ8aG$YN}5!p-=fgs(&9z z3Wq0{*R%n3u!>jdF-`lP*EI=vuek;8Z})qI?=$p$(th)}F{P*AIgY0w&@&*sBhVV7 zl(YTrK=Ju5E}&gI)6zgW9*pp^5PB% zz0)vwnX6j%R=a@?eh#4}1LxW)u&n0uWsiG{!F7!+f*)s8A89eC@1Q!~~2H9tn zidmTtG`$V`i0ik!-A}PIJtB;ElQz=R2g{L6H_qg(5?4<%^0k8dbAhtH%(n4k z%ouxU#gitJD4Wa>>NN~^dBZ)c;tF499>I*?m`eUvcLv81?AXj^JD~ifRBlnoNPUbF 
z=YwdS(s6yGYeH(WBXi*Hj}oWHxb(=)&e<8Mbg#OB-Bh_&0qO0x#P!ok3?+07pTfqWINGR@}FBgFk)*1bJ?cAI&jW%aA)E_aw2r@(P5naRx4pt5eb21!36T+kAD zuFh!$w+Jzm*=!X!2SCN79u&_Qh*_@L9lFGkp7u4SVWSizbbt+#*hiEZuN(s#z~)S+ z^!Ff=9MqiFHPBYN>?`(=*D8xmbd3htAvQm8lhnNopT~_ zk?YTZ+^-XpB({uM3{J`9C8}HRq_4}9X|JWUj;VkhxrQE{@JQ~88)f(mu$Fs@aq-)r zdbS*PpQsnMD~%WKkL@v$3=614f~!6YjteH%*`05aU-M>I^navv{!+aWqtx_3$1iq) z72E!bN7RH8E`ep92Q(85pz4rtg=ei=0IetT+d9s(Zn9gPlX@fzw4+ZuQRm)NT5ZeO zn`PbI+Hfj%04Ok%28S+gnyX`LwT|955g7weLZcOK1`+M1SZCbBLftc)G0wRsvI6Ig z58J>wBbuDx-eibBz*Ew~XV}4%+Y`-S#`l)PdgmF-P#O{mYS#JZ8#`4oa@DiU>UN|# z%H~q3x9Y|AikcD!#$#^tUelks^ci1xgq-ABI8eN~|JT!fqxn%2&LZLYvE8`m6NrUI zDI7uSz$t4w>sA`OMoSqBNlgit(kl(;+{6>N4R`VvNx@;RioF-;{d5P43qV&TF$X5U z-`uZT<*g@PxLa+|T0CEN$96ZCXVls~W2(S_SzL#IEHTImoFd^$;KtJe(fEv=Zq;2i zKDfZDH+D3b9)2pS5abtpr6WY!{W#a?e+orF>vr&=Y5rhG%VinajqJ15u=-h>iW&EC z(n&Wje97OM?9P&gfV3-SHeLn5E3x^j05DodS$7f{PJKbbhom7lMm*RvSFP7efYf#> zDKNnA_mVe!Kf7Nrc;B(YTtOhUReh;6v=hE_Kb#&AQ)*68XzjFH7N2kYW?mN7TP{d} z{~$6h;tuE?7QuEDu-nAlFBtJ(YcSb$NI=|r*&Hvr>~0fuAd?nP^f)ElHG8{2TSxW{ z+Zd@UbVU<&S+BbBj488z1-Qf(I3S94i>Bf=Pm2iz+P^gcKd1F8Up_aXh~d(=qqY8 zd@0lcmNC63zNLSy?>(fT3a5FmajZ>EM|=vL{i4ckzyV{b!Uq#uN!ox5RyL|3kt)L;r}^1-iYo>@21i z8+B=aUmmj(=#(fCe6kpPV04w~I0uultYm9Q9wa6f8sJ@AaPWd+bO`XO^K5-7(*H8` zz^dL?zgyOM=^8TT&l6>1@}md{CGLR41e9TmfnbK6-{7iZv{smo_%d@1POIfi%ybA+ zwd5U>5)n#lJ`jpH7;gWz&KF`D*`hkw46J@;%P!OrTgtzgvF$!zLu7xAQA{qVGb#U{ z`r2n(qJ$ zu|YN9%6^?^8iC-#jt9DED*Sl`tOl->`VVD7!y z88K_Bo6U!uyKERCTs+j5B)|Q$BP{kl0TDh23#CLY5fQ-c3t96D=^1vZHdTMVqOPnc zj*xbT`kEgr6Qb@Thy>vPXdE?B_v#%@8KJkkh79POf>K(!R&mC-@f(}TNjk@ZPm<^a56Z_ zHh`f+l3_N?`+1C$ z4LbJ*9|9Tm+(EO6N$wgX&IKnN2y@hFRPPp+ZtQ?_@iuKB3@^_enI0hG1qhP?RltuG z&GBJ7_9H>`y-~%PRncjiyfsZn8QG|z4~Haa^)aIb#-P}K5&}77OnUMo4X7^_<)6-e+9}N;0Zig$Jq++qJNG|OXQstW)d)UFG889E~ zdG;P#&mBuJM{C8)c?ruT3a&=!Vg{8<0Xa@VoUn~nT1w|P9`HrljX$ej8MZHKHF$@} zG@;54J-F`0i8_rVbN(IKve5wSk{uitQSzLdSe-Q=&RVVkHy^bu2`5MD;2%fSE4dtJ zT)%W13J)c{nu&Cbea~4mcO?_rEX)67b4kHzv!h&8B2D3co#e<+A{pB<2mY*QzXtTv zJ(@s6G0%wa6_5J$}) z(a}P$l>>Mk`epSpxWkNDQ`p6kF{v54(z5p{0r6d{Gv{rORPZx8l zUW2E?rVaV7=xc9|fudT|jwY8;jn(lr+AajZjc%!qTEh=FsHbC3Tv|L6K!x{#P9#1D z3x&~$^eE~Z18si@5rk$05QR zpISrg+vu|E?(Kf7pv6UF8{dLvB_lTL8z2$xcXauqEEmY$_5P!h%Bg*C?>J4)_?Iv1 z7Q#t!W>x|+H|>Q<)ZUF8{WNUjxw#IH$ZTO6 zwo9y5E@GXChiRdo?&7)4Oj-#cWpat1Gd$kv4L90CuQbiwEY@ol9cLo$9P1`6;omj6 zP+CJ+W_A8Vl@>KBY4$1Hm97Se2y>8Od*^%Ow_b6K@6*Zff^5p)0q?%`$G$<4ii3OM zJ+0lTAYITOWbLHvYql`Y+#gqm5?eK=kiMnUS;#n`p~=4_2?wb_cCG#>#U68;@e{wY z#}&4xv7?q_;;%7|ez;GQn&eLEr~ z^-=mgeSwSyRy%l$=-K_D5dbz8j^8iIlsWtH#FjNAefo|+p3!sJ3A;i@XXCso-ni7|f_Wn5c4lEZ|K}0;4dC5cVWH<8JHt|U^ga~_%wk|aJs4rMl z586b=me7#-4+JOWB0 z7#tYo`e$VbA$5>?9s&g>#eU~^^P`g^PQO26fM`eNC*qlwk|x$7w=&{@V43TD+94+Kvk)zC1rR&^Z4ayf5}mn=$~nUt_^8LSIuQ5%7TsBvp z?BajrO4PyaOWSqz#-{&GLZ8DAc(!eyNa*FLev?eo)(Gx+Z0I~Zs6Jq$wlmj}3i#C7 zaD)X!@Vk}`{Ww9MhRDp)QsFo7cnn%0RDZZhmmQvUyFz>M@Vw$Nz8IT8PWC7y*gGQa zbB*LqTS=T!p6>DIqMMqv6nRkZCG&X{+xbhbuGiz0J3J2Q3hFgO7%cuR%|sU`AA&h~ z$BMBRBY@?AkeBXt)^`8MijDG0VRsnsEW*rde-lgs;Mot>9Dt>r{kQtdQjScsX8kSe zWOpD=cP9xwlDLYGaRj9eG%D_P6T`bM5e*S9S>PihZAaY8@H_#2q)eC~5}6RlXGD`X1K>ErH?G|7U}T}0M)GGnpaEx65$%Y8ws=#19ohD312 z6pM7A#}P=Qy!>?RQ;ddM*jZkl>#-H?J-xLf^sXJX;pb(x+YcBmijR{tp(Z%cTUf_XXLqgZJ-fOMR%Etrk`ni&D|w0I}Y2uL$sw(IwSdbk7UIO zvGh_MNi2E87dqdq`EO{G=`lF#bpK)PGH zq=&AdyJLX6@tpIWTYvcj&#-6hwcfm0#jlIKb4hHdu+VlYxuy6W0g}UCQp?vSLqyv1w>m6D zp0V69Ae#lfA?m{Nc9<&7Frv+F);%6R*d$y$1IH*^Fb@l8$?r-y$BwMANT-z#pYbXJ z55!mGn3v90_Tk$GFpROyq!KK}Tr*9oE9lLs^A&;GlTJn1-ygqkW*P51Rn62!ImY|( zv>76C^J)v)RXp_#Y0(||^m1U&UA=buL5we3+GPUOZRhfJ=q+?J4hQ_3@$NVyRe{Qd 
zM}6LR8K^sYOt~Cq5o0Jykhw{C61JgU%dtddNPT4=B@S=@Be8DqWy{c{=~tiYx5&Hm z;6GJ4ZfX(aoJ&`_#g@uoBY3%jge$^>H;(MCbsW8}23-rt(&$w(cH<9g#CHFHwCo^! zDvrhl>O4rkc)OU?`#Qn&#lM3UTR)ws-s-W$aMHg&`-}f}BovN<^1khlX?<(_^nFc) z2fhM8v=f@m2E8g`6j9Wmm!_5XX^*9R86H!iPQZM1n4od8)1K>oO=!%OWVkMWi>R86 zRtjCSX!yykcf^~!9n@QMFVzXTpSQ7DSKpp%B`7nD8S#D}n}4j$7Fr`h_}8EvR;wN? z+}GeL#vr?>u-$xUZ>uJK;92cEbwiqo#Z3+9Q;N6#CUE+K{vvQXjk%ZwP}YdLKxwd7 z_@QPbh}I3}?xReW2$9?KsYONy(2 z3!M04LkPh}jBO+63C=eJYgyUjce^df0~zF3;R%o>AKfTY9)YX$Q10}ypP0!ykC8Lo z<*tZs_aU87Ek!|8Bck8|9FGHP|8Y5&<_Vrv?0k2@bVly-txbh}FO{s?tr(9Bf5`31 zK{(&~8idh2f*wem?8q4dh@H-_%YP>1?irwOBLSdKOB6xTve7r2lA!1wBy-mm7PgFY z_Sf=HJSLFbLf1J3ES3&Q`MziIK0eNSi}5||oqXf!^8<~ubdZxCm51db?TPr6@KuuN zw=#Uf%{R8hiI%Cc-pR`JKhwbC_<|&_iBn^lW%JV*2vUAZG42n+=MbFiIG@1QNVu%;hS-We+l&fex?)M#k_boSUKLW1z zw79l45$-DUAh9-|ysuX&$yX%BrdERwD+*Z0J59a05CaPgZ_4hI zaga+GgY|*m(lcW&m%ZaB@5joJrZBvHmj7{qkl(YAn=N*`{@-A=zqb=DTg<#Cf z-~RO0zCpSIIn_tT8%bvOG-$m?bmGMSXenpyuK!w_e5CWjfLQYejjTca2 zP4klr<|oiNldOU9A*WE-WXcK-Q&nVE zITCw3ZlsmLO&@!6uq>!IuCyAmdm(HM@ltG#m33M5D$=_5oJ1WZi$(Q^b{xm;(A#wr zr;87{^M%{bfC^$nTn1|5WrapgLD6>R;YD zxf$2 z`>C_#iPM`aU;Q*u09lL+&skY4cO(1An}x&a{b^b(uub!^cZ)-W20oyXGD99d=-G z1|o@NLjE!Nun3|c>es`t!q$rZ*WIrYip0Si*WRH)sPDHL9XJV4LAb47)0`^ixJv$c z+qb1WRM>Ntr#z<43GBu03$P`A?_R2=&XtR~6!c9Dm%c+-T7hcsl}H=rz*LWM^3 z2>(Tu@anLxb`}$nu+21|e9vzrpst#kYRqrd235xV(q?o^V(JhkMIQal3(hs-mscf7 zt#B)~LUzvsMI>`ESIxu{{-v)RHz;Vn-Z&WZfK~Lk#`)YJ!OkN!=bn@N+MQ?G#ahCi z>lM1{=W6PRjf78+P^2-5Bt{I^llNBZlG`M%^ua<0+4!sBC?1LF1 zZa_TOL$+u5zX~%w@)`WMgc;Q104LR2Kpzu(-;m%81Menv`BuUZhm`wqQm)pBKjP=A zbPSlU;tQ0!J^c2a?omzEdRgK`$vmTFX1MR9P`*=Cj^_fl_61ZB zV$={E5hGF1^sMu?c(+^=AX`7-y>x`ZJ0fqM9G`{SV^H7e5(~>NRo8NCG`oxr(F*zU1Qy zoo!7Ql&(z&#S8sPQomP4Eg6^ACnN)tKGxK{+9lfTVG=7cn!jD1(|3v)C65A%<(~oD z$d@gh(QhBBQ%Vi2z2L(%HSs5PabAk*xHrAc#QgZARKOYlJXpwozTJ{M*n4a&5`9!G z#qqhAsnB6oItA33bc6JJwOf8@ePf0GF5#C+F}=|u-y=`4vS&ZgQ;Q^z0ju{gblR9u z&af!4@^(HpyleJ8mg7V)WFU(%Yj^f|G;M@dwXrbg9C6^!W#Bt|f9pesk@a}uN)e${ z@VU*pLo~iB#sen7lYx+^WUVvZ2j%DZZ2`(Em*U)z&UL5Kjb$mLb`s> zQn-P)JZye@bz{@XT2GCrd}Apc_U8)>xFxY{OqN8+S2##u!60FNT*VcWN8o z7WdouT*;a57nP~B^ax~Layn^8)Poh(g=^4g99RkISyZv#Eu}YyNH zYpS77X51v=JuDFWmtKlwWtpq5qt1Gd`{9374934g8D7-@<3bHKC^-oKM03#Khhl?W zmI->WBNt-00TAEIDzD}1ktia!CEX-+5SWzG)miHk#JG)P8q=8HdL1p_Y_ZGV(@N^0 z7s3o_CcD${W2PN{eYEkcwuI1l;R8J-E0)jAMd7LUQppr8jL;8|6(ARW;byKa_nqCe z_*Q9{`&j>o7d0=zy!6#~F|1v$He&OA-i`Ff9%FmI_*i<$x)}7|gc9(Tj`ofJ%|8KK z&flJ$=IRue;@Z5^pSNN%8{GDfrFMm34HSStbR%BK_%uQ7bRf>2$z6J=O%5k{*M7!| zw>E|gcmCp=YW&QG*nHAvdVXxBFm*Im+lNO{ogqUH03=|vU)Mij+6#214_rcyfeRHI zJk&<{;1g`s7QJ6tWCo!P2pi}Qv%4<%253;~U-HM7HZRL$^5_=pk5rc5!~p=_wD4Q! 
z7cWnQd9oE`<4;9LUY98Hmtw=Li`?gr`&%uYf=o!9^w+~`hD$EU z5CU=G17)O5HTERABG6#UO*gx#i$Ba~`X_+E05kWQnYTuIjj7mC@ZS&oL)RRIiP@Xa zEN?q!-Bu)_)iajd4%fSu1K*?s*@%6mQ=mv0)-gUa4s&*fGg5+|?cGlcbLYgRjAS-8 zZ>0czARzec|0o1``=mU8YRT{JcT)1J0Z7P9I%Wo*;FCwhgsgjNgi7#k;FFI_(dOMOW4WXWg$p2Lwb}f&?Gx0`LmlF&Hk>XG6x@2p9wxPele(z= z?uAd7mF$3|icARkX6vIj*R&*6n4uSM&Y^57)>n5v;nU!k-5JHKDDw>cLdcT0i%tUM zn8D;|SJG(_<1Z}C_}yh#AF5#SNCLS@61cs2#+rr7pX%0rG254Om~Lt^O(*}PDRn+E zc(BRc?ILakW5k>0#kY4y1c#}8I=9|^jX`lTJA9|0azwGY) zsJ`d@$%#}C;xT*3aK*qTl>abb(bYNUjU@#onlrerm+j>9D|uv2?4Hxgk|Dhc^G`2- zQ>u#|T`Rv?nR&2M!p1EiN2ShNzy!ed+ z?zNG1?C3YdYdDQF(U(8)Ha)p*>;?wLTyKuLMFm0#K z$<>q&GrlzfAW8m`QlFXnI>>?vkPWrA&Z1T^iI<-Qvv6yEPfdAGOmtVB zRsT%v7{~XQQvOOu*D3{KE3uE4ut|M7wKG`75^R2A2*knJqYn0PHa!g_ua2!V+*b@d zXZU#pGg4*5|@Y*r>N0fbfMHm08S+i#vI25hx$$NF_0Qukhuq{QD^L2^F7F!!- zjcM~$OdOtj*w>3wN$FL>HVU?91J1w55h9jPPY@k0HY&f&?q=8fEyH-)W*tjh5*9|9 zm>h=ZF?T0}aQ;aV#!K8%8V zFA2JYx)gpUZQGD173mbFc8W=z=F7b-T|@iOrYk2daks*{)ZE@lO#4nu?bB@llHpUdt1bMR#;wm2367mQ^aRDHJi7W& zW=U}!QT%}N#?CwX$bU;V!9e;-L3L4y_v1CtT()9S*F)0mR?nM!72J7sYB3_FNkxKs zt{A8wtaIy`*JP2R8abiq;B+%S3cMN|@>8#rW!A49Z1YEtX#rOtBx0wp`<7GPC&@yB zRgO+-M)20UJOnJQ4HBI1^Q9Wo%3hNWkUVDObyK8KQ@+V6qu^VtO2Vo-Oqhr!89w7r z;wIJ}ay*GqWy&;IxbIJ`>4Nl=4mrodE>t}XEV2hc+|8#yg4*6rKL&ITd!hVg&ttBa z2(3zIc|@|TpMIes+>SkO0AhZY1J}-s1lnh926@*y0nfY~pQn6C8W5Q_H-D+O#@_6A z@at%Ql7>Z9=tO^lw_-Q-{A8APvQR>gIc>>3_-EfM&nCd{)Ikes=R*Y~wFK)ex+6E76%35(9z9|52I}dFVL%g4X0#Ck2-t z4UwJ_#dodw2c9jk_+Gx`SL-aBZp*+8SIo#{m`kkGtKHZl3C%|n3E-3dsnt3f{R+Mr z_J134`->r?cI9;6y;@_-LxKP`=J6Q5ig<+tO9aD68p9I-KlGX<69)t*igZkGS(9sR z?HTjkh}lm?ZLz({NX!^(dPf_h1Be3}6fdJGfs8AwI}FJ95CB0~{M0E-Qno+lH%Tm` z>I-SkXPoOHv`9B7{nf5zgf^Pd1)F)RrmIbqSjYNlQm>&rc(oY-y5p329%>+ zedbH;yS}34)u%R>a{U0r)*Op2<&H8oY3ONJnJkbh8ruwWbv%DHVt#IKa2uOu_W|pJ zR;$f(^#U;n(wj5kQ;F&H#l*zM*`&|~mNZ>`HW# zxd15$JnI#Ivm5ukv4zK$vYc$wW|lXs>UO(1k#XO3{o#k-=U=5JahCsR3PSX7_MWLJ zI`#Te`LuV2tWA9$h@*0&xXBq-el2%mu2ncQ2-B$Dq;Q|JqAf}C+K0yt^P&DX_Y(kT zES4W%P?!w*SS?ZDPF?4%1#4C%I+hAMV3pdo!nE%f9FeCgP8%lIv1DJfoGL8aq6_wz`EdFP$$Gb zUdHNgrxW`>PA3hI8Gd_5iPfY2O7O2!$u}E;Rsk09h2{`8^9@3^?r?1T0vUEt6C2~~ zGo925?cn%P!@FwoJX1&J= z6%?J6j56p_NXlR4fPEh;IoW10fU^stuu+&i=kVc=;hI+e+VAf94AM8BHPaav za}C5UViwg+0@;}>mf+0X&=Zk?uNhoAEA(2rV4ic0a_Xp%9K2Eqn{?#qL_+dmJeiY? zo^*Sx@!xJh5BHwy68fdC1ybL98luAtG1JWHKP;v3#hFlew+;}BHL~WdClJPPZ{l9u zkq4$nbstGhm|akvs7ASwUr494M;olQ7|J5Ro~J_Sd+dVqBs3s){HeF!UODh#^Mfj! 
zIup$Ew0lKTpAjhXVva98XNkAhU9hjXg{JwkmD za~1A@Tg+BrA<`YhSRUU^8TF;GjcdRTuRb*2G+3x2WjKGETzuZ-0?XQm%Q}%mOA(^O ztE7#agiMHjJ%YJ(?0bHBC{kK@hZy3YaL~T_#{TzE=FR*wl2<(2f_hI0wIue8e)`9g-%ZV)o>eDtGCQG})xRtjC!7>&^Qs}oAB;HK zt-^1ran*kqIfviW>;BJN_FHm+>yex8*#mdSBzLpP7Y^0xuV7Ei5L$nkPwlZ?!ugu( zF2=L($l7>#DS1DS`4XTSIYCPe%ef1latrJZG<+1~--nVPD}Ht5gQe(~Q1{UzgGKZ{ z)v4y9HR1{6oL|*Ymm~`(#wR6Z64Vz zW1}eyN4>=NWP7Kg61(f(3A~chYwQW^fFu;BRm%^qe%oxs6Rjnicjw*naSZ-+B&rWTFSZFkU+Xo`vx!|3vHp_9L$y1M`2&u%YG6QV+B1|J*V+yg!Hy+ZAw2Ue*WhEF}bcDJ8u)rRTVP z5|SpblIxOR*)774=6uB8%craqREisUx`cE}nIJs4n(Qrtx&L{`-jqHbZE4l@O9>gBK- z-o2XZ%TdVpveAFzs%_2pL&1aG+J@&?6nNz>h~u5s+R6@KW3YsksU{K2fiFe$;xueM zfr(a}%vucHj|2W%k_@pfr?&wYA+0m%CO#|y7~Z+QZLA>w(YlSs=G#h>dHW=F=Y1FA z`Me@Jzw;*tyZ#k7BeMLaxoY5!=XB%-eW548TdGf71E_o$ANwgL0+VOJDIa#A50Ii-Jr zls_hfc*TE~Z$+}iV0??!gtKDxei3N;aKg4k@FSJQ}V3 z1GF+(t>eVOHgkEmU7$=H?0&NiBOD*QA^}32996Ir)pP+EMbun;=`F1nA0Rdq(q9X+ zTny^OVV5eq*!mryE*^xZ;R;#yuaX|R2bl3*NrjrZ{+xY5V>$AL*u~5XoO{9;UOGxJ zX1;FFg=1E=KwbA26utJ~FDSYPR?zHq=A})SZl7+Zg$*|G7VjDbe11_^t(x`rPUA0T z9WB8du!o?_ggsF_c|&+_OvS5e5lx%@wHNsq;P^%q`amSWj_y)mnzZktef$n9)#Nv8 zWK`en6At;)gB&fuin&Wx9W^WYv#HYrNbz$1fWie_L8Pyd<5sH-z z^El)I^1JofbYqRmYyO2Lzg|oWd$*fyi2D}n(1S1gtCz@|o_bV}w?x%{Pz_6p405?M zvocV4PNcB~Wzh1-kCA?G;_>3Zm)RkfW>J4F1Pr|Bxo%af%%5MZbqX)V)$Zvy4>LJzE|A03gECL-#Y>ZjbBR zm3Wz}FtH`J^>g)GTeVXll$hnlw)DoWVYlZqlt438jtfM}g0dGE5!lG|2(D~!M6sOqf}ZYs>@2yD3f*R@-(=#qR`F9e)70u~p@P0pMIyhlP#NT!V zNi?;|P<(J%r{+I-X<6j=mNqFFG~wpoL?aUKcAKASJwypLN@MJq>6xNFhOSEixi2JV9%F-Gy*8;KKdaSz=_8ZqY z*UC9SolyOTSii~1QV~1jTMMUV)GkrtX9OO<1gw4rbTG8nzeWJ?}PqsRP z#VT;7gjAX~wp_}JWeZt8Iwhe;hI-wGrEzap*bf+8TuPtr=ovZ+aR`=n;i8+TMY%Eo zB`g~W36ybc4og;9ag7*L2E9#5vb<%i1ukzfFfK5A`%T61*S51GJhPaPn?U=biGLPUW~0;B#h@W-SzNh{KWgxWl<} zhNRr&|AYnXAr%nP5%ki>xf3phTbrb$K6iF-$l#3Zc)0BF`s))Eu&(fISR#A}jfSAT z>T@o@yJ~gprtsqKq$P%>6OzxibwnhCDwr zKiNncY3?&_0CwmsJpeKz2TXDa&9vLi;HvHr9@-i_@7aV9S-1*<@ zF~;AMmvwbJP7;tuygGocqxR^m;Hhf}Dr(&4Ro}}ay}H)=T>OWIhJx(zYqbOxF+h zsi+IEGvG@PRfQII{4PDhdP-)kfBbd2Q4Q#mbb`g>aQz>gV`lQaT%%t$-)+%1^xkqS z?Ly7Bh8f|Oo01-O0B2_s>cCpTvG}GVwi`3kl+hr1@ojOW#u|t`@A+YBF_@oX-3|no zSo`$4kHde^-G&Q>gZrYNkH=x}OhV)IHB@USrt7mo=~QAy?09bGdV$H+aHDo}r#Uxy*&_Ro7RiTA2k%b{ zy~2oQ26Y8F`5tr|eVnPjZlGx^@d6vyuY?AMm%ODM=0OW7RJ`WQn-#01WXc%=c8o_# z8u989mM7b|KJ;X8z@pO})tEz1|81sGvv*-WhJGMTSswA5oA_4xwB4*eN&4`Wrz(({ z=xQ3_gr&KgDP|63xi)q^VZ;jmabjV<*|kT$G1#v~6Qmd;otB?acURm1*f|?5a zVpPCf@CW{tR(&@q-Zkrdi={pvFP=(lvw_<=q+|W)#&&qN=64brc}DQEGZppGs*R_h z;u^zmM=qS?H?M*ONA^+KUxLd0ddn)L3S#JYQ<%wt2jcuhe;!2488%J?_0IWb$V)9QBl#%VNfy-3yTIi~)@XYdEJ#@4}#yNOXOu4jv!;u)F|}!*3`<4MRnn zz=5$8%(mtwpRe8LoAnt(L28G}#J$@H4BD@E)rFoHjwZ%1<8+5bHMTws+6sT3{fy>2 z^9YJe{Vp=@)fx<6Foqo3vmH$)b;hU2a-YEbU^ga=Z_i60R{P{9FrcpEy!qi3ffj&eDG)4yqR! 
zLDPM$JmjrIs!DLdU>q_BNNzj-@_HGQ1l@CDe+8ffS$VI!t4aD%Kix#$wCr9*2U(t9 zf?aS_miidccyjvBB@gG9rnWN1l^hp8@t(-PtU6_LK^joW;F!$AZYp5B>ZBl?*HL(_2Xjc-(10<_TKn#d;Y+;@*zmW^L5M-IL2TpH$XVQ(dOD9S`}3rM}vr) z2^n~eGi83-JvT^N{56X%6y$dLjoV@2JZT|5p1cRE?Ta)yZ-rDokR!?&(BJMSLPBYF zpWGz?P?7ipCEpL@NF0)RFEA}x{D)i33J3Xz(!Jfj(`ggdkW1{2y`iDfQY+s}TCjEP zYo4u!()3s0#1nf(pwuGaF+)`xHkkn)O&^#3ZN3~caNiFpBVabD+6-VDX;94L%}F(U z=!1<4^5Hdik|4nVIv0k=V=*1|*4>ozwn@&?@o{nCG(bHD=@0L)FEgL2iFk%c9 zSFC56?Q0Huk`2L<my+Xc_P8%(e>ycc| zCu0HSmn^xkhEzGQ!blW8=99qc07>kdKOUm`&^5OoMx-h?g{LnoReX;El#WHpiWP) z-Iw8H>2Rv=d0&=UuoLx`PdPFGp1p{t8%6eUj6r=Gkk$Ad;C&_MZU}YAl$vQzc>s4( z842CpO#&g)qkA_Q)9G@ClAG?_x75bkM_}<&KP<-=q1aWz@0?BFV)T3R4)UqpvAl}+ zE&Ji@qH4{s-bWU8a4nQZCO4)1iAM7 zW|MH3iJ zc+ox_D6jVDgu!?y?M3l@gUgF-BbDZNi(M4Sf@j|}csd~QURyb_)=!^o*A z&^}J!8i17H6w6bdf9h^ZWAIq14mJa`r8_CczA24T{_k9r7t)>TsosN3bvopKhuA$I z_1@rtoE4mfRTU*KqsI}58_^WF-+iWVyQKZX5ycI(B{*Oo{>OOH_{-dnQRJYuD6Y;n z+*w-Ejn_7xm-xPBH3e&%oDc!F$(X}=f_pWA2IL0~OB(gHV;NLc)1b-&v9gv65*+TI zTV;aL`Vo;HbQR^sCjlD+fo0{%iM=b**N)H`itw~X^ z+ozmho0Mk%7sZT>+Ad)zKJMv3KUCshL{9?(`NFvXLWTa7;`MiXDH&6^g@*K zcJ>St{=7}~QCw$8-@Qa2XpHmH0h)?OO2w-Lb8t$`-Dcc(3cDP*I$gV33FB_g@#d~- z+IPzdBp(5d=$b6Prt(1=FL^elsFh()@faNa3K(Wtwqx3a8(l4l_A|u#J!=D6cWjU( zFJ!+s&)Uk&3(;c`pu5j!r2nQHe?f_n(WDbEaOEoj3@nD{ZIjG%baPTRZ;}S?j9tn< zWIILdTs|c3ATZh^1)>OB1vtTwu|h#e`hcwLu<}+iF=|L5(e^~G2!3%AF*j4q_)nnK zXkI4G8*oxlKz0<|*ff9(R`S+uBonA5C_p?y4#T#jpilD&3gy+dKVC9JAWa^3yL&MZ z2l9JxHfeWS(-_?OYq|*D2nFwuN|A^(ALptU210?TuzaXeI$Dh|V5<_O5#H$y)S2Wb zc4e{~=*+hG!^-Cm7Np)JgSP=G6_A*phe>z=BT5)1LHWjRek&5%0@{h{#Q6?lFv=(W z)+6>B*uFlqr7$u6xlU&!zXJP9uNC*lTXXAOd(kd!1LGH(WZo4HwcGLA=zKcf3FxH5 zc#%S++l{nTB>5p5a+FZ=MBGEdGl{b6HdWYAY!o-9KokRct8#9rk``AL9kggFh~2Z3 zJfHk^>Wbq#VLPNA?N@I)JyVT-mpdk0e2>H*v1ZzxBl~E2WQ~=ukWpy^oN^F{WoO^s zcSW`C*~qcf0CR()GP&j2fZZgiVV7?vgAK07{Njat{5eD^s5e+&cou)alH|}5KrjZ7U#|eceL^sg zy|2P$4wl>6k{IbB4TLX-qO#USQnghfT}sqMZ?TnNF&TVM~6< zQ!{sG)`nJ!>y=%YKN8@TgDeJv!q>J8D9#0S`G4R$5mDZ~fF<2q?0xkeQtgRua=8I@ z*t_7X*^~TN^q@WN;g9HKLS-WVT$Rs=#GT+H%v2;JC>hApHD(m<$_D`>7{D=^Rq<+q zyrI5qM(KBTAJRYfV*;P#{q^`oYv%+9xE`;XraT+5cTPaoGfuFjh}#`NZehAOmj!e~ zK+wALx&Tl#(MEO0h_!HQ4?36hdvQw*{3D} zzUO-)WRYF~Vz*V`GkPaLV0CKC)XLZGH8EY$V^(|dxvP50!2H*fc;N(4#pM>xqYopP zCx=QK-Z}2W8W35>`k{m~L8QN~NIb`%bOKmcmH<}w|7}%chIWo?+f#0&%x z_f$FlYoJ93<*Vu-e>M%USSjX**>!z;U4q^8qK3ml1Aq@BU8KA6Qv!pm5Cv#hOiw$4 zh%G)AJGie9uMGmRW);=u+X1uFWxr;vmwXK;9hyHqc}(d>K^pt8C<*O5Gv(~hLz%nH z+#tk#kbgv~O>5joce!Q(l#hc;(&)V2k9X0}Wvu|85%Lgqe93meGe<5fp(QbMb+j=C zeXr>!V8xF~PXsOWn?x`0eBvK~=+HBfo3PSG&ocROg@a$VrIUAMV!fXBI&108jT}TF zUEOB?QNPD#Ts#Vdz#c5^7z^fDAGC&Z0dq?M^UEZlmnT=gezYWg>n4zK+kwv&qb)=V zZIVWL#{K+5bn!vAfB((BZpFsZbj2bQ^Ji|hwq)JNx*XkWLc;p~>C$lEf!P}LMpDV^ zAm5DtE&D0fpL$%vYb$8u!4JxA_#8zXvh5VNX-?Ib_jWPLPiz0SqV) zR8qq|#`-ybzG0z&k39~$LgLnVUmPJIeai;_jImdL>WgTthx=Y{sp&kZH0}1F{44{W zI3}hoafoP)*cs4YkUhIgb!7v-k;Yp7&``Szi4CV$adm#3VGTG1W&+#eC{t5I#2+Bu z7#uCrxAN3qjul-7&Y`cR@{?z3%&X-j!58KN@9oOLip{i1Pu7c(Rq(L)p4Ev0P}%+R zfH7XFlZJPA!$~9tJdOcEjod>I5?92JpTA#aJg*Dh3JR%LFT500BC03HOG1 z>Z3dslOos-OvsKgR<%M^6jqFMhAkC4!))ttR<+`46>4HsIzVyBa%Tnl&2o54xu2{t zH=wzjq)-h}7iILbYt7qT>);eoCOv#$TdFb$fJ@pv)D4Q+Pv4{<`C5-TA-Z)2hNuq86biZz>sO6k@2+iltM2d|OIpjbr0?{Q3km--bd)>q z6M_w%s9OKegy3a%9R>aA0)@T~{K#?jHtZW?TcsnhgSFlN7U! 
zltI@3=ezt=-u_8bT`<_V5Cm=~ig{V_Kt;>-;vgh3u2Z(lvoJ49<-!WOxckp7VrloCc|fcIu)Bd5&>8OkcHA|oZYBsZQ8gB1 zlm+*?TRs_GD!++J+j$<%wK8?Me>cCK)#5=ov=H4tn$|3XSQwPJYuTDkQa; z6oWRGp1*!2EhqLpWR|{mD`Td)wk_-HdDC3=buI4q84=?_vea*yUJQgagZZ;{cH_Yz z{HibRA*TqF@CA*^L3cX^2uY)hyJdYiLL?O00Ra{rL=5 z53So*#PyK`;QCj-JRN0Sr@yjiUG+?3r6l1x2ZSddQ1o=-CZA0PP-V|NBj6ag1yb#S zKt}~7vFM%$VM13wOv`3o;wNpv4D>5ZJ61$*9-5+FIN_ZJW>FF{;;pO?=(@uL1)+D> zNu68cfRoaIy@fd9SkhGxIZfHki+;y{3n^^o_Y|b6W_tdYVFltuk+;mwwBFnYIEr#H z&MA`cnNP65=^FZ0I-5B_%6`;MD)}alzkAv*UBcm9gzfj-f(QvVic8YONG&iMiu{vw zpN>pF&Oc=;%TsZ`l7`xNJ{G%XAJ4ro62TgI75}Z0(c~Mf??aVeF`_GAl z?7_z9)*LVQ#WG`g6+R1~6no1(<>je%%4a&hA(=KiVbDEiuS&8G>OpGODLC?IuIn!Y zOeiXNqHv+AK}9XpPdkX~9Q^O5XDIC!e5{kQl%aqII!G%l4QXoK=L&JyPWGggy261m zvHdvVLpJMo=3QNcVv5u(*1cm|$t6xUEgW4zeFasZ{3W69{*{3zA>9EP-#&H!b20%)JVm!BB*FwFj+=IW zIc3SH+($R|CI7{ACYNIXl3va>^KHslVz}=`NK8pCu0N8h0z&jTV|#&DT&YC(SnPCK z?!JPItETId0AQ1%G4DW+>|4LnByMG(YSk2fJQ+Vykohg>3 zT%mJ?8rDM97Xzj!vHunusN(QcoS_Gh{pM4@P8{%I2TuL0Z!GpbN%amctSn`U{Ochc=|&-Gm@O!1 zds21070)R#3V3V^#E+9nz?B9TMDLp|M%D=L-iRKoK@mz_rrla^>?gJ+iysfA)Q1Ts zipn<;o~)?xctq`;$D(;MlK0o*yM{#K7QhS~)8O+R7|NS4U?{TepRFQ}Y*o zUXx^lo;i&0NZ34K1{y7hf1Ucu!TnjL_^Rf(LN%`3sJ#bnNc&=8!}zoO6GiG)3T4Qf zy=%si@RDz4wUd?K4|tf8^HW*L3QUjo+XJTAwAD`AF(@FY=f@^i%$h>JXWbmg!|v||u}r`ZmbyxBVFS58=M0m~qaB!>FLilHm*E zk5?7T09gJ6lcNh}PXEfwclMe@C2p4sm2_A#$fR5mClbjz?ePYs$kbJ7)(hC~pVjoF zp=ZN!V7<2sGU(a|J74)yckW*H-N>+@Mwe6|Ir5SNhTbihvIFW)g|390@LSrHm}=k+ z^CZ0#-X5=C-qe=CNWYcZwvl;~FU*BmT19h0@*e5uEAZ#%SC7ay^D&Qn8JGRn%JP8q z2o8!(mGs@%a?R{23v%{hw+l;SC3A3 z9g9mF+S&#nZ7Ont_4OmIr&&%k9;d9KevDEP+lQnGTxqTi{9fO${%__FD$9ek6lCj@*#9MhPRCHgSQV@_7Li*P#~nB- z^J53(a@5`-GqGK-p2}Ix=)SARaUaA7_`n{34;1nl0_W`=vMt6M*p}jD75!LLowCN{ z(~-5RxmaE+F8KAi<;T9&PatcSTXMeP0J;WLJ+M+Q9Lr;$ru;JWkw?ErN5la7m-EFJfs{zoPB5kahZ_8JHPIEpQbOhO0LBH= zcHK42J?rF-?6a&fkE^-GhF|yrGS!F;M=Vc{0Z;`Rw`ZbP1CEECk(g1msHa|IKT+nnEM+Li|(wEXxK#H#yUK+g^7^z`5#H&r*D2erDq3V~)E)ww zmRE@N5U1%;J_kX`n_j(*;q|N{D3TK=@8f`#MQf1tbb8*H?}PVO|Bg{Yk)pwm%C*$^6qqzRGF}*uOE4nZY(r5 zP~kYyNO+4#$p4@|*f5SDyLr$2rq2gB-Li`VL*2r~4&9c`_-`pzcJN*(1+Ig>Eonrg+<~yMeQtI`0CoZ$2OUy{E@wW6bhSPKmr;0{;Z+L~1 zw9D$+=X*i&k|JRR&<2odavbn|atf+c+czAf`n<%*`94rMQYWA>hR_5?@UvofLtR7aBlV#|f!m#NI+ON7MW^LDmc4U?C)p`e z1kxB;hI_iWu~NC{5yfSe;XDm%XN!5((9{a^g4^lLknr9vDV-_h>~#+Pp6D@OvQ5&J zx0)Zk=d=-j`#~sq{Yoqzsm4Bkzrn{fiO(^}(co>*=8b6y`&!(5r<32g@(V9A&l0_} zoL8x1PM+$PTF+PzWBg|x&QnhrZOw5O-DfR4_L-qERT8>auyHi&u27Mn*VN9PooE9) zT81E>Kd-OuW%9%UCQBEsKd<~lW^}tY9F%cN8nq)?Ks}3zWt;_EjqCv!yo(H9TPM2i zhUQ&7Wo=Z9+Bu>7d-HN5#rBoo=+PcCFtnf|MKU0pQ_w+@ltf{8hCqG@e=na@G3^QncxO`*AAP*ici4jKj&&B9oWo$kiXZr_s&>o65_lFLjApt$o0q%8syUQ|lK84i zoz3VwM81dMqCF;9I}C{qy&k*-O=_9ua+|Z^UhknV*a0nyE5a+vZAYu0cq)hLu$2APvVV4Hi{FXWtUT07D`%*p9(27X z$kfJ`EdQ{; zzRMBJjj$0^=V#~I!FBsixkYCM-1Cbl4$8y&o<*I{VLh=+((SY$1UB z<{z%|pP9_^3VBaBSB@zTNiB$GLWoE_#=E!{$`Oq5j;!b9H~RJtdw&*+SymSKJ*YNG zD_2cY8LtUo{y6nO43GlSwYz5bG!HOila zZBeLL3eaUp*kaEJGP{|ib(<&d)Njo~uMum|r7c)z%>8Lwb`gXi_simv^E^+qXBDe@ zN6tp>Bj;D(P25B46k$Qj4*V@+tpheklq~Cia*T(gk)b>9RTJbq?#^Af#L`Z8@0QSb zd99wc>DZaxoqATelODyu4G#tn1W8#~3&&HEO_W8Qd~JkoRu=*J9+cN{ z6&p^@E1l||)BD%>{+VS$vLBH>hQ$@Xgh!U*jQBIG%d|MM{Up$mI(POjw`(`gMvLMv zX`03@R4e&zwfcs%ZP#zy)If&-g|8w2W@IxJY(iwV+O6!PH}RO)MB8R1^m_F`j@pOv zl~iP0&X8;FC80Up%YjqdL$tE0#<=sX4Z+xXZa2d%XT#57wU$ew$8T-i!*^7>n}T-k z#Hwg<`#$Md7kygzxac_+x?W0?S%Q}0yQHNX8M;LpX&Ab@rMtTuem6en zIp1^6df)$H&0?6jK6_u+j<6}I@^=r`h5x>JU)r8-)yXO~7cd#q3wdd7e5iT>#ZmTo zGcV3fb!s36ub>Id{8_?odw4Q27WfRb;$jCbinfT6t z&6DZV`I8<#$e<@|p%;r|+B)y*TX;TUF&wXly6cygw9K7>r^R1;5oM{1r!6c0(CCSgY7VQ5}umXoHgF0l&@{dyS#|u{zh$ 
zCs7oLA21%#Yw@U=a|8eUJ&etJBg>UgL)e9sG)g1Z%UdxWusxUS`>50$mS?o)^l+lx z8t2Ax)ez@L94-opj7O*Jvvbg)Fp@IN`keSF7g^{gM~fYMV=fKO%>E_Q4!(c;!AJ^EWL&40=g>TU{fLWb9e4-D0=KIwrQ;KJ>j7$MPBN zC*65t5Wezlw`!+qoUd|>!*o}lpS9a@K>Q?S-xWvtI=}$b&JJrn3lUc6PCznp;ZuGtpza+*SFsvRmHT;=_KNP50qT^^%B-D=`eWvs|tig_)5x-C|kl zYW-2J5SC9q#qo)CXUwEd+(efG{#!+OhYDtdo*S$=urG4kJrCy#E9BJ|&7y%S0M)al zPI(Zyy1wDPN)kde~_RiY)zMu@arHDe3WkdPMt3RqZ^)4D`jpIR1T(^rtalNB? z`s-|oS|`^fLa$z)Bdl{qXSQz#LgHRzC7G5D>PGmF{6Z&FE1xTDy3md5^ZsQ(qbnHz;(1u$6eKvBf5*aG&fVI*@Yz4TU%n0qm<8L@0!&&pGTF39 zXR=2-x5y?fUGPH(>g^@L_3s@;%dFy6}*M^g;(??SWiXq zMJpuQ#nWgZ$fob51K?epL&LWhiL(w02eH@C@)1M<$xa1EP8OZR6D_}vu*f1Yl!7Fw zMMjCi`8bSBqRn>KAbK;=C|DAVITcPP=scrf@JTzBuYC3tGdzUbP?XpDo6^Oga*V-4 zOU_P-#cUhF`-lvGa5 zO#Io&Yb$DcrW6DVN-)?WSO+d-D$y(qY`E_iUUD7X5~2`)-+b`v>P5JMrJ@6c5qUis zNK;smq-Y}xhtIyb!4Bl{Pu}Qu2VfOHF3%{%mJLS;vi}l-v~Or5?D<4P?f% zF?amTQ>kj#>|EQnE|!^425&gdy_U%QfAGKpYKZ@VklDgJff50S05m) zMIDqa&#`%3DC^47bfLida~@dDf5{p!vv*a1?mk!YQ7K%Ih`x@$Bu1(%mKkl(_i|nC z9OsinS({{k%NQ;6QUcWujhME8c!QuTfVn8-YyNwhPo}^1f!Rgvo1Y>}mi~R2mSBop%m3@2QF3 zMm?zWp(lIW@QM&2eZo}jYZQu|PbnwB#lf zOzJ+!DF-VMy~kQa;`z^;$bon#%wh7kwA-Po?^3QRZWRa~nb9cdeT@6yr1GX0* zV;~ZS_mmc*O@H!I)uCZek~ESZV-le(pzae9g=7i|0f9$D5IU`jpj@^g5OXd%;{s>o zVQP_~V;bAshQG}!TM;Me$CINhqOv0CkqN%Ux$3QMHUkr^sS3xkFP(x}Ux`%+zF*i) zD#G)Z^elS_OICEq*z5*99DF#(4NCS>FgWgXVaQ~gnj0``ufh&y8(?6|fJUZG>+{HQ zlBu;~TRBIdpQYXZiXv<&UjxI>HQl~Nc26!HCgpf|UvY|P1cECy>Mh^$IzVd5Kp8(57x^D5$JCY}3bxT7JHvV!TB+w42-!|2f=j z;E$YY^Eo*QZV*LeVoUv#{TgI?nG+w90X9W_6IEvILGRZve945KeVq*r3BU?ktiTR7R^>ExU2Mj> z&_no7>Xv_La#WkuP16e=E3zGDO9Um{)if058C9M>(uoKaxi5*KgpwzA^F!2m?lsiq zeq(*#Qw0TcT@J_!n`gb+05Wy{_!~IS9@~#z8=D_=H=ib}7kH!-o_`HlvMRNfa;D8k zi)akuWQqpb?3;#`hyb*^x>Ws$L2#BJFUY6RiT?KBi@pN}&8VG9~F0uJJY5tkI4yDg# z=v7aoUMO)H8lmD>HQHk1)clpm@XF#Q3${2tecHougnm;US)1U|#fMz;-56>UbcsGW zQ=7nC8oj6WTF3S_Jgy+ZQZLx1r;lt*VWCd;#9#N^d{nBPw9=O0dWWU$POtFUN#wSM>rZtD z!hVu-UNzAQi}nfjqqYP2g!cLKU=C07k5k!ZvEbRcI{Er*`7V#C?SVjB?bvT4YhgH+ zj%KCXY22^)%>JKw*uHZ5Q=`-UJvgA~BvzLcRid-%Gf#NMpP-{vp8!yxAUQSknGE-1 zh_)y0x*5+Jxkc%i1H_(sf=E>TKSDQ5xQFc)8oSGX){=k6VDb)Hy9lqc68qV@&#!}H zX3v-lGso* zt;1BD#`W|M+#!!s?yuj|dPOx9_*BI5twy@4s05%pycLy#2M<8$IL0}H`DbWbhesMF z@-NlP+g&e*J{G_0K}NpKT-T5|b@ZkI?&o%8rpMxTc+H;#Dm-9kQ-zM|q!1-piF>Hk zrWHtxOAEBpYnKcXjAGea)$?(dL7a{v5zYZCFYXSxsVrWDB{mNcFpr)v*q7c@Qo_px zYs)!QE!1^e1hJvL-sfnfLIPnZoTbMaXwe?% zG9Szm$lFML(Gmje#KJZv+9NNS#0BvsddMMu65|-;0?8Ggm5O6DN5)ZwsBwbW?s3{;E|kr~#P;OM>;s`|nYH{Gx6;@VuTklL&MdPXl$>bA z%ItDFtxqCgH+>$z8VkH{gjQYtG;vNddp%G;PaaW#29+CIaIaRRb`m&*uSsEQ$1d}tG(6E{0)F1;`&w|z3 zs=m9!70W{>N)lL57= z|B2tJ^I6-+;0%p$5*K|&K8Y1y*oMc~m1j7Sd`ip;|78IiJ7c=OGjeWXxg03|xn$Bk z7k>V8@!Iv;=wO}e>gqsMm0RgN6CJ7z+l^(xltSoUL~E`q`w*4V;c~@cbnkA{n!e@1 zL=F&o5v_J8x=Z1*O-pEpWJ6n;n~g{xK3v$IG?;843*EyDyXGq9?+olDpMLw59y-%= zIIrsvECi1vPN@mKIU#>{YQ$q{6?dd&a0`6fR=?vL+% znrs;TpON=BYetxZKp5LXg^;~aS^CX&c37b09_VoB7cPtxsd|Ae=}JId#`l-?K3eDT zVnT*WTd@OlC;c$|3j>s77S|8IVg#DrLAI~%?2&$?9aWQ%z^9~^J<&!m$a zUu927(@JD;tRZnORJqwQnCxo2$q>{{MTMg&pm2pN0D}8o|L+$RzR7BwxT9=L-F zzf6DSV+59TW)wm*57wr2PieY*OQq}9#PpL^g_6{cIZ}#^D+wenpwi+81gDiyz{^*q zl&?Fr>Q62WVRS>L3RXzFoC`dpQF@9eT?4wvmEXjE8v@|w2fGmFr6+{Z3aF0FQM2?S zy-4{jUVBGQi8!{OFMvSyDY2zrU)V#D`U_ILN$3bbhR*&$TC5Ec3Fb0FHrX*QJWUQx z8bPTn4ummDaPbcXmBf1@1cH=!?&QIiv=V|_Vi!e$rq@O5)yL>TX`|QfWj{g56Y)hF z*^i)Ir=)S0%{={$rHGzRuDZ(dFpp)a+a|PR$JQb_bt6y6g#O)zZX98o5Yt9{_EYwU zs?eDTn}|*wMyj_-jaIx-D0IsF0|MGX+xGNNZBq%>8(P0p)`Z(Hzc(8M*d+tO4hqd?C++OQnsu&QQy6AG=yWYcd z-zG42{ni;q55q0F(LuJYIf&A4YQcj;#6mG5oJdrfi%>r~(1f$tfXMy)39&D+mQ+dQ z8NdG`Iv1&5dZbI^_4fMP6B;Mk8cPTHLy#OOuuA2=seN1ijtY;Y1kEsay!VSJ5VmB* 
z7fn-uz8T|pEQ7oK_6xI4PoW*Vk2F-6qOp5p-~6U<&gSP3!-!%K_IFRSB@rl~`u4$| zQ!)`$Hf`QJ{sOe3dtwx)AfUPwb7ww-jFBp2`wT&e#{*cb$N`3LO{`oXlB;jIl}9~= zEyMR;;Oi~ATpvwxx!Z+)tb!;kx(Y=rP6{*ihk$o@Sbz%>-Nqy@SJu^jP}bkoO#CSw z@;JH$pXOy3#Z+Leq$jA9QqrAiO4mEy@+l?WJ{>KOn@l{SQZi&}4;MOvupPHcoaDFJ zkOKpQ#o^Fd%bm91*AQD-9=70#CGD@NOn&eBeF~u(lzVWajvK{X4tQCgR*Bu|%~tQ{ zv^`8;ik;K9^5}S9cP-bka~iZOY2{U-s-jH&m5%Ah@uvk!tY`wejTU&VKM#65u%%T9P%&vxHU#(@F`_XLG-2IZbOIB!= zeKeOe?wVNa{t-MEE}g(?#kjk2W^$oZkmlY}v>LVM95g z7^K$Nn&bv4fXAvo-octsNE_Ij)V(t3-o!-q;zK6AGd3XqH8ucV3z=Ah7zd&ssVXqH zjdzTvSPE7R6P6g>QQoG9Pfhscemis4W|rIKC%V+ICs3A4mlAgvJlVwCBm+0}mPUD{ zDoGjeQd05Y^ACn!m%kw9-cl>VpGWlbj3RO=>epG}0ik_*8Z^OQlAN_L;U=0!Jbir} z$a4qij=wUAl?Acq|C0t?_Vc;)J=OzJgnraB9pTZ}hb&oHlrVz%_AjV^tyKktss z&n`=JQgvlncQL1&v8uh}Y9zmOT#p9hj)2;mXg;(Cpg%8w{%p*_Hp~8}XZME)q}qr= zV)O2wUq$u5*-eJHZHX0wV2L)~WOR_Y`)-tDJz6;KA84n7A)Ds4J#Wm3L3;S-$j;4R z;+68J0)(6EZJ!7VD&!@Xi|CqIh=+d@2A6E+udHyrpj*80SM9|j#JiQMA)3Ra8H^IN zb<}Ig&=-&XLb#0Xe`cK^N&gAOL^I4(s)5Lfe{C(zY21ov|7Ed6c&!gOtC&*51JE9b zNxOS!O;o?E=}RrrzYKQYAPg50R+%rqTj4c|P)TBTJPj8@%GFgVjNdtBh4T>3mY)V( zNbt@b!2!st$Ti~4-i1abytMSVqp*=-EXjh>u0qoOKfgOrkd)B#{I)}dq1p{N&KZDD z*uZ@*{MX>3;bN|R&Vp=>pDHkiXEHM(4zF#!uR3H65MMOO5ou%O(xCq3u+ILW0N}1x zf4S@SK|4rh2&<7gO{{iun?7^eO}F~{+)RVnpliM2y{#OCannmkk=}xu=nz+3PS-V< zzCI`ykIh#G4+yIld#%xOtI?@feF3$&n zHU_MQCT)9gFS9a2LdAa3cdGwU`pd5SxG z`5b3K`N5lo*WC1`drNE=O=JI@{15(dO)a)c{n#cNYz ztdRJ3av5$LCYj2s_byeBsV%c}c9DFQQqpie7v|)5wP6l2TK;@8jhL=Ua&O<~NL+N; zEJx^d2WZAWNHyV_>JGh`GA{y_D}N{6MPRz9g;R&y*V3sP1tT4g&5XJIhrs#)3HK0z zkU@{g|K038f>?(3K6~VXYToqWoDf~36Qi}#%1bx2B7^_ECitx0ZS+9HYCY9?_&Qe# zsiecN4clX{U@0R84Pvq86W7ZI>vJom?D!@>M@pW%b8fvtP7sPVR{zmS??}nA$_bD3 zs^K^4lQVnKnvc+cD+z~l^JhQI7(poJ3#LH^i+@fTQ>SURad^pT5EUL7D6u#5pObcU zHrOAt^_P9M$|wm{tX{fHPTA0k^|O3Fk4F+ahtE4{y!v9YE~1*;N3g##**_&4WTCX3 zW$O%kZkiuW$rXdiWco*3?XV%3bchRU*s6|}DV;_}$hsi=YW1z}=3`l7@qmM=CZ^T5 zW|5a=w?{2K9+X}bK->^;K?kv7y+>WTQb>UPX{w8Xek|;x3kG4E1y_7`%B+OR!_8VT zI5dk@5|6_9XJJb(%~3wxESvo0w0&@Viw}djq)}G0w{P`m9$R^$W6rqCVNXfR7s&O_ zWsdTXYoR@lKP^&2Ni=uHSk~-(>D*7tJ$e_3PBk~ur|}lJdHeF-65Q!~Py27hMg@}5 z*X#W?*1P&kJhonCs6HnC&eYUmO)2oEdyt`M&+Z#B+JZdTy!sOW46-pN?g%YSvk9$! z&+2~49xZ}l_(~}v;c}OWv%&OhcH+K?%2?bCsThSORi`7jq?*(!yGbCoMLp8suhrh= zN%D;txytoN$-#oiu}m|=XHQ%ZPK=msiOB=Cwl^Ef`aV#wg}B;Bt|bb&f4@|sn1S%c z^m(huGA9j5w{j-6w}fN3W0YwJI^bLB=C|&x$r{3oQqIZTW}cUn#kcQHs;5`;zf7E6 zm?*1{@C_fnu@1pF=l7(~k@(xdMlR5CR~2GclSW@uf$&8@_&aNak6W24nMhrrTvXI$&uq-m{MXoZ?PuD-kUsw>j2G=F~g5q$(!%oNMdT{5)q z)=0a5&k<$MhF%9+cWue55r|J)vANqvf@cc+`S)F)0EeJXOE5w0n~;ewz?L#CihFBx_%7>7QAe7M~y#{X%jSf+w*sedLf!(!=glEK>s>FCE-M0ETKVFznP8S6UZdgI4JPEQOZ1yne z?RKXq(vOhF1cyd!uMplYeQ<~V2h;(-H4S0ginFz#Ig2r+fE0;cKQD?k^Cy;mADM@w z{VTbZM7#}H_unFPt(yzL{oRIcDKQvOrn(2 zYJX15f4HnL+V7}`{~3M%b;7_8@#s$$B*Z6TYOB$vBv?)c?W!prpXofUrO?UG^8qFsd=R8I0Q_Ee4gra{GyWoNZx!c@b;z8XsNiNt)fw{r90}Yzdi#VJ-6b@? 
z_=}^{>c;rSdob%({LaUbI6I<=Rc5rW(`x#yWa)|ZG6I$&Yb48?{r|!cD;0DT-C-^k z3Do#IR{Pb4kAp+6|62<68bL>UJTuo~Z12f7bCr`{=!h>7(!5pEI(R1jaTOZYNOnyM zn><^7i^XN97%e{@5EDFwgbBhEdO|3!{Wno|NTriHiuFwK!(7(6Vucwqfnh)?u$z`u3uFz}J z{t|HE&E1(otWr~Sqatpt8?xkgDA9LM%ruotlV_}MnV}Z^SEi)Nu9C+^;I_Xy0`Bgc zFLI2qeCzg=@YOn z?oyts`CsEqR~Vma_rDYzyr5b^vfL)L@+)hy-P33YgJ6DKVC9^cbE=tm(}MVM;9^(R z=+TMcM?nenHb-|WZo@Fa93y^H|BGh%qQIvZQIv|N;vH#^EJU3tf)x%}rG1O1FV5QF zHg(sNE71|MoLr=?72_vOWVpyk9ki3FjjT_oypG)W9)=I9zW6J@d^!E}IT&Znwrt3* znw62Pjy&V55!awn=+oe+xAgCU?wj6tca;8XA!;}*Ntk)KyW!qTBT%KNUZrQrKjX2` zWnMHLh!m9e^lW@946Slb0+?NBoqKO2qlv9Uzi~xl3+RMgl5)@kYiaN-jsRNM2Zg2- z1fQa9PWy-6KFwUuf~P-KNwn?yk2_;(yZ1+=nuUS2zJKNpT1k$|U#m*RT%@Ino?ZQCEEp;#_gox#4tJ<%X@FITV@`lAzGb}t?cFkTV>8vyAZXEK6uQjLEagj|Zcy`p+3}x~k@x#TBLmPh^V4($}|YN750?N!bG+jEOf4 zrjjsKIi$L@P| zLL*mFkxPu?v54Bxt&(<`t!k$0oDOe~_{+asG&$7)-35kzTq(AgZRIH0=faEZ%bVgE z`2O(4?5oQIag?s(hc8!BLzu#-06z2I(M9Xas1zW}j3<>nVbQ&f+uCeDnIJchCFyJG z5JUdP`q}XeWv6++;teETT5U>p{dY_W4-xc( z#7K?%imv}_RCyE;prTtYGuLyMxm!`WQ|rIcNS~Bu1j++s9&_OTJ*c*9tpO2D6*_Zw znR7b-WX_FX^A@L-+V8LZ=*Bv92g2|5%wU)9PfmNNm#!GqxJ;047;m5!rR z_oKMSUR!H*Y)lsc#!_wHt*I*I?nnzsxV)fhb2Q$ariDnM^C&Dn9B)1P4H20~jhf&IkH{J?=625P4sdTKuwW&-?lGQ3_N?v;2&q~nJq*FQ7}9k(u=_=X zJ-<0KTK5jHQgjnFkyjQcGNVVIO}{0E1&R%GU}Quc6MlVesJk926AGnlT9-;>Nj{~4 z`auoWrF6TWjJs968853pmUReX_A<|E&T<&>MXHt@jWX1BoH%~#I*sJ(A&Xb>BU+i3 z8Ee`lsXasBxWoP89pz{e{F+zL_<2u>1#q049!3X)-e z88QhaK^4J_$(hw#IcEaKOd_pNms+qbkaOlz>o)QdIbfWA`sCj`=|TpSRaqMOD3SA( z1abiNuT}$8CdXc4JLAVAY^~414M8ei`b$W9H_KEm${9j@_h#+db8#sd;)N6i$0r`8 z=)u(VTa2`vJ&YTRHNn%I>ax7(8T%WSu zDaT(hFYP{j51w0SLo+Ham-kka4O-`bM(*P_5>a+O z#kt89&h6bo?XjBJst3SYn(RKTLpnUGFSCmFDS%d4dH`k|{3}uL>k#Z(_?e!D^dZ5l zc)dg7f@XN>tM{Q-bb7jIo3u^0VkfJ*yOd53$A#1@`ssOhfB}ZU#{)pH;T0cq49ZjI zMtZ^J$cDM!1k=NBXwWZ0);s|_4TrEA>?RaBGUx& z5os@qbk)ew>>JR^pvw7GgOKSy!~^~NxEyqJZJfMPrJBIHX380GJ$>rWi2}HDZcPUAum1B zd!{Z-4e9IWZj)|%7TL>$+*z7| zE-9QJFcv1$s&Q+xY(fdy?3Hy43u|H9?BNDzZi>5>hP$HpOKf<~G#IsgAr0fssUX9p z2c%gt4LcSM26z1xIsegC-vm+Fc#{56F-Fpy|36!+Fh<%ciq5b*Dfsj;haCUwZ-1a@C;3dG}*bI!$6enqXZV3p=-(x zLM5(*u3~n>^vo1=-WVkDa+*yCBAz~=My{Tp`o?h!!MbG>GN50qsw+Ob|p74 z+Ldo%Rak_kpifL&BoSY@3@arIl0$XQc<#*yc&7zg!*=8T?$cgLT+2RS*mr$#OyEcp zF|S?0w^qN?ZX%DSvXMxgV?fUZ;MSTu=XW-j-CBL^y6X*@$^L_L?yZ6br-%cR7xMvi zT;c+1z(qUGB~>2D{}0}B43Ev2Z27iOJ+G}nNqQ$ixLcKhcP%|jj_(snkrnl|-j*+c>GRC5sTE3NB3yATv~DpJkVL6mk^m2^9~^ z`eFf$89=Xg;0fSy(R4PcIYMAc8=~&-JL?47*ZXJ_(*^vk#=DLeC&FU;0wQS+7A_u# zI8|`dHx5*x#CvyQ^Y=bE6=C!g=f9p$GIOiQe3WT(4k4s=gM!qP)3{WW=ntUjDaun$ z$X%i9us5H`jGgKHq&!G{`QYQX^9RKSp*`#80T_BtxGF}`FTTsTh4YKOO~P9_B244qDX*mWsTF>l85*7F9V3om*f^AU@W6zx<*LgDElg-wWk zbM7vci<=vI<&13fs^h`H@MK3Z=p4{yi9jYw0+0+v0ma%$zru}F0dVt3E;P1O26_F>iJoL1t!dt) z0CKhL*)QW@Wwf2POPK$~qihgN&MLqf-$4~Jf;KEAeSa2y5nN`&{5m8*PDf_LhDGk; zWgYwdCs_ZR;{`JunKTc@;3m33p1!_YiE*E0NgEkhF{4UAFR?)K$3fZVPIb>)Ss(r} zJEXiz?ULA)PsNe@u&8~ zkC<&%E2^LGDQaF!q}@L}q;4U0xWKb#j^hfmPh=S0`DV+D{1nI$>hSU9;!-T>E&BZS z+Y!Ho1vZ{wpWp8Yt*H&5&!R5)cg3$5hQxkb{k@o0|No4xWE^BsPc_5Oxj4yTtYWO4 z4PBs84$|-*c|BV3`Tq|m{EbgF^eJ%19rf^Nh1IE?CODuE`*khr=lT)si9j5attbyv zn26zDLKsec8BcBHmG~-_rXy^bVN|H|l@}9kH0z+)U*(4O>wlCRjA{@b@pQSCO^NwFizyha+1$3YkjTX-3{H*Lhi9%f`!ScZr`0mdHuK(F}@Bdls}^t z@?s*TTvu4JbWbGFyA~y0mJ8M{X=R;x#QKAOtMG;|Sz5GD={jst&8BRUVdG**cE4sY zDNUoN1tc#KV~mu}A(>dye&bn*tt>X_cF}|fCqA`x{3yb^_aHAfCUT#Gs>MV1)_03U zclqNt>!G~FuzjaQe-c3fwg)P8>66~R|Bg98D~o_U0jQ=WdqYh($?Y<9g`ZHXp!R(m zf7=mF@X;D`uVVI8UO?xc8HZENT}ax0Ik8nO^s&2V_x8j3khaxF-y`q?Y4S`uHrmh*}X95?~04=8}Kg7PegALH5E4qViLDJyTU6 z!ZR3VQkD0_5JoAu#)eZzlg;}U$m=ZsIQbE;q`bbU9w&+b<=q+kkkg@^LZ6}M5^)P!sD_Tz&Gx@!{Sd=&HBT8QcL=@m 
z))$n}4xCrVS&=NBLN2Tf*=i#@6Fv;vsnHL(Z2t1n5ygEIW6BU3UP#ely z$hsv_PdfDQ6m}7_dp^jpKUpy-JAG*IUziN#A53<4b=Ud)XxV$OrQT!kwd3P;+zgH! zp#@YeA`Y@3JYSFgsY?H+t7o_jCfPDea!La!uqNXEXe-rbM>Gq~--$X|z7u@CTqpv~ z(RI-Ev*`=~6g! z$0ACtLSVe~&o!mf2!Zv*dKg0yR(z52`T}*G`Pq|{6RHnMpD$&Sx@+G zAlY-ERf3}qhq_*n+AwJI>OC3b@8wMl+A}&`uCQzV@ofVB+3m-~q?`i@S%smat}ZL> zls)5PnzNG%Yjvo%672{&2!R}rmlK97G^E|BAmPQLS5`EfpG( zsW$m^tx=*}2^6`L>T;X{se#+xM;;sNe+YUhJ>+- z4?|X9vwOAXprpSc)&HGs9cW(2mHqV}6V5~-v4F_g>h*yYw+NJ&k^m&N#j_N^@H4Zr z<{P6@SwOl%o!Dx@dJxDJp|M6&HR=f8XN{xe1SNl#f)e45d=z(!{Z`1L>0GoNl^NLf zR~@$#RU#*vT%2vO-H}8sc)h%&__;BKga7>9U9hEIUMcQ>A2%OOeu>L1Iw&pRP(_?e z4`nv7(Wr681r2!Sd3?9_bsel$Ln5NK6(f!VJ6RXJvWX_>?du`$oQ}V; zHNi`sl*|FIZu9qNC4y|Hvu2sQRZ>&ai%S|ya>b~#!VivVqreE9LlOIWHvsG5$p+E; z1XP;=UYYg^HwJ_HG?TZNbv?tB!1=^J=1=|YJ6j=SE4#xoNIFq6VW zTgx7kHC$sI}6t%=R7;QbG%84oj| zbQ_f6wx`;+e$y-cS`TT9&d)r{CZ_Q@Voru>2sa?kS^;~o^W}PZtOM`C&jj+jj9fw@ z+gx9FVGc^$q*g}+_g$z}K(5aA22!7+QWCcbJ1Nu5$mMgY3U^=IVM8nU19s{T(cq8@ zf;)mLL&=aUWk9JAJUd=BBAq!fhTu$?edy9^*ZgwOgUR*7bu6W8A5YTO7j+8z>afH1 zPFLZo>IVH=XX36)x+zr8txbRdz5W@FXyJDMV!$1OLO-H^RB;d{(iNs>h?b&+dVBGa zrEVmqiEi_$-OChL*fJc)(Tj23N>ny*|qmY9xQIsas{yAp0-wi!dj8*Nq8Y0_}Z~u1jGu?9-lMYYNXZs zU|d|5S65nZvt(xatj;8Ch{C-LW%){hNUQ!|wq$Mq2%B;&w|#Ior5y zd^G;O6?jhwtzd|*F=dyP+4(hlI;<)$lLGuTNE;}GqSCh7&+qWxTR%)6MVI@$@BDaB zYd13%vZ3eX0;lWvxnk=52R*|weVq?JvC+(M>7;2RdVzxRDM5ntj74(hg5h6ue!0(VuJJODl4+7Y`S`fnu&34tm)J(J z?@8%TY@GY04{O^W5u?K13#6Suw-RF9qY?DD-)zetnxBd>uH6U09Rjuro9u!Oz$?7tauzj}%I*U0);_2$mXjPaMjdLXy1ggKvx4E!$! z`&I;?v_Ga^9{WshD=0FTq&lbm<**S`_)4A4PfoPs;x|9NtK1APnD0zM^N9Ls`^BVnuv{gqCV6l1EUjkBkynP4r|lmE*meE35uCsbhVKQ1a0AU>{K1Ry_nXC%QZ-+Y4%?>j&+*} zwCR!f;#l|RU3RJQIH?d^`12sfCUm}J;8+L)$Pzyq2sUA3!0zbmUWUAl(8}5)QVM&* z1JEE48pjCve4i-3hR}msQOSfSBy_nS)SJ*3;I2l=ntaHF6NQ=rO(Jdn zAn!bqiD=yytS_d1NHeiIktc@VFW^Y9UDqdM2cby2)J|s;uYd%+;P#}YfGBB-rpC*Q zR~of-G0bdkbM`88HK6m$heQTUS6pU*fbOX74xJBZ6Gc<7;o3wsDfR{4CPu3d`)Ohk z49qKvA9$qN&=HUjr;j{eF)FXa4&&mcNLxk)fK~p`N^^^Yf7GpCWJYzf18=>bh$5Wk z?qcfyAnl;T=S-A1>#5^OzqGk|&h#HsHTrD&tew*^($3L_iYrcv8y7sa&q~v7Zb{iS zT9;4S-`2k6a=Yb;r|+xk7HP{S#jHt0Gd#q-?(L)h&_piKbs`h3p#1VLaEjY}GBNs9 z8yKT&|SJVX|Qd1wSDM|K9Xcm&cTR!imr@WwM-4Pk1l^qj0-MwPesFmrf z|8}{#1#co%*m4!q>9cx5OfI%XP2?uf%FJ*)kI18c@`EYq<6L@4(|(L()Q>w_V0R zYI&@1Ju^)Z?woE=e`^sjWK=`6Ph0$Pu zrk`I-mp8djY<^_6@7`buJ>2k`UZ^@!U5Up{P_5Ho!sB4^iBRzD;H23uUwqKe!p*sC znK1#)xah*pOc5lLjqc>((tZISEmcUl{>|RI$$$1ptm013@_5WjY zU0}f^ToQ%o?Dwb35_Hg_Fj=h{#$p5j3qNbPrjP%&I6mgeNBhu3EW}4{_UXH8XtVF5 zvv)?7YWho0SQ5!zmNO#uEeHM-;fFddJi;)XqZ?_36fGHBI?>o{9lY8!i{KIMd)6JZ z)V^G>W^|VsH-O*y4&=;{qH6A>=S zBcay42c63zGBPz-Cq9S0o9CP=ecAMsGLGr!>mO}f30C$rZw7O3Kr8PChT9)v*W(c% zjFS>-_I2Mb(nf>DO*c}sKNqBbf#B0WXAY9sC=X-wj-rRh5dt!qdjO@etDY*n!%DGw`I3m21OLznIRLI#N6Md?{)d%ic0%IYT|maHs|qlHN{U(J8oW# z+vrXkY|`bpz0$b;T2(Ukj?^K2e0SSyb~EgcH!eYtlc9t*3t_}Gt&Pbsijd)_%e|-B zwfZBKt8s5Oe22I6>bI@VppKbl8%QQoH1WkT#fbrRBE4#sZ+0h4F1Lb+FQk(a36gSE z2Th75Z;Ago!I~5u3V|Dj;xtJ(OZsF^m-?Hl&uJ#V*HvHW(GwhPS!wu|hY+$ay3R`2 z^yC>xn}#5(Fpait+zUP)YFv6D>+pU2w{C}UYZ49FFb(uU@S2vMd5Q3HD==$1;iK+* zbIY`lILi@sl0d_Eh1shyTDqc+hTot*^I7X!!D;6% zDBM(gEHN}1b_t8B(^BlGezP7{YeSgGigv$#`Jc688QiJnAdSRHdE7jAq}f95OhF8v zN`Ku}LBwVR{DrX|TGoX|)4YdBAe>I_txvhO;PnB+U*YgzOlfF^SK zHw6>XW$&&J2*C^Z`F$*#_zl?qMX{g=kfWt=wnynq8Nu-i@+>5-u@DxA@v>K1NBl`g z;*X4Y4qC*{oV7*@h>>|wh(DVi>$PBdoKFdvPsITcU3Yh3_e{z ze9d^Y?!MeEg7IOIjblbGiR#W8k+$)%n8gR)>fs_z+DSd_q9rngU=CvvUCx%-R@Ws= zqj9?(L+Del$q|q72l4-Atj=XWq3H86r?2I8?r%Z2B%(hy?p19mGvi%N8&)wp; zIyT*B)Ve-)18Il?r^lBBeNVKn%oAVL3%EtE1UG;{hi>`An+ZI|%-Nm#s}ld8KQ%_P z1w{4I@mM?{TEekuBKP%jwrrN@cNZjtCWsH3$l@}*)sOAYIbc}!%s!#Iqr|pK1K?(;@Vgw 
zN9q?ei&%PIoj;mG7INzTg8`fM?rleq{C~S%Q$!fqnUyH_=XIfXn&9HqdaOorA^PV8 za%#%Gv15jq1X{t~50`-nwn#T$p(;O-5v7sgM*U{$S)U=#_coL7k$O%zSKTz6Ix-%= z*7)PpWHN%)HSUCOO_e4=HOhE)cdQvpRpV|z8o#3(0zu!v)TIskyBbuH8CII~=jIvH zGYxTmlB~j2ZCX>5l}f!y5T)1rCgisRH$cddD*HfL_KeA`@&~_HdBUX8Cw?qk)-!k~AmdFHLurGkcfs2@rKa zFy${3U_9S@K1+&|G5wmob+u~8aQ%GZbXwgLC+pluY`Yw#e=z5%({z+MOL0wD7@PYA z<@fg0NI|!WtV>R%2j^AgIj~ub1~492DwSmwykh|;yLiWD%I^8PGjEmS#M5rZIo>6q z+o4?N>CH~u7u8oT#Hj`s38Y7ev&7F)C)xxt_a9A6C0h5;E)TZ`U56Y|0j06+{!;%b zCBc6SS8}RhyrMNY6o=QhCuS=%T(up>{I||*(s0L|A8-#SJ}k~f@CRR&5B@igSQN^X zs+Cc;(}_k?Q72u!pkp%|_n9$!`f6#EdbgVnS**Bo4o-c6fcvUwndU zJ#(`X#b+1CFnfkv*~yk-sXuG>X>L5=5=$|pGeXkp1hnf?b^RPEXL8MsKWl8;(?P4t z^qLhuL6{;>DdY-W8kfW)d`rR(x*e_EEbD~=p;Rk5RbkMNi2#AYl}8WN4tKS6=8nQh z!R(xHH~ba%JJNOyNgDAJu0(%p@8 zGc*o)ZhYSNd(L;xFP6hvE@HUuYwv4^*%g}g(hgd7E!kW~M7FMDF*1roG22KQ6uV<< zX@$ELD!nCW;y@sUoa>n!h|T~Di=AcXpF5i$P5YJkkgCj=P>Y3R8YFLKx|B#zyZCq1VdGoQRD-+ewy>)1h ze}i=C`|dcb?4IsswLRUNmDpwN;y&1U5wF5O9FXqWtNVYcRs8I7(JtG08=TPM`1zi; z<2C^0BZ1lR)+>@R+V-RTIyQ(WZYSn?8|U+4v{!rnb`ZYev<#PD;aQbhdQWHJ_r1>( z(N9Rcq(~_d+J5BLRgoJ@(BJ?UYvPxQDM*0AYRKoxO4ARwFxGqEYWUO+9@DAPU4q3|Li#T7?>RZBg!6q z1@wp}*B#6vjk=*_p4#y0g&iy})^}OsenG(eI%0yTE@cet8y97P?7f|n&u&+9!JjMb zbDZ%eJ%0tZUC=n~`rSdaF3a3fuc}OZ#1;*RnUkTLN;wYuMmLy5)fr_P`tG?K01?zy zA8PIRn?CspG~Gk3KPlsPQIM{lY}!8<6~>zTz%O@G2hwbYnD!)vWd1S;+#xM1O{7L0FRDelj0eVkc+ zMSRq7=UAOZi+R{|%wN45gfrv|b%$tGxc)$6g1LO5>Xt)~d5QwX=Nov3^Uo@Z^Uo^U zmKZg9MkoMQn6V;Yhxn#mBJwt1#Tx@4TRjPjPPI3<(L*)atdc)jD5$)mg*#Uqzrs^| z)OATOXdod{A0jmv^T$X_&AhBs)c2uf5gePF`ESN0?R{;>yXFoRXmP#pM zkZi)!`bA}7MTHJng0>QFKTMUY&~UNX|E5WDtq}lvM1)H)O@SEB!Q_Ckpvx&El*z&5 zbUU2)j`9(y%?Th0ex7l|P#LwV);Q^f+^d&gx%3}V`R|;LMtx0r{LbR5p}$vKu0Ebm z_}y!CrGw$gJj)Y{ZXhv2Iw1%nt90ppCst;x{Rcx5x9P0QWk$bd^w) zBEi3MLoRFo@7k6yId+=<(W={6V>jNdxG2OVAP zrCw4kTJ5>I{BxhlucCDTTpDo*zM*s>0@PHN61~41W(W0vZ|H8L)tD#CEeo6g!paid zjLFL8MVBs_u8$*bYncKbvryXyM20;ef++B>R=uPEg-Ii; zw9O|p_s^3>%zE_^XoBA1B$K*wt5_%(sym2y<&|t$@BplfD@u`nX0*a<1qT-dIN((U4ctzbB zUt1h)p#VnPW;|uBr1WsybqX051GvaPs=h7|0#V`t$0upP%G;S3wlHWU<{-{RdiA_Z zuW}>tX@`aq4*m%3wbR~W@zaRe5}ncCb9H)qx@^x;eqHo3PDGaA#EY3oSQL=2vrjV8Rjm@9pkH=~0g|YwGhHs-{A?b4Ux(@<7v2%jR|x=%=zJ);Z2UDl5dLy` zTM8)+@wCt_i&|G%B%Cm0+3-dq!A8hA5`P%=d>&xv zWIn~)lXA&bi@icw8{HCseLNX9StD_KRnlYs(V&&dN!?W!rPgito}Bs6+zntf)I=}U z0W=LVjA74^HUvh7uxw?z?637U3F|K zzxjGv#-}eroIhdh+p*N&Rdyi!FUNWAHAJmRY`|+clu9*&bN5pv_LolOAQt1GOgYC% zjSL>iD7;Oh{C*gP)AB@(%a7-X8~wU>Bcn=g7}Q=&^XZavv@IB%(t@Zxi&L3C2&#l3&;hS5LNNrPzM77DN z=6fa`__&v4HzYqKcNP5_fbJx+STt#20<3=qF8u2GXUTBd(8XKHN2k=A)r4InAh>Lh zh}S#wP$^1qqFD^Q{LWNaS6&5UK`uG%wco=|byZnD!bH)YKhNyu7*q~jx&f|v_9;hU ziD86j4{@)eA!AS}6mZA^VbfldVDYpUK2C2PRBznOaTttMg{#TYOYWfgW;R}Y+H6XM zM)Mc3(Z+5-OYgx)CEi%#s?kcV96GmE+)oO7IAecwwOiPFAF>QCxducg;sMBhv>ovG zhU3P6_g29CX$6(I8E91>s|}pOidl0wpP{+GDFd=LiC9DeOx;p z8M&@~zU`#RM(R<@^JKgtugDS?^Rn7`d@l00vu0a*oOfe*gq3~T>)WOeuM2r8`1Sz? zdQ6k2{aUuwG4OfBqk!fwBjk9CYiBY{OH(xu}vIT+MtpBLCZ-ULd4tlec{j_}Pmw)7`$PVa>Ns zxKzZvOh3~d8~L$z8RjQ{rD6=*ZAa_N`{T@%>T%f<2`K9MCchfPl-r3AtPa6CDj_>o z&?d>ax8c=O=<7E*#LeDlQu$js5w!szW<|)jwsR&43{3%KNo7T;AwzQ;cB! 
zdMgIBfiOwzXh{CV48)*a6|Oz;r~078{IOf?7D zi>!t0AIx<>4Y>5!oV2b!zUQ}LO8-u{{>P|mifZcNhL%ECJE?O=1*;Wl1K&nOsM>(E>&O<@wd8c@mFu_0ZRiD!K`ORCF#i{(CA) z2xvgxJn_ZT5dZ(8u)YkzVNjllCfi{S?cfn8o^3wGB-l~-*aOFsHz-UZ%va!?Pq7S&73(pys%Td{n%L2S`wXW04J>7*G2%V z+xlSQnRkVf*u`1-fyIsta@9JQ7~G!R(1W-7Qti?z2}?oQHk_v`X%a{FcW>OX_?37S z*gojS7ItTHin9qHxQ>AsvDrbKr;tw#C@Ga4#{#->x=7-%PB}qQB+0tlk!>40BG)(H ziwxkbkwocuzKHG?tECP{Ea$q-4fbVAmoG?`AC{EUbl@l2KfqY+r zzeYG~NnI!ufL38J`#Nn!!STbquKBs8-mA%M9*B-(s0Evz;6afHjBD)KYNLR8DUDw& z64vMI=f21zxh&%Nht;z=qWc?02HS17L%qei{NhV@5{@GA%&stI|ZR=e>!((Vnio_)YY)H-PTy*el#Zv3tEC-Q>)7+dLq z(r%fhm3!9t_`TMsS)mghLg9ocg)f0Z6zh+5WA&b=ZJ}B11GeGf#Et~(Pzx6h8DwUC zkY_R4Xyqo#w9EVoh0ymXv&JlOg#Ogl}TptD-DjPz7#+6%TiOF7l0qp z|0Y1y-?GqNB$fEaL_MlONv1JLR=Bs1GVr9IeLpK5zB_-D#Ak3D)=dAf1jfVpd@k+9 zQy^Gj{>gHHDTZW_wfZP?rLrqLQV5TEzF93`sxd;jE{XuH)1O`LuB*Y!+%HmF5&Y|L z{dB#o5sIeIZe=mt^SQPKVU9E~t7D8{3i7-TlaFLrw2CU7R66kNh@1~3xO*VJZW7=VM; z1?|8EgmOcYF4Guvca|Isw6_;$XN=Mg;8YT1T`V#%ND+U0Flq zi%B;pKKwD3O7Zzepwxr4pKVdAjr9g`Qq)EbMEpDe&pzJdE&8IH!U)n+Q(?w4a}?Tu z0r++56wUSY%%7BHQ%5eJ#&4@6Ak_8AY*4i1uIG#z3dL5Huh2U9@>rGB+IB zmG3yWCR%O@@C#efYJE%&zzOXvMa-4Xa!4_Efc1eFpbUk03i0=VxPQGbfltI!yh#g| zaofq9QAg%_Ybf2I1!^i!0`np`2${P`FLGL}$tMNqD= zNyAhxMlgF$lYnp3rM%&}$bCGxGZn3Okxn@Iu;EA-D}n-DVedBfPSRe$nh+&$>WK+x z;owx1PaH*E#-XiWwz_crqsQTy3ZM$=|{giSL!Z+8fGZs~=Zrx{` zB14R8GzH5;2CGNq3vWpAQ;3qg&SR%fp_sC5FGpSPHtB?KUR*m3CJez{G60IuK@;)< zs(ENaxZ~Xt)Mrq8sA$)|PULF=utF~yNAJ;E9jIPcCA9PKVJ=;zqQ97~B|ZU=66Oq7 zU)DQUij;KGdPa*FkS2b=7)`Jh+^*rGK!Mq`14nQ{@CuHx7yJs2vbGj_)tai<7F=Tb zn(DN&B=AHrp@*3L$upV0kTQasHc?bKI5*Ge3Hx=!mx8?mjv=G)0%-}eG5|WJ%x(Z= z4Pp+kT^z;-<)j&NMr5h9$9Lr_>VU>HhFm*quW$ldTyz?@TO!0t>;HbNGe zn9oCxgq?s_&s9v^^K(ru9+<+uxn0-Zdl3hag%^sv5XYd+_=fsYO+!67c<2GrHvfra z)#Igh`}8FP6eF=ZZ=v~&dNSj`OhCXs*YEUlWuFrDXaA4Blbi#=F-Orf5~;^HPbHZgY zUx9i_uJ7S!-YlKUwspF+^+L58$eLzkt0O+Z+-W1BVKtUJHZrmLH9XdF5Q z4RA8va-7KM_FXg{Qh0M_G!2^hUk2?H<}Y5gLOpmr?pj%$7grM4WCan2#&wP?T9z10 zD&Wj`Mt_v-e+lDe34E`wDSUDLaVbcgP2#sF-pz|e1(nT zCA_*_o|A0*XZc*`uwC))xH%RSCES0kwC+`~*_qZzcfs(CX}ryF8fq=~Yi)!99I=$y znY#|+xttK6p%2A3lJh~Mxv^sTt+AplRg7<)z&sr1+OIG%PO*R5FAXP3-{F@rO~P4Q zp|UY8895$1G52Ub77K}O(c(s_OuVd}Wysc&>d-SLb{5sP%JvBxDvYT~)0uri!5;Gk zphJ~d+J0R7oFdG>Kf?uXwen&ugU8a)>2iB&9jV#q7NA#@_nzipZ$9B@r{6d9Fud=K z<73ruovkf=6kT&jDYwZ1!6i6iblr6sSKtu>xQySsLY%3=jPInTlJkDGN^WUGocZ&X zW6pjm^q7#lVEhst>T*}nbvl?IzsS6F04+#5``pna^R;<`^^rlzYuK*A0UtdcTs1{OHCR^XF z%H!MfnLWrING|*p>5RFa!&vn)F#hsR>_x*Os`o5#l?+DZg^78JZjA+?vAI30AH8-D`#UX);2U3v(EfbqL1S6dne#`Zv~Ca@5ggE zjKu86F)U8cGP(py7MtLc1YHEI;|2U+upkG~K#PY*fKC8*VM#WX>TlNJFKxftI_);P#e462wuFqdt(VGs3%6za!1LvO*GyALF2CeSGH0u zL^);^#M|8SS0jnoiA1~^dPRnCi4?v#lKk?4@=ky|S~?K^`=$xvaTNL`%6)+)3*ehyL1fKS8Ypj+IKk zyOLKNQOP!3Sy!>q6=^Hc%L}rCL0SB*eA#|d+9!;Nb;hNbc<}1;VzV92msJ`sqpn2jD`TJYbUkRHJYeHY%w$9XsS-BOXhyeDBz*opyGkPiT zCz(sy98(iy@UGq%`G3F)vmp3@FKNi-9bO_;!K`LgzGL{wug=`Ps2;e9$uhvXzS)5s zon#(TsoW}hW=7xH7bDI&a6^=yKePTzk6=tnZRIDHpa5`MQ(x|9W;YPyEf6-2+ijHo z-z6jJ%Qgp`H=+h~spL`Ccbpyu=z@E{th%Ls$b>wFI(*?fbp#jErTyt@lK9gzq?(U6 zBwA7Cpl6^uF~J%|yb%7Gfj}l|kkc|+nFL?P7(H5YT(nYf_#76TyyqY|)+4wPl*>2_+a?+r$bkp94(YZfD(S-csobQD2 zX!r#qs7^bkB>UsvkW|1C+HhvQ($?jafNoQ-Hf*>Tmo;7p$E%87dlW@PBC+ACb{N(_0X@L4LY|pp67+NCao`I(TH&V|!*Y5lXo-1*?a<&0SRD4@c7yNb& zERjtEtYiYcEXYEb#N6;m;x4)~Yf=Q-Y}vH=R9a z5?g&fJyD76g#y$F0oNu4T@-3F2Hb|^Q-xMOBXa$>K3hztUlBsRB6^brO*>+Yy*fvO zT*>^WOi+ZHWxOLahRxB*Nxx<-8m#b(jOYBx&rS7X9?@^h?|(Jc;m9ekV*3U(vL;EM^8cAC-SdtI1-6lT~RuXW+xvpG86|A0X8&&{w3W4y1%3E zP8l2>EHOzE3IFuMxFdrIZXEJ-{uve?1a2?IDF%pw7;2>lHa#K}yE}HA6q)M4FS}#8 zC?yfzQL!F*JEVX~_?U;KJ3DLpX!SPoiJYX5(snaCSm(#S(2TA?BmMJJOJ6-%!X|*O 
zNC4cmtX)%!tv{r2`5aqUHd#);2}Low>k`(+hAI}EI{Ki-T;cUPVZ6Kkwdm|8n2|pN zG3XmVhOYqRCF@aeOT~M1{ldN}0l_9)QVIWfhBx3$7!}0LPS9Bp3?^f|SO;?T6tqRe zVQj6C^-a8xp=_4A?k{Z34<}z{PBL7>)^o@&i(&tx*+L^@=QPca(-UF# zvnPiN(Yzz8#T*>C4`5s^^O87Qa`754{HYHDF1`VC$HbA#lA0}a#5g6LO6Q6~AJJV9 ztgoK`*fEw2Eh<@-Q@5&QH+37JdBk+qzx+TL`5wtNFz}X+gT`aB7G3X?I^4fze1=nG z1DzJ{+l4IZagt5z@hsBalkk5k1zl~hmn;TM){|7J0|O;IbI*X``;hJ99G68_p_{0! z-35g31s>>E|A!p{LeOFFEQHvD=1>mAg$WIC8*aOEYAX`T7q<(VxEFNy)i)%A zveCIbfT<*Qt>+9C8e>>8^dVdjkrOi!STWajUxJEo{+RYywOxhynHjdUPOT%D^id^K ziZ`DTNjkVN!1Wz)b#f7qE>ZB5RU>T}G}Q(TeYiKwiE_k-#W|O}VU%9Yl@d0Fp1~H# zX%GD^h$y3;q_(a0>JfYU5ewUXMIymaX&2+o=FRay>~<8tk(s3qn6LK6;>p_C-ScEB z5WGQND1I_e7&tnLWw<##41=KsTMu@o@soW0Btkrk>&#%SD@e}K)BXIl`D zf=bly$MA~8z60$*VD7pAS??}ocg0!eJirmNtcgw6r#}5Qw_`U_!)iv5*NkHk0Zf6~ zPNsK?aU3SR3kCIcl;*V`s}->lO_DBuE!ny5PQEa_ zYALM&p}4jN9@wsQz~kJO%3=m?vhXca9v9E+vX!kCXK!*QzIE8wO-J`#lIX~ODLQ23|6ckR-2vJiU$q%?RIy}^sl^*&l=?Xo3R40~ns??C=s1CIh{)Ig(fkoQ zTvUcV0(~_QPkW0N!EK2VerD3olGJ1WFX~U}KLMVh)cVPqR>%l<_q46ed*2s_M1)}M zmvkaP!3NfoWmHEmrY{~*d(}a!eUUN&V%M=>$AA9t=+F-r zOnxL7%#zK-#RSB62utbv*WHsqt@Z~EQVKyFgl{~5M{C8CjpMD@GyljPDW_nYu@E3| zR{!N?IP_XcLMhw8Kd->bE61;2IBAT@uN^JPVJp%hHuIC4;^g1(r;E9%Y z4*vWF$L!}t!ztktBOjozemBkaFFLmjxA4%LOtbB-!B>6U??%Hlv2DGOW%VM2Q4s)I zM<~;It>eZ=Uj{M{u{ehGsNaPrFW0gf?P7-|3Nz~$b)j|56n z1r&grV|;xc($oC1`fQP+%lxI=7T7b>X!Fyf0vi_$zYUp#o+RxJOX5&hN{N)>@=zgg z`b{R#2GOJETUH2md--ra-eKUAc4EhW%>GXnC~$@Pj?)=GP-sDxl`Oorjvr95Urn%}>|BR|deiq;Yq^B2(%0}@GXj$Xf* z+eL0`=`oUr(1VStVkOwk-_^2$h29bE{Q<<`CGhq)PZ&Q*RQSE&6>W={87%O1p6SX)xBl(9|YOck+R0JcL(59u@ zRD2?s-fxs8`j3AzEND_yWjEyT~xzs2EUG5Wow`DN9MP^$=LaB5pB8tuf+o$+waE5AnIM( z|5!=?^19(Lg#E)UNS$ItFGT?z_K($FT>QA0O`VIqR2st4#oNe_zozaIRpCgm=ed+^ z!a=QBUaUSU6S#J?PFJR>K_AUZ0JLb5Sf-h%bEv!r4e>pFhes|NL9aP5Tx0T{@L%@+ zK|I}0ND+XFy9HoUBrt$GeRZDCU00!#B`kJcD;0fwT$B(m@aVhgH5*O%QDYa)k;_Nf9#)Ug>CXIe^ zLBiVPi&cP49{5uszrLU*rvG@n5PSDtijx2$s==6exI<`Prz$XAP2wg3Is$`;0hZei z1dpHRfg+Cf9vNp^1K7NZKR>9%FFSjacTcWiFv+)pu-ozS!zX;11Uy8V-1Q-1M@Z|B2TlCMFZ-=o7~Au_AzE3^OluBVTwPu@qB>Z>1D z%xX1~YfpBs6k(5O>$@C>ciY1hN?=Q?0&RsQ@}uvqipZRwANcdC=ZoX2=To8G+I^<$ z!)j^eNry(3(#n#7fp^kc39p8Mfq{ms0rxED?%MX5Q)n+nS=JDQb=>z^v@q%^4lMB*)K4L;j*i zX$$zO%=u2cF*ag&&Qg~y)^STxyPSJ&zRwuIR)m#B7cq}o1>51;qy$Bu3NK$?67lR( z6p!k4w-Hx=s~1O!hIp!>X&09~ac`GLy5T_ITxnYQ43dVhfUI zlaX`~EbO~^w2@i!84Yq)*nQ{gTR2~A)raRJOpuVZVJoe`7O5!eK8x9-vr6^_+jtWX zkB=Vr=CDp8Jy|mRr3u@bV05qTn*JnJ_oxbuBDfGY`RYYPDR!0u4NrJZ3*NZW-MrPM_-iz5=aW?=7AKH*wR zX9b`Im8+e!3Gee8SstF zE%G(Xbv8|N2poI62I!Q-F=zWA=?Aq7(}yc|O=+CWxsCHE4r_GhXA)G;TH~J?jalU; zgkF#62T;LK;uDizixi`lZDs z^7Pw5*FQnw%9Xhxx!an~)eYiy%9e%`Tj^w33$=iHu$ zl^xCtKD?Pe^$n9Y80yRxm$#n~5c7C(!|CYk?tjZE^@;A$%kV#v49|H}`Xv$KAw96#~~Y zg0*V3ALnG6dRt&!vIij4=vw|i>#*P%3D34Bj)@25)Bw>omY&>DCY`iskh zqc2On!_#S)Z28}kVD8+aTq@m==>+G1xA^mM*RxwDIC5uApMql*4llkAx)t_#IdZn2 z<`ePN5{z++q20dCL+(ret36AC4U*5Yb*_jO*=`Ey6%<2rWj6;+G|Kx&M))G~GN~!U((34X-(T;pIS;4a!+gdnW7hL>6w(lq4K4`R z($;t@ma1-aE3SY4rl}1%OvA2h&wzsk;&5TDv9C2MbUR}HRfujS|9#hZtaNQsZPUrs zUnV`adGY!s=@y1!kZmlIQleXs_)7Ja9hIQW!QZQ|-VVox*&UM(B>lRZrCSs&!I^q_ z8Z94kDMkjl`$rPBcq`=%F(H3SpHmn8@IWRu3~?kZUj##EFomPhVi zu6fbe=*477xTC>z=9|-YR?sf0Muyfn#D~!3)*(MQ>7bJy4Xj-o|55Yi!YixX<~_ps z&_SWXWj-y;%c!r=i343Pl7@wM*r;hVyvvc87@`ps*U6xl3)yXEagdSSE|zg=?2Fk; z$+}WS{?dTR{HUfY1kJuGe3O5r%{kYiX;w4A-qgAF%sN&W!Lych8bMaZl3A(+dD2$o zoBMI5p4@8XPk4GYj-c(6eorf3L8Qglo;&;2qfeHgK2pCpb?3Ca*6dnM63#e;pu|@x z{5Oh7EejI(`+bf#7d)>HE7xxRk;k~=9jzZ2!SP4>kEV|*hh5gB`UL4g!UzB&(`T?vwg#DHLx)Xt~Ymv%xX=-3CR$!^n2zPtd;hrmZ zp)t-Oi+ks%&HZ10xe}z)vFoKDN;3R%UpjcwzRJ63VwJ)-h(PqJ(uY<_9!Z6c9_(}t z)QFaqM#vlQzFK>AD%Cit5<8Zf 
zKe#xa2eK{{>h%i?4=Z=X#4-;_{JHj{*e0U!mZgR!dt`%v2!pacg`* zGK{IlZ+_4j*GA4$66u$jR7_Rc72^oz`D|Cu_m#}ypH|kG406!$Is7~HNjs~>NhhGrsXEo_?a#YUbKUA% zon`t@VCg$07)!0J6@Kehde_{=m1}IvL7vF7T_(+ao=gunG12rXeeEDw?kA3QAHc}2 z_u2k9YPiqEs4la8Nrsv!r!XK)MdPD^e#!d0-%^b=N3WB8PgOcx{YD(8qN6d2nbjT$ zbR{>+Oq|#;`0C|DeQq%(Jig1R$hBASO>7xuFv|Nd1w>qVDQCK2O&7IXRld(7e@UFu znnF=fjCa!ueW5tF3B~{Nx&F7nvYnD(Kq=-0r&QLopmv+e9<3zgdZS>D1V(b$c7pn} zh<4;Sr($3~%C=zKZlaI`H~Gm?m)m%7LOr=StHH@+|9juQ ztF`su6oG<1IziD%6qh#KZgNb52Ou#TZELrz=TzmFSJo0STFC)qx&<~Bd{ja|O(uB* zsfCS+`h_lc((0BN4@&Fso`p9?FUS6`H8;jYG{c7OEL!kiae zu<`1~JJK*Ye9{0;I|8L{(1p8#S>l&O;Bwqpeiu)nXKU2<{ykr`y-z*O zYdVe@hGL^0Fg`9#v)fLh&B11@bAP(aMf_1XS*p53Sf}s}>MMju%g4x);j;lNk(T`U zR$flI*`a{L;UK0GZH!wL;mhD)DlgtiwWIkBugutd%rBLwEDF{nlsQ931(*6Wmf%e@ zBI*n=MMu41hTK3wnt{`x}C2(dn~l$;@oSRp!+|{ zM}|)1H$yJPC4%`cy!`Jw*$fC1*+eJ-jRto3ZR+iNRQA&!$%&-L^ttBYK;jJb8KEp- z(t3Uz7lR6<9M%edA6tU^M?0(IMcy9mjhe1IZtv45B00#Eh$A}fJ-Kx?qR|bUvIK4v z_%SZoG^OuN4$5;SjkD`C$#V|ZakvXJW&vjr3Lf_LnY}!ojyCPuY7hz-1oU_u9nc8z zq{RnPag_6}2OMR88c!D@-VJt(FW)?~>S}*wd2G`F%Yzl@j}=*ZdipNx4#S-Kt)Qu_ zGdd9CoT!h)0IlVg(>8$NSe`nfxD74Otl10eRHw4j;+#q+{A$|%31Q2{J*;|+W^PAS z-VPkMa&_Wr4i5~jbf0<@0`vH;?hP;d#S>B_(+31H*v(Jx80V0cMIweG0_@IQ+P&Uq zqRx$FYKzxO$>g)Rm&{nqjch#)Jix!XuL-GpgT#Yz2B!wQw&q0##5Q@i-_n{iJZ-TY z^c07*UaZg|k}y*<6UUcFl~>5-*772e_~>@%vxb?q#$=ECp~(OwB&sE<4CJ;rT?;z( zwj+tXF3RAnE^#snLgdY9(VWGd{BnGCu9ZwieH7*sen!s%1J>}Bdh#Qe>}3<(7cX+N z3o)s!xgh^v^{U!3MWM@6)%Y9_8nA2B7-t@AKU{XKTjIu9_DrXB7r3fx$@jG|xY75r zZ@fIJ+g;N;x$U9xIF!?n7RfG-? zHO#zqIxvP)5wmFd^l>+vcQvQf7Qis+vW0rC;pNz2Q`AAfRu?9-#~+0INub-)y}Hyn zG~J)cUI&=meyGJ!{iVZxGxDYkq{iS(FWTst@1c)}>0amG< zc`cF-b5KQ%5|uSXAC7LjR;Luss}FKRNFFAn_Axq=Fd;4{kF(X zpbVkn$&htdq+F#eGrxh^DhAY@n)Oh@XFtkO1rlR!ap(r?nveLRTcP8kX=+aH{vZBP zwz!z3B!IHbBiQ;H70GQf%13d;NF=(6pDMU7d4tTPl-tP<-3+QsDnDg5eYpaG)X)nv0zDG5W$tP~15k`=BNB$QKHENETpY*$Qdu zG*DHwz=z(d(onSOOf3|Pqj>|pp_eX_nWtvK46UAJ(ul}hFRiF$m}Pn8?LNX2CU_k# zBDaKS5PRH2B`CcCqB5jB{}SqZ^3$Ab5PWLrVY!BnnMQStg4wyZ@BQPV7}KNnb2`Jo z`vT4Ywr_jH3=S)s43P)Y05R3Fm`Mk?r<(Gk^Il@Ht zliI3!3y7+bLwSz?rf-MaAbote&W}fowV&7DA4T*YGrmaKb(@phOw6@TXQZ3$!pGek zG+lx2o;WX!;?eKF``X6bRbC(Bcmb!q8u1h^h(eH|rRX43mf zxVTIgwT1^cmksM~m`?@(Js?m?`xJ^7In^{!qyJJYWhV7b*J* zQPz^3kqPXrUN2E%%4~f8Ib??sUMf3=jiY+dszq&&mXOhbtUu09FsMr&t7r{ex8_n% zYwgKKg5LrcpVQ}lqcAB|51nv3S8P`!heU@b;ekfpU_>LD5)5Y&gVAE9?nTtPimX=| z1#qK2hG`@NKSj!6?DKo*P10--zE?-%8EZdxk5uz*l3u#&zUjK|A@tqJMGr-3lOh#A zbsS3-cHR#`9HY2~Z=QT;>)qo#cx%CXIqsn; zNl>52Wl?DpoRnZa72bzR3wT~Zf6O-1X=dpFaD#%GELub!E5>-u+rTzR7qgfE#RHREd{MJ-K%Uj2_8;Z4=9n4K$~YYNRY zbmXik^u{3^aQu`BSlgMl)0H;0pF>lYSKAHC@?Ma-gR;o(*?C&eelG`102t#oo#VcO zeDXgJ$?dZAuzpk*>q)LXsxf7Ec_qCIf?+sH~G!1^c z`k$S&m#YEhX|e5H>Q8wwSLZ`h!Xz6 zl|-G&P$FMabI)-qdwPdRK3K(H;++FPxHM6V@Pk9jX-=9}I@vj1DDRIa$r7r70_fqb9QYE&_jQg(oXKN-KGQ6G?_Cb z9{ie6R~$@AsQgI97pW<)D#Ou_P{@dUkUsfGPiF2i%hb=^o|(c||{!(5oZ$Z9LMtfoC=kY_Lx#v5v` z~mUa#H-=<|=rXRi0v8k8y`v(i}$4=(PS>U>7(7h$XSq6Cr z=n7^+g$>6Cj7)1;jd+xaj!Gk$+RNFD`YJAW&!9qP`vQ6bY)rtP^7)67Gjlw|?&i^J zRSd?SL=iG^6aXTvTR+th@Vgqp0^(p7N{uu-B>M6G{0NJ2j-4@@9%4}^m%-SEBN$gK z>buZZKPeB6?!78uHW;{Vy||aJyX)&3DDqL%{ZvWyXci4GwWe~~WCqTPCJbLp#L-tTD{CXig9hXHcX(Sb*$Nr=*>FNo%N7B{*HVz zW*5OF^DFu3Zn*c?xH8PkT|ZdCJTqm5kpNbaq+Bl>7@*qvtJDQAe_NBr}+EZP1u~=n~KSuUBB~f=i+xW~>BB5Ujlim&& zUSjYHKV4d4{UK0#15sE^!j$4K)&+)%P!mL3ylGYdWsje{FrG(+n>uYqwEFDwfp3(2 z*p^Q1qxIws?$VO%W#*?1Mp`6qiY}&#lzHvRzwK8&={Cx8;?1EwT%J`qeA}lsP^|Ox z^NO{?FC)YN5M!lI_JO_jKP1sqb9EgxrSWDPgy2IBh$=-Af!*Q=Rwv~Sx&?%Y4S+4y z*pz$OXF@Y7iaL*WbxS(5l>u6iFk0`^$tr{Qz%8#h86%;NUSht73QvJVjVOckc3BxG 
zLZmtFo}N#-m+#Vbp+~ldj$6Qt-YzaTOugk;8(j?=RIMYUuM|Mz1@0kz2LSP?|Uu7Y$*7E^nLc9&oTBCj7idf0lFpUDvflsYo2mRuCmL&ern+2`f!Yp z%rbg931$K7Co;Ck2eW+xh&~gk|4a?xs!%AFIo7PpX$%>n18a+9v#nWuw?bd_SWQSC zfx$d_>Sd`FBkWM*dUF>z(jAHs*4U{Jdsieu_FS8H3H`pU>Rx5swo@HkjPGEw6(cWT|IShb!)GcDg}a_&(>gIQGPRnDR%Y+wcjO~)l_ z{GWi&b*1mLb=rOR38SM^lR!u*z`eZ3Q?^ZVLyFRnA+|b6?tk%5nF88B?~#K|sr*0T z$A7a915v;iEKi08&1J!{XwX={nJ#c-jF5(}Ehr$Ebl5-q6nPhi;&tINnHm^Ml**Nq zVc<|$?*n9CmMeyzoqaE+z%wn*a{bPP=y5v8fgNSb%y4m)i7!-3j(z_Cl25xi)d&KFSI`Tq3-M*}P54l5>=+*+{_ z^4M@R>NE3D7K46|18Ih4EYtRNw6HDi!BR8zd29wvg=l9~SuOi2sj3|4x@-3al**o5Ax0yk|79SQq#X@Rx>Em9UnRO1`$wP8$Bdc?B|E9 z#CBcoQ+4QSIoF_Q(cZp<*Xu);QYD|DtVrlqJaFG*5)3m_G5|zN^Z+o~_A|x=tz*`f zlPQQK#NK53e(9(R2?~_z^Xzyq(7~`9f783NcGoAoNQ<}yUs4$>6gf*Bk?RP|)F4NRrG{YSylMP z+#K`amDkEW)3PJ0`^0x5MLzCbl|)xyl?W-gG+IP5=XmdiQA7=NxIZkJ-?kss7DTjs zUP{4?&h<&kKg^BoWH?t2U5mXF3|(}rK1kG8B*TPGG8kA>;c2i4R98BL_dbg_rPEAW zQ)iUMlY`%o<3tEGH>j84>dIwq7Cl#KUoKGO7IjCal2de@vWYp&c>VxY_8fa^Zgu?Y z3(#cC`tQ3XhHq&S{>$YGJYq+bwQoR>$luiA* zD<*r*4(fBR*b%rvd9&R z5H^%+@PKrkBh*IB;pS$Eg>R$nvbMs`%cOBPH@r5{waugvqV!CuXYbZwdVIGLn-vx~ zd57|sB4md!VEt%jwO+0U%M~Vk>yX_(*{mwmngRcuj9W18>Q41{%r@576 z)!QyjhjL|y0o4@chj9#)BvVX~)|ldGsWclg5H47|fK+!=X5pz7n=5tY*v2)&*=2_u zH=HQxr0U^~P;5}otgTmlJy%(wM7o}D zwkHXhmjQ2oRI6h&d){A;6t3|}24wLn>su0ToXh%cZ5nRH)J}%QW(%5sAP%pw87f(mFiKY&mlHm{)|48y`x|(3i{4~z|}haY&4O= zpuH>|FPXXREt&al@|gKg9_V-}j4wDX$c)tQCCO>Q zrx9`zcgKP`9)Aa&fxGdp_U}li%)L+eJTd(XTGAbR=vLac>N^aK>LDBmpKP?5$*dm^ zehaQJ{w7Udil7VxG3SJMvgF z?E-wxAY9omC8jfcmKs*G>HStodK?p-!5?WC;tJ_vlw~_A)3M^b6`&9&-2}ugwp~2@ z%-r8r@uEPuX~U3Y5chB>MU}<2iz*H;yKI#(<6xpZO~?yb27B&pf`{qDgiGdHg}D^#{g-7F@QVUOn(@ zk?QHD;r1;g$ePjcsd>LLDiBUbqBe#6j@3PLQ~7uwlU{A!aHxY1{0CRH6tA%?muh?J zSnq*MMRMZzCF9~9qSKcCjd(7N-zbRqyOO1;{)2aGv4rxwUaNpOQKXTYVtFy0OsS`eJqf^}(f+cXBX$L1z6)ChKo<4S5k-+;cGjG2o z3!jp&sB5%i*tv2nH``ky%J-qS{N&mazbPvl7AZW5JDCg9f_|C6_-(b|qvdm@#cnPU zBK)Y>yu+GW{&b+p)bN2`z+4(|k>qS6Dr8-e6RH!pNY;xUqNjEoNFaS^rr!QFwPPPo za)FfNOxm&KNIlvY23Y1@U+~hJIiV*Wg1}p2uSF;nYD4c){n|2PJJEM}ey1N8Yk_R` z%@QF{e|a;7-{+QiBcj0n6G)^aoEn34|8ht8qL2~HZZES0{I#5<1Z*S;1_L<+brEA6 z&IDEb247R?aMOL$&p=&?6A>qi_oMpr8F!(Q^S&yLq{K7Mnl>KxHj+KRL>j)RcP*e> zU(g-qW$VZ#CBS>#VZ4Iq3+`h)-f49u`kpME{H?5ip`|5OrqA6FlwS#3F>g|78{pkRl3Jys?{u|+ zFh^r+zR#r8H$aTQ`6zXSJrnDY;=J;&+%dNRjOJ{sl|dbF^iL`p@?|?(1b?+VP2%IR zt><}l_PD#>-9c~H?XhrG}FrTa&UpL_M5h{ z{Q6Bmb0IPGz51+=5JAVw1PmpvFc^*yW5B5Q>O|ll>6zIaba!omF^x#i+a!Xn`0J!_ zQU?&S(8wk3HR3`f73ok9gz(84JEH;ZG7^}Q_4foar=dF6))>GNOQN2 zpZpHMuAW2}uai>TB((*EuU?g$fiiV|k&-Q?uZ1UGzPK@YEyn8@s;70^OQDCd$X~Y~ z3#Eiw5>gd|CBEAbmW`5iQ0`WHM8#J%`LsW{D6L&+12oOn?=hcV3I8EADRM!7{bDnU zKzfy$PH5Tp#qTQNBjNuEQ$QDMsqf)W;-d}@j+(7YYO4W7D3o0Q)_Y3pI4lBUWAqg1 zAEP;Ei*|}?b!$HCws7+vBl{Fn9TT6c)JXP)uq0hI16XGtO3r8{kcG4(b&|mG&PR=H z@7L7_%o3~idFp1iq8MSW{+7I7#;{u_+xOM9+{sg(T7b($KYkW=yx zRx!T^)MJi)L+1KTAB4)Pfqaku`f>jfonEjO-zM=X!&$EPFah&T zwI1%H0U;0~^2RVRt%M}W8D&`l7Qx?mu1WI8FW{6d`F$iHtF&jSs*f>$hhy|EFehn^ z>)m;@>`Y)Ghd!kWE?mlFJQjX1081R8Nxv9lg@1@wNZITTWdu$PIbj%g5HIzr>A14P zj70J)7BK@@bTCDQMFM&l)Z!CD{^re?{YxeS2u`Vd*)2KeYQ^v{a6HBxKBH1Ykf*iM zlTbcnk|PU}o_k~Zmi>+}Wipis#y}r97JcsGz=#~udLF?LiOgnnjL?@yK+;;RELx`& zwlEUi6ai{KKB!VxkXV2BYk-1V-k?fZoPI^$xM?JWP-#(N^q>AH1z3bvAitQr-U~Zy zpRM^HA0sdVBGM>*5of)?$ z&1a(K9R(VI~OH@C|xy5xDLwBwX2jxaR*&B1y)ju$-ET8-qYVmCT z-njMMx7SKG@@*h}#^B?9av|!*k4`PRn zuL^jw-G5x0uT$3m(~QDtoTv0WYCch{t-Vc;N=qFwWC(kV7dN;QS}PoSt@hv=+JRNV zhBHcZ^Y4+PeD>?nJGDoHD163(1*+hRZDP~&_VlkaIndm`UnX(XlC?w|?@PMf{gq=+ zDDo^6O;WI1-fKH<{V}=wPOCC`b5~{=$3Axc2X_TO<$q)&Twvz3jACMNW$zRjyJXIO zJPKoZ+E>YDj!>9QE?1yFc+>wV;XxL49V2O@bKN+$Hzm=BErGJan*Pwm?pM 
zUxI2SaP04`yb+B96;>ej{%Mk&^3QXsX4DV5MY1k+q2gn50tQO)GO=%OReE)c*Er_8 z5|fa=-jDICeSeX;a)mN3&!C*$RZq6&k}$z5J#KiplEQS(Tf&ulY!;_yaG(o>BNe_0 z)kr3or~d6_%wbUOQU}%hl@X(K(QT+&WMyrPnGSW03IHG+^r_QEKrNE6{7EF?*F)7D zAKfnUerdmkEKPN6LCml4o(DE$#(AIOf_avl$9NRG5>7^r=wds@yLVAG`P@=M4rEHL zOiS^7EjQh3ll2`)5TrOYZ-fd3{5(66i!zBX_pR%4;zKF_ z4K+_FgfD-$R=H1o{QnBPzaOCR&uZZY`NNeNc;r|B7mF%iVmNV4YmpWPlCQ@|1QgZn zBk_fg<1Rw67+QoF4Zi|F}xmK^SW&rYQ?1liJ7+Hx?!F!GPNA9qeIeYJ2 zWhm8E?Be5(qK|{45=-W~8m|DOSzhvpiPODcZhLEM-bkliT=&y`smw6(DUotnYvYOX zfSh(bvDnYl;8&`T2^paADh@0E0wQnu#11p&gz0=jqPnldnz>Cc0}d2ehCG`?1Ex-5 znQ93dS1zldx&CH>A=zw%0NCUyfC8|kr{f^|2n}5={hPlamBwINiiuaCW2{AZ9L`Kg z^4bNKLA@o=%RK`R`)woO-WSsvg>2I47*BlooA+I&hc+$4S(F_f5Bt;fPDOo=0bA7|Il5<5VOD;$}V11+APOwFi{#N&F#K(ZX9Y%#~l(?m&z1)ne5 zNh1dEaXKtE*+@#&5{SiokhqYnTX5up*={<3%{XrW0f0QK>6E|Nia!CB3D zlM>%6;^I^Ji}4=1D21VN0I27v!EH=31pK*LKYE+N!?!Wwrj)P)%t=i905y=Qe#aF{ zQsgn=&&mL7bKX}LSqUAn0tX(+@2eY%!j$p;g&#jV3394FLnwgc5eLqc=TfZ1vVqr&7ydi14%2cRs-s4*Y8|0Y(AXkv3gB}5{j zxm}ml&APUi;oiIkmbiVw;_g19_MoEt3f_In{zS%U_wkm{|Hsx>Mn%=W{VGUHH;6Py z3)0=)-Q6i2l2XzQ(%mIPNej~5Fr;)#*AR0y|9IYaopTnSW-Z{uUbCI z;IiR6f5ad_Ur!=(R4e>gE7#2|3hOT)yJA=-*fkn8Gv*Hn@BNnZVCFbEU*zF^I;Uk- zNwV&EDEo%k81474nv#3QF~3YP+g5wGT0vjZV+rj%O*+rrnybfy$7Lr5zKvl3{Ce3z zeZ-e9g7@>ayQhC#`CoL#i-)gI#Lvull6Xc(U^W6t(eKCDm8YVpoB*w1{1GoOGB8C9nVmiU*;+3ce>|W&{?EgAx`%)bN zs~FP|T8O)~cg8mr65i%?DMOFd5e6n}PPizR(CId9e@~>OYBBF%rPS^8l{UWcNzdp)fp1Jn{Q{(_w5P z3?32Qqu6XeTpSG*H9d4uwjPPkqRTxhMbsv8qhPb{O!vYqm33dU>XQ74C#C8l)TOx& zYCz3wd@U*6lA)RA8lXO(zRkENw|63JT+7(hzNMCX(ewv|Urpj*pqS>X=f8R@bUsr| zGgy0{Dss@0gU{aavf*?U-ZU#ln4`j%H>|9uQCYyj#$YP-be}TtK9l4QvM@3n zhvKBruBdztIen54jUK z@C}kX#M~aWA_6F!R|`6UNu>MYP4LMXDsksa>dtfcs!ZE~N?& zQjKbq7Sv(gq;?7%-F}!m7KF~SNXpp*owKH?a zdwL9`>ygy<{B4z1b5g)usgulkUM!e8T)Xbr3bS!Ce8f;){yd)MAEEdUl4=cnfjqOzWjTrq(+)XR7zQu-}!b)Tb)9r4KWjo9jo%n{<)F9rO_Fny4iC^KcfYPMTg~t_8FOL z35f`Ctl^I*{f?sUw@sVlTv~hV>`kIxqXIzH5NSe;hJUfdLZ~@I@S%7Uq0|BgP#TUM z_p)|>5>Q(CGN+-deKAuEy>@ZS>#dn@q?XY0iC-?h!<16NT|lT(?{op;Ic=Y-d{#AZ zri$fC@NlQ0r7|l|-RPE;&l6A``RwB^9;v5XsotsG*&v*h8EuEJ7saR~pgAw<59+fy zs>@hFg^8kI$o7lf@RT%&7?8*8qpo=R7B71jbIZUb2 z^#SwcA-|#PQ@1}*zOa@A>Aa_A8RC3iJGJ2QeOLycqX&3-1dFX# zczlkxHcvdH7HB_ChdU6Y9$@&hrCr&WLjZlEA{+e9TB5GURdmlLA zMa{&05@N(glJ?Pq0%t{#>(5jKca#nuFn*!m2z0F1>Mji8m-mX><#HYjh6Zj*nmuhk zA~+s&|11*pJ`NqrL;xa>d7KJ5+B_(pw?cK^hQ$$MUd$_fgQH5v82rjaYYtvF zEEx}MO~@+QS;emu-v$*i6_m4Ue%B>BO&1>>0yiZC@EF=ZG3S#Vr;sDK{{6DgLiN?N$D?U)no-Yx0^J%`z^H)xiV zln_72QW(4rq8(nzvXXlSfW}?F9ksta1z3%_c+&aJ;7vI**YerR5VCgocl_w!Cs|ZF zqAQNdcRT}vzW(~@@T{C{l1VnrbqS)j*E;MXJJGlevHQGON-T(HR6NhyJ8Af;hdl2r z^u=>Y$1%>=SX9>250__iWoI$W9;1fKhs7p%eVDug&)=e4DzjC;q_%3eI&LfYc0b`G z@YsP>&^_Nz_WJKAi9y=TUQ!jx#vjk8a zRn!CaW~3uD{~35!jj`^<;qRZ4 zU4zM#)+spif)D}jDWjoPn7e^mfJMws+SvQG)-6S)=Ka_XwX>SfI{*^qhjz676h11t zZ!5@-q8_WLM=FFXVvLT|4InlRXAWs+W~Ce9bCX_od4%Uo0mW2@lX#nbb+iRYU| zv-;YIkN9%@a+{=#$apA}I}aWS8*o^sbmuq`&=1Z~CqFrV1X!@PYyY%~vTOidsv;iV zh|G_hAoNkrjbU_esC|0`=t}0PfpO`(o4{mS4z!PKb;r`abM0w>M%T!Cl$=7bB6>t((&m z;_>siCx|HkQfj+Ty~`)~d~SaPT%qUo{>XUzZnE@moc#RrjgyrtHJM|AtAoQ~6^a^Y zYV;#{zgJgZy<+t69fs3|!_LF9hu{t*UMxA!Blk?Dqb+huv3tL~V7*KBF~1j@Tt;eh z!vE}b-w5lN!8Bpf4a=BnblVpE$Hyc2^@~1e?=w>(Kk$0kq&Rs!Me5+9CJ8(81kGVG zbs_Em7Hx1I*|7$YM(Zj(tWkzNyY6>8$H3};*Wt=<^m~`RT?55ID08&9 zK>#d&WqO-H)rmLn#uSDMNk(!LB}ia`jYt^O6b^)>$Wkl~)ZuPB*=hn#=Y@x-3ilY_ zgv|ww0Gw9)GSts+r|ZgoU2TN(NwmtS-j1h8eaO=3piu?bJx%X(dU1pZOb^z>bdLF5 zp8$bKo+=ld$`e}RGJn?NkH|GiZWA%Jeh!=(7eTbhuWl2p!65C|Gqe`|}MokAe4cs$^>) zy;S~reU6BqfSksh$+W#PZf?XV^fGs%E6%6kLx5HUz+#-1qJ5uFZO;tC>z=pTBzR(p zqMZXf>h{-ah8X-?9`v93CN?%5bB0J~ldvbzBfZ{zw7Ekhttrbgt(zC`576*{!GObH 
zoj*XbU_AToYp)u$EtxKg*6+uPiDx_VY#`^T@0L0%=hRA<$uMd+bHyL;m z=FXtBMbTOW#bL});-w0}S<-#j=Nf~5ppugcc{PU)DxS`_1LVijE4Vk7F=HnBuHUa# zImgPr)zt;tia1D&K5Nk%al-I*J?s{DEvEf4T#UmT`wE=%=s&Pw z9phu3t9`O`*KAN1QBCEm@5hP}{U=_9%rbxlB+e12tKxrfB7pOEd> zbeGRizm*W5o^vneAQrFywpKk$TT%0?Z3@Zxw)Gk%5mb)_@x)W8;{EO))-ntmgQf&b zrtU<)9xq2zeZUe95ljSL%vm?C3eowqTt9h%9QL_&Z*|rL*?*?n5$5FS7uA1MzyHQS zsowCD2?u-3lcggxO-XC}VB)=;J>_JmzjR^m6=s5dQ1XavrvK**g2V;?a`8BDSX6E^ zRu5B;-`TDp-taN-ZuWI$pVtfnzBSCl}lx^xZ z*>#9T5gc?Fw0m5T&6Hi$`tjR}@o;wl^cIY{{%GlOw!EH0a#OBk(fqWu09y0EcY}66 zUeYPoTxB>uTMY391VAFb3LAC-urd8L7a9Isk)}HFG??o2Zjy(~*9d--o}_Tb91<#gGt?Yn{e&S|cgO}D%LD2{tA?Lo+B)wf z(!2TZ2oJ|u_Cb8k!3U+bInm2}2Svx!FasP>@sIM)xeVuQ5nDH>crs|$&AW!TF1ru8 zs`PWqs+_f@7CA?ukf@j7e1BRO|1xT3w-;`YNVKm;H0J<$&TY}!XmlnGKQ@M-Y>~Yh z*Tzk8h2cWfVgQ{Ae(x;0f?K6C3Q7w=<0(22 z!#{}9lsqAF_fuWI{nTF|?VSSzgaYw`F6I_0br-65r5jcWk z%(w^@Atxv_s6Td0Nz;eE{K1dT)p*h_@WDP8;2S`!p9QAbCzGjQvshkEK*R-wj zW5z$LsdSn~bIBFseT@>lntGv$UQ0ZC;)GUYLOCOJ1=nyyvjs3x%EHx7uep4c7mv0>sZx>-85}WZ}pd zai?qqu~zyTTDcKsca%}I*Iljl?~PhH#?}+EWfYCLdyC8f+CLS;I(n&CkE26u(3Jo;_wQA2^I@qEeDQ{($-8 zbD_&DJ8?W3m6Z^T$+*iJ?|KLKo`9r)8Y&Vc$eBPLQVvvtG(!KnX>!5Lgp=kNoGyM- z*qdDg*{H2<&i1p^j#z}%4bycPz4Zjtf3rOuA-1}a^WW0u63SQ zx)m5;+Up?vx4eF5-C*W{61~>C;T!iOUEAxQrb)FwAaM8DUn3%(Zn;e;F;e?uZbrfq2Pqv)^+9LYJ$r4pcT}gW*9kx3-MSzHP@!90} zOin7vP5OlS5$Wwu`&G*K2_#od%@zn3)JQbA6rShilnJco_$=serH=n2l^*4oN z+JOebL8qtg#%>ShP>tFPe)8v`iOv3W@{kWxMV(O^PI6T3IWM_Szi(~05%rj)L$zdm zSDuD3$>08QO62Vz7NIWbI{u>a|7ThJwF&gm(*AX z4VJ)$r~}s?6_p^GI!~>pGj^nLgrh6@2iO&LsyVHufzAVLG>La&BuReK^Z%A5LG~Xb z2&O2aYf#f>JwizDLnm(p-_pQTx8LZ}+JWjUOW3xkW)sEbXh9aZbMskVrcDPe6Jzs2 zyR2Xwg?W06A|#s(>iJDtxbtapviJq33w-7_)qBt|)ek2L&>_c9h1IY7%)W=pwztil zDNo0DTlFJx+!Ss0m%GQh<(2FrwP@bS2#WF{F}k87a}Au`OWTz5pK$)=q-at}7d&sY z*C|OD&y4z2#pt;8mcuDYHi;(}pLB99~RW+GA*b?-x1!`(hRgE&j70aLwg-QoRrC zeMLe0wJc7mo)TV(3;Z48Dt*^BH2X>~S-PqD$KYJ1;C|lmpi=-}+gP|@GRa*(m!(uz zfWOD&`jde_&vBPM{Sk5De!=D`=0NxD}}yJ@x(a{=NWy(R@=}XzIC2uXdjEA`~kF(&3d%aH=veq0Bd4dxSmEz>GmKvPv|>xz9;5@6@bZv~_!!#0XQ6hMl^{0R zfVyfiibeZ7tq-J3HpW~aD9E#PWoe8nHTydzIA#OQDEEC7DX z4;&LcwhpK;k5^s2gY5%nYy@Bbkli7@j-_<$KIdEQ=M&@E@d z&JgukkD@Dq77W0JS}}j=m}#n@$d&xYU0PNbl`R z`5v%6%LmziNxWAXFs4BVL%(S(xI6=PBjG9Ldj#>FtUI>WtU8Ikq}`XGRYt_;Nq#q` zTLB5)t#_wSJ1q4~zy&RrQPs8F$3N^Z<%}|_f#Ys~oM*bD(3l;o5D@!-Wfi%II6dWM zi$wyqSfX+-xcr7^VDw&@2+u%%HN@BRhj1s55!>xDFM)v!dk<1hxOgRc-V?l>A^I_P z1STfn{9wT{Kv`10zgYbnSC)9Yj6>PHfT`IpE^zvl5i4u8Q=mEwi2Vc`o#e7n@BTBzgn%sDLC7_+(ZiDc zNvV)QC)QQgv05@$*(-PKaMUGzKPg-XG`za$7wzc~AqBUufS@7Z$HtHnGtHb!^mFTv z*BVS;#f@FA)p8O~=^q)WwHptL14zD+U1oVds29r#Ay zO~qp;_bB-12elY2}i`Fq2oN}-i8UPj{k$dhfm2*O18mPg^E?=4dY8giPMzmYkvJfzdr#;)d;ff z*+56tiKikD^%sg!lmZ_Dx$p`>H_O6l@+HoR>X~@$m zdUhf~oW2Z>-P${mBXQa0ukt{AFuET*AjP!K=%VEF1ota^IhEj?6cwL%G$)P1!y9%3 zV#PkU^4hn?-S@#Ex^MZu6QA|e*B#yu;>E6vKmH3k9jZmr?6Dif+2mXpR*B%6+?HzP~q0iqlfT-SQ6usN-Uji_5pP#CM2~#=pak^An%*`t0_GyHX1WetkWN0_#3W`4?%I z9;(OpkJzYDq{O%ZulLKGRrD1*r+AUGl5-3SK-*~^X2Sd1ywcpsw*vx# zIWOLUnDS|-jO?SymNEWF3+Xi4`JsaV`g3$DM?vVL{f>&%kV=P$5PT_{esZ&6ITjZR z0Pa16`ww6Z<|Q>m2x9a*5LsLd!vi?*~-i((*s{9~WV__pg7Gy+=P%T6=6&g@D* zVPwqGPv+`9&uSRbGrX=it|EQ1ehzLGQdvcbW{gG#e|?ALbcg_lSs+>Pl5s7V0_?4Y zydv5*2F$4w6xbQs0QY9}VfxCiir<~B<X zF;jp(&2^8m5TfdE8;9Z1Di1Y*YvQRERi#C1JWdoUK(7|1?>u5K0yp#M1x;~+#c3@k z{Sz6KGq;A1J}Er?p$z$>MQuo-f-lCcQ2?+s){a`ASAsM}+1K@Mi;@UfM~5 z4R2eISH#c=7qb)+@_mT08Yv{DU<-l%0W_^1>LkC&i?A(2p&gs0VqT-hcMZKxf#P9l+`TBp+ zo9q)Wf88NKb+ov7JyyVrf`#7-d$8;L`4 zNoeW_jRxep8%0XV;Ua;L;Y;5i(ZE8fF;Tpuk<)2%vpe3>iY*}vkqzudBqH8eMsnf2 z<|sqKz(yuItcu`Zi@)prn=fI~MThguB)nt6Fw=o&EBZLYil1^BGT&^;>%Jg4=~1%-wOnl3rV`aUL93?~L6C3$a+h&5;(Igdpikf^ 
z;azs(x%zYX2#o=qx>VpHNVUMD{(B8^T!+dgF;$NF=t##bT#dRU^N6kwA?}4s>?yRN zJD8I$MZ_RK7uLf%cw1Mznx==eM`^>-!}(aeklgiT#1Qg_-opS)pcA5Ou420coTu?e z0xK2B!>aZb+0`BDQth|baqLlayY^I7B>+es6E9ecr|g|Oc)jJ!NsVZ{j^h|i^ORTx zq-y;6=XR#fs*Y+(8b=iDv2hCHB1Lj&&WEtvT{%&yj(X;<^~8>gK5^Pl<=NMYG4}jcve%l)5D(9U?%PLtm(&St(ujptn9; z<3FqhVjzV~9)~ToTmC$S<^?#7qn1;s|7mAO2%Nt|WK`sout?zyeajTTA!b`U&{Ffq zYAE&IfgB2Oq#dZhqExelFn`Ne{s=jXYz05 zTn`H^BfO^N&@;g@Wh*=P+0CQUK6)PCT;!}8NVQjgS^N!26wB6!7T{ZOb(}a{l6o%?32xeLw(~*b2G1Dn50VRh$t*<4ue?iwhLR zr5vmj+RnxJ=U|5T7!D`=6MG={u+cLvwUo?yuFPaPbQBy=z)A$;{X`S6p&NaA7VtQ5 z{LZ8_7wbUO<}~?^Cc>bx?r`UZ_Zjak?p+vK{c;n}<+vkwQ@)nLd1p$>>PzSS*lQ04 z$AE{R23{;peIJ_}OOxcZljWiFWoki9(`b+_)3M6Rq)2ZilX6=v%xE)kLuwe-$6F~2 zNNUzj`$2mP;^Z6mN~m%gr*6MxNIK+gjJH=XDa@&r&4n25e6{Sl5gOb) zA{q9DxszLu&?eekId8Z0XFru|d7DTp1}MRPpax7=$kN29w5%W zFOVPHw;WF5=ns9UudWSvN+fb!ZnjzwJkkLlep%Ms4&1|_CzOPzdyX~}Z zv(NjyTL75>-ui_1{R251TBvF zthbR;Jadm!!z!M?OMC(JJ1H8^|so{-?a%! z3UwyuINtb+!aS(Sy_T@9wP?sF_(}46%DM3E4cWz*RfscPpYVxq^rKMon?Zkyqtvtx zM!qXCQhEcVfQ+<4HrkQql*^J!pO@NcrlS<*w_I~4>g0Z;5fF>P0qVnkx75?*kqsDr zCtczOj%ujHqI=6bjxqQRWNXh4ahs=!*-uOBzKVf@?;)aR+5Xh;i#Q{m9Wg>vi+}OI z&%UjvRpqfI{F;r!HMirD(P(qxf<1zH-BnIHJD#YVJ6pKug$7~2_@$HkM?w*G<;P%F zSJ@@JZ5FBU<$@#T+j2p=cAqf~LDN|dfQ~X^q!=MN9;_54&k^iJKHx3%?9Olf3K`KOC|Fw6fCg64-_m~q8 z1@=mHo$;i-a)B>5=9O|^9RLYLRrYRX@|n3EK0@rDy|pzi(7!E1^q1A^`ml>vs!CFKs1Y^iuJ}{gudMU1 zk@w?tAF2Mv{0NID^%teg)egS4gVodbd6lF-d&~M*Xs}#x(qwCWKE#0z?1?W6>erX( zlLC)Y^qf>nnh0JlDz;Nu&cH8e?@ihYG9j=wc}#vVtr`WP94!h8756S2y_T%zNCEZf z_o|r_GqU()xF?ANv$zvr3-~`&K~O>f(bRoIvDEuZ zV(2ztNDfrhpt4obwnDvp+AtF~KW5cm4!w&D>iaeqORr$~*Y83vHuZ*@E}6?M&$n}? zd{s-3^&>@2YE~!f0kQy-livFC&GFJ6KW*E&d+ms2lVeN`5zzM;b=HeE>l8UvqavUj zEj`YAM!8nUT5yxTG#syuDVeGuHjJHd(_ofQzGGve^xR%apz@Rl!dkNYJ?#YQdJjOM zi?KB~?6vvjpP02ACoprLQtB(+nHo?de{R4#6r%&4Rteqwoth*;Ag}Hq`WN9Ed~Al5 zOMT9R<*rg*KH)3re}rnDrmvXB4ot}w>pOYk;xTfPZ>dV&W|I`l%6=qL{VmyitIeKX zBKFNaTg8d!Di!rnq^fZPCTyquV_pLMnQ8)RMddX+o%@Z6dv7By(tU3TB2Lq|sYwT4 z-tWfWKg^zZu2<0Ry`P=H>@O#UZMd}JI-6UZhaS|#G=rTE3 zs?s9DuaZ*&S~N))(Dc^N!ivYhE`A;(J(kNtC6wND?07-&%=@@R3`Bg>R;IQ_qQmBM z0+m20RSn;|77#cgoK|YtoN~TR;wY|T_!JmQBP`+Zg^n3Tq(DYaa8rh@f>9kILR0#_ zO?bH{=6I4}jBd<%l1B)ygFp8aN@!M3Z);R?kWRQeewW%}cU9GtaadJi#|NcAAkZ^B zkE|N6OXC3>kK(!s3JjeNpMDyHgqa~jA_ec;tjn`cY4G*s$WNblxeS`$C{YUG{>F#X zYNC8Q$Z0;pT;{i^V`XuQBauUQK0)QLxp_jR+o{HkfO2Jsz(tKEGl-uk5RB9%%{}l} zn@7lCGZ+|$TvVm{wti#)rq8<$oX>j&?!WLH8uv#v8a{yiPm zDyvGqEWQMycOYE5=z9M*SW@^fvouAm$D)-yHnzFg;#mtopYx^~)#2qwz^nv^3ap}K zM%XBYdGKXHcON)&3OnVB>I$PdlfKq15*c%JMO3*ys<;>Lo$u_{a+KGoJkbzVF5UHa zJ=>9dJZMwkvR=NPtGy{sQL=`uZFSI3wOhQ@`Fl^=H%&C`fAbf{){3z^58I?4P-5KbmW4^W+)_{^!UN`5-aumfomqo_}rB)6+e z4SPk4wnf~=8>$;=g!6vP_zK6g8$HJCyU9ms!M|K@64ZazVKxnnC^ zf;w?S7xp@NdMCbm1 zjUL$LiHy%}4!%J|4}Hn(I+z#!JPpe|p_Siy3DO09u>Z>fWvdsA9Jzde6}_}NwCUqq0M|w#K7ZY zC_Q2s$>%E4{*)m>)+LV0cPh&oaJe#La)4bS zmCj@GvGnxS3>JYOj1kEum_heVSp6MtJZT(~q!K5-DCBe~d80B}70xq;7hCH1WoRY0 zkLI&?2irn}{q(E`g%?=5a?{-xnMJUG?<>-;s z)E5YPF0@(R(iP2lOLHm{_BZ}!N;5oE-$|8Yb}lLH4I_JX8EVSpqlrR4$uiS9!W1Ya z-BQ&wTTxGLaR_sQtEIo8zIBtxThrx=Wk{3Y<+0iijLo{Y(PjoTIaGygg?5@9ByxkX z>++mGe$1mTS-$((>$BGDd;v%OE^+uLZgl>Uw#VwkgW_wwRO=K*Ot+Yri%Zm{t=iHSM0(02i-@ZLc`{?XO{&}FXX82j1+4^bjOgVy9`Z1PON%k@n zd0Wi12}wOnW?p&GEtGk~68xPLCZix`8YWl*XaM`QIhu6Z-LK_Scf-ngMmpjER5GH zYj4fY$mbUmJPDEC9Qegrro-x>G$R1r*J*vAbZeOtz$;Dm3+G$xG=iqlg+>0k9o8w? 
zxo(6Wl|Rfbbhy|!==0^y=~tv2#Q&*w`HC?(U{52xQ)K)9teJ0N*!MXYNGCm6qj|l+ zRE*ZIlHemXJV2j-!_x>jvH1S}!l2;28HK8C9Bg03<~}w4;^pkL z-!3Xd@#Uq-D`#Tz&TddOWkTG(&OBUJ_ny)W_tTiXZj>etk4QDpoyDUyg~!3AMfmCf z6Uip*Gv;@iNpgc^Ff8c5U#+)?W13Rbe#Rol?v&F}vvw$OkQyuOjp&}RY=Pu~qo9rn*v7UT7qQtIo zirGqD*7S}88rnAC^0@J<5*l5gc@9_}nIiqOnxKqv^$8@cw!sw>BM`+4PSc-bXj=*+ z@K~QN5+IS3Zp-yPJ=xD2q@c~cv|EfO2)j3Vv`~dQ!;aWbC<9b!8XM^gRj20WUQB1! zE(&f#mhHW7MsE_;p_Bz_ytIW1^G2g-`hu?kk~)+zT5_JG-zozbksw7($AdPJ?pEsb9!T zAm(u)Mjy5FP&p|3{DB5d^S*KT>4y(C5HtKeozvipXW9B@#DC|u1TL^4T4$tT|2xXq z7y9^f4?tRf6ilIX(E66jWrCfbfI-j)?2%^R?{~U_nKhX2DTDa76#&TNDm@)DE(sJ) zvg_tsgs63_QF!~4ott2}zuVu!isOM;-^XAN!qbl}dIZAF@A0-ECAvzk z_tn0+Rx7M~d(w9NhF?@x#v$XP=!UC^AGo9QHujM2(HR10gPPz`bocgKvy<`6W@* z21pETC^q;U=^}KSFUswR|F3PE4sBSsS2usol|LuJW;L1o3CI1D<_C_~qJn?ApkIu~ z*R}xC-?5!F^5z~;)VdBVZ@Cy{G3ymx)}Kfi{iyGWwPaa)8ifF#*{-GL{kf3Jn)K+S z(!`zPc!nCDDCcG=af6NnOiB=edkpYAQ=E(H{P(-|u51`n`u#2+GA;s6l5(?F@A_CQ z2Uw7cGV42JHkC|~5Cy4rEu&h~Jti7U5PbKr-?hSB@8w85~?{u>TW}XvMS_iAe|A)W?)@Biw>{m6g7V~+ALd70gF`vA2=08i) zxDEXJ=0>Jhv0^S&pfNwU*8&{CS3|khnEvd>`X!$jh!2mSAq(~LhMP44_uJ8i#wn+A zdjRL_p~9EQB(`R_QI8`~;N097jw97u! zAui={B4C}`EJSg#CVPih6ZS+6nBad*hme?c`8PS<$wJyP~I^1M`sFced+|h#q@)j zz$HRpaa@sULkUaqXV9qln5bev_^%m_+ecOh(&8M(;|=6iX(hwI1`A@v4&Vm5HpOqG z`le@U_wlFN`+KP%AECyaEmLE0ekPud;OW<2oAq5H)S~p5TL;q(wI~#noqD{;t}{*f zFVzH;`4Y0+7vsGK`lW^NT;X&-4gvHSk%>8Y7XP&Mplnr$=JdBBUVj|p#|1bUCBf@i zQCTSY{bh}*ITO)boP7Pb^;E?Ly!u$8kFSkovx6(vfedMlLeOFQ;bdQ{X1GJ-&jZVe z8f8P{++*dUor0Nub_eVV=86&rOC_PR(9l1Yo~Jdak$M4;nPcSv7_A9Rq-VsO4vn9| zT^A|GAzzarS@-l2LeUM%jY}IRJzYPiR-A_K7i!v7{a1q^%m97ri1bvV`cw$vrT4Uz zN-pNke5ew9dgogt+n@>&d{Y(;@!~A?DOoA68GE2Gc#%-W z*r&2@X=p#im@&Y#4sy=XcGN6Xp)Ro*;J~jL96WUfR{~uqv#$^Hni$$tp(D-S?UR(m zdO(wmQbRkUEe1KMv8sN3JdkX76KRyV8x&@0B=} zb6VO9FG}~$XKM*?r!xKq*Uf*A$_bs%8m_o8DDy+2OVYTl@?a{GH^>t}MKL+#e+?u~ zTKD(?V~QTF^8*DX0Iw5^6DT{rk!zR5UpblLgVnUY@%r& zb!~&z0&orrhkHkC3m9ZiJoqbrlM7w4t*%wK;xND`EisNcm=q{1!LHrukCoivn)U0y zl&L}$ogG)mST5yG%#GZL3Io{2k~@ncKzTXKr0WlIh;9tD3NDlmUV5L`xEnDuv-2b z2c;i0)6oUl<=1@m_*Hzw63zbQZ&IWrE4kS@Z|1Z+d@Qq6%EJZabo>{`nKED7R~xhM zM)SdfAyJ(Z4Mu_dZD@R>w3)IEqb;+6-qi58Y%>q{JN;yt>I-H5Nzu4}(kEoM8cIlZ z6k8R)Yo#!_J(rJV0`|Wv5iIwPNNDPvS;T{s7aC?0q`>OA_Az)u?#1xF=iSz+@NN6F zL?2YP_6Gr0m`ylw3B}e39??sYEI`DC{>ufm_(=Vr-}X1!p+!$u-v4!vTOQt<3s1|& zk8<+=5Rd-_+y;`rh^`8PK%845yfJKE<=hw@@&CcuQRF}$>-O6SE5*#iuhg|`e6%bK zjy`*Q^|>T|ZZ{+0Li`UqV(%0GWth}^z724lJ`j2ZFhd}-SD0B;Gf%ILR4B;YpatUuc_EF!}^lz zkY>00pTeYeJl&qQiE@q$6^tKTug+dzSKDdeH=>rafdgq+!;*`K8USDXs(SZDZobzL1D6ob&Tl zy-y8@)WOe)R7qO00>xR(n2bowT$Fd^;N}~WspK*m(Vrt>T?coh{E*Jk?}@t~gdPbv zpr$IS=VlZzKq-!X!rlQPLVG3783#2oqat4I9gqU=Jd@p2+!jD6LPR|F3sBW* zZR5U3W`49{#Ng_(5m5*O@R|OGvJ4fU?Q_if{rE43*6%Y4XraT&bFAv-=5Y=1sq-;%*^b$?RXA^W#f5Pm=;w9D_r=sVCxJuBE z>1$6WeJu}8cSk)g_t)6ND^^B*UFlH5N(oY`BUM+3oN`s;De$m!Q%rI%4J9>s|CU_0 zOx(8{hml`b18+KF*hd3WxJ%0y->z||{G@mqQ<3*Ge&KM!hC$Y2w(UCO%YC85lKL|~ z^A{+!kfJ-_ZWO%4WI7e4{hES71{LAd+;n3~jgdj9tVuuUh#^Y=C#F~;2}5RI<~B(NdGvCl1uH23`dC|X$>iz$%}-3Bqi)5R4hpyrfz= z9{bC-z%jqYXh|x@#A@fBaibcA#PEX(*WdQi-kRm_S5X$QLB3@1(HRLfI@cb7BC^_| zzd(E@0tP;58Q4+R?KU@dc`V|@k`>G}9s1y__3&3Ilt((B!Or+z(5ZpN8KfA}gGJ0a zSf#3jx7RVdPVqMklFEE`0-mATrwT)Y>NEzR(uV(PBI80_6(imT6#S2Js(FZl<~0K& z1c`p8K>O;a)%YIV67!kJnlLfJ^N1H_l;o5a!E-Gs;B|Is3ABVv7(hQs!wL(gvyCNx zxMSbh!uqWVNfZb^Kx2EQWL7jGC8~+3FfkodyQy+q@Z5Q9MO9V)9aIc!Hh^8@btfj z)?+N+^o-fwT5T-H>MU_vvrmNqRuVR;#{k(Dr}#@F!Hb0AS%_lkLY0VRV;pQSRNFDQ ziDr(2zLbjcD>}Y)Rw_hW&is$St0}$KT5^QQ+}JyR9iV#oaMzxw4886%#~+r_>(agA zz(mFbnl<)X(}R9t1ho)MUolMSXhG1dp>oGb8_zGK6r&NN$wmTR;1ptg6%t|oztHUn 
z*)J!~v=&x_f28dHiT6M-e*pAhURADW+qf^9Va4u?M!>FI<&v04>&^rk2WqaCoD6d{4uy}>uPh$L3;!Wn8^7cX}=T_?(}xn z6zGcQgI=~i?QnVbGqR^L!#Q#_>Gy?;Eojjt#-ef##0`W|mAfN^x_(jdoN7zGZIwI8 zfT@xOw~#^nd_V@csjgPmH?#LL2m1IHpE*$G-8O!R{*OHVOZ1^(`Tauqk!N+w;f7^5 zM?jaxRG0dmT+-CAjY_XZ;Rw{4p0aSeRC7c2?XLy?PI-pX(Cl6}MtNvvyF5XD#bfsr zW=#Ow?5F;4eL?q28N0q#cSV=ha?xm)3Vx`Zmyq~o6!mjLFVIiycxefss5120>Fv&Z z#i4J&u*<$^4AnZ`qv=7SE&Dz2f9QJ4pf=aGZL~;{Qrz9$-Q8VVindtMKyi1cKyWA& zcZ$1vad)Rcf@^T!>3W{`d)Mr5?|;nPGnvfX;kXXF&XZJzUPdUn=2K^oOEgsc2Bt@0 zHcUbSR5%cn=^y(UY3fany2L!aQVoB6!R2IPmJmp~{4GT9 zuR6I^1G*Y_zuC>mr18$+6P(RRw@R*JTWAnE)+nEB-AZOo;%}f7c9}+qLT?xp{UsX4 zsqnjL0;n6A>Q}7*Xd4k=^=j*FRUl{B{5pu1DzNNl&?qp=8$V)QGqy9%+AeVo{6_FW z_AifH){P!9OE_%mPTAZzgDTaOx2}+tzm$k$IA)M%Z-YKj--F7u~YFGwk%`Z zC)&cC@%bC(6^OV_)2*|J975GKIn)|KqLlto1O+tK*&JsZbaPmk^@JJXfH$oBvXg)*K;*-MI+%fL_xv39Cv^X`=a#i*lZ#z5@T({mzuTI; zRvUI7LVu#UjFG{P_8mL9j_&liNdmw( z%b6o_8wHCHhekt=1S2EQK~?Gu_S;xb!_I|g?I3gjcL;QYLQc4?jpeNYG9{X?AiA9@ zmd*gn=2t8g{9tMcXe$u`pMeLigcu5OuY_7G`pxNhtsnK$=U%nOijXO~GisX{edk>l z1%Hda-xU_*F7DUIgld0i{xJXUBgNMiR2RIB7A*F-=C!sS!gK1n>N`J*>AhinfQ;qp zcwE4OOvb>DS&Soe?u}{Pd+0)<;WKt=Hls>pn3WvXfu;@pFg54>ik%hP3Gq$G!Ympa zAcU`(jIp&Z)aUJ1dHR{qAp`vz@t*_5be4XfBZUjg#Xdx2GF~WSy((^-(N2(fl{^g~ zi<tw6iyeX68*2jjl3ybavFrS9u<;}EZybm% zgOsHEqwS6kTBYt;6kE!~)s>u;mpeD@KyRBur#m_?v-t3T91}n0obUStj=0nE)D=}k zgoyT;ax48!tzvtr*AxGDqE5F566nleFhAk+{{zszeNoHJg%Y*CUFUNyJh_5k-T=km zIvM7I7}hN4oHtLWm5}UTWXCO_1iMjXy4~OfFW_c{GW`zIPivU znnLD-njXjk5Aa92e~hA8d<}G4(Q9!d>_8 z6Ro`5nP7lt>dd#`#u#FQ!gurBAM(DtR-oA&d#v$UJLxP7t;V)$q8F(wlW!ED)n&vaUGRyS7GTU9mrH2~f z=!=t`pRrNUAfp-(_PeJF_*wt z#>Fwm)X~9lM5ZH1LZp?0W9s>H!|kuBvX)>)(Hz9^-P=$?Q->;uSmyWJ<-DNoYBadM zm0nqgV#Q?>P3@ln#cHeyT^K?NBJn>fIj<$j59qeG zq}g;u;*60Hb)9Gql-lFlhxZC?uVPqlm}%yE2j_b}MY=F}d6YgTz5&l`IO7!7(l^*X z=W09`6ZiEEh`E)B9R|8@uEPjb`FI}&qq~LZGQMdL*6MUG6yJ_CHy6|(l&LoyHpXqm zvL8Y-Oi^Xi(X$Ua3pVR@Z=eE<+4k1jnRsrzZDF}wdhgO*$>A%G8KB!#lkfNG=8DCk z2(Yx@V2^YZX`CIoG_gBPw*pTXEqy&)d$d!Mm2lWtDPsm(j6|_YVIYJ%0OzQ~=b|U& zA%KfIM>*4;4c+e>^$B0FA(ajd?UU6T@58}ue*t)n`;gqZ{LuFO}ar0;8KuaADn=ZM-c z4=Tbs)5zUzSA)91ZOo2xrxrEfh+yw(gUA(@%~wj0_9Yyiv}OmMl_7lWG_~Mh?vmFa z>0vH@*QXe^&l_mgEcyj0K6}D8ch!9l2*0fEBQDv2ft?Xg_8X8*T$o9!%c#6bmCK@$lagJE4r}ZJCAj8 zSj9FPMoMA2+2(dRPlDkitrUeeVb~$Iq1biC6zaE3^hiJ_tI$FjFGaVT!1y^i9)5?9 zG1l#}8N!1iDr_x)hLHFJy$q8G{zJrN5NQzJ49ZqFSDpZLXPGgdXI^de$gdF0JPYj& z9@gDRF}s{lOID$3%oe7VmI$F-)Hlm1vPt;hRLOUXU~f^zXD^6IOG?vDX6Nn3;X*Ka z(XU~&WN{;7-i~4QoXQ;&#!L*yip&rO{dUG5^>xE;aM1MeIq62SX z;(Ow;##(=BX1hQjTQd2>>^-|p4x1(2eSwd)5_w-E^^^)p^V^h|(g&6eKj(01w~{>6 zPxM!vYZwonS_UzAiKh}v8@T#1!;NxocFy>G1J(3KXGu0+a&uhvHETt;-`e?*fU}2m zIkUME6xgRHp1#P1uV~T*4={4o5|n_Xbr#sDW0vVjYc07{+xe(DeD1bvc5b^iF$I14 zF-cAI;Aq++EKui4$2Ki9;y!HV6V-ogFTy%}}%5AxpN+IH8$A2D=h_T}#0&!wj#YsAeMERF$g8rlP`> ziVtSkQ44e7$F4F}OW%L?=vTyHLYZ;*Bfg)D-7VG>8u<$Os8%Up73K^r9p&9EvMqypV$wS$BJ%$9_z5dp{!j2h++Yj_GBD zD)^zh+%e*I8AF8Gb}huz%IM7`_I_KZ^7+BF6MQ*!n(>7@``l?rHOybETUB!GSP?5) z4SZ7zQ!Mcbzha^&%rsmo@>kI}3F0!~3J}f7F5%7GD!AI&hOc_!CPw5VfvaB2F*$uQfH+lE==jw{yrdJ?q;x- zq?_WcN#u=!gl~CJgy^|x&izWo8?(VRx*Wf3fdTMR6a%_4Ak8dZNtJAiuHmsF6lH!? 
z;V5zVQVgq$-k_D(NhmWOVgZXJBiseBw`-pHbtS46N7x#6I4 zLWvPG9egjSpNCuJd(yvUN5a==8mfPhzE}cX<)>cu@z;TQ%ZsKrCiI*R;dp?xuJxB9 zw$Bd>@d+7$E%U}IsQ65kr66|%D--lDm~)ZM+2EB``5!3!*ORM|0G|C&gb^<)!zmf7 zDV<))D3n0V?MQ^EWgG=WnbE@gIMf6uNO%L_b}hd^mCA0E!K6QYsr=zQeBRglq;SDl zM*GR@jzV~Ej2rhM!ouK2vLGD7R-4Zrjp0vI#oj_i&J9~4CgsMX?TEc%*rWz%hn^bu zyBWCYJ}If(GgS=tNk^{@sYXS7hQ1B87t`XvOy4g$cNVkX!-icU3dQ&sEH^GQ}AcS8$VaGkH(c zYmS}jmak$?Gx0F4=H#WX|K1*g%2BGU|I{O;J3UdZ9gp{wikRcYYKCQFgJwg)IMu1}hA`YuN&VL{qz>(ui`hY%0KPUU}zj26#)A%5VFztouUqvFuh(cba>wKGF}$!s706r zvGC-%VrE?28g@YK;;1Qp@PD|>;%7uy&W+;xk~#J0M0%yX`X%s!I=1d2#wn+kS51&{*E@Httw)Kdnz$#KUZ+df1&7IbK3QpJ2%Q z)h(sqT2-4=%T7pgSemJ?IJon-IAOulY7FuGQrU;FG=e!4SS`Y<4S89!5WGPr4PTnA zDU{gfBFU$Ur8ieqJN)^_9}`!y>@)1@Vt!L!NW4t4S=`b$lSBvJx0V1yJH;UcEKIuV zw2_R4Q~}39hr}swm$L`OF+@{^a)dNyV185Tvw>4Nr2h+v21vwrQ-sTeTYseKBHg>( zBu-#HV2SB`7jdGRdUN6z;ZP*_Vd#ZmAI^ERaMLNyh+r&nb>qvWz$JrLwdIsM!V4Q_ z?}V$y!k2YD4w1{K;FU}azM)yRd@+tLZb~oD8lsTBgf!7a96#LqVjy!YM9H^IjLVcu zD)-QCu8I&F3Ov`Cwlp!cDhmP|*6jCx9D9Yx3-6&@4%P6_o~h&P+!NN~Sre_5JW)H7zX^WDr>fdOObrE_Q z6Fa<4bLK{*8hyq;qITeE^t2vz#*2dZTkCGS=ZUo1MB$Nof(A#Y(=Lww#I7_t%|ZVfX6EGtK)PCnmiQ{qFn>y-KxCAAH(fNw0^&;lIz zOq$Aq#$P&6~%q%5V8T+_(IxD>Wh8q+8<1 zn1>Wf-~DOKpl5yTKg94KH0(x)-S^HVKaE%Z7c>0lNsbhfLa;HVm|Sxh-a!b$kzQ7w zNr+1vNai6ID9&~g0J6+t*;`?*)Btu|SCrlNW0&#Srszu`=8yYcW*9}y6~&ydhSOcM zZd#J_w)%LgbIn;LoUXZdYLAPj7U#(By@N?|k0&)6jxRDOs7*;>-EgN0IHWcoRC{MH z#rO0F(dIW}F+)fE={cr+3GL={rOjBjLntMbOrkTZc+=GG)@6TS-n^pAx}Whg9qD{5 zGdjp=&81cLDwz85{(yFQ8BuO#HNCL_&6gC-vGX| zX>uw|QN1YB!CEz+p`7W^Gzug-|5~5+DYn)Vq@2zjo~R9?H}u-8{Rv^^btcgJO>ylk zb0No`d;Huk;O=3{}-LhS^<-FWKo50Zm&%=eD1< z^T{96i1h0jFttxd+X-+bkBC_LtO?;uCx2QaRj6q=pXldvT?;x#Z^@!2J*aARvXqJC zf|VAxvMB=5DEve4qS1u(?he#{F}9smO0T>Qp?#=_IVu>0qYISapSdGtHwM3T7;pyw zGeng$_7nj`D|X2>f1n^JK}2nE>xsu(>vu*0lQ4(^F{!J%%YM}yzdrgffB5X;xv;d_ zi0gG`;wS5IJMEPUCQp_&pRC=-)tV3*r(cOdQ!w@iC)-?Yc+b~cib%&4pb?ETXPe{K zlq2u_vd}xu!so`HtRbzW?%j)jm4}EJY}-S$Cxq2SEt=N%%Vu#)%N@IZuIAU)BWbp;$!q2N{?H+-$FU4nVrQ6#?QI!Lzl6tv3oL#S1ObJ9O6h6Z3@?m8qcVow+O90Wc))RqH1_tHn@ zz=v+H=tl9w0 z?m%%eWk7-Kt;N`3|CtnIkp2$xwG+u!$I^LNPyMQ0n8SDmp4ht12h~J+aY=o|UQ#DT zlgt!ND0a}&1MS<&J&C(ydut47p+%z~dS}xaFdZ550P9j;$ZSn4|L2DlVhqoHrSJ}>y*8OrQP7s^qjfFbGVen7&VWr!c? z9`zhz+aia)^O4V zv8N}VZNAhe_My*8eoI&sXw*T&C6NkEO3 zlho`hkxL&U1&))~7xex=EuNGQct}lg03vucxxXU(A4L5kf%&IJ?f+MbOTC}agNB=d z2(MEV%-GCCa-J>{50v^;5x#OtA{k9SMM=Q z?@%G^uLtVh!arlg$i{reZRZmAC7RyuWft4Oh?h4Hlp$SdiYcv5H<|JQ;R17 z<4Aq?hcaAf(JjrgGOW#71~+c_Y>zbsk)g9I#IEA>6poMBza-HZ(u z3aC?dV4$%GMoHI4c=1Mb5y|~Vk9fCu*ww#pvzJ7{E&$u80(xKdep&C#&#jDrZj~V_%v}+Op2HVokpQt!`0;&A zczL(_XZsHX_9>yrs!8$BtohPXUuXGO>i?_j>+RH5O5#?(lYZuoze8khfM7p>&&G#R!7g~`eGRoiV~MKxYYS!3nqL>7lMu)gH(SKB|Kf-FFv)v~IV z6+P_KH77n7=qeB^Lyb+*eDsZJpGGs$Q6g+L3Jt_(J8PeB+gl07Ih^`0es=l5OkBKv zZKf(c&-bQY^_X<4EoTpGh&eGZqB*GG9SlARk@#r4uZ(EZqw%5$yO;yfUCDIN{{nE! 
z=jM}D9~FaQX;-(BGniPQOd`-Q-U|Y1+#{uG4Pn>SmQIk>08o;V>n$HJXGZk|; zuW+0m!uO-Y{t+Tgk7k#<(^boV-&%U<5_0k(m9FHKUPCHJSDpI(NrvSO?Wy8#B z?WNDrJrDiDenBu?wPfkR9gC#soTfWd>T|_*oRF3_|F7d>foC`~cvzV0gV>m3?R2bx2gxf6a zSdU_LrgbXgwFP8tw=3i6c9l*hlYW?-dS#Zdx_i zy&}_?wYy{sZk-C@7;bXVH|ZAlUFTos=1@?;T-A^ieNoK+vfTf;q3=atXL9g~7?`=O z6zGu=q3_>05|ijInGT1NRBDL*V(n?%fyv@dqvpl-voo@FM&p1Ll@1XMqCz-at>Ngl7i&Y{LM^=wkEXRvKxN z;veuL%@3!c7ej7fejRmP(KhU2XKSE~>=4?_virrWB-|4%zqt0Z!)6M*-J!;?7ikR_7v=lhkZWOOSOP4e8@ zz>#$HH?;pa1C&__KiR;H%zoTmW3zBXpF=oQ@TyZlb1*tS=|UF8l@gM$ zZiZoOQbDSQWW^3UBZsy}d*0Ew!EB}XHpF~OrrHETio^w>P_J~>ktDr+!LUnAwQTOz zlancEM)-Bnzcklk)Gb2l%manJRe?7|^@AG?nltaipJ1E&*{mWmzhULMDBTXF^`=2G zo1KQmuUK?sQz^)mfr`;1kctqAljfZ{=m{mqqG1%VPZ*g*6Xz;*;wb&8f1JS8Z+WS* ze(_enS|bHm6Zi1hDv^#%SmIeQm9Bz#_g(!xvKgKooS!%}w&&;9ZsiKig@ljT8m|{T zeIH=$s+6~&WdB$EejeDjXJI|*pE8dE?`?Kseuk4 z68`hd-Iqq5un)>iQ(dwgD>mnfMSc&Nj%a)0{En3tQWRL)V$qg-if`v?&|xW2F6>l{ zl4>WFm*|b&G>Id?Wb)ryAEHi{C^x4L#E|RL<7!%P~xS_juZ^9(WslSrbzM4VWhku12%BC(* zc1W#!+KvI8%)dEbWhIb5O14+oLkq+M3>hJ2I7xfAVh<8qT*VR`R+H=8Xl#xWYtUNq z!1HClx3F}PARYnIhWktT%$?@=h+pIcqd@(?Au0thhYa}=42!X1Eh-uuNmE_}hxYjA zf@YTRY8auZPV1#O=7s_?hO`?b-XoEZ!A zn0oYRF&F2}aVfLNJN7a?O^BFv2YxEPoVG;2K$A+Cq9~4+U5T{xrV=@iabL{+4** zGoH81rL3giFIl3~Tv5hmcX28@%}DVR_s+b@X(&TxY`%w{Zm`2y!az*rH zMDtW`FyfA54@tb&s0;lttODJgn8u#t#;;lSsISC~@po_bOjEWfugK`ZkMBWl zXMe0--(ARXeT|o>m2YwYNw=Mr?RM;mCN9m($;|};A@9?+g@<1#2aFH@_%_l--?qsS zf;p4ujYj)?Ji%9k!`YHqn^RacY&CfW3%p(ccC*TX~#E)kV$&+ zL=j<{otvdQ;{OInQ0^L_4nIZ4FJ=+Z{x6{YPrQ^YAwhKYj#FTdNQdV<7D4YHo8Xvg zIC2|5P(X@YVMBKECE)41cL8VpUY5-Te`l`SA zP9CBC%O;3M__4bL0Z+fKT$LQL9-w3|T@RNFq7o13i=9&fv^={toN5^-)b?GNApg=z z6maXmaSJ`h7RtV%^S#me3!#|#Cj!a@xdNr)>FZt{qCFzx^1j2^N;7U*(#K*Fe%ag; z8n@(LCHjw{>{oOW$XZ}V#L9W8j2j1<0R`hlF4+ez*b<-O(HVGL4n0G4pR&A6J^G4G zc(fgSD^3-m%yI9?;(EsdRl*So6oAjfs&B|paRj}Mx3pRGAwsGXWmdJA6QfF`^RDk&IOZP4+XXKrXHjJ2NB#@=s+g0b{l9HF&iMIN_?yQ9y#DB`z*4UgHcl%2 zZa*Xm;0TueHtnD*b#VWzBYuqtlB0MGVX%A~TB2bJw%-eBWr}tE%I}zU_5_tDswQ>K z_8D~M^0wcyrk>Mj3m^xe*Q_V9ox{gU?P7B0Xd=O_5XaNiSDX5t>!;gJcdbw2{7|di zvxD4NFKgaSLENkl;k-@r?W3#plig&<55$3XvQ_ztL8w*YZ_ZNC$`O)@sm_?4ClC^@d>QiRKr7qW`t0iLwel-H z)@0rhFz(EK!Y0T(mn}ZC5Q3uxL*r1zHQz0)?<0PM+w8}ZSk9~PYCNH zc^(jrQ({7X-?ts}8yWXVUkPesu$*ft zi`Oxyg}z|_)(|Cej$m;_Y{fZM=OwqC$bh9&=i_pP#pv9sB1Xqd%%ChgtQC;W3>ZBdbUC4|>~{{wyLVhS-|js5LD`{2R#hCL5X;n_c+7rHYseVZ zi5mLzR{+Pm51W*oT6*7}H-H@P;=|ME$+ZAZ>kG8$S(2_ms6weGL>DW036C&Rd6|U< z7%v!*gcwGl)_-N|B2)nbN+MAhdjD7IUKaR<$k6-*D)n3Ba8c8h1QZ-8IY#o~h}NLx ziWQg(bHc%ol zEjd;M^V0uP0YV)x3oIWA?ED7?bFngDbXtWZO1XahmT5(xSl0$5w!t+@gj1d53D4c%MTe2(%c2aPmo6eL)RbA0+NU!4r>+s1L$oC&sL zMgWr`DCrmnq2IpnYLMi8(3tneLrE47fx~9gY8!Kt_W=xh_5R90bPoGI{Yecj{m@3ypN2B7zwOVWuO|s!E*oiQH-C z0^~f&v6=m%b@--)X>ETyS|uKqZPA#7jA+NROzu=X+3Vsp9fT1J$$;S&`8GZBi9EMZ zgtt{-th9?CjzJ&$is--Ne*886ZoZ2dPiVPN@`Ot}VT7v;h1P4ika+;xa5ML&nG-;S zlW-yZUH9PaTQN&vSYI;}WE$dpF&3ajST(o)9K-dqEV0L0_f(^*>Q#H=D3b8VC9jE@ zdg2=A`_w)RFw08*-xkFh8-xJIW#%zykPl`=HxH@u%+7C5p3fC|IL&3qGor>Lb;PS+ z{N!b;|EvylJ+vk0GHz@`&$tq4-~@G4Q30@n#K(MiY&yCmKI3B!#ot|k6nK$*K53FxH&N%S_W^(ISSs>>p<BI$O-19Q9B@Gn+Mr|m z$#s<$XSy+yzy^Zco8`icnE0g#G9?FUlKDU_J!wZuki1CSO&}luKVpEPLXRL%)ndlp z{oN-8=bELWA-My~5hvg=OET|bYs=FFaG+yKAl=r8`uP+|!B@=Mr&<`?UeB`pKeGT# ziwOi@Jb0CMP^Ed$sSYahiRU_LW|uqJOpQzo1~IOPzPz9~2nn^)3VLq>T~O;yIypMQ z@?1@Z8mqBTU|qNZ&BjmeTS`OLn0?OAkah{MJha6khs34oxA6>$sO@`Ya6+-E6@+>W`^lsWD-!0bu=4(sy@AZ%#Bzq%)h<5spwO zY`?GE%Bu;=0&+Ob-FQb({vNKpN1((Ciii~+I^*BBddxX*X{fG`f>U_I({dV<^qf2s zI2>v5Y<|%a=Rkwo!CM=rw%w|&?&y)W|{4#Z*ysb_O=&tE8m#lgZjM6HziIU!XGyIVQ*}hpNA)g0Bv- z5EiErV6vKca3_Bqdq(29bXwaSW`#u_K3%GvEL8i2l$}AwP8@)UQ$(5@_{=AEKB-do 
z3T4@|PD`zOKHr^`jR;;P+|V@9vaU}Yeq~|@jO8D_(!5e`t(sAJwnzW3i0_7j-4~8V zT!@$bzoIIAq?|y(1+M--KW$y&)vV%@!Hh(5R)2K=W}_ni#Z@xDr6Dh?VJx%em~Mdk zW(8T%xL~m|@s5=o&CbuJcG(lZMI)Py)eQvvwSCaVCMhffxgvNzlSh~VzqI@$j>(u3 zUIUuv%edHu^^NaEMpj6-X$VSsEX}DGgwjnQkrT*z;V>T1{0*Mkd#YhGS{uEr6I&TK zX~`}W*e+5yWPA2V&3G4T(#U4IfEY z4#80ejPQ@hTV0ZgULa5RVd02}U7q?%j4?hQ!3kJRB7kI=(8TUXNG1oy01v+} z;HHT_lPyU~hMhDk}~BSYxn2J*&t30|%B|iAZRz zCGK6G^jzbHpSSuUFF#-#Esqo6J3(UR5f7&+2#xpSuaOyvZBq5v#)ugUm|U}?*!`9QZ0g^CsptOnNZ6$d95p0=~|ZR5)QIS1C_PK ziI()qZ*lPRRF~KQPsz~kA!04{Y{ItG(sB`6L`sNBmt#b6h;x@nusC|>ZH=J_RtN_& zn>#ryMW)m$;e|Zp=Dp;x?$!Gbnpjb1nz&;7URH}n^RDz+OJar}SGSp3n?rmy!4lYG z=fD-sE3&x2t1tApkaGfp@2<$7;Oc{C?oyOJeWu+Oc0n3%k{xKM>*BUGmd7bqx2E_W@*GC^pnFfwgP!P7r6q?EKqjftS4aljp7)uLj2$q`ktRK1}a5HLFO*nR}m9r{&In5f^q@x0M zSV?{FGam8_IM>KwOFDPQgEX_Q=#4-)uz}{!J^KGgj%04;J4 zD}qKG#?a%yI)6=>3?m zHwn64`oIjHxU>0oz$kSs^)<1FdBZhUZ) zsMd=F9`(CZoK$+E5nSczac3rm3UX1ZVdjHnb&@>;9<~{b2R0<}O(CAe3TQQD z5_-oMs~^PqA|~bjPo{){(d{rAoZ~yk`F|Rp|Ab8xR9`S*csK@Ge9$NKbWjqc;Lfnn zv<#7=G1Pf9#=cOHVvu1-pkPMOj|w~>k-JVE7?MpjzKk#@ADW!Ig9_O{sJEW*HCzsg zR}!J%xx6EkZl?+uff+gK43U1)?7XGb1(`t!z}w|%mEZOyK)97SzY@dcc?@mo#KOlP z907J?S`;J&0;>e`J%idV9z=5N-tV`267}m7AMk`wg;?OH7bjgG<=X+PIT<9K+wsua zataEx$kOTJ_#5#cxtK-SHJJJS#~y-2yO=v$#1Ff@Ov$q8$0<$aKY>9A)WrGir_!im zDwko@K%H>Rx}9Ni)hvD=vvANl8L_`KB@_tDnfFEKKrU9eCu2gsKax=ZcvK8k5YbY;F=sZx2l#%$W>vaMSX3AYV&i{#w8J)p0|koJ!qlZeuaws=;PC05 zS)zp?-l2@=sbws&aVrP_d-ZQOY$F)mF_=)a1kZChw;l40f?ci4<+(H5?(`<^0HVsPgs%dMH6wLF7SrcFqa}WcZS4A#w9~>IK?X!qhJc8^#v_JBl@* zr({6&u{Wn5UAX@SV z%OIWT>bcWs`xTJUDgx1We6)m1wgR{?2J+k0T@WTN)H>q==3NI~?i&?A)pcv^Q5?z` zYs!dIL?7N6TzH*$r@3Odl2E`Uf!F4(Cq-LI@$svwNi_VmysMyk<+{>#K<6~=>{6>- z#w(;rm$jJHSZ_b4cEEyfS$9~fF1vmVdjbE8*>cS0vpv1?gMIady!he!NldC`#0E~m zrG9x9N{VHf+CthJlc9jxp5AlAWetmE_%L(0Q~!<>vIm6wrno)#hO@*??>>Zw>!X6! 
z63s2Rl_k2=;Md8XvRzFKi&<)UUP>{J3m2<1!%BW2pZu6iI$f^>3^yv)V|wwJ9a`%yhM<>o{`Y*vo;)@SUuojTAmon zClq~Qkg0L#O-_~m`n_qgXx1Xlht4tIZ`OYWqFG^Y7}PBZsp~TWbA5h0|cK_u5v7r6*op<;O%8>SP(`(MBEZ_QnU3Vy1arEV?In&zl`XMWLX z9S|+{H4szc8d?SNi6|dkVeCI%1y#|$+J7baGEGw#qbB!eUVbGOGHY*IIKG#W0>NaW zFEMPHu1{o2=b4%efiXARg-&V0mmAt(Rc<_$@svjzQ8#d-2_*l9t%x%t852{Am-ar{ zhs=nYSbHT1%={gED+3IJAW5l2|DL4pyBB=`Z~vEuZ7uSl^|2&L|%IiF_>Z z`-W*B5bQ|@U+)okoY3D*)t05m>A$h@!VXH*7J(+D*s;i&6o1Qta6CVh$>V|!$>@8% zsYVE%V?8oeUXDOj)UT8fUGm9641{mJ1u|8{%ZM7HD9TVTf7S}Y3Q=Pj$;dLlHuf-# zg>NVw@2~4zD9&*3J>{176h%yi*V2Y^@|eFvQFYyxJ(FfhoGH9ytrZ6+8IC4jSudd; z^L2_KH*)dHG!bX>`8uFsJYika*o698%;B^snR>=0-6Ut-5Vh0pC`m0D8Vx4lM6uY2973wTe_&gEXj^b+xw=ap% z4dJy1-9aDT{EjzjP3ErDzHb8#|E^KPW)gV69FQIN7a#G^t@>_G=!+<$4WffQ!=KMl zo>~4uMM`ca+Zh6>DyRAGU7;GX6WkusgF~0@;RFYNgNg$GlTLYja?&nIH;@%6;vT~l zC%M&6-${Sg^tKE)VeRffP{Ypyk4|~V62l)>{T%-PZ5DAV$LzPvA#E#?=nz(`!jsd2q^!D{=Gv@$HsZY?V#EhH}duP^gR77XN+7SDdzI-)9#^= zJv(2bSNRb&GOy-if`aLD!YCPe_^kuTfzqG(9%Xp)r1mr_K?Zt1=8(pZraxCr`cD9# z%UxiOercXzgP^B`#0g14yyrKgo=u0B*e5VZT-MkjKWbkyVgC(mG+}yt@W}BLTN^=X zMZU&W$JAXwVYm2n`r@nFv{%6C>$l^Y(karWpTH>}{Q4r>!PyI@m;fHM=v|+KTNx`v z;3j|kxTlK3=#PcvK~a|Aei0epL0>MOCiLN`>g3&P^PZ13>{4HnWN#!@n4vgn@uNm6 zT&GseFiWcAD2*feu+6%DAQjz0x%G$3a%mZVIuFI*sRnd4wp#k<}VWheJ2|R@9^D6ru(bZT= zuSt~=M8TotP5fDajXi|Dy4c6By;ehm|8d+LN02+fN7KzZj?YF*Wu`Ka1MUIS84Y_XX`?V{ys4Sf=Dr8JQYN zeeV>n46TFzNy-BUFn=I;(N9Qq4O!DZQjZxO*1?cW6@49d8vaBb=%$!+HsD>khn+k zwfjaRhp5M9Tdch=PH%dXbJS(%tV%^=jQ17>TCmDIkhb3-L2Pjm4V`gCkq5(XO~+(a zc<@-~WFx~Rs3m0jn&`mY*n0|?!vV^F+z0TPl*e6{Oy^CCpe-X_2Sd&oX1M>B=SO3= zF)FAIGUV}Qo1qCjIC%#{PHVk;vgO;@r5Du`Ip{TB1Xz3dbuv3=SRhs8@`sCsn<8lf znES_yd}cgo?~n+*PJ>YCq3nVQE&0nZ546ZbWt6!*6k~x&6keyij)RhWMq(S3_ZyI} zxvK8;s4*=_#O0TPRnq`@iIpdxvA(TNYvG()%7#&EBDPaaJ$XoLoaC$wR53iX+=&yy zcu?@rqAVk(`;Bqh9E|h-YDhKAOXtBi3qfo7={mxg7>e!^K0*nN*MH9e)Y9euQQ;na zqKUR18+IBt8PiKKil?<9iWbrjeEs(XtdCVhYFKhJ`~$V1z`S8W*RJPiD!|Eg0N<8G z(8KYv+=?uk3?M9t{d>8(!m>u&7#HQw!e*wG0M~7IaB95xk0(P2f5NNSy>qA_?pl)s z0QQ7MQvK19C0H}_TEm?GU!qZ&BH{Gvu z6&#<>H2F{7^Py+1kgTMfQrJeXS!_FfvtALWtoXX_!k-Iv2;rwvEnF0e=7NUkh>*hB z1_?Q-+?#8oJ7h<0bCudD&JvcK_ETlT;vL&RZJH={13SulwGcwFD~;>dtc&-qXOn&l zS=rT0xk$Ct^wW-`2VRofgua5I3wU16M;;lEYJ(>gI(X0km@ilRfgnNdKisrlda32z zW%`WUmN&Ac_5)X_qAypanMNl|Vyv}Zyjt!_^c}V5tXI$DQdPhdj^OrDI7wct_dccT zTyva{1i@s}6SIj=_0Yo=Q^u)(;q0vu+6=sR^Pbz)bJ8hpWIcP)kt~n~7^^HZFq(xC zw?euJb=s}ej7b$(JKTfi%Ml%3P8{aLn5*m@=J+oiUO%d?Taw#~{A>^uq>%v=4-c^j z#O~#G@${VCyZG|mWBC3FAt-D&C(fx-FBwzIeRlU#_P!vIdc2`oPu>6eWAH2}o&V_k zb$T3qm|QArvDYvBNqBb>TASbDL-g&5-dXxORxq{uQEX)FSVL^a84yRBBr!Y2=+8hj z_;tD9fkAw5BTHtJ`rg0^niXx{t8V&zA~v@yfa`7NH+L&YI1XM(BUK+{-b7O1i?=&4 zP|JAISpGt`_M()wltitFGEbg>`gP{9)H=1p7q;QCn(bXZ9z$f+FIzu(&I+b?yj8== zSs!voEr=pxR&c|<_hSz-!Lg>x#+)?pV@kP{I)!%I!7TI z^n8LN*BHdmd+B|Yg)AnB)A=3I;~-4NY&d;iS&P_XNeGnP)WVqUtfFCwE2=HJfgHcs13iCV27){R@BQ74 z*pJT?rqzDV6(?~>Ib!Xq;ZWp7&?j(yCzpdUt=KRqxhDoFTC9nde@!Z zzp(lqL&9wDz9jj)lnzyE-E9?cSd?L(e(ob+V9YO<$hbJ2M|-pKyvyG;or?Y1em~_` zk2$6(Pda}iPv%*$Vq4Wgx4rZKc0>g+t<=l;RS>R31@_=X|K{=xdPQCx8ynxcuk8L> z0MTxFEKHL6FBRy$WXY$5su^5;!Y>5Ep9gGfn=$QO|H`~C_0t`SwT&L3K|6%Ja z!=ik&c27x&lpx&|Izx&z8c|A2c0 z!8~Orc_I3d-6Bh?Zu(Td{gWK|eRQv&Gl$nf*agp{BsQl^ju7^wxxMV*6_bv3fM4h% zu#pp@$yPH|HMPFWAz0*bTYeWUa`O>oKY!Wn5S3&Wt~>_ov)r3u*>6>zoTMAEz;51z z>bV~S!2QOXj`j6+`5p%-JW5$M+Yd2?*C5Po4lM_Rm*F0vHqQ0c(m(egptZXCxZ7M_ zWkP|#RLWatD|lf=wV%;xMLxr;p>dl&!Mq9;&m|c(moSF{-bG9w=y`b*e0Qi0gcKdD zPEAZ*e7h0m(nb^yhPVHb2ezljYwF9G>+nh%Jr51s%jcf191o5ie~62uPk)dCH^=+G z0rn3qxh@Zco<~YId;E_COGxOEKNH%D0P}Z96H=0k82YTIdRo8KhAmgj`9GyP89x>O zC$e?A!EZ%#thw~&Z8+b2z42pk^LE)_P#NhhtL2-(nP%d$Dpw{;Qb#Ch%nO;zdS`Qm 
zVYtPBx%lT6nGX6v$IL(@(G!>C(m)4pl>~aHMx6Yjob)CR{j_PSy8An0@4W>@%lp6q zhB5!`p!sRc#rT>1AF5M46Q9dIul*qLTSuP*uJi2cmp*++rSqguKmA2oaV3dd>!C(9)qabtidD;!x0OSjW7x>_ec6rY8& zuRpIEHNZ3C{&(6gjBWbb&YHQSG6w&3Rr?>#%e_M*Rif{UC5;D?6gD;2nWrXTx^MoF zVZBa!74o69U3QTh;g02U{rYbeK(h4cjW&n$su6a^t$$mr{1NU_-?L75WwY8T|17$T=2Tw zHr)Nply4l+IuL)=)@Y5ZuQ(y8gO(5x5|lyuS7I$cYezZnsv?U! z0zfFhDrD%<>FhV(Jv=NoK9#-HiXQV+X(KE3JHH`A8{77_cF`{AP7&50f@!x&5`H2( z_=M~E=-0Qo)fUvXKXIUQshsbZl{BE%;X<1Z{EftI@zuOyN9Oi}K#%igGUmFZ39s!b zTfy_3(%Z?F3AWd-iK_#KJmJ(pp-Y&MRyuti@C)QixVWgFKDhh z{qix9r2X1Z^V2KzFu00;n(A37swxv#D;>YuNER`q!0F2{kj2G^?8_}pq3gXKV>ddX z;C2*)Z}{Y$qx(lb}&H*}V+R_eRI`Eh_-b?I6@Dxp4+ z5Y+2y0F-S*BPVIF1%T1I_{RCYRPLEqbS=82SFU8fMx{ww@ZIA%HaI@`$d!nxzKb|l zxD*or)X9P@eCgQu#`9ifq!}L%% zU=6arV-=++s^9nol5v*v5XqRsD6HMr#q%uv+bN1?en{rt#ZSgjKf-FWW9vcEIppwn zX@yrH59D5U91KlMD0?HNgjVO`xgxV#N1iu!wP5HJ4&9uRom-9XGos18_oA9YFUWgQ zGIYx-&l;`D8B-(R$4$oXcU-3|cEmkGw;zBhur6)faWu*+rmx+N(J*jHlH{gai&nuk zP-nW+xi3qh-1^a7&+PX5fJMn0imq+7TI)U*QUD=X9wu67Dgze`$*$wQrLr_bd@o&# zXmbwos$LV%q&xBU{~8)Y;zX*Hv3jQB3ez1V3@l2vP7Emts7X|Vq*9Epf| zeCwcfJis^W5|Ftg8sy46%5M29KXwjI+zo5*EyO0Sl>acFbIK?Z|Nki*)2PW@JM=^0 z7w0UZBjLaR3`?&Sn%((kNYyXxv!<&hROl1Bb@YbG%(UEhKiLhSPvPy|SIWgG#`}L4 z-OgrnKQhof6!G>ayYL8xVb~MT7x}Ulj6mQj64UgYBe{k6DMU{hjk8|*a$)c5Mr<`; zJo^#>-)Fw{eRx!yqv$;Ztt&r!Bu3^k!Z=$qGd%O!MJ&YSFR zZ8430H|s}-H#0&A8}|n5Yp9Jlw>AO&Dc-3S*GV%uSKOv1IoZ_PG@Fw6h?X7OEMzyI zFm8O$Iz}>BEkBGaV}3=Hw(n%QE4XGznc|b7`(X0R?4Q+GmQwKtcdKkn+iqJ|m&LR4 zH?=`pt8?Ycvg6k`2&SR8tQYWa!gX&ynIX;-F4kWtr0T#o%I_)_9=)6s2X8y>DxW<3 zb{AmcxJZ4Vw05E>bEpej^-N=RG5F(m>Sl;1I z)!9A>!<ZV+_-@WqC%J}j@6ANwAA z+bp-Z{+%3?gQ-J&)U(il%DE^?<`dSh=k8!Jzfg)dU2J(8tSUUxcHxx~E=`3-O>9vY|>i)pyNX@60{3pdC2@vvoYcKUp)fdX2`WUNpGBiER2m|D>4T_KR+j5}8ao_1%i0=8ql~+43Ua*l z%)AC>tSP$Vw^I_A+=n_LkFn_b`A%VHMd(D_W7LfT%=+pa8&jwLv=g}3tG<)sHn9+L z4KAb8mo4XqHldRI^+?l5w%H|iPi1y_J_VPXw^;;`4AKvTrV1avu=iUb4-I8TJY)cF zKL5yjatXuyy*EHmS-tpLAN73tienRTrIA8)jrh1^43` z%TXw*@b8(f^$JHK;A&%!ol@TY6<3ahBALlXC;p<-(GjbHz3LIIg+F9ng#Y=1(e~qG zua57>6bXTmin3(_K1nyaX#JjlB$D?TWV7lNL_@8cLw~h3bMDPd3mCos7{7`QT*eV| z6L&L<82a&4e8)9(Zo!vJDN1J-AT{+q`$A>z$N(idk<$LlJFN=&SKFFqvNGZk=k}H$ z(UEj!Ri9iJIh1h2GtbP0e|=qf^Y^rr< z;{7giyl2yjd*Rg$UKdmyN#2&kN<-7@MbE~CgxDwHG1QP@OA*K5Zz<)YPj>DuNoc#v8!gV0cu6^o|JB6i$?79+)XvdIW!>*)8q`z@t8 zvd;443bd^$fBj|1w=cDv=$mL{N%IY5lN_o2*#6@!-U>@{5i|gw@GPe#d1(WmpVw<1edq_4jrLkS6LZ8K20K%IWosZX~T`~7BC zs_dp4Uyx#^7m+gkVTwHSo4QD^{Q!wC-(kA{Fua>=3znO$mD5=I>6*znDQPhqNr*x5 zd9jCJ(`4Uvt#}o0wvwOT&NSY1641DAJczo!tM9je_ky@ENa1JLe<8A2Q~|dn$ zVVTU3Gb%d*1DE)gE2wRIXV+CBllzK}#g%2&XPLWijeH%a(Fe5kyr0r5;Mr0(FoVCus-5UWOu>a14}-Guqkm|%kbyRpFKtXorxui46tv1bg!>&zB{-K{4zZjx=BA%!f5_WuCC#;>G3ccV?2O1 zpcPfgFYhdeB%b zI9yqmmbHwsqaLqV;R6O3)|bKZQqL@W7wMdMq`@f;e5PvQ0ls?MfHCqF zZMK@OgGM%N_UV2i{HLtXuj!G&8jyeauC&^#7ZtH__xG<_i?YY`nH^QNL;R3yonHqX zL9ZT{3eY;Say<&5H$fCi!u7YC6xyxJ9)&g+Kf*`>Zb*eaf1Tt`-hQ#s#Wtmhyr)jb!>fS#Q zYk3C`jV93`hcfE@Wd6ZJ6a+i`-r=p@xDu$lp!9yUN7T9&UNPh>JSA;&OtFhWI>23p zB{kB&fA0hJeF4%b;{NTuawcA<^ITUyYWUXCh`ON5r-y}(c`Rxs}D2^FL$vG49P zQpw+@Txh+CGzz)?@!Gb?GT8zJ`s20{w8Q(rq*Tlt=duB0ua}hiPvh;e>{{{tV_?Sl z4qv4;ijxERE69P+Jb0Pcu`rrd*+G|@Jr?72EWz1pjuNldQswx!Wb-N0_Q)3{6_CX*UdJ)dtH@)A|X`3XxB zWzlRK_?O(09_c8gcwclD4A^%FyB|%lRY9gtr05cXd``s;zpb_BBvm48ER!bU+N{0v z@Ad~s3zLk&R~H0ra3ZgJ(F5QrG>g4D;Y_Ys5Zvs_%!YrRb zgLKhllZs{x>))o|#NvEx&FoKus-luS7Vw_c<#>Wj2@EtABn%ea0z<>f@t{GIt$37J>1BqH;$o(T)x{RuzeDtJF$xwH*|5|HDcqkrS@;UZV2FNz5 z*Lj$TWGKbFzxu&G=^%Bh<=t1T*uK5m;$9zM1l90eSo{29O^pOD;hJpp3=4314kAar zsI^}GD2vdHv&M_Mt-0@8C%-yL68c$>)5$>>874kU5Ca-nv|)BN!-E zD#N3Bwf!9%Rz?{1su9mBt9i-&v0LUVA+*qocM#rw`U5-h=F4q0UkG$f%_$|sacmaL 
zZ2biZ4ate={~{dWO~;M88<5+4Wx+LDy07|5Lf{pvdEBIi8u02jr(7Y9jt1)9cn?Fv?~b)iQIMuT%9;#c5*t3UtHqYQS5GCrt}M` zQ+#I&Jzf8=7lRX39C%QWAihB40^45doULA9xE3&Yj2JySxsLHHc=$7Z3m9oroo*;f+1(rMU>V45Xlxv_AkfN>4dH?7!H5DvzFN#w6Z%SjaFm3AlVY_6YG$ z1HDo`d-~-OruC%cRVKyvtg1|6t+-l|&fPp+$dVf*aM4V`>a4n;>fHR=7jLVi) zLAp=@C`6F!{R`BEDH*NLT(*U-|A;GceG(rd@drq~VcQX@@XVl2J{VTQ6UmvJOoY=x z1_^JOE1l0UQIj?#qJA+D3RnCES zJ_l?oMe=oftw~0c@*3}^f8g{??l~#8sVE^!@Wye9Mr-JwvCOQSMy{m*SO#fA;!LgI zCn0i0lHciOFLCG=k>=7Lh6P-Y+vonwsA_ZV-%x53si0JW#2Qr*_GFt2Z)nK z=MDaRw2;QhJ4FA?=xTSQIE=I}W+;u_#z{M^TTgP2UVtjWHdScFhgmP)*8IWcfr|ZQ zKTCxNV6$!25ZJ?7UdfR;B0kW(mT~odOP;AGaX7it`Sf6O>zrF@(Q)w3W+C@cE863a zzGZyVtAFV=^v_=Rz!2{7epBb^Ho79jx7w^*vE1IWaEsCv$J3ic@(y8!oY7;4X3cBg zXPE|{_&jqW`H*&k)$#r;+@$21iwo0|oH!S|&-!xveM0rl8MOyiGR>tkT>?S{iPVtl>y{j;8X1oM^D@`&-@V!f5C8P;QmXkb%6C z_D}oM^5++xj|7g2drRXQqOI`r-#xw){Ys1VROGabAA6UXD2<%mh`D1l8~mMSUo42j zy3S;b`%zUQU?iuRk72=Gx}q-em|A{2E-H(T$en5VDM}cSi^ko3>$7c}{na04kxbL{ z^c#Dd_I4Dv^fv?V+t0-_7T6!k3i~DR@4uE@&rjeo{iQws)sjM=u6-|88>EP{3WI&@ z?WB8(-4c~}ggz($7>pj^U^vNeG?HMVL%LQGF_Hy6=<*hxI(m;DJ3T+bJ{&`jiw<1YHz5SQ!4r=|Eh9hs0$!VqcT9hA?A-d+vF;hlv zq<>Km2Fb$yWC^XdVR`7DP=Ol_n^!zkpx2N8wps1z&^mhsm*SD#vigX6lCN^$aX)vq z#OPiEPyPUxa#Z?@uav?jtA^)6khyBCg6Alo%=7m3Jzm2SZ~p@5PLA{1EFda8%_zq) z7NDKaW=y=xzH_U*q=>-zuR_i*o~Kdy`spmSvuvUB@zREfZ1k$*x?NS|qzh3+3|;=> z$*On8KM#X7_o9ft+#e`<+L_Gu4CET6gF^q8kdutfFuIn7qBa#!$UQi0^5z8%7?nss z9t96_A|`AXOp|kN&7}&^6oPqQ8F+b(v3mt^CShSHsD>(!!e?#i-K24`48xI|wT(od zDtfvZ#TurO4}Fr}!_B4dyHx+RrJ<3TR#x0(l>U0WCaY<@(STDXMOn*1AOGhM3C+r6 zEr+{O2qKx^P%$%g(E^lYtA4Q?aY+lDcTT+r{mI<#6L8b5IRM* z7@>I-Wv$dtVxN^NZ3yMfiITO!KHXb8YI#x<7g5pdeYlJ2=SuyXeDHd|Xq7nU>LVUw z=~!qv1}l1ZR`2_722)la#t9u>N&)qsj+kSV0>N__I_|>W2Pr4($mqN0^fVOy+O#Ko zac^ywi#y!PP_vXusWBP}$EF3&B?rgcpAVjke-R$TSWGI&642sRYyG1Xhi9A=8{bww z@O9;R8mJs)lCirzhuNFgR*vO-4c%8u-V?-;OYO}+1u>|)X?(-2O{be+^Vo{VG$~b= z3v>E*1O$DZJzQ%u0rsgQMM(Wa+?K~X1;JH94sR_8q>BL!186@mT)$#R z!@W!z^SL-YX{)38b72OinSHfBiU}P`jqsh_oL*(qMFBr?>sl|gKgYwwsmEhM=HuHr zZMvq*7$K$ZWK79Rs<*AEugEEMo-S)tV@h5a0a?Z?L&k49B<*8>=hL)fh()6on)hmw zI$Md5_HzA(iD8X?NUY^?H3(BL&POm-li9v~ur%nQl@&KI(ZD2I7r;M}{V_ zCbO?dAg#gM_p~X=S~70UnGk$&hSam5&-0R@<`&2yhQV5U{o{APmgcHF z+HaM%?TTF30)A@ciTk8XwPzk)=H-(XjHGSo^qMC)oY7aeC+^>nOx@a;vzdG6F}UHT zqH?zS;=ySl_eps0!!k1`mY1Fuq|is>OxFD(%3wohgPab6a0RX<%}*`p?%6c@phe|K(0yaq31hm z3=dx{CqPBzeKqwRC+3XRMmwz(k3a&2gg>}wN@}xfEe?mJva#0_uj@R}Q)RhsNc0l| z#of__{9%lm;Wce`Kf~CBJ6JiTPW|=H-Il2AqxM#O6khRU8@fvwg=peW`78P1tPl&O zL(IVE^lNP#*mXRR5s5zJ|3z46C}l}A>Dqlal2s#?{-9U6=q13Y*lzlyZns@#*Poue z|K>22{PTz1DrT6f(1jYd=Htg%l%17~V;?O)?q%C97~W;k;k2O32?m+3vwMwvvp^nj z8GZ*~=WOSSefm~k5o&D4tz#pFQ2l6YgD&5zPQIzyy;(I6ch{)BddB>6$qwI6!#^Fk@TgC zab5@c0nsgH^3zgBoTaDByfl6ZQ5r-|mtzd!+D*1*mioNp;QLPvLfMMnF0MNrpN`KS zj+yGgWtmk}8+Xw@onoFbc!!N1KPAlq6j5K&Jkr-rjn^5t7k%^@$XP z4f>&>4$vl=N)-h(v3XgPwDvFQz_mwbbQb&7Q?EswTn(ULVSr}n6PZL=o?|>VOK00IZ~8xjp$IzK>uY#0w{rBpAOFx1)N+VI0MbRmbXIv)__Qi<|D$}lQT*1-cn$Y{_ttIsHR$nO zT~JYMa1b8-6!&7qo^b-WjJdESi0>wOuek?dPFBv3MRt0;q_$$_5{NsGo_rc)m48(` z90paf*09Jxc@BpZr8s_|jw`J?-D*GF8CBnnD{I^1@!S%Ba}))njW5@{py5M7k8{W;YrIivH>zEif;UU2f7L|x_RRawyuRZR zYAn2sMr&$cVY62QvF~2Knc6mRLr2P-0KWEU!k<^hzWrRq)Z+~zjztM?DKh4tj1)NJ zDMb+DGA)oLF8P&m;Cx5jU-i*(s54vG@tfB&6=bqT6kzDm@5~m8mTU(z|5RzJ_#S9p zZWldy?_;(vC@EN-hT3f0I-+0v$xX+nW^c<4F_VuaQtUWuc$P)02je?4OY+~Er8GtQUvWJG%#wkG6~E+ zNo*?=yxIIs6Yz(6TRP?C;-V$kT!6ci3r2TjRhMsny^@J(ze@AKEwU*YgK!S*isxS$ z#JBfLxBVGuW4T~%Y_%@IbH2@{JrgU;>2SL*n5dfy`#I6iGA5C)?h zgjpKPDvDsY%i(o@z2z^P!$)HQIeWO$x$B%pzMh81yP{23!f6;e($eV2Qj%$7($LlX zIJ21YZNDmb>KFb+u~e{G9!;}jFt2=3Dn=NFO`=b#IX?EOlxKEsJYcc*V-*a5)4^I+ zAeA%o;m9w{d=Cju}?ptS#q3k 
z^X^)=IxlevQ!Ihg*<@PJe4bz`Z_9LDl--53?n$93u_y)2=>R!ti11?uH_p(gE;*ov zw=o0b^CNOlrEGZnRoRIhw|0ia-okEcwoFI-$7)og5rLym_WLOq%agm( zq0JJ&?#Pbfx?Z;gHGoNM(c?LL_{?ba_gdfAh}J zZI2!BnXY7G{Ju*?bmT+Hc-~to?uP>#C+^VfR-~)T-8C02QX=VNjHl?=0#hoVS{z3F-P%K-2yQaxbCxJIVk4aimO??YnZBbH-C%?hr#E+%V__gnV_Y5 zwb*Rur(^+rkDg|%HfBl)ML||!2Mu?BQ@eWmd$S1!8%X{#ItNEF#QDb|jKB8QZ zf9LI9;?TgeEc)<4!}~sYo}2O)<7ROBwXlx5{rpAiU0z^z0)R#5X7L52&M$Jd+kfav z0(G;Bd-7fTrt`dM+XZ)ebEuq&d2Os>D^`_E6*n1`-f05vJW475=~)>(6SUZnu-6(K z5WKiccZlb@F(73tA_cmkZ$sqRgGTGd?f_O5>~4Fj$ZoKhsP~(2NX@98&lNYdIc^P} zCQb6R21iF2qFRCsLs7c$%;GiX-PvpTe@*vq2kqrduH5kMrBHB*Gwa|^oH{LLzN`5~ zc3Wr$v^dYpe<=m}r2pmdK6c1Eh*p~?ist_xi_gIL8><<5MNk_BgQ_dZ9H_-67J)gQ z?vRp`peHI+skElmRX5~72^ueX{nS|9*E>{au9f&AIdIr=NnK^BVn{$JZ2j?5`R)>< ze@|_8q&WgrK5zEf%B$R7`Ir0X7fHA+?nfxfGSyC2zk6kA7Cidm)EL{a<_$koVv?=})(cqtCd``H ztSa=mgaLRU9joKZ1&yo!NId6oJiMOw6vgXcCX*%=U#zweUJ6;gsx7I92JS6UAOdvOpW3%Uc`f8ct22pvP(XhFi}C?QhKYn4yr& zz6E$<(PR{z>s2IR%-&O_Mlgkst27jgVNW1MUyJ2y)wK>Afnx3CJ@VE0r+JTTw{`}Z zIrpD-(}#or0D53NKX~kMoI@o}ik>@PO;OM}P>cca`iS0FXr51Z!(%F%dT4DsPVO3Y`pY^vAZt*E|dXu3O$T&ipjHMb;33d(p1S_RQ^N0ARqd`aohe z5J&z_r)&D-B*|qiqoj{5Ci)fZ+X}I;sV3ezB%4hCI;G~oH5r_zfVD8%tyg_3`R*Iz zMRIl+@yK^SYJXoNQ&MhuC>JvUDMeMTFS}y9eudoskF2i@i}K&vr9&kIq`Nx=34sBm zI~9=@jl1_o4`#k*jyWf4zK419)7YysS?sZqBB4|z<55Z3e zH|h~^9WDvk|EWt2WwWTfB+IxnVaod{sfF{)AV8zzo@mef*)@v<9R{`7Y|A6SsE~Dd z;BfVx_7ZSq7?0@Ed%9fP+O-SmxouF-{{*Z7vJ=YCVmIyRQXn@{M-URTzI=N1U*jj$Ej=l)PQ5+088nji_Fiu!uNZznjA z*>$9;pQbBY@ITYFP=)phIQJgv|HAnT2Tm&%t*`55nGC%=2%Vj5=D#p{=N1Lqgb9^! z$l?^N+p)BUtg#T-oeMp&L`J5S9ItVK=S})!YcQp;Qo$dhzl zLOq2YA2N16GfE-t;y;#zes!BgJ31IgyrzW$p~IBeL5Dn5o5>Y8V@`fNWEvBaNHuC- z6^-(r5!JMmAbc9EQaV^I4IxBdF2=Za;;GHIr$C%y%7nJ=UtFkrwV-oe>{S7&q_@Z> zM>#dSeJ2g#7nKSzHvvf(?~)aNEJqfgIWt3+=@)hLW+XS5-04DQp5tD}H^^NUWVJh8 zX|A>usz#C*&ldaU|_8 zEPby6E%tM>d<WiFT_z_6aXX=2*!M@@fV$)>l=RY4newclA<^{7dw0h!o zt1qRox~daJEvobS7uTXhBs}_WBBAc;x+|KI`W!e-?xO0D4opOBm?TiQ5?{;qUHlebkqIlNl1>(rA-s~+< z^@w;vnsA1ul*YsC!(Z@FG5c%l+X~M1`AUYES;ImkUb1xZ9wy zNWK6bzn`EfbxxB_U>&=Y{EPKmXy8r)3xRq{QDh; z7xP9#n@f~mhBe0#5gjPd7R%RF4gr|k9rx|L#N?$}J>vvQ7n9Ym$@KP-1iCn5x1!5o z!rB>8Xd#|mzSx$y}LzzwH@1R zyIYfv_vFBdU^lr%QW&R8GxZkLq)B$+ie(~)3uHmwPKhCYs_s06A0J7X)Y6W^2kjW` zYuH2D6YK3pe@;FzhS~W#F_HmzasxAoNpwvL{FT3&K50$4sK&G29&FFrEtb)OJ%&Db z-?A zg#-V!CHW^;;}7NTdDlci9p*b%c?1oVO49D@YbKC`<5ABGUSo5|!O?Qg!6!}N; zj`MDtSn=gCYr5{EIuxoNgj(IQze)v0_84Hn8;etJyvKH-SFI9{BA1Wp8~{tLlGpR7 z_5{{cp5ODj1|9i!o`)IBt>|4E(#-#&>ztWx1Af?+!A-x`)wx`X!c^Lo>qhFg%hZAQ zA}X6py^I&B9^PiYDjbFeeP%_+Dw1CnpOYj~i*L%^8zTLF84EQEv-^!(===`he6+As zdu$;H;r54dp}x$6GDKRAfAFcV;i@_9tRa0ZM~COxT{sXg2|{s{@ayu`V3OG|9|X-`o&l79J`gY27I#iQ?2|gprI|4qaDjbJQHWjO zNZTm;1HzUxVi?)67rdb9COR$F$vP9L<;qO;pA9wY<=C`_iThdce+iG&{-p&UQh075 z5lDcXM8j(fkpG!JMuyrO#`%(Sz*hW!-3WY+xz@xQk`oI*lABrh@nB8k)~6Hpc@UU- zyzSoBaL4FAQjfcnO{_-EMQ44SC(x954$x2Hx(q7gGTt88g{f7;?_o{QKc)cv#PARO zr2N^QmqV##m&1m~Z@-LpQri$>?FOvwfo=9;PWH7Wo+7yq+gsbBqk1mK9b5f1nOlm% z@nU(57t7g=geOALoXS_!1|{}!eb^>Wa<8+cnmo;J&gM^Bn%IF(*kCj{MUrE?GB_+qD~Zz!2nq3N@gs& zru`Ah?b>?dVNrKc(IZDjmg+4sRh#w0Rc{I&$wnyuR|zT%)SrAw7O-xjs4zvA2k$i^ zW=hFeF*}qR>KRTv&hrK#`|8(#)a1qk5DX9Rl|3HHB24>0YhDsc=+1zYFufE{v5iu(J!^E}F^NN-X9S#GC(InQpW}`xaVu-6-ebqYAsQ+pscmBRe4ZJNHt{nD)zepU*PNt9`c_F53f^u_awV0Rda1l@zh028aRhC3Zl9c zxbi=d_rN6peTh)Xgensj6 zDDxe}!@=@!C?ws*Zn^2|K3V#!p8MxR2s{JUWJ;wmFP1TyF=_`$@)Fv6`iQf6Z;E{ zABEvM;;Da3E`e|+nqh!a?<$ zebZ>?Z#cs#wvN%f5+KX-6cRP!VsLkOxD#6%UyZ>Y*i(;vz|Go)XFnZmwFx0u7HM;5Lgzvu|w3r6udpm`aRg z-82lop6hjgHFS97)h2YjXs0}m)DZgm8Kyv*OaA;#zv_s_uk01EtiH!)i#9CaoXa>X zHi3FAkZ4e}K`k7$MAHyVc!GUmVMkggzY~D)DXOzpw)nJx`(4WY&>kjZS3UpPQZJYd 
zj#{CiEFcs81X`S`ofoqIQ{!c_g4A#9DHNMj@dKmOUB>u`k_Fi zWM`6W2rC3lgbqi*8mwRvbu?idiIvaQFq!7N+koGoFXHD9mwn)048S(H2{laW00c2b z$k-MA!*_lmyO4!Utn>SZ1uJ0ImXesW%rfGN<4@$>1~8s6uk9{Nrpf*$hd9GoJ(0km z%*NHdnP$R|zPYkxtt(&r3DXimwmk`XO|(WP4ysHLzSObm1#VkjN+6{ImFnVIF^h0Vw`ves%C$H8<>k94m!h@X?{(*9HRebG!?+^V;M(e8`vYXb1pW0E zQdrns3MtMM;JBx&9s0f=R3SB2@Ed(exa!5!jCOS7fECzHOWN$Gi1O{{bt^+*vATfs zH7sKrk@XjLFF1g4*A(?yye5h;+}O^`1R0+3L;de`dR#OuVo~8<4+P_ac|0etp zonidMVF5z;55Rd7fP0AtL#D`pK4%Lyer{*N)|qd0f!!fPv)Drivx5bn$^G%uf4pr2 z44%ugFD%nJ(bG$s&Z{%t1bO~>ZuSKUBAHBv7|*oS^&48GFB^TNZ((&K?Q^8B=2abB zZ-!KCx4!0=d@OCceerim>RLSL=DYnJ&e>2b9bh2=PZ4g9u@$`dXMw$cO0eXjnwI}p zNm?rR47!$)FSp8zppLVtkfuDWg%7`i@tla&PXePoKRVtXnu(KDsUFs4c*&27qNqKF z?G+|^cxLV5?;v?8Ll^PxR?)X^fNVMPhdi$NxQ#~`R|9?DDZRpdc>;U>+r?&~Lb??) zWosB=bHdwbkD;T-$2ntnYEk2*n_}&slAQJJO)-xIwYm#T8$h>iu)B|Y(3QQA2PTKf zy~XR*WdfVrz7#@mcJP9FV;tX$T&JN-)yr4n5Mg+EM%!PZ&S$&twGGHl>2&R`GsBZ= zl$iiC*HcZ$p;R+0u%qxin%~x-(fz?1zc^&MDtuu7r;A)8jcoH*9wm}n-pr!?oY3{ zXEXd~dD}NQd6q(OjOKcH7vxu}bN z2FYF0lEhlgpvVX6)}c+lc?$S{-tX0|H>v)VRG`A7BNQ*uezTszGU>)*oiW%P%5NZe z5NgO*_%|VX-0V|P>qqdyFRw7`K5Ss7rVH_q9?MSi`mhw#AoiRM3)~jJ2bmk_iUIXq z*Dl}1ALNDuKZdb;#6HRq*&9yfaw;FoI8rxBzNP;3Tlja*u%%w&5`1@$Rhhoh9)qei zVgWNnU|uXquea8d_RAkH6G=cUAV>#@1^V6byX2Di;_XgiiQo68<*T?=?v~oTY5P9? z{uCEJfmxpKOx_KKbT^-wVb`YzB>GdkK$5P|?EBuhOk}r?R%bx+vDfpME&}Bj&cG|% zqBb_27?Jh*8`3U!6@j3Om7D9)+y4I1@pmbZPxV;m6b*Y-K(a@|qPnP=prmPBL6;?rioW7TNRTAjW&mJPE_ zpY&-9iKrxR?wA{2;#Nqu1XRw!V(Mnp8_N_n^C6P7fU}<}sBxx74BQl+Fqg#qG1rFN zom;(Hwc>EiCYJXxjh1Wd!v2R%Zg`E2=sx?~ulygWf^w-JuWN!;)(%e#?F{NhD?u2$ z-p)Z5u++c<@z#>BgWsAwtoFVK6Axb4?i58L5I;_QH=|;9kNA5oC|i8n(&#V@mA$jP z+h>7)f#Kv_IZZJwK3E%g>Po&u0;Gz+$g6)gOmRBq8M1ZCm9JM)Xa#A03GC_Dkr}$( z|EM3up+OS4ndUnJbpmna)~RaSkEG;Z83huWE% z<0dhH4hyx_NnV}kUGqKPlvhM3j=M;*pjay5`;8M?f#cg0**kcmwT5x8h7Ku_7~RuB zRT$-;?es|p@XtY&wR5Hyj3i>C;v4?`$$E<3i3U;BE(`l2h6-LCn)v7r1cK7Ma}c_4 z8JoBl`m@cl>(S(7KvkcSDVZX`q-e9eEEyy#+JX{>v0QYeK}Zn!)+UbjtE=O zweX{CaA|>HnWy!;-oRj-m32=C#7A4DCUd+jtzm0kY9ju$oq?PTSQ>34w-i`#?Xp|0 z(CHxbK6LV|q z+OC}f_2Y=X%EjyjNP=}#?$TuS%s<*+Q)`7Q+{U*-vnWfFj1WtH?6;JK6iY4I%e(eP zhmrZS8?0cjpFpOYe(ZoM3TvJm@EYl|pZ-bU2%Hr->FNRc5Qe;uQrtheMf7My2pS=a&vUbeZ)N?OG}2jTqsaXsuSgu=a<0^Op&QC&x|9xMe<2p&U8D%3g1 zxkf1ba5XZ}S0YJ}DEAUiUHDJ*G)NjhF)%vx;c}_>1vhJX-^GFoONV$eUw4W=&5~lU z+JqFwF|Qby%O5kB-IL6t?=O1#jjPuWkCV!&bg0o9S+P|iupP$lx$glnaW7XMpz}}F zM(k}z9>WV_canC$om!0(d{GBA`olN74%bMG| zm($D;wk~ke1J)9&b3ZWNy70O}w;K*&O4fU>KLnNi>@GA-dt8f{Z0x;bmhP(mfn(;|H-o zRJFV-#d0mdCPvg+&b{Rvf{2vQHIf@F#Dg9PdJ0H*HvB-fPCLjwKl|LALtonTtVdp~ zTm%Xv?UGqr)Epl+KDeHu#eAdbj#PK&mL|&uElnJ^<*j$8I_C&A+2?SGDNUM)+p>qMJ25WV`4KgYk4<`H0XdgLhoDBSVlqK22U*)srIP8yS zWBzC~Ws$r0*1!vX$k&CMkyBY?y1!_UKCjjjrR}ZLaylK5{IYQ=~jb5Jan^98^6{Ll$I_Ki2z51>z|Tt##bDc7{2M%ciBYB0{Wy+S+X7N z5x@K5*L((5N8CrC)rL2KKMQ$D+;}Fe0cQv06HNl`M7@f%DtrF01@tsj&YU$sYz9bV zX|(kRF|_Hj9mkuUYdRLDK(|u;2WoMwhL@&-I3i%+aKizgxA|{~c(2j1EGMq%5HQisGSW?a$||}j zOVCKx`L|w54TbQ@Pt4&fh5v6(|6ddofsZT3T!&DGqM^Y}C544uZWVG;PA3LdJ8(XB zRT-ydt9o^E;W7m%{i6&VHsgo6jC5)_f4xmBAY;b{Aa_M~NHXBig9mK1G53T7tGR3u z;mTb~WydSiBhiQ*qc1wCNj-#`ERnn}YV|+9emrQwY&Mm4;567?q}*vB+cA^TQuD)C z`ThnX9{W?vO^%0ue%;*frM6gUm;TXxbGY!PMy4_>(-f8(M|_?*SE|DM zY{+oL8pcohk=w+v7dt5iXSzaVVyQ>|JkmtsLYYCE9R#FA)K_|+~Sn2w-`jtHU0rnEtu^P9oZL_h6E0a(EJYC{5lZ$qS8S{Iuhp#%>Ch7 z^1!Ul`b}M1d_m%eqcLBEtwn!VzEE#1O3hlULUy?-Q-{KvPcd|FWTbDo9KQG;=+-J# z?i$vbg*(yQVELd|XY&W$<`u!!!Ag3ra)4mEflp&42c+Id{O+qxydg1C;V*|g&S#QY zdhDU`Ui}p3rak0if#9*c8|?5EtajljtCzZ5%Yz>6m%P0B1H82`-ADUfO zy81FC(}_$ftE&!MyV?ee?$^4OoOugWquTuMDmNexS%V5`5W930J&@eP{e~(v6>A&8 z=meu}sbe2uVHn|i!tmJe61E@klw5lYA5JgipDHyZ;;<8$>)7)j=hg<$U*A3Kar=2U 
z5#7wX&~nh9yLf;1okKGP<|jYm{OrrDGmfur^z#b5Y1~LpGg-^I9c{XDk0#;$1f3(y zkRYWPg3v~Wco(`i-fdslGGtvEaj?DCapFuTQ}dHHYAbs;RM-dC=A*WsT8-=|!M-I^XdaJXk~7gPOlPI+7dvhuqs=c$HDM9D`21Wl=gYZs)Y6oDnpCiD zLi)E;nDa^dfAVWELGE7yv-p{&ow%mFOemtkWekkmu<#)^3QtXp0cVx@)v8lp+)*bR0( zuUV)?XmZ~54r#jP_k9{NrdH9s#fC78W9#7te0~@d$L;+NP<62#NZe_ z`DXo=!NjVl%%)K#mT|?qC;h?xc{wqqW5MKMO@-m`w*y-zsPbyLJ9<>z(YA8Gtn$@} zA@-jeOa9epShInmvI@7e8Q7nQa3F!zeWY&p|tkXpgFwX6sY5(OIa zlc=5n&6X0Z+#4kKM-`M&O8&w_agRw+OgepPkQ`?s1lqdp3fJBch*7OIXQ>-}KkZaj z=K8Dge9Wmj#kY&7D~uMxaMaC6n{PS=;h zyCJG>*`@CDgF|q0kR7vyx1EKz^ljj2iL1!4X-xb1KClDboiKekoKF#}q z-ti2q=}*6jK|K#Gj>%zde42`?*aSr57vmPT#EF|$KHi~!?Wm4T1c{CO_d@em6KRUE@9z0UG=Vxl(w#lwW;8*dx+7hs%^HhGCDEQ>F5JHm&TUOgo1D~h#tum| zgZ&jM&psrGdSG#odYy?`&thtY$5lpDjlOSYT4KwKHfVlh)I9xI(G+U!C1bEBhEq^@ zfn7^cH1Hbu9TXFWTdS`CZ<0YkvB&}KpjzrNXiF`YJ&j;i!$r!452{xfITcqmMjy5c zO?)TD2W@dbqu0$FkOeh-3Dj@1FLJ?oNp2GS5LM;{XI}0xYED53iz>}~%}3er)j0Ep zl#{>s19=`LBBIue+Cx}-l^s2}=>?7j%$!Qy+yq%=ibUAEyRwF?C^+21V!s=l@+ITM zEB?qzw++Ziahrax;}}>ce_efz+wVCq#bn@lz4*tPAN)o?RlZdKcYNCO1*s}7& z$wr3MgJIv}9~lD;{$(Aqt1pfW8MLDBAO~GNM1lNFv(Txa4p^Jo>Q*v{Gsd(tqjVp7ex$lQzKuRj}Qp7{L8 zBC#85pH@49KODO>n zo%exv1w{u|m}dUlOy!&W3bl_Zanr~b(adUIfT)CdLU2 z_2j>Cr7~$Y$0f3lzUpc7x%?IF3OWeb6GF80(0Yl~N7(21Dr8)+DpWpO%Y&$;K3rA!p)l?e|d8 z^3|8fV6r(%#8t?h&==*jmJC8azD(_(5B;+}-Jca~wLjh(dZZ`Cgrv*D@F=xXE_!Khh;vs^{=QMADgaq=Jwv8^$iFn8Yzx*(HL9HeW!#!iX4YBeUPDV?;?L zS7=AmhcgbI(%L#ucI(DsNfGbXlThWL|{at{0FWl26PyEIKpQ!&sVOhyro8J7< zYoK*t0KNU2+l4#aOUW|<+xq!;K9q?3=eUJ`_AATBO&w;@4Q4 zqwZ5obkOc+6wmp}+^Na9T@xH((Ff`&t9I3yID0*{#Meb?FpWRib^2}hrEDXM_lT!i z8Y$_@&h#AlPD={U1i7cpRCJ+9L=8w|_bl|Y;P#%H88k|~wEluZ9LC+hUB-IE1LbR- zOW&o=ugQ^~V~b9O{-z-K(c{*P$tZ}ssTxTCfs#Xard}!g;`4|-$HyH5CqiSgD_Nhu z5kyG!)J5K26E&!E|DX}_p2^Ffp%aw<>$nN^YDC>%pg1&%aH0hxS?;-R-BSFWUTQKxfZj{%}vx0wuGUyo1LF&>2=7jHD>l)N;q67DLXl;@Tz^g;^}~ zjyJ+!c{^AYcx!RGrE5+H1UW%f$T3#rE&L$a#{4RY>7?3@p7#p~!E(w`5-Dp#Nc)md;PC4CmR%oX( z@haXYi8|Bqrwg#Dn~Iyh4!F}1T5u0P!2JqX$A9isNLyBqq57cLs3SjD4(=iR?2GB) zJ)ma+sybP%R}5H58h2l3Ja17GORv@~ZWVzMdzRx4s148>okkDhZ>kEUFKb7N&+`|! 
zVzO-^3@8j-rXpT}geBd?{2Lo@4K>As~~BA*uT ztcU=!3j@aPEILq7c=Vc=H#Yp!{5ixPm+Z(d)3ip*i~L1WmHbNHyRD1fIMnfXrmx?F-nqZKZI|<$AsUzYNPt;h)yhU2ng4$YHNy~+ zySz1b@c)Z{5r2vMQrypu1#L^JgH|-|j9J*5WjuoZft=(w`us>Q*)A(q#LW%?7C?G5 z{D8A0t90V^Qw~_2-2XM7D`AU77$}dqAF#tJpx!j6cN267CqYIm5>dPe?Py}*%Z1ba znIyQD+z@y5Ul3%F5e4Oj5T!a3D!zc8d?)%&{z z%ewnvzXF-(Yn#X|iVH=Ww163K^5b|!Yt<*_^c)PtoAqNd31*EzkO@8C;z_rehK-?r zV9AgrLc#TqJrcR66aPZ%+hDLEIqr^qe(wG~H1KfFM(mV8;SK}tZjWU%)s+%lZS^i~ z*mi2^FYG4RwEr!=Hh+Y{AV#KN>A1wy>sln$04&v9%mY-xcoo`s**IB6UIz?nPz)G5 zM8SqZDC(dRwaPtt0VIH!GndqKb|C!6W(U!2pMD8K{*bums3=U<7CtK%sTQ_3Msgm~ zLm~b?=+^-6i+|R(7ddZrD#0n5XWslK$TF3@jtHUO%@I!^w9WiR19IauIy|C&0)A))nou_`ElehLq;*2rVuH_(t%2EUq0F=WB#{oeb zL@_i|DAnwbQP59C?O)uz)(d=Rt6lb&;uVwmEAYqD-SiQSC|$lc@WB}_X8-bC>2b?rGleDBM1ivSPd`Oi2pi^JqPWZNtW{2N-@QXXpRh37|{H&@49!s$~m07&%zn`}n0K!>VhHY52 z0ZN=OU8ZTn{WZeKJk2y(2)2drRu7ph_KvAZWHWrA2O;c6=thOoj8;qg+uj)TF>tzM z_z#%?HqXXMDOOFn)U)2V%>l&vDi4(#E{<0PeJi)Ce!F!vDm%|Vi1q~TY?Js&ZHA32 ztIy=Rv-T5y@$knfkX~mAF4MA6bjNL(`cpWhc-0%H{?z7dW{SOCeUi+Y!l4TOJSambct2HJMOh~jj^dspI8#4RZ!E?YUeQR_MdUZ93*}( zI%NyD`v1H;-!U>f8VHc>SlY$?5{H@&5X@M^qOh~zioge_65@BYF-`+j(s6A`x6#@D zOI&o28ZTQ86mslUx@!&eCct@FkvAJy14Mx5GwL8QNOp2v1vAv5Nn?dZIA}r8A!Q7J zO6w_exTh6}Qi?bhCFQ>ph6~t*1}1|YJr)Sp!RPg9F>OqeYgc*h>`F-`WI2>`i-JdJ zqh(M+GJ)psOF1Hwrnt8X_zCt#;tGm=&0xwK1ZiRaJKp|N*pl+bZ0nJS*$>8f1?>5s zM4paCHXE>=)bA2xcXCPHo`GO*#)Iyy6+%}%u1>5+e0J;As-Dnu&U1vB?Eq)2Z-s{y z=Tg(Ov>>^%;>}&Wdr%ASHM#5PNjC?6AmI-0a5Mh$VBgD_53G?z3$^))0aHhJpe7yN zc1*QSBx0(TIU|jhfCDn!z~WsR(RaN+U*!~YXoZ2^W)6F!u5{O16vts?Rs{}xQ10oR0^yqrq-BAWv9?ejKPlw+IyExIn(9$CPsoCs>W3|U{S23#tgRnxW!$RN zS=YCV0UKfp5BDJhq?KQ5V+UNl=Bt<+<2kGr8Ry>4F-0A@Rd6lS2UTJXG_xt5(^lDl zo84-PZi=MNPB!Xd#4_##`e7hs{bIHdgwBZz55*Xw*a8c-yk^$T@9M{WIMG;UbQG*( z$Yi=ZPB|xm0K~p+rL6^Gg819ApjamZ6U=tu1HSQAM%5_c;TS|!u37i!9kU%sc95~&ATV5|?Ga*71| zfVDGnr@zUV2J~8t-^zmJ$K2yAjl?ju(M-5Z3GT*>V+~U znX8~ZdEvjK)}2tw;ZVw_N6yy=i%{A6SWMD6kf7kI5WPwEI+t-~$ zNd|TnH%dSxEvTAU8{(`5fqbQHzsP8ht|F@;D77~@tpM~jv@9)N^W$jX&1_}mN2D1A zF6*)SR|AuU$`Rq}u-&Xi-}7f_vu6S5qJ1Kd`-TFFuRo{C&t@6t{xk{TEG>Nu*Ql=h z+xmGq#UmX~RXc3=+S`G7d;@tiemg4*z79`V)45yX<2hmgP#w+dOrB547UVJR%n>ys z$Z(QuwIi%fjZU%r-x@H+gsh}@7&Y{_BlM(168ltW7R_io)u($U!{Mr~F+W=`78;1} z-IsT}&o#f0d(RybVeex$7ai}3d^sp#A z*PQ<#NqJmvE6&(&+Km6HTmm1t3D9^tDPV8%@V2dtV8hv2cR> zeUHd4pqN8}4XdrF}?)Tt2Kp~5QV&Q4!)vp0Ho7%^vA2X$>Ji4hxRxW z%r7x{ENL70I)w0U^-StTY#YwbE7d56yQ!(GA_bVN6dpOxh1+mmTRpEa$^PrkFs<9y z3KC5rH-oGWFz=>^@As@5Y)fo-DN&A(3txYs|}P~^48FTj!sGp z`mXRHeI2>yMb+#2dtvKX9D1)e*JT@;xj|Yo)x-e?T_>ob2YdwYc$V4!^cg1}RF|=N z9Q;ywug4{_$2@7+H1oo`!8G_T8yz&p2kac2u~2MEJlhAx_bq$tO9|X%M}c*>3vAa@ zjSg-$?eSV%`OH>B>*bc3=VAkWm-LOE94_)uCHs~a;y}RK!XQFShq{`O%u2E^ra7$w z&`!bIAFD?zUIH_d+IjAz@2LgYkf#a`VKji{K!Xj7d zhA2uo8>xbUZ}iV&L?hWNGCurC(x<%UE8}smm&f^q^T$)p0=DAFK7MKRx9Kh`pBf40?$)E;lCiR}tk{`}SalF@>-(&4kk8J{ zyQeu6-582i%wK?48-M>Ag0#p4ji2MyVnVt(@)m)68K*fHlh5O8x-+(&8`Zu7Rx6U3 z(q?um&u2uEK?Js}IAI8`vm0-C`&yHdL6&R)>MT^9 z{vg`KUT1G{qcn~Fb{H9ke($V#ax*UCD1GEm_+leV6!ZH_6+LbL5v#@%Ko6mb@v9)( zQ08I(36@K*Qv9Hlc927l6UcszLl5=t3B7LDUs0+o-u9rm5uqeqV3hkvdxxBb`GfH! 
z%H%mM_MYZc2A-vFLxP>LYT4j3+u_Jp zvA3r!Ak#+(%s$Un+3yVQ?)E^GSm`nmFD@y6y6}97$I~afz(0f9_rgbS#c&vXnT4mzT@mH^MyXKvjg8 z#i3x6`%7By<)4DKLzj$Pc3U)Hu%^X?@D~CW@_1CO2;f2?iFP7kdnliDjMVRI63LOg zS&f0qDV#+fg@p}0pJ`#~KJCkpS|0dnio-ZF7A%JoC!qHR+{|#cioY>R_s9u1q;9*X zTBH+Tt_Ub&mvJZz>uJk%%*7UpHy1>L{%T-RYa0wNxOD5)Z^dl?=!iE9pQ9oB_M>bL zkz);esju$~AM$4@vPGYr248itY3mJhQ!-fsiZK~vr}Y>a7fz0^{>Et7<}>k3iEXiW z3>#e3j~sACAej9I(k8h%d;$An84Qnu=6%PNxp;S9CaBjW8L$$IeKi)Obb)*Mko?AM zNoZS2vkHKujTp7)cJF*q&&LUN5NMzsUQv|R&-pULJGDhF8;suo7zH1gk1(%OGP@tL z1CuQ5HToCyv()H7!uT_?uGX}g{s6kidR;A|?{8TmBXh`3g{lxwgN@MV(5H@MjE947 z>@IF&OoS8b!>uM8M~z&*(GfPwy7U8p6-*U3$mxMh86+{8(F+X4mSDIlSP3^{%zXf! zS&s+eITv@wh4Q!v1bkTHFC-1(pR3m(^L8<;4(VwrhT0JAP!&_Oh3_q@fTu zP@Z=R885@y%@v@-XqQZ5-eMOl{N^tgf?)3~v2z)P_lqo6t5{Plg+$XiMAqW*`L6fC zH>K;QcY_Om5Oc?hHmGD?t#sLlsn3>{h|KoLatHAK)WM`Ydqh81JnHm|7>P9Z-gpoixlKO zC{sP+zCHg0<4h~zO`T#Q+IsLn@)RWsK2GpQM*~fS&>mSID{f?g2J2>D3f!?7r)ICA zjF@e#exg{MvLg##$A|oM2lR33SprilsJW*Yb`Q3ZSZbcyD)#Drg>jjkD|D^B0P6QXCO}xz~u@(AIOVMSS!5$qZ}kyCHKW9QMvReD!*9 zM;X^!xvcv4pQj9~emCh*Eodz;9o2K&TXbME(M-TX5;mT$dnr5SxNwcwE$S`r8osJjL}7_{=~H5(}#{L)2H2ec9%#1z0p zr@6*Z7TJ)VE~NZXCe@8XLRTY44pI47Dc7|jk@oQ%WyOdl*{k>3g$Z|3Th*s9z)#%; zgT*}H=a$k(r>4lg97dx!{D~f^QR~*f&)!@v*SI&3k)Io|8cQ>_$i5npe6cB{6(LHm zfLfcy&WB2O_QCsxDepxr9nbnH-jByma7U7kTKi!#O`6+@oDV_(4ANa=HD7N@qP+9m zqFH|n1heZ1uEkn(P^`(;?qb5!>wsmZcuJ;j^)i%a}|_ZLY_Dy9$mmrBNQHKd#gN z*24!xkH+WYf+mNz0>N|1@g=3ReIq-f{Ke^+Hbi#pvI8Y@mxXW9aAP+<3$telahsCw zA@51BuB%Iv1%t{BM_%f`|Vy z3jo2bBt8E0OO!s_5es z%FA{XuCXR+CUMg#2D9~f97~!fntuZd+{`&Zt6HeI|y&?XrWg zNRDI=lcAV>-*&sAIlvTA2H7>_<8IiaDe-#~NgB(QN3C03QpW)#*-m5Us6d~*5CcA= zKCK(gtwPNY5w<4IN{rF}!`4@YMZLajQv!-K0y1<-BLYKri%5$INJ)1ybobDWbR!)@ zhjd7HOLv#__hYU9+G~G%?<23v0}pU`=I%EySLBFkn#0z)})kkAFIb|3oLrG#Y`w- zg?W(ZFt0WHVaq+*9b=SUyDymsOb!$RKsT^%j@;`d1Lp>l`4N;UC);V$>qCpi;i_AC z>S42!7U>7tk@%5pY_(b-TfJMUwT9c%Ioq@;>=Bf@?uBvbKaUNw zo;j#>r7zZKT>>24d6=;M z_)e$~Bl>=~ZGWh0XwH=zgeFYZ!qu`y0z6uf5RZ=%_xgGI*arb5u;;F_SYVY+EOYvO z@vQw4ByDCzL^^^W{4p3i`Pd6X6-`Rd>Vy4EN+Ulzrlf1(QOhdg)gk{DvE4X(n(ma) z$#p97E#ywB3?1Yx}fUKY3v4qK(16C{$u5HolkC!fxOO7-+DqC%KREt+~p zUMc-hI+G|O+(5qQ>lI4)ocYCv`fC|+2VR-?48tPVs7%qDN(`fxxQUuWe-uR0_r@i= z9RQu$K6_Aiz}my{))I_7C%h;aaT3ulmgdM4K#YT~vKh6HQ$jh6MMkZLO&XJo+|15K zx~=tw_LH#)+!@`w9f3S^!7l}xS^T@%inPT$_k2?&^n^ca5k`dQ^%M9_jLMp)ngVc+ z{oMBsKN1iJ?3=CCY>>qTj_}TR50DPUPrA>0J~~~(;-*olR6LS1)|Ggxe{RKlS!-*e zo8dnpRwaPScaPsTUtBVI|6De#kG-S6Mw5S9A<}Y`#d~qi{S7gE?iJtp`|#yi7iPo2UQg)E9>{x|^DIg0?1o~3Px z|Iz#fQyR;rzeb()s)$;@(%nKw)YtzK;WA3}Lj1nIDUh38(afk?kz2J`tVw@4!-cP! 
zm?_^^)BSJ{FUa_I)$Fqas`P~fa6+3@z>0QFuL+H@UZny5Knw zpV=|XeV)0Fd3~DTBt>`@y1CHKBdou@;T{HVAG*~-4>wSABX|Ako5g*vLF?zwB>}qQZI)uo$w5!=bE2&3Zgg@cCSsm6En^Pi}Ytx*mHj5|BKtN+E}8} zyq!wF5!gya>M>a#rgc28ib?91`W*^1wMvRM$hLCO5r zV=-eU4!@-&{j*HV#I zN@Q`Jif_ohZu;owJw*83V^QH%tjyFue)B>#&jFH-I8ab#{e3}NaE7BLPQhaJfx4v+ zZoHR`3+2u)l$SOskNwwsxIY*8UHDgbVepll(PrY_Nbs6h?KZzdS`)4kSg%Z-3H$?K z&vD{WJz2+nyq}~IEJx$8__pUVf_O%|z=qcZMc_;S6S;JZ5^d{0D^x3TXu_Yf1SQO` zN59fSTIxIF`*M2-PeKY>$sd(YHJvJBPPY#UZ9I~OQ-UTyeP=7yTQQ#CYtUi0)r0rP znPJ-QM4=)zCEW=W2U8(yIbsHsFv}MtPbnGiscGYXk0utjw}<$b_wWDBZT}~(jS93` zXwj2vo^zX&lk6Ps2Cn9|qn?N$c9h<^Mc79kpN#f_U}p)f991b?5!7qLyaT;d^^!Q$ zsKS-xcep`0Oz!}jMi@3gEpJQTr|6(Ao27%2_+;Fu1@3CJ4r#Q9HI!)I?;7h2VdeZ# z8M3WJ4QV+@Sw8P(bVVhpWFMPJDj{aJDwqc2wJXUN!YM*+ zg9SJY0p zS@ZLipr!oHRWw{*?B=2l#WdN;QqC4nDdYlB_Rl*Xit29&FsQ0g8svtrsK=UGmVTtJaS|C!hE4yQz^Cv2*jpy z)uT@85@I(4mR|V>-8G4;mRfo0s_pIU<`oHqMcc?+Z~NhH85`z4><$cT^OxtI%G(S0 zqy<(M%h=TO^}KU+;F8;>T}Si{nu&bd4UL=5H_4t^!i?mHZB-x041T98^^-?k9N`YajAo3rOj#unPeJkYN7_Vhcns$s9@^;e!3LifsABAl zQiG_}BlqiH!fm=|g4qcypv2GM`|x2T5$y##;sELAk-~Q0k}{~?i3BUQ(Cc~xtKjSH z_+U5^A`KaZ(@3S-=iCwJF22tG4#;NAvamxz>re5pugjQWD!?|!#vz9 zQC}S4Dvu)*8j+{CzF)<<=4F^sqIcj70X-8Q6+LPDaz~|gZ=&*}j)JaL2Evht^eYf}ASCkcM{jG= z9V>8_wnu|fH0I+6vu6phH=j|dD=c#i2zGPcU_^9RYoArf(?U8vFfi38k-rCh%rZRxKPro1^dH z>c9A)I4vk9vof#)V&~w5n2s0t2MgpFtJ01Xl;2rUJ1j_QZyY07{%0Hn`S4q39&GjU z$_t!R%Vk4rJg)5p+4Yv-fZ8G3y>LH1KHp_*wBPsq@>QMBb)S*JE-}&W^ zPro}un4ZP){w^$Nx9@`0v%dcFx9^}8hS0*oWggdSj_V$|`5af6%{bR(;+&H<>o*O0 z4pqU^7a88JK*ZZ?*GnLH2SJWy9Z^|9AXaWUz^t2&y~I{}-2)fjkn(i_D_bajEQSf& zYp#WdS@d8nsG{~TRBe+qE|CAg_VojWiDFX1ZZK9urp*)Xq|<^*IdD1~>6z=ba8VSG zc7yp%1kOj$#2Z7bRag{x!s*38H%`S?i-hXG6mP?B-hj0x9|hXHq*xWSP@*}&gmHkX zt|?)@h(W-hTBjJzD64v9z*h9W)W&`~71%W0KOGos0M?8HJ+2!6%5w@-n~Zbo-84BO znXB79+xeUA3$!y>xo^dwk6w0J*nY-rcFO9~4u6L)`*3++SI)W<`knnH6s3|$)8ob) z<@;ck%R6gtyN?x8SyTds2$+fbKIVZewF3oBnJ%_S>uqqc+M!IB=-+r0N6p<=7@+%m z0L0w|0`sWMW>eh-as}rjL%S<7?U}{spQXRC0u)w{{dbDnI2Z;kv?HlDSD%kaZ1v)| z`gKWux-Ae-Dq$xp&)dhu^YDklX>@GbCz_kwoon!u+<(R}nWq{zxY%)~5O>xUv&9I-{N$-|!Eu&T+ok zn24rPAmsRnfs5ztwaI?AkRW4IFsOU4U(L5>9Sekp4R*3x^2GU4zLr7Nky_9%5vSi? 
z_#@6KP0X_YUes>=kvp(xzD0o3$Q9WNf{^CB;a_7$OBuwHqVs^t;%x{b1CznwPz>We z8X~H1d>!~WgRW$ir%Ne2DOO+&K+LWJV9WxDaOFh}$sDaX92d^>r$&mjIY#oW4$tWji zU`M#A!{eN)YID#kEoH!E=J75<{bXS)AV|rTIF^k1PX7f^q_2|W%|PF%RUxDRnkL`K zCc?8%y|(|fD+<29AVrv_$*OS)$9V#^f#QYp@XdCfP`d9DY2mIM+HX8xLL8&b8}+x# z{@WQ9ha1wBruREl`X9TL4SsM$aI$P~6A+|iFai7M>53X>01qfnp>)L^+t|?A4&(N^ zA5L8hQzx*>P3k(#*PGtYebny%(d$^~5Pw=l?P<#MCMo?JN}q>=&J+p|y8D^;74l8o zK(hS}=Kb%w-(s9M1w`)&cA|Q^>Dh|LrtsOTb%OvfNgBpv@vwZcr)^|)rFqn}8X_B1e;{UVmECw0m9S3sK7$Kbn2pw1uUI%ryxNOwH(J*^1NWLF%B+6lZvS$znA|rA<@QVRp%8-^WX>SL zaNqTbsaIT60ePaCyT%Wg&FmFJ^ccospn}i~H7K&k2F}K4V%GcnY}z>gJRJ`Iw&D6< zG)iR-V(7qeH(^TK=Z}hC9dgAs-~ovFF(rLPG#BHy3O*Gj%%TJ|AO>23(nf9>8s z=LNA-KZ|43{`^|q-NdMkf^1cBufM z-ny?}h^MH|T)paZ6!Y8e?yfOe+=`p`-JJ-;8E0OvVvjhDMGSZ4gN`pAqNZobS%2Ow zL4hx9L9JmAN$BsEM$z!#H;q7|Ai+AoMxa4_`pS}Vgvt&%%);+YNAz~Nhus6iVAM@e zc#Cqjekrm8s+rK6)4?CiYZL$qikV+2DRU zi{2;@uM<=Pc9nR@hs1-(E%&xu65EK~P?I@+zKM#kn&zXx7qh>eHRIhi=1;l1CnnWE zVj#>2J8*j2QOa+u8XJ4DwCo3ht>9dAQ(OgYEO!lzQ2}{kSk;jR=D63p!)oQCm;EEG$Q${Uki&VZq8*lRTi>p5M1T6N-h7a2~ncBI6)b9c5T`s{FfI| zr!#OsvR!}AZ;FP}P5;aaL4dpLdGi{F$hJ%je3+Ks(7ovO>}ACp^dZWdZezRM(xcCT zx9}TIBk;JPWXEvFD!wT`a9^~$v>$7|T(amZe-NKw3AyjKr<|$FaKEa4Q8_Jwg<+Dm8>7shG);(S!d1IgH?r;e zb|>vn?ukV)A)mUZuB9^xWlpdj1bIRNp5k0Se}LiViS!qleSxbLRw-hC(ee8cXS*2S z_lyt#nzSE*FGRGH&AWqnc5L@5VSBfg%aqXDmwkubjP4QL1qiM6(x3B2bWb#vP2&xP zKg&%&N(1#Pn%Q?U=Su$qNgRr{W%WsVgnvo--{~T8z*GMm;$0tPDy%f7!#B1_6Yzg< zaIoFD+!8e)js1-+rSbhdyH@ESD1SNX7{UEOqPd90ERJy+8#x+i?@eZUl(hrQw^WJe z2LhtK@#iENM5Z@Sd^;cX7vFZjJPF3S$bf%xg#zu;TDlaJ&J`1Br3QzFFeK2MMJiQv zeHD`Qy2YN_D@}@;Pl9Bht~92KpnF3`T|wIMC><^`7YC|l=Rws;3-w5hfas#b5D#tl zIxgl;zfUAIDO3Kc9|;@v=ysDUWjMMS`OKy?`!OMU3;LV-dMkTR8v!ZjG;G(4qCyzd zjaJ{Fr$4neN0EEA#MCtGez`PZKZyv|u^H*?P8KhTW1bE*W)I-SG)7}E3XT-3l)e!E zF#7Zx`C&;bpN0h4Yso=R?;RR#PnJ`CVpHgdD1=3#k@m#+{bM+r#S6)YlDs`_jsX0U zlK`)1Xz2QM!x*`M!jkny^@*cZ>YAWjmG=_ebRC5OTL>dmDMjT8AYMq_vjAO(H9)D; zk_79RH|n8qd&q^hihl1Z2nWMkwn_U0&N+XSD!&F1&%cDHl_xH5;~378gmrNI4z@-Z z1-%aIPZ{*A@RK}JGiaUfiY_~_SEv`iFNN*ROI`y!T7$MxlT6iOx)Q8&y~||AK40rf zvFhP7NVbGZl!yl3eTp92PC8_W+v~Zm9(3?P0X9{>wOhVv0 z)L(MLE1i>uOvbj;czR?;B?PKVxCL%}2zmAOgli}e7#GH!tJ|JeiM!u_ic&0Ht&ye3 zM=3nYJh+`U?I7~03?F&$-i5Aq_vUweKQBc^VB2kMun0SiW)`BTdos=`?8VlA25PB7 zLrE}Kd6l;$&7sr`o@K1*P2@jd5Kpl+%ffE|bbA3-m&J^*^%*8-l-?A&O`hOXreOpg z6k4C#$zB(dBdy^&Q_Q}T;a6}4`Jeb8&^k-$X>kWnD-kd-m9Mvr!PcCVu#ocNwq zb@p5GcDHiovty8*#6kN4zw}#z=rg1u{cjdUkdP;I$UEbGpy_D298PxMyYODWZ`QWv z8aMr|Q<}RY-iW!NZLrJ%6r^_aLjrCVVYc*(iD?E)^EeIDfZ*WSgcJCXDj18T*7&Pv z+PpX3fnWuja>haa`xG*HKe0XN@1Kph-tRkNkqn4xwo1>9UzW-4-DsxjFZUSRnY(ZG z#}~Ee3qGXcYX+aU6arD+UsgYWp1g8J5_NrX2_|Lkv>~39!w1@suV7Sfk+Q8g$-BHY zIYp*&%*#BH`*uCe{#;QYbR@^pQ{q}^C5QAV@M`_%V9+3&YH1BP?@U{!n} z-lJFNU)0*m+!_y%ou57TttckEN28W4azp9??pz1e^=sgt>Ezk+O#;f=%F~+c(Yw|! zcoLha=cOsp|Le6JJnw#N)b!UX^>qFJnHBr5KemwpCSAPiU9Yk&xk353J{ zjt~L)a6ux8_B^YW*%b2*H@fJ7l!8@ZKO?n%(XM0Fk6$YhE44O6d0yt+X*#IeiH~Y{ zc|AcvFU{Pa+g`ArFpJ#ltans#RZm{ch}2kR402@Z*$mKhF;@1h#?C|OuMi1}cNGSW zxe_DhAm)oT^^A&FlHZ1BPgigKRYvys#|kfs0ctN=3ia~-qlRyvPZVqJ;5t$N%4f9V z-%Oe7H-Nqos_v6lXvrUSc4R`;7>Ip_+bfiIw{hkpeL5^P+KTifj5-K^V&YIw zAh6AJp-X8&pAH+9l2)h^{~^^SECx8hir_es{GJt`efc9lX(>BVzj?)9C`x?M`()yWnp@4|EzCVkhpnq?c@ zZ}Z`&0*?-HEQ?==fN0LJ-4HXg!=vGWjYw`#~(9V3tRx0jdUUwru0B z5*esZj*Lm}WjB;>=L+>l(?u$#l=wVBZJ%x{!u_uUDPnADSl7FYO^98)H}u+;3_Ng;aqOdG@BACiwp+zctm*Cp%*N#MD9Mky99@ zZ9ym(NSp5=f z+_OnYcRTIJ&04 z`!3)crh=;#o{oNMXeB{bL)pzhf~&-D#C{n3Ed{qsQ1YpNNAD7w6a80c? 
z$)td*&gD_tPf8V^uN6x9AuyN;E|9mPUv*km@6p|-pUBj126@)D?Mjs!2p?;F!hh< z_`y7I?Ard3Lt`d4_UNlAW@2_6BiW>>UN)kZ%Wtpx8Kv@6Na^?KXRy?#(zxk5#NKko z;@*d(1cWi{8aNk@=b-pi{SBw!6ONm<#+eRMQ5mi$qy()O9&xjf)EEy@J?fp7QF&rt ztLhxR1Pt9w-*_6g>=z?&dQ}-q{wdmA!kaR<@FW=_b7$CY!rE@75$Kye(2>MrIF;oM z6FY^)Amg3@e#u8h*oXJ5WC&L=wwH+-+85j~e}qeK0_I9Z(?=-_^CHJ8N4WPd-eOI4WJB~>1{y59%%SVH z8}c^84o6+2lH)4f3B;aFTQUpeeu~B^{tf1|K__;5Cz(i%3C; zI@aDrn@d1UdH;LUWUdc7p0fs4?f;5b)GRpTZN%B|v|9 zYcKp)Nb)pbaG_V(P;!y^OhmeL6mK#RaHV%S&ACwM%P4_dP~DN6=GwE$^f!-Mr$3hN z4B6f9fdxb*4-5n)LED2ZYB`TkjHYC$wJ?Z*9Y;_V?rkXTQ0#YN|MTAQ&74luS>A44 zw<*i*5g%ieYUUx9o?1<(N2;Zc3dy6$W`$PcSkWr6r{GUI`2S|S?*8~HH~4R@FuzxC z#%O=B)TnNX8?E^8g}&^@p0(r0hcEFPER@H5fkD)*xT;o+;=&BW6i0W)m6p>OgfCD z;HqyI#%Av_9a$p0{D7Rfi)mxb&8%F~H1(`-w9pJV!*kBxL+dT_Gb$Fy!rHI7s$uZ> z8H$iOC9hlg?8ezS;GHm8Xr5d2tDLu#-@7ie`)cg(uQn^qT6il6v~q1OG2(Gaq?D_@ zXch&mW`7RSs9fM6Mq$A2z8OeNOQVQ`fQ$GQhN&5Ar95_FJah$DLbLM+-h%FEQWr!m z8+LTJokD0(ee;^xD5M9KZkTk zcA{Bd8~w<3ogV#2crDhO{qB9l#D=)edq8XoqhJ39pB;Ln{F~$@j@b!fVLITj%kYaX zU)O&)1$UN_KhmF)Q4a_5?z&=lV{MOk%Vyd++@O$dZ`d>X=aqM)?oB0mI#AaI9|}ER zx6K+_v`y*Ij~9G1MbLtTO9*Bz-ig6j`z30y4AL#D&T@pexErXnNi$YzjA(h4ji^)k zr@L;Gx6$0)`?<+jA)gWWN5T&iT|nG&N#h|eq*Y#=q-?5ptw+O}<9o9_E|2s9QgES&+1YE_R2=QS-$ny{dPW$6B0QWJE zw3TDN%Ry}Kk;;~y0Ty3fV`iE0A~l!9U!hQ{V?DeI&0Z$~RBG~h9_r`?+xm6ei@?qU z0rl_%O+8EJyS2d;q3hsLx&6wvoMuUfuW2_Nah==cVT38e*-to-;arkzXGCgJ;dNwc z)RJHWo#iXSi?*M1Y?xm}s~4cLA@Tk#Z|mXYFiuv|(8Zo8FVnKh^j}DjX;X&=aEUx73PreZ$MX8U2l2a@CtNhvK`!LkNPl=ldI$+k>t zff5t`fz|9e#Uq%k-bPVluUpn8;@b*zzE10_%E|gS6*7u?C7m%EQn&J&BX@4qMBV#%bO!eejO z>kV`+(ku3@IYa9<1fnzB9gb_#xGjaSb6DN>}IyZ9{6Y0{kXoC2mm*Z{2M?83oa z_jB~7;H-4Y1%FH@pZ^{9I`9i?wayB+b_w*WX5B)-!#`}91^7$ z6ZRYdv(t5~U4O#KA-)EFhB*ApZ;&j;aGc@XfJu=@45jrGg_J6XB9Abnk%5Q0;Ym3w z{dpv*Lo6BXfRkS0vLibv#bpIP5?p)Pv5Z`G|EEm7Hm#ckV3);pACbp#9k* z_mgkzFr&%u^eYKWw6gQhP{hqEW6ctb`?!2IwdT7G!_D*Y!H&(JznKr0g+1v17|yEx zQcxE@ko!QK*kcy-`|O=KnaFe#orfeRl?Q) zBWn}bBU!*_61x6YHS9~qFWFJvVtLSsc}x zjL`BE-e-J#Qupd>Qa0zhj`cPBM||zpv0$~!_`Q8ka!R*E`T=Xu^>CDs?T*q7y4}KN z#tCOd3o6y}_2)4PicFO1Sw|tDFZB$=qiR5s!J2uHbLbvg5BI1k)5f*0a@Jp8eBzxb zNZ>+;6GAZ10(?^6WdRf-|7sE#O95((&Emm4(iUbu;FubTBW1{qcHo$Eqt#`9gT_9~ z>&ev+jC^5z(#iMAxDF$mH!l zjmI|-Z1w`F z7(|tW2v~QBV^5P>bU1%$hTj}_>HlvY4I!L~Aentu1fDAe-d@sZJ$+TRPcL5~a+3~h zq;7E*vQ$Q%w;}K>2b_cDwuPQ7YP8%`M$$jhf%p{uW8u16v4lWSNRZ$x173V!(G+Xv zAchqbVE>yt$O+thL%#5$!+Qhc72({>qqp$a_a^*kC`ue07Qvvn8`J z-`fiJfizP}Y$*Rnb#aa{1f-XI+^x5+zcao_z$!r)EfFo`ui3B9h0u#paMxIu+1Joi zfHEn@0?k{d3R776STgJ*hPknLMtDpy)od{E3s25mr4|GaOPF_>_k|jr^rsm`u_yqv zT2PNO*LHX#V;6BwK|N+Ykbkr#Rwh%y zNJIhX_oKoW>G%mk_b4PsF7wrKjyyfI8S_u<=} zp9z1jCB1vO6OWP|d&*E|NlAW@t#p3W;Kfi>D>iTn10LKp8l~KtL_)UW_#<-K^e2oY ztNGT%)uX;;O-xy6KZrOa3L3aH{d-j=P&-OvV(ECkCXO+PW#(o6(^$F<2=HsEd1(&Y0J?xMs<=e*c-4kfY;vK_vkaxeaadCck?am&BKeA%Z_>qqp z7v}_r*w}Qy9h*dRM}fy~ZO6R^m7EWtnF=1l)%s?Be_75#V_g0iIGBGox6!w?ocpWT zjE8ZjB(#H%UwRXNT%8vXk>B&I?N zJmJp~&YS}R$*rciF9x*7N}+DKrA!ED#w>8Q(!H($4-4GwzV z^>qL8%&uD_d-zFgy#z7rD$Ebr5hb&2T-)o(WiWG_83slvg6;-^3YI6xxD1GwXt*yr zY!beS#e4}FI;iE#ba}c$VPM8x6ic`MiwkY66Ryauj*~z0JNbR-vJdFb0g3eS!Rt>& zEk+I1CFC-F&2k9&Tj@aQMt|h){zmCRQ%S|LOrVXS@@LoClkk1^mQwxiF*TL8^`{dy zt@;OFs(w|mxQk`U-Padq>YMt`e^e;ztYmdbct4sq8TDqa7xzh`gz~vr z!wR>6yKX7LXpfW?g)~`zY#@RsT9A1p&$f9qks2{VsG9a}(0R_vOZ_GG!z{N!sl=WC zN%A{>&XRx4X*?;ft)RmH4N-w&kM~d!$|hwId9XX_*7nTULExB*tLdgn?6`y_dW zEd+&#%#k%9KSpdZ;$Hj7^H_3oe&#JgD@k&FBRL#4Xxr7^xTr3fv6G=ghi;-W-w22x??uS){OZd&Z-% zrrk3`8vPL}QwOFAhM8+;Ip{Iz`c9c$m^W0@4<)W+swGCJR^mTTP4O(*l?4hJYZ9fcn}n}^fIP5msL zaT=Mg6;6YeMYUlbSq}wl7LKNzz@!WOW&Ky2!YKyND^9*sT%#~0i>B}&$Pnxyn2*J5d4XXa)YStl>-GEl 
zw3iF4neT*Fw^J6Q^&?20PkG&;lDh(`tq_WwBJy7@8%yh#c_g*O_$4Ynzd5f-NgKsh zI$9Ou2gKSzuxIBwS7CLIqqZj95>A_5tZ$KO+$npsP{fDkuF(6@pR_p-y)h&qz)a?l z#INoRM8q_SG_I5_-^Ny(KP#IfM{YeIO1?-qousNHyyH?CzNHp<`p6LxeIfzA@Omu; zWfT?wvthv3zI>zSO%?DhD_p&JZg5BgD829Ql9Tbng{bFr%1t#R|2~=2o^+PhL(d7- z&~ksAtWxUi9n1$616~o2G=!=YX)q&T4l`6fPU&65{@ol?*@L0IpFBAJYan^K7q~#u z@(cGqN^T!6OABj)W^I&IxQ&g`PV-Uxxcssm_q?VXhU?^yU*#RFwrO)~S@k?N(|G&XglP7pu+_K0GRha)ifSj=;0%DDbp+7o%_WLcaO3!-ka=;%t z*4#=F(Ay#nNoQfuMXDHaa_x}GwZQxT#qAZsMJ|K|F^Kq{AzW(pY{W5;)w^8?G9!!; zej^)1lwr)WMH7c}7m70{mgc1+;?1xq0uq!3i{cixk*m0P&j-yIZa5WkrL zWkS!8+lG0AiHEJ9+ZPj3ZmixH_NV#LTfSA!s)+6BKr}2!iu5}3?eCN!>gGKSbF4JF z#~J&IhuNV7oQj@Eq2N2J-m|OpXXwDRU6=wX%}F*dfIR5rJ;I~VMw%hK4@WFhaOLH{0lZC$>1B8WglXbMgR4&{}UWSy zqi~A_NGNvs zci!eyk2rRj>I ze}z@=cz@`$39D@Z@*4n;%#-&RI}SiMAOY}bE0eq&SvVYnSthW>c$!#LiaK5G`~isO zEwKz0I``i%pOIEQx-OgH3$0=srb`LU6${*&S1oJXm6Qxowq35GW4T$zgXIe5GTrdL zOcljP0JC%C*ZTdcFu@RdSpt*Hn}+RMNdmS?-ptC#EMjT?}P9ruEer6uF* z2v75Pf$P4h*o%8K+B15>Mpu^Av=S4#_VJcfA(5MnBnOxg;9$1o_Vv4g2a2eV zwAVe+!@k%OYWb+KqQd=F1;hz@Ge0{~^CKN0Ys-)K$H<9(!@hlIKJ1P`mxuWW*Agz$ z*Q`HMpeI52eH?DJkQBjdQp0cXHD@CG74JN8lk&eLUydnPb`zUn4 z@mP6Bvhn+_yIpJRy++lxh#U1c{G7WsMoPE#hF!whzzp~`wB*~BW$ zWwGh{u{9D>q&up*D*{J5=+kYANRcFf7OGNdQ*WdpDbH7NVevuL-4CkL)_%L|_2Ic&eHtyuVF{^WiGO#+gM`&wLe2VizNJ74`e-hsL3 zBlltc1w}oY#YafEO$U9X8orM&AF(5@=YV^*Kb*gZ&ZDqtdi2EX??$5jc|ixB!@wQ* zva<5GpUD3kmg|SdW9-jd9|?Xj8=(j)=W5AVo>_e?%c^ z;ov#ma&7l^a&Sb3A#b_UmixEp7g=t}m1Bh=>4N9A33*O! zks0Xj@f_=uok?I~012N{6x`{cTx?-SRAwWz2*EkV4o>j`KV=Igt_rMU&G(}qt7m2$T=E`4RWJt za2G={F3uE|j(2(wFj!lTc}(;gSUqq%9)iHut~Qe84)*T&*L;?>vU2Y*?KDsM=3t?$ zU}eChh?@vZn)7@lF67+Jp-_-`wA=$`_6~WkG}#*+eyCqgJ(n-Ac*@OFbk2r9QLFVGkpwXXW+RF?jaFfSH0BBpj%bou zeZWhipd5|*s>fohgn0v%_*|EFq4L|DwF80fkGF=P$vF1fCDTiu%gQnCIdL#*8OZ6KK5P8b3S()x*9W2VP-C?wGzp5R) zZ-a`hN{?AEnCnkm?Y7@?|0NGh^0ZAk2?|PkXlvrZ?jjy%%$}rU z-AlWVgh)Uw2^LN)rdJ?1*bUWPV-mO`8f2@yfPn0+9mZUR5a?Jp{#FO>qwz4Iic^s- zL5?;iQp)eSDe5H)tA&uDLg#*R)blx<|8t==5|$XN&u_i-bhwdKVteE+7ImCHm~e^YAUNAG-#i8J*)acSzn10=ZcX%AGX@lBm^7|ap94eD5f+R^x}as#S!Q=1&<#%# zH+I&KWFFv7`GKvAGgI21=7i64fJS^b@fawVr#YNo6CWzKM(Eu>-$-h@n-{cIb;wYm%KZZl?PDJ4- zT5*W6|BRhHo#U=!HB~@k-T&IPN|bzlcFXmla8b$k3Ff((K z8EUKoH`!HTpy0BBUBE{cH!VybD*^`}luXdrWn$p1-=3*~O>t zw(nW`6g`ChY*CT|ttAoLXOUr~&w#H83k<+Jeg+R`wl~tiE!}elH;dMHNTJAp3RT?L zX(Z?W0b}3oU?+#Hvp&n9{q$e=*joqb&uz|s^Y;G}V+sm_BTCfA=i_^8@*F)n3lIdV z9rW?UP;ywa4mv2TX2Pt6=S$8yBQ+tAuAqp0%?omem$PrYk^6sy8ovv#K9MywH&275 z_|)CaJ6eY@dd?py3RXIrMSB?g!r+;GSoV^YmNO%A3)N(qlQ#6mAUOq!~}8I;E*wcVai)rd+?a;5|VIv6)vWUP0gcF15I3OL0Z`@4)xRz!a;;DAOs^u``Z@c(oSM6Lsu$WDx?Q-uGdIga&Ml z2h3KpcHAlG%XcRcDF@Ys)~^Hi$@{be((?ub;eg;(imdyNO-~X@gvYtUTdR!Q;WG+e zmUl?drHLS~!EsNKci*KRfR(xT51vuWC%=rwtZFdgYDOIEzw9_F*AiYkQ(U_PBfb=* z(V7wvIlYF5{UAb2u8^YxfK5edz`2q}S$ zx35Eo?1jds)qB&90c}f5-W_VYp2j5uOV)>#P@K49Pl3qh$l=;mt>exTLO zdHx_Y!V@s1`s9=Us`wwb6C?wH6b5>kz4S~+sdmSRF`bj~)oWA<^YTDPF&4e*=XcQP zR@Ku?2f^A{F7MLbOkXr-u`Y7Z@PI$KV?|CrntPb=kXSoYezm{L!dY$JVCq#|Ex)59 zo7mNNvUtIj=Z4Qn!mMS+0u%#lZWMVu;SJq^!9| zcxFp&xvE;qSi<*c^i~`amx;FI?%0BCkRzO>+`P*bR=-_6@;30pviSDt%Zdy8`xmFG z5hNL$Ag1@gBLf*nubA?45QQ)u3X0<_>_q=$U!~3xrwnC@b&lk9QP|2U5yRNqU+C_w z`krF-$VNpdG%B2@rW=_9%~{FQAh3W^f{0G?W;#Uz7ulk?-w=5#kC^oi@WkiTNZseS z!vGrDU$;Te=<-JDII%2gt}?N= zR_KALw=NN1fE8nX%$y%Rxf+d67=`H`pJd3i{G4hT{oEj1R?R9K5$N?gA!9Ar`qf7R zL_j*&=ucWD?Taqbl5~sIq)PO%T^AxQqfqz%g@ZfgY%H`79wP^X$ zb&GHCUGl&Per?ox4KTFut11PYbTRDNGJDauyrBfY5_~)?!B76KKOd;QBbC#?%6-ZM zL~2T_b94vXL1r3p9}D~EZ~h9`dr`hs1aw^<Zvq;T3l&!{LTDwS>nN}Pssc$=eOGHp1#Loe&c$% zQQweg#xcc!0)CA~PMHG%vN4iOLw|{pk1Ecg=MsN}MQO+~B65t!{vTIw8C6xIeGk(q 
zjWiruKvGKT&?O}$(jqM!kj_JQcPfWY2?0SGM5MbxK)O2*`EI<=bMOECeew&#G2CqD zUUROw=JaL}dBkIRQs6iO$`=u&nAjUR(qE_xZ{AF2sA!p@Qp^nf^ooe@N&;_s9jJ{M zAA%C25p8%N-_qmGBAiD?92N8809r9lV+_G#K+|+qaqC(qJ^tSW(s21=To#&)pL2CS zXu#>eS621A{n|v_#{Y63j7xruM(U0Oh5W)MLB9AbL zQ8(_EX(pOcR+O*`K8No6UPG_R_NQ$ z!GNOr8P8#zBK)mG`;*>_CUo(cFOLycHI>a{w+}xLCvk^MdEUDc#y0X)e^gj~F;4w5 zTLsu=2xqR&Mdxt1UVu1AYKAlKhskD911i2~qMo+L^m7TieTH7(H$2kl`WE=0vv7i4_Nf}H(@$cUs|IK7A)8i# z&LEu z%>=v>9^A&~38`re6-`4_XT}faxJ`R6oQ^id?MG%TX&Na6X^?>2#yMaaKOv8~9eEVa zT2ea;4?u&sQ)q4qTRA}8M0mjIx0)Ldu|*G{#LC}8?*;jrETl19x ztLaMb>wWPsS0H&V5?)pzhf*kL%YB%KQv&}eki~zZwzfYYWY$E{RUUOdRo^oBJ0)kx zOR3rJ9xQ7lEBcx`k-%lR!1;=KXK7#2;3Q!EK`5KYi!WAAHAMP+JBb(X8F>RHm&rmwoP zfJ+h1pQt{U2(^w1}mPeq}OiSph!|2q)%m%-TD z`6(muobrF}2VfBQ@0FCGKq>S5$cfr#RYi?cdTmxp$pBy)rw$%5>GEnJm!mioxQsHR ze39d^7pM59m+1$_vT->6D`NVpo!isFwh_%mzkA{n80|0-N!-GgI>%67VBNLNdzd4Rbag^!?5LzO} z>Daxjh^7NFvvYugAH4XliVkuA$s7H;^&@x@qq&M=l4ubH75VRk{T!IEf#65S_Lpm{ z*d+BGQ{@+qS0LL$J}OKg&tBog^usly0MLj?*95=f$`$~er+&{QfDq2Z7x9hnEc2z~ z9aH?}xQG~N*R9P~)9C4GRQlu-*o!N|!U8+bmsz zYpz-^7bpvLoPfBHk1Pw&Poi8yZJ%xrefMq9INbBP3l`3B1l_Io1VJCxPvl#>w_FXK2DeQ0P-;#A+i#}`VZe?krhb_) z#i5}nC?^bueZTOQ!TW71e$*ElfrBNDc2I)vQ;+))k2@P9Q_rU~J=H4{n&JIbtI~e= zna#=8S0xX26jWr+<>H=aBM`rpCs(Q)kwe$HuH+nKr=!*%$k^$Lezf#iDQ@I`Bku90ErQ zOUGM`4CIJZRX$u6493qR=|?%nN$E^1O}KKPUaxx4iqJN}S^ksGl*7$kVIe;sX8<-T zzAH9BhjCK~1V}_(SJ+WH8F}(pZ`7ULj7q+q=WKPCu{1{L8^-{FqH|3M@q!@gMk=a; z*c+)XfYsM*6Nqt2MjjGWxNr9O(1D1aX0GN_t$Uk|zOGVwOhoH9XTfUEi2qy&JD=;v zvqHZy<`8VhsPSJ(C6Tq&0<8(eYn4THFv_x4>FiTr7&(O zxgh$I7qEf(+mIMg+S1W-G7O(iYtA4Ze_Sse2S0}D+v);V(bQD&;+m$BLXY#wb~=;; z_vxkqN!8DH?hzg6JObOFy%n~w>^uRs1xL=k!8$2Qw7lE5mDCzyc?E3y*Vy`{e06s$ zyI%tcX-V&#C%xz0cqn$&p=uRsuTy@)R!?s0vmgD%KbhVS*D7%%4-b>E8Qaxl@j9;J z=|Z;K++WE;-H<}_>4yJCIWO!_GqEO1fi{M}@ToeTKfk&GO8X2c$UFK(&Hynpe*bJxli1vkO>KRema(~adnWbu46Em8E1c3{=>>GH#t1&CM8UJi;ca@DC4ENMc0q}+OzE6%L6_X zL_Ui#{9~;r|3BbFl+A`dPQFw5J3tqQRgR>FqP-3$s~uL<`z=4TS^@}ZApIrp?zC6p zLI!4X)u4MJka4OFf>JzRLcm80|5l#MMR81rs!%g2eE5ur<=TCgpV`t~*w77(kwgo#M$sJJjNf$8Tcf*hJX0ciFFh@9%b+6{Pg54!B8;Q zC{?2~`s1ZBdaTq_B#CFyK@W=)K##uU%$Tl>!=VVmiKPFANk^vZQtjX?>rWd0&_#AF z07-w)2I52Ek2*~Gsf>@xgRkm6?ul;t2B@Z))-3>F5GWj6gU$Ft*P~d=r*nd8ddaXd9Ti#Nm zGmg6YdrYSV>ed7MVzY= z4B^-dsXe~_w}#}i^BPO%9TdNXwA*u#$oZ8FU&u8pbtm?=Mc<E`3mz010`&qcVwcW<88tiELVFXHbI2A9lvMb+R`Hjk>2U)xqCSBXfj4+b3aI zk56-CDL;ACZ*mv7xLPz=BHP)?0@v*CSnQM5OSa1-{qUqj0u{t#ZhO~anTGG+T96-u zKl?X5L#a2gE132c1%Lm6G##AIFHO;_&8Ovl-4iukcZrS6J{ifs#{J9tSKSpj1wl&C z_Vmvp3tOXy^Y2!-UTxBb_3_~7ZDa@BU|6Zu8vI12&pW}-P(yy36f>55jT@6H?|vwM zA$7q4Ar5Uv`q<7gHmeAB)6e{jct00a@Lp4!^q>9<$N@gEFXvKO?%39H^H{wE)wqV$L^U807EII%BN) zHIoq**dl zL769(fq_kzuM%>RZ+{hRwkhf&(ESZQuT2hN-2Y{>BW4e09Hid8T1dbP}7#v=h4hURZhl^H<8{M5(qD_STXbqA~M>+%An(Z9uRlEiEfclwrNWmspQlrJsjyP0TW1ty$}n zRZG)Q4g?6vh*e>}){2s7bENP-jO_CY2y+mUK3ol?_GWzFWC%=rNQmW!8rcS)?O72zs|7&ieb(aUPwVt{HU1zMXdvfBSF<-|pDnR3%a2kmLE z2w=%BIkaH1;qD!iVUfv89%6aOO1UZkdoWyH2}_urM*7~5?_p}2X(qOo&Jy!le<+x( z?eIQ~zu*0n7djU&>Y$l~3=@T5Q~Ei)o06X6ds$XwR!QtC;h~-I9B>rbXc^b+l`O)c zo*Nw)bzigi$G}FbKk(&PPexwOo;C1TLB`qr18t{ef9JnQq_6x{eE7$k_XXL`Qdf0B zR~TNy4GP?s-uGD40dpSC@)^EM?HJ}EWNmuMyj0}c1G*Ldb;foH_w4G zux7Px29Xa_^5zXna@qrYEJ=gZ}hBql^r*VU51M_m4T>V6~OwCOf9FyRH=ox1#8aZqY> zAa)L)7}(#@w1j8PH8wV~r#0Dl1<#vyvnDBnx@^OY{nH*y6o9iN3^OpGFC_)OQA zJ{!Jg^n-aXGh9=Z^-A69y>vCV}`j>o|El20xH#XX12>aJaUxIY7ODe zhP4aQjSAOZKLrIaXZ`ms$OEhh>_M^2MRX#s@0=l$krltf@xKvq#=e+Nh-wz7x`m&0 zT#{HyZd649BJb=5J>oY(4OoW!P}TG|Q8K=Cw8?LAp3I}Uj&o+p1`BaG#kNgw`E}3T z^7@P>~uhdNs{7L0GXs&^J|n)F!24 zwOkuFP!wFJo0cLzE8NX&cF+4Tu}YlM0g5%0Ii`(hj>wQN-$XkT+Cz4Jjje)4RPTze 
zwe)({rv6)JnOJG6cJ0&7$Pc3Zuf0EF|OJS#QEI8uw&N|k0>@n735>==(vcT-c zTzx<1XbK_usn3mtX-VO;>*{9|+rzg~wk&LItnlw@K<8uD ztN7Tgjle=djkR{Nf-Ll`xZ~Hr0BN%I+he6 zcIk}IJ%%aofr9`n2B7Wt>3VV1SOyOTl4fmeHP;cDyV205xv`H*8I6DAO^|(JuYuit z6#ZebGiz6@W^<$Dx)sINSjoF+ddbmy31}OaGWwSq87M9VDSRziQQ9%i$lUDdU?lLb zGEOn0Z7~oj;Q7P2@?KS4nc~&bZ#7M9ma7+2a3!kH3Vz`Fx5Md@nT5;w<&7tGdYjB3^pwMR8 zc70pR*-em83iM&*mbr3V{`&>bD9ZmxaPbpFzJp?Af2{z{)~olY&6{FsW^G|4gXNdD zg!C3;ng4US{v9;i5m53Kx%q^I{mk?1@)R|!9T)ktft3_af>8xdkI~QhLE|;glZvM1 z+3I-&hL&b>qG$A?p*b{+A)G=t>7B)*8dvA9bl$pe_a=z;rdU6m(2Y%>etqXypw%vb zVw-u1_3CSA&1~kl&2$Do3?4PReRw~_JbB&5;4Lbo0#ZxCjJ7^(M_-`hB7?qsE8cWz zd*?h3osRD4DKhBg3&N|Bb=@QYD6}j(GtNr5bNm_TXN}@zJavy9>>!Nf_~O{gk%A(S zuIa`WXu9{1d5bES5H)hqa~C#m2i}-7sBLZVE^U+}TfcCS8%+-ey<288_>$PmbtiL8 zd&gwt$puF0!OJ;+VjVI^FSGR1If#I5UZpk%>+`{VqWG-qOUnNFGv5cx1HfxjGkw`+ z9DHT)V#d-A)=nND>%=pt7_=jm*cY#LSk{iw8+X=+*_ObHB#H5{<;=J6MblS#{2GLn zLav{(kvBDwaj*4B5jrI_Cf@Lsh$Rce$Wk)m@ij}LB*3I^+xJCEZ6-_vdsaSbljYei z^f;2~ze~Nc)I@&>IhA|rP0asRq`-;|Y3wD|x1t0Nt3?m8IRV%Lp`6E@2p{adK4N&A z8jQKHYljFwi}2yCJFPhFXKzO)y$#rPo?PU(u*ShoR6v3eIgm4?9Jx4%D@`pAWQZw} zGPG2H5VZ>9eRrqbYx&}C@inI7U=V#lz2Xr4$XomsWY6XOrv_ac&k`jr(BclXwTRH^ z+s~hf#kz(5A?JN1Mi%zo0||22XPTCPyyF(PYx9s0erQf;2Fnbm!R=M7oy}D{e z{gwztv-%-{vSW8RW0$F9U~BOhVPWQu;KE_Ur4MW{!H82#FYkx*?@Gl>L!;JRy&FE5RK`>w zf}UFcptu@ATr`jTP-sFBgljb~gGS#rY2L&!ZiT&ANBv2IG^S*2?z;IIK6kpQ51LtY7CZIG6bJm6 zu~d)*w)M-MY=}LBwc%*4)0G)>)@1itU#f~dlAJE|{k_~ORtiBtd>PSPm0RNgXDuR;v2rco&Lw^d9iBvtqhzA zoiQ8F=Si6;vhc`WQUK+PujFl8<4d^Uzz2(hCn{*~!C(~?X>)#*yuvvWrOIBQA1|}= zdv5H)FVJnf^ET~fA<7!>+G?6-UL)-!EbZj<*Mn))Na(?;EO19g*`kf|VqmYq zAABYS3h{Vb%Z7O_WE&Jz&QG|^?;DdHhR1$bc$;dAu}y?u{!^R;t!LQ7+tww{ioiEh z@Rx58(T~YziPp=XtaFbm)nc`*F&KK?N}SbOy9gxiOLMcHPJ22#=6e3Zpk0EGNJ3J7 zE_GXKjpM7*fzvirn#D|-1ES{-@?yK8%YEkk-n|)b@DVF4tqyF+eqHeFNJ?4-R$F_K zqUfyuET*DsgA>cthSkT)=kRgsnC(IpV18CEx7t0tqr5rQLAM*461n9+hHV3V*CaE8 z19-x1(5n%bS2wtvIC>>KljEmnn%i|&rMY3;DyDzLAG)~esE2-mXJS{DG;I@NnGEwC zUs+g@5u&`#S$;PPBw$;Xs9!klt@c9>^MD%&yWx;E`? zPIbgJ0nAn=(*4Fg$}yuB&Sl$4)gFfj^ka)>S4Z^K<@qqBCU7b|q(#t)E3`VTE3G3! zWDZW^DhT8h+`>~ABV&Pya@dJ}scSmdCr0CQN$!{23;LhvV~7yvQVWeZrYJ_+L^f?? 
zr(WZpZvJASG5S%1(g{)v#NDXq^rgcpX#wjELHi!x;fSZu?TnFFgnH6uwq*|Dhwv4B zR?Z@pNl@bg5kBPUdDOZ5q5j+6X7iG6+LmHefW|69^*AC*M;L*H@J6AxX3EbkI&(cY znBO~D$dg>P4BSPYT||hGdQs~VX`!`2nqao0o;ECSjke`v?r9?KBsoA=XUE81aLJ8{ zyt0lFNTJ3kYQF9s%Pa97`ArHzAv4&@#we&o^z%fLxJo2EJ(V zz4PF`;|X?O@Lbw>s*niRsBJ++-foHx3>Sl z4|0Sgvf64HUeHn>fa=`^_9R{H$5~I2M^pBRUeJHIVV%W)QTGvN%MYiqKswQYX4WP@ zGT&Fgcii_c>H*{Ke^Ac`-Tr!Dp4<`d`gugL&sg~)0`Aj4sZZ*}w+8VqUnPugJ3C^Z z!ad&pBA@E!Bm-v}Qe09*lcwWR({7(gO#eS9$l<0h`9ZO^^N*z>Xeh=%j6hCLVZ8 zC+^o5Ueojt+w4g15br&~(ziQffK9t3DVg-!$=B1R+@46}f@|se=QypF2xSN#p6}g} zJpi25gt$|6ebgJ}`vBb1eA9Uo2eJKBpL};D*K^Sa;hVAf8tNwM-x;seN-aK0?j0X` zDW6Xkiqn0^eK8O6&7W9>`v&5Ds%6Zx8wO(OIoraPtdpxB0o`IlslPM$eqlgy86a28 zCs}ra_EA>$F;Y55Fg#0K_#OpMfwaI|Y;$f4?(=!pP)@k6ptOp&Z zQ{@Lst=dsK^YP^Jckx!FIU~0* zWT*XzepI{3S4of9`tDdZ(uL*+W z|AGv$VkX*o+GaXn2btVkSZsTwzb$%OPo-$(yh6PPUX&g2_V=`!_RK~6V$`L$kKUp9 z(LHDkeDgXkPh$N0w?=}Rtu!m)nBCFZvn}h_yMr-V{JUz5sCHOyn^N?Qmw5e>S(7!D z@*d`65kAxuxVGNCxzzW&MhQ|A4_#qhsUdc+*D#pd+W6AcA`M|<^K$XPIv=!Bc~zX% zV-%#u;u}xnjZ9pb>@4EH>e9r$rro5^E-dnLAdJ1CVvExyuh6_vmbd* zPiwlg*peN&Tpz}xNTy@1JO#s%KsZ;^PRc_|6F*^7_HSUd8&{{~J;~-LMp&z4+o=Zh z1|FC8u#K)G%W zNK3N1G&B<8=jGp3(oPXjV^JviZUin;(ixcc*&%b){=)MS$BNQBN`$3Z3SD00>Z934kSsl6(kh z-NGRT$$;fDomHql`lUCor(}71yU#WwAZ^OmGeO zGIB|?lDO0LU!U)`q(kn`7M%$*{o?a9YL)cyyr0A`fb|4%IMTT04(JRS?+ia@ct`I^ zxZc=G{+(hH$G@w1va?V3EwuW4;Dg@!RZm~k9mpi8&(#)Q{s*1Jd3cMHeWiR(;BmZs zdkwjttQ^n>qUdU@VwRt8J&Y1BsS<4S$?I*m6)2@r99ZtJTzom2Ap6SB$dE7AxAUd# zsedlOpldTHuicN{8#w=ohy~kI=&j8s1%J01dL|;TCEFG|7s33kNYtk_bc%a_L)5E0 z@5e{+Z{Hwzdz<4u7+xw8f$T@ixvpGzAYHy}9T_FCezQF>t5mIY=?Kr&%3yc1$Fk~$`f5KtcDUBNAa*>UN^ zVK9ey(3wY#A?G*q3_)KgSy$-Bg#0}u)n$L01WxB{n0Q_ji2+EYjq0s2=UK*{b!q8D zm@`&F6QDiI`|NAch-S(5a$>mdbrsqmPoi)+p26@RM990&aRKt^xxB*jo4G~)CaoMF zboihp&cA9S4g}I7?+G`f)6*LE7UkmrZtC&q9_2=x=&SD9uwCTa5h2Tm*u8QoUq%M! 
zCsah^!tNc%4CQoxc-U4)r$$hha!;MMZ~X@MHRg>kz3wM2SRXntvM&@qY@$0sK&aJ% z=U0*O3Ak2)`(`(^6^5&g^CDW)BkuVE?d>;KXJ7D3pH-7C6ZIMAquUS{Az!aMnAl-M z0yuW~-V4qS-=1r@2FhS}tUNoO3WphK-Iq4}?V;AM= z>DP(W*Uix@ihqP6?5uleKdw=`odFHw^%r?pBa=F(!v=_x-`IA@>)mGUg$;sTeEMYb z8>Wk?fVtGETzwGOM-2CiqRE*vXoZaF1R|X10F=9$a65QQvoNpK+SXIJU?yi-Owm-q zI4$=u^aPEM>xY+0K^CdW{i)TbYio_Em_Wf1`};Qp+sV@vY0kw%TBBS_`>136->4^f z3#CsvAEo}D5N7!IC}_cHfF*ygJve<;aA8eJAhu9z4cR^fNG}L#U?e*VNH_24A`dIU?n#@1IVx8o9EsY$HT8n-k~{qc1i-?V^V}7d}Yb zNd5Fw8*=>Z=U&5Eyw@OlruV;%OR02r%kw$(ceOVMO?cVd8;f3t)Czeh|Cf8S{N1!9 zPUKvNj4@D-^2!C525PpTDYkcm_G})r)6c#aV2)D?dh&1nGn+yhrjX~@5Ssij^kMsGxt!SB ztE8!Q5xbuswqfpFy0OKd@0URXZ+i!;)LXt@ona`ucif?XUe4;<+TQ(U zR=XM{jzW2gy(G1YWkCke^7ffdcah$EKY^3|qCFK!VE1zAXm#_|6yysji}&*1=c$*oIhK@fhLbyXOl#*u_hhQvGdQ znvsOBb!E+)WD8|ApK{QcBIZO&=CQ7u1U(L~Ch9EPl5+z-@x1jN*r*_R^-}TRpyYud z?$TUN26Ba>gu)N5vIu}&SM4j{DAF*o0V!fT!jAf?o60vYjW1lU8R zKZXTH;u+_GLl5Z;;8{3UMs6Kck_4c)iz;xgS^SePfmTfSU%4fGhE`3>0%anv=9_u$ z89o6MX?n`Vpz~=bv9H4MF0r`Jgu2D3W|maj93?*3Hh!(?LQk|gqo&R1)rO(!aS<_B z-W64Z_5?3so(Td{aYgVH?>tSz3chcuT~Z}eRKP7QWGvqOwk zg&X-ix!PHw8(9_?M97f`s}$XC17*6{fZ>j)IGY_6B@P5AoL~NO6?I0ONz*10-h574 zJ!;>l$I7>&#q5ASz!zur{Fv9B>qp}Tv^&aH;_B)nOGbzp?dOO-Yj5y!98r+^Dx?|< zYjx=>s%y$H;q(nCxoPSq+vu%61NFDrVuRXWRT8qh6uw_GG|%pdgPWlgeU;lSz}2du ztXC-T_jvMG9$+O4J%TIwf50{O@a66yJ4Eo0Umd2&UiDLNH_P>{huQP?v)~j5w>>!C zb&Pe#a_D9VMl!=h(YnQ_K>TFwiP)>R^#81GUVHI91u}I{5LfkrL4V&jTwkTfId&)f zmKHvgj5l4(opaQ@aUuz<*%W+#Dvhr)_)Tq}+~D-O9rKr@<^eJxOHpwP#&HMPO9^*0 zT@xQ}v=c%(!a5!^!SK*EuC-O>ZSlRvk3#F`ZIAqYJ>J_eS7ux3LyvL7{a5Fj^`z-~pU{;y%{8E4H0a&WS}S7=zG>0t1#8Af6;ju1RoPh||T&nqg-~`{;o-S&t@=Oy3 z?x9sE6ulPux)l$`co20Q-3b0v-{e|OzGPh_YAcsjs5IQ!S`|(hzvO+Uov#ZWu`Gp% zyez;PC`~e%7p?86(y}a@81K`&22KW{6^(sg?tYM3;JO%`07nsB-BhObr1$oKXCPbI zYxAVfspV>0P4MRaB-LU+wbiU}d-Z_H?C{4xN0x{CcfP4^J7HsnCTXiOz-OXA%hwwc z_cgezJ7G0eaF^?QwqQDmIG4}jspq(((c?JlRBeYs{)o$(6#w9|h#y#Mt~2k?nDOlo zeL)o!!gK@~3-o^XL8!R(U&;-}Yvj5)PG$FUluHLEfh-1j-c{6O19Z2SJzyOWA zry-Cgf>-C=nXC@ia_v1jq0YZ61Y+T}YGQKrUqQ%6Wh%7kv4eq0gV@C5PkKe6%XCA_t^H0LRiJb zvr=j%V4h-Iq}C$McoP!`E~fb@tst~6i2z504|Ws%{l&_Tjqo;>UyhwdKF1p^tm~MR zuDrweX>}MEtNWhGhh(GwwAED31!s9xzUD1kg1<8xa{;`e5hzOkMM0)Lj(r%@?ufH0d91x)4ylEc^gWp z8LGalZRs_g(h7>nqZd2PYYaIG*AplzY~CxB#tG@fU!BhI;m5k3Q zdaV**OopXaKvqG55sO->b3XQoy|Nf1dBePjMi>ML?^Ew4(Ojo7YIe)yI+|X@&;hj+ z{;!%yf*-@|Qm$0)y{rZ<`08Efabf0_HX^Q$fkUv+#f&MB|DyVT&*%RGy+{Oc%pD_< z0TKk?67nzyMtYUZWS)}IQ8F$yT+@3I*bDlO+JIl0E+C>0cvr2QNC-DL1Lj@P{;Tf{ zw%1by$luTX>bw_a(e3z-{RH$YbOnti>aMK#IlxGDgVIFGW8GZG-+H?pI!r+}FMmB@ zoomXy9?S#3?;clup3%YuJ9k*Zc~x~j?l!DqLnv2aXAVp^aSI-1oFc%!8ontmSSEhX zI=2vG)IKH^_F4Q|z2*q-fm`hYO!26{f)df5(LQdSH2=|GYf<0DON5G({XvaDCYe%# zG}dlCBVh(eBZOdg<@6LP(fgI!4aF@ZJ&OBeRt!(_YYcm~@nc^n7R2^p|7 zIHus`2UCxJvV2YU*&OfJ;+Yf-Q$)QI!ohQeXAu%cDBq1<^+QI{>OJ5L!hF9h3@Jw6 z_~G!?lzTtD1All<;jjLy;mh!=#(QPvXkYp}>S{S-)c`zk-R)yPOZ16WJY+co(rOom z-PhKg@$8>(D5=kWI(tfFMy4295`^hazZO!S6tvhcUag;?S>JXW@s1G7R#`C)ZILy5 z1@)zg`l2lJ{J7qu|6bk)pr>n!wP9_sYNLEXi2%RE1`x{bp4nIlECDe~+$T3*a4k50 zaAiP@M2>BUG3uYkw(bdRUo&(||F#LUZ?u^LK8nBjz9N;{pMG9{V!mhEZlj?MNwz760JVs!$gzmU@=_l^PKc zSnKzN>K_2Ue0lMS$=Bb7-gSs_dp$A2N?q*qc%;@DgH}AK)~%=e3;zmeJeQNDlJ`YW zbZ)gUtykFy2zb8c=QM>&k7AaA)5R=vlhoxivp@AL^igx~#VH;fGw1lk8K83qfptmA zjps6eV*uXhqDlt>rit(pnfa4e3pGAIVFUNxzs^F}soLh*UFv4H8mP9-T!!xD zYe``2<@X2jf3n0b`2;mjB!|jDhmB=IV~WzLz(X<)1_9eiE;FT4XGPL{UuDDh6a2Ls z0q39OLW@zz=+2k5-w|i;SS1Abz-}CR-8#|UEVz(QqAGGtqmN~by*1U+DQuC05G>(+ z?KrX7_3eGLwpuqgC)ZMrR~L6e`V#H&aSf-mb?R|ZqX_@uPE+kUWgfj}J%)m0MJUf) zuc5C{hckXJ9uw_*vHB$C8s-5!&gbkI{E7qzA}O)!s4toi=rrYD#kW*rTWmd!p}iaVlgsAqx}_t%wo(DjzQ>rHm|jo80f$UjkA?1C8`? 
zIk9G{_C_opM1#Dzh7)+ZJip@ANa|rnEkZ0#Q}3c}F+7G?%cM0=)dD>#$)O#Bzk5;~ zJjFFP5h2A(z51&0hCJht^wrKUc=_(=T{oL3*^a_)O=sfaZIflSaTrhH+8FN`!akeO z0h>v<8E;l|RzK;;H5UkCMKL#ig7$ub5fnOCaQ5#!+FnJ^E$}7et09-qaU^=i*ZzD`nUR1~=i|vM9Nrm4K%v8pLPsVPH|QPI*8dD_LgO{rEOZIFJwyy1codqspGmIaOi#6R2&N78FyyCNq zVG>CKq7xz-qX2p*1<~}BrR730;DRH_k`YD+kIEsdz|(M{NjzN0#HHQj31i~wnG3ab zNZXpIas`q!?GnP-Dqlo!mvVk7q;=9@|4tkD5h5_Qdh?Opf_L{JUXIqnP#W4%mC|7g zt3giA)nvVY-sPrFrT;DBz7G-l(srrxi7brPn~bGA=VQF|u&nmGq;sb?Tv~3`d>8dH z`oTEke@vZ&$|gl_K%;MReBZ)N9C`Q*k0 z5cwy1G~||{4Z6*4IPGB=Eae@JDir+}?X2D*%K;UpTVVSu7uK#w*4FfupsKofyGxYg zS%%S1VcP9;?<)6wwK(^ncc2i2SYIEsYFqHx>yJfZy~R|)b@C+~a_bb^Fg?)v67Zi3 z`9$CTVD`C8GOVX1j~5=H8oBXc^Qb z5rhHk+^jm~J{)-Ludr=d0>gp$>@r2cEZ)_I8cmigiTEXZx~Q!;&yg}N*KnX28bpKxhalmWx(lYC9*)zcR! zrtG!MOkDa(p@1`U#+&Eft=-_Lwu#07{}*r{%gx%nDeoc2K@BHQ#ty>Et;`*i6Ug@Y zX^EV>A0SQbEh;AjdEPSbEr$4!Tz2uLD@T`_`t7~X&FGT1q$jDNtdP`;*Rw;G+6HWY z(GcggnB+Ei*7W(Cv}3W0t){M}x&dq!F6fgDLdK}q`u^_Lp5BknOV(ZMRsFrqnV^f9 zX(6%9mFFx6yhVjtz8ea4ccyw5nOwBuH&&7PiY^TibyZ%8#aThJi$YXoUw#wC%U4ol$ZTf1>132Wp6|n1Pn=RHB zh+(o{rEP-wYS9t-jHViY(|}S&ov{XZ1&>>_$VKGo>_6XC?-O~yR2n7)Unf}uV9889 zlstA{-CPdym5ypvJ|>J19S*rBXq+V77|&SQ``$$;8t(J@lJH;BF`~N=b@TRK=^^ADWTF>X##wqIaCW zF=1R~jxj}_93ym_-M_M2{=nwqv%=@HnE1a2O#}(QXB{I-J$opuJ&RXneE#Gw;PpYc zv!z4~Yv_P&-3bfWOkEtaWRP^C%bJI-Dfy4ij1{~_b>1Qy;Qk?4*kNH#iAl6p-}tBc z`bu20V9??`ufEZMmtne2-b%5gR#&GyLZ0N&*IyR_?noxTnY*6pCY`005_v+VC=e9I z;r-oVt{Hm46B#4i7j^iZfxb661~6g{cYUXm>A^LtSA+Z|Hhlci;zz-m8=JS>61L;P zgQ=z_0l-SQF3jZ^4i|XUwv(SYaNCKSsqVJYjy!l^d{?S_wf1b-pne_i(a(fF%ZEc^ zPpQ{4gE{kk{24lcs+WI9n2(0i*t#k)om9FtjWGCl4sSnsvbI*8=I*%6{Wt+wuIvZr z-%XQnjzCK4NXUiFS7+cT6MH}HYPhWhq+>vk6b2YtX&9u24y8j8 zkdkgCq)U*7A*8#zOB(5vmJ*Qe&LO{p&->i>e)m5=fHSktT5GShHdNuELb>hi5&NgS z`U3T&NR*Z)Cej)`7wcfLfK*WBN+8QD7F`x5sqrW2tMpp7q`Ixv(SWq#i_EnF`yxHl zv}oWrzcDSFMUc$2?I0`9oM;9ED|#?I(_9v9d%(kvZg#@s zeo%-bzDVl(l@&0#z+iZ8?kkw+XTHf;l__lD=r)BYX-bZsyX!5Ako||11?-T|%aGw< zSxIyNU42S?hb$B9f1WSY&xO9@3n!SdWZGAZ zM~s;yEw5jk{-AiJIh1Sf#UdTi$ezr_+?P_35>GwjW4>*c9q!}BG)pGp7=@ggH-pf$ zN*{+lIt8%H9r)X@>qaPCVyy!dQw z4#ii5nu+yP> zIB=hPjGk8-=D{;0mM%5C8osW=zGHLnIs0hTsgKrk{PKe6K7K0EAp8`(}#r#z{cskx^@aE{6Cpy3|7>J3_#Bjne@Q;9OodgbQftM8iRMNm$H%`XS`NyiGB z^4b9@8Y#D27VP3v=XvJ@0)D{mc_2XOn0s5s{e4d|f>^F9!jt+gHh0&H3I&eBu!0L* zq+04b(Jk2U0cv}oxgb@87T%eQQ8N0GD!g{yXva;j*Fdq9JI1%8ukS6dQ4X+~VzTa@ zEiu0bMT9aST$Z~jOz|peWoC{to-wMfPL9dmf8@F0|4wxn*O`{pyHjwNvYbPdCRd?C z>Lc8%HNG&kq}c=9q@r5+Vk$h)X(kR|rt8HwSO0ZX^On0Ml1 zOmnZ?+)T1*=u385aG?!bQ2#$99N4)L8Wb?`|f zwG@F36B+ET^EcM@(r4Yv(7;3M-u3LfOyWJsXu_8+jGtd38x7FY1Y59`cUo_@o*Anf`h6zJNV7tJcE7deYEX zTuXI&?wPR!DNPc+!l?77;y+7Bnwo%_$qQO`rwD&^jYQS*yc(s{q+{1q{cYqB8x-;h z7ewbn0QU2x`f zCi{tX#(TDx$dzdr1H&8CCCKAH%0cR*prJFVBx&D~RMuQ8(kzPgFPua!M>946A7)q$ zAFq;MFO%THFHRnR8bXgp|JlI@!rh)yuJ7$oGrz&$$UVr>2Dp|lFrR6`yLG%r((!b( zE`gI&Id6$#EVI2VJid9cBXiC|Rt;mnjSUMXyB2QkmrKCKrp70o)>Y}6UAab4$El?5 zhchnAi1ef*C0^f}(r+#bJ+SQ1*6d^N%Z*W15bjjf?)BFeA^n?!F&Z}&;m;>=_0ps? 
zDgC_`cDj8FEH@EPD0mKLI>R_3!2q}Zc1OvukS`oHi!i8Moc;12keGE=t)_23K*}W( zBa^a6$s#A4*WmtmaYiO_O6>gY=terq?B)_Z-|v%64^t9lC2MKD3LhLu{Iepei5U~f z!zZ6l?%|(1oS$pV+5bxhQv>`q5H_$F$1Y6{D5SwO+}{GSvByNz(K6#M3*GWDEz<$4(EA^duBeEAMDBcZb@a}Q8!yt>f7Uaj&{H8F!iYq zONScO0TQ2wh~NziGM~iS7VwJiW2nAfj?|(mIe*zuY@+icXy-{?HTe{^D@DpX@MYA#MRZC5&ZxHgpJe-qj-rQ7maLm0W&w6|x{GXIGh zEl15xsqXoW6gmsV0#4#P!G9!2I+QJSl6Sxj+#sy0p$4mPW`ob9 z(XjAT@%>MyL7td#b+LA^M@N{0SOB>lr36g0Nqn6C&Sk(ne~|W+P~@&+%b5%2I|k^D z3(|JZxVgA;n>{j2ip5&S(H8gY2)Sc>oG$u1z!hR;qn-O((hYY#ac{5kzHThJy)Lki zjA%ILyKA^X^jstV)3daHok$#Z7?-9;0T}+{N+R>mK6MggO68?|PSkAmY?jU9`#*t3 zANe*wq7 zb$Gw>j2pf~lqJNJlOuLldG@m8^z`@N9FFuqIUEb}Id9@zg^e409j&HlJF&*ZSHt^`;{7c!zI4uA$)8b=XnyCAd%MXuhQ6gI8)(833e340lGE~tqywh& z+kC4HH0wYNgVu=lZt4rnzjPjcg6x;A9bV7e3E>6LMtl`;pl%!vE@74ApfiT{u`7&Iu)YR;ZF`3 zx!^{lAwAPa!|VP)?m%j#boY$pqGjK4JyirjShcQqKPhy1caby)zJ6D==1*a_svgAv z!LKUKKzMDflrtD|%ks@_Jj1}KTW}!!&YdI#d=L#b2_tYbhWP^*u0$UCU`x0vyG1wYT%|GGZU8fu0)`WuN zlvjsd7H(d<;8{Veoy(V^^jKEjsjhu2yzUAi9QuC`t1b z%1as>CUiTn{*~;Mig>|%wg@1kON-!hg*UP~&J$sLiNh)j8!D@~uVC_#b*<8uBdoYj zX}y7TkYNz<5!{&eWVl7|BO>#tRW86vwnSvd2O!k%=L6QXDoG3`i04Kx>o^bgxkPD2 zs^`(k2+#7oACYUP+tHL;oUs}`)cREH#2!K|ZkNQCZgoX!a)CpXi2C4jJm3wnR>6ph z>z+npzQX_aC&LW&J8mLSX5ZcOiWa;oXG4pC8*v?TvycE4GVn_XOlnM}b;RHFj`keN zse}VA*8x2HcbpZX50aDd#v1)rR6{9^`@4&X1`}%?_d!mRI!5yoil_!9Gs<_oOfL|k$02;p&R$>gJFzGmjf(9cfspl3GkMvQOBsXRYQIhpx<2Qz?23ZoqqXiZ6c^L;%$# zLeZp8rdQIHYyvGQvZQfDrabZq$ssh&+mFDL#=lSz)Um1Bz{S5Zh>2e2C2BDHFh}fZ zf5gzu?iUY-5vM_$JFcnxA?(rC(3_E_BG`?A{dGn_;`PEIn*G#KPtxLe0vceFChcb+ zY4#Jb023HGOw&$ruA+24=g}!dSJ-yyZgXmpd)^l4amWKI7ixvxjiuG@lKRR7;yrAdN&YycNK1g1oU+quocvcM6n%-jH*e09SAxCCb8hRtLSZtcMr&@v<*DfndsRY8%wtwf=aIeg(|_$ARRndovFLWJWs zoI_mT=?^ApDqsmAQa1Qg)iY%j+)(Wk;qQ)-K2!@;`Mg@^p1h)_=~2q~tM#~C@AGm{ zs=7iU7Ls<_=sGEY@QTI+YoBQ=>a-Ap`I;4`qHuw1p-13b;2S#*wA7dC>qvAH7g$Tc zB2JY`)a37a!|}m5+|uu-M%t^Jz-WQi4I7Z6_&a0W{AN*4iDLp`7KuP5!tqF4nzOdq zuh4bL0Qxz1mpf1u=y-b3fxOi6qZLW{(&J<9!U2UI`Xar-hS)Vh_0}I~Y%Y91i?FcO z6(iw>o4i%#6qg*{B5cwET@gBd=wo^R2!z4Yc`iggLKuhduvj}&`arWyLQVAK)xB|! 
z%v>$eMo;S>IBidTrt5o2s(#;xeRA>7{JqyR8%Aq+H^Qi}*YT0BuxC!t{0NRe>9#1O zN8?pw{cMd0-b`mSAztyW1z?u(FVDLZ;ReSoiA;1Qk!D5(_Spc@!3O^;WJnG6p08C> zcdhU;D6Ppr)I&O8VK~ju1mWgVM@Il`#woe5M+i?}eBq2r zg&wznC6oL)qs;bciTfYuYiE>TuseQkFM0*XtayO%{5jBVB?uJd7rO&x+Sce`E2y5E z^^0?E_XynH_55~}cp6}{k5nAPgACXodu?HnW8A2Jr?K@wxL~4JdZ=yYC3Eo18(?Et zS`1-p98}<9RPO(BwsSqko8kIcnIDTe?}_!(;VLs7!mI?R-7kP{*m+9yfTEZiUp3LW z5lAbH>5ZX=d7XJfF?pLc&>tK}r`L{~l-djr79F*_X19Iw(sPa*_SaMRBuEnn>Pw~o zav_>JzWFWir%jZbEv)7EL@jxmJJ7OQJ4Gz-^Y6o#P`dFENJsD>!N0$jAUpf3xCvk*-q8?HOX9M} zD$ck?JEJTayN)UGNEpr?#{i2C&?Hd|+qQqY=0KzDglu)V4>7$xNT=vgaev-*4`1eH z_QZDA6RXQa^@~)$&%(6D2Mmqxx+Xltef7!+!4DZZB{kqkqR=*AT zZgZU|gpIbtJqDAd?NUXLUJy%SAlROSSlX^#^_HwS*4F^m(rh_4YOrpP3`3wr5IV zvnBG2Up+7#@v9VrZrSZAx*PZSTzQE*@s-SP4cULf`bl43YjO6yfwAA7c8C8MO43s3W$B%82HJ-v8Pcg@Qn3MVr zb7LUcpzSzApo>Y=GlW^&g&A4%F`zHJC&J=JXU|MB9{aHbjqW6z9O7CQKgs+gcJ50$ zVd~!>wfRsr@okc#Mo0GEY9aT2-)5a^5Uk99qykCHxNGalDliAqYBZ$AdpiySH&5R{ z&F2nC_r^@FKK7D8^_)`JHOIC59>!k3Z7A2Yq^xyP*!ncuP(tXq|7AJ%PHrB_8d!(x4EYWjOugtao6-mGLD@wy$5%(rJ5@Y6n9LHJvEm9OuGnniEceZ zeF#IXG>Z&pvGE;nSc`ujptAjc4K_fOG0e#CGClMrcU20ee~T}zFzOLOwW%4*cWR!P z_LtC#!N#tuFw&tEF(C$BVS8%|SR#r?*hDTXUJF`JmL%#jzrLCk+B@qEc}o%%~%LyRTh6mthrj=H{8E@DWk>$W%5%De$#PH+&;Wv)7wZwMM)iC*cM*9`$ z)e!o7$Q{{O0aX?!Cu3jVloN%rYa73L6XQ zXO6ZtNg>dWAcq8bMRPb|>IasKrtB52;kCgyI+h23_GR?4?8=ce0 zKE39(COP0Uuxr0i1AF@bg!b#C<~<(>=|QPp*Gwmm5%a!v=i_u{?}}4)r|3#zvWvJ1 z(Zsr)qk9a1!7WZlnLVkel@hxVR|+hLG{w?UP=MI}5UXxN<8@5xxEa;baF|x743FK$ zJr=_e&U;(J71DAP277CpPNd7pK6G85Q2V8ZPrL1M3W!mW$7DrpBy$~09NRSQR;G5$ zSS^H!Tt(iJowZgI3py;eFdy!Rf|}$t70K{}TGhQWza;pQZQEPe9<+%i@XH4)(*C8g zQgnCult+EW6wdq86i?u+5vdAR2Thk|ng7FF#W6xVk*s%zJMaVt-%Eo8(4T#LFotLspeNTsQw-$E3^<@_?gLRJa(&$Z!KgY6yC9$uBRMD@< zH5#vqJ0X~bz5XrVmlj;W^i49#bx>>A=@>>b{A#Enwl*0DT zm}n1%;RU&%$QS_F;X;ba?!bUGX7DIaXuM6*fr*}aI3;ve^;Zt&*&|`F(MDk5=EdCD zTk15LTYHjkX!wfK0_i0`VT`cVv{bEAE$+z?Y|+ny}Y~xEOr?LQ1kPn(du;? 
[GIT binary patch data omitted]
zL#;KGUky1D`=vl0&sjvn^W7j}CHxss(`;;!@MZ2=5%hCy>?;0mymfhff(_~!UJn-Q z#og(jLK=dMjbJ(5cdF%=aqidXcntia<1pbnV6g4d_+DbvXDT_Ng`=Ys#qCFn-qi!; zHlH1ubk$D@hcG}<(ejZP1sM+q-X!f7xex5+DV-Q1cRTp1v!3X^n8B=No}XqB+RlsAGduVxHmOwqD|Q?UHY z>N@i=O^FpAtLMRGk2kr#Ga>I@-lQb~iEn;ynh~w3Yqo#cZJ%*|^Uv*);QkihSoJ+V zlB2bZ|Gz;jA`s$4*!%LKR?ZVH8)>bwtcNGd<1~mUy?qKfUOZ^&)DBe+&Ju|3)&zGAl3d_>TNPKtWB=svYIkV#YV0 z&1}~hXncTM^ZTsFhqc$nDJ`N-t6#rKdOZ8r!QLv~^0CoYa!;k-_z%ocw_Z>a|x}RB1 z@e)Sas+Of5)WA_8ML6_<<`se~Q2iwO5)c|FDyiRX-2P=eJ%3G~DqLaIej@AU-o7Ov zD7DJS0R&Y)cd$i%r%|p)$BH;8JeAHA!^~6XPaO^|ju<1n&8Lw^T66B=fU%Gl&VyS% z!g^d@k@sfBKHG0B#m?^q9F}qcKzNiMIfe8ayap(!S&Gn4R=vLy1)f9P-htHmna$NX z-uMgUrjF#NBTQ@x^gx@>#mYF1BuBrtUY;}EcKe%tu^IJPM1-9oIYLWReF5sB=^hOh8daD95KI*;A<{?7DT5>z`UHOsH|>_RNa`qTPS$YiM89X zBHo`F_s?VU0NowfyF~wKalG<~ccs{p#IH{Oe4Dpw(!y^gS#+me7)GCwfycPkwf~kR zI!M4O4MJLA%__LqM&)ZkS$sxobFp=5AD>}2S8jzD+CH?FZW0E)l)~Fk%{hkuY*Cw2 z>r%Pb_tb}Tva)I;SXP2!VI`$IpWZ;CH?AQPrcE&V)$3VFqb;X5@=UJBM6yyc-RWzw zY;yq#RsOGpI*tT68I@y?D)Z7c$chn3p0pq|9$jK&CDwc1pH^P=u~nX1h_6wf8e?xa zfO-CtVfbv7dw&9sBg8M%b)yOi_rm_?j2W=(HFq*((kI*+jxCw>1S3@wI(^-o#%x4Lg)Q$M$1 zvLBs(2X~M% z$AW9>T-d@h(#zn6@9T4bZDdJ=Y7Tgr^X?+a-|J}MjyDbGC1SxeZJ?tZ;KXd1E7a+Z zdK~_tQB>xXTtVn$o0mG$j}FG7%6s+p5Igym#KD{_#=#0_k%xB&F|r2cl4evDU>kIh@~rEtZ13LNd~Zf8@Q)U(IE0)x zZMX@~5W!;udbqr-^9zwNCLcXR+=n#gzk(_`qPG>8#=?)iK(83Y-_*Vvvcd&SKN{%` z)mPjyBmM^>OqS(sAv{Bb5v0+p!c$G7wjsBoyThWiY<~~Va@=n98!X`zOp{z0)9w9lJL4(>nlZHZ zN{D0BfmGgM7V2ai{{J$;0&wMOnJyHCHU2es`kzID4MgzVlmxaeH5lcS|8)6D|F_Gx zDC7q<5-q>t&1}iXoCL-SCYhO(dK!n^3z+XEPQ@1#FVScf$=b?+Rig=!mP#9OaDY&b zN1=sBHNXclVR|_8#)61TVPIW5~`2oi+9W(1&F>j@;D;h9z zNup$NsAhDUt~{%`lV01t*>j#Nar1xjTr)`WZ3-@mM}F)vLToRj+v@KRfRzLA7tLVb zYUt^RF;>|8J2=yLw`%>%i>I9@hOS&?V^MG7hXCpuQj=+ee+I1n>EsvkAEVPJ3OrCJ zD>bi4O{k#@MP%Ca9P}1@L70EcgP=qDC1PzkGto)Y5MZT}0^^f@S_6i*Xb}jm@l6d( z0Z4Y$2rcDybJ?Zgz5udaO4;=6-|DzCNIIKor?nmoD)TftJrap$U&<2@2Gn@d0`2{) zKRATCLP|H2*X2XRD~S#zFq0mc$UP4-@Wk9t=NP9LZk^LWsqZhJ4KHprU)UR#6^o+!vag$X77&FnD{YYO=?_09vnHXM zocAfZ+EVk8<818*MhT$O!~x|(tpWA*i6xlcZu~rZpAQ`rh#Xqo#|tCQ)&GyIuMCK?;kE^lP+FAk?o^N(O1is2N?MSv zAp``ZySuwVhLY|s0Rg3l5EydE``{PnJ?H*~KQOVLz4nS-#DoMRJn(_7(H-TpE5sh& zgzPR|*n)dE0&xRyO*(@#_#%4ip19`_IWwy#5Hz;ynx~UT1Mb7FkPd*z(}gpXgPAMzY@Y2LS|vh?pR(;s1qZ&8Ia>`1Kff;)~Zho%ay=yD7*&ETs!_5@z5 z$csFS@>aTgAFzR*uFYO_69potFDHT`0=@)uH>}fm4M~3I(Bh+xjL4bj71w01W7q1D z;WQTVbGlmPL1g2YcWB<`<&@&Hp^RmM1`9Y^RqOID_DikiSvzr#x2yd(Ll@K>CwW%3 z3u0yj4Lkj5>dxHV;rs|%V^b9^ae4PC3vw=^6ztaFOX2j@`pIYKxD1VAymv}djl854S{92Ac5-NHLD>5Y|>ht_|N53)-d}Ij*MXJ|EOTB@R87+CJ&zXFHN&w zoAsV%UdCQ>Jv`d^5z6qNO99Ut&#-(?Jr@&ABS=pSRwNNX(p{6TxxaPvCYGA{YTS#P z1npAuJC&ode?L9RY&&oYGMDl`t>YB`L9K5-6~8{9RjDTEci9?1i>i(`;YqEF!rklt zXwCZf=qM*iX*8+D{4-Yfzd!odsxuvE;xnb9-x=$i0=S>jw+{5|5Ms}y zbf^^ik2)zNOJglY+y)+Hs#YHDW2D6tNim29<%fM8>H|7&&1b+EAQR1OVUOqVXkyK* ziN$|&*7ULQg(hI@XJL{0qL7P^N!eoms6YXM@kfoni^B>J|EHoI-zUD8aix^jZ8VYb zoPB#{wEqkBG?EJin1yQJHSZR^=$JG#3=; zNKEKk0kH(qB?Uf2{P_jN`?}4kEx{JfmT32`6D?z-o=*=31( z;)NltJnl8uA>|X$S~^Opa>%I9I$klwi_Q;Zp62VNSt5nkOo_S#_G!EwL$w2np$jKAl9oZ>5cW~LH-K(y8QRnRI=?VOkPYczUht*nD zp#1Ld4HI2SPH+mP+BJUdU|t>ctv`F$QLw>!RHi^Qr=c`fDUMatT6h+Vkt5Hp!m{e?b=u($(K?$R8q@IEOJ=N@#+?SST-V;i8j zc$V0CsWIoF@7MC7zz8EDxF(`E%Z5t_p+u6 zFA!&yD81=U(|q%NtbBXl<;xJp880MLH@szw%ih4@h;sh)sNkb_(r>bP8_~X|AM77( zesuK9LN@zw!KYu3EIo10va4Dhb7L8e75X=@G{_@Tzc9w!#63`g??Z*61Ec8u5Ix;; zuiq#pTbjo&+&9gk#J@+q55y|{(A2R^7e+pz-|*O1N&&`V2K{?g$(UU=BVc$d5{UrW zd}?&Yl{t5dZqW22)2sT8r4yQAt@w!~f%T>H^BOS0Z=X-(f{`V+5eLi2r#jq?*2{p1 zWK8fp|0qEAMt-fP?hK7c1nXk!DlJ}aWc={WwVM6Soy)nC6*GdcDn;+&(_6NoTYi{N`}*Q02ZM;1 zt5B$fx0O^{+vf#LCJ!#u?%_zo(M%*n;GuXZwz&&#X)gpVqm@kj&fvX!+~Q>vHq9k~ 
z98%ui*O6N8Y>~|bi{&^^ib-{sGrrc~=jM(GD5U0;+aWbjAO$*sC17im6U9-B4U!3L z5MoNsC)*u54Yl^m-7;wXu6S?AkmR@Y_wsxE7cj~q(218L^^7LJ{CgJ6$4dI``Tk@2 zOw?K}P(NWR@B>bCe%VBr>obTy{u~GK(e#wekT%nh={`Er{0xfI1U#bmUUwj6P3KM; zB(W$j+@jF{jv6=>;2vnZrR?~u2(&J)jff}almA!_>99421^~D@n8(m}$(vs;IA?Ag zX&!6!zp_l-G(dI-J^It;{}FX2)gHNV=uk@?C16tp>_ROn9V&!W5T0P{#W*Y(-|;{| zDKP^k*Xv*Wac{qkH)F;Kb##MGH|2|DP?MDbF=zcl-2T%SyXCeEWy?_0q{IPIU@z`r zmPp+c=RUKPUl1$MmW#&9m|D}8Gp3>RW+D&#c|7?>!E(Rm`yh7WnwC)P zM9dj(#Ism_hBNSxKu(2IURK{{!PkGD87bW~z%KnarM$L(N5H6!Jgu}+QT|e6BPNP< zx~UPoOBQ16YChVwyGaSm+(ogHsJh#d@mzW0`j*7_C%_2ECe1u5Ino#qCzK-B)YD^{ z{c`9k1rMLVX8!3n!71|`a`Ngm`2*{H zc&7>v`OOc|AaKx7*XDl2c$GkF0JZcWxC}kC>SOcX7xhHKv8V69jfW%D&39Sl3r0t$ z37|-AX)t5I@E0(Kea|!ZYWPH-TJ*6KY^K3?mnSs#4%jd@oRzc{%`~16nV2neVCQ50 z`QdJSey8#KTXt48V)9v09F~wx#QSW7j&KZ|VO;-fg0bcK@m)t!mlP!|)jrx6hUuST zo5XgJFeD3wr1LHnE6cIntjH0^1aE#Vx)JZDW7WAYpD;`K1-087&cIc?U+xH^R_-mM zs2LNkgYnPDghI+kdUEEudS4+v({Iq~PPE*;?bsK$@{Gltypd7$6!_H|TOC*$KCHfT z=IwiyZ_{OKEKh;pPG;u9^6Ul$HWauV+g^7z4WD#pdXu#oCy6=WaF69TMCvk;@jPzn zqRU{0O05G2-2HmzO8#$B`{1BlSZ@FG?%s8rQuaCPmPA{f)rY>DCNca@`raP&C1MCY z+G2`5RbmUXeefns4g32y>W*wp%ac@>q0tBi45OCYL#9}jWuwgAEywwD|7hMjHqVY4 zB|#2XF+BNOiu{&+_sP?VqTA+O5&fb{Udp+^R#m-Xw4F$yOF^sg&xz(lE~)5ME@On0 z?LcHfi{g9?Xk8lo7Byb{?udE<)#y}`%BR#!hAh;lK63{t%bosWoIXGob)K4G=Ev0FOLDVKrgCso@rX{?r>IhKDo&>*>#qyz^khdv_s`)fTZ}_2 z_f;qXX6)@6!u(y=vS;Z#T_^3q^0bQ5-(rJLyTja|mF?{BVS_gtQ_=HctM@bq_K!l& z`i4ZTiks98uiXwAcvp{rxWp%@?o`PU&ftf*%c`S7pmV*+*vcMMhH6!v63zbrSnO?6 z;ZGDuw2N-1-O;mZCF_3dzB>I!v;8k@M9>ZcDs4)t`K5N{XaR1Ei>9*$p{n#(#muDEq2n`#j;zB{E+f3@sYbI zdr|BH_kI5C)EZ{E8Y&U4*ktIQspkq*?&lcat3}SAmI{m85JCM{p0<6vYf*Q7*eXVh= z|1%fXwH3zU_WB%|_q_6(`31lDEV8$CoijCwqY|J=gNBMo)xS~t565a8;flBn(l~62 z``}bUf^1tna}Pv{g5rit6sJdF~Ztq{3FYCMkM zdAY~)=pdz@Q!X8a$V32M+i%CtV=P0#E}ynap&wC#zKzZexpw7pd-=@vasm0F0vP4u zyXMo~99jwzB-(qbbsw{r;{7u9V88~FG*6lcB+ht2`yhtOR)9ShEtr$k95~Pi8;Rb_ zz1Oj)Q8!-|^6r{fA^VNLY^Qfk_{1IV6qD;o2 zATb;|DLNd{!=S@ftU8|cc$~gHhBRz3Cvz@q4xoaQYths0XIhnaLx(Gy83E4_Cl2Gz?t7h~C5Laqv?PCA z=zmlt;b>Di5xTflSJrfn5}pqy{5xm9z?m}vQds#tmXlqv4+_{nzu35w-d$l;I(!|E z@IOMz;v;v7xi&UUY(R9r|1({M_$ z85PpDt#dEh5<2RZpTV2>|D)L5mRzYGKb7-HvMe2T< z4(gpSCtLb#NWo11&NU^coVTfw-1;p)yk~Gt;+|(yJ1%*cRZ@CE8Xvv+U!55g<{In0w1mKn&ZFbwB>xt!Q;SAE$ra!h)+jcm37wG9ZstP5}Siqxpw~7 z+_WJ?VNMdBJ_D!U{lxC^HZQ3jY+(dKht_8cof3qL9q}}F5{oIgVgi)9u)NUK+yZz{ zTSIylV2&_H*9P;=q@43oJ9&Ja#B|wS-mqf(Kt{&wm%I0*n|vsigy;BA^q%xbNJ0(3 zy4kHH*51^)i2XpRt*v1XxWRaAG35gNL!hZaYq(@F(J`Po|G%hP($)>)qtOKwP;coJ3}$tQRuyd{ywH)(pXad#~yn{tnQr-UYLL$TDhZ?1?`!jIJ{$%rc={8Q1Xk^h2J6?npM18zp#6IHm>5shS@_{K}?6;)O8 zo+RH4EEaCV@qRBrhcU?+S({6USr8SXi_PV6uhN5MUDApl_~o5Is&@3E@<sDsNnPMUrL5~;; z1V-k{+`>`U?$-GlpgD?XnRE6kyzHko6~E#Llw`-lb&ni>E(sDf{`m+w(|vn zvqmq0K#3q~x0yf1Q=qJR{0-S*NPpjaMzGsQSlrvV!m5v_&7GBh{7%9>Pqqs}_eJ2c zjSIz~LVTbSf1ElWvYkI6*6k`2y`Z|O4mc?9jfd8h$cMyL^Sp(~d@?%-BNm*5pkGLO zeJZ2Qe7>JDjw!YWAl%8re5go_6D}M!{Tg?&S~*Ayr5%(mk(ysfUdZmi*t?z5N%gWV z<(7+T*nvfVW$kdcR5RkWU^Ua7`I@LRCsoF3VbK;^S5M+LO~DN{q=e^i9<@7YpLppc zaFu1c4pS)bjYr0~^jh=*L7g$lBp1pVuxF8O+8YrH!d6?sU5I@9KKEwq^)mtzm#&|x zs6Gq|r|j1l9AxDugdHVOk~8_?{J$*RB#C;@f^kLfes~nCr{G(PCOaLcv)JfY5@M>- zKOQ&i*#|@YZ5vmZ+*1X^Vq|CP+Arrh2KQEg$UU33U>nC9z5V zI>}ooX`A|cRR}Nk9bp{BB6V!zvGTJcNHq;(h27}?P$$qSZLyh*5*z$g{C+l!+Qgo)$Y z?L6Uw$w{_clmbmQuR_DRsBm`AuS&+mlA9LZfAtDdFq-^;V_~|lC1K?j1H|&27d7hD zbC8iT@WAiJiXbe=fU#0Dj}0JZkJfjoWDY)hlEv`mRs!c;r>L$<>kCw_RNB4q-!xuh zSVQJY?JO@B5hLS)iz1;CPRmh8I>jbEXmcC|mPv-LAkgS9PS}ty(aCUL>Z|O68*jM^ zZ0xBz^DX58pg}!L|Y<@{CEjpdB62oZuz`8vcf%ODBljz8d&L6+O zeUcx5d{bYr@u>V!)cYREl>rfpB_*4kLhgNPnmu<~U4yHZ?ZiDx09aShWv@757DCV+ 
z^c!Rjy`hE7!p(om@@;)xJaD3;W7-Wpzl>Mg$86IihPW(k){k4RqR8BFx7WWJvyQl` z$qgLZr^#xlQk-EJ76i7ju;P(@M=Pi{dQhZ&XWNy?gE~0kozPby2hc`7#{`Xod~}`G z9~{6qVezcuv+zlXdxQGj_(7LHWNZI2@pT~YV@#ZhZUnt7`8 zL9HAS7IKDn_p_o+u2G~XkcuMNH?9$x^Z8*DKwPPb%Zo(x&;zgbl+KxvBxMs_bs!Br8^^8^fM*U2Duqr2a)In0)lkmY{Rcw2}StZ{hEt=JAd z2*b;Vs=cmf=I((*XKAjRv1glI0sjWo4BB>yvuBc9wzgitJCqfSH%u_x5A@ye+imUm zglhj)wimQQSbY^kj{=aWr*x|S2vGfdE`1E+lYLLiDO%!dUVWonVn0GKx_6$aXAabO z?l|oE$vQ+PJFMXQ_n=z_o}@b5+ZPNRW|Hp|onNl7%bw|=06O+o*}CW0^4d5$-nEX% z?H~7Yplt$d!iz2AP>Qv5JmpEhD_*RuZn&WtGePGk0GA&;<>RA8mkj%;e7s`a1#va) z;FZpkSE6U_kmZUpFq)t7Yhp2;bbA30A56X@7iX;;!QDTWpnvb^tZW*YCr`t<4*$%% z^5?AC?alyP@hr>rr5}{SrC9;lnjb}7gTmZptsXDO-IEuOq&OX(+je;rdU6Mzx5d}5f|lOm`3|D6ACi&s_`5U58%TCT ze(9qn@00e9BhJ$+?@M4GBE@#`MJ8Q{jF_Fq zS2HGA7OWrYZ^}13dF{qkVZo;k#g=qKvp@nA=_w9-;N5YMf_#$;1B98ntH^1+C=S&; zApb-T7oh`Aqy!>0VGd?7X7mp2d~YUOsGo;%Rl755fW$jOeoDt=h~M*X!UZSFha9AA zwV0}GLWU{IoU35ef0v%W2$5!^+g$j`n5AWag?OZOSKF_rKhiA=V$t6H$ZOmRs|fa! zTccmU59xO7$M~;>k`Xh&LrmXVMOx~=KYA?fq<&&AfuJ~!oNj=D!Kuvraj~v%5LvN+ z7?0t}NI#lM^rlRx&F$hV`j6qh5rjwt4Uw$+^8+MPjg4XyCkWUtng+=cdqZi}frK!f zccDzebxM*g>Mszk)$eET7xv!&!BgQcv-M8kC4`*Wq_A5#8?x@VPRK576)38B$`#eZ zg>Czuzqv^fcs{GzygPbH6Ud;;3<{vbem8?ex! zCG5DJYBLh;yD8rb5*S3uK!Z+8Q-VCCzh4T9=(YW@u8AzHgH9iL@O}9FtFGEt^YA19 zyF^#nJ1xz5Jl8H1x0ihwlR8c}mAJiL z#MEmni7N_pue3S5+*(OEDGuxcDn^H{5A(=g8c<(^YIL*NT2#1d7ssP}ydh;1D_lwb6K5^7{eiL*VNs3m@3zF!jkQ+F5Y z@(t{HpP1;^7-*C?xo$(E)F_@nwWi^SO?28-)-NI$qEm;x{~i$! z)+2IyR>rVnBgqXQSUx4xpFN^Uf6Id@lM}X|rUJjvocJJgQ7hGJDYVVie@w{O{Atig z_Z!`*eSU8?)q@wTIl~vS{zj?e=hL@AL+8T92cRwj@*ahOOLyP?=ZlNl-U&^pdFd4# zP@(DiGhtld>G@cJY#3w#R3Iw{Vqsng6suFq4ED2r*4UPN`N{uPnM52r(mPR6_j?~R z3_W*C1yZDT8viBr4?)9$?o>8XD%1MiBa)G-j=Lg4gk!@w%S47;^Qk-oEOOH~ zT%GC*jmo=IMw6M%YaoB*>hRT*<+Z=sjH|tK&XJM6?=jMO#B8CG9h5J=nat0%t1Exy z1A2_W19nP_n1xY%mQ}(gw+@O;hd?sg#U}MqVCxePIOg> zsO@Au=Twu#-IJZ8@vKZqdLG{k;L=2JsjxK*(*j(bzmYUGh*{>i6$x9zfwE&E_JI!W zM-Lq7tQ9>ZTWHM21;?kKDu9ZbzwOHqhC*k#U1d&ngu~AO&GkFjB7}X%8V!A zbC1bal7H4S`_OmxfWfRxb-^91vry1?G@4%QcX4<0r&fB+d4+<0H+`Wg`%b~W{n};@ zZ_gE17#KCTO401qmuOdR7`ygL-^pO%Mb`Di*p(PMC1Ovx+8<3CdiPajJ!RT{nWD!Hj39NwKT#@=K5Ggq; z3b?{-c)t;XlLffGE_$d&t;^D|>n~qgud>iFnAd#6>2&=aEWQs%Se4S}fAF~&v zWOhn1CF$(QD`cAG`}Eh%2``A8S3pYi(Qniw=|E!d^7iOY>)hD8U;3Nz(y=miKVYY^ z%5f}DrC6OI?LQPu+Wh@qjxIW9F_SIrjvr}Q8( zeW4QArJXc+=tXD~c@I8AaI>QpOXl8*PN4DVW#@$O*?e)%*v&G9(7(Ia zo%ElrFSGHS1O=H zxu#EMXXAcl(}$efpI7|Er0k%9wKWU&aOm0vv#YUY6|r!Z1DbC4w8u1xBpO$VL!c>! zooPQf@~;bB75A-f+JGJMteE>sVUnR!nBKj}Lw=I6cO|jzsD5%jdH_NlMV&hu5%rkw z-~Ob#7SwE)j>tgOcq+r1E^=B2Nk>K!tK+No58Jay>xDB*x6-zh>nIi^uTOV=L*~Eu zX5&sB%K5qs%Z6==n;qt@{9i?JdV% zweNfbl;BOv4yx;?mCCo#xG|2&2f!tw%1f7Q#r? 
z4Du7dC))JeXT_o9Ap;m9i);vbqRaUZ-% zkwIOtmEu(~Cz5o?90!D8Jtg7NL#M%RRQpQ=VZPUKO(T~}BarN*YlG5idM+pa+3MhK z#$**X<)E5sXG3&rZs_uElYcT>nu}8{A|yucJ+p2kl157`xnSi#5VAy~bg*-&8NZdJyq!VkB5mvpUCVohl33Q(7l)lkH3k zIjaBs0h}F71O_ZUIN-C_obXD5+~fq(bJ#d1QkbwT_5?F9KgeJa7Z(xHhkpmFa?(Z$J4~;xYH49frXq@ANVHQcNUKU>;$rW4MXi z-|2So6D*#^aKa6t4#bQ4WSZ-3@FQ7ZwXfEgUlMZL9_1RIu5ZgxEE9QQBt`Z0>F4Nh zg}jeI`Y3n`jV(vV?RW4V*Zx9zpmi;pvD$R4P6Fs*R()>T?@s&8#-s!Thj;o-+fHe9 z25^(Zv?lq;>(^;FBP|Ir1JP``CJsx>l3-%1G*a$sRDZ%k&F8)SuH3w|pUlBi#VZj=2=>A!bT-G!Zs z$U0FO!qlUg>~S8WLb4a)ZTP^@<;8gp8X*DVVU4}$Kq3A&4PJZBvOp!(FHqRnf=Z^+ z{o2q&(yg);;x&xoR&>mFPaQgx9Y5S@zjmN3z(c3eL1#Fc01JL3Dn1%!%+CV;;g~1CnQxxWj*U>x zj^lw(@JXfd?xpXC;*R`8CKx!8axVZbR98L3+S}XboI(ThP|xtUyHKX*Y2PD{Nv7iO zkU0&Itvy0TRgmCZn1pqBUjEimi{ zmCA<@KNbDzHnqx!O$G+O(8y}Tbfu*qma|GUto1J?LoiM(7@FJ$*NFtkhu9+XX;WP~ zNEmS)WaL`;U5IixLU^;FN3rv6C~H)cCIVI0-!Tm{qh49tBh7Nq_-c*H4F?UERz}j* zN++|a-x^wv^JL`;B=N@V4t%o)*1Iap!kIkx&x8FR^|Jh<)Tc#FZmUB+K~^rZPkoj} z7wUz>o=vD82g5`d9=KqFK*f2dFWH{i6^xTD+&C(hnC81{CFUInxVYq0uq>Fw1qI%t zqM=i77r0a7gR);4{*kLR$dfT-8QewF$P^0gjZNMk8c3TTTb4Q{G$!hsx2p9@P* z3r2OmzTUdWR#l44lvvc+KQ{jywSf2c(tHNHwC>RwkMjlF$WRAvw;Q6wj~0|ke@EP> zi21l&xEgLM4-^(HgBG~RT%iQ2IJiNSXrKM5<%4o^4jie_`fCX5Vucxo7MoMyA`=-< zeqrHrFr>AteC^L=<+aZqxV3zL(N~#mWo}M0JLXCtYyidXix}@_T@biD;3|&1^7#=H z&(CkZ{u{T?PjJ8rThVsbY6+ki~j#1jf45*h~?%|8Lz@?vQPk~)>#PZA{@0dT0 z2U>X+?j){V7MLGjBOQG;6jIaUdD_r^gGU8VezI8UblczhlP;YfHYjd$MHQ*b>d1@# z12dVlzw5fFx1qF1XU{o7NXjrh_z1lHE~_xy8taO~ezL!0>d8dI6sog;OR$gSxr3%q z<-XQp!!>reW$Yi6p)1X>Wm>&DR^NC>#T|u~?omCk^QzEG@Ug!5{oRxBH}~3bCh|++L@JmOsckx zQ*SKq+7jXBzQZHv#+pEJH!VA&d@oFrxbx>VNo0YhWk&3wqaCkjYDqoM^v|bz(Gp97 zX@YN%Ni>4<6xVvklwtM6cB5jKp1B~Ivo|>Ywd$m{mGAj6FEs@6q|YIrw|*D{wv2)a zswEvzPD{*QTZ7m#XL!xrnp!GZQK9TUDPlRf`f}Dx(ZfA;jxEVlTg9EDF}@Av>H^Wk zTrRf+^wifmpA(YY>k}l>=>E~%%x*I2CWnHJ?Xnr=38e&wjAF74dd+VUd)I6n9%4;L<=4Ew zRZ?2v+rbXvm66G(ZSx`~@>@genmfes#paCpSfaELPHKJeEnMU2{3;=@@%leZ`!M>4 zSToaeSi zUbi71(x*j!?~TYsgAsH)J+D|c_Dpp>Z!{HQ^=BYgMFwZET9&X8%^488rpOVI&13iF z)}YY$(zno4P|6)ZdkJGBh((0Wrp%#((-83(@*=6WsKnTY?!=LrJm8V#0 z3_ymC8nMAvZp+oTau?L9>(pU>25Lq25l$Dq%?^ZH*VF4_d&YHi#fSSM(8DqwSGz&z zZch&{{#0qcbh_vK;Y4z~(YUK(IC|L6i?jvv`Z`<#u7m62kouMEdlxPyGKQx~ID`93 zYTfIIDz9E1}oYAnuuHrsd}|HKOJ0DJ5MC6V5d$JGT|KBP|kj;=Vh-0;_Zvohyu^BFd2KN`UpPNR!YZ$B^rB>{d2br#inG$lZi@IU7{IXjMJ~TH>BUj5fwMu|JN3TqtXoYB z;)vGqNCM;5gy$EgFVms*VC_CBE^8`kE4gMrH7TX(*@&Cf)SIqp^$HHPB^4B?oH463m#&oOV`c} z9Xoad%b%iUqF0<;PJjdj4@uSrUF<5xL_65_ri$8lJtE7h#=>FQiC0EpYe|Hkr_La* zb*+O5j=DrkDf&{7zTFuuy%@D&G}5-;Uk-NIA&lO&25*hmDP(=p-?1TXvb4OOH<25D z*vY!;J!txz2odO$r&Vj!7~FG}@d@-nxWw>?{#^(+$zw73bYWkd;q-Q0KUr_CY;D5{QH-co6*c!^RD5= z)tD_p;0Lc;!L`&CUEVjXGuXZ{E6}ejb>|}Y8}Kc$+oL$ojIEsgm_Iju57ikzciZ-N zVwy&*Y0QLq)Zz8R6~!gU+>aoU7<`Ho0m@@{n2x+POy<}QbT~A%o0Pg2U14UQ6!rlb zC)ZTGPiJTNAMnN@3TP11H}}W8`3v*^1MNeS{wTdm9*XP&Uh5_K9ig0?taN zjjQ7iX-E2;FYtArkfMZGDhYiedUir5oP-_vWvh|n@q zxR8jCFUY&pajq}^D^&seOLzm=fPJljEY?;cw#ilY7K`fr++khk_1DGMJ}&Yk*qV!6 ziWHU4O-3)Gj3!Gva^nvMCptj%zGb!w5ea@3qP&eOS?ehF-qs^>O(`xk`YGxnRWYmP zmSmM-N#=9E`GC4%EbhVT=TN^~Fj;se8EqbM zN4u=DKf(Xg0jvdPTHh(&QC>{YZlkBGx60t4hQEc45Eb+xHRX~0c-hq_2)S%aIHrhMmd8G6*w6$a++$lbP@~UVAMCyUv!$BzA+9DlQbNLSDe~QZ@D{; zIIsN3`y)Ai^D7YEo&{Di!Y?&WA`p~334A+l&&(o!Xv%LaaK!LLLwacKuEDgef%*Wv zuQxT|@Un!QT9rTpTo57R;CVf>#Td-}gv-bAGn(qbh{^p1#l(`%al>FT@8M)tu@3py z*U-15hOD*HF6WvFt{WpYh<%n`Az^y!`gx;+83N$03(J&VzG#rpK_ zx#hO3c=BEj<>p-7+4-wRV&M2IJ{}Npr{QXZrFsD+ffF7hQTA%LcNrPh=%*IpK;Pn*X^$4%!SWvUtk6 zrnbCV_q^Rq+h@X9tLpHkI^!Y2r+DM}4BQ7}p)UbHNVK-SRqrh{edg{(rND2NNiX9S zcZ!*x4kJa7x39n|MPa`~bUj3JNg8w^39kBXSgtM~K7Q0~a&aOIVEX>SMB;nTq78+r z@vD4{QZ+4+Uo7!Hx`*@+(cl;yShY 
z{IAN%3$lE5bQ*lj4+YZpo+?sS3KhJ@Lo#4l)`+xS-Y`#x*5huw(3Z__} zM%L4mEIYxwGh~b`h6`co^?X_*_Sn4)gXhmR?j`oP?Hx&N1AqFp&17<~+~yIk(l9h< zyXkRR=S5w1-O{ajO5q_Jv6`8vJFa0aefULgHF!F2CYF6#c)?yUk=qn!z6H6upCAf9 zZoUK8R7ZtX5k1K#+^{P$8D0^1a&{Mnws%8lzONHO)YCkC{i(0|= zcfr}nvj+5rFoTI?v0V>VX*^nf$##ABy=Vgbk&Eu`74BO`XJ*FH?(K^N5#{2WES*bs z;M+NPPP!$svWB^p3LxfFSnYKUo5%zy*rn;3-F?V~XEZk$o++5gJVA24EUO|{;A8{O z@6DTHO7xq~$32xvExqmQ4h2}2>sS;VE9F-DzN(yOD}T%a%x6_!L+2OsF%b%k%SY93 zbReX|vA%f!8J1&k2$7%6qQ#Q&qR&nJ87#iQr zf$|!UnK!t-doZ^O3Z3s98wnrDK>=Cc3Cbcu82w!gTmGc;3-gcOjS3Qsiv6AqguX|g zPc#_Nb)}qi*wXL?s$>tATQ>%ED8jZD%ocDOhbF=hI((fj3LCR~-($5CTJ z6jomyJy5G0`pMxpOqqbB0T@}q*9lTyH%aP^kyf%=*~gAhao5GxHRJXrI{I+U;_{IIX_nbKOg|pzv_z%-8Z9b%j`K2?G>}WtPKJuw=T} zB_jExK8M+biaKccREfPDti?zF9;+*IxDzCQ)<_y_WG;^el9Tt5jaKmtyT^GV-M&e; znCW+xf3P{>2NJwnvaPQK5b<-7zkgEaZR*1W-QDjOu8@vZB!{cn;<|Ld4Zh8i!uPDS zg)gqP2rhi#BT79-+A@V+hJ)*_GWM=(oWqgzp!ZbzPD>s4@eWW>&2ZFDcgXuJmWJBU zsBnkT*Vmk)d_8t-2pqO}f9A~@qypbjR@+)^l`16z!>n~|`y_Vvu#=Rh(&1|p-BK1- z-#ql#o9kS)51SYT%?{ZrMXj=Do$uDQHzj)buiyWik!rsbzC!Pef!;_P796p>$Zqjq z^;^9(HugRatUnyi!ReqKNG5Yupek(dA=&J?($pJgN0#-)g0vQEZ!MnDL3BV`Bua?ESi`L>*`mBU z9`L~-Yv6+!I}>>-Vn11Q87+Y!amJM>QfCqBD=WYu{oSUxRETwl)7R3*{XVPH&aeWK z#$A+t_I;3 z_<@+(*`NBw`I$)^p+gMG@zb8tXuBv)Rlizs;G+;%b~p4j5tVkg7YZuWjrmlEWK;dH zt_&`eY+fsp4<_^6v*(06oUz0dl~5y@9lo27Rv^$W#&X=sG@pp3!lY}5uNDBk_sP9F z;O@$r`N^zGeMuXU9%VU^?Ij~?9-r19$}Er>8~pxZ%|J~3CJ!d@OLmv+O}YZncfgnF z33Lz(${%O`z3ukdzxzOmr=R}v+01t*d7Jw|6U6` zg4#Q20D17q)@dpK_h#U|8g33^Z4>G?qXBRUCHlHr35gwB$B*P?dZ7#2qszfv304rZm{XWefkXsF?Rg&3D` z5GJ~c58X_%ly7^t-YM%7+^fmvGfHij7gBn@pUJ&#VM4Q}!d}jwU2>Qi;J%A~`E#O$ z3@);mYTSGu#>*^~Bp$FJa}?u^o-EhE6*uukwSZ)CeY#lLx`1y{SK1w*|E?ZU+}KdX z^IAFWw9xA1Ce>EezWP0Zzxl|M|9 zsEnn5B?2vK;?NLVU)GMOz+M?jQi(8~iC^uocT)->>H6M1U3-Y}d+j36oZYbKz1YB~ zCDIWKcezvPN2J4Yq0k!F1`TjqMHnV~p50|E39f-c`n_EPc^|pD#J{*YKG{&&y6i$D z#*rsfo@eF#1#8%jd|Oq_;=)90lpwoz-Z9)Z3|LMQhfTzI_=AY!ZK+JE?OvStm^hjR z40R5kh^%ao2Pb?8*$er!mt5+ryH!bK%5=$ylOwR?rqA_!`3rOXA>_l{)TX67ll3yH z-b9Ir&ScJ(eZP%LNK$6G=Jp#l&-Ul2XWW^DXQJ0ALlR2bUa|=%9(8Q%ywBQ01 z`R)~%<|}>RozuMC6^P{7IpklF!0i-T$+KPlV({U*88>ulH8$G}t^;4k$EtIz`~O&X zgIIdS?i-odPGwT`5^AhBe2qU{haTP`;$#bazWfiPlVP zv{{3Dxzdy_4NG8$I;F7IHo4++U+wAc8=;X4Lf;hR%yagh={|8^VtkH_Sg*{_B#1xT zuy|XYo%u85z?u;kdWA2H*SHIW0M5cRMjL|U4Lc0d%KOzSroRB9TvPKjvbMft0zR8B z%BW80jOP;hzWTCBWos?0$^w4bw`0#3bGNJpzr`JD?1eF>!HDoh*gFSPT98)!Mz+AUVA#rE4CZIG7mCFpPfD{ ze;>%Z<%W}0?z0(zhk1TUWslsLINOA>@odW~pnB>8WkeApk6P4%Q;+|vjiRYhFv1Et zwAI%}z09L%|4x+haxF10>f>n9#2aGMxvg-Eb_i^D87A? zwb1>PDFHX978dNoO)$xzk}Yv53*!Fz8W`Car=#{#T*5Vz!yHzNUiSs8>e_@+`Mpk5 z9d$UNWeq>u@KFGFEGgIu%=HBhqb3$Ze3&Rs4(p?3`N~J*?p&wCfT1a*cssYw^bx{j z`C#sk5Z2L{?vSh88jUvhsoF2*N-pfCdZ9J%;>qvKi3K{==0!bYQv!^4865SwfPj6%2&-igdkVJNIEqz|*qE(V?|UwRN-D z(_I5!N-On`Qjf1p+Hx&T>z|Y8i7J7+vxBsopA4;gz(9TBduV&7GM^|fyV{IYq+nG-e$=jWP zrH)(3kw_x}*j^nf;cjS@WOve6#3BHztOjmlfH;m=4hVmJMo>RC@vp$C?-p7MlKnwC zLXiG$M+yM7t?~|g+(?ND?3(c;uAzx%0M}&?jC=(^T3q7(^uj~a%~N#!xL2Pe*Q@T7 z;Sw%e)=<+S4TvpW4~5(3i>ht=g%waxq!kbpL`oW@q(Qntx}_wCk17|VoyncCJn9uoO->DEH1v&d&ytJ;88`QrIcbLx2fxuh7AeA8&v{O4iWK>EV7cXmqbFIbNS$E4lN5a&08WQ{S=8( zMwXL9BbFZkf4$@V{-uImCDHO<72=pwxBLS`Znmd{4{r}PO+ksKrsV7sc0kAgju+PK zAH)cD?@&EXcqu}%Ue{*iTJ38+Kf-!~6Q=ExaS=zORwy5dBGY-T?2A+;nQ9`Jhh2I@K zAhDsToXr#PvgeY*oXkp^LmHEz2)ZsSDL#y#;oQGXdM&dgm43)N`{q;Rt+x^q`Mkp! 
zxamz*nS>`7B8b%)sI=6Mi9(L_&V&B@ja~NAkeu1q?n+0$@ic_;t~iV>mbl;GMEbsE z#iySZyjLA{3(cjQ^FGOO2>o#U4{10u;fy4_{e`pt;XCw>%{L6q@};r_$A(wq7ng&wD8P9`3vkxxfxTW0+A7hJtGjTY*G10=Da)`a^m_Z8f09Ih74r%NNf!h z9+;!^Il+43z}pZOAwO#eh^FQ@Kn>=l^8Gv*%m8groVV=(igIPjI$SXjvxiUk;24&ct01&$w#qzv$9wm zoePaX)|}B;wx3Sl%lJ+g^h`ItiTPrx=&ymCi3^gx`R5CIZbR`8FD;; z4-Nf#w4p`ycSdFX&x~?U+q6uo;-ll5DPWrpb9`o|gOGIFT((QqUe!XD9oZfVC#ly> zGiIDiZt|M^Gpsn=y^H<)Z%h}bA1T$O3e|Qthi!cyOP6MDkykj1&N3+NOExcvG^HTXy1)XJjoQ zLz@FJc6;L*oGoDYKqk!Ap%hJt>Dnyhy0r2SwOliV*THQ*D@lqXo6$&j^^e|t^h}f4 zAfy<-?e7f)ZVp$(40VcKg7r0yd-m@VroG-hJ47+^0~=EKR!LwFNeulC!9yI z`uqIz-CNG=oqO0dr_$9?;6ST^E;w-9Se)jchyr*~#DE04tg1EjP#ZX73#&$0a%i!5 z&LI-2=d!@DtSeL~@NrX_qoKk#6z@PiE>cZhzU6o~42R4maPG4O#P0r`ak+8_IclX-4ONg5x<&rOIU8&JfSF@0*AgW3mPQr)YGq6oqURnq^Br(AJ1_`g|6~?ARMS|h z*l&yGUj<3Yp!jriq=BPC`LwD^^C@5fALj{lP~_X4!TDzlrN}wq0>Kt@)IQ6LCRAI` z_#e$jb^P8SHKW^KV=Rj)740v~mGmzp7?lE#cQOX8q1c*iYq8$CgQgIN0NHMMLfOp1eJ{8q3`cP-TAmu zrZbC%PdL}ot|#$fV<|fW5^_1b_aj{@EG`nj(twxjGx4!ae1`^rUH^}f=A#9vn0my- z)s-1L>ESCQuB$Rv0Z!6~rUqUpLSZt%Ioe>C6(PUC$C{!9HYp#JKsiL;n&w5xpvsl* z|3f2Z8d)=YcFsp){H#J0>t{_;b}*ZIdIhI+fpdzi?O%ko48j??w9N{c7b-mHWjKKc z^4f}VE?ANbr{TfG^J#p)$QZ-)zAtMRK*Za=DZS5@pG%bXp~UT6fIP-{r@mhhi6ke; z%1U=hufQMe>F8sr>6V%HE#LEncj+nn#mi)UTAhEl=Eir$R)I@VWJiMS5EvJ#yX5LP zms?2}pO7F146}$0@rq1vx6sj@sp869T1l+NNjQXX+Q?)IF;7I_6UUu*pj%iaD-T$u zhO->mGyD5VfMi!>Lv+-M(9uy$Gxaa(t z^Vg&eZGw}wX_)><{Kz0M$2JUn%o;Ph^?@~|0EN8sk$gnQpLc5pU8m~y2ro9qve6T> z(soT7p%#;J7C|ZjE1}(}_3}OnI1(Q)nh4s!js~t=;yp@G&-N3N3HyLl_vl|tO(QSlg z1JVM-$z3*?6a4|f)2|SVMn-S9r#yq8{HHKIkFd@kU}dT{3Jw;L?ceOGW!0l*z`P0`vL@J=q)fFmPY9UznndY`;*TP>Ck0ZS%iA zDzmxWK6SCfjD8Wn(;M8Ns0pV{ds?mpufQ&e#51+>hdfg z<*@M3gXAyaMD4t3A@5p~qqH-ad70HghqLO`GJH?APp(fbl!Nh&Na;=)RC5yv{piFL zu#H(vY15?iH(P39q@n8H+fA9D7+#VF-I~4^p_k5>ky zueip$&f#iZSs#f!J}1Ok99_=IY57WlTFCYY$!2N23$)V3^W!+wKGHDKa4e62DIieJ z%_HT5>vo1W&H6*30p3Q{mj%(cQ*uUT7I*y>=4bIIqiwS9n0)zj_CtzxNXvJLo_;)% z0?XTk%?LG5{u^^^D`>QCFFS`!01k=fJoxG&v!c-=U*|)pxkVdc@T2Ez)3cw_+35be z@sBnwJ3@|1=+*72<0B1C2i1Ga|G1<$mUY{#3ZDnB(gEc2K~lir$RF2>mFcL&oi*fe^WG(di7hqUtG zS&9~(q-5=IZ_a?@9Mvi7VU)KTC~n}W5})T-XHgfewqxRWv~Y3Hb5PGoEldkgsGb%% zdhoS_a}p#eK$DU_PhbP>8)6ccyk%@Wb0Vy?$rZ$y#tRB03;QxkC|U^wVW$Rcg4&@T z9&cr@d1@n`umQ0X%H+706E^HL;4Sa0hKd>3df}$>a$fd>U%6%G7V}v~Iwz*F zxviVaH5rpaE@GU`mW6&c=I$%+9(A-=%W{_h-UC>Q z4~4peXnO7!PVSRR?YKvJXO=w^3rnDJk9YB^a2k%Tjfo&O^H zi|&NvOpM0v&Fu&*>S>sf;^%8b_GZ)k(K&+LMcZ%lkYKk|4QgL0#t4gl!)Vb(J6}Jl z6f3iIR6mTp?n8?4TUJb^hoO61$98odx4)Y#+D5_pzy54lJFGpbWxpt1GJVx`}p|> zHyB>rJofY(x5Qi4g_c+N^A-E1pW(c&FIgET>OCr4(htQ{-CjvA1t_=5-9TVD8d%k! zkeqd=VhmvWqgnNpu>g&76&?g)0BD z4%|X-`L~ay-Br~1*ZBPV^S3HD=7D}tM6G!P(9M}*&4Ts3=#dmhIQ`b?u^ybv$zHe$ z6|}0KHEVaXvp-zj{yN`@9+l(@o+oZg*M;)3IuKA{&`*CT+};K z-Bl*|N_o&k=5opHmo)E{!GZI>q@!zlIh-RluGL#s0! 
zw1i%sQ>ogqHP4p9H6DHW0-k0n@OP)HBL%(Ay@zF}e%N2b34@a3JQIf*B!$AK@A!zn zh^E$T6Ss;(R=bb>5R7!epUw1M?a<$biJI?U&mROZ9Oc|(O2l@Pjgkdy29tanp7?Fy zO7y8yU#`Jhq)SY>_O0)1dV6yUtww>_3mz=LzfZ1GL+<5ZpXRUd`@Z&d9XRnp0LT90kVx3~?U`9PH1=gA;Ar^qKPZn+ZoY0HSYU9Tf6lP78?5h# zgNOO4r^;V5#}hxT-SZX2Z0rZgJsaA{#4+rPm=FuhN|IeNWp4;z*tZSWQ^-sz`OZod z=2Bp0hV#(q3C5*$^e}1i(bH+{B-%F=7MtKC@L+bTu&OZluPgCRM!Bx+?6ankGUp{>ZX#p60baqz6`Pq|lf^N_IvNZ7B)SJxgKVb97b|ByrdhPgX zoTLbgyw1I5i^0x+O|AGzW6h{I(?UtBaFj}T(CZHXTgDH1$th_7yMEq^C1brR$Ga!a zC1aW=7JMsdUw`K*zz5EknEi5aM+sGrppML~>3dvjS=d>}4pho1FLRcJe&>7J(x}Jz zlg6~uTYACFK|htP@ z3y&`jw6WmI15x|ySSNrc!g)5+H-lXZ4JikiBKrVKinJ~2)^R7@C7i>VQm^+*8*lIB zFMqBs5$&*hgTw~XUx18=ZVvZDW4Y2bqL^3DF^XYWf>~a9^o^zyHY-o}EDHj(;pZG3 zjHKX%)C=}!uR6x-QZJ>IHZo8b6jtz7@Oo?2V2IZhUq_%?0%`g7_(uwPd_LRV9u)lV$R0`j;RP3zX&SAY)HWDy%cn{qP{Ll;`$K5 zVs`c#x?1IF@&yYWc%=XREzZ3HYIJ_-XWXjR2}PG*ElBqwb&I9U+S`NKSkG3s zNj@H+DBxt9T#M2gfDv9A&p$U=Aq-lJ;8>yf-4P1JdgqU(iaD2CHzRrL@=Z_btnF0Q z2CVsuLFH|O=ttLjS#EEj$NbeSOxBYV)lcX%^g=@c82WFUYV~k60orC&ls>n}4>!6%Po%u(K0& zg@P+1&a}Yw&`ryUjDfm)BdMkhOnGRv^=N`5l*B=EjGAXFiUT2q$gHGvbNL0Nt+mZQ zqvhRIV2$DGY?uaHFJ`B{z~-X=gyIL%NK+b^XJXZ zHJZMr+DD#rEFpjy8Z-37UHhc>pp_?uxv$-SV$VK&>f}+~$-&-8jsC>yR@LD|(~p^U z6Xx;F0m6l7$?%<5X(H9rK3T|xLRz+*4QDUFyJJ$`&ubin^vASWp4d3mJBG3c9 zQxuVtOo72s;2de%!3~cHhVn>XN=jZy?B%lia9LJiP2JUeO zp-QLN!`Im@r8juyL!rX6hlA9GgeJum$N2QGcexzFHT2rT+?(T8<~(nL59 z=bz8Wk{|u{;!#INm(rAELpbD}x{bqVN4?LK$!gQ$}*6JtqMDs7Lu=$Tvn@U?dGc!OD0&soyafQ`bsnUo$~BSRN5&e z9@W~EK;zBaw3rT}X?bs-k)BcdU zxJGLUXu<&=nCa~=M(5YdA^$uPlu@N-DA;e+Ml|Mf%2|C+k;uAC(?C4b4dl``2P z3)mH>g!T`H*V|P&L(|Pft@J`G<4te6-PVCL#~eb})wtp{iH6tOmwuI20o7eZN>jrKJ_ZTejxQH2w^!S99@gA*oOR#1<@lDD zE5mV;smc1ug!eUZ?iK4a>Iz*(5LMy(7El^vGrYRwZkkB9pBU&dpis--T~I8fv^A+g1C#gf5|I6Y98V*v@uHkYilnY{ z==LAzD{CA^3a?Nm9e)ZTxgqWqdg#o>u=SHq4CO=l{X%VeS~S7dWLMH74$;;fzHP7* zweO)RlY6ZiDO{-hy1$Z&6+XgLFl!TQLo)?DAvn++F-{SXVWj!jfNTi61!*744(?+4zx2j0H+rKu-YY(R_BLr;?m>P1 zx&iNuOfs_rou`XdA_SkvMyHE&vY%c%Jtc=?1C;&8Pgt|kSv-eoV|pk7{i??w#{qlC z!_m%n)b@MukJJB|kb{Ddk2mvC4$D~Pi(x>e6yy0AW1f;}MNGh`Z=Gu0hm}RH03jpD zW+`IzaCEe&$#H+FP)0j!&VE;+HsADO$?;NX&8me9{%SI+RX_~If+$;#&fjjFRNQsR zsuaoYL zy87rk0FG?#J+aPioNux@b$ju?0}4WX&v3H;X*#wf4{Pw7^nDvyk@?`e}TnP4S>D5?># zdBqlSu~R_tZF(Q*$#}GQs-VQwK1rf&&)q;5lR<*Rw)Xtf+ed+3oM46PDK*TA5HGcx z?@^L@6a)I)oUzGCO95;a$u4{#|Earc63F-8Xm70kIaGa4W6g8`7;oT<+w2>`HfWszGE^gJ_9Sl7ZVCv6M0vcwb`iU z5hQbfy)pjb_-(zD2bPg)!{JpyDl=PEptUDh~5ID3LkD_&*;YVDRRvg z@tL;|Bk3{ssm(>_5IlJ>T>FGp1zclvaZoeEG1Wi7&Hrtck zgdMNAQuzL%*G>Iz$0LpH+=}POhH)Uv?Br>9io(UiA^Ldbcey8q;U$Y{57;`mb5 zG3W7=5JR#yCVnolge_y!zpAGDr%mG*OgCZ><0$1Ref#VZep=p-TXmuFDp&Lsp%$~T z$5T;_*<5!;U+4Mb`!#k&Fv7i-<@Xc+dH!Vi+(C9XX& z&$yIyTj0D#CB5`m+;Msgn#gryIie11eUj9u0`hnqxBsdbs<0ba${rbpS_a<%yk-+7 z;6F<%%FT5+YivGn?zAAwO=pKe&9|j|stkDUypT6MJ9urhf@wJ z_E(9PKy;)k2cjl^i&<4t>%CGHaN2Un6_#=5oFW9ChIYTvoh`l|L$*&xdScApj`BM| z(69RUS+Qva*{}IUps;AZS8~x!gH~FcNSdK60fYq6=WEu?;w}ECN%Ysm? 
z@HO;(6P03eyU*+kR`jm|S1wsEQ0g{E>BKXuz^&yIYlfbmhxJ~ZF(1%2o zJ+kjE^0xj>o;W!~KFfjk;Gi>v2uSAGY>U=jiZ2p+WmIcBDN%S2B)v}}=)g2#KkIv*kJa~*M zEe~iBZz4AEc;jw3xz7JIMtEm>EW~M}I^{KB`fu1HSKmtc0-3T!@`a22UXgwqU=MBD zVweoR=ip4;p9Ta6=#y~~*hdX!aKe`qcQ7k2>D*pz6`&9syuebM2kW<*PuCdTz6#z~05d9( zIr9e?B{+wxFPa`za*i=+oomX`7%wBx{K$X^8zeXDTnWsC^Bz?Mkd?8>7N;5Q^QvG| z@#Xw#{rjzn>&940gIxz+b|vJfhhQ#J_8DY_A$?!!!<-^jKxy5R!M6o5+~vTfZ&~M0 zJL;6eylt6}+Xgk}MJDD?4SMtF1GIlTnJ`~MAQg3hTw!oN-Tc>`IU@xj!(kD#oy09t)3?n!0-x ziC*F9f8FBGIhbpVp*X_9^F#7@f~4n9zuoH1jjtBps@O(eZRFJDys(9oKDi#e$IQ-7 zs;BVhlCG^~QXpx-kT&x9f<@cfZqRT-750Lx(uK4YW?v^pi}7yz>~Cf7zR`G|9)}PmVo_ESXN!&uyRPH(^Q>D+$G0Bz$9-@+5&iA z^pb$+cG=)mJHcp2!Tw8}T#oc3-(S|01V_P}E*1W1bDzYK*rT0K8i9mm%UF4g1V;)D zi>BVwIktUQ(%mOD-**?dS{wsYY7Fkjq~vc^w>WLI4hJ>vT{PW?cROf7xwy&(wb%Sm zD{jE_1{$h4e?abHaX}>T+x2s=+Gf@S!nqweo({Ah2==OOzIZlDSH(TuaaJ0^b>s91 zR!cK~l44fAfXhIX0Mp0#gs@9+wll+7<~2}}ds=oMcuHj}sNJYj=^g>(?ds+nOgN@a z7D0ww3?zEv@?TX_F6slO%6rO?5&bVb`48dd0Zg4^+uR1_1b6d9qkvP-`lq`A*B^fj zt3&Le$EN`j`FD=^$Tq|{$Ue^uGSP{Dafr0NC1CZeU77l5IjL=LoMz+qathQ=L6B8T z6{QaT@rxl1**uvh9v<%6;(RYFgkoEWX{G$)yyQxU3*>OnEwxb?wpkppNM!9=qxl;V zYFydATq9*ft^d$lu7*2DVRLL99QCr+y{)5dZz)zcU=v|0W{##PXKdt3fiHTYvVKe@Ewg@TztSIHng?fb#7 z+f|Y%L{d}gy9u$2-w2?IZdT$Tn@*p7n7L$E9bH+6DBv6`f*;*>c!9TqJD9DJ7M@Kq zcJ51MoZWI__|<|r0f@^!f->zG7Xtq@yD?uepjn@6{3COa+{=uC($Dlcn3C7j4XDwk zWE$~x2ig$G6jrPK7(LMq^VPH~E!bL2l8P?d~C~Hrl+17YrUlp?5P_Ra|1$UF0EF5!oex)^J^6!Ju{ZSN>L{X z297=e-e2*+a|<)ta%U^-{+G8)o#WoqiP_i4P%HcswDbvk%ctZUzb%bdks>hnNOyM* zrIdZfXY$?-Gv7!d^3W-ule$g|z8B(n{McqiQR>=XU-OQ^4v*CLRfeqezg) z2Ul*kfdg^TB0$Fm$Z?8E9}bQkG5+KLVpIyFlmOq)ITZd$YqtoAz$pkzFk^|^jybN- zBF#}##?wHIpH%HMU)K6n2&`Xz=Kc0Hffq~{H^A~+=NZV4a+&69EvXnoszE)IZ_)a( zZTM-dS7DdB!o@xfR{_(e5H>JBJukIqG9!i682(PsAFT`{fwvl+0pU-2=vVyB5bgRF zm7L80k0v`AIwfDclpf7O#9TS$GYY*s~M0ySHxwvjd85$Hck* z`fHQ%)tk;V9-L)^wF9LG)1LBc3Zj}zg{0S7Hzo_K^z5lwuqvxOo5`bcb$u3=!bFl! 
znrfxlOcE*MW};!uM2EMLDp^p=-nwCg_QVD$?GcJ=%X~{}aJ)EpE1zN0$R8qj#zo3y zZ)X}?qhyYQr(sg$VTr@a6vy#g^7I@2AygEZ1uZePc=X}cG)n*SWg`g$j$7osZATgL z;C-)t?5a52pkmy)Q8f7MEZCu{AHj}k--3b$N9KjU59f*Y;?7SDE=ju!orcM7x!#mI zbY9MMG^r?g63;YNEZG6ayYwo*!9l27)S_o8!BM3x2$M_LZLi)xO1Gm4G7{shieGC* zQp^>zPi!%z1U%ap_AdHN;u7g5Mtf<$j7wUb^us2yBpa%@C!gQ`iZe%KrFDkIgu zQA$2Cn>~&KvUbolI->rW02G^YN_BjGld{Ur0x84B|3<3Yq;oHJjCUWZ+-LmXLh?Vh zdFba`1b!3G-s*Xb3@Z0hkBD$h%6hy5x?v$+m@TuMjpw4exfC#XPzr0gLnNN>L}v$m zyIe!Cp4!?Wc>lqEPzfBSQuNL6}onCz8?cuC)siu&_>qHp`?jO-;9|m+f z=!yU7;bNvLD&ZOsce5;kx&@s$rC{W0)V*io_#Km0G$yfeb5XG_Z}%C@bo%Sp-=XL+ ze+#vp%QPvfpt;12&`-Vn7KIr(@#>YOON*E8Ww>RE2{6f1O|3X`TsjH;6StT@D+&#N zc5UCdJ`&cx`m086S9I7ssWyT{{|r8wu(FTPHUQ*Ex|uxCK=ml9Z(?|}81nuN)sh{0 zKu!Zm^x#|Zp3iE*I4?Dn-KNT0s zKN_vVTv7_eUNV&5PtY_gK9_}4HNqAB#c)<_Nf?$F6vRfBQyg{y-^S5Zpz-}lD0Q8Y zS?OR7l&mD1#S;mW1H~OYvG*;2^*-1POZof(ctNF>cu2Wo&z7(CjYFkLsc??X#04~& z95C?Fn?X6?Q+W6e7?gVV`$I@0T>Jf>?QG8=XRYxyf9w)NLdib+ym zjo@V+pMgZ`BZ*n}EZn@qCj(L`gv~C%1sN|UzmAmBG};9zJ1M!+6X&;W&Gj03y!*Lo zEq<7u_$bM_1HWrl3F<|f+(=0zns6>D=znQ*(;m|dpCdHz>PMO0^}#JU?S-81Ok>~i z{^Y-I{d+y_%@l=^^%ol^8O^Y5=GNDVsSCwsYhODbv{JlZQ!7;^en!g{1FVtQ-*(N( z)>QO(xH#mmoGRO91D3^Sko8=^1c@FD`IsK!Y_K$vgYa(bG+w8js&-TQneUy zy;&+}xC-h@QD6`Qg7c|nEgt6EeQ7+u;3NI&r1vt2sTF24Ci*=(>|b~g7^rVLyq;@W zS$oI$yytM$t_k@^dr2m(nv8zMUB5X%BQJV*3nG|wr#Z3X7V)!d)D_K#dy3smc;Do- z6X1%@WO$hLrY)EX2xns)_vb3=X+4{YRNH%0RzU-$cv1I;o@{!8D*V`FHV^+=>*@1= z;LV%#HLLfGU=D7SXO@oNhEL?$lX(6HH>%S&B7SSiC`#f>K9F`2na@>|iJI87t&GPh z*x%L8Re*7-`offB|4ii1*yK#mOo$%d?5M&8TR1H41fMZf^bp^BN^<7`T15NtO-?ed zxZ&(lI%4-ju~pTjd$pJB$$Z|F2;)^?7qjBjjw)u`Qj;s>ziSs%_^|$HehLp|c%1XE zboXBbYKwcDYAQeF?v6s`s>>nx>K$~j_SZmkJ37O$Q~lZviY`@me;Lm2wLwabpHC?; z_KUgS1z{vOyrhV#OkbMBAM6wJw{pgiYA*$O0*d%1^}TvYRb2hO;NT5)BUo z0${9AIWL3=YX*j0Z$6(t^y3V^caY;=N(o`(^rAZf@v#WNmM=dEm}}#3sad&rwbQenQag{Ofs)bEhrUrrP)_te1~m z`IJ`2@6>;p8WA1AIRKOilcFRr^?;Zv`sFi&2`wcS2#M5uoR}hvEgd=y**n=m#3XTo zV-H14{G5~farsoSIX;$Ft^tI4bGLN2ykiJRZ{MuK3T`P3>sGHO;Q;D%dhS^OUMxG? 
zaQ~mOp}D$8^qX0R7ICbv>s35zIyo3G)KBJ9@2Z;X31yRhQ#n%$f%6{} zkOWtG^w*`xD-QwR!(qfwZLe_EoZ@B}r?`eL2Yb8WX&v;=hW#TTg5(=!b)gqd6z>($ zRXxCarQ>M0OEcKcjNfKz&3^;(cYe8EcFiD+Sb&=Z!|fzI4d1CX7ehO3 z{KtJ)(A{=z9fu&Oj$IIxymjNvE?g-ql)Q({9!d9iT1p@_Un3#cr z_9&^6d)kFq_6wD0opLpa1-tF(c$m)ObTPLvP6=l(P@{}`Cgu~!9%-pwOfZ-ECw$4$ z*2V&~8SM9k{Z-)ltGG>KG`+T%Jw0%>toMv)rh&dg5n%MSfi5`=YO9`p-7Ir#@)Hqg zH1F*2Oy-q;GXD96F>YDZ0g-6Yeb;+NJQi{Hk^+z}b45;FTPwT%p52U)Nw3w)rYZ$6 z{qI1*g+vpLw|tDfqseE5y8-R7bJfxZw7}TIQ0|08pVEHtHUqU|i@!k5T_=3Vt);~4 zwa9kM{NJhe&6Ze!9+dxGOi}fJTX#3OF!dJB`74rBC_dK^v>nCq>1ue?*A)84oM=!t z$7AlZXzg7tJ-m3Fc+|i=L)2Xl>AT$F0-*%t>J$0*#A^?Fs_-W>1cY>Qkd1?X>2qCd z8QV4sV|=v|Le9mIefgYfu$l5?NJYSPC60a^JP~Z~Yem3nz_rr*n5|4&7qeg^s<3MT zaV)$3YW{F}F^{E4_3#Gc6fvwq_7AsW*a}@$L z;ZM@_tMQd=*NseaRlPqi%V}2=8LXT-G>$_$OKj;j{y}rK<(wtaleeiGNaT11G%36$ zm^|nJ7p^EOyF`p-l48`e{~c;pVngvf&xy_(Gmp%EdI>D3k#9dp>UP}l`gH^K>i%HlNt3#0zuI{*CP# zRyK|m3AR=itvM)Z>!ggtlMUW2oIr9=dPo=T8KJCdnOyzZs@Op){)*^^Z&4%G^BDB# zpya#?)O59cqJD>tBmMe~(5CBS*7ICP9)uQP$O9R}_e3o6D>m^chw&nySEY7!8Zf$e zzAkT5{{|^eA88e_Ur21x$qQVfiIB^UA+M^Ar`$5IQQUC*CG%lG?Ndy+Kyr)?YVm2) z#9n5(qyVw0+It5kLs7+;^a|XpD95I~&GPR@gc``l@mY|X?gvw?GVum@B$*-r5SdmivWw4&s z)!;$rkKHHiKJAg@YDLzX!w{EY{vtUfJ63tP0`x-3`pbRFi0JdLH;bg|0?!E|^!{7G zBj*gZexxg70~|MYz)l!NAN^JsOwRppouvk7@ago@=71o_<T{G;o!z_{}wVb|wFTL8xJ}gR1n`zYRyKVt0s+vJnANPJ=qb2Gw zeM}$_ZbxX1qme%?TGJO$7}x3VLMFopSkPtBS27tF57xhX{&3|Jy!V4$IrQ{Nt$CU6%L_=h+7&m&%8o{;vO`zW>>ysEuQ*;YfJ* zT|O)QkZ0@lC8F{g6OduC`sXWs%RaEZ&^s)e&r8uG{UbRS>5fVt1EVxI!P`*6Z!j5B z)2deYN%rhuYm-GDAkn`PQv_h)pX&^1^Z%%b4P!Hlq?Gk>^stO9AB!D<9u-Au0{?^H8vCfY0^YOixkV)5x;kx3|$i#MWU?#q0Ssp5`)aciiw-h^^ZJOa?PEutZ^nY_# z&H6~_%`+dn%)i+ix&{SjcnQxm1+4*SQXgh;*-%Xb(3{Tx@ZD!zL9m)2HVrXPfbLev z(j<-8H?hNOtHvR(oqqzum9MGuZ`vm7ILEDin`Pp3C)MY4b{vAODQ8nS-p zhV#eeqAN!mNI-SZ=ULp?zC53;zln%%!+J%TTK;Ip3{=6^#Og0o%>ER=($Msk@xaPZ zO8`oI(-CS(ncU{Vz6wIm4dWkSq0j3_M@*aSw3zb61_f-ivX5HdFoJbgQ**ZT*;fQZ z_TCQS>uPVrSJA$HCg&@UqL&lWVOx!?liqz+cA@SbwO6)eW76amPeb1B+$;w*@;aXO z@JZgpSnRD}ci#|O2lRS^>698kY}L`+@5I-c$u!rM~e)I>F z9xMK!P=#pz8u9Ss!r@StmGbsY#Ri@uJ#T3ua?Y7oY1V@14~y1Dt((U5)Cw`eqLyo4 zV89o5Q`!#`NwV>-5%T2~qmLlg45kV$k=qQ8tHuZ|VuUm-Z0cH4T z%9#i$E!}>*)nPYf;G!o6n&l|!a-9mK30*1c><+H@CPw#(>}w_etLDI5ErKXq6@` z-`p;V_%pX~DuoznJ#@++^`xa=iw8=a=;E4fyFD;ng5@Z7fB3I*U5WJ@PsbNFygRz z)LKw{$Id`N!mQt7xBb${0~10~)h8d6a)jk|2U{M?yg|=&rt8}$$7fpRBGh~vkj%YxKDtk;exI7MM z=Jtx|3aXPr9%;&38?B&F&R}07EhtMnGu9kU%BZ80cC2jX&J{pS%oBdTPGkIkwsK5_PYSfN>_aUaw zf&F4WtEX@3Zj&Ub*45wMs_j>TL{ljw2+4z5|IWLSLv9{#pwVM+3Cf^EeZ2`q0C&7nWJ!^3I*tUcBovA%C!jlLTg-x-O3w?cfM3A4ic!NCS;Of2S zgt&sVhphqHn36VHRyntrh0VJlhoAI~hXjJQlOfxLU&*^tPC(bE?a1P#6t{aaA2!qK zj!PY;C^T}&ORg09_Jf#530R6_#OLh7;KPUefe8j-;WHPwo0zhxNzdf-KNF8&txbE= zX2(pxnq($p1tJP4iVv7}*5~jnvJ>5wAA;2J;)FQ4m9clH(ShI zhbm{d)=crw(w+J8zR6=epn?Z%gJz{=V4JFFzC=?`CS}_Gw6h*Tu~-E>pa#Boy)K~M z!^j(K$3}X@UdWiTy&c4n>?D08P0J13pSz-GKZ|ivH##r9l%N|rVoNwx^sADAq&!`| zQiklk{#{vDm3U?n4oK)6q|6IMSW*ptyDY;K^y&Kql+^*jPM)bzJ?vV@cB5eBb4KO9*R40)~sn)9WMfOzibaqpS!4GOv4GaDKu&(XWISZ=Yv) z{M%ZGwo&WeAGaNPE40Lk+et<%vtzE zT#WcP1P`#AMuo!oqkS8$MUedfg0c*WZ8JN6Bftf5x6N&S@pApq0M+bKt}q2_>_9tM zh+w*yT&$vzfBGZXCvp~q}@{ubX~J}Y6*1sog)4V7sjku$JYk`_z}SJJk-D!^8s z|01|Z9;`o)9`yY9Wo7tZyZAqAvvb*SDx`R*6e7ONUk!2&Rs~EX)D4||Xl6g1(QAD^ zj{)ir%1l|(xLhgsSTbWmWyS}m%cs7d*j9dKV{_G4M$5GE|1xnoA@^UGBe>Y$W{v%59 z5kV*=Ba(Grh*-0WiaGe*()&)%rDarrv%+Qt_4=MiotMLr#bGq~YJ62ni$q{QD~0H% zsXHnOfzIn%V9e&&e@aTrG+rmqSxkKI_Tklt>vP&;jQvNQCi*nWApx!oSbq;TgSdX4 zQ^Yl4l*Pl+{|br`PwC?i7rAw0ny&~vy2PWNA%m={9!&Pz=mUpF*ofL zP9V75om^GvYi_IT+ag8n_(11p;^$3wlWh}pg#c`H=MKPum+RVz>^5ifaw3F>aYUtw 
zZ&{3y=6d@4c9Gs)YLyRd=6gUTSyVb(S9lBClxy${#kM{Pj2`ddN7OPt&_7f)1$I5Xt)G z0(5c)12+rNe=T}?4M{_?Zmw5c0(X1l8U97~UeaXGjPnA3}s6?6* za`t8Z+2AZm3dt7sKKQ{U#fgmkSLd$C1~9+p&#r!2OVq1pi&fR)@}KDk2HTRii2?E* zuncq_EGGXl-OX^jzZe&$7G$sW+#(6GCB0QTufyr+j;J* zrC{2596NFNSWHWi1MxzPw8b$#o7cZO2JaVwQ5gXvvh<0M;S42=FGqVZl1`?La*{H# z+jFApA2ma+yU9f=`B#TwaRnwoq6 z_)xxHWVCK*iY#fL<@UU+u_U z^{n-q!oxd0V8Ok$5SWH3NcFv}thzpx$P<-c$gV{LI;1#oz$OZJn_DUDyCamPh^sOX z6UsZ#zZ!sl!xH}5J`mosJ}ifowL9fujdACDWzYIsT^iNSokH$El4=UNF6V>ue*CA8 z$)*c4bxX~m}?41 zOPPofk<|lh=?q_o+XQ6-L>%p%+>HdqQPAYJ&9B6(0zR!V&6~`xZ@`_^hl#BJClL5b z7~YZuU)Vm3;s5;X-$Mbh7+PY#32_Si>uJ`L(a;Rs^j()s$TCKke7!iL&yD7yIv<8Z za82|IBe~pV!~gsab)OVr+xx6QCyZ?%xlr8yBIcDv;zNPvE@?HA;#I2hU;3@-Ai~9m zi&i%vH)cN}<@*gQnmq2ZZvPJwBojX_ywzT6?X6wguFDyT7-7;><3ejSd+wbi(0dhQ z84`}UNAk@<6|p~7;N_1_S%IuGnQ>TIfDPnz`2?iXEZ*W${E?JrOw@N7dOv@dsAQ%` z+Bm$YBzarAMj7$Gg*cMp2+mH5l15}!(sETusWy)q2iQJeoc%M62 zz#uwx`+N)d`f4x&Q3rNwB_E%JaK%^&av+|(cW3X79MD#%58sLdlwGs*B4fXkj>?)= zYSdP;0@(?bjhC|)^!;n%UcEdS9Uv_I8>GYM-jQL%7jEeL~X0rE91FZ~EuZqA~6R z)q)-=mZu(hHpFcfQ}jjRR%S#$qakKCfO5TzDf))bWl>c8_tw&rHoUO)vI96w>GheO zps`cm@j79rJn1Wii(m*>K8dy0W_R^8=K(27_D02~O=oh9V&X+Hdg5>>%0=dWn$DED ze#T<8L()>-C`jTc&a6bY!d^|bliO#aO0p-h@rKb*JJs#+!olaDyKcdXhGRgv$aYLE zKWfWlU4EQVsVE|>5C8Yyr-v%dIC7GHsefp0z>r#P)J%JX>Ne9a^;sm=2+e7rsA5m` zj}viq!V#{4PeVrE!qrpi|DFRON@}gkZe9zMCyttWaY+`f`ya2j56+mgn=H@$KV&)! zy9ek5)E-<%FQ~}J5SmMjf8Y|#$~s-XF)#7Hs+d1J@lbZ%2f8Hs+qamj25DaKuWMFd z9dlk72Y=VAF-5-3x4on69QhBQXI*WxYh08@%_DUbV$ur@)=0+lQ5hrZK_9w6!n};Y zaxv+6{z(tf^rN9$z`tWH=ie3X4*FVYQYk5azJFRI&^yI2-dj=O{BO0*txm_=Bmz0* z7HvDj(h-Rhsqfp9*2n*)#Cx#Zll=`O#!Tc(W?m9@aqFf=^W4!s8?!~|cjjJ(S9u+@ zg_*R%JQv(PAtWHtMa~g$YVjy(dRZHjoApU@7yD;A5B{$|v|NIFz1ybFp<1ul{QF`Z z%Jg$YLJ_P!Kw}myyCjno;H>PdxtdI*U6;lm6kGv;IPM6u^GbLA+O2~l!;Q(?arJz-j)d%1{||9O%MnL>b_^Ue>=@u9^+D2(y9?2dttDcKB|m&5J*62q&jq?R;}3?>0>! zzUfDPE|G)qcaF<>*GT}i`rb@Tj^{B32Q~xj-}Wr#b;df^O8od1?@yjP1hi>C*}K=X z$WGdSbRXqX3T~0?a={cra(*ixHjFXxEszy8>P_HqYNAr*IZy9B)t}ufp<pQ+<+Jg$r z3oTi{u| z5D4M29tOr zG=Zi9TUkantXB~CF=e89jXb;^KIq57_JVU!kQj5_F^%^<0!a8DpN? z`+q>Rf7o2!k4}#C{aM6I&mUSCqVfQ>+KRgL%H|FqJ;_*r6Ekc! 
zc4<{%<5ey9Xv`n1p728CI+9$b0K?_x*C5bH_Y8@fNk@P2oaG0w%kZN?uNSIp2 z%jouMz?w`Z2E)S+tW=YyQMu`)4mHRd{Z(zZCm<*Ef~Fho@`Z|7mUX4?3z#CYoRYFO z?qPYKs(yTkx$i;;@h_hz12qbQHL?4CG8>doDe+&VLRh}kD-=(x6aw<=nG*V3GM#DB zU{cKS*d1ckh6Ir-BhhAWO(Me2D@XQ9xHiWg$`+mp66NCT+~J?wftgwH@M;M8gprrMZqGXJDx-u#DNhqcp6R3q*Q2VS!@Q2_eZ|A6V2Neuex=MPqm zbYB0@-iaz^Ey5p5@+#(_?X992)Sax9qm-CKdRvb%Cy>d?ZQtRX@IA*+JW1Pnc#%z^ z)9&h4{P)RMgN$Z2&T??A6x;iMdfy_0w9ii`dQbXe&IYr1x&{?NLTg@{_A;TF_njNT;LCb1cP{t>zK_A1 z6id58?Hg)>AO7XhGvF&^P+H$Gb%FiX6>r#cPOqC>p{qEH2jUhKS?tk| zWApzJ(X^Aai+{|Fpy80tIYnDE}(pNhC5*(8kh-VUpzaHG~ z_%k}6IK0g2%f5KMFnh;N{7S<>*~JJ^^WWX(#q&su%I3?Q_c0jU6gi++}8rgQ(Hru$)* zdcU*d=RI=RczkoqpO`;0g1231HVKVaJDF7R`-nQGpLrF|QyqNZLlP2z5pY#)_Hu)D zHpGa?Z`%S?7EJW|`rRM=6LARog5febyB~|ny^nXUa{)^E#YqG#blbK(VwpmZ-qGcE ze>3zk<(x4XA79nbb~+_rsoEqz%B4mOV2mtT!&ZRqJ9O#8|6*JFObPG7A*mQr*{Qu9 z#>~AJyhHNhb!q1)w~!p7u7+oOGnyL`vpBD;$=ZCF-eMdP+kIJ4-~05W-`#Yp zl;`eow_6ZV(hxI<&|Sf8A}s|BuWMg8TxCsLgES3AbA!e&vnb~05p|iCf_FoNzARXf zxO%U`&5N(xbR0hAZxUbumd7ks3AK)yC`*h_YR)GdVo}_4TmFfUbwgp(F^zUT7*K69 z0l*RxU=|DxwNc9&^zE*A|7`q}xx*Fza^??@H6Be;hmTgrvzSsSAwK*5w;8L^-jJ@s z@Q(N%(>1HyLvpV?wU9#objOfc&FKUXumRF_X1VRaMJG}-Z;tX=U@dh@ zy-@hm+W%5`2SIXPyJFy)(y_)r0kDOGo(Q0~)7z9f-4jXcr2ewi{ai!vxM$34IuGB> zC4Bqox7du!1kJPRqD~0i)=##dwbZ^F@k&B6H@^!m*j{KniDDSHlzv5l^2)`8+uY=y zdxg@9Y2O^a|4=N0@ZeEa(}kzR?dJ2t_+jP`A-DLACb>-c6AR?h$XSZ3en@pp%GHJq zDwI{KQ(#W0%e^lZ9lwca_LHP9+TM-TlR=~x-qkRJ`*J4tPm$@6^a-^W(X8k$SE0(I)+MOg zZ&$qTC6YD_Qm!)0A$c6qWRbt%&2!|^k?)h1ij#V*TpZdXR3VGMWeDTj`A{f`bzbVv z7Cya6~o(fRJgFiT&6gSg~Df2Ay*6JI2wieF(g+wv^^m`cEq7H#G&D zpllzl?*9Y7`jlT#!@(jtx_3uUJAPxXhTiO;#nV7K)BYhbSi;7Kp&HW+U(qH3ErJ=L zQ-AreOR-syQy4xP(*iy|rZOQCLPH8o0&3vwhxK5(7s{U(N zQ0wRZQ9GgkF5=5zO0Y+QpW&YC{3MxB0`-W477TV#ep$-v1l-<0ZP z(&~rzRfb51&`jHdgWE60F-)!;iLoi7>5+^TuNsVpRG)XAO`CH3d>a*;_r5jeB$v9G zwtTdjy62||Bb^aiebqAg=PhrG$|q9e%X`Ue=`ixg!_@+;6$;1a9bwkaN0g1rHV=$G z2^=UsQT?A?C*zd(woG49ctQF6j<9>|FfW>IyKvyxS6V^zP_)xv*h zP~tP9Z1bplQ=cHPt&7~|Zw4C5QP$5$&mL2{od^Las@=ilPO%H zEJGGZC0TVb`>Yxay<+HvbU*pdCNl$Z=5gkt;H-FH7!H(`hw#2=stK8mgPl-OuZR=O zQbEw6t+8}vTcKfwP-xM8^QoZww1$#eG-F|W&Rgx%TA7N7&I9w=XO=Vcnzj$E=Y{h#^*)S?e@Enm?4rw2^8XFhFN;H;OWh1*!H&Atvzi!@R^63tJzrlt zMJGh$N)awK9e6!81l_oEX%#V{V(hlG7jQ!+3;cNlM&+)t4Jd-Klqd6|nXDO2+Na72 z)N(Fj(bX*sg?9zU>WW4?RNRSMExkKdy{~)7Sc>a!+}5Lbs=qW!eNu}1)!5;0Tk_PU zIo3RZ$r5Nn(#ot8uNlI+MUf3D#>iorifGBA{}H2wLwb;w!v^x6rXgj~F3gy|{&ibe z{}ul8p!=RR0U1LGBlQ|k8C&t~$x)!mw!{{>P)fz_u zTCzf~Rj^0KU3%;{Ip4)Fq(LF`$3KCF#0O_&K-|48^x7eJ=~M98Sgn`rCK-YMm> zWa@)5d(akAY>xPy&q8fzHgbv$ts56#G1WycT%)IDW=*}IHKR9 zIE@le5iEM5lUUQb`E4C=uq@_c=hbl#yV#1c@>1~nKc*83!K<5*NebOWXXXE(_kTXR zgnvb-r+)P^EKlF~-7gMUSE2mQIeogZPzwqZX%SgYzu%p>UaQ?}y8xlac>VnZiyGeN zu3aEz@ruL$Y-xvu3+`fQlS~J0|bARpBuik zpotM3XrCHX!po_ilHnQ7u6D7djCs){PHF!qZaES13J8Iy@?*eJZFziv`M7oakq}fZ zLkft-$EF)^+2r2XocNsBZkUiJj1p(~ts4wYbiZf5ZIN?N5M&NjWI}<*aa$rEe^>b? 
zu=>FxT>~M+-H+44wry%>&|%T{)!olKKf?&LrWa25hqvDUNNc< zs`}?e>zHkq53~vcBPFZxXgHPqj$y^Eer*B+>67e5> zmp|_!)GgFyv7TjP`H8K^kY1JAj&~k_`W}%td;VrWS&a~q%+(>X0&TETM~~#}`Ce3_ z(L=C5bR3N?)UxBVSTe{-0Lj?=pK@)h=QFajnQ>Zw?nDhziVPJlFvZpp0&t&;VX3LQ zO7LRjuzr|zT&ewXGU)lARmADDem4^p)A!+T+R@YEgJ6B&pk1?w9iH`~ZX(B5yI~Ge z*@!-H4N2@L%tI~lXj|UZP7&O}IdAaf(NmWHymkRjtLyq3!6rcR(!njCiuDP*;QRIQ8`uzqV5(ROGj zBjIZ~jfm=!UVjKN#MS+Lu0~QBUayP^8|9Ija@Lr!Mfxp={h5DUcPD4xe-ihqw1;ur zF8*g#eZ*tqT zkdJ9;MBycS5iI891Y|?_K$V_yfI3SuD}YN&;p+~9W7FSsee8UQ*pSnjWWX2Kmwc2$ zb^P<~l5OzWkETO=U3uA67j=JdSZqUIk~kyZ7_76^SVlbRx0-GP%4xe^2>O|2B8S2x zD5uRYtg&=V`$?EWHK(VOR&}_1rlFA?E4f)pB!@lUl?g#8T=gwCY(LQVHg6Z?LAOYK zAAu`7pvm2T)@=PULchZsF|6Wne}tXK5ck`6Qg@o{880_d)o$!R?Wvt0h-w5PWnR+0 zwAuwf+k`*u@!MK4wI$VWZ;tdhh1JwTmZ6lhel61~pFf{0FZpdH1V;h2$d=Z#M98x6 zRHEM>ETDfi!FktjoIaqR>7x}qLyz6Yi{Fk1M>AokN%$6nJ|hn8-WC_iukib;`JWp_)=087sb9|q3 zu|2J@)Nv)))KXEN;#DD()6&%1vd^@lrmHzixm3`RzkOt4aT$$SASA{ahk`|0B#eDM zE`A|%J~ZHT)+n%r(*V?_X)OA#A&1{U->gk|A)U|!*sk~7B*`b25)i<)PCQQej&qD1 z>`e#^XkC%yN1$C*!&#SRhsA|Zr^}$>MK)K@f2Tyq=J?@kDd7{sCb#zAsbQ_-9&#Cg_KCyrEIc|ULB@Qg zOI{7_g7A!EW3xT_%m{OjOx!n&HlB)AihaERM820G9i69L_reHkBQ*Nxhg3%OSn(TPoT=sF5w}dE>y1m3*m#sUvMtgoy?*Lms3RKkgD3n z`*@-xX}sU%-IkYObhO;;I+8@{7sbDvfK3;!=f6(x?!K_>bz^JaV&Fn*R_lfEW!uCB z#6hxmfvZFj2=PRFdz6TB5$?YxDml?UV)R)U!E_@{6A9hP+l_^?m6hQs&loMWxafn^ zEiI@yKx!$I@8ENLD&0LuCre=25yOkrXLiN4D40}Q$9s(-ctL?9ah4;WEOs{~?{N6+ zMxK*W46CyR?hh527UWaP`;|Y6D*9a4y(7!TSjcvdk>kLHs? zt9}5tPwTG=?!OhPTb0BD>bLx=y}SE)!SKCyLa1K$%dkP#9adH6FV{c1?l z69$roa}~0$gSPFn3TNFXZ&==;6r!T%v=N0(RL{zzF1EE08h`)E|2yX@p0dft2D#-C!vyqToqNgZWFBR_Sl@jYAbZlNaNf5ms@P zuEVTtDt7nNHRZexJ{4$k&v}?I8RQKUO>q+s84~F28HklJD!eAC-a|T_)6zfdQ2%m9 z;uoB7w3|*|Z){AAQh`@kN9Ok>u?3}}NLz3cbU{1fBHeiN)MO!1KiB$jUUy1T5X6q~ zu@*RMgi!I5`-BhhD}|(17xXl|dsOxsbHeEyrfDL0NRi zT6$Bo+1cpg8sYWU*;xiLFojCnh7))gdgMySVdIX07Lp*C2Gq7Qg2_6x78wv17|rE( z{n!UiuAF3lE22v`lS_S-XL*Jgvo8}T0@U6n5Vz1`YU$3!?^oh?RBhgDq5z<r28G#R4LIy$!&gMxi5SQzs8KwB0!$Ldj%p(r~B#mRDJ^ zsC~=86oJZYH1ihUCne%GOdUgb`t@9Qt$ICeOq-qH5s_@@`$`sT78wLlOCjU*HloxxI)%Oj*A%k?qHTKbXfHleRjeM0~=MxoCZ3p{8 zpzj~{LIe65IbfbAD~eK`@f@5C+}RcMH*kI%!AioCf5SkpAdrBuMG?(CaHWq>EVbOY z$985ktk>+i3iq1ujS|^g=4o)L!HGCNkgqWn9^97nx3tdqjh`hgw<7&Sf8NHka5K>4 zx*(gS^gQT7`Tg*4>bRf-nNOxlE&stS)IJNH$=Lgf!s>IBc*FSj=S#n#cI{vkjt@w;C{7}2TBl(w2acs%4pJcka^|;)1(XL6+_xbTg^ZuyGu-_k7 za<_!@zb}A)mkLHDMF=6{0GQWjd0c|6w^gOfrZo-Q!#3GSSRU%1pB%VAM~xZXBgo4l z_G2p#Q3MdjCg5`$CCdanxgR5wcaEyQ5F`eA|nFK3&FH@K$yPL6_Pwco1FS{OmBAxdYH2@teAO1XYI*D_22DznjH5Ic6B27r0X}N zcSmni8_5m!KF@T5%oW6xTOpE`7W=JZv-vBpz+$0Z!VKarBKY^7PzG#Dc>*2r@VWJ& zDM1odKPG4kH%O7;hM0zl0-yfY_S4Qinp!0C#6>p@|kaYoSv z_w+YculCrWQFgwD5)B?$IV;xJeHVw|CO}zXwPG(6I-|MwSo-#X@ciCY)lrd z#)<9%j=}jdlhTic&bOd9N+Enb>sOWhDaS$H#N>*SQYIv87gZ3D76T1;Ipb~FhrUJL z#7?AAdqOcqwsOZ3Lh5w(L8nBXUndK9;;!l3<|W(r7i=_a`XX)PyKhQu)2=j(*5k00 zN^Uk+NWOk=Nkp?8<||a@Hpuo;bdrv>bY2rXEnN6wDz1K`!B{ntgg}HUOh9RpE7bG28+-EQb`)I9I!(hzD82 z61luHyu6i3{i6(G*@zpG?JrR)OjhJDo!1n9ZR0xl z))D4MeTqkpf#I{YMzY1iGx)KsBJtQBd$L`c$u1{khFWB2?SmsY`@JW>UvQrSQvIj) zRespHsIvfKpno7MNEXv_f&z)+fHE0*S~@;k^72g=WZF?^$B|jU- zL@Q|aROTpT-! 
zCog=j&2_jV*j`h`>Cp^=8EPu?*QZKQMR9!l$=RZ-`)tMtxG#a2y-W?NFxCp?xm!SH?zjv4S zCn74=Q{Vg|E|~>EDBblg1yf57x=WW#KkzxZ)CZ4xkCdvW8}(xEv@IWPI*ENV#ZpcS zce?8RUM#AX@0+^f-B}4r-9flA5+TW{>+nPbKClC?`@CuBY>SFet-85>!u+-*Wj4;XH}0>auhUxc@l>HW^+dmYe+#S z-fGyjTL>LTqJ~NgI!_;?XD`o|*WDQp`7T)zYb#UxZr8n0%u9C!d|4O~m|M(ue z=)ZysHM|1+1b-Pp$ms zKVNK*s)dkxmhv6e_Rv0vB9asqD@iW8I8!U~{9mx{^Bsmz&vd%5>o-O@I@q5I+4qvF z;epXm)#mxeZ#pr{?X%M|REY}&l(`R5rTHMT#)*WIg`Z$WTLLduNxZ!n1bHuTLN*nY zSFyD(L!QE$#)q01G)~KHxfWEAjIMc<#8?MLyz@<6R?8_A@4L!FqQxIjGS6hF8D_H4 z*q_H)*7Nl+>ap6a%SmtixE5Gkim9ih9xXQQ*7f>WwDvV_QNj6Kz~9~t1~whPev%9{ z3l1|qU$EpDVtrMixB_lgukNXX^MPd#^s1kFKcq%j8a~fe%jeo?I@CNaG5OBa*Q_%B5dfQNHLMOa|n9+p)~K~ z#u)eB>(~mlMn1Qi_4x;|VeHY)HN|FE*>Nj)l9rmve__VT$i%Ynkp8ARr?A|NeuUas z%xdJl)MCs&BIG%=)RU3~NRdMntOxOi;)^@qBSfk&?B0lPwie33ca`#J;=6sAFp=Z54 zy-;w|mC;LWY7*>9Dx`=PCAugLAk0VNq= zz1>ruzSUhJygZdzY8o(SRtr9?Gr0Mr(_4e5xW~)T8BdHQ=o>uQ7u%V_M3G%^qpMDO zaZX(HYjYzR`o=_AAg|Bz>29FWyVvPk7choLHcb@EcD=Sl>-w-fT6FgcxM2rBdbf)& z_m#=RK4<4!h6qt!F&qZJYUa=i)b3*7ZS)m~%M=1F&)~Lso4_plLcPEPA{PsYeFGCR zmJIb7p}KU#rFJTH%`uB!izSH_@-TDHyaV5T$z#qb z1Qsax_jV>b(`E|iXp?o3gwjjvou;3pNo!pCO$?DN0(Bli^w z|6_R{_Y|5E^MFMwM{H)s!M%V`G2iB4Ux|$X_uoRh8=`#(;DQgklugXhJ4DrxUCB=T zPLz-$ZwuvTVm5(!zwz)ckh~c`(aM!CUAp{NW7>1?76*N)#mdeY&3BOTJnpC)T={B3 zcxOM>VUl?Uq8hRj7vLPhr{ld%GIGB+(07|AiCg$YG6gLkf*-IHsS!ozZu!4?`! zAo5pFb`qaS1u~mB*ZO0K^J&#dw`?l?|9_QDQsjg+1=Zp3 zu?Jnaa*XeQse#`Prn#TgtJH0h0w)E$8HpcKcLlpwR+9Zm)4-#M?_z{w`XcAqo6GOX z+hskBuH2@k{c}`681!Xe`M>afR8)izhjF8aj5RRl-p#M1v7kq~@$NLF9nHc70$$BI zS)~D4qC^_+e!rA%i|t@p3*BZ@EKTFKOGgfqTh9eyO!igH*HyJd1&r=i#UWB-Y*Cq( zwEX^Lu3Zr^=8_z60XD`ubr@U`dcTWx_q;81Upimob7VTO`k40(2h_octYPr6p49d> zV5lI%R^tn&l!-Ig3ZhvsY?*^^Z4`OTOz|og*+2JZ?=77oR2Vr!3!b34+9d! zU!WXb?M%44~l2W>Vzm@@p?ggXIT;Hbt!lYg@zByh@ zn(wsXi~hUa`!TmFXTO!bHjHR-xkPcoW$RjE2XS*n@ri5mrf4+#*Ek>PS2#{}3o8CN ze8H?l+_M8&_0aE@*2F=Dr_r7%(e* zt4)|9<`_TL@HKw-xU1@K^4G}DO8bci-!VaZ2FMh$7X$yC_^|5Jlwhvjq1lbFp$$bS zF+LWwV9o30PahEEmtBa~T<&?W!&7^UH$Lv_%I}^q(Uax1>7g{7mc^2Y&3%d79;kf0 zs%Z^0P|k=CBt||qo~VU^80?SQX_1)H;c1KtSWa?I<$p8o1NMHmzE7opYo3^N>}7wf zf0Y|t2Jti9$>%m-E2lf=W(LbkCfm=G#~y-`=Ko9b$h_38 z3lBSfd@Ofx+>QnyqVM6T3{wKTO{9JMgk;2sTkZeRSS_yh$l^^yMD# zeW?0nby5dys4)!x&~Y)Ve6Wm#s^;rQ>8U<9uZcO~FG;$R zrm8Le7}?#eCKfgL=r-QnI-l>R{Ng|g7_TVa|aKHA`7OMkarecZJ)Uxr5&bQ+EEWbfI zAhfIihl!ufH7pm_X+)l(h5Zo$E#TtazFsmo(`<%D$|t06R)_Q?BYOKiF=~<;$Hu*P_ zH4e9^Jb!beg`eCT7VTThVtfuw_B-y6E3#@CHLKMPANMt<^o#o+dmneTm2r#QFB zW8o=QU{UDi^-s}GBTk8gsmf?tEF2dgG-u|4LX0~I>In!T#u4Ys71$C1f#_w-bG~8d z6R@d<3hUTm*plj3zcT;5g~yC1hCSlh)v&f;)0%H9O3_x!F69IM!}L`|cP`Q2mE-LQ zGYYS1s|vQa-6wl#BKy(jUOGr*N{9MreoscubwUp03BzKE_LSlWH&0_=OaaSrVHDSl za{TlMoEf>>rsFfCHp*G?B{hp$aB424mziV zqs#PS8XIQ}aPWpSiFwR~7*gH46|ymuykM^pGRj?skn5W`T%$l+(jOK~gayt>jNjC@ zM?_;hgQ+6xN4>C0RpDjL3oVk?3xtMM?#4b9Y3H9wd8iCEfdk8Ujrt@Uh7#Vnd?bQudpkmo9j>q5cF^9lou%S>RF~aE_~6e&FZ+wU`uk1+DFXsW++NJ*HA4Q zhP*msVBjwi=cFO_x#gm%DirRntZuQp0>_j$q>ULnD3H>Hr zzeT$x|M%1kup0uxiQGF)yvtfcB}+|UTb;SvTAvDzk2N#mqUwzZO9-T~d~{1`hqm{g zf?HteX$bp{v$rmaQ%fbfx5AJ@!#QG(Jq>u+1L99C?9@vTVLxrBiu7q8epdA%@Z%#EKFk0n3+4$z4wVzwpYo_uS11g#Lfx{Qal|YS&iWU+;Sa{=tTfMh=baw&!w`&A zd9fH|-ok+7JPdFm{eKr<-uRay?Or{pru9<$9#l1zi=zvwlZa&m>3z zDWEF9N+bS0GDzdP`IL$*9Td!6i$HObTIbGhC&m}%3b#yl7}yB0XoXuzCxX|jIQ-eG zY)fnZK@y1D7}kX3h3*=4K_xfRwO65yXlqZ*Jo%@ z1|tvE5r5Gv$|9#6kb89w0r;wT7=K8!{)(GDRWwYc+^{gnfS&ZEOP!)qtYgV$v=GW0O-hDZa!|l=qJB-~#o*1KYzbe&`t`)Bzmpv`R zH~7vrf2pvg<5vwrn2qZm6tcbVEVS;pRb~4&zKUm}C32IS@;A3g#%GlR6?|I2`Lu)04_hee zmMaP*L6W(p%y7o|+9tX}CpII=Bv*MipgP(OewlGadhu;{KWW7w&`EfxJJgPrQ}F*P zwJQ!S&R^UQ7u|gMq$=9}dCoZHbjNwD;=}I87 
z@@GA(5_c$OEZdFM{e}Fe8k{5kvA)9Jq{*j!uF-ci&)DEK`n`+ag4XIscP zhJHaQ8?lJLT&kMMLPuQlP!y(pRz1HM?5~B3Q&bWm|Ljw8lxdb_n<;6!@p#fSmw)hz zth+6+FC>D`s?Mh@nz>sU#5ja3d0WW2MfDwx6aBG6ufsg702L4t>`E`C=NdUr9<>QH zil-hAay%KZ_x)_Psj{1d`pLC-xL*y@UL~PSBOFiG*Odu$gChf~b-yUXW_u7WpkcLR z#<=jwG^wmb){r9BgHfjY#hovkOJcHAbp~m<_4T_k6jvNcJ}MD?{p%Qt{uSYp+z(Gh zE|kM;Cu>?dG-ix-F3l4mypB2VwQBAHV9-#1k_@4h#VPnmx~Byo;y zdAd8;$W?>YIEMG#lQ;b2b^NR39}yv)%SLWBUF&~^IAo9#{lD|FrEG;DOoGn#onu~1 zJ+DF5q9}h=6cV8Sb$uEcdyhB&{Te-(@qi$nwJ>TY$EPvf=iw|vop9L*v0>F~MXkqa z$NsH69ucNUGZp{z>!uIGI{MDlb;OEQF~%n0LzoP1Kex{EVr)k^`JaVIM`B zN_qUMU4x@d^%?*As)Y=GuWgj)vr zE1&Ar70v!)UXLe#*=p;1$hOjaN{WD6%gU6U8W3KnJ902?ow1rXU$$$$F{oH2I&PaH z%;v!WurY_6HD>P2ML%u}d&8%(Y%BL|q{?$qwFfdVR%VYnxU}rFJ^Nx=hEYH$Tb^J% zAolpGYcz6;>XEVe^sKqJW`PA~HO{#T1iLBY1vx`HwwKLMYvR?m_U_vXy0vOr!t){v zrK1*(mDP-kFsdtVT(qr+5r*pXC-7~fLkF}T)&#T;PLF4l;zCSOJS>yZJnrdQyZA<` z8&A%mJxyUx;LRNAC!bA^jjv=RDhv2NU$D5!6Yj8N#sLIubpC;+Rhyk69qu3s!z4!n zi|oUUgFssv>E@(9<2_7JvZGiNE8dcK`$#;-EpjqsSpb745Q`c&s&AS9u{2Bck?x;> z1k8MD8F|L$1{(p9tpsYP1&;9#p19acrU3@a57dT8L^3O&LWeJ+?`&b%A;qZ~755P7 z;wZ4WVBR6cG`N`9V-%KW1aXU>^$|yy40d2lh&y?b(id8I92vF$F$eS_Fq453zaC2D z3ZDR`kZz!MR-C)3yOvKn`~%(jvmKI-8$T6)VC}2@&LHc4l+-CRR`@{fk0cXPL}zNy zh884~`cm8J_UhX8soLL;R>#>YQsJe8uGx}rnntNfo%#E1oj$1abOgqr76QL$u|-(u z6&My+fGl+C!{MC83_&i9*`Vy4S^0YkQ3&ZH9{Csml-a-Lik7Tpo9cGNyDQ$`pysz8 zDkc_cGcHW(Sc$39C(7KKz`t#wYFO$j8j!|luu^zE?FVcs&Us+Oveh_g4;PrkH8Y=P z_grMCJn96FOAzLfpgGoBeP$Wlo^P(c5$9R4jq-#ELi;9Uy!0H;cKa48K6oPfR5Re0 z@UXi-eVL(`h9u*$^73N9>iqEkarKr_RkrQ+w=_tDbcb{bEV^4d1Zk1(PDv^0?(XjH zQo6g5?q<>PUfj?A@Aux%N5;@GV8wYI$DF@8PjCf6CH?MrCRdr_@E?m2S6SE9pD;W$ zx1Fl$5r|Hf))H|-=1_{dG(FV$v?LH*eY9=s3Cu>DxbGo~Bjk;^KMc)IBsc%o_N>Wg#7v5%_}dwGEkP}8l2 zsqvdoj)A5&H!VDUiTyqr@~Md6c0w=~`0elmj2@cxy8%I8-XdPnO4`8dcivhVMdV=@ z?f$qV@-OU{-ZC#hm5G$1AtoSGWCH}#ZFbKH6~@yjk<0iik3`r)+q{zIG2QO(qvM2 z9+JCs4*A+jIv=VN*0^We`=1XLR8F^3)<&k6c1iJNjoMkFTV0pnOc+`1`6(r+?f;-8 zIHV5PB}pNEL(F|xzhTr z27HB1&tDParAUrWui;U*V_vi#ao7MTwreN_rp{6ya)7$?rRM+H-8Zd5qY?Iy6U4q2 zIhnU`GZDpk@)cT9L$u|0Ozx?plV|XQ^Cs+3#E{kNiA`yAy7SV9N3Rwh&*}7aVGwMx zX(E0ugCDLD-lW(kL!-~Pf6#<5|13uYY2I$Xr%I#f;6M0+mOXO2NOg0zK>6jHKUG04 zsOo3gbT`JV(ZDWIu!VCk7@R3ZrCVf&aqki%`18qXd--pct^3S0S(xJEMC`W!bjsv> zn_pjo!$8rW;p)B^MK^go7MJ}IH{UA2ntm6%fydSryVm_|2xZ1#lGL$*?bKqEjT7xW z3E??X;VRO#-xDxbsHG4{&Z~iG1gYh+n%^+&e{q61V1w4j$0(ilb4@&T+2~OsNNGsJ zWo0974O@Dd^ikG)tH$ogo_X-tz(hrGCP7(J19{fDw{? 
zb}RSW<{frKu|OPEQ1#@LJ8?^9>1X*PEB?k!T>=z#AwWP+Vn#UCN#t9zYnO9|DR4hs z004(dGcP!I*8O+c$U2q_H$QOARYsiQXM>p*l0YCc=R6oIXGy{!g5rpqNeHW?$e~8R zBH9o*NY*zmO6tC(2!G#GIwlu~8Q4{3Lo==kfj0n1s~jZ1E!txb6u?G*l!f}wK5u-- zg!gTPDUy(u3A_V6;^rV!`^x&uHQTTCuRa+uY4$&3A4IW(vY`nTX*{|KY%F^6951n} z(5lpJCz&_aeW8MYsMG28R>!O8Kd#eXGCeOL7iOD1ivYk#(a9BPb*=OLB-_|Fo}$5^HB73ReAy%k~Gwwq*;Sz_ISkPy^Y>XUb7z~s)( zjk4^+X}TBQFV_gS8uqeHZLHYB1mXJ?tdne14xN-!`&H&@U4e}0HS!5z+wF99XB$1@ zx~LFyomFVPhFS?tcu3MudmW+)A8Vz6XLg!P>5*5CjeC9$Lss3_d3kX-z;MdI%Bl;D-NLqggj^ zzkFfi{}GvS*jlV|6+_P`P-3E47o=fIy(0q*gGen5hFgIRtkZ8tl}7Tvz%UHR9SXD# zbxe(*%$MuJLyd9wgqF)8dJ2b`OPfjp{uURi3Z9u{nlwcKbRON$!5D+O*c)HLxHj+;#(aq_ zvv}e+v{s5RqUb-99|GC%5F#Ml4vYxDJkVG}aC!4N6hO0Xa{9-WG=C2V7=N+rGgVouXV=&g z!}x2k?Z)O4I?$Q&G%?0IZNnNgRHN$hVM{C)&3`n+^TFePm@RQDboTVBA~nq}9MwRv zGAIb9PHjm;A`$`6(Skv3xBRW?ly+FKnLMPMgh;ANjvPGyVt_QtkS3i;`uX7l2KYpT zZ~|oY`aH{2$Nk#;cv83~b?nx?1{H`~WW|xA>K?ouI(hTv1bp^ApxE7SM<7y5S9fCH zAe2)am+1_bkV}b44z3ob} z&J;fHs|_1LaiSr~B2Sv3!Z1hR_+{>+=@ap1$ZWfPR#e}cb&J}%RnZ8wl4Ke#QX@#_ zu4Q@j0-CIGJ25$N4}kVfn$~?aYEf{4*XcEh-NqryR~Er)GVFz?L1|+k1`D_L2V+cF zCDRp>?|`VxDk?m9smZd9XH~`%A7Zc*8N0-L;`yy?N84V^CEhc^1PchFVH-`nUD?*&4a@YX%p-vQl|)n#sJ=Z|84 z-FekKw_$2TzsHR$HQSOl(}uXh>y}7zZsfhg-u;14Gi^+MHzaE@#jcVCS`OQ?XWSuB zt!$kTC)k}v!DJd#N$dJw28+Dp;?WX`Xaul*0{+MLkqT_PPQ}-oBGFef(TN_3$#9y0 z8w|Q0q+mPL%4+6tGc9boNAvNtB=0R}8-lxUV&??$G|~6%{wYc)bR>w2{t=a)>{9f? zG3|Z{5IxZ{iPk(_-x`*E43$l?aWZW$aMY4C?DoLtvya+u>w<1PbHX+b7)#i+d~Mq*2bT= z<>Ixl2bnq*w5=DHU?`~iinS9z1(p!%(XRPoGc+3EyHrYZG?H}6T)I})Z^kwExa;U???+8sZI>r;Ayce|Qg`V#=>@x81 z=IT$IdU-0JOuwu2*UM`Di0gMEDBsF|4TsNqfZW*ssTF#2Tsu159*z+NjDe3Fll&1+ zf3`anUQ{nhsLyhtK1LqaS?;=igA--BEu)C&4Zmg@9;_!s$!p2UssmQlFm&KmOspb( z7v2b`SBcUmfaXnVebBwH69pseB&kH^Mwrx}XNf}e(>PSi8P`0~tsQ5mQQXMTy&AQ8 zt6fp;`8>${&mWx9$siIYI&(=LRr&c!y~>4Cvh+CSH;qwgX#!-d0lZayp-=y3Otb_3 z#DykpMCHzlK7!d&T`*le==^uY&rLCzzVs3md2Vq=%{r-chcWKSJIbE_vd}Nk|N62n z3y}k5i_A)PD5mdJ@WWg-)Tkt+;4&d(^8`0{ej^e+XCz~hYcKnH`NYUxb^-}OWLReLKhQc#8Trt z3xtDIi)|?O6nIf9QVL@INN+e3o7HNC({ryIm>zET_cUO^IEL#>7X395@ zEr=<>Kor@)1WWKWdoR1HgfgAJFM)l3JonCVvO>?pq9QmtVKOzYihgX!oV{6 z?Ioc8J!7Vimv4W-y{PlM4L5gK5I4qAJRQ>&0IaqW-}mPd@=-_Tq6 z9jh2#S+?=rQYi4}j}<_VlfJ&YX9k!kz8GKPk2vrUeOx!%>NBtJS9NpR4zx&Me&WXn z-}8yRTKwX)+LC)=AmGh+cdV6GB%VEkC&XMdW5e<=D&R0np%qz#iig@Y8q&XXZYGBF z3;|4UME8v?Qr^cI7v&eBrx0_hS(_Nb_B*m?(cy7>Qs9B3TXQ??AngsT$WD@hf3dBq}UhC~J^Vmz=OF7~O--Ath;1S4)Pisu5VLBP;l*K(GB809D`yasc5 z({>D#RYPGE)L<@8BfW4YEa|gS^SLhsFmE=U#|+VK?uj(&ZIQ})ShDzj(I6P}JL6{X z8^?_wlRWp-`@3A}CO(DOj%rSrUpK`jKW>m#eeCm~OC_jBMEUHIiHzvr(+7FB^#^3V zv?9=_UNWrsyl0UZM@&Jg&$RenDo_QdF{6NY+;P=MLb??__QGSQ~Hg)9z} zh)t;Vn~|)RxYOGOHiDGkxGU^>h@!DkX#w#{G+(>(4kT2u3dOnKOC%7gIIKIW8P6Bx zF_09-k6u-t3xoAtek)b|+wR6%5TPw`F-RJv6#^X+?JRBWV5D|Jl3!1zTc>-nx zSoI8BWJ+8%yZqGfpboq&yG_f7lGFSrUrS|GwTjTY_UYX~sO8?iTvOagi^T}3e7uJ4 z^<+E;|LD5YkuQzNr~*aA+?(+A0PzN*B3o^E1pH@gfPAAzu;8O5sb0NjIaie58}|J5 zcX+@-#|7&?VEZHYe)YbhPVPN$@uz?oFeq{l7ViJ+m=hCv2c9Iax}<1!z4-0~c*U^X zY2mzfKhD}4mx5v6IWGXR9uO;#e`5YxB{Z7BuO(L)j=nISB{y)F$!p+2L-@#HG3WR! 
zWVfaKlzsJ{U*n9xuHz-(t;p+@)4!S!GlmWNhCV^)6HOTgxhnG4>i@5Cu*U%f?JcC! z4}oN^dIF95$zU=lqp-_0mfxien?XC?%lP(x#i)PSI|Sj|G*F75&z?oZb~gtHUlXzF z?Y)mHttkuLuC7R`V={Nz2;CpEp~L|c z6oC!sg4Y;Nl?gX5g4j6f$e=HP)sTIK9f)D(=J{b#4)}UKHhY!c<6;jM5^A-yLKE+e z!>iKRtqTKXA34{@Mq3zg*q%deSaamv|I_kwHI{fWzUe{g3L$}j&GJn9SNeO-cU*ED zZHOVvhm=Y9HxUTP6zc8hCU8cdK%uFV6Y z?eX|NYCg#^K!JH`yM*)cn8i56VgDf~pco3C;~HxN7?!o4c>^W|y}(*n0Hg_{1<<%F zhtF+wSPwnLM1jk`nm=Gb>d#L8k$$dnT;Ad@SsR}hWwq9=Q1&dG*~zNZ>&^Yx0AQ6b zp|wsaqi$qpq8CF+j3b_(Qj-j+c?@o1=Gs<&-7n5jaUs28^Sjo2OTB74zaU!&J9QXE zHEtOnk6}EE#&f#kgYc4W&UJNjUavC?7iXKTV$k8o;$(^#GLw+5X-+?LPE3^ zz>C?@8|x^XbySteSnVO!X*)nXEnY6{_N5NRLrawITdxX%Nw0xmBg=5N7&BtMu=yiA zxi(;%hI3flP6S@jhGT`q>tcTpM~M}=UsrKt3qJrd?5LJzSK=0G*q%TEnD5r~Ex;5uhKWvB zKWGxt>7Bg0Ck(Eqx3|s6sind{eL{f8B9MbR!N`mN1(B^*8#%Y73au?;aZwyIinkcW zw{);sG_fCAiOG@^OS7Y|x#=_$?kfSNlbJd$8L!!bP>$cl@6fNIV$o@_HY`V!JV1%O z@?6H&tgkp99r$jOT?3#Zy z4mGuNjU2xNj2MFh(kX=7GnyRc)pRoQ)$&4a4r=9a%z+A$SGEhjrF**}N&n9HM?AwC zw-NlL4Bdx*08hxB_Z~n9_kBVglFZ}VE!ot;^A!kk-RLj+J&A5DvVvZx~}xEbKoH6ssp< zfQoJLU{P_yW`J8U`_7S_Y-Cke@(N8OlJ~IN6$l(uxiXG<-`@`v4&QmIuK7Ob587(k zB^=Q#2qIY%!~g0cy2N<*LJm>_VRfN;a$Oto1iK)kg<1X|3)53<8 zxN^)*hG!=!|&~?^|J1Dk_{fvWT=zn476gv|&MPr>Kn}%Il!aDC&SGkYU4^fqMx!}gB%x|5gdSJ)?>RmHHowBgs3V{ zy#u^8o2S*=<-IO6_zXuh5zO@G-o^|U7n$qOji_gFCj%mLWzUHZ;p1exRKUmeKYGMt z$X>R`kKSGpdv}b$!7I&D^^{L$Q>5QCU7BT}dl16jbm>U{g9XRtD}H>s?=^-SF!K$p z^0_v*2uQyk6rMOM@jM|*ab!&D5fLoAx!}?{L0=j#KertyEE}P@AW-rpd0fb2pGN}O zu-+2zmFWtvNpY`m!1)yKXud)HE1E$TY}@P)T$Cy+a&xw171@4;8;|*joNP%>wx;N_ zMMP=J!;JWTK4XQXzA?|D_6%jXfZhP?Pac&`P<@zc7AP*Qf^_>VdMd1i9y|G+!2c*S zpspuG)Ti=f`MYzd#LALKh53R>$K}UE;%%4G(?JnFIh?oL*@E||2|zhBl&KcYXFYgR z4i=`jc;A$RVqguKWL`!R&jMrKl zS@68glDky^N>+zh3o6VreMB7qgsaa+hx2I!xb}7OPuRf6)tI3qG@8S-f7{rEXwqCR zDXN}m;`$59^%D~8?c}B{{vl1%%dC`yA?m!e_&TM($rqd4?H(cDoQ@#bRt#OJcr7uWfqFM5B zo=mwEtr`H9Kuq;IrDGF9u9iC=M~nv+HZZVf=V^coi5kQ#>BC63+1P^yJs2!T=v{Ef za;U+@?N{l24p7zrUv!Jc!PGFv8jA!`^-R0J6*}nxJ-)cD_^Tn>(*K%#N?PAmiwJxB zfv|Hskw`AZSl6J*EM_suDzqG|cao0H5ffAkEZYp-pV@|oQVK!?11N&-G8@Mf=4Kl)lFMmW zbU9ktw;pW`e@?a`7?!W&V)Iyukg!Wq4bhG-m)({6m-l0D^rcF2|R6#EVv;Eluyqi}|@>_<>}INs4TjUYf5M4h8)Gpj`mM*W)#A1BXFZAc%Do)9CX#uB{$6#DhIA`b)T=cu39 z-G__P0=#98=cuz(ueANKDYNfQ;lF3A?5Ri?REiO#>cnoC)UzNdBY!6zVL~TVL`|Bb zul)xQTxg%#084g33Qa)m3r&xb7*?y<_(ebSB5Fw~!{A!ZpCCEY4t{3yTrd$S5?C%w4QOT8E8rhJTJo>?BEF?KRrPrC57hgkng)WN5ZGHsM_1SFTe8`ZQ*4Z5O8HB(s3E1GC)4~p~E1iq-_ zCr(mH)uZq&fTqpP(d9&dRO2FlVUvN5f}5l`F4}2p-UI z-7btd>r^z^B%)^~w~98?mx5dVn+0GhUOs^G#WLq?Gzu7zJ19J+8G{I;j7V0q?^Nbt zXsUm;^N@!q=nFz?W_2BZtrV)8E~^IShhy<@)b}Xzps&UyMqk(^OO8{D`)(l1ESqNDg}upp$Z>=V5o35=4ntZ^9jA$ITS>R`l28Y(omVNJ z1P5Fo4eEnn{=kqBIItF{Ig;w^8~MlN>4f_LKVJesnX513NR0*6USQzk;K4hL#=4m{ z-e?jF#V}Eyy2vcuPDrk_^bsB2G@MItv9rbosF@Nf0fn(}cO`jT-}W|hqWgUb@OxuYeA=-YU-amcHH=eCR03AyH2$OTePD8Q#jYL_MvmEB{9M9eFnNk48mwoqV zF3Kyu*?O6SC-|F(QPH%)HNZ$1Mdb#}{fxhZVZ?@tBN6*r9Ar|@jgU)Ki&*42xA!@3 zc)~tRAT-KWL{h^!8l9SzWQC{ric>6BwF~$~g}B%%Cy(gHIv~?WX2=V-iiWYdikgSi z%Q@XlRUMKWn=%Q~+BzMWx@wt{CwX?hpM>4d*ipS|O+^WvtiM`7r)et7t|Q)Fvr_p? 
zfHV+#VOKjjE2N zl!}%@gN{kl#?Af{tS-^)ju2Eyqy*!khirS#j`HrO(h>@5&Ck()BQB7G!7my2dJkdh zoO1f~&y7-*8W%xicAk1Ge{laPe>?)OL3>%On4wEy%oO^?@HI}{grzv2aL3%p-yk-b zH;8TWwus?pIJNalG?iMeyMKU2Y8R&vQZP0C_upil2-_8!mt;J_w)CEnz@WZ>HxaYy z`}Im{&>M%+_%vODcz8+Z0J_UoLob@>mAtIQh97%uo{j|g{^BHVJ{wq4sQkl^@pIzJ z;8n>`maP{!&w!o2QY>>cDlTQ~pn4Q7(L}Ku;!otCvzP(7@0pCVT<8i9+axIfPl8f6 z>&fO?SK0@-nCPG#gN|*AkG}Xi^ZesiZFm8$8qs7%=hRE zMJCmZ5l^=0AvY=yE_rh6QtexJXR?%)8Y%B85IBpt^^~EJKguUi|3JxPuTND#Oa&ZT zY6AkFwkySIGW5x~YX^nLu-GA~FobTzobIzafV(;=eQ|(GFdlw)Oo2u&3ymSrCU@cKdg5-aaEqzs>gu*S-#wWE}18j7>24tlX} z8_GBcar$&Ak$nowqN(Nr1Ic%FNSSDq;?9vmES1>u0G(VFw3)-L0$|EBkP9k9aPL9! zK}JqAyaNC19jcJXUTrESqytNiY~4$qd2d`uW6>FB!Qns3+42>q4UG8E!SysAB8JE0 zB~j?kLTRn23U!E4aRpwC1fAs-9eXj~jR3Wdmbmh3XA9<$PaPi3gr=b6co9?VwpHN% zRe*+;s5N!<#DQs{N&O>$+<}!H!mG6-!gb)GI_ueLGDYE0kc#-B)N2rz>-c1(n&X~` zV+6yVVA?qed5)pe-~Q00H<`CV*(>Urm4Cc@sQGwWdCR z?=oE24Q2mNg-RgFnp!FnQ#1PdzjN!qj*cBzR}m`UfY5=Zm74vD#_8a^02}&;P%c)? zgOv0P5EYJ~rC|Li3CAJ8(b4F^j?u12TeRN(cB^u6a6><@x`L+p9=uFy`=XnekmHc|ssqHpC~^_lIb`F2E@4uOND2xIBn^+Y5x=LLQnXP>LjKt* z5kwE06N z>ExNFM(=Nz>>tfwvLYZu74siXAad23+KL7OC-z8GYVN4gTld~}=G5ae^U(l)zr3U@ zAiv}@4Nw*~+@$p**bmE_tKu-^ch=5+**XQzF~hu#wHmNq#6ofC|4gzzNhgtoV}Zi} z*Q8(76I?Kp2zM6;)O`h+m;5P81fjfi$b?@uxkj&>P4kPVP(C>hGu=&i>`FR09J8d< z&7rrKpisrm4GimHmp|f+qbSO}@?KO>wgopQqIk5lC7W^1WdF&UhF)xU=P{a(_sdAJ zI4u}F1}fW2^E0ox6@G+iA$ab%^@uab+NaflBY!#P%?Kgk>E{GrpaYv?eHTK&u%Y?T z566eP3IBMJGufgZ^r3vcRCvYMY=e;GHu7 z5qYZQ^flHHed7KRKmL#ouC+@l@D!8d+q!u%fsJ1yVX&2OA$#!`a9a+szm>=VqFolf z4LtW-(*}Z}&tisp;t%R$6GdI1{(uu;F9@<0Qt2L@bk92-v+N&pBjpj_l62MjBmuGU zYvrbp^8X|3Eu*SzyLMqfQaYp;EnOlZ-7SquH`1VVw=^tTq`Rdp(^g<)N7_z@%-T zmB8&q&A8c>UfR8G6E-GVCVXJ)zyBGY46RE3$eRS)&*~lqQw&y4SyD6fXeV9E399oEdzy#O4soH9vtb?2Km1F`{p3cvBzGl!aTbNx z>W?m)V_K{U=cuvtZuhz;47_hWHQh6@!=xdZ-FeS9_~G+T4%NwX#0-DJxx`p;Ij zYyQ&|Qq}H^{=4U5uj(PEPO?OIh^mXCPt*%HT`Ao9ZHVh-G4tDLAZ-Ck3d*1c$pu41 z=~-WjU&aJIgqTfsV&fw>8oAxv+)pf)+D)L3I=S)Q zhWEDx2vng9hS`TiO>5=T8`?`r-Bwy$E@i7JIiWtSVt&qIzb;a8T4>o*sO~hfD=s`C zTW}a%{nf)lOT|2A#*d_-(${l5yZ$R+YDtj!-|TT=9Y z3()@g1rhCk=3A;cg}`=zg2zr;Rw2JHdQ%O%7^GU{4Ic#pkmj<9Li}S&DB}gydBSFA+@w-Jd%Q>v_SNFO)be4z6=H zil&67)9-|g0C^gx1NfQVR;JX-x|;%E=IxHt?f1WEwe5eR1qH*3+5>$mXHORFspDSJ!TVKR~+Z@$?4=&ox ze3G~0fwZ2^L}E{XZM2CbO%J-K z#if*3);@+PF1<;vM4#1gCXKlKom(>A)7=|+EKf7)2K0hFe}w4MwIwwNo!5CLg$UwN z^sl6BA2wR8%0~LPcfjL1YA9-A*zk2fH_L`xD9Nh4w1-#N!k zY$?|uc`JD!h%_!*C%ZjRIs+Vpl&YGzJi`KY&lC*Zfq|?8^tS>a?*pITNVHGd7rK@s z$4zu%`!1pEVs%9sh_rw_L!%9JaOGHxZeWXO$X|u7tsmq#5)X4zDl}PKn=Z63*rUM& z^m2<_M6JI1uCs2dS7k!`@X4_!1+GH;D#P8fEg6UvRDkEBy`gXw_XG-8f3~7#eNZGE z6#tDrNB0p&zaW3|Rb{n0=DmI7P5#!dGk8~K*6E+W654WP|Ic)OPj`z>`9qJ`Os+Ss zEao9qN-a1V4d+Dp!5f1_+IZdMxv>d-Mh6$~jA$}Tq4I64S_vt z;Tv%jq(I%$e!%+a-pBC?9j<>w>oQFO>8woZT!z(P7;_Pg!(X6Nw3F3eAQjwo!Vi$z zj|0D8$i4IZRB8mc-nS#Aep^gZ^$NBwN&x4&w~d@UQ2tIKokn!X9lMLy&{YwNJtXfE zhjMOXm*D*ug+ph3(zUz7 zOQ1`Qu-h)>_hp5X1o-aL$Lo@IKZu3eJSigVmbAXoD}x} z+IPJ?jUb_?ul4trGfh`jrf#w$x#GYf`%#+Tu4mm>3r#+mzXrOto`a9em9>PAw}LmK z4Vd}DZ&b4vS6UjD59-Z`A#w z_9p@=somla=b?5a9HXSDq$I`No3hx)r8R{O3-A>Vsm!513ZxGo=(xu*z=bvVm9dZ{iMue?X zMtXni{+^K~9%qCh9AoY4ctHE<5AgkP9gblVe$~{I4Z4+vJh2lli(Iq(OblxX zDtyM*GFrk!s=0m0Q(0DT#}5HEI+d!#7o2`pA4_8IMMs!mgX?s(jV|UbPSt|$540CViN7RJhe-GG_UfZK@DFv`)jpF{ZkFN zVLYbrzclA>i~b8}R~n3ZJtpf4A!L9e^QqJtN`uH}YH+Xa6>DPwmj#Y73Po$~X%3`pxK z5pU^dkWmA5nOW0E@T0LJZG3k@9Y-MNF9e58P)5TDT|>KKGV>UNDTA;zSQ zB321ee*o$FuRAG3LHH+1m_);|B7Fn7fO1J(@A5^Q*;&!`r37i;bfp}@k%%u9t>2}a zlYM}^q?b{im}AZ1Z-_$*x#LR*m6(U$n1%%LlE@MOg9q2p>IvW1-9hM{CX%GlCPW`K zS`QtIxLYfk6Zje)@X>;`y;WA2A_TCnNeTHQ?Phl~iS`y`2`Z1i(m=)D!qm#75SQGS 
z2|!#&buEp2jlz&1k8@Y}^!u%wmIK~5Z+fs-7P@@Sg}Ljd$9FatCDh&1)hK>Mv5f%b zw4b+R{<&gqggw%0dClk4pV86TnwBJPWlDr%cPW;g-oE;GI0~f4IpjSktm_*`y^Q@( zqQkI=$e{G%vPHyK?iZm#iK>M6v4<;tAs2a; zu`IRof%(^d4M&#_Iuk!cHoSxtsza}h{kV+vz*#ZaKAOAamwtc`z_izKBvYdK^H@2- z16!lF{_xC8+RIHysn9i32aF-hAL`J}B{os=N_33qW5tTMh!e0|<%%yKiTv-5Pf7G( z39n}H<}D@~Vmyu+M4YrIyer-SA?SLWd9x-l0H;{1cU5Xl1+lvIFr^xo?*eO~kOIUT zECALP5PK5tC(o+cN$4Yv6wl_6rE+wl|GBf}=JMJE`+w1ObjWCzfhBML>qh){Ekp*^ z!XQM&17!5O5R&_j{Fd_((aS-hOodM3IXZfJ8_48+g9$Bi%?$cbYD( z5a0o5ybp%)($}RH(JZf*swR%{%N4@4#92RA#ZUQ&G=6iv;&=~b@gDuh5lnJTSn33q zo*%b|F>LV2jVCF;z`P*u3fPYUioFQNV-9hmdYUhj$i6MXecQ`zrJU6ZfuH6GKWY5Rtde zKzN8;c|88TW-at2*7$B=<^40yYjR3n6^}I4>VuyWNOJ{DD&Ypm^Z@+^%jQoeUgKb^ z^=)6Jt+4-G%G!erqxV~EpCf*eTHGBaUu~bcR%oR-rj!1AMu{1h=;xXV!+C~@yhT=z zn>C81go9~>F5?$*b(`q0id4_okPO}C*`u!;kwAx!9*%s~-Hm}C^J2tDc`;@D@wG)Q z>qy17*fNQ=xrnN5%tYlI#GArV!Sf9m#9XL=)?r9;Qz!J(U2V|j6=7cGnnyPeUcEM9 z_P0S99{0VST`$Bl1;8o|-ZdY{$aO$KIj7rfQ%vuRPW0T9x1DR)nfTBGDxQh9+5Z_S zOCaWJ(~v}_Bk|-8Sj&8P-O{rH2ayFi3V@i8DT^lgYe^mwJMkQAk{fs2EB(L#zdBh4 z{YpXdYUwSNgVW+Q9@N|c+Nt#9OSL52jek}t&_ zv7B*!dV6upd4)%(U~xyH_MAu(5@(!0uI0}X0VQKT9)7iF%u#@ACFU{Q!5SX{x;#b% z36S%&gf=RbkLSB-U9nO79+rMsk#$U-pk?=`SIWLdA-bGX8mfbhjp6we7Y`x_j92*7xY+?Wy<>{KHug=4Q(a~J*dj-)ZE=8)E=?zez<&^rF`$xI z4sZ_;lD+Q&R|igHd18fycSd7Cbw&kLX$^|QjuqtbtU5|J=F(PV^%(Ge z$)kZx(4CL~pxQngWKIfe$<_TWYYBBp!}`oSzR0DnxKHErstWLHe-@xkaR#flj|tu9 z1~@ELgKk!9aQoXL9@O%SWLD(zb4>26CdqtH@{)dcH5u=&zp|RSTbO^&;rKRaP%8HK zjVAZPB!YV!)VL!h-y(<y+8^24&;w!Q&*&YDqZ49OqkrxN_?%>Q z3@oYsr%FOMzIwGzwwPx!x2LD@n~5CyoxRMj{bG-(%ITWk&I(oM@!ePdF|MHD!?`BT z8mt-26aPY6$ z^QIYW&~A>g3LUz?T(j{F6<;NH$XXH6r&zV25xwPUvE^fd!`_*47n?6hGVdfE#Ob($ zqKq7D9YkIa2>I;_qP|S1Rq*F@7D<6o^pk#c7g^BZOSJ)~*xDbI{pySqyeriH@^EJR zG?|Vc8LytsdC>H+>BzYiB3Hi#6i9!bPjX*YOJA2Vcd+C{p^}QJT1YKs4;$y@wgmt+ z@2;p`L3E zL&SW><7O$sh3QW$OdqDYPt>+Um(osnDcP(Bbu+rtnPr6;#H-6?PvS+y-pxAsP(}X^ z7Z9xjDZf22(TN9S5d{cF#gUsbYD5Dq-D`{5$HLY^eyL>{=AEY9ZS74=x@QiEm@KiPBm>jrGI15} z;U`Ro&&4iFeWIT76iCHxamI_(-gSzV0An*#B|kr2d#csyui?NAqBM8%AcF+NH~(EJ zCmp@t!_QWGY@IwmO-T;>U{6~tS(XKb(owXpII?E$jLLWOhLy3=fDbc5EWgYBE0+Z9 zi@vTUmds})xv31p_|op7EG<9@GlzSri=cW%PY0(nEvFfnu9hw^FvyDPW)dcAe5s43 zwbd=@gzYd{3?8g_>wc}6Xwl7sFDrr6qTe%7e*@L0TE(5EJga3}Sn$8sZt?`$D4AC) zf3y3o>h@;>b{x=vok-&k7Nn(8Js!?5oHAyO!BG{B2$}AGu7gvI%+&FfpMIu*-ltn5 z*J!P6bR@?rRa^WJP7Dl~x*i(D9{E89r-d|Zl3*IWk)XRGdzl~KxBnTprhz)S8i(3v zj$6bua^W)%kN3vhszoMCRZenfHJ?Qlr%gAXBwQK?|YyUG_236BCRaV}h3y+dDrO6opD<&pBJ3T*vk4hoC! zzI!KjC<3{THsiNSs0~hJ{tAFBRfwz$0SM+-`NYE$jc!vJ8LWDuCeGCI2e!Pg%>FR@ zFV3&M2Y@1Ot@(XynK~4UV=WuFV%+3d@Qs%M@2Dxh;>#1{>RqmjAe#puh;_c{l0Iz+ zdVGF$8y=atKB{h>M4`K& zx%`%p8l5>l^8rmR(A0KU

    ;HRDcUCjW{z?{7NmvqEx_b{ktH>BIXwG;hGbk#xOPB zw2A1YtQ{0rJyDqCL2fe9Y2yBGcx9Oq>yCAd-y)*=|3cSe!N@lq4KD{FajGOo z&n3PYy*f0ZlLDlT)f|W2S?&QLr%syqmGRyL=6cZB$EeGou?}g@@w`1-^E0FR%@5^_ zxk2jbH3$59N!*Zi0;M}Ku>u@wUU5_Q1kOpD1VU3BdTE-e`ms(K9E*<1zD5vRW51v~ z&>dxTq&1W6Y!{5_+0NoR;*e`no&NPX_+B(i)>Ecl6W;Pmz@|w{ki}a_c9aDtDCZ2F z#20QB_i|>P1BgZ?fBCl)x0hI-^73c!_!d=cuj(8w6s^%EZ3)~{+(2Gd%wj2phr4z^4 zT2o$Z#mtfPqYUPDLV<=5s$LhHRriPO&vL33MwqOrn34v@eAG+?@YIKWyJj?4DGDHo zQmM2%{K95>_LXRbOg|fAu?MfnFsG95@%D;}737ACeAt@nUA=suOi&eQZDI&topAA7Z`F6*r;WA3JlRyQ|y z^WJU$ev?M{L7%i+2=8rU1^o=iVZ^o{%?f|*rnKWBzt4Co_II+a4<;G!_TRNxwT=e0 zR=*!CVkh{}32R6Yjty`=QCs5|H$3C8sGlZ0=Ez^;HGC^O5q!FlkP&2S`cv{Rly=wZ z%8kWZeKW#hAc(*3Aaikd_9HMrP7}r?F6@-}Ra}j^f#EK$CRA zDAU^Fy7gyOoLr^rJ$R~^vQdV@}xv`EYR>Z z7MsJJz9rkl*E|p46UQvmoB}!A2Rd~2iULx7Z0o%IE}C73q_HNj|>z=Ji#b^JK#tVr^8L z?qVn4D6N)L@rjV!k~?zj)h>t}j5AI~ds$H(CUVZnC=jKXyC zb4>8Lbwe}MEwRHljFd3b@n6X`s@iZ|sMWCHJ{;=%?*{_a0>@45x1pJ->L#u86^0}$ zq7$dhd(V>T(vLn8-dt`h{B7%%eV;MRsH>uu7Z+0oo};&^tbXAi1>o9e32D;nfjR+? zDL_$1EDt~xK692+ELUl8-sl-N{x0Y>CYUzgZv)iHQ{3zxb@ofXU6cs!YaWG(F+e+< zO}lTOZ?sB$LAlkB+3Bi{wgpfhC11K7UL~2N)RjbIPnf2&*G{F>BP!n+-^a}#H(k=C zZyeH^f4SZDo+}DfTm3I85))4R61#`y&3{JdKZyJ8AEEh|27;wI4DMc#iRDR^{ONeC zf&k6Z9-zfIj-x)u(k!f2Lr>GF0hmKz)lTyyDGTO`$3@Iwe(_L|EoPP$O^rJ6! zRo=&y>=cFXtxuP>Z1W;t!3(wy9)ge%ZTG9WrC3J+=_=rOBl5*N7~pW#AT=u>mhM2! zc|{-z+ruda^OXU$$$Us3f>{V`(YM|lgVOFzNV3sWsk z*em*qdj+=}{c*g@z6RWf_1OX}UUYCwW3+-w0&BGG9XM&~UyvEA6)G!f3gUy`P(9wl zEA(13fTmQrE&CH=ADadM7FFIhaDY>pXiutbe@8}fO0RE_4j_n(-84MKGwI<5nvn|@ zw^Jj+x!>&cI36ksa$VM3x9x(c8`$$@(vqjJ#(}`L9ukmYPf^nT{*1p*&1f@~&TJ){ zmRCnbRE{yjQX%_05JyTznp0{4P?Dqd@bxg@=1tMJ+GO`SJj{5L_f)oo9!WwLzYpk3 zJZVnm<~4fq$>j^ziQYZ7)5Z5*d8YCKiY6*xnE3&nhKa2b(nBMEJ;j(3NQMOBDH~9T z1_Vd@q`i=3>#IyT?i!IJCf~V0mW=T)|1@v48#J-hFRmX8ZDL*DG%W+>0pQd{$E1=A zX4_TmerCMxwDJ!dIo(_gSKrL~j)UDfr%fNlbeFXIhNK_y;JAlWZ zK?UG&MaD-^r$(wgBSfm2obnkTQV_E;>+1%;yO8!QH48ORt316vPqHF}Q*DVjTw#BO z8qTyvvGzUTO)=ZM7SLGYtDf~U;BVOHhZ>%Bn7m$AWF$D0$*YP51_bJIc+FT*P=h~E zF6&z4Q;B%er}xf&3e%;wixI4~R_^gYhSQTrl90|B7Omo8c{q2Be(U4ollmY9`4sc) z072$=$J@dJv5-&zKBoB7SCYpcG$(t_0gHV=2s*i=GP?jR)P2jM(^JwIF_c=%+q zpL?n3N*^r{p}4>Z`_wTiE6_(So#)9c_(oAM^5GRX`OL158)!ncXg`{Ex#GTRAuy<8 z9QUirLUw{uOr)l!x|BPd1m=$@8;p~W*|)83aKq$EWFx)%_F&WTnb!Pl-USG8_Jsh% z?)7L%Muw92<<-Jud4sozNHK0H^L4MvbT>7X}+*S#u(4VmVK*phg;M}0p zUVgC<+sV%6NH?9MrcF2N;nDc%s&iOB!2{QgyB?QC(Y79t`$>jCchkFDIeLoH7O$|E zeo&$2A*lV{RokNV&5+tvlVa(JQz&@aHOvZP|58{8Psm=ZM2LkyV_Th{aZWYNi`T78 zzD?o@X%kEfhOhxzH6Q`S?>B5d_Sb&YjKZNcGofLs@?j>ZOAhzp!(VjuSY*GO#{0Hf8XOaiNb_1_u!_exjh9&#ouhwztq_{T9A z+PmFw$K)3n=Gtf&Dic5z2G}}aQ+f>tBZLR=Zv%LMC;LzM7ZfWNevcNdaQxe$sOK}M z1HR6?HwOMZJ_sc3+_%1U%HEhtZmCs_^Js14HhOuA4nSa|M$I+BXe#kj!lkYzMf}qC z#apRZ+xaVEX!nZ8pqhQ0sU{B+Z4~wW&S!O1F`9yWbYiedd+Xb6<=M^87+W3DR%#Sg zO7yLGLA!x>@Hn6gw5{(nD`{z>5<`2>ZhqMkjG4km7Jn60Ht2%&-fkvHt9QU*UWpX>ra8G?cT^LPXEOAGGMuCN5HLD4oh7-n!*1-5ajX2Q>}%~il%Ei(X0Gme~?8&*&CjSfn??@R)SImXp2p-q3? zjoDk~W#u)7;ZDZPIyouaM%$9xk|kvc1$=BZHaFHqmGlBuv~}n{#HI*oL#{JR=6NUw zY3IEP{N6$+&`ib$Jy0TRihedd{{H*ReKU)Q0}4`_L!+9w?LW}C2QO=S3_G54|SQh08*cfdY!*L)5JFyHWm|NhV? 
z3cTe;TjCnjfW+nXa4SuIgb$cGw6A;E?kIR_tbb?&6*9jUcUnX)lkuh8wus{$3zv4> znt(8gzk;fgdcHEz`}|>hLAe7QVM**~3hW=4&u%51#MDamsO5I>D;L@?t;hju4nNxr z#UnL96PB_We}g3$X(k(2{OM@%Y+r5kPc?(#Lo5Yd0iP^ z3uinuTW~zfK4++r>Zj0Z&y!ctRr!=s6qW&>?!9|Hq)WCj4M%(-7A>Y$Xt_JTQwLl< z=J4C5?>M@$={sxH=gkGjhco{jgZn`+UU{7VZ=S>ynkN~{H26`qnHOmi32L`4*CE0) zg!l9m_Y9B-6yb~}VG=f?93TJt;iGo*z>Wg!IsT4AOgmUM$N`Ii6Tz4gZLDj!Kxwdw z7*dVwm0zIyDMkE>+eQHjB+Di@xysEHGn2Wy3Z2$v4sM-6BM*|RyYh-77id%?Vhn&I zx+x@QXKb>A?NIadJq&eJGx;g@61o9);Q^&}=cn{<(X|NM1-NK;YC3Y6gnVKDP*>hj z0e-s53+%Mq$Y6XnXC~L`j%3?+N%34o!ob0U;PN$C8&25VmvjT6jwNEi2?0M0^XeDe z`S$+t1bG4i=C}QR!vmc7y&wHvSLK;J+uCQjOk5!Lsy_cB0W)S$M<{!Mm{OoVi7mr3 zbiZt1#4!nEqBD;j^^z#8U&eV`>0`9}r4Go-s`8!$DM5IPQ>?(_H8{{cGxy2q7a1`L zYIs%!B_ggAA>hQXIBm?T0RIfcj>V50&-M;2cq@hEEbw*ho9#il&(Su>HB;MtgtttA z1pmi@9emBuyva%iCMBAu_}J`JrKOd1%xVZN-oBg?X z=i2fFSQo2S)Fq^#&*IkW+LpqIZh(^pa#I3WfOm~>p7aycBm#lDd2#3Rt=%G8h2SP` zCJB?kxonbN47LqxL788)Z5xYX0Y}9kG2qrC!em_9@r&sB0Sod0Z_C-_w+=r5Na~c+ zBf371yhNs8YWJMdgX+G(v;x@WTua;Jb8mdt&0==Gtv5CAN*-sVL+~1l+r)4Cch&$q zkxg_WaHTiO>>9YMQ~|W?Y=!+Z|A01g+kpeUo%Dy_W}mYmI^&z4ceJ*A_fCT-ffLqf zw{-pbQcne&waZQ+@eQoVnGe|(JJHBm+@EjbRc-bXr+1`y=OwN~a~K41i^vr*sAeE43qDdxqtU7(6TR#YprW3Vfs03LdWBdi`KzJ-*#gOIg5KjnfkO zTp*pIol>n=>Q63s#UBU|h%^kgfKDRI+Mz3>MQ7l3Zp(hV(Y0iqE7HYi=Bnp!1 zd7x%Bm@g~fj}fKxUvKM&ZfU4}D+cusN+CvQ*6`PHp5KfZ=93eYA9u3P`pDe6e>Oqn z`W$4X%!Q70#++R?0=Ba=xw4jaWk>1Sfp#?>w)hnl;Ml^^pX#pZ(mjI~&^Km4AdWf} zfITPo9BWCzslyEg=NBHB6WGQXSHpW9cq{}1GULMUNwkswH=U>hKL+z-de!Jve~s2p3X)uHb`FG*1-rJ@oXF_=zWqn7XTTI#TW=LSlKJVq`3< z$bgBdRVRj<%{8COSRqjtf4HK+>X<2Z2ZN|C)i_>$#~X`VJ5vdfbqs2!Sp{lpR)#qVX?3ezE;9*^LC525zCCMu`mf{tgX^$@}*V^6i@Ip6q@)73|+ zKs)ngpeivpxb?&9jj3&7o9;s3qmBz5iYSvr9_uJBCvVf&>XyIu;mwtM4gUm=H7?l- z--cST>4s@&x{?DKkK`$~gM4l_WYkEmOazmqU7%0-{SO(GN~R@wCpX4M*CB3SudsF( z#TX4RS@vIK{O11;8DA#nYR_>Hd6~ku=Xd@#C2h81kXN>{-yq=*+)^X6V*oZnj)nL} zdTaP4<+=_kcRf)g4GNm{$t~Ff`UB@NkOb_PU zZb`0J>X-Lh#X`MPWOffA63S)r`3{tMo5QXx{C=!@;;n@k>FJLy(lC+VvmgGXtsfEs zIl;N4z07aJyD44PC|mp~3v3%|f7Ig1$M3~I*qg2e^m2OnXbc)_?tXezFu+~-GAI8$ zX5Uo(JCE*;#;ppk0UwUH=4}mNtXdA`1V61#4mkG=*kQ#&i2o zN}a4V>@?CG23h;w{S3|h!J1b`bT%UK4%@7TiiReGSw+jn{Xid%l+9@nTSDh zoETT~prT=ETno6eDMdkDND%;aUaALjQhkH2(uevXS+C1T{Sf-dPmXmPCXSq+AKx}N z1l@4ne&~GY|IzOZm>ZQBO8@p9kQL#&3M^x2OXoRPE8cC#Aj~CMSj@SRHiNUi-f0Mq z@q|J%^FSIi?iJlK&neOKZAKi@(<$01pdI%*8mR}HyK>;3^axJW;4R z(_tUss__PT6W$80JY6lAfkw7*L8R{pM}$ijg#Yf84?x3!*UpyoBkB}w&+fR=rEbJB z{(m+{b3VANTSJAJfzAK3P?8~Us?U^)ZO3`W zO4bBhlt2}A#0!93d{NQQEovCpZpas3h%O{W0a(UXsi9+2bo~{)oQBmLZxN!_4QJWK z8udU))C?|ACXv=Rz2{}XMuHl@!iLJejUv;9L`LXpBr1uu%1zl&=^Tqc|G-0JhE-Nl zb~4yr16UX{-o!dGb$ygHr_uy`6sL^TEI^VZK^pPz!K9quoW?2Kq7dRZhwTZfi|7s6 zPf%&OPzftm`lVH4q$Usa-3{&AM0_k#5PdkuC(qXP0|t8I2a0Kt%GD=OszgDJ>#Mnj z2$v3y2)ZrFTx}xzsWjSDPej_iX$5i<`GKb;M-il~TucLz<|gRiwDbu#h@XN5*lQln zmnI(l`c-?`Mu zx$=NI5v%unb+CNgO?m9gCpOV!D;FG={|hI>7)Dy-QV#@KJ^NfO!!O5XH;1dp4~t zJ=N-nP=|8nPpKS&1%pLA= zGt6!CcyoEio6B@(8ldV@@F>F= zuU}*R`xaz#nER%jVf08k@h1;yZJX9DeIEC&ZrFX-!(;Nf28m9J%^0F*3Pp~3y0Y#v ze8WM!XlIt?bNJ=ziG#jQYqL_G5~SKQQ;nRp2u>AnNtV_YDHt4%Yw<@`Qk%T^dGesk zP9EI0M7dmf`?G$nuuIKWs{wzD(%Q z&;yKny@5nt?JO^y&KS$48t{kl`Dq*a{z zGdlLkU5y<`Q6Bc96E7}B8ljm_UgNxd=t^(o5*}mlK z=we}t=MRukif3v_?Bm{7XBKgoIkgg9DJQt~1|=JF?fO0F^XziIfUAgH%y5Q53*%=_Q6KT(i|`;)1bE1Eie5}8P?ILS z$d7a^>2xuAehe7!NZ~>1l4hx>cVbd!s7cE)A(t^n##HuxrWmL?RIuYXH=nrN%nDA< zV{`T6y{OIr`3L5V#tvDzdVJ6lNnI7UBf(A>R13RfMUawFR7W&6B}9nW-_`D3Cnh1E z!`;J|7k2Ul7R-_e0iZTpH%NMrQ};_h1WoB%m;IQL=l=!U>t>TKB>{#=C!$d-qf$Ht zZE8(pq$^|`0kHTzQEwidEy?hN#imig>5MD}sBK4ooUKUM1A@*8XS^Jot5SpEl7m_= z)C;BI*(r%@2`-U8q@qHByy(<;x0cTfV|6LS@$!4#P8r}v3_!py-I;k4!AiBWl&_f* 
zt_|q@PZt11Zze(`j=w4~|6cu`DXdN6zHM39U_+q7E>UqxN>t1rN#ObTJjuP)CZ6&f zl=`uW%`i6sEC#UxvXJI^t+XjZTILb>m-0#NBUWX5IbE<#<~c`)QY5hI)5Y;`r^sY% zbk$U(kZO)7{IH!X8`;xp=~d*pZ6t4dj(8|vg$F{vH|%6v(xRh3HPu6tdh*RSsZ`HU zOE>hZ0DpOuZ`a8MSh;!?#@-Bxgm=IqPZ~_V!}6jDP|?S_1P%E|Hv)7e@)=8sbjsur zJ_c`=Xyjn=oI7q*sINkmF(Ea8(fh_mzJ^o!R!G$`NI8ur%@Ol)7B_i6F4Hu5pObWv zd@|>gRZI!yy3NZ>7(`?+0ewhT1rFne_P_yDW8bOB35nL00c2V`V0|$_4Ew~upHzbw zmZ97nh-Hsgy*XI*7XQ*AV*K7Vl?sUbptMmr+5m;QXSI91d*lETwZef;J77SyB-6Ad zAPmeSl4yV+Z4tguEIx!MeNuGfAsD58{|WLv+6UY1$+F-BzK{e`&DnNH8^x&p?6Tq$ z`erpca$quPr`Av}@JwmP&5v8-r+d4zqrb$ILiEGIM-~g$vMX;A47 z{oi2riYEt>*rfud>rkt0r2y76lxI2!0kexW{-K(R?qKw* z!3E`;jphajyH)h*V3Q|i^b*@oIj)-fbk1U~y&&b}9$Ul|e=a}6pOk8Rzl!(k-uc@E^TH=KA8fy2}Tu* zhpxN(+ib-C9f1DoU`}-5DO=}u;;a0D3@tJf zt<~@djuqSZ9pBFe)v`>%ny6RtqC7S+RwicZLSbX2{E7t4*T884^V@a}v$B&b%FsIz zEvIjJah8uS7?Db|wpx@HJQMz%XwZ=thb3}Am`r1w`1(JC?(Za|pdPb?0&mX3X_{~2 ztFMPb9OE`KG#aN!9{`3pl*1`64WFG(<6(@0C*L!;Vw$wyjM*8U+d1|$E-(EaZ=At6 zpdtRC0gAkfs5FtRwPu&9@?OqO=Wbzo^xI$xL?7aY2-Q%?j(l@G-RLOy+!`|>A?Vd1 z$m+-qML_LEKvYp;$jC1|kua1KdcU+C0Vma;nCulEyUmn}2E2$&?|25*;$6GWSWFwwW&2qNA*!R+N_T7)s@B%8%$U@h~ok^<8prBjFb4*H%)XWF~M3z+t z4DP{qG%CWLiCuzw&sm!pQe=DFBwWu7loXdIoZQx?Z&ZHRgh%S~l8f9izaGsYBpD!J zu)?!p*1>$=`~D3jCqia_V%ekdZMJ(N9eoo<0yd8#ZW+)d-4c~R1XVxwc5aAZ(cu># z%rOLTx6zef@h5C4L7PP)totdru8Ra7st�= zh4X(Q*RJ)7hA(U27O{e3l5I{U9-P53fp4pg!V>?|?J_ z^fgBSHd5H{@Gtve;EKzAI2bb81tqB*BWPe~zTafD(UVoMO zD?-cbs^d%-noRfw5^vPhDToX5Px*wYz(Kj6Am=D;G*}?QqVT4;_Yi1keB1I_LI1+$ zyY6Rqbsv{@S|l$NMEK0ckT}XE#OTt;H?h^l=$4;yd(i%Z?>|r#8sJJn+TZ)eUiPGA zpYifL9|`Sjh+y0C=(UzbiGVJFc7K3&o7EI!E<&`D3R zmpgqdvq|ZwpGO>|1%0pwBa8_GYpA6W?NKEp{O{@)@ptvZ2F3gQuhkF8ZieRROMB8V6(b!)Tj;oEa-oz8gJr1} zB2b0O(veZs5tnMdm37P_FBZ08jW@*DdvOwg4Hxt3I@ld8NtCAQGC9af$gAE=s+6!9 zeY4c?1IsM-82k`IX{7E?(6^&Wd9&rc6N< zLj19Jsk>c#nLib{r7jgAcV&_X&M|@|(=OcARD3{KGlBdFMU*60Q$_9(EbsN{Wojz@ ze?}L&bo`)Gv;Sv(JCbO}7`dI053uxJG+fT6Lza1LL z5GC7_0{4z)4VcN@(sKbb>%)Hb^3@p0uQyMKy&*1^X~ugwSM@KY%Fuvk{2+gLW?0IX z;Jx+jI>B%drdR)A^VR%kRDKb@`sQqgdQ)0Gd6Fb;A=VREBaE##eDKLUo3)B1N79#) zlW(ftL0LuX`TPf^%$`-bqgM6ndW4*&c0`+$Po*-JQWvdkf8G|3W_uY(tMtC5lvqBIDXTR>lAe>*TU8^sM^@J*O4i$e{i%$H9oun}--d7CT|@ z)){FiHda&jKkkiOD-`I(dS~#K)a?s>ariq^0Gz6G6Gkk%_N@lM&9t4TZRg`Evdd>Y zn4HV`8;io=m}CI0V94V|TGPyG>iAgtDP_CQ@GYZ3q&_ZE&EjVc8gB43senEjuMZ46 z;;-JAJ2b=uux`vjLVc1N=6k--c>n>6c>)f~G6ufUW_f?1mp)D6?p%`IpD<{wCT&*t zqsvW!fOdAHPX0T0C8FRqF$BlXU1zqz z^vYJQ2WXw1jP}9IyLzT#R-J8^L_Krbz-!pPj?M!z*HE|#1dwK)6u(@&&d$jbuMt6i z@7pU3tZ|IMEjaGK8SYzPYx_$p#?SXwLgGWg`&=v$gZ-SW7Gw1a(;00ll>dyiq9Km? 
zR6^V1qnDpf`D!t8l*wNORyo#8anrNR626uII6@$U{TqvV*%4)0q(l_7fG$@ICjEij{1EfLY-oO zF$(D{;gqoV?Q%ODZ!SrgJ`E|5RR-yN47Al=cdWNX4;v)c7AJqHEcl9sqg#Zbt)O38 zT)noklxW9OB*i*i*057r8^RE)#u&iC=#VkJMEiTI%;A&tikP(OdjtS&1~H)Tpg>z< zq~ri@kF|^1l=v$LxKm2%Jc_s1^9Oh8GxZL|yW_K#@Q3qw+LSL|7xxk;lAD-R0F7`$ z0k#$gXOm`6;*(TY$$ZRBK^@zKMM`=kJ43F1^m*z-=SEc&ntKmi)Jd&S=SkzT^3TOA z0D8UIMKPEoVZK+_*|~qzy7folWlNX;s${=@9k=~M-Zv&qm~|ZtsIdh9?rdG62JTCI)psgj4t@*q0wQUWZSvC~o}?Fg zkNayt>ZwFq?Z-sKd=}r#86`%0a+|%6t_$r%s z4X@6_1EQ{@=9g7eCpea!OI;bT<)KF#MC#}lTctO+(7#Z;j3%+;JRYB^j~U?^yZSs($#KHxz_yQ`3j@DGC% zw^XkmaNLAQJzuzy8J8F#$?2t}NF%BFg#CIvW|mG&>pQ7w6{O+=eZiV6CH(fR+~$v> zagRrfNCmv8+)_XcPUJnIz{?d!ATGs?74eWcj-06w(`cY#m6M=?XZyK>e!p#0TK{PX z1&VCl6-pH73!JJ|Lyd7d`9)P&tXqhZT{y+XvznpmGeLr2A5#=eCvn1AXZ$#P32g{8So*~TVgUV^uD zY9=#KjFxKOXS*Pz^pp<<7cZj*Zy^U0_vP9RT~S_5K{Ne7Q!`)S$>!W8w8JjBMe0Yd z$&oasp;su2nw9Sg*O+DB$}mTvH37UQ%YyI{YPQ$t$Qj7}Xq-P)E74@jtOT&Qu6R zwHSFNn3Aj`Tv;`EqTNoNJZN%rX4hU}M%ZPY#j1;8{8cDgJw+tcAKw5kL94_a(3kwP z1Oj0AFVy6ofS@WetHqUml|^KLg|0wGGgW!`?KhB;h*~g2eyF@y5^|xkm>I&&Ax9mr z5Cb;Y0#^NmYW3t498fi1W5PCVMT>UXH7su$B_?Hjl&uFYqM={Txh6Hq#-^WcWNN~> z<{hAytfwD^W`XwVOH|h8hc=H&fAhksmA{G*PW46&I38PZ;7dU(=9;C)S*7lhU^kwx z>fs!37t-{=)uhMr$GlATjR_($z1LtO<#I*~Z9a~b8Ct+npl|jhxRi5|^(QV8x#DGb z8nk~(&CU>ha)uoAJ*V*Uyo)*t`Ec_&J1ehK@14EDP)X93lV96Ya-R<7&h2%x^=bK) z6OS?Ufr=uWpD&_M-$;!+f)wF70>Iq#bQ`>uxHKjNIAkiiG-}#92O@{^$~7)_P#IvIOGvC2IyR`f3Jc4GV$X{0zKTLzk>us+468dRIZ+n;(J)-tC@1NIQDnzcaV<|FX^Y^&1zc*QGijQK%$#X`J95ifaFj zqh!+w-ZRM5PewaYr8TGuYC|dB=E!B|h$k)LZLZX=h3+{)<+-NLn>qfSmh(k16c5e# z@8OGC{Nocl{IyY2dDs<>?AX&P9wC9)V;XheNO#^l6v+^cn{kV)0pe^`+=VK++l*K6 zyt&5$6;FLna_v0>Ht?4|PfW}1fxOpvw+sh`n~mmRyir)5rDpZqt3;ry4qp@o#{CJ+P&TDa7x|}`XON-dr zj=F%TI0aIIdX-rFTTD)Lt=GQT(fOcd^95`D#G?8-DW$hl=5M=#M z;I!>Y6$w5NNPK!w%UxQPdAW+G&9MkfnlPc|;88tm4cS^y7TAx}WTx&jcpttoQ648Q zUQON7%4eU2M+^|`+hL0#GU&~EUW>W}Z;3X%ozD90EIIOJHLtqu&Z(s`sy!Kdon*6p z)xOTFWKGzS-x~pRaiE*3-TDYN!YLS9XlQ!xC&jD~-O4DlRrY>0%@YKTIg<5I&17^& z7q6@M^j_XYK{meDS0}26{B`KX$G|g zGxqVZZlbxmy^?kUtq=Ii6K*O%A5iWo#cSEF1=yjQY;?JE-jA~dv2O7QocG@bD>1XK zI;-qZ07#&v_wL+7qr{}~^UE_SUwBDz`-=twdnJGd)}8JmSfDw3@Ky}h{n^+LDN`K$ zsizaU4L4EGZ_B`)b}+?`7pZo%p3-3LZ#hI;IE{Pc+oYh$MuN<-f0Dz@`KsEQSaB9L zpOXWwg36q3JAcFpEa*MU(wq)k4eY-|yo?X1kiI7(nnp)5Z_9VnTSpVjGJ}8`2S9bR z19z(m<#rKI0@&)Xk^1|(CD>=uW?@(Ft4<}{FvmEWYd9HInm})&nC`8XZ+B8FR!Nms zr+yoWI|MUzX1Zz7Smp)u;#i}ZY}nIAomFO|Ht&3M9c-liCEk^ay8=d=mx*=hXZ@j% z!+YvumwozhuvPjeL45!<{++$4I=#~fU65inNAxq%?RCr!zj>-9^6I@JpV(bDXB9jW zHv(&*qqa>9-^cwoGf?B>zfb-^f#1#&Mtt%3{JCeq{oj*$>=o=n`Jxt#_W=37+vR_G zkOA^&*Kg$|U&JP}+GXTR`MmcADbKDpq_2Y`t5K{GnjY_0gHjcxtBzRx1K6C+Cvk2i zfbwrK(;4F4^`mpKhTAiPV;Vqz-OQk6&{xH7Wip^=hbQkQg<#wYIJYv~F8!(fAEGuubl}ARfSle@Ap}OZe*aoz2)5qlbVX$yPcM&|a zkn<31e~yvrH1uJ+ulP5MgpNxdS1f7@_r8e$>vGpl^R1kTinp@g$uqJ$ z<(FU<&{us8W(iQ5(M+rF3*-geEk5zH(nYk@3^JW4$OKh^??;^OI<+tj1{u=A>83(r zL*Xa4T?TcYY%3#C-~N%yWu$)QkcS?eMKqt+{*e*Bi#eeL?1y6Fc+c*`oV)Xwu2h4+AFf^E(Er&1P~ zALm5ed0|m|*(Y#xpHn*dAI(@@#)N*s$nK8OG8% zzofcFRqBhwJpBD%KAV4l4(PRe0YxSz+96VxQJe7tdhaz@mhBNTVHT!(+b0U6{aMUM zzb89-F*yKv;ur_M&gYdjw#uHf&bpQCH*7xp*RNdJbPeajbiW{4P33=38f?V$ORGbN zMmQ+iLT+}OfSnSyeH%g9Kc83wn)m(?eL)wh6ge9un6w@5c+%4N3>9z+M?ND}Q*IOb zbYC@+@4rgzwN1mXZEF&cb4B)hYs|C8$CT&hKe=(x3hRDAv{x%dAktsbp6@*%Kh$KG48S^UL>GBl ziTXd|mdE*ix&9ctp}O)JsQcjPc3#Rlxu4)}V(jRvFp8C2H+n55y?uG$p#BUuLh ztnp(~LS{|O6FfGh`NripCXfJ)4s zepTJIGZ?tkBiO-scU2)fQ~E*4F4v8^8GWr;(L8+=ix*UQey?8S;o$+WRx>E<*O7p~ zPUXNG5d9DCj$N_6uxE>x;OqyW*WnIz{(HG#Q);k^bn@7ju;W8sb(9r8MW=0;1-3C_ zG=j+szWYt@q6e{GPCMy7G+YG{2$`-LMT3h4J9UHMharccFx1E@@|F(%|h0 
z|1>e3VtSbqLV^ul+KdV-%#-M2IRp_PsI3;ZfB5|Rv&Vtfb{j|QJiX__Bywpd4mn6xry(del-)gy-VgSR(c9xWp?+8Lyix~&JSf{n4aJ=u3XjjoqQ1BJ+@fi4=JK(v{Ts3p3|$Uzixnd@9~;gr3PYUPZ<61J>hQW91ePO=DNQ?%~Q^+0#?)$CF8B#&ueFX}edvO@HZUuh)O zv)g8r>+*r@siL6k3?>$uodA@yLi60#21hSF>)w_=n@6@>s@YKzLwm6iPgL)Y zM={1$+1NBJ*1{Vt2xTEO>Oqq!W~+3gLd#rAoVOd4zsvhTUAQuJ^MiDaex3$S@+D_R z1T-E~7WR^Hc4UsC(L!XCs4 zu)cepxJ`a?34)-j#2@PZ_$rSt{7PTDIxXu?@}AH)KFX2=b#FPB*$^hNyofj@*Cfs` zhF#jGwkgIEt2inZbfOTVE3*Pl=J2@4G2VdZN0)K5c2~vu(&9#uRQDZw8}S(KWRhxb zlM))buP06PuhUWsz5WDaaxG?XG`f~B^NZ9h$g@ubIm*twj@urgKjbEGJ<=7gLzoVf z*MWAa{T)<`0yITJRI|bi!3`)yyS(ydPLp`S)R_Z~6?vma?K&wmOK zJ7eNbL0vxOPVgYeY#6ks?ws2c0g2bx{VFlMQoNZ#*8`XcZ$+-S>>h!JQ#eH0)aJ=vY$#OGr?1k#Av8R;bhg8P0H zkd(B&r$HetgM0uHZcR$zjen%BA~Aa) zJiO0!mpYw&TZ3rMO$$DqilsZ_;G{5lm70Pi%Q?cO+)=H(Y7lbvfdGJ6Oi2LSLM@Yt z%JCZS=Q7X)X*~@N6Fc1q-z?Exzd{NOP?*a4c8fu3aqTzQ-VP8tAcm_;B~ij^Pa$fu8@8pQLy1OJ<_{R zqL1+W$VhvM%0Fy!hqoM7KfJfY@Q!=EBTnd?)bJaRRM*Xf3sOyZ;DTm0GYFk+4!@$lwZV03dXWYeW$iTy|zI=Zl=gXDn+CIENuy0>i zzxb64U93hWcCuCC|91V@x zV(pdby3Hv+^q0V1d{Mh?uDM`g3;Ol}QowsvX_g9M0^-DQ=>$O1%||?|A9y?=Sak9;p@>vg}K-TIx&vG|y^CCdP8~MBb0ID8xfTW8UCk#ia za0h799(A+B*U8DnFzifKgcpF&Hs{sVv3xDKKiqhjlHb zbT#U~G!^Z8pp^~KK^dtPqY+WasB8dLe+#5dHz!rZlDM{yKWJ zmuu3MeC$J#Dx)wC*!VFK*DtW0)S8A;cx6)VNg3nv_7*or0LY(@2vM~SGOTW(!qpqX zKfKO_Pj(vDK}&O*JHIZlF#aG^lkCB(2GqnW4gQ?eD|QJuiJVac42Vy-9ppO{7Erus z@`G^qoLhAMg8P7O9)B?IsHyk8sT@&Cn7`$_K)(ho5_xjW1F!Ggpi8?UvT6!uv|q3C zKgfT>b2(x$4om=KOu%kT$2tsaF7b~9>9#=5Loe|X#W4AE+%xrUX{;CjtrqA`@oxAx zDq+r<@-LBeApW^FL~W_3q*cNE}ot$*eB~+mx+9t;QK#5+vL)=GLl?h@2_mW#?$G z2DGA2T=7sHSQMkhW@OZsw^X`rJ^#DqhQmMu|9qP=+nEpla`*p`SOqWUZ!<-30kqpt z9$Fp1VUbu=N9>@_pf%iEW#p~N4LCiV26?RcK}V-kJ;06IXHWnX#y6L|25~&Hd({{} zG+LzkmF?lIKQYLQC!2A@N>#M$B(u8Kk@W|Rm8Z)mfgr~ia0*;FVD+Goo6I_bFpsP$ z3(N_vcLP+_r-hxkgv#O+cD{SB`dobv{h=D}qK6Zyb>~7$eUf+T) zNRt>Jje#d`+j4l*B^>u!$R2O#Hl}u$CxVwLQd{6 zVU+qz{BNWd6cz;_qp81Ninb*Xg_0olF-!4Q2dd+DJKK1ItECTE$=+VKl90iizck_K)p*r43bY#2VI zk&xY+#?Mbl$L1C_pS%mbCFXd9@YS2PJ-!zcWag(;GHzP0Zu6aqdGV0hQA_~96h#}4 zJq;TNdb?P9aodR{x8-!f$7;e^BYfN+Zz=QWit=Q*W(UB3)MPPKp$VDA`> zqFI(jGFeC~x=|fF<5v*$xV)*eaA*eWyMgWHcnBNs&`<|jGm8rf9kaz8? z$5Oo2eXoGiSo9`nmp3f9<#@K0y?B&&((<7zfIn&!!ND*aKv>DP4;P1+u*fpsg9GyU(Sc)b*Ama(6B3`BeAcy5$@(;Yjii_Ew zR^<>ZLMqd*=ZHN9j}hm@cJNL|eD)WBzK_I$@ZO2hv!EzJ_)W4zS&MMH8%<+GTMd#Z z-oF*!>Z-u7%#S$tpyD(HJ*i;UO+HeqvEPrbKQ`GwhpH78=394F+Ph?0lT zLXNuGKOVZguRGLtdo+oRmwwsDDl<+>zMFmj?yr3aq7>0+reT5fH2=uh|K}%NWP0#B z532v!U`M&pPx@? zt(>#EJqeuH%x2(aaC0YOx=3@n7&o@4aP=Rb-!HF_y_y885Oh*n@BgO+i%=O-hZbz`xKL3HY>u?bnuHH#EajlH| z;Hcw}P=8q-^>MNaN|_jM7=yYUr2=Y#!3QBh^6$#~olG;>37_QMWTqTmV<-{~;Pwg_ zS1rZ$mpt%hH~jiLEPuRFSTTDIwj#V>MYI7$LV3txr6Zs{$bBKX29bM_-CD1TdbskX zNaN5vZ??my%IiTif=-jU*Mm6?L_&{dQx#~qp5mJrKrfQrqfz(#`7LlAV@Mc#+sYax z%z$v+5S>n<&mDxOPoPZQPMeld>nQeMeg9}93;DHwcoY0pty5`%<2RaF&_yJGE zFnGr_NKz_%#d%;JCnZ1RfT*-NI?|uwz*3>t2DA}^!xz~2ZVQY zp`e(N7l&plgZ327m3zF3qS3s6*d7Q;fv+MXEIS1kp6I1Aqt(nP)tnv3JwZS0Y_5uV z4g$QyUgL3S_Kt)&r=1qVeD6<+<$LbAy5L$5or~5;cKAJSXWWG5-_8gYxt~kE@RT;Q ze>bG@a2xo|4?9*HMWQ*SnPJ+wRTZc8yKM6KB#+U;GkMGDEGl+KjiMya%6*B7vcO~sEn`kSmJL)eo zwrbTd2fgqvOHaUlzPD(-T6`P{dKz20#q}z`_B>?thhba>V8jFR*^z?kL{x2jLy8xu zz|T~2bcaC`7Np<@Gpg_Vkckao1+udLNoB%U{b`o?QB>wdAfu1#H(Jmo2lg}M^x?7? 
zifX;C?e8fBm)#=iG8daY-Q;^cey$8)7jE3R9R;0uidD$slhTK~oZ{%Fd`*Ko{)LnR zXVKNrF(;)&C9%chrR0%HFgoM(3_`pyhWv~cNO(9fC2m^)@`HaD)h`{R^^7i%Vq>l0 z0wKMW1e#!0K>IEZi8SZzfCduiU?!ooJ{4JVqH!R7O0zaB=ftFKJ^0)T@Uawl%}5nj zz^IkHvDXoU9i{Ms9?n~F+{-f_o;OK#!2N>WZG3;-f*bz)ie_r(fY*F!3wkW+3uSm* zH*I-1s;gT~k~f+^50?Aq%WU>_oN9{#5=;Lv3ToJn200^!YAHM+qS*K3*(7w05SA8oMHW;xR?*HcfBWXIlR?N{(Ru&mA=tXOLul1h z(}vg)&9X%xt16Id*6G)PO`dK4m+GeRai$-I|!hc9l< z81<%;?fhKz9NvZYAGd6J+h>(2cxd-~L}UT~!3v(zp(KQnqzj6>Yd!ao$kZI}E*jQd zk_1-ypGrXYchPRy-g?;Zkx9By4X~&g?@WJAMLBT}s=qvo;^5Pd>?NfS1+L!m_viPa zr@si`n}h64J!(Q>PLxlhCEn&Yh-Q`u)~X*DtoXZs#h*e4C1^~Mbcg!yBfUg;{W@up zL7aV!$>g#y37ysL7yTeBBonkgvHAUf2P!Eu1{PQE@c%C`_%Cw1{-VwDlr~cwj^Y_| zlal|{-a3UB0B8I8O{V-WO)6d#0bzB?n}Bx#A}=#t)~GJ4@aWK-qg(*gK6~Q&6-WXO zdo>_J1V7&+x9V^4u2tslqtKO7jE~I|?7MR{h*Mk!^|Qd{?H^y4%%$<1a=QZA>K8yD zj|I8-b&>UX)Po8s^QW?*AM9v+Xq`CDVqpGl*?4*(^6?HAm8JC zCmNto9Xypxfs?%vOpQ;Bd9Cjci0MhL6o~|gQ`$Cr3GJgcckIDsm6SQTi=&BNHj_bD*C3riQu5xLT!K+(^KhIDTU_{_mtEaPU z!ywM9xq(8`EFVyrFPc2SrpQzQIjmjKxYo z;{Y$u7W|&6Fj>Ac^y~(%ER?O)^>_&i>i2{xgv`LP?+Y&Pq*U)K_?Z7^Cbg+>ifU6= z{>f(3sToj#Zo>=A$*vCTmJ9%qp3*fTTmGhi8Snzw_sYZa_If3%(5(d?%Yd$2fVhTS zjX`0Drp9&;utlDY*G5esElI$NnyuCTJ@)MmvT0&HRurDD`_H$3Hm+Q;+ucuQ$bbHN z?Ek@uq;vXP!SR&otRjg)J%%nyI!i=7LY>jD2l;$&1bnH`)6_s}VkxDG76hXL>VS67 z&5`x@Wrp%E3w79gwl(E2pR7wod&`^(`~ZZ5l0we(8>EpJXI!(J<^2byaF4*%*?6^QM^_vp77hIhEFgg9a) zT!$l;!+9u+6H#WKmRHANNq@+-jDnj^g)0y8Q?Fic7E>Z1?|0TcMo>amh3_8w12Hw= zuQehb_yGrvkHO1GF!Zy{XA$|*&nZ&bu2|ERP#?tkC0|5WFrh&E6ETq84=^i#)$@nm zf5FzelXWZC2$Y?*{G3q2=Y<9k*gCXDq_H6q@^L_W(Gw%_@w5Pq;I|j{U8jq> zijR8<9Bp@t8p?CHvrC9^_XMIl5n8$9O?@i6JUBAicvfnr-dgLSn`VK+09;25Dhgst zuP^wY-eq4%Lrh|xwIIBu#y>iQrbK%(QF04AwtL^B0#?Z9?%Q3%*o)D=a}Hx5n@rB3 z=FJ#%%oIDi`i=y^Xe+H6@u2lt>4CKs&b-+PR8il2z!;}N!NiLO#8@<0z{idhtsG}k z8tgTQjxS}S1fosa8fW3C9h8Wb-idp5f&fjlZhy$`Cv#|n?r^{Z5(Ou|_F>Sbu;dg( zs$k>=;$NLc`k(?ayAynCwwgckKge5RY;)&4B_2lg4si+{<%oU#v z1GRqN=V;Ud6QtN*9|?+_asde|={^h@P)^JB`A`b0|`S7QRaUZaN1$w>p$s_HVmSCBjlc*v>E zLRTwrN^#=mlG=I>%XZRYHqh5k-;@Fcz)`0X;ec_8-e2^0Se`_$;h9}Eo%QYa^7fQ? 
z4QdTtJwNJJZrNq97Ax=!Tjqr9m!mQujG2BlFd7eyg5-*?ie<0Os%YNjW~#u8pE8?_ zZRd?uzL7Ijc!#FC_A`BkasO=3F&@=e%q93X>#8HkXzm)w=(OsL#h;nh!x$;s9CE+J z5CmirTe6^-dhEtO=?VAe1tb|V`lrMdP(Llhh{tZy)x1W#gE`{vh|y-2IhOR;n@&7* zOv@MLaCI#so%>~wnY!JXTu6$`w86CR|1HO_2 zC&2fCQ(+|+Kpi6eW>GnyyjoqOhl_s+8__bGjZr=8q5sZedKzbN%4nFOlBq^E-HW7k z&gH0DzVAlcF#y=F^8%J?w9m(6#EpjTAZR@r~7nt<-=M(C7=WXykO1-S#5=q-sJEdh@m*-$j0Qh`Z93X1cngu0nGt zH^y?jqJNbs9MAnC^?w~YG^_sdW-&DTMon=rqty)yC`Z;&|7`tEdaOyCdI<8_oYY|6f#5EbMzR&4mlIv*# zFiZKqLt5|(@7wM+_bfE4G|0=T%L-yOb4GB0% z5G)K+Z90Q4wJ(=XQrz4^&zn+u-xCK0vq#S?8S*$hABZ_E=`)hcI;U##bq5j7$wF0(PL4QVZn0`yTC_qrve`awx=8_i(!VbK# z$C7*vYIovS1!T;~9O!>HP3eDp|JvN2mDgGO070xFBo&bUOct7ksa_I?Bx(N-{}h5@ zI!b72K+1Ca{|-#S_pnRtTuDJ}1=~G@S04$Q4C}`XOVpE9zG6@4Q=!|g188NJrzxRT zv}7B37okeK!b={i-iWJ$^5T4g{dlrTN{AmS%W~tft>18LrHgroGk4*LP>mUZva+Yd zf6jSClYV~iERUL}YJRRQ-XMjMyP=63AsSW=s_D)n%)(f{D)B-ZfxiERv{`9I)k{KB zg}ZW|2q4rN==1r{tekv}wUpoP>%&r!=_-UoaRK#J+3c&9;nyBvnIM)r965@9+C8;+ z0CoY?12xULxVe#oY(jNj}lxY4}TwWzMUV`v|ff#)Q+`z;zBPdWXiSeUGl%cXi>}L|RPE zQ-?QC*E>bxm$wZsiVxn)srog)V?pgG>RdGPS^u4aCG)299NRWmiWa%Ht|X}bq7V$% z&zVs;Eu0Ze5_nQBnNpz-!3%Y)W|LO4;Jnmd*UhDbs&|@2_lv$511Eh~dF{%Q2=^SB zOgU{Vai$2+Kxjl>8dVTR6B4A+S4~5lU2l28AP8~6l53MXe!&p;z%39@Nj9NBGCH zm(Wv2zr}$dZx%p{=8MS_VAPwH(gT4A$C?)r z6*myyTbf{z_VkkHMy+3YD8|Hst8Eidzb67p;X2$8qa=!IEE91(a#<+Rec3*pbF#+} z$OxFsaJY{W;V#QH=#F(wXMP@un@3&jdb39ihvh-yj}&!}*Y2z|^*&eY2s?^jmXHqg z@yEV+qP)reOEv+z2`_Ab!eRq^uf!G4%ul!**Fl*e=FGeMUlc%EbcNG;sDw^@=?j2O zbBvML{txIpxcv3LTfJQb;1_a`OecvLG$$Sthe;NC%QmPP?PEY%G^@y0(8nE@BKBf4 zV~uP(@>&E7RY$22Z-{U1~?!_o7ucDko z)l-=Wk!uwy?{wg4-<5f&k#IFxn@e!-I@jn8qgTcyhS>>UQIeg8RjHgvY=-Nk-1e&z zsL`ON>rbCuvbL;_<_3l0(>~A5*9Nk0QB0u;8|4xyAITYk4=pmyzcR04x6s^mAUD8U zD26kG%+UKAGPIgpu!nwQ*cb_Jgjl8h`S|_Xs+WslRc|3!P<7H{^-S^r4|FRv@QFYAauZOU$?IewejUt zuUhi>RB)ing=q8lY960hjx>vcj>GPr>Kv_%sY(iL){lPpn+u0@$2)~x?{ zre4l-JaT9(k@xe~I(v1xHWf`06GxEOWwG&-P46(Q6Al9l-P7^OP(|yX!pe=k+@@g5 zNw!>Se;6am$CP}D)UB9riHDQM>4SZd1M&X4`22+vq+A~t(}1I0qmCItdvsBKNuX?) 
zdL3p(*aOM)cSy`u(*8>(j0}~8M#Ep775{&eJ!(%{y`mp9y91yQC{&|hUt>a#XCcK& zr*=`?YUHxdX~Zf`$ayzCN$9PkFX~K`dJ0J-Q6znK4o56gKqi==jwEK5Vhi*nw)vU& zy66qwbQVk%jQT6xAX&MMwb@kD1IZ}~v#1j!9T~FE+*t*>hoWXfc~LPndIitX zDWx?eIfB;L!yNF_bC;>Bu$@YOhBj@PJ;SMUDKzEYNNnK%hUfXbCxz4jVPLO*Lb5zJ zKvoj@^0H4(k*ewAoaCQfS)|Xot1i}W(x{bu6CTu+Uj4x~py|>d`cu?48={m};VE0i zSvc%a%`crJg)uEQY3eg&+VQGdn9u>z2;G5-34HW##6fjyU3sX{hG}p z6y?ODw0Y^?3tOAd_UdJqwCZng*aq#cYCF5VSr)*sjPEuxSXxIR9P(21t?bK?{ei3C z849R+lyLaM9DWloRTVVbZLqBo^cNcKEG4qH5A*;E)m)U%sbnsi?-ZX5e>+@WR}G|S z;bsljP8NYy^vbez7rcLM2)+lbB7*!6s@$ zm>!LZ(qLlab;<9wn~U1|+DPK@3usPp;C}Yk8b=Y^6K||PH}AAHH6xNUDFPPuGf4;> zn7rTK(R!6chOw8)$6FO7xqE&IEc1Lb0#f`0rwyh79YUQ9lHLQ zaL1_gVVkR47+~5MtHe~9M3EqdLE7@rUg($i}bJhJ%Hl8_jTuZqWuE?2K<(X>QMYg#!b`K2(XVUQN9J0AuxeTysZkb93);Gq zAN7~Z^xu~G#5dN2*kFWzMe2PgSU@sbSB>QZ)0P%WW#O`A<4_yj;^zLn?|})hUwZ{U zU~DcDagCCZ*hV7RQ!I{lP1G5vf<9$Zb;iH+3mNkHL80jq3qAeLg||7 zJ*qT-hR>(>I{ec>vkAQMU@KGrh#?*?f}yY% z)S68l3S@WKf?0@8)AYz;`QDTNH_>|UXXM{YJO=(B>@s=#&S-GxGcTOh?h`zkR~@eA z6Hp|o8rAj9APtZvy<>O2GP3YXr@~iyy1XR1;#;VTKwFBwAgo!+>B<;9FwPun@rjYHoEi%QVP>RM+XC5@4xka3~{*IMOi)#Md_ zL+WoG=^tq2<*3zpS1xf+F(qGgEvDuRp1p6_5mS}oUd!Q}xCUvWO=zWjiF8KJp7-CO zRWOf-2?^9y_y~nWDFTc}r`J>4WEw$lBM;wg`RMl(th>>G|C~{}p3+0SI3WAe&I&rpUTJp0vK@=;j`e5#j2JnCo6xg7}+aUk=11eX>KO=DAYKBq;Xl`hkj zSFKH{;G{$MXm@Pn_WbJ_LXXpV;r9CUa`9*QTVhVaBBQU=PJPy17Q&qARc0!Rfpcv^ zPy3%)Q#%^=oO?RYTA!*3RreOE{bULl9K;~ka%CZQsjF^Zcxty?JJ-8~lsLS_9@_gT z@DNsq6ZXxJ@Mqxxn}b8JoYuu^9?T-FB1ss;)Pr}d_9VuDn0c03&16daB?>9YwC*Y@ zD|w@b!W#%*R0&4v%UJlzRj_P5`FA+hR}cpW`n$|W=)c+Wp_YKYjT3WRB)thSu|c7UARy3e zKMI-6@}m?X{E=5BM0}Ui>3kpWtHHR2w(fJA}rlU-pB zfNv9pnfook$Y&fWVt4PK)wPItSmf*Ec`YwDO!KY-!~?JQhUF%VQAC?1*Ryie75l)wdsvtOr+|ZU-~$}K^k4d% zxz1kE(||+?40H4JX90hj?9-)Gj3=f$&3eKtU%>4n@hSh9%AHyaG*(7_YSfZR0iW&0 zrUfyePUvIj{<0|cI$~0u7H~rX@~sI@nv2Fy|}vzXxwkL7|p@I}^VIYR#xT zc;+AQ`9~-kr(W>LJbH`5vqC@q#Xm<~yQ+{;>Sy@PRUw_q-{=SOYg$wxN_|fHU5TVnPT;%vDlch`Q}#{aB*ipVbz zUt2m;%h8j=&bkT>|SYHv!+of{NlF=-fMzWC{c|@a_j^ zER0A7^~0cHbKnT=i8FjDvWo_%1Vas+D|4A;lYkQt*TN4c)cA@AA_n4BQHvo{vbsU( zK_M8RG925dY@{lv6aR05gcEdK)^qxjYKG7Hg9qmlz}@_CA$E)b1RWs^b8mc0Kesiw zF^6PO09CJEvYwCnLF1*7BA2aj;`9P{-X98I#6%x*aeHiPh4epH6`x3L+f@Kq~kNcJYoV3Um zE-prLGqOo+&L^o`7%Al8EyR|3N|c}#;pC}`s&3s?i}?M9g;e9?2lIPCMM&`u?S>QW z{0DfR3aU=~UIAiZ5+LL{%pZ9ta_N=I{N`Orh*GepJ>HyP=pH~dWsh5GL#vLc$JE4E z$7ce=4K$Xokr+d~U;r~kJ?ko%s+g3;>x(12fO5wM=9LN}30(4&CN%gp*Tf~dLJzan z-UkDXK|=7@2vzclBnRR@pi8e3pJA&-5N23?Bhaw=7%uR`jxeU_Za1eG!^*|{yFmLk zqS3V2R`?#!5?#w!aaqk&%7(MnOWf^N^+CiM+QTMHqP^NLRdWEwig!=b{4=_4h>oWk z>OTNtn&K^XE5%!~!)_rLrD8_N996v-yPol}Q#h52UDfEW`spZU1O%}8` zATEqhh7%{@^Lv?jR57IzlBfe7Hsh|T;<>xutxpzqL!(|+H>B{6*VNBYkcSA-B9pzd zIWGSoRryd~e^b&QVtWl|ipgS4%T*iS7}he4;A?!AO=o(mq`8bWO&{z7qt?IZrA8^H zs{*kFy}1$ByV}tX<#V5=2lM<6+jtEueIOY!t6N&1f~NNJZc`;^X_EnOpmy0j82c!N zJjYe82+0ByY&rLL--E;Zti&uIhN@;4hXbhYTo}?}kMfz&Dq2Ab4HB0co+dxS2x@iV zM^P5sYVUgpuXT3902|rEZ;jO~&o}Go@{7jGHJl}6XDt$d_1A16lkQg#+=iN4F6~v= z-F)9wcfq#0oJb}Ts_EdWNvd%n1TuHm_cMC1B1*FHWNmHh4SfFj%`6BOm1&)#6lcXo z!ophw%>16>my)d1827?sgaq;1_^Sr7FyFOuAEz-qj1#wWen=DA$U(nA)IgU{r`1l9MvGNS#q7I5g7>AtCK$xVJvZa(0gxtVT@ub8Zw|M9 zf@b&znELq3Z96)9v3ny8j6x&PdS)}NVvxr;iBl}Fq*|D`O9Pf3ZbwriKu=}!my4@c zd`wv05DfDu0~bKB{Pz=5a11nLIg=6un;;(%ar}?y5|G0^VocZ$PyNq2_>WTPtN$wk zN)bPEIX^hSJl8JM!^##NhH9FsSzQ#5h)#mgibsu!(!~+zsco~UgR|f9?k(sYh-@)8 zHt&sg{0`M)(dnSs4!QR?amVCTSNCJZWZ|2SDEL5?&VL6U-%goNatOYj5FSVkeP4@N zPA*)S#XOJ1{-LA^VIA?%+BS=%YJ@_DtAnzH7hP;!niJ!1v>>Hqt-cS`kdr*A5)eNC zWfdJepU;l1%l-;6o2H6?CkbCQZb{E&Bl4p|tPCaNMuW!Pv)PP>?w%!6gy6yX_|X^e zTso21bgdwc9kUKQ&gZ4BFZA|pWldc9J=J#2CXneKkYxf~{Zt~|<<3PFqdX!o@NAhe 
zA9;K$P{Z|*mSSu0r@_i3v_E7fdqr=F=Uc)>5h(tNuZvDg!87Y)x&;4OCqwB;7NR=9U95Nfwq+*5Z{!k_22i<2W>zxGOR-OP$0Dd7 zM#X8v15tJ*4B!uH+A7D@noNRkg1^ z17~E=1^ir8gPrs{u$8iX0ALW50C*n}TdDUV0Y>hl zlp|djCEdhmJ_96*9f+&Q#09oXY>`)&`kWuHzfvyZZZK<>1McTH9(sk~A;$tH(1#gM zkSFT(n+-7pn>z{)YRYw5C)JxV7W(HXw zxA^0v&`JL%j}0IO7Ut{i(3vUw}m0*Zb{b&04DH`F}YFKh34 zOE;t`y#EUA*T=!6)UVg8t4I;8V%(c|j3k59KfiBxK0qJY)ppcn+2oYPc;> zX_RiHyHjA(-5`xhcS=icQUs*gba$t8r*ue{ba!{dUA*6S&VBEB?q8n=6xnO8F~=MM zMKUfT5IPpmHchmFHUPaN`NksvSZk5j!taSHKXlfGE{ke!M3(w`yS`5wgR;3aEJNmH zA3k2N1@4r>ONaM&qc`0-XSaSU4|>&yJ*>EsDDD;q{K6y*JFmi|R8q7&83D83j5^OL zk2squ&w0}0sN)l3P{3cB6EOPre_ig87&1SEt?H^yR!7Z=ulbp3po#PDtc1AZnHsX(VrM-6{d2^K9HwI z>CP9Me`>@aQROWiRO9d>b|ImWN((^pvmfTov_Y_|CUhaz3i@dsEW{8<8tC8N=D>NxdOkqHTVfS_{H=*Fmgw>>_N@r&hUQLUND(#=Ivic zBLe1)$<<hx)8aC}D{k-=QxPQj$VP@fmxTjD$Nj2AN8Ep~?uT10_oN6b>`q!oxNA8N z#9BCuXd^H^P7+nyA9W=DA{({2SWmIrUg+glPxovcWcUHH`(tCNDKV6{@$ErP?p}p&* z`rt$^wI*;B35f1h&rFlEnvC!-d^%kq1a{curW#M7c--hAp@e4@J<+QM3C7EhgVGcA z)97VHpHW|*?SC%D)3E=0F%CGrn3TT(-o0P4xnD2FnBEGV(13L7YxLn~T-ndL84B9F z8Owd*wgBKGXNnGZn{xc^1$ug8VAGPv&`E@_RYEbZ-{HBHlV zryr@cw-S_y$&k(p{r3ej%*ixG4FbcX&pe{2HVn{(OgTboYFGW=P(S|RY(ti@MzZ`NG(ON^J@$?V<;7-%73@7r zhd}G1B5?=H!Je9`>@(>i0XZTSKK|v>IT{UOPjIByquYdFwy&F9%^}Ab@hAL~n!S?8 z9Cmz@a)uVp$?V4;fW~gwnM4SeQG5JW8b356|CVVFFUa^)R~wx=e6yp5oS#_N6au7Nafwk7 z<^Lb>`MpP18zJ56lw_5wlOPTVw3~x zds~whoq|A1(+q5q?)%788CGtaajVySIl1~@ZiPqE06mB_{X&3$bPYms-b#T+U~X~< z(UI4%*j1MJ$>2cso_eYHbV`wt*&+iMb!aNyICZb_E(jvu94?>)FS*r>{W&J)w~5;C zv28e)fEGF%p}%wr9l1{=8gEwU=BzM#%emKbcaK_#?ORH6bR!ZNSOU~p8=Pf{roAdL zP=Zg|rtajheBZk7yyYwAaK|v7U(XVLOM`}Bs))Wjix&^auyYNL@=-_SVP4nv2j*jB zX)=C5y?ZW7zIJpyeol``VnO5!$9dl{f$ShmwF<#gATTK{>YrZ-DNZ%deUzR_?7T`E zlW=pC{1i?HPvxTEy*uWF-7%s4VXX3*>LZ`1p;uOid zss9|{;##%9|DNNeVdIY|o=#?QoqzRbO-H!8t6IX|S<`vg45^e8AK8=cc0@Y+p%Y8% zo!UVA;;Oo5;uAJvyRO2V;4M=4%K=eK-~#;X&vbQpUcplu+_W_PK1GRm+6%#k)x^!5 zLz%RS&&ZNN%2BDNkd9V`F3eWGO#17RtnH5)Mrkb)jb`CjSR^C@nkhX2ZXqqkI1xA+d8^#a|QN7o-X)*g0%R;!4FZT>;FgRNYZ^GBMP|@!f{0X3A{fpG|hhZAv|1Q1N<><9+VIZY? z&uGngc5a~(=(CH+M`q=uRghj;&b4O!g65J% zYc0#1dqA?NH5{4#Uc+t$YGX_V`t1|9B@P@g|>uY_RiHFe=kNKI@#+(!#B>IB|(L#i{RvTt`_9*&tzmVkKHvtKlGbS;@ z@bPc+n+Ko{Zdj9Yiu)-Hge^Ri42>i{u2R#GIC{|YQ9-Ja?Iq@t+>O3o-5(@oR}YSU z{)NwF4AROnwMW`3`#y5cdfiTziY`adudk@7qIfIH`J43yFUdK6mhfgpH=A(>z{qvaqU zWYU+eb*NZ;=9EMRUil>C_I^&%unDJUWM8|b{yq$ZQG>Y z?tNIEeYIs-Xo|nxuW%&B4{+LG6Ea}lY#K2n6x^AI4DCI&1%yxLSA7S3=$gbS=l{8& z5dQaq0uRu-&x8Fx*u((d;1G8(YlcdhN_c1vo?rfhTvN|qadF6&3&JbZh~-4Qw2O(!AD_mPeROFls=X6WsFz;|YI@*)*pv>xK=3Lxt9>xJZe ze}DY;g3NeAU0`Y#c2!e~xPgsQqTjoLErt zHB`|(HATsCfFr{)B445Qvz}8FZ`j~r6i~93#COQ{>G$ig%m~haFCKTEOwo99OrtRw z+lRK*s_i@MJ?r^m{*uD8?xtN97)@(rzN3IP-162z{HEFaVL`cKEp>lh2%jFIn8IMe zD}yeT&p&{X1V1iZ7c1+lmU_%R%{kYfemwAp6@M3L} z5I<_pDp))_jz77~vkE;uq-B~lxtC~zwMJdU|J%$+WD3|oq0~u%p0ge?U_%f21h=;u z579{W_RNJ@KPpH#rX~m~m#|aMAP15;ll|diGxXjdxO+lv2Dgg15@l?mGV4-#1QZm`DNc_o8VusX$onr}|@ z(GN1@TB^LPT?J*9f8h&aPW5SDw9Y*XJMuP_Xdo3i(kc;q7750T-=~B{5f=Z}u)kcV z(ZrFkS8=lcQjf1akE(?_7=6k@CA$dGW#pS zKC3#><9g)T^O8F2q$IVGC#!U|XYhGT2^0hACEYwR(u@U5+w;AJR!-uZ>7-;(Ma^fd z?|@}p_FSMqJg3e0E(*1=jWSZY-+7PFra$W@PI-}N#u`>R&EO!?Og~#8Wa5Hw9%sFciDbsmS1orK zSucAM3h1q5CRGSSXEw>er+j4x$>%%WZ1LYe&gjR5B<(cLfv7eCQd(CiWS? 
zyug-oBENf~co98UqG*q3`PuB&6O!aAoB;6Fb;r3ei)3*JBs^^8@*Ak1Qp-*}2>@Wa zJoW1?;s!ylC3z*&nTP?Y@K?xM2ryFFYaAEum{3ZxH-bNAtPf8G*0`JIjp7EHP> zTOQw+v}aEnxL9duvAN9ZZvSe391_jOan}n>Q!}QbpNGOCUG#$=GfV!M0Hg}lNsz`- zg9n8+VD4;z_V7QiUT|BV>*`^ac@6g7S3;A$<{&8aRV*I$_J0QKK=7dcdre@^=l|Kq zQgP~MX#wob)_(w?V!%hA2Hb75<&umna}?YY6~81>;SJID^&w7;A5Vr_DZeQT$Pir* z@zpiV0we6^_v-~>Yj}pz?C-PL;Ni3{FG6)E+y|2ld}J%GR?%h{Cz#;?)ioy(E#$c7UIKeFik#3`(0gn`dnybMSLTOzN5?@vxLX?3V!ovldj<_I==uL1k%oVTWVzgop9HCy4ya z=Ir0N>m4oSpA@;%J;Epcn(8Klqyne&gfdaW`xJk+ZJwrv~Y^T~&*?RMEH zin(3oDPdo2iV4B;i9-Be1|b?#5X!CXlE>o!Ohg?!O6SUx%Yqb}WIt0S_UDv*W>61(xdDNh4ow%tR z5>~}k8Z(Ty7-z&m-$66pIpPkK-=KN6ZNa?!zvykU%6a!xPlM_kH$pHi&hTX|k*xTTqA7~E9jT2WYK^jP1%HXO7 z%fih`v3N)oT)!}jje4NPZmxp(e5`H`D+IKCRBO5{2z3l3TfoEy9$h?^2Z|8ew5Qi} zW%+L#S(QyTTAr;Z@v7NTT`fyKGo6!Cmus$&YY~{Dl*w*15ym;VbGootzE~ZOF%_I% zSRUW{Dyj;3@9CT)6}^;g@vSP;R>PE~POLAY@;TS=(jV|^n|~8p2jYUGe5RN#8{1Og zj3k~_BOD?SCCa{Z#hdG6s0ZGjoqNfnzu8bGK&VkbvT2@KCUVXUn`c#*+K()f6>tZl zq9v)uz?;CyTMH;W+Oa#=0_5iVyFQkNzPlP>HGRIc9a*~v=5XWISf!|K5d@~p0pWRP zI-FAL2cNIKe~Xh~erzH|M3W*BBO7ET0GTOAZhk4{utW3l%wH)hSmN z)H}X^ook#BYYJwQQT;yh{_79_GeohWINAC>*1{XAF?=_X@H;UzkqwV3htYXff~kt3 z=0*^4%ltiwpXzzHR1;KASU^A9KN9V&5aorvDFVir%PZo`5rAA;6$4jlq_{6833Ypo z8`8q>LRy{>gxNwzcqng~u!9D1I$0&e-z_+7#nEt{fgf(#S*aNxkq3$_cc$=V2eVPy z+$f@*bq$R*_r8O26Y@%~MguFC&OO@A-3hrzA{W8H+P~oQT)_L9t{HbMBE4|0ljkHDA<*5};j~3iU()_$fmuFse3RsMJYevU42I%t4rt9Zb)DZ`qA9 zV6i9HG{>WGn*e%~%F;NegLU~e2APgD>s(iko9!-GSe4>A@0LoAO7Smj%oa^iXphT^fF(w>c)2%$MLT@iu6S8d-}unVS??DpNQ&tpm)=P=_LT^ zPtBIXo)V67(%xBs5Y*j6SEI-ji_^%fs6XlG{mA;Y} zG2NbfDBM%jVwOUxX%K7upGjpe;2ht`gYgGVHC|<w=0eDilUJEG(3FIw~!s--BZ_X4=0v$JSp24t zWtfnPqI)6$$naQK?ism&Y#75lEInc=?xK~V|1g~RHTJH@0k`8NZ=NZ8%Hx_LZ#+isr;2J!ZHO|;(nE2hZC0|)SjO`^f@DJ!4!zl82GNSY2`|rPt zNIgRLXOhhXS{uC|^~|UbsP_be!J2IzcVTyM|3FMrlrtvdcj{&sK(5w&su_2gp#xe( zMbly^OTPcfPmo@ePaTw}5{qU8vwo34mr%F@(V5*vl#3TOxmZrlH!xokl_%HTL*z~p zgB|&cG23h^i6p-jOiJX1x`m(8R7Ta_q7U}$aJi5^x7Xw^T3mnhIm$fs&msFz+;LxO z991+2d&Q4JrBXhB&UcUxoY()4e@4<&(<%*h;k&f`tukoVa>g|Xx2m17FY~}tWP!~8 zTELvJ;V3CZE=R$BZ|i^!`Ua&`C0R)&VaeT!M>JU3sZg!%jD$@kEoW{!xAa8g*&~tx zFQ)#)dNk)Cw4bu=lCBw-f=!SeBbF~=i;SrZS>j5CWvGmk7MGxfuuI&QEw7DsVwot} z@!xuG@51$9E=5m1i1+=oQ^ zWKq3&Sbek1eFpu{hEJ@#FbxO=n4b973z!g?gpIurp4ykx-~k=Bj_ zbdbfNBfm$b#PsO~$&20!7NdEj4e^5g~On2C=ec|6cQ7FX6p7 zuY=Xi0(m5_7Y8LjM|ASTd2Vn2>>4^573s(fRE(-fbn9MfkVd zL#hMyeiNTsNPtdqa-@honloLaf$jFMphZXPHRVOGbQcQ};qzU_4atbvRtk`(X8l;$ zzS$#m^dfyN(R8JTZ;SX(|AyB8^ltz$M?S!dAb8x#z#%wJ#R`)yI^(Y4_NZx(ruzPJ za@aW(FJ*;A_v^9SS*U_iU4)xx4AMy56x39}wpANlS_G(1*iJ2q)Rb-IK;~4ZFM# zD#blC-o%u;Z*+jyr6j;y5#_wN_=f1z!H=aDvmEDXYoUf2!5xB7V_9Pf#Ppll06T*1 zWJmvmNT6Q_2qJ17DIQbHhvGJh4;us285|*uBMa7p+rkNjrvg*}L7CyjiL0<9ME`lB zwCVBuy$?kHvGY&+%hOQgprzK&*VVo<&yy%bxY)36>Fg?|;$j1}NgCMZVJtCjro10e zR9jCS7uq_wHTGHsI|A_RlUN`^SNuaTb9ur&tLQc4xPMXI*hE?3b*2h=%nWVDdR>y% z#I|IIMs}A9hED|bKvq4>ubs_|v0tMiU#+I5Q&t{c>QXz2oc~1U0PnQy^PHyG1rn>9 z?3!;aUlAMJE?iR!qF+4D&0|&?msto4Qe^5c;aJ%&d~RHKa+9S`(uedevw00a*(vKu zfa@9B)%SGy8wAGqLkP`RYsZvgLG15k>U%J9`T9R0&Gm$Pacu!D#e4nhFP(IJC~ ze>`9{yu+9A%*v1SdAuvuFbnZl>@YU^`U2=3idnODZ|n- zTFVm2Xe5njcW9BqdX|DjQTrt+Szu6m(2MM={-lef1(vglVOs*Xj$je|A@*FaD67WeFpJ zG}m5uNK}rrz41+7+!6VMR6b3wDPhO+7YXe-iKTgpN$8zbxG&qXc|xGcKY@IW{nz?A zd!(W;yKvp2U808dx&B1;QT_6IpnwQgOwi`)Q}BGjCD=3+HQo#)fR_V03cz*Ml* z9{0%IzmRkuv|~N-nDDPX9tK^+1D8vrW;>)JfCf@l&=c_3o&--(H)$}%serx~G5s5X z)H*`=`F8tV2KCVYH$bD17|JrTbbnQcc#!Npb*s@A1t!%2nR{xNabSZCSJV;yUF zgt2^4{O9KUtuGq>FCXbaNVrqdzM&50njg1-gSsSKQL!h1j3325z zE+!Q)aHtA1Dui^B(qp`Q0Nj*G@L0`mj5k$*Ed|jy0t$_~~%3KWQYN+G5l8QfR6LIzN~yaYAc06wn7FA{rb#2ko%M 
z;Uk^W9`6D|hiff&yt<=^{O~-KO6YY-1cmP>I2KG~$xzf|b)xUz7w3v@4eWmkxc;bh zxujT0q#o~p3=}Oy40?Q?R0smX5`khifQJ~GnH54Qin!T`SQo7F>roIlmniwB3FAy( z{}NR2Qz%JF7#i13DD}vAlXO!;^JN#XU9G&dB>`=wZ<$?qD_@?sgHF6>t#4Nmh85?@ z63~!#y5?x&ndqf54NCb2MMYH@B&%IcgUOzN8*b}gCD@g2D)*dI)_bqO<;gl-??&FZW%G6=|HrZm-8Pnt53{%s?UwGi1z3n)8w%z3eHg0*`{D*= ziO|`xuMLL#-+}&3YG45Oyo#8@0+!r*r;Eh77mD)-*o; zGjyKDIDETIF2FhX7C1r2S3(!_kIzTF!r7ngD2i|3^8N43$lfAL5zfmYl!_W1!?3{* zsEpkRaD&IKQXpdw-iOWJDs48w^_M9toJfL(V4*uKxCgg!gk>$lzd?mInUkT`vA!>b zg7);co(P!4Mz9Wo5uNzti|RtZ7u!g}{mOVBtgDky&8uzafKXag@7d-72qU1W-Ebcf$c;cIG(8$Vi2I-Jeo^N zM$Uz6ot>E}pn3~V!Lw}QWEgYEZNWfgL z9>ZNxVbmyJ&c0)HR1{zjEvyapI}e?4E&mK>l{5&CwIvHY*IS|hzoBt&>p9g&cdz?y zi!wKbQLWy}QpZT+Y=N?~IS6%wGw?iXwsi!V-#GF@#!#sAi0(YdocrU%g};ax8F?hS z3?db5u{as91se!-Rh8`z$U(K*jih=L?Ri1tui{=P>1$Is7{wR(P>^ z2NA+fv>H<1Zq@Y#<4W2_TGl1 zgGwOljjLHZ{#2V3H?wKr_jCa>Um6ynftB!?Rk-EX&CXGKJMT|WFu$9s2rzb9H&@=# z1kHSc$rQwvx{y`&m7qd-tbB;S9j$qS!BzXO-i8f*;%bn!WP% ztS)re36NxbBgvu8HasZA*J=&ZhLhUWg;Jn6p(Y_;-pEAe%(9WXi@lPlLo*y>24?wC z03}FvzgPYN{0ZN3C~X&o~H$A4eO0GaPK^ z0s!V80c~&wd(FB1>8dNGCnCF9Vuh)dGW{TMckLC10H#FY#>J_X7J&(Z0wI}}*aXyD5yH^Rj%zQ?;vZ*I`d>tglh&r{Dd5@y@^lHUa31s$$S4mll6h&@@;`0w|j z$As4}e~l;cZkY4jN!ymkHiWz5j%ptXK0J5+;iwxwO|yw<_6F;d$_hE*g1tb1@ zL+c0J=p&>mTa)u<*X6lKO4(=Ba{>3(WC6P{E`7stgdzob7qUpkZ@59HE9=#1G2i%VZGon)6$$A`O)gvgZaOPfd=HU~F}-G>Ba z67dht{=VUd;9t8H>scxw(mBvv0!;0icZ+u`srT(#=2xU2S$EK7DAr%6K;2=@nGMG} z#u?>X`h71LYaI#02Qx{QZ~-Jzj&HGtl5rfS!8^6YBfV=7?`;mVZFxN-Gh%RVIPMeV zFnB)40-j#<3n1l6w(*iAEs`JXI+`Mwd#k0_Js zt~ksE$c(N;pxI`LrSuXw5fXds~1CT-exKXThj zFp=QnpM|%DLfDCYSV1)KXSc-PCN$p2O_tcl?x>_JG4gu!)JtAT_J{kqP+-?Z~XD+j}f<-c)n>SzUNWEWp;Ci~DZI+pVfNq5p%f-;C24Z8)qE;~f3Qp>!1Pl?2V) zAAjo6%C9;C?&e^g&ml|@n;tqcEA8ShtazKf-Clvd_LsJ zC-9MLvw&Z+<~HRk9T4iV|1)x5r=-6K25fTmhBgK>NB$NZ#g{Rf%$K{27fbK!>Wn0- zUybm#b*WJ%+BGYW+uYjxcloe~ei{#nF_i%dY3;4My|7ZMaQGAEr z*7!@|=lF>}6kZfS)g~JQdtE>RPx{ATfYU7WjVkZTenW~%FlR}7n(M(vfP4jZEtq;Q zo}FbHsQBxiT;?+h;eq^u4>kq>vYjO}wKX^1nmnq=%zte8)x;$V9NM^;Ny$scVFN4s z!^zM|3W+Gd$~?)OnOugM@AwnKF*+l`-WLeJg}PGCKpe5(uuhOA9X7n z-R`yFDM*U73($O(3sk)t0Z2$p^F^IeCm112OY>WRgeiHLEaSqYihjYyJO}65TtEy| zYiJA+BG_cV?x)1^LLCl|@C)-7Vay?woa_}k`fxlup8YtBvBtXz?&Pua?nK?I-?F@) z7JIW+ngmhiT?;2r*3HYyfKU5$NqnM1;gl>Mc91Q9Ge6YQ@wViGx(e!j=7`Z3mXrmW zh7_8HAKjPri%x))A)YHi|NX1~Nw1J4I0|Om0#l0peukIDeZ?SYtyxthtLWi)h(o5a z%FcuD=>4cgv)w$(U`%0Z-|j5yR_42~F%#Oq+dcT%KN_%7WCL#9wy3f5W(`&K66QlR z8kx&ZtJsSnU&G@3F&IcIZ*yi6+e%9pDi-1M zL(0}u60A;(TV--0?#Z>@7U$m|3Be2eUVElTHurPo8CRf4nr$%jbGf_BobEfS za?b6sv|=nj?#;K>Gk$~FF;%Rr=ENSQn$IJy$!C2eR+A^|b^J^B{k800XA@heRxZ0a z&;aoSR$%ph}>uhR)zr z*EVVEnk&S6A?IStT~#aYL1(M>1Z)b-R^CJkgfcaW?U#!QC8Q_P0mDkt`AT2Zz+rvN zVhA-ji}v38!0$>y!XrdFP`a7Z#>z75zCT000A5ENG3$($CxIbQZw4MjUlpL7+W(c zB3+4~TX<6&w;S2)@^0@JBJ_suiF5oi`d^JDAqb$8WA^nye#-AFt7^Nb6l(!~RKeN1 zT{fGd725HSqx-=fgN=^E$3qFOh&MqKS$o9(Y9e`v);e{6Ji~cNI0vps?tK`+%Ynn! 
zNi?K+?LR5Mx0U^N-9z-sbGKa!&TH)UG#ReSB5kKw4%r)Gq*qt$dm z$f$UdHoCaUpZe&li7Hs~V&UFkG9ke2NMw3ue|QG0viS`(zmWMMNDnHgQQ_#e9iH?& z|AoD;Gnj;|!^Y*Ce+QcPDa-}(JmMi7tDEoLuC(yiUrqZA4%J%}nP;N*v-&%s50=q z&)v{)UiWeH<_>s}@>!4bY=*R75}^hKFR?KAzgn@7@0Yve3`o)5f|#|ea};e_3&*1{RN~bx(kJhp&z(+L!BO(F?NTZ36pc@po)D9` zOQi9=Pxt`G94D;^aLglJIe$0-3H5n`G_F4e5XoqaWpnxmDc$Qv?OkFvk0H^X!BN;g zay(=HpF4MpMeX-qS~9f4db-DDEhRE!K>ozg&&!g~#U}~`hVmH}0{3iLr4JxdtxZ$ z1%Gd+FXk8!<(5mZ3JyU!x*w!UaH!BHU*69-NLN1r6s)ZZn!Th6>H1|0I$; zDZdn0{xgIrjLIsb@SC@SLvfpO6oSrA=@`RfXSptl^8!HLA{)l0d&J^jIuYX_`1ka- zVj@UT+9P6@xd5@e0x>MqHPfX&G^!M-NjC6PQH}TTc(fy)UGP!z{`wXvOF0F-IXG+T zrv^zs%jeNVpJjBUMupBFOtHOaq)?#*T-3R< zn`Nw~vH}!VVbL$br<~KJ5FNrCJ5{X1akH6pYaP(+%XxxcA^RhjBO0x`8iGWLMapVW z3JV;pve)-RipU=jDlGz+<2=+LZdl4wk!*59JhJIo(J;8Ly2)cc|IFHd804g?fbAUY z#Z`K#pQ7RX7ZqK9>e76Hg8paPHhHO4crq$iVlbhT0OA2)8@_mBX33n}{fH8_f^=6ASr9e53xky; z#qZSrmK2%8be@xIDgF$~2C5zmMe3Kdf-;(7?tPiMQ{%O;Db97bkZ+b|WBL1D=C1^( z-33MNEpWN4v8;@*231^IH`|>YwF3i~=UShjtIh!!z%3CklwwJ7Aji?-AvXL;hOGzs zzprf71h_lz}~l zX~Shouk;ocGPUDsaTvT{p+I~tuK|g*hH*VkVEn|FIV_ozYct`#cWUkN64+Q}(sE>@ zjb?7l?n3zjJ{ORSzGs0IBjvrcL1~rmD-@D@uqEd);EH-8_@-p6nxAqWyp8=@>uk zrCHIXVxU;6&;4XZG7KtD2a(5N<+f}4gD$xi7?V}I?hjCL_tH9Np5gW9rtPLxfJ!y_ z{~+OHN-{u$@IJLf&W1CRWiL(=L{j5*>W}qj_vywiyiHiQ)^4tZb05gsKR zO*)wXy7y7)7Jo4OI-}a*uo_i~UBbC`^vG|xBLfxpBkV#Mve|rlarg=qdu|LHq4NzO zIX$E9x|NEtSl-A)gp;a`{Zeg_2UeMsuI{CJb?F4euMaq-PTOvhj)bn+s~1S04e}?Y zxD{OJl(GSmf-!q-Mv48>9jE?X*rK18u3%u@!boe`5f@)(+1nI@9M3@m%7q3Bj8>4`a!Q{>ze#D^22-#Fp< zSpGbiN~nxEM~z-<6)As!5zy?1vz=LezD#sXWrh$;l z&`#1n+9aFalGxsG6llZgHZJ#Pkl`v~m&DAf2=8{8=aLvs>IEgK_l3g#2!Lz6Bgy+} zdb=yuV8kSwc<)8OUpN?K@bMQPY-#xTgY~kztX$v45Yx&&=k!`l)Ur=}I}mxD#oabu z`gw5$b}egs`&CIwlG}hLkcRn8c!xD`qL7Y;d`EM4)xKtokFa0^??_wQclxUU16ng{&<^bj>s3`bSGf|!oZxMVV3K34`_jhD9{M=5$wI|ZexF&$ zaoxj*8s`^08E2PF93|<>32C4k%>gyPOWN-?Mm$1#v+cJP)%vel%*Y7gCjFScu(#o| zIR1zKs7CBCPbhn+9vw?A%!2%(`&1lIMl;ondw6SA`;LpBkf(|bnkO1P#0$ z?53*y{}RHO(1Idw!b@&MV!vueOmuXnEML+%j_~fN58>NuH#rsYKHqK(JMcH{UL^UBS&ITv3s=zBqe?1*GI-{BaZ}N*-lL$rW}xyH@SLHC^Z930UEd!3IzYGU zs+~(5WdHjhq4fF( z?iOlyCGWB@Ax8zmhZ%|9V8+8rW&8{KNYF1PiG*?Y%mro>?=`qYRTfqs;m&p7^NzD z&bH-=S*CN$+?EBZF|1Ae z4<~$$f4)h;U09#o1Xq*H+PH2f!4I1{Zk##C-v9{beCvS%&{J$xAeP70VL|y0>RWUo=Z?O%nGya*s-DR4f#pZ*Uz<@)<{d0=jiQ*KNxGTz}xy6;dlw-_|vI$ks`|; zcsgd`=;pYlCfnjSAPi)cDQtW)38EDYanbp5>Z{xAL4T*=Uz_p7ls3~_qK7aW?#nK~ zf;3UTqtSg!IBP{e-rO&o@JP9N_hw?2s9#?_l=s)o2^Hp{Zs7b|Zp9fZ`JrDPr;}&C zhcHJXt$guk+IF~yr6|@qQy_~=&AUjQgZ}_%rh`_hME#Y@QsZ|Xx>w3L`;sGWjtYI z@A-l%MQlvRn`+8^)l?OexqpExaFgp5{hs8KIa82}PdFEd)KAEk*@MnKP$7~5sb)g& z#+3}GDLU?UK}G{7L%$~RH=OqJ4)!q;EgWZmp?ug@fmIKL#meoNr~JMgKHlsUCYHm= zD#)3V{@|ei1n#5ESa2JkmfGmBv3jS@zhw@z&XjB?UA}wzP#e}^*pB{(KlHca;DOKz zev`LO9R~z0T`v7*{gL_ZVnF_J&w(b;0((IFl=#z`{lUjUF1qMg{m5{msRwvN;Xurm zXn|P~`2k1$t289sF*^>OC;?50t`E1JmETTvX-XdHNG)3et@U=iO)V2-F1&EpbaRPh zjQJLC!~_;><4GyGq)yN=LgiU~<2b4C9FJpL z>Xi1+mG6J4iZLp{aZ&Zk4jaB|iSv`TpbeWJKS?vnd>}!dPEYu-?6yxk>tEj;6qQkKSqiOc4u&s$tj^r6{ z?rkkc9Fk~>Nrj4~Nt##~!IMsH-Z0KvFe++&+q<9L?I=tPpzTkO_HH9;ZFvpOI`0nw z#71DZ*d+LA-onSNs2-ylVrp*&aJoPj>e=!+=R&!>F*S8%Xlzo!k_|{!kzEm(rS& zpuqEN8Y$BQ_7sqD^7$rheWm4avgvmF2C0<}-Nd-)(vIhb;&eskxSNif-m>g90GTw_ukV3T;#VAjJmm^5`TTzihA&jVM?*n-x z5)_GkrRgy3>bCLAT;^hqOVX8e!`APgmS+v~V89@hjt4sLh4_G%!Ddc9@0yCsW6p!k zyv!|sgM7)vjPFG+{crey?##F^r49Fu^woU9?ha3-=CE#2`+K0?5WC*yq0fxN(Q zXDt9}xW-(t#P4zH>4uw<|6~y(9ePk4=%*|m9f~nKrON^N9%b{E7ioKSryQab8xyLZ zrL_Z4+PcpMQu~lOU-z|@d3o1f$E2iqOX4y29x<=TxS`gMm2hGdGJ8Za%)0IeiRoHl zjnX|g*q&?(3Oe1s5Y7&_Gs}4GZf!kDf;#~#nN6t9IcZC)L(I)KB_wK8UxuKHy5-eL z$(lDYB(w7j&2#?NV5VgtLDOa6NQrniM$_^>&p7>S%)fI-2+aXJ*3E 
[GIT binary patch payload (base85-encoded binary data) omitted — not human-readable]
z7g0YZUUs9Tng$0#n|F?q5ua@At9S$)tPaX-gB`5}Sv{|trpH9?i^BEq2}ko(J;m0( zhO$f;TRzsCOb$h}2((svHMJhx$&9M%BEiCu0oos0az{R9jmo$ZT81FYz4%zhqz*;I zdRQ=sW*(jn>|_vNm0B+>0m=A@_Cd@k2TgeJL<~RsNf5Y@20YGS&U`t<0Lk|6ZRYcB zbE8U#wE5$)o;q6S?R?SRGUAuQku54ZzXif*;-mm(dv1UufJ*vd!#-UMp@uwcAdWKe zK{LpyUGiaE44-@+VQh_nj5H^<^B&d@!wEHB{8R@_^=0K6sL9N~%(Zux$bgKC+G0QU zgm?tZO4iI~7VNs)8hqUm?h4hGN>^@|yY+j%)K~8uT+V95Ey<|;A$H5I3#12_eiqGA{i82g3(XoqL0~mzvabq{1E}s*!@#Qqtdx*W<;8KQ>OHx;i%WK7fDSDDw3@RSo7+s7;t5R}u zc7#dPor4r^^r9i)RY(*XqKug7Teg8vSH=F{5MCOVHtNUu&ydQgQ>SP7^~Af|*{MRC z+kq3X{V!904uLBpy(wHuk84Wlf{~ruQUziB9Y+ORk8&LWTgyn8!xoAE5&JtG{&vD> zZIzc&K<0M%a3!2l*j|l?!I~-^?NLyC?`78dn$3P9TnJ;HH2@X=wItr9m)jq(K-LpE zx*~AzFl0ll#+^RR=^38Bd)W~^cUT$BXAI%s5qq(eFx)kz)ME&ClY4YJapx&RO_y)H zL{E`wV`i>T1V|6B9#0_CWMySnPFT`0<*g5DB6kI!=SI81I+|4>$*n8ps*eonO8Uy5 zM>A}p`qzg?DM>rw6&y-CO7|z$A7AfARPL?Kn?c9udff+z_>al!YA7Wq8Dq1W!VG)Y zvG2Lyuak43FU~??^W1!n^0`OlEph|qKL_`8G!mUgX_ zVln<;_J^7a6^~R7+`%3QPCa|)4^{O!*_)siwrHtHI`)eOKBX?Y70ZOX!V5i}wb(XH zaR|3!dwum|urn3ie~o{MWwsY~A@45GYI9-M0m64v}Umd=XdR1g;y68X&xS1~ zpes%9OK?}vUrYj85fJ+S5Y=(=v1){xGW|9^IFHrNz)zmB(f4Ln1vJO`u^rJy_3cLlCf8VaPw4w01Ln4Lj8*eO zhb2Is0(4BlQ1)o-5(+rq>rCVoPU2$n!KoGvzuOPyE#aCeDLsDfbmIg80{<%-ZZz)j zuRNXaRkAnP{%)V9^H~~H_)(XZz7gm2?JkUMU@FjhE7NGz zt@S*Ya9(HxDL6LIJeF~$f*!IV?ao7OR&go?k*UlmvPtZKnqEyh3^NA5KxWx6GEHjW zKZqGR@Rb>xuxSE&Z|I^(Daqnfv$TEglVBto9VJ&?|tpJhyIEZV^8wM{f?FeCp(QqoN+ zFGrFT5sN4y?V7xQ+)$w$`hN29Qc5LvQ?a`#a;-H?G-wNlyi>C*C~H?f>jBkexAS2o z7(t(B4>Q+fvHunAD6MX1Rj3*3vOfd5Zp2!<&|SLk5O!O!m{qRaw!l~5tE)tj&7n}= z-VB}a$v)8(As;!xUFTtTs#-WSS3T{R!vXq0iwcR$RTWHrQMK^!cojLtB*_NDPYArL(Ma3B;H@q6UCP>Us96~FqE`;SsRTV zS7w91cbS5+oK}n$>)hg50q4b}Prf~_*d8v*T2;H~qpxJ8JzN2?dr>TnD2RHt2{Fr? zAY5=m9OeZp`@q%xTuk-ecn2}yovx;dtM_8*&Lp^K--)yRXFQ3v9ft7?b?Se7f6GCs zW3o!K8QXhawUcwh&Q{+#1Z};eE7OVGk7wixL8JTq0Bk)}{oup1KczlBBqPF+nDj;D z?Vsa;om4Vp=0k2udg>S5?iOX`764v_sLhSsA77_H+Cr~j9`QG4A=T_|B-iffw>+Py z`kCSpWFnU16pL=zNzt2%DIS3<2k0!%$zvR$74fz+4R%VPjQx#KT^VwnxCaFJdAeTXo3 zKX{DpmOS*&_g^A2ucK{cWMtk57)YMS(Bp)@oVe#IGXF=(l`uA}P#WV5@eL%Z5s z(eJ(*s!HY{DeG+X(qsWhFE?0ycGV1B`)_8{byw-hNMTMWSxq3Rk6V;W)x|0=+8&+^ zQ1(^X?po4EdY!?vRRQfqFHa@~=;Rz*R3xC$wq6t1^ zcxSUJV#Uxb-mdu{ge^Gfi)e0vO)z%VsE_N$2y?I562S-;KPP%|f~;PEQ&51f!)Xj= z&15p{kmT~08@v6k{~3iI3n4udxe6_e>7!HqSE^wptzn@3W8|Fnc9PpoyCJukILqHB zdL8Ml$(`5KyBbu=aP5=!`$icbnth%F@k&tf^o1-6 z6oC=tAMAVHSRQd^@)IPm-2LUCUc6DQ+}*xced#U}<=>2Sb53|fIr}2V_)ZLbVUKE2 z$2c@EGRCRR!=a0mWgd03$1!xNWJtV=M8%uW#w0D~dWDupj0Wn!_H(-j9&*EAn%D?ppN&CB#Q>o z#C&$5KMi4nPtj+1zKlwqE#?sdB0jOLhQ5#$hUQ8qxG$0hu|>>vCC>HKux}BVY{|`L zn~|cJswQhBi$zRdesm=plmCrx5}rm_Lw|^CcYzc-fVB!DO6!IV|AL)=0DkIU-lSxG z^RPXgE_xf%cmp)--o%Z05+ZU}SO~O$ClhA(PLMiCq@N38p9`TLN8v&*WDuw3mrwa* zgRiwcynK0>QvCDQy`IJ$=sVfb(f?|M>cmOepJYs_o!}Ii+)N*8uap=;bbTrpE1tew z`M1>w&;V|XvUvQgyM7Ir|3kUE@7!@yo9v9VCQuC?)wDWsRQO@Y7X&q=F)9Y)UyET3 zqnEAekN^g0QZY%APS?35MukF(1WZ9g@-J25^FOzYYd~BeakLyRafNIME6pyM8opeM zS-~+AXHLO1c=B0(S-`qIiA?ZjA?v7CA^VUJfYbhP08Z}%M)|JZ_p~z5*4njh7BS4b zM~~o-7Jj*C`(B!%%s8Ki$KGFxU83K*>9`2&Yu7$6m^!BlD++^F37lLA1cNZ)?4qKH3l~aX5oxdEayok2J2sMGO@K@14WJxpSb zoXsX_YG;AXVC4l550|HEXDgJW5%&QYpObf_e0X8^>j7w$z2;e%`zm$#p-y2*{_(0$ z*CRqr_h!VUB=Fc3l3ueil>`e+Q&Ch{f9_eHV`y@=lOB~M`HcM!D20H!mSCSGMoigJZhjy zssrnzc2N<~7qifpUyQCfO!3Xk%e3c|*|88qSapr0r#}d#^JZ!6)J#h_V6hM81b1Lt zcDZ$6m@{5UB)cq2f6$&TLBnD&rh6TtpE7~f`BwafbmLY&F+)04VqRp05SF4GuD-YC z1_+a;1{FEh%+oE`+o119vXdT3k#NjGbYdMNaSo_7TpNK3G$FMg&SSC(_DXe>waGJ! zt-8XpwTr<>Qqh)vp1MvHXI*ZtBpQdi;c1*W#N>HP2?XxP2v+EN<$imDzs&eAtmb&aYBFSxIPfhNvY%9wiBW6l zvI^O%dcT9`WhbQ|Ms`A^7hF}J0vAr?^y)tIE&MSZ)>*NH%Nz6-CzI6me}s0`i+J^! 
z%ck?pZY8Xefw8S%9}wgV>?C7BcZ}!!#Y`-w-jaCzhFemzPNF*If{SN2dt{qn*ZJnq z-;@9nO6@>o8T*3{FeM@+t3}{~0$>-U>#R$aqY(MBkMAcOaw_Kj#^=^nk)A(>QzNba zEK~ULgk1?K$gh=IBma^V%D7eGm$*vUkhhyA^R1Mri}`8uG~zO)3n9vR_w=|@`bOC^ zr}6&h`c$9rwXAmYT9j4ekUOR|n|eHtxk~uV59v+B`P{It0C_}M`vs|Cc3s+^DnVvZ z(a)B=id8Ohue^x#NfSy_9PH&p9|?9HE=Z^zeg+cOD$bnH<iTORUG>n9$MpRu*L=Wu}+6=_54H|6AT#au=OnOCPM*063&>$A6O6>wQ|HW z_WRd94fUMEk4wx*m4~P4NVIwM`6edmU>l(Pu6GX!a$Y-xy?DMoVSBt#eW;cQG zHRqHjP_5;Q=>TSt#Z$sWhV;|shQ9RE=>`JH1=$_*kqWWY`#0NT9{e@LR>Piy(TcQ7 zFIPb53nj013SvazUUK!^Yei`%Iv{8$s^51Vsy5(P<$rrfoI9C{HeZ*yZ_oPPy$7^X zBqH|hBlwBoY&+(e^{nWyplIVmgs-vA%H8kJUNA;^z`$?^d9R=8b+bqNHRoLJB!sX~ zcO-iO8Ez60*RXFGkfa|uu&;{;8Ak-E7B9`Ecb?-N&|^Iety~b6N|P!;OK~0IEV_j9BJe`^BB>HuRdqqb4)%YU4kbT z*6W`S+nE2@MAacUar0q#Ka86HyjRZo5||3WI)GPFHs(0)!H*P&7t!xnq(9S4gIy5% zFObWT%rZkaUj(K)NwwRyJvy_u_V>N1L5kSe8qH*tD*QJdzGUyRFS^>?ml>Wrg|OXt zsZHt0@vMT@XL4V^g5=9Ew`JuY3T`AFi}u9#?hP0`<%{ro-|+fyU~+{H5VnWxXh5)} z9BCnx=m%CydvRZU(H6y=py#{f=yJrUc=!3YZJ*yUXNP;;sQg+5+EN}ZCxLsuz-Xbs zCwu}-A#urQ9%2<}o{QHxlEaeu_8+S08s@V_AV_3h_Sh~|l3c2(n`EjU7%|M<4pPJe za`u$E`%&_1V-1;5r9X-pmD>^Gu)oEkGkkr&HA4M9`Jw_3naU8s{NXNzHD-I-RGPoUF?el z{Rx`0C2Y!{&3)~;plCLG%klnZ;3C9h=uIysWoH4wqHv5Lt2B$(-qF1-M_y%s7$)41 zyD!c9mi7TPXsbCE6sg>9Kd;4A2d2$g~|F*hB^cz}4+?;^$T(-FP|472G#n}X_7 zk)DgC9v?Yza|cHR>w_K9_tyQoMH(kLyhnfE`HJKZxz4i+{^jFa;YAT-0#1-z|1no~ zLXRou#CY{Z15`3D{8dj?4Urlau{XyC01!q)Fb6;bBf;giE0G&!OPG3YU&Jji07p`? zwEPQ_PM)%E6}5;V20kvD`Qo%KLd|$~vl8B)MUj%v*?R;zAP0|)8mT}(E7%KNmhO7- zrNZ4G$U1K>j?4==kE0)>T85i<1yEoRJ&YNtq|DZ(Yrk#TgcBT|un3GntH3b2}9c3-$F1zF5MLfyS)ordg2FBAI%BA2u1Na?a!cL&DnPK~41e?Oi}r zF++;%(VUe^eCwkG!aREI(&Ja;3H4cjGxe{7{XVG=fC<|EoA%p)BQLSXcX#akJJPe} zLy}P+Pt;puSeR_`!b7WSnaduf?Z)GtR-dcg=1cXmw3TX?*Lq5YN|z za1Qq*u6m5@%$R2aH$CFrQrH1VCB-|hV+Fw^12U#zfKa1DUxW=$NyJSX8$r_BlT;wq z7=JuoEs9-R)N!=}S$0Ga9k_02x0T?pJ~o#g=&q%M`OV`Q@(|jmFZSeqcA&D@BFOVt zM*!Vl*Lq2?2|koQxcTwBXyt-mjM~}jd_b!(eIC9!sCRQr{BwDYDZap5xb;$R6+?fR zmC2f+o^9KybnT(p1CP*ji|g7uOWuMlI%i&(tE74DPsCL2v(YCEG)@l1_4+Qlybd2S z8y?-4JX~b060fmk$UUq@FXP7xo^s^DI5oKz{O94`*p%h<#?)snAbFH`i(PYzTe^l9%oq zoaFzTXJgR9xPKW*Ktsh^cxe|_1(LK+H7+aS9c0>fV_LQw`72Y2PRd;U;nN3ZeEAo- z-H;Ju{3p|vaeqg>-~P7|#4My+&zsf|9bP>C=U`sAo&bvBxv_lMYhe=Sexn2o<+Wjg z=KsgY!|C^;TygPLIm!n$oiKo?dA9^$PUNGAIBmplBn*IwtH?=WKE{Z=h<-`+80t`) zKG%#Qc0_zxZ1BsEeF`0gkH>wKiYFS%5R}9GDWPvfHlo=*KmlS5ZZy{(lU_H6g$4BP z^G5-`WM1K9-i{rEJ0G}P-m9_my8uho$MEr9v%Y-T1!v0K{RRJ>E9ebd@=^WMsy#yL zT@WwNUPcgZ4$3q%Lsc0Iax%x5A1%xFYpLdsw??nQk3YO=ucP0rhPvLK$hu*{Xi^x= z=xaTi>P@`=mLQc|XHhLbbHQwoMk9w!Wvgt}ZndMC#<0J-G&#lmouX&!2RbY%uc3!{lw!hz1tz6bw63W^aCH!caLixCt6 zY2?|qMTs9`S~lIk!poywwC?mlZp)rjmE2HcU;^qiE0D=@ zWQSjYt&@NwiNMQA?3PLoi{=pnQYrra3YI3@DXtQAe3=^LqJvWgL3SdeUW6L17CZNB z%+DbFI)GVqJ@tFHJRFg1F{f%wn)qJT)L7K`*t@%E)-eIFbHKRpbeS)9XYA>N&|u6d9%R_7KX&N z*o4=z2*ZQ2DJ=iWID#>tYm`3+cI8BD1~*Pc>a;d%dL_BD`x2auq$W#ZloxS01bhh?{Nr>uWNCfS zs#Jx`!!#RetEd-yOVKI@I;-kyCMz8Ji8!zb$kp4Z-){FriiI;iD&>M;bHOG1iCWBD zW{=}T4R=l7@WAvi8kqcTH@$9Acw9&TUnMw(v!};TEVwb~4j4;CwI^YTA~d?+!;4s2 zb!O41g_-RVJFBcTXde3u*82v`-E6L^jT^r``#9YdeCE)f$<{$W<$;zHnt^^s_uZ*- z($ZsY)#MlxaQSdW=}9{oLkn;s%@cn3bys}jL(1*w$VeB_f8|(~&rao8PjdMiC9BLJ zi@gbB)T5|8+qr_N08p|6duj2R)@-QMuTNSFtV4SxJ^vB0(t8CbdjRO+9o`E@bj`ni zYmuv|&#b~fVKS3wqkdabC9Zx}A9&nXqG`F=uw5bu>}X%|I{2}NWFSIcB|{*bU!`4B zZo$7-j#b8vect_@__{PiJiA?u30CRx=Y@ozI?En&>iQcSERT{L^-sbBTPdve{au-#AL}_(G-CgdjqeE^hS&Jt2 zv{s4#*rFiQ`HoQivWIH^Cy1Y`p4`~uSL$gK(L(b|{0xfxVS0$P&&U17p;F!m8lS#^ zg(4;e$-=fiOtGuUtB8*eR2c{`G1mEJa3OCPWURg5O~Zf`UzTa@@dyTc3$!ol)t(;kf@bOEn{%6a! 
z$L8H!XQnu`<*#?fa2{r4_lkz>d*!PB2G5!FZt}~H?aRuVZIH?&>jRhGfLf^?gMOyi ztXjt3%Wo_|RX0;_Sf}-;cuo3)&g7b_%-^1-l-p<>)@YK96}Zg{O?dqay6njR2t*07 zAPML5uLV#65xlCKvFBOp7v?F}mO$lyZ`^G3)x9iJ;neqRTx(|RGM{t*g;ZGsU zlP9m8{Lf>;1vPjpfwCcgb}l^$CP9vmIttdprBVDv36xJX`vuut%2RQ?PBu~X$D}cj z6ix#poXiDCZd`@MAd@dW#kI*VC6a&(whf#!BgoK(T_=7}sh?c?@wE$h64W&yKc7pQ zR>FqTV%M_FLNRU>I8a^v2I#9L8{u(i@hs%ZF`UgQrOnB4V1H50q$4O?s)bM5o{qt@glbPC7*W)s92Yxc7dxva2bYd ze|i`J!QRz!O_=TFjWnRDvr?_fFLhXM?N!6~QCW#+l2Xq!1-~NtcdJwKuc0wxwQfe~ zk1-1Bm~#*5$3^jGnZo>f+&@P}_Iyh#-f>2XdRulTEGLhCJX(&Uk@NTet`HAtmk2D2 zGe4J<_@~LZt3sENRfBo~a=Q>eR9Ix2Xw2bZvCYigow|+WQBK4qkBde(3>c6g(9_)1 zb(>P%qoHsP6OZBzzCT*)*}Ii~zcc99a`lzzXGV@$0@n<(;r?zghLiP|4j5Wff5$)X zhtQ@`%YPk6T=Ey7vQUd06vp~&fJsZd-M)(4g~3qRQ*xPhtvi_!ay$l}ybqL>~;P-T`VMgDNK;tLK;4Zay0{&OiBp%rR3Sll_ zI!R+W0(5|I)ti{t)Fi9 z^=Nz7WHpSyF1(N6suvynFx~9{>ONizt3On6tDBs5ip$e7!g9*jtC)VHz&~&yj zEK+Tyoo2nuZy$0&N{kCa+0j`RY z9?#hV2lprk%(9XM(T{tT&~2halx+#k6{yZjUiFQRhaYO^+AkO#AC))2JH7)L)iUqx zKUN6lc_4z=68g{pYX6Y$2|TOAFP~YflVT@bTsrtu-%8x#nFt6A(r7O4VB?lyb2k#n z?DvAL?A-ZdfK(Rw`X6xBte3I23avURWgvO+95wr=2OU+2H?PnD%Yh`=RCMttP)5U6 zJ93ez!Lx#O_&#qOv7pBpzE2pR3pDs|3M=n0|JYv`e7d(dLr*ez2HidXszLLB33qA7j`F+|iVsPi+ImiS`(#;Xul$uGK10cT!`53v2Llx;B&{al5;v(ws2 zUM3gAkgWl^^$#ht7rBw|basf~78QNiM3(}cma3hc&%qEHQ#pUDq1%PIV+Fjeg}e-563Mj(P(SC{-3V83w9W`JvQ za<$|7)9mD0??5Z)xaf!FdG?X;98+9lDF&F}dd0EEp;m3sgCf~ttTYp7w-b3h53J_Z zUWH0qgdwGdpiUr=0b!s_?!bmY#D?@GH9~(J6)bd(AIJ=7pmI{W_(%cq01J) zDH*k(lZiwY zq?+c(bH?4CPtYx&0`YJS_1!)=MomT;ITQ>eU$cvI&9f3${wP!tilb?3{N3tv7t8pQ zwF}4tVik-Kimfu);Bj|$6)P1Us~4;|trcbgj^M3706Dv*uSHi>P#&i0^N^IL_ z5?j1?nVKUJo$Q+&H!5>tHnKsfdK)&kiF3p)MP{6HX(X zne%UO)u9%?fJH|szYpyF$r%(HoXh&IpnZ$CnW`5EqWpddeiN9=B=f+_&zt~u2pL9N zz2f)%5uj%tnKkl8O2gFu+s3XG$D;Ssd4Jcu!lRzSvcgA755AZ!Gzl)Z0H=eL&r??` z57OapgmzG-Rl6DME6G;LTrxawhp$6Y#{M16K!SKtZbc?HwGMsIoM{jjgQ+*y!= zuaOc4L_0Y`(E-10AZHf+v=0p^=CFyUX6&AdCzO^s+k~Q~RCz7ll)LTgOfQ78sG(voNK zwEdmT$k`c4wVf*+h>Tbz-AvnkDZ3G40iKoCz)caxmggD!3}MVKMa1TCak&6j=eUHG zN&=lC^5C#bW;@#a%7I@3sO(rMcs>24KIrtMK;OH1*OjV1uav4f4*fJ{rw0S5ksW?Q z;i%Vyw>a#0r^cX>Ir*+xp+S{DelIV1rByoB;e6&GI8iaTuLZa#g%GE_rYX6lrx5z0 z$0^{Q_5R1S5k?CJx@Kjx?)s8ykkNuO9)l`M+TcZWlbqTivo0asrN1^gcN-3>;m zX7crk_iR-cmn!%!W0qsARo(g~NdJmnD~^m!>CZfxq{qUl_37i)gLFx`X_}^g{G?!E zwY77@c=u-+iLuI`fn7X1cb5_PG8jo_#ruB!gVBqEvXh&(Y(ltg_Fu8w_4~UJH&HLk zV`U0TCfZL+iJZP(pA(4O{Grboa=q!A)*JO$j;qAr&CQB%5^ETY@mtIxh}&`6Xp_mf z4syqGb8b{PZ2jENddc4>ckhxdL=sT*hh&RMr~eN?KO*jQ@ar&ewVM`xUTZA|5Og*N{}e9IiS)*IK|J;0lqQLG0dALM zzAKu`CF%kb(J)zxXcL*-CxJ<`o3bHv;b##OK=h4-+2+kXCkZR(d&bs!zQD>DN9Nqj zq?1f@Yv}JKRN&ppVGAOdozPBhSiz}0zFu0Mx5sIaaYEcx3#emc&Pipn&wfLLGVj}v zeTE?&^Puq}>CYUJQq+LD9Y_4z78i3vD0y;gr9DRK>p`I(`=J{EkcMyA(u|Ja<>o%! 
zw@nW%QPKEZXP08X+oRWw?TG(KCU`%fFHQF(Qu)}pKb7!u-Mw57RgQc1=v<~Kb52G^8s~C+&WyKQrvvuC-ZkrV6J`Ypi@xL$Bz6imD{j}x`*!F;xzT_2(ZB~-ARbrr)1Ht3-xs!fqJa8k6D7Ei|tzmU+Y5adv+ho^q-$4%|^8a3RUq+y>bX;ZIDDI zb>E?S%m%hMhqm1+a*Kspj&aHdqn^>elEoXR0IX3((KW;#uit+}gmS&wkP*(ox z7fxR+2@B9iFxQj8WcGFVZG}$@af(~y0VE@q1E~NmA9KvP4vAc6e)mE+#TF;WYG#SV zf*Aa$W#X@Lb_uhDSy_DFinb|HBf|m`O>uqx`jDp8EU|Q6Bvb4`ax2TNX7Esb()Ks_7z|1qoSLhSgj^6@nb2C-gi1ol6$dra?{h9wztz^xYymC2c{$|8P z?C^b+cuy~*Sot162a}}UIy-|3EJ18|#t;=AF~FU&p>k)$0ovOPrNWcKp}{Az>%Wmc zHCyjidtXp;+aI@Jlixnq>HD;Bp6aWcBN9`kNd1~k`&!S1@z)B~{A-0GPs5J|IV z3mMIg=I% zsGj7ui~Oh7qy0wu`eobvFfeithdF~U9Ii3o0h_r)Vux&8^8JTTPD=pIu3(|Se5GNs zp2L7TG(Bk7=-|v_dxU1Cjp)<7xX(Yj8{@tuM_X)B1I@@F(?_BkK!Gf_NxDmrd{c&l zpjfiq&jjZ7+_9!KFY@jIB!pGbNPMll0=^x=iWG>|gjnsX$ZJSv5mT==V6{eJHqM5& z=$1rL8g$@jnUL2uhw9taFaZtn80dhS69TSK2=lKu6Y9304L^{4F@OO+2pVT|8b{{>dc5=&3KIbo|?^`xgi8#c=Szz>h`SggV^I z_PhVdk@>as%j(8I1=R5yFE)EAQQm#^Zo}Sb zHw#crw#OmlrAnQ*2}ls1%=Dg8wcI6rE<(H6Ri1{fk(-}rdypFToa^e%9p{uM4t1~b z+!ZB(lbEh;7Y#0l91AbiR%nBo5oj;i2YT^+iHx z&soS8^MbLI88isE`)C|l^@ZYZZWVL%cd*BQtL$j~K~}4jlUs}x8jQPwj|0HaMC)w+ zKmFXH?I%OYD!!N>ILDOHyjG3_VHXzWNCH_relMfK(oE#oMNxCUug`6keR#FeCZ56j zt2vsdapf67RK;P0&;RIKgTTVUC#?J_&1vqR2I;Mu?+)F|h%`<7OYDxQ-HX~Bt=qE} zTNy6vt%sC3NNL;ZJB$&m2kf6;;o*~?QMT<9?&AI&^vyGcv&xDC=e&+fXd6z`uiKFpX+~4jK`z~%NQ&Ad) zPTVx?_@Nmn@+*Io5J}hxgAqbOh;-JN%OI0?5mEjX3v$C%uQKe4q1$D|vAEBnRIUq^ zjI~|KV2hEF4t8;-_-03e;mY-g%y#@TGU_%9`hg~;q1GJ=#u^`+{krOg-f2k!;u_Ne zJKfyl)14DKyzrz>N5SwfpR~sbsddl;!o9txnh!ipj#WJZ20oB@|Is-f*vS;`cA zEx9U;qdS5}sZl6c^VAxyRK-|wuI-*@#wlEtoZQ1UsHHaQc>m{~uj%;TC1r?ty|JphyYQ z4I&^74$|G-At~M6t>PfvEikkoUD74p4MU95DKO-a=fQV>d%x#9*ZB|TdagD1y4NqC zbwv2%+dn5+ecX4}TM@`_GFHB% z_+KxsA=D^r2Pqqq9zBU$DsDsKV)}vfXdC5uP`}JMqiEZ!1X=^>PBBK#;NV@{$%600 zkZr;wyW1k+0!NsWhzAkHC-=U)>ry2h1J1TzF6NI_7SeZB{YY0E-!pG1IqZ@TD4Yo@3*$A7`FeGR@GX!8Z{8-Sx4Zm;J4VPdwC zV%|9U=aE932%*RCkF?w`Pca-U9R&a(XU{0rVgU3<8A(Kr6cGg5TH@d_^VmoZSJEEt zTUqN^P`X3j8Wzyf{hJ&bwd4ZUmxEDGoU|#wy6tYcafFJ4p=xYJ0#40J4n-Y z%g&%q0#ux?l60ynpq<3FCk)4{i^tYqQG_Ldi~}3>T{hEfZ+nPzzKQ2*;fC0B(OT>* z%87x>WTRI`Bm@sb%k4nFim??Aa+UJt{`ll+&co5)q7Lm(&ElmVQs0}yEAo$H`2ZB3 zsHH!!f5k{qy-*y3dOxlV*M%pGa<$oG`2cUSvE-=4KKA7pBgIkGS)YiufMXNWYn!p2 zygPL#do$+9ebYiFUk8x}DhX5r84U--ZydJ-X^(09W;gtyIA`bHXI9Mf?0gzU9l03O z%PSE0$H8~W!kIC=bqGep+JH#p4J^Jr)F0#3jPWOnWb%JV@rph=d)(xx-^jhABW=h# zlc~deBy^2=HDbyES9~PCy_Slnn?e06w~=If!uEl$YdT_`>*AFgrGUZQNswyWDmH&z z?*&}orP`r%=AWJ8Du>lk0M}79u&nCbKFjQ>zyby{Qh+Jt**p5=-*T!l?HX~ca^P1W5r{7DPVUiB0@!gLFD_%EW)8MF;8lQ8$xdi%;E7WKy z@f#>sEs7!z0*&=Q-Y`9vr#c-n;~%35G6^|m)OZzjutRVOf>%(t&nu31*_pqaW~*22 zDVEF63?4`(M z{;LTmSB6KV?A+hO_wbx!aebYJSHHj^-Fem9JPD)vEqJ-+^vaBKY5jzqg`Rd8ZWYwpD?}J9YsvR<+T&aeNvyy>*%r!;On(Qp5YkB-2_|W6Yh?MzJlO;RH zyiS1&THUo@F?bx&YjZ^7u3){k)(vtbzd#=s51YqhHTh2#fGNLq7=^7>tPrUVg9H-5 z11CeC#rX>RS{t`cTb7huOt4n>O())Zfp_PKDd2c^dQz^T)tp^n8=}>khVk_`TR|n) z#%IiLq#a&0V@HjZ<#w|<8CQxf`*a`!9JYw=!DaS+nt8;!c$1=KiHwI;#AUqaSFOwzF;XFkA-3wN1}BW-mjrgDE~GZ!K$vQxY1KTKH2&_7NEQw zu;j99M0x>J9CHo_jk)tbg`J;ddr3W@Acm;GGa!Q_x4ViKzrAktSa3Ooob^>2wvoAJ%cJ-FxNKPNSbp zg@8whx?3BH80{RSm)eG>v%kLWaynOcWGtqbDvX5g)IMV0Fk+cp5r8w^W&b8;F3fp^ zQCn61LbXCwU6U8M>;d57wS%f#HXxlpaHQc?)9jbr-k12dBc{G>~a#{<3wo_-T7eYD^K-wmrtnU%|2mU zt+5^>k69nYzT3GIcBa--9JAa%bwSn0%$Pn$(&rL@LrT$RkmfiFIr_)q<1R9}gRHuu-aV96i%oW4 z*59jukn`0sGqYDkfy_K$Q<|h|RY(g!`Ol)PWAl+!$FM+K_dghBLVh-Lpl$`xUxEm+bG@NK@WxH0ZHHYn4U3IQ}-q(WQA0#B-W zbCX^z`EQBDa49x$IozLB%*>Wn)QkJw&-zpG)M1&T6dvYY6F(K%urt;evJNVGLu8oG z|E@kY*WK{sq9bn6Df@XK_A&+c%bz1d^3OL8x*HrcYAE7axn?K$a!qj;FP90fN#J9z zf&fT`ItVrDW8=P<+JwOLD8!S~H&mbc7aJ+}?aP6Iz-Aiu>d?4+Cw3qm=)jRs)yza0 
z*GV(#uYBs|UnvFKalH9H`0b~t3al&SnrePpdx<))f!<|839?N$KcX&ddz9K85!T9W zLJk>O+e6pjX)`&rqD7}vKLT>d*7Z8{3(_vW>f*98Yr-@GAfr(8;W%k7_bXH3xL$RN z>)J{P5<=_^T)&spdv~Dgl;5mbILNq+jQ#`>IZ;i{NX0K$61nfOZn5O^hlq42+5JyE z&al~($`^>e{p=eJ2F=-Fo^4X#e#E8VZjpew9tHu)4u&#RWq*Y4yzA~Xvbpl?nx);U zCwRl4s6!^Gd(~Th82TxlhXORSTo%PTay-5|5_`{TQggXN?Bk>=cD$EY8rMa~s2_y5 zwy(t-y!Mo6BKJCdTJY65!S0MxD=!kL3zYvw^F4bc6d+25a`KtCMU298-ZYJP+5co@ zcOoXR*0&SWC`Y^8AT6ye9CZ(f%ykMp$(Tt4Z~>70e5@S&Yjn>|TAKQYe_070ZVu}Q zJL!Wzpfk=Mg3jTv97pcS-zGy=RbKZTJT8{2CyB112~Y?Yn=uEZGbqXUJNlv6AN`4+ z1Wda(eh(!m7vuzEO`2}_>t^iW0_w2g8z5Q`9&;M0bO##bUNgnfXy9kqH&h;^_N!GJ zW41`%)%kAmV&KJKFQ-W@;^_+WimN}z^iTQ^h7C)($s!v}31q5}m@WuMHGS!gHZZgG z8*v{$kq2NB0(0&(g9jK}E++&N07|69@B;_Km`p!GlUM>#;>gFf>!Q4F?k;xQ9>7U4 zbkTC*L&3TeulAu2?*#*$fnX?iSpSLdO1~-AwR=y50UsLnr zp0$Vd_pFO3WHk_aQ~SW`KUAZ6N#s0}kI?&osr8|gSl60c9>x-|T#H`)qIvn0E_xf= zUM#GXB7l1!;Aeop;F%+DlYgT=0&q}Aqp0whs#9&}*c`Q=3fdP&96&?yhPL0OJTT(+ zsq{XDw86V#VM-}QuH}cL|3I#McspfM)AdRr5kMu6!s>wX`WEiRzdvfa{Mc8uoY>f8R5)A=MiXp)*c=x2w~p ze$a9@+hq$8CR)hjKzWSy#&oJDioRXTA!|H|_^E z>|k^9#tWmmT`Bf-xMEEbwwB&?)ew|rL!y>40*&t_-6*0A#7q4{YW)LuaDfDev3|#~ zRSnn=eOxaeL?dgzv8eli?YDeW=Jbsfc3a+bh!~@SQI90kEvLvJ37K(Q+zN*58)o@6 zGdUaQ@p9I_iNh9Ym`cAW7EuIosFhskn+0K4-M;mq6!4`-^cnh<=_{RhMeu+7I^7aJ zxqo*0cbqH!XPoCo?D`Y78ihG^p*0!T94B0f3zW6V`1~-&IAhD*p)3xqyz}z$ht+2)B{MH)EDy;ih6=R;#B0$=JowW7!>QZo_oXg z5cFua%%~ds4l%ads6E=cbN5DDH^VUwn3s+Tj#Y!a zh=p~$WP+FcHv58~0KB|y%vjc|*$FpfyGGU=-I(l@#NTK)*EQV!chArMPuu4;Q4ZfuWO|KAYCHSB+ z+KaGq&6VnHAQVK0P$;_nFqB<$Pvi)>ZVx9&Rluh0^0vY^Nnc^D*+bxmbYPK@hjxP` z;!?{fC8TXTw#Hu_lwpfXF8c*q#$qF2LIGM$kYfJ~Oy`B_9Db*wn!^chm9+YuYKV* zzJ|_Cs)`}=`t-DHy)tZKmlep zKphWw%l)DCJfG+ku2r0JcE~JT^WC5_eZ4?ux%_~E{QR!)&YX;xT+?aA%lJ&brigo~?31E1X|5btkg5+){l9ZXq4 zVQnFg=M}%c`rHqY)#iy|dyF9SnDk^X+XM}VgHR6EIf{w88|5#ewv(@h=(%H% z?jcJ-peUkRCZ%^KT6l$KgOOss{wwHD)iUum*zjGjLO`(+Z=oN3gHt;@pm%@Yl}Ba% zdn@gY=GogOva(p&{aEJ$RmsxWD zXjpr`K$sLtTq}NP`xR*{<~uvH%>O_jvMGh@QeaiAV5Pc@c<$pebih+!R&EE074ra; z8F}^1-v!HRk#$+Lb+V779^KNyV{{t;BmHU7Of^_Y@!$tyQ@l{r!IM$ouVunQ74RxN zywVli<5(Veggj5h$(I$1fN)nZD90+VrTWaRX$Nqi5hIz@!mzDTu0DJ7;^@s8Z1U_K zF(v*QT32CcPXu8u9|2_>8&;W+Uf1s<&J&JysXxB>2cwLX{+S~7nodbQcPtHnGV^v( zEei~LZyeQtI&TY_ zzqeXyL#yY}awp?KP`AvVeRz-i_Qs<>#bhuAGvq&0SpCrk_nrh{6z%bI!@n}4zsGiN z4G#oJG>%g5V!?0qVDCTC8b;Eq8PPVDR(Ga1wACpbo>3vfNDGde*O{e}{9~-xe1V6p zrw)tUUcZabIPpSP#Nn*x`!&ily8$#b**C_!=YNv0+sFXQ=#98rTlm|*Q+Iq1T>E$G z8vJMKmUI`7T21VQUNYDt@xo8j4xhr0n!Sr%^LIQ&k^8U9ajF}*h-YYkaVx*zKK2R? 
zg)qT^N4g#$#=nmXHJI_?FiRZtQp&p+VFf#3SV!w76L$al)kzmBaa3&$qHs!~W5uy0 z^6q=iL!Ftxg>T`$dif0*`7|I|@9>cwsU7a?D#kBJB}2srYy)f-F-XV`i{J#7Y=)r6 zKEg|xIO;MRA-R%ZcwL=J?7H-r8m*`h?ic&v5;^ZAH42K%S-dtK`J3@nYw$UbUFp<( zcTr=eE{*-EKi9IOW>i8ltngi7)7Q{yf1YIZ zkFt%r`>1>GOd3aW!lA(+Dq+x50Nq6Qvl7h*iWM3&(d$a}PTz;gcJQMjUOy) zKi_@1?(`KmdU2r@ufW0UB%%3~(g%?0RE2TU-%%RvK=hS8E;T3)Ed_Wvs0D z9hM10JBG5$B%1!EwtJZJj9?^E14$J7jUQSG%@1;qD_Y_x7Gp;%s&-?GP+C7-U=+)v zG-C|0{I7rI`5&qM{k-lY6v|)2pdZcD$B;71c z#+|j-4U*j2k>jX1umMiJc{ztq?9$ugjI~HS3ccUG&8>_PX|QsELrpujCfQ||Um>> z@p6%SJs>GvwpYe`!>}bQLof6Ry4VA}AwFv=i@XR&_HA=KU!$s^6a_E1X1uwUQ&GRZ zI}s^;d1_c!HaZinj6Ud24vU@$!4{z4F=!zl$i);75CQ06f><~WfG27A*I|73orsuj z@@iE$00MgYa2Q$+3)r>xip%N1gea$z5E1p;vw$rb53$uydt8Yh^j;*9?W;^XMD*=2X0bYmlcRTLSZGvQP zNz80$Ly@AGf6P_ zAWx@I3+n;?hL;BZf9+iVC>GF67YHb8sIGP2Tr`e-?FxLneB2ub1)3oIzu(LLVYVM% zsFsQ~#J%dc+PV^hX81iBaPrptV}8Z+wB4 zvDk#)_aYq1H_g`jGC-g}Y@YUAt;UdTf$&Co^K=N|0Q%Ie@v?iui}XXnx_}#2VM)JN z6Ow@9A|U}d&NFk2cvP;($nbkxXOaR=)RaZV5SslC zQ1Bso*51`_V6W%zq^?~l6WMtpGo4YC!o8%oE<+#x!WUnGgg1QNNA`;{abYQOTYMXR zK(K0PyJX-`x8dTsE7Whm*Zcfy&jyIfl?U6hOLunKShu_xLSi&#C)fRybuupCNu-_g zXV2#tVjsDypXU1+K%EVBb&;F@v@Jee2v~#&-JOhnb9;XD1l7*kE+sE#zkx+}JjpCl zT$|EWR>wOFiO1<{dt>bk1O@6kXR6>by#n!^8Km4`wFT$j$*H5(y$Fkue65nXfVM17 zlBJlH-IoPOs-TB!R5~b>do*#_3>AXa1w8-SiFrPX!oK@7o=z&vFcoFyIx=4#r{ZWo zG;8+?iR24yi{X%pm^ifGo@+o4n-AO8=KN!!nEb0GZHWHqSYjANR1 z!sGRCbxHID4gj^|I_dL_+C?7v;$9{X_F?x+Yr3DQ$P#xXZCxAj9X2ccbhe7srClvE z{5IzS)X+--4SYV5I=4=6q%lw0CFZ3CpH;b@V_tFTiP>ao6WRVs_}U`>=V7`q%+ za4P={foXh-UM)I#?JCgf}&Ebz()L1)T`*_~4}|QIK>HHAutY z0gTcLjZ*XL?8v`!mp?AP{Y;eGT(3g9+({=Gu-*X?paCet**7UY!{-=f zNtMy6Z4xc)d7cCTdBY#MnPm!FPsZhJ0LM5WYYu%o$EO_zq&gA~kSlm=fr77sx1?)1 zQS+x(F?fV?VaQ4|u|mTt_cii9!0it zXjC0ZN2v>>?eBxgyQI1%m26ZP0jb8pz>Zhwb}s*6Ux8s`dL^>2O7fL>7;`F}z?~9u zWSpozAKj?{_B2Z5+7>-*(7c%3=6i|uU||FZoRp=fMqX_Ofusj_7d4c~ zShSpoprJf_QY_dInp8ERH2M`U43^a}!nteFbvT1ia6d9}F~>CKW~7Vtsy zuC6@a$e2&=P}Nf!y$EvA%kt5^!w8?Q>td57U8&D*f5O|;=({Ta?G5A17cOGzCUu{o zRWLA@^-jl$54_t%#|~r}cwUh$;7oiIxY;dGn8~#mn$CMg>~?=721jRKr_B2yaaHd_ zPCUStEP~7rSa)?Et4~PooR1P>%=iF`cjF80%2F(S32}%}ioK8Fg{&75yUra!^1tAN z8jN`uT6z)#^~GcN_a`%esESg8>=5R;=wOt2*;%xW>|_)fspyn9LT#j+QN{4z$V1?N zkcTy5G!7Clu<$PVRp2*iNi#0KH!K7zk+L}UIka8s+t(=6QIia zSlVV!NE%N)oj6Mr;u{rZ%r5qsu0Eoc)fN9Sf4 z_RWwF^x@BBkhAKD@Y=Jw3&d9Gf{IT#Y0fpvLqvo5gdM*T^hcCO(xBc{bwj$y+wnN0 z3IkT2-?1c+CzeT=YO%k(7f^gYZ|Bs2_)qW1O0m*A=4YatK&$2pz2ulFo0%TTP`J}Q zNXmWXbnBIdxF@W(&fz~2j3H#Jr%yj2k=juh*2#O+J8FHbvGQ_}#*db;e+27kDMb2a zpSxAdGVy>oBuc&he5p#@S(HfVm>z6Dgt=@MfYx^0Pd8IZwu^OIa~;;3RH_n4zrv&T>>cAA!gQcb>(8MW~ie4|pP1bHhMLsqcm zA}o(M`tlQ};C`wdm$I`dW|+eAJwW&nvavRfrxrPavs=+K;)D@9FVL$Xb3KWhJz&l&E9{;zh{ zYcWgmiVw2#4}CaMd*i)(QRW6z{e^lC((Hj~n*QkG77OGkc-k3Fgmj$u7dF5s@?R77 zD0A>@j9;d&2=1euNAWWFk?ZAMXx40lKj3`VmDa(wA=KL>pQYx;@QV)|Lwv=g)_sF} zpZf!~Ft_^EVyYV9KNm&~hzw4O`SLWu2A>+g8c5>#^Bi-%Mz(!dW**=b;mkC(l>-2m zdgE#o(jILr?Vxo4_AMS^(~t34QV*$nvSjp$Sth;wPqu>q(M<5x5);}pzwVSLi1E%m0<^1JH*Ads zMJnroyO>c^IZ;z|VJjxImS#JLTI|%WUG069i!K;n6m8xb9=8g)c{WTO5ZdyG;=E|D zEX5#K8g}dWM$(U$L5KC>dfFB$8uq%g#ru{GEl0stJ*2KbWT?>T@lPpdr@4n99XxLE zpfKXy_Z2~QVE;nPKdt=Px?)4j&m1M*WAcTT-U7MkAg>aSx6L z4%Km#ms;;u>=qx{MO+XHqW=6z{L(gG`1FAd82VIOzpL~aW1h(vKCEg4E8;Rjq2V3K zPTR{wL7eA&)B88Ugj*oHOsZ`YV)?)H8fqFKY*H|AlGpKx1IFnZ)x9o-H#KQ|IA(Z5+gaJ} z@@WXye=n^oqDWY5xj0Wbt(SX|hvPPezK)mih{v%;(i^2rc08O%ynk+d^OjEm6dtCX zWGgw?1zd2Q#rkh3ZTA6h4st%Gf64=`V7wcktb4N6iSYm@2awqGVZ%5cVek{wDFY^LU4V6u1_D+UWS=`+m zOCd2}Woh3&ev0!uKd^%+4|$Fjd$k3EueRr>a-MN?vNT33!79vv7cW~TlQk7K+0 zRn3bUD++%4VSNG>*r=4?5z_?CO5T-ZW+JQ!k&XfOT3!*V{)R zA0txe^xVaY4=)@l3II;cJ?FGi6ou!Qly8nEG?UPw8X_7Lv)xC|He(F)3e+T`H@Xdr 
zGdgwJ!{$w_$Cq|z2asBDVGm3W+^5D~beDXCSf`S?n_7Qq$Ov781X64J>#9woed_8b zGH?%@K$_QFOi>w}!$yVb&WY1R?0yb>g`t(QXpQ16`Yt$1ZhnQ~97^fQ7d!^Rf8z7Z z$-U>Pz~betINda0I7w*}^u6_c6?J2UC_0UornWx_j{2V47-jQk$B!xc&}y5Y&j1Eir#IfX)wg?WT^rLO^T#LJWg z%AF|mefP>}=o7znN>+C&9d=O|fE0YjO+p7&f|#r4Us9ClfPz4z2ME_i<^b<2^i#Dr zI1jEP1v_Nb{1U0Pfb=D}cPeKD7Xku*j1o7v#jKXmJ5vI#(e%KTHmn(xgqT9T9ufYW zD4UpZ^5yj|>fn9yl2(Zg^ntj7H@gbB*(|<#$Isc?0C^ISoWtiQL}~i5(w-DYjxAqd zip%O)##N!oa$@>;&5Xu3`V)5MIKl@G0E2B~6EItDizDP2avrK7_u z&>uAlc@T$a2|^thF^u6YW3w#h+f5#}SHPugJaMKna4&$67_cky*||P#Cqo7@c-VcM zfj>8p?o_~2j709=h?Tcft9kQ-M1$I4zUl|T>k zkwh11Mcl!nT^tSPH%jDbYy}6eGj;lK54vFNs$)Z^pc|*4(ou$&r<<$(&t|Rq$56&D zJoertW=|94@oOZ?EKcSHW}jiq7)1tObf{H|ND&qeMe|ADHYA1Nd}*@1zbzX-U$Wg_ zcABi6C5>c2-KG6* zt!7~RIswA_T!|s+4t{J!rYvG_>X)fz=`Z=1@pppjh58}zqM^O4#2Et!2m=VOdks7# zHXvz}4_zt$gAJ%sJ@S24ARLY5rr+egb4lYML*b#>9pAtI5Rp>~Snpb0a|ts3wZlG8 zV9ftnMEU67eMd?C8bvlUzd543{fApzVt{=@N|*FKAG@0Hmy`rm`jBGEN^jXYSxKil z;)emr`8sk?WRly`5K2!5M8D@LHHF4%l^A0T;kpt)zN|mBSR>{jC1A)+pWl&qGM-~k&H*BrxCh+~KJRHUl^aN29*KSeKy4~Ol zJ~CA$7?Ufnam-DmvA=eF_w0<;Kt;B{@b!b<=4$hq0@Y}L_;jODj zaquX*@!lrFLHBKhwA1we>rHQbf$U%@5KdTO>DbH263J~t>zCMrCDiwf5Bq+!+wN*G zY1CX{e7=O+<4$x6&9hv0h&jIWDOsKp_u{32?PATrMatVOt8$uxdU99UMXlHLBEBn_ zF4Ru_O`Gaa`(x#5fz1B%x8|TsOq{Q zyCDF<#%79d!0L2$Apwvu*hy3X2VE-;&-iDT1On(ptIa25{^}$Df97K5bDYh*)2W&s z0>MWs)QWv!R#qR_tGz_4^u^MzNqLevcKMVeuDZ=XZiavAr4%;aTEa+pqk9{!Q*1M5 zB=BuG&i!m>pxK!$ip#H%2cX{Wb zl>im{K4$!?Z@on3;rq;R&kmQCYvITp5Y%CLd*$BC+)+wAdcsbp_j~B;uUPyIpV*Mu zPQssog!OAv0e*O?AUi?=AeIvi@%8chkr{mbG@I%`Biggj`tc%VC=2IsE zin^)!jJ43h4(pa-Meab>mLb;`%~wseSMqiUn$$!{}RPafsuFL;-r&`a})*Q8?>t|v)4C0O@xcH6!YUZRmdiWQ3j0+S8o z5dY#9sDH!?T~qPMpaf8d@mIR|zcVlvw-_gcqd9H%RXb4;U5ubQV9bvS#}T_;2YM@d z1yxKg-e<+xbbffeyBfi+y_M;vEIml@U-f?Dp#RLo_IfOnXcl8^Q*IHWybz;QJ}XoJ zQNO5jTL$%iEAgtvCHBFbk|9w;B<$mH3qHg{4>#4y)f?Hxh?~U>GMz-#eV=szhG0B4 z8OKnVGT5GIccCkh28iaT*ddjVZgGz@kd0|^FK`b7#Y^RC$z&$)iF_gPJBUWYEO&1? 
z3<_%!L?u6m!@i6H-xApAv2cdPHqp0;q|5uSZuT6R1u=zQk zoRV-iao19#*5z5#nqz8~7)a^}l;D_Era_|0`Ie7zrPcyZ$Aao>lvWf1w#{=f*khO4 z4&M0Un9kr;V3_Xc#ho_CDs*zzHZnuqNlq`=pjBWwi>U^-9RG4pCQpCeHdSJXoN{qY z1j*}xdiBNmeC6Uy`Excpc0dsIx0~=G4@S#BVp433(`Usg0xrL^CWDluqM5Ze*KaJY z$?K^Yn+SCbD*pkCTtGuHxrwHHG$@LElDzSOll9l?J2gLu?{@1e%#XTjLHvI8SLjgH45Fh$sXJ15 zHb5FzI)BaeorHMhBUkE)li-R2c%^|Q1!&QUo7I+Xgs0Vg84naEV*!c0Q#an3dSwrE zz)g!xNpnoe6&uHH@s|Sq)qW{)d>gFMNL_8kQECB_WLg9%?kdqui+Mts-3D3~DD)IJ zo?8h2@XMd%(I9D;Gj>2}b;i8|->mG1G?^F4cxc)-JCI8K0b7oK8kj!zA9vae$oL6`qZ_f{k_9PJJ0nCt54&Wv=)M4LIweLE5hHu z2i_*aLb>;xgu8Rdw03RIOu9^|Z-Ln>kjqj|Sgmg0L#VLnCF z#zB2bvwi|z50p?oAr0eMrNki8G=g`8LV;=)ps5#|x!b}$_Og!VJ2sg;TE{wTA3vY1 z0AJ?fm@WbcMnA**`T-(Eo))C}nMWHEQn@4kGxTtn@jkUy^(&r!Hu35BTL<(J^nTn& zNn`G!@6eo?e&B?ZEM^LCIs_n{sPzisydy_(&?Gu~f=pJTpteYJEa}19FyOgJJwZ~( zQIEXaK&>c)l{bE+Z3x?OtruZ}o4eiw+^M_H#hx(PC4N66sz!cISlV`pRVK3p;`_F* z^(6yGXc|++yjd6S_ayE4V=0pla9sVZw@N;*4G$!dk}M3p8Z=7jJqRBG3^1&1PMibm z4A~M0Ounh=E~tN&ls3;bqhvh!uB$Y1$+#VMJ8fEo=+D;@d5Ck|x8nR7Wb^DsIe7!sk`vc2= zcw9nm*6ghEoxay8YMFfQt!&O(v)d+9eSu!91Osx2&L(2C7tcEFYkTcyV0UHuXN9~; zAD$+v#_q}Qp$KKF+%Ip!EoE`ea5SFCQ@$%owT8>9wyO=)Hchd5| zylpud)kraL{i0uEsYF-He+-lWj1{9~U|ZRJ;)}=q7pX;$`hgDD%$e;+EZ-B52a?^lgWT@i07m34*~ynUxBP3=3vuNhgM%#((-#*` z3%t^C1&78(Z31V1_k6>)4W&MQ$2xc+cO{eH_qGunE0ApvS#P|AO33ny zUd^`kM<&E!LoKUJ>lgWO9L2a~KRG>^ExeR+;|Z)B)3Mi4Qu=^P#^snR9&}DPg9rHI zxr&w6c-HWUTot$qDh)BVyd&3~Fqjw#cRqCqF~QdL4;>WCzGZ9a|sE5I2)L%}c* zrFG4{ zNyHXl{qR2XN2X4@Sbny`ADi{NIC3YUk8qU7SQ~ZQV|Vtd?CDdrmz*&qr|df)E3wpO z4RX`Y;`D+tT+~S3tUa5$wH%LuP3#np4kmSKy>T$XmqBz!7iIJM->mJ9S_vr#rGh$y zA_a}*qk-n@l%EQHa`!F|QI2(-yEQ+Fhe+yulpyxOuCiB_l7|K+Ck}2O9io8TY3z$z z-ksb-HAS)k+Y>2^>2bWapn{{RmZ;GGgaXIbrM~=yQnIBdYS7NF1u`RXZV&`pnA}(Hn^?ADRzHsFcqK~}q%ggp;fnAZyHW;MwsnHoo7k_*+%12FTZu(-O7*RmG zT3jw#Uo89GvIf~=!B6<`mGqPao!eP+}2M zyNlL>RYk?nSloe^jN=6&mbM1I%3}|Ri%A5?`S$4#)qcT_^A>>cJ5c)xEW`Y-c0Q}z zy~~#}yWELbjWV@x|8;e)SdmVqdfCa^G%BQRfHBcWrs*ks1}HJNHvlLYaiySAWzNe1OCb4 zTh$$Bpq>WQR!0gSy-h_8XJAQ+%bxmNc`TfL-=YtHmi}HAT_SQ~J)@xLYvHW_MYHX_ zUAbB_kTz3NW3Q*jc|A!dKxcUMoc9PK5uso}L{$^w?$M(XhyZ^7h}T^+79Sw(9!$EQ__=Kq9rdz+pGA9-oJ^5O zdRSvpEB;A=nee3e(=V1!F~Py7qn`$m_**!%joC8m5lj%*4g!A|qM$m~xO($v;_h4H z$hTjc=VUWX>o0@rZ~N^9Lx7}#^XxY?gE%#vb+;eLJ^2OP>UW&@q?papxoUjq8Qo6je6WfB;w!3r?(KofiO$X==zwj8JLQc;Ci zx+^lV&5=u?>ziL)J~tu4=%%L~8YVQ{*fayh=ik;9j8xUIKB;CO#PPf`^)W&~=Sq51 z{8!mLim{=$hKK5y*dqQZ#yJ8H@SI+~WdN$3>X?%J;Gnk7l~OHYvH*J%mgm7_W}-Nc zC)AK){w|RU_t@jmyH9jHuYW66&v2Oos5c}Xb;cMY-OHbrUBXeLiR+B zR+sP;bJ&Qy+6&c2Un<^((}gSa@WaO+!*9#|PNNg;e~FipibM%jd#w1%T@`lJrm4JJ z{4}3D4yr_4241XsHtu=_=A-P=%{O7MaqqREGkKEoWv#i6e~-r~T`)NmDU4W2I7v1e z5iAw#^Cg;?|ItgLTc7J|9RR(FfsV|B##1}%kxwheZ)6LtpjwOiCpJ^*Wf zP^zHha#{7Cs<2Gn?MyJ#N*(fVtlzYTp?Wj$39ql>)pX^+Imq^J``qu-)iY!Sq9#tr z1`d843OhA9D2MG5_cG7wGZfzPJ&LkA%~bePS99_1{1}{EVv!R3G7|EJIJ7zWr0crZ+5q4JI zeTG|6givwaikmvoR1*o$4I`4D+q_a9`ok9TxHqJexFAqv%3m^A=n>vB7tCFJ%ud|x zX15sjE7RUY1SdgK?!$iMpm{QGaasDG+KaG=3JU*9s5w*MVZ)P)7Zl-bG-yq;zw3YR@|clF>a|J`*E$A9vH{m+XoF zmea>tZ}AlV&-O))k6c?}V%cRHUg^3ae>UpXrU8UUHSTf)6gvO?&B=_`v|2;}g&&aR zoK)dozz>2XBpS5C;F{&*)1uN5 z<2$%P?HSOuIST|cl+?KIWM&^?#Vbtqq!1M)Y}4uu9=R3FpjR|@DYj!f*!B@!F_PCf znGWKuQ7$0wkV{eUCqdgA0!hGbK1@CE~aF zt4{!SrTr$R*ZY#=#I1jsr_}npC&@za!f0*Kv?b30DGJc%&=EwSrW83&#-hJo^jJKj z)8$8Fh!KmxlyaGVVp|XltIGG6VH=T+TAmHc3i)=xdNN76efzn-$J}cpPHt&i?>XOy zT}}-mW8{fcBwMopM810u5NhpUOV#}2GeZ67`liho5uSeY8)s&EgtzoDj!#(nR-@?K z2(#hp%~c2Je1nqnghLJ0;;%RP9T@{cA>?atYqPXgtLRCBt)~x_<35nf-I_ zvFq`}iUGNn%qIy}9Kh%<9+v1e>FymjbF5{4@*w75BcPNl9QUX>I=64LcKYsB#6NX4T*UZ1@#&vB-`>cun zbFe6kZO}>Yl9u9u=kn=F`+KBtgc8{bv3=HUUCAG!aU2a}f#soTW2BpEAliLIVk+|E 
z6`{`}M}l@!3a~^I*RbQ29;EzTqTjEY%P*ZR@!%u@nY{SRwuc@8V0$I1HMb}9Gsc1` ziYEh7_CIOm=AF~xx(rnzuDvc1ldcn92ng5yBD&|Xbp|YCZ}uBb{P@-4_}+Hbmh;bA zWad6~g!^yi9xF;t4-+5po!^sX4uOEKsy?LmNgYyg(91d!23Acg>o%o3nOpDVv?1l+ zPT4tM9Zq^%7=vAy%Vk^pz2=a^iux%mr9MH1N5~WvSi)~6=tHGP%UbGdWHyRz zpC-nL3wML>TlYk|Clfz;QOxg|JPW5t%jAj7IG^{8ZSCB9Sz(-ay#6N(fO2RKUF(u) zyg)c$bqMKj+_ZHJBxkuW0}_qSafi>SD%lTh?c3Ec2A1X9C_!Xp`EI7=EGO-r6o3$v%;<&JDX;NIk~<^o`$wFLhYufG2!V6fTKmB!oO?uvtZ2j z`JJ{+?jyOTXt2h7@me`LC6wk~7NFy<)ddMKXbCO|R+z5m%E5EvD`y_p~jWcRWqzzb-TMxnc@Q!M&fe~24`)K3w zC$YGhc9rHN{pe(ftDQ9|YS=JrTP(#2JtsiHnvHt*Y|dQqOYb!hjL3V6|jYt z`umfR(k@rcSx;#_mo5jCXB-3<47qQ5d4Gf%%!{onUAzB*Hl0pcj?MjS$XR7ePatK0 zH?5xZ2{V+rE-*jBNc&FyHBfUX1-?9EsnfI#Yup~TRQg*%`uudQ-1L07QuGD|EHJ4D z>T8uymM%<2rBDVDm_GrgIwI=y5*PKlA^5?R@MGZ=MMjsKPSI8Ix8!K>q^PSAdsyC6 z(qlIVzY-mqI}mswum5o6gx#LBy)XMR3;kS66M@-A3{~is^BD^DZCS4)!fLa&(ptbpwGU`?*-p*AaMJ5oS=io3z^2f zeP6Kw#CTwHx3?Z=-Ffr}QC~Ud28;`~)0^e>6r>yW;Pu>pWgEGh-!D^4aJ!pi4f)#S z9}Q0jFjS=?35R1{$68;R*1#+_VWh*aP_S=@@!puj_O4RN#QfHnN9D+9TXPjmd5!Qm z_WiNyePX>;Q**~htHIxGM<=DD2-A9`%N*lm#*ci3{G}L1`r1=+1P)!;AorTj2V`0- zgUM3n)%+j!Em;}gXB+;|X*41*r}A^^@!~T3({CrQ9f}agy7jfb#QRFslb!jHNH{?R zgWRiv$#gwUPa~=!{LRzI_dT*L%1u>d%RQ_CSb2dlfz2Dx zqE#o^k_oq0eGPSHAFJjO0m|ie)?|Qp6v%D5geJ4{72R;Xcz~}V4a4H#!xqj6zeAqR zrfD-U@tx=%$frliVpyf!^`0Otp-{_4&x5pAH#uV_qinP!6Lv&@H*Ja3fYepic_YzJ zOou02B5uE7x098R=a)g=?zGeV?a+xaw-ERt`e$ee0bi~V@mRdMyf*gaY}4H({nzEz zTDwF%qn*{Gl zVt}TsM3PTsh?B!nWJ?HJx6$-IIL!LmCHF!d&$rjWN7QwT8c672sS&3zR=qJLh!hw! znW(z;ogt^NiMoH(f@-{`k`ibuAUws?xq9%=erIHYU0zRZKF;p>Kn z#1XnpAj+HE;!XGph|xS-0BFg-?4xyHax*=QJW=F8)tjkEK8IgA{94EBzA|v3laj|z zG69%zP*eUKn(0+%)|#glWF2tuyZg>|=kzx4bVQ@q#N(H>P{7dc=n;nImj2pSACh9X zV@313)?!BS+`5}~oqF8Bm3=h@*@e1PujQ^oFT%c~$CJ#dh=Z6raK#&+9c1Hl zV++y3=)lWPkwDb~bg%`>AJ*%Seva2K+R}LK@7%}e&8L$&He3Dq@CL3ye(vN@&p1

    XPDL{ef))oM+94jZ=9_WH;pzVs3~E&^(5yZ^04`^Q=g zl+zenjX1sngM%mjZ~-Z0(t#FQp?MZ9)3hB#@l0!aqUSCm#19m_lwvfR(nJ)_1OAdW z+qJf;cw#JkFGhwKYoX0h12KcV{=1rw;J3f)wzBCL^Npc&EZ2LyjTM0u-{id0jMF$e zxDIKQw>V#@hA+zQ`-va>HcRU??c*lf3aG29bEFhNhHgP+Okz}zyw}bgj}xawJv0%> zJQmFgOD+$mMNJ-$(iep*kFt=K2bgT=jyOPKpN*BUj&MTI_mZ9Zh$|xjf}97jnNlNrgFN%W!B&>B6#v>pEp`G*nrpP~ zR48V5P`1E~99zOr`AIwk^_&o>r!&|(Uh~^9jrQDeAf6V7bmvRfy`w}RX8yw+I=tD- zJMQbq?&(1jWtvaEDOb2C^&LfBHP8<&dBb;}^X++KXOD?n5C=&ulNJS^%z5+_X~68q zT7m3`uzCNg*MRzzK#}#)q7(c^5LeP5i(Zz0H=*sO+jK$BBMYqIv?^@_x}RTnKUM@d zjKu(rPm?wD`~nT}4tnBT_yI3zm?; z<^c)VHkG#bL^eX=?<5(M`+GvW?gHshC7cQC#3+s+DWr5k6%6@wm-@}!UL`n z2_GJcBtkYAO`$1@In#Ppj*K-I$F%Se^UMV3Wt3umybDOsd6$9W zUbwH6vUmM$I%$4o)_K*hf0r@+3fQ2J@Bd1Iy_GRl6wqTb-FVD<_m{GH-WfYJA(yW>?sG|7aJ2# zh0sPXpMFO%xg~61$bj@^C3K`jyxPu8a#Rf{XvPM=-2A4Y3Pz0c<0VZApzH#}ELRXl z4lS)yBA5G1HH7csl7NtV8sgo{_coP#A!A_No;z7ROOIlMg(1yv!@o3c4u-2HWfcho zo=*8KJ;d(^N+Px-Js#pfM^tK^2aGQm(@%wvH{iWpjOuli*a?k(ewSj^PM+Md# zvp~$nO0K8ZS%H4uU5_$h4*m$WM_eY0fNGtx*a4JfO>FpJG{FW}Gsb zE;?EGmo>CN&Z0*t;}nISLAjq{Ry;<(MkL;eo<{%AiWZq!1IU3nA=PesVk%}c78 zYT|Dq{>tq%S+tvisW+6=cbspj<*cLSL*3ZKQhXc>We&#-ccBw1Zhr|bzTe3SR-iAYKD_3mFos!uGV2A?vwxAOJnrg(uU6jwEbCJnz-n z?R$w#b&p$YSym3Q%H;{3A6KTs0fRf^(|5gag~7D*9}U~690i2Thjm-zldym_1C)VB zJ=(&S<_{k06aFb>_1KTlhS|cr$2|6 z?BR}9kliYL+xxpm0Ro0Yv)*; zkXM7&BaZqE%1X}QHt$_F(kFZMSC@SaESB)9$%5HZWZQjm{?=Z{|mFg3w5uT)vWMaU5#&qAYm6a96 z^mHARPeF8kA_)0D#lM-Evi1!IwXP34abm|e!z~=yxK9Z7(ph)L5m+{{72xD?tOfY< zkh7#2;PnX-^$_y<_#xku_?Mc{aN!>3Wpf;2&Yy?wON4(EqVY`?hJ&e%-NozN&MZAx z9XV6LhqQL*aZN`P5xbs!sDD^wJ&09z#?0`W7dW%1%Kb^b?syB2CzyrbVCN(BOuOA{ z8C!Ukv~a!dT%`z@mznbC2||oA+6Ign^MpdJl|7uo3$NyoPp#iCGRtWyC%C7pJ-mu}pQ(V4S1HOW z!~lox48<|#sNJEJ(M8I|Z2vMhMffWJ97H2hd0sy}#)I>YSR7mNi zxbQMufOHiR6&$ve68H(hN)d!cY{F6v;MAKzixAMMgib3stg+D{ncMAby~VVn*0gJLyb55(RdVPPSZ9+X{6PP0DAalk0r`K_36aG#e+LB!$4Ff z;tg8l0QLLHH>!vt>8cusRr!OKB=(meMK*6N&{&zF5MV z=QG&~5+qcs#4JO&gaxv>g@BjE`(IdPK40G0k;{gjAIpp=Kn^Ci*C=9Hz6~h##92>G zXTgWdgXldaQ-|`&u-jSF59C+wna7khR4QnLjd7K}>P-ds^>#J`Zk~e)_Idb3gJF@^ zmy<)Boh}grRr~IwFtB~F(oKa9f+?H^VYPQ_2D-k>4+sMq-DfY#h0}qT%8gL< zS8(?oCX!N-yxscZTN>*!gJMM*JW+ z%(Fvsi16I2Tq1vc53tL3p0_x)KHwl#L6Zc&=i6WR2nB={-|`sc$6#Se8k|8S3GA3@ z%2+04XvyiZ2@dLOq7IhWr`8UzT}s}7CN)7Fz%V8QnLr2t&C6(i?xO*ON(@~M9* zYT9;kT1>RFteU_v#VD!SMI)CNcQ=}!iyiviLzAirlGXyMJ%NXQ5H)blmZ%Y<46WoM zq~USij0@w*fZ#$x&!{J`5)oWFS0jwRk;AZm`MS`K^(%iVYC@t}bJm%>jt&M(?VEUr{rIzK`-}9cM_Ntp{D+rsFY=q~4xpoWi z?{_V`l1H&VM!Dx+(Z(1DlK>KG(K^(iuumklDP@C>{GW)9xk&G|3!aj>65n60YpNRb z$nAOVx?oYsLTSN=s;*<$ijcX-=n}|bVEjg`I|>AiDT$|GRJE@2Rk@ooe-6_jf$qg==7u6N4faXZ8c_rimou zU--d75;lO2%Wb}hIw)XD5yc4)5y5X0KCAxGr^QI*lsYhv(<)E9zA#~dd1~3m2LXF` z4T`ef{%4<;M|s_RMD<^t#sBys-n>XA$e?Yp0=asH@GnH5k8FeZEC;?#t%p&!Ls6PB zIHMBNCI2Sx5bIFX^_?K)f1$xi9{{E{`Flz^A$nOKc58FdC+bblFNQ_Y5I}QS51i~+ zK@-0;TT0b!3;hQYzc5mE7?X2d=zPiAd?)M>Zn8dx+i(&6Fx2^5o|hFFNMQ3e_P-#8 zMp*l!^A_8df@fm-lh!RcV1LMbb6_!>VR`>-^5JZDBKOf_1_PKNDx;O3fSWC_u0tvC zt@=c<73vXgTvu_En7eH-(Mc^|yr^Zh8tuE+c`wj)pR!e{`&)bnC+xwdcjqF4 z9awQDa!R)SE;YZx&d+`@ALU?;*#;x)G!1^VWW8nRtK2OhHvw9~gp$8p&oJ55qSx;! 
z)ZBWeJt+|#+L(9A6Om5SMN88Vt&xv^(y>NmUI~&$);zIT6h?6I?JSUPgfuu#KGq=(WYzE9Z+<1~^lkg5y6v4u^9K&eTT z6pZY8ixP3o!GtNXi+%Ky>k`9$nJP&#>iR=jORgoZ-|z7dC{lzFm9vaug|3{6vFr$~ zC>X90grpx_Y<-_qaS#jm9p+r8r}vGQas~<#n$*+NHQ4jH#+!)ijP<=h!7?2gGItH% z6h{+TRZjgQ{EplFaY|1Tgja~am)?w#ONJo(&UmVaXTQc9LJqu6M7o0JfX*G!SPjR0 zela%;+m$%RAV4A-&dHT)79Y9aFW4eXsE=3u)o#Y~O7{g)BBw@| zPbwn{SBTvz8qeoOm6&dquctgeuVGa6iqMtyYewH|h?W$`cX`5Ge_qe&JC>l^uUu;oiPSes4xkF*)P*9iC5L zgu+}tDT*W#KtqaTRh`?Kia7WHi_95zi=;l-;fhe= zzAN=YSUnR7JU2An+m}x&G(Rb*vlc=&3Pv38LWBOK8HX#`WB&y zY5Pkt!5T#?nM-6Bxe62`y@_O$Z|jE)IuQqyEHQuI&@EC4!{!=`P7&c&@muk#1QE2b zmi88X3SFeP!ncz+FZ0;jchJW7QA=lMVz=)gQ~#LbhCzfPnlUGyX_eGPl z(Y`)%EM7=EfwAu`%r7bh8X4M6?-=L7MAEnooxwy>9)>=~Qch}XmwZ`vfpXew{`@3> z8jGMv%&aTgO0>?n(dP3L$K|1{Y!U4|CXMDM8nbn6q2yJ?dfhG_L~xWlLuLo^D32F& zQe%=hYCN8R4!!_S8ZD7G=+1MpiK%kWuSTcZYn;DgyHMby5>rFIupBVz>9_p`-zyII z#AHDBi{uTFAT_a<06wBt)_9g(M8Gd*oM!)+T@^Z@4$p-qCV?G9y=`Lqs|fGs&jpJu zy$aXAt9LLtuzLS$4;BgBYz;Gh=4t?f8VPni3Al3T0Qwx}_2=8rxQMYF?i$=CUzd;3 zM;U%dZW7-{v#vlcX+O=AHW$LhYi_W^@@s^PD?T@#3799?5T^MIs0}%A(UbX1Y2aGZ z=j@aCc@P7i*sfe^T9IJJ4o`1K0B;xVOh*9!)b5Is+?Q~Lw2=vnOXqnFY~64Aj9@mF zuv*-u4YWHC-ElGro8%t$6sOFGFmp{MIM(NE8K^`!LRLCDFY&t~mNQfqN=bM`l5g?s ztf-qtFFdC55+gYQbE`~k=P}`-O2Woc^YfEJmEYVa?VF?-{I))l7^>Qy z{ zRBY2LH|orsZ(?h`2=XVGVRt|H_$;Pp(&Zbu0ZuCTo`L7_0_ya4%xG?Wxjq7&>8f}N zc+V_*m?h8H_B<&pKTIYOB=NbqCz?WuD{=?h1cTmsGTY1R%M+z5JO!%{DsXisCptKb zq*d_-{O%M|*e1Yx-k-(sZZ#BXuTAQ)qp<={j3E0V$c`b@JqIc6-5|^Bw>`?U;+02V z<`)Z4=3HU^^nZz3NI{^*9@TPUYgq4dkNaV%`sd2($HcMB$U}UpxuDmSoyoyq9Zvzu zh$#zXIG>80U|6Z6C%;6}ghIy;|uk z%1y+;nUHWk<_-GZIiOjX+;S|Ei*}`JFp73%Xuw#$=4U^gJxToj$FC>3{-|yNRze>J zdKR|gnX)E`9R4eF-0F`?i9;&j=k%TT&{)`qcK(~t3omc>)by(@?H zPHBg7iz!xCCKp^o$Minh8r~ZM3Yk{OinJ-NiEPA2Z3OLm%&qRsxLz+I&%!RQyZ=D3 z7Z$gxZIBT(cvZW~Fp8eE1}sl`AFc-aDdh$q?_}CRH`4iQS)3kXte$;9!LO_un)9gd5#bXpl);e>*_B3Z(fC z78lt?FKQUNuvtGG{ce$6s<6qud(?6*Bp^O{#qK=)^=lAEted=M*TBn&iTl4JW<}IM zu1Gx`6<0tqo7X|;MWp^P(A0ge8=FubPCYk=1=o*NZJD_#_-tG~+6-6ovSUkx7d-$n zToeWz;Mh~`))4;s84W^#K=DQ4RWw4}PgGsf`2wvdr|@y#2`L;#!oZG@jQpzw%XR@@ z@dfiC?iazegf7)v*U46zd(Z-%wBTQqdO^mqRs(QQe9PWxtCshxFN!#W74`|L{ zF0Gwg6Psq4bUa5$~hIS!76XB#Q8XU3D36Rx1#0*c1Pv7?{R5ph;i1tDKjKF+J z|G)F4;IYh3s@`pgb_q72t#mqb2x>2%Gw-tO%3NqeloUJkzUf<- z8tvhkP1N?2Up(^mx<+h`Px&3uq?4(exw}2fG;Z~XL0^1gM8?ff?#MO`{S=rSO$D=s z$~-v#gL)~$BN@*~*+?&{<#seOJturBYZnz8j?dnmHvOliE4R!v)^2gvF=26wR+c9xJEvT_t*(?0F8|={g(|W%xMW5g>BU+vH$s>suO!b;Da1tOp1KGGmt_#Gd+m<^MQaOkA zQHEjIVbceGf4%U>LC{%u7$0@*>S*lYhx!P2v84rf+kKm#MhS0MwDG*bK6xHHB2F>_ zR<-Ojc)%rAC_Up(*OC=zJcFXQ@kozryQ&p;Zm7uwLbo3{iH z9BFu$dvl$gI-LXwOgk4+Nvm?j9gb$K1XIe9Es2TUw15jZv zWR_|lpR3Tutdx?m>Dcdguo+zZn8Sw7l)8Kw(PJWfSi7~xW$r=qobeE!`wXrCn|_$! 
z$fkD|^8LGu2QmF+T7vyEt=~+AZ{$YkUk{60X~1lhH``=6NqB@u9BgJ$t`_vpP-wjz zF8|Xw=7I{O&`kTdt|k9RZv~_aFn|Ad`~KQNh2)Y2bzODV!0hr0jnAdi-(!M}>nx3< zrEnC5NVdvPcWDyG_V;VJL7u*UcJa`EckzaUp(^%TdbT$l+FTSGQ4DU*U*lv4o;3`Q zT#sF+-JfqFK3T%ntZxe_|K!`1d`C2*7`F)7^4TMsk*pF=vEPFJj(KyUe&q!&)cBN` ziDH*YH9*GGTfJq3$}+ zu`!@1?LKvO#<0I%6yGPUOrTp$>aOR2{KPCK(a^XptM370HTVq z9>LlmsPojm9RFo5?u+oM6@!Gr79f?jly{r3_P3Eh@zvw}?=J<8%8LHn$C{UK27LzH z<4b_cke<7+9-B-`LH)`>EPncu2p7UOm7i3|Vjh8AMNkmlz)IhYp<23YHN#lUq^8|i zViKK&d0P=naH(9D0R9W5IfeT#rD<25)mqIWS~YuG>`fjRnmkF329AmK>7Q4Svg}LvhHB z$vE=?JY$HfiCz^!I%s#aWdg6c)8{eeJu237x7UE5t#B-R0$X0#A-6_v!aOvQQQ}i^ z-A6bGvTz2Os)kc`UDEmZMDOjpP1PHz{{X zNKH6v(Q`q|$HN%k$qhjAuqC%*8kQ{F!uk*@R& z=`kUy+Gg_w{LxmF z&BoHEW9B?Y=-eO9U3w8Qk6-8>y^~vE{&&iq!we9Bb)i0XDhm<@2fl!@!0WtuhhcV- zuEVeazMuUp_Eu7pAnhL5OZ8+5;=w4$8o*r-Z z9i!okkcvVoez~Ab+&%jrMAncotCOWvV4~_h9^s9+#@cY};iOe!IP~<#04g7_}_IIajr@pYlON%fJAT8oee}b zjz7&D>*;1bg!}Y;>;0^_==r%l9O0N+pu*EdH#~4M5_%o545y$vFi5aTzH%LDeWJ;& zG#CHE2k4Nm7PJvbHfylOX#b~1{gQ-P0cD|!{XZD}chY5c`5!>eqU!ep3}ge_9mWEJ zb_=zS>ZC(W&yUj&HW0aC!Ub1DxYIU z9%mdS|3Z(-D9a5zmmK%Vxtj|5F+y^xHd9udwu&?)bhBW6qfbea*&MoBiWfoojv>1{SdV<#D-s*>X(Ldr5a$Jl4g6Ra zUh{D+i7F;f9R(9lmYas^?nN6%AiwB`YQ=8nUf9!#`)`P;5P}~<(rI!QlnvJN->mjl zo@8VWs^t)8mf5&GOLC`h)G&2x-%@E7%E*5^=f%dT#i8P>6RmOdsBlJ}r|ACf@COZI zkq>B8q26`#)e!yhmli>X*nRz>ev4TEsvi7-LVW?rt6s3+-}s#D1y6FH)yPM(A4Q1; z%({ZpFYJMRJsyyj@}f~KpNDN87f4{oSs^o!sfwv9H96L)QYEk(pEXlQ>_gltN9-!@ z>x?62H=6?Qoa8;35!7$QT-bLz-b$w6Uf|yyj^v8d2!Pp#`e46fw4gU~j@T(Ajz z6L=qYO~*TL6H$8K3}k6K=*9V5q^{`1^l8zge^>x0V!M!R4<>b z%x4$d7R&|idEESf5*exQ^91Qt6|1c23Rl1COOAs!DDIHzUsX=@M+2hUU3W&7WDPjWHZW}5 zjMW@RuS>1fvih$uO>QE6xvq*H&2L!P7q}p;z3U{d6Y{@T*eeeT;ED6{1H3xD+tIE_ z4FOS2A%NMh^-6_bt8}*8`564^%!Cfl6sN>kD^G+rKs{< zX_?GVtA$tKpN8C9_cA4oG;AP7mlhx*wFR?c;N3>tdyMO*^v#q<_%t({7k72W&HljDO3c-8#PP(2<0kG$`IM`yZ`O*WaLtuJ*E29x} zx=*N@j31cm07I;Bt?b><|=pwOmvDxg0c*;H* z*CVcKoGAj@wGTEAL>ojKTkPgb1VOR)K=_}t8d>i6R4Uakb;fByI)ib8))?)7rU|G=zF zDgBQ`)s3-n|JC@iKT;7-6MSIyNlt}(+Zk5vj@+}I(Yrg;$9*_n!N(SIKKvbf5r%`! 
z3GW~Yr%%#<@7dF_%oMJQgF=c{=W)UvSaTf-%3pUW8XAT!z`J@Hca+ImgX*GrgFu-I z4hus+h`%{G))0~2Y2RWgSxVYB?C+6lLa+dOJ~A~56kX!ni3-sUJv*B|&B2)bxdZ_qarMVTh{897 z0MzD9zG@XYkg$<53d(Y!49S=6tu37^GUHd>4e=)@U?uW3QUH;zOMQ@R^o=EjWuR!v zu|(t&^UGs2VszJwV`tMLWbAQxtDH$Z;7U`jM3c->yrc!RrMCXoa^LO!T>_YicSPMi zvLF6b9sh|M=0Ov}*pYsa=MVlmBE4LB6m{^qg{=Sk-wzNI8xG2@!hYE?ob&Uv9M-17 zmm|ucT(&Y7$*k#|3S3LHzgNBE2U_>Uu8as zQLG7uSbbo5DZ{^!2m8 zdtE@7?LrrfN-o9*(7hi`;&pD)UhTQzwCJ1^&%=hQMpjYHO8y+pYYF(J1ey4#R}Rq` zT+RWf8whWDb=EMtt>j5X^N2M0ZzT_XgSktFbc~8C`4;OV5c3=2r(7}7M4~|5tm*_5 z=H{mrUFFC;Uq2CAIGo6S2 zWh4H6SO2mRdu`c`C9>xJ<2^K-I~caNgZIX92+R_+5U(!-rVRD99Td&P(& zjQ0Qs5gnP_cyD3EY^jlW@W(d~T@IqxBv+EO?Vc`r%BDFiJ_%WWZn1P8TwFC*dkO=Z z8S6A5mdB;S=Z{jeH#IyMmhQza2$dcb4ZShB>#=zjZ7mU-fEuHi9kA&kHy-obGZX!}l9?|V zVaFTm&X14}g7XdY&F@0~`@aD4;v0_t7bh_W`ysp4+6nJIqcPCr(-lo;^;?r+p9X1X z7eLb!GmzWg+Zw@V0`!CAg(>iDghYj@&ae`nG_KW_V`T%{n~Vl4U$hB5Mx8E#Sh=Q{ zn1*^XfXi(N6btp;5OOhAXq@7((*$&&!ZlI!jQn5kfM)9pc+v&8WT#7HvI`=cbV|oJ z4f!$Wz|s)%l~{6?L{*|?o>7uYGGQKOq#uYo-%W|J3*HhG>|j??-5>wX_nVzBEMBik zS9v>tS;)DTyZ?dfw7%a%;2~4(X(FM>52!bO5feAA06w`>zN-^zZhczKs}`so&SR9P zSH1^lW!aj`-FzcNMdb2H{n4+1cc*G6_v8dV!!cZe*vNVsCzSvFb+kb}VMad>VBsyN z)sb+}4z1BT&QHniJB9IBc9SBqhIy_#d%@6mZ1$NvRe;E(FlMx64!gef(m2`JmX}Ct z^Z7Nm#_>uZspY#XfsIzlBW9aQX0nOhPw=r~fH{kbWlD~99dVy;f=@{m3^~z&D_AIR zQGz@oM>Ml<7d6T-SBf7}XZy5fOXwI9=$*X{0|V3pc%gr7LN@ZvlOZ!AQdPS}(s+DR)9VXdn6%{u;~D$w!S_H^t;`4Tm*#Bp-|tOMKw8+rar6 zegV~Bm@>>nkFAQn7LG9)a{U6N2_|qD0u&*5;w#s_o&EM^-*1fZcLiQ(2na%o;^aV5 z?6vN+IG8Fky%tHsPQh~#3+MJ!Bj}Xf4A#e_dgHBL+R!)O)yt1BlW&NB$klPsjxrEu zclcN+SGc5ghR&XM{{z{ufU$P4EF{9`M*>b3L!1`?Z=My}VWA4RJl9<462h4#A7elX$x{m3rpj@Xvf|Y{Zpb_RRst zL`Nh~Ka7s`KJKh_eobgQ@iXn#ew-dUFLD0#gLvFvDmwG z{vE-=9fji9wQn4!qjeAv#m}C=#Xn+DtZPR%{aB3KSe8A?F0&Rs8*Q}6@LAIM&5(AX zQ4>jg2NJ;WymyGDpCqnj=eJj&s>alTg?fo%_#Spw{|8m1rr#Rdun(4HpC^yV^Z@=+ zGtonusLBfBQm^krHT=euJ6v>w;J9&xy9-`|Xn!tB^o{mnbJYFwv4 z=E{VFpww7)bfuW-I75X)IaL$ zb{M)vR%2}$T3Bl~oyX*%0BKo!*Zp`~#0cvQof^AwsczQ_}xd_NkAeVNB3O^;QFSi4rxnH7sBouKaY#K{2&E#7vV{ zM~ZUQq?bKip-~0Q?A+VvIG5y1s;Sq{^4^XT;CGY5!(yLRSAeWGCxlNgu{BPTMY*%8 zHGTaRKMg`ASFS_=@?bJghd&#|?&9l6=$3oLM(~T|>XxQ$4{*A=K1~V~zUd{*=B87+ zh{=ekqkD=@NIq=7pnw8y1A4kS&Kb{D_8=gEF?ge3fVki9V$9|j^n7~{2{+L*`Md-+ zD}?B}fIj;STd0S6Oe7-)NhoCZiMWrU(I){Dr!5k>54)S|B0q8mJU$XF9nJWt#&P@> zK-L9bhIw+d_eLd9KzPa_6$)p&Su1etqT!nCTxDL?)OBH<){1NX&`Iqcwt4U z7zlBc`BJgdQZt5yZZ&t^)b%VLkJubEJHXM>e?|nHc^M3wcxgmch=HIQsNLjlqW{8c zbAV7lLIED80+*OGzGTbM(y@{m3a|X*cQUu&AMO`=DT6{Zp}0Ivnp^z?*EX*-AwBR6 zcH$|whg0>f+Ocg>rGuYL=dpFzJB3xGRh0a*cD1dKTlP4~$Di!v)Y%Jf^_pcg_yh-OfhD((q;R$RwjmINUQsbVKdkO z;bCToL{7vzDN$0M5V#uJc;D&}EpvfbIpI0yY3=7)?vt&QX?%P=u9Li!DbRC!_S$X{ z0hGRWEb(5vBkm13aK#zeE7>Q@%jT%5H95-^&b9-DuRSw$$8Ki}DU0F#lE@?WKq?x> z{?%NKBV!irXJ$a_Rj?#Z&XoFnvfIW_o@?zAmXNO( zhd$dY;cI?Yapnrx)PPi3_gomRW=>tUmmV(AL5s*db)q`d%a^XoPnv}Xb6j+tS`{-2 zxqw%Hd=Mkt?j^ZaWcMZYARBdLHp4>{gTo2*uU;XUDS`L4e}p1Lj=O(1op+GjYfndi z|BNue=e4ONYFhM+@rcO$g|-lbMSu~*-a+);5yK>-L4MI9S;a4QDes3QP9L#kH{3XnDL4vdTjCUf8kUTOj*TuT>U zQd`uCPXqBd+=-Ll5-ZvG@&UORi&3cx&V5$SK7^REtIpYWUV{-v2`ii*Z@et-;i=fo@O0ssBsbel3W)Xu4QAMGN|R z@mE8jmXQCh%hP$ffb<@P=6|$fz<7-LG9LG}Ys7g0dU;T z8D-5n42gt{e8;xwK?sPG3s~@Goa*u60^b2X; z3LDa)AM+6qjU8Tm75FO}YhCUMyQ@3)r0<0d1AJP!5g(CIBpAOe3k5oLHQGb#_w*Z; zYd_hv3q+;v_{_Wakj?Ns6Y)Zw z-LEKb8J=rXcnE}X-u?`QC@c#**CLk3)^Wkt9}E(}oB*{b>F#xV4=c}wLGIV7TwRWU zoimn}6UgJ6uaWkCEh1Qz%~C=iU%)T)@4Os(Tp18!Y_$71|GB^^i#K?n1tH<3m$flq z54)9n1N7-gZv!MJ8^K$@NBPd8+a572oi~W0C1x?9?^H!VTaa)~Qzq#w_qOFBw^>asO-iXQ1EN zn`@fmVZW`&{pr4KI+~hSXM|~q0!AKBY@O8^Z5;S z<|%Qz*v-HJ@Z#>Q7+$JX#G5 z@c>@~$s%7R5VRPN2c5<>&$OSGwo8J67Cj>2)8J&d@We^cm-5O&T5@dD28fyixW+J| 
zz#7ISJpDX-SrlP2y9i;K76zv9A(d9?=m@`3-gb0MBBS-z+vx=VsYCIMN-Ae+-lu@h z)Vka=5z%}Wz;+v0a^IN`WA*~3-baVqyh=r<;Me|*v@yzNKA#ceAl5*{nZ4@W(9^>h zP9##L@UcL?%Tfrc^!$4Iu|{ZwU_D(iw(winB2@89C#7m}GL-n2eueOekk+(5rgYW3_yQ10DL8$)z&3V79@{TdI9uC2I z>-S&trV72dVbz_NvX%1SKD(kG)@=yMD~;tz)PGYaWLh1#U-dHaJi?Sed{;c;nzWg? zjj?H?VY-o>j1y?S-+m?XC7Xo}JyEcelr9!K?kpOsia@y@#tjR9KkSMhH%tf*moY8uxe?|rL~m8LLID{;&VIzN(oL)a_9)33t= zHD%17sg8)U`3zk!2$)9deLbq`-CM=wAL{_J#KfZ^rMk~&!zB%Pu=9_brtYp|FtaiP z=0guXqUniew#^7zGD0dpP#Kp`$L@{|GnbGWzj5EpSbr}uvxo5HuFi8P>8(uN9gx`S z+;LS`;(ons0nYT8CWW@ryHJnuS@iX|Czt2n&bWDy$P3E4S1gIUr<4~QCswhb}#jO!vB@H$a)amW4gu?guomu#w9nk%hMBDa7hUlz#4 zDw#v}fA|>v3v@)mMtmYs4*h=L!*rR{LaMM?%nmr>*tP<8RA(tZav_TMl4Mll-c8-H zQx}~ZP)X8U+uTcVkyKNmM$pqAJX6%S>;4q)c5*4vrsZAH7IFrfEfyNz zG&&e|u>Pnrtu#p0w9tW^L=Kv!?&kfM9Hx@46>LM9m zO;Y&IF2fEj|BHF&YxV}grPEA+eeddtEJR`?Y2-l zl|naJNSPr~oOBXVFV>tnB)pby64Y;Z%f(6TGR8w_G`qXGEYcfFy>F^;{Y`GCxuoLB zZ{4m{{`?!^ejqyRx^)c&@FRL^NIl8!Ip1C!b7Wn&UAK9J_RV|sReI)gd95K^ESMMW znuR||A1ak`^_3kvk(zQNB13*yzc=9&`p|&Hb+sLp@~m+5RI?W`)dE?0J|(lTB4(P% z@g(GNxgmvPOO3?Pc*14ObH02DWF6WMXa06DW7Q!|(X!Z1m+KR$8GAM>XW9qGV?9|00f?XegdyN**h@Dbr{Y% zLr_~qxAxx@ZI|m+9`S5q*FBGU7HmCFkP%S$HawtiHY@0j^x?4F_DG5tMy~Xwo-tRb z7UdF~tyDb$B+f;2dqty`XDHdDSr4<$#pvrTV_AT^A<9Xg>n(Z)jlCBK4-Ss)VjQ;e z`9>nUQ~KT8_L-7j6tUKiYgdV0J%7XpQoC&i|CiJ_|oAb$uuPNC{jYXdN$nXj~zf zy<9)`X_@#-h8$c-p4GHU#WTdGm9^$o`s%}xv87J!v;RToCSw6gnlQQ71yWU3hLzTO`EgYl=cQHgY%MJ_I+VIn{2`riok2}&(XZDATO9?`4BYr%x z^n%@IXkk`@5Fbe;#(S3TzA*JG(mG3phGF-&7B(UE=Ej#q{uk;EOF7n_JK{63yHQ;2 z8A?Y;8yV6OX!a8ISy|B?y&ua|Dk-p6#FZ)ow&0FdGbB z((G%QTt6vV!Kqu+9=ZU%PYzJ}4BR5>u%2W5Nq#65CpQu*QQGSs2!4@-Fl^go<7ai4 z7cZb4*m-$>T=^8YHNUV<)4})q#bsU&1QFE5gO9*Gm=kf+thjZZju)GoYhpUs%lUlY zA=h>Yy5RVXN5~TCDw1076JC4Vh`4u7F2uD;E0o(zxSNs>2PpG^Rr2+=4d2q%u(L!O zE-Ji}MCPg3SxxxRu;pjZyIqR~`^?hj>y=XIIj&0PK9jO-@Xn^~;mqgKQNi9aq6Tm9 zZ0mv!Xmm9`{bv_dM3lZ%A=>BX`G~sJx77or_iiEtTSwmg{Px9fFZ2`M;)1byB3T@T z;iLG%9qO4P%)cH4@Y3FW1e5)9jr6Z%LaZ4r#^xM7#88tn%o=;DQbB|hCyN&^6>h5EL zz{3H^2GZt`U{Zaf@#@nm>yMuXfT_e!`vZ+#`SCxjYK&{c9N!+ID3}@0Cn-9_i`8L75%6bKi1rWR*zZ8 z`)uBUd5HAdJ#y2F*B@tnR0Tkoo2`g4#xFLJg1O|dYIG7dkT5>tNMUB8yHOL~VtkT7 zp3$5VofWW6XL-_7L zk|7~4pkc(S{sb_8ph_ZR16F{(aU^J3A4nIme#}oSSzdi|Y9=a2UZ6-o!Pil|m%y98 zP#Ow6Gtt#Qe`S9zxww+dq3)-TXm+6btTE9m)GE?WuSv2=UVj96#VoXw-h;z^yD9x* zlWG9>wz$P`?`|4(1Z4O=!t8nugDkZYSTn;;U+HrHvgwFAYi+nd2w47)_lWcJHWbwz6x^~EG9H1D zx62s|;4T?GR-6+L*j-%-K0jp)qTv&`^KnBPm60<`?M1g-#czi$zb*vYK&k5KNq{Um z=p}txBppi0HWBaykZ5bmD6(DyV-KOyktwPzvpq+N14`K5{#>x%P3_OmGESLxvJd4QF&9WN%`-+t+Ll6&V&u^>0vro74#&I zu(N(Mz$2 zDGKl1*P-5Vm3@@f5-W9s0kam^tM;Fqr?-O_wt!G@ToBburxH@cK*# z^hHyuqI`dWfBzN2+NblqKO-nk>i3G^l}hDX1~&w3(nx{Kr8)$lXQ_vIcYGA-$MZ*4 z)XVOzyNFdJAnJb``-D^UsDFrAU7LY#+AP3a-BToeBGg3%$HE?b$NYv0Kv0gCZnCJr z`LG2lEsn8r{na>PtD|!Q>cr8!@2O=a8{|@QG+hJ?iB@i@T;@S{Px(R~dfhQ-e?hZK z{I+&vojIFTY@Sj`D+v!tdi}L=??AF}49O;fs>%D~_$h}@ZmVeBQIR~j>cmh+hK9RE zwm)k*@&C)7hX*4#t5S&_?EWvY-d|h=QZqz+O(b7!mG~h6oHQZ@QO@B?L4r`TF%Yfw z%GwZhgNg(=$yloq{xcdwH4y4XHaW!n*3GgY;8^!cLi^bVZTLpEky85&x@8tZO!}pN zpoQXD^`OHox%U}HwJ%yEZ2ZdKB`ghIuWVq{vN0vC5RL#!98c!ahsPSfS~P+nukGyTOiLSyp~%$aGNK>LuL~)Kl&@=1~xOSPHOQjJz#s z{<*Jg(x%cxrcgB`4aLe@lPUe|mN8gdMfNS>YVlz{?J2II9`A@QzXzX2RzW`$#a=4r zHzEw-6JI5^rk_SPE|T7e(17X-jCUEea;_eMdhr~nBCH6dJr(vTV`juZkrqVpO0rv;apf@j>%3U$hJ81aXbkMn9C*S#nkez zOHpCiGgH2J^g0e-?5>2rU@#>nTIf=w@2CuAZa`{?P8IX`{ji};fVJ?`{{!7iPLKoR z&c*W51vZkS=XeF2Bd18=z$s@!O5bNxQD2=P9%O@lQ-%T>JVhX2Uj(>5 z_A4)wV72*@YgsY4_9pS7{4tz5ca^%ydrCx0Suj^w5pn7-NFsroxT-rn8%a%9NPOXC zy74_j2EngTLRZcgsBna4H~2h;yt<^X6^u$>v@(Rgw!Ku3+*mM14M~Td?9p_zI9&*9 z^s3SxRY?dVGG}=|(h=cHk2@RmmNR6*kguVvCn)^g4FZ136V(BO3miUSE`FVv`>#Qk 
zsiZLj5!`^a7x%ece&ENLNqeU@RH2&I{d$}+7)tV>rEE@1l@O2lWT!~lT z$Fa}i8DQ6xjjX+aaH6tI)k7Uq_GfviU5HlC1Y<7-?MYOQ{C#L@yyj! zI&gZez6uVRG#8TKyXmQ&5QxiptPF0uQhSTlrWqUeEE0hR%qwNx^kgHjYd3~O58e+p z&VEepNA@J`1EtuoztdyQdzXU1gv2YV1?7zX;v>PT9(XksB96tH&&U^51hc@&Yb5y{ zn|oZON=5{u5~Ut242N@wY=P|-W@kM$eWLLS{QH#$Ew6$z{$bb|-Q-O791yurA;e7B?rG*hG8pK3 zn+5iBh~JTW{GGJ_1LH~3L4;?oO0iqKi^~6B=RhEg!Og44t?}*u??J{8dx4Vj*MrOq zxa(fYiuVV+Z_e#g7e%N9jCAfKUhi$BofeT(frO>&{?K}k*E{MgtGGFXTXoIGQ(pUJxOx#-=j%n@sc2U=ik-sz z0d?fDd7t^^vCBqcaZ=U(5-k2b;@)U?$)9SZe!_YEsj{Xvu3;1ypjZQj zzKLiY=h*@-UeY^oR9Y|Gg1d491Kj@8T6Xw7bHSOY5 z&5G|DJ{VGO0-+4%>K?++KrnO4^P}EnInadE7qi{x(0S0&-3P=QM6cSeh!#J6U)t_J zH2^77T^o)`PORp&-@c`9vnIR6CoX6+?0J0`Zl8QpYhb7b^gdsY@Z9FoYeaj5PU9#} z;Vd@qlQ=UubM%jYn{9v;?w4tJ+EhIKqo8_L281{4lMPZ>)U&+kBA{}k zePW|Pq1RBbg8AN#V&?&5(_E;1!iNXsozu=(2*kg!WyT^b?NhUGh;ol{N0En(ZF^s4 z^rK2zb>^w^t~+NWap~UzvNuAdtjiNOWkn^v_)v3%xWG^7nS4a~(KkHt%)f&HMf>Y0 zYf}vsr?2&?*&uJLKwyYd`MWAB0eAaY$x4~V+GJKj@gZ@t0o_`~pshX>%Ub$h z>mNaVq?_s*v^5u!e@~7;&^p(FSN5rd&j~JH?;T_c4+hYRxB}*i&NTdGgSUQ>U?i{wyCKfB#qg<-nGzq= zQBKGk9Et*KutntTFZ$wX(r&OGsS?LxVLbDYfaVeR2G>Z*?rx(z*Q;9fWycGb z-oV@J3rEQXP(Ka)`1PbIjoYUcRBON)EVzPN8SwyYCIbj=Z%#_sP=F&?gWPREI3A!I zrto_Ta%w0|90~Z186ic2?;qRFCr2c7isbmK{k{aaYa%*QffVewfe2bR(n~K#%%LtE$1w;G`9V&Kl$3F+aE@T+kpqSPnrlq{&~Qw5##+(a(S`FGsO1Lbefc-E`ohi+O5yZzLsPd zPF5i~<)ifkaUftpU*%}K!~NaHnebpGuXvBy?8V;XMc#+Jr|OwYehN`K9Bsm0H`vG! zIl|arrYgK02b~P%E#EjL*GkW)xWOaPW0aBnRU}y8HEzPn0F!v!2VRF%m5&*;dD2cn z#&EM*QvruRou*4!(c#X?E?|u#=oc2V;;gC2ql^tr7Lu-QWAY>H&|$8GV$`0lcj2xv zl+l#n(JSseMv{y23Yjgr2!O;Iw0Q9rPbsjKo6uMg7ek_ufLe+UBKenS5~uu&4yZOT zIjs|DNOX^rea{ce&?m$v1AyBCCaTw)zjvncR4!c}k2#&asQn6cWm=d-OA6CxU@X%> z{lrPHr;VQ1_lm;%V0H`q`DH6&X6qx1y3{n@`t|4TK84Jqb#nBQcI}78zT%hCWXo@< z7q2o+Jpz9Ao@kTe*X+Vkt-~z9%;-cBN6|R(8s2xcBS#_ay@!XH(l28vAjxC<$U_#5 zq9Vw0N^i*rYyoNsdQ(%*q6=$a>Tuuq3FL(r9{Zzz{Le_CAP4Yc`6N*r;K2$R8s$@zh2I9Lx7gO0+4l>Av z!A;q1)atKGU%>#Qm{%)dF(w`hyx*1gy;T3KZmU{OFYxQxpQ8O?=Oyiae%g9Q7IRP9 zR5PF>gL;~s)vNcQM0zHeefruI332Olmbgnz|0WWeAoKS#=hSohnB@kY3bEE^z5WNGXTEg4 z<7g4Q%sGc(d?aU#Cg~`C@1>A;6)Ft^XVDgbJ7Pz+eH=gJsD?bvsDz&5U$QN}zYbj= zw?E1M8_eFvAny3^CZ6fSg)xi4JdXc%9sVioH<2y^%wyJY6#Q)%6|9aTwHO&tEc<8k zG%}T4#jn2Z{OQH&(v!EaT-_t9OQsg^&6q#T2Q5$OJD>wF_P&%1#L0()JcRWd8(+(_ zkjM;^C0!AnkT9d^L|B!dk&fdww~G_0X7yCnFKLYxwTo;rh3aOUjN@;vY0)Z?c!V;# zs;2kto=v+0xsh{>u27N=h=Iwl_5%*Xj47r%&;LlYMoF3c>~svf@(?w_SK?>(pAyn} zBgSYZKso6&zzH7=^lSeQdNEi6tlfq5CHggYs+lRZxgIJ&+0}i%;P}v!F^C90|;2H1H*2os50z-t;ME zrMbD+;8@*vL$IIlvsMXJn&uiCpYr({-~znD@TNueTRT449KdfOPD2SCjKOT7~waQ zwvxe}a>W(ZE(>HlfIm6ZM8a#Fo^9Er1l21GN%EQTyC5Y;BSMW8QK>}II?5Pc^44x# zFkSS(Z}wX(msO&?m35+Y0HrOkwAIsz@%Mn3VF6i*ad}e;mpGfo=qEm-(CU~#YplV+ zDZmVkT4mMAX9@Ws_#*W%#U-3c+eBz|c|U(->8RFKcPdScscqs=sG7BOChpoc0YI%z zu%T{oduVV)suRhrOd_W?B^0XK3>S?x=N6sr(8q$D_PqQ>oTFY167{3{Vu#$5L)y8H zyqhGxinVecEuT*gI_P_DiPAd*K_nONl!#7Ni?R zJFlFVfQ5bWQ;g2wT)^FKy#ZpD8~!B=A+0Cq4>>SE!0d&dUVC;Ia`i|D*%sYdTYtz@^cFVXv=tq>10e zjXIXVbpEKYp{YavtfBamQ`Allmr57OW?RxFCop41Z;*w&e{ZssD=zjm91e)*$q>3l z1SUC-WhY?j*QvP>y={3NmRA>Vyk`wn_8{kCM_`CJO|ByWA*p|M{A7%W3k7mHFf*&o zYCP)7L7$=WtPA#`$gSpHU_`Uo@6hRW@khVDR`E0|%}|Wi9-L$4rp(N7)&a!Z)b16H z4zUj^?$(J2!BP|NB^C?I6lKq6sX+aD8mrjCfLuaQ+?6i^JJo*x^AMn4;}SrQH-ajSw-j)WDhmYCFoM@ z=CB1gjQlAdsv$u;ty;eofCghW0Lu8umJzOmixHAnvp|@gW7F#`^ zI^MWdI+@m}bi*xm)vOmjgjA{6K>LHKa4yj4l-@pk{yjIsj#dxxAw8NT1KLP}5UiuC z5B^T&kifWr!*Y_|``jeq+%D;UH>*8UFy=W6RYrQaEVEAtyi%vl^b`4SCM_jE1PD7Z z$CW+%ukgSB{M8#K*vYKqxfg~;dE>vE@}#w*(GOc*tR3B5mb5s&L+l~4$@0;aDH3@x z3A08YoiIrbD4u~3z=cuJES3ZI?NnlQ^)g^!w 
zn2CPM2N7aY+%_#`g`1ueYiRf~gqtqpiih|9{N5a2i*Av`;vhoAhdQKa=?;3;0uA|=FzE$a()PxZ&3&A6s9E4Kw2NjIAeVDS?J8>7B!GuTX_ zVILbB<}$T8!!iRBR61|3FY_G>(Qf=JG?1V2@>iY|$S>y1Y}5N`KH6_u{}_D?9n1LO|{a1BC| z<&KjAps$VAPqsR@SuzH?pg`9hu`CAoVS$pjqo^mZu(llqE;2fFq_ez=b=h(__i)42 zcaVnuvxQ<%m0RMq$#Z_oPOATjC41u><&Mm&8}u_#i`41P@1E1VrQzXqUnyfg{riAH zhdw+;I{Ef*Zno%38pn`)|)a^-Lu#r(s<=1j}Gak=iv)mX8eGZ zGNVdYd*#y@BH32qWzbKeiG&SNyDSb?P#7@XMjb%N^2K>}9gLq8Atsr1k+k@s$D!&I za3#MK1Q{@Cs*2Hl#*NFxCbS1$$FqN=HPI5URb8H!`-Y4de&GF$L4;6h#u(*N=j`GZ z_jxz7WHiGa^<+GEh;@@pKSkgb`uL4Fk@{vNnC~Onq3`m5*H*7 zJ&dgs`LS~OvLl<++4OTqlZKm@39i5Cu`jr+i?O~*4Qhu#IW&4b%jGGr^MBUTEpyd2 z2}Cl#LTU<#OXxljvjpVOcaf7D=+(Bb^KPvQoXI%84tBzA%EYIX1BaQ7v;$O%&vh8( zAUQY$l2u{X>!CmGEXoGNpn_}*-@Z+!Ja2hhUB@m8qkKWyMGcjS(k47Gz6AhpjpG+M zyP7?GOX=kcxCL+7`*|4PeF3jR#ek$S8&#pzmF4h3AAY+iYTUTDI{H9zv6k^bji)}= zIyRaK)2VU9z)Gbl?wdHk?Rf7~y|?IZ@t6OD>PK^&+3h)KGUU6?dl+%?otMsbVx_$l5n(pFC!xBen^xtNN(QLobszsia-71OwfzL zTs5LkxsAbf-rg4#F}=wze+N+0nWd6nFsY$ve3*^0ul-QI=L{Jqt_!))KVS)Zw%x^0}*nd=6kdg8a!YKDHTqXMEX z-|nyAp4X0TxkpXun3uC<(J=|Hr}z$D7AY*dP$9Cm{%pA5hydxwS^u{obQ}}@4~g>% z=8Kr5e_dh!fw>a&aX9)nOQphCZ1!xIkjgd3L)yCzlykQq7P1Ce^!9}X@YLWTSfOCM9vyxBej4;v}ahmhqHmT6@D*y!!;Iv1zn%OH4i}& z$2m*bJCy;6nC6Q|oirv_jKFt7-CU?fwDt9-HxB@@x8wATa_(#2*X{sfA-+4pjV#UI~gE)!bOkd-RepV?nNp7Duh}`-tWDBOcObK^^`@ zhf+U5J(igP%_PMoodie_&vZmtAgBvp;4IZdH40?2(jPTQD#~AY;lDa{boz^|1VQ)o zHD~;+J!`q;fF7?X;=;lPR^=l_)~WQl?s}Se1v$2+q#&KRGboU{QWSI?!_(L<4ZKqt ze@6mvA0seP^R?QEQb^X#^#}u`!E$~QErh4{kU+UiP?=BeS4EDRj6hU@s3A`b&_wA{o-ME zMRX~xzPfK0@^CPwMp-J25U^k5pFiszw6{P4w?vfR)1C9*tvz0Ii@n-Di@HD7#3S4j z%B(=6e)jwE1d++upETB4to9U)GeA73gJtgy`V2Ttqo)yO<$l6oek9!PcHMenK`dh4 zCh(HIBs(pJ>v|wA%0!Aile>JITyJyqvtjQwICV$N=7KS!Fs)i3Ez;SKKj8uWc5&c5 z=+$X<(QlFxL$$WJIOx?Cv%J>U@b@kCSQ=qW$0(OlXl@*8`DyGH2aTK7y3|hUoHH%k zsb=A}k8c5{Y>Tz(?Uj~Ni{Cfmzuwoj`CM?2G7IpP&W~q_Gi91>a-h~jF3#5N42u@_ zF%89UX6~N>m3@)GMb5oJ3fhCf2+}c6bkv-2y7JSX=xALn`##HuLmQ!~HhB?HZpPxY zfE_XcyA(VnEQU{zhy*#M3L&i9bM7Sm)z=FK!cG#Wd4sl?V!^BVTvI~QxHRV#SHYhP9R<3&uk%T5LcIa zm-Ha?m?jo1?v!xhNOKRd@fdQG8A>UwtT==Lm7@@qop8|N>Qe$VP+2$Z8wp2X2M0pA zZlk;z6uos{S1EuZLlsJ=wazm2K zpT-Xm!PRv$k)FMFpsbl$*pkmT(cPgzhAHdY690ZsJyDf3 zSovF)gwHEXm>SaU+j&!P`%B`ke7rwwKOD=DQGUEg!8@!Gzx1>$d(^b>-4BYMJ%jl? 
[GIT binary patch payload elided: base85-encoded blob data, not human-readable]
zmRK|<0U&$qs)DczIZnLqJIf?1q<+7{RZoK*i&DEy3s~N(pIE)_*xVlCBEg0^w23iq zyz9p#AKJEmRX%;wZ%IK1=DY8n7(E*C=A3(Z+m03OvdXjOSo(ay0c@CH7fskR-YA@R zL=P^wCD#r%%4IZSUN9v3d^{C*(AK0T4SkiNbi&uFc>*Qba$i1i#<`U}h-6sK`Krw+ zXX}SDIIeZAkD)B42VGblOC^)a)`)q%(h*4~1r4+Pnrr%{MDMmyk=K^)vDMYZ zwmTd0?FpE;Ir8($noZ-Vahyeb8Zxv(PpS4NLpiHN8vn2uV0`LEN!I7rNYM9)=>E$$tB zL2*A*Y{B|Hx3-YODdl+&xu-R@jlnri9r|>4e)0lsOGAmwM46S7+DV$^<+)rOmOigm zGzLlwKO${iC+``==I7|E>(*bQpS2j5Ad(%QZfW4YKDWpFwlIN#fuQOmZ};L?|00@! zvbis_P0yJ1oyPdBAMP$RWGSKbt?*&inKj!v0j+qhe_3aN;wCt&Lu}itnPo&IA9*)= zTOmlPa_6y*#{+u3j2I9m4w6;OGAY!nk$MR^Hk>LPB5D}3HzRPYit5&_xN_j-G}4|P zc2zl}*~~#fK5mVas0_&5HujdJ(*t1_>^68{V+esITpZOW})Ygl#@}}W=+x} zvC*V}8-UPFI^B<}&0Ja*&dcqeS~ji$I&3MaREY%JH?#<3jPLl^6Jm})aM;lA5pMR# zQZmY#f{bEQCf9koD;L9NM%)oW1uJ6Bda80TE>wXMqzDAq zNZKy++quy+FXyF7gAti(%$ZS`sw*hUsii;aBJLed$8F?G@vIpm6Xt~nQ|lB6)jsD3 zO-WR8xRE!wa1;CrVzS}5k_yff9;?+b%WJFmR~J%eF#S-;rR;6m7Aw|Qgs`egj=p)BRQ6DtQ+%FPp8*7wgzd9f*w zT4HbeHYXT-`LE^^F6?ev#?mA*EZak-y`yfgHe;wJ#(OPz=x#aJ{bAjIUYwRN8HH^g zDZB6>m-B|dx*gUi#;+knv~pl8AMh-j<$Ss|fu@M=E+G3CIAxYM>z>fT9Cor+<#|6X zAdio2%6CkmqSWU#@+xHbt*4*c1vl)M$y9q}t}eRZnh2WB%gX2wCZVYN)D8#O@Q@)zWXFs2eEonE4=g<~2?u+NI?szDU#pxTf5&Z?PyDwq5@^ z3yL`mXFfEf=9Wz=4TS11vA`FLsaehD$|M;xRirSdFYq(h^$utmrFXv5H-;-k#L;!V zY?yA6i@^aAAs!+k(R9|mC;76E8p6eQgQ*79BUcruCAw#?u@du4rL@eeAZu_NV#?uaGKN1q8CbQUmnXpd9lJbjE;gd}9m21gxjrY(2Gb5c~x3bkU zy?2<|oBu1v_TY-&P=VC!eg{H#r6|=xD1Dl*myUeq<;WA%{soke2a@Mx5iq%42Lyj< zkgV+`YB$xBG7NMUua`28{3sbv&ao`#BfTh7Fl;o4C3nPfpk}t9k}Y+qp+%TtTPjS| zp~t7D3Di@!NMBx*ywc@X@sMgx_K&7hcn+Q?r={~HZX`=&oR%0S4S)|PVEvu8ZA`yC zu}D1cj}lo6MTsv95fAhmU-qh~!Vo)(3b zSkl7P`C$ovHT|OwIo!M4z|31 zmM$Dka4(29$A*(+k=UfS|Cgpfl6qn(-dsQh?Uf3;On|nLd8@_v#SDW0FdBtM?-DA5g zJWap5H&SEXW)JEStd8}B8Mxd?7X}X|mNl4P(|bK4RQV79BZk_?cB(!P=N@+YvR@#v zb*?B-is`*an?Xf-JzeD}JS&Pv`ylcjzlQCr~nbx-${zeDALiLwX@ zCP^(UkFp>LfiiB<(xp_1SYHTcORp63?uvBF$B%3WIm^@^p7!`~t|j(i$*lcI7f@#R zxmlb`NoG@Bp=TifHwcOY7&yJzDZ>^(`+Ur8o#@Pp4H~i8^Qz_AXB2PEb|e56Xo^Hy z2LJ5>O&R#!X1kDo?oyg-1*RyXLP89Dn+!rp;+I}D%(+oxnUJgg`G!>vVUMH-`8mqs zH`{|Zq2BwyurxEix&)Y-Bq(RwB%pZDqd2vm{HfB`e&mJWi3{5^^8}w1f4TM*>lISS zOqrA0b`*Q~^YGny??16q8rf<>1EwSWWY@8N_9fuF+CF+T{X%ZWaO{dd5^VA(6Dj-b zhkllk(#}|Ta9K|y=?5YTBz3ilEYszVApp>9n zhhypQLPlF?Uh?f^Bi&3!H(6^!Jv+nPu1U1c@_Bay5^%jrXuCxN)P&}%bnGj64=;Pz z*zIEWKy4W7ITzJ8Uz`Amo78VOr{=~S{OaD~4Hs5NU^H2R(LZ*V=kMe|JNj!O#BJ zs^}Ng=VOR=*rJ!YLXWpJ_eyfAFHM&=|9ej3(SUlyJu_-QQ6keTBGvj|mbtwBtMms1 zPYQTk#9vXT>7GkF;Mx&Jv4jMDc1Sv_0JbS8sc$Q2nrg@%F%P;|?7oEh*p9SRHbc&0 z?_zne;K}_flZ&M*lL}k_H%RUU#${hIK^;tRZk32d^mTm^pFJ;$laXJa&pN@v*vl02 z->(9|^tOL&(c|S&c#RO}<&q^vBoJw~Y0(IfzxLF^9Y|rb8Q=0kbZFk}OX6X9u)I&Y zmi?n5ZZxf_$Ae!B^tZOfR&ca#$IZN}$yX#!omdB2(;F-c8qs7!p0SIZsh^2O5GpV0 zbd^5X$W+``VbBtgG`!;oQqcv_oHj^C+7G*1GUCFD8^Al!7kSXIP)S18M2)(7D@9He zUWUH^C9szY*Qx=Yp?D)X$}E@B zI1oWynfHKUehxR+?s>!`!}|)!z*)HS4OEs(!--*OK-=wbAUnmATJNbJ^_&xle>F6f z=w$4at5hP3zrR%e48qjC5*mLK??JYo+l{d&3%D`^3Cp=v0xOl#zcAk)<#XXt$|h1t zgvuu0>)}W|rBO>8vay)WE^bukk|U0>#4Gw;K*AWbYKH)ETPS)Sw*Kd7h$YVn4V>w( z-pHwG4o!61U@^#k7ts`88%=-|HSN}uPhQJEOv>S*qC3-B?aa(Z@nn$T4(2m!t`Bg z$tPxDK;d39$NnYz%fFn0ut3}jEn*PzdFEJyl_#yC1B zZb4pH;5X1SAT0Z?u)AsK$gDKqid156+8?2tSs^0;7{IW&k+I_4salZD->(JAX`toA z?sHMrRq^ zS3Qnp<9uvrne}W+nx0EIluqmN-^K>Uc~_cG3TYiBjbb2~JvqO0%dal6_B#hh>(DYqWe_PxVZ16z5!0 z9HnLgHGXQxQve>>N6fnJC7 zQR5a!@p2yFg+Tin=6*2ghtN^MFaLTG)!zyBl*SKv5=ho_2CYlQvdNuKzMvMB;~@0q zLnS@v$yX?5gBLWTk`i{YgiwpbF2DO;G+av!pw_$Zee|Tj*m{^B*Zp{T^*`JVI4w2L zGZ|+p0S`&UJC5B%A6f|^0F*!&5fp=x_@q|?6ni?}V?S9hmn~s8sogxu{Mw=*8&Z4X zK+d(!h{sZJ@T-uL z`Xh~dWN|D3A$P787Q*_xOH0r^AQ$NZ=hxM$x~mR;p1Pw&ZDCbYQLn}6%mP_xwAaI{wt0R?%0cS2s@ 
z(Tsg11@Fpw@nlH!?9~s-^@1(*ekK%ZI*fP`lnVcZe5q3W4yPDDw=e!_MS%^{2wNJ7 zT{dNfQkLqVU-x3x7vyv(Sioc}^pd2DmoN*K>(;S?PO6y3XS|S#ZQ|6@Xrc>yX%Ez& zwtbqTzz>JV@_zI450y>YN0%p{>*6G{%&U0;lK)ebwH_p8==|-;*-MlbPK=|5=-md4 zPgeM2~m#yldz!uA5YD8&jX!94k5q4p$>*{#); z*6&xOZAn?Kj|tuZZIzQMK;XV=P+T5joZdnA-V5r_-+ZDjz4FNo_=m5}w}mPsqv;!x z>6d%yok|VbO^aVKL3adHhkc{=mx->_foEOptqq;nK-_KyA6%D~B3>6HhEF)vID>uD z0aUFwJ|B%`i@!A+6l79piCl-~Y0zN7%2 z9z8x}%!J`OdrEhe<#(?&iZO{$8j)BA^f$UzMba1je_Ww9i0FH&GrxUKrSK9S#|#>M z`*tNsm2~Yj}X^uzObzp2WRJ*Kj;ga7M?+*-z-D! zHOX5A<^51U=Tyx7oJG&^u5)d$<@$Y)-A$^8Mg1^JexnwPSdxY{0%BYzY7V02SGv)S z2QuX=J%^u82I$ET&%+;-;17^{j7@{r_k^1!yW+O!F6LQwHB!@&s%en5ZOuY?gBjoi z>@Pofc7#}Xgk_s$s*z=8W2j&$gF%hW=K-IEtN-Z&aD6)TOQvpBPE8aJr6gNy#C*-% zV8x}{M7Ij1QL>u zBSRl@H!Z&k1B4BO>`B4#u|vu4&_-T3@a&*;&-EeqMVTR`yrJ&FfxKfup*~}eAb4~> zk~0I~GM>3`UZxx)<&ZzNuc7MoP*qWxtvX8&-P5=z!4JT|iby-8?=2^>L|)6gubfn5 zUozuR3-K5vUO=^-LI!?_V5o$^H5|t&cA+;rGn9C==f$M>vQDpZ;)4^y;!rK}4)J8g)OP(=AJ$?H%J^%fs6}P?dw)VsC>jL=${4 zTqQjqc;o2FgYrK3ilT&@ZZFCWSEW~i2v=I2pPqeuHpiq(H_H%-=URRHEkx+gw|8|| zLYCT%#q$fLheOln0LDL9eVmc8LX8|vD`3DfsiRxva&DHVXK2@f2e-;>Atd<{8URnG z>r|;D(If6p2W^bIiR5z4`aYdvC63&0XCr!9MHK&RL7-kZ6x&)>^5h~<0|XJ}!7>p! z3{VKK!vp5khOFt}Hper&)YcB$%4Z;MbVjs$h72>+gQRDD~svxMtKi{lr0wAlQu#>>c^82$@_bpEg(cq;;-_6D5zL}hq}4S%7{kw zZ1y$HyAQ&RyfM&a<$LfqhT?oLzFv}uAZ=}?kuO{6wx-9DN_GaBOPRY zx#Z#>#0!XfS$)W~-9OY5ZV0rvhVcl2Ah1y^^yH77v35V&d6TdJC*)AOR>H+GvN1>t z@@7vZ%^L8BKn5N~#4hXChSy73!ZCuNBE?jd%CD4mNEtG~oZ1&<1c=V)bBK-Aip_v* z7fHE8duoT7WD?~SGEdO9J*4*Z1Bwd8+@92JhOprx((;+qpBi*bb@RtDb#5DJF_>8= zSJ7E7gLSb!Vzz(@c8f{6r7U#S6JeLJ9n-2?S-ZVX&hw_0x13Q5PD!BsBAFX@&#?Ma z!YM;|#Ytsx_^enROVTLB{jz#V8ZJ=8fgV&REqmdwdKkl%Y*}p4@rZ+awdc++4r7L# zSnuuNr4x`ClD*h}aF7)3UG{|OkrLO%=brXIkn`#DGn#msITC+@bVi8wac1V`zCQxX z2wTU8l!H9^ct@3|=98YLyTUieRZ*l_bPN_#V9GfboCdp6_L?L|;y{_RXWfiQd!4Fj z1H||dT?uRg#=hazD^0d(xSu#1Iyz-le`Ih#9!54bdK;`zX3-0{Wz*T#@P`WDrZl{P zjW6X3)Xa;rQg&KNHuHt+#G6q_XRmlC@&f>ga7iO26LDrNugKVxBH`Pp zsM8z)VJjcwG_gGNK?G=SSqD;?I-;`Ql{TwNQ6M~+mGUKd zFr3dCLvmzey^fPx@HAOxwspeiebkbdy=8LR9(80al$RGZ7J>cEx|Df>0xH*&>e~Lb zcjYmf*^|``mT?K2+O&xyj*&8=E}s$06)w3Bh{h{5mK0P=3b_cnsM7?C<$DVC5;>-~ zT|199O#qK` z7f<#plc5LFD;DSswZo^j1!bRFlBKMsu-~pHK$a{DUq!i~w)prcndUZYXrOXJQK@t^ z-C^Hb1hUbd;s1p9;s=s28X+Ll6@~*c-Ok!YyKL3npOCp~o9}Pfszrk+vMr^>?`UXW zw?+mc)be^_e>cwSRK#7;0_5CU8{z|a%;~NLlKtJJ8+O6tkjsFpdb0{M1w){lwO?9> zqS096^;l52xB2sGMPO@H^a&V2ex^Gr!Z_pn+%N7e?rUk-Ebg@69S zO?2*MiiEbv!zJaB&+H0u75B8LMNEz8=2Lm#!n>o6d47Y4%%M0`68t(ymq+O%+qXK( z$l*7&64VT&p{3PFhASjLT04A1-f92#VT%c>Q*h1h&0DEy^e4woq6AZ{6f^@tm?O1N z?D&x)simISYSfI$Yn~kc1KZCW{${4>+|FDc&Hdz>$ws@3c^49Rl>PZk7AO9|79bK# zch43kv!_C*=d&Yw-~zeKmBJ1IZJUjz`{7#85Rs@WdwxyM*F=@m1W{9J?GreHm9`eP zycbEFaY!cA!i>Qa=bgKNV9RXn&5z{Wy~d|cwYzV9XBm>I z%#9aN5X(5ikF&aB-1i)wKE323y|Co#u>W+m-l$nR+JuqHYkd;!5VJg_dKE_fmRNY$ z_GSUODPdYRUM%X?Vz4XzRxIJ7ApoBf@(>}-KAR?F3eAx;)0{DaxoC|9{<2+9=)>8) zaew)}0vKpVwN5!$s(p6crequIHeP#u8%^aMh}Lhr(JJgA$13wk^d2aG7tPoqv0-Sy zcMsvS)Y{!zCxq`3TR4-C_m_Q#Ue8z`DEq9-e#KRoMr?QoA_me0q>c<86gih%SN5?6 zqCzBQ*j+eK40UrlZ@IpIOL3({PaIrjm{j-PDhc-8^y$4wFksjo&(dj4F?}=feAOKaqM@5sBILn%jhkGRzH!(u5MZDL;tOxez@;{ZfQ|sb>ae)1Z7s%9?GSSNb z)&=nGazDx^6S-mqO^FYCF-Srqh%Vj3Q(4o2Sw@7g2l0i1qa0kJQu|~FOxYyx;TKwp zyIk!V4hxq!AmL^P;iDMQ4Vj%#B4Xr$`Qm7TrG(V``XdqJpAAtUHN^@;EaP)W@_K4@ z9o_==$*5yXbpC)B=d8cx;t}tLFh0^`k+oG-CR-RFp`k7 zA9hJ?me%)C3vQTMf`RHhaDlu@0qZZplQVL_6%WC3-i=k9mk44bPq(GefEzdG!6V^~ z3gN{UN0LT-GtVeO_;c;PyDYzz&$@`+k>45_ENGXA$p~AHFke?m2$tY zu^)%0q|yccBi^tDvxy*(DvaThG!v(LKPjc*8>E%j+CjzrVga4jFomC;Wq9S)SAHNJ|Dhy*W-X)%27>t(FTWa=2(pbEc)!YMnBL|q;f^z(h19`3CVq6G3c zRlAFL66VKyE|>B8EdA5uT&vZ@z@K5^*bps`s+u}2wxc8iQT=8;j%Brxl2Nw 
z`YoUM1YPzkn+*f6I?C>=(lIt;Hk6_1L~p)Q%zF0gy8?SiJx-0mw6^HcA7mnNf%)Jg zZg|{C|Mg83CXZ*Agp2+2E3JM4q7w3Fq|rY-i@e014p$$k_()z&d`BOeIL5KdfMk31 zN&$vP2%y`!UK@3#e3bl=>`xpo6W{e*5M?Xu6addP-Y?vl{NIJLMD12ZF#Em|8tNx4 zggBhBB5%*H21eqM8Jv$+l-?``!`E? zS|TjWJqZKrb)88W_w#12MZz)Aqg0dW!8fN}DvK+Lt%FvXPo5Orr`YSWh*v`5Hm{>C zSUVGMZw9*}y+@BWoysWh=6h$H>-M&PfPZrZkoZQy$jdm;pM`HbaNv4#U(F1j+z?PY zPwV&E8{Uz}i$S1E2CewC$WMSvQAQx>a*9LuDIMrZX-vYfqpd>XfbzbVe8{S^{PX3d zwKgA(_5GDy*BRjOX^?As@lRT)$t)oZxt+2vXww4%SA6@Grlpud`9c1e?{069zn$yQ zufJ~U2H!Jf&iM^YMl#?R)xvl(%HPRWTUWPW8p1kWz`z*E!MfZ>@&8<^EzlawlzSGJn9}WgYM2&aYbcEAL zNtDuzK_h?d5uOH5v8^!7i_X<&){B?|Z&+*B4DMwPWiEgnRX3s0c3=dATNTgEn_T=X z_$_#D?zDb*)O$-rp_nTor(Cf0(g41P4U?kxa5cEWbT`7`SDLPdF#VLGlg@BM4V0 zH)^rI72J3RmJ7Qe4R6n=7V9($z*YNAKU^}{nODSr^+#>Ji{4(z{Q&1ZcGpRk^^w6n z51);_rUmBB|%(-pnRY5EYakw#mJZBr6%CF<~L~`F`%Xm znJ5NP^YqwxP>eF}u^uL}ZZw7W2I-urbF8(NQhAf@Ro(Ee=Ys{w-q)E*IRa7xmWk4p z_m$Ger4!KKPIH}Z%VmxCyRz1%zjgU{1SeGgtY&2z|KmA_o580@aZd~FF5MXySMdt@ zDA%3%Jba|?IpIH3Jn%Gcl>s+E{rhziZ}!8gHO5Z7Y5Uv{ZFn&>04)<06QHv)V&DRR z%$fuNQ5+=14C<*|&>wTe{`pwUj=lGMH2V`@n$N*cLa1-Aw3v2)Oc2PFP@O?lkNmZ( zVhQHBK;4USmDczgb*Jd`FP68f=D;L26q| zHaeya{MW@_(|6sF72AXl_rDSU7m)`y50QZR?73&1Wmh(9=y5pnwzpsJ!lo~Y(QTYo zEv9Ez`7vIR%Hpdlb)Rt;sYq{AxZfG>57YMtQ-Ke)i;6Q?ktNj=cD5yp2!?nXLK%&U zbsQIXEAeD2>XX+LIyImlsVUqxPayGFrB$`uuP^R!V?QU_PUgV>^roi9tgFqdcIr); z11&)nIoZzf*U*hM?A>b~IAz%6xx&UmDLc1x(8&*zhMQtGmriqVq+q|lR>@vt?Kt{- z@pJMFP+3hpnYor1`BGPycjWbly@D7P(bzFpFS`YvseupOKfQGX=0&XCjDLKsJo-Sp z_QPi9?TUjgK4(}ep@-2jMD@yknn;ylY-60guoL*oXfaQ+quD>NRrN)*ak)c4$UPj8) zW-X-EGfIVD$#~N(F2p}Z>9dwQWsh=YMkN|NF52wJGIpY0?D;VZoZRRA{JoiW69i)z zzc`_dBtM(yGI%~-u|oTzhJNA%?ji7uf^_4^wC0IfDuqjnUB&w{d(}bG$Vo-#v4iU9 zut?PbZnu!WecREVu< zPJrp0o&ZHII!xf;gvj}9M_JwsZjqv_0St5o0ss&gI(HODUapQ+9-SBC{a8tdJ;u@p zu;Zj0mLYvctw&8jXfOHQ_ox(on9D2D5_>UtsOM=yjymhO;D9_%3|!-ank;&35y=ZKn56>IN+0lVi2S_AV0 zy1Dzay~@ofuR^siTjwPx;wj|ZkF!D1lys$CzJ6viTCwm3rQOuFZcCrksgI zs@44Q8rE(J#gTH8%F$RPpMG$eWY&0nC1~f{TRmX$1h#Ak^`lJBMFVqC{{&1^S3eI-3q&34dn{#$t%k3*AA)igo?zT}M)Y3AXsv*|uSI zETNVr;gZ#)ZiwsrlHl6&fa@vln74HW>lQZngWi#jv-og3;W6lWpHQILZ^v!sMk|Y1 z2ImHpZ07^~D7&v2c)p&)8MOnS0|4VmV3<@W+LX{vL0V@6V7Y@$c||3 z@n+IrjnOOAiZ&T!@WCgYjm;9cEfzb1s!@CEQVRi5-o5T)M* z1IZyOm)G+x~Ve9UU^3Wvc%sUd28nksU|F5TqnRDJyU;&RB^F5?ROjQBaxZO z@`PgnBv6kJ9+<}ywK?2VQEr9ZxgNgPvrr#-lox@Gel#znANnBd0%F^tBTBI_UBPj* zAz{GQ3)!`}Hi&;aP(5=> z`p$v)2y9E%6VgU{r9V1lOT@(>>Q|o$BpzLPg0#F&87H?%T}skQhjNT%Ie*z$U$U#k z!4rnrU&ga;FR+n}Z;oY-2nKHKd?}cg4=AoRT2>q@mfHMK6U^%gy++N(P@QHz$tNWB! 
zPvpYMJ3eH2HpQi`EG;XDLZJS(U)L{T65)(f78EWYi^*#{m7_TH_SKK4%2W~>d5v>{ z29zOJkPYx5H_j|xSXMi{{BAeu1DnC02-=_j47#DquTB)Mz z*;&Cx`rBTsOWRl?tc7p${cN;=+%Pc7di?K-5bzTRPs)WY^!W9ahr5-VX|30sg((I; zEkb~$1%%F_Q?GIVkD4J#^^bRtnyqC;rB(QCs#3V{(t|r5+1g~+&;62&s>*?R5m`Gr z|NaEl%+!(&A>5)3^j$iYHN)J~6Sym<;e7P9UbaNUnhe)}8F%1pD0C zoUI-h)PSalm>eJRS-PV1qdb<*y`9*D2LStQZ@tDy8n$QOw)K)0Mdj*_^W#o!8x<#* z@@~9@vL~6Tk4p{*at%+TKrmi`SE=%1;k@jHQtxa>d1wG)oms4e#lBAbEW~V1zKLZh zd_RKd3Uwj%c&%^YokPK@in#oJ`QUF_lv!H#)XZS)q!4bdmI;1hoTLV#LwG&SGkx*M zuj}S1OdEzL$bSP&eKLjsk!0)%LxPEue_}Wm`X{idey_LB-{DJ$PCMTV?XN?-vSB9I zU!Okm8_UqsDo*4|EQy0kVOTX{v}@rxX0hu`7IgPKBCv#heFHpRVHOx4W~O#1T@tm= zRrSx~bR`jDB64q+_(mb6HC-;t?bVoT_0i@cg+1YyvfeCaNRunFq>VxSL$+S4>+g)M zIf$e%cn?oeA0~w&11^RecOaV^xhrE4B#PdWBUO88!sff;1HKtpdF&(dX6Af5!AMME z{CNi2$sOUVtVtH9SnmVL6idSXDiv==uLrAV_$li|CXE!YWkH7ICf+-;jlXrq#dZ1} z4}cot8%KA?Eowhoz=%(FJ*N(N6_kB9TO!nR@{~qPg<`CyBC`I`<9~lShWgjz=cfRO zaC`On(=|#pF+%JVwo$yHnEhqVtz4NEtYszm-8jNFdh0!$!@JT6*fU^i>y)}e+Y-~U z2TcC|P+>aXWDi9-6lrkHsG>s6O(}-l7lok*kt?@2v5?#Kw@RwyT*?6yvMx-@e0}3` z*oj?rpRfny0x$e3g@gG;JqP%2Fb6w(!ilFd7Rm4K7w`snYx2)I7jjXirk~WakMbC! z2V8$It#r`Nvgfl)&~N<>D0lx^)6r*!2R+Ibg9BCRWhqNe$}xuR5>q~L+ZLZO4F!K| zSYS+yTvjJveI2COJRs$Y=_JjTU0-=~|G@a*1J8^xZpV0$K_SYd%nTJLz5H%0%bQhdwa!A4J$d{DUl9csqL^{KQD^Clfj9 zlPrrx?3ogjDI9SqSy^@{gRU|DGsLnZ{@-oO6qOT6%Vz~lgrR6G*t;1Ys2Y=KdYs@f zP1{>1LcLwolm&+=*QL{K?P&S#B?qpq91A&kK+thvd|^Xa=|k9 z;9l9iJoD^y{(|>+?q5agl4F2$mLsI+pj= z%KhR>`3}(5(Cfc9!nTED$Ew|`_^;#>4JzNcA5a`OU!HTj$p|$91jw%PXkqwZ=doJg z{}%{g0s;Z)N2|I_f^Nb9P?x!SWV|f!8ZavPJOi4MA)wh(?d3VO7#-Uyl?GIue0hsL=n_3QyC9Q}GxycAs-=j&mCo1}c z{c~sxw&e_I;t#bv`F7WKq2+@1@M~KRa#dY*>=sKvRL6(>`GXA;uKA6{PM03G9P?U* zXzQ)%$|Oy?_oO8KcAABQxPBUdp&IudvSfMZdL7z_S6vSk=Pf5n4L3z8EX(5HmOunr z!(@Iu+_Dw(ew%K7mXR;8E$tr&WYc~i5A-a~H<5*)Z@)6~-SogvHgMCoS1Cux0+zRd zg#783f&Kb3ydJqxbb=h+dNa&$$~m3lUjcC0yQsK@Iu!b}>6dLY4lGnG0>VhzP`g=6ri2WG0Ai$ zH#H9aBe1dA;d=aWsM}kGJRFWxUU4#PTVf&Fp0$fna!Wb+fVL^1VHueKRI9W82Te&o zGG|wi{n$HMrlru@=1;N2mp{Hd>=U zhnGmbzpYUA>0y)=@XN@6T^PdmKF_7>M2xcc_VFlK2=Df0OA!>sLsSk}PW^!41fqQP zdwRzYG~cAEu8d#&Mj{a0O(tAsH@jZ{P54Patlyx7`8r2{bLHc4pY`3WS-z!sW+f{WkXc756?kEQBuOJJOuS7Z{XXcIjaO>vPr9 zt@__W1IfO|OF;ov5$718$63DW=^K1>Ac!4S20f%Lp{dhE@Z#aTiShcoch%CA^YPP< zKR!n%+-C&%sVA#u{2)8bC_vlakUNuhs#i|udSZLvfDm+nXqVfy62|9)(u++H3EO5{ zxe7pg`y%hK>nh4|Ol>7%GYt)E7lXO{e*7q(`K)5}l8K_WCan=`zL?@88PRFOPS^acdKP-Ld4P~>t+H5NGzv9;)*ng_O$t7h_iU%7IS2}N1H#O=HU zn(dbbmI_@357-*kM-1M3&3MDKP$)E?Rv7ZB`ba`wWN=$&G_#Y=j)P)&jv|27`Sz2H z%e3EL-w;75&XeB@<1b%)R8jriak1PfzjC`RP?6HUNe|s9LjCW{{)gbkqUIv<<$P<2 zb(E{06lWwcOtHj-vpp&e$8rw&$fpe+6 zSL#t~C?Cwyh;|TqZhHgNRHfbgt}w6EPD;^TIen}fHTc9I&4rE?WhHh_G>iUMaR{80 z9#qNrnPRtrL)7vSgBVC=)`o0mC4IkyHH^~%nY+X1JL2-#g zrT1rT_&4lw7vG1A8MTQYzzX^824zm?JRu01eEcEa*1SExrpQw!UMx{pt?et3{6xcZ zTFV!?pI6_OU>?VOEDw*;l6{PMKIP!HvUQ(Mn7!-$=oYc}&RK4sQpae|6*!+J3TG|} zaLRhkoM-_=RRt#eo~G9>fc9nceJsRLZKTlWDohL=b8O=1tTS*AVSc>yOpl6!$pPbk zk(YKAhjMxJlPjw;df+~p#U`K!!y_N{V08kVzq<$2BDKbW(MACYy%o>H=}CnUT=5f` zSv?YUfE|vNNy)UsPkeV>qn&RSh|Ioa-qbr~A4jsutNo;w0F0{Q;nFIe4ZbasJ#7fo z!Z>`BXG6M;!vnX;3S$L?*nsWoncy1d9zA0&q_opAh#8gBt37ME9!P+rysNQp&RIH| z>p5Ro4Xb_-DR8HyM!7A`8F`?pgUjw%pcl@bemJL5(Ux=X+%WJ?qpT%_yjZH_!J<+# z@4WRG(r};ipcS>`l9GNmYEd3kk>H**m$TQlA>t4YOHIi z_x{eF80iz#3C*)CD~q>|bScCkz}Em{s?ln*CFP?rdErhv=ky$7RWbLkhse2vub0!` zNk6DyL<^OxhsH)}l>&LLJ?g#eAtpm!zWQ%!3@|AQ;xm&|FZ>?70#Un9#6%?{a&cm{ zi3^e!oguFacoJ+4{k7Mt{w5hBhYD)@%PqCjUBmY0KJj;>B~hlR{jIr)V|<=uMW;cb z|4tG732Pa711;~&kaV}!VkUW*bs#C*=_c%#&B>6HJ^cSM_EvFG_g&k!gba$5q#%uS z$I#s>jii)FGk|nA0@58rgQRpxBPrcAbW3-~Jbyf|`+Cp&`8*r%wzC5c>$ldij^n#T 
z-PUJOIU}tq#;k8=hbI_%QW`a4pDa<|b^{Y~5?N*75a-QO!eIPR9wfi8-&3aYXpG#*nmCd@2Y9I4Ti(LLa+b>e;;WC!TkeJK5&-7EZsMY-?P9pjc2@=6%{; z&HhrU8B|amfCP)O`tFaf`J_-GWb|OY^bj>N@N%0I~{=k3B8ln^~`bT@E{Sf<@?pD`?Xi`cBtH^wUP%SA+` z(*Oyh|5Zqzi#|cmJSR?N6vdT&*-MEYA^r(T@ONl5mlClAC&`U}6!JSj(Na<9MHEBMi6mUvr0u+H9&x((rsUj+_(hdYpwAvh=*o1)dhta-%u zT~q;czsf^xg_#?i1y|b4PBjP9tcvqp$R(X36(Gq@S`j!Y=pSzQfp53NUkT@0g|%D8 z(P2;_nrLyJ%UsWejmPa)&TTwKGcmA82o^6THZiOASCD$6b}ew)vOn9eeaiDck`Y8f zmkI42q2z=rCR%q(SX0S+gvp4p%9Db>(opGp4sd0aDmTYZcaxAZA`-s}xG^Dk?r)e+ zHA^&k-VrfNenaqK%EOp_muiA0PK)4rCz@7o7A$I9qR$r#;|LEj*C$DM=ob^6`-E2^ zGW61Si=~9``g6^e7hFrQb0lWcI5EJ+L~?nsnD7V-P1N z)vM?0f)!iA6g+DbR6cz|?IK))!I5QY(RWPw#jiVKGOlGSlkGBHP>DGq4e64i?Qd@u z3DHD88*Q@olD=!f@`xghIncUlcNN1+!HJW~VCZrjd}nw);DuCa6Mt6V$25Q75sN$< zdN&6!E{6qF03PRqztClF@dHMUcQ49)MJS%c9Adc9Zi^$}oAUhL56xSM1wVBFh)Dlw zUK5`nSYwf?fshTf_+EZUn#1g`P||z#8`}#GiZE^HdL(wdC;};Efq$)G3NFXROPgiS z_#{J}mfdwTPHVOu=9b3~$F`M(vNEPO$-=Q?5QUetxtwZSQV$__`@B-~B6ma12ZE{L zuSOaMf*w{`Cz_}t)d0aB^eMzD99HC;^um=g;7z;N?-je^$UcsgEd%9fpl97PI9Z;o zD5LUza=jF@)RG{5bh!vEEicyA4R}*>fm4gLQ%A=JDuxk#P~?x(=m~tVlx3C<^nB(4 zyN{}jlR~TEV~W%wzd2GQv(wG(IAO2jX746jY} z_rg<}^pa^OanJ)lnN5Du$*GTA<6`aa=bQRv&wtG3#M(X?OdSs@lrQ^wW|%8_FOw&1 zgyFrxut*_r1$)vZ;Sf4a#{{y!vqU0auz7C*iZ{Ns1=HZ6QFVf5hrZMNHB~tT`Z|x$ z{GdG;<6K3r3_Ns~C7ubGwdQCEJ`N&GPw)Fw$He|+h6 zcsUW9en#yHefU?;`cE?oXdy^u_b6N0&t$_!eE4F7s;z<}S*2A3&3$;{NM*h9BFk%B zb6j2mhMM@pXwsPdq92MMj3od>XmJrJ25sB7Z_iHUuV;?;>W2x?b&vXayP@10Df)nCsdW0_OhYQgOJ}MQ=KQ^mb?Q!p4la5G( z^c~ysa#3l4^%z4oCX}jO(u{xvSG_j;F+-1r7qN$zqaXbtXewK3i2Bb6M+DQhuY)rO zJ)2TYM}qz|w3fHjf0}n_z5_$GU-*ooCK!J{ix-R;TT2anRrbeH4&zZ&r9Kut4;%Dm zESRp9jqQR^@*PA2x`2H-F&#bNCKJYreTh>++T?OzcB1$O$bM%~563=_MGJbu5Uaw6 z(WRSm7EH4dxSAF?MDV9=d{EsFZQHAf0)Sa~QSUfwE4}DqF4E{}TNj@G`l4VcLV4;F z;j?Yu9YP%%dQ#whl)XI0CKL+=n}vOv8vnU*;6!!i#j)Fl)QCjv5Ol)80B4!}mr%eN z9b*u~`%SOQwt6Iu);Q-lZ^kXtVyeXV^h^{{c#j({ zAX^9gxX-2wn0hP{cMbe&8V?V+&Mnwj_QO)CMmo(JMSDD2r9xx&!!JM0)+2$%R*>*L zUh^-ft3t1?Mu?~*dONyDa)WGn)^HuecbHW##9ZF-XLbAlt;CM4%ZLItgwkQ)q%9pT zP-vsM`nifC2QJzYtkq;8Xgv6sN<)^a@95c-PXGeVBd3g zYP7v->&bZ@xnseoeW24xt0_FlY{>cOi{GfJiX;2DL0&ycO^T|cg979MA>)8mw)^?y zmx2sJ9L~>130Z($)=jp-v;rZ8U}B<03g7Can?!d=r~0CSTN99lj&D0{Gc_k$^7- ziE=%WkmP-v<`Vc7!#IXVWDBR_9HJG+ZY4z#ZIWV1s#0Dwy8OLTYLXLpW97V~b}d}> zG|Y7FT6SCp67QA%@HjnC?%mU4Dh_~?A~{y;N#E_P0ii0cNY~GmWzp~_*BVhGAs$-= zIdRqCgzPq?H}ww+*TL8)_Ew;Ye)02WvJG5IB7~91eV>}Q);f5?E0;dXPRUVT{^m%lBhB_16EyF9!EIOJR-At*Bv{RsqA)FS;MWsj?%%{JAY_A&T zqbEQBV!#F1xGXwE;8hKm!BlENecgHRJH^HLdYLBqr5klc&EHQ5(%+M?u`)b+S3*JHy+fjR2R&>ue9oh)_tHU;;&qNyt zN`mC|ff`B{FXP~nHrVfIbIP^`G$qI1TBP@dPa#s8nG`i>Erbgv0kmgrow1#Vdi9mG zO~Fq-o9bVOM)l@}0GWix1*FYRMM%J8PbEcjajD{m4GEXG8@x#@)wB3^EjZoEH&H7* zqcmZxZ~V5R0YbsEbww(hP34A?G9)KiG#cj=+*vo!2703X`K)pV84#~3u_OFv%BTGA zl>f)4_9B7#uQo)5MsR@pZ6^f}cPgdl-3D}9$yIt|DG-qX4d9^t1&nAdUcaK6@JHna ztI&L9E`N3r@E%X=PsAb*u(VnOmR4qKpv@5c#ejq!sWPjuj~w6214Px42P#eSq#*IH z^695#r#W^6a8!ZGmUik~e%LLcVer$1X9K)-!Kq3-w7v%73wv56FM8PEqz!dqCmKO1 zHT!{Y)ne4|+pXyG8p757$qk2gnkGP7FQCy&Z(W3evmpI6zH30kDW!U3S~_>$9gqQH zGXE*Xc^G3|lpOvYQrx5uRDQO$Jz$p3** z@axpU#6}N+_5TMlypr---$78g)XeeCLZpw4bP?s6%KbXo`t|da$hxCJ8+9uCEd`gB z2jZSo!MyMzvH;e%Ydbw1;qCdG)7JGj7PR%II$7-NrU}8A3>gVY+fz6Q0j580Sjy7A zyA`vXru5bNnyME%a1E8Yu_#4m;+eSvL;RGcaS14~4M7%D?(#jQPm5HtkyP?}neF=1 zGS@-Lr7qlP4ZM#Q$e9hWzYZ=O+5=}H+4PEmlQeG;?X6!uUbba>C?|(h!$Y}3uVTFV z#YLx`H(ewWnKBo|&g1zWa?PDb-|+~clBZTy^8o|s0+r;@6WzQ^TK=sVIO#QOOosbb z+L^#>43d$lml*cXLtf0a)vO4fCav.X}_FAX~7APo)fzq~f6Rn4O`zfCb0*M}b7 z6$p!;E0}4$%>nJ`bW?pr*R+9Ry3vj<;eI^zO^%qwKN1pvUVJvY_gW#OC|r$|;dZLs z1mdjDUeGc`%LxIfOJJV?MvYKG2~k+TZN4~Af`f>eGlh~kIac||p3eHzcRiM1qu?ck 
zgl_=Y-)&D&Z-MIP^J+dnhZ@(;zk|MyB2K!ib3ZW&{iL&8hgTs|fNYqnP=1FIE z7>VvuJ?`T+IH$wK3^D5!vn~%I!jKGndE^lBIq`xqK~F5M8gLpZ-Kzn%-%Xs~wL0mN zLD9JJEp%md@h!k_r&EXeSOFYN`bYc$z@H-|%9q~-zmKcb z&3T;rRmqTo#7lF6H!raSbkh!$z0SK)!#XG$q&^`{FS=i_f2>SCPrIKdpyJ{In&@+Y z1hFo&f86;omYxj@LR{>S@h{&NHs-)jHTTC(>ySgR)>l1%%T6E75 zxO}E;Y)k1}KRQG+nfWY8Q7LNbh2=`kvpYn&kXu2TlmtU+<#YNje|M9>{`l<&rE6&A zGxU8uQC2;Y+W0l)4rD8`Ndf4Mb4oD2){omx+KD~j3^$toWkcEu-cPr)0Z#Ya8R-+7 z`{PLS3l8syZAai%;s`MzTW7awtn}6Hmi%21s5-iGzlT&cCzRl_fH|J$@CL(U<^{KT zrWG+&SP>9BZjr+4M-U7M12?Qaqoay=h6yMEm+1p~wm#HB%p&|@rMu<9VJ9%bf%Sjn zQ)R794lp7{>@)h<0PL8-P57EWrh1)9<0Xn4ThJYAFP!^&DWk)=OW>~tfJzQ%09`rw z2}nTvlqBKyNJ+x@QHZg;46) zfFMOeqZ&nZO7ObXsI%Z8HgO=9C;BZ2&`QQUJ&*ylp$YqReoIhh-+s5mVr{xr`1pVw z{)Sb#3PX^sB?_DG?MCu0!gf7A&Ln|6JqXpG>F_RW6a_u$T%>`KRpduqEQZ-y;6eF9 zL;<*{3aGT9CJCVGW293_DE|Me(*8>rFhFBQ7Wwu9z}~t?!||@+lyGswHGH?%1GA9f z&h}3Pj%$t2wUKKU-Uc1jWpA_$dWu4`p!5U0F8bVS0Y5Kl5ZZ+TUfi2yx-pi8?>l>| zVocW&oVqqdg!+88j1yUXJM+RC_1!agR0{MQ885akd%u8zwd|~@{ zr<2cNxoIRu_1k`EG^P7w0j(sgheD_9yTeOQ{2s%_t8(sZ(xze9jJW@!1{8Rt=(K&s zdOWruQ5=4iJ@ zUT%ZXUF~6(#~X_};4!~8ageBAO6oQ2J`n-6N$35sIer$*b3KkRt0AHYk42UB6ttes z-Fj~bWOH`Tt80xmaWkv7;kxUGG#OTGPv1U$sAe9yWg^NDU7;b7Z$z{}!7=Os5T}j+ z$Cs!T?!~=T-ZAk>+xzLRgJLOaJ`)53XusUQZIXmv?@3p%QkH2v-?xO^4 zaQjO2Jw;wI!dXvOF1mimG!%^SXV3^t44Vpmg0*!LHpdpw>yRhwZhxAT$-9&K=uDR( zE@KpM|GGV_HN3}V>o7XB zW4!t=1k6Yaf@<#bpiCi+Hp;rVRfF3J1?kQ}H*6zDIITMV@;l0s#!6~sTXya1-G1&+ z!wPl_dqR*df%*GyS3oGqv4Z{`+WBa%SDGBT?ldWaSH=yB`7!FXM)VeYU-55%6JSJY zl#0t-AA>#PFl#LI@<`!%P)sY@*{9cVH!)nG0u^U z3Z^8J3jLo2HdUGb=q*InG^87qwzx5D$J7YM%MUSKsx1~hNEYU3}Xi949!+AZ>BeM8PIL!C=)Fw3_4uN~D zQ(O?Q{12zWxUyi&+@Rb>dQed)FGe;bPjPyKoQ=b|G?txgq}#e1m=fO5re-F1EhB}C zq(a2?4J%@7v_y}Ugt!nK!o6}&!W`-**Ibq=k+!|iHhNrTNVi>7$}*K#L@l^zaCIa; zGxoMn7;{}`)+SxYhiJ?a26hUJ66rm*yV*V`b^o-xn){UyBw>^lYT$NhUXt_3We6xz zKq1`U2D`$ljqn(3k{AVlan0C!>fXXE@Dy*=O1MCmjYplcOg?Mxjc&!>JOb!rW|wx} zfJZp2Q4>@>3oieXno!p%UpIQ2$9!A;Bb|UbYiUZ`ik0)6Iwc#=@6l`UKqndX6-P=F z=zd$ulRcDl19sl)lYWTQs6|)OlJH~*r>zXFN3Sr~Ph+`H^!KTKy5n4-_B*eFnx&Bm zwiWI^o-@R$iF0@krG7svy>}zqXv&4@b zuxP(m`EAD{);id-6BIv;B^VTq(mp;UB4y~m#}rSYijT{haQBjo8cHvsp9olL)A<*x zNcI}u?kWd5fo7A8<4&TcvM}JXm`ipeiZWbf_0KQ%Y28WEhuwdnO$a{%bT)w%N~Em8 zEipq;@ouKh`&Y_*2CqE&5N%Qgss?3{92-S)=05vdzwc`rcLMhizjrlgM((a1ym&Vb z-k!X6Bc5agPDeYO&j7txID!5TQrAQG$$M7s`qguJ*Vpa6`Kjzn81Ypi-)mI#vvygd z;W!wncEPn?q&pe?TsCy(-UvxzLA)g*hfPXW4DUb*I5E732wp93tI&VGZcP`oG3!6< zn~sLxRb`ZHK48~}n%X5yKMC9HGc|?G(om7=*mDWJ`xnTJ(zqftEfHPqb>0EsOoG9Vy$Q|!;LshtP zwEfZwbatPHA`1k~I>``5teq%ppqv5Mid<}~WisJL+2EiL-<0+L+zdUqJ!0 zd$EuETDKpzkfjpdeuQXvy1Ou|%4H$LtlmPHZeq8RID|T0&kwK z^y!3qoNI5JD0v61B$Y2W!vr}a8wT1w2rMIYg4?$ln9wG|tO`)|)R0Ub(-)L@vPrp3 z+2`45QZ2YV{}LozP=A_!?Of9*%=f&QmC0@9W&TQVEQk4j$-3VpfYsPFMjJlutU{kG zK37`{r*Tqo6L73Ui(gV8Y&}+uI(6hp!ywrFP_p!56pNvb3X@~yN_0k*LCFdJ^_Wor87N!41mbIrYKE2ruUB3b|EMv~kx1+pNbZ-iA7KMz^&I~bdd7vZ zSjB_c3BwD{^sLSCZiZfYN*}9c^mYBlVU`-tV5fz08ON`=jfGSUrmsPx;PL!jJ1kMq z%Rfl4zUU&sQWe*Plb`hLfQK49u5qsobl%pxoyQO1h{H6)p{g_YDR$R{(I9OJiJR}^klzq zA#bKS6p@ITh#0{RW|#T&(wJn#WUXLhztR^Y>@2dcQn$PE+jzFk3;PWCUHa#$^b0O- z`?6-Yc2!YVRN0MJ8<8iuC9I|a#*3Akp~2>+ngp)aDy#if7RN;Z+?bd;F~T1fTrH|a zrB>ptJN5o{=4HaQ@K9x@AVINk;*~Pn*qkV0*wg(=~6G-L-sM0tL8DA5BrsHC# zSL`%4IY}F%kD^1~Jfo$CGD`qY$VTCs(A=-m;%ZXkOM1=6W|(G!!b^`J=nP=Kd7dLf zg|aVwTFz9EX%TgL7Umqgi&WFZC+U>h<%&Sn-}Y-P&yA-=$jYcj55A>ZKbHX{hCiP8 zlke!}b92lIN+qdLA|LjbiiU2^b?3qQUjkt#{$vh;#?4~9*R_0+cN_XemnyCTafBin zf;K2_Os-DB7whA;tsnDTF?{;N?-guBw%uS+Y%6nk{C9NKn+C2ar2OV2n8`n7lU~RbK_Q6QC`T|ULrqFI@EAYr~Nu|6mqngOu z4^{~!XBpSGm33qEB$l$59{ghlX`7M-4JK6{aTA>moGymINSB-j6WA9nDSdXV0z6p* 
zamNr2*C{Z}0BeCcX+gTQrz6DmEOpx)#&3PogUY5pg0eh^oR^T!i~VGc-%KGnRyv|h zd1a2AL56SU0|&i@z2*$zeuNq=uyr=56&22YJ{*=%N*pE{>@i{BvXe#RqQ5!n-P>}m z#3HvH3RKU*K$+7n2<)+U1&diggErDUUY~UsMU?$rh@F4I(iO;6GMOQsi!WViInW(p>5E97SIuhxa559 zLE-w~b@}2X)zrj=R8m%%5r}!07&ER+_2=@mWfD?amICl=>d zjF*^lU>uppeL`N@4R`d2r|&{RXFAv@hHV4;oY$+%I_zVpW*bLgcA zjEAf>5y~|Lx~YSuwa(0#KceQ;m2{2sXRD=0AMitdct6^)a`OY^MTD$KO+4hloO+|v zvKWP7fpH=1tLT4PB_A5sub%{3_QKg+sfhvJD_wekH-T>|4}kMvq;2$#d5q&CvD33- z19eL`yP(D-9EcbPsXM)3s+G12+&q-P?0tzw^~fbm_B{G!QI_$b#_Vwloty`*W->p5 zPZ(fIZ`9x%kyan%LajWP1U>J}>T)?Tc{3TGY*0kt7DEnR7WfbcL<1e{SKvSUpYrKL ztYNjF9U=P0KK`U=T0kUxEA!GT#HrW9^b>F(N0^YEljN|?hfh4C{v z$T-%1*5^=WGffD>P=-+T8N>*DBjxZ2bMcNtJ#jzsj)G%fCq_pGVpqInp@<@WTzYjX zkr;HF_tjY2TX|xEotm01X~=|O^NzT4n}=Q+`nQTVPs5%X zr9tLWZ&E@tdHq6KImI$Xe*q0uhV(!NK%z?C$5A=t5S7AF>K|1h#!?$ICoLT#2W>ne z&vTG>n_n0ahM(tYMXva(Yd#D62B>;UKsH-GNCBBl?$HXm*orHI+)BDUsg z+IX-9tQQWINk27XJ!CmLZD zUZpZ5{&vYW`gjso!pQ%Myx!`8PA_@meSh%h@!@lkZ$agw+hcGAWI{&w_a-;yPj3!? zcQYBO?uDI2Ib_sE_jCb=4TdzS>|IR^AI~FrihW*E@_>rVv_CVn++jKIqK46gJLn1m zKgj_e>DwB7Cfnz}i(dv?YV-D~l{(t?>lSz7+yGCaK!+z1wt_HZXa=B3_6!Nt1x>o= zi(~DlBl968VWwpGoV-;?h=0qnJ@c)4mZq{sQTP+vcQ>Jn#^3lcTO{j;M!)jr{fr`% z`~O6B(g?&wUNw`~qH_)Ho(0}38hvgu+`h|+R0qye-jwExsPWBP{q;2&4U_ zp~#*K-+6xJp50L!3S%Vzk1qG$^zDvNf^w9g(5Ho=Tk?MY;ReH|6tm6g{SffiXU` zBm(OxjlW2Q^0=e2A{LhVDAL+R&Ne~>GAl3!#-!abBfnM8f^(zRNN3_`JmUw;YrD%+ z1(_N$&xv~`(Sq=NnvkREVdxED=l)$t#Gfe-e2wS2o-n;w=50D+43MystqfW&Hkb8? zvU)7E6Lkfv)Ck*H91gb06C&abJ?(m@(d%YfpnGy5<@<>T5h#S*RzCoiVSY_b67&m$UPhC%Kx_oN{*vJG{HEr&A~5H$Av;(A;u z4>}IrGaCFcW*+7N&**7yT%*oroSBX?qbHD&dPb#6&q;du%EjoDll(6;HKHYi*g;zC z;7dzb+d9MSbXejgY|}_Vo5wt75NnWEN|)^JG^2PL`%Ck3THUEY?47j%#Ybg3S%;p3 zR(+vI)Bmlx(IK~k=po=LRyjMfLd7ocPx<;}65*zP7=A?+SrAk(+;jun?7JlW5&*kv zKD?$5B?WC06%VpQ*r3(UWSH`Y#FvZ2B*cSQhM)UN*YJ;RPDk$%p&I>v>Ta0)R?jN- zUSO)^gsV2a*0dE7&eow|Wk8atg!q8Fvwa7AUDxF`KMSYH89i}d0PdOYCbp`%->(u_ z973&Cs?v`Fnke(ne6b5Uc+%P!62(h@S8nD+X_5U3ul-f$Np|BHtonx7tjT4r=fgxE zaO7=DObGsGVbDtT{p|&oEP^ujNG;pG5t@&cMGxan%U-2YoL)+4t#8XTT-N4({xXAy zLrZ!MSvO+SfC=77+u)btL1h?ngc%NUBNMC4vNKKmAjwmGhe~@k$jt>e%nCX|0^DFM z!I&vMpH9n)?wDv}N-X$iP9~1iPos4;%!Nh>BfLo0^!v~XUUWt=QUl?ETzCA3(`{nO z$eeS5Q~2Jno1bdiGOB~wzE6Lb#jS4BE-uZ=x+=bsXV*DYvQdGMTTESd>7IaojmT5~ zXh%a}@@(7E5~hIY_yT!}fPe0+_M1O_pzR60sgd)1R-Dlyn@xQ$qZ!uzFY2zspYL@ zbAhP?rBi)D>OsT9u>NV8TRc%#%P#6{bn7rkGqEgz1eSL|nx=o=QmhHfy9ol7&YB^ico0FaI?` zymD+rbvYh0A;d_UDKmVq(M%bqf)y)pHuMnO@ME+@g8<4e4wV26A_C6 zc_o4k1CWany^xT2{`e=1KJY=&yP>kqL?|ff2ctQ5Z3)?$KbXVftyYzz!eujX8nvyt zP^eH>=Ozo2eo@n^x0kk9$mXY5xHiZl=O%@Z`wbbzV^}PQsY~MOpIw`@IqBE_nzqJD zM(fyiB|Za^?`{Tzh51E2L;UX?&8qCYpnPF>1ierj@Jq-yZQ>rcI|@1Xm)C2(ho59w z3_nZyYl5HVC>vUjI~hv2RC6!W;4SzHReSq$$aY@FAiKhdJloN?a)6tC4ZFJ%J zX(hy9x|o%Q%Ud$t-PL)_WgEshWV^aM4u)PhzLfJF#To%X39C4MaZSqlkBdAbGvXN! 
zrKS0ZE&JB6ZbpsA6L~ApMS#C?UScuHJzW5BA=YB6vA<9=VLxQy$pBqJ0n38*v#)=& zN9~qi5I>rvmQzdW_Pa#gB?^HY4-3n|;{>JU?R7Zx&Ujs4URYf?{)*g6u8AdRmBOmw zWj^#7@(f0SzHhhSC-ER4y$$4e3z#0QXuq=ST6LKLSjV4^t@Xs==%;8C-&no12D8z7 zV$c9_l3~!YFX#;e=>24|F4Cds$5y%OQ@J!1Qz_B8Rq)#@gcG;#Js3pq^c9To(e$q0 z^2o8(LcCPX-a<1GM4@##ml$cKORWIhaX5c4+NCbUfQh&q<46Ft0L{vC*MoI7>*-W+ zz!VUIFS|(zhz9)LQoC)gS57B7g&ABgFwRenYN0GTtwC+Aio=1qEsVpU#q)Xl#`heT z^}`=Rm(E#gwxc8(+RxFar7pB-?tfUrS;90yg)fr%56E9!V+nW-!x>Llso`2D!7M@y z6L_Pw6uKU;8u54t>T!kp;fLE%!gEvEjJ4n6eZS2K0CIwf=2~c@IZ4BA@Pfd_{P;UY zwN0Z*=Nkcdn~{vF5q52*4ggFu6tHf{=zM1BE22ba zrHV!<3U&yn7=VzQl-)2B#@JjuidOZC`_Vm72s!PQLI0WuF8Z+ISr0bg)>N{_0sDFF z{Vbmv2>6V9#w^|D;m8RgDc!$`ShB`=^XR6KFY~u<;i&zE05Og%tGIjgr+qqW_V_YH zJUmpKl5W!&26)U;yb9n66OqVELspAf`%Qb*;}fd@=#NVEHgX{A7$yXON+%1w=1xT5 zoE2YVg^v_FYPNNjR3ZV8)A%Jj>tAwR0yq!cfvk>ltdh)36z2r+OP%9om8Q{?(oGCk zR&Ul0#pjF7^zY~803_*!-pn>p>Y#TAL*3yDa@u?tW=4K2daolsubHxw6sbndXBn?K zy^>!_JV~bqwR0q3JPr|YLQ~w2P{?gJ%$((di9-_b9Uv8YkZRVe97~s&#=<$S zMQDdZ%aw(sBeVGfFwwBlatfZ)nkXPgmGF5V8Cvy=JbbCS1yaaP*SD=2oM$xXC~xRR zs9@43+8=LPVyTQ(v?PA333vdZ);#TZi5IB7#FrR35*x{pUl$)E*$U9QiET&XmXr8& z(Z!e}Icd6Vh-6`xaVUm=gAcW*z;!Hu__*_0pOtgm_#D=&ucpuSH0M=2MUBy;JZq zC=FU{d^$WQ;|X+g2W%{I8c$yC>0OrNeIBnR1vNtaZ4%5>jiKx})@oA-kaAs?FRFQ! z{gsd^2Yn30g7-Oh$`Ntro8N#vd(hO>dWSwrB&~O&i8wTt$!qC$A|_U0j3WH5EC&68 zHlW@)UnZRYOt|qPPIaRD9qaxtQf9!@hFhgSA+ZdvxTIN5z};AIS+GBxpCuqP(fqn# zmYwZ`Mw#8YK&UqV2{*A-j!PU1rxhzvmP>4qmdKf$;y6RnpT=65>m>n}n3C2@@>0xC zGhZD|e0*mQPnr+YbetDe^SutY?Hd~!8Whh@C-^>0emFdFx7^*?DSC~KhoCc^>-iC3 z$Ai>VYo`>Ag*2;U3WY_N3+z0Oht4XnOjf)*6DQctBLafYOv-n_1#D>~hCnD0Kway-Fd$GMjvGW2M zr?0D~=aw)yEiP)3t2sW8b8UMgL(X*U()D4Yt64QhV3JCtrAV#m8DNQ?5Fd@-Jm9Lc zivSgGv48%l&BFPkt2 zHV@@iMbcqP7?=Lx8VM&xjWd(Pj|;@((a+Eh)Od=wJ{U!5{ZOG2u_y5q>LU@uIyqK4 zcnI#lJ3aKQ8LQ>TcjwX_H$Gy3$?V@#e2^aDc?UCm7f6>MEh`+^3YENJ;d@{><*>`~ z9%jUXdCU78PF}g=l!%~)0F;ni9kV_0S4s_o^P_Bg7R4KMmX_jB*VA&DPX@oX7`FKI9cL;C0|)B1_xO(mxX zmjY~|Faa(_>*u5m!Fz#w;e9fgB;|+VKU||-=Lo}=Ha^m{=YjlRM`Qm+{fx|k_{uj- z^d)Nbg%4Qzaza%xklZFyibF%!5w9u3=70WOnw%=?@5hV84HjeVKRjE{3Z)q^l7EQr zLY1HBj?rK9FLxaD88ffeC87_C4%aftb?>{$Wgu=>=@9v#>tUC5v%DiDj;7L$Avhw_ z{(jyrl&2*^)1yDT)-2+zF~=0*04)d z_wLS6fUNIgdc8FDDSj`vy8tXheX5#+NWN`=?m6P`DedbhKl2Sf^(Cf_!uCkEF^5Du zvu!L$vdHbOm=7kM;Bt28z?BgxCzWUr_>Go42(Q#-VRr|+z@*gLbMqN=R^9>T6qGj6l(=c6c>r?}COERgo@ zz2G?2*ky+>$J4=WB-Ngx?UyxVe{zrfYVa= z&aeZ7RDE4EhfLumv)ap3?=Q$)Ohz>_z+uEm0p~@gggc@P!3PFC{M+x6#veLb$p*y^ zsDz~;O#zFqUT+v)slX)Zy93uv7g2RISmG3N_Qwb>*o>a`;61ZC3bPt;vJ6d&CQUgw zrLQvry*9uf?T$_}?%#|R+ET_Ef$4JPDg8#@U`EfUbVML|-DSicexc;2n=@wZ-g7@( zvR<1tXklv1%rWI;K3!mfHDz|{6db{vr%*j~8*2g+XW3VW8*4;WVzR=X>HO zLZIGQ(vH-ag}k<87dZKcP)4#VERpFDJpte@tyifz?ui2exKkH#w%gIgh0>xh}M z>OD;_t3(a+>8Q!W;mu@@cic5*2W*79tw!epz{!%(?+RvNgXa7Ouc=Q>y#=sFZonmB zbrQEuX1<%cpM+WKka{;Qk4vON2YTet8%b}yPFmyV)l;hI?0^8mNRthA3jOyAPa2V> zCT-MiOFV)M_tIIX>kUeP7fFA_gj>~I?qrM20^7=T2rliUf%n z?i_chQ*)fZ9_qksY5D-JTJyl9zRugr5Vy>36I1-K>u2TLm%)fx`Q?Y*PM@k2p<#f7 zv$5#*;~VpvjR+05WLy^ivkkBk01bwYAg4pCW7q(s>2=L{c;%X3$HS1d>2`}b-|NSw z9R@XU2?w%0e-kVY>czIS$_n0W?h2y5DlG(&ok=6$7u5OwUC@rtz!I{^7*Y*;se(4Y zK95rq`ld`(z5>m03%R32^OSH*XMj4}8vozMtlk?Zv1j%yky?Wyw!8p9XV08!y2NasgnAN~qIaCGRj)@#X4^`TblX8MetLY?jkfBu z%BI>L=!pbY-RFj)BFQTtyt&%*{**&ZY(#6_sv^rZFjV>;ME3hVK`N_?IjAKBFr#@! zSRx5~3^>c%$T)ixc63vOeVpyETaZ9Mu1^NXYJ1Fo$y?jjxF%UaSh`8jx(c8@3dKBq zN9oMK=bZbpe~5?FCG(5O(^lwu{mqxAE=~CEK~4n6c=~6NCx=f3<=#ovgGGsr;_uDM z!lDWm?2K7EsJ7(E5Y&L5FQG+xTTatiSHkf|5B9mS%H@lj&j)hl1#r~)3yK5Xe#9x! 
z-@;F4>ktV!dxcRob!Oy!JbHJ5rqmT^N`>ExcWV*<9FpREGF+9s`#LYgix{|bs;Zt;*dLHJ9d;d{=_PYNA4LH z%>KHH7I-4@J?8|<(PwEZK#}WB_pcD@{bKKEkWWYtMf}b%` zsV6z?2}n3Iyex@-Z*yi|zxJ1R^c8$p*|2L@Z!2U_ z8Hwsixx<_Ee#GLEpmR}6_#u2i~8*S6d3 zNH9v$kB5DohMybjs|^NAQ~M=n21b9Uhl&2ThQ)}t)wMdQASPg&PzbH3+4a#!btbl) zy*WM2fWKZG{(qD_^2d^=F!X`qMJC@fg> zm*oKTEqyrrKV~VvH(+TW&YV3Ky$WNd4$MiAJ~7zoH-(-$4$snFcp248(ve#0g;d@O zU{feYYz`+rf7VL=td!dMB$fIYY>Ukld(`e;Ko|glEu$ZXhsIlB*^0M#`a$D zRL~0h_D2(p-Yy&4%rwk#4X_a_>G?$wbY%&01+rB?2*z~8>j-3`uc)Y*Bqk09eXhDR zy3YYW?IqD>Aw|lGDe+s+#XpjZWp-BM~4p3X*`u~9P_7vZR0evJ5nBoYTD)j5{WmL(ptFRD+%%=+)-k8;2{ zbkf=lqq3oNP#^m~j*}}l$z+hc)`e1K{rPpBU_icEivc4EDu=VL)K3rnhug11UjG*g>ZDn=w+|aI~XTLVP6|NLJhbMqs&_$dg2X zuFzsEGSYn;-&60>WGK2sqOHVIU2Fnl?)Ltnc;>2zZ1{RnreSJw@ml0NcivFP{>}kT z6m!(aTBf5F2uRnnMytZBd4+xM&BfO%nQEyR@2V=J}=YT95sI*x3wsM>aISU_Y1X(b%GJ7fT9q!o}xX_1za?nb(i?nb&}NTnIN zVF;1#?t0gF-}~9mx4)nMP-NDc<2vd*uWGZ)2ccEZy3bt}aETZjzc~>D&rw@aIjL zmV}|!wLf1RlWRhlg?D$Y-Eow3u}Lb{Xgoyhvhh7k{tRk5B}hI-D=Ud)h-O@1HT6=F zY@+PTU6x^a?M_hL0Qs-TOVg@vzf*u)!XiFfz1Ew00$hp${qad8hy-|1n`FU%B^^0` zSZlq6-51HroYn#cDM{yN<}YiW7=Iza@xQD1leZPVO;VJ@#;Tf*3s9RNjuWvJ_K5^KC29nytzNm z92Jz$zsgK8+c$D6PaKA|%-hM4f@ZD<49Xs8O_uTqw(vhLhI*~=8h2al))Phs^Nu5R zTQ=;vH$Q|j7}VU^&1H)0a6Fw_?Xh4VsdgE)4H=_^%Gaco!pe3fZ;bqLvv-ae!DL#^ zL@^`X8L#kNAI9kfWIaMJDIIGZ^CK|g=vD8viIrTW>=$n3K{>~o&#Av-jg~&gR~-7( z$incfikFF;Z5H>C)N#OXCr?fxv)!auN6dENYAhi+jdzj3diV<0%9gE~Kw$W7q7Xt2 z|BHmrLLLcoe^PuGoWFrG*ZuRCsHeBUUd{M3*C<)lJQnr;;N?FQume{V-_-OocO7(& zhCZ1Hlhr^YEDEb~l{_o53%2duFU~?L2A{|aJY2SyaY~TFx{!XP|6_S6;|o7y&Gqj( zluhwI9Y)d-$D2LfuRU0TU)fr#UC&bWT=RLXzW=1S?*sJGPR5;u-2@A_uICAv?QjOB z`Q?zm-*`{uc$IC6kp=RYye5coxb2JQ}uF)TB7)R`*r< zo%Aoj)t)j0G;SJlpEnR7*b`s5bqgWk25!*^pQD|4;=iinuZ~XiBD#7X0%vQRpBDp< zm5AIs+rguyli^!lRlXZB&{K)CH_YV9>ZO7?{1hj=KmTfI*DQO>pVHgOcYBiUtmT_r z{y*eX)KP%{6XnL!NFUwgP4X+i3bjRB_}Ji8pA7h--(Kxf$p!fIne6)^Xwr{nn@n}U zsVj$T0AFt0i>`eX16#Bn7a4mI_b>vL6}Ek!6B=n^hchAlm)?asVoAJdpF^HY%oXJc zr_7uxgS~nw9$)1A5!`U91pb{5TfPUG0#ge&Dx@r^fBX!^&xT5iE4O{RrHc#vvLN$6 zxDn4II{-kIxA%qf*|k!ytU+G7aB}3XV@Lx@qh(y4=7sxm#7-GkD6l{;>5AZn4c6bi z2T91{(Jo}m5iY>wua`$dP&`ZU6Qi>?2JHD-g=~lGjp?BV)tG|q6VJa{j1G6p>YY+? zy*FQ8txZFI-~f;23;)~(Etw^_KSWrxyVurPNU^oUr-Z}nCYd`$PJ9dqDGWfZeIt+d zNMX|&4ICkfy;B~rkrI2&wCQWVsD4V}WDClxX?b(wahFIO?Rp%Z^wqp}@mR2VH!+TnscO>6lG^VZbsCW!Vp%l8ahhM5 z;l3vcFEGL_vSt-R{QJ3>9$s0i+yeJdH5C&A1QB{A9v~na?@z-kjWG+}5>Bw0;)-ei zd8VE(9kY?qM=EPY+Q_PRU#0OdQQ&$p@MidrZJ2Uk+S^@sqknxs0Mj@W9&37u!dn3+ zXh`rx4m8|FsAnO3`4jus&6U*u!8|Dn2(l@1>E`NZ;5t{=IYYjfg*+vmu*>INpgeK> zCZn~pHEDj+%XjI~74B3; z(m4MbdHUZy6)~bnZCZL31oDn4be(P81t2L(!%bFCB2#SAV+o<**XY=1PJ9m#)dx|{)P+dpS;0COQ^uOs2gL`O0(nmgg} zPF`4b_~X#WJ=8z3A>rJ7HLsGBON+mogb<7`o>e6z^)(%&8&uzgFb}kN-S*fm=etFf z)*e$jOpRsz19qn)bHqOokE_3VHUn1%6{)_t|67f+)RhJM-7Cx+BhLw}Mi*tQ9G6hk zYB6)mya|jva}=qP0#{M3V0nF-0HeF!Csw2kFtjzNt6u0>T!B~- zl7gq`liZkkW~9L39_Lc+#fu#Fg5-mD_>a$@>W|p(f6Y6djv8m!4RLPZb$HR!e?YJA z>y{x@Z8i9v;qolNdul(s&XogePa%98$-HdYJo=3Rr`1vwCd<3C2Wo)}^MlKlDX1(v zcZ%*Yo*m|oBW~L^@qLqOCQ4{^J@_hc$}#guONo}dONsY>IX{C3snOuh;NA_r zr1jrVFcT#2ktw7q`;3O}dFy{#%%gPLTpd zxAq3GE15T1AZdQdhJ>#)EfXFN{YZG4nmMS?^X{$W4C9D#XAKvVf~odeksm41E9{Hr-M2oLQS~=)2kTN~! 
zB!Tfnp_z^YY8U_nF5FUfU}PLVW^a^KaL?v#HKiFtjl)i0(`0`Aqeh%9{FOfALve7_ zO?T|Yu*+d}#-6@UH;L_FS?+>3h4g=i6(q=|BAMa1UeisBM!HAno{tap+`Zu+T902m z43eb$SMu=}(LiC3l10Vwi+yGm{a*gjbZ&9Sr#azRR4W}6qu=b`K&koFx~TcI8%OmMjO{1v<5?JK(_t_?@v7OI z%6p|0-_;Nr$w z`AQt2RcHO`wZjU`cgYnWND({J`qn-JeHkEsNsyZ5K){#_l@6{>60iT#8np=BPB!*+ zcNydbDo%W1TCq%+T%Gab`Km9#B|IIA!w+4+Ql#!8^WN^lG?p?AJ7qw9HoeUcO+QF7 zn1$&Lk(Q@Sxa)LM+=S4~f}bLpWAIt8RZXoo&DUgg6q0ddeB4tqP1AOrdSZ18;91#z zR>#YD;0}UUK)TDLchmo>(c#5tIPF82Tn+KZ)_&?JF}J|}Sh|!6k~sK}_szp8N=u5W zXb3pFHhHiDYW5RM6WsYTHg{+1t<^)@uSTsEHzOtFQY^yk->hl1N3{e~WX_`3P6h|1 zhY`JeZ+c7nE#t`w*lGzf7bA84AyPEy94J>X*-h{0n&`g$eBAAS4s20-IlAU1-}1Xj zuwu(xNr=0orY>wN$Hs?L+O#IL{{s!_wAt$eHM|U}+nx7*yY05&Qx$KsJ9G4itITR0 zh165R0T!uo1wV}JR~qoFF2PiE5w}k<cQgsgeD<-Q{vwjZPQX3;lC^YpT5Cw|eho>m?JxYbbpuw1Dmz_rrYC0|R3= z__Tz8BuehkHPEe}*czc5|3abk-nJV(A}l%y+86yr8r16RAIQzmTUv<=Wm4bSh3+ve~&vC03c)6kUZGKU=Fb(Dj7uZQ$Y zCaPT4sYg%n>lf#2nTqcJXBU7QTC-l1T~drAyAMh1e?iz6Q0I+rmhm9)eSH`e#7!N8 z`85E3UEaDl`L*Jpvhn$UX<7tEc6k4Tc9oo?tD^WoErBHzH@PsJR%FG;+3fNi3E&%k zShw^g2L_m>p}cR(Ck!P%ez`2j_oItcC)Zx+!Qy)yGv@$fJ=84$C}Aq&3Gp-p~wzgh&3QR#D78&7BU5#a-*dx(*pw3-Z)=(s?)e^#$iVl=Wj~p?{hGL zU=V0J#Yy#MIktBVPoLW+^L#QqJM?0bB${-t2(IbCH{CzhWBB9L3%og3tM}fiT4)SK zF|gV*2X9!mK_seEQRMz3tt?l?7KsItkw5{?A-|0a~XyjTV zL8|2I0wS<@UH9m~`txu9e(@4vq@y>sjfoUsJAPoI*t+l^Or+MqUd{Xv1--OSjDyLDXdf$Aff7{VznCVVIg6j=DUj&BkXMg4nu1ykm(*=3|{FiD5f!M zzE?_%G(%Z8vOM_Xef>NM3lOQ%f0=FXVxhYsg3Eb$3A`|E^Y+B640lNw1WSsQIhjPK z0Griu;kpNx54Vw5fRBXMZvEsa-FK)$}LngQ-hS9XH~TbEkO_uG*dKWAHPYjpNVt0a}e6w{<3I%hH4+>%&S(wUYoIZg6a;ZS9gA-ma1FVKs zyz1$uCCjN)r*yuc^gu!q_pgjRy^DZPHp{!ERhGxqV0){+x-=IUBk zkHjRw;Q3o{)X?aE0UV9Lw^x_87;*rf(b1o-b%CX;p@BCRw+Ff^HA+LoH8zWD33^tG z=XLc-D-i-i@-ZF%^>_avkQAWIr#0EJp>O;ZgEAAiQ&IcMqEe<=I|1XBfi!t0Eg{O| z6(uReo%Su_=ypl@eZW9IL4CuG0LoPpou$cm+uW_?sjNyA>|C{PIW;Nb2OBm(reWcX zQpjH(e74iFd&R-g=(4}LJ-KKfM=~;HLQRP2RUX9}tbIFRk{q2UJ9rw;s`HXn=%}-- z0`t4&L4$>(dKqM&k|;D%WHHPen~e1!`@zbi^6K+!1B;aUBGZG?(sMft&LGdU7tGCl z1>~;1;NfUQ(m|s-ib)g5d;!_K89_?$3)(rZa*4J$_bhqXvV;$a;Bx_MCWOemSZwfF z`NJHt8B2pF-A{_YguVTSfz(Ftg? ze4-!0_5^kX1cpfwH3HWkFq%;cZ>ALvz&vS0mO{DfM1iW?M`5=tdk{EG|qiBX)dFo0h_bkZ(%bmR!94-lgU%-+w={b&7_+$C{*s#+4)CBI0v? 
zof-uq#RZz!X<&cIFcC=;Umb?(bZSsT-(BZeLUU(Ho`c|pQgc%b+U$oY;41-=(W*#m zu}(Lik0O~w36Sn5ds_BxhC(RdM@#YJk8f1L&B(Dp%3TTMv*V2K1tbj6@r+fS`EtX6W<>=<#!jJF!mkZ!sRcs2{ zoGSbJ6zR8srQ%d*)mVkt<nPJa`4TRysjax zO^4ix#40+5+@V4p)(PEM3Qc~XB=-Y_sr;`i_#R`zBpqLB#y_6;i17=`cdWSKk>j4s zKtKvLy#gMB8?VjDSig@=r)L z{KG^&u!TkndhHQwv<6b{fXf8WrTz-&Pj{X-NgdWLwanDzCXLKv%cd49;hhKHR@2`_ z)Qhww&s(l8K5c!Gqg4u>a$m*>n8xqhaYh#u9)misYvq?^wZ?8aw!aQKSS!{6nGV-9 z4!#E#*$AyW-4}+-=vA{UyW@zSvuP$ML3zS`GzgtMvbr>j6?o*wA4cz4;WC_v-k$>fr530|j z*%8~a|4ei(TWFqzS?wV#vIQB{_i#gl+)(vy+V_90IQ2EXr6>OwI$|;=$8po+x+`b<-?O47IOM>yX9(LJ$=1X8SJIMd;#r3x__*XZ}5c|CE z{{>JmR7<&A%sHHc+Hwz88js~~gvkZK_1Eh&29ewu_ooGe<(_BEi9AdHTR8mJNE|>z zg7^f`sc&@T$dOsbaLr+W506V2q=1LwLENmU>;EEV5Gcz0$6nZ*e@wdb-KO9;%XI*% zNc*Jl8K*)$){*R0rNd71pItyfZnx4|t>hKAOczq0Y%aQ8&}ApS!!m_ecEDg(5&y{U z-pX3@!PbnslC2U=?t;-V!nT=RhxH^o{b|1nrm;nbcvdd+A(17bN(RU81fRzrw^^clA_XL);7X!ZHnrID<)Q`FXu0Uni?_0ZFu(dnFG1RU70%aW*z*}_V9Ng=_ z{!FVW7RVgmqOBWG_Say1=!LQE%#CD3&~kf8?f*u`NEhn^@ya7(1ZuZ}WD&0>EI_56 za|hVASC!C3noH8{0r#CddRc>Ek)fso^m94{c?Zl0TFTnl)Z$;lS7CVW&R$Rv(AG=+?S;k5@v($7caxRqTQzxCF&`|aRCP`vT1Ir#Z%U^VyyU}zZUF?6U$Nv>-AAbD&({_Qg1ENttDo}50~jk&SXsdc=h>DIP--GDqt z(PNt7l0mN+jWbV@e1F2FS4HlQtNb&l!C`P+Y_Fna>3}>=WT@aQ^lnq#h8!629JaCi zwA9l8oziepLa6-k$aAZS&p{Rg7u<6^UO`@na@=t#{0U1aOL4P4_63ZbuKvnp75k9{h)VOQGIJPO;6NV549`L&F$(0Ja@63v5hbFXo`;G=KCx@YE*C`gv;Qd8DA3$E=n8_Yj;Bpy9s_ zp;3?xdF6mqn)1ga-@?y+SG&8r1hvRK$cjH1o9(G zYv=-H+9*fcPJI5DoRz$BL&EGluwew_dYBFsO%nE@B@f`_sCPPED6RPzz417yHTa$t z*lqUW1`T(q94@>8idntrJE}*y?J^h@-e@4d|GNK`r=RCUhcVLU1M?nbf%%wP9UCiP5$-?mlYY~Ho;A)^B;PTmMz>Vp-l$?H*vspnU9(GWCzZqNs{u|K4ld3 ze-#I1uzXe>`(R((0Cgdlb^unVg z4iieoXWNqj*sfZQwrgQpP2bIv;<%Y*{AWtb ztydxUt0?^XOvu*8->p|UaGlnj(E2;cwHSudt}G?y!$?)Gl=Uozo@qRA0lBtQ8Zq2S z4=Af+t4OlI7g-C<#;wZCoM?^THzj26Xwmbx1rt<`Esn z=r`~3G8^CQJqJQg+>vX)q^l-L_f>K$uG(0Tu@ZWTgHY*vlY5;RHcw1Yc_@Mrlb>`$#$-=lgGEd{-6mVTXCd@ zw@Vhc0wv(lb1VR%cg7))Ob!>xP0(18oIhJPrmnOpTCfGt1@p<$* z9n`J0hQU{KTVIj+giSBBR>>#`$DeeTZ7_}L+R>>_jo3M3=RCp<`k+LL9Hht`jLW|| z>~xgy9t76Aer5AdDpa}shIk=#T@ol}=s+1KxEb-a3zR=`nZ1?125xo-Ui+IHibvNT zO}2RYOM%;pi&o-pJ?D#e+@|GEM~Rg3B^>bdsZWdn8yPSb=I9McZm7Sq@8KQ_zB^~J zZMItjpfCAoTInj%h7!zmpkH|c+H{j@*GE;G>6-I#3i*xlek!IZKpPhvCURbB%oUgL zB7#gL_ZH*2BqNvR>I*LNeA`uj7<-xImwK@mmW$y%KkLFxzg1}jw}L3#2uoqWG>viC zF4t%5OmPl{n-OYqvmweZ1zHm)+s2^VhxbdtcXrqmlM)q}@2SVEaaR@SAKFCPtGn*X z#Rc7pjtGMTfqYlQPFy*2V<_8!I8WSC!t`Szb{oo}2P416?;F|)0zi1$gTmi}TnmQ- zPZB5-nII$j!)d-Iq7(u-$K^pWyR53Z(fego0KeT?w-zIs1=kvsy=Ty238Nv0v>s0B zRCL=kDJqT;jL!ad{*w*rbYbT5J|1@N<{H4dX5a_HR-pzx`%y`*GONb?p8>)Su(&Ie z|INn{b1=`PAIWEC1yBC@-jOR2n;=KpM;35G#soacFhzbyWmBNtYdsCpC{mn-uQ5G2 z{`POqL+LdHUYot9+D*xC%nwv}Kn*1OQm5yr5t-a1h&FdhQ}p`tuj)i^4JcYoI57F1 zUvJ5!vdSOog0aEzp-mnQa@!#j#S@3+VcKxnu!8PYDIKO5%UEX^tdhjx-EK4*T@#i7v3R&Wy(s+AB#H)nS4F#I3va*wBOg>hEwcxd;cO@n@^aUg8{jU zEmJ;el_b|+G*tf#MM-^X0rt>5p37?O%VwUL_t!W?w{f+6^ZgNL}#GZ zvDDJ34J*U^9U#tKRxSxB{tw^cy3zXRj7G;gfplD@W(g=V%*Z?wyToOu(E_FWjR4na zG@@mtjq{-S81P#208tC$+5NN*y&+s4x)~t_OO0<_ifl@OYY1~Z%az5hgDMs53TU3IC+tz5yUvo zV4i1|XiK?^Y&A0F!GA|6 z>q&reBvv&TvY6a<6sy1k7dghQmND&2QS%I*FW$VO0Ip(t(yMN{eG$zj<1}r>K>BNO zkepvoY{}h~y`mMCG8HTVAZ6MvUm=C}x_~8lSj7h!tUXqLH3SdM;>kwOtUV&c9J{HK z;HR*-#v}RlxOWi*r}QtZ=@;x?6jN>8Rt!;l4-BfMW>uc7=K9_5|At&*4Wu*}(!lja zvX5MBZ-&RN#jhVG30pr65St8iUp_dqM=Oxn2D!lGd6SH)-&$pPrQ1Ek${1L*8S?S) zAQvtWB|}NThu*g%ZYst1lX*7PB}@$^9_7kQDeKd( zJCc5j1WZFItyvu#vXO!j>phfQD3K94Lq2mn$io#*WA$p?sevwo!PER z_7?D+Us&}7yeB2-q(c_;)0QKxmFc4`WuPs|X6$B?H4_;J)1f!WpcA&tApI9cOKlcA-Uo+ zu;#2=5HrMQ|5y@A^-oU#uoLA+7-@-Xs{$$%-riGHnV$VDcp!Rh@%ayduH(l8ZX39z zE>Z~DNOw{J88EGiWckTYp(#wKW2txmM>?B7;hkIHYLMAV{%8Ny_gM{*BHK#F3tfbS 
z1gKlS{nGlseDJ9FqUPs40~YP^fY%IFxrHMs`myjcZ*t1#m!Oark{`RjL@L);2H?iD zd_{Ukla-IgL>?TpNW@9hgW_G21LpM=(9elZQ>m<^LyHk&5!+HZVP#5*hDws#Qff7= zp^%L{A?TrkZfUiA5StLWN$+v9n_fD&G3>V?_PL^<*m1~;^!Q#`nP#WYN3(F%8z(`Q zrw6hT)new@HZZSW`q5SPE-BnW6ufOm6q$YZesU%p`H3l)KvZ*;DGl07m@1tNT`B~* zuq>|Br0|xL=+K}#NM)1D=edcpxb*t-2Rw;2 zN&=~u8B}-q@lrXYcyfQ0ayg)sWi?21h1D)`7utwZ}z|1w$WU8 z+E&g9n4$0$yrCM<`O220oGBd6B|B%=Syey)FuO8}r}GBn(gdupLer1GYZ@BlYkEYg z5f-WI&b6=QZ5`wf>W4l05((Mmti4)z)7;FHp6n|(zab+*h?ls586No|(B+tT-Qr~g z@KX#;Yq(1c9w6OB4l0Xhl$?$`GnF@FGl2ME29I_q+|jyXv$*)&-qp*FixG`m%l%V~4dwS%Y>i=GGF^>p*;p!vJ5* zs`B3K9%m7xuOT*wM&W>%K}xRA1We!eS!I=vP8q&DC7U+mql>zV4tpgJzIB*JohYMf z)E@X})`BX`nQ~U_P%flfKQL)JRuu4frhAVB&(V7C*!lc17tmcPfoRJc@Uz~0iYdFRI4P`;C6xr-+& zz8nM+Gg#JH6+)?li~P6S#e?VMcfBmGW3d+BFCH}cqt1D^unOj+_N7-Zg|dwGplRjt zl-|2Sz?k9a`r`w6jRL{pc~jVs>}8b$@uZ7{ldjlQYH-inbK=n#s6r~xtz?oqo^`+3 znOWxiaO}C7zOawof4ml2E<$k5BYbP$AKpEti`ykN#5Qm9^v$skvJ!FiTikFBNbjMq zKVk9lq`cz$0O%5(U#|Pv*6TF~yG(JHtp=Pz0O zLbo2Okk7nhxui(1H%w|>Y!S9I1AaSfENdM%G!lJa!yqJc>TKR(I7c(DWYnLb`PJ znjYj$c#&s+-`8}drg^|!XQlIDb!Is*CE5rE{quT6qh&5^zgj`T{Re0`lq%nZ*LL)f zy{rCPk`ln~%hfQ0%Ud~y6M2S+)GXKTPorCut~M1oEzoJuVgy(&7A9~2j`HG_DP%Md ziIXs2K3hK46!&d3hJJ7byO>=RBxv~g$5UR=o}yVakdZ688;ovczLG%BJjRu+t2@*u zxe|T8#LaJ&5PY>C5NiIVZ5Wtqf{=(|WQ;Z=k7oRj!%A zv`HNnQa25dZ>?`w2nXUZa@9a~q{nK$+FYQB?F7qKKvXr>OW-ahu{yzR27dNGbuICx z2>n}gg!+Y<^$F+w=o7yIH`PiTnOTTdfp7GIcr|etRe0sia`-m&OW0{n!a+U$Kdepl zo=EbvQiH{xp4_>8-t*T}V;EtZDPn9;@X!6tpa|Yc4ha$t&-<#eHwnX7qLo9?(0`P! zLUbi+Oh(q!wc)1_7s)2aItj?E#mjr?Er^BTJ&tE*aS*21;ej-YzJJAmH2ab@9rE}m zE91ul=G-bUs~^0rei2*Ah%Zg!LkBqzKH$&zy?H>?AXz`ts+dLkBJWey8FxIfBkK|g zBN2%eOr2kf){-?yB}&*87)=f0Srj)4GMe56i`NerU?YzakmqjhT+F`$s^xPE9{pi^ zP31x1JCprNTGyUtD1;08W(V3W;(SWB0vrFsC6{?4Ak7P!x#4Mq8^MKM1g~s(#RTvHTVU2?^mS8 z^4Jwj!yV{IlK@|Lob5EN2}3KQDk*+6X#b1yH<J=db)+p=3rP&H;5rKD#d33*c2Lo+3#UgfTLKQj|~Qy%;z6hKEeTb>>u+OhO5 zCjP|`AFksP-*y~le8iP8`$EWbHQIR6tiYiQ6OSQul%hF+m38|9K>*2lQbRUFE5sZo z@>~wNrw0bco71eC1Lx~|N#9_N=4t(8U#~?~WIZlLAFC}p2NlNHLx9?YFs0cV9xfK} z)xc)0)81WPZ3h@)cndHz76Y}Z4t3`lm7H< z^xGWf1UVKiq_3F2bXTMOjChvErK`&OL<~{+ZwuRYPq}{F-8+QYM3H0ij#nqPgK@_B z`R+sIqY@OGBQm7DMVZc{1ZI^@jv`pEC!5#bP&@&Xi(G(kWh{B#Ogd#hO-R3VFeI;o z%na|?=YSw&z(zF-^$#4$VImGGZk*?g??&+Q_ds*vyhET#a>ns^2K1|Q#|j`j*vHcp zVsD5VWqVr^wNti{5IsuewjJQKStuJoLXH`xSi#t@K7Z)u9Tb#eLigXxA-X`TD056tUdImqXJ5p>K-d68<&ZsEKiQoA3ItKA0q(1%OZko@-`<_t z%U+B!_5$d(@peT5DIKq5x$5e~SBaSdDmNuQh>$x__X2b-k0n!oRh4~O9CJ$zq2M1} zog>n@dDAn!JGBoaXzIWgz!cny%q)7FpP(A0c#q25Wxc4b5gPH2SN&$BnG^uYQ|k=^ z&fKD?H#teaX2fFIJB18Ua^3z4G}~N`R`Dv#Y`vfw(i(5pKffRiX39S!N0ic}*SH>A z#RPPlnD~4%h*5vxDIbvNkcGZLBcChF5YBC&U09aILlGbsgT`dO z5=>G?X;ccsCM=cSwJVw;jZ;}?hOcbx!CSKBkXay5U9qRYPLqH7LGnZ)EOSxR6g_O4 z)Q|aksP6EfX6SMuptm0+c>6;McB}CO$=>XFt18zuzDmEdZ13F$jDi)s6!vbB-HTwC z5){wc)+`ymqL=i0I8E=V3}hO9=pqk8)!_8w>=BQO%#C)ErpPlrD;p-(l!Z^PhEw{^ zcAA2|lAk4)B|0NYL08cc&z}kf_CnOIlw7_Vm4~$bRkUwB%m++7b5UaC`-eSq=>fL{ z8&|41%7W4vOAMvYXO+y~@jaa6@$Vb-Q?@+5A{W}U^mHBpuk-3fx7>p=V0?gASXAz> zl$EJ_^o3sXPa&{)4ud5461bFk$vFpKOJ_uQc+P=FDD}IOe{ve+=KQc1H;#(J@+Y{t zC3U{IH?4Nbb3IDSiW8>Sm33XF&mcYagl#PO-fE7d8$?0E{g)6{?)^xC(}T-#wNH*B zPs&K``U-k4bZz%ZxU&TkI5PuX?)XPn!X=$!adnp)0<=d>NZ%#1qax{PRP-6dsR4zY z@KOD~ue(++o?S-Tj~vGB{%y4(8blN&ICY#~#qGY^hJ~(1Q0keI$t3Vw;%N5Njf>ZX z@b%0-2^)R=beY*f>Oi4!LDb&pemBxjboLx84po9TLj8qJR%P>nn9)MQ6k&n>x5HX{ z9P68boY6Xn*sWb!ouJ$1XzH%L>_$pnS+6>$CB<5_r=@(f{D=Bm7PM_}l!f|v-L1Ln zluiU>mgb&;w5oIf{#;}%Z<9;pFVv;FL4pNIef zYrY1`pin}(RQ%(|*@UQYH0s@!qyRy@$u+7q`w*TC;dNm7NMZX|)T4Z^RRbs@>es2&?zW7wqo-cG(i zlz$tY1~*(0KWNA?_Z;|q7$k5|qkHzy$UFauj8*v%>Zv>3m@O)VHyd-F?f5hf`vdX6 z=n6Xy=_a(&sS|YB+aFrGmAAu^)Rq{s)|DV!TL*O37 
zj7ZC657m+sKT-K1n+Q=t=%fBlel&I%*Q^Bn$P-nix+YCTd80-vt+n+9Vf_Vs%MUEa zVg>k*0RbDN+eM$-vlF+rxb?VZd>~+4;ULCf)TSp%vWE=*qQ^%K*q<1HZM70Ej@eS9 z0T5Q>zUuVJ*fNXG8o$^)6A>3v6*ohN;%)LcwbFET+R(_d^471=M4tDg)P~G7_1GIg z83$-lR-V1F?W}pTP4pAhqK1fRiW?ORW0I#hp=xr03>eJ_(|$7AL$4gjbO#ighXD|L zN1DU~|M`Ica#tMxL;loh6zLhvTg>3fyN=jUQ3D{=D3Y1ZklNr@b0Pxq>cuJr(P4R& z-yO|7an!k?<(L<7gx}xqM0(iUYX1$Gbt>%W%H(W0%T8hTR=RIL zOS8&@IjW#^LQm9%)xm3xQ=kN6^N9=z9CK_8RO-{^{`3JGcj|U^Qx<&bq5)d&$BH`4 zI>X=n&_{cWD=U&RQ4k3f1v|TcxE%hkgJU1G8lMZ^zXLyKgJrvSn*$fE2*7j6g1pp8 z67eS&I-!`=PDhY)fFGw6i%;rLSW)g4$J39OKSko;^~3sB__qI(g-N%=ClnNv3I1V@4vCgV&E-1dZ3Vaj0xwF&7_7Nq|kfsU7 zJGZ{rUW^Ik0WAZy)+Q@xp9PSEbG_S&7u?``VmXnGw`GT4E16o#>d|J7@-$dI(h#~k zjVt>4%goMmw6;*-G>dQ9HCoRM6qy%1wSF+Z%$yQ|9*UhzW*pbmE)ZtJ)0pHomnWvY zBG&uj2V&fw-H|I;-K-Dl6FuXLdp;*HQI>5xXYlGUg6|v39K=133^326{&pub9iQrh zQFtNJj7tl(ecg<66ai`QPL$fF-a5-Mp}uA*?4!aC-b38{0-L`KxjV?JS;i1Hita-P6+xBY$Z<4}9a8HU_|bAn&! z`T{kgaf&2oq?O$>EZ1^6IR_XE<{x(jk+I-X!#!;8DSug@I8k5lLDSEj_ZLghX3;10 zdA+deQIHI+RfXrW90Y2J@+vV8i0vQHL}v0vq>aUU>vV$cGP(Z=MS$N5F7f*#;Z4|P ze`M=8B8^Y=4F?XFee0D+i&?eut;J>HMebGK!^Iu2W`~bc8aBwPkHWYHq{~{S<|U&u zs4lqOV()QvqG^H>u6nugVm5O(X5TEDh`0_dyK!bXB$ntw*dIys{DkuIUX?Gnm4l5gqv}u& z)^nWUHk59FW&S1nZ{4O!zYnd}t#JRYyp?&ti>dsZ#6UnN;_@_sVA-H2mqnNBBn(1|; z;{Y?{#Y6!n>#+4mUoM!lSFcl(b6%voZl@Me!{U>;Gk)jIo#_@Y-mWKw2kqxbs;XcM zeu@{oXfz(|5wVhzl4|y4z1v&j*)3`w38iXUtz%s)eD{pi(mzgJ!E%x4q4oNgdo;in z{hf`%ibUZG__L4VutnmwKSz_u?7yK+ny={-JS~6ta_I%g3^M|et3Dwbgp9`a389v1 zmo|fNlqQ~#9h;8lW`onIfO4%b#3VC}2^Yr>rDzRW(|FjxK`QEbWgTQV7~H2Cm( zampRxOxBB=5lG*HXy{b&jCL*Cnvz!Maql^Z^vf$GVTq~Dsk|Nct85e7;7BQOo;ki$ z^8J;r8%~rF0c&^Oi+Z=`@%KWNLgeUgp;Q{x&HdFNk-Xh}et4Wv8XJQEug$3F}vkkE3}tX>EDIEsR;?YWS#w4HiaKf!QC9ZD^P#L2&Mrfv#J zuoR9u>z>YYWoJ6_*QAzV?%oh3V8_7aKS&atEmd-iT~f@_U=<3Q1LOyg#Tb3-#}cjx z?-oZN_yd9O1_cBs9{AWyH);mod`cF6exbXm6T)lbM->;jRNLAR768l@mA)?+Ba8c4 zFQa>Eg%kfkC4nYqu;<;)Di^U4>w#8d=4jg^i4qbQizd-NH$(elHgbINp-&+h6PTLp z94oO9@WsurS*XW&H^PP9+*Q#sF@^snbo9z?YqtG_w8%19hNT;=XQC_FUJl zh;ne-)nJm>1=^^?_%~2n{B{!{*2e^b1fAS3nz~1PY_FICMSk z!iFe8RBVljpc&&ZnENq(k^rb-M1?%h`_U(AGqr;dJFVy4&_Z4F6?T}*5l|>Ch1|ca z#XTLCUelFAaI9)xJY*wylYWtT+S+}N`G~-JHk|~|?^{GAr&OUP^PP78G+X+>bZ0QBeHS{|CaQdx-YAO>&V5(5|80jpKSBYT$1yPZ$xb`XllHQ<-D<3ey#9J7^30$&IO1LV}J5_1-cvbYyNKZ z_YqVNWDZ_@x|%L(#oV~E)!s#atUv3)mRp<$r*~_W2k5Y^8)2CWA1OW9*HF6t2dao|5j(#T#c80HmDYS=icTioiYY?hWF#`Jp9C9 z+vHCTzp`pOeph{Udosx6nlV5~0*?gN;UBTvbIcRHmKcr0iQ51IEqx#?)Os}46Ye8K zqZ5{bBL&!k9g|OM2@%JP#mAwu9LI8V1yCSmOhzp5&*L(p{a3{?kskT?&WejNZ1%qj zJO9&TYz6i9@g;psv5(yRD z)G&=*uT+@g?nAn52(i~`&R{$SJA^uvFlDXWC7jr+lX{(5Y{Z=f+@xCOnSy0{S&%Kx zWShw!*l%Xh7)`=ynaVH47{*$}MhFyQE)cmUH6VnoJXxDhy1|3j1ww;ziw@KKh9>D| z=fXQI)fY{RlW~~9vFl(IfkT9v5|WKdp%&ziWm*kBI9uThCPif$FnK1|i3u*uK z)EhZ$Cg@eyvrun-@_+kP6#VkyJeI!e21E@hSljmA76e}1;pH$cqI}f#Z#tz>V(9J?LApa4X^@g`rMr>tW~3Vgl$4h4&KbHpB&FX6zkBbq z&-uN7x?GDj%nbK)$8~+K5LFS&IzLB09M3bfE57-<<^N6P71owGIzPju$fDB9?4J1+LQTy*_n78Xc z-L>n7*8CV3sA^xfEx10hdAf24+Rqu7NO26d{UvIBi;UQ`)r}p)IfPyPI$`7us%KVM zDSZ3FwLXx&uogM`8J;TVnLjV>t#ad}K>}++@N>4G7MmjKdC;{zAcL3J$=GMN36i@& z`e(|ml+20+yCxnVRi08az1z#1ek0RxpJz%05*BW-}y~noZT+PwK3rVEet_E z$rUvWZ)wq{t7+h1arXMBzC3=dlCS(GVv(kYJ0w;op|WA}7KnUE!>qb|M*wHT*yMcc zNeD$2bQnIea`BF3@K%*iYCd&qXDHVF<LNcv7zPIRz_BWs}7&j<9~H&Wik}YbEz{234H(V z*#0xy0!i>0qEmEBADO<;vFm(MD#nvaswSjlF;V=*B`b@JVTgW=Pg?oIB((W*<+wSl za%7>vuK>N;Fwt3FOcO&U*ayuKiz;ZA&dSA(leDvo&CmxMhTY?+2%h-ShXHHJPJO4= zlNgUi-SBy5_r<+15GR2e(5av;(kcRGQvL1dH5o^WM&x*_jfLgy%jniDZG`n5a|Tp5vTU0$)VjODoqzl>d%9E9IvA zA|LQZqr_LD*G=h%!yznQ#fX%7qc$G5-M2Q#m@78!zOsC-17On#!f>zk(Ra`&5KT*Ew(o4xcM00*+loZ 
zaT_66$-5b+r02x4ta-=~sG@Wc-BPPbG&g)Fa90i>ZNzIpyia3z!tGRF5qcF-QoBNvuXxucoG~N;Hy}uCh zORhG9_F#(wWaldhrR_GH!&PFu>#WTFxm z`0=*Ni)Z9%+ZHamz7S`9B_`m6;v;>co>Ae}Z5y6h=!Aw|}DC z*~{^D0}WZkw3#V-J$#-X;(nu08Qe^VWlCL#(c+iaZ}tT;)aNp6`G6KmpL0&lNk1ro zflC*nx=K~C$`Dd!x1?jJW8w48U)x^N>5$T-6KQ@GeICH4e4tW;o=BW|( zdx_tF|8Z|nsmY}Z<+s{kch*iy)me`(Ig-9IaiIf%+vPLFIFY*%G-56H%Uc4Hd-0fc zcKFL@^L_k3TE#BDb}^c?PVFU^Wu@)eMM00z>r|m`t~?GepDfvloWt^%;hDyM43;u0 zzcjv36kHq|!hx%c4696R#S!~kstW(L{bO06xp;stYaSM8a5H0#`>z?36d^$%=jdWe zITXEaGJsV+;s0J?(?sAbaaF+1V3CFnr@Voo&!>5|pD-VslAfszyH}}}r}$#W+|cK5^t=s<0&Y!uX4@E+;@T!?B+DJ_DIc#;DVW|s3M6#ke1qr{3(TEv#NN8MXUEfU7e z)B5B_IM?Z3cho|skZ zJ#tdFzSElD;eg`=G2Fizve;XnZGl&N{)XGhXM(4IhjE)S;WH6BQzhwUHZ(Qce7{|K zuHqbRLZ@!fIi)edD{;G@QdATa`jTjcGOYGd)W!t6HSV{i;7OZ{_>r@$4ht(}*{+~} zF8&VPQD!Q%XCsu{{%_m5Ni_C-Apgnf$%o^!4-=pL9b48zY)E`>@$kHOzC06BD|S`h zgYYEY1P0SR3+HpY7V?5vF}QGc)av4_)trh0Zu^Ws3qB~L)VN`KcU)CTqeo`3B#W~h z;0@MEu}Cl-;S9asLR`TJghwln>w)H8GgJyKu6{s?W*#@&%e&(fCIi-9x?ME!{UkZy(u{z$sE&8yZx;4;h-s^Qz zm}E-?lyC-7vLU#H;{e~C_scD;EW^qv5GPDh17HbJ2ez5G;CS#ei}+%-35d&IO^C?{ z?e1P9yDTpzW{gYEr@w?#gOF22vxuS&Io2qjRYu~xcnGI)-`G?DfMxdR2&<^F+0mjFrdLw8ch@|CiQL-b{1n)JY+f<>WH{VEcKWzuBswH#LGO)*wb6w-Sg+}Unuvin;!8}0v zXXzy4&N35P*3edGL5@|cDVE}vYH!&{+o~L&NiKchnJohH5P5haYHX;|rcCsBC>e>k zQR-vXPL9tp=;sO4*eDtkQ3|RUU{TYHbGJ4L^ z7Hh^}U%3qEeqo;r_ZHP7LP%~gdi`xaa{nbN$O(OhuOh2g9diJuX6zklK7Q{jjZ{d#_VWVRtJ(9mzmi7Z0aO|}%%JrU(}O`0O8-~R-5*Lm3)%g7oFadk7nIv=4!@rh zdi=c|LN&UxRlh6R)Dc-5L*<6tsGM*o%ohx>K0K|w-+!;63!?KQ9Q^fhl8&mduDn|4 z3wllmT~f(?0|xc++#M^8!Wv&e_|Iad^G$$7{VKFrCxXUM2T*ATo34U(bcv9wE2AIWtuCI zwp)$!`<(@4QVAzkrIUtTmE~&{$L7;NWM}PPK711v+fXyN#aZY@&>7PJB+Q=H^A{Y< zL;*l6EmsYCE0924l!^R`-vT8T=8wNSg7+?of&WkZt-eLJoq6Qip0a0U%Y&-G>c^>z zPslCGxiNeh*0alA=E9yj%0XS)+yYitM_X|wWs=YRAbEy-7=#Q`twYQ6!;%`r;Wq-% zFKvjkk=cL#TD<$hI~?A6X=C_WxosWowG!vC@e=Ws69SwEP{nzq$M850#DQ(8iZKju zWeT%0=|)c$(MrbQc7-)@d!}AZ!mcrUSX*HOha3QSHF_GRx?}PJ>qW*m^H43{O0rv2VX0Jfn5oi33yhrJ64+F`iXP8j+(pn|6Qm zi(0l*wqLaDovplFTp}&J^nqYv*YEOj>)~6SsH_xBL|2*`eaXK&G)WP@ZWJr!GC21c z^IJL3vdEG`t|e8#t)RDv4FV7j-SRfbfpFTdIn}YWS%9W5USBve43@ll-~ix-1z?%` zK|_U2edSSHaBh_8L$kW1CXEcb<-2$Z&k<0pp0|CG|K9dF4HXYhaf>`VN%up6w47$3 zCP@=gE`f8@aJl#ojtUf9h1fa`ik=VIInP6H;I&6fw7UX`!k|$oLThjcN|9ACvK72f z)7gfmh*Op4Qh}EKaYFKd!2FMlfeqzOPT`~FsBV$i7Rn_eV83qwB4&8~8vI^0($-k} zggn-rgjytSwM#xX*4D*%I0BJ4gnzFcAFjH_={ckUjo(bWZ2R7}mF`<{JxkB(E%a9+7e%X4^VFadu(-t8jvQ~XvYvOJG8RgV>^61YW>j{P< zSB&$2hb-cCCX??GsrhOZk&OP2h{ebk<=4=CbK+Zm?{SVO?$42e<f0g+|e^AS*&GaMrh%dZYc=+`f?L z+|fmD|4KN^!hgTHsN=O`Cisk6AgN9aC5ir|dF-Cg$Dbzq-aQifdVsP~;xw6?TUhE= zF$w9iC&3M9GXPNeO`h}r-U6xupBt8(NU;MFoMjdo8QR2)x|}4-n26VwAZpP-ANw57 zUdYFpH_^&WN2=t6@+;EFwYg`88M&eUM{~!yUdNOTU%gEys9rajN7EMzv*$qOE9$QsULQ9ut%ivq z9tqpq_4=;q9$}bASr;VU_C3;O9raT@`pHjIt4oB62wDTKmX^ii%eiO4gV{D;D~u#6 zGlxDTeQ>UK+FGAMK1(tSMDeq%Tp@QZZp_ipt)%J3)1Jub1l7spGNu2?*ovB z`QOK^$Y)X}Ngmb8CbYuAUP98hJL=>Q?y5<-H`kY$0ogQsLF-6)j0p(mdHfZ1@EvLNqOvt=ECRBimS~Q~FN<57?smQ1SiYj|f0|AnPI&K! 
z+1VeJ0(M|N4ve+nR2ehMWMrX$dRaF^{*r7NGFh^@eDEIrT9T=Ola1RsFJ+YT1ay#Y zSED&wSVadpuOz*qUhhqRP+D6YCE4y`gWbA8rU% z`^DrMVxe(Hep3{5?@MWF4{I3AUxJrT65IBCXELL6noInNee%QVX<P|`G0Doq1s%UZ8rl{k=bkj+Uf3$DWS9s|2C6+hUTB2mcyi&uq zCA@acH7K_3j;4_}-iy3Zp#NouB>;Eia^e8dW;eV49ysXlv5t+4*2$sb|J209{(RM7 zGAj$@;m;Yt|MRl|%>yd!^j^S}WpYioG<7cCZlT3I6*TUJ$IaoGv7J}{H0PI2DsmH^ z&i0ezleFUZ_;n5zOw$K5uNS;F@L-i9gGXe>Ufm}aPH}Ek;%K(K$LQ5FNPwe0EHY-$1;lkF!CSEAtHkQMA=xCu)$0x-ORUcD4gSasbi5IWOeVhnyi(fQJiTm{VNSoZNLp01yg46 z%?Z*}e|uOnQmxevO@nYXoPLgdeP`%yJdMHXT`#fW;dU^S=0XI?{f6Ovi$&b9(f;JM zy}`#zPF_QRPJIM%iTe>!F2Qn>&c3cO-7BjsIXCl0k`P#X0NDsA7{RniOPY4PQxC$3-qTok0Kt}dY7{e z8JMJ#+s(DU5;aw^nla^=ur?b3Z~fgoHIfvk$$G3Kkvh$?!s~8xX7W9%ZG7s(FIz$s zPU@O?(PEZ_-pR|{>_e9&FJC}yeIHQi)0&m|2Nu2iDYtXkeFlcmXAsHt*#ltWcZJ&P zb);bGB_!Y=Z_NQ3BOY*F8BM-WGKGe3@}=pKofFd6H&Jw!hhgdxOE&N;X2=s67q02m z)t6)@oIC)V>mf}0vv8DeH}>6}DnJZxZ>86^$N4Tj}`3MU)w`4g6!) zDpo(FLIR8K=>oK3Nylp+{OJg=;+(X~sD3LvP{e+V`z-srOU*yO z)IzAh1qr@aCWAHa;OrYoUIMTI0=6bl9fz#kO|Vlq7n*bHU%RJ{&~k0VQrZUH*dnY{ zZBr~<4W*v&;sxaMIPkQc=wIJK#xhFBvE`y~gFmE(mUBGsAlLGOwt1hEmhd_Sm6}jd z_#eXn=-g4blJT;cvo{HRSya_qyj!;T6RS|E-w$;9)uX@NzPH)TDJs1~njiR_S8$L| z#Dyu8h;J3ELM-{ZV9{kl{O;|*n}_H11c~>)k@s7gTvauLFmZX~V|-oGUnh z>>7%)Q2(wHBhh){17oq+nO;VM(*G1kApedB4iLfB9winI{Z9w^9E)d332Y*x4c(O# z6=y0rGw#bx&L$B}c1ANpIDq^UP8ZP@?6&t>tQY_R8<0Zp2NwCM0VflsV3SaucGI~O z^cmCwKfQ~XTx8EZ0xwmc7y9Mf@t$6x`{-qc8mr0woO*xhPfA`%|F0&I{hBV@^R(k% zO=9D^e?12VD@>_4O)!qNMZpUI%t#mH^!R2I;ELlSo>~5Tf~IiKiyr_rXrN!>3u)z- zM?`b_w5YTjUZk(cyf!7#

z+lJ!}Fed8x@-5}@9VR<{9OI}pFo7$wxklGCJ@oB9R_QRvCR?bnTS{iqJs2cR_B%1w zo>hGz;wuLm|9OyqmImeANvs`Z>99al73n-7o;nw&r}OpS<%@=FG{#2Dl<>R zQ6aaKW%28_MI%mriRm4FgOKF7kai1l9AJ7pstMuHB&xwBwulO^8BW?UjU?VX%Xvw4 zWO5MVOfuna3fQ#h-oZN_%3_1du*((0-l+51QvJAlpBT&L;U?M zvFp1pVrON&-@Zd*^L>^&Xb3V=+h%p}K@vt=#|9h$?1rDB(+C3lSSei7#sfuO^CED- zqiMX8-+C~n0I0h4l+Yv3@D~L*16RXL8kgdwuMXMxq!K&1jbGxn7P|9ODyyKXw7Y)# z>IPlU=xG0y4qu#?^sS8DrGIuAJBVg6oTyy1QST}haHhhwDl6hpnW>E)S#zo<&Nky{_~MA_FINO4#M{ZN7DTDzN*Bs z{EknABBw8U+LvT?>q09V?zyR;L{N01c{8&^vkDccNMa!)7SPN$Ca}SYycs*_o zWMj{o@|!g1JO>d$H)Mq|4eUq^62)@Tj?;93Kx=^8a=?DO~paG_b(R#)h8w$cAFBg~R zYKP#PXY?O!gH)!3zkeHjsVMMWEWyOqd`2r$;%R}15G!(ohxD240>{*5b8m_)aKae_ZflPXebzscVO+~w=Z?Job4Rg}g zFwEya_g*+3MdAs=+}69D)Kc;Y&Y!(@OKWRu1KAeR33LizM$P70o3;R=Uj?Ad3if4t zr9Xic=N%q=6@H8`?3U;J|HClg0h`yw>SoR0ags!aw=@Fy{-qgpawNw+gVf=Gc4Z?$T9q6f8f{ZHs@XPK&_@}iq^ z1lo!PVge@=cUHKZkrEqso`rW`Vzb5R!J#vE-eNwRiJ+;O$loUQu%*2`{E!1EJ47D^ z#p7GtoG&ibCHR5AP1H2R>qNMvIQLrlU(mf0=Hpi5vbil%TvEXflT5-_v}}R|ela?= z8+Dc6Fv|Z#wz~X1?Gi?XUsU#u*^4T9gYr|6&zgeKJ<0^egA@O=zxAiqZd@AgGnVr) zNlYLaAP$clX#07uWmUKL5eL#5b)f|jAoo&M?STK~h$TBl{po%z``+gO1IBMd_;R=% zxone&)1N2J%!KeX!3l=3rs ziF%_;LflpCxAL9`EZwRnKAy*c307^(SIrpzgo>`}Gu=~bdL=&YLKC1E-iR3MeG^&g zqr4$55OGd5nQAx8Fb=KPJ@Y_lE3M&YCo&m6%rUwUWgn>4zW$at$dIg3~8*7P(cWXVKRIEc}Hy zbtr}M-z%_5rZz+@xSG}N@A4O(G@Wo4CLvfEZNx!q8Z-cfU%jcQvPUt^#Im#LS@WVfFOGX0B zmN8OustmEaCvn^Q5T2M9_`9o{srO1LFxp>@E_n((9he&a$ZT$=9CyN(H7zAs)SDo# z9OZ=02wX3MWbtP8DD!VGjZ1(&PGf?_qeFE}S{?684dM;_RMkYbvW?Q04lzu+sbD+t=yVZ}9!sdUK9G5NMZ2qmnGQT@%<>)X7tx0)pTA8YGCnWqq+PAw zg3w<9YmF+_$5%p$b$nBb!Sm+J@;z{P<1=HMvaFYYmEB9Li0l3}&zx+y^6m>5u1H%j z$!RQGKncays#xPY+fFhW%v|g@p_k-hbE4$!E01Yq+(FJ5xYkme*K~gHPh!pK?EFioxAyt3U&>LZz4=us27wFrx>p!_b7g!TXAeGsB>%liR*de z_B(Hpqv~tKBFmts0+>v)w^A2c`65s5`$6WNj}wguuJc>GANV2|b66ajBQb9oAum#% zw0G~W7E+&Yyh;bHoMDzN>|J^WW^{AoZd;HGg|5&5P6#}|=SEnF#9U!MtPAgE9i6-|z=!7(TS z&^7*ZG-LR*12wnoU4DodGeAYBz{YW-Gx7gih0lTk=GG8(>@KdSAlz9R0%o0RP6Xbp z$sBlD$jsRY`y_*rz?xQjh9U+JtZ4|8Mw7R%Z)vgV@dRw(6IBH9#7YA72ZEE;7;+aHpmf$K;rN1xTr;n>G-;e zbGv*ymf0A_A{L9q6K*Ltzk6QM()9N(kw$GA$1L4GS3))he?FbE%g_?zh2mtvfcwnr zRTZoKM*{B=mFHBYRNB_9MMF13j^v*bm^L_15;ZM#uFX}ffA@3>-?S1<1ehO8Bd~+C zde?2I8dE-dFzPDe)^8vCgGTVt2R=YCv{ZP0r9D+0+VxG$Qx9s#ts|y0YSTikKIgdD zL8kVSdj<|_;y!`7qG0)iPE^w4NuFWIdhSf6>MpsBfuj z7Xbb2HdI9Jnf@ZY>IZm#>H3aDUbpukUM66+%D63XRRdluh9n613)USG@W$9vQ#rHX zH*0aEcf!W)>)4_+1v*bHhnVe0*<^3-;nvGHJsw+DWQmTV6#hhyiH%3o&64=3OJ_#W z&O@kky!G{Y71I0yK|Gl0^?k^EDt-rU_oHLx@G5O$oc5Y0_iGC| zW-=cPaRi(y{xu11#Dc!#6S*4e`_3_9ln1_dCB66X_0VMmk}pIv-^=8AypGLGkBa(Q z+s+pU7%996KV9TXZE6d`+PlH`hhN?@dcfOou_)i@R@fULE}s3>GhJ}mEzYLSkjKzE zf5-NuN31j}73->q3b}-342dEa zxN|cSrl~;JVVv2horIF}rYj#|+Va*Vi-!^kpHjq1zpXyhbXR#j)B$#0^$gTqyR71{ zYKQ7uJ=}Be`lFB{Kv9#JZ?qss#_3qQ$6IWH`|9D)wY}#)b&C!OjM^={HX8&156=5= zhDe06=Rb^Uy&EF+*W@ouE;H=4K-X!$u+GTsxqK$K@xx?|nCb)U%y_mFE`qTSS=6!W zTl>EepsOh4Bx*eD=p%9_W+%q``+FiOoAF6s2Hn}Kmi^hs3 zPL69Qp7iAxA=uUz>8tW|4Tkrek$y1542QP70fBo>Q-vP`j(#3`TTkgl(xK+WCzM6Y z{YUt~6K(BI^nYgfUoURpl9M18Ecn`~hQPrObiFABEdbii84&WEf;kuEtReWs(EFyE zQA4KrwY!tWsAuy5DkJ`{uxlEMUGboEAc`mW5j~;3Gq~h=crdTbmJB4L4Ig_A{C-Qwpvc6dR2ZiGKW5IFXmt{uhrGnqKQ#b!?ULBPz8 z^-{j*(Z{XbnBCyDhFA$_r8t$q+nnmXZ%GOpK^#E4aX-Ef8y&##a*-61DSu) zNm|1!5#OYK+L7lIJcioz1s*&(dLfs>oW^0EJM!){7NV^CS-u3!Z&9IFb%h!16ORC! 
z)$}`dQ0<*BG`{6+6ulMB-30U0kpS#ba{;>GxHWhCz>7=iFa!RXN)U`O#!8^cIk-ou zutbbHqv$}#CI(d<_|c$VMw0PRv8$<1iLLzlOeSF7$-WXpA`gojVs`rUv0Z_VhrhPQ zp2m~7e1ZoQvM;UKKm&$(hGb@5>LMG>!g~0O+!dr<6)JW7yV1y=e035XCiK8bvn&#& z%H&fcIa%qi*M>Lq$)e z(vq~7U8q*=L%;V3`b53ZQ-o#WWe0s$a^;c7OF$s;chFdBVM3-?yV_GtX7R2Zk1W0O zu~~dAVp(|Nq1Iw&gouY+o0baCs%J}c*cT0%X9RLR1bpMWNK&rLb#vrsW5T{6;^!(G z^%A5r%@U%$Gp2f&aY;9xyV3yx(Pt`%lh^&UDRjJgqPo!Ov_N6d;Q%As2jf&cp%q5i zjPXh)GXvG^(@q1fGz(E4`2bE7Lb$1rudbv8$TB7A?nai!Ps0m0EaIRKoE0`yb%Rtx zbyTtk0zj$m^Gapc{czrD6=k}7$Bt(I2d1bhq{$m>LPdtcG1rk0L_LN*K1txLcGVQ` z9hz)n&8~Fz?zeE?f?dGtvxHzg!W!UMl6PDrTex~Mna1RK3S7*{K5wCIi;Z8%_N4Yh zw%4pV7q@^&a-XG*ZX-$&#;w6VQ-d^>{U^_Jx`U0y!`*FV~9S^g6bJ%=mx?6VL$-D{~kWtBawUvyX)Wk~GVnSn3e9$6a&z7eU)q|57Y6r1Oe(u}nysu^Wx za_!NJ?Bc}q0R~RZ?DtR7^XreKL>6Hgm)`ZBW|7Ts%L7a#=AA#(>&+=BOT3EOyTkKL zg!J+inH#A)MA;2PBv-bUz`LTnq~ga?*J)%{I9EAn+*KpIYws5XyR)&Axkp3)ajH~p zUqb?QQtU56&`YQ#wUi2LD@Q%={)ncbYHmvX7>F2I4P%LvLM9a$_PtH9wQZ&&+z8hn zRb(e_jnYDZ2YRQXsepjPF@`U5ZS)&k5nM|qtKlf>F!KfXB`YB8wY0|;onbqpVERbp z#_E4{fm}jdRL1ljP%&z%(id)0A1T7*3Az{a@GCET-vD={;CYQ%Fu9A4a<09%j~ZN~ zFms79zPCo{QTmFX@Ix|JUCnj_7|9XeJ;Go&Pm`s|%hC1}J>Afhj!vibT0%ZzZzdrs zHO}G4lB)rAj}{Z6+bjS0_9oHJ3Ntmy8K?bQvKQ%Q{~};=rh(p^UPgLN6dNBZd(6_- zJ>sv*hste~V^-TVx7pbs0Rb$GD?|pD<63rlm5kb+q(b_@9UrXpZ2!wvT*Xg>wIh#_ zm)VK>EXng`+72!d^_7h{w;}YM`l>MsW+o=+B1=gF%Blf>ok+kX_wapa3u!SY3^g25 z&Qc9^!FGN|A6O9S47SbjUn*GE7#B(bJ)#DBo9GoAjYB-rCaZ>%#0P-qQT#gbJ|$nN zQ?8T2qqC9i3Qore3P6MM39~5{AgBFLus4{zgAFBj1J0H}&c~#0&?o=L_lE4aco8&ER&DD%Q@a|@CIgK6NjV&0A zrqsi{edT7<2@dq!J zr%zcqX|XJi*rL;0R-(t1)3+T4c{czx*g~ZvdP5nz;-r{a5T($8Unif2CQw)2=71d9|rMO--eihD`uct8?rdn zN6hLF`t+M!yLd;0W=pimbY;y?-Ib*J{oSoA6Uqthh7vEp=vmjkC&6YaRR{#a-Y1E} ze+>{yzKJEpdvZO(O6qw3#$H}@tOI4aL5w>VAc~?l+@{}&`)p}ssfdeYATX0xcn3YK z<>fx+Ay>n)J;vc9pC98n*KQ~eSW>iOZiRvaB{KY@ko^LyHs9RHxbe#U6|_$Xn6U4l-}?OO6oMUsvw;8oNdovlP$-K;{{MUpjFuEV z`t8$gtX&-A+jRZiq{mcD@|3JmU6?_NwAsQq@m9q+Uv8HOZYj+|In5}s?VvNumQiH^ zguwWtW2+VO`^5(Mnqy)+-+CXqul$WWDI}Gusxr+(69Pw7BrRQa^Kkp0(Gdl@-6%aZ z<+Oj~Az;zsEm`;c+E;!-Fmfdg;YUe<)s87|wY_!G2&4M#vSeng5sj2yJxz?2ZV=ffkU(l7Q8o|%`&*ETmj~^Qn6zwV- zyObH#e~E_H<+HAX&WL`>Gy7~Qs+Rq5k0KnT()5*46QyWD2*gTG_%=&5TelF7j~>c( z`vZZoh{#tb;Ex3mFlWaJuCw}+BkQRf!HW; z@;q0&0_J;nkadu*XzM{$?f?$4PqomZ&`6P|5?@kc__rO)IRnW65tYNPMjVB2)8HRaA{qrI}b)LXF`jGm^(L|Rprst+N z7pW^xmMq#z0_YcB5!YkH>U;fLxo{mXXXixm!JRBJ-lV{wz)(p z{;bGJ@pu-w_8(m7edG8icEmLz>|#d?cV%^Eqq;_!7VLb&i7#~7g9uTpddHrz$0i|` z%JT@PGp;Si3K-y=nQK}vh)==x8O-42A>MN7K6C>+8e^)~^99LT#7$8~vEkOHvYL|S zo++2@A`iAwch@M(&^dT-* zPL+@Ty$3X&;)h=dti@?Dk>AdfGWXjGa5_kEKB7rct(TF}ggbxBGAva6v1F{Fu&eP4 z>C*I~=E^tJhX@13vps%n-(xoaEa$-~thQDYJ9j zr`I8LCDNqq6{o{B_07IK{rf`&&$^_Mywe^}1+qJMH^2Vq%@_J?eUCABD`b&pjJfrq z^c2rYm~V^pYJEfrL`HR;U{d&WPxK9gDaVS5+MUO`59) z!Pre=6KkKqqCVT}K*?^#;LvGiPMtU9ptddJ-kTKdoi3tJygSs|Ny-tGH2{JJ(~uKnEW zI8Mt6O;znfAzHRy_@K0MW6~2#Y*rR#1)p!{ST#78HWsSF@Ul2X(rYw0PvgQ;)M0h# zz;C1m^s4YA#Vg}o51#n6wb2b%bC}QE7Y}m@+nn{AJAB{H_Df<)90D6#k?Kxs%XiJQrzQ5>619m2MjNB?Yp@Q& zi~dWB4KD6P&d`D;BCt!?v8-*#N&dC>OWDvYqN~OD82ktyo_zDubtGf>&FXdZCKWqF4{Gv1;`+0#I5b&ECLg(#d;>$HYm4}r+mY~v zE=8SGhED$tqRf|P_7n7{SU!8ts~gP2nt3@_Wys6UC2pn^{DGRoZsBKR!5`daO=C@p zmNIP7L2)qH7hh0Y#F2!UrgYl?O&}UuHF*_*otlA5`MFX?gs-T(W>!=Lf^W=?A#FS- zMy}1`rFOPHc}%LY4QJ6(rq}snN8!Q6j4+!YD}PUA9TZNc+uPkHefCl{S{aQdIvQ?N z9wLXH%`|+T$7XMyz;WGoEWp6)MYke-rk}J~5Dd-p_}Gvqk|$JZLb{@qZnhskk<$R8 zgJB0s^dVK24%+dC(YfL{nPX;&OO%d~o{Rky3>xxfkOP-JtCIn zROzASM6AasqQDcI1th@HF=nA(rx5bMZUp_Ja=g#_1fk}NIyd|>F25wxX>w-qrt8a-AJ)yx8&6}z=iSwN{Y{{| zU~tuJwG?{xzth~stYIrQ$k%tL z6$MS~MPH8#^OHjkzhF4+_v>q1Gd`mcENmWc;++#%##qV6tHA#;P>ov}!Ty5uog3}T 
z&#{lGxqo*{Z4pV}Lhqv5(c`44$HpbiuRq(VUtN9;{n5#F<&|`>$;WPhBpV!5opkt_ zdV-+T+^ktfft|+UG?Uj_ujHM^)HRdZ^-rQ5v}12x;AbG^ZFSI}>%NJG&s8C8RNm^< zPd|(i66g=y3uB7GQ*$7VXu!dmC5GX-_ux0dANPSG@1 zXbbz2cm8IYVjq4scd4N(yvz9hz{n?le3MCFrW@h3N?^qnL0wA(FUQZIg<7o4 zDj8H7{(dUV2@^%lJ>8Qzp?$=;XOu|#;DyiP+fwq6u)LIlw5=l_+lYnoI_v$BW^EHz zd-wY3gta(=Uv(uloJV~HVRQN(PVs28Iz>E@$zGh>(H_&k{rh=8PAh`)>Dxe&kWJjd4r*%^{6yI>ob z_+7PIQ?|BSZ6g~gySZ-q)c}o74A!ZW)8>zE!67}5Lr++7+Bug~FZjTyH2P3E8GldE zTZ)u9V%>OyGdNwW0wx|ZE3Sp#OhESM{R;qGS3{)VTFo_K&i=iQ{^Hne*I`Y}Dx3B% zo_Rup545V&g+4j{3upgfqK1>NJ`!2ouIr_K8$oloQ4!3A#u$o&@XKP6M0PpZa8b(+ zmfTyHBgWSCpBOGPsXFK@l3(+un7F-*sMXRXMkR&wu>4V^U}9Q2+jQtF%+KdD#x$ny z91TGAnEui{7IXjxNA69E@7gcE*C1FL_lf9M*8(N8p@Pd+@!>SJ*rPJqY zP#>Im6ICBU;NrShx$ee>ZDD;zBLzQ#>0!<#T3-8mJ?w3nu1|ECBaiw=4_W-mq8!2+ zV*{YEC?>)Sg!9gnfX~~kcS(}{wABh|Jr`;kYCLdQux9FcPUI(BDIB`{Bjt=;AYan( zyde9#95N888~&uy<%bkv-X59ox6nn*SoGfKJ5qqmw?Uz({wi zC)ISODDHGRa9mlNG`+T88sbc|Op)~O!zG?{hAVbgN3)ZPpkg%@srQv^ADIVkx z_CS2TFc`SPG5k;Oi8xKk@k)~a+2$kB8sWI;;<~DTJ zQU-V`9SBag{^OIbi_;HV4v(nWhRc*6UGoQRR`HtVD}-{QpXc16ZEO>HQPgT2kk;6* z{%55>%yw)0@cb>0v5svXg@G)Hm`s$1k$*_3m?c4Y)my;6?u6BJ6Me=u^dmOIr-JZx zX%{icYd=XVl&Iq}vuK(3Ona4&Hh$~*gzDhvR*G?r(Ao>L!C02YcRbh<6{~Wv2=PEm5Uv|;7`g~Kd6K9e5 zi?6u?juVG9uM7TVa+T%5hbhMWrd{y<%7}2>3WdvTAI@yX4?G0tuZNxNz1HryFZ3{# z(OB*1u3z<)(CeTbrM~Tr%x>P5>^c`i47?om@wPg2y!<`IO|3D;2?|IG^z=w~<2|H$ zq!zxNR_BRXHB4(|yg$x)VtLKV_A1mI9b?A-Y|3oolb>s*WNAKd0o#B4pQG; zzS9Yh(W<94P`Oe^8Q(b0X!;Lm%`$`rl_EPX)iV!e1d7s_G~GE{Roye8@>}UfHyRc8 zW6mf2ksP0G`Q4U6zseWs)|dW^vwGlTw!YDf*rUz;*B}3Z5s?TAqeK9TKIb8B6#A76UZQeg(n4d_62Fb=Kf?sD^cDc zhhTmJiqAI^PuFkU7~GupuJAc>Et7YUb1Y=d8~L*(T_LS`hpb7B0sD!~@Ac=}kHhCQ zr^UyEc{turZ(+(*6;G_6B)rX z&d?xDN>}0+YjnTP72MwK9ys7tZZ)08gPRc3-jLZVs}GsTW(}c}+3VE9MDE)9u%v_t zd{+;SNfyvWTpc(owRy)%@WV!k0oS3rFKN4dUu%3UBnp~7H&<#zk0;J5(0_Yfkz`Q% z(Uv#vYQh8A;>L6BVmjWG49l-rgV{f;0M;gruVje&YvGeUe%M+%D_bc4Ylj6j+#e7# z-MYahHftB%dFR%2;9|CU)E+nnr&UVNKEaBJZMb@OA@0{nQWUBOawWVH)=dA~k76p{ zCJr8sERYqt0NHU>c2gLiqyAzz)(GfT-*CYqddnVPesy^UsvgxQrp6oYpwDlSgD2p6 zg~p4%Va{jnFVlRpPMS6-_U(qs`T}MS?2FE7kocNk^V>G!?DTFq^3o$XzvwSu7)+T9 zjoS!c+X(K;y`nDg;l26Wx4_ER1YXT73PrEaZ{9h)TN85A!cGOSJnAJ!Z-n~$%Ea57m_G_niJvUK~DXHLvbxjgt3b*kyuyZCA zZQ0ncb7yFUH++28f64MUgHuXP0qc-cDm$up`Pfo?`d=e7WPHN!eMX z&8rmk4*Z*Gl1zt(8ee_zuS)B?d{Ejb-xB4aQac6XnL<$FaK{=OR;fXY=MtphvA+ znh^H8{bi6F>L_sVhOL(DZeEVrSdDSFjmJMFQQps&t@V!Wsim{n&xOV-6)Mfe+RnEX zS~*qu)=d|v%kItS#^ptv()um|wb1cP!(hee0vA;=oYQ;53DbvVHCM=2d^nG>t}ql|u5Eko zn`j#Y7X}vdZzd{wt^VfkK9Lh$wzScmLDUaFQP)7`Nea*=YLIen0xVb!s=lRFU)G;j zv;h!C)}Ydc&P(+Y&maTEmV8Wf`*5~Ij3CZdB9h0E3D)|4H>o>(tb=ADI+&gU(-`_NJ&wk^!}sh^`XqXROd^&9Hj*k{t6WNh z!ZP2GwgjlQU~)>}9<4BFHlA@u;*wxVehfkg?qM%d%ufQWBnXhvcN z1H*5(B?t2)1!XXlBo0GrQ7hEI5$iE&f}`uDUa%)*uy+Ns_ny}Sj#wBqC8VnUIQ1~L z4BAHBRp(|saZp7@_1jVIg6J5^;j#22SsIeN1yp?-9@-EP_^$82b$ z2!Ja`3Qt~m5($V1DJ^;lz(=M_tn-6^=MK(NtTb2(BCrmc!Ou{X-2I=?9!~R=4ll^U zm*o86-;0Zj4dx?yOUKIOx3mG2A(K4`Cg-26ftI-z7}09%K%)7;u76BSSY6o2>j5lOAme6Hq59p^v^AN(H# zj(7#&FFC?XBQ2)1u^-&GX_Nh^MpVek+Iau;DXXT-yUE9G+I)?#^kvwDFh$2PvkuJ{ z3s#;6dp2!hOx)F>;)aU}&@~Fs*oLSZNnd;tSRvVS%1A4uKs-5q`6)vBbOuRj_wXbP z%vfz6ySuq@WvgiP)l&?x5b<8? z${d_kBR#>3>y1U`Z&FqUe|gmamjRBz_KgVgs{OEL{}o3qC`IL)nKHK4rWAL~?MKK4 zUKsfSEtB~NKeA1Q8pmy#j5<`z$N|go4x!6wqq*z?wR?UP-07z+e!TSexSjA$u*mHU z>Q_SR!XDp>UoI=(5BJIrYAIGz3#|C^XFu^u1B#J>l<05#DZ~sPpdyy6*IyT-dh)2C zf+4e$HC*sYr+>Jwm{m+Rv={J>cJ+sLCBOBt$7LhASqUL%1?=->g9WxnSFxvdG#Suo z`#oXW(V;$+QuuHzrLRqelz*TT*X!uIz zbP4fUy*fFMxoRUi&oTuUN6oR((RZ{Ct@^AJDQ9(i9bu-o4p#A;Fu=fV#v)sJZ>V)= zP5i)^OxZoyc7%i_A$$A1G*!i)3x=@e#eAF&Yr^I}?tfwGMBwj}Nep<5F8%ZW1YiG? 
zb!iAb;>z3vyjv_{d7I6|CbdSUKNb)0B7-$$ys&uMv|Tc}R^wFe_`b#%I8OQU?J2PN z0<=|iHzrwl5)S~=?MieK;R)R7O*#(F%ccthk3vPA59xyEGp`^oX>CV9k)M7sVL6@P z+k*d!w(&>0s7E2^Yo6>=>UpCcrNXHvH9bCPZmc?#^F_B0uNe`eH5nP<08`fb3fb~# zi=U!;F}PZa$m~ocPs}g&u=K}cNO5UkEQ0w1gFX5uyG?Z^Vh9jKu={oR7sR+)kEATf}4?CFZO;bfJNPuF9fo`C#aMgGCFT03rX z1>r>sgyNma)Y8hcJSG?nq6i{}CTlsBU_3iMsZjSQ+JGPWtwjiw-qCFFEZ*Hxxg4wQ zVc9rXIFm0>fG*#`MmW0R6$WK(uSl(?@sIiot0U6*XU|TkCBMOSNgg^1l4K4^kbHTnul6wN-bbJZzNx1ZdtBHdx~g5oBzu#a=DQfyttci68;`5u zkOxfB?E$wYo}&X30v0!3T^U2K_>n42I4!;q#W~q z9D!whtHzh@bh+X5Je!X_^2D1|;OWV=wuhpQ1*M(8tkEP+Za==3L)vNoq^1oOJ;C08 z@aUSo?iFTd?=$Xz`j`?556l^)o9{>~>v3D>nO3T;il;fkt$VdJ_%_kOtoxbMpILL5 zxpZEFv{zk6t5(zR6CD9IZ#r7TYrj(l?}9u#$mV;=b+w9m%n{L>1{%fi^HMxi4t#I= z{d9N@wQ725JH~{BGE8pSjN?*nDeBE<7ut^-ucr>n6mpV{UG|{b)m5l3PF(EC6F%Rj zOjEsY#DXs(zj_BZxFfEj4trgxp>^wI&l0*@X32(@UjEoGR>TOiznaK@mD_A$#W9hU z)T6~7*ExkjbIa95+i((ejNNPTI1FhznR@R8@FLMXG>ETT;%v6FA|!z=6gFWk)Kp3K zPhJ>r{P2?A(3}j8yM8(`*hZ`2iba=I{E!psLMDaqNcOx-TrO;i6o8;pup|_|rESHz zB7|4MZ66<3>}<6DOTe4aDuFDl^9+7y{0sQ*bN=E750jP1lT(Tbu%jtqIP!%{b!7Wt ztV;>)=+V+2SmN)a{8a;tLkEanAduvX{7;gvyq7At;`arljsG14#s+7QH~6>hIduE? zsE*#p`(^2kQwCP=5^Y}`^SRTYa;Z@TSN55=*&XRFDp>YR{Y3&fgJtME+xa**zhe8| zK=#D&1eqSDny{2nK=VM%d0X_Z5l*;*XAI!(6|mdVEeUwa>;sNnlS44ssi#d!b;qke zB693NNK~VdsZYfDV={`uRGo7SPfYZoLE<;OdgHHwmztmG>L(c07X*pZsog^LV4MkH{QVEwCfnXd+x#z`nih5LNTHkt;F6=( z1vsie%V>@Qldes~HkWI4(#%UEWG zq^&IzC!we1kU8Ut$l`|$o9Lzq{cBSw;Zls=+BJ0I$p^+LZaDYZpZO-6CRI_9jFS6! zeon;pyS%|p`W$a__-4E2s+-1F!`4V`IRuK1pV~1(`h!9sU2e99;SP^U^&#X zxIf_b$UmUZ>y>qG4*29@lil9+%izP9OO@aT9}0xMukHMRy)1HrklUiAHLifXnSd%f zmKA`pUvCQ!y{9krpsp^jyAgEwr=ELPs;J}|S=z~JvZXg{XY+I{A!k)PwzmR4k{zzi0K;Eb`Bs{T&v@eMNxwqb=Lj`?8wB1O>>; z$;V!I>T+yC7NxDdJT2+dDPE2vK_L{($Bq?M$Vd|ZoC@KhazEJN;|fK5&RQam!lH-{>Orp~YoeRS_nuzjT-ULX> zprqKZUY)ZB_nF9`cK^zgDTC?u)HGMs11UAooAU`_=9SX{x5m(j&jhRL7GsBpSgp8g z&~B~-TKT-Esyaxwj)h9t0yIr%~_7r`944Dwo>+#@MgRBk-#LJd-7=`%IoJ3!m@PVssher6OVxG)bDUcs+4)SWj9T1u^;ZwL%65AHWD1`0$qe6^RWPD7jt!m9*k!&h4z zn6%A{Nkr+I+%HMk{{>q)60e5Il?qFkZ?NxcO8>*wo<3p&_K;3kIF>()vG|_iEMG#@ zdvAq$bTK-s&Wa$3hKewN6H%>Ux-((#$5jRU;v8!rz}Mz&is1_Xow%p^&7OmuqqLn? zgA)%JC+bD!2f`^!w{zDVcq|%MQV+SNQODFU+z^Tb`n!hTYrYIitWihtIqH;VCZ7kC ze*Gtg2eKk(3mQ(MQ6m?>vs9^(AkruxgBPz+HKm*7?Ru{If|m56)(*Gg3y(#}Y_8T% z|G)#0uIB_m7MBLHpwlf@Ds&+;y~Nbnt$>mdKAosh_7vY24RVX)I&u0bpz%hyBLu)R zppTjr1^AROoBl%Uarn#0;}j_4tQw;e@_oV{1h@^lP1Y1RL=8orEG{6oftd4W;~J$? 
zVXPYaRa)Nw}_!I(+=Af=WxM9uQ$}Laj?P+H@ z9)?F0UR|N!KCJ|t7CF+)WC;X+c|7k`8<~1Bph;!2oCc-3Zt*)0LT)+wRVT4xSL!Q7 zbk#9Qgud6j^Df;loS*ylO8H;?w{Pc6v_xIo6yktWkm=y zvhXEZN;#?2^P=>|R83WN*oBK1QY%K<8U+lc6m#zy*PB~AjOq0`l+))eUkN^CAttg7AE_P*Z$yYxSC^N-quQimdD5FM4y&oxMU}Zcna0ax)01 z=&Vd}!||M(B!{v3G5X##t5Vw6+H~INrFO*eDuchcfRkiB9V5YDyF>WVv->z_;>|yC z4*xYES7EB~y;H_|ze;ZiFCN`U)wu$SVl4PZ03*bbAv}` z=x{pE4tqbfG%KKv+C5ExZ9oDcr1};Laf4{Wq=q&KT>9!{aaUz``|5H^m9#k>xG%qn z2cc>K75>n>UWy5^`N~z_S6pGQqSI05cBe$?7EojWPa4zU*Y#x+@{Azodoc$qG#plcr-{I^7rTr{R{zNox5y(>~NBCT5o)kqs685Zf9( zvHudS_21k=yf-r`HWqB6ZaxaVYH08W<^;es84YvKtMBb?Blrbr zi;G5*6C!ZU_PdAOH{Z*1j3+8})be%AHB*>YE245T7J9E!I>*EyuFt4v&e_#|RfA8L zt+U?cOmEG}@#lDEPKG>)1H;L8CrhQUX+FAWcMFxf(sUG$H28iTxX2U|VOw_ZAlP&` zDI@-AA^9J@If8y)wzNG`87==8ZMz4z7|anr9h)p}ZhWrtq+OrEI3Whr`}nvDzZy?tMI(2%)z zZR%U2+EUx5~!M4zi46AdtwhHvJBC%H;ded3LteFhmE~ z+uHPDaA8I_v+2@|ncH|a5s&8`j7G%8bexdl-*g>f^LRIyPxbBSg|{q#&FXDoXE-ZL z++hHwsrQ$`sAoTYm?$yO-5`L<7h9LRvT>jL5ybUQAi1LM6w{7zi~%PAb$$n!&wWe+ ze^UOkDvXOaGff8H*NHZiUNGv%z^7t&3vh!!i(7USzo{V+eEP`mn?b9C01nPVgs+$W z7wO_20MjgP#c=;ib)sgvm66Ko0l0I?Q~6gYv`KCcZ3Bl=zQ~_fa@6Q*wgxM;d;|cJ zee#(zg$ebwo?|s^ms1;|eA0P3d1vE`hmRo{LBPe1le`?%w-W@(Q%sTeQi`1gmQY4= zDMf1-bj~Ab%XxLfwD@cyAsBVy7cwuO#riqF>BfT_>&{XB`d&p^i~MY=yXrdz-Xrk6 zimch+{Pko%KN4@Ef&%&l@^{FgPHMMV)q5V4Q02VW61R4u)2H<8rO&rr^1X`{?Jvxa z3)t5SFbZv)uNs8D>;T}U$FE9B1eAH01!L{y^58ET-OhJSLp7g~12g0kxw1`It$O>E zaU5e#&oG^>CBkk`vL{GLdcj<*jEPCmbX+d ze3a+;!HEZT9A1BJ&bZ4s)1hnRgPj#4>jpn!Lnx&?`g_OVVqUezefJ5p+O!cu?hk{0 zq0cm5n&)2hoJ4}~mB^_fufq%a4|%R2weX*pH4CTRHot1Bj???1hG=4hRp+H0dojc{ z1%oxM$W>068Zohp%;o`X7=4~iCM>T=Ikkv7z-W=bXxY;99`;mV<(rZqAgLmlPs3RrEM6%!fT|Cn z=6LJQb)3dMmqQNs8;l7PraLm?vHwN23MhS``-CCC{}=Q6hlm+I1jJ-^ZzrE2-E4X9 zNUTR#i1Ik&$-Prno&)@%2mz-oVt0+t8U7uIS<=sK-Zt+(6t+4{CQ`&PTjW_qw{7Uz zUU;sF<~~5>rWYxMtIQLN9U$mA@OKmDD^0VypswlPW422hcl1oZ>PJwO$o$_fa#E!X zWZ(v|kV%QPZw&>Miah&eA88nydg!)7|SJTi=?* zQkGd>dA=IDwpjQi*NH}hM*rvsYLPd>6*L-*nq$Vv&e5>`ZYyk{{heRliZvY#FfAfy zJKtvjCQV30rMVc&nTGb6Q1Wq02iWa3=#pd|``XQb;E-r<+wtLEpq4X1#oVs82@C%t zgCvmf6T_ift&V=i(EKWr{9!A!kz*=63&syUFcJ(JGG$$g2>6m!Qv*0)C{zyy$)&Oc zX5CZEx%g$B%VxRNd3t!rW&>}e!KS~O>=A@|sRpdJgRmMk7r~5Oxkso8^RH18ruU|p zZCKc$OM(DGEKenvLJ>NDbj{sCSO&MD&t-XE zo6%mzEB@H32kS~?5?EEL<8mTcJzJz$5c(!{&KqbU8@)3I<&5Q%v?%d7E@BrAu49}$ z9E~H5la{x3di4(q{dl+nW)eDEq8+R($su3Kp6>jm6?|AEA5ld_5<|8LQoN#;_y!oh zruXy-m_o6LC(U=hKJH7qu)->qjDTX1_{BUZ@POH)yKPGOPm^2saqM=qt90 zvLpD`ptqUC_ObZXdrwHjP{-ETFoa-{sVDGJDvLm-CpGuGY4lbAhB68P#`9k=B!Q}c zD*zU~(~8)sWrM>X0WVTf=#Z%xUEkA^7t@hpad^8IgTh!!!%>$~!9YzyB>375uDjUo z>JaWrswVD?a{O?#;N8nG_YFf8T-48!SdafNJhGsF1n?S(hTPZc|93#;KZTW- z_!(H$Gvd}(dKN}8fE>j8Uvdx+{W0+@7j;I~a|V0~P6G}4hH)F9AJDjO7zy@kFE}4c z`y%>w_m+UiLU$%b9%PB_PfT95gON$RS=t;Q9k5-Mm=7NC2WCa8`Y zSUjxxCvW+I4

    @qS1n3Wiv>ma8w2X$4l~Tf-8lXu$vfTJBMW55$>L>>@*)`uXg3w)X}CsV3#1N(iz~&tk+c_>#2&xu z<2LhMb7IJXo11aTF&Sr{%h!G?7Pe_R5d<7k1JAok!tWfXp06zSQxDMIOvYgG$Hf9l4?#i_ zzDD~IA$Qf$*I(zTp$lyrvL}VN`LB1)4K_H-G6Z^MoH$mw*@3go{fg!{yM?A~5qkDVbEG zcj}4_ZqFvAYi$(@D~Dn0DiFj!y=cr%Mzg5dwGI#n~%pTc@^y@jn$aH4D?lGg>QO z_3c|w;mvkH{d_u7_wB2CsWW3hRU&`$wO~=QpKgJCwujkPO3-vcgIu!W?uRgNUZ=TA zYP3ERj94YHzex-!$z#3jFqV&gSq%vqId6(SuTFqKP2^=6-;e*JM)akv_zD%=3W zz9Qt;Kbhq8=fF502svw{Od$~{-KO>`kjXEBO;>j&8YgL zYHU^rl9*+j4ZFV%eKWiVz0K%Ft0STKZ{#0E`kyGT4KFeucQW%`9FOpG1;~w26eKJg z9_Dz{l^p?U4sUtf49qrwsN>}spr7`d@;l6BZX(6O)ijcxA_%z&td?Ceh-MWHxnRKo z!vvuH7RU6bcLSAk*%nyFH$iRmQ|zJ4+3VI!k2sfAA&h+0*!$v}8BfDOh%f&m* z(Vc6rM0X;Lj5U}e&cWEIXk@UDz+pV-+WxNy>-pL7eB=3|E0f`zO z@jbOF16C$RCnT=5oO3(P6dTw+1J-s9mJA6m|Ex!pFp*H6@_%+l45L!65}A%6j<<17 zr&Z2oBo)ErU%Y6XX4KLb>tz!()hwi0|9kolP=3moN6b=TDwVHQ|2M8F;crs`xuir) zh|8;w6;6V`UXap_8I%LHUx%=j8cjz1X>=hO8ghUy&P68kT={>IDpAzD9Dhs>V4JHd z>J^1VwcB|Ik+1eeKXziaA1VPHFdGewPeJ(m$hXmLYfq@d0xHL`lU+GU+xQ1;5lTF1 z4gpEL@YnC%Hsbn2myl=q94V@(>~p(z)In`%gdk0WJoQ$(Ngy)c;Zh;l4H(Ibdq3pj zM{r+3X*fNngT>?8wajiaMsPM_hD^A2te-zNeluxq)*WtQ#Z8b6X>?5zCJmup)k4|v z3h5fj88CU~1_J$l4#K*Di}ozp^({TfvlR)C22dU!<7|#Kvmks`)|KFy%g&@Lub)Ao zHHM%pnR1fA*A4iGtS3YhNd3Lsd6IUqmi&sWys_rj$5xyI*hQ-3q93+X$eyfzy25UL zHA0MbgS6dJl9(JGJUo3FCmC}(D#}!JN>5(pLO|DPbXeZ~GU?CzTbyOQsgn1#!M00z zq)bn1g|;Xvm)0NwDc;awstBFo!ezy6$z#0euLhDgo87`o7C+g3W7wZ2fhLOw4`>U< z?>+sOvTOW+YV=l|t1=7@(o82hd05?aCYNs%^wKR|tcAvanF@$oin>O& zBps!PwJ7QJTLzeiAxRB|#pk~dXg96xJawIVD$~KaGp{+?*r%P!3Mv@NNzfJ?8`SBk zXpU)vkvP76A@p&-9sv@v;7-R!ouRih9UPF856dsUqLmmf0cN^8;XSXHJ9n?5%z-d*YcR#ln3Ync)EXFGpYR z==}tz8+6u+V9oS2=aQ*DH$@>3w31NVOg=sAq~uyHOrt+eL(<9@pzB^M91e~vyC%kO+Q=zuHM(GVVch%;&YgRS0 z>!NJ6j|iT{Q3A4&BVCf$;KjqX@L&2##=Ygc=e0b$CJ&v3y;#O;wcu;m;x8b0*IV>M zQa8~ZbQyQCIa7hVOE+efgO#gjyxvtE6c;eHVYN;rFm|khvm_$rJB$RX7N zjJ#xY1TjaND`YrM2N0JTg(?N?9I?2G#+e>eYm;i%lB50Slf{;4@h#mG`3v-hjFYM%e)sRU1}LjZFF`ImS*^`iQ;>!euNMW$u?S9p#A?~ z8U=u9@u#2kl4nBY1M-{)?v1V@<7ebX@1!-ey1(Y0iC8VIAwN_Lfmam&i6Iy72a1*V z!A{>>GYoJd!30G$R9{Z|?=YEx$b`9oj*+r9=1V zv8JXcSfZ;qw5b8yDWJax9sAk13nsPM>%IES;axwA{Atd14{=cM_TjsxS=V?W4FNKc zz>&{p7}TJb6lK= zAH#q83WD?l;9YTZY02F^uhZb)ozguXk3VfE6y`V%BaDcZ`YNk3_7V7(M?^DM z_N4S_v4_Hcxl_C-kEqWXe-Wr|eG+|NP5vA!VcNoN_$_PANc{4(=jxLz?D|(DWB|?E zZP99sj=r6l-Kri#*h)n}KHy*fj_g7McSxZ=&|U0tMBOchtGW~x^YjM5>)SS2%5Z)N zQ3;V@nw3UwU+ZtNc+0QuLO0C^!g-o|DNd7y6SIZRXvTM^FZn18l|dH8Bgg*g`}Ziw zj8EP@E5L$MP5(H|ps}i{?lFE(`}W?yT;9A|v>RT@*Kkv~9d=0s{fi{cZ*2uS> z2qZXy!KH+lc=1Az9~`Rv+p6@p!2@{PmF1Kb8H2|L*U3q|>ybPDVLd)?X@5$Z=vGvd zJ>0z}4Z*!*KaL@E;Vd+Wl-l?dEu;1DVcXDZbK6E7)AfGTbNtmB^0fr!j&xA8#UHq&WeucHz}iP^SnxJT*`G?8+x+ z7qvzzozi=jw*6tFr$~O^ef&TSFe4?c@94RC>XA+I@$aO7WzWiF6^Bn1ryvVmc0z{bnsAn~tppo>==|FL{8pSUd zydp0EiI%G>cnBw(FHyYsSd)51)uI{GlcbV#JOb=r8Bz>{$VjAjU$zvu8)e4xjD#M<$K6eNmID*WF6Aak~`$@_OFKSB<)*Ww#K%S|Bts1d?b5*@0kTpDFf0l0&u{`dtPqn z0Pp@B)(rk31<_SbD_jzU4?L%dy{^SQ2#_>7?n(njNj)M}LkO_SLz(UWA#*Nk)mQbr z*&!00kAMJ!4o07gi4-w0AmOC|+GP0G^}D6ySu_uYGOF$J+}EQR#S zhK`1{=1>Zped2frqdrYEVFu`5(&-TPoF_+S*BLHQTv9fj6KXf-vA9CFEkD_w*Fo_1 z+8KLtJq4+`Vg)LHjTh)z5CQE^b_VUow?xFQ@kf6(dE6{%D_N2{Ru8_{baK%p`+ynFf-JPy*? 
zYxjZX*k_*K_s|rpac_R!2y3kl%T82eZoxi}dPvJq^$4S$Z+=pJLjq@VroToE!Uk)$ z1UcenUN2L{t^kQAaZU8h7c@~9dG$P3cM2yUEqhWq-fe#G!2#q;h-v+C5()=hCqp>V z{VDBd;xU(}oOA{k2o&UIV?R^ z-E+=Nh0)s`ht30iekz8+oq~NUO%yQ6?yk7z-{ztRjHT5hZygZ!I#6KNH!X1MdxI9~ zu~qV5v2wb@JpW0nkz`ZDcb4XP5y<9$u&pp{_L^#yyNEqzbDU2B`*hSOE~d|_&n1BZ zQso}``V>3lDMaD6Ec#OFMfV2g#8B4(kC>^P%dm2|l=o!d*+dm6Fo`Y#r|6C-6M#Er zG3YC5;6;)hzwhxBS^oodo$1IL%Q?FWmH_`^RbE&8Z@hH#TZB{30v$0imB4w9GR~%o zO&^^ZOjyR_{b}-iD#Spsj$D3AJ!rcacaQ!X>zXSF=GXkago7oeZxn+`r55J0IBN># z;U5om;1V3XbJ(?MmPuc$I!XvzUva8UPc2;ynGNU3n=KADlq~%@0(Oao+2lC9HRpxX zKCi(hU*C%-oz%YRz}Ez)iH9+O`kpa{@x=F^+8C?#K&iw7O0X z-xFi!)$wya0_$u{?0Z($(Tl(x6tha*E-|21Gp-eLapzrk)cO~>YbO}N?Uwz&TV?*I zuJP`EikB-`EAYpeQVkYSVkrv9DG)ks^BcMd2RX*i@@INN1uu!x4RGj$?Rlvw9~NPQ zJ7IfSNG|4RYX!bHDeC(A8H3Xc*yjeWNuCYP0-g&Fvfb*`XSMct6%;*Y6xrw=T2=;X zJ64`t%tG1gyZu=sg7(KY%=u0gvj(i&dG@-%(8*wz!KJ?p5Gf|9NjhF7zWuQh!&hRx zzjHp_)V24uUw5Fvb7}l1|2|jc+MqpX-r#zUfy}sti8Y>G!5KfM*mQHGa_Ais*ukIo zpxA($2@I{Dy)DvY7+ZfZ&uGMT*sNs#aKu*JX@Vur@*1Z&1eNZ4A2L!!r5kUI&LzUr zO}364n90E{@@GE92D%Z{u9NDruu5%mg;-zX+b!=s%aS`aXkpWICI#T|?T!>mzq5Eq zJXt;+nIKx+WgUGU)O87FlQbbk9Exi$6D+A;S0Axps)cuT_!H}CjpXu>Ne8KTHKiEO zgZ~d*UmX=?yLK;#fbh~15+aSnAT8b9odeR+(vpLM0@B^x-8GaV(%p^3&^hFgzlZmn z@0@pi-}?QSCr-)-GVhY%f!tqayoXeQdVx*9!V$JwQ%a zlv-=O-AQuePdD^HA$iCLsG6s17oNLjMqQ!dLp0XBX|XpJAR=cLYFBJRhfbUH468xT zw;Uk}iLsj4I?n+idD^-mT@%;T;OM*V-ITq#;&|gn4WFA{ea;E-m{}`apD*3jcE%>P z@Gh*(AfN0uC{As%4wc0V8T`s?!58B2u&7;QNP$f7pk)MG020$0>P z^IUSC+RpTKJ`mF`+o)j!;LfA|m9CHYv4$s=SC~#PM2kxY6wY==5q*9M)ltoBd1kG1 zN6yvy-|@pJ6^f}Xge4?5EOZ>|i!U^uU4i3`Rv|P6JO%es`4n+f%wH$@x9Ob```j0W+Qa8`}u<*y& zjdQ=(E|q-m)ump z>)+zqY9Ze)$yO!}9)et_qrT??S{1JxP%iU$-lLgrnK zkVKpzj!QY2sKySin_2kZ-^ldl0xE?{1~k)?JMW^|&n711R@3Vl#Nlh2(9@-g*F( zT+a2z<{O|wxf;a)IT5dZcb{W=by_+fc1usIGqLdqO^Nj7U#G}NR-DiQc@~owv4-O= zRScT)Fk$eE@c0`|_pV*uIZ@Y6>i9$On(f`O9Q&>a-R`V-qNVqB>aw|mcmEsbIN{OO z6knZrn^7%5Fr_PSAQJEKFXiq1jt!quIM1r|(M8K;eTBn(As!pAG2d{&kER2PDbA_( zQ@?_(PsbV8MXxA&(5k%>CeU`hQ!c5~WqubHBLJaF2^wwU*`&9huO%Ty7js-Alm7m4 z&?bFJOnRsaWKa9_VQEKslgju3I#zh1BYVpkVw~_v`srqkRCvCqa27nDc#}~^0fRyC zalYEAtQ8i26G+cuRzA}21#}|_a7)72%+i5F0q?5#pY=1_fxv`VDwHCe`b(JRs~{4> zPF&CX7>w46Fp+P@7(tWY)&Z#bM{ZL&6_lbGueVJ zql#$KPH9W-MN+N9SaTG5!~9ug+DW;3cD9(e&Z%)pdvvi}_iNiHEiijZZVI7gqN*#c z^W8r#{L(h9-JZr*JQL_Cu1$mXRg#%nbPwsApQM{FbHA7dnq8`zY^*j^PR^gV_6*&d zzlw&{DMn!H$!>e2Sd)?5L#SL?-6+@6pc&rg_MOwZ0tO!j1(*(^yT6UCQEAK8-0(tP z?Mg0Geq`XGg;w3O@k~G=gMiMa^a>o(yjv5jObSe5;il|x!hx^R6b41Yb@YJ^-UgZX zuC=F>_doZqXCPqpJ4|O_OPbZvd99f#-+xCJ40zif<0Yn=|ED58`@14Sxk9#qil|tV z`u6kAyrS25SD8`juWw0;HJ&jIDa>_) z70|nOSL!U>tLUaPf;+S?PekJpF;buKrq()#pl_17CUcl~JsunLLV z%M@AQO`ku>HKGse*6c86Lr1}r)tf0&2^LC4XX1ONCKAzX$P+P(_=%J9b4n)_(2GVi zuiq?Ny(0W0r;AdfEsxQM#byBl?1#(XVmni5`*IQNoW0TlpiXGOG%`s$TWowKZh;A` zTthjyg{MoHFWe{Vi`>kle6(RFf4%Tmh#$)2<-Jhi)b|XX*|Ub*6?mTtXdzAMJfmZW z9tbBgP9F>`nBumHlpQhuG1=S1U`BG+yWsHCBaXa7d(D^nwDHZ9%fj4jTCl(-%|jDl z2{QZ?8R>5@vrLl+>9?RdIJlVUSS}OLYQ!#MD{FAxxc(uP{PE*y-1=J}OUb!jhO1o% zJhOCOAtrGzAUc!})VrHB|J|=IME(1jRw>@Ay$RWf$waHdC1tf!FRnqd%FQRl_~|K0G+1+)EX!F<3^%JGq5J!@ zx_8LzfbstG^3;?&O*eCel$=sHQ0lc$JY7iINR-y_%*nn(^&gr%PsH@DF0y+rX|M5C(%$XrJ7D*^wja zVcQRH4s!$SJF$c5=yFP*-^D7&r}3vIC`D=b)4#||@}m|(H%W(l^+!~KB~re8yv(O@ zu#K;^89q;;mj)}O@?*}fv8ryd36-*Scb3*i-?H?%1Cx%5m-M**aE+;6wHs)$o(?Lx z>iY7}2~*KVUt5-}KPq-RSh+%@4{m(gc3w@4%oMo5tLFj6j&qniG}OWo$L9d~s1Dmd za-g=nhl@@6K94_RsOPiyA)T?mNFR@ZV`7D|6xt;H*G(?xAL6OBYeEE98U5basU-=X z-vl3OQ)}NGWpo)QHX#I;=)K?VCvFw<@7z!9lr#V<+gF2N;?kCGluF zkO=~kwanh$>{s`o?tq+tk?K|kh{7@MtBA%|3FBMS+WX~8;okNc&Y<>v3riuEQRlbo zpJ_bASVV^WRAoQiAO;Z5D5gzn>0n?p6K4qxJGgXa?R8Jmq9IB2BKi9F0NH$j?|}9# 
zZ({SwR?=cpd-2)#zObDX`OIhS#~he#FJd)`O)A{xE?%~m@qKEpus*;;@TEpRlZ{b= zUe7ijr0r9a&s!RUe66fcgJ%a~p2z36ixg(c@x=E99&^QTx8aubuOYLB*O|NKKc-Oy zSKt>9Mph_sy2OAdI|$kaNVq3lq)M3p{VY;=kOaq4;z#cE`Sj6x`|NW|%RxcL(gh#2 z1-UbjY2Gm)ym>0B|E8H2O<|+o*Er%aQA(oBvD#DpjzIcU{ScrL>Igr_xrGbUhYF<@ zk@kQyZ_?(x?-3Gqnf#Kg4SZ4c>gv5{SJgO!jans9Z*%V%^K7ST{w`z1f{fwUf1(zd ztO4Lb%WtBVUqNNW-kfI}tP;;fO3>w6uI{%wC5Td&@iTbZ6;Kql5gX#7_^n)aQ0D1~ z+3ACG`7=scOOV&&KiS>D6NffM3xba339||qntfA5MporLwi5e9Hh|4?015&|sOP6g zaz24wDngs=n_ZN5z2w(u5Y-k_CEG|~3Gwxhc&YO~faLiFByZBR{z@y!jWL{ZyAwJz zX}F(uWF=^@1u)RGMch*chXrz3W$7B{BW8S8&ap^YpXe?(ilr)1#Zo%1^0r#Pi||5z zDYGL!&qi*UEUyCS-;`Xxi9xD4zI&h>#=-~XamcU?v|zMYIm28hUASmsxuQ3xA>nq8 zutj4I?-y$@cWY_sr7XKq@fn+sjD)|B!~w;a7U>D8Qtakd=hi2Uh^^<1ucu!R4D$gg z3fsFRNt-Fw)%}zF`J0qyk?2D^0Z~IgFZ4M8nb=0n=Z$Vxthq&c(m83}yoT_Nn2i7E zaVak|GT++Z3kRin7Vs$(4GHZNR=!;TLeN~k2pTN>@$(j-FES^qPc^GC2G3=$G+x32 z&v%inIc%Q`l+^AG3oqDxlq`>4*O8rUlnhCXZG%fJ5Zx7ioK_0HtvU(g^ufQl<2$2N zugC|uIb&ZY8NYUC;jky?gw6dsHfJYKt}_f>1U8-;cx`$dH3TUwytcW2w-5QnpI{F61)E600=6TeU>$U6~RvSNA^dPva zYsiR=oO%^1v(zxwZSzhAdt`c3pcOFbeY=%&U5lOHy6a7$(Hu!}uZ$(7S%cKt{c$}| zFfy%xEG!HP`fkyUq^95=x9AkrDlr$jz5j$Wm&E*{!RIuUs)ldBHj^9YR6T=w>++ef z(X@7kx_}hy>vk6dH)F+Od^fSsP9u*9JbKd?(3{yM9abj(fyiK2c(RVv+&*|F1!%Cmu`?o*P z65y|{dmwf^!pEN#wg5^F_O;BcCa49lB5er`K?N2~T-l?8ye_J#R)wHZ zQ^}m#4@JWT(%*MwL6nWpbCW8H?-F7lgCHihH|J{0?9J@H8asrsFDJO;g)x0rfB74- z6dFtkA&Dr#Wt;p(NrJna$EV7M2aoaYNG=K-+vnwK2g0TExpB3b*dsDAN z(n!Wo?VI0P|}n%e_iBFd=)j%u+zwt0En3;1VW>2=Ib zoXguVjSLM)IwlP;YbilY8sk>Jp)xpe2EL22@3NA5RxpSgs{mDg1V7T9({b-q#4q%;~3nKHj zFG|15iqY&hO8m`f=V>L~e0ie-TaQQlGukbS3N_*qMiU>Dl<8$P1hr6VgP6hot7yB| zUSTs=8Af0o+{Ys3x_yTRc@^QRl&$fY0>kgCTG$4TmU?s=FuNDl(0og!fHc=w!R{Ge zoE85H2|4r@hhx|aqIBCI0U6SoYup*qSi@Mbr0YR7R;opD-B=cZ5ruyNw0R>#G~!Df zJ{0wNC2WKYYyVCFZcd(65k=!n&pafR;kQU%_)dy=B)?~jeV~-h(jy>-DV)>(>y!XA z8@j++tUhF|t5*uGyt9PFoC!Lw=*unJBuLJXw2E|`fb9~8W8O>+oRJAw+ z--Yj}j3kIPWve*ca$`iIuB1cU$zpvQThixFzVZGghdEb}%u<<+iD4pzg5EN|T#im* z`Fg*|QB0bUu1uctm&fe<<;HQqIg(!}#b{%l2cecfc6Bw3(HuWMQFU3o6b;EY^8PUi z#P9}KeXW<$de2&)rWui&7kcm}aaE^D*svSVf6@6ctao$~fanb5gr?coJ?94HOLHtYRy{UG}?o zZYKxKh^0AYphTYemwiL9b$U}tvrt2l@XfhCm*3qAfxkD#rCT@>4X2ce>*f*JI4@~1 zCG9=OV+VoMWabiA5O`~=v+~b`0=pUwlkDWR$<1Gx6s*w6x{Ki5PasO>2p}``yxMVr zZ;m_D+G(s|RpxqJ;-?|osoIK)J@T9uUHW4>J(ES_7vfgkVqXg#Voll=jTUm|4$V!2 z)tlg$Ks;ceLnzity{4cpmW79Yb)MS&i^}4zHN6~-LxpHQOR0xp5~OOdpcU2&BFzl! z*O=m(|D&@d zGp$crLHkAJ=RIOsmN4_jvIa2b-;OH`SD#44D44yj39mvAE{l5slr`-VQU8~HbY)Lj zFp2}v1Lr-sUH91f)UEL$kbBGp@nd&k6Y|IvnJ~vIK@=7K7x=Q7C%0N~%CAnG zCoI8S*hq~kz2B`I^x8^aQlLBPp%upAX;3!;#QN_SFF+gKwn#vjgZ;zNV;F6AqXRff zm?`=rrioJM_Po@|zm>l*KLgNxc2}$ry+Hh=NSkqzR)_DO^$kt!8lj1Jz5d zF@0olpOj zc!M=|rzv6z#uE5sN^F-vToA*a4;cv9+k@%oK%||w(mW$~^EY8dmudH$EK`g#1+ooo zwZ55=${!>Mm}K(XPR9&{wg6SHr=HL@*TlGl1!o3C_(;%Y0!_Fo>yHfVvNp6tePK~i z;k&lhW5x%r3MoX50ub|3-P>o+U7Kuu4?2rjw8DnjKL@-AyIb&6!L}_VTbxl#mFDT6 z#lOxs$YwGFmRg2jHGb(8QiTiIOWJD&w7a|#Tk@0d2W~&tUi(>5j|3jS#J&XNiT`LH?h6odCt(E}MJ95>f# zJd5nQtgd(@ueG#2f{0v|??McJ6DL7oVU%Rp5`aLSzl`tmo39nYJiDSzt0VV=zc281vJuPff2!1Y&UqFyO4U|eSzei&*2JP zyL5j`gW5c5+*@H}&Z?ha5fg9HiI{6YrXzMEtvu_woQvLsKBkSjWSQ;tif7*Zt-ZK_ zb^Xm4Ap_H?J&1t4rAQc0sJrxlgz&B<5^#8)m}NeB;b&yyft=V7nn&jKjL3P^zP;#x z)=P(-x}|PukR7!k(z5Ok?3$#j(zXG=>zXK)VG6?bHmK%x>96%%O zn-|t+VXF2XdlZPETRlSeuOl1iQI3YjhluIM1pDEF@r&Ly0uiAbyV`ZP%2dO5hI4fn z=#yn(g5PnN2pZ%`nqf1%$;UvjyRWh{ppDu$8BHtyMC@ULi74aU`8_xEuJ-^Tan05X(2!nALwG+hqFh{YoYc$bqR#U1? 
zS%=pY#?R{%!AV0NAKY**+Sm+<=t7g5w3ra4WuL4p7lOSw-4$4wTihdCdE29qaY}bz z+?9@B42jFb=b(9iw_hp-^podaYp_#?Jp}5@%Et`R)5gV`!9b;stI&br9-&7_cFAjx zQ8P32jtjT)Y4MI{_RpaPVV4H{=U^YA|pHWk-8?qfowQp!!P&vv$YbPrNX3OMLtBuq&5r8dS(`b!c z!G;3e@QhQ$%!m+I(%$8@EtT6}NDB-fDEHAy1|BU4!bd#m1}*zSxs~4g!V@D42>>Ow z>v&SMKxEi~x@^t;5ho~BV#;0>+4He(HTvvoV5Zh0)-ve>U5G=PuTgxJb4MgBePA1g znN(lJCl1}j`3v$6qOE#zQLhUeCihDFW!$RJ+@@JODzG(|mjvyF3OwLWAgp<@PH1^k zlUM9RaVxHnj`!BC%8JOha`Qb~&wl{cHZrosvjNbj>e>IU0Q$RVjmo1e_VLX; zN%40%&Et1~qZHV3nn+#~OoK*V^i+o!0|QP#SK?Y3YBk8au?2OtttEftzEThJuf@)b zhZ+i|(}_N6G(Q%lbp6JL)5s~2lGlR)qXetA zLAaf2w7g;xUtv6X5dxjO`bBr2-sQWzh9k~oB8l=LpdtC2{b+)>U7f`7B-|a^H78}) zqCZ14M0Uq^?38GfW*eI@MwltcfXJAKJ@$yPwliVO#U1oOyvQ_erKPTZ9V7Z%fX6R5 z`4}9K?pSRt%%Tb``sedBn~3HXky8!p}x zm?sNX>GcB(Mcf_*7j=4y`*V_0Plk`4+yA!wa;jI|&}`FY=to7l{iW)jB)8GCb`H*U z_ZO#c);&KE36yvic23yE%w0m8=t{Q`tOB~z0_~*eGXQ71P<>DRiWluO-zV)g2+JIK zc+W)O^=KUBM_xcwchaIqWx6cwsRI2S+Hsg?6cFkSq}DwJmEta8L?Ye2V~7r`Wpyi&%7AgeqhBC&ihFJ9wM(J8#YHg9VDyo1<&OOlw99J!rB%|e zf<^x@07JFys6~#)TO@8Q1gNmS0`?BEO?8y76NmaU$P2o$`yr@s*xziKYRqwBtVBW1 zpo_xCZVW;@!upks_h+8@!VJyqV2!=F6p=@+YdQ$vx-{E^RcwG*AQb>$1yY<8|N1t` zQA9x-roK0!eebGLaWVV*;`v^~ykX(8CYppB&t~XoQyEC07q!HeY5_91NGe8&}EFNFB+-roqX9IBFm=LIP9onMs>Xc}SIAb=bW zp3RY4ePS$i8i3!1i@T*=2pMBP^T^{liQL(1ZS?yj61LNa|GP}+T#FJCX{+laX2bWY)j1`etFS6Dm zg)XOMNKAcF3T_cdBi(JFvJ_I`yt^BPRknhlZJj~Q+ad>k5!Jl_uo zVCzj5Hc?ohN(X(s0qatn#tLH~s%Be9`sgqb~M&6Mxm~Ii;i@t_KW4)}}m( z_w3Do9~?uw`Tt)Rm(A1+-jKkIEh)cn!dqRE5-h2Eqy69EF$#l!zL9;?9Rj<>C%3@meN4h#DNHrlv(Z8~*A>_a2QKG|9sM>4 zh(|6dJq+LG12lPjrJ=C)s4UM8DGF3dvVLy|$=3}pE*%(=zRr)57V|fn+q0U_3cvP~ zqR*-SpDX|y0_kd}6hZwFKQ$5`WdL-xE3EYZ?w#sVHzh?f=23-Rp-;tLESp$EitUB& zKsZ4$@CckmPf>>fdb?85HSQV?Iy14?1dp}m*$0VT1lyr|!z)qa>;apyhHxlt%gsX(GW6gLgk!Ih@$^lbR3F$_qjz+#wH52#r>I7 zpuSKib#IoqsqEop;KNaWXXT#qV`QQ|=bj1>nRo-ak9>?pB9yn(o}acExM0T5WI37r zU3UNOLv-40#f2h2mLhLSuZVAUzg#>iZ}jcal8MTX7xW+%zjn8V*w{6th{6pVvGbcu z-%a&U^no%oWm2qpBfxjY*fE2tzfCvC?#5~n=C`vfP|r%6SOvtuJi;CT5-9^$L>@f8 z^i^KB3?kP4?>2A7M8f7J(B?(!XEwki+ZfD`2L4l-0$?vP1WMYQ+Z8nDHXg=bz2QF( zbn;b=)ILbSFAFfmH%XtLPckgAEq0S1rkoe_Z6E|3R7OmR8RgAKy1EoamLS$G|6kz!+AL@EIY^+@Br4Ga@7E)pW)S*h@vG5f{WqFeAH?bm`kJ$il z>J4^&2};l}NXs~R;S0`fd!&Ow_AZM`c@`=*T;Gd(0wF z{)h&!3b77}0#Wz;uhIBh71-BquI70R9Z>o6)s@6pdk~v{Y1BV`=C6^V(`x^m3H}c+ zg$g8ZhtvPrRjuy(w~W=AdIpxfmgG}bgKqn%NgHhgWw5vOX1eW6Tk<_h8h92IfZ?A_ zZF#X98zpa^L{fjTd>((Qa_ZBx;(xSc4i(i>^0%yx06$VSlYh<7oM%PobwEj-mgA$1X_@;D8d7$Z3K4|Sgac(e`@J#IrxCoO2n|n_ z^7ZA?N}ftQDiCR7w2Ngx92n{ww5%>s)*$vu7(T9(uNad8HG^AS#ru&URiAb>}&PyU62^JOgY8GH}p zjD0=BAGsPB|Fv=8BmO7^eCE5iH7->geis(e3YR~9+5E#x{=_Roa@>u4Id+4tdD03W zY_SFB?4tvRDC;VI*?TwB1z?x;<4jt%@aLvbW$Hr12@nbqk1x8MX=n-ju|7BQX`FN9;%w z`(vaA+x|3-l~3W7+JQ(XvVEa_*98>j2nSPd(MveDPOy-bmQjCYGmp zp??s1*O|+jj&XPm{t6M1xC5X3#CHxCc*Ick0bQj|(kjJK!i@`8{%DGQus^UsGLtr# zoC-&w$bt1HNwNY|eRkXO!{gS*jNQ!LclFPU&x&H%5QT4qXIb8pQ#(Q4SGXQR_XZbf zD6NflpA@9l-F@r&+-1lEs0$KlfP?+fFkvI=aZp&uLlrX4%}KWa+;jxf+^MmW;(kqB+!WaUt^Ti zK3?nyT}04)XvxOrhQtW7E7e7sUll+xDHVm&uWXF3%rvLXMEalK)k>bZ(Apf8U$!45 z{B0EL4SRy1pXRaBZVs+QLV6@}au(dPQR8h1O%6x_MV5ZN0&SR2*&55M5J?(?`QB6> zH)?2K9X2+pw#k>h)S%}pje5%>*U^ZbQX`9t+a5>~R$-vWj5QL-(AvP?3JaG}$v?>#$az>&7m;$;Wo?sO*Qf&f3E>bxHL*E|x zB}>sFzd@;2hYfSfanOm*Jf;Z{zbFunSBlwcb{+}PNiC_6^?fzE0s=#mVk15~#^m+L-MWNi8r?$5ZjTn4)cg{I zJa;M~I!3fN7F3jUCsuU3sB&Hzr6bw!_<@Uv|d^qPjG%v#r9 z_6ko_Va+>-hhOwkU;ZkOW50Z{2-Tm-l70J9_}i;BXweA=X@0oZj{EKwzDzG;vQ^D4{HWdhadDkPDHAU9qN1= zq%HYMwR-$*gdf{#8nD2Iv|grpKF3Druf??R;YIQQMHkz49FSAIbA*-3JCmKT+x1g@ zXcoD-j6cu>sPnR+f7Kkp-EOO@xVX!`dL0PK?yJ2R02$1u4gEH|2c&VE!Nn90 z4}bo+73F9GMC@E`UCR@htG;e>qr 
zBo&Z})UfE^4og`BOBvO&uOU!`26-$XcLS))Q`aqQ@Lb?mU?j-iM@Na9ew8=c?2utj zW-`V_IrEvgJ|Iwyg1=r(dS7pIC=?ZM+P<#QA-$(~Ap7Q3spIgoyq;>Yb8z@U^tndK zgOhKRd?e^3znxmgTSi!WP;RE-CV*&ghHuqyg`=2qdoG2`K_V*g!T~)-OJ$V9l}&7w zCB)rU%U9uMhIZI@-wbXbPVm_DP4n=MMdWDIqd%5}vIgOw!y`c}$^^!}Y0fjq2;GA` zcntZ|Pcre{dUeybidBGfq&nq5J7u9;sZmaQTpts;IYjr<~$ElckXG22%c{Eepos^-|`{SGSWDd=H)#UK- z7cr^*Jr-R>cD|vro?j3&4>T;Mdx0C&LZ_pJ&v~%lFK1)TM^$)kzgYWBUzVSe1eiT; zex%A;XXu_X)XKCc?56KLDu<-VRuHRnlEFJx#u?t>8wGIoy>I3;V6b<|r(ySOITp_M zROd|ddU?ZCiz#@-dsuV9^M1t@=qG)9%NeW&lG=hpjTII{q>J;_zxJGO8c)HCw>M$`%5+xACFwaCs!VxuusP1e2OG!BBMXh3t`s>$~SOz`3JF+ zkQaO3)p|jJct7#j{M|F3@h~u~_hEgMCUG*7)c^EYP0l?$L& zeTkx~H;@a`zd8;|4G=-daXO8PQ3zi3&M2}mW&KXfk8}g}RJutyYfO2R1N-l?>#iy( zwed{|mlW_+g+k$Pyn@V@@Rq9&#GRwDf+MPe%OSF$aqb3XSx_f`0}$Xyj$Zd8je=`f z3%eX`2KlaIC3x0~%|K7$9BCCQ%GP_5nOGNWs)A}HEroADT&ud0b3tJaNN|}k2`VN_x(USZEbYmLP!gu%n}ko!49{LEMz1X65m}r% z;UzVbG{bR;97u&$f_`+BXj%U3V>qH2%(zR5xE#))DZ6>4E>4M@yenX%U=n=%fE@>0 z61|_f?tRL%W}!_fshQ7Hre^TDD#HGgEQrar+U@%=B2PTx(nygsN51WDPV$%DUZKB!$i3xout%Qp5LAW`eEfZj zCXBmS40?%3VwIo1$gM~DG4HV6B$m3D`b+de!j`Y;PF$1WNkVgUv|&Z)D1^T#L%h{l zqk%T;PnxfOheZc(``G>^bSC+iwNgG|rH_Ri7d^MAp2L@+o`gl8S_F#F1><)aH6PU9 zBdUf-?GsrB@i-rph4oi3r#ymHU`W(Y7q5rw(WagUXOvWYgh5??8yaknL?2qb+;Vb{ zpCFJD#ZyIjfdqUZ!9ZHdvS*wNV6VZa(?7R@ zHmJjJDX=!K;iiUG^+*V$laOqeXh8{gDhN@rv6cnZzW67dDeY+ibK=gN{)_HO3f6=! z2=z%03Ay;EBc<#Pd}OLcb=X8rJ)cDvI)dI}MZDkGb3Do(Jyg&_p*6}duds`TIh;4y zn#GV&czPL1sEjWP{T}8_%kv|Uo9hj|%vcm5(+Iw_w{|5FqKukpjYc?%1`oz2_dxy| zRgY>;cII_0#-e%4C~d;DGGwO$gG3XV&|?hMkWo?6u#C9RoS$i(u_Ndc%*hdTgLOF6 zjny`6VbY`B%ppJg%e3J2*yaVAQ12qJZNZ&~HvVnA@ZoNr!qrK6o>d0dhIgC_Ub>sO zr?85z#<93N6-)ocs~BX?cs&u6Tj!5tZ=K+P+9})kHiz%}VqwUb+&!sTTsCNzg33By zc3}C8{*+l}{U!f{_xDB($@AZ|tP!5oPSmC+Pca3 zw+(7=P;@NpWgu8~M(nDwpkVO2vGsRZfl|;B|EVIU+nhy!V1c##mnZXCLwCA0v?bX2 zsMin?%S<6{Ju-|ZUg+^Bnr>rN?&hA@zKiL*G!7j~H!2*PuorB(v!AvlhVFq~Pkb9L znYpFDNC-59%SxMP?f&kjiO?j=Os@YVxdmA-nvdME})dVBl$b5-0pJ+<;n?KgbxaH@2LlFvW2g}wf zDjp+Cv6CxZh*=8d{9#F7VcmTQT6s}ivrn6n@Jrf2>_9UpG?-8k!t=6mGzj>=A>i0=Na0J^ORn6NS$ zc%Yc(b>oK7AiElt%j`X_zn8?UDi4hEZ7(SZnY>fzGQVwx@X*|La55G(2*W3GYrbux zV`f$wUD8BNq1}ic6DBaGm>f}}9@PBA&iEUj+mSi$`-Y(Tg-*-9`*YxUSv1ul zDQr0qgLrwCI$EIofg+6uQB(2r9BY5eeg%|r1k7G`QBHo6hg94P8@y4;xg_`F=^EGq z!}dng?w+okgIxZ2uhidMashnaOh>yw5;b+u2@4xDc&E3jBs@A}&+w>Xf!e|&kG=JjeePJJ_uk`QJb zw{jZG;*x^o8G1@RuC=L2u{*dqWO)Fn@rWqwlddlshpszr6>4{XQ8wiH${e@um2<-8Ar{viuUCio?4&A#6!+?z!*%J z!j*?l2}P0xw_l%<*Q}1c@T{$8egogMBgZ9*oUQqNEa}V3wX+rs=-ue1HU&4l1JxAQ z2bzwhi^^rWvBTzDqD0KDl{}e|Jzx9xllC9|>a|mCc^5hV?FEwk==6i515{WLjN+1c(?|)QtXU`4l5?qRmuYQMs z)JGM-PSE|U`^y%&M#zEfW|gDg1dH(98AwT`KU5Tx1yz)d8m7e`VEOS^atd#d7KH`5h8L^gwMN?)`&N;Vh}4=$Tw-54PPA82L)>3V@;kxc?o*kCvxpv_ zA=27|(@frlj&oKQO48WmBRlOEa`BzBIX)YvYDpZl=zSJK26c?^T@4lP7QdgI4;SCo z4?Q|EE_D@TeD?2{DCZht`}}=t$)EE={U*6*lI)GIWAv^Uw=R+zgLY4>2w-?GW;AZe zr7{R~t-@4=ijLGbuEI>^y|YrHDL@y?SP7kO^7HXPWI)xKC&8nZXJuxpvkjhAb2y*7?bC{Y12yA z!)ZrE-mx}ZFw>IYcaP7XrcNq*t`H14NmDVEn~6k;8)G zT|WIv%xPP6On)~mC#Q4i^A|Q9z`Jm=`2EgfPe9YS#jH>kGCc$%l+b|{14ZxOwa*Fn zPkkuYf4=?qtKUVDQAYjsOG28me|B*E^&ScmC!r!!-x=YjnUmF#l<_e%Ecq1CkF@aR z)=)|ybowPM7k61|FR8Fq6=?T1-gGFp(qjwja&3WvNIMtg3IKv78=eT9TNq7zBq0sk z1T9qZUIszekOH((oNT@+3JJkw{Y_#9wSffOQ4m@PlmOpw)D!vT+Im5#Q^bb+bx#!A ziDXT51%4#c*eu8cKfAQWVRSQx{R)vmA@F`JZmBILf+i^cOU6yYT6^0SgCbT2O6L|Hn3}pI{$s?1m`!x$YMfo-)Dxq zNs>^Np^cgK5bB)S4NrT1>z3iUJy^kpHx;}=LI#LMO$;|Zf7E*0@L;oc_WqQOTruk+ zVR$}ih0=dRi>zfzX`08bh8MEqpJwU6C^ulx?00C5y%J5Jj3{+m$e+z9*htwFc`X&7QPf$l2*O1X7^{yUessb zSI86v*WJPIr%I$BgC;|B=;3Q2nVHu@XyyhP!XGBqz^On6TB=8VEO}1Rn=}1{V zF0p<>WbbnNCN=O@kJ}}5I?-v}(}Yp>=-r_})J+=T?aVdYYIjtoI=_!!^Y%k1xLh(D 
znOu~C7joa0p!o_5;p|A^e#UjjU&bH8KtHLmHYJZlOey1_%knj@?1U=ZXb~!3FUaI9ITQR;^ zSV`Gxer`1aHpXaVRx(}4Kxdit>!b7aPzIt-hrC1j7aGzPqP=f%a+3OcX!!vLq0G6Z z>sKa_|AUCG#0sael>62Ccl5pQsfNMV=mOP71xif$qlj;)EUj$$BdOo)L@ku+!sAL@ z0L{;)a7UT&apugPAByU|l8fioKm~!2^(X3`r4#P(3s;L zGMwEYWpHHDBN{nnLciPXBkkR3-keXrAdJ_+Y<{6JlzQn5M9J<%YbslzNh!xwsvn%F zv`)Ej&@lJu9nl!cq3|o`un#NSLOKjhXBy1{Vx$(vPKf2yBD$2cWAC{01 zDd|q>6a=ZEQ;}AX66ux}7#NW5Zs|^?JEa^N=~Q4ysez$k-iy8W{p|aB{_FkZ7njS0 zm&fm@^E|2ipwH%VDp6JnU-gVs<(p#J$s%a-ewsyZmlZwngTD9+#=`7VI6cq^gsPp* z&tI<~3{$GJ{>3b*XXqPWqZ3N=Ka<=`+J8x$|FLuz%@>zSJlvk<9XJU8hkVOsWx&|| zuX!kQW)O~eX+3+X1_ltw!mUL?cj`yKZo^S_lxB%7=+S)Pr4kf7wZV^cP7knJ&YxZs z$YTHAXa1>$?~i>nz{&bDNiA;a*%kTDZuggKbBb%yW~v>20M&Fq#ccV8ZS)_kK2PB$ zvbT0z#JKiQdK;$^-HVaF5&tm{1FHBz&m9!%Zt@%3EHoyAr(fjW*0W=Zr)lNt?eZYV z`hx}l-1TszFZr(?{v*$i0S!Son41ZfGG`}Lp$w47+t=Z3M?Wmtgt`^1T{&%1QJh71 zXX-1C^H4Rd{G%LM=#Cz+H>ZIKAg_(bV(V227F+bJWDe1F6!1*~ze&0F7DMMs4%P$w z5+TjOw-5T<$PC`&74~6}NmhI8#5?V+205dZk@fF0QM1$OEvfrfpPN$4QoRvmWi<`` zl3vx7X^S;5sYWWs4z3$teuDsDShv+bjQ+Rhey>K4wrUZkaJTzZ2I;yxb_>5$cW=YXzrH}EP=ySkbkc}Ll}2Qk%8Xb7}pR?Lc%@&ENn} zw!;Sr3atO2+oQkGO@l(w=NGvs@d(i+Lp}bD(D>Shm;v zj#K&6<>0_YU!C>V3~*pKd#TwsN6O8IeW^+?%wE=*q)y23pxXWl$~iJIYw%N6U*+wV z#w*x5tvRD1{i1q1g>uxpYWX>a3R`n9v#Fhg+v8&-RXCYCm|cE{rnVTjP*AAgF1UZ; zJ)&B13ONZjBZMBNPGepwSMe6tz_d0@&mM5t>YU*@MvhBipwB?0Vm9P0C3BA1OQU)8 zK~Nk_n1DFylBLWcR68}qvO-Nk{y`E1s zrRFcl?Jd)b{t`enDD<4-9x91)5JSFf4sYdd1S(ua*VIV@$(%6dFbm*sTVs8*UIe)a z83p%AoZ=1)OPr@xW?uw0??npSn88}`z?Pg*eTmYSEU7=mP@okkCo_xKtA%;%0B%uL z=X%9r4CR-nt2`(xR&V#)4pp;jT@u~pc;cQx|8YLTc&U)_R>Xg({Mzro@oNWIX||wp z$f%?KU*LrzpHqdEgHk_EZ}5t1oUVqe_Ay#&)jk%JoT6^4xM?QSJeH%J8Z1D!Nw>r9 zZ7q9eIwNGk62Znao+sHt;Lauq{Zb~2J0AqqcB|lg^HU+;1g|j2eKQkqizDP#m5#ZI zdEKWmc!kkD_xO}xGTO%Eq?`SXy((=`v#8y)!>g8d{1^57fKX!4~S|? zoo|z)s-tn<%G^xKdd~V2k>Joioloi#M*46AyRyWHQ=v9gQ%aw~r?>ANmng6rZTs}6 zH?fU+Ml*cG@gR44htW2}Z+CC}T59@O1}1BBKK};zA-tubn}&!JU?n+e^U{lQXz3`% z4}F?(B7DJ~`DvOza929f_`{mt%CxQpg|LxX9slsqo!y#uo@g#@{fz+k2j>r&I zlF-~2{dY&SCUK3gL7lNn#J*C8p%ubcd}EF5hql=HTM4l-C2 zivy4Ad37IAMEfD)C#T#oSxiG&V;7`gX-qXKhFLNnInqowu7y!);+lBzqA@@7nFE7Q zK6VxVIzZ1#z&SUe+of=JOEPAv-%;CG5tNocmR=3*1}veEJxll4h_hH5m%W}_QBL%x zw#oFNUc~f1Xc@A*+*YI!JkN6Yw77U>rP76!Q?1w8E`Qhi1%b!Zb=yI4<&KK+o2EE& zpK{9B(Usui(7;gW8n1ujbug7El2f;-Zn;ru!^73*mrK%7vej9$w!J_N$~5+yRSvp) z|E$X}XQpq~USI-D<{&u)AC+9^IQ){&4`Ed{e?9a9k1NUCm-Ewh`?-ukp@wi6zL|P8 zm0iZ_Y#mDzEYiBQs&#Z_SA_QETepFdfnW1D+KwvLNiz}m0x#odx#9D#O^-pL+DQCx z-hx46$PG!~?08Z5-DaBae34mZmzy-SCerNXEpNk4Z@ulvp7NGt!L~19_=RiaR5m>- zme>zWX$7y^2&ws9u?^EVP0_vz2*UwsD+i5P@8j2V_$5gB=xw4%FcI96j_B#sBNX4U zWwibm6W30a4@ye--z9VIHVxXi7>@ePcNScbgwqvYkcYE%(tBktqMK3eS`vbj@CFCi z2VU&RYxLHkY*szr@lk!mze%LxtU0~}4)3qsff;Czs2;)zi4+-wH&o13^Yo^JDHi8% z;$r3oS_c5#BP?e9D!+muH9H8IG4y`majJ0GRg#s~Gc+N7&VVSUJ-=PCZ+bL6B`6n> z6~@@)IajFl#=yhXD>RWbgg6{xzl$Mhv5K2NM0^$IM20KsoCmkoM7^FD9Jt_iVz2Y)-*(&T=`VQKe}Lul>1bbDK?ZNiQ?O5rgqj(7;kG?zez9l zetsxlHksshMC0KKn7FI+bwAp<{!rWB4d5_L)mL;YXTR@Zb=(HeJxxhPTSfZTZitNW za~hpcx@;u9O@GLmrJI@^P8xVGIHw%t!Ql<8!EIdopdrP%X^MRECeAT{=)m~^?p4iY z)@OwZ{vwO(a`{XOkCFkL`W(tD5*oxsnG-XJga4eAH+ZbC&3}P9ISrL5IRg4=F#F~U zhJ4O2R!&WF*%w$8$%0+!t9xO|NBWPjd}hCJX;~3VCNy!qH!|g+dlbl;n)2!Vk9nEs zA`}+0QOSu57Q=4*()L>Qi9LoFbT7O>d^>`k3WkP%%i@&Jf7~LauSu3zN z6DB&$Wy~|FH6tIN4V&>jRH!WBQUc4Fy0Fw3cvRD;NL=yGsOE3oV$svNKoflrW(BXW z?VvS4pxs6&k%Z5DQ?392SlhnscFd9hKZBu}m}-A2nN1Y~}7T4F=&iSoReJy0BcJg|OsY|ZGmdQ1%%vN?-6 z`TP?22eJ?}@l=!3y9U!t_v&%+H7D5!ckvG@Lz0543`gfjL&X_QT|{rsK=$4wQZ462 zW&Ju4EqE-3$&Y?d5l$-Blx6SKUTeM$x21!2@yy5 zw{jOyIIghiIV*E-Hp~;%r?_^#X2`1#Cjf0HFyKU-nn#}N0tU`QCLlN*=cT6 
zzoG2YAr_$rc^~dNVxa_PZUhj}wKw9U1k&_)ttL2)0*Q8&lOTIACq!}kVGU zt>22v%^ew2(A7K;>IHmLQ4W*B8IH;SQNlj0Igf&_E>UhS@?e2{DP3)!c+Y-| z&-I_zzN*cRwF?x^V*>HRWhTUHubQoH%UJMX3=L=|hR>;dd;pg#Fl0>e>p25K4|m`> z2+xa!!4LW#%P~yNNRU~$FlH!N1JH|}mvqO{fL^2rc}~O`6j&NLXS}>Eq&jzYnCLN8 z5}JI@oB8a-i8S`d&7V+w`p8}{%vbPDKrxPm?v7l(h|icD0DhC(Yo?c3e*4a>ge|+f zJ@!150oh}JHVO+i_#A)BTq>CDqF-E?KH=t}(xXdk)SOX_Qx&p$R(DF6G95%lzj_F? zt#a=zRYY7&>W}Ny)s-?B;dE_@5$U3+3gTOKHm#;Ed?sp#ts^<-*h>l~O-y!;F9`oJJtnqlVn+(tg5M~w*qeDI1DP2JC%!0*5p*%$eQtt(ZNu%T5$ z`Gcl!U^{ivA z6UN!*6_;|o-gsrpdxkC(t?3Y*>&!gvtk9D+fb<|qyJQ|F5AZ3ztwMqgPYB^Ci*!BT z2UuOB`j>MYWrtN<9fMae3;57Bg9?Ua;l5vn*9;8DlH@Sio{_2;n zcPy3l6Y1h`OHVDAW*}w-@lDB7Xc4gUo0~q|Sx^C*G@D=tmuio>u zX%#r{iFN+b^;z=5o(|xlyZ%CpDN7%i8Q3US8!yvlmrkz8Og4?p@5+|W3PK zuuaokE}L`vO3%2pCXS|L|8U_=E>U9096DJ9qiXzyjPWoB%W}#Ev9;nWJ%br_@0~5R z(yzTO{1$ElN^ohmv6nkZ4FJAmR$iYMPSr{i{(AOFP^m40l*}ECW5|H4fNN+F>5H`8 z#gyb*1l?NCGP|G~-X3#3EyR^j(K+ftbbnVuj%l^UuhAI;%a4x}vbD$aPB~I4J6wGJ zcgs8E(4#mDnA+O#)})Do$IW;|&RqlxT`0^Revjdm zfz?t^*SdNy*@7-}Tt?)0p8$TTztPIqtCTM_WEhypbYK!=;|62(XG$+!x}Qt2aOasSqdiR1v5=V~RH2YxKS_qPCH znB-#+cmNE`ej>}&PcoTrI^udYHsrc(ndx<|p_Cwxl zVM{?G4|ib!NGI_lW2^FapLlgN1N5+N)z?S)0w0OX`Q~&Yf-$FtYg)`olTeLb7W`q+ zAO7|coxVQsvH#7ia8yNI$-bAA`LLg^2#h=rpw_e>4`wOK#kQ(ByVTHsYM96%iSzKa zl?;dKdwU$B0d%~2)?6>r=^>!dqRQDLJt$PVZ0U2Kvh-jiI-{=Av&3OkpErdLj#8}r8&lAC;Hm)d&Idw4ebj7Uo))OUER(N| zCM_X~#c10%^0Nl(>SvjEqri5=^Pezw;+WzKR^rQrfzL`o51FazW2#AHVQ4!dp&%=+ zXQvGN^x)^JPFQv0A)VW32W#3x@|xk|l=&4=sKLhl%MTwJbYO7`tLltSgd9wSUC{lcISu~LS3zAH3DpCThI*YB^(V$ihh&7b(C6|7@yIyClQOprS zI+tUAUSpo8$57FS>CCKBGlAT+nB{T$P4$LVTX?I)fsQjd!ujR)a!pL!lNPMR>icAI zT0WEqc3$;6PVJv(E^hkkLvMgDh$c^*#EA4)fPod;mBpDB2Lyri5d_85d9QXecT$_Z+{Q9WAeVqOsxmoLZ1>zt1Gqcn{52^6G{B3YVEk zpD8e0?g6^|VFB2YKa+yFuo5dfYC*|%SO{3bsZOpTWK?9clg8ZBMQCN*iT0{tRGDfo z@t!{wjMT?E)^$X{a&3<6Colp(JlJnbeG?YMm}B0o-o(jdN0R!p<@l@QGtlAP7RnIPR(XY*|Qro-BuF0|`yH-@H5;;32% z{d?Cv5riSZ{FcM^$;d7VU~o1KbnxSBSy$_bthvlGQ4xKQY_$K{>N#L6 z;G!GsB+RDZ-wY8zC_a||SXEZzfW~{Q`YpUv>rG@VZm8O;Wp2kn8bII?(s33;6*scA z&2yo;H304q%;+?VXoJ+#%34OM5%|AJd&C-ybhcuk?@N~Y>nJWo3S-JY6HW>gb%w7+ z*~tQNq0v8a;d_QEQUEZ3&g#h|_Hpx{i5Q`~kle%E-pU}H38smi5?Vrj)P%PC0Fh|% z!Nz)G2FZOdY>vEdbJa@l*Fd`!QRSSdP9`#|FG8M3)O`Pe(Ov6>9}D&CaT!*2nFvz; zL{zmin7G~mfdMhe)R|G@@Cp=F-~c1y8xoTGOi zYRAxn+@#3ogZV!DpdRt7P4hoO2&*qsEN<>$@3F?ml4)+>b7v_Z^Mntk0KPp|JxM8_ z%%w1if(A{_@Bjvi_KapyZJR$ZMW*Z+SPFdxM=_1-#rHq|zPVwx!b2IFeMt2K@+6-G zm46BY=Z6aucV?E*0M-&-TKY5ZgSOhkQCZ)+6VSqroQT;#G{{Q>eHH%=j}6`at*)Xr zNXFdewPDvM4b$$=B6QQ}Fsb)w|uhVlrs(M>X8OT?@tXoTu{>Fr$MQdTloz>#FJ&&hmXMzw ztFsO9opSUaK2^X3dJY}`^c=!>*?NnyW|{r~A5-)Nv5D+7@KO=XRPW0>$7NL`*gs1G zf-6p^Se=dlK~)PS4>`r+?1aSHMAnjmqA_8^_qn^pLYWHM#N##zkiygWb|hUh}%+ z#lGW6ZZuC!@rkfzg$qc=UWh{rPdrJS+q@NjbF8tO^6~0T`>Md5{vzfNs@9Z~voXYD z2a4f^C#^e%AbG|hD8(Lq((^D21TOG1^NRk}iu;<##RwdO_uT+}(;>xecy;qDx7i^? zqs?o^VMDEydsiHFp-Rs^2_)!mZ-pnB7qU63O-Zl0vdOfHH|3jq6LfI_KIy=l)`+hH zK#ll5wbA(37>9=VIgGynWtS-Y%Z%<@T&ABNP^#nL_Vfj^FgMzvr7FD@FyPl`S+4YpNLe?&z^sG?E~(+_LIdZq-&1={Ba$5$=bVB#1zdpR+NF1J2qzT@LTKD zl7odg$m0)6YPJWkkc`A810eufticRKA$5>apDvl*WB}0IK-YLAdtXN1HCLQSzqssV zRW{?>tsLK4hFk_dDLZNWmWm7!lx1Px0QMNl1?;{t9HIo^pvc_baa6iP5$Fhf)oK_D zNcK1R8N?U=38>`Kg@sHK`#)p!gEl8_4OBx0KttgZ=pS86Zc$^jKMy0`6M?BSc6|~~ zcpX1|U+g{#A#I3>22)UNtLrl))z4k=L6`v~lAo)1wi^RPFl?P~aS7}AV(Pld09i1? 
z(9C{A;3fH`@uP{|3%NT@i9O#~4v?IjByM9nLmOeX`1Xl(>sXt9U2Gg(vg7h@g6J$J zI!KO$ZZMAR(to~qu$ZAa|AL&GOLsR0>~iWvqkDUm6EOT$T()Vn^NcX&sARhBRh*&* z1zll9)aMRkGUbRu_ltnOhl+8TyYmV|9hnFlKT_)*)g&ZIW3k?fr3USX5in%)Yo#+a zDa!pClaf{P{AE|zl~7?>3|Io*p#XX>!XRF=?ccu~>sWjAI?df;SybcZ;OR>_FD%<* zf9kM!yB(h-`5NQ#CX+JfE@#iG5aNZF2RZ0NU+VrAX-@YdP185!qCbb>Pwah|*9(m3 zh!eVF?aiItclGA3h`Z_LgTD;|`z8pV*>pXtrJBVY_wHPjR1)de!|VhiQ{42@&GUc( z1mv#ldcN{degAuhk@t+A#W_$sdwz~C>g{n}*5TqtLzJJ{0y+v(+K-m29Si!41S#?C zq797aT4fF5V@1qe={4U$d<>f%kUbKIEIIO!9dVd+7Prr%#&z)}c})z-zQiJBJv(?B25Qg% zREjQC%)D@>pswd~$;g<*RBBj7!uMGA6m21ydrY>r75RwnJE<tP} zHzG`Uo94QOdT)|{2nQNQ@TbdV;E%VatE!mKp)|xFxVikxD$X|jLz zrUvXnX<_0e`pN@0agOSA5A8t;L7v3h&|9w5lg~gJF32hzbSz%fxsPMaM+KBiYi}2U zv-A%eJ6zIoy-;AhiFAfp{G+nYPpO#Gr#XQ?cq0F=*63g8lZfw6d2TU;+3l% zIH45aa#xNqjoEHk0$ubPLSAMu5(Om(Xvf1I9$wM=S1`q(7ZZ?Zj3B)o)o%oEy%&W9 zAeYW@G?O~7zA(kX;Fu^|-r^db@Ob%Vxtc;ROm4<_xM`L8-G??-mhm@)8owU%l#oq6 zj~424%KR-~79_7Ao3I!Dv&t*J=_XUr1}V`uptS+h@0148L*q0nxIWRVR~ZC1uWc0Y zOF%ZyDJRTXi3pC`lk3}!u%~M^GyJa5O6*^4UQ<70S}btS?qd`M9{Mt<3SnqCAlNk9 z=_e|jbG-(D_8LE@!ij%wo05?s+BIZrG}C7@2>!}=jT&2t2W}pnC;tW*3h^PNfGKfN zDTt&ORrHhp!@frJVbLeTzT%&ZC~jEW`3IZfBoo6C8Z!zSGra2`wNm4KH$aj5swPfJc-dW_4*8sG z#03Vg{~Rnj2<)vMVbt!!heo!-M0i_{Wz|9t)eaksSmc1s6ZYp5NUl1#+Ch4LOXx!e zLzlOC?}Hgy4J0EANJ|vQKMXPb;5ae?*ise1P`PDvBk(tr63tp84x93th8b3i!L!zT z$eXGR1rV>PPY9e08gnn&;!vn!QR1VH6kK~PrJnbTQuN@N&`@zPlkHIGXER~YF);x+o;oE^{QxDmokhbHSxv3s)MW9rGqgB!KnY?x7$sYN7r{&dbt@7n% zB4E8Q&vy7B{p@cmF9(l(`nC4Z5>s}k9DiVqjQ;>?8Ey-{>1vmDHM^g}%h}p22ME4l>lH`*IMyGQFXBu&UTH6b-X~Wv~_7Lo}Y8z)StS{kOJZnr;Dc77JltNZ2 zQ*{942U3Kd(iG5Y9~7GEkD%8H4c&bdC`(vWZ-txHn2_V4Syx_ZS8&2h18MWD$$4wD>I~H zi+!(E0BT&aB7HKWv6uuUYlh07WOaKO68X_W>6&-NFl%R~67^gbtSwmW;WpnV0gn(9 zvMEr+zwu7ABr+e|d#V`w(Si2@=jy9^n?EBo1`r%lSGDS;4hdFox`F9>h5yR$)Is;o z68uu`x#s_NmWcl2ED2hvCzvPOKOiy*w3E^u==}*-m}4*`q&cp3E(6QmN&8mSRLXDD z_TZWr1{(332SPD9wD=LFXomz23~hVwy+V=tIdSjO^nI^$#lWAYbLeFK)iNW8yxQVOL^e;h_i z&6<>tA^4-ZR`0#`kDXLZR)ie#*CtipD-)i6ULAqhrn`wK{hy!u_dy%+Q*A(eN2nGFe}ucF_cel zK51S}a()%m-)TB>iV--85s#ndVg|r$w_Z9?1_x)RSxpp+M@QxRW#Y+tt9YRrej+EY zbt!L(I@06hVcW=b$GC3C5|=n)$|vy4*oQ>mzQgGr<0(EWC6Y$oz09(45~H+yTh?0g z)nQ|{8pogXTLQ0ad;3*~8S%Sgi?PZ^cURFcUrCkYK4QpB?w{M^+nuG!pS@W0tEX*b z0m!4zNcqRsWMvVl%WU?@*j-{cIUEJ&@c28{f|N1JYBYV>xT5~vYy`ZBvqn1OR&7=| zuU48$SDQD{7!2|>_PEoxufLjqBTk2)(tcbRiJ*ZRE4|fLWB|$2M=U%tLjzoM`8jnr z3M{ceH#_h91jWuL{8x;ftghKK_8DiWM&`;`&073(i5@7;)+@bii~8co9SX^QtsBRM zz0N>r$W?CS%5r&v{%WpMewz0o39c1^TRdY-PZPMeS0(V?k%A`4)eg)4Ws__7%eM57 z=PP9mHvENa_5#;;mKhJD_q;cT7{9rhx7vHg*nQpKX?|Oc1BgM3doh^&f5c$!%}#P{ zbS8ag5sz)PkKeAewqyTHG2YERp%k5?UJQO^PQleBuBuX0aTseS*?BsxTKj9EV`>o!nd*%VO%dW+xT{ z7N?4Yq#xm08)0onxNV-Hfu0L|7`P&RhSLPeSOrQs~w$2>oOj(v+Py-N* z5C5-)B{)9 zlo*Eih|^PAx~m!#7XSJN`+!@qg1vE-#~1V*LnY$4gYE@pgOYhwtsAi3PdyZC0`bmh zz=_6;o-u|r2187~`YB~msyb}SV3t|zA4!4{e)@OrPe!;sOW_(suUay4l&#oN(>GqA zfkahvO;?`DMl{*#Ek?MPfRzxn`ZETQLc0`0i4R|xA_++UqN#eG9H{&RQZNJPiRP_| zTbYaAV0c^;7A*?Nl~wne?ljYN_C6?K&6Yh&I!>}02S(9C!>x3g62xGvpk`zD#3#F`;z4SrXP^L9x;d)n{ zEF^N-oP*mVqm<6c&MX{6H-7yrX0_0Uh`Bic+_f7W^6jUL?$)lh|2#TOjQGaG8M)xY zW4=0g{vD-&9LeO~g9&J8- zo9wa}nn|}*Z_JzJ)in9y%o(3LWvVz@8M>j@a!Vzg3VM5#e7#}DWVvVb+9`MpLOV<; zG){8aH>*!z4rLx%LCw&GGvL;Ht4PLloTQmoSbM6()!nJXp1BM#-Mu7=f}oPkaCvMS z&pEs056}Ora*AmOd^}w4dB~_TqmWTQd1eoucg#IRJ@3Huih0Z#WaCfJxSN8m1eJ%R zweGY@xMO)2ih6l~S&B^Pm=X^(W)OqD(*rjK{Mk;b1>n|1dbjVs-P0sfswlvyFx}Ha zhnP|IFxI}#?W60?i%T<)>_)S)D|;MqGX2okAx0fCbcmVX5M3>0UdHo6XC7}>_~TOO zW^n%dY^TedI+YZ{f9x=4yua+QxFz*=0@)ZwPodIwUmolmoN%>#E3zaxAjKVl%DV}* z(|hPhnQ9wkX?27&)z~jj>Uf=@?;I4D_qdgCwE}<`!Ay49vs=$=si)Gu=)+@+MbzmS 
z2)Es>X&BRT8nyG;aKVRpN>&1@2N+(`JqK03eFChPzu!8yUnD>_Aq-#_n^ zaU-N+nZC{IR{0~_b@hI{ZE8RY+Xw5T&pCNsNA%hd`+;Oe&j$%_z3tyWn4{dBzsI>& zuSK-&E8NOZEK=_wJDHn20~$K>KH>E_imy{BTpg0m=OAeOh5B=Isp@n_uYI%oDc|p* zdshw|%j96+=_d&F9j{JsYc6?|yJYx5v25tLd2}T1bHcf+Uo5ByV|L%i_7G2nfBT~lsDMUrk*5$qtPdfow|57xy>fm>kflyc{1@oFqbx8XVmvVo& zop{I3Za?($Y^qFWQ;SyMu4bFF$KV#DQr?BterXf)&ewjg8}oXDdG@E|Wf+^sRk0)q za9UvbYE+eX8b=^QC~fPj-a5-3SP?lZCw*DX>J%6eYw!aj%wqb!|9e{UuU`+7^_xAO zxcAKdw}XaFAcba=E8Uc8lh^H(`~$6v9Z2~J+^y&vu`+-Z@$gOg)3Tt8&@fH~xdsNv zo8h*eXimjGj5uXU!vvxn$a#2xv@x4o23 zLEG#TYL^T1lx|7%Lg&_<06M|FQL(S(-GALVJVSiV=u`93OcC0}hPyypP=6%zQBL1U zXDK>sopEDKc*hEg$C=*yx1J%?A6W2ynQNV603Ub{h-@!(gxCY zdUx=Yj6%o}W^?HTF^eRh%Suiq2YD_$kxB_gL8(FeFh@D(2Icu05kck<_lBuOG~A5m zGfG12aBP*Iq4}V_VH5XTV`bRbBPGP>DmqTw*voopDxW_r4-?4>>yq;QRpE1Z+LCWa z`w2<#0Ihq=HK7}%ZNCt@iGTW{1~U?o04Nen6UlFrj3l0ihPYE7F0 z<%vL+sgM#5o~o6XJBH_sj9)egvemW(#N1ybuD#YUlJAJDOq+7_A;R9j1p8mb?-^ac zt37?l%STivBL7ESDDuN%`ON^($LQDa{YuGkecNZ8Din2mJomwNh_rSaV1tlauljja z(?VaRX`Yve%xPRi@%}BjVqGYxQ$GLZi7b9fG4osBa*yo1>h_hFFJ|_hMmUSa^w*ml z^|Q$wZu1L?`+Io`gC%byTObf&T<&1cv2tU3Qea6}CX#VUpMO;2@(3}^NaeFVgnVgL zf`MAX^Yvxxj6uf|^f(wCg$jjrUAehwS+0)~Mho7wNA7Wee=2?&7K~S$jG-lQHAI{J1Zdy% zwA%Un8pcRD@~;4e+M~iHCIuQm@<)mdxxmCdtM&2wAtu2Yz?gR8MaAOu$S2U|}69UUu7E#uOq%W+=%Y2k%vd>)XkS}3{q3)Y@vaj4o7daj6b+=E|UH)VEAun-tTRC!zIJ`(vw3C-brMoF5(Z|6bt+lA9$X-g^ zku2JltOhPz%w6?vj=A;uC}W*dT<$f|Y29$h2cj*9)5h8X>p&F2c~t6t4`ziOJG1QksKvXDblb@5qzO#6+uWh#2}f+yPe?@ucNm>fl_w$>237^2HQEYf z2xlOWF)7uqJ)@ZtR1G2?!613x*Yle>UL4wuxVT*p@EIdxv(UbA$;aEL7<_OLx%uca zfX$k!j|ZAtx-N#;EYuR&->(vw!(eE(yeGEjTM!3#1NXGF`+@NVU>|A*@|ey=s~DR_ zeu1R}{X_CxFFCm7OWo86y^{g;P&Fx6o<^BDWuXm5!7gTIfdX4U^QR8= z<1B5zzF(@4C#fqZ#%Ok&)} zwfge12bh8cf7ttaL*m~?AcY9h=FM^LY5|&bJ8ALwfDXil`o0789%E^X!QweY9qK=k+PyMh{%#Lgh76E&gKZ4I!aG2m{X;>qiq ziJ?1yr#7$lM{SZ*dPv?}uL_rUdm97WWFnkO=uCcY_@W<%+@M&gM1sZk*Hl2u?Pos= zCbQjaFoamQsqzUxGj;CIiF~KF*FC~@pc0xQ8F&1UC$y4(D|ErnbXUn2Osmp=D2oMs zLG=y!Hy;k)?l=9qSRcvLQj6*kp`tuAX;Wag&HuH3J@<95wN2W4rrkJ~XtWb4eFe&7 zoNY&@ti1~_1xw|bWOpS$*UoOiQ3<>_lEAelJQ_-$#%L#7yawCJ&54h1a=*pPp7k^$ z0!nKVp>!{n4Nz`9wsy zyo2{QzjQwf^F-`E>X*J%Us?J!<1sjEzc5Gf@E$zbDksg+o4^;ylf^{M=6I_wAefjW z$GXy6TVtIVD#?p(KpxIT4`_fn`n1ivuO1|?&mXS}K~gxmjs}5xw_^fKpQ?ffe2Qjs zee0^z)NuIu=3$;B<}iSw`pnI2YI^Mj*H7p&I(8KsMjCuNxP9+VzuI3-nUCxe?uG7l zw=`f^e{{WM1j6ToS}z`^E_1!IPSMgJ$}4 zQ=yxy*<25EHI!f9a?I&H%=^UB*Z(IVbnK74#1yr^&;?#A%1l+62z3%4ev%XSP|`vJEM5B1;FIDQm6lMHQTYoS(J@Bweg0 zuxkwR>zer6WF~AAj`AZvR?oo*xntJAa76pW0H5j6lK=tOd09}5uj?w>2npx#Mi}ea z#E=xgSwy4eM@b>A*N=3NM()nu3HFe$dMZ=+9paw9GsL{0x1n6P-v+GZwuN# zSe4AZybWN>cuH5l)B22f(;q~jl1(PDmk!)`WZ!by3<`q(PpPe+1jSIp+d$&0`~R%y z_=~7AD?Y&ek|*6X_lT+4ia@XdG%U%f;J()JRbPcF)(+b-%)(y!udM+D<7aRALI(JF0s0P7h9Q7q)HxN%9eB@K;`~rAQTA<$HSDrad`#tld&43yo*hPsY*Tof5p7jEG{o zQg4%YgBZ$M$=GDwj&iJ%aWHA#Mnc~~-9?1%7$TX)nJM?kfzAuYy?g2ZfMZKDD`gkI zT=wt6?&%Fn>SG7~3iz@AFQz&99OVniU_Rcu8&$rfVZInLT3hb#FDE2D;cGpFsq9dd_e7 zA8nw$rdrsJIvjZ1K{t-2V!POoYUApe59bfhp1=&rmG5tTw4=;U0E|3e780wZ=!V@V zUUQolZiF`YH!aT_jQB5^BntqT3ODn(*7HZA8CHNHE$64=-1l>vz6M9JA)zWfFD}jE zK#wMp_TbW<69A)j*+6T+{2e#_<~n*q@-0lXjs$4FVP1TF7FZvnx-uL8M}2Xo(o#ZR zGdW1iT1&yGH-+p7)+O-k&VQ{P--pfEW-#;t$11x|q*?w=s9h}FTj5ji>u9@J*CeYq zd~2qMdxT8yt1}9ik+0*(;=M)sQYIN)Y-vx(e?GEHl>On1KREVgZ0TXLpUI$5pdw2> z8V;LIvtt}~fcTRD5B5d~0o8RWU#`#ULWQSG8E#++I1qQ-wN;RwA*%SpO3Lje5c0JrF*=@Gu-XCUNN5a5~+N06VlPJej z`KnMXjmCodU)BF!Yey|=GlVWmErDo>JuhreR#2)sQ>}~ zKyQZ@khidMT{lu;8h*-6dvQAMBQ(+08vr`v|7lQTMpr3kIVf2Y^9*IixAeqg1sHnX zO1smCA78V5W;H$-mTxz-f3Y(RpREQ>aRBM6oLbjkP)drHnwK*PmbN0dCnPnv1+N1FlVtU4bQ` z7sLGh)F)cAc`eE%eLdZW^~CA1YDB3hL>o_#PG`BcZ#QO_N!uu5x@u`Mo%2oTqXr@S 
zR3S~wOE^gFBRs%8HC=(tSbys_AQxWH6$Qi%nJZ#CG+*VYBm}}xqRRgK z`yhKMKJwFpa5hoU&EwJq!9hS1&IPM>}d_KV`tJfB;B%De6G+O1f%j`EA4 zen5FZhw)PQS8Ey-RWv64Cigxgy}}eeH&wZT&KEu$t!j>#^^w0qCET}}ryagjySfjO zLB^}$3O~r~9#WYi=ZILpRXC;-x@_3*z+*VVbx40nx9l0K9H~kqf&6*l8e#X_SgT?- z4kqAF=Hz;h{IAu_A!7H-b$B+&Qb)G;Ik&=Ot?WX6y(m->^kx0^X;m|07tQo|62tdw zgb{nUh?kRu6WydhTr26)N=L@^59ueuwUPpZ%n|y}oFa$hW0f_dQ?*Tu%P>_ejo$Hz#Nu>)Lqdl-$Odb8~ur)&89r=ZZ^x> z8xZ=0+Z-V2=guw`a*Xp>zE4`Aw`3|nT*n=c^AyJ&hdzLN9aGC&eCw^oqsLVCB2$b# z5=NIT$N{+6GY@~B$5*O&?DG!+Z;k|01=$Za=>J7>`7tA~xcAF7_*?knjxbl6e)QxORwi(oSjvk@a`vetc#-$2xqgY0`cUbZr;8 z-**tUV9(r?b0Eo5;>G7I?l>llEer0xQz8*0h7)BV6THa8LPVLT6qKY~j(y9V;04xv zCY|m!?D!==iZbMddnc)w0dmCHu^&*v?uzV^CE!NWk2_~fPL|>yAaGT`^QHG{5N1N0l^FXzi zbf?8Pm2y}F6G>4JGI7#!)yz_q5&6V{^3p`>;|d+^l@^`_klH*%{Y-d#H=6;{;Sux5 z(UT}``kaU*`t#nRtUXPqbpNYflM<7WGG`4b#d+deNOWXCr+T|)^dozr=7^8iPnffW zBi=6s1UefP&uWq{(WeAl3l5JM+!B=vHnU^lN|53h#0z)moB`9&e&wM3FYL(l?9Y)Z zf#*dt$Zd%w-zL`%%@!nFq};9Gh3H;Z*MD1447Y{Z=};c{uI2m14)ZW|n1L*LUD^?% zovi*>!)1Vik1iEh4KVi_mI02vSeU)K_+w{q|NW&u1p6Vj6(|@W&2W!S#N@J-PyiB1 z08TVYK4M)^%(q4#S|B-&_K zget*;n84nHs8c!Mj01XZ#?gPeO}NM;u+69gd$F}&>hqTNpYf?AFoDpdQiY&_zqE;M z!L5hb@M^k+U`ghRM;f=u5xr}|+yC8O`%{Ep#`pP%i)Q0U>RuVF?Vu>Cnld>kOk<e89*HnpW`S8wAWMLY; z9M@hZA@(5-<){)nKzu!oV!fN|?q;O49+-(fuza_&nqLb^uSP^ zKBZT0qxQ7b)@@K{$i)TGGkg}0{kCrs%K0z764a_UQ=d=hXYjF&DEm;(!!6dz4?&O3 z_`A3m*Sw56?`5Rv+UIsvo^A?m7^S?G9W=A_i5Q=@FH2>n6^(o$Js%OMyK(KglUo?> zuObMP3gF>~Mf8W0cJ@rSa4nokwa_Hao+rxAY%D^-**r zAnH z9NuV_{L~jt@pE|_Raso@1wi^QcMEg}ce_f2QPm|KLo>NGAZ_KEr}6n1bEApuqG2D) zNFmYVNYK3+NGGLU)!V~C`t#a5CMrk13)Yw#CK7R+)}$9K^l7Yiy5p3?a!=Zs%tEKl z{?zA0zBMP^=!sg(d&<8(wLEw{1UK!^FH*7FY`)JGmDyvimtjy!{6_MA;BHyJsT!yO z`CE|Vk^-7(VF*s=U4Xz_v4P2Go=>eZs*xnD2&~%e7<1cE&PlAKClJ2l zR*p2)d)NEH4z55tS>xK4#)^b{FW(MQRE)iX7ySl0f_{)0G97o19qz(sBJ^377*C3s=mX zsbU1t75PSJud`2P?-ejG+ z@TPV(I^Vk6d|A(NRHpL*%3Td_O7S{7kACSW=!>OK{}$Bd=$8>_75pOhjZNxpLZ z9)5c1D7Y3r`^k~Z)PH%#4IVUFv+cQzR;b7qnsraaimedY= zdP2%p3?XK%6gGf^Bg49B@BONwTn~JQP|3)kbt0f7|mOuM@7iy};2r%%x9%a&WaTEEM z%j}J>e}t}6Rp`IWmkv(^c7#!DQM&Oh{yg2@vhc(b#BCU(sUFoS-7?DDW0VjTPB#t90BHgM60^tg^`aoH?p8o|oh43&egmoNp(C+2i&;|it-8!o@Y_w z9Rqz^R87q}K7(}K_G(T{@VPdFBv{jGO9#m0UWl8L?hakKZ!h*Dn=sYQa6TcIi#mUs z(^iF<7E2-m!$s43p}Oz5LRw!)pgoM|pyW}+`L;F}*M9tNuPsC@vIMyq@qa_1%MDzE z&T6T%@nW{U08XxaUI1r$a4;Kx-f?GmJN@irky=L2+Ns~glr#ezd!`tb!qk4xTst$W zXQeh%v)SoEpLek9|7MeT~oyHo@sJNU)7clplJ6>HVp!-l=_D3DhPcq?ZrDd@}> zk^?AY?b-Y$F3@mU$zb17%nGS&DmR@J%ltpu)Y)6?R?=abLAn3`huG41rDooyP%_>- z=~9*ovM19tMEY**OWr=^oDkx?U5Aa`&Dpp2}ce@%fs!j@Kpk%C-g z@4|jj)H<5+q%Fj|HotMwD9V#K&et{{lsJhV2gnFr;_TYZ1+E?3=o6>5ACrS^!ET{H zDs;riEMP=~A3%URUwei_QG|`(h$8>tV)fLZxNEhP=#4Fx)pm!+4>s3BfF~yT;X6`KdZT&*9MCLqyOWIp==#|tFKB^U5iGV-=Lo16RyAxS3L8n0g z@BoCx^zE1gohAYM78}enRSZQQT{Im*>?7!p_x!DRMqyuG+;zL1)ct0|uAICtifE6w z@UI!Wqv;geY){1aAd`BfANNLG)K87Ox5n?8p~Lx>`7X@RUKJ%D4HUCz4-<&JE>%co zPic7k*8R?5z*cTH`^wejfN&|>N3r-LUYb(0m*(Ke6C3I)!i@)(dfdI&e|j}OFA7E- zEj~Tk4TJxLF#NUdxNt>+dIGuofjuHDHJaREmdcQ#^(Rqc!x10;_bZ!}tKr)ys5~3i zrGc-z1R7|_G+VOC#H&*Bzr4-I*c0Pb0dJR}GWaog2oA5?FI`hW^s#OW^SkssdBhQV zdF(f8?qf{kc3#LGJueOnhUyYnmuW4*=}V^5Of6@c73Bc`Rdu3z3zDM zGT4GE=qjs$Z+^t@s^Ehb1DXuYeI?O0m$sO<%j@HluMuX7U9Z-B1U|}IrIL^WS}xE5 zV6Ime>&n!H^p(!z8VRJLCE)M^K_@bvo>pmx>2_@ zW?}4c(>WlA4;S|@7z}IE4~n!2VP_v>Cijh4VC2Jvj>3|?!Sp~3DF)l-uZ#30ju#XbStuQTe1(APiHbfWY#G$p8q`6M4SjG+I1=jjyM1F z68hUGTo1J z8-g-xYw$|YpIsRa!MVB`;P~H6yX*y#8YIPNZwX_GL!}r%^~|Iq6+2Hz@JvanEX;OY zq#z1*S**c_L1#JES`f}ZPb_+edXY{5OT0$!;q8DX;Hn5!PP~hpEa`7QOVmbQ)mbt7#b$svczfk8V;m4gx30KdC60B z45qLM0autWqyeonaD};hC6v2y ze$jmAH&ST|d1;Afp3dFIO;w4AQidprL^<0)zMG 
zKM9semLU=uN2B)`__w2^u^J>J&x{xten!O=GeGY^G8XYk{M6l(v@y2J0)hs%!FWyS{e1a?;#&T%i#@!>Qd~3tQYkrK>ah z@@TkL6Lyza2U&9>MU*m^SA(iSq(>YbZTfd}8}UjThrPG`DV-8@96}ovGpspG_D?7AT=L?NJ2;~aWrTU*1pV;8iFK&q zCu%F05>T2=Kj!Xzo=jJ1MY=rh*Jr+L)>shl1|_&QE?+>88@qE;HP{P#yN70nEfL%n z**r@_dnlS8l_s5ag~$KZ1#r$2ej6CX7xZ`#+}x(QYD7UjFqF#pG9R z{VL1g?;rmA>;KPBlo6ghgw@d=Q4Y2PD5Z-@_*kI!EMLhvo%hk}YQ$x1K5`(eu4l*d zN29}N_pH;oeuZ9(;GaJHa%1H+Sj%zmH*OT?DMorSdvmsQAz$&;iylU z$oyF42t~-RJmBg{f#nOwoydWySs^#9!QcI)J#Erwh6`-aJm#6i9FuLm$bk)ZM)%%1 zk_I;GzKy3Q)>(RyMqJ9nA#yBSiQgnkxrH`f98Cck29Wpd_#zWs`hknCy9vuj_4pk3 z?bP2DJwWAe5RG0QOPHyuDpf@}ftkadcOi=u=2@Y@YvhIpvGt@<Ul`}@zjO#Ebx$#=C@K>q2H@JMSU8Fn+ zED&_9t?x_{>J@-$z(&Ytw1p$9l2B~3C?zV+&;&{r=f#!Yb^GtW4ieMM-&GF{*R>Pz zK-jqb)0CI4SA;6UR7dZQy>jQzzM9oR+2DVI$ycm!b08z9b)HW`vV^y;dwquKiz#!z z7{5f`qbZ2W-(lM`@>j&0yVYT|!Sr^a@5GZBjqx^weCSDSeAp}P7}dH_5qt)rj8&Dv zjNzZmQJE9nRn@4UTi$&FDhFL{3vFwax&?X`rZg!poOgR`EF}GT=JOvjiYdr5+Vv$e zUOcWJ^RJ`ly6&zz(kR3jeLef+#t$T&tJ8kmDGo^P2 zTaJCE7aeYQZ92V=_opbqP4j1@t*hS~UGG%fbm096+^!bp%9z9mzWneOVRq(>EtYA$ zbj^d3(98#KS~id!-H6wJxy;@3gW$Me;KirPlJ+o-A76but&K{Z$7spC7>{S5Ij4ok zrTX(h9}IU(P?9p1_y#o!?$3wF@>8F)nDn|BMZWxFuSEENef_VW{U5$XNArvHxyU+_ zWbydjHWAt^Ur2d(KmP62(PAYU?*ef4NqTc|(_}5r4iTw`$l~&{0wS-%b|Y?Wby-&@ z#aNossjQpWn`e#KglfKxkEr)DoiO(aR)O@636xC&Xo0SXdx3EN(yszF<5tH+dtY6; zN<`auQ8@#{$Y^Opk`$xu z93L`3UWape5+Q_Zq(eV{w#A~;)*g!EDC?8;yqPa`xQ;=-+01wmo}5Ag+9=wwIt&^x!Ve)&nT%AP|lS_`!!8 zsNo^wt;0* zP)IooUtU_6W-^evcf5Bd3{LVY=p%)4XCG^AjOu^j_rKyDGt1KBN4_a6doj2pFS7rn zNPNZJaa)Ht%lrxK!3UZSz0WIIU*z~rQqBk4OJ!5M5QIM7h;un#Y+IW(2T4xTx-6R9 zMK$YzA-6W*M9txcva}9X4-M3GvqTqV4{F}~JJQFQ1ib8O9wv9@^34vk#f#!vt*Ciu z(nuY@-Z_cugy%9pVUPbADo6Vh(B%UCWIX?Dz1Qv_OJ@3m;x>NYYFewcY`%wqanhmq zM^%f?I+I&(-UsPt4Jr%GpP!Tp?$K{^S)+)ZyXB11K1Y!8efrl&18p1PT!U11$iLp( zk^MFY6B_ZC0#w{(BpLqw>;7+_7}!0H?-MikU$`f<_vXEi#Bcy7%_KB_UAH0PZ0o`* z#`&0m!7ApQi~hejd&{V*`>ktOKtUQrx--%ARY^HP>A0CluM=z zl|>oc`$~lnQJ|*QP$wresW)20(=e%MwJ)D#`=dVxJL}-kt8;Cv0e{=g>&eDN;@yvr z}fuHa-nSZ#>tKz_UG3JR@?oe%z4LaLOGdM4A z&J|=4h8g$(yo9O`aHAcXMQ;1+x*o;})|QIqh<9}oFzL+xAp1D-O18pT?EfHwkZG&z{hn>Y z`)|34_rhs)I2pvLJPv7gFCb*Ks0&mGywz$DpKG#M5I|DOJq1f@cTG2J*S`+Nlx>Wc z*wUY@?bNe9%&{TlU#_b=Ml8IIYeq{AV+B$KX_&t_IJ!I5Xkhsh1Yp*L4m0#Y@mWOU z7Jc{*gni0K<&0K9!EOpm-7vvTr71z9d@nTf_rXKIa&C8~AgLf@M2*q+GYBtQ>y}?p z*(T@K6xnga%>o4_qYULsOM5L*r6~K>JEvLCiFk{(E{KcwKn_cx*%)-vns+&N^)|{Y zqpw+NNn_0|Y?3AMN(|%PP5w#cJ5a--x6B&~lSFwRJ~UptFAoJ5p7ba}DVSqEHc(sZ zs_8IZQh8G0{-3cl*{%pP)F(c(vIqn3bhzn$uvnU8V|4v!a>(sFLvy?)s5e|{?$&`< zZWF%Xwy&-^M+_c|CuvR`|G^v--<**K1sfkMcFymY6A*b>Okf(fVrZZ((t2Xq?%r)X z80&_sT~gHY?1!sU0NBO`uCI%y9(x#QNoV~x;u_9>y5(++}$sV^{A-hh)<)Zpa}^GPc{;nhK-|YSmj{o2Mxs&+BGj+ztZ_bbrYp@TX zYL)U__%eH31y^o&)Se7KGSW1;kkJN2S7a;_6fd9L^Mwgp^03?9#vDCxK*F`f)+0p} zH9?#zxNfG@P_2HMKv57frFsr*e}=haB{v{q$01#cn}U_yCSBRUf94eZn|Sdet^q- z2$}KC+C)6T$GBfx~?iK1&h zl=OJ4rK(6g z@c0;}GHaz467HKNn;v2;gIBtBDa?G^>96jTN;;iVO4_C6b!H*gOZ)*Kh{q0_dO zHQO?M2b@PzqT>}ie)9<~c8LEC5sWw1lzsem?;)8mM95%iy&J5*bWds*V{``@D~doZ zzWUNLNbJA^6K~ou*{0L}Y;`26bOPELl91u4mSS5Aa+5Hv**wfro^}j3>Za)bGph*T z8Wq5`S{m>CpB6Av-Og$Y(Zx%}-|brE5f+W`Auh{bCf=IUbZ`YxL>tB>t(@|2H`P$7lM!Jfhj~l_oXZ)M!W5gSwSe8uJ2os6$CE?XhvVhl!z7x9-I2x|XKA_nYQCNHseOreGynLt`>G4RK z_mynQR-zAN(dRTc)%Z|`GHSRzvEC6}l~@qw(PNvvtqMDvbOU492Ghmw2(HG_D6M+X z4|&)sdcPBE!7IjVVk6fvOpzGQeL>vwKDx>OO#rIfXyDi7lQ(HrcIAj){`OQgyMS1dK* z@z0?bRnUl=To+eN8P8luO}S^%8$Of3xuiFPKeo!4l@SLHA5Nn?h;Q4)M>fV*^UYGA z3$shC9{2n9y&*EIWghYTGqhO>KD}+^6wEq+>$SU9@Nh7?lo_w7vQ(T*rh<$46q zus2;LE4HB#H6)KB%<^eVLxTz%q#u&5Iqk3wA-f6Vb51|Z6#RTJ5a^>Ekbn62+{(Z= zJk=5kS^6l8S*Jc#uPe%~y{qks^|Xfdt-({&RTqJGzY5G9O|Iv9M_MUe*ia|k>&gjd 
zCro!yq>bfwL(7}@`kq}9zJZrc;jzBuhACQ==GRt%(-6^{YkR-D%UTsGjn3blH%6f5 zU|Ha{XWaI>8D2A#b?;tzv4xL*ABFr?rUB>O5@m@al$^_=s?#f<=#}XO4^nbH+U-(aL z!!;F>e;eLKga{X$l@ExOJ_k0zK!+9dhiG@+mKVE(o{d$e9``{TV>*MOR0OFV{*Ato z($ixHk(~`cdGO1tolO`c@oBzi#g`KYE7Xm#8PC9QkvY<1CqG_p!%Ifxie0B4;i@a z(S%|@hANq{I7{+MgNjIXbn)=Uts3>Qr2Bk^JmyFS_jKr{R~2cCzm|Q(G8NMDPVZhdi3~TzXzaa@W2U~AE2|aipOC-nF-S)TtpgS7lS767P43;`bLIYzZqkQTiFFt{s+o)^ zWa08fGK*}@wkvD4jI&3jmr?MrDeS zkZsIiyNFD(uC0d)I9MtuPkQt1OymkCbn)GRN2NmL2tE1@T0Q%%*(a&Fj` z;UplAf_AIJ-&EZh_n`yS94x+F;yPA$(3&b#akF)_e16v^v*?TkH;@b3a-F@U86{t} zzhX#qW97tdqWQ?IoHR%I@>Nn&ZxKq!+b!kUIt zIJ3VSe1ji}I)j~{#^_vzhfs#T@B6bXlGJj9InLdzJSoqn3#_z6*quRRr@p6ZeIN}| zIGM7OJpQw7vb}ZlIequQC%q}f*|yoS7=plYr+4Fx5jigXOEfDyQdRGKG1bSmX@>%! z?PbWZ`Dr6twC_y)1Jk^y^b&fPu;J2vm*OyFU%38xYQSw;8)1sP!R037I#Fw${p?(b zD*9!-`E!AvZ#(Zt2)mC=RgGfh$tA!^Z1p||`yl}KJ}dmoCTH`j^p?sq`?qcK7*PPu zpwek4-ai$s>SWgOgM(c;`cg1l+D+34l8;V8H>XPD5?K7A>VzuGZzPut29p{K<$DOj znq`+vyTC?dRaxmJE|L*r;Ya4rg9ioz0jlE~9jA?v)Z0%Ky!&{(lecXi3f24uYw5!O z979%w%_#YeqcJKJgn5g@7L0k>ERwf$=xwCKz;IQ3`bYHP7Gn(`@Pi~4=m%4c;`_T? zB~Hcd`;hN14dywaS671)puTATD7}veg*_qOL{SY>Jo@{;kg4u2IC_1#{4rOe39P)v z{E_Lji9uzu=D+TrE0S4Lz9ZCeGxzX=ZE7N;r!yc;9YakyN@Q~2jHUx$2o$I+O1r5D z8~O-CRzd6~!1ux}&E8&s+PIW9y;7qUKk#H5t*cpxM`B2jl~3@fniyQ}&m5{S}v_ z)2G@H^T2%muLpJ5=Cc2pr3oOs8d!0lKlvLOh`XsK)i{x3u7UB}QC7Jnfg7UV9midh zMFq0kmxGS0G5%L3FIKNmuV2gbOewXl|iky&RkAbE!M!P(9M3KZTe|vP2D6&ghVvi z>)y`%h@PSZT{9Zm@1;ILqZx8otFgU!Gx+EizN5HGDM^``{=F|f3AIg`y9AN@xb8t@LGxQ z+k+pFW%!D*1C>9$C)? zvWCm*{COU9XoZPjynU)gH;Zd9fmM834btX6Lq%$c&6O_-u5ya=M+p z#acA3b3@DH&eTW*@EJQz|7Kr5Z`6R*!)>NFumA1#*$)Htn_?4)D`j6o zWG$e$Q>g@+`hzX+;}xTv9JXSK7ufhgE!_qJWc`-@K?{+0_FEAc zHT))trESi+x7VVLC>pKg;F6@_0P|A52;ve~m_M60Y{9K-+8<}dk91C#eF!(D$z>I3 z>A`a?C9CirP-}_t#wx70k4CG$IH=D1qw$nrc_u7L!1nw;;2A`x|8zJyVX0qF%$H^LWQb@i|51yMyo<9sr|dR>a5|loMRt7Sx=*Jtp!M zvjma2w-2K(M{dzQgAOL(DMEKWtaWm#>_aNE{c2UOY{y96llPn8gKPJl##rkUF-Lpc zYm>_%JBTj&I^@q&92uXX(`fh$U<{*#-&-`CCzAWur|Z)a_pvYu0byq78>j#1-BA4$F$My zLaQX##4_K@XI9m-JDf)fpWzm`aDPKQrv1s3X&;yS_=|E1dDN3Pg?6p@>Bc{B`&!$% zNYh&ClnwB7(wh#3k)hEhHx(vW@WA;e@cM#Ll~`e?n0D67k8_(-O=I`RX9#8E4FU%W zPE**m=KCm438|bK@&G7p2hWV8oUiTNhlcs;QC%rf1)Saym?}hqdAI5jN_3v=F{wu; z+-sEFR-0FOp9^2t;5VMvOj`W9D%an=I6SZn5z<`3?gEQ1IdU3C4)AjEj<&%97iud{ z2ptB^6~tVJ?Je&scXI*PeobN3EscC+F?iK$Sm?d-o2M$^YRL1}`j6opNi^yfsdZ!S zdUfn+he(Zj_}()3FXwBu+LIStff2$PbbmRndc=u|t!0cSCevVJ4;V{OZ5G@Bi`{Dj&q+Efr}6{hnR3vnzS`T5q^!a@eC7-GR$8j}k6_zu zxm&joMjaKZDvrHgLHxz9DZZ*C^2xuP-^Q{blodkX)I)ZB0vnFQCZ8>4zOi$fW&}2D z^sboNC+O7IV8ynLoAAQ=gEobZ^(X5!Ait~ptaz9IhlyA`Y1 zUMc?&X;+qxC#i=wAjY$uIYOx9{*hi!tvSZfyWx!u;axn|5Jg7DAy6MO33aezukA^h zK81I;QJF`4X0WS1sr55s%Sw z_(T%lUE#ug>K{R+$mOEQ>BuLZbp@Uq6F@R;#qwU|$(7~W>d80y;eZkrm)kn*ux5p& z?e1N;#Q?;~vxs+9a|bf;@+zzIcK6_p>Z(r@B6J=NCRWuA=V}H0H{^0H=6c;*P=D^qg9|hcQ29A z%^HabsKCFbbA%eT*fA^vN&=oMGPvL;w?+wz%c+5o{U@JG{s|a8xuPo};%`v@kAVH3 ze#l7^^XPN^l%nk4IOBAj=WjOVt|xlWfblt1I6~816wpY)C1co$iB(4UPCCSIq}BB5 zo0=bvjxFfS4e7$QzI%>1>*obTi#S=DeQ%)mgFOGXpeuT-8S8f1Pu~nAT`a7O3@X zLN}&MZc4somVunc>ER)v??Mc4@GOGv=S)N66g*`Kqx0!^R@*p+3hdk*DBio$C9eei zNju1a>GBjyJsE=@bU6=5J@M7)7Iu*g1!)tncF4N^nED}lM$gPoBp+q{HgxW(wu*xn z-MZ6v)^ygshYE#@n&Sl<%OMA!TX<0eefXjvp3`sonk@IQ;M7O5h9qd70;vkY6yi3C zuF!cgE@`+6TVQVY<%jF%tqE%CUw4I$BP@8@EEKul{-**k3Kx-Cr2YEjM^^t^lfFV; z`y^t2*MZ+xpz%=pIutsI3&Y=agl%KXw6g|Zw`n}u94>-IHj)M<^8Dw9R*dHxFWhCZ zO0#4k>g|%kgIBb-wfgbPS2@hZHOBhT$)`B-T_PEazeWoN#H(0Z5?a)#gw<>ba&eo= z$KE=%)@g@8^1Gj1m;Y&Htup%Lg!!pQkT=j4tZ@nL?%@;FkN)HzBija9)r1L!6kj*) z+5S=x1qFY5oAIv+bl_hTXlbYH|KRe=MQXh8w87R8w%~q(fplmEV? 
zU@p9xJom+xqIpUh(xTs*y(q;2{U?sNYx<7c9={GedbpOt=-+lIg8!f!?sQ0AxufVr zKx2UlF@zqOmN#OH1I>de?xP@6;|&gl1C_%?wERKi+e$qV)2?t?%k%u+qp6gvG>SJK zA3QfWuWSs!;S4!i4A?K~rIwPrTitg*+zQ`}bVu~Ui1Y79>hY~-gL>Lm2Lf^9IxDjN z#pg0@mlMwV*~8B@>SgM_Hd`R9a@KO*CKaQ7AWdQkC#c>}F}3)m$6^4f0^3*&>>ORU zuO7mnv)iaG+eQ!5%je_KP1B=ZD=jB>!VsVB!b(qj``EW1-;p1wr{GGWl;*W@>UaP2?)J9n%_Fq!9u zsh!)J4aFNS`t`HB zFnvww?lgLNobsVT=z~ar@b0##d+;5!LW@D%OEpBJ0#w!-a4#VtaEFS1m%iWfkLmtQ7p^DZVQ4sZk zR?$6~dbUN$~cX zx(5J};JvhI<;CD36*v|kxRm>itG4a@tB5sXMXUVhvu%lEB5UCJO$WQa~wvy=Mn zqfJ+jTw9ti7V~i6ve2ZrU|)8RcVPrPtsxJvh4<-dsitLPdVlbsguxANykFBb8qwd? zR5oDjNBuS9=H|_-Sx(f~XP(*ulZP>Z*aZ{^iCp|%!mS-gUs#S_IaMPxZAYk9U!{j* zwUN>30q1fm;#Ja4T(MotC;n-ke|kIlUcz;GDgaw{k+G{u0mHg-&6ARCzmd#yfK8 z@GO3=Cmt`miv|MXBL4e7-p(x2tN#p3qQzjt(ncE|laB6E396}@TF{4#0$zi9jaNh<7|>m z0|y9ul_VG(u|?!Xi)Pl_B&V&kfhpp>%+5BZll9jC+C!GIYvf}5ptZ%*^i)Mp@=}!k z0~1@rPx9-;iiYOZYY{&1tbtuOUgmGC=8FklT+jik^0%H+cDvMj@kOfI8wplQM_Rp9 zkn6!a^qQO+^9Am&?rS8L7A6=Dps4|~Gtn+p`Xk#FhfeXt&Gkkhz_tG?QX8&ZTO5Ry zd9>YkK)&U-phCoUkr&#+ZAi;0u9K2^fA@Tc=VGV}KlUnMPZ>)$g4`)Y&ox%5IiWX5 z`*L9>Y=-e5I>^yvQAqjiIK13<^8KQVQw?Pp4Be3_nJsy>!#)R7J=az2yF>psZQYoJ z4kS8Y&*cS`THd!nMd#z_CsDt?nDUMd;U*iRNryilWJYQ=)A9uLglpr#707n@>y;Sl z`{h}hw($r#jCW2c86e6q--*D}fvZw7cYg^Ir%1JpAOb6!?}mpx*upO2Qisr~!iDw0 zrFy{qk_t{KMo$~6ETx zi}r%b%1ZXn4Df;HK%Y?w6bph^#jfmogmA~Q88l6*x7Icxpl39g2Dl4!a4Mgd)aw_&>@~aLJmi&(2CuB*0K4Xd2`RdHSPxw_C=7qC zS%nxHnbp)Yj!i=P(L0{)JB+0%=AsW7J7SY{*2Bx(WVD39YJa+?xIb?|*NQGE+_?M2 z7&JSaH%PA}>-lijUrJ%F)7}cQ{caAeN_^9qeJ<@;Fvt9EOMUh0er>nJ z1rrmi3Eql4H7xPB1w z8FGg%S}PxJg_ilEKlpr0A6WXXnCplzpR16%m*B2O$KQSoqEi1O zc%>h%m5!H7X=WjkrcsO*uHIvEC1jQNBW6agAA;MI0;alk+g=|rIzqdj!G!#Nf`WW! zsOPkfk1x0*a7t-@!iGy^XPeVgrR-^rn+CGx`kyU0dDOStLW?UGdxopb8H66-dUXw> zvedmp1TJj{SACmvZmQTX4+3X-z)7Ot6h`130Z& zY(!XcNzic>?^?>yQof0q?}xUx=%~Aj@Ylwx>4&B#A(aY5)VISujBP##rT+hIe1`z8 z2qgVq#ohlImH&qf5sQBMfwFN9y|Hcd?^RR3X?AIhP7K>b( zwc`wZ@ z5^tliBzLtGeI^m6^0V@|?(7eX<pGx3}->p^@Ol%PM9zx3lp75%DsO-EJVu zGbwJrR!S~wV8qXB_Qa4exw4&;^)8Q|_+Zj_{bpO)eo_!-&o4T$u+j&~hN>6&Qat<6NVla(b;Cklcm6WFD6KwX$?^^Kt zv57Ead0i^z!PgQN%M*+|mX#1>g%t;Xy_dxm^r+GJXFk?gX$x};N({-Zo!`vYz*>V{RL<5Z<@tzE4W* zogcjX><4~pYxoUm5c=&^Px~ydvPi)F8wR@!Xq5=XWp+2F#Q1Kol-Ee|;(r!Vyh$S{ zlKi@zveXF-!SssnU~T8*_3&YPH^Rfk?HTC+sZX@3#P9w!j;BTdyJ!4A;+bs{%BJ62 zrJNtxU16jBI2b@Cuo^U{PN|yDBb=yR+7~6R7+`V|BdA`7Pxl~IBlh-Sop_i z%o6csF-3g*PYeLhTOu_ih)Rw1n}EWdde5BR+K!*8&`uw#xfob1ZBP?*1+x0H1>#pA-aX9#IPxMQFojq z0FL`r?E6SO)~21mwvc3&QgFOP;1yF7guQ+3d6(^Fx9nB$!zpF+>%uGwfk_{gBT$YH z=A-}s>o;L!lfN$^CRG77Q=6Gm;pWpGesxN2(pS>E1GQpaN4#|2+DgI-OL<-Jum-je z$*RHPOkS7h@j*mTFz}MV(b(z({2LjpymbcTdDFQJNmZ{s0M=ec4#=F?Nf$-lW&t9- z-}nGTx##PGy*|F0>O21@^u=YYxJyRMO}Tp;*K?McaNF={GoFJ*G+wpXCdq#bclM%#r97-vHWM*lL%g!J4gcr9SC~!lFQqUgV9u?FUP9g z9!b>xuK^x^bEU5Q| z!bUVJMr3k?TaphaK$z9H2h44UC7O80{k`*23o<3GoJ;2^s=q?7T)RC5+v$AA!E=@0 zb0^4QW#|J|Uz&Wyo$te~`rc0#tJ?3dbIrN&3T>0I^Oy_BwL_+MVZ7yiKy_23oO;>@1l?95J`de^`G#(w1+~yZZLp4<-Hc7sQF` zcwcfWqlvNCLm6PCTV9CSBNqkxX3dY#SoRjY%w?%VvB8y5s{ z;tl_0=!t!FDYr&=g8Z+{>wh8Ezq|bZ;fG?PPshR=z81Dx312&Ts;Dr6aB62(fd1Wz zjCqXCNNiU3+AdH_n)m&?6bnxuYwH4bw_^xEyppVJ5wrqhh#<8zzTlFJLz##Vt}VwB zsR*d*3QuZzxRW%P4XL}S4V-|Zs>q7mngkNvgRf4hr{-Q{NZ6l{RwZ6e_Vq9d$m!K; z?jZ3cN{Tw!WJi;Xquk&#`SP!zZKN#yKuF&eCYmSipDF$})f*V8um_M@=(o|s$9JtpAx_7Pi-_QoY@|GL+8G< zeQ!qJO$SFe-JL|AGO!9?%fwKCTKJHp;rXX=9_u~9;t(gV;XIE~;qh}a!k$-4aluYV_3 zwMll-Q0IHu5a#kj9#pk}xw0je%))eP#_5ygdMM0#vg0b}9%x7?;2Ege;0C@B!BhCL zKUeotT2h-M(@;ou%EWnJw=Irbya`^9;_bL=w*raBRX2;$69f0at~tu?TNdsqenay2 zb8$9d=A1`)yvCns-xDVgBeb`pFMeRKzF9I+?+J*J9EYqTLssrx?T@bS9w+d>W~;G! 
zt?tHo{ah+^+2ZrmgswKLctM}w&i?#Yo3eYVla}+>-rC7FS?%OZh6X>EXTgvNYp^U< zbF>tQB)?P>YAvT`5P!)5yedu$)ZMR>Yx#ASHuN?cXcBY?j4}k%YUWMN4Aiw()i|;<%lE3tDjg z!X4wOqj@jFR~nQ0$iCVKKbXgYphDApmqy}!c?#{dwLdTt!4JUIdF0U>XWehF8L5z% zUdLDuo^hzVBX%bd-+$o?M-`X>$Aq+}bRoJecyAkq_O0}OJFdkob{ki~mV=a% z`b#V0jhk9>jJE+5`-E_aG~3^*SghbF;O`X#xf3gcAN^sS2ZbFnn#L^qBHfv+VsM>d23a>iJLj- z{wt8jp`KQrdRIHp#oSbe&+>VWG6xm&10|GzMQ!}{A6zTn6m%7u&+?V>oGD0bZpQG< zJS$e!3T%_D8D8~Sl{n21rT%Aio@%j74rT<`9__*1m#g2pT(crB{oo@!4f}q7ikFzm zl>_+{u2o!3LH$zq31qTYMU_p@1JK!QeDnXl1yGam*@UooT@}l#n`x;@*=K=A^LvO) z%-B|jd(&_cQHY ztbzef)8qEc>kk<3^%k4!)oTGy-7aFlgNtz1Z$!p-{nu<4 zW~JWGCO8i8<2PH+Rauwl+CCNW~n=9|d5%>vw=V$x+D8 z-Em}%J24B6;e6akA*%h9U>5_dk=51l_`E$RAd562l%5oF;|I3jp@&iW3MDc*417cuOhNv`ZuK`++u6RD|Z3 z&;4A~C&4HZah%6YY1DA6Hhg)1QKTyha6f}Oj;{~h5dz(0e!*R0CljAh>)k%=UYr@? zc|uxtit-pIBcG82Cn;@5BS_Bf80u<&!t#NfKDK1U_pEAg5xpE?Pw5i2ZE#Pf-G(GQ zsp|cvP*VR|6^Guk@=NU1NG*bEY~QX4WYwM5e^1W(Ql5N9jIZo^8ToH}VE-Jo*VKU- zcs)@CBDCCn_#%2bP_i3a91Q9(YgoHP4PruNnZEV`Z-phm2e#I>qc?ofJC;0|(p#2C z-Kl#UxpYHVBjgc70PJ4n;LB+hI!q-w6DP-FCOG1|X?rhxva64f|8s?oYE`aJjEvnC zO_T}(QR1-@3Kf<+UYOP_5n4&|`mQ^n0>=22uUYssis<*i@%&Bi{5Q?`W(NC<; ziy<_kEXYi%2M|foMh(h2{NZYfkzD$;NeJ0=OKOV#g49k z#j~2iMuRN4?P4(mX>V+mS|}|NL|G+vDHNdBGu zl;@?%CP`rD|yAPfQbmXo4JKrRgL545@ndrIxbjad>yCy`&Jp1l5aJxnH}SM zDcRabb{pK{4x-ae$?qy_P$WMmFd?jJIxMB)uZSKO4r(uf5NlJorn z1`?RF=hZ|%avuSM!LsdCWJ_DQQQT9!*mqgc0k^v}g-qyj{W(`syqXAVpMagOb^Yuz z&PC|6{3A(jio2xu1kDtQnA4UvNOjmA*+2(T0zV{nb4&5-m>oFP0s7WZ99K9NT}FW#{o^TNP~BbZ3nv{5i$lJFQx#!@^<)(Pvfv zpkb&>pNSYh_*%WF1qifw+j|LT~+=@K`|MZV@K^x6brN8yaeA-0qyFfxR*woKe z&kt-Kz=SqK%-G^8qgu20&lR?ne^kq$-KaS>vP%HHf5uwC1VR6gv=+dW67>5+q}`$! zF{XHx-z!cA{I{^^h#3PhIf+dPH)*(b;V-?aJ_#4noPP9N=vFp{YqXlJ zb+x&H7@&xwQnO17eh-atPW%ibz@dWHvt4@n%2{QNL8PipA>FvoLDNrl>aPxwETQPq zhhomd`J#Kuog>APq}piB$hpj6^iH_ zIq)Vy(`(C&K)im$QN65Nh5CK9tY40v{PAlE@;<=_wpBpVZCVCMx@p|t%0G%_W(EVI z==+hU5$2!4R&NF=ysS*UF$3pPKus%!bX$9-Ukb|yD?W8evNgW5$g>`bH%xv7UjwqZ znUlyS(<7u&iVO6InbQ)z)o{M0-}g!c`!`hqV2vf2ZgOg+diFyqNc$$_#>+M%vlf!ub<=Av!pSz5nO7y`%%?9wxh>Z1GjO+HZG3Hj;t=^b$d1u>Xvc+V@^BzI%S`yg5CySzR1A@{iR}g+u|(psa!9 z)`XEu6++cBIjvXi`e$dL+V=rX>Pw*NUpz@{?hmKgD2+a$&-V+%)61C|%$aK0sw&Y^ z_C+L`^0~bIML4O|TDr=+$dRqQ9m>zfyoMsr<99(+;znNeEr_ThX{}p!` zU!ZeBRn1P0>HO?^4M4w;PbaS8_yU)AID=BqyH}TR>>aXVjkatD(f78X`GNyN;4n<| zLO8hSC6kx+`IffBsBt6Lo(>1yD+GW@0eS89-o`lv)(~Nv?Hfy8aNN*&2)!s=rbvCx zc?q0dl7=2F)E>QmaSK{op#ds(8w+bP2SUHGcTA+zGydYx%u**(d1h!n*P(mD;eI~ z%#7=6a~87AWYr$WBi@i&t_we>$5LQ^Vw%~F^BjZ^u@%vhdmn$96C8vJ zqJ7ylpz@oV`vx-JpUm7+Iz4ivpu62acUrdcMrf5YhI7O?^9fbcp1$XYFq2cM+z6$fKh9IcG30vzgkA=%3wK?b+oiy*x(g~ zj@x;9&8ZFclYp#jM?OvXRCibg?w27f_LOMY!8VhH41NN~0VtS`blH?j+OKmHAv=SA zuK7UEyiAoB8`F`#3HS11Ub;qi7c*k?8^dFkXM@hP;pm;g;}uOk$p{?_)<%AFYwG?2 zT-9QvH^=UKNqS8$(56vJWem(Y*J6R1;nwAN`K_(4|8s@+N9r*^y=zh`9me_Y>LuS7 z$=4#24BN67ZM!3BwE6xT=c${~Fx3;|0GsCk4aH3C9T=bAx675tSp_`SMlw%bXmfy& zk|>4z)b_wDFrZyEoptI;)v=x0ZWVk)+rEVS{^K?9t^&)wYIRN#b_kux?=d_pzh?_1 zRbpvKqv61snHUH;^0u~=o9Bl!Jt>u5wo7fbDk0+HoULj*4NCOO>qf2CfefeZj3dz_ zFC!ry`z-Xl1Tt$QGgA$^L^t5-jtBH>FnSGfsP%Q9=GD;77K+FZ#cZM18}v3aHG$YR zPfW}}KLKL};ED=sQExsXW{j0cZ(|?JPYIY!S=0Qo1oB4HA{I;@lW&^lt7U|Uu$3k;NO)AuVSA0IiBDqpjoo%0%AfyR^N=zN^4@)Q3_OeiJYD$e4BfPC4qt30m+j$s$5-4WG&ZlJehdziQ2G9m~B` zxj-BV%|#PVY{oP>WQGLOjXpu-%z=NIys&sjRfC@%QTff1sEC{)SEVlE2RT}cd* z=(R4811}dePq)LG`HK~T>JIB6gNF%Hh~abplKW@>v)q5I5g!OE*qrksPgDHoF3hk` z$e4I=)s#0ebaFMSZnodg8XeR5oFHA>jvp4{!h!Qpp##}Z{iYu7j<(X4?}0oZ%caMQ z-0PMaani50NVY?&2;;5aQIDhLA%a4p|A(@-j*9aAx`zc6L6Gq{ z0qJgOknRQv=^Q$g?vR?HldFVb%ifbDe$m*?V74uD+FN zE%XHQb}cYq7#v(K6D;W1`ZUj790oV9k7V$bWC7E_$iC0D1e~S+=80W9#Yome>I8_H 
zpSz?v_?9cynOCwRE^HsMN*yqk>h|5M9TGZkT`6tsp7@^fmcmXi)+O@GT@UAco3fnU z9iw-~xo7ZupTVO{J*^j-PelmQOICu)kL&@5AOD7K~#G}`CiottVpGfWCq>DKcf zPQ~+n<7c@VvcrtCFlzDKneY-mxgz7xO^OFgoNTizCLY4n%G-gzqQ+8q&tPXB}@Ns$;7ozG{x1-9+rYEc}v! z@LW@}vwiN8_nv)y@SP1Fj^OAd{%B$p!Z)nTh@}zd!CeUuL03kkt)=3J8|m{8WPoz@ znx(hxo1**~d7z9DoKJ6)%MpQta&G%P5KpdGSq?98Wp0&4*XzTtJ_9_d7M@WEr$T#H`mmRv0-2)2P;@ znfHh?;c5-)V=V&DsA@1EoNy{2@v|ZrHsWQC5=?f%yG_2{yb3f z$h~FFZ7bpPx<7+KRxMoNFkFw4jm-1C7Q><7*c;wpVinoVH&c z!Cw_boH~3)3vJFyl4gwHjd7Jk=ED^PV@8_r5wLfBmQ2!yu~z%N!7#C1bcmXBKI$&7 zo8$M2!zT4PXLISz=N=Y5(Aqabu1;W-%ZU({<3|U)d&=E*_I?mPkIKitP(0JK)7dr= zasp(Tx;0dqG@7F1M2+Es#v3ZcQYOv2YTn(z_${LqFOr$B>}6jj!2_g1Dr$3ZpQ2i>X2}I2U5Rni2P94#am`Vk%zQo($>{ zgR`JTkBn;#R|?9v=;NDJJn(7fVRy=6falh?{(7{(kL}qJd)6XvHvm}Bok37VEs-tv zpeYIAt_jCVFZG82<&)RWhGb+BXI7t2bLoQvBh(Wi2Z2G`Zyqq*YEE+)5xkkAl^Ve3 zI{Q}VPxLqqk>!IZzj%Jw#QgKQU9HdqHlvD2F4{LuNgPKvCx0CX%}ZChoUJB4MhwG-H7!=&g^!f+Zgcjxcs z*3(vX^VZpikkBh!J9%6<0*`KfqGrTD?#H_OXvH9okA`*wgGU#|{4%cG15t5_6lQ+-3 z(~aVminA|C1{UHU1*r1{5X3Pb_A2g{_*CcRyK)@uEjK}vwl9Hn$FRIVG(sdLB2mP6 zXXS=og%pUY92HaXXQ@=z7Hv`rFobNMUj?hflmboG|AhpB&wn9<1DB0W|NYAU@u@OpvSy`2WZYzN1Kw|6bPwNWAbmU#@-cC`A)9^*GPqwv_c z8PKfV8tXiWdmsyjsT8UAE_lI*-fE~xm1ku*?Eq(bo^XPRDTOq!ZJU&s#8KH_0qVXQ zYmfP;^`LRKH_QXL^Sez3H_<3%ZDJ{ZRRJ0ksT%b{?`KVzm!%(MeX&*HA$t{vjKVUy zad1Bn!jzBCC-knkuodQNeQED91O$fSXW8{2U7E!FR;_-poXXkGRg8_YBlKV@SVce) zd$>5R`rbdcYM}QOcWx2=$vedDgBQxKpYErN8ZlpgSnrbX)#Zk3Isw2A-hMr6`}!?v z`^FW658xV3LZ`k5uImL~e6*2&o1^Iv*x}`HvNS26^7oXv-d^kdpQELq3Nok zcWpp+OdvVeP>&>cYwA}M*FAmHO@E!KZp~rSkZL#%rY1YIL{i4ZyE)^ zdJaTDc_L3&j9~NqssX*=>8%XJ5nNGrFhn+Gx5Uj9e#XJ|yFTglYvc1?N&%fpI$*SjnTWE*-19_2efw+|?g4iarCh0#YVqTZ$)yy` zb#z7&Vy1D9r`sEyp2dg3)h%*QZo=Mcz^qNR&H?wKliKG&!{N*!ucYJ-M`%B$k;`1J)x1U@ttejpBU#3kjS#^^`PK-8wYk5w5 zP!yuFZKNb3K_$v#p5ImkBzZhY!0Hq{>PMdO*gS3WmJ_nopci~QvW;?I?d5Ablca&% ztLq2GD2|vZX(R!$Xh-ARU~4qWw1g7-4v+M(MJ-N^0T=)Xgw6~sm-rhsDw_i@Ki85} zwWp05PkI&cXM>oxTZ$(II7o_sjD#X3@x%liBw&@6Oh1-bEM0`%Ct34DR0wQpj^H_r zDC6MQCG7NO+criq#)vQ1bF-I;f_<&(zsb}FpJ^jC)B1Y&U%>U;Oy=M+-nH==1X<3h zq)z*;TPP&VE<*v^kIA$&ZfX(5@Q7yKDutW#+^56jhQ`*`q_h!#QwL*X9O9hEIf!g) zC5-mMw)~xf`P}6$>%caj$61O!6Zn(_C;v8gnyd!|=5$#s(*N5o{|_o4v`5|y)btpK zos*;fUE>%At3HQ(v=J(!hhpFw4*{koEs}*@MLsde4%O(Jm9MMjIgely;iABV`&vM(|q&3_zhW7yMC-rs}Rxz^PvMlV9`YtkFV&!>E%-Tdwk` zYJJWvKo+1u1<~uE2K})#4b%GzHk$YeyH3E0ulh{$Bh!|WebKI@Hv=63PZX2R6II(O zV?U5FTCL#RTXj(+|IJUiWqd^hBY=S*u?pnFMw3mB!7^<(xPV&a3?cNzI`A+$NBQuQS06d2 z%sO`vR3(*Kid~1oAP`?BTR@I3xur4L0NJkhnPc$k6IsX6@*dN%aH(3ZkeKoO{Egk+MQl<- zTs_#K*JsblX56mVViE@a77B6z@;oq~)&iNRfLp%|oB59UKvqrX$tv!$F8%)0vbXnU z&i+_Z+vI}lfk*g3M*+kutsyH!#2UM6XvYNy$iFQfeFO_@Ng4=OKbj!>=u|Axt^5uo zVX`Hkxwo+hNRQi;jl6tGeH41BFq&?ktW~>~N|2{4-k1j5kcC%gE%_Pl;A+QNSzGat z&l*(OzV3NtWtD@O8hUk@US1{AxCKPcu;1o*q<@#>O3lx`Rhg@@DZ+mLDY2bG;sIdl zjfOwd{3Q^sNQ|rC6WntRn?=t(t!yRX6g4>TGxewi{GPK?u;L-yjM{WkEDk2a5#Pwq5e}KM^&~g!& zm_l&P1-s||Y3ZN*K%XMVBRwT&QV=NkucthFSugqNs0083XU}nj$lr>#Giu+SYlM|J z=D&>L|Mo@p8|11ayE=1^@l>XK(&UgedV}K^cHBAB?O|8t?^&NeuC{nxjm?1l`PH=; zsVPZ>;!q6q%@F3)!M*+*K=O%m+{cnLA&;3=0VvUwJ=xTUcCaDUsAt_J74^7_?0FFq zAaWa1hbSZz*fbA=-6!+3mGSu}!0vZwu73rZ&M13+m41zARyY@v zfWAoBqbUeHYmkYb9!$JSj;>A^AhADw`E>$uG{8@S624>6SjOj}f3(^8J~_J!H)O2~ zM?zFO1J=Mov}i?E&9>d!#861Sf>@4|-v@E8n8-btQdaR{RWl=7CjWQ?EEM8Hq3YC_ z8SO364lC6~Cj%#=OvSTOC*Sfgm!(@OTjXw?zPAPP7(sT(+-R9=zC(G)a7P1hNGRIhtlZwN*}MRQI2-cxB^;132q%1a4irZ(19a-xwn9 zX0Ep0CeM8T5s1fSx>XB}? 
z#Trf9`*A3A5kuc>q6C}tX$y#U$}!31{(5N+1cK_BbKL-Ew>n?7mpT@JY=}_>CRCxD zgm%NYS)1jJx9%?KG$j}{3!oBqsU(WHd$BN{mHiC>R0u;5Vk$mAPV;uNuDhA$bq?H< zu7-W?TngpWnF#9VQ2b}z6L)jM%v1Xd}b0E3_~b1 zF_R)B0`7HOm=UN6ytOisXtd76cTRGnI>o>6zWhdf>|t5% zv2AY+TOMRWnT1AfsU^Ev=U~dv5<6Ji{)R{Z9(?9K#U&e{>2KdReZUP>HrCqk#Y)mU zfzD(DFUoCbh||cW9o|~r=+&%s3y73};MdsdDTCX4`K;*@_w@#{3LQP+l$nw7Ri$7& zV}x2``{z4fd@C zKH{Qx1K%u7n(k))KETxmDsbJYt&jq2W$1&fvt@*NA}X}d6lHDa*5`ril~2Cm8gSur zCb5uH1`m9(E_L5bPV0n7^6M@z`FHXEvh=f(Nbr!ESgR)I*%iU410p@X>4--hO3LYiU|hU3kFz{MnCXZ)3ZSCE(nRwE>eiiCYQ}e7(|Pgy*~50=`vrYM}_U)$u-Wt>^C?&A-_A*+czLdG3&3A3v-qghu27=t=M|V zN2Z7XkAsAd8K5@RQI6;P&IXRC$a$7F@(~=BGlSQw?UtVgYns|ZP7GhuFrkPubJ&|$ zCosIRl&PNeQl}^BIVnvMWNvb+}T!RM(j^f+BxR z1A`u%4MdT+Cm)aX9Hf<9`}~=Ty`Qbwx$YyV{RRg{IaY*| zqi}jjQuG-5Z!GOm%f+&(7q)DLVv>J)>{PoYxtcQ3#MG?0R6*N2yHCFhmwn#N8Eumt9H5c3;N{hV8AEqhvWAZCqH?^{9@y!4f&v&!=}vC~blTHq zt7@IUc>GH#UZbzFbPC?4(~$q8Q~3W8=>Ff|61*QWyv8+kO7eda*V+P}{PW~9EX5l= zh}F#evURbnNV?oDbE}#BoFSY0B1F;PYaD?*XS~$abn&JLd|u$G;GRsVEUB9@&wKO~ zSPK`?qO|^T=L2>{9i~StnCMTF@As8a>O~D~;Xz{0Wx?}==nT}4w_*pA1FW;zg`vG4 z%N;T8sxYg*bYph@u;2BR0);Dg^8?33W~@M+z93$;)=SMLZ%Yze0m>PaL6Y;zl>Vxg zUyYqNtW{2o#MPbrDU)=-ycvP76w-b0!j#lE=qOs-fcL&1Y|km)yu_r%`1u^r@s*Kt z@xQe!zW|*17FXwuxIo--{b~doeu1lal|#LdNDt!@7I!~IsyL0 zpYq+#OH@lVO&glDepc(N_{>d&%Piq(3HEZl;hb#>hHTWI@nSu^-0eP1=gb>pE84Ob zBf)Cza-*K|Wn6zv60o}i|2BE_>J~QB!~tB=!n>JyCj$?eX>Y3#Gf++k1TT)Fm$?Yj zvlAqHOnjTDcwY+mK4t*gx&*x}nlW0ujegi{dx8xGuROQIE zrJ6{OSy^NwV8mt^x|`huWaIx_4p^00+q>i3t`q z4x1ng)HMnEnB9Qbnk0K=u;3iqL~Bwt;zPhHGx01xeJ5b0@g%&FR5LQVQgG=4MI;6I zjQ3NEuoT!dKFX}Yl~n8MnTSMq_wp>V)Vxm{{Vj8Jol4;QH!v`|#0m8&Z@8n}S6p7C zK2YDjd0SjjgdP1g!S;TpVD)pfgS0=Z_Xc%1W}Vs3pdr`X zeG_QnHv8S(jwt7+sArLBcKWX~QM9x$SU}WNKeD;n&m(sa);}GRDUyyQ%bQQ(2rgyK z)-X+?sZf(k4}^BS(I{Q*elvDA^dx_#DT@GJrgx)ysXuU=k<7={{PR(ygoO;Vjvkr& z+_H6;VJXznrR6xm=FM@re`dwt^n)3F1`T!2q+#o414c%2uglwoq2cE)hQ_5o?18<5 zink-vVyxI_fHEWf>QO{@6F}|zSD-@dm)OXd@*dQ>NAZlt8g=ahcfMwXek`&T=J+$8 z{2-NjLsKKFGjxd(f6p3Dln-uSek~79R(_#AV%jRi5ctb(M8xXsJ|2p3Y zFboO~@d5%4;5HTte0osotRnw(iKU2<17M-4_B#%R8eDJYOPzJA-O zP-&v+dXA!+SpVVn_vxwHYnS<G{c1@UJZCqob8bgxx-z%aYXCtfp#DMC*Rs zqrH~If9t`-TIRN+wGdXwYr?oSizCtCq^7gh^WAXV0SANsy*AYa_xYfna|vVH>}z}A zqLzgOwI+(i!=N^)5BIA9X=uZ{@D)b1xS(=W+=|mjrM+Dt`DeI&STs6Oh7pdeM(7pq zq;B;--wO?=pR3yVlcM0a7Key+KXVAjjp@#W4B#0B+<9XwUX-_qSSypZ15 zV5#l>rJ*SRzX~zoypJDHJ*fvB%;4w+O*TLDZqbg4xrS}5nFbJwyk^^Bw$bE!;v`rd zd{li-y$`q7{QNZzGnzr6sT4es_vuf4GJ60`>b^d!dbdRyifH0?W+Xaq1Sgq}bq5M3 zy?HNdvg**J`abB=819JuHb{DS4RTBRdRY#vgG({z_=iU12{KDyw$-zI>gu;n$8GCY zl!xfcjBq(kZl;i6zURz|O^$2zZ`?}TR%0b;0wM%0g)-9-&G@diulsEgQzlV}=5%X) zKpO~?;iVYeBI1cpyZsITy~lNae{le2jOP*PP#bm&MXNGw;)k7VT{qPY4qdI<^ZYL> z2$ptq?I+u|>R>ce6vUmQUep{re4qx(~+& zyAVG(+{tw~1hVFa>;zf7K~g@HM>!y@vkuj`Fn{Tl8lLP9ulbSJA1lsv!-oC&Z&EHMx3o1kvB&$`usJcv-{I%$| z?-_*#NPV5XZ+2L&G~Z%?e_`;^-(*xMzw9j{$)8Omm!Oqb$r=V2_;^Ouj~W}%^uPqa zO31VN%w<>Lu7>9bh}N^4(cO_tyo$iv$e5A-=v6pw9|c@#I^PuKGynSdX$EasJ3}{m z)m}wcV4=AEP+uk?T2F@|DFz_Pd-37*TMwYwmivT?Gocw-;3`0HBq#GB4MH7XPtltz z8hiHIk9vL$2{HYJ*8@&nRq&9A?>di5Z$AG%g^NEpvi5%@QMM$!P-A%QMU{o@Z7S{PICM?GD)%~{1CO>Yo_Ab#1%-Fx+*bxY zN+g4lGG(9Dow$`MsGy0*w}#R1Icl4vN`c*}3g>%y^^sk`!wx=wUE)h?14Nh{dv|74 zS&X_bC%(W8b8`^-+F(DbWa19ixmJ<_%m^kSqYJrJd$yAt`#>=Zq)azsySh zYbKaStU#y6e#;edZ5QQmM-l$wDy=OT&Tp5;PDMy@YTej4SakVy+hP6zM(qc)n8@d* z0SaWz`_*2qB!A?~q+6CN)q5g3iomY`^-k!#%|joYuu?HLicAZ~%9+2n7(iIBa^1PZ zjN~Lo0ihpn)NBwS^yBRC@Yodi0qF3XEN_G^*H5+IQr$^qyI#nDqGymC(Ek@iKSf`4 zNXj68751-B^?$rY@t-#=JQHL}+6kZD40IeO0JZ@`+}%PKAi~sGE1wE7u{<%YUu}bfG4#v@F+6t@e+DCAOobkOAQS(y zi0e9*4CL^S3Sh9Y6h;qMo+SAjJ0p6H^=fT0FbH_Y8-C6~w#o~EMr#@$TUO}rE)yRs 
zVj?l4*Elqv-G6bt*fJnKPi48*{9w1}pcJ-3u4rs^4H8XYFq(=^m0(Q3HzV(isIMb@fD0f$!aXAx^7yTPX-_C=9ep&zt<8a^$hyES=ugblXV= z7(8;t`+4qyMa{Q5>jZzW*ECp(n_F0M;eY9^Rmv=Q?*U2j&0?tos z?IMfU=dY?AZ+?B#Qsb-6D5p@nfkgv;WAU!AbU01hfc)FeFJ_y>ThXkfG)%+mQ{9J= zX^njq_F8{;Ui7c6$aTBg0G8uV6}o!za;I%MBbl>F+_4i$80 zke(kjt8wp0F>AVWyP7lenkPs8=FL>-4nxMMPhXI zV`QYUv1=XhUlj&tYi@}**G{@-8%OVnr@=NYqqYMU<2YBfY6RkK3o=`rN<1pbnndQ& zx^GmprC!P05t$& z07=5&M{~*7NxrLUjpF?yCUZHv*NR@mkd2O;Poif~nD1UE>;17bbz6l;w=A{|pXIYD zA{nkky5p0R*0@Jar|I;yQ>~PR8@}_`8?!`|G8nNe|4+)SGO|(1fLPW4mT>=9oSO}L zIEGP&U}bTKY9@$mS?0MW`4>>>f@Ni*$LV{i)U(Tqu@@9|hVwJOQ1jFNjPmK3E{~j+ z)X6Rj!Ib6h#a;-{9^|!u;|t>j05C02`Ya1ePM7fD<_pp-f5(G(z448>8=ck8-$!|0 z!orvbFlyIqj5r7pMMK8*t&Bsa;WZXb+S?XV_z7O)N4-|@5U{Bq1TwaWyo)l%%mFw6 z^L*GJXCw5%*6cK=g=_4|t{6bqstqiZI}7hL+yKOv241cu$rE~lACr2=ED+}pspOzc zAyNmv=$|1^1VEfeW34qA2ToA{%|$RUZS@k!rQhPBztMq$1c4eHOnf&4*jcLQ+UEW| zU%!*}P2NEFc;RS|zXJDQpZL>W=3A!|q4nSwCjKf?Lg95LyqmA^E6G=tvzT7Wv3kP5 zp_*l=w0C)&v~WJtYGY#uigcK1UR>M1ob83>rj>Q#7I_B)J}O(#V<(|A0f6tid^k26 zYOU5L+NpuO9EzU>@y5s}mXkW0rZp|`5X}d|jPH9++JO_3OXDj6*?DI>NaRGaKKBku z>s-~C9dr~|r}xWMjQ`3!EOm+^j_#Su_ZbEQgwR!tmqX^;akBy=)7L$+B@%7B?h(e= zD<*qU*TLXp%Vwb_t@PsaKR#;_x4*C*FmLso=ad7Tl!@RDaMOi_vA)!(WrzrI6oaQ2 zAQaE?s(P{xJya^%2a0fuGBt@IUh1HGPuCCI>g=5gyUI^~`6a?V223bbqGx8aZuBYw zlLrJzVyUYob8l+_uhhi>quR@DaYNLk^p?z1FcSn54 zm67*`*vj+tj!aMYX-T4QnXUW_7Jiyrqh&q%#cBB~I-&4%*?O8vdHLva2Tt+;cR~(qf|6`picrFl&Xh z^vUyhclwm7y0s`f?3dizbgu+Du&&2iy*@{z zp!zAKVV}+HS@vSWG~+C%mKwQUR0$Cy)aer&r6s?NUDL&ntZANd#dbq#X2IE>(O9%s zv2|9hkIkI-j)Poq%{3Y=Vzg~|GqF*TR7e$WUOuJWv-*|}c)~f640j^6_4OsOCX?`K zlIMH=(DD=0=B4 zRo6&}#H5qsEATAg1A1P&4!@2=*YtAh{B(RLw1l(yFP9G*c3Jf*Up6DQ3l0^utit9C zk)~10-8JuUNI4lt!zq1teG$J6^S74fapY?sLv&m*@^|!Qict|Hdm_cbdy#_grP)xIhC6_Rj7Am zzv4E9U74zlr~`ck7zCWvl9ES7Mjskkj+R+>ToRNQCSn#>gn&!W@WEpsuO%-AF>wcs z!6FmX03xA9>C?vD*;Gxw<1JiiLPLu==Hdz(9XxjpjcTX2biL19sV55td)Dk+fDBv} zpC-EeC}ls7&&liJQg_p9gOL@%i_SmLy$sS}UtX`j&h2jxx$`)ZPo8-)b#OVDe@mtz z?68}>KP45|{euiiC2DsT8~0fF=ppioAO06>wHAGPa&bV0F?&g1`IAN`g8Q5{u9aE> zj(!|nIrD~ARu2WZ6_hN z$wsjY{Q<}2s?n?=vSr0mVzv+79qkd*vpdBjTlIF`+tjzW8?1EZem;n@cH%`VrkcA# z{(PQyj0(gsADHN>6Ytb;_t){r#dKC?ga}48z3EH313;{)0c;6w-}%C=dQMo~ z&)rFS*?3%1rNAr9e8S^Fc-odFnYpN9PNZqBy^`L&D&3R%sIzUy+l9%MEUF@W*zW1^h!VVQN+2`FV{q*D_$knf1{~xcRc{6*iO{pO}Uxiym9*0%g=A6MEJ(lfv^r zUyu7)!goB}Pjf`to2c~Fu#$^+wE?-$N8=_b1YZfqzapNJ~d5(hjMx^>hn_rL4a*7#AXpKdrrhN?n!QG@&+I4NYp5GuU?@kMD?}$ zxaOHl(I`-lX2~42V^&cZa|qX<#P~>Xl^b3A9?CqoFXAbuZtcb|odi3a*=^mrU2-nR zAyF__!7Wk0zi~yX8}(W+%ojrZ)q~n(6^EeT01T9!%}HXU$r=o$%4wzBp5OlBx2|FV zY&IM7Od=$*J{#$&=#8kewU{;9Gqq7#fRNew2L5RyM3X^W5#gXk3KXB#ezZ07fdk9d zuQ;ZBW-AmmQx%_iR}>$t{W8N;qmoF_AkXwc($X#!m~-%w4~SCLm4j`q-c2@bZ|=)~vH(C9FQK44+Q^B11yptXD9h1?Y%Q+ zocSBQg>Z;9%TH|H2Yatn`4|X!i+}#@!Qt^Hy0YDtIY?I!n~j+HopIg-V-MvH0mp@@tL;u7_b5!%Qk9E&&i$6Jw2kMWmOw zpEkZ=9{6HoQCR)vUoOf# zb`2t|W7nI8B}n&B*Sy1}>KsZ$@-NK&+zGn`ly3+O9b%@lg+*Jue4q zxTcK9r%=QyVnndP6dRyiNsn_bOfmb*WwwZihSblBM`6>63|iB1Ps~|tPI27j1O2+p z=R+;D#hnkmq8lGtkAZIbD0~HX^t&GSPjUBp8hyT(qF%E=3HuFA>f0F`qxWs*b4O$7 zX|EOm{{-|b>&~L5?Q-}L%lXYHrC4qq@5rQ9^r?(ri`)s1MLG$-0*W7e9%Ool}PuEwGk2WNnxSzml?IMYh zDyx|}kCTLXXoL@}Gs`inXN33YiEYQCa0-FNP71k3>F)VV0>)XVO$+Nt3_w}28cThP zc!6k+jucrD1RK94QqG1HT*qLlxW+RQbw7XJRW0i9M3qt{OIvkEt3I`V&c-3YY&41g z?3?WGzLAgR1rNOlpG~Mx$Wknh=6eAGeD5iUCZ}+F1s&TL zq~WhiG4av4d~%IUpR7@9Tw@vmrjy5gc??=4%lO(GH%|cEPSZ?%$(y+80;Rs9W#ZuK%EUz3*_Trba0&=>MKwe zVPlj1P$<3!GLub`!mb2;!hnp5jks=-pHMZL6EwF=e9~AA-dv<3?Xtwod?xUdyMAZYfLm-J`97Ev@dI6V%sh3fzqjbna z=b9kdE#w-(f7vVT#~1ZqM<}gKh5t!e`-hzR_c-|PUIYj|XjLfz)Z_$VUT+n+_2wr# zO$9~{X;^+520(-i9eW27*4eNqZV2z%*hU?NPXsbrZk%lktSH&3h(Gglm-eGspm}de 
z0|(Usd(_o<)DR9ful~8~Z;V!VC)^NF)P(#OL&M2P8yOK~4dkK>i}bPN#9<1@2>cas zFnzpQ{Y*3oOPz@^TGno-)wHW3JH(cgt=ZYFqV7q=jZsx}nea)4Leoc? zqV>4n+DM(he4(MEWg-9?Wu>vy%@XG6v!_{}trLPoDDVvS5T6=#z3YNln@Zx-{SlTy zkFDJ9*GPD*J=#>u@dFK{L})=2n(R-HRDj;CX{*qbyrWWDt&#YhTJ3nKU5C^A=BzqQ z{>0EJ)%$X*kkce!*LuxTYAhIuTn8tMzw23@R3DakX{2UKNT3Msc(Un1;Qyg5hUgRebiIV3jQHl=n*&a`>GPsY(ie9j7wWtE z;Ak9&-wJb8t+u+4(*+*qJFMV)eF4A+0fT(`B^pdn=3uh68i@#3p-KCtc#{GAfqDSI+9n$(YG)2mBtJH)1!rCabcOM%iCFjgEcbRaO-l3xegnyR;v^tj{Kd!-I{wxKgF(`WCj1ZLf&7AfJ4B5#?+i5sfr{3B_o-{vlIYRxrn zx$-9icN$e1-RUcZiddb>4D;U!L&8Per4&KHD7D)V0b`d^gIt@$oArL03rZb$%A02y zMyw$y3+i%T?W0Kh>aH$KqFGn`uBG^aRY0ql?@S1AvQUT4xN|e4t+d%KGzAVr-?}=Z z6k)8*6#T68b}5ntihizk5DEC(CD?yioO;}%%q9t9>_&Y=@VwX-@oIEhO)Wx!o6}}{ zqeyNJI3RTR?hgoo^gM+AP86%V%8S;IEnbV+?s<;xW!6b`1nGJq3Q#L3yUF0-@xOf5 zF}Xe`-Y7Na`hQ;7D1c1_C~Tbj!d5qQPno6G%QX0TTowJzBT@QsOKrs(_H&A4VhC9S`-CG3cc7EqKwd2x(wN4wEko#ttV27#~6PFMLs~s|%KzJU6*( zox#ekCB99zLy4kQr!JkFdGawV?03T2T1Pa?+_Bx$@>yl7U(~Ii#dZ(q#&HcDn+bSK z6MP>_Tv?BtZThRC`LAsiRS6zc?yGaDzavI`+#_k;d{c8-aXY}?(=*h!LTZZ@Jsgyj z;chu)$UsY4#}@fQTFZ)qie8Ck9M?D)`0!$E?sD==4xK*l(9{y!Zs!d$sF-q%R9<&} zwOtdJ_!&)s^H`*A##i(@g08~2Ycz7k<<;o}S`D)MUhe%pfu#U)57e8Ro+29S>IRYQ zoK_O-z8I&}vA6~m+hgXK37b6Li}(A3)>9uzZ5WUK1U*(y+@UOI^zvzlGhJ<18H(j- zT5$HTxuzC45`UG*>v_i{=nS!UW810eh%OJ2&ms@eea-G1WKGbjg!3e1U5}N_HNOrn zTpK_jAMZ!571(pD&_shb{Go_wR)wHde$1wdKhvCUns6>9VQGntP5ri&cJkijRX5-s412;IGeRmu2+WL$M2>k?F0J8{fUMCX6<9wp%9we35+HO zQJ}urn8;}$xnnK?d)~1NO9q)K^39Z7biWCj_+9e2P z1+F-mM&sdh#PnT{m2~0c8`Z)EnynFxiKRZDCWt}`az_0e2@MO76NqG4ZSCEb{P|O8sXDIo|c`RV)>SrDv zMWy9!;Qf>ZFVcGBx!N3T>-99k^vhnHnjDpEV0flodmrTl4_%ziq~VNG)yT)}aDHeK zvI7nVJ1E2=DY$IO#uaaa4KY8ofqXF8^|oE!F^GXG`+TBp67H0Bo)2kbl;co`lNZTp zfzD@4J_BU-nai~LnT8VG^~7(L=2l`1yYKCQX{FaBDWxB{l<6D8wi;Mq zo;Be48#0~?>ud~J3bmz5Bpv_XAyHI<_TPt|G-VU}kZY)Te-)7diC3#b_bS4lZSD2Y zt-J!J%26#UJ&!TgXl=Ko(d!AaN@4zUfli9;6<}g&>M&Hb*`Z8I?p?fEgbqVhg1+h9 z!AgA=Ph`A)78HvfCx*jP#J}X1)e+&ggi+#~?`4zz*b&yA?lwe8pk^=6u@hQTyBg0I z>L|12@tyr7V*0FoL4OWw{-Rua|IC}4^z&`Gxkn%(FL%&}(?_MS3_$I}*#8x3C)Q^p zuKTWF93FJJ9e6AO8oMZpL>wEcCkrO7@x0xhmO0ItDj7z)yBM z+u^@peIa_-mC3aN_6|i+)HD{(LXyMZlM1*t?1inxBlE^c{y?%BBx#y;DKs5P64~m~ycN3BKc|83yZ-g6Er54A@y7`Joh3+hSb(SI^4jJYQiO z_O0rXvjb@Dxia6L8BbmnHN>T@4Ab%F?qGNOhil?9IGOu2d?$XfSra@Az4%<^~iY#iw00%?Hk!D@y@*qaimAJu>oem#D1&=Y>< zIY;6#WHXJR#X0mmYG=4jS}t3(S8r6-SbV26I}9xq^r^JVOKDld_h{Nw%c|}A+WF@9 zllMYRK>6M_iRJNkM5^^%K6T|USRryp4LW5M(M=rZLMpR`ZrF$0KL!R-NoPotCF!~2 z(I{?vlF5Rd8BH=$& za*@d)Z{`D{O`=xG($s86J4MWb^e z@3#6^Re;td%kmAEeXOG)I{_x-!3FRyH^RY*$&v^=ET_RGdRVai<`WKl8zzS}iR3(JJIKzCcViwh2E zQ51NTQw%MwRN)e0bLPY4I_uI(A&h!pW;U#@{(afwnUI5SY3c@AS|VIWQhwo1b12U5$D_RgxB}dI;z*vyQ+PJ4k;nZ zGB>Y_ZD~D>lfB#3LtdCb<1O=l(i{-AUv0d&>q*9w2vvTLpw?;ePdM&~mvR*PgA zf@gACcnk587M6`!7wW8W;hHPUvFjh*#=e)B#u_IEO3B_(j((3@g1>ME}63VLuBo~Q`2spw^!t54Hey7N{B z{e&;sQXMqpz2jYf!2Z#cpjFJ;XqxULR2Zx@chonVJdS?|I)aw%iEIr_PYoCze4js- znD@PONW;=L{e<@L+29*QGgg-rEv+G*vd_$@Ggc&D6*QZQ>f+uh!QI(7VR`_w_)}^2^j~-FbnE=qQ9p!K=M`?5f zp4S*PfJJ1v{XsJ?m?j?7oka8%q*dRDw%qtLP!X44W^Vp2bRakt@Tg0w%b>PcdPMF4 zb&l8v<`$_3ewZ*zKNm~X{+@){w;RHBZ8>f6#7D#5r@33(U+I$T-ocNsm)n_cYs<9O z4(8zr!F>mVU0LrI`AdktLVOWr@qcyVKCSLHweWhzw4R`&q<>14i~)Hwqpr3#yY{~u ztN+`tE080dDlrIrhJoX`FWz_d`InvN?<`cgkmeB7x%6&kQGB%uk=KQd_DQhv6U+rG zgy0|q`wNxp4V(_Ebgd(rrlPhaJMM|@ijDDUOLLZe_k~Tv0glR?!i3SAg`o3Oe?mfz<2>~#9CKDEL)5Bj46tp?r$zjtF za-+pASu;+qhyg(pWo^xlOR$=1Hc<3NgU zUUCon_YX7vzE(^N^oKs-ccjaEKafj|S5^~ziUvE^4gh(A6O#}6a$i-}RWs}3Ja+(o zvl#qZIrhP->5r1_NK(Kn)759-AgR`a?ki@wot+4+HJg7l=NS%ie8_F z-@e=Q>lumOQyQZKy&S)mMtxXO zT$?~(+*1ZE{GlfRtuD)4gHJ&1b4F*h%14w%3^T&k`_y8f@12q1L;*RX#j7yR0dd*E 
z7uGWbF8rV#y4M16OQWIflAbc4%Ie)`jh;3W1T+TZ3qsFPQIMxm_f2@H8h-QIQu+3v zc0ss;mqnuKy$s9_@1DH&h+gc*wPh4Cfy(=kv+V=?5(Sv zb5%RYfIbql!z~@Y67igsO`8(TU6*M5@Dzz|E%h4Lo|``K%->?He|d^oIaF0fVl%?h ze=yKXx?rf4Q8x5o&^Y4lsvwi3oXUF{ywe}DA{-6EZek6~x zlPu3n46lT%m^@COS2Njc8?c#ozGl^AW+=wa;jWEt=nZ%Yr5W9D^mwe&?sXInq+etc zoS(!4w}|~nTgdjJUk~i)XaiIwDYPllS|}JimbaK=1X~=dJwTGbhs6vKn20$eI9ZYF zO=zm}>ya^BXp8-`0vc-W9!bUvPG+%ys%hSOa=%|3)psbyBb6%}#4jMAL*aohj zVOF!^)bk~w03m&S5f;zNC0Ji=9cSNlQLGx&ofO8+)9_c(iqNzW_9u9{tADeuR#$Nw z#p`%QR4H~>)iRVG$(3ARXyI7;m|{1^$B9tGhu?tCS5qWX{%`KO6aO z=n}yG8X1^!1)Ff*+pVs!v@dsybuf^>NRjrp2Je$8>{;7|T;&`0`rnzu8Jnn$y>?5N?Mf&ZQUnva5M`R_#KX%!f4+Jab=xm>{xk|IjmW+V z&SuIz9@9b!Vr2>dRlK$J54~yCXpYcfZSeRlTAu+2%o*p$b>!{VYdbaV`8gvgR)@;h z{@{#RcSIjfe(a0x7!C}eHS_(|ns-vedxrJbv1?=@w=U5_7&~zvFca-csDgLRh6Hxx z1q#aRt6{FEpZZ6tK-;ZM=&IFC>v+yx`7!DBdoyXBmZwMK<}q^P#6RMya@$>K9YawP z8dnZgHI3RA*@gDGkLvXi$Mw@Fjc^zAT_X&ta0bB;pQEMGi_`i+4}?Pa`y z*!Wd;H2x_Mq;^^ad0vO;7GovJ8o8C2a^Y?;N#w}cg3HeAb}u>d=FtNI4#+1m&J?yu zNhVq_JPqWxkm}lV8`y)eDBO|zN6Pjl#ccHYHd{nG?p>V&K_!_>5eJP7ha%nBWc1j1 z!8Zxg{s0%2ZuS*EIZ|}IGs^vUAqHNdz7J}+0?VTQr~52A!1z!$9fLI%*;O`y1Fd6s z@@>Jb#u6VH!_JY83Xy8Iw+Iu%EK{Hr_uTm?U^6L)9B1sAw4@}y9hA>Yk}Fq$a$sJg zw%`5L_Mz?8_vcqmfBW&Nwys&fqOIMpjoRgPP?4~{{_~A{dlK+i!<(RNbs7R+{!*oX z&@pg^kCWEG3ivZ9>n6E_hR+|%6aS>2s;{hEr9>U=+}wn)OI8V=1T9Jg^E%)wnxNxa z^!;1QeKN6QS$KVfw5Eub4K5S5$Is5(OWKFU9T1#BnV$JBm8k@dl+n}!)bo-Xk~@s(_3CDX?r>2Hp1JA5%=6R2vYh%R^W+<)xfyG@qYHB_fp{X~M>p5OX@}iCVO~uR& zJeQ-PSQGH;HX1xM(DX`f1Ceq{>j2!X2N1E$rE%N zp&*spd3%UnRe&1>T&f24Q_`@i0f(aulhqlw2a-iG83)d~zTYsE2+k2Mr;O!@J!rNh zbQUfs_wA*7BH}*a?or(~6fc8iD?++UtO>IfF<0*@6*>Im(e&2WT$g3pC&-2dRLQ(&*C+Z#6^x_)?bo40o)=?9 zgCWOXMIMJS-bl!~7oNGZ@(XlL0m;$y^I8xRXpUTO)*R^dQIN0CcpD!ilMDMVfP0h6 zQEcA^yT7T9c5DxAmQC~mnete)Or00qE#W>?Aa67EY`jMEVL_Nk5@qK@6^8#^g@2l5 zPLqnc&tjxt!*z;Y9|fZCg|PC8|B;D%TFbi7RMJV*I!N8K-6hLr11Zp<<{h0#y4l=? z!KVQBLcPa^E3b``;iyGp)aL^^I$o8N+kT+^g%yEzcu5vh8rCZ#Nm%j$^FEx=c;?S; z{uF4HcRwmo0BX9gkN4lyH1ju1=!x~ff4pBtj(X%(DsV=d*sf??K8+lrjmVDnYE9)9 z-SttMvAvnHS(yGeDMV02K}JmOjY$9rM{PyCrVV~eDoOpvzNZXhA0EB4PG^s~X90rr zb|aNyFLM*pscsS?_ycI*}s%Q;jF=N|-5yzdKidBX$qO?N>QgYWf5@y#M>!b*P z(C&V1z*tw{jz#DC`3X>n%Z>;fcsWiu3qz{Om^IYm{g~F9mfYRU&{2wa3 z0@h?%Jr--R?2|2SmeU3=W}48;R?y>=MJ0t<{4Ed(v4KNYA}u!24djr%*!f+ z>Jm%jRe?YoBpzbqBOBSsT_wB;9+Wm$|+&xo#0B1K{7^Q(_ z`{RB@Qva6{L0#^_083Kt?6(3{Z%{Os@*hg=D z)nww9+ppw8OWFODwv^>BNA`~*8eSS+=iiJGgM~P>_?23=nzqsr+b_jV37edPuNouZ z880hRZ}Ow`MJvJpq?&u{NJw~&Kv=42Q@VtAG5osu72TY-F<5va-&0b0# z1ZT}nGc)}Zd2SEuVJz@x?6R>MB&z<X_(wfvolKyCe$hBsYMa=Y*>56q7!E*p6) z<3f!-HR9vgf?19tP$T0j*=Wf_iK?wcqn&+fW9RAcM|fdstTIz^91`jx3Hu_L{{bqfg;Rz+QWo}3Cz*3yQIL64;)4=BRFSYE7tyqi?29TS2rjdn$%E{Q}v!Pzkd zZ^bJ6f!FD0s8La$(n$+K&WLqh0J2L#U$^m-e7B;Fo~fBwcFS`GIVb}7HTgmt8QH8f zU@bI?6HRl!M3TN>&94N~SsC;$p*s}+NySfK-4tTHcmTP{RBo|066*)RHHN7}1_U?R+1KbgM!l!&CtvIW85X&7>BB$W? z^t@A(jiB^>1IKoUnPT!d3R zC^URL&sF9i%o6h$A{qE)_TH^OpNN%iuL&VLEecZ~svyP_P=OX;UlY%ej|GQPO2`GP zd`7*8Oo@%TspB`X)5%K(c%a`4E5M}D89eWax4K$eFjA#aL_ zmEtT;J}_6hvNVepu)RZU>*MoW057lS_fqX)#yrP?RjX_a-!~G~MC(+iZlKl3UT7G7 zya|AuyIPKW&YrIxrNH5#P$R`lh^u0o=q$2=`tMd>L?{6=0nd=8<}AjvYB#i! 
z{_FBg?kSWP|fxs4d}`?d|>bb#3OTyXj)qprm6hB+${4kxpQp|r@=P|Gl+(*ap8nl|reblva2BCA%y*qHWWC=$`v;XzMT-FLLfOYHfq<5gjy{XBP zVT@##V8i`KxBL-V|0ju z9@|_S&=^(AN|MQ3C7}O(T$R zEwVH%7C|Q7l?dtkdmmeNJ`WaV-Fs#G4(UE~5#V>DY(;fP`VX-ttDLv&D{LX!}R(*&j zDL=x);}m3(<0KW301fZUlkJ!jc*dk_Vn~gXE)Yge z-}c;O9%y^-tUU+9NP_1!4a85Cg92ty;CRk-9d-4PrKx6DE2~K+hnhcK^M5NqMe^Ke zP~An31tI{d%njoN_ETDc@eejYa;xe8ao4otz5BVO8%rm06}wKB!|e~^p%951`bwYL zx9mbEee<`uah=%M5y88-t7JnJIgur5p@+`e1c_BNJVEnV!9+S`ukQ&71b##|WZl>I zSqWNe@#yva=#OdLd~UU$ZZ~wFeUnHEbsilkz@Bcr`Rf0yDlv=Ci2UkNsLN%yHOU|H z<<^NDejMZb-6}i^RRv3Y&RZ2xQ%I3BRr3e2G{C>0mS;q5W9}U7kqeUfwnuMz&@M3M z+sJ-kRrjSc7qlLXXc+B)dtbQxN)^zTjO&u>Glj0?igP7$qIP@ZI9*R>+s^<|!)afS z<@xHIHumA4z5470N@KS2oi;*GIx30>Lf*4Afcd;aFM~a+!+mV|ThX=>c-GGkhXGADW8m!)nRaHh8uyhhx}0ZLY5X)wWz z-80x_y8SUvME{6ft!$LEudN`jDooz1?s_ZR{w7Wwngiy|r+AwY{-aX>^`m~sGCpRZ zUG4T_9<712Zyb@hB23f%i%9qRrFVwe(HrcrXG+_(12MOTRhjuhh6n z;O<*Em$Cg5p7K@qck{fbaK-QbZ1<0j zF^~1avOQ*MhcG-Y*_f{7w3~^LrcAfREtq~+h!)+K1V@OOVez1W6@}h;vjWz9=kc0b@*X=}ZC|0!&95PTdsE|e?yv%e>ZrW-)dfsb= zG$XerPJhoO2^*2!phabndF|mrGLWudL=JT~Gq1xiu$gg{@jg00elog8F7R!y6J@(N zV3-ro8LX)Z!ZPjs8h2f*?R1+vV|OZzhV>YSQPW(^Qr6tV6r3#clHI~Alq~Oq*YyH~ zFY=qol=aB&u0K2&%v(3gy>DBIuK{*$WW-*!u*mN=tt5)mv@U}5v1Kyr++}5U<@3pG$#dP zCgH}jG5LA+e>ju#Ibjtn^4X0Mvl6)P&t^CL>~VA0GF;TyHcW9=U;RW*4j@inTRkz6 zy)0z)-X^70){9O34hkZ#)#HYvp^^WRtFIRo;%WBC8wJDfwuZ0N{oZ25VnS2IuKU|r zfu=y=da++uG=HCzM~DNv)k44 zLxx~4g4K>3?Z+sJF$AC-4;{aPAfrn>ZiQL85SNY9d!dRFHFqzGg#pC}W;KTHXdz1c z{`lUmCImSP)r$CoJjh&!&8kTnY(T~DEjK&%TG*j%^>G|~Zo6XP8?k{=CP1kvEin{$ zN+0YjZ7%cjoM66*8WgpR8UYT%u?sS25}if`Z{q@2w!(&U;!$nHCYtjyNX~dkZ?{s*V*xZU&PMZ$UCS@Ja~9Ci6l4qTY@EgvFt&hvwbyTbliSZd;iZAH$Ne9<8Jn|l2p??7y_|1dkeJj#=P zrMBG_gO5$ci@CD$iK|e6N^hb#K0e+T!0-(NWC^_%D1VjsAPSy8jp)7UoqM;x+MP<5 z1UpqfW6HK7jnd;1HDJXVqFcf3-M&hwYOMR&x4{5nrK;t@Li1IMypL_Fn66ERm%tUX zKRy)Gu;2K0$eIWYu*UTr-bEe^DLu~?0OXbCpX+EQRFo&vhb%W6s$ulBhQp9^lbRZ# z5_{WRsqR=O&hX{p`yjzvMhMRFZ}D4?Y8}!rXu4D7w2zUf9z8e-ra4YF`(0ttu-Y7e zTXjGrV0#?i>K zIc_2&CIJP#zSI0e1(6r<jA;V;?k)@vKH9(;pJ z9SOtL?p#+pp83gHISU9qXO3uuM>hc7_+6;QPN;AJD2__;h;<(~p7Cs-F|i6wEP|b{ zfQ@uG4x2|BW-BO=(Yl7^0EwfI;ct$6r(D$lA|-HF*i?RL?k``#fGu;bEextw^D-lN zxuvROOcT#IMO@0yV&WKo;V?p*Y_oV|MwV$uM27zd2H$?~+4g@0Menge^gm*Uk(K!M zxzPv*-F^<32^9?c1jvna^o}yk?gq`s_^YR0xNP7&Jud|fZp_gnz0&agN#W7>goA$R zYg_OXOjxS#BY2d1UZ6bb$IAZ-)%WeVq30j(Yj$Ji#V5`X|d*RrM#Mf+* z;%r~FTJ1-Q)Q+6T+SFb;?+e*p3(87@quQ%WA~P@JFCE3{Nbe-?ELZ*A#rcc`H`Q-% znEPysZ_S2<2%uKTr5rgou@j4o zs`S74pdlQ%ANh6u$9aAbfp~2HfbrC^*Zli$TZgf#PZP%-`nyNN;DuMfI1_mrR&Knj z5_%@*%A)?%#yMh6{G&A2mz3{{3$>hRr>MscME-Fez#~8jSz6S&XAC@hO0YCDb2llG zCN?Uf-XY3s1UK0LYL>Y#P*uVCPPq=| zeuI}$Kn)Fb#NMHq>^-~jc%En<_g(pg6tK@fC3tq!P~l8#GM8Bee`+Hp7YS04VKaGC zC$%tzU11kCUgz4qpZM) z){XW}zNqWPM)1By2&T`8HZJlJ;(~r6iDN{m1ll}FJz_ngvR-iXJFn^(3jI4`IqI(W z^|d3f&J8@r`i(Ef-P;B$;7cpdC$Yr{qS7zBHGDHXp_4;FSRvXzZk;!_5SQ@1@+r zj1hY8(E%HpC>F0*>M~;J;{07nymT|07=QrAe+oeY&SzxVaGNX?ZsFthIp05*e);yuK2zghIRM5+F*NYdceV(?Eeg@uTBW=D{mU7xm+fcEbiQ2XK`Fh+LwSzFaFKR3`4|$yBj9>i0*C|3u|4`ZZpj z`!A_LjwgB^#X?J!;z;Wc6YrhV=h@Ce%{xA}18BA2VGi|Mdj6_zh7LBt(G1j*wmUd# z$}G`3oB&R9<b|`nb9OXEC7k@r&KBIC#YKZXe3XpPt$7&aD^~Ta^oMWO<%gm1c8H zJAXfNkOJNTMpvGG2g~(;-&(dt`YL5)-AeMGXRxI|eK6LUV)#CNZo{>@>^bmkvy7cW zXhzP;Aqf*oX4F7p;P(Eab1dn^yW&bc!r$o$Ig?^dl8`p@n1XH-8k)E)kO9Qf_0;!g z0&~6wDG+^M>$55M*b)!8&4yWNY>Son#5xArG)eJXzzVB*pO>$-nsaA5O$PlGyV?Me z5AD2E(IM3!kT@2=IxK>YOn|6&hc1-o++8tQd(*712TkHkifm%Rt`4Af*)69gHArb2 zIO9Xu#T$-4pbssT>!!6Na|!N^3iVD2TBZa#YyB2|4q)Q3-uP&=LqR6{*73;qJ4(%% zRz6tu^=eQ9QbV12YQjotXpZWh>q|GF4fgOS7TFXHb%;E8f$U4Fx2qWZS+#p|rs;u( z)vL~TFm{t4<3%_o=Vv6;^{mK?0OH7cS=$LQSYAD_bTcZ7xSlGVE9p&MF=n^Q 
zH8R<_bq-5uishe&r5MMKbc}gK3fRavH>@X8(*(-ED0fda(h+_nmzRYVpQwKa+GQ|7 zv)&-#+rYHK*N8eR&|eh-`uv+~T~pTd28r;fyV1K`z1AO#ZI{cNRGu;KVDr`)&8;d) z@iOO6cT9rqANcdBESa6MvHU(>u*f9uGRHm2Y6r``!YZ@GwbGn5HY@#iNMEko7nWBU z%v!I58sd-yZM2fG?^cObbuU^N_qiyD=5Mx)S8YR)q`{C)5SD%I%##9{9Z)O8Pt*4) z>`2oxQYjRoPpE?h2}};{8OpOz;#nC>#Q|p5D#c{KFFZHd+7(BJIa^vbK>`=L4~0`8)J)a0VAE|nKRGw^e2SS^(JULkj9OlKJ)C~ z6)R>#PSkWE-MaW*kaE>P{wvtd-oQ`+IACDADC$9l5~yL6b$#eQ9EW*pmb`S*Zi)r? zzFQdCHh~8oJXUnz6$^b_h7DCO;6)jMs`RpDJ0A_!+WH&=PLzLjLmUYN`!S-9=>Iqg zRdO9^Or<^-8+#1BFRmO~@6pVCWW;#hw7SA{4?3pCtB$&W&5sVf6B-d-LfVu=ypUok z!{1&@Vz17M>Bjn_(<`Y2#PpXt)2jM?SP#pMH)MEWfse{2Jsa!O+0ir-j&qGr1Z_lq zK1#JL;m8LBJi0%FA)udHwDsLe#*|Y%eIq>cFMyXOingFJmgQ%9101-hB_Q!qjR$R2(@xPp1}U=Zw`I3M3rBN|l3eeyuXv1A4Gy z30r>TB?o~m-mkzb^YHEW->k=%jo*0aGhRxZ{UA<{cJPd}4QP^yWEXCxD048>J4*I0 zpH=$Q8{waRrLyU)aPEEYR4SoJw2@serOT4YL>}6EfdBMZR^YueHGD^XDQEuFvN+S~ z9mL>uUoNk-Q}Bt(IoU&H>WmVsX8|y>I`z^+I&Mi+lXMChVIzE#^SObC8r=Fo82XJ7 z4=w~ry8yiIGPbL;0&1wZquQXccUTn6=NE;_g_Qat><%)|7aH@MXXS~S6r>>AzEdH9 z346bFpLc%8u%S8m*m{4re7F+`{5|9)NP9CdYLck3J&z7Ci4!d!SZJtqmJJXLYeY|o zE|H|Z4)e?q6}&SN@~OU}e-2v4#LJJZDv_xF426LeCAhE4%{=pCn3|;)`ZN{NIxWn1 zbs*nr+a=15J}vLrki6ZK-LOS}9JT>6Lblz*`!o^A5RANV-^al)WoEaS>hj)F{eWU# z@?3Jw@0~EwGY3Fu`LD&xIK1G@lvbX6&YPssv>E;T!XD4gYs*zYGG8bD?Q(7Xn%JZs zN!pin!KuC>yz*@9>yQ_}btNcc{0QIG`Fe2rsG~#ZEQEY$`?z;h?&TR5#98;KOh?OL zto2Dh(E^Mqp0E1-tf>uyTj!FCXaA(CxMH$ByqNEJ1rB~-1xQnlLU&eaa?C#&p;TH# zyYB7(%3FbLU`$8L;oh#xx(36R0O9$d)X@<;0p;jeYLVmHG6e$zjg0pX1za(5_s<=4 z#mE3(Y&wq8XB_`ADJ=W2ekrG>%1H^=Ob?qH&><*4;)vf9>|PPdZs8?EKo>izM90RA zt&K8H(xh}+J9qjdE<1~j7-Gb30aa4JAdng_ZznaQhB{D z^=2|gbhWL{VwlX~D{{z<_7ryK5L&B7A?7Pm{Ma^N#H78hH|*P!s{T6*pcL!8`O}YK z&crGqw;I5%G?to{BAeR~1&yiR2jsaE1fs{M*%_z?6rZXj0Hk}~$TVKP{=^1opt#Z| zWPl#Eq|$X_Bxgd%)TU7)-Lzx*<&wYuCr0AzX^c4wIfu;JxBB`u*9UR#cHJLHo z_jF>}m42U5_xVwg5M9dT&{Ao+XiE2SHv`b)Sd**K=DMg&{ZJnlRo-w1zYdnN-FLlr zlc(vzpZ~@oB}j1IWNH2R*TeS%I&3{lkhqZ44W0N>iCC>&isu>NQpNu4*KFWFB4o7> zwQK1ws-Ar5F!$O@z~N%oqmh}v=zfac)JbXppQEmD;7Wiy2Q!|}d6F-2P4{XN!VjPS>Y~VeqMnhaE7J5 zvAA{1U~(>n$S@3sEuJ2IHBN~)-FNF}lGl1oJJrpz_CjWqBX(IwacP&7`5ijXjt0MA z`JO}2hh-M>&^C>#Os@OARrSv?)|1287w76F4zjK#S~QaYQ{6L?xoq)gy{b<8MSwwN zslV15rrvr~Ifk=9a2BZ~KeZCvP&<44yr?0{V%#mHhoT-~LX7FQfDRoJIFPO9pMn-? 
zzmCE4pkHyr>R-W(QJzrr|K(iMZujQsjd9vi@XrI+3-@Fs|D=Eh%X@!;qRDRRtFA;s1 z6F4G!yYa?uLN;Tbzvn7E2Kse3Ub?peb-fw=Xuq%8!t;0N?N_Ctz^$Ysth{z ztFMK)rU0v66&HWgxrEZKIT6J74=3U!;sm)aATx2UvtL z`aj&{8E+s^Pjw4$=HCncS?)lcQrL~7DJICfxp*o1U&)IB6zQD3bGcV+WYPWVS1ozUwiWoyXr-F}iZ{7daA zD?LL>f}GiBI$zvu`$Rwe{km&^7#E-4lB?BzS^1FHJd#^4p`J^E&Pq{Q^BPvkf=L!z zpM6#d#=D?^PwF;@dHueTuP2eg0nzNrGghHtATvDp!fJ~BqK(=@O{b5+Cv2i@*LnW= z_f+l*kB+B!P@+XxRNe8;l35v4w*6z;k_=6E0b-cHf^v)x&d#&Y*y}=#m-wW$mhU*y z%wjs?hGaTFXPMCE6VBpbG$$DNg^E_tvq z^Ii9qjZdRrqjJK@xjjkug}wjyoy- zV;3nO2i1Sj6n4uk_@{pVhaXUJ0VisRxF}#q`y9IqjwA!>bu$`FpW2Xgz)SZBRC7ii zGb2*x3DPYvU{B|dg_ipTi(t#SN%FdyKGuqr6h+c#2ptm=FxVl|+BPZI8(YSRSOmDw56}0)~qg0S#$?>!= zr~%c+NG1{?vF*`t7p9=chN)LPXC!g)pBmLo)>Wihpa#Ey`|!D)PouBCyxX_l%uQ(n z{Sw_o1lGl%NQOK|Rq*SBpHA{_&TGe_RFC_t`g%N-vy}Z4i$~g92flotbMIKtK8pfH zg!I%D^*uqE5(J+lbF-6YH8_=2(N$HRu&sIl_`vt^ZfIm|jZPLTx}kB&{}i#K+1i_WZLM1`cFB+8a~x^418Du_!NLlbAJ^Xj z+up~q?P2Q0tTBUavvbd9+0|xjQiq96>Ry1pjWRiOEUCPH*`sa7MYS8+M#{sjD7k>( za3Z^*Tn;m|%oP3eu0eyndrPWKpf0)B>La^c-uhF7# znJz|b_!jxuoTQ_^$Q3-Hi6k`|UrS6H5*4}KX*bKM`}&?ed5g#N*JsHoqS$4%lL@sr zQNYXaeWq>VW5pMm@^Y|DrL2&X_~TV5t8c`tOmp6Sot9YfH?beTzoTU2>XZu)bCzAv z%F{MzK8p+E9yleXE4P=w!#lQ)fM>79P2SQipya%;452sJ<@Gnd$UI8DjNtZX@T-o} zsE{Eg5u%_Yx0$gS8p@q(@25+;$NIDPxo0js{hWG(Zijg0GjIuFWxBpC8fU5sn?b#T z8~hf3zskzY=vSg%?<@HVO>^$Y)47}Xt%<+=yY?17Q4{-*tuuV~$AqQ^oM8^PA-&xZf^R^G!2$hez?t279aILF4IQUcVysNF0F-aPq!w~8d8A$YdMg&bq>XDj zOqj%hdxA{AaRq5IQ{#HR;f+PYqtmc7+C$O;HV&=hq1Z3J0qWG;`^V><+_jInwBI3q zp^;I}aHA9HwOFfQw=3k_5s(@ssOEByiP?J<<(Hzm6{Bg&68|JR%~KhO`nSxvSWA|8 zB4AU_VP>C$ z;atw4*?o0Q?Ifenk7gHd9l^-crd$Dl?Qtb^JWtYc(N|1PJjB1NSKJQV`gZU>vw4rR z_Fj&Q_maXR$KVpkQmQ%ft#Nglv(Ff6 z_fRDIl4K;NwD@CQ-B*HQ*@h5CFyq*vh4x z9h%JlUb5;d?%9Ij7dtL3s4tftdk+(a#yi<`Ek@j^LvDU8)p$&CB5dry)z6KwjFDPQ z;vIVjFWk2!6j4N2E&iWnx)Qm5)HH_2C;0%9yWbo<%D%Qvoqr*qy7|g7l-3}*uPtL# z6_8D}|77WW<#CwzGnNuys)#hwSun@*HFRgk?pTaIa_X0j6lI9)9d+ck<4)(Qh3BY# zR)`U)nyko+qy`9L@=Q9^rv(Qmm2|~NSv}0{pE3cd8^d7ehJOc&?^WlfPn=#>Y&u)_ zf{?f}z2F-&Nx5D6yRVEK&9HDn^ zLAs%1BvkLLho@FCm402P17Fez$AB_F_CO;&v2)2S+@s07F{4Tu8IY{ZXm|rrJA)z= z;(7^QvU(g3A}BD#@izu@R!=$bymY09ZM#4Z#mHkR#+YEV^|Ig`a0e@JT zURp<-`K>=rc{X4xJ^i=Bg7Wb%F7Ao(p!k0zzyHHf(^yH%2L|hATndf4FA;&WtAzHm zk3AD)_`479CTWOxk(4XTD5NpdNm^F_w%kFi=*H{W7Vd7~8okWf7H2>p3~ zq2YltLH1(jKg~8XB|s~OS7WJDsL;0%gM7+j|E!6(SJmuVpaM(4hTOCbDj(QULPcA} zo&T8Yqs2U9q&+PlyVo%Id56B}K-UKH@Es&N{Ru=5^#FNN+g1noz~L?{*lD(d8|-(e zfbz;+o>zK(4X1V;aJrNi#mu{sRD5np*@7nmFwdrm7Aa)T`L#Qi zd?LgZgjwKjAdNbopjZh=;O-|-z}Ib(FZ)wkn1=6XQ?LN($6$*3?i)q3u3~fF@26co z>lVER=guFVeUVy_S~$=>N-OHH4h# zlevWCIZJ^%p)jrI1HTMjDwke{4AwP=Y$_O}y#?7BTt~cUi1P8wcl#MhjwS=w+P}nv zN`qutJ%-gKZ(BO=Qd)DNS>}*H^<+NTo-HBxrI_N6dlysssoqhuP4!S9a`rRp4qWw) zWA#$_NVN8p%r`$A3-@U>DyLDR0_iVBB7QDbnWEN#IbdqYfTYL9!f|5G?3Fd71CpK$ zcPwrZvf|yg;9(@_;~Fw+P)2OP@4{(^brvk-JDajamY+z?9OmiJdXuG_nz>3&#@ z=ff8AfqFhK+}ZTzO2HqEqy>^$TD)L>r95UI+il_4tBqn(3h7qG;FZdi;hGy}S<-0{ zZ3WV6sC4K&1HPCxwe%5A51_o|WgMju**S{mA$#%SYz~Y|FO#aPe0@+Jl>XD z91BIsC64tbU>;bfm52O@*4un&xYMX-yp6<$DS_C)kj2#U`B=Aq3%uGZuh247lWGCj z55P{<}91`)*|U046}QEdSm^rg`}M}&C}y?`Nhy}uwd(cchSxh60w zQ=EA42N-bw!CaKezC1Kx%q`uaY2cZcV|&}2jXB)WT&W<^exoy4_Z<>-Y9w)9`W8d; zO;zTZJ6qN%h4Y-MU>YHp&l~->X_xlE;Yq$WKi?Oo8G3-~Ut3+~yk$7gIWojT^FDKq zMlcYWo8AIZE)W#U%%m-^SS30b!^l;@wK`e4*qmE18+dE(u!!*VGZJ}SWy-BVw#=nj z`&SG*Rmp`wc=)ifVN0K_05gSjt_Ij6wwDSy&Rl)g^g?kxFFFNl@!#Aq*|2$^VPk>@8%+uaGEvQr1j(!0~Fy-@>}$=%Klq+c4gS<3)e2u z&q34K_odgwp6ymE9~ap_+y{HW)RkVsg6jh_^_oO|M~eVsOH#_#G|rfC;I2j^JiAgp z;hz2ZRIyd>!u?d1p{ulH??*(b50<>}X6>pSE8&z#I5rThMqS5)utT2ijap1b&8Rg}G@ zJoaa~pI?<=Y8THnG7=QYbhz1Hq^OmU*4DeiMALA@Aw?iy0w&r@e-?1sFF+LrT}gVz 
[GIT binary patch data (base85-encoded payload) omitted]
z`s$zACZRiUit6TPCzN`j^fIoNf^5w34^FO=8NAw*)t?y#G0cL6amUEc+YU|@yco+; zsfI*(u-{-Xxy|M0l=~*8gKXP)uF}VNM)mDYoxRFbl6@y}-rzCdWxnnan zN#QVI!>D<6TB9~<`Y}n0wrPC&Z<05G*|>qha5+>rW3uX&!Dpxg*8}HNK77rBg%;bm z-=BP#lK^Z9l`aL{i%ZVdD)3M|eR(VfTnc0pL*rJ`-HgqtqCL-|z3Pz$H#J7B@ASVr znDKCDtwCVE-<N$$Ql2p_n>Su*kl2>9l$`c8Lb_S{>+Xq8N4Ekf_L&LkTy%t0k@ zLjC@H!1wFZ^1{H<$n;S`xKcw?yPZ$LwKbd0idayoP$}q%=37&{<#Qf)Nv7mxr$=f; zJDjXZT5_ueBOAd>Z9COHLk>Yg0RLOz$JGDhM{Z2IucqTbQBCLfVQU??bll&>HlW|I zJ(6pC#^|)EW2@LCl5ULWI^s_UgYjX6im5wrgG@|gZ@xbE%m?k=%I_kS5H$BNhxI#f zA+7KWbjz365vDL<3gNRKxWwFhzI!w4q&7R3YvBBrX&h}MQ)7mG>(obi5~`e+-|49% zKi@z8edETfyk{VY=(~;+-}9SUMvy$$=5mBpAxqSIbKeP#ZHngH{3DEY+m&JUIdW>F z=0=yy0DXhXZa1B79G%?-@BzJ+-RB>~^)$V=ciVOk zgZJNJOATD5u`Nd!t~UP{)zy>beMCD))>bJn$w2{!%7IR{?Ah#3OO&tuUH>%Esjjfq z%#j4q{N2dSC}d_}W|2iA|1VQ9T59D`)qxRuySDZn^k^z6(7WUa{Hc+ zO0FcJ?WDajz07MfKB)C*8y)`b;D2XV@Cn$c^IBWrkJhh0Hug31?z+_Jj0jKJ@{@8k zKRkTg)~RJcWwibTtU7Nu@-sxg>Bm~>*n3LD$_AF>9cTtqijg~GVE6TzvoD%{c`&7O zBN2ug;`#f)U1YD1-FcOk35x5#m?DBlcw=qlUHXIBk(-%;9R4Lk95NZ{mCl@4 zYSWTcD_m_Nb?1X%!n2O^8Z^^9V+v{pgur<*?oUkPu{O=R86%J%w&LdU=T&CGeVSJl z4b{M&q9CmgyK*Xr7&a7K)*J;dMe!zg%EPl+tO~(Qzd7pFOw8%}iHeEwjG3$i<~tskrX34GcM?kFrfUe`E58idaXsd0 zC_&gy%%goG&CI1d9v!%Hwv3XN3aOfKOvVv&kopPdj}abGFH}Nhodfi%rDw{G3OE~# z43O1tj>vb%8_+(5W{64y*P&Do^J&E*=+SOdaeYM|n=KJMLywk1oRREE+jS%y%nP38 zllRkBR262wL)uF@s2<@*jP6RY0li=Jn{kF{hT!p?U)5r@Iaio+Kjl)*+U(7n=8z z?5Y?GcSU_19~^glz7gOwN|Q5F-o*2M)i7_!iL;!KL9zwqgf5hJ<0&bpI~!EV)3C6_ z?^O7^G*P#J{0Eerob2D_`(J)x#qzkR4>~MdkkJ@QHD`WUpW+=mK9VsC z9|w`KyoZm=+oPE8d88PKozQ3kFP!fAj{G{Rq@DM$s0k6w?&sZF=_&4|W@{!uPqQz6PmlPu_ z=H%5<%HrbU#snRoOSe)iP4lqJP}GDDHG+V1yR<5B&2r5l_iWyoTQF|BXl2QkJ$rZ8 zr^UD7j;o-@x6Y{6D9^$%#)b(am=3WfFxzs3`9sTR)G?-FJN@NJ5f@tg1n(G=DqRe! z`;17d&A`%z0m8e(K+$xLaWf)xONIIxGpY}Y9r)pcCw*F5Xi*tq#O2dI;RU-4q-Ce z-L~EAvmSlbO({VL6L1$-q@X(5rBQdLFdUE`6U=g3NSaX?cz&Iq9z1qiD}+LR-T^IS zNp|DaPmLKbN)N)UjGsaanwl~{4J;B&Wo?w3yRQrQ_e?eZP)~M;+x1_NDr}(^G6K*X z^*dHPIvvRAE;_h247o&cC~aYvv!$Z&GZ^G@E8++UNCDn`FPB}9T!Z|1rHrSuJhOdP zKq$Gnm7^hHM8$E-(W?iov7T&@HG?DL(Az4^nlIq#1(^Vi4Z)UrEI)5cM|h~0H@~d@ z5d?RQHUB+Q>eG5vNS71(3^?_}M~i!}HxloeI{Q?dMtW z_ZwN$t*sOp8Nw#zz$IMFf~H31*rbNl#U;sQ70_%eqS)d$Yf0~wE^q|`MY!3TqiF8G zP1*jE1A);Oln|;J{myWen@s0jKy3_@phPuX?K<1Ptvve;bx&IcLPe_%3y>G^?YlbL zO5f{9CsS@dgdye`HV3yUvPgmPSzm1YsVCC>%7V*UI4k0!jy~2k55NfAHfK$`Z0N-e z*Bb|KlnH-ii@MS_4IV_vG+yGjQz#0iqasSU6u{+;JZO-;fwfXwB&Er3&n=sKL)fsv zAb4Dp$L)p&ZOfP|6`;}X_p-eBJAUaM*`Jd>!!TRx{}p-12FBnq3(vi9^gH6>(s+Xf zq7#_t+y6*&kyO23Ci*%g%ifMRkG^IsqR-u?xb}tS00fk8*5cuYNo{C1{X=dJrg9y1 z<_|)*3+86v6=LqLGp8?8_PI%(rRNTv5NJRRjHPdt4cJZkt(XTECHIwN!Qr}G*A+9BOfj@PQ!)3od$}WA zxxoRr#-WrIV{kYq-OL^VkBL1je1ohJVPZLAuS)!6u zgnT-$OuN<>)LpyNLd2C5SCQ}WPIMKLjIwj=8RLcm4*LxsD3=pC(2TM>bStR8SLX|a z5#9eHjS?FWSqib-Sz)PnO^VaYUh`Wy7ksDhc<^lI!A${w&%DadkV=YxLLl)s+Dmm{ z(W%&+`F2Vo7KM-Od)%S!II&<+bYu?#fNim$c4|=LxkUCI&>@_2`P?$HUwX4H3{5?2 zsGh1z4qO5!8t?>))nh?Z>91m~I3A99A9d6ABD&oD5;$CD!Pn}TW_;PF1uZB}zc&11 z{r8Ae&-dr=x}LmduDR`ac%=Xzhh1d>XDyF6i9zW=o!1^LCPb0=#M4B0DQEva;NHUJc3V zPS}H2yWZRxhJVR0v_P|d=w-7)j543#G3J>Mf)}B9W8Zn8Rr~hqN`?Bnw=&148j@o$%FsHCS^#__7gk&#WwDwkM1^<%| z@6L(**&-fxxkj)0r;u?@n#-{kA`~d5fq`xgJ-R*Am{-~af9Bd*9WeoQq(#dQ-uKacKi4W(-pjQ#e~X7TZow#zjA zt3lRXR7A^P_WmXvAFqA=LC`^^{b-OAhhE>hM}#wvvVIUaH_p&2cG~l~{_ud`{od_6 z3UE_Pogn+nuv7!$bBQI@x+l?^J+A!%@IgGssxFDir~4CnO3B_TCgUFP+NWL;&aL)` zKan4L=xK6FPSzU=9Mv`#-3?+p*2KtVfc(9Sm&%$SMoO@Z(LY=i1ByiH(Sf(&P$@^V zzU<2&i`VwYq9o*iusWw>b=QvUUDwEzQ^Isqe_~yIfqU{Vk^6+59C2D0v8m>_e^#Km z*|$d^RomI(!n2E4qGy8DbFi$pxG6GWGVYl76@sC##7Ie^~Z&j}|)5HX8yS%HJu%TWFAvAd%(Q zDETz(3yY^U?EJxb^=hXt93u`o3hzcCT#S{4oT2Fz=Ob9%V8A*vq{ehFD|iT7Vd`I` 
z?OAN4a)iI?Q|xZ#A~BvkU2l-vWK*0qnOPkGDphj3Q~zBq2esFX+{(f;>ClpRBWYiz zspxM^+AFGErZGPKJ~`XO{S~5+)0HCIQgJc&GNUwoVsFH*X{z?!7kl;8*IZ5xA1LZn z=QH#~D6;T^r0n-ZYq)+~w)_LEPOY;|h%qfk$c{TK7eru=7Y#HdR&F7G0W*;CgsT=_ zLfYAtoG*oqfPOBDq^TC1FXzVXQNJM+G19497PSguJ>}3Tv12&4^FMOlAKG*q$ZA=n zecql2&ZytJw>L8q$na>aB*)h@@mU7ubX|3(oc@e+1W zu^NcM8Genm*na0ba3ON&WOzS)i-jx3l>dYsH4oV+snDbcRr*eIPK_qixv-Odc=!3o zVqQMtLE&zbMD{>ZC{z6m&+Jc5!$HgX_gyeA zI_{w;nkfK*45zF=rYqqtN|2eml`E88Q2k)H#i8}^Y$4Nz zm>_5WbMo^v1HOeygM$3ufj7vR;eOFIV^G0R6DqTDOV=M$D92;jbZ=f-V}fK(yFTO5 z_-|Za!T{op53R1WDl-l^?;~K(ZNvh zztJcFjjMle=}uCbie*bRH=Fcg1uRELXG)}Eg(rV5gM(n+6k4w3k2Su5zUUu#+?)R;vPv#N&)^gC- zcIxQ`>%hHqx1w`ban69vI)E#^X)Gb^Pv}(egA4RGQBv7ERsyB9(Xa`3o4H zi$~Lw8W*||3E}I&Oc~XZyz?HkdX2h$by8hwPoyPWN?B?jb~1PPN<|TYf?#tJT^1jN zU5gsoy=A8c5518C&Y8ISZ9cgG5U-<-b7h_}k`-Iy3T|9BfbsPrF*&O+OE;sS(yFw! z_gf)M&FJBln!dZR%?R{iTdk%BvL&%Th=hEI=$kJk4>UOzjR)ptb$3#peqE6I=&Rs{ z=REmBJIC%Jl<%8nf<2Mnkg*osfM?g^NMwpgw|MJGn|^}o>lLAL7p3q`c(3lpxjQtR zpPJ)HW0LCLK$W4}(!O`aJ9PbX_8iD9MN!?er-A*9K=S5ohMy**W zy-%;PBK8T8r*f$2mc>>Gk#ZiY7$Lnee0Y$mfT-sB#u`2xwn_ciL6R9L&H^?kjhF5>!yq|^M7^3^ROnz^{8ufTE>`o(?L0ix zu6XrKu4Y6H!ibAxTyM)TpZxp$&ud)N(~fECi+?q{|Jra1rifj5%TZ(3vO-Qrb}a4n zLtghvMY#EaxV_QPCHEhT0&VApZjE2#^b|8UZGRco8`vLDZ-)BeYV84l5obvyF(rqE zqEh^FSA1D`@F%g><_^V6oQ|N?;1P)E$fJr0o=sno8;DlMUnn1RS{&}_T=8*gQOfYP z9ji}7KXI6G4;?`4HM#zZWNhFD!QXGTCMSY#x3%mNyiYY%1_r-(Sq$5~?0=|SWH$KGZQ{5x3%n8xZAwlg^z`2R;IDD<~>S%<`)fQ&pwLkWOW>{k~7YFT-p0jlIDoV?Dm-Qt3m<(_Hu&Fg}1LirjStR)R!4 zbd6pRs@1FRzKWmH=T(`XjH0RheZ+moY~syOm7fJwkNEvn+ph&4W2*)DUW0xo8qKLETbjS~UdrHV9 z-xdqcR*7!s;>La*Q<$WvNWZ57GUCcW$bL24t=~5I8a3*tqKf0s;{~Ufz>n3PMQz3t zwI2TfmyqV{kCnbZZj{fhBdZN)s++pgXY3e(PvtWNz?Y}H5%S*^EhuK!FT?9t!16Ab zV%V03{Zjo*S@&AssyZ8;dt$oR>95ss_!5bTq*MO7RShF}qjbS1oy$ z?3S0@&?;C~Qnij5`dNKP3qrE$l-=1YI2}0W9<9b;VNRlNGzp^ z#XS#4q%y;rFa>K~aNP`GvRgO!D6PC;D)=TS{VMxJ;gvWd3zOVXGUhAErC52nZ7}sI zyM${1UxS2qUy-N(o3YEBEhiuTSDr}T>G4r_Gm^m6Lys(+`@IDN`aayC%} zo?jvG(UpDW+0B2|!kC(S>JkKH%!|{=`gPnuuUlhW9JU*1WR;*T{Yi|oK9b%53URwj zzi>83L|gXLyONui;K^IQf7=5P5zs zt0@@|qNpppBzHWzW^7=P!dG5Rw5{CBkisCj9wHhQI`!6ej_-6lGgPYEMoB*`Rpkbl zT&1?U-pGA>eKX{uh;hwko00tfr#coYRNhqeSv%H%%c#{*vVg6_cz*<25qi`ZkSQc4 zU%j!3_G^Hi;r=HOg zh6~*eVV0$~nI=E_7FYbY1M^Ays7Sw$ko>mqE3Za{5vs zJ715fB)#lT!f*|J%=XKwNFHEC{++BhsCdo~4|AGJp$xyRWF$)KtA-mrD(z$0hyRz zWW6FRx!($=GOB-3*}iyHLP#2+mkbVfTV8$gKXVwGfEig`f4uI-n$<)s2FsK3ZN7 zW!3eqz^BAC;MrVZDN^i%y}v(C4(lmV9r$=67N5EPGJMvE^g0qI%%M`PQ+2F(2h_zs z$uy6USugMWI|%;2UG_)ZR&n}Pd3bk=)&Gr>^yv9WG#>h1TLAWGD{j(f*F%)G zM4r@~h29T9U@G}C3BjEJ{S!0^-`1T6`WBjN_dGHwfjHA{f^cO!vJ{j+wmS44` zN;Ez{G?s(^F3TniC{u}Fv|}`$h0A(R5MYT}^o+DQkkgI*-v(_a^Q7t{Z%zZX}nt^3t4$ za=rHItEQ9X4c6h{;9xESRgAIPlQ_Id%I&4-Sgc+5clCH{lkqEXzrHg3uj==>RP9f& zPx#!Tx~;YPr~nMtIc87xeMnHfrILSqkfi+=Kz(5j&(81pUBffRTGJ=*poe_7uej>T z#@RnRBerkg!x&Wx$*xrqX%*wRca)VxVj8@qw&H&)Y*W zv^Q64IY_Qk-S2esLFK|)Tf)3>yuj$4(O_#aO!ooUtsS5l{Xo87tG zE6#pWDzV*BKB2Fp9krGl85S?cZ&)mv-d62x*hXk!NX1k%0#Lqs9s8V`8G3GS_r$cm zNqc^T9uQ_6zwtM5;nI+7vd>C+-5SaF4&72xDSZAq-W*ZQ0Q5)GfA&Wl*(tZ&F(&qy zZQDKG%y}G1+XheG6MfoaJtV+z>*GAa(s**Vp)0osJTGs1kCt``-a9^YUXr zzRLgTifQbOsT312G-79noW=oF|QQD1| zO##@vv$c*RjrF9!N+Jx&D1hP<`Jz3?kJ;)u7{A3IhIba-+SMxs?A}p`tWz^!r8&HP z9FQ+xMSfUiiL0VCP(vFGk<{zt^h`YK#F3dg_z(4tuLJsh4vWwmU#A9T+&@m3Rc;xz z;N%E3hn&l#>Zemy0r;coBYsMExy-q_Uc3zhNX{j?9Lv|4u3b;wnf(Pqx5lUK#QWzd zC5e*iUm&mzb_y*kWFjm80a|{{jw8$krMh#R{$O{`#CYmauc!j$0S`%qabf68MPF_= zSh2ZV7%?KACfir~dO~XmuL8=FI{sq=#tFPmYR*OksIF6*8z7RC%(yt*qQI>JGs;pi zo~*;R74Ob|JK$;0A4w9pNPsSR_YBu(g0I;#m1q^n7W#(KP1a3l);<)hK%p1j%~?&i z){zgjfoi2rLK7+_@iIl7pMs^k!4Kwd@V>C@7@)c{8q1AwD0>a`I>Rk1%pT{bb`4Irga^ 
zrpnY^UYa>bz5o<{bj}e*zAc@_zJaTrKA>I4M|=Y!~B-{xM7j|S(AiKzDXPY~4KThq!EIwFfr^LleyT0Gg+e2xEX!1KtS zm0ZHO8pc0P-V`g-gEzkCKu5Hldu`@sL+=PlncD8l0+K=NUFoG&8*{-fm>|;J$7sRHt>q3CrAClSO zXqLe451Ks_W#o#cWXk3u1(NjWz1otsKBY8)!y0i?#xBajX?9Z=Mk;``KCnn%AfK{msIB)0n#}q7P@5uHT-=Ph`Oe~ zdWSX7BSHidC{tFs1JT$nCgo+%C!jQ+A7Qp{!=tCOJ+`pt|y=>>1 z=S#O4C~=`~u2OO-<1Nz#r2zhyf@cvSg_<%rWV!v*UN`qIWcijGoRuwnPAc(ILmaJJ z?tmk)5*W@t3G%qC=<`1?L9Uz=kw;k=(B6pY$4KIV2=V;Sj>Grf99#xugyt8}sXF>a zsv7$gz|I$D#w|Yd%Tm;(rHqnn^$q)hFvklNFXl1J!K(+*1@B^Gbih@N@g)QH3jve& zDlxt|AP>~F-7fnQ9uoM=K^lpyc{jMU@kpg^%A51sM+r2u0kJ7W-2I8wQ!;fkhk|qe ztX*o3bdq+zyF~UGRY24k4>_0?6%ieeMdy1YqCJPUP`)`hLKMGFNh^Drma1l5Pjak_ zuB&WYeKY4XZmw&1u#pGeZpCG7Tqi4!8_LkuciP%K5j>O}-oRZLy`T9nl0Mv@qTq|pr^eN|Z=O5$^+eFU5E^01V}CpFr{$KL zBWFH0Z}jSSzkiD4B{y(4V?&;f6aKB5|Ce9z+){B8N}2WgrLM%tEXR^RF^L>a~gQDQ}Q#BLDb0$eaINY$>TQfUgH&;>!-@)3UD#IV%ztj>y|OH!f_;C*ZA!fuhqwB0x2=q z8M!~@^KiyW0Tt_Xc;m@~rajcy3}-@&IQQOY^dTrcE{Dz_UG6$Lm^|Zt!Pig3w{ssg z!@;jaOVkUV*hyN6lK}h$%f7Sn<;vn-mIt-x<{E|O%YMccD4e8F4CtQxL7<&h^c!i@CbA)-U ztzXjTo+?9@UrFn{+?ShTkB2Z5oJ93ZxF+zDNNhxUR^ihBh8XGyv zSLepw|4gFIcOy&Ba-6w5x-#Grd3eSr*aIH9TXCypcSK2Dbse}V`&t4g5Quz7G2!rS7O6U5A4QQSKZLP~Sw&~U~ zwq`()q}P+=w)!)2WOXM=Vbw#s26cK_Eb(8LnAk+m;)Jp+JxgJ3}lQ+Y9V(4ppczvSux zabwFf?LGNTX-3rte_LFP$o}Fn`{XistO%}nrh_ymkpgX3F=o{h4qx$ve)um+4atkF zzbP}zSY7_bkx|NgS2A$jmpB$yz~cubTX2~a6-n(Y5cT_Ky}#v^<`&6Ip_1E)>tjOq z%carm_3IV2P#bXL8o8C3KCTrlWG8nMP@@J>_ocANYRPrEmD_mWPnJZ0 zqM&|GV<(VkFZ5;?0Eg6Q@lD&OfOWDcAXRsLrYkNM{FNONEBbZArtMblF>fB*9l`RE zjUYzGkgD=~J&XeIZG~0H_`dA{Z%;{63P?ySZRgLyZ~LD_X1VKrm&h`QZ)s4`dHk;L zliRMe%b}4p+c{LdAIMHoCLSaTrCf52F%AGIN0pcl6ggNw z!QWBNoGfCKLidyzG81hNFV8C~bDGWWO}ETDN#*jW%Tx8VH#2WLQN9CxEK?NSy z*{#WXuA2fCfNLMqf3&}L^8D;7Ph<512g!#p@{I3L`<$wM?j46LD)ctxSUTnU{2q$t z*eeJc3KRYUXb+RiZ6~4fqRo=0%fb0MOc_F@NyS^{-rfF(yx|%N$I+f8AmA?JWYMB@ z6)FXm;|WJk3HwfHIZx&-Io^A?koMcvJQ;Y&87|nw#|9!ZUp6~Xei>068Wn`HkDMSK zCt^M;&5uux#3h4eH|TOkMB{7JEp(uU+Rn|u>DfNZ)@+w;>5xf93ZzkTu8Huy)Ic?qAYuToF4Bxu_g)ZG&5e^u!@1Dcv{ zzp7`cBXW<}J$Y{_QW20Mv^zATNWRhBIXm-6oARGnv_Lg?slBW~HQwq!D}OQ}wUO$A6&1+24Xq#IL=q$a9Mn#>{I!EbhsW<1k@8G1dcXtY)d8XvTK{`((xf$(ipK|ZK`w;>+bB=k9b<~T7d*A9?rO`!Qqt*Cho0+MHQq*`AFj1v)O>L z&HC;od84SZrxV)SzrbLL^d3nveq=9D& z8Rvs;s#dL=gTL?9bMjF6{@^A6ZA;0Mj7~)IMIM_0{Jx}RDEzEgT2G$Cpltnn;5NwC zY^bwK>6%NmuXugU3w{lpL9K{2IZ5iez*2~VMP`SXQzE5;u5L z_%rd_f>+GVVRnWodk6eU{os zoD~K2MYoAS0qa9Fa6I!}4_#R)+brM%Eo@;jftGOZ`nE(OC5$#aCb@fhp+nK1kHj#&@Qcj(;$_auzSyC|^V!KK)I=d8Mf zy@@Hs$lMM@MIJn{u`dT0GFNW~*guU1rb}gHEX5B-|WhR`kUW4PKz4VEB6jKCMaNE@_&Z_ot^N0DbR&ZUZEp!k5_ckpT5SVgUk>w73;aH8B| z5giu~hoSORdAY2->JF}70qNKz20w&o^klfhJ&!Q4W=Q|qZ~F|An^n-Cv>H{H`2T3-0MMnS5|N#!&?>1? 
zO{fSHmh_(;dX^f_92+PDhYYd4F+Aa)=PO`SQFjHb(m~hP@-=ks*{PV1?{ok8;=8x- z+I68A93<3-9%__(zZADAPwSd|r~Et&CIv4uAD|3 zG7xoi_*K8^#*E18;d`06V832%th)=Xxg4*r(P!l@FN^hEfoj38JW@TfILq!yW+{xU zx3>BV?B$6^q7AX1ooCkOQ%i9Sopi}{KJaxB8KA`${r@S1J z!~yKv`25#7DTS{enLyVt-$8t?puf;_l~}5Q1g>P0RR90_V!aT?k*7>y)w88u*zK&g z{lcpL;x|uN7zq-aFXge|V!zE^Tw-L7iQmqaqJ!rV3-kq{D3l)sFoQccc;^ebB9T0k z1mh~ozyK8(_d>HbA12%`Wu8x}cKN8*8tvIRFXby{$VdE{N}yIx;g56GWy;2oMS5~4AB%Ssc6 z2vFe8Wc--74&TxjgrYDqoH>JuscdX2DP2=rcA4tO8_3s@ReZq3X7f)M8;<6T4++mX zt873pGNz8^4)a1^vs=Z7f7!*Fy|M9gE`L*L=d1?s4%}dfVxkQY|H)N#8=ju6HOd;E z)7jk}V)r-1y}EY`Hga10IpROs)jIK#Hwk|^HU1)~fnx<93_#Y3 zq>7$-F`Qyw8g^%q)$L|&C3=6bd?cS>PY1CXwaHNRh~3g75=T@qQWs& z>gcgUW;DHIU!%L7kRD10YA|l=Fv39m71NEo@n!)oJM9dQ<1w#| zb5XW(AXyx4ZIsJ+_u+(GJTt~G<`4suLKhBYOoegc%^>T{k$Nk|zyFxV10f|S=DcR~ zz#=tZ*Iw_A;#yy4!FtvgV1{}bQ&dGM-bLNas+-DO?5z}Y14Utcoqk$8lf(2s0`fkB zuboi^v!ys1m=ochR!EQbY6)Mz-@yr-oFP&PHt}|UABP2O>~|*rG4TR?zNJ$f>@j2j z?Q9>p_kS3B%djZeZGTu10}+srE@`DiN*Y8;x}}wn7Lk$|LKLLCyPJ`2Q0Z=lMj3Jt zkQjl1f%hKwesrI6{^vUHw-LX1;jDYD^{dsWh&S&TsDY}hJD0{+$2%X&?-O3U>X?p4 zId?0K%X%2|T3{%EB;2ysSAXjR-d*`meLx(z!G_>~*C^`g1-j&TkCbBy`JC{1YDk2_ z9n;%}SLCd-_e5-1XEF{e6N>IRcvsE7Jy}&bzylhB+1w1e>CHa&$qtZqWlc5sBt|YV zq3r202`Be3gv9^TH4N9GSVi-}-Mkjhn11^dn<*d=a3I`4%Fhithbxb2O3h4XyK^$5 z1At#|yRA*z9TqMUo)2OlP#`wRBb8|siLTs%D1U_lirR$t-ds!k*RKNcKOVv`D+_}xuIGnLqFI6!;_d%JK^YFVVv zO`Riv{$a_PDF7nMY(ma2e!ll7c-32CgS8yKtfh^ASOUTNQUgPxMru6LXz!VZESiiv zq;TCcefHgGA_8QPlXt#f7Z+@9f_R!EZTQ~~ixppdzEW4PTH)tifCS#FPWnD28#1x- zo2NuefFr7WfEqb^|NQZlB?w!zi5>!fNQA4!CA@0Fttc(TNpBxqM!~JVg2GYX0yhOp zpZz5c)o%Nx7QE!{QPffVhoku4L^W#Uem;O1KtwBYsY3xuFuB|rGO63Te+Nn8(ExdI z|Db5vjr8Rk5#FH1C^_1s{uMiQL>nl`2}Ki>hRa9q%X2M9v$Urnv|k{9;i#VxV6wFd z3iCIIjLVYkoq8o_O z|CFb597KtaLkHj6t>x(wpKoS75}JE<`*-2Uw#DMxUSOqg%o&VTGOrW=hJI3;Aebey z?)!`J`vwV(-4rJoGPm?Md6}PHay)Vu4MVAQK`H1#I|2)D&8GP> z>|f*Q1#HRT(37zDR#?>Gpep+bg2J|rOAnOJez-l`*cCL3g9&x%9SoOvDTu^QujC2t z0$^Ejdw(iJ2GRm(y^!7#D>}t)%ekrsukA(kWPH~6yFgyVP|tA$t+VboX1z<+ zb`L#%YL3O5;SP^n`psj+mKSMiOGCFg@Mu{76b?5i8QxVe1mIqaj1VZ3$-N6v=x+KP z2qbF$gN_-z-P`tad+1bpKw#+4b28l&DecN(@RH_v00&M` zBPM(7&R*S&!jia4!FO%y%9wjJ)smuXS)h4d@$OxjUe5Y=azHjD&^`JD1HlLS(k8qn zp+zhU5EZkEy%PK*Fx+B+3*5+R%hzw$di}h0Wr(O?djn3yuZ@erSA71I2rK&Fj#=dUQj>0Zps?QSFL_fG1y~D`P5!aKuH9UH zD%4`~%z9eE+QpjS&6bS^Y@X(m_iHxuQ{EYkvL>58DTvxmGHC(X^`3aIf$7OHP zbpCj|=V>}{#BDXh8s-cQdGUY9aZ!DCLuo2bux8a+YAz`{;4A3z%0@oe^S zSPOUD540|%t{1t(j7wNrmhbc_H9tBokkCUOSw4_T`QCGJVBlB~&$d|*eV(42DS#Vo z6>~iOvW5>ND>=%aQa;kK-*g>asp*O$gbe*eU!1uw)TO&TA9VOU_;Oqy&!!(Ed19V@ zw75zgOQaL{ySi9Gn5K>MJ|k{h!C}>wzYn^Exgc3#L-gL}?j+M=XfF0w`;|A45s^a_ zZ_BwM^rikbbn6b%EloRwd4TQ5r>%=Genzm4Ae-fbbh01er}Oh`;8Z!jO1*#i-NBlq zVMkYVhJ>X0(e7FFaaJw$0}j;q2Aub^jNIK*6!W&)SbCMNz7pQp-q8B)nRQIc@iJM- zj*ydnaEoz4Wx3 zqMYWn-)jT!3bQ8Bn|&Dk((zzWumt^UPBqPCO3$W0o^$avUrbk;2cifOtEr-%B4EES zOZK0e8TI8}YZ%b`a6ONo@l zo)eKf+f_ADS@5J`- zpzF^iOJn!vsZ(l=RbWS4e?dp?{~+wmE5&7wihBh|-i7hM3kQ!9cjS1XNaw)x;KR{; zj_X^H8)PT>sb02;TVR6t=QL@I{uKFp^xn+%gxbpCZYEz&qx!b zYS0Byxc1uNqk6YePqYXB@`;m13rf7fUDqhj)u7dovl))hf0Ae>E6xA*0{s*ZU%!2- z7u$a0)M8;S@{V&~5K$MXl1dIoN6@P2I`52)JiXMJOu%J(iq`f+xMqmMU%)|ofm*E$d{%{kk+ zEg$BDMA@|jfJs1T=%W7YLH~U_H|3=4H6yn)gz=~6O(GY0b*-7Yr#^IXl>29OR~2qC z3(Wbw_WvhX>a#7@@eVuGLX-OcfH)#0@7yqSov}cC^C!?D5dA56XMVFMkl~r34;qOm z%n8>}jOW;t_l&=8;<1^B7?CE>29?uaNg4k~*CL5kMEIMTP*9Exyh?E~+D`H1{R8v& z6j?xw65vtdoz=I6{IWE$m2ikUt9a)XaoOVrj#!WITKs_-1}a(;F@C*vic74Y_TnZ^ zwJN4?v_~%y%)soBu$9hxJv)V>cP}mKNd{0@yOx6zB1$oo3A`^o z%S6x+7;HV0Y9%6QTOwk>5%(A#*BPt859W!YFQ5(a%kU!vOqV@Pq<>F!*_>LO*Iop$ zsl1?ib*g-+^Fc_f&Qxop3-&>%)Ku$=y0}fsM>E>wWke(ZygMdiEX2nGQ7-ZNeQE;B 
z%W$NwW}CSya>lYm&g786gSf};e`l|dTIF2;{@-TYk{FB>W`P!CH&FJh$+r#b z!s7ETQ1`?l7dgl>Yy0` zP%*o@YjVp*j!OdGH^^uL0L7TVscb~#F*l~vX_4ZWciebf93aQvMVWGqyFVMb&Gi4+ z)rFky*bWotW@Ef`)PA;$De|ZyFvrPZ_B)wP=+l;uJcv2%y(S|GaPsUVt+|f7Ub=Nb ze@_C#@9L0Y~J)GVdfTg~a-SLjwWMn#1JwC3mYWAZ^KO9IbVPrL7 zP*`d08~ui%&BEOeVrk%P=6km~2SKFymI1JKY@asR+1038n1MA(Y^eMZxvS$z!%xmn zL<(n20#_R!P)Z*_x@w&xn5V>s1OzM;dAEPsan6=PN49W0_?HTdGhxs7TFR~oSD`{} z*BwvvHt$!?;{;M?yFxbZ*8E)boSbhv*M>-jP)uyuld7vCs+mzfY2w_`S^HpZQ1d$N+e~2)DmH;xyatkZH5*P2RDD2aee%W@R<10=j{A2K_2a>v;{LTip^m8B}ONCtz*H%8Ap+r36{Wl2x|5ECr^v~OTmHhb{M+xPG)%vg_? z!{DSajUikHbyR;|UaQ}h>(~$C9M-oyRk+4X^Dj%SX`4p>x`jV1s)vF58%Bb4eaUmY zqeue(Uky<`wQnV5Sf3e4s6VgY=&M!$b7a({EIZY2t5DgG=CMd|P`X97%P#_pKDaHF z<@+4B7xP&3S&gDmS3ineoUIqzj^+L;#yUz*Z0d`O*w&6rt9g=PZaA!x)nuMfTWREz zgwt%VNWp>v3bUUI|2a^md@j)TcvlF5@y7!`edsq=1x?YEC4ZEj!qf3cKN@*H;F&Do{oTq};u%wIe|U+J~kAC(K# zKrb1do*$L?j6Wn8+X5{}s3pA^Llpppd$`-&a!*?#K>j!M?ZD-?a-EHf(=sq0PbOCl z>qLm2&e%w*ej3>vv*McwjqvI)4o!cuS7bZzQ4@VNjg=YQf^y!-3h~Vi%RJw?U-#6V zOV(-t_f+Ehh_tq}$@+9WprEW2hd9b`z~!Q}6&MeX*UNXaY>%j~AJ`(iUC6jyIbWo6?O7taNz&=@cuY-7UHNm8O zr)gKb=FMu~Xk|RNN%{HDK#4+;oqB2Xwu6tRs`yFnL%}F+qed+wmq~2|BPt(7*0Lc} zTvCGeQAh7Z_-H}BktLJyoL0%fPs)GN|NXbgr(vRK79(jdUdt{}P+a&}TBohOI0`}~ z3zt~{H$}~{8{F1rR5Uvo|xU%Z`jg#yPnbO^F3iHsGznAN!^z)`+hzWL zHtwOgjd^{RC9kPql+-g`v)lRVL-q=(%mS;2iv*KIt$?g*k-}gx<5RQzj@ik`nc?CE zSgCrG!*nJo(D@Z#Bn{g&Q9ADiHTdV2^4yUAiJ6F-!9=VrH}~!8*jiI&|8O`$Y4drI z_{5TC^fw3((FNpuCnVCv_1rp-Ya#053Mj&}T(0RrYQ?`*%LE8p-Z^4+u zBZluuPs3ueJ~_=4H2H1CV`6r@dh zpSU4(4J6@&y6NXT6?vwny-xh+%qTPC^K{6!KeqY*74#&=lB0FcopoRT7MGJIB$n+^ zt=HsQrk_V=_@E>y)Hcj}nreZMT6b={ZmC>llv})IzmFZRq}dB)583s(I7O_)sJ;?W zSqn(0YekkJT%ew5!e>9*wM{bIKa|<&)xXYqxP`--`oQCJG_&HwRWid&t|@|MQ9xf* zKCYg`_pDeK6;g0o0xb4G`K-#;a(r%(=}%quJp zy&hiiFhV#LYEq5NL;s>u+&nV_tu!tvlCzOx45zjG(n4&;p@{Y8%&fWfm7Kwx=X3Fd z-A|Ug;&E}b(j;{toCA1bTE*8!XBfPyy{N(nYG`&T@g~JQYblLeo3Abp$Or#R3qV-s zv=BCK>5XfnCr7t&1uzKIrFd#==@z#MBC@< zhr*Cs?W)N2k+ngaZ|R;B9%6*g-%{{gTshg0n;dF8mU+}bI=9ih>1l$yOpO=4l1DXa z7Q6`6zx7pAJr%^o{7U(|?B4j-vfJq8BsOa|n}3WvJ>qskJG5R;w6&?uboKplpdyD- zYa0SFUC6s!twuh0Vkj$vjezdiHwM>NtvJB!-K+kFx#Tfs@xA%we-?Xvj+1SY*MVLM z(64q`(iJKHS{CGEi8c}bO)qX1D(KwA?7eBF@y9s>=$m2pG?(%GI`MbC3Q!AVt(m0Y zHD6eM2riTwFJ!lWy8!DuQAcDkH0V!b(X3#Q*Q<2n6yfBB>jH9v#Pvgm931K2cDcSQF zX%0zV+kGT>xEyplcU{pN$pXq^xzH-IRJZh-`PK&6xF@m35W3y3Jl<+s{3Uv>)DLxC zSM>r0j;Q+i5%{xL54L;PAH6s8{itX+QlSLBetAN6;G%!^<4qbX?3ErjCYmjfcmt=a4>%Yz}i7s{t8pyeaAu0T?NAkGTZz={@NsEC@Tufu}nMak%hPF=R|ewHA^p?M?RjaOqXY7> z))hbZE4?!wVp$@IU)f{zat6QERXtofwJ;5BCB#r?4`aGf8b^&HF^vd)w-mV$}ir2LoAhWzTU)5w(BBH_%;-_1#BG#P`6E&u$M>7 z=%iUJ775C!JMyz{Y=ntO-p_tKd?D-@7oLF2=PX(uDLIL=G76)7PISydo)i60E=Go) zE@mp`m!rH1CEVy}z9AcD)=Pp_{_t+NbvL`UyCaH8oTW@83~q>0GT3&0SNlnJ;uoAe zp#Phz7*3){fU53=H+Sqr#$y$mp*^K zR636t-y2M^(#oW+0oO9=hg`X(-O;J#6Wn%;KGvQm%Mtf-2RSk%9_u|$`F`ZJ7o&NC z(&tTyaf{XwG;CWAGqj9%6k-givRfI7ePxYrM|P;0!awjb-?QpR ze#fnTf0bMJ$k;hbKNoKoX8ZG|>q<^Zv*-a6iV5zKo}A;7wR>h^BZr69m>LCs_Gg4H zs$%(?eYgEdzeiy!7=%!36fHcufJ8Xbet8f|J9dZ0yr`7NI+}TMH@A#HELV1_ye9aE z#7{}vMm|DFTd1w&*uHfjjy$S(*bx8?#~*QcDCG22&404GcnET8o0C6t6Ep3g&B<#_vvcikf> zjT^=g<`=x_FO*fI>)XM^UVDz-wJGx!Fxo^ly&TqYYDIER0yU&UF#7_J)vdV0qcI`H zFGWP4dF%S9YKInlXdutp%~0260czCJV3uSE`c^-1plQb(IuwAiUD_u_`j4wnLA*yM zEX!YtLt&?f2+vey>cvc#WZR{UhuW+^ERr4x+UO!7#O;+!r=6C*C5umMe4an-d_Hz_ zsXJSgVMTa15e45r7Ve&g5PoM-_cf@jws)#rTUmVoC14D!KhAa=lb}VpHzooW-kA}EqFi~EZRIt&~C#P0kH@fDZd{}#}7SFRW3Lli-Hx>xb((nhcmN^ME8n9PWvfsd&yq+2`0chmCaKK$p3onPPrkT6m< zuctqEo3|Y1Y6_i$_N(ZfQc~iM(T40>U6;DWve7p{$#!n$o9Is?e2n; 
zYTlvZah@V^bBCOV*OV(}5dJ-fc1-gJu)cVQO1g(@Ew7P>S!f&;F^7k{2mlCl%k#|!ZMyZg_b*7X1Y3&ewR=lw>Q7&tehCXe`24EccK@n%n3lRNf8)~?UL1J+`Jxn( zhVK3-%U*@qU4Cv*mUb{M_xGcrZmNi2?_!wA)}+$VOVXt|Yi-jXjq7u%PzcCo_+oX1 z@)Z8tTS_MPc~bTrk0%_Y5rpX)z91`tI=G#!ux`8KXj-Oumd~PlStqkW80B`YT~hLX zGK`6%$Sd&bKu#YsTFm3t(Uy$ln5-eX1;5(xJg1dxb*!Lm-E}aPLYa1938?I*4mpSd zstNI8%ARvvh5TrHfV&F%HPWP+phj|q6R7hX#z2=1;=h0!~pNG79LtH z-TY+f0&Rjc0RsimxW)|0;PT1|#S)JD!^8tUo>M#^)Xm>Y1N&g)a7!cuAD z^DkKHgljMI+?wGu#WQ^4+OPjYUYFZ|d!;gN`gm2l(I)g_<9ld&MR5-sJx_Yep;X15 z-1nCJ*4kQ5mdPXX%#{7nenUI&r(?Gno$L}`^&84(BCz*2Z1K>h_vgk8+l=j7&pm2> z0FY`bkhU_ZIxe7VL^#52fgckX_o#a_q~D>0*ZlbpPy@yq6osOE#>{1b-P#ep)&<`c z%D>VI2-vAzdHxO@EfOFvFHoAZ*tP`+UC-ar#JFrtoakpJ36GSTj05RDN_;Vk+-h@< zEC^)Nzj_NKumFb+=sRrTHm9igvDcP7$u1K9F;RX$`2=7z0a4-;=f+6ygSlt{WJ@}zDbjrDG^qFthuCuLb9baw( z*i*-)9ByC6W|PY9ECXK;(@IcP|8q&yp@Sz_;%xg2va2x}WPz8i383^^6oZ zKORVMNl05SlGt}C^5)HWB2hhw#n%tl7wC?C5etx`ir9TGpnmeG<~GtSa9z1Ux#T6W z?z~dXdR-A!3{Yn`rvHjMiv-bq4BCYoS(x712@$N|8|DJQ6p8<9K;Hw@+(-heT)?!e z;5$%W67PDkV)Me=>2%Q7X>T0FymVGh$w&QKG~oZdLZ{G=L_cy?dEMFTmQlz}_j^k1 zm9yumsUO-LyKoRVTrnj^I^D0eI0zG*jpkLr?Yeo;^wj~`cwZ}I*Ktv~ZQaaN-l6~H zU>BP7!sW5yE%?5@#ahx!R5H3(b9*lFwyw{BCi@n`*B5^BjMx&q5ZBW~2>7?pm}gPC z9_V0W?vbrwWMhi);8*Y4ODbLtau@`;w&uq9pNm>Q(?H_Jf!O1T|L8ma&+1}k9a&m} z4hgMOBP${xNM& z)VzObQxTr(%VL#6F}iaqB$Z9z7+SAC{VG*FDB?p9>`v2RR%?P5x6P^Jt(d89c(HV5 zQxlaqQ~9;)li%nrtX$%ORkj;L;K0>q%|_qepZT%vwAYDPZL~TViO@62f(>ti;s(I) zBkQC3*!RD8#ISX5&zv6KYpz#U6=dvt-uga5EgQ`zQYM&G)5&MBv&h(TReE<^b>z^n zU1~k;D>*v*V1TJ1q1*biH|PO#Gu;=N+t4RX$+W8T8b! zPwe1s<7ONfDK!OOS)2~_84bB73r>6R;M!j!UzNI5GK8u+_aHfJ6((MVEF!h zwZBXSiMY7fKHj}}HE3q~5NkuEm0hLRW?5aEk=~loMS``Syv7N{`dDyR31s+5oJrw9 z4J-#0MF_+06}+Uw7dnAoka z>+hI3s$e7NiaE?p&w$VwGF4E}?_FZqcIzL#8ypP~hA*uXwZi6j|=pqNvn2 zZ48Ld5A$p;zTU6LCl;*uo_I){??sew8q|KYvwrfov$+F;^EFpRuc8AXGLb5(?p?5F z_gF96TO|w8!k1M=0fB>u)|(v4=*%*E9)jUx@AgMyu%UKmfu$VljTaCz*lST=BF;d- zq3B?_AJ)|4%-(AH&YHeKewUICJbC66SNNpnN^jWzd%`o(gHub!9g|O!?rr{VGHHdP z2?4n-d-XPi8)F1r`2I`jO}bDW<)0Ektt7^ep2qmRY2 zbPuu-bO>Z(tPgxbM#ogjp8wlMObq1o2V(;K1h)>eeZ8Ttt|pWO;K`@By9!Qmfqe3P zW$*CzmpV&tdY^9ipGah$_+;Aeq30dO4fFXhq-K&=d5#Xyuj&0y{hHsFOylN#c_^QA zIk)YtujYM-E5QoWf=jhgYn5H}-{#8NhM#6w-v}?7Wu@H3k^pN~CU5^R+ZEoE5IwcF z>G-YPCBLSf%HS=`hUFm=^id)9g^0zNjx2pS_o?_|QpL2DGpC&#m+yU`kk4b<~UaJ5<8%*T84t~NU-5`r#YH&f<5f^hVuM{0O(SblQ* zq-9>7K0I2I^to(DasDl7f(f65f=I$N$@cXv)_?L{A_>1uri2og{h=QJw-3Th&yBrN zX}ex>&R2#$fA(!WSxXTq9bIZaFn64_E+}Hn@Y2AB=f9&-!T17rHIK{nJ;;Gew{#Aj-7zTszihlhPPfG2+bBX zI|}>}(Hraju6DxY_!2oqZTWRI)0uH!?5B?}g>_s1;_tnJUZ~2}45%Tv9hTA4{mZsz z?M5zl82RBqn(ZAi#^_+aCj&R<;x+)XwdPws?Xn}-=mp#TT@s6h+ub}$`g9Q`9GWLNNzk+@7vJj2@P`MW@ca` zlrdr7dK>HVb00(dn;W&37%_zF*+$3H6>xKz=&S0uei8cq*yxLb$AU6Yk`9@SZ8i3D zjr%mb$BDIW-mjLRZ| z80co&{#}B9+<%-oILM7kHb`9D`xU5 zoWGyf&GWGzN`$+DYwFvjs})YL*={>kn3bnkrh(BiCoifnm@c3Gt0l;6^Nmm?&`sYt zp(x-|jfQ5a*31l!UW-T^?ilCkr50vr_-9mNrgDO-%h+@f)18HzyJ0 z-S5oWGk}RKuaS)72CMzjqSv_cTwT??q}hj}@m0(Cz}UpI=_KlJ%JOOs&x_2SS$`q6evbpx%tImw#)nVz zPZ9VX-XJ-09X#}UuJHeykpA-g zKYSR$JxvB{*Zsx#+Qz8_QY=bLx8y)BhZ$xk-0`1?s6x_T17)zTzQqhF?Fb}BS zgu8&Ztv6DEZ=r;4MSPzjYSw*ZLBm;5L+|uW2%{3) z`_CWY-=C#qwzeGULw2mVDM{=y5BvyPC_>xLo4atRMKLP~2o<04FNi{l$Dy|=I5 z29p1E4wSwgMzoyxTeQ5jZ?rvjw0EOZ=s79o%Yw8%OwtvkGbj9X-h9AOgjBgd{uTxs z!nS|61PKmpHmCmC0=?7AUDgPui#Bf!YfT>@X4Gya=Jt3iT`7q%snoSU`PCMd7q}6Jb+x{4*+mN8WCnj60UJI4hFiO zd?)5!MM;!ln;8?7bB+=7SG6`Pd6=y$0j6c#|LB_I$tu#YoH`9Qa(tg#$fw`0X*VD) zpGS(y^s6g&MFHS49&t(V-=!fT!F==mi`_TKG5Dap$rt`Wf9u9d&5w!n0WwV zE1I7CD`sIkH8;`?UY{gIJa*&6D6|&xc+)8SwC`Yi&`&pe~bz%3-e8Ld;&Uf;^`r5lFkfDCh!S+iZ 
zysN6-%kYi3P`Pc0q~ESMw(!Y63>MRERp99E{aSh9J5KUx4+_YYmFDE9$hO)%2lqMf78#2I6vJ@NE{3Jj~D*#XDUZHPZRCXjeO>k z)4@Kj3nK!P&T%@2QR+$w7BdYa%B}>B0opOxPP?-sVzeE#R$qR8Uf&Lm)aS8A%9+?6 zB0JwaHG3z(=gmN8CbMepdziLD89&?(p@#7-BW%8Ar@kS83l=V@aSfR_bneP zcsY|~xW_A+P}=kVxOAU8{6+n(SFl9M6oR&2G|%%%iq>xieyawNL#M5nfwrsp9?Tw1x+#i}4!6e-ZW4G+Ixz4I3xkAB2V1GJ zVSH#Z|J{csi!wBO+jw(~9WQMwDq?eTUQYoq=@B#XbioRi9hG8~*0SO+OJt2bQd{f2 z+?7{rLbq(q-}{(6L4RGJdhi-%q~mT>Wm%Gba!}^J&BVasxLukiY-P)O8>4;L&Ra{N zgsJp>lWy%8gW`2&a=<>PIJ9VUiv5&Cx}iOh(ME3CR6v=2csDHW*%Zjwovzo8e?(Qb zk-l5Tq6uD~Iz5t8K^400CR4a(z^uLfiW=vcvb1}_eqfxovFg0({LE&PtEDx9%<-sF02BjL4ZZNqU{DOL zIU9zRN+t#bHXb${3zKwTvXp*>`j&9J*_O4|jT^r13bOVJR# zfy(LdStZjyYr;Rn(SMOzVSaeo&n#R>_Ich!UmUxPtA#*cbZ4=4$;8onf?F*>f}x8+ zoOIbT4pP?(ue|4Oi$iV3`IG!Hg{kRb#P^W4q(@ud$YsCy4~;f@bQ1yf{c79R_Y$_@ zX~@qpAC(bO2Em6!vlMOjlC*5j2-EFIPK(xIuVhLrcPtv@E-0V=9>&r~=l3l5G;jiw2$ ze6u#wf_&%oeD5OwAc#|a^3%4>`@4rxLgg=hKGIwV@c@2XF@0Y{rd9-=S>jfjhBEh zQ3`x)$=`zR0LhVSpD#TUc7G^Lmq?_Z8V&1WZ2!%*et+1oO?Me1b3J$0p_Rc@6@!@U zQYR>%C=Vl-94Cw>@r0t+rsP?;oc;H{4)V@RM%T?57+z! zM~Lg=Ekcg0aHI(&N_A45qh}9GOqe-FA1CA#j`z#~ zWWcHO%cIq%bJCwmJ!z8FVM-%6hemC$u+2za42kW1;KpuH+KIhHDUsudsHUYgQN|Kw zt%oBJavL7E;y$ui@%}P+)o_(2USF7!UFj;9geLKbl$7IRAUh8Atki*!-I-e?8c_K{gxr(>bM$c)*jn-MPXS$ zY{y&nL-z1c9>XFrv-`UlOug`G`j0H2_Mb6gwc+13mUnz=Wa>vt z^Q>#H++nxH0vm3u#>mdh%g*Z#aRa$|jTuf4{ps$LV+!kWU~NrMAFuuA>JP%)?wy&p zLhFE0)8j`CR6AlrVNUS;+jZ37&>HehS$|J5e~~g96&Fp7rWyRSLE?uTMI9z!(&Dg1 zC3*BM*#oe{(nl>=8s1VGBue=~(v=X}g#{z0HL>9#m3IP_mH!cA8GA#u>4Xkf&o$Ov z0L#g!woW?WhnM#cCiJz89B+ZI)>xWlixs%oXPXXEB1%RreDI8bxuwP+c9NDy>Q>o@ z2Rwv+^{tc%8HHo91i|Cv2^p2C`lNy6Ij!5oOb4ebPVlb1=)(&-nAaLFd|YEzTFHwJ zv#gM=Xg=-HxcBns`ROEMAZ@V#(81S;I+=J;v>r4qKc^1quM-~b&?mf6N9g3@mTydU z84IA|#V^y%@*UvOcfiaR0FTvP;HW#pUY=Dj^gh9<7kiD)phJpjneKl5@H?7)rrj2% zKR&MXq^>*ek+m-bu1tA!E#Z!%5GPC$yh?R`5V|3|C(cP}dx@dHq8C&RNG6VvE5MWh)!M-M$NX~5jcdItCY1}H$Cm)Vc#5(Hbo zgZ=eAPQ}*>xjD$2jJprrBd!yewej#H(#7=iKY0jJ&qMZ=k!nAha1AIZ|c@@*T1&O$PW@=6p{ADs7#9Z_4F8slBKx1ffsQmu9(B_CrILi1*Z8?VMdU51ll zM*Is1q~Waldz*|Q3vV_RdGWjY&2}2(jLIdJ3Sd=88)IG5{H_B?bIgX5;{+FJYsY5H z@v~fFE{JuVrj{{vJHyGRS3WoR=2EG%cYUv%5oT0HV>%e^U7t5y_sj7iy1Z9A#8x=V znHZ%pOM7e-6KTq2U|X&}O)e2tFzEm6;t)K*e$8E|d1uQ+cM!SNtu0szT?9DtATLrg|RD$i1IL649w&79#KD3GW}4 zGF>f^3^jr<;*e<37E375ScqNYksRnl6eu38&Arm>j}Vd-^XR#YD9Si{vYGVGXzl4{ zZ&A!L0yKsDa{i^yE`sE7V32<2CRDBMr-G+ef2?4eUeN}VJ<4-f3$3I|iEH|fJYSu)e;Pnt(D=L894X;m<^~(7 zmx84z%KcKVT+z9d;?S&)C;roy9~#q98Sci`74QLY8OJ$vVJbtlAau(w%OKy$Q2gr^ zr=gGIJ$qd1zOkPgGs6-^%Au1btzZ57Q*pV}j5PBZso9{Ni{(CTF_r1gFPj)xuk^JmKu1BZT3F3zj zY&Enc44R*{7|_|#Y6%b!rt#Hok7X(YZ4UE|OxN_65*R+FxT_;5F~R$?*3m%HBa=k` zE>&0}JBP3)FUU=`PIe>*)V0g8#5vCp*@4IRBgAChi}}k}x8)B{XxAS>i0E*?O6Zp< z(jhz*kO6aO4x^r`Lt#BA7oNh_vg0JUW!KdXT@cyM_0UM9uP!BNhj9Mz--Y09DtdC< zZ38YsjbE@V7A5#7z4TeE4&pwxcg# zm2o^-6nTc__pzeHfyog!E)b!rA*UB1b?bIjCCYldWmC6Jqd)x7V@{DD4{A2oCF+u% zjlWEF&qpK+3?{=AvbSbCkDPp1y-#0r0D#RtR4P$cs1CkNonWE@MQd>Wt~ho+()zBS z49G@=>^cen$v;YV#p(Zn*Qg5^>fnHRGR|FV-U>In-TlC!MP?$9Jaez+M%b7zD~)%3 zvgc-Av2FYbceBOJJfu# z_!-F+XPR%ssS0v57762a#V+d^4#QjYHKlNx;0melwy7!Y?TB6ipJ!s{jj$q+=J3Eq zIn61`xc`ApJ?Li~{sgkD3ClOI?N^BPzb@iaj4z(MVluCso*l>4Ak64AOC-)6`K!%? 
zfGE?j>=c{{2(D}Hk^m+KSIk)RM+>+-BO?<$qGOV9{~OD8gQ1q>@KEf;R@_ z&23UcBGytvxOG}kdX=SQd^%?mzZS6zUJU2zJcUof+Q)r)-rZziP`?#Je<&$Eyl}L5 zu?T5u92Tqdw$+}T(XP==Gl7UtuBW!RZX*qrYL3dCUg=Kad#i`K*xsK#FKpwIpPjfG zmg_HKI{v0G3|Y%_h*(k1xCSsQSpbr~P97$(rl3OW5qt)#TdQRO2fpM6ZE`oGYTOE`Q_pMPDtq;8(VZaAR1@)p+x@f?ihURi1z zs$R>)3yidTA0NBS6(6qqoUiX#N}MJk&THphIoSprzi7Bmuk)vKTZKwSh1i8mSS!88 z4S#%gv4d`9={^$k?C-f@GR3^$73Z(Tk&J~~{lUbpXkD_AO@bxo(u4WTBZqTj$#?1D zc;Ot{A}>X(IGoH1Qm51pRJsJ`eY*3I^;Ed)!kd1MvK(*6+Ng>Oy;Td`$hC>?hka5X z={mLIl?&U@*Cl$yI5@mrl!LB@y`d^<_#}@XGEt0w4;Iwe54u!^TI|;i;pu*|F&VM>!c@QpP{Hhc_Wmk#}yqA`M+N#~77?i76&z?2A z5;FBZ&#rVWGl8N{(}AW{HRV#@iysvMYuS>l$O<*LYbkwF3y$X7s^umbEopngjdnAk z!`kBVXd=hb<$`cCK1gy4zOu#aEmW?>^jCjgv$I9+oO_dvr0Y#WS4pj!=C9Cc()PqX zR;hLheyMu*+7{()Giy7oAc~s6Gu^!xNb-(LJXMsI&>hYT6!aBbhOA*5W;kw8+M3}s zq5i7#Dp`Few|kI&-P5ZjSYdRy0kHa!a&BE=4C>&1A0fc)?7{#B4lw znBPkWMYkC5QLmsi7)y=zUd?gD#Wc<9wWL>X+5^OJYr;Z9wX9i=$UCrk2JGK|h`V8B zrd_4`hVq17Ip<{rhb3Ezbm-``3eMPBxA@fH!D+ZHG>v)di>minnb)xA{6@A+m7O;3 z?U;#mky!ZW%bi+r>Y;EJ^gBDZ4DZIdUtta(2S3>MEiNB9pdd+p7tqN4+gOu7vheG^ zvqU1JVz6(S*}@vNcYWH_7&*sd+X4{hA>8TAHV2jxTdqFeuDqJ(=14^ZtPOLh79sM> z`EFRDpHvkP%Cy`cIb!Gg#l)Y3OV$`@=)RhMde3zUQkQaZ+}*V2j~|U6^itZ#02g0l z_@Ho~ntYHJ&-5#vwa~;5`@n!MJ!mH(P9 zvgI)iM|mp>@%WkXUhr!Lt=TyR*0dGx<951y&S=En)|?qA8HM{HWMb30(#b*kOk!Uh zP1lw&N47wBI&5+zLh-%7F*gN@dRD2lH~VZ-ero8;XZ z{h1zn(zfZRj>F~C_l%(`XCp6Nai_lS2N&e0)g|5(@gw>c?p z^NwPqt;~K&*nJ!$0!O0}>Fz%xA&<>;DR=MDKR#H+KHiRbwoSK~ zqN|+)m3%mv)Y%O=HY6IJGAMoRHqvlup-%!!7Nx9bS9}g?E3`}YgB9cScI4QGDSRio zrAElkhd$O888y{Q&KW-8r)i;yXDNoihn@+OFI^&+BsdsAriQ^uv_;oORfi;d-7 zHP0TOz4&d|lUuiWO9)k!l^ZxO_PJldn-=3v)n45zZ#Z{Q=?8L9x6dKQE3&e3dBs;| zMwTk1bJ$d`2(5fMy+QKbAa|(s+6jvSmOeheXS5UrvG#k%C}I16utNo>w<#XXOB;p} zrm?d&V>(YvqJ))gc8)%HALOJS(q)WU9v-ognBAY{hZG%Uu{^T5#TTG#WjA--)ZCH7 zl0&YmYknFrIn!+g{fJ{qKFC9*0X?aRyrRswo`Vt*?(>V={j6w$yzjQFXd;X|v68R6 zQ_C3P%m`eSl}NJCfs9?!bgxPS$5EMAb|?GZ2d%?8m?FIWEZE)nqPDImq-tTU=AvQg zf*t$2;JvO*%I%91>lH7UcesXbB=W8oYp|2;`T6cbVbxn#M{TjDDfMQ>t%OKCinTLO9IWTZVk*;Y?1P0t(P60vkfDY;}7n279R-qVfTq z$u1j&5LY%-o~8z{tkQYm`v0F?3A7gJF7UjaVeJfIm0*Ve@CQ2(Q zF@%7H64K2G(kTstNJ^uWbSW*}F-UiZqbnnX{>hSYBR!Cf@roiTm+$yEHv3@#3IX#!kA~ zf$3`M=PWVDCfeZW6KP+MjGQcmB?3Y4OE}yn$tc!S;jI#>_>@Bkxm3Awg{!JzHr^ zxSa=!IJ)3XjWQnJTT=D&hApE`&HbY4SM=*~ z^GVc=9bfCy#or`^^wwz23hDbcp59V8$~zB}#hz2F>4G6Epo1Wor~=dc%uG!`8ui0)N#7$z1&CdvB{dm!H^mWJe!b_9~%eO=W8PHfnNkshYkH#ZwI&^#fJ%if@l zwUebTx67@+(%?IeGUJI{+4OiPIAct0{|YUmV7EDk<(RVh zqiD|-n71$7BkpY}pT=W1@_DTw_SL0v7N1SqBu1=l#W}~$(-{j_JnHhJ&;8^P9$cqs3Rz{XfdRk9#EBB+AQC`>24QQ(H$ykxAGLvdu9)i zjIY$yOAerUA};j&ol=@^`<&wEOXw-oey?Z!m$dxpKss}tj;(kO-mCIjxpXXFgE7dI z?}Jlubvt}X^|ePYgk=%Lt^F$N-o~Hb9ep!R&n$oPS?vQY2|g+KaU3QnAUWzPxE7uF zR6&XgTUAUK@m3l7T;7MXR{CYPYK-R&-Id2tXBaF?I<$hk7KCA4a!N-;*0~WT(Y|Lm zEm)ipU?F(3XgWa-c5S`8bj{*Kz(e-Fobclt4AK{D%ESp99-KiF1!J4@s7Fm%tV(T1L`>{F;fyAH} zmqN^S`lU&qcB`rf7MasXiH7p`yWdc7RL=QyqPZHD8Ggh;_Fbb_+zP#}G3$+yi-$U{ zeBNe(add5UuxvP=1ioyaX?}nA6Ql5okZ7fanvL6MnO0e2&9B&p8x{zYYOc~2-p{B*u1ca>YQAvNF(K|K!mH#MsOkz{Ta?ND5G-+~HV zw9vB7A9U7OU9i8HW5Wv=tFN0is5;yjdEq$Sh?&h(ye4VWY>@AY_>{5)U1V)#22JzS z_U*&4l(^xzne+)g1m$ri*y6Q)WtyU~jnND4Zs9R^21s9uh3;qtkRg$!3<(nX9TV%s z!F)U_*|qP#M?7-$5eTH(u1Enzn&yjka-=hv9A4Frl8^p$RRCcw=-B4-ue$cC+yPI(5BF%YquN_}(TbK9TS&&&GqG|x`PwUKY z*b`F^F}YBxBVv3doc9oOjK_H63~XZ0hoVb@+Z}>9QAl0IVpg@4GG(Kvy4wd|e##~e z2zX5MAN0FCeirp`RWtT?Xl2R0qbUCpLzVZ3&-;IB^7-`B9G^_Uwr^u)$IG{da71h0 zY=K|{#;_@j#DLLjiZRHoKfcdrkGPz|S``*w?b*5CSVR*-JbPoDh&v)v;hK2Bthki(Im0_859T*wiU#lY`hrkgHvu?P_vUH))= zI>=jOA}KS1N2Q|7Y4FF`N`9qJS`Ejl41dg!Cm>eVolg_>N5mf4O!R>ut8uW-!9^rfDD 
zNSvz2@A}u-!e(b(Y+SR9>a7PBtbU&O48?GgIs3UjzDVZGkb71UvcAZ+N<(XQj^d+y z^P4>S(U-E+cM)#i49fX!Jv3jYk4JdqRUWEC3MO@E=GZ%eN(6-R{351garwu^?iY>U z#jAaI;8|6@K|f%Z2v}#T_B+7NIfE?Yft!_{e-Lr1sobJMcCzM{h@pMMOaIT6We_xvic{HaHXf)%iOT4>F-3=`>aRR zOXsBavxUY#QH2sQvZtYFQfPMYiRnm4*v)V6X|g6{!ezm76C}HHgKPb1%gT%!*J@vl z%DST@#;TG$vfIHp$g5PV0ym;zO-c)S>`{*qT!1vei|*J7U>@l8WEy4We*@+R8#Tf^ z#H#ga%>P0?rHIZ`SFh*gU&wV0@Me29QT0&ss$SZbAI*!^hx+Y*n9d6?5Au|c@d(){ zy84En%dBZKs1|+mgZ{~GCZpil7d3R^#G+j7nJ7rj{7K3-1#azi8j^UrDMK8Ha4dv6LHRWHtT{`k?`%#QJc zx}=q)Twr+uPum|F4QIC5m~dw`5MV}qOSEyQv#GgtkEjP>vXumqdg2y*;bYjC{F!CD zLHluDx<0vdmf=CTyaxE$Zbx@ZIHT>`Pofm&wwQjGnP>w6;c=l78)hquf_9Rrn*mBn zEu@N&DA6jTt_L$DUh~LMyK~-&ekd`k;)n1)k`M-P`5g^y@OJLhlnzky+bX&SGTmq| z={KjpCjdyg&1xf8xr@dthCOkOOU}5ZI}hwnkXMgjc)ul*V?`hf`|o$Nt7T7* zk}BZ1N*&j|F8}b{%#W?&C`WH*@dO*1F#qRdj*(Htn7n1pvgt5!=4ClL+T0P-N57Pv z5b9}TArvel+G(uVKt+;Fh>4qPa{Q)l78tdWY*bk#N_gG?lE!Z(1DTejK4)%KUI>(N z>I0(IyTNO;sFB`4q46?z>qZNP3cBghxodC^ve=b6JH?@lk(aFEw{W$e&@fD0G4sEX zA%+yM8pi9{Z$`?_biOCC8FEbBplsNFShYGi5J1tb70;O^?x(ByOVaW948E*%J&!<3 z^s2)UN2;fd%jpexUj)TPt54IFHOaVI|9a~bACjrT)7&sry{ndxCvhp%A%F@?0j0uE zqvD_g-%?k8NxUEX#~)`R^(jOt8HSvsLEF!)GGxu%dfVM;wDXQv$Gk->e&R4u{>$>@ zeo(qbaE7%XMR|?#GD~#EQ4U+X9tSd+cWLsNHsBdlC#T{37i&n}BAW8AOlbJ13LzsQ zU2Xod(5TqVtPA z7)P$&c3?SWonq7}GmSJoVh&ju_(cK9xhz^GA>(i!&iuB5Sim$K(0VR);G;WDT=^y3Evcu!0X-2Y z>!>^!V0^DY?DPe2!NtbHVw}AWihdRd;`ppq^Vr8uA5ao<{WU5n!9R_X4*~K(Dc*k~ z^J(gt7uSLR73_^}X}g>DoVQ*RiOalk7*<5%f;E!moH$cO49YWXI`+Zw%yK}jELj%^$9ib*tY3!d^y}|QirWt&Eu4=*B z)^Q)00@ciZa&A3aS~d!t+=J!)4zi7G4*ePrbQ1wI+E18kMVD2A`R0b&SV)a)cZ+3bVc zYUJ*n12uYY(ddpaaz`XI+}83>-0GWuYB&1h>Fcv(#Ui3a>8#M{e3@qnjy^&Nc|;%0 zzRYuy3o4w9TCCETL2vs0%z%g4EEIvjk)&D?FV+_a@H_`6-Lnl;x}Q zGFS-A)fIW-nY~7V7`3xebfa)f*&x9gz`*GWt6=mIa;FXi8DKALkYgD7k$>-PmoL={ zOKr~sCY^?d6YVlrekh~?L5IGRQjLr>^78HHA9lM-qHe}>VTv?4qM67WLdKeeF~rYS z&4L6y*XonpDYM{Z^f&Lm)M9S@s`yv(ViimXk!cw1kn!d!0cSE1gG=_Mc0^TV#hU z6RN5Y(wYYk--MtWu4lVdZJzGWHt>3*pML@?aJ%F7kGOh;cuVA_hFnA`ycd(sPp;e% z0F{~vc&JAdtz8*($eA1o>kTg@P>A8psU&CeSSuuVF!?|7eKFuDp2L_Q5Mx=gdwzS{= zrek~kW{>FL$$3QGZTCno)*RL+Ft|#Xdj}OXlGFP2uCXv;HPg}h?zQVH913^Mvtti| zTFSvWJTx0qdgHX;k*G12$cwh~`{zfO0RXWiaxh|7^klAbgw~;Pgg&c3M}t*6z&iTK zcInbtRTK6B)}!DdBI`_O$(|9s9V@{x&r9y+@?}^5cx`S-%wP#c+n3_<{F8~?!L{6x zEYp+U{_~s{XCx3g?jhnI{|$nFq4$3-Ms*;s-%m!qH8@?Mb>u1w=@&!RPx>?8%MEF3 zPYKk2Qq6+OQP|Sjl+Kg=vsotPL^P$ym(WoAMob|fBx=b2nUNda{e;HzcWixx-tMc# zYz79B=$0D$d#nXq@&Ngt7navsI4VM8sT6Q#9bu*$ zZC+Qk-TGCrf9uDPPB{{LoFIsqquL`ie}PT%b)eQvi_f!@p^~9Qm^_iaW>69_bnqo* zvFS$ZPByN>#lBJr*8#;-!7TX$YOQS_LwcbM#=4{+GY`A2kCm4PVP}bkWgdJ!waYv{ z%j;=$Xm4gP^g|9JaZTx?_yLwRGHeP$qwnrz^gWEfrA4H~GYn^H?g%xVY5P0QoBwZ{@S;K3qD%7;- znZ{@5Y#HzrJxiT{N8*n6rzIi9GpG?m#hRi3xvQ{6T8W20y^xM~TnmrTSXn5(7GB0} zf|V204q^{!Tjw<93Jbeel5rS`zb9G1;!DS7svHioc!;uo)}I3PT*k7VL?+hb zp;bRW54`PxAIl3LU5>~ZSUbagV+GWYIp>$$o6hj2FXcJgRig80XdW@C|9>J2a^vp! 
zc`u~mUB8&RW29ViEiR1)btC!{SLLYwEg-ngF0%n;U75Qc^snf0@TGeU$vm%j_eAPB zb<0&qao!Wgo&*X1YQeBg1=O4K%S{U|Kch~RrPT~=vUVE@*8hTuD{*C)|3k}aBe!geFd+xKyI*4 z2`hqoJX#lxJZOW`oiXMySNdK?be@j=-HF?#J)sVb?9|L+JS+$&5WT;I-n?lEdbs%_ zpJN~WO?hd5Q`;AJ53kw?d2sOs`cF5exKh4<+e#lMLH$j;z8BvhYCBzTrKQi`^VpEo z=Yv0mo+i<4ml|69V60n&cfF3|+83Vj_k0hM7%}hq`D1Ov!}wXlFeFnmf)l;nE8Cqw zCQ;LTgC-|Q7eFx)Crd--kduZKj1S*D*FhJN{2eu?K8QXK%87q^62Hqg@?zh%||rqw4Unwy!Drx zD)paIN`BSN?d|f!f1d5(pl5VNF7POOD#pEwl}({pA4rb%O=Cler_l{E>4V4CSq5w7 zEGX|^;ZXQ2;jfW3qz(RfkU^*VlU2YBkaG9zM{{K%X_FhHu}`Oi&L$YB4$THow%$ZJ zzF;gc#yr9b>}LW$olqqSdL&NOEim%OHd4I3by#YznS!kSh~Qaa+Xw`Hc5=~8YVtIw zv3>t9w02b8@%gckak#_l>m0#Wp;D`NJ~d5WG3bZ-(WfDa&b--Zi$w$vQdOOOk6-BV zo;FZ;xS1%VF6c3R>W(TrV_Yr8cm|RqvgG!BP^`P89#J0x%DPBVwQHNy&mEuOfbS_2 z8AGZDz+zGd3aqS3UAbUe#wwBZvpAluEBft!tWxU&h(Y_g0u1u+ zV(9i{g1j;@rG+~CGu4GXafsNmNMiT%ekBLe20P#8Wbn8^VA@uHm38(0{rkgj=;tC< znLeF(-dqdeXYCU`-I;k&f;pUa?8ygZkX&f)n^g%_!?sDj7~$}OeJJ$b8k0SlALZ?ih)t>b=qM(Q3)><{{#>_5BX|4=4H+OvD4gYkN)B(bY?U%$D9< zo}Y4?{{=;YWFLz{n_(ea*ea6F*DW7u_JlXML{e~Bd;Gxs&|%JMe4NhAIZ-)IHLTy# z49Y#>US)CPHtpIZ@7-o~ZQ80`j{WTK0q#Q(S=Z=l{@^Au;tC`& zSXuT%(HR7^yT3s7gPfLyMH5oFmqeC+YQtN`4^gFd2CGg`uzs~^4l=SII=Tn^Y|;_2 zesv;#N6LrfnwnPrQmcUrMApqtS?b12uins6%-7emWwUcm93mb^XdeW^vERheix1veEqs9f>1)#h*kh=U{WGC!mss~PMeg^^APw2~;2+;<5t&Jb3-;gV z%M77?WBaB){G0xkJS}&EvO4Nq0$3S%;#(!ZeL(Y=rA*a4&~UGo7DAz4jKsNzpF2+D zpNWsFQaq92Zm(ZV-~pia0BwGybF(etT zA6fmNS|ssKiMK377$thGPmcZm)e&3{%4IQ-B|*o0!n$fXR)hNy#Z|w=I@JN`AVMoN z-(k_d@3Itx8wj^B84KGVn9JSLKkwLYEax}U;tm&*a{_4#`Rolu9t_O2|H>)fIAhpY zKqryW2?3w}g9Uy;99lhUlZKRZ!S}d3(+wadzWCMIey;cACh~T!W1LuYV zS9aFR*TGkkH6=hsQqAgzr%AmhhAi);)^lw43U7C+f22=KPhN@3kNhdl?2 zIKgs4t6~v6OHGi0SrPcMv5^t$+0mDSKUT%WKSa>rB+)3~ZW656?ViSf_zi;bfM%IP zSQA9a4G9h zY*isA2_q?0G~%6FmYhEnPFr4rfg>!nCOxs&awYAOc}jm28_BZs0I7}pRYc66byXfA zw<%(c7c7+H#P0FQ(DL|)jCGMekP1o1Eda*&EChBOgK1SZL@KnT+7?!1SlwL7^33ii@?rd&qAX>IGZdV)$!0ppJ z5?$7?l?3`gZm%{@Q%{zqa|uWlFk(~!z6H4e(k!+siyx;_9kB0|9;}8K+&673iS$jQ zTRt1s@L3szDeuC2d0f&=vu30<<0Q53v^3fOE-VpP{AQZ-J5&5*;FcOgm03Y{^W)(= zZECOZ$Mb;*_2eDA0SF|<{NXLl2~Za=os{ei23r*@lg?9o5l@F`rhv{yx6myX2&)`m zQiN-I7CF>eEuWkO}3rhY(szCnc^)t!zop zZg~v34k5q9!#RU3p46l)BYEyRa_1>-Y$P<~u#Q_s09vnaE@f}qH5Lcwd}KZ^3_hU@ zUD)Y#LD*C0zD(*1^#Wz$0^j~vZm9xWnLMtAk{VJ8g(lGGG z_$LPpU?hrn;2JMC4;A#dugSV@RaScy{j&=IxIPnS@R{oKOIQpxFZD4Fh*eLXoM`NYsLxvc zP{B`uhq>WU!5Z^Vb$D4;hkV{fO~;pF1Rl6_<(Ocso!Ps30l&bDYnIsmJik|GTFFz& zQ7bhKs6GZtMmY9>qO&nUGAG7I@`23R4*HrnUx$}~0at-o*B)iNAay0`kZg}7NnR)@}# zzvKIN4c+-|LQ7-0n{&3a+23YO6j6O`9^ZEZXwnDK%ihc_OUQL>>`7AeY5M@E#M}qN zfTjwSPArm!h)f`?)f*Yv`&aAa^$WBo17Ns~8OEb#dolH*amwsu;-8)Y{g+#UpD(a` z2|xCsO$d2kNw+_g)I!-q<>)ZH)wusc7-2-!Qx(T0@e@7ZBHXYSV-w@Os#NJumhah( z^+hBr1F*a}<#=N4M@C;Xr-b)-9__QLb2^6BlA` zwcI_kSLz=W*jii=QM+yl6fAnR-Q|n|jbp4kE>L`O0~s#Pd)5Jm-mV@RYPjWDA`Uml zDwSnNdTrHb07>b4Eom!VSXkaJflm-TNq6|kXEH)fc60)Of_{@fCsn@d`*V%cXjwv< zbWC`yY)qVm=6O7}`N1Ee{^DoS5&l%&;oS<6Kwx$P#}nIi2!S&c{wklY69lTYMU3Y& zcjZtO55*~hOZZ8lHF}uN?6C5!@EUDj+%VI0R%5g$`|Tj3r&EQNL+yD0KZpMDlBg`~ z5DS8~wl;eTcYv)<`y`KXfq6x!qZ=>14l-?C&(+JAHN!E@M9=0K28CGkZA8h;3QT?q z68K{aKxt^!j&z>3R(kG+lx@$ijtS%KBQ~({2Oa{f=}fnnFSZH3U_1C^hjpDZU9Q~| z-{SB2WtKHQ37-T3^0oJx7G<;AH|P_%mX2wv30(#ez|18`Ua?1T5_&9*i$vDsVW0N~ zsQ|<$j(DEUAL>hWQgsL2fEV5mYqa`-s4F=~^|lTRzuNA&5-cm83P#n%p!@gmMjpL0 zzr*jsm7he`B+?}e|3S|M6xqa!zw}!#qZb|^V^^?QIQ6d zMXx2X>WBHz>*&(TuTQ}?5~Z1!$D^Imp1oc?8e-KLhb-{VOC&X zeu0AF!;h>3Cy_Uxuzn=(H?%t#Y5Y@h1o)-{T z3vx40%cxfp<0*1S8>AQ)*!j8o^1hxE2m`gt0GG-UjRxdkgLB8J0 zqJz@OMOD>9n_jE5mD{7waweF9*XnJ$P>(2}*`HA$>i+H&q?NE&j0Grqgm;U}0%~+W zOK5!h=Zn;}%jDSB6Q^Y-RXIpkw`bh$9AU8H 
zwyZ1MT>ZG8r6xA$6RFpS&RM*r_u+l&U_E1L@NoaXjj2gu`8TvBe*eJ1zoH60eETiXLXfROW$f!h7Ot_2$bD{XJ!=&HRxW!3@89&uwGw$y&egtnqyu0 zitNMX8HV}_OzJZj{U|rhiP6a1=Bk^9DenMZZD_^qv+3ymW>KZz!mCS@cS-4=hz44|jce=4kz^mNSzK z8hh#Cr3K|pHW<`=c}xF^L8y2HkM@$hMq&3QTJF6hof@h6S)A~^rB_2X273z%b4T*4 zgnHzcjTLRjL#O_>lZp7zE83D$te6B~4mI?H-=V(^wXko)3FO32UAu%jj>y9c~|nPweOT z730i-bT;B%w@L-L;xg}@8)exu7>j6Iyh$3QGbsYRa{Iw=Bd4K6t4z6lRT!P0b<*UI zA?)Qn;umFv!ypw*mWKiv#YHj;T$3svnEFV}C|}CCz$n+4Qr%gUC`;-75{u&}Q9u8Ff|VM^A7WAFFqe;jgh*TC0c3SXN@NCk;=EuLq+YHj2n0Ph6F75zf$`; zT_Om^vm5*xW43OMjQbO&>DCyR#>Gl6%(M;Ws1f0Ox)W%Fc7gI ziP!P|PO_c#jMoW2!g}ffrJ|_t7b8twak-i*e5;J`zkAY#JMOKOB0! z4E%t#?6cXeO$`QiJZl^i;$c=J!={Fpl!Iic!LpQc@cOF8e(K4qssptm74ebhgf#}v zzjgp_2XlV2qX2Og->AL)g-`TBxor813$Yv{Bz{^I^=t|hir`6S5Psw8eO zaE0B$1+60R!hyTKgDtr9|bim3gkCDgO3Kf-4yv=}dl$kUkf?%h!;QdkZr60KWwyfccwRNC`X3 zl@>ZEq&yukTks29oFW|#YClTD6|h9OYQ`g#!pn5c4h!@XiwLCg>*2R!d0=KA9)E@y(e`}+`mk)twq4Eez(<7@0@XHdnL-1Y$zf=J|20BvUIYd@;qI15}=qa zCxl0-;eW|HkofM07us3Ya}d_VkGjef2@y)+t?gz^4~f!KH;I&WYQ8abMyQ828l;!d z_VJsUSr|7H)Ogiag5`{FoZgDiiJS$}1C>8J>wO%!eb^!1K3i)MQ-^wYZiZlAh60zK z>oEGe7piufDQ11D<{oCY@dLMtpjCVtz(4z}Iq)AFXHX#ySa^T+Mecrhn{U6zD_!i$$6|{%>uq?HP+u$@~+G{`pXHKfB+!|h}zXg&wsK1wZzBjO!Olz`;`tzE@A?IhR(ozHV`WWA zH`6t|PWiPzD|r^QLFf5L|dNHzol`Ka$x6ydsI7KYa$x9k8KjGMcs@O5l$t_7`FN zQ-cES43_-m8A6frP<+CZizwAr+m)d$DB$eIR_mcg<|^m}JTCBSP5}4DN|Xil|4e3U zB3S&TW^OLrLt7RBs!wN=Z+v08M3Y(z8SB^Sm7S%}fq8KUCNx0(Tjr$+*(}{xAVXCA z+AQ{AmZNxZ2`sEq(({=EU&{6YnX_3Sg8f69kFdpMC|%vhl`q!ryxUZ`Lg=*5-_B3C^G- zhxnKJ3rXJBlYd`y!hMdxz@EtPEPX2Cb zP!RxOozQAMnI!Vki%%o7!Y*%HlPH!|OB1notnw_MFOuEpxb)PzFNV-QvutL3Td!C> zC+wMB()4(kW0IluxyEmv5Jmn|?8|jg!w0{=^4Ev4^PxQYo1j!Dbrk6p(U0~wKL$12 zkEdG6q$JwZa8fFqyS=ZRtjDb2e2dko*eDVBrj{p${svqpeE@_7-W3L0ATCY5B5C8pJl#<^$mvHkwO>InLkO&w8zmKBb_8`5{EnB>ypQgCcF{bdi z{zjDi+Xt5P0}w->%E5LYAVHV2GlRPf~RGj~@i zSHwBdQjzf{QCpiH&in=Y*RNY^cBo@M?Bu7}o(suCjSYkQlPSDHi}dnY4bn&#F6XC$ zB9&@_zbYr#a*Bm{vJD6AU72k7yuvl$!qGwRS;+bZHRU-53_1u^xU>la?|AvKYGq*P zPz?-<_(aEMW3m{HgKyw~vvS;Gcu(3zd}H_e(?ZFI3Ksj;7WSHBH^Tg>Y4!~T%P>xs zqT`zwodts6OS(19CJA8)ArOd;1LiXBzxCaH$=nU9hf(sDSn@TwT@%r#^J*hGsIsfZ zIXb@WQV`9m#;a2bfW8wVnYA_}I3R=soX4_%={vhW^xbORJ3=P1b#F`*9++M61zS;{ z5$#~vsYfhFk88aCMH|XR;3hYZ?_HL{whkvXtWx2c`n&<((IFD*BA?Tvzn?&#s)~OX zZwZk=Rp%7Z14(H~x3H|ECdLx8aoc}I>_CsLG9}FW$i)WBI7eMKDY4~W_fW~RLZskF z&R69!jklKTd|^KRjxs?;*U}n4P?Vp&Ul}Y5*d<>1Vd^WOR-9877J{S+4G9`L3f4Le z3>PSmy0tzdm8kzyk{OPEwaUBw53v7lH^e_*FP8b)35-r6Qarw4W)PAH90rD;23F75 zGEB8iuA3^q^{G@$6w+Fg9Q`8|+$N*~3+GSLaaWn?(nzM7NhV(3wDpFuAtZNFH`}QsUDe%Uh+5C> z<}YM^g9rcy2Od3>DZm7H%roK~w|<>vd;r6)kVBOf4Gt?aeR&s@uMPBA0nq*7H|RcN z&&hb_ME~-X79N?p6l(WsGcUHSoYP(8E)jHrEvZ z(EY>jtI=-yFNf-#|B!M0SLzwLMxp+zcl=+6}c;lkQ;rAzB;O_}9@e7v&#J>^C2>Do2NIQY< z0BNJ;H2=|TN}CM#!AR|p#r|j0A{~PZTeb#eq}|1P^=Ixze7r>Z;@m&TOsbT4kf^Qe z!dJe3T&e#&(Et6Q`>Yecc(``6O1`={-My!uTfhECp>~DEKqGLoQKBfqtrs#gAIK{C zxU7PCS`Osn5w(TGAbC~7oPP5<;KRKuKq;{-w>BaylTLBe5;jYC+=VK{bYmrtZ_m1Z zMJ;}Bpqe9EzoVin*AT$y(c;_Wg5WBRlwg@5%FNg;WjW7AWR~QA!r#uYr=#FT%SuW6 zfD97hRp1CeUEw+aAKWSuuv^R$xbqC_s@}wXjW|r02oBy|R$%Ox?u9xVe+L8C4aET2Qp*2Hv263K~d@8{Ll^iVyWrv zX{w(U<)c~2-v``0|Jd!3SJhLt9PZo*wOt5`E}E>hP$dcH zVKE1?1aj12!ZV)9MdSVDI5A<-HBgFkg4hl#j(x!T%Y8ynEM*!b;N7o60~}A4 z6Q_nn6A?6!h`(m}|4jkKz-%S#*1iySJv`FMswi)KL|J_5viXRb@}{hq?GuOzTZecx z|F6k%h>Y0wV}cI){{H`>gWA6x+_-AM>stx8WT=3SSK@A;S^^U2uc`8zeI?eGHa%dV zLtFW!ECgCnV!=+9AkW>-vr@4ZuS8kk=f~k)Z<4- zVlUl-89xgbjY>jS(}BGU1)~S15@c6+`(3UuPu(Z9gc8d-P8;^fpbIownzs!uJHFKy zq`xUQArGjbqT}(@8H7;?o~eJrRuu;?L>9k*K7yS0q76E4(fQ~#>6W#5?%kc>gt2JPl0>CquWh<*~k zq!1Lf)mr-l3Q;MBFV$?LOfAP#6*Uy)CJ_`;IL8Fz%je^7v7z=gA%%BEXlUkRQzjGr 
z6Wf$nfRUR18bmD;cGR$~ED^-BkAxvymSEWy8@9K?Kn_d-&_31rZe;f1;Ax3B{w@q( z=#!|SOlf>L@@{1s0_o?SEkU$5*qt+|WXJz)+Cra^xM-r8`$U{8R4|Kw+|v(a=2+)k zW02~e=dt<{y3lO?%ABXUr1OuDxReweHB(LsII4W_$_7+Qu9o|Sow4oB#91ZO3`3j#%q7N_#N|vDhvR-Oy(Iug{__58U)L)B zI$&=rI_D*P0i`nF3)JFAWoC;HhJzY`JXjL+5`bSCS$$vE%@~F>ZJ!!kfC`W@v=CI% zIbZ|1{H(i*4QsLs;0}V6d{u-LgrTl3021yW`bDS|qR4vo^XRqc>JN3L_Z?tGhnO&^ zcN}~I%^c=r%aCx1-^{p?QKxpaPhwl7YQH@)=E8Jm5Tn>C8!f0D+g0nd4YzF=!%hi1 z=EGYW7}dA)ABlDb5da6Us+knjf1aK6fH;xN-@~9TY2sTO2!L4*4h>Z58y$L!QM3`j zEH0vAvZ8c|gknZQGRN1Eh#~~0^BL)|guH7UewqHCVWD@5M7n8xrgJ$7|51bgnFxM= zHoZ*fW)iOVYIT(UY0qY3IlE@8(*=4deZoyb$ynbj>zHLj$J}?9x%s*|I|xs`xih%V zor}K?*b2I4agMylEb7HFCKUZ#zf_ zYIF^QzdXS3Lt!mQILd>g>WXjNL(5O?@PclV3#ErAv=^iov!p&1+y@RIXm+2BQKwVO z#<*(ISvn!y08k|j{d5FJ3K>9abPlH`njgwt;-HFkq&8MS-|hBC4YRQ=Uq;Q|>sHnQ zwM)mi_OSTwy{nF~_I~NLbCi1*#$mF_EuNMhehF(MbGkmLRG|pyk)n0s75m4KQ7D6| z-7{-;7SIZ?oa5pbpa`r$=K%o9+5#DD+e+6rueMkzKAzXEdms|+Yv)-Y9(oJ|Kbko3is zKo!Qm)P*?@dHMh|rzQE46(xC-sy{RS=2pI~BB8cjHWG)WCxEYqBZA{=sZ~N=#DTU5 z0>DI@zh)wS^&|y^0ut4|hMv_chp(%)#ByKhJn6dAa9kX5+;CL8)@#FGshKcT(~t{a zA_I<0Zhy-L&nnu?h;U{_wS^pUH3}ipshk!kw`n01$fM{&y$$(89xK#0$A0@bU5Bm$ zA+9C5Wi|?Vz#?9InzyW6mfduF$^I5~sa6f?8K(AyRsbF!GZ(E%!U9R8J*oyUR0G5l zk%!|OCmz<4Bb#~%ngt5%sxv+qWCmQ~OxNY_2)fwwH%D!{4VX(!AK1Q9{NJ|7N)eTj z_o8SaJlf@*){~_sv4vGX6~HG^iShAv5D289&GYwWTEIz1{o&o)lH%7POLLDN%)eji zsUKd%jLxlq@lDg!XvS4y$_+TqxoSC>b7m=^xOeuTa%WU^_Pd7un*3AGfjFDfa=RfB zhLbII2{6TcfYj5b!N3#>S?Tv9w&dh8Jy)}8AyX|>6W7a@{_Fic- z%Vr9^MYt=)esb33-JbIm6&daxkh59G?i&?o7W3$KgKG^r@T2<$!#ol6lt6Y* zg9#!okyOmlXgN_SN zDGm7P*2NmMBlUYkI~U6MDE}fzTuU`PLRxh}mlIM;7Xx;$0R$;9N?DKJ67bLvd3y7xQ-_kxU{3O;+x8_fGfL?A2)snV@ptE~iqK;R+JGS&Q&K<>C1<*Iup%kzmO)j^&kP(?#hj~mywfL*tssDr z%QZU5$Kc!1kBu%8v`zWgxKW^Q~Q#|?|?J;&4YA}C<-@%BTZlB81)pSvmOB_?-`6!0Q z^S?PH^49foL%W`tPAd7lUHt4H(hfjBp4RI{p1p9kxGEC@L~`pW4ka$Q5_#fQP%=); z_$1%o@jyJm;+UKFM&W~uCa=LtpoD4G?mCI z&w0`N0{zX(LFqHbW$IbV!TANv{y>JW^xSqVquPf#(E_}=tcNq2t%0NQ0*{5*BY?hH z(6k0*(nRhBo!DHvoWtGaeGRN0AO_2128IVCv?1x4lq!W-t=lUdp4Qb)3Koc?f6d)Rk?QAIcNlD zJrEN2T^$lr^AnxovAGbtT6%*FL5rn3K5_I1q-19PF+yeo!xqqE@)NfUcSNqqzg*#b z6?E$XuPqJuVFoF?pG}pa0uV^Ps?uJ$gj3pI$o;&$C#%=RKji_6!z=wLN?2EQRzOkU zkH&jCV;EPiUu{lS#Xtk z*`1xF^^U|KHuC3*Ef!u7;xXr8m$a%(r;Z%?I&+^dCvxk02IZY%BI}YWEE5j?Z#O5JcAi?BhTC*j6~^ZbgO6lFEaw=n?+vk03E#oz}4EtTG?#-_mef5vcu!v zpp~W}iQZ@u`0&9dZqP)gjXmiIjeLSs?$(JnQLQris_izN@&ik94rhfSIq2o< z-I3+c#;u9c`;#h*k=XWIqN)(B9fi<3qxF@kgGs53QdQ4?2bs=2!>zJgA2LpYl?I@} z{MIjEwkiwkw3Khj-fBYYaHZ5wO1ltUw^RJTX74z5eTwc<>=UJKk6?Ol!{e_^X-nNu zpvisl8fvleS9_X=JWSEbVz{7+z8|soXIw$~*6q)yoJ=Gxl9qWU3cX(*;^c`mnH*r<-6pr)LUae8AC$JXr$d z#j2uu;RgPe=}lD=YjO7Qi4D4mL9@8pr=g|Xxg2)h4UbiM9#SK5<<3g!r(KiyW+e=R z?||*Bo-b|#d4p`$4;yImST(A48(K!T*6T7>wff^F{f}d=jH&qZPOU_4WbM}7TO4h; z6;!F6)5yIhMr0oSTxA~Pc9Xz%faue=J??Un>KnAn(1?K)jFVk-M+0?irb{XtuCYBE zBlW56`=O}QC;yUd&&VXe{=bq{o)GV2igS+o9%rlq$}gN1>0W~eufd(_@xsm{4Z|Q- zM{e111aB=masK7($rF-)RQt@R4n7~a)o6|K4BqPgm=x9{@1awhw)`?YSwXXAvGa|_ zm^R8&4Z*59hIotq1zV-jFm2k1#Srb^KAu1IBV47gl`z^cP@>ig{Tpc5@;rXsD=5al z1v7X-a^bPqw3AT4nx*@$MMg%}S!bns6ukr!PBR6)zPXLD_tr%gS=iZix%RbtbZ!W4 z89$f?DVZ}8wxLQI>A<`8d(B!T5c5Us6vwk4k(+4A$W63gW`@3$rG!$xHs}McY>GCs)Z`ok z`w22hD6u92%jl_YYNUO9!MWj-ZdZ55trsk`(7Fl$&STEJVb>8B7|^Y@Of9=K7pAx? 
z9{f6Qqzfch9qPzdP~wRG*XSxYtiI}Oc-pY?KBfIc9|crt_HP=LkLiX=*r}(Fg&r?C zUkSd%BlL6h=mh;A;;aX*<*NU0>g2xDf;xW)LHZIatDB*r;V7?(RUpnmzj=MmHFyX) zTw|2{_7LT^dg-6;)*!=LMwGlg&zISw{q7KDk|VIve5p5ehW1;3f{P*+0OMZ4<^hHV zDzj)eGhIN%$cvy-8RGGCcOz6;5WZsUy18ypvzkkv-UK3*B{K z2KMZcqBcZvPir#ieJJ)_-)Ho%$PzCRYySzd{3{Qgj4F;c) zxk;sYH<#ZRc3hJ4%`P8aeAx}f=Tl~DRkn?7;f~Rjl8K-w_Y94~J>6ReeD(CS+DY?K zj+Ss+@wac5OobWX12rKYT&TK=V&w-JH-5?F!z~>0>nJtYo2z4TO!w;L1xp?|7fJF0 znGSuo&jM+^NS)77efhx!ioaKmM;)Wp!XmOU)zj=L7Pxt(;(hsc0d75s>(i`;awTLm z)GJ40)~3BPPu~*W^73;Ae&Qs7>B>pgfovvZsnZ&jX@(2974sfeYOh3S?_mL#b`o%# zgd^lNeN`6+hQ-?}F#so1>ijs3)Lel7HTh-FUD z2q{yQZ>uN(KQLI^1uIjAcNvygnBg?hm5=IuY~uVK-v?qQdakOV(;Hf}v1GLPqct3` z^D>OM(vGs#y>eOlhws8K$CCIinw0^X|35+3zrCM#{q972^8Bl@I75ER;O*6Ua7OLs zEY9LnaY@;Qb0MJ7`KC$d&qgQhsRS`R?mn*@!_&6HUnVw|GwZ%Ad9tHAQmXA-15oDv z1ZkI*hX;;J)ioZkNfho+aIb89BuBlhA22JzX)zns1K%~UPO zCPjBS^O1)KgM0!D@2-T0l!N(WO&WQUl+Ge;yBL@XK=oI(Q&WhG5Rxq&K?2vV<}U>n zJP|Ev$!=kuX6=&gSEj858_nEFMOy_=0*=KY-GBSY zzJ4+~c0%F3)L?!S`$XZjz?Ed>eV&%YBrNx*k@4wT)fO?FiIkOTTRDjCf|O{56$c=9 zaHh56g@9sYxBW86|KI~+&mbbu0z1p~h1?A$w;7K@5B70NQOHkKEQbTA0>*J?{vV=C zb?IQ%T4e!K?>acfS9h~sMmTUn*h*+T>%T=+vNP>F-8ZpKS1R~<8+cElc0Q4NAGdn| zbC4=3irXuE#J9kF_2ajiawcIN;U^&3B^>cQaJ1$MS|le?-s406(nNLm+|mo614O}j zk0qT0!6htIaLdcl^5w;p^tO;yk6!^2HZD>dxSs%ielkyy=aaqIQb=sM#3o=D_%f*K zSWeJ|RkRDkyLh_AmzRj3lStIs7#~f}t-KKua@k5RWZomS~pJra{pen~HlN`QJu9R1O^@TisAxiuaFnbCl5h8*H=6I0s0 zNzOtJ1!2~}h_G-l+PB<77i|0td#^P2BY=bTndMuqqM+G_wls47(+?(x4hXYox?<-i zKSS{Eit9gLo1MD}de-{E)IOIfVy5VYWQ$#L@OJ8Evv?4%$r34CW$V-Y%eQofv*r7L zGIHP;uc2D=p7iEXlivNae3|ak!ohPT$mt@5&m$vS3E{881GZ=9UuN(0iz#3&m}#^#%E9L;82HB#~74hJIZkH@0FwSB$(jtTHAtK{%Em^;lUr5uGH ze&KhTCh}e?OC@Xc&&l=o=XcB2Q{3-XkbFoJb>5;f zqVd4qjV-?Jfs|&N5{6Kg?6}>upYIyVcGgUc_2iHEIB@{lus;E_kNL9*{!cH|G=73P z0~^aKm(1FwcHsd(q|vLDUR|1M<>gMve=cG;L;)QX?fu-%kZov9P1Eo2RjcZECjKcS zBVWE%ZEUj!?T!NzQ^?FZSavt5;E`f<6 z&9$6!A?I7nu3ZBcfBr0~u%`?3sBW0ZxM`nYvx%Q-zvnNI{!?)aOp`5Hk&evzqyxiQ z;9-8|Xl}W^T?TJv%2JFwJ@pLG?;65e(wMe|%C8$muWVnOG*z?t{S6?>K2SlGI;AzN z3`8A_i;^9METyxIWL5DZX|Nes&h$TaA4-8Nw_~bE@zB)5U+68SMT_b1-*h6`61V&^ zc7WVAWL965POAaKN;qAqtpQnR0#@-^6su{lA0#~E%I%ujYs>r6T4zbOUj%x_RV9Sd zV*p2%;E82mU@+<(q(~HYq1Me9(#A}w?C?Ud)$)u<1z4WM%Y>;K20jVZWoM{lT0uEa zf;l>^=HiIsaw0RTx$Z?itbHFQI2LH$F6`DcRr@k>0g z{Uyd|cY%!iS9t@u*TI~1fB)_0HG0N1V3y|tl;*CAK@LG@id+8zZ|Axhzw0we4UAjb z9Opn;Pw!1|d+_@5R^14=B4P1;+3qv#@Pm{MvEjRKkL1uS+gzf2eVqF0^5!VDVs|Ta zaS^clb0PDm9jr?S^z_rth;;hujlZVKLm0HAc7X|L1u982wFt}A8$mT@674nB9LtO1 z=IXNmV!BRXNv+g<+lX+0+?F^zl;5n3n%<8rMIhp(q08(OX$Pz;X#<*$Ak8l>QIWBd6g~%EcE|k=KBi zX}RBt(TA^Kv65cH5mk>iq`dGNE7t%zOT@zjIQKg=&>ZJCS2*?FvZt9M^gcYyuIRcCAMXBt6v`fyZbU3n5%Qajth=_gZlNAwm~3 zcuo_V^Z!N{_$CbgjJD_Y;*-tZqG$}ld#LA(A!e0VeJj!rEWSB>Ubud9c)@!Jk2JJUUP_Gj zY}_7dM3Za|{q^M>9z%KV5lv+JT@6hIyMB{yf{9-=7LfyL=vJr8i~7?|N1Kf_KF9Iv zesJ7*4NjS;1qDAFv6}S}j;Xuta_-_qkk%-#&n6$vt7d2}kQyvxb&Qc8d5$mb^m%-v z3Da}#0#PdVR)9`l?6XA8{;J}mPbj@C#*3{sHCjLq_^jBYblgwfp_DhsFPcDduvRX< zd^Rcc5As#6bFqRdUXpg3i^p>A=xkS&9`8>9C5rm;PGcm8>EshfEzqs$h|>krgbw#a zcw)D9q_N@8gQC_6iZ#RfX_{#6s*)f|En_HPU;ZXi+8I6Tg{Ln&=`L9{#}C-hA88^u zWfaY7PH87)_m7q5>npfQ93fRiuIf}LIls~{Yd@U;x-u0(GsWT%DYY;=?06<~?8W-w z#WS0x*9xSvTS*2urE7fcdPY`WT;Y{EivvXk&O?uH6vZd6+Ta>mR7c^`lNB}mnhpU; zt38jXP#HaAtR!EuU@mtN-!h1ZT*ZhOdw2!DTK{Mjz8ok>u9WiHp+sdG8?^g!q_MVY-?%^KnfSSGq5Xx8tsDri z18U0_U!y07YNV4LK@9L{w)wDb>-8slnn9!~3UjPMBY-Ei(3Qy%+ZZRcz_YuJ1g?z? 
zU{}swWp4jHDN<>SWM1E`EZ}gndv)>aY_1A_iV)&TpafJv_ifyGX2MCK4}U6O zsOHm$Jj)ef)ki%SoTlm6kW4}LhDqhtNfn9mTYWE}nE$^Es5YDEDqo&a6hf zpucsd_la38+kr?cC(sAivVJ%@SW}tzp{(gD*r2~os=StgE6=)G`dCv4&sgcB5el@z zz#QT7)X2Ag@=@kVnxxR~lcLk&l9r}b%HYbNDSOB2DW`n9$SNsPx`CEB^HFV}g?P}o zHTOrz0lOrLz*c7(!WqGarwvDtx}bm@S#|yNAT~sbWEiOrmC)R51y7(u+h(nf*x2St z_EMjHG@Qlof_yiCiQ)RQURpDN<=XPd3Z=i2&%d+S_jj}(zMh^g^xqsBvDAJS_-%M< zX9lwa<+BSG4?aDD*h6tibM%>7Y?rb*y7l;9n6F#vdl9W9%4}Z)MKm3Fg~rO|q+^Pw z)nv1HM~njceNWU}%#NAKBjUpoRT*s1E@i!SckdAqXOED&j@{w{qX~f+hs`uif@Qyc z=4t|zvK;nEsL|Hfr}JQG==O~H@QV;WHAN6kNmEakPvxI!{nee`tSaG1QfQ7<#i7fx zmk-_)vxx)ufkk3M&tDo;r%Q`Kxo{WipAGVw73ymM3^#Sr$ixDgJgV```zSVg?;fy} zCq9MPSULuPDpLMr*+vS4{2%T6SIY8t5*GgH1yCp2Sx|%(&D96RHL8-J@R?q9+2W?) zU~lB>hSQ+atjq82sI2;3RO3N2gXP+UC7wY)*2)b_;H}!1Z#h1+G-7e}I;f=Vqdhox zaC>P1Lfv`teZ8i{Xi@uTAhuWx-@dHkk2QIha$FW5a9xeyGoIQR;I{aq_Z>|<gQu1eFLO*XJ;kp6wsfE@$Yx@Dlx0q7dfS@xm1oz|=^oyon_8v82FK#5hT;n7^z zZ>|er8WSe-KW$D5LxC?;N$7>4ABS50=TgZNa`0%~w1&8QD~HE*Uj9@|zoN|+*#?6` zm(1jbtqJj3=os0fJ9Ue(uAT@|snG4zzZsL{T-)E*pi`t4Logi<549 zd|6#Jn{7V_!jH{mzg`Tk<`|0IBy^;_yu4=kd~a{*t3C$|YW&z+x67ZA9pO0QO$4p%~4JH(nO%DhL3B z_*=66v&)=p2$DP4?HetrwMufd>TTO7JMr#QSu`~Y>Vid|cKuxEq~tOauk~>1U?#uy zZw)llqIg5l!&vscd||mhm%|nZZNhb6%(z4|7$9^dHibi!1K))o^JgTow7+G#g>WA= zlGJtU4=86rjNs3l)qV=(tceHZGJgpY|4|VC{LiPn7r{uu$2EKvQ}1PY`Oi00wmp3a z(qzVBE3FsY+tXTNA{R`?@4Y(09q`hBc-GP+`~?1QgZl>I`MFuwm##G za4#XvuS60cv1(8xou3cuYYRk`M49z7FIhAniRTUJKbEr-*P<1*$b)t$(-RFl@BWa5 zcf^?GP^R&c$mhz0BYUwGs7>Yi4V-p+Yg41E$_Br+J0q63)kaSvJa*!9j6Cx8xd z+lY{(>n)m5-qn!vtBwsfv);NfTPH~A$i&jjH6H_*OVP4!bz2%8kK0b{U#5e`G-nJx zqK!|vI+o2bhj^o|jPVp&(@_X|*B5SOL89#2k10S|d(lKoI0m5pHUIo(st;dMD1G*$ zpmUCu{qYILLJg%S^S@sBoJ2K+I1-Or!n*Ve3{vIv0V!n)=|fnsC-IosxiF-|T(N#6 zR*Go&iPtg78YC?faPZ)M!%+6U*<}k85LS2ZHK@uTt?&GGXB!o=Ksg8C{nV^I_SXlx zV|B1cD=7;5iZD5DS$@~cS9$3iwwl+tOO(Kn=9v!H7;n%)R0<30A4*`+9#eerDv#xy zxsJrm=o_jt@txQm|CktC&0D*@{%{XzDC#B|d;dJ~q$&awGg0Pi_&;ym|JgwL*NbDP z<$y3TY@o$yq&+xENFVmdTITQ>_98PCb7OlseLj4;ad}Rv3YaNfB{Yn2F5Hi17QOJ# zY~=`LtH)?Z(87Bnrw#OS**SJMV4-Bx1K&8mLN8t;$K5O)6I0yOX``K!4Hd)@_?Iq+ zd36t@^KK}ftWMYc!SPXIo?!)TTKdygU4`$;`6IybOqx^snb`z>)38ub z%J<}1gF73H6w%~Lv$g~6S8^^4QeNCxu=|1KPxhyPJz5#kg;hWznJkJpZ8oat+THGg zQS$bEy^$?GI+XPOUmuu)-I=TdYVSzrY~H1Uu(*coHI94O#7Y>OP`pmYE0cKb|-@bWx)CvN1GuWp46pttSJBvzL&vMrRyx zS)s2_P80~?HMYQXm`haUkS8aEwj5(0t6VFveM0$?rUx3(H7)y^`3TRpL~+?{X~I`& zBR}mhue;pb-}@lhIg}dfRhMT_(Y(~Hq3>P{QkMMYI@h7_ zNwUsi&V1O<&7c0!UDn9~bO3xpf8}`8uYWH6gA>*m(qv)DE1EwiJ?NoA7=0xX`DbqW zA77~E9&7;@-89Zv+%_G~*GvB(1cr)SUVRFC(|W>e@uUC3Fjj78*k{C|uxKn^ps*%S)7&EE^R563$TxNt7(+w{!zRyXcC5DF2@QIpIJMER3 z0~*^>@A-oU7X$dg9-j6#;(Z%0^XbMU7HOy~ivPPTrptW;=J-K*pYiY2>3U-uUR{zZ zI2+V#Ue=i3Pv$q@ZjbU;RWPuOs``LVi#P%rrYnzc#NqlYlJmKWPCf@p>*;?ftquC9 z|9c+^|H#u!!SyU!5P#)av$AEcwR=W)a-elRi-S4{Lg%vprte{UqVq% z6Bik01x`TE_q2#)7HxdeQ*H1Re@|EKFCJ}Z$KQX+b~?Me!k)XcHWtNy__64V>2sOj z-zCL0+&5$Ep)9Y#~ zgt}Wye}&0^2B7Ml{qPL8u-Y}ybKd;;t>G$dcw?>dr(^o}fvcnWhxCst=Vg87KU{4! 
[... base85-encoded payload of the preceding GIT binary patch omitted: binary image data, not human-readable ...]

literal 0
HcmV?d00001

diff --git a/R/vignettes/images/hBayesDM_pipeLine.png b/R/vignettes/images/hBayesDM_pipeLine.png
new file mode 100644
index 0000000000000000000000000000000000000000..fce9937c8d2602e5cd8734e9017b4abd006a4ed6
GIT binary patch
literal 161915

[... base85-encoded payload of hBayesDM_pipeLine.png omitted: binary image data, not human-readable; the literal continues below ...]
zO1*0e`pZ?&R%MM(WNbmwmDU{uPR}qf`uaUQyZmlBz9tZA_TU*?_IgQzRxo`k z{L$^aJKaL(k1Trpi*wcY-9$&aBRaAho;O?FWS~6WEw4q>4T{X&@}U`|SA2~1p*&A} zsPd3}Ca8$8Cx)}p#ol3>K8m>b!W=ntO&r!7rdzngr4)%5m?mp)HiO_mCf!fi^*!pD zMy-zFvJLaQ*j+bUyP0d{@A?s3twckoe)Rs}f_`guy%}XrZe(L+ zw?D%l?c2>j`=jF9rG0j`Cg#|uW5!4t3X+&VMFv4CqSp==eex7+-(N6MKSm0|OM8d- z$k=h6O~l0v4Z{>$ecDy=!|-Wg`aAo(I6t$bbU;M1+tT_~>?%f;P(+wZ>Y?pm*PIZ) zs0kV=lGpxNnOP?OlFC(4uKm$&%aS)j7m3Q+XR-nG5@|_$_E={yg8wk8L}ZW4*IwZT zA9jc25z#xnNgr5}JoeGdxAK*GQ**6*F<{rnL*$`GOWM>*a8-Dr`YZ=MF=uTG#Y#2V z2S)!9OvJ@H*!6+A~hX{KS&`*sNe+ z9lB9;)o$q)bz|2wJD~pxS-_rUtk?BuW&wh69a3)e8uPM9W^IT)w z3a+)+ScI{5iQi+VMBt~4y`f$Gf%QEnn0r=yh07z3J*U9{8x&II3~s>lQX4>jUJ_7o zavqpuq1XH2a53?rdYcOPZSAAokJZTR){FiG_s_p|@<1-ulzajX%!2Xg$4EK0H}rLa zR*r7Rh-Iy_5_NrMB^SLSXMaKp+&2`r%Nxbiu=1tmoGk7pj9C!j35R!!T!7gO`A zWjK||xK+Q2eK1*oFRa~Ud&Y$pgj1#^GQr)Is!GCw-d881@m%d0U%q9}C(Ese{?TKe z6PT{=be+RA#o~(ccNkC^cnh;ebnltA{NU-T_T#CPnj;0q2>cjFeGFYEYfQ4cY)G|3 zR9n5B!nwz@nj|!>dT;L5mm9YzKQ(OX#932gXQMt}6Ve@GTh>t zh#|sy9~A)k&JC<@!zbj0rAIm&i?_j7djZ`>khINwN}+Oczngb>7$od(jo4M+uZm_J zCF@P){m4dZR>;L&aNJ~-tJ@%w64978oVHg1sSGH-Z5qOn>IoGA(`Ou=Y_~Y7M;n`;k}M~h zW_6XSn4inURq!Xn>b%Ws9$y$e?CMNc;2%`yT7cG-CyT; z+=8^6a>jFnd`~rgSJD!#NWQq6r!R!N`BhBdVNUeh$s4mx8A02!^bq-@GDM97UZo3; zHn29)&6{%BaL9{L&GW8UG4Y<3m(Zz0Wwm_8#}ZH#(RqVQq?Nx;CAcON)R)vbh-seM zE>!)|UZ>l5wNr{4d6CB&TaE0g+*y3k=qX)~!&m$du-&fwb#Lt8^&xeavaB!d`B5#z z4-FX9H;Cb+c)SQX=tTF4yw1iTb@CcQPOk3(zag~kbAfB*>T8oj#kWhj1UV!`FBPAY z^WF9lm`JisYjKG2tnzp@qc@cy1ec&*x05ffNps0P$mx?^D|2Wtc9d`WY6-X$nTUE@ zohx81Xl8@rFE3Jw5@`B?mZ_}%tY-mzErd)D9_iww1YF!!7m0)HKYQF4zw?3KDcxRc z^aC*qeWDXvaw5$|QOU&=`PS$r(c+SMX$AG}A|Em%{~_*rK(VTTv}C3Nd3V91&^g4^ zF-~N*Ae&og5)n1;+c_#|>zZL%z)1H2QbS{koS`WtTF4#NBdgmmRC}|g0`)9ep6cwx zVYaORjk#T&lJXN2mARIcCiYu-l3Es4sc`&|5}VE{!M!L8*m|ah@!}WQO@vRPulH;$ z-Q{xZgxOiM%4JVgu-hWviLo8XrgzFhAH5xS=AS+%>wfTpkFJBC9uJ-FiA5RP8U0q@ zN{Zs@OQ)@Gw?x)i2KV)hmnf-EaiM;jAQwJiobU>Luxc6kvTG!czSy)I>+Z*OFM&uZL%72weZ$6o~<6dW*dU7&lovcM}unpI#Wkv1mD`l8X9)3XYpluL}WH*0zSEEAl>}% zpi9G!FV_>SR36m}xDB#nVa#GPDvrb=Zu97b0{<{Cc-ThVQd*Se*Bph-}a|II2@0A;nEYkKZ zE2fi7Nepr4_z5R#;||Oq&X*af$YZDS>b$bExj)NJKxYs_tK~;N7lEx5TR*#oOfHQl z21ALqRSr?M`a$$e29UM>lo0u&eCBLaB%}`sA&1i`&`Tc~x@@XlaVwOOp?LVXpq?U! 
zgq;69%SVt`6rX)ZS*av`){flF6KB`PDUPZmCBFf^MYeGUZ zr-K@`Jo4z~P$cU$k)7Lz|r=RSj zYK8z0=;WE%3jWDe$1iYN7*IV%pv}DUyDuA{qTSYoX!lozYtnb6r%yZwf7(e=FxWZ2^?0d(n`wGmtruS9xsI>j|{!8tBXhVxc+w%YsNUZQB3T1|o< z#Eyrf0j;eojQ5d$CY;h-5zD5ZYk%`_CVQ37f(sL^VYIv(%FmizWD|7;Y^kZZ#9YOy zdoK6o8tvR%@6I`g0oeYSHDgBz}uY*f3@2y7t$-iM{CVo5ojm9#e z{l$iAMQ-Qn&lmt`-@V*3855cHbI;^e`t5sw4s=o^fTA)?QT1RC`1HYK|`X!J*7xpux1tFN>d=plI zz}c&_hLekdGE zL}g?KnbxB#jbJ0~R@+?Wpvp=6Cz<}ZgFU21@zhF2+$pgq2thonHXjU5yPKzdG}4Sr ze0)5_+xTnGtbO&h0pZPaqk6I{;r$2K@dUTRH^+k5Yv!K>kv2$RAl`2;*xd8Rr}TX+XEb(U!#L$Ee~y4s9`vex3Y+W(muAEw*oQ>R`*?p{@2j zApoIj+V$82qOa@wvbF2Zdt@1B0uvY0n@>d@x=7q1?)2frhZOY_DE#CDF(My`zA*#` zA6&Mb10%psSGDhQfv*u-KGnc+#zZy#P$X~j{Qa0ql==GT28aC?bfHz7PqQMVpBHSN8;&@(=y=nz zPZzI{j^BEb?waEupB&5T5dEq?I*BfzS|I7IGmqb~j-&NHl-8DxC~m8SESzchb$Z`) z!@-LB4dKyjV$as(!}?a6I#hXThOaxHPa^6zFK*X~s@*0%(k1fuXHzpOd@>+UGDbId z2LXxel^ZJ+_lFRne-5+%ar*aSC1Mmo$5LfX>VoHeiKJv8LZ+X+5*r1WT-Fgkr2DFg)xIzLV}p zase&Zo0#Nfa$vIC%3j#>v?K4+T}W#srnqP4v#z|z%d}KhTjb8a+%e>OfU;bg&AAUR zDp6-8WjF5IcBk5#ZPHs=cW@mzZb^|0Nl_6^-5c3$TrJoWp@X~;oFDeK+iPn|UGZ~| zQOmTXjGXZiRcB1=u5bubx*mj#77|+V#dEw`Ot`fY29vr_HIM1dO|JQ}GVagDx28YS zXp;SKALU*bIwBHPIYNx`_i@8NPj&w}`~>(l=*HKtm7)VjrfXV)A)1&$)$xF0ARPPX%goYUs zmnpcw2en(6c#Aaad*Auo_(Z)ou3BwYT&#L0IJ6>#ivRedno7aAQN*6`#=z;B8Y3*6 z!*y#-X_ao$fR+ebSN^<*dr$1+MqqxA0KQDb^+qrYxMRrCc|kxLS|z^Vdca&+;(smK zj57N{mjhy(eV0Q~-;ZO|K@zC5)yHUaXR9}g*)RuQ*{MJ?A6J8@Nc$%kVt&mc{C+BY zO)rp1KzTp1AJE$DT$q!-sZb!OZotMrdHG5oANB`Bz+A zl&VD;=*QX(6Is1}#7T$qawYPF(9uGlkg!4ZIZ8AZy(rdrrLwgf&^z3cNo{qQW0^o6 z;t9L=Z=V|#Lcc0Y?9#}ev_CWcdnXD5Lp(K)zgE@Bslb@)OXRyDz%(tiyvsGJJ5o}m#Z>z4l2Z7bM=QzicY`@JSe2uCG*>-Tut5NqcQ0snsjg50ozRaN~VtHV+ zGd(i203`0h)r+SxmWDZyJ&;zhI=}D0?*%WVI)Bl$3zZj5(i)aeH+0P_T04I>Vdf%u zbkbfcL*v@N0XzIA&r-{Lz94rdAB_-kwU3J)QK>??RnZfZHEnc@ zIjhWuvo35co16DZ&rH4Gg{b1oZNB##2{C*ZR4QJYJ+4x2jiF#iUcOb=L?NS83StBw zl{U9Ur;{{_+bYNHh{P~Y=^Y=oj<6bqo$N65D7S{{w^_`LMb!i2U#%h6ddqAPo)-U@ zD{}7+(y9$Z4(e8e^nk(QkhoDNc5N5#@YPV{?{=b=QKQ(`(sD07dmK#NViV)MiAt}! z?;f_mD-$MmCNZ#3iu{PwxgcHrD~f0gle8=E$p)7VC|0j#S>wQmy(+#`Erp`J-9g2b zZ>Bhs#omjVX({Uoy%G%#u1#%o&f@sT0pg_nHsxeB_1&sAT<=IL&JJegi!Wea#Gq`I zEv~uu6SU(t%SD}SE~C^lK$A<^od_mHc5>G|<&Km(?|dBLNLEd^o0Jb}*tlLb=g}ZAZT)rLU)wZZ7(4FeE*>GGJ1p zx^}@}{-x1mf$hLbiyelJaYmn-dZJfwgh#W35F2c!%VdVtaeSN(a6*2bz{H!eN#(ddKh?x1_2CZRQu7oN>(0TJ~pMDHT?;QMfK9VJI2yXZ6&8`avRc} zWVDJDjntq{DC{XJu`!0ehevt?M}poQU+3Fi*A!vgcB>Lone*(PdCQ@h ziv&+U7=LP|{12sSuv6>Lz}n=;Lif(ML|k1x2P z1QuUfJ(1@Bxm=_S&?)uV`G(n2dsdtwEYnZjDRB=UIG4kK7;iuau&3XBuwO5Qui|SJ zHo7ZBiJlCt$Ux_v7-9zCtN9|h;L*jnGAP23R5NghEJcpo_OY_86ZM-BZ@Dxfic$De z>+;0~P3_loNzWMe2K%T+QSO!HU=L|D_jMpe31ITe=g`|XCpozUVrL1PMY_QW=F2PR z1nwP<_+4~|DYsF`BGU|aF1~OY<|OWE)HmuMC+CUvOS%r@-he~RqTWHtRIOr5&~KY? 
z>1F8C;n1L1=exz3VaAN!%J?~(8B&AynqRUwmPVDCjwwno^M{l4bBqxJlDo1-Q^y89 zsh0`mJzW|NA?n9Z2Z*4%CGdB0Wc}~a`#H2o8#vcju;Kj~b<^%->^n9%>AUBGLdls< zI)(?Zv@ctY`gLK2P;}Pdu$%!QihD;(DR#1>558f95c|ld9AR7E#$4(3C@+rg&$HDr zdB-|9W{H{&%r3J-1vu@o!@AEe7v&eOf1q9i=S~6zu2TS<89z6Am;s@HqJLsS)5#Rp zUrt(%apnap&9@yL?}_uzRC@Zr%f+-|gda{KOtz&E)FUrh{SK2a_3NGua;K3$j1Cxo zd5;6>)WsFWf4%GHq@#cE0DOc@;wvPxf@wpddKv@$O-YL~8`s(_6cU~L1+X4xnUH&W zMi?$trh-|_&KdVQvLB#5d)dYk!#Di^-7GqlpWdb2vXTzhKWek=ajRDtE611+-Sd%s zt8O6^6<`K3KhmN60y2)`|33ETBqlC34T%;GgP?`rWLcR1XL0jGLe78x)h?T#H zCWl1gjTHJPj(iBkTq)L1J_*hS$}mrj^XDGf)`vv$@++iR`}woX7nqC-v4T53L`ru@ zPx5$qRV(nv*$3upMa4?DcdTR15v-3;={#(PUyln)<AdF z!H68M0jkKnpS?gpoRg>(pxa9`^gVt+pBDfKzz= zhf=~E~$s6D>bd8&9m~;!L;9{zIZuJTITrD_P~yNm70`P%d}CY zGS@zWvwPvF1BzDYP<^ptdYt9!gK}@O_*(6o>hy0vY%gd7&ujA#$LNEt{H8E?YAb##u&s|3~tQ$?BKYVR4%(3yT}upBK=f~J{=N;MH3b9YHW4RXLqrJilIH=HRp?= zY$Bx5!Vq(qoOe@?6teS&Mp~`rKqZv)1^oDy2DH%>7<1u#W=ehbR z1jI(s`S;yG1GfNKQ!-ok_%3UpPpB`A+O`PGVMl1t2?HhluWPiPe?S8WXA{b(7SHW$TP4^Kk81^K8a&#iv7orro+~Dy`AliI>Y4?houI&nTX2W9Em-%x0pc*&t|9Z&T43Ebxh`@>@YGn@XF@c zA}m|;sb*a;;!9JFA0Q;knMayH-ZH>kbhkE}lAf3=J6ZlLTiuOAVM>Dic)!nsW{f6O0KRMsW?)IP#=|~HcgIFMhE00jhpOE!2 zQcq@RSdo?94n42Z4b0{|arWCfj#fDuDG=`|m=tHLA$(-O7R7ezEPCd42szZrz(tgVis_tMq_C06t%#=_54 zm7fLylbN|*x=m_(GD)HSu;9G%@ts`-dmZg{yj+I?QDU+ee1TjW-f=7I6BlZB{Ac8{ z;MvKm@q$-s7gB^W$fA21XT7miEn%tbP2|u@ysS)*9c39y#xX-A`q6?MXYj3UG1?28 z`T8)Mf%nY}H{rg`&&RgeNh7HwVcMHgjghH)!Tu*zo1hr3skkzuQ~MmFH}n40f-~Iq z^+_k7HaK|L(SJ69eF<=-izrY?p)&$J*;Y4^C(;Jtvfm&e4KyaGkp@4x($9%-*HCu5 zBLW!u7921azZjA4>31UMRHk9xrJ+h$^pA2)!%JFO9!WadbvQ&SPbofgI&CO3p4RDD(u~R; zoJGMUEZ6bDSXr@!SJln#`s~(B8Z>scgO58s1`68+ky!Y2^ZiL2{askB3*Vl(8>>fA z1LN!&08i9-$b;@D_jS=@rIdufpJ&FnBIQq6#ad*6blLk6S@`Y9s+EdT;)7*}s3|t} zoT5pIvlqW1HSB+smr)qu8y6$vk(DRTxkzX1Uw0mnRWkFnvPQ!7D>Ko|^Y&TKnXNhx z$Q5Zmf}R^z4Cg2Ng{N2h>kFgOfKSzT2iQ98xt~`@ZV~FR%Nd%@nTGVQhE;YyE`P%J zv~1(rp7?7+i$|h?=(C%)r@1sL zq9;XHk=;gJDXz}Xq}M(D8&yt|w_~g)2vhcfM2Nh&tmE7FEKI`KzL6|P=H7qvi+KrR zUblf|1(#*PhXy*zA$uF`WCVG6Kf}9{^(Yix;TjcO@yyWblV>zFu7cPRL=aA|Sr;;OxiEen0WYJBS$0J&vI_xiUW-Oi(8thqRzx z<8E7aH{IW^#nportr6UVV#M0ee{k5MBBFByUX>oNgyX-n5`UOc!b_|A+}IRg)_+vw zUmrnc1i(U~;`p3@dgNEn`{jfGpXbFlZ{p*>3t*G#Y#%5QPjj2rf*XNi77;kz4vDebP9sn1B2vGnK5VcJqe_im~ z{R94@hg$zgtgo-WBO}HUU_FRGzJ}6(|K1n2A4qhJ2H7~g3RuA<{qI#n*0BNTdDlnr zzXJiehn?0JnWKmNM%m!ef1?el?9v}}eO`$9T(f~h#aQ%QaIEDI59WVYd&mGX z?=ho&`Xl$*b%IpoG;ew~mNddie8=j&;H`g+*cGemy7P*U zmDIOI&gN6Kz$J)d{})c}zyna)(U)xc1Eub-YY50@l3;avJ$9W2&)oy)RsI{d2uMno z$i0OX_CqHuG9U`xm7Ge>th3p>e$f~CdBbLl`azJI+w1?n9>RtKSR#_#Z$Fd;NSxqX z`LY9)BU{2>hpO)DF>SUw|K3L2Jr~8rX7QM+`~L)J-#>Q25S_*cVdJAeBp2d_&w&*g zA06oHe|r%5W!=BZ`iBpNfGDyue4Xrp>c1lJ590&rFhK3pNhkXK55E6i%kDKmba^N5 z_x#WJ)qfV?{tWOTn0P4vPO1Ix-9eaG0A~OHTcR@N;`J-4K;9vq91Ljwp&LLT6c!dT z6ffH@lwT?oQUmB&B8AR(@E>Z!Tmcc2r)V>`MH;Bo{Db`;9~VUsQf$^lRF{FU_`m=0 zlPw^+09D)Z{uz7@VZee*WF`s8BiT|Ikp8Pf_yCSMu9?F5>wB!%h=L)|9-``>R0)>-zTj|}pf39ZH2GQSbHg{Y0mRVL9#p~vOH4lghQ2)0ZQw5^_tU|>P zpmHF7_OJP(WrajT#5=U&g0#r=*IK$Lkc@c~39S)-Xg9TopMv}34@MoN7?+=@ejjYB zKd7GUK2e#{_zuti-W-gAHpCuIQ;DH>5KGIu^Yv}qS=Ceq>T(?395W^%kl8vxN=&LN z-6an$Fhl4*|fvew>wz_T@L&ibFKTOHh-FDd^MZS^NLmUF)DX})8#@*fD ztA0Ot8H9C9hSs&+wJ`a<%6=J>ABlsv)YsFE#tvu!i&?8<|Hxa`2U;GFzC1iqEis(H zan9Eb6k3zIK%g=h&c*iLEoUM6&cvtq_c)ev@Q(fxTgYQ9{gt#g@qF{) ztrz7fM9BHHc_>oWF>S~S?wYM0b4H~jcvr}1Nq=X1VzHZ5IFgZZXg^ZU53DGUA!;CI zv=pCmXfk%xFoB^uew58Oa>UKcqmyxnb%uo|2%rrlcxjRZsilzrLSol5#5E$;r7~8d zKj;ofVBf0K2nEwnpC_fGO+!Asyf;^EqvXaT)f=H%r!Qr*g9r{-l@8x2Yiw&&4XftR zRTnipSNYD_`l$frZNEz1{tNYs`jCBqDzG|M#~z!G-(Wt1cn}aP4-9_2ai9&3pdITR z^HdyB%q721NhjjM_(7#LLd>a2Lrus2$~7 
zx1P$mTLE$<#nu6Z_By*eMW%LN9vmT!z3|4pyfFomz0n%VtL48X5<_~@Tu`*kv~%%I z7!vp=YoV|~zRkuhdlx3ewqwMkSQU8Kr#SEm_c{c*-tXL;2gPu&Q+e-3PB2@vz2~Q8 z?_)MO?XH%y4c8;q`_8hSj5SJCo@TGn|72jd8cj6odVm2a?j})5cXA0A>6=kpQz`u| zevn5Rz3*pYrQx}yXQ$nH+o@ zKq0W0vNJwVG|iBa6*fKNK}$Itm#gc;YKib2nKu9_W2pJ`_vCEr5RjlBK%D_r$EP!& zg;n0HUTI!PJoaK3$MZ9sk)cs9JRM9i1HCEzWXb@2Jw@;D+WwHPrZD?lDDcY9`>BH- zLj3QtE*U=fldG=e4e9fx8|p}0_#-{?lTRFDK!O30)(B;QI_Vhto7RA;L%EdoDVnaE zf5|!wG3JS3&xhteN%fPO5D?(O5mAe~+y1FGK{>dKS%A`d{hvzf?|kX+g-vBCS|VX_ zTWRfT>n%&M|0O_fAZ36*i-6O7{~|328HGbol(@FJV>~*Es(g|!W(;%rACC@(Q7sOHWp9}nsasYku010cK{)gI& zFTa$a1Ki2;KkwDGf>dVN_XRDiJLoa$zg3wr8larVy;u4dth-#0Y%_q!tVCE5X_!VReJPelKrzPnTb=*xv;PyXzxxTH@uKp*}8qK|dhCFGwM z{_5(v1j7mdBkDiFxPJ*oUn$j}iv_?~zXU_?Kf$<++c0d_|FMntumCW+{u7M;OE8q8 zPyTd%05DL30Wj?U6ATytMmE8M%RkgWGyuSe`!6t5FR4KXD<%8~3=j$HlH;}ivZjkd zfSjwQw^#mYZ~zwyX&TU(FEsuOa{0@$5v{@cUm+o&vT-#LFHvrQhsU3A zUPI)StAT5Q1)BbR&NYZB0kwL$9BPsvxg4Dk0V5otyt08S(c`vcTyQW!PO)u*>4q99 zhdD^_uEk3u#Zv!2c?Q4=pck>Cd5oXa{;ioS&~{0<<@^VxjMdd$N|#$(m9rk`99yGv ztrZgFSbf)TfB3x+5DpOh{g0mcYoXsI2tq~=pck26rR-l%^q*yc?g1#e{U!C8^8eoC zPg%rTti2TM&AMcxKS}(Ml>nHQcel2O;Y!KJQ{7^bGm&)Ok_*UYwU4&=H&=U! z%>VL&gKBE;2brdi!)d6M553u(Dqh{skc%T%P1NkQ^3(e65d$p>gGJgAY8M$Q*oHFO zgDNq0&-2}<-uY^n*yWx%b*Wvg0Vt5+p?FLzy^{`=`)!u2eL(F8jD|yX8>|!MZ;vcx zJI}xF`YyE(@JCV>=RaDs8$j%_veqzO3J<^k#Ry%@ zHx3aUI-Rn2Z(VWGA`CI4wW@fxQg=s9m>BgtdLY0d93~%=A>73q#b0hHJR?Kwn9GpRU8P1B4ZVd7$*BAtPL#lJXpoWt zl@#w}%tCy%0#H09X+|-wk`#|^R|;~9&d5>6nV`IkK7&gCmHKx zzJIOL9X5vW`c2Sp-gQj7W+b-Xg)Cfr^&MoK7V9&YYLMEU#IHAmWmBmsMNac@ke^28 z7oYQVnIU1J@Idy++Xry1wD!7YX7E>%9EyUCc`e1=2_Dl(l^VBjI;Q2Hcpgz>-w6XA z2T2g5ySg=yV7iPr#y5kPe#H-6tw_&t3WE74GQ;*F*DtLMgz=LZ(xbRYWtu(lVE&+@ zVVut_I*wA4c5QuMHB5@rNq|O;-(;Zyg!n&uA!}6cT{u%PaaOnSe$@KKCIIFZKU~WF z)Qi;CE4c^7-(m^HH`r*wr)8iHuI$j)u`e-}MD@NKobi6D&KkrDWgk;Zip)?dc1rN} z^|9qXu+^8hnQ{yu2zS(^rY|<9X|7JZkK#(m?#*rA3fOGBBQJd z{iL5%9TH-AdnL)MghOy=1jZ^Cs=u*}Pdkxc-poQT6nGYdXEQ6&aA(a#8P>>5s{sAn zA64k89`f8b&abhIYY68io#yHt%G0xG&yn(P}QQUsh)GLAwSo`yeH0h zLmsZ$`@@E7cCITdwSy#)<3tp`;Fqrooelf(H(XOLV$6KQtxd{}3J2_6jX4x@=oA~d zH75qWQVx*8l`RM5IU+=f3Nw+#?LFsR<_!X*#8HX`IU+|!J}@IQ=&~QUe&0uWe^VRX z+a#S4eK;@S^-uaeAmo2X2MV4gBLYaAu)>DIG%Ffz+lhF?cdwbC&g~hoF zM(_=3Nr%d>U!TjWPiP!AE~W+25cgDzlWYjs{y|bMfp~P+4C^-ZpS8Rn;^6}tx@ls6 zuDGU<+hLgwMYYLP@0|vZ?@+UCp3Ae)cPhK(Ni}R#X8AF8JmJgQuQFl%{K6w9L$mly zq}e4)bx1)mjnqolXo2cN)+SHyQ}+pJq*PM2lgNy3x;`DBvx0Ekc_W3<_KO@kz1InF z-c2fe2FCpoxYK8stf#IHNm2#F1YeBk=_ICBiwkG!Ze$oGlukKFhd7>>3bhNHRK;{& z2Y)O&KYN4UrnpX_9Q;;SCB?VECF}jm2_ttohKtumWR7yO423yeB|bAmb9Xwan*;P_ zlO$E1-_u80odd%*cUX^4S9obny<-%IW22vdsN$lN-f%*z8MER{d<|6wpTC-qtUJ-) zePVFD72t8l2;_)KP)4Y@RfHX`vPckatu~sBym1r&E}EaA88~}MIR7%uuKZH9Ge zq|!JG>RP|y;=l>1>cgw}C~2nu$1h6WU>2R24F+9Z@K zdOzn)_qrN(S8y6GG3)9+&KbH%jb1II5cd6N1Y znT4u$a#}dYJMU!_cYO8$)u723ZJh}3b{?H5nHX5qKCV2C-dg4d@mK3+2^^t3A2Rko z5R2P*>bWj257j{%;8fqLld_rnkQ5SQt{*&|;ES$wyzj(ohAKkVGIh2%d1w;_>o}gz zTZ^THu3`2!Qqt{BlSIiZNA%BR+4ny>n<}0wo2h+6j3b%TV8+#_v=<^i_B zCE`yO%al^k?No->fqwJQ>&U6nJkQ~h@B5)Z(xRPdzu}-=M$#y-Hh)gZwt3D4IQ1_h zw5WsGy^zqbpYR4*v!~c9t<&kn5q3C_OyXP;PHH+oH+hvLUG?T&gE^NNEhKlP zc&F6Z$`xtg$z*4vmBJS%lXojC^m7C)1*3zr>H2UFT+;}vMzu0JP3I_1NUT&Pu_{d!kYS*zPTyAzPK5zIqe;Q-{3A~Qv_t7S&M5uu5 zEI=K2ywr|*Ob#IdY@Z+F0O#&+bA8X;9?>_NhlgM2_jiTT@s3-}N@td6y)FXi($h;# zbf)GNtH~p>rN_u?XS%A~8-367%#E(PS3#{cT}?1gv1U~pB80-;h|n)f%TpdF7&9)Oh{ryIh<+!=Agf6(fp_Cvx*^m+ zt(aJP_Hkd$2=Bte84^pLwjzNUHpv{aHzu?5U~fj-waY51hxnU;8`LcybFXUm*m z7IAK5Z?bKpaE+;KtDi*OFzyU#%*ma9gWaUO`?Fn@lx(Hs!5dMV0lch!enRpNZqbqT zjffE*B|epzRBin>$wuO`8`puKv39c#GT(wq(TIVC`2L6KNDzfi^Z4-$+MsA`yAHXn; 
zEwYF(Le=fY0b=G;Jn7tC+1NkBV^TUGzR==L|6PKuVT2$;9W?w0qCc|pei>aV-437D zYE1r|=dW2?XJ@d&&X85Q2xD`_?Pc8@7@&228~Z@@Nh`6|Sp9;oQVO0)^g-Ufs$B(+ z%F!x`%w|68)1igFD3;UwAUu2%=_E<{?kqjnl+F%bh9J$#smcX$1dl8QS>>ybrg5*R z{CL%9;?7^S#>Os43eLD6(Vm0=r{bIzsi)Xfo84!M;8TnkIG1T|#Pgd9^ky$+uBgQ8dOOXJAk7TevWWqaGkq{JHGiITXU zi!;?uDVVpioXtn+j}|q{=04xuv#P8VZR^{mx#2h+IF~YM4%2mo2_~gb!Akuk6_b3I zvf`m%~SzcO_^gigUbP%$bWf1#|8ep64Zxwo^kWAht$ zU7T)rtD0c>o$NY4xsBsa(i*WDs~w=|dc{G~!y<2cEEt*j@I6zwj-)8FRtycd{oF(2_AxB7EZpE17|sc#n90_yX_s zplm0L-W=&=rhy2fYN?Z3&^C;Sa64%K4z%eKk$Ivm%7{FRLHcv!vJDwtR!!>q|NG5lnoVcq3lc?)Lf@w=5gW<`GrLggA^2%*J^hhqbd93DG z%yR}S(I*2JiZi3YBe@4}((de2v3%2(;9p?HXBJKn{^dX z5DQIe9MFCLP1DD#D?ZCw)xMP`(m^)m0eu7U?lGN_j%h|m~8^(q%>gGs96ngrFV+t@Cga*(<-q7syZFOABJ%MiXE1M}QGat0!T9?x>EzxWF zbkoSkeUU~k9k|CS=pG7AvG^lV0n)q7SlyOge)%i+u?cL>8rvaI$Zi_7&go%Rsl#Rn zi^3U2b}0PRFfn@_KlxUKW9p+bQ;(hNmK$@^!5rN@tq^JhI8 zHYQ?q$a*T?YTl~N=PNhwHxPw0V6YJ?Bu$)(0(8knj62(eB~SU+^tSg(SNNA%>4=eW zN(GEY8lgW}AB_L-VLs9r#A;rraHKZ zDa3M^1NyQU;NVcnAk;_;b8poRQtz&G!icJV&8*&1N6(`R(sALSKYD^{rT}{I+~sZg z9ZD9e#_JXcg+}>^%E7HyJV)nMNFByE^u!H5=h^a6Vc`i#%sMxL-=i_B(FtJ;uAu_bHpzYA^k zh!A*590ZK7Fpg0xj49^ueT$xdf|4!d9L$>$kbVwtd1?6RsQud=1VaKQ+2QQoY03%U zQ1VD0kL9l`g5{!;;LWniTAUR>j^o$K5k65J!0CX&20|l+%>f4n(nP?1f?r$=G$skQ zhxWivKL1X111(SC19xu6S>y)nc%EvVlju>O;{t+vGQa>o)?PFa(hV*Yy1HWp3&QtmamcYV!5YM(ka|{W;m?I;~3Eo zXYHX}CDb(;IguN_y(#F-S!bS!!mB}HVImt!=3=|t*SG-wvwGOrml|^;GVdK-zIR+& zzB7t=(9Ihv5Tws4FIRqjtn>^E6;|47#f~&1%Abn1I@sbn>fP&t#_aDs(7hCTGDwXQ zsUycbJDIZD9=&8&yG?zfqYfiC?2Ptuu4k2JxzF_ zks=^XZt|5w6EAoY|$i$wBoylLf_1rLf~M4@GzZSaA;Ii?U(2 zs?^ew=AF2)^UtzEREhN}jJ(>3q>_sng+zB_Orj$n7wTA)J#_n& z*oBii;jozr;wpGW4<;v2J;Kv*Qzt4WwT-`=ltl-Zoj`8~r-O~wsiSd7KIUf_%rnT} z`;!rPb=$LgYX4P6*?RHM@&#{s3t7Sc1~t4As5SAdzZH)}a|=mE<-hI-Slh4{$P|-3 z%3^$L?d9~An`DXAv_zWzKKuE8qRIBt!z)9TG^T_5a(mT>Po1zWkEQTI1qW}B2;-zi zep^Lf?oKJ|A{KI&Syd*pNw%?QFw``3#l9rShPWiXb_QBvC`Li!V;JLeyZK)DI{jRY zqcTkD4-w{k!_Sv&>AyN&z~eH$kO=0T47H)RI+~iOXmHB*dIedd)?|*esF>-tE_9UR zG5a=9YkmO@cPPixt?>TrZ8E9K?I$Zv(o{1^%MlD2yXmbi`HA_mIZ@*i*-G^t;%7fI zBWGX!E!5;5QQY;?!jH?0yoEN!7jQ?D&WCR2^(mW(h`}>5@~LVrxpMMKnP2Ygs0x|R z<<2^++(wJT!JBzl#bj#IV!ncFw?#m?A91?YI zAb6A7O0%q#?fiBkQxXBkF@=n~LL<<^Wdc@;(+|FacZ^>5SRB_**^f_~Ae{UqM|+k* zu8LYa)$GUgF)uUhYNb-~o4H)eu?^-b+nT^y=EHSr zecG-AXR^$Gp_Tb6)LB9Mbz(r}qDotAQ8~|5tT6oaBoV11ts<((to8>pGpDGla4HLB z!Ov>cQy{6PK$`T%A=~!BTn@L9OMX0(?J(Ckc(sOUpY3=mn9>P1$Rct4)Iw;|5eF$yP$_Hp|IbXob9{#lolNf4Ak5$%hjnVu7TC-SCo{-N!l;b z9I@-)d-5H+47VO+tL0a+8SWN{n$hOSe(ZKPATDv#C@)?t1n$$9{AgOfs2hM)v}D$Y(H5Vm92VlKdLx;a+zWO2^mK(D9^2Rm3N%JF_Vd;Yub+?N0^V}4RH)q zIP>%14Lk_v%KcoWUE6`KVfRJ@^Bd!QQcr9?n(4!1^U4Z*y2QrOaC_Mbq6cLz3U=da zr>|sV=y?+-f%!JY61C>>Bg$8ag0jjb#Rr?1p$2tk2b^YHwAonsFt};R3HtvkbjTNk z!E#epZyNEXr!prkr)yPCO~Yx5)`(j(&%w-9p1wDa=U$Eg6Su(K4TfqMR<=gHn=u;~ zpRJ8uau_uPd1l%x&2$|;U8y|g{L&eL`hOjqZ6klUwy==VA)EPd zcaGD{QF}G6o3=3)OQCe_!WqJtFa9oe_ZCi!shAFKV8i4WmxNR4SE5?m0^<(vLY|Z3 z=dRV!_&T&&EWf~6#Yn}HTV80%x1e|!rNrf#N4|Kj^KP&-K|dr%JLrky3^jWVjHf z9+h7Xv3ht<6-nFdo>nvlzaj#;!%j)VP%GeZ3y1vrL(Jhew~U{XaspVF>6^VSs`(HXm5uzfSprv zbK?@VJ^1}d%$t!4f`agnJHorweeidu1f8)ukJ*q&s2p+0(XWaP{JV+R)&E94*bfk$ z?>qL>8zD>sfb>N9+{bNhO^oQ!>hhTD__!{#xvf?>UNdZplz+vLOz8%>D9W4*${am{V-WQ z?B;q5db*3O%)M>A^1L1Q;O}9R+FZME4MQawyjnOvD8whiItjmrl$(#@lkndpXwy~6 za9|M@@-E!Z1Q2Fa+3KvQgyN_NPtyev3y$jF4s+<_z!4>Eidr7*F4PJy`Yz@VM@t-W ztdtc{Xh6|(7o=ExiF7;^6UCl|;*%t#U5E4v!3-G1$?Tx?kcbs-?_G#3G{_Ad9v_ua zzGeF;S0&=%vCnQZ zWwmbvr1QKOT;$64KDr_4?Jo=OE>S*k}-? 
z`b{S_P0NO=!4PT{+~>0FrsewGY$yTv`Kgd2^EPZ_H|je@hi zqGQ;RSx4V#JHAgOOluEtD~NPFG(I&Fi?J$SQNK;+7QmOD4!qdvMGlqg7^IgvK9Us_ z62fh6!5zoUA3b-tm*m<|dWsU_PP*uluRZsW!j9dQ@3^YvcH_tBwy!t2-4#JjMK(~* zkglc&uQ7@QfB>mvs}6XT&ezy7b!6m&GR2fB(q|n{uXkFx({W9_GlK9l0x052!Ds0% zvc(%P`?p5|k@;M*UG);3YEvk%@?I}Sg^f<*>C5}Zy!31u#%bZvY?mc%SUO@vSy*3w zum=vt91e$DBu`j2VGc)a#H)>ipsVlVnB`oK%B0~jP1%H8k!2exE}m%TA2+HdZ!SzH zD~mN-s=4a>k&cIHI!OFjciP@Jh^Dxk_|an@GG@XsQ!gpjsGK>udHC_T>~l>kw&46I z+s(k)^{gxXAZ2QzG3(wKT;jtlGZ|rMki5Bk{BZ80w=AT*UmNP`>OsI*yn?YTa zVe3Jpx-hQ2TW-yL2#hViZ#o{h5DJgal9iZ_X&6>IM0UWl7?<}5d-Y-pLT!=ZU@Q7b zqy2UJyNrS76#04MBjqmf5F(uo+8%BO^Hf>8`D_!lwn4;pbqn#8A3t{Zm!_6Ryk_l; zY3RIs%1pU%qBE9sKok5AoVG;@VohJPZnVc4b&-#AR%O{#%1_V{-P;=NBEhie6WT$& zrAkOWJor2upaITj*Aef_>F+IDw?H;2B>gI%dTfWMyLg%~QGZ$QSAU!XE!5G&_<1=w z)FGIKh3K+6D#6)nL?SlpGRTWI#B^t?;HrRLfP}DCT4Vk^g+=8D8&%byqf()x_%p6E z^Yu&oNX7JedOoeXpeAeUnrnp53U@v0Im}R2;7DXHiDw0u>C7)awjR`rxR|>tN3rnb zM&YGW1xHWHhXwFUVr*qnn|F`odlXCDWgoja-}x4v)+DNhCR&!!o!aQmg)1!@b<By zA0roYwn?}KSK@!(7UExm$7je`oUO2^m&c-IRbMo=={DAhW$x7w^;!(0Gt^QN{~U!m z;Lw0yiHGfy#{7O8R#yLPFhWO7tfC2cFo1(Sn5iqd$1@o}CmNQ?SCu zO%fX1yEybEcfoM|yq&6Uv%<$|B+fgef=2JoIVXx|nHR$-l4_&Q5K@`ItBJxF8IMv} zDrvE6`3pg~R*;10IxEaA~ZyNaA%o$}0x z#ci>k==9gIZCsm8A-4<58gx9{OLyN3g^|?^#1c{j%iO~!C z@iWJw>v;4UDl^}#uE$T)FiYR*A&!^b$Gvu_WD(0ojoAyHtr(0}6ymIyenBT&V3iNt zlejw%XX+dl|*Ln33fg zh+jB3QLhI{Mq(>_j2$cc&08j zh1p`9dUv5FS2|=8&xVbPg`)>2jobZ{pTtlksYKzPf5-^f>BKRR&pVC#a_NnBnL~3l zLpGHI?(I|S7qP@^PT6)YmV;+@&0ZxovN^2h(l7}%ys8fnwv>+=~I!t#@8idCS+UeY*Ac zb8O!mQ)!nkcsX}-K6C2n8r-^}6vz~^OBjTho8j*Aa$ZE~l-(s*L>~HwIbCm0e3{Za zGgR-XXm6QdqSZgMN>(f1qaJz~nAuE~=cA9^m=GuAQ)ebo*y+Y%EoTfM$PqCJ8Ry{W z>sig@{uz6>EWP|Xpz~?%Azy8pOrO)2H--!jb3LzaNS=T_rL%5$D`46~U>Vebgu;vGyTov>vct%jD}1PbP`-g|6JV(LM*e$cQ-_;D=k zOWVg%&{jVo8|=ERgmJsiXm(bTq3yVk-BWd~jo9~fV&1AcXdR2EqNon1r{Rb8nbO^t z`y1d|KP}G}PRX4~TB&9p_%b)3YIV4=)v0NVs?Zh0)8IWW_dOM{4B@s#9-Fy#-Yr$3 zwNmPFT9ZqseRvzU=ql(EUE7jj?xgD9-TL~-`{859DL2vn>kJF=_#~YL4;75w#<7WM zqMS86wC}mRI*SH7kv%ml2&8RS_Ytp*_pUe(_K9-0B9P1W=P!a15i3PM_-nO>$UTe> zO6@#zV$yFm5J)nBYa`oknU$OrF<~`2C*LhgYwFAr(BYgg8nr%)K$8yoF}pw266}ys z6SadpxkcG(J3tvL8gf*6kcB>fdudDS?vbs|JQ4Z2ef&EBU32S+MoECJSX6>eK3CKn0xp%tT=>Ts4CiM-EDLhOo9}wYO97+&Ovs4KG(Oe{$u)@6&jgm z*M#>(JCWNu?f&uzg;uRdo@kJMjCGXrTfbhg&*htEoClw1#BmZ+6c^a$joANv(v#E+ z$-1wUE(k@m>)%2zC%3ukF{d_>lHfto*)-dpeE|abBr+ zjw^RKo%+28js^FOAgfTp(3DRQ@dJzNU49#LVtwAb@tgB*cgy#(0kqN;G1!7_?cEas z4}Q1T$GvhI3&;NGN|S1tZ!*Cf&&GWQ?J?!H?)~{LrdwSOxJ4-Ib*rDcn3V%p*0S(sw1Q2wg&aBtwO-EcFOKYV@G$|qm(1M#H31LdTIs`kfW zL30+b>~Kd*-JMLz*nmfeIBn5n&Xi*mY2;Z)d5rt^F#zBMm?v>Bx>NtIU$|55)ZlO3NWYY3zcmeX>A+MzfIVl?N@71gF>)>gtOFw z3om0V?;KTgBzRfnC`I-C!Fuq z{Z&gQ(rLU}bU?xkDT)Q^p4Irmd$4S^9%_I%%SHUmhH!j>SzX%M_~Oo)@HtLq49)8L zLQyLZE6Cq1h+kJ&;<;g z6-h4HKCrMg-V zqunlBu0771?tbOc^fCn%NSDn~u|OZpqh{nHHs4Cj@_-m}8ciJd+)Cfgn>qaD0%WqO`7r&0RHyxuDW zwTKkiiniaes6|`nG?lU5Erlqzh($5;@EIP;EF)qn=Fjy1=V^qOIT{5O(CM&bVXRVR zqi133DssZy1f7l76r%HWU0@JuQHa8Tr`31+F{mB__WPpU{d^1{{^d(HAl?-)dxsOz z&tz2cEmCxTOETqA(6n&MXXd{;-;9eNtxvFO3EM$yo_#mywsQK{yl# z4>C5LccIkx*<6v|wpJe!Bk{_1E|498CtzhH`tL`UU;CH*8!vx99GRym%GHkJRXz)E zn7yt>!hR?|Qw0i`1p&&BoYtbqmL{;zL zEgGMA zX`)Q2Rkz}NyS3ltCSV`JMi9>kCU{j;ty`Y(-NU~mT!_Qx+jMs-SJp;?24Zt}!b8yP zT&tg-Q2$+plttfs7LQ)Xn67EPe3G_D{r%apa{WcSKKNqw1K3%$rll2D`h126mh+#R z`nqan=?Rij8*V8y0oLaV`LuX#lKT%6cPz6n}(>h~*&EZ3~u9}Ox zhv;OI%b~nHW-1xi`jr&rUsub3HlEEOmX1F5hi(tK#C(@P7E-P$)~Q?-grXm^9=@#>H$q$M3H=dy-4G&N5yNO?+z@orHyba)=62ND- ze2OU*C|WLes$rBn!kIHZQA3??Xtmq~H{ty>H_1CNSWln@39^gW=hGg~q4n-VZ4;@( zpbd4ji@AHkB=s6j<0jj}ppC;?!{Fh?&7ec3C41@1F6Ds@`*%roLcWu{T;4?X>#LP~ 
zM8Rv#j07BDqo3W$y(m$5@O3n9@cXB0!%B^5sbrW4DwtnefmSx>g4=e5NLsXv$sRj^ zC)Ih%&%UI52a~Uh%=WvrIX?qDwu|sV?|i^vsK+NxIXecKqJ1PtB&>s`;*|O-tv38y z(^J)V_S+jvK}%eHOS(sDHi4Lvp9!rV=D+zO-oPbI+Jvjance3i3D z*C22r@@1KZi*DO{D#)KZsX3ZWU!(w8=D~Z_!@!N)74FQ6L{r4e0W(I78 zsc+H&p_AaZI?RdwXM?$0cghckOG^zGOBA6rl#S$Oeq>>l+GryEYZzH|^!bV)>^$m(uMp+#Qy zTjQv5nsW<)MLs}{1el3F&yFxh4(*&?UVCsTmao4K+Et}klJFWe*lm+4bYWCEO{x<+ zP1~Cx?E_CU)BlhD+`yn;;J=Myt(-iyc$s^fwW3$-x%8YNz)7r9zn~+ml*?+eJygf` zmb4j4b3z%Kyq^Kr@Ddsx65;w{a)j@mUjJ!{51pi-mnLU)c&-AuF*)apxe?OZEL z^Ru=4|7+v@*qPi@00OM;->)6gbVknWYt_I{hb|?WR8kCixbFxTjTGyrp?V4P&H=Gu z7#3(szjJUL=4zMXzlIf8`eR%&^Tmn~uu@d7ksSTH%Vqbu*LtuAwMMUAP6dvT9MF9F zU$Wh*@+_`gJ9dM&%HT|0B|kxctD-34^MGd(MP5k|d+;uPN>&>a=#kK|l;)w%mT9ae zyAZ&}%lBokMLBbX85L@d_GtCMHBC=$<#RQfRdZ9q!9c>*Z?>bKz1@Es5Bk=(rh-wv zm0gmK3>{BM+eVgjv^4`LZXmFg}KZ{NN}r}{koKU$;%vbB^*dZQK?d7~k$ zJWRMLp-BZtK~coxLd(>Hq_eO-_%eBt0ZQyRI8VSW6*tef7IFty<>>f}kK@DcD#C{v z_{b|;W&T`d&Gt|nM|UGCDw8Wn>C88=5WgC}|81;~o@lA0>2vsS6S8}Qs zQqs|(f-3i(w3+hFny`CL*Vys0D@e!E2})4qm!m85@5Sj)q!ATqp!VSLxzv0+7iWUY z_C#->nc^LY^@ zYJx{3>L1e_3E(w#0ui)@8KvrT4Qr2j9oKO4dn7G2h0dA$F`D&^I<5ZpnA=i#$&Z0O77 z`N$+J9)X9zb8Qy5!?i2!`9M&*oVepzL~9p^%4WNAks(|aUD2T@w9+9v^92(KZZM|lGGmXZ4_2C)ckY?O2ue! zJEV}V4Z1bH)wy~x_W1=GHtt=reUC`jpb9y0^kMC1J07EGcfeHaL6%uns@Ya;bhhL0gkLDu-a`quK8I961 z=`c>r!J-y`P=Ks3C=|3(BRpt4CbbO=uqTC2)4b+J;MOUF&o5h@j#zyJ)z)Co6fq`Bdvs+)`oK-4f`@*5V@? z9Od@Lws#-E9Vvq$3=Bq{0sgl#{Pbig0I8L2-sLhn0yHBfDb^p4>*b059)Avgwe$R9 zx+0ipl&FK#`Y&3_BS|k#ISfpT8?>hp_fA)eCNQA%GXv$#b z$H=T7YIpVaXZt2leodEZ>IJ!kMy_0?XB!%>yc$<;K&MIgy`3mt4$ZhopGuvU`__}W z?SCoa{jG~8C$GI@9ml&&;g92M*S8x3(H-B=j|x16CcQ(?>6#vx8qduIRcXM@B3R4Z zcFGZ=K3IHnp{=nP2~^?hQcdKLOvffidoYw)Mp8HL+!UYv5ii6A+@&_MphjMV%Xlo_ zZ%+mL!7G9k=4c@#Gil^CwEoj+D!3_>%bl(pMUNF`JYhEnHlK(F7c;5JDeq0*_wC(v z#zY0Axk3=@kjGzkfArl@W@Q=47ocqX;t+W>R}2V^U7l|B2=!`2MT*}eZIv~JwSX@t z3k;Hniz5Uw8DqUP;BGBlIVSMcb`%s!;4a zxyoCT$4MwY{W1POx>SCM!DpA!lI=?ICL=aj38a5Q4BNA`HM5S!_Ms@w`U!CkCOo*y z%%-Ryxy!@m_947e_u(hd7~?DTSil0)?>q+2eAg8 z|4ta6K%t`@MJ_~is7DAuUr6VH`~G61ZMp+-l{B`JXBW0CkvC&sz%vNLDZ=e>-tq8? 
zILSfo+C{;*S_U?Y5YUr%r+wGd9etqLYukj4?F~$0K zadXs5@b%==CL{He->UyCEFH@JOVe7k|AKrC_J$-xBifXQzKO}u!la876Z^J6rMw*& zcaUCOP>JPf1h1`{=1{P zrpAGgYjacD=CV@CA}l`p2^ufoQO1^>>CrSNGy674bjAR-tx^=##<>`KDhdi{^2egJ*^sq zb&!Q5>GxQ+jjW;oN~WJbt75(9s)eDJ#0#SX(kN8OMT`-hveodFcQle?}QRCqb0MXsg$ajS7<-KF-TD2)?c!v9s#b6==L4Ba5bE1-2OO7=LpL-yJOB=AdBWO`jLm1%oU5{Zle(|Pj%q~bt%*zp)-RL_PV#a zOSn@zrGE{O#J}`RmMwBwgDcTXITx<3qgW9AD5(hx25a?Qw>xr_VIu`J`Th-s{b_EZ zgwARMsQuWW109yki>Fhx=+&MOO{OP@-Oh3{-%tDARC$+@g(I+ z5X#SCYEtglkZL-43}z1tC(q$bBwQLSsY#jGji3a#)wn{?mq*WHMX_3MDN$rWiw9k; zp|vMIIsK-;3GC2e-$6*88lJwx@p3eqBKs}AB}yRAq4u-B!~3#(%q{n-HyL;_mQ~*G zr~!GK8&}7dLhchm*f2k0lXG;A)yb_&6`0Cz3F~^f|12#n8d_LqTJ-m)P?A8R7YX=m z8GJ!{mVbvIHLpmR1%9Dvt;Kv|6Cnsb42JV}?jD z;FDfL0S6iC+5DvL>gprSr+o^Pow6Et86TMc#H+aXz!_Ke1Iw=V*g?c z37VYe)m<|i`Byc?-Q*EP1SoHnB~vGoh3l97xsvXX7Z{qmznvrhCe*jB<*auJU<*o{ zfBo`5t7L$Cw5-&6_#`&VwKof>=trNLYEDDL)ZtE&mll@UF0jwkcxT*%XdL^!3GW1sAPc>;SnfDOz`A*>KeIuc{ z%R{N@3DJ8&`_4%qj6Bec7I3@1-OR_iKb9-W(dm4r0n+UW$opDKco@k@6|Oe7-;(kK zxf3r9B!nFF^&)R63jP{`Vo3p{LFf!)ki&eNGmu9E1$vXDG)hc};0Qsh94Q>vx|HTU zhbWGFdooHgv{T2Ho#oxHGl72O1hMMcVBzkLj=|E;tDzwO_4gi;;-_0GScuqTp{!gd z{4)$S=*F5&+v31kBO7V=jaL#HMNYr(4+scJIs^%-topUw)vUZT(=w&z4F(gq%XEH$ zQfP}iHbiG0qUT4w@Li(6*u+szK?hquwnDhQ@86>Qj+1Z{#G3dL*5g`((1PfO(4}df zi|~8BvjIK^6;62oKtT1bo}>JUOGqTeH?C5E^NI7NC3hy=qG|7T7v+avY+R3Fodg8l ze+(JC)-RM0f6ty6b!gj8yP-BZ`TWQ-Jlxna0));^f3)ahk&;t9MnVV-(BxWChe}ky zlg&TS2e67+$}OD)D|Jz2S)JJ?{PEcoOM+elr>=W_)#|29ff>KkGzj`5ool!?1u)#D zeTmm2MxrlHBA-?Vs((}H7>N^a%Ump<*@;IT%AY@6&0(Mqe9=~jpWM&R&29bT>)dj1 zx8LSp8NcJ3rtwD=Z2Jbbzx-{-o@gK?J>lZ*H4pIn?pHwVO%5fYfZy5?kw~rn$|u5{ zjP-ng)2(8lznTd!!6pI_5K=EMD)+|ski<5AQN2Ea;-*~^MSia^@WZxN@sB45x+`r& z`AsY+L1TO8Z=2FX-M$6trp~Th*>e|LX!3bSYF(@eZN$xTn^cO@6we!|iaq+aus*n- z`{I|V(J{6R4@J(u%br>@{0SXEZTY<{JY|$6hoJV|41`!j02zyEdaTFBTb9)@p6Yk}-U$3Jyh2Vt?Y&p_&FMA$Et@A$ zuRyVS05O&USeO10NYg;0DCq6%km*4@>{&{SE4XBvwGLBIJA1lFw)4%?>YzJihFkE9j^zkq zp06I_rM684p0ze6pp{HW_o+z#AZC^Xs*v?R;TWz;q$!p#Y#_}_#_n+nzr2TM1c)!A zkRR{|5!tapz{e{*B?{Yn?g|4@179s0}d0i^(Z9@h^_+%c8}`WRAR z>E>99ZkIfa+@;O5S3qj~a#eT{{7K?4A#h0>!x0y7WkOzhKmc#cS9?qW@#{Ei4S;Nyz$%I=LHxQW0H%xXJ@MkwCym{#lRVvocN(B4UZ<~1)>0Z{T}7^ zy<%c~@9w$|f=7UXtXk3pFKD-UXK4~EEE3RJ9^5Bk!56Pzw(Kw-JkowH(StOI+=-lh z1~hVdoq?4~z`8o@EyjGaM0{CmYxA;v|J92XWHtMT(tQ%H6s&quA~zi@xVEAKRe%q( z>G{@ZDRtESnesc^XYhG`#VmlJRUp4i z^MHN4#OjsE)QD-*ZEt=`2u;pq$zjXMb6?9*`_zDe68PByr19|Wsa)X1Vov|{Uq1fN z=Kw<^hYZ2WB(6OEBsu0A3Cw(sK52qg=Omk+Gm_CKf?#QuY9p08+jOT1=YuE&++`fU znGQVQWezRI_1*sRfxz5;Z^tD&h*cVEK75sfGv^|}m3fwcW=khiDynO2AA+iti0-xB;Q zlc2Utvn2nr%dQq)($fb9HIO8Yn-q1oSCa!_c!l$TT{S3&vmA{w7J!#w(Y?qbF#(_V zG@Zi~=D#i)sH`!j!izq=<(g8)jo+ zbOw#+{@Sbm@lQ`P05^FJBnc&8Phr5;J<*4@#-_sV)cj`wr21)z0M2?By^-bDFaDJL zUi`on1v#zBuoZq&+}Xe2t^W{TcJ&DJR$U?VBWADtX_MsPa@(v41PRV15wHGl()2|nDGl+XVmnyzhd!!C=rghmK!ns?3NH*34(g8 zvWYk1=OSvGDS;v$&vr8 z+n4G*ixuo@KeT`DX(ak^5wGnvvBbra+b6_oAl3)P1 z?{uhq>tmtu8#g_k5oXaMBMS`C<$3iS=ELge7r{GJ&b_$1bnasqjt`+O2wmb-2NfoskUOX{Ug_LwOVMwgZc4zy=XW^ZZEDrx?WE`~NU(->qoc4p z>f>b~)OQ;XAV?$_h=|bpsvllqm5t}F2y?mE9)9UEmylIV`|Jg~)^^dcRs8v~eIIuX z%pS}pzqS$-Jyk0t^s*%PvWW z?AG=Qnk-%7`P;)R%C3{93DZwb5qEE~7LQ#c81;s*_xeTI==(@^QPrGZl!%oHD$UQ0 zL5OtM(NgsxhQjGa5xtQSds-t1Tr2DlwN#uuiC!7Xh6`2y#!vZS*IJ;4*r<=9?xwQ4 zigQH0P%IkW(p9_bN|<{_dMEi@T5cw%38N8}z3T#@v06Uw4|x3-w2=g09y_vf_#@-c zspW^=EasO5)5;CXG8Z4H?W%S6;x0S$29GM0G**h?58vtr;86|Bht#ni7_L_DMfTO^ zL#X~J@@!7(1ophVn4YC{(bhY@H4-HR3FUf$Zwu43n3B9a5G0Xw{#PJH^{I0OlIn3E2PcZXXLTUhg0-6Zb*nH}P(NKkcDmcZGW z@fCsuToiy2zU(3n?Nh;6mGB3ZW0X_SFy6_M6s8fD8ynFJmYUWew#i(y#qBRqLAh!) 
zu_K&@eF%ZryHWVU@)z^`Ujni8=dTGS_Tf6#Cj?_*sC zm0}6n>vRR3ZLNjo8@g0O^d`UPG$GLupr8h)$J zvt|R&HR@V?lUW-H-7?j)c(WWhhN9C#HLHep8|+4_mNUaveatq7K6*tsxi%%I>VNoi zi1GJgmiR!*VCbutRZWja5T4W^PDY}fQ=V5_E)oenxmNAro3=v46-0hY+bl|pX;xH| zD4(=%)816+>(aLg`v(Tps=u2P7@ivn5P%ufc9Tz)xl$Ax8=pcO2LI{>U|Q1`rUR{Y z{fRdvNa`bWl9{=Xyb#U=L4j9kYA)U`r&iw+RqhlR=+gtZQTT%D8YKUbz+zIdad||Y z8Pun#GZlmRhF?F9y_`01EA=pSIAuz#keLq(60JXLo7-jeLaPQ2%@HGT{v5{qy%{g# zn__o97@iHdIqcd*BB#LlenRGdD@#<0nJXoAcw1DU=As^)i&oft6muW&+TH{Wg0wsX zNzRVrJ@wQlI}B9BC*2Pp3^#Pt9A?p9-aD=%U-c{7Q&90YxRpIC3(fW3g_P zX>8a>{(^J(rJ301vu`^CPzcu-H8>Fwbb@bnbxppL$$gHYv&5>1KB#2($E$b*usV<2 z(Po~qS_V!2#qP z6C>v4LO~iSV%s;$)FYRVljR!t&6!Fm1w1fIzMbU6GcU6VcNb>^kdJBYdsyE(Bej@# zfQhZlI$%5&{m$%jD0>6AIB7KF9ZK?QaIEUnz1EQo+W%*UXanj<0%0v;ZAD+czIX0c z4E&P7ee`6Z*9s+R&x&h9gBNSn<)VbW(rwjPm3et`KaN02hT(8nK%-?oqTp>Jg9*m> z0FhSxC7`e5rTgMyQF=<7a!Q5UD!HpCjBrP#8JL+j|IUG` z!TWl^ShIvS;^-j8x);69EijmEH>Sby%wlpCDbB0AB3?5OxO0YLcc(;;z4NYc(A9{a z1zlkff6m=IxIEXqj{kWKIuDQ7m!b$S5j^dK`TphP`Q7fhzqjO{pzA733|t(Tr1QfL zE|d`q*b*5H<9;V8y#3rfbvKA|{!{UxW_O?e9?!G#4JIU!%Ifh$K7OXg{nBfWbDpKi2s+E%F};}ioJ zi&@|}t06J+-g%IC#rKJsfi;oA)lJgUA8PgGsdEPCK(tA!q9b~$6R!Nmo0Hv%D$%CB zd;!Y*E)!lzBPwWEVm-SvIiJ$e$fv&PmF4Dbpcy0?3kA1Y$SoYLb6clo-&gFwq=aHP z5e12sI&@Tini2%!o?P<%cX`5ELN6+aXqs{CUS%xA1aT- zo@SL{WjH0MI%GCwb2mg+jYc^A5AW&6ujH1z<=L`4&FRkalF?~IAZG3d8jp3y&_|*v zqz^mBUsbF{`xKOXy1F2+hYVn$FUlCdBt_0p#a@)-KSq#8JAO6`p3-!L&ixo`E0ac4Z{% zj6=qog<>`*3R*t|7k_UY2k#i?Ir2f7^nt$LNW`~~anW_AM$o4C!G?x*mK97-Y1d!B zbYThEA85(s!ys-w#aUp64jrt;o+MSQpyt*h)<5m*_|h*JW4*~AnA09uO-Jr3=leQ_ zMKKNb4TX8*f;Ell>gQ<}36&{D-44V`c=RY1eroTLWvvn7%Gp3|zyC1GKQ;eR48K;e z&lMbxJR0U2ofpbnkPEruAx2Exl|`MyDSTF+-=Zlk+wJpP=NYk>IT_xI6sLOh{@O{M z7!H2kTjR6o!yg9+l^>1o=bbLcFXzuZ+S3*`&Vt*@u@*xQE8Gm#^=@{*tLDvE^Hoe% zdeAp=m@v{+A1%hPt$+e;T3s}}#aiDMX-pi zMzD7onan`O;_?>t^=0(oV26lN5i>WYgN9CeVU6LeT+|QgFeTavbM2-1w4Kk3%0Ikh z6v;X~H~$B?;PvD=d+(C&6SaODaU7Oqq_%G5^p*s*i={J?he#N*V54%FuL>j`S$0Jf zu@_@MCVRD^^rkJAtLFo0pGI_W8au3{_ItE3b~_w}Sur!n@CtlClg}qs^KYXvt>EGy;G84}Vz56k;qSdQ5T!AK#3CdjOE=jMLL|AM31Z%~* zw8rlg|5Kb^J=t#Yb)a>rY`B~cT%m`DPzf`fj(;5Aix(Vo71_W~Fbe(YAJ3+r>ij7=T*Mel~zK7a#npH_4uxWGXtCvQ@haf0A_SU(b>tK-1Jxj~0P4Y)_Zgw!>eiSun z0%J+7gVvP>O$a&3Az^~Qgco2<9HazPjUQ(o1!r)uHxNt)`@`2mSn5jfr`ZS0!1G0n-)kChK*O9d98+d>_Eil5 z{pt`-#HLoEVQ3V0BQVJrLftvC67EGg;cHqkv57DQV2sbxTsyqBU-^s|Jjdg*Y6e4) zD2i<#SAxNP6|aanBmXC}^xwXF8P>D(`#xKNkcQb1UU|#&IdvF43#q+jQM^)G2&dV= zH@MP8#v%+6ZNa3)*7P5ErTofjx~79zSJ6Y3`(g4;#k$_Y~oE! 
zZ#$F_JPL>!_}1NhDo%Pu!~eGNUhluPn5zjFS&5mM&=|Krc%1mzH8LBljR^m6)+jD- zgF?zTDQD-_P|y3OZ4L!I9S(8w9|mN%AOh}EI@lh%zQpRP1Rl7v8%sGGNNrH{VMuJ9 zXh?8ZcCa54QhKv_TU((9N&Acqvjj8yj2g<}(-OosLa7y}wdR2f;oJ4Uh7Y3FSiub> z9f^uLUa!tUVc5&n!S>*)SRoAHEMgVkzMe^gb(MI2&oKLc-yGBo)AZnQYVt)g0F2xx zn|QZp3|l9D^zw;&JRefZ*e zzl0hUw5l+$3|x9=4LOIsuq8|wygOz?7od3Q?+LZnN*Z6$hH5m9>%08p7%Bvu+!MTj zOY=5?0Z|M!T)BVbmTRVb0A!xB)dLwPm57bYReXaUje=?Qhn;5{V|xoxy|V%1{Wd6a zy|=Zs>fi|}6SX+%P5EC=Zgl7|a_Opb2od_y$mQZh!D0Ru> z?jab%VZsUr4VrhkgJ0IwN&ok5{$vUaMTJ*3i8kZ`3;Ro!U>+|O!crPfN<|iv=kBsD z|9SE?f|f>g=!aGV5XhUu`5xFu(9^|LmH*{5MOYqX64=XHLY@MRHi`{}sBGjx zy;oyTYTzWJ4o);vn+S45g2+c&!|)j%jNs%P&xgs=l6^rsr7Ui#5)FQ{BAA%Gd@Xkx zo(E)iqXVR$hiodwu9l8nD@S-eKJL1EuS{D>(wC&l2t#Q0$y!1@B!4_L)rK!ySFSA| zJ`L81Aq0KDT4$vX`YVlqX|L2H4GQMxKV|I_RCnfav(jvo|5NPozt;5wbJ~WTS{nod zGdPWUZjW&IEn*a*Y3OjLpwM1b(i@v-i9^DZV=zyud2ph?(xo&f5BA^VA_i}LQ8lLJ zW_%}8@_P59pbmB8fJSY34Cz^{^`rNO`fY87KhN>;^H9xd+3DS5JS{Jca_PV(gX`5g zfu32*^OAX=o_`uUjUA}99;CmH9*lG8Ghj|9x*?O0f-ey5m*BU0+aavQv7y6XGl>o% z--B8BdXnHLHCBz-uwN_ifA2}*AWpDg^~n6YlX%_$!Maq&Uv2sMXK758X+L1QlmNiL z^5F8VrRXjkznRu{s{bCM-Uu*O)1`T6jiAk#jW{2rbK|A<5@0otr|OKt|Es8&$yXzh z3Qm+CJ!bVNfy_?~-I<|L_f9W6iKFd`MJ{6y4Y?(dMEt_%@NQUg%|+2>e(X4of0(Bu zK#sl;ePTr0cGuj*4^Z^mdRCFxd#QJtwO5=mx79q*`EaWJ0-KMt^Pem(*aIxH)DfZN ze=9Bj|5&bzX|qB^2KnUp!k=U&W!jz4T71=41QCJ)z-%F)_iFD|e-*i3-ZL|mDUf!0+XF^q|r1tHf zBta1hIoYyIO5@jG!>BJvN;{x#4|SL`eS=#<-R)oA2>7vB$h}bC-&4yxjU`x%qhk{G z3Wj5{-cB)0l^Z9&3&M<;`W2+(CEE~X=t4nV5ekNSIeyp<&oo^We??$z*vY94dBB?n0-Y? znC6O5+)p)ny@fmlugaOMO`tqi-+1}W3g25<`j&b;jSG2%6n9~uBP~uz%>5#wWzqg{ zg>G~*dLfKsBc&-T{r*ilv*T1IDh2Y@STd%!P6L|E&4aN^NHru=2B}S4!cbTjC5i|y zmTqu}B+o)jN6;9@BbH&-sj(mtof&LE3S&!s&KPle-Ce8Fs{ZZa$$JnfO2kx+VX}0R zK<0%6@7IDZ{1bh$_vxzYqJLL%o6Ygd_R!+|z= zoYg?3%C2bmkSam;Zp6k_gV2^iQ1)|W8tq<#LV15Gp|7jiJHK^fsc2Dv(Q{#I;N@tz zbF*Zm7h4POD^-yGRXjtlC#g~Ok3y9nJM&=*M%i^-;DZRZ^VvxEdRh$@OO!ENr#Fx8 z2~ILwchIM@U?K7dl)*(tNc)jAkiq3+r!YD?cz!L*bekq8R2R;z%Ue{4 zk_rN$&N1gngp$LeFrt(|Pk^k{8>`wZ!XNC9M}7d&RJ*W$eTc`d;(1`}S3OX0P|t!jX{{T}Ub(#Ucs|i{{aaDak~>474eb z=D-#nCpd|TekE2}y3_F?dIa(NauWP0gpr;JDxnn*df<*w{Hg`|EC;$?-vAM(om!a0 zgXfOY9fciFCe7pMZk>7w8nz77SM1qxDL+%3C{`h5ynp!jd8ux>AC zB#~2b6=vV{zEXh(Fe(G>7;G5DX<6s(VhnHv1Hes(@p6gFk$^Qnv_b|=jCCIE__vvq zMr!>GaE&gqqy-AxJ_BF8XqWe}UDV-Stu=~j#L!4QteT#XT04(7xefd_{@hEO%5%sx^d*sINjqNeKv(zT|Drhy#p z%L$N3<7D!NpGs+76+5%94am{tsgO>=Lz63c%*A(k4d&%ovei#Rzr5b6@1GD#lUb=U zl=wB&P#}pahnTg)gMVMhS3oe}+?(I2y|JgTwvsW;cZ?h@js1PASj0riM^ScX<_^!< ztgnyoWy5Mo&}6x($btzm>6X_{0DB#23+VB)6*yy>fFS&1FfXRb&CnuLQN$5g`*f{ z)Zp|>_Fz(tVDF_5V*uGL=&<92|p**&KUd}>Yiuc`kuBn^osjY zh`fC*++(^bIZXQRr4_ZEqx=*LYZOWw{T7yMW+T4Tn^9kalP0NIkf1u4P&pp3qB1zZ zlxwdJ3GpHqPxiOIv8WOPhQJ$KY7WHM_c8*rfgmmpxFF)4rb6A7^DvjoLnXO!ltv(-+4wZu8oe*q`(aYkhR8XJa)x9Jw!Erb&j491sV zH9H)CXHfjLXMsF130R2~`gs2)_m%uZr6sQoT?d%e6F~+|@A-}^7k8Vbp_aOJQWIm8 zoILgO6FbMegf=$S?FttU@TVMP*guWs_Fi918IChyOoSB7>`d>^aqonnNRIWkm|-$^ z4fPKWLw}GUgM^`f0%@d1w)C+=%I5VcIg z$=5&(UmpOv@j3wFT?rUJ6uC}cCdRS|6S%JPy>R%rV2}=Xv zie@xJ)fk>->;l*BF4Ag-W2{c&guv}VddOnMKb**WEoRBY!bo-1@b=Vr9a$^ojL&tC zIsvfck3YKSM|wiTAMXQapZ*xo!s=96TCY``pr)q6Qa5GWi*l5cD}1fiMP9a43_sBL zWWI7ekL}h;ubvC!zD_y7i1NV|pR%R8sO#A>s-b+)lZE)_X)`0yvDZaUa6nFaJl4$} zxJPQX+Jh4n75VM;RK38P?DOG3o=l8~);T0g0yU__CCX!uI*`1ugr9ZY{Ah-H& z{s5?rL?Di?zX&F2KOF9rNP<@rvW4|ZaPHKHpMXTnQRvJYDcWqlxKmdeOxMX0r#0 zp!E70gZ1@97)#~FEB;RzuBq6Ynx}oQ%*UoxBZO}+ND{Gh7Nkb|9AVMTXkVhTqGZ?o zQE6fsT&xIXZe;q`ujtY1{e#)c}aaD0je4YKB8^szSE1$|XG^ts`qKxt9C{*Q4hHS3>9c zk}AE!8JKUt^drRWu@@YAJ5~P6lV*^*PAs$3d6g zFcpheg>WzqkmK-2A|gKIzG%Z*tR8sTZh%RgD-|htUFBHxL#eLI? 
zEtB~g^7CDOA`DXPJ?*d6>j#tT2tic98SGyoQSWe?;cQkhcEbB6cuF_DLpW~n?851R zU~Cx5?SBUxj(N2wh~P<8^>fNDo zdaTMK_?Hypf*D?Dft_b`-e>%e3P$TLBN~CIb#~hYzOD-u`Fhjn^L*6DR3eocG1BYw z6wz`Gg|a?I?s@DR5Q6;Ca8P)rZ}sl9q$vyX$W*AuFF%MwaRyx>I9as6;s9e6eH;jvMU~1 z>mPnxGhDu*-;!MGC+;^g>RiG>SNVxW;h41ky!(R>KXFan+(DAdbI+jg?vC&*bOS9R z7n7=%H`ItiVl_pwTf`kf`&+>dMGkj});h2WB_Hs>JWVVwrmvMaxKtcv%2E_>f4q`4%cMk6Yl=jE)ySV%84`IsxDs zlkyB~runU4hejs-N?ggkwyWfZ!elbolae>0kt|N$3FHe>AIniQbFO+h^p?i>K(=21LBu5&$r@~~4z;p0SJPrA7>2K`-_Cg>4r zu&2<;SDcXmD_2i)0GvzU;#W^+ldaYeq;)EPNC$uNYvJM;^JBUF^&97d>h|qT>Y^)@lhw^>2Z}cIBtUJVF4m_3SYQw^qz$dO<#XxN4H^qYzR^^_VpSUn%2k2mAS_grTJ(*j!w{2Uu((nYxg*YyPUuP5W#^9T)^0SXWok z3eUe+mCDqvV_Ur>H6^A{t+@}K04>pYjN`{)pBY{tE+{qmhSGY6qF>|I^Q z_ZK4REj6ocu6R+UKV2?t?4E#O596#B>p%P7xbxrq|7E+Kcha#dcSvSz?3d$S5!&7B zF9*G>7Ww2=<)zT&4Q7uSLu>A!yUp9)b{kiE#v%YNoC3RA(cMCkbA4?h*1U{#6y5uT z^lQfA=%(jnP`Ks&q9=fEk9T%w>%pV@KEE3_8Mg)Dxw>PC&kN-0U!CTHY;_7W06Zc%jEzcP7eXJ0xUtj2#l#E#F-+$y$y({i|*l*7= zvSt76w_(_rn#5B5aVv?P=l{HsMV~tI$t^r~fl!K8ofq47iK66oCePvW!h6^^?X1pj zc;D`cN)nJzc?DbqkooU8Dxl>*V%yv(VY6s>BHzSnx&85Ea4_0971e{rlqc;t*7GYU z9G2x*n^x$|HXp`O=!yO`s&ml|&l?korV*($e;93el&xJ^ro5&OKgMc8k9Vkoiw^Yo zx{tGHiPfYdrL5H`vH(962IP=!_HOPxvSQMs^%XQOZ&Xs#r)=4!@9Cil(FSA=^9nbh z;T4M6LCOIg2u^V3K}?ZsCB#nmYuG-SYs}tJ=!D9g#{^7ru8m9FZ||f@g^TYAi%Zw5 z$&+Vy=bVJ10RE7lTb$kklbJ~TW#T&{n7e*m;K@Vu5E~E%Xh9=0vnPq<`#0W0)`@f) zf^UG5oh|A!HE_aD!@>1(i9FW;4>Ayi(nCvfoqTsH- zmfx+KYEYj*zCh@FWYm4`PfobGc6@7a@HW$m|bu;z# zdt|4Kn=J;OO77>EZu%MmM_yLvef~-ewK{S9f-A>k#k;-R%sAW*BVe3c9b8YsTk# zV*|j}uKTZsS~gq`$pMjbq5h2e87Vo6Xayz2gMZ)~C*K1G9EH!5_ zWCKA<+)WYSVDy`GA~Ky43`rB+iwk)R9RL>JXK_w&vI9mLv0m}C$1U{2jSp1Xu@|zd z0{YGG7Hmt;GIo915FgW zEJnXO=K__dXEMQ6YuadVw?#4q554=5vJ!Q|P~@xR)*ejeYOD__!8|1MH{U$CM81}t zpho(1zRBfy5!|-Jn{0Ta*Ty&y;9O*FP5Fkc+W<)Y#I1cy`hQcM(Zj1By5Y@BBG0}( zmpDFkY~6Pc$Hqu*2S-kKPTfcuEIsKbJ!jD=UANPrZ4jDZP5>^1ToT3KwNfzmbwVoQ zEJ4&VVMH1;hF@DiBrRVSW5ND9ZNQl-q4?8NkEsnB^7)Oz2EzWCrOpD$a~Hs|D0m9) z=MX4cj4<7~@C{$j+$*-pZ2kyq?EGJlHvX5zohJqy2Wh}_zi0<1;c@1gj$!w&z<#&ByZ-1)1j(Uu1KT0LttH2I?-uOE zMZP&`L{XK+5A{p>Q-H6&KUr41OIX_>8@{9V7>{4sy+Q(FEFk;UJ#tva^N{CHQy2uYbWx++3?(TTkM&c0FB zQ|JX@?C^$xKIFl+vPZ!OWzg(R7aS}>$pb6*I~)Y|G0`Q`&K$iO5xs zL{_x(&pRhZPlySFFK`@AFohu_LRP5*JoAnF0Pdl}P2)=8K3l6NCb@Gt7~3jlz??YZ zGdXiDWk55(a;oF4&!!04u0M`6ieT7Juba5r0|4_?!Ia~CzNDDpcS3HBEZyiG^&hoa zUVeDfVLzsJQU3fKFo-Q2JH2!R6rjj8onl$a>2+?gv1jD$yX1iFC#!f0*fCj~sVIh! zS@#g9!h=95JG@NPL$PUijCfM=a3QBp)sMeE(gZD^g;G6|Amfj|b$Zy$nSGXI^S*Bh-_fC4gBDpush4; z%2Z2*+;GHK9&)#DwKOvrxXqvY?BR3MMqS^V(eF5KGeX`I^aZOpzR%}hPm`!-j*Hfh zF3Ur?rn3-9_w5-x^KIl$7ACFx68aKYx~kJv+m-$*Z~A-8Vls8`x4X0CEKTE-|FX@_ zCf8P+PG9FG{$~DQSho{7Ik}EHn41h&l+aJuy_k+q8$^>d-Zn+I!Uor{qM&B%*&?zk z5Scd>_;Hvo+l5(Xwb%OPSTWq*!Y1;mQGbGaLsJD`me-F2(Sx9}K1-L-{{5A{tj=}Q)}cuxgSR>+!`GnQ zSZrhA%Da!r#@(~QT2Q>EJf3c3gb0onSHK z7g9O$yAQ>AR{1r{c*Fa;{cxk#rixeqv#H7H4(`@#d^Hd%$#_b&PddBR`qAbpiO=Cr zd^uR1UnoSOL&K|8YTr^KNK!;Q3=hH=sz$`Fb&^9>rasWjw9YPgDCko2Vm&W#J@gX9 zx0zWm-H6bNYPq7y2jH4Gzr}2gpVuGyRe9BvJNAEUD_r^4e{)wqoOjt3kx(f=jEwV? 
zXRq@Z9dak&J|I;oZ`z1%2jS#@rTBN zDY;)n{~lQW1Nt`E*WZIEp1xdZQ=8+P`_|zPQ{67ZCSR$wCVV=ln zcyJM>=?Y26{WX60*bNS}^=BqAmu2hIL`cdhy)-$~w;M^#6U1p>%OdUrj>w+EEBuLS zGw!GJcY5Gk{ODzk zG}N_WjHVg(J)ELWbY+Dn@+X6)Tsc&;?)FTLrB%Ox^+MIrFd(U)F0>P$yg@MrxAmh|re|G3 z3z^xnjYiq-W>3eyG%e3rc2KIZKDztP`AMN(z{%yDd4;!|*RvW{I74q5xRyMu)1;CO z{7X1+{%e^X|1>wCMw-(*mDe)3uTM}lm9T(ufSLtxT7+<(==YwsJd?iA?6f|YFQcMR zys`Z^#mhw=&#t@RycMt0%%Shsu+Hz*@T%wGw4YLhAoj*?s_NJQy?Z2r%eh%ESDW`R8T|Pm3S7d7fm5?^Tny zfW)xAQpD^@0so$-_YBoQP3{6|s9R5uP8?4f0l7A*%&9lWtKp5qAW*ZSWqK|VZCl9Z zG}*1b#9v}IA6uF##@@FhNjmz9*r$dgNH^lEc+$qw;^DYu?d2d*pL9nDFr4CB*##R- z8^K|n=f^<{eNWu&l)`Q949(10KHy;)Yx7gPhjMty12*~_mkLSOAml=}*1VBRTQ<>n z+nD;-4cUe|aX@!6HK1?B6*{jJ|F{F<^Zvq?7Vkqh-g^R@Ny)dWx5>*bB-Am%1GtguuqcW?S*-wmETcrC;;NW_SdcJhfddOa;0h zE7W{N^g~^eednzbYO0y@;;QinJ1VQQWIb-~h(4_eQboF0~K_5N)Us{%B?L0-@n^meNsS&2V}0WC(ee+f22Ml&-dlC1lZ`HyMT9RSkOuOf=)}_wUvkfAT;g15G-Mn6uC}b^L zMZ3A=fe;lhmQRi`DHg3xvLoK9=i_q$S&@>QXVMvL( z#DQ-T>g{CATi@oG4E|24q&yb|>I_<=-s}-u-E!83z`PS7#&nHql8qd0iYr~QuUmaq{UkxceLPb1^(xVB1W@M|apK((Z#%m~!QBom zD=;sX{#&nIXx!5zxT^Glj7#bM8>CDJa+Ij!vQB>W?`RP&{n##(1o9Ew>3myyh41UF z@`|1IRoLf-CAW>|#9pB&p{7;+O9^-^0u^g14=Q*UETc^+Z0bgQi~%_ zL|?}y@x_7{v-;_^KaGu4#{V4b>ksV~ev6C!6*`GV?i*WO*l3ZKxa0Is@3x|J zhm@kc2U^-e#|X6cmfdtDQTe@JybY6(*JF*UPV%Z` zjKSnaS;NP$nmB%;wI{Ora8g#Gb%O3TlQn? zq}yOIe{MvD+|!w7Nr|U_XZ*%SHT_9X(CA#?A@hKL>Vc8%LGZu}nO8pjXPV$z9&5CPU!vh5Z&<60 ztjY~_vbnM%{9>PpDlY;J{p2{YG9h=)b86WZUvxp4ipsntzvzpeq z;|Maot|`w#NngJ1s*+3@4xd|jq)(nFI&n}F>7SEzA14mF3i&pNNgAu~GCLI=w_8g5bB*<3i;mb-emuyTo2=tG5D{Arn z_YMfQ7VWj38ro^>YGL~J$DmChCCS84bU8C5oAwqw2o=J+jIp)uvA9hIpWQWzzj6^8 z#OmU0i2KZY2rslssg45}wkZ`q!i>522TIZ5;=aH&NP;)R z6TgbJ*4!mm>}Kb;e;Nzt17X1Ivi(R`&7cuA~DCJ+vZvE>} z;pUh?aT-@O<o`Z{Uev5EV<>HdUi#`?v+V;7>kwt@zI~@hlke!_Kw$V)pe$T0 zjX#4Kaag}r#_$hA-Lg&>gm1sx!d2{gsniH#7L!*_wavs_h+q5{xVS(`^gjzURzGsD zOm+*yp@gJCdrndKK573Ic~w4NFpf*jgOBoGC?tP)py2e0eMPqGqh%xvMy|?iSFnzI zs~9^oiqI?xxfXY=`y$I%quVb(e@DitqL!)QVQ<04NvgLw`w#V0*OrP{rQk>o+A&{ai|_j=eQIF>k#czUDK0=KxoG z3U#y{*obScpNy~0Vi{yOwNE|)GSD|xiyf`xH zjdKna{`&{A*EfKWO}hS4Kf{Ag=JM)fSBrex&3_znH88M1K|1j`NPCa%hZWJMy`*xB zAa^f*YST`gt%O3(@EqO4gu^EY^HHLfKR666tGJso zF}SwI5;_Ic(xgP%_prPEp?7YtYMO3;UQFfgFeP;Fu1CV673&>Tw0UnCiz$dmbJzN} zwY5QmpE$6hZg^5d=-?*CW<8e@aRn@kc!39oB|fbxRlBLX}uHd(+UdasSgp zVX5?8(V9O@X@-f5)h>iX?nR+k@-uX%|Bl&3%(_NiFDb}7(f|Ak+)a`*XhQVM@xz{d zZH!_33s$G>9RqL7_xW=iS{$^HXO>Gs48)RFk38tB67)@h!7^@|C`f!t;gZ6>Lu)*B zhgbcld_iskt8P5IjLq98sjqp%W}&kx7P!kVhC&gHBkO}U=rblGr6Ih?HM@S1bib~F zPEt>Ewq8bwD@HB=Vpg8g5nhEg&@ir@P-16Ojf&%xY9Fwbi#Or)y6>rh$XUOgy6301 z%+)-7R+Z0G##PlBErpqWGW(u z&*RK`-bZRj#Ij}sOjv#(S%U4i_M9Y{X)k_ZM+1I=fe$PT?adRAb80OjC+DS(MtW5e z9Ar>(Ly4Xxkn3BHc=i1Hyqk4JgfEDoqv;^q>*XXkT5o3Z?8&F!;Vud1-eXA^EOS21 zURk^5xc0}h*xqfHWIW&;0<)%FGTxlS^wJLJYRl6!pb7b)dp)?Wga0uiH981lF#n|T z1TCvH`tb)43`YV`n{T_qq6ka5Ep*&cXu*y{mv>qHe@^Ru~n-XF0@|1?t`R2ieS*eH|R<>Rzr=h}ZTaV%up{*HI}+ zDNU1tKVx983=Ix;Yrrf%( z>UHL;}lxfpoBk1Dh~=iK%qn&SdGcx+?Fu2C6s}Yf^OO z-@wT?X+UmyMkATv-T(l1m1(<{vg?1}6J@ql4ynV?L)ko2P_^l zoZo;!avmg>#^@FRi^iX<0!Pgg=cx`eEwwokweBBSO0rw{;oUaOp|Wvcjpvi?)*^y! 
z=nc!kTat%&Wi>~1(Q@@H|9BO>-%Dx&)1)9j7UzZ%KJh$yXSI^4`frUE{6)whqJdFWyX-kO_U{ya_9>tuLvh&TJB_?Zf zs7X}AF)z6Q7uR(g9xe@SIwA;#cb===Jh?>%w1k+Th2w=XssBdX?Tah~%^&oB!-oGp zW66CBwyi3F&V%k;|?;me`K5tgT zQP7Ia;sTgopsAI@TE;!q-fOc95nj2N6a{v(q45c=$tByGb_NuV3^$H435mUDLC|N; zLg6he?W?k$jHJv1xdFB_eSF7Zu3dCQLo-ZMz>JgMCpzeaO`DWOH~$=^#>K6U+r75W z-wCEWppgGD?~|Q;ywm5)>SYTpf$c5+{5<+rLxpX4DCDtaCm|T`Abp{YX519XI8d>> zFRHH4YG4O^PSJPumLrK2-|#l{79FZ+$E17c;2+nk zo~1|1rCO0*k;-L&OPiwc>N_NlmEAh7V9h9Skaw{Xf6H{1dadN%8Wr?IULn+Hgmz!z ztI>yZUJwk;itV=I_+BP* zsl?H|;qSI>XSAfk-}OANr<3eHCmLh9TKs{(RD!5Z=1mOu*?H&A1iOrS$nAh7ai=6$kQ!e43WM2Vyu#D%*jX+r!YZ{`#R}aKD#+2v zkK4<)jqaao*SG=!xYKS_G)5C&w~tIEmEnDL?k_oCG+-H-`8xT#J@o((Dc#eEEY<-b zudWpWvd{+s0D6=K)XNm9k-8t>z@~jmqn*2ic#KS(!Yk#IJ7jKFB*@CisY)9G4?|u3 zfBCw^hBGM+-Pfdkj!-$XU;I@HlU)%Sx=LMG{A%ZlNk#OGHF=*N^>Zl^Egn5oI4fF6 zEjm@{H$~E)gs42vJGRApX`+6HE~Hurl{_2c%-^-lfK(6r{&WvuzT9`~|KvPfh|GV< zFAQ)aQ{G7&!VS3g2nbFL(24x5JOZx_OlzHR+pLH3xhNHjwFrpROm0=ByN)G$3FeCL zL_2&tM+i$G;nU z12*iph}*Q1?h)~R@*|2kFdia*tJ;-iiK~^IJlkzjsqSM$K07B%PMNxwb;E;J;=A2p zfep1+z4Ay!o>7^843j_68xVJ)d7Nr@MY@=T&u|9=tHT3Y>LJQ4o6o?hoM7hGw!t~> zr^&9t#QqqL=2v9Yp`woamM^4v@dS}*9NWzywH^4$8jSgmi~hQ=#}y%7v1h2J%8lAr zi6xM<1~b@pr%Ouh#YM=!UnfizVJc-m!O;Y2TxJ4X^ZIf*vb0Mw>D}xFHDe=W`54w$ zvs01+IEBIJ9SxJ|nT4y@shq{RBsb|EGB~LRKn`xLXQ7)sPUG=dfoA)a7zRhn*dC;q z_JR;U2QtJD-8mKMdm8@tg}&kIw)zzgewuks@CD@XzU>Sh2gnRN?OJTO7NMZnZh>%* zj{?FhY&7)1j6q7H>pz~4kzNHW&LdCT2>1av6HTgr?Vj(W6-0&uRUt#5zhHf1h-YnR zJ47Ea+r3|js-FOD9k`kz>nDqbqRq*`ia4aDo@76()a~cm&rR&7X%c8 zKT>e?VDxYI6sP`Xkt&*PXC8qnl?A9A-~!n6!l(brtlKpuK=&6Ir2Co=Q?+@S5MN`X z$}IkcXwGUNtB1c7s|GOnq;2J@1n)eM&3ubL;L+=q<;F(CT6T$i^B$70`(FMcDl;m^ z&^56=J1ZogbXUJhS%8JZziS%$QIsh67h{j_YUAwh%@%UdhQfY$wIU)&lHsR!LJLwL z(A~B3i0{Ib1wWVT$;wPPA1|wNt(fGC_{Op*8*yVBE4x?gN2=XdE0xmJuu$fmx}WD- z@~DEd8%VZ)tcB@-9ssYEt)k@TPQ)15U7S)kfp?~`7N%~xuwJuLyr{FLLA#HwYx05b zZk8cfC6t1!i=v)vWiCmVjWA)k?be><JrC%WG|ymbCg01usDu_Ogg)VrLi$rbHWEF5GK*c1ii+&&8%H*E;Zt5 zp4&FIN`g^ZQU}m*o~&Rr&fl|L`xDRX{zR7n@RVBCMr)CTwjYF~NO0ttyjhwzde)uN zcxegC2nHMj8YxJTnbv)0pOo967j zTn&1}=Jm)JkDlvY3{?de7u2f*k+;%VADb61&o*}XTI9j?MTm!mAcM3!n|Z0euI*M^ zv`WyeRb_Z6q+sJ-zM%}+eDG4Z{)KR<5wZTs`ujT|^@q@gFI415@&+a4OIW%a>+Cnj zO<@v(LOW+j?Pe7Tg(gOa`+Y6Q!=&5F9cdd7QuzGz=)hW*yrR$4@mP>JBV>BnbP7*L z%rviqXS)9`%Vk5_uSohQ}grbJM6mlmg*?D`>O?(p}G?c@KW>nfn? 
z*ph7^B)Gd=+?^2I-CcsaySoN=2!Y`4?(XjH?(XjVn>RE6&3iMmRxegxp>Ox8I#v5r z)vgn6kmx6mJ_)TUlv=I4#9qA6PX<<)>bA8Luu89Dn^knXYr?^#N=*d9<~xwl^k@;c zhT|#)zY+;G&nKv2e~AE!K9pmMB(hGYSMM7a|Uo@V;gC(=Fh^)!!&opY1 z^dY+{d9S;bSSO;R!km3`w7-8NP%b1qzUFht?(ZewABUAFJ{Dv(BZ`}nfmbA_`~JC- z=)Z(OhAirN!FS_%#^!&2xIJey#kP2}S!iN~FF#8Aa_$uO^?V6TV;bsGAv>tWuQ%H} zBGQSKlq3vbIscXBjKr56H1An_*~Mb%fn3xc-6?Xaz7p+ZEfLI2HiD_RZX?&yOkb%c z!NH!Ao$20iQ!>E6gv~j!?`2KTYq~g&JezT4&|y^u7^*?r>{OPAX-;pN2?kpU^|?ts zDX;y4>8EZgZun1;{Ek}1_?!!q|qQPavOvRs%jK!_wuGQn1I_O>54CKe*9 zw~wGiPi?Ud#_U}M&^9Q}2|&G;89TLnmTJgF*?V#Ox|cTzvL+vSTF7-yae8PX_w8Rg z{)eWUu=&!qV-zCjakhf6^{bsPDrA0u{+Bj|fHnC$+h={HL*30bb?hdKcpx+C6Iom> z&ptgl3x;h;;X3`dlh)@3^lqWc_)HmC%z zHoikM3ZJ;v%=kIaKf_*-x7)wO@RQJj{RSs94&y@3UQ2Voo>ORW!dDtT5YfX70UaMs z$^xi;I~X*3Id%zfh4~i?E>H*QI$ZNR!G8|kKeWVBI%6EvAtJV2o`G=x)OksRiSl0x zG)9PkgAgdI366qE%OR0-KX?i2^HN#?#;2|y!k@IyK;gzdrS}5?8Dr_f>gi~f8i(^9 z)oX;uYO!Nz?k6Iit{OTbgQ*%QKO)`?;LYnQ-1@TyHQORRdT#cqLNevpI*>BoHs?nm`5$a4#0I4 z=WRuyqKY`YW(^xOl7dPkc(!cTXi*jtwdxz+mX5uqGr%9|4)4sOR^6s!bXcmKrZe&m zaNcY)IM4+)}dFGXWi+SZwzq* zBTuB)UPL&tt0OqEwY!K7`<1qKnYF-}#q;MVPr8&e=^4ISBDAIyUOnpUN=>*#z;C>LY@1K$w+skmB$nD~-bnJM`ivoG{h&lmSj^ z%;_HzGN`;J1byR9XUknW* z+tj2cP0U!yY}dUJP4bNrg|!{5dw%gEuz0k1R#>nZY@(^Ha;)FZq;OWbU`+8whqv;Q zAuc-gO%~<~yG*4a5@eLm+e8;~zudfs-|8=R9P8&k9ImV|4i9MRcN*)Z+7Hqn^w%a} zQaMD8-WdU=zR&MpAm2INJF-gk1Q;Q%g}K9ZC*zyG&Lxv5M+^)vM!uhEN8**GSg-6F znyYORp93q82YR+@O*Lcn1X@oj+@2LhVb_&&c`LL{u$*OBBo{!& z3Q0zn?6{sEw7%%7C#TJQEQ%cLyE`)9qCF)B%3H`q<{ax6W(Z>hJ3q_WoF1u$vX>DVFT< zWK`BkQtCI*SCEqZk|(7S@$4R9MqQ#EOZYLrn4J_=i3ZxE3t*nVB^;L$U{B@|O5#_- zMvC1WZ_#MmQbA&=r=J2w)MWs2qf$k?~rrLM9$RAl)64($U zElhwyKG z`dpp@fbjvBc==JBjqR0TWat$VPm`ihN!)H`mDPxiR51B4GpkHZPVfqko^1v)?MphJ zipy*IWdGRx$)IV}HUnbwwa4CPrd!3~BFfqX`W;6D|DtM33zYB&1Y3_Q34acS!;erA zl2#>4BWiZJo}DK2rbet(-i|Jpyg1;_b}RI&lzosL zWI=!D%7P=NhEq&LXz)>x1B z*yXwY>6mQbd}%jVQ;7zqb4;cyb^>Sb)Rn3*&(qjd=F5O_>{us^q4JC@alZ+Crl4%N zZ16WMJWG#UWnq_Eh>NaMaPRT2DUMU2r_yY zMP`4c*39s;E!`e2n?~|;Q%wO9ve9hvVY)4uVJBU0ULPSZi^8C(EIY{))3R`V z_5_Iyw_{|{kZ*ZUmYy^VME_=t|KBSAal-nGcYgv5%mh>jUnJ4~lY4eek%G&0yG&+= z6(w*}FeDuI7esEZE8MZ9&o{@Pb7N7}c}McNQ9 z;X4n|?j=^(bdeqdAHA%XH7wEMlyYlqu)c$@vT6tyRRJBrfUXed8c= zCll6Lbl%*9|?g(MPXf{k7WI}C=VDhv~6{SypawyS%q7~ z{-5Hh0R_?4<{=ik>)V8GR?lP^KkZVfLbraucU~w)qBf;oirmFc>mhjL?xrnSdjKOe zdO&57^cy{RxRUnfNP$n;rxExIoUSX4Rj28--DJ58#a{bN;BY z7-7r!c7A-V!fF{Hl#@&A_9f51CE&KIrqB-L2jeg~PAt}D!b;t)bjcW-wstNC+1MNG zxRLjFw(Vcp+J`}er==~m7R@ghYcKbNCzd@6vby(cxd-U=P_HyEAb?2@_RlLHkXFE2RlzuTC>2=tFavok6EJS<}!+sp5hDkg@f3v1{~fLM7) zv9Y-l3snhNLQoEspQsv~_?+PJVWfKOZ>%>~2OdAc3-*roRoX7KJe+5rabl5jd*BK$ zko^FXVN$wtpu_K7ko^!X8LB!BY_wJwR??qR}}FmjMnop#)l##KfXu4JKrgIH$QUCd~gs4Sn2^4}|w~hCrJ9EuWdQ z8uHe9xAx&FCbbNE4T0!43R>vdrmBYFwJ`L9#p0W}1tNt!ERz&RvGiVuNh@sxqOn=k zQjVk`oLZYDdQ8OQ@`XO5obRryyDcVBFHBT~ME(KH#!g5dXtDXPi}hFRlCxUZpmJ9; zRm0+Ip~ozG>BjDop8g70k@7WyunI83EpOj;2P}e=DY~t7y)h|TN1*s}RWmNk_HzXJ zLtV1$r?yfqf+IF&$m7&SYB37+60tSK`s-4kCFeXtwn+J)uDQy}P>96y4lFseH z0VB7YO`c$GB!V3xhQ#5X@+!&ybliWg*#5Da21egweZI=1lt(3`RtPhv#<>?Qv#Z6W z_n`-^BVCIypQ>1hV^RRj6(r`R>vcV%E=9w{ekf51XYz%P*tb|G@&++ybKLoH8VDe> zL6?suO_p#r>JQ17GY6?5m%_TPZ-Q65;w2X>WoZtjgp1qCqZ~XpGiQz;Tsqq8CVhiE z2t5{lMJ0lE(mlQFzsO!}iQk)TF>Q74=SJHb8V3nWU8^9-by~g9uVYC#Qeu_{0E%no zRe~+Slixw5#3DOuiMAL@jETROY7x8@M5tIV)fdim_8~b>bP6lAwqflj-#jV!eE83Y z{{2ozHYCtXOp>&4jG^INn{b?d8T$t9jsEbtS$NHVb8ucSBlzdca5oY*3Jjm3&%ukC zfbVk2%qXPkZLV9`ZAp=0UV~-j1R4bSxF?$TB5J4zIIMhJRuqOmN4`ZzLU#FfZaR-# z={9Ttw_ci*>6zMfucL8@E1gwKTAwS~>catRUXqLfsFB0BJRPQ#;m$i!K{&uF^mVxu ze|S})8CCM{DydSf132~q^FDcVu!EEu;lMxR27j6Gyr60P_>KDPsj;-235BcaJt;Sr 
z!F4n~6L)N6ng!KKCZ^J&zi8`50uJRr{p_li<_=v)e?z;EObsxhx+BY_?$l<_+X^Uu z_lLH`xh$&bxFwm|2;?m_Oevk`Amb1@L`25)U-4kUG1_>t@7i{|@TEgrUxerj|c4u%nlT!{l()-g<%OkWTZ-4ldyD5 z9ODIXR!f$)=LwxqtF3En6o{A$#YAt_x8b00&`Wr}F)T(p^5H`lAz}F!EO9@}xCyE5 z-qqQ0KJwF8GNEbo6xD)X3|2O=QF6d4v}dZ+*;xz&O^>8~-BCKyCmwBz_U3yUl24yl zU2c%ZcH*-6kDW}`NN4(VwVbkxg}t0`?@}AS+Y1Rlgtbvo7H7iG zZYQlV985AyNQdxiS#e9JQ1DChu~8nY9~1FUHmi#eJJpcNBEoZTL=Hg-E$6kM~P`~bO$ zh?rKMtz5YkZ5HSQ5@fuQ@s|#hTyIUjcF3Bo4Zt_i??&4PSnv=mThQX_dok0Va`+dc zmqVlq74=451|O_r6X*{8dUZU)lJg)(eiV`Jkd?jX*O;b&1O&kdspQl$JJPI*p${W> z1XrEtlXIqNj%Qkx=jnqIy4?z^g#Rcud~PZY?dodU)@oPChWG0`p~te1B61%0vR$Ff zgp3jrOmB;?iXhR`j+3l;vW-(JTKyW2!Y>ZJ*Z>x6pLM7|rSi+XjCevoP}=MjR94l{ zYqY6K#5y`PP-y-g)AKvEB~v_3M?c3~==oLD{27D{j1LYVG`lf53gfex+{twkC_5R~ zwyaE}D=Qi-qyj@8(($`1hM7KH-+NYj7eiv}Ga2<8{XD;Pm=~z(OGXufdh;YDJ9)EV z2r>lB3F;{r#i@j#m=p6FTI6#Co7DJvQ50-{$|Xg@3JVfVlh??_4lY|D%NkFlqNy+S;v;+H|nG@jjxE; z+o2UtrRkM>k^#MKfe`~XoHph>;ZhaznM?S-RDZJ@#2cKTeI5p{uc#?n>5Kl7T>M63 z0$Z@#hy?aqns5CAtPNu?kW`;c-#rkk%+RW+;NCuR{3@#^57Vl{YMr9dKA<%0CpyiZ zM^JUc5m|p=v~y0lS0_8GdAPu1f;W{gBI-HmtoeEkjpO0rjPN{kTGSxf_OhcS`8VQt#LLUBZCkG-x|FeMBWON8uJzc<~LW$Gf z7ntO9TC;e3m*f$zx<0x7Q-#4m>!l?cI-Et%_S2ATmHOxG&S<%X9X&@IAQpIYx-Y99ycr zt9njcV`R=b@os1LEu8Qsz;#8&F!0vQe7dw8)C9I^rFRjm& z8Z`8CB0-h&{wfKUD2VY_$O-eySH+g08Kz^XJhPq05-@t1gb zA*WgOF{m1g(5A)joYK@$o?jXI>8*F3KHH9;JF7WMiql+`Z|nlAK!!e2%Je{mQ2#SW z@Q+Nv@4x6s^f>)%s?JQ?2|u6cgf5#<7o6w28;G$#oo&;l-}U$u?GYN+n9)i8%x;OR zawK1=LgQI^%K!#L1dH^pDPJ6hCCCdrEf^v8m%0~P6<2|w5QVxkLyLCf+zLfcTQW_Z zfxb=my)7H$NU>PdS&{kei!OB=7vr)*(OZSIkVz52EKp$mpOx?L(2H!NKJ49CD|ln0 z@I#+wn3cdz`Ic|VeO*G4`YZK=U1@-RO`YBgjv&kaObulWM~W1_RF2c`77i%)fo~=x z16o(O5ccI%nhf=jS_>jVgw+-FGFm2c%{9e478%O7;Jv;|@UHt) zVCq>*7G8jEs+-E}#m*tmM$CIl*zgtB64=e;?0^K96G!bdZ;Yp`E7BAZ8Jw%n9Q+Tz z9}>&R^vX#;D49#cWOzRW-kgV${R;2NkAf2p9P)^%MWQv@yi`J;2?%f!{7$W(y=fO? 
z=5Lrd8JJ0U)EYd6aC2DZRrsmjkx#GqKu3w~%qE)mo;DK#J_f)EZrJEu{iVd+6FKd* zZ9rPdvo!CyQd5>K{Jq5C12c*KYS8kJR{2c5-i)jctzaFz zM%Jql_mvG@Rh*%SF&|kAVBK@n zk+QW#5 z#Eh^iLPz)+2!Us=pv<&hmbb^roXT#9G=EWe?g9SBx>YYVP*#iuaK(<7e1k{3NsmNi zxP(1j9u{}VUrb;z`t(Ns0^(4OgmGqnYo4ND;5*GbqPWW>p|(#@RHw$5r#7DrojX&; z18CftWUd3>{t=kJU06e@6ci|&FC-cbx9LXn-ET&CPF2lhWVGw!yFIVJh`RO;ZNNqk zYtA<3m(i{<&mIjo7Pt^asKR&Y;7C}%GdqH+BTuq7uOW`%;N>qi+v{FEWNb>&J0xTx z+fNLu^mWDwg#2rm?BsEVA=SWCH;lFFokt6RYP^7BkNERtrCwRo6=ljWiwZ#Xh`bm` zR?<_7R5SMu$C|@z4{;2jg1 z(a(3$QA|xsHzLMtEw*i4Fz=Ln%PTC=c;~*_GsIVWP4#NMyNXm(Mfbi%DP4L%F>(0( z6QFdhvA4N%1PE?*_LmaLu$Yy{sVOK#x2_XMJ+F>#Ls*qR)_LrU5ZH>2W3{x^irczL zN)2xQ#&uQZFx;Lopqi8Z-Rk2^o=uf(Fm|aZJ|!#KB!TS!)tFVU7FRvhBQ~Yy4Ti(@ zBe-Ey6u}l@C!{_U`tzD0I9A+ zIVLdd0+*}m-A}aqPLN_4Sch5_{>Aj>yUz6DJK??=1?Ve# zrhpAyjss?(@8#=+YRfSTO}zbRlKARNb(2+wh{MWPP7D@I<7#G|*UtOO`T{HBz#cc9 z^ZrQ@#C5%nS;|2Q3#Sa=?q=}?^BhJMX7;<7`D&xr(2Q;(+vn-HR~`bZD_t82kq6e8 zqi4~5jo@%woz7zg!hA_!PbgY_stjfXvoCuH2q8S2lknc!o)GI{6g!{qBD+fl2Ss7N z)PC`V0@q*17>PO$O!O3}ID358%gsD9s^VhAvv2NCqMY;mEVwZSUe-%jrp!x(Q@n?+ zRr^3#8EoeC9WFlAei#+(%M^Pff64P8N@1k-6W^#Rcz9D8{24BGP(>he6V_wn<3i;r ztMUszEq>(!H=084&3EdSyea3WgRh+IT)2DVLH5R|`m4_zl~);$BN(VI=ALk0tYS2y z&@>O}_xBVfkJ0_;r&}{xP_*C8^9j|eZ}zKMz3e&N5{^^{T)#b3x=-Z_$L4^q63mwBN}&MeKpH`kKBO+zQWlvWw}eL43H;7b!zW*K9bCrS zk-)xWP4JvXF7ZeNaN0`vm0iYG^1fhFNyus#`LuIGxix~TeR(}1IHC6rI1FJuvJ^j< zfY$Y;^wrjDzX5a$AsoyQKu}y>tvm=$pFv;*DCmyft1N`;%U2Z(hhHt(t05}DN(sWC z(O?F_d-1@*ujT9|{8TfKVQ6g!G8EeLDCLf(S%)@DY(@+y?JOlMKctQ@P{d&l|2#{M zKf)i}-C_r2h~X1GD|#R~ZGu~ty&8K@Qxqf)WyFMEVL^nK`U=EH;!lL~F!Tv-)ASxs zH?kwgfVHfk;1O$Sd>NzXXj6eoNnqx#Gz+qGR+-5zk$0B8A$e(YEQjFzi_`nE31?N;nqI4|75YY(1||?C9=lCqBV5!@%{^l01Zwo zsju$A-Nb7xBT16DWcriqupl;Lzrib<)C$CnCiApn>E?7oZURM#Zu+CR`tcTr0rzLG z8!Y+0BsNl#PmVFc?e|f#MiMRG#dnX%?PJa6N&0iFTlK$O&eq4sA1C~DGDTP*`sk_s zOx7e7EV9wEPM~}TbBfJ^dwE!^H}W>zT^MzrU>(DkT>G%%A1SHO=1iksiL-ZS@sG!Ta7$vIG zrFg*y{VZ?E6`laHb82%P-`SR=%{0@VspIijjYvn|l%`z}pVjMhp{drGBA77{GnH{- z`Bzh$zZK-2TM9^M%-wdB>6Z@GeIGaTj9Oy=_BXOMG3s$abTFwOgn6^JOf0jdUS#5zB4XBo$&wSKU%_ad8b{lT(N{AZwQij`3 ztTtio>l?IV8u0xrq;vQ3CaNR|7^YHNq-_trN0p1+Y_OE{kNYfd`@t%O25(X=v~{1x zJ4Q*X)W1y|YfXIunnBrG&=(^*4~O5Zwn?%$vD#hW$+e)(gu$WVq);kcs8#l8CppG# zpPJ-PME(z~1`V3jkOn1UaJz?I;eLv1cqCnDXhZ0>!hsjFcW6OPT-PTIg=7p{u_mV? zBPcxV9L7A2x>gk^{&pCPK+3i?5oA&_aLB{!gQ)a|}?dYR924PSi zRd~g1u1&4WC|cFdryerO^cWIwv6a?x-HQ{&01s2`mvS`9As}_fKbI#DY>{f7;^+{4 zEgV6J0(!-l*FzABJ@lqb1OF^F0)#5^Vxi%bDW%pCzhxAF({YTwrUdg)Ki9%QK0WE= z_yZoG;?Lr>WZ2E6P!@uiFWnM$FN6E@ykJ!snp3fK;d6k{uc$DakI(V?i87n3*ZZgD zGm+--8TM0k*(_Ps#>RQE$a$#CyX{q;$p-;KNmJul;0B zI~c9O9wwAEiY@|GnqNyz*PXcoz2CRI!O0Isb->lV-_+^Oxo8s>3cS4#y>;*3zAp{(59x z@h_W;g>09LZIR2}kdNf@pLbdAh~{9vjqAT}I$yqs%x~tdo)@;yqqDdTM}N%7s#@TW z)W`muLdQ96FdcfO_o?|xe0Y;GkjlFo+1yyKw~WrVNc0sWiJf9-JyPk|cq^`Z2xTsh zJ?(Ch>j+B_%(2Ey+5X(9l=-Fy5TnZuOiOiiDtt;aNmE}`B(kESYq^M1KTuTg`b5`q zRr&ye6__dJTbuofsvOJaMgy@53L5|;@vBs1d<_t@BnO@jMKqQ^*nfv(sF*lNPVwEY zcfDRWK=g(u=Nl_hS4}}lL11PT=)PTObfUhOxnl$zspi8WC#WX-k&yJg>^`J}zL>Dz zm6-Kj4lBdm6e9G4q9b6S@W5#U z)J{PMtth81aB;Qu_j>H}lKMtnoVSfU3h?~70bAw@^4L^8?v+$Lo8N7s6XWN3(tRGJ zr=w9;Knf`N)^;sf6@Qv9m0r1iz_V`snEr6fTYli+00|{*7>9_eB-9|=08Lg}=UAiJ z3e~6iU{QYq?jHol-@@HSP_=AAEgV7s-q;H+r&B{PQ1J4R8udObo_5_bHQ@XN#cuZn z&nErqE+<-EK_Pr3mM)a1Lu49rwQCvyfC1}`cvW~iouSC8E2FiBADIh#N10Q9{yi^? 
[... base85-encoded git binary patch data omitted ...]

literal 0
HcmV?d00001

diff --git a/R/vignettes/images/list_of_tasks_models.png b/R/vignettes/images/list_of_tasks_models.png
new file mode 100644
index 0000000000000000000000000000000000000000..c08b8f2e2aad39a9727bde3241575a6aa3b922c8
GIT binary patch
literal 167209

[... base85-encoded PNG payload omitted (R/vignettes/images/list_of_tasks_models.png, 167209 bytes) ...]
zGRuaR>FX|prib=7`wMa0gCfJ?b}|axM2foxWT?$F7g7T|3`3tM~#0TxehRbe%???$*rMD#OTj@DgGz?23gjxG`+q-hOzq z8J8fW@(MF(WVq!ZWkeBBB9B*Z(6s2|%aB&43yrpNIL{d;X{eh}3;#gyk#A}Z%3g9X zTK=AAHE8TH9kiup&eiE{!2#Ja(SaN3cWqgNil@f1B(7zN3y|1Htz1`Kj@r&~VOxAw zPRF%Yeit(HDa|r$t8uMTKN5&GPaPgfkK|dtGu7r@tpC22w4-~lYk%#zlfOs5_VN{^ zIoIImBsHh(G{{rG*`D)>g7drBm>j*OSlT5sV*I~#GIPh%#T&ERm3`CeE3;5* zR;#w}w3x$g#$#78dD964ZfOaZt$5x4vAo0rNL^8?M+|H~mQb$0Tar3+5IA9w)jxA0 zY+y1I#xpLp^g@2DQF?~P@8PSwkVrCJpSLm`io=;RPVSPah)WqvvJ8ZwswG!NvWyIm!J5}*>4Kbij+Rp_Ecn%jW&u$2imFq)NcEVb}CRB-^uy& z6L*%#{Psmj6{75EACb(8sod-Teop-LoVTmU32l4IWLI0n3P zXd)Hw!{eBskZD0J-Q$q*p5UwM3v#YcN$kVBaLNq_|AdPZ=jz@e(lj;~a{$>5U z5X>}-ri+iPfWWhS;o~zCsh%Rpc~(nH#HDV({b&?mPYA<=-+hS2$E&l<8R41PQUNrk zwwR1_m6dCAuLT7o9U{#IkgC1`_OqAZ^0dB^&!QQIWc*SECvqHTXDm*OOq=N8^RDQY zYH1b3A9GUS*_8}9JI*%C?GJg!Pirc~W}0~@Yoat~C?{W+5lm3sim}ksGaC&>=hW#) z9^`H;CDK1l(_M>%6)N8_E1NebaZ_i@S4{Z8u$sn|vFT0GFC;;flpCpgQ8!kRiT7uC zzJs?bK#~a`$Ke`dND&acf_#_b_$C>a5wx80x|N4hYv;DIN}}fhw5rJ1Eb^e7@Fp00 zd9zl(G~kc%+J>ww^Sd#$EW)6+6O8J1@hj$`#t3 zme63G3hyybojRTAuqkTsM4u5Z2H#4N!SO9#{UncePEs(4ARHO=xgRQu$qP}zPKNfj zzwB#l(-W=gNC}U6_ka)DZ2%WZlXib`{HWP=o}81&n6+s3H3j~9*mcd6_!WxTE$upq z3Lo?8m`{PSMXg9DP}V~xF^LRV8jgR}<|TRHQCqxQD6Z+9RRBw-8){g|zu(q>gRrHp zIt*VwVN{KXa9CbE%$WzpW=yiES*-kgk|29_L!aqA`+Fk@tuZT^{nId8+`ckbXa;^k z6TPY>dBwfK*KqL#Mvh2MZC0<OV*vW|3~`^b-_sjC7$5v|akTFSB$8@J510QoLwsv@L6! ziV*(Lzu-yIcKJsv5DpFM9GAgttS3H=ZL^K4$|Kx*ZQVX*L{_xwDZH9a#rL&ixZyUd zd6oVQu3W15$bBiK?XcAD>i!IUYC4nX2+r;luJ^xYhi(TC*KYZ8EsC5&tBvJS^9IA8 z2PW_MyF`H}(uxLbw@EhZ(`#_?^W@)bGskX{rf2AmT;01cT%zf) z{lS`{3zvMO6Z)d-g_Zrsfl?6$L&)0{V^Za!@ck`kjWgl8&v@{Kc6#WYm^U1|9VBHU z)7I;kwYn1^9p$H!`#|PdC<2>WgwZ6tQI~SIz0&y;m%f-tDyy@`7dbU&V4Wrq(X>Ez44L( zKyyDzL$VSP9X|m+|He_t7k;0_&ebbAe2~cQ152Q9J+oxASiL%nUf^f2cQ$Q2W_(?A z7NSK2#fu2#=BiZB7Qb1>FH@@T&Rvd}7F?aaH_`8;?v}N>K;Ii|-zaZ#aCe2Tj0`q6*K~a2vkH5Ip>=P^E_XD#p{UI(uN8?R2ym-C z%q+M`_S*3r=$rWKCACOW%Ar;3r$$Kh9d7Xd%$EK@MN+p{< zKelyiL7}*;mZ~EcMTDi1%S5pffIRE)#Ly`{os`DX9rsw1|B|uqi`IPG6dbMspnk$W z7n4Nf%6oTyu6ZQw|E;~@7!+ri8ywauG0(6PH#6IO)_VW3Ll#E)QWX`6)N?>)vDMg3 zTJuWR)5zT%s~TR$Zn3h;!3@yTa(Ay_E|(xS?nS0r(K6VpxW1iUB1bo~Ap$J)sj=&0 zW4w>FO&QReXKP6m){q&0URBw5qFwN%!&!Ql=XH)p2A?89bG?DV{{yr`2LP5m^_3X^ z??C4fvC=f#@YSx&X762rm~l`v!BN}VfCSz$3zpcS7{=uX|IbRx#`VC6FPOV1&XFS; zX8Syp{8qUUNrv>8NM6j<`%-%;M2i>keSCJt49gLKjdb-UJN_n{5;eV<>5=2*0av2| zi`fU{eA6Mt806&#wZR{!5l}TZwVzLyS`!myao0Zdk+kp>J{(1#50Z>iYZQ&&6lM6J z%1u_(6w81_F_pG!@(_C$86yT9<=e_3w5a9E8++)LoB09~dzW%W`IYccq(@kbiw*d1 zJ;Q&ITZOe1y%~;MI%uBTH^8LT0@rjFW&M$h&OJQ$9`uP=1Y=P``%LQ8{IE2&qCE}+ z3fZx6%#eNz5x~rwO|D#?z%&qlZQ zPW?ovx=dMc?<@Ll@sNAowPC!wY;kqVDC4TinRsz85~xQ$WD`PZn|)rspd3FNvc{uo zIe=@JaEC=c?aI#J%O*Y;Gk&q5JF;zD3$Dp%R-%9;7;f#KH93;OMcC4pk_}qN-f_?N zzj$eYU0ERP0Z!iQn_rnL*Yv@g0o`(K6?t(}x$!|*`|(Rq)sDDq)@?JPN4_8ECymgA z%`}*pV&q>_@Tcot%nMg7-k1m%1_0j|eCvLQMC>Z1el%vGJ} zwY@fH)oJ_j;dazQ)~_9nZ{9Cl4TMV!JQL^)8Xo!Qj2``~@*>vWn%suF^KEe62hBh%g=7B;Nk>nIKex(~A8(zvVu4XBv=ryNhn$xkuoU^T+Mn z4JDSX-6?HpcQinUHqr1c{hH{ivB4Go9Ud8J`_;f?L%MHZ7fa-nm&0sP+GgBe{&3RM zc#J*>txt(!UcPHS^9A{%}~K?ni|iQ!FL-n_^wNnqp*}`UUwX zl0t(9<=4-09UL&}=JWRHU(g-J50RMjeoE_O4PrzP4%^3fcYXf+npz#vu~%Xdz3J;c z$Zz{=V9D`?75=+FBq`G_olZ3AZvcYD)-*k_9xLn4H+svi+2&L`I}D~>@SI?g&D}rI zxmnh!KCv4$yjKmy33^9Iu+6-FTnz_JCVhQ}nl41wvkW_T?{zpoA|((rx=ip}huYlB zs!zxss~!*>Wc0@b$+!BqlS^O3S}2lV*kf~W|D~#A&}IfHEh<1dEg@{Hf4*6#)cOIB zbK;|pFKG9JWUn$`%ldL5IhXFW0Ej{jI|d!gjz3|5qQ*>SWsiT#6E-|`LUGQYk)0|( zP zg~F)X91QMSUTFI>XuY_mzVuC1)GS3OiYM0?_c`Xgt{0mf(#tf{+aAAuh~# z`Js92-aTG)KBNR~;uD|hVMcN^75)HH`AQ$Gz1MugY^co40ZdE6!%9nkrDCMD-9330qMqh-s}ao9&Kc<|hdam_=hLfv4GoPhJ13{$pzebG1LU-*qqzxc`wS%W 
zvZzB?V10w>Weqi9l>s&DRmfNv&!Apf|Iq0qlAA$z!(7!X_}_d~zQkurYBu~co}gEi=JfGkyx%*mA#P!j_|gR^ zM8THWF!s&J)YKn`J0L9+pn8pBk~Shl@t*cQ2z>&NMU%|Hx?);tLI$@ETUG%&Vb|`| z8!`XvsD1&$lONA=q1&t<5N>V|zAD*+&tfxTQSm%rJs2| zNgQatsH9WDLoVkBwdZXo(7PrFQ&+pK$9l3zCJf#2+%?d2h`ful{D#M*k+ypsmTn18 zk%^4Y^=JHDFlrMS{;4d^Bt3${eEVB|Td?nu_ZNRrmE}TNdySbh5=Rc>&Vi5?E9CD4 z*V@H)gPZ%?+-QU(xax@T9vKX9e-qn}&mywd?3U0Nlw5X}Q9uEMXP2(4NnJwo1IS=U z5!TgPjM+?2-HIGcVw}ef`Pz+c0y%S8$|cRYzo=@*{p}HNSUNq5!Cog1da^?xv(p=K zdthjGuR{uJVtS*lqm&&PtLm{MHz@^z*z^dX*)+L++B=?JHf1_N^p{kns&3X!Mk%h! zdIB<<>nKk(ui)oClKJGV>4Q}j^K>nSUQPi%$Le5r!dfj}rfh zSM=RlPmTzB{E=^j4I}DlC(65ip@$p8~NObL}u(6q3!>|QIlpv)4gBmJDVRYW|Wj|QpQ+D+exC; zs6zpUop~xhg~Y;+qpWl?kP@29B&8#KV(h8iF|4yO4@q>~N4_uejjoN7zW`TZZ zAR;=|49fI>`fIihP1$@vuXQq91c*_0_>rh5komW!rskR6;{I!V{6Y6r1e=ehr?>ZT zi&-(%J6uVPhkCKHJL0;~aT!8lcf*o6yCG$7-Wv#fF>=)^7kb|{KZ_uOpNV6Cu ztHPR4A|>>U#20`WBILWL?V8lL*zO1mzg{Yf+|$FJub%mq2u7z>qd_{;Awk1L;e&N68(80rRl>5Y5`KC+)gej#6*T!R)C`Z|HX43!`jelII$O|^{C`LaP zPFo=iKOP}|!96t)Fa3i9jjeL!q_k40jQAZ%h0A8nbuJP@s8SMD4L6TxS`36mX&=H< zKlxWaF~KsvrrCx=rC~(#$;HnBXPDh3kyHxIMX_y@MaHqI>@802`0cP4_NQ{% z-u-u?tR>^?4STM(i1ElZ=j@uWC|*5UE{Ob7ekpg^2hxv)ZNeE-MSBZdiZq6@FeE#| z#-X{&?L2L<(g!0H;B>`A+vmbz^I1sW_IL!P@wAU3MixJ1uR$>RxUyN-fJQ>4G*+A{ zMxoqcjdJH}-}Y%w=d>IOA4*z?s|YGgk&20;I4O&$ol*8DlVU_D?nL`Fl(I&2W?k@q zF+kBMiUt4vui{ZW-iKOJ%A@{4quZ=S!hh#aLv%o}egXjaT56H<)DIPYYY&PAZ!mu_ zvRe9O6p4K`V4lx2faFBd^Y8YF@pyjJka&s_g*o^Ym-28i*9N(~<#wFn3qj(%Hz$21 z)T=!hJ_U;UOjDZVU3~?Hjh?BL;YCP5@Ww@6ha44CM`^$;dUZ^y`q&h3U0oPPQq{gD zFbgve?94_YIvxgZgB(da<%}soT!9@D>|7Xup^SMkzD45E)zhJ-wwM6a425`BxMNFY z3p>J{BYtauQ|#>{h1<9+73-0|MS8>Uxx*%bgHMC{%(FUp`C?2vGKFfJT4|bmgChg# z-T&fD#LD9TuQ*faofFwK9#XKGzf@3@iWvAi3{o&O@I^pFDB?OaY1~>eTLLdra;p3X zDF-OOHE3i~;a`La&E^2;1{=^T4sKGi3cqUJs;LBwnJRLkeTTOCG_IIQo0{@@XLAi} zJL(l__S14@FPO7u)vy6{nk8T=}`_)SbYF%3=x$l=q(%@C= zm!pO}3mI=#39U&3$$PJT{)j4g`Uy!zBIO8m@xFJAXo`0)Nfh-~dvd;)OCg@uo0*+B zV0}lZH6+P*UIeSzi9D;}eFBh`M>vyB-$!Lf1cPzulI`vW9PuEx^H? 
zhkgYCF%##pd83LmWJmH4JMzqhPd=a{PMUd(U3ZwzP;@+Ohv$q`KY)f!<)xnSQj$23 zPo#98jnX}tDT*;(=gg8pdSjFOcSMAz-zT}H-%}-5hV7-hG{%^I{_3LdKqq^p z1k%uV#x>a%Cm2JZ$0Kp!3(CncY{mPQ&Q#A{mtIRt_Tl^{qwUW3dYW(U5>YSM8KKO^ zrwf&1)%|VwDBKz!D)o=GaQNtpaNV9@C}*W9TJ#vO+0wf2or<iOw;4T3b2B_#!9~DU$p<&6aAaT984Fri|wwNOyv!p%0GpF@ZQI-Yc5F_xy<mVRi!6;lYEM2dX|0Q z?G`pz@~%j!-{|!pUHIuF4VjQEyr~o=u)D(-C4iQ_lx-hibT@?lk-B&YeXX`IQ=~t)3eiw$#BfsW?B< zJ=0Pw0|gNTx~fc;r@Um;zx;}kC^1%gJD%;E%{#%xG%8c826}@BHervLTj3=~^?)ZH z3t&fy1jlx5A!?m1_-a+^4Au)ldSvVu+XBPjO9!Z88;6LVTqU%JGPI{6k#BaKH!{^> z4`n<4Td$6+3w~iWXvSZZ3u03is`%m_O1Iup{E>C(9P4=1i^w78IM>LYsTZ zdZIQq7p+NjsYL&viWm&v=5=@Y_t%a0NPie)3Pei&f4GjX9NJ%x^pE_xmH5&NC7ph2 z@P4cJ&i0Y3ekhJLZ?R7z=r3HMpn~Xr*<+x5JK{md;2uf(duDFnIYBdS6b#I9|CGOz ze2uww%nI$V*xjr6N-TT8WeFE$C+Nd5ctghZpUIBDstSxf!GcKkOZiv=_^AI!8$<>H zj0E485o~#JS^xQK{{I90FQ1VuR8U_Z8r;8s_|jBRMjNcTVM5HLs3{4s;-tR|zOj!q zfIm@`NP$Bm%%lt#!t?bNv{yLFa(09BIC6Ex4j3;5y*|y3ZQe))J<&JLRUfoSt5^5_ zsx$)5iw<&5SLT9l?q`lZ&Lvm;SD^mnA;6VkuNVcUy_dQkba%NX-pxqB=B3%2ORs#k z9?(nfz~pkS{f)W<51HQs4fF%inP4!g9lkaoP(v|-0GVh|ziu|Dy#`31_}phzo7=#e ze94-e<2pmg>YV!9@PRC>bRwg<6uQ(N8%>i91jj3is_AG-odA}34 z1Rk~Pj)*5Hu=b@xpLCsqZh2Ro7(dPrw0O*PDue%;laf=bnOd_yJW!0o>{Q1r&&Rdf z@kscad!!g`xjEE6FJ%?%E)K;u+$Z$^fW}r>3YOc}RXwWX_>Qas)6G6NF(?Ux8ROE&H9&s1L$IW7;+Ui+4B%i$g*rus%&HcEOJ zj>f_r`rr6ErzlIhZCh7mW~FW0wr$(CZD*xzyVAC8+qP}%X8rr@d)ql@xBIvrR?LX0 z7%}EIdhfh$^w=a>)se!lCoDri`e~G~#4yE~C!6(ev3|yXMvT@SNL@pt1=8Hw`cM!* zbKxCLI-6b>kK<1QM~*i^>cLf%YNxXz;*E_EQbp1effwrOy#ssHEHRh4MN4?s9g_yz zEMUajM9l6JYY0g2EF$M!zvWR;ld>J`BJ_7?Jp;{5A#^|f3ON`)HsUIuok`?mj;c?y zESJ;e!kYQYwVo*M`3RCA-XuPDpm64!HCD`+2_4IwFbeIjZc>n5q_7G*oj2G9#+;1t ze(XkPTKk@m)nFf`P}xHq#U_ic^tE?j?77L5Txz7e>S+AHwVZ}-(P14aqpV*d?`2%B|18Yl^mfri(X?(K{^%}RLMeZi*H+8GUi`RT;o8uuvB8yq8*#Jh?7@hFFj(dPnlitfMJF5AeGE(cAk`BkE>+l;2wkTBXYTURV}TZ%=Rtvh z;tOuUNPNaxC!c{O)S44JIxaig#+GyR(_C~8PXE=zzqUPH5Y+Q7eM`~0?V?evza~+s z1wPQ7+wE^6Sg%S z3OVd0hjiU^vi^g2QrGg?e$Q7~;YrEhj{>)REj|&ZDm>K>2ZKX)n+0T7($~cYB-%yT zfmwmp$4R-c%N)?4pdd=09$i?T)MO-*-RJf&vZZl1*7?t4Ao(YD zpQ8O~q^?VTbhOYjEc|za&1i-fgQqmIEl-wBRFkI#Y(v)}74U&Y()D-p;I>IAITT{e z3(ml;R%c$PV|vARab$!Vhm8oHZRm!fsz1EY{zex4)2HZT`9XXc93+b0zsA=HzqAL*9*JdOyt7JWQ zFb$F?7R0TC2s7_Vm48!2-2FRvc%9m+zpERL34n()ihoybQ>xq8PJx`r#3H(57Y05d zv;iQ*;l1A7=n5tNuhIX(h3~cp$Z$tM92EnjFzMcZ!G6vbTCcA*yeMmlrF)%alIH5| z`*y-|=aaTW|4&F&8*jF*{;DAiR*&*4uTJtiZ z@Id+AJR_xM>4F_H(Ry&7%;Fz5J}#Ex!LB-;$HQlT#iP#e-#bM&!=8;O6JZlm4(dun=$-WJQXbc`T8Mq)8qaw>1$A}m;M;aUwM z0@D>@tj%n^Lc*WlI}d8o5`L0iE885ZF6o8F%)JP^J=zB1HwqJT(`T>ceQx z-4@>8BaCt`GPsE=n|8j2;4jk@{TR7V$AC?n?`CmMHp*QY-@$AzJ!_N`JO-JYe zi-Ba1+567eZ>l3Pe`{Zt31B-igA1lLRC<#)TSo^LGeYFgl$z@1f4MhUaS4gnHKrSg zYD}J8o32SG{Fbkz3MEmiqIyvB@q+3wCpJYZWjMu9Q>4^WTL%`;YtY;vN0bWZDq9N< zN-(G5vZ)Z(@h@%Q-vC!sT*>JO{RAWTu3{k8YSb#sO|_CjlQo^(eC_C8HaE;cwp21@ z3SugMJikY?6ZDR=k}(gdDJ{u_@y(~1D5MobIUB0Lk?zbSpH9$C{}wr#Evs2Q z4_7HPUqMW)qmlp{RmwkuU0ab~fWar)RUOT8g?b?G1X@EL*T|8)zSijB*qTI0ZY(ws z#)v5F$@;ZuHMF#*_$McL7# zDWOW5z4-ZAnXMtJZmB2gl$Act)?OthK*~F^_*^JU^T6B#*-6?4&|JO87-y<%*&NMO zHrrsowr!OhMPC()9I0Ka`JTHUUT_%BYi#LwZ9B(A;VuRJ2ru$mMmmBjBH}0r-U%ar zZA(ff?5A1*ghJpFYNBE_^OX z=!2#d*6VtB)JM=s&6eU+Hm*|?C{wcQABw!oz`Sgup#aPf-U_PFYW5o=XV#TtNy$ol zJyjB~=bmg`V9b4HO1;W))-8(yM>9BK5b}}D&)AV(oL6HT;+K59;6$}@`;OJWO@?p& z(VF1>R$~$Zn-_u#;!xWAofklHrT=?p&b<|S0w;)8m(hY-(sV0SehE!BC{-}^GTt}f>94mhMJZ=J$SK?1#d>O$A zhQbdpv4&&Y1&jm?;zbEg3=x`1&d0Dy(GP}7FxD9p5i>~hf1ZjBzgg4ob6ISUPeT9U z5FCk9`?W|p!*V|Zn-TV zt|O8eH!&87AD{$akZJPj88%aSy7}?@f_!wr*1i`9x##!bSi~atMIHj-`pGE{uJ<(= zQ(zh|ZJqp(Hi*a_=qj5JsC&Kl?EHa(rxuiWl!n*NI5;^brcem|d{uplcKui}U?|MK 
zZasATwAgYDsS%d612>-SLO{SpVyPA8tXke@|7)jA$>N!$7Xz=CDMWBw8Cbd>1cy-Q zo(~kEAR7F)!oXo_pfK0M>GJku1p%98ChWQJjfNV-T<7#suWc^w1dpW;T#C?OxnN{Cw6@mIgSQ=N)A+fsY zOhD}}7q1l2IY>()y4Ed<_M67bq@V<@_99MjR``8!{4fK5i2hX>I}YDSiDeF5iH=zn z_58Uf-_Bmo^zT{f%{u#)RN%hoXD#rZ(P$qblDI|4?;BPC>YDfY=Vp-PfWE= z{mSh3jPpLnUZKCB9-1iJ)O`XwaZQSNH$nJ$!R7MxR(L(}`u?uWy9i!hJn&HQ`Dncd zgLuk@n`HIX`0)}r0EkaKJ_JJ|oj_O`F@D<~NI>;KD5e(cEEt{GSrWOk)4Si#DJ&H~ z!lR+jg8e^?toO&#XtRPSjmN2D5NI3>NtfOW1myyG1-0zpZcGmtkDGda%Zu;_jgMn~ z;_D?W7EOEGl{VqQ!aabI3+5y{RL;=%Hdn<`+`XVjC@pGA0gANS9$qKPDR}BI4PiIJ z5WorKW8BOT;ZKv7J)6RpskA8ZD~Jiw?;r}A5{<&SMKps`W#hi%aj&K>#t+DStiSsY zzT&g^nNV8k<*L+|ktXjEW(9A<#11xsmq8G728wflEhsJ(%_}OCGeKeTxz0YuRvc9< zO3bV6W3_ic;*Ekvut`UdoD4y6(c|S0O5{zO!{OTULsAusZ!SS$>-nMe&456GZLO;X ziVqd>BLu#7kcDktT?9o&N=~NJ#J~o^&NRS?I{}UgqawgVIxdj1silfXO{6$yFVYCD zEx)yWXjh~RYb;DozkxMG4?I}-rNbKdXKk)8I(IETzJf%eVgOBrdPM$+EmEiHn|9T9 zYQx_j{h(qJH&Tk6ok4N!Z&*~?(a6=?(Gylxt2HOfh;(mE_X86(U0{|3Sn07 zA`n(SHDb7)#YWd@bdn~*sai6IuYSfZJK0@t8NeTQW#O@%XD&-5J^+=f8* zJ?C{+oTtaxbbA8Sy_CE>@?rNCjyVnQ?TzH9xhCl-_Nl`Zy2yO4u0#7pr2Z3eqz0_1 zhlZ~JV6F(bFdf?odqhP4)fqV)dc&+|(r8uN02=`Fie0MH^&*Nod1@Qj6v{k8WYTlS z^?m}+GJmDA?NjJ|BllrBkq!e43s5y3ml9WyfGLx;Ps8`?eL7mP^NUOMGPT%xH3-e{ z+EFx`KA_AdqhiZ0I+BOmy(<-j=W!aqq8$yQI%>1iyu+vGN5c%`#R+IuY9%-7%(q$D z0+|nJoK?N>*1H#y!xb{&fHmF$221TyMPJ$i1szMV6qZH9J;xNMr$+ZU0721d^x&>t zOWpOIRrDUom^^DVTMm3F%Z#KY zW}_0;5VP>{pH=T`$^l7*1m5fD0`0wIV)Q3#;sd8E>h^1#tDV?Ph>_p@YtJN$zQvtm zp+|YuKr%dC0g~49-_Bfc>^!fq6Hvdd7qd8!aF{Jb22iHuwz*FgbgpB;48_nn-_+^f zz07}YdcCT7q7Inii7=-CcRA5yZ8lme0-Rg%nD76AF|a)&-`uI@;sUcIv^Oy}*yyvU zwr??K$$t}t`@DI8v7?$9THik5%eV4C&jRws*q;g<&&W1dTg;yJbw&wddq&zkwxK)Yn3Yn`tOb%+0BdZl(EV z{(B`fSG(D~(&#fpqZVo?T7ia8a;fl+6z`C$4?7h-{Zk}0Jy(Hq*vYE5GR(`hNS7)p z<|N-mNB#l?sZwWmS~{xY)A;XXgqZyl{@fb~S89A`+$NdX z53aJwXHUVgRuF0@SJ}D2^{_<7a@P(WT~U(RxQg}`P`DWWgz#!PqeMc0WO{%tt@f}| zop7FMF(RF=fqX)2O3^4eywF$Zq&Y!#xfyXBD3)y~aHlHua&(T!W`Ly{)rM?`z6^7K z2Oa?ab3JIMOCAh;WvQET-|zrZqPNZdDpBn_a4^edkc}`l^o_^dpkT!uFqo@u4=mvT z)h3J@Zk)<+?3)%V!P*!1=)=9&f>K=p&RfH{nWZpVAr!_uAf71$SdlxN0eSaS6Y8Q; zEEx~u(A|i#jZ<&I{4^6DhF|Q=C^=}vkvh+ykNa4?obGt1=j;MQcqyBH^~BH%%I5^` zcIIJyC>H*k6uE3JI}F5RV7uc~$Z=7)Zq?glQ7?V4fNHhjY9Yy9RLf6iCO6|*@bwJ~ zxw;&06~P=$=)o@35P6@gYc&NwOjzn)LrN!pa)*qMWKN5d@Nr7Mt&%??Zbvq5q-;Kr zfBl*ITmfG=uKpruSCB|`?+jm#u@N$;icUShJBd#3iVF~KldU-ua*TsDI(|So8AnBz zYcwu?z`7Lv++gudo42)w;iRT%_l0Sr-wnKmdSS`|8#r^l;!-$QZRV=Ec3O`RrJsD( z~!lFV5ZkBk$g7VvPExWwkL3)jnruffBW zxodn6uKc;j%XFemz~$wGo^e(?np-TkLl5ipl4vFTq;_q0&=9np*|L`MzwXbra`>?E zo*+6xRqV21z>gi5YG$*jp}p%$YK*Z-xIQK{KA}FAJuXU^L0$biDN_$j#~xZdGl9P{ zPd;#**?g#}p=4&s2i@5l4z_)#URuN3Y@zO5oIOV5R0vXg+ZE>u3(51-jy!#6r0*g1 zU!Cj_37$dPVvSg`7Jx`s3Y=Wc4=O?;qY44#g5*Ah2(iIMqWA~UvSo$ABBBV|k_j>) zf;CC4N(Tr^>eo%|`DG72`ozL2%+acc`Z~S_qR9vE$9i4OZuYj7VX#S`q1A!2M8d$b z%~VRNW^sIa)l>EQ*Kwuojelllpz5oif33omns*Lh0!4MgM+t;faovLD^M&dNVEd24hQqoQ|D;G0n*<6Z!a!nR?&`VJB<> zgv|Nu5YvkCd6nbeJ!EM#=g%a;xZrnuH5)r%{pY}i5XiRfOF7Ru>a$619?iOY|Qg^==9h{+A+>~ zi|sb z0iAUQ!vMX1XjHe3lp_F<*FP$FxRgNkFuBa|3G?#3$5 z5SptXR`-(Sk1p3y)nce`|;$Wwi*ip{E-LlsU{&KQT^2*O_0=uHWfDr^w=AH!aprtqK& z5ol?=s1r`4MxLW+oaiAM_ZM~7-H??#)HTnoYz5d!n<*(hmTXA)2v!`CCt}inyJm7h^Te%X>L6pY-O; zkt|?esntP4m<-tc1b=WnUu)DZa5bmxRf03~x(jv>V2C_e7JPwl`8Z!|ZgQ4DOOt}l zFsJN&2o)1;KQV(>7Qv&&`w4c4gCZZIIp~UnGr9>p_v`?E(AI>C2HmJ@wQD&pJ$m>- zOYROVkL(^3Und=jfD~Gs;yO-0$4v3lGFV#mQuFl>AbRHZxbcsL8M#iLF=Crv%y2yM zpG1pWMP@&@xQ}c1KMw>-Wv#sKK^(DtIm`r>h2O z=1f|mh}G$*IE^e!Jwk$h4ch>Qx}V8 zO^=fP*dT21(5U~Jznw#9Xi3HI<_1S!NaSbn7{}!s%(S5WNyb-Jbw63zc8n3Pv@`zb zCOWyS_wsw1zziF2nYIh2pUT)7gYn24Qes>~O0d~GvOCPm=)KbL1C!8ZS*reWAgEsT 
zv!{PP=4;7b`3jY=sP*J@AopZsSuYs`)wMsoqm2dEIm^}U zWQeS*rBJ<s#Uo+?+F zdG5TN6XB2600`lcMA3VW8T5+lN@8asidIe5>nQXo2*{Ix$RG^kMIccWtv(u1Z;xit z%{5066EuZSS6 z=!>tlyE!+|z3>K(+t@Axs>;+RvFL%px{zliT>p)IaN46Z#?dZJkGsel!Zvb_vD+8hw1{12=@Y|ZCK|wj1EUS4-%L4<&U&aa<_~<5&C>P!9Zl}(JMlS2D z<|p5V0qCJToWb13zXxsYqPkonYtph`NDt4#ejo9YEI5?66J8^@uDY2#-a{fL&S6$F zL4~Jqf#-VnaWP;`df>avF-@-oUtt220Tc%`&Xf-IeLh`M}FSoW~cH|HnSlExh)DTk|?Id~Y z3R)6)NROR-Tn)$C=k5?&#S*|^-*NaqX30BVgN~YcwvlCm8U0@0cnYprHb@}zibwSA zezX?|9wmqpFd#Jehe(0(?Jl}2K7guEAZH|`sDCeY{Dqq%&0$4SR%_6&prcR_SAxYc zcK64VldV8Jxw=_eq1F9DNlU-*f7c{T-SKI5^7xW&!Jinvzvk1ciNHVji0 z8Qs}22BDKuiUrC;3?q|6<=eeVshRsZ@_w)%hbFx=qk1GFDH;l#OI3U?{`!zF38R`9 z#kz7n$-z;nOwEj>Q--zdsGJ5iOANW1(LKp{F9P{!J!4_Bath^{=!UvSxdt&JX6{0m zXp=8m`&PNZdW6zh;Pcu5eclyc-$$U`+bPQI02>o_Q_AcS)c9dauj-}3_x_gbgNaQp z)33|V(Yvw5L93(g1y~^9E?JRBC(Bp-cKLOSosNp`L|9AG^q@}b zgr35WE~nW8yTdL_ndyUr;@<9oc8Kg)N;!&0E4M{?Jq}}Oltq=O3Fw{GtJ>e&{8kP& zJM((QLTD49k_Hxn8zypg6m^w33nIoL%INv{IH;L_XV}!ah_df!Wff(Ni=!7B!r?=k=U@ z)AtyR9|YcUa;O(L$TSx9A@T9T>EY*wGC^;#MR%rGa)&Q^zh8|SOLS(}Qz|+X&|IfO z&D_77DIW6%1q}4^o1Oqa9J4Grz79yGcIT;Yxc4UI>7Cf;Z-|2jSx$NdbZfIG83L7D zC~vd-f?*5>!jPW#<7I3&oR%VqAX0_@A*LeGd>0p$$FsQ7E&Ukr7V3T6$I;Z`q4yw5 z_UeS9#;PJ_<~OL|{5BzBUd50Y$;%ndjlIt&Ql!fY4eKkTBaqG8BK(l^d1y@|o8QIz z1wm#xa00TG2l9^V!6pVvvsSNBUo^cBAal1|;gg~nWg3NvXW1R8-4ilf7h-L1Q6IB? zr+7k!hfT**c6gZ#=f^?ZG-|b!<>+cc@YZ0ISwj3V3U_?rQOrGuUPd4nkM?|qPF)Tu zb}K{1;QAV6B3nYPSD9FSfC##`Lqo<4LYfAp&8p!CX*d}xJ`X!mxJ?j75YcTd*Vb`` zFkUhgctGRcT!E{!JJ5B!9+0%X9v0|~EDrn7`h%meBJGMgbhok8UtaN0xYQp!2|HT@4%dJ(ZE-=-;U<@ z@4Zi;#lu63{Ux_>;dDW(lg+iZ*eFHub94I?j`HKl)6$PscU&Zt8tyFXDu79ZIC2sL z*zvkzGxqm>XN3T9YNtj5tUA2we-K-0pmImS&CrG9Z9O-WnoGR?8Y_R2iusU|S0``9 z1wa%AF56-o3v%_EbZLu1kOAuR*6j12a}SC-|Ge6Sx!J{c7a(qW;ByCqKJ`07U>qUz zqBZ8ur?UqB;BO_}y@McQqk}`l66lTplc^oadia_?YIlgmA;Xqyw{FWl<;V^gJ_N3H z^{;+LRRZNXWuZk0l-Yj~+u?D8by!B$XJ;zyup?RSk?ZStcrGzMenk$4ssnY;&H?=t zCLEgF0|AN#*94H#R^c#sLioVc%8zp6+01;oVJDN=i&6JeI z9&PeFH#y^(y;3)?Q`axhho3PTo2cN8=|>A1L>{x5o}W4V3Ej2SvpA+S%Zf30bn|1E z_t2~HBs7hc#KPA&UFwBW2qUzk3k-ko$QXuv6;551G3%&L$-R@I?P>z$dL_rX{VluV`L@&{%X|{1rLW_--JT-rB^pQX zHG6IP@#sz=V*DXpXglrRB*wvrrf}n(Yu#fhl69r9GsCA&G1KGZz^5#XBkt_oUX}3bj1jF*R6#fjEOx@j|s>Zpe)eK&%W#(Vg^jT<@TT!^xUM zJ+jn)n!47dKpS}3^>RmP<^;uo%JwBOp_m-ZL~EetlONN?0PtQlnQTmzQFfVRmaFV| zRL|h{a<5&~nWg&f)W7fJe>&j5p`qQs1h;KI4{fH`);xYnV0PtBy17;7=6+|3WpH@H z45k11NB2*FtFA9@Z0~2i_s6ELfpWFp?G*n2`mleNO*1I$@W{wc8tK>j)R!mRys+|L zYq(}rxaWr|pUA^Q?Zl)#8KBfJu_Pa?j(GEhjzgJg;)m;TXH&BaG zN)`|2C^M0igyhK_QacYtIjbVZ7K_9GzpUnecy0ukpJalmu2%SD|5-}@AA?c=eB3}% zglaiLvH#s`|JLZ1*)1GP_>aZG-;4jZA^7`~ z_6GJlzZtqj{QtXgVq5_E5c;1OLUDf$>Hkw(-?&A;!1wYb)=n1<`%lCDuclLegNovf z0$C!8#r^MF`8PNG-#cj#!FNXw5@y!Mm6IXSO=}dC);mu6zd!T#tl;0BVZu4NAqiA- z9~3h8Vc6SJD?NxTHZ~+fxNDf~`=jS~u74ydroaV9M;o1yIQpo}bHjWetK3$I!rn6% zm**+asfJA8pUG%{4ThMc*r57)ELOOD`Xclve5%x4LX++_G(!21I6p4zP@(FSJ(Nqx z^yvd9>~+426kmUX{(Pq|;7OTH?TVd`Owy^Xu!wp?LJ|^%Hp}i$0uo)5aE45g8V|q* zjPUqI}ANdVu#u>t){#kTa&vyF5>d$mkJ_ zX;Txy%|pv`Q31J*Op5uN;DR!osUDh$QJwb-?hya^D1}*P5Uef09;ZqJjX#`5DSu}I zGEmS7);%utVzeg;s)RND1dtFfTAs&6!xCa?-0gQIM+6;6s^mXiAA#kYl!VL;%*4dB zLVqH6yS_cOjQXn?@mmSJS*x(RT)to`iG1ElAuIhTb*Wn(npA0pI?F}Cr#N9U3&v6L z=3UDT3J?CCVgNgjnw;yA5K!DG39hJ`Y<+e0u5MGKrka}r$7QkxOv_!OCU}pNSkk=| z3FRh%qDUQ0U+95_EHz^Yb+O;XdJuv9@^T%W2|9dHyEM3M-FhRsoSumG?Vij?&0IT+ zAH#=(vM!G@JUL$F2m>kMGf6)aTc)OhGRgkiuB)28CA{yFArF}o8is}uCkk+6FKyuC zX{)MSwWc?r$jK76opo2-^QeeUG6L?AF2Acp_6W>S<-~Iw0^M}Rm0Q6KjTXu4X9tQ; zTe$N*>CQOK5o4oP_hY<}mOJQotu_T28CkOQz#9f4OFD~IAcFN~hG?Yime&(mnQ}#% zDB*YRXI?O;3F74M!E^X8Up>qXFP6ro+E-WEmH9xt1J=^gQrq)pzpO&>=9Rp-hvx&h 
zH*g9M*!7mhFJvhxDpekiAd%O5f9BTBFf+NHxqv%D`(p)cuA5Rbym&3=0=R>arze7X z>}{K#a3bN1o5z(J)T{T4mq7nWu8Uz01Xlu_D0q6vE1J0(V3e>%Y5|}D%k?e{Bu6Ks z#Gaw&^JTkx7e$1lpABtEyS6NIA~{H3R%a*R`-+Ww&ou_JuNpA|v#1H20Us0mb)h0X zx;6yP`C9(L;;256j+3K6@QaoL%tTwAHHbNm>4IdNQVsy-$F}{2{!428nV?G$6Tt{M zlR2i%5hB;W4Lklc@$Iq9M?a?N?`qrkyC{+TUb`CGso8RSl6fcpj9S7((*CoJ$}VZH zz72{~B;kR6K4Oz*{<9f1lft5&bN()9TOKNcP|~95 zL$JZx-IZ6_OKj!TtTW|!-1msBvn0qBYw377zPedi5L1mquiB%P)F9q;fAiAJtp8DR zGt)hN^n6}7Wp{C{a%}Al72HmF>QT(?2qdu8g-`-kYCEvC5oYjJg{Fn`MvLl%WZg`L z#v_^@{A`+t%;Oc%zVS0ZU(?s%OFJ+cF1#&y-4{litAFgWxN2Z#A8(zR28mAj)@k)R zaHnG94t(`f4^3VYhq1%Pqi2L6_oP$yz69>Ww895tVo0XNtbz6C2V&?t8;FA0m2YH6)uGwri8ejMJ(N zKxN)?e{KuJ&HSkvC7zHt-y}rfGJ^?3Bc}@_kJo0OPCc;9`A|Kf(sr(J{TBxX!cBiT z9GGi4CB!cntD!Cy>+ZuAa-UU)`YoPqGXAL>tB^XL>MfYZ-D3)EMz3)VD6w!A-IsNs zk3hMD(5aYu-I~caT*lV)E&ewX6@#8Q#ZwROQx6+JmV)iDN`qqeboagriJH~Etk}Fp zp4|#zi5U6_ZnJ|v4wS73m`%=rG-n*Tc_`JioLfDYJQO9HhPlRk(p(n6tQKJ(lite3 zR|C^e0~AkxM1n_PPo4*mlw|m-C9%NYyI5XW4N{q$*()byc1KD+gI8<8wnwuq!r4&1 z(O8o=ozeBtSa35b4Tc2Xi64-&5UbZG$vSx6llbUlPu(8(T+b|1N_0h1S~eZLkY|&^ zo9!^wUNS$?*|P@A)lPrRZA^EIO>`c;1>`P3eZE95D^-I8!f*u|H5QTc&_=}F^z8T- zyG5@ao|e&i@UbVQ4r*l?q+CqSK6_}~JaSC1u8gd5A~Rg61`PB^Bcl&Z6YE=IV93z* z@?Tf0>z#sB?O6>f2)^wih5zQgjiH;Ks!R6yjv2y!ch$iifNcG~vOtH1N1k5x!I18H z1A*Ld#PBI%ci1I`W`fzNJaE~)zk=CmXnnFfyf^c-xN*1C=2)8T5;!m6f^+6lZq7lZ;@4E+I*G2!Phn@(CXw|rInuT(c-Tnli_b(Wr+-n+loL4 zwm~9imb!6?5vg>I_70BRF{C~w}3!(5P?GxOV{a&bsF zzYL;%%KvnsD&h8m6Pt)cV%Qo@JkXa8T?+J**-nf zBq&9GxM`tf!3sS0O6^uI`x_pz5=Pz}rc`!+)+QIKNyfXRx^dg2CgRYra=EK3A9ao_ zM-Hra7QRLcKci-}sXD8*LSihMITd7M?-?@nt$72mI)XD0Q0T5;9#kIU@1&{`89BI! zEny735w=vzqnj_XI=ew^G8F{hbS6{Eg_GQeF>O~|$ERn&2DZT1LDXEFF61FOk!MF8>1+C94Dr2YFP8l22 z+Ut<;8lsC{>%E#}XdGuB88e$m1NToR8)eq18{p*6lakPilvhyro&`u{4*6!Aj_RKU zh%(kkc+&qBB1!@QO}=*R8w9;xet0S328ZQ#w95>%)xX%p%?1a#F2597=$=?)^EJl*MqR zhjT$^SM>&G-L(TwP$2O7J^@k_XeMoloLA=a0}zmV9&y%$u_gF(>df;jrq~}Cs-G)7 zUS&gCqc~_zHOEzZ_~Otj^}4<2S;p8QPM>)AJYyi?R0PueUJdg;?#&R#Y$gf z)>{IDCFY_G1^ak7&d^p#0V6i0G&3W6u1Wt4xR+m}UzPESM?EM^4Hx%tFBf$fh9pk* za}*78dgF*-5$9-Awv}ew@3ATMrkCOt1Z?S1#Mr$0ai3t4mbpz{>eU`bBevE zp2mmkV8d%?lUYDbCAP9^bne~Q&Pw2zA3-+pV~dbG;90Poy>qiVj`(*{Dtd5U(Z7ri zb6Q&=-3Goen)2YHC)fJc&W%Dm7Ms8=R%e!$0&6iF zTS}pR{X!ZhboY3Do9=gu82(6RX^veO+_L<6)uL8_yF-4|mqecEW6{Lf_Y#A&tXi+O zL4G_bqyRIJXUBW7bzY)O68v0^cr^8p_mC;f$ikc}_l8A;bccneUuHmzY|9-bu!_zS zE9biiqXc|n?VdReut?ebi$N3Od6DtTSA7BX!j@%Izx_hppUtkcbZooCI^+0U(8Psh zRoYt+#y7fEIyZfOl|#+7YV{?uF^4X<6s(gwdvQrEo;w&pGIf|I$@yvTAXCG&<(sfX zMMd|k$^-W7CwJpzD)oZ4(x1k{mSL+H1I@Z^_>H*2SX6?U1^9ta;+6>dUGrz!`Pp`V zk&&Ml`ffLn?KwQdJV8x$ z!t{)fC|hK{-%~o*Ex&*~83fVv4s^m=RIM3EB8jp;?%+k@se2$h0G2row1^uUM{Ner^S>qVv&=EV4RQr825FSgt(g&T4(~FSpmGchsn^tO z{8|#Evlw3fNEJ^klp7R(SSTs$NiC41FcD>7K3ilw>Q5=%a4BAuqoz-z)KtjQlT5w& z7}mjNDMu}~T1J(%>~*!g*ep8u83@!Y-c?vIza^iisJuXmlnOC z#{9-w7QRilOLhlaf)NcJT?AC27io1kD&8fy&55VGBxts@DaGlgT(J`+{_r6*?%`K7 z&>_D0yE1Z2`yu1-px!)pTv7`#x{!}9+At|r%z6UKOt=wnVn^Q^;$AX8S;eT-{zuw!W9+&kqAz1OU{qY)o!=^scbX1KMJ18P?y~D0Y07|0-6{Fm!~+KTp{&V zGJWy5aWmvbikP!Fce-y$@QO|Q1V+|CrC*5{m)%-*xpb-&yYh;`_d?%Kzy>AL%m_iNkZ}YYAAQd`sAMQO%=RPeOAF z3pFFEb;hp^RkCyWLG%JZnT&psf4i~E2m4D?Q$9vuBmx(>ak(tgo4KcVMWFMN7GF5m

    dFo26Q9_E{3{`|_G8CPUnFSAXLg)^%q(uYz5qh9F?_f_yb zOgk+r#GLXeJZJh9x@%*DY>2E@L~4{(m^1x*HQ=Xs_n8ky5iN|DQY%9RrLHU0$kRXg^-cPp0#NQ zH<|K|DzTHGN>e{<)H|U`pZrvc7+SofYj#310k@Qq0nk`;{(tCVUg1LwLl#zr)cD^S zu8bq%^zsgfSz!cJ)q{1oL+4JMB%esmi0nA3(DMqIC&C`uEy&M{krbQdT^3j#Fz^(| z4DJzJ!+zEl_}SP`5SX6KbhPaPafER^)W8$FFWPtF;Q6QaCGt0jlQloj}vQ zC7`8JR_fyv$Jt6h+0$sA&&C$s>eHFA0-}yS&g&zy-qlbp&kn|aM6Jp{&GLDYS{>n4 z$NtN$(cg}8cUoSSOHCViM9fi5S z-@TZ8gZt97VIpxB*BR$;mn1)U^ZuaBT2b(8Y*a^_wiEfaw$zuY5!uI){Flc$7g!B! zE~7NY1hcXm6#PxtCqVqxjOAL@7lLr+Ot5&==QN{NA2augHo*D>w@+^JOPoF=?2F@dd{;KK-zk{p)^8*6@;v?ZJf)*?uFk;{Zih;~LUK|mf$6(SABg%FkfuvTuDNn| zn-NRYrxj9Ch|5O^X@7qR%sc2FU@t(}g};w!0*j#p`$J*dA=`vDKlEuB&QQfoO-NMO zADHlu5X^`E#2=}0F&2G#^;Ou++@dE&2J9S8fzog@SRQxMqJU_@{gtIEH@lJJuYlKv zK~7G=11{oQ)m%+V9>>d@p7ql=odhbJsUbN~7e0@3 zp`wHIAE}#9YBS9*g%&Rh-_?Iiab(>RI7U)ZN}@;;efV%tB<{d;AFZqsaWmE#i#GL~ ze%u5ZP&2-4i?mVa+EI?Pje7j~vxVxMapWn|kt9H3n>-o$Fa?Mo`N=z*U-;5QEFbh- zl2UM|JdUZt(Eh9|f52S|qgX2kLX4h-ia7mR+XYkwNg%m+Nt#nO8XQ*zW`wZV$>b)8p(0q5C2jI*S8F+FvLCGD1HVw{|s`$fqnyJegOFXXvmaF&}S|9U?iR*@g^kQ4`O+Lr-R!H&PV=? z*(K#pWHAS0g0}(XQG9TQU4`wHXT-%8wWzrj9oeRjRDsf;;>ZTiKv`b=8++Fq>6K`y z#Q4}vMm4%LSso=T_wR^doJ#~`WN=-@PbE9dmXfhfRdcT=V2)G&!p{=Se<;8(d*oKi z@;J9Y5M%2F*=5fHks9hY|HIW8!6n+G*rya3qCh)=17q~>m^{_U8h(9fy*5$9fZiPC zOA>_Z72krwSbsD==Q3wS9cnaJh!(6NpS-R#-NdX?8JRNkIhWReE|C!9-4qt(G^UX| zm)d;FPVGnT)%#HK{IG9NuK(7EXiM2^Up`T0NTzCwJSH zn@s^ODJJ=U z*n6ia%er=3Gc)YSux;CRhHcxnGi=+oZQHhO+ZFl!|K3%#+o@{R>fq{Jtc!Iq$82q^ zG3J`1zt7t{i@w=Y4|2I=dc=xoDc))EF`B{pj9+@XquHdv%5$0tf{0^ChvBNyl~m8iV|@5_>O^kPr$x(wFp?b z4aB~T=T?4I$}H6TurN3$<$vkaS(3>s9WU~f4J>UrED3@+B0bS0aWt3u;q`V?afsg z^#Osr_9?u4w)*K$|ti7%jH9_F!k{+BNU4-ToDg$Yt#K%9U5%3n_pNjJ0Mlv+2c^a}HA;htYn{zuu2BUL z)`EEDkGdj5oAFR516(=*q*0_xO|H=(PVu94$&gOCJ55S2;7jc`4A7`)QMI;+SK2vqnNZ2QUnz~q7>k85Sm~PIVqqXn(X&a&;0L~cn-9Iz%CmT7!ypiG zSnZf&Mx)8n#i=PXrR*Y?1AQQyBhcYGhcv7T`xQs))}=1GgaHK{hP+>K=`zA}M|RRp zU-BxK8FQmiT{s18+Ta!bsrFl#lg=flqx)hlhn2&49c9vu-Q+2J0O(gsEQ;ziFZ@GX zNBZN5J!20Cmu|Di94C`j@~{UX0D#YPLXBETXDe3VWX_xs7M@~PYpU~V$oeV5?jjODk3T@kZ|Ut{;1=(lo)?N zhV5k+@z4(@?UX6Eq|2@4H<^E2B47JL28UlMLRLX8f6O5TN8(wZWJtz1NHIWlI>O^g zRRH1XenZ>5TtWpXCYggCG_c2`oG7PSaR{gk!vkI1F`pX6=lB;WOLvG_(kYk;CXr&L zliJUz!;R-;p_nUBa5`?>dyuzZ7UeR~-h}fkPmfI1yFf(r0Yy|c!p;E6m(v~MSE&mc z)7Rlgb+H?b$G)hCVmcW9>U<<88e5dA9M>r5iJb;#o;+R3-O(l|XUJAv4nmuXaR{Ab zb_o0EeeE~|=5nqf~kfXrm$a@6s+$guCU!Ty?@8k49h}gl2j8{&NG~h)a-vQ;c zCG9b80`FlJH78S?c@1<|0@Q!8;L5B;vN=X&zFWjn;wqC7R^jM$6i=SuicKCRm8hpk zaxRLUhg%mvXL*m)9_qJ40>x4v%MArE(P?Y3>)GB~53|Pe|8ZVw)#^J@h0KMMr?K+RXQHj+N@U_H8r-h;BV-5J)z_Dh!iVY<{sNt#;Ac(Tyqi~H%u{JI0 z8BCen!TAXZ3+oStU%U1VkAztN0%eit10^hqalL^r;MYGASjTTySZ19_<6 zJqgZ|_UvJ_8&op3cOR^-VD=eBO``P!rRj=&{R$q1%4>O7c6pg2-8*vPkS zETe*?0-Wm8lU@MlKH_~lT`0R*P}bi0{%{3p5F5?b)VE~@HWLOu)8SU7A2Eab7fPW_ zfcLnn9Pl)zF)&*R>#V9CZvFW1IVJpVbr1B~m8NO8g2lDE*s22Eo zc1MA3?G4mWQsv0h#^~mC&*sNENGR@_0vhG_7t+9f{}z8@mTm;LgLvNYI$(u%6O^^k zx+G|Hq<$VHL#lLAiv3dpPge%YP2TR;1)rh2zuDJ7$gm^Q4Z2Wg^`&P`y4njaKCZSf zU@cF@o?!Dl7w9)5RGcCV9{M7>a16Jwj}1v z05A`OL~jeO$2Igcjz2saB^)XPz-JsNU6AEZA`GjG2oAci!r+}vw|51GUhlD|O6T;Z zymphdOIW=laP^6vJ13;vujj+NR~Y1Wn-_yxJY36MiALnO`+u3zfd_U^uV_id7$G?O z%CdOv6P8aBb0O-ybqIsV=oKIA@#OYKI;zku8t^Li&p$orbX2T%9U64Ys8H!sn-0R$_+eXIuhLO1;?Ye zk)Rn+{>Nz^U~ZeV>K{H!Ks#95drrFG2NCT!J>w4mIRv5~r1bxR%%l_0+B@Qz#qx;Y zH_e}MZtRDlMHRK4+N^Yaz!i-wn|O=^W$*{RT|4q)m!>Z2TdAZf##ISX6As8a^iJG- zQe%34IJ0h!HIhmSt&gwOxY(MRM5IHC7E_Ao z7NjNv`b&;^p~G;1XtMwF6rq)cdw$-F8u3cU5fv3hNDvfVV)?^sY)qo3uYdFTcI6FN zkmVkO;DC0qPC;z{7u=zS1~QsV59#mk7tgD$P5c*MB#_bm3a^CK=IKzP=_ zThIS<_L~|1&Z>CbhG7K}016v`p9xnjj`~ueu9yKKe$fn#9YibxbM~$q 
zYbTHp#N2$HC=oq4%ERXXjy_@s*9=f+84jcEZB#nbjV_aSt&<&|BDBiZv!-iyKA{EC6g4-s1)JpVCu~9V2nG%qr$EI3dn~^`atYyYI!y=2EiEdWlm|yhNQiXR{ny0C zZpIA&0)80B7Z|C&LsiFPD*rrLmdEV~s(tjxKO|lEGMtI2>5jgPynGy095&Gu^k;a4 z>>u$P7tYW%Lx?eQ#aD|G{tf1X7S~KyXv%{>atRl^t}ORKVkhS>ydp?bTM0|Q@cFvr z&go8bXZ5A&vlWccY$4KmD=N5n&N`4F&+6`Mk1*4w3z`t!Sts>-^NZAM;iOyaN)zyu z6ujeNM_j{oB|Q#}ICFL{Co5$4s!c8m+?vNOzT4KwBRoA`0K1M^4C?s6f zhCq742rB!Xz0mvmQFUWVURQXwd$OoZmxcQ15$d*YE7bnC zH#jxe>!$4ic!Einw{w+n=g-IY-k!lsr~MA__83?ZaJ0auq!SZANFntkJYPNL%dH5A z*4En5ZG8`?bJkam(vZ79tD54r&1olj)1hCD4-UX~xMXDtBlHqZ)T>G0+Nw!C?s|&G>tb-ayL?Ren7+@RKPwdQiOj1Q=gE)Zk?l_lp6!w z+?vIwZfKbf;3P*cCPciWrYfa+W9MBu#QN$DcPpB< zh89F2qtS7op*$m}xZg0BV7vn^50`9$ClXv>7lI;nj^CW|nfQ<@pWD=J&%c=R&h&9W z@+y0wS%~&PP49l;RLt%cMMXDp{E3gJusPo?69UDiPjCVoe+tmZahzD(WFM3`EX70d-59o0 z%>CP)_O30@AL$7 zrp_?GU-|XCyn}i5_WNxa4lY7foOxGyWeT?=8#>GD1!?z)GB?H!wq0b)&-+dT$$oYI zVG**w`qE;5;_b)U6JmDGp6PgM0SxU1G<5p>xqGKHsoKVN)>p75u)ECjw7iOud+9A; zA1N9RCU`? zCKE28VW?6Vu|?n#j5Ab+wLIEUPvjhM!~VEPFw4@tPxG#ql(N8v=fran6p$d>Uprlg z*a9JW&18cup7MYmWF`Ov2r zfLafV?k@KFXc2WUqqy!EP->8b#F1`p1S^%c1Lk$YRq2sZ~buDoFV(foNnoHaW}kK`x*RSCLGerj#|8(`W}Yu&cK`4eY?$f z;Z$`}w{{R#^19dbz*A{eD<0S|y{qC2Eko={$@m(*S}hqttfK!uL&5!GJ^PA{ zt2xb7_l-lK0~AlBBCLS0B%{Wc({z+hH`BC@{09*W-Cr6mIUVI!*k4#)U#hek%AVj; zyx=-sV-G?^IITIrD=gjSH`N4v1qGR2n)Bs%^8-E!IOm!-8wna1qbL(r)y2--s0ICw z_%Q3>D1zYf^i0-QCEFLZ^j^{(NyU+;?uG=FPhHJ|Y5oYL2xo*jxnsfU zM=oTGQ4O#B{0Rm1EF=FrYi1w&m8a^iLI4Dr6LmLp<3O=#4!IPsUOBEfWWDl!qPPj8 za$ow7)s>XNc?l6|t&Aiq4Agy2PWaAr;Ddebj96LjjD40}U_{@msZfyX&E7Ld!(Z#S zkw9UQ`*2pZ9O-GL>jg^pDC8Zy#vxGE<{=?l#Yb73)US}MX1O-gGzo{%Jdd}PK_c+x z_j1RysNbi>5^!u*FpIX29R6`WbqI7fLg$?CeVz1PI2P)#-oHVtor$^rsI`uemj_0< zc0pwqMI6(CkZ6e<=W~h>|f}Ij5*1xTVjR;#youEueMtHF#mtpLfUnUAaM8 z>K=NP6Tzxg)g))uy|6}qVQ@#=&3vfnO?A3|aLl0$5>)J${-1z|amhvgk3_`U(t1_? ze-L69)spFp0VstZ2TBbxDIe_87iz75dO?}R3|D}WgCwAibSTvCgG}-1C)GSq zG}Zp}WJGZ=X;*IR0Hd;4wo68#8Sl~FSICoSq$={x22Jq5mPNQ@3ptEQEY-l4={|}v zF<^Q%ft>QzX?)qRyav*GCi&#&4Q#dkPGAkEFZqR9_Cvxf_x~!;o8_VkBSAhGDmLUl zj%p0VrIZ*L8kX?fy4OM^P&ejc=@euXz*F4oB}}xs8JYM6BW?Lqq7#u&$d8aAl{K2J zctW3&mvG^Xneh+^sK3+)m9->)39T3`*JLRrzL4uDTJ6`Ai7f<@5mm&%SOWW3JhK%@ zn{0Q>7uQ2tV$IH!l$0x&p|dx~h*BUAj^-BUWj9hUO(>_{qWgRKy7X4&VrMH_+BfGi z?uR&(zDE8f(Wa!@8uoFW1ujYpeb-7W45BXPR#UYA;@8a@y$X`}le9h7HLUkOvd=cr0JSBzA z4Mz_0LLOqnB49JmG#RV{x=BvIaZaK=X24XA&M7piK~KL8vNELi80K+Ip_DO#Ua7Q7 zN)vjL(ol^}AhkxdsgR9iP~oCeU?I{jX68Fh`4&ol)Mspb58caWH8A4m0P$Z`$5Rbk z=J%hndvi6h|Mcc#$&Wh7abaF>T^>3`mn@@JW;mogJ@U4aw9v&u}d#^bT zw*1W~It7wUl`^f%mUMOnyGmb&HY?M?w^u~ANZr|mR*^EuFLmdc%8V1F7#hT?+Y6+| zT;STXu58Ok)YjE3CzMSV;a9ZV0TH(mhk7niPWa6Jqcr)91oJXlWk{DzYRToH>W!ra zzSTaVwj(tN|9JBBN!PXvoUjO`iu6;jC@y{`a(R6rI<~A3dD+Bw@VduK#emqZfKC^A zeNRaQWgjqJ*uEqXdLCNh>2Hudt^Dkn^ZUK_$jAwfkA@~35-RYjk8QCf=X;vykv|^6 z`_NU(^kA{hM1k(tfKM&t_*?saVoO;$t3y2+QJkWk`v3H(>zVGQwPW@;u` zGX1a^N#B6+0?+l2hqhQ1E1b^|xs55VC^t-K&?#ypQ4?g_bWCl(*+p*l2n?-*gKwl! 
zNXZ}an|Dx)ds5ow5uL+j8fX_pZf*{cJ{OpL6g)Rwi^W&^IY{~7*_va~X&!|fB zt0mXQ_DWUDR-MsLsLKuzG6}wdtd0#~-JQV_y{&Kbdl~a zV@3uf=qK`ZBLl%kWB8iDa-gL1oq=vLZhOfGMfu1ho;kZUKSJ`o^P&qE+YhG+VL9T} zpi#Q9`$Lw!#1{?yoZKD4&x@Gf?LAO(@>te8j^Tsv3FKexcSQ`pubFJ|mkJ7w`?WZ{ zL2n^=1{CFscJFwwx*vVjT2GBGk=h=INWD$M$Pa%#@(^Y8u?3tTYEw z0M_Q>x)&QaQdU%+uF=aiDp!k5x%R`2zlg`bx5qUHcK%Hy<5}d#4Z*pF3D#A5gmlhX zZZ6XPRvqHjVnlZ^+JJzX2uR&P><8Y)3Fo)r;|g&vM}5s3Gp>@w!xj*>s59*?!0=HfR{^j}Y#4BL(6HTs-V?c2hd3 zX#iDr(#gWM{CYkP0Dd6E_ly7~^C-U7dGJG9W+wp$wnX~7&# z1+It&NXamg@APq_T7vGPPg2X^4uHAt+`*t}Bte3aKVB|1cyr1rtpB?a|9J`_dPm`kSOkuIs(xQ;rH=XlA ze!R6?2)t!|IN-m-cM$ElNB#KNM``6*I(+;5z#%zqmbe1Ioj37(NQ(=oDNfZqt;1K*n+$A7-Vq;!@ z&^SUnzZ$wFvmI@JP8*_|g4)*dAzeyf*=mHrja`OGxdJ)522}@3p}PH%vY~4;KdC2M zG`s92#$2-iBUeq|oJ2qke>U%*>1|rQ{nN2tC2E2=>Ur`78FjmmbpFw^O)~iQH3vuT z*gVyCQ%=k~LS4}(Phl-&;lPNG|1a}D`Vm=H*m_T2Zb(Jr>&KtSA@z8}Yy&|e+)>^F zQ0zH>elbCI-7Dqd-uy(zUfK&$47+|e%b7*?1J;P!Llw)C1y?hjjSUl%AnIUdKY4EP zc>g-@fK~Z^kn((veBveu!rz!zOKQYHIQ9(yCnzAy^@}5{>8y&Ej}!(ZTh;oCdRy{0 zK8OxGvyh$NG3?u_|J;C&5O<=G(VPx^E5mD?#}8_ls=)p`~a0bv1!lW8-S>)9XRTo z_6i%3Jpn3X1nhkvL3cB3F*R>kBGb?H?(jzYKHb3JibBXd>U4ZmtW)1l)8rCWAum?H zy7%57|Ei46i(dtgp>8)CA=j@w9WDtoodSl z$LzPc4#^ko?Vm%UX7nFltagptj$O+zxwSHmO2tIbtS4u#1gmjR+B`#5nCeaX6So5% z0KlmIb8pV2bcf&3c9QRc&D>>Yabm zZh>N(Mz>x@EeI=q3vDuB3F3oF(b2@()}sL`C^f_GlDCIB_9247>L8tws8H!0xISsNonjzdbk z2*?mxn%?0aG4j@mMKs(Y>XWV|KmaDxQ>!nrj7n^Bd=-Ay_lgUT-cE#jvR(4Z$kTfs0H-jAXEt;Fp$N=0 z(V~8L&P%02aTWOj6S}o7}(plYT*JyY9AC&TtzrTo5C6Gjvn`nS^bI% zy++X$JX5F7iJ&tU$LQ9DmVu%uz(Zx|C+U8Q+Vp?K55-PgoldpUUxp+X3)>`c*5bQu ziVN&x^+G5$*55l7QOeA8qSt%V|C34K|;&p%iTnW}VolZLoK5ZI{j9Vb-EMOCt24 z&{u(d#V;kyXM>qi`8-GS`&-6nK4+DC1mu=EpO_s)rl1{b+>d!Tn{9%Ecdq27DP-zh zX#(07lX<6YT3(to*l`G?i#=&0bR=hM#O3-cF;WG8I3VWIQ&%uU`*fAT2)<~vEht!} z4hc3II6IdQLmxvFuhAbrDmx@LA}!o3lUQ=+59xy3nZFv7e7DB9L5GJ8 z=hAY3e6hyNfOEKmxz#>Cpo?MkU<~UpjX;AVbJ;E5#5eG;EmXy1MPp(OkNf-u7hc-}AF968i>Hx7rDhfyq& z1+L4Q=4LLabpj#DthEJt?XQj?%}4%3~_&a5kq9t`3k4 zUmH{FCyKY=Fv2WYg&9Q>C^M*vcuTS!*u2i~6ogL!PRWhkp;-RLnDrSwt9GfB%ZnNNd-U2IOauN<>o&}UCqL<1(!ufV}yhQbD%UwA3J9g zQ*{kF@f00aFi{1Mw%luGg`p~|4jh%QN9aK*BHA2C(O0Egy!GlXT()E$FnX7Dkpi~g zq=F>?VA!ak3=c>dc03kEys?`CiNh?YoV#9S-c>pkzekCK@N<5ad4Y&}h1tPUg}S|; z-NShnAWA>c%}q~jI)Zbq+O=z-xU|v6lULJ{^*-cdAr15T5So>hskn!9ex}_0-6dDy zS`2ziauA{B(nGzAeF-eR3tYf_3{0aCgwlUUf{oiAGzUZOW1XKgCjKW}gSsUpmHV6! 
zf?}HVfdxq@w2wBVN89BkOfP13-ZC=>^}<@tq-NnOr<;3y)STjZEPweVPyX!}GR~mD zy^rZ0AaOxmji$P=geT_IRH6WmAvxCayr|ElT>}oHgTs4Jd?@A(o)m{l4)D^aq@C)s0j;hLKrWlAh$*DXcTxK zdl&yqC(nXE-e(ZIxg?D=6w4#snx2>mFoyYo`O-BFN&wmn=lNW6!IcHvc=0k@|5c54 zy#O8{-lX9t&5CS6fyRa-?=arEtC)P3ma-39q(do3&hwo<9C3URf16pdb0EaH--dgA z2t3}AkFAa=^!NPq)cg*ABARA;%F)o4`XwzGRto(gPxvC@Q40LR_I z`nuS)0>ofZq0lsCd}8qd!5;$C5Zpe$rQf;d3B_EI&Fl2|NsQsG$7G8|Ji6+ekonqnikPL#JYtT;M!M zjj#Gszrb(Rtg6Nx{X%*4?mk2!&^nPh#N?c#{Qii~(}ve}|KeN#*91NQ$<}|>GE>u9 zV7}|f^om!nmj52(^Cne*f&WUZd;-m24S=ew_PpA`sa)WGJDOi?l#aeW2}WJ zY8kn=r*MbR2Pyu720-AJ%3u$Oz+lbYBfY_+JglT^%^IPVBTplm8J*ix1{!rR} zgH*~UFT?DJD%{~QKfQ)A6{cy&Lqdy$RzXirYJx4wa5V8gA{b={5p}li@1Q^cu$byN zHgN%usKl1*59Ko(G+J-D)@)y83*}fU)7oI$f-`YdMRQ3tXm>3Id8mq^zhi=o5hJg~ zm`x1LHP2_aIpkTJev8EDOb%%FIUnna?F@ZhvM~3w9L^LuaER}bZ-bOIpuZD468YWk zH*k}a_u`D&Zh&915w7x_T<>+=zuE zyD`vDqPth<9`nn%k>ZjyF=T!NyB;c|XOxvj%7A)$oMt{3dH!i{nq5<~aZiPLpbDU( zTt_;UcCj%rDe2lXLkdG*>2b_9N5V&a>{ z-yrN-PbJ_Lc43Nu&V4w5@%Wu7oK#RTbSq>e;kaour6?i-`t4URk`)MPYHG9vH7NHQ z7<6-kT2~Ub^VpQ?Q;-tza-xkUk2n^Y;J_PehBw zV0&d@JO3(G4+Qh#nDHU*E>;QpRhU%1Cm_Uh6=rIDk>_Aq8W1HrJ8~GZqy{totw|{F zVM@zg^+7ZA!=-(zH?Dm8So;mMyk?w#bXP(ditHMMP~^VrwZ2g#dka0r8AIgiT!JZ5 zxUD8gRcq?1w~X%Kw9*R#=NBJ5_U0aKay?XT^k7@xk^MCHEuzGyLd7-$1Vfl^8jJPd zKp4tRnfAx1UT6w}CH=#tw16DE$StazQem@{V5Y2cz*SRtaoBUrAgFiy&8Qxs$I6oM zOVr%BtY3&-=R6;LSZ^daJ5^aT&=H}45dy&5<3*pf_nZ3kZju`1t^14?S58;ymGDvQK0JYPPZS1FP%a6DWX*6rqcyw;%2)30mxId z7^1*-P4U15N49mNP-VFttD_a!x}0Zb8%ibt^Ycw#%i?aVieT&n6TG zzRcPQ#0~ab5i!~qt=R#}MMZV&CP*noV`MDlvEj;I44K4ea1_|W4Nmev4}%+8Rr77F zq63ng4ipBFSh3zYsTG5ma$@I*Q&qP4Rjo-$A&NK*-g-zH#YMG>t`x2hOOSVAI-Tc2 z%4bz-nibH|a12o|Vn9SR3}p<@ zRvMD=Xd0C%!)`Vh=^+KdE3G-wh z0@wq4m&BH6mX4YAKf)oOvIKMdm}I#_Gz_u-Aq;i&z<|thT{KqRD=WIcCPx{4Ta55K zc;*1g51=S#a<5485-q0R8eEJ?D)i3S)Qx0?q5QffmIPDy?Xh+9j1=IY15!Wn<@g#K zb`R3Zw3Oz_koKdpJDq=@lq>QiU%WKj*Ybzn?)JrivzH{axowQf@W_`b^2pWsSD-N= zTIoyP*o~f!Kp6?5of0d0N-D~FlsB3naMv3b?Vdi75(s37i6+VvERCyD&d!eUl0P@> zW9SZ7)GGwTeqt(qI2MbhEmy~|(@Rp*cD}BCxV|p53$itcBJ!zc;G!z7cWj6`H@%)+ zI8#(YXl}IlzTna=gB(tWWv&Ul+&7ZGI6)WPI%$#d9XK~p2%#rh$PHBmV54h2Ig)`;?ch!pC|Z)2j^R~f=oyvTPPio zO6*<)tFHVfPOIQg1fN0a@>v#cC%}pZvuM;|Q~z|67PF+Nj=u+NfEQd*z^}NM6%xOM zbio$`o5YsCOjlA6?>1p*uZOJZRjUQRz!RD9l4}D(dUPYdxx&qK)4q&6T1Fp*}cAz%zGeAIwiZ@Yzs*5M;t3<5VmI;Y+W^|@0Ef3cF+&TYx zKCJdm_VFHWHDp`*IojpPm0DL(3R$UQH44kTz5-ZZ?z;Os=5n*N<9WI4ayZ8j>kv_h z!*Ch~YxhJ4@R7C+ogHEr!@)@mvmI+Hrj-rLwZX+KDwIf1c^A$YK@GWFXkiNDra!W) z|H`W*;ftsEJzWRlQMq}#N1{;Wl{V<<;cNF0l^llIGPd}&tec`hru zwhi%(MR0Fm$h9K-aek`Ws)XFh+=(;rm8UcCAt=M;-2g@DQYEryD~peypi1ST;0FwM zVTOWU%!eRCW~Ex#;GX*Ru~MCCRH{z5oEFQbOJGu;*0jF}GeZktV%3`8W#|{*E;HWG zMH9MCz^LSAqGvh$fK2T_MB*p})Pq4Y0Vl(wY5^Za9MRb&p+&5AvU)tcZ5$rqa7HXU zTX#R7gBzbP3F1+R%KK^_-PV^N*PSdG!}75=s9YGIj#$Sf%Irq3(~6uVNBeoB(@KuM zy+HFyN=yrDc_^McpJyP>R~;d_x$ml6$`8I$kqjY}gQq_Fo@#Eo*k$eRvu={^3QbkN zy720uE&3-tDW-3&qjHEebPb;U;asj`yt~i5T>!5`RN<}WeMjC_Wt?QHMG5*T~d|WR`Y91`_|pzzC9v}t9!KJ zEc#lxX{XBiyr%~I(Ntda45AlT6cqRRFwI);o2CDm!*JvTK_WY7b0Tflvr;x77!}@T z#Z0ub6-P-p{S~CoWyQQq{ADX1sBOkW-#~(Fe%eJ_{Rb83`Unn zF(C8_Ho8(WJJ`h8Vif|@Dqob48C;n82s-E_Jt!e0Pz*UnHPF`l+PYqHuy$MzQ|pVC zIYLRV(snVjv8+CJsqILCZRNT_9mvs3>h~A2E8PsSEafac9GGLDPiRl7W>EEE8=%Rj zB#{`0`?ZoZMrhQTPT3rRZSw;+bunvnj6a~@RAxfZ1+2wDp*s@~mmdGkyRBoJbZB4Z znU)?u9i4lh*UOl8Zc&l(BeG1Qg&MxipkAD#!D?}Mj;OPSnD)Gdbxw|Nw?rP<_Ddz z%fFTOlymQ!Yg3Sf5Z0S7lT4j>D<15-3=V30BYt*7TgL&G@Yl#c3M9;-u2L)Z zDxWhLPwV7L75*g@F4{*n^{bgj(yo~8e$|#zy$uBtHKO>XIWq$T@J|2CB@pdb?=cax zPr{QU(0D`j{HK0N-me4If&Cv%5L;~ZNJFs!mykj=$;$T{)IY&u3zvni#s3JYc@5%g zKrvzt#6DQ43G!g7!mt7wRVZD$VqdWEM36*sNA5)&&8R4jdvln+mR3F?GWVaI3@rFG 
zYz@8APNoZ`uef=A+lvl4QgWz|QH>FO&%Q?^T=GPuABy%ZIoosZ#7O!&+${VtI0MP--MZ|;y1%IpY6EUza15lDBGw^(3V$*z z&8JB+?zTMi(H5ZO6KbSxx~J38waVe+0Faif$p5Y z*S3&hB{P~n4d0$V%v>bc(u@_)d392=&E+~<-xU?q+g#2{Ov-w2#J|O- zp*%195LSW2v|JH}$?zsdDT*sRu#sT&L+POIr^{Ds-)`np>``x;XEqhWJXXt+jB;%5 zSDd(NE5NJ_5gqrhgYJW>ccU3}8YTN}9D9Q&6xmet^y0nL)*5&<@FCu>qTpK>_wP51>c(n7B|Go? zc!xt=a(E$dT{Ejz&l0!|N~6iOnc7mm#y#?B7^dQi-~*f-ca_vOhsJ)Nw89Qr!bt(Q z#2>>?=Lz2x5AV@W8$^ZwapkH9*f7<3+|^tqFX8pH|9{tw(b>o2s4&ZmGgr z2`Tw4Il6a?A&^Q(W{x>LwN)7#b#n&yGX&BWNs`5q^j-Pd6_tL+EnjzfJh1&=DUko! ziu13wDtI3<0Ua?CS>CLB_s=}=b9c0epcr(Vh;dKQ5s*t<`nbH9R-q3ar+7x)uhg3b z89i%>!YNaHdmW@$2H4yS0lTZ7Lj37jgIkjzEM0&>Ecp<{Sa(!L-&;)y#-V zGe>~E=S`^osWiX)bB+B&BLde+PD=F#V8wh1zP5KAZQ{A`kb%PwL&_fmYq7tH#bi}g zL&sf1SbKM4geM*oWKGyqh3rXTflW!#Z1)B0y9y#b6l`=E0);2fd=hMf#g_fqZ@mHO zL)FCXQYBRi5qN?yNTkM+`MV+o>{2tVGE~}d(LlepU~N&xA{m7*r>F%dOi3b=MjC>USbBTmflEz}R4@%jD1Yu3tIpT~*y&{~s*0 z;|XY2u%fg)ZiE7oWjEcgI&|POwF+8ENlYks(+EULvM)R%M2uZwisX^&NuHzSVh9>B z1MTW>%3Hmg^VaKwo_-qxOJ^w-T5XJ(MztoK0(hQj50Y5Gtg1CHB{$?YuON@OVY?C; ziot9pCN>5jVg#sXBeg>5acojrwr#@|bP7^Y90lFQ=P4{H7OP*Z)>AQvtS($w)Xm#D zDIGt%td6;=>ed14j+w5GF6_8HBMMs4?$_mBN8;pm8HES-2B{nY9Z%@Q%SKimQ*K@5~5>@^w??>+R z*C)+{Qb$U14iyWFPQn9K7V?QKAdn`Tvq4bDtSjETwai9xVH121x+7N0NFW8=aO69f zA}diMhfsjJGiQ3acc3t2d;FncS?}polnOioHW5cg+W;KPQqh6x3cGL0yfH=2^T47560?)u-^Oq^4nnY7v>-vEZ9beQS{=bBTmI~W zw4C49Fd=oh#Qe;YvF-DZa4pHCv4hSj4t^*<9I6kgY)ySH>JL^!`>r97Sv7*_jl+Yy5}Iym4mF(FYWiL)nWgdmZ+CjRXth5UJ< z@>sha7xLFCUmEQ1Hv|B;Ktlq|vCivWNQQ9xgbj)}O=Khi9Pj%!D3e@0y%AIcI}t=S zc1=Y38d$6^+aDssC_*@=*T;ZmaN=zs9SV<6rCW$Yy(Lw>_9djdXm9 zuQufqc&~8O`?SI%FKR_ZiX8WdF(Hs%qBll^hjQB9@NYF= z;5%g87>^bkaNH~|SZbCD4?%pAEb;$#0)p`mABwUN`iP(5qRZj-nOcnP_v+~wI+?6T z{|9^T{3Xd2czfPLm(gWcb=kIU+qP|W*|u%lwyVpwZDX?U{m!hn-kbRkCV$IZnI|(N zA~WKg{n>jP+)n8ZZ@QMK&u*m-|467*7y1PjJu;o=D+;3HafscSA_i=Lw!hTmYG~YV z#3VyyLRs)HwD^BS{P#u*?=W(P^;=T8kS|$p`8hK-zY*=HMD*?@IhTnSkRxM9gAi#!bN=NjbQ@_`11A^esNxyb;;#0CzZ!0wgf#Nvb;oLD1>tIp+-Y)};im&CZAXPn zLUmoP_^~U@mky>Q#=RGKNsjZG<$k(nHFOgKfCxW@yeD2N?RzB`A0)Dm676e(Z_IDC zpXu%hvUrRFBOMBP-MCB5^Ma!SEDp~&g+AqmYfYzRgZ@dpS#Ny-%-l<3Z|!!mZ+8y? 
zaqlwcLdKIbaKIOv4TwcWBX)a(kPDeML%j>#KHe_5nUKWFB<`Irdl67cvhLSi&m`jf zB>&StEt=camG#U|lS26$pu`!0`wqX6^%k>X!wuOm1|B9-3Idf8Y_^AAWSYoBOS0$B zUdd#-M35>-Sk8#HTe5p=%1Z`<;ucNjr3fFA(v5{GWEG5%+e@|M$%V`PhUupC~aK+c(lMqt!mi7vuRlW z1b8sy1R`btr@n$YheXiPmLaYVN(VO{Vd*P-{@ccs*H`<*9|uDU(XGhflnGr{{njQM zktI93q|_mby?2T6t>0hoNdRpI(=7cR3_pV3G@g_8Q9nNh;zE$C8nH*}SIrXb2v~Tw$I52N8m_~|16AeRJsaofeR?aXhuy7%ywBdE=YBc2Ke_JL zac|}v9v%Wx|HE72n}p(jsLt4z^#0Srgx{I&9Z&80AF@ zNub2r0lyYRzg7QUbWEu0K0gr^T<~)3E_CJTyyj+9(`oV(SLK0eT%7GNQCaZn+IsL^ z`1NVufQiNEtTKD9IdI$TlKZMuG$fuWp=_ZTpWXsenoKnM)#K`=f=rig%)-ZiY&o;p zt4Gq#D)^|1(|vw*h3O%svb=2lqjNK0FEG60#RPJ!FL>GwtEaQ7DT|yRKHZ!N)tZ)> z#SSj(iX)CY>*mHS^GI)vqH(+3lQ7Na<4ii8#SStwAMKSM)pu*n_J@Qi!wvZbOv9Y` zLBoo3`hfDKHkmZG)|vN??A1^EFtl);$ie?68oT zTL`#WE!vYHx$h{CS#@2JIZ5y_ZELifL)(M@`lg<&{A!S9AX+6wMmlu7$2E+}K*Wq2TExfjpl}ek+TK8ylQ3-R@-;uZU}ALPh_C z!1H-QsK)9D?QTZ1pBu~X4sK5?BNXVXFI#@OzCDFxVy5l=Ht5~x70`oW~O=b+P@(UG%eND zsdUeJMPhNN#e5iq$2}u$Ou6#o4PR#~$!j~k^xB|K z>G+4R(yBIuO7nlsNZJ_<>0Efl(0x{U1g21Btp^$}6;R(e>Rp zC$0CfsVj0CzNYWY*?i%tbKz+kctU}$=NZOgM!~YxnVW@kg^{E5N|y6{7MBFROFI4V zX1c?+Kcj~|>`jIh63`itQ#P)z3y`I0UE?7IVQw=ZMVrUTAbKv4^tGRnM$hfO+!xv)X4*dbHbuzN;kP@!qXvdVtsvaW$N!pCZL`byKP{D5m#Ku#&S0$Q9y&7j zK+m&^ZsDAEE-0izssk5I~rlL!(adKPPXd&N=Q@c-dKuw zowyntKOFuFOl$6r+>%`0k+E1FCbjUf;KY;uYjFj!u+S}L0o#ae$Bl+vSn~F^QZ&`so<~EP& z7gk;!`ZAX(`U`&sa9!0~;>Snkm@P9E*rzj#oLwZ}qM*B6=+wb12qha~U|K%ajF`~o z4~WA)Vy;G9gefRJlnz?DadLKb(aufZX;eK(8~D7%mPja`B0rhU-HGY&+7|q>MA$-J zn+_O1lpk+nj}aW3QIuAkJbS?{@-ItVyGyLkr;<@ktybHWm48b@3}}np3CW`5P%ak2 z5bECBLRZy7TX+nvfu@kO?&dDNm}Dy6C`!+B(}42N4jCAhcwL*AOpP4KJe5xv%ars! zuz#k^%Gv}XVJD>c3Pc;&+*g<~7Dzh;Esv@=J7y`W?AEk*Rk?8hFK&o0$+`W~u<-#E zb^j;k3%wAz;4Vd3srq1DIgKgpF_s;kRox-+)!dDvVv)r#R$ph^h=epUBv1+h6TXh-dl3|q& z0S!)mhvB_7$;cz;E}&xnVJ3~V(0mlH2h7wDWim(HKdh|+>1!v&(Zm~6b-Ls`k8wq< zLcWAjOQrD6mL}ZM^)6B3`0Ff1DoXx-p9s>)<7+?YKlNYQ{o;-32mzc{M+VH1=&;9R zS$6E?*+x9dI-4}1dCpgZ@mI)g;#hp&Pqt=1E2#_`LaY!jp$1x zP@N#{Db2#IROiw(w&bJ;t!&arO441TMAv6iUy3O7y4c5;mzOi%cL89W&4#e4lP0CS0cNtt+xCqPs(D(*H%&YerP*W_N8$9SS5;vN}6j@ z;LLn#PwVvlK17!*J(3nC=@9zpJ=W#Ya58p)l;)w)(@4S*7m@JYe+x|WKz@>2(3w&1 z-7b=DF8QeXo59S5%6Zg~Lc$dlS7DA*l+%A4*`o$i&3vM>E2^hPgmeSA1+&W8!7#uV zU7ifhwxKja&4#zErIh=E?}}~Q#GE}!dvJ&!f+6Oh*>*$EGSQrhXRIWcqWR!((Y(7i>P9e8P(!NHN+iklc&)th%a6YdXu_D=4^#vf5m35p8ftYf2cw$Nt4!5L$R z=FxURQf5;jxaFc&1e~;T`Uuv5o&=P<>S|zN+uE&lFup(uykHb?7Yz z$8r9I3N5&xHo^U^kv8ml0P6W%TI~M>-1y#_{$>9_|NU7_M1{eMD%YX${%X&@@$k zBWh1&2y_MDA4B&cPj_Au=^kyX;T;{>$Y-Z)-NMf0)^#$K$w8*YpC}8>irKh5QaC!k zKl=p$^Yj*=f6-sL?u6a>QO04hjI#&%>bSxF@8ypQswiFhi5@?KB`_?!5%pcQ3>#0( z?0tp=n4iq((ZG0u9r58V$7^KL|bTe64BQrFp=cLUe zo7g$B#YgGhUPyX!gvbU_wHGA(l?^fCqGX!CY*SdHjOW3*F|TPgEZ{i-6>6yd&>KWj z+QJ?cJx;!u7dRp7I6j>$L0G2v?=-F$*iTOyd9%zQ%vW7`s;r@%&)&$GoSmKhET$TV zomhfa^AB9Z9dc_psS2KQ6`VGfP~}VX!xH5yySju=n_83dJ+6hks`!vaYObNirKyz} zWscu21fFDSDG1c$_Bi=l;<(ZhAXC6(X; ziX3K{VIM+1DCqD2)>%!2Q=&fzvphmxM|KNNZs|&w$gl;oY~VM9W&^p9{6d>+<@MAx z(}_U>sjF?peiYY$U(!33AQtvxO*YnfwQ9u-!NvQJEwW1ImS*if~z`2+KFn%B)fq)sw$(56-;B3V?!0>`-Egiw?|d zRLd>TldkgQdzzJr(z49df#1(Oq${fV9FM5RC*Q;^KNSz^={BnFPE&?z>-iY++2NXp zbMhfV7Co>WEiVU)7lO`?Fb0BMsyPX9|1eRA7Yy!XX-Ddwtq`nXoAccEr6VZDcvxHM zJ8&-KkJrl6s8gkwYax2e2HuL?U>55x)u)s|68tO;n=9+eU<;qr7}#7qdyhbW7Q#Y& zwrCb`S>Rcwpst{b;A;55bR+6Fz#c3Am9nu$U(UV?mxwag33UN|-}np^rYSK;; zP8VDFmK^f$q$za_VsLM%!)$R?+yc3vJ&Kj`Pmmv@oeFck%?%?zWUHRb4$$JdhJC1m6Nj7?@WJo7j*57H}}j7Eb1>N5-o3lO1O^? 
z4tK#F-TqV{rj8NR?rVIG>;7_fr}ly=tQBq%*Rzsgqo|sbL5G2i?R?FvJxm~F>YDv+SE2poNqpucKt;r`+D4Oj zQjY2uT_k8r1Y;XF_t4CRvn!5}{1~_zn=1{^+khjR+y;cG4+4XYR;XkOPyfcyxpcb5 zaJ#+`PYWOM5M8!b*U0YDd#l_Ip9td0S~E8#{5sxBxb$;5j|GQ}M#3tsR4tXC_Y2N; z@fW~cJN@yz&ek}*v8{fdLy!`bXN=;gZ~*POa>4%6_jSGBfu+z2W|BB`i@&=CE~XP* zaF%l}*Gc#z;!S6GOJtnnJoku_!YY{wuMJoCp=LM&VTNBr-or4I^jqv}VEf4D_rq_$ zH#lpn*VIPYZLdp|?1Q{0*O!EP=ymP$(;f-f29B-5@;`^)Ny2>R$#HS^732kh9X&E@ zWEVKYUa8yEffuFrBoY%+>>r!y9S`c z!t1f`9=Rui%VwP=PBRzcIFYz(4gXDsH*S& z%7yE51_t<9rTY-CM0px`)0Rf_4cm)Mu z->sW&RhlsBNTv7{hw}|}KEkmT@hB8w@UWtIgKr>b6COU+q`3A}@X>6n zxm5j`Av_0DHGzHN0fJOVV~7^YKgG8`++zjTKAs$~#Wx|3udZ{CCPqdd>wF?Tn#2L# zT%SwkPoSZC&b{5MZt}_>3HW!dDffmo$1}V}jKuoU|1e3{2_!$plZT(0kttbR)COqR@mv?w=mY(3L~5-i$EdKVAd&T&{sfy?58~xMoH~dQ`1C4>Vd4*S)VgjrlGL?RurpQEv~pSim67DdYHLm#kL;HB*Y?SMS_EzA`gN*VghAcDrnd@1d2mI`EeWfAfao$p%-kZ|vN}FmUrZ)Fr_l?+UAl z0LSR$KOvUBL8dlFTX?_mjMG(J)w}?{CU@}JPZm4hUB#70F-WlYEk4Q3hS6>Bz~)Qt zaF?|$RyB~{-a6T$#~nIr2;#cMc(`r>#!(r`+9NZ?J*k#vC16L?!~B|}%LLL&@DV~> zPLJ=kJ8wh9MEjd+)Z1=M{eaqx$1)JZO8QKiPBe@{>j9FuaqXy;IFdj>ZHCTMR*!EM z59orj?vs(#&UdM0AvBNa(@+5k)-kU`wub?pj;%bY>k*jsa{dTZXSnpLUvYkdnj|NT zbsmmZnXf)QHB|*OJm}H8X*yH5lh(~wm}r}upS0S^ z-JsET%x0W|?1MXA_AH6q-FU{$O|d2d9+o6lT4dhczL^!{WPu$9+c$2oloss%g(DTB zx!|lm3Lfq&)K?V4$sn-rqyyJkrHK6=>KU%k*lHG5C^W!Z88dBA3L-Z5;?&Qsw zlIaV9FxQ{9olE|v-|TpznNe(SR-9kdW9gHa>J2KW_mqtts!4C>Xu5_(Lp4>8vT+mj z`ri^r)|OuB@qwLn!+4UI0nVI~quY}@A9rcKBiFKwt22GY9hJ|E_!tlh>UArN>|m< z|Ld~FJ}0<@!N_TiQmGxMcQ6UbdyXJzQ#Pg$!(vX2gh|r}_-$eKRqHAL)fxK$~;*})T?M(w=PHNq7#&O57&6^7(_ zc&tab+fljH9U+bmp3y%etu=}mQ1@-%tf;1WSG=*pR_8b5f|QR%0Z?KgGw#!XSh7DVq=Od zSY1~S{=s(hBEASj+<|r8kHo8Fjs0b@+GGK`@4KL|Aw!v7S)J|GJauFAvnZNaOhRHX zCd1q~1P5rP)bGf*;*hb#z}4!9`9B~5<%9z}$H<6`c&v5c(_E1hmg28l{$t-GHT*g|qfP>4%q-yR!_p8$2sqiHcD^5DwN zKPk%w*c?k|x)|5&pRE?txrcf(RRI|qXnPlzve{0|7EfFnOLNFy+g z34V9qQqee8aei<{lf)=97U*NMbpJtuUGzfpZ zvL!T3#+R-QP-#_3eK2MBPy*(SXOnNgtKjJL>M3GYgL-u*tM^9d{$dn4)}W(;Z}W|g zaNvOTmH0h+Qg#6K1HgWyhC3}&00|;4yJTzdHsMn`3orE}_LXXJQ3XY!aap)Ao71?T0=7CAeg-&|=H21-IXB?{6)QS-;i4lFgq|Mx?p9UyZt{;> zyt{>o^3_56G8sdp3j0RY=o$tOw*0~$y8^$#*EBK(@47)TohwA)#f|tkH>Vu#Gz=5?j*Y_*&ZnV+arGtMqJmb_i#Pr=?x>ZEl!@Sg{6$zP_V&F(dnmdMT zT4LF0hj`=FW4>>0UPazeS3@nV8T5ymPKnNw*j$(ZUYUiI?annW8eiSc2=7$Jp%5h_ zJg)Ep@CN&;=6yJ5wEBq>4W>1!OdR?NR=?)6q5|}?dj@usB5=mIQt!fK>~*ebKo^MV zXDX5>;gtbw%!B`5mD@ttXj8hl`g3_e5eSLY(vk9;^O}LRu8M1ezWSk8l*=&P5k?S-uI?ecTF;r} z*Lg2=ai7o?a2+Kz&5ituXCG}si1?zX6vfja{hhK{(N{;}+H%-jfLhzxKQoR4IIJ4U z>e+nn4GPKkBQ3XI(T|#|_oG_jMMYmWh7djA$#104#`Jfi)m1kbsU!%?&ABm0vi!1kA4}qkD#QZ`1_&_Oy-7B_Fx1ER1c#EUH`H zg(fAr30rV5U*B&6id&)d^6KdGC-n}sxG?^sATwz+f#WoybBmxy&T41BM-MsyD+Ut| zLTY7*UK{6(QC!05U%j^s02{gDf-L@B^OrAVdzofF(x$+67nV?LvG;^sOUUmLAKUK` z3T=fe>TDLsTNV<&5SUeuE2yojuiPA9#I&GQJz0+k=^J+WLYF_2TwtTZ{a$rQJmW}$ zvwHPn&7F2&#K}6xFCq}#KFu#3nk>MTy&I0^o(&dWeORx9_3 z#Xeotz>e6bO0EL8$0Gz%WU@gnRdm-dD5>Bb&#~By*@2nh!{+4ju5SMJs=)D2?fL(x zlde*h+TCtuC}AocF*v4Oz8*?A#|7LKWEH9Xf%Hs|k&jiDd2a;8UI?y$ZtVQgOr(1# z{YE3|=YU>r4T(L)S!QM}wyvyS*da3p72B5?Z&8&0`xa{GKmRJ*gG;)qLh)G5%1}ts z02B|LneOVj#8NTwU`U%$KnFgd5Ag> z;7Vm_JpY(}E6Hr#WEDQ8pTJk+?$4G+Q%wt6+BkN6yV)_EAt&A+7k`F{@)h~gNEar= zkacC81s7T(Z_Rt9 zE$+cClAoU0fDRKEK!-`NZ>I{38vqsg%fZRAbxHk~*-r!R7aa{( zSX>;>3yU^4r$ikq_uw@!@ZuI(MI^d=#-`E4kWrTr?-l~SOYFR}@**6HLz>u3xlbYcgj%iLLUUHCUhFZ<`59NUQxnr)QeyH;vTvjbEFuFay7*T2+lrMV6^+G>Cnx>T& z^@uSdWi4<>Ppsb#qvOon^qt!unOAj3>h={t#ML>XKO2Z4gm0dS!;=&66jw570;_V< z2D8O#yA>IMM!Tol^R{R#mzFI~s(AgyL_};)6oYSU9DXizfZ?qC;xiFi8;WP{bu)rh zLnYF$=yW;b8o$4cT!-KvS?4_*goDTaefCQRjz>`9pFd0H>Qya2Va9SHf7RM<^L#wD z^!xx6os^AUtFO1U*h>pgtQ7lpj+f4(@EkZ$aNH`nK!9K7-2rT2f4+H;!UG{PpxkfZ 
zX5nnFb$`V>*4?z9*E!WK@FBc_W%s;J+0MPoG3mJN2K9tReHht5f_v>b2u zMF_jMWF~whKAcJgytQmu>!0>J9=>IINl;BjaBzi>vAcwQx96}XMUBKd3^nc2huA0S zXwMmYt{Ks7`m(LkwRU9)QY>wfVuHoA7&TK1R+x!)j`c!GF}f6|&m(3p9r3(-WtzaDp;=aVPWhSaFKKj$qLNyoQfsM=hB zPzOm3CiK&l@iQ~VacjBMIUB~Q+mZmw`vAl)g3oHrBo>mv6*IF@4=#Q_0inx?dfQ+# zSv4Xv)@MQBUN21L0;&-XR-deYX8&!=SUNZGNG^C<1+f*{-(uRCX(mb_hwl0+F*-$-U$Ndoe-?%8~o#g zRb31ntB@m2=Z?X!P2G+abK6RA-ZtYlgqPm=(oD;D-n#X?T1g8{x>!1gZrb zrIivHQ^trQqe%$Dy8?{dPwi_iawos2+!v|hW==;(4yX{NwEBDf`|4C!&g?$rOy)Cg}1 z@IE1OzCVr!L}O+D1w?zPgm&UYar=rSQ*QS0zTHrhWl+Wcf{)Y?*t|ZOoJrn$VWcLK zaYpRL|M>a)15f7g`a+R;a}XU6jh6&;Oe+6v3AX6%oe2pNOvNC!75q z=yq1fWg;2;e-CN@?Oz$uwCx&lb;K`N^6 zR9lh-im`1eJxA2>5UO|~J(pi}!St8|UQEejg8vr$03giZ$bo6ki`~6RDQjE5sq?212W0CnBm6(Q~jz@(B#pmNFUjJdNDX=24Q~Z0( zNX*>bp^Y3e2Lv8EOqn2WA;Pkb#81YifRwB>pHro6(plz8Uez$Tf?XkehWUB-%!>^H zCMVdC8QV(mpfSf-&wN6*=oauK(P#XugUG{_t{UViAmAnCSMAA&Y{N1sKa9}!ii=dS zO?UZ*82#Ajxg$Tm_>b-i2fUG0bD-l`P@^3tVZN&Ohe@aSv49h(`A9I$7{E}3-0Vm` z80hB%bphZ2f1KHu{7Tr;tb-A7-@4xhyCa?Hw)u-k<|xryT3Xuv$H{9qrck`qAF$!e z9xrV+F>AWdTqON)D+6Z&R__w^@3e?$u|Rd+KDSyql8RS&7v`OlZY!dhu;OkT+(*Jc zV$+kt#*Q({hz*2WD7`G`sq{|FEe3RUtq{>{KBGz#`7Famcl5yr5oh0nr`5T_dAr$Q z$VVm$8O3a;t7Ke%e%p&MN0Kg|v5gOlChM|DP-aYBI#oiic2>!Hdt%{jq&+Q2Ugs4B z>YgV)OESWSsZZ1}jO`Dh=_)}W9|F=kGKhYpN&UNPrEvY8o?N!T+q7sH5n?3A^18zw zWiLz{IFUmJu6n>PB(%Rv*{7(wEomKZ>Qh>StHsCx;Hz?Gga%DUx3SLqIWkto`Ui*b zLSY5fgziY?^s1vW*o8z_PdB%Oa56*-xl2cDu)?2H$i8m?%WAmqgiYM%kbF23V7A&n zi+QPZ_(vzdRq%jLC)SutN{CA$P4FhZs8s;j(!ohOZ8XFDckk!(c8%3W>!04Rx~n|o zra+5(LJ8;SSa`C0UGz)U^|}*Q>ElpqPo=U6)62o(cNJOBv>xj8lb7{~U6n9AXNdM&T z;k#|RXUMXpmG`!ohbeDyuk;$FHsg^$tExCg`G+AIpv7{a*0C5@`pTNN>8e<{bbMp7 zqezg|;mB66PekpP#;G)gskjDFi>m*cGLjDzl`}tc@N{h`FiVUDC6EcGx7htf#V@W()%SkeCg!gYM zCyoX?q+~3=vJVBc6TCI%zv+CRkas&ATUQLS;8NpdWjnGtitu|n^UPSSUrEecWZ32= zskGD#HSZq_GR!OAE7~czFQO}iF0uc}@?YY`P&vs@HuQDIt*b*bokLW#yQTAMWwXoatT)&06{xv0h)4e$Xf-U@ba(yjoYg_Yu z{Ivz;c<{TUu3K@GO@O*S7a0px6raTyO@s^AO+7gonsbn zDw7dyR1BGYW${BQbEyZ0QZhd_n1&a!|AmHaB`d#VAL*MgXepkr52TJ&UFTL@!NN;$ zUK_)I@2}hCPnDjd@K*z%*7k%$Gg0Apxqqst3#ZB}42i8a;V~EO^i3<$=jurw&`#?ay_SR|O6bcOT!HZ2ATkOY z_9tx1KH3VEygCak9kLJ9dZY!2t!}u?*BO?F?)_j|6MP%BHoC%Uff(wQ$zzGWJ5zexcdH%Z zTe5b~1dn;oQz?BF+&##gbB%n11_RG``n)?r`<7h~<)?!82{HGe!ryOn$s^_FiY&a5 zi0=p&)*DeN9^DaBL`dD$NU14>-Kphuz~5ajInhA4ZU?*4Uopjp8Qs$HuAs^wI$DJ# z0w0Jhmwa&H_Aj%vHvx>EAGaW7I&ts&X>LZAyl_>T*l{2Kalbd|(f?t;eY$A_W|nR! zcsdDz&S2^|gwFRVGu?ic@hXJY=9Lutn|u=EUYW1L)LZ;qOQtqQGXNK(xAkKw=QLBPLLYy!LJ|ZyQIRa)(=kNRP*1iRJxJ&@`BOEha^Yx+u!fQL;i4|c^U_>x7AzO|V zC)(}V!mBP-xzy`6%HS^0S~jm_VoBFyPpw>or@7V?m8rT!vK*5Ef0 zOIjD|r!_^8D2x9OSnGizp*FRx`HIzu?_7BR3*Pp{ z%jxkXeKM+fh^eER7i+9!7{0e^``whWv@>vYeDjgUJL|c7eEWFWwo>~+aTbz26j6oa zcI>_LWXm0>jm5O_iSe)-=gIhqRQsjpPXGQhwK6=C={Swbfg$Vf^(zWQAj7dag~Fn& zBL&BR5q8;lZjh@d%v_B}!#g=G02$>K_$hqO1R>}k;k!5@8? zHVK%sI-pnMq|+k;6*GE|&&MC_XEy(EcNF_?+t4}YAxt9Y=*(fH)rOQbXw-oh?lL)y zx0#2R5VL)gAc=99QyL?7LLq^0H~A2str(Tloyos-aF*#zrDdGUq;h~(96>AoimS1R z)>9lm>p*ce{V_r{ni*2ZL=hRL^woaO#D#rmqK9?90m68?V5#db{X6v{bs1O~pV6=ifPV>;t#y-L5qi=z<(;XyDdnJ` zpc3oPl1GcluSk?>MzsJzM$@4t$vbqa`jyl#dm&C+b3O&{mVC9Tpt3oz=RK#U-iG%z_-bn_5dF?NWF`ik{QcWh+rTBgyLu$cmk_@e^ zb0OR=o`Is#rxgTe4`5~)k@4IYkDjNBfjMbab=1>L#x1X&1&-fVVUhr}to;AbvN=4! 
zI+>nDaSitGy0nl3GRVorMfm#mne85|8sTv?@eoW4ekN0_BE)4H^svsIQ+%EMX z|I&yq>o086m|3YTTqCNO_2aI3Wj{C~^G@2@S@`6ups(;*H7FL8H_A6<^CQfUH42CJ zE_HlUUrA72r{55LX1-7dK`C?Zxwr-Gl+kY7lsR)C&b<&1Ca>k@P?;1aGvhg0 zxe3wFtoggMfTOR4aM?AYv|s5sES0|i*+?gNnEl~$75*v{LWLu|LO7guuJ!tdloY5D zNXd#_n2gtgr#8wNM92J`r$zs?r$w-*ocso_BZoIgTqtp^lI&GZi9DMdsN?Ly0q4aPDGT)v8_hy>!oAj{gPp^GshsVDc zBV-Q1UT}U&Xd8+pNja`_6wW?VVx+*-@&HjzC^)`TAhNhCo@j*?MsY4BBNs3)hP58S z4YaUN3V5?&ohc~G2P7y7`gW!H5}tThtP^~*8C3kVBR^N!I1I@)*YBvF0AbN z)O@*vy?1snGy)z;$rb1Jl_YV&V~ZRJfj4|kR&Hf^{WW}VdExzxeNA}ryqRYSbCNiq zE1fS(R%$&mS_)26HN(~U^-F3Aihvc`J0%p;D|#rP2V z3&<}kT|EETXK55!HWmG_>RQ8SjEvt&=8&O;(&8?SUsklIZFNVQ(Kf7U;_~m-HAQ7W z9GK5MGMB7i+b?k*-v{@m?rh(L;CVCHCMHzsFJIh})U)yl8Wb5rZ`RbC0U%LA%hl=I z(RK}*ByAUJzjNiODj=$LScBiR$#z%_lwZmkB%^TmcpbJ;L%0)3S$**tlQqu1gP(#} zbR!}iA;4$|$z1*zQ0ZptVM>BwN-$HG{e_tg^?vec1QW%WG!UFSOa(LR;<1-+rbHY7 zT=N@ZO!pSIeBt(9MYThewOKtBS%c^MFbSBVM@VBM+a}mM`mc+Z4_|w`#TNNTkGDe0 zz?7vneLuU7otDk(y>pQ-sdOV6Ct(76BC;!v)fGnE&PAPh&{(p`N-DFrLe#Jnw+q6* z50`yNy9WSC4io=Oln)a=Y}xOAn_heGR8UNJ-qKq83&A2n-)_#fwgVKK4TNX9S0wjWd@R^|=f%+GFSEE%yv zE(#$n3q6Y^pOm!bWPi8V6&wP5?AjbAe@vxdsf4G|*aWc+Krnw(spaw)^NI04k z+>}W+sMRa|opu~X_O|`zMtS$yW~UmR;OBp}t@FT_R*8>~jZY2(_kE!Yxi0AYhr2}o z(9r$CYM?J=5b~9$^99<)WYZ7)dODDsBa`TRWU#TNy`GNR81v{D^|uSp*awa5bR15L&UN7(q7cBAX~1e9bb?1G4fH3HjF|qC22-rr2uQ%`!MzlF776K z@?v5nmec#XiA_F5={KrxX>Muk_*kBO$H<=~zBU|l3JO$vO&bAUtJDb)c(?2u0X=k( zrpE?e)r{oaW9|G33 zi{v^sf`$o5+f?$J*}qWW0R0QK%&z1PtTsb?4?vwSpcQQ37SSJw5sbyROsLgc5gcnr zM$pN!*wG*OLO_LK(0&b0T!skWz2=m~gxrIwsi!=*T)H=IWba^7>?0-5~r6ORSosnCuFmcv;uMyN~(>p)m&Ax&s~|9B&A%CI}7= zP&e!Gi=(N@lkY0UmN+tzGrY_tw2b+3{nQ@Zi5 z^31IBc&Nj%g|Il*1qIqnq7%7<)y!YanJl3Yh*~f!tBx z`vs&}qc}lN{|DDnQbIRUTb5eHb^=lfcO_)nbxTCikbdJ)g^S_JBNi2IT<)mIwLgKXff3Oyo>Ok+ zH)0_$x6nD1xrvGC)^QqmrX;nT=wV)d6UTyc<9ss)e-^V#8-PVGD#S%fl_%R_3>AfO zu*VzGlA4g(2X>Pz~>MTg{Mlqto-McP8RiCCRPtCH68TKONjzVLiQKQ4hMXAA|n-BOdTH5;`nUk;m*mc>z0J=C{yY$VcZ zZmix2S_PDX{?CW;i7%EjHs!DPC#!?m!qjCZ>tSc5#rw+6r<%YwEt{rK+tsOMEPe28 zL8Nyv*RP)ub5-8=#VF4K*3Ldm=%<3CzX+{e5OOgK!@lc7v{#zTB2oLEemVL8Tf7>m z^K{EV`F%W+XpG&pJ#Z^J4zv?sd;*kj^#4=gAL{>iL~MfmL%Cb?O0mOub(SooWHWz zTD_^4kgt0;@Yv{tINTyBa(_%2Gr6=^+}PT}6Huxt5KA~5H!}AdPuSE3j<;UQr1z9Y zyqUxp%P8X5R|y8&b~Z+){;`I(WjQh*&>*M|^0iFl{W%LMl}lY@>QnpM)+)&Z?)QDN zzBFd2ksj$Oe$kJk3$3^GQ&)BWMH?h*1q0(1VGzFuHI*%)7m#eXBpr7QuPZKx z+TQZvg`1?uJA^ptFjks(>lzRvT09jhUyeABUo}mo*JlzCYF1y<>{s47&v-Ny7|;HB z8>?)zHb=?=Y~_>F^r7jbG9IARH<=;V$c7@+B*g5wyfQ{vuc!W+qP}nu2Hsa+qP}nwr$&X)v3AG zK8PLvzB`v+M8=n0nZ0MePixIT$K%Vt@L8%iab^n2-k=7*Ks%L=q(rZya|($xoLR>+ zKQ??IB0Az)J%YILbx4X|n$x*$GCm2TER25x|J`+9(Jf{$mWc#2^z?M~Jd5)#V_#F8 z-vg5xx%P-9VQzT3)T$4OYt@~`P)(dj`7m!7#_Kn_t1QF?Jj2+OfUCadNF760nWG7L z|3`-wZ(XocVK;!!}BcrM{u4=@0`Z-e%=WS4=?5+GC(3;J@K~~bcs**6rs-b z*X}vt6b3bD+V@6RSd`Y;roX4_MAT&~VcqQ#kZD%k6j!^aY&L0hhFcW}DZ@~DInlPo zhv(H|p0Sst#7)hvp&ipOM+R<^37T>By}Uj2FI&_oOOiF2{;;@yV^vjE z@s6F+w;=hZC{6(3B@{1Ho9hIU1iwSxT5esV0W|Zb7gCD_Bw4g)JHmad6F^Xq%87?E zo6)@0Kyj)KQ+We!2ZEH8)aUH6a4MZ%77Yfiy~7dnx#&^H%|{@j&wIy@c9xuUH^C)= z_{0M2?z%bDi@9e_d?T@Bc{Ze zgY~E35NgzIUj&Yy14$3q@?L|ZT2$mgO>Mrw$ePCwqX5wv7X{F2isCQ<(tKQIb2_SR zZG7}oEF$MPqUC3n_2G0{fK%bgsJ@u01f+qJp+C=@lIXYYF6Vxn?lzx$ znI{o{?!VVR&mX0Ck_6wN&V8`tQl0DTV&~3|;ACV+!3leae?*Ege#`pnetF@UN0*Ml z6na;gXBdNquzqN2Bfm%=_rz3b{{eG&F3ZIXlB}?0>Hu1u4?ZI7nzu_&oeKRrif21; zBIIWgpDLZs@0D+MZamOpk!Ajw#&h^qi0G3LjdLk+=subE^z><$GhHUv7Cmy8CApD+ z2!4Tmj!CT}B0^q8gx8C-8d|y0&#qHdLUS0%J`*@Lg6yEO-_ZGA-=9&iLTa4}UW`?s z$Lgu1Q)bv#$1S6yWax8#R8jbAV(}!{ZPTMctrys78g~ zWU2vt$5Vq15E5Y-e;C+v)sSJqkzBNG#e3AFzFwqgBva>T%t)GUnBU!l*Zg*(58fj zr$?P&6P-(@P0)@w>gnkoSS}{b+zIx_5Vv 
[GIT binary patch payload: base85-encoded PNG image data omitted]
zZv5KA5BfRWiF`yqYonQnKT&4?`aWf|e9S$iq}KCaSWbd#A3>`l|3yasr=?o_M&AD- z>&)Y!{MY{zZPYu2%RN=3-NZ!xxzWr(q+l65c`#xlw} z7=vMk8OHB+&iR~ke&_p-$Na&=-1mLGulIGmUeD`&HxOBg)h*l4HKbVO>^0e5yJ^@f z!T}+ImFLOmQO~#(ayq(t_2@uIcFa?4)l$Zjv@nlfZe~AlwyH;p6M6jfXL! z=XUPtXimBkm^;`OGwuZL6b`+pL%@=;+?;jV6 z>4iC^{m%EMR5%F21+&w`2ir+w#I(4eYF%*D>aWpU+{!T2U~ZR(O+rcabj&j4y8!`v zRAP`NGwlo+Ep0tn-jk9ZTI|aM@l8|Y8q47#pu6EjQrRSq~eJb;KeJE9Yn?~g~sSkm>gKE2{zP|h*7x4&7 zf7e!N>ikdmpqS@yVFh6|VOc{Sinm;7nA&J`N|$meEV}^>d-y5gTa4F!`Hm$PUov#4r}co~d0Y%g#5#-ALZrB&?ju&n_- z$G-X$T?yeNF!G3Wy`=FV&IL)1ps|e|mZs!Rrs|Ic8j3pDHr5QqwphkOqyDKOlg+3>~UP_I|aPuun!Tzqoxk*xiE zy|88l_dy7qti87F-lUDU%bb6?O_7}U_m>G;Sv!Ks^|;k|)i4)#$DBHg-N)MBJf}!k zDo2KCq%wMX+~2$Rb7!`wRhZg-P*Xe4Mhgft{>GH942XfHb6m?u8j{F@-CWt5KcTvBEC5 z?0ADc!f`QD4L#JgYE{SP+pi|Fi3(&dT_&-S@TH*w?k?N@CzZLNsB1P3Wa_k!Xn(l% z{Nnd_6KiCoDidCLw8C5p(dO`D)uh^%DCcx~Om&n9J=nU@&`=guhN^LdWJoPT(1XIC zt5D&qNY!fZtFIRyGnIkSRe3XMD#V>V%H6Rdq{Ly?+Gtso$HbQrF%*Vsq?+#^v45q? zrF5g(q&REcGfgaqzw3=(A_z=dX7t|WSR@?yT@a~UtaQV+*=%<(Trmi^MnJ&lcL#>N z+96-;Xm_nh83W4)XS??msd}s6Th;Ae8M_2uWG?YYqn@<2G@wdxJ|}k^3|C{X{3941 ziM-e@@~^A!qU}}8U2^Y)pZco~ibW@T+_lRP*r>|R-TwU zpDVX-y8T$y?r5u9W|$nzB@^N`t9|sljbPN{t-1P?(h^9)1s}z*3$$MDIXtf))H5Lo zI(th$4@OakPJEfLkClP8Z+<)QPxfW@|Dm$yzyLGM^i6dia|e#I$2UCi-e0^j5WTwV z+)*mrKJ{PvEPi{FRCCE($i48p}1|;eSJ&6KYmv2ARq|t(_@!&_q z6%tZab?l@OG1&7`Dz41buHjPyb!sw3+<)70`s+(=+@w!ETGGXNxR6x~dQg1lvFAti zC>Tb?@vxE=UF*GgM}Ph&w})!I!mGq_e@>b8mp+EX$&O+fRvwiD)3QgO!oIQycgORl zSi&8^nEk0T#yI69O4+BR9omrPE5!g$89KyE60IkzF@Jrwb-=kNgLoe|9ay?z|A=N% z+D&dmCfmY59!#vc&XL8UPK4QhKi6+TtZH$2pp2n2eD4f40 zp(|})9%|XPc|VvddyHnZ%iB(T-E+m<4DK{4c$h1EomP+wt#K&s$1YwY78|k?`!O!G zaUPH=M2qP|Uy4Y4E1C8BgyZb@>>BcHiusdWrPm21S<<-Y?*3t-G3-c5_12IfRq> zA!V13)A8ahwZ5x;g6`ynx3&>iM6O~wUZ-xhlE?KXYg4{iPSiM3d!l*UZ~=L|q4mS{ zg}R~hhI`5!R|PGOb!wT)>(GQu5LYI7zAmYS7s}<%9?r~}ywj++Iu3DGL9`G8Y`n!r zuf2PcFhU+^vom`xHRB{|^X{48Q%d$elilR_RmD(B(Y}lu;xHAc1oU9A-zdpY zN<55l?8Bg-dOL+NC~{W)0@q5kEj|G5J~rhs>C*pHoK5hq;kL%Dw;EHQXAter7<3fQ zw!+fWePph|)Y+K%yc5K^splFsr?87r?o?FYB>a+G@U&JM0qAco%e{(r#&`X{B)#ljiCX!2etMgF%L5 zU!OQqIsy|gz!be*Xs5!h9Q2FP0JKt#jOh81_g}gpVs6gE8&@5_XeR%gOf4Caw#i1v6G36yLNLbl+tV!^`sX()E zz5fT;%#oZbWfs`1+{Z5Yu*!U_vf)b4M=d<`|IvU`Q^U3^kaLOU1UP?6y+1ff@QM>@ zeM`o^{EEO1QS0N)Gz{{@;5EjNckJ$Wk8NDujk$10MdhJNj0Ni7VRyTtRUsmZIG1sD zy90?I4%}hql778DVAZ{4b_cm+^P?}je|7AhlwpEugXy;m#zx>+w}DzVp*Rd{FXsVD z-630D?@}Gru{$n3j6*h)bu&v-x3qg}Q9={=P%^e1Dx&D#$|$Kq zSz*VHxrNy>@L)pJlj7c14!rmz?=}`v`LY}?0@}<@?A&nm8D27i9@IRpM?MIRN=OXd zcQ@yte1ghuh5r7S?Oc%CuA;qTI|5y>3~p14_yz;VUhlDZ9)QJU+vG(B4b-QGNRVFw zPpt8R^ZmRMjI|zi8b@rg@4Ttbab3Z;!*{u+oADu>6yT88U69@GvagX5x3Fs-fFD!? 
zr@LQJoVbj`M~QlQFUZEXT0Sm+{xtDA)j99994imryPY1_iUXt1yLqXu zunU`ft7nHg=Ak9?gUn*SY-W_WaZ;h&WQkO(jDoavcb>^F@z7b>CN*Ne+6mrFyHV z?=qeX=)OQ2?4r+WrI7)Hwo&{AGSx$T>aWkDaprc;!d7cB*MEw1b-Nz*kTlS6`Y#PWizwLd+xB_{4PI+Z{g@qPDJ#&~9tQGH zkvSB&@t;!s_)}4A__*Gf3N=TtZ?VtLkwKJSo1~LLf9ua};EVJ%B)04?A=o}<-Njj` zA!y#sMnqXP|N2SS&qZ1>cbx6lm>XOb{0-`yZTjb?u)P>~U!vJGkoTu*T)8GVeH%Mn5njt=2ZtHVOfQfehKAFJg=r*^qnuN z{o{rl|Im@A!$&$1m;9FW4|&(AV?=3?>nz<2gg@LT;WCo5*=ZhIj4* z0(dSX)mLjP!zm(I4@SxNo`7w#O!|1c-77FMBcQj6VRV%*F_^qj%i!lu*21v2 z`xg}95QH{~tJq1f1%h-)$67E`y^h(}r&}c!n|)7MtZ6|q5GX-`id1UYnKGOx zXms5H-068z!Qg=j7VcbGvf)d%bps6hNz7ro)NSjv zabED+#a2A7m~YhYd2zt%J18XF9ni1{ayoT7 zld?7LWmGTa2-Rf@8-CQ|mw)Nf-|Zw2!@{EZowaVLH(B`x;!-bkrswVtWRi6WvyQEi zDMlQ-8t#pr%v#>)Gbk#xO1Z+Y8TLK3HR^|FG=MY_{S@U9SsJVxPj@fBilK zlg~B~YAQR}{E*!`I=pSeDUvHoH z9#n5%AEqfadh8EdTpq1pIWb;gZrZL+3nqk)vhd|hh_gm2ytpQ%2!i>gVRB3y636?2 zg7RFM$>LQ-x%zgyOurfA`%2j=LkOaZ($iN>UdOIQ@9=1pWg=;A+E0z`G zp}8@5-z=HdqClb$K271}6UHVrs`uDj3u9jvC|I(zQX}wQHiEQ~}zkU9)8qy9~ z_qf6(M~eiye#GFXt*hK-{w!Vh9$h3%WmMtIu&FU~qt~4O#Wf1Bl5VphcQ9MV{dkx)2jA)Lzy83-a_4QZzMgH z(=-44RJiPEO*2A-+uno$mLcg>1Jane!o$|&vrJ4U=4Q+Ld|r9m<6NpfY*|bB1o~Mo zQ^B%9+(9ur?W_=^4qkb2A6S3w;|)3E&F2pIoig{lruSZQy`BB~5)No4hSH^v zYyJTJ(pU(I(>RazMrr$(PeW6qPm5Q3j<|A=l0DPZ*!%08*(j9*P(nHACkwBNJ$?BA zgn3lV#@1KH#}1xq*&XLMZJ+Ywtgux>A~OaEdEKymCwLFn{1Q252(K7WvCVvvlT%EclWXKXy4VH`yFD~ zZ-MJY8{qxzQ-aqNzJqXeFE39XZckO@fR1vP1(m~*CE{U;^-$1vQMjwt~?0`MHeE$m?6~;G;zM(OD&T0$v z;tz)shh$M}f3ET@qoAY}wZn{wx`uLzoDN)$@(+g8meO_ph;f;mi)P%c`uYcx zPsV#iy-34i?_H`hK;pi?`y}b`-KCJO!v@iip6Phb<3y9|un7!?bMQH&B~o_v)8~FX z8y0LCoNHEa*iE<*KZ*AXPyfij9LzjY9?|EL2l2?vu7Z>AA5tm}Bq(~-4!F<)I92-T zSR+A`YiFl_PCm1B+1n*(`YRn!IS)j^&onRZ4y*mehyZQw{^U|NFoZu)Kc$iFSd?lG z!Bp5exMGtr?T*+suO_m$bY9fB#JITH>?_CI%E|-4AI5kp?ogD;({hTDi1FOR9h7dv ze(iG8UzGDB?{z7S)u8iUt-H3SBKr_iMTPd@)6BgxUba2{T))M3(kJR%O1NG?CcfnC ztjl9UUizqfz6YfRFJ){eRF2VRgS%RDEqD;`em#%p`G8M5M`NLF_Zjb5dGuoYLe6YO zV*Lv4nh6KSfDWEeG zYNc+7$f%VN`5*M}o=snstUN^;(dW&eJ(8Nt{QOB2Hu&FeFuVSD-sZH?=o{Wz_kKW!IdgPuje_6Kn6#Lz1tigeDP27Yka(wPo?KCBxrLrr*=bUr(j6t+wZ6D|yW zr=aRScPqy~HMmUy-PyxFXwIC@a$W9+$cs`(#0tA)X1)H$1wEto$z~|tQMN{S)Fquq z8%)G{>oZgn{o|_Px>ofo&Mxo=rKH{K8<%nO(bSL;3hi_zY*(az|33w9(V2*`inIwB zI!JObS^+9F-n_g2@jJZ9zaCbS8#F*rljpqkf>CZbLSe*Vq)jiUD2mZ-#F+#F&w@*G z)C+tcfM?}(nKp)t7*bs8oQHC*BLtUwAWBI^T z_imc6#GR5e1h4i=a+<)HILh9dyzyaSp~unrVfC5^wEl|76MfKf@!^2NqThz9Xb8gp za7NrATU@OEL21(cP)0ZN6PUbDf}w8$d8Xtagj92$7!x}wjY|_X!s}om?&BV=>~4mT z%2U@xaRajKzxp${X7cD51QA*&H>BU-P891;#?9&2XPHACpU1b!Y0_USbfez%q|hkf z#u<26XJOPY0U03j_4{AyCEs3i);D4x^Qo|XRB1DQIdZtZAGD6kr9biSL`QeB(A=I% zny^&m>>tX!6OkBpZ9MFOUqWe)qAXMI)vd+0fuG9fiavd+wn6lBC;arWu}mi^1(XP8 zlSEfle~c%iLfVhodYp&DJm&Q=-MDYBr4~9Ol>NCf-L8yw}CLTU(K=hdffVdb&kbNl`wTwlrKbHdK3!HEhH4Q#* z!>e-*5e&|HS|R|J$7Ip$8z)-B5;;~n>_`Vz{25?r8BENB(&EHpDu=ne-kkpIyOCF< z5|xdIujn>0WM>~nn!hM*h&NBz=KpusSagm~=wZ6MUxFYBxE3qJHi&6P6d;|opV2)>W)*_m$5NG{LPw7Dk*CuJ^8 z2cDF)_IvCXNX2{M=Zp~nUP%CUIm4nk`sLXR8}7^Ewm(USoLJ#k|5N{)0pf_M~&IwWQ$2H#bz{Ej!0THbt3K8b1NoK-(#Pv;MKKu@ZF0nx;Ag0c zgwOS%nN>DaxLXzWNrUH%J=4}mME|9vvsdrlBxA8E_bfi$F)l^&A%hOdiI`!9eGY3aU1(B-zUOd=gMLZL4}-&@GZ z&CAoWAN4hgYscJZlP)2!j9*;AXWOu*=p&gI~hEBX_ zZZ+i^3Vg6PqswWVE@HkeHC^_B5L`|PXnHx%+7|Q4bGGg(Wn*TF%|-{+E(LzP$VRM&@_7K^Bo$_*^r?)7x&0(M)n* z{V{Tz+*aUFsQ(lK?6do1Eb5F$U>N+#4PM2DynSs<2kn(intWbN*}{b(C}&=n!ej`; zLAui390(>npiBw^3~Ee1OCz9TqIO{KTEm)EO(?)`&hbK(9=$p5CeEsC_LwL5GweJU z>FZ}JGK6ws?r!A?k`%L7@?vyG(qx@jzZ$>ds%Duz+8}F$oO0r18lIXa7rfF6Hdg`Pm>S(Go+3()j#lf zH|}CK+cTy-5q8;+#-drFy>_X-bq0t#E7&7Q;C4^iz*{1RR=bBQYKajQzv<;R&Nu3k z5O$`J&c%@OdCO4sARiPkwJ+ws?6)b@$^;t7kQzQ|jz+AF^nL%qo>pH{PR>A?&BzBZ 
zV^nuXm6y(am>DcA4Q<`-E%%&kqXktd<_u|;BELj2A<0Sj^9jp={gqbG)R*gO1S zeDz`&@Pc=P*zzcx%=5SP?vDppLY4wm45G!}lm3{&YH=#xZe z8q+>y%^1JyF}51SbzrpeQ1z%qPrl4UPFXcV=4XdLP`3R}dB{!{zutT1!B_xCU zJmxljQ9X&B&cOhD+0YPUN_~jc2SaKYm`IG%AHswy;XtKBsI&{hoykzxbwaERYg501 zL~@jd&B96yg!eJ!V|C7aUnat?=LJ{2=IDRkaz=u;PHS&DFN(yuvpUv42(-sGt7G7e z$1TAdZjM=dYu{*xyfW~^F!&ni#HSR^tKzv%4?Nljz+6ZU=Af}^Y_AyQPYihx7@i3r zr~_!r4rR(~Im@}RhCe5`3b1q%UeU6ZIIO}3KGD5G`qA!`yP_g*U4H$3kh?*=Ocfk#}ujVjJ36wd!)GT8-A^@v;vi=+UGrY$g^BiM{Dn| zoEwmG`QrMza3bR6&#d5078DwH%KOKaWd`>)m&3hz4G)MZ^Y)t5i^1&1nMcT+mS!V{ za*D;EEKV607?0a&@^OL4v|^Re`-}piI2v`m8K;xFN=|;gS%W0z2esCyXSFMvs(^1S-u>-#0Mdm|}tJEW7Ryn}r4(Na% zQjesMf^F|zajHr&%(`UWc}Mt&A?K+5w_MvsS0HOX3Nz3d%^NqaYjW+>3Tl)Urcxig z^Qt2xSaGVV(xR?l8USR8@e*kv#DV4QNf$!8Y&!N=7pr*^4u$@~v1Q0R!V5h}4V^E8 zh7={UKWz;v!{;CFZSli+OB@OhQ_McUYF%ku#e_Vd29C@%w%r5I9`wkanr^_eQ!gB@ zR%E;!ui?X=i@iRH3i-WNs60GUT%EX9RUt}=x52($i%!yrrd?t#a?g7*km;vO<-@I? z4}4QncWw}OGoklnrZn!BC6|7b6{ZnTn9`&OTExt{w=comID6q71?G=jUM*dONoCSg z=v3r`tT9!}ieX_C<+^IvXnm1_Q&xNX)=wqGG~p-Dxjvxx_`nr#fw|4yRJRgeC@Lw| zB9X_ewt#TPpb>hCDNsUxe<gg>VaOshOe1>vc+{u{ zbdyU~%&WS6LqCbp0^t{Ixbbz~0XfhDQ}he!Y!e!>FP;UCH&W+>cvgfya?dL>%75M& z*$S)vw!T3M${Q=cVr7M~S-DpLDpoQ@=6)Jb9v}AksC4M7A;lYu~r#bil_@GgLdC<@4``f0M?m^Lq9Tz8_l_K+(1`xezc7xJn>{Zi@ z;_Av9CD0?z&EXAM;5K{ug@_+x8@i0d(cy!;O8n)X3~Z==)UPD zQx9wwMO95T&vm5zccGTP1$yB&I`R~1zVcmJrIa!XL8waIUaa1-UN8c;*L$^1PCYT& zKO_(QskeP0wU=>vmjkAP;dERc99Z(1ky|T)=Yfp!bqSq=InkNo_nY_So5_X$+JvP8 zriZ^xGbPPL;wLUrwbBvc?-EXxx~)b(%&M0=^4R6tFt^QsC}f100hY15yQtAel~BQn zO49n|GkT#K)S%~%7452M`GjmNr^Eoj7zB2aKbuvB)Fr(}U%v1PS3I~haCDJ~tos@$ zMS}-!w(=c!dR2zzY%4U~`ld|SJ2s~7eMW2Qup4IQ&P#e2B>@Uh%BK0F*Kb;$+=PFL>urEH~rMO393Ief9ybeYby@5KnT^GQvo zY~{0J!~GINEB=%_Gq*FOT$sPMF2CQH!G57T&mHm9)SzD;p~{@T)SJ>h-bU;v$a7Dr zy2Ubw058(?nvE&4ge$;|2Z{x6%MB*~KG2!WojP6weaVTapd4qwruJKvu9*Rq?ze>^ zJT8j0)_*?pihNH!!pl%}#w0AJnEZy3KZmn|Q(`O__fb1nD&;Ip@G0smU^q&K9bA&S zFv3MrhR-Hpaw5lT4-UzOB->HXTRWDk9glGWDPlImEEgV^jTn-qbDY1s;SYea4afJC zO-QUR<&v*miE*B^CWnDQF~aZ!q~N#}@WDQhNi$T<0yNN1%!m(I^U!LD z?1RbkY4U#jXU5pwx{Pi~7He(ab{T=XYS+Gw4lh$s+8upCb$ZwCygdA>Dqwa$p$v4L znvI@wb1=P)lxS^y5N~_CaoRgx-kOn1YAC#H{4!eHVyA5ze^s!}`xI4GV!FH9_RybA zE_6t=5Ow(a9|70W9wZGEL<-K%2C>p-1CeE~nDCex_qNWXyt0g^Gi|t?fIHlZ;~xp_ zM`O-xREepKm&Y6HT3*=^M|3GY`~Odd2_W>Uz|XYuN<`7+2Vj6!&>QI zk?*$fYu~$zdH0XU7k|7YDx~0z&1?nwY4xy(U;TJ6p>;aUs%k(L*k7Hq{N=DAfFd8; zlq>#YQs~@BYWVnHr+40HL=`6qb(7djcrELH3Jc>;Z=N?+M32a`p+>5oq}<_1l_R#? zydWF_0y{8fBDZVsYh7li@m*%%z@`3-sd2IGN{egQXZ2YbsS6sfiCP-?Gw!grexcMuWs}gj5C7>VA$)0(6&50#LauK2&x*+K zpXzYQCyPb&)`HE~m#-n!E$A1S>#f_>X!G%v8-AnWX&S9*^xPt#yJ&H~o!L>?3hoAA zIE=uyDF-9uk%+ciy0HZ;?j;~5@vnSImDBkVhu`CV=B6KNgL46U{i$jzT3RPt*mM=F z^VzZ86)lnZGtS1NZLh+ugpDea?2rMGFoaq!XGG>mokAhJ? 
zjBhX6O?dsxZUIb;TEjNDg}CtIDd+xcPOL%0T3U;M8$`A`QR!9=?-o8N`1>pXuKK_> z|Kb-%hxnNgg^mtzb*Mu_CEJLqS3tBukzT}}itnqqg$mFeK&R@}zy4)Nsf%Ic3q7E* zJX8Z$y0Uc^6-^9J=%4!h@!GlyNawerWiUV!z)~qm$(NZIjrE$kT0h7Pp!14~)ZmYz zCes+*0SN~Tjhqy#Tw(?e%jGn@AIRv2W?t-zw&gOTS`GmA^yn{klZ@TI+Ot=x&bje^ zoc(H;HM{+T9R=AmL0G?9&LKomzTNo8J#^}|=9Y=HKVo__zh_xtN&Prh1b12+b-Ug` ze5PGPFO4KOu>nrxZ?Q5P#SP{XY!NdzW>{v;Uy>4+@bvS<;{A@%VcmW6WzO+(zdSr| zU*k#3zIv^gUO-e{3#rZ%Ljb<=>GV4%d#c03VpfXiPu;IbnO~1auUrIDr7}7)@ju9D zkqsSlGkF#CGuxg~@l}4DVj&2w8*xhdVL8a79cTf#l?=OQhDR%>DjMJ(bD|bWm+9Oy zDW&)st|{JxF#QBsFS=?t#y20JNtWXGwGm1hZ(Ut4Zi*5~Pwa>2 z)^up6>J?MpFpCG6yF(jR&D&{R8y8eFh^yw7_O(ve9g+OM24HF5as@xJW7pq+>e^M!c*{Y9gDAi@aVm`n9 zOYJ_rrBon*%0U_s8}4pYdwo}~@qfuVYO7a=HnM-l^Cm1L4MjXd_&&2AzQs^`gZ|=G z*BOQSrK7%7H-VfTDGAFjY5LUPqqIuD>9;*e(HZw1@La*m#9ua7qi&4RE!Xo}afcht zb7Hhk2?w48{MxUpZ^@ay3m%3-M%Se}N4gj5kh3+!V!g)_MBcX~H#tDY_s?Kl)fv7q zXRRt~NIR%Ozu4b?JyLC_zfUE!E={$@j#SyGN}V>d+XrZenfq&?Tdu3#5*O9f6iH~z z(h)7Vm&EbuQ-9*WGz-2hdWFnpl4!Ls2);dh@m-F$5X}R*>^qy5YxnIkfH??8JfzWf zmHbA7O%CXD6x;a#Z|Jy!HSCb)xx?-1_2;|O)`I`!%KUBb(RfD6u;!C{eoL*PdPxd z?US}+b>7kI`W1|3OVm`Lxj{HMX|Miet8{zdZ)f8iSa5F`LP_6 zB*7d_sLpA=1eH@J^FAUV+p=6`?UJ7krc2Uyf#6vFb5+c`Uknf>c1N21{Nw>F?Kv`6 zKZwi&E>~7vNtwba);%nkfV4c3WV=X*1mCk)1Ixd9kteDRz*YvP2W2qEEiKQc?6s0} z*=rkB&*|w{>F}{3L-`Fb3mTKZTQo~lkw2cQ>@D%`-%jb@+Mk;)V1OaVd?P)LL`Q&T zrf;Y~>Of{@JWLXF@dZ&mcFrU2dXq=x@F#87sqiuqdAMet&uy$+J3Fp1C*N(U-(n!& z?iBx1cNXpedQJ)MW9@{3yJgj{&5L(z4p?Cu(1LmVm;Uz83~-_Fgt{rOnviRG%(3K) zx=t4D$pmE?i+#)svlMd~%x>uO^OZH(IlTfJ%`cN_;=T7rs?2&{u>G}F-4fIvi2UL| zC(VG@zkKr*%BMr!(7^O{^BR9yo^AFM=laVy;6k>|6{}g8sKKnLQ?A2H~$S^_L z-%OFYdH-Dcs-VxZQH_=Dt7l;VLmGj!P5)5T@j-UhjkP-J)fLg83~b>_hq1_$yY*|U zXs<oA@c#&HiuF|9gq2i=NSCE>oMnidy<6S=9aP>q+V^)=xte z*|op(9so6FF6|xlu8!#!L64qzAM9P6I+()V&Q$dGc+ckHM^PQ?0xbCqSJh0TW`IrP zXr1Xi-312foJG=9)~|zoLXS{hi>SQ!LIYj(bQ7@`5Zs)4li6oz!a%hgg+tb>Wq%U0 zp}a%a0>%M?2q3o?k-MkPUjF$0N=4IJhRz{9<`#4?`yEO1c;Xc0_44KZdR-Zv@n`SF zEQWD+mai8@3ZI0#yc)ITJp1NfVOnN3hlLvV(GH=Qk!7TvoPEq-vWjx$1wTNn7dEf* z*5l2bzY(r>=fbZ!tDj>Pj?S5SPdx@F7Im3+P{I+)2~z7N^F!d`0C`1as1U|r4Y zoiKi`6J@~rOY~Awic*;xaBX+K>Zo7ciab~Jj|%Q&6UXGm(}<2t&^-oZPEy!2>^;R} zaIDZ;t^1@KAg)>U);P8&mOln-wS4z#1wH*4_W_bI0WS6WKA22>-jf(F zY6B#>=cA?j!-LoS7O4O-u)W!Zp4`&R{x*P zUF#QI_#jD#ZydG57C(Ao@ILDQ&wL5=^bCv!sbnnFW-uen)K}t}$qRbRCXb$dSQ=CV^oSjw69+X^)8~ zFk>ix8NxyFBoNX0ZvP$8e`gD|6?%n-8vt8(OXJOXZ{k3}6!)qmB$q}dWC;K$n{&dv zo^v=X^PLJnTx&l+Cg1u}d4WL($3!7xx+!kVF)^+5|Ptw~U`1yGW3QSh^tW6vGZmg@d0d&g#J3{S4BA*n<|y*~i*855uY8s*6+EN(I0< zsMx_MrlZ750=+%OX{j}eLrz%eQOZjAuz-H5Y*?diXQWn3KrZ7lK;Zmg37aYC;_FEk z_Tg00H#$Sd82N|iKX~6iKk*r!^T4(}YW4%$BVX=sqiS7;qa{5i=Dl}To>dd8fK=<| zedyWJsm<&kcWtXamdcLijwnz}rm$CS|$q@MaHH-JcFLjri z&`KLbjceWh`lL^gC_2NbPepdka*{4={UqM^h#Mvt-+c8Zj_1h&|7-oz8sO z98k~Tz^OR0s1adrvD06HF!d8o|3B9WIcJcuofyl3>dt=7IE=|Sb1j zm}V#imilfejfzLI8~~y&`PXc#A|4pS5WiGuk;$*^iSR(d;4*T!fcc}VgTRc{$;aAR zP1l;1GOZw@!+RtKc>^sKE_xbIC zGSHjrKSlgMKP4EQ1V$kYadv@D4a^%jw>+glYfMAykDW(Nm>olT6NfD8;OKLM+47F= zj~Z$_A2F*nc&QGJuCD*SB8{>v?MoAXYrfsZicQXz7ncUvs3rhbq6_gSPx~7@zbUnM zwC#bPnlsX@E#%5E7`FiQWoe%+>H@100bHh+KYsc)^r#gr!$oG79#b*sj<@>*w*hct z1M6-~qz1tcb@6%Un-+5jNYY@k)db7^O_zT>Y4P)+r&Ql51ds1bc zJh3igH&zb!fPPYkB&#hQ3%KyuyqYFV10LM$_*F7O^yvqnzTONw3)}E1*!g=g|2sKc zhz}n>%PqB}e++A7=`#`CMv9V-tp~yIAOjM9$_X6a%+dcnn84h4n6_G5Wn;Ic_@{*= zY*}LDh#dH105%*3zaO>?SyC z_~65;8#wnELNz@Hvddq;A&w5mbwrr9kW3prO#|mYxZt+}*7wH1HlZ%U0gXDY8Oxah1!TXP1)34w*Pl_xn4((He~c)fp9W2h zo)~;ig*a`N{Pq25-c=y-V6Q8F$^W0=I~{RT>pgW~QGPE`hKX!|k%j^+111S=6JV7M zBjyIOWwKm8zpF$mGZkXkSOT!r!cUFh_HyXqZZs}QT8043UNKG!W{Yeui{It^zLS>i zv$;!5lT8Qqew$l_HpHED!*7}7UmrEL|HRZpYy$%mStm9?&t_B=HTW!nLhr#^N`dwN 
z27+cpIEe}fF>Y3gI=hZ#og+XsmNECc!hfjrw!N;b*0pVubzZtpes z6N%|U#<>aA?KJ69jcH$$O(MT`Rer-(S|A?d^&BX_{Kp`TGvYvYcgps#YV>@OR{69> z2k>O=mqpA<1ugYc|59^5HbpI~uP;9M`|ed-10__e!KKXMh*{`wr1z=WP$0vvx} z1G=GT^<_@LGq2I2_PI~(_rOEI3zqbnl@_+#k!)Jd)t-j@dH$S^{^DgmS5Ea1mJk14 z-T(YDYVH?~b=;}E6%xMv7nKlyf&0Sk;*riM>a@l_*t^Yd4JqkqjlqMZyk;XMpM5hy z^9cw6TJ{|Zn_Z?#Xi>|y9($J4>L>xycxG!+6(C1(T)S6?IqNfJJe~@NQViaDv5nuj za4{nGqWm2ut^auhCnC)FHzyn!3?x36iV}`zBr|`h-p!J!^wF+XeYGiWQM=#>7}$;z z$TG9Cvl}Yiu{f}MGYz~ta|Q{kRGP&SNsX&gOa-)FiiU{~az_n*B21lJXC$=<4){e}NwH00*M&lPHJVeRqbTb64(I3xV9F6M*@{ zZmSdASG<3`f89_PF2jDzF(%)BG(OFM)-jsdFECB1y7FvQP?OOK z#(!nfk-`o*diD;mrPsysst(01wSm!I-lTO7S$oURPu9B!hPD2ktM5 zZMPqV-T~$rYgB=bD-~Q_DU4K6G}&GdzTh<^z5d!E`o@_Zf~qbj=+4@PM%3J`lZA$A z(d|UN3iIy%*moZv*q%fEj%DnB!yIhwjN~DZ*LSTxrrG5R>=W6y5w`5^yHM`mUO@ZS zTAnLzf-CMNuZQ#l|TT0clFQHXWf4+?j>O5sqp_g)Azpy3!vN-qY z&lYS_o73{RG!~;2Q_T|*zBlt3?jLS-KT&)eKRyVs2LZKtO>NpJ&Js%mT=SiG;th@L zFAr+dA{hSC&Ns@r5A(*@Ewwcb`Ql%^1#|E`AzfLJl{n`5?<_BsG^9#!_Y;2T-fwqcyicJADBIDt_0SXmFw>t60w=igiu}ty z^<5k92&0z5^$E=t#mDw3q-_g8Qt_qnNY$ga*A+?e(q6P8b2^YcrX^57K4dTaSf;6J zr}-1{iz0NLtY1uBMh6IlP4aG*|H%6a-VboCcH(;2@gM#$-O!KizP^EQA^!`lU{ z1_T`n5LfuQ6Hq}c-)N#WES5HZP_t#;gS%w{`!@}X%p(*BOov{KjVYsKNg>cAMpbE-d*wxIlXK@ zYsSW|^x>na!*At}^)(SDzzJrO8w!wARqyA4oJkC=F46J;Pl|LpG%m`iBjYrPyV z4mE}yt^Nxzq(8U9XAN**jVZ&kfy)9}j}X2q)BCSGxM~<9PM>`qJ6H3GG=wR#HU05^ z41idTBDiEcM1X^0rUs1wXr7~q5L?$f`k?=|xy|F55dFkQfD{8!D4V@y)7;`(k%h=e z*LgyRk0v!@$EO^3a8jKj_T}e%7MmYZcHZIr)v0A;nO$vVR*mTA+Y5^TuPPMJI3nfL z>)`@Fcftg733xOR8>Bt*%^L6LYy1B_sg&LE_TtQ9CsRc_%`K0OTQ>hmxFVBKIY5MqyrvSo2q}1a6y5A8_U!7=)g}{!+mMPO z97W_-lnJbIQ220qQ#Sj7*z-%qQEPdcaxxnK*&FzH=QiV6-7=fDt*(A@vB5^>gbCO1^~qFoVaMk>dxu z$_GGO+RyiJ00M?<^1wqn40rJaY5fn9CR65GZGw2Kv(7AonY z^zO6RiFG?DzJczMX!tTn>4kkY&bm4GxT&kEwuGnR(7usYgZA`T0332{h`9%UDxPpW z*EYk%_P-n9ae3;daeUMLrrVE<5x0PCi?j>0?IXa5D2GHoyzIlPep2h!pdz-fm8^2} zSWXL+eyIJPg>ISR=C-{Y-}(ktj#1vL3N`4>ifZRc zm4|*^KR+ny4Hu?0QSAYH>Axwzf7}8-$mt;^?6SgAe_(-4A^&YJOubRiuDd%Ub$?6@ zL7vqHK%LnaSufZ4?q>p>v!mVqmB0w=CWq?0RL!jU_WdQF2Hm#t^P*2T9XJ5#yUY>} z6I~hpRD(8lx#YtPrMNXQ=Ywe?Ri$@q5N(A?w*Y|cegu$X1`j@r$2UF~Z{7JA$TRw0 zmR01x7SwU~JW>Kkzj{> zMRzySAl?1D*?YV9dG(z4ukRbFFp7YZ*YXPsc`>ztSzO&b>shububilF-be zH~x%u_9*Yq;>@2j<-SM*EWD&kK^>;GmEq?|S`{U4^n`_x@TrN{7ahalGL1bQeMbAJ zOYNV#M)9~E55J;CtDC=SDzr&tjAOl_NZ^>E zyDe22tF8wgBm9t4WObWeTm|7Nqo**?r8YJ`Lg| zVuItTBLmu`2OVUol4X7I>?Tpz)V2=sM>M|iAJhfVVhzJC9+2{cbw-%&tAK))YrQCJ zT4gFJzL5#O8vQjuGbZ41ZbXoP2C7K#W}u@NBE2r+18C0D^~IF11v2ITD0cvG924q` z`{8{f1J`sVx;>^*hW$N|#rIv!`(Fe&ua7!z=6ig?w<#>whBT zXSWD>#r)*6IZy-{mb$v~abQ+k_Lud&{q z?kEPKOcDVJ*j1k$tVou0fuq0(!P^h5xUHl8qc0MZ(WqdlQawmF0ghFJ4i0y9@V9UA zB`}tC-t`8HZN$@{XkF~N#5=qfcQ$Ki5B`U7`INEy-EGQ_W_~#Y|pL8E{y(({65vLrDfOg@mRP#j^+!!GQ4VQHb|e4G_B&iTYvyJQ5zuymO@b z&6?`gpNN1o-uLf2!gvtU^Vb@?C8e)U0Xb)(u2qY_-8;+Ff#w1jq0#)w^*6ui+k*H*;!`4Qt|xSihcPrOU=gFnTW62zv)_3J#6 zMi@rg+5d@_zplg~AcA>AO07FzWi^O_a?58fL%k*HI$WO#PYEK0fOBd`LqxtwDup;UtRqHY_h@t^#djnNd_nwauKLt55@o z5#)fH&g}*ydAEedwR~3{)XLY1x2V2=y^W6Um8y-^K36vUZp^NIxF&d8J_}#sAKuPc zY&ddH?T`=P*o|rc$eD6m&MmZ!fVe8@e9VLzOA&I{0o9laVT?8vQYjKa zg%qepyiRHX{6UrosW$P?BxDck&~Ve<@Ihf5_FARNjkq#Bfs2TO!vxU!Jmhd*)53-@ z2!wfs!D{6lh{{rHFZXFWk{Uu*4edAvr)utMl61i0J#t%)m>A6HL0j<;gP7=4VDMf} zl-}=+SaKm>^&V!D^enj61R_HICj5nx4Wil7^ZhAs#d>|{l)&%M?y#&k@*K7!568JZ zke~uPu9qGrXBb#^6hpvn?D?U5;{sH!TV5U6EUm=tNLfDs#ONIa$<^MO#I>z< z95qBnIGfolvE_hZ)Hlk!m7Re_7!b3Ut0|V~3RukzS2Pua46Q29 z#Sg+7RS!@|Qs_VArTL{I*ovP9NtCZf(0CxV7##bf)<=8c`e@4(Vql0?-m7Zxm;egX z%V^1Vd>zGaE|{3MW2fvlNY5@ZBQ&>b`$_;aY7E zaFUcEQ*u5wjh699@2_ejF8`UbF0hA=Cl1`snOt zMh1u96%y>bw$#DJuKK8iU;R_i^e4h~Ul5jK#pC#C%u@k`3WY2>6%g0oGVaRRtz#G> 
zz8H?g!ieWHw_IOZ)yy5q=Z=N~Nc>!_!r>s8{QhxG&2}HJ-M9fE{nBRPYIPU)^HR4i zKFLz%IZ&fsa;0xb3x&Pw7cC~GIPt+BVHcK+a1CVW8;WxaYqnXYl2Utr%ryw#0?K{) z*6X=~W6Rw!oc*TnOyO_g|DZ*{uKol5|40Yq8cH1i9=ZMag$>Ja<0dvlf^}?79RGvK zY;Equ&RK~3mkPVi4x>}}$qHiqg0l$6Zjl%{rAU!dzh%U#S(NcQvyW1B8yt#A> zT9LEg?Kuv8tIo})UAw6__boR*6TJqRd%CH45G`f@Z7}9^Gyuf4|9`cJ5gQqg%kjlB=3rJ zA-_1@V%XdX0rD@Ar?6JlC39|b+8%Ana0-18qt1=9Jk_f9A7=hQ_;4B?c5EuP$^s>{ z7kf=ZlEbl1y}|WrHZlO0k({E~^$LYmdTTOp#z2qXW1+BavdgmHc>J0{H*jVq`y zaq?eJvn(^fkLeIN9VPn4Tn z#JWcoS}U|(RNL*))u?wZa*q0re%ziY$u^$YPwNSLDj@Xf6T6;aT$^bcYQnmRi5Y5J zq4om`NWEY7j7!zkzCev0fVXJ6e?0ol<$w6FUw2)r&6yI=PAp3dU*w2}N$p=A1V`W5 zc&TD^aEsP&l5(e%p(l`EgLB~+CE9S}wiKt%F_8Jyj959=CMeP(!{9!A+xXt7u@rE~ z2D69m7jj9)*;5a3z#}dYN63;FMgS@`dN!b%C z-L1}f=bp)L;iOh8d7nBW*SP*jK@hM^sl#o-K7HxI%gzauQDgU)skI@}@oc&^!lyvI z;fHg!+~e*`#PXF5;5ts|xLuV^)o&DnEg#GC6Bsnr%IKLw*BFb#HQv3^htiR--%8$m z%-Ea%it@|YxtBhZ`66P`kQv79BxP5{f*70ugbWMI*>2(e%fmPGIRW@`b>n{KVn^@u%J4l?xkHAag(sz!^|OCm1Is z6AKTs-8y$Fl=v3^CQQcYPKBwOnp6@GB`Bd|fHMt@;M@UCUojjBB)w2tM|p;U^jM+J z6S5?}*;?!>PF~BUWKKdfOnAw(1u7YY-vZoS>g1jw=Jh8stkW`mW<-5nf_|zS>~R~J z(urJ8A#PU&GOHFtl6Iniy!3Ih4YE>Z)bJUkH7Dh7E4iI-uA(f89gYR7-nWw2ea!?# zA2H--*i|qxnOFuu=S*jrh;MiZ zk*I`&2Ahn(vla%K?tB5KfjK!`JvxqgecuG&s7yVnz&sod1oa57)~8NrEvn^qrw62Y zX<5}sF0Re(l)uimZd)@++-C~(&gH}X6%Y_Xgq51hZenVo zRiLpA$!WMgZ@Gt3DvW}4NaV$xU!v0yDxdSViY#jgL_Pl|ZK8W8Nx-&Tqe;A5Ll%47 z0L{`U2Ot8-p}JAG<+k-=S9c|t=Rh67p&+Q;Xc*2KXq4LgV<~*o`d%@|(q8K@^hJN5 zYbnu*$f9q8RJ>RU$o5sN-7#=<*%)@tFw!!ERWJ;yt z+A%sn%kDsUYNOfWZPL>yv+7a*d3H88S$75CIT>bSDzA<17)^qDty{=ZD)SpXVf5y$ z$|tvz?Rbm93G(e`jLV=cGQ2=qSEu(>>-~FekUg-Kb%rZ-FbxYG!hN%^L4f+(ypc=# zfS}CfCY8vwlIV%R$31p+VP=eJ#OeY4Ca*Z7vGU%76TtAQy1fK?M!|-W0NwCI=!Kk?|^4qldR$5*=@shTW>{0lBcp#94%nf9GQ2I<$;oBC${ zbT3UVSE2rcml1IG$EpDx@>bumE|Q&5xun!$5q%$2(7nFsBH$?XxUH5n2uKP%4hE>IFnqhia!2mi7Jx7OOx@F@Qi$8ADLhuapdVv#t^L{lMeugIqg5U`&aQ z=3lhYjarSD8ToVGiPW22iXOd~Wo4gHKczci1wvokU8NdL)||OBtj}~qSo=)gypN5| zpiJq0`Rn52Yecc$x%LK%7j5uRv2h194nhFRur&06RK)t&3p;{r*t;lky1koC<5tCkj|0M)w<{lD1pOztqSrXZTq7tT_A4ec(%do_^gwoc?d2*gNi( zupHwPAj7$Qn~k!Yb?DHgP{=DqKCipa$|w$taw{?hTC#vr1 zpx`$)7}(%BYlIx@==8$pmt^)IKYobf17Xl>L|G~j#j84*NLF2+bEt2|h`!Tk_H?(M zFTX`qNrihET_)9m>i>gopoZ}AlmPGNjQAH))RXhOAkw>kEDnJg5 zU3)c0;vfbuNt)eP^gmFIpWgw_Cw8lqNC0I@GV8^@90?EqNb(3M}UbT$vx&5`tPsLhjROO z7;SV=_w~=1{PQkb_{p0MN-8qJJMQ`q*ql$zZfk`>C2Q1h_Q3~0z#z*?6?D8vMiJNj z?e;|M01!PiL$>Pq07-KGmU1%hG1i{Q&G{n;)bxO&`M=JSAksRQR1{J2-@jlC)Nn|T zMJ0s^Ke2Zw6X&so%cwrO&4mL3A!c2gh*{Fw$Iu5qZT|PT4dw&hkG0b}y9InGc?h>t z0UN$W`%bnjBKYki_ssz#hvQ)_$?aJTBugj&I&XErb0}j+sa{*knbZ%2 zKc%yr^Zrnh{rf_Hn0#$0+zFWaBDlYy;&c899A0!o@eP}$-BMQl>7VJ+)r0pFW=7)^SW%3{XyyBWKJm}*aclYVdHql7c(Sxkz3|Z|-?}5%s+QUc{QzhNj z$BM>rd{0T9!2bD?BsK4U1rkc9i@_NDaNGLIq;HFmN*}H5dSZ;Tn;m2fQgxk?9oqK{XR@<_u#dew(h0n@%a7c zi+v$~l4fA|R*d2=3;u1&f8O-tBDL#J;tDCb{4W<1k>nE^8qNxr`OoVznel$eKq+HD z@%g{GSp0%M>Q60zpSKbc`yn7BTb45AKbp#yPwxr1e?1(XBia9a;U~;6xRP(u{&l#2 zwU&D?AGrVC7o8hv|I5V;!N6n)rF;Hko1YW!@82Z2gL_WU3?C8xXKwv=v0*SUl7V8# z|I7Gtz=3<_Y@x^v{`;$cf0K>et{;mR@$i2c-~V4urhg_7-^nm#gbX|fn|mM6MYm)E z$B1MS&&<*K5EKpPMdQ)NNUq0;^2}eG)o)7)>2{Gxyq-$%=A(A=r|_`71OdKWCVmKYg~E1S!mUTkzMc(~iA^jeL)ggGnC0#%6QH@%YEOBj^Nh ze!R8GGGqP5t`P>Yk}OJ<;iVf^Zw-K`ZBgkeHQsYRj*;LSGwE=CG*yqZ7TsUpMva=^ zQ!w!8xgM)?rB#bJd&T(%`Y5#z*@s^4lXxpd_4VoTqTZ%8vIp*BNc|#KOP__Sfyy%} z3uj;s=nWoJ16q`Wekp3J$FojmcIWGvH4zHRlq|s{U_%qb1f9KtoX(b`P>}(jG45kS zVj?K_Y>?T>$t_Us%XwRV`z)o>u>LgPr*E!Tr@#Ai+TC3BUL9s`l@J@+h{6JqbCZSm zcMKL3O=V;1n^J5G@D*KjHI9povG^5c5AcIuBo;e^qZ2RcXl)tWG`pK%iG z(1U&(6%Hw}2oqQia&0H);^}JR`XTGIz<8%~1Mqa{kyNXrO3*5@NORL3=7cFXx7F>+ 
zma3Op@tcWiab}T-b&M6DcXhoLkg&~}vVam!;YieI@$7xR>|vRrY1qg&8grvsAl6X6$*|V-+^G4vC-4cAnK#|--Q#ec=ku|9Y%{E$17Uu`CotK=I`^Y|%9QhD zP@H}w`P<~gyCxbJQ&&Ik3h<+P5`w?EDcqApn5%`0s=Ajr`mtG+8=|TB&AdV>%$1aM2;HJk+0c-ijUbAzMg0sZrf&YuU8!Xp~>zta70KVtX@) z_uBrXZpWq1V#+gj&a`p?;RMQiJpi=+sLwPCxMptDaNug-!b&7F~5R;oeU2{h$(hQ&YHyC7*}!+UG!3iy-&k&F|e1=F{=gFQT=(O zS~+viNyE-p)qX(zvf2(2SZVEg4*L?`{_7!HhO-X|@em}T(9Ji5h}&^#Zmbl{ie(XwPppRA@ zpx-(jte;iqY6Y>kskxplXYW&g{_E4F*syAhBQkjWtmDaLqAw(>JTT~3q8Si&zI5@X zf)3pI>$?y4FVimaRn8x~i@-V?`|hYREI5f0+t0hd^Tl(=f+JF&@X=miB2Ip&O z!IK%P|7`L4N${r}BzQHaJa1})L`Y!Mk#<|eRDI{&ko^2nL7UJ6Ii<(+e{D-!ethKt z85choF^J~${0a$$I)}6Ki9O#xQ-E8UbHIy{aVO~)n-6~@WdVsvA$+B^D>ZFdKiWg@ z4O*#J39|KZZ04n!nEc~F@YRrhP<&MNR6se-<#=MN;LOn63aQ`x{LPMlCS=~m0PK9m zR(NU3KOGk-_~Ijbo>}5geGwISuWl~pmgxFondw~@yjIVaG2QRG<1*^bGgsMR@t;Wz zf}Jh(Yr<#=Xi-p=Uop#ZLorxvFZm5f=S8_e9uhAG1JS(98vMOjvbTd_3jCs6OVJVC-U~E=ZWnXO1uYqGjcaAVF=bwz=42s@Ur%cVS$OJFx378t}4LVm-%ND&}0B>dyfm zv$+4>EZpvST(eO~g57M@{ixBj&GOPOE9pjuHdD2+X)f0|x<`o_SSO5Sm4*9C%ojZA zu?}yyUZ%uWn_ww$W>gcu44trqrn0khCBubDc$bMsshYOk8xMq4(hAHdvwLbzQUaQd z&nSZy=I}znL^G?EF2RtKq?^mt zX>hb!y>M-fyh|##J+2kp;{xpJ`tq|Y4u#cbVS}r&(kOBD3#3)^rvysqa6kxVjh`KS zjMs{-Y02?uLmsK5Sc4*nW)W=1U9@$FgC>J2Jk>8EmlGXUV%jg45zbZdxidybm3w8W zOVZQHDkgN}S}j5EI;U;O&AB4_&Rse&4}UyrlEM6uQv$#{^K#r9@N2JDw_os@_Qp4T zC$icHGuQClk9uWyWWJr@Q8X>=X_zrWo9Xkc*YL5DA8uq=wqhS6mVd9dNbIgVl-oq- z)Dc6+rmdPjY2Y9 zm7y(&kdyvlJ=3b1ZdNOE2RJHKhOxGdG-rU$ng3*zt-zScM4f4__Pr8E={G%B60|$H z#V_XA3r5s$6s;t(%1z^x$x-Z@tPfXa!}MyP0dJ+^#jp!^7G@2&Y*UpwX+c|Cx!*LSI#0rZa_ zPQA3(5!m)64bp#ueKFeLc0&MdGb0Ubb^{&8{W`O>TUIC6Ug|g*aSJPvD;%!`PVvy6 z@70$OC?I27aDmqFe&ve+8D&ICa;#?Wo`xm4xe~LjFok@#Au?H^S@pS>NpNNCACM6} z5Sn+tJ#xT$IBPWFq-MtwICT1TiID?LQ;4N_%TOVtKFo89D)5Ux@N%}e-h`I4$SQKjI-x-T%Aeu_`#o66%~Vk;wIFx$`WJ8qz9iU zztA3@5^6nt&13p;m(6WEEtscsVIXsQL=lpJ|3ZULmU9osDenBLI$ssfPm1^IkU;$y zBGQMtlV3iWlT|x+wQc8RKj$HwvG2`99lWLCWFN|~;0IS5`eYTqHioG1;n!z^1#bMQ zpL;u3#t%Na0MECxq$%x2@JuL(KuM&F%l?WFy^w$c$62o@HX>~=CRZX~qFS{>Iq>v$ z0UF!ouTnt$4|?>hL5r@{N`^RoIF72p_U+7m zn_Vq=TQ`YN)>$U?N&O?^WI;6cUOG(Mi_bP7gKhTlQa9tOns}GKa^v6t_XwUOc&4FC z;_mUP{nghzBab4%uw@jEFe4X=pDjIlOznZPYTz7}e81OdSM2qe$-%uvf}K3oiU`KY zYn;jWo=I*=vJ=LmKQ_!oo^*w=n+O6-L-~rH9dok5EqT@5Pte@eO9mP@e@q#W$=Ao(316mXNyH9S) z4({&Q_Dp!)IUukd*DTT!T+E(BuMJ~i$%hEX^L9rZu^LKjz7*b*Ep5grqIXuM7t_Z% zS@jK+A8xhdZhOAI5@=Gwi#twN3t`PcJ$q3*RFTy%9rbL2>`5A>Zs{{>_+m|_)+3?q zx)WtPWu7MY$46{gWjO&E6Wq}wl{zChk`A=xzzcblfC^ehx3kbnaO^Nxa2JTa+kPlc zBdb3puc+3VoC5|SJuoA%-jv#ouSF}m)d_E|(K}4R6+NmtDI4k{AN_c0FzX`Rv759v zf(h#BIIOlEz2W+0n!BcceJXh15%o8cWp_l&s;iT}g|i6a`={`b?_8HAFpHSU&ySCf0DRsP2%y_8G!MMHIRF36+=;FYti9&L<$( zL%k#E7R9{BPZu$d5;M(bF1BmTRAtZ1@yK0TFb{n05*Jeqi>6spHbHYy)+ZF4-{wk2 z?jj?D?%rYK1oQYil{x(oc6_NW6TDIvxJ%F{{eA?E{JUtQw6;=ptKp=R(|%*%kJ+#=RZn`;jjw| zG@oefhK9B0WeSl^)UC^~psyp`tv|_3t@ycUYi)Ms4+i-5U zT2R%tfZ0|w@pzyLdHyP0@D7%$c~k0^vg!N-2Orda{?D13_1_`g&9>n_kUg%Y5CIff z5Qu6u5wA8ivC?%eT%`(HTyAhlJrvZuMj#5LH-0a#-$cBu32_CEtFYkX=G`W8yR{S_ ztnw}D46F|V$u&Szf6rnzj&Sz#;;u-fU~9E2eoXrfiJ>aF>KCZsa!V0^^8z=k7nA+- zD36r9uY{mU`^T1?8U9DJ#BX;MHw}iPN40`(U3S3(qQXxi-(UplFIwqcKSyWaF19#$ zpKPp5c`2$_0!NRxDA3-;vqjRQMVOvo7CAmz;qxs<6M_rtL%1?+T2scdI{YRfj){^{ z@{K^O_?S6#JDP(wn4`XhJexxV)}MN+g~b8`SaU8t3GzS zVcFh5yk!FV?eeMKDm7b+-T=*uAB)rLna?AVE2Sjv1lM98xwhSNeO<-F;IO~M-*mbg z*s?h7we<``!5%BMD0V13J)+?Nj zTw?O=lm;Ai=sR3=OsmmaK^B}_rEg{{x91439=4O-MpRpYclrWXJn~OYX`1FraMH`4& z8FWsB6ckFTL8C~UWlDv-Wv|?kUaq04^&uk#FO&@&c>Qk+O|t>%q6i&^PCp=*3r163 z@MY6j^MexdW9U9ee>m0^iQ#6RUnYH>Kj@VsQN*MRt*96W~Qmd-Awp}g2eRh*-% z`BCgDgU`!TzPS6{6^e;<(f8Qd{@xt-@wk?WLgYSV^Q6jC 
zM7C+vUN@b2ukvf-{%JRg`f!yslev1AHx{bEnL=?$-vJwrV4M^);UO74K%(x;)soJF^=GOK*)?pmb}@P`WSN(?egXl0I#k04EfW zeJ+kY7*E!2^5w{t$aJ@<`YD&c=)t6nQjenQ!fxKzmV@@*1^8{2x#@+Nxz0|x0(8k_ z=_H=JY1s`MMWYGF^w)!l0TT##xCY@d_|vuh`jzafR-WM(YnZe2c(P*FGL%}{pa>$B z(VM?T=*?XJ+VfUu#5Aa)-HQy&-Lsy529-}Q3z55-FACcIvr0ymrY;GDF) zJ?O6XT5BZVJe^;3yA)UiF-4HXT;5(&O~Afeu{jydY9{hY4+uZG3Q2_m+Ha6Q*lDsm z+xO(xpKnA;`~lQyN8N!~m35HkZ^gZ6s?uypo!Y1?N5rP+o!B%zW%8p{@a@g+x0SjB z3G7W8ORa9=o!Hy!_e1+4t24S7`z$9>*=M~vpLRf3t)k3Cv^wP2TsmyUwQa|wZu@^4 z&zHBPWWfxmYVI$lT^yrB9|kY#!c`~_+=F$7_sGQp(iIoRvpCJD4Pu@*(}4hFL?hy= z?5T;nl+QcWklFZthT(!y8#P80EbXZGe27{6yF!Ealz>(=y=+^*IOruu==7rqofvI5 z4C}0l-=4EJNNX{W)fP%CAKs!Q?MyH?*;ske z(BiYf9yL9})gRJ=^(}HjLxbIu7AT1=LwQuW^4Oael5)A_WDy{IEw_$!m>Df+P8XPL z;g*cOz}+<9{+Fr6u9b7~AZ}*DyhMv>SxbQT+9LNsc<>XgyVBPxiK6kBQ0u+uyw~ul zNV_ws#{KUyYNRYE!4d9jT%5(&PlX3AgJL456*RYAad43124U~#CEY|q$>Bt%t|BWrVKZM@^W=ZX|#a6=*BHXLapLX98 zIbl+&EToEZQ$5$6O=2$DW+SR5{zN)-y88o1@(i7w3oF#^kuL#V{dm8RFnz>Wm)b!KxPl@ko>}A3O{XRl_Yz|6c!#(?)hC%9H^&)8XzM7iK}glAj3XO|$oo z!yz$eF_Y}JKYjhXM_m2t?6Qw0F9QK4!YxmFY4?`4q?^I(aawox3kX_AG8B#rhfR3jE~w7-&R^)jjEXS6f#u2?&CTLSloL>a(nQ_kTs4y3M5of zCU&t_$3A`iVHv0_IV|jd;zL6kUX1;M(9`+$!I8%B=YlCJ!n`0$>9B~495LwA_(G+z z8)IR{O?0dNE0JiCgNhOzrdC8vl;9#AG;}#UD$2z3U?g!F8K=M?|quonQxnZnJIt>hY8r zyZGa=*d!;^szGL7B_$cP#$n)j5~#)y6^uDpMHCo z1!evuuRC@*y~18yC&rCq6TVq)*jAc0J*2%{pA%cOdoX%%-Z1g=Tr|N`#WX z=Tl$gz~S{p0_S3Y!oyP;-?XBo(xG@2Clj93IeiV<`bszw~HFpm&kenNWo zU_SIXY3M$2yH+swN;$|Hz0GYoMHj#mIpYxsV;)l-+S{C{?;m%#b!I>Ia_-c&2e&_Z z*k}cE+-z<->g+`$Rtl}6;+)}&0(F<8kyN_V9YSiqevsSxGn>qZ4|`Ne zUL$F4Y2YL}udHO?nXWQ66Vbp@rrjyVUe9 zNEvi|uV}E@_QqGv-W6YXH?d4EX&FT9*i0JYkd~8g*bHUYsIsgaGNBV3nWaOzgrN?- z5}r$Htf@Zr?Sx$KM_u};rgB(-Hbk6wA_n)E7A}mpzd1Dl{7>iA-IG4I>k<_(5e}x9 zP_E|@dCl{li_uli!`huL_+Y1=VknNz`L{#)Tmvx5i_A2irrmy_*E|5vp=hAz=vPS$ z#lcwfR*DH|2O1lCKN6w((-$>D#4Z;A(A+vd*@0vw3hizd)D(Q(a+^&ef26uQdmAFsrjB~Cp4?j_XuhieNKIWDP8u+c-?ftxoM8F8kTW%gp(N^2WU zS){5J@|;A33yI|7C-NgfKFft;M{coN{B;}U_30KiyrJH5i$m6C&JkpWUtk`!Vfh4~ zR9f}MoXpDc;`55MK%bwnHN1wGdpHkuC2~F0l*lKX8EW!rsf-N>{vPyAxP7z{iczT? 
zaP!(i{^`*WD378_KvdS4saoLKG!c)G_KxTB27hAS*){Z{NX&V<(!%Q+a4isP=#S?@?%}i0^o`FN zhzcA(yQH)P?2To#Xr{_NZUJB#lX|ZD`VSJP(o_vHQsBh4<4;#87hM6cd%+?#ynb}T zOD*pyvxci_kP`ilfI$>+ciV`wzU5H1b}o?m<5gkC(0mWHaw-k%6#E|f&NMdwNNguv zppv+LBi6!>=EoEox(HTYm>X@|cYpds)>>iQxj=(7s1GD;IhP>_#1GDxL=H<2Dh9=< zs|3*5pYdX!x~kU~MqJR}Joy?y9Umv!=C&F9&sqKcc1dWWsqSjb-K)f(>z+SByG`Am zR@U6OeEdGDZIw$Dz|m}@LMxP8H#elcUY~=t*u=15bCt9bEmK(CrI~i|*;1dGe(T-a zgqqr;o>XTziGwofBvsSRd@GQ$>RtA}*RdR_T6|`*Ur7Vc8`IPeSL6SM`+g%;F+o6d z6`fl35;SNrN~ofZ%d?1C9=nL2*~ddY$Fk_0xk&YLQ=S45AuIH##rKU!u-h2s&<&W; zv|qd)e3evgFvW4PdVzpF2z}{M^b%3aBCeQbeh*6AcLcN@wb^OhJkVI$7c903)PrS2 zf`c8EWbdAUEEwH4vyvVbuU#f0oI)!CkIL)Qg?E~szd5>5IDaX8E zTLtC0iR};Dpal1V1uqZ-@)TaT8Vyk*Vij)!OfJV zw6Ho4<~s#>a$2H35ZesPo&}$Od)yc$WYtP=lU|Pq2~q=Mn2VuUUZDPK`^>HKtO%#T zJn&&X)d%9N?>S?o%R&)DSzkd0y>HJY;^v$1_-Ro3S92T6iK36kQ)ypP%D(=zSPjw~>^$i5y;L(o!5DKXUF_u}3M!Aw=2*t*T{RCts~zSkjKsHT0OETJu2o!K$z zB7bkc*u%)>3ex*BWz;5W#z7l+7G%R4QJJR&SaS`0@>5#r*_>ZpZEUkbv4zZZU!92* z-PCuZk945V;?}SF20Jej1053;vnIM5+1WMW79`@a6G*WdB}$}p%QRE1@_TmYZvnwq z-VMG=1`|i!P(|$VG^lg;y7=Ax3%C}T<*=<4$B1ZVmDc}Gjpfw2y7Lg}o zB@j`agU^@P>h@1>91(bgdaq&+G#0$7D(qCVj|g-en1ZZ&tss;7X@MkjaH@0zLO~d- z&ml!kJYQ!4Sk8Szv~;|S7id{!yL4y|-ImD1JAHX>FD$&Bc|J;qv|H^&7j+^!_`n{g z;}}S#)U1O<8Lb+6`UVobp^aqo7~CFFV4?kYX2P4{6JDgDM5PhG&o9@I#!N<|C@L6% zkFi%zWk^GXoI7&lMh5j%9#Io*h!H*^jR?)1b6c1ei9NIId(aeAbT`@(^qH z&+Fc~7PnB-eZZ)C`{t+0*?gC!Wv->a7433HWq}M!C z|7N58n>Wd$;5+h}Wh0*dB+vXtZ6fI}0DPm#0w3u>?7s;GzrD%2C0iLuzFq$GpGjw> zMgY1m-&}aJ_`fiq3?-AD@=@P9fafIiX#|_`S|otFoqfQDI%UC;k@&X<_vagH6xiy_ z^+zpvQht1DfUu^Fok#T+o&&)h@HjYgUjNDtn{`E&~55Pv7z(FkI|MmI*xx*)0*U7Kqcc$tYVEJmU}t8 zb=i2NsA@aG>u#?NJkFqy=D+9P#w!h=q-_fHn`Wv#QHcGwwgfaMC0->1VHAk<`XE^p zNp|SpmSys7yv>K>%OCkjnaD)s2*JNcKD0LFAYp$_Mv2s@F-h3%*t0&!(tcXav~?Z|7d}bDHD@nXPrAMz&NB+Zf8WgCxBuTa|Fyt>E%1M*1vs9$ zBM#(7!n+__V7#LD-iC>Ig!v99j@JD;hfPcc?b)vL3k1F4%HGEwmMZh;K4O#D?sF+s zwC?4Fk#>or=mBdR@0+5_8o8-Rc*I{FaU5%B2OIkT46c73E?>y|xotXsJpbWOek?xF z4Vt#6^yen~@HM!L#c!e@wVBDn7~r`-mSH@jmtDj%hg9sTWwq(#R-R+No5pWAAIre& zeAx3f+%b1a6-d*@13xDo2V9WGC5;5kwUfW*pm1N;))eLFbo`_?ZG zU#wO2_tGu3jTnd1s}NoR??l_P(X2n{Gp>PutgJ*bX`-;dQx@&7XWp78e5mR1DIJSp zIs+;$Zyu8s;X&P}VhnE{?BOC#Rb0u0F87vz51$AC#nSk}yrF%*Z{KKi7W40C8DTVE z=s-9z+;^u*s)()-3;Xn#&G;RMaA^;^Io=9uzv|Qa;Sty?g6-V?v1DR_miIe>mv|3$ zd`JbRy584358sh@p??JJG3}R6*?i?BV6f%_6RS(6kNpK7UJbX?wI1CC`5Y@8Vesqj zjFWRFp$_@sP%Zs=CIJzRJN3IWqMpL!p_K5m6nwr0#Pp{Z2KwnjwhdcL`a9W#NHF87yDUPF#D$V1vwlgzJ1P`2xlwwi`gvoRjC;6{K%lUIU^1SnAK;5!kF{+E80)Q?SQ|MF~yA(dmR6;KxOLJ0qoaA3V1DTDZdtt5N1!Rzga`Mk-q2djdZ-z?0I@HzbL*kTKc9gbm(;Y2R}*V_}(T&AFd#;_-v9l!e;q40jV1 z{de{UX6wS9q#j#h_AfRmD7rFWe7z$Nl9|4~b6HXU^n#JEF-|#>PmEd%^VEkg(?K(U zv?4BrNluozYE}z=)2AJb@rhdUQWwC2Z)!3Gf5@@mV;i~YE zs}{IHE4!MSWFypc;%|As%bbkvf-5j3t{G>gM>o82s5aU_&qPBYEne~T zGOf6ArY#RWyBZamS9p32%7TQ<<|zXU@cVo>tu0 zd4u_SG+OL9caEgw=;e=n9LYW0m)y7$Rh@kYbj=msZKCN~OHqnFMVX;ievpdG-^L{c z$yz&Y{WA__jphuQ?r_2JV1EcRkt1rj&;WgMq#M;`JxmyD237^q6bGc4^N9Knr*OvBQ z(HWh&?(m|C)A*Rg-%G$UR+*62tU)a$*h9mNuYm9?5e4@4IZoZXpth!7Gp49eNiKwU zHG&J2&{Vnu!z8ARSDc=*>q-G_Z?4KQCDRX&_u>PPlr{Ck_Of2iVr20!lT z!XMh6*L383BOKBoVM6(uP>ZZa_oYiK4k02%22UW5*UhJanu%tLq;XIc)@?`TBhNU| zYC3zrKjfZk&CB`@R17CGfjgVg1b%FkNP5#c{a;fSUwg*5({z5-^{&JEvWQkgzp1r| z36`B^7XGm3(Yqw!cN+5wEgpgaAC)JpJBH1s@WxHumGMRugTB&qi31%>*7Ji`lmvyM zGU6?J3&+(mVe$(O*quysuh{jR*|G5>Z{$u5+Fz`r2b+b@Pa~nCl|?2YaLj$}l-G`O z+LI6#s;3D+WBgi+coXFM>=G`SDUpHxg7y9e>SF%2Wnf0PxCh07+nw~!9i9OvA7b5u zyd~V!<-1HCb(Yu;5yEYuGN&wbexKN>>Yo!Z`LUjeUdD9uaW2aIOp&xWGfG4B)ZlB$ zWsRf7iVK@>5{%s&)Wd0)c~*}Dqg}hb@8j6iaR))U3=a3k5ze2ljmmVg%ZNvTkv`sU z4n88Chrpj>&|mAN*qbUa@`&5(l$a1)s3nt63k&h(&n&$y@Pz8!L&;h`Rjh#&@usEx 
zSj?P85{-UvLfVUXb7}!!9ylPLaygx+8xZYAofUDzb-7IBM8%!?)%4S^XT7xuEa?Ds z&A=ZcOJx>5F{|}((#}Z+EAS4JuOM=uVH=sTZxxGg+JooDhuTWwuU7eDZpLIV@lJR= z7%ok-%v76nUv+;KBke-)l3CTCFJfuMZ3_~{!;92^LIBS!$`Gs2)xzq`rSaHX6)!Gt z3)}0vn8~>}^0CRP@4X9>?#@gYqcUG6-u=OTeH{jPqR6hxF(nO8anjn6B1MYCE8Xd+ zejNde@-tq08nZ5eu-ES`) zu{>Dqg%1}d)qh7$`oSbkohaxV%Sl>PN`M}2{pm9Awy?b`9NOyK-5#|Up<<01t5MHGhmd(Iygx^2X>eGjp)aoRJgW(rJn*QHys`J z5s^Czc12B0>fMoDm(a0Me;%zeQ~cZDn&kDtaQR<~`2WQ7+i_U(KQvrxK1GNXc79sn ztZA)ayEo*wVfc2OBYXc@O;xd}I^4P(|IMVi@rdRJP2UK#zO!fP@{|c`YgOL#ng+pQ zQeEcWMhk>3*Luf(Pl97Z_9{%PjhBtOXoyd%M#v%hp8{;8}G{;2Al4KqLGd6Xe8pn zZao3@2WB+|%J^I}3k_|oq`JYzZ{#m;uT};OUuOb*J}$fRMxxYG>mzan_EOR$GFm3 zv3yea!iNFK7@8`}HN@`&J^#}}Nu@33xDybW7E@s!`kKskJ?tQtPtMMN(!bxn%_t6s z!ctU#DdMMUd}vtCk&9pVnbQTr3)TjK@4hLzl%z~JrQ{PYC;n^O<(pQ@Of+ zB|Aq60aU1;ZD@q9>Ya2rgeTyVb@Kn!y)b!Wotk^7dJW&gJ`?ZFkrn|N`lfv`g*Xvmy%Y9EJ3;n!wovw7kV!@O)+HWRMg6X7!ZJ9FJ4 zdm$69bxSoRjp8zxt|)Nw_N)pKd)h`%t~uCZz@M~rma#^EUux&;o!L&kbXRThDIJdc z`g)jd*8cLU8$H)%m3C2zN`Y;s8`cjslWvN&!(ta3yYk8fy-JEFea9XiAw{?@F6rRH zC7O1u!c_jl?6+;ut&!H=i6WANG+p)kAEP^s;$rrsBMtpTEq$K8;sqnp4BZMRr8JR5 z-Tww*MkT|aY(QUO16}lLI-^F!4EUtc!Bl{Esh3>aX94LaBj8ZhUvKrgXRx+O zp;Dof;e*X2{vXSOLW#YPSfX&66=62HNd&~z9CCh`n@V|xsP{QT4iUtQ)k@b4K7VG; z^%Fbxql+<1Hk^`r?0ZA*J#;vPjjdkP_n9ekmLa-5QkuhS(XfI}``&(xFd=Adoo(L2 z*fsq%`()+Ov*A>Z5FJ8Wu3w5-Pc5zQN8a7gll%JV`oQ^#?HEWSbMPMTA!6z+M2`&H z6EMcy3yS)HW=>}-&MUWH_dlxz??`1m#G%GE-r72!+gFQE?6!Q93XAG!^DJW}X2!I0 z)KKe$|8LJ8r-hLR=^SY$rnS_)Ha}y{c_~@gBZV`eBQ`6}QuLe}9aXLEAbkZq*3i4(QS;*c-D z6K7$&zOV-_Z95A|Ic*&u#CXn=cZr>zU0oiAU3XJ9dFY3GdW;3?Ta4doGo3)UQ|_8P zV5*6ihO3P#mG`O}S3bW|{E_i=?{^+EXb@gLM(#@|T`KxNbNo*2bL#Is)vj-Fi4mh$ zP%G(C^VRG=+|lwTkW;(TVwzVI>$s2|4hXh4?3(ynRE zXR)cchtv>&3MWGDK4Z+=;8+e`Sg?m`5+3N+9>q~T^e_3hNJgfR~nVj|R^?U|`IR0L)C zU!f6~jdq}GCDUYI=5}kX#D!sl@2sB3rJ*~~VdR+Ou{L&eV}TV(Q4XL#cmoTkW3M4| zHzJeC+abk#Gzp-0HMdV^#CpKwB^Fh6e2Rf9XVG)2k$1Y4>QYK_F9A|&lY=h*oq~Q; zvK-9&^hGPW=p?gG0PyAkBYM&9A-s_ZLZES#X8-;tYoY3Yb^+Kh?PIjtHCBV1)1;I{ z`m8Zc)VA$CS&dbLqk{!TU14Bo9a%{Rj^S7n=M5Kz7dCkVmTleAkKLbUb9hu^{3L`b zjY#Z(hVBcE!o`OwvaS@o$>6MR^2$~kTDO+t`215xJ^Rh=Ey#?FU!r{u*keT8esS^I zUAZEnK^-}rB4b4ztW#1|N1`q!Kh5N|>+qqfZ4lKym;r&+1y$FI@jRCd(jfgM5j6(1)<`a%2u9 zpXf#4{fR+~<^z>~{$*r2oA`-+Lb|8ku}Fkp)-SSB5DL$A%AKhgUb`|evDUnMe{HcQ zz(%($1B-wRtIUF58`QiFHHfq*eGho4&Z(r@(pis!(cSIkG^3b%T&R9Wa1;qyg-j7I zss)P7IMF5=K5Nlz#k1T58j#!5nXR+69BUubK1m#P<-jMkwC>JQj9}du=vWwYh@oJT z<{7QXqhPl7PuZf$1(|s`xOKL-(6m3jn{_{-ad+4%P__9O7kXx?QB1cz@X*XWw&0Fm z@3W47F2M;$xs1=C9Ek!_iATeK#cS`Pu6mCaRefI*NIhnG;SN~}7A@F0|YmYHaY$e5EbJF}e0UU(W81koSl%|qU zKhxo`8q`{+2|;E29hGmK(J;L7NL{^~8%G{W{m)%VYNbBG%fZ7hUO5dOyb1#EwmJWK zxb+$Nk%O2-^gqXm8HD&q<|6%u?otUm6?CBP}`|0nsoX${f`#{>O;95u(fz& z|LpEE%qCs$9w%bIu~99KWye!cIZaF1XqFBb2AESt((jrU3a7lWvR)Z9*S?1_9GqAO zPy8w0^B~pv`Sid|-FNb9q|%;p#`pr+bbo$*5l?$f;UWcNgHKMM+QUx6b7vmZ0O_u* z!BeF=MGX`Z%jE7V?p zIc1rUD)QVR3I8zpERiM!w3NEuySx6pJ2Lk^*P&XNwLGl4H97XM0RnLHB_e(|iQcTu zWI?2Ft&$vF*tf@VTr2HUey$RfO>}^vOfmNRcdoB*cCDx$XibW$#~~^&81BrK$Srv< zu_)~OJL)Ln>Tl-8EKfUX zY#RCO*bTHg#^;qh@#J?oJSS{9(7WY>_6yieP-V>8oJ(JS?ilJ-3vEvOsrv9{~rqbFxbn&c?XVgco=^`mZ@@K4~4PQg{Sn>n2 z^W4_1d{)`nZUr7F5{+-my{1vnS&lPWI|+}taw)*#HCIE_%0PEkwv8t4&_QiA9)&Nd z66XFhnB$&&GV9MLu|H%NcX0Ruapc_m%kk?n(B0Jr3EuW9w1b0^Yb?775cqY##0xw0 z%{N5d0G9o|UC5Z?ScRjl=daDsUCZaHC?ceP3s#ofa{PgNn1uz=a|wyD%L{>biFOLx z=jfAbm!n$2AsfHC9aO3k(Z`Xt7${HqgsNW*UY0%|e$ucdabp|=pk0|T@jag|0L&@s z`7K0m^Kk7g{~10DOY~T9svZt{2#qd0Xi_<-I}oQ?^p+@Wb!C-1h!ISmDT&AnIziTc zcw)S4u3cp7_U)?*-WJK#qlws?*y+Jgvh9^P?FBn_U5CD;v_F={?r}Gkjrz%2FvMG_ zsuA(zOpXi$W?iM_lJ-e)A+gq=7$&ht@wdK~kd{A6Asj(J1Jt$U3vSxv`cg1?8lo(l 
zl63UhciQC(!?*{oc~2o3>V>YqJnatVP1kFXG2%?xRKTe*{MosRx{rZWQFI zK`XEi85%d=>iRX|{rIECyzc296B}=fr~9OtDREO0ceC|f(-p(OM>0oNK0V4-U@$Lc zir?iF?a4D|jKnK03uE@D8?2bkJdpbDa4n^wEW&#T^P*W-)6h#L2B}MzFWUe}{my(x z2qo7$W@j1B8(*|9Xs?Kdng{-V<_uXRMEIPz2&S8SC*DJ7N|O4J1HiuTjkqOOgC6NQ z&;G623{^UYIQfz#in|{vk~kpoew?b*Pdn=CVVUp}u#*z>uk;wh+7G7-MYafgzxZHXC!* zu`NY8Bn9>-!#EJ$;C0`7(7BvB*LszM#5Niya=-|5L^{Pp83$4w+M$nC-p56R4muv6 z6A2e~j%o#?n1O(O@;OT7jSotj_*y7idmJN19*pWRLy;zA-l9zVUq*LUm?}#6QP{t& zq;Pv5B*So6m-6s*J$jdj6Zr-+h7a^j(@)`=Jit$*5u~XJSS{c;SY*yRIxbwEWZYB; zzEkkpxeeQBY@#~H;f!Wc&pw(QsN@4F@hUJ+`7(Tq+B=F3I`_RPxXb-U6x z2XIpf8pADny??tcV_f)slh?eF1P~Y#_G}O*+?y6KHf$4fRtOXMY0P`<$@_~p)P?l( zYdNh??WxJ~hzq$L8z+$XdnfwDE7!jUE{=x`=mnXWb;sV)+hOwAYS5b8o zpK#G;M>u$O!N`yYkK1es$#Q=HJ8nU=EHLgKfrpJ2pW52$csIeXbc|~ygTIWKQ101L zJm%Mb2@lkiBn|_?Rv4_2F1~@4t+}cc4HaQi3;gy`G~WwRxvJsE^W_TpT(rxC{>^gO z8T8Jm=Sb{cLw4|)61DtV0j$_NQ!Wb5pZD=+zTLh`Bk60+Po=X(-hL_8ZCr;m)y5oDdm{qnKU2 zhm7sh7F2CY6x&4+R^06dTR-|5$;nJX)j1Y#OrdQP&a_>3k}cm3 z9;I`Fs+C8^y+j24yL89|7bYOxNiOOsO?%i}<0l(|(7+lrZ$9^v%p(x*D(R-Cww7*d z{}Z>i^WMoWu5yT_PY|62bdpG|-X3TsOK2uigN6csR_$mXsA>uS&;a0%xI&VdkN*fkCZg02ICuLBO#%#b&v)qi zgPk@hi|Y(6$@-47At;_-x({zIqR&Q#!1X@IQe>@2=C<^nsS(9?*)**#>Th z9Z~FQSGOKOhv&;9#p6odh6ad(e11VVMreZeAfN+11H-oMo4sTjfAl`??f2u@P=N|M zH`|b$dmzcZ*q^@)?xl>>riV>>O9jVM;g+)SLoLX#*cdCu>=AIW;|VO2bW!>dYZ;KP z&h%P#j}Q^XCs*0&Gq)h)hRr84xm0d?7j;ym_oZ!iBjK8(ZWLvTsGwJMx2p(zI9|LK z9Np4mXmvn(a%vgY-KW}P)t8Z~`npZn*tSo)Eu4_ua%A-tv^Oq(+Iyf z)Re+_cmsVr-teRT##c8F#Q(KJKYSG%S=|~J{u)c*{TiS)ng;D|_Skhh{a>eeiI?gB zOgUIGxwY`;n}36J#?+3t=cBfg+z$Pq0AgadnVAHmU480x(^90)>vWNzj(EL!htP5O z_t4qdBUxM-W%Iw)uDzQk3nFmR#9%?{c?f%syVU9^(=tne^gPC%ndMBA8kavv9uPo!A?7M)B)!Qm|;?S3dZ z#j!Q>$a)nF+oTj&1?C$qfaIqv`^T^4>8R%4N1AzUTqB+Au>?IFS?)&OEia&iTbs;O z+c$gyw)$7`e_QK>({Fu#8MlAUySw+)Z0ziGC2hQu3%G9+#S`ow({Umv>yPmhajqib zTGl>~{+xwXSCupYV)tg}R)$*bJXKOLtQ!ea@4u0_oU_|SiF8NSBaN1H|CeGH2_*QsYOnOF-?+qMl%NE9xUA-o=v+f#9I1QT$ zZI~s-SP4eDicBu@3We%TwlrkJY066|9@KFVm-qGL>oHed8`oZnyN7ii-QSd=7614? zv1LZ+C&I!X^vEXI5<6nBLEl!0TN!Viwv`|WTE{SZt(ZT}cAJ07YFQ!iD7!8@Iak2{ z2tAY&3=Pjd=}B}-BBpj$*Aese3`spNZ*buiLufu9&S2Az15Hl55vhRFt;`eM@1IoW zu|jh2T8Wf`rr&cCetmJ`pFnf~5|P`1fDE^u^wE@7W2Jp;P1r1U>yu z5^bUTnVMs&1yArt#d=7c`vd0@6(0=ze%gwcR!kkqVa@;!eN2hKh=5;(vU7F|N%C-= z(gFcqO@V?hD#9>_=7jUO`b89#dKEUyX0Jo`eg(HDbv9*y8dj@=?e9I{*x)}_XQ4^; ziN*6<#0fXZB$GVVsiT~bjA1EbPD7muHas!(p}VL>d{6YmP`8H(7vaQym}5b*4|juY z0Zl+WR+pIUAd%AlaVNvL@LOW_R>{x3{Td%ydt_upG_3T1*TW8lEGJvAcInj<0bVpI zyDRBKN z=hjH(1Ixu_!?T`(o@bnsiu)oZD+g#(O1PAh1xS}XN3q*&^jz3mRd3p)%g5hsz4*{^ zazKxj^tIc#FAO=qAtSkZ16$>sYIlr#fK%M8@Q?$ae5p7&`{&8ztj^sUwyh$%;LxuxKa(=FX-D{@qgxr=0J>ad?w)-{%rwI$^YE6It zX5Q1C71M`eB0Qrn-uuoq%)YEkg1J(mzNF2}&h*>MFi*T_;>Uo?;4%M6>3w>MO0B0y z*g6Soycr);xSzA!%O`gBj@#f!hiYY5ycFd6T#>U1{>blF=hV5d_PZ~}5Oi9!c?E8d zdw!>zYsL+JS7j)q=>5xFUXmLDC{QV(MADm-Ml0{XL5Vmm8zq;#>OZh@2sJ*2vhQei z*E;lP*o#^;Sk>tll<{#f#-@NWdGVJCJ^BA^3ANaoZ3ms_C%(AgF-b@&6(r}Zt8sfq zz5Ab3-9x@6wUr@<+s9t4CSs=v4s}d|l#S0{JkOw+*pnOlC0098jrRPnX4%U2x6e6v z&J-BlIoWhMm!!8X61s;-sugJsZV;~o;pQ(DfT>5zD?+7p%T}M$wi#e~y10;%El&>4FdGVg?c3%jLg*VjkZDcwJ7A656-JB{ZO( zEEgo;(T@*TF2u>6eSv1{he|#d@cCFyE;HWOn^)aYjNR(Ng(g=``frnb;Zss?M3c7! 
z^Y5R(xpjpRfPX_*GnUs?Ml}{sm73-cWu|M!gZWL`uDs#ra@cWBP9_8Ojg$ZQE7sQ%^RTAMt zI#3l*4f!t}EX()M37m8QSiiU6X@?tHowP^L_HfsnIsUnZ?`ygTM9`gC;`gGZeATmB zY7`T~c=6tkoFE6S;AApIJIBlXSnP>XUXK`z6dEhH8t9AfgQjGJuHllzje7WC#zpCh zps~(0kR`jL^i~~hkp&Swc3l!JX zy4D)!uw;iJwTIu2tLkAU;tLGXBm9D^ z&CQ=D((eP2%>^ykQ1sAe1phTraL|Kjj^lXfpQ}tVL=LHzoV){M6nhR6*GP54Pf^$T zE!D$GrWQ^@XFsXV3cZH|xf>a&Gx9dis}}B`FNHcbc#I%voH5S3Qb%H0mZWB``*%`b z+^m^i@%*&;Mj1fzW{vnf6c_3y_Nn4e+L#g){OSYSY*^M4X^y?C&27F|eO>;I;U9Ab zA}(096zgz}5kCj^w0D0M(~IiNy&`t$sav@%S-Uy&^mxW4T0}QO@}fQBkY4SAc4ZqHZPRQuZRF$XLmoVSi^1at1AD{^MZXR_& zkAe^BOKG25E=hw{$AlD;%{7*q46Q0!N{oh}0RcE1r_C$x&`@N9%KyEzYBAfi^DgNI=nlRR%6f()wMTlR zP)55S`nXs!U!mE4Fnb5*R@9SuigY*Q zoRE_dx$+vz;at}hlC+X%563U5w|jBVOZ*sI$%%i$HCY_5?Vmj_K2m zAXLQO$Nu^L6vNeUX(g&q@eH){gD>LE%tj+cK%b^WfurK=(7>{PXR^%b7u3am|s1nvjnZxbfuwvMdgCuFijR{%#-N-CQAGkPDHC%B*9c#O z;Kxj6#;MTF7g(A%|A<&A{!WJgz6T7xn3XoYCH&k*M{t;z$uidCh739g8jo4=t}M%0 z!$19xmHuUHw4b!eYS`_pzQuKaU9=5C^6)C+{+Sv9vXwG1)Uhzv>iya%;T39tdnC}$ zZd(>ImRpv4C6_iE12$SCefN*5^Dp!==R11O_OFPsM@3Aoj0l{`pM z9rI}sJmW%<=%SW_8jY34lEqr-5OyP@QrRH0qCQPI` zO&TCF-l#g+P!dQPNC2r=O&fyweJ5mYa~P@3{^V zInmrEjJ7r1uDo-7fM4psTA|dlK;H!4<7d9P}7^yJOMgEkxDlTSmyEG14eYJua<9s%8H|lAxYWK4|9<#yL+M;438> zIxb@-Q??B322Qu3()_m(dgoFSCk>O=%yR2(n01Y!$L)9*2q4@%iZmzoU;*LO!#*e) zUVGZT<4$rHOEc~)UercSVDx4E7U^m6!M|PmhLE7r)v-lC&{3DR&q*-4p1K|5?wNyA z#>sTls``N4MVeRLX96(sKinN{;`lu>yJOt5@kq11Nm;5Q_ealy!2!IE_6mD(i+?xN zz>8?9^63Tt&x03cV3bIebTFF)P}X`(g6wf1-l62IZ7&F64u;K!>eXr7Y?Tz=^I(3ewx6;piw6R&64x?o6`b%Lnp zY;Mjx&mzLKF4DZ5cTJi6#)e^P!*X0g0#d&G_mtJOHPy4LQ-QgImnbJR2Mja(X*Mrx zHXkY#!W}cu6l8C8#-9rUdpcO~)%Ih1xF`I!FBV_)7Jmz;r+@LG91oNrt?)9P7tLQV zw_cxA5#3pp?>;>I+=^}AlhdxROt)EFa&q9r$UO60uEz&bSl0sjvrPDfyt$BO1?a2q zzGSF3Y{rXpLuo|3D2=F3qL)}@KlQC#B%j0mMKqJT1{Zi&r0=ss0!r)l^#%Oq4ZRFg zXKix`fsZ`+U|;Of{-ZTLUEi(B$ZeB}FtLw%AiQS+eLz8I-6SMgB17q@6w&9NswS!- zDQ=$VCf(ejrkN)I>fb2-RwmwlMbpD!$2TyXnlGy$C$usl*D#1H(6V_lakDB$4GNSg z^KG{7CV`c;9&fILBQh4L^+&JoDMGI08_h}~jTR`z=s-5H^Wo$crDr>3^yBX#8Lp24 z%JG!r^T_B-X96qXf$zm*sz&%bUFRb9Pz9naO6iK*uf4wd$C3uwK>aEm`JmM>TW%(> zZ$$SLTIH^2UhvUkRdZDlV)ZW1Mm6bD<7NRppcu1C$m^+idrdUK(r}!aJNC`<(JuN~ zbO@6{N=a=Tvp%@7M0PglG8CmngaAm3EeiN3%BeY3ezkD<5(C;&zkYGWNM|IEg|cd) z>wL+z9>)ELlt8r#ud8eVsbAyqNzVFLk0Ekf;-ZXtgli5GuX6PfK4l#FqiZ?9ThKZC zb^{DlEkbd^nquQlcx%1Z_48+G@Z_8ZO4As_ouT0nSG8Xm33*mfQ}y?5O)<0@FOlQj z!`^S8Gqp1#X9&)pG!K0bsn$5}m$iOz2gHpn*Vj+yF0d@}Gp^-8i(xS0> z`MUDz@-*%^G8&<$e=lDpI{7cTPri#UARqKeL`u*J0Qnqu_%upaN3Mj=e+Oew%Q7IC zLi#i8PTMweYLV@m%?#O?xeS!`y*;aTBA1z%s*{ufI?}kZ9=dCq69pswjvu~YNDu)y zGC4}5HNxT#s5oP_oDtGKih?Rr^;w(AVt#kkRxaWvNV5mEHBsVMPZoeEaU=&vT9hU^ zQVGJkZ`|`J`c{yDm+!~a(3iK3U*g>L8)w4=W=i%=Amd^%TbKo8ETi}yt@F(!P-m$> z%}&5sxpy&r!p#7~YiKpuZslyc}k9Zr=!jQ7vbsc{_IxZpjM zTiPSKXYajUM*er~IP-hv^#uEabU|f3CLMR)O_!xe2$R`DP-Uxl;pz2=(@X|TsXQ!> zhff^Hh<{JAAOW1TG*}?>)vG!4j6?*~cEKNpAS?POdfT5l`-P%E3DUUx^cX$j!j4fp zJD0JupXB~kFg5OP+k{3_=$Taze<2$>@xoTRjHZg=raKRC^edTOsxZB~{lUl}hg#pI zYsRegW)ibWq(PL%T`K)jGbsPYN>fj)9jz=_OUS3xwEgR13HgjNJ^TYS%jXss?|Av3NEqdSWb@`tv=Egv{8@jne)^-{LskO&6}_d8KuX_OyKLIC7a%OH z=FzpH+$b7$6dmjwoJNke@^*c2lec*DM{<@Ig`07Kq%(ZqeLIA3Un2(^cY zo7}Meq$trl17$$K{0i)_->ZStqlFu~a41IYmT0l1w@R z*LutNHV+?@!M0 zAsXJ~F8sGsos(?aT3+p;+nMl>ZMkI7qs+GEc9`Dv>uU?+@c0sP^Bap|T2hlV73p?= z?WIpBy}wV&-Jzr#$YELxbqE3nKC3hTos{&K*Z(bmy$pB6|+;WT|B zfZ=9omFFh=9Ij?;|a+3dk$4BA|I2XUx^v+2mROj>XnTVUkH1^0XC?fug- z{`Htn(t((Xg4J2?zYh`_m6{DhSb{2T z&Z6PedYhl3C;t2b5R)Qhj3Df%D#ilI8eQ#BjkkW+kA!vZs~vZb`&qb+pQbM^abL`_ zxW2*W=d3s1XD7JKE}rPoqcn<|pw)>o?`VJaES**=8c?MhJ)udVy#K{|?)T(t-tfS0 zfq{X{*R4mDH08fV5${C>8;FiPv#$<)F7 
z0qZ;3V>s1)+<$hEIY=OGd$e^5I4!BkGl*oaleB4T;p^?&i@G2>wI$d1id29Rg-f#K zeux8$>Bo;vl{Q_y=I|KH(Oy!y^L@Jc7%Y1T9uIp_<0T^`Gi3v-VgzRYo}L>uH})w# zcvWn+%C?kR(#U^i>XnP>h1$5RU-qc{y~-!%2y+Xr=4G=X+CTd|?O!~zv@klha?MS# z`hNw(H~q)@uz1VKLUq$rL}wDZvX?)C;J3(2gWq2}2emlo<_P45p1ki#M+fAM z-b@P(Q=GNcN*f%&}}!9{OY;woZoVJt<>!mIL?{>@M_fSz9>sj zE1DhQ2h* zmDlT!1>Eo^>2}M&LWQt;>qqWlz!{Kgf9;>cP~#^-WKz9wQ!9Rsglu~We8hQ7*#Ai7 zc*mD(lECnznzB;a&vLl#zB7u-D;@n&or3?23Z?qtIcNL13OT97%9fSA5X1D4 z)MTdvekhXr+lTwnj%b?^lM%O|wUajN6^@yO@JeQxXnXg--7ZXBeS1<)&X-_vQP+cK z>#+IV=wT25C&Ecr{`(=XYptxU-41Ulv#Xjkf`O{>XX0-Fj%F&K#N){jo z1M(DdjhWvTI|sJN2*^H=?InIY)pk3GhChzGfJ3GsSw{o+J=2`<1#_e!j(&5BV&8KA z!D3$~dB$M!>l!nh0jOxQb|*_1muB5AJt^ulewN#QB!6zm(qv>2q?I+IHS9 zgd`_kTQRrUvEQUTANG3axz})#ia>@p45rKq+3aWL^ZLy?x5l{MC{?>ES4Ow5PL~{o zyo~Wl`0CA0$uo7eyEgk|CmY{}T7Ett_l_i=lG1yF)sLv1Uc*x7UoV3J6Gn?&8yQHY zfZ)G!rGYOC%)@tnf&ytlnnJYIg3d3UlAo5#qgTU31&44n#PS^-fR>Hd6E_&`hb-sSEcmnDzaY;!(ptM|(e zdO6wF2vl=YX#(;^fG%mn5P?yWq2^sG54Bwck%t$6 zzDSnn{N!$q4NvhT$GK`eBiBp|62edW3hnEv?r5Ju`Ft+R8Qx@-I&A^!4i8l@I~Z)w z1lN`18osSXCf(S@yw?x!59&{u)zK(sQja|AA1_HqM%d*fUIhpN?cbe_s%^z)wfBFX zL^NcbdGYFxw{aLSWbb`pj*(*cm2@I>odY|8*^*)AOVWCa)LV?gK5e!~ue9DTVBiS^Vnh_dr(tp=UU_+4}XY0}Wi@mj{cV zYI3aY4uf%z`qbU$+j)7CH5jDi01=uIKIn*Ar|Hz{c1yYw&pE&=;Aud1Z{?`; zHgmY-9U5yrp=!q-T+%$bezQ8g6`R-OMEM81N*TsLs%i%{D~ZWbtCStg6WOBle3>&z8i}in<>sA1!E2f zZHG&n*+7JykSU_e8nks|r9R%KFvt9M*5SsnTKwVCGk(jiB80Hp&=1jrY-|;tL|AF0 zByG0Byy^Cn{+GD@Ppm3p5GJ3eN-^{qfp5sJUe&54{T3%X7$dQ1dKD88N|QSqxZjCn zwl12VDqj1>C@Bskx~JL2Il!Js|K_LjFev60a|IdZ)v@*o=8?3H3G$TYmMa*MB1<9I zPTU>dZOSnXV{>Y^Nqk!rdh2+ZP%gRM;p4+mtD>ZIV1~$-FSjO7sW3_I|JM0Z$3c6z zZC;~{*5_N>n7=Qf4cY<50f7xM4;H`u1u$GD!B1|*{fA-TyqMh1WQ>|`_sAv6#nQ9uIHSH$F0e9=+)|0q2=Z44Mhz`VknQjzV zewz`?`95=Itrh#kT{3;6R&w+70xo60o&Fb+SKYZCW^|H)wHibnQYfPRglG%=98&vjlxK_!(zm^XZ%scxY{rh=$@5q z)so^&F!nH)yr}2<@6!4IeZvCLg3$C45Vgpa-FE)xBSKc6KQFv^pj&Y4{U{Xh*92zk zx*dd0?G^r}KE$E-=M6t?qH$4P+)}$gbx;h!qW9bPyWpdx2^+0T<`$8PhT&t=%5QB% zFVnt%&r-QBd&wdJyRq5Y_4<8UdReacdA@8}BrI{;&M-&_JixY7t0t0 zM!Ji2l&#f-G%^?Yuzs|BqI%67>K2&^-ICr5KM7CxLmyD!xREvteIVl?ASwxH7{bL> zA$P0CIIYhxq~+%L2?4&h>U8tQK6P!gkD8m@(u+LfIJ$qDZPjv6F&?=wWG*&tMf$s$ zsM<(zCTT9uzP>(;15~Q9dM7=oH>{_SU1761M`6Fw8scPnTuqp0+MgTf%V0LWcK(&q z2v$$AhiwLTwqz&X{6FJ}FCQtk#UBqnBP9^uo&$hut}nLYA72D-^yea>HJQWp1A>*Z zNP19tYR1+0Gfsi_T7wy>H9^7+IWdi~1JVhm;P~57v8%5LlRG*l@9X2iX(@{t%qJOP zL!A~MJ}1V?XVB#`V#@dN|14pHh+y5))JlGeh_)Z=TX|A9@Gxg{J=q`!!ZBTzkNHug9UvwsYgo)}h1X?PH|e0Iv-lJL8`z8F%G>raqF49{4_!Hz=SN|D}7 z#rcZ;eaGH%d5c}d|JH-CU%jI3X8q|=MePMNhsLSk(O$BJZ!#Uqm!?=nuQ~?r@LH3M z^E+rFSWHTl3Wal9aO(fBbdAc`Z&_cGg?KZUt1ByJ%~V-`O0^Q=R@RVT&L zj{?8oTk)`WaW^0q;9@VQfrN$@zove~_qwt~Zvqb-#;!!&G%Qt=OG zFBe#%Ivr?f6$Qr9W?6k~9Y|Dy1$-+zoCF5lSE?B6KE%t>nPh}s?gydk723%tsz>gv zmS@D~p}R{iwh@Kz-to@~{Hhs6##LQv=or3@KBAeslwm(|B@!-`m>TtAKh;85G&xml z0ZMxBpDrYM2!}y~k$0!!LOg`>*zJ~jUZq8FSs;TQYY($w`pJqHj`0;xkI;f>UZM5& zFsALmC818$rX7wK35oS!k@#WyGlYYBi3C84CLUSbjDp!GE3HG zIGB(H>6B&=@%tOfyeXuY8JEju)95LfCskedx$#e<9xwrVY>i!R0yOaFTMiX`pUlR| zq-5yco0P|BomSW1=e=t8fSxUmXfU@^_28ObcBApeMmT6;-7K>>);VbEN0$Z5i^tB5 zf7*r&Fim=hUj@?68Qd$tJmVUst2(7|_KKvDazq~;%f+N)WlNC)`O0sN&SqCdiF-f|}{I$pzrQ6miiTw*BOVbD^td5IpXx5kG;oR*q45RK}WayR6h_00)2R7E?L}%O|2)}{@DCX5XE}rKjSWJQr)%u= zG6Y#NNTvKm@itm59g0bNkgJ4oH})at^$^h+(ht`CMlZ$xjw@*Y{1Sw-kB}HB|E`3L zL(Q9@E~ErwoOnSmg{4Ln{tG+<(*6Bt>%+2?Q1H3Xh_pOu>kR9YZtN>6zSqmur{R9hQ!!BCjb3AbUL=|l9Xl= zx78kZKsv=w7Npi@P4@rzdh4hr{O=E#9N`F+8Xydm5|A!|F+fF81QkWPn@OX@s7Z)) zH$wzby1N8v*rZ_yNDb+RG2)qiKYru+J?HsvJBM?|`@V0!?(2PT^#aAP9=Y5}wj<WzJ@CGq1)$~P{> z)VO9-;U=op0&sf0VD-9A*Lgtql=za=*H#4ih3=t#H(S)TzK7%*le}~}G~Z`9D>Tc_ 
zs3jS=?bBrBgo!-&@ZU_-B|4D(Z@Q-=I?E-&jb$4*x~IwqhS6;F2xlQb=G)SXb=N3X$_IyE75^YwG{>-! zj~ba*)n|3M?qYo4qSEIG@~!q$eysI*?$g5z3t-rwzj~FmqVaW)px!F4gC;)SR{UK# z*JVZ5SrSrYt&Z_o0TlIhP1JPM} zmQckiiiF=d2k&zN{ZLWV5%5aL#soh|Fa*|SqvG9dCjC;;MS!ZE6j}sr zz-?8}2<7q%C}g5Xj0KA4jAuHAKL76ft_A3DjFU3QkZ9Jtt>^vtz=MPu4&Gk6@#%`C zlFFG)H!1*EgUraDAQ(kCjN{%~wjyYob!YFoF<|oz`)^I*pxg1;+eqfy($BWcZmd?g zT}m`&$aA~T`^-4%T+R=W(3n82FqG8xlLCwiEWmYpJXv|C^_p>LV&d>}ca5d5_`Z(K zgV0W>P@nPQJquf`t(%jb>T77FuAdKFRGgr=FZiuahP|AxdMXQQ)ze zFu0>{)7m$qp623fB`#S#5e>2p3HAq+&nqI#GPJ0jY4fH{g_Rz-H+nJ+NSli4!is7QPjNr_=RIF+!mQ|;^ zhOn%H(+IDay#zL@7@#-S+0|=0v1;)dSgh-=^CQ{u?J=>_pOj-c778t)YlJihTKtho z3>EO}q92L#4pfTsp8F!4uom+#8vAcpc8ZwT7u-RoeyrTzykP=KRk0*v>9}@j=UA*2 z{Cj70z&&s)Qeewo&vuGZ?HT@G>`20tUMKgcNB{VvJ0?aX29pD$?HFZ)fj!OpW6qT8 z!Jp6B=X-zUWa~x~{tW3ta!M39yE=y|-qr^ilnpo*JCMOSS7`=GuM2H%vypM)z zB91j^Hv0zYTYj|T;`buxB5TMi6&U?6ej&I-LJSnEwky^bM+7)zqJgiW*qZ>8r zF-0{mJDo|ETU2k|bj^M+=xCrS`wSfU1NiOe>FyNkJj=czXNoFKZgd#g>DGP6DCeV~JZNKcBS*(!s#dC)te0R% zYrdm3j!x^`oWIAxeJLYP-OcuGj#$N5M@2BH%r6airafv@S1W-Na%ufj9Bt9jKA7Yt2H5i?1*yl z;!+cJOi|TAfC&Th1=X_r{kpI?h`~+|Y}KMWyln==745;44##sALrqQLc zUfQ$nJfG6^*GnrojYrM+($6?VI zm!Ikv$zAa*W;(JuG67Ie(ji`-+WKEdz6Vz{E_4KZ<#n6>aa+wxp<_p2bIW1nHT{;n zWS*aYe~7ZBqd`Vb!k0o$x}Ac<881^r+p{%48e`CIXJEnoxOF#%sbio^mvY~*mQ~wi z!3WyflCi|XIeO8XxBj~mqFv|h$&<3yo7l24M(zXUXOS&w1$(*+9iF_` z-t%Y;&TTW(lh*v-yZU=+HvA+Io0CVKna+1}$kzwmeS!Yx^z+ne#g&gL;zY1G%N0EnYz~X%i)l})`}GdY3~qYI}@tqI}%LO+GZ<}F!ig9zt<%HMpy9M zr6%~)NUOj5GX4=d`Z3jXhFD2YzW3Gn@vWA%(;2n%*;n#|#i4;k*@lL9$0T|wvDk_# z4y@&DU7z3lu46+Z%9lg>SeU$GG}^Q@R+>y*FI%^;Ba2xW;ieJVLZKh|J*jhN)_7CRpA2 z-@!dv_9W)&Zn8`Vz|>dC^EGn4c$GC3dsX1Xj*O^a*%M3IakKOXJe?^4UIf%TTu{_} zukFD(g#UTuLGWu_H1u(aW|YP z_8&M38p4A_+J-2)rPltD+rOkoNMLUUQ1RTW0CTt8Y@zR9jF1F8D0y2wf6oF4fe^!~ za?gyuNa098`rm9VAw{FIr$w4+4O_EV+`+!O!>D?Ch9l3$&7CT)7iEv)eHma~X{YcP zzCjl{=*Q7Tc1xOqTBmhE4MwuVx0(7$OCSdP&t@}qb{TASB%jo(K2{l|Br_TRS(xlHeUAgT>RAJ zb0S}Xqjh_3ea=;o)=|+^N&56g5#chsl_=nQlj*NX=Su_(qy1db9z31%pT-=?X01Ll zLvz=+oNQ>zHCs_0mfbHL)IQvGMXNK`48CfW$r(TkPQG$4vUK_~`FZ5a+xo|OfrG;n z!b^uiEtnCbb%RGALA!#?H&gVd&R-0vam-@A*Mhv{HTig^iv3UIxy~ghAQoB`$h%y+ z4@rC|SHG9%*CrhK5+?qoG42q%s)ZI9=c#x+7G2U&#g%%JR8q!|_x|iOf$8gdH7;cpJESnn-6&-jC*Va_Z%tf zvVfcJWDm`|WJ)|50hKw|1h5+3V{XHTG6fpKYr#kf7t7Kw9Gw*J__*Xf62~O&>qnj0 zL_liVz>QryTsNy5j~Z%Qs;8C_CPO$AY>?(GVyjw?FZEkQLB<6oJl-{2VF0rR7e+xo z7ZDsMoU%iCSW~ROnO-UM!;Wh58n_|qJR@lEJfLxOr|CUix;X0S8?ZTrZrG5^5mDN< zTZpt3?)>~39becP0#6Cpf7DHqcG~*t^$Mu_WhtaOfWP#_HG1~S>PC7kvSZa)#g|m4 zi*9X#5Ak(Bg=ptE)m^P~%8Fbg%EK2CW8E@yVv2xQl*IhbK>sn72+HkY9p9lo<|Q@t z{s2dz6&c}={jSd~QV*2L;r!mRvESc6F2B~)^NJx3SY4D`j@>O(pjZ!>g5i zE0yY?Xl3Ry-vRBWK}Yq**4`R602L||s>bnp`a9@aLOA(F=7Zj6tkrFWUDp-7+rJM%=$8MK^-I_Gw1>+2%8)P|iVTzx)(>xyLk^ z7rr#r^2%kfD3ZOh{d?Gsqv`Q`5FZ-$%Ly@~fasddiBgE>0V5TTEnOtaR$`yTLfQ0C zf;FrQ8uK49Nl__;wH<%$mtnhP1w1v_ZU71>{jf|s)&PYqPl; zT9QCb{07rvSr}##)_a}PG&8Q36nKwNs_=*S)1@A&kv8*j&`iP;1X@xK^!Yd7vZU4q z({mV6CD44xBaafx?dYXb`o6RuGD=55RJg{4%6sVe;bjUPVAw8ceR7Xz{52By;GuOg z?c=${KB~x56DNL)09&>Y6jVo9F?-_FJMh^;uT5G>ZkMV#*BWN%9e)F(-hYCi>XkWF zmlAfpV#d$u@5PInIuG=gXq~=xtNat}^at0Y=4NdYG*w!DaN&`i}h%j#ls!pBCTeB*g>tVR3AY0OJ zE7OJ=%n2! 
zL`2JKlr~%7Nmo>yf!|5!%n!_S*a0O+KzAt3DBIv%SdJ0_q-*~UTm&?4!+wi135!Gi ztQW>%I~i4zE_t$Jm~t|4{rzM$q#@s!W}boL}f7+~G0+O!EQ zgWr2fBBZ1zB!^~|tms-G66-_9V=Oga&IxG`sUAZJSxsO+Ml{bBykY~r=;z9%>GFQSlk;Li_q@>qCI8bmvhsdEQIA-N zPq*la!epx*uk#_uZ@B-7t?D^E$%*06Z)g20QMdHkiA~<~uA`A`yI$^XCf;X-OzHTt zU@`=8sJJ$>KaG1*RRZWMj;rDm@MT0Sh9T0I@>a!?=j9ZY*MnQ%rGleSC{vF`^8WVF zGi78}7}N=u1(k;<5mMQBl%dB1jWO@i$(=_*{0?+BCL<3BdoYxbdiZPcD_1QNc7OLQx9$ zSZR}7KUsImS>;W1hm}u8GPxh zC-Yu$9A5HBQidbYsov<-Egz9o#$jpSSHi8e$chH7CcYCN`oT*L{Y$;j@7qk74Q4u* z9_Lnm|6>+_T#GMoVz?iV4AZVTr9o5J-kGr5?WIw?;8dd%pugPHs82FF=OQ7$pT&{i zrTC*yLnB0_6J?6l^fNM$7z)QDI`wC3sec2f2KUc6m@HM{pnBqthI)nZo30}*u(Whe zVOMxq<*Z1&*2Qh(bgpa}`UdY@_43^wCKTJ>#^qf$pWH~iW6U5ArBj!EL!)Gj_yQa*>nN zByZd6BlBc7`HRi`Y~PtQJ8Tr)XM-gYAnNY6^a0)jN2M(mr>laq-*9b#SAeo?y!A?) zt}>z7d8tQ>^4cMoxS&PG1cQE1(Qr_T4RD%cjP3yawn>k3GYoOd&|<>uo4v4?jHy60 z?n%x2gqo!rl-!cHR`mYf*$A8=8f^B4ouCZ6Q9UarJsnkO)H*1kyZN=9!jBlf-T~iZ zs~O z-Hmpk-CBGve{lS7>d8ffq$%`(B8077uAkX-3I%@BkZB=&SzB)Tmz=rx01X~?{A178 z6_CZnW?|y^UVS|7t0BI{wx)7ZV1~tjrTajebX*) zMoyMtwy4`uN!IpY@43AbO4PuFnTP(cfZv66ekpwrI-lq;Be?F6B~)mE*@`&ydh&5Z zon(4)REN~BA9B|d?~S(B_XHkOA}&mu1P-=mww&4dPrS$F9oJq0W(Lbf2^i$kw81LZDsV2;$+#p?IY+$%3B_aNB3xP_khv;MyKfUEX zbnCm1%B_jgRG8VkeQl^y6WadvZyaCo%C3oCvkkiLZT|V{P!{k;{6f;v7rY$evtQuZ zp@Hn5N^pV^(`|Gy@-IzMG(Kr07onI5qIfz=BEz1|``r1Dw4pI_ETyn6ttP%E-R)~l z`3Fl(CT?e@Y)#TY-wxif4_5+L??jK_Gr_6gPd1hOwf?qn!HX9x$zasbz5JB!(9hC8 zjhctolfJ8N@TfR3jL;F8u$O&jG?dId=*PN1ESS;KaN8C>!a%@0A+XNgs9?frK@FTI zqww+F(zp!2`HZ$D&p$qj^X9iIcvI1TUpUE!rj^BY4{Aj!dEGcb@}-Puaoh59?~afw{Tf4WgfyBMfM{-%~KiPR$V9ndtU(7%+$z?x!Quld6F~Q@gAS;aMkI}+&l1r^V?_?u^X^B~z zty=(7%t=*ciUw!0ke?oQx&Kq^*<9T>V>f}V=`XMQvipA$_#%^Tq)MM3P{z|7rh(l2 z+RXe?_h=S?po4pl6BgPecD;qs?{zV z7vv*eNAMBSqJ5w2jyLSJ>Un+3{-(Pz=yy+2j6LYhOLfv*n!^!vXdDf}@C#lVk&X3A zX4>q`fm8QPRdy9_rRx%vwN*pbK+dFF_`Qs%*7^I}IqY7^pOr#Um>@P_QCF6iA%3)t z49n?ukG3w-?UNnv4E!O+7Z4i|9)q-E(4aB_zFUJPq+rw1u;@nH2EE0_5AmO!yQX|THgyT51$(97 zV+*gXyOL)xbE0*6^f{TS5Ao5p6Xflsbklsjk(uNi6{+{mKI?Ly_6D~&#ed?M?wOL; zMX~(TOf|DBi)e+r@%x7nBltsTF!4~Xg58-ZO+(oU8#?Xt$fP7J)nj5V*VlWOcGOWI z^=x%X;N1ZwaG4tsTWv+-omuY`jRw+Ge)Hg1|3TK`|10xo2>vSq|7+R1@R-OgGnP!5 zLSfzosW6{_pvzRRU9HW5;}f^vmF=YuyHqm$e&=MPDVZseMsN*|_99a{@1*(%InX@s zmU7cqc=Gl6kj`?=-49T&;2ySPy8McG4&|uHJVIBBxzu7GOUYoEZCgOR)j2?SUUv!Z zl{K{&U4{rYHVeKH+e1 zwLOQuJY~8=4~${z z(6{lRK+qe)d%1byfYS_+e9u?Jdyb>GCBMu4qvCdXz<`0)tM&_JJT&@nCv+MBy5qjh zA9Pk1&r&8Xr-ObFshOEc1hREdBFOz^Dxy;90@ICmZ|L)(M_F2;Cw3 zb`@jEozYtJoav|QxR~1dxLRv6{{$ABg-2u}9lQPwV9$|_dmDx~)RUa}%Vq^FA19Vl zCqO0zVDR692RSmtzaMY_6%yJD=8)5M<&_ zodu~ByE0#BTSbzMdZrs7hIm8q3Hioax@oezQZ7J6-2jOmJRw(0-oi_1%|y2+!Is?y zX!o{cVznXIkaV5lcj5KjRA~4n#SrKf>j1s>Ww6Jz+MU6D#{(=-r1i^b<>i>%WU-r- zWw@LOS@FtH(or;o^-Hhlhf{0F#BX8|m8;B0NJqSn9R+?SHq_;;*gla*tGk9WR7wWK zf^tpX*;5|J9VNU9l8~om0YBo<==?IMcAxk|0}O#dz(0JVWkrkt>Eat-=NHe21+*RO zfO3PBlTL~vgi=&G!S|i6(ls06L9ZRo@g^LO4U!~to_gCwp6vwA4hlTY75z?19L?*Q zr$5&ipHOgSSLbCIbMmu)zFyg8H|zBMlGc7!Pe0E2+8F5+Zc2f_-4-0%+Uef3{=nyc zYw>>SehZiYR_K=73x;$(rBM33YbK?f+0asKoY}h=Z1~`onYtz1bd=t|u9b1GsvA6Q zAX)S`O*7dk@0efK3ZWGcNZPzKGjaiOn%Q^F`?C*8htFO1>9JXx0%BqO+ptANB*(f# zqS^>KGpCgCT};ETOKg$n77CL*uX>Re&jzeFLIl*95?&cn1~0}gP5=r5Nm|mtI zmRbfNhw_5GEUogtj>ApMs!8NFugzl32*+rFVE8dV@fDVsfOHT7z?T=%+cd}L zToOPPlSk$h1e4?!31Oe!y`)M~&-#rWO+}b`^Ds2pqvLd#Zh_BtP@#3p6K*)aRn0AX z^X>$F4^{uzN{7r2r2$?RxphCD90+;#$`vo4#^@#Vf(QZJyQ;BteBvD>w*rqnRk^<` z8-<;C5W>gJm%+_Drhzxn{rLXfKpWvH1wJm0ZolU@9PscSIh?gX6*yyP17-3&+q6m= z>U*l7;SK)P@aTavx;>^^TV~s|u_C^y1+CX!QrLc44b~yA5ll#<&s94-;Gyy@ZmQ%M zJlkWvalq=^99et*#X1{~)tLmD)R1Oc^XIzK`h3>T>#7|oEF zYv(US^LL&ahTn2$bZ88MBDzsY-X!fhm&M&IBd4QDAmSe?lP}WhqNjwrqHHe2U0>&7 
z?B(ByBEuQ29=}AQ81ecb&X0j!=~^@UT)Vh!M4qfUv+JD(E5xr+rn}3`Ho$1c*!juz zgYzIP`)T70+fOl)Jn6v~n+FeSiH#RwRBuJuaz1;^+KKzlQ{N|MC1Q2{Tj-;+Z(Cyn zqAV{G>e_TB1;*dD#q|pMaxb!!eEP)tx8oR6^Wu~WM_oS&qj&GxFr3=1;u*nyKp05$ z<$*E-m)gc0Gr28#H;p61vvRT8B^^X#CB)E%&(3Lp_A8DD5lRxSC5tTNavR>rDj#id zBPY(A25Fl{{0#=wRo}N#7<6b%n_+z*Ytnz-?v$t?EHd9pW^!Y&O&~vy%G$-|;P}M+ zV|9J`a{{ftDOp8OKF-?aj1zS^%OaDRv4|v~Ps3CKxWK!>ID;d%@G1;kCpaD$)BP&6 zy&);C+@wX5xFw92j8-s6#U2``&XQ<*)E4?o#u4Bck1*!tk|p4Eya@Nh4V@% zuj^Mx#bv9vff+)t?;)xnsGrC4^GKEIhW4sb*!qw4V-H8xefP*|ierRHbOT!I6PDgl zdLECu3`elFhyAKl)qgP0%^rT+{I;95{K5qVe%fB+a-+=uY(X+|UeNt8`xyv`|LohC z->=3x3(bZ4ut@61U5ap-7W1-UzPWe6xI%?V!n*kjz#2TE=TX=*^R4nRqN&asgy_-U zZ9Zd*)7OjC?&bn17=G6Yz+ccP$s4)I0abYx3$M5g2AEt$b`f0wY^S|YF&ojzcVfFQ zfCt4&(;NEFnyO=3=XBS#wnXLhAY*B~{&j+{o~TXfLb;45CH6dxOoA@$@$6}kv5eyS zgqt<-l%HqQgN5Zm3UGlRVnOP+JNM6VW2|#{O9vj_k%-wvQT>Lh4$M?jC$sJ&` ztK7>J@wc}_a*HSbqKqi16lN84COW|rPT2zHnCDXP)oVo4iG*jG|3T4^DWov0V!tCO ze+cwhMN~RaVMr^F1Db$zOzSaLrLMqT_s3ohN+U?&(z*vs-q#>sHC=xd7$&W&-G8n9k!jsHlVjU^ls7ta z|5P;(b~IHz$%{EF`$oS3mF#0|bGvXyVcMQA8>!pDaB_~+u@F?hgCMu$FJXV-zCsKc z8i>iVv4foT;wBVm5KVP$ELAio0O|pY%AzsSK+)RpM<>Xz?1$@S#hn_`4$*%tC>}pm;LE z4v!0P!2)N$7awCI%5n;jp%2-;j{b0ClvzkHIVQRJMI)$jiC@ls4HFj zK{@7S-YlvkF3fM2i6@%@kq~?T#KgUP zy6n1AOwO$AUycY0=+cs7a`5oAJNRh9)8IE1Fu}vvQlO-7=TOkL0f+?|)1!J#Ia{faawUMwbdrOH|eZUGEvu)WLWCmW+ue@s`y`Vfcie zL`FvHPm0?>A_8m+Q|&ub>aNO*-!GegPuT@o;oY)2>ZG_!HSJO#T>vP;$GKr;?oL%s zL0(j(yLm%tI!YT!6ad_)T#2Y~`Lu-a$0=8=a2cx#MVerf25P=9-b zp2ZS$wLOozyzEUjG(*dY$_ac4n}~pvf-GYsQ<Y3WkN}rv)Qw zdk{U;tcgAal@5j^%dNis80!)TZBn)-nGZ|$Gl}^#||7dfc4{ zSkuDjoZbcBx6!GMEM8KpB~Fa^s+uK-|dC-&i2#p^!dld_lQg-eD1-;v72z&ch#m(G#~jc zaWnAsTV*I#Vp8*p6#txhNw?YB)7B+Wdedj=tY^j`)9TVHX7x zZ;Bk6P*KXD!WnVhpxEsITlznhG5vo9-T#*v^#4;d`-=tl$bOE+D*sLk{{NEOV)eRy z!p|0OXe#e!eg7Bb>;G|N055e$y~>T||E~V~KR*Ba-8q{TAbCFs{c%Bd^C4_3=^e6$ znJ@Og&jA15lzIQJfAs&q&jd!gX|2DU#gxW55@`VV;zQ5hk_N)M1vx{Ut<2%IOZqRk zl`VAjD_GiDrLY3xUh|9#8Ujy1!s9^4-fryUV1H-%h^2y8inCp}knHIH8qOT*3>8ib zl2NjK62IMh)kGLFb;?fWVZK;-aoxCm4fgjKs5~Ld5uilydVOD~Ky)JIToac{L*e_c zL~*_T2ny&BSAoN9o=*NWV3-E-dO5Yr9$it|a2vtlVfP)a`@r(Tb|`Tfy65^Kuh3&# zBC4p>wbJBeWRR?i_bIY@lH51t&;SnZv5WMNFjt<wp5`4`@-08K zT)xFleG1Jl<=_7A@S$3IsG9083NWpafb(v(n^8G>6#nb9U^72zd*uv=JIO^J`fVAu z=x}W&FsHeX&OBQ$$+2*Emsykt2Z(BIFGmKeeBWjf;Ry`XyDqc5ao8UqHxLeC6~E~~ zWj~oN7IRc-+DBDYNGw_F+>A{RnF@@_e~T}3ZZo(F1fbuyQfiB^M)zy+ z?GC0J>HDU<;5X4&fgr)%T|%I-;2P)#f;JFbLV)0I4Fq?0x1=F>fZ*=#p5X58(m=ocpP4&z?=$n< z&+mt4t+USBeb!lh_NiS}yXseUs#LF?ke_N0h4eHx4OMrQ|HvGd#wr(1J_QyVVvV5- zN9-S7_IMcFYu!13am{e4F(b`UZcY2lak1?F%1A~zgLpLCH=V||^RJe?$|1U~wRySr zyEC{`JScYVi?ANUDMmGOKx7)p?XUMkXpse|YUs!FNMFx}!momgE2{|rKZ_TC7?7J@ z$XiqS8xrNCwc$c4QaOhA4RZ$qRkfvyL{uC(McOT%^q$!C{&`OJb+5cr3fI3V2+a|Yq6K&MDHI(Oht6Ma-I`JI@Qo zh>>jM|0Y8=3$dC8+DnSF?aht*-FGe4cS0{s&JS}rjge06xfb*G(|bIm$yO|r$WC|otaG%AM^=rZ&P*8tw^%><<#fQGTT}Kl308;L!XZ$T=;!4IvgQ4H;=`S z$g*0%OpF>C`Ir{fHnau*R*~NRKmpL2SHCk0s`eTsE#^xcakplXr^af=CR7G1b8_yJ zu)uy4iTcjiT&fM;+Aj^iJ8uDOxq~=j_}oz`)NfzTiy@H@rXz*akG=XMOcWTvX0E#m zJFCT@MA_mpn8oA6<0Ep#$HD5hj`qQ;=Un87B+}-7w|Sh2a%K6jsXpUEu+r%BX6s(^ zcSD~>@YM7oIU>&S5>f3vH|QYnd#tu#^cT}5sg5J_p8;(ajajETook&=fBV1&ll{d) zaic}ihtUz#R}U&3ISugdr*}vfiQ=Z0E=FQ&>T67E9!7cnlHAkt88r=uSV=Qc_J8LU zC!i*&_-OrLP~X6qJ877gb8JX)PS*BUZ6U)lw)j+p%__-{+`ATcReJ`+ zaP-bYq>Ndq_ZloQb|}AbiEtp}tKV}6nCc48FUII%g;BHSh~kF$KZ#`1SQp~|mG3R& zMRzwuF)!(fb2&$$@P~m2?>1>EfYs5U;Q+%9_ZX*)Xx_tQ$G)|=BDf;2J@s2#apdcw z7Ny;cDrCr<=IZbKhu2h8VakM!xR9D(AH?pv>hs07iIZ_~yayakd{3^ZhxL0I&x`Hr z#*3_-BTT3mQ}rW-k#&dp?i0LwmJe^i>YAMpE!|H~MmIGBNmH_71&e<$V|*;sRdl}SVoGGU$gf^ii7;e)y~m_Zw=!YT*;Abmv?51j;o`{N04Kd8=H 
zgRg7OeY|(FoU+`O(-2J+cb0EjPY!Orij8IQn2E2Xd4AbW@qyOwP~n*uF0V$14~xN#V%#)sxwOYbSSaz}uHA&)9oo7G zYq-!lJp9rwe3n3zA)PW1Fjv_H77{ujIbQ6piivaSU9-j>A{qlE4l_y|rwwKNh?B_2 z-DPH6!=@_9H_=g4!|)g>Vxy`Y0;%v(p`XzhXc3^i0QfyON+_fbaAPd3Pr|`|u*$k! z(S0%)o$A3y!P|U2Yug|%;u_jG)EmWQ`+h{KSRgl^7H?tG0`I!^K_PVxS}Kc7p_txU zhAfp;0WiS;0m7*49~8)}`$EaOehTCgk&v(8ztcNvUJLHnBAU?JM3M_6Ph5hUg_hv} zvnheOxFo>ED{h!O118GAj1ool3v5P%^GHa#v$6m#_uv8>^@irc;Mk^>9jX9s%P2K+ zQqAPbNw^vmGdNQQZ_j?J5J0hL~F zaOTE|FpTd7NxUsy)W>}J_X*UP9&xWChlPQ0SdXtqB>`bS#L?Z9ZnI>7dG=E%B3gzV zjkD)cB=-`hJT}c*iwDw*LRCORU+-mh15=p=)ZWgn>D)*GF01lzyAFB2+bDd|x8YWWVQN~7I53^y0}O@~L_%3cZhZ$I&Y-wbAe4nC2d^S#g?z%}ZkRUKW;OR)(nAWyN{xG@J1fQ-g2IGP-4tS1k$=jJ!a9YZaRAsQ_!GorPtZ$J7wyfS?w-AoyKmC)i%# zxDX>6H980P(zL$q-41QG13jUA--R?PSVSiMX}Ijg`Az&cy_WMlYP3yrq{lp$kQe43 zn5HCmKJCLY>N6tJL-w_b&uf!bR@Sp=@BMbaPP9PtOW&>6zD|wNH6rXjVy+K+PX)0T z>k3cQTx99nY*Sk_j8GzaoH~aqElxOp!_$vQ>3GI3*A@YNn+Pkf!fp$ZrpH}mNf)|t zVIP+7nM`XmAZ|TeV zR>ugSInW0f*Xc~3py>He4wSDO*O3Mz+Xts-pjE%@d$vU@W(&10Vism0U#pTHai@}7 z##>S=jwmk*86e*YI6K(RI~Vf7QP&{2@>>S%6D>lZKbf#1sS>1#Bgnc2o;%dxM*+#? z=@M>&z+WnsXL#dXk%?S2$ow2X^MWHtnCfQ})n$MoEa>|F80oWbDNgUp1X4~Hcp z-*Zr??$Bp1FxRfbXGXT#YxZ6Icwzu5Cpwym8Pe{?@EyevA7cW!-F?VURogX|p)%Gp zMe?)Bq2##AfQlGkNhqBUKCu@_c*0&)9!c@xS`3%EdR=4-e}{-|G);-3{2-{ohu?I5 zfjP_hx(4&CBYs>*(_A`%hWhE2haFNX#)ufT!vK6Br?l^xW_X@nKAmnF1b<) zh3va-KhN|d=<)ptUK_LY(ioD#&NeaC-qFLkyuedaZ+Urr3xm1mplp6lsDBG9hVJ6f z63zU;dCavK^QcrBi0^Ic{SM zMS`a(0cQ4k%qvt zZ`4r~^l=V6*?k|TbrzYmVR1qZ9eg%k1M76x%%$Ji{gdzqSNErrk^(q!iGI{()p=k zG0b?d^ZF}WSqe`bB;6BJ1Qjx1wr+8rxkD)V01|evSL9lEY1w7*Yl{dbH&>DFRCHen zC%5Spt6d38zwfk*+5LfdVcqy=dkz2+C40k22y2;aeJvHB;B{EnGeY}%Se7W$i zpfwg@3O6UJ(L{*A=xVUjjoC!aXsh5C43iZxY!gj+!MmCUyJRnWL#A*z*JM|$&3VK7 zMUcPayE}V%BpF6O?@7yxzHrlwcx20Zhe`n5u}AKPWJ?(l5Os|?Urn1C_xx76GoXF| z{!ufNREdN`#ei}lNJ(IFh1L=oJA0c6lRw750=KS-Wpce!O`n(S*IoToS_X-DfS+({ z3-v(HGnZ@&=E_Clu!U0!+dG-{QllR>#7rG5Y|BMP>x@E$c^_j?z0kYv!Z7GlSGO;a z!2QVJuA;DOIT*g&vV~gS$^x?i3%vnoN>a&Nzz{%q74N?uh9NTeIL`JS#d}5A!r=wM zPwNz97B{)nKuK7jW+t}^mTh3m(*lHL>7X0-q4qDP_I>!?CmmNGkTmKofId&1RCC_4 z@-c^2Bb8FU`@Bza`JL;xjn3#09))z>`L5*1W8k|{8iJ5dLF)KDT<>*HRd!vLqH4#uu>*4Cy$SI8N0IO4 zwvOPQ)yIhn)*@v~`QCtTtz!sVj8mzILR*9ab$PV%T^CnzCS?l?3iK5UDqC-$i%W>L~73t*0ymwWh&D-9`;sF@5gAALU z98%s&vSepl7J`i-@|5tIN}!ib(Hr6X@<*w~V^8!`p5T?J^;U$G>cNR4px&GWIjoiu zM#P(`84u}UUG|5cMY7m2;{^;AA)cDIDr}ez%S0n)J*;nRMe0&71pqc^gU{WgaNga; z;JQ7B2st)dJBYqp6EKSm6y_gsqMOY6j3=>jgHTyUf@heAjfB0jf;5Db@RaLU(d6(f+W73jqjP zrW}l^cVbhd)9W;KHq1tULP!)Ha&69Ge+X8qj@X{IZwW5KHnyV31EiGZEevqEPZy5! 
z^zoPsDA6z`nVZ)W{jO(-ukc}T?Rh|a4BYvQ|6vhZKuoT>ZprYflCIO{yu|jdDa-p2 zmnvQ1UMn(xr5?la+Tp_d{IQ#P)LkR9N#DV`x9nR5f3u|fH=p+{M$&J$IR4D%aM(_@ zno}{AzMT{h*1dbDh#%jGKL+1D(9o_6^GvIXv0@xW1+cPa=eMlgOC`9*;rm3KttKN^ zvico5=R|o=KPXQWbkhN3=@MkT*aG5`#SPv-mBTg45|EirBJ{%!gp43eoB)SKz-DZ3 zUR}A{F9Sfh*4^52ZCGJSGax&E(frsm_P4ImJYvqkpN>nv%{E`}y}8fXMFBTIt76=Z zsD)5CspWiMf7UeuHQ2$5jLndC5P!@sWM^WZ^sz$KvL!L7i=e|g?)i(~egio{%Tq^@ z?EJ?shP|r23<-`G4>>GinR&@_Eyk}?;beor$L3AEGhUZ2zH}IcQ}buRS;>Xq+SCi8 z8(srEj^+vm&ZFal57zTRr)g!JRphqzxY<7ymtQC#{l2!I6F2E;>O|+?>g${t3|7MH zZ$Nh38P6YybFCrjc`f@I-0aX%m?#Pb4Tf&K0E_fDng&C@ZNUCe(!lPdOCrP=(50xm zA2mGvZ;vKvKP(+vk8Eo>@VdD!C$V#^g9Zh$rk#pdIGzl`m9W=v5|~^72k^4XB05_W zej%ga66I;nSCRNF?9 zDgTcjUpR{;?W=ZN#K_xT3Tf;s^X^;?#CKDSxoA+GIgiE;BewN?j@uI@eqpKw(qu#T zAw)sa=(xD6URqrz%=j7V$4@gou1}^%w^z)rhB0EoRCtLpgavqQL$lk5WeX;CGX6Ik zchp`I`{CD;rxHG8u0KZUZ5}AgQ~9^W4Wu3Wi*7i%x{TiA$>Kbpg44o2awQwxEY*wO z#P9UrgL=(Piy3^4&?Z@2@b7JQ>+=U(<2K5O(G^|vHV$)rew_gICO*$U%BgzoCQS0h z65NC(yt31y#}*(pWL($~MfaO1@nu0_dRyJ;Fcu6@r^yK$oh-W)hhTa1oV6O%o{4Se zOFJ24(NVcX7St@U+mb{o;nB%v=|FCPF^3;HzVY@`@S|bz z*6oH~UsXlQ!<%e7$jeZ}Qa!KATz<4o5u~tvNELcfmn!b!g9tr(04Y^v8PPj_#r0_; z;;uDX=I|9t=verA?LM`9%Wdc5=viNo zSKF?*QFSmrF+b8A&|bv;>;}Ig2w4zd_2J$YY8O6Ayu{H6cJ?=ynmAl!V;y=I-0U$l2@dH$+y zZq8x-NNj3woXUc+tN!A7mf=&pAO;_9$x8^tq?9`(%m~Qyh6{ z2m|dk>&SN@dRpT?(|^Ly+wG=}VF2G-;q?`VlwEf~46_mWdw%j`X+ z>#+m&)R}FiG}GBKA1x}g4(bmY))C9E?0DB_e3QbNW?j*Vu6-;m+;L!4fFnY^#`x$2 z{bXvuyY6jMUa0#aBEP_cSzDo)PeQNLF z3C*F1f0huVNkMW?<1TF^5Y!#e^`l68&vQdCdco?V5+cST^qv>@hM$Fp>WJ$`uua~3 z?bwtlx1FS$BG){|MP!~x%EkCYuV{U?#J_viVx@7rc5esj;=U;PX{Pr@H-2}>#w zran%Ph-UN+iO!Y7k~KE+ZImG3M+FIyhSYc|EOLHozn%R^&^uZQtryf{LpPqK0*kvZ zp|wjb)iRTS>OPH$WE9Ty+;U3G>`3j@OBEc z_m*YUno+#b5+iyJ_*t-EqSkzpu=K6+9EJ|k!e$gH7bAZ59Brw6)H2qm-BspO0ce_sx=|akDo1w_g7PFlLC*b-;ka> z4L-$eFrow@%hH@7TfYd+Ib+M!NfpC6dpZq!$cXV_Sf&_Lp{Jct+g~w0 zK4^gsGs+`el2m3JeNxTQf`dV{zm=JF?uH3(x}f#c?M_ECPzb2&5pz-;prgi{x%zn$Mnr(X?R=g_7pBK)YP!vbSU-^3DJC;%5bQ_Pl~!M_jkim9f%vGAh}rD` z74x&52KM|IT^mo2dIz6v#$S$CgAv{gvGi!01GGQ)X^-lG1pE@Wk%IJH$Q;LB(kJ+$ zN(B7MwU-H5eH7S{>Vdy({B~oa(?e>BDo>bqeZ6tn0UjAF%LW@vt2$uW@FzuB0KY6F zFeTI=jxW^H=+){MY`cMWtn;j0XRuSV{gksTLGM|KbXCH1^5FWH@mC;7BvFer!H$Cl z@dW~Dux7Me{_;irWQi15(TOkxp_ox;yZ9=yqb8I9w_O|n6VX_@bYYx=|i<8pyWaQiK=3Md}XY@}jNb6;le1=hp17u9DE z_a^BrGL}9FRl*61d+lG5Hcz-hUCqXZ9^KIcg#=r&S{{FQFnq?OLPBwZ^QbcY>bSue z7g4~ukD`xr(S2J?8Aa}^h_DM$&jo%o0NGuFnxN;-p=lQjTxad*S$}=@)_l{sZ*7JO zACa`6WL9RTy{7Z!H=Ew$l98*>1-0_#(3)0sjlPWAW7kn%I->Q%TIaYAB_?qP{h`(w z4_yBS-buN1u*mQ5dChXtQ65P!TSAE|eLI2PrPZn0Huk9hRWyZA@`^ND*mli2G44(7 z=+juG4NK$lTpfQsO-4jkgTTU?eH*Qi#3dPcF>sM3yb-GQReqYCtvff^To9~hUl{;a zC{AKYXm|ViTzRCwm`tU|uA}?@t8_RO)21U@CcnQX+1^tg6 zKi6*E!;B^9*{I%mgjtR+_$9_l~6dsM&Gc*8sfRFf4U@r(GU`z?5Fy?2* z>l)w(I0vq%l5>uEp1Xce};ObJr6rZvpBt?oGVbl4Cp&fY}^ zFXl>#5kAT^RsP1tj9g>zNtGZ0dxVS8=>V(a%#9}^fJTuWUb`?Tf0zpCK)9>Kh+~T_g zzF>^C`30TMHe4^6>PiH?QTch$+ey^v6v^e9A=hY-2}g(2_lndGvg;3IOa>zUgR)H6 z!ld7ucl6Sjn#3JHGOM|c!21qFUJt%_e22VPVwG~ET;IAP4CGvaJj#s4S&AH}oBCP9 zLThnfPJM=!hk2lNMnN3-jlNhtZCp#mqHQE2lrk6cCxx$L17x5=Dlid`)_Ml~CY5Ch zg#Sp->_~5>J2p3uBCru`^!f$azL0~MbM2hzMUL8p{g?>ksh?UhU>qBO=LI4VQNpEU zK<{v9(r!}qX*Q^~N&Xh;9Frsi8r8>J3+MDOOvSVpfAzvn7fcdn8CFIZw7Rte1-y#|^Uh=$l}?QMR1U%QqqKPU$w24iBrd7J(!OvZCon2;??^5u{T4wZ#{ zUY`(^O+>ZQK&V+ZvC`PLQy%VD{2g(Q@6H++{V0@a3P=>&;^~7zcx7$CBm_qo6c08X zuU9gX`>c`|S{qIerj?U#>3{Nae)bqZIqTLX?_Yw}O9nK@HOk#Yc%<_G0)z@1Hj)%k zQJNx5X`Wz)G8=`X%y3~FBQVl)@}~6|7kln5!jw*om9h%Nv<;;#5B-cR3*FQixRsyS$V5^#X&QCN zP^^T}tHlg2ro&@2sYw$17A9i@l#H_zZ8&0r*Vc~TV8f+7dN#zvp9mgtN13aC)v%mr zaR})V_@^)()vz7K7-1Q2C#7-|UNUk2Ab3c}!WihzhQ 
zhr^>+eqDbcjxrRPH-3^b&0B@EQD8ZiZcOl1h#Wq<5=Nl3Zt;CyJW~T_)^oFH37)z! z6*PLHmAS-d6kZ6ZB;jl)=A#-n>D-;_vQkN_ZqH-z?jtlMRD-ZZ-`RJU4?I-{=(Zt` z8H;~}XPzEa_VmmI8tW=v3WF1py_Ty7F!rkJdbHx%C5Ll zreft458j8^0!&=Pgf?M+uVLyGiE2tpdj?f4y0ovbMm$WHPL&h;^9W?Rn6Xaa`783H zJSm+;-n2N`P;@uu2_K;1V};GzblDEUvx3}D#u%8G+aw&sZuApFz!==1%r};!;Bwl^|~*8AMTu#h8Wr~ydtY<5p^F{qMS(TCtT^HexyJSXkkBNvHERqsK8 zqIyXFuyToB^rrC@!^jpGX+Y@IkZ-K$}A~4FrWSCP+^zJ&sw(q*lyUJa)gf&%?9LSxgQEU zB=4N2tqZ(DgjB%|P;>=Wsw;MihT9-o@u__&(}X`TM|ji}B;(ue+ovDOypfD{8L#~wz4mlLih#3hN4vCg65{FvV_FH_ zXw`ce>&>`_f3auPNuv`~mIF=4?n2>g7 zb|>Ifdr;6HuFbqfeC87Tbz|pB!e5}ifyW^|bb+X?`AA@G5ypVy?wM8My`!wn{CaW; zu%9fnj`)HQd*F$CVhNg0`KOg5{2Iu%BZriroXdi5=QhcKmcU`2a~iIOGAeh;m|u2l zm<#8G+4B=c0rL@&HDv}}k_wqHXv}#6%_F$psq;jn^F?5hr(Mnm!n*bD1@m+;HA!HI z#ptt=r`-~7ymvd#q6awHMAAdhZR7G2VzKbzo|4I$oY*X(qvsW+H_MZjNsI;35KJXg0l9bH}+Vc8kolct79j z$P$3!VA4l0f(Nt5}G<@WTa%W7nVz{WdGY&ZCW+`or8MGkOVz zcq`KNmt5)Dw0nvxP4bKUVMGKY%sPh7jV$w!_PPB2*?z8dEyi&B%?S{JHwZn#xSpl+ zJEzdRBlOUk07z1%+ae9-U})R@2)&S2^axzQ8#9hJHXBT1J?1&`@79-bPC!Ic-dGN) z%yC)#%3=sNHp}x9IK>REXn~pn{|3U&{oYkJZ43M;Vd3CI8-DJPc?esu-}&N(SfVUC zga^s~@ZAe&gV?EcuVe8R5Lu)jD?b8@HIjDX1Yk$SbPN_}sK*LpAW>gMTMB-hP zJ9k;o3j9yqk5*8oVQUA&FAT3V6Hx0I?5FW6;J4e@LF;0XXp`lQ`BCdTQvKg&f{c@x3%b zWUxbV;w_j$S#9T_XK7%*FCa8$wU>h5UKUnR=#{XQQYRfDdoA~N6ILl>-FAV>AD>To zOA< zZ3z+kQp}XSZ`XpPJ~)NtB2rO8EZAzTfDTT&E9U^l<8PDN+?-pyBQ#Z=cvK@k1lY7C zz<LVYZ0-0a>(7_7vIwss+G#1(_sdofKp18Q2ik^f-=}aAolI{CJTQiipy5gK`aP zP7FI|sJ-+zI9=X(b!BrpK+zf%=oYRd+xC;o){WLzms?YX{i~>tJ-}mwU7M!I3v><1 z-$Q9YYI*#J3U|99BQ3NK_Trb>EQ_qr+Dw3O6Hrk{#4lF5mqDDvNEr;{Z)#XO{IN4uxVtXnMTA$jY(im6|UPv%HNqT}?H76Hb0<4(N6$ z1$Aq0C8X|zuA>Mq$M-h}OcU~tnM$A01cYR$LXXKU&%$*Y<ve?%fz(!d~0 zW+_F^yE#w;myk6y>@(7(l}P8wz9C%id`b;@OG{B8yGEbT{(QpHL=hYt6r)YIjWnZ&s*n2M<;! zoUmpJP7jj@+7Z8I=0xsbX;hn5!oG0?=Z&%NCqt;67L{EH^x5{yQ2#VD>!?q&OZB`s zn^@ieuHY>JEO)rHuN{73G5>O`xMs2afN+GPd z`>dnICpM+)7nK#OhWvQ#-Q$kpllL0SZ;FLhrpl_CF)2-z^rgti{7^j^=fGVg@B^R? 
z;k$%C7hwk~Qoe~;NdAV%azfY`6f)1U$PloVFR~+&AJSy#J1BDmAEB3F;(H{9Se{qa zNne`ZH0oShgGDw~5jVvv)6}Uy2c#(x_Y~H?hL!;N!>HK(uXphE0Un%kQDRbqKWWg7 zcN$p^RAkpvaCPs1rMmNM4(THE|rzj zt~CQD0BwjUF;Mi8K_S1;x&ZUIMcn}}B6WAdpkk0SU$|N&B442$XQvx3?VtqoH28pL z7&s3V_3*HfBCr&msxOiZ3wfjuC&&MVj!xj* zE)vlDS#JN)s5JWfjcJO$ps8V|(Q`b>XH(SH#64AGL=$~L0B(=`7X|!r5W#^c&4cgm z9h3lpT>b6v%gt<;wGg+(Dia?Rthi%{T|!%AeBJJIw|BUVGn@0*(#z7w<9?gqm8Zng zV0!laZADNDBFW=?Y6;IZg5__#@z*`R$d?Nn7CK%L>0(Vm|0 zHyWErB4NvCwc=-`ygcZA6DLuPY+oMulIIs>&Ij5H3GeAPz!8=Ii?RMrnE!N^VSN5I z-S7i196aa2m;Sf8{q50zpTPe+|33=+|A7LZX=8`7%-1@*u6j%?#5{>h^>!g&2D z%3eCZK9?!Z{W>mn8l|iF)Ug)w1E}w$QSyiEzl0K@fWXM0kLB4V#5#$6OQl^QGhR^s z#>WQbe@Vw&Tzf}AaoKbRXzCDh>Zq0Fw59s;38qXqWHj>G>0fl9Va8=KCrelmsAv4U z#Q$H;D&Zit?3e$G@G*QsXn|>eN#_63nSa-u|MNUcM3_SV;@5wlERC{~^yzOy-D$rUF!kl; zyN_{nJsb1gS;BA)Hcbxj)?ijSqI)}6nL7`U4^orci8`CcTUE0huk1Hk6%{_+0zrp= z&Qj)^+)~VJ3o=zzw+0gE;X;iqAy|~^x^=eG#|g?DW#?N%Cj+!RqCBu$vo8x;ME{Al z60?ZTSnT34qOKw7{3lKyYxYeoH~Z5NsjC)KIxeuIchlELKEF!&W}#rgHTF6#gP1 zm$I?RE7Pt2z@9V)2#$*KH@Q3C3HiJ;n566ThU&b6Vkmh;liKUDackpbID_9%D0icTeu%ktKHvh54;-YXc2QB~~-oQCu_UxKQrEV_%kjR1`viNxjNsZR`W~~dp zhdG#1PNoY2=k5PFvv~QA+jLN1rpXPXYRHBOPqz1Ltb5yZ59)gOCqEmqZmZvy{*K&a zud&kczf<}jV;p6KMgd7yS{4l4$N?uTM&*-6YHrMQhHjqs{BFp;GRij zwL9>NoE18%u97lJdNYIBuoR6>SX`u2Ll*Jtw1Uc_!FK7x73@U3NWbYxgOydeq)4r= zq#)Zv=3^prT^M?^M$H7N~CChTLzGpwxI9d)LP;jK@43>KNgCYwV@(?q+F{<%*>*{~#ABoCW42$NC zZYQ=^7h@O%Ai2)|6k#{pmV5UiRkMAkrpUni4wuxuQNc%SrF5>vffeOz#cEPq6!~xS9fabTF4fK< zIpJLhCj=<;ou?QQ?YSHN`jnhDySm~7|0qzq;v=q4$f2kHvFvNyiTP3zTS{>hxn==t z-{B{!KO51)9f$X}aKt**IBwwuxoh*==+LWC9VUAaX$tHhQjh%JsH{q_R12~{>vg4A zNMiF*hg}~{z<*YnvK2dx{fyi8S^sTYY9Mah8)A7hPv~*R6Y1aU!$RM|vUhdhqt^Ct z9&mqDXX;_QTs2zEpr)%17D#zypz2#06_$m;?{=uYsMz&|l(%`8i(%Lbq$-u0q^`JV zP6e}GzjRt3P-{GIT!S`TXSI{A9(uildgnu^L?9Agu%l^}T4C1*LdOkp{d)U*v2PN7 z?@6;U1hB^o*4ye`kIMINH!upJKzKp%$0miFm*3xuP)yXW=#e9S>g5<4dJBTf#M_E{ zqjdXu>3_JO2}I|33+X%}=-^SNQGO6x^r14Qtq6e~{o9U}CQoAoQWIfN&J@slO})%q z4T*2Eculr=*ier%Ih=9RAQEbr4~ov*Q_8=QiSp1J$$l&%`Dj)0+c0N?l-cKLgOKVz z$D=sQV7Nazk>Aey>T+shW-G#a-O#Wm(tl8K4MT9MS!Q`fdB=YQQqRH7fR#z|yYvX@-b*$a`K{e&_5f*FAlor(2hzo(mdTlJpSYJ>yS9v4P*LjH!rvE<@pFOv+U)Pxnn9 zQ#b;;$b1wRwQ6^z`l2aIWs>0+V;#=g(?w1J9-DD%dj?uGpG&=1wmGk0Ic*oC80h=S z2n8eWbJwyUTrF<5OuRq9ypZosijS_|u(?Q9K*ra+6`HaUUh_x~x{M3D%`fC%$t;(- zjUnfk-Vw0YvTImwE-5Ev*PT`IdAbX^rWW;xrTS@DZ64pYWz%3_ZzA&)>ZP{ZTZ?=PbJ>=QYO#(72|Sg%b3XOXly_aH$HAn+3u zSVgEIbu#<2h>XuVefsE}cbp5IC^xYgIrj&joxu)aVv#qAh5J#>&lz32*VCXqn+3kT zI9*9LaIw1f$D&E#ck6Kj5`~M+ffCNfad~tik%1vyn9=5ZD6Ut}?UM4f)856iT;*S6 zH?u#p3mO~O!!xEf!Ht)57SSDsJ~rSIO?W1gQaxp4a*uiC!=3Uty?z9-xT29d z_XAfv7X5WF)=}AOrPWmYm-H{1oEtctTZuTnhEr*KJe<9(SqUeMn9ddz?&9-0-~8qA z<`s$FSx$RsGqrc>51H5Ai0$)bt`+QBpw`0lc#)Nx?w;wc?PwMw#ptZ8YD`QyMyQ8H zmSo4{s_|otgx9K{*(6hP%eOviQUoJ(htW^B`%zw*OGh4dKXhu<+YH3JTRuHlSWf2s zo?h>9+T7})JIX1;eJ;Y;h}$bq0;eHK(*7~n!>#FP!1dmzKt^da^~~?mw5t140}Zb~ zw?v!}>va{0v7w>gn(GL$T;**2`x z=KVvAniFXe5%d?2tl>o|9>u7`I>^9%9ipwuBHetw@3ZJnf^<3$d$dp7##F%i*FNAT<&! 
z>kUcx)6wM)oO3=e?<@WrfGR3xqM_G2@b9BEGp4|N)%Ht_x@8?8W&!zCp+Vim<*cr- z9Hc;nYog2`rYE4noEVeo?tBY->jkRTl8dH|O> z?&4a{bmcQHbo}5S_Wl#C%$fF3s_&X58uXMd@TZny#SHlxE}O$YJ0BNnV}7Kmxxt^j z&U4yi$Q8~6FUP;Db=P|+6_P|s9L@er`&!&0OAWIsvtaD4c9L4NM;3jOTxheH5)$Pm z%dz9PnAEu1!WtBB zjLZab#HO5=B8OK1smZZBs!6Zm6!5X$DCHUL6)(4w!Y8(={1#&F)b$_?P-fHXQt`*j zmVypJsv~7N(r>TZ0j&bsF|NHnRDj`A?jM@vxckB6^Mmg`wcX!t@LyIx=W8>e_G#j9 z{`ZZaA*4M{PJBa4ERqgkQI8AGmMsZfR-ABjKFZY!i%`L|wy3a2dOwAWaHTL&rP4ZW ze`N~(aW$W{f~@doJz{@SUL&wTu?wLX{X z%~ndmAj^r*tub-l=5vrs9$^%c701Pj>>M#e4=^ee^j?O>Tvf0jh=`pAv8S;|vRUQlS*X zMG@fU+U}A^0sbaeFG=#ti4}=jJGfemh}~pf)H44QB+OWzJElz^yYb)sqIek^TF}&# zlCZo_e8iV1(u?M|yH!lA><=q&rIfq#WjV7`2~&=yw=Nu0T_JF_5{<{i#QT@*1i9i- zsdsn7-WqeCm4=B8Zq#YSr^~YVd4M>#>xa55Gp7UII887dV({~-;wm(U8dD{Xa zqm?D3SP@r5 zV%cLx(BPre_{gAIDOxp&FXjSddJc+l(igxlahsJ&Mi)t(a)|bv z9qiiX`~DjKeklv*VKTs{)oiA$?TN$XtVeT?^KEI;tGk;OAJeD4r1`>^UEE@QL7y1! zGT+?tdR^@sI$v0U zzBi*^M1OXc4rb5~^qn z-R9v$OFr;tE#_$3m_1UWVzfb@XUa0?Qhval_ z(^3Q8mnF$Qw`;O3Iu3o&FQ$*`+~l-2I!z;cBq9j_27>}sGqZds_g*MjKgkIC2F2hH z0hSN9--Y_{xB;dk>940*3o^;l`H+H1-)<-1_ZWP7-McYNFsx>Yi~$Nt2tZZa5d~$w z4}$WP1`Y9>!?--AWUuyJF5Qi z%1tt<6QAo7*D;9vZ}8batmIe{B$>AqVFQLSftFK<#t*eix`5k@2^qUWG<<`Jh*C__ z$&XoX2JAo4-YoO$zSS^A&T06z1ITXOy-`;U00`2kNeR=L2iT6bQ=T;~nDc8O8=F()*nJ`B!uZxkLJ*e?V zyH}AY27mMOzTROQNaDiSb$E6AdYb*TM^wSmI%&J(>{PMXK_NnRSM zhRqPA!nC9LD|s`{*DtqKWS$=S28P0?fwkKsu=ip!;+6f1(2Pjs-Vr*m);M$KaO&B2 zEU}G9r%qsm&|7m@1R)zCS=hVcLlj`DBW(1J1Wf!tv?X5=oq~_I^GFd_18gc^#fqWHZ;|SwLb? zjvufV2h=(|m0d)jQtHNiShHZe=3iOo1{sil%s4GkMjbLDM+G%C0*!Fl{g8R(FK8sob9Pz-M==}qL?&{aw@cf}?(H$dOK51U{L*^=4&dmB5BONhNVwZe8 zo$Cxl-C!%z1r{gbY$3_RhkVx;@r|5H0`9(Rfx*Th-*|FILPee_Xtm%1wt;FuaBWd< zF)f45#I3TkP@;yS9<{GhXR8Z2U1&VF^+vxunAZwcD|U*unA)9jaz+{yFuOl)u4pH= z4oT#`Ue?r=NxAU&kzPdmM^FB7ui!XbO{@NEZn^bBpOen+xjg@!BGMh>!w$ETSz!WQ zYQ-a?$I38lG2bSojyKg=iQ<+)`r&YL?35og(bS(n-C)$l-JBM1q-ZJQAVIb9kcTr! z2FsW`%>LKJ>2JIjcH?eq4A^N9w{1$|Lk{YWEt`O_bx@e~oWg$}SZN&@X2|_kAF%gU9BRy(lViPcuDFQq zs9?D@FCDS4O+KI1AsS3aeA7X?2CG4G9`^IqO;cZozD zK)){_dCXhxk!v1JB9H}wQg^%yZt^E<54niw&ihC=|so;EYQ~Sq2 zLFY}aR|PN20tT5?rk6u0u9Rl+Na}3S3kYOWD_?}n8JEFsJCmqAVn@<&d^&~`!x;*=~1qNpmH+)y#@;6hk*Ka<)Z4db}vyKFfXb14EAls z5*=7ff8sZA*Y+HJ7wTmk-kwMDUyt%N1TB5N7E!qRE;E-)etIJok#A$v# z<0HiPj)V`boFf)JX14*+Q(mE22SMnHw)8O9ztgMGF2P5jj`#= z6*?D`f3>asO>k|GF2(FxSwXh#trGDxY82)1|KaMn0IvQvuyWs8MB}mB4NiYh&x*j>g!MZhWDxI%! 
zrL4Z8jpjgDxIA3Y5O5{*ec(7_v%XHUCi;2QRSn0e`T>MQZj?(MMpl#=ygi9A!|3wI z4|xL?l^nN~Q?J9bShHbjdSt0=B%P-rEdN0`3|b9B>O|O|W4ETX33})RkFQ_O*A47E z-6Q^}s!h~d9h=SRK?i0j+s3NyH?W#!YKw~D^XD$R?}+0GsVtEH^w?*=_?#~E4*6s} z)LqSTs<2?6-{(P*&0%vmi6@#WWJUq#R}=`MDqrz!o~%9BGNFpS#f=&^{mspY@~}CS zs5_SGId44s#oFQLYnMd!r0k4D9gi~ptK`<-a?F*=6+VrNQBUMM(rVsMdd8lFJP#zK zU+s2^OKjvMrcW92y5YOi2PuCJNX`Td*V3h}gFo;`a6TZnoHahKI~UcU=)^{s`dAD#C6B&MXHc zXs@1c6&@ZJzbQ29shVd6G=&o-4J}xupAFw#vkL)`!?d5vtp>7qtHH#b2R9I69ti{U z+m~%8V}4zSNB!GG8OMs&7hs8(fb~NB-?+6=DP-6BATV0ul9%}4#fR>_JN*FnEU?=d zfXFPlj>qqIBH~f`HbXtreY;h<#@S8vqp+Ng$OpLik(@lDb&7EH%GHz4OlJkMlDL)j zxJ*IO#sD)tgE7eXq#uo2Bk_fN{&>DdYTH|2tJR4PP)RDd)N@3;S*SJ{1m(;^i_+9US zy@$1J)r}@>GP)I7#GG}r^4)VSTEawVA<_qK=MDWPE}w3$gncsp#?5C{t~d^UQ*nVG zhCvEv6KQ=y_v7*u-TqJSGNsnG(I4F>vMI6+D!uR7lSd6>*|Q8TTyr1oqplV%G1g_b zPZ3?EP5oZyGa87~49JpLd%u0H+tk{mQnH;A{i7wB68W=;9w3T8Zl-HO2x%wChqLYP zuWbuRE`Y|KOgFe}G`sGiLe-_B-FN6);BeZlg0Of>lVvcq*DwzCL>Oz&JE5d{)SSVv zTC3YipwG{m-(pXF)8TdW6W<&XxYrl8ukwfN*Oa!iks;eF$uq4Zz#6WJ!j5X(ieSvbCtwy711J#TmVWRJHzAve$ z|N56K0+pl4WZHgf)F>Qi0Ul>|GP@ow5`@e1x}3hhqZyE*Z{+?q7v{kFwqr`y1j1SY zUtNPwFej-|Tlfh@lL4gC1!fva6(9|@9a9O#9*HPw>I%M|upHB~Q!S7k>y1Nv8!h+& zdE5+=-K^hbk<4+)jhEsZn&DaJb_%-C`gXqKTeOl-%8gyrw3mOcMcpX%k1v=k4y9E2 zaJO&ZUi2K$rc0+;pB*M$L3pdvaLy1N zh)}Xe9Vm=U6r2v&`dw!!Q@oX2Y2`yj$vJFwDsUj(7otg_BuZ35fadF{e;Ur80EiEl zbTFWbQr+j&r!YLwZ&F>aWIr~-59vY3N~@(Bm$5o6_B*5PTVJte_vLS}A+K)sE9flt zCT%Y}Vy4b$_A3nEumwh|$F5H)jmcjx&FZpQOlhsLJeGc_Xr0lG{MIw2$IyDwm|-YT zGKF^ewaKZCziql$yL>E~p*^jzAQO-hMa@2w3XOdA{jgIa zS9fdeIVdpa71pQhQJ#u~FBvA55iI%dSf26TcINg?Id5w~w;#9NOERQ{KOyZ?>fzHj z=-J_wBjX;1M^_y#2+;40m>V2~9l}HBldy#Tsk2!U+1a9J$$9*G&xq@1f3ym&F$dwd z;dA}Eq;tW~LB6c@&!#B(;A|qBL@jB6HSawp)3+p&K*CEFQFu*SAtVS26&)Nj_ZdBpDJ;n@!T@6L0v*<9VgS=6YnL zUe=V(?)Lq|&m5Bxnv3H3q_Oz~quG^msahM{!11e4hBisQhZA`~*TVU}4O1%rs|la) zjt4$x;bbi`sM-d!tzeymT{6I+F-JPW};ysM9ws>=ed=P18+} zlXFju%B#P<@Afq78(eT`R3*g%+CJ{cbUEn@@jYF)Ged571Y*u}{B>$b&3sNJF^J`O zC&aL;@k(mCZ&QEt2nWo#cK_fvh|BzNET6K{8xXbV2i#;X+vS~Ni|L_@^>H0@fRFl1U*ylIXy>Ptc^CUM%0m*Zkdv;`?&Avi z?Ltl~76`AsKhxTIHFLysd{AW5S{Lh66id%2i7~wOYJ)dyYb4b*l(eWUNv?OH?se53JUz5#$aV!PmqYyanF@Ig@DAR0E3<|NGKxxxM zS6WnNxAhS~%cnP4{JSx#pAZID9lnbIl>~FKS+&Taik*~B^D3Q~joq*pkvYXF0iVT0 z$n4@PuZ7zGDPN_P9ptF_?q{3RWFjKa2|0S5v8AoGqd(sU)08IBN}+uUiwQL7#uckE zu+I!8w#W`b zDaxqZU6Ol;t+?-qI?rmiUhFLAw=_By6doFU|_E6P%9$RBFtLn>3G&-o%PMD0OU_o{=aU>SIe%;t0PP!hL4c^y`_4Yrc9Gi(K)|gyF&R;(t%kh6vz=by>C-xq1K5i?` z@GP%kPr{;-m$-|Bczh=rE$a#0`L6RZiFs+^=UeeYe^?&K1+KPkiK^iviF&nBMv8!M zN($>9Tl(07EraCwiv3RP{C)dFHPr0&_I7 z{$D5UzuFZ^xMg48&;5EK@qhH+v5>_v-dPSO?QH5-1pAP7*ZV#_a1zGVbVu3C!t*Oh%;P)H_}4#4 zz_qq3Hf%=y1+Ne~F89?2`>Pmgp&J3MmwD2uiLhN@nr$}j^+peRpq$&tdehgcCf{dI zCl6Xo7N^}f*3jBNcJpPLEVr9Z(fl~&jrsAoIb$+PbZI!aONfTkIgxGo;TbM)o@|~A zE5F6Vc1zImZ+LMtvljXggOG5W^=~{*dybeD4|@y4-sguMRQD9Ly8ZNTq-JX0$cc=2 zl|H22h=!{lhC}+7jHTMPUk>&FqZ794w2JLwEi!pry)=WF3=7uCU_^brH&6Gt`fj&d zvqOcOv&K#r1HV4##kF3}eDVPQh$`5ct2Ni|Uj^rW(VWidV|2H7>@H<89$c6#SS%N> zYk$02W%W*omr+bxYUThj;IzE*y_&yoZOMD$+oI@J%Fq|$?L_`Ic2H%&cV~cvoeyaA z+h>h;^m{Afkk^Uaq@w~iQ$*e*a3I0YWJ33qfK<;-HdBFAw-DFv^LTugU0k1Gip5Z6 zm1Evg{=O=~7SeCM{pLjHl*)0xWAeXvR%TK^E~ZmV#3De28YbglG-3=S2`iw}bWu^$ zo_^B|IUno$X>S?rV%kFz7V~|g7gt(Ad^kC{2;N~wHhty%@mhBzLe18)PE2hpO+SPi^pA5)jfd+yxya+gagMx`C;O}AV&2oJ!Q@O+U+Unui3d^yJk69shH%qPXv zmTnA{Go#%ugmCpecE_OuC%lv1bO0SDA+F&0_K0bSbb~0#kDge-^hE~JYDoeEmF@{MA?CP@FeEcWUK{ zRhnVKWzNqQIkvi_ivFq$h;|jLw8MZyiC?WTV(?#kC+3XAuU5Y`#Bj=Wpl==5(-Ju` zSZjW~$0e^H*F&Ys&}pCyyj*d`M-zfYv2fpsJ6pFOZt?uYS6w&@@4*iSl)GW}!#`d% z`EA<|B3ldfhi^Sz8|rB3eJ+g7^2W@E>F|ocywG=<&kwWH#1&);go0zx9Nv|1P2qM< 
zF=Ggu>ZMvO{lL)v5SYSg?=EqFC4;PjX2Uc5+qRi*+7BlLml6@R26yY2g7HXLk5xME z;dTcholU1w?{%XDg&7I<7p@m!QB;aNK>j_wc=eF0%1+@5A9KWGp|}v#KTv*@Gjgd; zIyVHd7d1@z^er!{VTJY6Me%qaiLRhH2U*`qqi-z&jgj7{Q0Yah(a+~N)ZQdx z2tj%IJoUk9Cu}v9oo4d0V{N(1MsLHO+?k9gne|YlfDc(tNF;64c04*+P%E~@c?dyS zrgwSrIfi?hyGf6e`uy7VO&?HFMzvUU=|?Bb;1Nn;5wG@)+^YPrR$9ApH!*{lNBX~1 z{1z`EQ4P7?haEO^OgsMh#xYEfv93U^zpy3CJ1PCS{rQPzwm%+R8PPMwNC6M0glH5* z-uy6ZW++P)AO=dadpD2pH|-P1+C$OwiB#Sw{SKD)%Euss-VW`0u$ot3)cpt-TCJ#v zJcmGbU!%jo|J8UI3HyK<;gchJkHu$Z1eB?*WzYT84_*yd1xszw)e0TJgUvD-P0O_m z6RosqIVE11h*(ProJi$yeZry-A)X*$7k&dXa{Jbj#Aq@HQk50}ETTK0uN!~m$~j=Z zqxZYsq=0ca6Qy%;aN-A7umP8^G#8_)fsM{Ebi@-=4aNMHsf%Zgr#owk$u(L3-QVp4 zgjjH4znlQ$1YSLkBZBZN99cGglO)nmQAUKku&@C2QZRBEI{#0OmdBt08H}FRa>rb2F z06;6(9DBqsN4_!}w>xEKp*15h8Wou0N^ry^+YMK)&=J?~b`(ukfTJ8;GblOu`2VE3^*@iZx~ay|`DCH@-(iS?`GWI!P(dGQwK$b3-aT)| z8ZLE;_we)U`|g+BddPxALrn&D?yRSd+ZjNiZu;+QBy!Cr-V~dIT%tdZDB6F!61r;q z;aF)jz{0z*tSLJvmH2BuI)Rt*x_jm-LgSeY&*_xHIdErK7blq z#_OPJ7UbQ?d3lc`l;s|Mck}JOnfP31_Cu6qZ)v1N7@t>W z$|k}wrBaoymT9zp7kX^Pn?t~s$5UwT_lKPx5tD!)!{j)~SpGiCJBsUBei#tOJQLUf zEAUnW6yap*gkOH-$uK#hCpJ|SI5>6U<|u#Nw-Hb&ZH{RhY$JtND)pVaFIjfvOTFg+ z7nZPbo#iY;dME2UTz|3zOFiBpajes#zua4u`mNKp?R248GhuJ&0LwlLX%KS&KS>M6NfX<)1M-L^n1fl{V2% z_#pKQ#RCPbvwIu2LkywWpN?K=!~zbE{f*!UDse<;tqy^Q6SnDgU!K2==zP~x3a|)^ z1=tK3o=#ADG;GjUYrL-k_)z@{n)a1a@qsbjEmf74MT(dL4xbd;w{d@q)4aVxb5-V2FIVzq+jLJWsXF?i_ zqD0Wn@(BVM+SQ1-S>rae)nrnyGWdOVC8)VFJMNF$i*1_rv>peh0Z02MTPvL1v5Ak; zZlX)x*Az{@m1=KCZa+t~VOL`a+(mF{O2eq93;q(OEUC990Zn&%qhqgs(GJ%b=%0MP z*ri=;pw(p8?c1B&x^>DiV`R>^nxDd|wwGZEV!3d~r8=22bfseP_((<^Pcs`s)#y~K z(`qxFqdmMUvM6O(I|rPn-Zh<4=3$@MzIpEZ!yc>Uhzpdo9Qni=K&jE#rbLgJDa z09)*UuwKzQ=M`jH7@Ngi1$Efd3Crc^L6amPgD-Xe5db8L;rL4_`5v>{t_4rsBpKd! zi|=zDeJ0izJM^OZFZPayLJlt{bE{nSE}FJeRKfcIjx7o%vBBp?JaoIc@BqA&XY2+5b?d)Zq z-D85JIY!`4Eta(%w>^2(Qinb4!cxk3@YO;Ze}*#RDxcEsV5h}iaUop$K0lV^ta;UR zEsA|{)ft$p_;zRp z^lHW_miP~+f?LO z99KQ6BjJBD?_eB%r>Qc?N*UJcec$#=k8ug$Haq}*^G=F$*tN-ch(s~6tBIbGTYpr+ zYtj}aLgxM|vIIxR?Q7%@tC`Yo{7?NTFl74y82lCZ98a0)wBgMn!37t5ufyUwF@7;l zt~zQCi~i(N3jehk%D_IP71$TpB7yk$f3pB4H+V}lIh<>q`auqQxp8!S9;mU72RaK< zd>8U>=jE(TQ=sO7vf~~kG6%#rXm(b`kId{>`nYVce}I46*Fom0e&I83#yXL;Zs^#( zVMYZb0*}e`ax3xe)`KQ>fI9Is+2+Y}a3>CR~0} z$8#G6IbQn)GlG|Z(xVo74&~jOv`|jhgV*8x7{&(EP7F*tHOM@GY0Cnl{EQ>;QOCq) zf4W&AL+H~$!JO1IWoAp!&T=;!)EFh^L$Fi=ZAs(DY#m*%Q`x!bq#JHf0+S(jA<**9 z>%8aK&I9sA;uouNb(X#=+xvm?a2Vp_{HR_hY`sm+?Sqa>NFx|InfjD|tAs#)%iR6- zS(f;oc`LB$duzO=^6fXU9f)(kf2uc;frM5@Arko`+vOG^3VTf&(AT?l@tKVyG7nrV z73+6t8cnUI76U8?_jJCDWw%`ngnfRM&NmcW;3clncG|ugb#;t_GL0svH0U$!c zk}PkD9j$`2Dul-I#f=J=!=`F64I_c8L}Tbxk=)?Z2X|avb+ya9MB?a%*k_}7iyC5k z(hh!Jbj)OUAPO#wtjFK}B3_DpV{#Bs67v1j3Pd`rbN_S^iy`OddXa%7oiEG8fdJJ2 zRTl@LVaa4KM_A>bf(JmyR8}dTQtcy79ph*v8n4?A1^fA9{dgaY@mz2o&{+#4K0Gec ziB?<+M&&%d67cJ7Qfb}1b6yKXGm;%iHh4$)-kGynu|A!?WNM{c$)w3Xtv6D)7@zgG zy4)<(HV`G7RiV;uO2cjO3{}u851XXhSEoxOU=&MvUBZ~OuGO?(uESiY=M+R19{P^D zT*uovi1s`1V!?u5uAh7F7zZ7Iw9B1VH|rbDgs;CLX3;>rB^T#MF<>59ayiGdm$mdA z=SK?FYrO(n@lip$6Y>7m#oUS!l2i+Ywk|H#y8XaqdluXiWP0wpe5*jvGhy5Dqs{%C z+ghUA7y8;UX$*iW#ro$15&n#fK!^tCOz}Wc+q>l;xbq!hlM`O4Zi1V)O9s{gfCC$b z3fHE5h=JFHP)LvjIan`bXkC$mbfn&s= z%UAJ3CE^yHE}o{+{ssW}p&gV&i{}i!-~#p5HzjpCzK_Xw)?ED0W>bZYPqkeww>I-< z`dk^Xpsg>FX`)bM?;mV9^=M_jd`V|O$DqTtmmJ`Nn+x7aGc?!upnEr6rolFgS~Et6 zOdi_tCouSapvDVweQ93|x6Rl?HTG7Ps!$xKQ5-vn1_3nuaN76Dy0)9wozVpDb=bQu z?{~R+oqu9WoL)7!l#`U;;_s~|>PK>Vld?yJ5sh!Qb(nXHYw+o>g zW#UJiqr69*n9qMOL&5r+t1oec00OTWbyxTRcfM`y+jDzd6 zQ;bn-T?Ip-?c89ZJ-H};)E$yhf;3Nnb&%s=7MSEta*GCJ8!f916@LB!U%+g`y+4|s z8B2}OG}kjuMBx96&0=#hGtQ*lSH%h~f)i_7hxvS6&nP)F4X|KZv-S{_xu5gT+V)B> 
zz9<6%x4ovVal@31J`Yk&<4UsBMHVwHe`;WG%|j;8Zg@$Qs9CMS^zSfGPv_A|;!Q}7LW7_I21GpyN~~=4ZRb~VXSS>AHnKN4 zBX7lCU0WRDZTZX5{i>2sE%c?mjBv&GO2G&heGtnLi^V}@1FjE*FVmEU&++4wfrlA+ z5>X!#^a|@+Btnl?31Ed8j<|uD#$y-)WMfi!;sIH&o%G1UF#%t30tTk7Kbp4QIMXP> zq~5rr?LZWT!3HV26`?oBE3CMew;D4n!FoJPeG%LhrigrU?lqfS>9p7IKJ)GplxgP{ zdvyW2hKx1Ie7=75Gfy(T$c&NAiP_gOLVu}=WB0adk*$LYgiSX(i=gzqF_PqgzH##t zzBKf=^)FDiufyL_zp+w|7zkb{74h?So8iJPMgE(4S#XpBD#vpyqe24Kajyrq>Ig~9 zLg@jyLq6nnJ%HS^apbz1c%l$Vykvdp_A_JNS7EM7DB;wNq$=&e{?Ysf2;;fPJq56Q z9*ZA*w@K=J%e5LWTVy`Ur1UR-0UDomW9-*I zC!8`XI!VZ|xRrWDJ*iOnhUSx-!}6rtHSq10<&9*U z;^GfLJ`-Z_bL{Z_(S+gGWSb5@bOSA^^vH{~;9$t~3?Rahq_{1`1Mg}Sgd#%1Cia`_ zvjgN1$b>lXTCVhtdF3Yz!siHYi~;&L8ihY{MHRB+bit)q;M~njT~F#yd5=3dfw|hz z!lKHb;zMX(KgLje4x}YBa(y1VP9C*CO7}coOm1{NYGlbg_ZV5M;oV8wY02bq9J^gM zZB%T#w+ULBDOImM0-~nseI*ez9Ht-? zG@PQQV6&sgTA&bD8xj$2y{-TP_6QRx&xa@@{|r9I*$3Yl+Ok|FI|Q*I8+&tsG;n)e z1i|SbO=L2YSUJ`qdn9#@-_=qH?5MdwJvUT(FI#QstNd;xOjb1e>DR~3lhHNK;5y3! z7W)Ek?9G+GX~xB#Z%qOAJ7Z9G+@UC-Nn|DwJI_ z2)Lm|wrFZlT%6qHjOc!vSaNSMkyxXP5C$nXM$qQz>5|)d$}@i##`~WC6OT*o5a1-2 z>R^2pk6z1RGF(bUJ@-xy`dj|{6mHHa**iJw`D)R8-|*3DlaVo(be{$LKBBQE4ma)} zQL@H^geJPT@m~JS#>|6qj@cupnVv`d3*@@wdg69Oyf-qgpWBFdfIop0gNi%e$q25< z0ilF=T5ax?j|7}2TffoLULUqti2%<4Q;<{unbpiLrsaI}S@9@_+qp^oA{ZGO?p40v zr>7Oko|vZlqa7utg-tFJ6yVlUFMd+70J8xZ`sfTk&Hm-q<~*YTX5auLld0P|5-Lh> zW1&$Souf1(L;3RhB%vouh|hR`CY+swPNSAZ=2Vx1?MI~D`Bq$Jx!vH>Qa>sTG4x^B z!&-0!%gEVEy`^o-sXo{B;t(%&lIUOG?r%T*AIJsAS#w#_vr?5p0YgA9vr+#|rA$Lz zrgS>e??BRz_EpM7%=I(75kw1C9m@CjyPzHYy@`!-X(C%5AyBqxXsS^ACkm$7H1HS< zJ|huxZlu>q=Ma19tPW3y(=lVmnEFTCznWJQF|Jab>kkUmsTx;k78WPRnPB zPtiT0rbADUo`)VpOW^|oFd9MltkWN_MS^AHd&9?&QXAn>+G_U!KU^wx7QPFjDmEuH zS`z=6gl#gfdu{7g2H$a&&5{~tTuk*UgDUA% z_L=hIxl?WyMrE-mHYH(Ia?vcQM4#-DGa!=3Kqj_G!j3T}ARq|iAHXhHZruOfPfZXu zKLAnJ7ABtoJk7Cm@#Er%uRry>`@c5)U%UQaKl=M%SjRAwelIm$CjNY@8@1H@oZ#~a z9)iUPi;k7!UtsN9p*-Iii}Q!|>|gPRF~%LIlDo4{-Xjz{d-m?9PP4(z0tzYwj%=N{|-|EWn|;w*56fzqC`q$7N>R#k7YSpD8OT`qe#c|=bJq1k#0Uu z7xfUy_~L;|eNdjy<6D-AFyhCkFnl&f(47R$vYTNWq36LsGU5+{z|#nd6S;X8l5t$Bx#x-dhiFn;g_h1_}X}^4lg@Jj%knXfY&{!%ycjg($8Ar9$b-nXnDAV=t*a<&Om(2dG z^q7h5O>8PmDu5&pVhvK9f&i4=qfzR(CCD~^>aB#_ z3hn-EMRb!hfn=3IaFdusE6w}9zjn?%R#v{bOOHe*6|_alN6CbZ4}N|t@DE=>7gq}m z(a9{7noZbr;DM7h(BtTC^TXrAxp3nhMCriogY0&`%)LwmXLeIfKMhO~m7omhAWlR7 zyjG<`C+FZmSR)hJtUYh-j{Z`eeE|I*)xb}X7zq{sBBbSV#&;`gk3$skt;t}9pDMlQ z&6=fk#}wWVE>Ce%Hkc=zEcdqOp&x`F{;Qn$2@=8&#&JoPw`bVoJ4$l99I#Y7?ZL1- zNm1_sdmE4AXg-ZN^{+Q_FvC4rP*+^;wlQtwCMeh20u;x{&2DBQm#n*9+&H^|LzK^$>wNr>@#9mxzf(p4c7?eVr90Q z)muArpoctKo#j}*seDJR$V#F^k@y`7R_K)Y1$q3f5DAj%qLk{iD&Jin>s0`~LMyXj zFP!^9RjN^6gtgG@QnfK6X!9<-uu~k(1)azvRXT&W_kaKitcw%D4RK3mT zV%OgE_eizlj&8Y-*|&P@=A?%ftR`+V_sxF)kGpr#Tz##RWea6zs{yUj8Tkl#6TGN| zH!-y~-e1P=9%?a>;5MVhZ7TiO#lmIlux;BPwS1aBm%~G=`K9F3!?XN%wcoV$2Rce& zu`pacdS;ng7n3~6X)C&^6<=KnUGGc(ma{?3y#-f^uW&>+J1fsmi9o(@#)d)n327oF zFHbVQZa;xe(<~rX-}T9KtJd65M*h;h=3UfFv7UJYM29Li8&-Ux0fnLuK^r*6JEeoa zwm+#6Fwy~5WT5^T7AfW>#L=itOmRM{Y!Bad!})=X=PH}6%I1OFY68}^6oK5ffB^l} zG{AS+S$*XBw9jI1sk!dqY$&p~yorf-c6)!pR-oD)m$Ne1EPVcKi^kZwFmCZ_+0)Qy zxn)e|aMA!ka11@lC7cutanfXZa7ZaknhR=WS^o5O>8EH{|1w>J3LYjoT?8 zT`%(tjtupYL~;yi zZyvWd4CQ{wu`Zh}jlkN!0(hQ&*OieUY>hIr0OVd+F{^hO{BW8t>pG_lu;lG~ z5o~-$nS%bD6j%|{+>+*LU$Gks;jq)8FU;@)4%V`KY>PmN0moJy*nABB z8;fy!aQz|e7{6x$-*XC^0HXEKp|Ho@M?)WldUO`^Nrxhp5}o?1flsG~kxN#VAdBY) zVBST%ibFkF)ENw%j8Yp?Bh)|kG_b`D-u^xxT@!*GNuSxtNUtUa8*wIXBD3S(1?s+ zz``rEJNbHHKG zv#yU$$VtlW(R@{Mx0BhP%>RXDl3w>oq}zMF@s~l*r);5NyI4oGUp{M-CZ^hfuf+Qa zY=?(N>Fs5jy*-hKCn=ke@Dq_wMS9`f>j8m4I)^k2&rmaDBXD z=@?V$L(BG++kTzuEw2|C9e)cp0VX!^mxyg5knahdsRkyD_``2EHOjwium7p0Q2sw) 
z5HX0I8q80a@jxtN7m24E!ShOLWf}{pNYi33M<8BLYoT_7$EBh@g1MyF(;N3uKwljI zN>fysQkuP(C$&nkbo~`_qTAY^imle`-Z`0_G{@xO^n3GsBY>{UX|@?n!&7W~SaWIA z@7m6;GfKvkPys-Ck%q%2!9Qy|ls_+K|E?|TNE)?XYVS@bb%5+Ie;!IbaNDEGORY%O zGq>a10ESYGH3)%##MOuEMG#j%c?mobQn7N8hdRZOLo-57`rcL|iL(h=8#?|}*Alzc zoD1;E8Xt@(|zc|8^_iU&T==DAqD46RnzE9Yo3~Jw7IXOZYk5ku9p* z9DKZV-^t6@;JHpd`i@3bD}|MdMw5$5r^Tp#Qe3jMPNhT{c?AQQ_%WKkVB8ns)cmzb z(Yax40f^voeqnAc*SgIC(7CF`6sP3VWrgLUH#3D1Jj|DOq-H6fy=24xx#Ii;KOxnc zI|d%gIbA|BH_IA=LeP=o)Ih+5lO?4`j?a$xFSaghdF8s7G1Aln4?0m75kH z`@aZe5NXk_i}WpOiT?Th%JZ^*O201MPy(HUc*P2EmM3#LRs;EE%A;B31{;y^c0%j=MH1XcaO zhGV}$88}#LX|?ZKwSCrqx_-VV*OzSR$d}pQTCgoxIJ@5~aL*=LMI*@g0QmW$3?s}5 z*<}J)CFu$I4yB z0>&W0rzg+mM_>z=0-UDgQHx^|q08KwLO{O#*gM4G_>K;LhM~MfcDu-5LUteF9zJGu zt-#Bi?;tLt!EH`v#lzbP!I}WW@?xb^E`;YwW^F|?yd1Itn8|hEqFpro?EJkQC=|Sf zklRvq9^~Jm%AqSkc~Ut2bPZah%TKmxIXKjc(nqbXxqEg^i8Y^goi}<{Hrwu-1_1s8 zuXb6zP~%d<{bIu2Vf|+TSBcw5Fw6tmPM7dVFNMO>e8-vl^-Q(b}+e|Xp(y9rqUs95e3z9hIYhdQc4EIn1s*R3h4gBB!}e9 zz?P42*iOdz$u53#w~t{AnVPj?J1eQMq2K7Gi(xiKN8t{+s$h0M3q4y8#R$Es`V)5z z17u9zKi<1muJBtNPF@{W#JnKE5ssJh+GVOQn7By{5YeTYTpruAP%S!YEoWnm076i) z+6a&9a_W4zA<rMDz;)TzX#iY);9#S?6tJLK-ARLB?6Nfl-d}(3 z`0qn(mCXNxXvo}*n|pHJ8wyFu$2-dHEov?`7`O!5WJB0|k6i%Iy=Ij0V+tN_1r|1A zP^K^c09ulc=ONS{z{qgKN8r!qevtwt4qbkG_zLoYT=kSYzd10riJ|;ti zA&SIBJ&UIVrv|lhtuB{#M%!iSiSl<5ywz>ZGO1e_1TGC_e5G5@R|{Rhv>MOF!{OXE zVCfg>1~oRq3<_{~9EAR-a0(hh6Mf&mUQt^ZGy_tZ-(?E5xi1dy%{J5Lz6rj8e?@V+ z*v41KUNj3NmaqYsLD@ybqeEGc=1{Ip$Z@33;;`al8e0_pRYI2YSKw_zZWJ&MPY(3u zl6i0T*j{sKQRd~EYDvJr(6~8AR9r@GD!pQwp*U)wni@GZXFX0?jI50p`97O&#;chi z!b_*K;!7|n<`!yC(L*JjCBAIg65bk5wm4s)BaQsq_qKu&30|fLjK)llpk+#K8^QgV z#FQs{(@AfAE3aJ1Z%}M9BI9bMUk|Pf=YNTeMU@3(*D(w=IttX~AdF@_Mw_?!{%TgR zB({h>_PCU_n`Mf1g#oN=4U|0VKk)0#y??U+zSA@`Bg=EIL&>%3ArC|f8Y}J&8m$}o zL#1P}!+fE8)@j)>0GDnrb$ju@1{P7~J0o<4fU~jpgw5~&gN86g1)E1fM;I@>EaD0G zbXt18_to8m#f-)H`qY=d<-()1GHFS8V%1I9giEALn{wwLP3+;;~s|5%Riqxcti6YVf{Sogght zaHjj zt%Oz%jAg;)db%-TKkN;MlM%sv;eubGNrz@oDu*p&x>Q5#YchnnLm=|a{+JTs za-N*taa~fuZ*zr`q){5U6+&a<5ZQ_Pzn|+*lO2j^KB4L>=l2Kt6<3acy}x<5vWz$|I&Eq5nFLUw-W;dEo@9`+(sY^K9JT z&Z><=H#0oBQ#*Zp>i6l}ZZsMZMIbnP(ML2ZZPR*5oo);u0+Lq`CtVHOwinrjT%N#*I{X|kqX4RW{ZAB`rT6NE z`M&=DV!lmQv!We3;gAcOa)=(`hWC-l8Q{*mo`K?O3eJ7x&iLW18s+({QJC|M{R5d zoyqd)EY073MK);wRi2u!)fh4f&^J6|48BNdK6mi!WZXa0a{W`BRtvZs`K$Kmn+nC} z!YpIGDD{eE&UD?@P~Hm?CxpcR*98K<3MK$Ef}hik@!QE?_Q@Nt^!*={ih9ceX?Ic8 z3LX3yVWXaq9%gIs*XB0EzWw12c|4&RX=L^2cmDL+cR$&OX8F;?!+r}`5oh@N_`$&- zTKj1YF*<29U3hZn75h&DkoyO&AKFr)@ju#F(sq>HjwY}NFmvkS!KX-z1O}=53xMKu z^OOq2m;t81p4$L07oI;bw*8w&#|-zm-k;SWcGC+>^E#W)=X&B#7Vym{5_$v}goW*u zbnd0JJtP2u485~*z7v@4t}&6pIur*br?B(!}@x!*osPoF_bB8LEHp%sS1yfH`0lQaef_i6~1lvRh*{Z$$JO|j7Q?Rh(r zlSyZy6Wmw!Q&N3L*$1AV{ZlcO%UVpeRVAG)PEFr=mzmGed`iw35=G(%oH> zL(34-?_$R}XK(lU@P2r|{NM9s|F+5u&$HrQ*L_{reXoPD7zR5F9|CYEK#$iJk^?yE zPJ3spg+$<~_3e^+D?OEMc9r;XaxVkf4M3kvWxxwc*;f}>xHQ64V&cu?!ZukM!HZoZ z(MB&(Jz}{c*dD@R@K{gSI$DUoeSB^YCy_!o0HnjQSJQtJJPqeS{Y*T-Wi54KHCvwQ zMW4SjyNgbQ0bLWcQhDur|7^^3A&bde{~C_f%Tgw_<jnpp_wh{#1|&^P^%hn>_Sa_&+4jI@X_hq;o;Y<-M9f(=9hYG_5QPGXT zmHF4QgBVu{5@n8{6{jhC$9cR7?z2>^+jYW?aSw;tpS;0chPMmF%zKY<=dSMFbS1k`B$u+9IsnDt>xM+cJXtEiQ+{{ z3{2v~)nO%K%V)}~+-Tu3d#n8h@mz*;5v-Y_$wHgLqTgSy!T9o#+tusWKyDj7J(BlG z2~}U!hTU0zetZumwl7#?6odr0zGLX+laj+;7ukZgIDfadV}<&rqAzy*k!uUHnS!xC z0`~_6#9!VYlduC~$X)T8cO#bQ6Aso^v+t!k>Bs+igg#JAS9n{IDGkBWGT9ij1OjN% z<8=|*g%-&>B;QfJELCWOw#8H?Ekqt;(Af$*OY4E`_t&Yt>k2mnfI?EWmNz-QDHgMF z7R2{et_Vw@#qDUxXK}MKq*LGFOlP99p=0Q=L-P7U%gGgi-pIU}&D?%gWDAV%cnI61#;tCc@=&vlZTv+1Xi?A?z0OP8s^gS%yuz;ZI*o56xv-Vq zDFJm31C{617NkEV_FtCz<>o$R_y{t9tcSYK=(4%4Q4+gN{`~RPe0Q-QRslS_{oPutEf=RN7}n#v 
znSH}0mc8~MX`QjxvUdpVh_megBI%pQC$2zhoI*j9KX(b>aoZN{>kT$HhRl3T`~+Qt z`3lbBi?2FsenGD$tDdxbLM=IeHf%CIu1pS~D?Gmtym%Rm^Sr=_f z&tbFx0jp}jh46nsDcSo=kDpj}+-3hx?>P2^WNARns}m$=HVvQ0YCVFEZf2_*L{Jq6 z5ekGSFt+}6EfUH!uAmp!9|u$M=rBUz!VU~^Y>!IIHT^Dp;&P9Uev=x+cb>1BQ+k;B zh#oDLP7mu%IseC(0Ob%nBCyc?XV0;f6byiKFVxD#JvlusO8DCx_>3Cq)9xZAQckZp5 zN^!RkxP9Kme`aSgqnop|25Fy}ZNFOlgRJi`2pd}NFsLC#&TUvdCcD1imq|$-m2OQi zKZgIedyfwePg6B}|HKB?;00hw6}SJ9lCm;HCB`Q6W@fgm#XcwM^B=GlqZX9RI zE%rLx`h+&UVJ3&s)L+fs*7GU3Oi$l^wNHHGyX)w*1Gb*j%=oQ+*RQW!-D;LuTndN^ zv3&kWik`vC%H(eTkrnmF^>~*YRFW^~$YDlER=mlb_9;8ty;oxicu7JFg3y1bsek;R zF0NJS9ub3795%kXQ;ZD{sN?vT{!lSu0Xip6ZMLWO>xHNjUr702gFIN!;gX~D+9J0O ztY#V4D~MrfxT|AywEcQ~#NYZ4z4>M=!g;0sV(1GA@#SUA>ABc19qI&~-LYh0H*^yP ztgD7VqbxJKEm9h<4U6$l4AU-#j?uWmvm~gdvZ^}uiHibvI6#nE3x^gbse^}eRPNVf zK+Sjir8qSkq&{J`y}cMT`VB|G)-3`behvhxH=?p!5frDx_qhnZ75#!A=NtmWw<)P_ z@whtaXDJ$5ju1>O<^cVt2^2g_u^N1S4Y+|D!f7+)RN?xz1%-w+2HHgi-=|}(>+L4F zVCV?=2MFwI7p>+Jkn1@0h0G?nb=gBnhl{Qg3>Snm(I~tnjbzg8vX;8kAw-@zM8FFusLAZuS+${jNZ>th*$f~ui(~g9VlFp%)mz!II z+@)x9a&YMq1*{iUuN}Ph>Brt&)a{cg15<$nSsjr>0woE%_M7!j&R#r$h$<2@zrRl^p>m?k{+d#IAeOQ6 zr>9J2J9D3K#UqVxoPw)Vp2Zw&ENWa01c=(m%?&q6TdC3=kznjQ1PlKuX4RJ=Wf_8> zWFIu8-lyd*c5%7|s@Awe2jc<{W!{qY3K=qNl;_t9`o`2X2uF6c8k+!Sb}9(u>fQf+ zywb1Wi1_4KdcM3^Jw zR=@&1IG^#hsk|d*4c&b)6mdW;UWvklI6{y6vr9(S!CvpS_Y zdRDP=0#~JSidZW)l@mxsg>Z7&#ocOqu3vY(yP9-~>TF2eKnyr`lT<6i{-`c}{xb!T zjuSmA&D>Op`Cf-Hm#Fx_Wy0snYoat&Jv}{04glFR5Nvcb82<`!4co*OyBVKdr+Q7J z)G+7&VDWzE^JLa=tkk^A_-O2>VM(bDUWEUK+&(jn(J zS4RpP+BM;8dz-QcdgK0-ooQKS^PEIYYXf8tRE7%kcLWUNA6(AQJALwq}oaFg4) z?cJTFPp$TR&=U5sMq5R@fg2Fi#@l4Wjdfs#VzV7BdI$Vz52P-p10{b)aZ&HW08!-Ra-`iYc_Q*|AVA&tE#_({|r za8Ye!y3G=H(mKrepq{3}5gr*`qJLvn0EoLb<$F>J*iQzCV?l-CNc!L(BW|5aKgf=E(@4gn`XwNFZj04!Ef_e9v5A_=&(UA`8%;=}B*w%N1=#!*pm; z=TbA7<@mWS=%r?Eld@HYrnSjm4oZzxhh8qL#)67_Q)+rhKi(z6CiPF+LMaTzp z1M+E!3;(h_c+i=&&|6Jst4ws&ijb|y(83!7u>!6_)xKTLeE;MQAVoF%Fw3eAX#C9f zx*r3sLw}NoGzjoloxoW06t3WKb6Vr-4K^zb`R4D7qXH8oU-KSGk1zjxl&?r*#Wp3# z=;ha~l_V6`eX^}_X$|R0hEY4GWzH9|)U20rYf^=&VuXv`e&m*8wq1M#$ZT~tp8T&j zifxW-3@s@6{dA+J*F&|ApLE0DkE?rHr_!+8CkXllE{zlSQG_T_xf&d}@#TWj8G2CY z_ECC#21XUlr8(xEZwTyUlq(G5C)~Pxt=N!#bTOCJxEwSs$sB^?*gFko+ZWUBX%qgr zMrt6tKfT2>%uz)wtCbK){>wRVd35pNt(sU~dM7WlR&b>T%dKG9olOd0}<^*D^q_D2*3F{Ze0lgY;{Lp22FNX9H}ZS zIr{!TeLeU)+8e+xv5dx-^9v~WEma5@A_5p$%v?eJ4{Gc`P{VKR#Q)bHzZ)&4%aj8g zon)7D(H2HdoDJO61Bh;t2W3{Do|$ukWRSg-QSooj1td`xnZB|{5>c`+nJvr;2aSZS zX${amZgdQz4j>nD-tSr}YHDwhjewg%K%#N}mKXt}dD)e26rRfu=N~v`#wN>M?I36$ zrh>cDbT4D{4FK>=d+y@?GAikffRyL_sL=b8;T?Yf0B2?spUm0^U?U28Q5$CrOK*|$BL)2aclc z$yY=>00^NYJcaicKEzH)>%*9MfBDcYWnfCdDjBP?!gOLu50bj%a9Xdb9Gr{xh=~8{ zkq976_wWt4Yr%E%mm&HsGCw5&)N}qVhp zda7R@^Y?X(bNQ^9iB-z~ynJ6j0wjOIgY3VvKEJPq0CYOA24rsWlfC&RWc@Y^pTJy& zKMHmEbxJOOUXvDV4bs<7m+60*<=?)aRvSFrC)Fr|e;$}?!9Zn3zu#rR_|Nb5jSZ9y zXtNIdV+`?^2>{ZU9&IS>?cYR}KlW+BV_=zTVN($M=YhFPh^`=4%t!bxzpL6-ZnQM) z&jHBJlv!vgVIV<4Q&DN)2F%@*syw2j?T*j%e|{0|ERJ`aLs2l^-^r7|Z?S)V?SEbe z|NHF!#^rzO<)6>k|AXUlO-AwH&t3q3#+Cntfd7Sn|AR~Y|K;+2ogkyv7BB%{g43-( z4uhHms#Hi;WM}`~2KvWlX}A`Q4b6dtrEZUQ9g7+RQ|FB42h9u&-yQ~!-!V^mI_?Ij zc3iCYIiY~BpJ$~TijS^K2k_D3b#=RwTr72^iB2#$J#PQ*q3!Q?6Ip3}=eu)xuM zlkJ&0^R7LoC`YoM@J(qLZUri_60~6c{W5jG-O&hzDN^s+`qYYl;7bi~|g=fr<@T;FI}I@FWOv z*4k1CRft>??{K12{V+8+n0xlPn8)jtdSRMS=HGu zBdLq-+WYtIyshc+UeJg?-CCcnJ)5mJ92giNAPsfB`d^^b6?80F025{Jnu99_|8csW zk^&6NPx7y4{_9Vr(_Ug&ocCRV{{_Q#U1C@SJ>5U!yjz%;7&areO7S0mH{okt@NiuZ z8UG8;3#0`YmNu4e6j*Tn38(S`Us^aUH29zX4PaQpOAH(QV432-U|0+AaJ*VkgnxbI zKV$W^K!9NbH9HI#{_%I?eB%HZ)`IQke;F9rOAJe>3bXw$@0NIpVIc}~|2!~valp9Z z$+0^B-P-fVs}h!ihfB;={x1Xbe|yqzy%A-?C2Cti_dDB9D9|lc1pa^8;k-vNU_+_4 
zM>C<09yxpb)0)V*^k^wO=HfvQkdXoH5?Q(He`b3BoDYdkO{I3(w6?Z>Q0M6?x?WHU z49Z(Euki0++gYNaVLCAb>P}2_yB0N-lB1Ev1)9aVB@hepJI={~c661S^Sz=glpb4E zD>q%)B7Qfl`WIuBMazK3V^2Fw=YfOy#gFd~s$HysGxZhV&E&xS%3MHz4XtjVk#Yx} zl<7bph&ONzAw{yedd}(_`dVuK7r42PjC|V){elq7Uv)hXZds42FOHcu%NKuph;~c2 zOnEU}SRMf-=9T4U#&n`jmxY5k*wGQk&ahP^I}DUs=#1w&C%Lq5t-S{;D7y9` zi+<}XHn8>S0XvEhs5yKfJU>(CHGL%CQ()XMBk2%?jepaFE#k6TAsf;7B;<|v?i=Oi zQ2I1KE~m5oTo0;6F|`1|z}2L^(Pbu3;jy0*0S-X%?0X>uPAe(y9iSgPMKrxBFD$L5 zGl9GE@EC;6clOiOFS}{jO^Ly~39&pl(g*K$&P5}=(v#)`Pjw1y3&eVwcO~@A!#Bx} zfo=04Zz=t1l}C%{wQ@phcSDtlE?T_L3|6s39RNrD4v6E%6O|5WtR{NjYSYS8&cVf1 z_O^8-Y%Z(43v*3DLXHz9=4!g-HjiYP7bpJM`Lqq!Bra-S&xFwUlqJ@O?yf&_GJH_Y?hANdq>uK=tGn@XrA&z{4S_b2#6(_^ONf!P2uL-YWT1a zr4(w-&WWf~!tMjSB*(?EZ43TX!#UG~`r5W(9n0+g^tD;P6Z9C!Pvbe@oK9$uVW}Y> zURF#IcaNWRXfOnxANhWB%Yg|8VrIy@5E|ddzUN1lHF~zG71o1_;Q9sjNtaobPWC^8 z&btA0y*rT<9;}AJRqeGSZA0*^d1gDGa?4)o{_~6S_ic9&!?S?ZefGfckQ?PKWZ6Th z)!O{f=|_{mvoW6bd~FV?&juyn(9p%%k*33=6;dME(dir7qqD9rk+{X%n}K-ZJN*t$i?H$bGk0z{pX!D)=JvuKwJu*+Md;I4ai<6z`udUaohk#&d&t=D`M}tCd4#c`2sF2Jy zZH?Fk6F0FpG5rYC^(~c;8ojmlyZJjG@Qw}rrFYzY{9qwFcoY+HVzg0RAO@nkc~8YM zX?1%hC920o&OT|&lHCapC{MMR9Qi@VyWsz=l$P8h4rHCE+psa;d@dpnGP*qdOe z%CvIsy_iVLCEw)PD6@dJ_-j`84`G-2q`rc-sbcbBZq;l{jc;O3bm!?n(ms!q7wRir z_c<813=&BLO~=yBN&ket0kOn9hPT?sEiKJXV3rTKPRv9^fo57a!>>6ui>N7kiAToF z!k-MNFz|^0Ox&OO9`#7+f$KV{M3_DoXR8UdJ|`KQOC3~jOM(Ex0ML;hZV!x`qc&;T zmMO0BHI(a+g_iK8Dd*mDXjC5Do<5Y6)8<5Nz~VFtfqzG+O&pPgTZlX!)R2aS4y@a4 zYHvzEN|FV!OdZ%#Nvz##d_k{}T`-^H9+C}PIX-GWPZcYDz}3&@<0a;qqmM5`&G z=FmDRVdRN?pBb>OJlF-}?;d-Ox3P@s5%2YmbPD7!svSG1+6NE{ChN~0x_q&mN*elV8eNdv=P=fuO+B>7n3DM`&XLIu!mI zSu4q4E7q3h7V}>AAv5Mv!oH^X+L(j0v?NyHLq<<&3Gk`z-|BelZBI$aIAXxTlhH)v;ed|;Cc^c zG|39`z7E3`%1#eXU^+KhY>V3Bg`#r(aibL>4)&Djwj%}42fZw7SA6&Sl`xPF`D>@3jErDowqe#Q@gs^^hTLi(TjiUn=xA0kGc6^L@Rui%S?m|v7w5f_Js<98 zp)X@FYnGU)xTiRC5Z@*63Ij!_v_Lypf*@1Q_J%P#(LpmdpRu#jnt^rrsQKEWVCwe$ zl|*n+F9GlkBZ;Vh)N=F1{7W))gz;Ruz%rsmRDNvqMO-hw@YG!D=yg4!0|QX!jSF0E zUC!;g( zBfX-ZFSKASDO`-m`2lNbX!>b$b)!#`-}a=_gIB&j zi1uz2L?SK!bjNqpseALx&;Fcc@nEXwgfGXu1NE9AacZ>6c?BDyP_&dG9GOcb;Ir}R zo@c2g%l@l~bYw>6&bk4(25LHq8UL=$9Cy@U1eG{C0+HpVzVZr+p$IThXPFVo>{uTk z)Iab%tEi;R^)9 z4%in-1F|BeCcuK|z0OF;lU49aJlinm=)nz^y81O&%IDvRfEnEe3j!TXx2rZBh3Ta~ ziA0{sfNRxTzlyE%Zh@Ap>1WUc`~ZM5EX5Lx^i#xK(e;F8By2Q)g7{A$rl?L@TjXJg zfX}{YP})yKSQO*id)2wj?75Tjlm`b<*0^s|mr=4>QEczFug!N4z5BE+y)9@3v-KZx zS&2eQ=5OCX&(S+RPsM@5-34NdF;7VZsDwBcA`cq_WabJ=`L=R!;m0R5dX&y&k?FZJ z4RC3$L(9#Ha#_)?HD?{-*7a0IVYKLUNQmxSeRtZS-IGwZ?`*$s#vHI2eJ?lTf+&;G z;f3f?8ey@sxWl*PSk_*K2%}wNEpDs4aFmQnjR2WCPfX~IH#}sDb0S*A z<5d}h@bEXO5Y&r9Z_gPZevMCLBZ^tP)}${^`FWmv35EpJfveS6pN)W-L9jMRR#-FJ z+v=#NNlB)+4#T|=v)XrFxD*Ww$X>G&GnDFyf&RjCg-EClx_WfF&i2)4Y-F;j?f5#r*VkA0h2hr6G-b1(59TUj*avcmfi}40 ze`jYJv{aD^>ecHU5=GS8Ku4w^Mx(V{THWrovsrpZ$G#|vY2TAgv_e>640hk}qo9o$ zfgbPdbnDtqmi!RmL9C!8=|ZfcSw5ORlP}USMmgsx@Ga^o7a&-Mmcn$>fGi>CXASWs z+R^A6I&E;yVM!Hn;fD4NnYVBASK-5f-=o{N>Dl?`4a>|5n`!PafEb zzL!Q=ygL!KYhiR8U6*{J&b&2AfYnl7d*(D7pHi|HpMgr~c%Bq#BL|M1kfKcx%b3yuB-=M-tlu_#waJ5n7r$g zq{~aBmcZ&MydE5OvH>)fN`sMB>CeVX?2l`tCgc>vdOy0$QwU7vy(pjigpv z=gmIru0&pYin=BOe6v@GQH!A-S6lqPhVx@~0ToS@H;bJWzRcN3k2tK}5ThsDf6IB= zbyR_a9Y56*_2f7^JaNP;LK&4&76NAYdw?9O)g-J@SAo_Fd2nO`2-c|T*N1yvIB*#HVbx2KL<8Y)hy2G52Y5G!iqT_ z!L@Gb@-Uh}#QB*e=}PhvvsK>VQ?S9W9dm3%}weo@5#bIYv`WdBTqc=-_ za`ZhZ(|k)Fj5U7aZ=h*nvRYdAI5zg0$pU0Xi_ivr5BkDWoxdi80E^kz_y`AWn;4pv zdrHRp%bfRPu<^l(g-q>riJ%dBjFi0SmA;@HTGUAj4N=daF|gpt>SNs61s8lyq1N}; zgg5nS1O|qNPw_>i=8rwDNxHA}q%cr*6pc1!3-4r~gs5B4vL7Z>JnXUI3(~1V3Km;v z7D8%*eQ9C+2K?`F4d?M^tPABFy2ty3my*x{MRh57l0_~O8skEFF{Qltc|j{EN2z$#<5_?!j~Ps#Md#0ic1 
z3dY@v&pdtx75lLx68I^ZK3mLdsq!d7HGf8d;{Nz52l0pPa-EYm4(<9o*rzJ``1Th5 z+#n=2aXP)MdiHNVUyLrafK5WQ(qj+XmB@U*l-3T4m3BJ~&Axd9%LAYHXW$f|54!r6 zoKJ!wH^TRPIf>q(j>!UN;dM=Eh6!HcxmwVbj#yUWGRj`8L3yJt{10AjP0MFC_028` zU%OZDrj8mohu0ue+-IJzFmxyS)kg`vK^hBKB}IriNHGSEc#uH`U%2-VU2|OlKVE*F zQiNxU**%5)qe@ZTfu8#~dGnC&lXI*N+u}M4?3I+eh)7CaXh|}Z$hgj9NPdRsKEtM& z{Q!eq$vPAtm-lSyvo$2m;3>NKmeXf)ls_$Eq^PESk7luF#f`bRLx5Z8t)I5T(WFC z;f=1NZ(KGKqf$<`$#iSSgmoDh+DUktG4lP?4jGblLg=?to{Bn13^>k`79@t9RLoJDIj}xTwJz~YZO>oHtMzPPLHWe$tn}nT zJg9-I%E{<`1a*IVzJa`WZ>9o$Nv&7dkQ{{H-0~tZDiLb8@shG9Bvr;nAMK*QH{f%bcDTelNsg@wk&)A=OuGxXAzX7$OsttCgcpauzLBqXmYxBq=00d!?SU zRF7|Kl5d6nKrnX~5;Kf~7)j_XEuBchwzF!*+_eGtQr_@u^?;Qq8>MpCufX}3vX*mm zKUuI879M;froSWfZSxl8T|XQvLZypw@umSQwDu{ZPlwCBVHKMrdYx;ZfhFjn=FLGf zW4WL@b@VeO`0(OT{Gf{k=Fbo9-8ctZZri?Fs=W~HR;TE-axa!bA8-}*qwPEh%Hksx zFV*2&UlZiMExy#?bz_U()RbI{T;F^Oy9eU<)JjQm(zm;5(Rkw$$mKgF3$TNrhc~|C zOVr#c<&8-&hrd4jK+ZZ^H$fq2^KJ2QzBZ?Onj?QZTblUR=X+fyrcaOQD#rP`w-o%! z6a%rl$`y0TlOb@BX`6HII*vA9YU+UPKK8h1xkLY6?QP-+snh;2{nZ=HoZ`tMf>;p0 z{R2FQLf5>sHxwNEzPf2C`KsR}Ror^g2Fs=@&EwTZeDs%-zf~}kq*_1qNPXi{{=>Vd zqw~;&6(w@3%ut0_^GK3dqK~KKHL`-E2nPN z7w2WU`uoZo(yvE23?|}~9i{nZd;$mcM`X++DFipe3Det=;Nim@e&pYHwQ09ar>uM>UGA2%fx-B*x$|BbuE|tC`nK)+puQGW8Y=c)Lv6QdCR^`FjPyI z?liRk`W%hlrtHiB&z=03sfJuapcQSvu@wS_@iXe17uBI1Od-Z~HeQtFQ=!UBOZekd z%UYeJ4`Azi|F-qXRwBy5*8fpodd8bpEvvOZh&WNFiO>~j-M=ba1?r=On)>wKig6lz z?!&&f*TT&bL;Zhu)`8l#2`Zj0k3S9q(^da2eRlWpk36F~uK zit8x#`M?Zt5oGjbMn}qTh zGXqZA?@RCC0txwaZfIR-akf06uQn|Ds-C&jdjcjCc%Ihhe|`s{9bcLDlqFJc>*8$N z&yIrMCU>`J`H=GY^w`#cEiRX}^2te(-sGQ=of-aSu3~M9MXc~-^O=bk0i%^kn9jpKJM!VJTqe+wqMsyLB|Y|PJ7tE;?h?I zM9hkb0{u*uQII2Ai}JWY7D{o#Z1zS^i&<=>7(TC$`FbRQ%O%S(&aGwvo6w!%MiVfZ z0vFcy*T3#=%T$l|_@mQJ00-vH=Ni$?tXlTY^kKUTKKgEWLei3hKj*91YR;nuSWrhC zu$73#CX_NGtk!rt4b(f(S;AkAA!OEezQ3h+UQs0DZ6a~k``DObj*V9Ae<1szTr>qO zcm@D^%B#@U2rAnfg@WoU@WY%+2gU6xP>_xvSkSH2Vq?|C7zZH$EX@~AB3ijO=n;fK zT3o3F%x@Lw3kc0a^efVg`qXM^Pvg88RD?V9r3cig1l7VLAKR)_izDVB;QeQI@qM_J z=&zk+(h8)cb+qCx+Fr1$!P=@2o?1*T^LRh*PPRq5${RRg%(8*vwc2#~;rxUG$&`n*4j7EZ5((H99jO*~A) z4d6kacz6yvvpQE=y|X_*J=$_A;HOyacR(ep*ecqDD_ic)j9E!X&R8g^!yStOsb$)? 
zmX*TA5;(%__XMyryyjmfl^y}^e~Kcjx^!gKrJAnK9N$CjG9CBR!B!fyVcwwwC=$?M1ywY<47G)>5d>f~++F2FU~oB6zGeq=?HuU8M$ zhHE{2(rWL!udwMk`|K{ZSHxGvtOCZI?ChfPvM=sWZJ8QYxDaT%?9d?Tf;&OqIBwYz zc5fYakCp)aa%bJE@OFJIe|stCq#~JA+t&l}qtp43dMBMrgW2?jtyHwfmgdz|B{_xY zhK=I%Tynw|yXe`R$V6qG7j&BqLr7B6=jmfZDKrA1aJl`9gKe* zG+PPFAH6Ss$X&C+r$5-Gf(UASk#i}sgj1e_o}Mp+LsCZ3ENYQu?G2S4GGi&xWlqR} z_q*ekX$q1xVt~Tzf;#_bCc7HWciv293<_l}EWs;Z7N}_%A@h$`LR zaX6%AQLSC7IK&VdhY8kd0?%y08(P+?-@NutQN0Nd-m6w!*AN$sof3puDn?MkJlcfqRO(7vT=@uwV|DFdKHa*XKVX>~zbf36tQ zm%9y9l}iW+s*^w5I5po%i*&|^*yAE?_##+7bp3SeC7}*nP8T?d!r#~{YEWKFp-9Rgs@~yo2uOT5+@-3i09OyIe`eUSP=zh(a z1hQ?upVcrvu5?l++@Av6f%l0ZJC;3`w4U+fSF*<*;hpE2`OY z(y|WM*(?^C*aNgFs4;BVUOckDutaG1hPX0w`XL6IXP_#NHEpyk)1V zcyuXkl}+NOAYkxEy8G_sAiYgj+c^*?EepWwr zOZDCBu`x%dJCKEhmAvGiA(JdohZnkM8p?}RUkz=uT_qyrqgA#%{ji|JPaCg9;S%#b zbjfzqDpe!pChZSMg%B&>dFW~*2x}6BEUI?J*gKJkda2?_nAtFTCDr>`99+fb{S$8x z1oa|%Ucb`|((jx;G50sk3}Um!zse$lEVyw9A6;@rzE7ww@4q-+=LS^{oeby3ed#-e zl{0G=Ct>_OJ9hZbg=2n$!r(OJ50IaVS!MEU81e3iIg|!u&pm6r{~0hsI_QvDrk!#A zY_fuLb@lj$PSRU(hMKE(0(fGtV`iqNTn3?TF(cJOjk)I-&j75L*1%BWx9o`p4XXyG z4I8FC2W!)`p77A;Xgn@7{6I4@aj~$1i2u2V9GqUy)h>h|p#=RJ=s;V*&4N#ZIq$Jc z$db4fJoi?gU?fsF4*I@o3BO|*nKMoH;f-%h9z*Q<9uVJNQVLqWwYf|yLfz=n1He_L zmEbN%Lo-0JfUXp3T1|CPRy2rN`1y31;z#!(7yK8t{96x`LGT?$>o3#w1(p%cHzQw$ z98?7zi_jGCqw^N@9qP<$nm;`Ll!;#(f>$q&@Gz7TgBQ5PJ)g9z9Ov*xbqhNU-B)TC z95pbV#hZ{WJ{ZmOYdFRV&~jFoP3jN4=-J=-vj(&2G6eVYTfMw$+2tSEok%WLFALf@ z$|u!$wX)#!gi9yfd=g8uHTnI%UhX+$A!jA;MQjK)!~MgHPp2BnLsg}wUs1|#Wzh+4 za8cK}@~~8e@~er97Z3AEagx34Y5VeZXdMs617zN-w)e)gzAu33@Ds1{J|{ywB;+Y? zLuY|+&J$ZMFHn?b>IvG-K0DEEwF|MRjw_n`>$mAIuNC}Atq zq24eOe%{WK<}wJUH9?fkI47k$G-3u#??rV%KJuCbHp3#83KBtyPdd=Qb*YsiXUnVy z{jGbbc%~UXBfHa$dNQU)FHRaRoGUA>2U`qjI>T^{u9jV=H;ihH(v z#qNVII;Ey+j{=!Ys@-tBD7BB304LltVbifKhx!^UQ-TGV)sp&Lsh%N`7!idY_m*UG zJMH4`5eo~@vG9&-y_R8?-+iBd^4?lgS@6+#THkfUtJqra?b0eml>=(qN524Tjq_fE z)ZpTx2QyLcBcpsX$((PfbK_ZtaljV$I3r|?l; z`PJ?a_^M!V(wg2oM-Z2yR}I$v zL`YG6Ymewka{8$_o~NPM7mH$}x{Z^j?K490qvhXqO|*t<-%e8HuTl(at7ROzdR`=^ zrf%uH`fwCl8I{T*Jyofo@b)9`C1~BOVng?+&}lt6mlnSu{_eB&%(!+uU?nQ~1CxE) zT#g?9`AgB^BLpJXAnvpCay~->zfS4&sa4EV2S3=+8a?W$*-BbzVsX-YfeVy(2|ho9 z3a#b@5ZM)NZ<>kc-179s?>e)oj{jhf%{yq(kiqbDd2j5^ltvAgBb?fn-*(&NvCR_4 zXtA-e%g4eEJ%GOMyO_{!@;`Ji5lD3F;%yx}k~h*RGqsI5Wm1bD+afO`uR1qeKS?(b zJ+tj?&;o3@rv!e)x|h*Kyt?R8?8mb!mO295ol3K56V0!Tf>IC1Oe4$BXmjL*DaKv9 zdy!C@8YE(*F>aY6XL)6PapHlT%y!KUn|&$77-qdtI~S++M&m0c@Ht)I{pyqC%sFp__k};4CN2Lrk~k3UuLznZOR z-veqB;qoKu*LL*38HQh10-w{1w@wm&pohP?h?T9uv#}a_n^FJuYyb3cedp9$R1`XhMzh=F~I%P5&+&6=N1lHs`WK_UgszrMtR^$lY1t(}sqaqwgI&{9XsCbsC)~lD%Fddb7vcUOF(CL+?!M z^+Ys&6YMU3avlte#_QMdu{Q*$C{S}ofBEab|A9yT_Qye&*O1_vC;$5B-+u;tX=9&> zNmQmiCCd_)0{KW?XaE^sH?fAqr$$o%FY~drD%)FNVCv{C2-Fo*Q511HW#KQwx>R}mqEXBPf>w4r{F;KJQqCR3`@~N5B%f1PV z?9>upE9u`)gYm5qm|Z@{!YSCNBxRu04b^SEq}o!p|bd{yKL5{*_=L|M!Rg zjlt!H`rkVEKRO1Aj2L9r4}C~JD|nFMwc>7*aG>};7DW4y*Yh~UYhkoC;*V2$=Xcr_ zr3_EzI0l-K8(pW&-8wM8Z6ulI=UHRlq^Z-kT%a+(a&0#xI#0*4SCb;^1SCQ@Kn!@x+9cd|_zWvqZJX#IuMiq4SPSyHSedJ-g(s$lv4se=O5< zQVfQ6YYGfs^{Cvawl4}KTyzDjwJLLThxgDOW;dBNTmF2?bVKY2?SNh!jY|E#gMV1- zJTBz7->47f$RGM?Yf7nmU7(b9^#*eE1 zd{ z(eX~TBqXAqe+P>$Afx$;!`GAWpBaKITj9UFX5;lR6YRWX#3?_mZ%(0>s0tQ~i{9tf zC40>?5tpO0s?i~`G?~ELdrDocx$>VqSU?d|I zA`~Kyzv;L)!#LzY>>@jE$5p_>(<2_NAMAxine8!OdH545zNj*vd;a)Gn%mEG>=~0K zcL8HU>YMMHUvdTu9-4|Eg59uoCn|NJfabND#OlPa2-FH1gVBsAgC}mClg9hL0+SdVfAn>bkL>7139@ zk0tW`8H~0Yr2px=B4H(=9!*g1^A7;nv!IBF>E}sP%vlbdEDvS zAo!NuigmL`qk3CpeU4C9-(UJbx_^0Nf8IsEx9szI8h z(h(7*x0vE1ZAm3vowrJ))|6|UV~b@nA5)C?A`0~_Rk*nwHI3Z^Cu?%%g^Lg7f^E 
[... remainder of base85-encoded binary patch data omitted ...]

literal 0
HcmV?d00001

diff --git a/R/vignettes/images/old/hBayesDM_pipeLine_v3.png b/R/vignettes/images/old/hBayesDM_pipeLine_v3.png
new file mode 100644
index 0000000000000000000000000000000000000000..90ccfa5abb26585111bf453b00a95abb6ededb7a
GIT binary patch
literal 190507

[... base85-encoded binary patch data omitted (190507-byte PNG literal for hBayesDM_pipeLine_v3.png) ...]
z)z%ikD(B*MA^D|K$@FXiDN*7=*Io)Dm8w2?~+XwC(L5H29uW*5xd*y_bB4>VO+2l64|zZE1vEX2s^Ee1kyw9#_q*1 z?=r+qHELY&ZLwGpB>BJ>6-;28p`-aCu=?!F%-8vOWtugap{#2hNX>7FzRy3EOzZI% zi*<{lxEpW%IlE3<4_iumU#v6DZ2C2+VnQ<;rEh>b5nMgYr+KgD`D)>xylH^R7AdOB z@U#UMK}lBh08|CPWtQW?ntv-Jd(Vg+g)=h>d#wBAfG?O|#5P$$29wJx}v z0GazX8|gLN%Yxbur`A{usFO`j2on#{!R*uEDC1o4MyMz`E($>Ropx^g;JSYaPf3x6 ziiV@{_B(PWEWaBW&UfV{rCdg67Udc@9Xem-J%|(U2 z=!MNz8`x1A#`4msV-#XV9m>ghrGPr8fIQ_mV^1lLATJB-uu#P%m!o6HTA7pf4EYVP zD%AJp=pYFyY!?Ef6nFd1(&e(|V=YcUiEDesc2;y%SAvssCwxx>@#VV*g-v&#Y6AL4 zeymNG7P#7ju?``!B^Tc}85IhXHa6I&vf&(vx8TjreUJuM9J;6_pxw1qKG@2fjbkQ| zwy57lfrW*;sx}s(7(*1Zqx<&lV8IR>y)Rf-u~LH>7q2cx9+opqqIM;$q&%7d3>Ty< z0n00Z@FB|~X}-6@@zjX`v5GsOsD2@4o0pDxU3M$_X0IZUvERQ~cJ>lzxKaMz;6=*@ zQiW+(0N!UyR52Lf)koxxvj4y)4ZlN)a95~(iJx{)lzD*R^zRU{XoEalm!*i)MWBTR&DBY1`9O~m} zq9VPgGf?TGlj8b(tTj78BV&i3>Se)=@alW$*PyEV-!?Vb`no6j7a9U*^y*4B!jai$ zQ(f{LXIx5D{`c$y{AlqE^9~W96mOAh94)Ao&Bqz`++{ev5p#9bbg?9PHtGFX1|nYv zG~H<(<4{f8qdVHhDq{Lyrpn@L;rAwkRJoP7&qRAXLAPh>XxG@TI>TW3p~pXq6fCch zTUr!*1Fk|UT`t$U(0_M;iI$jzoHe3uYgJQ^i!1nIRtiv^z5`_2!r~ODEAzzvh|B6F z-IJlEgVDc}h(hD4)g^IX=e>X8^FHSJo^4zl7*HKBH>I7k(|r{dJS2?U-0txP;qTMp zf}c|xhq6Z5(yRE&y7ehZt_1bgl+XP-nKj=2i_pryKY!%Dc%6;d5$=7a!^?6Cjc`>_ zIkUatls(LDn$?zOOorpwtQtqkUIYFf|Ir@%m0xn-CKsCGGo|9;H|%}kb`sh-OZYE*Vz~W(S~4HSF-x_tWoEQf*8S0HM?8l;|l5j1$48MvJ61=$bnva!dbr;BD^4u5arC=?|Xz0z-% zirtQh*t?a_%aozki}h`OnTKd~&jA(1&^&iHWg+uW!KD`MzWU%#9H<|n=bGE^$6}fb z{#16rew z7IWv1dJHCw8LH+e9!Fo?Thf|!OonlIQL*W3V-|ne``+s!S37;iHU&)lobbc0?AFiS zz-t0Y6P+|e!OGZbq9D@tT-SctHUB=u>ZjU!-GiZz@#O^ z<1?{(V2?bH;<7eBB#;jYUqp58y&i*%Jxz7j^N)nyiSxT~D@8j^z>4D1nRZ0oJw{EL zdwjR*Bz|oBXg0N$HN6;BM&KNG6NuTGM7F4MHHWF~ioOo4vo178`ps{^-s|CeLm8`2 z-mbXNSY5Zy74!UberLb4zRh&@;`jDlf>!7ks%t6&vdyA|$ntnq?YT ztqTqIY)0|jf1dYeY6U|3s{+;Ma&He-yZpgjfh!kr)^BDzbK)F5{4`(do8`5)~=oqmM{Rc;K| z(;}sPHt~?c#l8+)#m#evzHd)KME3UId^ML@R=@bELH7@MZ+yO~NXe4KlhyQ)B6N78F!d68}`1JJ|!qU$Z-2kuSx}x8MO0Ue{kbhHPhQdvfUe9x$CXr5h=0lH11@( z6uVd5uN*8F0VWU{1TARH|0)s_w7+X6Rp=fshXayq=HXY*0QRolbM;W*YZpIZ0eBh@ zIS<(Z#-F*KMu%(a)O&>2cy8p(<3rtma^;THI&0Kh?0_d9l;Yu;9!JD;ckP~XcH2wx zY(j2xxuW!P_~R_`)SEMX;EV3Cn`0@ZvS(ePIWuJ&f5;Wxp%(ph#gF`F!&ln2*!QQ_ z`PDb^Tr9H1s(qfziBH>k< z6JH3;Prn#E(_P!5x9`r`wu26Nrg(6@AGaFS>fFk?<0mmE(cMd#T*WZMB%v0crI;lT zw=?L+ECcVY8Fez* zy(Da%659!FX-RHNRCMiI9Q`4InmiQ~v4E;btpRcSV=T-?t^^G>?ZBO|ROi?5>sb45 zcNmK#Nms9XRv(UACGQ$Jn^{n>dAQ)oE$iCX(cjUBv;4}4>OLGd;`by2Dr9Ab4J5+F zXiasHD3p?<^HEK^>w(w4PtN~&iq9&m6ig(($+vR?D?esjiL-M*=bG`5Bewrh7JzxFZdP$uY;6u^IH*+fVqo=6N`#8V*!D90h zZ#V3CVT>-q-RI9qcL9?|!Tl@jsu)9+l1L|7w7dcVYq!HbC&SlAi zCpki4M8r*GRh@OUg|PTxdv+bbY3(KcauPdd^ef#7`%Bv0t4|US00q~gwZ)HY)CJSEC`G9$_?eZ>Zv5Q@+`O6TXQGcNJgkSCGc*(^W zN$(_E%X`a1JNIa6$BCgZNHv4EZjjkXa2;q1HsnAl?!9{A-8wg8E5SK2?3bT&tM1{A!jKO zj&Qk0EqCn|sDQ{z9{NXKWs@jEpsm&^g9U3Xj5|Bs5pF=z$uOEfW;`IiARU*PoY}&l z{&9*e6P*E`R@?_ABLV950Rbx4NCyrSW#6H9T1~iahxJX{9FsnJ41w}gwcLD4FwWO& zzMb~;EHL1lYhc!kjQC&khcjzGwb-4)PZ}Rh6kvB z-vt1q0DBKoa-I~Sk1hEP^ws+{9%MNoIzoyf%Ug#(MUX%j(XEu-g~ceSO&kqLiON%) z$9HmmzMLG%m4|SGM%_>D-Ufe6blnS2^lY{HZkK2#5128d0o2)&v{|iCj#0@)ZYsto z=hl>yuz;=p0H9AF%_BSQ`8lt;9R{6RhP{Pfi3HcLfs*3nUH!3gl1*~HYV@^z=>#oU z5+`F|*$>i@`NaLP-IYStLI~zt3T_sH^^Mdw19k z5}&fA8n#<-wWd&JlaU0xd@!OxzuVoUt%kg$zl@h44?OSe#d7RcsPy?lUf_j#6biTR zrVx#nCQP^iZ%<$UC7nZY2_736`!yyNx9JLs#6PO`N$EmRymvfjd5Tj>_FcGA!#Ez8 zX6w7i2EMt;A9f6)sY$8z5M1``AUv(&;cjQT>`rYXDr*q!BP5}o+ThOm#L^EJPqz36 zSc_W8+ZoJr?Uog}C{m)G)C6M%tTTf`K!)`{Zuy%}u`X6WP~Z-qT*PCt|1?G`rHM0o zioOj&l0+35>MpfQ*S^@z%Ja##GmpfUhct)$1hGV?O;D#cww5dE2ra#Y@O3nG(!7WM zIq2j?95MfM56Zt@_V8JfaW0+MT>1@aXX?3Oi(7#idZC>qQS`!yHbYlv_m9j?9{c{VFXusYG!Xpp216b{o){Dm!V4iX} 
zEN{m6CV9_){fv%g3Akx*w=mE~;gTZ^YYpDjo98T`aZ89NWo^Ou%4qQ7E1ch_Cgr|N zz&>{$wiuyl0$Lax%XrF;-X^KoE5YaVq7E$qV>ynbz_(igR~H!rh0HHNe7geeVGPiM zeI=8OO;QZDn z%!^u)^4LbcJ(^mvQAqe{9TT*I=;!ci7jz8>%nDBCfvZ!01Q&I{!A9w!l;O|$kx-jw zAKzD7TeV;RDCe8>v!x_i_S50{`XPLoKOKa3gu;2xRO;2=+p8YgHaBJnSWk2C^B@iC z>R38!e{f%rTI_u;gS|la5Pg#S+<{|Ke&m7s!>cI%^*v@lIO<%UFtE8U2jpOToKsJ~eb`-@8AvxupE$0B z)dseAd*^ZS+pSnHc3;xC)8A}pCQaOm`zw;;lVF=_?z-0)_UrApEGN^2Jdp}8jqU)- z!`>$Zb)UDTRW^vs<-_O`?w%{-1fyG)ry#~O zVd0QldAxKn+LD@8j)Fq2SqL#X4qKiiR?hqBbFBJ5N|-k(HwPn`*$nZgW3>KkFM-1iXsTG|(Q^=3yo*JtrZSCggB=e#q&kp~YlW zXXtPpGr)HC=Z5B|K*^#R?k%njltOWLDehX_NwMPY$(KIzzR!Ekch9(g z-aEz_V~>nvC3~;zx#s>&pG!DU!Q&`(S2SP3j@@tVjc!Pqq|||QjO5491D{FXsFMwp zBj`UDetu8*Bd3A+B{>l&9zhMeG`OFotA8%s(@k$hq%1{EqRi$I$Vr)N)eR9)zox=M{{~w;Bzs)|F z<>yNt)HnHgo|cL~psdle0`s9qqO`ku1Mc*L;{wEZ%7PMd9c`w?^zW33f1C>&l-wYR zM2Vk|luy9h$YW|I-+D)}Qoz0sPJwyN64al0{+XxqpXdCZ6&DUt5+p)HC49UPb3f~3 zv?vk#efwzcbZyQUB2x)Op`2S5{tEe5u=KyYxfFsgEf_eFga+MWYU$nJr4!Gd<-O=Y zW?ygDF6IA+^WY!a)^CqO(3)CcM0}|d-pl9CT8IwWCp_C0THrcrIo_Ec?NFxacZ z#}q7PNnc1+Hd+(g*?VRH+F#i1wx7N%yvSY(2bpxbYv?FU@-q&%GyL?&JI=?uTjm>) z)NnMCTeM8DKC!_js=5Rec zq}zi3#Y-(|UDd@E^fs-?+84@<*kN^)F6v=qi_S^;^fp6@f{sU|AaY?7)NRi$qYkrQ z@6iIHeo{_8_0`N~`ci(H+wApp6nQsy-})dSw+#KM%EZ z9a(ZFDh9osmKt6lG8R}d-LuB#LB>@cLrwFaLlCVi-J%4 z9WoTlW78k&Ek_q78wK$6Cn7$N@rmK!KVwn+3(S23yHtxm+IKYO2a#T_Q%>g~-GppN zyr6&Dq{SRrTl}JI;Mc;Cu?9$ujv<<7IigjA>-uZV16SOPq^1T} zQSs=DwAKg@>h34c(@rdbgR-N$#ACH4TUgY7Vef9C4B*cwEbG*U-Li^IP@ELO{UIu4 zb{gr|x6DgX-L9bVyzN!^B>Par-JWgG>$%Exk-#qUPsyIxa1TTn_FxoT|GrzAx@H}( z&#SucO7i~1yd%~8oVnR+Olru>qi~jmH0j$HwDDQw%XN#5K_6@4FoHhpuK&WbIodas zUF+$->fB7`=`>RSR}MAQ+Kw(eU*7);_;Q?Nchd|+4lJX{f5bKn0H;wyzAPr-x=Ka< z^b1)>4@n<4G$%tyJdRibDe5YO7C)q z<7)Ia7-#O#+pKnn54+2DJ&@?|(i5CerhWppqKFHm?hQh!0DMe%nb`4q8g1}{K%G#p zIytP>pN9_sb$ZgL;Jybu-gO!x(1kpyKS{6$+2~fP1K`-L3#LDppbi}!>Ej@_&z^*A z4ys4;hwFs+yn_&2qV~2dRLJ+E^^#ku+K}4dU0S$&?TK?_hEH`5`q|6%yA+bCwJ&(R zyW7&~D|4efQmtsf>p>Hl53~%%M8~N(4;Q>1A{N*TKFZ542b=k28xee;ZTT?hQROP# zvBmnodq(B}=b{_9gFQddwVh3dS78$JV0Aixb(e^LB(&F~wtgsKU-S0#BPHqd?eq(Y zxe1e@&9++6Q>&%S$ZdbA>#_s1J*mOvt)`8TfFzs+>WZ;?fsH(5%)m0oHY=Y@LQ>+? z&5?>F=QRUz=^ongS2e#I8xH;@O!8@i3X(uZbF%{Z~e}y_vnidh+Ye^zP9e$H%566p z;qGSey?zo2gc%Ae2W-2~Etg54QE<{Gzx!U{%67^QN z76f1Pt=mJQip^28Hm0!(KJ}mHR6lD2b#{ydMXc{9tVZoi<0u5FQ-iUB0ubs8b3%1o zB}eQG1#W)*(M8Z9K(+&`8n&C3IT#WAQ~8&O2?2Q@X*b`6UasmU^+5&c8r&HAlsKl? zb3Naa^PP3H*R@}8U0HbX&+kSjh}R)+9H{BKrh8%S_k3cN!57%xn+o@+yPP9qgh_j1 z2%jBJ%5jKIl*})lcXwhk>g6^#2giPp>@)Nkb6tXuX2+foOz7NMQT{g3-84BoQ^7ABbDOdwSi-Geowa|e9zfMPHlxE%ncJrU9 zq{KuT3Z29G0@LdAWw?FKa*uz9M*gdv^u(!QF#gZe> zQB+I0mir>ozwX?;amsjkNh8cG#kJwS)(~1I#?NXdy|RY7Oc9+|59rV7V-(|2ll2*0 ztZ?icYO{*y*1QzsLIjkJs%|!3>%T+kUid1N7#0^UF~l!Lb2FMc-kLxsy5APG%?r!_ zK*?+7lyty9nPCt?4w~EX#KXI_-e{nOoX}2)q3x6MiuZP6%^&N9`<32vSm^0Z48T}- z9fIy6<)ki&@IjXKZrL{JwUcP^CKCYBkM)z~hxFy%FYCX11j0{j2`K_Yr~(gz#?u&0 zB&+WtLRJfPuvSN;k*R7_BS`xB*AdScuS6~O?~=i?v2l3;orroZzk3iGp?gXasol5? zE`Mx-_2c6`+?|n%Ggj;|>hOV!C>~cgA9gc`u<@i~(@k~KHb8|U(FxoxuG}_>3Rl z3(LG`o$aSh`7PL6h7CU4w``v*Q0Pw)2B+0R^T=Oya`~ame<+!OJ(Jk56440Di%el; z5TCD7ymTfvxVxtGI*aq{fcD_T{Pbemlbm-o>?6Vh?eEsXzpXU3-(^P-R^c9Rn)#yY zD}`qgyjG8Aw8>@-;$l3Z0BZt{! 
zZb`0Kl(z>`IGY2|=?2H9#hycXHZ#qB0BI&tZl2Ab=sgh1dIy-F(l+UgFP~oIAG{d6 zMu_!8)JKYTWP>LG!g|d%rkEdxqmmzXs|Q0@UB;fqo{lpCzZ)A00)1BD7TtS>MX}A4 zbiAm}a<4>%)*7I;b@NstM8V&FBYjyXtdSd+mk8hMY##B6rrP2{Yx&%#7Z>9wDgM|P zQpN|hXZ7|iX;O8try#*f(xSk{$jik3DzzC{U{z>*98F^1d9(}eC2!)57)WY@*hK>I zE5uAXiMp-2jgX_u5#Y*hCR{^Xcl`T!@eN7Z0qbEZSyvw%`rE8>tnz{f7M$#Y%6HC# z?BdwK@drUz)oKkXAMKf8&5>c*IL6_(TwDeiR1dx8FCSYCWL+Y9>ifFHOsE-Y~DvSO?B06vo1<4PUXWYTTz=%K9#?k-b_dy!5!cLYsPaO zwyeI}OlSc&O)|+kMx|xtq-8CyoQ|HINsc?qiK?zNk!GG6lyeF8q_k&;W+9`E0~|FB zz71kwd>AC3*Y?yfpgXGr`!>7ArPqjvP;Cx7?aucP7i1F#9*q2qJMx_}D0AWEZi&hi zZ0g7k<~}5p%Pt3b%EkS}SCE+k%Be!G`9yd(667;9yTuqrlz#e*E4N)aF$@T(ED1jk zh(7GJ4U^bH-Q_?_eD?bgL@U&9UGi8sWs?=@BC3lU<-v2J2lv7O>$0-B--_ZiOp($& z7(1%VK2Pf``jMJh6Vf2s9hX{7AGz#g9+&E0C*~lP8zGw%kWPu?(ut26f;*6^ zYdJtsn@nli%vrz1+wfT6YHa?+e<`U;T;N8i`*1Zthl<%=etF`oXu#8)MCbu)bNtSf zb@iQV9)n$BwxBo>-<`MvyoF!;wuIpdlfIctE{Cw{k3vUeb!Ft}A*SjOGa#O2BqcVqm@i%ZDg3HL!bg&tTVA5x2zB&1jN z%t?YaYa5Hv1^_P6`#cn>&w3x;5cszcf&67@nQMm-mUBP?kst~9??MozAH9pz3zcjJ z^t(|AH?+Oqt6#?~;hlJDz2@c_m{2q!w~S2tOuf5kwdny43oFU`n8o8%_b`?1{gd!G z*9=zj<4jlXe`IwjWSXaL$Z33?BaLUZ;DO)BzHMQh#@sCsHAhtcCHnC3MYM~6pNv(W z?x!!h8@GEGeo7o@f$Yz=duyjysAEvNtN7e11UFC|twa%)Yv5qr@^0KjGN~t|e-a-< z(n;s*jZj2;Fc1u5lvp?wOH^ph7vo<`mRHmauMB~28GE5`Xx0wm1$RD&N?gyzbJZHW zp}3t1L8E%zupodEx^9DlyuzhLmOCDv7`3KGaSH~256ucn`nJVo+YX_L6d8@y#iL4H zupAvkU@;7NVeF)HW|(u0d160LV(VV{8Sa_*y3kfA>jQBRui!4R@N|X|SMRd@&!>=L z$CcP#O^^b?L(oqLl`z)U9M`)3b^9aP)MY0Ou*7?Gs^p!PQd^XF$BpHJp6t-iaKOe0 z*QN-Iw%sq0+Eky!3aecd16~O;KPuT&hG^+7BEa4eSVVL3~?oWMur(JojlF z%V1ZMBsZzK?6pASbXV8=5$Bqa$Cca+e*`ns#xD(r>D)B&SqqN9UUqZ2U|O3r=11$u zCCil6{B$pprQkGV{~3ytLF9d?+pR9@E&ZXkLi!@#)es77ED7(Wx&=#`3C8`N^`a&Bji zjdOadFN%vI+>2|LPuL*2;xw=P>P%r7dw)u&p3%^sr?oQFEf8rq%)X!Hx;c9n1LM$y z)}Y5KQJLaGLW;r~_@(GJBV=(nxcmf^PB5z8wpiBlx9}mp9FCrl{T|WYTI@P9!0+z<* zD|2d2sv9q*SYMR{rjM~jV>wfZEJVPLPxFRV-hQ7bAvTr?(f`MPA4L_mT~ zE$|(4J4Y1n@F>n%{exV?CQZTJM+Vz?7qY%##xYuch?=YGPD*}|IsfuV^ zrpZT7;e~B(CKhLHWcH)OMK<$KUrPh?J{mP1SeD*mqshuqZ^gnc^`i9Q_2zJJ zm98jV{se#l$La=26cpjJ{=!etIYMrV_%!(#FR+96XHZ?-!3p_ROk4NRJf7ezV|PL- z^|3%xnviA2w#J-W&;G9G&q3kCigk=9@rk>_%e@;({x6Onpm95G)|dGK;wUbwO^e>$ zDMKd#82|m7j_mJfDGf&TT z#Nb%VZL9wmqkVZGm`wnp5Qgw@!3O+_AGhfzH&y)boizQsf9HfU8w|`ysovNKf?V&4 z_cB28lRjd0UQK^9l7qadSH3u3nQso=F)RGTj-G9>K#aMqDGH?&{X8OtkJUJDu7-oE z^(7F`TPFS1*impsex2TS%J#-?zQEr$R_cXndJj^FJhGvsh5auZ_$eYzvISB{(SnsV z#xu3ZM6XXN^Yp39W2DXxLTi^!mgna@^O*P2CIC4+Xro*n9wnQvECIK!NAVsLNKK0R z;O*2`IaQJQ(uC0aaonrqW*tdh|6m@5fFQ}tu$rqDtW9cFYMw;xQSwT!GpQk0O&Z-E z=A{rVR9V@z(jJDT7iQ~--96P0j4D(w_WZyu4rgw?k-A2%>Rid|g7C}DHCJF^Hw$vt zdFOCr&x9*cFwpD}d%HL8VAeV6H^Zdw+(6;~rm?aoZMD3*7n}yE

    %eBwC?ka1Cvyk zCR+0Axxdv2JbZbg*k>9jZFE9ZkEx+;(FtOFlP=y`!u?(A>=*j}dzDhk>qI`JO4 zc<&0dKWY^^%9_w`L`_{M8=~vqkK=eg7k1@Z zB13r9J!Kr0uiByA3VM(d$EkyMN9i`%7UVKV-9(IZJ(p<}*OwB>uiQHW-9(NP@=c3w zyklGr>*pDERAybf+Ty3Q7!mQoa*MLYpYj3$mvPwyZhL0};sx5ZKSip|9Ir6H!d8eq zqS~}LbE1h3qS<5mylSk&t8Y%Y)Vk@Dq1Gll^HiDP5{m0iN%#CdZ4y@bzx8NCXGMbr zY4p0wQBnnV9it0^n~w6RPYn-<_D&7(7bIjFZch%~L(S)HMctx6CkjrGhIJyp0k;T| z&KLS^^GQQYOXYbOJO`rAsl$Qj7(2U>TacZvFoDJ6D|!LF01M%acYy!ki;g+gw_WRx z0#z{&shZpeZAA501jmp-8+$p|+;O6?Gh%+$$X5^$=Ef(^UT~s~u|^$c0Fjsx!Hr;~ zfdM>({R}_xz|t>nvASUp-w*^K(jSt}&hR$^oA$>C`?1Cz`QWDR!wW7Nxtxm=mUBk; zzfsU|59!ohNi+hT*v?nyaSvpZx&gjC@ThPI-rd%U(l4phePaHJI0Z$##9ZVjetPqR zi1&SXq)ww4!f{B=)7#6yJyp2Q(KVl0iD_4Jr3iO&H81_3ZMMd=84j7k@yI8XdgnxA z08;_5j*`0d@asDfeuR-_@2~5-jH$i_Z`Zq*u(L%$IHIwiNnC?*zwey_JewFI zrQsPQe;+JLFd+G{r}cR?>bib^!=KZsv0(<|?1U!T8la>>a;6f2X-0}mLQDhv;wTDq z#So}o#&_Cu3iz@rD)xAKm`CT^XhT1x1F~@^z-!kg;1+hozdAv}yLireEt9%Df>wZh zkMSf**Sq|iNffFL+f`e6=JGoTd&Y2I^0nn%5YFI~g-}(`H$##{3yuPr1BXvO*sA^}&llvgLE+1)3|NKtZBOV6+ z8~5iAdL+J~=6)FfqpX4j!?O=H6ZKKx2Yx0JAe>{9jlYIyq+QH7(HPT`76G^MkGnUE zg)-9tlg#N(M-lGFSi~eRFfjYoKoikAMCGi}$ex%Qp9q1{yv!#!Ic(3rk0ESxV=p-O z#Yv1Lz!|X^JO(XAt}$d1Ca0{dI0buAViMoVT|`LJPbw<8K297qIUKbe$>%BmP`Qw% zA5wnqgVej<`Te!Cy!kuhj4hR-i(n`@0cG=n>=|{~7A;D%-eo7~7F$y>5b^gxBZ4bQ z?%cc=2AY+XAZ(W+2(~WB8YNcu*iy%l8*t3ELDCqEtyBHcDZ*fdHZ?5SeqYmvw6zC||gn;tu6^JzHp=@xb7c#$*Fl#@rWwkXUZ6F+{Z=+6)k6Hjx+t}>7eSlqEz`- zcF3AxZnb?~Oo6dmUdDW{nZ!VPuhra5Fb&%-eUx?@>C1DOHqr0?Si#VJlqJGdb@ixl z=$hSBKWIJ5D$JtKoyJfsxy0l^d)!gyS*~T0MXIdNySf_YT+f|5-#07nva5)W$mWl7 zK5>$Tay3s(8w~;(XKZyBjjMXDM$3kdYhFShymzm}27Zp7y0Z{L_Iv~daKp|TbN6gP ziDtH0iR&h1?L56lR~eCnW5i5O*Aj@A+KXG=dvr@`1lUC)a&c$G0XsoxHwPf6_X5FV zHgb(V0k@Wn0wkT^5l2@LH|6I@71G(M#N~&h5GaW_S*(#PX6u7wKw#6Kf?$`$rQH=q zL90rW@=yFGBO-eaEF9V79yO=$>2#F(FM`z&ArU*BkyuBv8Y`?Ecw#0VVa#EFtZX1+ z4StZ`TT+j6PJA&NHBD-xn^KfFfj{QaKr9>V!1H!$Dk-_vlhk1kXqvuG<0t4L1g|>E7%PrtFN$ zR8)uLC7DKBqSx!T--@Ql2)+~b(Rd3;@B|>&4%^U~CH9TI>~18e^CFuW@u-?yc6EV1 zcR$qcD#_FbYks|}nLqekJ{h*qaxayw!U~SCIJQB8fl;-}TgM4>0Xp(5bSM`G!oxPr znn9aPZ%cwex4;#~C7H`A8wMdIx^!Y~Sj|Slr>=q1G9Ro^RwwRj`<(4n2SBz}b9;8v zE){Mc&%ygCeNZA2sI{K;33~~HGYTZ8@useY*+PEl#ZQ6S+%Q^{YOk8D7JkDiqV9H1 zWd`q53dS=ODH$`5e3C26ml5F|tJ=lGED+2UpFDWFow(Hy*(@j8n0w z=a4r9m><`$mTB_IrM|kmrl&G0aHt6 zug%BLI`OQ%(tTc_Vk6LdlSB*_?jKu*D3Vvm8U8H!gREN?~A#TXXEd1t0cdxQmBK3UIQzzfN zKJS$hQaJe+x+OWsD}y3M?rzhm2qar{UWTn2%Epmj%_2&k137M z+7B^}&z{cLnMlp@@qgCUsVp)?GPlBAZq*g8{ z^Stuspv?p*K1`ZGOK=~6c8~uuEwq@p)L#bf(k;!5A*9?%H*kFbMD34ir=ap7Bw6{$ zVS%9)p`-DdULs{q8)We(KoqQ2>IgS)BgnsaoQ6@|4Y zT^c0fylsf-WGuZeFkR7O5><)>^3zk!K01?o_wzg&C-)0xCZ}w7!%`GkRjTb?N5t${ zATfBq#_@e$>k4iF=n#sFic;DSGDY|<^CYg}GT^!sm1(b(eJ>X!1c|9hP+Mw7#rax0{*8@|C4b&UnKxvNikE7$~w<|I^=E=_YC$P0&s=-iVi0(6eLSx*=i8(d-DP|;c3I5Ip)LOYPlj9at&g0n6lcScOVd(bD!y%Pl@3T-i}4L@ zw#49Or$$cPD@u*R0oU#!3N%?!50`36x=dWD)?E{gLZj@YP&{uR8p>&L^Djr%cl2xp zzol26B_$NC)X*hdWLI5(U!2ezjRAF#v?hl(`aXXmBmm06b zL5kKMNllJY>%*aDF$d*ZH1HLT(8%t0L0k@F^~`GQo4>La{TT^NKQj-99}KeW7;fof zGnF#n06y&CvAhL-@up#)Rwrh6+ztx(<)aZQg%T-jS?ojOkigTmGyt9Xxj9?O;?!3S z#n$0(skAw-j!O$LR7koH->i7@A-#9abyPDxZe@HnpJ8^J(v%eqN{9$BN176mb>-x7 zy^eVx)A(%;CB2{!c(}U<_4Yy6F9nkLb9W_Kl$?J}Qe^-XY0m`CT7NA3knDtx&NJEd%iGLNQbsWDp#cDsDi z*Ez&KEUaoxSui)sb&m>B4^Lg<<1F!(7WsmSSWhntc>tUGuja`gH!7HZ zeR8>yThRw)%<4)*He<u?SB*Lq%bTz@6?`*unp&hdI_wmR>-aA)8gR)5YM?n#|XN zYz&|g_hKD)S5if32BNKD^F7#;U@F-Io6tsQvPP0@F9f+ z)-n^>3pR}~Xx3p;Bt1k47-Jp@l{i7wxCrmBxOjcmsleM$DAUpl2tz@PmOWX#Xngmlud`f)cl*5kB{O}89iv}A^d^(*~%LZjS zd^SSKD!_`a`_;dvN2qVCuVXR4EiXm~*Yl&zaD1bRrtD30=Dm5$#aT78Q=~Co{8wGF zWr?@u&s|5Nx7rFsi$3_+j1q~-T*8lZW;eyj&L~Q+DE6E53aJt}?`6kA2s;JjTp_E4 
z^*&3;Gbad1IO{*Y7<5fEM_v8qLU18hxh#;FZ<}}7aVugRA^pwY1|MFKmXodtdkW7) z=WNnTGStr1*F2)qWR(qHm5^mLtTjaaRAJ)k;!b|*fDJmm<%J)+WJv2P`(qrzanlU6 z{o7np>y0?0%Qmgc+821l;{Yj;+!G68Eot?MZiQaRud1{=J(rDbe4=o^cR4yy+@;WN z()5TcE=gO2umuQ23L_!sVu64$nN16z%Bvh3M~y*B(2GZ+qmS1-TpQ4Ohl`l4m4b4| zt*w<&Jh|3o)$ZB(UWH}v(Bfttm9>Q}vL@I9*|6b_H<{&)xBv#sANlV1i3jvGTAf^= z97{?BP-_9wm${KBA)qvZq-P(Ui4zq2$^|EUdaEa9&Eo~nj1>ks4A;3b?Im==1G24A z;;UNszIx) zVE(8VcupSHg=My|LMit__HO94WIy`f@|gS# zyR#gWL{?8awCwEEv{{cQ2WEH9r(yQfo`MH8DQ&df(gGMhy33w<5vfcD4gjL$xnsWi zAL3i=CI=3@U!XnQipxs6u>B#CQ9Kw{aM%_^lX%>UvOQ{EBdQ~%6 zTG)NG4T$j%)Oq>_ElHk|1MMM{;jA)~sk!9j$LXsP>>@rD+T82lvFA`QiSIe*J0l>t zmE9>$#Jh3B+LQR<8}(8~GA_>q^ zA#)Z_706SYi!e3Em-MvcV%a-Vt1D+lF1>zZGL_91U^tJ2L<2!-+BqhsWO)sPTvVB7 zaq1*1*kMsUG=D?_|BS@I;VB}*#lGcpoK22m4jg#}c9a;%TKl4SN%zP1if`e4G(x^R z`=s)zOmi3MS5sk}8qyoIb&b9ay2|`pOZgX}7;q%uVsRX(2Y1XD<;gP0ptu{C6 ziRizaDm`q2hUosVrt;V*-eLV2%iKdXef?HtD$;r>Ws1>^4I5q{Mndp?3!Mtw?{H^i z@H6r}4axTe|G3>CHrh$q_+Yi1a&_yWqyh=WKa6w*lehhUPOeA#;tUn3|6PY0eV{V!YcPbis2HGG+61lpr9-!x-8Pj$fi zBU~`lAF}$|E0kHQsDnR1%k@x5?QB% zh&aXdX}z61-xm5{F#Y3fMT#I%z>nzR6!#+~Lo}bCiioKkV*2-|X4-UbMZ-V-1Fjh2 zC8t1kfDG>DYsLyh2$hgC!D5`So2+&W9aX_U5se`-ux`F0Q3jZk-*Zu=fg|XlSTE+Q zB}OQxCI7c6@Ege#ae~+le#p7)<>7hwtYc>_-%w=G{v|yAdA)zXm+VHHyv;h*?1KOI z*gyW^UxR^AM4QK}JQnwVi7Vh4Ktw#X!2b2nJb0qN2mZ_V ze|~&~O;`z4$)NwAsKt=|EUptx0*URvQ|KQfx9WlYCeRFSh5t#mC4vN4x<6s$vCg#@ZG7vUGM zM|hAV=;`6ciVe>EpP2QJReg>>nDyLJ!tH0|QjKN1y+(orL{__a(ze2^WCO4_+4*b# z>p|)CmkEst(F?Da1R0j2;fU8K>tDSN7rxo+)Y&Md2sn$I*x%i(FVC*ELz{N=>TFnq zjCaA93go4X^|E^W976R&A7glaC``1x@`O2>CBh zM-67sr}S%D-HMf8KQv>)Y>rm|HIEFB4<}MrQwsE2X&g7j&F&J>54ZmJqUA-Ze!AO3 zA9dUBe?2HU-(IysToGbvLrennz9D0MP?OhNCzP>!5tY&v#YBX02hH`_8mhMXGQ(~x zsyn`!^J|F7StIWQ_sef>RKT{@Z*RX8b=y3|Q<9fZ0}V;Eh2{UWKmIEk2Wc@T;?V=u zg}zi}3b`wzg2ORKO4or<#$L`^ZwTap^J-d|{mSf{b}9b+rudUbY;4tG!NuOfnWN`+ ziYIuOlNIkl4_d}tx%_Gk2n@X62%&v%!+AiY(y2l>`Ar)eiI3ZH$p;XXd{#mn00C6{ zJ{(yS%D*HRx){BP<2mFv=>Th;tWg&~9JjtLH)wqA`O70MRvw}!znVwDPl@oK`1Cg> z(P*O;_71LVg&k-gueL&UoI4hcQA~hOM&HbOBHBRHd5DUVsAYP!(w=*hihRxidXbv~ z1ukmhcUq*D5=xsRaHI6%J}&dIR^M;`9~g=IH{5lVknt9mE_L|FynM5PI9oJkkH!p# ztpr~p^Q<8Bq@^_Zek*3EWs>lvmWI0MncXO%;BI#GH;n=X&-*j*x>l*q6OOyF%0EQg zki01r(mtfakQ(u?TM0s}u~>?*9O=*z@p>#7PGzqg7X%v`tvW9Am*xfN7wc3)G;;jC z`a*8b~FV}`=Rh3_`MPCB7)A9TNYgu&cOt*-u#Ymcp7VC$J8&n5qj zF7d1FwEYQcc+R#{xAh7+&1*Wj*Tv`@#V-p~1&Sxc$?4IvwcpGey4zQEpcAXjyH9TE zw;i`Edy=jOR4HzRV|GS;Kli&@b{-)-4|{d7;?BxCT1pP9$*MAupN5nLJY6x>+s|W_ z*t_~47bwLDE;l-BHx=nrJaV`^zW(;%F6L?^*5+bRQPf>5l?_njdsuz^&1tp&`HVy_ zJ+{PSaHhC_TesGWrw8x1Pxt;*>Ru|!f2k-!nO!iZD>S+ZsZ-#1i-b-z?8=s00xd zJeKsLoJ_o}m5g_<*>nzD^fcS|XA&`tci{@0cgIy{&$rh}DmRG2pRg$J?R#t{XEp=B z`!7_b{V3KDy8>CqZ$!3ok%2oO_P?GK#u${nv%Pw}=m~qk-0$AH5`4O>`4uN1u9I#} zesO(Io379XgNfO}q{=;FQQ%X(9L8eb(_kjxFJUGbPZwytx9Y8{vE1PJBc?T;vQ?Il z(^zX96@T|pfT`Ta7|ASt=v0Wd-K<^ygLgnGmVb$X z6U%PqHl_e?DrDGap3C>QP9&-JB9XD*1i|NE4)eU(ON_MsraS=MXVG$A32c!-Md*9F z5%9`*wE`I<{R>BXU6In($o1rBnWblqE}OhL>$YEWI0_8xZDgil^!Rplpj!%)9*J%r zSi+YXxyf!YD6zwfq6sTeQ{m*zW4%H@arig1X{$hq@actnhRXD8}X$a97M%6ckS(#HNWQT7)^xs>`8~2NYME z5HYBJ&qvDQ{;iu&*sR;&7vw>t6dXT7EqWV_G@~^T@4eUmVXf`XqVR~(*8>xynrkp2@sdJU$@}`PRA7PFvmcNLZT8oV~3kY7zRa)G1o1T(|q6!oD!BQ;>d$o+md+GLuAG;uiB$ixqaS>+E>C zj)^>jVZMcJRuTK;A)!$2{eXTzV@;U}wA60({>7?Jnf{gj)z6PZ67|`6SKj8yivcCZ zkkOrwdUw0SDF&T3H5UpdsE+sV5KR)}DIZ!O+$8&cSD6IU*fEUiMZc}MAlQBsU;0f( zSJ4n=w$tV{INN#o1*ea{<`E4k4%icl_fhi{HJEen=nnV~d-V=vEf4IWN{IhsPPF2b=bUn|R&}dmc|N z+XTF42l4_UVnRyrAIl`-k$RjVk4P{TMP@cmV67ZGLEYPQA)CiK`hF)%ZwUtgxl0RY z)&F6Zu8&|>=ywxg$PH#OdoMupt6mOPogc zDZU5g@!!0T!ZkVq}1E<1xE5$Lzt5`$_`1I$_(}h9bq(W-9*JfY!p4tx-Wc}8!U@2;Bly`pj``Z>+ 
zxoGDvgrCf+ZwbY{2a4g?;Je;?yd%LvPqI|_ZrC9|x>8Rb^d+?BK- zdKEEd{4++n^mgNVYO|cuYYF$f#aipM*rv4mUJk{d27Ehu0YHniEL&?_2PmNAb^n3C1)m}$0bCA|T&5gA=SqvK5NWLWC z)6YvecAZBPCv?7obe|qrJ@XzB$rwJ->4?1-`bu9l119T|xtWl75`lU_qP&(p2R!I; zM%xcKY&RpV#m%}!XEza99eQpb=1sC-mR)NIl#fjq|Dm1gjlTSW0SJV{|-^X-(E7&zA5leE!sxh!ug z{XK^Jxry7*ENu+QGT~w;(AlM9d4!`?r?Mx=v<_WRJnxC4=5sMPV>pNqeDDAL| zd|gc!J|5xxiu~j_5Jqf#Jk%YZhU;B+H4%GmZdVC2kM}sMsRC}pVzp${ff1Z-I_3Aq z#%rAOA)2P5$iPZ>=W%K*t&-^!w-vhm_$VQ*G2(>VZhVs{smJ0@{|~j2Jo0YuAx0#p zSk&TW938hW+Z!Ups$i+8YI%|8S#G^8eZ>B^(^w`+TNGxAsK_QiO6Pf-oG^bJyIv;13qCR`=8 z+rmRo>iO5wF;S)d6GswbTOBTcr;s^9#pE>!X85A*pB7Jxx;!G(4!Td+gQvv9D?bq5 zhE61eICca+8SM~`fxF=`{9IKtW4|zXR~d!BBj5TNaP9hN$ShNl^&Sa{j9BV)&8QuJ z|6*bQv7@|V9%J=3Tq5+8(8%>-Y@gcL7t^dDIMEA!^I&k@uQ(uZBXTK1m5L~Wpx)5s4K7R3sJKg6}amk7aR@X(*E+%|L@ zg6H3xQeFN%gIrD^mBv%jRTgocUCVYO}j{ACFS*=t0{F1!chtN8@n-yDI@8Qg0?nJHF=Z!>+Ep?%!j>K@Uq=u`!p7=4&(mm zDVzDq`&@#|x~uJ7DWYu;v5XMa#fIw9{T~Q|94l%~E+w*L&hcEv{6AMMx@xTMTw1=_ zc8~NvdR{q3o0-qKcqt@CH4(-?kAq@>$T*?*)8pMA+I^S1t$0Fz5SUjy+WSfHSLUC< z9Vs(*-0*x1O2R~q-IXq{LN-SYb5Xa76?6I z%g@Kj6&@4D+I!FY8TY}mc!g2C2HlC+ZE2eIwySH2wg|=_{*_{h@AZH}dha{kUVggw zqe{e5^FhV_7rQ?|H~g^8|E|l$xsjMvFK*vKGe`Vd11V6oX{Ad9C3LP!#)YXh1QusR zXkNVeYKdJ?dM2w4KNIc{${1;UIEUvHleds5d8G)C}wd8jWDy|Q4?UG7pnn5$i$~~fmyA@FO^R? z!RDM`U*TvN#5SDlM1D_rhWd(!YE3^n{~ia8#00?}BgHkxds}#z`n3RW&g_#mW}ImdBv_Dv8dev?su2Wm`=>)bObrifYpmSVBhP95pxG__$t_cqIoM5&muQuJ*i3Pb^4WvhLcVC z1%lNE5_uh1=)ErWzVF4)hNf5dZdiy+^TAWicO85FSb?VNQ+Wz#nL^f0IEKZL&gAtgJp=kG zb%1in_Q#D(Z2>H7cdmS$%zQY7{`PhE4?LN1W))HkvU?WqqD_$og-o|S0xC5rlFZs; z2&BOaNal>D@?%Yh5zn(eDZ$r)o<)FcK*E3wB~`1ko~$#fA%hXtn+*AIHUgd% z!(3k%&wU|RL*~<`N@GtzO4Dq3Yw-{wJ?y=aW7v1}@Jov`Jl}$fcCdkqgwF?kR=i2a zSbx{c#JgY2gdDfb(`^Xb7`>z;@pB6rp{h$3RXqGwpr5i27k-a5l1VR^Lsyj20A19WJm9?EPUbU5GUqoV*S%_NH2qXfldcFI2Hdu@@8oJtB#ExTMvoQ z{CC>ZX<@BLe&@&RQ67IDk_5Umm#?h(1kN63cwq^=JBphxy*WDv=gpWN2|lu_l6RU3 zfK-aAE_+O@t^Gc>j%I)wUYH4oe)Z8~G1g+7{SewlN+w@=(_5+JsZQ{>)bKqJU&%}3 zAm~n~mI(K3_Pb%SX*?9wR*>EFdC}H{5V(AI>K+8d}))n&-9%&ep z*vsxy%=$1t#Y|6+Rt@25N}}Ib=Fc}64pZsgIw^@k;ruqCgWo>qf0dhb4I`V^Ecp)Q z^IC3Eny4ljJ35%Mh<-Fc_soQa=aWozR*~$?GtzqEUUG2;o|-oSoi#z~qxN#GT8^QA z@25Z82fOas=w#FH+MhW%lYRXgBP5R7^u(v6;ItOWM{n3rmJGJdV9aXn0+rZnHK;|h z()e&vj{99l$#ZWj6y`~^aR>{1Z_vq$$OS#P(FtB9ushjg0H3;{gh!OVl#Y2@I9Kb< z*m)AdnEqJTl&jpv&$&!_uiA`r?An$tu@~mZwL^uqE}8uC3^l9%-ory&n!7rf|PE zP^ddtyZp4tfnn`w)6%z^Ut$0^hjkCwlh`~6a>V(JW$I4RV6!$%a;nLCxU5*+9vd}sgX!#WS$rxoTRuC`| zb}K;242NADaDyRp4pAOwF3_nS=p;eHV|)Hs&5S35bYMlBp9Y}-EWO6X9|GMdcDBp_ z5Tq%5J0+JDCi4NQP3r7RY>m~jE!iw)=C%ZZc78}|A_#=r_u^sd2Oelff>o(Xf4dE- z5izVj=fT>0HK>g?VH$qCYFEi^|8nBDM9(ko95^;V`Zrr6PqN3<&7PJo=@#%0ZDi$O zeaDuV2oGfe;R1gc&{F8Cz8c!$`Ooy4Jd}L+@bX@vc=Cg>Zj_ayXS5 zPm%L6SH)uKRol8RQpvb|mwthdDQ(&Us~c zYH6fZqt=)53wgpJg+Qn0rQjkyO}S0Wjm}|2?bG4>GWk8$Y!q7oOnTs26HnJ;M0;42 z09Y@$wqsERDUJKs@MeTe>24EENU*Y_4D~Cg-*Bjz>~x8zCg%sj|FQ<617TX;p5L0k zP~svg&*fqFGA23N+^(}*?lb)p%Z^&s{Qc-|+9DO}JR2X=wA5_H9hUYz>xm+VDJ4fZ ziA|bX9oB@G$PWVfyMCGDZ#SY8=;s3OWs{c}8`O8OEMgB98|N7{#2!+0(dGi!o?t!d z3fY@9Sr7R8d&T%nZ4aR3K84Y~PRPrtP zHsd`Kw4Bv#HGlxzSfa>pI%@BBL{%%MFGQDzLl|i?jQ(zEb)x*)J?QCJ$gW-1(Mfes zr26oclLr?ci#yaKPp3Jpuix=*QmJ$wIz^5xaTuaEhPy;ec~ieuAzg zaXnC{^2TUxCxm4EM>Bh9++vVaBm-e_x7j#U0>VF+*skutIPP;PmiC5nL)E4lB_kw_ zVkAG@yu!tpt3v#9v#mO1f0o4CD4UiyPZQCUx8zaTtkavRkcpax(w5P{s^{q)Xz&gKY)LSgeuE+LL>x7}I}hvrt}6mlC5O#_P<0;v__;z@ zuYNJBm32%`Tn3S#DT;*YIi3K5g{v`5lVcCYow@TF2}GjX2vabxQusJQv>0@Pgh-1U$wy(4u{^3{9u2x zMrcgTQ{;$a-TLzOrRjF&Q;UAblb^)9-BsU{(EbKTGyhMKnEo!mqB_%Rs1>#T#b4@P z_KwGWZ1K3+oQS-?wOY=Rv&a@VW31!?WL+t3O4LmOt=**G&9A>*HUq-Ve&x~hvg4&@ 
zC6Yotp3Xc;CS2c}FzOHaiY5%9-&%986Mn-~I3EkoorK){(2qo;1NKk(6K00c)-kE3 zO-cXD!&(yrVXK~C3qmh6JO&0Op^GuZ_oY!Z!)INZPgU~B5qq&F`qvUfC$qDo8Rr*k z2s-JQQ~CBmp&DdFtaFSH^|;{RwV!k;5&f}+`xXQ0I<*f@hfBEc&qM@AUbO?8!tZnc zRe~@R!t1_R{bbC6v_xc*k}D}2L0T_TDt4gaq&3f4kc86-=m&`H z(?jZ12q^qJ&sSo56igy^)h&E?nekN6jIEHM1_fH19s&wFBthrJHg5Oi7Xk+_z+g#W zF<$R3rkNI=-S0Sy`UgAlk!#hVX|r%BCQCn>q|w{5m*|VY$U)YP5O56-yKDOuYikfu z#}znfd-{`ku*ea&uQ%0lbNJwl(BCHOJmRZfBu1xK;ul2s*5#J8GK#Q-vCd%k-R$a% z`<8@t?i4VG8bgvH2j+8axA6t8M*~J(1U5ZRPXTEVgw{c}^tL1T)rYcMR!*I^Jk7T` zf~}eZFXfr?B%AmorNMk3XDhbN4B3S;+UQhW&chWVb5m8Q*F3sReGD;QH*KZ}x<{uW zW|&WDL}BK4tkVlG9koEGO)7q$3pX4ke4|K5OGNashtAo)_>ucq3VF;JJaj5IRw>iV zSb=-78g9~VU7sD0=Cu+bJZ-wp1`hGBj|xD$A~E^HycaqK`NZk*+{9VW6j<5FT}jey z;<5Ttk_rN!iCOus&NG*M_iwm7;8i#_m=t)0xL}HLMJR{XHSf+^wmiKK!9M!K^Y)Q0 zj6^sCukhO76>^EEE&t6%JJoi343TCGNO8%@gZpZ&OLC#f@N$2?HdW|K;z!IV;z=IhrD1`TZJ%5pH`YV1 z!gR|UFkuu!iFHn4%knMd*Bb4RzD&H3Wc-V6y9=~2B-@u&(4k4R1i1EKPi8%F7DUAQ z?}8&`WY#^B%hUDKA!PB^|Ge@O^7D4yf%DQ;Tb$h;y@i*T@kCKHC`?I2(Sosq64jR}h{?Zp{Lbc9H&z97-syx73U(Ok)Zu=Ww}VHyu;VOJ(dv86{QlsOx<3OEUQ;#8NAK~# z4DT{jExx7NEWow70oDB~C^-k<6sT(K7)VDaeD@XLmcaH9-fUbKD5F=jhc;wm0J`9Ndtu^a-0U=jNf0k`x&6EhiOv zY7Bgrhx0Tc1G+RyQkS0=8-=rGetzh3D;orM_t|G24TY~KUe`<8mLcT14dss{IU^1YcB3m#wH$7jlFuvxZF)F@04xN-{^px$C-ind_H7vq9^Uqlv|zr-%XpxmHX zF`Qj_yior=Nx-#Cwy#G0m;zuDuqu4m7HrD*k$~oGMD0CvZ?U*Auv&J|^jOLf==Wf< z8}L|#MGvPY7tkvO_Clkb9sx2UgUD$w=JI!wK^NmIe8id;nU2pLn8SBw<091#yKsj~ z5byM~>7$$NWXa|@u_mZ4&?Y0_wCE4&T6BZDiG>lJCtaG)R)lykyVRge&jSrlskRE! z?HDE0kn%HEPfo&P3eO9^YU@Ri+f#28&8_hH^*VRn<^W{O%WA#_C*=(`c>-UV*FzlN z!G5VpKd-mCzL)9!vMsaVskorv86KK~d!J#1BUboh*QxNW>9@4FvG9mHBJ>X7rxY(L zM=gutC(0aQ-xV*zpjrD@$@dD&uIECS3U@+m-jBId`qr)qO{_1{5_O5a^3cF(e`Ory zEb^C|Dy%+4%Baoy?X;RX>;Ue7OfYm3OYzRifqJ0Zkg=KaPbAf%0oXSHU9;Bf-PVQd z4hS~Xx^>MVfw4gsGGb&0`S~CSWB*@H4ijyeen)9Vcqg74<_v(V?BDQWkx6q@t*KwN zqh%VV88d3VyF6NCMx$_-Uw*M($*g9JZ-MzGP>-0qXcdNrcKUp0Y#m3PVwFwXhR{b3 zdv9X^D1ew$(AOs~cGoit=t9%goZ$j>nPBew8L`l6C83;7X7)%F zQ$>1B?l+ruC7^x^x4c3oT8~`9S*PCL9q(S@e}gAr!VWaUTXNgCKcql$Yl2d69Y|Q& zcCR_Bt=#@;cdnm&w%n4qZH_g*eW8a~tX8z119D!KFoWM@_!SgrGEE-V#$z!Rcz)KE zc?*Yc><%cu3O$!*hu7xg8z0mgjVp?1&I&`%ygr5zj%lCwXfW_RJ>1EwrhD;$a~!2? 
z>LiR4{4hgo3~xKg_-p!qA23T$eCi5`pPu1(Y~+bg;3mAnLTpqNawL9y(f<_$XUz4*lVk=Xfk6G=J9Cj?44(X|Oz*!B0L(w%%|SpGAvF zxgd$A|7Ir>R9INjpw2Dzpz{YL3G?OkKO9!{4-ueD!@KyirY#8#mTmIzI&L;9kdC#8 zE@(i?)+2|mD57ENaSzWVpE6``3<|p6AM;* z@-QPtuIWB%#aw+HV#p^f3L#y6)OQ<>0EF>9 zUS@9gNx=ppsH9x*Ene}3)&%hgB8El*%1f^p0{q&Y^PEYAYNG)+L5g=J5}{(3R^f7skfpeR0T=f;<|-VV9s zX)^=>KN?5-osAoz)(GyciBpsV%-Wyp@RKlKv?c0v+(vCtWk@}dYHRyd` z4kpoMncsQc5S&Wx0`22Vqx z&eaH;KId0zNxHkozEEPNJe)Pr-2r;T(iV4yZZ!ahG7y8~YP8{{ubd@y8bi$aIrk%5 zJ?7J7_j$d&vvF|7kS|b(_-0YtMpYGhOBdjFDG+|V!$uN#^Y5W!|GL29IK9Ab(N5qL z!wGlYelW-}BCk$b9z%gR!eym1kv_cs^3iOf8$dwCws*r2vP{RK-R6R%PE%Ivziy6a z`kxX(C~##SnC^B{Kw4BvSDlq68`_?Xp>)cVoagK^ zWH4HbQ9MVV+yB7I|H0?~`%51Q0Mkc9O#NS%^q+$T{2Zx`45TBphiUi!@*4kN|B4Tc z05hS(|1SdXL=|B-@+W+5i{_g>Q z{(-XmFM;^@aP{k%V0GNfm+)jBjG~d8jAP!{J{NereyN-xmRlb<_SaMW%Kkig&Sp&( z{}1f8RT9o>?q0H31KJG0z`lEnckmSUD&{jKhkJ3F{qBnv(3QN8QT9*$+K4SmCcujm7L zdXypK{mRn`8XxWsvzX#u9#k2K_s&!)Qv3apXO-4*TAd(|#HL9S;K2kHzy9qfh)ClA z!4s5^GrsV5Q+7R@s=&=b+HbrJ`QMK(QV<33T~xjOk_0V!G@doHh&hhS(*x|YSCa>9 zUO6gl@Y!)i)GMhs^Z(tQW_7Kpy7gJojdw-n)@*Yv~0ImHSuxg2w;c@Qv8srNbK+R zON2p0d>0bXBWC%m3q4oZlUhrF!G0jl2zXA&x-NdPN~lvy!pee zE?#Im;I`!JN~d^U1{Bz>PtPVG6oP?E{E3|Pt?PB;@V@=Uj>1&2CR(-W82ZiNmkjf< z-hyMO|vZOp{h7B4OJ%iZ&;B(6j4 zSUPzf=w9J5UGwV9@omowPN0X`^}$4u&6X~ zrHw~U>K7RjgRVwKVmjXHALb#359cEvp`-7sC-8^BE*8)@QIXjC*dc zehb)rG?nuemGlvsl`1*}C)IO{=0Iv?pH4nq2AhMP^fr9ft0tuL$OYdN)L7ZXnp|5> zWe+T}LpIzQoR1cU^A?_GgfHoP7b97X;5LX~+tO>_1f4MpIk_qBeD z@iWR&@F|0O*X3~Oq4xIiu8JTlmO-1!?&E~rGqn!z-tMfAIdc0|BW`iG?M1+aEAygcRpUCw_nnE zJ0tSi5hy9$a_kT-7jJ`ZfmM4EFMo5kIpfbay7e7=@|Daio>`NY#=17Ya-`&~|0foe zZ0S@YxwC||U&WVWl8{|quT-L657T9<9cR(?K^GjCJAC>GtbfgJ#Z(b#Lhpx!nVyK3 z0pB-t-SV@C3pA?v?2Nu6oi!d`j=4Gj(NgfE3-ny$c zpL#DECY#Kq6AF4ZmFEG6uxQEaPCX#1A3Q8@?y)X=%UlY{5|$`=(V1e)dJh!nTJl6b zT_T~sXaJs_-|E_Gsmb4M?Ry#(jg+i_WhF4&CGsi_&7noEYboRTs1P9Cqly z%RQ3>y0R+A%`@rg_6@$jDS-Q4I3>(qx*BSrI*bYwML6}km#RRY+6ghg+CsG;H?2Y9 z&X72@K-nTU+)_Gh-0H(} zo4qALpsYHSq^#`ycoTH_47d^td*#csTxXmH;vZu~ucm|$?H%?URBiWo`4s@rZZp7&Vk92hX+aJ68j?A1BHzNvIh_f4 z^#E*fir=4yPYK18)2M6ygnQYtG*br%k(@c5W|&5^VWMoP!C6cfeS$|4%EjQ~#fM`| z!-lh%DN%fO4|-a5bKEZ&Mx8_vv5wxP=6FMcSAxj*_ROxUCq`54;4V#d=U4WPUS0Bu zWr8CZ9Jow|+~hMdJr1jD3k|xYT_tjbpSS)mGH_`4vPxw9OBuQCZG2dOi1M~nV3LTJ^Evz)3x+-wouc}vgfKh0;P%i))6n^Rtr#(+qtDwIa_M)E8=3BLs&laFb zUI66|0MWwdP;R-js{ z;^x5q4@DwKHX$#}thnsKAp_pRoVn5C<6{aKF-Hb;iG=o}{BSjKpS0c(6x zG`HZ6Z<*(nKnlcDx3**VGRP-BYK9AS-maYt+DGR;u-%ouBk|V#%9i_Md7)UC!|`L8jv88bk3uFs z@o`=)>;2YL7;PBC9YL&@SX4JUM6#xLrWYoKyjoN)ea8Z z%g5NYX@=I5H+w!kOhFw;6ZkUhx7$$9=g9wu*GL5{e}DhLWk9N0zuFh!XH{raZkep(RV+5SdXCyI{A%;B z*tzkEbaiB7HUQFRI|@tJiqkE|V(k4{WE@5)I-!4_!eseKE1!iKaJ0j7SfNQ2oLg@> zd)9X2Uv}LO3|ox6nfIjqoz^pPdCqI3a>&76I@acoBqjuUx;s8>GXQEt|3TeYFKY$| zaRmZUTQ1mml~JGL_VjA0;Z44qSs+dcv0QE6J?r+$NHavU|Ks+7$-a{JTT2vHr_LO{33UGX@v8EUesGuZh^c+T{>64?@){HWK&Bdm8rTvDqHp zGe|Yr&lhW6g|u%QN1l(ACLR40Oolxw=K*1ddX#TVb6mj)AGLi?NFFLNQEBA1)*=z0 z<)e{H5}*fBvW$7{=&cD`A?P~%A)|#HWf1!M)o)P&Rg={TByjsJn9FLeO67=*V3&39 zY5TQH;HpNeJu(^JAUD7{7(q=z3+cb{7J^tc(9RjxCUiXbzK6TVGRz=sJ6`lNPVeW6 z_`b|COpuSLxMaVcd(`O(5>9KpTAEkDAR3qRmVx_|G02B`sQ2}|Lm_jBwC zB3P^PF*(^{u|fHkxB!SMnJrxWd}=jn8~OD=L8U>OTdflhfsK8v-#- z^zhe`wP3SeQ7(Yys%4UAhKezt3*c=6{q=573f!&*_)(XLZSs`f`dKlCm;nAuYWg~7 zAc=#__INq*S_J7OS^7}&%aDCk^Qy2T*(A?T8mp)VLDiO?6<^;iYj}Vr4_HMt?t0Vt23`q!SVtW zWvpa~{9hzRlN$}X1{|j>*Z2Lg$dmn0N5IH)AaER0V&Od9pPusx8I7u)yf6rIkBp+> zeb=j(=i9l}!jVftHL5x_!naikNpOB+bAr7Tje^x={Ur9%T(?*j zxkNPgfgF;_B~{`-+c1$Y6We3boaJJJD1JwNuGiy@rr_7zih+^|`&X-Np|##*d_cBYgOddeX1jseMs9rB^f?HVY%-Hf0g>|uDxS3afO$O1e@3mo`>t%*D){S{#|bBh 
zhK3L3d4EhzM#IP^7bG8}!6ts^CJS=%9qD2tAXxJ1IPu{fV!fLFhGMhZ9ViGr(fm1eh*uzs zK-+MtdLP+wfSEb6Q?*S@F8}T6Fh)=*a(^tpF4(*z(!!{SJau)dw>$4MvWgTJh67!t z%TZgO0C#d_n%7b0IV^lB(dz&rt6!r3brt{tSq-Q4f(E{ayN0{|JNu?y&S;{mPoW4s zentr+*qK)PRb8eYgAuhr=bwEp|NCG^Ez{ycM?x*m(B#Y2Cho^&3^#wCt|d_{y%j#|t-clY!TWG1?|ctKy59ue>7759 z*S}D1c6;p@rBQPbtxrQB1S^R|q?kKQ=0-9kU$hzV=GfNmzC<`YR%v_7&kir$1bTRv z???4=1{OT(D+vG(eZ9-PXGE9j?4=s&0o)7MlTBLnXSWyEY}_PU`|Z+A*G#wd+kqDA z#hyo}%Ln7x`G{>$@TXlA0Kjq22%q^deXfbe05@0i);A`=Bi1+4+bA0@;#ggUMIr3;e{{Gy(2!(QP7uMyRlM-DxoN?;VNN5e_{WILTeS89jlh z56LcS1W@2#I*+9SqQuoa=i=Ot%nJEw3Dv*hlwQgt57q@47+nHlm!4_EuiipV@bpDn ztFQJ&42z5U=gjUfC$_2gSdKqUjvd?b<7FxN_|Qeh-{_XJX7WZTK{kw-H{_TUxDEgDLV!k4I^*A zf7{{MTrcnBXGDk3s^UP_1HVCs@AX9#q^g8zpW>)%9MJTokN75qtUmaIGmz-}HJY)? z><>Lxnpv!`9;3fhaMAHMDZD*CCD~+4?HLgPFAyw3O%++P0SaEWC~bbf4`x1d{S(8i zsV%;{dKzq2H2SG&1y1+4KuXm9{mk2O!@GEnwNt55wNu2W#hUr=lV0%SeMpsrQN{}!AZm$pE;>OBzSB=bS0cWNxBTfv{3YGYxsQt5-&PA z@#qKpn%<+f(@Eow6|wDMXFlgXgoW#7$;|6#zr1a+{f_Ze_k>G3>E#*k7Kk+}=we$xws2X}u>f|!L(h=EuE zR4}pqM`^2BB%~wzJrK~e)eaqre~muV7Yo@3E!^kP!G(n#oMS~KN)-ugQ#A~fdNw$8P0Hezc1^B=9>1DTDEjG-cq^W zY!`tp4wFpho#;I#*_;a8dIm0!3!vdaYqx5V$k?#dk$K1|uJBSp_&P57cEw0M^Fi0| zf+S8DBO&?uxt({t%xByh+B6eF+;PUY(s4}cuRp20b$p|E_ukB=OdwG`8BL?5dvNkW zF`FT8u6kdFJx7She=?}tw0c(Hb}nCx<<6Zs$q+%KeI1Lq&86*qrwKo!!tYS23_XoM z&dXNS1MOAFI3gDt$=2q1Kz=dDCFJRLKVt9xJjBhSqZ-aej*xSr7__q_o2yD)kF=i6 zlQw60T`WSuPwLP40bH$VZ59)FEup(QmCk>~yM~8eykm<_;g{g+GYflydS2_eA4fmv z;p=$7(;jyX1sT??djgLp{KJeB{BlA^K9z4b7FssY!})izf<DACxWfg=a%PYBZno;{PlapCuR4)@d`TrK?Q%uRT_BvxC6PX?#-2z4 zk|pn2%liDYl}-L6Sr1_i>Ap-EPNHj`J`HDgu_Y<}DFfu^s%3R-SLE# zA9ZMh$LaZjWDD>)4_+|zN)e9FUa9%k4!#S<3S`!t<}v1iC6RR}@iqnDc7gKqDRg;RfAqwjn!bgDEIY9u>t@j96MGQI*0-DbO>49lsU z?Rkwu2`5fTL+z7)AFS!;>&BA+=u<}gK5Or@SMXuozizBefb^zGe9D`lL28r>bZMox z-w*`$1x}e_OeZu$Xy4xzA3C~|H9U3yx;lPk={Z%7cZAfz~x|1OyzUT06 zar3eA7yC&5Xq4VqWMAJv4sW~unU)pQK7Y{bN^RzA6ns}C^pw2sb}|FZd%}mJ7qUK> zq%Tfi$xZrFwyJcve~^~;wuoMDNF2uckZA*1+#oEQE{6*uD{W%e$_mB)HkGa=8+gg@ zFcABtQ$i%t4EsnaM}Q_e^zq9k3>Is*9$n7$5o z(DUBS041#Rzqd1kFp>61$aK_tdjLEVB{I^hb$ycj5(S5vp<8OA^F4zh-ftLlp!vjwB=f1KhY^6F-G7Y`H3AAgVef~jHmW@Pl2s0A5o zSu(;Bkju`V#(?N}{~?XS;ue9o(tC=(vZb_dK2VzS%gmZTJnfwsB4|!c~zV zqCH<~=CAfzbUf3CON3wXr%%~VQEHvK}zS4IfJrawx_l2Sx_Zn?jjM?OF$;>A8l@Z@sbt1K{^ zAg9JcV#gExaXMr-r5syra_si%gCV76JSZKs1mO`fh^90SNR$YMJ3;S0=veQGpnpn$ ztD6obFVA0S%Dlf?wv)4@;Os+PI(iIOjdWw}JbpJb8RLR@+;pB)8~P-Db2h`*ujzxp zyCB^YhWsF8O)i!3H7V2HwAGkcEMx3AI+QY5-eT!$S#2>4p_v%SED-=YHkITGT?d zqFJJ4lJhQ5nFrn&O!;ypfkx8$A*x2v;;pLegp7+Tk+$wrYB$GzDFm|@Nf|iu;h~bp zy65iJ&N5*M)}Rf&+rSAs?C(ZOgbj1UtJlg&2R~H2TD?Uwl*wb2c+I z7gFvkzD}s*4HTYLB1@;ojioVfq)sj6p@abq*ex^IS`W?bDF*71gCR>w<5JJwGmkk^$6pP@2q!3~sVgLR3XCTIW z4wbjvAQ4BJ`~(Z_FZpYZz&8JE8Bas1BCVUa+2WS_uhsXiFD+G0_{&;G)GAwl0#&|- z`T3UoYhL9Z5FE=ik>m8e*5BpSiRN2$%2)PeH&dcb{Q)@;(Mqv)P`}mVII5v2GkV$|2y%|N?adHO)6PRp?`$-8cg4-^ zXEP0}X3Mx0t++CWj5a9lBfgED1NpIAI``UBKg5nnQ!U0mYf6u;k@T!LvOBrCf?lyD zi)}~*Ot@D3P2x%UvB+yd=a)x~99cpxw5^6T#K66T#ENUu>f-!2@5ef$Rei;bcBuXX zRe~I~2GeoI$Aq7RVkHIA9--1HWU==U1kcef~$QadLk@>B@# zv;G#~>}YTa-NGirD$2D-@YJv{?Rzu2S0AqIMuK_{s>}+jXNtDTMk(SO9FJ826pLSu zkl_U`5qW$b!R`480_MAG^Q$AnZ^%qq*SNgsbAGqB-iSB}jH)_M9qzS)x6S!tlzvQu z>5xdk+bQuXeNg*3|C7{En^yX+ne4+hp6QX$)q;my-SFu0qN?=CL`M)7>M5Emz+`3S z$m?d3yJj_74Xh+SjLQ^$j%DEf?geVH7p?0+^8m8B1(vgWLOM-Ov7Dx(ib(EHiZAb+ z`&{8q(H}geqE|FJ@6f6OaY*^cFd#1qMe+T?&t;AjCjxH!`Bkg0?gd(}T3e3S13r~& zXCUeqVC7G*`-pe#tO9A+0!bOwf*^_`^F=a;MLwg=pYr&*C?a~Qz9fg-6xDZ^o^)7_ z`;+m&o88?NaJEo9rfclD%aWM)PVc|x${X553O3@$a$!%ND)mWytg-wYLH3?3^l}%GA#nYok?M%r-YomH$oIRzW z&6^V7ekg*#jtxh75{QvZBiOD-_$Y$F_hS3QTH&U>||4}Gk9oz9c;$O 
zUsBCCI=Jerf+9tu4JY455U`LI0wg3=G!C#`e%fQ~a>IRkC z8~fs+*tl0^7P_1dlAfbme(|MjruG2#C<h>8qx+o=~C&3&<2H}W{N_ev)oQq6;DF2s(FF-W`8-vq(Hc8b~I?InWWE) z->RnZ*t$e=Suic~;f@>cf$2-Po=6a8ZQtbE&=Y-XCO{ z&#pea?sZuYL#eqjh$1|Ms^*Vl{GR$z(|m1!MYXXi=XO4X^=)!N>bPyxm?J7NV+c`@ z!{e7MKJKrq(?j%b1w{YDk)wE1=a@$O_uGe`DaF`m{3TKtq?-Q2TN zStJPT8UbQZs4ZKl$-JxO^LoO9Q}=UJj-WOGRFd8e+@TGS4XJSZ;}guFb^=GFFJ#yL z{z9+nI?+T2hfLzLQ~@HG zgc;(rF2PHX`wGd*-jWYd7Zgh2$=s46NQUYgFNQuQ7n+Qe*MP9DrGjb_%0=c3*1MU< zCWfuK)F+rSUL)ep5@vx<){xh5w%H#ur^CYJ_G z>yyZ7rE}Ruq+Rt)uv1@hbcL3DeS^aEDVEZi=)D0kDY{k7gfR)9L-^sV<{2Fl`Y#!; zrk{M$^U<~;GVufsngU7jmBv=lzNkM5WU$v-m!%sFT4AzqFY12LTO#VjG2KW!wTxWh z2O%y%XZYo3&y59|&o+L(6ki@l(y3?f&L4N_l%?R%L3lINpw;MtW;0UexI~B!t`*n9wm@8$LNdm7q;Z4@ z?bRprNlw#GVhp6LD|mv06O%1pgFdG;nxtF3cDFljJ;sN4yPUNHM`p+&j#AS3sp`(rCT;yi|i1%OZn74!v}X^YI(E}OeF!h%HWt@i3u%fV5U8VQaq7-l&v z((W56zA@e!KXSR7@yOD_9+UAlzAY9o+jo6h-`zI5$?%cVX=6r}q(Mf#+9?D+T^%nM z^CvK*mB}SvPAaI^&f!A%lqT_uOQNb_vE75>@N(8@VDQl?Mz62meM@-6Y%6?mjo} z_x;x1-#+L2`}mJ*7PDraxu3YN>sObA)*avN^iTzqcKiH5as?4;?<0kB5WL9%RK}57 z4^l&q#iu-U{!EIVRtGBQIK2#x^u;}OZTAVpJI(gHe z2G5STkN*XYaVawLQ-9*`^#QMkIlck>IWy1Ol)w%%qT!Gn$pUWvZTCtsnL$ikA_T{aR-96u-vhVI zaH&oIJ2M8exT~4B*jvkBv5BsN&)HM6CQjX}W{>hPtWnBa*{a?O>l?51@ z*KU!Id^f&}pz#RK#P@z8ECz)hN2vY)n?`DwLITE48*5@&JM|`NV6YYVpXc?npJLzp z45o@L=b!WJ89c3egKdb$+yn)~_a+25##8!MJM0>*rf{S+B=|9*yHu{cj_v|xH@ilm z=1ipBC#Z43AW};e#k!#J*5afE!%{W3Or1Na(PGZ0b;zL9{2~6T`=f4lT9OT`z4O%= zH)kBwi29>f@?URnpy5U{v2Q$uzgQ*F16Y@GZzTNgefLRjkR&R>N1+kM(E5@-)8h>+DmA{{(x9(B3))4c~h zp{^`yVmsoYzkg(XaxJJ2Y@=pOp`?Kj;^sMt&uYqvlp|up;~xU zccu+`ZN}frHTKy4Txt!jatsK5v!@|Sl*2AY952Ld_x1%XZm%ib22Nxj7{An*z;?fD z4X7ksB1{_iOk$6jUEeEqUw~2~1bM+3#A`hwdG;_Xq0Oi~QuzJIuEdnySB2nozMiW4 zFhs#pp31?xwM)f zhYJ z4hr3A8ijo4sI3{N^anGe(&Ey|8T=~D6Yz#8Q)~J*l2}l=)@?I^|dcSIXILEVT(zfS2WF}hyKqJ{&g_mCcR%zFC5AXQb;1vp3C`S!S^^`x660P zVGWljYA9GW_{(&~&KSTuC!N6+gB+%(;TNM>ZBs6`ULzL7hVDhky2CewrSVfTrvJd0 z4&f~&Z2sQ~_$1m!A05mefZd#@)p-;W=&Dq%^?pNVIU;Nimgg4_z5FQea z>|NDD|kiOw| zE!Fju8s1!vmOFozbMuC8zxnH!{v2Q+Gr}07a{r-#Ddwz3vv(sS9wd}*n}8JbUp zK#T()*+{&fULyHF$8diY{rAO^mr$0B;}76}7a1#<75Yq8Jvc^3#(Nw9bgWAi_H#Cp zG@N=}+Z9)_u`eLkpyLPOR>|^r*R~(*_uRvr5853SD$FMFv%JjEaJxJq;n#hh^k3Df z|L61nyI{0z2tbAiB)NU^puJFT_3wMLk~vkuG&yZKolPsz8~Hf*lulqiE5qZ)UMP{* zewe<`c=`Om10+`j12v-S_j@3Z{KbAL@6~U!triSsIS}vj4vVDhUVB_FZ=G4|+2piN zpd=d5FQ;BRmoISV8n-pEN9Becn_FU#@iZG_zXY9HD;Ta@Q|&{n#9z14e;47;K^BWF z0LJKt9|k>$PG$}oqta2eEU6UCTxcsgbiF{&$&mCo;AyEpi+(S7T{H_0Pj|jyPa#?0 z^YHJv>LbZFE;lEHesH_fj_>37q^z*L0Q;tX@|#B~szDYw_truDYG?NA&%8B`aO2>~ z=!uO$>*2PrfbzF<@!z!qS4OxTZq@lxWRA&L!YJqm<<=7&Q@y$OLGh3Vn6&RH$|1r* zK=3TWe<*Plf-Wp_oU7eh0o2z7TdmmZz<6|-4eU_z>&coNR8t+ValAtO;kxHT99~aC z|35A?EQD~o4@C>p9Genw#FJIlJSXePjaz(CZ$N6xiiOU4Kg}3@a$SBX#f{pwsN;K@ z*S)r9aC!H|gRticGc(e^K30fMl#-KEQ5&5P!Pf0;R(Wg279$r(=6^62Jjt?I?>7zZ znD>AxByid}PN17#Z1o6Adg^e`SFZEO9{&Dyka2mj4z0TYWe&|YK%Bzp zJRZ4>&1Q{2^T?|#Xnm$}YLX}nsE$sOyCqE`ekXbxx=et^L1XQqlJ z7EI8v(=6k6v1n?Q1^7B8I_;z@WK3#Ynme<#qGP!eVm*c*pOusIo!2_ACr)rYqF0!$ zIm@Mf_30(WYF9+#SSWeIScSRf4*-FfwabQt#aHJtf2@jiJ|5CAIb4CKzALidHq4QVGl?U8c$x%hg9EoRE8JJbKiK}? 
zt`@*s62ONyxLt9{X}Xm^m&QL@eW`3}9V%Zr7q3KUf3|C$BNhWwldPAce{79eM)TROg+v*j+#)aN8*d%YbDNA#{|vADWypVRV(J6?v z5tCZ}RNqvvRq9W`^>R*Tx$;fzjl@E@=%lv_0avUE5^s2cPQP??yD%(Tl|`QdzOFyI<=@l(&ZcWEmi5E??%Ggybj)GzlATmp-7Hb9*{S zN*RKRX_VQm7N%5icdIoWyE|Wd<|ATowVtopnDGFG>!&qNi(q5I^jduLf^dZy&rDvt zJrCvnps4y-nX%|eN^|4ZtfOK0wS7}iw-_Y}czVC~q3HSdvPD9?#&YMC>z!u{Z!gx1 zz_aBp8*r*dmh+}<7(^P5_YWid*3a!URl1#v1t-7n@!514drLQvXf7GpSUS*8msB%H z9{sh>3FAYBtMdAQtX$e_`;nmRrDF6#MOb)C?Rn`yG)N04F(MzzXz=^~%ck~5j^kCn zdaYQXX7~(vkc!nM3CA<3u&D)FbrIH`%}F&o9xQF==yrz$XXOAABOfC=WQJ2T^WV8euw?$jo0>Ht&{jJjxTa)DLQ~KI$^5r zLLLO$L&tv`kFNvFHS4T0=5U{*KrStbPPH@_aF#$WET90=Xd*}1O~>ip$jS?c-~mZ* z2A9Q0hH~W}R*AQ6;m?C`M+L@oGKD{rROWnToFAUpC!@Lzs>c?ap+u0Q0g&1#P6@)l z9|21=R=@ z)3Yox+y}KXqDhq!nkB1=?=VK`UTQdy3e>gVEoyhD7_bi?w4I7WnQ-t95-hoXsj^qK@by^HQZ|k|vP`CFIde2a?lk}OM+d!HqO7OU6-f8mR9QQocZ1A4r zql`&SxVkyhbQ#HG6uhgOkR;WKrT=GBDm?|~+d{}LecT!dF{`;34 zr-2mnfnGnrgS{UyG2P&JilKGQ1Eg+5cp-Y57_l;azS)VEIvfqBD<18#>-oR0P&R%9 zc7qTFfMFG@ zN}Z40amyHKNa`wT9X;giK4Uz5LaUBstdyhRG?pUFILWoS-@%E}rz;`e1U$8}_FEh+ zlGoH&wEHw+cFL9Ww-Bx|RA^lRe-n3i%>`=g0~Jer}ks_2vmxM{Uv)gvb*yG z-Kn&5ys@-SBIpvKeu^d-p)JbrAd=MA?iuFPm=8)h$VIGB0D`zoRNhafVQbdZiL9pM zgrRdA#YPz03cwG{t3i|^ECWExCUtO|)&$d={sAz-9w_ubn=~yg0t5#o7a;0LrzNJe z5G@=Z7O7TQh3%;mWJLhG8ONgzAH|!RNbn*==!^L%>&3AtW|)SXvW(Z*doahDLabmL zB-2IOeqGL5wQr)H)A96`sFZ5VaY%R(`Lwm=DDM5F`>7UYeu$*%-Q~xDjG4~QSGkkt zu!9mAvfyP<)bd=D!x)q;gib+~bIvM!slrLEDHj-<=&G00o}N)_@t?Lg-U`2W4;{L@S<@FGyo-M9Xu7@sGEP_PyFZ#cx-5zunTEB2! zqObv*(eR6nXQ=2ukS|bi3V^3z)T6eJoNYA%2aT%MFf73J^+mQkYBzKoF;209`dx$7 znf88@2i)R)F^IuzCwH_atpIgD_a6@~$fIy8Gptf%_wa=@nL=0gQbj#pe?SZdt>l6) zDD>#{Vky`VAaro*^!QIGSG;>c*(cNnx#v)K;`UQvG^2o$TXO!u;)Lr-F?D<6qHwU3 zA(qDK0-e7TE@-+nd874iENE8V#Yv_Xqi13TpB znXlk#fK(q|$C1on2?zm@^w3pW>0VmQDsvI|Nw@+Z08F)Ysv`}r=_(~O$xf~xR+ z(u%+{{wZz(VI|jM0v3wRHK?D+le6e$z70~uQfwki2oaTPU{G#-dQ?C9`%p#I-ar3k z-bLz#srQJ=5pB!mn6mPtL&v+@*`UF1QbYbx^s=9NF?lft>`1^xw@V%VaD|yd#&K40 zreeKvW>~`Xpw6N;EU5Su+@%~M3WvPu#PcCuEz^exh+=Fv^cDGfJ8|&v8#dZF2q65N zP7$OYE7v9hG|G7y>#F&)(@{mxdPXiPW-6Ohvo8UYvkIg-?lB1x-U+c@fmiVJpXU=z zz{#1X;da8a8f5WT$Aj=1FKo@V^x6GQK$%AEsp1S-1I|u00t64_|7vbH9CvN&+F4%w zkLm_XhOemc&cX518L&T$a?hKE*W6^C(2HJWEV5oI=fwtGzdG5T@JI*4RV}7z@>R<} zfG+6$smh7pqMer6*{)?BP~eb6gM+EDK`#q9v>NtA#O;^+9@%)$CE%<(Qat=NtY zI+=4}_k%QE?LIrZo|BI!qk!qw_ ziPKa1CwamN$jqYK{2TW>j@r_@K^qxjkS~{7-bVKX_H5p6#Ld|%H%?wD*{<%%vi{|? zC3m+Zg@bqOH$jg`Z{-d}Wks9gh5hU&Z@BJRXZJg|MzP!WUhj8&HS+H-b2U9?>Dfhi z9msay%HppQ`}V_yftZzdpkd`Dw|n(w)EqYd(fU<&6{c7ff=&3_{+ESCU=m8`*3^l` z4W>qBeIj)DY`+?_1rLdPm+rpxJ*Tl+{(OdhQENW^8ri!NY$UfTqo8O}c)8 z&8X>m5afJ1IWAy4j^$c9o^6|a+mM+5Up0|J5T1UX%2LBUXPaHvaQSfx-uRQy>42cP z!uI$?&((`Vx05m6n^3mL?{JmRZf?$mfuoe5JgcE|ggjQ}Th%i?o(@<{r8kIzCvIr5 zq8h)smlja+B1L*%BWu^#Z88h+gAkqfb4KL?FLkQmTbu?{qHTNu?CEcpu3p=q1l&v| zD{jAw;X{F7RX+-wS!B(>(so_RvStKc@=Mk06HvQR>0~D^Qt53~Oj}C(^vgY2*}J#q z@;?)QGo(@9kxRm9QLYqKv~5|(Dh!;0D)?iD-XCbqbj?YkfA3UozJE_!ZYij+I6%Zl za(QB^!zHQ#2L3p6y1PPysLjwXTt;Ky-Y03W?{b^T?JK9|Z|f1LMLnm=Q=dY!fQ8E(eo7Iw%S&T_|zQq@`flj%%$ zP_l94P6gAC)g}!o71WHDG! 
z>csBKI9@pa-b<^qw>DcoSrO1HIEIusVUTE)PD-S=S8E9;74#nNj7)=tJ3AH68!;TxKvXuno>1bOjUFqME=n;xH%UdUaF+Alyg?3MijD8>_L ztLb=Mp%wo<3hhd{D9V?n*6MG563~afb0MJI%uY5Zs&vyFGUsvn2kOLOLZB2x#f+tE zV{$eVn|MBzf0N8`5{o3yvERz15dsfRUnLdQ)X*#=G!XL zC9I?k=0ih#TPhrsvjU0C!4yaqOzG+B&0@xl>eItCJ6@<$K>@qq0r-xT+dXghk2LCNTQtiV zv4)C8t=7TktNJ_b@MC}uPRytM(MD~p#-L){p1*FtfO79566ZNkdr&EZAvyS978Chn z1JrN1C|D*L&#M-wsZ6YQt)HiohO9RTUIgp*#~#!%DQw{1hG?%PHR4Zh49)7Jkn^=3 zxOjd^{o3?O1=Cf_fU#>=2u0SWF2-{1I$N*R<%tz5BA(aJnqq9)ipEsgRMonk1P2|I zX;3-G^i7rTv3B3f!xyL#RMhi=72Aot9x+{ZiB_pq%9)J*s5}lHo?y#IoKlAkYJDM{ z^SVW0*5Rw`gO&w`%vy`fTk@X1+!Tnr{-E=80_t&1jXOc5FMQwd5W~aJ?ip_81B|5t z(SX5K$asPJ)?;@7#^RR44k#{eNc(C!q0sa_MCew`FHQE396EJU37*)L4vP_ZUA;Z- z&EAARfa(VFg){q9cnQuq@Wr}(X5J{2k92g_kvm3U!P)z|`S)akJxf_vhD0Pe! z+a_uxZ8Dw`c{a{O^T+qQ6)n<#6j3`!hn^B}3oj8O->^vhY}R#{DjS=+>erB`@G1R= z7Rqt0_wy2(nLQuc=;rQhCj{5XU^SKL^&Bu@Ron2Oo*&m?WT1ONN?1H@MVy7o3fz_4 z^SXE5gid7z4c`h?wb?-R%930x8t6V68dd*be5J9yzvv@!jWF`_>-LY$k%=GT6q^>) zm@tFJrqL&yZ+EhVuzKyA{;EsSkvo#L;GqYW{~VHSyt=KZ$)DceZ4BZz$oRz0C=PiirRVv8a?+J5$;tU z8U6f~MH_s=xx)<3FJF6r?8nEQ&BrXY5ehIGQC)PD3yh;dKkoD(r4To|Lr^2$rU_io z?p%l_0X5F$$Kdg0(g8%NSKha~F`5O@#>T)po-5Sy%a~cMr!(zlLq6SLD)h)#?hLRB zS)6&AQH1S4c{x3;Y4jFkF_2s^U6?kigpqe&R7Q95X6kE`P?lM;^5crl1MS#Wa-TJ+ zJc6~X_j=y?B_kt=EsxEL%`^PI2U32%1fZ2|;iy!zG*73z!^z42Q2ef>1u;!F`Hl17 z<1E+{EfbR7OYOB$)XLrFD%9`KXHZ%CutqKy4y@V7EgS4%k%t6;sRtTdmmx$+1*MaLrl}e}`EhK(O&ykN@uW zhGiPI%XewS%;|IpN30-xClXh#^Ygx3zUNNfrV~hAV@O~uVvA9FzxLA!REDd+zq2*w zT5(d)(o>2#O@$yRm8C%PU+1w1A999#R1j*IMhxeWvwE>kxVgErx|)b|u?SC=^H}6c z)I67S{#UWF&*q|fugqz-+ZC0nw^m?z)d25`z$)lJT1|? z)59LgtMDE58ot7P!mfoTqyRdixuJW7*j{w(nD1rcoGA+=q)`GZotunR=yEB(6%O$- zk$!*&R}qZ@9uXTA`<4#zWZbbd@F`{*j=NwL-M%_j*gSxzvC_)$-h{!T#dQy)xjo!J zmDMWdX1O-Bwr*jv-|wI}UUpO-iwx-Sdte0VPgK%OLV%(VWNF7bcxgRPg(Y1+#I#0C zAL)%t6JtMU3tBdQtL-eDEWjYZO&WsauxXsJn9Qr_ZaO@T;r zz8{`U$3nt(B9;tFGzP(hjnrv2M1296uU?>(AWZ2DQl4^$lmVZJOd#u>8!VM&oeI!g z6rUT01cS#t<`(D7nx2Qc| znB}=CrosdT2F;vfB9c7h;USJ2Rno}z3lxVC*@PI;PTg(p!W8$ca+$7UyZI&r!Qd`Zl z5XCby`8{Qt_dqW3!=aF)0Z&T(wKFgd#KEb%<6$E(2~W^(zaspKBl|GCyJI0yx9p1Y zW;NwNbOv1Z(cq~>t7w7>8vjgCw?Dk z(j5U#C3w;ugjM5(pu~GXa^4(GYKu5OVv40gAy8R+P#p&P~Hum0P>vV7%M*0)t9;!%2PyIk4BS<>Ng=xGu z0~tB*=!k}8a*qT><|j%Kx;kEAz2o0b$>aN1?_?{hnjc#0J1w=LQBkCXL0X@%W#R)# zxRsLmco~Tj}j)Nwcv&Jo!pCJ0e7ZYcSC;3NBeV(+%}JI z7I~m1Os++Kq8|r-qFZP#gk(&5Pllk-8Ww6fTcaf52xD>B9v9MVl}!8;khtq`)oRhS ziEy4s{e#Gxfy3|gZE%J=A_?Aven^r9E1JC_x!C)5~I&9R)xuB%7XNDlBSX4inAgZ;!kExGI;&Zs8o)+f%Y^6gq7YB^}|xbtCf< z|J^Gg@bIa^Xf&lm?u|p|gU47tVQJ#abE$2$NF(TrslM}%>f%`?oBP#>DTb;Hb@maP z_iz-D*-3jjar#jm*^)fy%@z}Vh0++68!~A82ZkovpB!jd{~WOU3u^-KiYKJX56(dR z_ISj?S`Q3FzwbuT3hyr#)ZsWO52F$e;ml;nyF2oo0N(4zKNT?>Vaj*f& z366-mmhgMP>}L}DfCIL# zD{}4q0s8XPEm_VjoQU6rYm^N=VdbM1`XtEqB{w;NKN5~_A;|z)kz6K=E!`r#Dg)+C zZ=bz9@NC(DHM+~|#Xoe=Zo_Ni>Y~T_5IgJ!Y$4Aws5R|tfg=LR?NVIaoPz(7Jb&H3 z_9H5Qw{WS+R=2!-$>V?jgm-QzIi|8W$S5I^2?d4DnZyu zTl`9CXx9kk-GX)~<-_qKL~M)j(^%Co_%9$1jXTc^D;cp}3vuKC-sA-S?}3FR)RYzbywc_GwB%&+?L#lnrpN*5K{f)|5}{}X@I z3y|}Z{s8llP%fE4I+T*T6l~SRo=4CsuP+Gc;#yVQaaXmp-)`5HuJY~uYaSFH9V}l- zoB}T)?Eq0N7lNdRk2uzDP+)JU=}D5y+Z--)lt442Lw{Q=+U1M0&eodGwpfRRb6ndK z%wa+64JWX_Tnsp1Q({1fEnh|ZK?tT=v0y7(FqhC2D?EWv8j%@HleC_RhEeF&?UmCd zN*W@q^^+GiTECX`&q|7adT7~ST^#_VY(}_UB<9GAUu6lXU}Bz}ZOlJK(+F@?v|~{) z)#WNN(`@#oe5szovSesn1L0jgMSCf{zG&TRi0Oj3x@)HI7ADd6Uez`hsBeqAy?`w> z>%6)pkya<8`KI+2AOR2IJ-(3sMcr_~y;jq!n4gcHByFo10@5+U0hW(%KVu=RZkd($ zLD=9R)1eDX+4*&lCK=&`U|EQCnHJ^}ki*|QYloWE>CWpYL383_$!g@G(#QJ?X%p4M z=OG4PnzlnEwuEEu%@4+pt(*`i1R?|JU$~9GP@nDODHk5#O&EO-hl6``@kUHo0SEaC z=NwD8f1w@2Hb>GS^CITkx}Y?-480J|6j9l(5xBj<>HEW8*J*N@Dlshy(s+ISanobA 
zmbUn`!)VwQfulQxsXDK@8gN&}&X(KMYO0h*Q%SJCzlTqxM~TS_=>^-42X7Ew0q) z&OI!6ITUPnb%ZR{9{y`9eGs~wg~VE(Ih{|xeNnlMF}zR4=3^0vbI-ehbKn&c2De21 zklFp~b=;(A_n;}5ljI;?A{ExO(e0`Gy$rXs2xogWh(76S6-Cf+fu1p{x2V8TFC$3z z>lY}si^{k$qsNtajWn!y3NL?~NZln;I~<#^vI^3M-j<&WVLrrJ12h#{HiEjQ5nN{r z3tON>nS;NO3vmM`uE=+oB_xCAzQTq);lhZp&0(JJ zJ3+=XG81o8cy;QT!^w~fv^SLRPN&0)Ena`zsyroMCqUUzcRBj)xO`2sL59hO=Wkab znCX^NjY=8Y9nEB@_wm|Rv>(x+K>6lP4(HHuMQRwrO-*vatGZ(gHL$ES7Uf`OMd!jW9{w z?2&|hEZBSB95#>o=$GwOjuUJ-SC}Qi#qE0sxT2+#ro40mUVuPx{?h=R%DE{CzJ&bC z=J59-cT``!RGw7{e0!!c&?xU=rM}?g>+&4%R$pRe?mLaH4-m?=);gzM9`p=CUPuRd zuIJ3=7%D{oq$&u`V4J7!m+jGuykKw-VI!on!ZtmXfELU-%zHu5f*1j}d4Yg5u&8ZR z#g#~O#9_YSm(kjsizTUeK3$i*nB;BzkNtg-&wgGTUU8f2sbq*6o|F~ZucKSCXTZ41 zrCH=n^zi0xy}>BIYNs1I`MaH|t&R)sM(t|>8dgz?G$t@`>0ucB(V_&k0=LNBhGS;sFc!xz8B~4HdZ< zZO^+jiTK>q%Jz*mL<~utyNZOE!#9WAjeT1M!%K+jJPY_yh^bh88~L0LF^p%ctm(;p zi^@bd*}Gv%!l*<_x#K%{Rl`Oiq7P`53+ljRp9Vv8->T@1_4#a4*qqljzul9vX4EX_ zzt;0lJ@My(bHBJhAhehlV9=-sjMYKDcMhikSQbyl_sVoQi1VS%GEVqR0p|N7@bplR#QZmHfwoB-2 zq29$YM=8Hx1Zf(|)<=;}V<7p>RtA2LYOOEf$DZ(2{j^1)LLRLCfr*osgb0|cCt?Zx zyn-0GoLg96=JHF-NKj9X($RX}&3tsJz0hRXJrl+PQtsbzWP>o30?zz^?0cKnG^*)$ z;uNuk+6r@2hOX2yBRR?CyQp|B^FSayO45i!Gp!cd4syRuoU%YM&9g@OcE|=4-}g`I zKmG4tkL_?`+!w9A?bT;jXL|<~aJ&*0lb|gL&k<53<2kI17V9!qRY43V={88B z)J@7hr8Lmd_8)CKw!o=Q-7wS6;V-LRxT*EH+x6^YQ%!}CeGT!xSR-jTet%Xl@9Fut zS%$k`u)t67_WTuoQa?qt7oQweXR-Bi=To%`vaLx&Lppd9uY~jEuz5+81syv*kb{vD z8h!O$L`%RZSX^iA@|DV+%l=@hx!-wT1Wjc;uYCqA;S!4IYxb;LGwisve(K8cI~Y-Y zvIP|HPd{pc7Cw_B!(tZoI$>p2#`iR#L(3{vG1vVN)y)ctT`U===s`qUD!IjRt2FUl zM%=3QO5~@93otjA%~1><%a*5qJo~2eVOs!e zZp@cA!4rr3i=s;>0n6*aJRt7Z$WA9+>?{V#bUU%e3j?2(5|d?WD&i?rg-HYLv*u>nlWWc(tRM*DNppy;Po%^jDNrUGo55mIGq7uj6Bd^VGCq!)5Blve_Rm zUQ>w`JO2D)Sf-Q_uu#5sdp57@ob&tL8}CjvK#P5FbeT9>;SnczV%1>gV<&;+6Z+Ur z>NADq)v2grgDbM`Fvs?nEm;8-vO`vo!>7@G&)VrjH11mVNkH3IO3Q6LJZV^|h(Tn3 zKJBvCH|pGda}#A=$=q;`{zlMdCEVNbn;J^KG)%pv_M!Yiel#Z7lP7jgj0QZ~KXFS$ zE$mb|9hoGrCB8UFcHYVksGkTO=pP}y_w~O={zb<5^!$r+EiWEJAIwcPsPW=RzN~W{ z*Vv$_>63#SnjN-(j9`H|QQhGU*4b!`21y=bocY20>ZOAp`h?3~b5+bj1AMC)7LsRi zDWGuFufl=C2l+-Y_;&{{SgC&YQ^$v53Lw{9f)+~WoO zqF*L*R>>p=8_6nz{;muDbBV^G-G^i>W|RU1bZ%q+yi2{(B5s^M@97FcAdp0gr#C~B z+#rDlH|%o)_nJrg*$togw?e)Z7B^*P-Y<~kY77S)CjG^UAi=;%c&|;A8D8+yxAT@J zAJ3ptVi|+|DsphJ-th1@E3isVYzG-Oi=8!)enEYnHMG5kjr8ybwI~b?Gl&VEDsfp6N zNJKZdk@gfzCkyMq>n^P7ZBG;qie142VT&Uv>vx|AOUK{R%grVSwl~e#9>2emP8hZ& z-njs*`x-U5Hy4mbD7e6RF!s$??!6oXuueSO$CRha1A zMUTAmfcS0C6mJYaWw2(zd9S&KN;Lu%5pDdYivD7{6WW-uiq?Ovf*O`0ZaQ0OpS!N1 zw`DX_RrhfjXwB{w)n^)a;W^Eg*+Antw!gST`%*$|L|J~WCS--;8>2D>ntzb9=ltnT zk}wIG$4I7Pj&#D5379O;ih*EgE@)@FJ$isiC;W4{6?;B>mV5p>g)?EGhIA)vjo7zi z;akH|zp2fv3cp=3_IqOB@R%^o{-vOpbFR^YK`4V;ufM@7O05H~a{@bWYiR2cikHY< zedU*_TE~Ddd*kL`p6%Q|z5+;76wlp$3KzT7^^w(|AcB8xC6l-k%6;>0R}NsnLJmfR zPZR$+&Z|*dqD-^hE@fZLfoR`EV>PG-gFl8m{Gbj0Qxl##{06+84| zsoHCJkEcxzo%Vkw07T+=Az21}&Y4fA^<$n7U|gUdqDHJA1Z-?krY&ka0O-+9O;;37 z@IXv`noIkWWdheg;98nG3Qm;FIq`7U7t>!w`Ob&kXa?+khZWNl@4?XIA=+bbT1bhhv;b-YC2qyVu72QE{`1S)N zivHyva+WXO8cG3%4PuDY=!3Eux8SHU%70d-1PIrC!E`hFw^!z*gv)J41A^Y4ghx)> z30cxFs#o@opovn$2@2L;e7Jk?)Muo=aPe`LS7G{mmg6hFPzOwgN4JduU@M= zQuFw?&$qc6tHfEDF8p4_9Xd*3p8q_yySUQ!c)r)%@yGRXySzdG-rq`H+E2`86gF4( zW;V#)K1u4^Yq*N%#$tpJ`3vgqhg0yuq8p$YC;gmG)b*%ff=Jn|c}3(r6y=*I>As-z zv1N9RLEp3$HIYmBq<$hkjz&THZq$)LqiN_4I|MPT)!(TgC{JZz_$weOPKk#%WE%6!6H;)nS zh0}}m9{o~>9TzFKn1AlNe@XEIgle2`Z}&VV32yt5?%ooF^#eFNr`opishDk;AJvm# z+kw7`q!RYBr2VgK$Ye?KlrBPKEv8nN$DAO;u?Cf&FH?2kTeV!2@Y&CY9uMK*5tay@ z+bKD8jbET3OnW|gMGEYH()dDGBxQ*ro#;X78CK7T#gKC$U{KQ-u6PQePS! 
zm5)E4rEh0$s7j}^xH8`SJT~RB%{Kj1`22WJaEHUV&=-cl;O%TJ0K7~L6Vc|{jctH$ zGaA)F zyWcrvOgNMQh)6m-r^APA9iwp-u$VtBDPz~qs@FAA=wvlgt#+w)yj>8|F~QhXQ~tnW z^B+cTjbh1F=uRx z!IX@x3Cm9s&lk>0?hznY2BqKVKw=N$KR+y>Xga!1s>2EyLYJ_(-VM2D(Qe49r+^!E zdDi_KVHlnHqv`=NzVif%u3rpixR?&wuUm?=Eq^3|sYc@CGD0;Q0JZC?e{lF{?UHXh zuXI(l!#2fsJ;LN*i60#?(d*CYZ|shuTDB8OT5P^F|7rCf(&lehL01SQ-M;u(RQujZ z#G4(jY4agrXLPS6=e9gy7hNXQNUpYBR|Ia^%s101 zEWaplM;ux832m_VsrmNmS_-xZrYSh!@mTwaY%q#|$&tfJLv6hE0=3Zc+N%j;C3{1Y zo+V)JahGy9LlYkfqOqkO%i&rdb6bsv8^|P4>`M@Rj&P%1Q2wf?hezBU1{a`vVC3;) zN3yUEkPT(PLak2Shp;yyffb1iiT9g--cQ5FnpPi8Z7%s@)#_$6FHW9sZr)|DGaNNx zrVe#^=H{?R;AXlV^ONf$halBqN9oNzb*C9BCf~xG&pOHqdI~)>2MoW)A*U=T9Dih) zk9N^bMbKlk4n5?Pj#t2;u9weR7a^&_Li-UCk?p+tQ$?VMNWD9@{@1-j*Nt&6lGUYC zn}~ZW7>IfgmqB5AuuZCiHVKTUR?wz335cT$|EMN10_xf1ohFs{YrX|?$HdYQqTCI zE40HpU`4D+P@xr?YVu;L6TW71o}Uz?#*szBJRs}~@q<+)cZO2H+hXaB!x;koKmLwI zm&WmC)Y6l3eE#7)uYdPWtT+;m1M?y9q%K}5VuNvH{51*VWJ!aOVJ3bzr9F^o?$g1Z;UawJLxJ^bw|w zDAUBQCITi^4n*5)KmV|QE%B*X3W2)G#R?(mi=El~*DvlMUFW^*g-?(c%*KCr z+lhq*{QJDoa)bB*PLDr#wk*gWs7n6(;!_qt6ei??1cOa6VEdjBvtQwtA|^Rocr5si z|B&9dk$RaPyPQPBxLfj<*%uPjBuefDxqs_^|Mv?a1YrH+_L4zK5c$8}27>1>fQiR~ zs@wVh_^|sz91SR#=aG1c&CvhVIQ|DU{p$monC@lsezeY0|yw+@Hg~| z82)uh{IjvI1pd{S}J-A Z-9E6Ht+&k{h6DfJypj Date: Fri, 30 Aug 2019 15:04:27 +0900 Subject: [PATCH 151/163] Update README --- R/README.Rmd | 16 +++++++++------- R/README.md | 6 ++++-- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/R/README.Rmd b/R/README.Rmd index 459f7472..6c2a697d 100644 --- a/R/README.Rmd +++ b/R/README.Rmd @@ -2,7 +2,7 @@ output: github_document --- -# hBayesDM-R +# hBayesDM [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) [![Build Status](https://travis-ci.org/CCS-Lab/hBayesDM.svg?branch=master)](https://travis-ci.org/CCS-Lab/hBayesDM) @@ -14,13 +14,15 @@ output: github_document ## Quick Links - - **Tutorial**: (R) - - **Mailing list**: - - - **Bug reports**: - - **Contributing**: See the - [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository. +- **Mailing list**: + +- **Bug reports**: +- **Contributing**: See the [Wiki][wiki] of this repository. +- **Python interface for hBayesDM**: [PyPI][hbayesdm-pypi], [documentation][hbayesdm-py-doc] +[hbayesdm-pypi]: https://pypi.org/project/hbayesdm/ +[hbayesdm-py-doc]: https://hbayesdm.readthedocs.io +[wiki]: https://github.com/CCS-Lab/hBayesDM/wiki ## Getting Started diff --git a/R/README.md b/R/README.md index 7fc55125..c94b0fbc 100644 --- a/R/README.md +++ b/R/README.md @@ -1,5 +1,5 @@ -# hBayesDM-R +# hBayesDM [![Project Status: Active – The project has reached a stable, usable state and is being actively @@ -18,12 +18,14 @@ hBayesDM uses [Stan](http://mc-stan.org/) for Bayesian inference. ## Quick Links - - **Tutorial**: (R) - **Mailing list**: - **Bug reports**: - **Contributing**: See the [Wiki](https://github.com/CCS-Lab/hBayesDM/wiki) of this repository. 
+ - **Python interface for hBayesDM**:
+   [PyPI](https://pypi.org/project/hbayesdm/),
+   [documentation](https://hbayesdm.readthedocs.io)
 
 ## Getting Started
 

From 45ae8bb827cde02f002d96fad95fcd6724292230 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:04:36 +0900
Subject: [PATCH 152/163] Update NEWS

---
 R/NEWS.md | 53 ++++++++++++++++++++++++++++-------------------------
 1 file changed, 28 insertions(+), 25 deletions(-)

diff --git a/R/NEWS.md b/R/NEWS.md
index 0df27aac..3ec72463 100644
--- a/R/NEWS.md
+++ b/R/NEWS.md
@@ -1,30 +1,33 @@
 # hBayesDM 1.0.0
 
-- Major changes
-  - hBayesDM now has both R and Python versions, with the same models included!
-    You can run hBayesDM in whichever language you prefer!
-  - Models in hBayesDM are now specified as YAML files. Using the YAML files,
-    R and Python code is generated automatically. If you want to contribute to
-    hBayesDM by adding a model, all you have to do is write a Stan file
-    and specify its information! You can find out how in the hBayesDM wiki
-    (https://github.com/CCS-Lab/hBayesDM/wiki).
-  - By default, model functions try to use parameter estimates obtained with
-    variational Bayesian methods as initial values for MCMC sampling (#96). If VB
-    estimation fails, random values are used instead.
-  - The `data` argument for model functions can handle a data.frame object (#2, #98).
-  - `choiceRT_lba` and `choiceRT_lba_single` are temporarily removed since their code
-    is not suitable for the new package structure. We plan to re-add the models
-    in future versions.
-  - The Cumulative Model for the Cambridge Gambling Task is added (`cgt_cm`; #108).
-- Minor changes
-  - The `tau` parameter in all models for the risk aversion task is now
-    bounded to [0, 30] (#77, #78).
-  - `bart_4par` is fixed to compute subject-wise log-likelihood (#82).
-  - `extract_ic` is fixed for its wrong `rep` function usage (#94, #100).
-  - The drift rate (`delta` parameter) in `choiceRT_ddm` and `choiceRT_ddm_single` is
-    now unbounded and is estimated on [-Inf, Inf] (#95, #107).
-  - Fix a preprocessing error in `choiceRT_ddm` and `choiceRT_ddm_single` (#95, #109).
-  - Fix `igt_orl` for a wrong Matt trick operation (#110).
+## Major changes
+
+* hBayesDM now has both R and Python versions, with the same models included!
+You can run hBayesDM in whichever language you prefer!
+* Models in hBayesDM are now specified as YAML files. Using the YAML files,
+R and Python code is generated automatically. If you want to contribute to
+hBayesDM by adding a model, all you have to do is write a Stan file
+and specify its information! You can find out how in the hBayesDM wiki
+(https://github.com/CCS-Lab/hBayesDM/wiki).
+* By default, model functions try to use parameter estimates obtained with
+variational Bayesian methods as initial values for MCMC sampling (#96). If VB
+estimation fails, random values are used instead.
+* The `data` argument for model functions can handle a data.frame object (#2, #98).
+* `choiceRT_lba` and `choiceRT_lba_single` are temporarily removed since their code
+is not suitable for the new package structure. We plan to re-add the models
+in future versions.
+* The Cumulative Model for the Cambridge Gambling Task is added (`cgt_cm`; #108).
+
+## Minor changes
+
+* The `tau` parameter in all models for the risk aversion task is now
+bounded to [0, 30] (#77, #78).
+* `bart_4par` is fixed to compute subject-wise log-likelihood (#82).
+* `extract_ic` is fixed for its wrong `rep` function usage (#94, #100).
+* The drift rate (`delta` parameter) in `choiceRT_ddm` and `choiceRT_ddm_single` is
+now unbounded and is estimated on [-Inf, Inf] (#95, #107).
+* Fix a preprocessing error in `choiceRT_ddm` and `choiceRT_ddm_single` (#95, #109).
+* Fix `igt_orl` for a wrong Matt trick operation (#110).
 
 # hBayesDM 0.7.2
 

From a05d41c1265f8a56684890ac457181d81b4c40bc Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:13:29 +0900
Subject: [PATCH 153/163] Ignore .rda files

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index 54b3923e..b2332d2c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -268,6 +268,7 @@ dmypy.json
 
 # Session Data files
 .RData
+*.rda
 
 # User-specific files
 .Ruserdata

From 8c5b07fa041913b56c3277fc706e77c605defe50 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:23:23 +0900
Subject: [PATCH 154/163] Update getting started

---
 R/vignettes/getting_started.Rmd | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/R/vignettes/getting_started.Rmd b/R/vignettes/getting_started.Rmd
index d7dd2604..043fe0ab 100644
--- a/R/vignettes/getting_started.Rmd
+++ b/R/vignettes/getting_started.Rmd
@@ -53,8 +53,9 @@ Detailed specification of Bayesian models is not available in text yet (stay tun
 
 ## Tasks & models implemented in hBayesDM
 
-There are
-Table: As of hBayesDM v1.0.0 (`r format(Sys.time(), '%B %d, %Y')`)
+See [here][list-tasks-models] for the list of tasks and models implemented in hBayesDM.
+
+[list-tasks-models]: http://ccs-lab.github.io/hBayesDM/reference/index.html
 
 ## How to install hBayesDM
 
@@ -203,7 +204,7 @@ When model fitting is complete, you see this message and data are stored into `o
 3. `parVals`: Posterior samples of all parameters. Extracted by `rstan::extract(rstan_object, permuted=T)`. **Note that hyper (group) mean parameters are indicated by `mu_PARAMETER` (e.g., `mu_xi`, `mu_ep`, `mu_rho`).**
 4. `fit`: RStan object (i.e., `fit = stan(file='gng_m1.stan', ...)` ).
 5. `rawdata`: Raw trial-by-trial data used for modeling. Raw data are provided in the output to allow users to easily access data and compare trial-by-trial model-based regressors (e.g., prediction errors) with choice data.
-6. `modelRegressor` (optional): Trial-by-trial model-based regressors such as prediction errors, the values of the chosen option, etc. For each model, we pre-select appropriate model-based regressors. Currently (version 0.2.3.3), this feature is available only for the orthogonalized Go/NoGo task.
+6. `modelRegressor` (optional): Trial-by-trial model-based regressors such as prediction errors, the values of the chosen option, etc. For each model, we pre-select appropriate model-based regressors.
 
 ```
 > output1$allIndPars
@@ -317,7 +318,8 @@ plotHDI( diffDist ) # plot the group mean differences
 In model-based neuroimaging [e.g., @o2007model], model-based time series of a latent cognitive process are generated by computational models, and then time series data are convolved with a hemodynamic response function and regressed against fMRI or EEG data. This model-based neuroimaging approach has been particularly popular in cognitive neuroscience.
 
-The biggest challenge for performing model-based fMRI/EEG is to learn how to extract trial-by-trial model-based regressors. The hBayesDM package allows users to easily extract model-based regressors that can be used for model-based fMRI or EEG analysis. **Note that in the current version (version 0.4.0), only the orthogonalized Go/NoGo task provides model-based regressors**. The hBayesDM package currently provides the following model-based regressors. With the trial-by-trial regressors, users can easily use their favorite neuroimaging package (e.g., in Statistical Parametric Mapping (SPM; http://www.fil.ion.ucl.ac.uk/spm/) to perform model-based fMRI analysis. See our [paper](https://www.mitpressjournals.org/doi/abs/10.1162/CPSY_a_00002) (**Extracting Trial-by-Trial Regressors for Model-Based fMRI/EEG Analysis**) for more details.
+The biggest challenge for performing model-based fMRI/EEG is to learn how to extract trial-by-trial model-based regressors. The hBayesDM package allows users to easily extract model-based regressors that can be used for model-based fMRI or EEG analysis.
+The hBayesDM package currently provides the following model-based regressors. With the trial-by-trial regressors, users can easily use their favorite neuroimaging package (e.g., Statistical Parametric Mapping (SPM; http://www.fil.ion.ucl.ac.uk/spm/)) to perform model-based fMRI analysis. See our [paper](https://www.mitpressjournals.org/doi/abs/10.1162/CPSY_a_00002) (**Extracting Trial-by-Trial Regressors for Model-Based fMRI/EEG Analysis**) for more details.
 
 As an example, if you would like to extract trial-by-trial stimulus values (i.e., expected value of stimulus on each trial), first fit a model like the following (set the `modelRegressor` input variable to `TRUE`; its default value is `FALSE`):
 
@@ -384,7 +386,6 @@ Simply put, _posterior predictive checks_ refer to when a fitted model is used t
 From v0.5.0, users can run posterior predictive checks on all models except drift-diffusion models in hBayesDM. Simulated data from posterior predictive checks are contained in `hBayesDM_OUTPUT$parVals$y_pred`.
 In a future release, we will include a function/command that can conveniently summarize and plot posterior predictive checks.
 In the meantime, users can program their own code like the following:
 
 ```{r eval=FALSE}
-
 ## fit example data with the gng_m3 model and run posterior predictive checks
 x = gng_m3(data="example", niter=2000, nwarmup=1000, nchain=4, ncore=4, inc_postpred = TRUE)

From 365a698a2b57e9d3de4b971dc01d4f8e77b0fbde Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:29:42 +0900
Subject: [PATCH 155/163] Ignore docs/

---
 R/.gitignore                                   |   1 +
 R/docs/authors.html                            | 171 ------
 R/docs/docsearch.css                           | 148 ------
 R/docs/docsearch.js                            |  85 ---
 R/docs/index.html                              | 215 --------
 R/docs/link.svg                                |  12 -
 R/docs/news/index.html                         | 380 --------
 R/docs/pkgdown.css                             | 236 ---------
 R/docs/pkgdown.js                              | 115 -----
 R/docs/pkgdown.yml                             |   5 -
 R/docs/reference/HDIofMCMC.html                | 165 ------
 R/docs/reference/bandit2arm_delta.html         | 339 ------------
 .../reference/bandit4arm2_kalman_filter.html   | 340 ------------
 R/docs/reference/bandit4arm_2par_lapse.html    | 339 ------------
 R/docs/reference/bandit4arm_4par.html          | 339 ------------
 R/docs/reference/bandit4arm_lapse.html         | 339 ------------
 R/docs/reference/bandit4arm_lapse_decay.html   | 339 ------------
 R/docs/reference/bandit4arm_singleA_lapse.html | 339 ------------
 R/docs/reference/bart_par4.html                | 339 ------------
 R/docs/reference/cgt_cm.html                   | 343 ------------
 R/docs/reference/choiceRT_ddm.html             | 355 -------------
 R/docs/reference/choiceRT_ddm_single.html      | 356 -------------
 R/docs/reference/choiceRT_lba.html             | 313 -----------
 R/docs/reference/choiceRT_lba_single.html      | 313 -----------
 R/docs/reference/cra_exp.html                  | 342 ------------
 R/docs/reference/cra_linear.html               | 342 ------------
 R/docs/reference/dbdm_prob_weight.html         | 347 -------------
 R/docs/reference/dd_cs.html                    | 340 ------------
 R/docs/reference/dd_cs_single.html             | 340 ------------
 R/docs/reference/dd_exp.html                   | 340 ------------
 R/docs/reference/dd_hyperbolic.html            | 341 ------------
 R/docs/reference/dd_hyperbolic_single.html     | 341 ------------
 R/docs/reference/estimate_mode.html            | 155 ------
 R/docs/reference/extract_ic.html               | 179 -------
 R/docs/reference/gng_m1.html                   | 338 ------------
 R/docs/reference/gng_m2.html                   | 338 ------------
 R/docs/reference/gng_m3.html                   | 338 ------------
 R/docs/reference/gng_m4.html                   | 338 ------------
 R/docs/reference/hBayesDM-package.html         | 253 ---------
 R/docs/reference/hBayesDM_model.html           | 260 ----------
 R/docs/reference/igt_orl.html                  | 351 -------------
 R/docs/reference/igt_pvl_decay.html            | 350 -------------
 R/docs/reference/igt_pvl_delta.html            | 350 -------------
 R/docs/reference/igt_vpp.html                  | 349 -------------
 R/docs/reference/index.html                    | 487 ------
 R/docs/reference/multiplot.html                | 163 ------
 R/docs/reference/peer_ocu.html                 | 344 -------------
 R/docs/reference/plot.hBayesDM.html            | 175 -------
 R/docs/reference/plotDist.html                 | 189 -------
 R/docs/reference/plotHDI.html                  | 189 -------
 R/docs/reference/plotInd.html                  | 184 -------
 R/docs/reference/printFit.html                 | 189 -------
 R/docs/reference/prl_ewa.html                  | 339 ------------
 R/docs/reference/prl_fictitious.html           | 340 ------------
 R/docs/reference/prl_fictitious_multipleB.html | 341 ------------
 R/docs/reference/prl_fictitious_rp.html        | 341 ------------
 R/docs/reference/prl_fictitious_rp_woa.html    | 341 ------------
 R/docs/reference/prl_fictitious_woa.html       | 340 ------------
 R/docs/reference/prl_rp.html                   | 339 ------------
 R/docs/reference/prl_rp_multipleB.html         | 341 ------------
 R/docs/reference/pst_gainloss_Q.html           | 341 ------------
 R/docs/reference/ra_noLA.html                  | 339 ------------
 R/docs/reference/ra_noRA.html                  | 339 ------------
 R/docs/reference/ra_prospect.html              | 339 ------------
 R/docs/reference/rdt_happiness.html            | 346 -------------
 R/docs/reference/rhat.html                     | 170 ------
 R/docs/reference/ts_par4.html                  | 352 -------------
 R/docs/reference/ts_par6.html                  | 351 -------------
 R/docs/reference/ts_par7.html                  | 351 -------------
 R/docs/reference/ug_bayes.html                 | 337 ------------
 R/docs/reference/ug_delta.html                 | 337 ------------
 R/docs/reference/wcs_sql.html                  | 339 ------------
 72 files changed, 1 insertion(+), 20840 deletions(-)
 delete mode 100644 R/docs/authors.html
 delete mode 100644 R/docs/docsearch.css
 delete mode 100644 R/docs/docsearch.js
 delete mode 100644 R/docs/index.html
 delete mode 100644 R/docs/link.svg
 delete mode 100644 R/docs/news/index.html
 delete mode 100644 R/docs/pkgdown.css
 delete mode 100644 R/docs/pkgdown.js
 delete mode 100644 R/docs/pkgdown.yml
 delete mode 100644 R/docs/reference/HDIofMCMC.html
 delete mode 100644 R/docs/reference/bandit2arm_delta.html
 delete mode 100644 R/docs/reference/bandit4arm2_kalman_filter.html
 delete mode 100644 R/docs/reference/bandit4arm_2par_lapse.html
 delete mode 100644 R/docs/reference/bandit4arm_4par.html
 delete mode 100644 R/docs/reference/bandit4arm_lapse.html
 delete mode 100644 R/docs/reference/bandit4arm_lapse_decay.html
 delete mode 100644 R/docs/reference/bandit4arm_singleA_lapse.html
 delete mode 100644 R/docs/reference/bart_par4.html
 delete mode 100644 R/docs/reference/cgt_cm.html
 delete mode 100644 R/docs/reference/choiceRT_ddm.html
 delete mode 100644 R/docs/reference/choiceRT_ddm_single.html
 delete mode 100644 R/docs/reference/choiceRT_lba.html
 delete mode 100644 R/docs/reference/choiceRT_lba_single.html
 delete mode 100644 R/docs/reference/cra_exp.html
 delete mode 100644 R/docs/reference/cra_linear.html
 delete mode 100644 R/docs/reference/dbdm_prob_weight.html
 delete mode 100644 R/docs/reference/dd_cs.html
 delete mode 100644 R/docs/reference/dd_cs_single.html
 delete mode 100644 R/docs/reference/dd_exp.html
 delete mode 100644 R/docs/reference/dd_hyperbolic.html
 delete mode 100644 R/docs/reference/dd_hyperbolic_single.html
 delete mode 100644 R/docs/reference/estimate_mode.html
 delete mode 100644 R/docs/reference/extract_ic.html
 delete mode 100644 R/docs/reference/gng_m1.html
 delete mode 100644 R/docs/reference/gng_m2.html
 delete mode 100644 R/docs/reference/gng_m3.html
 delete mode 100644 R/docs/reference/gng_m4.html
 delete mode 100644 R/docs/reference/hBayesDM-package.html
 delete mode 100644 R/docs/reference/hBayesDM_model.html
 delete mode 100644 R/docs/reference/igt_orl.html
 delete mode 100644 R/docs/reference/igt_pvl_decay.html
 delete mode 100644 R/docs/reference/igt_pvl_delta.html
 delete mode 100644 R/docs/reference/igt_vpp.html
 delete mode 100644 R/docs/reference/index.html
 delete mode 100644 R/docs/reference/multiplot.html
 delete mode 100644 R/docs/reference/peer_ocu.html
 delete mode 100644 R/docs/reference/plot.hBayesDM.html
 delete mode 100644 R/docs/reference/plotDist.html
 delete mode 100644 R/docs/reference/plotHDI.html
 delete mode 100644 R/docs/reference/plotInd.html
 delete mode 100644 R/docs/reference/printFit.html
 delete mode 100644 R/docs/reference/prl_ewa.html
 delete mode 100644 R/docs/reference/prl_fictitious.html
 delete mode 100644 R/docs/reference/prl_fictitious_multipleB.html
 delete mode 100644 R/docs/reference/prl_fictitious_rp.html
 delete mode 100644 R/docs/reference/prl_fictitious_rp_woa.html
 delete mode 100644 R/docs/reference/prl_fictitious_woa.html
 delete mode 100644 R/docs/reference/prl_rp.html
 delete mode 100644 R/docs/reference/prl_rp_multipleB.html
 delete mode 100644 R/docs/reference/pst_gainloss_Q.html
 delete mode 100644 R/docs/reference/ra_noLA.html
 delete mode 100644 R/docs/reference/ra_noRA.html
 delete mode 100644 R/docs/reference/ra_prospect.html
 delete mode 100644 R/docs/reference/rdt_happiness.html
 delete mode 100644 R/docs/reference/rhat.html
 delete mode 100644 R/docs/reference/ts_par4.html
 delete mode 100644 R/docs/reference/ts_par6.html
 delete mode 100644 R/docs/reference/ts_par7.html
 delete mode 100644 R/docs/reference/ug_bayes.html
 delete mode 100644 R/docs/reference/ug_delta.html
 delete mode 100644 R/docs/reference/wcs_sql.html

diff --git a/R/.gitignore b/R/.gitignore
index 01dcbd20..d5af0a6e 100644
--- a/R/.gitignore
+++ b/R/.gitignore
@@ -1,2 +1,3 @@
+docs/
 hBayesDM*.tar.gz
 hBayesDM.Rcheck/

diff --git a/R/docs/authors.html b/R/docs/authors.html
deleted file mode 100644
index 3174b5fa..00000000
--- a/R/docs/authors.html
+++ /dev/null
@@ -1,171 +0,0 @@
-Citation and Authors • hBayesDM
[remaining 170 lines of deleted pkgdown page markup; no further text content survives]

diff --git a/R/docs/docsearch.css b/R/docs/docsearch.css
deleted file mode 100644
index e5f1fe1d..00000000
--- a/R/docs/docsearch.css
+++ /dev/null
@@ -1,148 +0,0 @@
-/* Docsearch -------------------------------------------------------------- */
-/*
-  Source: https://github.com/algolia/docsearch/
-  License: MIT
-*/
[remaining 143 lines of deleted Algolia DocSearch widget CSS]
diff --git a/R/docs/docsearch.js b/R/docs/docsearch.js
deleted file mode 100644
index b35504cd..00000000
--- a/R/docs/docsearch.js
+++ /dev/null
@@ -1,85 +0,0 @@
[85 lines of deleted DocSearch JavaScript: search-bar keyboard shortcut, keyword highlighting via mark.js, and matched-word URL helpers]

diff --git a/R/docs/index.html b/R/docs/index.html
deleted file mode 100644
index 386e1f71..00000000
--- a/R/docs/index.html
+++ /dev/null
@@ -1,215 +0,0 @@
-Hierarchical Bayesian Modeling of Decision-Making Tasks • hBayesDM
    -
    - - - -
    -
    - - - - -
    - - -

    hBayesDM (hierarchical Bayesian modeling of Decision-Making tasks) is a user-friendly package that offers hierarchical Bayesian analysis of various computational models on an array of decision-making tasks. hBayesDM uses Stan for Bayesian inference.

    - -
    -

    -Getting Started

    -
    -

    -Prerequisite

    -

    To install hBayesDM for R, RStan needs to be properly installed before you proceed. For detailed instructions on having RStan ready prior to installing hBayesDM, please go to this link: https://github.com/stan-dev/rstan/wiki/RStan-Getting-Started
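If RStan is set up correctly, a short smoke test like the following should compile and sample. This is a sketch we suggest for checking the setup; it is not part of hBayesDM itself:

```r
# Minimal RStan smoke test (our suggestion, not part of hBayesDM):
library(rstan)
fit <- stan(model_code = "parameters { real y; } model { y ~ normal(0, 1); }",
            chains = 1, iter = 500)
print(fit)  # if this samples and prints, RStan is working
```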

    -
    -
    -

    -Installation

    -

The latest stable version of hBayesDM can be installed from CRAN by running the following command in R:

    -
    install.packages("hBayesDM")  # Install hBayesDM from CRAN
    -

or you can install it from GitHub with:

    - -

    If you want to use the latest development version of hBayesDM, run the following in R:

    -
    # `devtools` is required to install hBayesDM from GitHub
    -if (!require(devtools)) install.packages("devtools")
    -
    -devtools::install_github("CCS-Lab/hBayesDM", ref="develop", subdir="R")
    -
    -
    -

    -Building at once

    -

By default, you will have to wait for compilation when you run each model for the first time. If you plan on running several different models and want to pre-build all models at installation time, set an environment variable BUILD_ALL to true, like the following. We highly recommend you only do so when you have multiple cores available, since building all models at once takes quite a long time to complete.

    -
    Sys.setenv(BUILD_ALL = "true")  # Build *all* models at installation time
    -Sys.setenv(MAKEFLAGS = "-j 4")  # Use 4 cores for build (or any other number you want)
    -
    -install.packages("hBayesDM")                    # Install from CRAN
    -# or
    -devtools::install_github("CCS-Lab/hBayesDM/R")  # Install from GitHub
    -
    -
    - -
    -
    - - - -
    - - -
    - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
-

diff --git a/R/docs/link.svg b/R/docs/link.svg
deleted file mode 100644
index 88ad8276..00000000
--- a/R/docs/link.svg
+++ /dev/null
@@ -1,12 +0,0 @@
[12 lines of deleted SVG markup (anchor-link icon); no text content survives]

diff --git a/R/docs/news/index.html b/R/docs/news/index.html
deleted file mode 100644
index 19980f47..00000000
--- a/R/docs/news/index.html
+++ /dev/null
@@ -1,380 +0,0 @@
-Changelog • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    -

    -hBayesDM 1.0.0 Unreleased -

    -
      -
    • Major changes -
        -
• hBayesDM now has both R and Python versions, with the same models included! You can run hBayesDM in whichever language you prefer!
      • -
• Models in hBayesDM are now specified as YAML files. Using the YAML files, R and Python code is generated automatically. If you want to contribute to hBayesDM by adding a model, all you have to do is write a Stan file and specify the model's information! You can find out how in the hBayesDM wiki (https://github.com/CCS-Lab/hBayesDM/wiki).
      • -
• Model functions try to use parameter estimates obtained with variational Bayesian methods as their initial values for MCMC sampling by default (#96). If VB estimation fails, random values are used instead.
      • -
      • The data argument for model functions can handle a data.frame object (#2, #98).
      • -
• -choiceRT_lba and choiceRT_lba_single are temporarily removed since their code is not suitable for the new package structure. We plan to re-add these models in future versions.
      • -
      • The Cumulative Model for Cambridge Gambling Task is added (cgt_cm; #108).
      • -
      -
    • -
    • Minor changes -
        -
      • The tau parameter in all models for the risk aversion task is modified to be bounded to [0, 30] (#77, #78).
      • -
      • -bart_4par is fixed to compute subject-wise log-likelihood (#82).
      • -
      • -extract_ic is fixed for its wrong rep function usage (#94, #100).
      • -
• The drift rate (delta parameter) in choiceRT_ddm and choiceRT_ddm_single is now unbounded and is estimated on [-Inf, Inf] (#95, #107).
      • -
      • Fix a preprocessing error in choiceRT_ddm and choiceRT_ddm_single (#95, #109).
      • -
      • Fix igt_orl for a wrong Matt trick operation (#110).
      • -
      -
    • -
    -
    -
    -

    -hBayesDM 0.7.2 2019-02-12 -

    -
      -
    • Add three new models for the bandit4arm task: bandit4arm_2par_lapse, bandit4arm_lapse_decay and bandit4arm_singleA_lapse.
    • -
    • Fix various (minor) errors.
    • -
    -
    -
    -

    -hBayesDM 0.7.1 2019-01-21 -

    -
      -
    • Make it usable without manually loading rstan.
    • -
    • Remove an annoying warning about using ..insensitive_data_columns.
    • -
    -
    -
    -

    -hBayesDM 0.7.0 2018-12-13 -

    -
      -
• Now, by default, each Stan model is built into a binary the first time you use it. To build all the models at installation, set an environment variable BUILD_ALL to true before installation.
    • -
• Now all the implemented models are refactored using the hBayesDM_model function. You don’t have to change anything to use them, but developers can easily implement new models now!
    • -
    • We added a Kalman filter model for 4-armed bandit task (bandit4arm2_kalman_filter; Daw et al., 2006) and a probability weighting function for general description-based tasks (dbdm_prob_weight; Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008).
    • -
    • Initial values of parameter estimation for some models are updated as plausible values, and the parameter boundaries of several models are fixed (see more on issue #63 and #64 in Github).
    • -
• Exponential and linear models for the choice under risk and ambiguity task now have four model regressors: sv, sv_fix, sv_var, and p_var (see the sketch after this list).
    • -
• Fix the Travis CI settings and related code so that checks pass properly.
    • -
    -
    -
    -
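A minimal sketch of how such regressors might be accessed after fitting. This is our illustration, not code from the changelog; it assumes a fitted CRA model object named `output`:

```r
# Hypothetical access pattern (assumes `output` is a cra_exp or cra_linear
# fit run with modelRegressor = TRUE; regressor names from the entry above):
str(output$modelRegressor)       # should list sv, sv_fix, sv_var, and p_var
sv <- output$modelRegressor$sv   # trial-by-trial subjective values
```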

    -hBayesDM 0.6.3 Unreleased -

    -
      -
    • Update the dependencies on rstan (>= 2.18.1)
    • -
    • No changes on model files, as same as the version 0.6.2
    • -
    -
    -
    -

    -hBayesDM 0.6.2 Unreleased -

    -
      -
    • Fix an error on choiceRT_ddm (#44)
    • -
    -
    -
    -

    -hBayesDM 0.6.1 Unreleased -

    -
      -
    • Solve an issue with built binary files.
    • -
    • Fix an error on peer_ocu with misplaced parentheses.
    • -
    -
    -
    -

    -hBayesDM 0.6.0 2018-09-11 -

    -
      -
    • Add new tasks (Balloon Analogue Risk Task, Choice under Risk and Ambiguity Task, Probabilistic Selection Task, Risky Decision Task (a.k.a. Happiness task), Wisconsin Card Sorting Task)
    • -
    • Add a new model for the Iowa Gambling Task (igt_orl)
    • -
• Change priors (Half-Cauchy(0, 5) –> Half-Cauchy(0, 1) or Half-Normal(0, 0.2))
    • -
• The printFit function now provides LOOIC weights and/or WAIC weights (see the sketch after this list)
    • -
    -
    -
    -
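For example, a model comparison call might look like the following sketch (`output1` and `output2` are assumed to be fitted hBayesDM objects for the same data):

```r
# Compare candidate models with both LOOIC and WAIC weights (sketch):
printFit(output1, output2, ic = "both")
```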

    -hBayesDM 0.5.1 Unreleased -

    -
      -
    • Add models for the Two Step task
    • -
    • Add models without indecision point parameter (alpha) for the PRL task (prl_*_woa.stan)
    • -
    • Model-based regressors for the PRL task are now available
    • -
• For the PRL task, prl_fictitious.stan and prl_fictitious_rp.stan: the range of alpha (indecision point) is changed from [0, 1] to [-Inf, Inf]
    • -
    -
    -
    -

    -hBayesDM 0.5.0 2018-01-03 -

    -
      -
• Support variational Bayesian methods (vb=TRUE; see the sketch after this list)
    • -
    • Allow posterior predictive checks, except for drift-diffusion models (inc_postpred=TRUE)
    • -
    • Add the peer influence task (Chung et al., 2015, USE WITH CAUTION for now and PLEASE GIVE US FEEDBACK!)
    • -
    • Add ‘prl_fictitious_rp’ model
    • -
    • Made changes to be compatible with the newest Stan version (e.g., // instead of # for commenting).
    • -
• In ‘prl_*’ models, ‘rewlos’ is replaced by ‘outcome’ so that column names and labels would be consistent across tasks as much as possible.
    • -
• The email feature is disabled, as the R mail package no longer allows users to send anonymous emails.
    • -
    • When outputs are saved as a file (*.RData), the file name now contains the name of the data file.
    • -
    -
    -
    -
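A sketch of how these two features combine in practice (our illustration; `gng_m1` stands in for any model function):

```r
# Quick approximate fit with variational Bayes, then a full MCMC run
# with posterior predictive simulations (sketch):
fit_vb   <- gng_m1(data = "example", vb = TRUE)
fit_mcmc <- gng_m1(data = "example", niter = 2000, nwarmup = 1000,
                   inc_postpred = TRUE)
dim(fit_mcmc$parVals$y_pred)   # simulated trial-by-trial responses
```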

    -hBayesDM 0.4.0 2017-05-23 -

    -
      -
    • Add a choice reaction time task and evidence accumulation models -
        -
      • Drift diffusion model (both hierarchical and single-subject)
      • -
      • Linear Ballistic Accumulator (LBA) model (both hierarchical and single-subject)
      • -
      -
    • -
    • Add PRL models that can fit multiple blocks
    • -
    • Add single-subject versions for the delay discounting task (dd_hyperbolic_single and dd_cs_single).
    • -
    • Standardize variable names across all models (e.g., rewlos –> outcome for all models)
    • -
• Separate versions for CRAN and GitHub. All models/features are identical but the GitHub version contains precompiled models.
    • -
    -
    -
    -

    -hBayesDM 0.3.1 Unreleased -

    -
      -
• Remove dependence on the modeest package. Now use a built-in function to estimate the mode of a posterior distribution (see the sketch after this list).
    • -
    • Rewrite the “printFit” function.
    • -
    -
    -
    -
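The replacement is essentially a kernel-density argmax. Below is our reconstruction of that idea, not necessarily the package's exact built-in code:

```r
# Mode of a posterior sample via kernel density estimation
# (our reconstruction of the idea; not necessarily the exact built-in):
estimate_mode <- function(x) {
  d <- density(x)
  d$x[which.max(d$y)]
}
estimate_mode(rnorm(5000, mean = 1))   # should return a value close to 1
```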

    -hBayesDM 0.3.0 2017-01-22 -

    -
      -
    • Made several changes following the guidelines for R packages providing interfaces to Stan.
    • -
    • Stan models are precompiled and models will run immediately when called.
    • -
    • The default number of chains is set to 4.
    • -
    • The default value of adapt_delta is set to 0.95 to reduce the potential for divergences.
    • -
    • The “printFit” function uses LOOIC by default. Users can select WAIC or both (LOOIC & WAIC) if needed.
    • -
    -
    -
    -

    -hBayesDM 0.2.3.3 2016-12-28 -

    -
      -
    • Add help files
    • -
    • Add a function for checking Rhat values (rhat).
    • -
    • Change a link to its tutorial website
    • -
    -
    -
    -

    -hBayesDM 0.2.3.2 2016-12-21 -

    -
      -
    • Use wide normal distributions for unbounded parameters (gng_* models).
    • -
    • Automatic removal of rows (trials) containing NAs.
    • -
    -
    -
    -

    -hBayesDM 0.2.3.1 2016-09-30 -

    -
      -
    • Add a function for plotting individual parameters (plotInd)
    • -
    -
    -
    -

    -hBayesDM 0.2.3 2016-07-17 -

    -
      -
    • Add a new task: the Ultimatum Game
    • -
    • Add new models for the Probabilistic Reversal Learning and Risk Aversion tasks
    • -
    • ‘bandit2arm’ -> change its name to ‘bandit2arm_delta’. Now all model names are in the same format (i.e., TASK_MODEL).
    • -
• Users can extract model-based regressors from gng_m* models (see the sketch after this list)
    • -
    • Include the option of customizing control parameters (adapt_delta, max_treedepth, stepsize)
    • -
    • ‘plotHDI’ function -> add ‘fontSize’ argument & change the color of histogram
    • -
    -
    -
    -
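A sketch of extracting those regressors (our illustration, using argument names documented elsewhere in this changelog):

```r
# Extract model-based regressors from a Go/NoGo model (sketch):
fit <- gng_m2(data = "example", modelRegressor = TRUE)
str(fit$modelRegressor)   # trial-by-trial regressors, e.g., prediction errors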

    -hBayesDM 0.2.1 2016-04-03 -

    -
    -

    -Bug fixes

    -
      -
    • All models: Fix errors when indPars=“mode”
    • -
    • ra_prospect model: Add description for column names of a data (*.txt) file
    • -
    -
    -
    -

    -Change

    -
      -
    • Change standard deviations of ‘b’ and ‘pi’ priors in gng_* models
    • -
    -
    -
    -
    -

    -hBayesDM 0.2.0 2016-03-25 -

    -

    Initially released.

    -
    -
    - - - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
-

diff --git a/R/docs/pkgdown.css b/R/docs/pkgdown.css
deleted file mode 100644
index c03fb08d..00000000
--- a/R/docs/pkgdown.css
+++ /dev/null
@@ -1,236 +0,0 @@
-/* Sticky footer */
[236 lines of deleted pkgdown theme CSS: sticky footer, fixed-navbar offsets, sidebar, reference-index tables, syntax highlighting, and clipboard-button styles]
diff --git a/R/docs/pkgdown.js b/R/docs/pkgdown.js
deleted file mode 100644
index eb7e83d2..00000000
--- a/R/docs/pkgdown.js
+++ /dev/null
@@ -1,115 +0,0 @@
-/* http://gregfranko.com/blog/jquery-best-practices/ */
[115 lines of deleted pkgdown JavaScript: sticky sidebar, scrollspy, tooltips, active-navbar detection, and clipboard copy buttons]
diff --git a/R/docs/pkgdown.yml b/R/docs/pkgdown.yml
deleted file mode 100644
index 1996019a..00000000
--- a/R/docs/pkgdown.yml
+++ /dev/null
@@ -1,5 +0,0 @@
-pandoc: 2.2.3.2
-pkgdown: 1.3.0
-pkgdown_sha: ~
-articles: []
-

diff --git a/R/docs/reference/HDIofMCMC.html b/R/docs/reference/HDIofMCMC.html
deleted file mode 100644
index f4f9a3ec..00000000
--- a/R/docs/reference/HDIofMCMC.html
+++ /dev/null
@@ -1,165 +0,0 @@
-Compute Highest-Density Interval — HDIofMCMC • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

Computes the highest density interval from a sample of representative values, estimated as the shortest credible interval.
Downloaded from John Kruschke's website http://www.indiana.edu/~kruschke/DoingBayesianDataAnalysis/

    - -
    - -
    HDIofMCMC(sampleVec, credMass = 0.95)
    - -

    Arguments

    - - - - - - - - - - -
    sampleVec

    A vector of representative values from a probability distribution (e.g., MCMC samples).

    credMass

    A scalar between 0 and 1, indicating the mass within the credible interval that is to be estimated.

    - -

    Value

    - -

    A vector containing the limits of the HDI

    - - -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
-

diff --git a/R/docs/reference/bandit2arm_delta.html b/R/docs/reference/bandit2arm_delta.html
deleted file mode 100644
index a1b01a4d..00000000
--- a/R/docs/reference/bandit2arm_delta.html
+++ /dev/null
@@ -1,339 +0,0 @@
-Rescorla-Wagner (Delta) Model — bandit2arm_delta • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the 2-Armed Bandit Task using Rescorla-Wagner (Delta) Model. -It has the following parameters: A (learning rate), tau (inverse temperature).

    -
      -
    • Task: 2-Armed Bandit Task (Erev et al., 2010; Hertwig et al., 2004)

    • -
    • Model: Rescorla-Wagner (Delta) Model

    • -
    - -
    - -
    bandit2arm_delta(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object,
a filepath for a tab-separated txt file, "example" to use example data, or
"choose" to choose data with an interactive window.
Columns in the dataset must include:
"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("bandit2arm_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the 2-Armed Bandit Task, there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on the given trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
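A hypothetical minimal data set in this format, built in R (column names as specified above; the values are illustrative only):

```r
# Hypothetical 2-armed bandit data with the required columns
# (values are illustrative only):
df <- data.frame(
  subjID  = rep(c("s01", "s02"), each = 3),
  choice  = c(1, 2, 1, 2, 2, 1),
  outcome = c(1, -1, 1, -1, 1, -1)
)
output <- bandit2arm_delta(data = df, niter = 2000, nwarmup = 1000)
```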

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
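For example, a run with stricter sampler settings might look like the following sketch, using the control arguments documented above:

```r
# Tighter sampler settings to reduce divergences (sketch):
output <- bandit2arm_delta(data = "example", niter = 4000, nwarmup = 2000,
                           adapt_delta = 0.99, max_treedepth = 12)
```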

    - -

    References

    - -

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., et al. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47. http://doi.org/10.1002/bdm.683

    -

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions From Experience and the Effect of Rare Events in Risky Choice. Psychological Science, 15(8), 534-539. http://doi.org/10.1111/j.0956-7976.2004.00715.x

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- bandit2arm_delta(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- bandit2arm_delta(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
-

diff --git a/R/docs/reference/bandit4arm2_kalman_filter.html b/R/docs/reference/bandit4arm2_kalman_filter.html
deleted file mode 100644
index f85e16a7..00000000
--- a/R/docs/reference/bandit4arm2_kalman_filter.html
+++ /dev/null
@@ -1,340 +0,0 @@
-Kalman Filter — bandit4arm2_kalman_filter • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task (modified) using Kalman Filter. -It has the following parameters: lambda (decay factor), theta (decay center), beta (inverse softmax temperature), mu0 (anticipated initial mean of all 4 options), sigma0 (anticipated initial sd (uncertainty factor) of all 4 options), sigmaD (sd of diffusion noise).

    -
      -
    • Task: 4-Armed Bandit Task (modified)

    • -
    • Model: Kalman Filter (Daw et al., 2006)

    • -
    - -
    - -
    bandit4arm2_kalman_filter(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object,
a filepath for a tab-separated txt file, "example" to use example data, or
"choose" to choose data with an interactive window.
Columns in the dataset must include:
"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("bandit4arm2_kalman_filter").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the 4-Armed Bandit Task (modified), there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

    -
    outcome

    Integer value representing the outcome of the given trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
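A hypothetical minimal data set in this format, built in R (column names as specified above; choices range over the four options, and the values are illustrative only):

```r
# Hypothetical data for the modified 4-armed bandit task
# (values are illustrative only):
df <- data.frame(
  subjID  = rep(c("s01", "s02"), each = 4),
  choice  = c(1, 3, 4, 2, 2, 1, 4, 3),
  outcome = c(1, -1, 1, 1, -1, 1, -1, 1)
)
output <- bandit4arm2_kalman_filter(data = df, niter = 2000, nwarmup = 1000)
```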

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    -Yoonseo Zoh <zohyos7@gmail.com> - -

    References

    - -

    Daw, N. D., O'Doherty, J. P., Dayan, P., Seymour, B., & Dolan, R. J. (2006). Cortical substrates for exploratory decisions in humans. Nature, 441(7095), 876-879.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- bandit4arm2_kalman_filter(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- bandit4arm2_kalman_filter(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
-

diff --git a/R/docs/reference/bandit4arm_2par_lapse.html b/R/docs/reference/bandit4arm_2par_lapse.html
deleted file mode 100644
index 2af77a80..00000000
--- a/R/docs/reference/bandit4arm_2par_lapse.html
+++ /dev/null
@@ -1,339 +0,0 @@
-3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) — bandit4arm_2par_lapse • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise). -It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), xi (noise).

    -
      -
    • Task: 4-Armed Bandit Task

    • -
    • Model: 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise) (Aylward et al., 2018)

    • -
    - -
    - -
    bandit4arm_2par_lapse(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object,
a filepath for a tab-separated txt file, "example" to use example data, or
"choose" to choose data with an interactive window.
Columns in the dataset must include:
"subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bandit4arm_2par_lapse").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

gain

Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

loss

Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
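
As a minimal sketch of tuning these control parameters (assuming the bundled example data, and that a default run produced divergent-transition warnings), one might re-run with a stricter target acceptance rate and a deeper tree:

# Hypothetical re-run: adapt_delta closer to 1 and a larger max_treedepth
# slow down sampling but help eliminate divergent transitions.
output <- bandit4arm_2par_lapse(
  data = "example", niter = 4000, nwarmup = 2000, nchain = 4, ncore = 4,
  adapt_delta = 0.99, max_treedepth = 12)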

    References

Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. 10.31234/osf.io/k5b8m

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bandit4arm_2par_lapse(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bandit4arm_2par_lapse(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/bandit4arm_4par.html b/R/docs/reference/bandit4arm_4par.html
deleted file mode 100644
index be3717d1..00000000
--- a/R/docs/reference/bandit4arm_4par.html
+++ /dev/null
@@ -1,339 +0,0 @@
4 Parameter Model, without C (choice perseveration) — bandit4arm_4par • hBayesDM

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 4 Parameter Model, without C (choice perseveration). It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity).

• Task: 4-Armed Bandit Task

• Model: 4 Parameter Model, without C (choice perseveration) (Seymour et al., 2012)

bandit4arm_4par(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bandit4arm_4par").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

gain

Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

loss

Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
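
As an illustration of this format, a minimal data.frame that bandit4arm_4par would accept is sketched below (hypothetical values; a real data-set would contain many trials per subject):

# Two subjects, two trials each; any extra columns are simply ignored
df <- data.frame(
  subjID = c(1, 1, 2, 2),
  choice = c(1, 3, 2, 4),
  gain   = c(50, 100, 0, 50),
  loss   = c(0, -50, 0, -50)
)
output <- bandit4arm_4par(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)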

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    References

Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bandit4arm_4par(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bandit4arm_4par(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/bandit4arm_lapse.html b/R/docs/reference/bandit4arm_lapse.html
deleted file mode 100644
index 4fce923e..00000000
--- a/R/docs/reference/bandit4arm_lapse.html
+++ /dev/null
@@ -1,339 +0,0 @@
5 Parameter Model, without C (choice perseveration) but with xi (noise) — bandit4arm_lapse • hBayesDM

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 5 Parameter Model, without C (choice perseveration) but with xi (noise). It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise).

• Task: 4-Armed Bandit Task

• Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise) (Seymour et al., 2012)

bandit4arm_lapse(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bandit4arm_lapse").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.
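
Since the returned object is list-like, these components can be accessed directly. A brief sketch, assuming output holds a fitted bandit4arm_lapse object and that the posterior draws in parVals are stored under the parameter names listed above:

# Per-subject parameter summaries
head(output$allIndPars)

# Posterior draws for the reward learning rate (name assumed from the parameter list)
Arew_draws <- output$parVals$Arew

# The underlying stanfit object works with the usual rstan tooling
print(output$fit)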

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

gain

Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

loss

Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    References

Seymour, Daw, Roiser, Dayan, & Dolan (2012). Serotonin Selectively Modulates Reward Value in Human Decision-Making. J Neuro, 32(17), 5833-5842.

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bandit4arm_lapse(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bandit4arm_lapse(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/bandit4arm_lapse_decay.html b/R/docs/reference/bandit4arm_lapse_decay.html
deleted file mode 100644
index c4d61b73..00000000
--- a/R/docs/reference/bandit4arm_lapse_decay.html
+++ /dev/null
@@ -1,339 +0,0 @@
5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). — bandit4arm_lapse_decay • hBayesDM

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 5 Parameter Model, without C (choice perseveration) but with xi (noise) and an added decay rate (Niv et al., 2015, J. Neuro). It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise), d (decay rate).

• Task: 4-Armed Bandit Task

• Model: 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro). (Aylward et al., 2018)

bandit4arm_lapse_decay(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bandit4arm_lapse_decay").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

gain

Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

loss

Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
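
To follow the advice above about running multiple chains, nchain and ncore can be matched to the machine. A hedged sketch, assuming memory allows one Stan process per core:

# Run one chain per available core, capped at 4
n_cores <- min(4L, parallel::detectCores())
output <- bandit4arm_lapse_decay(
  data = "example", niter = 4000, nwarmup = 1000,
  nchain = n_cores, ncore = n_cores)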

    References

Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. 10.31234/osf.io/k5b8m

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bandit4arm_lapse_decay(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bandit4arm_lapse_decay(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/bandit4arm_singleA_lapse.html b/R/docs/reference/bandit4arm_singleA_lapse.html
deleted file mode 100644
index ae2fa35d..00000000
--- a/R/docs/reference/bandit4arm_singleA_lapse.html
+++ /dev/null
@@ -1,339 +0,0 @@
4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. — bandit4arm_singleA_lapse • hBayesDM

Hierarchical Bayesian Modeling of the 4-Armed Bandit Task using the 4 Parameter Model, without C (choice perseveration) but with xi (noise) and a single learning rate for both R and P. It has the following parameters: A (learning rate), R (reward sensitivity), P (punishment sensitivity), xi (noise).

• Task: 4-Armed Bandit Task

• Model: 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P. (Aylward et al., 2018)

bandit4arm_singleA_lapse(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.
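
The vb argument above can serve as a fast approximate pass before committing to a full MCMC run. A minimal sketch, assuming variational inference is used only as a sanity check rather than for final inference:

# Quick approximate posterior via variational inference
vb_output <- bandit4arm_singleA_lapse(data = "example", vb = TRUE)

# Full MCMC run for the results that are actually reported
output <- bandit4arm_singleA_lapse(
  data = "example", niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4)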

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bandit4arm_singleA_lapse").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the 4-Armed Bandit Task, there should be 4 columns of data with the labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on the given trial: 1, 2, 3, or 4.

gain

Floating point value representing the amount of currency won on the given trial (e.g. 50, 100).

loss

Floating point value representing the amount of currency lost on the given trial (e.g. 0, -50).

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

    References

Aylward, Valton, Ahn, Bond, Dayan, Roiser, & Robinson (2018). Altered decision-making under uncertainty in unmedicated mood and anxiety disorders. PsyArXiv. 10.31234/osf.io/k5b8m

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bandit4arm_singleA_lapse(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bandit4arm_singleA_lapse(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/bart_par4.html b/R/docs/reference/bart_par4.html
deleted file mode 100644
index 1babdb6a..00000000
--- a/R/docs/reference/bart_par4.html
+++ /dev/null
@@ -1,339 +0,0 @@
Re-parameterized version of BART model with 4 parameters — bart_par4 • hBayesDM

Hierarchical Bayesian Modeling of the Balloon Analogue Risk Task using a re-parameterized version of the BART model with 4 parameters. It has the following parameters: phi (prior belief of balloon not bursting), eta (updating rate), gam (risk-taking parameter), tau (inverse temperature).

• Task: Balloon Analogue Risk Task

• Model: Re-parameterized version of BART model with 4 parameters (van Ravenzwaaij et al., 2011)

bart_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "pumps", "explosion". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("bart_par4").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
For the Balloon Analogue Risk Task, there should be 3 columns of data with the labels "subjID", "pumps", "explosion". It is not necessary for the columns to be in this particular order; however, it is necessary that they be labeled correctly and contain the information below:

subjID

A unique identifier for each subject in the data-set.

pumps

The number of pumps.

explosion

0: intact, 1: burst

*Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
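
A minimal illustration of this three-column format (hypothetical values; a real BART data-set would contain many balloons per subject):

# explosion: 0 = balloon cashed out intact, 1 = balloon burst
df <- data.frame(
  subjID    = c(1, 1, 2, 2),
  pumps     = c(3, 8, 5, 2),
  explosion = c(0, 1, 0, 0)
)
output <- bart_par4(data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)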

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research) for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

Contributors

Harhim Park <hrpark12@gmail.com>, Jaeyeong Yang <jaeyeong.yang1125@gmail.com>, Ayoung Lee <aylee2008@naver.com>, Jeongbin Oh <ows0104@gmail.com>, Jiyoon Lee <nicole.lee2001@gmail.com>, Junha Jang <andy627robo@naver.com>

References

van Ravenzwaaij, D., Dutilh, G., & Wagenmakers, E. J. (2011). Cognitive model decomposition of the BART: Assessment and application. Journal of Mathematical Psychology, 55(1), 94-105.

See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- bart_par4(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- bart_par4(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/cgt_cm.html b/R/docs/reference/cgt_cm.html
deleted file mode 100644
index 8423f533..00000000
--- a/R/docs/reference/cgt_cm.html
+++ /dev/null
@@ -1,343 +0,0 @@
Cumulative Model — cgt_cm • hBayesDM

Hierarchical Bayesian Modeling of the Cambridge Gambling Task using the Cumulative Model. It has the following parameters: alpha (probability distortion), c (color bias), rho (relative loss sensitivity), beta (discounting rate), gamma (choice sensitivity).

• Task: Cambridge Gambling Task (Rogers et al., 1999)

• Model: Cumulative Model

cgt_cm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are: "y_hat_col", "y_hat_bet", "bet_utils".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. Not available for this model.

adapt_delta

Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

...

For this model, there is no model-specific argument.
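
Unlike the bandit models above, cgt_cm can export model-based regressors. A short sketch, assuming (per the argument description above) that "y_hat_col", "y_hat_bet", and "bet_utils" are returned inside the modelRegressor component:

# Fit with regressor export enabled, then pull the trial-level bet utilities
output <- cgt_cm(data = "example", modelRegressor = TRUE)
bet_utils <- output$modelRegressor$bet_utils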

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("cgt_cm").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Cambridge Gambling Task, there should be 7 columns of data with the - labels "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gamble_type

    Integer value representing whether the bets on the current trial were presented in descending (0) or ascending (1) order.

    -
    percentage_staked

    Integer value representing the bet percentage (not proportion) selected on the current trial: 5, 25, 50, 75, or 95.

    -
    trial_initial_points

    Floating point value representing the number of points that the subject has at the start of the current trial (e.g., 100, 150, etc.).

    -
    assessment_stage

    Integer value representing whether the current trial is a practice trial (0) or a test trial (1). Only test trials are used for model fitting.

    -
    red_chosen

    Integer value representing whether the red color was chosen (1) versus the blue color (0).

    -
    n_red_boxes

    Integer value representing the number of red boxes shown on the current trial: 1, 2, 3,..., or 9.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
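
    As a rough sketch (all values below are illustrative, not real task data), a minimal
    data.frame with the required columns could be assembled as follows:

    df <- data.frame(
      subjID               = c(1, 1, 2, 2),         # unique subject identifiers
      gamble_type          = c(0, 1, 0, 1),         # 0 = descending, 1 = ascending bets
      percentage_staked    = c(25, 50, 5, 95),      # bet percentage: 5, 25, 50, 75, or 95
      trial_initial_points = c(100, 125, 100, 95),  # points at the start of the trial
      assessment_stage     = c(1, 1, 1, 1),         # 1 = test trial (used for fitting)
      red_chosen           = c(1, 0, 1, 1),         # 1 = red chosen, 0 = blue chosen
      n_red_boxes          = c(3, 7, 5, 2))         # number of red boxes shown (1-9)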

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
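
    If the sampler reports divergent transitions or treedepth warnings, one common
    (though not prescriptive) adjustment is to raise adapt_delta and max_treedepth, at
    the cost of slower sampling; a minimal sketch using the arguments documented above:

    output <- cgt_cm(
      data = "example", niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4,
      adapt_delta = 0.99, max_treedepth = 12)  # more cautious sampler settings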

    -

    Contributors

    -Nathaniel Haines <haines.175@osu.edu>

    References

    - -

    Rogers, R. D., Everitt, B. J., Baldacchino, A., Blackshaw, A. J., Swainson, R., Wynne, K., Baker, N. B., Hunter, J., Carthy, T., London, M., Deakin, J. F. W., Sahakian, B. J., Robbins, T. W. (1999). Dissociable deficits in the decision-making cognition of chronic amphetamine abusers, opiate abusers, patients with focal damage to prefrontal cortex, and tryptophan-depleted normal volunteers: evidence for monoaminergic mechanisms. Neuropsychopharmacology, 20, 322–339.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- cgt_cm(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- cgt_cm(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/choiceRT_ddm.html b/R/docs/reference/choiceRT_ddm.html
    deleted file mode 100644
    index 760bbbb7..00000000
    --- a/R/docs/reference/choiceRT_ddm.html
    +++ /dev/null
    @@ -1,355 +0,0 @@
    -Drift Diffusion Model — choiceRT_ddm • hBayesDM

    Hierarchical Bayesian Modeling of the Choice Reaction Time Task using Drift Diffusion Model. -It has the following parameters: alpha (boundary separation), beta (bias), delta (drift rate), tau (non-decision time).

    -
      -
    • Task: Choice Reaction Time Task

    • -
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • -
    - -
    - -
    choiceRT_ddm(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -Not available for this model.

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Reaction Time Task, there should be 3 columns of data with the - labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    RT

    Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
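
    As a rough sketch (the RT values are illustrative), the three required columns could
    be assembled as follows before being passed to choiceRT_ddm:

    df <- data.frame(
      subjID = c(1, 1, 1, 2, 2, 2),  # unique subject identifiers
      choice = c(1, 1, 2, 1, 2, 2),  # 1/2 = lower/upper boundary (or left/right)
      RT     = c(0.435, 0.383, 0.314, 0.309, 0.412, 0.398))  # reaction times in seconds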

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
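
    The model-specific RTbound argument described in the Arguments section above can be
    passed directly in the function call; the value below is only an example and should
    be chosen relative to the shortest plausible RT in one's own data:

    output <- choiceRT_ddm(
      data = "example", RTbound = 0.15,  # raise the minimum allowed RT above the 0.1 s default
      niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)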

    - -

    Note

    - -

    Notes: -This implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. -Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and possibly others on the Stan mailing list.

    - -

    References

    - -

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- choiceRT_ddm(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- choiceRT_ddm(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/choiceRT_ddm_single.html b/R/docs/reference/choiceRT_ddm_single.html
    deleted file mode 100644
    index d78001c8..00000000
    --- a/R/docs/reference/choiceRT_ddm_single.html
    +++ /dev/null
    @@ -1,356 +0,0 @@
    -Drift Diffusion Model — choiceRT_ddm_single • hBayesDM

    Individual Bayesian Modeling of the Choice Reaction Time Task using Drift Diffusion Model. -It has the following parameters: alpha (boundary separation), beta (bias), delta (drift rate), tau (non-decision time).

    -
      -
    • Task: Choice Reaction Time Task

    • -
    • Model: Drift Diffusion Model (Ratcliff, 1978)

    • -
    - -
    - -
    choiceRT_ddm_single(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "choice", "RT". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -Not available for this model.

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    RTbound

    Floating point value representing the lower bound (i.e., minimum allowed) reaction time. Defaults to 0.1 (100 milliseconds).

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("choiceRT_ddm_single").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Reaction Time Task, there should be 3 columns of data with the - labels "subjID", "choice", "RT". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Choice made for the current trial, coded as 1/2 to indicate lower/upper boundary or left/right choices (e.g., 1 1 1 2 1 2).

    -
    RT

    Choice reaction time for the current trial, in **seconds** (e.g., 0.435 0.383 0.314 0.309, etc.).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
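
    Because this is the individual (single-subject) variant, one would typically pass the
    trials of a single subject; a minimal sketch, assuming df holds a multi-subject data set:

    df_single <- subset(df, subjID == 1)  # keep one subject's trials
    output <- choiceRT_ddm_single(
      data = df_single, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 1)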

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    Note

    - -

    Notes: -This implementation is NOT the full Drift Diffusion Model as described in Ratcliff (1978): it estimates the drift rate, boundary separation, starting point, and non-decision time, but not the between- and within-trial variances in these parameters. -Code for this model is based on code and comments by Guido Biele, Joseph Burling, Andrew Ellis, and possibly others on the Stan mailing list.

    - -

    References

    - -

    Ratcliff, R. (1978). A theory of memory retrieval. Psychological Review, 85(2), 59-108. http://doi.org/10.1037/0033-295X.85.2.59

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- choiceRT_ddm_single(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- choiceRT_ddm_single(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/choiceRT_lba.html b/R/docs/reference/choiceRT_lba.html
    deleted file mode 100644
    index 05802d60..00000000
    --- a/R/docs/reference/choiceRT_lba.html
    +++ /dev/null
    @@ -1,313 +0,0 @@
    -Choice Reaction Time task, linear ballistic accumulator modeling — choiceRT_lba • hBayesDM

    Hierarchical Bayesian Modeling of choice/reaction time data with the following parameters: "d" (boundary), "A" (upper boundary of starting point), "v" (drift rate), "tau" (non-decision time). -The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24.

    -

    MODEL: -Brown and Heathcote LBA model - multiple subjects. Note that this implementation estimates a different drift rate -for each condition-choice pair. For example, if the task involves deciding between two stimuli on each trial, and -there are two different conditions throughout the task (e.g. speed versus accuracy), a total of 4 (2 stimuli by 2 conditions) -drift rates will be estimated. For details on implementation, see Annis et al. (2016).
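
    To make the drift-rate bookkeeping concrete, the number of estimated drift rates is
    simply the number of choice options times the number of conditions:

    n_choices     <- 2                         # e.g., two stimuli per trial
    n_conditions  <- 2                         # e.g., speed versus accuracy
    n_drift_rates <- n_choices * n_conditions  # 4, as in the example above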

    - -
    - -
    choiceRT_lba(data = "choose", niter = 3000, nwarmup = 1000,
    -  nchain = 2, ncore = 2, nthin = 1, inits = "random",
    -  indPars = "mean", saveDir = NULL, modelRegressor = FALSE,
    -  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
    -  stepsize = 1, max_treedepth = 10)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labelled as follows: "subjID", "choice", "RT", and "condition". See Details below for more information.

    niter

    Number of iterations, including warm-up.

    nwarmup

    Number of iterations used for warm-up only.

    nchain

    Number of chains to be run.

    ncore

    Integer value specifying how many CPUs to run the MCMC sampling on. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Options are "fixed" or "random" or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    saveDir

    Path to directory where .RData file of model output (modelData) can be saved. Leave blank if not interested.

    modelRegressor

    Exporting model-based regressors? TRUE or FALSE. Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.

    adapt_delta

    Floating point number representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps that the MCMC sampler can take on each new iteration. See Details below.

    - -

    Value

    - -

    modelData A class 'hBayesDM' object with the following components:

    -
    model

    Character string with the name of the model ("choiceRT_lba").

    -
    allIndPars

    'data.frame' containing the summarized parameter - values (as specified by 'indPars') for each subject.

    -
    parVals

    A 'list' where each element contains posterior samples - over different model parameters.

    -
    fit

    A class 'stanfit' object containing the fitted model.

    -
    rawdata

    "data.frame" containing the raw data used to fit the model, as specified by the user.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name of the file, including the file extension -(e.g. ".txt"), that contains the behavioral data of all subjects of interest for the current analysis. -The file should be a tab-delimited text (.txt) file whose rows represent trial-by-trial observations and columns -represent variables. For choice/reaction time tasks, there should be four columns of data -with the labels "subjID", "choice", "RT", and "condition". It is not necessary for the columns to be in this particular order, -however it is necessary that they be labelled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject within data-set to be analyzed.

    -
    "choice"

    An integer representing the choice made on the current trial. (e.g., 1 1 3 2 1 2).

    -
    "RT"

    A floating point number representing the choice reaction time in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    -
    "condition"

    An integer representing the condition of the current trial (e.g., 1 2 3 4).

    -

    *Note: The data.txt file may contain other columns of data (e.g. "Reaction_Time", "trial_number", etc.), but only the data with the column -names listed above will be used for analysis/modeling. As long as the columns above are present and labelled correctly, -there is no need to remove other miscellaneous data columns.
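
    As a rough sketch (all values are illustrative), the four required columns could be
    assembled as follows:

    df <- data.frame(
      subjID    = c(1, 1, 1, 2, 2, 2),  # unique subject identifiers
      choice    = c(1, 1, 3, 2, 1, 2),  # integer choice made on each trial
      RT        = c(0.435, 0.383, 0.314, 0.309, 0.412, 0.398),  # in seconds
      condition = c(1, 2, 1, 2, 1, 2))  # integer condition code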

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the -beginning of each chain. For those familiar with Bayesian methods, this value is equivalent to a burn-in sample. -Due to the nature of MCMC sampling, initial values (where the sampling chain begins) can have a heavy influence -on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the -effects that initial values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be -used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling -process, it is good practice to run multiple chains to ensure that a representative posterior is attained. When -sampling is completed, the multiple chains may be checked for convergence with the plot(myModel, type = "trace") -command. The chains should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC samples being chosen -to generate the posterior distributions. By default, nthin is equal to 1, hence every sample is used to -generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control -over Stan's MCMC sampler. The Stan creators recommend that only advanced users change the default values, as alterations -can profoundly change the sampler's behavior. Refer to Hoffman & Gelman (2014, Journal of Machine Learning Research) for -more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the -Stan User's Manual for a less technical description of these arguments.

    - -

    References

    - -

    Brown, S. D., & Heathcote, A. (2008). The simplest complete model of choice response time: Linear ballistic accumulation. -Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002

    -

    Annis, J., Miller, B. J., & Palmeri, T. J. (2016). Bayesian inference with Stan: A tutorial on adding custom distributions. -Behavior research methods, 1-24.

    -

    Hoffman, M. D., & Gelman, A. (2014). The No-U-turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. The -Journal of Machine Learning Research, 15(1), 1593-1623.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_lba(data = "example", niter = 2000, nwarmup = 1000, nchain = 3, ncore = 3)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = 'trace')
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/choiceRT_lba_single.html b/R/docs/reference/choiceRT_lba_single.html
    deleted file mode 100644
    index bfa95fbb..00000000
    --- a/R/docs/reference/choiceRT_lba_single.html
    +++ /dev/null
    @@ -1,313 +0,0 @@
    -Choice Reaction Time task, linear ballistic accumulator modeling — choiceRT_lba_single • hBayesDM

    Individual Bayesian Modeling of choice/reaction time data with the following parameters: "d" (boundary), "A" (upper boundary of starting point), "v" (drift rate), "tau" (non-decision time). -The model published in Annis, J., Miller, B. J., & Palmeri, T. J. (2016). Bayesian inference with Stan: A tutorial on adding custom distributions. Behavior research methods, 1-24.

    -

    MODEL: -Brown and Heathcote LBA model - single subject. Note that this implementation estimates a different drift rate -for each condition-choice pair. For example, if the task involves deciding between two stimuli on each trial, and -there are two different conditions throughout the task (e.g. speed versus accuracy), a total of 4 (2 stimuli by 2 conditions) -drift rates will be estimated. For details on implementation, see Annis et al. (2016).

    - -
    - -
    choiceRT_lba_single(data = "choose", niter = 3000, nwarmup = 1000,
    -  nchain = 2, ncore = 2, nthin = 1, inits = "random",
    -  indPars = "mean", saveDir = NULL, modelRegressor = FALSE,
    -  vb = FALSE, inc_postpred = FALSE, adapt_delta = 0.95,
    -  stepsize = 1, max_treedepth = 10)
    - -

    Arguments

    data

    A .txt file containing the data to be modeled. Data columns should be labelled as follows: "subjID", "choice", "RT", and "condition". See Details below for more information.

    niter

    Number of iterations, including warm-up.

    nwarmup

    Number of iterations used for warm-up only.

    nchain

    Number of chains to be run.

    ncore

    Integer value specifying how many CPUs to run the MCMC sampling on. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Options are "fixed" or "random" or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    saveDir

    Path to directory where .RData file of model output (modelData) can be saved. Leave blank if not interested.

    modelRegressor

    Exporting model-based regressors? TRUE or FALSE. Currently not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE.

    adapt_delta

    Floating point number representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps that the MCMC sampler can take on each new iteration. See Details below.

    - -

    Value

    - -

    modelData A class 'hBayesDM' object with the following components:

    -
    model

    Character string with the name of the model ("choiceRT_lba_single").

    -
    allIndPars

    'data.frame' containing the summarized parameter - values (as specified by 'indPars') for each subject.

    -
    parVals

    A 'list' where each element contains posterior samples - over different model parameters.

    -
    fit

    A class 'stanfit' object containing the fitted model.

    -
    rawdata

    "data.frame" containing the raw data used to fit the model, as specified by the user.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name of the file, including the file extension -(e.g. ".txt"), that contains the behavioral data of the subject of interest for the current analysis. -The file should be a tab-delimited text (.txt) file whose rows represent trial-by-trial observations and columns -represent variables. For choice/reaction time tasks, there should be four columns of data -with the labels "subjID", "choice", "RT", and "condition". It is not necessary for the columns to be in this particular order, -however it is necessary that they be labelled correctly and contain the information below:

    -
    "subjID"

    A unique identifier for each subject within data-set to be analyzed.

    -
    "choice"

    An integer representing the choice made on the current trial. (e.g., 1 1 3 2 1 2).

    -
    "RT"

    A floating point number representing the choice reaction time in seconds (e.g., 0.435 0.383 0.314 0.309, etc.).

    -
    "condition"

    An integer representing the condition of the current trial (e.g., 1 2 3 4).

    -

    *Note: The data.txt file may contain other columns of data (e.g. "Reaction_Time", "trial_number", etc.), but only the data with the column -names listed above will be used for analysis/modeling. As long as the columns above are present and labelled correctly, -there is no need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the -beginning of each chain. For those familiar with Bayesian methods, this value is equivalent to a burn-in sample. -Due to the nature of MCMC sampling, initial values (where the sampling chain begins) can have a heavy influence -on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the -effects that initial values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be -used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling -process, it is good practice to run multiple chains to ensure that a representative posterior is attained. When -sampling is completed, the multiple chains may be checked for convergence with the plot(myModel, type = "trace") -command. The chains should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC samples being chosen -to generate the posterior distributions. By default, nthin is equal to 1, hence every sample is used to -generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control -over Stan's MCMC sampler. The Stan creators recommend that only advanced users change the default values, as alterations -can profoundly change the sampler's behavior. Refer to Hoffman & Gelman (2014, Journal of Machine Learning Research) for -more information on the functioning of the sampler control parameters. One can also refer to section 58.2 of the -Stan User's Manual for a less technical description of these arguments.

    - -

    References

    - -

    Brown, S. D., & Heathcote, A. (2008). The simplest complete model of choice response time: Linear ballistic accumulation. -Cognitive Psychology, 57(3), 153-178. http://doi.org/10.1016/j.cogpsych.2007.12.002

    -

    Annis, J., Miller, B. J., & Palmeri, T. J. (2016). Bayesian inference with Stan: A tutorial on adding custom distributions. -Behavior research methods, 1-24.

    -

    Hoffman, M. D., & Gelman, A. (2014). The No-U-turn sampler: adaptively setting path lengths in Hamiltonian Monte Carlo. The -Journal of Machine Learning Research, 15(1), 1593-1623.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model and store results in "output"
    -output <- choiceRT_lba_single(data = "example", niter = 2000, nwarmup = 1000, nchain = 3, ncore = 3)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = 'trace')
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/cra_exp.html b/R/docs/reference/cra_exp.html
    deleted file mode 100644
    index c8293eae..00000000
    --- a/R/docs/reference/cra_exp.html
    +++ /dev/null
    @@ -1,342 +0,0 @@
    -Exponential Subjective Value Model — cra_exp • hBayesDM

    Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using Exponential Subjective Value Model. -It has the following parameters: alpha (risk attitude), beta (ambiguity attitude), gamma (inverse temperature).

    -
      -
    • Task: Choice Under Risk and Ambiguity Task

    • -
    • Model: Exponential Subjective Value Model (Hsu et al., 2005)

    • -
    - -
    - -
    cra_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_exp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the - labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    prob

    Objective probability of the variable lottery.

    -
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
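
    As a rough sketch (all values are illustrative, not real task data), the six required
    columns could be assembled as follows:

    df <- data.frame(
      subjID     = c(1, 1, 2, 2),              # unique subject identifiers
      prob       = c(0.25, 0.50, 0.25, 0.50),  # objective probability of the variable lottery
      ambig      = c(0, 0.5, 0, 0.74),         # 0 = risky lottery; > 0 = ambiguous lottery
      reward_var = c(10, 20, 10, 20),          # variable-lottery reward (> 0)
      reward_fix = c(5, 5, 5, 5),              # fixed-lottery reward (> 0)
      choice     = c(1, 0, 1, 1))              # 1 = variable lottery chosen, 0 otherwise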

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
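
    Since this model supports model-based regressors ("sv", "sv_fix", "sv_var", "p_var";
    see the modelRegressor argument above), they can be requested at fit time. A minimal
    sketch, assuming the regressors are returned in the modelRegressor component listed
    under Value:

    output <- cra_exp(
      data = "example", modelRegressor = TRUE,
      niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    regs <- output$modelRegressor  # list with sv, sv_fix, sv_var, p_var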

    -

    Contributors

    -Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

    References

    - -

    Hsu, M., Bhatt, M., Adolphs, R., Tranel, D., & Camerer, C. F. (2005). Neural systems responding to degrees of uncertainty in human decision-making. Science, 310(5754), 1680-1683. https://doi.org/10.1126/science.1115327

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- cra_exp(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- cra_exp(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }

    diff --git a/R/docs/reference/cra_linear.html b/R/docs/reference/cra_linear.html
    deleted file mode 100644
    index 1fc9bf13..00000000
    --- a/R/docs/reference/cra_linear.html
    +++ /dev/null
    @@ -1,342 +0,0 @@
    -Linear Subjective Value Model — cra_linear • hBayesDM

    Hierarchical Bayesian Modeling of the Choice Under Risk and Ambiguity Task using Linear Subjective Value Model. -It has the following parameters: alpha (risk attitude), beta (ambiguity attitude), gamma (inverse temperature).

    -
      -
    • Task: Choice Under Risk and Ambiguity Task

    • -
    • Model: Linear Subjective Value Model (Levy et al., 2010)

    • -
    - -
    - -
    cra_linear(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "sv", "sv_fix", "sv_var", "p_var".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("cra_linear").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Choice Under Risk and Ambiguity Task, there should be 6 columns of data with the - labels "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    prob

    Objective probability of the variable lottery.

    -
    ambig

    Ambiguity level of the variable lottery (0 for risky lottery; greater than 0 for ambiguous lottery).

    -
    reward_var

    Amount of reward in variable lottery. Assumed to be greater than zero.

    -
    reward_fix

    Amount of reward in fixed lottery. Assumed to be greater than zero.

    -
    choice

    If the variable lottery was selected, choice == 1; otherwise choice == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.
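    As a usage sketch, these control parameters are passed straight through as arguments of the model function; the values below are arbitrary examples of more conservative settings, not recommendations.

    # Example: more conservative sampler settings (arbitrary illustrative values)
    output <- cra_linear(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4,
      adapt_delta = 0.99, stepsize = 0.5, max_treedepth = 12)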

Contributors

    Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

References

    Levy, I., Snell, J., Nelson, A. J., Rustichini, A., & Glimcher, P. W. (2010). Neural representation of subjective value under risk and ambiguity. Journal of Neurophysiology, 103(2), 1036-1047.

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- cra_linear(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- cra_linear(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dbdm_prob_weight.html b/R/docs/reference/dbdm_prob_weight.html
deleted file mode 100644
index db16b04a..00000000
--- a/R/docs/reference/dbdm_prob_weight.html
+++ /dev/null
@@ -1,347 +0,0 @@

    Probability Weight Function — dbdm_prob_weight • hBayesDM

Hierarchical Bayesian Modeling of the Description Based Decision Making Task using Probability Weight Function. It has the following parameters: tau (probability weight function), rho (subject utility function), lambda (loss aversion parameter), beta (inverse softmax temperature).

    • Task: Description Based Decision Making Task

    • Model: Probability Weight Function (Erev et al., 2010; Hertwig et al., 2004; Jessup et al., 2008)

    dbdm_prob_weight(data = NULL, niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, ...)

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dbdm_prob_weight").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Description Based Decision Making Task, there should be 8 columns of data with the labels "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    opt1hprob

    Probability of getting the higher-value outcome (opt1hval) when choosing option 1.

    opt2hprob

    Probability of getting the higher-value outcome (opt2hval) when choosing option 2.

    opt1hval

    Possible (with opt1hprob probability) outcome of option 1.

    opt1lval

    Possible (with (1 - opt1hprob) probability) outcome of option 1.

    opt2hval

    Possible (with opt2hprob probability) outcome of option 2.

    opt2lval

    Possible (with (1 - opt2hprob) probability) outcome of option 2.

    choice

    If option 1 was selected, choice == 1; else if option 2 was selected, choice == 2.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
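    As a concrete illustration, a minimal data.frame with the required columns could look like the sketch below; the values are made up, and only the column names and coding follow the specification above.

    # Hypothetical one-trial data-set with the required columns (illustrative values only)
    df <- data.frame(
      subjID    = 1,
      opt1hprob = 0.8,  # probability of the higher outcome of option 1
      opt2hprob = 0.2,  # probability of the higher outcome of option 2
      opt1hval  = 4,    # higher outcome of option 1
      opt1lval  = 0,    # lower outcome of option 1
      opt2hval  = 20,   # higher outcome of option 2
      opt2lval  = -2,   # lower outcome of option 2
      choice    = 1     # 1 = option 1 chosen; 2 = option 2 chosen
    )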

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

Contributors

    Yoonseo Zoh <zohyos7@gmail.com>

References

    Erev, I., Ert, E., Roth, A. E., Haruvy, E., Herzog, S. M., Hau, R., ... & Lebiere, C. (2010). A choice prediction competition: Choices from experience and from description. Journal of Behavioral Decision Making, 23(1), 15-47.

    Hertwig, R., Barron, G., Weber, E. U., & Erev, I. (2004). Decisions from experience and the effect of rare events in risky choice. Psychological Science, 15(8), 534-539.

    Jessup, R. K., Bishara, A. J., & Busemeyer, J. R. (2008). Feedback produces divergence from prospect theory in descriptive choice. Psychological Science, 19(10), 1015-1022.

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- dbdm_prob_weight(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- dbdm_prob_weight(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dd_cs.html b/R/docs/reference/dd_cs.html
deleted file mode 100644
index 3c87802a..00000000
--- a/R/docs/reference/dd_cs.html
+++ /dev/null
@@ -1,340 +0,0 @@

    Constant-Sensitivity (CS) Model — dd_cs • hBayesDM

Hierarchical Bayesian Modeling of the Delay Discounting Task using Constant-Sensitivity (CS) Model. It has the following parameters: r (exponential discounting rate), s (impatience), beta (inverse temperature).

    • Task: Delay Discounting Task

    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    dd_cs(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
      ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
      modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
      adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
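    For intuition, the constant-sensitivity discount function of Ebert & Prelec (2007) can be sketched in plain R as below. This is an illustrative reimplementation of the published formula, not the package's Stan code, and the parameter values are hypothetical.

    # Constant-sensitivity (CS) discounting: SV = amount * exp(-(r * delay)^s)
    cs_subjective_value <- function(amount, delay, r, s) {
      amount * exp(-(r * delay)^s)
    }
    cs_subjective_value(amount = 30.9, delay = 28, r = 0.05, s = 0.8)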

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dd_cs").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.
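    As a concrete illustration, a minimal data.frame with the required columns could be built as below; the values are made up, and only the column names and coding follow the specification above.

    # Hypothetical two-trial data-set with the required columns (illustrative values only)
    df <- data.frame(
      subjID        = c(1, 1),
      delay_later   = c(6, 28),       # delayed days for the later option
      amount_later  = c(13.4, 30.9),  # amount for the later option
      delay_sooner  = c(0, 0),        # delayed days for the sooner option
      amount_sooner = c(10, 10),      # amount for the sooner option
      choice        = c(1, 0)         # 1 = later chosen; 0 = sooner chosen
    )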

nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

References

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- dd_cs(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- dd_cs(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dd_cs_single.html b/R/docs/reference/dd_cs_single.html
deleted file mode 100644
index 518bdc76..00000000
--- a/R/docs/reference/dd_cs_single.html
+++ /dev/null
@@ -1,340 +0,0 @@

    Constant-Sensitivity (CS) Model — dd_cs_single • hBayesDM

Individual Bayesian Modeling of the Delay Discounting Task using Constant-Sensitivity (CS) Model. It has the following parameters: r (exponential discounting rate), s (impatience), beta (inverse temperature).

    • Task: Delay Discounting Task

    • Model: Constant-Sensitivity (CS) Model (Ebert et al., 2007)

    dd_cs_single(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
      ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
      modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
      adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dd_cs_single").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

References

    Ebert, J. E. J., & Prelec, D. (2007). The Fragility of Time: Time-Insensitivity and Valuation of the Near and Far Future. Management Science. http://doi.org/10.1287/mnsc.1060.0671

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- dd_cs_single(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- dd_cs_single(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dd_exp.html b/R/docs/reference/dd_exp.html
deleted file mode 100644
index ea56ddb0..00000000
--- a/R/docs/reference/dd_exp.html
+++ /dev/null
@@ -1,340 +0,0 @@

    Exponential Model — dd_exp • hBayesDM

Hierarchical Bayesian Modeling of the Delay Discounting Task using Exponential Model. It has the following parameters: r (exponential discounting rate), beta (inverse temperature).

    • Task: Delay Discounting Task

    • Model: Exponential Model (Samuelson, 1937)

    dd_exp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
      ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
      modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
      adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
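    For intuition, the exponential discounting rule (Samuelson, 1937) can be sketched in plain R as below; an illustrative reimplementation, not the package's Stan code, with hypothetical parameter values.

    # Exponential discounting: SV = amount * exp(-r * delay)
    exp_subjective_value <- function(amount, delay, r) {
      amount * exp(-r * delay)
    }
    exp_subjective_value(amount = 13.4, delay = 28, r = 0.05)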

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dd_exp").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

References

    Samuelson, P. A. (1937). A Note on Measurement of Utility. The Review of Economic Studies, 4(2), 155. http://doi.org/10.2307/2967612

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- dd_exp(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- dd_exp(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dd_hyperbolic.html b/R/docs/reference/dd_hyperbolic.html
deleted file mode 100644
index 7e98ab21..00000000
--- a/R/docs/reference/dd_hyperbolic.html
+++ /dev/null
@@ -1,341 +0,0 @@

    Hyperbolic Model — dd_hyperbolic • hBayesDM

Hierarchical Bayesian Modeling of the Delay Discounting Task using Hyperbolic Model. It has the following parameters: k (discounting rate), beta (inverse temperature).

    • Task: Delay Discounting Task

    • Model: Hyperbolic Model (Mazur, 1987)

    dd_hyperbolic(data = NULL, niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, ...)
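    For intuition, the hyperbolic discounting rule (Mazur, 1987) can be sketched in plain R as below; an illustrative reimplementation, not the package's Stan code, with hypothetical parameter values.

    # Hyperbolic discounting: SV = amount / (1 + k * delay)
    hyp_subjective_value <- function(amount, delay, k) {
      amount / (1 + k * delay)
    }
    hyp_subjective_value(amount = 13.4, delay = 28, k = 0.1)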

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dd_hyperbolic").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

References

    Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- dd_hyperbolic(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- dd_hyperbolic(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }

diff --git a/R/docs/reference/dd_hyperbolic_single.html b/R/docs/reference/dd_hyperbolic_single.html
deleted file mode 100644
index 6b365334..00000000
--- a/R/docs/reference/dd_hyperbolic_single.html
+++ /dev/null
@@ -1,341 +0,0 @@

    Hyperbolic Model — dd_hyperbolic_single • hBayesDM

Individual Bayesian Modeling of the Delay Discounting Task using Hyperbolic Model. It has the following parameters: k (discounting rate), beta (inverse temperature).

    • Task: Delay Discounting Task

    • Model: Hyperbolic Model (Mazur, 1987)

    dd_hyperbolic_single(data = NULL, niter = 4000, nwarmup = 1000,
      nchain = 4, ncore = 1, nthin = 1, inits = "vb",
      indPars = "mean", modelRegressor = FALSE, vb = FALSE,
      inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
      max_treedepth = 10, ...)

    Arguments

data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

    inits

    Character value specifying how the initial values should be generated. Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

    adapt_delta

    Floating point value representing the target acceptance probability of a new sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Floating point value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

Value

    A class "hBayesDM" object modelData with the following components:

    model

    Character value that is the name of the model ("dd_hyperbolic_single").

    allIndPars

    Data.frame containing the summarized parameter values (as specified by indPars) for each subject.

    parVals

    List object containing the posterior samples over different parameters.

    fit

    A class stanfit object that contains the fitted Stan model.

    rawdata

    Data.frame containing the raw data used to fit the model, as specified by the user.

    modelRegressor

    List object containing the extracted model-based regressors.

Details

    This section describes some of the function arguments in greater detail.

    data should be assigned a character value specifying the full path and name (including extension information, e.g. ".txt") of the file that contains the behavioral data-set of all subjects of interest for the current analysis. The file should be a tab-delimited text file, whose rows represent trial-by-trial observations and columns represent variables.
    For the Delay Discounting Task, there should be 6 columns of data with the labels "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". It is not necessary for the columns to be in this particular order, however it is necessary that they be labeled correctly and contain the information below:

    subjID

    A unique identifier for each subject in the data-set.

    delay_later

    An integer representing the delayed days for the later option (e.g. 1, 6, 28).

    amount_later

    A floating point number representing the amount for the later option (e.g. 10.5, 13.4, 30.9).

    delay_sooner

    An integer representing the delayed days for the sooner option (e.g. 0).

    amount_sooner

    A floating point number representing the amount for the sooner option (e.g. 10).

    choice

    If amount_later was selected, choice == 1; else if amount_sooner was selected, choice == 0.

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", etc.), but only the data within the column names listed above will be used during the modeling. As long as the necessary columns mentioned above are present and labeled correctly, there is no need to remove other miscellaneous data columns.

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the sampling chains begin) can have a heavy influence on the generated posterior distributions. The nwarmup argument can be set to a high number in order to curb the effects that initial values have on the resulting posteriors.

    nchain is a numerical value that specifies how many chains (i.e. independent sampling sequences) should be used to draw samples from the posterior distribution. Since the posteriors are generated from a sampling process, it is good practice to run multiple chains to ensure that a reasonably representative posterior is attained. When the sampling is complete, it is possible to check the multiple chains for convergence by running the following line of code: plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, using only every i == nthin samples to generate posterior distributions. By default, nthin is equal to 1, meaning that every sample is used to generate the posterior.

    Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that give the user more control over Stan's MCMC sampler. It is recommended that only advanced users change the default values, as alterations can profoundly change the sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to the help page for stan for a less technical description of these arguments.

References

    Mazur, J. E. (1987). An adjustment procedure for studying delayed reinforcement.

    See also

    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- dd_hyperbolic_single(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- dd_hyperbolic_single(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/estimate_mode.html b/R/docs/reference/estimate_mode.html
deleted file mode 100644
index 0a08a4fc..00000000
--- a/R/docs/reference/estimate_mode.html
+++ /dev/null
@@ -1,155 +0,0 @@

    Function to estimate mode of MCMC samples — estimate_mode • hBayesDM

    Based on code from http://stackoverflow.com/questions/2547402/is-there-a-built-in-function-for-finding-the-mode; see the comment by Rasmus Bååth.

    - -
    - -
    estimate_mode(x)
    - -

    Arguments

    x

    MCMC samples or some numeric or array values.
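    A minimal sketch of the density-based approach referenced above (it
    mirrors the linked Stack Overflow answer; the name est_mode is used here
    so as not to imply it is the exact packaged implementation):

    est_mode <- function(x) {
      d <- density(x)          # kernel density estimate of the samples
      d$x[which.max(d$y)]      # x-value at the peak of the estimated density
    }
    est_mode(rnorm(1000, mean = 2))  # should return a value close to 2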

    - - -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/extract_ic.html b/R/docs/reference/extract_ic.html
deleted file mode 100644
index 32229540..00000000
--- a/R/docs/reference/extract_ic.html
+++ /dev/null
@@ -1,179 +0,0 @@

    Extract Model Comparison Estimates — extract_ic • hBayesDM

    Extract Model Comparison Estimates

    - -
    - -
    extract_ic(model_data = NULL, ic = "looic", ncore = 2)
    - -

    Arguments

    model_data

    Object returned by 'hBayesDM' model function

    ic

    Information criterion. One of 'looic', 'waic', or 'both'.

    ncore

    Number of cores to use when computing LOOIC

    - -

    Value

    - -

    Leave-One-Out (LOOIC) and/or Watanabe-Akaike (WAIC) information criterion estimates.
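    A small usage sketch covering the remaining option (here output stands
    for any fitted hBayesDM object, e.g. the one created in Examples below):

    extract_ic(output, ic = "both", ncore = 2)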

    - - -

    Examples

    -
    # NOT RUN {
    -library(hBayesDM)
    -output = bandit2arm_delta("example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 1)
    -# To show the LOOIC model fit estimates
    -extract_ic(output)
    -# To show the WAIC model fit estimates
    -extract_ic(output, ic = "waic")
    -# }
    -
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/gng_m1.html b/R/docs/reference/gng_m1.html
deleted file mode 100644
index a8b823cc..00000000
--- a/R/docs/reference/gng_m1.html
+++ /dev/null
@@ -1,338 +0,0 @@

    RW + noise — gng_m1 • hBayesDM

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise. -It has the following parameters: xi (noise), ep (learning rate), rho (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise (Guitart-Masip et al., 2012)

    • -
    - -
    - -
    gng_m1(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m1").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the - labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- gng_m1(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- gng_m1(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/gng_m2.html b/R/docs/reference/gng_m2.html
deleted file mode 100644
index acd414ab..00000000
--- a/R/docs/reference/gng_m2.html
+++ /dev/null
@@ -1,338 +0,0 @@

    RW + noise + bias — gng_m2 • hBayesDM

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias. -It has the following parameters: xi (noise), ep (learning rate), b (action bias), rho (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise + bias (Guitart-Masip et al., 2012)

    • -
    - -
    - -
    gng_m2(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m2").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the - labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- gng_m2(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- gng_m2(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/gng_m3.html b/R/docs/reference/gng_m3.html
deleted file mode 100644
index 15f42206..00000000
--- a/R/docs/reference/gng_m3.html
+++ /dev/null
@@ -1,338 +0,0 @@

    RW + noise + bias + pi — gng_m3 • hBayesDM

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW + noise + bias + pi. -It has the following parameters: xi (noise), ep (learning rate), b (action bias), pi (Pavlovian bias), rho (effective size).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW + noise + bias + pi (Guitart-Masip et al., 2012)

    • -
    - -
    - -
    gng_m3(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m3").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the - labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Guitart-Masip, M., Huys, Q. J. M., Fuentemilla, L., Dayan, P., Duzel, E., & Dolan, R. J. (2012). Go and no-go learning in reward and punishment: Interactions between affect and effect. Neuroimage, 62(1), 154-166. http://doi.org/10.1016/j.neuroimage.2012.04.024

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- gng_m3(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- gng_m3(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/gng_m4.html b/R/docs/reference/gng_m4.html
deleted file mode 100644
index 4d7a8a61..00000000
--- a/R/docs/reference/gng_m4.html
+++ /dev/null
@@ -1,338 +0,0 @@

    RW (rew/pun) + noise + bias + pi — gng_m4 • hBayesDM

    Hierarchical Bayesian Modeling of the Orthogonalized Go/Nogo Task using RW (rew/pun) + noise + bias + pi. -It has the following parameters: xi (noise), ep (learning rate), b (action bias), pi (Pavlovian bias), rhoRew (reward sensitivity), rhoPun (punishment sensitivity).

    -
      -
    • Task: Orthogonalized Go/Nogo Task

    • -
    • Model: RW (rew/pun) + noise + bias + pi (Cavanagh et al., 2013)

    • -
    - -
    - -
    gng_m4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "cue", "keyPressed", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "Qgo", "Qnogo", "Wgo", "Wnogo", "SV".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("gng_m4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Orthogonalized Go/Nogo Task, there should be 4 columns of data with the - labels "subjID", "cue", "keyPressed", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    cue

    Nominal integer representing the cue shown for that trial: 1, 2, 3, or 4.

    -
    keyPressed

    Binary value representing the subject's response for that trial (where Press == 1; No press == 0).

    -
    outcome

    Ternary value representing the outcome of that trial (where Positive feedback == 1; Neutral feedback == 0; Negative feedback == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Cavanagh, J. F., Eisenberg, I., Guitart-Masip, M., Huys, Q., & Frank, M. J. (2013). Frontal Theta Overrides Pavlovian Learning Biases. Journal of Neuroscience, 33(19), 8541-8548. http://doi.org/10.1523/JNEUROSCI.5754-12.2013

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- gng_m4(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- gng_m4(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/hBayesDM-package.html b/R/docs/reference/hBayesDM-package.html
deleted file mode 100644
index 4560a2d8..00000000
--- a/R/docs/reference/hBayesDM-package.html
+++ /dev/null
@@ -1,253 +0,0 @@

    Hierarchical Bayesian Modeling of Decision-Making Tasks — hBayesDM-package • hBayesDM

    Fit an array of decision-making tasks with computational models in a hierarchical Bayesian framework. Hierarchical Bayesian analysis of the various computational models can be performed with a single line of code, as sketched below. Tasks, each followed by its respective models, are itemized below.
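    As a sketch of that single-line usage (gng_m1 and the bundled "example"
    dataset are taken from the list below; all sampler settings keep their
    defaults):

    library(hBayesDM)
    output <- gng_m1(data = "example")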

    -
    -
    Bandit

    2-Armed Bandit (Rescorla-Wagner (delta)) --- bandit2arm_delta
    - 4-Armed Bandit with fictive updating + reward/punishment sensitivity (Rescorla-Wagner (delta)) --- bandit4arm_4par
    - 4-Armed Bandit with fictive updating + reward/punishment sensitivity + lapse (Rescorla-Wagner (delta)) --- bandit4arm_lapse

    -
    Bandit2

    Kalman filter --- bandit4arm2_kalman_filter

    -
    Choice RT

    Drift Diffusion Model --- choiceRT_ddm
    - Drift Diffusion Model for a single subject --- choiceRT_ddm_single
    - Linear Ballistic Accumulator (LBA) model --- choiceRT_lba
    - Linear Ballistic Accumulator (LBA) model for a single subject --- choiceRT_lba_single

    -
    Choice under Risk and Ambiguity

    Exponential model --- cra_exp
    - Linear model --- cra_linear

    -
    Description-Based Decision Making

    probability weight function --- dbdm_prob_weight

    -
    Delay Discounting

    Constant Sensitivity --- dd_cs
    - Constant Sensitivity for a single subject --- dd_cs_single
    - Exponential --- dd_exp
    - Hyperbolic --- dd_hyperbolic
    - Hyperbolic for a single subject --- dd_hyperbolic_single

    -
    Orthogonalized Go/Nogo

    RW + Noise --- gng_m1
    - RW + Noise + Bias --- gng_m2
    - RW + Noise + Bias + Pavlovian Bias --- gng_m3
    - RW(modified) + Noise + Bias + Pavlovian Bias --- gng_m4

    -
    Iowa Gambling

    Outcome-Representation Learning --- igt_orl
    - Prospect Valence Learning-DecayRI --- igt_pvl_decay
    - Prospect Valence Learning-Delta --- igt_pvl_delta
    - Value-Plus-Perseverance --- igt_vpp

    -
    Peer influence task

    OCU model --- peer_ocu

    -
    Probabilistic Reversal Learning

    Experience-Weighted Attraction --- prl_ewa
    - Fictitious Update --- prl_fictitious
    - Fictitious Update w/o alpha (indecision point) --- prl_fictitious_woa
    - Fictitious Update and multiple blocks per subject --- prl_fictitious_multipleB
    - Reward-Punishment --- prl_rp
    - Reward-Punishment and multiple blocks per subject --- prl_rp_multipleB
    - Fictitious Update with separate learning for Reward-Punishment --- prl_fictitious_rp
    - Fictitious Update with separate learning for Reward-Punishment w/o alpha (indecision point) --- prl_fictitious_rp_woa

    -
    Probabilistic Selection Task

    Q-learning with two learning rates --- pst_gainloss_Q

    -
    Risk Aversion

    Prospect Theory (PT) --- ra_prospect
    - PT without a loss aversion parameter --- ra_noLA
    - PT without a risk aversion parameter --- ra_noRA

    -
    Risky Decision Task

    Happiness model --- rdt_happiness

    -
    Two-Step task

    Full model (7 parameters) --- ts_par7
    - 6 parameter model (without eligibility trace, lambda) --- ts_par6
    - 4 parameter model --- ts_par4

    -
    Ultimatum Game

    Ideal Bayesian Observer --- ug_bayes
    - Rescorla-Wagner (delta) --- ug_delta

    - -
    - -
    - - -

    References

    - -

    Please cite as: -Ahn, W.-Y., Haines, N., & Zhang, L. (2017). Revealing neuro-computational mechanisms of reinforcement learning and decision-making with the hBayesDM package. Computational Psychiatry. 1, 24-57. https://doi.org/10.1162/CPSY_a_00002

    - -

    See also

    - -

    For tutorials and further readings, visit : http://rpubs.com/CCSL/hBayesDM.

    - - -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/hBayesDM_model.html b/R/docs/reference/hBayesDM_model.html
deleted file mode 100644
index 3d17ca81..00000000
--- a/R/docs/reference/hBayesDM_model.html
+++ /dev/null
@@ -1,260 +0,0 @@

    hBayesDM Model Base Function — hBayesDM_model • hBayesDM

    The base function from which all hBayesDM model functions are created.

    -

    Contributor: Jethro Lee

    - -
    - -
    hBayesDM_model(task_name, model_name, model_type = "", data_columns,
    -  parameters, regressors = NULL, postpreds = "y_pred",
    -  stanmodel_arg = NULL, preprocess_func)
    - -

    Arguments

    task_name

    Character value for name of task. E.g. "gng".

    model_name

    Character value for name of model. E.g. "m1".

    model_type

    Character value for modeling type: "" OR "single" OR -"multipleB".

    data_columns

    Character vector of necessary column names for the data. E.g. -c("subjID", "cue", "keyPressed", "outcome").

    parameters

    List of parameters, with information about their lower bound, plausible value, -upper bound. E.g. list("xi" = c(0, 0.1, 1), "ep" = c(0, 0.2, 1), "rho" = c(0, exp(2), -Inf)).

    regressors

    List of regressors, with information about their extracted dimensions. E.g. -list("Qgo" = 2, "Qnogo" = 2, "Wgo" = 2, "Wnogo" = 2). OR if model-based regressors are -not available for this model, NULL.

    postpreds

    Character vector of name(s) for the trial-level posterior predictive -simulations. Default is "y_pred". OR if posterior predictions are not yet available for -this model, NULL.

    stanmodel_arg

    Leave as NULL (default) for completed models. Else should either be a -character value (specifying the name of a Stan file) OR a stanmodel object (returned as -a result of running stan_model).

    preprocess_func

    Function to preprocess the raw data before it gets passed to Stan. Takes -(at least) two arguments: a data.table object raw_data and a list object -general_info. Possible to include additional argument(s) to use during preprocessing. -Should return a list object data_list, which will then directly be passed to Stan.

    - -

    Value

    - -

    A specific hBayesDM model function.

    - -

    Details

    - -

    task_name: Typically same task models share the same data column requirements.

    -

    model_name: Typically different models are distinguished by their different list of - parameters.

    -

    model_type is one of the following three:

    -
    ""

    Modeling of multiple subjects. (Default hierarchical Bayesian analysis.)

    -
    "single"

    Modeling of a single subject.

    -
    "multipleB"

    Modeling of multiple subjects, where multiple blocks exist within - each subject.

    -
    -

    data_columns must be the entirety of necessary data columns used at some point in the R - or Stan code. I.e. "subjID" must always be included. In the case of 'multipleB' type - models, "block" should also be included as well.

    -

    parameters is a list object, whose keys are the parameters of this model. Each parameter - key must be assigned a numeric vector holding 3 elements: the parameter's lower bound, - plausible value, and upper bound.

    -

    regressors is a list object, whose keys are the model-based regressors of this model. - Each regressor key must be assigned a numeric value indicating the number of dimensions its - data will be extracted as. If model-based regressors are not available for this model, this - argument should just be NULL.

    -

    postpreds defaults to "y_pred", but any other character vector holding - appropriate names is possible (c.f. Two-Step Task models). If posterior predictions are not yet - available for this model, this argument should just be NULL.

    -

    stanmodel_arg can be used by developers, during the developmental stage of creating a - new model function. If this argument is passed a character value, the Stan file with the - corresponding name will be used for model fitting. If this argument is passed a - stanmodel object, that stanmodel object will be used for model fitting. When - creation of the model function is complete, this argument should just be left as NULL.

    -

    preprocess_func is the part of the code that is specific to the model, and is thus - written in the specific model R file.
    -Arguments for this function are:

    -
    raw_data

    A data.table that holds the raw user data, which was read by using - fread.

    -
    general_info

    A list that holds general information about the raw data, i.e. subjs, n_subj, t_subjs, t_max, b_subjs, b_max.

    -
    ...

    Optional additional argument(s) that specific model functions may want to - include. Examples of such additional arguments currently being used in hBayesDM models are: - RTbound (choiceRT_ddm models), payscale (igt models), and trans_prob (ts - models).

    -

    Return value for this function should be:

    -
    data_list

    A list with appropriately named keys (as required by the model Stan - file), holding the fully preprocessed user data.

    -

    NOTE: Syntax for data.table differs slightly from that of data.frame. If you want to use raw_data as a data.frame when writing the preprocess_func, simply begin with the line: raw_data <- as.data.frame(raw_data).
    NOTE: Because case- and underscore-insensitive column names are allowed in user data, raw_data columns must be referenced by their lowercase, non-underscored versions, e.g. "subjid", within the code of the preprocess function.
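    To make the expected shape concrete, below is a minimal hypothetical
    sketch of a preprocess_func for a task with a single "choice" column
    (the column and key names are illustrative; real models build additional
    matrices and may accept extra arguments):

    preprocess_func <- function(raw_data, general_info) {
      raw_data <- as.data.frame(raw_data)  # switch to data.frame syntax
      n_subj <- general_info$n_subj
      t_max  <- general_info$t_max
      choice <- matrix(-1, nrow = n_subj, ncol = t_max)  # pad short subjects
      for (i in seq_len(n_subj)) {
        rows <- raw_data[raw_data$subjid == general_info$subjs[i], ]
        choice[i, seq_len(nrow(rows))] <- rows$choice
      }
      # Keys must match the data block of the corresponding Stan file
      list(N = n_subj, T = t_max, Tsubj = general_info$t_subjs, choice = choice)
    }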

    - - -
    - -
    - -
    - - -
    -

diff --git a/R/docs/reference/igt_orl.html b/R/docs/reference/igt_orl.html
deleted file mode 100644
index b4f4f365..00000000
--- a/R/docs/reference/igt_orl.html
+++ /dev/null
@@ -1,351 +0,0 @@

    Outcome-Representation Learning Model — igt_orl • hBayesDM

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using Outcome-Representation Learning Model. -It has the following parameters: Arew (reward learning rate), Apun (punishment learning rate), K (perseverance decay), betaF (outcome frequency weight), betaP (perseverance weight).

    -
      -
    • Task: Iowa Gambling Task (Ahn et al., 2008)

    • -
    • Model: Outcome-Representation Learning Model (Haines et al., 2018)

    • -
    - -
    - -
    igt_orl(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.
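    For instance, the model-specific argument can be passed alongside the
    common ones (a sketch; the value 100 simply makes the documented default
    explicit):

    output <- igt_orl(data = "example", payscale = 100)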

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_orl").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Iowa Gambling Task, there should be 4 columns of data with the - labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    Nate Haines <haines.175@osu.edu>

    References

    - -

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    -

    Haines, N., Vassileva, J., & Ahn, W.-Y. (2018). The Outcome-Representation Learning Model: A Novel Reinforcement Learning Model of the Iowa Gambling Task. Cognitive Science. https://doi.org/10.1111/cogs.12688

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- igt_orl(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- igt_orl(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/igt_pvl_decay.html b/R/docs/reference/igt_pvl_decay.html
deleted file mode 100644
index 12c9a964..00000000
--- a/R/docs/reference/igt_pvl_decay.html
+++ /dev/null
@@ -1,350 +0,0 @@
-Prospect Valence Learning (PVL) Decay-RI — igt_pvl_decay • hBayesDM

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using Prospect Valence Learning (PVL) Decay-RI. -It has the following parameters: A (decay rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion).

    -
    • Task: Iowa Gambling Task (Ahn et al., 2008)
    • Model: Prospect Valence Learning (PVL) Decay-RI (Ahn et al., 2014)

    igt_pvl_decay(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100 (a usage sketch follows below).

    - -
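
    A brief usage sketch for payscale; the value shown simply makes the default explicit:

    # Sketch: with payscale = 100, payoffs such as 100 and -250 become 1 and -2.5
    output <- igt_pvl_decay(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4,
      payscale = 100)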

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_pvl_decay").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -
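
    A short sketch of accessing these components from a fitted object (here called output):

    # Sketch: inspecting the pieces of a fitted hBayesDM object
    output$model             # "igt_pvl_decay"
    head(output$allIndPars)  # per-subject parameter summaries (as chosen via indPars)
    output$fit               # the underlying stanfit object
    head(output$rawdata)     # the data that were fit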

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Iowa Gambling Task, there should be 4 columns of data with the - labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    -

    Ahn, W.-Y., Vasilev, G., Lee, S.-H., Busemeyer, J. R., Kruschke, J. K., Bechara, A., & Vassileva, J. (2014). Decision-making in stimulant and opiate addicts in protracted abstinence: evidence from computational modeling with pure users. Frontiers in Psychology, 5, 1376. http://doi.org/10.3389/fpsyg.2014.00849

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- igt_pvl_decay(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- igt_pvl_decay(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/igt_pvl_delta.html b/R/docs/reference/igt_pvl_delta.html
deleted file mode 100644
index 21942462..00000000
--- a/R/docs/reference/igt_pvl_delta.html
+++ /dev/null
@@ -1,350 +0,0 @@
-Prospect Valence Learning (PVL) Delta — igt_pvl_delta • hBayesDM

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using Prospect Valence Learning (PVL) Delta. -It has the following parameters: A (learning rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion).

    -
    • Task: Iowa Gambling Task (Ahn et al., 2008)
    • Model: Prospect Valence Learning (PVL) Delta (Ahn et al., 2008)

    igt_pvl_delta(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_pvl_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Iowa Gambling Task, there should be 4 columns of data with the - labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992


    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- igt_pvl_delta(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- igt_pvl_delta(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/igt_vpp.html b/R/docs/reference/igt_vpp.html
deleted file mode 100644
index 9a538fca..00000000
--- a/R/docs/reference/igt_vpp.html
+++ /dev/null
@@ -1,349 +0,0 @@
-Value-Plus-Perseverance — igt_vpp • hBayesDM

    Hierarchical Bayesian Modeling of the Iowa Gambling Task using Value-Plus-Perseverance. -It has the following parameters: A (learning rate), alpha (outcome sensitivity), cons (response consistency), lambda (loss aversion), epP (gain impact), epN (loss impact), K (decay rate), w (RL weight).

    -
    • Task: Iowa Gambling Task (Ahn et al., 2008)
    • Model: Value-Plus-Perseverance (Worthy et al., 2013)

    igt_vpp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "gain", "loss". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    payscale

    Raw payoffs within data are divided by this number. Used for scaling data. Defaults to 100.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("igt_vpp").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Iowa Gambling Task, there should be 4 columns of data with the - labels "subjID", "choice", "gain", "loss". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer indicating which deck was chosen on that trial (where A==1, B==2, C==3, and D==4).

    -
    gain

    Floating point value representing the amount of currency won on that trial (e.g. 50, 100).

    -
    loss

    Floating point value representing the amount of currency lost on that trial (e.g. 0, -50).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Ahn, W. Y., Busemeyer, J. R., & Wagenmakers, E. J. (2008). Comparison of decision learning models using the generalization criterion method. Cognitive Science, 32(8), 1376-1402. http://doi.org/10.1080/03640210802352992

    -

    Worthy, D. A., & Todd Maddox, W. (2013). A comparison model of reinforcement-learning and win-stay-lose-shift decision-making processes: A tribute to W.K. Estes. Journal of Mathematical Psychology, 59, 41-49. http://doi.org/10.1016/j.jmp.2013.10.001

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- igt_vpp(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- igt_vpp(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/index.html b/R/docs/reference/index.html
deleted file mode 100644
index 181147b8..00000000
--- a/R/docs/reference/index.html
+++ /dev/null
@@ -1,487 +0,0 @@
-Function reference • hBayesDM

    Tasks & Models

    bandit2arm_delta(): Rescorla-Wagner (Delta) Model
    bandit4arm2_kalman_filter(): Kalman Filter
    bandit4arm_2par_lapse(): 3 Parameter Model, without C (choice perseveration), R (reward sensitivity), and P (punishment sensitivity). But with xi (noise)
    bandit4arm_4par(): 4 Parameter Model, without C (choice perseveration)
    bandit4arm_lapse(): 5 Parameter Model, without C (choice perseveration) but with xi (noise)
    bandit4arm_lapse_decay(): 5 Parameter Model, without C (choice perseveration) but with xi (noise). Added decay rate (Niv et al., 2015, J. Neuro).
    bandit4arm_singleA_lapse(): 4 Parameter Model, without C (choice perseveration) but with xi (noise). Single learning rate both for R and P.
    bart_par4(): Re-parameterized version of BART model with 4 parameters
    choiceRT_ddm(): Drift Diffusion Model
    choiceRT_ddm_single(): Drift Diffusion Model
    cra_exp(): Exponential Subjective Value Model
    cra_linear(): Linear Subjective Value Model
    dbdm_prob_weight(): Probability Weight Function
    dd_cs(): Constant-Sensitivity (CS) Model
    dd_cs_single(): Constant-Sensitivity (CS) Model
    dd_exp(): Exponential Model
    dd_hyperbolic(): Hyperbolic Model
    dd_hyperbolic_single(): Hyperbolic Model
    gng_m1(): RW + noise
    gng_m2(): RW + noise + bias
    gng_m3(): RW + noise + bias + pi
    gng_m4(): RW (rew/pun) + noise + bias + pi
    igt_orl(): Outcome-Representation Learning Model
    igt_pvl_decay(): Prospect Valence Learning (PVL) Decay-RI
    igt_pvl_delta(): Prospect Valence Learning (PVL) Delta
    igt_vpp(): Value-Plus-Perseverance
    peer_ocu(): Other-Conferred Utility (OCU) Model
    prl_ewa(): Experience-Weighted Attraction Model
    prl_fictitious(): Fictitious Update Model
    prl_fictitious_multipleB(): Fictitious Update Model
    prl_fictitious_rp(): Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE)
    prl_fictitious_rp_woa(): Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point)
    prl_fictitious_woa(): Fictitious Update Model, without alpha (indecision point)
    prl_rp(): Reward-Punishment Model
    prl_rp_multipleB(): Reward-Punishment Model
    pst_gainloss_Q(): Gain-Loss Q Learning Model
    ra_noLA(): Prospect Theory, without loss aversion (LA) parameter
    ra_noRA(): Prospect Theory, without risk aversion (RA) parameter
    ra_prospect(): Prospect Theory
    rdt_happiness(): Happiness Computational Model
    ts_par4(): Hybrid Model, with 4 parameters
    ts_par6(): Hybrid Model, with 6 parameters
    ts_par7(): Hybrid Model, with 7 parameters (original model)
    ug_bayes(): Ideal Observer Model
    ug_delta(): Rescorla-Wagner (Delta) Model
    wcs_sql(): Sequential Learning Model

    Functions

    estimate_mode(): Function to estimate mode of MCMC samples
    extract_ic(): Extract Model Comparison Estimates
    HDIofMCMC(): Compute Highest-Density Interval
    multiplot(): Function to plot multiple figures
    plotDist(): Plots the histogram of MCMC samples.
    plotHDI(): Plots highest density interval (HDI) from (MCMC) samples and prints HDI in the R console. HDI is indicated by a red line.
    plotInd(): Plots individual posterior distributions, using the stan_plot function of the rstan package
    printFit(): Print model-fits (mean LOOIC or WAIC values in addition to Akaike weights) of hBayesDM Models
    rhat(): Function for extracting Rhat values from an hBayesDM object

diff --git a/R/docs/reference/multiplot.html b/R/docs/reference/multiplot.html
deleted file mode 100644
index 5e4b9563..00000000
--- a/R/docs/reference/multiplot.html
+++ /dev/null
@@ -1,163 +0,0 @@
-Function to plot multiple figures — multiplot • hBayesDM

    Plots multiple figures. Based on code from 'http://www.cookbook-r.com/Graphs/Multiple_graphs_on_one_page_(ggplot2)/'

    - -
    - -
    multiplot(..., plots = NULL, cols = NULL)
    - -

    Arguments

    ...

    Plot objects

    plots

    List containing plot objects

    cols

    Number of columns within the multi-figure plot
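
    A minimal usage sketch; ggplot2 and the built-in mtcars data are used purely for illustration, and any ggplot objects would do:

    library(ggplot2)
    p1 <- ggplot(mtcars, aes(mpg)) + geom_histogram(bins = 10)
    p2 <- ggplot(mtcars, aes(wt, mpg)) + geom_point()
    multiplot(p1, p2, cols = 2)                # plots supplied via '...'
    multiplot(plots = list(p1, p2), cols = 2)  # or collected in a list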

diff --git a/R/docs/reference/peer_ocu.html b/R/docs/reference/peer_ocu.html
deleted file mode 100644
index 7b559757..00000000
--- a/R/docs/reference/peer_ocu.html
+++ /dev/null
@@ -1,344 +0,0 @@
-Other-Conferred Utility (OCU) Model — peer_ocu • hBayesDM

    Hierarchical Bayesian Modeling of the Peer Influence Task using Other-Conferred Utility (OCU) Model. -It has the following parameters: rho (risk preference), tau (inverse temperature), ocu (other-conferred utility).

    -
    • Task: Peer Influence Task (Chung et al., 2015)
    • Model: Other-Conferred Utility (OCU) Model

    peer_ocu(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("peer_ocu").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Peer Influence Task, there should be 8 columns of data with the - labels "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    condition

    0: solo, 1: info (safe/safe), 2: info (mix), 3: info (risky/risky).

    -
    p_gamble

    Probability of receiving a high payoff (same for both options).

    -
    safe_Hpayoff

    High payoff of the safe option.

    -
    safe_Lpayoff

    Low payoff of the safe option.

    -
    risky_Hpayoff

    High payoff of the risky option.

    -
    risky_Lpayoff

    Low payoff of the risky option.

    -
    choice

    Which option was chosen? 0: safe, 1: risky.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
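
    A minimal sketch of one subject's data in this format (all payoff and probability values are invented):

    df <- data.frame(
      subjID        = rep("subj01", 3),
      condition     = c(0, 2, 3),         # 0: solo, 2: info (mix), 3: info (risky/risky)
      p_gamble      = c(0.5, 0.5, 0.25),  # probability of the high payoff
      safe_Hpayoff  = c(10, 10, 12),
      safe_Lpayoff  = c(5, 5, 6),
      risky_Hpayoff = c(30, 30, 40),
      risky_Lpayoff = c(0, 0, 0),
      choice        = c(0, 1, 1)          # 0: safe, 1: risky
    )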

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

-Harhim Park <hrpark12@gmail.com>

    References

    - -

    Chung, D., Christopoulos, G. I., King-Casas, B., Ball, S. B., & Chiu, P. H. (2015). Social signals of safety and risk confer utility and have asymmetric effects on observers' choices. Nature Neuroscience, 18(6), 912-916.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- peer_ocu(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- peer_ocu(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/plot.hBayesDM.html b/R/docs/reference/plot.hBayesDM.html
deleted file mode 100644
index 68d4705a..00000000
--- a/R/docs/reference/plot.hBayesDM.html
+++ /dev/null
@@ -1,175 +0,0 @@
-General Purpose Plotting for hBayesDM. This function plots hyper parameters. — plot.hBayesDM • hBayesDM

    General Purpose Plotting for hBayesDM. This function plots hyper parameters.

    - -
    - -
    # S3 method for hBayesDM
    -plot(x = NULL, type = "dist", ncols = NULL,
    -  fontSize = NULL, binSize = NULL, ...)
    - -

    Arguments

    x

    Model output of class hBayesDM

    type

    Character value that specifies the plot type. Options are: "dist", "trace", or "simple". Defaults to "dist".

    ncols

    Integer value specifying how many plots there should be per row. Defaults to the number of parameters.

    fontSize

    Integer value specifying the size of the font used for plotting. Defaults to 10.

    binSize

    Integer value specifying how wide the bars on the histogram should be. Defaults to 30.

    ...

    Additional arguments to be passed on

diff --git a/R/docs/reference/plotDist.html b/R/docs/reference/plotDist.html
deleted file mode 100644
index 34a3318a..00000000
--- a/R/docs/reference/plotDist.html
+++ /dev/null
@@ -1,189 +0,0 @@
-Plots the histogram of MCMC samples. — plotDist • hBayesDM

    Plots the histogram of MCMC samples.

    - -
    - -
    plotDist(sample = NULL, Title = NULL, xLab = "Value",
    -  yLab = "Density", xLim = NULL, fontSize = NULL, binSize = NULL,
    -  ...)
    - -

    Arguments

    sample

    MCMC samples

    Title

    Character value containing the main title for the plot

    xLab

    Character value containing the x label

    yLab

    Character value containing the y label

    xLim

    Vector containing the lower and upper x-bounds of the plot

    fontSize

    Size of the font to use for plotting. Defaults to 10

    binSize

    Size of the bins for creating the histogram. Defaults to 30

    ...

    Arguments that can be additionally supplied to geom_histogram

    - -

    Value

    - -

    h1 Plot object
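
    A hedged usage sketch: output is assumed to be a fitted hBayesDM object whose posterior draws live in output$parVals, and mu_k is a hypothetical hyper-parameter name:

    h1 <- plotDist(sample = output$parVals$mu_k, Title = "mu_k",
                   xLab = "Value", yLab = "Density", binSize = 30)
    print(h1)  # h1 is the returned plot object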

diff --git a/R/docs/reference/plotHDI.html b/R/docs/reference/plotHDI.html
deleted file mode 100644
index aa397b75..00000000
--- a/R/docs/reference/plotHDI.html
+++ /dev/null
@@ -1,189 +0,0 @@
-Plots highest density interval (HDI) from (MCMC) samples and prints HDI in the R console. HDI is indicated by a red line. — plotHDI • hBayesDM

    Based on John Kruschke's code: http://www.indiana.edu/~kruschke/DoingBayesianDataAnalysis/

    - -
    - -
    plotHDI(sample = NULL, credMass = 0.95, Title = NULL,
    -  xLab = "Value", yLab = "Density", fontSize = NULL, binSize = 30,
    -  ...)
    - -

    Arguments

    sample

    MCMC samples

    credMass

    A scalar between 0 and 1, indicating the mass within the credible interval that is to be estimated.

    Title

    Character value containing the main title for the plot

    xLab

    Character value containing the x label

    yLab

    Character value containing the y label

    fontSize

    Integer value specifying the font size to be used for the plot labels

    binSize

    Integer value specifying how wide the bars on the histogram should be. Defaults to 30.

    ...

    Arguments that can be additionally supplied to geom_histogram

    - -

    Value

    - -

    A vector containing the limits of the HDI
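
    A hedged usage sketch; as with plotDist above, output and mu_k are hypothetical:

    hdi_lims <- plotHDI(sample = output$parVals$mu_k, credMass = 0.95, Title = "mu_k")
    hdi_lims  # lower and upper limits of the 95% HDI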

diff --git a/R/docs/reference/plotInd.html b/R/docs/reference/plotInd.html
deleted file mode 100644
index 99c5918c..00000000
--- a/R/docs/reference/plotInd.html
+++ /dev/null
@@ -1,184 +0,0 @@
-Plots individual posterior distributions, using the stan_plot function of the rstan package — plotInd • hBayesDM

    Plots individual posterior distributions, using the stan_plot function of the rstan package

    - -
    - -
    plotInd(obj = NULL, pars, show_density = T, ...)
    - -

    Arguments

    obj

    An output of the hBayesDM. Its class should be 'hBayesDM'.

    pars

    (from stan_plot's help file) Character vector of parameter names. If unspecified, show all user-defined parameters or the first 10 (if there are more than 10)

    show_density

    TRUE or FALSE: show the density (TRUE) or not (FALSE).

    ...

    (from stan_plot's help file) Optional additional named arguments passed to stan_plot, which will be passed to geoms. See stan_plot's help file.

    - - -

    Examples

    -
    # NOT RUN {
    -# Run a model
    -output <- dd_hyperbolic("example", 2000, 1000, 3, 3)
    -
    -# Plot the hyper parameters ('k' and 'beta')
    -plot(output)
    -
    -# Plot individual 'k' (discounting rate) parameters
    -plotInd(output, "k")
    -
    -# Plot individual 'beta' (inverse temperature) parameters
    -plotInd(output, "beta")
    -
    -# Plot individual 'beta' parameters but don't show density
    -plotInd(output, "beta", show_density = F)
    -# }
diff --git a/R/docs/reference/printFit.html b/R/docs/reference/printFit.html
deleted file mode 100644
index 08c2bfd5..00000000
--- a/R/docs/reference/printFit.html
+++ /dev/null
@@ -1,189 +0,0 @@
-Print model-fits (mean LOOIC or WAIC values in addition to Akaike weights) of hBayesDM Models — printFit • hBayesDM

    Print model-fits (mean LOOIC or WAIC values in addition to Akaike weights) of hBayesDM Models

    - -
    - -
    printFit(..., ic = "looic", ncore = 2, roundTo = 3)
    - -

    Arguments

    ...

    Model objects output by hBayesDM functions (e.g. output1, output2, etc.)

    ic

    Which model comparison information criterion to use: 'looic', 'waic', or 'both'.

    ncore

    Number of cores to use when computing LOOIC

    roundTo

    Number of digits to the right of the decimal point in the output

    - -

    Value

    - -

    modelTable A table with relevant model comparison data. LOOIC and WAIC weights are computed as Akaike weights.

    - - -

    Examples

    -
    # NOT RUN {
    -# Run two models and store results in "output1" and "output2"
    -output1 <- dd_hyperbolic("example", 2000, 1000, 3, 3)
    -
    -output2 <- dd_exp("example", 2000, 1000, 3, 3)
    -
    -# Show the LOOIC model fit estimates
    -printFit(output1, output2)
    -
    -# To show the WAIC model fit estimates
    -printFit(output1, output2, ic = "waic")
    -
    -# To show both LOOIC and WAIC
    -printFit(output1, output2, ic = "both")
    -# }
diff --git a/R/docs/reference/prl_ewa.html b/R/docs/reference/prl_ewa.html
deleted file mode 100644
index 32fc41c6..00000000
--- a/R/docs/reference/prl_ewa.html
+++ /dev/null
@@ -1,339 +0,0 @@
-Experience-Weighted Attraction Model — prl_ewa • hBayesDM

    Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Experience-Weighted Attraction Model. -It has the following parameters: phi (1 - learning rate), rho (experience decay factor), beta (inverse temperature).

    -
    • Task: Probabilistic Reversal Learning Task
    • Model: Experience-Weighted Attraction Model (Ouden et al., 2013)

    prl_ewa(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

    Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "ev_c", "ev_nc", "ew_c", "ew_nc".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

    Integer value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

    Character value that is the name of the model ("prl_ewa").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -
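Once fitting has finished, each of these components can be accessed with the usual $
operator. A minimal sketch, assuming output is the object returned by prl_ewa:

output$allIndPars      # per-subject summaries of phi, rho, and beta
output$parVals         # posterior samples for each parameter
output$fit             # the underlying stanfit object, usable with rstan tools
output$modelRegressor  # model-based regressors (only if modelRegressor = TRUE)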

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.
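Before fitting, it can be worth verifying that a data file actually carries the
required columns. A minimal sketch in base R, assuming a hypothetical file path
"prl_data.txt":

# Read a tab-delimited text file (the path is hypothetical)
dat <- read.table("prl_data.txt", header = TRUE, sep = "\t")

# Check that the columns required by prl_ewa are present
required <- c("subjID", "choice", "outcome")
missing_cols <- setdiff(required, colnames(dat))
if (length(missing_cols) > 0)
  stop("Missing required columns: ", paste(missing_cols, collapse = ", "))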

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.
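The number of posterior samples actually retained follows directly from these
arguments. A minimal sketch of the arithmetic, using the defaults named above (Stan
floors the division when it is not exact):

niter   <- 4000  # total iterations per chain, including warm-up
nwarmup <- 1000  # discarded warm-up iterations per chain
nchain  <- 4     # number of chains
nthin   <- 1     # keep every nthin-th sample

per_chain <- (niter - nwarmup) / nthin  # 3000 samples kept per chain
total     <- per_chain * nchain         # 12000 samples across all chains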

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.
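These control parameters are passed as ordinary arguments of the model function. A
minimal sketch reusing the example data; the tightened values are illustrative, not
recommendations:

# Raise adapt_delta and max_treedepth if divergent transitions are reported
output <- prl_ewa(
  data = "example", niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4,
  adapt_delta = 0.99, max_treedepth = 12)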

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_ewa(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_ewa(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/prl_fictitious.html b/R/docs/reference/prl_fictitious.html
deleted file mode 100644
index 7f56cbab..00000000
--- a/R/docs/reference/prl_fictitious.html
+++ /dev/null
@@ -1,340 +0,0 @@
Fictitious Update Model — prl_fictitious • hBayesDM
Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the
Fictitious Update Model. It has the following parameters: eta (learning rate), alpha
(indecision point), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Fictitious Update Model (Glascher et al., 2009)

prl_fictitious(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "choice",
"outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_fictitious").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_fictitious(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_fictitious(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }
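The Rhat check above can also be made programmatic through the underlying stanfit
object. A minimal sketch using rstan's summary method (rstan is a dependency of
hBayesDM); the 1.1 cutoff mirrors the comment in the example:

fit_summary <- rstan::summary(output$fit)$summary
all(fit_summary[, "Rhat"] <= 1.1, na.rm = TRUE)  # TRUE suggests the chains converged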

diff --git a/R/docs/reference/prl_fictitious_multipleB.html b/R/docs/reference/prl_fictitious_multipleB.html
deleted file mode 100644
index 82f53316..00000000
--- a/R/docs/reference/prl_fictitious_multipleB.html
+++ /dev/null
@@ -1,341 +0,0 @@
Fictitious Update Model — prl_fictitious_multipleB • hBayesDM
Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning
Task using the Fictitious Update Model. It has the following parameters: eta (learning
rate), alpha (indecision point), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Fictitious Update Model (Glascher et al., 2009)

prl_fictitious_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "block",
"choice", "outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_fictitious_multipleB").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 4 columns of data with
the labels "subjID", "block", "choice", "outcome". It is not necessary for the columns
to be in this particular order; however, it is necessary that they be labeled correctly
and contain the information below:

subjID

A unique identifier for each subject in the data-set.

block

A unique identifier for each of the multiple blocks within each subject.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.
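For this multiple-block variant, every trial must additionally carry a block
identifier. A minimal sketch of a conforming data.frame with one subject and two
blocks; all values are made up for illustration:

dat <- data.frame(
  subjID  = rep("s01", 4),
  block   = c(1, 1, 2, 2),    # unique block identifier within each subject
  choice  = c(1, 2, 1, 1),    # option chosen on each trial: 1 or 2
  outcome = c(1, -1, -1, 1)   # reward == 1, loss == -1
)
# dat can then be passed directly, e.g. prl_fictitious_multipleB(data = dat, ...)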

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_fictitious_multipleB(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_fictitious_multipleB(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/prl_fictitious_rp.html b/R/docs/reference/prl_fictitious_rp.html
deleted file mode 100644
index 87733062..00000000
--- a/R/docs/reference/prl_fictitious_rp.html
+++ /dev/null
@@ -1,341 +0,0 @@
Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE) — prl_fictitious_rp • hBayesDM
Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the
Fictitious Update Model, with separate learning rates for positive and negative
prediction error (PE). It has the following parameters: eta_pos (learning rate, +PE),
eta_neg (learning rate, -PE), alpha (indecision point), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Fictitious Update Model, with separate learning rates for positive and
  negative prediction error (PE) (Glascher et al., 2009; Ouden et al., 2013)

prl_fictitious_rp(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "choice",
"outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_fictitious_rp").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.
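Because this model estimates separate learning rates for positive and negative
prediction errors, a natural follow-up is to compare the two within subjects. A minimal
sketch, assuming allIndPars contains columns named eta_pos and eta_neg (the column
naming is an assumption based on the parameter names above):

pars <- output$allIndPars
pars$eta_diff <- pars$eta_pos - pars$eta_neg  # per-subject learning-rate asymmetry
summary(pars$eta_diff)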

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_fictitious_rp(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_fictitious_rp(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/prl_fictitious_rp_woa.html b/R/docs/reference/prl_fictitious_rp_woa.html
deleted file mode 100644
index 3f3ec12a..00000000
--- a/R/docs/reference/prl_fictitious_rp_woa.html
+++ /dev/null
@@ -1,341 +0,0 @@
Fictitious Update Model, with separate learning rates for positive and negative prediction error (PE), without alpha (indecision point) — prl_fictitious_rp_woa • hBayesDM
Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the
Fictitious Update Model, with separate learning rates for positive and negative
prediction error (PE), without alpha (indecision point). It has the following
parameters: eta_pos (learning rate, +PE), eta_neg (learning rate, -PE), beta (inverse
temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Fictitious Update Model, with separate learning rates for positive and
  negative prediction error (PE), without alpha (indecision point) (Glascher et al.,
  2009; Ouden et al., 2013)

prl_fictitious_rp_woa(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "choice",
"outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_fictitious_rp_woa").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_fictitious_rp_woa(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_fictitious_rp_woa(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/prl_fictitious_woa.html b/R/docs/reference/prl_fictitious_woa.html
deleted file mode 100644
index 05d0119b..00000000
--- a/R/docs/reference/prl_fictitious_woa.html
+++ /dev/null
@@ -1,340 +0,0 @@
Fictitious Update Model, without alpha (indecision point) — prl_fictitious_woa • hBayesDM
Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the
Fictitious Update Model, without alpha (indecision point). It has the following
parameters: eta (learning rate), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Fictitious Update Model, without alpha (indecision point) (Glascher et al., 2009)

prl_fictitious_woa(data = NULL, niter = 4000, nwarmup = 1000,
  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
  max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "choice",
"outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe_c", "pe_nc", "dv".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_fictitious_woa").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.

nwarmup is a numerical value that specifies how many MCMC samples should not be stored
at the beginning of each chain. For those familiar with Bayesian methods, this is
equivalent to burn-in samples. Due to the nature of the MCMC algorithm, initial values
(i.e. where the sampling chains begin) can have a heavy influence on the generated
posterior distributions. The nwarmup argument can be set to a high number in order to
curb the effects that initial values have on the resulting posteriors.

nchain is a numerical value that specifies how many chains (i.e. independent sampling
sequences) should be used to draw samples from the posterior distribution. Since the
posteriors are generated from a sampling process, it is good practice to run multiple
chains to ensure that a reasonably representative posterior is attained. When the
sampling is complete, it is possible to check the multiple chains for convergence by
running the following line of code: plot(output, type = "trace"). The trace-plot should
resemble a "furry caterpillar".

nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler:
only every i == nthin sample is used to generate posterior distributions. By default,
nthin is equal to 1, meaning that every sample is used to generate the posterior.

Control Parameters: adapt_delta, stepsize, and max_treedepth are advanced options that
give the user more control over Stan's MCMC sampler. It is recommended that only
advanced users change the default values, as alterations can profoundly change the
sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in
Hamiltonian Monte Carlo' (Hoffman & Gelman, 2014, Journal of Machine Learning Research)
for more information on the sampler control parameters. One can also refer to 'Section
34.2. HMC Algorithm Parameters' of the Stan User's Guide and Reference Manual, or to
the help page for stan, for a less technical description of these arguments.

Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

Glascher, J., Hampton, A. N., & O'Doherty, J. P. (2009). Determining a Role for Ventromedial Prefrontal Cortex in Encoding Action-Based Value Signals During Reward-Related Decision Making. Cerebral Cortex, 19(2), 483-495. http://doi.org/10.1093/cercor/bhn098

    See also

We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM

    Examples

# NOT RUN {
# Run the model with a given data.frame as df
output <- prl_fictitious_woa(
  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Run the model with example data
output <- prl_fictitious_woa(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
plot(output, type = "trace")

# Check Rhat values (all Rhat values should be less than or equal to 1.1)
rhat(output)

# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
plot(output)

# Show the WAIC and LOOIC model fit estimates
printFit(output)
# }

diff --git a/R/docs/reference/prl_rp.html b/R/docs/reference/prl_rp.html
deleted file mode 100644
index 6dcf65af..00000000
--- a/R/docs/reference/prl_rp.html
+++ /dev/null
@@ -1,339 +0,0 @@
Reward-Punishment Model — prl_rp • hBayesDM
Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using the
Reward-Punishment Model. It has the following parameters: Apun (punishment learning
rate), Arew (reward learning rate), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Reward-Punishment Model (Ouden et al., 2013)

prl_rp(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)

    Arguments

data

Data to be modeled. It should be given as a data.frame object, a filepath for a
tab-separated txt file, "example" to use example data, or "choose" to choose data with
an interactive window. Columns in the dataset must include: "subjID", "choice",
"outcome". See Details below for more information.

niter

Number of iterations, including warm-up. Defaults to 4000.

nwarmup

Number of iterations used for warm-up only. Defaults to 1000.

nchain

Number of Markov chains to run. Defaults to 4.

ncore

Number of CPUs to be used for running. Defaults to 1.

nthin

Every i == nthin sample will be used to generate the posterior distribution. Defaults
to 1. A higher number can be used when auto-correlation within the MCMC sampling is high.

inits

Character value specifying how the initial values should be generated. Possible options
are "vb" (default), "fixed", "random", or your own initial values.

indPars

Character value specifying how to summarize individual parameters. Current options are:
"mean", "median", or "mode".

modelRegressor

Whether to export model-based regressors (TRUE or FALSE). For this model they are:
"ev_c", "ev_nc", "pe".

vb

Use variational inference to approximately draw from a posterior distribution. Defaults
to FALSE.

inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly
increase file size). Defaults to FALSE. If set to TRUE, it includes: "y_pred".

adapt_delta

Floating point value representing the target acceptance probability of a new sample in
the MCMC chain. Must be between 0 and 1. See Details below.

stepsize

Integer value specifying the size of each leapfrog step that the MCMC sampler can take
on each new iteration. See Details below.

max_treedepth

Integer value specifying how many leapfrog steps the MCMC sampler can take on each new
iteration. See Details below.

...

For this model, there is no model-specific argument.

    Value

A class "hBayesDM" object modelData with the following components:

model

Character value that is the name of the model ("prl_rp").

allIndPars

Data.frame containing the summarized parameter values (as specified by indPars) for
each subject.

parVals

List object containing the posterior samples over different parameters.

fit

A class stanfit object that contains the fitted Stan model.

rawdata

Data.frame containing the raw data used to fit the model, as specified by the user.

modelRegressor

List object containing the extracted model-based regressors.
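When the model is fit with modelRegressor = TRUE, the regressors listed above ("ev_c",
"ev_nc", "pe") can be pulled out for downstream analyses. A minimal sketch; the exact
internal layout of the list is an assumption:

output <- prl_rp(data = "example", niter = 2000, nwarmup = 1000,
                 nchain = 4, ncore = 4, modelRegressor = TRUE)
regs <- output$modelRegressor
names(regs)    # expected to include "ev_c", "ev_nc", "pe"
pe <- regs$pe  # e.g., trial-by-trial prediction errors (assumed layout)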

    Details

This section describes some of the function arguments in greater detail.

data should be assigned a character value specifying the full path and name (including
extension information, e.g. ".txt") of the file that contains the behavioral data-set
of all subjects of interest for the current analysis. The file should be a
tab-delimited text file, whose rows represent trial-by-trial observations and columns
represent variables.

For the Probabilistic Reversal Learning Task, there should be 3 columns of data with
the labels "subjID", "choice", "outcome". It is not necessary for the columns to be in
this particular order; however, it is necessary that they be labeled correctly and
contain the information below:

subjID

A unique identifier for each subject in the data-set.

choice

Integer value representing the option chosen on that trial: 1 or 2.

outcome

Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

*Note: The file may contain other columns of data (e.g. "ReactionTime",
"trial_number", etc.), but only the data within the column names listed above will be
used during the modeling. As long as the necessary columns mentioned above are present
and labeled correctly, there is no need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.
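As a worked example of the thinning arithmetic, the sketch below (using the same example data as in the Examples section) retains every second post-warmup draw, so each chain contributes (4000 - 1000) / 2 = 1500 samples to the posterior:

# Thinning halves the retained samples, which can reduce auto-correlation
output <- prl_rp(
  data = "example", niter = 4000, nwarmup = 1000, nchain = 4, nthin = 2)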

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.
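For instance, if Stan warns about divergent transitions, a common remedy is to raise adapt_delta; the sketch below passes all three control arguments from the function signature above (the specific values are illustrative only, not recommendations):

# More conservative sampler settings, at the cost of longer run times
output <- prl_rp(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4,
  adapt_delta = 0.99, stepsize = 0.5, max_treedepth = 12)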

    -

    Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    - -

den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- prl_rp(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- prl_rp(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/prl_rp_multipleB.html b/R/docs/reference/prl_rp_multipleB.html
deleted file mode 100644
index 769ff020..00000000
--- a/R/docs/reference/prl_rp_multipleB.html
+++ /dev/null
@@ -1,341 +0,0 @@

Reward-Punishment Model — prl_rp_multipleB • hBayesDM

    Multiple-Block Hierarchical Bayesian Modeling of the Probabilistic Reversal Learning Task using Reward-Punishment Model. -It has the following parameters: Apun (punishment learning rate), Arew (reward learning rate), beta (inverse temperature).

• Task: Probabilistic Reversal Learning Task
• Model: Reward-Punishment Model (Ouden et al., 2013)
    prl_rp_multipleB(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "block", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -For this model they are: "ev_c", "ev_nc", "pe".

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("prl_rp_multipleB").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Reversal Learning Task, there should be 4 columns of data with the - labels "subjID", "block", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    block

    A unique identifier for each of the multiple blocks within each subject.

    -
    choice

    Integer value representing the option chosen on that trial: 1 or 2.

    -
    outcome

    Integer value representing the outcome of that trial (where reward == 1, and loss == -1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
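A minimal sketch of the multiple-block format (invented values; note how the block column distinguishes repeated runs within each subject):

# One subject, two blocks of two trials each
df <- data.frame(
  subjID  = c(1, 1, 1, 1),
  block   = c(1, 1, 2, 2),   # block identifier within each subject
  choice  = c(1, 2, 2, 1),   # option chosen (1 or 2)
  outcome = c(1, -1, 1, 1)   # reward == 1, loss == -1
)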

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

Jaeyeong Yang (for model-based regressors) <jaeyeong.yang1125@gmail.com>, Harhim Park (for model-based regressors) <hrpark12@gmail.com>

    References

    - -

den Ouden, H. E. M., Daw, N. D., Fernandez, G., Elshout, J. A., Rijpkema, M., Hoogman, M., et al. (2013). Dissociable Effects of Dopamine and Serotonin on Reversal Learning. Neuron, 80(4), 1090-1100. http://doi.org/10.1016/j.neuron.2013.08.030

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- prl_rp_multipleB(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- prl_rp_multipleB(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/pst_gainloss_Q.html b/R/docs/reference/pst_gainloss_Q.html
deleted file mode 100644
index dd7e057c..00000000
--- a/R/docs/reference/pst_gainloss_Q.html
+++ /dev/null
@@ -1,341 +0,0 @@

Gain-Loss Q Learning Model — pst_gainloss_Q • hBayesDM

Hierarchical Bayesian Modeling of the Probabilistic Selection Task using Gain-Loss Q Learning Model. It has the following parameters: alpha_pos (learning rate for positive feedback), alpha_neg (learning rate for negative feedback), beta (inverse temperature).

• Task: Probabilistic Selection Task
• Model: Gain-Loss Q Learning Model (Frank et al., 2007)
    pst_gainloss_Q(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "type", "choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("pst_gainloss_Q").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Probabilistic Selection Task, there should be 4 columns of data with the - labels "subjID", "type", "choice", "reward". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    type

Two-digit number indicating which pair of stimuli was presented on that trial, e.g. 12, 34, or 56. The left (tens) digit codes the stimulus presented as option1, and the right (ones) digit codes the stimulus presented as option2. Each stimulus type (1-6) is defined by its reward probability: 80% (type 1), 20% (type 2), 70% (type 3), 30% (type 4), 60% (type 5), 40% (type 6). The modeling will still work if different probabilities are used for the stimuli; however, the total number of stimuli should be less than or equal to 6 (see the sketch after this list).

    -
    choice

    Whether the subject chose the left option (option1) out of the given two options (i.e. if option1 was chosen, 1; if option2 was chosen, 0).

    -
    reward

    Amount of reward earned as a result of the trial.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
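The two-digit type encoding described in the list above can be unpacked with integer division and modulo, as in this sketch (values invented for illustration):

type <- c(12, 34, 56)
option1 <- type %/% 10  # tens digit: stimulus presented as option1 (1, 3, 5)
option2 <- type %% 10   # ones digit: stimulus presented as option2 (2, 4, 6)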

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

Jaeyeong Yang <jaeyeong.yang1125@gmail.com>

    References

    - -

    Frank, M. J., Moustafa, A. A., Haughey, H. M., Curran, T., & Hutchison, K. E. (2007). Genetic triple dissociation reveals multiple roles for dopamine in reinforcement learning. Proceedings of the National Academy of Sciences, 104(41), 16311-16316.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- pst_gainloss_Q(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- pst_gainloss_Q(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/ra_noLA.html b/R/docs/reference/ra_noLA.html
deleted file mode 100644
index c9f08f8b..00000000
--- a/R/docs/reference/ra_noLA.html
+++ /dev/null
@@ -1,339 +0,0 @@

Prospect Theory, without loss aversion (LA) parameter — ra_noLA • hBayesDM

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without loss aversion (LA) parameter. -It has the following parameters: rho (risk aversion), tau (inverse temperature).

• Task: Risk Aversion Task
• Model: Prospect Theory, without loss aversion (LA) parameter (Sokol-Hessner et al., 2009)
    ra_noLA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ra_noLA").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Risk Aversion Task, there should be 5 columns of data with the - labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

Guaranteed amount of a safe option. "cert" is assumed to be zero or greater.

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
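A minimal sketch of the Risk Aversion Task format (invented values, for illustration only):

# Each row: a 50/50 gamble (gain vs. loss) against a sure amount (cert)
df <- data.frame(
  subjID = c(1, 1, 2, 2),
  gain   = c(9, 12, 6, 10),  # possible gain of the risky option
  loss   = c(5, 6, 3, 8),    # possible loss (may also be coded as negative)
  cert   = c(0, 2, 0, 1),    # guaranteed amount of the safe option (>= 0)
  gamble = c(1, 0, 1, 1)     # 1 if the gamble was taken, else 0
)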

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ra_noLA(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ra_noLA(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/ra_noRA.html b/R/docs/reference/ra_noRA.html
deleted file mode 100644
index 277d55eb..00000000
--- a/R/docs/reference/ra_noRA.html
+++ /dev/null
@@ -1,339 +0,0 @@

Prospect Theory, without risk aversion (RA) parameter — ra_noRA • hBayesDM

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory, without risk aversion (RA) parameter. -It has the following parameters: lambda (loss aversion), tau (inverse temperature).

• Task: Risk Aversion Task
• Model: Prospect Theory, without risk aversion (RA) parameter (Sokol-Hessner et al., 2009)
    ra_noRA(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ra_noRA").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Risk Aversion Task, there should be 5 columns of data with the - labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

Guaranteed amount of a safe option. "cert" is assumed to be zero or greater.

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ra_noRA(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ra_noRA(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/ra_prospect.html b/R/docs/reference/ra_prospect.html
deleted file mode 100644
index 7c7fa27a..00000000
--- a/R/docs/reference/ra_prospect.html
+++ /dev/null
@@ -1,339 +0,0 @@

Prospect Theory — ra_prospect • hBayesDM

    Hierarchical Bayesian Modeling of the Risk Aversion Task using Prospect Theory. -It has the following parameters: rho (risk aversion), lambda (loss aversion), tau (inverse temperature).

• Task: Risk Aversion Task
• Model: Prospect Theory (Sokol-Hessner et al., 2009)
    ra_prospect(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gain", "loss", "cert", "gamble". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ra_prospect").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Risk Aversion Task, there should be 5 columns of data with the - labels "subjID", "gain", "loss", "cert", "gamble". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

Guaranteed amount of a safe option. "cert" is assumed to be zero or greater.

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Sokol-Hessner, P., Hsu, M., Curley, N. G., Delgado, M. R., Camerer, C. F., Phelps, E. A., & Smith, E. E. (2009). Thinking like a Trader Selectively Reduces Individuals' Loss Aversion. Proceedings of the National Academy of Sciences of the United States of America, 106(13), 5035-5040. http://www.pnas.org/content/106/13/5035

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ra_prospect(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ra_prospect(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
diff --git a/R/docs/reference/rdt_happiness.html b/R/docs/reference/rdt_happiness.html
deleted file mode 100644
index 03105a5a..00000000
--- a/R/docs/reference/rdt_happiness.html
+++ /dev/null
@@ -1,346 +0,0 @@

Happiness Computational Model — rdt_happiness • hBayesDM

    Hierarchical Bayesian Modeling of the Risky Decision Task using Happiness Computational Model. -It has the following parameters: w0 (baseline), w1 (weight of certain rewards), w2 (weight of expected values), w3 (weight of reward prediction errors), gam (forgetting factor), sig (standard deviation of error).

• Task: Risky Decision Task
• Model: Happiness Computational Model (Rutledge et al., 2014)
    rdt_happiness(data = NULL, niter = 4000, nwarmup = 1000,
    -  nchain = 4, ncore = 1, nthin = 1, inits = "vb",
    -  indPars = "mean", modelRegressor = FALSE, vb = FALSE,
    -  inc_postpred = FALSE, adapt_delta = 0.95, stepsize = 1,
    -  max_treedepth = 10, ...)
    - -

    Arguments

    data

Data to be modeled. It should be given as a data.frame object, a filepath for a tab-separated txt file, "example" to use example data, or "choose" to choose data with an interactive window. Columns in the dataset must include: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("rdt_happiness").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Risky Decision Task, there should be 9 columns of data with the - labels "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    gain

    Possible (50%) gain outcome of a risky option (e.g. 9).

    -
    loss

    Possible (50%) loss outcome of a risky option (e.g. 5, or -5).

    -
    cert

    Guaranteed amount of a safe option.

    -
    type

    loss == -1, mixed == 0, gain == 1

    -
    gamble

    If gamble was taken, gamble == 1; else gamble == 0.

    -
    outcome

    Result of the trial.

    -
    happy

    Happiness score.

    -
    RT_happy

    Reaction time for answering the happiness score.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
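A minimal one-subject sketch of the nine required columns (all values invented, for illustration only):

df <- data.frame(
  subjID   = c(1, 1),
  gain     = c(9, 6),      # possible gain of the risky option
  loss     = c(5, 3),      # possible loss of the risky option
  cert     = c(2, 0),      # guaranteed amount of the safe option
  type     = c(0, 1),      # loss == -1, mixed == 0, gain == 1
  gamble   = c(1, 0),      # 1 if the gamble was taken, else 0
  outcome  = c(9, 0),      # result of the trial
  happy    = c(7, 5),      # happiness score
  RT_happy = c(1.2, 0.9)   # reaction time for the happiness rating
)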

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    -Harhim Park <hrpark12@gmail.com> - -

    References

    - -

    Rutledge, R. B., Skandali, N., Dayan, P., & Dolan, R. J. (2014). A computational and neural model of momentary subjective well-being. Proceedings of the National Academy of Sciences, 111(33), 12252-12257.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- rdt_happiness(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- rdt_happiness(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/rhat.html b/R/docs/reference/rhat.html
deleted file mode 100644
index 8d44cc95..00000000
--- a/R/docs/reference/rhat.html
+++ /dev/null
@@ -1,170 +0,0 @@
-Function for extracting Rhat values from an hBayesDM object — rhat • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    A convenience function for extracting Rhat values from an hBayesDM object. Can also -check if all Rhat values are less than or equal to a specified value. -If variational inference was used, an error message will be displayed.

    - -
    - -
    rhat(fit = NULL, less = NULL)
    - -

    Arguments

    - - - - - - - - - - -
    fit

    Model output of class hBayesDM

    less

A numeric value specifying how to check Rhat values. Defaults to NULL, in which case all Rhat values are returned as a data.frame.

    - -

    Value

    - -

    If 'less' is specified, then rhat(fit, less) will return TRUE if all Rhat values are -less than or equal to 'less'. If any values are greater than 'less', rhat(fit, less) will -return FALSE. If 'less' is left unspecified (NULL), rhat(fit) will return a data.frame object -containing all Rhat values.
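A minimal usage sketch, assuming output is a fitted hBayesDM object returned by any of the model functions:

rhat(output)              # data.frame of all Rhat values
rhat(output, less = 1.1)  # TRUE if every Rhat value is <= 1.1, FALSE otherwise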

    - - -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/ts_par4.html b/R/docs/reference/ts_par4.html
deleted file mode 100644
index a322c627..00000000
--- a/R/docs/reference/ts_par4.html
+++ /dev/null
@@ -1,352 +0,0 @@
-Hybrid Model, with 4 parameters — ts_par4 • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 4 parameters. -It has the following parameters: a (learning rate for both stages 1 & 2), beta (inverse temperature for both stages 1 & 2), pi (perseverance), w (model-based weight).

    -
      -
    • Task: Two-Step Task (Daw et al., 2011)

    • -
    • Model: Hybrid Model, with 4 parameters (Daw et al., 2011; Wunderlich et al., 2012)

    • -
    - -
    - -
    ts_par4(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    - - - - - - - - -
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ts_par4").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Two-Step Task, there should be 4 columns of data with the - labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value; a short sketch of doing so follows the column descriptions below.

    -
    reward

    Reward after Level 2 (0 or 1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
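As referenced above, a sketch of overriding the default common transition probability via the trans_prob argument (the value 0.8 is illustrative):

output <- ts_par4(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4,
  trans_prob = 0.8)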

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    -Harhim Park <hrpark12@gmail.com> - -

    References

    - -

Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027


    -

    Wunderlich, K., Smittenaar, P., & Dolan, R. J. (2012). Dopamine enhances model-based over model-free choice behavior. Neuron, 75(3), 418-424.

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ts_par4(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ts_par4(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/ts_par6.html b/R/docs/reference/ts_par6.html
deleted file mode 100644
index 640fd994..00000000
--- a/R/docs/reference/ts_par6.html
+++ /dev/null
@@ -1,351 +0,0 @@
-Hybrid Model, with 6 parameters — ts_par6 • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 6 parameters. -It has the following parameters: a1 (learning rate in stage 1), beta1 (inverse temperature in stage 1), a2 (learning rate in stage 2), beta2 (inverse temperature in stage 2), pi (perseverance), w (model-based weight).

    -
      -
    • Task: Two-Step Task (Daw et al., 2011)

    • -
    • Model: Hybrid Model, with 6 parameters (Daw et al., 2011)

    • -
    - -
    - -
    ts_par6(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    - - - - - - - - -
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ts_par6").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Two-Step Task, there should be 4 columns of data with the - labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    -
    reward

    Reward after Level 2 (0 or 1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.
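For example, to keep only every 2nd sample when autocorrelation is a concern (whether thinning is worthwhile depends on the particular fit):

output <- ts_par6(
  data = "example", niter = 4000, nwarmup = 1000, nchain = 4, ncore = 4,
  nthin = 2)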

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    -Harhim Park <hrpark12@gmail.com> - -

    References

    - -

Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027


    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ts_par6(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ts_par6(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/ts_par7.html b/R/docs/reference/ts_par7.html
deleted file mode 100644
index 4ffc3b28..00000000
--- a/R/docs/reference/ts_par7.html
+++ /dev/null
@@ -1,351 +0,0 @@
-Hybrid Model, with 7 parameters (original model) — ts_par7 • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Two-Step Task using Hybrid Model, with 7 parameters (original model). -It has the following parameters: a1 (learning rate in stage 1), beta1 (inverse temperature in stage 1), a2 (learning rate in stage 2), beta2 (inverse temperature in stage 2), pi (perseverance), w (model-based weight), lambda (eligibility trace).

    -
      -
    • Task: Two-Step Task (Daw et al., 2011)

    • -
    • Model: Hybrid Model, with 7 parameters (original model) (Daw et al., 2011)

    • -
    - -
    - -
    ts_par7(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "level1_choice", "level2_choice", "reward". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred_step1", "y_pred_step2"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, it's possible to set model-specific argument(s) as follows:

    -
    trans_prob

    Common state transition probability from Stage (Level) 1 to Stage (Level) 2. Defaults to 0.7.

    - - - - - - - - -
    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ts_par7").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Two-Step Task, there should be 4 columns of data with the - labels "subjID", "level1_choice", "level2_choice", "reward". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    level1_choice

    Choice made for Level (Stage) 1 (1: stimulus 1, 2: stimulus 2).

    -
    level2_choice

    Choice made for Level (Stage) 2 (1: stimulus 3, 2: stimulus 4, 3: stimulus 5, 4: stimulus 6).
    Note that, in our notation, choosing stimulus 1 in Level 1 leads to stimulus 3 & 4 in Level 2 with a common (0.7 by default) transition. Similarly, choosing stimulus 2 in Level 1 leads to stimulus 5 & 6 in Level 2 with a common (0.7 by default) transition. To change this default transition probability, set the function argument `trans_prob` to your preferred value.

    -
    reward

    Reward after Level 2 (0 or 1).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    -

    Contributors

    -Harhim Park <hrpark12@gmail.com> - -

    References

    - -

Daw, N. D., Gershman, S. J., Seymour, B., Dayan, P., & Dolan, R. J. (2011). Model-Based Influences on Humans' Choices and Striatal Prediction Errors. Neuron, 69(6), 1204-1215. http://doi.org/10.1016/j.neuron.2011.02.027


    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ts_par7(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ts_par7(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/ug_bayes.html b/R/docs/reference/ug_bayes.html
deleted file mode 100644
index 2ce7c902..00000000
--- a/R/docs/reference/ug_bayes.html
+++ /dev/null
@@ -1,337 +0,0 @@
-Ideal Observer Model — ug_bayes • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using Ideal Observer Model. -It has the following parameters: alpha (envy), beta (guilt), tau (inverse temperature).

    -
      -
    • Task: Norm-Training Ultimatum Game

    • -
    • Model: Ideal Observer Model (Xiang et al., 2013)

    • -
    - -
    - -
    ug_bayes(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "offer", "accept". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ug_bayes").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Norm-Training Ultimatum Game, there should be 3 columns of data with the - labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    offer

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    -
    accept

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
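A minimal sketch of such a data set built directly in R (values are illustrative only):

df <- data.frame(
  subjID = c(1, 1, 1),
  offer  = c(4, 10, 11),  # offer made in each trial
  accept = c(0, 1, 1)     # 1 = accepted, 0 = rejected
)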

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Xiang, T., Lohrenz, T., & Montague, P. R. (2013). Computational Substrates of Norms and Their Violations during Social Exchange. Journal of Neuroscience, 33(3), 1099-1108. http://doi.org/10.1523/JNEUROSCI.1642-12.2013

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ug_bayes(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ug_bayes(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/ug_delta.html b/R/docs/reference/ug_delta.html
deleted file mode 100644
index 80c69926..00000000
--- a/R/docs/reference/ug_delta.html
+++ /dev/null
@@ -1,337 +0,0 @@
-Rescorla-Wagner (Delta) Model — ug_delta • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Norm-Training Ultimatum Game using Rescorla-Wagner (Delta) Model. -It has the following parameters: alpha (envy), tau (inverse temperature), ep (norm adaptation rate).

    -
      -
    • Task: Norm-Training Ultimatum Game

    • -
    • Model: Rescorla-Wagner (Delta) Model (Gu et al., 2015)

    • -
    - -
    - -
    ug_delta(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "offer", "accept". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred" (a short usage sketch follows this table).

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.
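Regarding the inc_postpred argument above: a sketch of requesting trial-level posterior predictive draws, assuming (as the argument description states) that they are then included among the posterior samples under "y_pred":

output <- ug_delta(
  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4,
  inc_postpred = TRUE)
dim(output$parVals$y_pred)  # posterior predictive draws, if included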

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("ug_delta").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Norm-Training Ultimatum Game, there should be 3 columns of data with the - labels "subjID", "offer", "accept". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    offer

    Floating point value representing the offer made in that trial (e.g. 4, 10, 11).

    -
    accept

    1 or 0, indicating whether the offer was accepted in that trial (where accepted == 1, rejected == 0).

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.

    - -

    References

    - -

    Gu, X., Wang, X., Hula, A., Wang, S., Xu, S., Lohrenz, T. M., et al. (2015). Necessary, Yet Dissociable Contributions of the Insular and Ventromedial Prefrontal Cortices to Norm Adaptation: Computational and Lesion Evidence in Humans. Journal of Neuroscience, 35(2), 467-473. http://doi.org/10.1523/JNEUROSCI.2906-14.2015

    - -

    See also

    - -

    We refer users to our in-depth tutorial for an example of using hBayesDM: - https://rpubs.com/CCSL/hBayesDM

    - - -

    Examples

    -
    # NOT RUN {
    -# Run the model with a given data.frame as df
    -output <- ug_delta(
    -  data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Run the model with example data
    -output <- ug_delta(
    -  data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)
    -
    -# Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    -plot(output, type = "trace")
    -
    -# Check Rhat values (all Rhat values should be less than or equal to 1.1)
    -rhat(output)
    -
    -# Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    -plot(output)
    -
    -# Show the WAIC and LOOIC model fit estimates
    -printFit(output)
    -# }
    -
    - -
    - -
    - - -
    -

    Site built with pkgdown 1.3.0.

    -
    -
    -
diff --git a/R/docs/reference/wcs_sql.html b/R/docs/reference/wcs_sql.html
deleted file mode 100644
index dc613dc5..00000000
--- a/R/docs/reference/wcs_sql.html
+++ /dev/null
@@ -1,339 +0,0 @@
-Sequential Learning Model — wcs_sql • hBayesDM
    -
    - - - -
    - -
    -
    - - -
    - -

    Hierarchical Bayesian Modeling of the Wisconsin Card Sorting Task using Sequential Learning Model. -It has the following parameters: r (reward sensitivity), p (punishment sensitivity), d (decision consistency or inverse temperature).

    -
      -
    • Task: Wisconsin Card Sorting Task

    • -
    • Model: Sequential Learning Model (Bishara et al., 2010)

    • -
    - -
    - -
    wcs_sql(data = NULL, niter = 4000, nwarmup = 1000, nchain = 4,
    -  ncore = 1, nthin = 1, inits = "vb", indPars = "mean",
    -  modelRegressor = FALSE, vb = FALSE, inc_postpred = FALSE,
    -  adapt_delta = 0.95, stepsize = 1, max_treedepth = 10, ...)
    - -

    Arguments

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    data

Data to be modeled. It should be given as a data.frame object, -a filepath for a tab-separated txt file, "example" to use example data, or -"choose" to choose data with an interactive window. -Columns in the dataset must include: -"subjID", "choice", "outcome". See Details below for more information.

    niter

    Number of iterations, including warm-up. Defaults to 4000.

    nwarmup

    Number of iterations used for warm-up only. Defaults to 1000.

    nchain

    Number of Markov chains to run. Defaults to 4.

    ncore

    Number of CPUs to be used for running. Defaults to 1.

    nthin

    Every i == nthin sample will be used to generate the posterior distribution. -Defaults to 1. A higher number can be used when auto-correlation within the MCMC sampling is -high.

    inits

    Character value specifying how the initial values should be generated. -Possible options are "vb" (default), "fixed", "random", or your own initial values.

    indPars

    Character value specifying how to summarize individual parameters. Current options -are: "mean", "median", or "mode".

    modelRegressor

    Whether to export model-based regressors (TRUE or FALSE). -Not available for this model.

    vb

    Use variational inference to approximately draw from a posterior distribution. Defaults -to FALSE.

    inc_postpred

    Include trial-level posterior predictive simulations in model output (may greatly increase file -size). Defaults to FALSE. -If set to TRUE, it includes: "y_pred"

    adapt_delta

    Floating point value representing the target acceptance probability of a new -sample in the MCMC chain. Must be between 0 and 1. See Details below.

    stepsize

Numeric value specifying the size of each leapfrog step that the MCMC sampler can -take on each new iteration. See Details below.

    max_treedepth

    Integer value specifying how many leapfrog steps the MCMC sampler can take -on each new iteration. See Details below.

    ...

    For this model, there is no model-specific argument.

    - -

    Value

    - -

    A class "hBayesDM" object modelData with the following components:

    -
    model

Character value that is the name of the model ("wcs_sql").

    -
    allIndPars

    Data.frame containing the summarized parameter values (as specified by - indPars) for each subject.

    -
    parVals

    List object containing the posterior samples over different parameters.

    -
    fit

    A class stanfit object that contains the fitted Stan - model.

    -
    rawdata

    Data.frame containing the raw data used to fit the model, as specified by - the user.

    - - -
    modelRegressor

    List object containing the extracted model-based regressors.

    -
    - - -

    Details

    - -

    This section describes some of the function arguments in greater detail.

    -

    data should be assigned a character value specifying the full path and name (including - extension information, e.g. ".txt") of the file that contains the behavioral data-set of all - subjects of interest for the current analysis. The file should be a tab-delimited text - file, whose rows represent trial-by-trial observations and columns represent variables.
    -For the Wisconsin Card Sorting Task, there should be 3 columns of data with the - labels "subjID", "choice", "outcome". It is not necessary for the columns to be in this particular order, - however it is necessary that they be labeled correctly and contain the information below:

    -
    subjID

    A unique identifier for each subject in the data-set.

    -
    choice

    Integer value indicating which deck was chosen on that trial: 1, 2, 3, or 4.

    -
    outcome

    1 or 0, indicating the outcome of that trial: correct == 1, wrong == 0.

    -

    *Note: The file may contain other columns of data (e.g. "ReactionTime", "trial_number", - etc.), but only the data within the column names listed above will be used during the modeling. - As long as the necessary columns mentioned above are present and labeled correctly, there is no - need to remove other miscellaneous data columns.
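A minimal sketch of such a data set built directly in R (values are illustrative only):

df <- data.frame(
  subjID  = c(1, 1, 1),
  choice  = c(1, 4, 2),   # deck chosen: 1, 2, 3, or 4
  outcome = c(1, 0, 1)    # 1 = correct, 0 = wrong
)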

    -

    nwarmup is a numerical value that specifies how many MCMC samples should not be stored - upon the beginning of each chain. For those familiar with Bayesian methods, this is equivalent - to burn-in samples. Due to the nature of the MCMC algorithm, initial values (i.e. where the - sampling chains begin) can have a heavy influence on the generated posterior distributions. The - nwarmup argument can be set to a high number in order to curb the effects that initial - values have on the resulting posteriors.

    -

    nchain is a numerical value that specifies how many chains (i.e. independent sampling - sequences) should be used to draw samples from the posterior distribution. Since the posteriors - are generated from a sampling process, it is good practice to run multiple chains to ensure - that a reasonably representative posterior is attained. When the sampling is complete, it is - possible to check the multiple chains for convergence by running the following line of code: - plot(output, type = "trace"). The trace-plot should resemble a "furry caterpillar".

    -

    nthin is a numerical value that specifies the "skipping" behavior of the MCMC sampler, - using only every i == nthin samples to generate posterior distributions. By default, - nthin is equal to 1, meaning that every sample is used to generate the posterior.

    -

    Control Parameters: adapt_delta, stepsize, and max_treedepth are - advanced options that give the user more control over Stan's MCMC sampler. It is recommended - that only advanced users change the default values, as alterations can profoundly change the - sampler's behavior. Refer to 'The No-U-Turn Sampler: Adaptively Setting Path Lengths in - Hamiltonian Monte Carlo (Hoffman & Gelman, 2014, Journal of Machine Learning Research)' for - more information on the sampler control parameters. One can also refer to 'Section 34.2. HMC - Algorithm Parameters' of the Stan User's Guide - and Reference Manual, or to the help page for stan for a less technical - description of these arguments.


    Contributors

    Dayeong Min <mindy2801@snu.ac.kr>

    References


    Bishara, A. J., Kruschke, J. K., Stout, J. C., Bechara, A., McCabe, D. P., & Busemeyer, J. R. (2010). Sequential learning models for the Wisconsin card sort task: Assessing processes in substance dependent individuals. Journal of Mathematical Psychology, 54(1), 5-13.


    See also


    We refer users to our in-depth tutorial for an example of using hBayesDM: https://rpubs.com/CCSL/hBayesDM


    Examples

    # NOT RUN {
    # Run the model with a given data.frame as df
    output <- wcs_sql(
      data = df, niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Run the model with example data
    output <- wcs_sql(
      data = "example", niter = 2000, nwarmup = 1000, nchain = 4, ncore = 4)

    # Visually check convergence of the sampling chains (should look like 'hairy caterpillars')
    plot(output, type = "trace")

    # Check Rhat values (all Rhat values should be less than or equal to 1.1)
    rhat(output)

    # Plot the posterior distributions of the hyper-parameters (distributions should be unimodal)
    plot(output)

    # Show the WAIC and LOOIC model fit estimates
    printFit(output)
    # }
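
    After a fit like the ones above, two further inspection steps are often useful; a brief sketch, assuming the fitted object output from the examples:

    # Posterior means of the individual-level parameters, one row per subject
    head(output$allIndPars)

    # The underlying stanfit object is kept in the output for further inspection
    class(output$fit)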

From 10de7cc9e1bae4291674bf6b98b130853bdd303b Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:30:13 +0900
Subject: [PATCH 156/163] Update pkgdown settings

---
 R/_pkgdown.yml      |  4 ++++
 R/pkgdown/extra.css | 20 ++++++++++++++++++++
 2 files changed, 24 insertions(+)
 create mode 100644 R/pkgdown/extra.css

diff --git a/R/_pkgdown.yml b/R/_pkgdown.yml
index dcef2390..8afcc034 100644
--- a/R/_pkgdown.yml
+++ b/R/_pkgdown.yml
@@ -35,3 +35,7 @@ articles:
 
 toc:
   depth: 1
+
+template:
+  params:
+    bootswatch: flatly
diff --git a/R/pkgdown/extra.css b/R/pkgdown/extra.css
new file mode 100644
index 00000000..e7316bc2
--- /dev/null
+++ b/R/pkgdown/extra.css
@@ -0,0 +1,20 @@
+@import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Condensed|Roboto+Mono&display=swap');
+
+html, body {
+  font-size: 14px;
+  font-family: 'Roboto', sans-serif;
+  line-height: 1.5em;
+}
+
+h1, h2, h3, h4, h5, h6 {
+  font-family: 'Roboto Condensed', sans-serif;
+}
+
+code, pre {
+  font-family: 'Roboto Mono', monospace;
+}
+
+#tocnav li ul li > a {
+  margin-left: 1.5rem;
+}
+

From b24ed1725ccf06fd6f0ace6c4e3dfe95bd6f0913 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 15:34:44 +0900
Subject: [PATCH 157/163] Add lines to deploy hBayesDM documentation for R

---
 .travis.yml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.travis.yml b/.travis.yml
index 7a4321d2..9e577578 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -63,3 +63,12 @@ after_failure:
 
 after_success:
   - source $ROOTPATH/travis/after-success.sh
+
+before_deploy:
+  - cd $ROOTPATH
+  - Rscript -e 'remotes::install_cran("pkgdown", quiet = T, repos = "https://cran.rstudio.com")'
+
+deploy:
+  provider: script
+  script: Rscript -e 'pkgdown::deploy_site_github()'
+  skip_cleanup: true

From 26aee4c8999a7f2ea63d6ec162a1fcb64ad810f4 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 16:17:56 +0900
Subject: [PATCH 158/163] Update DATE

---
 R/DESCRIPTION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/DESCRIPTION b/R/DESCRIPTION
index 5a9e4657..ba42af33 100644
--- a/R/DESCRIPTION
+++ b/R/DESCRIPTION
@@ -1,7 +1,7 @@
 Package: hBayesDM
 Title: Hierarchical Bayesian Modeling of Decision-Making Tasks
 Version: 1.0.0
-Date: 2019-02-11
+Date: 2019-08-30
 Author:
     Woo-Young Ahn [aut, cre],
     Nate Haines [aut],

From 6fc2e75f7d48e02b6f6264eba4c5d2b29a49e763 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 16:18:20 +0900
Subject: [PATCH 159/163] Ignore pkgdown

---
 R/.Rbuildignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/R/.Rbuildignore b/R/.Rbuildignore
index 06470931..c43d14fa 100644
--- a/R/.Rbuildignore
+++ b/R/.Rbuildignore
@@ -7,6 +7,7 @@
 \.o$
 ^docs/
 ^man-roxygen/
+^pkgdown/
 ^src/stan_files/.*\.o$
 ^src/stan_files/.*\.cc$
 ^src/stan_files/.*\.hpp$

From 04225e3f5b0d03623b9223393c9adbe644b8b3c4 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 16:25:40 +0900
Subject: [PATCH 160/163] Deploy only on the master branch

---
 .travis.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.travis.yml b/.travis.yml
index 9e577578..d3505ae7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -72,3 +72,5 @@ deploy:
   provider: script
   script: Rscript -e 'pkgdown::deploy_site_github()'
   skip_cleanup: true
+  on:
+    branch: master

From 14b8220244a2145a4a0f9be7bc05fa6f192e4dc2 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 17:25:38 +0900
Subject: [PATCH 161/163] Fix preprocess_funcs for NOTE

---
 R/R/preprocess_funcs.R | 44 +++++++++++++++++++++---------------------
 1 file changed, 22 insertions(+), 22 deletions(-)

diff --git a/R/R/preprocess_funcs.R b/R/R/preprocess_funcs.R
index 4263af80..0a5f0d8d 100644
--- a/R/R/preprocess_funcs.R
+++ b/R/R/preprocess_funcs.R
@@ -18,7 +18,7 @@ bandit2arm_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     choice[i, 1:t] <- DT_subj$choice
     outcome[i, 1:t] <- DT_subj$outcome
@@ -49,7 +49,7 @@ bandit4arm2_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     choice[i, 1:t] <- DT_subj$choice
     outcome[i, 1:t] <- DT_subj$outcome
@@ -84,7 +84,7 @@ bandit4arm_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     rew[i, 1:t] <- DT_subj$gain
     los[i, 1:t] <- -1 * abs(DT_subj$loss)
@@ -122,7 +122,7 @@ bart_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     pumps[i, 1:t] <- DT_subj$pumps
     explosion[i, 1:t] <- DT_subj$explosion
@@ -191,8 +191,8 @@ choiceRT_single_preprocess_func <- function(raw_data, general_info, RTbound = 0.
   # Currently class(raw_data) == "data.table"
 
   # Data.tables for upper and lower boundary responses
-  DT_upper <- raw_data[choice == 2]
-  DT_lower <- raw_data[choice == 1]
+  DT_upper <- raw_data[raw_data$choice == 2]
+  DT_lower <- raw_data[raw_data$choice == 1]
 
   # Wrap into a list for Stan
   data_list <- list(
@@ -228,7 +228,7 @@ cra_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     choice[i, 1:t] <- DT_subj$choice
     prob[i, 1:t] <- DT_subj$prob
@@ -270,7 +270,7 @@ dbdm_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
    t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     opt1hprob[i, 1:t] <- DT_subj$opt1hprob
     opt2hprob[i, 1:t] <- DT_subj$opt2hprob
@@ -317,7 +317,7 @@ dd_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     delay_later[i, 1:t] <- DT_subj$delaylater
     amount_later[i, 1:t] <- DT_subj$amountlater
@@ -387,7 +387,7 @@ gng_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     cue[i, 1:t] <- DT_subj$cue
     pressed[i, 1:t] <- DT_subj$keypressed
@@ -425,7 +425,7 @@ igt_preprocess_func <- function(raw_data, general_info, payscale = 100) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     Ydata[i, 1:t] <- DT_subj$choice
     RLmatrix[i, 1:t] <- DT_subj$gain - abs(DT_subj$loss)
@@ -467,7 +467,7 @@ peer_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     condition[i, 1:t] <- DT_subj$condition
     p_gamble[i, 1:t] <- DT_subj$pgamble
@@ -513,7 +513,7 @@ prl_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     choice[i, 1:t] <- DT_subj$choice
     outcome[i, 1:t] <- sign(DT_subj$outcome)  # use sign
@@ -550,12 +550,12 @@ prl_multipleB_preprocess_func <- function(raw_data, general_info) {
   # Write from raw_data to the data arrays
   for (i in 1:n_subj) {
     subj <- subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
     blocks_of_subj <- unique(DT_subj$block)
 
     for (b in 1:b_subjs[i]) {
       curr_block <- blocks_of_subj[b]
-      DT_curr_block <- DT_subj[block == curr_block]
+      DT_curr_block <- DT_subj[DT_subj$block == curr_block]
       t <- t_subjs[i, b]
 
       choice[i, b, 1:t] <- DT_curr_block$choice
@@ -597,7 +597,7 @@ pst_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     option1[i, 1:t] <- DT_subj$type %/% 10
     option2[i, 1:t] <- DT_subj$type %% 10
@@ -639,7 +639,7 @@ ra_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     gain[i, 1:t] <- DT_subj$gain
     loss[i, 1:t] <- abs(DT_subj$loss)  # absolute loss amount
@@ -685,7 +685,7 @@ rdt_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     gain[i, 1:t] <- DT_subj$gain
     loss[i, 1:t] <- abs(DT_subj$loss)  # absolute loss amount
@@ -734,7 +734,7 @@ ts_preprocess_func <- function(raw_data, general_info, trans_prob = 0.7) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     level1_choice[i, 1:t] <- DT_subj$level1choice
     level2_choice[i, 1:t] <- DT_subj$level2choice
@@ -773,7 +773,7 @@ ug_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     offer[i, 1:t] <- DT_subj$offer
     accept[i, 1:t] <- DT_subj$accept
@@ -816,7 +816,7 @@ wcs_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     subj <- subjs[i]
     t <- t_subjs[i]
-    DT_subj <- raw_data[subjid == subj]
+    DT_subj <- raw_data[raw_data$subjid == subj]
 
     DT_subj_choice <- DT_subj$choice
     DT_subj_outcome <- DT_subj$outcome
@@ -883,7 +883,7 @@ cgt_preprocess_func <- function(raw_data, general_info) {
   for (i in 1:n_subj) {
     t <- t_subjs[i]
 
-    DT_subj <- raw_data[subjid == subjs[i]]
+    DT_subj <- raw_data[raw_data$subjid == subjs[i]]
 
     col_chosen [i, 1:t] <- ifelse(DT_subj$redchosen == 1, 1, 2)
     bet_chosen [i, 1:t] <- DT_subj$bet_time

From e87c316b7cbaa644a3e6481e37cc97084e6c0811 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 17:36:46 +0900
Subject: [PATCH 162/163] Update cran-comments.md

---
 R/cran-comments.md | 37 +------------------------------------
 1 file changed, 1 insertion(+), 36 deletions(-)

diff --git a/R/cran-comments.md b/R/cran-comments.md
index 179b0048..e1398f95 100644
--- a/R/cran-comments.md
+++ b/R/cran-comments.md
@@ -6,42 +6,7 @@
 
 ## R CMD check results
 
-There were 2 NOTE:
-
-- These messages occur since it uses 'data.table'. It works fine when users run it
-  with 'data.table' installed.
-```
-* checking R code for possible problems ... NOTE
-bandit2arm_preprocess_func: no visible binding for global variable
-  ‘subjid’
-bandit4arm2_preprocess_func: no visible binding for global variable
-  ‘subjid’
-bandit4arm_preprocess_func: no visible binding for global variable
-  ‘subjid’
-bart_preprocess_func: no visible binding for global variable ‘subjid’
-cgt_preprocess_func: no visible binding for global variable ‘subjid’
-choiceRT_single_preprocess_func: no visible binding for global variable
-  ‘choice’
-cra_preprocess_func: no visible binding for global variable ‘subjid’
-dbdm_preprocess_func: no visible binding for global variable ‘subjid’
-dd_preprocess_func: no visible binding for global variable ‘subjid’
-gng_preprocess_func: no visible binding for global variable ‘subjid’
-igt_preprocess_func: no visible binding for global variable ‘subjid’
-peer_preprocess_func: no visible binding for global variable ‘subjid’
-prl_multipleB_preprocess_func: no visible binding for global variable
-  ‘subjid’
-prl_multipleB_preprocess_func: no visible binding for global variable
-  ‘block’
-prl_preprocess_func: no visible binding for global variable ‘subjid’
-pst_preprocess_func: no visible binding for global variable ‘subjid’
-ra_preprocess_func: no visible binding for global variable ‘subjid’
-rdt_preprocess_func: no visible binding for global variable ‘subjid’
-ts_preprocess_func: no visible binding for global variable ‘subjid’
-ug_preprocess_func: no visible binding for global variable ‘subjid’
-wcs_preprocess_func: no visible binding for global variable ‘subjid’
-Undefined global functions or variables:
-  block choice subjid
-```
+There was 1 NOTE:
 
 - To compile hBayesDM using rstan, GNU make is required.
 ```
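
As context for PATCH 161 above: data.table subsetting such as raw_data[subjid == subj] resolves subjid inside the table at run time (non-standard evaluation), which R CMD check reports as "no visible binding for global variable"; the patch switches to explicit base-style indexing, which checks cleanly. A minimal sketch of the two forms, with a toy table invented for illustration:

    # install.packages("data.table")  # assumed available
    library(data.table)
    raw_data <- data.table(subjid = c(1, 1, 2), choice = c(1, 2, 1))

    raw_data[subjid == 1]           # NSE form: triggers the R CMD check NOTE
    raw_data[raw_data$subjid == 1]  # explicit form used in the patch: no NOTE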

From c57494880e73a85efdf689f602e85eaefa868670 Mon Sep 17 00:00:00 2001
From: Jaeyeong Yang
Date: Fri, 30 Aug 2019 23:03:29 +0900
Subject: [PATCH 163/163] Change documentations

---
 Python/hbayesdm/models/_bandit2arm_delta.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm2_kalman_filter.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm_2par_lapse.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm_4par.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm_lapse.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm_lapse_decay.py | 8 ++------
 Python/hbayesdm/models/_bandit4arm_singleA_lapse.py | 8 ++------
 Python/hbayesdm/models/_bart_par4.py | 8 ++------
 Python/hbayesdm/models/_cgt_cm.py | 8 ++------
 Python/hbayesdm/models/_choiceRT_ddm.py | 8 ++------
 Python/hbayesdm/models/_choiceRT_ddm_single.py | 8 ++------
 Python/hbayesdm/models/_cra_exp.py | 8 ++------
 Python/hbayesdm/models/_cra_linear.py | 8 ++------
 Python/hbayesdm/models/_dbdm_prob_weight.py | 8 ++------
 Python/hbayesdm/models/_dd_cs.py | 8 ++------
 Python/hbayesdm/models/_dd_cs_single.py | 8 ++------
 Python/hbayesdm/models/_dd_exp.py | 8 ++------
 Python/hbayesdm/models/_dd_hyperbolic.py | 8 ++------
 Python/hbayesdm/models/_dd_hyperbolic_single.py | 8 ++------
 Python/hbayesdm/models/_gng_m1.py | 8 ++------
 Python/hbayesdm/models/_gng_m2.py | 8 ++------
 Python/hbayesdm/models/_gng_m3.py | 8 ++------
 Python/hbayesdm/models/_gng_m4.py | 8 ++------
 Python/hbayesdm/models/_igt_orl.py | 8 ++------
 Python/hbayesdm/models/_igt_pvl_decay.py | 8 ++------
 Python/hbayesdm/models/_igt_pvl_delta.py | 8 ++------
 Python/hbayesdm/models/_igt_vpp.py | 8 ++------
 Python/hbayesdm/models/_peer_ocu.py | 8 ++------
 Python/hbayesdm/models/_prl_ewa.py | 8 ++------
 Python/hbayesdm/models/_prl_fictitious.py | 8 ++------
 Python/hbayesdm/models/_prl_fictitious_multipleB.py | 8 ++------
 Python/hbayesdm/models/_prl_fictitious_rp.py | 8 ++------
 Python/hbayesdm/models/_prl_fictitious_rp_woa.py | 8 ++------
 Python/hbayesdm/models/_prl_fictitious_woa.py | 8 ++------
 Python/hbayesdm/models/_prl_rp.py | 8 ++------
 Python/hbayesdm/models/_prl_rp_multipleB.py | 8 ++------
 Python/hbayesdm/models/_pst_gainloss_Q.py | 8 ++------
 Python/hbayesdm/models/_ra_noLA.py | 8 ++------
 Python/hbayesdm/models/_ra_noRA.py | 8 ++------
 Python/hbayesdm/models/_ra_prospect.py | 8 ++------
 Python/hbayesdm/models/_rdt_happiness.py | 8 ++------
 Python/hbayesdm/models/_ts_par4.py | 8 ++------
 Python/hbayesdm/models/_ts_par6.py | 8 ++------
 Python/hbayesdm/models/_ts_par7.py | 8 ++------
 Python/hbayesdm/models/_ug_bayes.py | 8 ++------
 Python/hbayesdm/models/_ug_delta.py | 8 ++------
 Python/hbayesdm/models/_wcs_sql.py | 8 ++------
 commons/templates/PY_DOCS_TEMPLATE.txt | 8 ++------
 48 files changed, 96 insertions(+), 288 deletions(-)

diff --git a/Python/hbayesdm/models/_bandit2arm_delta.py b/Python/hbayesdm/models/_bandit2arm_delta.py
index 9f798a67..3504d2f5 100644
--- a/Python/hbayesdm/models/_bandit2arm_delta.py
+++ b/Python/hbayesdm/models/_bandit2arm_delta.py
@@ -95,13 +95,9 @@ def bandit2arm_delta(
 
     Parameters
     ----------
-    example
-        Whether to use the example data provided by hBayesDM.
-    datafile
-        Path for a TSV file containing the data to be modeled.
-        Data columns should be labeled as: "subjID", "choice", "outcome".
     data
-        Pandas DataFrame object holding the data to be modeled.
+        Data to be modeled. It should be given as a Pandas DataFrame object,
+        a filepath for a data file, or ``"example"`` for example data.
         Data columns should be labeled as: "subjID", "choice", "outcome".
     niter
         Number of iterations, including warm-up. Defaults to 4000.
niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py index 9fbf2696..f84421ea 100644 --- a/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py +++ b/Python/hbayesdm/models/_bandit4arm2_kalman_filter.py @@ -102,13 +102,9 @@ def bandit4arm2_kalman_filter( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py index 630feeb6..1caec205 100644 --- a/Python/hbayesdm/models/_bandit4arm_2par_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_2par_lapse.py @@ -98,13 +98,9 @@ def bandit4arm_2par_lapse( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bandit4arm_4par.py b/Python/hbayesdm/models/_bandit4arm_4par.py index e947485a..617c8f96 100644 --- a/Python/hbayesdm/models/_bandit4arm_4par.py +++ b/Python/hbayesdm/models/_bandit4arm_4par.py @@ -100,13 +100,9 @@ def bandit4arm_4par( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bandit4arm_lapse.py b/Python/hbayesdm/models/_bandit4arm_lapse.py index 4b825f4d..157f4b58 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse.py @@ -102,13 +102,9 @@ def bandit4arm_lapse( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py index d81887e2..1f5b71ff 100644 --- a/Python/hbayesdm/models/_bandit4arm_lapse_decay.py +++ b/Python/hbayesdm/models/_bandit4arm_lapse_decay.py @@ -104,13 +104,9 @@ def bandit4arm_lapse_decay( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py index 4a8aae00..55da5cae 100644 --- a/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py +++ b/Python/hbayesdm/models/_bandit4arm_singleA_lapse.py @@ -100,13 +100,9 @@ def bandit4arm_singleA_lapse( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_bart_par4.py b/Python/hbayesdm/models/_bart_par4.py index aca370e4..f7600521 100644 --- a/Python/hbayesdm/models/_bart_par4.py +++ b/Python/hbayesdm/models/_bart_par4.py @@ -103,13 +103,9 @@ def bart_par4( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "pumps", "explosion". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "pumps", "explosion". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_cgt_cm.py b/Python/hbayesdm/models/_cgt_cm.py index 239b19d7..01a5bc8a 100644 --- a/Python/hbayesdm/models/_cgt_cm.py +++ b/Python/hbayesdm/models/_cgt_cm.py @@ -111,13 +111,9 @@ def cgt_cm( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "gamble_type", "percentage_staked", "trial_initial_points", "assessment_stage", "red_chosen", "n_red_boxes". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_choiceRT_ddm.py b/Python/hbayesdm/models/_choiceRT_ddm.py index d889bd51..07b93f10 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm.py +++ b/Python/hbayesdm/models/_choiceRT_ddm.py @@ -102,13 +102,9 @@ def choiceRT_ddm( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "RT". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "RT". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_choiceRT_ddm_single.py b/Python/hbayesdm/models/_choiceRT_ddm_single.py index bdb9da03..2fdfbfd9 100644 --- a/Python/hbayesdm/models/_choiceRT_ddm_single.py +++ b/Python/hbayesdm/models/_choiceRT_ddm_single.py @@ -102,13 +102,9 @@ def choiceRT_ddm_single( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "RT". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "RT". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_cra_exp.py b/Python/hbayesdm/models/_cra_exp.py index 1f31ccdf..98d188fd 100644 --- a/Python/hbayesdm/models/_cra_exp.py +++ b/Python/hbayesdm/models/_cra_exp.py @@ -105,13 +105,9 @@ def cra_exp( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_cra_linear.py b/Python/hbayesdm/models/_cra_linear.py index 542cf77c..cce9dcfa 100644 --- a/Python/hbayesdm/models/_cra_linear.py +++ b/Python/hbayesdm/models/_cra_linear.py @@ -105,13 +105,9 @@ def cra_linear( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "prob", "ambig", "reward_var", "reward_fix", "choice". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_dbdm_prob_weight.py b/Python/hbayesdm/models/_dbdm_prob_weight.py index 50cede01..d3c786d7 100644 --- a/Python/hbayesdm/models/_dbdm_prob_weight.py +++ b/Python/hbayesdm/models/_dbdm_prob_weight.py @@ -110,13 +110,9 @@ def dbdm_prob_weight( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "opt1hprob", "opt2hprob", "opt1hval", "opt1lval", "opt2hval", "opt2lval", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_dd_cs.py b/Python/hbayesdm/models/_dd_cs.py index 8fb4ae9f..d64ac5f2 100644 --- a/Python/hbayesdm/models/_dd_cs.py +++ b/Python/hbayesdm/models/_dd_cs.py @@ -102,13 +102,9 @@ def dd_cs( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_dd_cs_single.py b/Python/hbayesdm/models/_dd_cs_single.py index 206cf8f4..adb2ac16 100644 --- a/Python/hbayesdm/models/_dd_cs_single.py +++ b/Python/hbayesdm/models/_dd_cs_single.py @@ -102,13 +102,9 @@ def dd_cs_single( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_dd_exp.py b/Python/hbayesdm/models/_dd_exp.py index f1e3d47c..51504dfc 100644 --- a/Python/hbayesdm/models/_dd_exp.py +++ b/Python/hbayesdm/models/_dd_exp.py @@ -100,13 +100,9 @@ def dd_exp( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. 
Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_dd_hyperbolic.py b/Python/hbayesdm/models/_dd_hyperbolic.py index 74482823..36a54358 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic.py +++ b/Python/hbayesdm/models/_dd_hyperbolic.py @@ -100,13 +100,9 @@ def dd_hyperbolic( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_dd_hyperbolic_single.py b/Python/hbayesdm/models/_dd_hyperbolic_single.py index 27e19781..5e1a9db0 100644 --- a/Python/hbayesdm/models/_dd_hyperbolic_single.py +++ b/Python/hbayesdm/models/_dd_hyperbolic_single.py @@ -100,13 +100,9 @@ def dd_hyperbolic_single( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "delay_later", "amount_later", "delay_sooner", "amount_sooner", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_gng_m1.py b/Python/hbayesdm/models/_gng_m1.py index a334d4e6..cea64ccd 100644 --- a/Python/hbayesdm/models/_gng_m1.py +++ b/Python/hbayesdm/models/_gng_m1.py @@ -101,13 +101,9 @@ def gng_m1( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_gng_m2.py b/Python/hbayesdm/models/_gng_m2.py index e7462fb3..d7cfc916 100644 --- a/Python/hbayesdm/models/_gng_m2.py +++ b/Python/hbayesdm/models/_gng_m2.py @@ -103,13 +103,9 @@ def gng_m2( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. 
Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_gng_m3.py b/Python/hbayesdm/models/_gng_m3.py index 78dd8e93..81a70703 100644 --- a/Python/hbayesdm/models/_gng_m3.py +++ b/Python/hbayesdm/models/_gng_m3.py @@ -106,13 +106,9 @@ def gng_m3( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_gng_m4.py b/Python/hbayesdm/models/_gng_m4.py index 1eb31993..75b1581e 100644 --- a/Python/hbayesdm/models/_gng_m4.py +++ b/Python/hbayesdm/models/_gng_m4.py @@ -108,13 +108,9 @@ def gng_m4( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "cue", "keyPressed", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_igt_orl.py b/Python/hbayesdm/models/_igt_orl.py index a1875675..8ca7562d 100644 --- a/Python/hbayesdm/models/_igt_orl.py +++ b/Python/hbayesdm/models/_igt_orl.py @@ -102,13 +102,9 @@ def igt_orl( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_igt_pvl_decay.py b/Python/hbayesdm/models/_igt_pvl_decay.py index 22cf816f..6af73133 100644 --- a/Python/hbayesdm/models/_igt_pvl_decay.py +++ b/Python/hbayesdm/models/_igt_pvl_decay.py @@ -100,13 +100,9 @@ def igt_pvl_decay( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_igt_pvl_delta.py b/Python/hbayesdm/models/_igt_pvl_delta.py index bde340e0..3aceabbd 100644 --- a/Python/hbayesdm/models/_igt_pvl_delta.py +++ b/Python/hbayesdm/models/_igt_pvl_delta.py @@ -100,13 +100,9 @@ def igt_pvl_delta( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_igt_vpp.py b/Python/hbayesdm/models/_igt_vpp.py index 92e63edb..5c4b255d 100644 --- a/Python/hbayesdm/models/_igt_vpp.py +++ b/Python/hbayesdm/models/_igt_vpp.py @@ -108,13 +108,9 @@ def igt_vpp( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "gain", "loss". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "gain", "loss". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_peer_ocu.py b/Python/hbayesdm/models/_peer_ocu.py index b988950a..fee5dff6 100644 --- a/Python/hbayesdm/models/_peer_ocu.py +++ b/Python/hbayesdm/models/_peer_ocu.py @@ -106,13 +106,9 @@ def peer_ocu( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "condition", "p_gamble", "safe_Hpayoff", "safe_Lpayoff", "risky_Hpayoff", "risky_Lpayoff", "choice". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_ewa.py b/Python/hbayesdm/models/_prl_ewa.py index 3791642f..4f7096da 100644 --- a/Python/hbayesdm/models/_prl_ewa.py +++ b/Python/hbayesdm/models/_prl_ewa.py @@ -100,13 +100,9 @@ def prl_ewa( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_prl_fictitious.py b/Python/hbayesdm/models/_prl_fictitious.py index 1e1a52e1..86dd7e2c 100644 --- a/Python/hbayesdm/models/_prl_fictitious.py +++ b/Python/hbayesdm/models/_prl_fictitious.py @@ -101,13 +101,9 @@ def prl_fictitious( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_fictitious_multipleB.py b/Python/hbayesdm/models/_prl_fictitious_multipleB.py index 072580ab..89001207 100644 --- a/Python/hbayesdm/models/_prl_fictitious_multipleB.py +++ b/Python/hbayesdm/models/_prl_fictitious_multipleB.py @@ -103,13 +103,9 @@ def prl_fictitious_multipleB( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "block", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "block", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_fictitious_rp.py b/Python/hbayesdm/models/_prl_fictitious_rp.py index f842ac71..105ffb16 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp.py @@ -104,13 +104,9 @@ def prl_fictitious_rp( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py index b40ec495..6db44471 100644 --- a/Python/hbayesdm/models/_prl_fictitious_rp_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_rp_woa.py @@ -102,13 +102,9 @@ def prl_fictitious_rp_woa( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_prl_fictitious_woa.py b/Python/hbayesdm/models/_prl_fictitious_woa.py index 0f52d118..c408824a 100644 --- a/Python/hbayesdm/models/_prl_fictitious_woa.py +++ b/Python/hbayesdm/models/_prl_fictitious_woa.py @@ -99,13 +99,9 @@ def prl_fictitious_woa( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_rp.py b/Python/hbayesdm/models/_prl_rp.py index ecbdeb67..d8725822 100644 --- a/Python/hbayesdm/models/_prl_rp.py +++ b/Python/hbayesdm/models/_prl_rp.py @@ -99,13 +99,9 @@ def prl_rp( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_prl_rp_multipleB.py b/Python/hbayesdm/models/_prl_rp_multipleB.py index 1aa80ddd..5189c019 100644 --- a/Python/hbayesdm/models/_prl_rp_multipleB.py +++ b/Python/hbayesdm/models/_prl_rp_multipleB.py @@ -101,13 +101,9 @@ def prl_rp_multipleB( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "block", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "block", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_pst_gainloss_Q.py b/Python/hbayesdm/models/_pst_gainloss_Q.py index 2e0b4dac..9ab71910 100644 --- a/Python/hbayesdm/models/_pst_gainloss_Q.py +++ b/Python/hbayesdm/models/_pst_gainloss_Q.py @@ -98,13 +98,9 @@ def pst_gainloss_Q( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "type", "choice", "reward". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "type", "choice", "reward". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_ra_noLA.py b/Python/hbayesdm/models/_ra_noLA.py index 5933bd49..b8cd03cf 100644 --- a/Python/hbayesdm/models/_ra_noLA.py +++ b/Python/hbayesdm/models/_ra_noLA.py @@ -98,13 +98,9 @@ def ra_noLA( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_ra_noRA.py b/Python/hbayesdm/models/_ra_noRA.py index 39628b76..02414a6d 100644 --- a/Python/hbayesdm/models/_ra_noRA.py +++ b/Python/hbayesdm/models/_ra_noRA.py @@ -98,13 +98,9 @@ def ra_noRA( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_ra_prospect.py b/Python/hbayesdm/models/_ra_prospect.py index eec648c4..269b4bd5 100644 --- a/Python/hbayesdm/models/_ra_prospect.py +++ b/Python/hbayesdm/models/_ra_prospect.py @@ -100,13 +100,9 @@ def ra_prospect( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "gamble". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_rdt_happiness.py b/Python/hbayesdm/models/_rdt_happiness.py index 2ae268b5..2df26560 100644 --- a/Python/hbayesdm/models/_rdt_happiness.py +++ b/Python/hbayesdm/models/_rdt_happiness.py @@ -114,13 +114,9 @@ def rdt_happiness( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "gain", "loss", "cert", "type", "gamble", "outcome", "happy", "RT_happy". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_ts_par4.py b/Python/hbayesdm/models/_ts_par4.py index 07fb3ae2..33aaa6b7 100644 --- a/Python/hbayesdm/models/_ts_par4.py +++ b/Python/hbayesdm/models/_ts_par4.py @@ -101,13 +101,9 @@ def ts_par4( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_ts_par6.py b/Python/hbayesdm/models/_ts_par6.py index d2476e2b..036c0e37 100644 --- a/Python/hbayesdm/models/_ts_par6.py +++ b/Python/hbayesdm/models/_ts_par6.py @@ -105,13 +105,9 @@ def ts_par6( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_ts_par7.py b/Python/hbayesdm/models/_ts_par7.py index ed64b5a8..64494490 100644 --- a/Python/hbayesdm/models/_ts_par7.py +++ b/Python/hbayesdm/models/_ts_par7.py @@ -107,13 +107,9 @@ def ts_par7( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "level1_choice", "level2_choice", "reward". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_ug_bayes.py b/Python/hbayesdm/models/_ug_bayes.py index df627276..23cae0ed 100644 --- a/Python/hbayesdm/models/_ug_bayes.py +++ b/Python/hbayesdm/models/_ug_bayes.py @@ -96,13 +96,9 @@ def ug_bayes( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "offer", "accept". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "offer", "accept". niter Number of iterations, including warm-up. Defaults to 4000. 
diff --git a/Python/hbayesdm/models/_ug_delta.py b/Python/hbayesdm/models/_ug_delta.py index d640319a..92899a81 100644 --- a/Python/hbayesdm/models/_ug_delta.py +++ b/Python/hbayesdm/models/_ug_delta.py @@ -96,13 +96,9 @@ def ug_delta( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "offer", "accept". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "offer", "accept". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/Python/hbayesdm/models/_wcs_sql.py b/Python/hbayesdm/models/_wcs_sql.py index d241f4a3..b1079b25 100644 --- a/Python/hbayesdm/models/_wcs_sql.py +++ b/Python/hbayesdm/models/_wcs_sql.py @@ -96,13 +96,9 @@ def wcs_sql( Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: "subjID", "choice", "outcome". data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: "subjID", "choice", "outcome". niter Number of iterations, including warm-up. Defaults to 4000. diff --git a/commons/templates/PY_DOCS_TEMPLATE.txt b/commons/templates/PY_DOCS_TEMPLATE.txt index 28c00358..d56cf622 100644 --- a/commons/templates/PY_DOCS_TEMPLATE.txt +++ b/commons/templates/PY_DOCS_TEMPLATE.txt @@ -32,13 +32,9 @@ Parameters ---------- - example - Whether to use the example data provided by hBayesDM. - datafile - Path for a TSV file containing the data to be modeled. - Data columns should be labeled as: {data_columns}. data - Pandas DataFrame object holding the data to be modeled. + Data to be modeled. It should be given as a Pandas DataFrame object, + a filepath for a data file, or ``"example"`` for example data. Data columns should be labeled as: {data_columns}. niter Number of iterations, including warm-up. Defaults to 4000.
[The patch also touches pkgdown-generated documentation pages (citation and author listing); only their text content is recoverable. The citation page carries the package reference:

    Ahn W, Haines N, Zhang L (2017). "Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the hBayesDM Package." Computational Psychiatry, 1, 24–57. https://doi.org/10.1162/CPSY_a_00002.

    @Article{hBayesDM,
      title = {Revealing Neurocomputational Mechanisms of Reinforcement Learning and Decision-Making With the {hBayesDM} Package},
      author = {Woo-Young Ahn and Nathaniel Haines and Lei Zhang},
      journal = {Computational Psychiatry},
      year = {2017},
      volume = {1},
      pages = {24--57},
      url = {https://doi.org/10.1162/CPSY_a_00002},
    }

The authors page lists Woo-Young Ahn (author, maintainer), Nate Haines (author), Lei Zhang (author), Harhim Park (contributor), Jaeyeong Yang (contributor), and Jethro Lee (contributor). Site footer: "Site built with pkgdown 1.3.0."]

zjS_XaQP8VdW1==uG$bR!ijHPIY3-VDsG9r(R&fj`p;UKXQqG?hZ4XHsw1pVt@;J(% zT*c9=1w`1C#@0>TOCO~A)YF71FX%*2R@o+*=>w9!MdHS-DYF5C& zbXA$wCchhEf9H&EzLtP9_~S8S(7XPw7=K<-6*yZj|DbhJBTdb6V`N{j81~t{bCFl?C zJ$E3PX*^?vCEVcE0q1V=enQOdS4sW?c!N1*;^_05ql96eJe91lXj1Q?Ms-nYJFiZo{hBVZD`b+1ie$7OodESe& z&}+9w7i+3qVddzYl^pJ%*_*p9>flLl16^72cnWd;RX@hvOoF?d25Nf^C*_=#2q$8zoSz=jHs(2`on)e3q>gt|cuO@$RB$PrrLkisdXAuf4{xMpo+R z96_gAQ^NWwP9e70LD1OR?67KRB#_1s(v`r+rHrj%c`tN0d9)NAVb3a7LcUuwL+DD@ zqCXoNKR9VsEiboX^V-gGQrTLG=zU%3!=Uq!FG5 zt?Q|pCWdACn#Qi>78M4bL4#fXk%U7OxPp|Kt8?U4gGcq( zWCkn>J|KsSHlF!(2dwaXYT<}g_|5WiddJEXgN7^qBCIkpbZ9F=y_p|IpAu{srxzdZ z6dS$bM6D6d`j}47ZI)=zR<&=!e5cPrDPn=rqVSX3#T5xRw$#b~H!e9V>}P~CHY!hB(FeR zuuQJ`g)8=qO~&(3ZB3xfme6X4BATe?)4ul88pNaHH;x)SL1DSW>?u|O(SU`E^v~D= zEpTn)ZtrPCgXe|=JB&%+QSnZ@rxgFTzYhW}?oO{gae(>I>Gb`dUgzd@LmgJ^G(|nN z2##(@I1^hJby>TI(pN156k?Z8i&5=uppMRGc$DiPmdf$1bH*=>4%l%utv&h|p^NK* zE(-)iL!PbX@85+k=R2Satc2bGIU@x)FsAyM!}H?>JeWaHt3D#VAxkAKFNXtvi_8b@ zrLM~lv`WNsT#JLp3gbFge%|wmSRxkS1cYl@Rkm1bs1c5v@{o4AHSfDAE;D@xwU zBPcZR#{Q$cS-buBEZ0!_iFTHkZrs|zhSq-~;i?kR(9X}}8FU0v;yNa0m?P}i0~W-M zvbF-pEGRn?R3sFQ>^6gxTWLgeb{9)ob6+cHZ(v7P&X5Z=&#(M$S()cJpf5CDnmoY7 zd%{|~QN{R(y+yS zFFy)YHfZ}moA_uVt59JBKj3tnRD~rYM{MDo_1^Yc*p}C$4BGCJU7+`7U>v`xKILBV zCKDy>2iAIO`@=Dk!&~|)GRJ58RJz^?92r;)6;0pXZ9k&M6ccTjgp2_2fO;kky`n~x z7sE*Z)glfvx~7|U?E<9H2F6NOpj%(ZCwnb+LWVt)@yQfUi^(D|o<5m$LY8W*Cc+&XX&@!OJ0z64<-r@=dWtQJfi)tFa}PkzXB+vDe64MVBoh^! zvwiVPCl6u=@2|yK*TpO}3c*g=j(?dLTV&I9Zo`LJAFoO2i)xB7INM{9NZE| z<0R$H8{P;Lq1AX_;lK!n6NKeDfif94>sutb*Er9XGg`$M0ztw!>K#dIlv+ga6xvnl z`m;UR2GR>@n0!1sX??)X{!{l@U4VD!K9Rp*l=L=h`FM-KawyI`8vyuLo$A^eTBv+k z><*gsZ6=v=8aNO}n^b~m`A9bd$Ek6R$hM6~p{Ga0rsTkr2Ao|wn1yrA+cgs@EpGV) z-eIVO-EBa{E>96kW(IpM9`#@swl>&Q54yiRdw0+d{Jt1Ayl&$g5L7G^x*~x3PEJ>& ztAny6B+k481jy6WJb?s}B=UhwzxeyN;UzuL@FjtAcvQQcViW^hbXTyTs6t|JjqeUT zbGMFaT-ORMjE~uzH`8{I@L-#9naOr%Vc4Ts0hfW%_5%63iU?09vJ!LgSu(K4QYvK= ze)LOHI7*1tnPYIz*qwa+3=btS7WWM7xR*k3#U6SK27hrk!M=wFB#YR>pC#I8?p4^c z1_u&z(Cy3hl5WX0Otz2j{Ad!86%C&8giqVWw^wGiBRL+|LD2&b3mw*!5R2i8#Z#Z4 zq`Txz-WR93lRx511~$h_5}r@8Rv30hZaaJp1t1fGaiTzxWzkM5(rs~%d1_eSmF(TA z-20`v>1#LG2Aa} zvn7s%t!o`xwguY8ZaU-L8H=$2O1^2Bex}pj{pV7%5h8-wXPyRtL-NVxGLvt<)h=jH zA2J0QR}{K+Szk?m=uI^Gb#jkqX|F_CX3@n###bf28%IH z49(@89+=T6yOxrRj?*y%98)U-OOfo#@5RJk=Lloh-IhDkR1@lRR8Yt?uLb^Hh8hA@kZa;J z3SHa^TiP?FFTyFj@cVmeyHZ0qZ8l0X*b+WP*XPDEJSu;Z8!Y?`;!olA0#FC^qfFmy z91JQ(A!2-^*HlFnBI*2Z*9H1Z1xNR_gkk}A&ZnY)<@UFk_i)hPQjH;G+ zJcvemUA11pFB`m%EK}cq_l^W46i@=l&naL{9`P27aLN?c))7IU4iF_P8&>c0vw;F# zA?wiSppAvsQ87LEhq2hC?2k6`K!=Zn+~D$f=#Ue7Ep&)&h`HZmW*rY7!dUJs1|A30 zZ47 zyZe(}O8K?{xx6N2ZN^-CXSB8O@LYl~>INssuF+t}%z?}IB=d00a#cnJ+@fFZvYXYn z722@kmzJ;t>QN5v@S%=EX6QF*{|2bZ58Z0_vZsE=%tha7sEtz?E$ka@0AsZn`@&y5 zlVHBWeR4NFLr_nc(hp_AHtL+9=KN%1OQ$p<1>N`%PxvyPpreR`YsOTR*-77A46IHggcaE55+%HmpP(rwR8UEdH;mM-*(c2(P0k$yU8 zc9-$WOSC_}xJug+vALz80U8t z=mW)MCz}ex-PP;+&h)(I)Q>6q6Uzfr9Bsc2C++o8ijO9~y0opqtQYVwZ>$uoKUr>7C%|2SR{C1>Q z3`w=gkCQyAswRYc414$Z=dpi=R!TU?RCoN%Lx5lL_W9(G62Y=BrjFZe36d20yhVeV ztfgqlg1c!2V2L3t#k{W!m&yZUcP|bm2adauaVOT=m4kEEnC-Qjo~WAjTSSPi6o=uLaO6Vr;E zLdN!RvRa;gC$V;j0)2XE2KV#;I&&F-%7>fiNUldF^u9IO=A&IXh&9i*E%4PF4R=mN zgl)2AiTY?0cLn$~C@Tsqu_@jOgy*qKi=J$xSc-{^Xc4-!lF+yx6~H}`LN!0;d|r9iv3bH8q2(|jO4Yxg6Gws z)hR6Z^SxkbD0v06dGL1bgxSy}LEIBiIXomD`1` z+6z+49IBVgf)@sVht0QXT6Fa@cg0Azw>id6QIe;;9X-ZROW_IA`lBArZl3XuBN`&= z!xuRd=61b_0$Uefo>fA9^Sk7J?vAIJtAhfuC~|zoRX_YlELL*wf1GgV@tlxJl8Zxl zAN5?@4oi`lZ3+=x9_QpL<~k2rV84U&SD-$6N{C?o#F zW#pZ&>!4lP-x-08=^sKs$2;EZ-HIHG7isg$an?G8wfzz#^7BHCZ5LNX;}T{; z$MW!F#ew6kmsVX^2G+&xG))N3EsP~Lo$oAP5sO~vj--cn@^^>o_B|$+HI%*P-|dIl 
zT2!rNG}WJ~!`Yr#o|ftIp`7t|e(@(ikw?-ToXvWgj&|v!O1|MTnoZ?0qFwWrD=ec- zUI~#_m+?R}dG8poL1<9Uf&}6IC-l|QQ%p#4rX3rmZci$>-SiBT>XvyQyhQc%ZdP$< zO+9Ha+(F+}h+WBiEMr!LA{D!dEefVMNy>B7j(R}Dd!gHBO%&zo`h+5@=32=$nvI6d zH;^hm!WXDm?9<##KHU6kP< zg{hb)Ambn4%IH{;B{X^{|N3Gkz~W}+BLKB8IIj{i|H~`74?HiSuXCj_4t)2&yyV1C zaJv+mRrjOi8&Ku`HjakO-^qIh>46}8E*3L z^43H7bUAm;3o`#IZ7>UW`GJ@)dWQsJe}DcTqyNvp2vdWbNOmg*{#Dvw7SI)eI2zd; zga0q`rsUlMVl1w z0H4l)UirxOpC3)Fpf|(6{gj50bDnPx_k?OkzpU;{0r~BO@8!3ZYcI}EJ*{h38LExV ze$AY|%p3Q0&{M>0NXKA{D7yAlPntBnJZKK{gC(66;*>9$Vo0OyG(HcI6P6OJQY-T8 zUyDc10j280OE&~)H|UtstN)7PLOhnTxQw=N*6UON^!HfIoG35M{9~mZNX!A$ zsv1vf`N(wnp4P2tHhb(pt&Y{mhD5VM!}3p?LUMCrNcV7!e^?IxSr`9%^FImvPXhms zOQ7K^KDUv_XYSp3Iyw?M2Vrrf3mtKmd9O=sYU=<7-p@DKLXgL?^Fy}> zJwABMJVXW`D6HAGbD9nqh+ulrle9$>yij-dyU4yqezkX~sE+ zys(DYOJ;uG;82Oyby|&X_)~dzXw^|*xvLw$z~yNuX=7#dr-LrwE~MV9u0+;eM`^NC zWdgisI7C8c9jS(7<%P$PQ%=Td3;7{j2-5;;G*jDWAmGA1IEd> zYG`82AKEp#;rKy=^}-jOEfJ2IoYC*pW83J5NZhr7*AxZH>kXS}(%yvr?$|U6;H3P1 z=TgAt0{p~;FBUPmnXB6vD`_qM>jyG_M?J<%KAx`8*>d5C?>Ao{uyT$$p*87(T@}o1 zsXIna$X^%8Js@7wT(iou*yT$M&YBOhGcdYfVbxh$c=8A+QjvwdAQ^+Q zjKq7g*72p=2dGG!YrRaYB`3*qR$8dY>L?H4_Og!Prr^%x9WPnDQquk4yJ8%Ddd31z z5SfSGyWNrpd@_X>3G9+|BrBZ?Hv@T&m_o9H)>eg*3e4N0&SNNAX*Qa6`@i3vl(-pK z0qu&YPz&h^??A`!dW{yKe@AdIT`O%cy*1qzG-Q`K%l#M)|T%vq?bGirx5X_ zX)4AA&s#{drXnqUa0)^?B;MRVGmfMW*wS(7J9zY=4vUqb3&He(Qel&60QQx^hqMC< zRb9I0X;E_}DNK)Gi%LM+5Y>3yf<*O9;rd1VHxK_RKXJrqgc_N0 z=GXWI)}Rmi*DtcuqV)BzITGn_9eVrwkGp0itp*d(pe|$wUYY(fAgW{~n)qfq>VZc# zLr{XUYCFQgM@7(SuCn?q<9$!rkNbQ_aN~$t9XhQ8Zz$dI-dvPY>4yU*y3_YV201xl z`YVqgPb_^#)=Se(65V<`3+?EYnf#0=9lw##)2q|t>d0qNo8OylMy7APg*8FY7E~-1 z9aX|twsmHk;mR8CLE2VJD)Q+O6 zqXT;nj_!8Hzt0Q@>h0x*O$_!Gmq%WX-Diq&f~cK6pS>6eT#z|LTh=Ox4bpcZe^AEAs@a_s9;{fUh_rU1nj7 zR{~LdXm4SW-1B<27S(7rf9o%MFt5kxnUa;OOCO@90+kJR-rF&XwbAO{l zeY}URM)`VuXM~UjRUg*_ah^5zGz(PJRKf2zC)2tx%z1^J)jgS3_>o5?f;riqfrrnQ zcxK0p81?XAUsr0zCD%uMuttMIUwD5n@&7!EAhx_vnfF-qFv9Ixs#SvGnJl0`4$@z4 zdzcJV#Bw2PgDoHG+`_YhBYKF^z6yD%%%vgi48J@UGEC_c4kus_ee)VbVkpl01_i(op#DGtbkHZarDWWa&O% zX%iR;x^7T&-#*Nx-xOkOnFv1@dhAh`-`xP_o{S&fw_*`z3^@WPA+i-JF5}g57LcwX zq@@|Uy97apdT550A%r1^9AKE=JkR^P_r34E|IS+9wPu}t&e?l^Vjudo!64{`j($4Z z*tMw|_02xd8xs0h+HMly_}#zAej-_+kEJ8|rfx5bGXjIP056(SzNZsO;4nXXlIu*u z3t@qp(KwpIR;3KkdC!6YbZkYDGwPZI^ni5vzh(g>+Nu3M+E|k^;+l78kn~BDkkBf) znnN5-)A*;#M?s8-GB9!l{Q1?eYw$c@zbV8GM^A$Tn8zuj(hZ(#+0tkEp0eLNG!sjk zIE$=>f991pS#n?pt(k7>O_5o0&(l&$Us52F=Qc!J;RoLc&qf9;S(eu(Yk21LiSzJ) zY;|Gka)A{i=j1H?Kb^>B|nLcW0wD?}eH zIu>`pUF_v2Yl1^X-5fz@4T|tKYN>#`GeK7c?1KvP-CUQmY=R%KKn`GZb`s)O?0Q3S z4yx?!Lx=yUgZ?>Qu2L(Gpn(t*!s)Vi7r3#EZlm3I!vuH?`+KNjB^0o`Rh(K&@xj9$ zkYqqn@G~52H-s}co7j!$jd3_9#cpH7_ehu^7s%?lesSIp;`?CnuMZ^=te1k& zet&m%T9OCi8nDe1PB<0w#C_l#ur1fszi8Psf2NzOXT6Q3Upla45M3$E zU0I%n(hN${lXLz)n8Bb?U|{5IG7y(*=YoR8byFEOej3E>C%*(^N3AoiHx)7ii~-t) zymX8e)S;9dhJYW`f(j?GefhhS`pr)hx4fSJuTtO^V|U0J-j%Sp za0%6PMnCzBudZPl0!xAOa*U6q0<9^m;FHHUs@+*n~H@yV_qcC^3V zH+zcw{h8Q5nqeuww`BwmTbA~1N`FlIsT~c*>-?z#e?=)50ObMIZ4Cxqeg4-)L&G(y z39?46&NBM*c|N*A*Dd{DA@M^?HPZqPV=Z-^Vn&{>5N_QqX`Df4n zD$To2kE~^1t-AgFoRN=f`pfP&_HqRjZJu}hOtwK>Rl0GjXCC>V&9$r&m(%9|1x3n+ zbtAV=CrZ%)3fQ0i;Ag>K1|YzM(?o&zyND%bKSjh%Vmt9+b1GfYPhFs?_KKfJ7F7U+ zIx_j)>1hsrBir_GXScVeHJUOpRWNgf^BpEPQyj*~5YTxyJGuaR`xtR~soRp<>u!*$ zG*=A(k|uCAFeq&r>a)q{hWq2-UOy4EiD3q@)Pi!>-;!_dUq@ z<5P<{4C7fh22^hRkX4-9ZQZcn`$J+4Wybb>MUs$AF+?TIOY3kqs%*?)pS&k z2QYO1K>nZO9K!f6mzfBJtzf>8Xn*YRg*+alDlG3E+DG$Qoh7(MF(}Ul3Uq*W$aqB? 
z{k-mN!Q!8(fxdY9s17)gVc7n%t9Kt`WdrzOUA*bHn-axYvp9ZuYN=eykxOj-)qkfa zf7ri&F8rkQH4%j|v^_u86+HKa!PbAmr9#6nx#AvpRj`Dxk8%{vnhLDS1aatG=)TSkH?fo zm{dj=gxIJ9lGj_7(`bcK87(25JeH}q_S|NoChbPG`Tn$yP1Y}@1DmXLygG9ZvTUy29YQS27aF=cesCE=k=Ypn9YbNN;GJZ zJZQQ23X;tBE`*MIJ$KI0_>lDG1Dw~QN&TjSl@<~zV_Y>L3(c`Zc#6DTI$zBHb`vEG7S{BZ`|aVDV4n-S3>Rkd zZ1YonHni<42hTj*pZg0-6WQto4oJFk=fGTj#ipuzfiCkjR6IK5c`E8~nMV{>emb|d zzB^U=0(q}~H-D6xe+Cy(zA&_M;h^aOmzUyvhgKA8KcGzi_ICce(6VUPVdk>q1LA?0ZIy4NF~5cG z)Sw3=y$(*njS1PN5@;>K#;LbK-`?+!COr!g6>MEtNUgBnF_Uo6qVUX#^tudfYT`+> zPZj4y{=E~8^2j~E-BEhXvD1{G+WG;ZU5==x{v2hW3vm^=}DT4wly?`Bbv zKq`*iyPwFbFM>7B^hVoL6RY=!mHXG0zs}h}9e1&u_;3^HV)pg>vDHEnnkqeT)sNui z{9Q41?Zp$umim!Xml5Uq6RU@Xg-kOeLX93O{<4Z9tK8v`^Bo?2UNfJ9SUCMfLco$pts!9d8ec)vsBc0mV;)<*v&#Rp6DZP zMW+Qjo+>gj>9tfTN;PifJp2IOD>V~n}dtqrc8{@1bTYTr@$Zah)VP zb_u@N;1iWC5|SK|0@?>Udh;$(!ly^}5w)+xNxNRi;4kGu$6H4ck_QZTN5r3bBpu!X z`gd76!%IRWha%i6YX#0;VzK#GW=A^PFw-d9V@toX`p=)8^d39nqNK&n26^La$(q~r z^0HBpHyi*rjg{ZM-gxI-uF>cp8zb6;se^h+tlRA|IV4k4J-C)M8KHLZ3_sS2xTq`e z9imf(sh>ft=s0Zo%{pJIhIg_iR}9x2x$)54Z84ybnZg*))4Hb=Q$n8fO6usHs=|J()7r zSsIrW)tC@XsQH&sFE>^u0sSmDJISI+?ZwkjYYuN_m49+5U1>di82iD@?jr53xbL4I zqJx5@_fftBm+5`P2G?Pp^#ghyQm^Mw)O`dc)GLuMCuOLvu)w|+Va*^U{{%IGhU_D| zI1?Y?njKL`C<@+;;4VyNCUoxI-xV1oZ2R3hGh9#EQ7s0SXmj85Y({QfzL6@}1>=ni zo2^a_ab0XuiRZ~{>O|Jxs1!?tE+(cKdnnM?%s>24;bU zKI+I}L@#Lm-rRZVlU;cSSS5Gz@VLMx7&%8PVyMM*vCH;q@cAp?#cv1ivFO+`cG9-9P!gzAJyFeuh2$(|&9$`o~`@&DyJ;9RUxB5yZx{n}gB+T&K-J zB1Mx(`C+HWgxgfWz8j?@KnS3(`sHtObpdq0!SC%n@}QPEbH4xs&a7FBsK{5j z-%(H}_iXHhuK%)no5Vhw1J_g^s$l`>ew5I>|I3B#k8@3Z+2;<`$O|pfR1N&mueGLt zhQT*rubUw&DgX}?iT0@hfGIOr}t4o8R9FK{?i~5Jc*ZJ;GYz<%@ zbBq$>XrNT!zSHLO28ZvL2-2=bY_ zJ>W5+=z;ga`j9fU_*1-ib?HGq;s#kU=3n`UlWlsq>+m;8aR27b&bu2y$I$UcwW;|A z&OOWUpCL9JLFx9F9}Vmplc;ZvlG-*kB(KTm#=l!SRcpG`Oe#j^dWu#|khJ#Pf1M#; zmEsP&tK)8pMtqe>Dz+005c_&IHd>Q#xg{{VAebz~3hHe2G7jE6$tvt9*i<;oOQ0>gvW0*(pby(A`I$Wda@0Z{{9ggN@*QhgdZ$y3#$!EvQ(pkii$z z1FCT*GgGhKt+Vsz9ISgDPww_PoYfPKNRlh!q=>Oqi zwxuoe!mjwKSO0_FTJFl=7?zFxhfJu}_ukU*H=T-~faePw+-$oqDj2g~>NA;z?WE#n zw3}?H-vHZZcgIoWHWK=+GQ#?KS&AnToC_O0;n{Prb-vzY57Za3R{B`7{kYnz>gQk~xt% zPff}`FE3iYz@X2y$ilBJh@P8NR@a|zaoV|~9!nbS^19i151Ms({;wj*n6bMd%ZGY0 zZkYXfPD%Zie~C5FcSHXpbSy6#ePR}>)9m5;>`7d#NcxRO%GS|ZRNH3F3;S;ukLtJT za!m6T@owTnIQV-vakupa(rk4tkP-qoA>=>P`_W#V=55OKK1hOW+JYH-t(bLm73j^k z`hv;s|L+7VfxUpt$3l9jl~3EyIyo=;eFamN>nAbTKjyA;k6Uj!k8UpkyGvbd7~D*f6mJRMRHND`qb8kO8><@=e*Odr2|SH z*62idoAtI1TsK0+`zg$%ILm0-;MvCeTEFPj>f?@|Bs(efxA;b5kix@G%+po73+VpF zFxJOMmuVOu$XVg*?T@`V3IM*c9RTI3hX>S*^S*tZI+6l0a6Pz19%{Hat1j3ZG-)tW zmLG|8(a+|>rE0i!;9?P|`;W|W%oi)s6VIDw|D3kZs`{m9Z|w5c&lc=19v6l+}0@S;2-2G7B&2{s6=hm9}?#`p5<*I|;RNqFX1~Nh4mdtnK zRcdt0w&b+GbrD{rSKjL+dY|Us(TrJC1tGJh2C_3Mfab6SRWLSy4_Lx zHLr2(OqBy@CZVdWEAl>e?9tK?_HTv-G%#Tpb;Sp+;~#b^_*aNhZq=n)zOK1LOBmbK z1nx1d#I#4~HBx8OC~l+ao!AHV!{>obui7S zc4jsZevalPasmzo9f4PhB}z zAWk_WsYn4JS7H&}21Yhc_n(lT&?<|7k(Gblag!oP{rZPHDPfCmY=-@fg|^~L&i1*K{osoZ2P$f_8H%>nct5uwkm9Q-;@jD)-_DsqE zb?9BR*Jn)j8?YAt*?he|qa2f>di_}=Zn>s3+jfFh&Kfs2(qV54dNr6Sw0vgt`j>c~ zZ~1POn%Acg2yc+~#ZQ-WN7wZ6)*94n#qG`62%3wJ9@B4g3;}uJ1tkHY803Za%z`r1 zKmFe2R~kQYM!wU0Wc9zIQqS0(n&h;|wjNah=PlB8GiM_RA<38?pE*4milsL3AW-$y zC1gT+6eEI?iyIbkH>*<%_>OkHlPgNU-?hfch4vO8c%S6JaxU~p3Gc3p-AnV==UX2Nj`kS{Ra{!Tx&b4Sz zWxQm+1?j&`Vz>QDUJ~Jzm~yLYY`8cH81&1wYwC{OGt`lb45IhIn4}9*@bPRa@-+fI zowV}o=Hfg-&0t$sFF6nI-qGQVTN&L#@UlX+0S?db0xaQwa{R6gBAqOnk{cw&(mBk~ z;SX_Z-Sh-#Tx6dyAjQ{8`Bl_WBf`J%d)hz^OL!|NcI|?YFBWGJK}cs6fuf%cFE?=G z>|EnD4~9;CcKX~>T57JLjUo3;_m<@!;VNym;jicS7ye8JA;I-Lf6M)XWQmrcyXxd; zpNi#2lm1p|pyIlFo{xu)V^I#4^a+2HeIGs$h1<*?9?V08eWHu#mf?n~b@-6+cDVgc 
z?1z(6(c89}4n9cW!3ntoxH63fhG}1XYSW+qx0wrA^i@A9 zHuEx#X*!=Bt)kN zK?H_DU;Vbz19~kuZCST%wtCgTh-&(Mn~3e;@vG!phVm;%OdRFQZZZDwamk?=e^jPe zt2G6OKABY zvbtAST6?G5(~h&ZWuD=U%b#hH13C>y>}D=~dl)Oi6+4L4H9gzUjmtt?7h#$a4!qkb zU~#6F%8+c`=Z*hgc+7p*5r6sdzC_!S9;yR$3WNtL^fFKU&$IQ+(>K6AW2)Z)` zLQ$CL)pooAa!qL;L8HHxli|CsgoW?eK||>fver0^y-$|UaY9@N`=FKM?@lslfcG)w!w(kFI>D+ z9$A!heq<6{SEk?)ADe)o@u?a=MXF`-SMgXj^0DybBW!a$nuG4RzaYicmZr zv+;JYZq_7_Lpl1;**sf9wfMtcx2=4<=S}m0rl3PwO2w8=h{NJZ(Sq{)?xFKebYE+1 zYD)xHflI;1iq!v`ys3@j@8tz`YMTdW`y_owTBMj|GS8}l)er&NtHK*oihd+9g>mv zXGC8<+?>NvT%8}e67|?&8_*qfGr36DCDR7E>)QN@=gJj(d2U+_GgT)_x4z+1# zZY;&kOK0WbXIRoBSlB;*$>I}ic?e8UfW*XezPN()ddpA z(mJ zh;>d0=|H%1+`~>_c(p)J%v!`u_hyVv3-gj*!pMz2+a(P{*HNLpFO$jB%IluNg$P)A zM3feRo|j@$lI2mN?zyPG>tBoOClGJTFvKV)W(dtTPHUBv$HjKoZvb$?)C2-=GL=UT zLyyYB@Dw$vR9N`us4L{Sm74e-+_#MbovJ8!X^jbW%M`p65V9|BrKH&|gUdSO;HNV- z&rs!WI72&x)`+`b=+C6dL4~JX z%AO|;{)3=1lQ_olG3&od6meQ+eT4vccJEI&kJ54;f3v4)IyRK@XM?oEdfv=Tjo%_S zESS$$3ba`81ihH?1=C1{I=37h_jB*Ir#)kt``-b;Gw_LDu!>2n?R1sT9={`WlLKkpvQ|4j?4vodZ&FF+ z72OLsO-GRXnR!?#7^P|~KlRi~!^X<27-ptD#eR82gSqd0D= zfYT@LUOxBM>92?+zU@cy>z~jYZTZ_mTb3SC_1Tltom>O6%Ocj#E6SZet_&#j|5~2b zOg24M)}iWVUV#vFJ}GLA4mL~|vmgS^!-{a>`@u_W?*0jPKE);WYBkWI zI1PuUHR-6-wynDAHOzpTFU0?#Si-esu)T$tb`rd$#2YR4Wm1~fbViwFV)eiB*PYs% zi{d6U=r4sE*VF|E67;$nU8-nkfYN-xPJnSa^z;qo>cI25QCnKlF*&U7fWz$w(x!v2 z6~2wodNQ}1K}wlU`yGu_ps5iqM8==nU#wQ*3)xc)8ezIxJ>{*$FbS-$-`*mCvcFP^zhsqdRQrM3g5 z409B(7TO)tijLoHIc@QA_!e;hPwmX|uF#0h6)vwim+zsZTiI$Kn}W1A`eW|n9zQ7J zT^d2H*6PPJYI@B^A{KbYvc2dyoesNG5>8V_Qcc?uE=2t7{L*WfnsV;>z=aJhRLW$@Z_dPdLT`7t$*m!5#PR+<0;*#n_xZk}s z8jUB=$1@?s$pQHQ$Vks(!5ii|3C)7KWOGwPCtv--EBbL)j@ZN2nn5$QE=rm>fSF&kz~?g0IBwb~_1W7(L2|%Ub|4igbq(pcwAgauV;H>FPa|#SbMCJ~@}6?b zfF=wS-BO4+(rX?w<#J)Zh&YvFSMFw?AFL-kNytv4y&LUUw&qgYSjCadw+yct=DecU z)UkM3#&{}L_q`M-psuz|taZGoIHm!@gH8s#*ZIZO=N&D02EfSljpjmHIg@`i&o-8N zH9UYbAa^5(70x#J-<2>>Rju{L$Xb7V8EtF(`;ikNvHr zCwG70d$oUiU(eCgQNGcN8-ia(q0@;%Dtd4H3$HFHFC!VVmn##${cGnx5EgM#A#6(w!*_h_OB$~yyXVWmhxbbAF92pq-G<>&g(lb`h71u(DYQsSy*C6Wx-r=C$o??aTqUd#0z?@j#OP9>ph zf!Dy7w1%a%pmBAzJ-m}0kV!EK&SftStb}tRK4$!JF^@5G{y;k$i^|(@hy+csnX^i(Bzq}=D7xCN0X)0G8LrVNVD-s z;~lQuS*AwuL9~UBuYFPxILnp-Bu$eYgYJBLDKRy+FIV7X=0>*f#Q&4z6?Nx}k8+G@ zt$D63u1S;ADM={J&?6vBe7f=5)pUvgEG4xPW}njE=CsfgnY3rTb!SU6BB_9i=@q1# zGk$oS#Y^9+ZP+q1Ctqc_8{1a0;$7F$=Oo7L zW{Mv4Qjg%h-`Zi|XiF~LtiIa*XPVF^ZCU8g54mt{PMiCl@)5GIA>h%BqxjT7e5}^| zGr#)%#;p_U?i0LwgwLpdG@9j1OZ1PEvP)^7h1$FiOUont2lBHUUwjz$CMuH=If^?` zqaSu&lZ~udIoAvv-bDC0^ZxSlr^vS`^S>Ptu1^1f zk6x)Mp&7X0pFq6cRv;i7perB!+k7jL5&BvM)%m$4@sR>8=}y&%ruyxHVRf}6!*Z4Z zxrJUA-fk*{Vf?kEx5}K}_r26Tnh}0{@1f)}^>zJyw8g_|6}Ix3%YwM4bhYkILrVp| zSnbkz)MrzU5#kZEZ2phSTjZnk#Sqv!C@d#3JO^Y5hoR84B>rZog?{3_R7mE0maR6( zmf0!bg4U|C5Boufvi&O@!(VvBvQsx+Lr_zV%6`&H@Z=e=&&<)dp6Biz+IVzAJqko~ zbmjLwcg`j|_kREUb5E#C0_aH3C#H}8n{7wAgLv4yBlmvYJh~6QePhEZMqddl#ypMI zi)b;X@WZ8{hRJ6#Jml*NYxS#wA2N^BGz=zlH*Q^8N4SJ-8pl$cBKzGFCqUySzwUag~aR;>vTq53UZb@?W>SL|Oz(#-qUJd3Vb|ssz52IxY z_n=h5=N|h|K76E!?1Jd+8yUL!sn!s(EI}Zxw}RLFQ*9vZNb_%}CbbL!Co^=~A1B~s zPX-W`o`Ih-n`*|wJJjt+J|%As^^R>8z{B5C@>b(uOw?-jz3!#1vE+3T((XM6&iN_U7)a#rs9D@G&jbtz;Axqn^Zt85Ar z;5rOd6+`wQqVWyn;f40CY3_>(@KNiTcV;yKj{k2rNC*~b?PI5$x+)rZdZcm6WaUIM zufWLugplkCUI%+X8(Z&{#vSp@NrP{sbt@r-sJ<6x@~8>{zxpC}k$cI;=5_fONS#(m z&bW{oIY%%u`#}+)2H{sX+|qkpr#L2iE>PjH$hQz?vlHic;+&NRjXwNT2*|*z*}k!l z4LJkPN*i|F;wRoXZkCR3L0sVU#NK7~dk5576VoA-6n6~?&sAQ4m&-{9AcFn;O#MIj z^K6|yjK0*e%s4tzia0$}(2JX!yKwdmsW+Fp!Z<6FO}q?J z1C_WZAf6^68pH3c24$Yk$V(*iBr{q{RSO2QAx=NNncP<3*m$dj&4Fk^$O=pYa^ey0 zgyCreLK)GffG*^(ty>5R>HFEyL3Xb3ll|MlIt1~;!!#;Xv)nx%!KY?0UT$L?EgN7W 
zZ=c%6HtG5^-A~Qryer3bAV=fAmbb!O|0ZO7uppedF`RjtZ!U?RE!ob3Qu@Z7oWa5* zmS8`Ey9iHjXVIofgH7A`7)WMKk+vIdZ0=HWN=(rYzJZP--;|5aLfHUIseCe?lTAi- zR%%^8;UIq9vezMh&!(-@ZNQPHa>s_WbOznW)nKMn4AG**rZH`$kh>Q=S?|5Ql@n3z zze$C{%RKi04W_>ym`6}@H+5;&7suJ@H2$*RSKO7z;3VkcL2PaO;=1<9)HC_y=;U2d zI~74*s;Ai0CvKBRSV8hrVmB~8Q<)IRpqu1Bf$<9(^Z(X78i`Uc%=UyxR;7y}lF(;2 zG5J=N122`080KHhed%82YsvfB_1>ylSQEPr5x23rtY15t*60etC{wyAI>^Yr+KQe` zu_c^BH?1~o4m<)xU+n9j#xm&$Vn>S^S2 zpTpMF?5@!7-p=WSTAUKiB-y*=7g|rSIGC&vMrWPm^wXdV*0GWuqZriTPo081NI58* zxCS{`fP2Ec+-)*#0+Rorl&nZOqWNdF(%Do;uc=b2hYGCxGGc!($WhMtS5efxRq7u; zmk(g*AQ37~oWE>bD@W8#_}&WvOGe)V&0? zFQPh;%8f_$l5_)($ffdNOD#8EmD7DQ5pf7m#Z+APZ?E1h#&`(^QW}+v6B{(>KM?!)jb^dX4H*v=|4KEG2NEv~(0d(i#1oNupQO1L{=!^;X>k zbb8b&F~t73D^k_>8QjO@%?*YC9 z?$JWdz>ie38I?*G#%ah`jj8LBg;!RHf!Uc4>vpvR#f{NTh*(YIV_?wy(C60H6Maxa z7Qr#}UqPs^qdwbD9^G`OACNYX7)ecI+I*L#Uv=-w`{5AAOaAh*z#!VlKKdwgvZqZA z8?HuJexlZ~GyB3Y#UNx{fjeHuTCil8w8*dBQzh$kess-W#Lni!D%7xlHupHrt- z^rSuij=%>a@gNV*7rEhndaK422+w#QjaBSt;$B<8ttF-o!`03zguUvEn?_uxf&u9L z0B}d=J@@GEu+D~`5+%pPrM{0%l^3VX4U5x%Fb5}xPk9dqlqK+)>}L}a|D{86;SpY) zF<5QZWrM93&r=zIFPyy&9e0WJ1sN|J^4>EndwZsOS~17UP;aApNLQ$(2`Y_#cYEMg zV`%-NqrC}kV)uFd*F}c7q?h%*>bg6}_%#=B=(1Vqj_Z1r(tLs$&{&-*A9mswBK`QH zO)YC3mG7%G)3Bu>E+2N)a^Ya({HM8(&P+asTT<^{n#v@ z6^J`8mo~p{19Pqm^O2{_Fjq~<@07XaB@33sZ7H4DAv4QVgyDfubedvFzZWdSe%E08V=6MAh zx0I}Vwnl@vs{2^=%vAsa4TE#m@hNkP+`b)<;BA?1<4mK4w4ccxUI|%dh)pU&amB62 zc`>cKtQ6W;W(pcZnM;X`KdhE|Lnk49u@WsC)ba^v3YJuyIRF zOG@O+x^|iCoxCC*cF0NA_(W~fJN+Ms5HmVdewFL` zaim7Po^z@IjnTel{8aOuDQDDI>GR3l8SKwHt344)EpvP#I41)WGPJ?3?H5mXHlc+r zD$MCznEoHz_Md?>uY>L`9R?22)Y#ePKQE?)cE;7WNZ$wvT-1ojHBKV4W%+Z#V#{t>yrX-RbQ^yva@spNq5JgsbZF>O!!@j4Ta{mHc(k8f_#K&JMn)vt-mRVc|Z!2==4JH#MZG^YQ@9LHpv)V|<>4s2l2@d5-`P2+SFbE3@oW93}l0 zga&A>F>QJ5w1cLeX!xdVdzo@9@vL$;D?N31LbhYfgAGb=1l(f$GvrQ#Hp@Py*`3+EZU`FrBO36e6y)w_V?x*GYR`Wi89ls>}2Tqi8h{8AjW+ zDIcfcqcwR(c&;JLuFG=UydA`zH5z1p(cQFr;XdIY9&kzUf|xn!Th7l&f0WoDmuTxr zeZF`HGGE&rw?cTD!^E3L9xB0>x2iMPGjYyHyh%Ih&{{d;ULN@;!{$ZKsNHV*+Ic(7 zA!AWCHdR4!0aVp&?j#CbR4(y69u7i|S?vAi6rS$9_bcv-ux`})_)G81_N3iiQs9P#jH2tfUS0MHpJPU!qp<% zcmf6z6yQ|b?!|b1d}&*XRaox=kUwqg=AxiOqYB8~Nkf6E85}Yq7`*DlK%KhRA}C8w z$812P7d_4;-0|TNYQ2H_2!XR^Z?u-)P7B%Ck8Ld`-rM=i+lz!CY`vq_H4M}}&{ zFeTV|w4Le1`f)44ZZW=GEbA+q!`z|F*nPpbHKC?^8A;1@fX!-*xhE03nWM0ud~WL> z(kD7kNN^JvaFv5)wGKS00W4#hw0o?GnvF*I{4aL+BlX>bQ<{FM>A`T9Dg41>pGMd8 zCqW-Y4LN^E(Myi#vX@ueL7)0VG$z&ibl)n20eh#gC3fSkMEJ$7$4+ln(>dFc>)g9e zUBPo@=;EmiPWz$%qUNE|wddHMWN?TnRN{?>XZCT|_y)lNe@mpNvl5q~CXokQeFScRGUC4v?{%Q=+|BaH0=^oWFWq&e)z6-XR|9R`8o&DO!3?c$ z23RumP(}DC-&OM1g<%=zC;*4YQjb+<_V zzR;tt3PnJe6Ay;v!AvhFYIgHJ>kdI@7IZb==T=`yp-A!dE}F{l4__SBTSfC5ZU9A+ zG*jOq+&gPwpa2wy9oHmxZ#{1Ky)c1qk<2)baq1jrU8*iDxAism(-W>KS>dHw;uO=f z^+>u>7{No|uDro>Syh;{acV1vN+fG?a)M;9)d-f)tI*X9VS@zH>x7_*uHQ^pY*H-Wc!%#(RFW$)8%M3JVxYWte}e`ps2pOC=)Q-*+$G zxjIbH>pep7_=eH2{x7Qwk3(FF=i+5xsK)|JiwF*~atn0}vOTb!xYJ7u4T;m%b|L zIVNn>Lmf9mVE(*(Vox1yYs;V!%e-Kvm)4i3d9h6v+DR+lLqJ_8Bs0`ZCmqzJv$S%@ zQr+3%9@!MHrU^GJ>C_~}Pro3Tou8d3hbjTMk&U*7M^4L&1IrGP4A^!F ztglFNfWBAk{woYIxTe3yWYm#eji|wCnBj3__*;D)z3;KGyj@4hi(+7y-E4JP0L%o> z3%;~Z(l=0Qdb$hJx|UpB=zf~C^a2OX!`Bt0xy+x~{mrxNo(c1nQJ?jKGi({FfHRDh zishrJ&smZCo7S$1@h1Wtu}vAto0)5aZz^9*6!;bL$~%%u1Si@+-1=+!Efmj_$UXhq zq#cvqvCk-t?A8??ppGsWN=HW2Z+9MT(`t-3od&2Kd*xsZQ{3a5Wph*I@yg!P_qvjj z6;!{oZ@sRIrdOVY;C_?}F@@8~Pv9HQ$xu`7sKEMC54LE?2<(G?8kY z(cM71xJqZR87@}LZ7PkW`KPV~A8?$e&<0`#5P;LO#&KTV)#OJ`!3*}CjZy^K2&(CR zYEQ?`uwP@P|1}Fh&6_=0bs)rUidtgBXV%!8q(~v8gn-ljbxWfIXcPNf6OB+4|)m%Qoe68tPi@U+{+UmkGWsW zq^{qhFY#c$+BkqPbik@Bs?UvQ;|J(mj>9E{4E=c8ozPGlUs$h#aXe~OGbELULtDnB 
zzgRCWaPmo}*?<@5^5xN~1>T#}_X~s!gbSCG_v@UAA9{`Gcb20k*-m^L_c-{qGrGBU zvBANkr2>~fHeq=&iLz0bv3UDq=Ry*$Pk`x3-_=h965)lep7OwdU5mMFgs?#ufG30@ z4pvnig3l*!t-a*`3{nFan(T=l)BR0XM6djLF^*HKf5DRL@}Gv0skJ7%zYQZkbeC_0 zw^a@Vrbrd|;^ZCFkOPH33OH$`RpR|g<{o>-H0hY)3$zp*5?CR)Ii2mm6`4mRwWLIY zf72%X$eZw?o9=Uu6v(5$N8)4I)ull zJd)h800p~PfFxq&m+G1|UhSFiyarI;<%T;+EMh4s-D5siN-rMa1oDUqFVzy=<-LdW zNuu>HE=$*4ZcC~4bN8IrdHXVO;R6P&dbF<2EZ!1NnejLL3&8bAVdyS`1qX24&Up|V545j)AGb_) znhzs}Woo+rNysCkg7j`vr_YC~HDGfgvM;ytwB%x+s+GR|CZytvfQ~#Lv_5Y*_^~r< zFs~?@y*t1!;OxEBkD*89gbt8>ivf8BY3*^sUhBxtw|D!sUoMoxYfJ=B3R8oM{%B?P zA6->{MzrUid1eCTHo3yv0OenZ16r!Br#H-Y^_124<$G#Hu=?8`&p2G0%PzhHUHJDk z=4phr%;5|bVi8+^eTsRm8eE}DU~Z#sEMb7QhzW6#(pE;^cZH5)p}1;S*&7ApJ~f}i zw=MhyO>JCv2mWOYheBYyYj2t_eXyVW?L<}y!&@x55nmpK@69&HsxUAM2a*{)-tvRP z)jT#)j1PPjG;~6=w~~CyNf@ZL|1&%7x8zJ{DhZmh@yFM2X12zX{)y-A($O{U3$w3{ zAq&oEnj~er*kn}oZlxC=cA*s9mb;3JbD<4iv40~pckUL-9LI?%$H%ow4t@&?mMo;W zkA0SQIx>e2AhcLvSwGh=Ff}p9cCeKSGsjozqnFfrT~U81NTqe~N|C&62HSUr?pbA5 znLe8HFiUPFsVqduzAS?|o#9m7Io_avU*y2XR_nM_g6QQ1 z^t^-+VEY}k9D(-5n0@=~UmWni)b015-!)+4rcT_jPD2u}=ff8+KdgS(3wTGCVNNIx z%VRhIi-ED2Tm4*Tb*~ie30srKHMb78KDKikcUO6s5Bmg{0c`;k-Wj@ONM0s&Zgwb1 zE#EsP+azuDc#zLOc0WlE(@z~aI-~5_(2b9IZu)O_mI5%KITId1p{GPI6e@hbklT`L zLue87!uF!gs8m(3>c*jvh1&?*m@0Zyf!Kr(NQn7DkVbyM}%k_)e^0Dt3|5)+d zn3-i%{^U(F+if%B?a8klO7W$E-{NUiCqa^eJDicwZ2PRjC2pu=&QTOr;mypm^|Y_a z%J}Tgcc;3QD0@gbBwks>znA_$+wc6IwA|i~$b^h}pZWghy25i={7oV3Q<^|j9gQD8 zA?|Jp!-DiY=hRe1Gxu$LRnX!X=}qBJ%|rj1tpyN zBj^j7$$ZAyViByp_@&X%qXNnbR@5!4m98w+`%?Um=EfPRrVkjxpAQbW!l6xKd>LT= zRUh_uh5qy2up4Re6}F^zUv_;EA>YA1%@lUhQf05bSAs%f2J7h+y+}twPOUl~Ds$bj zo6y4>JKBwH_@XGGwX#a07mv+Eii8VN^4qnJ@d^jV9$Tcgb0JT~Kf*7YrdIK=_UlV8 z%7UMYE>OcW4Q~Q~<;(x{@c%xslrn}&h{MZ}07JO-8?69p>gC~X#8n@a`Is-U0;Pw} zRLj*r?jPqZoHR_V#zhW8>qPkoggAeG!E8V1X|$aAVjpFi-XA|sGYz-u21Rf!YAF;( z58fS&C_Y-d3siyCe+$_Cm0tg^1Z#YD*ho>sPnT#H6vBP>BNB?=4A)q53z`%?mW!Hy z{8W=U6;Ry6NlK6{`7-Tmg`lW9`53BGmmgxUK|*7Dki{(@jDGLW*@k`}0k+ABy`MVm z%TobP*(a2 z@!SWl9wV%Mk!Vk$fdDO$-z#AL%Afyt-!*7e5^BHz<-}qAo@-eM#HOeb59czUrcEz` z|2Y8HZ&}}*RR*^OuEC9d{_BPBI9t=;Xu^A;1^?6SCs+GvWg)S3A;kONIz>d0S!k+x zEerO3`=&3LR{RwDUi_VU74wB%`;k$=}*y{(D8A zxWIPfK(!4#a`s#QQVKc4cO&V~hCX72iJAixQKKpblh6eOtyQZjaD{10E((I_=sK1$ z8p27&jW2y0oO>ZK|7&cu3z6uPHB>})eYj7}EuJnF6+z$6%+liMih1ek}N@tygDf*N#Fc zhUMq+PF>=K4PD`c0Rm_WFm!L4CKhZ)H!^*kvfT=w@SMi`HulT_V*j;jHI-?3-DUWsr!KjYLyx$s}ZK#ZASO` zquzKARv#IAKyePb4CYIvT662JFa9T*a}o2Wzct^OB5O9l#cBgcL<}APJ z4R>nBxm?6CyRYR2AMFp8ePb9Pr4uJ(eYlbnd_WbACruk1n)AJm+xF0GIRBW6K8!!q zXXVW;C3wtw+rD=N&Zsj-Px$tYjiuCns6ANdd+5cm;Kl? 
zXpf02)9`)V{K7Hxj7M`hx)`ZL%bR>|1ZU>aoXn*)bI z8@RRo<)zHaBexORrPn5L=E66E^Tzfc&JQbwN>kEl?R$mFiq#L(J$m^x2Ky*(_vhUz z`rXf*FlXw~HHO{!N$zpG4m7(tJ*G|-1qEkY)jTcxNK%PY>QASy@Tg(Cj7=G>``E9O zjo%?2&%pJj2%~B0gFAqV$NF1k@lJ}DF1F`pHnoNG?eFDj9{gBOKdh>V-;T&UH}r3~ zMdM7*|K;5$*0(5YCWn~D9b;V)%T`SN(mf~&YFePYb!qyIj8fe2c7tSF#LfO|MFXt zwXkz8fJf>!p5 zz~%L|_C#{AIFvbmm?0SaBh|$SZTN^0<78d`N=-b&Ht^%d(%E@0Om4?j)34kk_)3C~ zbV;v+63bSBPjhNjF}}uEhi_l^k<6DC$1rP;-fGn_u!~=gA7q{S z*+Cn3tT!krmzyPRs)k*s)g#!;!Pb=2i;-zOq&nIjOjC}CQGlXZTHJvBu%N8nNF7?U zRaaG=54?}5gv=|nZERC;s?E!K7FL6RT7*HjW?1_ZCPT%s=1(=;{FKSuS(3f280VOn=+HE*1g4Bp-Ug>vMsA2EmB$e2`bCgl| zski~S@g6c7leaAjoMLV&bR|R&n49lt=!7dm42r@7X~Fw}m_ zV0K(_55M?4IsGPaqZf{K8M@kI)%lTD z#FGQ1YJm^**PC#4LJWrtwhVgIQv_-5d|&QQ`H*RQE3>QfT!LImRcq84NPaCUc-<9L z)mo1Z;Si}zM1V>)?j(0JJ~q(@tk47}ZM++=8DCP?$^MEb>rgM8LMcFfY5im54Hj$p z$&bN`#Y4{Gls804P`u4A9Qfgg;qy-*mk+g?A z;QDk;vtD<7(cXB8NK$2rd|MsgI3G3AT4%wy4KL*J$N7U2_w!K2$*;~}SE1%PACW`3 ztz`;FriZu-T>2j)jT<^WU-sE%j5UoSr919@w~n*RO`@_=$6y|2=6DC8&ku6DuYm0x=@b~rE=hbRiI{MQ=hL^!|tW_;o7pq8zA1?jv- z%C*ivElEUNTf*nTJUy*fzV zy0!YDjjG2nsqf+Ypb#T7quQJ=0~JR4!sf%ZnxucBje!P?FB~h<#Y4gDs4Ws=8?;&^ z?>0n8q_xP^kQ5HN4!MtH{CqOR7mzpf##$P~vhu=U+*mn-@b*{=b5r@#tcA+e&;p!o zS3fC(0F^1}S=!4VmRB>sU{iC6p!GUBQbef`gzYDV*0o>j^>ZX<kthkw4cV?8>~qhykun{&G^HOjV=ypXC^&IbzS(jC4Z0Or zY#)vC1w3}IfEBeDr<=!zbR$w&lXMIc48rLZ_dd<9n)*6X}j)wj?4bt_uehi=O}+ZlY#_eVa*b%L3f zSAt$;0zP96#~Dqdf?i{ZH(ub0=ISXd-(NjSe%MMH(+Mw$QO@upeW@aNeX*wmeOONK zz`h>xScV(CW@{%fcsYK|Z>i&`u|J7rM~R-9xtq3KZYi9MuTh~o%Y(z7lAa~j@k#Ga zBk9uY9dwQ4q=E#=_Y&oX?CEi>9t#0x*0K!>>6H#=UoeUHbF%aS(+*1wXJ8_WR1|7K z8Jum>nSN!ZkyK{Kwje~*=r(1XZQRo~@fEz_i3wMeWyU_=@VBfs=-95w3W*!t7vYhZ+OM6AO^xiQkI zinyPph~x`mz&DGQcWziKqLZw9CxMjiMFsQL^~E_;&g1q8HyqH$7)WhSAf;b|kRafc zG{5ZnouAmgx5RiL)zUs>KmWhH(_u*12GtmV_@P=0*4+h1o#il-0oRBqDWTpFyVR+opR68`Hd#Pr7&IaZrM zy3d7nm72!mdt4gV&|#!ScTB*e3aohijwFBcBk}!=^n}Gvbi0xJe#TJJ>L&tiTk6C2 ztOm*43C0?waW6;qSsfqTKfQ7-Qu$Gqy4I5!hq1M0=(t6~^V8Gq6KC4Z5op9x-ZO0n zMt-i?`_-be^EcKpfl1Qyc?Ce~WH@}&2jHog!TDDaFlF+$KG$qlgZ0Tsi%A~8ae4&W zO1_|~2BNmz%_ey{Kty#*KDEBeWWvbi3pA%1p01JjdS0`4e&sn0u3U-6uCtwH21**> z-liD*^(@%I5o`g`N!<9U#za|f#`m0N-f6R0PYEzrPtHh!mqbm6)Pl0AeI%PM&O)*x z=n({Lej9p>E$tY#?avZ4T$47?yDV1!7~y)cH8EaKXds}M=R=BifafzVQj@z|}r=9MvAKN}ZuJ(S_FnyIuHm86! 
z1#hI%hbaH{kv&Sw-OhlR^PZB>R3q}}$V`fVn0-O?tXdCX83gRBQRF!FbYd7+h*El% zfqW3xD`Z4)0FXa(zVE_w^BeZt8=4S%2+^jBXl(rnWF4;dj?Y$}`>v;<2oFNa`Rc-% z1{gi%P?~KgC_JkLMsrK5n$uckggDT2z~9Bwc&=gV^Pcra!0WNTrt&E?zs~wx3jCsx z9$Vjg<%Ys@s*TlWj(pg5l+*Vm@YUdY%4wt*i{w3L%SH<|(8I!>b>#JG)VOz>ac#-w zt_R`fo3kD-URP)oZ+~Mm;F-78n`j!kM}2R}j^2q=+0wLCmm+!|dKg#2nuJH=*5GIX zy_Gf9c4Gj|)ha{R=p0~-1;x)h{CzDOLcxmQs3Ua#JUgGv$rSbx_cZcWULM_cdk7G{ zhL<9io;Y7#O8oka+uybACuxvD!X1lfmfUs3sC0eDx_{0vqU7}9yHzRSu#(pe#?jjD z1dmnRNc0P&zb;{MDz*>smsRiC0781ZtHh{@7(x_k4nLfdB&a@fqRKOdS*yfjJ>gwn zMJIqAY+eoEOB3P7lUtfccFw(O5SctI{&4Zc&qPj?ev^*l7TBj+LClKGqc{0Xhyv;W z_2UF&BVYF6Ufi0DxfgtPm~J@BCu-iM_J#Eoma#09Hr-pa&sGa|X^RdVzogN6#_TXa$U5G=HWvVveF103`q{^Jec#{G zkoFto6j1COX>nJ5fiTpQ(b0tEOZ4%fE~kqJ6$SwiFeG|}T8IPk0Kk(M*7`u}W<7#7 z3R5kT3`59!k!}r?FmA3zq{a1=&I(OB!}k*H)8HQ3I-WV&ljrApT+Q zE}P8ly>YK}5kXcbs$KJ5p5wyljFDp)-S@}2voHXg(>7QRVVBA;2~V6Fu_RZqIx+<- ze!fFCnHqkN{G*sKkL!a<&Nd-%QvQFtzP~{7UcK}%$Um;3Zmo0UAC{JMY2BI8pMTO1 zTYbTbCOL!1NPv_=+(brGegF~3sXCiZ2KcSvh#m$G?VkC)=a`>rqh z_GmFfzDh6Sf)yf)I344=@55TD_Y&X6mN)IFf%iR=@$I7xn<;}hnyZ!M^?%yP(Jn~5 zPi)&eA)!6}Zml>$!yBy9@FxAEboNT9=8tF>*LwPFj0xod=4PBpfoxSf$IfG{DXnFt zgFHMuq%S@Mu|WuU3`WL1&u|HE-=20VVVQ04o~fl?uq~i=^=2G-^lpoiT5%-;O6P23 z)6O%jQtx;Md#{BjYlx>=1Npo zN$vOJb)4dE7)zQiSr%SO4;lV(uJr{5t1mu_sioKT+eZpfxC>x&qHRR*n)%5qlQ9|i zMH_$9SR^Q3pt2kq>6OzQp4Th)p$K(EU`cn-AQXape1swYd)WtlI<5&I0|?z z+83-4G8j@{qC zkyv^av&R~=S-kp|Fbjb^OFy*#Xw5RrGTcNr(pV0zIfFG zEN+NzSMp=p;?@fR{HiW2fjl9rW|Xq6eeJGxCmC$`hGt;? zQ<>zUy{4X&!Zt6oyie6&HCh+bG+Kh>Nr>xe@YSQjiAJIK+CvHBUF4WyAyIMAY@*sa zTYr{Lr!d-Tzl>z1BXRDyT`j@49*Ge_xfG-(YixHf&m(&wg9|BIza`@cdfK9a{L?p2iL4sux z9b9dmiv}=T`_3ohQob_)eg{0Iyz_vlZZh?`7j$=U$gR$H4lDM*0vmzmzLVb6Mqf*& zTLD~%Us}akdX&)0-&EnRc}^r(ISv+#lwdW?@%dNX>M1e3_J1dtwTH7ycS_fBa6;!; z)<>F++QP3J%HV(;>hoz*qKo?Nz&XaKLRy^}_{+zOsg`A3TDK^$7H0q@mt3VQaqtvr zygnW3so~4#ZL*uRRx{(%r$uJ7n}ZPyfe#lb5M3#jRs?Ww0D>sPjW_UdtUq)t5$-x8|sO(r#mpHEpfOD=(t=Q9+J+PrUD2v zivZ#DaJ+gSE}9p%4kL=;HM`5C(H#UQzecA&k|w%i&(Yyp5NOSFaOqh?-;FWK?O;ig zeQs5yX_MGjK9(yjV9BI2qg3<=?VD;p@I3>gXL~~x(gf6qq=|doBTPy$m2k6Z@~Sc^ zGx9i~;Ap+Mh$|8C$vjlrnKql^DbpdOk1pef@_+1KQe*@H*8(pAfVG>T2WH;s-oWRA zi8mlEz-#hsDAj5fXCx?d>?UYow?}pC390;UFIf$hSw->E4?UI zVk#Ud-*S>CNM#2`3D7x%-BW{Pj=9MW$+Z1(L8{wlrBUKi`ypUMy=;LjF0KW5l7eZn zDw|5*=Xtn6WPLBl={2kM1j6=_b}@&D##ri0*2R~r0rO9_mWPH&iH$V$wdU$Ui(|Hv z4Od?)Fz`nc0^jLt02hEcl;b-`%rFMt{y^#Y0prq^k}>HHh$ z)Sp!tB>!FxhL#3znCGo}^atTyRvFyg{v|R3! z4iXcTv-=k1`jW%(1L<+D5V;XPYaLxFJs^_YEAo>BmGGX_d^ z-Ox|I8R6ygD3j48Ccf!F4?Q527u>U8*c+^I`tG_BY9~vJeNCe$z6J<-P2_zE)bo%> ziw^w+HpwyRwEKc5h-*p|U6-8GQ4!2b1Ggzjy;g@YC{pHzy7mI%^4UJDal@Lmug#NN z)9RzFk_HF!*SqRR*~JKI64y~jg@Uc+7re!)tN% zByq4@56uVJ2)7#w5+Q!ilymi?@#nNy$8?QfEIvTqfN3Mw{gUUI5^rF@zTwWxN;DCv z_m7^bv+L(yTMJCipiVyngk3iq7yh#=QPWZD!^oz4MIw6s6#;k(NPhF|QoVBwwm<+a zrUUf`rl3xtNgA9ojX!#eujXBz=G*u7H_DQ?s+m^#v#R>Z$E+>45*HEv`oWJA&TZ`v z?=@hiHr151t_32@i7;+pNhynFDneHJ=+m~jZ=6~saSS+o!|!0v7tWG?2gk_C{`Xx< zZQCkH_X!)Tk3M}qY8(N^%6-==SR3VD3e2DCRPc@#+OQwHWP{>R)y6da zhJ)r3GhU}9ssN{u{GDXQUEdS{v12VgbZO^VpY1~noI%dq?evknz8Jw1ip2U-O-hpq z8BX61$& z>q7UAM;|(^{gjlibUgjqj)A#HP=fuj4#pvo76G*ay4W zBN;ebK#F}9N{9I&3lw5hd-G%QS-xhcJmFa0-+q?4Wkwv-G5valgL5g% zGFIGXOA;q+1M5yKYR~dUx>@2NR@`9cP6WUa>=bMhkw407#%snY6gS}BYq=)8oz^bm zRY}*{?1sba0!dwLgN2VhKwhD8LS-|vLZd71@;X%-_llEbS`2R1?071^gMFIUC*WXReyjsI=%jp zIY{oS5IDo3RMXSoVc_BnAq#(7Hoviq2hwO!ueSSK5C6odt;R)6Z{PRhsv!<&7T#zq z_CO0E1fF`J#R|qE@&WEUb23aUT9!nkxLs{Z(JLIs68MbY{7TlBEXdB<=>y+9gC7Od z*P{V5zKy6$V>U|G9e!17N#Y}^;h*r*X^S>oy2N9b#j0y9lA!3FE;VW)_v6OZ#*jb! 
z#4U*h)8$SQhvP7qZ}lC}rLbT++#z8n72FgCFI_rjtw@?A0b7B1fy3BRuMnjM?w5L| zABhq3X(0<VyDCj1*G0F|hVyCq8b0N6?h z>jGIHlil!zwoaLUZ+B_z9}ssl9^|qAY~3cVIlV*-*$7n!pw1vgtoi z`sXqF$#$%aGpBvz;rD1>WDKJHQr1h?RyS*?PO+(_#Qz}|N1&rag-GTIk;@?^Wuu$X z)1?FzL+B&j-!VnI6MOq`pB!A(so+>nV02}lj3+VFzucisDLX53Ifz5$O|-mqN$G7_ z-@!p%=k0KU`zdD{tjfkcAp<+OLE>JU4>N~LfwX~OalJNfpU}Lfx1!`JT_bp)A}^)qzYThzk3b+1ffZbZ>%bdlAY}9;E-J)B;A8u2 z!-DK;V#< zNjXmXilI7gc^kdv)*4FuK0WW?)m2*R;c))&JOX6oUR*2BxNrEWbT%X;y$b!4r;Zkf z(wKVIoIicF4rJ4cMxE~>+~eC%U-dEfzUObp0F@i*w`Rg*v>v@tgqNnvzbc_#?&FcT zV}k5~qm6;5a32nrFhK2Mz%DrXMGfFgSmPE~tMIr2?j;J5z10T#l;v}@@y54M6`3L3 zbu+-JO9PJ@oP?J&!~ByJ4qE?r=kul<1Ln6>j(4q1I1fP8zMT+&f)|1+QK-3a1rV!L zppMv&=>47J9SE7#dCv+2r?7f4<+&DSON}g9n`aqj{Yw!-H>bhT7j8xH9*%FaZ2SX- zT`OT*7iZDNtjqQIg9z{TD_0ZaMR(!TD7v&cyJyTkb99e-68b;&Iw7+CiDOY#d@cG> z73Ug0=~Yd>MXMC+F5(^Tbw^X%O7gesnLgt`C5eNClVbcv9%tCB>op>gyOL9cNhexF zJ!9xV2Prs3NN?Rf%$K%#!r)gvfg`;gG{0$oZA=DSW` znx`5#P~H?5%=<8&)&bXkq)`{`B0i$+5yW1_83#E&?O5i4=H%+7@hHT#KC08`3w6!< zViJ=f+a29{ih>?vha2X!kXOyL=SJdh_`nlDN0MlevtM=rqoaG{)zgi0?29MA2&;vv z7OtEbj1SXp>=jsj>1kw6vDZI1g(G6FMCDE z|8R5*_~GOev-W&KLOlKASaB|%l0kFcJd_6rzjG`(Qr~|PRpILkDJ>F}VA|5WrEzHN zK(k&g-N)}}h@8oYd9Q#oL{Se*OF&fJZ@|)+P{8xG9RAf657loxUZ7678Z2{xQ& zwlNFu4A>mBbNUYA1(41-a#BBq(+#rVyiVyslC0wW3FTWO3K;E3%N#hHZ^W?n@D~s0 z5RCE`7e58Adm0x$dMZV&R;$hdDYC`iJ>!p@bQZL}$0KwlSx+O~Nq~G$0IqtAE!CIK zhc(7V3awEDLdwj*lV(W+s7Ws{VR;*5NA<%eM!8=N7uktgml#<_%#VK5lxY@ z9tCY!DbCd0l6CFZM6}Ry129q5NDmPSSg+tTXh83%VKvmz1=fCkWk<8| zr%K{xv>$PZwkULTEtGly)~$0Cl76xYjkl__^Y@ZQE}CzMs|rCnWjx-~OXnqG%Ze24VJ}19-6;Oa12EtPFmUKD+W+-Lii|s; zsDc?~!Xe7Vt8wP?`70B7(KAp<6~FF~7?@ixw~(=$ip@D2|h z$?$VpE&_y+Pb!j?=ofP3Oo2lq%t%K`3#Vh_(_N9&(2;1vxzKfwfPQCKd7P2&h~{^bb@!{VmCAkltS zstE0e{K=!4qza3>4F@Lj>agEC&b;tXZ>g@p^4;FFXK{4>^(78Kq-?|sp|2~!cFX2m zd`a6Xw@&KOM@-eduYev#LZK~&6G+e^Yx`r96o6=e+oFL}rf~a;lUM;qyoF(kOIgCG zwn5z0IoAX&lFi?kB#`~s#u(zK4VV*=ccEQq-Y2rz?na@9X4IIQ^LXs@I(mclJo}8~ zwM>k|Ujpd7!hESy!cFd*b2#l&uR%Lu~6+gjop96xph>;kt zb%0P@cF5>ix5Elrg2xJdS@4z4284ZyV#el6!K;5k0+9!Mg?;=og$pJPYpmkiDwm_S zc^G*6A)jc}Hol#wWL%dLGiw21_$DfLC4Kl#zBw8^G_k`}%7~j7ke48y^e>fSFTEE> z$0KHF)K+~3^7t_pHMXh)W})QC`765h2Vni}iLfQCdruQb{{%O!osfm3F4fNsS~- z(~bm9PTtpK2$9S7CFTBDAI+Gmu0%ZFZC*WxFQN1-rxVQnNdu0~1`1*Im?w$;6*&KM zCxqT)!7)S{UkQ-E2wgJ+=SOz3qnHmu|EX16B)n}RJ$r<@wVj7$h)zG@X;5X@zl3@I z!-W-u0h}LJWc-g=3?slxK2m zeHn|CD&iTpDa?+QS3Lf|e0~5N*lh9D8@iv}YU9UG`YT@(Fh&RBnm5X*(^h+y4D*P7 zoM4rwy&&>88rS0J4R57ttg>JTY&5_N#x`&A59EzgPgl7Ia33}U^qczLCK^@d$E~^b zHp!0^u=A<`C7gR+rx%cW8UpPvh{|vX0UL4Q`>MUen{+%=14M`2RU;FH^AbxlC;PXY z>BiS-d;R)a+Ld^Yr&J z4e2^g*tsQ2MaS7oN(+r}dg_P*>f;9NKeDxRc3xKaNx=Yqj$NItvORZJ3~?*uTM>$r z*1R5mTK;KI)@L48iy}3q9_!0;6^q=JR<$j&e5W5&#`D^G1!DhPvy+q%X138BU%=M$ zQg)Xqbrrr^U^`sWQiK-yuZ9k}QBJ%7RX(jLsK}E`R8_$de*rBRnw8`PP zT4{YI2I>Vkqk?ERKi~#umy{G-t&xXYdxX=N$c9_qQHM{0rScZNrxJL(<>YJ$*9gI- z#&1Dx!xR$B%1Reg2?ee8G$E1VN}s~%($YNR^wicRBR@{lovpDgN>3)3@arv<-?+vG z++|(9j5>)HIX8%{A04-2aZGd8Eq;zD`rsU1>>rosHqq^mA#YG~KGs@7@6qOdYxoKQ zFe|)DB_xZpsN`E+pf8J6gU(*Pys+s`3bBnQ546@hZ=~#qksFiE+Ypqz1N(F()jHjU zSnxq^!R5*1GQY#dfCxFm@2F@i9=76Zx)1s;-=Mzzz@|5Q(w0Rw)vFU>)kDW)3>rR} zUM9(kW<}i&u}v*7z9dIS3);~;gz$BJrDJrAQkE*s>0@qulF> zFWs>wwDi<5-Gb&Ys^GL|!Qz-8M;x!Mz!zRXiFLSf z*VAxvZLA{gyhXNQY_8Zhuz!@>AVRZEO}Ig_dVxN(%mhIKp95W)IH^fK=}El_ z{)1`EXlLhu^*(;>C=qzCkh9j|5HE%$9um2CP19KwgHMhplQG|wML>ft6EaWUOgYR( z=98xJ=K5AC(CeNZY{zHhxQIZCp+sb#B-)?*j`ra^)+Tzm0V&(1q2Um&XNnz# z8<0O1&Vo1POX00honR5&y~}52v5pWr`Y_oD`SlxB6e5i?R?cDP$oGmn8 zXNvVK3}w5B(=L8tX$*t}Deun%DUz7i%|DaALyug4Kp4o4-QneIyNEI@Gu}PQy$y01 zljZJhDRs$<+))6}E4=ngtkwW_eE!1#;3f#$65tbo>YrzO2{vu9W{$gGrn&gxT-$n% 
zh1|c1@3=OPZ>CBv7fUs|escqQnyN}&3$C9jV0kOK4luipji`Qofq>p=k0GFpQ{{S% zD9Ilg{1H zRQ|;mrL1oyPu|Wxfr){sz4RD{Dq`ccP~*aw8f$A;YOH=A-ntQTV+iR@NeXr#E;QzY zWQ#9^<*mCjGG>f_`MipN*85uAYm3})QuKmONw>+-Pj{8SLmEV?QYZt}vZ>mKD+|JB zgcFahZ5)sT^6xI5Dm*Ou0NI+{!8W0d4lA*TgL=pSW-JULK#TNp9fi8v%{pl~0X3e0 z(2Q)C)}6Z;%48xQWYI?U#yEb2yH6nRtryBXoNGUlhZNi=rV=fzP?cg+Q)!DX$52ip zc>VE~f1@58xB#*~8xfbZbgDniQvfWrqQ>3hS308bq{Ch0w0K!y%yn(*TBOkWoHvUR zA1jE%y`6|EDIwC^!VSWvgs|<--u~I22TXUKWkDqUE6GQm0pfcRrjj*#;XJ5|sSF<{ zpvan5=SXY(S+&RHALg+C^>6X;uH=%xK$t()4g)3t)7&zU)ms4+^PF!nOK3VFklIA` zgXONyji{F7fJa|&Z%L9%>I5FmY_&75iEh(QiAanY$Pw{JFMv~Z1&(m1VLOK48sFiR zFDdlLo(e7eBBuO95(WNnR{cr8B0B@z z4f=0N&(4ghjQwAKV-+Q-PN&=aphTq5F5qIk){)cP=28F)| zp(+!nKbfMsJTMcdo^wHvpc(7W-xP@x@VqRoscgvh618IeKN(z_1Nu1mQ|;5afNKZh zTEnX!YUPCEjjw#lNG4swENXl7u?IHSbp0SW87X!-dJM zG+b04@KR6h4vDWA^N%DZ4rVnCY{#o2_f`{c%@QAU6Hxyl*uRJM|9xMu z3FmS}uV42yHXyb^{Q@B~rilmNY_#z#o9O#+q#d|>>|{KqE|^sTiXGr?9WFjHYijHb zb~q_*hVsNs30n33u(P^Ke48i|rq!~uUCvREs`e-S{FiF|f$wq1V1~_tH?Tos&C{gR zM`n7dJRwO)5obS3(p-R<7~*%(u$63X888}@iyWBqjp~UKIkhejmFm!xm6W?Fbe4=~ zm_3pjPw@?}TJ?`j#zK7C2W|D&<@$d}FyMizJs0)tyt!QCPpSWQ6O9hXc3zz_6T>6T zdl3-VR)#%eipmx@EtINm9zGq{e3;ImnfC&GD=yjNr;xA%zijlpzKK=gI|_Lv&u@o= zw%e28=!`UTx;}>%&xn6=tyQ~QDfs(K`;B2e;}ZJxOlJNvCjZi|!Ej-pd_7k(UTJD@ z)g~fnrUbZ_M9hS&)MpbNcPTbtKpn9{?G@=u8!=bA5GL9=ohXn`xA4@Hr@Pnh8f%-L z6dsXWpC75uoU#`Scb+>e0ZlKd$T;MJ*q~e~__x!eNCM2zRsQ3pfB(tA9w1cVY4hX1mMx3IW{!%I5s(m+XYn4VEf z!h8!%WY#Z_A)agp_U#DKMFGYL>EI0EP{px-eBovBGZsOK(~M9$`JMipJ+-bR7s!TA zqLGInt;9h&big?_ptU3859>&!7>?-BYeUUDN5t@t0p~~=CeN~-4ccI3ACP`L&oxH~ zE#?(`Pt7qvgbC;h z?Fd{a@4l>D92hpW2yt%WS7}sG!49^I6oK@@wIQb^lW+!Mt{-doxfz=J6OTzg!XLi_ zTcF(KTW~_?k|~qReHaJBJpQqC{+1aIeV!00tU|6s>e6aaNsYkk zRjX|lO>}R{DeZrb!;SrrEDg%M8q7Ml6kIR?X(c^NsxC+nXNG;kS(2#Evp|75=nz)o zE~oJQD5G%r!C$dOSKA2MY%%}XpZ~7RQs0%aX2m5+M!UK`VrUxJtj14rXo|f{He}YQ zk#v_A4zMOp9fCc}j-3aXn#(NejWdIc-?)-eN4Od;`G)kcZ(PGcOklTJ_B1Jx;r|P3 z@;m#3kwnc7z&KXO0ZJ}Hw}}NVkfAfPs|)vQI<~2yzG1azCpeg2xv_cfTaaE%c{|-c zrU=7nJ2!|2cwXauw`a2e&>D&fd_R6Z%j{#%d;JfE`NwPuZ&7@&Qp4*nFV-l{LIAzt zv|GzrQn_%4)Gk-RRpfrtWWfwJzWOnnYa#U)YWmOFQM5#~p`+~CzjdJK^$>3x_a89) zzU|3xE~CjD`VZh_*zTMIfSSk3pB=1Ktb zYx*Ch+j{mDG5COwOABv3X81mZ{r6Xql_9!BFSlpiIO<;(mGf4CT-;)zDZ(wt#rp@$ z|BHM)Fn1_lcky`Hc({IVUl3T>aMK9*8ni_F-@mY*9H!Z`Ey3cEs8%Z`#cJoYSU(27 zr{W)C|L-g7|Nq(l`x3~J{Vr1(h)wguR;i|FjxCw^{O@pb@M}BoeiA*ij{0r>gJ=7< zILw>7{hANzW!$gh=GZg;rlQ{;5GTv*Y2Mpvn`*ImDl$t@K2HB&_v$JCKkU7ASXJM) zKCU!U0s_*Yl7f_UD=DSY-J9;t4I(Ao4JzHS>F(Zich{!7`7S)4^SS5Vd+z!B_s8#f zd>)p#9@m_6jXB0U-tmsP(oz4#B89lI!*41YGPmXWa$PuxeVX$WH+n0~pm;3OCVxi< zz(;$Zo_c_&D;h88}Z+9_kZIz z3U58&CZMAAcY)$v^q-~VML z|M`@^-SrOu(X01=WBDg6`*+*58}%^(`IZp!pVmo;1-L0lmHA(i#{WG7Zc3ppb6I8x z_J4XDEj4fx3tH#jWBfN46f_81@laF{$G^VVe|;f;t~-F67(p#B|HVlDmqS_m4kQ-b zE&XEu<|%)#*3Wq0rVa++1DgN$z`c{7|J3K8iJwOKPmlZmjp_fh#)H;CXGofRYegUmOz~8o|A(xKfknYxzR8vx3m$;->reQgQdB zDn@tLA(;(j0JsF-%Wc%3%p?T8t8KI28e%D7lKwCL;a~00TLYWc%}N4atjzf+ZDT7v z33sNLH?*Yln__%|>c&gb3HzPLBTWd%Q(yH|4z+{3S2It^~B?OEY-khBlT7reb zTuz{U+iPBD(N_>!=5&w*eOrtm==_q;InQLly{@2=Kw(;n>5SEGbD%t}My-N_<7VH@ zIh=6x-)|aVcRtgj^PyMFOAO(({QMzbNt1@gYfen5NJDqQd6jS!=)r*Ie6gxlHR*6W zukS@Cz5cCglw!J(>ZaA>mTg|ym4;uAjwX|KU*WP6m}KC1oJ%;-zLNVqM-`$JIA{e; zB+mR=AHDY-HGPdFRqtKXt?|i(`rW5>WGW4J8?4bhMRgz5ok>ln$g7Pc8)1<2zu&jO zj-?GCgjASKvcDE%pnbab9k-HA@a}zsh?*+-?b#&x25{VaDRj3&`6f8gZ8K%l5Tqxk z-sLdK4?3UTED7G}6J0hB6(qRBFkOvNCgUt_IFIIPNkTMaD-1@l-?okO&O;~V`AlY= z(U{!qv?#+I#_yQkg02mGW@Sn%Jwo?v2bOqq0EQ+zObCF^cTG0@h-G6`>lb^>&oEy1 zu|)lQzuo&yNVq?qF~~d4C`Bsv){xBel4#cVBPREFvURJ=`ZQ^!44jLc&PG4o#*CR9 zYYNZamsYwY(=N^#WBr5_fhnC%{)s}*;~w>|h2PHq55vZLRDdJuo5q~(djwsb3?hOW 
z&N!IkG##}~eNANGVuGK5CR1H^1~%HTv@2d)4gZd*S&MWV$tpRU;7y2V7k=DHB=@9m~~Dq>{oPCQq@8nNak@^m}nEJUiPclQ^7%P>@HB1$4#Pe*mrNJ|GGG4gzXXk>7jyp zUAxYaAJX^k#@IH{dL(0TjpWXhVguq#6?21YL!6`ogVbNSO-y2QBvsIQ(^Xj$g-dG~L*=~}R za7!HJbiB#kZdnXN%^KHadyj;;g${Ozb-F#N*0DTcw!z&BxQG ze|PMIn!T*Ehi9!W2*lUu}E{ju7do@vPZ#%(pLwSvrn_+(uNZ?UhU3aaIk^; zc~`0}^iy3naFgN+kJ< z_?cv}3V4s#!=342n4E409sx4s@3+33T*Ev*O}C*IdK?>_?_R}zK`s+y8rLr{{NZ>$ ze}Q5LWpCKczYHQE;J&meAJ%R6v zP3F^g>soEMz4Qr_1c_78CG>;Y3&+vo#d$|~25rZYk6iXg(;mRWeVXPl9%`R+KN)Sk z8~W;!Ky6;B%678kV?|T|3wF?hP^wg2e+RzyJ}BUp?QwbBMUK|qQ*XkN2O|Az!+y?1 zS>invd@wQeeDElm-?D4=3IvVHBqCr7^eL_rg(N_q>#K89in zr|@aZwApMUw_Tf(Ij&oMBl#kgQVkL*n#3-0@ZxTxn>u4;jP-b);siH`<=kiWMd~DT zYRHF+3fa`2C_zrUjeh2Iu?Fh9OO@n2Ttj@=#ssG9&lJZQ-eF`edlsqZBN;tP$M1_y zM+N)82S;8kGTBO0nT)oWS1*fw$qKBuze&rXfJ&$aKa4B8C8hEgN0Jp*Kw?<*T}CpF z9F^X3*w^mlt*#N#E=La8qX^_EsWYZxw4Dy;4QJP#fCkA? z7kVl42K64kJ!I8!H8=ahrEk0LCO6Z-@nel3Xvo?^xiMscc~!bI&GNzLn$>9FVq-)q ztmNWHp*p$2yC%MeIH|skOP}~?2en@pO$Arcig${63Kdx1bJs`eC>N*OC=2x@%u7#i z#29-Q-42#;n%YKkqYNK|1qB`)w?n^Pp#&HBd=Ao3b9eqUkkE9|lUECbLS}I23WTY} zG>5L;a7sBfe}d*@n@)DyQQI&FiRG7UUyj16m5X5*AnRAV!>ghO(~}g}XT(OUmp;m@ zJod&0VT0V+>b;n>^-2%G|1x{LH=AbO#swbP11Hy}o0pIXadd@ME=3o9(dQopW@2}_ z{7?#Ab$G!=DjrIrCvdat0eds&c{870i_eSICn}`qddj5@dd_FKlK%nW*>ZW*#u(NaQWZ@IdA)vp10``#V5?(jo2!8s)cEX~I{`70dUDOBl8=n& zbccP+ytLyyu&9;$M@Z@6Gs>d%^iUr8tQk+L#T% zM*E~-*NQrukmi}F^@?%&si!^$(OBQ<0XU4|;M}N{PZQbdH{73ei~=}Xbm7*lkSjCv z#U|7ZHJjl&h+Q~L@LprqfzP3nCwUsD;3${3nB0CSuxxsw0k~nNIk<&wpn8VHrk$`- zvVd1rDe37pQI2trV!kuv+ez*N{%1e1byUDfv8<|sa)qg4U9iY_o+zvjlkixgcBi+ZWJx=-AO5zE>Ftwe6%rRuSDig)#gp*XUt@y*`gHjIY7 zFxKGBv&oyFlo?!Zou8UdQ?m zv3k|4pB*4WIgOibm)g$D!akWz)=VB3^GwRU4xZ@)*e4RrR9@@zNLZe&Bu;Zm$4#(W z;IWgTHjlY$H=cL$K>WNpWTR}OPwE(<${sR>f?907YQoDw-8(U(fJ+lvR_p>#Ygyl# zVYdnE2ZAPK+L@gsiO^8p^Twj65C`(C#6+H?C;Pr@+ln>mZLvGEwb*-n->y!BsW<3J zUlm1KW=iR6mFPh+KnN(1apV+eJI13YcR$Pv(aYlIKt{viGCEp%TD+!@RQpyG#$8+y zx~Lz<-v#}F2Y$Fy-c2mGzSZNf4A+5YJ$>5u?e?%OgCeS4dh~(q;TEC-qGx&+kkIIY z8kzDB_A_S;p`_DYwSX-REyr0XR1z*h!X=a(SMB<|q0Nz8N@6KB7$6CrOFywPb=<91 z*ZYceUejU=nhICr%*TA!^QWXo%L|vupJ6x|t6AsZgTaAr{*<>UA2@!6i3rM-%x;!g z2rn028fp~(-mEf(D|iwQdF&ZX;~AvyjV39?x)+H%+h=DW`yTm>Y5e`e+jC174fx9s zw|iB)Dd4L6B(47flKm->yWwf2F-gG3w@J~}1{ z4k+T|tAnFvC7yR;E2oTFGi_G>RP+=x+F(xsGJR=!H2i+-uH8`A?(n~IgX732)X#PH zoaX8*8?QJj!vL(c=qav0%~0^PY1Z;4ZfaxjFs#I6lFl>vRMO08LYqs`?sYe{2FZzLTsA#2=3O&q+7nZ*vnUpV+h-NS z&4U*$v^ilN=n@4N&o*av#`B>?b^Asak=8W)#x(QC>%-tZ@mk7-?`_1U&Nn!lD7akA zEWbbchX#6lE$N@)J>awAOzu2(2_a&~ve-`p-^ZRDS7>TG^hAm}Vm+5e7=B>)6#!bi2{4_2Lrhz0SUZLY@AT;haRc>F5Ei&*#0R#x8MyyqpFx-}e@FZo7>ac54wS zyMBEb#2hhIra|`v9fkgmf0zK7$Jiw1Fn9MA2q4<&((=7+X8g|!=!n+QJD`*k2kkRy zf)BU)pDfzhJL51AVD6nU?mOT)fowDXGbzexzK88|D(&9oE@V%0A&&WS6N4&_*#gy9 ztz?jco!|b)?c+Sl+^7L8O&7jcgj@S>?U}fNlZWK zaQtZUW+W-otaE%HMFUoP2EX3}O91H!#Bk)wo)xlw6BE`Xl>ECiO=cKP+#4Q z0!f5pZcVKN@g)SOOFrAvq}J)c{GwFiP1f@SX5DB8f5$wB_cuYyj7@KgcL}e(nyu1$ zk5rB3DWr-u@UN-$=pBY*A2(*1FohKf1bnAhzr>NMW+*jA7&QH~$6&n|To?#@BdbIm z<6d8c)e_uU9VvGgHoIxRXFq&qfZ1&iV`{fm5fGx9X0O*19X>;IbFh6Ks*3i!>RrK1jS;_gQC7CdNlAca>R)Z9o#T(MwS zBGC8=V7hvVt=(Y1T6~PT438$%gYXM+wkj%^w-nJ?LaKX=8G)Md!SW z>n7%P%$av4aHG(9KxQaJMCMDbh>r*%-3RjV`GJg%te)ve^DuHuTXTkBXlB)%t$2RC zOCXF8(p1X5w;C4Kt#n~HU36Q0&rD(d3O$>FWax5Ezw)r|0F$}RzsbNTRnKGXV!^G& zQ=mj6t&Wftl-p{9iOhtM##^7@?{*Jl4pnT{;vudw0zu?`ovpaIY&M0Z7n|#G?a#Gi zPqZ-JqJ8my4cchwsa$X+eq%wMcAZuZAtc&jFX_Oa=6xV^%$*BbRlFf$pQwU>Hlay) z)8upCKTK$YKaTzsUj5Fv0=};ivg~g)Dl5{`tk1Ie{ga>t2UpvjMa}ZZ1e?>`(Ko@m zpJDTc^49WyOCmo|GTUznZSSbThUy>TbcjRg#4RELiNOIWv%2NQ{Hz})DjRj_08cWP zXi^0Je3HPOJr<0)qY=w`Wv}jFnQ8|({xav;^+=(*IDar{D_szBQ~Y$mIsrws`2FGo 
zX1H=bP47K!g8@So&4R0sU5^9v*_KVC%U(rxfqt%blTGO?bd33rP1Iruk7mubFOb9Y z4qt59q;j3as`#rHYkib5XF%KBt(Z)GBL(kHC0m2Mdk?p7tC3LBcYe7X-SJgzzV=KK zunu;S;<|kUHruu5H(t8E(Sey9MKVt4)BE7sJE0oSyZuTmx{_R05PzD7Bk|2=csBn! zM~@N9g1dvJKB&4Czge7%^(+zVTF-a+^I*0j7VE~=N;shP8F3nYW0zFlL&qMGnsg1*76w^ z{e?>2nKlaZ`N6Y^`~=#AfkT-$)PdD(sG9>RG3~Xqyu^2t%FV5*8hSD;^*qSWQLH)%- z%ehQGyO~m7@MuvfY@mV?bhC0TczgKpCb5&~H|N5XI)}Y_R~80<-d`2J5?xYhy58|$ z=PT4dEtIZD^}{F#GYs?)ok9q+mQVQ>4GfzArsbWG9;v25EH>DFEA4STXG-w>BCcb! z-6*Z0k6ORda;m7z+S;<-W|)Up+w&@-DKVpqcXxk7nsw zt*+9iBDbji_@zxXbUb5-dGyF%iGlF$W>G(FPG2c&K{}_Q&T$7I_l#{{wF94FwPFn2 z`fz?pxzf}VAm7Mmj>&h3Fw4+cG@uL?6bp5SzMBg>4JBn_iYP25TB&F*ZX#2Eg_T4# zXFm$YHIy=NMXoj(O_~LW5Oh|$?)P(R%j}#INgPy0aCmE8Qzn0+jrS+fCl^jm3s+z1 zvXLlAtc8YW%FHHONxeLm#<#5h)ZcnF-`oXINGN0xaGVs4#pT=d{!~{rr;f z`guk4msp(rnoYgBWcXP7L$kS?GX@8$Ow*DY9BK}sok=E7d3seUo<7Gx{#pN`Qq)a` z{RfJ8@04QO^%yLq0Tlm_wsD&0rStFlkkQtA5Sx^pW=!N}5j7prkvCJ_q*{mbE)MpS zY~r`PGwCTvd9`%gehQ}WYR5@*9^=!iaOn2OJMB^S)>-7sCYP%o(3iLfVLM(>Ws=Zz$ zFKB`%t7D5%lHDsIw&`;L%6T1tv6kcS}YsG#*Io`c)md5l9worD$9jjT52Y((fas zFsq7PD=a<26XTw-_*~_r50q~^n|eaoX{ZH07O|vut~!V(6Esz)eQ9Tb7ekb_f=Dj2_e|t;G^M%F#=F)A47shYQQ~L6khW&HVsuDpE6N(j9bc!LGcx+cvk8`-3wnc1Pcd>zEcDCVg3Xe@E0W_$NVs zC#COISy@bajkEtQmkr+ghGxlzCFmY9Ken6^rKaL}T~4z3@Z&96^rRoh=&Q`uAg0FA zA{s>h9M6rV{;seOe7;@gL>{!P?w4>0XN{h}F_W|5vCtQt;jus(Ej;onumL>tg`cbT zqu5;=2%qtGSLPs_NvVCs`pHMBu@&;lk@CXiGyz)+7@Kpf7 zkCs!8ZdxQXiBrlMZ|~=H?r4Q$*QCOPcw}oSA`ENZX)zy(`1O0Mh1SN{27{hgFo|#S zSP;Zg-6s>HlGGu@R=UhSPm1GKE8MzAGQsNE3=>_x5Qa~fg11e9**5B>_!Itp;4ZbR*=T!P2BcnkXDeonG=eyJibSBzJkYfa3$Kfe*@-d z<6RHy?1}2&FU#GxT=A-blg`!xYL({t#f)IEPiS%DHMY&MfbJywhx?fdGjk8?-jz=L z&~ta(V7sB8l4T1cbdDyNr>c`0v+gGrd(x{$X{2UOK^Qx{MHXCq4jYWgWtR-e9*)X( zvlfFiDK^0suvAym=mNvDfaqDRqU9aOSY8gqmVPuY!%_p30VVxn zZzN-N@ZzWh{8u4k3CDVJ4J#>TXNFD78NvSU^J%^F?~kWkK+Vos7Lz(!B#>8LH6{7W z`BiMTo7o#Xp-uIszFNVZU>V2zVJvAc?`kYp`&URK^Dxvd8xf=BR!0CEPoEi-W|>EK zCUn2ZwwQU8#t1dy(B3-^2xA@9BH&+kxn^1CElVsRt!QS{{7%U6ZO zrqJ!A>BbkNfS{EXp<4C$-zEbN+le5^oYNV)BWE@?cERj!hxUg)SV*(VH|kZEaRrh| zX_5~|3a1g`3o$8950&~|-*%l5#hQ}u>Vm}_Xrqn<51V0Su~S8{s)cF}I6>p$71=K0 zyQkUa&x)>4aesiyYtQtM z7{>6pGJVX)k@On?g8%$E1p3RmsM7&xccgzI*U-}J(Hk=OL1EVTMOxITY+XCOqk6|K zs>aPy<*=B8R5IEg+lNyo)%hZi&+e_?*@b&dviVPk#?R*-tD8L8qeTITe-#)guu!)e zFlzmf^P+!9SBF+}}xip)K&C5x~ zIb~Z~d{LEL8^sCbFe$6pLnE2I)C^dBBK*zGLsRxiA5l*ABpj$rZ$vh{8a7jVRmAT( zdwqd#V?p?+M)dCeK#?7)Ds;yCy%+vHK4I6}y@Y2Ziq$QW98wdDn2t@P9J^F?T7!2N z+NP;)`zPHgZ|~OGwgnsAo#t$?z%eGI*}R_hYFIMjLm(jE^&3E!sgT{hRS)1a-_~&I zMY?6ha}Qnq>5reNqX`4zb%$deKn8!Xl0dW2*k$@l8$|1UwCX=gyRc!pP(PLX`wEY0 ztirf-Op)>v^<7uPOX?R(81&|cMB+d8)2qt*l5}x%XooS3?k;+2=}3BO*&y?^kHtuS z^bjM~`=L)%@HeW`39Q43my^F3zu$|8Fpkz|$pn!=a*SHPM`T&d{tkh=I}0T|sdTu0 zJ+C=fDeSXy7HkAfjGXf7XA}f-z&b1cq%520u6Bm{mOwiNY!ZmzE%USvn<-aH825&G zf@FOtW*p_?;M-q?k7lClo#o)XY$2pb|bCa zOX@JwjiUC5tsF=b%>HWIHz};I)CE)kb>pBt6gT_&zqC$KCxU>h-3DbA~#P zWXR!SU9iAoSo!M6Lk;_4yKTQB(#0aY@eeF=#6(m7NUSNQ^6* zpJ~>mT55TmNveX(8~L>wTq{zZJQa7S>>wpy*$x#Py?5F=BJ`wxLYf=w4epx{5@YdCMuub-jI5ML_uDBko| zKv{FA^HJbY9=3S6S$taZs;e;md?y=9TU%h5dZPuS{@P@?X-Ip8+S{t-4YKURcXW2zW|`(n|d zYGFojFEjnOU#a~z&NKq2yCglApJvNOy2@%J1A`2?73u6;K znVsLQ(eQlnA*X;n4SHlW0}e(bo@}*wZ$8zmvie;g=Y|A0R1K?^Fqlr7^>%I-v*|ZIJ?xiCj?l&jpwhg#i2^!u2Kj#*6CMmpV z$GqRH4EcT`@ueAyt-ejYNb{Qk_k-rOCb(|=tB@eOKnK&KrjJH`X5vE|UQe;MbOWF3 zf6H5XIu2lC@&R;m;2X>tv>LNT=r5Uv%Z-P70qXi_w!yH^WBq9d4F?ACd!L{>UA5Nu zKf+TK%J3*c$dmy}vGCBg$~6NZ)ylqW-5%E0@m=?kL9)L8r7~La8KHRFxM>s>HevCZ z_G96TyZXac-hR&;QbQ|zNJEc$=96yWf3#1n;d$?jo4qtX5|cIClNRH9>3Ov-nWdNy zf1KB3IG&JS?AgxaLd0PXO7hZ2z!c{)TrQu}@mYd~^yUfXP9#Y_l=3AG7?TRPhcgHs 
z$`ouInO^@Cj*`AP&z_o`X+bv|{{RT0)NS${qm-w+{Pwqp0O++5tDbt6?4n|Jj>RjF zeb#u`O0be+s$UG{N&(VAQCLS&#Q;CoF~A2{Jr>LBe+#OS49a~XpTOAt(0D3XV#h7f zPE>p`f04)a5xLtc1>X^uyT8`Yhg@2{N3VM{JaKiN&Wlb;75g!S$K0*mR}nYq@R{yTn_U^ILTmume@9QW*KFTKleMfkoZC zW1UGvJ;sr$wP>8H_2|n#F_;!k^x$4ZrQ336mCCIc1^2(T4n+%kH>g(?9h+P^`S7E> zX0}t;Bg+cdJ+ap2K~-Di(9X-w4{<0jA_?rac31(PEx5*Xad{ESm7LMwgp(C>lp`Q4l4iNEN3|EkP|jV z#%}uM6S^j5Fc1*|v|-_Fd`9#pSt_{gL@p5|sK{sgup zn{*yiQ0$hyJ+jU``x$^qQnv$TX3^}xj1^Sil3CPsogTj%FmqC-whio(H%W~22q$mtt%b zmI6gJ!-_9r&cJoqnp7NTb*Ce4Bku;kO;Zj16vhP#L^h6X9rI3BM&~p60FkU?0ZcgN z2OtiQ*|E1P`Fm-A3t#CcX1yF%jSVdz?H4%KsP9e3=J>9+m`|qkITL}R`4BXZ`RI|& zpAm>Y*}4`x6H9#z2gr_F}K?6F0HsQ2{k^~K`)V~r{v zkcnkzqe}hV^(%s-(&(P+QOG(HknoS8aZ?@wLZ75bEw^3(PB75y8P-L7H~?3=tmQS% zRKOCkU*ks`@Bf!hdaMPITO}ZqRnOht9F*aJfO^vmI@#PT3HY|=QIC0Y;eyJ;2lzDF zw{VEd@rB*6?gF)n5e9Bg1(*c$YG_j(! z|G$p&`0R~(y5rEWz4$=J+gez)Fuhya)$&Jc$8FchX+8B$?KH_lrv%U?Cjv=uIb77z z5MU|CzCyyzAL(~^SOFfKeZJ0#W3>w_PMM}NVIZYAil#GZv#+AXQ{fd{nkP@I07Y{+ zKnA~cvx|f-U+jB8T-IZjkg6Gitf?tlg3GcVrAOL*`z$$&{ySm+|ZlgDBaDa%115j&!FEq$bIcEi03Le02 zlTWEdheeVoB*Wo_BhX(7+ddr(O8Dnl0J8KbIZMR`RGaAH1J)>!Q6vek(+%DPt}(o# zkbEu@35SFE9IuDM8r-DjnlPU-UGG-srnuX0^orLaKN6Hra(cVF_w`mgMPCVj`>r>ZQw*OPedhSU4>t&HEkInSwkE^( zaY|G|+Zj$rUfG{lx14l6T$1I68s~XsTXp=XFnHpXWjVP%_|5(93fJO*6n4Is^xFY; zywabLZr}7yKI;dq>M{Sx(^!TC+RhM|*bIn}t?}KD7j!gBE|wO*tg$Ngqnaw^Gf>#b zKfwacqb&z+&8cJ(?e*FNGJmUA$`ze6!MJVJ|XrqEO63j@>EUc zcMArgHynP@v#oCC4P*23wujI$tE;8|Otb6W;!{a#MbIhz$qs<*?LX2#mNk9o=3J#6|9i zrn^JnbB_UXYVks(+sNW3FG0~-jPFQ<64O8#Ez{%>1^pK~dBSSf1;y7&n^mpf`dtzD zsRZ=ovA~NSKkrI|i6NLDuU&sH)S3mL)W_lc$v2YSl?1a@#n1a=K5qOSewghq6-h+Lj0L-z-s+a&cMpaW#tOyF~Y z&?@%6kFT-cHYjyHlTNy&AW}@3;C=nN3Gatjhrr&E(?KK6`R+7@AC}d}jMPd7q|aTU znuf1!(izE#_a-ZcQ_i+Wk$*I}arWk3-t@E>9jzQG0PPak@0V153K0(Hd|^4-OpBxQ zxW6scJ6cKq3P+pLu`JZfr16rj;ls^{G8HROm}09rB)pVrav`h}63=So$Uvd0veg6cWB-4{Yy4d^lm@e%E}6 z_g5^j2pZ`vNvO*~1COmjk;BjTA+s$g2yfsv2kz*zWQc_fJ_&Jz6XTVkBnyH+6=>d2 zogK7c5q~Y$vM|b)BYz`keE(W*W}*<8D6(vZwSJ!mPK`;Gkolgb$W&c=kzBogJgpDuQA?y zpx+qFO%utMrDl}1dJ@5+?-;?PO=qVV$MhhZq0#z1u)A%%{v#yvl_)(lc75ju-WNEw z=e_}ya&pdYttU8IHPi`3JI7iN?#zp2zTJI2^dtAgxc=nv@yex_Xkq)GmK6z^jf}s z1j%VAlJyz(Wqm$ma9&>_OyPC<@^nyRtA(8Z8ea0L@rinIu?mZ3Fd3<=R>R>Cqh>9u zBswW?rQGgflcH<+K$a^-sUOAk{rjz%w>|{i#DCmPqnKBqs`l#wd9cEm(9y->BrU7` z=77Gvtu+qt@3aU@nm5x%A>FUBP2v;%o`y(9x#ly}8z8v*HTOjM_dDb-$!{Rk{61f8 zTbn2ZZ#5Rp55g9Nn*+SYaVlr{&O>K1`&76M&()tva5NP_YS zVUBK_8ZKSX6n+%7#fzC~@xzS0@%+kYB(7JIHB9Z|XKy8N$3;R9JD^&HhhyH`EUlEFsC%z^}L>oQ*JmmjTrCCLgnJ0Tu%GnKb5Hf zatP~qyv9aUd~-P6A74HTTUBd1S6)0r zE3yzDM*wG-;C0(6AszxMR)YODE1}`Ij8o=a9-OaW78N04`WIJGriN&>(D>M2zZZ6z zl=>uPM1Q<^n@KrsxO}=y+eUGjs8_gc1o~a@i z8{MHWd5*r38$08RmW`P8x#f6`4#b8b6ZCYyl#7YkIoPg)?nKSYw~Y#xTm+wns}p6kG^PTbn}8ph0|;2 z8ayN4NKy;VXi*#(yk=EOvb1;4Sgo1OkhEga@1!_xMW#lf?d<$xn;JqoE#FFyryU|{ zc|32`h*warlU|kp?M0l}R`S{BG?HFI?MQ=&&!a?xV%a#cW|r@+MFS5SZ~TzXfawT} zWO9EDD2V^2D*MwaP$75ZYq90RZB%_)zOt3%EWIXgNw5`eH)S)k86c}3E-$+G{%J9m z%x3{Kwlh$Se|PdQ40pahX!2hfS7wWFhvT!Th-KFOP^VSIC9oosAr&Wjy30NM zEn1X+sBgZ;t|EE3 z!0C^m$m_AxqUfI1I`FC!aLzeq-{rCTf|pwl0utDo zRgM3sQ`5=g>poP4_zj_9PH3_}uf5zWQoo3i8CKTnp%w2llENsxusC-~e6nIZSIusl zr$m*gw{GHb&v1|%OT-5o=SR5+BIB3l(R1}sskO&R(cV?BKBNg^*dg;GFq7MFDa}O$ z4O0qQ8pDH@78*cM63Kqq-}pF2XJVl))cP+}V_P(0Di!fD z1`fT*aJp~9f^AptXm5`hEdUAT*a)uU3giykY&M{HEsEBm*X=&u{m#~ke z-u@i+scv9ynsh(Ch)WM(kH12nd=qW?6rCf2`PB6eksEEqvM)AsBm$JC73(Ddu#(A$ zbooSxA8-bd5$Jqgf1o8=s5x1GE37$+lU}ICAE(#~!S=f#FnmPh7r5rDS0)C4`kGn~ z|AxgYH$b(UdUwIX^+CvGuQX(5Bx^MZ-xiJ8nY(ROb+9+CSQdfq%@daJCkK|=#Bi}^ z+w%Sk7>7mU^)6s-@<^D`ALRZVaAWx;jh=Ur=)qrAleuiX_&k3Q6iXy>IUDPZ6(|?0 
zeML6T--lNkUmh6IBqilDF6WU5BNu$dBwYTKi9uI4Ygp1EX$jS2u+owpd&IC;A(s4r zX}ZsJVbk&kidS8;5_I3C$Vgk?9{h4RR&3vF(o)VFiy4tI)Gj`Apt<(Lz*)eH_B2NT)DPOWbhwcet2=eKMjzFbIV7AaF? z8k~rCvBknPU}T1Qy4n$wBT;C_IAJ4EmMc|R(h%O}^HNdnyhkF8xmqE6hGmLS{LPcB zUO_mgHHg&JJnmgejMj&~;zIQ;gcZ(Ix>4d|Cgd~J!Fg@ZX*sfTWUT%0b}$AJ z!#AhA)DW`y2sn@~dA-GQLdX-s!}g9+8^LO)gHWw6%OS9AkDFf{4yy~>n7Pje^$_Og zD$GJ|g)}}r672D~I1_gCStL>Av>Ug`-M*;MV!iG&6kgD(v!D15uQ|gy-G}O7HZ+m|-mhFtsNGvXa(u8UsLn$~T)~wO?F`eV5n^58@!BLj z><*`_JB^>G%*KD9bEq#v2}akywx1}d**bOph=zXsIUxQ=ZX-NWKee8s+y2pPa6nG6 zOqq(f*X!rJ;cfAJE>XTp1X&_Z2U;lz*EF)bj0+xBS))Ff_{gX&7*@s+lw3>IHkv(k z%XJ46-EQI*#2MCDh;&H#Y85q5&zPlselCuRW(wpdBg1&`swN`uYQJI~|4s$Ki=^Ci z4ICL)08T-LA4c797aX7M5?vnY=c5k(7ue+o3k_7y$Kcg+^U@CM(>NHv8GK?6m#;L( zvtSd2P57)qs(!1=O6;}!qr74TpN$*SqeYgPj)P3ATmnrzfK(&PVv7%pwD$8tIs@`6 zN|xVyP4py5lVPSl3pj(i&mq+pWPH{0G-MC1UF+`wOs<2VhxU|_&QDm}&5uy3mqL3L z6E(ZHs}tv-fEit@j9>f-8=>yYxe87S)a@tXzp}h17om7L#}d~53?1el4C8zJ1;w*s z^sJ*NQ|>5=fJ{u%Tlh<(#VjSq;-~$wKEpqGy*2*RCL9Qb;i4-H{59LSqQi#*eE;>$ zXG}R^Y3O&*Vt%ojD;4)FCCq(cXt=a!bl!sMP3W=ck7&ERL?P2Ac?`#Jx*}6y?`TH( ztY;BrUJ@?q53egUZ!Xd(dtx#+-tQuRq4*vcf2IJu))tBwo#o}%_S)|wsXZdjVR!J$ zRY1}-GhQ_R8SN{6AVnJBHeNk!4epFX5W49(4k!+r9@w**(T0v&-@Eeb;ipbAJ97xd z^-ayyA$MirzYKVIrzya?A32b!m_HD%oHOvQDsm)ChDoiQId#C#?zfZH98bK@2MWU^ zb{Gwh{Yy{;L^j3x6(|BFaw;*tP_wR3)U)C#HcWE9NBQD`-EPmuDv;jRn#_b zPe)~;_KcH#6#FSDy2lZS7>^LPb-NnxeDU4aH$lR(U^PW3;oSABNocIIvZ7MCv5PkV zf@6VvJhZI3fjnb30DF&Cf{5PxM6oXx+j6oCk?@gzu}1dZlrwVrRHw30FA}W2JFxGBm+7kt|{)|nMkL|@3RcSna4OD#VKP7|Wm_#I0Sc>g#~wl zYj6t|g1fr}cM0wmoZ#*n+yjB&zHoxO6Z{t0@7doz_niCvq9u(6MXgzLj?w$mM*$~p zAcS;He|X{)u6LeawL zz>v)+6`!{EbHL3f!OpG#8`RUIebv&{wr%AS<}_2wVYemqtNcx3x=_Pq2A@=<@F2U- zJxJRAY(+*E`cHXAZN|3JBmE7i%!Wk-+(!gvC(l3eE5&+#-!N8+{=`=%90xuvsT@0s z9nn$(cm9@uD4Gqe>mP3*yAzm&QdJEBMdu(GjuR}M z5`q^Ry&e>YE>h!h>AM3|wvHnlXx0zxaJ1`Vf*SnCY+Mc9ATk4%oIHXso;s*;FG`?t zsv#Elkf*9t&d%U|!Awm+9My5BEcco`UV6dDsl~UU15~g+Z%=-shxMojB)8}0)u&siP5xNoq=Z|Zf@oW^&pInw!H^Cj| zaSG%#m}eCn1B&Pc!BtF{$bfh;zjQc}rVi)hk>9=4=2l&J`(bO=x7wod1_N3Y7NqK8 z^k$ziKgXIjmLSRDzMD6K$!Jb|xTz@3*McLksL_%Gp`4k>Y{lS)P}*EN!ZOQNrz#5R zwfoEj&Y}Ja!7q(^daE-~DttUA2YUp5Y%4TZXG5vaN6ji^a|a9<4I#B)QlX~w8c=ojHu|LJ;1POGWD*LgNA)O~dBb>V$oJ*xODczXD)Ls{uZhTkl8TE!#pDB=Hqu4U zZRn?a$a6HKn>i+9Bdu!v%zvHYH5C3D5Z|l633zDudrQR0SufQZC-Z#Xp#_xBwE?5% z8y_qe5U67AOb-+1ol=I$_o|{2v7+6XQp%7XITQxgh4bx{eCcd>NZ!iuE(41$(dPC9 zWD;K(VmJb!&pW$R92N>xug&nUEdIclP*8(bDH%_?$PeVTMxOb(`hCy}x3dL9U(==r z4*g8tqQ;sxhqm{H417SE+cRfeodsnK8t`(Ln-y?!6R7)h`KQx3c=loP5-ag9GEig5 zo;)nC;+HK9R7mP<%=aQWbN#5*WYr=8)s@*v$8C+6i%n?skiDWYEnqiUt^Rf9iWdT$;Q8UWEtK)w=#2U(p z0vVPI%y!4$5mBF&^x+&xSXc{*SIhAWdLdR**|JJ9?kMfAe$6 zP-$Y1)CbU}We#vy(Fl86po50!HK?!NSHd$vmp;Xuoe%g*n>1Qo6Tp6jtwk+H>TerL zpf2_v6E3{ub25Raz<2Z>nkYS0wxJxNDVP-THJ+QR&=iyS{XdyS-roebVOVan0@0vFx( z(Z9+EDHNpGFM1LXCnn|RRdz)e=k{6DnS23Kw)bxm6eOmb0D<4CtHagP_**MnMX|g` z{)s_lACZ>9L}1`f0R`x)hIK07z1~n1vCw0dH-_B%_=W4?&*`Ta{!+bwn!6`~tyd2+ z+t|NY@6qM^Gf2jLU4n6N8!FoX2oG~meIs^joh~{DM1g+PayQ;(*>b^RtQ>(_5Q9`Q zdP!tTZ3Slb7#VuK{IdH)TBjdL6#%~o&*!K>8pAAWz=6N1nyfG6C&`~Zb?CxWwSUTN z9TABBtsK$M;YkBNK7Y=*upbdlGvD&^CT8M>HRg)pVb+>P%N@=^OMDahmN0J`iE9cA zcY3)!f4|W%7f5F5q?oSUy^|W*%p-^3##zR)AmvdT>Q-y{mf>i)|k5E_k3Bh zTtbR_(_m_lm!SbpfaFM0sm(MM`d(Oof6y3MS4j=`Q9P4dBDnlbbpIf<@}AvBblipn0T&uI)07 zEWCNH(zTwkJHH(F+8fWzsSjKWxbcz9fF)gkMf!>(75gmmlZ{;u)nRf>e0pcsQUTGQFUq@V z9PrvS8O`SxFn!MYIt)E_AmdLJsmw(mqT^KWe9x`j&e&i+OUa?IxLdV?pjtQDEA_kl z5Bj8q1V{wf^1W)Npu|>Wt49-mf7imzCJ09A`aMMkl>w7?cR^}3qwn5D%-tO_Z4}>TxV$t=~`B-*Er(g}A z=KOR|eIRslcg6__yRlbX?>eBheyfyD(HO0Fw~P-7kc*u40({T=Q)1GFF}Tg6Nn?t5 z=lvvKJjU{4xTH+)+L#HRp*a(+nSsCKPP}4>O^kG1Q5&`7A_nMGNN9)Z`aR(yRsX8>= 
[GIT binary patch payload: base85-encoded binary data, not human-readable; omitted]
z0zOmHmtR(U^e4WvX~$pb<4C>t%T_Jk-?`k(FSxg;PM6(k;&bwun4*R-fBm{)H9`a^ zh;T(UEeB1W#sHcI-nJvHdLm9B3f-nrE+Maq!k*oEzp)oTU_ZO+I@j(ZrKN)MWx_L1BLfdx2NHZz?m59=~XbgzV~TN zdoa1{-t6<4^k+LG!o|g^#hodX6vKRDI=-u)#f~|R9yC;X^#Bbr!Le`Aw{h%;;ZE=W zdMV>2yxyG(Z z`_%7A+M^e%xU702a=wX%Ks%5ztorAp-D{OH9c9C5484iYCUedeqjvXegzki@JpxL8 zwQ>mX%6|jL={-sPmLG!%P$?zu&Jw~s`3O@IcPD>QWIXcq|1zAJOP2ADn1 z@_w~OMS8oPi9tk!NPzZSz?&Z1x%ZlKyW$McH_1)*G(Qopnj271iiJ+zTZ$CY^3sFP zA$J60cASQdL4e4lvb?GbNT%c9kglpn_YTc#PsTbC7PXd7w0kNr`Ty0-Z zGUun}5xnFq076c0kuA)sRHZld-eKR{dvOSw{vdj`obhz0hkCw$wyC$WFP^1(5?-Vf zBxra;`}&aJVHZaa<2^$M3HfpA$7T9P*st9!h|j%JC%IJd?kl9Ft8i9zZiTww1Xkjfe>EyqD`B?A7X#GO*jf?AM>#UW z#ropTXC44ZHP}dIYxM*r@K2AQDc$>6&y=Hhm+y=%TA zpEcaK(u|~07eCg&VSo)?H#|=Ljt}f(z`UFCF*A_?aiE^`9r+`@^2ePxzWSE0p8L9E zBZRh?Nz0vvHlB7HcCE_gwa8(mZl@k4>QZj5ty*I)>kTGcWxFD&c~?W@8N_@nnY7v? zx*6x3DVov_c*U~XE;{ef;zJT*=CKHDmDUH<x^2n4i1ln&)yM8HwLoiz$s!}J)S>+!Xs|zM zF>@Kr_NTlTqvyD4A9hK3o~D z0VPXKC4oR`Vr|k-ZpC<}-F z1RclNA5yxL2;`piN1p6A52^qkvtt5`cDz25Z=%SRQzz>37U!}Ziqo7}7qJSeL(48o zdV>#4qWvp+eiGMDWF$FqBeoizwNpD|I)FW@H*I8IBg;DBuUSsbU-H+6crbQJX9dH# zH!RiL#pL!`Ly;=SRIja`;;3ye*b4kT3f79cvUWb|COE}klnmyy5(xfS)+CMZuU`Oo zon@Q*smZAKyW#GeGByh`5Pu(Iz}Nt#3%x_{vC}wc`O+QsZ6lT(!|)}T-?~X;q4|S4 zIF`)d;$bj0*z1k>$&uCw9S`lUsyd9>hORavnE16$NG?cn{@&H*e7Qa&_f@ba;z5x9 z&_Zw5^BR_O2g!~d@@+oO#r2sY=Uhs^100oc)?$*w%mB=%=oa9Io(7sVTU%a7W!uC~ z3vRVmu7N&P5$gGziDy3}X{hhJ|Ni!LGb>N$xOWpO0@mw8zL|yGjGD(RY9FS9nxsdc zNb3QVKf(2Gyq?Jf$7{5biSTtHI>}%Kcx_D z_YH5-KZv`zW>UAL9hCp7{x!R{i{@Lr7cn(2pKH0%D&l_rEx&Rg3-GNwg|v!fKR>V# zvxR>6t@2*$)7xjSr25#0?#^#vBVSGThtiTf%^(>_j|joRgH)vOf#Z-UG7Wx-;+FCo zJ;WD`xm@Mc0)#h%H>lT>Cn5vgVcwGRAD^?>Nrn@?0lQAC)#rYQt1N9r*Y?<`q0Sg( z0(#D%So(ZXIansg+~$L#t#*HZe@t>P^_7%gs3G&+1RVJ2(0R9%YGtBID5G{;4|A zG*qT5T#}(@^<&~%tGlKBvqhPWn9uWotJR%9YhnO|wF=8IcDQ_^kp+dYWc;n{vg&pxt&QW2O zeBR&H^&6>_exn)-T}x)he}S$BhKa%)%hB?owbW2`+ns2O-S3u_=@z})PZjmV0%{Dq zD|>^)nbDK?J+YeHBHL8SS@+Bka~<$Jkgs{8A-{gqiKj1h+9j(e)^&1a zHMaC8vGLut04K%ujM{#(nI^VP>q41sLLv=_dVm#paL#wZ47mhaTZ!z_D|GDJT*ui8 zAAViGYSxP2nPk2w{-|)$3uiBq+x^roejAhtvj5?}zV_|e*4Li|H#=4B$b9682ofC~ z@~DGIMZ$M|>RCBk-Z$ISB-zKnGHmDjJ)-k_fR6Kz7voGnl$(7oP4;jMc}3cHOK}mX zL;ABCKP<1>ZX{F2$NDq|OK}w;{3j=gHowR`q6) zfjBrz_oE}meZFJm%(beknRnj>^& zZ(O^w@79d`@8z|4nQ76H?&I;k%&Lq=4D__%93LNtE;u6(+$nwo4 zxUcnGj~$>g$8WB*K9x0Y=uOPcivmC`qcLvU@%`bZAR3M4AOy!hB_+^ZSxvxdpHiI|Z zwn4&S7+4|xW_{HAdcBd$ z391oy*T>x7{A8Ab!EOBgr=tmgS3FricegJ)N+GOBU28Ot^4$Y8Od^SYds$(WI26Dy zB^7bfI_0-^`-xA@&usqT!7zO>hJP1l0j`ctm9fQqsO#nkg8#7i^6S-Uv%be^6PtFY zH`9f7&U>zjM*UpO>7Tl}vF!tK@DF#WO{mOIfI)l#B$+>Vmrg?dljQ&Lss3IJz=sa? 
zyL^La?X@61k`+4Bxf%yT&9vpfXySk(mR8=X%$*bImo7aZa+qzm~?>2dnyOmg|X3lTU|xxsuzr|JPRf-yoC|^+2euht5=?|Cb$3>Fth$Wkj zZy@W&NSf;7hCFFR;kzOn^CE1f!{S0(;K-F7N-7qm((L&EPo(?j$8})hpsPXXs^=*XwSYk@?d44jDwO&h%JAtr4w9(_) z2e?`>`RPsCUTu>6W96ICXlUi_$Lsoy{`v$Lk<Oc`EvvdF^(ugmqPRKU*NTWsyB!zY}^I|NL*RSzT5 zZrYo!-I0sL!#unA!^vqj_3Iyz=>q;ms#g7tn=WxlJo+*9iymq~n=HNeHDL_5$v}TO zq$~!Pls3K|?lZE#wh~s(bLqokI={ToXm>fRU2mH!+-eJZb;Mq-QCY-myU?jS^4xo7RA5O~Vz`)^)=JZEjKC3a0v1?;?v}|xlYV(>d zVdge|&q4iQ8fg1%`DMf0WCy49+ybCdYz)2GhX5=l(`-tCeX&?-C;Pg~-=-}me-j+` zDw%?{hHi9l_GT;P_ZzZPfbMY=-0E?~n?;*>{$}mnX~f-HUWzopvd>Zf5wI&mdfCpU zO7kLmeeZU`tRsaYgG$;41HZVqz1$!_STu{(Fp~If-u+o;PQ1GRYAC_1pfZWUjbS`i z`+Q1SLI3-r4n*_QQ@QMm3J!2sepLIN#Z2UnTDWjP4IPjhHY+W?hbg`rN!TAZhcy+0 z&bA%%wTOqsNy`6<59)sL5cYSL zkz>aKe4}`^Zy)$y;N^T7gs0e{5{8nHL82G&Y{Gp7pLte6F@~OF3YFyzD`fMKNJn!X z)0^iqw3@J_H5z{>vdncmq4ZjfnZ{4Q-zq_TRyFM65idSTAg{0n5h?AjDp>wK2KqDL z;!_>Bv|R1ntws0uW~Y7Ukz&e>SlNf%y`sKw7Sf2a#rq}wz+VbC`s~@~zkAsK5zWvr zmELALZPTwMTfOf}pN~B{9*i11UJsM^egEQ}aCiFaqmTvL&{Lo+909V?D*n(0@xO;) zqW5`zE`GPCGsqkXe{_e9q{q;rEfIOoL*yt6;0_tcLmo5c(br8At2@q=(NsCC*H>9i z)mNE~vJ0;K2<0zbJn9tyK|@@Yf&d*Ed2m}EeY_r3la?-E1ASGI2(PL8t6OUsQlfDb zA-`Jhd9I**{;KXSje-rDG#PGkJK)KLy0U`gW7r>%R9ZahbZ#nhx2d)o)bd^9H2s5WiHe=^xWk|FL38A|5U3<5#)0He&ldYx*q!p3J*f&+e*<(SK# zA1P}Q0zE0t-afm#Qn04p!L|Ous$o}CGBzz8v5O@>_pRT%#=ViHBi7f8bvBR3o$!I2 zd7M&Ciri?mia)KQaW&I6HwC(t6s}GhC4p?kH~VF-c2{wRu0rDa3?}cbN05pT5H%e# zAQkZ$-CI@I9$&?x5~_(RoDlXgsUZx&7H6@6-VECLFv?$eiMbzkSlP5Ue_d5B?Y1fx zO_B}ntE7wmdjCcDe>q6}=NH9%42(G-2JBR})fIl`R&#(ab55-Tm=~iZcDZKbj=D7D zHfXQNWU<257g)Ri0hCOdu3p(fOct9rptu-qmTDZ;5>#R7ULGXzn6_Ad>rzZ&&Peq- zo~xVM2gXx8huj>;dR;7&QH!+gH~pUDuQco$&j{lZMM zVhMw^Z;_E+X3W41qBe5$Ac}&7Y;VXyKlz^28sXgzh5c z4O6V-oBzm0bWCVKI0+4s0jHS;{L*qcScEy8PMIY=Ez?h4D5Za~``PEtzQZ2_`(yF6 z9)Dl4|74qYaI#um{pG{E8r8#_%Y)4tPtpr-r4Mnhth8t$jQMHP9r43ZUzQX}HOGES zc0GhSO!%p%wX*xoQT^Ja@@U+b?oiLi{a2^r{c-B)|0qE7=y51+rk+9+#YKDVTEp0d zdU9GdmIXK#{tP9R&pv;w=N7o&z6r?Y2N3c8kFE)@X4gKTtJ0cf&3-=A=@NB&ZM@v6 z8ADOOo-ANzgglb2RZ8qk5x6te@`itr)Ac?TWgPB3GMqNxLdVJZ=69Fx{%Z1v(7(pZ zf0PCiI+!(a`&;AS4}cY50b!M|tJ{2*aJ}FB7xSBIf1doVO_)x-AC>!h+#U3$P3Rzq znl^49hVAiq{_Ak#Jb>D)k*z#6>-({aK6DY(_4a1FD2~P)h^*k(xPEZU#6Co;7P{|1 zGfsx_J1CiNH|Yc;z<9D?-bQW-P&zHO9wvZPMMjZ)hk#Iz2|?7@!R6E4R@9pBYOQ<^ zwc}?(fV4)9jQnh43ayD#DptEKn8sCiyEsJEIahB4@58R^{tIs~KL_fzcM6zO{}I3> z=oZoI7lj5m4tMv;r6W8>3J46;%Jq8Frej>;-s1{Z3ihkfuu%)=!Nval{t?*5XQ+ZJfOL(e(i&F|}3G1n9)% zUlXd-0g?&%xPf&=hhUHd#K0swNxkhR=J>B%SeE9CJQJXrVFZb%k{<%6Gew^6YAw+= zBjTB#gcnfc_sk@e(*H{az26bh)9A^M&w*(SXOutVHdO=KfCfpvH)FlloAwq2$)<5& zt`*a{Dw6>7;!UeEs#Me3{3aYaiNge||2Sm8k8dA66>{p+&3iLzj2|T~_MTDxiG?c= z@ogrKw1owa`*v1x!1tl|bz0?%Y@efr&Ba#FwOGZ)NXy4&sbBqnv1$qxIe#NZUF|Ba z4-53pzXOJrReBwD2hW89Q^~kp7r#BM&e3TMb!qyXO_5a^PX%PMMj-gb?v5`0pG;=q zU*gsB@FR^k|N7f3mSVK`OoyHw4-*Dk&&901wV{^78y(>Tvr7knU;mlw!Z_P27sO$y zasSolQqqzNtf{o+#!XrWX~{Ma)!#u~euuS>f`}TzeO`KC#h#+wDGshp`nNv+kG1rZ z2z>-FOmBJBD_o5EeNoqp5C8aEcg(n{-KN-yISat+55~Ul&%?fw$b?d`G0LZ;ldBQ{ zUMFJJ%TIx|r}lBLI*?MryFf>UB#@0vnOIbd_BXx*_Vxy4hWYC#arRiCT8{_jCJcz9 zzUDS7)O{NQ5S<7f(akW=GwT0y6sjLWYn%UU%mc%tJl#*Hh_h&XUs%pIImb2^8x-RE zTx|og8%l1U1>^@;|$nn0m%_>g~3L%vl$c`ecd8g-fgqIRYpMQLhyBh;W zNo>YTx&Yhw`hIiodd;Ij;1+<;A0jK*i~sLph!HiML7y=esCUtB-=SQ?WLxb6@00)D zeErPGeAB6G63-?96@Q8T5Z}&})tas2$(HBG!BxoK2>|zj*(PGI5_G4BCfBX%KUNSA zr!*V&@tDXR44^N=Mn&S zgB8jd@qaLp4?1+^%pY5i{|$W5WVOfnc)ytgs_u{eF!y~-R+X=>8c_o;XpjTa4N?ro5rWsmwR){ z@AOAL0Ud{zO92rh3rycP~YueU`2AXNoeZ0oZdC=VispG6-4=Th1omsN{b zyA9=$ei?l=bi-Y%87bb@Xby_s%z#gzI>L+{uUJ;(I7Ar-6MRj=#E<*P#-EDeA3dIJ zaS!l9U4HaLwcYu|Y(>asOXt5YR!dt0u*g6qI{9$-Q4YYQCV@k3z&2trQzkbQ)uvIoay+RF?(;?Qd=f)l*vz$h 
z27AY`YDxn$I@o}~tQ7e=>q}$@JW)NAU_;j6aA|WbE9^ zeFAW@*L=NiFAaWUq3(;bJb6!{t?`|5wY$qvmBLTI9mdv$lri2xCAYuhg04kZIPDhM z(Sx5&eW^FNJnlaBy>o4IL%tK#=Fn*<0MaUDta)1$&)|XgM4tXyS+hqUP`>O5z>^BJ zPG15wT!_O;NWNgT+o**9L7O%MpmOEk61NMs0H(orsM`aXRM|xyje8PYtTD;aoGnFw zqk_(nkO>)SeDnoACKEwJD?Im>Smxo>Q@4WG%B;V zp1^&`C)*El6Zm5g^Y!mTNOISKQic!0p27AIkD^Kxw!^Uq-9-Szo8`A_ITDAiRrNQF zE9?~js>*8Y7Q5OyZ8!*{$w_~Zv0Uh?P(b|OXe}p5V!1OAF%cb7B7d~&S1>&^IVnU7a ze4LN*LBn(nKJDhr?)I~ZmTZ}*Jir1zxF_^3j3NQ3Qw9M1Z#p5dZG3_VJOyB*7? zSf$X!L-tmqNWFY@{{Vc~pyY9G%M7(bTpWS=ASmZjixAyTUA;yJf|O}V@aYU0jSS6H zyKde#t#o7ytZri(c?Y%1&QZ(|URV#$fwTkV8nne!!HlSv*0U<2!sN>86uc?OV#WaC$40H|ERB^!cSURtrqPi0C1 zy6G)CB=YU%i25LNeobvGDU+C?2d(H5D|e|R-T*FB4KL&i`L9SC&y=+j7UQM%V1yBY z?KA0>SDEg~z5r_n>xH>SA@Oc@hJeW<129*!T;!O@2=u40JtgnfSD@Oz1S+2Va1`CA z276-R!onAn+;W++u@eY1)pEU=n?7Xdw!B*muiYYJHQ~=}T;_H&xA#vd{-uBy(w>gbO#YDRb4$A59?qa(@HgkPix~iyyYj4Y>NbU(BdgKe<*zfDg@7rN z<@%Lx10N-cs0C|dcs{ng0u5glr_q$?Hg`7vIKn3yOE(Am-mp0$YQh?nd+}de6L!$I zn>AnueEt_q?ZBMLOvx8v+UgNuE#rMf69(|=D;z+57GAmW`)g30P4&4JK*Q+GXK(G6 zM|};QqstUFl(w7}feQD#eS4VdIGZ;Nv6kCwhXKtjA}QtS7!V%#t-aiTLYfhkfyGYAum-qD!=yE{mboLZo){sj z?1`jdO&*;R^c8loCMVd~5sWg{01CzJJ`OAJ>F&gl1cT|X|ExXlYqcMc2ZFv`ly(aZ zBk_PD0C~3wFpg_ggUR#ln?z)khT80u;1rTZRe`S z8R8D;vSr{l308#a!`OTHfZdD<&;d#|cuGU@`d=jI99VyI{g<`>Uv~HBD`3U(@BWR# z|Jf1X*&qF^RepC&mtTDtkHhQP-OXm4Qk5~4wlqd0e&MsFRPj>zz(<;!_NW`sN5T!p zdqH9zdJeS;PW+Cn&T&Bm+2+!V27|qMr#5h=$(xiul>BFlN$_6|Mq?A}s|rB$y#qvy(*rHR8b z^4#5y`z6DLc*@5>o-#&??;w@zCa>CK)KPdnmkA#AX1cl zFMNDM2Ra2CFU6r;y~(0L&PKYAG4^t^O8rOWOqceLeWCsTteOl zJS&Xf6QCV5j!i=WO0$aGYOX3^!S^mq2><}L@6wR@JgLZic6`c9pbdHb4t3x)S8bNB zQmiIJ>+O^ba2BkYWU;_?y+Xg|cE#^*=zDjbzCKis9Mt^r&mH?;C9OmgF3}BvV^8YR z?}?&%!>(B4*$P=7X7p5f^Z&=*TSi5>es92nq=#bPU}yz0Rr6Kb*B#h%htH^X$9!zV@}RO{YQo4f{&c zc5Rzp>YI+iaSJ z+UZ?25p;Bu0e*9`1-l}K^-Kdq2x{1qR>E#1vyHV&W4fuXBJkn*R!E}#Af*IbiSt&Q zBP5t=uNd1miHv(s_E(gicT>o5&3qdz$Y0A8_6-vNu~9)B>j~d@(oO$*`DWd>R}kM~ zO-09Ex5$k>qY}PMemLSYl%@RD;NJjy-roUxH|+e%Ojm16C`cJEXDwdxI!%4v{ntqV z=0q|6xzxy{uwk=0$a-DZ^FW_{BPjfuk*VydJDR7FY!R%Q8{Phal4J7d7regf61fpu z`-gNq`wh`2+|%9*Xd02@ zIRuPjPNg+m9f|c zCIfmbI9F=L*Xx{W&jBIko+iC*;AgzqoY5_IJGhddmTCq&#V`v3`!%Ch|5yi6p3h>B zn=E%<9v8QnWBGJ6SC1PU>x)|ct;OWY5lCh2-e}4B;$NP0@{eRyQG%Q}1S`aDeyu&( zw{v8n&~s1fIM5of@&g%*GA{&B$(>**#_@xqZ42}$H>7}{mEo}J>1y;Kc>&XIv3}kO zYjrf=J`0wwbqhXE#-N+S7$>5WC5s)!E<1&pq{vPX zM(Yg{$(aX?Q(;oF-naSjh}2JmUD9`#_d(pYZ7ZdQvZ>KrV`jg^(Sb8rI*>#=T1Xhr z!qJEkZ z<+0z+n7;Crve=_4;(oofjsJ=6pA!B63M@=beg}44KGXvt8JUV&Bn8*Ih90H5*wrz+ zlZ?C=sz)C{PaA*utEE5b1g=POah7kO0fq5+t&11-xDgkNDi%4%KB0*Hqjklo<0nL5 zuV$#gw5r=noN_`$t*=x}@26|qcI>U&D=|%kcKXpP6$PjJ5~W0{JdMuqddnNBRlU23 zY&(b6eboe1JqA9HZv&)w4n(%3CrS|XR!;^9%WY4OL=>AUKMUlW#@uo{`AU{9{>Te@ zI>B**s^C*5?B=m4`v@Y0lDTG5WdaS+#{}p8uGixUCv8W0;O%NmmaV?tsS({DBiueY z2vQ(+j?pOrK;mF}2PFV2?*j=@rT1ECFFs?JMs1v(9uz!ASy$Aao!&&rN1TQaki*(& zVG4rwmCvgksKb^_i^TqPA~7JbsCb1t>_6)C^ZE~V(%UHv`G>yRp373ukITw19ho2j zonD8p^+RmvAeB(XqNU-ldQg-kjwcbFurZaIj#VF2MHj)-3b;ZXOkkALca0y-}f}f{Mqx0zK#pyWSS*d@kEg3;4*vDmXKnF9k>mIH#H4+P!GB zx3XbjilfNd0J2Pz{S|c5qLc6GQp4M2{??I`J zWB%y?4im!yrSl;9+@6BT0Yz5=(ncC;f}^)NKzR=Df!jVe-=^D$s#5~QVcn^#0sw-5 z_IHa&*gDuEoyTH=zY4%u)ANEK*Y-~CG)X06b7zib$cwok%bsqpvEr=4l8hl=rz z3ztl+JQvai#mHFH{Db0evL%G9L783X+7A^afwl$ek$|2AzV`DVTt8RuOlrUYaHR0d zC>N+Pkh>Ghu%+tN^_XuyLGK;nxJ}a8hbnF4p$&y4pK-zvZdRWKWa%fww=-hkFU#Hd z-$bqLCAn_X33;7^({`s{%n|j{GQrhNyJS6`+LA(CWcPI8OVOmwBlm4k!5D8>Bum97 zIwD-{>1|R+iLSQx2dcu1fD2uev^Jbm!6h_V*wcMWKY`aVO zvljvkTW3*Q@>=gf2sp9eLMCk>S2aDH>y6%QQZJufFSp=`P7AfXy9ztI4bnkO3B37y zU40Ra-YHRum>;T(Qap8Ec_StmHA3~Y?yYX!)#e8@n~Ixo*07WYN{O>53QP 
z0=B|Bf=D#>=5!6whZCi-&&23?#=ilCDR>{~JSu<@rCoLhOnCdg(3NLORUWUDC|rSY z^e`R#mKw>3In~;ozszM+f5!cf%h zP^!IT8YgWorz8O8cPa8YL|fG#JqIv9ugTED=hXa|B(nADT)(x6y{sMq*CxWyYF38U zP|tpxR`tSDB~g@lW};2&QSY6{9tfys3Za6m3O=FnVk8bzyoG>4+1{={NyS(PkR@uf zPleQ$+-^Gn6Jv5-Go_*4sZ{kCh!{tx=GK=PT)KAqwkYDp&@rDGZPM}w7Bdg62d4o+ z4MlCdJo(7ba_EMFQFB%BZ1KO<2Lnd_Agw z+iOOroXi4L*H6uN&<&`iDy{fKPUtoX(5BOX9(L1d>Om`3R>GKC%Q+6C?KmEMBRAg* zA?mRnd3r*>%Qloh`4*}r9fU7)?{VBrAx?!=DIH%;|z#g1}now#pdG=-3SE@BI@c@ zh~TqK{k2B|yoV@&jQl9MST6SuXuTJj(RwvM4UGxywSi?n?V~KCEzNkgscv_To_{mu z+MVYw1RdArM~8$!-dcb1o%M8e)b?zsd|gm+FxaC-v}V)^qV9RTUtY`4vmisa(NmY? z$K+8;2O0-{Xkt%d%8?CcY++<2w}}fCjW-p}-I94u(IkA0vq>TkgJSG*nVx4PKM@Av}`?Jry z+`m3OTjbwv#IW=KtFu0*eohp@7TE>6c-0P1kN6!zvr+WRT)VmO5rdA8{W**vam84k;5Q^?sj^p-2$l!Jx+LHK7jBR{J%ZaVX8ji*sKf$?0gG&#!xAt>UB6 z!;33|#*mqF3RiwMF4TI%LHf<@;-R51pQfgN@btT?2vQh6P$Uz%)MH4=& z*?(Z$!{dEIA#;DebZwX%AgdLTC#VBCfYT|qx$4iMcifIGBo5Lf=Q}m>B^-puSIuaz z|6q9n61wZHAS<*I!gW;$T)wE}jr`v)uaBYVmg?=e?>J#X`^~b>bR_Lt^y4f{K-&oi zaH!CiJ1I7;*uXBCgtMA7lV4y5cM+LSc1tE&U4btc!|fY3lO%i3qH z5@*%H7+zb3zaJguf!ZI_u5ZMIYFdJIe#{Jb&S7f#g5x$PEaR&+WLqE5hqIiZX38Mv zX<7k#mR`NOZH@IRYydfVMvTOJ+NWIO7~?-AC_0$L?B-!5!jlwqWujP&Jb$Ea(ok6b z?F@(dpS`p56osIkZ^YHZuTf$q zyRBaE5$Fl^N!bO>YW`;0>|>>xKgZmh&8ro+QM+BnGkoDXpOJnpw|K1P1Uk${xv90_9+?jfQ7)%PbuRNZQtRmO9T30IZrU` zEFHTVyeNQBT#4fSv#@2fMG8pFxY?fUEq1O?3z>J#6zHIga^1lbs~Jz0_gK!TFG7zN zN{THe$*saGL;3)%2ppcYIiSxFLZ-IwP2omCG^kAK^*!eqS?tRN39p&q69+au*#gEQm%NG)?szrl>q`z~T#Zzd&M z0#y}O`~*I)j^@}UUc0B=EDV9FfWA6P!1ebbL`So#=xUZ48|`!%PF~>pc(1=ehr(qc z&V|?024pD|*=BSA%T&N;vSQ<2pC~Gh+t~VbyX*OyYQZ6b^a`1%XdU+W&q{%eOol<% zm32B$bZ3~O%C_Bm@(~=uHa&Z&R_ea74v>?O*yJ}&5No`cKS_8!qy+~uPa+w?lZ)4r^mAW;ovex1J8K#tqTSezIw;sunF9J@tDb}uo+ZoN#sp(ueM(F?o z3!B}7c@Y1i<=5E&$K-|15v4`$mtT^4rGr@{}yFsVNPI{T29Y?7CrvvLm&^5jYzhYD2*G-m_C=t zJ|6E;FILla=z1;>Vgt+K_UEXiyvKn!q)b$4>7qZ@oqqG7pJ*tg>jL@IXRMpKs-2Zy zGghrh&#s*F5dY@7Mq^Iod_O0BDvNoj135*Knq@#YNtl_JESgemwK6+7rl0Q>s+pV> zo{ucIGzy{goOvuP{to)M*$DQx*#tJh9FF_1n|rnw5@x3Z%T4UUr);}OGiR$m0xrER z`B?o45hed7W)4FWvx_01<$wu%G2gCMzm zvnReRT1DA{B8`19r3dVv!;bwg@-=BG1#EpgjW}|OFzl+jRGpB%Stk1ab_`fNnS2bY$@Y_3ne}*{;0Ikw(z5qnlV&xx0^veM#zhIQWF8w^;w-7MdTuEhYNJg(LR#60n0?UxZdy0R8c< zpBwiqc%zLp>tFG7a3{^;;L?|g$FabHE0Zb}FgWh`{IgV!`Od>05YK*EX>tF%K^f?P zv1dPZ@&X#`Jf1D^AEz%RgSXuROoe0?6HeAYi{{74Jp7FfC>WsUKjm zZ$1Wd?UcZW?~>9&k7hsR`*H&Cx$Hagum0h(q2RJRcMR8l?p(r_3ge&mn)j#vh;(@kmUc zrYMeQQWK|pG4I(aEQS6s#r#vmOAL#-AX-LUMa2B}kI3VZA3|En$8i2{bOFmyG@wgu z{PzF-=C_gjeJTF;(|^YK!;JfHg8a82{#y|LEr@?Pkp9~q{$JQ0#P8r=dz8`ngyIiw z0q66X%XZprZzOn`XXIlyp4FxL z7e0^UoJzG-{q?rQzhQy)(NFWA=R8|npD0^xdXJ2xm1eT`VRg6QW_!Xb4?Nzh2?P}P zYU$52&%byfaLgK~jRIp|YEU4w`@HrbDjVrXa7t$v`6!@5s+0N4!{z#vfQ1Wi&`?;i zP$5}cWIEL6_*V~2{1^}PDO99$-)ywZY{l|aIRyHQ2wzrh=<_cVG%A!sUmwbZ(&^*? 
z@HFQ+&&s=BClKYG%@zv1cIes3Mo`c9`GlFmYh(h=J0{_J1Gf|h9{Lb;J!nfMucG=b zlwv5vyd}S)!u-+Mkd7hi_gAq`)IIF^Zi1C$`qPg+2f*^xnmPc|XHN;Nlt1 zJf08xr`YrSckiIu7yo57iQ@ogv!@}V^TzKW#GmIEmS+{tzkIOQW#H;2c(FuDKK#Su z{L%HtJGCYJ^T+*uAC$L&BYjc%j^97TRzKeZGF4Q+%#Xi+I_DGM#^WSEzk>hA%l+X# z84kO6{%sF^lL~@}%fdM{SO4Xl`RA&JUp~EE9N3f6`I*4Ye;E$|t-}6olM3?!@f6o) zy6vOi?eOQH0OA!YZs@$VRe)QS$)nW$b0dCRrO2caQ0J4Kp9{aaodjsh}C@M=}e>d zd>Kr)#GqGTk;VnE&+YEghx!OPtw#csvrT{K$S`3QRv2T~Cjt|J$8j0SAt%bFmz{&t zzwMD0>g(5Zkj`-3aM@p@JU24K9=5w^3ZOv90{Ik5mxF01P%~fwiMsbB^)1ki)aeym zy;}up3DcjZ$w0c|KWw55=k>3@@HafNij>^~+JAmrR z%w@z#8Fmvt@I0jCRG$G#CB^PX+uF1<%>ktI(OQ<_N=bqp`4#XxK(fuWA89`JwQ*Xu z$(PVOIsn!k#ikAl2`;9q7wQfIQnNFXSh7uxU_KC6B7eK)`$s}nmcW#yA+Vy05k9l#Zj*{Da|%edmP)cCWV2N$u97meN{;IZ`$RECs{y^^hl8GF@9BoUNe!PFmyl=^Fh<&6&?s#kJZ^<>4>A zPrpH}5TXN#dJ~$5w}u4Kp<<_Br5)g6-3}!$8!>mN-yMEY`NU`f6$h`fA^ zH8$4~F@K8MhylIc8hb~h8^ENDP!>IE&U|m(x%~^&ATOPBp;TX13jGp^&wA2W|UO?boL?MA4g) zTDtUv)ZE5lK=_P+BwzW)Z<`c@K6Yu5Px-NdI0GTpF77j{8FiFH|1MFRsc8d)BoLIe zK+uESK?TMSn4af!^zm|5iqxV9daF%h^fmb}+hiQmllvJ&(if|H;tf;>&^FPc9%ul0 zHP0EB<y|k%ew%Ek zOKG9Uid?_QDTrs{r#nvIMkI6qrC73VkJbD{q|+LIZLRru)9i;EGbH^=BemWOsLrmh zHnTEuuLr}8+1_tJ&*0orh?iP)#sv=){ zx*eJowAp8J;cfQQv|^~eIh(9v8( zh8`3$m@07*+G90d+yZ}M#$qp)Kr=b7U}@ozSO7#%Z9#fK9z(I~YGW8h+LHv>tX+s? zh3DX7mT@&2*F4kc1c^}fp?9ESif>;Q1BoS7kuf<6b6HGcX#wqIdot5>+uTlpu8xAq zz-OPHS0@9q^c_Ht;!cXkew_J4X@{fXXNMY)IQ!hI4thDdYXG08Cm`RYNWDPYM5{TH zLl3^2;ZiHoN1IiDlSZ#QzPqbY+`KRb0YYgCMq6Z3tAOm` zne!||uD_@GGo5bX8@&yes*vsDxUd)xAr=;nMSIZpX*>EzE8oOnS9KK4K5EwCvRpMF zYjs`*=z|r{OBR90llzd_-*713^WqKlT2yG;iNW3V_akZliM7e@xl?Jaonw>8woQ!{`q!ik-6Gc#R$QW;2lfM7xjhp$DLMn4FQh|QyTU#v+5Q%KKapY zgE`q_MECTvhS-ili_>f{-|-b1yt4uQ`lUm_+oQ78>fKs#+glm&SilWzSg9HU98Ksn z@&m2VsF7uXJ8%Q67X>Cc#i%M*`I(CH{9tSvuo^mb-NLk5cjh?eELVEw+hl6Q#RY@YrkiI5qJ~(08H6-yjN|55NyyFW^J-3!=?#SM?pl=B5pmW_5?&OnbL(Q#pk2kk;GTc&;QW z!tIz|K6`(BM=g(^jmL^*RwZ{A@AL}T>D_x%QiiEj-$DJ9rSsZelc0G7zM(Ct02O&r zz30(8C*ZW~#*v3WCI>i}shslw)vf%9>cz+j9Vnf((sRMzJ+!#p{{BEl%6oV*#j{?o zP9awZYLXAF-(5hCsOu3{ceBpcuJTF}f|gj8h|=3-jgO4ZdTTmn*SPXkPFAKs?iRZI zq)~5YEVa-IQ4cY2sL90k2fdXAMMa!?A-g)>d7&cvu@#z-*>G(!2;2ME&1A7Ud|vkF zQ?AC-!0lQV0!?qHU6g1QasBH79l>YpOJA~9ol?^FaRKaGxGdc=5G9T7oBOt-mD%Nx zY(hwDp8%)c^`nRIFYOghRxT#Hp+{k6ccb?BXzpa!)hI^zbte^0cJ|EFkvuSASu0oU#7u!diGzoEXsTP#nr&x}&!p(T z9;7)eO;k!TtiKS?O;Qi&qNr|tsd{x?mlog*ic}V=`zR?6M8W`V`TS@jAav8_18^`_6mD^A`hnGDTc*7X8m435$ft(_tB=VZAGxN z$LL|+J~A=YxG(3i-w$1t<8<%xN`Y*zbSQ3J=59$}`URV=B->8b-q&#byW*QL_tzQw zVS-@BdV(I^E_0i8T#b4$KHonL zA*AGU2gxd%O6CWx)AC76JMyZHt^1s`HkGJhLDS=qm)0Q^{2SL?zhIE@!>6@DhE`5w3OgDTW#h(1npdL zdfep4$3Xo;Evv3AH$Qmq?8(eborIG)rk2AB#|Hcc-5%cVVD-%v*U&bHblq@w=4O4w zK%&aY*#i1S`U104>A`bU@G8?}FVE+VV~uZshV~A+up)=N!2|wY$L~wAnMAhIQ(ggz^+&_0vs`W1hE;m6`oJ8I9ramG1jf7v7okzVq$6nlGDG{Q9`BO5M zr+0EvZ1?5Mr*I(MAZkHLy|xFOx?^&7&AFkevWcsMlPd~mx6NE}EbwXg=dRTbF=&Ar zjOhfWAs6lb3pq{+fXw8L(C>?X%`is%IF)70_0s}2+&}h8#Y3_(CDR|ZrDVnB`-D8& zrl%9tr0Jb!`6{Uh#%6~b@J$6nscZ~H~8j?<(o8|`-&qtzvca$c3IVp{bV zf);${2AD?8X>?w?0oCxmu^%?H%Z^M(3jS^NX#!d24zFAA5)tc!=FKb#E%eG6k45k+ z!L^Q@+g-LGKM8Z!1S^HA5n(b^TyWoGyD7bh^DLYQig&uO?Ma{8q9smm#dlUW9Rs&( zG4qF=m~(EZ58tsqbE$)VXcN)lKDg0)1bm#Z5~THpS7typZZM1|9{;CH<1|t%eAP?g z@+r9j(ru2Rj7|H+o8jBxUKge5?&}!3Z%U}$KrZh^sY~g4s46)9kWhQ0mWdymS`e$9 zA)%Lt@ng`CxCJYq1q*Asi4SUlN#EJMK z4x$;k3Q%>fGPNZY5=)&ER>M`Le-a%ReeE@-GGwQglv(Gkkcnn&^{n7kqBNkRKH4{( z{$+F0Xy!e>Zr2`6kDh8(teEvSXwMg`G?f+DA6@^9vmqqX-w;Tc!wsB$YZDgS?TOzJtecYF94&kj5p znPdZ(6wOfkj7fq4{CrqCHG!9@qXT)elvo*3C5oNkF0$MWlt|D>6H${Jd!@Bmn0y0p zd|qp|RR<}qK7EjHI=w9Q!TA1gX*od^TLh+fxBkicc0+1P&CCYLMo+m*UaOUj@CU;l 
z!KseSYT)jVLr|A_zw;24Po)cQCU^k95vAxCf1jOVTH0&UnxfgcIX#v*V)oven&P@% z#fRl-HH8ThC(VoY)k&Twt9q|BPvk1?%r&h8Et`a5$#LKvsk4CON+gH?9ba5;Zo3lI z;#~dJ$U~h`Y(X7WONqLaAePm3{_eByN47y^X^9~NeEcfRV@i?NQfjoKr!VG;+CFO0 z?}vzA!({e|*INeB_pxG|EhF7QiPxU|+=<*wy@TMnaVxWiTo>%=KX;Kjh^3=0@!i}` z1RdadA~&?BQhaNkX(a1mUovX;GGpVjz&F<{AD7T76M230zWG!viC>$dooz!WyVwKrtUA_lv##j+3>Q9G$C@ zr4-UaV77{a;%>MKveC(af;3Rc1}aqj+v|YSw=UQygK2e5k;RR~0%q^NXsP6#yFenf zh0`?c^MD55NixDmbgXl^gLd-XO6Dr7DZZrjEFS`h161ZpbTHKJEyH-kNJx9#f56b~() zT!~@NH&NE`1T#MeVA_WmCzB_G@{AdBCMxI139*3v?Qc=6zT(^1r5JBjDokpM2z!Tsmm_?Un+|5o?6LUclzWViR-e`Cw9d5>#75n^X?ks6F!L#>9D0;A}oKpK|lV; zD%M_zDso7?{Sy@W#v}}RxlWXn8#Gnn{UQ=KRYVd)bQ!O;sh!%I$0eKbOG4(w0Sf)`N$m{^}xEUFukN1m&9RnFO+!`;t#M3-XbBiGtn`IQxsQ4fVd4DPMc~ z->VtacUx?3ak$`;v#17KnoUBYI_1c5SFMu=FCT z@S1fRcE`Wtc-ATNx45=#Ri+T;=`(JqV;d)v(^hlDDZ6a|2^$IT}V zsB0fhg014`{18X@EdKmrA*Kj?EYGHdr;G)d@n6JKo(xKytsD107<6lD2s9xWImdB9 zhsEN(i&sNY;|(V)+mKt<2tiul?K{JVq;2Ux`lCL*gfuSP`ejX*p08<~8o9Wb6zpew zhq<9yI*uA@_tojOVC2cK=!EvQEa}<|#TlY8;leqe-UT-9SC|Yj1KpUeGF8*eD_i%s zaSpw)qaJFU0?_Ug`BB-&5%Q80_wC2+#RBY`qwHC>E*j*^iWW0Gl(E~dCQ3~JA0Jl9 z-e0EVx|S6m{qopAPA?N?SaHig)7u{ul`KEVloMghw^Ev~&Ce(GpuJ^>XjIreJG~!a zrKdOXsvS7oM$qSdTu>`>_}5pGPSH4uWo2fTS|OFxfkW+} zqbSWl^u;ch=~BNnIuX}OFFg@>j2>isL7%W}t3kKyqKf-_O^&RioM)klE7yC9-{{JA zOgZs0Ew*9xRFs>+?j3D55lL&|fF?8v>t6&F&jot1fcWAEI(Ft8PKPJPxM@nV)&vxR zmnlD%DPbi+d=dP&h}#q1E`-5YRuYwr_1utU+PI#3qK|MbFwj0nA2$zTVVq&XQZ*M?7D_!f{ z=;?@fGoi!0#$jbrR!?}au`XghxVCv3K9cwd-ezGt(Nk>Dg(jy*rZaCPBv6uW-4|R= zkI<&68`xb)v!%fLfZiQa#;`-P)bRyU4WpUA^#zG_>lez$+}rE2Sb@%uDd%gJ-T55s zYUin%^Q^_9F_+D(%a8@>ue2I-3+d06b>r(To4fCz=OzmgiZ|$}4f$AQRT#c7GI~au zt6sR@>)0kA*PBtAF-mP!`$|hyfS~?(g>8V|1SDG+BruP&(H|(k>!o-?DjyzW1()>Z zY4|O2`HFwU;zmwXEhew7>s+kVgB)cD(JuR+o(!nv_NFRJEDtI75{IHRj3#rolVQKu zD+={VDh1}Ooffy76oeX*(hFbXpv8J7bZ91c(^f@bca>$K(8F4#XJd8l=FruHX_m~~ z;~9gvAn|+HR6M3PPborAY5u{8b@pS&I4r{+`5 zirm$!*9IaqX*5o0>@R^zDS*<3966WrxSiIujfTsuLC&L0h}QJY@$NDt=*rU2br+gv z8FVhfm!0;Ub!ESclK!Z&I=ImEVV75|)6`o1OnJ(tjrNsX(C*s!5PMpRof2Hei5bX$ zooBq>ll1RC>_t2}(E>%u7q%Ae5SCP1@BpbY3eA*s!ek6F@eOO_$^_k9<`oKk7CK-1=%HP|%=h#-)*dv8? 
zZJM^t`6iu==}?uOQAH-mIi$}G55(^u=O!u5a~sbboCl;q#_}(EPNIjviW6%GpzI>~ zKy(S!p@R;QPW2+j zJYI4Xv$(c$9}9LYI(6v6PgMJy&S( z#LU;BJN@?e^-o;^;$r_bE5zjXX1^rS<0{*E^Bx|5$Avhf&WAk#6mQPsy15CnLWh_o zj-gMs+OFRIAOlz3{5dM4W)>^7xykr&^ZSq8!`ytKUQqu5O|7=`pBRWdHO)Vo{6+|b zhiZm%RNB}x`A#F46>YQXm_Ti28c;Zd6TT(C`{D`C6`CdXt1ugn(sj-uj7(iSFK--3 zox}1(S?=Ce^bq-CZz?R>)t8Dnp-<~1oarm}aqo5e&vAG5@Eq2v;^+%kZ-4B2?+n_a z`UTyqZnyB*1Mr|+%sm}*S9S&4=E0BS z_G&o7*za?rZ;nbXFxAQ?*YsxTd8H&aJYjIUEkr5FVJH; zoc)S^d28XO9bfD>rmg#fO8nf@lAzNyGTe;d;fbpH#Uv`J!IvA0%*x+E1*j?gN$!(R z?+7Dt2%-h;U6nwMDgMAUe5$3XYKL6=4Y*EgC_M+*UO9=bbCdUMb=6QG+r@ulhL>K@Pu!!qYN?s%RfyURmVCsSLH_6Y1JWgGpn zt57L7pV3lYzG?dioy~Ehl*iN64)dLCg{5bn+y|u#OQU^KC&ftNkyMzfl9>46h7ap6 zIV&`F>Fk8E*vYWFNUb>e*>p#RsK;kkf8Y2kO`%-06e~lwCO$q*nHq%cBPUOIKZ_Nc z2IfwbnW1`|n19%at(pr(!R_2U7>Mvp##nDS?=uQ*E90U6Snv5$E9Dwl(8 zrvN{6>~d{~N1^a+|(?D;#q>`(98iLme9u6v9t zo$hnx1~uk;r7)#O+n&iiwOt7v0*lFxP8-u6@V>?II4+NRuEoT-Dkmqf77DgEgWq?% zn}i=?)#l!%3CX;*da3P|g1W2wP9>XghaFhmju|Z&#?LfULCe>DRQPad@AxZ0S744Y0er&Q!=Z{w6}>aEG#FGdEmk?+1Nex9UEi2aS+HXG4T~$x^QM ze{gix72!dwCd$sK&BZ-2nl@>B>u(EWWEtFGE;O)UJ3x-8R#SS1-zWq(K9xc}l4CWz z7#T3nzrr5Ag=RF1#k6|rI?jd5I-W|xBh!=21WrebZ!7bTY!yA9^+ zdyJc!RVV8b?F~Eg>WA$6io}d&iSRI~TIF7C@O;|E4cR%6L;Fli#N}KYm!5fkcyHP` zjnWC>wjr$JwY}?#iH>rnV-WQglPJbKJ!0_O6FB9_b79!*qu7LP9Odq#hp>gacg_x3 zl_~S%Gw;caibc0IDR3?}4jLv{L?7}^-u2Lp@m%>1@^dR_GKmljqOGG`?;!T_7jVWq zS+j-g6NN21eVT`!3P348vS1aYk?QErwgS$ws9o-OE@#9XZ_&?gSy9?;^)+piFv-{- z@5~jlDdv`@6`UU0uEfh40NafqQ4z4d2lFXS-#t1U^8l2YO7zo`jX6 z%~k~av#!fz?vMS=ObSS=qS>l-<-X*u@0mZN?YU&Oa9m6VvlpqNGabFYz*?~`FhRYq zt(meACm=VAKSRBTK2zlTp=~Hj5ATK0o{kE;3ZnBVvja)JL?1Dj*}!pavb(@|3g9no z^08zm%V#zr-mN(tY7xhN3=+q|$migixh;n!C5}bo1$RE2gm~^Z68hKvIthN63V-|F zfn|S~&@KJxf4!=I6ufA1v?;4K=q;ZAzkP{wV$hy4+da+x_m{lIE9C(%D&O18+4^4> z3{!ad()Q~^Y4P)iaSd!(ui1cGigV$c*X@m-+>ZOxO$|$kuXYLE{&JZFyu(--^SFcvDPU<@_NN#9 z3k2#cOM>bgKImGr1I|>g?R+Uol{2`(P zt-Z7Y!p%=*4hTxez31n%?tJ1!UVse)H)>5)FWSQX5Vf0ja?CE8t_Q7fi*%<}4Y7b> zp3u2lC*P3t_1B;dPKdwr>xGa?7YIga+{rrny1J7y3BZ_VD~EK&hAG@HubM(aDOjWwD>H{*HB^u ziy}nf*CXebgXh=p4W#@O6tohPhyIDD@(y3`XPxrh((Pv5d@YIB=wsPcK$otgWH=LLbr>xi0_+Rp4ejY!% z|33Ya9K$Brd&Reg4}}jiAeTDMFPa*KHx<}w|7MA#c^W6_A`-h};byASe4?=H${mZ~ z7iA(EZn<9!*gSXemrl(^XTR{=M3x-31canA<*N11p?h5SxvR$S)bo+d7}i&e^8fK_+-!v_ub?u`bO{PryQ>Oo@w{Wr{=#IlMqb{SjDV= z8nx-26khbBLz>ryj{7w}Wr-hY6Fh&8om3*;d)l%4kru&$eeZkyl!BkONA&$kVhpwmy|R~fi;%lQ5IXU824|$ut5k#LN zQgay~7oD%(Bt7>})owP3$#dN^9jmR>CDtUFxctNLEUg2H^S!)^DdK2G; zcZlQ6I-$vB2eE{6!xd^+Blo;eSav=w)B$v6Ft5#JX(20aHeOE6$@2nC*xO!=eo$eB6N!OK+Q$Sl^z#J?%Sms)ZTRx~ zIr>#LSeDKRVwo7bAMR|9eQ-fqStyk^w9n%}B>fSN&eX10u6GZ=cN@(0LX%|N2!Av> zf85&Gny7e?;++DeN_>M>tr_{mbEDkgYP07HStr5g&4b5X`{Z#PLt&@Ek%8k`gsEi= zbgonM+WZ+GJimB*%KGgy?$R}v+bZz-jD_Gbgvje^VdKMmmmI~rQx@N5iVa#^&c|UV z6u~aiU6`NRF6Y08BN6moe1VnBl8*gWTM`+iWGmndjdV+B_+q`*4jECBfIV zy<4=t)irq_Y*A9*gWD$~QQNlRTbyhqefSDUxjteenj+^qT@V!)GXcx8^Z7*d) zT7;x?csWIqkh|KZK)YM~0j~vyF{!b~lF5>8_5f~>bSqg)RB^4x>=aLCB0V=eqBZJ3 zcd)L{o)gS$xa5qn{X8yNwH?NG$d@o^oCdwrclUdCw9O7COtVyWQp5-e?HFrX)SYVy zB5X%2)4KjlX=c?2%Q(Mj0f?F%b^7xfM*$jOWSyb;B?(a9)0mRt>w8m$=H zvtW_cD#^bj27B!;hQPUnG-{7A;V&R%DA3q>0e3Qera@yj+Oa>c!_+$eJdWB%L%Ogk zWmYs`Ck7=3C#ub&qJ$lrNowntoOJKpk1Uh@TBYxXb#ntvdQa=y{PigekxMPD4)bcn zly-$pWyXW>#?FI-)#688+vCuz8wKA5#J0!YJ$nA8PyUL_J_95q3+WvQ-?8xw3<x*ax~uM+g07@npo&Fx6Mf<|oE^&B3Y}~|-cH7d9A_yJ4Iy1@H>`a-jjy<}JC7Pr z=}Z2y!=|?pDTifyl^=iEP_@4@ac?+k%^Ie5T=F3Zi!S-ACrlc@?Y(ttOZj!oUhl*E zA>-YbZGXtU~wYniS;Ul$4Y7wg=EFO zwa1JPBl7N2ht!Nzu%bul2v)^kXik#w#CVY1(8kh$*cFOj-*{XT)8~cIN)zSPM{nz<5S8)qLgG+<86nBD^Ab|i0B*2%o_Sx^=?-}F#|NiiU z!3ZPIoX?!|zT}=CLxWHXBUj!^yBzoLb)-sE>{fml>7JFrO>mSz)U# 
z3EP5q?^Ql;9{mjK((50>s|9Fxr};I zNrU~%ymV@)E_OwpGy0xxg;^8ksrIZmZ_uKpeb;v)}su21^-Gt!Tr($qt=mCwPslz z)a-u9z@BGWO7KWq$#gl%kNL2T&@kJGEFMZQ*D^sH^};1JCt-9L8>WNKS#oowg>HUN z^W)NEoaT)VUET6afgOi9dex=?1&M9jod>1SX?Uadqlu#H;{cl>^Qie z*Hv|QXDe~YFrqi)eHrW(cvYd%<=kI4^EVQ5cf^TH7;o^6IWsD$VPEFWzrHB?Ye#{T zXOErnc3jb^OAxl&`#+(1Xh5nkLICAkcPuz+;!(h z-(wvHx*}7nDLI)+&ktF%w{9QkN-!>g6t%6;^NcgW`KQ9#R^EBwNOy;QmvFkhc8P+^ zWr%u|srmEWL}Cb_hB0s|`2MU{fpHcMvhu9bp2J*$J*_Vvd$QnlPHk2BcqiB8Ni-}L zZnNx5(<7g-UK$L1Z~uwe?5JQH`B$x;y|VE}pVwNWI`9uy<~UU9SLpJO<#WF)NAdKi z&>H16cYZ5XD)WW>4AqWiy4v$AC}6lrxGmOu^No&$@}qsg{!H>j2~vx5(wo;FH54-6 zUqKyMcgU;5H?{MqzX2W1jC){ZYEj_SU1V>0E0v&Vph$j!EO9Mi8tD21WC|>6W^yuj z*bm&OkWr6JTRQWjAhs?A4A<}lAHo6_6^o+dbCX8J2c{DboPvK8l85jFHYvaYhty{+ zPTBD;g+CEq=Rh4HALS){XSDQc{<_t}+NZSkr&)g2h^AU0e!~naE>2`|dpK-r3~9N@ zs|7_ZPb~)IY>`QhjfjO1#V~(IcH08818FU>i;3f3RD+j9V*rH(2c6q*p0XAHG~ zTA%3blhevSfyP3^403+Z)mBt=(aKut^wv+P;(s*a&Y;>b-(m#DtGr zE-;VEv5{&tcU!+!2*0$tI!4E6s&xM3SqsDKq8L5RQcbz6LE>-cfjKFw7q4ZhPAlAC z;YCJuhALX1aKEs_>682R>#DSuq7v6Z#=QHhIeS)YhMp$8dGe4zLg6IL;im^$)_QT@ zN+*z@AmQd#g=6XTv4;9F!F5!yow)Z80e*SPd zmCpT&AMmrc>BMekvoD@vSnIfRl?=AO#|m}bWx=CK&{F(azpre?yOK7m(F$f4U8^c4 zclraP{ztmYl;#+$|u1Zj@;;< zsbBa@KeDEbD%~*J#7}{#rC8lT z#DsUusEiS^@9M$dha{Jtoaz>vQ0&6)J8Xi;5A?ny$ELd}Q$c8@hb6CFOQZuQ;v4}6 z?K(t*>0e?L0Jeb;S}^#J_&&voh<)t|!262ZcGszfL7g}A>(eG9MdWu-1tLpvkmp?o zj(^fu@`mK9`$}i_-lS2wc)SK_D?;||+XJ)-yu_B$&*%JI=IS>kB`#RHX8L@K1#f9X z-V6|N+6sGCaI+>C`gYG-j5h36_`}<);^;4ZR~p(V_&v zZO8*VMlRQ=C&D_GOI&Zfr+znI zRNJ)Zgw`6Ux0Elr-egHj#A8B5g7B8Ud(+-oyQ_F%4n>Md+!CD1 zxPRhi=2pe5BI}Q)As8ByEoUz<)<~NlwKrQ=mZ(yh2YxIzz4#dyEq@ws#0roN(8#n` z<(88;2>->aJs<3KdXebvLiH&2!=)0Nx$k4+^|o|UT(RE|za3Wk1Uzel6sTSQOFc97 zEi`H-Ae8JfE=sP*-^DL+wXFSlStU!^NK$#5@$ZQy2}v(2y^)(LDq(0Ul&h=e*YyA? zqOHjA6YZ$=1>uKlq|M@SsMj8DDFPjb-j6vfp;W5W(u_#V2J$jH#p|MXrHEP@giBjg z=#8GRb{s`dRf`F`1JeDVCQY0AAKOJysC^Y!Xt2m&rDn0Tg~T0Ay$`%aj)_nnCo((j zOHQ7Pw*cbI`}T|5?=_zwbzFiDXP!GibQi|H(Ec_kUSR!elF+}hl5~-JMQ2Z%ztCke zc{MCBbqu=Sp%*}g^v66l&_tj=} zW#_lsI--RGFZ;hN2nd=n?*;P{-@q@%`%*u9duQAj!Ix*x(#18x+-;+8!|C~kl!|Wl zTAqqI6^V>Us73x{rJah+e%<|1Nw^$vo!zJ_fVb^RO+Fmj4o~A}9A8%>x^LjucUv}9 zXMLT`r;6sR0(RDIM>m!Op9->Vul-zzMYJ0_Y>sf0?7AV`{gw;5H@a`Kg*@u+Eij(| zH@r=szjxO6cDHb1#q*j&^gE4xA+KwsEioqMNoYq^T>!L5i_rezSD!n}#ck)5_W|@9 zj-;DcUy*mN6FT5E`P1#;WPR9$4qWvN9mcF(#`U}C{>{rpc6Y|BdoZsu3@L>YbJGKq z*-QF%D|>F#UGBX9(j;(u_^dL&+lq7a>rhZhvb^m?Q(8mgTk8TOGW6P%lYPJTH#t;$ zoZIvWqxMH>ubdQ;%3Uq$wz2{L*a>J#hwgBwAR(>4&u?4=Vg2^JSMIqOAH1Eo@KFhR zYG);N=@S>sS@-?#fplP2z4Pj0uXAqh-PN*rtZ~2aXlVb}w}!5vYicGvMEvK9I%G0v z6IS6Z={ff8XI!It&)>OZ^{IQQ{NUJ06x{^FgZJM#8D^fYH}hA~uHBO2v#EtWaiGy4 zDs0Bksd3!w!Y2*rUt1lIsAOcZ^0J}gYbab}!X+|s}o z%kk4HzY9T0f~$f+0N%Dm9zRHw-eMbjGDc)hqObG67upM9r#TFzeQ4L;dky81<#aNU z4=VbAtz9uTRsdfy>?2pMGS$9E&}*uB@*{|zLK&!Ao`c$)dkWh(iBa?3nQf6=j=&~6 zV&9$^kDiwY?_v1(2h}x}e=|!L8H^2qD_T4&XAb{nhcpbXFI#MP+^1PN^V(9{x)v8( zS*A5#Uqs#OCriPe?ZemMJ1#UPovJpDlHWOw3ugVuBQ;0*SDh={)NqLP8}z4I*YA#P zrXG4KWfu#7X%#jdjY}`B8-;*VStVj+kr3|qu0<5@hQCshJ zWwbtUA~P-`oj6RNaorT6$e}vxTGn9K9CrbEN+j(#A?q!^g>)Y&?c9N8GLsT zS^UwF7=SqizVAz3$&2Z#n|{pTa(K_T`oLFhUGgek(aWXC_d?L~6aKU6Fek{ybf%qz z%5FRWmqvNz$=(@k7Ke{Cb#Im*Oa4U(D@j~(8Rne2N67lk8~yI@(3V|Wc09fmu#Q95 zjNKW#7&{sBX5MvnZh5ja4`S732D~whhSzLxMvr2Hi;J}BH6HZ&ZsM7180RysKQ^pv zu1LFEZH}furU^Aaa=)AJ--OkQ0no2!{J*S^qS^~@{Y<#yqU7pDeZTo z-fLtt6-*zjne!4gnU?d$@AIxrK$X~s^!A_AhuwaAvKeCTZsNOj82c{=p}1f2M~L^?idQM{EG+l7G?u(eOp*u~ZU5AjG_M{}b=U z%=uGinB>dn^60i_nKZ#qPpoquzZx+EUMSX@CtcGyfLQ$~mq(l3I#bXYHfT%S)O*vF zNQ=;Ss~^GkdMyIz$df)u=VYppUEJ1I-yiDe_2Erjmb_Ql9@k2c-Qd`e?LsiU=>I3A zLuxiSJceFlxSrI{bAsbT#qv@J^B*_RZ+v)X)OzmiFcc>{8RncC*B3pMq$r@mQ*6=u 
zd29*Vq|aRe0WREX@HF^3)(`v!AG>W{uSEFXapv9P-ZCH9d5)QX!$#*Z$u5?fTBsRf zRI?}^NKQ~ZLX3rvos9{MC6D^3_K;1pUqeNcD70o2N7Dy` zMEFYtU*m6#HL8TVsCp?Ek1G;*U-i;t`d0k44XOIwdCRBe3s16Px^ZafWQ|_AtB2sR zDC4GKQP>y2Ws4AVuEI%uPpVa)$#)0YTAjE^Gv0(7X z73E!vEv|z0BVI$eNNvDRYsGHlz1qYC+S#?4C!2;Id_ysvc6oI4SP9hdud^<;i*wD= zj`h+rVJ+^v?cBIXCDTLpQ}?SCEs z%>hmW2$c4VO~tS#$j2`guSfOgg^PXpDl@XeOro6GhWs}0=D6JN$+(9FsvBEwT-Gl_ zOV3W9`z4v-<_8j%s%G3$(z9?!|=VlZ@I2AMsm(nr-s=;0;`^`3iZ@^h5)P2 zA8ZiRXKrr|s#Uq_Wj79}Ej>YvPjqssEre1Vef?8`**VyikU@l~pX&0lg_AcuYk+